#ifndef LOCK_FREE_REF_COUNT_IMPL
  #error "This is an impl file and must not be included directly!"
#endif

#ifdef _MSC_VER
  #pragma warning(push)
  #pragma warning(disable : 4127) // conditional expression is constant
#endif

namespace xenium::reclamation {

template <class Traits>
template <class T, std::size_t N, class Deleter>
class lock_free_ref_count<Traits>::enable_concurrent_ptr<T, N, Deleter>::free_list {
public:
  T* pop() {
    // fast path - try the bounded thread-local free list first
    if (max_local_elements > 0) {
      if (auto* result = local_free_list().pop()) {
        return result;
      }
    }

    guard_ptr guard;
    for (;;) {
      guard = acquire_guard(head, std::memory_order_acquire);
      if (guard.get() == nullptr) {
        return nullptr;
      }

      marked_ptr expected(guard);
      auto next = guard->next_free().load(std::memory_order_relaxed);
      if (head.compare_exchange_weak(expected, next, std::memory_order_relaxed)) {
        assert((guard->ref_count().load(std::memory_order_relaxed) & RefCountClaimBit) != 0 &&
               "ClaimBit must be set for a node on the free list");
        // clear the claim bit and detach the node from the free list
        auto* ptr = guard.get();
        ptr->ref_count().fetch_sub(RefCountClaimBit, std::memory_order_relaxed);
        ptr->next_free().store(nullptr, std::memory_order_relaxed);
        // transfer one reference to the caller; the guard's own reference is
        // released when it goes out of scope, so the count cannot drop to zero
        ptr->ref_count().fetch_add(RefCountInc, std::memory_order_relaxed);
        return ptr;
      }
    }
  }

  void push(T* node) {
    assert(node->ref_count().load(std::memory_order_relaxed) & RefCountClaimBit &&
           "ClaimBit must be set for a node to be put on the free list");
    // fast path - try to cache the node in the thread-local free list
    if (max_local_elements > 0 && local_free_list().push(node)) {
      return;
    }
    add_nodes(node, node);
  }

private:
  void add_nodes(T* first, T* last) {
    auto old = head.load(std::memory_order_acquire);
    do {
      // (re)link the chain's tail to the current head on every retry
      last->next_free().store(old, std::memory_order_relaxed);
    } while (!head.compare_exchange_weak(old, first, std::memory_order_release, std::memory_order_acquire));
  }
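
  // Illustration (not part of the library): add_nodes is the classic Treiber
  // splice. The same pattern on a plain atomic stack, using hypothetical names
  // (stack_node, top), looks like this:
  //
  //   struct stack_node { stack_node* next; };
  //   std::atomic<stack_node*> top{nullptr};
  //
  //   void splice(stack_node* first, stack_node* last) {
  //     auto* old = top.load(std::memory_order_acquire);
  //     do {
  //       last->next = old; // `old` is refreshed by each failed CAS
  //     } while (!top.compare_exchange_weak(old, first, std::memory_order_release,
  //                                         std::memory_order_acquire));
  //   }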

  // head of the global free list
  concurrent_ptr<T, N> head;

  class thread_local_free_list {
  public:
    ~thread_local_free_list() noexcept {
      if (head == nullptr) {
        return;
      }
      // on thread exit, splice all remaining local nodes onto the global list
      auto* first = head;
      auto* last = first;
      auto next = last->next_free().load(std::memory_order_relaxed);
      while (next.get() != nullptr) {
        last = next.get();
        next = next->next_free().load(std::memory_order_relaxed);
      }
      global_free_list.add_nodes(first, last);
    }

    bool push(T* node) {
      if (number_of_elements >= max_local_elements) {
        return false;
      }
      node->next_free().store(head, std::memory_order_relaxed);
      head = node;
      ++number_of_elements;
      return true;
    }

    T* pop() {
      auto* result = head;
      if (result == nullptr) {
        return nullptr;
      }
      assert(number_of_elements > 0);
      head = result->next_free().load(std::memory_order_relaxed).get();
      --number_of_elements;
      // clear the claim bit and add the caller's reference in a single step
      result->ref_count().fetch_add(RefCountInc - RefCountClaimBit, std::memory_order_relaxed);
      result->next_free().store(nullptr, std::memory_order_relaxed);
      return result;
    }

  private:
    size_t number_of_elements = 0;
    T* head = nullptr;
  };

  static constexpr size_t max_local_elements = Traits::thread_local_free_list_size;

  static thread_local_free_list& local_free_list() {
    // cache-line aligned to avoid false sharing
    alignas(64) static thread_local thread_local_free_list local_free_list;
    return local_free_list;
  }
};
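
// Illustration (not part of the library): the free list is two-level. pop()
// and push() prefer the bounded thread-local list and only fall back to the
// CAS-based global list; a thread's leftover nodes are spliced onto the
// global list on thread exit. Assuming Traits::thread_local_free_list_size
// is 2, a hypothetical sequence looks like this:
//
//   free_list fl;
//   fl.push(a); fl.push(b); // both land on the thread-local list
//   fl.push(c);             // local list full -> CAS-spliced onto the global list
//   fl.pop();               // returns b (local LIFO order), without any CAS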

template <class Traits>
template <class T, std::size_t N, class Deleter>
void* lock_free_ref_count<Traits>::enable_concurrent_ptr<T, N, Deleter>::operator new(size_t sz) {
  assert(sz == sizeof(T) && "Cannot handle allocations of anything other than T instances");
  T* result = global_free_list.pop();
  if (result == nullptr) {
    // no recycled node available - allocate a new block with room for the
    // header that holds the reference counter
    auto h = static_cast<header*>(::operator new(sz + sizeof(header)));
    h->ref_count.store(RefCountInc, std::memory_order_release);
    result = static_cast<T*>(static_cast<void*>(h + 1));
  }
  return result;
}
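
// Illustration (not part of the library): layout of a fresh allocation. The
// header holding the reference counter is placed directly in front of the T
// object, so `h + 1` is the address returned to the caller and the header can
// later be recovered from a T*:
//
//   | header (ref_count, ...) | T object ................. |
//   ^ h                       ^ h + 1 == result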

template <class Traits>
template <class T, std::size_t N, class Deleter>
void lock_free_ref_count<Traits>::enable_concurrent_ptr<T, N, Deleter>::operator delete(void* p) {
  auto* node = static_cast<T*>(p);
  assert((node->ref_count().load(std::memory_order_relaxed) & RefCountClaimBit) == 0);
  // the destructor has already run at this point; if this was the last
  // reference, recycle the node's memory via the free list
  if (node->decrement_refcnt()) {
    node->push_to_free_list();
  }
}
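
// Note (illustration, with a hypothetical node type my_node): a delete
// expression runs the destructor *before* calling operator delete, which is
// why no destructor call appears above - the operator merely drops a reference:
//
//   auto* n = new my_node(); // ref count == RefCountInc
//   delete n;                // ~my_node() runs, then the last reference is
//                            // dropped, the claim bit is set and the node's
//                            // memory is recycled via the free list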

template <class Traits>
template <class T, std::size_t N, class Deleter>
bool lock_free_ref_count<Traits>::enable_concurrent_ptr<T, N, Deleter>::decrement_refcnt() {
  unsigned old_refcnt, new_refcnt;
  do {
    old_refcnt = ref_count().load(std::memory_order_relaxed);
    new_refcnt = old_refcnt - RefCountInc;
    if (new_refcnt == 0) {
      // this was the last reference - try to claim the node by setting the
      // claim bit instead of dropping the counter to zero
      new_refcnt = RefCountClaimBit;
    }
  } while (!ref_count().compare_exchange_weak(old_refcnt,
                                              new_refcnt,
                                              new_refcnt == RefCountClaimBit ? std::memory_order_acquire
                                                                             : std::memory_order_release,
                                              std::memory_order_relaxed));

  // true iff this call set the claim bit, i.e., the caller is now responsible
  // for destroying/recycling the node
  return ((old_refcnt - new_refcnt) & RefCountClaimBit) != 0;
}
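
// Worked example (assuming RefCountInc == 2 and RefCountClaimBit == 1, i.e.
// the LSB is the claim bit and each reference counts in steps of two):
//   old == 4 (two refs):  new == 2; (4 - 2) & 1 == 0 -> false
//   old == 2 (last ref):  new == 0, rewritten to the claim bit, so new == 1;
//                         (2 - 1) & 1 == 1 -> true, the caller must
//                         destroy/recycle the node
//   old == 3 (one ref, claim bit already set elsewhere):
//                         new == 1; (3 - 1) & 1 == 0 -> false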

template <class Traits>
template <class T, class MarkedPtr>
lock_free_ref_count<Traits>::guard_ptr<T, MarkedPtr>::guard_ptr(const MarkedPtr& p) noexcept : base(p) {
  if (this->ptr.get() != nullptr) {
    this->ptr->ref_count().fetch_add(RefCountInc, std::memory_order_relaxed);
  }
}

template <class Traits>
template <class T, class MarkedPtr>
lock_free_ref_count<Traits>::guard_ptr<T, MarkedPtr>::guard_ptr(const guard_ptr& p) noexcept : guard_ptr(p.ptr) {}

template <class Traits>
template <class T, class MarkedPtr>
lock_free_ref_count<Traits>::guard_ptr<T, MarkedPtr>::guard_ptr(guard_ptr&& p) noexcept : base(p.ptr) {
  // take over the source's reference and leave the source guard empty
  p.ptr.reset();
}

template <class Traits>
template <class T, class MarkedPtr>
auto lock_free_ref_count<Traits>::guard_ptr<T, MarkedPtr>::operator=(const guard_ptr& p) -> guard_ptr& {
  if (&p == this) {
    return *this;
  }

  reset();
  this->ptr = p.ptr;
  if (this->ptr.get() != nullptr) {
    this->ptr->ref_count().fetch_add(RefCountInc, std::memory_order_relaxed);
  }
  return *this;
}

template <class Traits>
template <class T, class MarkedPtr>
auto lock_free_ref_count<Traits>::guard_ptr<T, MarkedPtr>::operator=(guard_ptr&& p) noexcept -> guard_ptr& {
  if (&p == this) {
    return *this;
  }

  reset();
  this->ptr = std::move(p.ptr);
  p.ptr.reset();
  return *this;
}

template <class Traits>
template <class T, class MarkedPtr>
void lock_free_ref_count<Traits>::guard_ptr<T, MarkedPtr>::acquire(const concurrent_ptr<T>& p,
                                                                   std::memory_order order) noexcept {
  reset();
  for (;;) {
    auto q = p.load(std::memory_order_acquire);
    if (q.get() == nullptr) {
      this->ptr = q;
      return;
    }
    // speculatively take a reference; this is safe even if the node has just
    // been pushed to the free list, since its memory is never deallocated
    q->ref_count().fetch_add(RefCountInc, std::memory_order_acquire);
    // verify that p still points to q; otherwise give the reference back and
    // retry
    if (q == p.load(order)) {
      this->ptr = q;
      return;
    }
    this->ptr = q;
    reset();
  }
}
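
// Illustration (not part of the library): why acquire has to re-load p after
// the fetch_add. Between the two loads another thread may swing p away from q
// and drop the last reference, pushing q onto the free list:
//
//   T1: q = p.load()                  // q == A
//   T2: p.store(B); drops ref on A    // A is claimed and free-listed
//   T1: A->ref_count += RefCountInc   // safe: A's memory is never freed
//   T1: q != p.load()                 // detected -> undo increment, retry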

template <class Traits>
template <class T, class MarkedPtr>
bool lock_free_ref_count<Traits>::guard_ptr<T, MarkedPtr>::acquire_if_equal(const concurrent_ptr<T>& p,
                                                                            const MarkedPtr& expected,
                                                                            std::memory_order order) noexcept {
  auto q = p.load(std::memory_order_acquire);
  if (q != expected) {
    return false;
  }

  reset();
  if (q.get() == nullptr) {
    this->ptr = q;
    return true;
  }

  q->ref_count().fetch_add(RefCountInc, std::memory_order_acquire);
  if (q == p.load(order)) {
    this->ptr = q;
    return true;
  }

  // p changed concurrently - give the reference back and report failure
  this->ptr = q;
  reset();
  return false;
}
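
// Usage sketch (hypothetical names, not part of this file): acquire_if_equal
// is useful when a traversal already holds a marked_ptr it expects, e.g. when
// protecting a `next` pointer read from a guarded node:
//
//   guard_ptr g;
//   auto next = node->next.load(std::memory_order_relaxed);
//   if (g.acquire_if_equal(node->next, next)) {
//     // g now protects `next` and it is safe to dereference
//   } else {
//     // node->next changed concurrently -> restart the traversal
//   }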

template <class Traits>
template <class T, class MarkedPtr>
void lock_free_ref_count<Traits>::guard_ptr<T, MarkedPtr>::reset() noexcept {
  auto* p = this->ptr.get();
  this->ptr.reset();
  if (p == nullptr) {
    return;
  }
  if (p->decrement_refcnt()) {
    if (!p->is_destroyed()) {
      // the node was reclaimed but never deleted - run the destructor now;
      // the memory itself is recycled via the free list below
      p->~T();
    }
    p->push_to_free_list();
  }
}

template <class Traits>
template <class T, class MarkedPtr>
void lock_free_ref_count<Traits>::guard_ptr<T, MarkedPtr>::reclaim(Deleter) noexcept {
  if (this->ptr.get() != nullptr) {
    assert(this->ptr->refs() > 1);
    // the deleter is ignored - the node is destroyed and recycled via the
    // reference count. Drop the reference held by the data structure; the
    // guard itself still owns a reference, so the counter cannot reach zero
    // here.
    this->ptr->ref_count().fetch_sub(RefCountInc, std::memory_order_release);
    reset();
  }
}
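
// Lifecycle sketch (hypothetical usage, assuming the node was unlinked from
// the data structure beforehand): reclaim() only gives up the reference the
// data structure held; destruction and recycling are deferred to the last
// guard_ptr that lets go of the node:
//
//   guard_ptr g;
//   g.acquire(head);   // count: data structure + guard
//   /* ... unlink *g from the structure via CAS ... */
//   g.reclaim();       // drops the structure's reference (and empties g);
//                      // the last remaining guard runs ~T() and free-lists
//                      // the node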

template <class Traits>
template <class T, std::size_t N, class Deleter>
typename lock_free_ref_count<Traits>::template enable_concurrent_ptr<T, N, Deleter>::free_list
  lock_free_ref_count<Traits>::enable_concurrent_ptr<T, N, Deleter>::global_free_list;
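
// Usage sketch (illustrative; `Reclaimer` and `my_node` are hypothetical
// names): node types opt into this scheme by deriving from
// enable_concurrent_ptr, which injects the counted header, the next_free()
// link and the free-list-backed operator new/delete defined above:
//
//   using Reclaimer = xenium::reclamation::lock_free_ref_count<>;
//
//   struct my_node : Reclaimer::enable_concurrent_ptr<my_node> {
//     Reclaimer::concurrent_ptr<my_node> next;
//   };
//
//   auto* n = new my_node(); // served from global_free_list when possible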

#ifdef TRACK_ALLOCATIONS
template <class Traits>
inline detail::allocation_counter& lock_free_ref_count<Traits>::allocation_counter() {
  return allocation_counter_;
}

template <class Traits>
inline void lock_free_ref_count<Traits>::count_allocation() {
  allocation_counter().count_allocation();
}

template <class Traits>
inline void lock_free_ref_count<Traits>::count_reclamation() {
  allocation_counter().count_reclamation();
}
#endif

} // namespace xenium::reclamation

#ifdef _MSC_VER
  #pragma warning(pop)
#endif