6 #ifndef XENIUM_LOCK_FREE_REF_COUNT_HPP
7 #define XENIUM_LOCK_FREE_REF_COUNT_HPP
9 #include <xenium/reclamation/detail/allocation_tracker.hpp>
10 #include <xenium/reclamation/detail/concurrent_ptr.hpp>
11 #include <xenium/reclamation/detail/guard_ptr.hpp>
13 #include <xenium/acquire_guard.hpp>
14 #include <xenium/parameter.hpp>
// NOTE(review): this region is textually garbled — original line numbers are
// fused into the code, and several structural lines (the `class
// lock_free_ref_count {` opener, struct closing braces, access specifiers)
// are missing from this chunk. Comments below annotate only what is visible;
// restore against the upstream file before compiling.

// Template header for a std::size_t-valued configuration policy; the policy
// struct it introduces is not visible here — TODO confirm against upstream.
46 template <std::
size_t Value>
50 namespace reclamation {
// Traits bundle for the lock-free reference-counting scheme.
// InsertPadding and ThreadLocalFreeListSize are consumed below as
// Traits::insert_padding (header padding selection) — the free-list size is
// presumably used by the impl header included at the bottom of the file.
51 template <
bool InsertPadding = false, std::
size_t ThreadLocalFreeListSize = 0>
52 struct lock_free_ref_count_traits {
// Rebinds these traits with additional policies (right-hand side of the
// alias is truncated in this chunk).
56 template <
class... Policies>
57 using with = lock_free_ref_count_traits<
// Primary reclamation scheme template (its class opener line is missing
// here); the declarations below are its nested members.
80 template <
class Traits = lock_free_ref_count_traits<>>
// Template header whose declaration line is missing — presumably the
// forward declaration of the nested guard_ptr<T, MarkedPtr>; verify.
82 template <
class T,
class MarkedPtr>
// Template header whose declaration line is missing — presumably a
// policy-rebinding alias mirroring the traits' `with`; verify.
86 template <
class... Policies>
// Template header whose declaration line is missing — presumably the
// concurrent_ptr alias; N defaults to the pointee's declared mark-bit count.
89 template <
class T, std::
size_t N = T::number_of_mark_bits>
// Base class users derive from to make T manageable by this scheme;
// defined further below in this file.
92 template <
class T, std::
size_t N = 0,
class DeleterT = std::default_delete<T>>
93 class enable_concurrent_ptr;
// This scheme needs no per-region bookkeeping, so region_guard is empty.
95 class region_guard {};
// Reference-count encoding: the count advances in steps of 2, leaving bit 0
// free as a "claim" flag (refs() below shifts the raw value right by one).
99 static constexpr
unsigned RefCountInc = 2;
100 static constexpr
unsigned RefCountClaimBit = 1;
// Optional allocation-statistics hooks (macro and counter types come from
// detail/allocation_tracker.hpp), compiled in under TRACK_ALLOCATIONS.
102 ALLOCATION_TRACKING_FUNCTIONS;
103 #ifdef TRACK_ALLOCATIONS
104 inline static thread_local detail::registered_allocation_counter<lock_free_ref_count> allocation_counter_;
105 static detail::allocation_counter& allocation_counter();
// enable_concurrent_ptr<T, N, DeleterT>: base class that places a hidden
// header (ref count, destroyed flag, free-list link) immediately in front of
// each object, via the class-specific operator new/delete declared below.
// NOTE(review): text is garbled — access specifiers and the closing `};`
// (and the inner structs' closing braces) are missing from this chunk.
109 template <
class Traits>
110 template <
class T, std::
size_t N,
class DeleterT>
111 class lock_free_ref_count<Traits>::enable_concurrent_ptr :
private detail::tracked_object<lock_free_ref_count> {
// Non-copyable and non-movable: the object's identity is tied to the
// header allocated directly before `this` (see getHeader()).
113 enable_concurrent_ptr(
const enable_concurrent_ptr&) noexcept =
delete;
114 enable_concurrent_ptr(enable_concurrent_ptr&&) noexcept = delete;
115 enable_concurrent_ptr& operator=(const enable_concurrent_ptr&) noexcept = delete;
116 enable_concurrent_ptr& operator=(enable_concurrent_ptr&&) noexcept = delete;
// Construction clears the destroyed flag; destruction asserts it is not
// already set and then sets it — a debug guard against double destruction.
119 enable_concurrent_ptr() noexcept { destroyed().store(
false, std::memory_order_relaxed); }
120 virtual ~enable_concurrent_ptr() noexcept {
121 assert(!is_destroyed());
122 destroyed().store(
true, std::memory_order_relaxed);
// Only std::default_delete is supported, because allocation goes through
// the custom operator new/delete that carve out the header.
126 using Deleter = DeleterT;
127 static_assert(std::is_same<Deleter, std::default_delete<T>>::value,
128 "lock_free_ref_count reclamation can only be used with std::default_delete as Deleter.");
130 static constexpr std::size_t number_of_mark_bits = N;
// Current reference count; the raw counter's low bit is the claim flag,
// so shift it out before returning.
131 [[nodiscard]]
unsigned refs()
const {
return getHeader()->ref_count.load(std::memory_order_relaxed) >> 1; }
// Class-specific allocation; definitions live in the impl header included
// at the bottom of this file.
133 void*
operator new(
size_t sz);
134 void operator delete(
void* p);
137 bool decrement_refcnt();
138 [[nodiscard]]
bool is_destroyed()
const {
return getHeader()->destroyed.load(std::memory_order_relaxed); }
// Hands the node to the shared free list for later reuse instead of
// returning its memory to the system.
139 void push_to_free_list() { global_free_list.push(
static_cast<T*
>(
this)); }
// Per-object header stored immediately before the object itself.
141 struct unpadded_header {
142 std::atomic<unsigned> ref_count;
143 std::atomic<bool> destroyed;
144 concurrent_ptr<T, N> next_free;
// Pads the header to 64 bytes. NOTE(review): the array length becomes
// zero or negative if sizeof(unpadded_header) >= 64 — confirm upstream
// guards against that.
146 struct padded_header : unpadded_header {
147 char padding[64 -
sizeof(unpadded_header)];
149 using header = std::conditional_t<Traits::insert_padding, padded_header, unpadded_header>;
// The header is allocated directly before the object, so step back one
// header-sized slot from `this`.
150 header* getHeader() {
return static_cast<header*
>(
static_cast<void*
>(
this)) - 1; }
151 [[nodiscard]]
const header* getHeader()
const {
152 return static_cast<const header*
>(
static_cast<const void*
>(
this)) - 1;
// Convenience accessors for the header fields.
155 std::atomic<unsigned>& ref_count() {
return getHeader()->ref_count; }
156 std::atomic<bool>& destroyed() {
return getHeader()->destroyed; }
157 concurrent_ptr<T, N>& next_free() {
return getHeader()->next_free; }
159 friend class lock_free_ref_count;
161 using guard_ptr =
typename concurrent_ptr<T, N>::guard_ptr;
162 using marked_ptr =
typename concurrent_ptr<T, N>::marked_ptr;
// One free list shared by all instances of this instantiation; the
// `free_list` type itself is not visible in this chunk.
165 static free_list global_free_list;
// guard_ptr: handle that keeps a T alive (via its reference count) while
// held. Declarations only — the member definitions live in the impl header
// included at the bottom of this file. NOTE(review): garbled text — access
// specifiers and the closing braces are missing from this chunk.
168 template <
class Traits>
169 template <
class T,
class MarkedPtr>
170 class lock_free_ref_count<Traits>::guard_ptr :
public detail::guard_ptr<T, MarkedPtr, guard_ptr<T, MarkedPtr>> {
171 using base = detail::guard_ptr<T, MarkedPtr, guard_ptr>;
172 using Deleter =
typename T::Deleter;
// Grant enable_concurrent_ptr access to guard internals.
175 template <
class, std::
size_t,
class>
176 friend class enable_concurrent_ptr;
// Guard an existing marked pointer (defaults to null).
179 explicit guard_ptr(
const MarkedPtr& p = MarkedPtr()) noexcept;
180 guard_ptr(const guard_ptr& p) noexcept;
181 guard_ptr(guard_ptr&& p) noexcept;
183 guard_ptr& operator=(const guard_ptr& p);
184 guard_ptr& operator=(guard_ptr&& p) noexcept;
// Take a guarded snapshot of p; defaults to sequentially consistent order.
187 void acquire(const concurrent_ptr<T>& p, std::memory_order order = std::memory_order_seq_cst) noexcept;
// Like acquire, but only if p currently equals `expected` — presumably the
// bool reports whether the acquisition happened (impl not visible; verify).
190 bool acquire_if_equal(const concurrent_ptr<T>& p,
191 const MarkedPtr& expected,
192 std::memory_order order = std::memory_order_seq_cst) noexcept;
// Drop the guard without reclaiming the object.
195 void reset() noexcept;
// Release the guard and mark the object for deletion with d (restricted to
// std::default_delete by the static_assert in enable_concurrent_ptr).
198 void reclaim(Deleter d = Deleter()) noexcept;
203 #define LOCK_FREE_REF_COUNT_IMPL
204 #include <xenium/reclamation/impl/lock_free_ref_count.hpp>
205 #undef LOCK_FREE_REF_COUNT_IMPL