#ifndef HAZARD_POINTER_IMPL
  #error "This is an impl file and must not be included directly!"
#endif

#include <xenium/aligned_object.hpp>
#include <xenium/detail/port.hpp>

#include <algorithm>
#include <new>
#include <vector>

#ifdef _MSC_VER
  #pragma warning(push)
  #pragma warning(disable : 4324) // structure was padded due to alignment specifier
#endif

namespace xenium::reclamation {
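
// Implementation of the hazard_pointer reclamation scheme: a guard_ptr publishes the pointer it
// protects in a per-thread hazard-pointer slot, and retired nodes are deleted only once no slot in
// any thread still references them (see thread_data::scan below).
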
template <class Traits>
template <class T, class MarkedPtr>
hazard_pointer<Traits>::guard_ptr<T, MarkedPtr>::guard_ptr(const MarkedPtr& p) : base(p), hp() {
  if (this->ptr.get() != nullptr) {
    hp = local_thread_data.alloc_hazard_pointer();
    hp->set_object(this->ptr.get());
  }
}

template <class Traits>
template <class T, class MarkedPtr>
hazard_pointer<Traits>::guard_ptr<T, MarkedPtr>::guard_ptr(const guard_ptr& p) : guard_ptr(p.ptr) {}

template <class Traits>
template <class T, class MarkedPtr>
hazard_pointer<Traits>::guard_ptr<T, MarkedPtr>::guard_ptr(guard_ptr&& p) noexcept : base(p.ptr), hp(p.hp) {
  // leave the moved-from guard empty so its destructor does not release the transferred hazard pointer
  p.ptr.reset();
  p.hp = nullptr;
}

template <class Traits>
template <class T, class MarkedPtr>
auto hazard_pointer<Traits>::guard_ptr<T, MarkedPtr>::operator=(const guard_ptr& p) -> guard_ptr& {
  if (&p == this) {
    return *this;
  }

  if (hp == nullptr) {
    hp = local_thread_data.alloc_hazard_pointer();
  }
  this->ptr = p.ptr;
  hp->set_object(this->ptr.get());
  return *this;
}

template <class Traits>
template <class T, class MarkedPtr>
auto hazard_pointer<Traits>::guard_ptr<T, MarkedPtr>::operator=(guard_ptr&& p) noexcept -> guard_ptr& {
  if (&p == this) {
    return *this;
  }

  reset();
  this->ptr = std::move(p.ptr);
  hp = p.hp;
  p.ptr.reset();
  p.hp = nullptr;
  return *this;
}

template <class Traits>
template <class T, class MarkedPtr>
void hazard_pointer<Traits>::guard_ptr<T, MarkedPtr>::acquire(const concurrent_ptr<T>& p, std::memory_order order) {
  auto p1 = p.load(std::memory_order_relaxed);
  if (p1 == this->ptr) {
    return;
  }

  if (p1 != nullptr && hp == nullptr) {
    hp = local_thread_data.alloc_hazard_pointer();
  }

  // Publish the hazard pointer and re-read the source pointer until both agree; only then is the
  // object reliably protected against reclamation.
  auto p2 = p1;
  do {
    p1 = p2;
    if (p1 == nullptr) {
      reset();
      return;
    }

    hp->set_object(p1.get());
    p2 = p.load(order);
  } while (p1.get() != p2.get());

  this->ptr = p2;
}

template <class Traits>
template <class T, class MarkedPtr>
bool hazard_pointer<Traits>::guard_ptr<T, MarkedPtr>::acquire_if_equal(const concurrent_ptr<T>& p,
                                                                       const MarkedPtr& expected,
                                                                       std::memory_order order) {
  auto p1 = p.load(std::memory_order_relaxed);
  if (p1 == nullptr || p1 != expected) {
    reset();
    return p1 == expected;
  }

  if (hp == nullptr) {
    hp = local_thread_data.alloc_hazard_pointer();
  }
  hp->set_object(p1.get());
  // re-read after publishing the hazard pointer; if the value changed, the object is not protected
  this->ptr = p.load(order);
  if (this->ptr != p1) {
    reset();
    return false;
  }
  return true;
}

template <class Traits>
template <class T, class MarkedPtr>
void hazard_pointer<Traits>::guard_ptr<T, MarkedPtr>::reset() noexcept {
  local_thread_data.release_hazard_pointer(hp);
  this->ptr.reset();
}

template <class Traits>
template <class T, class MarkedPtr>
void hazard_pointer<Traits>::guard_ptr<T, MarkedPtr>::do_swap(guard_ptr& g) noexcept {
  std::swap(hp, g.hp);
}

template <class Traits>
template <class T, class MarkedPtr>
void hazard_pointer<Traits>::guard_ptr<T, MarkedPtr>::reclaim(Deleter d) noexcept {
  auto* p = this->ptr.get();
  reset();
  p->set_deleter(std::move(d));
  // once the local retire list exceeds the threshold, scan and reclaim unprotected nodes
  if (local_thread_data.add_retired_node(p) >= allocation_strategy::retired_nodes_threshold()) {
    local_thread_data.scan();
  }
}

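// Each thread owns a control block holding its hazard-pointer slots. Free slots are chained into a
// per-thread free list through the slots themselves: a slot whose mark bit is set stores a link to
// the next free slot, while an unmarked value stores the protected object (see set_link/try_get_object).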
template <class Strategy, class Derived>
struct alignas(64) basic_hp_thread_control_block :
    detail::thread_block_list<Derived>::entry,
    aligned_object<basic_hp_thread_control_block<Strategy, Derived>> {
  struct hazard_pointer {
    void set_object(detail::deletable_object* obj) {
      value.store(reinterpret_cast<void**>(obj), std::memory_order_release);
      // this seq_cst fence pairs with the seq_cst fence in thread_data::scan to establish a total
      // order between publishing a hazard pointer and scanning for protected objects
      std::atomic_thread_fence(std::memory_order_seq_cst);
    }

    bool try_get_object(detail::deletable_object*& result) const {
      // TSan does not support fences, so under TSan an acquire-load is used instead of relying on
      // the acquire fence in thread_data::scan
      constexpr auto memory_order = TSAN_MEMORY_ORDER(std::memory_order_acquire, std::memory_order_relaxed);
      auto v = value.load(memory_order);
      if (v.mark() == 0) {
        result = reinterpret_cast<detail::deletable_object*>(v.get());
        return true;
      }
      return false; // the slot holds a free-list link, not a protected object
    }

    void set_link(hazard_pointer* link) {
      // the mark bit distinguishes a free-list link from a protected-object pointer
      value.store(marked_ptr<void*, 1>(reinterpret_cast<void**>(link), 1), std::memory_order_release);
    }
    [[nodiscard]] hazard_pointer* get_link() const {
      return reinterpret_cast<hazard_pointer*>(value.load(std::memory_order_relaxed).get());
    }

    [[nodiscard]] bool is_link() const { return value.load(std::memory_order_relaxed).mark() != 0; }

    std::atomic<marked_ptr<void*, 1>> value;
  };

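  // The hint is the head of the thread's free list of hazard-pointer slots: allocation pops from it
  // and release pushes back onto it, so neither operation needs an atomic read-modify-write.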
  using hint = hazard_pointer*;

  void initialize(hint& hint) {
    Strategy::number_of_active_hps.fetch_add(self().number_of_hps(), std::memory_order_relaxed);
    hint = initialize_block(self());
  }

  void abandon() {
    Strategy::number_of_active_hps.fetch_sub(self().number_of_hps(), std::memory_order_relaxed);
    detail::thread_block_list<Derived>::entry::abandon();
  }

  hazard_pointer* alloc_hazard_pointer(hint& hint) {
    auto result = hint;
    if (result == nullptr) {
      result = self().need_more_hps();
    }

    hint = result->get_link();
    return result;
  }

  void release_hazard_pointer(hazard_pointer*& hp, hint& hint) {
    if (hp != nullptr) {
      hp->set_link(hint); // push the slot back onto the free list
      hint = hp;
      hp = nullptr;
    }
  }

protected:
  Derived& self() { return static_cast<Derived&>(*this); }

  hazard_pointer* begin() { return &pointers[0]; }
  hazard_pointer* end() { return &pointers[Strategy::K]; }
  [[nodiscard]] const hazard_pointer* begin() const { return &pointers[0]; }
  [[nodiscard]] const hazard_pointer* end() const { return &pointers[Strategy::K]; }

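  // Chains all slots of a block into the free list: each slot links to its successor, and the last
  // slot links to the first slot of the next block (or nullptr). Returns the new free-list head.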
  template <typename T>
  static hazard_pointer* initialize_block(T& block) {
    auto begin = block.begin();
    auto end = block.end() - 1; // the last slot is linked separately below
    for (auto it = begin; it != end;) {
      auto next = it + 1;
      it->set_link(next);
      it = next;
    }
    end->set_link(block.initialize_next_block());
    return begin;
  }

  static void gather_protected_pointers(std::vector<const detail::deletable_object*>& protected_ptrs,
                                        const hazard_pointer* begin,
                                        const hazard_pointer* end) {
    for (auto it = begin; it != end; ++it) {
      detail::deletable_object* obj;
      if (it->try_get_object(obj)) {
        protected_ptrs.push_back(obj);
      }
    }
  }

  hazard_pointer pointers[Strategy::K];
};

template <class Strategy>
struct static_hp_thread_control_block :
    basic_hp_thread_control_block<Strategy, static_hp_thread_control_block<Strategy>> {
  using base = basic_hp_thread_control_block<Strategy, static_hp_thread_control_block>;
  using hazard_pointer = typename base::hazard_pointer;
  friend base;

  void gather_protected_pointers(std::vector<const detail::deletable_object*>& protected_ptrs) const {
    base::gather_protected_pointers(protected_ptrs, this->begin(), this->end());
  }

private:
  hazard_pointer* need_more_hps() { throw bad_hazard_pointer_alloc("hazard pointer pool exceeded"); }
  [[nodiscard]] constexpr size_t number_of_hps() const { return Strategy::K; }
  [[nodiscard]] constexpr hazard_pointer* initialize_next_block() const { return nullptr; }
};

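// The dynamic variant grows its hazard-pointer pool on demand: when the free list is exhausted it
// allocates a new block of slots (at least Strategy::K, otherwise half the current total) and links
// it in front of the existing blocks.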
template <class Strategy>
struct dynamic_hp_thread_control_block :
    basic_hp_thread_control_block<Strategy, dynamic_hp_thread_control_block<Strategy>> {
  using base = basic_hp_thread_control_block<Strategy, dynamic_hp_thread_control_block>;
  using hazard_pointer = typename base::hazard_pointer;
  friend base;

  void gather_protected_pointers(std::vector<const detail::deletable_object*>& protected_ptrs) const {
    gather_protected_pointers(*this, protected_ptrs);
  }

private:
  // A block of additional hazard-pointer slots; the slots live in the same buffer, directly behind
  // this header (see allocate_new_hazard_pointer_block).
  struct alignas(64) hazard_pointer_block : aligned_object<hazard_pointer_block> {
    explicit hazard_pointer_block(size_t size) : size(size) {}

    hazard_pointer* begin() { return reinterpret_cast<hazard_pointer*>(this + 1); }
    hazard_pointer* end() { return begin() + size; }

    [[nodiscard]] const hazard_pointer* begin() const { return reinterpret_cast<const hazard_pointer*>(this + 1); }
    [[nodiscard]] const hazard_pointer* end() const { return begin() + size; }

    [[nodiscard]] const hazard_pointer_block* next_block() const { return next; }
    hazard_pointer* initialize_next_block() { return next ? base::initialize_block(*next) : nullptr; }

    hazard_pointer_block* next = nullptr;
    const size_t size;
  };

  [[nodiscard]] const hazard_pointer_block* next_block() const {
    // acquire-load; synchronizes with the release-store in allocate_new_hazard_pointer_block
    return hp_block.load(std::memory_order_acquire);
  }
  [[nodiscard]] size_t number_of_hps() const { return total_number_of_hps; }
  hazard_pointer* need_more_hps() { return allocate_new_hazard_pointer_block(); }

  hazard_pointer* initialize_next_block() {
    auto block = hp_block.load(std::memory_order_relaxed);
    return block ? base::initialize_block(*block) : nullptr;
  }

  template <typename T>
  static void gather_protected_pointers(const T& block,
                                        std::vector<const detail::deletable_object*>& protected_ptrs) {
    base::gather_protected_pointers(protected_ptrs, block.begin(), block.end());

    // recurse through the chain of hazard-pointer blocks
    const auto* next = block.next_block();
    if (next) {
      gather_protected_pointers(*next, protected_ptrs);
    }
  }

  static detail::deletable_object* as_internal_pointer(hazard_pointer* p) {
    // set the LSB to mark this as an internal (free-list) pointer rather than a protected object
    auto marked = reinterpret_cast<size_t>(p) | 1;
    return reinterpret_cast<detail::deletable_object*>(marked);
  }

  hazard_pointer* allocate_new_hazard_pointer_block() {
    size_t hps = std::max(static_cast<size_t>(Strategy::K), total_number_of_hps / 2);
    total_number_of_hps += hps;
    Strategy::number_of_active_hps.fetch_add(hps, std::memory_order_relaxed);

    // allocate the block header and its hazard-pointer slots in a single buffer
    size_t buffer_size = sizeof(hazard_pointer_block) + hps * sizeof(hazard_pointer);
    void* buffer = hazard_pointer_block::operator new(buffer_size);
    auto block = ::new (buffer) hazard_pointer_block(hps);
    auto result = this->initialize_block(*block);
    block->next = hp_block.load(std::memory_order_relaxed);
    // release-store so that a scanning thread that sees the new block also sees its initialized slots
    hp_block.store(block, std::memory_order_release);
    return result;
  }

  size_t total_number_of_hps = Strategy::K;
  std::atomic<hazard_pointer_block*> hp_block;
};

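// Per-thread data: the thread's control block, the head of its hazard-pointer free list (the hint),
// and its list of retired nodes awaiting reclamation.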
template <class Traits>
struct alignas(64) hazard_pointer<Traits>::thread_data : aligned_object<thread_data> {
  using HP = typename thread_control_block::hazard_pointer*;

  ~thread_data() {
    if (retire_list != nullptr) {
      scan();
      if (retire_list != nullptr) {
        // nodes still protected by other threads are handed over to the global list
        global_thread_block_list.abandon_retired_nodes(retire_list);
      }
      retire_list = nullptr;
    }

    if (control_block != nullptr) {
      global_thread_block_list.release_entry(control_block);
      control_block = nullptr;
    }
  }

  HP alloc_hazard_pointer() {
    ensure_has_control_block();
    return control_block->alloc_hazard_pointer(hint);
  }

  void release_hazard_pointer(HP& hp) { control_block->release_hazard_pointer(hp, hint); }

  std::size_t add_retired_node(detail::deletable_object* p) {
    p->next = retire_list;
    retire_list = p;
    return ++number_of_retired_nodes;
  }

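  // scan(): gather the hazard pointers currently published by all active threads, then walk the
  // local retire list and delete every node that is not protected; protected nodes are re-retired.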
  void scan() {
    std::vector<const detail::deletable_object*> protected_pointers;
    protected_pointers.reserve(allocation_strategy::number_of_active_hazard_pointers());

    // seq_cst fence; pairs with the seq_cst fence in hazard_pointer::set_object, so either the scan
    // observes a published hazard pointer or the protecting thread observes the updated source pointer
    std::atomic_thread_fence(std::memory_order_seq_cst);

    auto adopted_nodes = global_thread_block_list.adopt_abandoned_retired_nodes();

    std::for_each(
      global_thread_block_list.begin(), global_thread_block_list.end(), [&protected_pointers](const auto& entry) {
        // TSan does not support fences, so under TSan an acquire-load is used instead
        constexpr auto memory_order = TSAN_MEMORY_ORDER(std::memory_order_acquire, std::memory_order_relaxed);
        if (entry.is_active(memory_order)) {
          entry.gather_protected_pointers(protected_pointers);
        }
      });

    std::atomic_thread_fence(std::memory_order_acquire);

    std::sort(protected_pointers.begin(), protected_pointers.end());

    auto* list = retire_list;
    retire_list = nullptr;
    number_of_retired_nodes = 0;
    reclaim_nodes(list, protected_pointers);
    reclaim_nodes(adopted_nodes, protected_pointers);
  }

private:
  void ensure_has_control_block() {
    if (control_block == nullptr) {
      control_block = global_thread_block_list.acquire_entry();
      control_block->initialize(hint);
    }
  }

  void reclaim_nodes(detail::deletable_object* list,
                     const std::vector<const detail::deletable_object*>& protected_pointers) {
    while (list != nullptr) {
      auto* cur = list;
      list = list->next;

      if (std::binary_search(protected_pointers.begin(), protected_pointers.end(), cur)) {
        // the node is still protected by some hazard pointer -> keep it on the retire list
        add_retired_node(cur);
      } else {
        cur->delete_self();
      }
    }
  }

  detail::deletable_object* retire_list = nullptr;
  std::size_t number_of_retired_nodes = 0;
  typename thread_control_block::hint hint{};

  thread_control_block* control_block = nullptr;

  friend class hazard_pointer;
  ALLOCATION_COUNTER(hazard_pointer);
};

#ifdef TRACK_ALLOCATIONS
template <class Traits>
inline void hazard_pointer<Traits>::count_allocation() {
  local_thread_data.allocation_counter.count_allocation();
}

template <class Traits>
inline void hazard_pointer<Traits>::count_reclamation() {
  local_thread_data.allocation_counter.count_reclamation();
}
#endif
} // namespace xenium::reclamation

#ifdef _MSC_VER
  #pragma warning(pop)
#endif