6 #ifndef XENIUM_DETAIL_THREAD_BLOCK_LIST_HPP
7 #define XENIUM_DETAIL_THREAD_BLOCK_LIST_HPP
14 #pragma warning(disable : 4324) // structure was padded due to alignment specifier
17 namespace xenium::reclamation::detail {
// Lock-free, grow-only list of per-thread entries. Entries are never unlinked;
// a departing thread marks its entry `free` so a later thread can re-adopt it.
// NOTE(review): several original lines are not visible in this extract; the
// comments below describe only what the shown code establishes.
19 template <
typename T,
typename DeletableObject = detail::deletable_
object>
20 class thread_block_list {
// Lifecycle of an entry: `free` (no owner, adoptable) -> `inactive` or
// `active` (owned). Exact semantics of inactive vs. active are enforced by
// the callers via acquire_entry()/acquire_inactive_entry().
21 enum class entry_state { free, inactive, active };
// A freshly constructed entry starts out owned and active by its creator.
25 entry() : next_entry(nullptr), state(entry_state::active) {}
// Returns true if the entry is currently in the `active` state. The caller
// may pass a stronger memory order when the check must synchronize with a
// concurrent state transition; the default is a relaxed read.
31 [[nodiscard]]
bool is_active(std::memory_order memory_order = std::memory_order_relaxed)
const {
32 return state.load(memory_order) == entry_state::active;
// abandon(): release ownership — the release store publishes all prior
// writes of the owning thread before the entry becomes adoptable.
39 state.store(entry_state::free, std::memory_order_release);
// activate(): only legal from the `inactive` state (asserted), then
// transitions to `active` with release semantics.
43 assert(state.load(std::memory_order_relaxed) == entry_state::inactive);
44 state.store(entry_state::active, std::memory_order_release);
48 friend class thread_block_list;
// Attempt to take over a `free` entry, moving it directly into
// `initial_state`. Returns false if the entry is not free or if another
// thread wins the race on the CAS.
50 bool try_adopt(entry_state initial_state) {
51 if (state.load(std::memory_order_relaxed) == entry_state::free) {
52 auto expected = entry_state::free;
// Acquire on success pairs with the release store in abandon(), so the
// adopter observes everything the previous owner wrote.
54 return state.compare_exchange_strong(expected, initial_state, std::memory_order_acquire);
// Current lifecycle state of this entry (see entry_state above).
64 std::atomic<entry_state> state;
// Forward iterator over the singly-linked entry list. It simply follows
// `next_entry` pointers; since the list is grow-only, iteration is safe while
// other threads push new entries at the head.
70 explicit iterator(T* ptr) : ptr(ptr) {}
73 using iterator_category = std::forward_iterator_tag;
75 using difference_type = std::ptrdiff_t;
81 void swap(iterator& other) noexcept { std::swap(ptr, other.ptr); }
// Pre-increment: advance to the next entry; dereferencing/advancing an
// end (null) iterator is a programmer error (asserted).
83 iterator& operator++() {
84 assert(ptr !=
nullptr);
85 ptr = ptr->next_entry;
// Post-increment: returns the pre-advance iterator (copy made in lines not
// visible in this extract).
89 iterator operator++(
int) {
90 assert(ptr !=
nullptr);
92 ptr = ptr->next_entry;
// Two iterators are equal iff they reference the same entry.
96 bool operator==(
const iterator& rhs)
const {
return ptr == rhs.ptr; }
98 bool operator!=(
const iterator& rhs)
const {
return ptr != rhs.ptr; }
100 T& operator*()
const {
101 assert(ptr !=
nullptr);
105 T* operator->()
const {
106 assert(ptr !=
nullptr);
110 friend class thread_block_list;
// Obtain an entry in the `active` state — either an adopted free entry or a
// newly allocated one (see adopt_or_create_entry).
113 T* acquire_entry() {
return adopt_or_create_entry(entry_state::active); }
// Same as acquire_entry(), but the returned entry starts out `inactive`.
115 T* acquire_inactive_entry() {
return adopt_or_create_entry(entry_state::inactive); }
// Give up ownership of an entry; it stays linked in the list and becomes
// adoptable by other threads (entry::abandon does a release store to `free`).
117 void release_entry(T* entry) { entry->abandon(); }
// begin(): acquire-load of head pairs with the release CAS in add_entry, so
// the iterator sees fully constructed entries.
121 return iterator{head.load(std::memory_order_acquire)};
// Past-the-end iterator (null entry pointer).
124 iterator end() {
return iterator{}; }
// Hand off a chain of retired-but-not-yet-reclaimable nodes to the global
// abandoned list, so another thread can reclaim them later. `obj` heads an
// intrusive chain linked via `next`; the lines locating the chain's tail
// (`last`) are not visible in this extract.
126 void abandon_retired_nodes(DeletableObject* obj) {
128 auto* next = last->next;
134 auto* h = abandoned_retired_nodes.load(std::memory_order_relaxed);
// Lock-free push of the whole chain onto the abandoned list. The release
// order on success publishes the chain's contents to the eventual adopter
// (which reads with acquire in adopt_abandoned_retired_nodes).
139 !abandoned_retired_nodes.compare_exchange_weak(h, obj, std::memory_order_release, std::memory_order_relaxed));
// Take over the entire abandoned-nodes list, or (presumably — the early
// return between the check and the exchange is not visible here) return
// nullptr when it is empty.
142 DeletableObject* adopt_abandoned_retired_nodes() {
// Cheap relaxed probe first: avoids the more expensive atomic exchange in
// the common case where nothing has been abandoned.
143 if (abandoned_retired_nodes.load(std::memory_order_relaxed) ==
nullptr) {
// Acquire pairs with the release CAS in abandon_retired_nodes, making the
// adopted nodes' contents visible to this thread.
148 return abandoned_retired_nodes.exchange(
nullptr, std::memory_order_acquire);
// Push a new entry onto the head of the list (classic lock-free stack push;
// entries are never removed, so no ABA hazard on this CAS).
152 void add_entry(T* node) {
153 auto* h = head.load(std::memory_order_relaxed);
// On CAS failure `h` is refreshed with the current head, so re-linking
// before retrying keeps the list consistent.
155 node->next_entry = h;
157 }
// Release on success publishes node's fields (next_entry, state) to readers
// that acquire-load `head` (see begin()).
while (!head.compare_exchange_weak(h, node, std::memory_order_release, std::memory_order_relaxed));
// Find a `free` entry in the list and adopt it into `initial_state`; if none
// can be adopted, a new entry is created and linked in (creation lines are
// not visible in this extract). Entries must derive from `entry` so the
// state/next_entry machinery is available.
160 T* adopt_or_create_entry(entry_state initial_state) {
161 static_assert(std::is_base_of<entry, T>::value,
"T must derive from entry.");
// Acquire pairs with add_entry's release CAS: the walk below sees fully
// initialized entries.
164 T* result = head.load(std::memory_order_acquire);
// Linear scan: first successful try_adopt wins; otherwise keep walking.
166 if (result->try_adopt(initial_state)) {
170 result = result->next_entry;
// Relaxed store is sufficient here: presumably this is the freshly created
// entry, not yet published — publication happens via add_entry's release
// CAS (TODO confirm against the lines missing from this extract).
174 result->state.store(initial_state, std::memory_order_relaxed);
// Head of the grow-only singly-linked list of entries (push-only; see
// add_entry / begin for the release/acquire pairing).
179 std::atomic<T*> head{
nullptr};
// Global list of abandoned retired nodes. alignas(64) keeps this hot atomic
// on its own (presumably cache-line-sized) boundary, away from `head`, to
// reduce false sharing between the two independently contended atomics.
181 alignas(64) std::atomic<DeletableObject*> abandoned_retired_nodes{
nullptr};