 #ifndef SHARE_UTILITIES_LOCKFREESTACK_HPP
 #define SHARE_UTILITIES_LOCKFREESTACK_HPP
 
+#include "runtime/atomic.hpp"
 #include "runtime/atomicAccess.hpp"
 #include "utilities/debug.hpp"
 #include "utilities/globalDefinitions.hpp"
 // a result, there is no allocation involved in adding objects to the stack
 // or removing them from the stack.
 //
-// To be used in a LockFreeStack of objects of type T, an object of
-// type T must have a list entry member of type T* volatile, with an
-// non-member accessor function returning a pointer to that member. A
-// LockFreeStack is associated with the class of its elements and an
-// entry member from that class.
+// To be used in a LockFreeStack of objects of type T, an object of type T
+// must have a list entry member. A list entry member is a data member whose
+// type is either (1) Atomic<T*>, or (2) T* volatile. There must be a
+// non-member or static member function returning a pointer to that member,
+// which is used to provide access to it by a LockFreeStack. A LockFreeStack
+// is associated with the class of its elements and an entry member from that
+// class by being specialized on the element class and a pointer to the
+// function for accessing that entry member.
 //
 // An object can be in multiple stacks at the same time, so long as
 // each stack uses a different entry member. That is, the class of the
 //
 // \tparam T is the class of the elements in the stack.
 //
-// \tparam next_ptr is a function pointer. Applying this function to
+// \tparam next_accessor is a function pointer. Applying this function to
 // an object of type T must return a pointer to the list entry member
 // of the object associated with the LockFreeStack type.
-template<typename T, T* volatile* (*next_ptr)(T&)>
+template<typename T, auto next_accessor>
 class LockFreeStack {
-  T* volatile _top;
+  Atomic<T*> _top;
 
   void prepend_impl(T* first, T* last) {
     T* cur = top();
     T* old;
     do {
       old = cur;
       set_next(*last, cur);
-      cur = AtomicAccess::cmpxchg(&_top, cur, first);
+      cur = _top.compare_exchange(cur, first);
     } while (old != cur);
   }
 
   NONCOPYABLE(LockFreeStack);
 
+  template<typename NextAccessor>
+  static constexpr void use_atomic_access_impl(NextAccessor) {
+    static_assert(DependentAlwaysFalse<NextAccessor>, "Invalid next accessor");
+  }
+  static constexpr bool use_atomic_access_impl(T* volatile* (*)(T&)) { return true; }
+  static constexpr bool use_atomic_access_impl(Atomic<T*>* (*)(T&)) { return false; }
+
+  static constexpr bool use_atomic_access = use_atomic_access_impl(next_accessor);
+
 public:
   LockFreeStack() : _top(nullptr) {}
   ~LockFreeStack() { assert(empty(), "stack not empty"); }
@@ -89,7 +102,7 @@ class LockFreeStack {
         new_top = next(*result);
       }
       // CAS even on empty pop, for consistent membar behavior.
-      result = AtomicAccess::cmpxchg(&_top, result, new_top);
+      result = _top.compare_exchange(result, new_top);
     } while (result != old);
     if (result != nullptr) {
       set_next(*result, nullptr);
@@ -101,7 +114,7 @@ class LockFreeStack {
   // list of elements. Acts as a full memory barrier.
   // postcondition: empty()
   T* pop_all() {
-    return AtomicAccess::xchg(&_top, (T*)nullptr);
+    return _top.exchange(nullptr);
   }
 
   // Atomically adds value to the top of this stack. Acts as a full
@@ -143,9 +156,9 @@ class LockFreeStack {
   // Return true if the stack is empty.
   bool empty() const { return top() == nullptr; }
 
-  // Return the most recently pushed element, or nullptr if the stack is empty.
+  // Return the most recently pushed element, or null if the stack is empty.
   // The returned element is not removed from the stack.
-  T* top() const { return AtomicAccess::load(&_top); }
+  T* top() const { return _top.load_relaxed(); }
 
   // Return the number of objects in the stack. There must be no concurrent
   // pops while the length is being determined.
@@ -160,15 +173,23 @@ class LockFreeStack {
   // Return the entry following value in the list used by the
   // specialized LockFreeStack class.
   static T* next(const T& value) {
-    return AtomicAccess::load(next_ptr(const_cast<T&>(value)));
+    if constexpr (use_atomic_access) {
+      return AtomicAccess::load(next_accessor(const_cast<T&>(value)));
+    } else {
+      return next_accessor(const_cast<T&>(value))->load_relaxed();
+    }
   }
 
   // Set the entry following value to new_next in the list used by the
   // specialized LockFreeStack class. Not thread-safe; in particular,
   // if value is in an instance of this specialization of LockFreeStack,
   // there must be no concurrent push or pop operations on that stack.
   static void set_next(T& value, T* new_next) {
-    AtomicAccess::store(next_ptr(value), new_next);
+    if constexpr (use_atomic_access) {
+      AtomicAccess::store(next_accessor(value), new_next);
+    } else {
+      next_accessor(value)->store_relaxed(new_next);
+    }
   }
 };
 
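For illustration only, and not part of the changed header: a minimal sketch of a client type satisfying the new entry-member requirement, assuming the push(T&), pop(), and pop_all() interface documented in the unchanged parts of the header. The names TestNode, entry_of, TestNodeStack, and example are hypothetical, and the include path for lockFreeStack.hpp is inferred from its header guard.

#include "runtime/atomic.hpp"
#include "utilities/debug.hpp"
#include "utilities/lockFreeStack.hpp"

// Hypothetical element type (not from the JDK sources).
class TestNode {
  Atomic<TestNode*> _entry;      // list entry member, flavor (1) above
public:
  TestNode() : _entry(nullptr) {}
  // Accessor handing LockFreeStack a pointer to the entry member; the
  // comment above also permits a non-member function here.
  static Atomic<TestNode*>* entry_of(TestNode& node) { return &node._entry; }
};

// Specialize on the element class and the accessor. Because entry_of returns
// Atomic<TestNode*>*, use_atomic_access is false and the stack links nodes
// via load_relaxed()/store_relaxed(); an accessor returning
// TestNode* volatile* would select the AtomicAccess path instead.
using TestNodeStack = LockFreeStack<TestNode, &TestNode::entry_of>;

void example() {
  TestNode a;
  TestNode b;
  TestNodeStack stack;
  stack.push(a);                       // lock-free push, full memory barrier
  stack.push(b);
  TestNode* newest = stack.pop();      // &b: most recently pushed element
  TestNode* rest = stack.pop_all();    // &a: detach the remaining list
  assert(newest == &b && rest == &a, "sketch sanity check");
  // The stack is empty again, so its destructor's assert(empty(), ...) holds.
}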