@@ -40,8 +40,7 @@ enum class MemoryScope : int {
 #endif
 };
 
-namespace atomic {
-
+namespace impl {
 LIBC_INLINE constexpr int order(MemoryOrder mem_ord) {
   return static_cast<int>(mem_ord);
 }
@@ -61,6 +60,7 @@ LIBC_INLINE constexpr int infer_failure_order(MemoryOrder mem_ord) {
     return order(MemoryOrder::ACQUIRE);
   return order(mem_ord);
 }
+} // namespace impl
 
 template <typename T> struct Atomic {
   static_assert(is_trivially_copyable_v<T> && is_copy_constructible_v<T> &&
@@ -111,10 +111,11 @@ template <typename T> struct Atomic {
        [[maybe_unused]] MemoryScope mem_scope = MemoryScope::DEVICE) {
     T res;
 #if __has_builtin(__scoped_atomic_load)
-    __scoped_atomic_load(addressof(val), addressof(res), order(mem_ord),
-                         scope(mem_scope));
+    __scoped_atomic_load(impl::addressof(val), impl::addressof(res),
+                         impl::order(mem_ord), impl::scope(mem_scope));
 #else
-    __atomic_load(addressof(val), addressof(res), order(mem_ord));
+    __atomic_load(impl::addressof(val), impl::addressof(res),
+                  impl::order(mem_ord));
 #endif
     return res;
   }
@@ -129,20 +130,22 @@ template <typename T> struct Atomic {
   store(T rhs, MemoryOrder mem_ord = MemoryOrder::SEQ_CST,
         [[maybe_unused]] MemoryScope mem_scope = MemoryScope::DEVICE) {
 #if __has_builtin(__scoped_atomic_store)
-    __scoped_atomic_store(addressof(val), addressof(rhs), order(mem_ord),
-                          scope(mem_scope));
+    __scoped_atomic_store(impl::addressof(val), impl::addressof(rhs),
+                          impl::order(mem_ord), impl::scope(mem_scope));
 #else
-    __atomic_store(addressof(val), addressof(rhs), order(mem_ord));
+    __atomic_store(impl::addressof(val), impl::addressof(rhs),
+                   impl::order(mem_ord));
 #endif
   }
 
   // Atomic compare exchange
   LIBC_INLINE bool compare_exchange_strong(
       T &expected, T desired, MemoryOrder mem_ord = MemoryOrder::SEQ_CST,
       [[maybe_unused]] MemoryScope mem_scope = MemoryScope::DEVICE) {
-    return __atomic_compare_exchange(addressof(val), addressof(expected),
-                                     addressof(desired), false, order(mem_ord),
-                                     infer_failure_order(mem_ord));
+    return __atomic_compare_exchange(
+        impl::addressof(val), impl::addressof(expected),
+        impl::addressof(desired), false, impl::order(mem_ord),
+        impl::infer_failure_order(mem_ord));
   }
 
   // Atomic compare exchange (separate success and failure memory orders)
@@ -151,17 +154,19 @@ template <typename T> struct Atomic {
       MemoryOrder failure_order,
       [[maybe_unused]] MemoryScope mem_scope = MemoryScope::DEVICE) {
     return __atomic_compare_exchange(
-        addressof(val), addressof(expected), addressof(desired), false,
-        order(success_order), order(failure_order));
+        impl::addressof(val), impl::addressof(expected),
+        impl::addressof(desired), false, impl::order(success_order),
+        impl::order(failure_order));
   }
 
   // Atomic compare exchange (weak version)
   LIBC_INLINE bool compare_exchange_weak(
       T &expected, T desired, MemoryOrder mem_ord = MemoryOrder::SEQ_CST,
       [[maybe_unused]] MemoryScope mem_scope = MemoryScope::DEVICE) {
-    return __atomic_compare_exchange(addressof(val), addressof(expected),
-                                     addressof(desired), true, order(mem_ord),
-                                     infer_failure_order(mem_ord));
+    return __atomic_compare_exchange(
+        impl::addressof(val), impl::addressof(expected),
+        impl::addressof(desired), true, impl::order(mem_ord),
+        impl::infer_failure_order(mem_ord));
   }
 
   // Atomic compare exchange (weak version with separate success and failure
@@ -171,20 +176,22 @@ template <typename T> struct Atomic {
       MemoryOrder failure_order,
       [[maybe_unused]] MemoryScope mem_scope = MemoryScope::DEVICE) {
     return __atomic_compare_exchange(
-        addressof(val), addressof(expected), addressof(desired), true,
-        order(success_order), order(failure_order));
+        impl::addressof(val), impl::addressof(expected),
+        impl::addressof(desired), true, impl::order(success_order),
+        impl::order(failure_order));
   }
 
   LIBC_INLINE T
   exchange(T desired, MemoryOrder mem_ord = MemoryOrder::SEQ_CST,
            [[maybe_unused]] MemoryScope mem_scope = MemoryScope::DEVICE) {
     T ret;
 #if __has_builtin(__scoped_atomic_exchange)
-    __scoped_atomic_exchange(addressof(val), addressof(desired), addressof(ret),
-                             order(mem_ord), scope(mem_scope));
+    __scoped_atomic_exchange(impl::addressof(val), impl::addressof(desired),
+                             impl::addressof(ret), impl::order(mem_ord),
+                             impl::scope(mem_scope));
 #else
-    __atomic_exchange(addressof(val), addressof(desired), addressof(ret),
-                      order(mem_ord));
+    __atomic_exchange(impl::addressof(val), impl::addressof(desired),
+                      impl::addressof(ret), impl::order(mem_ord));
 #endif
     return ret;
   }
@@ -194,10 +201,12 @@ template <typename T> struct Atomic {
             [[maybe_unused]] MemoryScope mem_scope = MemoryScope::DEVICE) {
     static_assert(cpp::is_integral_v<T>, "T must be an integral type.");
 #if __has_builtin(__scoped_atomic_fetch_add)
-    return __scoped_atomic_fetch_add(addressof(val), increment, order(mem_ord),
-                                     scope(mem_scope));
+    return __scoped_atomic_fetch_add(impl::addressof(val), increment,
+                                     impl::order(mem_ord),
+                                     impl::scope(mem_scope));
 #else
-    return __atomic_fetch_add(addressof(val), increment, order(mem_ord));
+    return __atomic_fetch_add(impl::addressof(val), increment,
+                              impl::order(mem_ord));
 #endif
   }
 
@@ -206,10 +215,11 @@ template <typename T> struct Atomic {
            [[maybe_unused]] MemoryScope mem_scope = MemoryScope::DEVICE) {
     static_assert(cpp::is_integral_v<T>, "T must be an integral type.");
 #if __has_builtin(__scoped_atomic_fetch_or)
-    return __scoped_atomic_fetch_or(addressof(val), mask, order(mem_ord),
-                                    scope(mem_scope));
+    return __scoped_atomic_fetch_or(impl::addressof(val), mask,
+                                    impl::order(mem_ord),
+                                    impl::scope(mem_scope));
 #else
-    return __atomic_fetch_or(addressof(val), mask, order(mem_ord));
+    return __atomic_fetch_or(impl::addressof(val), mask, impl::order(mem_ord));
 #endif
   }
 
@@ -218,10 +228,11 @@ template <typename T> struct Atomic {
             [[maybe_unused]] MemoryScope mem_scope = MemoryScope::DEVICE) {
     static_assert(cpp::is_integral_v<T>, "T must be an integral type.");
 #if __has_builtin(__scoped_atomic_fetch_and)
-    return __scoped_atomic_fetch_and(addressof(val), mask, order(mem_ord),
-                                     scope(mem_scope));
+    return __scoped_atomic_fetch_and(impl::addressof(val), mask,
+                                     impl::order(mem_ord),
+                                     impl::scope(mem_scope));
 #else
-    return __atomic_fetch_and(addressof(val), mask, order(mem_ord));
+    return __atomic_fetch_and(impl::addressof(val), mask, impl::order(mem_ord));
 #endif
   }
 
@@ -230,10 +241,12 @@ template <typename T> struct Atomic {
             [[maybe_unused]] MemoryScope mem_scope = MemoryScope::DEVICE) {
     static_assert(cpp::is_integral_v<T>, "T must be an integral type.");
 #if __has_builtin(__scoped_atomic_fetch_sub)
-    return __scoped_atomic_fetch_sub(addressof(val), decrement, order(mem_ord),
-                                     scope(mem_scope));
+    return __scoped_atomic_fetch_sub(impl::addressof(val), decrement,
+                                     impl::order(mem_ord),
+                                     impl::scope(mem_scope));
 #else
-    return __atomic_fetch_sub(addressof(val), decrement, order(mem_ord));
+    return __atomic_fetch_sub(impl::addressof(val), decrement,
+                              impl::order(mem_ord));
 #endif
   }
 
@@ -275,9 +288,10 @@ template <typename T> struct AtomicRef {
        [[maybe_unused]] MemoryScope mem_scope = MemoryScope::DEVICE) const {
     T res;
 #if __has_builtin(__scoped_atomic_load)
-    __scoped_atomic_load(ptr, &res, order(mem_ord), scope(mem_scope));
+    __scoped_atomic_load(ptr, &res, impl::order(mem_ord),
+                         impl::scope(mem_scope));
 #else
-    __atomic_load(ptr, &res, order(mem_ord));
+    __atomic_load(ptr, &res, impl::order(mem_ord));
 #endif
     return res;
   }
@@ -292,9 +306,10 @@ template <typename T> struct AtomicRef {
   store(T rhs, MemoryOrder mem_ord = MemoryOrder::SEQ_CST,
         [[maybe_unused]] MemoryScope mem_scope = MemoryScope::DEVICE) const {
 #if __has_builtin(__scoped_atomic_store)
-    __scoped_atomic_store(ptr, &rhs, order(mem_ord), scope(mem_scope));
+    __scoped_atomic_store(ptr, &rhs, impl::order(mem_ord),
+                          impl::scope(mem_scope));
 #else
-    __atomic_store(ptr, &rhs, order(mem_ord));
+    __atomic_store(ptr, &rhs, impl::order(mem_ord));
 #endif
   }
 
@@ -303,8 +318,8 @@ template <typename T> struct AtomicRef {
       T &expected, T desired, MemoryOrder mem_ord = MemoryOrder::SEQ_CST,
       [[maybe_unused]] MemoryScope mem_scope = MemoryScope::DEVICE) const {
     return __atomic_compare_exchange(ptr, &expected, &desired, false,
-                                     order(mem_ord),
-                                     infer_failure_order(mem_ord));
+                                     impl::order(mem_ord),
+                                     impl::infer_failure_order(mem_ord));
   }
 
   // Atomic compare exchange (strong, separate success/failure memory orders)
@@ -313,8 +328,8 @@ template <typename T> struct AtomicRef {
       MemoryOrder failure_order,
       [[maybe_unused]] MemoryScope mem_scope = MemoryScope::DEVICE) const {
     return __atomic_compare_exchange(ptr, &expected, &desired, false,
-                                     order(success_order),
-                                     order(failure_order));
+                                     impl::order(success_order),
+                                     impl::order(failure_order));
   }
 
   // Atomic exchange
@@ -323,10 +338,10 @@ template <typename T> struct AtomicRef {
            [[maybe_unused]] MemoryScope mem_scope = MemoryScope::DEVICE) const {
     T ret;
 #if __has_builtin(__scoped_atomic_exchange)
-    __scoped_atomic_exchange(ptr, &desired, &ret, order(mem_ord),
-                             scope(mem_scope));
+    __scoped_atomic_exchange(ptr, &desired, &ret, impl::order(mem_ord),
+                             impl::scope(mem_scope));
 #else
-    __atomic_exchange(ptr, &desired, &ret, order(mem_ord));
+    __atomic_exchange(ptr, &desired, &ret, impl::order(mem_ord));
 #endif
     return ret;
   }
@@ -336,10 +351,10 @@ template <typename T> struct AtomicRef {
             [[maybe_unused]] MemoryScope mem_scope = MemoryScope::DEVICE) const {
     static_assert(cpp::is_integral_v<T>, "T must be an integral type.");
 #if __has_builtin(__scoped_atomic_fetch_add)
-    return __scoped_atomic_fetch_add(ptr, increment, order(mem_ord),
-                                     scope(mem_scope));
+    return __scoped_atomic_fetch_add(ptr, increment, impl::order(mem_ord),
+                                     impl::scope(mem_scope));
 #else
-    return __atomic_fetch_add(ptr, increment, order(mem_ord));
+    return __atomic_fetch_add(ptr, increment, impl::order(mem_ord));
 #endif
   }
 
@@ -348,10 +363,10 @@ template <typename T> struct AtomicRef {
            [[maybe_unused]] MemoryScope mem_scope = MemoryScope::DEVICE) const {
     static_assert(cpp::is_integral_v<T>, "T must be an integral type.");
 #if __has_builtin(__scoped_atomic_fetch_or)
-    return __scoped_atomic_fetch_or(ptr, mask, order(mem_ord),
-                                    scope(mem_scope));
+    return __scoped_atomic_fetch_or(ptr, mask, impl::order(mem_ord),
+                                    impl::scope(mem_scope));
 #else
-    return __atomic_fetch_or(ptr, mask, order(mem_ord));
+    return __atomic_fetch_or(ptr, mask, impl::order(mem_ord));
 #endif
   }
 
@@ -360,10 +375,10 @@ template <typename T> struct AtomicRef {
             [[maybe_unused]] MemoryScope mem_scope = MemoryScope::DEVICE) const {
     static_assert(cpp::is_integral_v<T>, "T must be an integral type.");
 #if __has_builtin(__scoped_atomic_fetch_and)
-    return __scoped_atomic_fetch_and(ptr, mask, order(mem_ord),
-                                     scope(mem_scope));
+    return __scoped_atomic_fetch_and(ptr, mask, impl::order(mem_ord),
+                                     impl::scope(mem_scope));
 #else
-    return __atomic_fetch_and(ptr, mask, order(mem_ord));
+    return __atomic_fetch_and(ptr, mask, impl::order(mem_ord));
 #endif
   }
 
@@ -372,10 +387,10 @@ template <typename T> struct AtomicRef {
             [[maybe_unused]] MemoryScope mem_scope = MemoryScope::DEVICE) const {
     static_assert(cpp::is_integral_v<T>, "T must be an integral type.");
 #if __has_builtin(__scoped_atomic_fetch_sub)
-    return __scoped_atomic_fetch_sub(ptr, decrement, order(mem_ord),
-                                     scope(mem_scope));
+    return __scoped_atomic_fetch_sub(ptr, decrement, impl::order(mem_ord),
+                                     impl::scope(mem_scope));
 #else
-    return __atomic_fetch_sub(ptr, decrement, order(mem_ord));
+    return __atomic_fetch_sub(ptr, decrement, impl::order(mem_ord));
 #endif
   }
 };
@@ -408,11 +423,6 @@ LIBC_INLINE void atomic_signal_fence([[maybe_unused]] MemoryOrder mem_ord) {
   asm volatile("" ::: "memory");
 #endif
 }
-} // namespace atomic
-
-using atomic::Atomic;
-using atomic::AtomicRef;
-
 } // namespace cpp
 } // namespace LIBC_NAMESPACE_DECL
 
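The change above is purely structural: the internal helpers (order, scope, infer_failure_order, and the addressof wrapper they call) move from the old cpp::atomic namespace into cpp::impl, while Atomic and AtomicRef now live directly in cpp, so the using atomic::Atomic; aliases are no longer needed. As a rough illustration that callers are unaffected, here is a minimal usage sketch; it assumes the usual libc build setup, and the ref_count object plus the bump_ref_count and release_flag functions are hypothetical, not part of this commit.

#include "src/__support/CPP/atomic.h"

namespace LIBC_NAMESPACE_DECL {

// Hypothetical callers: the public cpp::Atomic / cpp::AtomicRef interface is
// unchanged by the rename, since the impl:: qualification is internal to
// atomic.h.
static cpp::Atomic<int> ref_count(0);

static int bump_ref_count() {
  // The memory-order (and memory-scope) parameters keep their defaults.
  return ref_count.fetch_add(1, cpp::MemoryOrder::RELAXED);
}

static void release_flag(int &flag) {
  // AtomicRef wraps an existing object instead of owning its own storage.
  cpp::AtomicRef<int>(flag).store(0, cpp::MemoryOrder::RELEASE);
}

} // namespace LIBC_NAMESPACE_DECL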