 
#include <__assert>
#include <__atomic/check_memory_order.h>
-#include <__atomic/cxx_atomic_impl.h>
-#include <__atomic/is_always_lock_free.h>
#include <__atomic/to_gcc_order.h>
#include <__config>
#include <__memory/addressof.h>
@@ -46,22 +44,22 @@ _LIBCPP_BEGIN_NAMESPACE_STD

template <class _Tp, bool = is_integral_v<_Tp> && !is_same_v<_Tp, bool>, bool = is_floating_point_v<_Tp>>
struct __atomic_ref_base {
-  mutable __cxx_atomic_impl<_Tp&> __a_;
+  _Tp* __ptr_;

  using value_type = _Tp;

  static constexpr size_t required_alignment = alignof(_Tp);

-  static constexpr bool is_always_lock_free = __libcpp_is_always_lock_free<_Tp>::__value;
+  static constexpr bool is_always_lock_free = __atomic_always_lock_free(sizeof(_Tp), 0);

-  _LIBCPP_HIDE_FROM_ABI bool is_lock_free() const noexcept { return __cxx_atomic_is_lock_free(sizeof(_Tp)); }
+  _LIBCPP_HIDE_FROM_ABI bool is_lock_free() const noexcept { return __atomic_is_lock_free(sizeof(_Tp), 0); }

  _LIBCPP_HIDE_FROM_ABI void store(_Tp __desired, memory_order __order = memory_order::seq_cst) const noexcept
      _LIBCPP_CHECK_STORE_MEMORY_ORDER(__order) {
    _LIBCPP_ASSERT_UNCATEGORIZED(
        __order == memory_order::relaxed || __order == memory_order::release || __order == memory_order::seq_cst,
        "memory order argument to atomic store operation is invalid");
-    __cxx_atomic_store(&__a_, __desired, __order);
+    __atomic_store(__ptr_, std::addressof(__desired), __to_gcc_order(__order));
  }

  _LIBCPP_HIDE_FROM_ABI _Tp operator=(_Tp __desired) const noexcept {
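For context on the replacements in this and the following hunks: the patch drops the __cxx_atomic_* dispatch layer and calls the GCC/Clang generic __atomic_* builtins directly. Those builtins work through pointers, taking the new value and (for load/exchange) a result buffer by address. A minimal standalone sketch of that calling pattern on a plain int — illustrative only, not libc++ code:

    #include <cassert>

    int main() {
      int obj     = 1;
      int desired = 2;

      // Store: the generic builtin reads the new value through a pointer.
      __atomic_store(&obj, &desired, __ATOMIC_SEQ_CST);

      // Load: the result is written into a caller-provided buffer, which is
      // what the alignas(_Tp) scratch array in the patch emulates for _Tp.
      int loaded;
      __atomic_load(&obj, &loaded, __ATOMIC_SEQ_CST);
      assert(loaded == 2);

      // Exchange: the old value comes back through the third pointer.
      int next = 3, previous;
      __atomic_exchange(&obj, &next, &previous, __ATOMIC_SEQ_CST);
      assert(previous == 2 && obj == 3);
      return 0;
    }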
@@ -75,13 +73,19 @@ struct __atomic_ref_base {
        __order == memory_order::relaxed || __order == memory_order::consume || __order == memory_order::acquire ||
            __order == memory_order::seq_cst,
        "memory order argument to atomic load operation is invalid");
-    return __cxx_atomic_load(&__a_, __order);
+    alignas(_Tp) unsigned char __mem[sizeof(_Tp)];
+    auto* __ret = reinterpret_cast<_Tp*>(__mem);
+    __atomic_load(__ptr_, __ret, __to_gcc_order(__order));
+    return *__ret;
  }

  _LIBCPP_HIDE_FROM_ABI operator _Tp() const noexcept { return load(); }

  _LIBCPP_HIDE_FROM_ABI _Tp exchange(_Tp __desired, memory_order __order = memory_order::seq_cst) const noexcept {
-    return __cxx_atomic_exchange(&__a_, __desired, __order);
+    alignas(_Tp) unsigned char __mem[sizeof(_Tp)];
+    auto* __ret = reinterpret_cast<_Tp*>(__mem);
+    __atomic_exchange(__ptr_, std::addressof(__desired), __ret, __to_gcc_order(__order));
+    return *__ret;
  }
  _LIBCPP_HIDE_FROM_ABI bool
  compare_exchange_weak(_Tp& __expected, _Tp __desired, memory_order __success, memory_order __failure) const noexcept
@@ -90,7 +94,13 @@ struct __atomic_ref_base {
        __failure == memory_order::relaxed || __failure == memory_order::consume ||
            __failure == memory_order::acquire || __failure == memory_order::seq_cst,
        "failure memory order argument to weak atomic compare-and-exchange operation is invalid");
-    return __cxx_atomic_compare_exchange_weak(&__a_, &__expected, __desired, __success, __failure);
+    return __atomic_compare_exchange(
+        __ptr_,
+        std::addressof(__expected),
+        std::addressof(__desired),
+        true,
+        __to_gcc_order(__success),
+        __to_gcc_order(__failure));
  }
  _LIBCPP_HIDE_FROM_ABI bool
  compare_exchange_strong(_Tp& __expected, _Tp __desired, memory_order __success, memory_order __failure) const noexcept
@@ -99,16 +109,34 @@ struct __atomic_ref_base {
        __failure == memory_order::relaxed || __failure == memory_order::consume ||
            __failure == memory_order::acquire || __failure == memory_order::seq_cst,
        "failure memory order argument to strong atomic compare-and-exchange operation is invalid");
-    return __cxx_atomic_compare_exchange_strong(&__a_, &__expected, __desired, __success, __failure);
+    return __atomic_compare_exchange(
+        __ptr_,
+        std::addressof(__expected),
+        std::addressof(__desired),
+        false,
+        __to_gcc_order(__success),
+        __to_gcc_order(__failure));
  }

  _LIBCPP_HIDE_FROM_ABI bool
  compare_exchange_weak(_Tp& __expected, _Tp __desired, memory_order __order = memory_order::seq_cst) const noexcept {
-    return __cxx_atomic_compare_exchange_weak(&__a_, &__expected, __desired, __order, __order);
+    return __atomic_compare_exchange(
+        __ptr_,
+        std::addressof(__expected),
+        std::addressof(__desired),
+        true,
+        __to_gcc_order(__order),
+        __to_gcc_failure_order(__order));
  }
  _LIBCPP_HIDE_FROM_ABI bool
  compare_exchange_strong(_Tp& __expected, _Tp __desired, memory_order __order = memory_order::seq_cst) const noexcept {
-    return __cxx_atomic_compare_exchange_strong(&__a_, &__expected, __desired, __order, __order);
+    return __atomic_compare_exchange(
+        __ptr_,
+        std::addressof(__expected),
+        std::addressof(__desired),
+        false,
+        __to_gcc_order(__order),
+        __to_gcc_failure_order(__order));
  }

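All four compare_exchange overloads now funnel into the same generic builtin; only the weak/strong flag and the failure order differ. A short, self-contained sketch of how that builtin behaves on a plain int (not the libc++ wrapper), showing both the success path and the write-back of the observed value on failure:

    #include <cassert>

    int main() {
      int obj      = 5;
      int expected = 5;
      int desired  = 7;

      // Strong CAS (fourth argument false): values match, so obj becomes 7.
      bool ok = __atomic_compare_exchange(
          &obj, &expected, &desired, /*weak=*/false, __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
      assert(ok && obj == 7);

      // A failing CAS stores the value actually seen back into 'expected'.
      int wrong = 42;
      ok = __atomic_compare_exchange(
          &obj, &wrong, &desired, /*weak=*/false, __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
      assert(!ok && wrong == 7);
      return 0;
    }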
  _LIBCPP_HIDE_FROM_ABI void wait(_Tp __old, memory_order __order = memory_order::seq_cst) const noexcept
@@ -117,12 +145,18 @@ struct __atomic_ref_base {
        __order == memory_order::relaxed || __order == memory_order::consume || __order == memory_order::acquire ||
            __order == memory_order::seq_cst,
        "memory order argument to atomic wait operation is invalid");
-    __cxx_atomic_wait(addressof(__a_), __old, __order);
+    // FIXME
+    (void)__old;
+    (void)__order;
+  }
+  _LIBCPP_HIDE_FROM_ABI void notify_one() const noexcept {
+    // FIXME
+  }
+  _LIBCPP_HIDE_FROM_ABI void notify_all() const noexcept {
+    // FIXME
  }
-  _LIBCPP_HIDE_FROM_ABI void notify_one() const noexcept { __cxx_atomic_notify_one(addressof(__a_)); }
-  _LIBCPP_HIDE_FROM_ABI void notify_all() const noexcept { __cxx_atomic_notify_all(addressof(__a_)); }

-  _LIBCPP_HIDE_FROM_ABI __atomic_ref_base(_Tp& __obj) : __a_(__obj) {}
+  _LIBCPP_HIDE_FROM_ABI __atomic_ref_base(_Tp& __obj) : __ptr_(&__obj) {}
};

template <class _Tp>
@@ -137,19 +171,19 @@ struct __atomic_ref_base<_Tp, /*_IsIntegral=*/true, /*_IsFloatingPoint=*/false>
  _LIBCPP_HIDE_FROM_ABI _Tp operator=(_Tp __desired) const noexcept { return __base::operator=(__desired); }

  _LIBCPP_HIDE_FROM_ABI _Tp fetch_add(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
-    return __cxx_atomic_fetch_add(&this->__a_, __arg, __order);
+    return __atomic_fetch_add(this->__ptr_, __arg, __to_gcc_order(__order));
  }
  _LIBCPP_HIDE_FROM_ABI _Tp fetch_sub(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
-    return __cxx_atomic_fetch_sub(&this->__a_, __arg, __order);
+    return __atomic_fetch_sub(this->__ptr_, __arg, __to_gcc_order(__order));
  }
  _LIBCPP_HIDE_FROM_ABI _Tp fetch_and(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
-    return __cxx_atomic_fetch_and(&this->__a_, __arg, __order);
+    return __atomic_fetch_and(this->__ptr_, __arg, __to_gcc_order(__order));
  }
  _LIBCPP_HIDE_FROM_ABI _Tp fetch_or(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
-    return __cxx_atomic_fetch_or(&this->__a_, __arg, __order);
+    return __atomic_fetch_or(this->__ptr_, __arg, __to_gcc_order(__order));
  }
  _LIBCPP_HIDE_FROM_ABI _Tp fetch_xor(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
-    return __cxx_atomic_fetch_xor(&this->__a_, __arg, __order);
+    return __atomic_fetch_xor(this->__ptr_, __arg, __to_gcc_order(__order));
  }

  _LIBCPP_HIDE_FROM_ABI _Tp operator++(int) const noexcept { return fetch_add(_Tp(1)); }
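At the user level, the integral specialization above is what makes these fetch operations available on an atomic_ref bound to an ordinary integer. A brief C++20 usage example, standard API only, nothing libc++-internal:

    #include <atomic>
    #include <cassert>

    int main() {
      // The referenced object must remain alive, and be aligned to
      // atomic_ref<T>::required_alignment, while any atomic_ref refers to it.
      alignas(std::atomic_ref<int>::required_alignment) int counter = 0;

      std::atomic_ref<int> ref(counter);
      ref.fetch_add(2);                             // counter == 2
      ref.fetch_or(1);                              // counter == 3
      ref.fetch_sub(1, std::memory_order_relaxed);  // counter == 2
      assert(ref.load() == 2);
      return 0;
    }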
@@ -175,10 +209,10 @@ struct __atomic_ref_base<_Tp, /*_IsIntegral=*/false, /*_IsFloatingPoint=*/true>
  _LIBCPP_HIDE_FROM_ABI _Tp operator=(_Tp __desired) const noexcept { return __base::operator=(__desired); }

  _LIBCPP_HIDE_FROM_ABI _Tp fetch_add(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
-    return __cxx_atomic_fetch_add(&this->__a_, __arg, __order);
+    return __atomic_fetch_add(this->__ptr_, __arg, __to_gcc_order(__order));
  }
  _LIBCPP_HIDE_FROM_ABI _Tp fetch_sub(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
-    return __cxx_atomic_fetch_sub(&this->__a_, __arg, __order);
+    return __atomic_fetch_sub(this->__ptr_, __arg, __to_gcc_order(__order));
  }

  _LIBCPP_HIDE_FROM_ABI _Tp operator+=(_Tp __arg) const noexcept { return fetch_add(__arg) + __arg; }
@@ -210,10 +244,10 @@ struct atomic_ref<_Tp*> : public __atomic_ref_base<_Tp*> {
  using difference_type = ptrdiff_t;

  _LIBCPP_HIDE_FROM_ABI _Tp* fetch_add(ptrdiff_t __arg, memory_order __order = memory_order_seq_cst) const noexcept {
-    return __cxx_atomic_fetch_add(&this->__a_, __arg, __order);
+    return __atomic_fetch_add(this->__ptr_, __arg * sizeof(_Tp), __to_gcc_order(__order));
  }
  _LIBCPP_HIDE_FROM_ABI _Tp* fetch_sub(ptrdiff_t __arg, memory_order __order = memory_order_seq_cst) const noexcept {
-    return __cxx_atomic_fetch_sub(&this->__a_, __arg, __order);
+    return __atomic_fetch_sub(this->__ptr_, __arg * sizeof(_Tp), __to_gcc_order(__order));
  }

  _LIBCPP_HIDE_FROM_ABI _Tp* operator++(int) const noexcept { return fetch_add(1); }
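One detail worth noting in the pointer specialization: the generic __atomic_fetch_add/__atomic_fetch_sub builtins treat pointer operands as raw addresses (byte arithmetic) rather than scaling by the pointee size, which is presumably why the patch multiplies __arg by sizeof(_Tp) to keep element-wise semantics. The user-visible behaviour, sketched with the standard C++20 API:

    #include <atomic>
    #include <cassert>

    int main() {
      int  data[4] = {10, 20, 30, 40};
      int* cursor  = data;

      std::atomic_ref<int*> ref(cursor);
      ref.fetch_add(1);                 // advance by one int, not one byte
      assert(cursor == data + 1 && *ref.load() == 20);

      ref += 2;                         // operator+= is built on fetch_add
      assert(cursor == data + 3 && *ref.load() == 40);
      return 0;
    }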