@@ -57,18 +57,6 @@ struct __atomic_waitable< _Tp,
     decltype(__atomic_waitable_traits<__decay_t<_Tp> >::__atomic_contention_address(
         std::declval<const _Tp&>()))> > : true_type {};

-template <class _AtomicWaitable, class _Poll>
-struct __atomic_wait_poll_impl {
-  const _AtomicWaitable& __a_;
-  _Poll __poll_;
-  memory_order __order_;
-
-  _LIBCPP_HIDE_FROM_ABI bool operator()() const {
-    auto __current_val = __atomic_waitable_traits<__decay_t<_AtomicWaitable> >::__atomic_load(__a_, __order_);
-    return __poll_(__current_val);
-  }
-};
-
 #if _LIBCPP_HAS_THREADS

 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_one(void const volatile*) _NOEXCEPT;
@@ -144,11 +132,16 @@ struct __atomic_wait_backoff_impl {
 // value. The predicate function must not return `false` spuriously.
 template <class _AtomicWaitable, class _Poll>
 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void
-__atomic_wait_unless(const _AtomicWaitable& __a, _Poll&& __poll, memory_order __order) {
+__atomic_wait_unless(const _AtomicWaitable& __a, memory_order __order, _Poll&& __poll) {
   static_assert(__atomic_waitable<_AtomicWaitable>::value, "");
-  __atomic_wait_poll_impl<_AtomicWaitable, __decay_t<_Poll> > __poll_impl = {__a, __poll, __order};
   __atomic_wait_backoff_impl<_AtomicWaitable, __decay_t<_Poll> > __backoff_fn = {__a, __poll, __order};
-  std::__libcpp_thread_poll_with_backoff(__poll_impl, __backoff_fn);
+  std::__libcpp_thread_poll_with_backoff(
+      /* poll */
+      [&]() {
+        auto __current_val = __atomic_waitable_traits<__decay_t<_AtomicWaitable> >::__atomic_load(__a, __order);
+        return __poll(__current_val);
+      },
+      /* backoff */ __backoff_fn);
 }

 template <class _AtomicWaitable>
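The poll callable built inline here follows a general poll-with-backoff shape: load the current value, ask the predicate whether waiting can stop, and otherwise hand control to a backoff policy. Below is a minimal standalone sketch of that pattern, using hypothetical names (`poll_with_backoff`, `flag`) rather than libc++'s internal helpers, with a plain `yield` standing in for the real backoff policy:

#include <atomic>
#include <thread>

// Hypothetical sketch: spin on a caller-supplied predicate, invoking a
// backoff policy between unsuccessful polls.
template <class Poll, class Backoff>
bool poll_with_backoff(Poll poll, Backoff backoff) {
  int iteration = 0;
  while (true) {
    if (poll())
      return true;        // predicate satisfied, stop waiting
    backoff(iteration++); // e.g. yield, sleep, or block
  }
}

int main() {
  std::atomic<int> flag{0};
  std::thread producer([&] { flag.store(1, std::memory_order_release); });

  // The "poll" lambda mirrors the one in the diff: load the current value
  // and ask the predicate whether waiting can stop.
  poll_with_backoff(
      [&] { return flag.load(std::memory_order_acquire) == 1; },
      [](int) { std::this_thread::yield(); });

  producer.join();
}

Capturing `__a`, `__poll`, and `__order` by reference in the lambda carries the same state the removed `__atomic_wait_poll_impl` function object held, without a separately named type.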
@@ -166,9 +159,14 @@ _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void __atomic_notify_all(const _
166159#else // _LIBCPP_HAS_THREADS
167160
168161template <class _AtomicWaitable , class _Poll >
169- _LIBCPP_HIDE_FROM_ABI void __atomic_wait_unless (const _AtomicWaitable& __a, _Poll&& __poll, memory_order __order) {
170- __atomic_wait_poll_impl<_AtomicWaitable, __decay_t <_Poll> > __poll_fn = {__a, __poll, __order};
171- std::__libcpp_thread_poll_with_backoff (__poll_fn, __spinning_backoff_policy ());
162+ _LIBCPP_HIDE_FROM_ABI void __atomic_wait_unless (const _AtomicWaitable& __a, memory_order __order, _Poll&& __poll) {
163+ std::__libcpp_thread_poll_with_backoff (
164+ /* poll */
165+ [&]() {
166+ auto __current_val = __atomic_waitable_traits<__decay_t <_AtomicWaitable> >::__atomic_load (__a, __order);
167+ return __poll (__current_val);
168+ },
169+ /* backoff */ __spinning_backoff_policy ());
172170}
173171
174172template <class _AtomicWaitable >
@@ -184,20 +182,13 @@ _LIBCPP_HIDE_FROM_ABI bool __cxx_nonatomic_compare_equal(_Tp const& __lhs, _Tp c
   return std::memcmp(std::addressof(__lhs), std::addressof(__rhs), sizeof(_Tp)) == 0;
 }

-template <class _Tp>
-struct __atomic_compare_unequal_to {
-  _Tp __val_;
-  _LIBCPP_HIDE_FROM_ABI bool operator()(const _Tp& __arg) const {
-    return !std::__cxx_nonatomic_compare_equal(__arg, __val_);
-  }
-};
-
-template <class _AtomicWaitable, class _Up>
+template <class _AtomicWaitable, class _Tp>
 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void
-__atomic_wait(_AtomicWaitable& __a, _Up __val, memory_order __order) {
+__atomic_wait(_AtomicWaitable& __a, _Tp __val, memory_order __order) {
   static_assert(__atomic_waitable<_AtomicWaitable>::value, "");
-  __atomic_compare_unequal_to<_Up> __nonatomic_equal = {__val};
-  std::__atomic_wait_unless(__a, __nonatomic_equal, __order);
+  std::__atomic_wait_unless(__a, __order, [&](_Tp const& __current) {
+    return !std::__cxx_nonatomic_compare_equal(__current, __val);
+  });
 }

 _LIBCPP_END_NAMESPACE_STD
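`__atomic_wait` keeps waiting until the loaded value's object representation differs from `__val`, comparing bytes with `std::memcmp` rather than `operator==` so types without an equality operator still work. A rough standalone sketch of that idea, using hypothetical names (`wait_until_changed`, `nonatomic_compare_equal`) and a plain spin loop in place of the real backoff/futex path:

#include <atomic>
#include <cstring>
#include <memory>
#include <thread>

// Hypothetical sketch of the "value changed" predicate: compare the object
// representations of two values byte-wise, as __cxx_nonatomic_compare_equal
// does above, instead of relying on operator==.
template <class T>
bool nonatomic_compare_equal(T const& lhs, T const& rhs) {
  return std::memcmp(std::addressof(lhs), std::addressof(rhs), sizeof(T)) == 0;
}

template <class T>
void wait_until_changed(std::atomic<T> const& a, T old_value) {
  for (;;) {
    T current = a.load(std::memory_order_acquire);
    if (!nonatomic_compare_equal(current, old_value))
      return; // representation changed, stop waiting
    // a real implementation would back off or block here instead of spinning
  }
}

int main() {
  std::atomic<int> state{0};
  std::thread t([&] { state.store(42, std::memory_order_release); });
  wait_until_changed(state, 0); // returns once another thread stores a new value
  t.join();
}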