@@ -58,20 +58,16 @@ struct __atomic_waitable< _Tp,
 #if _LIBCPP_STD_VER >= 20
 #  if _LIBCPP_HAS_THREADS
 
-_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_one(void const volatile*) _NOEXCEPT;
-_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_all(void const volatile*) _NOEXCEPT;
-_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI __cxx_contention_t
-__libcpp_atomic_monitor(void const volatile*) _NOEXCEPT;
-_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void
-__libcpp_atomic_wait(void const volatile*, __cxx_contention_t) _NOEXCEPT;
-
-_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void
-__cxx_atomic_notify_one(__cxx_atomic_contention_t const volatile*) _NOEXCEPT;
-_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void
-__cxx_atomic_notify_all(__cxx_atomic_contention_t const volatile*) _NOEXCEPT;
-_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI __cxx_contention_t
+_LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_one(void const volatile*) _NOEXCEPT;
+_LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_all(void const volatile*) _NOEXCEPT;
+_LIBCPP_EXPORTED_FROM_ABI __cxx_contention_t __libcpp_atomic_monitor(void const volatile*) _NOEXCEPT;
+_LIBCPP_EXPORTED_FROM_ABI void __libcpp_atomic_wait(void const volatile*, __cxx_contention_t) _NOEXCEPT;
+
+_LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_one(__cxx_atomic_contention_t const volatile*) _NOEXCEPT;
+_LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_all(__cxx_atomic_contention_t const volatile*) _NOEXCEPT;
+_LIBCPP_EXPORTED_FROM_ABI __cxx_contention_t
 __libcpp_atomic_monitor(__cxx_atomic_contention_t const volatile*) _NOEXCEPT;
-_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void
+_LIBCPP_EXPORTED_FROM_ABI void
 __libcpp_atomic_wait(__cxx_atomic_contention_t const volatile*, __cxx_contention_t) _NOEXCEPT;
 
 template <class _AtomicWaitable, class _Poll>
@@ -82,7 +78,6 @@ struct __atomic_wait_backoff_impl {
 
   using __waitable_traits _LIBCPP_NODEBUG = __atomic_waitable_traits<__decay_t<_AtomicWaitable> >;
 
-  _LIBCPP_AVAILABILITY_SYNC
   _LIBCPP_HIDE_FROM_ABI bool
   __update_monitor_val_and_poll(__cxx_atomic_contention_t const volatile*, __cxx_contention_t& __monitor_val) const {
     // In case the contention type happens to be __cxx_atomic_contention_t, i.e. __cxx_atomic_impl<int64_t>,
@@ -95,7 +90,6 @@ struct __atomic_wait_backoff_impl {
     return __poll_(__monitor_val);
   }
 
-  _LIBCPP_AVAILABILITY_SYNC
   _LIBCPP_HIDE_FROM_ABI bool
   __update_monitor_val_and_poll(void const volatile* __contention_address, __cxx_contention_t& __monitor_val) const {
     // In case the contention type is anything else, platform wait is monitoring a __cxx_atomic_contention_t
@@ -105,7 +99,6 @@ struct __atomic_wait_backoff_impl {
     return __poll_(__current_val);
   }
 
-  _LIBCPP_AVAILABILITY_SYNC
   _LIBCPP_HIDE_FROM_ABI bool operator()(chrono::nanoseconds __elapsed) const {
     if (__elapsed > chrono::microseconds(4)) {
       auto __contention_address = __waitable_traits::__atomic_contention_address(__a_);
@@ -128,8 +121,7 @@ struct __atomic_wait_backoff_impl {
 // `false`, it must set the argument to its current understanding of the atomic
 // value. The predicate function must not return `false` spuriously.
 template <class _AtomicWaitable, class _Poll>
-_LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void
-__atomic_wait_unless(const _AtomicWaitable& __a, memory_order __order, _Poll&& __poll) {
+_LIBCPP_HIDE_FROM_ABI void __atomic_wait_unless(const _AtomicWaitable& __a, memory_order __order, _Poll&& __poll) {
   static_assert(__atomic_waitable<_AtomicWaitable>::value, "");
   __atomic_wait_backoff_impl<_AtomicWaitable, __decay_t<_Poll> > __backoff_fn = {__a, __poll, __order};
   std::__libcpp_thread_poll_with_backoff(
@@ -142,13 +134,13 @@ __atomic_wait_unless(const _AtomicWaitable& __a, memory_order __order, _Poll&& _
 }
 
 template <class _AtomicWaitable>
-_LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void __atomic_notify_one(const _AtomicWaitable& __a) {
+_LIBCPP_HIDE_FROM_ABI void __atomic_notify_one(const _AtomicWaitable& __a) {
   static_assert(__atomic_waitable<_AtomicWaitable>::value, "");
   std::__cxx_atomic_notify_one(__atomic_waitable_traits<__decay_t<_AtomicWaitable> >::__atomic_contention_address(__a));
 }
 
 template <class _AtomicWaitable>
-_LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void __atomic_notify_all(const _AtomicWaitable& __a) {
+_LIBCPP_HIDE_FROM_ABI void __atomic_notify_all(const _AtomicWaitable& __a) {
   static_assert(__atomic_waitable<_AtomicWaitable>::value, "");
   std::__cxx_atomic_notify_all(__atomic_waitable_traits<__decay_t<_AtomicWaitable> >::__atomic_contention_address(__a));
 }
@@ -180,8 +172,7 @@ _LIBCPP_HIDE_FROM_ABI bool __cxx_nonatomic_compare_equal(_Tp const& __lhs, _Tp c
 }
 
 template <class _AtomicWaitable, class _Tp>
-_LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void
-__atomic_wait(_AtomicWaitable& __a, _Tp __val, memory_order __order) {
+_LIBCPP_HIDE_FROM_ABI void __atomic_wait(_AtomicWaitable& __a, _Tp __val, memory_order __order) {
   static_assert(__atomic_waitable<_AtomicWaitable>::value, "");
   std::__atomic_wait_unless(__a, __order, [&](_Tp const& __current) {
     return !std::__cxx_nonatomic_compare_equal(__current, __val);
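
Note (illustrative, not part of this patch): a minimal sketch of the public C++20 wait/notify API that the internal helpers touched above back in libc++, assuming a C++20 toolchain; the names flag and waiter exist only for this example.

#include <atomic>
#include <thread>

int main() {
  std::atomic<int> flag{0};

  std::thread waiter([&] {
    // Blocks while flag still holds 0; in libc++ this is expected to go through
    // the __atomic_wait / __atomic_wait_unless poll-with-backoff path above.
    flag.wait(0, std::memory_order_acquire);
  });

  flag.store(1, std::memory_order_release);
  // Wakes the waiter; in libc++ this is expected to lower to __atomic_notify_one
  // and then __cxx_atomic_notify_one on the contention address.
  flag.notify_one();
  waiter.join();
}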