@@ -21,23 +21,12 @@
#include "utils_windows_intrin.h"

#pragma intrinsic(_BitScanForward64)

- #else
- #include <pthread.h>
+ #else // !_WIN32

- #ifndef __cplusplus
+ #include <pthread.h>
#include <stdatomic.h>
- #else /* __cplusplus */
- #include <atomic>
- #define _Atomic(X) std::atomic<X>
-
- using std::memory_order_acq_rel;
- using std::memory_order_acquire;
- using std::memory_order_relaxed;
- using std::memory_order_release;
-
- #endif /* __cplusplus */
-
- #endif /* _WIN32 */
+ #endif /* !_WIN32 */

#include "utils_common.h"
#include "utils_sanitizers.h"
@@ -118,14 +107,6 @@ static __inline void utils_atomic_load_acquire_ptr(void **ptr, void **out) {
    *(uintptr_t *)out = ret;
}

- static __inline void utils_atomic_store_release_u64(uint64_t *ptr,
-                                                     uint64_t *val) {
-     ASSERT_IS_ALIGNED((uintptr_t)ptr, 8);
-     ASSERT_IS_ALIGNED((uintptr_t)val, 8);
-     utils_annotate_release(ptr);
-     InterlockedExchange64((LONG64 volatile *)ptr, *(LONG64 *)val);
- }
-
static __inline void utils_atomic_store_release_ptr(void **ptr, void *val) {
    ASSERT_IS_ALIGNED((uintptr_t)ptr, 8);
    utils_annotate_release(ptr);
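The kept `_ptr` variant follows the same shape as the removed `_u64` one: assert alignment, annotate for the sanitizers, then publish through a full-barrier interlocked exchange, which implies release semantics. A minimal usage sketch of the acquire/release pointer pair this header provides (the `node_t` type, the two thread functions, and the included header name are assumptions, not from this diff):

```c
#include <stddef.h>
#include "utils_concurrency.h" /* assumed name of the header under review */

typedef struct {
    int payload;
} node_t;

static void *global_slot = NULL;

/* Producer: finish all plain writes, then publish the pointer with
 * release semantics so a consumer that acquires it sees the payload. */
void producer(node_t *n) {
    n->payload = 42;
    utils_atomic_store_release_ptr(&global_slot, n);
}

/* Consumer: the acquire load pairs with the release store above. */
node_t *consumer(void) {
    void *p;
    utils_atomic_load_acquire_ptr(&global_slot, &p);
    return (node_t *)p; /* if non-NULL, payload == 42 is visible */
}
```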
@@ -146,14 +127,12 @@ static __inline uint64_t utils_atomic_decrement_u64(uint64_t *ptr) {

static __inline uint64_t utils_fetch_and_add_u64(uint64_t *ptr, uint64_t val) {
    ASSERT_IS_ALIGNED((uintptr_t)ptr, 8);
-     ASSERT_IS_ALIGNED((uintptr_t)&val, 8);
    // return the value that had previously been in *ptr
    return InterlockedExchangeAdd64((LONG64 volatile *)(ptr), val);
}

static __inline uint64_t utils_fetch_and_sub_u64(uint64_t *ptr, uint64_t val) {
    ASSERT_IS_ALIGNED((uintptr_t)ptr, 8);
-     ASSERT_IS_ALIGNED((uintptr_t)&val, 8);
    // return the value that had previously been in *ptr
    // NOTE: on Windows there is no *Sub* version of InterlockedExchange
    return InterlockedExchangeAdd64((LONG64 volatile *)(ptr), -(LONG64)val);
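As the NOTE above says, Win32 has no `InterlockedExchangeSub64`, so subtraction is expressed as an interlocked add of the negated value; like C11's `atomic_fetch_sub`, the call returns the value the target held before the operation. A small standalone Windows sketch of the same trick (`fetch_sub_u64` is an illustrative name):

```c
#include <stdint.h>
#include <stdio.h>
#include <windows.h>

/* Fetch-and-sub built from InterlockedExchangeAdd64, mirroring the
 * helper above: adding -(LONG64)val subtracts val, and the return
 * value is the contents *ptr held before the operation. */
static uint64_t fetch_sub_u64(uint64_t *ptr, uint64_t val) {
    return (uint64_t)InterlockedExchangeAdd64((LONG64 volatile *)ptr,
                                              -(LONG64)val);
}

int main(void) {
    uint64_t x = 10;
    uint64_t old = fetch_sub_u64(&x, 3);
    printf("old=%llu x=%llu\n", (unsigned long long)old,
           (unsigned long long)x); /* prints: old=10 x=7 */
    return 0;
}
```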
@@ -203,14 +182,6 @@ static inline void utils_atomic_load_acquire_ptr(void **ptr, void **out) {
    utils_annotate_acquire((void *)ptr);
}

- static inline void utils_atomic_store_release_u64(uint64_t *ptr,
-                                                   uint64_t *val) {
-     ASSERT_IS_ALIGNED((uintptr_t)ptr, 8);
-     ASSERT_IS_ALIGNED((uintptr_t)val, 8);
-     utils_annotate_release(ptr);
-     __atomic_store(ptr, val, memory_order_release);
- }
-
static inline void utils_atomic_store_release_ptr(void **ptr, void *val) {
    ASSERT_IS_ALIGNED((uintptr_t)ptr, 8);
    utils_annotate_release(ptr);
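On this GCC/Clang path the release semantics come from the `__atomic` builtins rather than Interlocked* calls; note that the removed `__atomic_store` takes a pointer to the value, while the `_n` forms take the value directly. A minimal standalone sketch of the store-release / load-acquire pairing (function names are illustrative):

```c
#include <stdint.h>

/* Minimal GCC/Clang builtin pairing: a release store on one thread
 * synchronizes-with an acquire load of the same location on another.
 * __ATOMIC_RELEASE / __ATOMIC_ACQUIRE match the memory_order_* values
 * used elsewhere in this header. */
void publish_u64(uint64_t *ptr, uint64_t val) {
    __atomic_store_n(ptr, val, __ATOMIC_RELEASE);
}

uint64_t read_u64(uint64_t *ptr) {
    return __atomic_load_n(ptr, __ATOMIC_ACQUIRE);
}
```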