@@ -131,6 +131,12 @@ static inline uint64_t utils_atomic_decrement_u64(uint64_t *ptr) {
     return InterlockedDecrement64((LONG64 volatile *)ptr);
 }
 
+static inline uint64_t utils_atomic_and_u64(uint64_t *ptr, uint64_t val) {
+    ASSERT_IS_ALIGNED((uintptr_t)ptr, 8);
+    // return the value that had previously been in *ptr
+    return InterlockedAnd64((LONG64 volatile *)ptr, val);
+}
+
 static inline uint64_t utils_fetch_and_add_u64(uint64_t *ptr, uint64_t val) {
     ASSERT_IS_ALIGNED((uintptr_t)ptr, 8);
     // return the value that had previously been in *ptr
@@ -198,6 +204,12 @@ static inline uint64_t utils_atomic_decrement_u64(uint64_t *val) {
     return __atomic_sub_fetch(val, 1, memory_order_acq_rel);
 }
 
+static inline uint64_t utils_atomic_and_u64(uint64_t *ptr, uint64_t val) {
+    ASSERT_IS_ALIGNED((uintptr_t)ptr, 8);
+    // return the value that had previously been in *ptr
+    return __atomic_fetch_and(ptr, val, memory_order_acq_rel);
+}
+
 static inline uint64_t utils_fetch_and_add_u64(uint64_t *ptr, uint64_t val) {
     ASSERT_IS_ALIGNED((uintptr_t)ptr, 8);
     // return the value that had previously been in *ptr
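The commit adds only the primitive itself, so a short usage sketch may help: the typical pattern is to atomically clear one or more flag bits and branch on what the word held before the AND was applied. The sketch below is not part of the commit; the FLAG_* constants are hypothetical, it inlines the GCC/Clang branch of the diff as a standalone function (using the __ATOMIC_ACQ_REL builtin constant in place of the repo's memory_order_acq_rel), and it drops the repo-internal ASSERT_IS_ALIGNED check.

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

/* Hypothetical flag bits, for illustration only. */
#define FLAG_DIRTY  (1ULL << 0)
#define FLAG_LOCKED (1ULL << 1)

/* Standalone stand-in for utils_atomic_and_u64, mirroring the
 * non-Windows branch of the diff. */
static inline uint64_t atomic_and_u64(uint64_t *ptr, uint64_t val) {
    // return the value that had previously been in *ptr
    return __atomic_fetch_and(ptr, val, __ATOMIC_ACQ_REL);
}

int main(void) {
    uint64_t flags = FLAG_DIRTY | FLAG_LOCKED;

    /* Atomically clear FLAG_LOCKED; prev holds the word as it
     * was before the AND took effect. */
    uint64_t prev = atomic_and_u64(&flags, ~FLAG_LOCKED);

    if (prev & FLAG_LOCKED)
        printf("this thread cleared the lock bit\n");
    printf("flags now: 0x%" PRIx64 "\n", flags);
    return 0;
}

Because the fetch-and-AND returns the old value, only the thread that actually flipped the bit observes FLAG_LOCKED set in prev, which is what makes the primitive usable for release-style handoffs rather than a plain non-returning masked store.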