Skip to content

Commit 6f2102b

Browse files
authored
CDRIVER-4229 emulate int32 and int atomic operations on zSeries (#897)
1 parent 8d1af19 commit 6f2102b

File tree

2 files changed: +286 −14 lines changed

src/libbson/src/bson/bson-atomic.c

Lines changed: 122 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -48,40 +48,40 @@ bson_memory_barrier (void)
4848
}
4949

5050
/**
51-
* 32-bit x86 does not support 64-bit atomic integer operations.
51+
* Some platforms do not support compiler intrinsics for atomic operations.
5252
* We emulate that here using a spin lock and regular arithmetic operations
5353
*/
54-
static int8_t g64bitAtomicLock = 0;
54+
static int8_t gEmulAtomicLock = 0;
5555

5656
static void
57-
_lock_64bit_atomic ()
57+
_lock_emul_atomic ()
5858
{
5959
int i;
6060
if (bson_atomic_int8_compare_exchange_weak (
61-
&g64bitAtomicLock, 0, 1, bson_memory_order_acquire) == 0) {
61+
&gEmulAtomicLock, 0, 1, bson_memory_order_acquire) == 0) {
6262
/* Successfully took the spinlock */
6363
return;
6464
}
6565
/* Failed. Try taking ten more times, then begin sleeping. */
6666
for (i = 0; i < 10; ++i) {
6767
if (bson_atomic_int8_compare_exchange_weak (
68-
&g64bitAtomicLock, 0, 1, bson_memory_order_acquire) == 0) {
68+
&gEmulAtomicLock, 0, 1, bson_memory_order_acquire) == 0) {
6969
/* Succeeded in taking the lock */
7070
return;
7171
}
7272
}
7373
/* Still don't have the lock. Spin and yield */
7474
while (bson_atomic_int8_compare_exchange_weak (
75-
&g64bitAtomicLock, 0, 1, bson_memory_order_acquire) != 0) {
75+
&gEmulAtomicLock, 0, 1, bson_memory_order_acquire) != 0) {
7676
bson_thrd_yield ();
7777
}
7878
}
7979

8080
static void
81-
_unlock_64bit_atomic ()
81+
_unlock_emul_atomic ()
8282
{
8383
int64_t rv = bson_atomic_int8_exchange (
84-
&g64bitAtomicLock, 0, bson_memory_order_release);
84+
&gEmulAtomicLock, 0, bson_memory_order_release);
8585
BSON_ASSERT (rv == 1 && "Released atomic lock while not holding it");
8686
}
8787

@@ -91,10 +91,10 @@ _bson_emul_atomic_int64_fetch_add (volatile int64_t *p,
9191
enum bson_memory_order _unused)
9292
{
9393
int64_t ret;
94-
_lock_64bit_atomic ();
94+
_lock_emul_atomic ();
9595
ret = *p;
9696
*p += n;
97-
_unlock_64bit_atomic ();
97+
_unlock_emul_atomic ();
9898
return ret;
9999
}
100100

@@ -104,10 +104,10 @@ _bson_emul_atomic_int64_exchange (volatile int64_t *p,
104104
enum bson_memory_order _unused)
105105
{
106106
int64_t ret;
107-
_lock_64bit_atomic ();
107+
_lock_emul_atomic ();
108108
ret = *p;
109109
*p = n;
110-
_unlock_64bit_atomic ();
110+
_unlock_emul_atomic ();
111111
return ret;
112112
}
113113

@@ -118,12 +118,12 @@ _bson_emul_atomic_int64_compare_exchange_strong (volatile int64_t *p,
118118
enum bson_memory_order _unused)
119119
{
120120
int64_t ret;
121-
_lock_64bit_atomic ();
121+
_lock_emul_atomic ();
122122
ret = *p;
123123
if (ret == expect_value) {
124124
*p = new_value;
125125
}
126-
_unlock_64bit_atomic ();
126+
_unlock_emul_atomic ();
127127
return ret;
128128
}
129129

@@ -137,3 +137,111 @@ _bson_emul_atomic_int64_compare_exchange_weak (volatile int64_t *p,
137137
return _bson_emul_atomic_int64_compare_exchange_strong (
138138
p, expect_value, new_value, order);
139139
}
140+
141+
142+
int32_t
143+
_bson_emul_atomic_int32_fetch_add (volatile int32_t *p,
144+
int32_t n,
145+
enum bson_memory_order _unused)
146+
{
147+
int32_t ret;
148+
_lock_emul_atomic ();
149+
ret = *p;
150+
*p += n;
151+
_unlock_emul_atomic ();
152+
return ret;
153+
}
154+
155+
int32_t
156+
_bson_emul_atomic_int32_exchange (volatile int32_t *p,
157+
int32_t n,
158+
enum bson_memory_order _unused)
159+
{
160+
int32_t ret;
161+
_lock_emul_atomic ();
162+
ret = *p;
163+
*p = n;
164+
_unlock_emul_atomic ();
165+
return ret;
166+
}
167+
168+
int32_t
169+
_bson_emul_atomic_int32_compare_exchange_strong (volatile int32_t *p,
170+
int32_t expect_value,
171+
int32_t new_value,
172+
enum bson_memory_order _unused)
173+
{
174+
int32_t ret;
175+
_lock_emul_atomic ();
176+
ret = *p;
177+
if (ret == expect_value) {
178+
*p = new_value;
179+
}
180+
_unlock_emul_atomic ();
181+
return ret;
182+
}
183+
184+
int32_t
185+
_bson_emul_atomic_int32_compare_exchange_weak (volatile int32_t *p,
186+
int32_t expect_value,
187+
int32_t new_value,
188+
enum bson_memory_order order)
189+
{
190+
/* We're emulating. We can't do a weak version. */
191+
return _bson_emul_atomic_int32_compare_exchange_strong (
192+
p, expect_value, new_value, order);
193+
}
194+
195+
196+
int
197+
_bson_emul_atomic_int_fetch_add (volatile int *p,
198+
int n,
199+
enum bson_memory_order _unused)
200+
{
201+
int ret;
202+
_lock_emul_atomic ();
203+
ret = *p;
204+
*p += n;
205+
_unlock_emul_atomic ();
206+
return ret;
207+
}
208+
209+
int
210+
_bson_emul_atomic_int_exchange (volatile int *p,
211+
int n,
212+
enum bson_memory_order _unused)
213+
{
214+
int ret;
215+
_lock_emul_atomic ();
216+
ret = *p;
217+
*p = n;
218+
_unlock_emul_atomic ();
219+
return ret;
220+
}
221+
222+
int
223+
_bson_emul_atomic_int_compare_exchange_strong (volatile int *p,
224+
int expect_value,
225+
int new_value,
226+
enum bson_memory_order _unused)
227+
{
228+
int ret;
229+
_lock_emul_atomic ();
230+
ret = *p;
231+
if (ret == expect_value) {
232+
*p = new_value;
233+
}
234+
_unlock_emul_atomic ();
235+
return ret;
236+
}
237+
238+
int
239+
_bson_emul_atomic_int_compare_exchange_weak (volatile int *p,
240+
int expect_value,
241+
int new_value,
242+
enum bson_memory_order order)
243+
{
244+
/* We're emulating. We can't do a weak version. */
245+
return _bson_emul_atomic_int_compare_exchange_strong (
246+
p, expect_value, new_value, order);
247+
}

0 commit comments

Comments
 (0)