25
25
#include <sched.h>
26
26
#endif
27
27
28
- static void
29
- _thrd_yield (void )
28
+ static void _thrd_yield (void )
30
29
{
31
- BSON_IF_WINDOWS (SwitchToThread ();)
32
- BSON_IF_POSIX (sched_yield ();)
30
+ BSON_IF_WINDOWS (SwitchToThread ();)
31
+ BSON_IF_POSIX (sched_yield ();)
33
32
}
34
33
35
34
/**
@@ -38,87 +37,83 @@ _thrd_yield (void)
38
37
*/
39
38
/* Global spinlock flag serializing every emulated atomic operation:
 * 0 = unlocked, 1 = held. Acquired/released only through
 * _lock_emul_atomic() / _unlock_emul_atomic(). */
static int8_t gEmulAtomicLock = 0;
40
39
41
- static void
42
- _lock_emul_atomic (void )
40
+ static void _lock_emul_atomic (void )
43
41
{
44
- int i ;
45
- if (phongo_atomic_int8_compare_exchange_weak (& gEmulAtomicLock , 0 , 1 , phongo_memory_order_acquire ) == 0 ) {
46
- /* Successfully took the spinlock */
47
- return ;
48
- }
49
- /* Failed. Try taking ten more times, then begin sleeping. */
50
- for (i = 0 ; i < 10 ; ++ i ) {
51
- if (phongo_atomic_int8_compare_exchange_weak (& gEmulAtomicLock , 0 , 1 , phongo_memory_order_acquire ) == 0 ) {
52
- /* Succeeded in taking the lock */
53
- return ;
54
- }
55
- }
56
- /* Still don't have the lock. Spin and yield */
57
- while (phongo_atomic_int8_compare_exchange_weak (& gEmulAtomicLock , 0 , 1 , phongo_memory_order_acquire ) != 0 ) {
58
- _thrd_yield ();
59
- }
42
+ int i ;
43
+ if (phongo_atomic_int8_compare_exchange_weak (& gEmulAtomicLock , 0 , 1 , phongo_memory_order_acquire ) == 0 ) {
44
+ /* Successfully took the spinlock */
45
+ return ;
46
+ }
47
+ /* Failed. Try taking ten more times, then begin sleeping. */
48
+ for (i = 0 ; i < 10 ; ++ i ) {
49
+ if (phongo_atomic_int8_compare_exchange_weak (& gEmulAtomicLock , 0 , 1 , phongo_memory_order_acquire ) == 0 ) {
50
+ /* Succeeded in taking the lock */
51
+ return ;
52
+ }
53
+ }
54
+ /* Still don't have the lock. Spin and yield */
55
+ while (phongo_atomic_int8_compare_exchange_weak (& gEmulAtomicLock , 0 , 1 , phongo_memory_order_acquire ) != 0 ) {
56
+ _thrd_yield ();
57
+ }
60
58
}
61
59
62
- static void
63
- _unlock_emul_atomic (void )
60
+ static void _unlock_emul_atomic (void )
64
61
{
65
- int64_t rv = phongo_atomic_int8_exchange (& gEmulAtomicLock , 0 , phongo_memory_order_release );
66
- BSON_ASSERT (rv == 1 && "Released atomic lock while not holding it" );
62
+ int64_t rv = phongo_atomic_int8_exchange (& gEmulAtomicLock , 0 , phongo_memory_order_release );
63
+ BSON_ASSERT (rv == 1 && "Released atomic lock while not holding it" );
67
64
}
68
65
69
- int32_t
70
- _phongo_emul_atomic_int32_fetch_add (volatile int32_t * p , int32_t n , enum phongo_memory_order _unused )
66
+ int32_t _phongo_emul_atomic_int32_fetch_add (volatile int32_t * p , int32_t n , enum phongo_memory_order _unused )
71
67
{
72
- int32_t ret ;
68
+ int32_t ret ;
73
69
74
- BSON_UNUSED (_unused );
70
+ BSON_UNUSED (_unused );
75
71
76
- _lock_emul_atomic ();
77
- ret = * p ;
78
- * p += n ;
79
- _unlock_emul_atomic ();
80
- return ret ;
72
+ _lock_emul_atomic ();
73
+ ret = * p ;
74
+ * p += n ;
75
+ _unlock_emul_atomic ();
76
+ return ret ;
81
77
}
82
78
83
- int32_t
84
- _phongo_emul_atomic_int32_exchange (volatile int32_t * p , int32_t n , enum phongo_memory_order _unused )
79
+ int32_t _phongo_emul_atomic_int32_exchange (volatile int32_t * p , int32_t n , enum phongo_memory_order _unused )
85
80
{
86
- int32_t ret ;
81
+ int32_t ret ;
87
82
88
- BSON_UNUSED (_unused );
83
+ BSON_UNUSED (_unused );
89
84
90
- _lock_emul_atomic ();
91
- ret = * p ;
92
- * p = n ;
93
- _unlock_emul_atomic ();
94
- return ret ;
85
+ _lock_emul_atomic ();
86
+ ret = * p ;
87
+ * p = n ;
88
+ _unlock_emul_atomic ();
89
+ return ret ;
95
90
}
96
91
97
- int32_t
98
- _phongo_emul_atomic_int32_compare_exchange_strong ( volatile int32_t * p ,
99
- int32_t expect_value ,
100
- int32_t new_value ,
101
- enum phongo_memory_order _unused )
92
+ int32_t _phongo_emul_atomic_int32_compare_exchange_strong (
93
+ volatile int32_t * p ,
94
+ int32_t expect_value ,
95
+ int32_t new_value ,
96
+ enum phongo_memory_order _unused )
102
97
{
103
- int32_t ret ;
98
+ int32_t ret ;
104
99
105
- BSON_UNUSED (_unused );
100
+ BSON_UNUSED (_unused );
106
101
107
- _lock_emul_atomic ();
108
- ret = * p ;
109
- if (ret == expect_value ) {
110
- * p = new_value ;
111
- }
112
- _unlock_emul_atomic ();
113
- return ret ;
102
+ _lock_emul_atomic ();
103
+ ret = * p ;
104
+ if (ret == expect_value ) {
105
+ * p = new_value ;
106
+ }
107
+ _unlock_emul_atomic ();
108
+ return ret ;
114
109
}
115
110
116
- int32_t
117
- _phongo_emul_atomic_int32_compare_exchange_weak ( volatile int32_t * p ,
118
- int32_t expect_value ,
119
- int32_t new_value ,
120
- enum phongo_memory_order order )
111
+ int32_t _phongo_emul_atomic_int32_compare_exchange_weak (
112
+ volatile int32_t * p ,
113
+ int32_t expect_value ,
114
+ int32_t new_value ,
115
+ enum phongo_memory_order order )
121
116
{
122
- /* We're emulating. We can't do a weak version. */
123
- return _phongo_emul_atomic_int32_compare_exchange_strong (p , expect_value , new_value , order );
117
+ /* We're emulating. We can't do a weak version. */
118
+ return _phongo_emul_atomic_int32_compare_exchange_strong (p , expect_value , new_value , order );
124
119
}
0 commit comments