@@ -19,7 +19,7 @@ limitations under the License.
 
 #include "types.hpp"
 
-#if __cplusplus >= 201103L || _MSC_VER >= 1700
+#if __cplusplus >= 201103L || (defined(_MSC_VER) && _MSC_VER >= 1700)
 // C++ 11 (or partial)
 #include <atomic>
 
@@ -199,77 +199,175 @@ class Task {
     T* _data;
 };
 
+#endif
+
+
+#if defined(CONCURRENCY_ENABLED) || (__cplusplus >= 201103L) || (defined(_MSC_VER) && _MSC_VER >= 1700)
+#include <atomic>
 #define ATOMIC_INT std::atomic_int
 #define ATOMIC_BOOL std::atomic_bool
 
 #else
-#if defined(__APPLE__)
-#define ATOMIC_INT std::atomic_int
-#define ATOMIC_BOOL std::atomic_bool
-#elif __cplusplus < 201103L
-// ! Stubs for NON CONCURRENT USAGE !
-// Used to compile and provide a non concurrent version AND
-// when atomic.h is not available (VS C++)
-const int memory_order_relaxed = 0;
-const int memory_order_acquire = 2;
-const int memory_order_release = 3;
-#include <iostream>
-
-class atomic_int {
-private:
-    int _n;
-
-public:
-    atomic_int(int n=0) { _n = n; }
-    atomic_int& operator=(int n) {
-        _n = n;
-        return *this;
-    }
-    int load(int mo = memory_order_relaxed) const { (void)mo; return _n; }
-    void store(int n, int mo = memory_order_release) { (void)mo; _n = n; }
-    atomic_int& operator++(int) {
-        _n++;
-        return *this;
-    }
-    atomic_int fetch_add(atomic_int) {
-        _n++;
-        return atomic_int(_n - 1);
-    }
-    bool compare_exchange_strong(int& expected, int desired) {
-        if (_n != expected)
-            return false;
+// ! Stubs for NON CONCURRENT USAGE !
+// Used when compiling for older C++ standards (C++98/03)
+
+// Use enum instead of const int to prevent linkage issues
+enum fallback_memory_order {
+    memory_order_relaxed = 0,
+    memory_order_consume = 1,
+    memory_order_acquire = 2,
+    memory_order_release = 3,
+    memory_order_acq_rel = 4,
+    memory_order_seq_cst = 5
+};
+
+// The class is named 'fallback_...' so that the ATOMIC_INT macro defined below
+// never expands to an identifier that clashes with the class's own name.
+class fallback_atomic_int {
+private:
+    volatile int _n;
+
+    // Disable copy constructor and assignment operator
+    // (Atomics should not be copyable)
+    fallback_atomic_int(const fallback_atomic_int&);
+    fallback_atomic_int& operator=(const fallback_atomic_int&);
+
+public:
+    fallback_atomic_int(int n = 0) : _n(n) {}
+
+    // Assignment returns the value (int), NOT the object reference
+    int operator=(int n)
+    {
+        _n = n;
+        return n;
+    }
+
+    operator int() const
+    {
+        return _n;
+    }
+
+    int load(int mo = memory_order_relaxed) const
+    {
+        (void)mo;
+        return _n;
+    }
+
+    void store(int n, int mo = memory_order_release)
+    {
+        (void)mo;
+        _n = n;
+    }
+
+    // Postfix ++ (x++) returns OLD value
+    int operator++(int)
+    {
+        int old = _n;
+        _n++;
+        return old;
+    }
+
+    // Prefix ++ (++x) returns NEW value
+    int operator++()
+    {
+        return ++_n;
+    }
+
+    // Standard signature: takes int delta, returns OLD value
+    int fetch_add(int delta, int mo = memory_order_seq_cst)
+    {
+        (void)mo;
+        int old = _n;
+        _n += delta;
+        return old;
+    }
+
+    int fetch_sub(int delta, int mo = memory_order_seq_cst)
+    {
+        (void)mo;
+        int old = _n;
+        _n -= delta;
+        return old;
+    }
+
+    // CRITICAL FIX: Must update 'expected' on failure
+    bool compare_exchange_strong(int& expected, int desired, int mo = memory_order_seq_cst)
+    {
+        (void)mo;
 
+        if (_n == expected) {
             _n = desired;
             return true;
-    }
-};
-
-class atomic_bool {
-private:
-    bool _b;
-
-public:
-    atomic_bool(bool b=false) { _b = b; }
-    atomic_bool& operator=(bool b) { _b = b; return *this; }
-    bool load(int mo = memory_order_relaxed) const { (void)mo; return _b; }
-    void store(bool b, int mo = memory_order_release) { (void)mo; _b = b; }
-    bool exchange(bool expected, int mo = memory_order_acquire) {
-        (void)mo;
-        bool b = _b;
-        _b = expected;
-        return b;
-    }
-};
-
-#define ATOMIC_INT atomic_int
-#define ATOMIC_BOOL atomic_bool
-#else
-#define ATOMIC_INT std::atomic_int
-#define ATOMIC_BOOL std::atomic_bool
-#endif
+        } else {
+            expected = _n;
+            return false;
+        }
+    }
+
+    bool compare_exchange_weak(int& expected, int desired, int mo = memory_order_seq_cst)
+    {
+        return compare_exchange_strong(expected, desired, mo);
+    }
+};
+
+class fallback_atomic_bool {
+private:
+    volatile bool _b;
+
+    fallback_atomic_bool(const fallback_atomic_bool&);
+    fallback_atomic_bool& operator=(const fallback_atomic_bool&);
+
+public:
+    fallback_atomic_bool(bool b = false) : _b(b) {}
+
+    bool operator=(bool b)
+    {
+        _b = b;
+        return b;
+    }
+
+    operator bool() const
+    {
+        return _b;
+    }
 
-#endif // (__cplusplus && __cplusplus < 201103L) || (_MSC_VER && _MSC_VER < 1700)
+    bool load(int mo = memory_order_relaxed) const
+    {
+        (void)mo;
+        return _b;
+    }
 
+    void store(bool b, int mo = memory_order_release)
+    {
+        (void)mo;
+        _b = b;
+    }
+
+    bool exchange(bool val, int mo = memory_order_acquire)
+    {
+        (void)mo;
+        bool old = _b;
+        _b = val;
+        return old;
+    }
+
+    bool compare_exchange_strong(bool& expected, bool desired, int mo = memory_order_seq_cst)
+    {
+        (void)mo;
+        if (_b == expected) {
+            _b = desired;
+            return true;
+        } else {
+            expected = _b;
+            return false;
+        }
+    }
+};
+
+#define ATOMIC_INT fallback_atomic_int
+#define ATOMIC_BOOL fallback_atomic_bool
+
+#endif
 
 
 #endif
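
For reference, a minimal usage sketch of the fallback types (not part of the diff). It assumes the fallback branch is the one compiled, i.e. neither C++11 <atomic> nor CONCURRENCY_ENABLED is available, so ATOMIC_INT and ATOMIC_BOOL expand to fallback_atomic_int and fallback_atomic_bool; the wrapping example() function is purely illustrative.

// Usage sketch, assuming the fallback (non-concurrent) branch is active.
void example()
{
    ATOMIC_INT counter(0);

    int before = counter.fetch_add(1);   // returns the OLD value: 0
    int now = counter.load();            // 1

    int expected = 1;
    bool swapped = counter.compare_exchange_strong(expected, 5);
    // swapped == true and counter is now 5; on failure, 'expected' is
    // overwritten with the current value, matching std::atomic semantics.

    ATOMIC_BOOL flag(false);
    bool was_set = flag.exchange(true);  // old value: false

    (void)before; (void)now; (void)swapped; (void)was_set;
}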