@@ -310,6 +310,46 @@ ATOMIC_OPS()
 #undef ATOMIC_OPS
 #undef ATOMIC_OP
 
+static __always_inline bool arch_atomic_inc_unless_negative(atomic_t *v)
+{
+	int prev, rc;
+
+	__asm__ __volatile__ (
+		"0:	lr.w      %[p],  %[c]\n"
+		"	bltz      %[p],  1f\n"
+		"	addi      %[rc], %[p], 1\n"
+		"	sc.w.rl   %[rc], %[rc], %[c]\n"
+		"	bnez      %[rc], 0b\n"
+		"	fence     rw, rw\n"
+		"1:\n"
+		: [p]"=&r" (prev), [rc]"=&r" (rc), [c]"+A" (v->counter)
+		:
+		: "memory");
+	return !(prev < 0);
+}
+
+#define arch_atomic_inc_unless_negative	arch_atomic_inc_unless_negative
+
+static __always_inline bool arch_atomic_dec_unless_positive(atomic_t *v)
+{
+	int prev, rc;
+
+	__asm__ __volatile__ (
+		"0:	lr.w      %[p],  %[c]\n"
+		"	bgtz      %[p],  1f\n"
+		"	addi      %[rc], %[p], -1\n"
+		"	sc.w.rl   %[rc], %[rc], %[c]\n"
+		"	bnez      %[rc], 0b\n"
+		"	fence     rw, rw\n"
+		"1:\n"
+		: [p]"=&r" (prev), [rc]"=&r" (rc), [c]"+A" (v->counter)
+		:
+		: "memory");
+	return !(prev > 0);
+}
+
+#define arch_atomic_dec_unless_positive	arch_atomic_dec_unless_positive
+
 static __always_inline int arch_atomic_dec_if_positive(atomic_t *v)
 {
 	int prev, rc;
@@ -331,6 +371,48 @@ static __always_inline int arch_atomic_dec_if_positive(atomic_t *v)
 #define arch_atomic_dec_if_positive arch_atomic_dec_if_positive
 
 #ifndef CONFIG_GENERIC_ATOMIC64
+static __always_inline bool arch_atomic64_inc_unless_negative(atomic64_t *v)
+{
+	s64 prev;
+	long rc;
+
+	__asm__ __volatile__ (
+		"0:	lr.d      %[p],  %[c]\n"
+		"	bltz      %[p],  1f\n"
+		"	addi      %[rc], %[p], 1\n"
+		"	sc.d.rl   %[rc], %[rc], %[c]\n"
+		"	bnez      %[rc], 0b\n"
+		"	fence     rw, rw\n"
+		"1:\n"
+		: [p]"=&r" (prev), [rc]"=&r" (rc), [c]"+A" (v->counter)
+		:
+		: "memory");
+	return !(prev < 0);
+}
+
+#define arch_atomic64_inc_unless_negative	arch_atomic64_inc_unless_negative
+
+static __always_inline bool arch_atomic64_dec_unless_positive(atomic64_t *v)
+{
+	s64 prev;
+	long rc;
+
+	__asm__ __volatile__ (
+		"0:	lr.d      %[p],  %[c]\n"
+		"	bgtz      %[p],  1f\n"
+		"	addi      %[rc], %[p], -1\n"
+		"	sc.d.rl   %[rc], %[rc], %[c]\n"
+		"	bnez      %[rc], 0b\n"
+		"	fence     rw, rw\n"
+		"1:\n"
+		: [p]"=&r" (prev), [rc]"=&r" (rc), [c]"+A" (v->counter)
+		:
+		: "memory");
+	return !(prev > 0);
+}
+
+#define arch_atomic64_dec_unless_positive	arch_atomic64_dec_unless_positive
+
 static __always_inline s64 arch_atomic64_dec_if_positive(atomic64_t *v)
 {
 	s64 prev;
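
Note: each added routine is an LR/SC retry loop: load-reserve the counter, branch to label 1 if the sign test fails (leaving the value untouched), otherwise attempt a store-conditional of the updated value and retry on failure; the `.rl` release ordering plus the trailing `fence rw, rw` provide full ordering only on the successful path. As a rough illustration of the same conditional-increment semantics, here is a minimal sketch written against C11 <stdatomic.h> rather than the kernel's atomic API; the function and variable names are illustrative assumptions, not the kernel's generic fallback code.

#include <stdatomic.h>
#include <stdbool.h>
#include <stdio.h>

/* Illustrative model of inc_unless_negative: increment only while the
 * current value is non-negative, and report whether the increment happened. */
static bool inc_unless_negative(atomic_int *v)
{
	int old = atomic_load_explicit(v, memory_order_relaxed);

	do {
		if (old < 0)
			return false;	/* negative: leave the counter untouched */
	} while (!atomic_compare_exchange_weak_explicit(v, &old, old + 1,
							memory_order_acq_rel,
							memory_order_relaxed));
	return true;			/* increment succeeded */
}

int main(void)
{
	atomic_int a, b;

	atomic_init(&a, -1);
	atomic_init(&b, 3);
	printf("%d %d\n", inc_unless_negative(&a), inc_unless_negative(&b));	/* 0 1 */
	printf("a=%d b=%d\n", atomic_load(&a), atomic_load(&b));		/* a=-1 b=4 */
	return 0;
}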