@@ -310,47 +310,47 @@ ATOMIC_OPS()
 #undef ATOMIC_OPS
 #undef ATOMIC_OP
 
-static __always_inline int arch_atomic_sub_if_positive(atomic_t *v, int offset)
+static __always_inline int arch_atomic_dec_if_positive(atomic_t *v)
 {
 	int prev, rc;
 
 	__asm__ __volatile__ (
 		"0:	lr.w      %[p],  %[c]\n"
-		"	sub       %[rc], %[p], %[o]\n"
+		"	addi      %[rc], %[p], -1\n"
 		"	bltz      %[rc], 1f\n"
 		"	sc.w.rl   %[rc], %[rc], %[c]\n"
 		"	bnez      %[rc], 0b\n"
 		"	fence     rw, rw\n"
 		"1:\n"
 		: [p]"=&r" (prev), [rc]"=&r" (rc), [c]"+A" (v->counter)
-		: [o]"r" (offset)
+		:
 		: "memory");
-	return prev - offset;
+	return prev - 1;
 }
 
-#define arch_atomic_dec_if_positive(v)	arch_atomic_sub_if_positive(v, 1)
+#define arch_atomic_dec_if_positive	arch_atomic_dec_if_positive
 
 #ifndef CONFIG_GENERIC_ATOMIC64
-static __always_inline s64 arch_atomic64_sub_if_positive(atomic64_t *v, s64 offset)
+static __always_inline s64 arch_atomic64_dec_if_positive(atomic64_t *v)
 {
 	s64 prev;
 	long rc;
 
 	__asm__ __volatile__ (
 		"0:	lr.d      %[p],  %[c]\n"
-		"	sub       %[rc], %[p], %[o]\n"
+		"	addi      %[rc], %[p], -1\n"
 		"	bltz      %[rc], 1f\n"
 		"	sc.d.rl   %[rc], %[rc], %[c]\n"
 		"	bnez      %[rc], 0b\n"
 		"	fence     rw, rw\n"
 		"1:\n"
 		: [p]"=&r" (prev), [rc]"=&r" (rc), [c]"+A" (v->counter)
-		: [o]"r" (offset)
+		:
 		: "memory");
-	return prev - offset;
+	return prev - 1;
 }
 
-#define arch_atomic64_dec_if_positive(v) arch_atomic64_sub_if_positive(v, 1)
+#define arch_atomic64_dec_if_positive	arch_atomic64_dec_if_positive
 
 #endif
 
 #endif /* _ASM_RISCV_ATOMIC_H */
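For context, arch_atomic_dec_if_positive() backs the generic atomic_dec_if_positive() API: the LR/SC loop computes old - 1 with addi, bltz skips the store-conditional when the result would be negative, bnez retries if the reservation was lost, and the trailing fence rw, rw supplies full ordering on success. The return value is always old - 1, so a negative result means the counter was left unchanged. A minimal, hypothetical caller sketch (try_take_slot is illustrative and not part of this commit), assuming the usual <linux/atomic.h> wrapper:

	#include <linux/atomic.h>
	#include <linux/types.h>

	/*
	 * Illustrative only: claim a slot while the counter is still positive.
	 * atomic_dec_if_positive() returns old - 1; a negative return means the
	 * counter was already zero (or below) and was not modified.
	 */
	static bool try_take_slot(atomic_t *slots)
	{
		return atomic_dec_if_positive(slots) >= 0;
	}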