
Commit 4420658

guoren83 authored and palmer-dabbelt committed
riscv: atomic: Add custom conditional atomic operation implementation
Add a custom implementation of the conditional atomic operations (similar to dec_if_positive):

 - arch_atomic_inc_unless_negative
 - arch_atomic_dec_unless_positive
 - arch_atomic64_inc_unless_negative
 - arch_atomic64_dec_unless_positive

Signed-off-by: Guo Ren <[email protected]>
Signed-off-by: Guo Ren <[email protected]>
Link: https://lore.kernel.org/r/[email protected]
Signed-off-by: Palmer Dabbelt <[email protected]>
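Each helper returns true when the update was performed and false when the guard value (negative or positive, respectively) was observed. For reference, a minimal portable sketch of the intended semantics, using GCC's __atomic builtins in place of the kernel's LR/SC assembly (sketch only; the kernel versions also guarantee full ordering on success via the trailing fence rw, rw):

	/* Sketch only: models arch_atomic_inc_unless_negative(). */
	static inline bool inc_unless_negative_sketch(int *v)
	{
		int old = __atomic_load_n(v, __ATOMIC_RELAXED);

		do {
			if (old < 0)
				return false;	/* already negative: no update */
		} while (!__atomic_compare_exchange_n(v, &old, old + 1,
						      true, __ATOMIC_SEQ_CST,
						      __ATOMIC_RELAXED));
		return true;	/* increment was applied */
	}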
1 parent 1d7f693 commit 4420658

File tree: 1 file changed (+82, -0)


arch/riscv/include/asm/atomic.h

Lines changed: 82 additions & 0 deletions
@@ -310,6 +310,46 @@ ATOMIC_OPS()
 #undef ATOMIC_OPS
 #undef ATOMIC_OP
 
+static __always_inline bool arch_atomic_inc_unless_negative(atomic_t *v)
+{
+	int prev, rc;
+
+	__asm__ __volatile__ (
+		"0:	lr.w      %[p],  %[c]\n"
+		"	bltz      %[p],  1f\n"
+		"	addi      %[rc], %[p], 1\n"
+		"	sc.w.rl   %[rc], %[rc], %[c]\n"
+		"	bnez      %[rc], 0b\n"
+		"	fence     rw, rw\n"
+		"1:\n"
+		: [p]"=&r" (prev), [rc]"=&r" (rc), [c]"+A" (v->counter)
+		:
+		: "memory");
+	return !(prev < 0);
+}
+
+#define arch_atomic_inc_unless_negative arch_atomic_inc_unless_negative
+
+static __always_inline bool arch_atomic_dec_unless_positive(atomic_t *v)
+{
+	int prev, rc;
+
+	__asm__ __volatile__ (
+		"0:	lr.w      %[p],  %[c]\n"
+		"	bgtz      %[p],  1f\n"
+		"	addi      %[rc], %[p], -1\n"
+		"	sc.w.rl   %[rc], %[rc], %[c]\n"
+		"	bnez      %[rc], 0b\n"
+		"	fence     rw, rw\n"
+		"1:\n"
+		: [p]"=&r" (prev), [rc]"=&r" (rc), [c]"+A" (v->counter)
+		:
+		: "memory");
+	return !(prev > 0);
+}
+
+#define arch_atomic_dec_unless_positive arch_atomic_dec_unless_positive
+
 static __always_inline int arch_atomic_dec_if_positive(atomic_t *v)
 {
 	int prev, rc;
@@ -331,6 +371,48 @@ static __always_inline int arch_atomic_dec_if_positive(atomic_t *v)
 #define arch_atomic_dec_if_positive arch_atomic_dec_if_positive
 
 #ifndef CONFIG_GENERIC_ATOMIC64
+static __always_inline bool arch_atomic64_inc_unless_negative(atomic64_t *v)
+{
+	s64 prev;
+	long rc;
+
+	__asm__ __volatile__ (
+		"0:	lr.d      %[p],  %[c]\n"
+		"	bltz      %[p],  1f\n"
+		"	addi      %[rc], %[p], 1\n"
+		"	sc.d.rl   %[rc], %[rc], %[c]\n"
+		"	bnez      %[rc], 0b\n"
+		"	fence     rw, rw\n"
+		"1:\n"
+		: [p]"=&r" (prev), [rc]"=&r" (rc), [c]"+A" (v->counter)
+		:
+		: "memory");
+	return !(prev < 0);
+}
+
+#define arch_atomic64_inc_unless_negative arch_atomic64_inc_unless_negative
+
+static __always_inline bool arch_atomic64_dec_unless_positive(atomic64_t *v)
+{
+	s64 prev;
+	long rc;
+
+	__asm__ __volatile__ (
+		"0:	lr.d      %[p],  %[c]\n"
+		"	bgtz      %[p],  1f\n"
+		"	addi      %[rc], %[p], -1\n"
+		"	sc.d.rl   %[rc], %[rc], %[c]\n"
+		"	bnez      %[rc], 0b\n"
+		"	fence     rw, rw\n"
+		"1:\n"
+		: [p]"=&r" (prev), [rc]"=&r" (rc), [c]"+A" (v->counter)
+		:
+		: "memory");
+	return !(prev > 0);
+}
+
+#define arch_atomic64_dec_unless_positive arch_atomic64_dec_unless_positive
+
 static __always_inline s64 arch_atomic64_dec_if_positive(atomic64_t *v)
 {
 	s64 prev;
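On the success path these loops behave as fully ordered atomics: sc.w.rl/sc.d.rl gives release ordering on the store, and the trailing fence rw, rw supplies the remaining ordering, matching the existing dec_if_positive helpers. A hedged usage sketch through the generic wrappers from include/linux/atomic.h (the function name and refcount policy below are hypothetical, not part of this commit):

	/* Hypothetical caller: a negative count marks the object dead. */
	static bool tryget_object(atomic_t *refcnt)
	{
		/* Increments and returns true only if the count was >= 0. */
		return atomic_inc_unless_negative(refcnt);
	}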
