Skip to content

Commit 2e57d1d

Browse files
committed
Merge tag 'cmpxchg.2024.05.11a' of git://git.kernel.org/pub/scm/linux/kernel/git/paulmck/linux-rcu
Pull cmpxchg updates from Paul McKenney: "Provide one-byte and two-byte cmpxchg() support on sparc32, parisc, and csky This provides native one-byte and two-byte cmpxchg() support for sparc32 and parisc, courtesy of Al Viro. This support is provided by the same hashed-array-of-locks technique used for the other atomic operations provided for these two platforms. There is also emulated one-byte cmpxchg() support for csky using a new cmpxchg_emu_u8() function that uses a four-byte cmpxchg() to emulate the one-byte variant. Similar patches for emulation of one-byte cmpxchg() for arc, sh, and xtensa have not yet received maintainer acks, so they are slated for the v6.11 merge window" * tag 'cmpxchg.2024.05.11a' of git://git.kernel.org/pub/scm/linux/kernel/git/paulmck/linux-rcu: csky: Emulate one-byte cmpxchg lib: Add one-byte emulation function parisc: add u16 support to cmpxchg() parisc: add missing export of __cmpxchg_u8() parisc: unify implementations of __cmpxchg_u{8,32,64} parisc: __cmpxchg_u32(): lift conversion into the callers sparc32: add __cmpxchg_u{8,16}() and teach __cmpxchg() to handle those sizes sparc32: unify __cmpxchg_u{32,64} sparc32: make the first argument of __cmpxchg_u64() volatile u64 * sparc32: make __cmpxchg_u32() return u32
2 parents c0b9620 + 5800e77 commit 2e57d1d

File tree

11 files changed

+133
-83
lines changed

11 files changed

+133
-83
lines changed

arch/Kconfig

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1617,4 +1617,7 @@ config CC_HAS_SANE_FUNCTION_ALIGNMENT
16171617
# strict alignment always, even with -falign-functions.
16181618
def_bool CC_HAS_MIN_FUNCTION_ALIGNMENT || CC_IS_CLANG
16191619

1620+
config ARCH_NEED_CMPXCHG_1_EMU
1621+
bool
1622+
16201623
endmenu

arch/csky/Kconfig

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -37,6 +37,7 @@ config CSKY
3737
select ARCH_INLINE_SPIN_UNLOCK_BH if !PREEMPTION
3838
select ARCH_INLINE_SPIN_UNLOCK_IRQ if !PREEMPTION
3939
select ARCH_INLINE_SPIN_UNLOCK_IRQRESTORE if !PREEMPTION
40+
select ARCH_NEED_CMPXCHG_1_EMU
4041
select ARCH_WANT_FRAME_POINTERS if !CPU_CK610 && $(cc-option,-mbacktrace)
4142
select ARCH_WANT_DEFAULT_TOPDOWN_MMAP_LAYOUT
4243
select COMMON_CLK

arch/csky/include/asm/cmpxchg.h

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@
66
#ifdef CONFIG_SMP
77
#include <linux/bug.h>
88
#include <asm/barrier.h>
9+
#include <linux/cmpxchg-emu.h>
910

1011
#define __xchg_relaxed(new, ptr, size) \
1112
({ \
@@ -61,6 +62,9 @@
6162
__typeof__(old) __old = (old); \
6263
__typeof__(*(ptr)) __ret; \
6364
switch (size) { \
65+
case 1: \
66+
__ret = (__typeof__(*(ptr)))cmpxchg_emu_u8((volatile u8 *)__ptr, (uintptr_t)__old, (uintptr_t)__new); \
67+
break; \
6468
case 4: \
6569
asm volatile ( \
6670
"1: ldex.w %0, (%3) \n" \
@@ -91,6 +95,9 @@
9195
__typeof__(old) __old = (old); \
9296
__typeof__(*(ptr)) __ret; \
9397
switch (size) { \
98+
case 1: \
99+
__ret = (__typeof__(*(ptr)))cmpxchg_emu_u8((volatile u8 *)__ptr, (uintptr_t)__old, (uintptr_t)__new); \
100+
break; \
94101
case 4: \
95102
asm volatile ( \
96103
"1: ldex.w %0, (%3) \n" \
@@ -122,6 +129,9 @@
122129
__typeof__(old) __old = (old); \
123130
__typeof__(*(ptr)) __ret; \
124131
switch (size) { \
132+
case 1: \
133+
__ret = (__typeof__(*(ptr)))cmpxchg_emu_u8((volatile u8 *)__ptr, (uintptr_t)__old, (uintptr_t)__new); \
134+
break; \
125135
case 4: \
126136
asm volatile ( \
127137
RELEASE_FENCE \

arch/parisc/include/asm/cmpxchg.h

Lines changed: 10 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -56,26 +56,24 @@ __arch_xchg(unsigned long x, volatile void *ptr, int size)
5656
/* bug catcher for when unsupported size is used - won't link */
5757
extern void __cmpxchg_called_with_bad_pointer(void);
5858

59-
/* __cmpxchg_u32/u64 defined in arch/parisc/lib/bitops.c */
60-
extern unsigned long __cmpxchg_u32(volatile unsigned int *m, unsigned int old,
61-
unsigned int new_);
62-
extern u64 __cmpxchg_u64(volatile u64 *ptr, u64 old, u64 new_);
59+
/* __cmpxchg_u... defined in arch/parisc/lib/bitops.c */
6360
extern u8 __cmpxchg_u8(volatile u8 *ptr, u8 old, u8 new_);
61+
extern u16 __cmpxchg_u16(volatile u16 *ptr, u16 old, u16 new_);
62+
extern u32 __cmpxchg_u32(volatile u32 *m, u32 old, u32 new_);
63+
extern u64 __cmpxchg_u64(volatile u64 *ptr, u64 old, u64 new_);
6464

6565
/* don't worry...optimizer will get rid of most of this */
6666
static inline unsigned long
6767
__cmpxchg(volatile void *ptr, unsigned long old, unsigned long new_, int size)
6868
{
69-
switch (size) {
69+
return
7070
#ifdef CONFIG_64BIT
71-
case 8: return __cmpxchg_u64((u64 *)ptr, old, new_);
71+
size == 8 ? __cmpxchg_u64(ptr, old, new_) :
7272
#endif
73-
case 4: return __cmpxchg_u32((unsigned int *)ptr,
74-
(unsigned int)old, (unsigned int)new_);
75-
case 1: return __cmpxchg_u8((u8 *)ptr, old & 0xff, new_ & 0xff);
76-
}
77-
__cmpxchg_called_with_bad_pointer();
78-
return old;
73+
size == 4 ? __cmpxchg_u32(ptr, old, new_) :
74+
size == 2 ? __cmpxchg_u16(ptr, old, new_) :
75+
size == 1 ? __cmpxchg_u8(ptr, old, new_) :
76+
(__cmpxchg_called_with_bad_pointer(), old);
7977
}
8078

8179
#define arch_cmpxchg(ptr, o, n) \

arch/parisc/kernel/parisc_ksyms.c

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,8 @@ EXPORT_SYMBOL(memset);
2222
#include <linux/atomic.h>
2323
EXPORT_SYMBOL(__xchg8);
2424
EXPORT_SYMBOL(__xchg32);
25+
EXPORT_SYMBOL(__cmpxchg_u8);
26+
EXPORT_SYMBOL(__cmpxchg_u16);
2527
EXPORT_SYMBOL(__cmpxchg_u32);
2628
EXPORT_SYMBOL(__cmpxchg_u64);
2729
#ifdef CONFIG_SMP

arch/parisc/lib/bitops.c

Lines changed: 17 additions & 35 deletions
Original file line numberDiff line numberDiff line change
@@ -56,38 +56,20 @@ unsigned long notrace __xchg8(char x, volatile char *ptr)
5656
}
5757

5858

59-
u64 notrace __cmpxchg_u64(volatile u64 *ptr, u64 old, u64 new)
60-
{
61-
unsigned long flags;
62-
u64 prev;
63-
64-
_atomic_spin_lock_irqsave(ptr, flags);
65-
if ((prev = *ptr) == old)
66-
*ptr = new;
67-
_atomic_spin_unlock_irqrestore(ptr, flags);
68-
return prev;
69-
}
70-
71-
unsigned long notrace __cmpxchg_u32(volatile unsigned int *ptr, unsigned int old, unsigned int new)
72-
{
73-
unsigned long flags;
74-
unsigned int prev;
75-
76-
_atomic_spin_lock_irqsave(ptr, flags);
77-
if ((prev = *ptr) == old)
78-
*ptr = new;
79-
_atomic_spin_unlock_irqrestore(ptr, flags);
80-
return (unsigned long)prev;
81-
}
82-
83-
u8 notrace __cmpxchg_u8(volatile u8 *ptr, u8 old, u8 new)
84-
{
85-
unsigned long flags;
86-
u8 prev;
87-
88-
_atomic_spin_lock_irqsave(ptr, flags);
89-
if ((prev = *ptr) == old)
90-
*ptr = new;
91-
_atomic_spin_unlock_irqrestore(ptr, flags);
92-
return prev;
93-
}
59+
#define CMPXCHG(T) \
60+
T notrace __cmpxchg_##T(volatile T *ptr, T old, T new) \
61+
{ \
62+
unsigned long flags; \
63+
T prev; \
64+
\
65+
_atomic_spin_lock_irqsave(ptr, flags); \
66+
if ((prev = *ptr) == old) \
67+
*ptr = new; \
68+
_atomic_spin_unlock_irqrestore(ptr, flags); \
69+
return prev; \
70+
}
71+
72+
CMPXCHG(u64)
73+
CMPXCHG(u32)
74+
CMPXCHG(u16)
75+
CMPXCHG(u8)

arch/sparc/include/asm/cmpxchg_32.h

Lines changed: 9 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -38,21 +38,19 @@ static __always_inline unsigned long __arch_xchg(unsigned long x, __volatile__ v
3838

3939
/* bug catcher for when unsupported size is used - won't link */
4040
void __cmpxchg_called_with_bad_pointer(void);
41-
/* we only need to support cmpxchg of a u32 on sparc */
42-
unsigned long __cmpxchg_u32(volatile u32 *m, u32 old, u32 new_);
41+
u8 __cmpxchg_u8(volatile u8 *m, u8 old, u8 new_);
42+
u16 __cmpxchg_u16(volatile u16 *m, u16 old, u16 new_);
43+
u32 __cmpxchg_u32(volatile u32 *m, u32 old, u32 new_);
4344

4445
/* don't worry...optimizer will get rid of most of this */
4546
static inline unsigned long
4647
__cmpxchg(volatile void *ptr, unsigned long old, unsigned long new_, int size)
4748
{
48-
switch (size) {
49-
case 4:
50-
return __cmpxchg_u32((u32 *)ptr, (u32)old, (u32)new_);
51-
default:
52-
__cmpxchg_called_with_bad_pointer();
53-
break;
54-
}
55-
return old;
49+
return
50+
size == 1 ? __cmpxchg_u8(ptr, old, new_) :
51+
size == 2 ? __cmpxchg_u16(ptr, old, new_) :
52+
size == 4 ? __cmpxchg_u32(ptr, old, new_) :
53+
(__cmpxchg_called_with_bad_pointer(), old);
5654
}
5755

5856
#define arch_cmpxchg(ptr, o, n) \
@@ -63,7 +61,7 @@ __cmpxchg(volatile void *ptr, unsigned long old, unsigned long new_, int size)
6361
(unsigned long)_n_, sizeof(*(ptr))); \
6462
})
6563

66-
u64 __cmpxchg_u64(u64 *ptr, u64 old, u64 new);
64+
u64 __cmpxchg_u64(volatile u64 *ptr, u64 old, u64 new);
6765
#define arch_cmpxchg64(ptr, old, new) __cmpxchg_u64(ptr, old, new)
6866

6967
#include <asm-generic/cmpxchg-local.h>

arch/sparc/lib/atomic32.c

Lines changed: 20 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -159,32 +159,27 @@ unsigned long sp32___change_bit(unsigned long *addr, unsigned long mask)
159159
}
160160
EXPORT_SYMBOL(sp32___change_bit);
161161

162-
unsigned long __cmpxchg_u32(volatile u32 *ptr, u32 old, u32 new)
163-
{
164-
unsigned long flags;
165-
u32 prev;
166-
167-
spin_lock_irqsave(ATOMIC_HASH(ptr), flags);
168-
if ((prev = *ptr) == old)
169-
*ptr = new;
170-
spin_unlock_irqrestore(ATOMIC_HASH(ptr), flags);
171-
172-
return (unsigned long)prev;
173-
}
162+
#define CMPXCHG(T) \
163+
T __cmpxchg_##T(volatile T *ptr, T old, T new) \
164+
{ \
165+
unsigned long flags; \
166+
T prev; \
167+
\
168+
spin_lock_irqsave(ATOMIC_HASH(ptr), flags); \
169+
if ((prev = *ptr) == old) \
170+
*ptr = new; \
171+
spin_unlock_irqrestore(ATOMIC_HASH(ptr), flags);\
172+
\
173+
return prev; \
174+
}
175+
176+
CMPXCHG(u8)
177+
CMPXCHG(u16)
178+
CMPXCHG(u32)
179+
CMPXCHG(u64)
180+
EXPORT_SYMBOL(__cmpxchg_u8);
181+
EXPORT_SYMBOL(__cmpxchg_u16);
174182
EXPORT_SYMBOL(__cmpxchg_u32);
175-
176-
u64 __cmpxchg_u64(u64 *ptr, u64 old, u64 new)
177-
{
178-
unsigned long flags;
179-
u64 prev;
180-
181-
spin_lock_irqsave(ATOMIC_HASH(ptr), flags);
182-
if ((prev = *ptr) == old)
183-
*ptr = new;
184-
spin_unlock_irqrestore(ATOMIC_HASH(ptr), flags);
185-
186-
return prev;
187-
}
188183
EXPORT_SYMBOL(__cmpxchg_u64);
189184

190185
unsigned long __xchg_u32(volatile u32 *ptr, u32 new)

include/linux/cmpxchg-emu.h

Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,15 @@
1+
/* SPDX-License-Identifier: GPL-2.0+ */
2+
/*
3+
* Emulated 1-byte and 2-byte cmpxchg operations for architectures
4+
* lacking direct support for these sizes. These are implemented in terms
5+
* of 4-byte cmpxchg operations.
6+
*
7+
* Copyright (C) 2024 Paul E. McKenney.
8+
*/
9+
10+
#ifndef __LINUX_CMPXCHG_EMU_H
11+
#define __LINUX_CMPXCHG_EMU_H
12+
13+
uintptr_t cmpxchg_emu_u8(volatile u8 *p, uintptr_t old, uintptr_t new);
14+
15+
#endif /* __LINUX_CMPXCHG_EMU_H */

lib/Makefile

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -236,6 +236,7 @@ obj-$(CONFIG_FUNCTION_ERROR_INJECTION) += error-inject.o
236236
lib-$(CONFIG_GENERIC_BUG) += bug.o
237237

238238
obj-$(CONFIG_HAVE_ARCH_TRACEHOOK) += syscall.o
239+
obj-$(CONFIG_ARCH_NEED_CMPXCHG_1_EMU) += cmpxchg-emu.o
239240

240241
obj-$(CONFIG_DYNAMIC_DEBUG_CORE) += dynamic_debug.o
241242
#ensure exported functions have prototypes

0 commit comments

Comments (0)