Skip to content

Commit 33eb8ab

Browse files
ubizjak authored and Ingo Molnar committed
locking/atomic/x86: Merge __arch{,_try}_cmpxchg64_emu_local() with __arch{,_try}_cmpxchg64_emu()
Macros __arch{,_try}_cmpxchg64_emu() are almost identical to their local variants __arch{,_try}_cmpxchg64_emu_local(), differing only by lock prefixes. Merge these two macros by introducing additional macro parameters to pass the lock location and lock prefix from their respective static inline functions.

No functional change intended.

Signed-off-by: Uros Bizjak <[email protected]>
Signed-off-by: Ingo Molnar <[email protected]>
Cc: Linus Torvalds <[email protected]>
Cc: "H. Peter Anvin" <[email protected]>
Link: https://lore.kernel.org/r/[email protected]
1 parent d26e46f commit 33eb8ab

File tree

1 file changed

+10
-46
lines changed

1 file changed

+10
-46
lines changed

arch/x86/include/asm/cmpxchg_32.h

Lines changed: 10 additions & 46 deletions
Original file line numberDiff line numberDiff line change
@@ -86,14 +86,14 @@ static __always_inline bool __try_cmpxchg64_local(volatile u64 *ptr, u64 *oldp,
8686
* to simulate the cmpxchg8b on the 80386 and 80486 CPU.
8787
*/
8888

89-
#define __arch_cmpxchg64_emu(_ptr, _old, _new) \
89+
#define __arch_cmpxchg64_emu(_ptr, _old, _new, _lock_loc, _lock) \
9090
({ \
9191
union __u64_halves o = { .full = (_old), }, \
9292
n = { .full = (_new), }; \
9393
\
94-
asm volatile(ALTERNATIVE(LOCK_PREFIX_HERE \
94+
asm volatile(ALTERNATIVE(_lock_loc \
9595
"call cmpxchg8b_emu", \
96-
"lock; cmpxchg8b %[ptr]", X86_FEATURE_CX8) \
96+
_lock "cmpxchg8b %[ptr]", X86_FEATURE_CX8) \
9797
: [ptr] "+m" (*(_ptr)), \
9898
"+a" (o.low), "+d" (o.high) \
9999
: "b" (n.low), "c" (n.high), "S" (_ptr) \
@@ -104,40 +104,25 @@ static __always_inline bool __try_cmpxchg64_local(volatile u64 *ptr, u64 *oldp,
104104

105105
static __always_inline u64 arch_cmpxchg64(volatile u64 *ptr, u64 old, u64 new)
106106
{
107-
return __arch_cmpxchg64_emu(ptr, old, new);
107+
return __arch_cmpxchg64_emu(ptr, old, new, LOCK_PREFIX_HERE, "lock; ");
108108
}
109109
#define arch_cmpxchg64 arch_cmpxchg64
110110

111-
#define __arch_cmpxchg64_emu_local(_ptr, _old, _new) \
112-
({ \
113-
union __u64_halves o = { .full = (_old), }, \
114-
n = { .full = (_new), }; \
115-
\
116-
asm volatile(ALTERNATIVE("call cmpxchg8b_emu", \
117-
"cmpxchg8b %[ptr]", X86_FEATURE_CX8) \
118-
: [ptr] "+m" (*(_ptr)), \
119-
"+a" (o.low), "+d" (o.high) \
120-
: "b" (n.low), "c" (n.high), "S" (_ptr) \
121-
: "memory"); \
122-
\
123-
o.full; \
124-
})
125-
126111
static __always_inline u64 arch_cmpxchg64_local(volatile u64 *ptr, u64 old, u64 new)
127112
{
128-
return __arch_cmpxchg64_emu_local(ptr, old, new);
113+
return __arch_cmpxchg64_emu(ptr, old, new, ,);
129114
}
130115
#define arch_cmpxchg64_local arch_cmpxchg64_local
131116

132-
#define __arch_try_cmpxchg64_emu(_ptr, _oldp, _new) \
117+
#define __arch_try_cmpxchg64_emu(_ptr, _oldp, _new, _lock_loc, _lock) \
133118
({ \
134119
union __u64_halves o = { .full = *(_oldp), }, \
135120
n = { .full = (_new), }; \
136121
bool ret; \
137122
\
138-
asm volatile(ALTERNATIVE(LOCK_PREFIX_HERE \
123+
asm volatile(ALTERNATIVE(_lock_loc \
139124
"call cmpxchg8b_emu", \
140-
"lock; cmpxchg8b %[ptr]", X86_FEATURE_CX8) \
125+
_lock "cmpxchg8b %[ptr]", X86_FEATURE_CX8) \
141126
CC_SET(e) \
142127
: CC_OUT(e) (ret), \
143128
[ptr] "+m" (*(_ptr)), \
@@ -153,34 +138,13 @@ static __always_inline u64 arch_cmpxchg64_local(volatile u64 *ptr, u64 old, u64
153138

154139
static __always_inline bool arch_try_cmpxchg64(volatile u64 *ptr, u64 *oldp, u64 new)
155140
{
156-
return __arch_try_cmpxchg64_emu(ptr, oldp, new);
141+
return __arch_try_cmpxchg64_emu(ptr, oldp, new, LOCK_PREFIX_HERE, "lock; ");
157142
}
158143
#define arch_try_cmpxchg64 arch_try_cmpxchg64
159144

160-
#define __arch_try_cmpxchg64_emu_local(_ptr, _oldp, _new) \
161-
({ \
162-
union __u64_halves o = { .full = *(_oldp), }, \
163-
n = { .full = (_new), }; \
164-
bool ret; \
165-
\
166-
asm volatile(ALTERNATIVE("call cmpxchg8b_emu", \
167-
"cmpxchg8b %[ptr]", X86_FEATURE_CX8) \
168-
CC_SET(e) \
169-
: CC_OUT(e) (ret), \
170-
[ptr] "+m" (*(_ptr)), \
171-
"+a" (o.low), "+d" (o.high) \
172-
: "b" (n.low), "c" (n.high), "S" (_ptr) \
173-
: "memory"); \
174-
\
175-
if (unlikely(!ret)) \
176-
*(_oldp) = o.full; \
177-
\
178-
likely(ret); \
179-
})
180-
181145
static __always_inline bool arch_try_cmpxchg64_local(volatile u64 *ptr, u64 *oldp, u64 new)
182146
{
183-
return __arch_try_cmpxchg64_emu_local(ptr, oldp, new);
147+
return __arch_try_cmpxchg64_emu(ptr, oldp, new, ,);
184148
}
185149
#define arch_try_cmpxchg64_local arch_try_cmpxchg64_local
186150

0 commit comments

Comments (0)