Skip to content

Commit aef95da

Browse files
ubizjak authored and Ingo Molnar committed
locking/atomic/x86: Introduce arch_try_cmpxchg64() for !CONFIG_X86_CMPXCHG64
Commit: 6d12c8d ("percpu: Wire up cmpxchg128") improved emulated cmpxchg8b_emu() library function to return success/failure in a ZF flag. Define arch_try_cmpxchg64() for !CONFIG_X86_CMPXCHG64 targets to override the generic arch_try_cmpxchg() with an optimized target specific implementation that handles ZF flag. The assembly code at the call sites improves from: bf56d: e8 fc ff ff ff call cmpxchg8b_emu bf572: 8b 74 24 28 mov 0x28(%esp),%esi bf576: 89 c3 mov %eax,%ebx bf578: 89 d1 mov %edx,%ecx bf57a: 8b 7c 24 2c mov 0x2c(%esp),%edi bf57e: 89 f0 mov %esi,%eax bf580: 89 fa mov %edi,%edx bf582: 31 d8 xor %ebx,%eax bf584: 31 ca xor %ecx,%edx bf586: 09 d0 or %edx,%eax bf588: 0f 84 e3 01 00 00 je bf771 <...> to: bf572: e8 fc ff ff ff call cmpxchg8b_emu bf577: 0f 84 b6 01 00 00 je bf733 <...> Signed-off-by: Uros Bizjak <[email protected]> Signed-off-by: Ingo Molnar <[email protected]> Cc: Linus Torvalds <[email protected]> Cc: "H. Peter Anvin" <[email protected]> Link: https://lore.kernel.org/r/[email protected]
1 parent 7016cc5 commit aef95da

File tree

1 file changed

+28
-0
lines changed

1 file changed

+28
-0
lines changed

arch/x86/include/asm/cmpxchg_32.h

Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -122,6 +122,34 @@ static __always_inline u64 arch_cmpxchg64_local(volatile u64 *ptr, u64 old, u64
122122
}
123123
#define arch_cmpxchg64_local arch_cmpxchg64_local
124124

125+
#define __arch_try_cmpxchg64_emu(_ptr, _oldp, _new) \
126+
({ \
127+
union __u64_halves o = { .full = *(_oldp), }, \
128+
n = { .full = (_new), }; \
129+
bool ret; \
130+
\
131+
asm volatile(ALTERNATIVE(LOCK_PREFIX_HERE \
132+
"call cmpxchg8b_emu", \
133+
"lock; cmpxchg8b %[ptr]", X86_FEATURE_CX8) \
134+
CC_SET(e) \
135+
: CC_OUT(e) (ret), \
136+
[ptr] "+m" (*(_ptr)), \
137+
"+a" (o.low), "+d" (o.high) \
138+
: "b" (n.low), "c" (n.high), "S" (_ptr) \
139+
: "memory"); \
140+
\
141+
if (unlikely(!ret)) \
142+
*(_oldp) = o.full; \
143+
\
144+
likely(ret); \
145+
})
146+
147+
static __always_inline bool arch_try_cmpxchg64(volatile u64 *ptr, u64 *oldp, u64 new)
148+
{
149+
return __arch_try_cmpxchg64_emu(ptr, oldp, new);
150+
}
151+
#define arch_try_cmpxchg64 arch_try_cmpxchg64
152+
125153
#endif
126154

127155
#define system_has_cmpxchg64() boot_cpu_has(X86_FEATURE_CX8)

0 commit comments

Comments
 (0)