@@ -64,6 +64,11 @@ static __always_inline bool __try_cmpxchg64(volatile u64 *ptr, u64 *oldp, u64 ne
64
64
return __arch_try_cmpxchg64 (ptr , oldp , new , LOCK_PREFIX );
65
65
}
66
66
67
/*
 * CPU-local variant of __try_cmpxchg64(): passes an empty lock-prefix
 * argument to __arch_try_cmpxchg64() (contrast with the LOCK_PREFIX used
 * by __try_cmpxchg64() above), so the cmpxchg8b is not LOCK-prefixed.
 * NOTE(review): presumably safe only against the local CPU (interrupts),
 * not against other CPUs — confirm against callers.
 */
static __always_inline bool __try_cmpxchg64_local(volatile u64 *ptr, u64 *oldp, u64 new)
{
	/* The trailing comma deliberately supplies an empty lock prefix. */
	return __arch_try_cmpxchg64(ptr, oldp, new,);
}
67
72
#ifdef CONFIG_X86_CMPXCHG64
68
73
69
74
#define arch_cmpxchg64 __cmpxchg64
@@ -72,6 +77,8 @@ static __always_inline bool __try_cmpxchg64(volatile u64 *ptr, u64 *oldp, u64 ne
72
77
73
78
#define arch_try_cmpxchg64 __try_cmpxchg64
74
79
80
/*
 * CONFIG_X86_CMPXCHG64 is set (see #ifdef above): the CPU is guaranteed to
 * have cmpxchg8b, so the local variant maps directly to the inline version.
 */
#define arch_try_cmpxchg64_local __try_cmpxchg64_local
75
82
#else
76
83
77
84
/*
@@ -150,6 +157,33 @@ static __always_inline bool arch_try_cmpxchg64(volatile u64 *ptr, u64 *oldp, u64
150
157
}
151
158
#define arch_try_cmpxchg64 arch_try_cmpxchg64
152
159
160
/*
 * Local (non-LOCK-prefixed) 64-bit try_cmpxchg for builds where CMPXCHG8B
 * is not guaranteed (!CONFIG_X86_CMPXCHG64 branch).
 *
 * ALTERNATIVE() patches the site at boot: CPUs with X86_FEATURE_CX8 run the
 * native cmpxchg8b instruction; older CPUs call the cmpxchg8b_emu assembly
 * helper instead (the "S" input hands it the pointer in %esi).
 */
#define __arch_try_cmpxchg64_emu_local(_ptr, _oldp, _new)		\
({									\
	/* Split 64-bit old/new into the eax:edx / ebx:ecx register halves */ \
	/* that cmpxchg8b operates on.					*/ \
	union __u64_halves o = { .full = *(_oldp), },			\
			   n = { .full = (_new), };			\
	bool ret;							\
									\
	asm volatile(ALTERNATIVE("call cmpxchg8b_emu",			\
				 "cmpxchg8b %[ptr]", X86_FEATURE_CX8)	\
		     CC_SET(e)						\
		     : CC_OUT(e) (ret),					\
		       [ptr] "+m" (*(_ptr)),				\
		       "+a" (o.low), "+d" (o.high)			\
		     : "b" (n.low), "c" (n.high), "S" (_ptr)		\
		     : "memory");					\
									\
	/* On failure, report back the value actually found at *_ptr,	\
	 * matching the try_cmpxchg contract.				*/ \
	if (unlikely(!ret))						\
		*(_oldp) = o.full;					\
									\
	likely(ret);							\
})

/*
 * Emulated arch_try_cmpxchg64_local(): same interface as the CX8-only
 * variant, implemented via the alternative/emulation macro above.
 */
static __always_inline bool arch_try_cmpxchg64_local(volatile u64 *ptr, u64 *oldp, u64 new)
{
	return __arch_try_cmpxchg64_emu_local(ptr, oldp, new);
}
#define arch_try_cmpxchg64_local arch_try_cmpxchg64_local
153
187
#endif
154
188
155
189
#define system_has_cmpxchg64 () boot_cpu_has(X86_FEATURE_CX8)
0 commit comments