Skip to content

Commit 0f6563a

Browse files
committed
Merge branch 'for-next/asm' into for-next/core
* for-next/asm:
  arm64: uaccess: remove unnecessary earlyclobber
  arm64: uaccess: permit put_{user,kernel} to use zero register
  arm64: uaccess: permit __smp_store_release() to use zero register
  arm64: atomics: lse: improve cmpxchg implementation
2 parents 67eacd6 + 1724208 commit 0f6563a

File tree

3 files changed

+12
-19
lines changed

3 files changed

+12
-19
lines changed

arch/arm64/include/asm/atomic_lse.h

Lines changed: 5 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -251,22 +251,15 @@ __lse__cmpxchg_case_##name##sz(volatile void *ptr, \
251251
u##sz old, \
252252
u##sz new) \
253253
{ \
254-
register unsigned long x0 asm ("x0") = (unsigned long)ptr; \
255-
register u##sz x1 asm ("x1") = old; \
256-
register u##sz x2 asm ("x2") = new; \
257-
unsigned long tmp; \
258-
\
259254
asm volatile( \
260255
__LSE_PREAMBLE \
261-
" mov %" #w "[tmp], %" #w "[old]\n" \
262-
" cas" #mb #sfx "\t%" #w "[tmp], %" #w "[new], %[v]\n" \
263-
" mov %" #w "[ret], %" #w "[tmp]" \
264-
: [ret] "+r" (x0), [v] "+Q" (*(u##sz *)ptr), \
265-
[tmp] "=&r" (tmp) \
266-
: [old] "r" (x1), [new] "r" (x2) \
256+
" cas" #mb #sfx " %" #w "[old], %" #w "[new], %[v]\n" \
257+
: [v] "+Q" (*(u##sz *)ptr), \
258+
[old] "+r" (old) \
259+
: [new] "rZ" (new) \
267260
: cl); \
268261
\
269-
return x0; \
262+
return old; \
270263
}
271264

272265
__CMPXCHG_CASE(w, b, , 8, )

arch/arm64/include/asm/barrier.h

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -131,25 +131,25 @@ do { \
131131
case 1: \
132132
asm volatile ("stlrb %w1, %0" \
133133
: "=Q" (*__p) \
134-
: "r" (*(__u8 *)__u.__c) \
134+
: "rZ" (*(__u8 *)__u.__c) \
135135
: "memory"); \
136136
break; \
137137
case 2: \
138138
asm volatile ("stlrh %w1, %0" \
139139
: "=Q" (*__p) \
140-
: "r" (*(__u16 *)__u.__c) \
140+
: "rZ" (*(__u16 *)__u.__c) \
141141
: "memory"); \
142142
break; \
143143
case 4: \
144144
asm volatile ("stlr %w1, %0" \
145145
: "=Q" (*__p) \
146-
: "r" (*(__u32 *)__u.__c) \
146+
: "rZ" (*(__u32 *)__u.__c) \
147147
: "memory"); \
148148
break; \
149149
case 8: \
150-
asm volatile ("stlr %1, %0" \
150+
asm volatile ("stlr %x1, %0" \
151151
: "=Q" (*__p) \
152-
: "r" (*(__u64 *)__u.__c) \
152+
: "rZ" (*(__u64 *)__u.__c) \
153153
: "memory"); \
154154
break; \
155155
} \

arch/arm64/include/asm/uaccess.h

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -237,7 +237,7 @@ static inline void __user *__uaccess_mask_ptr(const void __user *ptr)
237237
"1: " load " " reg "1, [%2]\n" \
238238
"2:\n" \
239239
_ASM_EXTABLE_##type##ACCESS_ERR_ZERO(1b, 2b, %w0, %w1) \
240-
: "+r" (err), "=&r" (x) \
240+
: "+r" (err), "=r" (x) \
241241
: "r" (addr))
242242

243243
#define __raw_get_mem(ldr, x, ptr, err, type) \
@@ -327,7 +327,7 @@ do { \
327327
"2:\n" \
328328
_ASM_EXTABLE_##type##ACCESS_ERR(1b, 2b, %w0) \
329329
: "+r" (err) \
330-
: "r" (x), "r" (addr))
330+
: "rZ" (x), "r" (addr))
331331

332332
#define __raw_put_mem(str, x, ptr, err, type) \
333333
do { \

0 commit comments

Comments (0)