Skip to content

Commit d791a4d

Browse files
committed
Merge tag 'x86-asm-2024-05-13' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip
Pull x86 asm updates from Ingo Molnar:

 - Clean up & fix asm() operand modifiers & constraints

 - Misc cleanups

* tag 'x86-asm-2024-05-13' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip:
  x86/alternatives: Remove a superfluous newline in _static_cpu_has()
  x86/asm/64: Clean up memset16(), memset32(), memset64() assembly constraints in <asm/string_64.h>
  x86/asm: Use "m" operand constraint in WRUSSQ asm template
  x86/asm: Use %a instead of %P operand modifier in asm templates
  x86/asm: Use %c/%n instead of %P operand modifier in asm templates
  x86/asm: Remove %P operand modifier from altinstr asm templates
2 parents 019040f + a0c8cf9 commit d791a4d

File tree

10 files changed

+51
-49
lines changed

10 files changed

+51
-49
lines changed

arch/x86/boot/main.c

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -119,8 +119,8 @@ static void init_heap(void)
119119
char *stack_end;
120120

121121
if (boot_params.hdr.loadflags & CAN_USE_HEAP) {
122-
asm("leal %P1(%%esp),%0"
123-
: "=r" (stack_end) : "i" (-STACK_SIZE));
122+
asm("leal %n1(%%esp),%0"
123+
: "=r" (stack_end) : "i" (STACK_SIZE));
124124

125125
heap_end = (char *)
126126
((size_t)boot_params.hdr.heap_end_ptr + 0x200);

arch/x86/include/asm/alternative.h

Lines changed: 11 additions & 11 deletions
Original file line number | Diff line number | Diff line change
@@ -294,10 +294,10 @@ static inline int alternatives_text_reserved(void *start, void *end)
294294
* Otherwise, if CPU has feature1, newinstr1 is used.
295295
* Otherwise, oldinstr is used.
296296
*/
297-
#define alternative_input_2(oldinstr, newinstr1, ft_flags1, newinstr2, \
298-
ft_flags2, input...) \
299-
asm_inline volatile(ALTERNATIVE_2(oldinstr, newinstr1, ft_flags1, \
300-
newinstr2, ft_flags2) \
297+
#define alternative_input_2(oldinstr, newinstr1, ft_flags1, newinstr2, \
298+
ft_flags2, input...) \
299+
asm_inline volatile(ALTERNATIVE_2(oldinstr, newinstr1, ft_flags1, \
300+
newinstr2, ft_flags2) \
301301
: : "i" (0), ## input)
302302

303303
/* Like alternative_input, but with a single output argument */
@@ -307,7 +307,7 @@ static inline int alternatives_text_reserved(void *start, void *end)
307307

308308
/* Like alternative_io, but for replacing a direct call with another one. */
309309
#define alternative_call(oldfunc, newfunc, ft_flags, output, input...) \
310-
asm_inline volatile (ALTERNATIVE("call %P[old]", "call %P[new]", ft_flags) \
310+
asm_inline volatile (ALTERNATIVE("call %c[old]", "call %c[new]", ft_flags) \
311311
: output : [old] "i" (oldfunc), [new] "i" (newfunc), ## input)
312312

313313
/*
@@ -316,12 +316,12 @@ static inline int alternatives_text_reserved(void *start, void *end)
316316
* Otherwise, if CPU has feature1, function1 is used.
317317
* Otherwise, old function is used.
318318
*/
319-
#define alternative_call_2(oldfunc, newfunc1, ft_flags1, newfunc2, ft_flags2, \
320-
output, input...) \
321-
asm_inline volatile (ALTERNATIVE_2("call %P[old]", "call %P[new1]", ft_flags1,\
322-
"call %P[new2]", ft_flags2) \
323-
: output, ASM_CALL_CONSTRAINT \
324-
: [old] "i" (oldfunc), [new1] "i" (newfunc1), \
319+
#define alternative_call_2(oldfunc, newfunc1, ft_flags1, newfunc2, ft_flags2, \
320+
output, input...) \
321+
asm_inline volatile (ALTERNATIVE_2("call %c[old]", "call %c[new1]", ft_flags1, \
322+
"call %c[new2]", ft_flags2) \
323+
: output, ASM_CALL_CONSTRAINT \
324+
: [old] "i" (oldfunc), [new1] "i" (newfunc1), \
325325
[new2] "i" (newfunc2), ## input)
326326

327327
/*

arch/x86/include/asm/apic.h

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -92,7 +92,7 @@ static inline void native_apic_mem_write(u32 reg, u32 v)
9292
{
9393
volatile u32 *addr = (volatile u32 *)(APIC_BASE + reg);
9494

95-
alternative_io("movl %0, %P1", "xchgl %0, %P1", X86_BUG_11AP,
95+
alternative_io("movl %0, %1", "xchgl %0, %1", X86_BUG_11AP,
9696
ASM_OUTPUT2("=r" (v), "=m" (*addr)),
9797
ASM_OUTPUT2("0" (v), "m" (*addr)));
9898
}

arch/x86/include/asm/atomic64_32.h

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -50,7 +50,7 @@ static __always_inline s64 arch_atomic64_read_nonatomic(const atomic64_t *v)
5050

5151
#ifdef CONFIG_X86_CMPXCHG64
5252
#define __alternative_atomic64(f, g, out, in...) \
53-
asm volatile("call %P[func]" \
53+
asm volatile("call %c[func]" \
5454
: out : [func] "i" (atomic64_##g##_cx8), ## in)
5555

5656
#define ATOMIC64_DECL(sym) ATOMIC64_DECL_ONE(sym##_cx8)

arch/x86/include/asm/cpufeature.h

Lines changed: 2 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -172,11 +172,10 @@ extern void clear_cpu_cap(struct cpuinfo_x86 *c, unsigned int bit);
172172
*/
173173
static __always_inline bool _static_cpu_has(u16 bit)
174174
{
175-
asm goto(
176-
ALTERNATIVE_TERNARY("jmp 6f", %P[feature], "", "jmp %l[t_no]")
175+
asm goto(ALTERNATIVE_TERNARY("jmp 6f", %c[feature], "", "jmp %l[t_no]")
177176
".pushsection .altinstr_aux,\"ax\"\n"
178177
"6:\n"
179-
" testb %[bitnum]," _ASM_RIP(%P[cap_byte]) "\n"
178+
" testb %[bitnum], %a[cap_byte]\n"
180179
" jnz %l[t_yes]\n"
181180
" jmp %l[t_no]\n"
182181
".popsection\n"

arch/x86/include/asm/irq_stack.h

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -100,7 +100,7 @@
100100
}
101101

102102
#define ASM_CALL_ARG0 \
103-
"call %P[__func] \n" \
103+
"call %c[__func] \n" \
104104
ASM_REACHABLE
105105

106106
#define ASM_CALL_ARG1 \

arch/x86/include/asm/processor.h

Lines changed: 3 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -586,7 +586,7 @@ extern char ignore_fpu_irq;
586586
# define BASE_PREFETCH ""
587587
# define ARCH_HAS_PREFETCH
588588
#else
589-
# define BASE_PREFETCH "prefetcht0 %P1"
589+
# define BASE_PREFETCH "prefetcht0 %1"
590590
#endif
591591

592592
/*
@@ -597,7 +597,7 @@ extern char ignore_fpu_irq;
597597
*/
598598
static inline void prefetch(const void *x)
599599
{
600-
alternative_input(BASE_PREFETCH, "prefetchnta %P1",
600+
alternative_input(BASE_PREFETCH, "prefetchnta %1",
601601
X86_FEATURE_XMM,
602602
"m" (*(const char *)x));
603603
}
@@ -609,7 +609,7 @@ static inline void prefetch(const void *x)
609609
*/
610610
static __always_inline void prefetchw(const void *x)
611611
{
612-
alternative_input(BASE_PREFETCH, "prefetchw %P1",
612+
alternative_input(BASE_PREFETCH, "prefetchw %1",
613613
X86_FEATURE_3DNOWPREFETCH,
614614
"m" (*(const char *)x));
615615
}

arch/x86/include/asm/special_insns.h

Lines changed: 4 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -182,8 +182,8 @@ static __always_inline void clflush(volatile void *__p)
182182

183183
static inline void clflushopt(volatile void *__p)
184184
{
185-
alternative_io(".byte 0x3e; clflush %P0",
186-
".byte 0x66; clflush %P0",
185+
alternative_io(".byte 0x3e; clflush %0",
186+
".byte 0x66; clflush %0",
187187
X86_FEATURE_CLFLUSHOPT,
188188
"+m" (*(volatile char __force *)__p));
189189
}
@@ -205,9 +205,9 @@ static inline void clwb(volatile void *__p)
205205
#ifdef CONFIG_X86_USER_SHADOW_STACK
206206
static inline int write_user_shstk_64(u64 __user *addr, u64 val)
207207
{
208-
asm goto("1: wrussq %[val], (%[addr])\n"
208+
asm goto("1: wrussq %[val], %[addr]\n"
209209
_ASM_EXTABLE(1b, %l[fail])
210-
:: [addr] "r" (addr), [val] "r" (val)
210+
:: [addr] "m" (*addr), [val] "r" (val)
211211
:: fail);
212212
return 0;
213213
fail:

arch/x86/include/asm/string_64.h

Lines changed: 24 additions & 21 deletions
Original file line number | Diff line number | Diff line change
@@ -30,37 +30,40 @@ void *__memset(void *s, int c, size_t n);
3030
#define __HAVE_ARCH_MEMSET16
3131
static inline void *memset16(uint16_t *s, uint16_t v, size_t n)
3232
{
33-
long d0, d1;
34-
asm volatile("rep\n\t"
35-
"stosw"
36-
: "=&c" (d0), "=&D" (d1)
37-
: "a" (v), "1" (s), "0" (n)
38-
: "memory");
39-
return s;
33+
const __auto_type s0 = s;
34+
asm volatile (
35+
"rep stosw"
36+
: "+D" (s), "+c" (n)
37+
: "a" (v)
38+
: "memory"
39+
);
40+
return s0;
4041
}
4142

4243
#define __HAVE_ARCH_MEMSET32
4344
static inline void *memset32(uint32_t *s, uint32_t v, size_t n)
4445
{
45-
long d0, d1;
46-
asm volatile("rep\n\t"
47-
"stosl"
48-
: "=&c" (d0), "=&D" (d1)
49-
: "a" (v), "1" (s), "0" (n)
50-
: "memory");
51-
return s;
46+
const __auto_type s0 = s;
47+
asm volatile (
48+
"rep stosl"
49+
: "+D" (s), "+c" (n)
50+
: "a" (v)
51+
: "memory"
52+
);
53+
return s0;
5254
}
5355

5456
#define __HAVE_ARCH_MEMSET64
5557
static inline void *memset64(uint64_t *s, uint64_t v, size_t n)
5658
{
57-
long d0, d1;
58-
asm volatile("rep\n\t"
59-
"stosq"
60-
: "=&c" (d0), "=&D" (d1)
61-
: "a" (v), "1" (s), "0" (n)
62-
: "memory");
63-
return s;
59+
const __auto_type s0 = s;
60+
asm volatile (
61+
"rep stosq"
62+
: "+D" (s), "+c" (n)
63+
: "a" (v)
64+
: "memory"
65+
);
66+
return s0;
6467
}
6568
#endif
6669

arch/x86/include/asm/uaccess.h

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -78,7 +78,7 @@ extern int __get_user_bad(void);
7878
int __ret_gu; \
7979
register __inttype(*(ptr)) __val_gu asm("%"_ASM_DX); \
8080
__chk_user_ptr(ptr); \
81-
asm volatile("call __" #fn "_%P4" \
81+
asm volatile("call __" #fn "_%c4" \
8282
: "=a" (__ret_gu), "=r" (__val_gu), \
8383
ASM_CALL_CONSTRAINT \
8484
: "0" (ptr), "i" (sizeof(*(ptr)))); \
@@ -177,7 +177,7 @@ extern void __put_user_nocheck_8(void);
177177
__chk_user_ptr(__ptr); \
178178
__ptr_pu = __ptr; \
179179
__val_pu = __x; \
180-
asm volatile("call __" #fn "_%P[size]" \
180+
asm volatile("call __" #fn "_%c[size]" \
181181
: "=c" (__ret_pu), \
182182
ASM_CALL_CONSTRAINT \
183183
: "0" (__ptr_pu), \

0 commit comments

Comments (0)