
Commit 34fdce6

Peter Zijlstra authored and committed
x86: Change {JMP,CALL}_NOSPEC argument
In order to change the {JMP,CALL}_NOSPEC macros to call out-of-line versions of the retpoline magic, we need to remove the '%' from the argument, such that we can paste it onto symbol names.

Signed-off-by: Peter Zijlstra (Intel) <[email protected]>
Acked-by: Josh Poimboeuf <[email protected]>
Link: https://lkml.kernel.org/r/[email protected]
1 parent ca3f0d8 commit 34fdce6
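Why the '%' has to go: GNU as can paste a macro argument straight onto a symbol name, but only when the argument is a bare token (r11), not a register operand (%r11). A minimal sketch of the out-of-line shape this prepares for, assuming the kernel's __x86_indirect_thunk_<reg> thunk naming; the actual follow-up macro body is not part of this commit:

	.macro CALL_NOSPEC reg:req
		/* A bare "r11" pastes cleanly onto the thunk's symbol name. */
		call	__x86_indirect_thunk_\reg
	.endm

		CALL_NOSPEC r11		/* expands to: call __x86_indirect_thunk_r11 */

With the old convention, CALL_NOSPEC %r11 would have pasted into the invalid symbol __x86_indirect_thunk_%r11.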

11 files changed: +35 -35 lines


arch/x86/crypto/aesni-intel_asm.S

Lines changed: 2 additions & 2 deletions

@@ -2758,7 +2758,7 @@ SYM_FUNC_START(aesni_xts_crypt8)
 	pxor INC, STATE4
 	movdqu IV, 0x30(OUTP)
 
-	CALL_NOSPEC %r11
+	CALL_NOSPEC r11
 
 	movdqu 0x00(OUTP), INC
 	pxor INC, STATE1

@@ -2803,7 +2803,7 @@ SYM_FUNC_START(aesni_xts_crypt8)
 	_aesni_gf128mul_x_ble()
 	movups IV, (IVP)
 
-	CALL_NOSPEC %r11
+	CALL_NOSPEC r11
 
 	movdqu 0x40(OUTP), INC
 	pxor INC, STATE1

arch/x86/crypto/camellia-aesni-avx-asm_64.S

Lines changed: 1 addition & 1 deletion

@@ -1228,7 +1228,7 @@ SYM_FUNC_START_LOCAL(camellia_xts_crypt_16way)
 	vpxor 14 * 16(%rax), %xmm15, %xmm14;
 	vpxor 15 * 16(%rax), %xmm15, %xmm15;
 
-	CALL_NOSPEC %r9;
+	CALL_NOSPEC r9;
 
 	addq $(16 * 16), %rsp;

arch/x86/crypto/camellia-aesni-avx2-asm_64.S

Lines changed: 1 addition & 1 deletion

@@ -1339,7 +1339,7 @@ SYM_FUNC_START_LOCAL(camellia_xts_crypt_32way)
 	vpxor 14 * 32(%rax), %ymm15, %ymm14;
 	vpxor 15 * 32(%rax), %ymm15, %ymm15;
 
-	CALL_NOSPEC %r9;
+	CALL_NOSPEC r9;
 
 	addq $(16 * 32), %rsp;

arch/x86/crypto/crc32c-pcl-intel-asm_64.S

Lines changed: 13 additions & 13 deletions

@@ -75,7 +75,7 @@
 
 .text
 SYM_FUNC_START(crc_pcl)
-#define    bufp		%rdi
+#define    bufp		rdi
 #define    bufp_dw	%edi
 #define    bufp_w	%di
 #define    bufp_b	%dil

@@ -105,9 +105,9 @@ SYM_FUNC_START(crc_pcl)
 	## 1) ALIGN:
 	################################################################
 
-	mov	bufp, bufptmp		# rdi = *buf
-	neg	bufp
-	and	$7, bufp		# calculate the unalignment amount of
+	mov	%bufp, bufptmp		# rdi = *buf
+	neg	%bufp
+	and	$7, %bufp		# calculate the unalignment amount of
 					#  the address
 	je	proc_block		# Skip if aligned
 

@@ -123,13 +123,13 @@ SYM_FUNC_START(crc_pcl)
 do_align:
 	#### Calculate CRC of unaligned bytes of the buffer (if any)
 	movq	(bufptmp), tmp		# load a quadward from the buffer
-	add	bufp, bufptmp		# align buffer pointer for quadword
+	add	%bufp, bufptmp		# align buffer pointer for quadword
 					#  processing
-	sub	bufp, len		# update buffer length
+	sub	%bufp, len		# update buffer length
 align_loop:
 	crc32b	%bl, crc_init_dw	# compute crc32 of 1-byte
 	shr	$8, tmp			# get next byte
-	dec	bufp
+	dec	%bufp
 	jne	align_loop
 
 proc_block:

@@ -169,10 +169,10 @@ continue_block:
 	xor	crc2, crc2
 
 	## branch into array
-	lea	jump_table(%rip), bufp
-	movzxw	(bufp, %rax, 2), len
-	lea	crc_array(%rip), bufp
-	lea	(bufp, len, 1), bufp
+	lea	jump_table(%rip), %bufp
+	movzxw	(%bufp, %rax, 2), len
+	lea	crc_array(%rip), %bufp
+	lea	(%bufp, len, 1), %bufp
 	JMP_NOSPEC bufp
 
 	################################################################

@@ -218,9 +218,9 @@ LABEL crc_ %i
 	## 4) Combine three results:
 	################################################################
 
-	lea	(K_table-8)(%rip), bufp		# first entry is for idx 1
+	lea	(K_table-8)(%rip), %bufp	# first entry is for idx 1
 	shlq	$3, %rax			# rax *= 8
-	pmovzxdq (bufp,%rax), %xmm0		# 2 consts: K1:K2
+	pmovzxdq (%bufp,%rax), %xmm0		# 2 consts: K1:K2
 	leal	(%eax,%eax,2), %eax		# rax *= 3 (total *24)
 	subq	%rax, tmp			# tmp -= rax*24
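The churn in this file is the mirror image of the macro change: bufp used to be #defined as %rdi and passed around bare. Since JMP_NOSPEC now wants a bare register token, the define drops the '%' and every ordinary operand spells it out through the preprocessor instead. A condensed view of the two sides of the resulting convention (no new code, just the pattern from the hunks above):

#define bufp	rdi		# bare name, usable as a JMP_NOSPEC argument

	mov	%bufp, bufptmp	# ordinary operands now add the '%' themselves
	JMP_NOSPEC bufp		# the macro receives a pasteable token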

arch/x86/entry/entry_32.S

Lines changed: 3 additions & 3 deletions

@@ -816,7 +816,7 @@ SYM_CODE_START(ret_from_fork)
 
 	/* kernel thread */
 1:	movl	%edi, %eax
-	CALL_NOSPEC %ebx
+	CALL_NOSPEC ebx
 	/*
 	 * A kernel thread is allowed to return here after successfully
 	 * calling do_execve(). Exit to userspace to complete the execve()

@@ -1501,7 +1501,7 @@ SYM_CODE_START_LOCAL_NOALIGN(common_exception_read_cr2)
 
 	TRACE_IRQS_OFF
 	movl	%esp, %eax		# pt_regs pointer
-	CALL_NOSPEC %edi
+	CALL_NOSPEC edi
 	jmp	ret_from_exception
 SYM_CODE_END(common_exception_read_cr2)
 

@@ -1522,7 +1522,7 @@ SYM_CODE_START_LOCAL_NOALIGN(common_exception)
 
 	TRACE_IRQS_OFF
 	movl	%esp, %eax		# pt_regs pointer
-	CALL_NOSPEC %edi
+	CALL_NOSPEC edi
 	jmp	ret_from_exception
 SYM_CODE_END(common_exception)

arch/x86/entry/entry_64.S

Lines changed: 1 addition & 1 deletion

@@ -349,7 +349,7 @@ SYM_CODE_START(ret_from_fork)
 	/* kernel thread */
 	UNWIND_HINT_EMPTY
 	movq	%r12, %rdi
-	CALL_NOSPEC %rbx
+	CALL_NOSPEC rbx
 	/*
 	 * A kernel thread is allowed to return here after successfully
 	 * calling do_execve(). Exit to userspace to complete the execve()

arch/x86/include/asm/nospec-branch.h

Lines changed: 8 additions & 8 deletions

@@ -118,22 +118,22 @@
 .macro JMP_NOSPEC reg:req
 #ifdef CONFIG_RETPOLINE
 	ANNOTATE_NOSPEC_ALTERNATIVE
-	ALTERNATIVE_2 __stringify(ANNOTATE_RETPOLINE_SAFE; jmp *\reg), \
-		__stringify(RETPOLINE_JMP \reg), X86_FEATURE_RETPOLINE, \
-		__stringify(lfence; ANNOTATE_RETPOLINE_SAFE; jmp *\reg), X86_FEATURE_RETPOLINE_AMD
+	ALTERNATIVE_2 __stringify(ANNOTATE_RETPOLINE_SAFE; jmp *%\reg), \
+		__stringify(RETPOLINE_JMP %\reg), X86_FEATURE_RETPOLINE,\
+		__stringify(lfence; ANNOTATE_RETPOLINE_SAFE; jmp *%\reg), X86_FEATURE_RETPOLINE_AMD
 #else
-	jmp	*\reg
+	jmp	*%\reg
 #endif
 .endm
 
 .macro CALL_NOSPEC reg:req
 #ifdef CONFIG_RETPOLINE
 	ANNOTATE_NOSPEC_ALTERNATIVE
-	ALTERNATIVE_2 __stringify(ANNOTATE_RETPOLINE_SAFE; call *\reg), \
-	__stringify(RETPOLINE_CALL \reg), X86_FEATURE_RETPOLINE,\
-	__stringify(lfence; ANNOTATE_RETPOLINE_SAFE; call *\reg), X86_FEATURE_RETPOLINE_AMD
+	ALTERNATIVE_2 __stringify(ANNOTATE_RETPOLINE_SAFE; call *%\reg),\
+	__stringify(RETPOLINE_CALL %\reg), X86_FEATURE_RETPOLINE,\
+	__stringify(lfence; ANNOTATE_RETPOLINE_SAFE; call *%\reg), X86_FEATURE_RETPOLINE_AMD
 #else
-	call	*\reg
+	call	*%\reg
 #endif
 .endm
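The macro definitions absorb the '%': every expansion now writes *%\reg (or passes %\reg on to RETPOLINE_JMP/RETPOLINE_CALL), so call sites hand over a bare register name while the generated instructions stay the same. A quick check of the new convention, taking the simple !CONFIG_RETPOLINE branch for brevity:

	CALL_NOSPEC r11		/* the macro re-adds the '%' internally */
	/* expands to: */
	call	*%r11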

arch/x86/kernel/ftrace_32.S

Lines changed: 1 addition & 1 deletion

@@ -189,5 +189,5 @@ return_to_handler:
 	movl	%eax, %ecx
 	popl	%edx
 	popl	%eax
-	JMP_NOSPEC %ecx
+	JMP_NOSPEC ecx
 #endif

arch/x86/kernel/ftrace_64.S

Lines changed: 2 additions & 2 deletions

@@ -301,7 +301,7 @@ trace:
 	 * function tracing is enabled.
 	 */
 	movq ftrace_trace_function, %r8
-	CALL_NOSPEC %r8
+	CALL_NOSPEC r8
 	restore_mcount_regs
 
 	jmp fgraph_trace

@@ -338,6 +338,6 @@ SYM_CODE_START(return_to_handler)
 	movq 8(%rsp), %rdx
 	movq (%rsp), %rax
 	addq $24, %rsp
-	JMP_NOSPEC %rdi
+	JMP_NOSPEC rdi
 SYM_CODE_END(return_to_handler)
 #endif

arch/x86/lib/checksum_32.S

Lines changed: 2 additions & 2 deletions

@@ -153,7 +153,7 @@ SYM_FUNC_START(csum_partial)
 	negl %ebx
 	lea 45f(%ebx,%ebx,2), %ebx
 	testl %esi, %esi
-	JMP_NOSPEC %ebx
+	JMP_NOSPEC ebx
 
 	# Handle 2-byte-aligned regions
 20:	addw (%esi), %ax

@@ -436,7 +436,7 @@ SYM_FUNC_START(csum_partial_copy_generic)
 	andl $-32,%edx
 	lea 3f(%ebx,%ebx), %ebx
 	testl %esi, %esi
-	JMP_NOSPEC %ebx
+	JMP_NOSPEC ebx
 1:	addl $64,%esi
 	addl $64,%edi
 	SRC(movb -32(%edx),%bl) ; SRC(movb (%edx),%bl)
