Skip to content

Commit 69fc06f

Browse files
committed
Merge tag 'objtool-core-2020-06-01' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip
Pull objtool updates from Ingo Molnar: "There are a lot of objtool changes in this cycle, all across the map: - Speed up objtool significantly, especially when there are large number of sections - Improve objtool's understanding of special instructions such as IRET, to reduce the number of annotations required - Implement 'noinstr' validation - Do baby steps for non-x86 objtool use - Simplify/fix retpoline decoding - Add vmlinux validation - Improve documentation - Fix various bugs and apply smaller cleanups" * tag 'objtool-core-2020-06-01' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip: (54 commits) objtool: Enable compilation of objtool for all architectures objtool: Move struct objtool_file into arch-independent header objtool: Exit successfully when requesting help objtool: Add check_kcov_mode() to the uaccess safelist samples/ftrace: Fix asm function ELF annotations objtool: optimize add_dead_ends for split sections objtool: use gelf_getsymshndx to handle >64k sections objtool: Allow no-op CFI ops in alternatives x86/retpoline: Fix retpoline unwind x86: Change {JMP,CALL}_NOSPEC argument x86: Simplify retpoline declaration x86/speculation: Change FILL_RETURN_BUFFER to work with objtool objtool: Add support for intra-function calls objtool: Move the IRET hack into the arch decoder objtool: Remove INSN_STACK objtool: Make handle_insn_ops() unconditional objtool: Rework allocating stack_ops on decode objtool: UNWIND_HINT_RET_OFFSET should not check registers objtool: is_fentry_call() crashes if call has no destination x86,smap: Fix smap_{save,restore}() alternatives ...
2 parents 6005606 + 0decf1f commit 69fc06f

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

46 files changed

+1228
-724
lines changed

arch/x86/crypto/aesni-intel_asm.S

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2758,7 +2758,7 @@ SYM_FUNC_START(aesni_xts_crypt8)
27582758
pxor INC, STATE4
27592759
movdqu IV, 0x30(OUTP)
27602760

2761-
CALL_NOSPEC %r11
2761+
CALL_NOSPEC r11
27622762

27632763
movdqu 0x00(OUTP), INC
27642764
pxor INC, STATE1
@@ -2803,7 +2803,7 @@ SYM_FUNC_START(aesni_xts_crypt8)
28032803
_aesni_gf128mul_x_ble()
28042804
movups IV, (IVP)
28052805

2806-
CALL_NOSPEC %r11
2806+
CALL_NOSPEC r11
28072807

28082808
movdqu 0x40(OUTP), INC
28092809
pxor INC, STATE1

arch/x86/crypto/camellia-aesni-avx-asm_64.S

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1228,7 +1228,7 @@ SYM_FUNC_START_LOCAL(camellia_xts_crypt_16way)
12281228
vpxor 14 * 16(%rax), %xmm15, %xmm14;
12291229
vpxor 15 * 16(%rax), %xmm15, %xmm15;
12301230

1231-
CALL_NOSPEC %r9;
1231+
CALL_NOSPEC r9;
12321232

12331233
addq $(16 * 16), %rsp;
12341234

arch/x86/crypto/camellia-aesni-avx2-asm_64.S

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1339,7 +1339,7 @@ SYM_FUNC_START_LOCAL(camellia_xts_crypt_32way)
13391339
vpxor 14 * 32(%rax), %ymm15, %ymm14;
13401340
vpxor 15 * 32(%rax), %ymm15, %ymm15;
13411341

1342-
CALL_NOSPEC %r9;
1342+
CALL_NOSPEC r9;
13431343

13441344
addq $(16 * 32), %rsp;
13451345

arch/x86/crypto/crc32c-pcl-intel-asm_64.S

Lines changed: 13 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -75,7 +75,7 @@
7575

7676
.text
7777
SYM_FUNC_START(crc_pcl)
78-
#define bufp %rdi
78+
#define bufp rdi
7979
#define bufp_dw %edi
8080
#define bufp_w %di
8181
#define bufp_b %dil
@@ -105,9 +105,9 @@ SYM_FUNC_START(crc_pcl)
105105
## 1) ALIGN:
106106
################################################################
107107

108-
mov bufp, bufptmp # rdi = *buf
109-
neg bufp
110-
and $7, bufp # calculate the unalignment amount of
108+
mov %bufp, bufptmp # rdi = *buf
109+
neg %bufp
110+
and $7, %bufp # calculate the unalignment amount of
111111
# the address
112112
je proc_block # Skip if aligned
113113

@@ -123,13 +123,13 @@ SYM_FUNC_START(crc_pcl)
123123
do_align:
124124
#### Calculate CRC of unaligned bytes of the buffer (if any)
125125
movq (bufptmp), tmp # load a quadword from the buffer
126-
add bufp, bufptmp # align buffer pointer for quadword
126+
add %bufp, bufptmp # align buffer pointer for quadword
127127
# processing
128-
sub bufp, len # update buffer length
128+
sub %bufp, len # update buffer length
129129
align_loop:
130130
crc32b %bl, crc_init_dw # compute crc32 of 1-byte
131131
shr $8, tmp # get next byte
132-
dec bufp
132+
dec %bufp
133133
jne align_loop
134134

135135
proc_block:
@@ -169,10 +169,10 @@ continue_block:
169169
xor crc2, crc2
170170

171171
## branch into array
172-
lea jump_table(%rip), bufp
173-
movzxw (bufp, %rax, 2), len
174-
lea crc_array(%rip), bufp
175-
lea (bufp, len, 1), bufp
172+
lea jump_table(%rip), %bufp
173+
movzxw (%bufp, %rax, 2), len
174+
lea crc_array(%rip), %bufp
175+
lea (%bufp, len, 1), %bufp
176176
JMP_NOSPEC bufp
177177

178178
################################################################
@@ -218,9 +218,9 @@ LABEL crc_ %i
218218
## 4) Combine three results:
219219
################################################################
220220

221-
lea (K_table-8)(%rip), bufp # first entry is for idx 1
221+
lea (K_table-8)(%rip), %bufp # first entry is for idx 1
222222
shlq $3, %rax # rax *= 8
223-
pmovzxdq (bufp,%rax), %xmm0 # 2 consts: K1:K2
223+
pmovzxdq (%bufp,%rax), %xmm0 # 2 consts: K1:K2
224224
leal (%eax,%eax,2), %eax # rax *= 3 (total *24)
225225
subq %rax, tmp # tmp -= rax*24
226226

arch/x86/entry/entry_32.S

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -816,7 +816,7 @@ SYM_CODE_START(ret_from_fork)
816816

817817
/* kernel thread */
818818
1: movl %edi, %eax
819-
CALL_NOSPEC %ebx
819+
CALL_NOSPEC ebx
820820
/*
821821
* A kernel thread is allowed to return here after successfully
822822
* calling do_execve(). Exit to userspace to complete the execve()
@@ -1501,7 +1501,7 @@ SYM_CODE_START_LOCAL_NOALIGN(common_exception_read_cr2)
15011501

15021502
TRACE_IRQS_OFF
15031503
movl %esp, %eax # pt_regs pointer
1504-
CALL_NOSPEC %edi
1504+
CALL_NOSPEC edi
15051505
jmp ret_from_exception
15061506
SYM_CODE_END(common_exception_read_cr2)
15071507

@@ -1522,7 +1522,7 @@ SYM_CODE_START_LOCAL_NOALIGN(common_exception)
15221522

15231523
TRACE_IRQS_OFF
15241524
movl %esp, %eax # pt_regs pointer
1525-
CALL_NOSPEC %edi
1525+
CALL_NOSPEC edi
15261526
jmp ret_from_exception
15271527
SYM_CODE_END(common_exception)
15281528

arch/x86/entry/entry_64.S

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -348,7 +348,7 @@ SYM_CODE_START(ret_from_fork)
348348
/* kernel thread */
349349
UNWIND_HINT_EMPTY
350350
movq %r12, %rdi
351-
CALL_NOSPEC %rbx
351+
CALL_NOSPEC rbx
352352
/*
353353
* A kernel thread is allowed to return here after successfully
354354
* calling do_execve(). Exit to userspace to complete the execve()
Lines changed: 25 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,25 @@
1+
#ifdef CONFIG_64BIT
2+
GEN(rax)
3+
GEN(rbx)
4+
GEN(rcx)
5+
GEN(rdx)
6+
GEN(rsi)
7+
GEN(rdi)
8+
GEN(rbp)
9+
GEN(r8)
10+
GEN(r9)
11+
GEN(r10)
12+
GEN(r11)
13+
GEN(r12)
14+
GEN(r13)
15+
GEN(r14)
16+
GEN(r15)
17+
#else
18+
GEN(eax)
19+
GEN(ebx)
20+
GEN(ecx)
21+
GEN(edx)
22+
GEN(esi)
23+
GEN(edi)
24+
GEN(ebp)
25+
#endif

arch/x86/include/asm/asm-prototypes.h

Lines changed: 15 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -17,24 +17,19 @@ extern void cmpxchg8b_emu(void);
1717
#endif
1818

1919
#ifdef CONFIG_RETPOLINE
20-
#ifdef CONFIG_X86_32
21-
#define INDIRECT_THUNK(reg) extern asmlinkage void __x86_indirect_thunk_e ## reg(void);
22-
#else
23-
#define INDIRECT_THUNK(reg) extern asmlinkage void __x86_indirect_thunk_r ## reg(void);
24-
INDIRECT_THUNK(8)
25-
INDIRECT_THUNK(9)
26-
INDIRECT_THUNK(10)
27-
INDIRECT_THUNK(11)
28-
INDIRECT_THUNK(12)
29-
INDIRECT_THUNK(13)
30-
INDIRECT_THUNK(14)
31-
INDIRECT_THUNK(15)
32-
#endif
33-
INDIRECT_THUNK(ax)
34-
INDIRECT_THUNK(bx)
35-
INDIRECT_THUNK(cx)
36-
INDIRECT_THUNK(dx)
37-
INDIRECT_THUNK(si)
38-
INDIRECT_THUNK(di)
39-
INDIRECT_THUNK(bp)
20+
21+
#define DECL_INDIRECT_THUNK(reg) \
22+
extern asmlinkage void __x86_indirect_thunk_ ## reg (void);
23+
24+
#define DECL_RETPOLINE(reg) \
25+
extern asmlinkage void __x86_retpoline_ ## reg (void);
26+
27+
#undef GEN
28+
#define GEN(reg) DECL_INDIRECT_THUNK(reg)
29+
#include <asm/GEN-for-each-reg.h>
30+
31+
#undef GEN
32+
#define GEN(reg) DECL_RETPOLINE(reg)
33+
#include <asm/GEN-for-each-reg.h>
34+
4035
#endif /* CONFIG_RETPOLINE */

arch/x86/include/asm/nospec-branch.h

Lines changed: 20 additions & 56 deletions
Original file line numberDiff line numberDiff line change
@@ -4,20 +4,13 @@
44
#define _ASM_X86_NOSPEC_BRANCH_H_
55

66
#include <linux/static_key.h>
7+
#include <linux/frame.h>
78

89
#include <asm/alternative.h>
910
#include <asm/alternative-asm.h>
1011
#include <asm/cpufeatures.h>
1112
#include <asm/msr-index.h>
12-
13-
/*
14-
* This should be used immediately before a retpoline alternative. It tells
15-
* objtool where the retpolines are so that it can make sense of the control
16-
* flow by just reading the original instruction(s) and ignoring the
17-
* alternatives.
18-
*/
19-
#define ANNOTATE_NOSPEC_ALTERNATIVE \
20-
ANNOTATE_IGNORE_ALTERNATIVE
13+
#include <asm/unwind_hints.h>
2114

2215
/*
2316
* Fill the CPU return stack buffer.
@@ -46,21 +39,25 @@
4639
#define __FILL_RETURN_BUFFER(reg, nr, sp) \
4740
mov $(nr/2), reg; \
4841
771: \
42+
ANNOTATE_INTRA_FUNCTION_CALL; \
4943
call 772f; \
5044
773: /* speculation trap */ \
45+
UNWIND_HINT_EMPTY; \
5146
pause; \
5247
lfence; \
5348
jmp 773b; \
5449
772: \
50+
ANNOTATE_INTRA_FUNCTION_CALL; \
5551
call 774f; \
5652
775: /* speculation trap */ \
53+
UNWIND_HINT_EMPTY; \
5754
pause; \
5855
lfence; \
5956
jmp 775b; \
6057
774: \
58+
add $(BITS_PER_LONG/8) * 2, sp; \
6159
dec reg; \
62-
jnz 771b; \
63-
add $(BITS_PER_LONG/8) * nr, sp;
60+
jnz 771b;
6461

6562
#ifdef __ASSEMBLY__
6663

@@ -76,58 +73,28 @@
7673
.popsection
7774
.endm
7875

79-
/*
80-
* These are the bare retpoline primitives for indirect jmp and call.
81-
* Do not use these directly; they only exist to make the ALTERNATIVE
82-
* invocation below less ugly.
83-
*/
84-
.macro RETPOLINE_JMP reg:req
85-
call .Ldo_rop_\@
86-
.Lspec_trap_\@:
87-
pause
88-
lfence
89-
jmp .Lspec_trap_\@
90-
.Ldo_rop_\@:
91-
mov \reg, (%_ASM_SP)
92-
ret
93-
.endm
94-
95-
/*
96-
* This is a wrapper around RETPOLINE_JMP so the called function in reg
97-
* returns to the instruction after the macro.
98-
*/
99-
.macro RETPOLINE_CALL reg:req
100-
jmp .Ldo_call_\@
101-
.Ldo_retpoline_jmp_\@:
102-
RETPOLINE_JMP \reg
103-
.Ldo_call_\@:
104-
call .Ldo_retpoline_jmp_\@
105-
.endm
106-
10776
/*
10877
* JMP_NOSPEC and CALL_NOSPEC macros can be used instead of a simple
10978
* indirect jmp/call which may be susceptible to the Spectre variant 2
11079
* attack.
11180
*/
11281
.macro JMP_NOSPEC reg:req
11382
#ifdef CONFIG_RETPOLINE
114-
ANNOTATE_NOSPEC_ALTERNATIVE
115-
ALTERNATIVE_2 __stringify(ANNOTATE_RETPOLINE_SAFE; jmp *\reg), \
116-
__stringify(RETPOLINE_JMP \reg), X86_FEATURE_RETPOLINE, \
117-
__stringify(lfence; ANNOTATE_RETPOLINE_SAFE; jmp *\reg), X86_FEATURE_RETPOLINE_AMD
83+
ALTERNATIVE_2 __stringify(ANNOTATE_RETPOLINE_SAFE; jmp *%\reg), \
84+
__stringify(jmp __x86_retpoline_\reg), X86_FEATURE_RETPOLINE, \
85+
__stringify(lfence; ANNOTATE_RETPOLINE_SAFE; jmp *%\reg), X86_FEATURE_RETPOLINE_AMD
11886
#else
119-
jmp *\reg
87+
jmp *%\reg
12088
#endif
12189
.endm
12290

12391
.macro CALL_NOSPEC reg:req
12492
#ifdef CONFIG_RETPOLINE
125-
ANNOTATE_NOSPEC_ALTERNATIVE
126-
ALTERNATIVE_2 __stringify(ANNOTATE_RETPOLINE_SAFE; call *\reg), \
127-
__stringify(RETPOLINE_CALL \reg), X86_FEATURE_RETPOLINE,\
128-
__stringify(lfence; ANNOTATE_RETPOLINE_SAFE; call *\reg), X86_FEATURE_RETPOLINE_AMD
93+
ALTERNATIVE_2 __stringify(ANNOTATE_RETPOLINE_SAFE; call *%\reg), \
94+
__stringify(call __x86_retpoline_\reg), X86_FEATURE_RETPOLINE, \
95+
__stringify(lfence; ANNOTATE_RETPOLINE_SAFE; call *%\reg), X86_FEATURE_RETPOLINE_AMD
12996
#else
130-
call *\reg
97+
call *%\reg
13198
#endif
13299
.endm
133100

@@ -137,10 +104,8 @@
137104
*/
138105
.macro FILL_RETURN_BUFFER reg:req nr:req ftr:req
139106
#ifdef CONFIG_RETPOLINE
140-
ANNOTATE_NOSPEC_ALTERNATIVE
141-
ALTERNATIVE "jmp .Lskip_rsb_\@", \
142-
__stringify(__FILL_RETURN_BUFFER(\reg,\nr,%_ASM_SP)) \
143-
\ftr
107+
ALTERNATIVE "jmp .Lskip_rsb_\@", "", \ftr
108+
__FILL_RETURN_BUFFER(\reg,\nr,%_ASM_SP)
144109
.Lskip_rsb_\@:
145110
#endif
146111
.endm
@@ -161,16 +126,16 @@
161126
* which is ensured when CONFIG_RETPOLINE is defined.
162127
*/
163128
# define CALL_NOSPEC \
164-
ANNOTATE_NOSPEC_ALTERNATIVE \
165129
ALTERNATIVE_2( \
166130
ANNOTATE_RETPOLINE_SAFE \
167131
"call *%[thunk_target]\n", \
168-
"call __x86_indirect_thunk_%V[thunk_target]\n", \
132+
"call __x86_retpoline_%V[thunk_target]\n", \
169133
X86_FEATURE_RETPOLINE, \
170134
"lfence;\n" \
171135
ANNOTATE_RETPOLINE_SAFE \
172136
"call *%[thunk_target]\n", \
173137
X86_FEATURE_RETPOLINE_AMD)
138+
174139
# define THUNK_TARGET(addr) [thunk_target] "r" (addr)
175140

176141
#else /* CONFIG_X86_32 */
@@ -180,7 +145,6 @@
180145
* here, anyway.
181146
*/
182147
# define CALL_NOSPEC \
183-
ANNOTATE_NOSPEC_ALTERNATIVE \
184148
ALTERNATIVE_2( \
185149
ANNOTATE_RETPOLINE_SAFE \
186150
"call *%[thunk_target]\n", \

arch/x86/include/asm/orc_types.h

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -58,8 +58,7 @@
5858
#define ORC_TYPE_CALL 0
5959
#define ORC_TYPE_REGS 1
6060
#define ORC_TYPE_REGS_IRET 2
61-
#define UNWIND_HINT_TYPE_SAVE 3
62-
#define UNWIND_HINT_TYPE_RESTORE 4
61+
#define UNWIND_HINT_TYPE_RET_OFFSET 3
6362

6463
#ifndef __ASSEMBLY__
6564
/*

0 commit comments

Comments
 (0)