Skip to content

Commit 4e3aa92

Browse files
author
Peter Zijlstra
committed
x86/nospec: Unwreck the RSB stuffing
Commit 2b12993 ("x86/speculation: Add RSB VM Exit protections") made a
right mess of the RSB stuffing, rewrite the whole thing to not suck.

Thanks to Andrew for the enlightening comment about Post-Barrier RSB
things so we can make this code less magical.

Cc: [email protected]
Signed-off-by: Peter Zijlstra (Intel) <[email protected]>
Link: https://lkml.kernel.org/r/[email protected]
1 parent 7df5488 commit 4e3aa92

File tree

1 file changed

+39
-41
lines changed

1 file changed

+39
-41
lines changed

arch/x86/include/asm/nospec-branch.h

Lines changed: 39 additions & 41 deletions
Original file line numberDiff line numberDiff line change
@@ -35,33 +35,44 @@
 #define RSB_CLEAR_LOOPS		32	/* To forcibly overwrite all entries */
 
 /*
+ * Common helper for __FILL_RETURN_BUFFER and __FILL_ONE_RETURN.
+ */
+#define __FILL_RETURN_SLOT			\
+	ANNOTATE_INTRA_FUNCTION_CALL;		\
+	call	772f;				\
+	int3;					\
+772:
+
+/*
+ * Stuff the entire RSB.
+ *
  * Google experimented with loop-unrolling and this turned out to be
  * the optimal version - two calls, each with their own speculation
  * trap should their return address end up getting used, in a loop.
  */
-#define __FILL_RETURN_BUFFER(reg, nr, sp)	\
-	mov	$(nr/2), reg;			\
-771:						\
-	ANNOTATE_INTRA_FUNCTION_CALL;		\
-	call	772f;				\
-773:	/* speculation trap */			\
-	UNWIND_HINT_EMPTY;			\
-	pause;					\
-	lfence;					\
-	jmp	773b;				\
-772:						\
-	ANNOTATE_INTRA_FUNCTION_CALL;		\
-	call	774f;				\
-775:	/* speculation trap */			\
-	UNWIND_HINT_EMPTY;			\
-	pause;					\
-	lfence;					\
-	jmp	775b;				\
-774:						\
-	add	$(BITS_PER_LONG/8) * 2, sp;	\
-	dec	reg;				\
-	jnz	771b;				\
-	/* barrier for jnz misprediction */	\
+#define __FILL_RETURN_BUFFER(reg, nr)			\
+	mov	$(nr/2), reg;				\
+771:							\
+	__FILL_RETURN_SLOT				\
+	__FILL_RETURN_SLOT				\
+	add	$(BITS_PER_LONG/8) * 2, %_ASM_SP;	\
+	dec	reg;					\
+	jnz	771b;					\
+	/* barrier for jnz misprediction */		\
+	lfence;
+
+/*
+ * Stuff a single RSB slot.
+ *
+ * To mitigate Post-Barrier RSB speculation, one CALL instruction must be
+ * forced to retire before letting a RET instruction execute.
+ *
+ * On PBRSB-vulnerable CPUs, it is not safe for a RET to be executed
+ * before this point.
+ */
+#define __FILL_ONE_RETURN				\
+	__FILL_RETURN_SLOT				\
+	add	$(BITS_PER_LONG/8), %_ASM_SP;		\
 	lfence;
 
 #ifdef __ASSEMBLY__
@@ -132,28 +143,15 @@
 #endif
 .endm
 
-.macro ISSUE_UNBALANCED_RET_GUARD
-	ANNOTATE_INTRA_FUNCTION_CALL
-	call .Lunbalanced_ret_guard_\@
-	int3
-.Lunbalanced_ret_guard_\@:
-	add $(BITS_PER_LONG/8), %_ASM_SP
-	lfence
-.endm
-
 /*
  * A simpler FILL_RETURN_BUFFER macro. Don't make people use the CPP
  * monstrosity above, manually.
  */
-.macro FILL_RETURN_BUFFER reg:req nr:req ftr:req ftr2
-.ifb \ftr2
-	ALTERNATIVE "jmp .Lskip_rsb_\@", "", \ftr
-.else
-	ALTERNATIVE_2 "jmp .Lskip_rsb_\@", "", \ftr, "jmp .Lunbalanced_\@", \ftr2
-.endif
-	__FILL_RETURN_BUFFER(\reg,\nr,%_ASM_SP)
-.Lunbalanced_\@:
-	ISSUE_UNBALANCED_RET_GUARD
+.macro FILL_RETURN_BUFFER reg:req nr:req ftr:req ftr2=ALT_NOT(X86_FEATURE_ALWAYS)
+	ALTERNATIVE_2 "jmp .Lskip_rsb_\@", \
+		__stringify(__FILL_RETURN_BUFFER(\reg,\nr)), \ftr, \
+		__stringify(__FILL_ONE_RETURN), \ftr2
+
 .Lskip_rsb_\@:
 .endm

0 commit comments

Comments
 (0)