@@ -35,33 +35,56 @@
 #define RSB_CLEAR_LOOPS 32 /* To forcibly overwrite all entries */
 
 /*
+ * Common helper for __FILL_RETURN_BUFFER and __FILL_ONE_RETURN.
+ */
+#define __FILL_RETURN_SLOT \
+ ANNOTATE_INTRA_FUNCTION_CALL; \
+ call 772f; \
+ int3; \
+772:
+
+/*
+ * Stuff the entire RSB.
+ *
  * Google experimented with loop-unrolling and this turned out to be
  * the optimal version - two calls, each with their own speculation
  * trap should their return address end up getting used, in a loop.
  */
-#define __FILL_RETURN_BUFFER(reg, nr, sp) \
- mov $(nr/2), reg; \
-771: \
- ANNOTATE_INTRA_FUNCTION_CALL; \
- call 772f; \
-773: /* speculation trap */ \
- UNWIND_HINT_EMPTY; \
- pause; \
- lfence; \
- jmp 773b; \
-772: \
- ANNOTATE_INTRA_FUNCTION_CALL; \
- call 774f; \
-775: /* speculation trap */ \
- UNWIND_HINT_EMPTY; \
- pause; \
- lfence; \
- jmp 775b; \
-774: \
- add $(BITS_PER_LONG/8) * 2, sp; \
- dec reg; \
- jnz 771b; \
- /* barrier for jnz misprediction */ \
+#ifdef CONFIG_X86_64
+#define __FILL_RETURN_BUFFER(reg, nr) \
+ mov $(nr/2), reg; \
+771: \
+ __FILL_RETURN_SLOT \
+ __FILL_RETURN_SLOT \
+ add $(BITS_PER_LONG/8) * 2, %_ASM_SP; \
+ dec reg; \
+ jnz 771b; \
+ /* barrier for jnz misprediction */ \
+ lfence;
+#else
+/*
+ * i386 doesn't unconditionally have LFENCE, as such it can't
+ * do a loop.
+ */
+#define __FILL_RETURN_BUFFER(reg, nr) \
+ .rept nr; \
+ __FILL_RETURN_SLOT; \
+ .endr; \
+ add $(BITS_PER_LONG/8) * nr, %_ASM_SP;
+#endif
+
+/*
+ * Stuff a single RSB slot.
+ *
+ * To mitigate Post-Barrier RSB speculation, one CALL instruction must be
+ * forced to retire before letting a RET instruction execute.
+ *
+ * On PBRSB-vulnerable CPUs, it is not safe for a RET to be executed
+ * before this point.
+ */
+#define __FILL_ONE_RETURN \
+ __FILL_RETURN_SLOT \
+ add $(BITS_PER_LONG/8), %_ASM_SP; \
  lfence;
 
 #ifdef __ASSEMBLY__
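
For illustration, here is roughly what the new x86_64 __FILL_RETURN_BUFFER(reg, nr) expands to for nr = 4 once __FILL_RETURN_SLOT is substituted. This is only a sketch: the %rbx register and the 1/2 local labels are made up for readability, and the ANNOTATE_INTRA_FUNCTION_CALL annotations are omitted.

        mov $2, %rbx            /* nr/2 = 2 iterations, two stuffed slots each */
771:
        call 1f                 /* the return address (the int3) becomes an RSB entry */
        int3                    /* speculation trap: a RET consuming that entry lands here */
1:
        call 2f                 /* second slot of this iteration */
        int3
2:
        add $16, %rsp           /* 2 * BITS_PER_LONG/8 bytes: discard both return addresses */
        dec %rbx
        jnz 771b
        lfence                  /* barrier for jnz misprediction */

The i386 variant emits the very same two-instruction slot nr times via .rept and fixes the stack up once at the end, precisely because it cannot rely on the trailing LFENCE that makes the loop form safe.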
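The single-slot __FILL_ONE_RETURN path is the same trick performed once. Schematically (again a sketch, assuming x86_64 where BITS_PER_LONG/8 is 8; annotations omitted):

        call 1f                 /* one CALL stuffs one RSB entry */
        int3                    /* speculation trap for that entry */
1:
        add $8, %rsp            /* drop the return address */
        lfence                  /* force the CALL to retire before any later RET */
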
@@ -132,27 +155,14 @@
 #endif
 .endm
 
-.macro ISSUE_UNBALANCED_RET_GUARD
- ANNOTATE_INTRA_FUNCTION_CALL
- call .Lunbalanced_ret_guard_\@
- int3
-.Lunbalanced_ret_guard_\@:
- add $(BITS_PER_LONG/8), %_ASM_SP
- lfence
-.endm
-
 /*
  * A simpler FILL_RETURN_BUFFER macro. Don't make people use the CPP
  * monstrosity above, manually.
  */
-.macro FILL_RETURN_BUFFER reg:req nr:req ftr:req ftr2
-.ifb \ftr2
- ALTERNATIVE "jmp .Lskip_rsb_\@", "", \ftr
-.else
- ALTERNATIVE_2 "jmp .Lskip_rsb_\@", "", \ftr, "jmp .Lunbalanced_\@", \ftr2
-.endif
- __FILL_RETURN_BUFFER(\reg,\nr,%_ASM_SP)
-.Lunbalanced_\@:
- ISSUE_UNBALANCED_RET_GUARD
+.macro FILL_RETURN_BUFFER reg:req nr:req ftr:req ftr2=ALT_NOT(X86_FEATURE_ALWAYS)
+ ALTERNATIVE_2 "jmp .Lskip_rsb_\@", \
+ __stringify(__FILL_RETURN_BUFFER(\reg,\nr)), \ftr, \
+ __stringify(__FILL_ONE_RETURN), \ftr2
+
 .Lskip_rsb_\@:
 .endm
 
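
With the .ifb/ISSUE_UNBALANCED_RET_GUARD special casing gone, a call site only names a scratch register, a loop count and the controlling feature bits. A hypothetical invocation (the feature flags here are illustrative, not mandated by this header):

        /* Stuff the full RSB when the feature bit is set: */
        FILL_RETURN_BUFFER %_ASM_CX, RSB_CLEAR_LOOPS, X86_FEATURE_RETPOLINE

        /* Additionally stuff a single slot when only the second bit is set: */
        FILL_RETURN_BUFFER %_ASM_AX, RSB_CLEAR_LOOPS, X86_FEATURE_RSB_VMEXIT, X86_FEATURE_RSB_VMEXIT_LITE

The ftr2=ALT_NOT(X86_FEATURE_ALWAYS) default makes the two-argument form just work: X86_FEATURE_ALWAYS is always set, so its negation never patches the __FILL_ONE_RETURN alternative in, and the macro degenerates to a plain skip-or-stuff ALTERNATIVE.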