|
213 | 213 | *
|
214 | 214 | * Be careful: we may have nonzero SS base due to ESPFIX.
|
215 | 215 | */
|
216 |
| - andl $0x0000ffff, 3*4(%esp) |
| 216 | + andl $0x0000ffff, 4*4(%esp) |
217 | 217 |
|
218 | 218 | #ifdef CONFIG_VM86
|
219 |
| - testl $X86_EFLAGS_VM, 4*4(%esp) |
| 219 | + testl $X86_EFLAGS_VM, 5*4(%esp) |
220 | 220 | jnz .Lfrom_usermode_no_fixup_\@
|
221 | 221 | #endif
|
222 |
| - testl $USER_SEGMENT_RPL_MASK, 3*4(%esp) |
| 222 | + testl $USER_SEGMENT_RPL_MASK, 4*4(%esp) |
223 | 223 | jnz .Lfrom_usermode_no_fixup_\@
|
224 | 224 |
|
225 |
| - orl $CS_FROM_KERNEL, 3*4(%esp) |
| 225 | + orl $CS_FROM_KERNEL, 4*4(%esp) |
226 | 226 |
|
227 | 227 | /*
|
228 | 228 | * When we're here from kernel mode, the (exception) stack looks like:
|
229 | 229 | *
|
230 |
| - * 5*4(%esp) - <previous context> |
231 |
| - * 4*4(%esp) - flags |
232 |
| - * 3*4(%esp) - cs |
233 |
| - * 2*4(%esp) - ip |
234 |
| - * 1*4(%esp) - orig_eax |
235 |
| - * 0*4(%esp) - gs / function |
| 230 | + * 6*4(%esp) - <previous context> |
| 231 | + * 5*4(%esp) - flags |
| 232 | + * 4*4(%esp) - cs |
| 233 | + * 3*4(%esp) - ip |
| 234 | + * 2*4(%esp) - orig_eax |
| 235 | + * 1*4(%esp) - gs / function |
| 236 | + * 0*4(%esp) - fs |
236 | 237 | *
|
237 | 238 | * Let's build a 5-entry IRET frame after that, such that struct pt_regs
|
238 | 239 | * is complete and in particular regs->sp is correct. This gives us
|
239 |
| - * the original 5 enties as gap: |
| 240 | + * the original 6 entries as gap: |
240 | 241 | *
|
241 |
| - * 12*4(%esp) - <previous context> |
242 |
| - * 11*4(%esp) - gap / flags |
243 |
| - * 10*4(%esp) - gap / cs |
244 |
| - * 9*4(%esp) - gap / ip |
245 |
| - * 8*4(%esp) - gap / orig_eax |
246 |
| - * 7*4(%esp) - gap / gs / function |
247 |
| - * 6*4(%esp) - ss |
248 |
| - * 5*4(%esp) - sp |
249 |
| - * 4*4(%esp) - flags |
250 |
| - * 3*4(%esp) - cs |
251 |
| - * 2*4(%esp) - ip |
252 |
| - * 1*4(%esp) - orig_eax |
253 |
| - * 0*4(%esp) - gs / function |
| 242 | + * 14*4(%esp) - <previous context> |
| 243 | + * 13*4(%esp) - gap / flags |
| 244 | + * 12*4(%esp) - gap / cs |
| 245 | + * 11*4(%esp) - gap / ip |
| 246 | + * 10*4(%esp) - gap / orig_eax |
| 247 | + * 9*4(%esp) - gap / gs / function |
| 248 | + * 8*4(%esp) - gap / fs |
| 249 | + * 7*4(%esp) - ss |
| 250 | + * 6*4(%esp) - sp |
| 251 | + * 5*4(%esp) - flags |
| 252 | + * 4*4(%esp) - cs |
| 253 | + * 3*4(%esp) - ip |
| 254 | + * 2*4(%esp) - orig_eax |
| 255 | + * 1*4(%esp) - gs / function |
| 256 | + * 0*4(%esp) - fs |
254 | 257 | */
|
255 | 258 |
|
256 | 259 | pushl %ss # ss
|
257 | 260 | pushl %esp # sp (points at ss)
|
258 |
| - addl $6*4, (%esp) # point sp back at the previous context |
259 |
| - pushl 6*4(%esp) # flags |
260 |
| - pushl 6*4(%esp) # cs |
261 |
| - pushl 6*4(%esp) # ip |
262 |
| - pushl 6*4(%esp) # orig_eax |
263 |
| - pushl 6*4(%esp) # gs / function |
| 261 | + addl $7*4, (%esp) # point sp back at the previous context |
| 262 | + pushl 7*4(%esp) # flags |
| 263 | + pushl 7*4(%esp) # cs |
| 264 | + pushl 7*4(%esp) # ip |
| 265 | + pushl 7*4(%esp) # orig_eax |
| 266 | + pushl 7*4(%esp) # gs / function |
| 267 | + pushl 7*4(%esp) # fs |
264 | 268 | .Lfrom_usermode_no_fixup_\@:
|
265 | 269 | .endm
|
266 | 270 |
|
|
308 | 312 | .if \skip_gs == 0
|
309 | 313 | PUSH_GS
|
310 | 314 | .endif
|
311 |
| - FIXUP_FRAME |
312 | 315 | pushl %fs
|
| 316 | + FIXUP_FRAME |
313 | 317 | pushl %es
|
314 | 318 | pushl %ds
|
315 | 319 | pushl \pt_regs_ax
|
|
0 commit comments