@@ -483,110 +483,7 @@ SYM_FUNC_START_LOCAL_NOALIGN(.Lrelocated)
	jmp	*%rax
SYM_FUNC_END(.Lrelocated)

-/*
- * This is the 32-bit trampoline that will be copied over to low memory. It
- * will be called using the ordinary 64-bit calling convention from code
- * running in 64-bit mode.
- *
- * Return address is at the top of the stack (might be above 4G).
- * The first argument (EDI) contains the address of the temporary PGD level
- * page table in 32-bit addressable memory which will be programmed into
- * register CR3.
- */
-	.section ".rodata", "a", @progbits
-SYM_CODE_START(trampoline_32bit_src)
-	/*
-	 * Preserve callee save 64-bit registers on the stack: this is
-	 * necessary because the architecture does not guarantee that GPRs will
-	 * retain their full 64-bit values across a 32-bit mode switch.
-	 */
-	pushq	%r15
-	pushq	%r14
-	pushq	%r13
-	pushq	%r12
-	pushq	%rbp
-	pushq	%rbx
-
-	/* Preserve top half of RSP in a legacy mode GPR to avoid truncation */
-	movq	%rsp, %rbx
-	shrq	$32, %rbx
-
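The SHRQ here pairs with the SHLQ/ORQ at .Lret below: the high halves of the GPRs are not architecturally preserved across the excursion through compatibility mode, but a 32-bit value kept in EBX is. A minimal C sketch of the split-and-rejoin invariant, with a made-up stack pointer purely for illustration:

#include <stdint.h>
#include <assert.h>

int main(void)
{
	uint64_t rsp = 0x123456789abcull;	/* a stack pointer above 4G */

	/* movq %rsp, %rbx; shrq $32, %rbx: stash the high half */
	uint32_t hi = (uint32_t)(rsp >> 32);

	/* ...32-bit code runs; only the low 32 bits are reliable... */
	uint32_t lo = (uint32_t)rsp;

	/* .Lret: shlq $32, %rbx; orq %rbx, %rsp: rebuild the full pointer */
	uint64_t restored = ((uint64_t)hi << 32) | lo;

	assert(restored == rsp);
	return 0;
}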
-	/* Switch to compatibility mode (CS.L = 0 CS.D = 1) via far return */
-	pushq	$__KERNEL32_CS
-	leaq	0f(%rip), %rax
-	pushq	%rax
-	lretq
-
-	/*
-	 * The 32-bit code below will do a far jump back to long mode and end
-	 * up here after reconfiguring the number of paging levels. First, the
-	 * stack pointer needs to be restored to its full 64-bit value before
-	 * the callee save register contents can be popped from the stack.
-	 */
-.Lret:
-	shlq	$32, %rbx
-	orq	%rbx, %rsp
-
-	/* Restore the preserved 64-bit registers */
-	popq	%rbx
-	popq	%rbp
-	popq	%r12
-	popq	%r13
-	popq	%r14
-	popq	%r15
-	retq
-
	.code32
-0:
-	/* Disable paging */
-	movl	%cr0, %eax
-	btrl	$X86_CR0_PG_BIT, %eax
-	movl	%eax, %cr0
-
-	/* Point CR3 to the trampoline's new top level page table */
-	movl	%edi, %cr3
-
-	/* Set EFER.LME=1 as a precaution in case the hypervisor pulls the rug */
-	movl	$MSR_EFER, %ecx
-	rdmsr
-	btsl	$_EFER_LME, %eax
-	/* Avoid writing EFER if no change was made (for TDX guest) */
-	jc	1f
-	wrmsr
-1:
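The JC works because BTSL copies the bit's previous value into the carry flag, so the WRMSR is only reached when LME was actually clear. The same logic in C, with hypothetical rdmsr()/wrmsr() helpers named after the instructions (not a real API here):

#include <stdint.h>

#define MSR_EFER	0xc0000080
#define EFER_LME	(1ULL << 8)	/* long mode enable */

/* hypothetical helpers standing in for the RDMSR/WRMSR instructions */
extern uint64_t rdmsr(uint32_t msr);
extern void wrmsr(uint32_t msr, uint64_t val);

static void set_efer_lme(void)
{
	uint64_t efer = rdmsr(MSR_EFER);

	/* jc 1f: skip the write when nothing would change, so a
	 * TDX guest never performs the disallowed EFER write */
	if (!(efer & EFER_LME))
		wrmsr(MSR_EFER, efer | EFER_LME);
}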
-	/* Toggle CR4.LA57 */
-	movl	%cr4, %eax
-	btcl	$X86_CR4_LA57_BIT, %eax
-	movl	%eax, %cr4
-
-	/* Enable paging again. */
-	movl	%cr0, %eax
-	btsl	$X86_CR0_PG_BIT, %eax
-	movl	%eax, %cr0
-
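The control-register traffic above reduces to clearing, flipping, and setting single bits (BTRL, BTCL, BTSL). A compact C rendering of the whole 32-bit stretch, assuming hypothetical read_cr*/write_cr* accessors and an invented function name, offered only as a sketch:

#include <stdint.h>

#define X86_CR0_PG_BIT		31
#define X86_CR4_LA57_BIT	12

/* hypothetical accessors for the control registers */
extern uint32_t read_cr0(void), read_cr4(void);
extern void write_cr0(uint32_t), write_cr4(uint32_t), write_cr3(uint32_t);

static void switch_paging_levels(uint32_t pgd_lo)	/* pgd_lo: the EDI argument */
{
	write_cr0(read_cr0() & ~(1U << X86_CR0_PG_BIT));	/* btrl: paging off */
	write_cr3(pgd_lo);					/* new top-level table */
	write_cr4(read_cr4() ^ (1U << X86_CR4_LA57_BIT));	/* btcl: flip LA57 */
	write_cr0(read_cr0() | (1U << X86_CR0_PG_BIT));		/* btsl: paging on */
}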
-	/*
-	 * Return to the 64-bit calling code using LJMP rather than LRET, to
-	 * avoid the need for a 32-bit addressable stack. The destination
-	 * address will be adjusted after the template code is copied into a
-	 * 32-bit addressable buffer.
-	 */
-.Ljmp:	ljmpl	$__KERNEL_CS, $(.Lret - trampoline_32bit_src)
-SYM_CODE_END(trampoline_32bit_src)
-
-/*
- * This symbol is placed right after trampoline_32bit_src() so its address can
- * be used to infer the size of the trampoline code.
- */
-SYM_DATA(trampoline_ljmp_imm_offset, .word	.Ljmp + 1 - trampoline_32bit_src)
-
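Per the comments above, the caller copies the template into a 32-bit addressable buffer and fixes up the LJMP destination through trampoline_ljmp_imm_offset before using it. A hedged sketch of that fixup (the helper and buffer handling are invented for illustration; the real caller lives in C elsewhere in the decompressor):

#include <stdint.h>
#include <string.h>

extern const char trampoline_32bit_src[];	/* the template above */
extern const uint16_t trampoline_ljmp_imm_offset;

/* hypothetical: copy the template below 4G and relocate the LJMP target */
static void *install_trampoline(void *buf_lo, size_t code_size)
{
	memcpy(buf_lo, trampoline_32bit_src, code_size);

	/* The LJMP immediate was assembled as (.Lret - trampoline_32bit_src);
	 * adding the buffer's address turns it into an absolute target. */
	*(uint32_t *)((char *)buf_lo + trampoline_ljmp_imm_offset) +=
		(uintptr_t)buf_lo;

	return buf_lo;
}

A caller would then invoke the buffer as a void (*)(unsigned long) with the new PGD address, matching the EDI convention described in the header comment.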
-/*
- * The trampoline code has a size limit.
- * Make sure we fail to compile if the trampoline code grows
- * beyond TRAMPOLINE_32BIT_CODE_SIZE bytes.
- */
-	.org	trampoline_32bit_src + TRAMPOLINE_32BIT_CODE_SIZE
-
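The .org directive only assembles if the location counter does not have to move backwards, so it doubles as a build-time size assertion. For comparison, the closest C idiom (all names and the size value are illustrative):

/* C equivalent of the .org trick: fail the build, not the boot,
 * when a blob outgrows its budget */
#define TRAMPOLINE_32BIT_CODE_SIZE	0x80

static const unsigned char trampoline_blob[] = { /* ... code bytes ... */ 0x90 };

_Static_assert(sizeof(trampoline_blob) <= TRAMPOLINE_32BIT_CODE_SIZE,
	       "trampoline code grew beyond TRAMPOLINE_32BIT_CODE_SIZE");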
-	.text
SYM_FUNC_START_LOCAL_NOALIGN(.Lno_longmode)
	/* This isn't an x86-64 CPU, so hang intentionally, we cannot continue */
1: