6 files changed, +15 −17
@@ -32,7 +32,7 @@
 ENTRY(__cpu_soft_restart)
 	/* Clear sctlr_el1 flags. */
 	mrs	x12, sctlr_el1
-	ldr	x13, =SCTLR_ELx_FLAGS
+	mov_q	x13, SCTLR_ELx_FLAGS
 	bic	x12, x12, x13
 	pre_disable_mmu_workaround
 	msr	sctlr_el1, x12
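This first hunk sets the pattern for the whole commit. `ldr xN, =expr` makes the assembler stash the 64-bit value in a nearby literal pool and emit a PC-relative load; `mov_q` (a macro from arch/arm64/include/asm/assembler.h) instead builds the constant in the register with two to four `movz`/`movk` instructions, avoiding the data access entirely. A rough sketch of the two forms, assuming a constant that needs all four 16-bit chunks (the real macro picks a shorter sequence when it can):

	// Literal-pool form: one load, plus eight bytes of data that the
	// assembler emits at the next .ltorg (or at the end of the section).
	ldr	x13, =SCTLR_ELx_FLAGS		// PC-relative load from the pool

	// mov_q form: the value is synthesised inline, no memory access.
	// (Sketch only; the macro in assembler.h selects 2-4 instructions
	// based on the magnitude and sign of the operand.)
	movz	x13, :abs_g3:SCTLR_ELx_FLAGS	// bits [63:48]
	movk	x13, :abs_g2_nc:SCTLR_ELx_FLAGS	// bits [47:32]
	movk	x13, :abs_g1_nc:SCTLR_ELx_FLAGS	// bits [31:16]
	movk	x13, :abs_g0_nc:SCTLR_ELx_FLAGS	// bits [15:0]

Avoiding the load matters in paths like this one, which run while the MMU is being turned off and where a data access to the pool is easy to get wrong; it also empties the literal pool itself, which is why later hunks can delete the `.ltorg` directives.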
@@ -457,17 +457,19 @@ SYM_FUNC_START_LOCAL(__primary_switched)
 	b	start_kernel
 SYM_FUNC_END(__primary_switched)
 
+	.pushsection ".rodata", "a"
+SYM_DATA_START(kimage_vaddr)
+	.quad	_text - TEXT_OFFSET
+SYM_DATA_END(kimage_vaddr)
+EXPORT_SYMBOL(kimage_vaddr)
+	.popsection
+
 /*
  * end early head section, begin head code that is also used for
  * hotplug and needs to have the same protections as the text region
  */
 	.section ".idmap.text", "awx"
 
-SYM_DATA_START(kimage_vaddr)
-	.quad	_text - TEXT_OFFSET
-SYM_DATA_END(kimage_vaddr)
-EXPORT_SYMBOL(kimage_vaddr)
-
 /*
  * If we're fortunate enough to boot at EL2, ensure that the world is
  * sane before dropping to EL1.
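Unlike the other hunks, this one relocates data rather than swapping an instruction: `kimage_vaddr` is a constant value exported to the rest of the kernel, and it previously sat inside ".idmap.text", which the `.section` directive above maps with "awx" (allocatable, writable, executable) flags. The `.pushsection`/`.popsection` pair moves it into ".rodata" with the plain allocatable "a" flag, so the value ends up read-only and the idmap section holds only code; `.popsection` then restores whatever section was current, leaving the surrounding assembly unaffected.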
@@ -63,7 +63,7 @@ el1_sync:
 	beq	9f				// Nothing to reset!
 
 	/* Someone called kvm_call_hyp() against the hyp-stub... */
-	ldr	x0, =HVC_STUB_ERR
+	mov_q	x0, HVC_STUB_ERR
 	eret
 
 9:	mov	x0, xzr
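The hyp-stub's error path gets the same treatment. One nicety of `mov_q` is that the expansion is sized per constant: HVC_STUB_ERR is a small magic value (0xbadca11 in kernels of this era, though the definition is not visible in this diff), so the macro emits just two instructions here rather than the worst-case four.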
@@ -41,7 +41,7 @@ ENTRY(arm64_relocate_new_kernel)
 	cmp	x0, #CurrentEL_EL2
 	b.ne	1f
 	mrs	x0, sctlr_el2
-	ldr	x1, =SCTLR_ELx_FLAGS
+	mov_q	x1, SCTLR_ELx_FLAGS
 	bic	x0, x0, x1
 	pre_disable_mmu_workaround
 	msr	sctlr_el2, x0
@@ -113,8 +113,6 @@ ENTRY(arm64_relocate_new_kernel)
 
 ENDPROC(arm64_relocate_new_kernel)
 
-.ltorg
-
 .align 3	/* To keep the 64-bit values below naturally aligned. */
 
 .Lcopy_end:
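The second hunk here is a consequence of the first: `.ltorg` tells the assembler to flush any pending literal-pool entries at that point, and it existed only so the `ldr x1, =SCTLR_ELx_FLAGS` literal had somewhere to live within reach of the load (an LDR-literal can address roughly ±1 MiB). With the last `ldr =` in the file converted, the pool is empty and the directive, plus its trailing blank line, can go. Illustratively:

	ldr	x1, =SCTLR_ELx_FLAGS	// load needs a pool entry in range
	// ... rest of the routine ...
.ltorg					// pending literals are emitted here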
@@ -60,7 +60,7 @@ alternative_else_nop_endif
 	msr	ttbr0_el2, x4
 
 	mrs	x4, tcr_el1
-	ldr	x5, =TCR_EL2_MASK
+	mov_q	x5, TCR_EL2_MASK
 	and	x4, x4, x5
 	mov	x5, #TCR_EL2_RES1
 	orr	x4, x4, x5
@@ -102,7 +102,7 @@ alternative_else_nop_endif
 	 * as well as the EE bit on BE. Drop the A flag since the compiler
 	 * is allowed to generate unaligned accesses.
 	 */
-	ldr	x4, =(SCTLR_EL2_RES1 | (SCTLR_ELx_FLAGS & ~SCTLR_ELx_A))
+	mov_q	x4, (SCTLR_EL2_RES1 | (SCTLR_ELx_FLAGS & ~SCTLR_ELx_A))
 CPU_BE(	orr	x4, x4, #SCTLR_ELx_EE)
 	msr	sctlr_el2, x4
 	isb
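Worth noting in the second hunk above: `mov_q` accepts any expression the assembler can fold to a constant, so the composite mask (SCTLR_EL2_RES1 | (SCTLR_ELx_FLAGS & ~SCTLR_ELx_A)) converts without being pre-computed, exactly as it did when it sat in a literal pool. The outer parentheses keep the substituted expression intact, since the macro argument is pasted textually into each `movz`/`movk` operand.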
@@ -142,7 +142,7 @@ reset:
 	 * case we coming via HVC_SOFT_RESTART.
 	 */
 	mrs	x5, sctlr_el2
-	ldr	x6, =SCTLR_ELx_FLAGS
+	mov_q	x6, SCTLR_ELx_FLAGS
 	bic	x5, x5, x6		// Clear SCTL_M and etc
 	pre_disable_mmu_workaround
 	msr	sctlr_el2, x5
@@ -155,11 +155,9 @@ reset:
 	eret
 
 1:	/* Bad stub call */
-	ldr	x0, =HVC_STUB_ERR
+	mov_q	x0, HVC_STUB_ERR
 	eret
 
 SYM_CODE_END(__kvm_handle_stub_hvc)
 
-	.ltorg
-
 	.popsection
@@ -411,7 +411,7 @@ SYM_FUNC_START(__cpu_setup)
 	 * Set/prepare TCR and TTBR. We use 512GB (39-bit) address range for
 	 * both user and kernel.
 	 */
-	ldr	x10, =TCR_TxSZ(VA_BITS) | TCR_CACHE_FLAGS | TCR_SMP_FLAGS | \
+	mov_q	x10, TCR_TxSZ(VA_BITS) | TCR_CACHE_FLAGS | TCR_SMP_FLAGS | \
 			TCR_TG_FLAGS | TCR_KASLR_FLAGS | TCR_ASID16 | \
 			TCR_TBI0 | TCR_A1 | TCR_KASAN_FLAGS
 	tcr_clear_errata_bits x10, x9, x5
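The final hunk shows the macro coping with a multi-line operand: the backslash continuations are joined before `mov_q` ever sees the expression, so the OR of TCR flags carries over unchanged. Taken together, the six files net out at +15 −17: each literal-pool `ldr` is replaced one-for-one, the `kimage_vaddr` move adds two net lines, and the two now-pointless `.ltorg` directives with their blank lines account for the overall shrink.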