
Commit fea8610

dgarske authored and danielinux committed
Reverted boot assembly back to changes from original PR #306. This version was a refactor work in progress.
1 parent 467bfc7 commit fea8610

1 file changed: +49 −178 lines


src/boot_aarch64_start.S

Lines changed: 49 additions & 178 deletions
@@ -65,185 +65,73 @@
 
 
 /* GICv2 Register Offsets */
+#define GICD_BASE 0xF9010000
 #define GICD_CTLR 0x0000
 #define GICD_TYPER 0x0004
 #define GICD_SGIR 0x0F00
 #define GICD_IGROUPRn 0x0080
+
+#define GICC_BASE 0xF9020000
 #define GICC_PMR 0x0004
 
-#ifndef USE_BUILTIN_STARTUP
-/* This is the entry function. If this is the start of a cold boot, the CPU
- * will be at the highest exception level (EL) and the CPU must be configured
- * for each of the levels down to the target EL: either EL2 for a hypervisor
- * or EL1 for a standard OS.
- *
- * Configuration only enables secure EL3 and forces all lower levels NS.
- *
- * AA64_TARGET_EL: 1 or 2
- * AA64_GICVERSION: 0- no external GIC, 2: GICv2, 3: GICv3
- * AA64_ENABLE_EL3_SMC: Enable SMC call handling in EL3
- * AA64_ENABLE_EL3_PM: Enable handling of power management (TWE, TWI)
- */
+.equ TZPCDECPROT0_SET_BASE, 0x02200804
+.equ TZPCDECPROT1_SET_BASE, 0x02200810
+.equ OCRAM_TZPC_ADDR , 0x02200000
 
+#ifndef USE_BUILTIN_STARTUP
 .section ".boot", "ax"
 .global _vector_table
 _vector_table:
-    /* If we are booted as a Linux direct boot, then X0 will have FDT */
-    mov x21, x0                 /* save ATAG/FDT address */
-
-
-
-    /* Get highest EL implemented in this CPU */
-    bl aa64_get_highest_el
-    mov x19, x0                 /* save highest EL in x19 */
+    mov x21, x0                 // read ATAG/FDT address
 
-    /* Get current EL */
-    bl aa64_get_current_el
-    mov x20, x0                 /* save current EL in x20 */
+4:  ldr x1, =_vector_table      // get start of .text in x1
+    // Read current EL
+    mrs x0, CurrentEL
+    and x0, x0, #0x0C
 
-    cmp x19, x20                /* EL is at highest? */
-    bne 3f
-    bl aa64_setup_el_highest
-
-3:  cmp x20, #0x3               /* at EL3? */
+    // EL == 3?
+    cmp x0, #12
     bne 2f
-    bl aa64_setup_el3
+3:  mrs x2, scr_el3
+    orr x2, x2, 0x0F            // scr_el3 |= NS|IRQ|FIQ|EA
+    msr scr_el3, x2
+
+    msr cptr_el3, xzr           // enable FP/SIMD
 
-2:  cmp x20, #0x1               /* EL == 1? */
+    // EL == 1?
+2:  cmp x0, #4
     beq 1f
 
-    /* EL2 Setup */
-    mov x2, #3 << 20
-    msr cptr_el2, x2            /* Disable FP/SIMD traps for EL2 */
+    // EL == 2?
+    mov x2, #3 << 20
+    msr cptr_el2, x2            /* Enable FP/SIMD */
     b 0f
 
-    /* EL1 Setup */
-1:  mov x0, #3 << 20
-    msr cpacr_el1, x0           /* Disable FP/SIMD traps for EL1 */
+1:  mov x0, #3 << 20
+    msr cpacr_el1, x0           // Enable FP/SIMD for EL1
     msr sp_el1, x1
 
-    /* Suspend slave CPUs */
-0:  mrs x3, mpidr_el1           /* read MPIDR_EL1 */
-    and x3, x3, #3              /* CPUID = MPIDR_EL1 & 0x03 */
-    cbz x3, 8f                  /* if 0, branch forward */
-7:  wfi                         /* infinite sleep */
-    b 7b
+    /* Suspend slave CPUs */
+0:  mrs x3, mpidr_el1           // read MPIDR_EL1
+    and x3, x3, #3              // CPUID = MPIDR_EL1 & 0x03
+    cbz x3, 8f                  // if 0, branch forward
+7:  wfi                         // infinite sleep
+    b 7b
 
-8:  ldr x1, =_vector_table      /* ??? get start of .text in x1 */
-    mov sp, x1                  /* XXX set stack pointer */
+8:  mov sp, x1                  // set stack pointer
 
-#ifdef CORTEX_A72
-    bl init_A72
+#ifdef CPU_A72
+    bl init_A72
 #endif
-    bl boot_entry_C             /* boot_entry_C never returns */
-    b 7b                        /* go to sleep anyhow in case. */
+    bl boot_entry_C             // boot_entry_C never returns
+    b 7b                        // go to sleep anyhow in case.
 #endif /* USE_BUILTIN_STARTUP */
 
-/* Return the highest EL implemented on this CPU in x0
- * No stack usage. No clobbers. */
-.global aa64_get_highest_el
-.type aa64_get_highest_el, @function
-aa64_get_highest_el:
-    mrs x0, ID_AA64PFR0_EL1
-    tst x0, ID_AA64PFR0_EL3_MASK
-    cbz 2f                      /* Highest is not EL3? */
-    mov x0, #0x3
-    ret
-2:  tst x0, ID_AA64PFR0_EL2_MASK
-    cbz 1f                      /* Highest is not EL2? */
-    mov x0, #0x2
-    ret
-1:  mov x0, #0x1                /* Highest is EL1 */
-    ret
-
-/* Return the current EL on this CPU in x0
- * No stack usage. No clobbers. */
-.global aa64_get_current_el
-.type aa64_get_current_el, @function
-aa64_get_current_el:
-    mrs x0, CURRENT_EL
-    tst x0, CURRENT_EL3_MASK
-    cbz 2f                      /* Current is not EL3? */
-    mov x0, #0x3
-    ret
-2:  tst x0, CURRENT_EL2_MASK
-    cbz 1f                      /* Current is not EL2? */
-    mov x0, #0x2
-    ret
-1:  tst x0, CURRENT_EL1_MASK
-    cbz 0f                      /* Current is not EL1? */
-    mov x0, #0x1
-    ret
-0:  mov x0, #0x0                /* Current is EL0 */
-    ret
-
-/* Perform chip setup when at the highest EL
- * No stack. Clobbers: x0 */
-.global aa64_setup_el_highest
-.type aa64_setup_el_highest, @function
-aa64_setup_el_highest
-#if defined(AA64_CNTFRQ)
-    /* Set the counter-timer frequency to AA64_CNTFRQ*/
-    mov x0, AA64_CNTFRQ
-    msr cntfrq_el0, x0
-#endif
-    ret
-
-/* Perform chip setup when at the EL3
- * No stack. Clobbers: x0 */
-.global aa64_setup_el3
-.type aa64_setup_el3, @function
-aa64_setup_el3
-    mrs x0, scr_el3             /* Get Secure Config Reg scr_el3 */
-    bic x0, x0, #(1 << 18)      /* EEL2 Disable Secure EL2 */
-#if !defined (AA64_ENABLE_EL3_PM)
-    bic x0, x0, #(1 << 13)      /* TWE Disable trap WFE to EL3 */
-    bic x0, x0, #(1 << 12)      /* TWI Disable trap WFI to EL3 */
-#else
-    orr x0, x0, #(1 << 13)      /* TWE Enable trap WFE to EL3 */
-    orr x0, x0, #(1 << 12)      /* TWI Enable trap WFI to EL3 */
-#endif
-    orr x0, x0, #(1 << 11)      /* ST Disable trap SEL1 acc CNTPS to EL3 */
-    orr x0, x0, #(1 << 10)      /* RW Next lower level is AArch64 */
-    orr x0, x0, #(1 << 9)       /* SIF Disable Sec Ins Fetch from NS mem */
-#if defined(AA64_TARGET_EL) && (AA64_TARGET_EL==2)
-    orr x0, x0, #(1 << 8)       /* HCE Enable Hypervisor Call HVC */
-#else
-    bic x0, x0, #(1 << 8)       /* HCE Disable Hypervisor Call HVC */
-#endif
-#if !defined(AA64_ENABLE_EL3_SMC)
-    orr x0, x0, #(1 << 7)       /* SMD Disable Secure Monitor Call SMC */
-#else
-    bic x0, x0, #(1 << 7)       /* SMD Enable Secure Monitor Call SMC */
-#endif
-    bic x0, x0, #(1 << 3)       /* EA Disable EA and SError to EL3 */
-    bic x0, x0, #(1 << 2)       /* FIQ Disable FIQ to EL3 */
-    bic x0, x0, #(1 << 1)       /* IRQ Disable IRQ to EL3 */
-    orr x0, x0, #(1 << 0)       /* NS EL0, EL1, and EL2 are NS */
-    msr scr_el3, x0             /* Set scr_el3 */
-
-    mrs x0, cptr_el3            /* Get EL3 Feature Trap Reg CPTR_EL3 */
-    bic x0, x0, #(1 << 31)      /* TCPAC Disable config traps to EL3 */
-    bic x0, x0, #(1 << 30)      /* TAM Disable AM traps to EL3 */
-    bic x0, x0, #(1 << 20)      /* TTA Disable trace traps to EL3 */
-    bic x0, x0, #(1 << 12)      /* ESM Disable SVCR traps to EL3 */
-    bic x0, x0, #(1 << 10)      /* TFP Disable FP/SIMD traps to EL3 */
-    bic x0, x0, #(1 << 20)      /* EZ Disable ZCR traps to EL3 */
-    msr cptr_el3, x0            /* Set cptr_el3 */
-
-#if defined(AA64_TARGET_EL) && (AA64_TARGET_EL==2)
-    orr x0, x0, #(1 << 8)       /* HCE Enable Hypervisor Call HVC */
-#else
-    bic x0, x0, #(1 << 8)       /* HCE Disable Hypervisor Call HVC */
-#endif
-
-    ret
-
 
 /* Initialize GIC 400 (GICv2) */
 .global gicv2_init_secure
 gicv2_init_secure:
-    ldr x0, =AA64_GICD_BASE
+    ldr x0, =GICD_BASE
     mov w9, #0x3                /* EnableGrp0 | EnableGrp1 */
     str w9, [x0, GICD_CTLR]     /* Secure GICD_CTLR */
     ldr w9, [x0, GICD_TYPER]
@@ -256,7 +144,7 @@ gicv2_init_secure:
     sub w10, w10, #0x1
     cbnz w10, 0b
 
-    ldr x1, =AA64_GICC_BASE     /* GICC_CTLR */
+    ldr x1, =GICC_BASE          /* GICC_CTLR */
     mov w0, #3                  /* EnableGrp0 | EnableGrp1 */
     str w0, [x1]
 
@@ -328,33 +216,16 @@ init_A72:
     msr vbar_el3, x1
 
 el3_state:
-    mrs x0, scr_el3             /* Get scr_el3 */
-    bic x0, x0, #(1 << 18)      /* EEL2 Disable Secure EL2 */
-#if !defined (AA64_ENABLE_EL3_PM)
-    bic x0, x0, #(1 << 13)      /* TWE Disable trap WFE to EL3 */
-    bic x0, x0, #(1 << 12)      /* TWI Disable trap WFI to EL3 */
-#else
-    orr x0, x0, #(1 << 13)      /* TWE Enable trap WFE to EL3 */
-    orr x0, x0, #(1 << 12)      /* TWI Enable trap WFI to EL3 */
-#endif
-    orr x0, x0, #(1 << 11)      /* ST Disable trap SEL1 access CNTPS to EL3 */
-    orr x0, x0, #(1 << 10)      /* RW Next lower level is AArch64 */
-    orr x0, x0, #(1 << 9)       /* SIF Disable secure ins. fetches from NS */
-#if defined(AA64_TARGET_EL) && (AA64_TARGET_EL==2)
-    orr x0, x0, #(1 << 8)       /* HCE Enable Hypervisor Call HVC */
-#else
-    bic x0, x0, #(1 << 8)       /* HCE Disable Hypervisor Call HVC */
-#endif
-#if !defined(AA64_ENABLE_EL3_SMC)
-    orr x0, x0, #(1 << 7)       /* SMD Disable Secure Monitor Call SMC */
-#else
-    bic x0, x0, #(1 << 7)       /* SMD Enable Secure Monitor Call SMC */
-#endif
-    orr x0, x0, #(1 << 3)       /* EA Enable EA and SError to EL3 for now */
-    orr x0, x0, #(1 << 2)       /* FIQ Enable FIQ to EL3 for now */
-    orr x0, x0, #(1 << 1)       /* IRQ Enable IRQ to EL3 for now */
-    orr x0, x0, #(1 << 0)       /* NS EL0, EL1, and EL2 are NS */
-    msr scr_el3, x0             /* Set scr_el3 */
+    mrs x0, scr_el3             /* scr_el3 config */
+    bic x0, x0, #(1 << 13)      /* Trap WFE instruciton to EL3 off */
+    bic x0, x0, #(1 << 12)      /* Traps TWI ins to EL3 off */
+    bic x0, x0, #(1 << 11)      /* Traps EL1 access to physical secure timer to EL3 on */
+    orr x0, x0, #(1 << 10)      /* Next lower level is AArch64 */
+    bic x0, x0, #(1 << 9)       /* Secure state instuction fetches from non-secure memory are permitted */
+    bic x0, x0, #(1 << 8)       /* Hypervisor Call instruction disabled */
+    bic x0, x0, #(1 << 7)       /* Secure Monitor Call enabled */
+    orr x0, x0, #0xf            /* IRQ|FIQ|EA to EL3 */
+    msr scr_el3, x0
 
     mrs x0, sctlr_el3           /* sctlr_el3 config */
     bic x0, x0, #(1 << 19)      /* Disable EL3 translation XN */
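
For readers tracing the restored EL3 path above, the added "orr x2, x2, 0x0F" (and the matching "orr x0, x0, #0xf" in el3_state) sets the low four bits of scr_el3, which route IRQ, FIQ and External Abort/SError to EL3 and mark EL0/EL1/EL2 as Non-secure. Below is a minimal illustrative sketch, not part of this commit, showing the same write with the bits spelled out; the constant names are ours, chosen to match the architectural SCR_EL3 fields:

    /* Illustrative only: equivalent to the commit's "orr x2, x2, 0x0F" */
    .equ SCR_EL3_NS,  (1 << 0)   /* EL0/EL1/EL2 run in Non-secure state */
    .equ SCR_EL3_IRQ, (1 << 1)   /* route physical IRQ to EL3 */
    .equ SCR_EL3_FIQ, (1 << 2)   /* route physical FIQ to EL3 */
    .equ SCR_EL3_EA,  (1 << 3)   /* route External Abort/SError to EL3 */

    mrs x2, scr_el3
    orr x2, x2, #(SCR_EL3_NS | SCR_EL3_IRQ | SCR_EL3_FIQ | SCR_EL3_EA)
    msr scr_el3, x2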
