@@ -90,14 +90,14 @@ static inline void arm_m_exc_tail(void)
 	 * our bookeeping around EXC_RETURN, so do it early.
 	 */
 	void z_check_stack_sentinel(void);
-	void *isr_lr = (void *) *arm_m_exc_lr_ptr;
+	void *isr_lr = (void *)*arm_m_exc_lr_ptr;
 
 	if (IS_ENABLED(CONFIG_STACK_SENTINEL)) {
 		z_check_stack_sentinel();
 	}
 	if (isr_lr != arm_m_cs_ptrs.lr_fixup) {
 		arm_m_cs_ptrs.lr_save = isr_lr;
-		*arm_m_exc_lr_ptr = (uint32_t) arm_m_cs_ptrs.lr_fixup;
+		*arm_m_exc_lr_ptr = (uint32_t)arm_m_cs_ptrs.lr_fixup;
 	}
 #endif
 }
@@ -137,92 +137,91 @@ static ALWAYS_INLINE void arm_m_switch(void *switch_to, void **switched_from)
 	 */
 	register uint32_t r4 __asm__("r4") = (uint32_t)switch_to;
 	register uint32_t r5 __asm__("r5") = (uint32_t)switched_from;
-	__asm__ volatile(
-		_R7_CLOBBER_OPT("push {r7};")
-		/* Construct and push a {r12, lr, pc} group at the top
-		 * of the frame, where PC points to the final restore location
-		 * at the end of this sequence.
-		 */
-		"mov r6, r12;"
-		"mov r7, lr;"
-		"ldr r8, =3f;" /* address of restore PC */
-		"add r8, r8, #1;" /* set thumb bit */
-		"push {r6-r8};"
-		"sub sp, sp, #24;" /* skip over space for r6-r11 */
-		"push {r0-r5};"
-		"mov r2, #0x01000000;" /* APSR (only care about thumb bit) */
-		"mov r0, #0;" /* Leave r0 zero for code blow */
+	__asm__ volatile(_R7_CLOBBER_OPT("push {r7};")
+			 /* Construct and push a {r12, lr, pc} group at the top
+			  * of the frame, where PC points to the final restore location
+			  * at the end of this sequence.
+			  */
+			 "mov r6, r12;"
+			 "mov r7, lr;"
+			 "ldr r8, =3f;" /* address of restore PC */
+			 "add r8, r8, #1;" /* set thumb bit */
+			 "push {r6-r8};"
+			 "sub sp, sp, #24;" /* skip over space for r6-r11 */
+			 "push {r0-r5};"
+			 "mov r2, #0x01000000;" /* APSR (only care about thumb bit) */
+			 "mov r0, #0;" /* Leave r0 zero for code blow */
 #ifdef CONFIG_BUILTIN_STACK_GUARD
-		"mrs r1, psplim;"
-		"push {r1-r2};"
-		"msr psplim, r0;" /* zero it so we can move the stack */
+			 "mrs r1, psplim;"
+			 "push {r1-r2};"
+			 "msr psplim, r0;" /* zero it so we can move the stack */
 #else
-		"push {r2 };"
+			 "push {r2 };"
 #endif
 
 #ifdef CONFIG_FPU
-		/* Push FPU state (if active) to our outgoing stack */
-		" mrs r8, control;" /* read CONTROL.FPCA */
-		" and r7, r8, #4;" /* r7 == have_fpu */
-		" cbz r7, 1f;"
-		" bic r8, r8, #4;" /* clear CONTROL.FPCA */
-		" msr control, r8;"
-		" vmrs r6, fpscr;"
-		" push {r6};"
-		" vpush {s0-s31};"
-		"1: push {r7};" /* have_fpu word */
-
-		/* Pop FPU state (if present) from incoming frame in r4 */
-		" ldm r4!, {r7};" /* have_fpu word */
-		" cbz r7, 2f;"
-		" vldm r4!, {s0-s31};" /* (note: sets FPCA bit for us) */
-		" ldm r4!, {r6};"
-		" vmsr fpscr, r6;"
-		"2:;"
+			 /* Push FPU state (if active) to our outgoing stack */
+			 " mrs r8, control;" /* read CONTROL.FPCA */
+			 " and r7, r8, #4;" /* r7 == have_fpu */
+			 " cbz r7, 1f;"
+			 " bic r8, r8, #4;" /* clear CONTROL.FPCA */
+			 " msr control, r8;"
+			 " vmrs r6, fpscr;"
+			 " push {r6};"
+			 " vpush {s0-s31};"
+			 "1: push {r7};" /* have_fpu word */
+
+			 /* Pop FPU state (if present) from incoming frame in r4 */
+			 " ldm r4!, {r7};" /* have_fpu word */
+			 " cbz r7, 2f;"
+			 " vldm r4!, {s0-s31};" /* (note: sets FPCA bit for us) */
+			 " ldm r4!, {r6};"
+			 " vmsr fpscr, r6;"
+			 "2:;"
 #endif
 
 #if defined(CONFIG_USERSPACE) && defined(CONFIG_USE_SWITCH)
-		" ldr r8, =arm_m_switch_control;"
-		" ldr r8, [r8];"
-		" msr control, r8;"
+			 " ldr r8, =arm_m_switch_control;"
+			 " ldr r8, [r8];"
+			 " msr control, r8;"
 #endif
 
-		/* Save the outgoing switch handle (which is SP), swap stacks,
-		 * and enable interrupts. The restore process is
-		 * interruptible code (running in the incoming thread) once
-		 * the stack is valid.
-		 */
-		"str sp, [r5];"
-		"mov sp, r4;"
-		"msr basepri, r0;"
+			 /* Save the outgoing switch handle (which is SP), swap stacks,
+			  * and enable interrupts. The restore process is
+			  * interruptible code (running in the incoming thread) once
+			  * the stack is valid.
+			  */
+			 "str sp, [r5];"
+			 "mov sp, r4;"
+			 "msr basepri, r0;"
 
 			 /* Restore is super simple: pop the flags (and stack limit if
 			  * enabled) then slurp in the whole GPR set in two
 			  * instructions. (The instruction encoding disallows popping
 			  * both LR and PC in a single instruction)
 			  */
 #ifdef CONFIG_BUILTIN_STACK_GUARD
-		"pop {r1-r2};"
-		"msr psplim, r1;"
+			 "pop {r1-r2};"
+			 "msr psplim, r1;"
 #else
-		"pop {r2 };"
+			 "pop {r2 };"
 #endif
 #ifdef _ARM_M_SWITCH_HAVE_DSP
-		"msr apsr_nzcvqg, r2;" /* bonkers syntax */
+			 "msr apsr_nzcvqg, r2;" /* bonkers syntax */
 #else
-		"msr apsr_nzcvq, r2;" /* not even source-compatible! */
+			 "msr apsr_nzcvq, r2;" /* not even source-compatible! */
 #endif
-		"pop {r0-r12, lr};"
-		"pop {pc};"
+			 "pop {r0-r12, lr};"
+			 "pop {pc};"
 
-		"3:" /* Label for restore address */
-		_R7_CLOBBER_OPT("pop {r7};")
-		:: "r"(r4), "r"(r5)
-		: "r6", "r8", "r9", "r10",
+			 "3:" /* Label for restore address */
+			 _R7_CLOBBER_OPT("pop {r7};"):: "r"(r4),
+			 "r"(r5)
+			 : "r6", "r8", "r9", "r10",
 #ifndef CONFIG_ARM_GCC_FP_WORKAROUND
-		"r7",
+			 "r7",
 #endif
-		"r11");
+			 "r11");
 }
 
 #ifdef CONFIG_USE_SWITCH
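
For readers following the hand-written save/restore sequence in the hunk above, the sketch below shows the switch frame the assembly appears to build on the outgoing stack (and consume from the incoming handle in r4), reconstructed purely from the push/pop order in the diff. It is illustrative only: the struct and field names are invented here, not part of the patch, and with CONFIG_FPU the s0-s31/fpscr words are present only when the have_fpu word is nonzero, so a fixed struct shows just the full case.

#include <stdint.h>

/* Hypothetical layout at the saved SP (the switch handle), ascending
 * addresses; names are illustrative, inferred from the asm above.
 */
struct arm_m_switch_frame {
#ifdef CONFIG_FPU
	uint32_t have_fpu;   /* "1: push {r7};", nonzero if the FP area follows */
	uint32_t s[32];      /* "vpush {s0-s31};", present only if have_fpu */
	uint32_t fpscr;      /* "push {r6};" after "vmrs r6, fpscr;" */
#endif
#ifdef CONFIG_BUILTIN_STACK_GUARD
	uint32_t psplim;     /* saved stack limit, "push {r1-r2};" */
#endif
	uint32_t apsr;       /* 0x01000000 on save: only the Thumb bit matters */
	uint32_t r0_r5[6];   /* "push {r0-r5};" */
	uint32_t r6_r11[6];  /* space reserved by "sub sp, sp, #24;" */
	uint32_t r12;        /* pushed via r6 */
	uint32_t lr;         /* pushed via r7 */
	uint32_t pc;         /* "3:" resume address with the Thumb bit set */
};

On the restore side this matches the pop sequence: the flags (and psplim, if configured) come off first, "pop {r0-r12, lr}" consumes the fourteen r0_r5/r6_r11/r12/lr words in one instruction, and "pop {pc}" branches to the saved resume address.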