
Commit 1568e5d

arch/arm: clang-format changes
Late-arriving clang-format-demanded changes that are too hard to split
and squash into the original patches. No behavior changes.

Signed-off-by: Andy Ross <[email protected]>
1 parent a161328 commit 1568e5d
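
A formatting-only commit like this can usually be checked mechanically. A minimal sketch, assuming a bash shell and that the tree's .clang-format is picked up by clang-format's default file-based style search:

    # confirm both files are now clang-format clean (nonzero exit on any violation)
    clang-format --style=file --dry-run --Werror \
            arch/arm/core/cortex_m/arm-m-switch.c include/zephyr/arch/arm/arm-m-switch.h

    # whitespace-insensitive compare against the parent commit, as a
    # "no behavior changes" sanity check
    for f in arch/arm/core/cortex_m/arm-m-switch.c include/zephyr/arch/arm/arm-m-switch.h; do
            diff <(git show a161328:"$f" | tr -d ' \t\n') <(git show 1568e5d:"$f" | tr -d ' \t\n')
    done

Note the tr trick also discards whitespace inside string literals (relevant here, since these files carry inline assembly in strings), so an empty diff is supporting evidence rather than proof of identical code generation.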

File tree

2 files changed (+69, -71 lines)


arch/arm/core/cortex_m/arm-m-switch.c

Lines changed: 7 additions & 8 deletions
@@ -233,7 +233,7 @@ static void iciit_fixup(struct k_thread *th, struct hw_frame_base *hw, uint32_t
 		/* Stash original return address, replace with hook */
 		th->arch.iciit_pc = hw->pc;
 		th->arch.iciit_apsr = hw->apsr;
-		hw->pc = (uint32_t) arm_m_iciit_stub;
+		hw->pc = (uint32_t)arm_m_iciit_stub;
 	}
 #endif
 }
@@ -413,7 +413,7 @@ void *arm_m_new_stack(char *base, uint32_t sz, void *entry, void *arg0, void *ar
 	uint32_t *s_top = (uint32_t *)(stack + K_KERNEL_STACK_SIZEOF(z_interrupt_stacks[0]));
 
 	arm_m_exc_lr_ptr = &s_top[-1];
-	arm_m_cs_ptrs.lr_fixup = (void *) (1 | (uint32_t)arm_m_exc_exit); /* thumb bit! */
+	arm_m_cs_ptrs.lr_fixup = (void *)(1 | (uint32_t)arm_m_exc_exit); /* thumb bit! */
 #endif
 
 	baddr = ((uint32_t)base + 7) & ~7;
@@ -457,16 +457,15 @@ bool arm_m_must_switch(void)
 	 */
 	uint32_t pri = _EXC_IRQ_DEFAULT_PRIO;
 
-	__asm__ volatile("msr basepri, %0" :: "r"(pri));
+	__asm__ volatile("msr basepri, %0" ::"r"(pri));
 
 	/* Secure mode transistions can push a non-thread frame to the
 	 * stack. If not enabled, we already know by construction
 	 * that we're handling the bottom level of the interrupt stack
 	 * and returning to thread mode.
 	 */
-	if ((IS_ENABLED(CONFIG_ARM_SECURE_FIRMWARE) ||
-	     IS_ENABLED(CONFIG_ARM_NONSECURE_FIRMWARE))
-	    && !is_thread_return((uint32_t)arm_m_cs_ptrs.lr_save)) {
+	if ((IS_ENABLED(CONFIG_ARM_SECURE_FIRMWARE) || IS_ENABLED(CONFIG_ARM_NONSECURE_FIRMWARE)) &&
+	    !is_thread_return((uint32_t)arm_m_cs_ptrs.lr_save)) {
 		return false;
 	}
 
@@ -551,14 +550,14 @@ __asm__(".globl arm_m_exc_exit;"
	" bl arm_m_must_switch;"
	" ldr r2, =arm_m_cs_ptrs;"
	" mov r3, #0;"
-	" ldr lr, [r2, #8];"	 /* lr_save */
+	" ldr lr, [r2, #8];" /* lr_save */
	" cbz r0, 1f;"
	" mov lr, #0xfffffffd;" /* integer-only LR */
	" ldm r2, {r0, r1};"    /* fields: out, in */
	" stm r0, {r4-r11};"    /* out is a switch_frame */
	" ldm r1!, {r7-r11};"   /* in is a synth_frame */
	" ldm r1, {r4-r6};"
	"1:\n"
-	" msr basepri, r3;"	 /* release lock taken in must_switch */
+	" msr basepri, r3;" /* release lock taken in must_switch */
	" bx lr;");
 #endif

include/zephyr/arch/arm/arm-m-switch.h

Lines changed: 62 additions & 63 deletions
@@ -90,14 +90,14 @@ static inline void arm_m_exc_tail(void)
 	 * our bookeeping around EXC_RETURN, so do it early.
 	 */
 	void z_check_stack_sentinel(void);
-	void *isr_lr = (void *) *arm_m_exc_lr_ptr;
+	void *isr_lr = (void *)*arm_m_exc_lr_ptr;
 
 	if (IS_ENABLED(CONFIG_STACK_SENTINEL)) {
 		z_check_stack_sentinel();
 	}
 	if (isr_lr != arm_m_cs_ptrs.lr_fixup) {
 		arm_m_cs_ptrs.lr_save = isr_lr;
-		*arm_m_exc_lr_ptr = (uint32_t) arm_m_cs_ptrs.lr_fixup;
+		*arm_m_exc_lr_ptr = (uint32_t)arm_m_cs_ptrs.lr_fixup;
 	}
 #endif
 }
@@ -137,92 +137,91 @@ static ALWAYS_INLINE void arm_m_switch(void *switch_to, void **switched_from)
 	 */
 	register uint32_t r4 __asm__("r4") = (uint32_t)switch_to;
 	register uint32_t r5 __asm__("r5") = (uint32_t)switched_from;
-	__asm__ volatile(
-		_R7_CLOBBER_OPT("push {r7};")
-		/* Construct and push a {r12, lr, pc} group at the top
-		 * of the frame, where PC points to the final restore location
-		 * at the end of this sequence.
-		 */
-		"mov r6, r12;"
-		"mov r7, lr;"
-		"ldr r8, =3f;" /* address of restore PC */
-		"add r8, r8, #1;" /* set thumb bit */
-		"push {r6-r8};"
-		"sub sp, sp, #24;" /* skip over space for r6-r11 */
-		"push {r0-r5};"
-		"mov r2, #0x01000000;" /* APSR (only care about thumb bit) */
-		"mov r0, #0;" /* Leave r0 zero for code blow */
+	__asm__ volatile(_R7_CLOBBER_OPT("push {r7};")
+			 /* Construct and push a {r12, lr, pc} group at the top
+			  * of the frame, where PC points to the final restore location
+			  * at the end of this sequence.
+			  */
+			 "mov r6, r12;"
+			 "mov r7, lr;"
+			 "ldr r8, =3f;" /* address of restore PC */
+			 "add r8, r8, #1;" /* set thumb bit */
+			 "push {r6-r8};"
+			 "sub sp, sp, #24;" /* skip over space for r6-r11 */
+			 "push {r0-r5};"
+			 "mov r2, #0x01000000;" /* APSR (only care about thumb bit) */
+			 "mov r0, #0;" /* Leave r0 zero for code blow */
 #ifdef CONFIG_BUILTIN_STACK_GUARD
-		"mrs r1, psplim;"
-		"push {r1-r2};"
-		"msr psplim, r0;" /* zero it so we can move the stack */
+			 "mrs r1, psplim;"
+			 "push {r1-r2};"
+			 "msr psplim, r0;" /* zero it so we can move the stack */
 #else
-		"push {r2};"
+			 "push {r2};"
 #endif
 
 #ifdef CONFIG_FPU
-		/* Push FPU state (if active) to our outgoing stack */
-		" mrs r8, control;" /* read CONTROL.FPCA */
-		" and r7, r8, #4;" /* r7 == have_fpu */
-		" cbz r7, 1f;"
-		" bic r8, r8, #4;" /* clear CONTROL.FPCA */
-		" msr control, r8;"
-		" vmrs r6, fpscr;"
-		" push {r6};"
-		" vpush {s0-s31};"
-		"1: push {r7};" /* have_fpu word */
-
-		/* Pop FPU state (if present) from incoming frame in r4 */
-		" ldm r4!, {r7};" /* have_fpu word */
-		" cbz r7, 2f;"
-		" vldm r4!, {s0-s31};" /* (note: sets FPCA bit for us) */
-		" ldm r4!, {r6};"
-		" vmsr fpscr, r6;"
-		"2:;"
+			 /* Push FPU state (if active) to our outgoing stack */
+			 " mrs r8, control;" /* read CONTROL.FPCA */
+			 " and r7, r8, #4;" /* r7 == have_fpu */
+			 " cbz r7, 1f;"
+			 " bic r8, r8, #4;" /* clear CONTROL.FPCA */
+			 " msr control, r8;"
+			 " vmrs r6, fpscr;"
+			 " push {r6};"
+			 " vpush {s0-s31};"
+			 "1: push {r7};" /* have_fpu word */
+
+			 /* Pop FPU state (if present) from incoming frame in r4 */
+			 " ldm r4!, {r7};" /* have_fpu word */
+			 " cbz r7, 2f;"
+			 " vldm r4!, {s0-s31};" /* (note: sets FPCA bit for us) */
+			 " ldm r4!, {r6};"
+			 " vmsr fpscr, r6;"
+			 "2:;"
 #endif
 
 #if defined(CONFIG_USERSPACE) && defined(CONFIG_USE_SWITCH)
-		" ldr r8, =arm_m_switch_control;"
-		" ldr r8, [r8];"
-		" msr control, r8;"
+			 " ldr r8, =arm_m_switch_control;"
+			 " ldr r8, [r8];"
+			 " msr control, r8;"
 #endif
 
-		/* Save the outgoing switch handle (which is SP), swap stacks,
-		 * and enable interrupts. The restore process is
-		 * interruptible code (running in the incoming thread) once
-		 * the stack is valid.
-		 */
-		"str sp, [r5];"
-		"mov sp, r4;"
-		"msr basepri, r0;"
+			 /* Save the outgoing switch handle (which is SP), swap stacks,
+			  * and enable interrupts. The restore process is
+			  * interruptible code (running in the incoming thread) once
+			  * the stack is valid.
+			  */
+			 "str sp, [r5];"
+			 "mov sp, r4;"
+			 "msr basepri, r0;"
 
 			 /* Restore is super simple: pop the flags (and stack limit if
 			  * enabled) then slurp in the whole GPR set in two
 			  * instructions. (The instruction encoding disallows popping
 			  * both LR and PC in a single instruction)
 			  */
 #ifdef CONFIG_BUILTIN_STACK_GUARD
-		"pop {r1-r2};"
-		"msr psplim, r1;"
+			 "pop {r1-r2};"
+			 "msr psplim, r1;"
 #else
-		"pop {r2};"
+			 "pop {r2};"
 #endif
 #ifdef _ARM_M_SWITCH_HAVE_DSP
-		"msr apsr_nzcvqg, r2;" /* bonkers syntax */
+			 "msr apsr_nzcvqg, r2;" /* bonkers syntax */
 #else
-		"msr apsr_nzcvq, r2;" /* not even source-compatible! */
+			 "msr apsr_nzcvq, r2;" /* not even source-compatible! */
 #endif
-		"pop {r0-r12, lr};"
-		"pop {pc};"
+			 "pop {r0-r12, lr};"
+			 "pop {pc};"
 
-		"3:" /* Label for restore address */
-		_R7_CLOBBER_OPT("pop {r7};")
-		::"r"(r4), "r"(r5)
-		: "r6", "r8", "r9", "r10",
+			 "3:" /* Label for restore address */
+			 _R7_CLOBBER_OPT("pop {r7};")::"r"(r4),
+			 "r"(r5)
+			 : "r6", "r8", "r9", "r10",
 #ifndef CONFIG_ARM_GCC_FP_WORKAROUND
-		"r7",
+			 "r7",
 #endif
-		"r11");
+			 "r11");
 }
 
 #ifdef CONFIG_USE_SWITCH
