@@ -127,13 +127,13 @@ static void __attribute__((noinline)) wolfBoot_image_confirm_signature_ok(
  */
 #define PART_SANITY_CHECK(p) \
     /* Redundant set of r2=0 */ \
-    asm volatile("mov r2, #0"); \
-    asm volatile("mov r2, #0"); \
-    asm volatile("mov r2, #0"); \
-    asm volatile("mov r2, #0"); \
-    asm volatile("mov r2, #0"); \
+    asm volatile("mov r2, #0":::"r2"); \
+    asm volatile("mov r2, #0":::"r2"); \
+    asm volatile("mov r2, #0":::"r2"); \
+    asm volatile("mov r2, #0":::"r2"); \
+    asm volatile("mov r2, #0":::"r2"); \
     /* Loading hdr_ok flag, verifying */ \
-    asm volatile("mov r2, %0" ::"r"((p)->hdr_ok)); \
+    asm volatile("mov r2, %0" ::"r"((p)->hdr_ok):"r2"); \
     asm volatile("cmp r2, #1"); \
     asm volatile("bne ."); \
     asm volatile("cmp r2, #1"); \
@@ -143,13 +143,13 @@ static void __attribute__((noinline)) wolfBoot_image_confirm_signature_ok(
     asm volatile("cmp r2, #1"); \
     asm volatile("bne .-12"); \
     /* Redundant set of r2=0 */ \
-    asm volatile("mov r2, #0"); \
-    asm volatile("mov r2, #0"); \
-    asm volatile("mov r2, #0"); \
-    asm volatile("mov r2, #0"); \
-    asm volatile("mov r2, #0"); \
+    asm volatile("mov r2, #0":::"r2"); \
+    asm volatile("mov r2, #0":::"r2"); \
+    asm volatile("mov r2, #0":::"r2"); \
+    asm volatile("mov r2, #0":::"r2"); \
+    asm volatile("mov r2, #0":::"r2"); \
     /* Loading sha_ok flag, verifying */ \
-    asm volatile("mov r2, %0" ::"r"((p)->sha_ok)); \
+    asm volatile("mov r2, %0" ::"r"((p)->sha_ok):"r2"); \
     asm volatile("cmp r2, #1"); \
     asm volatile("bne ."); \
     asm volatile("cmp r2, #1"); \
@@ -159,13 +159,13 @@ static void __attribute__((noinline)) wolfBoot_image_confirm_signature_ok(
     asm volatile("cmp r2, #1"); \
     asm volatile("bne .-12"); \
     /* Redundant set of r2=0 */ \
-    asm volatile("mov r2, #0"); \
-    asm volatile("mov r2, #0"); \
-    asm volatile("mov r2, #0"); \
-    asm volatile("mov r2, #0"); \
-    asm volatile("mov r2, #0"); \
+    asm volatile("mov r2, #0":::"r2"); \
+    asm volatile("mov r2, #0":::"r2"); \
+    asm volatile("mov r2, #0":::"r2"); \
+    asm volatile("mov r2, #0":::"r2"); \
+    asm volatile("mov r2, #0":::"r2"); \
     /* Loading signature_ok flag, verifying */ \
-    asm volatile("mov r2, %0" ::"r"((p)->signature_ok)); \
+    asm volatile("mov r2, %0" ::"r"((p)->signature_ok):"r2"); \
     asm volatile("cmp r2, #1"); \
     asm volatile("bne ."); \
     asm volatile("cmp r2, #1"); \
@@ -181,7 +181,7 @@ static void __attribute__((noinline)) wolfBoot_image_confirm_signature_ok(
     asm volatile("mov r2, #0"); \
     asm volatile("mov r2, #0"); \
     /* Loading ~(signature_ok) flag, verifying */ \
-    asm volatile("mov r2, %0" ::"r"((p)->not_signature_ok)); \
+    asm volatile("mov r2, %0" ::"r"((p)->not_signature_ok):"r2"); \
     asm volatile("cmp r2, #0xFFFFFFFE"); \
     asm volatile("bne ."); \
     asm volatile("cmp r2, #0xFFFFFFFE"); \
@@ -191,14 +191,14 @@ static void __attribute__((noinline)) wolfBoot_image_confirm_signature_ok(
     asm volatile("cmp r2, #0xFFFFFFFE"); \
     asm volatile("bne .-12"); \
     /* Redundant set of r2=0 */ \
-    asm volatile("mov r2, #0"); \
-    asm volatile("mov r2, #0"); \
-    asm volatile("mov r2, #0"); \
-    asm volatile("mov r2, #0"); \
-    asm volatile("mov r2, #0"); \
+    asm volatile("mov r2, #0":::"r2"); \
+    asm volatile("mov r2, #0":::"r2"); \
+    asm volatile("mov r2, #0":::"r2"); \
+    asm volatile("mov r2, #0":::"r2"); \
+    asm volatile("mov r2, #0":::"r2"); \
     /* Loading canary value, verifying */ \
-    asm volatile("mov r2, %0" ::"r"((p)->canary_FEED6789)); \
-    asm volatile("mov r0, %0" ::"r"(0xFEED6789)); \
+    asm volatile("mov r2, %0" ::"r"((p)->canary_FEED6789):"r2"); \
+    asm volatile("mov r0, %0" ::"r"(0xFEED6789):"r0"); \
     asm volatile("cmp r2, r0"); \
     asm volatile("bne ."); \
     asm volatile("cmp r2, r0"); \
@@ -208,14 +208,14 @@ static void __attribute__((noinline)) wolfBoot_image_confirm_signature_ok(
     asm volatile("cmp r2, r0"); \
     asm volatile("bne .-12"); \
     /* Redundant set of r2=0 */ \
-    asm volatile("mov r2, #0"); \
-    asm volatile("mov r2, #0"); \
-    asm volatile("mov r2, #0"); \
-    asm volatile("mov r2, #0"); \
-    asm volatile("mov r2, #0"); \
+    asm volatile("mov r2, #0":::"r2"); \
+    asm volatile("mov r2, #0":::"r2"); \
+    asm volatile("mov r2, #0":::"r2"); \
+    asm volatile("mov r2, #0":::"r2"); \
+    asm volatile("mov r2, #0":::"r2"); \
     /* Loading canary value, verifying */ \
-    asm volatile("mov r2, %0" ::"r"((p)->canary_FEED4567)); \
-    asm volatile("mov r0, %0" ::"r"(0xFEED4567)); \
+    asm volatile("mov r2, %0" ::"r"((p)->canary_FEED4567):"r2"); \
+    asm volatile("mov r0, %0" ::"r"(0xFEED4567):"r0"); \
     asm volatile("cmp r2, r0"); \
     asm volatile("bne ."); \
     asm volatile("cmp r2, r0"); \
@@ -225,14 +225,14 @@ static void __attribute__((noinline)) wolfBoot_image_confirm_signature_ok(
     asm volatile("cmp r2, r0"); \
     asm volatile("bne .-12"); \
     /* Redundant set of r2=0 */ \
-    asm volatile("mov r2, #0"); \
-    asm volatile("mov r2, #0"); \
-    asm volatile("mov r2, #0"); \
-    asm volatile("mov r2, #0"); \
-    asm volatile("mov r2, #0"); \
+    asm volatile("mov r2, #0":::"r2"); \
+    asm volatile("mov r2, #0":::"r2"); \
+    asm volatile("mov r2, #0":::"r2"); \
+    asm volatile("mov r2, #0":::"r2"); \
+    asm volatile("mov r2, #0":::"r2"); \
     /* Loading canary value, verifying */ \
-    asm volatile("mov r2, %0" ::"r"((p)->canary_FEED89AB)); \
-    asm volatile("mov r0, %0" ::"r"(0xFEED89AB)); \
+    asm volatile("mov r2, %0" ::"r"((p)->canary_FEED89AB):"r2"); \
+    asm volatile("mov r0, %0" ::"r"(0xFEED89AB):"r0"); \
     asm volatile("cmp r2, r0"); \
     asm volatile("bne ."); \
     asm volatile("cmp r2, r0"); \
@@ -251,16 +251,16 @@ static void __attribute__((noinline)) wolfBoot_image_confirm_signature_ok(
 #define RSA_VERIFY_FN(ret, fn, ...) \
     { \
         /* Redundant set of r0=0 */ \
-        asm volatile("mov r0, #0"); \
-        asm volatile("mov r0, #0"); \
-        asm volatile("mov r0, #0"); \
+        asm volatile("mov r0, #0":::"r0"); \
+        asm volatile("mov r0, #0":::"r0"); \
+        asm volatile("mov r0, #0":::"r0"); \
         /* Call the function */ \
         int tmp_ret = fn(__VA_ARGS__); \
         ret = -1; \
         /* Redundant set of r2=SHA_DIGEST_SIZE */ \
-        asm volatile("mov r2, %0" ::"r"(WOLFBOOT_SHA_DIGEST_SIZE)); \
-        asm volatile("mov r2, %0" ::"r"(WOLFBOOT_SHA_DIGEST_SIZE)); \
-        asm volatile("mov r2, %0" ::"r"(WOLFBOOT_SHA_DIGEST_SIZE)); \
+        asm volatile("mov r2, %0" ::"r"(WOLFBOOT_SHA_DIGEST_SIZE):"r2"); \
+        asm volatile("mov r2, %0" ::"r"(WOLFBOOT_SHA_DIGEST_SIZE):"r2"); \
+        asm volatile("mov r2, %0" ::"r"(WOLFBOOT_SHA_DIGEST_SIZE):"r2"); \
         /* Redundant check for fn() return value >= r2 */ \
         asm volatile("cmp r0, r2"); \
         asm volatile("blt nope"); \
@@ -288,9 +288,9 @@ static void __attribute__((noinline)) wolfBoot_image_confirm_signature_ok(
     if (!img || !digest) \
         asm volatile("b hnope"); \
     /* Redundant set of r0=50 */ \
-    asm volatile("mov r0, #50"); \
-    asm volatile("mov r0, #50"); \
-    asm volatile("mov r0, #50"); \
+    asm volatile("mov r0, #50":::"r0"); \
+    asm volatile("mov r0, #50":::"r0"); \
+    asm volatile("mov r0, #50":::"r0"); \
     compare_res = XMEMCMP(digest, img->sha_hash, WOLFBOOT_SHA_DIGEST_SIZE); \
     /* Redundant checks that ensure the function actually returned 0 */ \
     asm volatile("cmp r0, #0"); \
@@ -330,9 +330,9 @@ static void __attribute__((noinline)) wolfBoot_image_confirm_signature_ok(
  */
 #define VERIFY_FN(img, p_res, fn, ...) \
     /* Redundant set of r0=50 */ \
-    asm volatile("mov r0, #50"); \
-    asm volatile("mov r0, #50"); \
-    asm volatile("mov r0, #50"); \
+    asm volatile("mov r0, #50":::"r0"); \
+    asm volatile("mov r0, #50":::"r0"); \
+    asm volatile("mov r0, #50":::"r0"); \
     /* Call the verify function */ \
     fn(__VA_ARGS__); \
     /* Redundant checks that ensure the function actually returned 0 */ \
@@ -384,19 +384,19 @@ static void __attribute__((noinline)) wolfBoot_image_confirm_signature_ok(
     /* Stash the register values */ \
     asm volatile("push {r4, r5, r6, r7}"); \
     /* Redundant initialization with 'failure' values */ \
-    asm volatile("mov r0, #0"); \
-    asm volatile("mov r4, #1"); \
-    asm volatile("mov r5, #0"); \
-    asm volatile("mov r6, #2"); \
-    asm volatile("mov r7, #0"); \
-    asm volatile("mov r0, #0"); \
-    asm volatile("mov r4, #1"); \
-    asm volatile("mov r5, #0"); \
-    asm volatile("mov r6, #2"); \
-    asm volatile("mov r7, #0"); \
+    asm volatile("mov r0, #0":::"r0"); \
+    asm volatile("mov r4, #1":::"r4"); \
+    asm volatile("mov r5, #0":::"r5"); \
+    asm volatile("mov r6, #2":::"r6"); \
+    asm volatile("mov r7, #0":::"r7"); \
+    asm volatile("mov r0, #0":::"r0"); \
+    asm volatile("mov r4, #1":::"r4"); \
+    asm volatile("mov r5, #0":::"r5"); \
+    asm volatile("mov r6, #2":::"r6"); \
+    asm volatile("mov r7, #0":::"r7"); \
     /* Read the fb_ok flag, jump to end_check \
      * if proven fb_ok == 1 */ \
-    asm volatile("mov r0, %0" ::"r"(fb_ok)); \
+    asm volatile("mov r0, %0" ::"r"(fb_ok):"r0"); \
     asm volatile("cmp r0, #1"); \
     asm volatile("bne do_check"); \
     asm volatile("cmp r0, #1"); \
@@ -407,20 +407,20 @@ static void __attribute__((noinline)) wolfBoot_image_confirm_signature_ok(
     /* Do the actual version check: */ \
     asm volatile("do_check:"); \
     /* Read update versions to reg r5 and r7 */ \
-    asm volatile("mov r0, #1"); \
-    asm volatile("mov r0, #1"); \
-    asm volatile("mov r0, #1"); \
+    asm volatile("mov r0, #1":::"r0"); \
+    asm volatile("mov r0, #1":::"r0"); \
+    asm volatile("mov r0, #1":::"r0"); \
     asm volatile("bl wolfBoot_get_image_version"); \
-    asm volatile("mov r5, r0"); \
-    asm volatile("mov r5, r0"); \
-    asm volatile("mov r5, r0"); \
-    asm volatile("mov r0, #1"); \
-    asm volatile("mov r0, #1"); \
-    asm volatile("mov r0, #1"); \
+    asm volatile("mov r5, r0":::"r5"); \
+    asm volatile("mov r5, r0":::"r5"); \
+    asm volatile("mov r5, r0":::"r5"); \
+    asm volatile("mov r0, #1":::"r0"); \
+    asm volatile("mov r0, #1":::"r0"); \
+    asm volatile("mov r0, #1":::"r0"); \
     asm volatile("bl wolfBoot_get_image_version"); \
-    asm volatile("mov r7, r0"); \
-    asm volatile("mov r7, r0"); \
-    asm volatile("mov r7, r0"); \
+    asm volatile("mov r7, r0":::"r7"); \
+    asm volatile("mov r7, r0":::"r7"); \
+    asm volatile("mov r7, r0":::"r7"); \
     /* Compare r5 and r7; if not equal, something went very wrong */ \
     asm volatile("cmp r5, r7"); \
     asm volatile("bne ."); \
@@ -431,20 +431,20 @@ static void __attribute__((noinline)) wolfBoot_image_confirm_signature_ok(
     asm volatile("cmp r5, r7"); \
     asm volatile("bne .-12"); \
     /* Read current versions to reg r4 and r6 */ \
-    asm volatile("mov r0, #0"); \
-    asm volatile("mov r0, #0"); \
-    asm volatile("mov r0, #0"); \
+    asm volatile("mov r0, #0":::"r0"); \
+    asm volatile("mov r0, #0":::"r0"); \
+    asm volatile("mov r0, #0":::"r0"); \
     asm volatile("bl wolfBoot_get_image_version"); \
-    asm volatile("mov r4, r0"); \
-    asm volatile("mov r4, r0"); \
-    asm volatile("mov r4, r0"); \
-    asm volatile("mov r0, #0"); \
-    asm volatile("mov r0, #0"); \
-    asm volatile("mov r0, #0"); \
+    asm volatile("mov r4, r0":::"r4"); \
+    asm volatile("mov r4, r0":::"r4"); \
+    asm volatile("mov r4, r0":::"r4"); \
+    asm volatile("mov r0, #0":::"r0"); \
+    asm volatile("mov r0, #0":::"r0"); \
+    asm volatile("mov r0, #0":::"r0"); \
     asm volatile("bl wolfBoot_get_image_version"); \
-    asm volatile("mov r6, r0"); \
-    asm volatile("mov r6, r0"); \
-    asm volatile("mov r6, r0"); \
+    asm volatile("mov r6, r0":::"r6"); \
+    asm volatile("mov r6, r0":::"r6"); \
+    asm volatile("mov r6, r0":::"r6"); \
     asm volatile("cmp r4, r6"); \
     asm volatile("bne ."); \
     asm volatile("cmp r4, r6"); \
@@ -453,9 +453,9 @@ static void __attribute__((noinline)) wolfBoot_image_confirm_signature_ok(
     asm volatile("bne .-8"); \
     asm volatile("cmp r4, r6"); \
     asm volatile("bne .-12"); \
-    asm volatile("mov r0, #0"); \
-    asm volatile("mov r0, #0"); \
-    asm volatile("mov r0, #0"); \
+    asm volatile("mov r0, #0":::"r0"); \
+    asm volatile("mov r0, #0":::"r0"); \
+    asm volatile("mov r0, #0":::"r0"); \
     /* Compare the two versions in registers */ \
     asm volatile("cmp r4, r5"); \
     asm volatile("bge ."); \
@@ -467,34 +467,34 @@ static void __attribute__((noinline)) wolfBoot_image_confirm_signature_ok(
     asm volatile("bge .-12"); \
     asm volatile("end_check:"); \
     /* Restore previously saved register values */ \
-    asm volatile("pop {r4, r5, r6, r7}")
+    asm volatile("pop {r4, r5, r6, r7}":::"r4", "r5", "r6", "r7")

 #define CONFIRM_MASK_VALID(id, mask) \
-    asm volatile("mov r1, %0" ::"r"(id)); \
+    asm volatile("mov r1, %0" ::"r"(id):"r1"); \
     /* id &= 0x0F */ \
-    asm volatile("and.w r1, r1, #15"); \
-    asm volatile("mov r0, %0" ::"r"(mask)); \
-    asm volatile("movs r2, #1"); \
-    asm volatile("lsls r2, r1"); \
-    asm volatile("ands r2, r0"); \
-    asm volatile("movs r0, #1"); \
-    asm volatile("lsls r0, r1"); \
+    asm volatile("and.w r1, r1, #15":::"r1"); \
+    asm volatile("mov r0, %0" ::"r"(mask):"r0"); \
+    asm volatile("movs r2, #1":::"r2"); \
+    asm volatile("lsls r2, r1":::"r2"); \
+    asm volatile("ands r2, r0":::"r2"); \
+    asm volatile("movs r0, #1":::"r0"); \
+    asm volatile("lsls r0, r1":::"r0"); \
     asm volatile("cmp r0, r2"); \
     asm volatile("bne ."); \
     asm volatile("mov r0, %0" ::"r"(mask)); \
-    asm volatile("movs r2, #1"); \
-    asm volatile("lsls r2, r1"); \
-    asm volatile("ands r2, r0"); \
-    asm volatile("movs r0, #1"); \
-    asm volatile("lsls r0, r1"); \
+    asm volatile("movs r2, #1":::"r2"); \
+    asm volatile("lsls r2, r1":::"r2"); \
+    asm volatile("ands r2, r0":::"r2"); \
+    asm volatile("movs r0, #1":::"r0"); \
+    asm volatile("lsls r0, r1":::"r0"); \
     asm volatile("cmp r0, r2"); \
     asm volatile("bne ."); \
-    asm volatile("mov r0, %0" ::"r"(mask)); \
-    asm volatile("movs r2, #1"); \
-    asm volatile("lsls r2, r1"); \
-    asm volatile("ands r2, r0"); \
-    asm volatile("movs r0, #1"); \
-    asm volatile("lsls r0, r1"); \
+    asm volatile("mov r0, %0" ::"r"(mask):"r0"); \
+    asm volatile("movs r2, #1":::"r2"); \
+    asm volatile("lsls r2, r1":::"r2"); \
+    asm volatile("ands r2, r0":::"r2"); \
+    asm volatile("movs r0, #1":::"r0"); \
+    asm volatile("lsls r0, r1":::"r0"); \
     asm volatile("cmp r0, r2"); \
     asm volatile("bne ."); \

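Editorial note on the change above: in GNU C extended asm, an instruction that writes a register without declaring it in the clobber list leaves the compiler free to assume that register still holds whatever value it had cached there, which could quietly undermine these redundant fault-injection countermeasures. Listing the register in the clobber list (the third colon-separated section) tells the compiler the statement overwrites it. Below is a minimal sketch of the same idea, for illustration only and assuming an Arm GCC toolchain; the function name confirm_flag_set is hypothetical and not part of wolfBoot.

#include <stdint.h>

/* Illustration only: mirrors the pattern used by the macros above.
 * The flag is copied into r2 and checked; declaring "r2" as clobbered
 * tells the compiler that each asm statement overwrites that register. */
static inline void confirm_flag_set(uint32_t flag)
{
    asm volatile("mov r2, #0" ::: "r2");            /* redundant clear of r2 */
    asm volatile("mov r2, %0" :: "r"(flag) : "r2"); /* load the flag into r2 */
    asm volatile("cmp r2, #1");                     /* expected value is 1 */
    asm volatile("bne .");                          /* spin here if the check fails */
}

Because each statement is volatile and the clobbers are declared, GCC keeps these statements in order and does not reuse a stale cached value of r2 afterwards. A stricter variant would also list "cc" as a clobber on the cmp, since it modifies the condition flags; the macros above appear to rely on the adjacent branch consuming the flags immediately.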