@@ -134,13 +134,13 @@ static void __attribute__((noinline)) wolfBoot_image_confirm_signature_ok(
         asm volatile("mov r2, #0":::"r2"); \
         /* Loading hdr_ok flag, verifying */ \
         asm volatile("mov r2, %0" ::"r"((p)->hdr_ok):"r2"); \
-        asm volatile("cmp r2, #1"); \
+        asm volatile("cmp r2, #1":::"cc"); \
         asm volatile("bne ."); \
-        asm volatile("cmp r2, #1"); \
+        asm volatile("cmp r2, #1":::"cc"); \
         asm volatile("bne .-4"); \
-        asm volatile("cmp r2, #1"); \
+        asm volatile("cmp r2, #1":::"cc"); \
         asm volatile("bne .-8"); \
-        asm volatile("cmp r2, #1"); \
+        asm volatile("cmp r2, #1":::"cc"); \
         asm volatile("bne .-12"); \
         /* Redundant set of r2=0 */ \
         asm volatile("mov r2, #0":::"r2"); \
@@ -150,13 +150,13 @@ static void __attribute__((noinline)) wolfBoot_image_confirm_signature_ok(
         asm volatile("mov r2, #0":::"r2"); \
         /* Loading hdr_ok flag, verifying */ \
         asm volatile("mov r2, %0" ::"r"((p)->sha_ok):"r2"); \
-        asm volatile("cmp r2, #1"); \
+        asm volatile("cmp r2, #1":::"cc"); \
         asm volatile("bne ."); \
-        asm volatile("cmp r2, #1"); \
+        asm volatile("cmp r2, #1":::"cc"); \
         asm volatile("bne .-4"); \
-        asm volatile("cmp r2, #1"); \
+        asm volatile("cmp r2, #1":::"cc"); \
         asm volatile("bne .-8"); \
-        asm volatile("cmp r2, #1"); \
+        asm volatile("cmp r2, #1":::"cc"); \
         asm volatile("bne .-12"); \
         /* Redundant set of r2=0 */ \
         asm volatile("mov r2, #0":::"r2"); \
@@ -166,13 +166,13 @@ static void __attribute__((noinline)) wolfBoot_image_confirm_signature_ok(
         asm volatile("mov r2, #0":::"r2"); \
         /* Loading signature_ok flag, verifying */ \
         asm volatile("mov r2, %0" ::"r"((p)->signature_ok):"r2"); \
-        asm volatile("cmp r2, #1"); \
+        asm volatile("cmp r2, #1":::"cc"); \
         asm volatile("bne ."); \
-        asm volatile("cmp r2, #1"); \
+        asm volatile("cmp r2, #1":::"cc"); \
         asm volatile("bne .-4"); \
-        asm volatile("cmp r2, #1"); \
+        asm volatile("cmp r2, #1":::"cc"); \
         asm volatile("bne .-8"); \
-        asm volatile("cmp r2, #1"); \
+        asm volatile("cmp r2, #1":::"cc"); \
         asm volatile("bne .-12"); \
         /* Redundant set of r2=0 */ \
         asm volatile("mov r2, #0"); \
@@ -182,13 +182,13 @@ static void __attribute__((noinline)) wolfBoot_image_confirm_signature_ok(
         asm volatile("mov r2, #0"); \
         /* Loading ~(signature_ok) flag, verifying */ \
         asm volatile("mov r2, %0" ::"r"((p)->not_signature_ok):"r2"); \
-        asm volatile("cmp r2, #0xFFFFFFFE"); \
+        asm volatile("cmp r2, #0xFFFFFFFE":::"cc"); \
         asm volatile("bne ."); \
-        asm volatile("cmp r2, #0xFFFFFFFE"); \
+        asm volatile("cmp r2, #0xFFFFFFFE":::"cc"); \
         asm volatile("bne .-4"); \
-        asm volatile("cmp r2, #0xFFFFFFFE"); \
+        asm volatile("cmp r2, #0xFFFFFFFE":::"cc"); \
         asm volatile("bne .-8"); \
-        asm volatile("cmp r2, #0xFFFFFFFE"); \
+        asm volatile("cmp r2, #0xFFFFFFFE":::"cc"); \
         asm volatile("bne .-12"); \
         /* Redundant set of r2=0 */ \
         asm volatile("mov r2, #0":::"r2"); \
@@ -199,13 +199,13 @@ static void __attribute__((noinline)) wolfBoot_image_confirm_signature_ok(
         /* Loading canary value, verifying */ \
         asm volatile("mov r2, %0" ::"r"((p)->canary_FEED6789):"r2"); \
         asm volatile("mov r0, %0" ::"r"(0xFEED6789):"r0"); \
-        asm volatile("cmp r2, r0"); \
+        asm volatile("cmp r2, r0":::"cc"); \
         asm volatile("bne ."); \
-        asm volatile("cmp r2, r0"); \
+        asm volatile("cmp r2, r0":::"cc"); \
         asm volatile("bne .-4"); \
-        asm volatile("cmp r2, r0"); \
+        asm volatile("cmp r2, r0":::"cc"); \
         asm volatile("bne .-8"); \
-        asm volatile("cmp r2, r0"); \
+        asm volatile("cmp r2, r0":::"cc"); \
         asm volatile("bne .-12"); \
         /* Redundant set of r2=0 */ \
         asm volatile("mov r2, #0":::"r2"); \
@@ -216,13 +216,13 @@ static void __attribute__((noinline)) wolfBoot_image_confirm_signature_ok(
         /* Loading canary value, verifying */ \
         asm volatile("mov r2, %0" ::"r"((p)->canary_FEED4567):"r2"); \
         asm volatile("mov r0, %0" ::"r"(0xFEED4567):"r0"); \
-        asm volatile("cmp r2, r0"); \
+        asm volatile("cmp r2, r0":::"cc"); \
         asm volatile("bne ."); \
-        asm volatile("cmp r2, r0"); \
+        asm volatile("cmp r2, r0":::"cc"); \
         asm volatile("bne .-4"); \
-        asm volatile("cmp r2, r0"); \
+        asm volatile("cmp r2, r0":::"cc"); \
         asm volatile("bne .-8"); \
-        asm volatile("cmp r2, r0"); \
+        asm volatile("cmp r2, r0":::"cc"); \
         asm volatile("bne .-12"); \
         /* Redundant set of r2=0 */ \
         asm volatile("mov r2, #0":::"r2"); \
@@ -233,13 +233,13 @@ static void __attribute__((noinline)) wolfBoot_image_confirm_signature_ok(
         /* Loading canary value, verifying */ \
         asm volatile("mov r2, %0" ::"r"((p)->canary_FEED89AB):"r2"); \
         asm volatile("mov r0, %0" ::"r"(0xFEED89AB):"r0"); \
-        asm volatile("cmp r2, r0"); \
+        asm volatile("cmp r2, r0":::"cc"); \
         asm volatile("bne ."); \
-        asm volatile("cmp r2, r0"); \
+        asm volatile("cmp r2, r0":::"cc"); \
         asm volatile("bne .-4"); \
-        asm volatile("cmp r2, r0"); \
+        asm volatile("cmp r2, r0":::"cc"); \
         asm volatile("bne .-8"); \
-        asm volatile("cmp r2, r0"); \
+        asm volatile("cmp r2, r0":::"cc"); \
         asm volatile("bne .-12")
 
 /**
@@ -262,13 +262,13 @@ static void __attribute__((noinline)) wolfBoot_image_confirm_signature_ok(
         asm volatile("mov r2, %0" ::"r"(WOLFBOOT_SHA_DIGEST_SIZE):"r2"); \
         asm volatile("mov r2, %0" ::"r"(WOLFBOOT_SHA_DIGEST_SIZE):"r2"); \
         /* Redundant check for fn() return value >= r2 */ \
-        asm volatile("cmp r0, r2"); \
+        asm volatile("cmp r0, r2":::"cc"); \
         asm volatile("blt nope"); \
-        asm volatile("cmp r0, r2"); \
+        asm volatile("cmp r0, r2":::"cc"); \
         asm volatile("blt nope"); \
-        asm volatile("cmp r0, r2"); \
+        asm volatile("cmp r0, r2":::"cc"); \
         asm volatile("blt nope"); \
-        asm volatile("cmp r0, r2"); \
+        asm volatile("cmp r0, r2":::"cc"); \
         asm volatile("blt nope"); \
         /* Return value is set here in case of success */ \
         ret = tmp_ret; \
@@ -293,24 +293,24 @@ static void __attribute__((noinline)) wolfBoot_image_confirm_signature_ok(
         asm volatile("mov r0, #50":::"r0"); \
         compare_res = XMEMCMP(digest, img->sha_hash, WOLFBOOT_SHA_DIGEST_SIZE); \
         /* Redundant checks that ensure the function actually returned 0 */ \
+        asm volatile("cmp r0, #0":::"cc"); \
+        asm volatile("bne hnope":::"cc"); \
         asm volatile("cmp r0, #0"); \
+        asm volatile("bne hnope":::"cc"); \
+        asm volatile("cmp r0, #0":::"cc"); \
         asm volatile("bne hnope"); \
-        asm volatile("cmp r0, #0"); \
-        asm volatile("bne hnope"); \
-        asm volatile("cmp r0, #0"); \
-        asm volatile("bne hnope"); \
-        asm volatile("cmp r0, #0"); \
+        asm volatile("cmp r0, #0":::"cc"); \
         asm volatile("bne hnope"); \
         /* Repeat memcmp call */ \
         compare_res = XMEMCMP(digest, img->sha_hash, WOLFBOOT_SHA_DIGEST_SIZE); \
         /* Redundant checks that ensure the function actually returned 0 */ \
-        asm volatile("cmp r0, #0"); \
+        asm volatile("cmp r0, #0":::"cc"); \
         asm volatile("bne hnope"); \
-        asm volatile("cmp r0, #0"); \
+        asm volatile("cmp r0, #0":::"cc"); \
         asm volatile("bne hnope"); \
-        asm volatile("cmp r0, #0"); \
+        asm volatile("cmp r0, #0":::"cc"); \
         asm volatile("bne hnope"); \
-        asm volatile("cmp r0, #0"); \
+        asm volatile("cmp r0, #0":::"cc"); \
         asm volatile("bne hnope"); \
         /* Confirm that the signature is OK */ \
         wolfBoot_image_confirm_signature_ok(img); \
@@ -336,26 +336,26 @@ static void __attribute__((noinline)) wolfBoot_image_confirm_signature_ok(
         /* Call the verify function */ \
         fn(__VA_ARGS__); \
         /* Redundant checks that ensure the function actually returned 0 */ \
-        asm volatile("cmp r0, #0"); \
+        asm volatile("cmp r0, #0":::"cc"); \
         asm volatile("bne nope"); \
-        asm volatile("cmp r0, #0"); \
+        asm volatile("cmp r0, #0":::"cc"); \
         asm volatile("bne nope"); \
-        asm volatile("cmp r0, #0"); \
+        asm volatile("cmp r0, #0":::"cc"); \
         asm volatile("bne nope"); \
-        asm volatile("cmp r0, #0"); \
+        asm volatile("cmp r0, #0":::"cc"); \
         asm volatile("bne nope"); \
         /* Check that res = 1, a few times, reading the value from memory */ \
         asm volatile("ldr r2, [%0]" ::"r"(p_res)); \
-        asm volatile("cmp r2, #1"); \
+        asm volatile("cmp r2, #1":::"cc"); \
         asm volatile("bne nope"); \
         asm volatile("ldr r2, [%0]" ::"r"(p_res)); \
-        asm volatile("cmp r2, #1"); \
+        asm volatile("cmp r2, #1":::"cc"); \
         asm volatile("bne nope"); \
         asm volatile("ldr r2, [%0]" ::"r"(p_res)); \
-        asm volatile("cmp r2, #1"); \
+        asm volatile("cmp r2, #1":::"cc"); \
         asm volatile("bne nope"); \
         asm volatile("ldr r2, [%0]" ::"r"(p_res)); \
-        asm volatile("cmp r2, #1"); \
+        asm volatile("cmp r2, #1":::"cc"); \
         asm volatile("bne nope"); \
         /* Confirm that the signature is OK */ \
         wolfBoot_image_confirm_signature_ok(img); \
@@ -397,11 +397,11 @@ static void __attribute__((noinline)) wolfBoot_image_confirm_signature_ok(
         /* Read the fb_ok flag, jump to end_check \
          * if proven fb_ok == 1 */ \
         asm volatile("mov r0, %0" ::"r"(fb_ok):"r0"); \
-        asm volatile("cmp r0, #1"); \
+        asm volatile("cmp r0, #1":::"cc"); \
         asm volatile("bne do_check"); \
-        asm volatile("cmp r0, #1"); \
+        asm volatile("cmp r0, #1":::"cc"); \
         asm volatile("bne do_check"); \
-        asm volatile("cmp r0, #1"); \
+        asm volatile("cmp r0, #1":::"cc"); \
         asm volatile("bne do_check"); \
         asm volatile("b end_check"); \
         /* Do the actual version check: */ \
@@ -422,13 +422,13 @@ static void __attribute__((noinline)) wolfBoot_image_confirm_signature_ok(
         asm volatile("mov r7, r0":::"r7"); \
         asm volatile("mov r7, r0":::"r7"); \
         /* Compare r5 and r7, if not equal, something went very wrong, */ \
-        asm volatile("cmp r5, r7"); \
+        asm volatile("cmp r5, r7":::"cc"); \
         asm volatile("bne ."); \
-        asm volatile("cmp r5, r7"); \
+        asm volatile("cmp r5, r7":::"cc"); \
         asm volatile("bne .-4"); \
-        asm volatile("cmp r5, r7"); \
+        asm volatile("cmp r5, r7":::"cc"); \
         asm volatile("bne .-8"); \
-        asm volatile("cmp r5, r7"); \
+        asm volatile("cmp r5, r7":::"cc"); \
         asm volatile("bne .-12"); \
         /* Read current versions to reg r4 and r6 */ \
         asm volatile("mov r0, #0":::"r0"); \
@@ -445,25 +445,25 @@ static void __attribute__((noinline)) wolfBoot_image_confirm_signature_ok(
         asm volatile("mov r6, r0":::"r6"); \
         asm volatile("mov r6, r0":::"r6"); \
         asm volatile("mov r6, r0":::"r6"); \
-        asm volatile("cmp r4, r6"); \
+        asm volatile("cmp r4, r6":::"cc"); \
         asm volatile("bne ."); \
-        asm volatile("cmp r4, r6"); \
+        asm volatile("cmp r4, r6":::"cc"); \
         asm volatile("bne .-4"); \
-        asm volatile("cmp r4, r6"); \
+        asm volatile("cmp r4, r6":::"cc"); \
         asm volatile("bne .-8"); \
-        asm volatile("cmp r4, r6"); \
+        asm volatile("cmp r4, r6":::"cc"); \
         asm volatile("bne .-12"); \
         asm volatile("mov r0, #0":::"r0"); \
         asm volatile("mov r0, #0":::"r0"); \
         asm volatile("mov r0, #0":::"r0"); \
         /* Compare the two versions in registries */ \
-        asm volatile("cmp r4, r5"); \
+        asm volatile("cmp r4, r5":::"cc"); \
         asm volatile("bge ."); \
-        asm volatile("cmp r6, r7"); \
+        asm volatile("cmp r6, r7":::"cc"); \
         asm volatile("bge .-4"); \
-        asm volatile("cmp r4, r5"); \
+        asm volatile("cmp r4, r5":::"cc"); \
         asm volatile("bge .-8"); \
-        asm volatile("cmp r6, r7"); \
+        asm volatile("cmp r6, r7":::"cc"); \
         asm volatile("bge .-12"); \
         asm volatile("end_check:"); \
         /* Restore previously saved registry values */ \
@@ -475,27 +475,27 @@ static void __attribute__((noinline)) wolfBoot_image_confirm_signature_ok(
         asm volatile("and.w r1, r1, #15":::"r1"); \
         asm volatile("mov r0, %0" :: "r"(mask):"r0"); \
         asm volatile("movs r2, #1":::"r2"); \
-        asm volatile("lsls r2, r1":::"r2"); \
-        asm volatile("ands r2, r0":::"r2"); \
-        asm volatile("movs r0, #1":::"r0"); \
-        asm volatile("lsls r0, r1":::"r0"); \
+        asm volatile("lsls r2, r1":::"r2", "cc"); \
+        asm volatile("ands r2, r0":::"r2", "cc"); \
+        asm volatile("movs r0, #1":::"cc"); \
+        asm volatile("lsls r0, r1":::"r0", "cc"); \
         asm volatile("cmp r0, r2"); \
         asm volatile("bne ."); \
         asm volatile("mov r0, %0" :: "r"(mask)); \
         asm volatile("movs r2, #1":::"r2"); \
-        asm volatile("lsls r2, r1":::"r2"); \
-        asm volatile("ands r2, r0":::"r2"); \
+        asm volatile("lsls r2, r1":::"r2", "cc"); \
+        asm volatile("ands r2, r0":::"r2", "cc"); \
         asm volatile("movs r0, #1":::"r0"); \
-        asm volatile("lsls r0, r1":::"r0"); \
-        asm volatile("cmp r0, r2"); \
+        asm volatile("lsls r0, r1":::"r0", "cc"); \
+        asm volatile("cmp r0, r2":::"cc"); \
         asm volatile("bne ."); \
         asm volatile("mov r0, %0" :: "r"(mask):"r0"); \
         asm volatile("movs r2, #1":::"r2"); \
-        asm volatile("lsls r2, r1":::"r2"); \
-        asm volatile("ands r2, r0":::"r2"); \
+        asm volatile("lsls r2, r1":::"r2", "cc"); \
+        asm volatile("ands r2, r0":::"r2", "cc"); \
         asm volatile("movs r0, #1":::"r0"); \
-        asm volatile("lsls r0, r1":::"r0"); \
-        asm volatile("cmp r0, r2"); \
+        asm volatile("lsls r0, r1":::"r0", "cc"); \
+        asm volatile("cmp r0, r2":::"cc"); \
         asm volatile("bne ."); \
 
 #else
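
Note on the pattern above (not part of the patch): on ARM, cmp and the flag-setting movs/lsls/ands forms update the APSR condition flags, so an asm statement that executes one of them should list "cc" in its clobbers; otherwise the compiler is free to assume any flags it was tracking across the statement are still valid, which can undermine the redundant checks this code relies on. The following is a minimal, self-contained sketch of that idea for arm-none-eabi-gcc; the helper name require_one and its argument are hypothetical and only illustrate the same load/compare/spin structure used in the diff.

    #include <stdint.h>

    /* Hypothetical helper mirroring the patched pattern: load a flag into r2,
     * compare it against 1, and spin in place if the comparison fails. The
     * "cc" clobber tells GCC that the cmp overwrites the condition flags. */
    static inline void require_one(uint32_t flag)
    {
        asm volatile("mov r2, %0" ::"r"(flag):"r2");
        asm volatile("cmp r2, #1":::"cc"); /* flags clobbered: declare "cc" */
        asm volatile("bne .");             /* hang here if the check failed */
    }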