Commit fb4f9e5

[CMSIS_5]: Updated to a65b7c9a3
1 parent 9977458 commit fb4f9e5


57 files changed, +9495 −8989 lines changed

cmsis/TARGET_CORTEX_A/cmsis_armcc.h

Lines changed: 5 additions & 17 deletions
@@ -1,8 +1,8 @@
 /**************************************************************************//**
  * @file     cmsis_armcc.h
  * @brief    CMSIS compiler specific macros, functions, instructions
- * @version  V1.0.3
- * @date     15. May 2019
+ * @version  V1.0.4
+ * @date     30. July 2019
  ******************************************************************************/
 /*
  * Copyright (c) 2009-2019 Arm Limited. All rights reserved.
@@ -114,29 +114,17 @@
 /**
   \brief   Instruction Synchronization Barrier
  */
-#define __ISB() do {\
-                   __schedule_barrier();\
-                   __isb(0xF);\
-                   __schedule_barrier();\
-                } while (0U)
+#define __ISB() __isb(0xF)
 
 /**
   \brief   Data Synchronization Barrier
  */
-#define __DSB() do {\
-                   __schedule_barrier();\
-                   __dsb(0xF);\
-                   __schedule_barrier();\
-                } while (0U)
+#define __DSB() __dsb(0xF)
 
 /**
   \brief   Data Memory Barrier
  */
-#define __DMB() do {\
-                   __schedule_barrier();\
-                   __dmb(0xF);\
-                   __schedule_barrier();\
-                } while (0U)
+#define __DMB() __dmb(0xF)
 
 /**
   \brief   Reverse byte order (32 bit)
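
Note: a minimal sketch of how these barrier macros are typically used, with hypothetical variable names — publish data, then raise a flag another observer (an interrupt handler or second core) polls:

    shared_buf[0] = payload;   /* hypothetical shared buffer              */
    __DMB();                   /* order the data write before the flag    */
    shared_ready = 1U;         /* hypothetical flag read by the consumer  */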

cmsis/TARGET_CORTEX_A/cmsis_armclang.h

Lines changed: 12 additions & 19 deletions
@@ -1,8 +1,8 @@
 /**************************************************************************//**
  * @file     cmsis_armclang.h
  * @brief    CMSIS compiler specific macros, functions, instructions
- * @version  V1.1.1
- * @date     15. May 2019
+ * @version  V1.2.0
+ * @date     05. August 2019
  ******************************************************************************/
 /*
  * Copyright (c) 2009-2019 Arm Limited. All rights reserved.
@@ -130,29 +130,17 @@
 /**
   \brief   Instruction Synchronization Barrier
  */
-#define __ISB() do {\
-                   __schedule_barrier();\
-                   __builtin_arm_isb(0xF);\
-                   __schedule_barrier();\
-                } while (0U)
+#define __ISB() __builtin_arm_isb(0xF)
 
 /**
   \brief   Data Synchronization Barrier
  */
-#define __DSB() do {\
-                   __schedule_barrier();\
-                   __builtin_arm_dsb(0xF);\
-                   __schedule_barrier();\
-                } while (0U)
+#define __DSB() __builtin_arm_dsb(0xF)
 
 /**
   \brief   Data Memory Barrier
  */
-#define __DMB() do {\
-                   __schedule_barrier();\
-                   __builtin_arm_dmb(0xF);\
-                   __schedule_barrier();\
-                } while (0U)
+#define __DMB() __builtin_arm_dmb(0xF)
 
 /**
   \brief   Reverse byte order (32 bit)
@@ -322,6 +310,8 @@ __STATIC_FORCEINLINE uint8_t __CLZ(uint32_t value)
 
 #if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))
 
+#define __SADD8    __builtin_arm_sadd8
+#define __SADD16   __builtin_arm_sadd16
 #define __QADD8    __builtin_arm_qadd8
 #define __QSUB8    __builtin_arm_qsub8
 #define __QADD16   __builtin_arm_qadd16
@@ -342,7 +332,10 @@ __STATIC_FORCEINLINE uint8_t __CLZ(uint32_t value)
 #define __SMUSD    __builtin_arm_smusd
 #define __SMUSDX   __builtin_arm_smusdx
 #define __SMLSDX   __builtin_arm_smlsdx
-
+#define __USAT16   __builtin_arm_usat16
+#define __SSUB8    __builtin_arm_ssub8
+#define __SXTB16   __builtin_arm_sxtb16
+#define __SXTAB16  __builtin_arm_sxtab16
 
 
 __STATIC_FORCEINLINE int32_t __QADD( int32_t op1, int32_t op2)
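
Note: these map the CMSIS SIMD intrinsics straight onto Clang's ACLE builtins. A small usage sketch with hypothetical values, contrasting the modulo __SADD8 with the saturating __QADD8 already defined above:

    uint32_t a = 0x7F010203U;              /* byte lanes: 127, 1, 2, 3          */
    uint32_t s = __SADD8(a, 0x01010101U);  /* 0x80020304: 127+1 wraps to -128   */
    uint32_t q = __QADD8(a, 0x01010101U);  /* 0x7F020304: 127+1 saturates at 127 */
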
@@ -408,7 +401,7 @@ __STATIC_FORCEINLINE uint32_t __get_CPSR(void)
  */
 __STATIC_FORCEINLINE void __set_CPSR(uint32_t cpsr)
 {
-  __ASM volatile ("MSR cpsr, %0" : : "r" (cpsr) : "memory");
+  __ASM volatile ("MSR cpsr, %0" : : "r" (cpsr) : "cc", "memory");
 }
 
 /** \brief  Get Mode
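
Note: writing CPSR can change the N/Z/C/V condition flags as well as the mode, which is what the added "cc" clobber tells the compiler. A typical call, assuming the standard Cortex-A mode encoding (System mode = 0x1F):

    uint32_t cpsr = __get_CPSR();
    __set_CPSR((cpsr & ~0x1FU) | 0x1FU);   /* switch to System mode */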

cmsis/TARGET_CORTEX_A/cmsis_gcc.h

Lines changed: 115 additions & 16 deletions
@@ -1,8 +1,8 @@
 /**************************************************************************//**
  * @file     cmsis_gcc.h
  * @brief    CMSIS compiler specific macros, functions, instructions
- * @version  V1.2.0
- * @date     17. May 2019
+ * @version  V1.3.0
+ * @date     17. December 2019
  ******************************************************************************/
 /*
  * Copyright (c) 2009-2019 Arm Limited. All rights reserved.
@@ -119,6 +119,15 @@ __STATIC_FORCEINLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2)
 }
 
 
+__STATIC_FORCEINLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2)
+{
+  uint32_t result;
+
+  __ASM volatile ("qsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
+  return(result);
+}
+
+
 __STATIC_FORCEINLINE uint32_t __QADD16(uint32_t op1, uint32_t op2)
 {
   uint32_t result;
@@ -127,6 +136,14 @@ __STATIC_FORCEINLINE uint32_t __QADD16(uint32_t op1, uint32_t op2)
   return(result);
 }
 
+__STATIC_FORCEINLINE uint32_t __QADD8(uint32_t op1, uint32_t op2)
+{
+  uint32_t result;
+
+  __ASM volatile ("qadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
+  return(result);
+}
+
 __STATIC_FORCEINLINE int32_t __QADD( int32_t op1, int32_t op2)
 {
   int32_t result;
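
Note: __QADD8 and __QSUB8 operate on four signed byte lanes at once, saturating each lane independently to the int8_t range. A short worked example with hypothetical values:

    uint32_t a = 0x7F010203U;        /* lanes: 127, 1, 2, 3              */
    uint32_t b = 0x01050101U;        /* lanes:   1, 5, 1, 1              */
    uint32_t s = __QADD8(a, b);      /* 0x7F060304: 127+1 stays 127      */
    uint32_t d = __QSUB8(a, b);      /* 0x7EFC0102: 1-5 = -4 = 0xFC      */
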
@@ -135,6 +152,22 @@ __STATIC_FORCEINLINE int32_t __QADD( int32_t op1, int32_t op2)
   return(result);
 }
 
+__STATIC_FORCEINLINE uint32_t __QSAX(uint32_t op1, uint32_t op2)
+{
+  uint32_t result;
+
+  __ASM ("qsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
+  return(result);
+}
+
+__STATIC_FORCEINLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2)
+{
+  uint32_t result;
+
+  __ASM ("shsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
+  return(result);
+}
+
 __STATIC_FORCEINLINE uint64_t __SMLALD (uint32_t op1, uint32_t op2, uint64_t acc)
 {
   union llreg_u{
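
Note: in the "exchange" variants, op2's halfwords are swapped before the operation; __QSAX then does a saturating subtract in the top lane and a saturating add in the bottom lane, while __SHSAX does the same but halves each result instead of saturating. A worked example with hypothetical values:

    uint32_t a = (4U << 16) | 2U;   /* halfword lanes: hi = 4, lo = 2        */
    uint32_t b = (1U << 16) | 3U;   /* after exchange: hi = 3, lo = 1        */
    uint32_t q = __QSAX(a, b);      /* hi = 4-3 = 1, lo = 2+1 = 3            */
    uint32_t h = __SHSAX(a, b);     /* hi = (4-3)>>1 = 0, lo = (2+1)>>1 = 1  */
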
@@ -160,6 +193,15 @@ __STATIC_FORCEINLINE int32_t __QSUB( int32_t op1, int32_t op2)
   return(result);
 }
 
+__STATIC_FORCEINLINE uint32_t __SXTB16(uint32_t op1)
+{
+  uint32_t result;
+
+  __ASM ("sxtb16 %0, %1" : "=r" (result) : "r" (op1));
+  return(result);
+}
+
+
 __STATIC_FORCEINLINE uint32_t __SMUAD (uint32_t op1, uint32_t op2)
 {
   uint32_t result;
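
Note: __SXTB16 sign-extends bytes 0 and 2 of its argument into the two halfword lanes of the result, which is useful for unpacking interleaved int8 data. For example:

    uint32_t r = __SXTB16(0x00FF0080U);
    /* byte 2 = 0xFF -> 0xFFFF (hi), byte 0 = 0x80 -> 0xFF80 (lo): r = 0xFFFFFF80 */
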
@@ -168,9 +210,14 @@ __STATIC_FORCEINLINE uint32_t __SMUAD (uint32_t op1, uint32_t op2)
   return(result);
 }
 
+
+
 #define __PKHBT(ARG1,ARG2,ARG3)  ( ((((uint32_t)(ARG1))          ) & 0x0000FFFFUL) |  \
                                    ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL)  )
 
+#define __PKHTB(ARG1,ARG2,ARG3)  ( ((((uint32_t)(ARG1))          ) & 0xFFFF0000UL) |  \
+                                   ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL)  )
+
 __STATIC_FORCEINLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3)
 {
   uint32_t result;
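
Note: __PKHTB mirrors the existing __PKHBT — it keeps the top halfword of ARG1 and packs in the (shifted) bottom halfword taken from ARG2. Two worked examples:

    uint32_t bt = __PKHBT(0x0000BBBBU, 0x0000CCCCU, 16);  /* 0xCCCCBBBB */
    uint32_t tb = __PKHTB(0xAAAA5555U, 0x12345678U, 16);  /* 0xAAAA1234 */
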
@@ -220,7 +267,61 @@ __STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
   return(result);
 }
 
+__STATIC_FORCEINLINE uint32_t __SMUSD (uint32_t op1, uint32_t op2)
+{
+  uint32_t result;
+
+  __ASM volatile ("smusd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
+  return(result);
+}
+
+__STATIC_FORCEINLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2)
+{
+  uint32_t result;
+
+  __ASM volatile ("smusdx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
+  return(result);
+}
+
+__STATIC_FORCEINLINE uint32_t __QASX(uint32_t op1, uint32_t op2)
+{
+  uint32_t result;
+
+  __ASM ("qasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
+  return(result);
+}
+
+__STATIC_FORCEINLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2)
+{
+  uint32_t result;
+
+  __ASM ("shadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
+  return(result);
+}
+
+__STATIC_FORCEINLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2)
+{
+  uint32_t result;
 
+  __ASM ("shsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
+  return(result);
+}
+
+__STATIC_FORCEINLINE uint32_t __SHASX(uint32_t op1, uint32_t op2)
+{
+  uint32_t result;
+
+  __ASM ("shasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
+  return(result);
+}
+
+__STATIC_FORCEINLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3)
+{
+  uint32_t result;
+
+  __ASM volatile ("smlsdx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
+  return(result);
+}
 
 
 /* ##########################  Core Instruction Access  ######################### */
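
Note: __SMUSD computes (op1.lo * op2.lo) - (op1.hi * op2.hi), i.e. the real part of a 16-bit complex product; the X variants exchange op2's halfwords first, and __SMLSDX additionally accumulates into op3. A sketch with hypothetical values:

    uint32_t a  = (2U << 16) | 3U;   /* hi = 2, lo = 3               */
    uint32_t b  = (4U << 16) | 5U;   /* hi = 4, lo = 5               */
    uint32_t re = __SMUSD(a, b);     /* 3*5 - 2*4 = 7                */
    uint32_t rx = __SMUSDX(a, b);    /* 3*4 - 2*5 = 2 (b exchanged)  */
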
@@ -232,12 +333,12 @@ __STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
 /**
   \brief   Wait For Interrupt
  */
-#define __WFI()    __ASM volatile ("wfi")
+#define __WFI()    __ASM volatile ("wfi":::"memory")
 
 /**
   \brief   Wait For Event
  */
-#define __WFE()    __ASM volatile ("wfe")
+#define __WFE()    __ASM volatile ("wfe":::"memory")
 
 /**
   \brief   Send Event
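
Note: the new "memory" clobber makes the compiler treat memory as modified across the wait, so values read in a polling loop are reloaded after each wake-up instead of being cached in registers. A minimal sketch, where event_count is a hypothetical variable written by an interrupt handler:

    extern uint32_t event_count;   /* hypothetical, updated by an IRQ handler */

    uint32_t wait_for_event(uint32_t last)
    {
        while (event_count == last) {  /* reloaded each pass thanks to the clobber */
            __WFE();
        }
        return event_count;
    }
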
@@ -289,7 +390,7 @@ __STATIC_FORCEINLINE uint32_t __REV(uint32_t value)
 #else
   uint32_t result;
 
-  __ASM volatile ("rev %0, %1" : "=r" (result) : "r" (value) );
+  __ASM ("rev %0, %1" : "=r" (result) : "r" (value) );
   return result;
 #endif
 }
@@ -300,14 +401,12 @@ __STATIC_FORCEINLINE uint32_t __REV(uint32_t value)
   \param [in]    value  Value to reverse
   \return               Reversed value
  */
-#ifndef __NO_EMBEDDED_ASM
-__attribute__((section(".rev16_text"))) __STATIC_INLINE uint32_t __REV16(uint32_t value)
+__STATIC_FORCEINLINE uint32_t __REV16(uint32_t value)
 {
   uint32_t result;
-  __ASM volatile("rev16 %0, %1" : "=r" (result) : "r" (value));
+  __ASM ("rev16 %0, %1" : "=r" (result) : "r" (value));
   return result;
 }
-#endif
 
 /**
   \brief   Reverse byte order (16 bit)
@@ -322,7 +421,7 @@ __STATIC_FORCEINLINE int16_t __REVSH(int16_t value)
 #else
   int16_t result;
 
-  __ASM volatile ("revsh %0, %1" : "=r" (result) : "r" (value) );
+  __ASM ("revsh %0, %1" : "=r" (result) : "r" (value) );
   return result;
 #endif
 }
@@ -364,7 +463,7 @@ __STATIC_FORCEINLINE uint32_t __RBIT(uint32_t value)
 #if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
      (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
      (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )
-  __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) );
+  __ASM ("rbit %0, %1" : "=r" (result) : "r" (value) );
 #else
   int32_t s = (4U /*sizeof(v)*/ * 8U) - 1U; /* extra shift needed at end */
 
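Note: dropping volatile from these asm statements (here and in __REV, __REV16, __REVSH above) marks them as pure functions of their operands, so the optimizer is free to hoist, reorder, or merge them. A sketch of the effect:

    uint32_t x = __REV(v);
    uint32_t y = __REV(v);   /* may be folded into a single rev instruction,
                                since the non-volatile asm has no side effects */
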
@@ -529,11 +628,11 @@ __STATIC_FORCEINLINE void __CLREX(void)
   \param [in]    sat  Bit position to saturate to (1..32)
   \return             Saturated value
  */
-#define __SSAT(ARG1,ARG2) \
+#define __SSAT(ARG1, ARG2) \
 __extension__ \
 ({                          \
   int32_t __RES, __ARG1 = (ARG1); \
-  __ASM ("ssat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
+  __ASM volatile ("ssat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) : "cc" ); \
   __RES; \
  })
 
@@ -545,11 +644,11 @@ __STATIC_FORCEINLINE void __CLREX(void)
   \param [in]    sat  Bit position to saturate to (0..31)
   \return             Saturated value
  */
-#define __USAT(ARG1,ARG2) \
+#define __USAT(ARG1, ARG2) \
 __extension__ \
 ({                          \
   uint32_t __RES, __ARG1 = (ARG1); \
-  __ASM ("usat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
+  __ASM volatile ("usat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) : "cc" ); \
   __RES; \
  })
 
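Note: __SSAT clamps to a signed two's-complement range and __USAT to an unsigned one; the added "cc" clobber reflects that these instructions update status flags (they set the Q flag on saturation). For example:

    int32_t  s = __SSAT( 70000, 16);   /*  32767: clamped to int16_t max */
    int32_t  t = __SSAT(-70000, 16);   /* -32768: clamped to int16_t min */
    uint32_t u = __USAT(   300,  8);   /*    255: clamped to uint8_t max */
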
@@ -637,7 +736,7 @@ __STATIC_FORCEINLINE uint32_t __get_CPSR(void)
  */
 __STATIC_FORCEINLINE void __set_CPSR(uint32_t cpsr)
 {
-  __ASM volatile ("MSR cpsr, %0" : : "r" (cpsr) : "memory");
+  __ASM volatile ("MSR cpsr, %0" : : "r" (cpsr) : "cc", "memory");
 }
 
 /** \brief  Get Mode

cmsis/TARGET_CORTEX_A/cmsis_iccarm.h

Lines changed: 2 additions & 0 deletions
@@ -10,6 +10,8 @@
 // Copyright (c) 2017-2018 IAR Systems
 // Copyright (c) 2018-2019 Arm Limited
 //
+// SPDX-License-Identifier: Apache-2.0
+//
 // Licensed under the Apache License, Version 2.0 (the "License")
 // you may not use this file except in compliance with the License.
 // You may obtain a copy of the License at
