/* --- CMSIS compiler abstraction for GCC -----------------------------------
 * NOTE(review): this file appears to be a text extraction of ARM CMSIS
 * cmsis_gcc.h; original line numbers are fused onto the code lines and many
 * intermediate lines (braces, #endif, local declarations) are missing.
 * Comments below document intent only; all code tokens are left untouched.
 * Suppress warnings that the intrinsics below trigger by design. */
29#pragma GCC diagnostic push
30#pragma GCC diagnostic ignored "-Wsign-conversion"
31#pragma GCC diagnostic ignored "-Wconversion"
32#pragma GCC diagnostic ignored "-Wunused-parameter"
/* Fallback so "#if __has_builtin(...)" can be used unconditionally on
 * compilers that lack the operator: every builtin reports "not available". */
36 #define __has_builtin(x) (0)
/* Compiler-specific keyword/attribute shims.  Each is only defined when the
 * user has not already provided one (the guarding #ifndef lines for some
 * entries are elided in this extraction — TODO confirm against upstream). */
44 #define __INLINE inline
46#ifndef __STATIC_INLINE
47 #define __STATIC_INLINE static inline
49#ifndef __STATIC_FORCEINLINE
/* Forced inlining: always_inline makes GCC inline even at -O0. */
50 #define __STATIC_FORCEINLINE __attribute__((always_inline)) static inline
53 #define __NO_RETURN __attribute__((__noreturn__))
/* "used" keeps the symbol even if it seems unreferenced (e.g. asm-only use). */
56 #define __USED __attribute__((used))
59 #define __WEAK __attribute__((weak))
/* packed + aligned(1): byte layout with no padding and no alignment demand. */
62 #define __PACKED __attribute__((packed, aligned(1)))
64#ifndef __PACKED_STRUCT
65 #define __PACKED_STRUCT struct __attribute__((packed, aligned(1)))
68 #define __PACKED_UNION union __attribute__((packed, aligned(1)))
/* Legacy unaligned 32-bit access helper: a packed single-member struct lets
 * the compiler emit byte-wise (alignment-safe) loads/stores through the cast
 * pointer.  The -Wpacked/-Wattributes pragmas silence "attribute has no
 * effect" style diagnostics for the one-member struct. */
70#ifndef __UNALIGNED_UINT32
71 #pragma GCC diagnostic push
72 #pragma GCC diagnostic ignored "-Wpacked"
73 #pragma GCC diagnostic ignored "-Wattributes"
74 struct __attribute__((packed))
T_UINT32 { uint32_t v; };
75 #pragma GCC diagnostic pop
/* Read or write a uint32_t at a possibly unaligned address x. */
76 #define __UNALIGNED_UINT32(x) (((struct T_UINT32 *)(x))->v)
/* Unaligned 16-bit write helper: packed wrapper struct forces the compiler
 * to generate alignment-safe stores through the casted address. */
78#ifndef __UNALIGNED_UINT16_WRITE
79 #pragma GCC diagnostic push
80 #pragma GCC diagnostic ignored "-Wpacked"
81 #pragma GCC diagnostic ignored "-Wattributes"
82 __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
83 #pragma GCC diagnostic pop
/* Store val (uint16_t) to the possibly unaligned address addr. */
84 #define __UNALIGNED_UINT16_WRITE(addr, val) (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
/* Unaligned 16-bit read helper (const-qualified counterpart of the above). */
86#ifndef __UNALIGNED_UINT16_READ
87 #pragma GCC diagnostic push
88 #pragma GCC diagnostic ignored "-Wpacked"
89 #pragma GCC diagnostic ignored "-Wattributes"
90 __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
91 #pragma GCC diagnostic pop
/* Load a uint16_t from the possibly unaligned address addr. */
92 #define __UNALIGNED_UINT16_READ(addr) (((const struct T_UINT16_READ *)(const void *)(addr))->v)
/* Unaligned 32-bit write helper, same packed-struct technique as the 16-bit
 * variants above. */
94#ifndef __UNALIGNED_UINT32_WRITE
95 #pragma GCC diagnostic push
96 #pragma GCC diagnostic ignored "-Wpacked"
97 #pragma GCC diagnostic ignored "-Wattributes"
98 __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
99 #pragma GCC diagnostic pop
/* Store val (uint32_t) to the possibly unaligned address addr. */
100 #define __UNALIGNED_UINT32_WRITE(addr, val) (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
/* Unaligned 32-bit read helper (const-qualified counterpart). */
102#ifndef __UNALIGNED_UINT32_READ
103 #pragma GCC diagnostic push
104 #pragma GCC diagnostic ignored "-Wpacked"
105 #pragma GCC diagnostic ignored "-Wattributes"
106 __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
107 #pragma GCC diagnostic pop
/* Load a uint32_t from the possibly unaligned address addr. */
108 #define __UNALIGNED_UINT32_READ(addr) (((const struct T_UINT32_READ *)(const void *)(addr))->v)
/* Alignment request for definitions (x = byte alignment). */
111 #define __ALIGNED(x) __attribute__((aligned(x)))
/* Map the CMSIS __RESTRICT keyword to GCC's __restrict. */
114 #define __RESTRICT __restrict
116#ifndef __COMPILER_BARRIER
/* Empty asm with a "memory" clobber: forbids the compiler from reordering
 * memory accesses across this point (compiler barrier only — emits no
 * hardware barrier instruction). */
117 #define __COMPILER_BARRIER() __ASM volatile("":::"memory")
/* Default C startup: __cmsis_start copies initialized data and zeroes bss
 * using linker-provided copy/zero tables, then enters _start.
 * NOTE(review): the __cmsis_start function header, the __copy_table_t /
 * __zero_table_t typedefs, and the braces around the loops below are missing
 * from this extraction — the loops are fragments of that function's body. */
122#ifndef __PROGRAM_START
/* C library entry point; never returns. */
133 extern void _start(
void) __NO_RETURN;
/* Linker-script symbols delimiting the data-copy and bss-zero tables. */
146 extern const __copy_table_t __copy_table_start__;
147 extern const __copy_table_t __copy_table_end__;
148 extern const __zero_table_t __zero_table_start__;
149 extern const __zero_table_t __zero_table_end__;
/* Copy each table entry's source region to its destination, word by word
 * (wlen is presumably a count of 32-bit words — TODO confirm vs. typedef). */
151 for (__copy_table_t
const* pTable = &__copy_table_start__; pTable < &__copy_table_end__; ++pTable) {
152 for(uint32_t i=0u; i<pTable->wlen; ++i) {
153 pTable->dest[i] = pTable->src[i];
/* Zero-fill each region listed in the zero (bss) table. */
157 for (__zero_table_t
const* pTable = &__zero_table_start__; pTable < &__zero_table_end__; ++pTable) {
158 for(uint32_t i=0u; i<pTable->wlen; ++i) {
159 pTable->dest[i] = 0u;
/* Default program entry and stack symbols used by the startup code. */
166#define __PROGRAM_START __cmsis_start
170#define __INITIAL_SP __StackTop
174#define __STACK_LIMIT __StackLimit
/* Symbol name and placement attribute for the interrupt vector table as
 * expected by the GCC startup files / linker script. */
177#ifndef __VECTOR_TABLE
178#define __VECTOR_TABLE __Vectors
181#ifndef __VECTOR_TABLE_ATTRIBUTE
/* FIX: use the canonical `__attribute__` keyword spelling (the previous bare
 * `__attribute` form is an obscure GCC-only alias; every other attribute in
 * this file uses `__attribute__`, and upstream CMSIS applied the same fix).
 * "used" prevents the table from being garbage-collected; section(".vectors")
 * places it where the linker script expects the vector table. */
182#define __VECTOR_TABLE_ATTRIBUTE __attribute__((used, section(".vectors")))
198 __ASM
volatile (
"cpsie i" : : :
"memory");
209 __ASM
volatile (
"cpsid i" : : :
"memory");
222 __ASM
volatile (
"MRS %0, control" :
"=r" (result) );
227#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
233__STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(
void)
237 __ASM
volatile (
"MRS %0, control_ns" :
"=r" (result) );
250 __ASM
volatile (
"MSR control, %0" : :
"r" (control) :
"memory");
254#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
260__STATIC_FORCEINLINE
void __TZ_set_CONTROL_NS(uint32_t control)
262 __ASM
volatile (
"MSR control_ns, %0" : :
"r" (control) :
"memory");
276 __ASM
volatile (
"MRS %0, ipsr" :
"=r" (result) );
290 __ASM
volatile (
"MRS %0, apsr" :
"=r" (result) );
304 __ASM
volatile (
"MRS %0, xpsr" :
"=r" (result) );
318 __ASM
volatile (
"MRS %0, psp" :
"=r" (result) );
323#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
329__STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(
void)
333 __ASM
volatile (
"MRS %0, psp_ns" :
"=r" (result) );
344__STATIC_FORCEINLINE
void __set_PSP(uint32_t topOfProcStack)
346 __ASM
volatile (
"MSR psp, %0" : :
"r" (topOfProcStack) : );
350#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
356__STATIC_FORCEINLINE
void __TZ_set_PSP_NS(uint32_t topOfProcStack)
358 __ASM
volatile (
"MSR psp_ns, %0" : :
"r" (topOfProcStack) : );
372 __ASM
volatile (
"MRS %0, msp" :
"=r" (result) );
377#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
383__STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(
void)
387 __ASM
volatile (
"MRS %0, msp_ns" :
"=r" (result) );
398__STATIC_FORCEINLINE
void __set_MSP(uint32_t topOfMainStack)
400 __ASM
volatile (
"MSR msp, %0" : :
"r" (topOfMainStack) : );
404#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
410__STATIC_FORCEINLINE
void __TZ_set_MSP_NS(uint32_t topOfMainStack)
412 __ASM
volatile (
"MSR msp_ns, %0" : :
"r" (topOfMainStack) : );
417#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
423__STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(
void)
427 __ASM
volatile (
"MRS %0, sp_ns" :
"=r" (result) );
437__STATIC_FORCEINLINE
void __TZ_set_SP_NS(uint32_t topOfStack)
439 __ASM
volatile (
"MSR sp_ns, %0" : :
"r" (topOfStack) : );
453 __ASM
volatile (
"MRS %0, primask" :
"=r" (result) ::
"memory");
458#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
464__STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(
void)
468 __ASM
volatile (
"MRS %0, primask_ns" :
"=r" (result) ::
"memory");
481 __ASM
volatile (
"MSR primask, %0" : :
"r" (priMask) :
"memory");
485#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
491__STATIC_FORCEINLINE
void __TZ_set_PRIMASK_NS(uint32_t priMask)
493 __ASM
volatile (
"MSR primask_ns, %0" : :
"r" (priMask) :
"memory");
498#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
499 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
500 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )
506__STATIC_FORCEINLINE
void __enable_fault_irq(
void)
508 __ASM
volatile (
"cpsie f" : : :
"memory");
517__STATIC_FORCEINLINE
void __disable_fault_irq(
void)
519 __ASM
volatile (
"cpsid f" : : :
"memory");
528__STATIC_FORCEINLINE uint32_t __get_BASEPRI(
void)
532 __ASM
volatile (
"MRS %0, basepri" :
"=r" (result) );
537#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
543__STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(
void)
547 __ASM
volatile (
"MRS %0, basepri_ns" :
"=r" (result) );
558__STATIC_FORCEINLINE
void __set_BASEPRI(uint32_t basePri)
560 __ASM
volatile (
"MSR basepri, %0" : :
"r" (basePri) :
"memory");
564#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
570__STATIC_FORCEINLINE
void __TZ_set_BASEPRI_NS(uint32_t basePri)
572 __ASM
volatile (
"MSR basepri_ns, %0" : :
"r" (basePri) :
"memory");
583__STATIC_FORCEINLINE
void __set_BASEPRI_MAX(uint32_t basePri)
585 __ASM
volatile (
"MSR basepri_max, %0" : :
"r" (basePri) :
"memory");
594__STATIC_FORCEINLINE uint32_t __get_FAULTMASK(
void)
598 __ASM
volatile (
"MRS %0, faultmask" :
"=r" (result) );
603#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
609__STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(
void)
613 __ASM
volatile (
"MRS %0, faultmask_ns" :
"=r" (result) );
624__STATIC_FORCEINLINE
void __set_FAULTMASK(uint32_t faultMask)
626 __ASM
volatile (
"MSR faultmask, %0" : :
"r" (faultMask) :
"memory");
630#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
636__STATIC_FORCEINLINE
void __TZ_set_FAULTMASK_NS(uint32_t faultMask)
638 __ASM
volatile (
"MSR faultmask_ns, %0" : :
"r" (faultMask) :
"memory");
647#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
648 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )
659__STATIC_FORCEINLINE uint32_t __get_PSPLIM(
void)
661#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
662 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
667 __ASM
volatile (
"MRS %0, psplim" :
"=r" (result) );
672#if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
681__STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(
void)
683#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
688 __ASM
volatile (
"MRS %0, psplim_ns" :
"=r" (result) );
704__STATIC_FORCEINLINE
void __set_PSPLIM(uint32_t ProcStackPtrLimit)
706#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
707 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
709 (void)ProcStackPtrLimit;
711 __ASM
volatile (
"MSR psplim, %0" : :
"r" (ProcStackPtrLimit));
716#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
725__STATIC_FORCEINLINE
void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)
727#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
729 (void)ProcStackPtrLimit;
731 __ASM
volatile (
"MSR psplim_ns, %0\n" : :
"r" (ProcStackPtrLimit));
746__STATIC_FORCEINLINE uint32_t __get_MSPLIM(
void)
748#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
749 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
754 __ASM
volatile (
"MRS %0, msplim" :
"=r" (result) );
760#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
769__STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(
void)
771#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
776 __ASM
volatile (
"MRS %0, msplim_ns" :
"=r" (result) );
792__STATIC_FORCEINLINE
void __set_MSPLIM(uint32_t MainStackPtrLimit)
794#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
795 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
797 (void)MainStackPtrLimit;
799 __ASM
volatile (
"MSR msplim, %0" : :
"r" (MainStackPtrLimit));
804#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
813__STATIC_FORCEINLINE
void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)
815#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
817 (void)MainStackPtrLimit;
819 __ASM
volatile (
"MSR msplim_ns, %0" : :
"r" (MainStackPtrLimit));
835#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
836 (defined (__FPU_USED ) && (__FPU_USED == 1U)) )
837#if __has_builtin(__builtin_arm_get_fpscr)
841 return __builtin_arm_get_fpscr();
845 __ASM
volatile (
"VMRS %0, fpscr" :
"=r" (result) );
861#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
862 (defined (__FPU_USED ) && (__FPU_USED == 1U)) )
863#if __has_builtin(__builtin_arm_set_fpscr)
867 __builtin_arm_set_fpscr(fpscr);
869 __ASM
volatile (
"VMSR fpscr, %0" : :
"r" (fpscr) :
"vfpcc",
"memory");
/* Inline-asm register constraint helpers.  Thumb-1 (thumb without thumb2)
 * can only address low registers r0-r7 in most encodings, hence the "l"
 * constraint; otherwise any core register "r" is allowed.
 * NOTE(review): the #else/#endif between the two definition sets is elided
 * in this extraction — TODO confirm against upstream. */
889#if defined (__thumb__) && !defined (__thumb2__)
890#define __CMSIS_GCC_OUT_REG(r) "=l" (r)
891#define __CMSIS_GCC_RW_REG(r) "+l" (r)
892#define __CMSIS_GCC_USE_REG(r) "l" (r)
894#define __CMSIS_GCC_OUT_REG(r) "=r" (r)
895#define __CMSIS_GCC_RW_REG(r) "+r" (r)
896#define __CMSIS_GCC_USE_REG(r) "r" (r)
/* Core hint instructions: no-operation, wait-for-interrupt, wait-for-event,
 * send-event. */
903#define __NOP() __ASM volatile ("nop")
909#define __WFI() __ASM volatile ("wfi")
917#define __WFE() __ASM volatile ("wfe")
924#define __SEV() __ASM volatile ("sev")
933__STATIC_FORCEINLINE
void __ISB(
void)
935 __ASM
volatile (
"isb 0xF":::
"memory");
944__STATIC_FORCEINLINE
void __DSB(
void)
946 __ASM
volatile (
"dsb 0xF":::
"memory");
955__STATIC_FORCEINLINE
void __DMB(
void)
957 __ASM
volatile (
"dmb 0xF":::
"memory");
967__STATIC_FORCEINLINE uint32_t
__REV(uint32_t value)
969#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
970 return __builtin_bswap32(value);
974 __ASM
volatile (
"rev %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
986__STATIC_FORCEINLINE uint32_t
__REV16(uint32_t value)
990 __ASM
volatile (
"rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
1001__STATIC_FORCEINLINE int16_t
__REVSH(int16_t value)
1003#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
1004 return (int16_t)__builtin_bswap16(value);
1008 __ASM
volatile (
"revsh %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
1021__STATIC_FORCEINLINE uint32_t
__ROR(uint32_t op1, uint32_t op2)
1028 return (op1 >> op2) | (op1 << (32U - op2));
1039#define __BKPT(value) __ASM volatile ("bkpt "#value)
1048__STATIC_FORCEINLINE uint32_t
__RBIT(uint32_t value)
1052#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
1053 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
1054 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )
1055 __ASM
volatile (
"rbit %0, %1" :
"=r" (result) :
"r" (value) );
1057 uint32_t s = (4U * 8U) - 1U;
1060 for (value >>= 1U; value != 0U; value >>= 1U)
1063 result |= value & 1U;
1078__STATIC_FORCEINLINE uint8_t
__CLZ(uint32_t value)
1093 return __builtin_clz(value);
1097#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
1098 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
1099 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
1100 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )
1107__STATIC_FORCEINLINE uint8_t __LDREXB(
volatile uint8_t *addr)
1111#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
1112 __ASM
volatile (
"ldrexb %0, %1" :
"=r" (result) :
"Q" (*addr) );
1117 __ASM
volatile (
"ldrexb %0, [%1]" :
"=r" (result) :
"r" (addr) :
"memory" );
1119 return ((uint8_t) result);
1129__STATIC_FORCEINLINE uint16_t __LDREXH(
volatile uint16_t *addr)
1133#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
1134 __ASM
volatile (
"ldrexh %0, %1" :
"=r" (result) :
"Q" (*addr) );
1139 __ASM
volatile (
"ldrexh %0, [%1]" :
"=r" (result) :
"r" (addr) :
"memory" );
1141 return ((uint16_t) result);
1151__STATIC_FORCEINLINE uint32_t __LDREXW(
volatile uint32_t *addr)
1155 __ASM
volatile (
"ldrex %0, %1" :
"=r" (result) :
"Q" (*addr) );
1168__STATIC_FORCEINLINE uint32_t __STREXB(uint8_t value,
volatile uint8_t *addr)
1172 __ASM
volatile (
"strexb %0, %2, %1" :
"=&r" (result),
"=Q" (*addr) :
"r" ((uint32_t)value) );
1185__STATIC_FORCEINLINE uint32_t __STREXH(uint16_t value,
volatile uint16_t *addr)
1189 __ASM
volatile (
"strexh %0, %2, %1" :
"=&r" (result),
"=Q" (*addr) :
"r" ((uint32_t)value) );
1202__STATIC_FORCEINLINE uint32_t __STREXW(uint32_t value,
volatile uint32_t *addr)
1206 __ASM
volatile (
"strex %0, %2, %1" :
"=&r" (result),
"=Q" (*addr) :
"r" (value) );
1215__STATIC_FORCEINLINE
void __CLREX(
void)
1217 __ASM
volatile (
"clrex" :::
"memory");
1226#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
1227 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
1228 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )
1236#define __SSAT(ARG1,ARG2) \
1239 int32_t __RES, __ARG1 = (ARG1); \
1240 __ASM ("ssat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
1252#define __USAT(ARG1,ARG2) \
1255 uint32_t __RES, __ARG1 = (ARG1); \
1256 __ASM ("usat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
1268__STATIC_FORCEINLINE uint32_t __RRX(uint32_t value)
1272 __ASM
volatile (
"rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
1283__STATIC_FORCEINLINE uint8_t __LDRBT(
volatile uint8_t *ptr)
1287#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
1288 __ASM
volatile (
"ldrbt %0, %1" :
"=r" (result) :
"Q" (*ptr) );
1293 __ASM
volatile (
"ldrbt %0, [%1]" :
"=r" (result) :
"r" (ptr) :
"memory" );
1295 return ((uint8_t) result);
1305__STATIC_FORCEINLINE uint16_t __LDRHT(
volatile uint16_t *ptr)
1309#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
1310 __ASM
volatile (
"ldrht %0, %1" :
"=r" (result) :
"Q" (*ptr) );
1315 __ASM
volatile (
"ldrht %0, [%1]" :
"=r" (result) :
"r" (ptr) :
"memory" );
1317 return ((uint16_t) result);
1327__STATIC_FORCEINLINE uint32_t __LDRT(
volatile uint32_t *ptr)
1331 __ASM
volatile (
"ldrt %0, %1" :
"=r" (result) :
"Q" (*ptr) );
1342__STATIC_FORCEINLINE
void __STRBT(uint8_t value,
volatile uint8_t *ptr)
1344 __ASM
volatile (
"strbt %1, %0" :
"=Q" (*ptr) :
"r" ((uint32_t)value) );
1354__STATIC_FORCEINLINE
void __STRHT(uint16_t value,
volatile uint16_t *ptr)
1356 __ASM
volatile (
"strht %1, %0" :
"=Q" (*ptr) :
"r" ((uint32_t)value) );
1366__STATIC_FORCEINLINE
void __STRT(uint32_t value,
volatile uint32_t *ptr)
1368 __ASM
volatile (
"strt %1, %0" :
"=Q" (*ptr) :
"r" (value) );
/* Software fallback for signed saturation (used when the ssat instruction is
 * unavailable): clamp val into [-(2^(sat-1)), 2^(sat-1)-1].
 * NOTE(review): the clamping branches and return statements of __SSAT are
 * missing from this extraction; only the range guard and limit computation
 * remain visible. */
1382__STATIC_FORCEINLINE int32_t
__SSAT(int32_t val, uint32_t sat)
1384 if ((sat >= 1U) && (sat <= 32U))
/* Largest representable signed value with `sat` bits, and its negative
 * counterpart (two's complement: min == -max - 1). */
1386 const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
1387 const int32_t min = -1 - max ;
/* Software fallback for unsigned saturation: clamp val into [0, 2^sat - 1].
 * NOTE(review): the negative-input and overflow branches are elided here. */
1407__STATIC_FORCEINLINE uint32_t
__USAT(int32_t val, uint32_t sat)
1411 const uint32_t max = ((1U << sat) - 1U);
1412 if (val > (int32_t)max)
1421 return (uint32_t)val;
1429#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
1430 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )
1437__STATIC_FORCEINLINE uint8_t __LDAB(
volatile uint8_t *ptr)
1441 __ASM
volatile (
"ldab %0, %1" :
"=r" (result) :
"Q" (*ptr) );
1442 return ((uint8_t) result);
1452__STATIC_FORCEINLINE uint16_t __LDAH(
volatile uint16_t *ptr)
1456 __ASM
volatile (
"ldah %0, %1" :
"=r" (result) :
"Q" (*ptr) );
1457 return ((uint16_t) result);
1467__STATIC_FORCEINLINE uint32_t __LDA(
volatile uint32_t *ptr)
1471 __ASM
volatile (
"lda %0, %1" :
"=r" (result) :
"Q" (*ptr) );
1482__STATIC_FORCEINLINE
void __STLB(uint8_t value,
volatile uint8_t *ptr)
1484 __ASM
volatile (
"stlb %1, %0" :
"=Q" (*ptr) :
"r" ((uint32_t)value) );
1494__STATIC_FORCEINLINE
void __STLH(uint16_t value,
volatile uint16_t *ptr)
1496 __ASM
volatile (
"stlh %1, %0" :
"=Q" (*ptr) :
"r" ((uint32_t)value) );
1506__STATIC_FORCEINLINE
void __STL(uint32_t value,
volatile uint32_t *ptr)
1508 __ASM
volatile (
"stl %1, %0" :
"=Q" (*ptr) :
"r" ((uint32_t)value) );
1518__STATIC_FORCEINLINE uint8_t __LDAEXB(
volatile uint8_t *ptr)
1522 __ASM
volatile (
"ldaexb %0, %1" :
"=r" (result) :
"Q" (*ptr) );
1523 return ((uint8_t) result);
1533__STATIC_FORCEINLINE uint16_t __LDAEXH(
volatile uint16_t *ptr)
1537 __ASM
volatile (
"ldaexh %0, %1" :
"=r" (result) :
"Q" (*ptr) );
1538 return ((uint16_t) result);
1548__STATIC_FORCEINLINE uint32_t __LDAEX(
volatile uint32_t *ptr)
1552 __ASM
volatile (
"ldaex %0, %1" :
"=r" (result) :
"Q" (*ptr) );
1565__STATIC_FORCEINLINE uint32_t __STLEXB(uint8_t value,
volatile uint8_t *ptr)
1569 __ASM
volatile (
"stlexb %0, %2, %1" :
"=&r" (result),
"=Q" (*ptr) :
"r" ((uint32_t)value) );
1582__STATIC_FORCEINLINE uint32_t __STLEXH(uint16_t value,
volatile uint16_t *ptr)
1586 __ASM
volatile (
"stlexh %0, %2, %1" :
"=&r" (result),
"=Q" (*ptr) :
"r" ((uint32_t)value) );
1599__STATIC_FORCEINLINE uint32_t __STLEX(uint32_t value,
volatile uint32_t *ptr)
1603 __ASM
volatile (
"stlex %0, %2, %1" :
"=&r" (result),
"=Q" (*ptr) :
"r" ((uint32_t)value) );
1619#if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))
1621__STATIC_FORCEINLINE uint32_t __SADD8(uint32_t op1, uint32_t op2)
1625 __ASM
volatile (
"sadd8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1629__STATIC_FORCEINLINE uint32_t __QADD8(uint32_t op1, uint32_t op2)
1633 __ASM
volatile (
"qadd8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1637__STATIC_FORCEINLINE uint32_t __SHADD8(uint32_t op1, uint32_t op2)
1641 __ASM
volatile (
"shadd8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1645__STATIC_FORCEINLINE uint32_t __UADD8(uint32_t op1, uint32_t op2)
1649 __ASM
volatile (
"uadd8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1653__STATIC_FORCEINLINE uint32_t __UQADD8(uint32_t op1, uint32_t op2)
1657 __ASM
volatile (
"uqadd8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1661__STATIC_FORCEINLINE uint32_t __UHADD8(uint32_t op1, uint32_t op2)
1665 __ASM
volatile (
"uhadd8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1670__STATIC_FORCEINLINE uint32_t __SSUB8(uint32_t op1, uint32_t op2)
1674 __ASM
volatile (
"ssub8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1678__STATIC_FORCEINLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2)
1682 __ASM
volatile (
"qsub8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1686__STATIC_FORCEINLINE uint32_t __SHSUB8(uint32_t op1, uint32_t op2)
1690 __ASM
volatile (
"shsub8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1694__STATIC_FORCEINLINE uint32_t __USUB8(uint32_t op1, uint32_t op2)
1698 __ASM
volatile (
"usub8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1702__STATIC_FORCEINLINE uint32_t __UQSUB8(uint32_t op1, uint32_t op2)
1706 __ASM
volatile (
"uqsub8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1710__STATIC_FORCEINLINE uint32_t __UHSUB8(uint32_t op1, uint32_t op2)
1714 __ASM
volatile (
"uhsub8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1719__STATIC_FORCEINLINE uint32_t __SADD16(uint32_t op1, uint32_t op2)
1723 __ASM
volatile (
"sadd16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1727__STATIC_FORCEINLINE uint32_t __QADD16(uint32_t op1, uint32_t op2)
1731 __ASM
volatile (
"qadd16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1735__STATIC_FORCEINLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2)
1739 __ASM
volatile (
"shadd16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1743__STATIC_FORCEINLINE uint32_t __UADD16(uint32_t op1, uint32_t op2)
1747 __ASM
volatile (
"uadd16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1751__STATIC_FORCEINLINE uint32_t __UQADD16(uint32_t op1, uint32_t op2)
1755 __ASM
volatile (
"uqadd16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1759__STATIC_FORCEINLINE uint32_t __UHADD16(uint32_t op1, uint32_t op2)
1763 __ASM
volatile (
"uhadd16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1767__STATIC_FORCEINLINE uint32_t __SSUB16(uint32_t op1, uint32_t op2)
1771 __ASM
volatile (
"ssub16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1775__STATIC_FORCEINLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2)
1779 __ASM
volatile (
"qsub16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1783__STATIC_FORCEINLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2)
1787 __ASM
volatile (
"shsub16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1791__STATIC_FORCEINLINE uint32_t __USUB16(uint32_t op1, uint32_t op2)
1795 __ASM
volatile (
"usub16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1799__STATIC_FORCEINLINE uint32_t __UQSUB16(uint32_t op1, uint32_t op2)
1803 __ASM
volatile (
"uqsub16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1807__STATIC_FORCEINLINE uint32_t __UHSUB16(uint32_t op1, uint32_t op2)
1811 __ASM
volatile (
"uhsub16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1815__STATIC_FORCEINLINE uint32_t __SASX(uint32_t op1, uint32_t op2)
1819 __ASM
volatile (
"sasx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1823__STATIC_FORCEINLINE uint32_t __QASX(uint32_t op1, uint32_t op2)
1827 __ASM
volatile (
"qasx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1831__STATIC_FORCEINLINE uint32_t __SHASX(uint32_t op1, uint32_t op2)
1835 __ASM
volatile (
"shasx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1839__STATIC_FORCEINLINE uint32_t __UASX(uint32_t op1, uint32_t op2)
1843 __ASM
volatile (
"uasx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1847__STATIC_FORCEINLINE uint32_t __UQASX(uint32_t op1, uint32_t op2)
1851 __ASM
volatile (
"uqasx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1855__STATIC_FORCEINLINE uint32_t __UHASX(uint32_t op1, uint32_t op2)
1859 __ASM
volatile (
"uhasx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1863__STATIC_FORCEINLINE uint32_t __SSAX(uint32_t op1, uint32_t op2)
1867 __ASM
volatile (
"ssax %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1871__STATIC_FORCEINLINE uint32_t __QSAX(uint32_t op1, uint32_t op2)
1875 __ASM
volatile (
"qsax %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1879__STATIC_FORCEINLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2)
1883 __ASM
volatile (
"shsax %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1887__STATIC_FORCEINLINE uint32_t __USAX(uint32_t op1, uint32_t op2)
1891 __ASM
volatile (
"usax %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1895__STATIC_FORCEINLINE uint32_t __UQSAX(uint32_t op1, uint32_t op2)
1899 __ASM
volatile (
"uqsax %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1903__STATIC_FORCEINLINE uint32_t __UHSAX(uint32_t op1, uint32_t op2)
1907 __ASM
volatile (
"uhsax %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1911__STATIC_FORCEINLINE uint32_t __USAD8(uint32_t op1, uint32_t op2)
1915 __ASM
volatile (
"usad8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1919__STATIC_FORCEINLINE uint32_t __USADA8(uint32_t op1, uint32_t op2, uint32_t op3)
1923 __ASM
volatile (
"usada8 %0, %1, %2, %3" :
"=r" (result) :
"r" (op1),
"r" (op2),
"r" (op3) );
1927#define __SSAT16(ARG1,ARG2) \
1929 int32_t __RES, __ARG1 = (ARG1); \
1930 __ASM ("ssat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
1934#define __USAT16(ARG1,ARG2) \
1936 uint32_t __RES, __ARG1 = (ARG1); \
1937 __ASM ("usat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
1941__STATIC_FORCEINLINE uint32_t __UXTB16(uint32_t op1)
1945 __ASM
volatile (
"uxtb16 %0, %1" :
"=r" (result) :
"r" (op1));
1949__STATIC_FORCEINLINE uint32_t __UXTAB16(uint32_t op1, uint32_t op2)
1953 __ASM
volatile (
"uxtab16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1957__STATIC_FORCEINLINE uint32_t __SXTB16(uint32_t op1)
1961 __ASM
volatile (
"sxtb16 %0, %1" :
"=r" (result) :
"r" (op1));
1965__STATIC_FORCEINLINE uint32_t __SXTAB16(uint32_t op1, uint32_t op2)
1969 __ASM
volatile (
"sxtab16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1973__STATIC_FORCEINLINE uint32_t __SMUAD (uint32_t op1, uint32_t op2)
1977 __ASM
volatile (
"smuad %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1981__STATIC_FORCEINLINE uint32_t __SMUADX (uint32_t op1, uint32_t op2)
1985 __ASM
volatile (
"smuadx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1989__STATIC_FORCEINLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3)
1993 __ASM
volatile (
"smlad %0, %1, %2, %3" :
"=r" (result) :
"r" (op1),
"r" (op2),
"r" (op3) );
1997__STATIC_FORCEINLINE uint32_t __SMLADX (uint32_t op1, uint32_t op2, uint32_t op3)
2001 __ASM
volatile (
"smladx %0, %1, %2, %3" :
"=r" (result) :
"r" (op1),
"r" (op2),
"r" (op3) );
2005__STATIC_FORCEINLINE uint64_t __SMLALD (uint32_t op1, uint32_t op2, uint64_t acc)
2014 __ASM
volatile (
"smlald %0, %1, %2, %3" :
"=r" (llr.w32[0]),
"=r" (llr.w32[1]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[0]),
"1" (llr.w32[1]) );
2016 __ASM
volatile (
"smlald %0, %1, %2, %3" :
"=r" (llr.w32[1]),
"=r" (llr.w32[0]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[1]),
"1" (llr.w32[0]) );
2022__STATIC_FORCEINLINE uint64_t __SMLALDX (uint32_t op1, uint32_t op2, uint64_t acc)
2031 __ASM
volatile (
"smlaldx %0, %1, %2, %3" :
"=r" (llr.w32[0]),
"=r" (llr.w32[1]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[0]),
"1" (llr.w32[1]) );
2033 __ASM
volatile (
"smlaldx %0, %1, %2, %3" :
"=r" (llr.w32[1]),
"=r" (llr.w32[0]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[1]),
"1" (llr.w32[0]) );
2039__STATIC_FORCEINLINE uint32_t __SMUSD (uint32_t op1, uint32_t op2)
2043 __ASM
volatile (
"smusd %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
2047__STATIC_FORCEINLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2)
2051 __ASM
volatile (
"smusdx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
2055__STATIC_FORCEINLINE uint32_t __SMLSD (uint32_t op1, uint32_t op2, uint32_t op3)
2059 __ASM
volatile (
"smlsd %0, %1, %2, %3" :
"=r" (result) :
"r" (op1),
"r" (op2),
"r" (op3) );
2063__STATIC_FORCEINLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3)
2067 __ASM
volatile (
"smlsdx %0, %1, %2, %3" :
"=r" (result) :
"r" (op1),
"r" (op2),
"r" (op3) );
2071__STATIC_FORCEINLINE uint64_t __SMLSLD (uint32_t op1, uint32_t op2, uint64_t acc)
2080 __ASM
volatile (
"smlsld %0, %1, %2, %3" :
"=r" (llr.w32[0]),
"=r" (llr.w32[1]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[0]),
"1" (llr.w32[1]) );
2082 __ASM
volatile (
"smlsld %0, %1, %2, %3" :
"=r" (llr.w32[1]),
"=r" (llr.w32[0]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[1]),
"1" (llr.w32[0]) );
2088__STATIC_FORCEINLINE uint64_t __SMLSLDX (uint32_t op1, uint32_t op2, uint64_t acc)
2097 __ASM
volatile (
"smlsldx %0, %1, %2, %3" :
"=r" (llr.w32[0]),
"=r" (llr.w32[1]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[0]),
"1" (llr.w32[1]) );
2099 __ASM
volatile (
"smlsldx %0, %1, %2, %3" :
"=r" (llr.w32[1]),
"=r" (llr.w32[0]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[1]),
"1" (llr.w32[0]) );
2105__STATIC_FORCEINLINE uint32_t __SEL (uint32_t op1, uint32_t op2)
2109 __ASM
volatile (
"sel %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
2113__STATIC_FORCEINLINE int32_t __QADD( int32_t op1, int32_t op2)
2117 __ASM
volatile (
"qadd %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
2121__STATIC_FORCEINLINE int32_t __QSUB( int32_t op1, int32_t op2)
2125 __ASM
volatile (
"qsub %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
2130#define __PKHBT(ARG1,ARG2,ARG3) \
2132 uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
2133 __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
2137#define __PKHTB(ARG1,ARG2,ARG3) \
2139 uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
2141 __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2) ); \
2143 __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
2148#define __PKHBT(ARG1,ARG2,ARG3) ( ((((uint32_t)(ARG1)) ) & 0x0000FFFFUL) | \
2149 ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL) )
2151#define __PKHTB(ARG1,ARG2,ARG3) ( ((((uint32_t)(ARG1)) ) & 0xFFFF0000UL) | \
2152 ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL) )
2154__STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
2158 __ASM
volatile (
"smmla %0, %1, %2, %3" :
"=r" (result):
"r" (op1),
"r" (op2),
"r" (op3) );
2166#pragma GCC diagnostic pop
__STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
Signed Saturate.
Definition cmsis_gcc.h:1382
__STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
Unsigned Saturate.
Definition cmsis_gcc.h:1407
__STATIC_FORCEINLINE void __DSB(void)
Data Synchronization Barrier.
Definition cmsis_gcc.h:944
__STATIC_FORCEINLINE uint32_t __REV16(uint32_t value)
Reverse byte order (16 bit)
Definition cmsis_gcc.h:986
__STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
Rotate Right in unsigned value (32 bit)
Definition cmsis_gcc.h:1021
__STATIC_FORCEINLINE void __DMB(void)
Data Memory Barrier.
Definition cmsis_gcc.h:955
__STATIC_FORCEINLINE int16_t __REVSH(int16_t value)
Reverse byte order (16 bit)
Definition cmsis_gcc.h:1001
__STATIC_FORCEINLINE uint32_t __REV(uint32_t value)
Reverse byte order (32 bit)
Definition cmsis_gcc.h:967
__STATIC_FORCEINLINE void __ISB(void)
Instruction Synchronization Barrier.
Definition cmsis_gcc.h:933
__STATIC_FORCEINLINE uint8_t __CLZ(uint32_t value)
Count leading zeros.
Definition cmsis_gcc.h:1078
__STATIC_FORCEINLINE uint32_t __RBIT(uint32_t value)
Reverse bit order of value.
Definition cmsis_gcc.h:1048
__STATIC_FORCEINLINE void __set_CONTROL(uint32_t control)
Set Control Register.
Definition cmsis_gcc.h:248
__STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack)
Set Main Stack Pointer.
Definition cmsis_gcc.h:398
__STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack)
Set Process Stack Pointer.
Definition cmsis_gcc.h:344
__STATIC_FORCEINLINE void __disable_irq(void)
Disable IRQ Interrupts.
Definition cmsis_gcc.h:207
__STATIC_FORCEINLINE uint32_t __get_FPSCR(void)
Get FPSCR.
Definition cmsis_gcc.h:833
__STATIC_FORCEINLINE uint32_t __get_PRIMASK(void)
Get Priority Mask.
Definition cmsis_gcc.h:449
__STATIC_FORCEINLINE uint32_t __get_xPSR(void)
Get xPSR Register.
Definition cmsis_gcc.h:300
__STATIC_FORCEINLINE void __set_FPSCR(uint32_t fpscr)
Set FPSCR.
Definition cmsis_gcc.h:859
__STATIC_FORCEINLINE uint32_t __get_MSP(void)
Get Main Stack Pointer.
Definition cmsis_gcc.h:368
__STATIC_FORCEINLINE uint32_t __get_CONTROL(void)
Get Control Register.
Definition cmsis_gcc.h:218
__STATIC_FORCEINLINE uint32_t __get_PSP(void)
Get Process Stack Pointer.
Definition cmsis_gcc.h:314
__STATIC_FORCEINLINE uint32_t __get_APSR(void)
Get APSR Register.
Definition cmsis_gcc.h:286
__STATIC_FORCEINLINE void __enable_irq(void)
Enable IRQ Interrupts.
Definition cmsis_gcc.h:196
__STATIC_FORCEINLINE uint32_t __get_IPSR(void)
Get IPSR Register.
Definition cmsis_gcc.h:272
__STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask)
Set Priority Mask.
Definition cmsis_gcc.h:479
__STATIC_FORCEINLINE __NO_RETURN void __cmsis_start(void)
Initializes data and bss sections.
Definition cmsis_gcc.h:131
Definition cmsis_gcc.h:74