#ifndef __CORE_CMINSTR_H
#define __CORE_CMINSTR_H

/* ##########################  Core Instruction Access  ######################### */

#if   defined ( __CC_ARM ) /*------------------ RealView Compiler ----------------*/
/* ARM armcc specific functions */

#if (__ARMCC_VERSION < 400677)
  #error "Please use ARM Compiler Toolchain V4.0.677 or later!"
#endif

/* Core instructions that map directly onto armcc intrinsics */
#define __ISB()                           __isb(0xF)   /* Instruction Synchronization Barrier */
#define __DSB()                           __dsb(0xF)   /* Data Synchronization Barrier        */
#define __DMB()                           __dmb(0xF)   /* Data Memory Barrier                 */

#if       (__CORTEX_M >= 0x03)

#define __RBIT                            __rbit       /* Reverse bit order of value          */
#define __LDREXB(ptr)                     ((uint8_t ) __ldrex(ptr))
#define __LDREXH(ptr)                     ((uint16_t) __ldrex(ptr))
#define __LDREXW(ptr)                     ((uint32_t ) __ldrex(ptr))
#define __STREXB(value, ptr)              __strex(value, ptr)
#define __STREXH(value, ptr)              __strex(value, ptr)
#define __STREXW(value, ptr)              __strex(value, ptr)
#define __CLREX                           __clrex      /* Remove the exclusive lock           */
#define __SSAT                            __ssat       /* Signed Saturate                     */
#define __USAT                            __usat       /* Unsigned Saturate                   */

#endif /* (__CORTEX_M >= 0x03) */


#elif defined ( __ICCARM__ ) /*------------------ ICC Compiler -------------------*/
/* IAR iccarm specific functions */
#include <cmsis_iar.h>


#elif defined ( __TMS470__ ) /*---------------- TI CCS Compiler ------------------*/
/* TI CCS specific functions */
#include <cmsis_ccs.h>


#elif defined ( __GNUC__ ) /*------------------ GNU Compiler ---------------------*/
/* GNU gcc specific functions */

/** \brief  No Operation */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __NOP(void)
{
  __ASM volatile ("nop");
}
/** \brief  Wait For Interrupt */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __WFI(void)
{
  __ASM volatile ("wfi");
}
/** \brief  Wait For Event */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __WFE(void)
{
  __ASM volatile ("wfe");
}
/** \brief  Send Event */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __SEV(void)
{
  __ASM volatile ("sev");
}
/** \brief  Instruction Synchronization Barrier */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __ISB(void)
{
  __ASM volatile ("isb");
}
/** \brief  Data Synchronization Barrier */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __DSB(void)
{
  __ASM volatile ("dsb");
}
/** \brief  Data Memory Barrier */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __DMB(void)
{
  __ASM volatile ("dmb");
}
/** \brief  Reverse byte order (32 bit) */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __REV(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rev %0, %1" : "=r" (result) : "r" (value) );
  return(result);
}
/** \brief  Reverse byte order (16 bit) */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __REV16(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rev16 %0, %1" : "=r" (result) : "r" (value) );
  return(result);
}
/** \brief  Reverse byte order in signed short value */
__attribute__( ( always_inline ) ) __STATIC_INLINE int32_t __REVSH(int32_t value)
{
  int32_t result;

  __ASM volatile ("revsh %0, %1" : "=r" (result) : "r" (value) );
  return(result);
}
/** \brief  Rotate Right in unsigned value (32 bit) */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
{
  __ASM volatile ("ror %0, %0, %1" : "+r" (op1) : "r" (op2) );
  return(op1);
}
#if       (__CORTEX_M >= 0x03)

/** \brief  Reverse bit order of value */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __RBIT(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) );
  return(result);
}
/** \brief  LDR Exclusive (8 bit) */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint8_t __LDREXB(volatile uint8_t *addr)
{
  uint8_t result;

  __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) );
  return(result);
}
/** \brief  LDR Exclusive (16 bit) */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint16_t __LDREXH(volatile uint16_t *addr)
{
  uint16_t result;

  __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) );
  return(result);
}
/** \brief  LDR Exclusive (32 bit) */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __LDREXW(volatile uint32_t *addr)
{
  uint32_t result;

  __ASM volatile ("ldrex %0, [%1]" : "=r" (result) : "r" (addr) );
  return(result);
}
/** \brief  STR Exclusive (8 bit): returns 0 on success, 1 otherwise */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
{
  uint32_t result;

  __ASM volatile ("strexb %0, %2, [%1]" : "=&r" (result) : "r" (addr), "r" (value) );
  return(result);
}
/** \brief  STR Exclusive (16 bit): returns 0 on success, 1 otherwise */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
{
  uint32_t result;

  __ASM volatile ("strexh %0, %2, [%1]" : "=&r" (result) : "r" (addr), "r" (value) );
  return(result);
}
/** \brief  STR Exclusive (32 bit): returns 0 on success, 1 otherwise */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
{
  uint32_t result;

  __ASM volatile ("strex %0, %2, [%1]" : "=&r" (result) : "r" (addr), "r" (value) );
  return(result);
}
/** \brief  Remove the exclusive lock */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __CLREX(void)
{
  __ASM volatile ("clrex");
}
/** \brief  Signed Saturate a value to a given bit position */
#define __SSAT(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

/** \brief  Unsigned Saturate a value to a given bit position */
#define __USAT(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

/** \brief  Count leading zeros */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint8_t __CLZ(uint32_t value)
{
  uint8_t result;

  __ASM volatile ("clz %0, %1" : "=r" (result) : "r" (value) );
  return(result);
}
#endif /* (__CORTEX_M >= 0x03) */


#elif defined ( __TASKING__ ) /*------------------ TASKING Compiler --------------*/
/* TASKING carm specific functions: the CMSIS core instructions are provided
   as compiler intrinsics, so no additional definitions are required here. */

#endif

#endif /* __CORE_CMINSTR_H */