/**************************************************************************//**
 * @file     core_cmInstr.h
 * @brief    CMSIS Cortex-M Core Instruction Access Header File
 * @version  V2.10
 * @date     19. July 2011
 *
 * @note
 * Copyright (C) 2009-2011 ARM Limited. All rights reserved.
 *
 * @par
 * ARM Limited (ARM) is supplying this software for use with Cortex-M
 * processor based microcontrollers.  This file can be freely distributed
 * within development tools that are supporting such ARM based processors.
 *
 * @par
 * THIS SOFTWARE IS PROVIDED "AS IS".  NO WARRANTIES, WHETHER EXPRESS, IMPLIED
 * OR STATUTORY, INCLUDING, BUT NOT LIMITED TO, IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE APPLY TO THIS SOFTWARE.
 * ARM SHALL NOT, IN ANY CIRCUMSTANCES, BE LIABLE FOR SPECIAL, INCIDENTAL, OR
 * CONSEQUENTIAL DAMAGES, FOR ANY REASON WHATSOEVER.
 *
 ******************************************************************************/

#ifndef __CORE_CMINSTR_H
#define __CORE_CMINSTR_H


/* ##########################  Core Instruction Access  ######################### */
/** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface
  Access to dedicated instructions
  @{
*/

#if   defined ( __CC_ARM ) /*------------------RealView Compiler -----------------*/
/* ARM armcc specific functions */

#if (__ARMCC_VERSION < 400677)
  #error "Please use ARM Compiler Toolchain V4.0.677 or later!"
#endif


/** \brief  No Operation
    No Operation does nothing. This instruction can be used for code alignment purposes.
 */
#define __NOP                             __nop


/** \brief  Wait For Interrupt
    Wait For Interrupt is a hint instruction that suspends execution
    until one of a number of events occurs.
 */
#define __WFI                             __wfi


/** \brief  Wait For Event
    Wait For Event is a hint instruction that permits the processor to enter
    a low-power state until one of a number of events occurs.
 */
#define __WFE                             __wfe


/** \brief  Send Event
    Send Event is a hint instruction. It causes an event to be signaled to the CPU.
 */
#define __SEV                             __sev

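/* Usage sketch (illustrative only, not part of the CMSIS API): a typical
   idle loop built on __WFE. The flag 'data_ready' is a hypothetical variable
   that an interrupt handler (or __SEV from another context) would set.

     volatile uint32_t data_ready = 0;

     void wait_for_data(void)
     {
       while (data_ready == 0U) {
         __WFE();                     // sleep until an event or interrupt arrives
       }
     }
*/
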
/** \brief  Instruction Synchronization Barrier
    Instruction Synchronization Barrier flushes the pipeline in the processor,
    so that all instructions following the ISB are fetched from cache or
    memory, after the instruction has been completed.
 */
#define __ISB()                           __isb(0xF)


/** \brief  Data Synchronization Barrier
    This function acts as a special kind of Data Memory Barrier.
    It completes when all explicit memory accesses before this instruction complete.
 */
#define __DSB()                           __dsb(0xF)


/** \brief  Data Memory Barrier
    This function ensures the apparent order of the explicit memory operations before
    and after the instruction, without ensuring their completion.
 */
#define __DMB()                           __dmb(0xF)

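/* Usage sketch (illustrative only): __DMB guarantees that the store to
   'buffer' is observed before the store to 'buffer_valid' (both names are
   hypothetical). __DSB followed by __ISB is the usual sequence when a change
   must take effect before any further instructions execute.

     volatile uint32_t buffer;
     volatile uint32_t buffer_valid;

     void publish(uint32_t value)
     {
       buffer       = value;
       __DMB();                       // order the data store before the flag store
       buffer_valid = 1U;
     }
*/
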
/** \brief  Reverse byte order (32 bit)
    This function reverses the byte order in an integer value.
    \param [in]    value  Value to reverse
    \return               Reversed value
 */
#define __REV                             __rev


/** \brief  Reverse byte order (16 bit)
    This function reverses the byte order in two unsigned short values.
    \param [in]    value  Value to reverse
    \return               Reversed value
 */
static __INLINE __ASM uint32_t __REV16(uint32_t value)
{
  rev16 r0, r0
  bx lr
}


/** \brief  Reverse byte order in signed short value
    This function reverses the byte order in a signed short value with sign extension to integer.
    \param [in]    value  Value to reverse
    \return               Reversed value
 */
static __INLINE __ASM int32_t __REVSH(int32_t value)
{
  revsh r0, r0
  bx lr
}

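/* Usage sketch (illustrative only): byte reversal is typically used for
   endianness conversion, e.g. between the little-endian Cortex-M memory view
   and big-endian network byte order ('to_big_endian32' is a hypothetical name).

     uint32_t to_big_endian32(uint32_t host_value)
     {
       return __REV(host_value);      // 0x12345678 -> 0x78563412
     }

   __REV16 swaps the bytes within each halfword independently
   (0x1234ABCD -> 0x3412CDAB), and __REVSH byte-swaps the low halfword and
   sign-extends the result to 32 bits.
*/
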
#if       (__CORTEX_M >= 0x03)

/** \brief  Reverse bit order of value
    This function reverses the bit order of the given value.
    \param [in]    value  Value to reverse
    \return               Reversed value
 */
#define __RBIT                            __rbit


/** \brief  LDR Exclusive (8 bit)
    This function performs an exclusive LDR command for an 8 bit value.
    \param [in]    ptr  Pointer to data
    \return             value of type uint8_t at (*ptr)
 */
#define __LDREXB(ptr)                     ((uint8_t ) __ldrex(ptr))


/** \brief  LDR Exclusive (16 bit)
    This function performs an exclusive LDR command for 16 bit values.
    \param [in]    ptr  Pointer to data
    \return        value of type uint16_t at (*ptr)
 */
#define __LDREXH(ptr)                     ((uint16_t) __ldrex(ptr))


/** \brief  LDR Exclusive (32 bit)
    This function performs an exclusive LDR command for 32 bit values.
    \param [in]    ptr  Pointer to data
    \return        value of type uint32_t at (*ptr)
 */
#define __LDREXW(ptr)                     ((uint32_t ) __ldrex(ptr))


/** \brief  STR Exclusive (8 bit)
    This function performs an exclusive STR command for 8 bit values.
    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
#define __STREXB(value, ptr)              __strex(value, ptr)


/** \brief  STR Exclusive (16 bit)
    This function performs an exclusive STR command for 16 bit values.
    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
#define __STREXH(value, ptr)              __strex(value, ptr)


/** \brief  STR Exclusive (32 bit)
    This function performs an exclusive STR command for 32 bit values.
    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
#define __STREXW(value, ptr)              __strex(value, ptr)


/** \brief  Remove the exclusive lock
    This function removes the exclusive lock which is created by LDREX.
 */
#define __CLREX                           __clrex

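/* Usage sketch (illustrative only): the exclusive load/store intrinsics are
   normally used as a pair inside a retry loop to build an atomic
   read-modify-write sequence. 'counter' is a hypothetical shared variable.

     volatile uint32_t counter;

     void atomic_increment(void)
     {
       uint32_t value;

       do {
         value = __LDREXW(&counter);         // load and set exclusive monitor
         value++;
       } while (__STREXW(value, &counter));  // 1 means the store failed, so retry
     }
*/
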
/** \brief  Signed Saturate
    This function saturates a signed value.
    \param [in]  value  Value to be saturated
    \param [in]    sat  Bit position to saturate to (1..32)
    \return             Saturated value
 */
#define __SSAT                            __ssat


/** \brief  Unsigned Saturate
    This function saturates an unsigned value.
    \param [in]  value  Value to be saturated
    \param [in]    sat  Bit position to saturate to (0..31)
    \return             Saturated value
 */
#define __USAT                            __usat

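/* Worked example (illustrative only): __SSAT(x, 8) clamps a signed value to
   the signed 8-bit range, so __SSAT(300, 8) yields 127 and __SSAT(-300, 8)
   yields -128. __USAT(x, 8) clamps to 0..255, so __USAT(300, 8) yields 255
   and a negative input yields 0. The bit-position argument must be a
   compile-time constant.

     int32_t clamp_to_int8(int32_t x)
     {
       return (int32_t)__SSAT(x, 8);  // result is always within -128..127
     }
*/
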
/** \brief  Count leading zeros
    This function counts the number of leading zeros of a data value.
    \param [in]  value  Value to count the leading zeros
    \return             number of leading zeros in value
 */
#define __CLZ                             __clz

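/* Usage sketch (illustrative only): __CLZ is often used to locate the most
   significant set bit ('highest_set_bit' is a hypothetical helper). Note
   that __CLZ(0) returns 32, so zero is handled separately.

     uint32_t highest_set_bit(uint32_t x)
     {
       return (x == 0U) ? 0U : (31U - __CLZ(x));   // e.g. x = 0x80 -> 7
     }
*/
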
#endif /* (__CORTEX_M >= 0x03) */



#elif defined ( __ICCARM__ ) /*------------------ ICC Compiler -------------------*/
/* IAR iccarm specific functions */

#include <cmsis_iar.h>


#elif defined ( __GNUC__ ) /*------------------ GNU Compiler ---------------------*/
/* GNU gcc specific functions */

/** \brief  No Operation
    No Operation does nothing. This instruction can be used for code alignment purposes.
 */
__attribute__( ( always_inline ) ) static __INLINE void __NOP(void)
{
  __ASM volatile ("nop");
}


/** \brief  Wait For Interrupt
    Wait For Interrupt is a hint instruction that suspends execution
    until one of a number of events occurs.
 */
__attribute__( ( always_inline ) ) static __INLINE void __WFI(void)
{
  __ASM volatile ("wfi");
}


/** \brief  Wait For Event
    Wait For Event is a hint instruction that permits the processor to enter
    a low-power state until one of a number of events occurs.
 */
__attribute__( ( always_inline ) ) static __INLINE void __WFE(void)
{
  __ASM volatile ("wfe");
}


/** \brief  Send Event
    Send Event is a hint instruction. It causes an event to be signaled to the CPU.
 */
__attribute__( ( always_inline ) ) static __INLINE void __SEV(void)
{
  __ASM volatile ("sev");
}


/** \brief  Instruction Synchronization Barrier
    Instruction Synchronization Barrier flushes the pipeline in the processor,
    so that all instructions following the ISB are fetched from cache or
    memory, after the instruction has been completed.
 */
__attribute__( ( always_inline ) ) static __INLINE void __ISB(void)
{
  __ASM volatile ("isb");
}


/** \brief  Data Synchronization Barrier
    This function acts as a special kind of Data Memory Barrier.
    It completes when all explicit memory accesses before this instruction complete.
 */
__attribute__( ( always_inline ) ) static __INLINE void __DSB(void)
{
  __ASM volatile ("dsb");
}


/** \brief  Data Memory Barrier
    This function ensures the apparent order of the explicit memory operations before
    and after the instruction, without ensuring their completion.
 */
__attribute__( ( always_inline ) ) static __INLINE void __DMB(void)
{
  __ASM volatile ("dmb");
}

/** \brief  Reverse byte order (32 bit)
    This function reverses the byte order in an integer value.
    \param [in]    value  Value to reverse
    \return               Reversed value
 */
__attribute__( ( always_inline ) ) static __INLINE uint32_t __REV(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rev %0, %1" : "=r" (result) : "r" (value) );
  return(result);
}


/** \brief  Reverse byte order (16 bit)
    This function reverses the byte order in two unsigned short values.
    \param [in]    value  Value to reverse
    \return               Reversed value
 */
__attribute__( ( always_inline ) ) static __INLINE uint32_t __REV16(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rev16 %0, %1" : "=r" (result) : "r" (value) );
  return(result);
}


/** \brief  Reverse byte order in signed short value
    This function reverses the byte order in a signed short value with sign extension to integer.
    \param [in]    value  Value to reverse
    \return               Reversed value
 */
__attribute__( ( always_inline ) ) static __INLINE int32_t __REVSH(int32_t value)
{
  uint32_t result;

  __ASM volatile ("revsh %0, %1" : "=r" (result) : "r" (value) );
  return (int32_t)(result);
}

#if       (__CORTEX_M >= 0x03)

/** \brief  Reverse bit order of value
    This function reverses the bit order of the given value.
    \param [in]    value  Value to reverse
    \return               Reversed value
 */
__attribute__( ( always_inline ) ) static __INLINE uint32_t __RBIT(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) );
  return(result);
}


/** \brief  LDR Exclusive (8 bit)
    This function performs an exclusive LDR command for an 8 bit value.
    \param [in]    ptr  Pointer to data
    \return             value of type uint8_t at (*ptr)
 */
__attribute__( ( always_inline ) ) static __INLINE uint8_t __LDREXB(volatile uint8_t *addr)
{
  uint8_t result;

  __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) );
  return(result);
}


/** \brief  LDR Exclusive (16 bit)
    This function performs an exclusive LDR command for 16 bit values.
    \param [in]    ptr  Pointer to data
    \return        value of type uint16_t at (*ptr)
 */
__attribute__( ( always_inline ) ) static __INLINE uint16_t __LDREXH(volatile uint16_t *addr)
{
  uint16_t result;

  __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) );
  return(result);
}


/** \brief  LDR Exclusive (32 bit)
    This function performs an exclusive LDR command for 32 bit values.
    \param [in]    ptr  Pointer to data
    \return        value of type uint32_t at (*ptr)
 */
__attribute__( ( always_inline ) ) static __INLINE uint32_t __LDREXW(volatile uint32_t *addr)
{
  uint32_t result;

  __ASM volatile ("ldrex %0, [%1]" : "=r" (result) : "r" (addr) );
  return(result);
}


/** \brief  STR Exclusive (8 bit)
    This function performs an exclusive STR command for 8 bit values.
    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
__attribute__( ( always_inline ) ) static __INLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
{
  uint32_t result;

  __ASM volatile ("strexb %0, %2, [%1]" : "=r" (result) : "r" (addr), "r" (value) );
  return(result);
}


/** \brief  STR Exclusive (16 bit)
    This function performs an exclusive STR command for 16 bit values.
    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
__attribute__( ( always_inline ) ) static __INLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
{
  uint32_t result;

  __ASM volatile ("strexh %0, %2, [%1]" : "=r" (result) : "r" (addr), "r" (value) );
  return(result);
}


/** \brief  STR Exclusive (32 bit)
    This function performs an exclusive STR command for 32 bit values.
    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
__attribute__( ( always_inline ) ) static __INLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
{
  uint32_t result;

  __ASM volatile ("strex %0, %2, [%1]" : "=r" (result) : "r" (addr), "r" (value) );
  return(result);
}


/** \brief  Remove the exclusive lock
    This function removes the exclusive lock which is created by LDREX.
 */
__attribute__( ( always_inline ) ) static __INLINE void __CLREX(void)
{
  __ASM volatile ("clrex");
}

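/* Usage sketch (illustrative only): a minimal spinlock built from the
   exclusive-access and barrier intrinsics above. 'lock' is a hypothetical
   variable where 0 means free and 1 means taken.

     volatile uint32_t lock;

     void spin_lock(void)
     {
       do {
         while (__LDREXW(&lock) != 0U) { }      // spin while it reads as taken
       } while (__STREXW(1U, &lock) != 0U);     // try to claim; retry on failure
       __DMB();                                 // lock is held before protected accesses
     }

     void spin_unlock(void)
     {
       __DMB();                                 // protected accesses finish first
       lock = 0U;
     }
*/
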
/** \brief  Signed Saturate
    This function saturates a signed value.
    \param [in]  value  Value to be saturated
    \param [in]    sat  Bit position to saturate to (1..32)
    \return             Saturated value
 */
#define __SSAT(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })


/** \brief  Unsigned Saturate
    This function saturates an unsigned value.
    \param [in]  value  Value to be saturated
    \param [in]    sat  Bit position to saturate to (0..31)
    \return             Saturated value
 */
#define __USAT(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })


/** \brief  Count leading zeros
    This function counts the number of leading zeros of a data value.
    \param [in]  value  Value to count the leading zeros
    \return             number of leading zeros in value
 */
__attribute__( ( always_inline ) ) static __INLINE uint8_t __CLZ(uint32_t value)
{
  uint8_t result;

  __ASM volatile ("clz %0, %1" : "=r" (result) : "r" (value) );
  return(result);
}

#endif /* (__CORTEX_M >= 0x03) */




#elif defined ( __TASKING__ ) /*------------------ TASKING Compiler --------------*/
/* TASKING carm specific functions */

/*
 * The CMSIS functions have been implemented as intrinsics in the compiler.
 * Please use "carm -?i" to get an up-to-date list of all intrinsics,
 * including the CMSIS ones.
 */

#endif

/*@}*/ /* end of group CMSIS_Core_InstructionInterface */

#endif /* __CORE_CMINSTR_H */