Browse source code

macro check

Added checks if a macro is defined before it is used.
Martin Günther 10 years ago
parent
commit
1c3e8f13e0

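The change targets a common preprocessor pitfall: an identifier that is not defined as a macro evaluates to 0 inside an #if, which compiles but trips -Wundef under GCC and Clang. A minimal sketch of the before/after pattern (illustrative, not taken from the diff):

/* Before: if __FPU_USED was never defined, the condition silently
   becomes (0 == 1U); -Wundef flags the undefined identifier. */
#if (__FPU_USED == 1U)
  /* FPU-specific code */
#endif

/* After: the macro is tested with defined() first, so the condition
   is well-formed whether or not __FPU_USED exists. */
#if (defined (__FPU_USED) && (__FPU_USED == 1U))
  /* FPU-specific code */
#endif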
+ 19 - 11
CMSIS/Core/Include/cmsis_armcc.h

@@ -30,6 +30,7 @@
   #error "Please use ARM Compiler Toolchain V4.0.677 or later!"
 #endif
 
+
 /* ###########################  Core Function Access  ########################### */
 /** \ingroup  CMSIS_Core_FunctionInterface
     \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
@@ -171,7 +172,8 @@ __STATIC_INLINE void __set_PRIMASK(uint32_t priMask)
 }
 
 
-#if (defined (__CORTEX_M) && (__CORTEX_M >= 0x03U) || (defined (__CORTEX_SC) && (__CORTEX_SC >= 300U)))
+#if ((defined (__CORTEX_M ) && (__CORTEX_M  >=   3U)) || \
+     (defined (__CORTEX_SC) && (__CORTEX_SC >= 300U))     )
 
 /**
   \brief   Enable FIQ
@@ -249,10 +251,11 @@ __STATIC_INLINE void __set_FAULTMASK(uint32_t faultMask)
   __regFaultMask = (faultMask & (uint32_t)1);
 }
 
-#endif /* (defined (__CORTEX_M) && (__CORTEX_M >= 0x03U) || (defined (__CORTEX_SC) && (__CORTEX_SC >= 300U))) */
+#endif /* ((defined (__CORTEX_M ) && (__CORTEX_M  >=   3U)) || \
+           (defined (__CORTEX_SC) && (__CORTEX_SC >= 300U))     ) */
 
 
-#if       (defined (__CORTEX_M) && ((__CORTEX_M == 0x04U) || (__CORTEX_SC == 0x07U)))
+#if (defined (__CORTEX_M) && (__CORTEX_M >= 4U))
 
 /**
   \brief   Get FPSCR
@@ -261,7 +264,8 @@ __STATIC_INLINE void __set_FAULTMASK(uint32_t faultMask)
  */
 __STATIC_INLINE uint32_t __get_FPSCR(void)
 {
-#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && (defined (__FPU_USED) && (__FPU_USED == 1U)))
+#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
+     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
   register uint32_t __regfpscr         __ASM("fpscr");
   return(__regfpscr);
 #else
@@ -277,13 +281,14 @@ __STATIC_INLINE uint32_t __get_FPSCR(void)
  */
 __STATIC_INLINE void __set_FPSCR(uint32_t fpscr)
 {
-#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && (defined (__FPU_USED) && (__FPU_USED == 1U)))
+#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
+     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
   register uint32_t __regfpscr         __ASM("fpscr");
   __regfpscr = (fpscr);
 #endif
 }
 
-#endif /* (defined (__CORTEX_M) && ((__CORTEX_M == 0x04U) || (__CORTEX_SC == 0x07U))) */
+#endif /* (defined (__CORTEX_M) && (__CORTEX_M >= 4U)) */
 
 
 
@@ -423,7 +428,8 @@ __attribute__((section(".revsh_text"))) __STATIC_INLINE __ASM int32_t __REVSH(in
   \param [in]    value  Value to reverse
   \return               Reversed value
  */
-#if (defined (__CORTEX_M) && (__CORTEX_M >= 0x03U) || (defined (__CORTEX_SC) && (__CORTEX_SC >= 300U)))
+#if ((defined (__CORTEX_M ) && (__CORTEX_M  >=   3U)) || \
+     (defined (__CORTEX_SC) && (__CORTEX_SC >= 300U))     )
   #define __RBIT                          __rbit
 #else
 __attribute__((always_inline)) __STATIC_INLINE uint32_t __RBIT(uint32_t value)
@@ -453,7 +459,8 @@ __attribute__((always_inline)) __STATIC_INLINE uint32_t __RBIT(uint32_t value)
 #define __CLZ                             __clz
 
 
-#if (defined (__CORTEX_M) && (__CORTEX_M >= 0x03U) || (defined (__CORTEX_SC) && (__CORTEX_SC >= 300U)))
+#if ((defined (__CORTEX_M ) && (__CORTEX_M  >=   3U)) || \
+     (defined (__CORTEX_SC) && (__CORTEX_SC >= 300U))     )
 
 /**
   \brief   LDR Exclusive (8 bit)
@@ -635,7 +642,8 @@ __attribute__((section(".rrx_text"))) __STATIC_INLINE __ASM uint32_t __RRX(uint3
  */
 #define __STRT(value, ptr)                __strt(value, ptr)
 
-#endif /* (defined (__CORTEX_M) && (__CORTEX_M >= 0x03U) || (defined (__CORTEX_SC) && (__CORTEX_SC >= 300U))) */
+#endif /* ((defined (__CORTEX_M ) && (__CORTEX_M >=    3U)) || \
+           (defined (__CORTEX_SC) && (__CORTEX_SC >= 300U))     ) */
 
 /*@}*/ /* end of group CMSIS_Core_InstructionInterface */
 
@@ -646,7 +654,7 @@ __attribute__((section(".rrx_text"))) __STATIC_INLINE __ASM uint32_t __RRX(uint3
   @{
 */
 
-#if (defined (__CORTEX_M) && (__CORTEX_M >= 0x04U))  /* only for Cortex-M4 and above */
+#if (defined (__CORTEX_M) && (__CORTEX_M >= 4U))
 
 
 #define __SADD8                           __sadd8
@@ -718,7 +726,7 @@ __attribute__((section(".rrx_text"))) __STATIC_INLINE __ASM uint32_t __RRX(uint3
 #define __SMMLA(ARG1,ARG2,ARG3)          ( (int32_t)((((int64_t)(ARG1) * (ARG2)) + \
                                                       ((int64_t)(ARG3) << 32U)     ) >> 32U))
 
-#endif /* (defined (__CORTEX_M) && (__CORTEX_M >= 0x04U)) */
+#endif /* (defined (__CORTEX_M) && (__CORTEX_M >= 4U)) */
 /*@} end of group CMSIS_SIMD_intrinsics */
 
 

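One detail in the cmsis_armcc.h hunks above: the old FPSCR guard evaluated __CORTEX_SC without a defined() check, which is exactly the pattern this commit removes, and the replacement also widens the match from exactly 0x04U to any core number of 4 or higher. A minimal sketch of the difference on a plain Cortex-M4 build (HAVE_FPSCR_ACCESS is a hypothetical name, not from the headers):

#define __CORTEX_M 0x04U   /* as a Cortex-M4 device header would */
/* __CORTEX_SC deliberately left undefined */

/* Old guard: __CORTEX_SC expands to 0, so the branch is still chosen
   correctly, but -Wundef reports the undefined identifier. */
#if (defined (__CORTEX_M) && ((__CORTEX_M == 0x04U) || (__CORTEX_SC == 0x07U)))
  #define HAVE_FPSCR_ACCESS 1
#endif

/* New guard: only macros known to be defined are evaluated. */
#if (defined (__CORTEX_M) && (__CORTEX_M >= 4U))
  #define HAVE_FPSCR_ACCESS 1
#endif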
+ 22 - 18
CMSIS/Core/Include/cmsis_armclang.h

@@ -360,7 +360,7 @@ __attribute__((always_inline)) __STATIC_INLINE void __TZ_set_PRIMASK_NS(uint32_t
 #endif
 
 
-#if ((__ARM_ARCH_7M__ == 1U) || (__ARM_ARCH_7EM__ == 1U) || (__ARM_ARCH_8M__ == 1U))  /* ToDo:  ARMCC_V6: check if this is ok for cortex >=3 */
+#if ((__ARM_ARCH_7M__ == 1U) || (__ARM_ARCH_7EM__ == 1U) || (__ARM_ARCH_8M__ == 1U))
 
 /**
   \brief   Enable FIQ
@@ -518,7 +518,7 @@ __attribute__((always_inline)) __STATIC_INLINE void __TZ_set_FAULTMASK_NS(uint32
 #endif
 
 
-#endif /* ((__ARM_ARCH_7M__ == 1U) || (__ARM_ARCH_8M__ == 1U)) */
+#endif /* ((__ARM_ARCH_7M__ == 1U) || (__ARM_ARCH_7EM__ == 1U) || (__ARM_ARCH_8M__ == 1U)) */
 
 
 #if (__ARM_ARCH_8M__ == 1U)
@@ -537,7 +537,7 @@ __attribute__((always_inline)) __STATIC_INLINE uint32_t __get_PSPLIM(void)
 }
 
 
-#if  (__ARM_FEATURE_CMSE == 3U) && (__ARM_ARCH_PROFILE == 'M')     /* ToDo:  ARMCC_V6: check predefined macro for mainline */
+#if  (__ARM_FEATURE_CMSE == 3U) && (__ARM_ARCH_PROFILE == 'M')
 /**
   \brief   Get Process Stack Pointer Limit (non-secure)
   \details Returns the current value of the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
@@ -564,7 +564,7 @@ __attribute__((always_inline)) __STATIC_INLINE void __set_PSPLIM(uint32_t ProcSt
 }
 
 
-#if  (__ARM_FEATURE_CMSE == 3U) && (__ARM_ARCH_PROFILE == 'M')     /* ToDo:  ARMCC_V6: check predefined macro for mainline */
+#if  (__ARM_FEATURE_CMSE == 3U) && (__ARM_ARCH_PROFILE == 'M')
 /**
   \brief   Set Process Stack Pointer (non-secure)
   \details Assigns the given value to the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
@@ -592,7 +592,7 @@ __attribute__((always_inline)) __STATIC_INLINE uint32_t __get_MSPLIM(void)
 }
 
 
-#if  (__ARM_FEATURE_CMSE == 3U) && (__ARM_ARCH_PROFILE == 'M')     /* ToDo:  ARMCC_V6: check predefined macro for mainline */
+#if  (__ARM_FEATURE_CMSE == 3U) && (__ARM_ARCH_PROFILE == 'M')
 /**
   \brief   Get Main Stack Pointer Limit (non-secure)
   \details Returns the current value of the non-secure Main Stack Pointer Limit(MSPLIM) when in secure state.
@@ -619,7 +619,7 @@ __attribute__((always_inline)) __STATIC_INLINE void __set_MSPLIM(uint32_t MainSt
 }
 
 
-#if  (__ARM_FEATURE_CMSE == 3U) && (__ARM_ARCH_PROFILE == 'M')     /* ToDo:  ARMCC_V6: check predefined macro for mainline */
+#if  (__ARM_FEATURE_CMSE == 3U) && (__ARM_ARCH_PROFILE == 'M')
 /**
   \brief   Set Main Stack Pointer Limit (non-secure)
   \details Assigns the given value to the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state.
@@ -634,7 +634,7 @@ __attribute__((always_inline)) __STATIC_INLINE void __TZ_set_MSPLIM_NS(uint32_t
 #endif /* (__ARM_ARCH_8M__ == 1U) */
 
 
-#if ((__ARM_ARCH_7EM__ == 1U) || (__ARM_ARCH_8M__ == 1U))  /* ToDo:  ARMCC_V6: check if this is ok for cortex >=4 */
+#if ((__ARM_ARCH_7EM__ == 1U) || (__ARM_ARCH_8M__ == 1U))
 
 /**
   \brief   Get FPSCR
@@ -644,7 +644,8 @@ __attribute__((always_inline)) __STATIC_INLINE void __TZ_set_MSPLIM_NS(uint32_t
 /* #define __get_FPSCR      __builtin_arm_get_fpscr */
 __attribute__((always_inline)) __STATIC_INLINE uint32_t __get_FPSCR(void)
 {
-#if (__FPU_PRESENT == 1U) && (__FPU_USED == 1U)
+#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
+     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
   uint32_t result;
 
   __ASM volatile ("");                                 /* Empty asm statement works as a scheduling barrier */
@@ -652,7 +653,7 @@ __attribute__((always_inline)) __STATIC_INLINE uint32_t __get_FPSCR(void)
   __ASM volatile ("");
   return(result);
 #else
-   return(0);
+   return(0U);
 #endif
 }
 
@@ -664,7 +665,8 @@ __attribute__((always_inline)) __STATIC_INLINE uint32_t __get_FPSCR(void)
  */
 __attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_FPSCR_NS(void)
 {
-#if (__FPU_PRESENT == 1U) && (__FPU_USED == 1U)
+#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
+     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
   uint32_t result;
 
   __ASM volatile ("");                                 /* Empty asm statement works as a scheduling barrier */
@@ -672,7 +674,7 @@ __attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_FPSCR_NS(void)
   __ASM volatile ("");
   return(result);
 #else
-   return(0);
+   return(0U);
 #endif
 }
 #endif
@@ -686,7 +688,8 @@ __attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_FPSCR_NS(void)
 /* #define __set_FPSCR      __builtin_arm_set_fpscr */
 __attribute__((always_inline)) __STATIC_INLINE void __set_FPSCR(uint32_t fpscr)
 {
-#if (__FPU_PRESENT == 1U) && (__FPU_USED == 1U)
+#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
+     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
   __ASM volatile ("");                                 /* Empty asm statement works as a scheduling barrier */
 /*  __ASM volatile ("VMSR fpscr, %0" : : "r" (fpscr) : "vfpcc"); */
   __ASM volatile ("VMSR fpscr, %0" : : "r" (fpscr) :);
@@ -702,7 +705,8 @@ __attribute__((always_inline)) __STATIC_INLINE void __set_FPSCR(uint32_t fpscr)
  */
 __attribute__((always_inline)) __STATIC_INLINE void __TZ_set_FPSCR_NS(uint32_t fpscr)
 {
-#if (__FPU_PRESENT == 1U) && (__FPU_USED == 1U)
+#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
+     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
   __ASM volatile ("");                                 /* Empty asm statement works as a scheduling barrier */
 /*  __ASM volatile ("VMSR fpscr_ns, %0" : : "r" (fpscr) : "vfpcc"); */
   __ASM volatile ("VMSR fpscr_ns, %0" : : "r" (fpscr) : );
@@ -802,7 +806,7 @@ __attribute__((always_inline)) __STATIC_INLINE void __TZ_set_FPSCR_NS(uint32_t f
   \param [in]    value  Value to reverse
   \return               Reversed value
  */
-#define __REV16          __builtin_bswap16                           /* ToDo:  ARMCC_V6: check if __builtin_bswap16 could be used */
+#define __REV16          __builtin_bswap16                /* ToDo ARMCLANG: check if __builtin_bswap16 could be used */
 #if 0
 __attribute__((always_inline)) __STATIC_INLINE uint32_t __REV16(uint32_t value)
 {
@@ -820,7 +824,7 @@ __attribute__((always_inline)) __STATIC_INLINE uint32_t __REV16(uint32_t value)
   \param [in]    value  Value to reverse
   \return               Reversed value
  */
-                                                          /* ToDo:  ARMCC_V6: check if __builtin_bswap16 could be used */
+                                                          /* ToDo ARMCLANG: check if __builtin_bswap16 could be used */
 __attribute__((always_inline)) __STATIC_INLINE int32_t __REVSH(int32_t value)
 {
   int32_t result;
@@ -859,7 +863,7 @@ __attribute__((always_inline)) __STATIC_INLINE uint32_t __ROR(uint32_t op1, uint
   \param [in]    value  Value to reverse
   \return               Reversed value
  */
-                                                          /* ToDo:  ARMCC_V6: check if __builtin_arm_rbit is supported */
+                                                          /* ToDo ARMCLANG: check if __builtin_arm_rbit is supported */
 __attribute__((always_inline)) __STATIC_INLINE uint32_t __RBIT(uint32_t value)
 {
   uint32_t result;
@@ -891,7 +895,7 @@ __attribute__((always_inline)) __STATIC_INLINE uint32_t __RBIT(uint32_t value)
 #define __CLZ             __builtin_clz
 
 
-#if ((__ARM_ARCH_7M__ == 1U) || (__ARM_ARCH_7EM__ == 1U) || (__ARM_ARCH_8M__ == 1U))  /* ToDo:  ARMCC_V6: check if this is ok for cortex >=3 */
+#if ((__ARM_ARCH_7M__ == 1U) || (__ARM_ARCH_7EM__ == 1U) || (__ARM_ARCH_8M__ == 1U))
 
 /**
   \brief   LDR Exclusive (8 bit)
@@ -1246,7 +1250,7 @@ __attribute__((always_inline)) __STATIC_INLINE void __STL(uint32_t value, volati
   @{
 */
 
-#if (__ARM_FEATURE_DSP == 1U)        /* ToDo:  ARMCC_V6: This should be ARCH >= ARMv7-M + SIMD */
+#if (__ARM_FEATURE_DSP == 1U)                             /* ToDo ARMCLANG: This should be ARCH >= ARMv7-M + SIMD */
 
 __attribute__((always_inline)) __STATIC_INLINE uint32_t __SADD8(uint32_t op1, uint32_t op2)
 {

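The FPSCR accessors in the cmsis_armclang.h hunks bracket the VMRS/VMSR instructions with empty asm statements so the compiler cannot reorder neighbouring floating-point operations across the register access. A standalone sketch of that idiom combined with the new defined() guards (read_fpscr is a hypothetical name; assumes a GCC/Clang-style toolchain targeting an FPU-equipped core):

#include <stdint.h>

static inline uint32_t read_fpscr(void)
{
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED)    && (__FPU_USED    == 1U)))
  uint32_t result;
  __asm volatile ("");                               /* scheduling barrier */
  __asm volatile ("VMRS %0, fpscr" : "=r" (result));
  __asm volatile ("");                               /* scheduling barrier */
  return (result);
#else
  return (0U);                                       /* no FPU configured */
#endif
}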
+ 19 - 12
CMSIS/Core/Include/cmsis_gcc.h

@@ -204,7 +204,8 @@ __attribute__((always_inline)) __STATIC_INLINE void __set_PRIMASK(uint32_t priMa
 }
 
 
-#if (defined (__CORTEX_M) && (__CORTEX_M >= 0x03U) || (defined (__CORTEX_SC) && (__CORTEX_SC >= 300U)))
+#if ((defined (__CORTEX_M ) && (__CORTEX_M  >=   3U)) || \
+     (defined (__CORTEX_SC) && (__CORTEX_SC >= 300U))     )
 
 /**
   \brief   Enable FIQ
@@ -289,10 +290,11 @@ __attribute__((always_inline)) __STATIC_INLINE void __set_FAULTMASK(uint32_t fau
   __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");
 }
 
-#endif /* (defined (__CORTEX_M) && (__CORTEX_M >= 0x03U) || (defined (__CORTEX_SC) && (__CORTEX_SC >= 300U))) */
+#endif /* ((defined (__CORTEX_M ) && (__CORTEX_M  >=   3U)) || \
+           (defined (__CORTEX_SC) && (__CORTEX_SC >= 300U))     ) */
 
 
-#if       (defined (__CORTEX_M) && ((__CORTEX_M == 0x04U) || (__CORTEX_SC == 0x07U)))
+#if (defined (__CORTEX_M) && (__CORTEX_M >= 4U))
 
 /**
   \brief   Get FPSCR
@@ -301,7 +303,8 @@ __attribute__((always_inline)) __STATIC_INLINE void __set_FAULTMASK(uint32_t fau
  */
 __attribute__((always_inline)) __STATIC_INLINE uint32_t __get_FPSCR(void)
 {
-#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && (defined (__FPU_USED) && (__FPU_USED == 1U)))
+#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
+     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
   uint32_t result;
 
   __ASM volatile ("");                                 /* Empty asm statement works as a scheduling barrier */
@@ -309,7 +312,7 @@ __attribute__((always_inline)) __STATIC_INLINE uint32_t __get_FPSCR(void)
   __ASM volatile ("");
   return(result);
 #else
-   return(0);
+   return(0U);
 #endif
 }
 
@@ -321,14 +324,15 @@ __attribute__((always_inline)) __STATIC_INLINE uint32_t __get_FPSCR(void)
  */
 __attribute__((always_inline)) __STATIC_INLINE void __set_FPSCR(uint32_t fpscr)
 {
-#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && (defined (__FPU_USED) && (__FPU_USED == 1U)))
+#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
+     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
   __ASM volatile ("");                                           /* Empty asm statement works as a scheduling barrier */
   __ASM volatile ("VMSR fpscr, %0" : : "r" (fpscr) : "vfpcc");
   __ASM volatile ("");
 #endif
 }
 
-#endif /* (defined (__CORTEX_M) && ((__CORTEX_M == 0x04U) || (__CORTEX_SC == 0x07U))) */
+#endif /* (defined (__CORTEX_M) && (__CORTEX_M >= 4U)) */
 
 
 
@@ -513,7 +517,8 @@ __attribute__((always_inline)) __STATIC_INLINE uint32_t __RBIT(uint32_t value)
 {
   uint32_t result;
 
-#if (defined (__CORTEX_M) && (__CORTEX_M >= 0x03U) || (defined (__CORTEX_SC) && (__CORTEX_SC >= 300U)))
+#if ((defined (__CORTEX_M ) && (__CORTEX_M  >=   3U)) || \
+     (defined (__CORTEX_SC) && (__CORTEX_SC >= 300U))     )
    __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) );
 #else
   int32_t s = 4 /*sizeof(v)*/ * 8 - 1; /* extra shift needed at end */
@@ -540,7 +545,8 @@ __attribute__((always_inline)) __STATIC_INLINE uint32_t __RBIT(uint32_t value)
 #define __CLZ             __builtin_clz
 
 
-#if (defined (__CORTEX_M) && (__CORTEX_M >= 0x03U) || (defined (__CORTEX_SC) && (__CORTEX_SC >= 300U)))
+#if ((defined (__CORTEX_M ) && (__CORTEX_M  >=   3U)) || \
+     (defined (__CORTEX_SC) && (__CORTEX_SC >= 300U))     )
 
 /**
   \brief   LDR Exclusive (8 bit)
@@ -802,7 +808,8 @@ __attribute__((always_inline)) __STATIC_INLINE void __STRT(uint32_t value, volat
    __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) );
 }
 
-#endif /* (defined (__CORTEX_M) && (__CORTEX_M >= 0x03U) || (defined (__CORTEX_SC) && (__CORTEX_SC >= 300U))) */
+#endif /* ((defined (__CORTEX_M ) && (__CORTEX_M >=    3U)) || \
+           (defined (__CORTEX_SC) && (__CORTEX_SC >= 300U))     ) */
 
 /*@}*/ /* end of group CMSIS_Core_InstructionInterface */
 
@@ -813,7 +820,7 @@ __attribute__((always_inline)) __STATIC_INLINE void __STRT(uint32_t value, volat
   @{
 */
 
-#if (defined (__CORTEX_M) && (__CORTEX_M >= 0x04U))  /* only for Cortex-M4 and above */
+#if (defined (__CORTEX_M) && (__CORTEX_M >= 4U))
 
 __attribute__((always_inline)) __STATIC_INLINE uint32_t __SADD8(uint32_t op1, uint32_t op2)
 {
@@ -1348,7 +1355,7 @@ __attribute__((always_inline)) __STATIC_INLINE uint32_t __SMMLA (int32_t op1, in
  return(result);
 }
 
-#endif /* (defined (__CORTEX_M) && (__CORTEX_M >= 0x04U)) */
+#endif /* (defined (__CORTEX_M) && (__CORTEX_M >= 4U)) */
 /*@} end of group CMSIS_SIMD_intrinsics */
 
 

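A quick way to regression-test this class of fix is a -Wundef compile check. The file name and build flags below are assumptions for illustration, not part of the commit:

/* wundef_test.c -- hypothetical check that the header no longer
   evaluates undefined macros. Build with something like:
     arm-none-eabi-gcc -mcpu=cortex-m3 -Wundef -Werror -c wundef_test.c
   Before this commit, uses of __CORTEX_SC, __FPU_PRESENT and
   __FPU_USED while undefined made -Wundef fire; afterwards the
   translation unit compiles cleanly. */
#include <stdint.h>              /* cmsis_gcc.h relies on the fixed-width types */
#define __CORTEX_M 3U            /* pretend Cortex-M3: no SC core, no FPU macros */
#include "cmsis_gcc.h"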
+ 2 - 1
CMSIS/DoxyGen/Core/src/Overview.txt

@@ -76,7 +76,8 @@ The CMSIS-CORE \ref Templates_pg supplied by ARM have been tested and verified w
     <tr>
       <td>V5.00</td>
       <td>
-         Changed: license under Apache-2.0.
+         Changed: license under Apache-2.0. \n
+         Added: check if macro is defined before use.
       </td>
     </tr>
     <tr>