
/**************************************************************************//**
 * @file     cmsis_armcc.h
 * @brief    CMSIS compiler ARMCC (Arm Compiler 5) header file
 * @version  V1.0.6
 * @date     13. November 2022
 ******************************************************************************/
/*
 * Copyright (c) 2009-2021 Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the License); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef __CMSIS_ARMCC_H
#define __CMSIS_ARMCC_H

#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 400677)
  #error "Please use Arm Compiler Toolchain V4.0.677 or later!"
#endif

/* CMSIS compiler control architecture macros */
#if (defined (__TARGET_ARCH_7_A) && (__TARGET_ARCH_7_A == 1))
  #define __ARM_ARCH_7A__ 1
#endif

/* CMSIS compiler specific defines */
#ifndef   __ASM
  #define __ASM                     __asm
#endif
#ifndef   __INLINE
  #define __INLINE                  __inline
#endif
#ifndef   __FORCEINLINE
  #define __FORCEINLINE             __forceinline
#endif
#ifndef   __STATIC_INLINE
  #define __STATIC_INLINE           static __inline
#endif
#ifndef   __STATIC_FORCEINLINE
  #define __STATIC_FORCEINLINE      static __forceinline
#endif
#ifndef   __NO_RETURN
  #define __NO_RETURN               __declspec(noreturn)
#endif
#ifndef   CMSIS_DEPRECATED
  #define CMSIS_DEPRECATED          __attribute__((deprecated))
#endif
#ifndef   __USED
  #define __USED                    __attribute__((used))
#endif
#ifndef   __WEAK
  #define __WEAK                    __attribute__((weak))
#endif
#ifndef   __PACKED
  #define __PACKED                  __attribute__((packed))
#endif
#ifndef   __PACKED_STRUCT
  #define __PACKED_STRUCT           __packed struct
#endif
#ifndef   __PACKED_UNION
  #define __PACKED_UNION            __packed union
#endif
#ifndef   __UNALIGNED_UINT32        /* deprecated */
  #define __UNALIGNED_UINT32(x)     (*((__packed uint32_t *)(x)))
#endif
#ifndef   __UNALIGNED_UINT16_WRITE
  #define __UNALIGNED_UINT16_WRITE(addr, val)    ((*((__packed uint16_t *)(addr))) = (val))
#endif
#ifndef   __UNALIGNED_UINT16_READ
  #define __UNALIGNED_UINT16_READ(addr)          (*((const __packed uint16_t *)(addr)))
#endif
#ifndef   __UNALIGNED_UINT32_WRITE
  #define __UNALIGNED_UINT32_WRITE(addr, val)    ((*((__packed uint32_t *)(addr))) = (val))
#endif
#ifndef   __UNALIGNED_UINT32_READ
  #define __UNALIGNED_UINT32_READ(addr)          (*((const __packed uint32_t *)(addr)))
#endif
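
/* Usage sketch (illustrative, not part of the original header): read a 32-bit
   value from an arbitrarily aligned byte buffer without risking an
   unaligned-access fault. The buffer name `frame` is hypothetical.

     uint8_t  frame[16];
     uint32_t len = __UNALIGNED_UINT32_READ(&frame[1]);        // offset 1 is unaligned
     __UNALIGNED_UINT16_WRITE(&frame[5], (uint16_t)0x1234U);
*/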
#ifndef   __ALIGNED
  #define __ALIGNED(x)              __attribute__((aligned(x)))
#endif
#ifndef   __RESTRICT
  #define __RESTRICT                __restrict
#endif
#ifndef   __COMPILER_BARRIER
  #define __COMPILER_BARRIER()      __memory_changed()
#endif
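
/* Usage sketch (illustrative): __COMPILER_BARRIER() only stops the compiler
   from reordering or caching memory accesses across it; it emits no barrier
   instruction. The flag and buffer names below are hypothetical.

     buffer[0] = produce();     // must not be moved past the flag update
     __COMPILER_BARRIER();
     data_ready = 1U;           // e.g. observed by an interrupt handler on this core
*/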
/* ##########################  Core Instruction Access  ######################### */
/**
  \brief   No Operation
  \details No Operation does nothing. This instruction can be used for code alignment purposes.
 */
#define __NOP                             __nop

/**
  \brief   Wait For Interrupt
  \details Wait For Interrupt is a hint instruction that suspends execution until one of a number of events occurs.
 */
#define __WFI                             __wfi

/**
  \brief   Wait For Event
  \details Wait For Event is a hint instruction that permits the processor to enter
           a low-power state until one of a number of events occurs.
 */
#define __WFE                             __wfe

/**
  \brief   Send Event
  \details Send Event is a hint instruction. It causes an event to be signaled to the CPU.
 */
#define __SEV                             __sev

/**
  \brief   Instruction Synchronization Barrier
  \details Instruction Synchronization Barrier flushes the pipeline in the processor,
           so that all instructions following the ISB are fetched from cache or memory,
           after the instruction has been completed.
 */
#define __ISB() __isb(0xF)

/**
  \brief   Data Synchronization Barrier
  \details Acts as a special kind of Data Memory Barrier.
           It completes when all explicit memory accesses before this instruction complete.
 */
#define __DSB() __dsb(0xF)

/**
  \brief   Data Memory Barrier
  \details Ensures the apparent order of the explicit memory operations before
           and after the instruction, without ensuring their completion.
 */
#define __DMB() __dmb(0xF)
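
/* Usage sketch (illustrative): a typical message-passing pattern between a
   producer and an observer on another core. __DMB() orders the payload store
   before the flag store; the names `msg` and `msg_valid` are hypothetical.

     msg = 42U;          // payload store
     __DMB();            // make the payload observable before the flag
     msg_valid = 1U;     // flag store
*/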
/**
  \brief   Reverse byte order (32 bit)
  \details Reverses the byte order in an unsigned integer value. For example, 0x12345678 becomes 0x78563412.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#define __REV                             __rev

/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order within each halfword of a word. For example, 0x12345678 becomes 0x34127856.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#ifndef __NO_EMBEDDED_ASM
__attribute__((section(".rev16_text"))) __STATIC_INLINE __ASM uint32_t __REV16(uint32_t value)
{
  rev16 r0, r0
  bx lr
}
#endif

/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order in a 16-bit value and returns the signed 16-bit result. For example, 0x0080 becomes 0x8000.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#ifndef __NO_EMBEDDED_ASM
__attribute__((section(".revsh_text"))) __STATIC_INLINE __ASM int16_t __REVSH(int16_t value)
{
  revsh r0, r0
  bx lr
}
#endif
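
/* Usage sketch (illustrative): the reverse intrinsics are a natural fit for
   endianness conversion, e.g. turning a big-endian (network-order) word into
   host order on a little-endian target. `net_word` is hypothetical.

     uint32_t net_word  = 0x12345678U;
     uint32_t host_word = __REV(net_word);      // 0x78563412
     uint32_t swapped   = __REV16(0x11223344U); // 0x22114433: bytes swapped per halfword
*/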
/**
  \brief   Rotate Right in unsigned value (32 bit)
  \details Rotate Right provides the value of the contents of a register rotated by a variable number of bits.
  \param [in]    op1  Value to rotate
  \param [in]    op2  Number of Bits to rotate
  \return             Rotated value
 */
#define __ROR                             __ror

/**
  \brief   Breakpoint
  \details Causes the processor to enter Debug state.
           Debug tools can use this to investigate system state when the instruction at a particular address is reached.
  \param [in]    value  is ignored by the processor.
                 If required, a debugger can use it to store additional information about the breakpoint.
 */
#define __BKPT(value)                     __breakpoint(value)

/**
  \brief   Reverse bit order of value
  \details Reverses the bit order of the given value.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#define __RBIT                            __rbit

/**
  \brief   Count leading zeros
  \details Counts the number of leading zeros of a data value.
  \param [in]  value  Value to count the leading zeros
  \return             number of leading zeros in value
 */
#define __CLZ                             __clz
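
/* Usage sketch (illustrative): __CLZ and __RBIT combine into common bit tricks,
   e.g. finding the indices of the highest and lowest set bits of a non-zero word.

     uint32_t x   = 0x00010400U;         // bits 16 and 10 set
     uint32_t msb = 31U - __CLZ(x);      // 16: highest set bit
     uint32_t lsb = __CLZ(__RBIT(x));    // 10: lowest set bit
*/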
/**
  \brief   LDR Exclusive (8 bit)
  \details Executes an exclusive LDR instruction for 8 bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __LDREXB(ptr)  ((uint8_t) __ldrex(ptr))
#else
  #define __LDREXB(ptr)  _Pragma("push") _Pragma("diag_suppress 3731") ((uint8_t) __ldrex(ptr)) _Pragma("pop")
#endif

/**
  \brief   LDR Exclusive (16 bit)
  \details Executes an exclusive LDR instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint16_t at (*ptr)
 */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __LDREXH(ptr)  ((uint16_t) __ldrex(ptr))
#else
  #define __LDREXH(ptr)  _Pragma("push") _Pragma("diag_suppress 3731") ((uint16_t) __ldrex(ptr)) _Pragma("pop")
#endif

/**
  \brief   LDR Exclusive (32 bit)
  \details Executes an exclusive LDR instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint32_t at (*ptr)
 */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __LDREXW(ptr)  ((uint32_t) __ldrex(ptr))
#else
  #define __LDREXW(ptr)  _Pragma("push") _Pragma("diag_suppress 3731") ((uint32_t) __ldrex(ptr)) _Pragma("pop")
#endif

/**
  \brief   STR Exclusive (8 bit)
  \details Executes an exclusive STR instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __STREXB(value, ptr)  __strex(value, ptr)
#else
  #define __STREXB(value, ptr)  _Pragma("push") _Pragma("diag_suppress 3731") __strex(value, ptr) _Pragma("pop")
#endif

/**
  \brief   STR Exclusive (16 bit)
  \details Executes an exclusive STR instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __STREXH(value, ptr)  __strex(value, ptr)
#else
  #define __STREXH(value, ptr)  _Pragma("push") _Pragma("diag_suppress 3731") __strex(value, ptr) _Pragma("pop")
#endif

/**
  \brief   STR Exclusive (32 bit)
  \details Executes an exclusive STR instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __STREXW(value, ptr)  __strex(value, ptr)
#else
  #define __STREXW(value, ptr)  _Pragma("push") _Pragma("diag_suppress 3731") __strex(value, ptr) _Pragma("pop")
#endif

/**
  \brief   Remove the exclusive lock
  \details Removes the exclusive lock which is created by LDREX.
 */
#define __CLREX                           __clrex
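
/* Usage sketch (illustrative): the canonical LDREX/STREX retry loop, here
   implementing an atomic increment. __STREXW returns 0 only if the exclusive
   monitor still holds the reservation; otherwise the sequence is retried.
   The helper name `atomic_inc32` is hypothetical.

     static __inline void atomic_inc32(volatile uint32_t *addr)
     {
       uint32_t val;
       do {
         val = __LDREXW(addr) + 1U;
       } while (__STREXW(val, addr) != 0U);
     }
*/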
/**
  \brief   Signed Saturate
  \details Saturates a signed value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (1..32)
  \return             Saturated value
 */
#define __SSAT                            __ssat

/**
  \brief   Unsigned Saturate
  \details Saturates an unsigned value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (0..31)
  \return             Saturated value
 */
#define __USAT                            __usat
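
/* Usage sketch (illustrative): clamp an intermediate result into a narrower
   range without explicit compare-and-branch code, e.g. into the signed 16-bit
   and unsigned 8-bit ranges.

     int32_t  s = __SSAT(70000, 16);   //  32767 (upper signed 16-bit limit)
     uint32_t u = __USAT(-5, 8);       //      0 (unsigned results never go negative)
*/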
/* ###########################  Core Function Access  ########################### */
/**
  \brief   Enable IRQ Interrupts
  \details Enables IRQ interrupts by clearing the I-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
/* intrinsic void __enable_irq(void); */

/**
  \brief   Disable IRQ Interrupts
  \details Disables IRQ interrupts by setting the I-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
/* intrinsic void __disable_irq(void); */

/**
  \brief   Enable FIQ
  \details Enables FIQ interrupts by clearing the F-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
#define __enable_fault_irq                __enable_fiq

/**
  \brief   Disable FIQ
  \details Disables FIQ interrupts by setting the F-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
#define __disable_fault_irq               __disable_fiq

/**
  \brief   Get FPSCR (Floating Point Status/Control)
  \return               Floating Point Status/Control register value
 */
__STATIC_INLINE uint32_t __get_FPSCR(void)
{
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED   == 1U))     )
  register uint32_t __regfpscr __ASM("fpscr");
  return(__regfpscr);
#else
  return(0U);
#endif
}

/**
  \brief   Set FPSCR (Floating Point Status/Control)
  \param [in]    fpscr  Floating Point Status/Control value to set
 */
__STATIC_INLINE void __set_FPSCR(uint32_t fpscr)
{
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED   == 1U))     )
  register uint32_t __regfpscr __ASM("fpscr");
  __regfpscr = (fpscr);
#else
  (void)fpscr;
#endif
}
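
/* Usage sketch (illustrative): the low bits of FPSCR hold the cumulative
   exception flags (IOC, DZC, OFC, UFC, IXC, IDC). A handler might read and
   then clear them; the mask 0x9FU below assumes the standard VFP flag layout.

     uint32_t fpscr = __get_FPSCR();
     if (fpscr & 0x2U) {              // DZC: a division by zero occurred
       __set_FPSCR(fpscr & ~0x9FU);   // clear all cumulative flags
     }
*/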
/** \brief  Get CPSR (Current Program Status Register)
    \return               CPSR Register value
 */
__STATIC_INLINE uint32_t __get_CPSR(void)
{
  register uint32_t __regCPSR __ASM("cpsr");
  return(__regCPSR);
}

/** \brief  Set CPSR (Current Program Status Register)
    \param [in]    cpsr  CPSR value to set
 */
__STATIC_INLINE void __set_CPSR(uint32_t cpsr)
{
  register uint32_t __regCPSR __ASM("cpsr");
  __regCPSR = cpsr;
}

/** \brief  Get Mode
    \return                Processor Mode
 */
__STATIC_INLINE uint32_t __get_mode(void)
{
  return (__get_CPSR() & 0x1FU);
}
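
/* Usage sketch (illustrative): the five mode bits returned by __get_mode()
   use the ARMv7-A encodings (USR=0x10, FIQ=0x11, IRQ=0x12, SVC=0x13,
   ABT=0x17, UND=0x1B, SYS=0x1F), so a privilege check can be written as:

     if (__get_mode() != 0x10U) {
       // privileged mode: CPSR writes and CP15 accesses are permitted here
     }
*/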
/** \brief  Set Mode
    \param [in]    mode  Mode value to set
 */
__STATIC_INLINE __ASM void __set_mode(uint32_t mode)
{
  MOV  r1, lr
  MSR  CPSR_C, r0
  BX   r1
}

/** \brief  Get Stack Pointer
    \return Stack Pointer
 */
__STATIC_INLINE __ASM uint32_t __get_SP(void)
{
  MOV  r0, sp
  BX   lr
}

/** \brief  Set Stack Pointer
    \param [in]    stack  Stack Pointer value to set
 */
__STATIC_INLINE __ASM void __set_SP(uint32_t stack)
{
  MOV  sp, r0
  BX   lr
}

/** \brief  Get USR/SYS Stack Pointer
    \return USR/SYS Stack Pointer
 */
__STATIC_INLINE __ASM uint32_t __get_SP_usr(void)
{
  ARM
  PRESERVE8

  MRS     R1, CPSR
  CPS     #0x1F       ;no effect in USR mode
  MOV     R0, SP
  MSR     CPSR_c, R1  ;no effect in USR mode
  ISB
  BX      LR
}

/** \brief  Set USR/SYS Stack Pointer
    \param [in]    topOfProcStack  USR/SYS Stack Pointer value to set
 */
__STATIC_INLINE __ASM void __set_SP_usr(uint32_t topOfProcStack)
{
  ARM
  PRESERVE8

  MRS     R1, CPSR
  CPS     #0x1F       ;no effect in USR mode
  MOV     SP, R0
  MSR     CPSR_c, R1  ;no effect in USR mode
  ISB
  BX      LR
}

/** \brief  Get FPEXC (Floating Point Exception Control Register)
    \return               Floating Point Exception Control Register value
 */
__STATIC_INLINE uint32_t __get_FPEXC(void)
{
#if (defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U))
  register uint32_t __regfpexc __ASM("fpexc");
  return(__regfpexc);
#else
  return(0U);
#endif
}

/** \brief  Set FPEXC (Floating Point Exception Control Register)
    \param [in]    fpexc  Floating Point Exception Control value to set
 */
__STATIC_INLINE void __set_FPEXC(uint32_t fpexc)
{
#if (defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U))
  register uint32_t __regfpexc __ASM("fpexc");
  __regfpexc = (fpexc);
#else
  (void)fpexc;
#endif
}
/*
 * Include common core functions to access Coprocessor 15 registers
 */

#define __get_CP(cp, op1, Rt, CRn, CRm, op2) do { register volatile uint32_t tmp __ASM("cp" # cp ":" # op1 ":c" # CRn ":c" # CRm ":" # op2); (Rt) = tmp; } while(0)
#define __set_CP(cp, op1, Rt, CRn, CRm, op2) do { register volatile uint32_t tmp __ASM("cp" # cp ":" # op1 ":c" # CRn ":c" # CRm ":" # op2); tmp = (Rt); } while(0)
#define __get_CP64(cp, op1, Rt, CRm)                                    \
  do {                                                                  \
    uint32_t ltmp, htmp;                                                \
    __ASM volatile("MRRC p" # cp ", " # op1 ", ltmp, htmp, c" # CRm);   \
    (Rt) = ((((uint64_t)htmp) << 32U) | ((uint64_t)ltmp));              \
  } while(0)

#define __set_CP64(cp, op1, Rt, CRm)                                    \
  do {                                                                  \
    const uint64_t tmp = (Rt);                                          \
    const uint32_t ltmp = (uint32_t)(tmp);                              \
    const uint32_t htmp = (uint32_t)(tmp >> 32U);                       \
    __ASM volatile("MCRR p" # cp ", " # op1 ", ltmp, htmp, c" # CRm);   \
  } while(0)

#include "cmsis_cp15.h"
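
/* Usage sketch (illustrative): reading a CP15 register through __get_CP, here
   MIDR (Main ID Register: opc1 0, c0, c0, opc2 0). cmsis_cp15.h wraps the
   common registers in named accessors built on these same macros.

     uint32_t midr;
     __get_CP(15, 0, midr, 0, 0, 0);   // MRC p15, 0, <Rt>, c0, c0, 0
*/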
/** \brief  Enable Floating Point Unit

  Critical section, called from undef handler, so systick is disabled
 */
__STATIC_INLINE __ASM void __FPU_Enable(void)
{
  ARM

  //Permit access to VFP/NEON registers by modifying CPACR
  MRC     p15,0,R1,c1,c0,2
  ORR     R1,R1,#0x00F00000
  MCR     p15,0,R1,c1,c0,2

  //Ensure that subsequent instructions occur in the context of VFP/NEON access permitted
  ISB

  //Enable VFP/NEON
  VMRS    R1,FPEXC
  ORR     R1,R1,#0x40000000
  VMSR    FPEXC,R1

  //Initialise VFP/NEON registers to 0
  MOV     R2,#0

  //Initialise D16 registers to 0
  VMOV    D0, R2,R2
  VMOV    D1, R2,R2
  VMOV    D2, R2,R2
  VMOV    D3, R2,R2
  VMOV    D4, R2,R2
  VMOV    D5, R2,R2
  VMOV    D6, R2,R2
  VMOV    D7, R2,R2
  VMOV    D8, R2,R2
  VMOV    D9, R2,R2
  VMOV    D10,R2,R2
  VMOV    D11,R2,R2
  VMOV    D12,R2,R2
  VMOV    D13,R2,R2
  VMOV    D14,R2,R2
  VMOV    D15,R2,R2

  IF {TARGET_FEATURE_EXTENSION_REGISTER_COUNT} == 32
  //Initialise D32 registers to 0
  VMOV    D16,R2,R2
  VMOV    D17,R2,R2
  VMOV    D18,R2,R2
  VMOV    D19,R2,R2
  VMOV    D20,R2,R2
  VMOV    D21,R2,R2
  VMOV    D22,R2,R2
  VMOV    D23,R2,R2
  VMOV    D24,R2,R2
  VMOV    D25,R2,R2
  VMOV    D26,R2,R2
  VMOV    D27,R2,R2
  VMOV    D28,R2,R2
  VMOV    D29,R2,R2
  VMOV    D30,R2,R2
  VMOV    D31,R2,R2
  ENDIF

  //Initialise FPSCR to a known state
  VMRS    R1,FPSCR
  LDR     R2,=0x00086060 //Mask off all bits that do not have to be preserved. Non-preserved bits can/should be zero.
  AND     R1,R1,R2
  VMSR    FPSCR,R1

  BX      LR
}

#endif /* __CMSIS_ARMCC_H */