/*
 * Copyright (c) 2013-2016 ARM Limited. All rights reserved.
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the License); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * -----------------------------------------------------------------------------
 *
 * Project:     CMSIS-RTOS RTX
 * Title:       Cortex-M Core definitions
 *
 * -----------------------------------------------------------------------------
 */

#ifndef __CORE_CM_H
#define __CORE_CM_H

#include "RTE_Components.h"
#include CMSIS_device_header

#if !defined(__NO_EXCLUSIVE_ACCESS) && (__CORTEX_M < 3U)
#define __NO_EXCLUSIVE_ACCESS
#endif
// ==== Service Calls definitions ====

#define SVC_ArgN(n) \
  register uint32_t __r##n __asm("r"#n)

#define SVC_ArgR(n,a) \
  register uint32_t __r##n __asm("r"#n) = (uint32_t)a

#define SVC_Arg0() \
  SVC_ArgN(0);     \
  SVC_ArgN(1);     \
  SVC_ArgN(2);     \
  SVC_ArgN(3)

#define SVC_Arg1(a1) \
  SVC_ArgR(0,a1);    \
  SVC_ArgN(1);       \
  SVC_ArgN(2);       \
  SVC_ArgN(3)

#define SVC_Arg2(a1,a2) \
  SVC_ArgR(0,a1);       \
  SVC_ArgR(1,a2);       \
  SVC_ArgN(2);          \
  SVC_ArgN(3)

#define SVC_Arg3(a1,a2,a3) \
  SVC_ArgR(0,a1);          \
  SVC_ArgR(1,a2);          \
  SVC_ArgR(2,a3);          \
  SVC_ArgN(3)

#define SVC_Arg4(a1,a2,a3,a4) \
  SVC_ArgR(0,a1);             \
  SVC_ArgR(1,a2);             \
  SVC_ArgR(2,a3);             \
  SVC_ArgR(3,a4)
#if (__CORTEX_M >= 3U)
#define SVC_Call0(f)    \
  __ASM volatile        \
  (                     \
    "ldr r12,="#f"\n\t" \
    "svc 0"             \
    : "=r" (__r0), "=r" (__r1), "=r" (__r2), "=r" (__r3) \
    :  "r" (__r0),  "r" (__r1),  "r" (__r2),  "r" (__r3) \
    : "r12", "lr", "cc" \
  )
#else
#define SVC_Call0(f)    \
  __ASM volatile        \
  (                     \
    "ldr r7,="#f"\n\t"  \
    "svc 0"             \
    : "=r" (__r0), "=r" (__r1), "=r" (__r2), "=r" (__r3) \
    :  "r" (__r0),  "r" (__r1),  "r" (__r2),  "r" (__r3) \
    : "r7", "lr", "cc"  \
  )
#endif
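
// Note on the two variants above: both pass the address of the service
// function to the SVC handler in a scratch register, r12 on ARMv7-M or
// r7 on ARMv6-M (where the Thumb-1 LDR literal instruction can only
// target low registers). The handler is expected to read that register
// and branch to the service function with the arguments still in r0-r3;
// results come back in r0.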
#define SVC0_0N(f,t)              \
__attribute__((always_inline))    \
static inline t __svc##f (void) { \
  SVC_Arg0();                     \
  SVC_Call0(os_svc##f);           \
}

#define SVC0_0(f,t)               \
__attribute__((always_inline))    \
static inline t __svc##f (void) { \
  SVC_Arg0();                     \
  SVC_Call0(os_svc##f);           \
  return (t) __r0;                \
}

#define SVC0_1N(f,t,t1)            \
__attribute__((always_inline))     \
static inline t __svc##f (t1 a1) { \
  SVC_Arg1(a1);                    \
  SVC_Call0(os_svc##f);            \
}

#define SVC0_1(f,t,t1)             \
__attribute__((always_inline))     \
static inline t __svc##f (t1 a1) { \
  SVC_Arg1(a1);                    \
  SVC_Call0(os_svc##f);            \
  return (t) __r0;                 \
}

#define SVC0_2(f,t,t1,t2)                 \
__attribute__((always_inline))            \
static inline t __svc##f (t1 a1, t2 a2) { \
  SVC_Arg2(a1,a2);                        \
  SVC_Call0(os_svc##f);                   \
  return (t) __r0;                        \
}

#define SVC0_3(f,t,t1,t2,t3)                     \
__attribute__((always_inline))                   \
static inline t __svc##f (t1 a1, t2 a2, t3 a3) { \
  SVC_Arg3(a1,a2,a3);                            \
  SVC_Call0(os_svc##f);                          \
  return (t) __r0;                               \
}

#define SVC0_4(f,t,t1,t2,t3,t4)                         \
__attribute__((always_inline))                          \
static inline t __svc##f (t1 a1, t2 a2, t3 a3, t4 a4) { \
  SVC_Arg4(a1,a2,a3,a4);                                \
  SVC_Call0(os_svc##f);                                 \
  return (t) __r0;                                      \
}
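
// Usage sketch: the names Delay/os_svcDelay below are hypothetical, not
// part of this file. Given a kernel-side service function
//   osStatus os_svcDelay (uint32_t ticks);
// the declaration
//   SVC0_1(Delay, osStatus, uint32_t)
// expands to an always-inline wrapper
//   static inline osStatus __svcDelay (uint32_t ticks);
// which binds `ticks` to r0, traps with `svc 0`, and returns the service
// result from r0. The *N variants generate wrappers for void services.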
// ==== Core Peripherals functions ====

#define XPSR_INITIAL_VALUE 0x01000000U  // initial xPSR: T-bit (bit 24) set, Thumb state

extern uint32_t SystemCoreClock;        // System Clock Frequency (Core Clock)

/// Initialize SVC and PendSV System Service Calls
__STATIC_INLINE void os_SVC_Initialize (void) {
#if (__CORTEX_M >= 3U)
  uint32_t p, n;

  SCB->SHP[10] = 0xFFU;                 // PendSV: lowest priority
  // Read the priority back to find the number of unimplemented low bits,
  // then place SVCall one implemented (preemption) level above the lowest.
  n = 32U - (uint32_t)__CLZ(~(SCB->SHP[10] | 0xFFFFFF00U));
  p = NVIC_GetPriorityGrouping();
  if (p >= n) {
    n = p + 1U;
  }
  SCB->SHP[7] = (uint8_t)(0xFEU << n);  // SVCall: above PendSV
#else
  SCB->SHP[1] |= 0x00FF0000U;           // PendSV: lowest priority
  SCB->SHP[0] |= (SCB->SHP[1] << (8+1)) & 0xFC000000U;  // SVCall: above PendSV
#endif
}
/// Setup SysTick Timer
/// \param[in] period Timer Load value
__STATIC_INLINE void os_SysTick_Setup (uint32_t period) {
  SysTick->LOAD = period - 1U;
  SysTick->VAL  = 0U;
#if (__CORTEX_M >= 3U)
  SCB->SHP[11] = 0xFFU;                 // SysTick: lowest priority
#else
  SCB->SHP[1] |= 0xFF000000U;           // SysTick: lowest priority
#endif
}
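
// Usage sketch: for a 1 ms kernel tick the load value is derived from the
// core clock, e.g. os_SysTick_Setup(SystemCoreClock / 1000U). The counter
// only starts once os_SysTick_Enable() is called.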
/// Get SysTick Period
/// \return SysTick Period
__STATIC_INLINE uint32_t os_SysTick_GetPeriod (void) {
  return (SysTick->LOAD + 1U);
}

/// Get SysTick Value (count elapsed within the current period; SysTick counts down)
/// \return SysTick Value
__STATIC_INLINE uint32_t os_SysTick_GetVal (void) {
  return (SysTick->LOAD - SysTick->VAL);
}

/// Get SysTick Overflow flag (COUNTFLAG, cleared automatically by the read)
/// \return SysTick Overflow flag
__STATIC_INLINE uint32_t os_SysTick_GetOvf (void) {
  return ((SysTick->CTRL >> 16) & 1U);
}
/// Enable SysTick Timer
__STATIC_INLINE void os_SysTick_Enable (void) {
  SysTick->CTRL = SysTick_CTRL_ENABLE_Msk  |
                  SysTick_CTRL_TICKINT_Msk |
                  SysTick_CTRL_CLKSOURCE_Msk;
}

/// Disable SysTick Timer
__STATIC_INLINE void os_SysTick_Disable (void) {
  SysTick->CTRL = 0U;
}
/// Setup External Tick Timer Interrupt
/// \param[in] irqn Interrupt number
__STATIC_INLINE void os_ExtTick_SetupIRQ (int32_t irqn) {
  NVIC->IP[irqn] = 0xFFU;                       // lowest priority
}

/// Enable External Tick Timer Interrupt
/// \param[in] irqn Interrupt number
__STATIC_INLINE void os_ExtTick_EnableIRQ (int32_t irqn) {
  NVIC->ISER[irqn >> 5] = 1U << (irqn & 0x1F);
}

/// Disable External Tick Timer Interrupt
/// \param[in] irqn Interrupt number
__STATIC_INLINE void os_ExtTick_DisableIRQ (int32_t irqn) {
  NVIC->ICER[irqn >> 5] = 1U << (irqn & 0x1F);
}
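
// Usage sketch (TIMx_IRQn stands for a hypothetical device-specific IRQ
// number) for driving the kernel tick from an external timer:
//   os_ExtTick_SetupIRQ (TIMx_IRQn);   // lowest priority, like SysTick
//   os_ExtTick_EnableIRQ(TIMx_IRQn);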
/// Get Pending SV (Service Call) and ST (SysTick) Flags
/// \return Pending SV&ST Flags (ICSR bits 28 and 26 shifted into the low byte)
__STATIC_INLINE uint8_t os_GetPendSV_ST (void) {
  return ((uint8_t)((SCB->ICSR & (SCB_ICSR_PENDSVSET_Msk | SCB_ICSR_PENDSTSET_Msk)) >> 24));
}

/// Get Pending SV (Service Call) Flag
/// \return Pending SV Flag
__STATIC_INLINE uint8_t os_GetPendSV (void) {
  return ((uint8_t)((SCB->ICSR & (SCB_ICSR_PENDSVSET_Msk)) >> 24));
}

/// Clear Pending SV (Service Call) and ST (SysTick) Flags
__STATIC_INLINE void os_ClrPendSV_ST (void) {
  SCB->ICSR = SCB_ICSR_PENDSVCLR_Msk | SCB_ICSR_PENDSTCLR_Msk;
}

/// Clear Pending SV (Service Call) Flag
__STATIC_INLINE void os_ClrPendSV (void) {
  SCB->ICSR = SCB_ICSR_PENDSVCLR_Msk;
}

/// Set Pending SV (Service Call) Flag
__STATIC_INLINE void os_SetPendSV (void) {
  SCB->ICSR = SCB_ICSR_PENDSVSET_Msk;
}

/// Set Pending Flags
/// \param[in] flags Flags to set (as returned by os_GetPendSV_ST)
__STATIC_INLINE void os_SetPendFlags (uint8_t flags) {
  SCB->ICSR = ((uint32_t)flags << 24);
}
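
// Usage sketch: a handler can capture both pending flags, clear them, and
// re-pend the captured set later via os_SetPendFlags:
//   uint8_t pend = os_GetPendSV_ST();
//   os_ClrPendSV_ST();
//   // ... critical work ...
//   os_SetPendFlags(pend);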
// ==== Exclusive Access Operation ====

#if (__CORTEX_M >= 3U)

/// Exclusive Access Operation: Write (8-bit)
/// \param[in] mem Memory address
/// \param[in] val Value to write
/// \return Previous value
__STATIC_INLINE uint8_t os_exc_wr8 (uint8_t *mem, uint8_t val) {
  register uint32_t res;
  register uint8_t  ret;

  __ASM volatile (
    "loop%=:\n\t"
      "ldrexb %[ret],[%[mem]]\n\t"
      "strexb %[res],%[val],[%[mem]]\n\t"
      "cbz    %[res],exit%=\n\t"
      "b      loop%=\n\t"
    "exit%=:"
    : [ret] "=&l" (ret),
      [res] "=&l" (res)
    : [mem] "l"   (mem),
      [val] "l"   (val)
    : "memory"
  );

  return ret;
}
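
// Usage sketch (hypothetical one-byte lock variable, not part of this file):
//   static uint8_t lock;
//   if (os_exc_wr8(&lock, 1U) == 0U) {
//     // lock was free and is now taken
//   }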
/// Exclusive Access Operation: Set bits (32-bit)
/// \param[in] mem Memory address
/// \param[in] bits Bit mask
/// \return New value
__STATIC_INLINE uint32_t os_exc_set32 (uint32_t *mem, uint32_t bits) {
  register uint32_t val, res;
  register uint32_t ret;

  __ASM volatile (
    "loop%=:\n\t"
      "ldrex %[val],[%[mem]]\n\t"
      "orr   %[ret],%[val],%[bits]\n\t"
      "strex %[res],%[ret],[%[mem]]\n\t"
      "cbz   %[res],exit%=\n\t"
      "b     loop%=\n\t"
    "exit%=:"
    : [ret]  "=&l" (ret),
      [val]  "=&l" (val),
      [res]  "=&l" (res)
    : [mem]  "l"   (mem),
      [bits] "l"   (bits)
    : "memory"
  );

  return ret;
}

/// Exclusive Access Operation: Clear bits (32-bit)
/// \param[in] mem Memory address
/// \param[in] bits Bit mask
/// \return Previous value
__STATIC_INLINE uint32_t os_exc_clr32 (uint32_t *mem, uint32_t bits) {
  register uint32_t val, res;
  register uint32_t ret;

  __ASM volatile (
    "loop%=:\n\t"
      "ldrex %[ret],[%[mem]]\n\t"
      "bic   %[val],%[ret],%[bits]\n\t"
      "strex %[res],%[val],[%[mem]]\n\t"
      "cbz   %[res],exit%=\n\t"
      "b     loop%=\n\t"
    "exit%=:"
    : [ret]  "=&l" (ret),
      [val]  "=&l" (val),
      [res]  "=&l" (res)
    : [mem]  "l"   (mem),
      [bits] "l"   (bits)
    : "memory"
  );

  return ret;
}

/// Exclusive Access Operation: Check if all specified bits (32-bit) are active and clear them
/// \param[in] mem Memory address
/// \param[in] bits Bit mask
/// \return Active bits before clearing or 0 if not active
__STATIC_INLINE uint32_t os_exc_chk32_all (uint32_t *mem, uint32_t bits) {
  register uint32_t val, res;
  register uint32_t ret;

  __ASM volatile (
    "loop%=:\n\t"
      "ldrex %[ret],[%[mem]]\n\t"
      "and   %[val],%[ret],%[bits]\n\t"
      "cmp   %[val],%[bits]\n\t"
      "beq   update%=\n\t"
      "clrex\n\t"
      "movs  %[ret],#0\n\t"
      "b     exit%=\n\t"
    "update%=:\n\t"
      "bic   %[val],%[ret],%[bits]\n\t"
      "strex %[res],%[val],[%[mem]]\n\t"
      "cbz   %[res],exit%=\n\t"
      "b     loop%=\n\t"
    "exit%=:"
    : [ret]  "=&l" (ret),
      [val]  "=&l" (val),
      [res]  "=&l" (res)
    : [mem]  "l"   (mem),
      [bits] "l"   (bits)
    : "cc", "memory"
  );

  return ret;
}

/// Exclusive Access Operation: Check if any specified bits (32-bit) are active and clear them
/// \param[in] mem Memory address
/// \param[in] bits Bit mask
/// \return Active bits before clearing or 0 if not active
__STATIC_INLINE uint32_t os_exc_chk32_any (uint32_t *mem, uint32_t bits) {
  register uint32_t val, res;
  register uint32_t ret;

  __ASM volatile (
    "loop%=:\n\t"
      "ldrex %[ret],[%[mem]]\n\t"
      "ands  %[val],%[ret],%[bits]\n\t"
      "bne   update%=\n\t"
      "clrex\n\t"
      "movs  %[ret],#0\n\t"
      "b     exit%=\n\t"
    "update%=:\n\t"
      "bic   %[val],%[ret],%[bits]\n\t"
      "strex %[res],%[val],[%[mem]]\n\t"
      "cbz   %[res],exit%=\n\t"
      "b     loop%=\n\t"
    "exit%=:"
    : [ret]  "=&l" (ret),
      [val]  "=&l" (val),
      [res]  "=&l" (res)
    : [mem]  "l"   (mem),
      [bits] "l"   (bits)
    : "cc", "memory"
  );

  return ret;
}
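
// Usage sketch (hypothetical event-flag variable, not part of this file):
//   static uint32_t event_flags;
//   os_exc_set32(&event_flags, 0x05U);                    // signal flags 0 and 2
//   if (os_exc_chk32_all(&event_flags, 0x05U) != 0U) { }  // consume only if both are set
//   if (os_exc_chk32_any(&event_flags, 0x05U) != 0U) { }  // consume whichever are set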
/// Exclusive Access Operation: Increment (32-bit)
/// \param[in] mem Memory address
/// \return Previous value
__STATIC_INLINE uint32_t os_exc_inc32 (uint32_t *mem) {
  register uint32_t val, res;
  register uint32_t ret;

  __ASM volatile (
    "loop%=:\n\t"
      "ldrex %[ret],[%[mem]]\n\t"
      "adds  %[val],%[ret],#1\n\t"
      "strex %[res],%[val],[%[mem]]\n\t"
      "cbz   %[res],exit%=\n\t"
      "b     loop%=\n\t"
    "exit%=:"
    : [ret] "=&l" (ret),
      [val] "=&l" (val),
      [res] "=&l" (res)
    : [mem] "l"   (mem)
    : "cc", "memory"
  );

  return ret;
}

/// Exclusive Access Operation: Increment (16-bit) if Less Than
/// \param[in] mem Memory address
/// \param[in] max Maximum value
/// \return Previous value
__STATIC_INLINE uint16_t os_exc_inc16_lt (uint16_t *mem, uint16_t max) {
  register uint32_t val, res;
  register uint16_t ret;

  __ASM volatile (
    "loop%=:\n\t"
      "ldrexh %[ret],[%[mem]]\n\t"
      "cmp    %[max],%[ret]\n\t"
      "bhi    update%=\n\t"
      "clrex\n\t"
      "b      exit%=\n\t"
    "update%=:\n\t"
      "adds   %[val],%[ret],#1\n\t"
      "strexh %[res],%[val],[%[mem]]\n\t"
      "cbz    %[res],exit%=\n\t"
      "b      loop%=\n\t"
    "exit%=:"
    : [ret] "=&l" (ret),
      [val] "=&l" (val),
      [res] "=&l" (res)
    : [mem] "l"   (mem),
      [max] "l"   (max)
    : "cc", "memory"
  );

  return ret;
}
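
// Usage sketch (hypothetical semaphore token count, not part of this file):
// release a token only while below the configured maximum; the increment
// happened iff the returned previous value is below max_tokens:
//   static uint16_t tokens;
//   if (os_exc_inc16_lt(&tokens, max_tokens) < max_tokens) {
//     // token released
//   }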
/// Exclusive Access Operation: Increment (16-bit) and clear on Limit
/// \param[in] mem Memory address
/// \param[in] lim Limit value
/// \return Previous value
__STATIC_INLINE uint16_t os_exc_inc16_lim (uint16_t *mem, uint16_t lim) {
  register uint32_t val, res;
  register uint16_t ret;

  __ASM volatile (
    "loop%=:\n\t"
      "ldrexh %[ret],[%[mem]]\n\t"
      "adds   %[val],%[ret],#1\n\t"
      "cmp    %[lim],%[val]\n\t"
      "bhi    update%=\n\t"
      "movs   %[val],#0\n\t"
    "update%=:\n\t"
      "strexh %[res],%[val],[%[mem]]\n\t"
      "cbz    %[res],exit%=\n\t"
      "b      loop%=\n\t"
    "exit%=:"
    : [ret] "=&l" (ret),
      [val] "=&l" (val),
      [res] "=&l" (res)
    : [mem] "l"   (mem),
      [lim] "l"   (lim)
    : "cc", "memory"
  );

  return ret;
}
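
// Usage sketch (hypothetical ring-buffer put index and BUF_SIZE, not part
// of this file): advance the index atomically, wrapping to 0 at the limit:
//   static uint16_t put_idx;
//   uint16_t idx = os_exc_inc16_lim(&put_idx, BUF_SIZE);  // returns old index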
/// Exclusive Access Operation: Decrement (32-bit) if Not Zero
/// \param[in] mem Memory address
/// \return Previous value
__STATIC_INLINE uint32_t os_exc_dec32_nz (uint32_t *mem) {
  register uint32_t val, res;
  register uint32_t ret;

  __ASM volatile (
    "loop%=:\n\t"
      "ldrex %[ret],[%[mem]]\n\t"
      "cbnz  %[ret],update%=\n\t"
      "clrex\n\t"
      "b     exit%=\n\t"
    "update%=:\n\t"
      "subs  %[val],%[ret],#1\n\t"
      "strex %[res],%[val],[%[mem]]\n\t"
      "cbz   %[res],exit%=\n\t"
      "b     loop%=\n\t"
    "exit%=:"
    : [ret] "=&l" (ret),
      [val] "=&l" (val),
      [res] "=&l" (res)
    : [mem] "l"   (mem)
    : "cc", "memory"
  );

  return ret;
}
/// Exclusive Access Operation: Decrement (16-bit) if Not Zero
/// \param[in] mem Memory address
/// \return Previous value
__STATIC_INLINE uint16_t os_exc_dec16_nz (uint16_t *mem) {
  register uint32_t val, res;
  register uint16_t ret;

  __ASM volatile (
    "loop%=:\n\t"
      "ldrexh %[ret],[%[mem]]\n\t"
      "cbnz   %[ret],update%=\n\t"
      "clrex\n\t"
      "b      exit%=\n\t"
    "update%=:\n\t"
      "subs   %[val],%[ret],#1\n\t"
      "strexh %[res],%[val],[%[mem]]\n\t"
      "cbz    %[res],exit%=\n\t"
      "b      loop%=\n\t"
    "exit%=:"
    : [ret] "=&l" (ret),
      [val] "=&l" (val),
      [res] "=&l" (res)
    : [mem] "l"   (mem)
    : "cc", "memory"
  );

  return ret;
}
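
// Usage sketch (pairs with the os_exc_inc16_lt release sketch above): try
// to take a semaphore token without blocking; the decrement happened iff
// the returned previous value is non-zero:
//   if (os_exc_dec16_nz(&tokens) != 0U) {
//     // token acquired
//   }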
#endif  // (__CORTEX_M >= 3U)

#endif  // __CORE_CM_H