/* core_cm.h */
  1. /*
  2. * Copyright (c) 2013-2016 ARM Limited. All rights reserved.
  3. *
  4. * SPDX-License-Identifier: Apache-2.0
  5. *
  6. * Licensed under the Apache License, Version 2.0 (the License); you may
  7. * not use this file except in compliance with the License.
  8. * You may obtain a copy of the License at
  9. *
  10. * http://www.apache.org/licenses/LICENSE-2.0
  11. *
  12. * Unless required by applicable law or agreed to in writing, software
  13. * distributed under the License is distributed on an AS IS BASIS, WITHOUT
  14. * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15. * See the License for the specific language governing permissions and
  16. * limitations under the License.
  17. *
  18. * -----------------------------------------------------------------------------
  19. *
  20. * Project: CMSIS-RTOS RTX
  21. * Title: Cortex-M Core definitions
  22. *
  23. * -----------------------------------------------------------------------------
  24. */
#ifndef CORE_CM_H_
#define CORE_CM_H_

#include "RTE_Components.h"
#include CMSIS_device_header

// Default every architecture feature-test macro to 0 when the compiler did
// not define it, so the macros can be summed safely in #if arithmetic below.
#ifndef __ARM_ARCH_6M__
#define __ARM_ARCH_6M__ 0U
#endif
#ifndef __ARM_ARCH_7M__
#define __ARM_ARCH_7M__ 0U
#endif
#ifndef __ARM_ARCH_7EM__
#define __ARM_ARCH_7EM__ 0U
#endif
#ifndef __ARM_ARCH_8M_BASE__
#define __ARM_ARCH_8M_BASE__ 0U
#endif
#ifndef __ARM_ARCH_8M_MAIN__
#define __ARM_ARCH_8M_MAIN__ 0U
#endif

// Exactly one target architecture must be selected.
#if ((__ARM_ARCH_6M__ + \
__ARM_ARCH_7M__ + \
__ARM_ARCH_7EM__ + \
__ARM_ARCH_8M_BASE__ + \
__ARM_ARCH_8M_MAIN__) != 1U)
#error "Unknown ARM Architecture!"
#endif

// Non-secure build of RTX on ARMv8-M selects the Non-secure domain.
#ifdef RTE_CMSIS_RTOS2_RTX5_ARMV8M_NS
#define __DOMAIN_NS 1U
#endif

// Non-secure domain is only meaningful on ARMv8-M (TrustZone-M).
#ifndef __DOMAIN_NS
#define __DOMAIN_NS 0U
#elif ((__DOMAIN_NS == 1U) && \
((__ARM_ARCH_6M__ == 1U) || \
(__ARM_ARCH_7M__ == 1U) || \
(__ARM_ARCH_7EM__ == 1U)))
#error "Non-secure domain requires ARMv8-M Architecture!"
#endif

// Exclusive-access (LDREX/STREX) support: available on all targets here
// except ARMv6-M.
#ifndef __EXCLUSIVE_ACCESS
#if ((__ARM_ARCH_7M__ == 1U) || \
(__ARM_ARCH_7EM__ == 1U) || \
(__ARM_ARCH_8M_BASE__ == 1U) || \
(__ARM_ARCH_8M_MAIN__ == 1U))
#define __EXCLUSIVE_ACCESS 1U
#else
#define __EXCLUSIVE_ACCESS 0U
#endif
#endif
// True when executing privileged (CONTROL.nPRIV == 0).
#define IS_PRIVILEGED() ((__get_CONTROL() & 1U) == 0U)

// True when executing in handler (exception) mode: IPSR holds the active
// exception number, 0 in Thread mode.
#define IS_IRQ_MODE() (__get_IPSR() != 0U)

// True when interrupts are masked. BASEPRI exists only on ARMv7-M and
// ARMv8-M mainline.
#if ((__ARM_ARCH_7M__ == 1U) || \
(__ARM_ARCH_7EM__ == 1U) || \
(__ARM_ARCH_8M_MAIN__ == 1U))
#define IS_IRQ_MASKED() ((__get_PRIMASK() != 0U) || (__get_BASEPRI() != 0U))
#else
#define IS_IRQ_MASKED() (__get_PRIMASK() != 0U)
#endif

// Initial xPSR for a new thread: only the Thumb state bit (T) set.
#define XPSR_INITIAL_VALUE 0x01000000U

// Initial EXC_RETURN value for a new thread's stack frame:
// 0xFD = return to Thread mode, use PSP, standard (non-FP) frame;
// 0xBC = Non-secure variant used when __DOMAIN_NS is selected.
#if (__DOMAIN_NS == 1U)
#define STACK_FRAME_INIT 0xBCU
#else
#define STACK_FRAME_INIT 0xFDU
#endif

// EXC_RETURN bit 4 clear => extended (FP) stack frame was pushed.
#define IS_EXTENDED_STACK_FRAME(n) (((n) & 0x10U) == 0U)
// ==== Service Calls definitions ====

// Register carrying the service-function address into the SVC handler:
// r12 on ARMv7-M/ARMv8-M mainline; r7 on ARMv6-M/ARMv8-M baseline (a low
// register is required by the 16-bit instruction set).
#if ((__ARM_ARCH_7M__ == 1U) || \
(__ARM_ARCH_7EM__ == 1U) || \
(__ARM_ARCH_8M_MAIN__ == 1U))
#define SVC_RegF "r12"
#elif ((__ARM_ARCH_6M__ == 1U) || \
(__ARM_ARCH_8M_BASE__ == 1U))
#define SVC_RegF "r7"
#endif

// Bind a result/argument variable to the exact core register rN.
#define SVC_ArgN(n) \
register uint32_t __r##n __ASM("r"#n)
#define SVC_ArgR(n,a) \
register uint32_t __r##n __ASM("r"#n) = (uint32_t)a
// Bind the service-function address to SVC_RegF.
#define SVC_ArgF(f) \
register uint32_t __rf __ASM(SVC_RegF) = (uint32_t)f

// Inline-asm input operand lists: function register plus 0..4 arguments.
#define SVC_In0 "r"(__rf)
#define SVC_In1 "r"(__rf),"r"(__r0)
#define SVC_In2 "r"(__rf),"r"(__r0),"r"(__r1)
#define SVC_In3 "r"(__rf),"r"(__r0),"r"(__r1),"r"(__r2)
#define SVC_In4 "r"(__rf),"r"(__r0),"r"(__r1),"r"(__r2),"r"(__r3)

// Output operand lists: no result, or result returned in r0.
#define SVC_Out0
#define SVC_Out1 "=r"(__r0)

// Clobber lists: the caller-saved registers not already listed as operands.
#define SVC_CL0 "r0","r1","r2","r3","lr","cc"
#define SVC_CL1 "r1","r2","r3","lr","cc"
#define SVC_CL2 "r2","r3","lr","cc"
#define SVC_CL3 "r3","lr","cc"
#define SVC_CL4 "lr","cc"

// Issue the supervisor call (SVC #0) with the given operand/clobber lists.
#define SVC_Call0(in, out, cl) \
__ASM volatile ("svc 0" : out : in : cl)
// SVC0_xx(f,...) generate an always-inline wrapper __svcf that traps into the
// kernel via SVC #0 and dispatches to os_svcf. Suffix encodes the number of
// arguments; trailing N means "no return value".

// 0 arguments, no result.
#define SVC0_0N(f,t) \
__attribute__((always_inline)) \
__STATIC_INLINE t __svc##f (void) { \
SVC_ArgF(os_svc##f); \
SVC_Call0(SVC_In0, SVC_Out0, SVC_CL0); \
}

// 0 arguments, result in r0.
#define SVC0_0(f,t) \
__attribute__((always_inline)) \
__STATIC_INLINE t __svc##f (void) { \
SVC_ArgN(0); \
SVC_ArgF(os_svc##f); \
SVC_Call0(SVC_In0, SVC_Out1, SVC_CL1); \
return (t) __r0; \
}

// 1 argument, no result.
#define SVC0_1N(f,t,t1) \
__attribute__((always_inline)) \
__STATIC_INLINE t __svc##f (t1 a1) { \
SVC_ArgR(0,a1); \
SVC_ArgF(os_svc##f); \
SVC_Call0(SVC_In1, SVC_Out0, SVC_CL1); \
}

// 1 argument, result in r0.
#define SVC0_1(f,t,t1) \
__attribute__((always_inline)) \
__STATIC_INLINE t __svc##f (t1 a1) { \
SVC_ArgR(0,a1); \
SVC_ArgF(os_svc##f); \
SVC_Call0(SVC_In1, SVC_Out1, SVC_CL1); \
return (t) __r0; \
}

// 2 arguments, result in r0.
#define SVC0_2(f,t,t1,t2) \
__attribute__((always_inline)) \
__STATIC_INLINE t __svc##f (t1 a1, t2 a2) { \
SVC_ArgR(0,a1); \
SVC_ArgR(1,a2); \
SVC_ArgF(os_svc##f); \
SVC_Call0(SVC_In2, SVC_Out1, SVC_CL2); \
return (t) __r0; \
}

// 3 arguments, result in r0.
#define SVC0_3(f,t,t1,t2,t3) \
__attribute__((always_inline)) \
__STATIC_INLINE t __svc##f (t1 a1, t2 a2, t3 a3) { \
SVC_ArgR(0,a1); \
SVC_ArgR(1,a2); \
SVC_ArgR(2,a3); \
SVC_ArgF(os_svc##f); \
SVC_Call0(SVC_In3, SVC_Out1, SVC_CL3); \
return (t) __r0; \
}

// 4 arguments, result in r0.
#define SVC0_4(f,t,t1,t2,t3,t4) \
__attribute__((always_inline)) \
__STATIC_INLINE t __svc##f (t1 a1, t2 a2, t3 a3, t4 a4) { \
SVC_ArgR(0,a1); \
SVC_ArgR(1,a2); \
SVC_ArgR(2,a3); \
SVC_ArgR(3,a4); \
SVC_ArgF(os_svc##f); \
SVC_Call0(SVC_In4, SVC_Out1, SVC_CL4); \
return (t) __r0; \
}
// ==== Core Peripherals functions ====

// System Clock Frequency (Core Clock) — defined by the device's startup/system
// code (CMSIS convention).
extern uint32_t SystemCoreClock;
/// Initialize SVC and PendSV System Service Calls.
/// Sets PendSV to the lowest exception priority and SVCall one effective
/// (group) level above it, so SVC can preempt PendSV while both stay below
/// all other interrupts used by the application.
__STATIC_INLINE void os_SVC_Initialize (void) {
#if (__ARM_ARCH_8M_MAIN__ == 1U)
  uint32_t p, n;
  // SHPR[10] = PendSV priority: lowest (write all ones; only the implemented
  // high-order bits stick).
  SCB->SHPR[10] = 0xFFU;
  // n = number of unimplemented low-order priority bits, derived from what
  // the register read back.
  n = 32U - (uint32_t)__CLZ(~(SCB->SHPR[10] | 0xFFFFFF00U));
  p = NVIC_GetPriorityGrouping();
  if (p >= n) {
    // Grouping point hides the lowest implemented bit: shift further so SVC
    // still differs from PendSV in the preemption (group) field.
    n = p + 1U;
  }
  // SHPR[7] = SVCall priority: one level above the lowest.
  SCB->SHPR[7] = (uint8_t)(0xFEU << n);
#elif (__ARM_ARCH_8M_BASE__ == 1U)
  // Word-access-only SHPR: PendSV priority (SHPR[1] bits 23:16) = lowest.
  SCB->SHPR[1] |= 0x00FF0000U;
  // SVCall priority (SHPR[0] bits 31:26): copy PendSV priority shifted to one
  // level higher. NOTE(review): assumes the baseline 2-bit priority layout —
  // confirm against the ARMv8-M baseline SHPR definition.
  SCB->SHPR[0] |= (SCB->SHPR[1] << (8+1)) & 0xFC000000U;
#elif ((__ARM_ARCH_7M__ == 1U) || (__ARM_ARCH_7EM__ == 1U))
  uint32_t p, n;
  // Same scheme as ARMv8-M mainline, using the byte-addressable SHP array.
  SCB->SHP[10] = 0xFFU;
  n = 32U - (uint32_t)__CLZ(~(SCB->SHP[10] | 0xFFFFFF00U));
  p = NVIC_GetPriorityGrouping();
  if (p >= n) {
    n = p + 1U;
  }
  SCB->SHP[7] = (uint8_t)(0xFEU << n);
#elif (__ARM_ARCH_6M__ == 1U)
  // ARMv6-M: word-access-only SHP, fixed 2-bit priorities.
  SCB->SHP[1] |= 0x00FF0000U;
  SCB->SHP[0] |= (SCB->SHP[1] << (8+1)) & 0xFC000000U;
#endif
}
/// Setup SysTick Timer.
/// \param[in] period Timer Load value in core clock cycles.
///                   NOTE(review): must be >= 1 (period 0 underflows to
///                   0xFFFFFFFF) and fit the 24-bit LOAD register — callers
///                   are expected to guarantee this; not validated here.
__STATIC_INLINE void os_SysTick_Setup (uint32_t period) {
  SysTick->LOAD = period - 1U;   // counter counts LOAD+1 cycles per tick
  SysTick->VAL = 0U;             // clear current count so first tick is full
  // Set the SysTick exception to the lowest priority.
#if (__ARM_ARCH_8M_MAIN__ == 1U)
  SCB->SHPR[11] = 0xFFU;
#elif (__ARM_ARCH_8M_BASE__ == 1U)
  SCB->SHPR[1] |= 0xFF000000U;   // SysTick field: SHPR[1] bits 31:24
#elif ((__ARM_ARCH_7M__ == 1U) || (__ARM_ARCH_7EM__ == 1U))
  SCB->SHP[11] = 0xFFU;
#elif (__ARM_ARCH_6M__ == 1U)
  SCB->SHP[1] |= 0xFF000000U;    // SysTick field: SHP[1] bits 31:24
#endif
}
  221. /// Get SysTick Period
  222. /// \return SysTick Period
  223. __STATIC_INLINE uint32_t os_SysTick_GetPeriod (void) {
  224. return (SysTick->LOAD + 1U);
  225. }
  226. /// Get SysTick Value
  227. /// \return SysTick Value
  228. __STATIC_INLINE uint32_t os_SysTick_GetVal (void) {
  229. return (SysTick->LOAD - SysTick->VAL);
  230. }
  231. /// Get SysTick Overflow (Auto Clear)
  232. /// \return SysTick Overflow flag
  233. __STATIC_INLINE uint32_t os_SysTick_GetOvf (void) {
  234. return ((SysTick->CTRL >> 16) & 1U);
  235. }
  236. /// Enable SysTick Timer
  237. __STATIC_INLINE void os_SysTick_Enable (void) {
  238. SysTick->CTRL = SysTick_CTRL_ENABLE_Msk |
  239. SysTick_CTRL_TICKINT_Msk |
  240. SysTick_CTRL_CLKSOURCE_Msk;
  241. }
  242. /// Disable SysTick Timer
  243. __STATIC_INLINE void os_SysTick_Disable (void) {
  244. SysTick->CTRL = 0U;
  245. }
/// Setup External Tick Timer Interrupt.
/// Sets the interrupt's priority to the lowest level.
/// \param[in] irqn Interrupt number (device IRQn, expected >= 0).
__STATIC_INLINE void os_ExtTick_SetupIRQ (int32_t irqn) {
#if (__ARM_ARCH_8M_MAIN__ == 1U)
  // Byte-addressable priority register.
  NVIC->IPR[irqn] = 0xFFU;
#elif (__ARM_ARCH_8M_BASE__ == 1U)
  // Word-access only: read-modify-write the byte lane for this IRQ.
  NVIC->IPR[irqn >> 2] = (NVIC->IPR[irqn >> 2] & ~(0xFFU << ((irqn & 3) << 3))) |
  (0xFFU << ((irqn & 3) << 3));
#elif ((__ARM_ARCH_7M__ == 1U) || \
(__ARM_ARCH_7EM__ == 1U))
  // Byte-addressable priority register (named IP on ARMv7-M).
  NVIC->IP[irqn] = 0xFFU;
#elif (__ARM_ARCH_6M__ == 1U)
  // Word-access only: read-modify-write the byte lane for this IRQ.
  NVIC->IP[irqn >> 2] = (NVIC->IP[irqn >> 2] & ~(0xFFU << ((irqn & 3) << 3))) |
  (0xFFU << ((irqn & 3) << 3));
#endif
}
  262. /// Enable External Tick Timer Interrupt
  263. /// \param[in] irqn Interrupt number
  264. __STATIC_INLINE void os_ExtTick_EnableIRQ (int32_t irqn) {
  265. NVIC->ISER[irqn >> 5] = 1U << (irqn & 0x1F);
  266. }
  267. /// Disable External Tick Timer Interrupt
  268. /// \param[in] irqn Interrupt number
  269. __STATIC_INLINE void os_ExtTick_DisableIRQ (int32_t irqn) {
  270. NVIC->ICER[irqn >> 5] = 1U << (irqn & 0x1F);
  271. }
  272. /// Get Pending SV (Service Call) and ST (SysTick) Flags
  273. /// \return Pending SV&ST Flags
  274. __STATIC_INLINE uint8_t os_GetPendSV_ST (void) {
  275. return ((uint8_t)((SCB->ICSR & (SCB_ICSR_PENDSVSET_Msk | SCB_ICSR_PENDSTSET_Msk)) >> 24));
  276. }
  277. /// Get Pending SV (Service Call) Flag
  278. /// \return Pending SV Flag
  279. __STATIC_INLINE uint8_t os_GetPendSV (void) {
  280. return ((uint8_t)((SCB->ICSR & (SCB_ICSR_PENDSVSET_Msk)) >> 24));
  281. }
  282. /// Clear Pending SV (Service Call) and ST (SysTick) Flags
  283. __STATIC_INLINE void os_ClrPendSV_ST (void) {
  284. SCB->ICSR = SCB_ICSR_PENDSVCLR_Msk | SCB_ICSR_PENDSTCLR_Msk;
  285. }
  286. /// Clear Pending SV (Service Call) Flag
  287. __STATIC_INLINE void os_ClrPendSV (void) {
  288. SCB->ICSR = SCB_ICSR_PENDSVCLR_Msk;
  289. }
  290. /// Set Pending SV (Service Call) Flag
  291. __STATIC_INLINE void os_SetPendSV (void) {
  292. SCB->ICSR = SCB_ICSR_PENDSVSET_Msk;
  293. }
  294. /// Set Pending Flags
  295. /// \param[in] flags Flags to set
  296. __STATIC_INLINE void os_SetPendFlags (uint8_t flags) {
  297. SCB->ICSR = ((uint32_t)flags << 24);
  298. }
  299. // ==== Exclusive Access Operation ====
  300. #if (__EXCLUSIVE_ACCESS == 1U)
/// Exclusive Access Operation: Write (8-bit).
/// Atomically stores val and returns the value previously held, retrying the
/// LDREXB/STREXB pair until the exclusive store succeeds.
/// \param[in] mem Memory address
/// \param[in] val Value to write
/// \return Previous value
__STATIC_INLINE uint8_t os_exc_wr8 (uint8_t *mem, uint8_t val) {
  register uint32_t res;
  register uint8_t ret;

  __ASM volatile (
  ".syntax unified\n\t"
  "1:\n\t"
  "ldrexb %[ret],[%[mem]]\n\t"        // ret = *mem (exclusive load)
  "strexb %[res],%[val],[%[mem]]\n\t" // try *mem = val; res = 0 on success
  "cbz %[res],2f\n\t"                 // done if the store succeeded
  "b 1b\n\t"                          // lost exclusivity: retry
  "2:"
  : [ret] "=&l" (ret),
    [res] "=&l" (res)
  : [mem] "l" (mem),
    [val] "l" (val)
  : "memory"
  );

  return ret;
}
/// Exclusive Access Operation: Set bits (32-bit).
/// Atomically performs *mem |= bits via an LDREX/STREX retry loop.
/// \param[in] mem Memory address
/// \param[in] bits Bit mask
/// \return New value (*mem after the OR)
__STATIC_INLINE uint32_t os_exc_set32 (uint32_t *mem, uint32_t bits) {
  register uint32_t val, res;
  register uint32_t ret;

  __ASM volatile (
  ".syntax unified\n\t"
  "1:\n\t"
  "ldrex %[val],[%[mem]]\n\t"         // val = *mem (exclusive load)
#if (__ARM_ARCH_8M_BASE__ == 1U)
  // Baseline has no 3-operand ORR: copy then OR in place (flag-setting form).
  "mov %[ret],%[val]\n\t"
  "orrs %[ret],%[bits]\n\t"
#else
  "orr %[ret],%[val],%[bits]\n\t"     // ret = val | bits
#endif
  "strex %[res],%[ret],[%[mem]]\n\t"  // try *mem = ret; res = 0 on success
  "cbz %[res],2f\n\t"                 // done if the store succeeded
  "b 1b\n\t"                          // lost exclusivity: retry
  "2:"
  : [ret] "=&l" (ret),
    [val] "=&l" (val),
    [res] "=&l" (res)
  : [mem] "l" (mem),
    [bits] "l" (bits)
#if (__ARM_ARCH_8M_BASE__ == 1U)
  : "memory", "cc"                    // ORRS updates the flags
#else
  : "memory"
#endif
  );

  return ret;
}
/// Exclusive Access Operation: Clear bits (32-bit).
/// Atomically performs *mem &= ~bits via an LDREX/STREX retry loop.
/// \param[in] mem Memory address
/// \param[in] bits Bit mask
/// \return Previous value (*mem before the clear)
__STATIC_INLINE uint32_t os_exc_clr32 (uint32_t *mem, uint32_t bits) {
  register uint32_t val, res;
  register uint32_t ret;

  __ASM volatile (
  ".syntax unified\n\t"
  "1:\n\t"
  "ldrex %[ret],[%[mem]]\n\t"         // ret = *mem (exclusive load)
#if (__ARM_ARCH_8M_BASE__ == 1U)
  // Baseline has no 3-operand BIC: copy then clear in place (flag-setting form).
  "mov %[val],%[ret]\n\t"
  "bics %[val],%[bits]\n\t"
#else
  "bic %[val],%[ret],%[bits]\n\t"     // val = ret & ~bits
#endif
  "strex %[res],%[val],[%[mem]]\n\t"  // try *mem = val; res = 0 on success
  "cbz %[res],2f\n\t"                 // done if the store succeeded
  "b 1b\n\t"                          // lost exclusivity: retry
  "2:"
  : [ret] "=&l" (ret),
    [val] "=&l" (val),
    [res] "=&l" (res)
  : [mem] "l" (mem),
    [bits] "l" (bits)
#if (__ARM_ARCH_8M_BASE__ == 1U)
  : "memory", "cc"                    // BICS updates the flags
#else
  : "memory"
#endif
  );

  return ret;
}
/// Exclusive Access Operation: Check if all specified bits (32-bit) are active
/// and clear them.
/// \param[in] mem Memory address
/// \param[in] bits Bit mask
/// \return Previous value of *mem when ALL requested bits were set (the bits
///         are then cleared atomically), or 0 if not all were set (no change).
__STATIC_INLINE uint32_t os_exc_chk32_all (uint32_t *mem, uint32_t bits) {
  register uint32_t val, res;
  register uint32_t ret;

  __ASM volatile (
  ".syntax unified\n\t"
  "1:\n\t"
  "ldrex %[ret],[%[mem]]\n\t"         // ret = *mem (exclusive load)
#if (__ARM_ARCH_8M_BASE__ == 1U)
  "mov %[val],%[ret]\n\t"
  "ands %[val],%[bits]\n\t"           // val = ret & bits (2-operand form)
#else
  "and %[val],%[ret],%[bits]\n\t"     // val = ret & bits
#endif
  "cmp %[val],%[bits]\n\t"            // all requested bits present?
  "beq 2f\n\t"
  "clrex\n\t"                         // no: release exclusivity,
  "movs %[ret],#0\n\t"                //     return 0 without modifying *mem
  "b 3f\n\t"
  "2:\n\t"
#if (__ARM_ARCH_8M_BASE__ == 1U)
  "mov %[val],%[ret]\n\t"
  "bics %[val],%[bits]\n\t"           // val = ret & ~bits (2-operand form)
#else
  "bic %[val],%[ret],%[bits]\n\t"     // val = ret & ~bits
#endif
  "strex %[res],%[val],[%[mem]]\n\t"  // try *mem = val; res = 0 on success
  "cbz %[res],3f\n\t"                 // done if the store succeeded
  "b 1b\n\t"                          // lost exclusivity: retry
  "3:"
  : [ret] "=&l" (ret),
    [val] "=&l" (val),
    [res] "=&l" (res)
  : [mem] "l" (mem),
    [bits] "l" (bits)
  : "cc", "memory"
  );

  return ret;
}
/// Exclusive Access Operation: Check if any specified bits (32-bit) are active
/// and clear them.
/// \param[in] mem Memory address
/// \param[in] bits Bit mask
/// \return Previous value of *mem when at least one requested bit was set
///         (ALL requested bits are then cleared atomically), or 0 if none
///         were set (no change).
__STATIC_INLINE uint32_t os_exc_chk32_any (uint32_t *mem, uint32_t bits) {
  register uint32_t val, res;
  register uint32_t ret;

  __ASM volatile (
  ".syntax unified\n\t"
  "1:\n\t"
  "ldrex %[ret],[%[mem]]\n\t"         // ret = *mem (exclusive load)
  "tst %[ret],%[bits]\n\t"            // any requested bit set?
  "bne 2f\n\t"
  "clrex\n\t"                         // no: release exclusivity,
  "movs %[ret],#0\n\t"                //     return 0 without modifying *mem
  "b 3f\n\t"
  "2:\n\t"
#if (__ARM_ARCH_8M_BASE__ == 1U)
  "mov %[val],%[ret]\n\t"
  "bics %[val],%[bits]\n\t"           // val = ret & ~bits (2-operand form)
#else
  "bic %[val],%[ret],%[bits]\n\t"     // val = ret & ~bits
#endif
  "strex %[res],%[val],[%[mem]]\n\t"  // try *mem = val; res = 0 on success
  "cbz %[res],3f\n\t"                 // done if the store succeeded
  "b 1b\n\t"                          // lost exclusivity: retry
  "3:"
  : [ret] "=&l" (ret),
    [val] "=&l" (val),
    [res] "=&l" (res)
  : [mem] "l" (mem),
    [bits] "l" (bits)
  : "cc", "memory"
  );

  return ret;
}
/// Exclusive Access Operation: Increment (32-bit).
/// Atomically performs (*mem)++ via an LDREX/STREX retry loop.
/// \param[in] mem Memory address
/// \return Previous value (before the increment)
__STATIC_INLINE uint32_t os_exc_inc32 (uint32_t *mem) {
  register uint32_t val, res;
  register uint32_t ret;

  __ASM volatile (
  ".syntax unified\n\t"
  "1:\n\t"
  "ldrex %[ret],[%[mem]]\n\t"         // ret = *mem (exclusive load)
  "adds %[val],%[ret],#1\n\t"         // val = ret + 1
  "strex %[res],%[val],[%[mem]]\n\t"  // try *mem = val; res = 0 on success
  "cbz %[res],2f\n\t"                 // done if the store succeeded
  "b 1b\n\t"                          // lost exclusivity: retry
  "2:"
  : [ret] "=&l" (ret),
    [val] "=&l" (val),
    [res] "=&l" (res)
  : [mem] "l" (mem)
  : "cc", "memory"
  );

  return ret;
}
/// Exclusive Access Operation: Increment (16-bit) if Less Than.
/// Atomically performs (*mem)++ only when *mem < max; otherwise *mem is left
/// unchanged.
/// \param[in] mem Memory address
/// \param[in] max Maximum value (exclusive upper bound for the increment)
/// \return Previous value (unchanged value when *mem >= max)
__STATIC_INLINE uint16_t os_exc_inc16_lt (uint16_t *mem, uint16_t max) {
  register uint32_t val, res;
  register uint16_t ret;

  __ASM volatile (
  ".syntax unified\n\t"
  "1:\n\t"
  "ldrexh %[ret],[%[mem]]\n\t"        // ret = *mem (exclusive load)
  "cmp %[max],%[ret]\n\t"             // increment only if max > ret
  "bhi 2f\n\t"
  "clrex\n\t"                         // at limit: release exclusivity,
  "b 3f\n\t"                          //           return ret unchanged
  "2:\n\t"
  "adds %[val],%[ret],#1\n\t"         // val = ret + 1
  "strexh %[res],%[val],[%[mem]]\n\t" // try *mem = val; res = 0 on success
  "cbz %[res],3f\n\t"                 // done if the store succeeded
  "b 1b\n\t"                          // lost exclusivity: retry
  "3:"
  : [ret] "=&l" (ret),
    [val] "=&l" (val),
    [res] "=&l" (res)
  : [mem] "l" (mem),
    [max] "l" (max)
  : "cc", "memory"
  );

  return ret;
}
/// Exclusive Access Operation: Increment (16-bit) and clear on Limit.
/// Atomically advances *mem to (*mem + 1), wrapping to 0 when the incremented
/// value reaches lim — i.e. *mem cycles through 0..lim-1.
/// \param[in] mem Memory address
/// \param[in] lim Limit value (wrap point; the stored value never reaches it)
/// \return Previous value (before the increment/wrap)
__STATIC_INLINE uint16_t os_exc_inc16_lim (uint16_t *mem, uint16_t lim) {
  register uint32_t val, res;
  register uint16_t ret;

  __ASM volatile (
  ".syntax unified\n\t"
  "1:\n\t"
  "ldrexh %[ret],[%[mem]]\n\t"        // ret = *mem (exclusive load)
  "adds %[val],%[ret],#1\n\t"         // val = ret + 1
  "cmp %[lim],%[val]\n\t"             // still below the limit?
  "bhi 2f\n\t"
  "movs %[val],#0\n\t"                // reached lim: wrap to 0
  "2:\n\t"
  "strexh %[res],%[val],[%[mem]]\n\t" // try *mem = val; res = 0 on success
  "cbz %[res],3f\n\t"                 // done if the store succeeded
  "b 1b\n\t"                          // lost exclusivity: retry
  "3:"
  : [ret] "=&l" (ret),
    [val] "=&l" (val),
    [res] "=&l" (res)
  : [mem] "l" (mem),
    [lim] "l" (lim)
  : "cc", "memory"
  );

  return ret;
}
/// Exclusive Access Operation: Decrement (32-bit) if Not Zero.
/// Atomically performs (*mem)-- only when *mem != 0; a zero value is left
/// unchanged.
/// \param[in] mem Memory address
/// \return Previous value (0 means no decrement occurred)
__STATIC_INLINE uint32_t os_exc_dec32_nz (uint32_t *mem) {
  register uint32_t val, res;
  register uint32_t ret;

  __ASM volatile (
  ".syntax unified\n\t"
  "1:\n\t"
  "ldrex %[ret],[%[mem]]\n\t"         // ret = *mem (exclusive load)
  "cbnz %[ret],2f\n\t"                // nonzero: go decrement
  "clrex\n\t"                         // zero: release exclusivity,
  "b 3f\n\t"                          //       return 0 without modifying *mem
  "2:\n\t"
  "subs %[val],%[ret],#1\n\t"         // val = ret - 1
  "strex %[res],%[val],[%[mem]]\n\t"  // try *mem = val; res = 0 on success
  "cbz %[res],3f\n\t"                 // done if the store succeeded
  "b 1b\n\t"                          // lost exclusivity: retry
  "3:"
  : [ret] "=&l" (ret),
    [val] "=&l" (val),
    [res] "=&l" (res)
  : [mem] "l" (mem)
  : "cc", "memory"
  );

  return ret;
}
/// Exclusive Access Operation: Decrement (16-bit) if Not Zero.
/// Atomically performs (*mem)-- only when *mem != 0; a zero value is left
/// unchanged.
/// \param[in] mem Memory address
/// \return Previous value (0 means no decrement occurred)
__STATIC_INLINE uint16_t os_exc_dec16_nz (uint16_t *mem) {
  register uint32_t val, res;
  register uint16_t ret;

  __ASM volatile (
  ".syntax unified\n\t"
  "1:\n\t"
  "ldrexh %[ret],[%[mem]]\n\t"        // ret = *mem (exclusive load)
  "cbnz %[ret],2f\n\t"                // nonzero: go decrement
  "clrex\n\t"                         // zero: release exclusivity,
  "b 3f\n\t"                          //       return 0 without modifying *mem
  "2:\n\t"
  "subs %[val],%[ret],#1\n\t"         // val = ret - 1
  "strexh %[res],%[val],[%[mem]]\n\t" // try *mem = val; res = 0 on success
  "cbz %[res],3f\n\t"                 // done if the store succeeded
  "b 1b\n\t"                          // lost exclusivity: retry
  "3:"
  : [ret] "=&l" (ret),
    [val] "=&l" (val),
    [res] "=&l" (res)
  : [mem] "l" (mem)
  : "cc", "memory"
  );

  return ret;
}
  606. #endif // (__EXCLUSIVE_ACCESS == 1U)
  607. #endif // CORE_CM_H_