/* atomic_arm.c */
  1. /*
  2. * Copyright (c) 2006-2023, RT-Thread Development Team
  3. *
  4. * SPDX-License-Identifier: Apache-2.0
  5. *
  6. * Change Logs:
  7. * Date Author Notes
  8. * 2022-07-27 flybreak the first version
  9. * 2026-03-09 wdfk-prog add 8/16-bit atomic operations support
  10. */
  11. #include <rtthread.h>
  12. #if defined (__IAR_SYSTEMS_ICC__) /* for IAR Compiler */
  13. #include <intrinsics.h>
  14. #include <iccarm_builtin.h>
  15. #endif
/**
  \brief   LDR Exclusive (8 bit)
  \details Executes a exclusive LDR instruction for 8 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint8_t at (*ptr)
*/
#if defined (__ARMCC_VERSION) && (__ARMCC_VERSION >= 6010050) /* ARM Compiler V6 */
#ifndef __LDREXB
#define __LDREXB (uint8_t)__builtin_arm_ldrex
#endif
#define __LDREXB_PRIV(ptr) ((rt_atomic8_t)__LDREXB((volatile uint8_t *)(ptr)))
#elif defined(__ARMCC_VERSION) /* ARM Compiler V5 */
#if __ARMCC_VERSION < 5060020
#define __LDREXB_PRIV(ptr) ((rt_atomic8_t ) __ldrex(ptr))
#else
/* newer AC5 deprecates __ldrex: suppress diagnostic 3731 only around this use */
#define __LDREXB_PRIV(ptr) _Pragma("push") _Pragma("diag_suppress 3731") ((rt_atomic8_t ) __ldrex(ptr)) _Pragma("pop")
#endif
#elif defined (__IAR_SYSTEMS_ICC__) /* for IAR Compiler */
_Pragma("inline=forced") __intrinsic rt_atomic8_t __LDREXB_PRIV(volatile rt_atomic8_t *ptr)
{
    return __iar_builtin_LDREXB((volatile unsigned char *)ptr);
}
#elif defined (__GNUC__) /* GNU GCC Compiler */
__attribute__((always_inline)) static inline rt_atomic8_t __LDREXB_PRIV(volatile rt_atomic8_t *addr)
{
    uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
    /* GCC >= 4.8 understands the "Q" (single-register memory) constraint */
    __asm volatile ("ldrexb %0, %1" : "=r" (result) : "Q" (*addr) );
#else
    __asm volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
    return (rt_atomic8_t)result;
}
#endif
/**
  \brief   STR Exclusive (8 bit)
  \details Executes a exclusive STR instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]  ptr    Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
*/
#if defined (__ARMCC_VERSION) && (__ARMCC_VERSION >= 6010050) /* ARM Compiler V6 */
#ifndef __STREXB
#define __STREXB (uint32_t)__builtin_arm_strex
#endif
#define __STREXB_PRIV(value, ptr) ((rt_atomic_t)__STREXB((uint8_t)(value), (volatile uint8_t *)(ptr)))
#elif defined(__ARMCC_VERSION) /* ARM Compiler V5 */
#if __ARMCC_VERSION < 5060020
#define __STREXB_PRIV(value, ptr) __strex(value, ptr)
#else
/* newer AC5 deprecates __strex: suppress diagnostic 3731 only around this use */
#define __STREXB_PRIV(value, ptr) _Pragma("push") _Pragma("diag_suppress 3731") __strex(value, ptr) _Pragma("pop")
#endif
#elif defined (__IAR_SYSTEMS_ICC__) /* for IAR Compiler */
_Pragma("inline=forced") __intrinsic rt_atomic_t __STREXB_PRIV(rt_atomic8_t value, volatile rt_atomic8_t *ptr)
{
    return __iar_builtin_STREXB(value, (volatile unsigned char *)ptr);
}
#elif defined (__GNUC__) /* GNU GCC Compiler */
__attribute__((always_inline)) static inline rt_atomic_t __STREXB_PRIV(rt_atomic8_t value, volatile rt_atomic8_t *addr)
{
    rt_atomic_t result;

    /* "=&r": success flag must not share a register with the input operands */
    __asm volatile ("strexb %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
    return result;
}
#endif
/**
  \brief   LDR Exclusive (16 bit)
  \details Executes a exclusive LDR instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
*/
#if defined (__ARMCC_VERSION) && (__ARMCC_VERSION >= 6010050) /* ARM Compiler V6 */
#ifndef __LDREXH
#define __LDREXH (uint16_t)__builtin_arm_ldrex
#endif
#define __LDREXH_PRIV(ptr) ((rt_atomic16_t)__LDREXH((volatile uint16_t *)(ptr)))
#elif defined(__ARMCC_VERSION) /* ARM Compiler V5 */
#if __ARMCC_VERSION < 5060020
#define __LDREXH_PRIV(ptr) ((rt_atomic16_t ) __ldrex(ptr))
#else
/* newer AC5 deprecates __ldrex: suppress diagnostic 3731 only around this use */
#define __LDREXH_PRIV(ptr) _Pragma("push") _Pragma("diag_suppress 3731") ((rt_atomic16_t ) __ldrex(ptr)) _Pragma("pop")
#endif
#elif defined (__IAR_SYSTEMS_ICC__) /* for IAR Compiler */
_Pragma("inline=forced") __intrinsic rt_atomic16_t __LDREXH_PRIV(volatile rt_atomic16_t *ptr)
{
    return __iar_builtin_LDREXH((volatile unsigned short *)ptr);
}
#elif defined (__GNUC__) /* GNU GCC Compiler */
__attribute__((always_inline)) static inline rt_atomic16_t __LDREXH_PRIV(volatile rt_atomic16_t *addr)
{
    uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
    /* GCC >= 4.8 understands the "Q" (single-register memory) constraint */
    __asm volatile ("ldrexh %0, %1" : "=r" (result) : "Q" (*addr) );
#else
    __asm volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
    return (rt_atomic16_t)result;
}
#endif
/**
  \brief   STR Exclusive (16 bit)
  \details Executes a exclusive STR instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]  ptr    Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
*/
#if defined (__ARMCC_VERSION) && (__ARMCC_VERSION >= 6010050) /* ARM Compiler V6 */
#ifndef __STREXH
#define __STREXH (uint32_t)__builtin_arm_strex
#endif
#define __STREXH_PRIV(value, ptr) ((rt_atomic_t)__STREXH((uint16_t)(value), (volatile uint16_t *)(ptr)))
#elif defined(__ARMCC_VERSION) /* ARM Compiler V5 */
#if __ARMCC_VERSION < 5060020
#define __STREXH_PRIV(value, ptr) __strex(value, ptr)
#else
/* newer AC5 deprecates __strex: suppress diagnostic 3731 only around this use */
#define __STREXH_PRIV(value, ptr) _Pragma("push") _Pragma("diag_suppress 3731") __strex(value, ptr) _Pragma("pop")
#endif
#elif defined (__IAR_SYSTEMS_ICC__) /* for IAR Compiler */
_Pragma("inline=forced") __intrinsic rt_atomic_t __STREXH_PRIV(rt_atomic16_t value, volatile rt_atomic16_t *ptr)
{
    return __iar_builtin_STREXH(value, (volatile unsigned short *)ptr);
}
#elif defined (__GNUC__) /* GNU GCC Compiler */
__attribute__((always_inline)) static inline rt_atomic_t __STREXH_PRIV(rt_atomic16_t value, volatile rt_atomic16_t *addr)
{
    rt_atomic_t result;

    /* "=&r": success flag must not share a register with the input operands */
    __asm volatile ("strexh %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
    return result;
}
#endif
/**
  \brief   LDR Exclusive (32 bit)
  \details Executes a exclusive LDR instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
*/
#if defined (__ARMCC_VERSION) && (__ARMCC_VERSION >= 6010050) /* ARM Compiler V6 */
#define __LDREXW (rt_atomic_t)__builtin_arm_ldrex
#elif defined(__ARMCC_VERSION) /* ARM Compiler V5 */
#if __ARMCC_VERSION < 5060020
#define __LDREXW(ptr) ((rt_atomic_t ) __ldrex(ptr))
#else
/* newer AC5 deprecates __ldrex: suppress diagnostic 3731 only around this use */
#define __LDREXW(ptr) _Pragma("push") _Pragma("diag_suppress 3731") ((rt_atomic_t ) __ldrex(ptr)) _Pragma("pop")
#endif
#elif defined (__IAR_SYSTEMS_ICC__) /* for IAR Compiler */
_Pragma("inline=forced") __intrinsic rt_atomic_t __LDREXW(volatile rt_atomic_t *ptr)
{
    return __iar_builtin_LDREX((volatile unsigned int *)ptr);
}
#elif defined (__GNUC__) /* GNU GCC Compiler */
__attribute__((always_inline)) static inline rt_atomic_t __LDREXW(volatile rt_atomic_t *addr)
{
    rt_atomic_t result;

    __asm volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) );
    return result;
}
#endif
  175. /**
  176. \brief STR Exclusive (32 bit)
  177. \details Executes a exclusive STR instruction for 32 bit values.
  178. \param [in] value Value to store
  179. \param [in] ptr Pointer to location
  180. \return 0 Function succeeded
  181. \return 1 Function failed
  182. */
  183. #if defined (__ARMCC_VERSION) && (__ARMCC_VERSION >= 6010050) /* ARM Compiler V6 */
  184. #define __STREXW (rt_atomic_t)__builtin_arm_strex
  185. #elif defined(__ARMCC_VERSION) /* ARM Compiler V5 */
  186. #if __ARMCC_VERSION < 5060020
  187. #define __STREXW(value, ptr) __strex(value, ptr)
  188. #else
  189. #define __STREXW(value, ptr) _Pragma("push") _Pragma("diag_suppress 3731") __strex(value, ptr) _Pragma("pop")
  190. #endif
  191. #elif defined (__IAR_SYSTEMS_ICC__) /* for IAR Compiler */
  192. _Pragma("inline=forced") __intrinsic rt_atomic_t __STREXW(rt_atomic_t value, volatile rt_atomic_t *ptr)
  193. {
  194. return __STREX(value, (unsigned int *)ptr);
  195. }
  196. #elif defined (__GNUC__) /* GNU GCC Compiler */
  197. __attribute__((always_inline)) static inline rt_atomic_t __STREXW(volatile rt_atomic_t value, volatile rt_atomic_t *addr)
  198. {
  199. rt_atomic_t result;
  200. __asm volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
  201. return result;
  202. }
  203. #endif
  204. rt_atomic_t rt_hw_atomic_load(volatile rt_atomic_t *ptr)
  205. {
  206. rt_atomic_t oldval;
  207. do
  208. {
  209. oldval = __LDREXW(ptr);
  210. } while ((__STREXW(oldval, ptr)) != 0U);
  211. return oldval;
  212. }
  213. void rt_hw_atomic_store(volatile rt_atomic_t *ptr, rt_atomic_t val)
  214. {
  215. do
  216. {
  217. __LDREXW(ptr);
  218. } while ((__STREXW(val, ptr)) != 0U);
  219. }
  220. rt_atomic8_t rt_hw_atomic_load8(volatile rt_atomic8_t *ptr)
  221. {
  222. rt_atomic8_t oldval;
  223. do
  224. {
  225. oldval = __LDREXB_PRIV(ptr);
  226. } while ((__STREXB_PRIV(oldval, ptr)) != 0U);
  227. return oldval;
  228. }
  229. void rt_hw_atomic_store8(volatile rt_atomic8_t *ptr, rt_atomic8_t val)
  230. {
  231. do
  232. {
  233. __LDREXB_PRIV(ptr);
  234. } while ((__STREXB_PRIV(val, ptr)) != 0U);
  235. }
  236. rt_atomic16_t rt_hw_atomic_load16(volatile rt_atomic16_t *ptr)
  237. {
  238. rt_atomic16_t oldval;
  239. do
  240. {
  241. oldval = __LDREXH_PRIV(ptr);
  242. } while ((__STREXH_PRIV(oldval, ptr)) != 0U);
  243. return oldval;
  244. }
  245. void rt_hw_atomic_store16(volatile rt_atomic16_t *ptr, rt_atomic16_t val)
  246. {
  247. do
  248. {
  249. __LDREXH_PRIV(ptr);
  250. } while ((__STREXH_PRIV(val, ptr)) != 0U);
  251. }
  252. rt_atomic_t rt_hw_atomic_add(volatile rt_atomic_t *ptr, rt_atomic_t val)
  253. {
  254. rt_atomic_t oldval;
  255. do
  256. {
  257. oldval = __LDREXW(ptr);
  258. } while ((__STREXW(oldval + val, ptr)) != 0U);
  259. return oldval;
  260. }
  261. rt_atomic_t rt_hw_atomic_sub(volatile rt_atomic_t *ptr, rt_atomic_t val)
  262. {
  263. rt_atomic_t oldval;
  264. do
  265. {
  266. oldval = __LDREXW(ptr);
  267. } while ((__STREXW(oldval - val, ptr)) != 0U);
  268. return oldval;
  269. }
  270. rt_atomic8_t rt_hw_atomic_and8(volatile rt_atomic8_t *ptr, rt_atomic8_t val)
  271. {
  272. rt_atomic8_t oldval;
  273. do
  274. {
  275. oldval = __LDREXB_PRIV(ptr);
  276. } while ((__STREXB_PRIV((rt_atomic8_t)(oldval & val), ptr)) != 0U);
  277. return oldval;
  278. }
  279. rt_atomic8_t rt_hw_atomic_or8(volatile rt_atomic8_t *ptr, rt_atomic8_t val)
  280. {
  281. rt_atomic8_t oldval;
  282. do
  283. {
  284. oldval = __LDREXB_PRIV(ptr);
  285. } while ((__STREXB_PRIV((rt_atomic8_t)(oldval | val), ptr)) != 0U);
  286. return oldval;
  287. }
  288. rt_atomic16_t rt_hw_atomic_and16(volatile rt_atomic16_t *ptr, rt_atomic16_t val)
  289. {
  290. rt_atomic16_t oldval;
  291. do
  292. {
  293. oldval = __LDREXH_PRIV(ptr);
  294. } while ((__STREXH_PRIV((rt_atomic16_t)(oldval & val), ptr)) != 0U);
  295. return oldval;
  296. }
  297. rt_atomic16_t rt_hw_atomic_or16(volatile rt_atomic16_t *ptr, rt_atomic16_t val)
  298. {
  299. rt_atomic16_t oldval;
  300. do
  301. {
  302. oldval = __LDREXH_PRIV(ptr);
  303. } while ((__STREXH_PRIV((rt_atomic16_t)(oldval | val), ptr)) != 0U);
  304. return oldval;
  305. }
  306. rt_atomic_t rt_hw_atomic_and(volatile rt_atomic_t *ptr, rt_atomic_t val)
  307. {
  308. rt_atomic_t oldval;
  309. do
  310. {
  311. oldval = __LDREXW(ptr);
  312. } while ((__STREXW(oldval & val, ptr)) != 0U);
  313. return oldval;
  314. }
  315. rt_atomic_t rt_hw_atomic_or(volatile rt_atomic_t *ptr, rt_atomic_t val)
  316. {
  317. rt_atomic_t oldval;
  318. do
  319. {
  320. oldval = __LDREXW(ptr);
  321. } while ((__STREXW(oldval | val, ptr)) != 0U);
  322. return oldval;
  323. }
  324. rt_atomic_t rt_hw_atomic_xor(volatile rt_atomic_t *ptr, rt_atomic_t val)
  325. {
  326. rt_atomic_t oldval;
  327. do
  328. {
  329. oldval = __LDREXW(ptr);
  330. } while ((__STREXW(oldval ^ val, ptr)) != 0U);
  331. return oldval;
  332. }
  333. rt_atomic_t rt_hw_atomic_exchange(volatile rt_atomic_t *ptr, rt_atomic_t val)
  334. {
  335. rt_atomic_t oldval;
  336. do
  337. {
  338. oldval = __LDREXW(ptr);
  339. } while ((__STREXW(val, ptr)) != 0U);
  340. return oldval;
  341. }
  342. void rt_hw_atomic_flag_clear(volatile rt_atomic_t *ptr)
  343. {
  344. do
  345. {
  346. __LDREXW(ptr);
  347. } while ((__STREXW(0, ptr)) != 0U);
  348. }
  349. rt_atomic_t rt_hw_atomic_flag_test_and_set(volatile rt_atomic_t *ptr)
  350. {
  351. rt_atomic_t oldval;
  352. do
  353. {
  354. oldval = __LDREXW(ptr);
  355. } while ((__STREXW(1, ptr)) != 0U);
  356. return oldval;
  357. }
/**
 * @brief  Atomic compare-and-swap: if *ptr equals *old, store new into *ptr;
 *         otherwise write the current value of *ptr back into *old.
 * @param  ptr  Address of the atomic variable.
 * @param  old  In: expected value. Out: actual value of *ptr on mismatch.
 * @param  new  Value to store when *ptr matches the expected value.
 * @return Non-zero (1) when the exchange was performed, 0 on mismatch.
 */
rt_atomic_t rt_hw_atomic_compare_exchange_strong(volatile rt_atomic_t *ptr, rt_atomic_t *old, rt_atomic_t new)
{
    rt_atomic_t result;
    rt_atomic_t temp = *old;
    do
    {
        result = __LDREXW(ptr);
        if (result != temp)
        {
            /* mismatch: report the observed value to the caller, then store
             * the unchanged value back so the exclusive monitor opened by
             * LDREX is not left dangling, and give up */
            *old = result;
            __STREXW(result, ptr);
            break;
        }
    } while ((__STREXW(new, ptr)) != 0U); /* retry if another writer intervened */
    return (result == temp);
}