/*
 * SPDX-FileCopyrightText: 2015-2021 Espressif Systems (Shanghai) CO LTD
 *
 * SPDX-License-Identifier: Apache-2.0
 */

// Replacement for GCC built-in atomic functions

#include "sdkconfig.h"
#include <stdbool.h>
#include <stdint.h>
#include "soc/soc_caps.h"
#include "freertos/FreeRTOS.h"

#ifdef __XTENSA__
#include "xtensa/config/core-isa.h"

#ifndef XCHAL_HAVE_S32C1I
#error "XCHAL_HAVE_S32C1I not defined, include correct header!"
#endif

#define HAS_ATOMICS_32 (XCHAL_HAVE_S32C1I == 1)
// No 64-bit atomics on Xtensa
#define HAS_ATOMICS_64 0

#else // RISCV

// The GCC toolchain defines this preprocessor macro when the "A" (atomic) extension is supported
#ifndef __riscv_atomic
#define __riscv_atomic 0
#endif

#define HAS_ATOMICS_32 (__riscv_atomic == 1)
#define HAS_ATOMICS_64 ((__riscv_atomic == 1) && (__riscv_xlen == 64))
#endif // (__XTENSA__, __riscv)

#if SOC_CPU_CORES_NUM == 1

// Single-core SoC: atomics can be implemented using portSET_INTERRUPT_MASK_FROM_ISR
// and portCLEAR_INTERRUPT_MASK_FROM_ISR, which disable and re-enable interrupts.
#define _ATOMIC_ENTER_CRITICAL() ({ \
    unsigned state = portSET_INTERRUPT_MASK_FROM_ISR(); \
    state; \
})

#define _ATOMIC_EXIT_CRITICAL(state) do { \
    portCLEAR_INTERRUPT_MASK_FROM_ISR(state); \
} while (0)

#else // SOC_CPU_CORES_NUM

_Static_assert(HAS_ATOMICS_32, "32-bit atomics should be supported if SOC_CPU_CORES_NUM > 1");

// Only need to implement 64-bit atomics here. Use a single global portMUX_TYPE spinlock
// to emulate the atomics.
static portMUX_TYPE s_atomic_lock = portMUX_INITIALIZER_UNLOCKED;

// Return value is not used but kept for compatibility with the single-core version above.
#define _ATOMIC_ENTER_CRITICAL() ({ \
    portENTER_CRITICAL_SAFE(&s_atomic_lock); \
    0; \
})

#define _ATOMIC_EXIT_CRITICAL(state) do { \
    (void) (state); \
    portEXIT_CRITICAL_SAFE(&s_atomic_lock); \
} while (0)

#endif // SOC_CPU_CORES_NUM
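
// A minimal sketch of how the pair above is used by every emulated atomic in this file:
// the "enter" macro either masks interrupts (single core) or takes the shared spinlock
// (multi core), the operation is performed non-atomically, and the "exit" macro restores
// the previous state:
//
//     unsigned state = _ATOMIC_ENTER_CRITICAL();
//     /* plain read-modify-write of the target location */
//     _ATOMIC_EXIT_CRITICAL(state);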

#ifdef __clang__
// Clang does not allow the "__sync_*" builtins to be defined directly. The workaround is to
// define a function named "__sync_*_builtin" which implements the "__sync_*" atomic
// functionality, and then use an asm directive to make the "__sync_*" symbol an alias for it.
#define CLANG_ATOMIC_SUFFIX(name_) name_ ## _builtin
#define CLANG_DECLARE_ALIAS(name_) \
    __asm__(".type " # name_ ", @function\n" \
            ".global " #name_ "\n" \
            ".equ " #name_ ", " #name_ "_builtin");
#else // __clang__
#define CLANG_ATOMIC_SUFFIX(name_) name_
#define CLANG_DECLARE_ALIAS(name_)
#endif // __clang__
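
// The macros below expand to the "__atomic_<op>_<n>" libcall implementations that the
// compiler emits when it cannot inline an atomic operation of size <n> on this target.
//
// As an example of the Clang workaround above: SYNC_FETCH_OP(add, 4, uint32_t) further
// down defines __sync_fetch_and_add_4_builtin(), and CLANG_DECLARE_ALIAS then emits
// roughly:
//
//     .type   __sync_fetch_and_add_4, @function
//     .global __sync_fetch_and_add_4
//     .equ    __sync_fetch_and_add_4, __sync_fetch_and_add_4_builtin
//
// so that references to the builtin name resolve to the function defined here.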

#define ATOMIC_LOAD(n, type) type __atomic_load_ ## n (const type* mem, int memorder) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    type ret = *mem; \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
}

#define ATOMIC_STORE(n, type) void __atomic_store_ ## n (type* mem, type val, int memorder) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    *mem = val; \
    _ATOMIC_EXIT_CRITICAL(state); \
}

#define ATOMIC_EXCHANGE(n, type) type __atomic_exchange_ ## n (type* mem, type val, int memorder) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    type ret = *mem; \
    *mem = val; \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
}

#define CMP_EXCHANGE(n, type) bool __atomic_compare_exchange_ ## n (type* mem, type* expect, type desired, bool weak, int success, int failure) \
{ \
    bool ret = false; \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    if (*mem == *expect) { \
        ret = true; \
        *mem = desired; \
    } else { \
        *expect = *mem; \
    } \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
}

#define FETCH_ADD(n, type) type __atomic_fetch_add_ ## n (type* ptr, type value, int memorder) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    type ret = *ptr; \
    *ptr = *ptr + value; \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
}

#define ADD_FETCH(n, type) type __atomic_add_fetch_ ## n (type* ptr, type value, int memorder) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    type ret = *ptr + value; \
    *ptr = ret; \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
}

#define FETCH_SUB(n, type) type __atomic_fetch_sub_ ## n (type* ptr, type value, int memorder) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    type ret = *ptr; \
    *ptr = *ptr - value; \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
}

#define SUB_FETCH(n, type) type __atomic_sub_fetch_ ## n (type* ptr, type value, int memorder) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    type ret = *ptr - value; \
    *ptr = ret; \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
}

#define FETCH_AND(n, type) type __atomic_fetch_and_ ## n (type* ptr, type value, int memorder) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    type ret = *ptr; \
    *ptr = *ptr & value; \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
}

#define AND_FETCH(n, type) type __atomic_and_fetch_ ## n (type* ptr, type value, int memorder) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    type ret = *ptr & value; \
    *ptr = ret; \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
}

#define FETCH_OR(n, type) type __atomic_fetch_or_ ## n (type* ptr, type value, int memorder) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    type ret = *ptr; \
    *ptr = *ptr | value; \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
}

#define OR_FETCH(n, type) type __atomic_or_fetch_ ## n (type* ptr, type value, int memorder) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    type ret = *ptr | value; \
    *ptr = ret; \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
}

#define FETCH_XOR(n, type) type __atomic_fetch_xor_ ## n (type* ptr, type value, int memorder) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    type ret = *ptr; \
    *ptr = *ptr ^ value; \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
}

#define XOR_FETCH(n, type) type __atomic_xor_fetch_ ## n (type* ptr, type value, int memorder) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    type ret = *ptr ^ value; \
    *ptr = ret; \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
}

#define FETCH_NAND(n, type) type __atomic_fetch_nand_ ## n (type* ptr, type value, int memorder) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    type ret = *ptr; \
    *ptr = ~(*ptr & value); \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
}

#define NAND_FETCH(n, type) type __atomic_nand_fetch_ ## n (type* ptr, type value, int memorder) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    type ret = ~(*ptr & value); \
    *ptr = ret; \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
}
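
// The legacy "__sync_*" builtins: the fetch-and-op/op-and-fetch variants forward to the
// corresponding "__atomic_*" function above with sequentially consistent ordering, while
// the compare-and-swap, test-and-set and lock-release variants are implemented directly
// inside a critical section.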

#define SYNC_FETCH_OP(op, n, type) type CLANG_ATOMIC_SUFFIX(__sync_fetch_and_ ## op ##_ ## n) (type* ptr, type value) \
{ \
    return __atomic_fetch_ ## op ##_ ## n (ptr, value, __ATOMIC_SEQ_CST); \
} \
CLANG_DECLARE_ALIAS( __sync_fetch_and_ ## op ##_ ## n )

#define SYNC_OP_FETCH(op, n, type) type CLANG_ATOMIC_SUFFIX(__sync_ ## op ##_and_fetch_ ## n) (type* ptr, type value) \
{ \
    return __atomic_ ## op ##_fetch_ ## n (ptr, value, __ATOMIC_SEQ_CST); \
} \
CLANG_DECLARE_ALIAS( __sync_ ## op ##_and_fetch_ ## n )

#define SYNC_BOOL_CMP_EXCHANGE(n, type) bool CLANG_ATOMIC_SUFFIX(__sync_bool_compare_and_swap_ ## n) (type *ptr, type oldval, type newval) \
{ \
    bool ret = false; \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    if (*ptr == oldval) { \
        *ptr = newval; \
        ret = true; \
    } \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
} \
CLANG_DECLARE_ALIAS( __sync_bool_compare_and_swap_ ## n )

#define SYNC_VAL_CMP_EXCHANGE(n, type) type CLANG_ATOMIC_SUFFIX(__sync_val_compare_and_swap_ ## n) (type *ptr, type oldval, type newval) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    type ret = *ptr; \
    if (*ptr == oldval) { \
        *ptr = newval; \
    } \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
} \
CLANG_DECLARE_ALIAS( __sync_val_compare_and_swap_ ## n )

#define SYNC_LOCK_TEST_AND_SET(n, type) type CLANG_ATOMIC_SUFFIX(__sync_lock_test_and_set_ ## n) (type *ptr, type val) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    type ret = *ptr; \
    *ptr = val; \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
} \
CLANG_DECLARE_ALIAS( __sync_lock_test_and_set_ ## n )

#define SYNC_LOCK_RELEASE(n, type) void CLANG_ATOMIC_SUFFIX(__sync_lock_release_ ## n) (type *ptr) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    *ptr = 0; \
    _ATOMIC_EXIT_CRITICAL(state); \
} \
CLANG_DECLARE_ALIAS( __sync_lock_release_ ## n )
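
// Instantiate the 1-, 2- and 4-byte implementations only when the target has no native
// 32-bit atomic instructions; otherwise the compiler handles these sizes inline and the
// libcalls are not needed.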

#if !HAS_ATOMICS_32

ATOMIC_EXCHANGE(1, uint8_t)
ATOMIC_EXCHANGE(2, uint16_t)
ATOMIC_EXCHANGE(4, uint32_t)

CMP_EXCHANGE(1, uint8_t)
CMP_EXCHANGE(2, uint16_t)
CMP_EXCHANGE(4, uint32_t)

FETCH_ADD(1, uint8_t)
FETCH_ADD(2, uint16_t)
FETCH_ADD(4, uint32_t)

ADD_FETCH(1, uint8_t)
ADD_FETCH(2, uint16_t)
ADD_FETCH(4, uint32_t)

FETCH_SUB(1, uint8_t)
FETCH_SUB(2, uint16_t)
FETCH_SUB(4, uint32_t)

SUB_FETCH(1, uint8_t)
SUB_FETCH(2, uint16_t)
SUB_FETCH(4, uint32_t)

FETCH_AND(1, uint8_t)
FETCH_AND(2, uint16_t)
FETCH_AND(4, uint32_t)

AND_FETCH(1, uint8_t)
AND_FETCH(2, uint16_t)
AND_FETCH(4, uint32_t)

FETCH_OR(1, uint8_t)
FETCH_OR(2, uint16_t)
FETCH_OR(4, uint32_t)

OR_FETCH(1, uint8_t)
OR_FETCH(2, uint16_t)
OR_FETCH(4, uint32_t)

FETCH_XOR(1, uint8_t)
FETCH_XOR(2, uint16_t)
FETCH_XOR(4, uint32_t)

XOR_FETCH(1, uint8_t)
XOR_FETCH(2, uint16_t)
XOR_FETCH(4, uint32_t)

FETCH_NAND(1, uint8_t)
FETCH_NAND(2, uint16_t)
FETCH_NAND(4, uint32_t)

NAND_FETCH(1, uint8_t)
NAND_FETCH(2, uint16_t)
NAND_FETCH(4, uint32_t)

SYNC_FETCH_OP(add, 1, uint8_t)
SYNC_FETCH_OP(add, 2, uint16_t)
SYNC_FETCH_OP(add, 4, uint32_t)

SYNC_OP_FETCH(add, 1, uint8_t)
SYNC_OP_FETCH(add, 2, uint16_t)
SYNC_OP_FETCH(add, 4, uint32_t)

SYNC_FETCH_OP(sub, 1, uint8_t)
SYNC_FETCH_OP(sub, 2, uint16_t)
SYNC_FETCH_OP(sub, 4, uint32_t)

SYNC_OP_FETCH(sub, 1, uint8_t)
SYNC_OP_FETCH(sub, 2, uint16_t)
SYNC_OP_FETCH(sub, 4, uint32_t)

SYNC_FETCH_OP(and, 1, uint8_t)
SYNC_FETCH_OP(and, 2, uint16_t)
SYNC_FETCH_OP(and, 4, uint32_t)

SYNC_OP_FETCH(and, 1, uint8_t)
SYNC_OP_FETCH(and, 2, uint16_t)
SYNC_OP_FETCH(and, 4, uint32_t)

SYNC_FETCH_OP(or, 1, uint8_t)
SYNC_FETCH_OP(or, 2, uint16_t)
SYNC_FETCH_OP(or, 4, uint32_t)

SYNC_OP_FETCH(or, 1, uint8_t)
SYNC_OP_FETCH(or, 2, uint16_t)
SYNC_OP_FETCH(or, 4, uint32_t)

SYNC_FETCH_OP(xor, 1, uint8_t)
SYNC_FETCH_OP(xor, 2, uint16_t)
SYNC_FETCH_OP(xor, 4, uint32_t)

SYNC_OP_FETCH(xor, 1, uint8_t)
SYNC_OP_FETCH(xor, 2, uint16_t)
SYNC_OP_FETCH(xor, 4, uint32_t)

SYNC_FETCH_OP(nand, 1, uint8_t)
SYNC_FETCH_OP(nand, 2, uint16_t)
SYNC_FETCH_OP(nand, 4, uint32_t)

SYNC_OP_FETCH(nand, 1, uint8_t)
SYNC_OP_FETCH(nand, 2, uint16_t)
SYNC_OP_FETCH(nand, 4, uint32_t)

SYNC_BOOL_CMP_EXCHANGE(1, uint8_t)
SYNC_BOOL_CMP_EXCHANGE(2, uint16_t)
SYNC_BOOL_CMP_EXCHANGE(4, uint32_t)

SYNC_VAL_CMP_EXCHANGE(1, uint8_t)
SYNC_VAL_CMP_EXCHANGE(2, uint16_t)
SYNC_VAL_CMP_EXCHANGE(4, uint32_t)

SYNC_LOCK_TEST_AND_SET(1, uint8_t)
SYNC_LOCK_TEST_AND_SET(2, uint16_t)
SYNC_LOCK_TEST_AND_SET(4, uint32_t)

SYNC_LOCK_RELEASE(1, uint8_t)
SYNC_LOCK_RELEASE(2, uint16_t)
SYNC_LOCK_RELEASE(4, uint32_t)

// LLVM has not implemented native atomic loads/stores for RISC-V targets without the Atomic extension. LLVM thread: https://reviews.llvm.org/D47553.
// Even though GCC does transform them, these libcalls need to be available for the case where an LLVM-based project links against IDF.
ATOMIC_LOAD(1, uint8_t)
ATOMIC_LOAD(2, uint16_t)
ATOMIC_LOAD(4, uint32_t)
ATOMIC_STORE(1, uint8_t)
ATOMIC_STORE(2, uint16_t)
ATOMIC_STORE(4, uint32_t)

#endif // !HAS_ATOMICS_32
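
// 64-bit atomics are never available natively on Xtensa, and on RISC-V only with both the
// "A" extension and a 64-bit XLEN, so the 8-byte implementations below are provided
// whenever HAS_ATOMICS_64 is 0.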

#if !HAS_ATOMICS_64

ATOMIC_EXCHANGE(8, uint64_t)
CMP_EXCHANGE(8, uint64_t)

FETCH_ADD(8, uint64_t)
FETCH_SUB(8, uint64_t)
FETCH_AND(8, uint64_t)
FETCH_OR(8, uint64_t)
FETCH_XOR(8, uint64_t)
FETCH_NAND(8, uint64_t)

ADD_FETCH(8, uint64_t)
SUB_FETCH(8, uint64_t)
AND_FETCH(8, uint64_t)
OR_FETCH(8, uint64_t)
XOR_FETCH(8, uint64_t)
NAND_FETCH(8, uint64_t)

SYNC_FETCH_OP(add, 8, uint64_t)
SYNC_FETCH_OP(sub, 8, uint64_t)
SYNC_FETCH_OP(and, 8, uint64_t)
SYNC_FETCH_OP(or, 8, uint64_t)
SYNC_FETCH_OP(xor, 8, uint64_t)
SYNC_FETCH_OP(nand, 8, uint64_t)

SYNC_OP_FETCH(add, 8, uint64_t)
SYNC_OP_FETCH(sub, 8, uint64_t)
SYNC_OP_FETCH(and, 8, uint64_t)
SYNC_OP_FETCH(or, 8, uint64_t)
SYNC_OP_FETCH(xor, 8, uint64_t)
SYNC_OP_FETCH(nand, 8, uint64_t)

SYNC_BOOL_CMP_EXCHANGE(8, uint64_t)
SYNC_VAL_CMP_EXCHANGE(8, uint64_t)
SYNC_LOCK_TEST_AND_SET(8, uint64_t)
SYNC_LOCK_RELEASE(8, uint64_t)

// LLVM has not implemented native atomic loads/stores for RISC-V targets without the Atomic extension. LLVM thread: https://reviews.llvm.org/D47553.
// Even though GCC does transform them, these libcalls need to be available for the case where an LLVM-based project links against IDF.
ATOMIC_LOAD(8, uint64_t)
ATOMIC_STORE(8, uint64_t)

#endif // !HAS_ATOMICS_64
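
// Illustrative sketch (not part of the original file; names are hypothetical): application
// code using C11 <stdatomic.h> ends up calling the functions defined above whenever the
// compiler cannot inline the operation, e.g. a 64-bit counter on a 32-bit chip:
//
//     #include <stdatomic.h>
//     #include <stdint.h>
//
//     static _Atomic uint64_t s_event_count;
//
//     void on_event(void)
//     {
//         // Lowered to a call to __atomic_fetch_add_8() when HAS_ATOMICS_64 is 0.
//         atomic_fetch_add(&s_event_count, 1);
//     }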