/*
 * SPDX-FileCopyrightText: 2015-2022 Espressif Systems (Shanghai) CO LTD
 *
 * SPDX-License-Identifier: Apache-2.0
 */

// Replacement for GCC built-in atomic functions

#include "sdkconfig.h"
#include <stdbool.h>
#include <stdint.h>
#include <string.h>
#include "soc/soc_caps.h"
#include "freertos/FreeRTOS.h"

#ifdef __XTENSA__

#include "xtensa/config/core-isa.h"

#ifndef XCHAL_HAVE_S32C1I
#error "XCHAL_HAVE_S32C1I not defined, include correct header!"
#endif

#define HAS_ATOMICS_32 (XCHAL_HAVE_S32C1I == 1)
// No 64-bit atomics on Xtensa
#define HAS_ATOMICS_64 0

#else // RISC-V
// The GCC toolchain defines this preprocessor macro when the RISC-V "A" (atomic) extension is supported
#ifndef __riscv_atomic
#define __riscv_atomic 0
#endif

#define HAS_ATOMICS_32 (__riscv_atomic == 1)
#define HAS_ATOMICS_64 ((__riscv_atomic == 1) && (__riscv_xlen == 64))

#endif // (__XTENSA__, __riscv)

#if SOC_CPU_CORES_NUM == 1

// Single-core SoC: atomics can be implemented using portSET_INTERRUPT_MASK_FROM_ISR
// and portCLEAR_INTERRUPT_MASK_FROM_ISR, which disable and re-enable interrupts.
#define _ATOMIC_ENTER_CRITICAL() ({ \
    unsigned state = portSET_INTERRUPT_MASK_FROM_ISR(); \
    state; \
})

#define _ATOMIC_EXIT_CRITICAL(state) do { \
    portCLEAR_INTERRUPT_MASK_FROM_ISR(state); \
} while (0)

#else // SOC_CPU_CORES_NUM

_Static_assert(HAS_ATOMICS_32, "32-bit atomics should be supported if SOC_CPU_CORES_NUM > 1");
// Only 64-bit atomics need to be implemented here. A single global portMUX_TYPE spinlock
// is used to emulate them.
static portMUX_TYPE s_atomic_lock = portMUX_INITIALIZER_UNLOCKED;

// The return value is not used but is kept for compatibility with the single-core version above.
#define _ATOMIC_ENTER_CRITICAL() ({ \
    portENTER_CRITICAL_SAFE(&s_atomic_lock); \
    0; \
})

#define _ATOMIC_EXIT_CRITICAL(state) do { \
    (void) (state); \
    portEXIT_CRITICAL_SAFE(&s_atomic_lock); \
} while (0)

#endif // SOC_CPU_CORES_NUM

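/*
 * Illustrative sketch (not compiled): every emulated operation in this file brackets a plain
 * read-modify-write sequence with the two macros above. A hypothetical helper that atomically
 * increments a 64-bit counter would therefore look like this:
 *
 *     static inline uint64_t emulated_increment_u64(volatile uint64_t *p)
 *     {
 *         unsigned state = _ATOMIC_ENTER_CRITICAL();  // mask interrupts / take the spinlock
 *         uint64_t ret = *p + 1;                       // plain RMW, now free of races
 *         *p = ret;
 *         _ATOMIC_EXIT_CRITICAL(state);                // restore interrupts / release the spinlock
 *         return ret;
 *     }
 *
 * The real implementations are generated by the macros that follow.
 */
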
#ifdef __clang__
// Clang does not allow "__sync_*" atomics to be defined directly. The workaround is to define the function
// under the name "__sync_*_builtin", which implements the "__sync_*" atomic functionality, and then use an
// asm directive to set the symbol "__sync_*" to the name of the defined function.
#define CLANG_ATOMIC_SUFFIX(name_) name_ ## _builtin
#define CLANG_DECLARE_ALIAS(name_) \
    __asm__(".type " # name_ ", @function\n" \
            ".global " #name_ "\n" \
            ".equ " #name_ ", " #name_ "_builtin");
#else // __clang__
#define CLANG_ATOMIC_SUFFIX(name_) name_
#define CLANG_DECLARE_ALIAS(name_)
#endif // __clang__

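/*
 * Sketch of what the Clang workaround expands to for one symbol (illustration only):
 *
 *     // CLANG_ATOMIC_SUFFIX(__sync_fetch_and_add_4) -> __sync_fetch_and_add_4_builtin,
 *     // so the function body is emitted under the "_builtin" name...
 *     unsigned int __sync_fetch_and_add_4_builtin(volatile void *ptr, unsigned int value) { ... }
 *
 *     // ...and CLANG_DECLARE_ALIAS(__sync_fetch_and_add_4) emits assembler directives that
 *     // make the reserved "__sync_fetch_and_add_4" symbol an alias for it:
 *     __asm__(".type __sync_fetch_and_add_4, @function\n"
 *             ".global __sync_fetch_and_add_4\n"
 *             ".equ __sync_fetch_and_add_4, __sync_fetch_and_add_4_builtin");
 *
 * With GCC both macros are effectively no-ops and the functions are defined under their own names.
 */
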
#define ATOMIC_LOAD(n, type) type __atomic_load_ ## n (const volatile void* mem, int memorder) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    type ret = *(const volatile type*)mem; \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
}

#define ATOMIC_STORE(n, type) void __atomic_store_ ## n (volatile void * mem, type val, int memorder) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    *(volatile type *)mem = val; \
    _ATOMIC_EXIT_CRITICAL(state); \
}

#define ATOMIC_EXCHANGE(n, type) type __atomic_exchange_ ## n (volatile void* mem, type val, int memorder) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    type ret = *(volatile type*)mem; \
    *(volatile type*)mem = val; \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
}

#define CMP_EXCHANGE(n, type) bool __atomic_compare_exchange_ ## n (volatile void* mem, void* expect, type desired, bool weak, int success, int failure) \
{ \
    bool ret = false; \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    if (*(volatile type*)mem == *(type*)expect) { \
        ret = true; \
        *(volatile type*)mem = desired; \
    } else { \
        *(type*)expect = *(volatile type*)mem; \
    } \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
}

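/*
 * Usage sketch (assumes a target without native 64-bit atomics, e.g. the Xtensa chips):
 * ordinary C11 code is lowered by the compiler to the libcalls generated above, so no
 * application changes are needed.
 *
 *     #include <stdatomic.h>
 *
 *     _Atomic uint64_t counter;
 *
 *     bool try_claim(uint64_t expected, uint64_t desired)
 *     {
 *         // Compiles to a call to __atomic_compare_exchange_8() on targets where an
 *         // 8-byte compare-exchange is not available in hardware.
 *         return atomic_compare_exchange_strong(&counter, &expected, desired);
 *     }
 */
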
#define FETCH_ADD(n, type) type __atomic_fetch_add_ ## n (volatile void* ptr, type value, int memorder) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    type ret = *(volatile type*)ptr; \
    *(volatile type*)ptr = *(volatile type*)ptr + value; \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
}

#define ADD_FETCH(n, type) type __atomic_add_fetch_ ## n (volatile void* ptr, type value, int memorder) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    type ret = *(volatile type*)ptr + value; \
    *(volatile type*)ptr = ret; \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
}

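/*
 * Note: the "fetch_add" flavour returns the value the object held *before* the addition,
 * while "add_fetch" returns the value *after* it. For example (sketch):
 *
 *     uint32_t v = 5;
 *     __atomic_fetch_add_4(&v, 1, __ATOMIC_SEQ_CST);   // returns 5, v is now 6
 *     __atomic_add_fetch_4(&v, 1, __ATOMIC_SEQ_CST);   // returns 7, v is now 7
 *
 * The same old-value/new-value pairing applies to the sub/and/or/xor/nand macros below.
 */
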
#define FETCH_SUB(n, type) type __atomic_fetch_sub_ ## n (volatile void* ptr, type value, int memorder) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    type ret = *(volatile type*)ptr; \
    *(volatile type*)ptr = *(volatile type*)ptr - value; \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
}

#define SUB_FETCH(n, type) type __atomic_sub_fetch_ ## n (volatile void* ptr, type value, int memorder) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    type ret = *(volatile type*)ptr - value; \
    *(volatile type*)ptr = ret; \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
}

#define FETCH_AND(n, type) type __atomic_fetch_and_ ## n (volatile void* ptr, type value, int memorder) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    type ret = *(volatile type*)ptr; \
    *(volatile type*)ptr = *(volatile type*)ptr & value; \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
}

#define AND_FETCH(n, type) type __atomic_and_fetch_ ## n (volatile void* ptr, type value, int memorder) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    type ret = *(volatile type*)ptr & value; \
    *(volatile type*)ptr = ret; \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
}

#define FETCH_OR(n, type) type __atomic_fetch_or_ ## n (volatile void* ptr, type value, int memorder) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    type ret = *(volatile type*)ptr; \
    *(volatile type*)ptr = *(volatile type*)ptr | value; \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
}

#define OR_FETCH(n, type) type __atomic_or_fetch_ ## n (volatile void* ptr, type value, int memorder) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    type ret = *(volatile type*)ptr | value; \
    *(volatile type*)ptr = ret; \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
}

#define FETCH_XOR(n, type) type __atomic_fetch_xor_ ## n (volatile void* ptr, type value, int memorder) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    type ret = *(volatile type*)ptr; \
    *(volatile type*)ptr = *(volatile type*)ptr ^ value; \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
}

#define XOR_FETCH(n, type) type __atomic_xor_fetch_ ## n (volatile void* ptr, type value, int memorder) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    type ret = *(volatile type*)ptr ^ value; \
    *(volatile type*)ptr = ret; \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
}

#define FETCH_NAND(n, type) type __atomic_fetch_nand_ ## n (volatile void* ptr, type value, int memorder) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    type ret = *(volatile type*)ptr; \
    *(volatile type*)ptr = ~(*(volatile type*)ptr & value); \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
}

#define NAND_FETCH(n, type) type __atomic_nand_fetch_ ## n (volatile void* ptr, type value, int memorder) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    type ret = ~(*(volatile type*)ptr & value); \
    *(volatile type*)ptr = ret; \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
}

#define SYNC_FETCH_OP(op, n, type) type CLANG_ATOMIC_SUFFIX(__sync_fetch_and_ ## op ##_ ## n) (volatile void* ptr, type value) \
{ \
    return __atomic_fetch_ ## op ##_ ## n (ptr, value, __ATOMIC_SEQ_CST); \
} \
CLANG_DECLARE_ALIAS( __sync_fetch_and_ ## op ##_ ## n )

#define SYNC_OP_FETCH(op, n, type) type CLANG_ATOMIC_SUFFIX(__sync_ ## op ##_and_fetch_ ## n) (volatile void* ptr, type value) \
{ \
    return __atomic_ ## op ##_fetch_ ## n (ptr, value, __ATOMIC_SEQ_CST); \
} \
CLANG_DECLARE_ALIAS( __sync_ ## op ##_and_fetch_ ## n )

#define SYNC_BOOL_CMP_EXCHANGE(n, type) bool CLANG_ATOMIC_SUFFIX(__sync_bool_compare_and_swap_ ## n) (volatile void* ptr, type oldval, type newval) \
{ \
    bool ret = false; \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    if (*(volatile type*)ptr == oldval) { \
        *(volatile type*)ptr = newval; \
        ret = true; \
    } \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
} \
CLANG_DECLARE_ALIAS( __sync_bool_compare_and_swap_ ## n )

#define SYNC_VAL_CMP_EXCHANGE(n, type) type CLANG_ATOMIC_SUFFIX(__sync_val_compare_and_swap_ ## n) (volatile void* ptr, type oldval, type newval) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    type ret = *(volatile type*)ptr; \
    if (*(volatile type*)ptr == oldval) { \
        *(volatile type*)ptr = newval; \
    } \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
} \
CLANG_DECLARE_ALIAS( __sync_val_compare_and_swap_ ## n )

#define SYNC_LOCK_TEST_AND_SET(n, type) type CLANG_ATOMIC_SUFFIX(__sync_lock_test_and_set_ ## n) (volatile void* ptr, type val) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    type ret = *(volatile type*)ptr; \
    *(volatile type*)ptr = val; \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
} \
CLANG_DECLARE_ALIAS( __sync_lock_test_and_set_ ## n )

#define SYNC_LOCK_RELEASE(n, type) void CLANG_ATOMIC_SUFFIX(__sync_lock_release_ ## n) (volatile void* ptr) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    *(volatile type*)ptr = 0; \
    _ATOMIC_EXIT_CRITICAL(state); \
} \
CLANG_DECLARE_ALIAS( __sync_lock_release_ ## n )

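/*
 * Illustrative expansion (not compiled): SYNC_FETCH_OP(add, 4, unsigned int) from the
 * instantiation lists below produces roughly the following, i.e. the legacy "__sync_*"
 * builtins are implemented on top of the "__atomic_*" emulation with SEQ_CST ordering:
 *
 *     unsigned int __sync_fetch_and_add_4(volatile void *ptr, unsigned int value)
 *     {
 *         return __atomic_fetch_add_4(ptr, value, __ATOMIC_SEQ_CST);
 *     }
 *
 * (Under Clang the function is emitted as __sync_fetch_and_add_4_builtin and aliased,
 * as described in the CLANG_DECLARE_ALIAS comment above.)
 */
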
#if !HAS_ATOMICS_32

_Static_assert(sizeof(unsigned char) == 1, "atomics require a 1-byte type");
_Static_assert(sizeof(short unsigned int) == 2, "atomics require a 2-byte type");
_Static_assert(sizeof(unsigned int) == 4, "atomics require a 4-byte type");

ATOMIC_EXCHANGE(1, unsigned char)
ATOMIC_EXCHANGE(2, short unsigned int)
ATOMIC_EXCHANGE(4, unsigned int)

CMP_EXCHANGE(1, unsigned char)
CMP_EXCHANGE(2, short unsigned int)
CMP_EXCHANGE(4, unsigned int)

FETCH_ADD(1, unsigned char)
FETCH_ADD(2, short unsigned int)
FETCH_ADD(4, unsigned int)

ADD_FETCH(1, unsigned char)
ADD_FETCH(2, short unsigned int)
ADD_FETCH(4, unsigned int)

FETCH_SUB(1, unsigned char)
FETCH_SUB(2, short unsigned int)
FETCH_SUB(4, unsigned int)

SUB_FETCH(1, unsigned char)
SUB_FETCH(2, short unsigned int)
SUB_FETCH(4, unsigned int)

FETCH_AND(1, unsigned char)
FETCH_AND(2, short unsigned int)
FETCH_AND(4, unsigned int)

AND_FETCH(1, unsigned char)
AND_FETCH(2, short unsigned int)
AND_FETCH(4, unsigned int)

FETCH_OR(1, unsigned char)
FETCH_OR(2, short unsigned int)
FETCH_OR(4, unsigned int)

OR_FETCH(1, unsigned char)
OR_FETCH(2, short unsigned int)
OR_FETCH(4, unsigned int)

FETCH_XOR(1, unsigned char)
FETCH_XOR(2, short unsigned int)
FETCH_XOR(4, unsigned int)

XOR_FETCH(1, unsigned char)
XOR_FETCH(2, short unsigned int)
XOR_FETCH(4, unsigned int)

FETCH_NAND(1, unsigned char)
FETCH_NAND(2, short unsigned int)
FETCH_NAND(4, unsigned int)

NAND_FETCH(1, unsigned char)
NAND_FETCH(2, short unsigned int)
NAND_FETCH(4, unsigned int)

SYNC_FETCH_OP(add, 1, unsigned char)
SYNC_FETCH_OP(add, 2, short unsigned int)
SYNC_FETCH_OP(add, 4, unsigned int)

SYNC_OP_FETCH(add, 1, unsigned char)
SYNC_OP_FETCH(add, 2, short unsigned int)
SYNC_OP_FETCH(add, 4, unsigned int)

SYNC_FETCH_OP(sub, 1, unsigned char)
SYNC_FETCH_OP(sub, 2, short unsigned int)
SYNC_FETCH_OP(sub, 4, unsigned int)

SYNC_OP_FETCH(sub, 1, unsigned char)
SYNC_OP_FETCH(sub, 2, short unsigned int)
SYNC_OP_FETCH(sub, 4, unsigned int)

SYNC_FETCH_OP(and, 1, unsigned char)
SYNC_FETCH_OP(and, 2, short unsigned int)
SYNC_FETCH_OP(and, 4, unsigned int)

SYNC_OP_FETCH(and, 1, unsigned char)
SYNC_OP_FETCH(and, 2, short unsigned int)
SYNC_OP_FETCH(and, 4, unsigned int)

SYNC_FETCH_OP(or, 1, unsigned char)
SYNC_FETCH_OP(or, 2, short unsigned int)
SYNC_FETCH_OP(or, 4, unsigned int)

SYNC_OP_FETCH(or, 1, unsigned char)
SYNC_OP_FETCH(or, 2, short unsigned int)
SYNC_OP_FETCH(or, 4, unsigned int)

SYNC_FETCH_OP(xor, 1, unsigned char)
SYNC_FETCH_OP(xor, 2, short unsigned int)
SYNC_FETCH_OP(xor, 4, unsigned int)

SYNC_OP_FETCH(xor, 1, unsigned char)
SYNC_OP_FETCH(xor, 2, short unsigned int)
SYNC_OP_FETCH(xor, 4, unsigned int)

SYNC_FETCH_OP(nand, 1, unsigned char)
SYNC_FETCH_OP(nand, 2, short unsigned int)
SYNC_FETCH_OP(nand, 4, unsigned int)

SYNC_OP_FETCH(nand, 1, unsigned char)
SYNC_OP_FETCH(nand, 2, short unsigned int)
SYNC_OP_FETCH(nand, 4, unsigned int)

SYNC_BOOL_CMP_EXCHANGE(1, unsigned char)
SYNC_BOOL_CMP_EXCHANGE(2, short unsigned int)
SYNC_BOOL_CMP_EXCHANGE(4, unsigned int)

SYNC_VAL_CMP_EXCHANGE(1, unsigned char)
SYNC_VAL_CMP_EXCHANGE(2, short unsigned int)
SYNC_VAL_CMP_EXCHANGE(4, unsigned int)

SYNC_LOCK_TEST_AND_SET(1, unsigned char)
SYNC_LOCK_TEST_AND_SET(2, short unsigned int)
SYNC_LOCK_TEST_AND_SET(4, unsigned int)

SYNC_LOCK_RELEASE(1, unsigned char)
SYNC_LOCK_RELEASE(2, short unsigned int)
SYNC_LOCK_RELEASE(4, unsigned int)

// LLVM has not implemented native atomic loads/stores for RISC-V targets without the Atomic extension
// (see the LLVM thread: https://reviews.llvm.org/D47553). Even though GCC does transform them, these
// libcalls need to be available for the case where an LLVM-based project links against IDF.
ATOMIC_LOAD(1, unsigned char)
ATOMIC_LOAD(2, short unsigned int)
ATOMIC_LOAD(4, unsigned int)
ATOMIC_STORE(1, unsigned char)
ATOMIC_STORE(2, short unsigned int)
ATOMIC_STORE(4, unsigned int)

#endif // !HAS_ATOMICS_32

#if !HAS_ATOMICS_64

_Static_assert(sizeof(long long unsigned int) == 8, "atomics require an 8-byte type");

ATOMIC_EXCHANGE(8, long long unsigned int)
CMP_EXCHANGE(8, long long unsigned int)

FETCH_ADD(8, long long unsigned int)
FETCH_SUB(8, long long unsigned int)
FETCH_AND(8, long long unsigned int)
FETCH_OR(8, long long unsigned int)
FETCH_XOR(8, long long unsigned int)
FETCH_NAND(8, long long unsigned int)

ADD_FETCH(8, long long unsigned int)
SUB_FETCH(8, long long unsigned int)
AND_FETCH(8, long long unsigned int)
OR_FETCH(8, long long unsigned int)
XOR_FETCH(8, long long unsigned int)
NAND_FETCH(8, long long unsigned int)

SYNC_FETCH_OP(add, 8, long long unsigned int)
SYNC_FETCH_OP(sub, 8, long long unsigned int)
SYNC_FETCH_OP(and, 8, long long unsigned int)
SYNC_FETCH_OP(or, 8, long long unsigned int)
SYNC_FETCH_OP(xor, 8, long long unsigned int)
SYNC_FETCH_OP(nand, 8, long long unsigned int)

SYNC_OP_FETCH(add, 8, long long unsigned int)
SYNC_OP_FETCH(sub, 8, long long unsigned int)
SYNC_OP_FETCH(and, 8, long long unsigned int)
SYNC_OP_FETCH(or, 8, long long unsigned int)
SYNC_OP_FETCH(xor, 8, long long unsigned int)
SYNC_OP_FETCH(nand, 8, long long unsigned int)

SYNC_BOOL_CMP_EXCHANGE(8, long long unsigned int)
SYNC_VAL_CMP_EXCHANGE(8, long long unsigned int)
SYNC_LOCK_TEST_AND_SET(8, long long unsigned int)
SYNC_LOCK_RELEASE(8, long long unsigned int)

// LLVM has not implemented native atomic loads/stores for RISC-V targets without the Atomic extension
// (see the LLVM thread: https://reviews.llvm.org/D47553). Even though GCC does transform them, these
// libcalls need to be available for the case where an LLVM-based project links against IDF.
ATOMIC_LOAD(8, long long unsigned int)
ATOMIC_STORE(8, long long unsigned int)

#endif // !HAS_ATOMICS_64

// Clang generates calls to the generic __atomic_load/__atomic_store functions for objects larger than 4 bytes
void CLANG_ATOMIC_SUFFIX( __atomic_load ) (int size, void *src, void *dest, int model) {
    unsigned state = _ATOMIC_ENTER_CRITICAL();
    memcpy(dest, src, size);
    _ATOMIC_EXIT_CRITICAL(state);
}
CLANG_DECLARE_ALIAS( __atomic_load )

void CLANG_ATOMIC_SUFFIX( __atomic_store ) (int size, void *dest, void *src, int model) {
    unsigned state = _ATOMIC_ENTER_CRITICAL();
    memcpy(dest, src, size);
    _ATOMIC_EXIT_CRITICAL(state);
}
CLANG_DECLARE_ALIAS( __atomic_store )

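/*
 * Usage sketch (illustration only): atomic accesses to objects larger than the largest
 * lock-free size are lowered by Clang to the generic libcalls above. For example, with a
 * hypothetical 16-byte struct:
 *
 *     #include <stdatomic.h>
 *
 *     typedef struct { uint64_t lo; uint64_t hi; } wide_t;
 *     _Atomic wide_t shared;
 *
 *     wide_t read_shared(void)
 *     {
 *         // May compile to: __atomic_load(sizeof(wide_t), &shared, &result, __ATOMIC_SEQ_CST)
 *         return atomic_load(&shared);
 *     }
 */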