bh_atomic.h 7.8 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299
  1. /*
  2. * Copyright (C) 2023 Amazon Inc. All rights reserved.
  3. * SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  4. */
  5. #ifndef _BH_ATOMIC_H
  6. #define _BH_ATOMIC_H
  7. #include "bh_platform.h"
  8. #include "gnuc.h"
  9. #ifdef __cplusplus
  10. extern "C" {
  11. #endif
  12. /*
  13. * Why don't we use C11 stdatomics here?
  14. *
  15. * Unlike C11 stdatomics,
  16. *
  17. * - bh_atomic_xxx_t is guaranteed to have the same size as the base type.
  18. * Thus more friendly to our AOT conventions.
  19. *
  20. * - It's available for C++.
  21. * Although C++23 will have C-compatible stdatomics.h, it isn't widely
  22. * available yet.
  23. */
  24. /*
  25. * Note about BH_ATOMIC_32_IS_ATOMIC
  26. *
  27. * If BH_ATOMIC_32_IS_ATOMIC == 0, BH_ATOMIC_xxx operations defined below
  28. * are not really atomic and require an external lock.
  29. *
  30. * Expected usage is:
  31. *
  32. * bh_atomic_32_t var = 0;
  33. * uint32 old;
  34. * #if BH_ATOMIC_32_IS_ATOMIC == 0
  35. * lock(&some_lock);
  36. * #endif
  37. * old = BH_ATOMIC_32_FETCH_AND(var, 1);
  38. * #if BH_ATOMIC_32_IS_ATOMIC == 0
  39. * unlock(&some_lock);
  40. * #endif
  41. */
/*
 * Atomic integer types.
 *
 * Each bh_atomic_NN_t is a plain integer typedef, guaranteed to have
 * the same size and representation as the base type (unlike C11
 * _Atomic types, whose representation is implementation-defined).
 */
typedef uint64 bh_atomic_64_t;
typedef uint32 bh_atomic_32_t;
typedef uint16 bh_atomic_16_t;
/* The BH_ATOMIC_NN_IS_ATOMIC flags can be set by the user (via the
 * WASM_UINTNN_IS_ATOMIC macros) if the platform supports atomic
 * operations of the corresponding width.
 * If left undefined, they will be automatically defined below
 * according to the detected compiler/platform.
 */
#ifdef WASM_UINT64_IS_ATOMIC
#define BH_ATOMIC_64_IS_ATOMIC WASM_UINT64_IS_ATOMIC
#endif /* WASM_UINT64_IS_ATOMIC */
#ifdef WASM_UINT32_IS_ATOMIC
#define BH_ATOMIC_32_IS_ATOMIC WASM_UINT32_IS_ATOMIC
#endif /* WASM_UINT32_IS_ATOMIC */
#ifdef WASM_UINT16_IS_ATOMIC
#define BH_ATOMIC_16_IS_ATOMIC WASM_UINT16_IS_ATOMIC
#endif /* WASM_UINT16_IS_ATOMIC */
/* Detect the GCC/Clang __atomic_* builtins, available since
 * GCC 4.7 and Clang 3.0. */
#if defined(__GNUC_PREREQ)
#if __GNUC_PREREQ(4, 7)
#define CLANG_GCC_HAS_ATOMIC_BUILTIN
#endif
#elif defined(__clang__)
/* NOTE(review): this condition is equivalent to __clang_major__ >= 3;
 * kept verbatim in case a minor-version cutoff is intended later. */
#if __clang_major__ > 3 || (__clang_major__ == 3 && __clang_minor__ >= 0)
#define CLANG_GCC_HAS_ATOMIC_BUILTIN
#endif
#endif
#if defined(CLANG_GCC_HAS_ATOMIC_BUILTIN)
#ifndef BH_ATOMIC_64_IS_ATOMIC
#define BH_ATOMIC_64_IS_ATOMIC 1
#endif
#ifndef BH_ATOMIC_32_IS_ATOMIC
#define BH_ATOMIC_32_IS_ATOMIC 1
#endif
#ifndef BH_ATOMIC_16_IS_ATOMIC
#define BH_ATOMIC_16_IS_ATOMIC 1
#endif
#else
/* No builtin support detected: the BH_ATOMIC_xxx operations defined
 * later fall back to plain, non-atomic implementations and require an
 * external lock (see the usage note at the top of this file). */
#ifndef BH_ATOMIC_64_IS_ATOMIC
#define BH_ATOMIC_64_IS_ATOMIC 0
#endif
#ifndef BH_ATOMIC_32_IS_ATOMIC
#define BH_ATOMIC_32_IS_ATOMIC 0
#endif
#ifndef BH_ATOMIC_16_IS_ATOMIC
#define BH_ATOMIC_16_IS_ATOMIC 0
#endif
#endif
/* Force disable atomic 16-bit operations on bare-metal RISC-V
 * because the 16-bit atomic operations are emulated with 32-bit
 * atomic operations, which has a linkage problem on the current
 * toolchain:
 * in function `shared_memory_inc_reference':
 * wasm_shared_memory.c:85:(.text.shared_memory_inc_reference+0x10): undefined
 * reference to `__atomic_fetch_add_2'
 */
#ifndef WASM_UINT16_IS_ATOMIC
#if !defined(__linux__) && !defined(__FreeBSD__) && !defined(__NetBSD__) \
    && !defined(__OpenBSD__) && defined(__riscv)
#undef BH_ATOMIC_16_IS_ATOMIC
#define BH_ATOMIC_16_IS_ATOMIC 0
#endif
#endif
/* On some 32-bit platforms (pointer size == 32 bits), disable 64-bit
 * atomic operations to avoid link errors such as
 * "undefined reference to `__atomic_load_8'". */
#ifndef WASM_UINT64_IS_ATOMIC
#if !defined(__linux__) && !defined(__FreeBSD__) && !defined(__NetBSD__) \
    && !defined(__OpenBSD__) && (defined(__riscv) || defined(__arm__)) \
    && UINT32_MAX == UINTPTR_MAX
#undef BH_ATOMIC_64_IS_ATOMIC
#define BH_ATOMIC_64_IS_ATOMIC 0
#endif
#endif
  113. #if BH_ATOMIC_64_IS_ATOMIC != 0
  114. #define BH_ATOMIC_64_LOAD(v) __atomic_load_n(&(v), __ATOMIC_SEQ_CST)
  115. #define BH_ATOMIC_64_STORE(v, val) __atomic_store_n(&(v), val, __ATOMIC_SEQ_CST)
  116. #define BH_ATOMIC_64_FETCH_OR(v, val) \
  117. __atomic_fetch_or(&(v), (val), __ATOMIC_SEQ_CST)
  118. #define BH_ATOMIC_64_FETCH_AND(v, val) \
  119. __atomic_fetch_and(&(v), (val), __ATOMIC_SEQ_CST)
  120. #define BH_ATOMIC_64_FETCH_ADD(v, val) \
  121. __atomic_fetch_add(&(v), (val), __ATOMIC_SEQ_CST)
  122. #define BH_ATOMIC_64_FETCH_SUB(v, val) \
  123. __atomic_fetch_sub(&(v), (val), __ATOMIC_SEQ_CST)
  124. #else /* else of BH_ATOMIC_64_IS_ATOMIC != 0 */
  125. #define BH_ATOMIC_64_LOAD(v) (v)
  126. #define BH_ATOMIC_64_STORE(v, val) (v) = val
  127. #define BH_ATOMIC_64_FETCH_OR(v, val) nonatomic_64_fetch_or(&(v), val)
  128. #define BH_ATOMIC_64_FETCH_AND(v, val) nonatomic_64_fetch_and(&(v), val)
  129. #define BH_ATOMIC_64_FETCH_ADD(v, val) nonatomic_64_fetch_add(&(v), val)
  130. #define BH_ATOMIC_64_FETCH_SUB(v, val) nonatomic_64_fetch_sub(&(v), val)
  131. static inline uint64
  132. nonatomic_64_fetch_or(bh_atomic_64_t *p, uint64 val)
  133. {
  134. uint64 old = *p;
  135. *p |= val;
  136. return old;
  137. }
  138. static inline uint64
  139. nonatomic_64_fetch_and(bh_atomic_64_t *p, uint64 val)
  140. {
  141. uint64 old = *p;
  142. *p &= val;
  143. return old;
  144. }
  145. static inline uint64
  146. nonatomic_64_fetch_add(bh_atomic_64_t *p, uint64 val)
  147. {
  148. uint64 old = *p;
  149. *p += val;
  150. return old;
  151. }
  152. static inline uint64
  153. nonatomic_64_fetch_sub(bh_atomic_64_t *p, uint64 val)
  154. {
  155. uint64 old = *p;
  156. *p -= val;
  157. return old;
  158. }
  159. #endif
  160. #if BH_ATOMIC_32_IS_ATOMIC != 0
  161. #define BH_ATOMIC_32_LOAD(v) __atomic_load_n(&(v), __ATOMIC_SEQ_CST)
  162. #define BH_ATOMIC_32_STORE(v, val) __atomic_store_n(&(v), val, __ATOMIC_SEQ_CST)
  163. #define BH_ATOMIC_32_FETCH_OR(v, val) \
  164. __atomic_fetch_or(&(v), (val), __ATOMIC_SEQ_CST)
  165. #define BH_ATOMIC_32_FETCH_AND(v, val) \
  166. __atomic_fetch_and(&(v), (val), __ATOMIC_SEQ_CST)
  167. #define BH_ATOMIC_32_FETCH_ADD(v, val) \
  168. __atomic_fetch_add(&(v), (val), __ATOMIC_SEQ_CST)
  169. #define BH_ATOMIC_32_FETCH_SUB(v, val) \
  170. __atomic_fetch_sub(&(v), (val), __ATOMIC_SEQ_CST)
  171. #else /* else of BH_ATOMIC_32_IS_ATOMIC != 0 */
  172. #define BH_ATOMIC_32_LOAD(v) (v)
  173. #define BH_ATOMIC_32_STORE(v, val) (v) = val
  174. #define BH_ATOMIC_32_FETCH_OR(v, val) nonatomic_32_fetch_or(&(v), val)
  175. #define BH_ATOMIC_32_FETCH_AND(v, val) nonatomic_32_fetch_and(&(v), val)
  176. #define BH_ATOMIC_32_FETCH_ADD(v, val) nonatomic_32_fetch_add(&(v), val)
  177. #define BH_ATOMIC_32_FETCH_SUB(v, val) nonatomic_32_fetch_sub(&(v), val)
  178. static inline uint32
  179. nonatomic_32_fetch_or(bh_atomic_32_t *p, uint32 val)
  180. {
  181. uint32 old = *p;
  182. *p |= val;
  183. return old;
  184. }
  185. static inline uint32
  186. nonatomic_32_fetch_and(bh_atomic_32_t *p, uint32 val)
  187. {
  188. uint32 old = *p;
  189. *p &= val;
  190. return old;
  191. }
  192. static inline uint32
  193. nonatomic_32_fetch_add(bh_atomic_32_t *p, uint32 val)
  194. {
  195. uint32 old = *p;
  196. *p += val;
  197. return old;
  198. }
  199. static inline uint32
  200. nonatomic_32_fetch_sub(bh_atomic_32_t *p, uint32 val)
  201. {
  202. uint32 old = *p;
  203. *p -= val;
  204. return old;
  205. }
  206. #endif
  207. #if BH_ATOMIC_16_IS_ATOMIC != 0
  208. #define BH_ATOMIC_16_IS_ATOMIC 1
  209. #define BH_ATOMIC_16_LOAD(v) __atomic_load_n(&(v), __ATOMIC_SEQ_CST)
  210. #define BH_ATOMIC_16_STORE(v, val) __atomic_store_n(&(v), val, __ATOMIC_SEQ_CST)
  211. #define BH_ATOMIC_16_FETCH_OR(v, val) \
  212. __atomic_fetch_or(&(v), (val), __ATOMIC_SEQ_CST)
  213. #define BH_ATOMIC_16_FETCH_AND(v, val) \
  214. __atomic_fetch_and(&(v), (val), __ATOMIC_SEQ_CST)
  215. #define BH_ATOMIC_16_FETCH_ADD(v, val) \
  216. __atomic_fetch_add(&(v), (val), __ATOMIC_SEQ_CST)
  217. #define BH_ATOMIC_16_FETCH_SUB(v, val) \
  218. __atomic_fetch_sub(&(v), (val), __ATOMIC_SEQ_CST)
  219. #else /* else of BH_ATOMIC_16_IS_ATOMIC != 0 */
  220. #define BH_ATOMIC_16_LOAD(v) (v)
  221. #define BH_ATOMIC_16_STORE(v) (v) = val
  222. #define BH_ATOMIC_16_FETCH_OR(v, val) nonatomic_16_fetch_or(&(v), val)
  223. #define BH_ATOMIC_16_FETCH_AND(v, val) nonatomic_16_fetch_and(&(v), val)
  224. #define BH_ATOMIC_16_FETCH_ADD(v, val) nonatomic_16_fetch_add(&(v), val)
  225. #define BH_ATOMIC_16_FETCH_SUB(v, val) nonatomic_16_fetch_sub(&(v), val)
  226. static inline uint16
  227. nonatomic_16_fetch_or(bh_atomic_16_t *p, uint16 val)
  228. {
  229. uint16 old = *p;
  230. *p |= val;
  231. return old;
  232. }
  233. static inline uint16
  234. nonatomic_16_fetch_and(bh_atomic_16_t *p, uint16 val)
  235. {
  236. uint16 old = *p;
  237. *p &= val;
  238. return old;
  239. }
  240. static inline uint16
  241. nonatomic_16_fetch_add(bh_atomic_16_t *p, uint16 val)
  242. {
  243. uint16 old = *p;
  244. *p += val;
  245. return old;
  246. }
  247. static inline uint16
  248. nonatomic_16_fetch_sub(bh_atomic_16_t *p, uint16 val)
  249. {
  250. uint16 old = *p;
  251. *p -= val;
  252. return old;
  253. }
  254. #endif
  255. #ifdef __cplusplus
  256. }
  257. #endif
  258. #endif /* end of _BH_ATOMIC_H */