/* bh_atomic.h */
  1. /*
  2. * Copyright (C) 2023 Amazon Inc. All rights reserved.
  3. * SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  4. */
  5. #ifndef _BH_ATOMIC_H
  6. #define _BH_ATOMIC_H
  7. #include "gnuc.h"
  8. #ifdef __cplusplus
  9. extern "C" {
  10. #endif
  11. /*
  12. * Why don't we use C11 stdatomics here?
  13. *
  14. * Unlike C11 stdatomics,
  15. *
  16. * - bh_atomic_xxx_t is guaranteed to have the same size as the base type.
  17. * Thus more friendly to our AOT conventions.
  18. *
  19. * - It's available for C++.
  20. * Although C++23 will have C-compatible stdatomics.h, it isn't widely
  21. * available yet.
  22. */
  23. /*
  24. * Note about BH_ATOMIC_32_IS_ATOMIC
  25. *
  26. * If BH_ATOMIC_32_IS_ATOMIC == 0, BH_ATOMIC_xxx operations defined below
  27. * are not really atomic and require an external lock.
  28. *
  29. * Expected usage is:
  30. *
  31. * bh_atomic_32_t var = 0;
  32. * uint32 old;
  33. * #if BH_ATOMIC_32_IS_ATOMIC == 0
  34. * lock(&some_lock);
  35. * #endif
  36. * old = BH_ATOMIC_32_FETCH_AND(var, 1);
  37. * #if BH_ATOMIC_32_IS_ATOMIC == 0
  38. * unlock(&some_lock);
  39. * #endif
  40. */
  41. typedef uint64 bh_atomic_64_t;
  42. typedef uint32 bh_atomic_32_t;
  43. typedef uint16 bh_atomic_16_t;
  44. /* The flag can be defined by the user if the platform
  45. * supports atomic 32-bit operations.
  46. * If left undefined, it will be automatically defined
  47. * according to the platform.
  48. */
  49. #ifdef WASM_UINT64_IS_ATOMIC
  50. #define BH_ATOMIC_64_IS_ATOMIC WASM_UINT64_IS_ATOMIC
  51. #endif /* WASM_UINT64_IS_ATOMIC */
  52. #ifdef WASM_UINT32_IS_ATOMIC
  53. #define BH_ATOMIC_32_IS_ATOMIC WASM_UINT32_IS_ATOMIC
  54. #endif /* WASM_UINT32_IS_ATOMIC */
  55. #ifdef WASM_UINT16_IS_ATOMIC
  56. #define BH_ATOMIC_16_IS_ATOMIC WASM_UINT16_IS_ATOMIC
  57. #endif /* WASM_UINT16_IS_ATOMIC */
  58. #if defined(__GNUC_PREREQ)
  59. #if __GNUC_PREREQ(4, 7)
  60. #define CLANG_GCC_HAS_ATOMIC_BUILTIN
  61. #endif
  62. #elif defined(__clang__)
  63. #if __clang_major__ > 3 || (__clang_major__ == 3 && __clang_minor__ >= 0)
  64. #define CLANG_GCC_HAS_ATOMIC_BUILTIN
  65. #endif
  66. #endif
  67. #if defined(CLANG_GCC_HAS_ATOMIC_BUILTIN)
  68. #ifndef BH_ATOMIC_64_IS_ATOMIC
  69. #define BH_ATOMIC_64_IS_ATOMIC 1
  70. #endif
  71. #ifndef BH_ATOMIC_32_IS_ATOMIC
  72. #define BH_ATOMIC_32_IS_ATOMIC 1
  73. #endif
  74. #ifndef BH_ATOMIC_16_IS_ATOMIC
  75. #define BH_ATOMIC_16_IS_ATOMIC 1
  76. #endif
  77. #else
  78. #ifndef BH_ATOMIC_64_IS_ATOMIC
  79. #define BH_ATOMIC_64_IS_ATOMIC 0
  80. #endif
  81. #ifndef BH_ATOMIC_32_IS_ATOMIC
  82. #define BH_ATOMIC_32_IS_ATOMIC 0
  83. #endif
  84. #ifndef BH_ATOMIC_16_IS_ATOMIC
  85. #define BH_ATOMIC_16_IS_ATOMIC 0
  86. #endif
  87. #endif
  88. /* Force disable atomic 16-bit operations on bare-metal RISC-V
  89. * because the 16-bit atomic operations is emulated by 32-bit
  90. * atomic operations, which has linkage problem on current toolchain:
  91. * in function `shared_memory_inc_reference':
  92. * wasm_shared_memory.c:85:(.text.shared_memory_inc_reference+0x10): undefined
  93. * reference to `__atomic_fetch_add_2'
  94. */
  95. #ifndef WASM_UINT16_IS_ATOMIC
  96. #if !defined(__linux__) && !defined(__FreeBSD__) && !defined(__NetBSD__) \
  97. && !defined(__OpenBSD__) && defined(__riscv)
  98. #undef BH_ATOMIC_16_IS_ATOMIC
  99. #define BH_ATOMIC_16_IS_ATOMIC 0
  100. #endif
  101. #endif
  102. /* On some 32-bit platform, disable 64-bit atomic operations, otherwise
  103. * undefined reference to `__atomic_load_8' */
  104. #ifndef WASM_UINT64_IS_ATOMIC
  105. #if !defined(__linux__) && !defined(__FreeBSD__) && !defined(__NetBSD__) \
  106. && !defined(__OpenBSD__) && (defined(__riscv) || defined(__arm__)) \
  107. && UINT32_MAX == UINTPTR_MAX
  108. #undef BH_ATOMIC_64_IS_ATOMIC
  109. #define BH_ATOMIC_64_IS_ATOMIC 0
  110. #endif
  111. #endif
  112. #if BH_ATOMIC_64_IS_ATOMIC != 0
  113. #define BH_ATOMIC_64_LOAD(v) __atomic_load_n(&(v), __ATOMIC_SEQ_CST)
  114. #define BH_ATOMIC_64_STORE(v, val) __atomic_store_n(&(v), val, __ATOMIC_SEQ_CST)
  115. #define BH_ATOMIC_64_FETCH_OR(v, val) \
  116. __atomic_fetch_or(&(v), (val), __ATOMIC_SEQ_CST)
  117. #define BH_ATOMIC_64_FETCH_AND(v, val) \
  118. __atomic_fetch_and(&(v), (val), __ATOMIC_SEQ_CST)
  119. #define BH_ATOMIC_64_FETCH_ADD(v, val) \
  120. __atomic_fetch_add(&(v), (val), __ATOMIC_SEQ_CST)
  121. #define BH_ATOMIC_64_FETCH_SUB(v, val) \
  122. __atomic_fetch_sub(&(v), (val), __ATOMIC_SEQ_CST)
  123. #else /* else of BH_ATOMIC_64_IS_ATOMIC != 0 */
  124. #define BH_ATOMIC_64_LOAD(v) (v)
  125. #define BH_ATOMIC_64_STORE(v, val) (v) = val
  126. #define BH_ATOMIC_64_FETCH_OR(v, val) nonatomic_64_fetch_or(&(v), val)
  127. #define BH_ATOMIC_64_FETCH_AND(v, val) nonatomic_64_fetch_and(&(v), val)
  128. #define BH_ATOMIC_64_FETCH_ADD(v, val) nonatomic_64_fetch_add(&(v), val)
  129. #define BH_ATOMIC_64_FETCH_SUB(v, val) nonatomic_64_fetch_sub(&(v), val)
  130. static inline uint64
  131. nonatomic_64_fetch_or(bh_atomic_64_t *p, uint64 val)
  132. {
  133. uint64 old = *p;
  134. *p |= val;
  135. return old;
  136. }
  137. static inline uint64
  138. nonatomic_64_fetch_and(bh_atomic_64_t *p, uint64 val)
  139. {
  140. uint64 old = *p;
  141. *p &= val;
  142. return old;
  143. }
  144. static inline uint64
  145. nonatomic_64_fetch_add(bh_atomic_64_t *p, uint64 val)
  146. {
  147. uint64 old = *p;
  148. *p += val;
  149. return old;
  150. }
  151. static inline uint64
  152. nonatomic_64_fetch_sub(bh_atomic_64_t *p, uint64 val)
  153. {
  154. uint64 old = *p;
  155. *p -= val;
  156. return old;
  157. }
  158. #endif
  159. #if BH_ATOMIC_32_IS_ATOMIC != 0
  160. #define BH_ATOMIC_32_LOAD(v) __atomic_load_n(&(v), __ATOMIC_SEQ_CST)
  161. #define BH_ATOMIC_32_STORE(v, val) __atomic_store_n(&(v), val, __ATOMIC_SEQ_CST)
  162. #define BH_ATOMIC_32_FETCH_OR(v, val) \
  163. __atomic_fetch_or(&(v), (val), __ATOMIC_SEQ_CST)
  164. #define BH_ATOMIC_32_FETCH_AND(v, val) \
  165. __atomic_fetch_and(&(v), (val), __ATOMIC_SEQ_CST)
  166. #define BH_ATOMIC_32_FETCH_ADD(v, val) \
  167. __atomic_fetch_add(&(v), (val), __ATOMIC_SEQ_CST)
  168. #define BH_ATOMIC_32_FETCH_SUB(v, val) \
  169. __atomic_fetch_sub(&(v), (val), __ATOMIC_SEQ_CST)
  170. #else /* else of BH_ATOMIC_32_IS_ATOMIC != 0 */
  171. #define BH_ATOMIC_32_LOAD(v) (v)
  172. #define BH_ATOMIC_32_STORE(v, val) (v) = val
  173. #define BH_ATOMIC_32_FETCH_OR(v, val) nonatomic_32_fetch_or(&(v), val)
  174. #define BH_ATOMIC_32_FETCH_AND(v, val) nonatomic_32_fetch_and(&(v), val)
  175. #define BH_ATOMIC_32_FETCH_ADD(v, val) nonatomic_32_fetch_add(&(v), val)
  176. #define BH_ATOMIC_32_FETCH_SUB(v, val) nonatomic_32_fetch_sub(&(v), val)
  177. static inline uint32
  178. nonatomic_32_fetch_or(bh_atomic_32_t *p, uint32 val)
  179. {
  180. uint32 old = *p;
  181. *p |= val;
  182. return old;
  183. }
  184. static inline uint32
  185. nonatomic_32_fetch_and(bh_atomic_32_t *p, uint32 val)
  186. {
  187. uint32 old = *p;
  188. *p &= val;
  189. return old;
  190. }
  191. static inline uint32
  192. nonatomic_32_fetch_add(bh_atomic_32_t *p, uint32 val)
  193. {
  194. uint32 old = *p;
  195. *p += val;
  196. return old;
  197. }
  198. static inline uint32
  199. nonatomic_32_fetch_sub(bh_atomic_32_t *p, uint32 val)
  200. {
  201. uint32 old = *p;
  202. *p -= val;
  203. return old;
  204. }
  205. #endif
  206. #if BH_ATOMIC_16_IS_ATOMIC != 0
  207. #define BH_ATOMIC_16_IS_ATOMIC 1
  208. #define BH_ATOMIC_16_LOAD(v) __atomic_load_n(&(v), __ATOMIC_SEQ_CST)
  209. #define BH_ATOMIC_16_STORE(v, val) __atomic_store_n(&(v), val, __ATOMIC_SEQ_CST)
  210. #define BH_ATOMIC_16_FETCH_OR(v, val) \
  211. __atomic_fetch_or(&(v), (val), __ATOMIC_SEQ_CST)
  212. #define BH_ATOMIC_16_FETCH_AND(v, val) \
  213. __atomic_fetch_and(&(v), (val), __ATOMIC_SEQ_CST)
  214. #define BH_ATOMIC_16_FETCH_ADD(v, val) \
  215. __atomic_fetch_add(&(v), (val), __ATOMIC_SEQ_CST)
  216. #define BH_ATOMIC_16_FETCH_SUB(v, val) \
  217. __atomic_fetch_sub(&(v), (val), __ATOMIC_SEQ_CST)
  218. #else /* else of BH_ATOMIC_16_IS_ATOMIC != 0 */
  219. #define BH_ATOMIC_16_LOAD(v) (v)
  220. #define BH_ATOMIC_16_STORE(v) (v) = val
  221. #define BH_ATOMIC_16_FETCH_OR(v, val) nonatomic_16_fetch_or(&(v), val)
  222. #define BH_ATOMIC_16_FETCH_AND(v, val) nonatomic_16_fetch_and(&(v), val)
  223. #define BH_ATOMIC_16_FETCH_ADD(v, val) nonatomic_16_fetch_add(&(v), val)
  224. #define BH_ATOMIC_16_FETCH_SUB(v, val) nonatomic_16_fetch_sub(&(v), val)
  225. static inline uint16
  226. nonatomic_16_fetch_or(bh_atomic_16_t *p, uint16 val)
  227. {
  228. uint16 old = *p;
  229. *p |= val;
  230. return old;
  231. }
  232. static inline uint16
  233. nonatomic_16_fetch_and(bh_atomic_16_t *p, uint16 val)
  234. {
  235. uint16 old = *p;
  236. *p &= val;
  237. return old;
  238. }
  239. static inline uint16
  240. nonatomic_16_fetch_add(bh_atomic_16_t *p, uint16 val)
  241. {
  242. uint16 old = *p;
  243. *p += val;
  244. return old;
  245. }
  246. static inline uint16
  247. nonatomic_16_fetch_sub(bh_atomic_16_t *p, uint16 val)
  248. {
  249. uint16 old = *p;
  250. *p -= val;
  251. return old;
  252. }
  253. #endif
  254. #ifdef __cplusplus
  255. }
  256. #endif
  257. #endif /* end of _BH_ATOMIC_H */