/* bh_atomic.h */
/*
 * Copyright (C) 2023 Amazon Inc. All rights reserved.
 * SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
 */

#ifndef _BH_ATOMIC_H
#define _BH_ATOMIC_H

#include "gnuc.h"

#ifdef __cplusplus
extern "C" {
#endif
/*
 * Why don't we use C11 stdatomics here?
 *
 * Unlike C11 stdatomics,
 *
 * - bh_atomic_xxx_t is guaranteed to have the same size as the base type,
 *   and is thus more friendly to our AOT conventions.
 *
 * - It's available for C++.
 *   Although C++23 will have a C-compatible stdatomic.h, it isn't widely
 *   available yet.
 */

/*
 * Note about BH_ATOMIC_32_IS_ATOMIC
 *
 * If BH_ATOMIC_32_IS_ATOMIC == 0, the BH_ATOMIC_xxx operations defined
 * below are not really atomic and require an external lock.
 *
 * Expected usage:
 *
 *   bh_atomic_32_t var = 0;
 *   uint32 old;
 * #if BH_ATOMIC_32_IS_ATOMIC == 0
 *   lock(&some_lock);
 * #endif
 *   old = BH_ATOMIC_32_FETCH_AND(var, 1);
 * #if BH_ATOMIC_32_IS_ATOMIC == 0
 *   unlock(&some_lock);
 * #endif
 */
  41. typedef uint32 bh_atomic_32_t;
  42. typedef uint16 bh_atomic_16_t;
  43. #if defined(__GNUC_PREREQ)
  44. #if __GNUC_PREREQ(4, 7)
  45. #define CLANG_GCC_HAS_ATOMIC_BUILTIN
  46. #endif
  47. #elif defined(__clang__)
  48. #if __clang_major__ > 3 || (__clang_major__ == 3 && __clang_minor__ >= 0)
  49. #define CLANG_GCC_HAS_ATOMIC_BUILTIN
  50. #endif
  51. #endif
  52. #if defined(CLANG_GCC_HAS_ATOMIC_BUILTIN)
  53. #define BH_ATOMIC_32_IS_ATOMIC 1
  54. #define BH_ATOMIC_32_LOAD(v) __atomic_load_n(&(v), __ATOMIC_SEQ_CST)
  55. #define BH_ATOMIC_32_STORE(v, val) __atomic_store_n(&(v), val, __ATOMIC_SEQ_CST)
  56. #define BH_ATOMIC_32_FETCH_OR(v, val) \
  57. __atomic_fetch_or(&(v), (val), __ATOMIC_SEQ_CST)
  58. #define BH_ATOMIC_32_FETCH_AND(v, val) \
  59. __atomic_fetch_and(&(v), (val), __ATOMIC_SEQ_CST)
  60. #define BH_ATOMIC_32_FETCH_ADD(v, val) \
  61. __atomic_fetch_add(&(v), (val), __ATOMIC_SEQ_CST)
  62. #define BH_ATOMIC_32_FETCH_SUB(v, val) \
  63. __atomic_fetch_sub(&(v), (val), __ATOMIC_SEQ_CST)
  64. #define BH_ATOMIC_16_IS_ATOMIC 1
  65. #define BH_ATOMIC_16_LOAD(v) __atomic_load_n(&(v), __ATOMIC_SEQ_CST)
  66. #define BH_ATOMIC_16_STORE(v, val) __atomic_store_n(&(v), val, __ATOMIC_SEQ_CST)
  67. #define BH_ATOMIC_16_FETCH_OR(v, val) \
  68. __atomic_fetch_or(&(v), (val), __ATOMIC_SEQ_CST)
  69. #define BH_ATOMIC_16_FETCH_AND(v, val) \
  70. __atomic_fetch_and(&(v), (val), __ATOMIC_SEQ_CST)
  71. #define BH_ATOMIC_16_FETCH_ADD(v, val) \
  72. __atomic_fetch_add(&(v), (val), __ATOMIC_SEQ_CST)
  73. #define BH_ATOMIC_16_FETCH_SUB(v, val) \
  74. __atomic_fetch_sub(&(v), (val), __ATOMIC_SEQ_CST)
#else /* else of defined(CLANG_GCC_HAS_ATOMIC_BUILTIN) */
  76. #define BH_ATOMIC_32_LOAD(v) (v)
  77. #define BH_ATOMIC_32_STORE(v, val) (v) = val
  78. #define BH_ATOMIC_32_FETCH_OR(v, val) nonatomic_32_fetch_or(&(v), val)
  79. #define BH_ATOMIC_32_FETCH_AND(v, val) nonatomic_32_fetch_and(&(v), val)
  80. #define BH_ATOMIC_32_FETCH_ADD(v, val) nonatomic_32_fetch_add(&(v), val)
  81. #define BH_ATOMIC_32_FETCH_SUB(v, val) nonatomic_32_fetch_sub(&(v), val)
  82. #define BH_ATOMIC_16_LOAD(v) (v)
  83. #define BH_ATOMIC_16_STORE(v) (v) = val
  84. #define BH_ATOMIC_16_FETCH_OR(v, val) nonatomic_16_fetch_or(&(v), val)
  85. #define BH_ATOMIC_16_FETCH_AND(v, val) nonatomic_16_fetch_and(&(v), val)
  86. #define BH_ATOMIC_16_FETCH_ADD(v, val) nonatomic_16_fetch_add(&(v), val)
  87. #define BH_ATOMIC_16_FETCH_SUB(v, val) nonatomic_16_fetch_sub(&(v), val)
  88. static inline uint32
  89. nonatomic_32_fetch_or(bh_atomic_32_t *p, uint32 val)
  90. {
  91. uint32 old = *p;
  92. *p |= val;
  93. return old;
  94. }
  95. static inline uint32
  96. nonatomic_32_fetch_and(bh_atomic_32_t *p, uint32 val)
  97. {
  98. uint32 old = *p;
  99. *p &= val;
  100. return old;
  101. }
  102. static inline uint32
  103. nonatomic_32_fetch_add(bh_atomic_32_t *p, uint32 val)
  104. {
  105. uint32 old = *p;
  106. *p += val;
  107. return old;
  108. }
  109. static inline uint32
  110. nonatomic_32_fetch_sub(bh_atomic_32_t *p, uint32 val)
  111. {
  112. uint32 old = *p;
  113. *p -= val;
  114. return old;
  115. }
  116. static inline uint16
  117. nonatomic_16_fetch_or(bh_atomic_16_t *p, uint16 val)
  118. {
  119. uint16 old = *p;
  120. *p |= val;
  121. return old;
  122. }
  123. static inline uint16
  124. nonatomic_16_fetch_and(bh_atomic_16_t *p, uint16 val)
  125. {
  126. uint16 old = *p;
  127. *p &= val;
  128. return old;
  129. }
  130. static inline uint16
  131. nonatomic_16_fetch_add(bh_atomic_16_t *p, uint16 val)
  132. {
  133. uint16 old = *p;
  134. *p += val;
  135. return old;
  136. }
  137. static inline uint16
  138. nonatomic_16_fetch_sub(bh_atomic_16_t *p, uint16 val)
  139. {
  140. uint16 old = *p;
  141. *p -= val;
  142. return old;
  143. }
  144. /* The flag can be defined by the user if the platform
  145. supports atomic access to uint32 aligned memory. */
  146. #ifdef WASM_UINT32_IS_ATOMIC
  147. #define BH_ATOMIC_32_IS_ATOMIC 1
  148. #else /* else of WASM_UINT32_IS_ATOMIC */
  149. #define BH_ATOMIC_32_IS_ATOMIC 0
  150. #endif /* WASM_UINT32_IS_ATOMIC */
  151. #ifdef WASM_UINT16_IS_ATOMIC
  152. #define BH_ATOMIC_16_IS_ATOMIC 1
  153. #else /* else of WASM_UINT16_IS_ATOMIC */
  154. #define BH_ATOMIC_16_IS_ATOMIC 0
  155. #endif /* WASM_UINT16_IS_ATOMIC */
#endif /* defined(CLANG_GCC_HAS_ATOMIC_BUILTIN) */

#ifdef __cplusplus
}
#endif

#endif /* end of _BH_ATOMIC_H */