|
|
@@ -45,6 +45,7 @@ extern "C" {
|
|
|
*/
|
|
|
|
|
|
typedef uint32 bh_atomic_32_t;
|
|
|
+typedef uint16 bh_atomic_16_t;
|
|
|
|
|
|
#if defined(__GNUC_PREREQ)
|
|
|
#if __GNUC_PREREQ(4, 7)
|
|
|
@@ -59,6 +60,7 @@ typedef uint32 bh_atomic_32_t;
|
|
|
#if defined(CLANG_GCC_HAS_ATOMIC_BUILTIN)
|
|
|
#define BH_ATOMIC_32_IS_ATOMIC 1
|
|
|
#define BH_ATOMIC_32_LOAD(v) __atomic_load_n(&(v), __ATOMIC_SEQ_CST)
|
|
|
+#define BH_ATOMIC_32_STORE(v, val) __atomic_store_n(&(v), val, __ATOMIC_SEQ_CST)
|
|
|
#define BH_ATOMIC_32_FETCH_OR(v, val) \
|
|
|
__atomic_fetch_or(&(v), (val), __ATOMIC_SEQ_CST)
|
|
|
#define BH_ATOMIC_32_FETCH_AND(v, val) \
|
|
|
@@ -67,13 +69,33 @@ typedef uint32 bh_atomic_32_t;
|
|
|
__atomic_fetch_add(&(v), (val), __ATOMIC_SEQ_CST)
|
|
|
#define BH_ATOMIC_32_FETCH_SUB(v, val) \
|
|
|
__atomic_fetch_sub(&(v), (val), __ATOMIC_SEQ_CST)
|
|
|
+
|
|
|
+#define BH_ATOMIC_16_IS_ATOMIC 1
|
|
|
+#define BH_ATOMIC_16_LOAD(v) __atomic_load_n(&(v), __ATOMIC_SEQ_CST)
|
|
|
+#define BH_ATOMIC_16_STORE(v, val) __atomic_store_n(&(v), val, __ATOMIC_SEQ_CST)
|
|
|
+#define BH_ATOMIC_16_FETCH_OR(v, val) \
|
|
|
+ __atomic_fetch_or(&(v), (val), __ATOMIC_SEQ_CST)
|
|
|
+#define BH_ATOMIC_16_FETCH_AND(v, val) \
|
|
|
+ __atomic_fetch_and(&(v), (val), __ATOMIC_SEQ_CST)
|
|
|
+#define BH_ATOMIC_16_FETCH_ADD(v, val) \
|
|
|
+ __atomic_fetch_add(&(v), (val), __ATOMIC_SEQ_CST)
|
|
|
+#define BH_ATOMIC_16_FETCH_SUB(v, val) \
|
|
|
+ __atomic_fetch_sub(&(v), (val), __ATOMIC_SEQ_CST)
|
|
|
#else /* else of defined(CLANG_GCC_HAS_ATOMIC_BUILTIN) */
|
|
|
#define BH_ATOMIC_32_LOAD(v) (v)
|
|
|
+#define BH_ATOMIC_32_STORE(v, val) (v) = val
|
|
|
#define BH_ATOMIC_32_FETCH_OR(v, val) nonatomic_32_fetch_or(&(v), val)
|
|
|
#define BH_ATOMIC_32_FETCH_AND(v, val) nonatomic_32_fetch_and(&(v), val)
|
|
|
#define BH_ATOMIC_32_FETCH_ADD(v, val) nonatomic_32_fetch_add(&(v), val)
|
|
|
#define BH_ATOMIC_32_FETCH_SUB(v, val) nonatomic_32_fetch_sub(&(v), val)
|
|
|
|
|
|
+#define BH_ATOMIC_16_LOAD(v) (v)
|
|
|
+#define BH_ATOMIC_16_STORE(v, val) (v) = val
|
|
|
+#define BH_ATOMIC_16_FETCH_OR(v, val) nonatomic_16_fetch_or(&(v), val)
|
|
|
+#define BH_ATOMIC_16_FETCH_AND(v, val) nonatomic_16_fetch_and(&(v), val)
|
|
|
+#define BH_ATOMIC_16_FETCH_ADD(v, val) nonatomic_16_fetch_add(&(v), val)
|
|
|
+#define BH_ATOMIC_16_FETCH_SUB(v, val) nonatomic_16_fetch_sub(&(v), val)
|
|
|
+
|
|
|
static inline uint32
|
|
|
nonatomic_32_fetch_or(bh_atomic_32_t *p, uint32 val)
|
|
|
{
|
|
|
@@ -106,6 +128,38 @@ nonatomic_32_fetch_sub(bh_atomic_32_t *p, uint32 val)
|
|
|
return old;
|
|
|
}
|
|
|
|
|
|
+static inline uint16
|
|
|
+nonatomic_16_fetch_or(bh_atomic_16_t *p, uint16 val)
|
|
|
+{
|
|
|
+ uint16 old = *p;
|
|
|
+ *p |= val;
|
|
|
+ return old;
|
|
|
+}
|
|
|
+
|
|
|
+static inline uint16
|
|
|
+nonatomic_16_fetch_and(bh_atomic_16_t *p, uint16 val)
|
|
|
+{
|
|
|
+ uint16 old = *p;
|
|
|
+ *p &= val;
|
|
|
+ return old;
|
|
|
+}
|
|
|
+
|
|
|
+static inline uint16
|
|
|
+nonatomic_16_fetch_add(bh_atomic_16_t *p, uint16 val)
|
|
|
+{
|
|
|
+ uint16 old = *p;
|
|
|
+ *p += val;
|
|
|
+ return old;
|
|
|
+}
|
|
|
+
|
|
|
+static inline uint16
|
|
|
+nonatomic_16_fetch_sub(bh_atomic_16_t *p, uint16 val)
|
|
|
+{
|
|
|
+ uint16 old = *p;
|
|
|
+ *p -= val;
|
|
|
+ return old;
|
|
|
+}
|
|
|
+
|
|
|
/* The flag can be defined by the user if the platform
|
|
|
supports atomic access to uint32 aligned memory. */
|
|
|
#ifdef WASM_UINT32_IS_ATOMIC
|
|
|
@@ -114,6 +168,12 @@ nonatomic_32_fetch_sub(bh_atomic_32_t *p, uint32 val)
|
|
|
#define BH_ATOMIC_32_IS_ATOMIC 0
|
|
|
#endif /* WASM_UINT32_IS_ATOMIC */
|
|
|
|
|
|
+#ifdef WASM_UINT16_IS_ATOMIC
|
|
|
+#define BH_ATOMIC_16_IS_ATOMIC 1
|
|
|
+#else /* else of WASM_UINT16_IS_ATOMIC */
|
|
|
+#define BH_ATOMIC_16_IS_ATOMIC 0
|
|
|
+#endif /* WASM_UINT16_IS_ATOMIC */
|
|
|
+
|
|
|
#endif
|
|
|
|
|
|
#ifdef __cplusplus
|