/*-
 * Copyright (c) 2011 Ed Schouten <ed@FreeBSD.org>
 *                    David Chisnall <theraven@FreeBSD.org>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD$
 */

#ifndef _STDATOMIC_H_
#define _STDATOMIC_H_

#include <sys/cdefs.h>
#include <sys/_types.h>
#include <stdint.h>

#if __has_extension(c_atomic) || __has_extension(cxx_atomic)
#define __CLANG_ATOMICS
#elif __GNUC_PREREQ__(4, 7)
#define __GNUC_ATOMICS
#elif defined(__GNUC__)
#define __SYNC_ATOMICS
#else
#error "stdatomic.h does not support your compiler"
#endif

/*
 * 7.17.1 Atomic lock-free macros.
 */

#ifdef __GCC_ATOMIC_BOOL_LOCK_FREE
#define ATOMIC_BOOL_LOCK_FREE __GCC_ATOMIC_BOOL_LOCK_FREE
#endif
#ifdef __GCC_ATOMIC_CHAR_LOCK_FREE
#define ATOMIC_CHAR_LOCK_FREE __GCC_ATOMIC_CHAR_LOCK_FREE
#endif
#ifdef __GCC_ATOMIC_CHAR16_T_LOCK_FREE
#define ATOMIC_CHAR16_T_LOCK_FREE __GCC_ATOMIC_CHAR16_T_LOCK_FREE
#endif
#ifdef __GCC_ATOMIC_CHAR32_T_LOCK_FREE
#define ATOMIC_CHAR32_T_LOCK_FREE __GCC_ATOMIC_CHAR32_T_LOCK_FREE
#endif
#ifdef __GCC_ATOMIC_WCHAR_T_LOCK_FREE
#define ATOMIC_WCHAR_T_LOCK_FREE __GCC_ATOMIC_WCHAR_T_LOCK_FREE
#endif
#ifdef __GCC_ATOMIC_SHORT_LOCK_FREE
#define ATOMIC_SHORT_LOCK_FREE __GCC_ATOMIC_SHORT_LOCK_FREE
#endif
#ifdef __GCC_ATOMIC_INT_LOCK_FREE
#define ATOMIC_INT_LOCK_FREE __GCC_ATOMIC_INT_LOCK_FREE
#endif
#ifdef __GCC_ATOMIC_LONG_LOCK_FREE
#define ATOMIC_LONG_LOCK_FREE __GCC_ATOMIC_LONG_LOCK_FREE
#endif
#ifdef __GCC_ATOMIC_LLONG_LOCK_FREE
#define ATOMIC_LLONG_LOCK_FREE __GCC_ATOMIC_LLONG_LOCK_FREE
#endif
#ifdef __GCC_ATOMIC_POINTER_LOCK_FREE
#define ATOMIC_POINTER_LOCK_FREE __GCC_ATOMIC_POINTER_LOCK_FREE
#endif
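
/*
 * Illustrative use of the lock-free macros (not part of the header proper):
 * each macro expands to 0 (never lock-free), 1 (sometimes lock-free) or
 * 2 (always lock-free), so a translation unit can refuse to build when a
 * guarantee it depends on is missing.
 *
 *     #if defined(ATOMIC_INT_LOCK_FREE) && ATOMIC_INT_LOCK_FREE != 2
 *     #error "this code requires always-lock-free atomic int"
 *     #endif
 */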

/*
 * 7.17.2 Initialization.
 */

#if defined(__CLANG_ATOMICS)
#define ATOMIC_VAR_INIT(value) (value)
#define atomic_init(obj, value) __c11_atomic_init(obj, value)
#else
#define ATOMIC_VAR_INIT(value) { .__val = (value) }
#define atomic_init(obj, value) ((void)((obj)->__val = (value)))
#endif
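
/*
 * Usage sketch (illustrative; "counter" and "obj" are hypothetical names):
 * an atomic object can be initialized statically with ATOMIC_VAR_INIT or at
 * run time with atomic_init().  Note that atomic_init() is not itself an
 * atomic operation and must happen before the object is used concurrently.
 *
 *     static atomic_int counter = ATOMIC_VAR_INIT(0);
 *
 *     atomic_int obj;
 *     atomic_init(&obj, 10);
 */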

/*
 * Clang and recent GCC both provide predefined macros for the memory
 * orderings.  If we are using a compiler that doesn't define them, use the
 * clang values - these will be ignored in the fallback path.
 */

#ifndef __ATOMIC_RELAXED
#define __ATOMIC_RELAXED 0
#endif
#ifndef __ATOMIC_CONSUME
#define __ATOMIC_CONSUME 1
#endif
#ifndef __ATOMIC_ACQUIRE
#define __ATOMIC_ACQUIRE 2
#endif
#ifndef __ATOMIC_RELEASE
#define __ATOMIC_RELEASE 3
#endif
#ifndef __ATOMIC_ACQ_REL
#define __ATOMIC_ACQ_REL 4
#endif
#ifndef __ATOMIC_SEQ_CST
#define __ATOMIC_SEQ_CST 5
#endif

/*
 * 7.17.3 Order and consistency.
 *
 * The memory_order_* constants that denote the barrier behaviour of the
 * atomic operations.
 */

typedef enum {
    memory_order_relaxed = __ATOMIC_RELAXED,
    memory_order_consume = __ATOMIC_CONSUME,
    memory_order_acquire = __ATOMIC_ACQUIRE,
    memory_order_release = __ATOMIC_RELEASE,
    memory_order_acq_rel = __ATOMIC_ACQ_REL,
    memory_order_seq_cst = __ATOMIC_SEQ_CST
} memory_order;
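
/*
 * Illustrative release/acquire pairing (the variables are hypothetical):
 * the writer publishes "data" with a release store, and a reader that sees
 * "ready" set through an acquire load is guaranteed to also see the data
 * written before the store.
 *
 *     int data;
 *     atomic_bool ready = ATOMIC_VAR_INIT(0);
 *
 *     // writer
 *     data = 42;
 *     atomic_store_explicit(&ready, 1, memory_order_release);
 *
 *     // reader
 *     if (atomic_load_explicit(&ready, memory_order_acquire))
 *         use(data);    // hypothetical consumer of the data
 */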

/*
 * 7.17.4 Fences.
 */

static __inline void
atomic_thread_fence(memory_order __order __unused)
{
#ifdef __CLANG_ATOMICS
    __c11_atomic_thread_fence(__order);
#elif defined(__GNUC_ATOMICS)
    __atomic_thread_fence(__order);
#else
    __sync_synchronize();
#endif
}

static __inline void
atomic_signal_fence(memory_order __order __unused)
{
#ifdef __CLANG_ATOMICS
    __c11_atomic_signal_fence(__order);
#elif defined(__GNUC_ATOMICS)
    __atomic_signal_fence(__order);
#else
    __asm volatile ("" ::: "memory");
#endif
}
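
/*
 * Sketch of how a thread fence is typically combined with relaxed
 * operations (illustrative only; "data" and "ready" are hypothetical):
 * the release fence before the relaxed flag store publishes "data" to a
 * reader that pairs it with an acquire load or acquire fence on "ready".
 *
 *     data = 42;
 *     atomic_thread_fence(memory_order_release);
 *     atomic_store_explicit(&ready, 1, memory_order_relaxed);
 */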

/*
 * 7.17.5 Lock-free property.
 */

#if defined(_KERNEL)
/* Atomics in kernelspace are always lock-free. */
#define atomic_is_lock_free(obj) \
    ((void)(obj), (_Bool)1)
#elif defined(__CLANG_ATOMICS)
#define atomic_is_lock_free(obj) \
    __atomic_is_lock_free(sizeof(*(obj)), obj)
#elif defined(__GNUC_ATOMICS)
#define atomic_is_lock_free(obj) \
    __atomic_is_lock_free(sizeof((obj)->__val), &(obj)->__val)
#else
#define atomic_is_lock_free(obj) \
    ((void)(obj), sizeof((obj)->__val) <= sizeof(void *))
#endif
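
/*
 * Illustrative use (the fallback helper is hypothetical): the argument is a
 * pointer to the atomic object, and the answer can depend on the object's
 * size and alignment, so it is queried at run time rather than assumed.
 *
 *     atomic_llong counter;
 *
 *     if (!atomic_is_lock_free(&counter))
 *         use_mutex_instead();    // hypothetical fallback
 */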

/*
 * 7.17.6 Atomic integer types.
 */

#if !defined(__CLANG_ATOMICS)
/*
 * No native support for _Atomic().  Wrap the object in a structure so the
 * GNU C and __sync_*() code paths below can reach it through the __val
 * member; without this fallback definition those paths cannot compile.
 */
#define _Atomic(T) struct { T volatile __val; }
#endif

typedef _Atomic(_Bool) atomic_bool;
typedef _Atomic(char) atomic_char;
typedef _Atomic(signed char) atomic_schar;
typedef _Atomic(unsigned char) atomic_uchar;
typedef _Atomic(short) atomic_short;
typedef _Atomic(unsigned short) atomic_ushort;
typedef _Atomic(int) atomic_int;
typedef _Atomic(unsigned int) atomic_uint;
typedef _Atomic(long) atomic_long;
typedef _Atomic(unsigned long) atomic_ulong;
typedef _Atomic(long long) atomic_llong;
typedef _Atomic(unsigned long long) atomic_ullong;
#if 0
typedef _Atomic(__char16_t) atomic_char16_t;
typedef _Atomic(__char32_t) atomic_char32_t;
#endif
typedef _Atomic(wchar_t) atomic_wchar_t;
typedef _Atomic(int_least8_t) atomic_int_least8_t;
typedef _Atomic(uint_least8_t) atomic_uint_least8_t;
typedef _Atomic(int_least16_t) atomic_int_least16_t;
typedef _Atomic(uint_least16_t) atomic_uint_least16_t;
typedef _Atomic(int_least32_t) atomic_int_least32_t;
typedef _Atomic(uint_least32_t) atomic_uint_least32_t;
typedef _Atomic(int_least64_t) atomic_int_least64_t;
typedef _Atomic(uint_least64_t) atomic_uint_least64_t;
typedef _Atomic(int_fast8_t) atomic_int_fast8_t;
typedef _Atomic(uint_fast8_t) atomic_uint_fast8_t;
typedef _Atomic(int_fast16_t) atomic_int_fast16_t;
typedef _Atomic(uint_fast16_t) atomic_uint_fast16_t;
typedef _Atomic(int_fast32_t) atomic_int_fast32_t;
typedef _Atomic(uint_fast32_t) atomic_uint_fast32_t;
typedef _Atomic(int_fast64_t) atomic_int_fast64_t;
typedef _Atomic(uint_fast64_t) atomic_uint_fast64_t;
typedef _Atomic(intptr_t) atomic_intptr_t;
typedef _Atomic(uintptr_t) atomic_uintptr_t;
typedef _Atomic(size_t) atomic_size_t;
typedef _Atomic(ptrdiff_t) atomic_ptrdiff_t;
typedef _Atomic(intmax_t) atomic_intmax_t;
typedef _Atomic(uintmax_t) atomic_uintmax_t;

/*
 * 7.17.7 Operations on atomic types.
 */

/*
 * Compiler-specific operations.
 */

#if defined(__CLANG_ATOMICS)
#define atomic_compare_exchange_strong_explicit(object, expected, \
    desired, success, failure) \
    __c11_atomic_compare_exchange_strong(object, expected, desired, \
        success, failure)
#define atomic_compare_exchange_weak_explicit(object, expected, \
    desired, success, failure) \
    __c11_atomic_compare_exchange_weak(object, expected, desired, \
        success, failure)
#define atomic_exchange_explicit(object, desired, order) \
    __c11_atomic_exchange(object, desired, order)
#define atomic_fetch_add_explicit(object, operand, order) \
    __c11_atomic_fetch_add(object, operand, order)
#define atomic_fetch_and_explicit(object, operand, order) \
    __c11_atomic_fetch_and(object, operand, order)
#define atomic_fetch_or_explicit(object, operand, order) \
    __c11_atomic_fetch_or(object, operand, order)
#define atomic_fetch_sub_explicit(object, operand, order) \
    __c11_atomic_fetch_sub(object, operand, order)
#define atomic_fetch_xor_explicit(object, operand, order) \
    __c11_atomic_fetch_xor(object, operand, order)
#define atomic_load_explicit(object, order) \
    __c11_atomic_load(object, order)
#define atomic_store_explicit(object, desired, order) \
    __c11_atomic_store(object, desired, order)
#elif defined(__GNUC_ATOMICS)
#define atomic_compare_exchange_strong_explicit(object, expected, \
    desired, success, failure) \
    __atomic_compare_exchange_n(&(object)->__val, expected, \
        desired, 0, success, failure)
#define atomic_compare_exchange_weak_explicit(object, expected, \
    desired, success, failure) \
    __atomic_compare_exchange_n(&(object)->__val, expected, \
        desired, 1, success, failure)
#define atomic_exchange_explicit(object, desired, order) \
    __atomic_exchange_n(&(object)->__val, desired, order)
#define atomic_fetch_add_explicit(object, operand, order) \
    __atomic_fetch_add(&(object)->__val, operand, order)
#define atomic_fetch_and_explicit(object, operand, order) \
    __atomic_fetch_and(&(object)->__val, operand, order)
#define atomic_fetch_or_explicit(object, operand, order) \
    __atomic_fetch_or(&(object)->__val, operand, order)
#define atomic_fetch_sub_explicit(object, operand, order) \
    __atomic_fetch_sub(&(object)->__val, operand, order)
#define atomic_fetch_xor_explicit(object, operand, order) \
    __atomic_fetch_xor(&(object)->__val, operand, order)
#define atomic_load_explicit(object, order) \
    __atomic_load_n(&(object)->__val, order)
#define atomic_store_explicit(object, desired, order) \
    __atomic_store_n(&(object)->__val, desired, order)
#else
/*
 * Fall back to the pre-C11 __sync_*() builtins.  The addition below is
 * performed against a zero value of the object's own type, so for atomic
 * pointer types the operand is scaled from an element count to a byte
 * count, matching C11 pointer arithmetic (the __sync_*() builtins only add
 * raw byte counts); for integer types it is a no-op.
 */
#define __atomic_apply_stride(object, operand) \
    (((__typeof__((object)->__val))0) + (operand))
#define atomic_compare_exchange_strong_explicit(object, expected, \
    desired, success, failure) __extension__ ({ \
    __typeof__(expected) __ep = (expected); \
    __typeof__(*__ep) __e = *__ep; \
    (void)(success); (void)(failure); \
    (_Bool)((*__ep = __sync_val_compare_and_swap(&(object)->__val, \
        __e, desired)) == __e); \
})
#define atomic_compare_exchange_weak_explicit(object, expected, \
    desired, success, failure) \
    atomic_compare_exchange_strong_explicit(object, expected, \
        desired, success, failure)
#if __has_builtin(__sync_swap)
/* Clang provides a full-barrier atomic exchange - use it if available. */
#define atomic_exchange_explicit(object, desired, order) \
    ((void)(order), __sync_swap(&(object)->__val, desired))
#else
/*
 * __sync_lock_test_and_set() is only an acquire barrier in theory (although
 * in practice it is usually a full barrier) so we need an explicit barrier
 * before it.
 */
#define atomic_exchange_explicit(object, desired, order) \
    __extension__ ({ \
    __typeof__(object) __o = (object); \
    __typeof__(desired) __d = (desired); \
    (void)(order); \
    __sync_synchronize(); \
    __sync_lock_test_and_set(&(__o)->__val, __d); \
})
#endif
#define atomic_fetch_add_explicit(object, operand, order) \
    ((void)(order), __sync_fetch_and_add(&(object)->__val, \
        __atomic_apply_stride(object, operand)))
#define atomic_fetch_and_explicit(object, operand, order) \
    ((void)(order), __sync_fetch_and_and(&(object)->__val, operand))
#define atomic_fetch_or_explicit(object, operand, order) \
    ((void)(order), __sync_fetch_and_or(&(object)->__val, operand))
#define atomic_fetch_sub_explicit(object, operand, order) \
    ((void)(order), __sync_fetch_and_sub(&(object)->__val, \
        __atomic_apply_stride(object, operand)))
#define atomic_fetch_xor_explicit(object, operand, order) \
    ((void)(order), __sync_fetch_and_xor(&(object)->__val, operand))
#define atomic_load_explicit(object, order) \
    ((void)(order), __sync_fetch_and_add(&(object)->__val, 0))
#define atomic_store_explicit(object, desired, order) \
    ((void)atomic_exchange_explicit(object, desired, order))
#endif
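
/*
 * Illustrative compare-and-swap retry loop built from the operations above
 * (the function is hypothetical): the weak form may fail spuriously, and on
 * failure it stores the object's current value back into *expected, so the
 * loop simply retries with the refreshed value.
 *
 *     static int
 *     saturating_increment(atomic_int *p, int max)
 *     {
 *         int old = atomic_load_explicit(p, memory_order_relaxed);
 *
 *         do {
 *             if (old == max)
 *                 return (0);    // already at the limit
 *         } while (!atomic_compare_exchange_weak_explicit(p, &old,
 *             old + 1, memory_order_relaxed, memory_order_relaxed));
 *         return (1);
 *     }
 */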

/*
 * Convenience functions.
 *
 * Don't provide these in kernel space.  In kernel space, we should be
 * disciplined enough to always provide explicit barriers.
 */

#ifndef _KERNEL
#define atomic_compare_exchange_strong(object, expected, desired) \
    atomic_compare_exchange_strong_explicit(object, expected, \
        desired, memory_order_seq_cst, memory_order_seq_cst)
#define atomic_compare_exchange_weak(object, expected, desired) \
    atomic_compare_exchange_weak_explicit(object, expected, \
        desired, memory_order_seq_cst, memory_order_seq_cst)
#define atomic_exchange(object, desired) \
    atomic_exchange_explicit(object, desired, memory_order_seq_cst)
#define atomic_fetch_add(object, operand) \
    atomic_fetch_add_explicit(object, operand, memory_order_seq_cst)
#define atomic_fetch_and(object, operand) \
    atomic_fetch_and_explicit(object, operand, memory_order_seq_cst)
#define atomic_fetch_or(object, operand) \
    atomic_fetch_or_explicit(object, operand, memory_order_seq_cst)
#define atomic_fetch_sub(object, operand) \
    atomic_fetch_sub_explicit(object, operand, memory_order_seq_cst)
#define atomic_fetch_xor(object, operand) \
    atomic_fetch_xor_explicit(object, operand, memory_order_seq_cst)
#define atomic_load(object) \
    atomic_load_explicit(object, memory_order_seq_cst)
#define atomic_store(object, desired) \
    atomic_store_explicit(object, desired, memory_order_seq_cst)
#endif /* !_KERNEL */
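
/*
 * Example of the implicit-order convenience forms (names hypothetical):
 * each call is equivalent to the corresponding *_explicit operation with
 * memory_order_seq_cst.
 *
 *     static atomic_uint hits = ATOMIC_VAR_INIT(0);
 *
 *     atomic_fetch_add(&hits, 1);              // hits++, sequentially consistent
 *     unsigned int n = atomic_load(&hits);     // current value
 */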

/*
 * 7.17.8 Atomic flag type and operations.
 *
 * XXX: Assume atomic_bool can be used as an atomic_flag.  Is there some
 * kind of compiler built-in type we could use?
 */

typedef struct {
    atomic_bool __flag;
} atomic_flag;

#define ATOMIC_FLAG_INIT { ATOMIC_VAR_INIT(0) }

static __inline _Bool
atomic_flag_test_and_set_explicit(volatile atomic_flag *__object,
    memory_order __order)
{
    return (atomic_exchange_explicit(&__object->__flag, 1, __order));
}

static __inline void
atomic_flag_clear_explicit(volatile atomic_flag *__object, memory_order __order)
{
    atomic_store_explicit(&__object->__flag, 0, __order);
}

#ifndef _KERNEL
static __inline _Bool
atomic_flag_test_and_set(volatile atomic_flag *__object)
{
    return (atomic_flag_test_and_set_explicit(__object,
        memory_order_seq_cst));
}

static __inline void
atomic_flag_clear(volatile atomic_flag *__object)
{
    atomic_flag_clear_explicit(__object, memory_order_seq_cst);
}
#endif /* !_KERNEL */
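
/*
 * Typical use of atomic_flag as a minimal spin lock (illustrative only;
 * "lock", "spin_lock" and "spin_unlock" are hypothetical names):
 *
 *     static atomic_flag lock = ATOMIC_FLAG_INIT;
 *
 *     static void
 *     spin_lock(void)
 *     {
 *         while (atomic_flag_test_and_set_explicit(&lock,
 *             memory_order_acquire))
 *             ;    // spin until the flag was previously clear
 *     }
 *
 *     static void
 *     spin_unlock(void)
 *     {
 *         atomic_flag_clear_explicit(&lock, memory_order_release);
 *     }
 */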

#endif /* !_STDATOMIC_H_ */