/*-
 * Copyright (c) 2011 Ed Schouten <ed@FreeBSD.org>
 *                    David Chisnall <theraven@FreeBSD.org>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD$
 */
  29. #ifndef _STDATOMIC_H_
  30. #define _STDATOMIC_H_
  31. #include <sys/cdefs.h>
  32. #include <sys/_types.h>
  33. #if __has_extension(c_atomic) || __has_extension(cxx_atomic)
  34. #define __CLANG_ATOMICS
  35. #elif __GNUC_PREREQ__(4, 7)
  36. #define __GNUC_ATOMICS
  37. #elif defined(__GNUC__)
  38. #define __SYNC_ATOMICS
  39. #else
  40. #error "stdatomic.h does not support your compiler"
  41. #endif
  42. /*
  43. * 7.17.1 Atomic lock-free macros.
  44. */
  45. #ifdef __GCC_ATOMIC_BOOL_LOCK_FREE
  46. #define ATOMIC_BOOL_LOCK_FREE __GCC_ATOMIC_BOOL_LOCK_FREE
  47. #endif
  48. #ifdef __GCC_ATOMIC_CHAR_LOCK_FREE
  49. #define ATOMIC_CHAR_LOCK_FREE __GCC_ATOMIC_CHAR_LOCK_FREE
  50. #endif
  51. #ifdef __GCC_ATOMIC_CHAR16_T_LOCK_FREE
  52. #define ATOMIC_CHAR16_T_LOCK_FREE __GCC_ATOMIC_CHAR16_T_LOCK_FREE
  53. #endif
  54. #ifdef __GCC_ATOMIC_CHAR32_T_LOCK_FREE
  55. #define ATOMIC_CHAR32_T_LOCK_FREE __GCC_ATOMIC_CHAR32_T_LOCK_FREE
  56. #endif
  57. #ifdef __GCC_ATOMIC_WCHAR_T_LOCK_FREE
  58. #define ATOMIC_WCHAR_T_LOCK_FREE __GCC_ATOMIC_WCHAR_T_LOCK_FREE
  59. #endif
  60. #ifdef __GCC_ATOMIC_SHORT_LOCK_FREE
  61. #define ATOMIC_SHORT_LOCK_FREE __GCC_ATOMIC_SHORT_LOCK_FREE
  62. #endif
  63. #ifdef __GCC_ATOMIC_INT_LOCK_FREE
  64. #define ATOMIC_INT_LOCK_FREE __GCC_ATOMIC_INT_LOCK_FREE
  65. #endif
  66. #ifdef __GCC_ATOMIC_LONG_LOCK_FREE
  67. #define ATOMIC_LONG_LOCK_FREE __GCC_ATOMIC_LONG_LOCK_FREE
  68. #endif
  69. #ifdef __GCC_ATOMIC_LLONG_LOCK_FREE
  70. #define ATOMIC_LLONG_LOCK_FREE __GCC_ATOMIC_LLONG_LOCK_FREE
  71. #endif
  72. #ifdef __GCC_ATOMIC_POINTER_LOCK_FREE
  73. #define ATOMIC_POINTER_LOCK_FREE __GCC_ATOMIC_POINTER_LOCK_FREE
  74. #endif
  75. /*
  76. * 7.17.2 Initialization.
  77. */
  78. #if defined(__CLANG_ATOMICS)
  79. #define ATOMIC_VAR_INIT(value) (value)
  80. #define atomic_init(obj, value) __c11_atomic_init(obj, value)
  81. #else
  82. #define ATOMIC_VAR_INIT(value) { .__val = (value) }
  83. #define atomic_init(obj, value) ((void)((obj)->__val = (value)))
  84. #endif
  85. /*
  86. * Clang and recent GCC both provide predefined macros for the memory
  87. * orderings. If we are using a compiler that doesn't define them, use the
  88. * clang values - these will be ignored in the fallback path.
  89. */
  90. #ifndef __ATOMIC_RELAXED
  91. #define __ATOMIC_RELAXED 0
  92. #endif
  93. #ifndef __ATOMIC_CONSUME
  94. #define __ATOMIC_CONSUME 1
  95. #endif
  96. #ifndef __ATOMIC_ACQUIRE
  97. #define __ATOMIC_ACQUIRE 2
  98. #endif
  99. #ifndef __ATOMIC_RELEASE
  100. #define __ATOMIC_RELEASE 3
  101. #endif
  102. #ifndef __ATOMIC_ACQ_REL
  103. #define __ATOMIC_ACQ_REL 4
  104. #endif
  105. #ifndef __ATOMIC_SEQ_CST
  106. #define __ATOMIC_SEQ_CST 5
  107. #endif
  108. /*
  109. * 7.17.3 Order and consistency.
  110. *
  111. * The memory_order_* constants that denote the barrier behaviour of the
  112. * atomic operations.
  113. */
  114. typedef enum {
  115. memory_order_relaxed = __ATOMIC_RELAXED,
  116. memory_order_consume = __ATOMIC_CONSUME,
  117. memory_order_acquire = __ATOMIC_ACQUIRE,
  118. memory_order_release = __ATOMIC_RELEASE,
  119. memory_order_acq_rel = __ATOMIC_ACQ_REL,
  120. memory_order_seq_cst = __ATOMIC_SEQ_CST
  121. } memory_order;
  122. /*
  123. * 7.17.4 Fences.
  124. */
  125. static __inline void
  126. atomic_thread_fence(memory_order __order __unused)
  127. {
  128. #ifdef __CLANG_ATOMICS
  129. __c11_atomic_thread_fence(__order);
  130. #elif defined(__GNUC_ATOMICS)
  131. __atomic_thread_fence(__order);
  132. #else
  133. __sync_synchronize();
  134. #endif
  135. }
  136. static __inline void
  137. atomic_signal_fence(memory_order __order __unused)
  138. {
  139. #ifdef __CLANG_ATOMICS
  140. __c11_atomic_signal_fence(__order);
  141. #elif defined(__GNUC_ATOMICS)
  142. __atomic_signal_fence(__order);
  143. #else
  144. __asm volatile ("" ::: "memory");
  145. #endif
  146. }
  147. /*
  148. * 7.17.5 Lock-free property.
  149. */
  150. #if defined(_KERNEL)
  151. /* Atomics in kernelspace are always lock-free. */
  152. #define atomic_is_lock_free(obj) \
  153. ((void)(obj), (_Bool)1)
  154. #elif defined(__CLANG_ATOMICS) || defined(__GNUC_ATOMICS)
  155. #define atomic_is_lock_free(obj) \
  156. __atomic_is_lock_free(sizeof(*(obj)), obj)
  157. #else
  158. #define atomic_is_lock_free(obj) \
  159. ((void)(obj), sizeof((obj)->__val) <= sizeof(void *))
  160. #endif
  161. /*
  162. * 7.17.6 Atomic integer types.
  163. */
  164. typedef _Atomic(_Bool) atomic_bool;
  165. typedef _Atomic(char) atomic_char;
  166. typedef _Atomic(signed char) atomic_schar;
  167. typedef _Atomic(unsigned char) atomic_uchar;
  168. typedef _Atomic(short) atomic_short;
  169. typedef _Atomic(unsigned short) atomic_ushort;
  170. typedef _Atomic(int) atomic_int;
  171. typedef _Atomic(unsigned int) atomic_uint;
  172. typedef _Atomic(long) atomic_long;
  173. typedef _Atomic(unsigned long) atomic_ulong;
  174. typedef _Atomic(long long) atomic_llong;
  175. typedef _Atomic(unsigned long long) atomic_ullong;
  176. #if 0
  177. typedef _Atomic(__char16_t) atomic_char16_t;
  178. typedef _Atomic(__char32_t) atomic_char32_t;
  179. #endif
  180. typedef _Atomic(wchar_t) atomic_wchar_t;
  181. typedef _Atomic(int_least8_t) atomic_int_least8_t;
  182. typedef _Atomic(uint_least8_t) atomic_uint_least8_t;
  183. typedef _Atomic(int_least16_t) atomic_int_least16_t;
  184. typedef _Atomic(uint_least16_t) atomic_uint_least16_t;
  185. typedef _Atomic(int_least32_t) atomic_int_least32_t;
  186. typedef _Atomic(uint_least32_t) atomic_uint_least32_t;
  187. typedef _Atomic(int_least64_t) atomic_int_least64_t;
  188. typedef _Atomic(uint_least64_t) atomic_uint_least64_t;
  189. typedef _Atomic(int_fast8_t) atomic_int_fast8_t;
  190. typedef _Atomic(uint_fast8_t) atomic_uint_fast8_t;
  191. typedef _Atomic(int_fast16_t) atomic_int_fast16_t;
  192. typedef _Atomic(uint_fast16_t) atomic_uint_fast16_t;
  193. typedef _Atomic(int_fast32_t) atomic_int_fast32_t;
  194. typedef _Atomic(uint_fast32_t) atomic_uint_fast32_t;
  195. typedef _Atomic(int_fast64_t) atomic_int_fast64_t;
  196. typedef _Atomic(uint_fast64_t) atomic_uint_fast64_t;
  197. typedef _Atomic(intptr_t) atomic_intptr_t;
  198. typedef _Atomic(uintptr_t) atomic_uintptr_t;
  199. typedef _Atomic(size_t) atomic_size_t;
  200. typedef _Atomic(ptrdiff_t) atomic_ptrdiff_t;
  201. typedef _Atomic(intmax_t) atomic_intmax_t;
  202. typedef _Atomic(uintmax_t) atomic_uintmax_t;
  203. /*
  204. * 7.17.7 Operations on atomic types.
  205. */
  206. /*
  207. * Compiler-specific operations.
  208. */
  209. #if defined(__CLANG_ATOMICS)
  210. #define atomic_compare_exchange_strong_explicit(object, expected, \
  211. desired, success, failure) \
  212. __c11_atomic_compare_exchange_strong(object, expected, desired, \
  213. success, failure)
  214. #define atomic_compare_exchange_weak_explicit(object, expected, \
  215. desired, success, failure) \
  216. __c11_atomic_compare_exchange_weak(object, expected, desired, \
  217. success, failure)
  218. #define atomic_exchange_explicit(object, desired, order) \
  219. __c11_atomic_exchange(object, desired, order)
  220. #define atomic_fetch_add_explicit(object, operand, order) \
  221. __c11_atomic_fetch_add(object, operand, order)
  222. #define atomic_fetch_and_explicit(object, operand, order) \
  223. __c11_atomic_fetch_and(object, operand, order)
  224. #define atomic_fetch_or_explicit(object, operand, order) \
  225. __c11_atomic_fetch_or(object, operand, order)
  226. #define atomic_fetch_sub_explicit(object, operand, order) \
  227. __c11_atomic_fetch_sub(object, operand, order)
  228. #define atomic_fetch_xor_explicit(object, operand, order) \
  229. __c11_atomic_fetch_xor(object, operand, order)
  230. #define atomic_load_explicit(object, order) \
  231. __c11_atomic_load(object, order)
  232. #define atomic_store_explicit(object, desired, order) \
  233. __c11_atomic_store(object, desired, order)
  234. #elif defined(__GNUC_ATOMICS)
  235. #define atomic_compare_exchange_strong_explicit(object, expected, \
  236. desired, success, failure) \
  237. __atomic_compare_exchange_n(object, expected, \
  238. desired, 0, success, failure)
  239. #define atomic_compare_exchange_weak_explicit(object, expected, \
  240. desired, success, failure) \
  241. __atomic_compare_exchange_n(object, expected, \
  242. desired, 1, success, failure)
  243. #define atomic_exchange_explicit(object, desired, order) \
  244. __atomic_exchange_n(object, desired, order)
  245. #define atomic_fetch_add_explicit(object, operand, order) \
  246. __atomic_fetch_add(object, operand, order)
  247. #define atomic_fetch_and_explicit(object, operand, order) \
  248. __atomic_fetch_and(object, operand, order)
  249. #define atomic_fetch_or_explicit(object, operand, order) \
  250. __atomic_fetch_or(object, operand, order)
  251. #define atomic_fetch_sub_explicit(object, operand, order) \
  252. __atomic_fetch_sub(object, operand, order)
  253. #define atomic_fetch_xor_explicit(object, operand, order) \
  254. __atomic_fetch_xor(object, operand, order)
  255. #define atomic_load_explicit(object, order) \
  256. __atomic_load_n(object, order)
  257. #define atomic_store_explicit(object, desired, order) \
  258. __atomic_store_n(object, desired, order)
  259. #else
  260. #define __atomic_apply_stride(object, operand) \
  261. (((__typeof__((object)->__val))0) + (operand))
  262. #define atomic_compare_exchange_strong_explicit(object, expected, \
  263. desired, success, failure) __extension__ ({ \
  264. __typeof__(expected) __ep = (expected); \
  265. __typeof__(*__ep) __e = *__ep; \
  266. (void)(success); (void)(failure); \
  267. (_Bool)((*__ep = __sync_val_compare_and_swap(&(object)->__val, \
  268. __e, desired)) == __e); \
  269. })
  270. #define atomic_compare_exchange_weak_explicit(object, expected, \
  271. desired, success, failure) \
  272. atomic_compare_exchange_strong_explicit(object, expected, \
  273. desired, success, failure)
  274. #if __has_builtin(__sync_swap)
  275. /* Clang provides a full-barrier atomic exchange - use it if available. */
  276. #define atomic_exchange_explicit(object, desired, order) \
  277. ((void)(order), __sync_swap(&(object)->__val, desired))
  278. #else
  279. /*
  280. * __sync_lock_test_and_set() is only an acquire barrier in theory (although in
  281. * practice it is usually a full barrier) so we need an explicit barrier before
  282. * it.
  283. */
  284. #define atomic_exchange_explicit(object, desired, order) \
  285. __extension__ ({ \
  286. __typeof__(object) __o = (object); \
  287. __typeof__(desired) __d = (desired); \
  288. (void)(order); \
  289. __sync_synchronize(); \
  290. __sync_lock_test_and_set(&(__o)->__val, __d); \
  291. })
  292. #endif
  293. #define atomic_fetch_add_explicit(object, operand, order) \
  294. ((void)(order), __sync_fetch_and_add(&(object)->__val, \
  295. __atomic_apply_stride(object, operand)))
  296. #define atomic_fetch_and_explicit(object, operand, order) \
  297. ((void)(order), __sync_fetch_and_and(&(object)->__val, operand))
  298. #define atomic_fetch_or_explicit(object, operand, order) \
  299. ((void)(order), __sync_fetch_and_or(&(object)->__val, operand))
  300. #define atomic_fetch_sub_explicit(object, operand, order) \
  301. ((void)(order), __sync_fetch_and_sub(&(object)->__val, \
  302. __atomic_apply_stride(object, operand)))
  303. #define atomic_fetch_xor_explicit(object, operand, order) \
  304. ((void)(order), __sync_fetch_and_xor(&(object)->__val, operand))
  305. #define atomic_load_explicit(object, order) \
  306. ((void)(order), __sync_fetch_and_add(&(object)->__val, 0))
  307. #define atomic_store_explicit(object, desired, order) \
  308. ((void)atomic_exchange_explicit(object, desired, order))
  309. #endif
  310. /*
  311. * Convenience functions.
  312. *
  313. * Don't provide these in kernel space. In kernel space, we should be
  314. * disciplined enough to always provide explicit barriers.
  315. */
  316. #ifndef _KERNEL
  317. #define atomic_compare_exchange_strong(object, expected, desired) \
  318. atomic_compare_exchange_strong_explicit(object, expected, \
  319. desired, memory_order_seq_cst, memory_order_seq_cst)
  320. #define atomic_compare_exchange_weak(object, expected, desired) \
  321. atomic_compare_exchange_weak_explicit(object, expected, \
  322. desired, memory_order_seq_cst, memory_order_seq_cst)
  323. #define atomic_exchange(object, desired) \
  324. atomic_exchange_explicit(object, desired, memory_order_seq_cst)
  325. #define atomic_fetch_add(object, operand) \
  326. atomic_fetch_add_explicit(object, operand, memory_order_seq_cst)
  327. #define atomic_fetch_and(object, operand) \
  328. atomic_fetch_and_explicit(object, operand, memory_order_seq_cst)
  329. #define atomic_fetch_or(object, operand) \
  330. atomic_fetch_or_explicit(object, operand, memory_order_seq_cst)
  331. #define atomic_fetch_sub(object, operand) \
  332. atomic_fetch_sub_explicit(object, operand, memory_order_seq_cst)
  333. #define atomic_fetch_xor(object, operand) \
  334. atomic_fetch_xor_explicit(object, operand, memory_order_seq_cst)
  335. #define atomic_load(object) \
  336. atomic_load_explicit(object, memory_order_seq_cst)
  337. #define atomic_store(object, desired) \
  338. atomic_store_explicit(object, desired, memory_order_seq_cst)
  339. #endif /* !_KERNEL */
  340. /*
  341. * 7.17.8 Atomic flag type and operations.
  342. *
  343. * XXX: Assume atomic_bool can be used as an atomic_flag. Is there some
  344. * kind of compiler built-in type we could use?
  345. */
  346. typedef struct {
  347. atomic_bool __flag;
  348. } atomic_flag;
  349. #define ATOMIC_FLAG_INIT { ATOMIC_VAR_INIT(0) }
  350. static __inline _Bool
  351. atomic_flag_test_and_set_explicit(volatile atomic_flag *__object,
  352. memory_order __order)
  353. {
  354. return (atomic_exchange_explicit(&__object->__flag, 1, __order));
  355. }
  356. static __inline void
  357. atomic_flag_clear_explicit(volatile atomic_flag *__object, memory_order __order)
  358. {
  359. atomic_store_explicit(&__object->__flag, 0, __order);
  360. }
  361. #ifndef _KERNEL
  362. static __inline _Bool
  363. atomic_flag_test_and_set(volatile atomic_flag *__object)
  364. {
  365. return (atomic_flag_test_and_set_explicit(__object,
  366. memory_order_seq_cst));
  367. }
  368. static __inline void
  369. atomic_flag_clear(volatile atomic_flag *__object)
  370. {
  371. atomic_flag_clear_explicit(__object, memory_order_seq_cst);
  372. }
  373. #endif /* !_KERNEL */
  374. #endif /* !_STDATOMIC_H_ */