  1. // -*- C++ -*- header.
  2. // Copyright (C) 2008-2023 Free Software Foundation, Inc.
  3. //
  4. // This file is part of the GNU ISO C++ Library. This library is free
  5. // software; you can redistribute it and/or modify it under the
  6. // terms of the GNU General Public License as published by the
  7. // Free Software Foundation; either version 3, or (at your option)
  8. // any later version.
  9. // This library is distributed in the hope that it will be useful,
  10. // but WITHOUT ANY WARRANTY; without even the implied warranty of
  11. // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  12. // GNU General Public License for more details.
  13. // Under Section 7 of GPL version 3, you are granted additional
  14. // permissions described in the GCC Runtime Library Exception, version
  15. // 3.1, as published by the Free Software Foundation.
  16. // You should have received a copy of the GNU General Public License and
  17. // a copy of the GCC Runtime Library Exception along with this program;
  18. // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
  19. // <http://www.gnu.org/licenses/>.
  20. /** @file include/atomic
  21. * This is a Standard C++ Library header.
  22. */
  23. // Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
  24. // http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html
  25. #ifndef _GLIBCXX_ATOMIC
  26. #define _GLIBCXX_ATOMIC 1
  27. #pragma GCC system_header
  28. #if __cplusplus < 201103L
  29. # include <bits/c++0x_warning.h>
  30. #else
  31. #include <bits/atomic_base.h>
  32. namespace std _GLIBCXX_VISIBILITY(default)
  33. {
  34. _GLIBCXX_BEGIN_NAMESPACE_VERSION
  35. /**
  36. * @addtogroup atomics
  37. * @{
  38. */
  39. #if __cplusplus >= 201703L
  40. # define __cpp_lib_atomic_is_always_lock_free 201603L
  41. #endif
  /// Primary class template declaration; the generic definition, the
  /// bool/pointer specializations, and the integral specializations follow.
  template<typename _Tp>
    struct atomic;
  44. /// atomic<bool>
  45. // NB: No operators or fetch-operations for this type.
  template<>
    struct atomic<bool>
    {
      using value_type = bool;

    private:
      // Every operation below delegates to this integral atomic base.
      __atomic_base<bool> _M_base;

    public:
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      /// Non-atomic initialization; usable in constant expressions.
      constexpr atomic(bool __i) noexcept : _M_base(__i) { }

      /// Atomic assignment (delegates to the base's seq_cst store);
      /// returns the value assigned.
      bool
      operator=(bool __i) noexcept
      { return _M_base.operator=(__i); }

      bool
      operator=(bool __i) volatile noexcept
      { return _M_base.operator=(__i); }

      /// Implicit conversion performs a seq_cst load.
      operator bool() const noexcept
      { return _M_base.load(); }

      operator bool() const volatile noexcept
      { return _M_base.load(); }

      /// True if operations on this object are implemented lock-free.
      bool
      is_lock_free() const noexcept { return _M_base.is_lock_free(); }

      bool
      is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }

#if __cplusplus >= 201703L
      // Compile-time constant: true only when bool atomics are always
      // lock-free on this target (macro value 2 means "always").
      static constexpr bool is_always_lock_free = ATOMIC_BOOL_LOCK_FREE == 2;
#endif

      /// Atomically replace the value, with the given memory order.
      void
      store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
      { _M_base.store(__i, __m); }

      void
      store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
      { _M_base.store(__i, __m); }

      /// Atomically read the value, with the given memory order.
      bool
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return _M_base.load(__m); }

      bool
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      { return _M_base.load(__m); }

      /// Atomically replace the value, returning the previous value.
      bool
      exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
      { return _M_base.exchange(__i, __m); }

      bool
      exchange(bool __i,
	       memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_base.exchange(__i, __m); }

      /// CAS that may fail spuriously; __m1 is the success order,
      /// __m2 the failure order.  On failure __i1 is updated to the
      /// observed value.
      bool
      compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
			    memory_order __m2) noexcept
      { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

      bool
      compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
			    memory_order __m2) volatile noexcept
      { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

      /// Single-order overload; the base derives the failure order.
      bool
      compare_exchange_weak(bool& __i1, bool __i2,
			    memory_order __m = memory_order_seq_cst) noexcept
      { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

      bool
      compare_exchange_weak(bool& __i1, bool __i2,
			    memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

      /// CAS that only fails if the value differs from the expected one.
      bool
      compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
			      memory_order __m2) noexcept
      { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

      bool
      compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
			      memory_order __m2) volatile noexcept
      { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

      bool
      compare_exchange_strong(bool& __i1, bool __i2,
			      memory_order __m = memory_order_seq_cst) noexcept
      { return _M_base.compare_exchange_strong(__i1, __i2, __m); }

      bool
      compare_exchange_strong(bool& __i1, bool __i2,
			      memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_base.compare_exchange_strong(__i1, __i2, __m); }

#if __cpp_lib_atomic_wait
      /// Block until the value is observed to differ from __old.
      void
      wait(bool __old, memory_order __m = memory_order_seq_cst) const noexcept
      { _M_base.wait(__old, __m); }

      // TODO add const volatile overload

      void
      notify_one() noexcept
      { _M_base.notify_one(); }

      void
      notify_all() noexcept
      { _M_base.notify_all(); }
#endif // __cpp_lib_atomic_wait
    };
  140. /// @cond undocumented
  141. #if __cpp_lib_atomic_value_initialization
  142. # define _GLIBCXX20_INIT(I) = I
  143. #else
  144. # define _GLIBCXX20_INIT(I)
  145. #endif
  146. /// @endcond
  147. /**
  148. * @brief Generic atomic type, primary class template.
  149. *
  150. * @tparam _Tp Type to be made atomic, must be trivially copyable.
  151. */
  template<typename _Tp>
    struct atomic
    {
      using value_type = _Tp;

    private:
      // Align 1/2/4/8/16-byte types to at least their size.
      // Power-of-two sizes up to 16 get _S_min_alignment == sizeof(_Tp);
      // other sizes get 0 so only the natural alignof applies.
      static constexpr int _S_min_alignment
	= (sizeof(_Tp) & (sizeof(_Tp) - 1)) || sizeof(_Tp) > 16
	? 0 : sizeof(_Tp);

      static constexpr int _S_alignment
	= _S_min_alignment > alignof(_Tp) ? _S_min_alignment : alignof(_Tp);

      // The stored value, over-aligned as computed above; value-initialized
      // in C++20 mode via _GLIBCXX20_INIT (P0883).
      alignas(_S_alignment) _Tp _M_i _GLIBCXX20_INIT(_Tp());

      static_assert(__is_trivially_copyable(_Tp),
		    "std::atomic requires a trivially copyable type");

      static_assert(sizeof(_Tp) > 0,
		    "Incomplete or zero-sized types are not supported");

#if __cplusplus > 201703L
      // C++20 additionally requires these (P0883 / [atomics.types.generic]).
      static_assert(is_copy_constructible_v<_Tp>);
      static_assert(is_move_constructible_v<_Tp>);
      static_assert(is_copy_assignable_v<_Tp>);
      static_assert(is_move_assignable_v<_Tp>);
#endif

    public:
      atomic() = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      /// Non-atomic initialization.  Padding bits (if any) are cleared so
      /// later compare_exchange on the object representation behaves sanely.
      constexpr atomic(_Tp __i) noexcept : _M_i(__i)
      {
#if __cplusplus >= 201402L && __has_builtin(__builtin_clear_padding)
	if _GLIBCXX17_CONSTEXPR (__atomic_impl::__maybe_has_padding<_Tp>())
	  __builtin_clear_padding(std::__addressof(_M_i));
#endif
      }

      /// Implicit conversion performs a seq_cst load.
      operator _Tp() const noexcept
      { return load(); }

      operator _Tp() const volatile noexcept
      { return load(); }

      /// Atomic assignment (seq_cst store); returns the value assigned.
      _Tp
      operator=(_Tp __i) noexcept
      { store(__i); return __i; }

      _Tp
      operator=(_Tp __i) volatile noexcept
      { store(__i); return __i; }

      bool
      is_lock_free() const noexcept
      {
	// Produce a fake, minimally aligned pointer.
	return __atomic_is_lock_free(sizeof(_M_i),
	    reinterpret_cast<void *>(-_S_alignment));
      }

      bool
      is_lock_free() const volatile noexcept
      {
	// Produce a fake, minimally aligned pointer.
	return __atomic_is_lock_free(sizeof(_M_i),
	    reinterpret_cast<void *>(-_S_alignment));
      }

#if __cplusplus >= 201703L
      // Null pointer argument: answer must hold for any object of this size.
      static constexpr bool is_always_lock_free
	= __atomic_always_lock_free(sizeof(_M_i), 0);
#endif

      /// Atomically replace the value; padding of the incoming value is
      /// cleared before the store.
      void
      store(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
      {
	__atomic_store(std::__addressof(_M_i),
		       __atomic_impl::__clear_padding(__i),
		       int(__m));
      }

      void
      store(_Tp __i, memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	__atomic_store(std::__addressof(_M_i),
		       __atomic_impl::__clear_padding(__i),
		       int(__m));
      }

      /// Atomically read the value.  The result is loaded into a suitably
      /// aligned raw buffer and returned from there.
      _Tp
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
	alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
	_Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
	__atomic_load(std::__addressof(_M_i), __ptr, int(__m));
	return *__ptr;
      }

      _Tp
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
	alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
	_Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
	__atomic_load(std::__addressof(_M_i), __ptr, int(__m));
	return *__ptr;
      }

      /// Atomically replace the value, returning the previous value.
      _Tp
      exchange(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
      {
	alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
	_Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
	__atomic_exchange(std::__addressof(_M_i),
			  __atomic_impl::__clear_padding(__i),
			  __ptr, int(__m));
	return *__ptr;
      }

      _Tp
      exchange(_Tp __i,
	       memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
	_Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
	__atomic_exchange(std::__addressof(_M_i),
			  __atomic_impl::__clear_padding(__i),
			  __ptr, int(__m));
	return *__ptr;
      }

      /// Weak CAS (may fail spuriously); the 'true' argument selects the
      /// weak form of the underlying builtin.  On failure __e is updated
      /// to the observed value.
      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
			    memory_order __f) noexcept
      {
	return __atomic_impl::__compare_exchange(_M_i, __e, __i, true,
						 __s, __f);
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
			    memory_order __f) volatile noexcept
      {
	return __atomic_impl::__compare_exchange(_M_i, __e, __i, true,
						 __s, __f);
      }

      /// Single-order overload; failure order derived by
      /// __cmpexch_failure_order.
      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
			    memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_weak(__e, __i, __m,
				     __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
			    memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_weak(__e, __i, __m,
				     __cmpexch_failure_order(__m)); }

      /// Strong CAS ('false' selects the non-spurious-failure builtin).
      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
			      memory_order __f) noexcept
      {
	return __atomic_impl::__compare_exchange(_M_i, __e, __i, false,
						 __s, __f);
      }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
			      memory_order __f) volatile noexcept
      {
	return __atomic_impl::__compare_exchange(_M_i, __e, __i, false,
						 __s, __f);
      }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
			      memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_strong(__e, __i, __m,
				       __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
			      memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_strong(__e, __i, __m,
				       __cmpexch_failure_order(__m)); }

#if __cpp_lib_atomic_wait
      /// Block until a load with order __m observes a value != __old.
      void
      wait(_Tp __old, memory_order __m = memory_order_seq_cst) const noexcept
      {
	std::__atomic_wait_address_v(&_M_i, __old,
			 [__m, this] { return this->load(__m); });
      }

      // TODO add const volatile overload

      void
      notify_one() noexcept
      { std::__atomic_notify_address(&_M_i, false); }

      void
      notify_all() noexcept
      { std::__atomic_notify_address(&_M_i, true); }
#endif // __cpp_lib_atomic_wait
    };
  330. #undef _GLIBCXX20_INIT
  331. /// Partial specialization for pointer types.
  template<typename _Tp>
    struct atomic<_Tp*>
    {
      using value_type = _Tp*;
      using difference_type = ptrdiff_t;

      typedef _Tp* __pointer_type;
      typedef __atomic_base<_Tp*> __base_type;

      // All operations delegate to this pointer atomic base.
      __base_type _M_b;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      /// Non-atomic initialization; usable in constant expressions.
      constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }

      /// Implicit conversion performs a load of the stored pointer.
      operator __pointer_type() const noexcept
      { return __pointer_type(_M_b); }

      operator __pointer_type() const volatile noexcept
      { return __pointer_type(_M_b); }

      /// Atomic assignment; returns the value assigned.
      __pointer_type
      operator=(__pointer_type __p) noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      { return _M_b.operator=(__p); }

      // Pointer arithmetic operators.  Since C++17 each one asserts that
      // _Tp is an object type, because arithmetic on pointers to
      // non-object types is ill-formed.

      /// Atomic post-increment; returns the old pointer value.
      __pointer_type
      operator++(int) noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b++;
      }

      __pointer_type
      operator++(int) volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b++;
      }

      /// Atomic post-decrement; returns the old pointer value.
      __pointer_type
      operator--(int) noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b--;
      }

      __pointer_type
      operator--(int) volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b--;
      }

      /// Atomic pre-increment; returns the new pointer value.
      __pointer_type
      operator++() noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return ++_M_b;
      }

      __pointer_type
      operator++() volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return ++_M_b;
      }

      /// Atomic pre-decrement; returns the new pointer value.
      __pointer_type
      operator--() noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return --_M_b;
      }

      __pointer_type
      operator--() volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return --_M_b;
      }

      /// Atomic add-and-assign (element offset); returns the new value.
      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.operator+=(__d);
      }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.operator+=(__d);
      }

      /// Atomic subtract-and-assign; returns the new value.
      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.operator-=(__d);
      }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.operator-=(__d);
      }

      bool
      is_lock_free() const noexcept
      { return _M_b.is_lock_free(); }

      bool
      is_lock_free() const volatile noexcept
      { return _M_b.is_lock_free(); }

#if __cplusplus >= 201703L
      // True only when pointer atomics are always lock-free on this target.
      static constexpr bool is_always_lock_free
	= ATOMIC_POINTER_LOCK_FREE == 2;
#endif

      void
      store(__pointer_type __p,
	    memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.store(__p, __m); }

      void
      store(__pointer_type __p,
	    memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.store(__p, __m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return _M_b.load(__m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      { return _M_b.load(__m); }

      /// Atomically replace the pointer, returning the previous value.
      __pointer_type
      exchange(__pointer_type __p,
	       memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.exchange(__p, __m); }

      __pointer_type
      exchange(__pointer_type __p,
	       memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.exchange(__p, __m); }

      /// Weak CAS; on failure __p1 is updated to the observed value.
      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
			    memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_weak(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
			    memory_order __m1,
			    memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_weak(__p1, __p2, __m1, __m2); }

      /// Single-order overload; failure order derived by
      /// __cmpexch_failure_order.
      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
			    memory_order __m = memory_order_seq_cst) noexcept
      {
	return compare_exchange_weak(__p1, __p2, __m,
				     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
			    memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	return compare_exchange_weak(__p1, __p2, __m,
				     __cmpexch_failure_order(__m));
      }

      /// Strong CAS; only fails when the value differs from __p1.
      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
			      memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
			      memory_order __m1,
			      memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
			      memory_order __m = memory_order_seq_cst) noexcept
      {
	return _M_b.compare_exchange_strong(__p1, __p2, __m,
					    __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
			      memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	return _M_b.compare_exchange_strong(__p1, __p2, __m,
					    __cmpexch_failure_order(__m));
      }

#if __cpp_lib_atomic_wait
      /// Block until the stored pointer is observed to differ from __old.
      void
      wait(__pointer_type __old, memory_order __m = memory_order_seq_cst) const noexcept
      { _M_b.wait(__old, __m); }

      // TODO add const volatile overload

      void
      notify_one() noexcept
      { _M_b.notify_one(); }

      void
      notify_all() noexcept
      { _M_b.notify_all(); }
#endif // __cpp_lib_atomic_wait

      /// Atomic fetch-add of an element offset; returns the old pointer.
      __pointer_type
      fetch_add(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.fetch_add(__d, __m);
      }

      __pointer_type
      fetch_add(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.fetch_add(__d, __m);
      }

      /// Atomic fetch-subtract; returns the old pointer.
      __pointer_type
      fetch_sub(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.fetch_sub(__d, __m);
      }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.fetch_sub(__d, __m);
      }
    };
  579. /// Explicit specialization for char.
  580. template<>
  581. struct atomic<char> : __atomic_base<char>
  582. {
  583. typedef char __integral_type;
  584. typedef __atomic_base<char> __base_type;
  585. atomic() noexcept = default;
  586. ~atomic() noexcept = default;
  587. atomic(const atomic&) = delete;
  588. atomic& operator=(const atomic&) = delete;
  589. atomic& operator=(const atomic&) volatile = delete;
  590. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  591. using __base_type::operator __integral_type;
  592. using __base_type::operator=;
  593. #if __cplusplus >= 201703L
  594. static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
  595. #endif
  596. };
  597. /// Explicit specialization for signed char.
  598. template<>
  599. struct atomic<signed char> : __atomic_base<signed char>
  600. {
  601. typedef signed char __integral_type;
  602. typedef __atomic_base<signed char> __base_type;
  603. atomic() noexcept= default;
  604. ~atomic() noexcept = default;
  605. atomic(const atomic&) = delete;
  606. atomic& operator=(const atomic&) = delete;
  607. atomic& operator=(const atomic&) volatile = delete;
  608. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  609. using __base_type::operator __integral_type;
  610. using __base_type::operator=;
  611. #if __cplusplus >= 201703L
  612. static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
  613. #endif
  614. };
  615. /// Explicit specialization for unsigned char.
  616. template<>
  617. struct atomic<unsigned char> : __atomic_base<unsigned char>
  618. {
  619. typedef unsigned char __integral_type;
  620. typedef __atomic_base<unsigned char> __base_type;
  621. atomic() noexcept= default;
  622. ~atomic() noexcept = default;
  623. atomic(const atomic&) = delete;
  624. atomic& operator=(const atomic&) = delete;
  625. atomic& operator=(const atomic&) volatile = delete;
  626. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  627. using __base_type::operator __integral_type;
  628. using __base_type::operator=;
  629. #if __cplusplus >= 201703L
  630. static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
  631. #endif
  632. };
  633. /// Explicit specialization for short.
  634. template<>
  635. struct atomic<short> : __atomic_base<short>
  636. {
  637. typedef short __integral_type;
  638. typedef __atomic_base<short> __base_type;
  639. atomic() noexcept = default;
  640. ~atomic() noexcept = default;
  641. atomic(const atomic&) = delete;
  642. atomic& operator=(const atomic&) = delete;
  643. atomic& operator=(const atomic&) volatile = delete;
  644. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  645. using __base_type::operator __integral_type;
  646. using __base_type::operator=;
  647. #if __cplusplus >= 201703L
  648. static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
  649. #endif
  650. };
  651. /// Explicit specialization for unsigned short.
  652. template<>
  653. struct atomic<unsigned short> : __atomic_base<unsigned short>
  654. {
  655. typedef unsigned short __integral_type;
  656. typedef __atomic_base<unsigned short> __base_type;
  657. atomic() noexcept = default;
  658. ~atomic() noexcept = default;
  659. atomic(const atomic&) = delete;
  660. atomic& operator=(const atomic&) = delete;
  661. atomic& operator=(const atomic&) volatile = delete;
  662. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  663. using __base_type::operator __integral_type;
  664. using __base_type::operator=;
  665. #if __cplusplus >= 201703L
  666. static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
  667. #endif
  668. };
  669. /// Explicit specialization for int.
  670. template<>
  671. struct atomic<int> : __atomic_base<int>
  672. {
  673. typedef int __integral_type;
  674. typedef __atomic_base<int> __base_type;
  675. atomic() noexcept = default;
  676. ~atomic() noexcept = default;
  677. atomic(const atomic&) = delete;
  678. atomic& operator=(const atomic&) = delete;
  679. atomic& operator=(const atomic&) volatile = delete;
  680. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  681. using __base_type::operator __integral_type;
  682. using __base_type::operator=;
  683. #if __cplusplus >= 201703L
  684. static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
  685. #endif
  686. };
  687. /// Explicit specialization for unsigned int.
  688. template<>
  689. struct atomic<unsigned int> : __atomic_base<unsigned int>
  690. {
  691. typedef unsigned int __integral_type;
  692. typedef __atomic_base<unsigned int> __base_type;
  693. atomic() noexcept = default;
  694. ~atomic() noexcept = default;
  695. atomic(const atomic&) = delete;
  696. atomic& operator=(const atomic&) = delete;
  697. atomic& operator=(const atomic&) volatile = delete;
  698. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  699. using __base_type::operator __integral_type;
  700. using __base_type::operator=;
  701. #if __cplusplus >= 201703L
  702. static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
  703. #endif
  704. };
  705. /// Explicit specialization for long.
  706. template<>
  707. struct atomic<long> : __atomic_base<long>
  708. {
  709. typedef long __integral_type;
  710. typedef __atomic_base<long> __base_type;
  711. atomic() noexcept = default;
  712. ~atomic() noexcept = default;
  713. atomic(const atomic&) = delete;
  714. atomic& operator=(const atomic&) = delete;
  715. atomic& operator=(const atomic&) volatile = delete;
  716. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  717. using __base_type::operator __integral_type;
  718. using __base_type::operator=;
  719. #if __cplusplus >= 201703L
  720. static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
  721. #endif
  722. };
  723. /// Explicit specialization for unsigned long.
  724. template<>
  725. struct atomic<unsigned long> : __atomic_base<unsigned long>
  726. {
  727. typedef unsigned long __integral_type;
  728. typedef __atomic_base<unsigned long> __base_type;
  729. atomic() noexcept = default;
  730. ~atomic() noexcept = default;
  731. atomic(const atomic&) = delete;
  732. atomic& operator=(const atomic&) = delete;
  733. atomic& operator=(const atomic&) volatile = delete;
  734. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  735. using __base_type::operator __integral_type;
  736. using __base_type::operator=;
  737. #if __cplusplus >= 201703L
  738. static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
  739. #endif
  740. };
  741. /// Explicit specialization for long long.
  742. template<>
  743. struct atomic<long long> : __atomic_base<long long>
  744. {
  745. typedef long long __integral_type;
  746. typedef __atomic_base<long long> __base_type;
  747. atomic() noexcept = default;
  748. ~atomic() noexcept = default;
  749. atomic(const atomic&) = delete;
  750. atomic& operator=(const atomic&) = delete;
  751. atomic& operator=(const atomic&) volatile = delete;
  752. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  753. using __base_type::operator __integral_type;
  754. using __base_type::operator=;
  755. #if __cplusplus >= 201703L
  756. static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
  757. #endif
  758. };
  759. /// Explicit specialization for unsigned long long.
  760. template<>
  761. struct atomic<unsigned long long> : __atomic_base<unsigned long long>
  762. {
  763. typedef unsigned long long __integral_type;
  764. typedef __atomic_base<unsigned long long> __base_type;
  765. atomic() noexcept = default;
  766. ~atomic() noexcept = default;
  767. atomic(const atomic&) = delete;
  768. atomic& operator=(const atomic&) = delete;
  769. atomic& operator=(const atomic&) volatile = delete;
  770. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  771. using __base_type::operator __integral_type;
  772. using __base_type::operator=;
  773. #if __cplusplus >= 201703L
  774. static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
  775. #endif
  776. };
  777. /// Explicit specialization for wchar_t.
  778. template<>
  779. struct atomic<wchar_t> : __atomic_base<wchar_t>
  780. {
  781. typedef wchar_t __integral_type;
  782. typedef __atomic_base<wchar_t> __base_type;
  783. atomic() noexcept = default;
  784. ~atomic() noexcept = default;
  785. atomic(const atomic&) = delete;
  786. atomic& operator=(const atomic&) = delete;
  787. atomic& operator=(const atomic&) volatile = delete;
  788. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  789. using __base_type::operator __integral_type;
  790. using __base_type::operator=;
  791. #if __cplusplus >= 201703L
  792. static constexpr bool is_always_lock_free = ATOMIC_WCHAR_T_LOCK_FREE == 2;
  793. #endif
  794. };
  795. #ifdef _GLIBCXX_USE_CHAR8_T
  796. /// Explicit specialization for char8_t.
  797. template<>
  798. struct atomic<char8_t> : __atomic_base<char8_t>
  799. {
  800. typedef char8_t __integral_type;
  801. typedef __atomic_base<char8_t> __base_type;
  802. atomic() noexcept = default;
  803. ~atomic() noexcept = default;
  804. atomic(const atomic&) = delete;
  805. atomic& operator=(const atomic&) = delete;
  806. atomic& operator=(const atomic&) volatile = delete;
  807. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  808. using __base_type::operator __integral_type;
  809. using __base_type::operator=;
  810. #if __cplusplus > 201402L
  811. static constexpr bool is_always_lock_free
  812. = ATOMIC_CHAR8_T_LOCK_FREE == 2;
  813. #endif
  814. };
  815. #endif
  816. /// Explicit specialization for char16_t.
  817. template<>
  818. struct atomic<char16_t> : __atomic_base<char16_t>
  819. {
  820. typedef char16_t __integral_type;
  821. typedef __atomic_base<char16_t> __base_type;
  822. atomic() noexcept = default;
  823. ~atomic() noexcept = default;
  824. atomic(const atomic&) = delete;
  825. atomic& operator=(const atomic&) = delete;
  826. atomic& operator=(const atomic&) volatile = delete;
  827. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  828. using __base_type::operator __integral_type;
  829. using __base_type::operator=;
  830. #if __cplusplus >= 201703L
  831. static constexpr bool is_always_lock_free
  832. = ATOMIC_CHAR16_T_LOCK_FREE == 2;
  833. #endif
  834. };
  835. /// Explicit specialization for char32_t.
  836. template<>
  837. struct atomic<char32_t> : __atomic_base<char32_t>
  838. {
  839. typedef char32_t __integral_type;
  840. typedef __atomic_base<char32_t> __base_type;
  841. atomic() noexcept = default;
  842. ~atomic() noexcept = default;
  843. atomic(const atomic&) = delete;
  844. atomic& operator=(const atomic&) = delete;
  845. atomic& operator=(const atomic&) volatile = delete;
  846. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  847. using __base_type::operator __integral_type;
  848. using __base_type::operator=;
  849. #if __cplusplus >= 201703L
  850. static constexpr bool is_always_lock_free
  851. = ATOMIC_CHAR32_T_LOCK_FREE == 2;
  852. #endif
  853. };
// Convenience aliases for the atomic specializations ([atomics.syn]).

/// atomic_bool
typedef atomic<bool> atomic_bool;
/// atomic_char
typedef atomic<char> atomic_char;
/// atomic_schar
typedef atomic<signed char> atomic_schar;
/// atomic_uchar
typedef atomic<unsigned char> atomic_uchar;
/// atomic_short
typedef atomic<short> atomic_short;
/// atomic_ushort
typedef atomic<unsigned short> atomic_ushort;
/// atomic_int
typedef atomic<int> atomic_int;
/// atomic_uint
typedef atomic<unsigned int> atomic_uint;
/// atomic_long
typedef atomic<long> atomic_long;
/// atomic_ulong
typedef atomic<unsigned long> atomic_ulong;
/// atomic_llong
typedef atomic<long long> atomic_llong;
/// atomic_ullong
typedef atomic<unsigned long long> atomic_ullong;
/// atomic_wchar_t
typedef atomic<wchar_t> atomic_wchar_t;
#ifdef _GLIBCXX_USE_CHAR8_T
/// atomic_char8_t
typedef atomic<char8_t> atomic_char8_t;
#endif
/// atomic_char16_t
typedef atomic<char16_t> atomic_char16_t;
/// atomic_char32_t
typedef atomic<char32_t> atomic_char32_t;

// Aliases for the <cstdint> integer types, only available when the
// platform provides those types.
#ifdef _GLIBCXX_USE_C99_STDINT_TR1
// _GLIBCXX_RESOLVE_LIB_DEFECTS
// 2441. Exact-width atomic typedefs should be provided

// Exact-width aliases.
/// atomic_int8_t
typedef atomic<int8_t> atomic_int8_t;
/// atomic_uint8_t
typedef atomic<uint8_t> atomic_uint8_t;
/// atomic_int16_t
typedef atomic<int16_t> atomic_int16_t;
/// atomic_uint16_t
typedef atomic<uint16_t> atomic_uint16_t;
/// atomic_int32_t
typedef atomic<int32_t> atomic_int32_t;
/// atomic_uint32_t
typedef atomic<uint32_t> atomic_uint32_t;
/// atomic_int64_t
typedef atomic<int64_t> atomic_int64_t;
/// atomic_uint64_t
typedef atomic<uint64_t> atomic_uint64_t;

// Least-width aliases.
/// atomic_int_least8_t
typedef atomic<int_least8_t> atomic_int_least8_t;
/// atomic_uint_least8_t
typedef atomic<uint_least8_t> atomic_uint_least8_t;
/// atomic_int_least16_t
typedef atomic<int_least16_t> atomic_int_least16_t;
/// atomic_uint_least16_t
typedef atomic<uint_least16_t> atomic_uint_least16_t;
/// atomic_int_least32_t
typedef atomic<int_least32_t> atomic_int_least32_t;
/// atomic_uint_least32_t
typedef atomic<uint_least32_t> atomic_uint_least32_t;
/// atomic_int_least64_t
typedef atomic<int_least64_t> atomic_int_least64_t;
/// atomic_uint_least64_t
typedef atomic<uint_least64_t> atomic_uint_least64_t;

// Fast-width aliases.
/// atomic_int_fast8_t
typedef atomic<int_fast8_t> atomic_int_fast8_t;
/// atomic_uint_fast8_t
typedef atomic<uint_fast8_t> atomic_uint_fast8_t;
/// atomic_int_fast16_t
typedef atomic<int_fast16_t> atomic_int_fast16_t;
/// atomic_uint_fast16_t
typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
/// atomic_int_fast32_t
typedef atomic<int_fast32_t> atomic_int_fast32_t;
/// atomic_uint_fast32_t
typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
/// atomic_int_fast64_t
typedef atomic<int_fast64_t> atomic_int_fast64_t;
/// atomic_uint_fast64_t
typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
#endif

// Pointer-sized and size-related aliases (always available).
/// atomic_intptr_t
typedef atomic<intptr_t> atomic_intptr_t;
/// atomic_uintptr_t
typedef atomic<uintptr_t> atomic_uintptr_t;
/// atomic_size_t
typedef atomic<size_t> atomic_size_t;
/// atomic_ptrdiff_t
typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
#ifdef _GLIBCXX_USE_C99_STDINT_TR1
/// atomic_intmax_t
typedef atomic<intmax_t> atomic_intmax_t;
/// atomic_uintmax_t
typedef atomic<uintmax_t> atomic_uintmax_t;
#endif
// Function definitions, atomic_flag operations.

/// Set the flag and return its previous value, with memory order __m.
inline bool
atomic_flag_test_and_set_explicit(atomic_flag* __a,
                                  memory_order __m) noexcept
{ return __a->test_and_set(__m); }

inline bool
atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
                                  memory_order __m) noexcept
{ return __a->test_and_set(__m); }

#if __cpp_lib_atomic_flag_test
/// Read the flag without modifying it.
inline bool
atomic_flag_test(const atomic_flag* __a) noexcept
{ return __a->test(); }

inline bool
atomic_flag_test(const volatile atomic_flag* __a) noexcept
{ return __a->test(); }

/// Read the flag without modifying it, with memory order __m.
inline bool
atomic_flag_test_explicit(const atomic_flag* __a,
                          memory_order __m) noexcept
{ return __a->test(__m); }

inline bool
atomic_flag_test_explicit(const volatile atomic_flag* __a,
                          memory_order __m) noexcept
{ return __a->test(__m); }
#endif

/// Clear the flag, with memory order __m.
inline void
atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
{ __a->clear(__m); }

inline void
atomic_flag_clear_explicit(volatile atomic_flag* __a,
                           memory_order __m) noexcept
{ __a->clear(__m); }

/// Set the flag and return its previous value (seq_cst ordering).
inline bool
atomic_flag_test_and_set(atomic_flag* __a) noexcept
{ return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

inline bool
atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
{ return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

/// Clear the flag (seq_cst ordering).
inline void
atomic_flag_clear(atomic_flag* __a) noexcept
{ atomic_flag_clear_explicit(__a, memory_order_seq_cst); }

inline void
atomic_flag_clear(volatile atomic_flag* __a) noexcept
{ atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
#if __cpp_lib_atomic_wait
/// Block until the flag's observed value differs from __old.
inline void
atomic_flag_wait(atomic_flag* __a, bool __old) noexcept
{ __a->wait(__old); }

/// As atomic_flag_wait, but observing with memory order __m.
inline void
atomic_flag_wait_explicit(atomic_flag* __a, bool __old,
                          memory_order __m) noexcept
{ __a->wait(__old, __m); }

/// Wake at most one thread blocked waiting on *__a.
inline void
atomic_flag_notify_one(atomic_flag* __a) noexcept
{ __a->notify_one(); }

/// Wake all threads blocked waiting on *__a.
inline void
atomic_flag_notify_all(atomic_flag* __a) noexcept
{ __a->notify_all(); }
#endif // __cpp_lib_atomic_wait
/// @cond undocumented
// _GLIBCXX_RESOLVE_LIB_DEFECTS
// 3220. P0558 broke conforming C++14 uses of atomic shared_ptr
// Identity alias used for the value parameters of the non-member
// functions below: it makes _Tp a non-deduced context there, so the
// atomic<_Tp>* argument alone determines _Tp.
template<typename _Tp>
using __atomic_val_t = __type_identity_t<_Tp>;

// The difference_type of atomic<_Tp>, used by the non-member
// fetch_add/fetch_sub overloads.
template<typename _Tp>
using __atomic_diff_t = typename atomic<_Tp>::difference_type;
/// @endcond
// [atomics.nonmembers] Non-member functions.
// Function templates generally applicable to atomic types.

/// Whether operations on *__a are implemented lock-free.
template<typename _ITp>
inline bool
atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
{ return __a->is_lock_free(); }

template<typename _ITp>
inline bool
atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
{ return __a->is_lock_free(); }

/// Initialize *__a with the value __i (performs a relaxed store).
template<typename _ITp>
inline void
atomic_init(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
{ __a->store(__i, memory_order_relaxed); }

template<typename _ITp>
inline void
atomic_init(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
{ __a->store(__i, memory_order_relaxed); }

/// Store __i into *__a with memory order __m.
template<typename _ITp>
inline void
atomic_store_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
                      memory_order __m) noexcept
{ __a->store(__i, __m); }

template<typename _ITp>
inline void
atomic_store_explicit(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
                      memory_order __m) noexcept
{ __a->store(__i, __m); }

/// Load the value of *__a with memory order __m.
template<typename _ITp>
inline _ITp
atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
{ return __a->load(__m); }

template<typename _ITp>
inline _ITp
atomic_load_explicit(const volatile atomic<_ITp>* __a,
                     memory_order __m) noexcept
{ return __a->load(__m); }

/// Replace the value of *__a with __i, returning the previous value.
template<typename _ITp>
inline _ITp
atomic_exchange_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
                         memory_order __m) noexcept
{ return __a->exchange(__i, __m); }

template<typename _ITp>
inline _ITp
atomic_exchange_explicit(volatile atomic<_ITp>* __a,
                         __atomic_val_t<_ITp> __i,
                         memory_order __m) noexcept
{ return __a->exchange(__i, __m); }

/// Weak compare-and-exchange. *__i1 is the expected value, passed by
/// reference to the member function; __i2 the desired value; __m1 and
/// __m2 the success and failure orderings respectively.
template<typename _ITp>
inline bool
atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
                                      __atomic_val_t<_ITp>* __i1,
                                      __atomic_val_t<_ITp> __i2,
                                      memory_order __m1,
                                      memory_order __m2) noexcept
{ return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

template<typename _ITp>
inline bool
atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
                                      __atomic_val_t<_ITp>* __i1,
                                      __atomic_val_t<_ITp> __i2,
                                      memory_order __m1,
                                      memory_order __m2) noexcept
{ return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

/// Strong compare-and-exchange; same parameters as the weak form.
template<typename _ITp>
inline bool
atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
                                        __atomic_val_t<_ITp>* __i1,
                                        __atomic_val_t<_ITp> __i2,
                                        memory_order __m1,
                                        memory_order __m2) noexcept
{ return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }

template<typename _ITp>
inline bool
atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
                                        __atomic_val_t<_ITp>* __i1,
                                        __atomic_val_t<_ITp> __i2,
                                        memory_order __m1,
                                        memory_order __m2) noexcept
{ return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
// seq_cst convenience forms of the _explicit functions above.

template<typename _ITp>
inline void
atomic_store(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
{ atomic_store_explicit(__a, __i, memory_order_seq_cst); }

template<typename _ITp>
inline void
atomic_store(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
{ atomic_store_explicit(__a, __i, memory_order_seq_cst); }

template<typename _ITp>
inline _ITp
atomic_load(const atomic<_ITp>* __a) noexcept
{ return atomic_load_explicit(__a, memory_order_seq_cst); }

template<typename _ITp>
inline _ITp
atomic_load(const volatile atomic<_ITp>* __a) noexcept
{ return atomic_load_explicit(__a, memory_order_seq_cst); }

template<typename _ITp>
inline _ITp
atomic_exchange(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
{ return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

template<typename _ITp>
inline _ITp
atomic_exchange(volatile atomic<_ITp>* __a,
                __atomic_val_t<_ITp> __i) noexcept
{ return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

template<typename _ITp>
inline bool
atomic_compare_exchange_weak(atomic<_ITp>* __a,
                             __atomic_val_t<_ITp>* __i1,
                             __atomic_val_t<_ITp> __i2) noexcept
{
  return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                               memory_order_seq_cst,
                                               memory_order_seq_cst);
}

template<typename _ITp>
inline bool
atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
                             __atomic_val_t<_ITp>* __i1,
                             __atomic_val_t<_ITp> __i2) noexcept
{
  return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                               memory_order_seq_cst,
                                               memory_order_seq_cst);
}

template<typename _ITp>
inline bool
atomic_compare_exchange_strong(atomic<_ITp>* __a,
                               __atomic_val_t<_ITp>* __i1,
                               __atomic_val_t<_ITp> __i2) noexcept
{
  return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                 memory_order_seq_cst,
                                                 memory_order_seq_cst);
}

template<typename _ITp>
inline bool
atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
                               __atomic_val_t<_ITp>* __i1,
                               __atomic_val_t<_ITp> __i2) noexcept
{
  return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                 memory_order_seq_cst,
                                                 memory_order_seq_cst);
}
#if __cpp_lib_atomic_wait
/// Block until the observed value of *__a differs from __old.
template<typename _Tp>
inline void
atomic_wait(const atomic<_Tp>* __a,
            typename std::atomic<_Tp>::value_type __old) noexcept
{ __a->wait(__old); }

/// As atomic_wait, but observing with memory order __m.
template<typename _Tp>
inline void
atomic_wait_explicit(const atomic<_Tp>* __a,
                     typename std::atomic<_Tp>::value_type __old,
                     std::memory_order __m) noexcept
{ __a->wait(__old, __m); }

/// Wake at most one thread blocked in atomic_wait on *__a.
template<typename _Tp>
inline void
atomic_notify_one(atomic<_Tp>* __a) noexcept
{ __a->notify_one(); }

/// Wake all threads blocked in atomic_wait on *__a.
template<typename _Tp>
inline void
atomic_notify_all(atomic<_Tp>* __a) noexcept
{ __a->notify_all(); }
#endif // __cpp_lib_atomic_wait
// Function templates for atomic_integral and atomic_pointer operations only.
// Some operations (and, or, xor) are only available for atomic integrals,
// which is implemented by taking a parameter of type __atomic_base<_ITp>*.

/// Atomically add __i to *__a, returning the previous value.
template<typename _ITp>
inline _ITp
atomic_fetch_add_explicit(atomic<_ITp>* __a,
                          __atomic_diff_t<_ITp> __i,
                          memory_order __m) noexcept
{ return __a->fetch_add(__i, __m); }

template<typename _ITp>
inline _ITp
atomic_fetch_add_explicit(volatile atomic<_ITp>* __a,
                          __atomic_diff_t<_ITp> __i,
                          memory_order __m) noexcept
{ return __a->fetch_add(__i, __m); }

/// Atomically subtract __i from *__a, returning the previous value.
template<typename _ITp>
inline _ITp
atomic_fetch_sub_explicit(atomic<_ITp>* __a,
                          __atomic_diff_t<_ITp> __i,
                          memory_order __m) noexcept
{ return __a->fetch_sub(__i, __m); }

template<typename _ITp>
inline _ITp
atomic_fetch_sub_explicit(volatile atomic<_ITp>* __a,
                          __atomic_diff_t<_ITp> __i,
                          memory_order __m) noexcept
{ return __a->fetch_sub(__i, __m); }

/// Atomically AND __i into *__a (integral atomics only).
template<typename _ITp>
inline _ITp
atomic_fetch_and_explicit(__atomic_base<_ITp>* __a,
                          __atomic_val_t<_ITp> __i,
                          memory_order __m) noexcept
{ return __a->fetch_and(__i, __m); }

template<typename _ITp>
inline _ITp
atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a,
                          __atomic_val_t<_ITp> __i,
                          memory_order __m) noexcept
{ return __a->fetch_and(__i, __m); }

/// Atomically OR __i into *__a (integral atomics only).
template<typename _ITp>
inline _ITp
atomic_fetch_or_explicit(__atomic_base<_ITp>* __a,
                         __atomic_val_t<_ITp> __i,
                         memory_order __m) noexcept
{ return __a->fetch_or(__i, __m); }

template<typename _ITp>
inline _ITp
atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a,
                         __atomic_val_t<_ITp> __i,
                         memory_order __m) noexcept
{ return __a->fetch_or(__i, __m); }

/// Atomically XOR __i into *__a (integral atomics only).
template<typename _ITp>
inline _ITp
atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a,
                          __atomic_val_t<_ITp> __i,
                          memory_order __m) noexcept
{ return __a->fetch_xor(__i, __m); }

template<typename _ITp>
inline _ITp
atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a,
                          __atomic_val_t<_ITp> __i,
                          memory_order __m) noexcept
{ return __a->fetch_xor(__i, __m); }

// seq_cst convenience forms of the fetch operations above.

template<typename _ITp>
inline _ITp
atomic_fetch_add(atomic<_ITp>* __a,
                 __atomic_diff_t<_ITp> __i) noexcept
{ return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

template<typename _ITp>
inline _ITp
atomic_fetch_add(volatile atomic<_ITp>* __a,
                 __atomic_diff_t<_ITp> __i) noexcept
{ return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

template<typename _ITp>
inline _ITp
atomic_fetch_sub(atomic<_ITp>* __a,
                 __atomic_diff_t<_ITp> __i) noexcept
{ return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

template<typename _ITp>
inline _ITp
atomic_fetch_sub(volatile atomic<_ITp>* __a,
                 __atomic_diff_t<_ITp> __i) noexcept
{ return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

template<typename _ITp>
inline _ITp
atomic_fetch_and(__atomic_base<_ITp>* __a,
                 __atomic_val_t<_ITp> __i) noexcept
{ return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

template<typename _ITp>
inline _ITp
atomic_fetch_and(volatile __atomic_base<_ITp>* __a,
                 __atomic_val_t<_ITp> __i) noexcept
{ return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

template<typename _ITp>
inline _ITp
atomic_fetch_or(__atomic_base<_ITp>* __a,
                __atomic_val_t<_ITp> __i) noexcept
{ return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

template<typename _ITp>
inline _ITp
atomic_fetch_or(volatile __atomic_base<_ITp>* __a,
                __atomic_val_t<_ITp> __i) noexcept
{ return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

template<typename _ITp>
inline _ITp
atomic_fetch_xor(__atomic_base<_ITp>* __a,
                 __atomic_val_t<_ITp> __i) noexcept
{ return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }

template<typename _ITp>
inline _ITp
atomic_fetch_xor(volatile __atomic_base<_ITp>* __a,
                 __atomic_val_t<_ITp> __i) noexcept
{ return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
  1300. #if __cplusplus > 201703L
  1301. #define __cpp_lib_atomic_float 201711L
  1302. template<>
  1303. struct atomic<float> : __atomic_float<float>
  1304. {
  1305. atomic() noexcept = default;
  1306. constexpr
  1307. atomic(float __fp) noexcept : __atomic_float<float>(__fp)
  1308. { }
  1309. atomic& operator=(const atomic&) volatile = delete;
  1310. atomic& operator=(const atomic&) = delete;
  1311. using __atomic_float<float>::operator=;
  1312. };
  1313. template<>
  1314. struct atomic<double> : __atomic_float<double>
  1315. {
  1316. atomic() noexcept = default;
  1317. constexpr
  1318. atomic(double __fp) noexcept : __atomic_float<double>(__fp)
  1319. { }
  1320. atomic& operator=(const atomic&) volatile = delete;
  1321. atomic& operator=(const atomic&) = delete;
  1322. using __atomic_float<double>::operator=;
  1323. };
  1324. template<>
  1325. struct atomic<long double> : __atomic_float<long double>
  1326. {
  1327. atomic() noexcept = default;
  1328. constexpr
  1329. atomic(long double __fp) noexcept : __atomic_float<long double>(__fp)
  1330. { }
  1331. atomic& operator=(const atomic&) volatile = delete;
  1332. atomic& operator=(const atomic&) = delete;
  1333. using __atomic_float<long double>::operator=;
  1334. };
#ifdef __STDCPP_FLOAT16_T__
/// Explicit specialization for _Float16.
template<>
struct atomic<_Float16> : __atomic_float<_Float16>
{
  atomic() noexcept = default;

  /// Construct with an initial value; usable in constant expressions.
  constexpr
  atomic(_Float16 __fp) noexcept : __atomic_float<_Float16>(__fp)
  { }

  atomic& operator=(const atomic&) volatile = delete;
  atomic& operator=(const atomic&) = delete;

  // Assignment from a plain _Float16 comes from the base class.
  using __atomic_float<_Float16>::operator=;
};
#endif
#ifdef __STDCPP_FLOAT32_T__
/// Explicit specialization for _Float32.
template<>
struct atomic<_Float32> : __atomic_float<_Float32>
{
  atomic() noexcept = default;

  /// Construct with an initial value; usable in constant expressions.
  constexpr
  atomic(_Float32 __fp) noexcept : __atomic_float<_Float32>(__fp)
  { }

  atomic& operator=(const atomic&) volatile = delete;
  atomic& operator=(const atomic&) = delete;

  // Assignment from a plain _Float32 comes from the base class.
  using __atomic_float<_Float32>::operator=;
};
#endif
#ifdef __STDCPP_FLOAT64_T__
/// Explicit specialization for _Float64.
template<>
struct atomic<_Float64> : __atomic_float<_Float64>
{
  atomic() noexcept = default;

  /// Construct with an initial value; usable in constant expressions.
  constexpr
  atomic(_Float64 __fp) noexcept : __atomic_float<_Float64>(__fp)
  { }

  atomic& operator=(const atomic&) volatile = delete;
  atomic& operator=(const atomic&) = delete;

  // Assignment from a plain _Float64 comes from the base class.
  using __atomic_float<_Float64>::operator=;
};
#endif
#ifdef __STDCPP_FLOAT128_T__
/// Explicit specialization for _Float128.
template<>
struct atomic<_Float128> : __atomic_float<_Float128>
{
  atomic() noexcept = default;

  /// Construct with an initial value; usable in constant expressions.
  constexpr
  atomic(_Float128 __fp) noexcept : __atomic_float<_Float128>(__fp)
  { }

  atomic& operator=(const atomic&) volatile = delete;
  atomic& operator=(const atomic&) = delete;

  // Assignment from a plain _Float128 comes from the base class.
  using __atomic_float<_Float128>::operator=;
};
#endif
#ifdef __STDCPP_BFLOAT16_T__
/// Explicit specialization for the bfloat16 extended floating-point type.
template<>
struct atomic<__gnu_cxx::__bfloat16_t> : __atomic_float<__gnu_cxx::__bfloat16_t>
{
  atomic() noexcept = default;

  /// Construct with an initial value; usable in constant expressions.
  constexpr
  atomic(__gnu_cxx::__bfloat16_t __fp) noexcept : __atomic_float<__gnu_cxx::__bfloat16_t>(__fp)
  { }

  atomic& operator=(const atomic&) volatile = delete;
  atomic& operator=(const atomic&) = delete;

  // Assignment from a plain __bfloat16_t comes from the base class.
  using __atomic_float<__gnu_cxx::__bfloat16_t>::operator=;
};
#endif
  1400. #define __cpp_lib_atomic_ref 201806L
  1401. /// Class template to provide atomic operations on a non-atomic variable.
  1402. template<typename _Tp>
  1403. struct atomic_ref : __atomic_ref<_Tp>
  1404. {
  1405. explicit
  1406. atomic_ref(_Tp& __t) noexcept : __atomic_ref<_Tp>(__t)
  1407. { }
  1408. atomic_ref& operator=(const atomic_ref&) = delete;
  1409. atomic_ref(const atomic_ref&) = default;
  1410. using __atomic_ref<_Tp>::operator=;
  1411. };
#define __cpp_lib_atomic_lock_free_type_aliases 201907L
// Select the underlying types for atomic_signed_lock_free and
// atomic_unsigned_lock_free.
#ifdef _GLIBCXX_HAVE_PLATFORM_WAIT
// Prefer the platform's native wait type so these aliases can use the
// efficient platform wait/notify path.
using atomic_signed_lock_free
  = atomic<make_signed_t<__detail::__platform_wait_t>>;
using atomic_unsigned_lock_free
  = atomic<make_unsigned_t<__detail::__platform_wait_t>>;
#elif ATOMIC_INT_LOCK_FREE || !(ATOMIC_LONG_LOCK_FREE || ATOMIC_CHAR_LOCK_FREE)
// int if it is lock-free, and also as the fallback when none of
// int/long/char is.
using atomic_signed_lock_free = atomic<signed int>;
using atomic_unsigned_lock_free = atomic<unsigned int>;
#elif ATOMIC_LONG_LOCK_FREE
using atomic_signed_lock_free = atomic<signed long>;
using atomic_unsigned_lock_free = atomic<unsigned long>;
#elif ATOMIC_CHAR_LOCK_FREE
using atomic_signed_lock_free = atomic<signed char>;
using atomic_unsigned_lock_free = atomic<unsigned char>;
#endif
  1428. #endif // C++2a
  1429. /// @} group atomics
  1430. _GLIBCXX_END_NAMESPACE_VERSION
  1431. } // namespace
  1432. #endif // C++11
  1433. #endif // _GLIBCXX_ATOMIC