shared_ptr_atomic.h 23 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853
  1. // shared_ptr atomic access -*- C++ -*-
  2. // Copyright (C) 2014-2023 Free Software Foundation, Inc.
  3. //
  4. // This file is part of the GNU ISO C++ Library. This library is free
  5. // software; you can redistribute it and/or modify it under the
  6. // terms of the GNU General Public License as published by the
  7. // Free Software Foundation; either version 3, or (at your option)
  8. // any later version.
  9. // This library is distributed in the hope that it will be useful,
  10. // but WITHOUT ANY WARRANTY; without even the implied warranty of
  11. // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  12. // GNU General Public License for more details.
  13. // Under Section 7 of GPL version 3, you are granted additional
  14. // permissions described in the GCC Runtime Library Exception, version
  15. // 3.1, as published by the Free Software Foundation.
  16. // You should have received a copy of the GNU General Public License and
  17. // a copy of the GCC Runtime Library Exception along with this program;
  18. // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
  19. // <http://www.gnu.org/licenses/>.
  20. /** @file bits/shared_ptr_atomic.h
  21. * This is an internal header file, included by other library headers.
  22. * Do not attempt to use it directly. @headername{memory}
  23. */
  24. #ifndef _SHARED_PTR_ATOMIC_H
  25. #define _SHARED_PTR_ATOMIC_H 1
  26. #include <bits/atomic_base.h>
  27. // Annotations for the custom locking in atomic<shared_ptr<T>>.
  28. #if defined _GLIBCXX_TSAN && __has_include(<sanitizer/tsan_interface.h>)
  29. #include <sanitizer/tsan_interface.h>
  30. #define _GLIBCXX_TSAN_MUTEX_DESTROY(X) \
  31. __tsan_mutex_destroy(X, __tsan_mutex_not_static)
  32. #define _GLIBCXX_TSAN_MUTEX_TRY_LOCK(X) \
  33. __tsan_mutex_pre_lock(X, __tsan_mutex_not_static|__tsan_mutex_try_lock)
  34. #define _GLIBCXX_TSAN_MUTEX_TRY_LOCK_FAILED(X) __tsan_mutex_post_lock(X, \
  35. __tsan_mutex_not_static|__tsan_mutex_try_lock_failed, 0)
  36. #define _GLIBCXX_TSAN_MUTEX_LOCKED(X) \
  37. __tsan_mutex_post_lock(X, __tsan_mutex_not_static, 0)
  38. #define _GLIBCXX_TSAN_MUTEX_PRE_UNLOCK(X) __tsan_mutex_pre_unlock(X, 0)
  39. #define _GLIBCXX_TSAN_MUTEX_POST_UNLOCK(X) __tsan_mutex_post_unlock(X, 0)
  40. #define _GLIBCXX_TSAN_MUTEX_PRE_SIGNAL(X) __tsan_mutex_pre_signal(X, 0)
  41. #define _GLIBCXX_TSAN_MUTEX_POST_SIGNAL(X) __tsan_mutex_post_signal(X, 0)
  42. #else
  43. #define _GLIBCXX_TSAN_MUTEX_DESTROY(X)
  44. #define _GLIBCXX_TSAN_MUTEX_TRY_LOCK(X)
  45. #define _GLIBCXX_TSAN_MUTEX_TRY_LOCK_FAILED(X)
  46. #define _GLIBCXX_TSAN_MUTEX_LOCKED(X)
  47. #define _GLIBCXX_TSAN_MUTEX_PRE_UNLOCK(X)
  48. #define _GLIBCXX_TSAN_MUTEX_POST_UNLOCK(X)
  49. #define _GLIBCXX_TSAN_MUTEX_PRE_SIGNAL(X)
  50. #define _GLIBCXX_TSAN_MUTEX_POST_SIGNAL(X)
  51. #endif
  52. namespace std _GLIBCXX_VISIBILITY(default)
  53. {
  54. _GLIBCXX_BEGIN_NAMESPACE_VERSION
  55. /**
  56. * @addtogroup pointer_abstractions
  57. * @relates shared_ptr
  58. * @{
  59. */
  /// @cond undocumented

  // RAII lock type used by the non-member shared_ptr atomic access
  // functions below to serialize access to a shared_ptr object.
  // The constructors and destructor are defined in the library, not in
  // this header; presumably they acquire per-address mutexes selected
  // by _M_key1/_M_key2 — TODO confirm against src/c++11/shared_ptr.cc.
  struct _Sp_locker
  {
    _Sp_locker(const _Sp_locker&) = delete;
    _Sp_locker& operator=(const _Sp_locker&) = delete;

#ifdef __GTHREADS
    // Lock the entry for a single shared_ptr object.
    explicit
    _Sp_locker(const void*) noexcept;
    // Lock the entries for two shared_ptr objects (used by the
    // compare-exchange functions, which access *__p and *__v).
    _Sp_locker(const void*, const void*) noexcept;
    ~_Sp_locker();

  private:
    // Keys identifying which internal locks were taken, so the
    // destructor can release them.
    unsigned char _M_key1;
    unsigned char _M_key2;
#else
    // Single-threaded configuration: locking is a no-op.
    explicit _Sp_locker(const void*, const void* = nullptr) { }
#endif
  };
  /// @endcond
  /**
   * @brief  Report whether shared_ptr atomic operations are lock-free.
   * @param  __p A non-null pointer to a shared_ptr object.
   * @return True if atomic access to @c *__p is lock-free, false otherwise.
   * @{
  */
  template<typename _Tp, _Lock_policy _Lp>
    inline bool
    atomic_is_lock_free(const __shared_ptr<_Tp, _Lp>*)
    {
#ifdef __GTHREADS
      // The non-member atomic access functions in this header use a
      // mutex (_Sp_locker), so access is only lock-free when the
      // program is effectively single-threaded.
      return __gthread_active_p() == 0;
#else
      // No thread support at all, so trivially lock-free.
      return true;
#endif
    }

  // shared_ptr overload: forwards to the __shared_ptr overload above,
  // using the default lock policy.
  template<typename _Tp>
    inline bool
    atomic_is_lock_free(const shared_ptr<_Tp>* __p)
    { return std::atomic_is_lock_free<_Tp, __default_lock_policy>(__p); }
  /// @}
  /**
   * @brief  Atomic load for shared_ptr objects.
   * @param  __p A non-null pointer to a shared_ptr object.
   * @return @c *__p
   *
   * The memory order shall not be `memory_order_release` or
   * `memory_order_acq_rel`.
   * @{
  */
  template<typename _Tp>
    inline shared_ptr<_Tp>
    atomic_load_explicit(const shared_ptr<_Tp>* __p, memory_order)
    {
      // The memory order argument is ignored: the _Sp_locker mutex
      // provides all required synchronization.  The lock is held while
      // the return value is copy-constructed from *__p.
      _Sp_locker __lock{__p};
      return *__p;
    }

  template<typename _Tp>
    inline shared_ptr<_Tp>
    atomic_load(const shared_ptr<_Tp>* __p)
    { return std::atomic_load_explicit(__p, memory_order_seq_cst); }

  // Equivalent overloads for the internal __shared_ptr template.
  template<typename _Tp, _Lock_policy _Lp>
    inline __shared_ptr<_Tp, _Lp>
    atomic_load_explicit(const __shared_ptr<_Tp, _Lp>* __p, memory_order)
    {
      _Sp_locker __lock{__p};
      return *__p;
    }

  template<typename _Tp, _Lock_policy _Lp>
    inline __shared_ptr<_Tp, _Lp>
    atomic_load(const __shared_ptr<_Tp, _Lp>* __p)
    { return std::atomic_load_explicit(__p, memory_order_seq_cst); }
  /// @}
  /**
   * @brief  Atomic store for shared_ptr objects.
   * @param  __p A non-null pointer to a shared_ptr object.
   * @param  __r The value to store.
   *
   * The memory order shall not be `memory_order_acquire` or
   * `memory_order_acq_rel`.
   * @{
  */
  template<typename _Tp>
    inline void
    atomic_store_explicit(shared_ptr<_Tp>* __p, shared_ptr<_Tp> __r,
			  memory_order)
    {
      _Sp_locker __lock{__p};
      // Swap rather than assign: the old value ends up in the local
      // __r, which is destroyed after __lock, so the managed object is
      // never destroyed (and arbitrary destructors never run) while the
      // lock is held.
      __p->swap(__r); // use swap so that **__p not destroyed while lock held
    }

  template<typename _Tp>
    inline void
    atomic_store(shared_ptr<_Tp>* __p, shared_ptr<_Tp> __r)
    { std::atomic_store_explicit(__p, std::move(__r), memory_order_seq_cst); }

  // Equivalent overloads for the internal __shared_ptr template.
  template<typename _Tp, _Lock_policy _Lp>
    inline void
    atomic_store_explicit(__shared_ptr<_Tp, _Lp>* __p,
			  __shared_ptr<_Tp, _Lp> __r,
			  memory_order)
    {
      _Sp_locker __lock{__p};
      __p->swap(__r); // use swap so that **__p not destroyed while lock held
    }

  template<typename _Tp, _Lock_policy _Lp>
    inline void
    atomic_store(__shared_ptr<_Tp, _Lp>* __p, __shared_ptr<_Tp, _Lp> __r)
    { std::atomic_store_explicit(__p, std::move(__r), memory_order_seq_cst); }
  /// @}
  /**
   * @brief  Atomic exchange for shared_ptr objects.
   * @param  __p A non-null pointer to a shared_ptr object.
   * @param  __r New value to store in `*__p`.
   * @return The original value of `*__p`
   * @{
  */
  template<typename _Tp>
    inline shared_ptr<_Tp>
    atomic_exchange_explicit(shared_ptr<_Tp>* __p, shared_ptr<_Tp> __r,
			     memory_order)
    {
      _Sp_locker __lock{__p};
      // After the swap, __r holds the previous value of *__p; returning
      // it hands ownership of the old value to the caller, so nothing
      // is released while the lock is held.
      __p->swap(__r);
      return __r;
    }

  template<typename _Tp>
    inline shared_ptr<_Tp>
    atomic_exchange(shared_ptr<_Tp>* __p, shared_ptr<_Tp> __r)
    {
      return std::atomic_exchange_explicit(__p, std::move(__r),
					   memory_order_seq_cst);
    }

  // Equivalent overloads for the internal __shared_ptr template.
  template<typename _Tp, _Lock_policy _Lp>
    inline __shared_ptr<_Tp, _Lp>
    atomic_exchange_explicit(__shared_ptr<_Tp, _Lp>* __p,
			     __shared_ptr<_Tp, _Lp> __r,
			     memory_order)
    {
      _Sp_locker __lock{__p};
      __p->swap(__r);
      return __r;
    }

  template<typename _Tp, _Lock_policy _Lp>
    inline __shared_ptr<_Tp, _Lp>
    atomic_exchange(__shared_ptr<_Tp, _Lp>* __p, __shared_ptr<_Tp, _Lp> __r)
    {
      return std::atomic_exchange_explicit(__p, std::move(__r),
					   memory_order_seq_cst);
    }
  /// @}
  /**
   * @brief  Atomic compare-and-swap for shared_ptr objects.
   * @param  __p A non-null pointer to a shared_ptr object.
   * @param  __v A non-null pointer to a shared_ptr object.
   * @param  __w The value to store in `*__p` on success.
   * @return True if `*__p` was equivalent to `*__v`, false otherwise.
   *
   * On failure, `*__v` is updated with the observed value of `*__p`.
   * "Equivalent" means both the stored pointer and the ownership
   * (control block) are the same.
   *
   * The memory order for failure shall not be `memory_order_release` or
   * `memory_order_acq_rel`.
   * @{
  */
  template<typename _Tp>
    bool
    atomic_compare_exchange_strong_explicit(shared_ptr<_Tp>* __p,
					    shared_ptr<_Tp>* __v,
					    shared_ptr<_Tp> __w,
					    memory_order,
					    memory_order)
    {
      // __x is declared before __lock so it is destroyed after the lock
      // is released: the displaced value's reference count (and possibly
      // the managed object) must not be dropped while the lock is held.
      shared_ptr<_Tp> __x; // goes out of scope after __lock
      _Sp_locker __lock{__p, __v};
      // Compare both pointer value (==) and ownership (owner_less in
      // both directions, i.e. neither owns-before the other).
      owner_less<shared_ptr<_Tp>> __less;
      if (*__p == *__v && !__less(*__p, *__v) && !__less(*__v, *__p))
	{
	  __x = std::move(*__p);
	  *__p = std::move(__w);
	  return true;
	}
      // Failure: load the observed value into *__v, parking its old
      // value in __x until after unlock.
      __x = std::move(*__v);
      *__v = *__p;
      return false;
    }

  template<typename _Tp>
    inline bool
    atomic_compare_exchange_strong(shared_ptr<_Tp>* __p, shared_ptr<_Tp>* __v,
				   shared_ptr<_Tp> __w)
    {
      return std::atomic_compare_exchange_strong_explicit(__p, __v,
	  std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
    }

  // The weak forms may fail spuriously, so forwarding to the strong
  // form is a conforming implementation.
  template<typename _Tp>
    inline bool
    atomic_compare_exchange_weak_explicit(shared_ptr<_Tp>* __p,
					  shared_ptr<_Tp>* __v,
					  shared_ptr<_Tp> __w,
					  memory_order __success,
					  memory_order __failure)
    {
      return std::atomic_compare_exchange_strong_explicit(__p, __v,
	  std::move(__w), __success, __failure);
    }

  template<typename _Tp>
    inline bool
    atomic_compare_exchange_weak(shared_ptr<_Tp>* __p, shared_ptr<_Tp>* __v,
				 shared_ptr<_Tp> __w)
    {
      return std::atomic_compare_exchange_weak_explicit(__p, __v,
	  std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
    }
  // Equivalent overloads for the internal __shared_ptr template; see the
  // shared_ptr versions above for the full contract.
  template<typename _Tp, _Lock_policy _Lp>
    bool
    atomic_compare_exchange_strong_explicit(__shared_ptr<_Tp, _Lp>* __p,
					    __shared_ptr<_Tp, _Lp>* __v,
					    __shared_ptr<_Tp, _Lp> __w,
					    memory_order,
					    memory_order)
    {
      // Declared before __lock so the displaced value is only released
      // after the lock has been dropped.
      __shared_ptr<_Tp, _Lp> __x; // goes out of scope after __lock
      _Sp_locker __lock{__p, __v};
      // Equivalence = equal stored pointers and equal ownership.
      owner_less<__shared_ptr<_Tp, _Lp>> __less;
      if (*__p == *__v && !__less(*__p, *__v) && !__less(*__v, *__p))
	{
	  __x = std::move(*__p);
	  *__p = std::move(__w);
	  return true;
	}
      // Failure: update *__v with the observed value.
      __x = std::move(*__v);
      *__v = *__p;
      return false;
    }

  template<typename _Tp, _Lock_policy _Lp>
    inline bool
    atomic_compare_exchange_strong(__shared_ptr<_Tp, _Lp>* __p,
				   __shared_ptr<_Tp, _Lp>* __v,
				   __shared_ptr<_Tp, _Lp> __w)
    {
      return std::atomic_compare_exchange_strong_explicit(__p, __v,
	  std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
    }

  // Weak forms forward to the strong implementation (spurious failure
  // is permitted but not required).
  template<typename _Tp, _Lock_policy _Lp>
    inline bool
    atomic_compare_exchange_weak_explicit(__shared_ptr<_Tp, _Lp>* __p,
					  __shared_ptr<_Tp, _Lp>* __v,
					  __shared_ptr<_Tp, _Lp> __w,
					  memory_order __success,
					  memory_order __failure)
    {
      return std::atomic_compare_exchange_strong_explicit(__p, __v,
	  std::move(__w), __success, __failure);
    }

  template<typename _Tp, _Lock_policy _Lp>
    inline bool
    atomic_compare_exchange_weak(__shared_ptr<_Tp, _Lp>* __p,
				 __shared_ptr<_Tp, _Lp>* __v,
				 __shared_ptr<_Tp, _Lp> __w)
    {
      return std::atomic_compare_exchange_weak_explicit(__p, __v,
	  std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
    }
  /// @}

  /// @} group pointer_abstractions
  318. #if __cplusplus >= 202002L
  319. # define __cpp_lib_atomic_shared_ptr 201711L
  // Forward declaration of the std::atomic primary template, needed by
  // the friend declaration in _Sp_atomic and the specializations below.
  template<typename _Tp>
    struct atomic;

  /**
   * @addtogroup pointer_abstractions
   * @relates shared_ptr
   * @{
  */

  // Variable template that is true only for specializations of
  // std::shared_ptr.  Used by _Sp_atomic to select between the shared
  // and weak reference-count operations.
  template<typename _Up>
    static constexpr bool __is_shared_ptr = false;
  template<typename _Up>
    static constexpr bool __is_shared_ptr<shared_ptr<_Up>> = true;
  // Common implementation for atomic<shared_ptr<T>> and
  // atomic<weak_ptr<T>>.  _Tp is shared_ptr<T> or weak_ptr<T>.
  // Stores the element pointer and the reference count separately; the
  // reference-count pointer doubles as a spin lock (see _Atomic_count).
  template<typename _Tp>
    class _Sp_atomic
    {
      using value_type = _Tp;

      friend struct atomic<_Tp>;

      // An atomic version of __shared_count<> and __weak_count<>.
      // Stores a _Sp_counted_base<>* but uses the LSB as a lock.
      struct _Atomic_count
      {
	// Either __shared_count<> or __weak_count<>
	using __count_type = decltype(_Tp::_M_refcount);

	// _Sp_counted_base<>*
	using pointer = decltype(__count_type::_M_pi);

	// Ensure we can use the LSB as the lock bit.
	static_assert(alignof(remove_pointer_t<pointer>) > 1);

	constexpr _Atomic_count() noexcept = default;

	// Take ownership of the count from __c, leaving __c empty.
	explicit
	_Atomic_count(__count_type&& __c) noexcept
	: _M_val(reinterpret_cast<uintptr_t>(__c._M_pi))
	{
	  __c._M_pi = nullptr;
	}

	~_Atomic_count()
	{
	  auto __val = _M_val.load(memory_order_relaxed);
	  _GLIBCXX_TSAN_MUTEX_DESTROY(&_M_val);
	  // Must not be destroyed while locked.
	  __glibcxx_assert(!(__val & _S_lock_bit));
	  if (auto __pi = reinterpret_cast<pointer>(__val))
	    {
	      // Drop the strong count for shared_ptr, the weak count
	      // for weak_ptr.
	      if constexpr (__is_shared_ptr<_Tp>)
		__pi->_M_release();
	      else
		__pi->_M_weak_release();
	    }
	}

	_Atomic_count(const _Atomic_count&) = delete;
	_Atomic_count& operator=(const _Atomic_count&) = delete;

	// Precondition: Caller does not hold lock!
	// Returns the raw pointer value without the lock bit set.
	pointer
	lock(memory_order __o) const noexcept
	{
	  // To acquire the lock we flip the LSB from 0 to 1.

	  auto __current = _M_val.load(memory_order_relaxed);
	  // Spin read-only until the lock bit is observed clear, to
	  // avoid hammering the cache line with failed CAS attempts.
	  while (__current & _S_lock_bit)
	    {
#if __cpp_lib_atomic_wait
	      __detail::__thread_relax();
#endif
	      __current = _M_val.load(memory_order_relaxed);
	    }

	  _GLIBCXX_TSAN_MUTEX_TRY_LOCK(&_M_val);

	  while (!_M_val.compare_exchange_strong(__current,
						 __current | _S_lock_bit,
						 __o,
						 memory_order_relaxed))
	    {
	      _GLIBCXX_TSAN_MUTEX_TRY_LOCK_FAILED(&_M_val);
#if __cpp_lib_atomic_wait
	      __detail::__thread_relax();
#endif
	      // The CAS failure loaded the current value (possibly with
	      // the lock bit set by another thread); retry expecting it
	      // unlocked.
	      __current = __current & ~_S_lock_bit;
	      _GLIBCXX_TSAN_MUTEX_TRY_LOCK(&_M_val);
	    }
	  _GLIBCXX_TSAN_MUTEX_LOCKED(&_M_val);
	  return reinterpret_cast<pointer>(__current);
	}

	// Precondition: caller holds lock!
	void
	unlock(memory_order __o) const noexcept
	{
	  _GLIBCXX_TSAN_MUTEX_PRE_UNLOCK(&_M_val);
	  // The lock bit is known to be set, so subtracting 1 clears it
	  // without disturbing the pointer bits.
	  _M_val.fetch_sub(1, __o);
	  _GLIBCXX_TSAN_MUTEX_POST_UNLOCK(&_M_val);
	}

	// Swaps the values of *this and __c, and unlocks *this.
	// Precondition: caller holds lock!
	void
	_M_swap_unlock(__count_type& __c, memory_order __o) noexcept
	{
	  // The exchange publishes the new value and releases the lock
	  // in one step, so it needs at least release ordering.
	  if (__o != memory_order_seq_cst)
	    __o = memory_order_release;
	  auto __x = reinterpret_cast<uintptr_t>(__c._M_pi);
	  _GLIBCXX_TSAN_MUTEX_PRE_UNLOCK(&_M_val);
	  __x = _M_val.exchange(__x, __o);
	  _GLIBCXX_TSAN_MUTEX_POST_UNLOCK(&_M_val);
	  // Hand the previous count to __c (mask out our lock bit).
	  __c._M_pi = reinterpret_cast<pointer>(__x & ~_S_lock_bit);
	}

#if __cpp_lib_atomic_wait
	// Unlocks *this, then waits for the value to change.
	// Precondition: caller holds lock!
	void
	_M_wait_unlock(memory_order __o) const noexcept
	{
	  _GLIBCXX_TSAN_MUTEX_PRE_UNLOCK(&_M_val);
	  auto __v = _M_val.fetch_sub(1, memory_order_relaxed);
	  _GLIBCXX_TSAN_MUTEX_POST_UNLOCK(&_M_val);
	  // Wait against the unlocked value, ignoring the lock bit.
	  _M_val.wait(__v & ~_S_lock_bit, __o);
	}

	void
	notify_one() noexcept
	{
	  _GLIBCXX_TSAN_MUTEX_PRE_SIGNAL(&_M_val);
	  _M_val.notify_one();
	  _GLIBCXX_TSAN_MUTEX_POST_SIGNAL(&_M_val);
	}

	void
	notify_all() noexcept
	{
	  _GLIBCXX_TSAN_MUTEX_PRE_SIGNAL(&_M_val);
	  _M_val.notify_all();
	  _GLIBCXX_TSAN_MUTEX_POST_SIGNAL(&_M_val);
	}
#endif

      private:
	// The _Sp_counted_base<>* (alignment > 1, so its LSB is free)
	// with the LSB used as the lock flag.
	mutable __atomic_base<uintptr_t> _M_val{0};
	static constexpr uintptr_t _S_lock_bit{1};
      };

      typename _Tp::element_type* _M_ptr = nullptr; // the stored pointer
      _Atomic_count _M_refcount;		    // count + embedded lock

      // If __p is non-null, add a (strong or weak) reference to it.
      // Returns __p.
      static typename _Atomic_count::pointer
      _S_add_ref(typename _Atomic_count::pointer __p)
      {
	if (__p)
	  {
	    if constexpr (__is_shared_ptr<_Tp>)
	      __p->_M_add_ref_copy();
	    else
	      __p->_M_weak_add_ref();
	  }
	return __p;
      }

      constexpr _Sp_atomic() noexcept = default;

      // Take ownership of __r's pointer and count.
      explicit
      _Sp_atomic(value_type __r) noexcept
      : _M_ptr(__r._M_ptr), _M_refcount(std::move(__r._M_refcount))
      { }

      ~_Sp_atomic() = default;

      _Sp_atomic(const _Sp_atomic&) = delete;
      void operator=(const _Sp_atomic&) = delete;

      // Returns a copy of the stored value, bumping its refcount under
      // the lock.
      value_type
      load(memory_order __o) const noexcept
      {
	__glibcxx_assert(__o != memory_order_release
			   && __o != memory_order_acq_rel);
	// Ensure that the correct value of _M_ptr is visible after locking,
	// by upgrading relaxed or consume to acquire.
	if (__o != memory_order_seq_cst)
	  __o = memory_order_acquire;

	value_type __ret;
	auto __pi = _M_refcount.lock(__o);
	__ret._M_ptr = _M_ptr;
	__ret._M_refcount._M_pi = _S_add_ref(__pi);
	_M_refcount.unlock(memory_order_relaxed);
	return __ret;
      }

      // Exchange the stored value with __r.
      void
      swap(value_type& __r, memory_order __o) noexcept
      {
	_M_refcount.lock(memory_order_acquire);
	std::swap(_M_ptr, __r._M_ptr);
	_M_refcount._M_swap_unlock(__r._M_refcount, __o);
      }

      // On success stores __desired; on failure loads the current value
      // into __expected.  Comparison is by pointer and control block.
      bool
      compare_exchange_strong(value_type& __expected, value_type __desired,
			      memory_order __o, memory_order __o2) noexcept
      {
	bool __result = true;
	auto __pi = _M_refcount.lock(memory_order_acquire);
	if (_M_ptr == __expected._M_ptr
	      && __pi == __expected._M_refcount._M_pi)
	  {
	    _M_ptr = __desired._M_ptr;
	    _M_refcount._M_swap_unlock(__desired._M_refcount, __o);
	  }
	else
	  {
	    // Move the old __expected into __sink so it is released
	    // only after we have unlocked.
	    _Tp __sink = std::move(__expected);
	    __expected._M_ptr = _M_ptr;
	    __expected._M_refcount._M_pi = _S_add_ref(__pi);
	    _M_refcount.unlock(__o2);
	    __result = false;
	  }
	return __result;
      }

#if __cpp_lib_atomic_wait
      // Block until the stored value differs from __old.
      void
      wait(value_type __old, memory_order __o) const noexcept
      {
	auto __pi = _M_refcount.lock(memory_order_acquire);
	if (_M_ptr == __old._M_ptr && __pi == __old._M_refcount._M_pi)
	  _M_refcount._M_wait_unlock(__o);
	else
	  _M_refcount.unlock(memory_order_relaxed);
      }

      void
      notify_one() noexcept
      {
	_M_refcount.notify_one();
      }

      void
      notify_all() noexcept
      {
	_M_refcount.notify_all();
      }
#endif
    };
  // Specialization of std::atomic for std::shared_ptr<T> (C++20).
  // Never lock-free: all operations go through _Sp_atomic, which uses a
  // spin lock embedded in the reference-count pointer.
  template<typename _Tp>
    struct atomic<shared_ptr<_Tp>>
    {
    public:
      using value_type = shared_ptr<_Tp>;

      static constexpr bool is_always_lock_free = false;

      bool
      is_lock_free() const noexcept
      { return false; }

      constexpr atomic() noexcept = default;

      // _GLIBCXX_RESOLVE_LIB_DEFECTS
      // 3661. constinit atomic<shared_ptr<T>> a(nullptr); should work
      constexpr atomic(nullptr_t) noexcept : atomic() { }

      atomic(shared_ptr<_Tp> __r) noexcept
      : _M_impl(std::move(__r))
      { }

      atomic(const atomic&) = delete;
      void operator=(const atomic&) = delete;

      shared_ptr<_Tp>
      load(memory_order __o = memory_order_seq_cst) const noexcept
      { return _M_impl.load(__o); }

      operator shared_ptr<_Tp>() const noexcept
      { return _M_impl.load(memory_order_seq_cst); }

      // Store via swap: the old value ends up in __desired and is
      // released after the internal lock is dropped.
      void
      store(shared_ptr<_Tp> __desired,
	    memory_order __o = memory_order_seq_cst) noexcept
      { _M_impl.swap(__desired, __o); }

      void
      operator=(shared_ptr<_Tp> __desired) noexcept
      { _M_impl.swap(__desired, memory_order_seq_cst); }

      // _GLIBCXX_RESOLVE_LIB_DEFECTS
      // 3893. LWG 3661 broke atomic<shared_ptr<T>> a; a = nullptr;
      void
      operator=(nullptr_t) noexcept
      { store(nullptr); }

      shared_ptr<_Tp>
      exchange(shared_ptr<_Tp> __desired,
	       memory_order __o = memory_order_seq_cst) noexcept
      {
	_M_impl.swap(__desired, __o);
	return __desired; // now holds the previous value
      }

      bool
      compare_exchange_strong(shared_ptr<_Tp>& __expected,
			      shared_ptr<_Tp> __desired,
			      memory_order __o, memory_order __o2) noexcept
      {
	return _M_impl.compare_exchange_strong(__expected, __desired, __o, __o2);
      }

      bool
      compare_exchange_strong(value_type& __expected, value_type __desired,
			      memory_order __o = memory_order_seq_cst) noexcept
      {
	// Derive the failure order from the success order: failure may
	// not be a release operation.
	memory_order __o2;
	switch (__o)
	{
	case memory_order_acq_rel:
	  __o2 = memory_order_acquire;
	  break;
	case memory_order_release:
	  __o2 = memory_order_relaxed;
	  break;
	default:
	  __o2 = __o;
	}
	return compare_exchange_strong(__expected, std::move(__desired),
				       __o, __o2);
      }

      // The weak forms may fail spuriously, so the strong implementation
      // is a valid implementation for them.
      bool
      compare_exchange_weak(value_type& __expected, value_type __desired,
			    memory_order __o, memory_order __o2) noexcept
      {
	return compare_exchange_strong(__expected, std::move(__desired),
				       __o, __o2);
      }

      bool
      compare_exchange_weak(value_type& __expected, value_type __desired,
			    memory_order __o = memory_order_seq_cst) noexcept
      {
	return compare_exchange_strong(__expected, std::move(__desired), __o);
      }

#if __cpp_lib_atomic_wait
      // Block until the stored value is observed to differ from __old.
      void
      wait(value_type __old,
	   memory_order __o = memory_order_seq_cst) const noexcept
      {
	_M_impl.wait(std::move(__old), __o);
      }

      void
      notify_one() noexcept
      {
	_M_impl.notify_one();
      }

      void
      notify_all() noexcept
      {
	_M_impl.notify_all();
      }
#endif

    private:
      _Sp_atomic<shared_ptr<_Tp>> _M_impl;
    };
  639. template<typename _Tp>
  640. struct atomic<weak_ptr<_Tp>>
  641. {
  642. public:
  643. using value_type = weak_ptr<_Tp>;
  644. static constexpr bool is_always_lock_free = false;
  645. bool
  646. is_lock_free() const noexcept
  647. { return false; }
  648. constexpr atomic() noexcept = default;
  649. atomic(weak_ptr<_Tp> __r) noexcept
  650. : _M_impl(move(__r))
  651. { }
  652. atomic(const atomic&) = delete;
  653. void operator=(const atomic&) = delete;
  654. weak_ptr<_Tp>
  655. load(memory_order __o = memory_order_seq_cst) const noexcept
  656. { return _M_impl.load(__o); }
  657. operator weak_ptr<_Tp>() const noexcept
  658. { return _M_impl.load(memory_order_seq_cst); }
  659. void
  660. store(weak_ptr<_Tp> __desired,
  661. memory_order __o = memory_order_seq_cst) noexcept
  662. { _M_impl.swap(__desired, __o); }
  663. void
  664. operator=(weak_ptr<_Tp> __desired) noexcept
  665. { _M_impl.swap(__desired, memory_order_seq_cst); }
  666. weak_ptr<_Tp>
  667. exchange(weak_ptr<_Tp> __desired,
  668. memory_order __o = memory_order_seq_cst) noexcept
  669. {
  670. _M_impl.swap(__desired, __o);
  671. return __desired;
  672. }
  673. bool
  674. compare_exchange_strong(weak_ptr<_Tp>& __expected,
  675. weak_ptr<_Tp> __desired,
  676. memory_order __o, memory_order __o2) noexcept
  677. {
  678. return _M_impl.compare_exchange_strong(__expected, __desired, __o, __o2);
  679. }
  680. bool
  681. compare_exchange_strong(value_type& __expected, value_type __desired,
  682. memory_order __o = memory_order_seq_cst) noexcept
  683. {
  684. memory_order __o2;
  685. switch (__o)
  686. {
  687. case memory_order_acq_rel:
  688. __o2 = memory_order_acquire;
  689. break;
  690. case memory_order_release:
  691. __o2 = memory_order_relaxed;
  692. break;
  693. default:
  694. __o2 = __o;
  695. }
  696. return compare_exchange_strong(__expected, std::move(__desired),
  697. __o, __o2);
  698. }
  699. bool
  700. compare_exchange_weak(value_type& __expected, value_type __desired,
  701. memory_order __o, memory_order __o2) noexcept
  702. {
  703. return compare_exchange_strong(__expected, std::move(__desired),
  704. __o, __o2);
  705. }
  706. bool
  707. compare_exchange_weak(value_type& __expected, value_type __desired,
  708. memory_order __o = memory_order_seq_cst) noexcept
  709. {
  710. return compare_exchange_strong(__expected, std::move(__desired), __o);
  711. }
  712. #if __cpp_lib_atomic_wait
  713. void
  714. wait(value_type __old,
  715. memory_order __o = memory_order_seq_cst) const noexcept
  716. {
  717. _M_impl.wait(std::move(__old), __o);
  718. }
  719. void
  720. notify_one() noexcept
  721. {
  722. _M_impl.notify_one();
  723. }
  724. void
  725. notify_all() noexcept
  726. {
  727. _M_impl.notify_all();
  728. }
  729. #endif
  730. private:
  731. _Sp_atomic<weak_ptr<_Tp>> _M_impl;
  732. };
  733. /// @} group pointer_abstractions
  734. #endif // C++20
  735. _GLIBCXX_END_NAMESPACE_VERSION
  736. } // namespace
  737. #endif // _SHARED_PTR_ATOMIC_H