#ifndef _SHARED_PTR_ATOMIC_H
#define _SHARED_PTR_ATOMIC_H 1
#if defined _GLIBCXX_TSAN && __has_include(<sanitizer/tsan_interface.h>)
#include <sanitizer/tsan_interface.h>
#define _GLIBCXX_TSAN_MUTEX_DESTROY(X) \
  __tsan_mutex_destroy(X, __tsan_mutex_not_static)
#define _GLIBCXX_TSAN_MUTEX_TRY_LOCK(X) \
  __tsan_mutex_pre_lock(X, __tsan_mutex_not_static|__tsan_mutex_try_lock)
#define _GLIBCXX_TSAN_MUTEX_TRY_LOCK_FAILED(X) __tsan_mutex_post_lock(X, \
    __tsan_mutex_not_static|__tsan_mutex_try_lock_failed, 0)
#define _GLIBCXX_TSAN_MUTEX_LOCKED(X) \
  __tsan_mutex_post_lock(X, __tsan_mutex_not_static, 0)
#define _GLIBCXX_TSAN_MUTEX_PRE_UNLOCK(X) __tsan_mutex_pre_unlock(X, 0)
#define _GLIBCXX_TSAN_MUTEX_POST_UNLOCK(X) __tsan_mutex_post_unlock(X, 0)
#define _GLIBCXX_TSAN_MUTEX_PRE_SIGNAL(X) __tsan_mutex_pre_signal(X, 0)
#define _GLIBCXX_TSAN_MUTEX_POST_SIGNAL(X) __tsan_mutex_post_signal(X, 0)
#else
#define _GLIBCXX_TSAN_MUTEX_DESTROY(X)
#define _GLIBCXX_TSAN_MUTEX_TRY_LOCK(X)
#define _GLIBCXX_TSAN_MUTEX_TRY_LOCK_FAILED(X)
#define _GLIBCXX_TSAN_MUTEX_LOCKED(X)
#define _GLIBCXX_TSAN_MUTEX_PRE_UNLOCK(X)
#define _GLIBCXX_TSAN_MUTEX_POST_UNLOCK(X)
#define _GLIBCXX_TSAN_MUTEX_PRE_SIGNAL(X)
#define _GLIBCXX_TSAN_MUTEX_POST_SIGNAL(X)
#endif
namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION
  // Lock manager for the lock-based implementation of the non-member
  // shared_ptr atomic access functions below.
  struct _Sp_locker
  {
    _Sp_locker(const _Sp_locker&) = delete;
    _Sp_locker& operator=(const _Sp_locker&) = delete;

#ifdef __GTHREADS
    explicit
    _Sp_locker(const void*) noexcept;
    _Sp_locker(const void*, const void*) noexcept;
    ~_Sp_locker();

  private:
    unsigned char _M_key1;
    unsigned char _M_key2;
#else
    explicit _Sp_locker(const void*, const void* = nullptr) { }
#endif
  };
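  // Illustrative sketch only (not part of the upstream header): the
  // functions below serialize each access by constructing an _Sp_locker
  // from the address(es) involved and relying on its destructor to
  // release the lock(s):
  //
  //   shared_ptr<int> __copy_under_lock(const shared_ptr<int>* __p)
  //   {
  //     _Sp_locker __lock{__p}; // serializes access to *__p
  //     return *__p;            // copy made while the lock is held
  //   }                         // destructor releases the lock
  //
  // With __GTHREADS the constructors select mutexes from a small shared
  // pool keyed on the addresses; without threads they do nothing.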
  /// @brief Report whether atomic access to a shared_ptr is lock-free.
  template<typename _Tp, _Lock_policy _Lp>
    inline bool
    atomic_is_lock_free(const __shared_ptr<_Tp, _Lp>* __p)
    {
#ifdef __GTHREADS
      return __gthread_active_p() == 0;
#else
      return true;
#endif
    }

  template<typename _Tp>
    inline bool
    atomic_is_lock_free(const shared_ptr<_Tp>* __p)
    { return std::atomic_is_lock_free<_Tp, __default_lock_policy>(__p); }
  /// @brief Atomic load for shared_ptr objects.
  /// @param __p A non-null pointer to a shared_ptr object.
  /// @return @c *__p
  template<typename _Tp>
    inline shared_ptr<_Tp>
    atomic_load_explicit(const shared_ptr<_Tp>* __p, memory_order)
    {
      _Sp_locker __lock{__p};
      return *__p;
    }

  template<typename _Tp>
    inline shared_ptr<_Tp>
    atomic_load(const shared_ptr<_Tp>* __p)
    { return std::atomic_load_explicit(__p, memory_order_seq_cst); }

  template<typename _Tp, _Lock_policy _Lp>
    inline __shared_ptr<_Tp, _Lp>
    atomic_load_explicit(const __shared_ptr<_Tp, _Lp>* __p, memory_order)
    {
      _Sp_locker __lock{__p};
      return *__p;
    }

  template<typename _Tp, _Lock_policy _Lp>
    inline __shared_ptr<_Tp, _Lp>
    atomic_load(const __shared_ptr<_Tp, _Lp>* __p)
    { return std::atomic_load_explicit(__p, memory_order_seq_cst); }
  /// @brief Atomic store for shared_ptr objects.
  /// @param __p A non-null pointer to a shared_ptr object.
  /// @param __r The value to store.
  template<typename _Tp>
    inline void
    atomic_store_explicit(shared_ptr<_Tp>* __p, shared_ptr<_Tp> __r,
			  memory_order)
    {
      _Sp_locker __lock{__p};
      __p->swap(__r); // use swap so that **__p not destroyed while lock held
    }

  template<typename _Tp>
    inline void
    atomic_store(shared_ptr<_Tp>* __p, shared_ptr<_Tp> __r)
    { std::atomic_store_explicit(__p, std::move(__r), memory_order_seq_cst); }

  template<typename _Tp, _Lock_policy _Lp>
    inline void
    atomic_store_explicit(__shared_ptr<_Tp, _Lp>* __p,
			  __shared_ptr<_Tp, _Lp> __r,
			  memory_order)
    {
      _Sp_locker __lock{__p};
      __p->swap(__r); // use swap so that **__p not destroyed while lock held
    }

  template<typename _Tp, _Lock_policy _Lp>
    inline void
    atomic_store(__shared_ptr<_Tp, _Lp>* __p, __shared_ptr<_Tp, _Lp> __r)
    { std::atomic_store_explicit(__p, std::move(__r), memory_order_seq_cst); }
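  // Illustrative sketch only: these overloads implement the C++11
  // free-function API for atomic shared_ptr access, e.g.
  //
  //   std::shared_ptr<int> __global;
  //
  //   void __writer()
  //   { std::atomic_store(&__global, std::make_shared<int>(42)); }
  //
  //   void __reader()
  //   {
  //     if (auto __sp = std::atomic_load(&__global))
  //       use(*__sp); // __sp keeps the object alive; use() is a placeholder
  //   }
  //
  // Every access to __global must go through these functions for the
  // protection to hold.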
  /// @brief Atomic exchange for shared_ptr objects.
  /// @param __p A non-null pointer to a shared_ptr object.
  /// @param __r The value to store.
  /// @return The previous value of @c *__p
  template<typename _Tp>
    inline shared_ptr<_Tp>
    atomic_exchange_explicit(shared_ptr<_Tp>* __p, shared_ptr<_Tp> __r,
			     memory_order)
    {
      _Sp_locker __lock{__p};
      __p->swap(__r);
      return __r;
    }

  template<typename _Tp>
    inline shared_ptr<_Tp>
    atomic_exchange(shared_ptr<_Tp>* __p, shared_ptr<_Tp> __r)
    {
      return std::atomic_exchange_explicit(__p, std::move(__r),
					   memory_order_seq_cst);
    }

  template<typename _Tp, _Lock_policy _Lp>
    inline __shared_ptr<_Tp, _Lp>
    atomic_exchange_explicit(__shared_ptr<_Tp, _Lp>* __p,
			     __shared_ptr<_Tp, _Lp> __r,
			     memory_order)
    {
      _Sp_locker __lock{__p};
      __p->swap(__r);
      return __r;
    }

  template<typename _Tp, _Lock_policy _Lp>
    inline __shared_ptr<_Tp, _Lp>
    atomic_exchange(__shared_ptr<_Tp, _Lp>* __p, __shared_ptr<_Tp, _Lp> __r)
    {
      return std::atomic_exchange_explicit(__p, std::move(__r),
					   memory_order_seq_cst);
    }
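  // Illustrative sketch only: atomic_exchange stores a new value and
  // returns the previous one in a single locked step, e.g.
  //
  //   // Take ownership of the current value, leaving an empty pointer:
  //   std::shared_ptr<int> __old
  //     = std::atomic_exchange(&__global, std::shared_ptr<int>{});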
  /// @brief Atomic compare-and-exchange for shared_ptr objects.
  /// @param __p A non-null pointer to a shared_ptr object.
  /// @param __v A non-null pointer to the expected value.
  /// @param __w The desired value.
  /// @return True if @c *__p was equivalent to @c *__v, false otherwise.
  template<typename _Tp>
    bool
    atomic_compare_exchange_strong_explicit(shared_ptr<_Tp>* __p,
					    shared_ptr<_Tp>* __v,
					    shared_ptr<_Tp> __w,
					    memory_order,
					    memory_order)
    {
      shared_ptr<_Tp> __x; // goes out of scope after __lock
      _Sp_locker __lock{__p, __v};
      owner_less<shared_ptr<_Tp>> __less;
      if (*__p == *__v && !__less(*__p, *__v) && !__less(*__v, *__p))
	{
	  __x = std::move(*__p);
	  *__p = std::move(__w);
	  return true;
	}
      __x = std::move(*__v);
      *__v = *__p;
      return false;
    }

  template<typename _Tp>
    inline bool
    atomic_compare_exchange_strong(shared_ptr<_Tp>* __p, shared_ptr<_Tp>* __v,
				   shared_ptr<_Tp> __w)
    {
      return std::atomic_compare_exchange_strong_explicit(__p, __v,
	  std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
    }

  template<typename _Tp>
    inline bool
    atomic_compare_exchange_weak_explicit(shared_ptr<_Tp>* __p,
					  shared_ptr<_Tp>* __v,
					  shared_ptr<_Tp> __w,
					  memory_order __success,
					  memory_order __failure)
    {
      return std::atomic_compare_exchange_strong_explicit(__p, __v,
	  std::move(__w), __success, __failure);
    }

  template<typename _Tp>
    inline bool
    atomic_compare_exchange_weak(shared_ptr<_Tp>* __p, shared_ptr<_Tp>* __v,
				 shared_ptr<_Tp> __w)
    {
      return std::atomic_compare_exchange_weak_explicit(__p, __v,
	  std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
    }

  template<typename _Tp, _Lock_policy _Lp>
    bool
    atomic_compare_exchange_strong_explicit(__shared_ptr<_Tp, _Lp>* __p,
					    __shared_ptr<_Tp, _Lp>* __v,
					    __shared_ptr<_Tp, _Lp> __w,
					    memory_order,
					    memory_order)
    {
      __shared_ptr<_Tp, _Lp> __x; // goes out of scope after __lock
      _Sp_locker __lock{__p, __v};
      owner_less<__shared_ptr<_Tp, _Lp>> __less;
      if (*__p == *__v && !__less(*__p, *__v) && !__less(*__v, *__p))
	{
	  __x = std::move(*__p);
	  *__p = std::move(__w);
	  return true;
	}
      __x = std::move(*__v);
      *__v = *__p;
      return false;
    }

  template<typename _Tp, _Lock_policy _Lp>
    inline bool
    atomic_compare_exchange_strong(__shared_ptr<_Tp, _Lp>* __p,
				   __shared_ptr<_Tp, _Lp>* __v,
				   __shared_ptr<_Tp, _Lp> __w)
    {
      return std::atomic_compare_exchange_strong_explicit(__p, __v,
	  std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
    }

  template<typename _Tp, _Lock_policy _Lp>
    inline bool
    atomic_compare_exchange_weak_explicit(__shared_ptr<_Tp, _Lp>* __p,
					  __shared_ptr<_Tp, _Lp>* __v,
					  __shared_ptr<_Tp, _Lp> __w,
					  memory_order __success,
					  memory_order __failure)
    {
      return std::atomic_compare_exchange_strong_explicit(__p, __v,
	  std::move(__w), __success, __failure);
    }

  template<typename _Tp, _Lock_policy _Lp>
    inline bool
    atomic_compare_exchange_weak(__shared_ptr<_Tp, _Lp>* __p,
				 __shared_ptr<_Tp, _Lp>* __v,
				 __shared_ptr<_Tp, _Lp> __w)
    {
      return std::atomic_compare_exchange_weak_explicit(__p, __v,
	  std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
    }
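  // Illustrative sketch only: the usual compare-exchange update loop also
  // works with these lock-based functions, e.g. prepending to a list:
  //
  //   struct _Node { int _M_v; std::shared_ptr<_Node> _M_next; };
  //   std::shared_ptr<_Node> __head;
  //
  //   void __push_front(int __v)
  //   {
  //     auto __n = std::make_shared<_Node>();
  //     __n->_M_v = __v;
  //     __n->_M_next = std::atomic_load(&__head);
  //     while (!std::atomic_compare_exchange_weak(&__head, &__n->_M_next,
  //                                               __n))
  //       { } // on failure, __n->_M_next is updated to the current head
  //   }
  //
  // Note that both the stored pointer value and the ownership (control
  // block) must match for the exchange to succeed.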
#if __cplusplus >= 202002L
# define __cpp_lib_atomic_shared_ptr 201711L
  template<typename _Tp>
    class atomic;

  template<typename _Up>
    static constexpr bool __is_shared_ptr = false;
  template<typename _Up>
    static constexpr bool __is_shared_ptr<shared_ptr<_Up>> = true;
  template<typename _Tp>
    class _Sp_atomic
    {
      using value_type = _Tp;

      friend class atomic<_Tp>;

      // An atomic version of __shared_count<> and __weak_count<>.
      // Stores a _Sp_counted_base<>* but uses the LSB as a lock.
      struct _Atomic_count
      {
	// Either __shared_count<> or __weak_count<>
	using __count_type = decltype(_Tp::_M_refcount);

	// _Sp_counted_base<>*
	using pointer = decltype(__count_type::_M_pi);

	// Ensure we can use the LSB as the lock bit.
	static_assert(alignof(remove_pointer_t<pointer>) > 1);

	constexpr _Atomic_count() noexcept = default;

	explicit
	_Atomic_count(__count_type&& __c) noexcept
	: _M_val(reinterpret_cast<uintptr_t>(__c._M_pi))
	{ __c._M_pi = nullptr; }

	~_Atomic_count()
	{
	  auto __val = _M_val.load(memory_order_relaxed);
	  _GLIBCXX_TSAN_MUTEX_DESTROY(&_M_val);
	  __glibcxx_assert(!(__val & _S_lock_bit));
	  if (auto __pi = reinterpret_cast<pointer>(__val))
	    {
	      if constexpr (__is_shared_ptr<_Tp>)
		__pi->_M_release();
	      else
		__pi->_M_weak_release();
	    }
	}

	_Atomic_count(const _Atomic_count&) = delete;
	_Atomic_count& operator=(const _Atomic_count&) = delete;
	// Precondition: caller does not hold the lock.
	// Returns the raw pointer value without the lock bit set.
	pointer
	lock(memory_order __o) const noexcept
	{
	  // To acquire the lock we flip the LSB from 0 to 1.

	  auto __current = _M_val.load(memory_order_relaxed);
	  while (__current & _S_lock_bit)
	    {
#if __cpp_lib_atomic_wait
	      __detail::__thread_relax();
#endif
	      __current = _M_val.load(memory_order_relaxed);
	    }

	  _GLIBCXX_TSAN_MUTEX_TRY_LOCK(&_M_val);

	  while (!_M_val.compare_exchange_strong(__current,
						 __current | _S_lock_bit,
						 __o,
						 memory_order_relaxed))
	    {
	      _GLIBCXX_TSAN_MUTEX_TRY_LOCK_FAILED(&_M_val);
#if __cpp_lib_atomic_wait
	      __detail::__thread_relax();
#endif
	      __current = __current & ~_S_lock_bit;
	      _GLIBCXX_TSAN_MUTEX_TRY_LOCK(&_M_val);
	    }
	  _GLIBCXX_TSAN_MUTEX_LOCKED(&_M_val);
	  return reinterpret_cast<pointer>(__current);
	}
	// Precondition: caller holds the lock.
	void
	unlock(memory_order __o) const noexcept
	{
	  _GLIBCXX_TSAN_MUTEX_PRE_UNLOCK(&_M_val);
	  _M_val.fetch_sub(1, __o);
	  _GLIBCXX_TSAN_MUTEX_POST_UNLOCK(&_M_val);
	}
	// Swaps the values of *this and __c, and unlocks *this.
	// Precondition: caller holds the lock.
	void
	_M_swap_unlock(__count_type& __c, memory_order __o) noexcept
	{
	  if (__o != memory_order_seq_cst)
	    __o = memory_order_release;
	  auto __x = reinterpret_cast<uintptr_t>(__c._M_pi);
	  _GLIBCXX_TSAN_MUTEX_PRE_UNLOCK(&_M_val);
	  __x = _M_val.exchange(__x, __o);
	  _GLIBCXX_TSAN_MUTEX_POST_UNLOCK(&_M_val);
	  __c._M_pi = reinterpret_cast<pointer>(__x & ~_S_lock_bit);
	}
#if __cpp_lib_atomic_wait
	// Precondition: caller holds the lock.
	void
	_M_wait_unlock(memory_order __o) const noexcept
	{
	  _GLIBCXX_TSAN_MUTEX_PRE_UNLOCK(&_M_val);
	  auto __v = _M_val.fetch_sub(1, memory_order_relaxed);
	  _GLIBCXX_TSAN_MUTEX_POST_UNLOCK(&_M_val);
	  _M_val.wait(__v & ~_S_lock_bit, __o);
	}

	void
	notify_one() noexcept
	{
	  _GLIBCXX_TSAN_MUTEX_PRE_SIGNAL(&_M_val);
	  _M_val.notify_one();
	  _GLIBCXX_TSAN_MUTEX_POST_SIGNAL(&_M_val);
	}

	void
	notify_all() noexcept
	{
	  _GLIBCXX_TSAN_MUTEX_PRE_SIGNAL(&_M_val);
	  _M_val.notify_all();
	  _GLIBCXX_TSAN_MUTEX_POST_SIGNAL(&_M_val);
	}
#endif
	mutable __atomic_base<uintptr_t> _M_val{0};
	static constexpr uintptr_t _S_lock_bit{1};
      };
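      // Illustrative note: _Atomic_count packs a control-block pointer and
      // a spin lock into one uintptr_t. Because the control block has
      // alignment greater than 1 (checked by the static_assert above),
      // the pointer's least significant bit is always zero and can serve
      // as the lock bit:
      //
      //   value: pppp...ppp0   unlocked, pointer is value & ~1
      //   value: pppp...ppp1   locked, owner may read/write _M_ptr
      //
      // lock() spins (politely, via __thread_relax() where available)
      // until it can flip the bit from 0 to 1; unlock() subtracts 1.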
      typename _Tp::element_type* _M_ptr = nullptr;
      _Atomic_count _M_refcount;

      static typename _Atomic_count::pointer
      _S_add_ref(typename _Atomic_count::pointer __p)
      {
	if (__p)
	  {
	    if constexpr (__is_shared_ptr<_Tp>)
	      __p->_M_add_ref_copy();
	    else
	      __p->_M_weak_add_ref();
	  }
	return __p;
      }

    public:
      constexpr _Sp_atomic() noexcept = default;

      explicit
      _Sp_atomic(value_type __r) noexcept
      : _M_ptr(__r._M_ptr), _M_refcount(std::move(__r._M_refcount))
      { }

      ~_Sp_atomic() = default;

      _Sp_atomic(const _Sp_atomic&) = delete;
      void operator=(const _Sp_atomic&) = delete;
      value_type
      load(memory_order __o) const noexcept
      {
	__glibcxx_assert(__o != memory_order_release
			   && __o != memory_order_acq_rel);
	// Ensure that the correct value of _M_ptr is visible after locking,
	// by upgrading relaxed or consume to acquire.
	if (__o != memory_order_seq_cst)
	  __o = memory_order_acquire;

	value_type __ret;
	auto __pi = _M_refcount.lock(__o);
	__ret._M_ptr = _M_ptr;
	__ret._M_refcount._M_pi = _S_add_ref(__pi);
	_M_refcount.unlock(memory_order_relaxed);
	return __ret;
      }
      void
      swap(value_type& __r, memory_order __o) noexcept
      {
	_M_refcount.lock(memory_order_acquire);
	std::swap(_M_ptr, __r._M_ptr);
	_M_refcount._M_swap_unlock(__r._M_refcount, __o);
      }
      bool
      compare_exchange_strong(value_type& __expected, value_type __desired,
			      memory_order __o, memory_order __o2) noexcept
      {
	bool __result = true;
	auto __pi = _M_refcount.lock(memory_order_acquire);
	if (_M_ptr == __expected._M_ptr
	      && __pi == __expected._M_refcount._M_pi)
	  {
	    _M_ptr = __desired._M_ptr;
	    _M_refcount._M_swap_unlock(__desired._M_refcount, __o);
	  }
	else
	  {
	    _Tp __sink = std::move(__expected); // destroyed after unlock
	    __expected._M_ptr = _M_ptr;
	    __expected._M_refcount._M_pi = _S_add_ref(__pi);
	    _M_refcount.unlock(__o2);
	    __result = false;
	  }
	return __result;
      }
#if __cpp_lib_atomic_wait
      void
      wait(value_type __old, memory_order __o) const noexcept
      {
	auto __pi = _M_refcount.lock(memory_order_acquire);
	if (_M_ptr == __old._M_ptr && __pi == __old._M_refcount._M_pi)
	  _M_refcount._M_wait_unlock(__o);
	else
	  _M_refcount.unlock(memory_order_relaxed);
      }

      void
      notify_one() noexcept
      { _M_refcount.notify_one(); }

      void
      notify_all() noexcept
      { _M_refcount.notify_all(); }
#endif
    };
  template<typename _Tp>
    class atomic<shared_ptr<_Tp>>
    {
    public:
      using value_type = shared_ptr<_Tp>;

      static constexpr bool is_always_lock_free = false;

      bool
      is_lock_free() const noexcept
      { return false; }

      constexpr atomic() noexcept = default;

      constexpr atomic(nullptr_t) noexcept : atomic() { }

      atomic(shared_ptr<_Tp> __r) noexcept
      : _M_impl(std::move(__r))
      { }

      atomic(const atomic&) = delete;
      void operator=(const atomic&) = delete;

      shared_ptr<_Tp>
      load(memory_order __o = memory_order_seq_cst) const noexcept
      { return _M_impl.load(__o); }
      operator shared_ptr<_Tp>() const noexcept
      { return _M_impl.load(memory_order_seq_cst); }

      void
      store(shared_ptr<_Tp> __desired,
	    memory_order __o = memory_order_seq_cst) noexcept
      { _M_impl.swap(__desired, __o); }

      void
      operator=(shared_ptr<_Tp> __desired) noexcept
      { _M_impl.swap(__desired, memory_order_seq_cst); }

      void
      operator=(nullptr_t) noexcept
      { store(nullptr); }
      shared_ptr<_Tp>
      exchange(shared_ptr<_Tp> __desired,
	       memory_order __o = memory_order_seq_cst) noexcept
      {
	_M_impl.swap(__desired, __o);
	return __desired;
      }

      bool
      compare_exchange_strong(shared_ptr<_Tp>& __expected,
			      shared_ptr<_Tp> __desired,
			      memory_order __o, memory_order __o2) noexcept
      {
	return _M_impl.compare_exchange_strong(__expected, __desired, __o, __o2);
      }

      bool
      compare_exchange_strong(value_type& __expected, value_type __desired,
			      memory_order __o) noexcept
      {
	memory_order __o2;
	switch (__o)
	{
	case memory_order_acq_rel:
	  __o2 = memory_order_acquire;
	  break;
	case memory_order_release:
	  __o2 = memory_order_relaxed;
	  break;
	default:
	  __o2 = __o;
	}
	return compare_exchange_strong(__expected, std::move(__desired),
				       __o, __o2);
      }

      bool
      compare_exchange_weak(value_type& __expected, value_type __desired,
			    memory_order __o, memory_order __o2) noexcept
      {
	return compare_exchange_strong(__expected, std::move(__desired),
				       __o, __o2);
      }

      bool
      compare_exchange_weak(value_type& __expected, value_type __desired,
			    memory_order __o) noexcept
      { return compare_exchange_strong(__expected, std::move(__desired), __o); }
#if __cpp_lib_atomic_wait
      void
      wait(value_type __old,
	   memory_order __o = memory_order_seq_cst) const noexcept
      { _M_impl.wait(std::move(__old), __o); }

      void
      notify_one() noexcept
      { _M_impl.notify_one(); }

      void
      notify_all() noexcept
      { _M_impl.notify_all(); }
#endif

    private:
      _Sp_atomic<shared_ptr<_Tp>> _M_impl;
    };
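  // Illustrative sketch only: unlike the free functions above, the C++20
  // atomic<shared_ptr<T>> specialization protects the pointer by
  // construction, so no discipline about routing every access through
  // special functions is needed:
  //
  //   std::atomic<std::shared_ptr<int>> __cfg;
  //
  //   void __update()
  //   { __cfg.store(std::make_shared<int>(42)); }
  //
  //   int __read()
  //   {
  //     if (auto __sp = __cfg.load())
  //       return *__sp;
  //     return 0;
  //   }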
  template<typename _Tp>
    class atomic<weak_ptr<_Tp>>
    {
    public:
      using value_type = weak_ptr<_Tp>;

      static constexpr bool is_always_lock_free = false;

      bool
      is_lock_free() const noexcept
      { return false; }

      constexpr atomic() noexcept = default;

      atomic(weak_ptr<_Tp> __r) noexcept
      : _M_impl(std::move(__r))
      { }

      atomic(const atomic&) = delete;
      void operator=(const atomic&) = delete;

      weak_ptr<_Tp>
      load(memory_order __o = memory_order_seq_cst) const noexcept
      { return _M_impl.load(__o); }

      operator weak_ptr<_Tp>() const noexcept
      { return _M_impl.load(memory_order_seq_cst); }

      void
      store(weak_ptr<_Tp> __desired,
	    memory_order __o = memory_order_seq_cst) noexcept
      { _M_impl.swap(__desired, __o); }

      void
      operator=(weak_ptr<_Tp> __desired) noexcept
      { _M_impl.swap(__desired, memory_order_seq_cst); }
      weak_ptr<_Tp>
      exchange(weak_ptr<_Tp> __desired,
	       memory_order __o = memory_order_seq_cst) noexcept
      {
	_M_impl.swap(__desired, __o);
	return __desired;
      }

      bool
      compare_exchange_strong(weak_ptr<_Tp>& __expected,
			      weak_ptr<_Tp> __desired,
			      memory_order __o, memory_order __o2) noexcept
      {
	return _M_impl.compare_exchange_strong(__expected, __desired, __o, __o2);
      }

      bool
      compare_exchange_strong(value_type& __expected, value_type __desired,
			      memory_order __o) noexcept
      {
	memory_order __o2;
	switch (__o)
	{
	case memory_order_acq_rel:
	  __o2 = memory_order_acquire;
	  break;
	case memory_order_release:
	  __o2 = memory_order_relaxed;
	  break;
	default:
	  __o2 = __o;
	}
	return compare_exchange_strong(__expected, std::move(__desired),
				       __o, __o2);
      }

      bool
      compare_exchange_weak(value_type& __expected, value_type __desired,
			    memory_order __o, memory_order __o2) noexcept
      {
	return compare_exchange_strong(__expected, std::move(__desired),
				       __o, __o2);
      }

      bool
      compare_exchange_weak(value_type& __expected, value_type __desired,
			    memory_order __o) noexcept
      { return compare_exchange_strong(__expected, std::move(__desired), __o); }
#if __cpp_lib_atomic_wait
      void
      wait(value_type __old,
	   memory_order __o = memory_order_seq_cst) const noexcept
      { _M_impl.wait(std::move(__old), __o); }

      void
      notify_one() noexcept
      { _M_impl.notify_one(); }

      void
      notify_all() noexcept
      { _M_impl.notify_all(); }
#endif

    private:
      _Sp_atomic<weak_ptr<_Tp>> _M_impl;
    };
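  // Illustrative sketch only: atomic<weak_ptr<T>> allows a shared cache
  // slot that does not keep its object alive:
  //
  //   std::atomic<std::weak_ptr<int>> __cache;
  //
  //   std::shared_ptr<int> __lookup()
  //   { return __cache.load().lock(); } // empty if the object has expired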
#endif // C++20

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace std

#endif // _SHARED_PTR_ATOMIC_H