#ifndef _SHARED_PTR_ATOMIC_H
#define _SHARED_PTR_ATOMIC_H 1

#include <bits/atomic_base.h>

// Annotations for the custom locking in _Sp_atomic.
#if defined _GLIBCXX_TSAN && __has_include(<sanitizer/tsan_interface.h>)
#include <sanitizer/tsan_interface.h>
#define _GLIBCXX_TSAN_MUTEX_DESTROY(X) \
  __tsan_mutex_destroy(X, __tsan_mutex_not_static)
#define _GLIBCXX_TSAN_MUTEX_TRY_LOCK(X) \
  __tsan_mutex_pre_lock(X, __tsan_mutex_not_static|__tsan_mutex_try_lock)
#define _GLIBCXX_TSAN_MUTEX_TRY_LOCK_FAILED(X) __tsan_mutex_post_lock(X, \
    __tsan_mutex_not_static|__tsan_mutex_try_lock_failed, 0)
#define _GLIBCXX_TSAN_MUTEX_LOCKED(X) \
  __tsan_mutex_post_lock(X, __tsan_mutex_not_static, 0)
#define _GLIBCXX_TSAN_MUTEX_PRE_UNLOCK(X) __tsan_mutex_pre_unlock(X, 0)
#define _GLIBCXX_TSAN_MUTEX_POST_UNLOCK(X) __tsan_mutex_post_unlock(X, 0)
#define _GLIBCXX_TSAN_MUTEX_PRE_SIGNAL(X) __tsan_mutex_pre_signal(X, 0)
#define _GLIBCXX_TSAN_MUTEX_POST_SIGNAL(X) __tsan_mutex_post_signal(X, 0)
#else
#define _GLIBCXX_TSAN_MUTEX_DESTROY(X)
#define _GLIBCXX_TSAN_MUTEX_TRY_LOCK(X)
#define _GLIBCXX_TSAN_MUTEX_TRY_LOCK_FAILED(X)
#define _GLIBCXX_TSAN_MUTEX_LOCKED(X)
#define _GLIBCXX_TSAN_MUTEX_PRE_UNLOCK(X)
#define _GLIBCXX_TSAN_MUTEX_POST_UNLOCK(X)
#define _GLIBCXX_TSAN_MUTEX_PRE_SIGNAL(X)
#define _GLIBCXX_TSAN_MUTEX_POST_SIGNAL(X)
#endif
namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION
  /// @cond undocumented

  struct _Sp_locker
  {
    _Sp_locker(const _Sp_locker&) = delete;
    _Sp_locker& operator=(const _Sp_locker&) = delete;

#ifdef __GTHREADS
    explicit
    _Sp_locker(const void*) noexcept;
    _Sp_locker(const void*, const void*) noexcept;
    ~_Sp_locker();

  private:
    unsigned char _M_key1;
    unsigned char _M_key2;
#else
    explicit _Sp_locker(const void*, const void* = nullptr) { }
#endif
  };

  /// @endcond
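  // Background (illustrative, not part of the header): with __GTHREADS the
  // out-of-line _Sp_locker constructors hash the shared_ptr's address into a
  // small fixed pool of mutexes in the library, so unrelated objects rarely
  // contend. A minimal sketch of that address-keyed pool idea (the pool size
  // and shift below are hypothetical):
  //
  //   #include <cstdint>
  //   #include <mutex>
  //   inline std::mutex __pool[16];
  //   inline std::mutex&
  //   __mutex_for(const void* __p)
  //   { return __pool[(reinterpret_cast<std::uintptr_t>(__p) >> 4) % 16]; }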
  /**
   *  @brief  Report whether shared_ptr atomic operations are lock-free.
   *  @param  __p A non-null pointer to a shared_ptr object.
   *  @return True if atomic access to *__p is lock-free, false otherwise.
   */
  template<typename _Tp, _Lock_policy _Lp>
    inline bool
    atomic_is_lock_free(const __shared_ptr<_Tp, _Lp>* __p)
    {
#ifdef __GTHREADS
      return __gthread_active_p() == 0;
#else
      return true;
#endif
    }

  template<typename _Tp>
    inline bool
    atomic_is_lock_free(const shared_ptr<_Tp>* __p)
    { return std::atomic_is_lock_free<_Tp, __default_lock_policy>(__p); }
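  // Illustrative usage (not part of the header): the result is a property of
  // the whole program, not of the individual object.
  //
  //   std::shared_ptr<int> __sp = std::make_shared<int>(1);
  //   bool __lf = std::atomic_is_lock_free(&__sp);
  //   // False once threads are active, because a mutex pool is used.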
  /**
   *  @brief  Atomic load for shared_ptr objects.
   *  @param  __p A non-null pointer to a shared_ptr object.
   *  @return *__p
   */
  template<typename _Tp>
    inline shared_ptr<_Tp>
    atomic_load_explicit(const shared_ptr<_Tp>* __p, memory_order)
    {
      _Sp_locker __lock{__p};
      return *__p;
    }

  template<typename _Tp>
    inline shared_ptr<_Tp>
    atomic_load(const shared_ptr<_Tp>* __p)
    { return std::atomic_load_explicit(__p, memory_order_seq_cst); }

  template<typename _Tp, _Lock_policy _Lp>
    inline __shared_ptr<_Tp, _Lp>
    atomic_load_explicit(const __shared_ptr<_Tp, _Lp>* __p, memory_order)
    {
      _Sp_locker __lock{__p};
      return *__p;
    }

  template<typename _Tp, _Lock_policy _Lp>
    inline __shared_ptr<_Tp, _Lp>
    atomic_load(const __shared_ptr<_Tp, _Lp>* __p)
    { return std::atomic_load_explicit(__p, memory_order_seq_cst); }
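  // Illustrative usage (not part of the header): a reader can snapshot a
  // shared_ptr that another thread may be concurrently replacing. A minimal
  // sketch, with a hypothetical global `__config`:
  //
  //   std::shared_ptr<const int> __config;
  //
  //   int
  //   __read_config()
  //   {
  //     // Snapshot the current value; safe against concurrent atomic_store.
  //     std::shared_ptr<const int> __snap = std::atomic_load(&__config);
  //     return __snap ? *__snap : 0;
  //   }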
  /**
   *  @brief  Atomic store for shared_ptr objects.
   *  @param  __p A non-null pointer to a shared_ptr object.
   *  @param  __r The value to store.
   */
  template<typename _Tp>
    inline void
    atomic_store_explicit(shared_ptr<_Tp>* __p, shared_ptr<_Tp> __r,
                          memory_order)
    {
      _Sp_locker __lock{__p};
      __p->swap(__r); // use swap so that **__p not destroyed while lock held
    }

  template<typename _Tp>
    inline void
    atomic_store(shared_ptr<_Tp>* __p, shared_ptr<_Tp> __r)
    { std::atomic_store_explicit(__p, std::move(__r), memory_order_seq_cst); }

  template<typename _Tp, _Lock_policy _Lp>
    inline void
    atomic_store_explicit(__shared_ptr<_Tp, _Lp>* __p,
                          __shared_ptr<_Tp, _Lp> __r,
                          memory_order)
    {
      _Sp_locker __lock{__p};
      __p->swap(__r); // use swap so that **__p not destroyed while lock held
    }

  template<typename _Tp, _Lock_policy _Lp>
    inline void
    atomic_store(__shared_ptr<_Tp, _Lp>* __p, __shared_ptr<_Tp, _Lp> __r)
    { std::atomic_store_explicit(__p, std::move(__r), memory_order_seq_cst); }
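  // Illustrative usage (not part of the header): the writer side of the
  // atomic_load sketch above. Swapping into the by-value argument means the
  // old object's destructor runs after the _Sp_locker lock is released.
  //
  //   void
  //   __update_config(int __v)
  //   { std::atomic_store(&__config, std::make_shared<const int>(__v)); }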
  /**
   *  @brief  Atomic exchange for shared_ptr objects.
   *  @param  __p A non-null pointer to a shared_ptr object.
   *  @param  __r New value to store in *__p.
   *  @return The previous value of *__p.
   */
  template<typename _Tp>
    inline shared_ptr<_Tp>
    atomic_exchange_explicit(shared_ptr<_Tp>* __p, shared_ptr<_Tp> __r,
                             memory_order)
    {
      _Sp_locker __lock{__p};
      __p->swap(__r);
      return __r;
    }

  template<typename _Tp>
    inline shared_ptr<_Tp>
    atomic_exchange(shared_ptr<_Tp>* __p, shared_ptr<_Tp> __r)
    {
      return std::atomic_exchange_explicit(__p, std::move(__r),
                                           memory_order_seq_cst);
    }

  template<typename _Tp, _Lock_policy _Lp>
    inline __shared_ptr<_Tp, _Lp>
    atomic_exchange_explicit(__shared_ptr<_Tp, _Lp>* __p,
                             __shared_ptr<_Tp, _Lp> __r,
                             memory_order)
    {
      _Sp_locker __lock{__p};
      __p->swap(__r);
      return __r;
    }

  template<typename _Tp, _Lock_policy _Lp>
    inline __shared_ptr<_Tp, _Lp>
    atomic_exchange(__shared_ptr<_Tp, _Lp>* __p, __shared_ptr<_Tp, _Lp> __r)
    {
      return std::atomic_exchange_explicit(__p, std::move(__r),
                                           memory_order_seq_cst);
    }
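  // Illustrative usage (not part of the header): exchange installs a new
  // value and hands back the old one, e.g. to drain a slot exactly once.
  //
  //   std::shared_ptr<int> __slot = std::make_shared<int>(1);
  //   std::shared_ptr<int> __old = std::atomic_exchange(&__slot, {});
  //   // __old owns the previous object; __slot is now empty.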
  /**
   *  @brief  Atomic compare-and-swap for shared_ptr objects.
   *  @param  __p A non-null pointer to a shared_ptr object.
   *  @param  __v A non-null pointer to a shared_ptr object.
   *  @param  __w A value to store in *__p on success.
   *  @return True if *__p was equivalent to *__v, false otherwise.
   */
  template<typename _Tp>
    bool
    atomic_compare_exchange_strong_explicit(shared_ptr<_Tp>* __p,
                                            shared_ptr<_Tp>* __v,
                                            shared_ptr<_Tp> __w,
                                            memory_order, memory_order)
    {
      shared_ptr<_Tp> __x; // goes out of scope after __lock
      _Sp_locker __lock{__p, __v};
      owner_less<shared_ptr<_Tp>> __less;
      if (*__p == *__v && !__less(*__p, *__v) && !__less(*__v, *__p))
        {
          __x = std::move(*__p);
          *__p = std::move(__w);
          return true;
        }
      __x = std::move(*__v);
      *__v = *__p;
      return false;
    }

  template<typename _Tp>
    inline bool
    atomic_compare_exchange_strong(shared_ptr<_Tp>* __p, shared_ptr<_Tp>* __v,
                                   shared_ptr<_Tp> __w)
    {
      return std::atomic_compare_exchange_strong_explicit(__p, __v,
          std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
    }

  template<typename _Tp>
    inline bool
    atomic_compare_exchange_weak_explicit(shared_ptr<_Tp>* __p,
                                          shared_ptr<_Tp>* __v,
                                          shared_ptr<_Tp> __w,
                                          memory_order __success,
                                          memory_order __failure)
    {
      return std::atomic_compare_exchange_strong_explicit(__p, __v,
          std::move(__w), __success, __failure);
    }

  template<typename _Tp>
    inline bool
    atomic_compare_exchange_weak(shared_ptr<_Tp>* __p, shared_ptr<_Tp>* __v,
                                 shared_ptr<_Tp> __w)
    {
      return std::atomic_compare_exchange_weak_explicit(__p, __v,
          std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
    }

  template<typename _Tp, _Lock_policy _Lp>
    bool
    atomic_compare_exchange_strong_explicit(__shared_ptr<_Tp, _Lp>* __p,
                                            __shared_ptr<_Tp, _Lp>* __v,
                                            __shared_ptr<_Tp, _Lp> __w,
                                            memory_order, memory_order)
    {
      __shared_ptr<_Tp, _Lp> __x; // goes out of scope after __lock
      _Sp_locker __lock{__p, __v};
      owner_less<__shared_ptr<_Tp, _Lp>> __less;
      if (*__p == *__v && !__less(*__p, *__v) && !__less(*__v, *__p))
        {
          __x = std::move(*__p);
          *__p = std::move(__w);
          return true;
        }
      __x = std::move(*__v);
      *__v = *__p;
      return false;
    }

  template<typename _Tp, _Lock_policy _Lp>
    inline bool
    atomic_compare_exchange_strong(__shared_ptr<_Tp, _Lp>* __p,
                                   __shared_ptr<_Tp, _Lp>* __v,
                                   __shared_ptr<_Tp, _Lp> __w)
    {
      return std::atomic_compare_exchange_strong_explicit(__p, __v,
          std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
    }

  template<typename _Tp, _Lock_policy _Lp>
    inline bool
    atomic_compare_exchange_weak_explicit(__shared_ptr<_Tp, _Lp>* __p,
                                          __shared_ptr<_Tp, _Lp>* __v,
                                          __shared_ptr<_Tp, _Lp> __w,
                                          memory_order __success,
                                          memory_order __failure)
    {
      return std::atomic_compare_exchange_strong_explicit(__p, __v,
          std::move(__w), __success, __failure);
    }

  template<typename _Tp, _Lock_policy _Lp>
    inline bool
    atomic_compare_exchange_weak(__shared_ptr<_Tp, _Lp>* __p,
                                 __shared_ptr<_Tp, _Lp>* __v,
                                 __shared_ptr<_Tp, _Lp> __w)
    {
      return std::atomic_compare_exchange_weak_explicit(__p, __v,
          std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
    }
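  // Illustrative usage (not part of the header): the usual read-modify-write
  // loop, using the weak form as is conventional for retry loops.
  //
  //   std::shared_ptr<int> __counter = std::make_shared<int>(0);
  //
  //   void
  //   __bump()
  //   {
  //     std::shared_ptr<int> __cur = std::atomic_load(&__counter);
  //     std::shared_ptr<int> __next;
  //     do
  //       __next = std::make_shared<int>(__cur ? *__cur + 1 : 1);
  //     while (!std::atomic_compare_exchange_weak(&__counter, &__cur, __next));
  //   }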
#if __cplusplus >= 202002L
# define __cpp_lib_atomic_shared_ptr 201711L
  template<typename _Tp>
    class atomic;

  template<typename _Up>
    static constexpr bool __is_shared_ptr = false;
  template<typename _Up>
    static constexpr bool __is_shared_ptr<shared_ptr<_Up>> = true;
  template<typename _Tp>
    class _Sp_atomic
    {
      using value_type = _Tp;

      friend class atomic<_Tp>;

      // An atomic version of __shared_count<> and __weak_count<>.
      // Stores a _Sp_counted_base* but uses the LSB as a lock.
      struct _Atomic_count
      {
        // Either __shared_count<> or __weak_count<>
        using __count_type = decltype(_Tp::_M_refcount);

        // _Sp_counted_base<>*
        using pointer = decltype(__count_type::_M_pi);

        // Ensure we can use the LSB as the lock bit.
        static_assert(alignof(remove_pointer_t<pointer>) > 1);
        constexpr _Atomic_count() noexcept = default;

        explicit
        _Atomic_count(__count_type&& __c) noexcept
        : _M_val(reinterpret_cast<uintptr_t>(__c._M_pi))
        { __c._M_pi = nullptr; }

        ~_Atomic_count()
        {
          auto __val = _M_val.load(memory_order_relaxed);
          _GLIBCXX_TSAN_MUTEX_DESTROY(&_M_val);
          __glibcxx_assert(!(__val & _S_lock_bit));
          if (auto __pi = reinterpret_cast<pointer>(__val))
            {
              if constexpr (__is_shared_ptr<_Tp>)
                __pi->_M_release();
              else
                __pi->_M_weak_release();
            }
        }

        _Atomic_count(const _Atomic_count&) = delete;
        _Atomic_count& operator=(const _Atomic_count&) = delete;
        // Precondition: caller does not hold the lock.
        // Returns the raw pointer value without the lock bit set.
        pointer
        lock(memory_order __o) const noexcept
        {
          // To acquire the lock we flip the LSB from 0 to 1.

          auto __current = _M_val.load(memory_order_relaxed);
          while (__current & _S_lock_bit)
            {
#if __cpp_lib_atomic_wait
              __detail::__thread_relax();
#endif
              __current = _M_val.load(memory_order_relaxed);
            }

          _GLIBCXX_TSAN_MUTEX_TRY_LOCK(&_M_val);

          while (!_M_val.compare_exchange_strong(__current,
                                                 __current | _S_lock_bit,
                                                 __o,
                                                 memory_order_relaxed))
            {
              _GLIBCXX_TSAN_MUTEX_TRY_LOCK_FAILED(&_M_val);
#if __cpp_lib_atomic_wait
              __detail::__thread_relax();
#endif
              __current = __current & ~_S_lock_bit;
              _GLIBCXX_TSAN_MUTEX_TRY_LOCK(&_M_val);
            }
          _GLIBCXX_TSAN_MUTEX_LOCKED(&_M_val);
          return reinterpret_cast<pointer>(__current);
        }
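        // The scheme above in isolation (illustrative, not part of the
        // header): a spinlock folded into the low bit of one pointer-sized
        // atomic word, so the word stores both the pointer and the lock.
        //
        //   std::atomic<std::uintptr_t> __val{0};   // pointer | lock bit
        //   std::uintptr_t __cur = __val.load(std::memory_order_relaxed);
        //   for (;;)
        //     {
        //       __cur &= ~std::uintptr_t(1);        // expect unlocked
        //       if (__val.compare_exchange_strong(__cur, __cur | 1,
        //                                         std::memory_order_acquire))
        //         break;                            // __cur is the pointer
        //     }
        //   // ... critical section ...
        //   __val.fetch_sub(1, std::memory_order_release);  // unlock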
        // Precondition: caller holds the lock.
        void
        unlock(memory_order __o) const noexcept
        {
          _GLIBCXX_TSAN_MUTEX_PRE_UNLOCK(&_M_val);
          _M_val.fetch_sub(1, __o);
          _GLIBCXX_TSAN_MUTEX_POST_UNLOCK(&_M_val);
        }
        // Swaps the values of *this and __c, and unlocks *this.
        // Precondition: caller holds the lock.
        void
        _M_swap_unlock(__count_type& __c, memory_order __o) noexcept
        {
          if (__o != memory_order_seq_cst)
            __o = memory_order_release;
          auto __x = reinterpret_cast<uintptr_t>(__c._M_pi);
          _GLIBCXX_TSAN_MUTEX_PRE_UNLOCK(&_M_val);
          __x = _M_val.exchange(__x, __o);
          _GLIBCXX_TSAN_MUTEX_POST_UNLOCK(&_M_val);
          __c._M_pi = reinterpret_cast<pointer>(__x & ~_S_lock_bit);
        }
#if __cpp_lib_atomic_wait
        // Precondition: caller holds the lock.
        void
        _M_wait_unlock(memory_order __o) const noexcept
        {
          _GLIBCXX_TSAN_MUTEX_PRE_UNLOCK(&_M_val);
          auto __v = _M_val.fetch_sub(1, memory_order_relaxed);
          _GLIBCXX_TSAN_MUTEX_POST_UNLOCK(&_M_val);
          _M_val.wait(__v & ~_S_lock_bit, __o);
        }

        void
        notify_one() noexcept
        {
          _GLIBCXX_TSAN_MUTEX_PRE_SIGNAL(&_M_val);
          _M_val.notify_one();
          _GLIBCXX_TSAN_MUTEX_POST_SIGNAL(&_M_val);
        }

        void
        notify_all() noexcept
        {
          _GLIBCXX_TSAN_MUTEX_PRE_SIGNAL(&_M_val);
          _M_val.notify_all();
          _GLIBCXX_TSAN_MUTEX_POST_SIGNAL(&_M_val);
        }
#endif
      private:
        mutable __atomic_base<uintptr_t> _M_val{0};
        static constexpr uintptr_t _S_lock_bit{1};
      };

      typename _Tp::element_type* _M_ptr = nullptr;
      _Atomic_count _M_refcount;

      static typename _Atomic_count::pointer
      _S_add_ref(typename _Atomic_count::pointer __p)
      {
        if (__p)
          {
            if constexpr (__is_shared_ptr<_Tp>)
              __p->_M_add_ref_copy();
            else
              __p->_M_weak_add_ref();
          }
        return __p;
      }
    public:
      constexpr _Sp_atomic() noexcept = default;

      explicit
      _Sp_atomic(value_type __r) noexcept
      : _M_ptr(__r._M_ptr), _M_refcount(std::move(__r._M_refcount))
      { }

      ~_Sp_atomic() = default;

      _Sp_atomic(const _Sp_atomic&) = delete;
      void operator=(const _Sp_atomic&) = delete;

      value_type
      load(memory_order __o) const noexcept
      {
        __glibcxx_assert(__o != memory_order_release
                           && __o != memory_order_acq_rel);
        // Ensure that the correct value of _M_ptr is visible after locking
        // by upgrading relaxed or consume to acquire.
        if (__o != memory_order_seq_cst)
          __o = memory_order_acquire;

        value_type __ret;
        auto __pi = _M_refcount.lock(__o);
        __ret._M_ptr = _M_ptr;
        __ret._M_refcount._M_pi = _S_add_ref(__pi);
        _M_refcount.unlock(memory_order_relaxed);
        return __ret;
      }

      void
      swap(value_type& __r, memory_order __o) noexcept
      {
        _M_refcount.lock(memory_order_acquire);
        std::swap(_M_ptr, __r._M_ptr);
        _M_refcount._M_swap_unlock(__r._M_refcount, __o);
      }
      bool
      compare_exchange_strong(value_type& __expected, value_type __desired,
                              memory_order __o, memory_order __o2) noexcept
      {
        bool __result = true;
        auto __pi = _M_refcount.lock(memory_order_acquire);
        if (_M_ptr == __expected._M_ptr
              && __pi == __expected._M_refcount._M_pi)
          {
            _M_ptr = __desired._M_ptr;
            _M_refcount._M_swap_unlock(__desired._M_refcount, __o);
          }
        else
          {
            // Observed a different value: update __expected to it.
            value_type __sink = std::move(__expected);
            __expected._M_ptr = _M_ptr;
            __expected._M_refcount._M_pi = _S_add_ref(__pi);
            _M_refcount.unlock(__o2);
            __result = false;
          }
        return __result;
      }
#if __cpp_lib_atomic_wait
      void
      wait(value_type __old, memory_order __o) const noexcept
      {
        auto __pi = _M_refcount.lock(memory_order_acquire);
        if (_M_ptr == __old._M_ptr && __pi == __old._M_refcount._M_pi)
          _M_refcount._M_wait_unlock(__o);
        else
          _M_refcount.unlock(memory_order_relaxed);
      }

      void
      notify_one() noexcept
      { _M_refcount.notify_one(); }

      void
      notify_all() noexcept
      { _M_refcount.notify_all(); }
#endif
    };
  template<typename _Tp>
    class atomic<shared_ptr<_Tp>>
    {
    public:
      using value_type = shared_ptr<_Tp>;

      static constexpr bool is_always_lock_free = false;

      bool
      is_lock_free() const noexcept
      { return false; }

      constexpr atomic() noexcept = default;

      // _GLIBCXX_RESOLVE_LIB_DEFECTS
      // 3661. constinit atomic<shared_ptr<T>> a(nullptr); should work
      constexpr atomic(nullptr_t) noexcept : atomic() { }

      atomic(shared_ptr<_Tp> __r) noexcept
      : _M_impl(std::move(__r))
      { }

      atomic(const atomic&) = delete;
      void operator=(const atomic&) = delete;

      shared_ptr<_Tp>
      load(memory_order __o = memory_order_seq_cst) const noexcept
      { return _M_impl.load(__o); }

      operator shared_ptr<_Tp>() const noexcept
      { return _M_impl.load(memory_order_seq_cst); }

      void
      store(shared_ptr<_Tp> __desired,
            memory_order __o = memory_order_seq_cst) noexcept
      { _M_impl.swap(__desired, __o); }

      void
      operator=(shared_ptr<_Tp> __desired) noexcept
      { _M_impl.swap(__desired, memory_order_seq_cst); }

      void
      operator=(nullptr_t) noexcept
      { store(nullptr); }

      shared_ptr<_Tp>
      exchange(shared_ptr<_Tp> __desired,
               memory_order __o = memory_order_seq_cst) noexcept
      {
        _M_impl.swap(__desired, __o);
        return __desired;
      }

      bool
      compare_exchange_strong(shared_ptr<_Tp>& __expected,
                              shared_ptr<_Tp> __desired,
                              memory_order __o, memory_order __o2) noexcept
      {
        return _M_impl.compare_exchange_strong(__expected, __desired,
                                               __o, __o2);
      }

      bool
      compare_exchange_strong(value_type& __expected, value_type __desired,
                              memory_order __o) noexcept
      {
        memory_order __o2;
        switch (__o)
        {
        case memory_order_acq_rel:
          __o2 = memory_order_acquire;
          break;
        case memory_order_release:
          __o2 = memory_order_relaxed;
          break;
        default:
          __o2 = __o;
        }
        return compare_exchange_strong(__expected, std::move(__desired),
                                       __o, __o2);
      }

      bool
      compare_exchange_weak(value_type& __expected, value_type __desired,
                            memory_order __o, memory_order __o2) noexcept
      {
        return compare_exchange_strong(__expected, std::move(__desired),
                                       __o, __o2);
      }

      bool
      compare_exchange_weak(value_type& __expected, value_type __desired,
                            memory_order __o) noexcept
      { return compare_exchange_strong(__expected, std::move(__desired), __o); }

#if __cpp_lib_atomic_wait
      void
      wait(value_type __old,
           memory_order __o = memory_order_seq_cst) const noexcept
      { _M_impl.wait(std::move(__old), __o); }

      void
      notify_one() noexcept
      { _M_impl.notify_one(); }

      void
      notify_all() noexcept
      { _M_impl.notify_all(); }
#endif

    private:
      _Sp_atomic<shared_ptr<_Tp>> _M_impl;
    };
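  // Illustrative usage (not part of the header): atomic<shared_ptr<T>> in a
  // Treiber-style stack push, with a hypothetical `__head` variable.
  //
  //   struct _Node { int _M_value; std::shared_ptr<_Node> _M_next; };
  //   std::atomic<std::shared_ptr<_Node>> __head;
  //
  //   void
  //   __push(int __v)
  //   {
  //     auto __n = std::make_shared<_Node>();
  //     __n->_M_value = __v;
  //     __n->_M_next = __head.load();
  //     // On failure __n->_M_next is updated to the current head.
  //     while (!__head.compare_exchange_weak(__n->_M_next, __n))
  //       { }
  //   }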
  template<typename _Tp>
    class atomic<weak_ptr<_Tp>>
    {
    public:
      using value_type = weak_ptr<_Tp>;

      static constexpr bool is_always_lock_free = false;

      bool
      is_lock_free() const noexcept
      { return false; }

      constexpr atomic() noexcept = default;

      atomic(weak_ptr<_Tp> __r) noexcept
      : _M_impl(std::move(__r))
      { }

      atomic(const atomic&) = delete;
      void operator=(const atomic&) = delete;

      weak_ptr<_Tp>
      load(memory_order __o = memory_order_seq_cst) const noexcept
      { return _M_impl.load(__o); }

      operator weak_ptr<_Tp>() const noexcept
      { return _M_impl.load(memory_order_seq_cst); }

      void
      store(weak_ptr<_Tp> __desired,
            memory_order __o = memory_order_seq_cst) noexcept
      { _M_impl.swap(__desired, __o); }

      void
      operator=(weak_ptr<_Tp> __desired) noexcept
      { _M_impl.swap(__desired, memory_order_seq_cst); }

      weak_ptr<_Tp>
      exchange(weak_ptr<_Tp> __desired,
               memory_order __o = memory_order_seq_cst) noexcept
      {
        _M_impl.swap(__desired, __o);
        return __desired;
      }

      bool
      compare_exchange_strong(weak_ptr<_Tp>& __expected,
                              weak_ptr<_Tp> __desired,
                              memory_order __o, memory_order __o2) noexcept
      {
        return _M_impl.compare_exchange_strong(__expected, __desired,
                                               __o, __o2);
      }

      bool
      compare_exchange_strong(value_type& __expected, value_type __desired,
                              memory_order __o) noexcept
      {
        memory_order __o2;
        switch (__o)
        {
        case memory_order_acq_rel:
          __o2 = memory_order_acquire;
          break;
        case memory_order_release:
          __o2 = memory_order_relaxed;
          break;
        default:
          __o2 = __o;
        }
        return compare_exchange_strong(__expected, std::move(__desired),
                                       __o, __o2);
      }

      bool
      compare_exchange_weak(value_type& __expected, value_type __desired,
                            memory_order __o, memory_order __o2) noexcept
      {
        return compare_exchange_strong(__expected, std::move(__desired),
                                       __o, __o2);
      }

      bool
      compare_exchange_weak(value_type& __expected, value_type __desired,
                            memory_order __o) noexcept
      { return compare_exchange_strong(__expected, std::move(__desired), __o); }

#if __cpp_lib_atomic_wait
      void
      wait(value_type __old,
           memory_order __o = memory_order_seq_cst) const noexcept
      { _M_impl.wait(std::move(__old), __o); }

      void
      notify_one() noexcept
      { _M_impl.notify_one(); }

      void
      notify_all() noexcept
      { _M_impl.notify_all(); }
#endif

    private:
      _Sp_atomic<weak_ptr<_Tp>> _M_impl;
    };
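  // Illustrative usage (not part of the header): atomic<weak_ptr<T>> suits a
  // shared cache slot that must not keep its object alive. `__cache` is a
  // hypothetical variable.
  //
  //   std::atomic<std::weak_ptr<int>> __cache;
  //
  //   std::shared_ptr<int>
  //   __get_or_make()
  //   {
  //     if (auto __sp = __cache.load().lock())
  //       return __sp;                   // still alive, reuse it
  //     auto __sp = std::make_shared<int>(42);
  //     __cache.store(__sp);             // publish without extending lifetime
  //     return __sp;
  //   }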
#endif // C++20

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace

#endif // _SHARED_PTR_ATOMIC_H