#ifndef _GLIBCXX_ATOMIC_BASE_H
#define _GLIBCXX_ATOMIC_BASE_H 1

#pragma GCC system_header

#if __cplusplus > 201703L && _GLIBCXX_HOSTED
# include <bits/atomic_wait.h>
#endif

#ifndef _GLIBCXX_ALWAYS_INLINE
#define _GLIBCXX_ALWAYS_INLINE inline __attribute__((__always_inline__))
#endif

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION
  /// Enumeration for memory_order
#if __cplusplus > 201703L
  enum class memory_order : int
    { relaxed, consume, acquire, release, acq_rel, seq_cst };

  inline constexpr memory_order memory_order_relaxed = memory_order::relaxed;
  inline constexpr memory_order memory_order_consume = memory_order::consume;
  inline constexpr memory_order memory_order_acquire = memory_order::acquire;
  inline constexpr memory_order memory_order_release = memory_order::release;
  inline constexpr memory_order memory_order_acq_rel = memory_order::acq_rel;
  inline constexpr memory_order memory_order_seq_cst = memory_order::seq_cst;
#endif
  enum __memory_order_modifier
    {
      __memory_order_mask          = 0x0ffff,
      __memory_order_modifier_mask = 0xffff0000,
      __memory_order_hle_acquire   = 0x10000,
      __memory_order_hle_release   = 0x20000
    };
  constexpr memory_order
  __cmpexch_failure_order2(memory_order __m) noexcept
  {
    return __m == memory_order_acq_rel ? memory_order_acquire
      : __m == memory_order_release ? memory_order_relaxed : __m;
  }

  constexpr memory_order
  __cmpexch_failure_order(memory_order __m) noexcept
  {
    return memory_order(__cmpexch_failure_order2(__m & __memory_order_mask)
      | __memory_order_modifier(__m & __memory_order_modifier_mask));
  }

  constexpr bool
  __is_valid_cmpexch_failure_order(memory_order __m) noexcept
  {
    return (__m & __memory_order_mask) != memory_order_release
      && (__m & __memory_order_mask) != memory_order_acq_rel;
  }
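  // A sketch of what the helpers above compute (assuming the full header,
  // including its memory_order bitwise operators, is in scope): the failure
  // order that the single-order compare_exchange overloads derive from the
  // success order.
  //
  //   static_assert(__cmpexch_failure_order(memory_order_acq_rel)
  //                   == memory_order_acquire);
  //   static_assert(__cmpexch_failure_order(memory_order_release)
  //                   == memory_order_relaxed);
  //   static_assert(__cmpexch_failure_order(memory_order_seq_cst)
  //                   == memory_order_seq_cst);
  //   static_assert(__is_valid_cmpexch_failure_order(memory_order_acquire));
  //   static_assert(!__is_valid_cmpexch_failure_order(memory_order_release));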
  _GLIBCXX_ALWAYS_INLINE void
  atomic_thread_fence(memory_order __m) noexcept
  { __atomic_thread_fence(int(__m)); }

  _GLIBCXX_ALWAYS_INLINE void
  atomic_signal_fence(memory_order __m) noexcept
  { __atomic_signal_fence(int(__m)); }
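  // Usage sketch (hypothetical user code, not part of the header): a release
  // fence before a relaxed store pairs with an acquire fence after a relaxed
  // load, giving the same happens-before edge as release/acquire operations
  // on the flag itself.
  //
  //   std::atomic<int> data{0}, ready{0};
  //   // producer
  //   data.store(42, std::memory_order_relaxed);
  //   std::atomic_thread_fence(std::memory_order_release);
  //   ready.store(1, std::memory_order_relaxed);
  //   // consumer
  //   if (ready.load(std::memory_order_relaxed))
  //     {
  //       std::atomic_thread_fence(std::memory_order_acquire);
  //       int v = data.load(std::memory_order_relaxed);   // guaranteed 42
  //     }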
  /// kill_dependency
  template<typename _Tp>
    inline _Tp
    kill_dependency(_Tp __y) noexcept
    {
      _Tp __ret(__y);
      return __ret;
    }

  // Base types for atomics.
  template<typename _IntTp>
    struct __atomic_base;
#if __cplusplus <= 201703L
# define _GLIBCXX20_INIT(I)
#else
# define __cpp_lib_atomic_value_initialization 201911L
# define _GLIBCXX20_INIT(I) = I
#endif

#define ATOMIC_VAR_INIT(_VI) { _VI }
  template<typename _Tp>
    struct atomic;

  template<typename _Tp>
    struct atomic<_Tp*>;
#if __GCC_ATOMIC_TEST_AND_SET_TRUEVAL == 1
    typedef bool __atomic_flag_data_type;
#else
    typedef unsigned char __atomic_flag_data_type;
#endif

  _GLIBCXX_BEGIN_EXTERN_C

  struct __atomic_flag_base
  {
    __atomic_flag_data_type _M_i _GLIBCXX20_INIT({});
  };

  _GLIBCXX_END_EXTERN_C

#define ATOMIC_FLAG_INIT { 0 }
  /// atomic_flag
  struct atomic_flag : public __atomic_flag_base
  {
    _GLIBCXX_ALWAYS_INLINE bool
    test_and_set(memory_order __m = memory_order_seq_cst) noexcept
    {
      return __atomic_test_and_set (&_M_i, int(__m));
    }

    _GLIBCXX_ALWAYS_INLINE bool
    test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      return __atomic_test_and_set (&_M_i, int(__m));
    }

#if __cplusplus > 201703L
#define __cpp_lib_atomic_flag_test 201907L

    _GLIBCXX_ALWAYS_INLINE bool
    test(memory_order __m = memory_order_seq_cst) const noexcept
    {
      __atomic_flag_data_type __v;
      __atomic_load(&_M_i, &__v, int(__m));
      return __v == __GCC_ATOMIC_TEST_AND_SET_TRUEVAL;
    }

    _GLIBCXX_ALWAYS_INLINE bool
    test(memory_order __m = memory_order_seq_cst) const volatile noexcept
    {
      __atomic_flag_data_type __v;
      __atomic_load(&_M_i, &__v, int(__m));
      return __v == __GCC_ATOMIC_TEST_AND_SET_TRUEVAL;
    }

#if __cpp_lib_atomic_wait
    _GLIBCXX_ALWAYS_INLINE void
    wait(bool __old, memory_order __m = memory_order_seq_cst) const noexcept
    {
      const __atomic_flag_data_type __v
        = __old ? __GCC_ATOMIC_TEST_AND_SET_TRUEVAL : 0;

      std::__atomic_wait_address_v(&_M_i, __v,
          [__m, this] { return __atomic_load_n(&_M_i, int(__m)); });
    }

    _GLIBCXX_ALWAYS_INLINE void
    notify_one() noexcept
    { std::__atomic_notify_address(&_M_i, false); }

    _GLIBCXX_ALWAYS_INLINE void
    notify_all() noexcept
    { std::__atomic_notify_address(&_M_i, true); }
#endif // __cpp_lib_atomic_wait
#endif // C++20

    _GLIBCXX_ALWAYS_INLINE void
    clear(memory_order __m = memory_order_seq_cst) noexcept
    {
      memory_order __b __attribute__ ((__unused__))
        = __m & __memory_order_mask;
      __glibcxx_assert(__b != memory_order_consume);
      __glibcxx_assert(__b != memory_order_acquire);
      __glibcxx_assert(__b != memory_order_acq_rel);

      __atomic_clear (&_M_i, int(__m));
    }

    _GLIBCXX_ALWAYS_INLINE void
    clear(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      memory_order __b __attribute__ ((__unused__))
        = __m & __memory_order_mask;
      __glibcxx_assert(__b != memory_order_consume);
      __glibcxx_assert(__b != memory_order_acquire);
      __glibcxx_assert(__b != memory_order_acq_rel);

      __atomic_clear (&_M_i, int(__m));
    }

  private:
    static constexpr __atomic_flag_data_type
    _S_init(bool __i)
    { return __i ? __GCC_ATOMIC_TEST_AND_SET_TRUEVAL : 0; }
  };
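  // Usage sketch (hypothetical user code, not part of the header):
  // atomic_flag as a minimal spinlock built from test_and_set/clear.
  //
  //   std::atomic_flag flag = ATOMIC_FLAG_INIT;
  //
  //   void lock()
  //   {
  //     while (flag.test_and_set(std::memory_order_acquire))
  //       { /* spin */ }
  //   }
  //
  //   void unlock()
  //   { flag.clear(std::memory_order_release); }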
  /// Base class for atomic integrals.
  template<typename _ITp>
    struct __atomic_base
    {
      using value_type = _ITp;
      using difference_type = value_type;

    private:
      typedef _ITp __int_type;

      static constexpr int _S_alignment =
        sizeof(_ITp) > alignof(_ITp) ? sizeof(_ITp) : alignof(_ITp);

      alignas(_S_alignment) __int_type _M_i _GLIBCXX20_INIT(0);

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;
      constexpr __atomic_base(__int_type __i) noexcept : _M_i (__i) { }

      operator __int_type() const noexcept
      { return load(); }

      operator __int_type() const volatile noexcept
      { return load(); }

      __int_type
      operator=(__int_type __i) noexcept
      {
        store(__i);
        return __i;
      }

      __int_type
      operator=(__int_type __i) volatile noexcept
      {
        store(__i);
        return __i;
      }

      __int_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __int_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __int_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __int_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }

      __int_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_i, 1, int(memory_order_seq_cst)); }

      __int_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_i, 1, int(memory_order_seq_cst)); }

      __int_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_i, 1, int(memory_order_seq_cst)); }

      __int_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_i, 1, int(memory_order_seq_cst)); }

      __int_type
      operator+=(__int_type __i) noexcept
      { return __atomic_add_fetch(&_M_i, __i, int(memory_order_seq_cst)); }

      __int_type
      operator+=(__int_type __i) volatile noexcept
      { return __atomic_add_fetch(&_M_i, __i, int(memory_order_seq_cst)); }

      __int_type
      operator-=(__int_type __i) noexcept
      { return __atomic_sub_fetch(&_M_i, __i, int(memory_order_seq_cst)); }

      __int_type
      operator-=(__int_type __i) volatile noexcept
      { return __atomic_sub_fetch(&_M_i, __i, int(memory_order_seq_cst)); }

      __int_type
      operator&=(__int_type __i) noexcept
      { return __atomic_and_fetch(&_M_i, __i, int(memory_order_seq_cst)); }

      __int_type
      operator&=(__int_type __i) volatile noexcept
      { return __atomic_and_fetch(&_M_i, __i, int(memory_order_seq_cst)); }

      __int_type
      operator|=(__int_type __i) noexcept
      { return __atomic_or_fetch(&_M_i, __i, int(memory_order_seq_cst)); }

      __int_type
      operator|=(__int_type __i) volatile noexcept
      { return __atomic_or_fetch(&_M_i, __i, int(memory_order_seq_cst)); }

      __int_type
      operator^=(__int_type __i) noexcept
      { return __atomic_xor_fetch(&_M_i, __i, int(memory_order_seq_cst)); }

      __int_type
      operator^=(__int_type __i) volatile noexcept
      { return __atomic_xor_fetch(&_M_i, __i, int(memory_order_seq_cst)); }

      bool
      is_lock_free() const noexcept
      {
        // Use a fake, minimally aligned pointer.
        return __atomic_is_lock_free(sizeof(_M_i),
            reinterpret_cast<void *>(-_S_alignment));
      }

      bool
      is_lock_free() const volatile noexcept
      {
        // Use a fake, minimally aligned pointer.
        return __atomic_is_lock_free(sizeof(_M_i),
            reinterpret_cast<void *>(-_S_alignment));
      }
      _GLIBCXX_ALWAYS_INLINE void
      store(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept
      {
        memory_order __b __attribute__ ((__unused__))
          = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_i, __i, int(__m));
      }

      _GLIBCXX_ALWAYS_INLINE void
      store(__int_type __i,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        memory_order __b __attribute__ ((__unused__))
          = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_i, __i, int(__m));
      }

      _GLIBCXX_ALWAYS_INLINE __int_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        memory_order __b __attribute__ ((__unused__))
          = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, int(__m));
      }

      _GLIBCXX_ALWAYS_INLINE __int_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        memory_order __b __attribute__ ((__unused__))
          = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, int(__m));
      }

      _GLIBCXX_ALWAYS_INLINE __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, int(__m));
      }

      _GLIBCXX_ALWAYS_INLINE __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, int(__m));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1, memory_order __m2) noexcept
      {
        __glibcxx_assert(__is_valid_cmpexch_failure_order(__m2));

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1,
                                           int(__m1), int(__m2));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      {
        __glibcxx_assert(__is_valid_cmpexch_failure_order(__m2));

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1,
                                           int(__m1), int(__m2));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                   memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1, memory_order __m2) noexcept
      {
        __glibcxx_assert(__is_valid_cmpexch_failure_order(__m2));

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0,
                                           int(__m1), int(__m2));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        __glibcxx_assert(__is_valid_cmpexch_failure_order(__m2));

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0,
                                           int(__m1), int(__m2));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                 memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }
#if __cpp_lib_atomic_wait
      _GLIBCXX_ALWAYS_INLINE void
      wait(__int_type __old,
           memory_order __m = memory_order_seq_cst) const noexcept
      {
        std::__atomic_wait_address_v(&_M_i, __old,
                           [__m, this] { return this->load(__m); });
      }

      _GLIBCXX_ALWAYS_INLINE void
      notify_one() noexcept
      { std::__atomic_notify_address(&_M_i, false); }

      _GLIBCXX_ALWAYS_INLINE void
      notify_all() noexcept
      { std::__atomic_notify_address(&_M_i, true); }
#endif // __cpp_lib_atomic_wait
      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_i, __i, int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_i, __i, int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_i, __i, int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_i, __i, int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_and(&_M_i, __i, int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_and(&_M_i, __i, int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_or(&_M_i, __i, int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_or(&_M_i, __i, int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_xor(&_M_i, __i, int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_xor(&_M_i, __i, int(__m)); }
    };
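  // Usage sketch (hypothetical user code): the integral std::atomic
  // specializations in <atomic> are built on this base, so the members above
  // correspond directly to the public API.
  //
  //   std::atomic<int> counter{0};
  //   counter.fetch_add(1, std::memory_order_relaxed);  // wraps fetch_add
  //   ++counter;                                         // seq_cst add_fetch
  //   int expected = 2;
  //   counter.compare_exchange_strong(expected, 42);     // seq_cst success
  //                                                      // and failure order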
  /// Partial specialization for pointer types.
  template<typename _PTp>
    struct __atomic_base<_PTp*>
    {
    private:
      typedef _PTp* __pointer_type;

      __pointer_type _M_p _GLIBCXX20_INIT(nullptr);

      // Factored out to facilitate explicit specialization.
      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) const { return __d * sizeof(_PTp); }

      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) const volatile { return __d * sizeof(_PTp); }

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;
      constexpr __atomic_base(__pointer_type __p) noexcept : _M_p (__p) { }

      operator __pointer_type() const noexcept
      { return load(); }

      operator __pointer_type() const volatile noexcept
      { return load(); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      {
        store(__p);
        return __p;
      }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      {
        store(__p);
        return __p;
      }

      __pointer_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __pointer_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __pointer_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  int(memory_order_seq_cst)); }

      __pointer_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  int(memory_order_seq_cst)); }

      __pointer_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  int(memory_order_seq_cst)); }

      __pointer_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  int(memory_order_seq_cst)); }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  int(memory_order_seq_cst)); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  int(memory_order_seq_cst)); }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  int(memory_order_seq_cst)); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  int(memory_order_seq_cst)); }

      bool
      is_lock_free() const noexcept
      {
        // Produce a fake, minimally aligned pointer.
        return __atomic_is_lock_free(sizeof(_M_p),
            reinterpret_cast<void *>(-__alignof(_M_p)));
      }

      bool
      is_lock_free() const volatile noexcept
      {
        // Produce a fake, minimally aligned pointer.
        return __atomic_is_lock_free(sizeof(_M_p),
            reinterpret_cast<void *>(-__alignof(_M_p)));
      }
      _GLIBCXX_ALWAYS_INLINE void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      {
        memory_order __b __attribute__ ((__unused__))
          = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_p, __p, int(__m));
      }

      _GLIBCXX_ALWAYS_INLINE void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        memory_order __b __attribute__ ((__unused__))
          = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_p, __p, int(__m));
      }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        memory_order __b __attribute__ ((__unused__))
          = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, int(__m));
      }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        memory_order __b __attribute__ ((__unused__))
          = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, int(__m));
      }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, int(__m));
      }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, int(__m));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) noexcept
      {
        __glibcxx_assert(__is_valid_cmpexch_failure_order(__m2));

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0,
                                           int(__m1), int(__m2));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        __glibcxx_assert(__is_valid_cmpexch_failure_order(__m2));

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0,
                                           int(__m1), int(__m2));
      }

#if __cpp_lib_atomic_wait
      _GLIBCXX_ALWAYS_INLINE void
      wait(__pointer_type __old,
           memory_order __m = memory_order_seq_cst) const noexcept
      {
        std::__atomic_wait_address_v(&_M_p, __old,
                                     [__m, this]
                                     { return this->load(__m); });
      }

      _GLIBCXX_ALWAYS_INLINE void
      notify_one() const noexcept
      { std::__atomic_notify_address(&_M_p, false); }

      _GLIBCXX_ALWAYS_INLINE void
      notify_all() const noexcept
      { std::__atomic_notify_address(&_M_p, true); }
#endif // __cpp_lib_atomic_wait

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), int(__m)); }
    };
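  // Usage sketch (hypothetical user code): pointer atomics scale the
  // ptrdiff_t argument by sizeof(_PTp) via _M_type_size, matching ordinary
  // pointer arithmetic.
  //
  //   int a[4] = {0, 1, 2, 3};
  //   std::atomic<int*> p{a};
  //   p.fetch_add(2);   // p now points at a + 2, i.e. the stored address
  //                     // advanced by 2 * sizeof(int) bytes
  //   p += 1;           // a + 3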
#if __cplusplus > 201703L
  // Implementation details of atomic_ref and atomic<floating-point>.
  namespace __atomic_impl
  {
    // Remove volatile and create a non-deduced context for value arguments.
    template<typename _Tp>
      using _Val = remove_volatile_t<_Tp>;

    // As above, but for difference_type arguments.
    template<typename _Tp>
      using _Diff = conditional_t<is_pointer_v<_Tp>, ptrdiff_t, _Val<_Tp>>;

    template<size_t _Size, size_t _Align>
      _GLIBCXX_ALWAYS_INLINE bool
      is_lock_free() noexcept
      {
        // Produce a fake, minimally aligned pointer.
        return __atomic_is_lock_free(_Size, reinterpret_cast<void *>(-_Align));
      }
    template<typename _Tp>
      _GLIBCXX_ALWAYS_INLINE void
      store(_Tp* __ptr, _Val<_Tp> __t, memory_order __m) noexcept
      { __atomic_store(__ptr, std::__addressof(__t), int(__m)); }

    template<typename _Tp>
      _GLIBCXX_ALWAYS_INLINE _Val<_Tp>
      load(const _Tp* __ptr, memory_order __m) noexcept
      {
        alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
        auto* __dest = reinterpret_cast<_Val<_Tp>*>(__buf);
        __atomic_load(__ptr, __dest, int(__m));
        return *__dest;
      }

    template<typename _Tp>
      _GLIBCXX_ALWAYS_INLINE _Val<_Tp>
      exchange(_Tp* __ptr, _Val<_Tp> __desired, memory_order __m) noexcept
      {
        alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
        auto* __dest = reinterpret_cast<_Val<_Tp>*>(__buf);
        __atomic_exchange(__ptr, std::__addressof(__desired), __dest, int(__m));
        return *__dest;
      }

    template<typename _Tp>
      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(_Tp* __ptr, _Val<_Tp>& __expected,
                            _Val<_Tp> __desired, memory_order __success,
                            memory_order __failure) noexcept
      {
        __glibcxx_assert(__is_valid_cmpexch_failure_order(__failure));

        return __atomic_compare_exchange(__ptr, std::__addressof(__expected),
                                         std::__addressof(__desired), true,
                                         int(__success), int(__failure));
      }

    template<typename _Tp>
      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(_Tp* __ptr, _Val<_Tp>& __expected,
                              _Val<_Tp> __desired, memory_order __success,
                              memory_order __failure) noexcept
      {
        __glibcxx_assert(__is_valid_cmpexch_failure_order(__failure));

        return __atomic_compare_exchange(__ptr, std::__addressof(__expected),
                                         std::__addressof(__desired), false,
                                         int(__success), int(__failure));
      }
#if __cpp_lib_atomic_wait
    template<typename _Tp>
      _GLIBCXX_ALWAYS_INLINE void
      wait(const _Tp* __ptr, _Val<_Tp> __old,
           memory_order __m = memory_order_seq_cst) noexcept
      {
        std::__atomic_wait_address_v(__ptr, __old,
            [__ptr, __m]() { return __atomic_impl::load(__ptr, __m); });
      }

    template<typename _Tp>
      _GLIBCXX_ALWAYS_INLINE void
      notify_one(const _Tp* __ptr) noexcept
      { std::__atomic_notify_address(__ptr, false); }

    template<typename _Tp>
      _GLIBCXX_ALWAYS_INLINE void
      notify_all(const _Tp* __ptr) noexcept
      { std::__atomic_notify_address(__ptr, true); }
#endif // __cpp_lib_atomic_wait
    template<typename _Tp>
      _GLIBCXX_ALWAYS_INLINE _Tp
      fetch_add(_Tp* __ptr, _Diff<_Tp> __i, memory_order __m) noexcept
      { return __atomic_fetch_add(__ptr, __i, int(__m)); }

    template<typename _Tp>
      _GLIBCXX_ALWAYS_INLINE _Tp
      fetch_sub(_Tp* __ptr, _Diff<_Tp> __i, memory_order __m) noexcept
      { return __atomic_fetch_sub(__ptr, __i, int(__m)); }

    template<typename _Tp>
      _GLIBCXX_ALWAYS_INLINE _Tp
      fetch_and(_Tp* __ptr, _Val<_Tp> __i, memory_order __m) noexcept
      { return __atomic_fetch_and(__ptr, __i, int(__m)); }

    template<typename _Tp>
      _GLIBCXX_ALWAYS_INLINE _Tp
      fetch_or(_Tp* __ptr, _Val<_Tp> __i, memory_order __m) noexcept
      { return __atomic_fetch_or(__ptr, __i, int(__m)); }

    template<typename _Tp>
      _GLIBCXX_ALWAYS_INLINE _Tp
      fetch_xor(_Tp* __ptr, _Val<_Tp> __i, memory_order __m) noexcept
      { return __atomic_fetch_xor(__ptr, __i, int(__m)); }

    template<typename _Tp>
      _GLIBCXX_ALWAYS_INLINE _Tp
      __add_fetch(_Tp* __ptr, _Diff<_Tp> __i) noexcept
      { return __atomic_add_fetch(__ptr, __i, __ATOMIC_SEQ_CST); }

    template<typename _Tp>
      _GLIBCXX_ALWAYS_INLINE _Tp
      __sub_fetch(_Tp* __ptr, _Diff<_Tp> __i) noexcept
      { return __atomic_sub_fetch(__ptr, __i, __ATOMIC_SEQ_CST); }

    template<typename _Tp>
      _GLIBCXX_ALWAYS_INLINE _Tp
      __and_fetch(_Tp* __ptr, _Val<_Tp> __i) noexcept
      { return __atomic_and_fetch(__ptr, __i, __ATOMIC_SEQ_CST); }

    template<typename _Tp>
      _GLIBCXX_ALWAYS_INLINE _Tp
      __or_fetch(_Tp* __ptr, _Val<_Tp> __i) noexcept
      { return __atomic_or_fetch(__ptr, __i, __ATOMIC_SEQ_CST); }

    template<typename _Tp>
      _GLIBCXX_ALWAYS_INLINE _Tp
      __xor_fetch(_Tp* __ptr, _Val<_Tp> __i) noexcept
      { return __atomic_xor_fetch(__ptr, __i, __ATOMIC_SEQ_CST); }
    template<typename _Tp>
      _Tp
      __fetch_add_flt(_Tp* __ptr, _Val<_Tp> __i, memory_order __m) noexcept
      {
        _Val<_Tp> __oldval = load(__ptr, memory_order_relaxed);
        _Val<_Tp> __newval = __oldval + __i;
        while (!compare_exchange_weak(__ptr, __oldval, __newval, __m,
                                      memory_order_relaxed))
          __newval = __oldval + __i;
        return __oldval;
      }

    template<typename _Tp>
      _Tp
      __fetch_sub_flt(_Tp* __ptr, _Val<_Tp> __i, memory_order __m) noexcept
      {
        _Val<_Tp> __oldval = load(__ptr, memory_order_relaxed);
        _Val<_Tp> __newval = __oldval - __i;
        while (!compare_exchange_weak(__ptr, __oldval, __newval, __m,
                                      memory_order_relaxed))
          __newval = __oldval - __i;
        return __oldval;
      }

    template<typename _Tp>
      _Tp
      __add_fetch_flt(_Tp* __ptr, _Val<_Tp> __i) noexcept
      {
        _Val<_Tp> __oldval = load(__ptr, memory_order_relaxed);
        _Val<_Tp> __newval = __oldval + __i;
        while (!compare_exchange_weak(__ptr, __oldval, __newval,
                                      memory_order_seq_cst,
                                      memory_order_relaxed))
          __newval = __oldval + __i;
        return __newval;
      }

    template<typename _Tp>
      _Tp
      __sub_fetch_flt(_Tp* __ptr, _Val<_Tp> __i) noexcept
      {
        _Val<_Tp> __oldval = load(__ptr, memory_order_relaxed);
        _Val<_Tp> __newval = __oldval - __i;
        while (!compare_exchange_weak(__ptr, __oldval, __newval,
                                      memory_order_seq_cst,
                                      memory_order_relaxed))
          __newval = __oldval - __i;
        return __newval;
      }
  } // namespace __atomic_impl
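  // The loops above are the standard compare-and-swap idiom: on failure,
  // compare_exchange_weak reloads the expected value, so only the new value
  // needs recomputing. User code can apply the same technique for operations
  // the library does not provide, e.g. a hypothetical atomic fetch_multiply
  // (sketch, not part of the header):
  //
  //   double fetch_multiply(std::atomic<double>& a, double f)
  //   {
  //     double old = a.load(std::memory_order_relaxed);
  //     while (!a.compare_exchange_weak(old, old * f,
  //                                     std::memory_order_seq_cst,
  //                                     std::memory_order_relaxed))
  //       { /* 'old' has been refreshed; retry with old * f */ }
  //     return old;
  //   }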
  // base class for atomic<floating-point-type>
  template<typename _Fp>
    struct __atomic_float
    {
      static_assert(is_floating_point_v<_Fp>);

      static constexpr size_t _S_alignment = __alignof__(_Fp);

    public:
      using value_type = _Fp;
      using difference_type = value_type;

      static constexpr bool is_always_lock_free
        = __atomic_always_lock_free(sizeof(_Fp), 0);

      __atomic_float() = default;
      constexpr
      __atomic_float(_Fp __t) : _M_fp(__t)
      { }

      __atomic_float(const __atomic_float&) = delete;
      __atomic_float& operator=(const __atomic_float&) = delete;
      __atomic_float& operator=(const __atomic_float&) volatile = delete;
      _Fp
      operator=(_Fp __t) volatile noexcept
      {
        this->store(__t);
        return __t;
      }

      _Fp
      operator=(_Fp __t) noexcept
      {
        this->store(__t);
        return __t;
      }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_impl::is_lock_free<sizeof(_Fp), _S_alignment>(); }

      bool
      is_lock_free() const noexcept
      { return __atomic_impl::is_lock_free<sizeof(_Fp), _S_alignment>(); }

      void
      store(_Fp __t, memory_order __m = memory_order_seq_cst) volatile noexcept
      { __atomic_impl::store(&_M_fp, __t, __m); }

      void
      store(_Fp __t, memory_order __m = memory_order_seq_cst) noexcept
      { __atomic_impl::store(&_M_fp, __t, __m); }

      _Fp
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      { return __atomic_impl::load(&_M_fp, __m); }

      _Fp
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return __atomic_impl::load(&_M_fp, __m); }

      operator _Fp() const volatile noexcept { return this->load(); }
      operator _Fp() const noexcept { return this->load(); }
      _Fp
      exchange(_Fp __desired,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_impl::exchange(&_M_fp, __desired, __m); }

      _Fp
      exchange(_Fp __desired,
               memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_impl::exchange(&_M_fp, __desired, __m); }

      bool
      compare_exchange_weak(_Fp& __expected, _Fp __desired,
                            memory_order __success,
                            memory_order __failure) noexcept
      {
        return __atomic_impl::compare_exchange_weak(&_M_fp,
                                                    __expected, __desired,
                                                    __success, __failure);
      }

      bool
      compare_exchange_weak(_Fp& __expected, _Fp __desired,
                            memory_order __success,
                            memory_order __failure) volatile noexcept
      {
        return __atomic_impl::compare_exchange_weak(&_M_fp,
                                                    __expected, __desired,
                                                    __success, __failure);
      }

      bool
      compare_exchange_strong(_Fp& __expected, _Fp __desired,
                              memory_order __success,
                              memory_order __failure) noexcept
      {
        return __atomic_impl::compare_exchange_strong(&_M_fp,
                                                      __expected, __desired,
                                                      __success, __failure);
      }

      bool
      compare_exchange_strong(_Fp& __expected, _Fp __desired,
                              memory_order __success,
                              memory_order __failure) volatile noexcept
      {
        return __atomic_impl::compare_exchange_strong(&_M_fp,
                                                      __expected, __desired,
                                                      __success, __failure);
      }

      bool
      compare_exchange_weak(_Fp& __expected, _Fp __desired,
                            memory_order __order = memory_order_seq_cst)
      noexcept
      {
        return compare_exchange_weak(__expected, __desired, __order,
                                     __cmpexch_failure_order(__order));
      }

      bool
      compare_exchange_weak(_Fp& __expected, _Fp __desired,
                            memory_order __order = memory_order_seq_cst)
      volatile noexcept
      {
        return compare_exchange_weak(__expected, __desired, __order,
                                     __cmpexch_failure_order(__order));
      }

      bool
      compare_exchange_strong(_Fp& __expected, _Fp __desired,
                              memory_order __order = memory_order_seq_cst)
      noexcept
      {
        return compare_exchange_strong(__expected, __desired, __order,
                                       __cmpexch_failure_order(__order));
      }

      bool
      compare_exchange_strong(_Fp& __expected, _Fp __desired,
                              memory_order __order = memory_order_seq_cst)
      volatile noexcept
      {
        return compare_exchange_strong(__expected, __desired, __order,
                                       __cmpexch_failure_order(__order));
      }
#if __cpp_lib_atomic_wait
      _GLIBCXX_ALWAYS_INLINE void
      wait(_Fp __old, memory_order __m = memory_order_seq_cst) const noexcept
      { __atomic_impl::wait(&_M_fp, __old, __m); }

      _GLIBCXX_ALWAYS_INLINE void
      notify_one() const noexcept
      { __atomic_impl::notify_one(&_M_fp); }

      _GLIBCXX_ALWAYS_INLINE void
      notify_all() const noexcept
      { __atomic_impl::notify_all(&_M_fp); }
#endif // __cpp_lib_atomic_wait

      value_type
      fetch_add(value_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_impl::__fetch_add_flt(&_M_fp, __i, __m); }

      value_type
      fetch_add(value_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_impl::__fetch_add_flt(&_M_fp, __i, __m); }

      value_type
      fetch_sub(value_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_impl::__fetch_sub_flt(&_M_fp, __i, __m); }

      value_type
      fetch_sub(value_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_impl::__fetch_sub_flt(&_M_fp, __i, __m); }

      value_type
      operator+=(value_type __i) noexcept
      { return __atomic_impl::__add_fetch_flt(&_M_fp, __i); }

      value_type
      operator+=(value_type __i) volatile noexcept
      { return __atomic_impl::__add_fetch_flt(&_M_fp, __i); }

      value_type
      operator-=(value_type __i) noexcept
      { return __atomic_impl::__sub_fetch_flt(&_M_fp, __i); }

      value_type
      operator-=(value_type __i) volatile noexcept
      { return __atomic_impl::__sub_fetch_flt(&_M_fp, __i); }

    private:
      alignas(_S_alignment) _Fp _M_fp _GLIBCXX20_INIT(0);
    };
#undef _GLIBCXX20_INIT
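  // Usage sketch (hypothetical user code): in C++20, std::atomic<double> is
  // built on __atomic_float, so fetch_add/fetch_sub go through the CAS loops
  // in __atomic_impl rather than a single hardware read-modify-write.
  //
  //   std::atomic<double> sum{0.0};
  //   sum.fetch_add(1.5);                      // __fetch_add_flt CAS loop
  //   sum += 2.5;                              // __add_fetch_flt, yields 4.0
  //   double d = sum.load(std::memory_order_acquire);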
  template<typename _Tp,
           bool = is_integral_v<_Tp>, bool = is_floating_point_v<_Tp>>
    struct __atomic_ref;
  // base class for generic atomic_ref<T>
  template<typename _Tp>
    struct __atomic_ref<_Tp, false, false>
    {
      static_assert(is_trivially_copyable_v<_Tp>);

      // 1/2/4/8/16-byte types must be aligned to at least their size.
      static constexpr int _S_min_alignment
        = (sizeof(_Tp) & (sizeof(_Tp) - 1)) || sizeof(_Tp) > 16
        ? 0 : sizeof(_Tp);

    public:
      using value_type = _Tp;

      static constexpr bool is_always_lock_free
        = __atomic_always_lock_free(sizeof(_Tp), 0);

      static constexpr size_t required_alignment
        = _S_min_alignment > alignof(_Tp) ? _S_min_alignment : alignof(_Tp);

      __atomic_ref() = delete;
      __atomic_ref& operator=(const __atomic_ref&) = delete;

      explicit
      __atomic_ref(_Tp& __t) : _M_ptr(std::__addressof(__t))
      { __glibcxx_assert(((uintptr_t)_M_ptr % required_alignment) == 0); }
      __atomic_ref(const __atomic_ref&) noexcept = default;

      _Tp
      operator=(_Tp __t) const noexcept
      {
        this->store(__t);
        return __t;
      }

      operator _Tp() const noexcept { return this->load(); }

      bool
      is_lock_free() const noexcept
      { return __atomic_impl::is_lock_free<sizeof(_Tp), required_alignment>(); }

      void
      store(_Tp __t, memory_order __m = memory_order_seq_cst) const noexcept
      { __atomic_impl::store(_M_ptr, __t, __m); }

      _Tp
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return __atomic_impl::load(_M_ptr, __m); }

      _Tp
      exchange(_Tp __desired, memory_order __m = memory_order_seq_cst)
      const noexcept
      { return __atomic_impl::exchange(_M_ptr, __desired, __m); }
      bool
      compare_exchange_weak(_Tp& __expected, _Tp __desired,
                            memory_order __success,
                            memory_order __failure) const noexcept
      {
        return __atomic_impl::compare_exchange_weak(_M_ptr,
                                                    __expected, __desired,
                                                    __success, __failure);
      }

      bool
      compare_exchange_strong(_Tp& __expected, _Tp __desired,
                              memory_order __success,
                              memory_order __failure) const noexcept
      {
        return __atomic_impl::compare_exchange_strong(_M_ptr,
                                                      __expected, __desired,
                                                      __success, __failure);
      }

      bool
      compare_exchange_weak(_Tp& __expected, _Tp __desired,
                            memory_order __order = memory_order_seq_cst)
      const noexcept
      {
        return compare_exchange_weak(__expected, __desired, __order,
                                     __cmpexch_failure_order(__order));
      }

      bool
      compare_exchange_strong(_Tp& __expected, _Tp __desired,
                              memory_order __order = memory_order_seq_cst)
      const noexcept
      {
        return compare_exchange_strong(__expected, __desired, __order,
                                       __cmpexch_failure_order(__order));
      }

#if __cpp_lib_atomic_wait
      _GLIBCXX_ALWAYS_INLINE void
      wait(_Tp __old, memory_order __m = memory_order_seq_cst) const noexcept
      { __atomic_impl::wait(_M_ptr, __old, __m); }

      _GLIBCXX_ALWAYS_INLINE void
      notify_one() const noexcept
      { __atomic_impl::notify_one(_M_ptr); }

      _GLIBCXX_ALWAYS_INLINE void
      notify_all() const noexcept
      { __atomic_impl::notify_all(_M_ptr); }
#endif // __cpp_lib_atomic_wait

    private:
      _Tp* _M_ptr;
    };
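  // Usage sketch (hypothetical user code): std::atomic_ref (C++20) wraps a
  // reference to an ordinary trivially copyable object and performs atomic
  // operations on it through this base.
  //
  //   struct Point { int x, y; };
  //   alignas(std::atomic_ref<Point>::required_alignment) Point pt{0, 0};
  //   std::atomic_ref<Point> ref(pt);
  //   ref.store({1, 2});
  //   Point copy = ref.load();   // atomic copy of the whole struct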
  // base class for atomic_ref<integral-type>
  template<typename _Tp>
    struct __atomic_ref<_Tp, true, false>
    {
      static_assert(is_integral_v<_Tp>);

    public:
      using value_type = _Tp;
      using difference_type = value_type;

      static constexpr bool is_always_lock_free
        = __atomic_always_lock_free(sizeof(_Tp), 0);

      static constexpr size_t required_alignment
        = sizeof(_Tp) > alignof(_Tp) ? sizeof(_Tp) : alignof(_Tp);

      __atomic_ref() = delete;
      __atomic_ref& operator=(const __atomic_ref&) = delete;

      explicit
      __atomic_ref(_Tp& __t) : _M_ptr(&__t)
      { __glibcxx_assert(((uintptr_t)_M_ptr % required_alignment) == 0); }
      __atomic_ref(const __atomic_ref&) noexcept = default;

      _Tp
      operator=(_Tp __t) const noexcept
      {
        this->store(__t);
        return __t;
      }

      operator _Tp() const noexcept { return this->load(); }

      bool
      is_lock_free() const noexcept
      {
        return __atomic_impl::is_lock_free<sizeof(_Tp), required_alignment>();
      }

      void
      store(_Tp __t, memory_order __m = memory_order_seq_cst) const noexcept
      { __atomic_impl::store(_M_ptr, __t, __m); }

      _Tp
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return __atomic_impl::load(_M_ptr, __m); }

      _Tp
      exchange(_Tp __desired,
               memory_order __m = memory_order_seq_cst) const noexcept
      { return __atomic_impl::exchange(_M_ptr, __desired, __m); }
      bool
      compare_exchange_weak(_Tp& __expected, _Tp __desired,
                            memory_order __success,
                            memory_order __failure) const noexcept
      {
        return __atomic_impl::compare_exchange_weak(_M_ptr,
                                                    __expected, __desired,
                                                    __success, __failure);
      }

      bool
      compare_exchange_strong(_Tp& __expected, _Tp __desired,
                              memory_order __success,
                              memory_order __failure) const noexcept
      {
        return __atomic_impl::compare_exchange_strong(_M_ptr,
                                                      __expected, __desired,
                                                      __success, __failure);
      }

      bool
      compare_exchange_weak(_Tp& __expected, _Tp __desired,
                            memory_order __order = memory_order_seq_cst)
      const noexcept
      {
        return compare_exchange_weak(__expected, __desired, __order,
                                     __cmpexch_failure_order(__order));
      }

      bool
      compare_exchange_strong(_Tp& __expected, _Tp __desired,
                              memory_order __order = memory_order_seq_cst)
      const noexcept
      {
        return compare_exchange_strong(__expected, __desired, __order,
                                       __cmpexch_failure_order(__order));
      }

#if __cpp_lib_atomic_wait
      _GLIBCXX_ALWAYS_INLINE void
      wait(_Tp __old, memory_order __m = memory_order_seq_cst) const noexcept
      { __atomic_impl::wait(_M_ptr, __old, __m); }

      _GLIBCXX_ALWAYS_INLINE void
      notify_one() const noexcept
      { __atomic_impl::notify_one(_M_ptr); }

      _GLIBCXX_ALWAYS_INLINE void
      notify_all() const noexcept
      { __atomic_impl::notify_all(_M_ptr); }
#endif // __cpp_lib_atomic_wait
      value_type
      fetch_add(value_type __i,
                memory_order __m = memory_order_seq_cst) const noexcept
      { return __atomic_impl::fetch_add(_M_ptr, __i, __m); }

      value_type
      fetch_sub(value_type __i,
                memory_order __m = memory_order_seq_cst) const noexcept
      { return __atomic_impl::fetch_sub(_M_ptr, __i, __m); }

      value_type
      fetch_and(value_type __i,
                memory_order __m = memory_order_seq_cst) const noexcept
      { return __atomic_impl::fetch_and(_M_ptr, __i, __m); }

      value_type
      fetch_or(value_type __i,
               memory_order __m = memory_order_seq_cst) const noexcept
      { return __atomic_impl::fetch_or(_M_ptr, __i, __m); }

      value_type
      fetch_xor(value_type __i,
                memory_order __m = memory_order_seq_cst) const noexcept
      { return __atomic_impl::fetch_xor(_M_ptr, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE value_type
      operator++(int) const noexcept
      { return fetch_add(1); }

      _GLIBCXX_ALWAYS_INLINE value_type
      operator--(int) const noexcept
      { return fetch_sub(1); }

      value_type
      operator++() const noexcept
      { return __atomic_impl::__add_fetch(_M_ptr, value_type(1)); }

      value_type
      operator--() const noexcept
      { return __atomic_impl::__sub_fetch(_M_ptr, value_type(1)); }

      value_type
      operator+=(value_type __i) const noexcept
      { return __atomic_impl::__add_fetch(_M_ptr, __i); }

      value_type
      operator-=(value_type __i) const noexcept
      { return __atomic_impl::__sub_fetch(_M_ptr, __i); }

      value_type
      operator&=(value_type __i) const noexcept
      { return __atomic_impl::__and_fetch(_M_ptr, __i); }

      value_type
      operator|=(value_type __i) const noexcept
      { return __atomic_impl::__or_fetch(_M_ptr, __i); }

      value_type
      operator^=(value_type __i) const noexcept
      { return __atomic_impl::__xor_fetch(_M_ptr, __i); }

    private:
      _Tp* _M_ptr;
    };
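  // Usage sketch (hypothetical user code): atomic_ref<int> layers the integer
  // fetch/operator members above on top of the generic interface, operating
  // in place on a plain int.
  //
  //   int hits = 0;                        // ordinary object, e.g. in shared memory
  //   std::atomic_ref<int> ref(hits);
  //   ref.fetch_add(1, std::memory_order_relaxed);
  //   ++ref;                               // hits is now 2
  //   ref |= 0x4;                          // bitwise ops come from this base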
  // base class for atomic_ref<floating-point-type>
  template<typename _Fp>
    struct __atomic_ref<_Fp, false, true>
    {
      static_assert(is_floating_point_v<_Fp>);

    public:
      using value_type = _Fp;
      using difference_type = value_type;

      static constexpr bool is_always_lock_free
        = __atomic_always_lock_free(sizeof(_Fp), 0);

      static constexpr size_t required_alignment = __alignof__(_Fp);

      __atomic_ref() = delete;
      __atomic_ref& operator=(const __atomic_ref&) = delete;

      explicit
      __atomic_ref(_Fp& __t) : _M_ptr(&__t)
      { __glibcxx_assert(((uintptr_t)_M_ptr % required_alignment) == 0); }
      __atomic_ref(const __atomic_ref&) noexcept = default;

      _Fp
      operator=(_Fp __t) const noexcept
      {
        this->store(__t);
        return __t;
      }

      operator _Fp() const noexcept { return this->load(); }

      bool
      is_lock_free() const noexcept
      {
        return __atomic_impl::is_lock_free<sizeof(_Fp), required_alignment>();
      }

      void
      store(_Fp __t, memory_order __m = memory_order_seq_cst) const noexcept
      { __atomic_impl::store(_M_ptr, __t, __m); }

      _Fp
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return __atomic_impl::load(_M_ptr, __m); }

      _Fp
      exchange(_Fp __desired,
               memory_order __m = memory_order_seq_cst) const noexcept
      { return __atomic_impl::exchange(_M_ptr, __desired, __m); }
      bool
      compare_exchange_weak(_Fp& __expected, _Fp __desired,
                            memory_order __success,
                            memory_order __failure) const noexcept
      {
        return __atomic_impl::compare_exchange_weak(_M_ptr,
                                                    __expected, __desired,
                                                    __success, __failure);
      }

      bool
      compare_exchange_strong(_Fp& __expected, _Fp __desired,
                              memory_order __success,
                              memory_order __failure) const noexcept
      {
        return __atomic_impl::compare_exchange_strong(_M_ptr,
                                                      __expected, __desired,
                                                      __success, __failure);
      }

      bool
      compare_exchange_weak(_Fp& __expected, _Fp __desired,
                            memory_order __order = memory_order_seq_cst)
      const noexcept
      {
        return compare_exchange_weak(__expected, __desired, __order,
                                     __cmpexch_failure_order(__order));
      }

      bool
      compare_exchange_strong(_Fp& __expected, _Fp __desired,
                              memory_order __order = memory_order_seq_cst)
      const noexcept
      {
        return compare_exchange_strong(__expected, __desired, __order,
                                       __cmpexch_failure_order(__order));
      }

#if __cpp_lib_atomic_wait
      _GLIBCXX_ALWAYS_INLINE void
      wait(_Fp __old, memory_order __m = memory_order_seq_cst) const noexcept
      { __atomic_impl::wait(_M_ptr, __old, __m); }

      _GLIBCXX_ALWAYS_INLINE void
      notify_one() const noexcept
      { __atomic_impl::notify_one(_M_ptr); }

      _GLIBCXX_ALWAYS_INLINE void
      notify_all() const noexcept
      { __atomic_impl::notify_all(_M_ptr); }
#endif // __cpp_lib_atomic_wait
      value_type
      fetch_add(value_type __i,
                memory_order __m = memory_order_seq_cst) const noexcept
      { return __atomic_impl::__fetch_add_flt(_M_ptr, __i, __m); }

      value_type
      fetch_sub(value_type __i,
                memory_order __m = memory_order_seq_cst) const noexcept
      { return __atomic_impl::__fetch_sub_flt(_M_ptr, __i, __m); }

      value_type
      operator+=(value_type __i) const noexcept
      { return __atomic_impl::__add_fetch_flt(_M_ptr, __i); }

      value_type
      operator-=(value_type __i) const noexcept
      { return __atomic_impl::__sub_fetch_flt(_M_ptr, __i); }

    private:
      _Fp* _M_ptr;
    };
  // base class for atomic_ref<pointer-type>
  template<typename _Tp>
    struct __atomic_ref<_Tp*, false, false>
    {
    public:
      using value_type = _Tp*;
      using difference_type = ptrdiff_t;

      static constexpr bool is_always_lock_free = ATOMIC_POINTER_LOCK_FREE == 2;

      static constexpr size_t required_alignment = __alignof__(_Tp*);

      __atomic_ref() = delete;
      __atomic_ref& operator=(const __atomic_ref&) = delete;

      explicit
      __atomic_ref(_Tp*& __t) : _M_ptr(std::__addressof(__t))
      { __glibcxx_assert(((uintptr_t)_M_ptr % required_alignment) == 0); }
      __atomic_ref(const __atomic_ref&) noexcept = default;

      _Tp*
      operator=(_Tp* __t) const noexcept
      {
        this->store(__t);
        return __t;
      }

      operator _Tp*() const noexcept { return this->load(); }

      bool
      is_lock_free() const noexcept
      {
        return __atomic_impl::is_lock_free<sizeof(_Tp*), required_alignment>();
      }

      void
      store(_Tp* __t, memory_order __m = memory_order_seq_cst) const noexcept
      { __atomic_impl::store(_M_ptr, __t, __m); }

      _Tp*
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return __atomic_impl::load(_M_ptr, __m); }

      _Tp*
      exchange(_Tp* __desired,
               memory_order __m = memory_order_seq_cst) const noexcept
      { return __atomic_impl::exchange(_M_ptr, __desired, __m); }
      bool
      compare_exchange_weak(_Tp*& __expected, _Tp* __desired,
                            memory_order __success,
                            memory_order __failure) const noexcept
      {
        return __atomic_impl::compare_exchange_weak(_M_ptr,
                                                    __expected, __desired,
                                                    __success, __failure);
      }

      bool
      compare_exchange_strong(_Tp*& __expected, _Tp* __desired,
                              memory_order __success,
                              memory_order __failure) const noexcept
      {
        return __atomic_impl::compare_exchange_strong(_M_ptr,
                                                      __expected, __desired,
                                                      __success, __failure);
      }

      bool
      compare_exchange_weak(_Tp*& __expected, _Tp* __desired,
                            memory_order __order = memory_order_seq_cst)
      const noexcept
      {
        return compare_exchange_weak(__expected, __desired, __order,
                                     __cmpexch_failure_order(__order));
      }

      bool
      compare_exchange_strong(_Tp*& __expected, _Tp* __desired,
                              memory_order __order = memory_order_seq_cst)
      const noexcept
      {
        return compare_exchange_strong(__expected, __desired, __order,
                                       __cmpexch_failure_order(__order));
      }

#if __cpp_lib_atomic_wait
      _GLIBCXX_ALWAYS_INLINE void
      wait(_Tp* __old, memory_order __m = memory_order_seq_cst) const noexcept
      { __atomic_impl::wait(_M_ptr, __old, __m); }

      _GLIBCXX_ALWAYS_INLINE void
      notify_one() const noexcept
      { __atomic_impl::notify_one(_M_ptr); }

      _GLIBCXX_ALWAYS_INLINE void
      notify_all() const noexcept
      { __atomic_impl::notify_all(_M_ptr); }
#endif // __cpp_lib_atomic_wait
      _GLIBCXX_ALWAYS_INLINE value_type
      fetch_add(difference_type __d,
                memory_order __m = memory_order_seq_cst) const noexcept
      { return __atomic_impl::fetch_add(_M_ptr, _S_type_size(__d), __m); }

      _GLIBCXX_ALWAYS_INLINE value_type
      fetch_sub(difference_type __d,
                memory_order __m = memory_order_seq_cst) const noexcept
      { return __atomic_impl::fetch_sub(_M_ptr, _S_type_size(__d), __m); }

      value_type
      operator++(int) const noexcept
      { return fetch_add(1); }

      value_type
      operator--(int) const noexcept
      { return fetch_sub(1); }

      value_type
      operator++() const noexcept
      {
        return __atomic_impl::__add_fetch(_M_ptr, _S_type_size(1));
      }

      value_type
      operator--() const noexcept
      {
        return __atomic_impl::__sub_fetch(_M_ptr, _S_type_size(1));
      }

      value_type
      operator+=(difference_type __d) const noexcept
      {
        return __atomic_impl::__add_fetch(_M_ptr, _S_type_size(__d));
      }

      value_type
      operator-=(difference_type __d) const noexcept
      {
        return __atomic_impl::__sub_fetch(_M_ptr, _S_type_size(__d));
      }

    private:
      static constexpr ptrdiff_t
      _S_type_size(ptrdiff_t __d) noexcept
      {
        static_assert(is_object_v<_Tp>);
        return __d * sizeof(_Tp);
      }

      _Tp** _M_ptr;
    };
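  // Usage sketch (hypothetical user code): atomic_ref<T*> scales arithmetic
  // by sizeof(T) through _S_type_size, exactly like the pointer
  // __atomic_base specialization.
  //
  //   int buf[8];
  //   int* cursor = buf;
  //   std::atomic_ref<int*> ref(cursor);
  //   ref.fetch_add(3);                    // cursor == buf + 3
  //   ref -= 1;                            // cursor == buf + 2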
#endif // C++20

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace std

#endif // _GLIBCXX_ATOMIC_BASE_H