#ifndef _GLIBCXX_ATOMIC_BASE_H
#define _GLIBCXX_ATOMIC_BASE_H 1

#pragma GCC system_header

#if __cplusplus > 201703L && _GLIBCXX_HOSTED
# include <bits/atomic_wait.h>
#endif

#ifndef _GLIBCXX_ALWAYS_INLINE
#define _GLIBCXX_ALWAYS_INLINE inline __attribute__((__always_inline__))
#endif

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION
#if __cplusplus > 201703L
  /// Enumeration for memory_order
  enum class memory_order : int
    { relaxed, consume, acquire, release, acq_rel, seq_cst };

  inline constexpr memory_order memory_order_relaxed = memory_order::relaxed;
  inline constexpr memory_order memory_order_consume = memory_order::consume;
  inline constexpr memory_order memory_order_acquire = memory_order::acquire;
  inline constexpr memory_order memory_order_release = memory_order::release;
  inline constexpr memory_order memory_order_acq_rel = memory_order::acq_rel;
  inline constexpr memory_order memory_order_seq_cst = memory_order::seq_cst;
#else
  typedef enum memory_order
    {
      memory_order_relaxed, memory_order_consume, memory_order_acquire,
      memory_order_release, memory_order_acq_rel, memory_order_seq_cst
    } memory_order;
#endif
  enum __memory_order_modifier
    {
      __memory_order_mask          = 0x0ffff,
      __memory_order_modifier_mask = 0xffff0000,
      __memory_order_hle_acquire   = 0x10000,
      __memory_order_hle_release   = 0x20000
    };

  // Drop release ordering as per [atomics.types.operations.req]/21.
  constexpr memory_order
  __cmpexch_failure_order2(memory_order __m) noexcept
  {
    return __m == memory_order_acq_rel ? memory_order_acquire
      : __m == memory_order_release ? memory_order_relaxed : __m;
  }

  constexpr memory_order
  __cmpexch_failure_order(memory_order __m) noexcept
  {
    return memory_order(__cmpexch_failure_order2(__m & __memory_order_mask)
      | __memory_order_modifier(__m & __memory_order_modifier_mask));
  }

  constexpr bool
  __is_valid_cmpexch_failure_order(memory_order __m) noexcept
  {
    return (__m & __memory_order_mask) != memory_order_release
      && (__m & __memory_order_mask) != memory_order_acq_rel;
  }
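
  // Illustrative sketch (editorial addition, not part of the upstream header):
  // how the helpers above map a compare-exchange success order to a valid
  // failure order, per [atomics.types.operations].
  //
  //   static_assert(__cmpexch_failure_order(memory_order_acq_rel)
  //                   == memory_order_acquire);
  //   static_assert(__cmpexch_failure_order(memory_order_release)
  //                   == memory_order_relaxed);
  //   static_assert(__is_valid_cmpexch_failure_order(memory_order_acquire));
  //   static_assert(!__is_valid_cmpexch_failure_order(memory_order_release));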
  template<typename _IntTp>
    struct __atomic_base;

  _GLIBCXX_ALWAYS_INLINE void
  atomic_thread_fence(memory_order __m) noexcept
  { __atomic_thread_fence(int(__m)); }

  _GLIBCXX_ALWAYS_INLINE void
  atomic_signal_fence(memory_order __m) noexcept
  { __atomic_signal_fence(int(__m)); }

  /// kill_dependency
  template<typename _Tp>
    inline _Tp
    kill_dependency(_Tp __y) noexcept
    {
      _Tp __ret(__y);
      return __ret;
    }
#if __cplusplus >= 202002L
# define __cpp_lib_atomic_value_initialization 201911L
#endif

#if __cpp_lib_atomic_value_initialization
# define _GLIBCXX20_INIT(I) = I
#else
# define _GLIBCXX20_INIT(I)
#endif

#define ATOMIC_VAR_INIT(_VI) { _VI }
  template<typename _Tp>
    struct atomic;

  template<typename _Tp>
    struct atomic<_Tp*>;

  /* The target's "set" value for test-and-set may not be exactly 1.  */
#if __GCC_ATOMIC_TEST_AND_SET_TRUEVAL == 1
  typedef bool __atomic_flag_data_type;
#else
  typedef unsigned char __atomic_flag_data_type;
#endif
  _GLIBCXX_BEGIN_EXTERN_C

  struct __atomic_flag_base
  {
    __atomic_flag_data_type _M_i _GLIBCXX20_INIT({});
  };

  _GLIBCXX_END_EXTERN_C

#define ATOMIC_FLAG_INIT { 0 }

  /// atomic_flag
  struct atomic_flag : public __atomic_flag_base
  {
    // Conversion to ATOMIC_FLAG_INIT.
    constexpr atomic_flag(bool __i) noexcept
      : __atomic_flag_base{ _S_init(__i) }
    { }
    _GLIBCXX_ALWAYS_INLINE bool
    test_and_set(memory_order __m = memory_order_seq_cst) noexcept
    {
      return __atomic_test_and_set (&_M_i, int(__m));
    }

    _GLIBCXX_ALWAYS_INLINE bool
    test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      return __atomic_test_and_set (&_M_i, int(__m));
    }
#if __cplusplus > 201703L
#define __cpp_lib_atomic_flag_test 201907L

    _GLIBCXX_ALWAYS_INLINE bool
    test(memory_order __m = memory_order_seq_cst) const noexcept
    {
      __atomic_flag_data_type __v;
      __atomic_load(&_M_i, &__v, int(__m));
      return __v == __GCC_ATOMIC_TEST_AND_SET_TRUEVAL;
    }

    _GLIBCXX_ALWAYS_INLINE bool
    test(memory_order __m = memory_order_seq_cst) const volatile noexcept
    {
      __atomic_flag_data_type __v;
      __atomic_load(&_M_i, &__v, int(__m));
      return __v == __GCC_ATOMIC_TEST_AND_SET_TRUEVAL;
    }
#if __cpp_lib_atomic_wait
    _GLIBCXX_ALWAYS_INLINE void
    wait(bool __old, memory_order __m = memory_order_seq_cst) const noexcept
    {
      const __atomic_flag_data_type __v
        = __old ? __GCC_ATOMIC_TEST_AND_SET_TRUEVAL : 0;

      std::__atomic_wait_address_v(&_M_i, __v,
          [__m, this] { return __atomic_load_n(&_M_i, int(__m)); });
    }

    _GLIBCXX_ALWAYS_INLINE void
    notify_one() noexcept
    { std::__atomic_notify_address(&_M_i, false); }

    _GLIBCXX_ALWAYS_INLINE void
    notify_all() noexcept
    { std::__atomic_notify_address(&_M_i, true); }
#endif // __cpp_lib_atomic_wait
#endif // C++20
    _GLIBCXX_ALWAYS_INLINE void
    clear(memory_order __m = memory_order_seq_cst) noexcept
    {
      memory_order __b __attribute__ ((__unused__))
        = __m & __memory_order_mask;
      __glibcxx_assert(__b != memory_order_consume);
      __glibcxx_assert(__b != memory_order_acquire);
      __glibcxx_assert(__b != memory_order_acq_rel);

      __atomic_clear (&_M_i, int(__m));
    }

    _GLIBCXX_ALWAYS_INLINE void
    clear(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      memory_order __b __attribute__ ((__unused__))
        = __m & __memory_order_mask;
      __glibcxx_assert(__b != memory_order_consume);
      __glibcxx_assert(__b != memory_order_acquire);
      __glibcxx_assert(__b != memory_order_acq_rel);

      __atomic_clear (&_M_i, int(__m));
    }

  private:
    static constexpr __atomic_flag_data_type
    _S_init(bool __i)
    { return __i ? __GCC_ATOMIC_TEST_AND_SET_TRUEVAL : 0; }
  };
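
  // Illustrative usage sketch (editorial addition, not part of the upstream
  // header): atomic_flag as a minimal spin lock.  test_and_set() returns the
  // previous state, so looping until it returns false acquires the lock;
  // clear() releases it.
  //
  //   #include <atomic>
  //   std::atomic_flag lock_flag = ATOMIC_FLAG_INIT;
  //   void lock()   { while (lock_flag.test_and_set(std::memory_order_acquire)) { } }
  //   void unlock() { lock_flag.clear(std::memory_order_release); }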
  /// Base class for atomic integrals.
  template<typename _ITp>
    struct __atomic_base
    {
      using value_type = _ITp;
      using difference_type = value_type;

    private:
      typedef _ITp __int_type;

      static constexpr int _S_alignment =
        sizeof(_ITp) > alignof(_ITp) ? sizeof(_ITp) : alignof(_ITp);

      alignas(_S_alignment) __int_type _M_i _GLIBCXX20_INIT(0);

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      constexpr __atomic_base(__int_type __i) noexcept : _M_i (__i) { }
      operator __int_type() const noexcept
      { return load(); }

      operator __int_type() const volatile noexcept
      { return load(); }

      __int_type
      operator=(__int_type __i) noexcept
      {
        store(__i);
        return __i;
      }

      __int_type
      operator=(__int_type __i) volatile noexcept
      {
        store(__i);
        return __i;
      }

      __int_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __int_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __int_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __int_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }
      __int_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_i, 1, int(memory_order_seq_cst)); }

      __int_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_i, 1, int(memory_order_seq_cst)); }

      __int_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_i, 1, int(memory_order_seq_cst)); }

      __int_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_i, 1, int(memory_order_seq_cst)); }

      __int_type
      operator+=(__int_type __i) noexcept
      { return __atomic_add_fetch(&_M_i, __i, int(memory_order_seq_cst)); }

      __int_type
      operator+=(__int_type __i) volatile noexcept
      { return __atomic_add_fetch(&_M_i, __i, int(memory_order_seq_cst)); }

      __int_type
      operator-=(__int_type __i) noexcept
      { return __atomic_sub_fetch(&_M_i, __i, int(memory_order_seq_cst)); }

      __int_type
      operator-=(__int_type __i) volatile noexcept
      { return __atomic_sub_fetch(&_M_i, __i, int(memory_order_seq_cst)); }

      __int_type
      operator&=(__int_type __i) noexcept
      { return __atomic_and_fetch(&_M_i, __i, int(memory_order_seq_cst)); }

      __int_type
      operator&=(__int_type __i) volatile noexcept
      { return __atomic_and_fetch(&_M_i, __i, int(memory_order_seq_cst)); }

      __int_type
      operator|=(__int_type __i) noexcept
      { return __atomic_or_fetch(&_M_i, __i, int(memory_order_seq_cst)); }

      __int_type
      operator|=(__int_type __i) volatile noexcept
      { return __atomic_or_fetch(&_M_i, __i, int(memory_order_seq_cst)); }

      __int_type
      operator^=(__int_type __i) noexcept
      { return __atomic_xor_fetch(&_M_i, __i, int(memory_order_seq_cst)); }

      __int_type
      operator^=(__int_type __i) volatile noexcept
      { return __atomic_xor_fetch(&_M_i, __i, int(memory_order_seq_cst)); }
      bool
      is_lock_free() const noexcept
      {
        // Use a fake, minimally aligned pointer.
        return __atomic_is_lock_free(sizeof(_M_i),
            reinterpret_cast<void *>(-_S_alignment));
      }

      bool
      is_lock_free() const volatile noexcept
      {
        // Use a fake, minimally aligned pointer.
        return __atomic_is_lock_free(sizeof(_M_i),
            reinterpret_cast<void *>(-_S_alignment));
      }
      _GLIBCXX_ALWAYS_INLINE void
      store(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept
      {
        memory_order __b __attribute__ ((__unused__))
          = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_i, __i, int(__m));
      }

      _GLIBCXX_ALWAYS_INLINE void
      store(__int_type __i,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        memory_order __b __attribute__ ((__unused__))
          = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_i, __i, int(__m));
      }

      _GLIBCXX_ALWAYS_INLINE __int_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        memory_order __b __attribute__ ((__unused__))
          = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, int(__m));
      }

      _GLIBCXX_ALWAYS_INLINE __int_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        memory_order __b __attribute__ ((__unused__))
          = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, int(__m));
      }

      _GLIBCXX_ALWAYS_INLINE __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, int(__m));
      }

      _GLIBCXX_ALWAYS_INLINE __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, int(__m));
      }
      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1, memory_order __m2) noexcept
      {
        __glibcxx_assert(__is_valid_cmpexch_failure_order(__m2));

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1,
                                           int(__m1), int(__m2));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      {
        __glibcxx_assert(__is_valid_cmpexch_failure_order(__m2));

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1,
                                           int(__m1), int(__m2));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                   memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1, memory_order __m2) noexcept
      {
        __glibcxx_assert(__is_valid_cmpexch_failure_order(__m2));

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0,
                                           int(__m1), int(__m2));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        __glibcxx_assert(__is_valid_cmpexch_failure_order(__m2));

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0,
                                           int(__m1), int(__m2));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                 memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }
#if __cpp_lib_atomic_wait
      _GLIBCXX_ALWAYS_INLINE void
      wait(__int_type __old,
           memory_order __m = memory_order_seq_cst) const noexcept
      {
        std::__atomic_wait_address_v(&_M_i, __old,
                                     [__m, this] { return this->load(__m); });
      }

      _GLIBCXX_ALWAYS_INLINE void
      notify_one() noexcept
      { std::__atomic_notify_address(&_M_i, false); }

      _GLIBCXX_ALWAYS_INLINE void
      notify_all() noexcept
      { std::__atomic_notify_address(&_M_i, true); }
#endif // __cpp_lib_atomic_wait
      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_i, __i, int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_i, __i, int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_i, __i, int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_i, __i, int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_and(&_M_i, __i, int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_and(&_M_i, __i, int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_or(&_M_i, __i, int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_or(&_M_i, __i, int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_xor(&_M_i, __i, int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_xor(&_M_i, __i, int(__m)); }
    };
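
  // Illustrative usage sketch (editorial addition): std::atomic<int>, which
  // derives from __atomic_base<int>, forwards fetch_add/fetch_sub and the
  // compound-assignment operators to the __atomic_* built-ins above.
  //
  //   #include <atomic>
  //   std::atomic<int> counter{0};
  //   int before = counter.fetch_add(1, std::memory_order_relaxed); // old value
  //   counter += 5;                                    // seq_cst read-modify-write
  //   int now = counter.load(std::memory_order_acquire);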
  /// Partial specialization for pointer types.
  template<typename _PTp>
    struct __atomic_base<_PTp*>
    {
    private:
      typedef _PTp* __pointer_type;

      __pointer_type _M_p _GLIBCXX20_INIT(nullptr);

      // Factored out to facilitate explicit specialization.
      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) const { return __d * sizeof(_PTp); }

      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) const volatile { return __d * sizeof(_PTp); }

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      constexpr __atomic_base(__pointer_type __p) noexcept : _M_p (__p) { }
      operator __pointer_type() const noexcept
      { return load(); }

      operator __pointer_type() const volatile noexcept
      { return load(); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      {
        store(__p);
        return __p;
      }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      {
        store(__p);
        return __p;
      }

      __pointer_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __pointer_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __pointer_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }
      __pointer_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  int(memory_order_seq_cst)); }

      __pointer_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  int(memory_order_seq_cst)); }

      __pointer_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  int(memory_order_seq_cst)); }

      __pointer_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  int(memory_order_seq_cst)); }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  int(memory_order_seq_cst)); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  int(memory_order_seq_cst)); }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  int(memory_order_seq_cst)); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  int(memory_order_seq_cst)); }
      bool
      is_lock_free() const noexcept
      {
        // Produce a fake, minimally aligned pointer.
        return __atomic_is_lock_free(sizeof(_M_p),
            reinterpret_cast<void *>(-__alignof(_M_p)));
      }

      bool
      is_lock_free() const volatile noexcept
      {
        // Produce a fake, minimally aligned pointer.
        return __atomic_is_lock_free(sizeof(_M_p),
            reinterpret_cast<void *>(-__alignof(_M_p)));
      }
      _GLIBCXX_ALWAYS_INLINE void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      {
        memory_order __b __attribute__ ((__unused__))
          = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_p, __p, int(__m));
      }

      _GLIBCXX_ALWAYS_INLINE void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        memory_order __b __attribute__ ((__unused__))
          = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_p, __p, int(__m));
      }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        memory_order __b __attribute__ ((__unused__))
          = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, int(__m));
      }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        memory_order __b __attribute__ ((__unused__))
          = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, int(__m));
      }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, int(__m));
      }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, int(__m));
      }
      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m1,
                            memory_order __m2) noexcept
      {
        __glibcxx_assert(__is_valid_cmpexch_failure_order(__m2));

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 1,
                                           int(__m1), int(__m2));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      {
        __glibcxx_assert(__is_valid_cmpexch_failure_order(__m2));

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 1,
                                           int(__m1), int(__m2));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) noexcept
      {
        __glibcxx_assert(__is_valid_cmpexch_failure_order(__m2));

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0,
                                           int(__m1), int(__m2));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        __glibcxx_assert(__is_valid_cmpexch_failure_order(__m2));

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0,
                                           int(__m1), int(__m2));
      }
#if __cpp_lib_atomic_wait
      _GLIBCXX_ALWAYS_INLINE void
      wait(__pointer_type __old,
           memory_order __m = memory_order_seq_cst) const noexcept
      {
        std::__atomic_wait_address_v(&_M_p, __old,
                                     [__m, this]
                                     { return this->load(__m); });
      }

      _GLIBCXX_ALWAYS_INLINE void
      notify_one() const noexcept
      { std::__atomic_notify_address(&_M_p, false); }

      _GLIBCXX_ALWAYS_INLINE void
      notify_all() const noexcept
      { std::__atomic_notify_address(&_M_p, true); }
#endif // __cpp_lib_atomic_wait
      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), int(__m)); }
    };
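
  // Illustrative usage sketch (editorial addition): the pointer partial
  // specialization scales fetch_add/fetch_sub by sizeof(_PTp) via
  // _M_type_size, so arithmetic behaves like ordinary pointer arithmetic.
  //
  //   #include <atomic>
  //   int buf[8];
  //   std::atomic<int*> p{buf};
  //   int* old = p.fetch_add(2);   // old == buf, p now points to buf + 2
  //   ++p;                         // p now points to buf + 3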
  namespace __atomic_impl
  {
    // Implementation details of atomic padding handling.

    template<typename _Tp>
      constexpr bool
      __maybe_has_padding()
      {
#if ! __has_builtin(__builtin_clear_padding)
        return false;
#elif __has_builtin(__has_unique_object_representations)
        return !__has_unique_object_representations(_Tp)
          && !is_same<_Tp, float>::value && !is_same<_Tp, double>::value;
#else
        return true;
#endif
      }

    template<typename _Tp>
      _GLIBCXX_ALWAYS_INLINE _Tp*
      __clear_padding(_Tp& __val) noexcept
      {
        auto* __ptr = std::__addressof(__val);
#if __has_builtin(__builtin_clear_padding)
        if _GLIBCXX17_CONSTEXPR (__atomic_impl::__maybe_has_padding<_Tp>())
          __builtin_clear_padding(__ptr);
#endif
        return __ptr;
      }

    // Remove volatile and create a non-deduced context for value arguments.
    template<typename _Tp>
      using _Val = typename remove_volatile<_Tp>::type;

    template<typename _Tp>
      _GLIBCXX_ALWAYS_INLINE bool
      __compare_exchange(_Tp& __val, _Val<_Tp>& __e, _Val<_Tp>& __i,
                         bool __is_weak,
                         memory_order __s, memory_order __f) noexcept
      {
        __glibcxx_assert(__is_valid_cmpexch_failure_order(__f));

        using _Vp = _Val<_Tp>;

        if _GLIBCXX17_CONSTEXPR (__atomic_impl::__maybe_has_padding<_Vp>())
          {
            // We must not modify __e on success, so cannot clear its padding.
            // Copy into a buffer and clear the padding of the copy instead.
            alignas(_Vp) unsigned char __buf[sizeof(_Vp)];
            _Vp* __exp = ::new((void*)__buf) _Vp(__e);
            __atomic_impl::__clear_padding(*__exp);
            if (__atomic_compare_exchange(std::__addressof(__val), __exp,
                                          __atomic_impl::__clear_padding(__i),
                                          __is_weak, int(__s), int(__f)))
              return true;
            __e = *__exp;
            return false;
          }
        else
          return __atomic_compare_exchange(std::__addressof(__val),
                                           std::__addressof(__e),
                                           std::__addressof(__i),
                                           __is_weak, int(__s), int(__f));
      }
  } // namespace __atomic_impl
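
  // Illustrative sketch (editorial addition): why __compare_exchange clears
  // padding.  For a type with padding bytes, those bytes are not part of the
  // value, so both the expected and desired representations are normalised
  // with __clear_padding before the bitwise __atomic_compare_exchange, which
  // avoids spurious mismatches caused solely by padding contents.
  //
  //   struct _Padded { char c; /* padding bytes here */ int i; };  // hypothetical type
  //   // A compare_exchange on an atomic _Padded object eventually reaches
  //   // __atomic_impl::__compare_exchange, which copies the expected value
  //   // into a buffer and clears padding on the copy and on the desired value.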
#if __cplusplus > 201703L
  // Implementation details of atomic_ref and atomic<floating-point>.
  namespace __atomic_impl
  {
    // Like _Val<T> above, but for difference_type arguments.
    template<typename _Tp>
      using _Diff = __conditional_t<is_pointer_v<_Tp>, ptrdiff_t, _Val<_Tp>>;

    template<size_t _Size, size_t _Align>
      _GLIBCXX_ALWAYS_INLINE bool
      is_lock_free() noexcept
      {
        // Produce a fake, minimally aligned pointer.
        return __atomic_is_lock_free(_Size, reinterpret_cast<void *>(-_Align));
      }

    template<typename _Tp>
      _GLIBCXX_ALWAYS_INLINE void
      store(_Tp* __ptr, _Val<_Tp> __t, memory_order __m) noexcept
      { __atomic_store(__ptr, __atomic_impl::__clear_padding(__t), int(__m)); }
    template<typename _Tp>
      _GLIBCXX_ALWAYS_INLINE _Val<_Tp>
      load(const _Tp* __ptr, memory_order __m) noexcept
      {
        alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
        auto* __dest = reinterpret_cast<_Val<_Tp>*>(__buf);
        __atomic_load(__ptr, __dest, int(__m));
        return *__dest;
      }

    template<typename _Tp>
      _GLIBCXX_ALWAYS_INLINE _Val<_Tp>
      exchange(_Tp* __ptr, _Val<_Tp> __desired, memory_order __m) noexcept
      {
        alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
        auto* __dest = reinterpret_cast<_Val<_Tp>*>(__buf);
        __atomic_exchange(__ptr, __atomic_impl::__clear_padding(__desired),
                          __dest, int(__m));
        return *__dest;
      }
    template<typename _Tp>
      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(_Tp* __ptr, _Val<_Tp>& __expected,
                            _Val<_Tp> __desired, memory_order __success,
                            memory_order __failure) noexcept
      {
        return __atomic_impl::__compare_exchange(*__ptr, __expected, __desired,
                                                 true, __success, __failure);
      }

    template<typename _Tp>
      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(_Tp* __ptr, _Val<_Tp>& __expected,
                              _Val<_Tp> __desired, memory_order __success,
                              memory_order __failure) noexcept
      {
        return __atomic_impl::__compare_exchange(*__ptr, __expected, __desired,
                                                 false, __success, __failure);
      }
#if __cpp_lib_atomic_wait
    template<typename _Tp>
      _GLIBCXX_ALWAYS_INLINE void
      wait(const _Tp* __ptr, _Val<_Tp> __old,
           memory_order __m = memory_order_seq_cst) noexcept
      {
        std::__atomic_wait_address_v(__ptr, __old,
            [__ptr, __m]() { return __atomic_impl::load(__ptr, __m); });
      }

    template<typename _Tp>
      _GLIBCXX_ALWAYS_INLINE void
      notify_one(const _Tp* __ptr) noexcept
      { std::__atomic_notify_address(__ptr, false); }

    template<typename _Tp>
      _GLIBCXX_ALWAYS_INLINE void
      notify_all(const _Tp* __ptr) noexcept
      { std::__atomic_notify_address(__ptr, true); }
#endif // __cpp_lib_atomic_wait
    template<typename _Tp>
      _GLIBCXX_ALWAYS_INLINE _Tp
      fetch_add(_Tp* __ptr, _Diff<_Tp> __i, memory_order __m) noexcept
      { return __atomic_fetch_add(__ptr, __i, int(__m)); }

    template<typename _Tp>
      _GLIBCXX_ALWAYS_INLINE _Tp
      fetch_sub(_Tp* __ptr, _Diff<_Tp> __i, memory_order __m) noexcept
      { return __atomic_fetch_sub(__ptr, __i, int(__m)); }

    template<typename _Tp>
      _GLIBCXX_ALWAYS_INLINE _Tp
      fetch_and(_Tp* __ptr, _Val<_Tp> __i, memory_order __m) noexcept
      { return __atomic_fetch_and(__ptr, __i, int(__m)); }

    template<typename _Tp>
      _GLIBCXX_ALWAYS_INLINE _Tp
      fetch_or(_Tp* __ptr, _Val<_Tp> __i, memory_order __m) noexcept
      { return __atomic_fetch_or(__ptr, __i, int(__m)); }

    template<typename _Tp>
      _GLIBCXX_ALWAYS_INLINE _Tp
      fetch_xor(_Tp* __ptr, _Val<_Tp> __i, memory_order __m) noexcept
      { return __atomic_fetch_xor(__ptr, __i, int(__m)); }
    template<typename _Tp>
      _GLIBCXX_ALWAYS_INLINE _Tp
      __add_fetch(_Tp* __ptr, _Diff<_Tp> __i) noexcept
      { return __atomic_add_fetch(__ptr, __i, __ATOMIC_SEQ_CST); }

    template<typename _Tp>
      _GLIBCXX_ALWAYS_INLINE _Tp
      __sub_fetch(_Tp* __ptr, _Diff<_Tp> __i) noexcept
      { return __atomic_sub_fetch(__ptr, __i, __ATOMIC_SEQ_CST); }

    template<typename _Tp>
      _GLIBCXX_ALWAYS_INLINE _Tp
      __and_fetch(_Tp* __ptr, _Val<_Tp> __i) noexcept
      { return __atomic_and_fetch(__ptr, __i, __ATOMIC_SEQ_CST); }

    template<typename _Tp>
      _GLIBCXX_ALWAYS_INLINE _Tp
      __or_fetch(_Tp* __ptr, _Val<_Tp> __i) noexcept
      { return __atomic_or_fetch(__ptr, __i, __ATOMIC_SEQ_CST); }

    template<typename _Tp>
      _GLIBCXX_ALWAYS_INLINE _Tp
      __xor_fetch(_Tp* __ptr, _Val<_Tp> __i) noexcept
      { return __atomic_xor_fetch(__ptr, __i, __ATOMIC_SEQ_CST); }
    template<typename _Tp>
      _Tp
      __fetch_add_flt(_Tp* __ptr, _Val<_Tp> __i, memory_order __m) noexcept
      {
        _Val<_Tp> __oldval = load(__ptr, memory_order_relaxed);
        _Val<_Tp> __newval = __oldval + __i;
        while (!compare_exchange_weak(__ptr, __oldval, __newval, __m,
                                      memory_order_relaxed))
          __newval = __oldval + __i;
        return __oldval;
      }

    template<typename _Tp>
      _Tp
      __fetch_sub_flt(_Tp* __ptr, _Val<_Tp> __i, memory_order __m) noexcept
      {
        _Val<_Tp> __oldval = load(__ptr, memory_order_relaxed);
        _Val<_Tp> __newval = __oldval - __i;
        while (!compare_exchange_weak(__ptr, __oldval, __newval, __m,
                                      memory_order_relaxed))
          __newval = __oldval - __i;
        return __oldval;
      }

    template<typename _Tp>
      _Tp
      __add_fetch_flt(_Tp* __ptr, _Val<_Tp> __i) noexcept
      {
        _Val<_Tp> __oldval = load(__ptr, memory_order_relaxed);
        _Val<_Tp> __newval = __oldval + __i;
        while (!compare_exchange_weak(__ptr, __oldval, __newval,
                                      memory_order_seq_cst,
                                      memory_order_relaxed))
          __newval = __oldval + __i;
        return __newval;
      }

    template<typename _Tp>
      _Tp
      __sub_fetch_flt(_Tp* __ptr, _Val<_Tp> __i) noexcept
      {
        _Val<_Tp> __oldval = load(__ptr, memory_order_relaxed);
        _Val<_Tp> __newval = __oldval - __i;
        while (!compare_exchange_weak(__ptr, __oldval, __newval,
                                      memory_order_seq_cst,
                                      memory_order_relaxed))
          __newval = __oldval - __i;
        return __newval;
      }
  } // namespace __atomic_impl
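
  // Illustrative sketch (editorial addition): there is no __atomic_fetch_add
  // built-in for floating-point types, so __fetch_add_flt above emulates it
  // with a compare-exchange loop.  A user-level equivalent looks like:
  //
  //   #include <atomic>
  //   double fetch_add(std::atomic<double>& a, double d)
  //   {
  //     double old = a.load(std::memory_order_relaxed);
  //     while (!a.compare_exchange_weak(old, old + d))
  //       { /* 'old' was reloaded by the failed CAS; retry */ }
  //     return old;
  //   }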
  // Base class for atomic<floating-point-type>.
  template<typename _Fp>
    struct __atomic_float
    {
      static_assert(is_floating_point_v<_Fp>);

      static constexpr size_t _S_alignment = __alignof__(_Fp);

    public:
      using value_type = _Fp;
      using difference_type = value_type;

      static constexpr bool is_always_lock_free
        = __atomic_always_lock_free(sizeof(_Fp), 0);

      __atomic_float() = default;

      constexpr
      __atomic_float(_Fp __t) : _M_fp(__t)
      { }

      __atomic_float(const __atomic_float&) = delete;
      __atomic_float& operator=(const __atomic_float&) = delete;
      __atomic_float& operator=(const __atomic_float&) volatile = delete;
      _Fp
      operator=(_Fp __t) volatile noexcept
      {
        this->store(__t);
        return __t;
      }

      _Fp
      operator=(_Fp __t) noexcept
      {
        this->store(__t);
        return __t;
      }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_impl::is_lock_free<sizeof(_Fp), _S_alignment>(); }

      bool
      is_lock_free() const noexcept
      { return __atomic_impl::is_lock_free<sizeof(_Fp), _S_alignment>(); }

      void
      store(_Fp __t, memory_order __m = memory_order_seq_cst) volatile noexcept
      { __atomic_impl::store(&_M_fp, __t, __m); }

      void
      store(_Fp __t, memory_order __m = memory_order_seq_cst) noexcept
      { __atomic_impl::store(&_M_fp, __t, __m); }

      _Fp
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      { return __atomic_impl::load(&_M_fp, __m); }

      _Fp
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return __atomic_impl::load(&_M_fp, __m); }

      operator _Fp() const volatile noexcept { return this->load(); }
      operator _Fp() const noexcept { return this->load(); }

      _Fp
      exchange(_Fp __desired,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_impl::exchange(&_M_fp, __desired, __m); }

      _Fp
      exchange(_Fp __desired,
               memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_impl::exchange(&_M_fp, __desired, __m); }
      bool
      compare_exchange_weak(_Fp& __expected, _Fp __desired,
                            memory_order __success,
                            memory_order __failure) noexcept
      {
        return __atomic_impl::compare_exchange_weak(&_M_fp,
                                                    __expected, __desired,
                                                    __success, __failure);
      }

      bool
      compare_exchange_weak(_Fp& __expected, _Fp __desired,
                            memory_order __success,
                            memory_order __failure) volatile noexcept
      {
        return __atomic_impl::compare_exchange_weak(&_M_fp,
                                                    __expected, __desired,
                                                    __success, __failure);
      }

      bool
      compare_exchange_strong(_Fp& __expected, _Fp __desired,
                              memory_order __success,
                              memory_order __failure) noexcept
      {
        return __atomic_impl::compare_exchange_strong(&_M_fp,
                                                      __expected, __desired,
                                                      __success, __failure);
      }

      bool
      compare_exchange_strong(_Fp& __expected, _Fp __desired,
                              memory_order __success,
                              memory_order __failure) volatile noexcept
      {
        return __atomic_impl::compare_exchange_strong(&_M_fp,
                                                      __expected, __desired,
                                                      __success, __failure);
      }

      bool
      compare_exchange_weak(_Fp& __expected, _Fp __desired,
                            memory_order __order = memory_order_seq_cst)
      noexcept
      {
        return compare_exchange_weak(__expected, __desired, __order,
                                     __cmpexch_failure_order(__order));
      }

      bool
      compare_exchange_weak(_Fp& __expected, _Fp __desired,
                            memory_order __order = memory_order_seq_cst)
      volatile noexcept
      {
        return compare_exchange_weak(__expected, __desired, __order,
                                     __cmpexch_failure_order(__order));
      }

      bool
      compare_exchange_strong(_Fp& __expected, _Fp __desired,
                              memory_order __order = memory_order_seq_cst)
      noexcept
      {
        return compare_exchange_strong(__expected, __desired, __order,
                                       __cmpexch_failure_order(__order));
      }

      bool
      compare_exchange_strong(_Fp& __expected, _Fp __desired,
                              memory_order __order = memory_order_seq_cst)
      volatile noexcept
      {
        return compare_exchange_strong(__expected, __desired, __order,
                                       __cmpexch_failure_order(__order));
      }
#if __cpp_lib_atomic_wait
      _GLIBCXX_ALWAYS_INLINE void
      wait(_Fp __old, memory_order __m = memory_order_seq_cst) const noexcept
      { __atomic_impl::wait(&_M_fp, __old, __m); }

      _GLIBCXX_ALWAYS_INLINE void
      notify_one() const noexcept
      { __atomic_impl::notify_one(&_M_fp); }

      _GLIBCXX_ALWAYS_INLINE void
      notify_all() const noexcept
      { __atomic_impl::notify_all(&_M_fp); }
#endif // __cpp_lib_atomic_wait

      value_type
      fetch_add(value_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_impl::__fetch_add_flt(&_M_fp, __i, __m); }

      value_type
      fetch_add(value_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_impl::__fetch_add_flt(&_M_fp, __i, __m); }

      value_type
      fetch_sub(value_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_impl::__fetch_sub_flt(&_M_fp, __i, __m); }

      value_type
      fetch_sub(value_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_impl::__fetch_sub_flt(&_M_fp, __i, __m); }

      value_type
      operator+=(value_type __i) noexcept
      { return __atomic_impl::__add_fetch_flt(&_M_fp, __i); }

      value_type
      operator+=(value_type __i) volatile noexcept
      { return __atomic_impl::__add_fetch_flt(&_M_fp, __i); }

      value_type
      operator-=(value_type __i) noexcept
      { return __atomic_impl::__sub_fetch_flt(&_M_fp, __i); }

      value_type
      operator-=(value_type __i) volatile noexcept
      { return __atomic_impl::__sub_fetch_flt(&_M_fp, __i); }

    private:
      alignas(_S_alignment) _Fp _M_fp _GLIBCXX20_INIT(0);
    };
#undef _GLIBCXX20_INIT
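
  // Illustrative usage sketch (editorial addition): std::atomic<float> and
  // std::atomic<double> derive from __atomic_float and gain fetch_add,
  // fetch_sub, operator+= and operator-= in C++20.
  //
  //   #include <atomic>
  //   std::atomic<double> sum{0.0};
  //   sum.fetch_add(1.5, std::memory_order_relaxed);  // CAS loop under the hood
  //   sum += 2.5;                                     // seq_cst read-modify-write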
  template<typename _Tp,
           bool = is_integral_v<_Tp>, bool = is_floating_point_v<_Tp>>
    struct __atomic_ref;

  // Base class for non-integral, non-floating-point, non-pointer types.
  template<typename _Tp>
    struct __atomic_ref<_Tp, false, false>
    {
      static_assert(is_trivially_copyable_v<_Tp>);

      // 1/2/4/8/16-byte types must be aligned to at least their size.
      static constexpr int _S_min_alignment
        = (sizeof(_Tp) & (sizeof(_Tp) - 1)) || sizeof(_Tp) > 16
        ? 0 : sizeof(_Tp);

    public:
      using value_type = _Tp;

      static constexpr bool is_always_lock_free
        = __atomic_always_lock_free(sizeof(_Tp), 0);

      static constexpr size_t required_alignment
        = _S_min_alignment > alignof(_Tp) ? _S_min_alignment : alignof(_Tp);

      __atomic_ref() = delete;
      __atomic_ref& operator=(const __atomic_ref&) = delete;

      explicit
      __atomic_ref(_Tp& __t) : _M_ptr(std::__addressof(__t))
      { __glibcxx_assert(((uintptr_t)_M_ptr % required_alignment) == 0); }

      __atomic_ref(const __atomic_ref&) noexcept = default;
      _Tp
      operator=(_Tp __t) const noexcept
      {
        this->store(__t);
        return __t;
      }

      operator _Tp() const noexcept { return this->load(); }

      bool
      is_lock_free() const noexcept
      { return __atomic_impl::is_lock_free<sizeof(_Tp), required_alignment>(); }

      void
      store(_Tp __t, memory_order __m = memory_order_seq_cst) const noexcept
      { __atomic_impl::store(_M_ptr, __t, __m); }

      _Tp
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return __atomic_impl::load(_M_ptr, __m); }

      _Tp
      exchange(_Tp __desired, memory_order __m = memory_order_seq_cst)
      const noexcept
      { return __atomic_impl::exchange(_M_ptr, __desired, __m); }

      bool
      compare_exchange_weak(_Tp& __expected, _Tp __desired,
                            memory_order __success,
                            memory_order __failure) const noexcept
      {
        return __atomic_impl::compare_exchange_weak(_M_ptr,
                                                    __expected, __desired,
                                                    __success, __failure);
      }

      bool
      compare_exchange_strong(_Tp& __expected, _Tp __desired,
                              memory_order __success,
                              memory_order __failure) const noexcept
      {
        return __atomic_impl::compare_exchange_strong(_M_ptr,
                                                      __expected, __desired,
                                                      __success, __failure);
      }

      bool
      compare_exchange_weak(_Tp& __expected, _Tp __desired,
                            memory_order __order = memory_order_seq_cst)
      const noexcept
      {
        return compare_exchange_weak(__expected, __desired, __order,
                                     __cmpexch_failure_order(__order));
      }

      bool
      compare_exchange_strong(_Tp& __expected, _Tp __desired,
                              memory_order __order = memory_order_seq_cst)
      const noexcept
      {
        return compare_exchange_strong(__expected, __desired, __order,
                                       __cmpexch_failure_order(__order));
      }

#if __cpp_lib_atomic_wait
      _GLIBCXX_ALWAYS_INLINE void
      wait(_Tp __old, memory_order __m = memory_order_seq_cst) const noexcept
      { __atomic_impl::wait(_M_ptr, __old, __m); }

      _GLIBCXX_ALWAYS_INLINE void
      notify_one() const noexcept
      { __atomic_impl::notify_one(_M_ptr); }

      _GLIBCXX_ALWAYS_INLINE void
      notify_all() const noexcept
      { __atomic_impl::notify_all(_M_ptr); }
#endif // __cpp_lib_atomic_wait

    private:
      _Tp* _M_ptr;
    };
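
  // Illustrative usage sketch (editorial addition): std::atomic_ref, built on
  // __atomic_ref, gives atomic access to an ordinary object; the object must
  // be suitably aligned and outlive every atomic_ref bound to it.
  //
  //   #include <atomic>
  //   struct Point { int x, y; };          // trivially copyable, hypothetical type
  //   alignas(std::atomic_ref<Point>::required_alignment) Point pt{0, 0};
  //   std::atomic_ref<Point> ref(pt);
  //   ref.store({1, 2});
  //   Point copy = ref.load();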
  // Base class for atomic_ref<integral-type>.
  template<typename _Tp>
    struct __atomic_ref<_Tp, true, false>
    {
      static_assert(is_integral_v<_Tp>);

    public:
      using value_type = _Tp;
      using difference_type = value_type;

      static constexpr bool is_always_lock_free
        = __atomic_always_lock_free(sizeof(_Tp), 0);

      static constexpr size_t required_alignment
        = sizeof(_Tp) > alignof(_Tp) ? sizeof(_Tp) : alignof(_Tp);

      __atomic_ref() = delete;
      __atomic_ref& operator=(const __atomic_ref&) = delete;

      explicit
      __atomic_ref(_Tp& __t) : _M_ptr(&__t)
      { __glibcxx_assert(((uintptr_t)_M_ptr % required_alignment) == 0); }

      __atomic_ref(const __atomic_ref&) noexcept = default;
      _Tp
      operator=(_Tp __t) const noexcept
      {
        this->store(__t);
        return __t;
      }

      operator _Tp() const noexcept { return this->load(); }

      bool
      is_lock_free() const noexcept
      {
        return __atomic_impl::is_lock_free<sizeof(_Tp), required_alignment>();
      }

      void
      store(_Tp __t, memory_order __m = memory_order_seq_cst) const noexcept
      { __atomic_impl::store(_M_ptr, __t, __m); }

      _Tp
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return __atomic_impl::load(_M_ptr, __m); }

      _Tp
      exchange(_Tp __desired,
               memory_order __m = memory_order_seq_cst) const noexcept
      { return __atomic_impl::exchange(_M_ptr, __desired, __m); }

      bool
      compare_exchange_weak(_Tp& __expected, _Tp __desired,
                            memory_order __success,
                            memory_order __failure) const noexcept
      {
        return __atomic_impl::compare_exchange_weak(_M_ptr,
                                                    __expected, __desired,
                                                    __success, __failure);
      }

      bool
      compare_exchange_strong(_Tp& __expected, _Tp __desired,
                              memory_order __success,
                              memory_order __failure) const noexcept
      {
        return __atomic_impl::compare_exchange_strong(_M_ptr,
                                                      __expected, __desired,
                                                      __success, __failure);
      }

      bool
      compare_exchange_weak(_Tp& __expected, _Tp __desired,
                            memory_order __order = memory_order_seq_cst)
      const noexcept
      {
        return compare_exchange_weak(__expected, __desired, __order,
                                     __cmpexch_failure_order(__order));
      }

      bool
      compare_exchange_strong(_Tp& __expected, _Tp __desired,
                              memory_order __order = memory_order_seq_cst)
      const noexcept
      {
        return compare_exchange_strong(__expected, __desired, __order,
                                       __cmpexch_failure_order(__order));
      }

#if __cpp_lib_atomic_wait
      _GLIBCXX_ALWAYS_INLINE void
      wait(_Tp __old, memory_order __m = memory_order_seq_cst) const noexcept
      { __atomic_impl::wait(_M_ptr, __old, __m); }

      _GLIBCXX_ALWAYS_INLINE void
      notify_one() const noexcept
      { __atomic_impl::notify_one(_M_ptr); }

      _GLIBCXX_ALWAYS_INLINE void
      notify_all() const noexcept
      { __atomic_impl::notify_all(_M_ptr); }
#endif // __cpp_lib_atomic_wait

      value_type
      fetch_add(value_type __i,
                memory_order __m = memory_order_seq_cst) const noexcept
      { return __atomic_impl::fetch_add(_M_ptr, __i, __m); }

      value_type
      fetch_sub(value_type __i,
                memory_order __m = memory_order_seq_cst) const noexcept
      { return __atomic_impl::fetch_sub(_M_ptr, __i, __m); }

      value_type
      fetch_and(value_type __i,
                memory_order __m = memory_order_seq_cst) const noexcept
      { return __atomic_impl::fetch_and(_M_ptr, __i, __m); }

      value_type
      fetch_or(value_type __i,
               memory_order __m = memory_order_seq_cst) const noexcept
      { return __atomic_impl::fetch_or(_M_ptr, __i, __m); }

      value_type
      fetch_xor(value_type __i,
                memory_order __m = memory_order_seq_cst) const noexcept
      { return __atomic_impl::fetch_xor(_M_ptr, __i, __m); }
      _GLIBCXX_ALWAYS_INLINE value_type
      operator++(int) const noexcept
      { return fetch_add(1); }

      _GLIBCXX_ALWAYS_INLINE value_type
      operator--(int) const noexcept
      { return fetch_sub(1); }

      value_type
      operator++() const noexcept
      { return __atomic_impl::__add_fetch(_M_ptr, value_type(1)); }

      value_type
      operator--() const noexcept
      { return __atomic_impl::__sub_fetch(_M_ptr, value_type(1)); }

      value_type
      operator+=(value_type __i) const noexcept
      { return __atomic_impl::__add_fetch(_M_ptr, __i); }

      value_type
      operator-=(value_type __i) const noexcept
      { return __atomic_impl::__sub_fetch(_M_ptr, __i); }

      value_type
      operator&=(value_type __i) const noexcept
      { return __atomic_impl::__and_fetch(_M_ptr, __i); }

      value_type
      operator|=(value_type __i) const noexcept
      { return __atomic_impl::__or_fetch(_M_ptr, __i); }

      value_type
      operator^=(value_type __i) const noexcept
      { return __atomic_impl::__xor_fetch(_M_ptr, __i); }

    private:
      _Tp* _M_ptr;
    };
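
  // Illustrative usage sketch (editorial addition): the integral
  // specialization adds the fetch_* operations and the arithmetic/bitwise
  // operators, so a plain int can be updated atomically without changing its
  // type.
  //
  //   #include <atomic>
  //   int hits = 0;                        // ordinary int, hypothetical variable
  //   std::atomic_ref<int> counter(hits);
  //   counter.fetch_add(1, std::memory_order_relaxed);
  //   counter |= 0x80;                     // seq_cst or-and-fetch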
  // Base class for atomic_ref<floating-point-type>.
  template<typename _Fp>
    struct __atomic_ref<_Fp, false, true>
    {
      static_assert(is_floating_point_v<_Fp>);

    public:
      using value_type = _Fp;
      using difference_type = value_type;

      static constexpr bool is_always_lock_free
        = __atomic_always_lock_free(sizeof(_Fp), 0);

      static constexpr size_t required_alignment = __alignof__(_Fp);

      __atomic_ref() = delete;
      __atomic_ref& operator=(const __atomic_ref&) = delete;

      explicit
      __atomic_ref(_Fp& __t) : _M_ptr(&__t)
      { __glibcxx_assert(((uintptr_t)_M_ptr % required_alignment) == 0); }

      __atomic_ref(const __atomic_ref&) noexcept = default;
      _Fp
      operator=(_Fp __t) const noexcept
      {
        this->store(__t);
        return __t;
      }

      operator _Fp() const noexcept { return this->load(); }

      bool
      is_lock_free() const noexcept
      {
        return __atomic_impl::is_lock_free<sizeof(_Fp), required_alignment>();
      }

      void
      store(_Fp __t, memory_order __m = memory_order_seq_cst) const noexcept
      { __atomic_impl::store(_M_ptr, __t, __m); }

      _Fp
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return __atomic_impl::load(_M_ptr, __m); }

      _Fp
      exchange(_Fp __desired,
               memory_order __m = memory_order_seq_cst) const noexcept
      { return __atomic_impl::exchange(_M_ptr, __desired, __m); }

      bool
      compare_exchange_weak(_Fp& __expected, _Fp __desired,
                            memory_order __success,
                            memory_order __failure) const noexcept
      {
        return __atomic_impl::compare_exchange_weak(_M_ptr,
                                                    __expected, __desired,
                                                    __success, __failure);
      }

      bool
      compare_exchange_strong(_Fp& __expected, _Fp __desired,
                              memory_order __success,
                              memory_order __failure) const noexcept
      {
        return __atomic_impl::compare_exchange_strong(_M_ptr,
                                                      __expected, __desired,
                                                      __success, __failure);
      }

      bool
      compare_exchange_weak(_Fp& __expected, _Fp __desired,
                            memory_order __order = memory_order_seq_cst)
      const noexcept
      {
        return compare_exchange_weak(__expected, __desired, __order,
                                     __cmpexch_failure_order(__order));
      }

      bool
      compare_exchange_strong(_Fp& __expected, _Fp __desired,
                              memory_order __order = memory_order_seq_cst)
      const noexcept
      {
        return compare_exchange_strong(__expected, __desired, __order,
                                       __cmpexch_failure_order(__order));
      }

#if __cpp_lib_atomic_wait
      _GLIBCXX_ALWAYS_INLINE void
      wait(_Fp __old, memory_order __m = memory_order_seq_cst) const noexcept
      { __atomic_impl::wait(_M_ptr, __old, __m); }

      _GLIBCXX_ALWAYS_INLINE void
      notify_one() const noexcept
      { __atomic_impl::notify_one(_M_ptr); }

      _GLIBCXX_ALWAYS_INLINE void
      notify_all() const noexcept
      { __atomic_impl::notify_all(_M_ptr); }
#endif // __cpp_lib_atomic_wait

      value_type
      fetch_add(value_type __i,
                memory_order __m = memory_order_seq_cst) const noexcept
      { return __atomic_impl::__fetch_add_flt(_M_ptr, __i, __m); }

      value_type
      fetch_sub(value_type __i,
                memory_order __m = memory_order_seq_cst) const noexcept
      { return __atomic_impl::__fetch_sub_flt(_M_ptr, __i, __m); }

      value_type
      operator+=(value_type __i) const noexcept
      { return __atomic_impl::__add_fetch_flt(_M_ptr, __i); }

      value_type
      operator-=(value_type __i) const noexcept
      { return __atomic_impl::__sub_fetch_flt(_M_ptr, __i); }

    private:
      _Fp* _M_ptr;
    };
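
  // Illustrative usage sketch (editorial addition): the floating-point
  // specialization provides fetch_add/fetch_sub via the CAS-loop helpers
  // above (__fetch_add_flt and friends).
  //
  //   #include <atomic>
  //   double total = 0.0;                  // hypothetical variable
  //   std::atomic_ref<double> ref(total);
  //   ref.fetch_add(0.5);
  //   ref -= 0.25;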
  // Base class for atomic_ref<pointer-type>.
  template<typename _Tp>
    struct __atomic_ref<_Tp*, false, false>
    {
    public:
      using value_type = _Tp*;
      using difference_type = ptrdiff_t;

      static constexpr bool is_always_lock_free = ATOMIC_POINTER_LOCK_FREE == 2;

      static constexpr size_t required_alignment = __alignof__(_Tp*);

      __atomic_ref() = delete;
      __atomic_ref& operator=(const __atomic_ref&) = delete;

      explicit
      __atomic_ref(_Tp*& __t) : _M_ptr(std::__addressof(__t))
      { __glibcxx_assert(((uintptr_t)_M_ptr % required_alignment) == 0); }

      __atomic_ref(const __atomic_ref&) noexcept = default;

      _Tp*
      operator=(_Tp* __t) const noexcept
      {
        this->store(__t);
        return __t;
      }

      operator _Tp*() const noexcept { return this->load(); }
      bool
      is_lock_free() const noexcept
      {
        return __atomic_impl::is_lock_free<sizeof(_Tp*), required_alignment>();
      }

      void
      store(_Tp* __t, memory_order __m = memory_order_seq_cst) const noexcept
      { __atomic_impl::store(_M_ptr, __t, __m); }

      _Tp*
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return __atomic_impl::load(_M_ptr, __m); }

      _Tp*
      exchange(_Tp* __desired,
               memory_order __m = memory_order_seq_cst) const noexcept
      { return __atomic_impl::exchange(_M_ptr, __desired, __m); }

      bool
      compare_exchange_weak(_Tp*& __expected, _Tp* __desired,
                            memory_order __success,
                            memory_order __failure) const noexcept
      {
        return __atomic_impl::compare_exchange_weak(_M_ptr,
                                                    __expected, __desired,
                                                    __success, __failure);
      }

      bool
      compare_exchange_strong(_Tp*& __expected, _Tp* __desired,
                              memory_order __success,
                              memory_order __failure) const noexcept
      {
        return __atomic_impl::compare_exchange_strong(_M_ptr,
                                                      __expected, __desired,
                                                      __success, __failure);
      }

      bool
      compare_exchange_weak(_Tp*& __expected, _Tp* __desired,
                            memory_order __order = memory_order_seq_cst)
      const noexcept
      {
        return compare_exchange_weak(__expected, __desired, __order,
                                     __cmpexch_failure_order(__order));
      }

      bool
      compare_exchange_strong(_Tp*& __expected, _Tp* __desired,
                              memory_order __order = memory_order_seq_cst)
      const noexcept
      {
        return compare_exchange_strong(__expected, __desired, __order,
                                       __cmpexch_failure_order(__order));
      }

#if __cpp_lib_atomic_wait
      _GLIBCXX_ALWAYS_INLINE void
      wait(_Tp* __old, memory_order __m = memory_order_seq_cst) const noexcept
      { __atomic_impl::wait(_M_ptr, __old, __m); }

      _GLIBCXX_ALWAYS_INLINE void
      notify_one() const noexcept
      { __atomic_impl::notify_one(_M_ptr); }

      _GLIBCXX_ALWAYS_INLINE void
      notify_all() const noexcept
      { __atomic_impl::notify_all(_M_ptr); }
#endif // __cpp_lib_atomic_wait
      _GLIBCXX_ALWAYS_INLINE value_type
      fetch_add(difference_type __d,
                memory_order __m = memory_order_seq_cst) const noexcept
      { return __atomic_impl::fetch_add(_M_ptr, _S_type_size(__d), __m); }

      _GLIBCXX_ALWAYS_INLINE value_type
      fetch_sub(difference_type __d,
                memory_order __m = memory_order_seq_cst) const noexcept
      { return __atomic_impl::fetch_sub(_M_ptr, _S_type_size(__d), __m); }

      value_type
      operator++(int) const noexcept
      { return fetch_add(1); }

      value_type
      operator--(int) const noexcept
      { return fetch_sub(1); }

      value_type
      operator++() const noexcept
      {
        return __atomic_impl::__add_fetch(_M_ptr, _S_type_size(1));
      }

      value_type
      operator--() const noexcept
      {
        return __atomic_impl::__sub_fetch(_M_ptr, _S_type_size(1));
      }

      value_type
      operator+=(difference_type __d) const noexcept
      {
        return __atomic_impl::__add_fetch(_M_ptr, _S_type_size(__d));
      }

      value_type
      operator-=(difference_type __d) const noexcept
      {
        return __atomic_impl::__sub_fetch(_M_ptr, _S_type_size(__d));
      }

    private:
      static constexpr ptrdiff_t
      _S_type_size(ptrdiff_t __d) noexcept
      {
        static_assert(is_object_v<_Tp>);
        return __d * sizeof(_Tp);
      }

      _Tp** _M_ptr;
    };
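
  // Illustrative usage sketch (editorial addition): the pointer specialization
  // scales fetch_add/fetch_sub by sizeof(_Tp) through _S_type_size, mirroring
  // built-in pointer arithmetic.
  //
  //   #include <atomic>
  //   int buf[4];
  //   int* cursor = buf;                   // hypothetical variable
  //   std::atomic_ref<int*> ref(cursor);
  //   ref.fetch_add(1);                    // cursor now points to buf + 1
  //   ++ref;                               // cursor now points to buf + 2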
#endif // C++20

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace std

#endif // _GLIBCXX_ATOMIC_BASE_H