#ifndef _GLIBCXX_ATOMIC_BASE_H
#define _GLIBCXX_ATOMIC_BASE_H 1

#pragma GCC system_header

#include <bits/c++config.h>
#include <stdbool.h>
#include <stdint.h>
#include <bits/atomic_lockfree_defines.h>

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION
  /// Enumeration for memory_order.
  typedef enum memory_order
    {
      memory_order_relaxed,
      memory_order_consume,
      memory_order_acquire,
      memory_order_release,
      memory_order_acq_rel,
      memory_order_seq_cst
    } memory_order;

  enum __memory_order_modifier
    {
      __memory_order_mask          = 0x0ffff,
      __memory_order_modifier_mask = 0xffff0000,
      __memory_order_hle_acquire   = 0x10000,
      __memory_order_hle_release   = 0x20000
    };
  constexpr memory_order
  operator|(memory_order __m, __memory_order_modifier __mod)
  { return memory_order(__m | int(__mod)); }
  constexpr memory_order
  operator&(memory_order __m, __memory_order_modifier __mod)
  { return memory_order(__m & int(__mod)); }
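  // Illustrative sketch (not part of the header): the modifier bits
  // compose with a plain memory_order through the operators above,
  // e.g. for hardware lock elision hints:
  //
  //   memory_order __mo = memory_order_acquire | __memory_order_hle_acquire;
  //   // __mo carries memory_order_acquire in the low bits and the HLE
  //   // hint in the high bits; (__mo & __memory_order_mask) recovers
  //   // the plain order.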
  // Drop release ordering as per [atomics.types.operations.req]/21.
  constexpr memory_order
  __cmpexch_failure_order2(memory_order __m) noexcept
  {
    return __m == memory_order_acq_rel ? memory_order_acquire
      : __m == memory_order_release ? memory_order_relaxed : __m;
  }
  constexpr memory_order
  __cmpexch_failure_order(memory_order __m) noexcept
  {
    return memory_order(__cmpexch_failure_order2(__m & __memory_order_mask)
                        | (__m & __memory_order_modifier_mask));
  }
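  // Worked example (illustrative, not part of the header): for a
  // compare-exchange, the derived failure order drops the release half
  // of the success order and preserves any modifier bits unchanged:
  //
  //   __cmpexch_failure_order(memory_order_acq_rel) == memory_order_acquire
  //   __cmpexch_failure_order(memory_order_release) == memory_order_relaxed
  //   __cmpexch_failure_order(memory_order_seq_cst) == memory_order_seq_cst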
  inline void
  atomic_thread_fence(memory_order __m) noexcept
  { __atomic_thread_fence(__m); }
  inline void
  atomic_signal_fence(memory_order __m) noexcept
  { __atomic_signal_fence(__m); }
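  // Usage sketch (illustrative; __ready and __data are hypothetical
  // shared variables, not part of this header): a release fence paired
  // with an acquire fence publishes data through a relaxed flag.
  //
  //   // Producer:
  //   __data = 42;
  //   atomic_thread_fence(memory_order_release);
  //   __ready.store(true, memory_order_relaxed);
  //
  //   // Consumer:
  //   while (!__ready.load(memory_order_relaxed)) { }
  //   atomic_thread_fence(memory_order_acquire);
  //   assert(__data == 42);   // the fences order the __data accesses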
  /// kill_dependency
  template<typename _Tp>
    inline _Tp
    kill_dependency(_Tp __y) noexcept
    {
      _Tp __ret(__y);
      return __ret;
    }
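  // Usage sketch (illustrative; _Node, __ptr and _M_index are
  // hypothetical): kill_dependency ends a memory_order_consume
  // dependency chain, so later uses of the result are not
  // dependency-ordered after the consume load.
  //
  //   _Node* __p = __ptr.load(memory_order_consume);
  //   int __i = kill_dependency(__p->_M_index);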
  /// Base class for atomic integrals.
  template<typename _IntTp>
    struct __atomic_base;
#define ATOMIC_VAR_INIT(_VI) { _VI }
  template<typename _Tp>
    struct atomic;

  template<typename _Tp>
    struct atomic<_Tp*>;
#if __GCC_ATOMIC_TEST_AND_SET_TRUEVAL == 1
  typedef bool __atomic_flag_data_type;
#else
  typedef unsigned char __atomic_flag_data_type;
#endif
  /// Base type for atomic_flag.
  _GLIBCXX_BEGIN_EXTERN_C

  struct __atomic_flag_base
  {
    __atomic_flag_data_type _M_i;
  };

  _GLIBCXX_END_EXTERN_C
#define ATOMIC_FLAG_INIT { 0 }
  /// atomic_flag
  struct atomic_flag : public __atomic_flag_base
  {
    atomic_flag() noexcept = default;
    ~atomic_flag() noexcept = default;
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;

    // Conversion to ATOMIC_FLAG_INIT.
    constexpr atomic_flag(bool __i) noexcept
      : __atomic_flag_base{ _S_init(__i) }
    { }

    bool
    test_and_set(memory_order __m = memory_order_seq_cst) noexcept
    { return __atomic_test_and_set (&_M_i, __m); }

    bool
    test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
    { return __atomic_test_and_set (&_M_i, __m); }
    void
    clear(memory_order __m = memory_order_seq_cst) noexcept
    {
      memory_order __b = __m & __memory_order_mask;
      __glibcxx_assert(__b != memory_order_consume);
      __glibcxx_assert(__b != memory_order_acquire);
      __glibcxx_assert(__b != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }
    void
    clear(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      memory_order __b = __m & __memory_order_mask;
      __glibcxx_assert(__b != memory_order_consume);
      __glibcxx_assert(__b != memory_order_acquire);
      __glibcxx_assert(__b != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }
  private:
    static constexpr __atomic_flag_data_type
    _S_init(bool __i)
    { return __i ? __GCC_ATOMIC_TEST_AND_SET_TRUEVAL : 0; }
  };
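  // Usage sketch (illustrative, not part of the header): atomic_flag
  // as a minimal spin lock built from the two members above.
  //
  //   std::atomic_flag __lock = ATOMIC_FLAG_INIT;
  //
  //   void __acquire_lock()
  //   { while (__lock.test_and_set(std::memory_order_acquire)) { } }
  //
  //   void __release_lock()
  //   { __lock.clear(std::memory_order_release); }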
  /// Base class for atomic integrals.
  template<typename _ITp>
    struct __atomic_base
    {
    private:
      typedef _ITp __int_type;

      __int_type _M_i;

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __int_type convertible to _M_i.
      constexpr __atomic_base(__int_type __i) noexcept : _M_i (__i) { }
      operator __int_type() const noexcept
      { return load(); }

      operator __int_type() const volatile noexcept
      { return load(); }

      __int_type
      operator=(__int_type __i) noexcept
      {
        store(__i);
        return __i;
      }

      __int_type
      operator=(__int_type __i) volatile noexcept
      {
        store(__i);
        return __i;
      }
      __int_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __int_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __int_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __int_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }
      __int_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }
      __int_type
      operator+=(__int_type __i) noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator+=(__int_type __i) volatile noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) volatile noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) volatile noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) volatile noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) volatile noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }
      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), nullptr); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), nullptr); }
      void
      store(__int_type __i,
            memory_order __m = memory_order_seq_cst) noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_i, __i, __m);
      }
      void
      store(__int_type __i,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_i, __i, __m);
      }
      __int_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, __m);
      }
      __int_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, __m);
      }
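      // Usage sketch (illustrative; __flag is a hypothetical
      // __atomic_base<int> shared between two threads): a
      // release/acquire handoff with the store and load members above.
      //
      //   __flag.store(1, memory_order_release);              // thread A
      //   while (__flag.load(memory_order_acquire) != 1) { }  // thread B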
      __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_exchange_n(&_M_i, __i, __m); }

      __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_exchange_n(&_M_i, __i, __m); }
      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1, memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }
      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }
      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }
      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }
      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1, memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }
      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }
      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }
      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }
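      // Usage sketch (illustrative; __a is a hypothetical
      // __atomic_base<int>): the canonical compare-exchange loop. On
      // failure, __expected is reloaded with the freshly observed
      // value, so the loop retries without a separate load.
      //
      //   int __expected = __a.load(memory_order_relaxed);
      //   while (!__a.compare_exchange_weak(__expected, __expected * 2,
      //                                     memory_order_acq_rel,
      //                                     memory_order_relaxed))
      //     { }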
      __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }

      __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }
    };
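  // Usage sketch (illustrative, not part of the header): a relaxed
  // event counter built on the integral base; atomic_uint is the
  // typedef for __atomic_base<unsigned int> declared at the end of
  // this file.
  //
  //   atomic_uint __hits(0);
  //   void __record() { __hits.fetch_add(1, memory_order_relaxed); }
  //   unsigned __total() { return __hits.load(memory_order_relaxed); }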
  /// Partial specialization for pointer types.
  template<typename _PTp>
    struct __atomic_base<_PTp*>
    {
    private:
      typedef _PTp* __pointer_type;

      __pointer_type _M_p;

      // Scale a pointer offset by the pointee size.
      ptrdiff_t
      _M_type_size(ptrdiff_t __d) { return __d * sizeof(_PTp); }

      ptrdiff_t
      _M_type_size(ptrdiff_t __d) volatile { return __d * sizeof(_PTp); }

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __pointer_type convertible to _M_p.
      constexpr __atomic_base(__pointer_type __p) noexcept : _M_p (__p) { }
      operator __pointer_type() const noexcept
      { return load(); }

      operator __pointer_type() const volatile noexcept
      { return load(); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      {
        store(__p);
        return __p;
      }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      {
        store(__p);
        return __p;
      }
      __pointer_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __pointer_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __pointer_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }
      __pointer_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }
      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }
      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free(sizeof(__pointer_type), nullptr); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free(sizeof(__pointer_type), nullptr); }
      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_p, __p, __m);
      }
      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_p, __p, __m);
      }
      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, __m);
      }
      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, __m);
      }
      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_exchange_n(&_M_p, __p, __m); }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_exchange_n(&_M_p, __p, __m); }
      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1, memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }
      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }
      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }
    };
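  // Usage sketch (illustrative): pointer arithmetic on the partial
  // specialization is scaled by _M_type_size, so fetch_add(1) on a
  // hypothetical __atomic_base<int*> advances by sizeof(int) bytes.
  //
  //   int __buf[4] = { 0, 1, 2, 3 };
  //   __atomic_base<int*> __cursor(__buf);
  //   int* __prev = __cursor.fetch_add(1);   // __prev == &__buf[0]
  //   int* __next = __cursor.load();         // __next == &__buf[1]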
  /// atomic_char
  typedef __atomic_base<char> atomic_char;

  /// atomic_schar
  typedef __atomic_base<signed char> atomic_schar;

  /// atomic_uchar
  typedef __atomic_base<unsigned char> atomic_uchar;

  /// atomic_short
  typedef __atomic_base<short> atomic_short;

  /// atomic_ushort
  typedef __atomic_base<unsigned short> atomic_ushort;

  /// atomic_int
  typedef __atomic_base<int> atomic_int;

  /// atomic_uint
  typedef __atomic_base<unsigned int> atomic_uint;

  /// atomic_long
  typedef __atomic_base<long> atomic_long;

  /// atomic_ulong
  typedef __atomic_base<unsigned long> atomic_ulong;

  /// atomic_llong
  typedef __atomic_base<long long> atomic_llong;

  /// atomic_ullong
  typedef __atomic_base<unsigned long long> atomic_ullong;

  /// atomic_wchar_t
  typedef __atomic_base<wchar_t> atomic_wchar_t;

  /// atomic_char16_t
  typedef __atomic_base<char16_t> atomic_char16_t;

  /// atomic_char32_t
  typedef __atomic_base<char32_t> atomic_char32_t;

  /// atomic_int_least8_t
  typedef __atomic_base<int_least8_t> atomic_int_least8_t;

  /// atomic_uint_least8_t
  typedef __atomic_base<uint_least8_t> atomic_uint_least8_t;

  /// atomic_int_least16_t
  typedef __atomic_base<int_least16_t> atomic_int_least16_t;

  /// atomic_uint_least16_t
  typedef __atomic_base<uint_least16_t> atomic_uint_least16_t;

  /// atomic_int_least32_t
  typedef __atomic_base<int_least32_t> atomic_int_least32_t;

  /// atomic_uint_least32_t
  typedef __atomic_base<uint_least32_t> atomic_uint_least32_t;

  /// atomic_int_least64_t
  typedef __atomic_base<int_least64_t> atomic_int_least64_t;

  /// atomic_uint_least64_t
  typedef __atomic_base<uint_least64_t> atomic_uint_least64_t;

  /// atomic_int_fast8_t
  typedef __atomic_base<int_fast8_t> atomic_int_fast8_t;

  /// atomic_uint_fast8_t
  typedef __atomic_base<uint_fast8_t> atomic_uint_fast8_t;

  /// atomic_int_fast16_t
  typedef __atomic_base<int_fast16_t> atomic_int_fast16_t;

  /// atomic_uint_fast16_t
  typedef __atomic_base<uint_fast16_t> atomic_uint_fast16_t;

  /// atomic_int_fast32_t
  typedef __atomic_base<int_fast32_t> atomic_int_fast32_t;

  /// atomic_uint_fast32_t
  typedef __atomic_base<uint_fast32_t> atomic_uint_fast32_t;

  /// atomic_int_fast64_t
  typedef __atomic_base<int_fast64_t> atomic_int_fast64_t;

  /// atomic_uint_fast64_t
  typedef __atomic_base<uint_fast64_t> atomic_uint_fast64_t;

  /// atomic_intptr_t
  typedef __atomic_base<intptr_t> atomic_intptr_t;

  /// atomic_uintptr_t
  typedef __atomic_base<uintptr_t> atomic_uintptr_t;

  /// atomic_size_t
  typedef __atomic_base<size_t> atomic_size_t;

  /// atomic_ptrdiff_t
  typedef __atomic_base<ptrdiff_t> atomic_ptrdiff_t;

  /// atomic_intmax_t
  typedef __atomic_base<intmax_t> atomic_intmax_t;

  /// atomic_uintmax_t
  typedef __atomic_base<uintmax_t> atomic_uintmax_t;

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace std

#endif // _GLIBCXX_ATOMIC_BASE_H