#ifndef _GLIBCXX_ATOMIC
#define _GLIBCXX_ATOMIC 1

#pragma GCC system_header

#ifndef __GXX_EXPERIMENTAL_CXX0X__
# include <bits/c++0x_warning.h>
#else

#include <bits/atomic_base.h>

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION
  /// atomic_bool
  // NB: No operators or fetch-operations for this type.
  struct atomic_bool
  {
  private:
    __atomic_base<bool> _M_base;

  public:
    atomic_bool() noexcept = default;
    ~atomic_bool() noexcept = default;
    atomic_bool(const atomic_bool&) = delete;
    atomic_bool& operator=(const atomic_bool&) = delete;
    atomic_bool& operator=(const atomic_bool&) volatile = delete;

    constexpr atomic_bool(bool __i) noexcept : _M_base(__i) { }

    bool
    operator=(bool __i) noexcept
    { return _M_base.operator=(__i); }

    bool
    operator=(bool __i) volatile noexcept
    { return _M_base.operator=(__i); }

    operator bool() const noexcept
    { return _M_base.load(); }

    operator bool() const volatile noexcept
    { return _M_base.load(); }

    bool
    is_lock_free() const noexcept
    { return _M_base.is_lock_free(); }

    bool
    is_lock_free() const volatile noexcept
    { return _M_base.is_lock_free(); }

    void
    store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
    { _M_base.store(__i, __m); }

    void
    store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
    { _M_base.store(__i, __m); }

    bool
    load(memory_order __m = memory_order_seq_cst) const noexcept
    { return _M_base.load(__m); }

    bool
    load(memory_order __m = memory_order_seq_cst) const volatile noexcept
    { return _M_base.load(__m); }

    bool
    exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.exchange(__i, __m); }

    bool
    exchange(bool __i,
             memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.exchange(__i, __m); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
                          memory_order __m2) noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
                          memory_order __m2) volatile noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2,
                          memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2,
                memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
                            memory_order __m2) noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
                            memory_order __m2) volatile noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2,
                memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
  };
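  // Illustrative usage sketch (not part of the header): a release/acquire
  // ready flag built on atomic_bool.  The names `ready', `publish' and
  // `consume_ready' are hypothetical user code.
  //
  //   std::atomic_bool ready(false);
  //
  //   void publish()            // writer thread: make data visible, then flag
  //   { ready.store(true, std::memory_order_release); }
  //
  //   bool consume_ready()      // reader thread: check flag, then read data
  //   { return ready.load(std::memory_order_acquire); }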
  /// 29.4.3, Generic atomic type, primary class template.
  /// _Tp must be trivially copyable.
  template<typename _Tp>
    struct atomic
    {
    private:
      _Tp _M_i;

    public:
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(_Tp __i) noexcept : _M_i(__i) { }

      operator _Tp() const noexcept
      { return load(); }

      operator _Tp() const volatile noexcept
      { return load(); }

      _Tp
      operator=(_Tp __i) noexcept
      { store(__i); return __i; }

      _Tp
      operator=(_Tp __i) volatile noexcept
      { store(__i); return __i; }

      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), &_M_i); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), &_M_i); }

      void
      store(_Tp __i, memory_order _m = memory_order_seq_cst) noexcept
      { __atomic_store(&_M_i, &__i, _m); }

      void
      store(_Tp __i, memory_order _m = memory_order_seq_cst) volatile noexcept
      { __atomic_store(&_M_i, &__i, _m); }

      _Tp
      load(memory_order _m = memory_order_seq_cst) const noexcept
      {
        _Tp tmp;
        __atomic_load(&_M_i, &tmp, _m);
        return tmp;
      }

      _Tp
      load(memory_order _m = memory_order_seq_cst) const volatile noexcept
      {
        _Tp tmp;
        __atomic_load(&_M_i, &tmp, _m);
        return tmp;
      }

      _Tp
      exchange(_Tp __i, memory_order _m = memory_order_seq_cst) noexcept
      {
        _Tp tmp;
        __atomic_exchange(&_M_i, &__i, &tmp, _m);
        return tmp;
      }

      _Tp
      exchange(_Tp __i,
               memory_order _m = memory_order_seq_cst) volatile noexcept
      {
        _Tp tmp;
        __atomic_exchange(&_M_i, &__i, &tmp, _m);
        return tmp;
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
                            memory_order __f) noexcept
      { return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f); }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
                            memory_order __f) volatile noexcept
      { return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f); }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
                            memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_weak(__e, __i, __m, __m); }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
                  memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_weak(__e, __i, __m, __m); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
                              memory_order __f) noexcept
      { return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
                              memory_order __f) volatile noexcept
      { return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
                              memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_strong(__e, __i, __m, __m); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
                  memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_strong(__e, __i, __m, __m); }
    };
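  // Illustrative usage sketch (not part of the header): the primary template
  // handles any trivially copyable _Tp through the __atomic_* builtins.  The
  // struct `Point' and function `move_right' are hypothetical user code.
  //
  //   struct Point { int x; int y; };
  //   std::atomic<Point> p(Point{0, 0});
  //
  //   void move_right()
  //   {
  //     Point expected = p.load();
  //     Point desired;
  //     do {                  // retry while another thread won the race;
  //       desired = expected; // `expected' is refreshed on each failure
  //       desired.x += 1;
  //     } while (!p.compare_exchange_weak(expected, desired));
  //   }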
  /// Partial specialization for pointer types.
  template<typename _Tp>
    struct atomic<_Tp*>
    {
      typedef _Tp*                   __pointer_type;
      typedef __atomic_base<_Tp*>    __base_type;
      __base_type                    _M_b;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }

      operator __pointer_type() const noexcept
      { return __pointer_type(_M_b); }

      operator __pointer_type() const volatile noexcept
      { return __pointer_type(_M_b); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator++(int) noexcept
      { return _M_b++; }

      __pointer_type
      operator++(int) volatile noexcept
      { return _M_b++; }

      __pointer_type
      operator--(int) noexcept
      { return _M_b--; }

      __pointer_type
      operator--(int) volatile noexcept
      { return _M_b--; }

      __pointer_type
      operator++() noexcept
      { return ++_M_b; }

      __pointer_type
      operator++() volatile noexcept
      { return ++_M_b; }

      __pointer_type
      operator--() noexcept
      { return --_M_b; }

      __pointer_type
      operator--() volatile noexcept
      { return --_M_b; }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      { return _M_b.operator+=(__d); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      { return _M_b.operator+=(__d); }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      { return _M_b.operator-=(__d); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      { return _M_b.operator-=(__d); }

      bool
      is_lock_free() const noexcept
      { return _M_b.is_lock_free(); }

      bool
      is_lock_free() const volatile noexcept
      { return _M_b.is_lock_free(); }

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.store(__p, __m); }

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.store(__p, __m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return _M_b.load(__m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      { return _M_b.load(__m); }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.exchange(__p, __m); }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.exchange(__p, __m); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_weak(__p1, __p2, __m,
                                     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                  memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_weak(__p1, __p2, __m,
                                     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m = memory_order_seq_cst) noexcept
      {
        return _M_b.compare_exchange_strong(__p1, __p2, __m,
                                            __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                  memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return _M_b.compare_exchange_strong(__p1, __p2, __m,
                                            __cmpexch_failure_order(__m));
      }

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.fetch_add(__d, __m); }

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.fetch_add(__d, __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.fetch_sub(__d, __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.fetch_sub(__d, __m); }
    };
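  // Illustrative usage sketch (not part of the header): the pointer
  // specialization steps by whole elements, not bytes.  The names `buf',
  // `cursor' and `claim_slot' are hypothetical user code.
  //
  //   int buf[64];
  //   std::atomic<int*> cursor(buf);
  //
  //   int* claim_slot()        // post-increment returns the old pointer,
  //   { return cursor++; }     // i.e. fetch_add(1) measured in ints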
  /// Explicit specializations for the integral types.
  template<>
    struct atomic<bool> : public __atomic_base<bool>
    {
      typedef bool                  __integral_type;
      typedef __atomic_base<bool>   __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  template<>
    struct atomic<char> : public __atomic_base<char>
    {
      typedef char                  __integral_type;
      typedef __atomic_base<char>   __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  template<>
    struct atomic<signed char> : public __atomic_base<signed char>
    {
      typedef signed char                  __integral_type;
      typedef __atomic_base<signed char>   __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  template<>
    struct atomic<unsigned char> : public __atomic_base<unsigned char>
    {
      typedef unsigned char                  __integral_type;
      typedef __atomic_base<unsigned char>   __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  template<>
    struct atomic<short> : public __atomic_base<short>
    {
      typedef short                  __integral_type;
      typedef __atomic_base<short>   __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  template<>
    struct atomic<unsigned short> : public __atomic_base<unsigned short>
    {
      typedef unsigned short                  __integral_type;
      typedef __atomic_base<unsigned short>   __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  template<>
    struct atomic<int> : public __atomic_base<int>
    {
      typedef int                  __integral_type;
      typedef __atomic_base<int>   __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  template<>
    struct atomic<unsigned int> : public __atomic_base<unsigned int>
    {
      typedef unsigned int                  __integral_type;
      typedef __atomic_base<unsigned int>   __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  template<>
    struct atomic<long> : public __atomic_base<long>
    {
      typedef long                  __integral_type;
      typedef __atomic_base<long>   __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  template<>
    struct atomic<unsigned long> : public __atomic_base<unsigned long>
    {
      typedef unsigned long                  __integral_type;
      typedef __atomic_base<unsigned long>   __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  template<>
    struct atomic<long long> : public __atomic_base<long long>
    {
      typedef long long                  __integral_type;
      typedef __atomic_base<long long>   __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  template<>
    struct atomic<unsigned long long> : public __atomic_base<unsigned long long>
    {
      typedef unsigned long long                  __integral_type;
      typedef __atomic_base<unsigned long long>   __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  template<>
    struct atomic<wchar_t> : public __atomic_base<wchar_t>
    {
      typedef wchar_t                  __integral_type;
      typedef __atomic_base<wchar_t>   __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  template<>
    struct atomic<char16_t> : public __atomic_base<char16_t>
    {
      typedef char16_t                  __integral_type;
      typedef __atomic_base<char16_t>   __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  template<>
    struct atomic<char32_t> : public __atomic_base<char32_t>
    {
      typedef char32_t                  __integral_type;
      typedef __atomic_base<char32_t>   __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };
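  // Illustrative usage sketch (not part of the header): the integral
  // specializations inherit the arithmetic members of __atomic_base, so a
  // shared counter needs no explicit fetch calls.  The name `hits' is
  // hypothetical user code.
  //
  //   std::atomic<unsigned> hits(0);
  //
  //   void record()    { ++hits; }              // atomic increment
  //   unsigned total() { return hits.load(); }  // seq_cst read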
  // Function definitions, atomic_flag operations.
  inline bool
  atomic_flag_test_and_set_explicit(atomic_flag* __a,
                                    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  inline bool
  atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
                                    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  inline void
  atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
  { __a->clear(__m); }

  inline void
  atomic_flag_clear_explicit(volatile atomic_flag* __a,
                             memory_order __m) noexcept
  { __a->clear(__m); }

  inline bool
  atomic_flag_test_and_set(atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline bool
  atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline void
  atomic_flag_clear(atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }

  inline void
  atomic_flag_clear(volatile atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
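  // Illustrative usage sketch (not part of the header): a minimal spin lock
  // built from the C-compatible atomic_flag functions.  The names
  // `spin_mutex', `lock' and `unlock' are hypothetical user code.
  //
  //   std::atomic_flag spin_mutex = ATOMIC_FLAG_INIT;
  //
  //   void lock()
  //   {
  //     while (std::atomic_flag_test_and_set_explicit(&spin_mutex,
  //                                                   std::memory_order_acquire))
  //       { }                    // busy-wait until the flag is cleared
  //   }
  //
  //   void unlock()
  //   { std::atomic_flag_clear_explicit(&spin_mutex, std::memory_order_release); }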
  // Function templates generally applicable to atomic types.
  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  template<typename _ITp>
    inline void
    atomic_init(atomic<_ITp>* __a, _ITp __i) noexcept;

  template<typename _ITp>
    inline void
    atomic_init(volatile atomic<_ITp>* __a, _ITp __i) noexcept;

  template<typename _ITp>
    inline void
    atomic_store_explicit(atomic<_ITp>* __a, _ITp __i,
                          memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline void
    atomic_store_explicit(volatile atomic<_ITp>* __a, _ITp __i,
                          memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const volatile atomic<_ITp>* __a,
                         memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(atomic<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(volatile atomic<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
                                          _ITp* __i1, _ITp __i2,
                                          memory_order __m1,
                                          memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
                                          _ITp* __i1, _ITp __i2,
                                          memory_order __m1,
                                          memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
                                            _ITp* __i1, _ITp __i2,
                                            memory_order __m1,
                                            memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
                                            _ITp* __i1, _ITp __i2,
                                            memory_order __m1,
                                            memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline void
    atomic_store(atomic<_ITp>* __a, _ITp __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline void
    atomic_store(volatile atomic<_ITp>* __a, _ITp __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const volatile atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(atomic<_ITp>* __a, _ITp __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(volatile atomic<_ITp>* __a, _ITp __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
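  // Illustrative usage sketch (not part of the header): the non-_explicit
  // free functions mirror the C11 <stdatomic.h> interface and default to
  // memory_order_seq_cst.  The name `shared' is hypothetical user code.
  //
  //   std::atomic<int> shared(0);
  //
  //   std::atomic_store(&shared, 42);                    // seq_cst store
  //   int old_value = std::atomic_exchange(&shared, 7);  // old_value == 42
  //   int now = std::atomic_load(&shared);               // now == 7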
  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(atomic<_ITp>* __a,
                                 _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                                   memory_order_seq_cst,
                                                   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
                                 _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                                   memory_order_seq_cst,
                                                   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(atomic<_ITp>* __a,
                                   _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                     memory_order_seq_cst,
                                                     memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
                                   _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                     memory_order_seq_cst,
                                                     memory_order_seq_cst);
    }
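  // Illustrative usage sketch (not part of the header): a compare-and-swap
  // retry loop through the free-function interface, where the expected value
  // is passed by pointer and rewritten on failure.  The names `counter' and
  // `add_if_even' are hypothetical user code.
  //
  //   std::atomic<int> counter(0);
  //
  //   bool add_if_even(int delta)
  //   {
  //     int expected = std::atomic_load(&counter);
  //     while ((expected % 2) == 0)                  // give up once it turns odd
  //       if (std::atomic_compare_exchange_weak(&counter, &expected,
  //                                             expected + delta))
  //         return true;
  //     return false;
  //   }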
  // Function templates for atomic_integral operations only, using
  // __atomic_base.  The template argument should be constrained to the
  // integral types specified in the standard, excluding address types.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
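  // Illustrative usage sketch (not part of the header): relaxed statistics
  // counters via the _explicit fetch functions.  These overloads take a
  // __atomic_base<_ITp>*, the base that the integral std::atomic
  // specializations above derive from.  The names `bytes_sent' and `on_send'
  // are hypothetical user code.
  //
  //   std::atomic<unsigned long> bytes_sent(0);
  //
  //   void on_send(unsigned long n)
  //   { std::atomic_fetch_add_explicit(&bytes_sent, n,
  //                                    std::memory_order_relaxed); }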
  // Partial specializations for pointers.
  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
                              memory_order __m) noexcept
    { return __a->fetch_add(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add_explicit(volatile atomic<_ITp*>* __a, ptrdiff_t __d,
                              memory_order __m) noexcept
    { return __a->fetch_add(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_add(__d); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_add(__d); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub_explicit(volatile atomic<_ITp*>* __a, ptrdiff_t __d,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_sub(__d); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_sub(__d); }
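  // Illustrative usage sketch (not part of the header): fetch_add on an
  // atomic pointer advances by whole elements, so this hands out consecutive
  // slots of `pool'.  The names `pool', `next' and `allocate_slot' are
  // hypothetical user code.
  //
  //   double pool[128];
  //   std::atomic<double*> next(pool);
  //
  //   double* allocate_slot()
  //   { return std::atomic_fetch_add(&next, 1); }   // old value of `next'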
_GLIBCXX_END_NAMESPACE_VERSION
} // namespace

#endif // __GXX_EXPERIMENTAL_CXX0X__

#endif // _GLIBCXX_ATOMIC