3 // Copyright (C) 2008-2019 Free Software Foundation, Inc.
5 // This file is part of the GNU ISO C++ Library. This library is free
6 // software; you can redistribute it and/or modify it under the
7 // terms of the GNU General Public License as published by the
8 // Free Software Foundation; either version 3, or (at your option)
11 // This library is distributed in the hope that it will be useful,
12 // but WITHOUT ANY WARRANTY; without even the implied warranty of
13 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 // GNU General Public License for more details.
16 // Under Section 7 of GPL version 3, you are granted additional
17 // permissions described in the GCC Runtime Library Exception, version
18 // 3.1, as published by the Free Software Foundation.
20 // You should have received a copy of the GNU General Public License and
21 // a copy of the GCC Runtime Library Exception along with this program;
22 // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
23 // <http://www.gnu.org/licenses/>.
25 /** @file include/atomic
26 * This is a Standard C++ Library header.
29 // Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
30 // http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html
32 #ifndef _GLIBCXX_ATOMIC
33 #define _GLIBCXX_ATOMIC 1
35 #pragma GCC system_header
37 #if __cplusplus < 201103L
38 # include <bits/c++0x_warning.h>
41 #include <bits/atomic_base.h>
42 #include <bits/move.h>
44 namespace std _GLIBCXX_VISIBILITY(default)
46 _GLIBCXX_BEGIN_NAMESPACE_VERSION
53 #if __cplusplus >= 201703L
54 # define __cpp_lib_atomic_is_always_lock_free 201603
57 template<typename _Tp>
// std::atomic<bool> explicit specialization.
// A thin wrapper: every operation delegates to the __atomic_base<bool>
// member _M_base.  Copying an atomic is deleted by design; only the
// contained value can be replaced (atomically).
61 // NB: No operators or fetch-operations for this type.
65 using value_type = bool;
// Sole data member; all atomic behavior lives in __atomic_base<bool>.
68 __atomic_base<bool> _M_base;
71 atomic() noexcept = default;
72 ~atomic() noexcept = default;
73 atomic(const atomic&) = delete;
74 atomic& operator=(const atomic&) = delete;
75 atomic& operator=(const atomic&) volatile = delete;
77 constexpr atomic(bool __i) noexcept : _M_base(__i) { }
// Assignment stores the new value atomically (seq_cst via the base's
// operator=) and forwards the base's return value.
80 operator=(bool __i) noexcept
81 { return _M_base.operator=(__i); }
84 operator=(bool __i) volatile noexcept
85 { return _M_base.operator=(__i); }
// Implicit conversion performs a (seq_cst) atomic load.
87 operator bool() const noexcept
88 { return _M_base.load(); }
90 operator bool() const volatile noexcept
91 { return _M_base.load(); }
94 is_lock_free() const noexcept { return _M_base.is_lock_free(); }
97 is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }
99 #if __cplusplus >= 201703L
// C++17: compile-time lock-freedom, derived from the platform macro.
100 static constexpr bool is_always_lock_free = ATOMIC_BOOL_LOCK_FREE == 2;
104 store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
105 { _M_base.store(__i, __m); }
108 store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
109 { _M_base.store(__i, __m); }
112 load(memory_order __m = memory_order_seq_cst) const noexcept
113 { return _M_base.load(__m); }
116 load(memory_order __m = memory_order_seq_cst) const volatile noexcept
117 { return _M_base.load(__m); }
120 exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
121 { return _M_base.exchange(__i, __m); }
125 memory_order __m = memory_order_seq_cst) volatile noexcept
126 { return _M_base.exchange(__i, __m); }
// CAS overloads: the two-order forms take explicit success (__m1) and
// failure (__m2) orders; the one-order forms let the base derive the
// failure order from the single supplied order.
129 compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
130 memory_order __m2) noexcept
131 { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
134 compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
135 memory_order __m2) volatile noexcept
136 { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
139 compare_exchange_weak(bool& __i1, bool __i2,
140 memory_order __m = memory_order_seq_cst) noexcept
141 { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
144 compare_exchange_weak(bool& __i1, bool __i2,
145 memory_order __m = memory_order_seq_cst) volatile noexcept
146 { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
149 compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
150 memory_order __m2) noexcept
151 { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
154 compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
155 memory_order __m2) volatile noexcept
156 { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
159 compare_exchange_strong(bool& __i1, bool __i2,
160 memory_order __m = memory_order_seq_cst) noexcept
161 { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
164 compare_exchange_strong(bool& __i1, bool __i2,
165 memory_order __m = memory_order_seq_cst) volatile noexcept
166 { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
171 * @brief Generic atomic type, primary class template.
173 * @tparam _Tp Type to be made atomic, must be trivially copyable.
175 template<typename _Tp>
178 using value_type = _Tp;
181 // Align 1/2/4/8/16-byte types to at least their size.
182 static constexpr int _S_min_alignment
// `sizeof & (sizeof - 1)` is non-zero exactly when the size is NOT a
// power of two; such types (and types over 16 bytes) keep their
// natural alignment, otherwise the object is aligned to its size so
// the compiler can use a single lock-free instruction where available.
183 = (sizeof(_Tp) & (sizeof(_Tp) - 1)) || sizeof(_Tp) > 16
186 static constexpr int _S_alignment
187 = _S_min_alignment > alignof(_Tp) ? _S_min_alignment : alignof(_Tp);
// The stored value, over-aligned per the computation above.
189 alignas(_S_alignment) _Tp _M_i;
// std::atomic<T> requires T to be trivially copyable, complete and
// non-zero-sized; diagnose at instantiation time.
191 static_assert(__is_trivially_copyable(_Tp),
192 "std::atomic requires a trivially copyable type");
194 static_assert(sizeof(_Tp) > 0,
195 "Incomplete or zero-sized types are not supported");
198 atomic() noexcept = default;
199 ~atomic() noexcept = default;
200 atomic(const atomic&) = delete;
201 atomic& operator=(const atomic&) = delete;
202 atomic& operator=(const atomic&) volatile = delete;
204 constexpr atomic(_Tp __i) noexcept : _M_i(__i) { }
206 operator _Tp() const noexcept
209 operator _Tp() const volatile noexcept
// Assignment: atomic store (default seq_cst), then return the value
// that was stored, as the standard requires.
213 operator=(_Tp __i) noexcept
214 { store(__i); return __i; }
217 operator=(_Tp __i) volatile noexcept
218 { store(__i); return __i; }
221 is_lock_free() const noexcept
223 // Produce a fake, minimally aligned pointer.
224 return __atomic_is_lock_free(sizeof(_M_i),
225 reinterpret_cast<void *>(-_S_alignment));
229 is_lock_free() const volatile noexcept
231 // Produce a fake, minimally aligned pointer.
232 return __atomic_is_lock_free(sizeof(_M_i),
233 reinterpret_cast<void *>(-_S_alignment));
236 #if __cplusplus >= 201703L
237 static constexpr bool is_always_lock_free
// Null alignment argument: query based on size alone, at compile time.
238 = __atomic_always_lock_free(sizeof(_M_i), 0);
// store/load/exchange go straight to the compiler's generic __atomic_*
// builtins, which operate on the object representation bytes.
242 store(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
243 { __atomic_store(std::__addressof(_M_i), std::__addressof(__i), int(__m)); }
246 store(_Tp __i, memory_order __m = memory_order_seq_cst) volatile noexcept
247 { __atomic_store(std::__addressof(_M_i), std::__addressof(__i), int(__m)); }
250 load(memory_order __m = memory_order_seq_cst) const noexcept
// Load into a raw, suitably aligned buffer so no _Tp constructor or
// assignment runs on the destination before the copy-out.
252 alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
253 _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
254 __atomic_load(std::__addressof(_M_i), __ptr, int(__m));
259 load(memory_order __m = memory_order_seq_cst) const volatile noexcept
261 alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
262 _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
263 __atomic_load(std::__addressof(_M_i), __ptr, int(__m));
268 exchange(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
270 alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
271 _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
272 __atomic_exchange(std::__addressof(_M_i), std::__addressof(__i),
279 memory_order __m = memory_order_seq_cst) volatile noexcept
281 alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
282 _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
283 __atomic_exchange(std::__addressof(_M_i), std::__addressof(__i),
// CAS: builtin's 4th argument selects weak (true) vs strong (false);
// on failure the builtin writes the observed value back into __e.
289 compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
290 memory_order __f) noexcept
292 return __atomic_compare_exchange(std::__addressof(_M_i),
293 std::__addressof(__e),
294 std::__addressof(__i),
295 true, int(__s), int(__f));
299 compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
300 memory_order __f) volatile noexcept
302 return __atomic_compare_exchange(std::__addressof(_M_i),
303 std::__addressof(__e),
304 std::__addressof(__i),
305 true, int(__s), int(__f));
// One-order CAS forms compute the failure order from the success
// order via __cmpexch_failure_order.
309 compare_exchange_weak(_Tp& __e, _Tp __i,
310 memory_order __m = memory_order_seq_cst) noexcept
311 { return compare_exchange_weak(__e, __i, __m,
312 __cmpexch_failure_order(__m)); }
315 compare_exchange_weak(_Tp& __e, _Tp __i,
316 memory_order __m = memory_order_seq_cst) volatile noexcept
317 { return compare_exchange_weak(__e, __i, __m,
318 __cmpexch_failure_order(__m)); }
321 compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
322 memory_order __f) noexcept
324 return __atomic_compare_exchange(std::__addressof(_M_i),
325 std::__addressof(__e),
326 std::__addressof(__i),
327 false, int(__s), int(__f));
331 compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
332 memory_order __f) volatile noexcept
334 return __atomic_compare_exchange(std::__addressof(_M_i),
335 std::__addressof(__e),
336 std::__addressof(__i),
337 false, int(__s), int(__f));
341 compare_exchange_strong(_Tp& __e, _Tp __i,
342 memory_order __m = memory_order_seq_cst) noexcept
343 { return compare_exchange_strong(__e, __i, __m,
344 __cmpexch_failure_order(__m)); }
347 compare_exchange_strong(_Tp& __e, _Tp __i,
348 memory_order __m = memory_order_seq_cst) volatile noexcept
349 { return compare_exchange_strong(__e, __i, __m,
350 __cmpexch_failure_order(__m)); }
354 /// Partial specialization for pointer types.
// Wraps an __atomic_base<_Tp*> member (_M_b) and forwards everything
// to it.  Pointer arithmetic members statically require an object
// type in C++17 mode (arithmetic on pointer-to-function/void is
// ill-formed).
355 template<typename _Tp>
358 using value_type = _Tp*;
359 using difference_type = ptrdiff_t;
361 typedef _Tp* __pointer_type;
362 typedef __atomic_base<_Tp*> __base_type;
365 atomic() noexcept = default;
366 ~atomic() noexcept = default;
367 atomic(const atomic&) = delete;
368 atomic& operator=(const atomic&) = delete;
369 atomic& operator=(const atomic&) volatile = delete;
371 constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }
// Implicit conversion: atomic load of the stored pointer.
373 operator __pointer_type() const noexcept
374 { return __pointer_type(_M_b); }
376 operator __pointer_type() const volatile noexcept
377 { return __pointer_type(_M_b); }
380 operator=(__pointer_type __p) noexcept
381 { return _M_b.operator=(__p); }
384 operator=(__pointer_type __p) volatile noexcept
385 { return _M_b.operator=(__p); }
// Post/pre increment/decrement: atomic pointer arithmetic; the
// static_assert (C++17) rejects non-object pointee types.
388 operator++(int) noexcept
390 #if __cplusplus >= 201703L
391 static_assert( is_object<_Tp>::value, "pointer to object type" );
397 operator++(int) volatile noexcept
399 #if __cplusplus >= 201703L
400 static_assert( is_object<_Tp>::value, "pointer to object type" );
406 operator--(int) noexcept
408 #if __cplusplus >= 201703L
409 static_assert( is_object<_Tp>::value, "pointer to object type" );
415 operator--(int) volatile noexcept
417 #if __cplusplus >= 201703L
418 static_assert( is_object<_Tp>::value, "pointer to object type" );
424 operator++() noexcept
426 #if __cplusplus >= 201703L
427 static_assert( is_object<_Tp>::value, "pointer to object type" );
433 operator++() volatile noexcept
435 #if __cplusplus >= 201703L
436 static_assert( is_object<_Tp>::value, "pointer to object type" );
442 operator--() noexcept
444 #if __cplusplus >= 201703L
445 static_assert( is_object<_Tp>::value, "pointer to object type" );
451 operator--() volatile noexcept
453 #if __cplusplus >= 201703L
454 static_assert( is_object<_Tp>::value, "pointer to object type" );
460 operator+=(ptrdiff_t __d) noexcept
462 #if __cplusplus >= 201703L
463 static_assert( is_object<_Tp>::value, "pointer to object type" );
465 return _M_b.operator+=(__d);
469 operator+=(ptrdiff_t __d) volatile noexcept
471 #if __cplusplus >= 201703L
472 static_assert( is_object<_Tp>::value, "pointer to object type" );
474 return _M_b.operator+=(__d);
478 operator-=(ptrdiff_t __d) noexcept
480 #if __cplusplus >= 201703L
481 static_assert( is_object<_Tp>::value, "pointer to object type" );
483 return _M_b.operator-=(__d);
487 operator-=(ptrdiff_t __d) volatile noexcept
489 #if __cplusplus >= 201703L
490 static_assert( is_object<_Tp>::value, "pointer to object type" );
492 return _M_b.operator-=(__d);
496 is_lock_free() const noexcept
497 { return _M_b.is_lock_free(); }
500 is_lock_free() const volatile noexcept
501 { return _M_b.is_lock_free(); }
503 #if __cplusplus >= 201703L
504 static constexpr bool is_always_lock_free = ATOMIC_POINTER_LOCK_FREE == 2;
508 store(__pointer_type __p,
509 memory_order __m = memory_order_seq_cst) noexcept
510 { return _M_b.store(__p, __m); }
513 store(__pointer_type __p,
514 memory_order __m = memory_order_seq_cst) volatile noexcept
515 { return _M_b.store(__p, __m); }
518 load(memory_order __m = memory_order_seq_cst) const noexcept
519 { return _M_b.load(__m); }
522 load(memory_order __m = memory_order_seq_cst) const volatile noexcept
523 { return _M_b.load(__m); }
526 exchange(__pointer_type __p,
527 memory_order __m = memory_order_seq_cst) noexcept
528 { return _M_b.exchange(__p, __m); }
531 exchange(__pointer_type __p,
532 memory_order __m = memory_order_seq_cst) volatile noexcept
533 { return _M_b.exchange(__p, __m); }
// NOTE: compare_exchange_weak deliberately delegates to the base's
// compare_exchange_strong below — a strong CAS (which never fails
// spuriously) is a conforming implementation of the weak form.
536 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
537 memory_order __m1, memory_order __m2) noexcept
538 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
541 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
543 memory_order __m2) volatile noexcept
544 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
547 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
548 memory_order __m = memory_order_seq_cst) noexcept
550 return compare_exchange_weak(__p1, __p2, __m,
551 __cmpexch_failure_order(__m));
555 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
556 memory_order __m = memory_order_seq_cst) volatile noexcept
558 return compare_exchange_weak(__p1, __p2, __m,
559 __cmpexch_failure_order(__m));
563 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
564 memory_order __m1, memory_order __m2) noexcept
565 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
568 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
570 memory_order __m2) volatile noexcept
571 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
574 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
575 memory_order __m = memory_order_seq_cst) noexcept
577 return _M_b.compare_exchange_strong(__p1, __p2, __m,
578 __cmpexch_failure_order(__m));
582 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
583 memory_order __m = memory_order_seq_cst) volatile noexcept
585 return _M_b.compare_exchange_strong(__p1, __p2, __m,
586 __cmpexch_failure_order(__m));
// fetch_add/fetch_sub scale __d by sizeof(_Tp) in the base; hence the
// object-type requirement in C++17 mode.
590 fetch_add(ptrdiff_t __d,
591 memory_order __m = memory_order_seq_cst) noexcept
593 #if __cplusplus >= 201703L
594 static_assert( is_object<_Tp>::value, "pointer to object type" );
596 return _M_b.fetch_add(__d, __m);
600 fetch_add(ptrdiff_t __d,
601 memory_order __m = memory_order_seq_cst) volatile noexcept
603 #if __cplusplus >= 201703L
604 static_assert( is_object<_Tp>::value, "pointer to object type" );
606 return _M_b.fetch_add(__d, __m);
610 fetch_sub(ptrdiff_t __d,
611 memory_order __m = memory_order_seq_cst) noexcept
613 #if __cplusplus >= 201703L
614 static_assert( is_object<_Tp>::value, "pointer to object type" );
616 return _M_b.fetch_sub(__d, __m);
620 fetch_sub(ptrdiff_t __d,
621 memory_order __m = memory_order_seq_cst) volatile noexcept
623 #if __cplusplus >= 201703L
624 static_assert( is_object<_Tp>::value, "pointer to object type" );
626 return _M_b.fetch_sub(__d, __m);
// Explicit specializations for the standard integral/character types.
// Each one simply derives from the matching __atomic_base<T>, inherits
// its conversion and assignment operators via using-declarations, and
// (in C++17) reports compile-time lock-freedom from the corresponding
// ATOMIC_*_LOCK_FREE platform macro.  Copying is deleted throughout.
631 /// Explicit specialization for char.
633 struct atomic<char> : __atomic_base<char>
635 typedef char __integral_type;
636 typedef __atomic_base<char> __base_type;
638 atomic() noexcept = default;
639 ~atomic() noexcept = default;
640 atomic(const atomic&) = delete;
641 atomic& operator=(const atomic&) = delete;
642 atomic& operator=(const atomic&) volatile = delete;
644 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
646 using __base_type::operator __integral_type;
647 using __base_type::operator=;
649 #if __cplusplus >= 201703L
650 static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
654 /// Explicit specialization for signed char.
656 struct atomic<signed char> : __atomic_base<signed char>
658 typedef signed char __integral_type;
659 typedef __atomic_base<signed char> __base_type;
661 atomic() noexcept= default;
662 ~atomic() noexcept = default;
663 atomic(const atomic&) = delete;
664 atomic& operator=(const atomic&) = delete;
665 atomic& operator=(const atomic&) volatile = delete;
667 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
669 using __base_type::operator __integral_type;
670 using __base_type::operator=;
672 #if __cplusplus >= 201703L
673 static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
677 /// Explicit specialization for unsigned char.
679 struct atomic<unsigned char> : __atomic_base<unsigned char>
681 typedef unsigned char __integral_type;
682 typedef __atomic_base<unsigned char> __base_type;
684 atomic() noexcept= default;
685 ~atomic() noexcept = default;
686 atomic(const atomic&) = delete;
687 atomic& operator=(const atomic&) = delete;
688 atomic& operator=(const atomic&) volatile = delete;
690 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
692 using __base_type::operator __integral_type;
693 using __base_type::operator=;
695 #if __cplusplus >= 201703L
696 static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
700 /// Explicit specialization for short.
702 struct atomic<short> : __atomic_base<short>
704 typedef short __integral_type;
705 typedef __atomic_base<short> __base_type;
707 atomic() noexcept = default;
708 ~atomic() noexcept = default;
709 atomic(const atomic&) = delete;
710 atomic& operator=(const atomic&) = delete;
711 atomic& operator=(const atomic&) volatile = delete;
713 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
715 using __base_type::operator __integral_type;
716 using __base_type::operator=;
718 #if __cplusplus >= 201703L
719 static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
723 /// Explicit specialization for unsigned short.
725 struct atomic<unsigned short> : __atomic_base<unsigned short>
727 typedef unsigned short __integral_type;
728 typedef __atomic_base<unsigned short> __base_type;
730 atomic() noexcept = default;
731 ~atomic() noexcept = default;
732 atomic(const atomic&) = delete;
733 atomic& operator=(const atomic&) = delete;
734 atomic& operator=(const atomic&) volatile = delete;
736 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
738 using __base_type::operator __integral_type;
739 using __base_type::operator=;
741 #if __cplusplus >= 201703L
742 static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
746 /// Explicit specialization for int.
748 struct atomic<int> : __atomic_base<int>
750 typedef int __integral_type;
751 typedef __atomic_base<int> __base_type;
753 atomic() noexcept = default;
754 ~atomic() noexcept = default;
755 atomic(const atomic&) = delete;
756 atomic& operator=(const atomic&) = delete;
757 atomic& operator=(const atomic&) volatile = delete;
759 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
761 using __base_type::operator __integral_type;
762 using __base_type::operator=;
764 #if __cplusplus >= 201703L
765 static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
769 /// Explicit specialization for unsigned int.
771 struct atomic<unsigned int> : __atomic_base<unsigned int>
773 typedef unsigned int __integral_type;
774 typedef __atomic_base<unsigned int> __base_type;
776 atomic() noexcept = default;
777 ~atomic() noexcept = default;
778 atomic(const atomic&) = delete;
779 atomic& operator=(const atomic&) = delete;
780 atomic& operator=(const atomic&) volatile = delete;
782 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
784 using __base_type::operator __integral_type;
785 using __base_type::operator=;
787 #if __cplusplus >= 201703L
788 static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
792 /// Explicit specialization for long.
794 struct atomic<long> : __atomic_base<long>
796 typedef long __integral_type;
797 typedef __atomic_base<long> __base_type;
799 atomic() noexcept = default;
800 ~atomic() noexcept = default;
801 atomic(const atomic&) = delete;
802 atomic& operator=(const atomic&) = delete;
803 atomic& operator=(const atomic&) volatile = delete;
805 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
807 using __base_type::operator __integral_type;
808 using __base_type::operator=;
810 #if __cplusplus >= 201703L
811 static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
815 /// Explicit specialization for unsigned long.
817 struct atomic<unsigned long> : __atomic_base<unsigned long>
819 typedef unsigned long __integral_type;
820 typedef __atomic_base<unsigned long> __base_type;
822 atomic() noexcept = default;
823 ~atomic() noexcept = default;
824 atomic(const atomic&) = delete;
825 atomic& operator=(const atomic&) = delete;
826 atomic& operator=(const atomic&) volatile = delete;
828 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
830 using __base_type::operator __integral_type;
831 using __base_type::operator=;
833 #if __cplusplus >= 201703L
834 static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
838 /// Explicit specialization for long long.
840 struct atomic<long long> : __atomic_base<long long>
842 typedef long long __integral_type;
843 typedef __atomic_base<long long> __base_type;
845 atomic() noexcept = default;
846 ~atomic() noexcept = default;
847 atomic(const atomic&) = delete;
848 atomic& operator=(const atomic&) = delete;
849 atomic& operator=(const atomic&) volatile = delete;
851 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
853 using __base_type::operator __integral_type;
854 using __base_type::operator=;
856 #if __cplusplus >= 201703L
857 static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
861 /// Explicit specialization for unsigned long long.
863 struct atomic<unsigned long long> : __atomic_base<unsigned long long>
865 typedef unsigned long long __integral_type;
866 typedef __atomic_base<unsigned long long> __base_type;
868 atomic() noexcept = default;
869 ~atomic() noexcept = default;
870 atomic(const atomic&) = delete;
871 atomic& operator=(const atomic&) = delete;
872 atomic& operator=(const atomic&) volatile = delete;
874 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
876 using __base_type::operator __integral_type;
877 using __base_type::operator=;
879 #if __cplusplus >= 201703L
880 static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
884 /// Explicit specialization for wchar_t.
886 struct atomic<wchar_t> : __atomic_base<wchar_t>
888 typedef wchar_t __integral_type;
889 typedef __atomic_base<wchar_t> __base_type;
891 atomic() noexcept = default;
892 ~atomic() noexcept = default;
893 atomic(const atomic&) = delete;
894 atomic& operator=(const atomic&) = delete;
895 atomic& operator=(const atomic&) volatile = delete;
897 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
899 using __base_type::operator __integral_type;
900 using __base_type::operator=;
902 #if __cplusplus >= 201703L
903 static constexpr bool is_always_lock_free = ATOMIC_WCHAR_T_LOCK_FREE == 2;
// char8_t only exists when the library is configured for it
// (C++20 core-language support).
907 #ifdef _GLIBCXX_USE_CHAR8_T
908 /// Explicit specialization for char8_t.
910 struct atomic<char8_t> : __atomic_base<char8_t>
912 typedef char8_t __integral_type;
913 typedef __atomic_base<char8_t> __base_type;
915 atomic() noexcept = default;
916 ~atomic() noexcept = default;
917 atomic(const atomic&) = delete;
918 atomic& operator=(const atomic&) = delete;
919 atomic& operator=(const atomic&) volatile = delete;
921 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
923 using __base_type::operator __integral_type;
924 using __base_type::operator=;
926 #if __cplusplus > 201402L
927 static constexpr bool is_always_lock_free = ATOMIC_CHAR8_T_LOCK_FREE == 2;
932 /// Explicit specialization for char16_t.
934 struct atomic<char16_t> : __atomic_base<char16_t>
936 typedef char16_t __integral_type;
937 typedef __atomic_base<char16_t> __base_type;
939 atomic() noexcept = default;
940 ~atomic() noexcept = default;
941 atomic(const atomic&) = delete;
942 atomic& operator=(const atomic&) = delete;
943 atomic& operator=(const atomic&) volatile = delete;
945 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
947 using __base_type::operator __integral_type;
948 using __base_type::operator=;
950 #if __cplusplus >= 201703L
951 static constexpr bool is_always_lock_free = ATOMIC_CHAR16_T_LOCK_FREE == 2;
955 /// Explicit specialization for char32_t.
957 struct atomic<char32_t> : __atomic_base<char32_t>
959 typedef char32_t __integral_type;
960 typedef __atomic_base<char32_t> __base_type;
962 atomic() noexcept = default;
963 ~atomic() noexcept = default;
964 atomic(const atomic&) = delete;
965 atomic& operator=(const atomic&) = delete;
966 atomic& operator=(const atomic&) volatile = delete;
968 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
970 using __base_type::operator __integral_type;
971 using __base_type::operator=;
973 #if __cplusplus >= 201703L
974 static constexpr bool is_always_lock_free = ATOMIC_CHAR32_T_LOCK_FREE == 2;
// Standard convenience typedefs: one atomic_X alias per specialized
// type, per [atomics.syn].
/// atomic_bool
980 typedef atomic<bool> atomic_bool;
/// atomic_char
983 typedef atomic<char> atomic_char;
/// atomic_schar
986 typedef atomic<signed char> atomic_schar;
/// atomic_uchar
989 typedef atomic<unsigned char> atomic_uchar;
/// atomic_short
992 typedef atomic<short> atomic_short;
/// atomic_ushort
995 typedef atomic<unsigned short> atomic_ushort;
/// atomic_int
998 typedef atomic<int> atomic_int;
/// atomic_uint
1001 typedef atomic<unsigned int> atomic_uint;
/// atomic_long
1004 typedef atomic<long> atomic_long;
/// atomic_ulong
1007 typedef atomic<unsigned long> atomic_ulong;
/// atomic_llong
1010 typedef atomic<long long> atomic_llong;
/// atomic_ullong
1013 typedef atomic<unsigned long long> atomic_ullong;
/// atomic_wchar_t
1016 typedef atomic<wchar_t> atomic_wchar_t;
1018 #ifdef _GLIBCXX_USE_CHAR8_T
/// atomic_char8_t
1020 typedef atomic<char8_t> atomic_char8_t;
/// atomic_char16_t
1024 typedef atomic<char16_t> atomic_char16_t;
/// atomic_char32_t
1027 typedef atomic<char32_t> atomic_char32_t;
// Exact-width aliases are only available when <cstdint> provides the
// underlying fixed-width integer types.
1029 #ifdef _GLIBCXX_USE_C99_STDINT_TR1
1030 // _GLIBCXX_RESOLVE_LIB_DEFECTS
1031 // 2441. Exact-width atomic typedefs should be provided
/// atomic_int8_t
1034 typedef atomic<int8_t> atomic_int8_t;
/// atomic_uint8_t
1037 typedef atomic<uint8_t> atomic_uint8_t;
/// atomic_int16_t
1040 typedef atomic<int16_t> atomic_int16_t;
/// atomic_uint16_t
1043 typedef atomic<uint16_t> atomic_uint16_t;
/// atomic_int32_t
1046 typedef atomic<int32_t> atomic_int32_t;
/// atomic_uint32_t
1049 typedef atomic<uint32_t> atomic_uint32_t;
/// atomic_int64_t
1052 typedef atomic<int64_t> atomic_int64_t;
/// atomic_uint64_t
1055 typedef atomic<uint64_t> atomic_uint64_t;
1058 /// atomic_int_least8_t
1059 typedef atomic<int_least8_t> atomic_int_least8_t;
1061 /// atomic_uint_least8_t
1062 typedef atomic<uint_least8_t> atomic_uint_least8_t;
1064 /// atomic_int_least16_t
1065 typedef atomic<int_least16_t> atomic_int_least16_t;
1067 /// atomic_uint_least16_t
1068 typedef atomic<uint_least16_t> atomic_uint_least16_t;
1070 /// atomic_int_least32_t
1071 typedef atomic<int_least32_t> atomic_int_least32_t;
1073 /// atomic_uint_least32_t
1074 typedef atomic<uint_least32_t> atomic_uint_least32_t;
1076 /// atomic_int_least64_t
1077 typedef atomic<int_least64_t> atomic_int_least64_t;
1079 /// atomic_uint_least64_t
1080 typedef atomic<uint_least64_t> atomic_uint_least64_t;
1083 /// atomic_int_fast8_t
1084 typedef atomic<int_fast8_t> atomic_int_fast8_t;
1086 /// atomic_uint_fast8_t
1087 typedef atomic<uint_fast8_t> atomic_uint_fast8_t;
1089 /// atomic_int_fast16_t
1090 typedef atomic<int_fast16_t> atomic_int_fast16_t;
1092 /// atomic_uint_fast16_t
1093 typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
1095 /// atomic_int_fast32_t
1096 typedef atomic<int_fast32_t> atomic_int_fast32_t;
1098 /// atomic_uint_fast32_t
1099 typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
1101 /// atomic_int_fast64_t
1102 typedef atomic<int_fast64_t> atomic_int_fast64_t;
1104 /// atomic_uint_fast64_t
1105 typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
/// atomic_intptr_t
1110 typedef atomic<intptr_t> atomic_intptr_t;
1112 /// atomic_uintptr_t
1113 typedef atomic<uintptr_t> atomic_uintptr_t;
/// atomic_size_t
1116 typedef atomic<size_t> atomic_size_t;
1118 /// atomic_ptrdiff_t
1119 typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
1121 #ifdef _GLIBCXX_USE_C99_STDINT_TR1
/// atomic_intmax_t
1123 typedef atomic<intmax_t> atomic_intmax_t;
1125 /// atomic_uintmax_t
1126 typedef atomic<uintmax_t> atomic_uintmax_t;
1129 // Function definitions, atomic_flag operations.
// C-compatible free-function interface over atomic_flag.  The
// *_explicit forms take a caller-supplied memory order; the plain
// forms fix the order to memory_order_seq_cst.  Each has a volatile
// overload so volatile-qualified flags are usable too.
1131 atomic_flag_test_and_set_explicit(atomic_flag* __a,
1132 memory_order __m) noexcept
1133 { return __a->test_and_set(__m); }
1136 atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
1137 memory_order __m) noexcept
1138 { return __a->test_and_set(__m); }
1141 atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
1142 { __a->clear(__m); }
1145 atomic_flag_clear_explicit(volatile atomic_flag* __a,
1146 memory_order __m) noexcept
1147 { __a->clear(__m); }
1150 atomic_flag_test_and_set(atomic_flag* __a) noexcept
1151 { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
1154 atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
1155 { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
1158 atomic_flag_clear(atomic_flag* __a) noexcept
1159 { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
1162 atomic_flag_clear(volatile atomic_flag* __a) noexcept
1163 { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
// Internal helpers: map an atomic's value/difference type so the
// non-member functions below deduce _ITp only from the atomic object,
// never from the value argument (prevents ambiguous deduction).
1166 template<typename _Tp>
1167 using __atomic_val_t = typename atomic<_Tp>::value_type;
1168 template<typename _Tp>
1169 using __atomic_diff_t = typename atomic<_Tp>::difference_type;
1171 // [atomics.nonmembers] Non-member functions.
1172 // Function templates generally applicable to atomic types.
// Each function simply forwards to the corresponding member on the
// pointed-to atomic; volatile overloads mirror the non-volatile ones.
1173 template<typename _ITp>
1175 atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
1176 { return __a->is_lock_free(); }
1178 template<typename _ITp>
1180 atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
1181 { return __a->is_lock_free(); }
// atomic_init: non-atomic-looking initialization, implemented here as
// a relaxed store.
1183 template<typename _ITp>
1185 atomic_init(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1186 { __a->store(__i, memory_order_relaxed); }
1188 template<typename _ITp>
1190 atomic_init(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1191 { __a->store(__i, memory_order_relaxed); }
1193 template<typename _ITp>
1195 atomic_store_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
1196 memory_order __m) noexcept
1197 { __a->store(__i, __m); }
1199 template<typename _ITp>
1201 atomic_store_explicit(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
1202 memory_order __m) noexcept
1203 { __a->store(__i, __m); }
1205 template<typename _ITp>
1207 atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
1208 { return __a->load(__m); }
1210 template<typename _ITp>
1212 atomic_load_explicit(const volatile atomic<_ITp>* __a,
1213 memory_order __m) noexcept
1214 { return __a->load(__m); }
1216 template<typename _ITp>
1218 atomic_exchange_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
1219 memory_order __m) noexcept
1220 { return __a->exchange(__i, __m); }
1222 template<typename _ITp>
1224 atomic_exchange_explicit(volatile atomic<_ITp>* __a,
1225 __atomic_val_t<_ITp> __i,
1226 memory_order __m) noexcept
1227 { return __a->exchange(__i, __m); }
// CAS wrappers: __i1 points at the expected value and receives the
// observed value on failure; __m1/__m2 are the success/failure orders.
1229 template<typename _ITp>
1231 atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
1232 __atomic_val_t<_ITp>* __i1,
1233 __atomic_val_t<_ITp> __i2,
1235 memory_order __m2) noexcept
1236 { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
1238 template<typename _ITp>
1240 atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
1241 __atomic_val_t<_ITp>* __i1,
1242 __atomic_val_t<_ITp> __i2,
1244 memory_order __m2) noexcept
1245 { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
1247 template<typename _ITp>
1249 atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
1250 __atomic_val_t<_ITp>* __i1,
1251 __atomic_val_t<_ITp> __i2,
1253 memory_order __m2) noexcept
1254 { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
1256 template<typename _ITp>
1258 atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
1259 __atomic_val_t<_ITp>* __i1,
1260 __atomic_val_t<_ITp> __i2,
1262 memory_order __m2) noexcept
1263 { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
// Non-_explicit forms: delegate with memory_order_seq_cst.
1266 template<typename _ITp>
1268 atomic_store(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1269 { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
1271 template<typename _ITp>
1273 atomic_store(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1274 { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
1276 template<typename _ITp>
1278 atomic_load(const atomic<_ITp>* __a) noexcept
1279 { return atomic_load_explicit(__a, memory_order_seq_cst); }
1281 template<typename _ITp>
1283 atomic_load(const volatile atomic<_ITp>* __a) noexcept
1284 { return atomic_load_explicit(__a, memory_order_seq_cst); }
1286 template<typename _ITp>
1288 atomic_exchange(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1289 { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
1291 template<typename _ITp>
1293 atomic_exchange(volatile atomic<_ITp>* __a,
1294 __atomic_val_t<_ITp> __i) noexcept
1295 { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
1297 template<typename _ITp>
1299 atomic_compare_exchange_weak(atomic<_ITp>* __a,
1300 __atomic_val_t<_ITp>* __i1,
1301 __atomic_val_t<_ITp> __i2) noexcept
1303 return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
1304 memory_order_seq_cst,
1305 memory_order_seq_cst);
1308 template<typename _ITp>
1310 atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
1311 __atomic_val_t<_ITp>* __i1,
1312 __atomic_val_t<_ITp> __i2) noexcept
1314 return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
1315 memory_order_seq_cst,
1316 memory_order_seq_cst);
1319 template<typename _ITp>
1321 atomic_compare_exchange_strong(atomic<_ITp>* __a,
1322 __atomic_val_t<_ITp>* __i1,
1323 __atomic_val_t<_ITp> __i2) noexcept
1325 return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
1326 memory_order_seq_cst,
1327 memory_order_seq_cst);
1330 template<typename _ITp>
1332 atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
1333 __atomic_val_t<_ITp>* __i1,
1334 __atomic_val_t<_ITp> __i2) noexcept
1336 return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
1337 memory_order_seq_cst,
1338 memory_order_seq_cst);
  // Function templates for atomic_integral and atomic_pointer operations only.
  // Some operations (and, or, xor) are only available for atomic integrals,
  // which is implemented by taking a parameter of type __atomic_base<_ITp>*.
1345 template<typename _ITp>
1347 atomic_fetch_add_explicit(atomic<_ITp>* __a,
1348 __atomic_diff_t<_ITp> __i,
1349 memory_order __m) noexcept
1350 { return __a->fetch_add(__i, __m); }
1352 template<typename _ITp>
1354 atomic_fetch_add_explicit(volatile atomic<_ITp>* __a,
1355 __atomic_diff_t<_ITp> __i,
1356 memory_order __m) noexcept
1357 { return __a->fetch_add(__i, __m); }
1359 template<typename _ITp>
1361 atomic_fetch_sub_explicit(atomic<_ITp>* __a,
1362 __atomic_diff_t<_ITp> __i,
1363 memory_order __m) noexcept
1364 { return __a->fetch_sub(__i, __m); }
1366 template<typename _ITp>
1368 atomic_fetch_sub_explicit(volatile atomic<_ITp>* __a,
1369 __atomic_diff_t<_ITp> __i,
1370 memory_order __m) noexcept
1371 { return __a->fetch_sub(__i, __m); }
1373 template<typename _ITp>
1375 atomic_fetch_and_explicit(__atomic_base<_ITp>* __a,
1376 __atomic_val_t<_ITp> __i,
1377 memory_order __m) noexcept
1378 { return __a->fetch_and(__i, __m); }
1380 template<typename _ITp>
1382 atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a,
1383 __atomic_val_t<_ITp> __i,
1384 memory_order __m) noexcept
1385 { return __a->fetch_and(__i, __m); }
1387 template<typename _ITp>
1389 atomic_fetch_or_explicit(__atomic_base<_ITp>* __a,
1390 __atomic_val_t<_ITp> __i,
1391 memory_order __m) noexcept
1392 { return __a->fetch_or(__i, __m); }
1394 template<typename _ITp>
1396 atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a,
1397 __atomic_val_t<_ITp> __i,
1398 memory_order __m) noexcept
1399 { return __a->fetch_or(__i, __m); }
1401 template<typename _ITp>
1403 atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a,
1404 __atomic_val_t<_ITp> __i,
1405 memory_order __m) noexcept
1406 { return __a->fetch_xor(__i, __m); }
1408 template<typename _ITp>
1410 atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a,
1411 __atomic_val_t<_ITp> __i,
1412 memory_order __m) noexcept
1413 { return __a->fetch_xor(__i, __m); }
1415 template<typename _ITp>
1417 atomic_fetch_add(atomic<_ITp>* __a,
1418 __atomic_diff_t<_ITp> __i) noexcept
1419 { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
1421 template<typename _ITp>
1423 atomic_fetch_add(volatile atomic<_ITp>* __a,
1424 __atomic_diff_t<_ITp> __i) noexcept
1425 { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
1427 template<typename _ITp>
1429 atomic_fetch_sub(atomic<_ITp>* __a,
1430 __atomic_diff_t<_ITp> __i) noexcept
1431 { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
1433 template<typename _ITp>
1435 atomic_fetch_sub(volatile atomic<_ITp>* __a,
1436 __atomic_diff_t<_ITp> __i) noexcept
1437 { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
1439 template<typename _ITp>
1441 atomic_fetch_and(__atomic_base<_ITp>* __a,
1442 __atomic_val_t<_ITp> __i) noexcept
1443 { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
1445 template<typename _ITp>
1447 atomic_fetch_and(volatile __atomic_base<_ITp>* __a,
1448 __atomic_val_t<_ITp> __i) noexcept
1449 { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
1451 template<typename _ITp>
1453 atomic_fetch_or(__atomic_base<_ITp>* __a,
1454 __atomic_val_t<_ITp> __i) noexcept
1455 { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
1457 template<typename _ITp>
1459 atomic_fetch_or(volatile __atomic_base<_ITp>* __a,
1460 __atomic_val_t<_ITp> __i) noexcept
1461 { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
1463 template<typename _ITp>
1465 atomic_fetch_xor(__atomic_base<_ITp>* __a,
1466 __atomic_val_t<_ITp> __i) noexcept
1467 { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
1469 template<typename _ITp>
1471 atomic_fetch_xor(volatile __atomic_base<_ITp>* __a,
1472 __atomic_val_t<_ITp> __i) noexcept
1473 { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
1477 _GLIBCXX_END_NAMESPACE_VERSION
1482 #endif // _GLIBCXX_ATOMIC