3 // Copyright (C) 2008-2018 Free Software Foundation, Inc.
5 // This file is part of the GNU ISO C++ Library. This library is free
6 // software; you can redistribute it and/or modify it under the
7 // terms of the GNU General Public License as published by the
8 // Free Software Foundation; either version 3, or (at your option)
11 // This library is distributed in the hope that it will be useful,
12 // but WITHOUT ANY WARRANTY; without even the implied warranty of
13 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 // GNU General Public License for more details.
16 // Under Section 7 of GPL version 3, you are granted additional
17 // permissions described in the GCC Runtime Library Exception, version
18 // 3.1, as published by the Free Software Foundation.
20 // You should have received a copy of the GNU General Public License and
21 // a copy of the GCC Runtime Library Exception along with this program;
22 // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
23 // <http://www.gnu.org/licenses/>.
/** @file include/atomic
 *  This is a Standard C++ Library header.
 */
29 // Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
30 // http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html
32 #ifndef _GLIBCXX_ATOMIC
33 #define _GLIBCXX_ATOMIC 1
35 #pragma GCC system_header
37 #if __cplusplus < 201103L
38 # include <bits/c++0x_warning.h>
41 #include <bits/atomic_base.h>
42 #include <bits/move.h>
44 namespace std _GLIBCXX_VISIBILITY(default)
46 _GLIBCXX_BEGIN_NAMESPACE_VERSION
53 #if __cplusplus > 201402L
54 # define __cpp_lib_atomic_is_always_lock_free 201603
57 template<typename _Tp>
61 // NB: No operators or fetch-operations for this type.
66 __atomic_base<bool> _M_base;
69 atomic() noexcept = default;
70 ~atomic() noexcept = default;
71 atomic(const atomic&) = delete;
72 atomic& operator=(const atomic&) = delete;
73 atomic& operator=(const atomic&) volatile = delete;
75 constexpr atomic(bool __i) noexcept : _M_base(__i) { }
78 operator=(bool __i) noexcept
79 { return _M_base.operator=(__i); }
82 operator=(bool __i) volatile noexcept
83 { return _M_base.operator=(__i); }
85 operator bool() const noexcept
86 { return _M_base.load(); }
88 operator bool() const volatile noexcept
89 { return _M_base.load(); }
92 is_lock_free() const noexcept { return _M_base.is_lock_free(); }
95 is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }
97 #if __cplusplus > 201402L
98 static constexpr bool is_always_lock_free = ATOMIC_BOOL_LOCK_FREE == 2;
102 store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
103 { _M_base.store(__i, __m); }
106 store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
107 { _M_base.store(__i, __m); }
110 load(memory_order __m = memory_order_seq_cst) const noexcept
111 { return _M_base.load(__m); }
114 load(memory_order __m = memory_order_seq_cst) const volatile noexcept
115 { return _M_base.load(__m); }
118 exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
119 { return _M_base.exchange(__i, __m); }
123 memory_order __m = memory_order_seq_cst) volatile noexcept
124 { return _M_base.exchange(__i, __m); }
127 compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
128 memory_order __m2) noexcept
129 { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
132 compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
133 memory_order __m2) volatile noexcept
134 { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
137 compare_exchange_weak(bool& __i1, bool __i2,
138 memory_order __m = memory_order_seq_cst) noexcept
139 { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
142 compare_exchange_weak(bool& __i1, bool __i2,
143 memory_order __m = memory_order_seq_cst) volatile noexcept
144 { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
147 compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
148 memory_order __m2) noexcept
149 { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
152 compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
153 memory_order __m2) volatile noexcept
154 { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
157 compare_exchange_strong(bool& __i1, bool __i2,
158 memory_order __m = memory_order_seq_cst) noexcept
159 { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
162 compare_exchange_strong(bool& __i1, bool __i2,
163 memory_order __m = memory_order_seq_cst) volatile noexcept
164 { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
/**
 * @brief Generic atomic type, primary class template.
 *
 * @tparam _Tp  Type to be made atomic, must be trivially copyable.
 */
173 template<typename _Tp>
177 // Align 1/2/4/8/16-byte types to at least their size.
178 static constexpr int _S_min_alignment
179 = (sizeof(_Tp) & (sizeof(_Tp) - 1)) || sizeof(_Tp) > 16
182 static constexpr int _S_alignment
183 = _S_min_alignment > alignof(_Tp) ? _S_min_alignment : alignof(_Tp);
185 alignas(_S_alignment) _Tp _M_i;
187 static_assert(__is_trivially_copyable(_Tp),
188 "std::atomic requires a trivially copyable type");
190 static_assert(sizeof(_Tp) > 0,
191 "Incomplete or zero-sized types are not supported");
194 atomic() noexcept = default;
195 ~atomic() noexcept = default;
196 atomic(const atomic&) = delete;
197 atomic& operator=(const atomic&) = delete;
198 atomic& operator=(const atomic&) volatile = delete;
200 constexpr atomic(_Tp __i) noexcept : _M_i(__i) { }
202 operator _Tp() const noexcept
205 operator _Tp() const volatile noexcept
209 operator=(_Tp __i) noexcept
210 { store(__i); return __i; }
213 operator=(_Tp __i) volatile noexcept
214 { store(__i); return __i; }
217 is_lock_free() const noexcept
219 // Produce a fake, minimally aligned pointer.
220 return __atomic_is_lock_free(sizeof(_M_i),
221 reinterpret_cast<void *>(-__alignof(_M_i)));
225 is_lock_free() const volatile noexcept
227 // Produce a fake, minimally aligned pointer.
228 return __atomic_is_lock_free(sizeof(_M_i),
229 reinterpret_cast<void *>(-__alignof(_M_i)));
232 #if __cplusplus > 201402L
233 static constexpr bool is_always_lock_free
234 = __atomic_always_lock_free(sizeof(_M_i), 0);
238 store(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
239 { __atomic_store(std::__addressof(_M_i), std::__addressof(__i), __m); }
242 store(_Tp __i, memory_order __m = memory_order_seq_cst) volatile noexcept
243 { __atomic_store(std::__addressof(_M_i), std::__addressof(__i), __m); }
246 load(memory_order __m = memory_order_seq_cst) const noexcept
248 alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
249 _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
250 __atomic_load(std::__addressof(_M_i), __ptr, __m);
255 load(memory_order __m = memory_order_seq_cst) const volatile noexcept
257 alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
258 _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
259 __atomic_load(std::__addressof(_M_i), __ptr, __m);
264 exchange(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
266 alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
267 _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
268 __atomic_exchange(std::__addressof(_M_i), std::__addressof(__i),
275 memory_order __m = memory_order_seq_cst) volatile noexcept
277 alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
278 _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
279 __atomic_exchange(std::__addressof(_M_i), std::__addressof(__i),
285 compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
286 memory_order __f) noexcept
288 return __atomic_compare_exchange(std::__addressof(_M_i),
289 std::__addressof(__e),
290 std::__addressof(__i),
295 compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
296 memory_order __f) volatile noexcept
298 return __atomic_compare_exchange(std::__addressof(_M_i),
299 std::__addressof(__e),
300 std::__addressof(__i),
305 compare_exchange_weak(_Tp& __e, _Tp __i,
306 memory_order __m = memory_order_seq_cst) noexcept
307 { return compare_exchange_weak(__e, __i, __m,
308 __cmpexch_failure_order(__m)); }
311 compare_exchange_weak(_Tp& __e, _Tp __i,
312 memory_order __m = memory_order_seq_cst) volatile noexcept
313 { return compare_exchange_weak(__e, __i, __m,
314 __cmpexch_failure_order(__m)); }
317 compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
318 memory_order __f) noexcept
320 return __atomic_compare_exchange(std::__addressof(_M_i),
321 std::__addressof(__e),
322 std::__addressof(__i),
327 compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
328 memory_order __f) volatile noexcept
330 return __atomic_compare_exchange(std::__addressof(_M_i),
331 std::__addressof(__e),
332 std::__addressof(__i),
337 compare_exchange_strong(_Tp& __e, _Tp __i,
338 memory_order __m = memory_order_seq_cst) noexcept
339 { return compare_exchange_strong(__e, __i, __m,
340 __cmpexch_failure_order(__m)); }
343 compare_exchange_strong(_Tp& __e, _Tp __i,
344 memory_order __m = memory_order_seq_cst) volatile noexcept
345 { return compare_exchange_strong(__e, __i, __m,
346 __cmpexch_failure_order(__m)); }
350 /// Partial specialization for pointer types.
351 template<typename _Tp>
354 typedef _Tp* __pointer_type;
355 typedef __atomic_base<_Tp*> __base_type;
358 atomic() noexcept = default;
359 ~atomic() noexcept = default;
360 atomic(const atomic&) = delete;
361 atomic& operator=(const atomic&) = delete;
362 atomic& operator=(const atomic&) volatile = delete;
364 constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }
366 operator __pointer_type() const noexcept
367 { return __pointer_type(_M_b); }
369 operator __pointer_type() const volatile noexcept
370 { return __pointer_type(_M_b); }
373 operator=(__pointer_type __p) noexcept
374 { return _M_b.operator=(__p); }
377 operator=(__pointer_type __p) volatile noexcept
378 { return _M_b.operator=(__p); }
381 operator++(int) noexcept
385 operator++(int) volatile noexcept
389 operator--(int) noexcept
393 operator--(int) volatile noexcept
397 operator++() noexcept
401 operator++() volatile noexcept
405 operator--() noexcept
409 operator--() volatile noexcept
413 operator+=(ptrdiff_t __d) noexcept
414 { return _M_b.operator+=(__d); }
417 operator+=(ptrdiff_t __d) volatile noexcept
418 { return _M_b.operator+=(__d); }
421 operator-=(ptrdiff_t __d) noexcept
422 { return _M_b.operator-=(__d); }
425 operator-=(ptrdiff_t __d) volatile noexcept
426 { return _M_b.operator-=(__d); }
429 is_lock_free() const noexcept
430 { return _M_b.is_lock_free(); }
433 is_lock_free() const volatile noexcept
434 { return _M_b.is_lock_free(); }
436 #if __cplusplus > 201402L
437 static constexpr bool is_always_lock_free = ATOMIC_POINTER_LOCK_FREE == 2;
441 store(__pointer_type __p,
442 memory_order __m = memory_order_seq_cst) noexcept
443 { return _M_b.store(__p, __m); }
446 store(__pointer_type __p,
447 memory_order __m = memory_order_seq_cst) volatile noexcept
448 { return _M_b.store(__p, __m); }
451 load(memory_order __m = memory_order_seq_cst) const noexcept
452 { return _M_b.load(__m); }
455 load(memory_order __m = memory_order_seq_cst) const volatile noexcept
456 { return _M_b.load(__m); }
459 exchange(__pointer_type __p,
460 memory_order __m = memory_order_seq_cst) noexcept
461 { return _M_b.exchange(__p, __m); }
464 exchange(__pointer_type __p,
465 memory_order __m = memory_order_seq_cst) volatile noexcept
466 { return _M_b.exchange(__p, __m); }
469 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
470 memory_order __m1, memory_order __m2) noexcept
471 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
474 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
476 memory_order __m2) volatile noexcept
477 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
480 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
481 memory_order __m = memory_order_seq_cst) noexcept
483 return compare_exchange_weak(__p1, __p2, __m,
484 __cmpexch_failure_order(__m));
488 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
489 memory_order __m = memory_order_seq_cst) volatile noexcept
491 return compare_exchange_weak(__p1, __p2, __m,
492 __cmpexch_failure_order(__m));
496 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
497 memory_order __m1, memory_order __m2) noexcept
498 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
501 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
503 memory_order __m2) volatile noexcept
504 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
507 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
508 memory_order __m = memory_order_seq_cst) noexcept
510 return _M_b.compare_exchange_strong(__p1, __p2, __m,
511 __cmpexch_failure_order(__m));
515 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
516 memory_order __m = memory_order_seq_cst) volatile noexcept
518 return _M_b.compare_exchange_strong(__p1, __p2, __m,
519 __cmpexch_failure_order(__m));
523 fetch_add(ptrdiff_t __d,
524 memory_order __m = memory_order_seq_cst) noexcept
525 { return _M_b.fetch_add(__d, __m); }
528 fetch_add(ptrdiff_t __d,
529 memory_order __m = memory_order_seq_cst) volatile noexcept
530 { return _M_b.fetch_add(__d, __m); }
533 fetch_sub(ptrdiff_t __d,
534 memory_order __m = memory_order_seq_cst) noexcept
535 { return _M_b.fetch_sub(__d, __m); }
538 fetch_sub(ptrdiff_t __d,
539 memory_order __m = memory_order_seq_cst) volatile noexcept
540 { return _M_b.fetch_sub(__d, __m); }
544 /// Explicit specialization for char.
546 struct atomic<char> : __atomic_base<char>
548 typedef char __integral_type;
549 typedef __atomic_base<char> __base_type;
551 atomic() noexcept = default;
552 ~atomic() noexcept = default;
553 atomic(const atomic&) = delete;
554 atomic& operator=(const atomic&) = delete;
555 atomic& operator=(const atomic&) volatile = delete;
557 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
559 using __base_type::operator __integral_type;
560 using __base_type::operator=;
562 #if __cplusplus > 201402L
563 static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
567 /// Explicit specialization for signed char.
569 struct atomic<signed char> : __atomic_base<signed char>
571 typedef signed char __integral_type;
572 typedef __atomic_base<signed char> __base_type;
574 atomic() noexcept= default;
575 ~atomic() noexcept = default;
576 atomic(const atomic&) = delete;
577 atomic& operator=(const atomic&) = delete;
578 atomic& operator=(const atomic&) volatile = delete;
580 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
582 using __base_type::operator __integral_type;
583 using __base_type::operator=;
585 #if __cplusplus > 201402L
586 static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
590 /// Explicit specialization for unsigned char.
592 struct atomic<unsigned char> : __atomic_base<unsigned char>
594 typedef unsigned char __integral_type;
595 typedef __atomic_base<unsigned char> __base_type;
597 atomic() noexcept= default;
598 ~atomic() noexcept = default;
599 atomic(const atomic&) = delete;
600 atomic& operator=(const atomic&) = delete;
601 atomic& operator=(const atomic&) volatile = delete;
603 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
605 using __base_type::operator __integral_type;
606 using __base_type::operator=;
608 #if __cplusplus > 201402L
609 static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
613 /// Explicit specialization for short.
615 struct atomic<short> : __atomic_base<short>
617 typedef short __integral_type;
618 typedef __atomic_base<short> __base_type;
620 atomic() noexcept = default;
621 ~atomic() noexcept = default;
622 atomic(const atomic&) = delete;
623 atomic& operator=(const atomic&) = delete;
624 atomic& operator=(const atomic&) volatile = delete;
626 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
628 using __base_type::operator __integral_type;
629 using __base_type::operator=;
631 #if __cplusplus > 201402L
632 static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
636 /// Explicit specialization for unsigned short.
638 struct atomic<unsigned short> : __atomic_base<unsigned short>
640 typedef unsigned short __integral_type;
641 typedef __atomic_base<unsigned short> __base_type;
643 atomic() noexcept = default;
644 ~atomic() noexcept = default;
645 atomic(const atomic&) = delete;
646 atomic& operator=(const atomic&) = delete;
647 atomic& operator=(const atomic&) volatile = delete;
649 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
651 using __base_type::operator __integral_type;
652 using __base_type::operator=;
654 #if __cplusplus > 201402L
655 static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
659 /// Explicit specialization for int.
661 struct atomic<int> : __atomic_base<int>
663 typedef int __integral_type;
664 typedef __atomic_base<int> __base_type;
666 atomic() noexcept = default;
667 ~atomic() noexcept = default;
668 atomic(const atomic&) = delete;
669 atomic& operator=(const atomic&) = delete;
670 atomic& operator=(const atomic&) volatile = delete;
672 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
674 using __base_type::operator __integral_type;
675 using __base_type::operator=;
677 #if __cplusplus > 201402L
678 static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
682 /// Explicit specialization for unsigned int.
684 struct atomic<unsigned int> : __atomic_base<unsigned int>
686 typedef unsigned int __integral_type;
687 typedef __atomic_base<unsigned int> __base_type;
689 atomic() noexcept = default;
690 ~atomic() noexcept = default;
691 atomic(const atomic&) = delete;
692 atomic& operator=(const atomic&) = delete;
693 atomic& operator=(const atomic&) volatile = delete;
695 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
697 using __base_type::operator __integral_type;
698 using __base_type::operator=;
700 #if __cplusplus > 201402L
701 static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
705 /// Explicit specialization for long.
707 struct atomic<long> : __atomic_base<long>
709 typedef long __integral_type;
710 typedef __atomic_base<long> __base_type;
712 atomic() noexcept = default;
713 ~atomic() noexcept = default;
714 atomic(const atomic&) = delete;
715 atomic& operator=(const atomic&) = delete;
716 atomic& operator=(const atomic&) volatile = delete;
718 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
720 using __base_type::operator __integral_type;
721 using __base_type::operator=;
723 #if __cplusplus > 201402L
724 static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
728 /// Explicit specialization for unsigned long.
730 struct atomic<unsigned long> : __atomic_base<unsigned long>
732 typedef unsigned long __integral_type;
733 typedef __atomic_base<unsigned long> __base_type;
735 atomic() noexcept = default;
736 ~atomic() noexcept = default;
737 atomic(const atomic&) = delete;
738 atomic& operator=(const atomic&) = delete;
739 atomic& operator=(const atomic&) volatile = delete;
741 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
743 using __base_type::operator __integral_type;
744 using __base_type::operator=;
746 #if __cplusplus > 201402L
747 static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
751 /// Explicit specialization for long long.
753 struct atomic<long long> : __atomic_base<long long>
755 typedef long long __integral_type;
756 typedef __atomic_base<long long> __base_type;
758 atomic() noexcept = default;
759 ~atomic() noexcept = default;
760 atomic(const atomic&) = delete;
761 atomic& operator=(const atomic&) = delete;
762 atomic& operator=(const atomic&) volatile = delete;
764 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
766 using __base_type::operator __integral_type;
767 using __base_type::operator=;
769 #if __cplusplus > 201402L
770 static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
774 /// Explicit specialization for unsigned long long.
776 struct atomic<unsigned long long> : __atomic_base<unsigned long long>
778 typedef unsigned long long __integral_type;
779 typedef __atomic_base<unsigned long long> __base_type;
781 atomic() noexcept = default;
782 ~atomic() noexcept = default;
783 atomic(const atomic&) = delete;
784 atomic& operator=(const atomic&) = delete;
785 atomic& operator=(const atomic&) volatile = delete;
787 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
789 using __base_type::operator __integral_type;
790 using __base_type::operator=;
792 #if __cplusplus > 201402L
793 static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
797 /// Explicit specialization for wchar_t.
799 struct atomic<wchar_t> : __atomic_base<wchar_t>
801 typedef wchar_t __integral_type;
802 typedef __atomic_base<wchar_t> __base_type;
804 atomic() noexcept = default;
805 ~atomic() noexcept = default;
806 atomic(const atomic&) = delete;
807 atomic& operator=(const atomic&) = delete;
808 atomic& operator=(const atomic&) volatile = delete;
810 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
812 using __base_type::operator __integral_type;
813 using __base_type::operator=;
815 #if __cplusplus > 201402L
816 static constexpr bool is_always_lock_free = ATOMIC_WCHAR_T_LOCK_FREE == 2;
820 /// Explicit specialization for char16_t.
822 struct atomic<char16_t> : __atomic_base<char16_t>
824 typedef char16_t __integral_type;
825 typedef __atomic_base<char16_t> __base_type;
827 atomic() noexcept = default;
828 ~atomic() noexcept = default;
829 atomic(const atomic&) = delete;
830 atomic& operator=(const atomic&) = delete;
831 atomic& operator=(const atomic&) volatile = delete;
833 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
835 using __base_type::operator __integral_type;
836 using __base_type::operator=;
838 #if __cplusplus > 201402L
839 static constexpr bool is_always_lock_free = ATOMIC_CHAR16_T_LOCK_FREE == 2;
843 /// Explicit specialization for char32_t.
845 struct atomic<char32_t> : __atomic_base<char32_t>
847 typedef char32_t __integral_type;
848 typedef __atomic_base<char32_t> __base_type;
850 atomic() noexcept = default;
851 ~atomic() noexcept = default;
852 atomic(const atomic&) = delete;
853 atomic& operator=(const atomic&) = delete;
854 atomic& operator=(const atomic&) volatile = delete;
856 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
858 using __base_type::operator __integral_type;
859 using __base_type::operator=;
861 #if __cplusplus > 201402L
862 static constexpr bool is_always_lock_free = ATOMIC_CHAR32_T_LOCK_FREE == 2;
868 typedef atomic<bool> atomic_bool;
871 typedef atomic<char> atomic_char;
874 typedef atomic<signed char> atomic_schar;
877 typedef atomic<unsigned char> atomic_uchar;
880 typedef atomic<short> atomic_short;
883 typedef atomic<unsigned short> atomic_ushort;
886 typedef atomic<int> atomic_int;
889 typedef atomic<unsigned int> atomic_uint;
892 typedef atomic<long> atomic_long;
895 typedef atomic<unsigned long> atomic_ulong;
898 typedef atomic<long long> atomic_llong;
901 typedef atomic<unsigned long long> atomic_ullong;
904 typedef atomic<wchar_t> atomic_wchar_t;
907 typedef atomic<char16_t> atomic_char16_t;
910 typedef atomic<char32_t> atomic_char32_t;
912 #ifdef _GLIBCXX_USE_C99_STDINT_TR1
913 // _GLIBCXX_RESOLVE_LIB_DEFECTS
914 // 2441. Exact-width atomic typedefs should be provided
917 typedef atomic<int8_t> atomic_int8_t;
920 typedef atomic<uint8_t> atomic_uint8_t;
923 typedef atomic<int16_t> atomic_int16_t;
926 typedef atomic<uint16_t> atomic_uint16_t;
929 typedef atomic<int32_t> atomic_int32_t;
932 typedef atomic<uint32_t> atomic_uint32_t;
935 typedef atomic<int64_t> atomic_int64_t;
938 typedef atomic<uint64_t> atomic_uint64_t;
941 /// atomic_int_least8_t
942 typedef atomic<int_least8_t> atomic_int_least8_t;
944 /// atomic_uint_least8_t
945 typedef atomic<uint_least8_t> atomic_uint_least8_t;
947 /// atomic_int_least16_t
948 typedef atomic<int_least16_t> atomic_int_least16_t;
950 /// atomic_uint_least16_t
951 typedef atomic<uint_least16_t> atomic_uint_least16_t;
953 /// atomic_int_least32_t
954 typedef atomic<int_least32_t> atomic_int_least32_t;
956 /// atomic_uint_least32_t
957 typedef atomic<uint_least32_t> atomic_uint_least32_t;
959 /// atomic_int_least64_t
960 typedef atomic<int_least64_t> atomic_int_least64_t;
962 /// atomic_uint_least64_t
963 typedef atomic<uint_least64_t> atomic_uint_least64_t;
966 /// atomic_int_fast8_t
967 typedef atomic<int_fast8_t> atomic_int_fast8_t;
969 /// atomic_uint_fast8_t
970 typedef atomic<uint_fast8_t> atomic_uint_fast8_t;
972 /// atomic_int_fast16_t
973 typedef atomic<int_fast16_t> atomic_int_fast16_t;
975 /// atomic_uint_fast16_t
976 typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
978 /// atomic_int_fast32_t
979 typedef atomic<int_fast32_t> atomic_int_fast32_t;
981 /// atomic_uint_fast32_t
982 typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
984 /// atomic_int_fast64_t
985 typedef atomic<int_fast64_t> atomic_int_fast64_t;
987 /// atomic_uint_fast64_t
988 typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
993 typedef atomic<intptr_t> atomic_intptr_t;
996 typedef atomic<uintptr_t> atomic_uintptr_t;
999 typedef atomic<size_t> atomic_size_t;
1001 /// atomic_ptrdiff_t
1002 typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
1004 #ifdef _GLIBCXX_USE_C99_STDINT_TR1
1006 typedef atomic<intmax_t> atomic_intmax_t;
1008 /// atomic_uintmax_t
1009 typedef atomic<uintmax_t> atomic_uintmax_t;
1012 // Function definitions, atomic_flag operations.
1014 atomic_flag_test_and_set_explicit(atomic_flag* __a,
1015 memory_order __m) noexcept
1016 { return __a->test_and_set(__m); }
1019 atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
1020 memory_order __m) noexcept
1021 { return __a->test_and_set(__m); }
1024 atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
1025 { __a->clear(__m); }
1028 atomic_flag_clear_explicit(volatile atomic_flag* __a,
1029 memory_order __m) noexcept
1030 { __a->clear(__m); }
1033 atomic_flag_test_and_set(atomic_flag* __a) noexcept
1034 { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
1037 atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
1038 { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
1041 atomic_flag_clear(atomic_flag* __a) noexcept
1042 { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
1045 atomic_flag_clear(volatile atomic_flag* __a) noexcept
1046 { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
1049 // Function templates generally applicable to atomic types.
1050 template<typename _ITp>
1052 atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
1053 { return __a->is_lock_free(); }
1055 template<typename _ITp>
1057 atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
1058 { return __a->is_lock_free(); }
1060 template<typename _ITp>
1062 atomic_init(atomic<_ITp>* __a, _ITp __i) noexcept
1063 { __a->store(__i, memory_order_relaxed); }
1065 template<typename _ITp>
1067 atomic_init(volatile atomic<_ITp>* __a, _ITp __i) noexcept
1068 { __a->store(__i, memory_order_relaxed); }
1070 template<typename _ITp>
1072 atomic_store_explicit(atomic<_ITp>* __a, _ITp __i,
1073 memory_order __m) noexcept
1074 { __a->store(__i, __m); }
1076 template<typename _ITp>
1078 atomic_store_explicit(volatile atomic<_ITp>* __a, _ITp __i,
1079 memory_order __m) noexcept
1080 { __a->store(__i, __m); }
1082 template<typename _ITp>
1084 atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
1085 { return __a->load(__m); }
1087 template<typename _ITp>
1089 atomic_load_explicit(const volatile atomic<_ITp>* __a,
1090 memory_order __m) noexcept
1091 { return __a->load(__m); }
1093 template<typename _ITp>
1095 atomic_exchange_explicit(atomic<_ITp>* __a, _ITp __i,
1096 memory_order __m) noexcept
1097 { return __a->exchange(__i, __m); }
1099 template<typename _ITp>
1101 atomic_exchange_explicit(volatile atomic<_ITp>* __a, _ITp __i,
1102 memory_order __m) noexcept
1103 { return __a->exchange(__i, __m); }
1105 template<typename _ITp>
1107 atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
1108 _ITp* __i1, _ITp __i2,
1110 memory_order __m2) noexcept
1111 { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
1113 template<typename _ITp>
1115 atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
1116 _ITp* __i1, _ITp __i2,
1118 memory_order __m2) noexcept
1119 { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
1121 template<typename _ITp>
1123 atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
1124 _ITp* __i1, _ITp __i2,
1126 memory_order __m2) noexcept
1127 { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
1129 template<typename _ITp>
1131 atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
1132 _ITp* __i1, _ITp __i2,
1134 memory_order __m2) noexcept
1135 { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
1138 template<typename _ITp>
1140 atomic_store(atomic<_ITp>* __a, _ITp __i) noexcept
1141 { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
1143 template<typename _ITp>
1145 atomic_store(volatile atomic<_ITp>* __a, _ITp __i) noexcept
1146 { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
1148 template<typename _ITp>
1150 atomic_load(const atomic<_ITp>* __a) noexcept
1151 { return atomic_load_explicit(__a, memory_order_seq_cst); }
1153 template<typename _ITp>
1155 atomic_load(const volatile atomic<_ITp>* __a) noexcept
1156 { return atomic_load_explicit(__a, memory_order_seq_cst); }
1158 template<typename _ITp>
1160 atomic_exchange(atomic<_ITp>* __a, _ITp __i) noexcept
1161 { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
1163 template<typename _ITp>
1165 atomic_exchange(volatile atomic<_ITp>* __a, _ITp __i) noexcept
1166 { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
1168 template<typename _ITp>
1170 atomic_compare_exchange_weak(atomic<_ITp>* __a,
1171 _ITp* __i1, _ITp __i2) noexcept
1173 return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
1174 memory_order_seq_cst,
1175 memory_order_seq_cst);
1178 template<typename _ITp>
1180 atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
1181 _ITp* __i1, _ITp __i2) noexcept
1183 return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
1184 memory_order_seq_cst,
1185 memory_order_seq_cst);
1188 template<typename _ITp>
1190 atomic_compare_exchange_strong(atomic<_ITp>* __a,
1191 _ITp* __i1, _ITp __i2) noexcept
1193 return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
1194 memory_order_seq_cst,
1195 memory_order_seq_cst);
1198 template<typename _ITp>
1200 atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
1201 _ITp* __i1, _ITp __i2) noexcept
1203 return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
1204 memory_order_seq_cst,
1205 memory_order_seq_cst);
1208 // Function templates for atomic_integral operations only, using
1209 // __atomic_base. Template argument should be constricted to
1210 // intergral types as specified in the standard, excluding address
1212 template<typename _ITp>
1214 atomic_fetch_add_explicit(__atomic_base<_ITp>* __a, _ITp __i,
1215 memory_order __m) noexcept
1216 { return __a->fetch_add(__i, __m); }
1218 template<typename _ITp>
1220 atomic_fetch_add_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
1221 memory_order __m) noexcept
1222 { return __a->fetch_add(__i, __m); }
1224 template<typename _ITp>
1226 atomic_fetch_sub_explicit(__atomic_base<_ITp>* __a, _ITp __i,
1227 memory_order __m) noexcept
1228 { return __a->fetch_sub(__i, __m); }
1230 template<typename _ITp>
1232 atomic_fetch_sub_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
1233 memory_order __m) noexcept
1234 { return __a->fetch_sub(__i, __m); }
1236 template<typename _ITp>
1238 atomic_fetch_and_explicit(__atomic_base<_ITp>* __a, _ITp __i,
1239 memory_order __m) noexcept
1240 { return __a->fetch_and(__i, __m); }
1242 template<typename _ITp>
1244 atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
1245 memory_order __m) noexcept
1246 { return __a->fetch_and(__i, __m); }
1248 template<typename _ITp>
1250 atomic_fetch_or_explicit(__atomic_base<_ITp>* __a, _ITp __i,
1251 memory_order __m) noexcept
1252 { return __a->fetch_or(__i, __m); }
1254 template<typename _ITp>
1256 atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
1257 memory_order __m) noexcept
1258 { return __a->fetch_or(__i, __m); }
1260 template<typename _ITp>
1262 atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a, _ITp __i,
1263 memory_order __m) noexcept
1264 { return __a->fetch_xor(__i, __m); }
1266 template<typename _ITp>
1268 atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
1269 memory_order __m) noexcept
1270 { return __a->fetch_xor(__i, __m); }
1272 template<typename _ITp>
1274 atomic_fetch_add(__atomic_base<_ITp>* __a, _ITp __i) noexcept
1275 { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
1277 template<typename _ITp>
1279 atomic_fetch_add(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
1280 { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
1282 template<typename _ITp>
1284 atomic_fetch_sub(__atomic_base<_ITp>* __a, _ITp __i) noexcept
1285 { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
1287 template<typename _ITp>
1289 atomic_fetch_sub(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
1290 { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
1292 template<typename _ITp>
1294 atomic_fetch_and(__atomic_base<_ITp>* __a, _ITp __i) noexcept
1295 { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
1297 template<typename _ITp>
1299 atomic_fetch_and(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
1300 { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
1302 template<typename _ITp>
1304 atomic_fetch_or(__atomic_base<_ITp>* __a, _ITp __i) noexcept
1305 { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
1307 template<typename _ITp>
1309 atomic_fetch_or(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
1310 { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
1312 template<typename _ITp>
1314 atomic_fetch_xor(__atomic_base<_ITp>* __a, _ITp __i) noexcept
1315 { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
1317 template<typename _ITp>
1319 atomic_fetch_xor(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
1320 { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
// Function template overloads for pointer types.  (Function templates
// cannot be partially specialized; these are plain overloads.)
1324 template<typename _ITp>
1326 atomic_fetch_add_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
1327 memory_order __m) noexcept
1328 { return __a->fetch_add(__d, __m); }
1330 template<typename _ITp>
1332 atomic_fetch_add_explicit(volatile atomic<_ITp*>* __a, ptrdiff_t __d,
1333 memory_order __m) noexcept
1334 { return __a->fetch_add(__d, __m); }
1336 template<typename _ITp>
1338 atomic_fetch_add(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
1339 { return __a->fetch_add(__d); }
1341 template<typename _ITp>
1343 atomic_fetch_add(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
1344 { return __a->fetch_add(__d); }
1346 template<typename _ITp>
1348 atomic_fetch_sub_explicit(volatile atomic<_ITp*>* __a,
1349 ptrdiff_t __d, memory_order __m) noexcept
1350 { return __a->fetch_sub(__d, __m); }
1352 template<typename _ITp>
1354 atomic_fetch_sub_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
1355 memory_order __m) noexcept
1356 { return __a->fetch_sub(__d, __m); }
1358 template<typename _ITp>
1360 atomic_fetch_sub(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
1361 { return __a->fetch_sub(__d); }
1363 template<typename _ITp>
1365 atomic_fetch_sub(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
1366 { return __a->fetch_sub(__d); }
1369 _GLIBCXX_END_NAMESPACE_VERSION
1374 #endif // _GLIBCXX_ATOMIC