libstdc++
atomic
Go to the documentation of this file.
1// -*- C++ -*- header.
2
3// Copyright (C) 2008-2021 Free Software Foundation, Inc.
4//
5// This file is part of the GNU ISO C++ Library. This library is free
6// software; you can redistribute it and/or modify it under the
7// terms of the GNU General Public License as published by the
8// Free Software Foundation; either version 3, or (at your option)
9// any later version.
10
11// This library is distributed in the hope that it will be useful,
12// but WITHOUT ANY WARRANTY; without even the implied warranty of
13// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14// GNU General Public License for more details.
15
16// Under Section 7 of GPL version 3, you are granted additional
17// permissions described in the GCC Runtime Library Exception, version
18// 3.1, as published by the Free Software Foundation.
19
20// You should have received a copy of the GNU General Public License and
21// a copy of the GCC Runtime Library Exception along with this program;
22// see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
23// <http://www.gnu.org/licenses/>.
24
25/** @file include/atomic
26 * This is a Standard C++ Library header.
27 */
28
29// Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
30// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html
31
32#ifndef _GLIBCXX_ATOMIC
33#define _GLIBCXX_ATOMIC 1
34
35#pragma GCC system_header
36
37#if __cplusplus < 201103L
38# include <bits/c++0x_warning.h>
39#else
40
41#include <bits/atomic_base.h>
42
43namespace std _GLIBCXX_VISIBILITY(default)
44{
45_GLIBCXX_BEGIN_NAMESPACE_VERSION
46
47 /**
48 * @addtogroup atomics
49 * @{
50 */
51
52#if __cplusplus >= 201703L
53# define __cpp_lib_atomic_is_always_lock_free 201603
54#endif
55
  // Forward declaration of the primary template; defined below, after the
  // atomic<bool> explicit specialization.
  template<typename _Tp>
    struct atomic;
58
  /// atomic<bool>
  // NB: No operators or fetch-operations for this type.
  // Every member is a thin forwarder to the __atomic_base<bool> data member;
  // non-volatile and volatile overloads are provided pairwise as required by
  // [atomics.types.generic].
  template<>
  struct atomic<bool>
  {
    using value_type = bool;

  private:
    __atomic_base<bool> _M_base;

  public:
    atomic() noexcept = default;
    ~atomic() noexcept = default;
    // Atomics are neither copyable nor copy-assignable.
    atomic(const atomic&) = delete;
    atomic& operator=(const atomic&) = delete;
    atomic& operator=(const atomic&) volatile = delete;

    // Non-atomic initialization with a value (constexpr, so usable for
    // constant initialization of statics).
    constexpr atomic(bool __i) noexcept : _M_base(__i) { }

    // Assignment from bool performs a seq-cst atomic store and returns the
    // stored value (per the standard, not *this).
    bool
    operator=(bool __i) noexcept
    { return _M_base.operator=(__i); }

    bool
    operator=(bool __i) volatile noexcept
    { return _M_base.operator=(__i); }

    // Implicit conversion performs a seq-cst atomic load.
    operator bool() const noexcept
    { return _M_base.load(); }

    operator bool() const volatile noexcept
    { return _M_base.load(); }

    bool
    is_lock_free() const noexcept { return _M_base.is_lock_free(); }

    bool
    is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }

#if __cplusplus >= 201703L
    // Lock-free on every object iff the platform macro reports
    // "always lock-free" (value 2).
    static constexpr bool is_always_lock_free = ATOMIC_BOOL_LOCK_FREE == 2;
#endif

    void
    store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
    { _M_base.store(__i, __m); }

    void
    store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
    { _M_base.store(__i, __m); }

    bool
    load(memory_order __m = memory_order_seq_cst) const noexcept
    { return _M_base.load(__m); }

    bool
    load(memory_order __m = memory_order_seq_cst) const volatile noexcept
    { return _M_base.load(__m); }

    bool
    exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.exchange(__i, __m); }

    bool
    exchange(bool __i,
	     memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.exchange(__i, __m); }

    // CAS overloads taking explicit success (__m1) and failure (__m2)
    // orders; on failure __i1 is updated with the observed value.
    bool
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
			  memory_order __m2) noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
			  memory_order __m2) volatile noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

    // Single-order CAS overloads; the base derives the failure order.
    bool
    compare_exchange_weak(bool& __i1, bool __i2,
			  memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2,
			  memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
			    memory_order __m2) noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
			    memory_order __m2) volatile noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2,
			    memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2,
			    memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }

#if __cpp_lib_atomic_wait
    // C++20 atomic waiting: blocks until the value observably differs
    // from __old.
    void
    wait(bool __old, memory_order __m = memory_order_seq_cst) const noexcept
    { _M_base.wait(__old, __m); }

    // TODO add const volatile overload

    void
    notify_one() noexcept
    { _M_base.notify_one(); }

    void
    notify_all() noexcept
    { _M_base.notify_all(); }
#endif // __cpp_lib_atomic_wait
  };
183
// Since C++20 (P0883) a default-constructed std::atomic is value-initialized;
// before that the value was left uninitialized.  This macro expands to the
// member initializer only in C++20 and later.
#if __cplusplus <= 201703L
# define _GLIBCXX20_INIT(I)
#else
# define _GLIBCXX20_INIT(I) = I
#endif

  /**
   * @brief Generic atomic type, primary class template.
   *
   * @tparam _Tp  Type to be made atomic, must be trivially copyable.
   */
  template<typename _Tp>
    struct atomic
    {
      using value_type = _Tp;

    private:
      // Align 1/2/4/8/16-byte types to at least their size.
      // (sizeof & (sizeof - 1)) is non-zero for non-power-of-two sizes;
      // those, and types larger than 16 bytes, get no extra alignment
      // (0 here, so alignof(_Tp) wins below).  Over-aligning power-of-two
      // sizes lets the __atomic builtins use lock-free instructions.
      static constexpr int _S_min_alignment
	= (sizeof(_Tp) & (sizeof(_Tp) - 1)) || sizeof(_Tp) > 16
	? 0 : sizeof(_Tp);

      static constexpr int _S_alignment
	= _S_min_alignment > alignof(_Tp) ? _S_min_alignment : alignof(_Tp);

      alignas(_S_alignment) _Tp _M_i _GLIBCXX20_INIT(_Tp());

      static_assert(__is_trivially_copyable(_Tp),
		    "std::atomic requires a trivially copyable type");

      static_assert(sizeof(_Tp) > 0,
		    "Incomplete or zero-sized types are not supported");

#if __cplusplus > 201703L
      // C++20 additionally requires all four copy/move operations
      // ([atomics.types.generic]).
      static_assert(is_copy_constructible_v<_Tp>);
      static_assert(is_move_constructible_v<_Tp>);
      static_assert(is_copy_assignable_v<_Tp>);
      static_assert(is_move_assignable_v<_Tp>);
#endif

    public:
      atomic() = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(_Tp __i) noexcept : _M_i(__i) { }

      // Conversion performs a seq-cst load.
      operator _Tp() const noexcept
      { return load(); }

      operator _Tp() const volatile noexcept
      { return load(); }

      // Assignment performs a seq-cst store and returns the stored value.
      _Tp
      operator=(_Tp __i) noexcept
      { store(__i); return __i; }

      _Tp
      operator=(_Tp __i) volatile noexcept
      { store(__i); return __i; }

      bool
      is_lock_free() const noexcept
      {
	// Produce a fake, minimally aligned pointer: -_S_alignment is the
	// highest address with exactly _S_alignment alignment, so the
	// builtin answers for the worst-case object placement.
	return __atomic_is_lock_free(sizeof(_M_i),
	    reinterpret_cast<void *>(-_S_alignment));
      }

      bool
      is_lock_free() const volatile noexcept
      {
	// Produce a fake, minimally aligned pointer.
	return __atomic_is_lock_free(sizeof(_M_i),
	    reinterpret_cast<void *>(-_S_alignment));
      }

#if __cplusplus >= 201703L
      // Null pointer argument: "lock-free for any object of this size".
      static constexpr bool is_always_lock_free
	= __atomic_always_lock_free(sizeof(_M_i), 0);
#endif

      void
      store(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
      {
	__atomic_store(std::__addressof(_M_i), std::__addressof(__i), int(__m));
      }

      void
      store(_Tp __i, memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	__atomic_store(std::__addressof(_M_i), std::__addressof(__i), int(__m));
      }

      _Tp
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
	// The builtin writes the loaded bytes through a pointer; use a
	// suitably aligned raw buffer so no _Tp is default-constructed.
	alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
	_Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
	__atomic_load(std::__addressof(_M_i), __ptr, int(__m));
	return *__ptr;
      }

      _Tp
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
	alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
	_Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
	__atomic_load(std::__addressof(_M_i), __ptr, int(__m));
	return *__ptr;
      }

      _Tp
      exchange(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
      {
	alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
	_Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
	__atomic_exchange(std::__addressof(_M_i), std::__addressof(__i),
			  __ptr, int(__m));
	return *__ptr;
      }

      _Tp
      exchange(_Tp __i,
	       memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
	_Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
	__atomic_exchange(std::__addressof(_M_i), std::__addressof(__i),
			  __ptr, int(__m));
	return *__ptr;
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
			    memory_order __f) noexcept
      {
	// The failure order may not be memory_order_release or acq_rel
	// ([atomics.types.operations]); debug-mode check only.
	__glibcxx_assert(__is_valid_cmpexch_failure_order(__f));

	return __atomic_compare_exchange(std::__addressof(_M_i),
					 std::__addressof(__e),
					 std::__addressof(__i),
					 true, int(__s), int(__f));
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
			    memory_order __f) volatile noexcept
      {
	__glibcxx_assert(__is_valid_cmpexch_failure_order(__f));

	return __atomic_compare_exchange(std::__addressof(_M_i),
					 std::__addressof(__e),
					 std::__addressof(__i),
					 true, int(__s), int(__f));
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
			    memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_weak(__e, __i, __m,
				     __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
			    memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_weak(__e, __i, __m,
				     __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
			      memory_order __f) noexcept
      {
	__glibcxx_assert(__is_valid_cmpexch_failure_order(__f));

	// 4th argument false selects the strong (non-spuriously-failing)
	// form of the builtin.
	return __atomic_compare_exchange(std::__addressof(_M_i),
					 std::__addressof(__e),
					 std::__addressof(__i),
					 false, int(__s), int(__f));
      }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
			      memory_order __f) volatile noexcept
      {
	__glibcxx_assert(__is_valid_cmpexch_failure_order(__f));

	return __atomic_compare_exchange(std::__addressof(_M_i),
					 std::__addressof(__e),
					 std::__addressof(__i),
					 false, int(__s), int(__f));
      }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
			      memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_strong(__e, __i, __m,
				       __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
			      memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_strong(__e, __i, __m,
				       __cmpexch_failure_order(__m)); }

#if __cpp_lib_atomic_wait
      // C++20 waiting: re-loads with the caller's order via the lambda
      // until the observed value differs from __old.
      void
      wait(_Tp __old, memory_order __m = memory_order_seq_cst) const noexcept
      {
	std::__atomic_wait_address_v(&_M_i, __old,
				     [__m, this] { return this->load(__m); });
      }

      // TODO add const volatile overload

      void
      notify_one() noexcept
      { std::__atomic_notify_address(&_M_i, false); }

      void
      notify_all() noexcept
      { std::__atomic_notify_address(&_M_i, true); }
#endif // __cpp_lib_atomic_wait

    };
#undef _GLIBCXX20_INIT
412
  /// Partial specialization for pointer types.
  // Arithmetic (++/--/+=/-=/fetch_add/fetch_sub) operates in units of
  // sizeof(_Tp); the C++17 static_asserts reject pointers to non-object
  // types (function/void pointers), for which such arithmetic is ill-formed.
  template<typename _Tp>
    struct atomic<_Tp*>
    {
      using value_type = _Tp*;
      using difference_type = ptrdiff_t;

      typedef _Tp* __pointer_type;
      typedef __atomic_base<_Tp*> __base_type;
      __base_type _M_b;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }

      // Conversion performs an atomic load via the base's conversion.
      operator __pointer_type() const noexcept
      { return __pointer_type(_M_b); }

      operator __pointer_type() const volatile noexcept
      { return __pointer_type(_M_b); }

      // Assignment performs an atomic store and returns the stored pointer.
      __pointer_type
      operator=(__pointer_type __p) noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      { return _M_b.operator=(__p); }

      // Post-increment: returns the OLD pointer value.
      __pointer_type
      operator++(int) noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b++;
      }

      __pointer_type
      operator++(int) volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b++;
      }

      // Post-decrement: returns the OLD pointer value.
      __pointer_type
      operator--(int) noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b--;
      }

      __pointer_type
      operator--(int) volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b--;
      }

      // Pre-increment: returns the NEW pointer value.
      __pointer_type
      operator++() noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return ++_M_b;
      }

      __pointer_type
      operator++() volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return ++_M_b;
      }

      // Pre-decrement: returns the NEW pointer value.
      __pointer_type
      operator--() noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return --_M_b;
      }

      __pointer_type
      operator--() volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return --_M_b;
      }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.operator+=(__d);
      }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.operator+=(__d);
      }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.operator-=(__d);
      }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.operator-=(__d);
      }

      bool
      is_lock_free() const noexcept
      { return _M_b.is_lock_free(); }

      bool
      is_lock_free() const volatile noexcept
      { return _M_b.is_lock_free(); }

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_POINTER_LOCK_FREE == 2;
#endif

      void
      store(__pointer_type __p,
	    memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.store(__p, __m); }

      void
      store(__pointer_type __p,
	    memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.store(__p, __m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return _M_b.load(__m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      { return _M_b.load(__m); }

      __pointer_type
      exchange(__pointer_type __p,
	       memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.exchange(__p, __m); }

      __pointer_type
      exchange(__pointer_type __p,
	       memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.exchange(__p, __m); }

      // NOTE(review): the weak overloads deliberately forward to the base's
      // compare_exchange_strong — presumably the pointer __atomic_base
      // provides only the strong form (confirm in bits/atomic_base.h).
      // This is conforming: weak CAS is PERMITTED to fail spuriously, not
      // required to, so strong semantics satisfy the weak contract.
      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
			    memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
			    memory_order __m1,
			    memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
			    memory_order __m = memory_order_seq_cst) noexcept
      {
	return compare_exchange_weak(__p1, __p2, __m,
				     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
			    memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	return compare_exchange_weak(__p1, __p2, __m,
				     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
			      memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
			      memory_order __m1,
			      memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
			      memory_order __m = memory_order_seq_cst) noexcept
      {
	return _M_b.compare_exchange_strong(__p1, __p2, __m,
					    __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
			      memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	return _M_b.compare_exchange_strong(__p1, __p2, __m,
					    __cmpexch_failure_order(__m));
      }

#if __cpp_lib_atomic_wait
      void
      wait(__pointer_type __old, memory_order __m = memory_order_seq_cst) const noexcept
      { _M_b.wait(__old, __m); }

      // TODO add const volatile overload

      void
      notify_one() noexcept
      { _M_b.notify_one(); }

      void
      notify_all() noexcept
      { _M_b.notify_all(); }
#endif // __cpp_lib_atomic_wait

      // fetch_add/fetch_sub return the OLD pointer value; the displacement
      // is scaled by sizeof(_Tp), as for ordinary pointer arithmetic.
      __pointer_type
      fetch_add(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.fetch_add(__d, __m);
      }

      __pointer_type
      fetch_add(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.fetch_add(__d, __m);
      }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.fetch_sub(__d, __m);
      }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.fetch_sub(__d, __m);
      }
    };
703
704
  /// Explicit specialization for char.
  // Derives from __atomic_base<char>, which supplies all operations
  // (including the integral fetch-ops); only construction, the value
  // conversion/assignment, and the lock-free constant are declared here.
  template<>
    struct atomic<char> : __atomic_base<char>
    {
      typedef char __integral_type;
      typedef __atomic_base<char> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
#endif
    };
727
  /// Explicit specialization for signed char.
  // All operations are inherited from __atomic_base<signed char>.
  template<>
    struct atomic<signed char> : __atomic_base<signed char>
    {
      typedef signed char __integral_type;
      typedef __atomic_base<signed char> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      // Same platform macro as plain char.
      static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
#endif
    };
750
  /// Explicit specialization for unsigned char.
  // All operations are inherited from __atomic_base<unsigned char>.
  template<>
    struct atomic<unsigned char> : __atomic_base<unsigned char>
    {
      typedef unsigned char __integral_type;
      typedef __atomic_base<unsigned char> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      // Same platform macro as plain char.
      static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
#endif
    };
773
  /// Explicit specialization for short.
  // All operations are inherited from __atomic_base<short>.
  template<>
    struct atomic<short> : __atomic_base<short>
    {
      typedef short __integral_type;
      typedef __atomic_base<short> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
#endif
    };
796
  /// Explicit specialization for unsigned short.
  // All operations are inherited from __atomic_base<unsigned short>.
  template<>
    struct atomic<unsigned short> : __atomic_base<unsigned short>
    {
      typedef unsigned short __integral_type;
      typedef __atomic_base<unsigned short> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
#endif
    };
819
  /// Explicit specialization for int.
  // All operations are inherited from __atomic_base<int>.
  template<>
    struct atomic<int> : __atomic_base<int>
    {
      typedef int __integral_type;
      typedef __atomic_base<int> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
#endif
    };
842
  /// Explicit specialization for unsigned int.
  // All operations are inherited from __atomic_base<unsigned int>.
  template<>
    struct atomic<unsigned int> : __atomic_base<unsigned int>
    {
      typedef unsigned int __integral_type;
      typedef __atomic_base<unsigned int> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
#endif
    };
865
  /// Explicit specialization for long.
  // All operations are inherited from __atomic_base<long>.
  template<>
    struct atomic<long> : __atomic_base<long>
    {
      typedef long __integral_type;
      typedef __atomic_base<long> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
#endif
    };
888
  /// Explicit specialization for unsigned long.
  // All operations are inherited from __atomic_base<unsigned long>.
  template<>
    struct atomic<unsigned long> : __atomic_base<unsigned long>
    {
      typedef unsigned long __integral_type;
      typedef __atomic_base<unsigned long> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
#endif
    };
911
  /// Explicit specialization for long long.
  // All operations are inherited from __atomic_base<long long>.
  template<>
    struct atomic<long long> : __atomic_base<long long>
    {
      typedef long long __integral_type;
      typedef __atomic_base<long long> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
#endif
    };
934
  /// Explicit specialization for unsigned long long.
  // All operations are inherited from __atomic_base<unsigned long long>.
  template<>
    struct atomic<unsigned long long> : __atomic_base<unsigned long long>
    {
      typedef unsigned long long __integral_type;
      typedef __atomic_base<unsigned long long> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
#endif
    };
957
  /// Explicit specialization for wchar_t.
  // All operations are inherited from __atomic_base<wchar_t>.
  template<>
    struct atomic<wchar_t> : __atomic_base<wchar_t>
    {
      typedef wchar_t __integral_type;
      typedef __atomic_base<wchar_t> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_WCHAR_T_LOCK_FREE == 2;
#endif
    };
980
// char8_t exists only when the compiler enables it (C++20, or -fchar8_t).
#ifdef _GLIBCXX_USE_CHAR8_T
  /// Explicit specialization for char8_t.
  // All operations are inherited from __atomic_base<char8_t>.
  template<>
    struct atomic<char8_t> : __atomic_base<char8_t>
    {
      typedef char8_t __integral_type;
      typedef __atomic_base<char8_t> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

// NOTE(review): this guard is "> 201402L" while the siblings use
// ">= 201703L"; the conditions are equivalent for standard dialects.
#if __cplusplus > 201402L
      static constexpr bool is_always_lock_free = ATOMIC_CHAR8_T_LOCK_FREE == 2;
#endif
    };
#endif
1005
  /// Explicit specialization for char16_t.
  // All operations are inherited from __atomic_base<char16_t>.
  template<>
    struct atomic<char16_t> : __atomic_base<char16_t>
    {
      typedef char16_t __integral_type;
      typedef __atomic_base<char16_t> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_CHAR16_T_LOCK_FREE == 2;
#endif
    };
1028
  /// Explicit specialization for char32_t.
  // All operations are inherited from __atomic_base<char32_t>.
  template<>
    struct atomic<char32_t> : __atomic_base<char32_t>
    {
      typedef char32_t __integral_type;
      typedef __atomic_base<char32_t> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_CHAR32_T_LOCK_FREE == 2;
#endif
    };
1051
1052
  // Named typedefs for the atomic specializations above ([atomics.syn]).

  /// atomic_bool
  typedef atomic<bool>			atomic_bool;

  /// atomic_char
  typedef atomic<char>			atomic_char;

  /// atomic_schar
  typedef atomic<signed char>		atomic_schar;

  /// atomic_uchar
  typedef atomic<unsigned char>		atomic_uchar;

  /// atomic_short
  typedef atomic<short>			atomic_short;

  /// atomic_ushort
  typedef atomic<unsigned short>	atomic_ushort;

  /// atomic_int
  typedef atomic<int>			atomic_int;

  /// atomic_uint
  typedef atomic<unsigned int>		atomic_uint;

  /// atomic_long
  typedef atomic<long>			atomic_long;

  /// atomic_ulong
  typedef atomic<unsigned long>		atomic_ulong;

  /// atomic_llong
  typedef atomic<long long>		atomic_llong;

  /// atomic_ullong
  typedef atomic<unsigned long long>	atomic_ullong;

  /// atomic_wchar_t
  typedef atomic<wchar_t>		atomic_wchar_t;

#ifdef _GLIBCXX_USE_CHAR8_T
  /// atomic_char8_t
  typedef atomic<char8_t>		atomic_char8_t;
#endif

  /// atomic_char16_t
  typedef atomic<char16_t>		atomic_char16_t;

  /// atomic_char32_t
  typedef atomic<char32_t>		atomic_char32_t;
1102
1103#ifdef _GLIBCXX_USE_C99_STDINT_TR1
1104 // _GLIBCXX_RESOLVE_LIB_DEFECTS
1105 // 2441. Exact-width atomic typedefs should be provided
1106
     // These typedefs mirror the <cstdint> integer typedefs and are only
     // defined when the guard macro says those underlying types exist.
1107 /// atomic_int8_t
1108 typedef atomic<int8_t> atomic_int8_t;
1109
1110 /// atomic_uint8_t
1111 typedef atomic<uint8_t> atomic_uint8_t;
1112
1113 /// atomic_int16_t
1114 typedef atomic<int16_t> atomic_int16_t;
1115
1116 /// atomic_uint16_t
1117 typedef atomic<uint16_t> atomic_uint16_t;
1118
1119 /// atomic_int32_t
1120 typedef atomic<int32_t> atomic_int32_t;
1121
1122 /// atomic_uint32_t
1123 typedef atomic<uint32_t> atomic_uint32_t;
1124
1125 /// atomic_int64_t
1126 typedef atomic<int64_t> atomic_int64_t;
1127
1128 /// atomic_uint64_t
1129 typedef atomic<uint64_t> atomic_uint64_t;
1130
1131
1132 /// atomic_int_least8_t
1133 typedef atomic<int_least8_t> atomic_int_least8_t;
1134
1135 /// atomic_uint_least8_t
1136 typedef atomic<uint_least8_t> atomic_uint_least8_t;
1137
1138 /// atomic_int_least16_t
1139 typedef atomic<int_least16_t> atomic_int_least16_t;
1140
1141 /// atomic_uint_least16_t
1142 typedef atomic<uint_least16_t> atomic_uint_least16_t;
1143
1144 /// atomic_int_least32_t
1145 typedef atomic<int_least32_t> atomic_int_least32_t;
1146
1147 /// atomic_uint_least32_t
1148 typedef atomic<uint_least32_t> atomic_uint_least32_t;
1149
1150 /// atomic_int_least64_t
1151 typedef atomic<int_least64_t> atomic_int_least64_t;
1152
1153 /// atomic_uint_least64_t
1154 typedef atomic<uint_least64_t> atomic_uint_least64_t;
1155
1156
1157 /// atomic_int_fast8_t
1158 typedef atomic<int_fast8_t> atomic_int_fast8_t;
1159
1160 /// atomic_uint_fast8_t
1161 typedef atomic<uint_fast8_t> atomic_uint_fast8_t;
1162
1163 /// atomic_int_fast16_t
1164 typedef atomic<int_fast16_t> atomic_int_fast16_t;
1165
1166 /// atomic_uint_fast16_t
1167 typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
1168
1169 /// atomic_int_fast32_t
1170 typedef atomic<int_fast32_t> atomic_int_fast32_t;
1171
1172 /// atomic_uint_fast32_t
1173 typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
1174
1175 /// atomic_int_fast64_t
1176 typedef atomic<int_fast64_t> atomic_int_fast64_t;
1177
1178 /// atomic_uint_fast64_t
1179 typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
1180#endif
1181
1182
     // Pointer-sized, size/difference and maximum-width integer typedefs.
1183 /// atomic_intptr_t
1184 typedef atomic<intptr_t> atomic_intptr_t;
1185
1186 /// atomic_uintptr_t
1187 typedef atomic<uintptr_t> atomic_uintptr_t;
1188
1189 /// atomic_size_t
1190 typedef atomic<size_t> atomic_size_t;
1191
1192 /// atomic_ptrdiff_t
1193 typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
1194
1195#ifdef _GLIBCXX_USE_C99_STDINT_TR1
1196 /// atomic_intmax_t
1197 typedef atomic<intmax_t> atomic_intmax_t;
1198
1199 /// atomic_uintmax_t
1200 typedef atomic<uintmax_t> atomic_uintmax_t;
1201#endif
1202
1202
1203 // Function definitions, atomic_flag operations.
     // Each operation is provided as a volatile and a non-volatile
     // overload; the forms without an _explicit suffix use
     // memory_order_seq_cst.
1204 inline bool
1205 atomic_flag_test_and_set_explicit(atomic_flag* __a,
1206 memory_order __m) noexcept
1207 { return __a->test_and_set(__m); }
1208
1209 inline bool
1210 atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
1211 memory_order __m) noexcept
1212 { return __a->test_and_set(__m); }
1213
1214#if __cpp_lib_atomic_flag_test
     // Non-modifying atomic_flag::test wrappers, guarded by the C++20
     // feature-test macro.
1215 inline bool
1216 atomic_flag_test(const atomic_flag* __a) noexcept
1217 { return __a->test(); }
1218
1219 inline bool
1220 atomic_flag_test(const volatile atomic_flag* __a) noexcept
1221 { return __a->test(); }
1222
1223 inline bool
1224 atomic_flag_test_explicit(const atomic_flag* __a,
1225 memory_order __m) noexcept
1226 { return __a->test(__m); }
1227
1228 inline bool
1229 atomic_flag_test_explicit(const volatile atomic_flag* __a,
1230 memory_order __m) noexcept
1231 { return __a->test(__m); }
1232#endif
1233
1234 inline void
1235 atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
1236 { __a->clear(__m); }
1237
1238 inline void
1239 atomic_flag_clear_explicit(volatile atomic_flag* __a,
1240 memory_order __m) noexcept
1241 { __a->clear(__m); }
1242
1243 inline bool
1244 atomic_flag_test_and_set(atomic_flag* __a) noexcept
1245 { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
1246
1247 inline bool
1248 atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
1249 { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
1250
1251 inline void
1252 atomic_flag_clear(atomic_flag* __a) noexcept
1253 { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
1254
1255 inline void
1256 atomic_flag_clear(volatile atomic_flag* __a) noexcept
1257 { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
1258
1259#if __cpp_lib_atomic_wait
     // atomic_flag wait/notify wrappers (C++20).  Note that only
     // non-volatile overloads are provided here.
1260 inline void
1261 atomic_flag_wait(atomic_flag* __a, bool __old) noexcept
1262 { __a->wait(__old); }
1263
1264 inline void
1265 atomic_flag_wait_explicit(atomic_flag* __a, bool __old,
1266 memory_order __m) noexcept
1267 { __a->wait(__old, __m); }
1268
1269 inline void
1270 atomic_flag_notify_one(atomic_flag* __a) noexcept
1271 { __a->notify_one(); }
1272
1273 inline void
1274 atomic_flag_notify_all(atomic_flag* __a) noexcept
1275 { __a->notify_all(); }
1276#endif // __cpp_lib_atomic_wait
1277
1278 /// @cond undocumented
1279 // _GLIBCXX_RESOLVE_LIB_DEFECTS
1280 // 3220. P0558 broke conforming C++14 uses of atomic shared_ptr
     // __type_identity_t makes _Tp a non-deduced context, so in the
     // non-member functions below _Tp is deduced solely from the
     // atomic<_Tp>* argument and the value argument merely converts.
1281 template<typename _Tp>
1282 using __atomic_val_t = __type_identity_t<_Tp>;
     // The difference type of atomic<_Tp> (also a non-deduced context);
     // used by the fetch_add/fetch_sub wrappers.
1283 template<typename _Tp>
1284 using __atomic_diff_t = typename atomic<_Tp>::difference_type;
1285 /// @endcond
1286
1287 // [atomics.nonmembers] Non-member functions.
1288 // Function templates generally applicable to atomic types.
     // Every operation forwards to the corresponding member function;
     // _ITp is deduced only from the atomic<_ITp>* parameter (see
     // __atomic_val_t above), so value arguments convert implicitly.
1289 template<typename _ITp>
1290 inline bool
1291 atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
1292 { return __a->is_lock_free(); }
1293
1294 template<typename _ITp>
1295 inline bool
1296 atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
1297 { return __a->is_lock_free(); }
1298
     // atomic_init is implemented as a relaxed store.
1299 template<typename _ITp>
1300 inline void
1301 atomic_init(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1302 { __a->store(__i, memory_order_relaxed); }
1303
1304 template<typename _ITp>
1305 inline void
1306 atomic_init(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1307 { __a->store(__i, memory_order_relaxed); }
1308
1309 template<typename _ITp>
1310 inline void
1311 atomic_store_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
1312 memory_order __m) noexcept
1313 { __a->store(__i, __m); }
1314
1315 template<typename _ITp>
1316 inline void
1317 atomic_store_explicit(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
1318 memory_order __m) noexcept
1319 { __a->store(__i, __m); }
1320
1321 template<typename _ITp>
1322 inline _ITp
1323 atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
1324 { return __a->load(__m); }
1325
1326 template<typename _ITp>
1327 inline _ITp
1328 atomic_load_explicit(const volatile atomic<_ITp>* __a,
1329 memory_order __m) noexcept
1330 { return __a->load(__m); }
1331
1332 template<typename _ITp>
1333 inline _ITp
1334 atomic_exchange_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
1335 memory_order __m) noexcept
1336 { return __a->exchange(__i, __m); }
1337
1338 template<typename _ITp>
1339 inline _ITp
1340 atomic_exchange_explicit(volatile atomic<_ITp>* __a,
1341 __atomic_val_t<_ITp> __i,
1342 memory_order __m) noexcept
1343 { return __a->exchange(__i, __m); }
1344
     // The compare-exchange wrappers take the expected value by pointer
     // (C-compatible interface); on failure *__i1 is updated by the
     // member function they forward to.
1345 template<typename _ITp>
1346 inline bool
1347 atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
1348 __atomic_val_t<_ITp>* __i1,
1349 __atomic_val_t<_ITp> __i2,
1350 memory_order __m1,
1351 memory_order __m2) noexcept
1352 { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
1353
1354 template<typename _ITp>
1355 inline bool
1356 atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
1357 __atomic_val_t<_ITp>* __i1,
1358 __atomic_val_t<_ITp> __i2,
1359 memory_order __m1,
1360 memory_order __m2) noexcept
1361 { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
1362
1363 template<typename _ITp>
1364 inline bool
1365 atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
1366 __atomic_val_t<_ITp>* __i1,
1367 __atomic_val_t<_ITp> __i2,
1368 memory_order __m1,
1369 memory_order __m2) noexcept
1370 { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
1371
1372 template<typename _ITp>
1373 inline bool
1374 atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
1375 __atomic_val_t<_ITp>* __i1,
1376 __atomic_val_t<_ITp> __i2,
1377 memory_order __m1,
1378 memory_order __m2) noexcept
1379 { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
1380
1381
     // Convenience forms without a memory_order parameter; all forward
     // to the _explicit versions with memory_order_seq_cst.
1382 template<typename _ITp>
1383 inline void
1384 atomic_store(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1385 { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
1386
1387 template<typename _ITp>
1388 inline void
1389 atomic_store(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1390 { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
1391
1392 template<typename _ITp>
1393 inline _ITp
1394 atomic_load(const atomic<_ITp>* __a) noexcept
1395 { return atomic_load_explicit(__a, memory_order_seq_cst); }
1396
1397 template<typename _ITp>
1398 inline _ITp
1399 atomic_load(const volatile atomic<_ITp>* __a) noexcept
1400 { return atomic_load_explicit(__a, memory_order_seq_cst); }
1401
1402 template<typename _ITp>
1403 inline _ITp
1404 atomic_exchange(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1405 { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
1406
1407 template<typename _ITp>
1408 inline _ITp
1409 atomic_exchange(volatile atomic<_ITp>* __a,
1410 __atomic_val_t<_ITp> __i) noexcept
1411 { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
1412
     // Both success and failure orderings are seq_cst in these forms.
1413 template<typename _ITp>
1414 inline bool
1415 atomic_compare_exchange_weak(atomic<_ITp>* __a,
1416 __atomic_val_t<_ITp>* __i1,
1417 __atomic_val_t<_ITp> __i2) noexcept
1418 {
1419 return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
1420 memory_order_seq_cst,
1421 memory_order_seq_cst);
1422 }
1423
1424 template<typename _ITp>
1425 inline bool
1426 atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
1427 __atomic_val_t<_ITp>* __i1,
1428 __atomic_val_t<_ITp> __i2) noexcept
1429 {
1430 return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
1431 memory_order_seq_cst,
1432 memory_order_seq_cst);
1433 }
1434
1435 template<typename _ITp>
1436 inline bool
1437 atomic_compare_exchange_strong(atomic<_ITp>* __a,
1438 __atomic_val_t<_ITp>* __i1,
1439 __atomic_val_t<_ITp> __i2) noexcept
1440 {
1441 return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
1442 memory_order_seq_cst,
1443 memory_order_seq_cst);
1444 }
1445
1446 template<typename _ITp>
1447 inline bool
1448 atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
1449 __atomic_val_t<_ITp>* __i1,
1450 __atomic_val_t<_ITp> __i2) noexcept
1451 {
1452 return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
1453 memory_order_seq_cst,
1454 memory_order_seq_cst);
1455 }
1456
1457
1458#if __cpp_lib_atomic_wait
     // Non-member wait/notify for atomic<T> (C++20), forwarding to the
     // member functions.  Only non-volatile overloads are provided.
1459 template<typename _Tp>
1460 inline void
1461 atomic_wait(const atomic<_Tp>* __a,
1462 typename std::atomic<_Tp>::value_type __old) noexcept
1463 { __a->wait(__old); }
1464
1465 template<typename _Tp>
1466 inline void
1467 atomic_wait_explicit(const atomic<_Tp>* __a,
1468 typename std::atomic<_Tp>::value_type __old,
1469 std::memory_order __m) noexcept
1470 { __a->wait(__old, __m); }
1471
1472 template<typename _Tp>
1473 inline void
1474 atomic_notify_one(atomic<_Tp>* __a) noexcept
1475 { __a->notify_one(); }
1476
1477 template<typename _Tp>
1478 inline void
1479 atomic_notify_all(atomic<_Tp>* __a) noexcept
1480 { __a->notify_all(); }
1481#endif // __cpp_lib_atomic_wait
1482
1483 // Function templates for atomic_integral and atomic_pointer operations only.
1484 // Some operations (and, or, xor) are only available for atomic integrals,
1485 // which is implemented by taking a parameter of type __atomic_base<_ITp>*.
     // Note the asymmetry: add/sub take atomic<_ITp>* with the atomic's
     // difference_type (so they also cover pointer arithmetic), while
     // and/or/xor take __atomic_base<_ITp>* with the value type.
1486
1487 template<typename _ITp>
1488 inline _ITp
1489 atomic_fetch_add_explicit(atomic<_ITp>* __a,
1490 __atomic_diff_t<_ITp> __i,
1491 memory_order __m) noexcept
1492 { return __a->fetch_add(__i, __m); }
1493
1494 template<typename _ITp>
1495 inline _ITp
1496 atomic_fetch_add_explicit(volatile atomic<_ITp>* __a,
1497 __atomic_diff_t<_ITp> __i,
1498 memory_order __m) noexcept
1499 { return __a->fetch_add(__i, __m); }
1500
1501 template<typename _ITp>
1502 inline _ITp
1503 atomic_fetch_sub_explicit(atomic<_ITp>* __a,
1504 __atomic_diff_t<_ITp> __i,
1505 memory_order __m) noexcept
1506 { return __a->fetch_sub(__i, __m); }
1507
1508 template<typename _ITp>
1509 inline _ITp
1510 atomic_fetch_sub_explicit(volatile atomic<_ITp>* __a,
1511 __atomic_diff_t<_ITp> __i,
1512 memory_order __m) noexcept
1513 { return __a->fetch_sub(__i, __m); }
1514
1515 template<typename _ITp>
1516 inline _ITp
1517 atomic_fetch_and_explicit(__atomic_base<_ITp>* __a,
1518 __atomic_val_t<_ITp> __i,
1519 memory_order __m) noexcept
1520 { return __a->fetch_and(__i, __m); }
1521
1522 template<typename _ITp>
1523 inline _ITp
1524 atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a,
1525 __atomic_val_t<_ITp> __i,
1526 memory_order __m) noexcept
1527 { return __a->fetch_and(__i, __m); }
1528
1529 template<typename _ITp>
1530 inline _ITp
1531 atomic_fetch_or_explicit(__atomic_base<_ITp>* __a,
1532 __atomic_val_t<_ITp> __i,
1533 memory_order __m) noexcept
1534 { return __a->fetch_or(__i, __m); }
1535
1536 template<typename _ITp>
1537 inline _ITp
1538 atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a,
1539 __atomic_val_t<_ITp> __i,
1540 memory_order __m) noexcept
1541 { return __a->fetch_or(__i, __m); }
1542
1543 template<typename _ITp>
1544 inline _ITp
1545 atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a,
1546 __atomic_val_t<_ITp> __i,
1547 memory_order __m) noexcept
1548 { return __a->fetch_xor(__i, __m); }
1549
1550 template<typename _ITp>
1551 inline _ITp
1552 atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a,
1553 __atomic_val_t<_ITp> __i,
1554 memory_order __m) noexcept
1555 { return __a->fetch_xor(__i, __m); }
1556
     // Fetch-op convenience forms; all forward to the _explicit versions
     // with memory_order_seq_cst.
1557 template<typename _ITp>
1558 inline _ITp
1559 atomic_fetch_add(atomic<_ITp>* __a,
1560 __atomic_diff_t<_ITp> __i) noexcept
1561 { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
1562
1563 template<typename _ITp>
1564 inline _ITp
1565 atomic_fetch_add(volatile atomic<_ITp>* __a,
1566 __atomic_diff_t<_ITp> __i) noexcept
1567 { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
1568
1569 template<typename _ITp>
1570 inline _ITp
1571 atomic_fetch_sub(atomic<_ITp>* __a,
1572 __atomic_diff_t<_ITp> __i) noexcept
1573 { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
1574
1575 template<typename _ITp>
1576 inline _ITp
1577 atomic_fetch_sub(volatile atomic<_ITp>* __a,
1578 __atomic_diff_t<_ITp> __i) noexcept
1579 { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
1580
1581 template<typename _ITp>
1582 inline _ITp
1583 atomic_fetch_and(__atomic_base<_ITp>* __a,
1584 __atomic_val_t<_ITp> __i) noexcept
1585 { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
1586
1587 template<typename _ITp>
1588 inline _ITp
1589 atomic_fetch_and(volatile __atomic_base<_ITp>* __a,
1590 __atomic_val_t<_ITp> __i) noexcept
1591 { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
1592
1593 template<typename _ITp>
1594 inline _ITp
1595 atomic_fetch_or(__atomic_base<_ITp>* __a,
1596 __atomic_val_t<_ITp> __i) noexcept
1597 { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
1598
1599 template<typename _ITp>
1600 inline _ITp
1601 atomic_fetch_or(volatile __atomic_base<_ITp>* __a,
1602 __atomic_val_t<_ITp> __i) noexcept
1603 { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
1604
1605 template<typename _ITp>
1606 inline _ITp
1607 atomic_fetch_xor(__atomic_base<_ITp>* __a,
1608 __atomic_val_t<_ITp> __i) noexcept
1609 { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
1610
1611 template<typename _ITp>
1612 inline _ITp
1613 atomic_fetch_xor(volatile __atomic_base<_ITp>* __a,
1614 __atomic_val_t<_ITp> __i) noexcept
1615 { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
1616
1617#if __cplusplus > 201703L
1618#define __cpp_lib_atomic_float 201711L
     /// Explicit specialization for float (C++20 atomic floating-point).
1619 template<>
1620 struct atomic<float> : __atomic_float<float>
1621 {
1622 atomic() noexcept = default;
1623
     // Non-atomic initialization from a plain float value.
1624 constexpr
1625 atomic(float __fp) noexcept : __atomic_float<float>(__fp)
1626 { }
1627
     // Not copy-assignable; value assignment comes from the base class.
1628 atomic& operator=(const atomic&) volatile = delete;
1629 atomic& operator=(const atomic&) = delete;
1630
1631 using __atomic_float<float>::operator=;
1632 };
1633
     /// Explicit specialization for double (C++20 atomic floating-point).
1634 template<>
1635 struct atomic<double> : __atomic_float<double>
1636 {
1637 atomic() noexcept = default;
1638
     // Non-atomic initialization from a plain double value.
1639 constexpr
1640 atomic(double __fp) noexcept : __atomic_float<double>(__fp)
1641 { }
1642
     // Not copy-assignable; value assignment comes from the base class.
1643 atomic& operator=(const atomic&) volatile = delete;
1644 atomic& operator=(const atomic&) = delete;
1645
1646 using __atomic_float<double>::operator=;
1647 };
1648
     /// Explicit specialization for long double (C++20 atomic floating-point).
1649 template<>
1650 struct atomic<long double> : __atomic_float<long double>
1651 {
1652 atomic() noexcept = default;
1653
     // Non-atomic initialization from a plain long double value.
1654 constexpr
1655 atomic(long double __fp) noexcept : __atomic_float<long double>(__fp)
1656 { }
1657
     // Not copy-assignable; value assignment comes from the base class.
1658 atomic& operator=(const atomic&) volatile = delete;
1659 atomic& operator=(const atomic&) = delete;
1660
1661 using __atomic_float<long double>::operator=;
1662 };
1663
1664#define __cpp_lib_atomic_ref 201806L
1665
1666 /// Class template to provide atomic operations on a non-atomic variable.
1667 template<typename _Tp>
1668 struct atomic_ref : __atomic_ref<_Tp>
1669 {
     // Binds this atomic_ref to __t for its lifetime.
1670 explicit
1671 atomic_ref(_Tp& __t) noexcept : __atomic_ref<_Tp>(__t)
1672 { }
1673
     // Copy-assignment of the reference itself is deleted; copying an
     // atomic_ref object is allowed.  Assignment through operator= is
     // inherited from __atomic_ref (presumably it stores to the
     // referenced object -- the base class is defined elsewhere).
1674 atomic_ref& operator=(const atomic_ref&) = delete;
1675
1676 atomic_ref(const atomic_ref&) = default;
1677
1678 using __atomic_ref<_Tp>::operator=;
1679 };
1680
1681#endif // C++2a
1682
1683 /// @} group atomics
1684
1685_GLIBCXX_END_NAMESPACE_VERSION
1686} // namespace
1687
1688#endif // C++11
1689
1690#endif // _GLIBCXX_ATOMIC