libstdc++
atomic_base.h
// -*- C++ -*- header.

// Copyright (C) 2008-2013 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.

/** @file bits/atomic_base.h
 *  This is an internal header file, included by other library headers.
 *  Do not attempt to use it directly. @headername{atomic}
 */

#ifndef _GLIBCXX_ATOMIC_BASE_H
#define _GLIBCXX_ATOMIC_BASE_H 1

#pragma GCC system_header
#include <bits/c++config.h>
#include <stdbool.h>
#include <stdint.h>
#include <bits/atomic_lockfree_defines.h>

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION

  /**
   * @defgroup atomics Atomics
   *
   * Components for performing atomic operations.
   * @{
   */

  /// Enumeration for memory_order
  typedef enum memory_order
    {
      memory_order_relaxed,
      memory_order_consume,
      memory_order_acquire,
      memory_order_release,
      memory_order_acq_rel,
      memory_order_seq_cst
    } memory_order;
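
  /* Example (editorial sketch, not in the original source): how these
     orderings pair up in user code via <atomic>.  The variable and function
     names are illustrative only.
     @code
     #include <atomic>
     std::atomic<int>  data(0);
     std::atomic<bool> ready(false);

     // Producer: the release store publishes the preceding relaxed store.
     void produce() {
       data.store(42, std::memory_order_relaxed);
       ready.store(true, std::memory_order_release);
     }

     // Consumer: an acquire load that sees ready == true is guaranteed to
     // also see data == 42.
     bool consume(int& out) {
       if (ready.load(std::memory_order_acquire)) {
         out = data.load(std::memory_order_relaxed);
         return true;
       }
       return false;
     }
     @endcode
  */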

  enum __memory_order_modifier
    {
      __memory_order_mask          = 0x0ffff,
      __memory_order_modifier_mask = 0xffff0000,
      __memory_order_hle_acquire   = 0x10000,
      __memory_order_hle_release   = 0x20000
    };

  constexpr memory_order
  operator|(memory_order __m, __memory_order_modifier __mod)
  {
    return memory_order(__m | int(__mod));
  }

  constexpr memory_order
  operator&(memory_order __m, __memory_order_modifier __mod)
  {
    return memory_order(__m & int(__mod));
  }
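
  /* Example (editorial sketch, not in the original source): the HLE bits
     are a GCC extension for Intel TSX hardware lock elision.  A sketch of
     the usage pattern, assuming a TSX-capable target; the lock variable
     and functions are illustrative only.
     @code
     #include <atomic>
     std::atomic<int> lock(0);

     void lock_hle() {
       // The HLE hint is combined with the ordering via operator| above.
       while (lock.exchange(1, std::memory_order_acquire
                               | __memory_order_hle_acquire))
         ;  // spin until the elided lock is acquired
     }

     void unlock_hle() {
       lock.store(0, std::memory_order_release | __memory_order_hle_release);
     }
     @endcode
  */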

  // Drop release ordering as per [atomics.types.operations.req]/21
  constexpr memory_order
  __cmpexch_failure_order2(memory_order __m) noexcept
  {
    return __m == memory_order_acq_rel ? memory_order_acquire
      : __m == memory_order_release ? memory_order_relaxed : __m;
  }

  constexpr memory_order
  __cmpexch_failure_order(memory_order __m) noexcept
  {
    return memory_order(__cmpexch_failure_order2(__m & __memory_order_mask)
      | (__m & __memory_order_modifier_mask));
  }
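
  /* Editorial note (not in the original source): the resulting
     success -> failure mapping is
         seq_cst -> seq_cst,  acq_rel -> acquire,  release -> relaxed,
     with acquire, consume and relaxed unchanged.  A failed
     compare-exchange performs only a load, so its ordering cannot have
     release semantics.  Any HLE modifier bits are preserved unchanged. */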

  inline void
  atomic_thread_fence(memory_order __m) noexcept
  { __atomic_thread_fence(__m); }

  inline void
  atomic_signal_fence(memory_order __m) noexcept
  { __atomic_signal_fence(__m); }
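
  /* Example (editorial sketch, not in the original source):
     atomic_thread_fence orders memory accesses across threads, while
     atomic_signal_fence only prevents compiler reordering with respect to
     a signal handler in the same thread.  Names are illustrative only.
     @code
     #include <atomic>
     std::atomic<bool> flag(false);
     int payload;

     void writer() {
       payload = 7;                                         // plain store
       std::atomic_thread_fence(std::memory_order_release);
       flag.store(true, std::memory_order_relaxed);
     }

     void reader() {
       if (flag.load(std::memory_order_relaxed)) {
         std::atomic_thread_fence(std::memory_order_acquire);
         int v = payload;  // sees 7: the fences synchronize via flag
         (void)v;
       }
     }
     @endcode
  */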

  /// kill_dependency
  template<typename _Tp>
    inline _Tp
    kill_dependency(_Tp __y) noexcept
    {
      _Tp __ret(__y);
      return __ret;
    }
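
  /* Example (editorial sketch, not in the original source): kill_dependency
     ends a memory_order_consume dependency chain, so the compiler need not
     extend dependency ordering to values computed through it.  Names are
     illustrative only.
     @code
     #include <atomic>
     std::atomic<int*> ptr;   // published by a producer thread
     int table[2];

     int use() {
       int* p = ptr.load(std::memory_order_consume);
       int a = *p;                                  // carries a dependency
       int b = table[std::kill_dependency(a) & 1];  // dependency ends here
       return a + b;
     }
     @endcode
  */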


  // Base types for atomics.
  template<typename _IntTp>
    struct __atomic_base;

  /// atomic_char
  typedef __atomic_base<char>                   atomic_char;

  /// atomic_schar
  typedef __atomic_base<signed char>            atomic_schar;

  /// atomic_uchar
  typedef __atomic_base<unsigned char>          atomic_uchar;

  /// atomic_short
  typedef __atomic_base<short>                  atomic_short;

  /// atomic_ushort
  typedef __atomic_base<unsigned short>         atomic_ushort;

  /// atomic_int
  typedef __atomic_base<int>                    atomic_int;

  /// atomic_uint
  typedef __atomic_base<unsigned int>           atomic_uint;

  /// atomic_long
  typedef __atomic_base<long>                   atomic_long;

  /// atomic_ulong
  typedef __atomic_base<unsigned long>          atomic_ulong;

  /// atomic_llong
  typedef __atomic_base<long long>              atomic_llong;

  /// atomic_ullong
  typedef __atomic_base<unsigned long long>     atomic_ullong;

  /// atomic_wchar_t
  typedef __atomic_base<wchar_t>                atomic_wchar_t;

  /// atomic_char16_t
  typedef __atomic_base<char16_t>               atomic_char16_t;

  /// atomic_char32_t
  typedef __atomic_base<char32_t>               atomic_char32_t;


  /// atomic_int_least8_t
  typedef __atomic_base<int_least8_t>           atomic_int_least8_t;

  /// atomic_uint_least8_t
  typedef __atomic_base<uint_least8_t>          atomic_uint_least8_t;

  /// atomic_int_least16_t
  typedef __atomic_base<int_least16_t>          atomic_int_least16_t;

  /// atomic_uint_least16_t
  typedef __atomic_base<uint_least16_t>         atomic_uint_least16_t;

  /// atomic_int_least32_t
  typedef __atomic_base<int_least32_t>          atomic_int_least32_t;

  /// atomic_uint_least32_t
  typedef __atomic_base<uint_least32_t>         atomic_uint_least32_t;

  /// atomic_int_least64_t
  typedef __atomic_base<int_least64_t>          atomic_int_least64_t;

  /// atomic_uint_least64_t
  typedef __atomic_base<uint_least64_t>         atomic_uint_least64_t;


  /// atomic_int_fast8_t
  typedef __atomic_base<int_fast8_t>            atomic_int_fast8_t;

  /// atomic_uint_fast8_t
  typedef __atomic_base<uint_fast8_t>           atomic_uint_fast8_t;

  /// atomic_int_fast16_t
  typedef __atomic_base<int_fast16_t>           atomic_int_fast16_t;

  /// atomic_uint_fast16_t
  typedef __atomic_base<uint_fast16_t>          atomic_uint_fast16_t;

  /// atomic_int_fast32_t
  typedef __atomic_base<int_fast32_t>           atomic_int_fast32_t;

  /// atomic_uint_fast32_t
  typedef __atomic_base<uint_fast32_t>          atomic_uint_fast32_t;

  /// atomic_int_fast64_t
  typedef __atomic_base<int_fast64_t>           atomic_int_fast64_t;

  /// atomic_uint_fast64_t
  typedef __atomic_base<uint_fast64_t>          atomic_uint_fast64_t;


  /// atomic_intptr_t
  typedef __atomic_base<intptr_t>               atomic_intptr_t;

  /// atomic_uintptr_t
  typedef __atomic_base<uintptr_t>              atomic_uintptr_t;

  /// atomic_size_t
  typedef __atomic_base<size_t>                 atomic_size_t;

  /// atomic_intmax_t
  typedef __atomic_base<intmax_t>               atomic_intmax_t;

  /// atomic_uintmax_t
  typedef __atomic_base<uintmax_t>              atomic_uintmax_t;

  /// atomic_ptrdiff_t
  typedef __atomic_base<ptrdiff_t>              atomic_ptrdiff_t;

#define ATOMIC_VAR_INIT(_VI) { _VI }
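
  /* Example (editorial sketch, not in the original source): C-compatible
     static initialization of an atomic object, as specified in C++11.
     @code
     #include <atomic>
     std::atomic<int> counter = ATOMIC_VAR_INIT(0);
     @endcode
  */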

  template<typename _Tp>
    struct atomic;

  template<typename _Tp>
    struct atomic<_Tp*>;

  /* The target's "set" value for test-and-set may not be exactly 1.  */
#if __GCC_ATOMIC_TEST_AND_SET_TRUEVAL == 1
  typedef bool __atomic_flag_data_type;
#else
  typedef unsigned char __atomic_flag_data_type;
#endif

  /**
   *  @brief Base type for atomic_flag.
   *
   *  Base type is POD with data, allowing atomic_flag to derive from
   *  it and meet the standard layout type requirement. In addition to
   *  compatibility with a C interface, this allows different
   *  implementations of atomic_flag to use the same atomic operation
   *  functions, via a standard conversion to the __atomic_flag_base
   *  argument.
   */
  _GLIBCXX_BEGIN_EXTERN_C

  struct __atomic_flag_base
  {
    __atomic_flag_data_type _M_i;
  };

  _GLIBCXX_END_EXTERN_C

#define ATOMIC_FLAG_INIT { 0 }

  /// atomic_flag
  struct atomic_flag : public __atomic_flag_base
  {
    atomic_flag() noexcept = default;
    ~atomic_flag() noexcept = default;
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;

    // Conversion to ATOMIC_FLAG_INIT.
    constexpr atomic_flag(bool __i) noexcept
      : __atomic_flag_base{ _S_init(__i) }
    { }

    bool
    test_and_set(memory_order __m = memory_order_seq_cst) noexcept
    {
      return __atomic_test_and_set (&_M_i, __m);
    }

    bool
    test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      return __atomic_test_and_set (&_M_i, __m);
    }

    void
    clear(memory_order __m = memory_order_seq_cst) noexcept
    {
      memory_order __b = __m & __memory_order_mask;
      __glibcxx_assert(__b != memory_order_consume);
      __glibcxx_assert(__b != memory_order_acquire);
      __glibcxx_assert(__b != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }

    void
    clear(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      memory_order __b = __m & __memory_order_mask;
      __glibcxx_assert(__b != memory_order_consume);
      __glibcxx_assert(__b != memory_order_acquire);
      __glibcxx_assert(__b != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }

  private:
    static constexpr __atomic_flag_data_type
    _S_init(bool __i)
    { return __i ? __GCC_ATOMIC_TEST_AND_SET_TRUEVAL : 0; }
  };
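
  /* Example (editorial sketch, not in the original source): atomic_flag is
     the one type guaranteed to be lock-free; a minimal spinlock built on it.
     Names are illustrative only.
     @code
     #include <atomic>
     std::atomic_flag lock_flag = ATOMIC_FLAG_INIT;

     void lock() {
       // Spin until test_and_set reports the flag was previously clear.
       while (lock_flag.test_and_set(std::memory_order_acquire))
         ;
     }

     void unlock() {
       lock_flag.clear(std::memory_order_release);
     }
     @endcode
  */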


  /// Base class for atomic integrals.
  //
  // For each of the integral types, define atomic_[integral type] struct
  //
  // atomic_bool     bool
  // atomic_char     char
  // atomic_schar    signed char
  // atomic_uchar    unsigned char
  // atomic_short    short
  // atomic_ushort   unsigned short
  // atomic_int      int
  // atomic_uint     unsigned int
  // atomic_long     long
  // atomic_ulong    unsigned long
  // atomic_llong    long long
  // atomic_ullong   unsigned long long
  // atomic_char16_t char16_t
  // atomic_char32_t char32_t
  // atomic_wchar_t  wchar_t
  //
  // NB: Assuming _ITp is an integral scalar type that is 1, 2, 4, or
  // 8 bytes, since that is what GCC built-in functions for atomic
  // memory access expect.
  template<typename _ITp>
    struct __atomic_base
    {
    private:
      typedef _ITp __int_type;

      __int_type _M_i;

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __int_type convertible to _M_i.
      constexpr __atomic_base(__int_type __i) noexcept : _M_i (__i) { }

      operator __int_type() const noexcept
      { return load(); }

      operator __int_type() const volatile noexcept
      { return load(); }

      __int_type
      operator=(__int_type __i) noexcept
      {
        store(__i);
        return __i;
      }

      __int_type
      operator=(__int_type __i) volatile noexcept
      {
        store(__i);
        return __i;
      }

      __int_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __int_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __int_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __int_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }

      __int_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator+=(__int_type __i) noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator+=(__int_type __i) volatile noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) volatile noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) volatile noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) volatile noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) volatile noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }

      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), nullptr); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), nullptr); }

      void
      store(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_i, __i, __m);
      }

      void
      store(__int_type __i,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_i, __i, __m);
      }

      __int_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, __m);
      }

      __int_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, __m);
      }

      __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, __m);
      }

      __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, __m);
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1, memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                   memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }
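
      /* Example (editorial sketch, not in the original source): the weak
         form may fail spuriously, so it is normally used in a retry loop.
         A sketch of the canonical pattern; names are illustrative only.
         @code
         #include <atomic>
         std::atomic<int> counter(0);

         void add_if_below_limit(int limit) {
           int cur = counter.load(std::memory_order_relaxed);
           // On failure, cur is refreshed with the value actually seen.
           while (cur < limit
                  && !counter.compare_exchange_weak(cur, cur + 1,
                                                    std::memory_order_acq_rel,
                                                    std::memory_order_relaxed))
             ;  // retry
         }
         @endcode
      */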

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1, memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;

        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                             memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                   memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }

      __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }

      __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }
    };
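
  /* Example (editorial sketch, not in the original source): the fetch_*
     members return the value held *before* the operation, while the
     compound assignment operators above return the new value.  Names are
     illustrative only.
     @code
     #include <atomic>
     std::atomic<unsigned> mask(0x0f);

     unsigned demo() {
       unsigned old = mask.fetch_or(0xf0);  // old == 0x0f, mask now 0xff
       unsigned now = (mask &= 0x3f);       // now == 0x3f (the new value)
       return old + now;
     }
     @endcode
  */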


  /// Partial specialization for pointer types.
  template<typename _PTp>
    struct __atomic_base<_PTp*>
    {
    private:
      typedef _PTp* __pointer_type;

      __pointer_type _M_p;

      // Factored out to facilitate explicit specialization.
      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) { return __d * sizeof(_PTp); }

      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) volatile { return __d * sizeof(_PTp); }

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __pointer_type convertible to _M_p.
      constexpr __atomic_base(__pointer_type __p) noexcept : _M_p (__p) { }

      operator __pointer_type() const noexcept
      { return load(); }

      operator __pointer_type() const volatile noexcept
      { return load(); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      {
        store(__p);
        return __p;
      }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      {
        store(__p);
        return __p;
      }

      __pointer_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __pointer_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __pointer_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free(sizeof(__pointer_type), nullptr); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free(sizeof(__pointer_type), nullptr); }

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      {
        memory_order __b = __m & __memory_order_mask;

        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_p, __p, __m);
      }

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_p, __p, __m);
      }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, __m);
      }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, __m);
      }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, __m);
      }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, __m);
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;

        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }
    };
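
  /* Example (editorial sketch, not in the original source): pointer
     arithmetic is scaled by sizeof(_PTp) via _M_type_size, so fetch_add
     counts in elements, just like ordinary pointer arithmetic.  Names are
     illustrative only.
     @code
     #include <atomic>
     int buf[8];
     std::atomic<int*> cursor(buf);

     int* claim_slot() {
       // Returns the old pointer, advancing by one int (sizeof(int) bytes).
       return cursor.fetch_add(1, std::memory_order_relaxed);
     }
     @endcode
  */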

  // @} group atomics

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace std

#endif