libstdc++
atomic_base.h
// -*- C++ -*- header.

// Copyright (C) 2008, 2009, 2010, 2011, 2012 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.

/** @file bits/atomic_base.h
 *  This is an internal header file, included by other library headers.
 *  Do not attempt to use it directly. @headername{atomic}
 */

#ifndef _GLIBCXX_ATOMIC_BASE_H
#define _GLIBCXX_ATOMIC_BASE_H 1

#pragma GCC system_header

#include <bits/c++config.h>
#include <stdbool.h>
#include <stdint.h>
#include <bits/atomic_lockfree_defines.h>

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION

  /**
   * @defgroup atomics Atomics
   *
   * Components for performing atomic operations.
   * @{
   */

  /// Enumeration for memory_order
  typedef enum memory_order
    {
      memory_order_relaxed,
      memory_order_consume,
      memory_order_acquire,
      memory_order_release,
      memory_order_acq_rel,
      memory_order_seq_cst
    } memory_order;

  // Drop release ordering as per [atomics.types.operations.req]/21.
  constexpr memory_order
  __cmpexch_failure_order(memory_order __m) noexcept
  {
    return __m == memory_order_acq_rel ? memory_order_acquire
      : __m == memory_order_release ? memory_order_relaxed : __m;
  }
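
  // Illustrative sketch (not in the original header): since the helper is
  // constexpr, the mapping can be checked at compile time. release falls
  // back to relaxed on failure, acq_rel falls back to acquire, and every
  // other order maps to itself.
  //
  // @code
  //   static_assert(__cmpexch_failure_order(memory_order_release)
  //                 == memory_order_relaxed, "release -> relaxed");
  //   static_assert(__cmpexch_failure_order(memory_order_acq_rel)
  //                 == memory_order_acquire, "acq_rel -> acquire");
  // @endcode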

  inline void
  atomic_thread_fence(memory_order __m) noexcept
  { __atomic_thread_fence(__m); }

  inline void
  atomic_signal_fence(memory_order __m) noexcept
  { __atomic_signal_fence(__m); }
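
  // A minimal usage sketch (illustrative, not part of this header): a
  // release fence paired with an acquire fence orders the relaxed accesses
  // around them. The variables below are hypothetical.
  //
  // @code
  //   std::atomic<int>  __data(0);
  //   std::atomic<bool> __ready(false);
  //
  //   // Thread 1:
  //   __data.store(42, std::memory_order_relaxed);
  //   std::atomic_thread_fence(std::memory_order_release);
  //   __ready.store(true, std::memory_order_relaxed);
  //
  //   // Thread 2:
  //   while (!__ready.load(std::memory_order_relaxed)) { }
  //   std::atomic_thread_fence(std::memory_order_acquire);
  //   int __v = __data.load(std::memory_order_relaxed);  // guaranteed 42
  // @endcode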

  /// kill_dependency
  template<typename _Tp>
    inline _Tp
    kill_dependency(_Tp __y) noexcept
    {
      _Tp __ret(__y);
      return __ret;
    }
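
  // Illustrative sketch (not part of this header): kill_dependency ends a
  // memory_order_consume dependency chain, so the implementation need not
  // extend dependency ordering past this point. Names are hypothetical.
  //
  // @code
  //   std::atomic<int*> __ptr;                           // set elsewhere
  //   int* __p = __ptr.load(std::memory_order_consume);
  //   int __i = std::kill_dependency(__p[0]);  // __i carries no dependency
  // @endcode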


  // Base types for atomics.
  template<typename _IntTp>
    struct __atomic_base;

  /// atomic_char
  typedef __atomic_base<char>               atomic_char;

  /// atomic_schar
  typedef __atomic_base<signed char>        atomic_schar;

  /// atomic_uchar
  typedef __atomic_base<unsigned char>      atomic_uchar;

  /// atomic_short
  typedef __atomic_base<short>              atomic_short;

  /// atomic_ushort
  typedef __atomic_base<unsigned short>     atomic_ushort;

  /// atomic_int
  typedef __atomic_base<int>                atomic_int;

  /// atomic_uint
  typedef __atomic_base<unsigned int>       atomic_uint;

  /// atomic_long
  typedef __atomic_base<long>               atomic_long;

  /// atomic_ulong
  typedef __atomic_base<unsigned long>      atomic_ulong;

  /// atomic_llong
  typedef __atomic_base<long long>          atomic_llong;

  /// atomic_ullong
  typedef __atomic_base<unsigned long long> atomic_ullong;

  /// atomic_wchar_t
  typedef __atomic_base<wchar_t>            atomic_wchar_t;

  /// atomic_char16_t
  typedef __atomic_base<char16_t>           atomic_char16_t;

  /// atomic_char32_t
  typedef __atomic_base<char32_t>           atomic_char32_t;


  /// atomic_int_least8_t
  typedef __atomic_base<int_least8_t>       atomic_int_least8_t;

  /// atomic_uint_least8_t
  typedef __atomic_base<uint_least8_t>      atomic_uint_least8_t;

  /// atomic_int_least16_t
  typedef __atomic_base<int_least16_t>      atomic_int_least16_t;

  /// atomic_uint_least16_t
  typedef __atomic_base<uint_least16_t>     atomic_uint_least16_t;

  /// atomic_int_least32_t
  typedef __atomic_base<int_least32_t>      atomic_int_least32_t;

  /// atomic_uint_least32_t
  typedef __atomic_base<uint_least32_t>     atomic_uint_least32_t;

  /// atomic_int_least64_t
  typedef __atomic_base<int_least64_t>      atomic_int_least64_t;

  /// atomic_uint_least64_t
  typedef __atomic_base<uint_least64_t>     atomic_uint_least64_t;


  /// atomic_int_fast8_t
  typedef __atomic_base<int_fast8_t>        atomic_int_fast8_t;

  /// atomic_uint_fast8_t
  typedef __atomic_base<uint_fast8_t>       atomic_uint_fast8_t;

  /// atomic_int_fast16_t
  typedef __atomic_base<int_fast16_t>       atomic_int_fast16_t;

  /// atomic_uint_fast16_t
  typedef __atomic_base<uint_fast16_t>      atomic_uint_fast16_t;

  /// atomic_int_fast32_t
  typedef __atomic_base<int_fast32_t>       atomic_int_fast32_t;

  /// atomic_uint_fast32_t
  typedef __atomic_base<uint_fast32_t>      atomic_uint_fast32_t;

  /// atomic_int_fast64_t
  typedef __atomic_base<int_fast64_t>       atomic_int_fast64_t;

  /// atomic_uint_fast64_t
  typedef __atomic_base<uint_fast64_t>      atomic_uint_fast64_t;


  /// atomic_intptr_t
  typedef __atomic_base<intptr_t>           atomic_intptr_t;

  /// atomic_uintptr_t
  typedef __atomic_base<uintptr_t>          atomic_uintptr_t;

  /// atomic_size_t
  typedef __atomic_base<size_t>             atomic_size_t;

  /// atomic_intmax_t
  typedef __atomic_base<intmax_t>           atomic_intmax_t;

  /// atomic_uintmax_t
  typedef __atomic_base<uintmax_t>          atomic_uintmax_t;

  /// atomic_ptrdiff_t
  typedef __atomic_base<ptrdiff_t>          atomic_ptrdiff_t;

#define ATOMIC_VAR_INIT(_VI) { _VI }

  template<typename _Tp>
    struct atomic;

  template<typename _Tp>
    struct atomic<_Tp*>;

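  // A minimal usage sketch (illustrative, not part of this header):
  // ATOMIC_VAR_INIT performs constant initialization of an atomic object
  // with static storage duration.
  //
  // @code
  //   static std::atomic<int> __counter = ATOMIC_VAR_INIT(0);
  // @endcode
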
  /* The target's "set" value for test-and-set may not be exactly 1.  */
#if __GCC_ATOMIC_TEST_AND_SET_TRUEVAL == 1
  typedef bool __atomic_flag_data_type;
#else
  typedef unsigned char __atomic_flag_data_type;
#endif

  /**
   *  @brief Base type for atomic_flag.
   *
   *  Base type is POD with data, allowing atomic_flag to derive from
   *  it and meet the standard layout type requirement. In addition to
   *  compatibility with a C interface, this allows different
   *  implementations of atomic_flag to use the same atomic operation
   *  functions, via a standard conversion to the __atomic_flag_base
   *  argument.
   */
  _GLIBCXX_BEGIN_EXTERN_C

  struct __atomic_flag_base
  {
    __atomic_flag_data_type _M_i;
  };

  _GLIBCXX_END_EXTERN_C

#define ATOMIC_FLAG_INIT { 0 }

  /// atomic_flag
  struct atomic_flag : public __atomic_flag_base
  {
    atomic_flag() noexcept = default;
    ~atomic_flag() noexcept = default;
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;

    // Conversion to ATOMIC_FLAG_INIT.
    constexpr atomic_flag(bool __i) noexcept
      : __atomic_flag_base{ _S_init(__i) }
    { }

    bool
    test_and_set(memory_order __m = memory_order_seq_cst) noexcept
    {
      return __atomic_test_and_set (&_M_i, __m);
    }

    bool
    test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      return __atomic_test_and_set (&_M_i, __m);
    }

    void
    clear(memory_order __m = memory_order_seq_cst) noexcept
    {
      __glibcxx_assert(__m != memory_order_consume);
      __glibcxx_assert(__m != memory_order_acquire);
      __glibcxx_assert(__m != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }

    void
    clear(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      __glibcxx_assert(__m != memory_order_consume);
      __glibcxx_assert(__m != memory_order_acquire);
      __glibcxx_assert(__m != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }

  private:
    static constexpr __atomic_flag_data_type
    _S_init(bool __i)
    { return __i ? __GCC_ATOMIC_TEST_AND_SET_TRUEVAL : 0; }
  };
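
  // A minimal usage sketch (illustrative, not part of this header):
  // atomic_flag as a spinlock. test_and_set with acquire ordering takes
  // the lock; clear with release ordering drops it.
  //
  // @code
  //   std::atomic_flag __lock = ATOMIC_FLAG_INIT;
  //
  //   void __critical_section()            // hypothetical function
  //   {
  //     while (__lock.test_and_set(std::memory_order_acquire))
  //       ;                                // spin until the flag was clear
  //     // ... exclusive access here ...
  //     __lock.clear(std::memory_order_release);
  //   }
  // @endcode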


  /// Base class for atomic integrals.
  //
  // For each of the integral types, define atomic_[integral type] struct
  //
  // atomic_bool     bool
  // atomic_char     char
  // atomic_schar    signed char
  // atomic_uchar    unsigned char
  // atomic_short    short
  // atomic_ushort   unsigned short
  // atomic_int      int
  // atomic_uint     unsigned int
  // atomic_long     long
  // atomic_ulong    unsigned long
  // atomic_llong    long long
  // atomic_ullong   unsigned long long
  // atomic_char16_t char16_t
  // atomic_char32_t char32_t
  // atomic_wchar_t  wchar_t
  //
  // NB: Assuming _ITp is an integral scalar type that is 1, 2, 4, or
  // 8 bytes, since that is what GCC built-in functions for atomic
  // memory access expect.
  template<typename _ITp>
    struct __atomic_base
    {
    private:
      typedef _ITp __int_type;

      __int_type _M_i;

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __int_type convertible to _M_i.
      constexpr __atomic_base(__int_type __i) noexcept : _M_i (__i) { }

      operator __int_type() const noexcept
      { return load(); }

      operator __int_type() const volatile noexcept
      { return load(); }

      __int_type
      operator=(__int_type __i) noexcept
      {
        store(__i);
        return __i;
      }

      __int_type
      operator=(__int_type __i) volatile noexcept
      {
        store(__i);
        return __i;
      }

      __int_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __int_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __int_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __int_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }

      __int_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator+=(__int_type __i) noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator+=(__int_type __i) volatile noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) volatile noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) volatile noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) volatile noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) volatile noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }

      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), &_M_i); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), &_M_i); }

      void
      store(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept
      {
        __glibcxx_assert(__m != memory_order_acquire);
        __glibcxx_assert(__m != memory_order_acq_rel);
        __glibcxx_assert(__m != memory_order_consume);

        __atomic_store_n(&_M_i, __i, __m);
      }

      void
      store(__int_type __i,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        __glibcxx_assert(__m != memory_order_acquire);
        __glibcxx_assert(__m != memory_order_acq_rel);
        __glibcxx_assert(__m != memory_order_consume);

        __atomic_store_n(&_M_i, __i, __m);
      }

      __int_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        __glibcxx_assert(__m != memory_order_release);
        __glibcxx_assert(__m != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, __m);
      }

      __int_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        __glibcxx_assert(__m != memory_order_release);
        __glibcxx_assert(__m != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, __m);
      }

      __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, __m);
      }

      __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, __m);
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1, memory_order __m2) noexcept
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                   memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1, memory_order __m2) noexcept
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                   memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }

      __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }

      __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }
    };
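
  // A minimal usage sketch (illustrative, not part of this header), via the
  // atomic_int typedef above: an unconditional increment with fetch_add,
  // then a compare-exchange loop that only replaces the expected value.
  //
  // @code
  //   std::atomic_int __count(0);
  //
  //   __count.fetch_add(1);                  // seq_cst read-modify-write
  //
  //   int __expected = __count.load();
  //   while (!__count.compare_exchange_weak(__expected, __expected * 2))
  //     ;  // on failure, __expected is reloaded with the current value
  // @endcode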


  /// Partial specialization for pointer types.
  template<typename _PTp>
    struct __atomic_base<_PTp*>
    {
    private:
      typedef _PTp* __pointer_type;

      __pointer_type _M_p;

      // Factored out to facilitate explicit specialization.
      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) { return __d * sizeof(_PTp); }

      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) volatile { return __d * sizeof(_PTp); }

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __pointer_type convertible to _M_p.
      constexpr __atomic_base(__pointer_type __p) noexcept : _M_p (__p) { }

      operator __pointer_type() const noexcept
      { return load(); }

      operator __pointer_type() const volatile noexcept
      { return load(); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      {
        store(__p);
        return __p;
      }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      {
        store(__p);
        return __p;
      }

      __pointer_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __pointer_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __pointer_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free(sizeof(__pointer_type), &_M_p); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free(sizeof(__pointer_type), &_M_p); }

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      {
        __glibcxx_assert(__m != memory_order_acquire);
        __glibcxx_assert(__m != memory_order_acq_rel);
        __glibcxx_assert(__m != memory_order_consume);

        __atomic_store_n(&_M_p, __p, __m);
      }

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        __glibcxx_assert(__m != memory_order_acquire);
        __glibcxx_assert(__m != memory_order_acq_rel);
        __glibcxx_assert(__m != memory_order_consume);

        __atomic_store_n(&_M_p, __p, __m);
      }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        __glibcxx_assert(__m != memory_order_release);
        __glibcxx_assert(__m != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, __m);
      }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        __glibcxx_assert(__m != memory_order_release);
        __glibcxx_assert(__m != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, __m);
      }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, __m);
      }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, __m);
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) noexcept
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }
    };
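
  // A minimal usage sketch (illustrative, not part of this header): pointer
  // atomics advance by whole elements, so fetch_add(1) on a _Tp* moves the
  // stored address by sizeof(_Tp) bytes. The array below is hypothetical.
  //
  // @code
  //   int __buf[4] = { 0, 1, 2, 3 };
  //   std::atomic<int*> __cursor(__buf);
  //
  //   int* __prev = __cursor.fetch_add(1);   // returns __buf
  //   // __cursor now holds __buf + 1, i.e. sizeof(int) bytes further on.
  // @endcode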

  // @} group atomics

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace std

#endif