// -*- C++ -*- header.

// Copyright (C) 2008-2015 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.

/** @file include/atomic
 *  This is a Standard C++ Library header.
 */

// Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html

#ifndef _GLIBCXX_ATOMIC
#define _GLIBCXX_ATOMIC 1

#pragma GCC system_header

#if __cplusplus < 201103L
# include <bits/c++0x_warning.h>
#else

#include <bits/atomic_base.h>

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION

  /**
   * @addtogroup atomics
   * @{
   */

  template<typename _Tp>
    struct atomic;

  /// atomic<bool>
  // NB: No operators or fetch-operations for this type.
  template<>
    struct atomic<bool>
    {
    private:
      __atomic_base<bool> _M_base;

    public:
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(bool __i) noexcept : _M_base(__i) { }

      bool
      operator=(bool __i) noexcept
      { return _M_base.operator=(__i); }

      bool
      operator=(bool __i) volatile noexcept
      { return _M_base.operator=(__i); }

      operator bool() const noexcept
      { return _M_base.load(); }

      operator bool() const volatile noexcept
      { return _M_base.load(); }

      bool
      is_lock_free() const noexcept { return _M_base.is_lock_free(); }

      bool
      is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }

      void
      store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
      { _M_base.store(__i, __m); }

      void
      store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
      { _M_base.store(__i, __m); }

      bool
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return _M_base.load(__m); }

      bool
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      { return _M_base.load(__m); }

      bool
      exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
      { return _M_base.exchange(__i, __m); }

      bool
      exchange(bool __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_base.exchange(__i, __m); }

      bool
      compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
                            memory_order __m2) noexcept
      { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

      bool
      compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
                            memory_order __m2) volatile noexcept
      { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

      bool
      compare_exchange_weak(bool& __i1, bool __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
      { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

      bool
      compare_exchange_weak(bool& __i1, bool __i2,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

      bool
      compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
                              memory_order __m2) noexcept
      { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

      bool
      compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
                              memory_order __m2) volatile noexcept
      { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

      bool
      compare_exchange_strong(bool& __i1, bool __i2,
                              memory_order __m = memory_order_seq_cst) noexcept
      { return _M_base.compare_exchange_strong(__i1, __i2, __m); }

      bool
      compare_exchange_strong(bool& __i1, bool __i2,
                              memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
    };
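
  // Illustrative usage sketch (hypothetical names, not mandated by this
  // header): a minimal release/acquire handshake built from the store/load
  // members defined above.
  //
  //   std::atomic<bool> ready(false);
  //   // producer thread:
  //   ready.store(true, std::memory_order_release);
  //   // consumer thread:
  //   while (!ready.load(std::memory_order_acquire))
  //     { }  // spin until the producer publishes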


  /**
   * @brief Generic atomic type, primary class template.
   *
   * @tparam _Tp  Type to be made atomic, must be trivially copyable.
   */
  template<typename _Tp>
    struct atomic
    {
    private:
      // Align 1/2/4/8/16-byte types to at least their size.
      static constexpr int _S_min_alignment
        = (sizeof(_Tp) & (sizeof(_Tp) - 1)) || sizeof(_Tp) > 16
        ? 0 : sizeof(_Tp);

      static constexpr int _S_alignment
        = _S_min_alignment > alignof(_Tp) ? _S_min_alignment : alignof(_Tp);
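
      // Illustrative worked example of the two constants above: an 8-byte
      // trivially copyable struct of two ints has a power-of-two size, so
      // _S_min_alignment is 8 and _M_i below is aligned to 8 even though
      // alignof(_Tp) is 4, allowing a single lock-free 8-byte access; a
      // 12-byte struct is not a power-of-two size, so _S_min_alignment is 0
      // and alignof(_Tp) is used unchanged.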

      alignas(_S_alignment) _Tp _M_i;

      static_assert(__is_trivially_copyable(_Tp),
                    "std::atomic requires a trivially copyable type");

      static_assert(sizeof(_Tp) > 0,
                    "Incomplete or zero-sized types are not supported");

    public:
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(_Tp __i) noexcept : _M_i(__i) { }

      operator _Tp() const noexcept
      { return load(); }

      operator _Tp() const volatile noexcept
      { return load(); }

      _Tp
      operator=(_Tp __i) noexcept
      { store(__i); return __i; }

      _Tp
      operator=(_Tp __i) volatile noexcept
      { store(__i); return __i; }

      bool
      is_lock_free() const noexcept
      {
        // Produce a fake, minimally aligned pointer.
        return __atomic_is_lock_free(sizeof(_M_i),
            reinterpret_cast<void *>(-__alignof(_M_i)));
      }

      bool
      is_lock_free() const volatile noexcept
      {
        // Produce a fake, minimally aligned pointer.
        return __atomic_is_lock_free(sizeof(_M_i),
            reinterpret_cast<void *>(-__alignof(_M_i)));
      }

      void
      store(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
      { __atomic_store(&_M_i, &__i, __m); }

      void
      store(_Tp __i, memory_order __m = memory_order_seq_cst) volatile noexcept
      { __atomic_store(&_M_i, &__i, __m); }

      _Tp
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        _Tp tmp;
        __atomic_load(&_M_i, &tmp, __m);
        return tmp;
      }

      _Tp
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        _Tp tmp;
        __atomic_load(&_M_i, &tmp, __m);
        return tmp;
      }

      _Tp
      exchange(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
      {
        _Tp tmp;
        __atomic_exchange(&_M_i, &__i, &tmp, __m);
        return tmp;
      }

      _Tp
      exchange(_Tp __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        _Tp tmp;
        __atomic_exchange(&_M_i, &__i, &tmp, __m);
        return tmp;
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
                            memory_order __f) noexcept
      {
        return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f);
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
                            memory_order __f) volatile noexcept
      {
        return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f);
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
                            memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_weak(__e, __i, __m,
                                     __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_weak(__e, __i, __m,
                                     __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
                              memory_order __f) noexcept
      {
        return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f);
      }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
                              memory_order __f) volatile noexcept
      {
        return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f);
      }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
                              memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_strong(__e, __i, __m,
                                       __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
                              memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_strong(__e, __i, __m,
                                       __cmpexch_failure_order(__m)); }
    };
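
  // Illustrative usage sketch (hypothetical names, not mandated by this
  // header): the primary template accepts any trivially copyable type.
  //
  //   struct Point { int x; int y; };          // trivially copyable
  //   std::atomic<Point> p(Point{0, 0});
  //   Point expected = p.load();
  //   p.compare_exchange_strong(expected, Point{1, 2});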


  /// Partial specialization for pointer types.
  template<typename _Tp>
    struct atomic<_Tp*>
    {
      typedef _Tp* __pointer_type;
      typedef __atomic_base<_Tp*> __base_type;
      __base_type _M_b;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }

      operator __pointer_type() const noexcept
      { return __pointer_type(_M_b); }

      operator __pointer_type() const volatile noexcept
      { return __pointer_type(_M_b); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator++(int) noexcept
      { return _M_b++; }

      __pointer_type
      operator++(int) volatile noexcept
      { return _M_b++; }

      __pointer_type
      operator--(int) noexcept
      { return _M_b--; }

      __pointer_type
      operator--(int) volatile noexcept
      { return _M_b--; }

      __pointer_type
      operator++() noexcept
      { return ++_M_b; }

      __pointer_type
      operator++() volatile noexcept
      { return ++_M_b; }

      __pointer_type
      operator--() noexcept
      { return --_M_b; }

      __pointer_type
      operator--() volatile noexcept
      { return --_M_b; }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      { return _M_b.operator+=(__d); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      { return _M_b.operator+=(__d); }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      { return _M_b.operator-=(__d); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      { return _M_b.operator-=(__d); }

      bool
      is_lock_free() const noexcept
      { return _M_b.is_lock_free(); }

      bool
      is_lock_free() const volatile noexcept
      { return _M_b.is_lock_free(); }

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.store(__p, __m); }

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.store(__p, __m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return _M_b.load(__m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      { return _M_b.load(__m); }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.exchange(__p, __m); }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.exchange(__p, __m); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_weak(__p1, __p2, __m,
                                     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_weak(__p1, __p2, __m,
                                     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m = memory_order_seq_cst) noexcept
      {
        return _M_b.compare_exchange_strong(__p1, __p2, __m,
                                            __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return _M_b.compare_exchange_strong(__p1, __p2, __m,
                                            __cmpexch_failure_order(__m));
      }

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.fetch_add(__d, __m); }

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.fetch_add(__d, __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.fetch_sub(__d, __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.fetch_sub(__d, __m); }
    };
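
  // Illustrative usage sketch (hypothetical names, not mandated by this
  // header): arithmetic on atomic<_Tp*> is in units of _Tp, mirroring raw
  // pointer arithmetic.
  //
  //   int buf[4] = {0, 1, 2, 3};
  //   std::atomic<int*> p(buf);
  //   p.fetch_add(2);                   // p now points at buf + 2
  //   int* prev = p.fetch_sub(1);       // returns buf + 2, p == buf + 1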


  /// Explicit specialization for char.
  template<>
    struct atomic<char> : __atomic_base<char>
    {
      typedef char __integral_type;
      typedef __atomic_base<char> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for signed char.
  template<>
    struct atomic<signed char> : __atomic_base<signed char>
    {
      typedef signed char __integral_type;
      typedef __atomic_base<signed char> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned char.
  template<>
    struct atomic<unsigned char> : __atomic_base<unsigned char>
    {
      typedef unsigned char __integral_type;
      typedef __atomic_base<unsigned char> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for short.
  template<>
    struct atomic<short> : __atomic_base<short>
    {
      typedef short __integral_type;
      typedef __atomic_base<short> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned short.
  template<>
    struct atomic<unsigned short> : __atomic_base<unsigned short>
    {
      typedef unsigned short __integral_type;
      typedef __atomic_base<unsigned short> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for int.
  template<>
    struct atomic<int> : __atomic_base<int>
    {
      typedef int __integral_type;
      typedef __atomic_base<int> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned int.
  template<>
    struct atomic<unsigned int> : __atomic_base<unsigned int>
    {
      typedef unsigned int __integral_type;
      typedef __atomic_base<unsigned int> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for long.
  template<>
    struct atomic<long> : __atomic_base<long>
    {
      typedef long __integral_type;
      typedef __atomic_base<long> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned long.
  template<>
    struct atomic<unsigned long> : __atomic_base<unsigned long>
    {
      typedef unsigned long __integral_type;
      typedef __atomic_base<unsigned long> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for long long.
  template<>
    struct atomic<long long> : __atomic_base<long long>
    {
      typedef long long __integral_type;
      typedef __atomic_base<long long> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned long long.
  template<>
    struct atomic<unsigned long long> : __atomic_base<unsigned long long>
    {
      typedef unsigned long long __integral_type;
      typedef __atomic_base<unsigned long long> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for wchar_t.
  template<>
    struct atomic<wchar_t> : __atomic_base<wchar_t>
    {
      typedef wchar_t __integral_type;
      typedef __atomic_base<wchar_t> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for char16_t.
  template<>
    struct atomic<char16_t> : __atomic_base<char16_t>
    {
      typedef char16_t __integral_type;
      typedef __atomic_base<char16_t> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for char32_t.
  template<>
    struct atomic<char32_t> : __atomic_base<char32_t>
    {
      typedef char32_t __integral_type;
      typedef __atomic_base<char32_t> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };
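
  // Illustrative usage sketch (hypothetical names, not mandated by this
  // header): the integral specializations inherit the arithmetic operators
  // and fetch operations of __atomic_base, so a shared counter is simply:
  //
  //   std::atomic<int> counter(0);
  //   ++counter;                                        // seq_cst increment
  //   counter.fetch_add(5, std::memory_order_relaxed);
  //   int snapshot = counter.load();                    // 6 if no other writers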


  /// atomic_bool
  typedef atomic<bool> atomic_bool;

  /// atomic_char
  typedef atomic<char> atomic_char;

  /// atomic_schar
  typedef atomic<signed char> atomic_schar;

  /// atomic_uchar
  typedef atomic<unsigned char> atomic_uchar;

  /// atomic_short
  typedef atomic<short> atomic_short;

  /// atomic_ushort
  typedef atomic<unsigned short> atomic_ushort;

  /// atomic_int
  typedef atomic<int> atomic_int;

  /// atomic_uint
  typedef atomic<unsigned int> atomic_uint;

  /// atomic_long
  typedef atomic<long> atomic_long;

  /// atomic_ulong
  typedef atomic<unsigned long> atomic_ulong;

  /// atomic_llong
  typedef atomic<long long> atomic_llong;

  /// atomic_ullong
  typedef atomic<unsigned long long> atomic_ullong;

  /// atomic_wchar_t
  typedef atomic<wchar_t> atomic_wchar_t;

  /// atomic_char16_t
  typedef atomic<char16_t> atomic_char16_t;

  /// atomic_char32_t
  typedef atomic<char32_t> atomic_char32_t;


  /// atomic_int_least8_t
  typedef atomic<int_least8_t> atomic_int_least8_t;

  /// atomic_uint_least8_t
  typedef atomic<uint_least8_t> atomic_uint_least8_t;

  /// atomic_int_least16_t
  typedef atomic<int_least16_t> atomic_int_least16_t;

  /// atomic_uint_least16_t
  typedef atomic<uint_least16_t> atomic_uint_least16_t;

  /// atomic_int_least32_t
  typedef atomic<int_least32_t> atomic_int_least32_t;

  /// atomic_uint_least32_t
  typedef atomic<uint_least32_t> atomic_uint_least32_t;

  /// atomic_int_least64_t
  typedef atomic<int_least64_t> atomic_int_least64_t;

  /// atomic_uint_least64_t
  typedef atomic<uint_least64_t> atomic_uint_least64_t;


  /// atomic_int_fast8_t
  typedef atomic<int_fast8_t> atomic_int_fast8_t;

  /// atomic_uint_fast8_t
  typedef atomic<uint_fast8_t> atomic_uint_fast8_t;

  /// atomic_int_fast16_t
  typedef atomic<int_fast16_t> atomic_int_fast16_t;

  /// atomic_uint_fast16_t
  typedef atomic<uint_fast16_t> atomic_uint_fast16_t;

  /// atomic_int_fast32_t
  typedef atomic<int_fast32_t> atomic_int_fast32_t;

  /// atomic_uint_fast32_t
  typedef atomic<uint_fast32_t> atomic_uint_fast32_t;

  /// atomic_int_fast64_t
  typedef atomic<int_fast64_t> atomic_int_fast64_t;

  /// atomic_uint_fast64_t
  typedef atomic<uint_fast64_t> atomic_uint_fast64_t;


  /// atomic_intptr_t
  typedef atomic<intptr_t> atomic_intptr_t;

  /// atomic_uintptr_t
  typedef atomic<uintptr_t> atomic_uintptr_t;

  /// atomic_size_t
  typedef atomic<size_t> atomic_size_t;

  /// atomic_intmax_t
  typedef atomic<intmax_t> atomic_intmax_t;

  /// atomic_uintmax_t
  typedef atomic<uintmax_t> atomic_uintmax_t;

  /// atomic_ptrdiff_t
  typedef atomic<ptrdiff_t> atomic_ptrdiff_t;


  // Function definitions, atomic_flag operations.
  inline bool
  atomic_flag_test_and_set_explicit(atomic_flag* __a,
                                    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  inline bool
  atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
                                    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  inline void
  atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
  { __a->clear(__m); }

  inline void
  atomic_flag_clear_explicit(volatile atomic_flag* __a,
                             memory_order __m) noexcept
  { __a->clear(__m); }

  inline bool
  atomic_flag_test_and_set(atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline bool
  atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline void
  atomic_flag_clear(atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }

  inline void
  atomic_flag_clear(volatile atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
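
  // Illustrative usage sketch (hypothetical variable name, not mandated by
  // this header): a simple spin lock built from the free atomic_flag
  // functions above.
  //
  //   std::atomic_flag lock = ATOMIC_FLAG_INIT;
  //   while (std::atomic_flag_test_and_set_explicit(&lock,
  //                                                 std::memory_order_acquire))
  //     { }  // spin while the previous value was already set
  //   // ... critical section ...
  //   std::atomic_flag_clear_explicit(&lock, std::memory_order_release);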


  // Function templates generally applicable to atomic types.
  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  template<typename _ITp>
    inline void
    atomic_init(atomic<_ITp>* __a, _ITp __i) noexcept
    { __a->store(__i, memory_order_relaxed); }

  template<typename _ITp>
    inline void
    atomic_init(volatile atomic<_ITp>* __a, _ITp __i) noexcept
    { __a->store(__i, memory_order_relaxed); }

  template<typename _ITp>
    inline void
    atomic_store_explicit(atomic<_ITp>* __a, _ITp __i,
                          memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline void
    atomic_store_explicit(volatile atomic<_ITp>* __a, _ITp __i,
                          memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const volatile atomic<_ITp>* __a,
                         memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(atomic<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(volatile atomic<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
                                          _ITp* __i1, _ITp __i2,
                                          memory_order __m1,
                                          memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
                                          _ITp* __i1, _ITp __i2,
                                          memory_order __m1,
                                          memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
                                            _ITp* __i1, _ITp __i2,
                                            memory_order __m1,
                                            memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
                                            _ITp* __i1, _ITp __i2,
                                            memory_order __m1,
                                            memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }


  template<typename _ITp>
    inline void
    atomic_store(atomic<_ITp>* __a, _ITp __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline void
    atomic_store(volatile atomic<_ITp>* __a, _ITp __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const volatile atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(atomic<_ITp>* __a, _ITp __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(volatile atomic<_ITp>* __a, _ITp __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(atomic<_ITp>* __a,
                                 _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                                   memory_order_seq_cst,
                                                   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
                                 _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                                   memory_order_seq_cst,
                                                   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(atomic<_ITp>* __a,
                                   _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                     memory_order_seq_cst,
                                                     memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
                                   _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                     memory_order_seq_cst,
                                                     memory_order_seq_cst);
    }
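
  // Illustrative usage sketch (hypothetical names, not mandated by this
  // header): the free compare-exchange functions take the expected value by
  // pointer and update it on failure, mirroring the members' reference
  // parameter.
  //
  //   std::atomic<int> a(10);
  //   int expected = 10;
  //   if (std::atomic_compare_exchange_strong(&a, &expected, 42))
  //     { /* a is now 42 */ }
  //   else
  //     { /* expected now holds the value observed in a */ }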

  // Function templates for atomic_integral operations only, using
  // __atomic_base.  Template argument should be constrained to
  // integral types as specified in the standard, excluding address
  // types.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
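
  // Illustrative usage sketch (hypothetical names, not mandated by this
  // header): the non-explicit forms above are seq_cst wrappers and return
  // the value held before the modification.
  //
  //   std::atomic<unsigned> hits(0);
  //   unsigned before = std::atomic_fetch_add(&hits, 1u);  // before == 0
  //   // equivalent to hits.fetch_add(1u)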


  // Partial specializations for pointers.
  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
                              memory_order __m) noexcept
    { return __a->fetch_add(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add_explicit(volatile atomic<_ITp*>* __a, ptrdiff_t __d,
                              memory_order __m) noexcept
    { return __a->fetch_add(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_add(__d); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_add(__d); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub_explicit(volatile atomic<_ITp*>* __a,
                              ptrdiff_t __d, memory_order __m) noexcept
    { return __a->fetch_sub(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_sub(__d); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_sub(__d); }
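
  // Illustrative usage sketch (hypothetical names, not mandated by this
  // header): the pointer overloads advance by elements, not bytes.
  //
  //   double data[8] = {};
  //   std::atomic<double*> cursor(data);
  //   double* old = std::atomic_fetch_add(&cursor, 3);  // old == data,
  //                                                     // cursor == data + 3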
  // @} group atomics

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace

#endif // C++11

#endif // _GLIBCXX_ATOMIC