30 #ifndef _GLIBCXX_ATOMIC_BASE_H 31 #define _GLIBCXX_ATOMIC_BASE_H 1 33 #pragma GCC system_header 39 #ifndef _GLIBCXX_ALWAYS_INLINE 40 #define _GLIBCXX_ALWAYS_INLINE inline __attribute__((__always_inline__)) 43 namespace std _GLIBCXX_VISIBILITY(default)
45 _GLIBCXX_BEGIN_NAMESPACE_VERSION
65 enum __memory_order_modifier
67 __memory_order_mask = 0x0ffff,
68 __memory_order_modifier_mask = 0xffff0000,
69 __memory_order_hle_acquire = 0x10000,
70 __memory_order_hle_release = 0x20000
73 constexpr memory_order
74 operator|(memory_order __m, __memory_order_modifier __mod)
79 constexpr memory_order
80 operator&(memory_order __m, __memory_order_modifier __mod)
86 constexpr memory_order
87 __cmpexch_failure_order2(memory_order __m) noexcept
89 return __m == memory_order_acq_rel ? memory_order_acquire
90 : __m == memory_order_release ? memory_order_relaxed : __m;
93 constexpr memory_order
94 __cmpexch_failure_order(memory_order __m) noexcept
96 return memory_order(__cmpexch_failure_order2(__m & __memory_order_mask)
97 | (__m & __memory_order_modifier_mask));
100 _GLIBCXX_ALWAYS_INLINE
void 101 atomic_thread_fence(memory_order __m) noexcept
102 { __atomic_thread_fence(__m); }
104 _GLIBCXX_ALWAYS_INLINE
void 105 atomic_signal_fence(memory_order __m) noexcept
106 { __atomic_signal_fence(__m); }
109 template<
typename _Tp>
119 template<
typename _IntTp>
123 #define ATOMIC_VAR_INIT(_VI) { _VI } 125 template<
typename _Tp>
128 template<
typename _Tp>
132 #if __GCC_ATOMIC_TEST_AND_SET_TRUEVAL == 1 133 typedef bool __atomic_flag_data_type;
135 typedef unsigned char __atomic_flag_data_type;
148 _GLIBCXX_BEGIN_EXTERN_C
152 __atomic_flag_data_type _M_i;
155 _GLIBCXX_END_EXTERN_C
157 #define ATOMIC_FLAG_INIT { 0 } 173 _GLIBCXX_ALWAYS_INLINE
bool 174 test_and_set(memory_order __m = memory_order_seq_cst) noexcept
176 return __atomic_test_and_set (&_M_i, __m);
179 _GLIBCXX_ALWAYS_INLINE
bool 180 test_and_set(memory_order __m = memory_order_seq_cst)
volatile noexcept
182 return __atomic_test_and_set (&_M_i, __m);
185 _GLIBCXX_ALWAYS_INLINE
void 186 clear(memory_order __m = memory_order_seq_cst) noexcept
188 memory_order __b = __m & __memory_order_mask;
189 __glibcxx_assert(__b != memory_order_consume);
190 __glibcxx_assert(__b != memory_order_acquire);
191 __glibcxx_assert(__b != memory_order_acq_rel);
193 __atomic_clear (&_M_i, __m);
196 _GLIBCXX_ALWAYS_INLINE
void 197 clear(memory_order __m = memory_order_seq_cst)
volatile noexcept
199 memory_order __b = __m & __memory_order_mask;
200 __glibcxx_assert(__b != memory_order_consume);
201 __glibcxx_assert(__b != memory_order_acquire);
202 __glibcxx_assert(__b != memory_order_acq_rel);
204 __atomic_clear (&_M_i, __m);
208 static constexpr __atomic_flag_data_type
210 {
return __i ? __GCC_ATOMIC_TEST_AND_SET_TRUEVAL : 0; }
237 template<
typename _ITp>
241 typedef _ITp __int_type;
243 static constexpr
int _S_alignment =
244 sizeof(_ITp) >
alignof(_ITp) ?
sizeof(_ITp) :
alignof(_ITp);
246 alignas(_S_alignment) __int_type _M_i;
256 constexpr
__atomic_base(__int_type __i) noexcept : _M_i (__i) { }
258 operator __int_type()
const noexcept
261 operator __int_type()
const volatile noexcept
265 operator=(__int_type __i) noexcept
272 operator=(__int_type __i)
volatile noexcept
279 operator++(
int) noexcept
280 {
return fetch_add(1); }
283 operator++(
int)
volatile noexcept
284 {
return fetch_add(1); }
287 operator--(
int) noexcept
288 {
return fetch_sub(1); }
291 operator--(
int)
volatile noexcept
292 {
return fetch_sub(1); }
295 operator++() noexcept
296 {
return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }
299 operator++()
volatile noexcept
300 {
return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }
303 operator--() noexcept
304 {
return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }
307 operator--()
volatile noexcept
308 {
return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }
311 operator+=(__int_type __i) noexcept
312 {
return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }
315 operator+=(__int_type __i)
volatile noexcept
316 {
return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }
319 operator-=(__int_type __i) noexcept
320 {
return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }
323 operator-=(__int_type __i)
volatile noexcept
324 {
return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }
327 operator&=(__int_type __i) noexcept
328 {
return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }
331 operator&=(__int_type __i)
volatile noexcept
332 {
return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }
335 operator|=(__int_type __i) noexcept
336 {
return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }
339 operator|=(__int_type __i)
volatile noexcept
340 {
return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }
343 operator^=(__int_type __i) noexcept
344 {
return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }
347 operator^=(__int_type __i)
volatile noexcept
348 {
return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }
351 is_lock_free()
const noexcept
354 return __atomic_is_lock_free(
sizeof(_M_i),
355 reinterpret_cast<void *>(-__alignof(_M_i)));
359 is_lock_free()
const volatile noexcept
362 return __atomic_is_lock_free(
sizeof(_M_i),
363 reinterpret_cast<void *>(-__alignof(_M_i)));
366 _GLIBCXX_ALWAYS_INLINE
void 367 store(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept
369 memory_order __b = __m & __memory_order_mask;
370 __glibcxx_assert(__b != memory_order_acquire);
371 __glibcxx_assert(__b != memory_order_acq_rel);
372 __glibcxx_assert(__b != memory_order_consume);
374 __atomic_store_n(&_M_i, __i, __m);
377 _GLIBCXX_ALWAYS_INLINE
void 378 store(__int_type __i,
379 memory_order __m = memory_order_seq_cst)
volatile noexcept
381 memory_order __b = __m & __memory_order_mask;
382 __glibcxx_assert(__b != memory_order_acquire);
383 __glibcxx_assert(__b != memory_order_acq_rel);
384 __glibcxx_assert(__b != memory_order_consume);
386 __atomic_store_n(&_M_i, __i, __m);
389 _GLIBCXX_ALWAYS_INLINE __int_type
390 load(memory_order __m = memory_order_seq_cst)
const noexcept
392 memory_order __b = __m & __memory_order_mask;
393 __glibcxx_assert(__b != memory_order_release);
394 __glibcxx_assert(__b != memory_order_acq_rel);
396 return __atomic_load_n(&_M_i, __m);
399 _GLIBCXX_ALWAYS_INLINE __int_type
400 load(memory_order __m = memory_order_seq_cst)
const volatile noexcept
402 memory_order __b = __m & __memory_order_mask;
403 __glibcxx_assert(__b != memory_order_release);
404 __glibcxx_assert(__b != memory_order_acq_rel);
406 return __atomic_load_n(&_M_i, __m);
409 _GLIBCXX_ALWAYS_INLINE __int_type
411 memory_order __m = memory_order_seq_cst) noexcept
413 return __atomic_exchange_n(&_M_i, __i, __m);
417 _GLIBCXX_ALWAYS_INLINE __int_type
419 memory_order __m = memory_order_seq_cst)
volatile noexcept
421 return __atomic_exchange_n(&_M_i, __i, __m);
424 _GLIBCXX_ALWAYS_INLINE
bool 425 compare_exchange_weak(__int_type& __i1, __int_type __i2,
426 memory_order __m1, memory_order __m2) noexcept
428 memory_order __b2 = __m2 & __memory_order_mask;
429 memory_order __b1 = __m1 & __memory_order_mask;
430 __glibcxx_assert(__b2 != memory_order_release);
431 __glibcxx_assert(__b2 != memory_order_acq_rel);
432 __glibcxx_assert(__b2 <= __b1);
434 return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
437 _GLIBCXX_ALWAYS_INLINE
bool 438 compare_exchange_weak(__int_type& __i1, __int_type __i2,
440 memory_order __m2)
volatile noexcept
442 memory_order __b2 = __m2 & __memory_order_mask;
443 memory_order __b1 = __m1 & __memory_order_mask;
444 __glibcxx_assert(__b2 != memory_order_release);
445 __glibcxx_assert(__b2 != memory_order_acq_rel);
446 __glibcxx_assert(__b2 <= __b1);
448 return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
451 _GLIBCXX_ALWAYS_INLINE
bool 452 compare_exchange_weak(__int_type& __i1, __int_type __i2,
453 memory_order __m = memory_order_seq_cst) noexcept
455 return compare_exchange_weak(__i1, __i2, __m,
456 __cmpexch_failure_order(__m));
459 _GLIBCXX_ALWAYS_INLINE
bool 460 compare_exchange_weak(__int_type& __i1, __int_type __i2,
461 memory_order __m = memory_order_seq_cst)
volatile noexcept
463 return compare_exchange_weak(__i1, __i2, __m,
464 __cmpexch_failure_order(__m));
467 _GLIBCXX_ALWAYS_INLINE
bool 468 compare_exchange_strong(__int_type& __i1, __int_type __i2,
469 memory_order __m1, memory_order __m2) noexcept
471 memory_order __b2 = __m2 & __memory_order_mask;
472 memory_order __b1 = __m1 & __memory_order_mask;
473 __glibcxx_assert(__b2 != memory_order_release);
474 __glibcxx_assert(__b2 != memory_order_acq_rel);
475 __glibcxx_assert(__b2 <= __b1);
477 return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
480 _GLIBCXX_ALWAYS_INLINE
bool 481 compare_exchange_strong(__int_type& __i1, __int_type __i2,
483 memory_order __m2)
volatile noexcept
485 memory_order __b2 = __m2 & __memory_order_mask;
486 memory_order __b1 = __m1 & __memory_order_mask;
488 __glibcxx_assert(__b2 != memory_order_release);
489 __glibcxx_assert(__b2 != memory_order_acq_rel);
490 __glibcxx_assert(__b2 <= __b1);
492 return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
495 _GLIBCXX_ALWAYS_INLINE
bool 496 compare_exchange_strong(__int_type& __i1, __int_type __i2,
497 memory_order __m = memory_order_seq_cst) noexcept
499 return compare_exchange_strong(__i1, __i2, __m,
500 __cmpexch_failure_order(__m));
503 _GLIBCXX_ALWAYS_INLINE
bool 504 compare_exchange_strong(__int_type& __i1, __int_type __i2,
505 memory_order __m = memory_order_seq_cst)
volatile noexcept
507 return compare_exchange_strong(__i1, __i2, __m,
508 __cmpexch_failure_order(__m));
511 _GLIBCXX_ALWAYS_INLINE __int_type
512 fetch_add(__int_type __i,
513 memory_order __m = memory_order_seq_cst) noexcept
514 {
return __atomic_fetch_add(&_M_i, __i, __m); }
516 _GLIBCXX_ALWAYS_INLINE __int_type
517 fetch_add(__int_type __i,
518 memory_order __m = memory_order_seq_cst)
volatile noexcept
519 {
return __atomic_fetch_add(&_M_i, __i, __m); }
521 _GLIBCXX_ALWAYS_INLINE __int_type
522 fetch_sub(__int_type __i,
523 memory_order __m = memory_order_seq_cst) noexcept
524 {
return __atomic_fetch_sub(&_M_i, __i, __m); }
526 _GLIBCXX_ALWAYS_INLINE __int_type
527 fetch_sub(__int_type __i,
528 memory_order __m = memory_order_seq_cst)
volatile noexcept
529 {
return __atomic_fetch_sub(&_M_i, __i, __m); }
531 _GLIBCXX_ALWAYS_INLINE __int_type
532 fetch_and(__int_type __i,
533 memory_order __m = memory_order_seq_cst) noexcept
534 {
return __atomic_fetch_and(&_M_i, __i, __m); }
536 _GLIBCXX_ALWAYS_INLINE __int_type
537 fetch_and(__int_type __i,
538 memory_order __m = memory_order_seq_cst)
volatile noexcept
539 {
return __atomic_fetch_and(&_M_i, __i, __m); }
541 _GLIBCXX_ALWAYS_INLINE __int_type
542 fetch_or(__int_type __i,
543 memory_order __m = memory_order_seq_cst) noexcept
544 {
return __atomic_fetch_or(&_M_i, __i, __m); }
546 _GLIBCXX_ALWAYS_INLINE __int_type
547 fetch_or(__int_type __i,
548 memory_order __m = memory_order_seq_cst)
volatile noexcept
549 {
return __atomic_fetch_or(&_M_i, __i, __m); }
551 _GLIBCXX_ALWAYS_INLINE __int_type
552 fetch_xor(__int_type __i,
553 memory_order __m = memory_order_seq_cst) noexcept
554 {
return __atomic_fetch_xor(&_M_i, __i, __m); }
556 _GLIBCXX_ALWAYS_INLINE __int_type
557 fetch_xor(__int_type __i,
558 memory_order __m = memory_order_seq_cst)
volatile noexcept
559 {
return __atomic_fetch_xor(&_M_i, __i, __m); }
564 template<
typename _PTp>
568 typedef _PTp* __pointer_type;
574 _M_type_size(ptrdiff_t __d)
const {
return __d *
sizeof(_PTp); }
577 _M_type_size(ptrdiff_t __d)
const volatile {
return __d *
sizeof(_PTp); }
587 constexpr
__atomic_base(__pointer_type __p) noexcept : _M_p (__p) { }
589 operator __pointer_type()
const noexcept
592 operator __pointer_type()
const volatile noexcept
596 operator=(__pointer_type __p) noexcept
603 operator=(__pointer_type __p)
volatile noexcept
610 operator++(
int) noexcept
611 {
return fetch_add(1); }
614 operator++(
int)
volatile noexcept
615 {
return fetch_add(1); }
618 operator--(
int) noexcept
619 {
return fetch_sub(1); }
622 operator--(
int)
volatile noexcept
623 {
return fetch_sub(1); }
626 operator++() noexcept
627 {
return __atomic_add_fetch(&_M_p, _M_type_size(1),
628 memory_order_seq_cst); }
631 operator++()
volatile noexcept
632 {
return __atomic_add_fetch(&_M_p, _M_type_size(1),
633 memory_order_seq_cst); }
636 operator--() noexcept
637 {
return __atomic_sub_fetch(&_M_p, _M_type_size(1),
638 memory_order_seq_cst); }
641 operator--()
volatile noexcept
642 {
return __atomic_sub_fetch(&_M_p, _M_type_size(1),
643 memory_order_seq_cst); }
646 operator+=(ptrdiff_t __d) noexcept
647 {
return __atomic_add_fetch(&_M_p, _M_type_size(__d),
648 memory_order_seq_cst); }
651 operator+=(ptrdiff_t __d)
volatile noexcept
652 {
return __atomic_add_fetch(&_M_p, _M_type_size(__d),
653 memory_order_seq_cst); }
656 operator-=(ptrdiff_t __d) noexcept
657 {
return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
658 memory_order_seq_cst); }
661 operator-=(ptrdiff_t __d)
volatile noexcept
662 {
return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
663 memory_order_seq_cst); }
666 is_lock_free()
const noexcept
669 return __atomic_is_lock_free(
sizeof(_M_p),
670 reinterpret_cast<void *>(-__alignof(_M_p)));
674 is_lock_free()
const volatile noexcept
677 return __atomic_is_lock_free(
sizeof(_M_p),
678 reinterpret_cast<void *>(-__alignof(_M_p)));
681 _GLIBCXX_ALWAYS_INLINE
void 682 store(__pointer_type __p,
683 memory_order __m = memory_order_seq_cst) noexcept
685 memory_order __b = __m & __memory_order_mask;
687 __glibcxx_assert(__b != memory_order_acquire);
688 __glibcxx_assert(__b != memory_order_acq_rel);
689 __glibcxx_assert(__b != memory_order_consume);
691 __atomic_store_n(&_M_p, __p, __m);
694 _GLIBCXX_ALWAYS_INLINE
void 695 store(__pointer_type __p,
696 memory_order __m = memory_order_seq_cst)
volatile noexcept
698 memory_order __b = __m & __memory_order_mask;
699 __glibcxx_assert(__b != memory_order_acquire);
700 __glibcxx_assert(__b != memory_order_acq_rel);
701 __glibcxx_assert(__b != memory_order_consume);
703 __atomic_store_n(&_M_p, __p, __m);
706 _GLIBCXX_ALWAYS_INLINE __pointer_type
707 load(memory_order __m = memory_order_seq_cst)
const noexcept
709 memory_order __b = __m & __memory_order_mask;
710 __glibcxx_assert(__b != memory_order_release);
711 __glibcxx_assert(__b != memory_order_acq_rel);
713 return __atomic_load_n(&_M_p, __m);
716 _GLIBCXX_ALWAYS_INLINE __pointer_type
717 load(memory_order __m = memory_order_seq_cst)
const volatile noexcept
719 memory_order __b = __m & __memory_order_mask;
720 __glibcxx_assert(__b != memory_order_release);
721 __glibcxx_assert(__b != memory_order_acq_rel);
723 return __atomic_load_n(&_M_p, __m);
726 _GLIBCXX_ALWAYS_INLINE __pointer_type
728 memory_order __m = memory_order_seq_cst) noexcept
730 return __atomic_exchange_n(&_M_p, __p, __m);
734 _GLIBCXX_ALWAYS_INLINE __pointer_type
736 memory_order __m = memory_order_seq_cst)
volatile noexcept
738 return __atomic_exchange_n(&_M_p, __p, __m);
741 _GLIBCXX_ALWAYS_INLINE
bool 742 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
744 memory_order __m2) noexcept
746 memory_order __b2 = __m2 & __memory_order_mask;
747 memory_order __b1 = __m1 & __memory_order_mask;
748 __glibcxx_assert(__b2 != memory_order_release);
749 __glibcxx_assert(__b2 != memory_order_acq_rel);
750 __glibcxx_assert(__b2 <= __b1);
752 return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
755 _GLIBCXX_ALWAYS_INLINE
bool 756 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
758 memory_order __m2)
volatile noexcept
760 memory_order __b2 = __m2 & __memory_order_mask;
761 memory_order __b1 = __m1 & __memory_order_mask;
763 __glibcxx_assert(__b2 != memory_order_release);
764 __glibcxx_assert(__b2 != memory_order_acq_rel);
765 __glibcxx_assert(__b2 <= __b1);
767 return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
770 _GLIBCXX_ALWAYS_INLINE __pointer_type
771 fetch_add(ptrdiff_t __d,
772 memory_order __m = memory_order_seq_cst) noexcept
773 {
return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }
775 _GLIBCXX_ALWAYS_INLINE __pointer_type
776 fetch_add(ptrdiff_t __d,
777 memory_order __m = memory_order_seq_cst)
volatile noexcept
778 {
return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }
780 _GLIBCXX_ALWAYS_INLINE __pointer_type
781 fetch_sub(ptrdiff_t __d,
782 memory_order __m = memory_order_seq_cst) noexcept
783 {
return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }
785 _GLIBCXX_ALWAYS_INLINE __pointer_type
786 fetch_sub(ptrdiff_t __d,
787 memory_order __m = memory_order_seq_cst)
volatile noexcept
788 {
return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }
793 _GLIBCXX_END_NAMESPACE_VERSION
Generic atomic type, primary class template.
bitset< _Nb > operator|(const bitset< _Nb > &__x, const bitset< _Nb > &__y) noexcept
Global bitwise operations on bitsets.
Base class for atomic integrals.
bitset< _Nb > operator&(const bitset< _Nb > &__x, const bitset< _Nb > &__y) noexcept
Global bitwise operations on bitsets.
_Tp exchange(_Tp &__obj, _Up &&__new_val)
Assign __new_val to __obj and return its previous value.
memory_order
Enumeration for memory_order.
_Tp kill_dependency(_Tp __y) noexcept
kill_dependency
The toplevel namespace for ISO C++ entities is std.
Base type for atomic_flag.