#ifndef _GLIBCXX_ATOMIC_BASE_H
#define _GLIBCXX_ATOMIC_BASE_H 1

#pragma GCC system_header

#include <bits/c++config.h>
#include <stdint.h>
#include <bits/atomic_lockfree_defines.h>

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION
  /// Enumeration for memory_order
  typedef enum memory_order
    {
      memory_order_relaxed,
      memory_order_consume,
      memory_order_acquire,
      memory_order_release,
      memory_order_acq_rel,
      memory_order_seq_cst
    } memory_order;

  enum __memory_order_modifier
    {
      __memory_order_mask          = 0x0ffff,
      __memory_order_modifier_mask = 0xffff0000,
      __memory_order_hle_acquire   = 0x10000,
      __memory_order_hle_release   = 0x20000
    };

  constexpr memory_order
  operator|(memory_order __m, __memory_order_modifier __mod)
  { return memory_order(__m | int(__mod)); }

  constexpr memory_order
  operator&(memory_order __m, __memory_order_modifier __mod)
  { return memory_order(__m & int(__mod)); }
  // Drop release ordering as per [atomics.types.operations.req]/21.
  constexpr memory_order
  __cmpexch_failure_order2(memory_order __m) noexcept
  {
    return __m == memory_order_acq_rel ? memory_order_acquire
      : __m == memory_order_release ? memory_order_relaxed : __m;
  }

  constexpr memory_order
  __cmpexch_failure_order(memory_order __m) noexcept
  {
    return memory_order(__cmpexch_failure_order2(__m & __memory_order_mask)
      | (__m & __memory_order_modifier_mask));
  }
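  // Illustrative sketch (not part of the original header): both helpers are
  // constexpr, so the derived failure order can be checked at compile time.
  // The failure order simply drops any release component of the success
  // order, e.g.:
  //
  //   static_assert(__cmpexch_failure_order(memory_order_acq_rel)
  //                 == memory_order_acquire, "release half dropped");
  //   static_assert(__cmpexch_failure_order(memory_order_release)
  //                 == memory_order_relaxed, "release becomes relaxed");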
  inline void
  atomic_thread_fence(memory_order __m) noexcept
  { __atomic_thread_fence(__m); }

  inline void
  atomic_signal_fence(memory_order __m) noexcept
  { __atomic_signal_fence(__m); }
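  // Illustrative sketch (not part of the original header): a thread fence
  // can pair with relaxed atomic operations to publish data. Assuming
  // hypothetical variables `__data` (plain int) and `__ready`
  // (an atomic<bool>):
  //
  //   __data = 42;                                  // plain store
  //   atomic_thread_fence(memory_order_release);    // ordered before...
  //   __ready.store(true, memory_order_relaxed);    // ...the flag store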
  /// kill_dependency
  template<typename _Tp>
    inline _Tp
    kill_dependency(_Tp __y) noexcept
    {
      _Tp __ret(__y);
      return __ret;
    }

  // Base types for atomics.
  template<typename _IntTp>
    struct __atomic_base;
#define ATOMIC_VAR_INIT(_VI) { _VI }

  template<typename _Tp>
    struct atomic;

  template<typename _Tp>
    struct atomic<_Tp*>;
  /**
   *  @brief Base type for atomic_flag.
   *
   *  A standard-layout POD base, so atomic_flag keeps a layout compatible
   *  with the C interface.
  */
  _GLIBCXX_BEGIN_EXTERN_C

  struct __atomic_flag_base
  {
    /* The target's "set" value for test-and-set may not be exactly 1.  */
#if __GCC_ATOMIC_TEST_AND_SET_TRUEVAL == 1
    bool _M_i;
#else
    unsigned char _M_i;
#endif
  };

  _GLIBCXX_END_EXTERN_C

#define ATOMIC_FLAG_INIT { 0 }
  /// atomic_flag
  struct atomic_flag : public __atomic_flag_base
  {
    atomic_flag() noexcept = default;
    ~atomic_flag() noexcept = default;
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;

    // Conversion to ATOMIC_FLAG_INIT.
    constexpr atomic_flag(bool __i) noexcept
      : __atomic_flag_base{ __i ? __GCC_ATOMIC_TEST_AND_SET_TRUEVAL : 0 }
    { }

    bool
    test_and_set(memory_order __m = memory_order_seq_cst) noexcept
    {
      return __atomic_test_and_set (&_M_i, __m);
    }

    bool
    test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      return __atomic_test_and_set (&_M_i, __m);
    }

    void
    clear(memory_order __m = memory_order_seq_cst) noexcept
    {
      memory_order __b = __m & __memory_order_mask;
      __glibcxx_assert(__b != memory_order_consume);
      __glibcxx_assert(__b != memory_order_acquire);
      __glibcxx_assert(__b != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }

    void
    clear(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      memory_order __b = __m & __memory_order_mask;
      __glibcxx_assert(__b != memory_order_consume);
      __glibcxx_assert(__b != memory_order_acquire);
      __glibcxx_assert(__b != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }
  };
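  // Illustrative sketch (not part of the original header): atomic_flag as a
  // minimal spinlock, assuming a hypothetical namespace-scope flag `__lock`:
  //
  //   static atomic_flag __lock = ATOMIC_FLAG_INIT;
  //
  //   void __critical_section()
  //   {
  //     while (__lock.test_and_set(memory_order_acquire))
  //       ;                                    // spin until cleared
  //     // ... critical section ...
  //     __lock.clear(memory_order_release);    // release the lock
  //   }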
  /// Base class for atomic integrals.
  template<typename _ITp>
    struct __atomic_base
    {
    private:
      typedef _ITp __int_type;

      __int_type _M_i;

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __int_type convertible to _M_i.
      constexpr __atomic_base(__int_type __i) noexcept : _M_i (__i) { }

      operator __int_type() const noexcept
      { return load(); }

      operator __int_type() const volatile noexcept
      { return load(); }

      __int_type
      operator=(__int_type __i) noexcept
      {
        store(__i);
        return __i;
      }

      __int_type
      operator=(__int_type __i) volatile noexcept
      {
        store(__i);
        return __i;
      }
      __int_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __int_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __int_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __int_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }
      __int_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator+=(__int_type __i) noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator+=(__int_type __i) volatile noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) volatile noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) volatile noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) volatile noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) volatile noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }
      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), nullptr); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), nullptr); }
      void
      store(__int_type __i,
            memory_order __m = memory_order_seq_cst) noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_i, __i, __m);
      }

      void
      store(__int_type __i,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_i, __i, __m);
      }
      __int_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, __m);
      }

      __int_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, __m);
      }
      __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, __m);
      }

      __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, __m);
      }
      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1, memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                   memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }
      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1, memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                   memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }
      __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }

      __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }
    };
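  // Illustrative sketch (not part of the original header): this integral
  // base supplies the arithmetic interface that std::atomic<int> exposes.
  // For a hypothetical counter `__n`:
  //
  //   __atomic_base<int> __n(0);
  //   __n.fetch_add(5);                  // returns 0, __n is now 5
  //   __n |= 0x10;                       // atomic or-assign via
  //                                      // __atomic_or_fetch
  //   int __v = __n.load(memory_order_acquire);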
  /// Partial specialization for pointer types.
  template<typename _PTp>
    struct __atomic_base<_PTp*>
    {
    private:
      typedef _PTp* __pointer_type;

      __pointer_type _M_p;

      // Factored out to facilitate explicit specialization.
      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) { return __d * sizeof(_PTp); }

      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) volatile { return __d * sizeof(_PTp); }

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __pointer_type convertible to _M_p.
      constexpr __atomic_base(__pointer_type __p) noexcept : _M_p (__p) { }

      operator __pointer_type() const noexcept
      { return load(); }

      operator __pointer_type() const volatile noexcept
      { return load(); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      {
        store(__p);
        return __p;
      }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      {
        store(__p);
        return __p;
      }
      __pointer_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __pointer_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __pointer_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }
      __pointer_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }
      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free(_M_type_size(1), nullptr); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free(_M_type_size(1), nullptr); }
      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_p, __p, __m);
      }

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_p, __p, __m);
      }
      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, __m);
      }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, __m);
      }
      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, __m);
      }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, __m);
      }
      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }
      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }
    };
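  // Illustrative sketch (not part of the original header): the pointer
  // specialization scales all arithmetic by sizeof(_PTp) via _M_type_size,
  // so increments move by whole elements, not bytes. For a hypothetical
  // __atomic_base<int*> `__p` over an array `__a`:
  //
  //   int __a[4] = { 0, 1, 2, 3 };
  //   __atomic_base<int*> __p(__a);
  //   __p.fetch_add(2);                  // advances by 2 ints, returns __a
  //   int __x = *__p.load();             // __x == 2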
_GLIBCXX_END_NAMESPACE_VERSION
} // namespace std

#endif // _GLIBCXX_ATOMIC_BASE_H