32 #ifndef _GLIBCXX_ATOMIC
33 #define _GLIBCXX_ATOMIC 1
35 #pragma GCC system_header
37 #if __cplusplus < 201103L
44 namespace std _GLIBCXX_VISIBILITY(default)
46 _GLIBCXX_BEGIN_NAMESPACE_VERSION
53 #if __cplusplus > 201402L
54 # define __cpp_lib_atomic_is_always_lock_free 201603
57 template<
typename _Tp>
69 atomic() noexcept =
default;
70 ~
atomic() noexcept =
default;
75 constexpr
atomic(
bool __i) noexcept : _M_base(__i) { }
78 operator=(
bool __i) noexcept
79 {
return _M_base.operator=(__i); }
82 operator=(
bool __i)
volatile noexcept
83 {
return _M_base.operator=(__i); }
85 operator bool()
const noexcept
86 {
return _M_base.load(); }
88 operator bool()
const volatile noexcept
89 {
return _M_base.load(); }
92 is_lock_free()
const noexcept {
return _M_base.is_lock_free(); }
95 is_lock_free()
const volatile noexcept {
return _M_base.is_lock_free(); }
97 #if __cplusplus > 201402L
102 store(
bool __i,
memory_order __m = memory_order_seq_cst) noexcept
103 { _M_base.store(__i, __m); }
106 store(
bool __i,
memory_order __m = memory_order_seq_cst)
volatile noexcept
107 { _M_base.store(__i, __m); }
110 load(
memory_order __m = memory_order_seq_cst)
const noexcept
111 {
return _M_base.load(__m); }
114 load(
memory_order __m = memory_order_seq_cst)
const volatile noexcept
115 {
return _M_base.load(__m); }
118 exchange(
bool __i,
memory_order __m = memory_order_seq_cst) noexcept
119 {
return _M_base.exchange(__i, __m); }
123 memory_order __m = memory_order_seq_cst)
volatile noexcept
124 {
return _M_base.exchange(__i, __m); }
127 compare_exchange_weak(
bool& __i1,
bool __i2,
memory_order __m1,
129 {
return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
132 compare_exchange_weak(
bool& __i1,
bool __i2,
memory_order __m1,
134 {
return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
137 compare_exchange_weak(
bool& __i1,
bool __i2,
139 {
return _M_base.compare_exchange_weak(__i1, __i2, __m); }
142 compare_exchange_weak(
bool& __i1,
bool __i2,
143 memory_order __m = memory_order_seq_cst)
volatile noexcept
144 {
return _M_base.compare_exchange_weak(__i1, __i2, __m); }
147 compare_exchange_strong(
bool& __i1,
bool __i2,
memory_order __m1,
149 {
return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
152 compare_exchange_strong(
bool& __i1,
bool __i2,
memory_order __m1,
154 {
return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
157 compare_exchange_strong(
bool& __i1,
bool __i2,
159 {
return _M_base.compare_exchange_strong(__i1, __i2, __m); }
162 compare_exchange_strong(
bool& __i1,
bool __i2,
163 memory_order __m = memory_order_seq_cst)
volatile noexcept
164 {
return _M_base.compare_exchange_strong(__i1, __i2, __m); }
173 template<
typename _Tp>
178 static constexpr
int _S_min_alignment
179 = (
sizeof(_Tp) & (
sizeof(_Tp) - 1)) ||
sizeof(_Tp) > 16
182 static constexpr
int _S_alignment
183 = _S_min_alignment >
alignof(_Tp) ? _S_min_alignment :
alignof(_Tp);
185 alignas(_S_alignment) _Tp _M_i;
187 static_assert(__is_trivially_copyable(_Tp),
188 "std::atomic requires a trivially copyable type");
190 static_assert(
sizeof(_Tp) > 0,
191 "Incomplete or zero-sized types are not supported");
194 atomic() noexcept = default;
195 ~
atomic() noexcept = default;
200 constexpr
atomic(_Tp __i) noexcept : _M_i(__i) { }
202 operator _Tp() const noexcept
205 operator _Tp() const volatile noexcept
209 operator=(_Tp __i) noexcept
210 { store(__i);
return __i; }
213 operator=(_Tp __i)
volatile noexcept
214 { store(__i);
return __i; }
217 is_lock_free() const noexcept
220 return __atomic_is_lock_free(
sizeof(_M_i),
221 reinterpret_cast<void *>(-__alignof(_M_i)));
225 is_lock_free() const volatile noexcept
228 return __atomic_is_lock_free(
sizeof(_M_i),
229 reinterpret_cast<void *>(-__alignof(_M_i)));
232 #if __cplusplus > 201402L
233 static constexpr
bool is_always_lock_free
234 = __atomic_always_lock_free(
sizeof(_M_i), 0);
238 store(_Tp __i,
memory_order __m = memory_order_seq_cst) noexcept
242 store(_Tp __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
246 load(
memory_order __m = memory_order_seq_cst) const noexcept
248 alignas(_Tp)
unsigned char __buf[
sizeof(_Tp)];
249 _Tp* __ptr =
reinterpret_cast<_Tp*
>(__buf);
255 load(
memory_order __m = memory_order_seq_cst) const volatile noexcept
257 alignas(_Tp)
unsigned char __buf[
sizeof(_Tp)];
258 _Tp* __ptr =
reinterpret_cast<_Tp*
>(__buf);
264 exchange(_Tp __i,
memory_order __m = memory_order_seq_cst) noexcept
266 alignas(_Tp)
unsigned char __buf[
sizeof(_Tp)];
267 _Tp* __ptr =
reinterpret_cast<_Tp*
>(__buf);
275 memory_order __m = memory_order_seq_cst) volatile noexcept
277 alignas(_Tp)
unsigned char __buf[
sizeof(_Tp)];
278 _Tp* __ptr =
reinterpret_cast<_Tp*
>(__buf);
285 compare_exchange_weak(_Tp& __e, _Tp __i,
memory_order __s,
295 compare_exchange_weak(_Tp& __e, _Tp __i,
memory_order __s,
305 compare_exchange_weak(_Tp& __e, _Tp __i,
307 {
return compare_exchange_weak(__e, __i, __m,
308 __cmpexch_failure_order(__m)); }
311 compare_exchange_weak(_Tp& __e, _Tp __i,
312 memory_order __m = memory_order_seq_cst) volatile noexcept
313 {
return compare_exchange_weak(__e, __i, __m,
314 __cmpexch_failure_order(__m)); }
317 compare_exchange_strong(_Tp& __e, _Tp __i,
memory_order __s,
327 compare_exchange_strong(_Tp& __e, _Tp __i,
memory_order __s,
337 compare_exchange_strong(_Tp& __e, _Tp __i,
339 {
return compare_exchange_strong(__e, __i, __m,
340 __cmpexch_failure_order(__m)); }
343 compare_exchange_strong(_Tp& __e, _Tp __i,
344 memory_order __m = memory_order_seq_cst) volatile noexcept
345 {
return compare_exchange_strong(__e, __i, __m,
346 __cmpexch_failure_order(__m)); }
351 template<
typename _Tp>
354 typedef _Tp* __pointer_type;
358 atomic() noexcept =
default;
359 ~
atomic() noexcept =
default;
364 constexpr
atomic(__pointer_type __p) noexcept : _M_b(__p) { }
366 operator __pointer_type()
const noexcept
367 {
return __pointer_type(_M_b); }
369 operator __pointer_type()
const volatile noexcept
370 {
return __pointer_type(_M_b); }
373 operator=(__pointer_type __p) noexcept
374 {
return _M_b.operator=(__p); }
377 operator=(__pointer_type __p)
volatile noexcept
378 {
return _M_b.operator=(__p); }
381 operator++(
int) noexcept
385 operator++(
int)
volatile noexcept
389 operator--(
int) noexcept
393 operator--(
int)
volatile noexcept
397 operator++() noexcept
401 operator++()
volatile noexcept
405 operator--() noexcept
409 operator--()
volatile noexcept
413 operator+=(ptrdiff_t __d) noexcept
414 {
return _M_b.operator+=(__d); }
417 operator+=(ptrdiff_t __d)
volatile noexcept
418 {
return _M_b.operator+=(__d); }
421 operator-=(ptrdiff_t __d) noexcept
422 {
return _M_b.operator-=(__d); }
425 operator-=(ptrdiff_t __d)
volatile noexcept
426 {
return _M_b.operator-=(__d); }
429 is_lock_free()
const noexcept
430 {
return _M_b.is_lock_free(); }
433 is_lock_free()
const volatile noexcept
434 {
return _M_b.is_lock_free(); }
436 #if __cplusplus > 201402L
437 static constexpr
bool is_always_lock_free = ATOMIC_POINTER_LOCK_FREE == 2;
441 store(__pointer_type __p,
443 {
return _M_b.store(__p, __m); }
446 store(__pointer_type __p,
447 memory_order __m = memory_order_seq_cst)
volatile noexcept
448 {
return _M_b.store(__p, __m); }
451 load(
memory_order __m = memory_order_seq_cst)
const noexcept
452 {
return _M_b.load(__m); }
455 load(
memory_order __m = memory_order_seq_cst)
const volatile noexcept
456 {
return _M_b.load(__m); }
461 {
return _M_b.exchange(__p, __m); }
465 memory_order __m = memory_order_seq_cst)
volatile noexcept
466 {
return _M_b.exchange(__p, __m); }
469 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
471 {
return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
474 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
477 {
return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
480 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
483 return compare_exchange_weak(__p1, __p2, __m,
484 __cmpexch_failure_order(__m));
488 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
489 memory_order __m = memory_order_seq_cst)
volatile noexcept
491 return compare_exchange_weak(__p1, __p2, __m,
492 __cmpexch_failure_order(__m));
496 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
498 {
return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
501 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
504 {
return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
507 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
510 return _M_b.compare_exchange_strong(__p1, __p2, __m,
511 __cmpexch_failure_order(__m));
515 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
516 memory_order __m = memory_order_seq_cst)
volatile noexcept
518 return _M_b.compare_exchange_strong(__p1, __p2, __m,
519 __cmpexch_failure_order(__m));
523 fetch_add(ptrdiff_t __d,
525 {
return _M_b.fetch_add(__d, __m); }
528 fetch_add(ptrdiff_t __d,
529 memory_order __m = memory_order_seq_cst)
volatile noexcept
530 {
return _M_b.fetch_add(__d, __m); }
533 fetch_sub(ptrdiff_t __d,
535 {
return _M_b.fetch_sub(__d, __m); }
538 fetch_sub(ptrdiff_t __d,
539 memory_order __m = memory_order_seq_cst)
volatile noexcept
540 {
return _M_b.fetch_sub(__d, __m); }
548 typedef char __integral_type;
551 atomic() noexcept =
default;
552 ~
atomic() noexcept =
default;
559 using __base_type::operator __integral_type;
560 using __base_type::operator=;
562 #if __cplusplus > 201402L
563 static constexpr
bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
571 typedef signed char __integral_type;
574 atomic() noexcept=
default;
575 ~
atomic() noexcept =
default;
582 using __base_type::operator __integral_type;
583 using __base_type::operator=;
585 #if __cplusplus > 201402L
586 static constexpr
bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
594 typedef unsigned char __integral_type;
597 atomic() noexcept=
default;
598 ~
atomic() noexcept =
default;
605 using __base_type::operator __integral_type;
606 using __base_type::operator=;
608 #if __cplusplus > 201402L
609 static constexpr
bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
617 typedef short __integral_type;
620 atomic() noexcept =
default;
621 ~
atomic() noexcept =
default;
628 using __base_type::operator __integral_type;
629 using __base_type::operator=;
631 #if __cplusplus > 201402L
632 static constexpr
bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
640 typedef unsigned short __integral_type;
643 atomic() noexcept =
default;
644 ~
atomic() noexcept =
default;
651 using __base_type::operator __integral_type;
652 using __base_type::operator=;
654 #if __cplusplus > 201402L
655 static constexpr
bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
663 typedef int __integral_type;
666 atomic() noexcept =
default;
667 ~
atomic() noexcept =
default;
674 using __base_type::operator __integral_type;
675 using __base_type::operator=;
677 #if __cplusplus > 201402L
678 static constexpr
bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
686 typedef unsigned int __integral_type;
689 atomic() noexcept =
default;
690 ~
atomic() noexcept =
default;
697 using __base_type::operator __integral_type;
698 using __base_type::operator=;
700 #if __cplusplus > 201402L
701 static constexpr
bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
709 typedef long __integral_type;
712 atomic() noexcept =
default;
713 ~
atomic() noexcept =
default;
720 using __base_type::operator __integral_type;
721 using __base_type::operator=;
723 #if __cplusplus > 201402L
724 static constexpr
bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
732 typedef unsigned long __integral_type;
735 atomic() noexcept =
default;
736 ~
atomic() noexcept =
default;
743 using __base_type::operator __integral_type;
744 using __base_type::operator=;
746 #if __cplusplus > 201402L
747 static constexpr
bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
755 typedef long long __integral_type;
758 atomic() noexcept =
default;
759 ~
atomic() noexcept =
default;
766 using __base_type::operator __integral_type;
767 using __base_type::operator=;
769 #if __cplusplus > 201402L
770 static constexpr
bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
778 typedef unsigned long long __integral_type;
781 atomic() noexcept =
default;
782 ~
atomic() noexcept =
default;
789 using __base_type::operator __integral_type;
790 using __base_type::operator=;
792 #if __cplusplus > 201402L
793 static constexpr
bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
801 typedef wchar_t __integral_type;
804 atomic() noexcept =
default;
805 ~
atomic() noexcept =
default;
812 using __base_type::operator __integral_type;
813 using __base_type::operator=;
815 #if __cplusplus > 201402L
816 static constexpr
bool is_always_lock_free = ATOMIC_WCHAR_T_LOCK_FREE == 2;
824 typedef char16_t __integral_type;
827 atomic() noexcept =
default;
828 ~
atomic() noexcept =
default;
835 using __base_type::operator __integral_type;
836 using __base_type::operator=;
838 #if __cplusplus > 201402L
839 static constexpr
bool is_always_lock_free = ATOMIC_CHAR16_T_LOCK_FREE == 2;
847 typedef char32_t __integral_type;
850 atomic() noexcept =
default;
851 ~
atomic() noexcept =
default;
858 using __base_type::operator __integral_type;
859 using __base_type::operator=;
861 #if __cplusplus > 201402L
862 static constexpr
bool is_always_lock_free = ATOMIC_CHAR32_T_LOCK_FREE == 2;
1012 atomic_flag_test_and_set_explicit(
atomic_flag* __a,
1014 {
return __a->test_and_set(__m); }
1017 atomic_flag_test_and_set_explicit(
volatile atomic_flag* __a,
1019 {
return __a->test_and_set(__m); }
1022 atomic_flag_clear_explicit(atomic_flag* __a,
memory_order __m) noexcept
1023 { __a->clear(__m); }
1026 atomic_flag_clear_explicit(
volatile atomic_flag* __a,
1028 { __a->clear(__m); }
1031 atomic_flag_test_and_set(atomic_flag* __a) noexcept
1032 {
return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
1035 atomic_flag_test_and_set(
volatile atomic_flag* __a) noexcept
1036 {
return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
1039 atomic_flag_clear(atomic_flag* __a) noexcept
1040 { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
1043 atomic_flag_clear(
volatile atomic_flag* __a) noexcept
1044 { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
1048 template<
typename _ITp>
1050 atomic_is_lock_free(
const atomic<_ITp>* __a) noexcept
1051 {
return __a->is_lock_free(); }
1053 template<
typename _ITp>
1055 atomic_is_lock_free(
const volatile atomic<_ITp>* __a) noexcept
1056 {
return __a->is_lock_free(); }
1058 template<
typename _ITp>
1060 atomic_init(atomic<_ITp>* __a, _ITp __i) noexcept
1061 { __a->store(__i, memory_order_relaxed); }
1063 template<
typename _ITp>
1065 atomic_init(
volatile atomic<_ITp>* __a, _ITp __i) noexcept
1066 { __a->store(__i, memory_order_relaxed); }
1068 template<
typename _ITp>
1070 atomic_store_explicit(atomic<_ITp>* __a, _ITp __i,
1072 { __a->store(__i, __m); }
1074 template<
typename _ITp>
1076 atomic_store_explicit(
volatile atomic<_ITp>* __a, _ITp __i,
1078 { __a->store(__i, __m); }
1080 template<
typename _ITp>
1082 atomic_load_explicit(
const atomic<_ITp>* __a,
memory_order __m) noexcept
1083 {
return __a->load(__m); }
1085 template<
typename _ITp>
1087 atomic_load_explicit(
const volatile atomic<_ITp>* __a,
1089 {
return __a->load(__m); }
1091 template<
typename _ITp>
1093 atomic_exchange_explicit(atomic<_ITp>* __a, _ITp __i,
1095 {
return __a->exchange(__i, __m); }
1097 template<
typename _ITp>
1099 atomic_exchange_explicit(
volatile atomic<_ITp>* __a, _ITp __i,
1101 {
return __a->exchange(__i, __m); }
1103 template<
typename _ITp>
1105 atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
1106 _ITp* __i1, _ITp __i2,
1109 {
return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
1111 template<
typename _ITp>
1113 atomic_compare_exchange_weak_explicit(
volatile atomic<_ITp>* __a,
1114 _ITp* __i1, _ITp __i2,
1117 {
return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
1119 template<
typename _ITp>
1121 atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
1122 _ITp* __i1, _ITp __i2,
1125 {
return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
1127 template<
typename _ITp>
1129 atomic_compare_exchange_strong_explicit(
volatile atomic<_ITp>* __a,
1130 _ITp* __i1, _ITp __i2,
1133 {
return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
1136 template<
typename _ITp>
1138 atomic_store(atomic<_ITp>* __a, _ITp __i) noexcept
1139 { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
1141 template<
typename _ITp>
1143 atomic_store(
volatile atomic<_ITp>* __a, _ITp __i) noexcept
1144 { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
1146 template<
typename _ITp>
1148 atomic_load(
const atomic<_ITp>* __a) noexcept
1149 {
return atomic_load_explicit(__a, memory_order_seq_cst); }
1151 template<
typename _ITp>
1153 atomic_load(
const volatile atomic<_ITp>* __a) noexcept
1154 {
return atomic_load_explicit(__a, memory_order_seq_cst); }
1156 template<
typename _ITp>
1158 atomic_exchange(atomic<_ITp>* __a, _ITp __i) noexcept
1159 {
return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
1161 template<
typename _ITp>
1163 atomic_exchange(
volatile atomic<_ITp>* __a, _ITp __i) noexcept
1164 {
return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
1166 template<
typename _ITp>
1168 atomic_compare_exchange_weak(atomic<_ITp>* __a,
1169 _ITp* __i1, _ITp __i2) noexcept
1171 return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
1172 memory_order_seq_cst,
1173 memory_order_seq_cst);
1176 template<
typename _ITp>
1178 atomic_compare_exchange_weak(
volatile atomic<_ITp>* __a,
1179 _ITp* __i1, _ITp __i2) noexcept
1181 return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
1182 memory_order_seq_cst,
1183 memory_order_seq_cst);
1186 template<
typename _ITp>
1188 atomic_compare_exchange_strong(atomic<_ITp>* __a,
1189 _ITp* __i1, _ITp __i2) noexcept
1191 return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
1192 memory_order_seq_cst,
1193 memory_order_seq_cst);
1196 template<
typename _ITp>
1198 atomic_compare_exchange_strong(
volatile atomic<_ITp>* __a,
1199 _ITp* __i1, _ITp __i2) noexcept
1201 return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
1202 memory_order_seq_cst,
1203 memory_order_seq_cst);
1210 template<
typename _ITp>
1212 atomic_fetch_add_explicit(__atomic_base<_ITp>* __a, _ITp __i,
1214 {
return __a->fetch_add(__i, __m); }
1216 template<
typename _ITp>
1218 atomic_fetch_add_explicit(
volatile __atomic_base<_ITp>* __a, _ITp __i,
1220 {
return __a->fetch_add(__i, __m); }
1222 template<
typename _ITp>
1224 atomic_fetch_sub_explicit(__atomic_base<_ITp>* __a, _ITp __i,
1226 {
return __a->fetch_sub(__i, __m); }
1228 template<
typename _ITp>
1230 atomic_fetch_sub_explicit(
volatile __atomic_base<_ITp>* __a, _ITp __i,
1232 {
return __a->fetch_sub(__i, __m); }
1234 template<
typename _ITp>
1236 atomic_fetch_and_explicit(__atomic_base<_ITp>* __a, _ITp __i,
1238 {
return __a->fetch_and(__i, __m); }
1240 template<
typename _ITp>
1242 atomic_fetch_and_explicit(
volatile __atomic_base<_ITp>* __a, _ITp __i,
1244 {
return __a->fetch_and(__i, __m); }
1246 template<
typename _ITp>
1248 atomic_fetch_or_explicit(__atomic_base<_ITp>* __a, _ITp __i,
1250 {
return __a->fetch_or(__i, __m); }
1252 template<
typename _ITp>
1254 atomic_fetch_or_explicit(
volatile __atomic_base<_ITp>* __a, _ITp __i,
1256 {
return __a->fetch_or(__i, __m); }
1258 template<
typename _ITp>
1260 atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a, _ITp __i,
1262 {
return __a->fetch_xor(__i, __m); }
1264 template<
typename _ITp>
1266 atomic_fetch_xor_explicit(
volatile __atomic_base<_ITp>* __a, _ITp __i,
1268 {
return __a->fetch_xor(__i, __m); }
1270 template<
typename _ITp>
1272 atomic_fetch_add(__atomic_base<_ITp>* __a, _ITp __i) noexcept
1273 {
return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
1275 template<
typename _ITp>
1277 atomic_fetch_add(
volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
1278 {
return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
1280 template<
typename _ITp>
1282 atomic_fetch_sub(__atomic_base<_ITp>* __a, _ITp __i) noexcept
1283 {
return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
1285 template<
typename _ITp>
1287 atomic_fetch_sub(
volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
1288 {
return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
1290 template<
typename _ITp>
1292 atomic_fetch_and(__atomic_base<_ITp>* __a, _ITp __i) noexcept
1293 {
return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
1295 template<
typename _ITp>
1297 atomic_fetch_and(
volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
1298 {
return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
1300 template<
typename _ITp>
1302 atomic_fetch_or(__atomic_base<_ITp>* __a, _ITp __i) noexcept
1303 {
return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
1305 template<
typename _ITp>
1307 atomic_fetch_or(
volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
1308 {
return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
1310 template<
typename _ITp>
1312 atomic_fetch_xor(__atomic_base<_ITp>* __a, _ITp __i) noexcept
1313 {
return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
1315 template<
typename _ITp>
1317 atomic_fetch_xor(
volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
1318 {
return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
1322 template<
typename _ITp>
1324 atomic_fetch_add_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
1326 {
return __a->fetch_add(__d, __m); }
1328 template<
typename _ITp>
1330 atomic_fetch_add_explicit(
volatile atomic<_ITp*>* __a, ptrdiff_t __d,
1332 {
return __a->fetch_add(__d, __m); }
1334 template<
typename _ITp>
1336 atomic_fetch_add(
volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
1337 {
return __a->fetch_add(__d); }
1339 template<
typename _ITp>
1341 atomic_fetch_add(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
1342 {
return __a->fetch_add(__d); }
1344 template<
typename _ITp>
1346 atomic_fetch_sub_explicit(
volatile atomic<_ITp*>* __a,
1348 {
return __a->fetch_sub(__d, __m); }
1350 template<
typename _ITp>
1352 atomic_fetch_sub_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
1354 {
return __a->fetch_sub(__d, __m); }
1356 template<
typename _ITp>
1358 atomic_fetch_sub(
volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
1359 {
return __a->fetch_sub(__d); }
1361 template<
typename _ITp>
1363 atomic_fetch_sub(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
1364 {
return __a->fetch_sub(__d); }
1367 _GLIBCXX_END_NAMESPACE_VERSION
1372 #endif // _GLIBCXX_ATOMIC
Explicit specialization for wchar_t.
atomic< signed char > atomic_schar
atomic_schar
Explicit specialization for unsigned long long.
atomic< char32_t > atomic_char32_t
atomic_char32_t
atomic< int_fast64_t > atomic_int_fast64_t
atomic_int_fast64_t
atomic< uint_fast64_t > atomic_uint_fast64_t
atomic_uint_fast64_t
atomic< ptrdiff_t > atomic_ptrdiff_t
atomic_ptrdiff_t
Explicit specialization for long.
atomic< size_t > atomic_size_t
atomic_size_t
atomic< uint_least8_t > atomic_uint_least8_t
atomic_uint_least8_t
memory_order
Enumeration for memory_order.
atomic< wchar_t > atomic_wchar_t
atomic_wchar_t
atomic< int16_t > atomic_int16_t
atomic_int16_t
atomic< char > atomic_char
atomic_char
atomic< uint_fast32_t > atomic_uint_fast32_t
atomic_uint_fast32_t
atomic< int_fast32_t > atomic_int_fast32_t
atomic_int_fast32_t
_Tp exchange(_Tp &__obj, _Up &&__new_val)
Assigns __new_val to __obj and returns the value __obj held before the assignment.
atomic< unsigned short > atomic_ushort
atomic_ushort
atomic< int_least64_t > atomic_int_least64_t
atomic_int_least64_t
#define ATOMIC_BOOL_LOCK_FREE
Explicit specialization for unsigned short.
atomic< uintmax_t > atomic_uintmax_t
atomic_uintmax_t
atomic< int_least8_t > atomic_int_least8_t
atomic_int_least8_t
atomic< unsigned long long > atomic_ullong
atomic_ullong
Explicit specialization for unsigned long.
atomic< unsigned long > atomic_ulong
atomic_ulong
atomic< int_least32_t > atomic_int_least32_t
atomic_int_least32_t
Explicit specialization for signed char.
atomic< unsigned char > atomic_uchar
atomic_uchar
atomic< intptr_t > atomic_intptr_t
atomic_intptr_t
atomic< intmax_t > atomic_intmax_t
atomic_intmax_t
atomic< bool > atomic_bool
atomic_bool
Explicit specialization for int.
atomic< char16_t > atomic_char16_t
atomic_char16_t
atomic< uint_least64_t > atomic_uint_least64_t
atomic_uint_least64_t
Explicit specialization for char16_t.
Explicit specialization for short.
Explicit specialization for unsigned int.
atomic< int > atomic_int
atomic_int
atomic< uintptr_t > atomic_uintptr_t
atomic_uintptr_t
atomic< unsigned int > atomic_uint
atomic_uint
atomic< uint64_t > atomic_uint64_t
atomic_uint64_t
atomic< uint32_t > atomic_uint32_t
atomic_uint32_t
atomic< uint8_t > atomic_uint8_t
atomic_uint8_t
Explicit specialization for char32_t.
atomic< short > atomic_short
atomic_short
Explicit specialization for char.
constexpr _Tp * __addressof(_Tp &__r) noexcept
Same as C++11 std::addressof: returns the actual address of __r, even when the type overloads operator&.
atomic< uint_fast8_t > atomic_uint_fast8_t
atomic_uint_fast8_t
atomic< int_fast16_t > atomic_int_fast16_t
atomic_int_fast16_t
Generic atomic type, primary class template.
atomic< int8_t > atomic_int8_t
atomic_int8_t
atomic< int_least16_t > atomic_int_least16_t
atomic_int_least16_t
atomic< int64_t > atomic_int64_t
atomic_int64_t
atomic< int_fast8_t > atomic_int_fast8_t
atomic_int_fast8_t
atomic< uint_least32_t > atomic_uint_least32_t
atomic_uint_least32_t
atomic< uint16_t > atomic_uint16_t
atomic_uint16_t
atomic< uint_fast16_t > atomic_uint_fast16_t
atomic_uint_fast16_t
Explicit specialization for long long.
atomic< int32_t > atomic_int32_t
atomic_int32_t
atomic< uint_least16_t > atomic_uint_least16_t
atomic_uint_least16_t
atomic< long long > atomic_llong
atomic_llong
atomic< long > atomic_long
atomic_long
Explicit specialization for unsigned char.