32 #ifndef _GLIBCXX_ATOMIC
33 #define _GLIBCXX_ATOMIC 1
35 #pragma GCC system_header
37 #if __cplusplus < 201103L
44 namespace std _GLIBCXX_VISIBILITY(default)
46 _GLIBCXX_BEGIN_NAMESPACE_VERSION
53 #if __cplusplus >= 201703L
54 # define __cpp_lib_atomic_is_always_lock_free 201603
57 template<
typename _Tp>
65 using value_type = bool;
81 {
return _M_base.operator=(__i); }
84 operator=(
bool __i)
volatile noexcept
85 {
return _M_base.operator=(__i); }
88 {
return _M_base.load(); }
90 operator bool()
const volatile noexcept
91 {
return _M_base.load(); }
94 is_lock_free()
const noexcept {
return _M_base.is_lock_free(); }
97 is_lock_free()
const volatile noexcept {
return _M_base.is_lock_free(); }
99 #if __cplusplus >= 201703L
105 { _M_base.store(__i, __m); }
109 { _M_base.store(__i, __m); }
113 {
return _M_base.load(__m); }
117 {
return _M_base.load(__m); }
121 {
return _M_base.exchange(__i, __m); }
126 {
return _M_base.exchange(__i, __m); }
129 compare_exchange_weak(
bool& __i1,
bool __i2,
memory_order __m1,
131 {
return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
134 compare_exchange_weak(
bool& __i1,
bool __i2,
memory_order __m1,
136 {
return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
139 compare_exchange_weak(
bool& __i1,
bool __i2,
141 {
return _M_base.compare_exchange_weak(__i1, __i2, __m); }
144 compare_exchange_weak(
bool& __i1,
bool __i2,
146 {
return _M_base.compare_exchange_weak(__i1, __i2, __m); }
149 compare_exchange_strong(
bool& __i1,
bool __i2,
memory_order __m1,
151 {
return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
154 compare_exchange_strong(
bool& __i1,
bool __i2,
memory_order __m1,
156 {
return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
159 compare_exchange_strong(
bool& __i1,
bool __i2,
161 {
return _M_base.compare_exchange_strong(__i1, __i2, __m); }
164 compare_exchange_strong(
bool& __i1,
bool __i2,
166 {
return _M_base.compare_exchange_strong(__i1, __i2, __m); }
175 template<
typename _Tp>
178 using value_type = _Tp;
182 static constexpr
int _S_min_alignment
183 = (
sizeof(_Tp) & (
sizeof(_Tp) - 1)) ||
sizeof(_Tp) > 16
186 static constexpr
int _S_alignment
187 = _S_min_alignment >
alignof(_Tp) ? _S_min_alignment :
alignof(_Tp);
189 alignas(_S_alignment) _Tp _M_i;
191 static_assert(__is_trivially_copyable(_Tp),
192 "std::atomic requires a trivially copyable type");
194 static_assert(
sizeof(_Tp) > 0,
195 "Incomplete or zero-sized types are not supported");
209 operator _Tp() const volatile
noexcept
214 { store(__i);
return __i; }
217 operator=(_Tp __i)
volatile noexcept
218 { store(__i);
return __i; }
224 return __atomic_is_lock_free(
sizeof(_M_i),
225 reinterpret_cast<void *>(-_S_alignment));
229 is_lock_free() const volatile
noexcept
232 return __atomic_is_lock_free(
sizeof(_M_i),
233 reinterpret_cast<void *>(-_S_alignment));
236 #if __cplusplus >= 201703L
237 static constexpr
bool is_always_lock_free
238 = __atomic_always_lock_free(
sizeof(_M_i), 0);
242 store(_Tp __i,
memory_order __m = memory_order_seq_cst) noexcept
246 store(_Tp __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
250 load(
memory_order __m = memory_order_seq_cst) const noexcept
252 alignas(_Tp)
unsigned char __buf[
sizeof(_Tp)];
253 _Tp* __ptr =
reinterpret_cast<_Tp*
>(__buf);
259 load(
memory_order __m = memory_order_seq_cst) const volatile noexcept
261 alignas(_Tp)
unsigned char __buf[
sizeof(_Tp)];
262 _Tp* __ptr =
reinterpret_cast<_Tp*
>(__buf);
268 exchange(_Tp __i,
memory_order __m = memory_order_seq_cst) noexcept
270 alignas(_Tp)
unsigned char __buf[
sizeof(_Tp)];
271 _Tp* __ptr =
reinterpret_cast<_Tp*
>(__buf);
279 memory_order __m = memory_order_seq_cst) volatile noexcept
281 alignas(_Tp)
unsigned char __buf[
sizeof(_Tp)];
282 _Tp* __ptr =
reinterpret_cast<_Tp*
>(__buf);
289 compare_exchange_weak(_Tp& __e, _Tp __i,
memory_order __s,
295 true,
int(__s),
int(__f));
299 compare_exchange_weak(_Tp& __e, _Tp __i,
memory_order __s,
305 true,
int(__s),
int(__f));
309 compare_exchange_weak(_Tp& __e, _Tp __i,
311 {
return compare_exchange_weak(__e, __i, __m,
312 __cmpexch_failure_order(__m)); }
315 compare_exchange_weak(_Tp& __e, _Tp __i,
316 memory_order __m = memory_order_seq_cst) volatile noexcept
317 {
return compare_exchange_weak(__e, __i, __m,
318 __cmpexch_failure_order(__m)); }
321 compare_exchange_strong(_Tp& __e, _Tp __i,
memory_order __s,
327 false,
int(__s),
int(__f));
331 compare_exchange_strong(_Tp& __e, _Tp __i,
memory_order __s,
337 false,
int(__s),
int(__f));
341 compare_exchange_strong(_Tp& __e, _Tp __i,
343 {
return compare_exchange_strong(__e, __i, __m,
344 __cmpexch_failure_order(__m)); }
347 compare_exchange_strong(_Tp& __e, _Tp __i,
348 memory_order __m = memory_order_seq_cst) volatile noexcept
349 {
return compare_exchange_strong(__e, __i, __m,
350 __cmpexch_failure_order(__m)); }
355 template<
typename _Tp>
358 using value_type = _Tp*;
359 using difference_type = ptrdiff_t;
361 typedef _Tp* __pointer_type;
365 atomic() noexcept =
default;
366 ~
atomic() noexcept =
default;
371 constexpr
atomic(__pointer_type __p) noexcept : _M_b(__p) { }
373 operator __pointer_type()
const noexcept
374 {
return __pointer_type(_M_b); }
376 operator __pointer_type()
const volatile noexcept
377 {
return __pointer_type(_M_b); }
380 operator=(__pointer_type __p) noexcept
381 {
return _M_b.operator=(__p); }
384 operator=(__pointer_type __p)
volatile noexcept
385 {
return _M_b.operator=(__p); }
388 operator++(
int) noexcept
390 #if __cplusplus >= 201703L
397 operator++(
int)
volatile noexcept
399 #if __cplusplus >= 201703L
406 operator--(
int) noexcept
408 #if __cplusplus >= 201703L
415 operator--(
int)
volatile noexcept
417 #if __cplusplus >= 201703L
424 operator++() noexcept
426 #if __cplusplus >= 201703L
433 operator++()
volatile noexcept
435 #if __cplusplus >= 201703L
442 operator--() noexcept
444 #if __cplusplus >= 201703L
451 operator--()
volatile noexcept
453 #if __cplusplus >= 201703L
460 operator+=(ptrdiff_t __d) noexcept
462 #if __cplusplus >= 201703L
465 return _M_b.operator+=(__d);
469 operator+=(ptrdiff_t __d)
volatile noexcept
471 #if __cplusplus >= 201703L
474 return _M_b.operator+=(__d);
478 operator-=(ptrdiff_t __d) noexcept
480 #if __cplusplus >= 201703L
483 return _M_b.operator-=(__d);
487 operator-=(ptrdiff_t __d)
volatile noexcept
489 #if __cplusplus >= 201703L
492 return _M_b.operator-=(__d);
496 is_lock_free()
const noexcept
497 {
return _M_b.is_lock_free(); }
500 is_lock_free()
const volatile noexcept
501 {
return _M_b.is_lock_free(); }
503 #if __cplusplus >= 201703L
504 static constexpr
bool is_always_lock_free = ATOMIC_POINTER_LOCK_FREE == 2;
508 store(__pointer_type __p,
510 {
return _M_b.store(__p, __m); }
513 store(__pointer_type __p,
514 memory_order __m = memory_order_seq_cst)
volatile noexcept
515 {
return _M_b.store(__p, __m); }
518 load(
memory_order __m = memory_order_seq_cst)
const noexcept
519 {
return _M_b.load(__m); }
522 load(
memory_order __m = memory_order_seq_cst)
const volatile noexcept
523 {
return _M_b.load(__m); }
528 {
return _M_b.exchange(__p, __m); }
532 memory_order __m = memory_order_seq_cst)
volatile noexcept
533 {
return _M_b.exchange(__p, __m); }
536 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
538 {
return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
541 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
544 {
return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
547 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
550 return compare_exchange_weak(__p1, __p2, __m,
551 __cmpexch_failure_order(__m));
555 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
556 memory_order __m = memory_order_seq_cst)
volatile noexcept
558 return compare_exchange_weak(__p1, __p2, __m,
559 __cmpexch_failure_order(__m));
563 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
565 {
return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
568 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
571 {
return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
574 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
577 return _M_b.compare_exchange_strong(__p1, __p2, __m,
578 __cmpexch_failure_order(__m));
582 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
583 memory_order __m = memory_order_seq_cst)
volatile noexcept
585 return _M_b.compare_exchange_strong(__p1, __p2, __m,
586 __cmpexch_failure_order(__m));
590 fetch_add(ptrdiff_t __d,
593 #if __cplusplus >= 201703L
596 return _M_b.fetch_add(__d, __m);
600 fetch_add(ptrdiff_t __d,
601 memory_order __m = memory_order_seq_cst)
volatile noexcept
603 #if __cplusplus >= 201703L
606 return _M_b.fetch_add(__d, __m);
610 fetch_sub(ptrdiff_t __d,
613 #if __cplusplus >= 201703L
616 return _M_b.fetch_sub(__d, __m);
620 fetch_sub(ptrdiff_t __d,
621 memory_order __m = memory_order_seq_cst)
volatile noexcept
623 #if __cplusplus >= 201703L
626 return _M_b.fetch_sub(__d, __m);
635 typedef char __integral_type;
638 atomic() noexcept =
default;
639 ~
atomic() noexcept =
default;
646 using __base_type::operator __integral_type;
647 using __base_type::operator=;
649 #if __cplusplus >= 201703L
650 static constexpr
bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
658 typedef signed char __integral_type;
661 atomic() noexcept=
default;
662 ~
atomic() noexcept =
default;
669 using __base_type::operator __integral_type;
670 using __base_type::operator=;
672 #if __cplusplus >= 201703L
673 static constexpr
bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
681 typedef unsigned char __integral_type;
684 atomic() noexcept=
default;
685 ~
atomic() noexcept =
default;
692 using __base_type::operator __integral_type;
693 using __base_type::operator=;
695 #if __cplusplus >= 201703L
696 static constexpr
bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
704 typedef short __integral_type;
707 atomic() noexcept =
default;
708 ~
atomic() noexcept =
default;
715 using __base_type::operator __integral_type;
716 using __base_type::operator=;
718 #if __cplusplus >= 201703L
719 static constexpr
bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
727 typedef unsigned short __integral_type;
730 atomic() noexcept =
default;
731 ~
atomic() noexcept =
default;
738 using __base_type::operator __integral_type;
739 using __base_type::operator=;
741 #if __cplusplus >= 201703L
742 static constexpr
bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
750 typedef int __integral_type;
753 atomic() noexcept =
default;
754 ~
atomic() noexcept =
default;
761 using __base_type::operator __integral_type;
762 using __base_type::operator=;
764 #if __cplusplus >= 201703L
765 static constexpr
bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
773 typedef unsigned int __integral_type;
776 atomic() noexcept =
default;
777 ~
atomic() noexcept =
default;
784 using __base_type::operator __integral_type;
785 using __base_type::operator=;
787 #if __cplusplus >= 201703L
788 static constexpr
bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
796 typedef long __integral_type;
799 atomic() noexcept =
default;
800 ~
atomic() noexcept =
default;
807 using __base_type::operator __integral_type;
808 using __base_type::operator=;
810 #if __cplusplus >= 201703L
811 static constexpr
bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
819 typedef unsigned long __integral_type;
822 atomic() noexcept =
default;
823 ~
atomic() noexcept =
default;
830 using __base_type::operator __integral_type;
831 using __base_type::operator=;
833 #if __cplusplus >= 201703L
834 static constexpr
bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
842 typedef long long __integral_type;
845 atomic() noexcept =
default;
846 ~
atomic() noexcept =
default;
853 using __base_type::operator __integral_type;
854 using __base_type::operator=;
856 #if __cplusplus >= 201703L
857 static constexpr
bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
865 typedef unsigned long long __integral_type;
868 atomic() noexcept =
default;
869 ~
atomic() noexcept =
default;
876 using __base_type::operator __integral_type;
877 using __base_type::operator=;
879 #if __cplusplus >= 201703L
880 static constexpr
bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
888 typedef wchar_t __integral_type;
891 atomic() noexcept =
default;
892 ~
atomic() noexcept =
default;
899 using __base_type::operator __integral_type;
900 using __base_type::operator=;
902 #if __cplusplus >= 201703L
903 static constexpr
bool is_always_lock_free = ATOMIC_WCHAR_T_LOCK_FREE == 2;
907 #ifdef _GLIBCXX_USE_CHAR8_T
912 typedef char8_t __integral_type;
915 atomic() noexcept = default;
916 ~
atomic() noexcept = default;
921 constexpr
atomic(__integral_type __i) noexcept : __base_type(__i) { }
923 using __base_type::operator __integral_type;
924 using __base_type::operator=;
926 #if __cplusplus > 201402L
927 static constexpr
bool is_always_lock_free = ATOMIC_CHAR8_T_LOCK_FREE == 2;
936 typedef char16_t __integral_type;
939 atomic() noexcept =
default;
940 ~
atomic() noexcept =
default;
947 using __base_type::operator __integral_type;
948 using __base_type::operator=;
950 #if __cplusplus >= 201703L
951 static constexpr
bool is_always_lock_free = ATOMIC_CHAR16_T_LOCK_FREE == 2;
959 typedef char32_t __integral_type;
962 atomic() noexcept =
default;
963 ~
atomic() noexcept =
default;
970 using __base_type::operator __integral_type;
971 using __base_type::operator=;
973 #if __cplusplus >= 201703L
974 static constexpr
bool is_always_lock_free = ATOMIC_CHAR32_T_LOCK_FREE == 2;
1018 #ifdef _GLIBCXX_USE_CHAR8_T
1029 #ifdef _GLIBCXX_USE_C99_STDINT_TR1
1121 #ifdef _GLIBCXX_USE_C99_STDINT_TR1
1131 atomic_flag_test_and_set_explicit(
atomic_flag* __a,
1133 {
return __a->test_and_set(__m); }
1136 atomic_flag_test_and_set_explicit(
volatile atomic_flag* __a,
1138 {
return __a->test_and_set(__m); }
1141 atomic_flag_clear_explicit(atomic_flag* __a,
memory_order __m) noexcept
1142 { __a->clear(__m); }
1145 atomic_flag_clear_explicit(
volatile atomic_flag* __a,
1147 { __a->clear(__m); }
1150 atomic_flag_test_and_set(atomic_flag* __a) noexcept
1151 {
return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
1154 atomic_flag_test_and_set(
volatile atomic_flag* __a) noexcept
1155 {
return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
1158 atomic_flag_clear(atomic_flag* __a) noexcept
1159 { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
1162 atomic_flag_clear(
volatile atomic_flag* __a) noexcept
1163 { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
/// Maps an atomic specialization to the type of value it stores
/// (atomic<_Tp>::value_type).
template<typename _Tp>
  using __atomic_val_t = typename atomic<_Tp>::value_type;

/// Maps an atomic specialization to the operand type of its arithmetic
/// operations (atomic<_Tp>::difference_type, e.g. ptrdiff_t for
/// pointer specializations).
template<typename _Tp>
  using __atomic_diff_t = typename atomic<_Tp>::difference_type;
/// Nonmember form of atomic<_ITp>::is_lock_free().
template<typename _ITp>
  inline bool
  atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
  { return __a->is_lock_free(); }

/// Overload for volatile-qualified atomic objects.
template<typename _ITp>
  inline bool
  atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
  { return __a->is_lock_free(); }
1183 template<
typename _ITp>
1185 atomic_init(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1186 { __a->store(__i, memory_order_relaxed); }
1188 template<
typename _ITp>
1190 atomic_init(
volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1191 { __a->store(__i, memory_order_relaxed); }
1193 template<
typename _ITp>
1195 atomic_store_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
1197 { __a->store(__i, __m); }
1199 template<
typename _ITp>
1201 atomic_store_explicit(
volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
1203 { __a->store(__i, __m); }
/// Nonmember load with explicit memory ordering __m.
template<typename _ITp>
  inline _ITp
  atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
  { return __a->load(__m); }

template<typename _ITp>
  inline _ITp
  atomic_load_explicit(const volatile atomic<_ITp>* __a,
                       memory_order __m) noexcept
  { return __a->load(__m); }
1216 template<
typename _ITp>
1218 atomic_exchange_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
1220 {
return __a->exchange(__i, __m); }
1222 template<
typename _ITp>
1224 atomic_exchange_explicit(
volatile atomic<_ITp>* __a,
1225 __atomic_val_t<_ITp> __i,
1227 {
return __a->exchange(__i, __m); }
1229 template<
typename _ITp>
1231 atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
1232 __atomic_val_t<_ITp>* __i1,
1233 __atomic_val_t<_ITp> __i2,
1236 {
return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
1238 template<
typename _ITp>
1240 atomic_compare_exchange_weak_explicit(
volatile atomic<_ITp>* __a,
1241 __atomic_val_t<_ITp>* __i1,
1242 __atomic_val_t<_ITp> __i2,
1245 {
return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
1247 template<
typename _ITp>
1249 atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
1250 __atomic_val_t<_ITp>* __i1,
1251 __atomic_val_t<_ITp> __i2,
1254 {
return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
1256 template<
typename _ITp>
1258 atomic_compare_exchange_strong_explicit(
volatile atomic<_ITp>* __a,
1259 __atomic_val_t<_ITp>* __i1,
1260 __atomic_val_t<_ITp> __i2,
1263 {
return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
1266 template<
typename _ITp>
1268 atomic_store(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1269 { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
1271 template<
typename _ITp>
1273 atomic_store(
volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1274 { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
/// Nonmember load with sequentially consistent ordering.
template<typename _ITp>
  inline _ITp
  atomic_load(const atomic<_ITp>* __a) noexcept
  { return atomic_load_explicit(__a, memory_order_seq_cst); }

template<typename _ITp>
  inline _ITp
  atomic_load(const volatile atomic<_ITp>* __a) noexcept
  { return atomic_load_explicit(__a, memory_order_seq_cst); }
1286 template<
typename _ITp>
1288 atomic_exchange(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1289 {
return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
1291 template<
typename _ITp>
1293 atomic_exchange(
volatile atomic<_ITp>* __a,
1294 __atomic_val_t<_ITp> __i) noexcept
1295 {
return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
1297 template<
typename _ITp>
1299 atomic_compare_exchange_weak(atomic<_ITp>* __a,
1300 __atomic_val_t<_ITp>* __i1,
1301 __atomic_val_t<_ITp> __i2) noexcept
1303 return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
1304 memory_order_seq_cst,
1305 memory_order_seq_cst);
1308 template<
typename _ITp>
1310 atomic_compare_exchange_weak(
volatile atomic<_ITp>* __a,
1311 __atomic_val_t<_ITp>* __i1,
1312 __atomic_val_t<_ITp> __i2) noexcept
1314 return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
1315 memory_order_seq_cst,
1316 memory_order_seq_cst);
1319 template<
typename _ITp>
1321 atomic_compare_exchange_strong(atomic<_ITp>* __a,
1322 __atomic_val_t<_ITp>* __i1,
1323 __atomic_val_t<_ITp> __i2) noexcept
1325 return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
1326 memory_order_seq_cst,
1327 memory_order_seq_cst);
1330 template<
typename _ITp>
1332 atomic_compare_exchange_strong(
volatile atomic<_ITp>* __a,
1333 __atomic_val_t<_ITp>* __i1,
1334 __atomic_val_t<_ITp> __i2) noexcept
1336 return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
1337 memory_order_seq_cst,
1338 memory_order_seq_cst);
1345 template<
typename _ITp>
1347 atomic_fetch_add_explicit(atomic<_ITp>* __a,
1348 __atomic_diff_t<_ITp> __i,
1350 {
return __a->fetch_add(__i, __m); }
1352 template<
typename _ITp>
1354 atomic_fetch_add_explicit(
volatile atomic<_ITp>* __a,
1355 __atomic_diff_t<_ITp> __i,
1357 {
return __a->fetch_add(__i, __m); }
1359 template<
typename _ITp>
1361 atomic_fetch_sub_explicit(atomic<_ITp>* __a,
1362 __atomic_diff_t<_ITp> __i,
1364 {
return __a->fetch_sub(__i, __m); }
1366 template<
typename _ITp>
1368 atomic_fetch_sub_explicit(
volatile atomic<_ITp>* __a,
1369 __atomic_diff_t<_ITp> __i,
1371 {
return __a->fetch_sub(__i, __m); }
1373 template<
typename _ITp>
1375 atomic_fetch_and_explicit(__atomic_base<_ITp>* __a,
1376 __atomic_val_t<_ITp> __i,
1378 {
return __a->fetch_and(__i, __m); }
1380 template<
typename _ITp>
1382 atomic_fetch_and_explicit(
volatile __atomic_base<_ITp>* __a,
1383 __atomic_val_t<_ITp> __i,
1385 {
return __a->fetch_and(__i, __m); }
1387 template<
typename _ITp>
1389 atomic_fetch_or_explicit(__atomic_base<_ITp>* __a,
1390 __atomic_val_t<_ITp> __i,
1392 {
return __a->fetch_or(__i, __m); }
1394 template<
typename _ITp>
1396 atomic_fetch_or_explicit(
volatile __atomic_base<_ITp>* __a,
1397 __atomic_val_t<_ITp> __i,
1399 {
return __a->fetch_or(__i, __m); }
1401 template<
typename _ITp>
1403 atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a,
1404 __atomic_val_t<_ITp> __i,
1406 {
return __a->fetch_xor(__i, __m); }
1408 template<
typename _ITp>
1410 atomic_fetch_xor_explicit(
volatile __atomic_base<_ITp>* __a,
1411 __atomic_val_t<_ITp> __i,
1413 {
return __a->fetch_xor(__i, __m); }
1415 template<
typename _ITp>
1417 atomic_fetch_add(atomic<_ITp>* __a,
1418 __atomic_diff_t<_ITp> __i) noexcept
1419 {
return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
1421 template<
typename _ITp>
1423 atomic_fetch_add(
volatile atomic<_ITp>* __a,
1424 __atomic_diff_t<_ITp> __i) noexcept
1425 {
return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
1427 template<
typename _ITp>
1429 atomic_fetch_sub(atomic<_ITp>* __a,
1430 __atomic_diff_t<_ITp> __i) noexcept
1431 {
return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
1433 template<
typename _ITp>
1435 atomic_fetch_sub(
volatile atomic<_ITp>* __a,
1436 __atomic_diff_t<_ITp> __i) noexcept
1437 {
return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
1439 template<
typename _ITp>
1441 atomic_fetch_and(__atomic_base<_ITp>* __a,
1442 __atomic_val_t<_ITp> __i) noexcept
1443 {
return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
1445 template<
typename _ITp>
1447 atomic_fetch_and(
volatile __atomic_base<_ITp>* __a,
1448 __atomic_val_t<_ITp> __i) noexcept
1449 {
return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
1451 template<
typename _ITp>
1453 atomic_fetch_or(__atomic_base<_ITp>* __a,
1454 __atomic_val_t<_ITp> __i) noexcept
1455 {
return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
1457 template<
typename _ITp>
1459 atomic_fetch_or(
volatile __atomic_base<_ITp>* __a,
1460 __atomic_val_t<_ITp> __i) noexcept
1461 {
return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
1463 template<
typename _ITp>
1465 atomic_fetch_xor(__atomic_base<_ITp>* __a,
1466 __atomic_val_t<_ITp> __i) noexcept
1467 {
return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
1469 template<
typename _ITp>
1471 atomic_fetch_xor(
volatile __atomic_base<_ITp>* __a,
1472 __atomic_val_t<_ITp> __i) noexcept
1473 {
return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
1477 _GLIBCXX_END_NAMESPACE_VERSION
1482 #endif // _GLIBCXX_ATOMIC
Explicit specialization for long long.
atomic< unsigned int > atomic_uint
atomic_uint
atomic< uint_least64_t > atomic_uint_least64_t
atomic_uint_least64_t
atomic< bool > atomic_bool
atomic_bool
atomic< uintptr_t > atomic_uintptr_t
atomic_uintptr_t
atomic< uint32_t > atomic_uint32_t
atomic_uint32_t
atomic< uint16_t > atomic_uint16_t
atomic_uint16_t
Explicit specialization for unsigned char.
atomic< unsigned short > atomic_ushort
atomic_ushort
atomic< uint_least8_t > atomic_uint_least8_t
atomic_uint_least8_t
atomic< int_least32_t > atomic_int_least32_t
atomic_int_least32_t
#define ATOMIC_BOOL_LOCK_FREE
Explicit specialization for wchar_t.
Explicit specialization for unsigned long long.
atomic< int > atomic_int
atomic_int
_Tp exchange(_Tp &__obj, _Up &&__new_val)
Assign __new_val to __obj and return its previous value.
atomic< uintmax_t > atomic_uintmax_t
atomic_uintmax_t
Explicit specialization for long.
atomic< unsigned long > atomic_ulong
atomic_ulong
atomic< char32_t > atomic_char32_t
atomic_char32_t
atomic< intptr_t > atomic_intptr_t
atomic_intptr_t
atomic< int64_t > atomic_int64_t
atomic_int64_t
atomic< int_fast8_t > atomic_int_fast8_t
atomic_int_fast8_t
atomic< uint64_t > atomic_uint64_t
atomic_uint64_t
atomic< uint_fast64_t > atomic_uint_fast64_t
atomic_uint_fast64_t
Explicit specialization for unsigned short.
Explicit specialization for unsigned long.
atomic< unsigned char > atomic_uchar
atomic_uchar
atomic< int_fast32_t > atomic_int_fast32_t
atomic_int_fast32_t
Explicit specialization for signed char.
atomic< signed char > atomic_schar
atomic_schar
atomic< int_least64_t > atomic_int_least64_t
atomic_int_least64_t
typename enable_if< __array_traits< _Tp, _Nm >::_Is_swappable::value >::type swap(array< _Tp, _Nm > &__one, array< _Tp, _Nm > &__two) noexcept(noexcept(__one.swap(__two)))
swap
atomic< uint_fast32_t > atomic_uint_fast32_t
atomic_uint_fast32_t
atomic< int_least8_t > atomic_int_least8_t
atomic_int_least8_t
atomic< wchar_t > atomic_wchar_t
atomic_wchar_t
atomic< int_fast64_t > atomic_int_fast64_t
atomic_int_fast64_t
atomic< int32_t > atomic_int32_t
atomic_int32_t
atomic< uint_least16_t > atomic_uint_least16_t
atomic_uint_least16_t
atomic< long > atomic_long
atomic_long
atomic< int_fast16_t > atomic_int_fast16_t
atomic_int_fast16_t
Explicit specialization for int.
Explicit specialization for char16_t.
Explicit specialization for short.
atomic< int_least16_t > atomic_int_least16_t
atomic_int_least16_t
atomic< char16_t > atomic_char16_t
atomic_char16_t
Explicit specialization for unsigned int.
atomic< uint_least32_t > atomic_uint_least32_t
atomic_uint_least32_t
Explicit specialization for char32_t.
atomic< size_t > atomic_size_t
atomic_size_t
Explicit specialization for char.
atomic< unsigned long long > atomic_ullong
atomic_ullong
constexpr _Tp * __addressof(_Tp &__r) noexcept
Same as C++11 std::addressof.
atomic< ptrdiff_t > atomic_ptrdiff_t
atomic_ptrdiff_t
Generic atomic type, primary class template.
atomic< uint_fast16_t > atomic_uint_fast16_t
atomic_uint_fast16_t
atomic< uint8_t > atomic_uint8_t
atomic_uint8_t
atomic< int16_t > atomic_int16_t
atomic_int16_t
atomic< intmax_t > atomic_intmax_t
atomic_intmax_t
atomic< long long > atomic_llong
atomic_llong
memory_order
Enumeration for memory_order.
atomic< int8_t > atomic_int8_t
atomic_int8_t
atomic< short > atomic_short
atomic_short
atomic< char > atomic_char
atomic_char
atomic< uint_fast8_t > atomic_uint_fast8_t
atomic_uint_fast8_t