2 //===--------------------------- atomic -----------------------------------===//
4 // The LLVM Compiler Infrastructure
6 // This file is distributed under the University of Illinois Open Source
7 // License. See LICENSE.TXT for details.
9 //===----------------------------------------------------------------------===//
11 #ifndef _LIBCPP_ATOMIC
12 #define _LIBCPP_ATOMIC
20 // order and consistency
22 typedef enum memory_order
25 memory_order_consume, // load-consume
26 memory_order_acquire, // load-acquire
27 memory_order_release, // store-release
28 memory_order_acq_rel, // store-release load-acquire
29 memory_order_seq_cst // store-release load-acquire
32 template <class T> T kill_dependency(T y) noexcept;
36 #define ATOMIC_BOOL_LOCK_FREE unspecified
37 #define ATOMIC_CHAR_LOCK_FREE unspecified
38 #define ATOMIC_CHAR16_T_LOCK_FREE unspecified
39 #define ATOMIC_CHAR32_T_LOCK_FREE unspecified
40 #define ATOMIC_WCHAR_T_LOCK_FREE unspecified
41 #define ATOMIC_SHORT_LOCK_FREE unspecified
42 #define ATOMIC_INT_LOCK_FREE unspecified
43 #define ATOMIC_LONG_LOCK_FREE unspecified
44 #define ATOMIC_LLONG_LOCK_FREE unspecified
45 #define ATOMIC_POINTER_LOCK_FREE unspecified
47 // flag type and operations
49 typedef struct atomic_flag
51 bool test_and_set(memory_order m = memory_order_seq_cst) volatile noexcept;
52 bool test_and_set(memory_order m = memory_order_seq_cst) noexcept;
53 void clear(memory_order m = memory_order_seq_cst) volatile noexcept;
54 void clear(memory_order m = memory_order_seq_cst) noexcept;
55 atomic_flag() noexcept = default;
56 atomic_flag(const atomic_flag&) = delete;
57 atomic_flag& operator=(const atomic_flag&) = delete;
58 atomic_flag& operator=(const atomic_flag&) volatile = delete;
62 atomic_flag_test_and_set(volatile atomic_flag* obj) noexcept;
65 atomic_flag_test_and_set(atomic_flag* obj) noexcept;
68 atomic_flag_test_and_set_explicit(volatile atomic_flag* obj,
69 memory_order m) noexcept;
72 atomic_flag_test_and_set_explicit(atomic_flag* obj, memory_order m) noexcept;
75 atomic_flag_clear(volatile atomic_flag* obj) noexcept;
78 atomic_flag_clear(atomic_flag* obj) noexcept;
81 atomic_flag_clear_explicit(volatile atomic_flag* obj, memory_order m) noexcept;
84 atomic_flag_clear_explicit(atomic_flag* obj, memory_order m) noexcept;
86 #define ATOMIC_FLAG_INIT see below
87 #define ATOMIC_VAR_INIT(value) see below
92 bool is_lock_free() const volatile noexcept;
93 bool is_lock_free() const noexcept;
94 void store(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
95 void store(T desr, memory_order m = memory_order_seq_cst) noexcept;
96 T load(memory_order m = memory_order_seq_cst) const volatile noexcept;
97 T load(memory_order m = memory_order_seq_cst) const noexcept;
98 operator T() const volatile noexcept;
99 operator T() const noexcept;
100 T exchange(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
101 T exchange(T desr, memory_order m = memory_order_seq_cst) noexcept;
102 bool compare_exchange_weak(T& expc, T desr,
103 memory_order s, memory_order f) volatile noexcept;
104 bool compare_exchange_weak(T& expc, T desr, memory_order s, memory_order f) noexcept;
105 bool compare_exchange_strong(T& expc, T desr,
106 memory_order s, memory_order f) volatile noexcept;
107 bool compare_exchange_strong(T& expc, T desr,
108 memory_order s, memory_order f) noexcept;
109 bool compare_exchange_weak(T& expc, T desr,
110 memory_order m = memory_order_seq_cst) volatile noexcept;
111 bool compare_exchange_weak(T& expc, T desr,
112 memory_order m = memory_order_seq_cst) noexcept;
113 bool compare_exchange_strong(T& expc, T desr,
114 memory_order m = memory_order_seq_cst) volatile noexcept;
115 bool compare_exchange_strong(T& expc, T desr,
116 memory_order m = memory_order_seq_cst) noexcept;
118 atomic() noexcept = default;
119 constexpr atomic(T desr) noexcept;
120 atomic(const atomic&) = delete;
121 atomic& operator=(const atomic&) = delete;
122 atomic& operator=(const atomic&) volatile = delete;
123 T operator=(T) volatile noexcept;
124 T operator=(T) noexcept;
128 struct atomic<integral>
130 bool is_lock_free() const volatile noexcept;
131 bool is_lock_free() const noexcept;
132 void store(integral desr, memory_order m = memory_order_seq_cst) volatile noexcept;
133 void store(integral desr, memory_order m = memory_order_seq_cst) noexcept;
134 integral load(memory_order m = memory_order_seq_cst) const volatile noexcept;
135 integral load(memory_order m = memory_order_seq_cst) const noexcept;
136 operator integral() const volatile noexcept;
137 operator integral() const noexcept;
138 integral exchange(integral desr,
139 memory_order m = memory_order_seq_cst) volatile noexcept;
140 integral exchange(integral desr, memory_order m = memory_order_seq_cst) noexcept;
141 bool compare_exchange_weak(integral& expc, integral desr,
142 memory_order s, memory_order f) volatile noexcept;
143 bool compare_exchange_weak(integral& expc, integral desr,
144 memory_order s, memory_order f) noexcept;
145 bool compare_exchange_strong(integral& expc, integral desr,
146 memory_order s, memory_order f) volatile noexcept;
147 bool compare_exchange_strong(integral& expc, integral desr,
148 memory_order s, memory_order f) noexcept;
149 bool compare_exchange_weak(integral& expc, integral desr,
150 memory_order m = memory_order_seq_cst) volatile noexcept;
151 bool compare_exchange_weak(integral& expc, integral desr,
152 memory_order m = memory_order_seq_cst) noexcept;
153 bool compare_exchange_strong(integral& expc, integral desr,
154 memory_order m = memory_order_seq_cst) volatile noexcept;
155 bool compare_exchange_strong(integral& expc, integral desr,
156 memory_order m = memory_order_seq_cst) noexcept;
159 fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
160 integral fetch_add(integral op, memory_order m = memory_order_seq_cst) noexcept;
162 fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
163 integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) noexcept;
165 fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
166 integral fetch_and(integral op, memory_order m = memory_order_seq_cst) noexcept;
168 fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
169 integral fetch_or(integral op, memory_order m = memory_order_seq_cst) noexcept;
171 fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
172 integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) noexcept;
174 atomic() noexcept = default;
175 constexpr atomic(integral desr) noexcept;
176 atomic(const atomic&) = delete;
177 atomic& operator=(const atomic&) = delete;
178 atomic& operator=(const atomic&) volatile = delete;
179 integral operator=(integral desr) volatile noexcept;
180 integral operator=(integral desr) noexcept;
182 integral operator++(int) volatile noexcept;
183 integral operator++(int) noexcept;
184 integral operator--(int) volatile noexcept;
185 integral operator--(int) noexcept;
186 integral operator++() volatile noexcept;
187 integral operator++() noexcept;
188 integral operator--() volatile noexcept;
189 integral operator--() noexcept;
190 integral operator+=(integral op) volatile noexcept;
191 integral operator+=(integral op) noexcept;
192 integral operator-=(integral op) volatile noexcept;
193 integral operator-=(integral op) noexcept;
194 integral operator&=(integral op) volatile noexcept;
195 integral operator&=(integral op) noexcept;
196 integral operator|=(integral op) volatile noexcept;
197 integral operator|=(integral op) noexcept;
198 integral operator^=(integral op) volatile noexcept;
199 integral operator^=(integral op) noexcept;
205 bool is_lock_free() const volatile noexcept;
206 bool is_lock_free() const noexcept;
207 void store(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
208 void store(T* desr, memory_order m = memory_order_seq_cst) noexcept;
209 T* load(memory_order m = memory_order_seq_cst) const volatile noexcept;
210 T* load(memory_order m = memory_order_seq_cst) const noexcept;
211 operator T*() const volatile noexcept;
212 operator T*() const noexcept;
213 T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
214 T* exchange(T* desr, memory_order m = memory_order_seq_cst) noexcept;
215 bool compare_exchange_weak(T*& expc, T* desr,
216 memory_order s, memory_order f) volatile noexcept;
217 bool compare_exchange_weak(T*& expc, T* desr,
218 memory_order s, memory_order f) noexcept;
219 bool compare_exchange_strong(T*& expc, T* desr,
220 memory_order s, memory_order f) volatile noexcept;
221 bool compare_exchange_strong(T*& expc, T* desr,
222 memory_order s, memory_order f) noexcept;
223 bool compare_exchange_weak(T*& expc, T* desr,
224 memory_order m = memory_order_seq_cst) volatile noexcept;
225 bool compare_exchange_weak(T*& expc, T* desr,
226 memory_order m = memory_order_seq_cst) noexcept;
227 bool compare_exchange_strong(T*& expc, T* desr,
228 memory_order m = memory_order_seq_cst) volatile noexcept;
229 bool compare_exchange_strong(T*& expc, T* desr,
230 memory_order m = memory_order_seq_cst) noexcept;
231 T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
232 T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
233 T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
234 T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
236 atomic() noexcept = default;
237 constexpr atomic(T* desr) noexcept;
238 atomic(const atomic&) = delete;
239 atomic& operator=(const atomic&) = delete;
240 atomic& operator=(const atomic&) volatile = delete;
242 T* operator=(T*) volatile noexcept;
243 T* operator=(T*) noexcept;
244 T* operator++(int) volatile noexcept;
245 T* operator++(int) noexcept;
246 T* operator--(int) volatile noexcept;
247 T* operator--(int) noexcept;
248 T* operator++() volatile noexcept;
249 T* operator++() noexcept;
250 T* operator--() volatile noexcept;
251 T* operator--() noexcept;
252 T* operator+=(ptrdiff_t op) volatile noexcept;
253 T* operator+=(ptrdiff_t op) noexcept;
254 T* operator-=(ptrdiff_t op) volatile noexcept;
255 T* operator-=(ptrdiff_t op) noexcept;
261 atomic_is_lock_free(const volatile atomic<T>* obj) noexcept;
265 atomic_is_lock_free(const atomic<T>* obj) noexcept;
269 atomic_init(volatile atomic<T>* obj, T desr) noexcept;
273 atomic_init(atomic<T>* obj, T desr) noexcept;
277 atomic_store(volatile atomic<T>* obj, T desr) noexcept;
281 atomic_store(atomic<T>* obj, T desr) noexcept;
285 atomic_store_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
289 atomic_store_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
293 atomic_load(const volatile atomic<T>* obj) noexcept;
297 atomic_load(const atomic<T>* obj) noexcept;
301 atomic_load_explicit(const volatile atomic<T>* obj, memory_order m) noexcept;
305 atomic_load_explicit(const atomic<T>* obj, memory_order m) noexcept;
309 atomic_exchange(volatile atomic<T>* obj, T desr) noexcept;
313 atomic_exchange(atomic<T>* obj, T desr) noexcept;
317 atomic_exchange_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
321 atomic_exchange_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
325 atomic_compare_exchange_weak(volatile atomic<T>* obj, T* expc, T desr) noexcept;
329 atomic_compare_exchange_weak(atomic<T>* obj, T* expc, T desr) noexcept;
333 atomic_compare_exchange_strong(volatile atomic<T>* obj, T* expc, T desr) noexcept;
337 atomic_compare_exchange_strong(atomic<T>* obj, T* expc, T desr) noexcept;
341 atomic_compare_exchange_weak_explicit(volatile atomic<T>* obj, T* expc,
343 memory_order s, memory_order f) noexcept;
347 atomic_compare_exchange_weak_explicit(atomic<T>* obj, T* expc, T desr,
348 memory_order s, memory_order f) noexcept;
352 atomic_compare_exchange_strong_explicit(volatile atomic<T>* obj,
354 memory_order s, memory_order f) noexcept;
358 atomic_compare_exchange_strong_explicit(atomic<T>* obj, T* expc,
360 memory_order s, memory_order f) noexcept;
362 template <class Integral>
364 atomic_fetch_add(volatile atomic<Integral>* obj, Integral op) noexcept;
366 template <class Integral>
368 atomic_fetch_add(atomic<Integral>* obj, Integral op) noexcept;
370 template <class Integral>
372 atomic_fetch_add_explicit(volatile atomic<Integral>* obj, Integral op,
373 memory_order m) noexcept;
374 template <class Integral>
376 atomic_fetch_add_explicit(atomic<Integral>* obj, Integral op,
377 memory_order m) noexcept;
378 template <class Integral>
380 atomic_fetch_sub(volatile atomic<Integral>* obj, Integral op) noexcept;
382 template <class Integral>
384 atomic_fetch_sub(atomic<Integral>* obj, Integral op) noexcept;
386 template <class Integral>
388 atomic_fetch_sub_explicit(volatile atomic<Integral>* obj, Integral op,
389 memory_order m) noexcept;
390 template <class Integral>
392 atomic_fetch_sub_explicit(atomic<Integral>* obj, Integral op,
393 memory_order m) noexcept;
394 template <class Integral>
396 atomic_fetch_and(volatile atomic<Integral>* obj, Integral op) noexcept;
398 template <class Integral>
400 atomic_fetch_and(atomic<Integral>* obj, Integral op) noexcept;
402 template <class Integral>
404 atomic_fetch_and_explicit(volatile atomic<Integral>* obj, Integral op,
405 memory_order m) noexcept;
406 template <class Integral>
408 atomic_fetch_and_explicit(atomic<Integral>* obj, Integral op,
409 memory_order m) noexcept;
410 template <class Integral>
412 atomic_fetch_or(volatile atomic<Integral>* obj, Integral op) noexcept;
414 template <class Integral>
416 atomic_fetch_or(atomic<Integral>* obj, Integral op) noexcept;
418 template <class Integral>
420 atomic_fetch_or_explicit(volatile atomic<Integral>* obj, Integral op,
421 memory_order m) noexcept;
422 template <class Integral>
424 atomic_fetch_or_explicit(atomic<Integral>* obj, Integral op,
425 memory_order m) noexcept;
426 template <class Integral>
428 atomic_fetch_xor(volatile atomic<Integral>* obj, Integral op) noexcept;
430 template <class Integral>
432 atomic_fetch_xor(atomic<Integral>* obj, Integral op) noexcept;
434 template <class Integral>
436 atomic_fetch_xor_explicit(volatile atomic<Integral>* obj, Integral op,
437 memory_order m) noexcept;
438 template <class Integral>
440 atomic_fetch_xor_explicit(atomic<Integral>* obj, Integral op,
441 memory_order m) noexcept;
445 atomic_fetch_add(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
449 atomic_fetch_add(atomic<T*>* obj, ptrdiff_t op) noexcept;
453 atomic_fetch_add_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
454 memory_order m) noexcept;
457 atomic_fetch_add_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
461 atomic_fetch_sub(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
465 atomic_fetch_sub(atomic<T*>* obj, ptrdiff_t op) noexcept;
469 atomic_fetch_sub_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
470 memory_order m) noexcept;
473 atomic_fetch_sub_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
475 // Atomics for standard typedef types
477 typedef atomic<bool> atomic_bool;
478 typedef atomic<char> atomic_char;
479 typedef atomic<signed char> atomic_schar;
480 typedef atomic<unsigned char> atomic_uchar;
481 typedef atomic<short> atomic_short;
482 typedef atomic<unsigned short> atomic_ushort;
483 typedef atomic<int> atomic_int;
484 typedef atomic<unsigned int> atomic_uint;
485 typedef atomic<long> atomic_long;
486 typedef atomic<unsigned long> atomic_ulong;
487 typedef atomic<long long> atomic_llong;
488 typedef atomic<unsigned long long> atomic_ullong;
489 typedef atomic<char16_t> atomic_char16_t;
490 typedef atomic<char32_t> atomic_char32_t;
491 typedef atomic<wchar_t> atomic_wchar_t;
493 typedef atomic<int_least8_t> atomic_int_least8_t;
494 typedef atomic<uint_least8_t> atomic_uint_least8_t;
495 typedef atomic<int_least16_t> atomic_int_least16_t;
496 typedef atomic<uint_least16_t> atomic_uint_least16_t;
497 typedef atomic<int_least32_t> atomic_int_least32_t;
498 typedef atomic<uint_least32_t> atomic_uint_least32_t;
499 typedef atomic<int_least64_t> atomic_int_least64_t;
500 typedef atomic<uint_least64_t> atomic_uint_least64_t;
502 typedef atomic<int_fast8_t> atomic_int_fast8_t;
503 typedef atomic<uint_fast8_t> atomic_uint_fast8_t;
504 typedef atomic<int_fast16_t> atomic_int_fast16_t;
505 typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
506 typedef atomic<int_fast32_t> atomic_int_fast32_t;
507 typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
508 typedef atomic<int_fast64_t> atomic_int_fast64_t;
509 typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
511 typedef atomic<intptr_t> atomic_intptr_t;
512 typedef atomic<uintptr_t> atomic_uintptr_t;
513 typedef atomic<size_t> atomic_size_t;
514 typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
515 typedef atomic<intmax_t> atomic_intmax_t;
516 typedef atomic<uintmax_t> atomic_uintmax_t;
520 void atomic_thread_fence(memory_order m) noexcept;
521 void atomic_signal_fence(memory_order m) noexcept;
530 #include <type_traits>
532 #if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
533 #pragma GCC system_header
536 #ifdef _LIBCPP_HAS_NO_THREADS
537 #error <atomic> is not supported on this single threaded system
538 #else // !_LIBCPP_HAS_NO_THREADS
540 _LIBCPP_BEGIN_NAMESPACE_STD
542 #if !__has_feature(cxx_atomic) && _GNUC_VER < 407
543 #error <atomic> is not implemented
// C++11 [atomics.order]: memory-ordering constraints accepted by every
// atomic operation.  The code below compares against these enumerators by
// identity only (see __to_gcc_order), not by numeric value.
typedef enum memory_order
{
    memory_order_relaxed, memory_order_consume, memory_order_acquire,
    memory_order_release, memory_order_acq_rel, memory_order_seq_cst
} memory_order;
553 namespace __gcc_atomic {
554 template <typename _Tp>
555 struct __gcc_atomic_t {
558 static_assert(is_trivially_copyable<_Tp>::value,
559 "std::atomic<Tp> requires that 'Tp' be a trivially copyable type");
562 _LIBCPP_INLINE_VISIBILITY
563 #ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
564 __gcc_atomic_t() _NOEXCEPT = default;
566 __gcc_atomic_t() _NOEXCEPT : __a_value() {}
567 #endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
568 _LIBCPP_CONSTEXPR explicit __gcc_atomic_t(_Tp value) _NOEXCEPT
569 : __a_value(value) {}
572 #define _Atomic(x) __gcc_atomic::__gcc_atomic_t<x>
574 template <typename _Tp> _Tp __create();
576 template <typename _Tp, typename _Td>
577 typename enable_if<sizeof(_Tp()->__a_value = __create<_Td>()), char>::type
578 __test_atomic_assignable(int);
579 template <typename _Tp, typename _Up>
580 __two __test_atomic_assignable(...);
582 template <typename _Tp, typename _Td>
583 struct __can_assign {
584 static const bool value =
585 sizeof(__test_atomic_assignable<_Tp, _Td>(1)) == sizeof(char);
588 static inline _LIBCPP_CONSTEXPR int __to_gcc_order(memory_order __order) {
589 // Avoid switch statement to make this a constexpr.
590 return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
591 (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
592 (__order == memory_order_release ? __ATOMIC_RELEASE:
593 (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
594 (__order == memory_order_acq_rel ? __ATOMIC_ACQ_REL:
595 __ATOMIC_CONSUME))));
598 static inline _LIBCPP_CONSTEXPR int __to_gcc_failure_order(memory_order __order) {
599 // Avoid switch statement to make this a constexpr.
600 return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
601 (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
602 (__order == memory_order_release ? __ATOMIC_RELAXED:
603 (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
604 (__order == memory_order_acq_rel ? __ATOMIC_ACQUIRE:
605 __ATOMIC_CONSUME))));
608 } // namespace __gcc_atomic
610 template <typename _Tp>
613 __gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value>::type
614 __c11_atomic_init(volatile _Atomic(_Tp)* __a, _Tp __val) {
615 __a->__a_value = __val;
618 template <typename _Tp>
621 !__gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value &&
622 __gcc_atomic::__can_assign< _Atomic(_Tp)*, _Tp>::value>::type
623 __c11_atomic_init(volatile _Atomic(_Tp)* __a, _Tp __val) {
624 // [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because
625 // the default operator= in an object is not volatile, a byte-by-byte copy
627 volatile char* to = reinterpret_cast<volatile char*>(&__a->__a_value);
628 volatile char* end = to + sizeof(_Tp);
629 char* from = reinterpret_cast<char*>(&__val);
635 template <typename _Tp>
636 static inline void __c11_atomic_init(_Atomic(_Tp)* __a, _Tp __val) {
637 __a->__a_value = __val;
640 static inline void __c11_atomic_thread_fence(memory_order __order) {
641 __atomic_thread_fence(__gcc_atomic::__to_gcc_order(__order));
644 static inline void __c11_atomic_signal_fence(memory_order __order) {
645 __atomic_signal_fence(__gcc_atomic::__to_gcc_order(__order));
648 template <typename _Tp>
649 static inline void __c11_atomic_store(volatile _Atomic(_Tp)* __a, _Tp __val,
650 memory_order __order) {
651 return __atomic_store(&__a->__a_value, &__val,
652 __gcc_atomic::__to_gcc_order(__order));
655 template <typename _Tp>
656 static inline void __c11_atomic_store(_Atomic(_Tp)* __a, _Tp __val,
657 memory_order __order) {
658 __atomic_store(&__a->__a_value, &__val,
659 __gcc_atomic::__to_gcc_order(__order));
662 template <typename _Tp>
663 static inline _Tp __c11_atomic_load(volatile _Atomic(_Tp)* __a,
664 memory_order __order) {
666 __atomic_load(&__a->__a_value, &__ret,
667 __gcc_atomic::__to_gcc_order(__order));
671 template <typename _Tp>
672 static inline _Tp __c11_atomic_load(_Atomic(_Tp)* __a, memory_order __order) {
674 __atomic_load(&__a->__a_value, &__ret,
675 __gcc_atomic::__to_gcc_order(__order));
679 template <typename _Tp>
680 static inline _Tp __c11_atomic_exchange(volatile _Atomic(_Tp)* __a,
681 _Tp __value, memory_order __order) {
683 __atomic_exchange(&__a->__a_value, &__value, &__ret,
684 __gcc_atomic::__to_gcc_order(__order));
688 template <typename _Tp>
689 static inline _Tp __c11_atomic_exchange(_Atomic(_Tp)* __a, _Tp __value,
690 memory_order __order) {
692 __atomic_exchange(&__a->__a_value, &__value, &__ret,
693 __gcc_atomic::__to_gcc_order(__order));
697 template <typename _Tp>
698 static inline bool __c11_atomic_compare_exchange_strong(
699 volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
700 memory_order __success, memory_order __failure) {
701 return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
703 __gcc_atomic::__to_gcc_order(__success),
704 __gcc_atomic::__to_gcc_failure_order(__failure));
707 template <typename _Tp>
708 static inline bool __c11_atomic_compare_exchange_strong(
709 _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
710 memory_order __failure) {
711 return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
713 __gcc_atomic::__to_gcc_order(__success),
714 __gcc_atomic::__to_gcc_failure_order(__failure));
717 template <typename _Tp>
718 static inline bool __c11_atomic_compare_exchange_weak(
719 volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
720 memory_order __success, memory_order __failure) {
721 return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
723 __gcc_atomic::__to_gcc_order(__success),
724 __gcc_atomic::__to_gcc_failure_order(__failure));
727 template <typename _Tp>
728 static inline bool __c11_atomic_compare_exchange_weak(
729 _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
730 memory_order __failure) {
731 return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
733 __gcc_atomic::__to_gcc_order(__success),
734 __gcc_atomic::__to_gcc_failure_order(__failure));
// Scaling factor for atomic arithmetic: fetch_add/fetch_sub on an
// atomic<_Tp*> advance by whole objects, so the delta is multiplied by
// sizeof(_Tp); for non-pointer types the factor is 1.
template <typename _Tp>
struct __skip_amt { enum {value = 1}; };

template <typename _Tp>
struct __skip_amt<_Tp*> { enum {value = sizeof(_Tp)}; };

// FIXME: Haven't figured out what the spec says about using arrays with
// atomic_fetch_add. Force a failure rather than creating bad behavior.
template <typename _Tp>
struct __skip_amt<_Tp[]> { };
// NOTE: template parameter renamed from `n` to the reserved `_Np`; a
// standard-library header must not use names a user could #define.
template <typename _Tp, int _Np>
struct __skip_amt<_Tp[_Np]> { };
750 template <typename _Tp, typename _Td>
751 static inline _Tp __c11_atomic_fetch_add(volatile _Atomic(_Tp)* __a,
752 _Td __delta, memory_order __order) {
753 return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
754 __gcc_atomic::__to_gcc_order(__order));
757 template <typename _Tp, typename _Td>
758 static inline _Tp __c11_atomic_fetch_add(_Atomic(_Tp)* __a, _Td __delta,
759 memory_order __order) {
760 return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
761 __gcc_atomic::__to_gcc_order(__order));
764 template <typename _Tp, typename _Td>
765 static inline _Tp __c11_atomic_fetch_sub(volatile _Atomic(_Tp)* __a,
766 _Td __delta, memory_order __order) {
767 return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
768 __gcc_atomic::__to_gcc_order(__order));
771 template <typename _Tp, typename _Td>
772 static inline _Tp __c11_atomic_fetch_sub(_Atomic(_Tp)* __a, _Td __delta,
773 memory_order __order) {
774 return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
775 __gcc_atomic::__to_gcc_order(__order));
778 template <typename _Tp>
779 static inline _Tp __c11_atomic_fetch_and(volatile _Atomic(_Tp)* __a,
780 _Tp __pattern, memory_order __order) {
781 return __atomic_fetch_and(&__a->__a_value, __pattern,
782 __gcc_atomic::__to_gcc_order(__order));
785 template <typename _Tp>
786 static inline _Tp __c11_atomic_fetch_and(_Atomic(_Tp)* __a,
787 _Tp __pattern, memory_order __order) {
788 return __atomic_fetch_and(&__a->__a_value, __pattern,
789 __gcc_atomic::__to_gcc_order(__order));
792 template <typename _Tp>
793 static inline _Tp __c11_atomic_fetch_or(volatile _Atomic(_Tp)* __a,
794 _Tp __pattern, memory_order __order) {
795 return __atomic_fetch_or(&__a->__a_value, __pattern,
796 __gcc_atomic::__to_gcc_order(__order));
799 template <typename _Tp>
800 static inline _Tp __c11_atomic_fetch_or(_Atomic(_Tp)* __a, _Tp __pattern,
801 memory_order __order) {
802 return __atomic_fetch_or(&__a->__a_value, __pattern,
803 __gcc_atomic::__to_gcc_order(__order));
806 template <typename _Tp>
807 static inline _Tp __c11_atomic_fetch_xor(volatile _Atomic(_Tp)* __a,
808 _Tp __pattern, memory_order __order) {
809 return __atomic_fetch_xor(&__a->__a_value, __pattern,
810 __gcc_atomic::__to_gcc_order(__order));
813 template <typename _Tp>
814 static inline _Tp __c11_atomic_fetch_xor(_Atomic(_Tp)* __a, _Tp __pattern,
815 memory_order __order) {
816 return __atomic_fetch_xor(&__a->__a_value, __pattern,
817 __gcc_atomic::__to_gcc_order(__order));
819 #endif // _GNUC_VER >= 407
822 inline _LIBCPP_INLINE_VISIBILITY
824 kill_dependency(_Tp __y) _NOEXCEPT
// Base class of std::atomic<_Tp>.  The bool parameter (computed from _Tp)
// selects the arithmetic overlay; this primary template is the
// non-integral ("false") flavor providing only the core operations:
// is_lock_free, load/store, exchange, and compare-exchange.
template <class _Tp, bool = is_integral<_Tp>::value && !is_same<_Tp, bool>::value>
struct __atomic_base // false
// Underlying storage; mutable so const-qualified operations (load, the
// conversion operator) can pass a non-const pointer to the __c11_* helpers.
mutable _Atomic(_Tp) __a_;
_LIBCPP_INLINE_VISIBILITY
bool is_lock_free() const volatile _NOEXCEPT
#if __has_feature(cxx_atomic)
// Clang path: ask the C11 atomic support by object size.
return __c11_atomic_is_lock_free(sizeof(_Tp));
// GCC path: query the builtin by size (0 = no particular address).
return __atomic_is_lock_free(sizeof(_Tp), 0);
_LIBCPP_INLINE_VISIBILITY
// Non-volatile overload defers to the volatile one.
bool is_lock_free() const _NOEXCEPT
{return static_cast<__atomic_base const volatile*>(this)->is_lock_free();}
_LIBCPP_INLINE_VISIBILITY
void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
{__c11_atomic_store(&__a_, __d, __m);}
_LIBCPP_INLINE_VISIBILITY
void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
{__c11_atomic_store(&__a_, __d, __m);}
_LIBCPP_INLINE_VISIBILITY
_Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
{return __c11_atomic_load(&__a_, __m);}
_LIBCPP_INLINE_VISIBILITY
_Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
{return __c11_atomic_load(&__a_, __m);}
_LIBCPP_INLINE_VISIBILITY
// Implicit conversion performs a seq_cst load.
operator _Tp() const volatile _NOEXCEPT {return load();}
_LIBCPP_INLINE_VISIBILITY
operator _Tp() const _NOEXCEPT {return load();}
_LIBCPP_INLINE_VISIBILITY
_Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
{return __c11_atomic_exchange(&__a_, __d, __m);}
_LIBCPP_INLINE_VISIBILITY
_Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
{return __c11_atomic_exchange(&__a_, __d, __m);}
// Two-order compare-exchange forms: __s applies on success, __f (a load
// ordering) on failure.
_LIBCPP_INLINE_VISIBILITY
bool compare_exchange_weak(_Tp& __e, _Tp __d,
memory_order __s, memory_order __f) volatile _NOEXCEPT
{return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
_LIBCPP_INLINE_VISIBILITY
bool compare_exchange_weak(_Tp& __e, _Tp __d,
memory_order __s, memory_order __f) _NOEXCEPT
{return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
_LIBCPP_INLINE_VISIBILITY
bool compare_exchange_strong(_Tp& __e, _Tp __d,
memory_order __s, memory_order __f) volatile _NOEXCEPT
{return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
_LIBCPP_INLINE_VISIBILITY
bool compare_exchange_strong(_Tp& __e, _Tp __d,
memory_order __s, memory_order __f) _NOEXCEPT
{return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
// Single-order forms: the same order __m is passed for both success and
// failure.
_LIBCPP_INLINE_VISIBILITY
bool compare_exchange_weak(_Tp& __e, _Tp __d,
memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
{return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
_LIBCPP_INLINE_VISIBILITY
bool compare_exchange_weak(_Tp& __e, _Tp __d,
memory_order __m = memory_order_seq_cst) _NOEXCEPT
{return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
_LIBCPP_INLINE_VISIBILITY
bool compare_exchange_strong(_Tp& __e, _Tp __d,
memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
{return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
_LIBCPP_INLINE_VISIBILITY
bool compare_exchange_strong(_Tp& __e, _Tp __d,
memory_order __m = memory_order_seq_cst) _NOEXCEPT
{return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
_LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
__atomic_base() _NOEXCEPT = default;
__atomic_base() _NOEXCEPT : __a_() {}
#endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
_LIBCPP_INLINE_VISIBILITY
_LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {}
// Atomics are neither copyable nor copy-assignable.
#ifndef _LIBCPP_HAS_NO_DELETED_FUNCTIONS
__atomic_base(const __atomic_base&) = delete;
__atomic_base& operator=(const __atomic_base&) = delete;
__atomic_base& operator=(const __atomic_base&) volatile = delete;
#else // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
// Pre-C++11 emulation of deleted members: declared but never defined.
__atomic_base(const __atomic_base&);
__atomic_base& operator=(const __atomic_base&);
__atomic_base& operator=(const __atomic_base&) volatile;
#endif // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
// Partial specialization of __atomic_base for integral _Tp (second template
// argument == true).  It inherits the plain load/store/exchange/CAS surface
// from __atomic_base<_Tp, false> and layers on the arithmetic and bitwise
// read-modify-write operations required of std::atomic<integral>.
// All operations forward to the compiler's __c11_atomic_* builtins on the
// inherited __a_ member; each has a volatile and a non-volatile overload.
927 struct __atomic_base<_Tp, true>
928 : public __atomic_base<_Tp, false>
930 typedef __atomic_base<_Tp, false> __base;
931 _LIBCPP_INLINE_VISIBILITY
932 __atomic_base() _NOEXCEPT _LIBCPP_DEFAULT
933 _LIBCPP_INLINE_VISIBILITY
// constexpr so a static atomic can be constant-initialized (no dynamic init).
934 _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}
// fetch_* return the value held BEFORE the operation, per [atomics.types.int].
// The memory_order parameter defaults to sequential consistency.
936 _LIBCPP_INLINE_VISIBILITY
937 _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
938 {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
939 _LIBCPP_INLINE_VISIBILITY
940 _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
941 {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
942 _LIBCPP_INLINE_VISIBILITY
943 _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
944 {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
945 _LIBCPP_INLINE_VISIBILITY
946 _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
947 {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
948 _LIBCPP_INLINE_VISIBILITY
949 _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
950 {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
951 _LIBCPP_INLINE_VISIBILITY
952 _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
953 {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
954 _LIBCPP_INLINE_VISIBILITY
955 _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
956 {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
957 _LIBCPP_INLINE_VISIBILITY
958 _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
959 {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
960 _LIBCPP_INLINE_VISIBILITY
961 _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
962 {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}
963 _LIBCPP_INLINE_VISIBILITY
964 _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
965 {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}
// Operator forms are defined in terms of the fetch_* primitives above.
// Post-increment/decrement return the OLD value; pre-forms and the
// compound assignments return the NEW value, reconstructed locally
// (e.g. fetch_add(1) + 1) rather than re-reading the atomic.
967 _LIBCPP_INLINE_VISIBILITY
968 _Tp operator++(int) volatile _NOEXCEPT {return fetch_add(_Tp(1));}
969 _LIBCPP_INLINE_VISIBILITY
970 _Tp operator++(int) _NOEXCEPT {return fetch_add(_Tp(1));}
971 _LIBCPP_INLINE_VISIBILITY
972 _Tp operator--(int) volatile _NOEXCEPT {return fetch_sub(_Tp(1));}
973 _LIBCPP_INLINE_VISIBILITY
974 _Tp operator--(int) _NOEXCEPT {return fetch_sub(_Tp(1));}
975 _LIBCPP_INLINE_VISIBILITY
976 _Tp operator++() volatile _NOEXCEPT {return fetch_add(_Tp(1)) + _Tp(1);}
977 _LIBCPP_INLINE_VISIBILITY
978 _Tp operator++() _NOEXCEPT {return fetch_add(_Tp(1)) + _Tp(1);}
979 _LIBCPP_INLINE_VISIBILITY
980 _Tp operator--() volatile _NOEXCEPT {return fetch_sub(_Tp(1)) - _Tp(1);}
981 _LIBCPP_INLINE_VISIBILITY
982 _Tp operator--() _NOEXCEPT {return fetch_sub(_Tp(1)) - _Tp(1);}
983 _LIBCPP_INLINE_VISIBILITY
984 _Tp operator+=(_Tp __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
985 _LIBCPP_INLINE_VISIBILITY
986 _Tp operator+=(_Tp __op) _NOEXCEPT {return fetch_add(__op) + __op;}
987 _LIBCPP_INLINE_VISIBILITY
988 _Tp operator-=(_Tp __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
989 _LIBCPP_INLINE_VISIBILITY
990 _Tp operator-=(_Tp __op) _NOEXCEPT {return fetch_sub(__op) - __op;}
991 _LIBCPP_INLINE_VISIBILITY
992 _Tp operator&=(_Tp __op) volatile _NOEXCEPT {return fetch_and(__op) & __op;}
993 _LIBCPP_INLINE_VISIBILITY
994 _Tp operator&=(_Tp __op) _NOEXCEPT {return fetch_and(__op) & __op;}
995 _LIBCPP_INLINE_VISIBILITY
996 _Tp operator|=(_Tp __op) volatile _NOEXCEPT {return fetch_or(__op) | __op;}
997 _LIBCPP_INLINE_VISIBILITY
998 _Tp operator|=(_Tp __op) _NOEXCEPT {return fetch_or(__op) | __op;}
999 _LIBCPP_INLINE_VISIBILITY
1000 _Tp operator^=(_Tp __op) volatile _NOEXCEPT {return fetch_xor(__op) ^ __op;}
1001 _LIBCPP_INLINE_VISIBILITY
1002 _Tp operator^=(_Tp __op) _NOEXCEPT {return fetch_xor(__op) ^ __op;}
// Primary std::atomic<_Tp> template: a thin wrapper over __atomic_base<_Tp>.
// __atomic_base dispatches on is_integral<_Tp> internally, so integral
// types automatically pick up the arithmetic operations.  Only assignment
// from _Tp is added here; copy construction/assignment of the atomic
// itself is disabled in the base.
1007 template <class _Tp>
1009 : public __atomic_base<_Tp>
1011 typedef __atomic_base<_Tp> __base;
1012 _LIBCPP_INLINE_VISIBILITY
1013 atomic() _NOEXCEPT _LIBCPP_DEFAULT
1014 _LIBCPP_INLINE_VISIBILITY
// constexpr initialization from a value, enabling static constant init.
1015 _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}
// operator= performs a seq_cst store and returns the stored value
// (NOT a reference), per [atomics.types.operations].
1017 _LIBCPP_INLINE_VISIBILITY
1018 _Tp operator=(_Tp __d) volatile _NOEXCEPT
1019 {__base::store(__d); return __d;}
1020 _LIBCPP_INLINE_VISIBILITY
1021 _Tp operator=(_Tp __d) _NOEXCEPT
1022 {__base::store(__d); return __d;}
// Partial specialization std::atomic<_Tp*> for pointer types.  Adds
// pointer arithmetic (fetch_add/fetch_sub take a ptrdiff_t offset) and the
// corresponding operator forms on top of __atomic_base<_Tp*>.
1027 template <class _Tp>
1029 : public __atomic_base<_Tp*>
1031 typedef __atomic_base<_Tp*> __base;
1032 _LIBCPP_INLINE_VISIBILITY
1033 atomic() _NOEXCEPT _LIBCPP_DEFAULT
1034 _LIBCPP_INLINE_VISIBILITY
1035 _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}
// Assignment stores with seq_cst ordering and returns the stored pointer.
1037 _LIBCPP_INLINE_VISIBILITY
1038 _Tp* operator=(_Tp* __d) volatile _NOEXCEPT
1039 {__base::store(__d); return __d;}
1040 _LIBCPP_INLINE_VISIBILITY
1041 _Tp* operator=(_Tp* __d) _NOEXCEPT
1042 {__base::store(__d); return __d;}
// fetch_add/fetch_sub return the pointer value held BEFORE the operation.
// The __c11 builtin scales __op by sizeof(_Tp), like ordinary pointer
// arithmetic.
1044 _LIBCPP_INLINE_VISIBILITY
1045 _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
1047 {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
1048 _LIBCPP_INLINE_VISIBILITY
1049 _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1050 {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
1051 _LIBCPP_INLINE_VISIBILITY
1052 _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
1054 {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
1055 _LIBCPP_INLINE_VISIBILITY
1056 _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1057 {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
// Post-forms return the OLD pointer; pre-forms and compound assignments
// return the NEW pointer, reconstructed from the fetch result.
1059 _LIBCPP_INLINE_VISIBILITY
1060 _Tp* operator++(int) volatile _NOEXCEPT {return fetch_add(1);}
1061 _LIBCPP_INLINE_VISIBILITY
1062 _Tp* operator++(int) _NOEXCEPT {return fetch_add(1);}
1063 _LIBCPP_INLINE_VISIBILITY
1064 _Tp* operator--(int) volatile _NOEXCEPT {return fetch_sub(1);}
1065 _LIBCPP_INLINE_VISIBILITY
1066 _Tp* operator--(int) _NOEXCEPT {return fetch_sub(1);}
1067 _LIBCPP_INLINE_VISIBILITY
1068 _Tp* operator++() volatile _NOEXCEPT {return fetch_add(1) + 1;}
1069 _LIBCPP_INLINE_VISIBILITY
1070 _Tp* operator++() _NOEXCEPT {return fetch_add(1) + 1;}
1071 _LIBCPP_INLINE_VISIBILITY
1072 _Tp* operator--() volatile _NOEXCEPT {return fetch_sub(1) - 1;}
1073 _LIBCPP_INLINE_VISIBILITY
1074 _Tp* operator--() _NOEXCEPT {return fetch_sub(1) - 1;}
1075 _LIBCPP_INLINE_VISIBILITY
1076 _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
1077 _LIBCPP_INLINE_VISIBILITY
1078 _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT {return fetch_add(__op) + __op;}
1079 _LIBCPP_INLINE_VISIBILITY
1080 _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
1081 _LIBCPP_INLINE_VISIBILITY
1082 _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT {return fetch_sub(__op) - __op;}
1085 // atomic_is_lock_free
// C-compatible free-function interface: forwards to the member
// is_lock_free().  volatile and non-volatile overloads are provided, as
// for all the atomic_* free functions below.
1087 template <class _Tp>
1088 inline _LIBCPP_INLINE_VISIBILITY
1090 atomic_is_lock_free(const volatile atomic<_Tp>* __o) _NOEXCEPT
1092 return __o->is_lock_free();
1095 template <class _Tp>
1096 inline _LIBCPP_INLINE_VISIBILITY
1098 atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT
1100 return __o->is_lock_free();
// atomic_init: non-atomic initialization of an atomic object.  Goes
// straight to the __c11_atomic_init builtin on the underlying __a_
// member — there is no synchronization; concurrent access during
// initialization is undefined behavior, per [atomics.types.operations].
1105 template <class _Tp>
1106 inline _LIBCPP_INLINE_VISIBILITY
1108 atomic_init(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1110 __c11_atomic_init(&__o->__a_, __d);
1113 template <class _Tp>
1114 inline _LIBCPP_INLINE_VISIBILITY
1116 atomic_init(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1118 __c11_atomic_init(&__o->__a_, __d);
// atomic_store: seq_cst store via the member store().  (The call bodies
// for the non-explicit overloads are elided in this extract — presumably
// __o->store(__d); — TODO confirm against the full header.)
1123 template <class _Tp>
1124 inline _LIBCPP_INLINE_VISIBILITY
1126 atomic_store(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1131 template <class _Tp>
1132 inline _LIBCPP_INLINE_VISIBILITY
1134 atomic_store(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1139 // atomic_store_explicit
// Same as atomic_store but with a caller-supplied memory_order.
1141 template <class _Tp>
1142 inline _LIBCPP_INLINE_VISIBILITY
1144 atomic_store_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1146 __o->store(__d, __m);
1149 template <class _Tp>
1150 inline _LIBCPP_INLINE_VISIBILITY
1152 atomic_store_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1154 __o->store(__d, __m);
// atomic_load: seq_cst load via the member load().  (The non-explicit
// call bodies are elided in this extract — presumably
// return __o->load(); — TODO confirm against the full header.)
1159 template <class _Tp>
1160 inline _LIBCPP_INLINE_VISIBILITY
1162 atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT
1167 template <class _Tp>
1168 inline _LIBCPP_INLINE_VISIBILITY
1170 atomic_load(const atomic<_Tp>* __o) _NOEXCEPT
1175 // atomic_load_explicit
// Load with a caller-supplied memory_order.
1177 template <class _Tp>
1178 inline _LIBCPP_INLINE_VISIBILITY
1180 atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
1182 return __o->load(__m);
1185 template <class _Tp>
1186 inline _LIBCPP_INLINE_VISIBILITY
1188 atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
1190 return __o->load(__m);
// atomic_exchange: atomically replaces the value and returns the previous
// one; the non-explicit forms use the member default (seq_cst).
1195 template <class _Tp>
1196 inline _LIBCPP_INLINE_VISIBILITY
1198 atomic_exchange(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1200 return __o->exchange(__d);
1203 template <class _Tp>
1204 inline _LIBCPP_INLINE_VISIBILITY
1206 atomic_exchange(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1208 return __o->exchange(__d);
1211 // atomic_exchange_explicit
// Exchange with a caller-supplied memory_order.
1213 template <class _Tp>
1214 inline _LIBCPP_INLINE_VISIBILITY
1216 atomic_exchange_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1218 return __o->exchange(__d, __m);
1221 template <class _Tp>
1222 inline _LIBCPP_INLINE_VISIBILITY
1224 atomic_exchange_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1226 return __o->exchange(__d, __m);
1229 // atomic_compare_exchange_weak
// CAS free functions.  On failure the value observed in *__o is written
// back through __e (the expected-value out-parameter).  The weak form may
// fail spuriously; use it in loops.  Both orders default to seq_cst here.
1231 template <class _Tp>
1232 inline _LIBCPP_INLINE_VISIBILITY
1234 atomic_compare_exchange_weak(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1236 return __o->compare_exchange_weak(*__e, __d);
1239 template <class _Tp>
1240 inline _LIBCPP_INLINE_VISIBILITY
1242 atomic_compare_exchange_weak(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1244 return __o->compare_exchange_weak(*__e, __d);
1247 // atomic_compare_exchange_strong
// Strong form: never fails spuriously.
1249 template <class _Tp>
1250 inline _LIBCPP_INLINE_VISIBILITY
1252 atomic_compare_exchange_strong(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1254 return __o->compare_exchange_strong(*__e, __d);
1257 template <class _Tp>
1258 inline _LIBCPP_INLINE_VISIBILITY
1260 atomic_compare_exchange_strong(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1262 return __o->compare_exchange_strong(*__e, __d);
1265 // atomic_compare_exchange_weak_explicit
// Explicit-order CAS: __s is the ordering on success, __f on failure.
// (Per C++11, __f must be no stronger than __s and must not be
// release/acq_rel.)  On failure the observed value is written to *__e.
1267 template <class _Tp>
1268 inline _LIBCPP_INLINE_VISIBILITY
1270 atomic_compare_exchange_weak_explicit(volatile atomic<_Tp>* __o, _Tp* __e,
1272 memory_order __s, memory_order __f) _NOEXCEPT
1274 return __o->compare_exchange_weak(*__e, __d, __s, __f);
1277 template <class _Tp>
1278 inline _LIBCPP_INLINE_VISIBILITY
1280 atomic_compare_exchange_weak_explicit(atomic<_Tp>* __o, _Tp* __e, _Tp __d,
1281 memory_order __s, memory_order __f) _NOEXCEPT
1283 return __o->compare_exchange_weak(*__e, __d, __s, __f);
1286 // atomic_compare_exchange_strong_explicit
1288 template <class _Tp>
1289 inline _LIBCPP_INLINE_VISIBILITY
1291 atomic_compare_exchange_strong_explicit(volatile atomic<_Tp>* __o,
1293 memory_order __s, memory_order __f) _NOEXCEPT
1295 return __o->compare_exchange_strong(*__e, __d, __s, __f);
1298 template <class _Tp>
1299 inline _LIBCPP_INLINE_VISIBILITY
1301 atomic_compare_exchange_strong_explicit(atomic<_Tp>* __o, _Tp* __e,
1303 memory_order __s, memory_order __f) _NOEXCEPT
1305 return __o->compare_exchange_strong(*__e, __d, __s, __f);
// atomic_fetch_add free functions.  The integral overloads are SFINAE-
// constrained (enable_if on is_integral && !is_same<_Tp,bool>) so they do
// not compete with the pointer overloads and are unavailable for
// atomic<bool>.  Pointer overloads take a ptrdiff_t offset.  All return
// the value held BEFORE the addition.
1310 template <class _Tp>
1311 inline _LIBCPP_INLINE_VISIBILITY
1314 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1317 atomic_fetch_add(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1319 return __o->fetch_add(__op);
1322 template <class _Tp>
1323 inline _LIBCPP_INLINE_VISIBILITY
1326 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1329 atomic_fetch_add(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1331 return __o->fetch_add(__op);
// Pointer forms: offset in elements, like ordinary pointer arithmetic.
1334 template <class _Tp>
1335 inline _LIBCPP_INLINE_VISIBILITY
1337 atomic_fetch_add(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
1339 return __o->fetch_add(__op);
1342 template <class _Tp>
1343 inline _LIBCPP_INLINE_VISIBILITY
1345 atomic_fetch_add(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
1347 return __o->fetch_add(__op);
1350 // atomic_fetch_add_explicit
// Same four overloads with a caller-supplied memory_order.
1352 template <class _Tp>
1353 inline _LIBCPP_INLINE_VISIBILITY
1356 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1359 atomic_fetch_add_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1361 return __o->fetch_add(__op, __m);
1364 template <class _Tp>
1365 inline _LIBCPP_INLINE_VISIBILITY
1368 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1371 atomic_fetch_add_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1373 return __o->fetch_add(__op, __m);
1376 template <class _Tp>
1377 inline _LIBCPP_INLINE_VISIBILITY
1379 atomic_fetch_add_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
1380 memory_order __m) _NOEXCEPT
1382 return __o->fetch_add(__op, __m);
1385 template <class _Tp>
1386 inline _LIBCPP_INLINE_VISIBILITY
1388 atomic_fetch_add_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
1390 return __o->fetch_add(__op, __m);
// atomic_fetch_sub free functions: mirror image of atomic_fetch_add.
// Integral overloads are enable_if-constrained (integral, not bool);
// pointer overloads subtract a ptrdiff_t element offset.  All return the
// value held BEFORE the subtraction.
1395 template <class _Tp>
1396 inline _LIBCPP_INLINE_VISIBILITY
1399 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1402 atomic_fetch_sub(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1404 return __o->fetch_sub(__op);
1407 template <class _Tp>
1408 inline _LIBCPP_INLINE_VISIBILITY
1411 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1414 atomic_fetch_sub(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1416 return __o->fetch_sub(__op);
1419 template <class _Tp>
1420 inline _LIBCPP_INLINE_VISIBILITY
1422 atomic_fetch_sub(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
1424 return __o->fetch_sub(__op);
1427 template <class _Tp>
1428 inline _LIBCPP_INLINE_VISIBILITY
1430 atomic_fetch_sub(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
1432 return __o->fetch_sub(__op);
1435 // atomic_fetch_sub_explicit
// Same four overloads with a caller-supplied memory_order.
1437 template <class _Tp>
1438 inline _LIBCPP_INLINE_VISIBILITY
1441 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1444 atomic_fetch_sub_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1446 return __o->fetch_sub(__op, __m);
1449 template <class _Tp>
1450 inline _LIBCPP_INLINE_VISIBILITY
1453 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1456 atomic_fetch_sub_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1458 return __o->fetch_sub(__op, __m);
1461 template <class _Tp>
1462 inline _LIBCPP_INLINE_VISIBILITY
1464 atomic_fetch_sub_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
1465 memory_order __m) _NOEXCEPT
1467 return __o->fetch_sub(__op, __m);
1470 template <class _Tp>
1471 inline _LIBCPP_INLINE_VISIBILITY
1473 atomic_fetch_sub_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
1475 return __o->fetch_sub(__op, __m);
// atomic_fetch_and: bitwise AND, integral (non-bool) types only — there
// are no pointer overloads for the bitwise operations.  Returns the value
// held BEFORE the operation.
1480 template <class _Tp>
1481 inline _LIBCPP_INLINE_VISIBILITY
1484 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1487 atomic_fetch_and(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1489 return __o->fetch_and(__op);
1492 template <class _Tp>
1493 inline _LIBCPP_INLINE_VISIBILITY
1496 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1499 atomic_fetch_and(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1501 return __o->fetch_and(__op);
1504 // atomic_fetch_and_explicit
// Same overloads with a caller-supplied memory_order.
1506 template <class _Tp>
1507 inline _LIBCPP_INLINE_VISIBILITY
1510 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1513 atomic_fetch_and_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1515 return __o->fetch_and(__op, __m);
1518 template <class _Tp>
1519 inline _LIBCPP_INLINE_VISIBILITY
1522 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1525 atomic_fetch_and_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1527 return __o->fetch_and(__op, __m);
// atomic_fetch_or: bitwise OR, integral (non-bool) types only.  Returns
// the value held BEFORE the operation.
1532 template <class _Tp>
1533 inline _LIBCPP_INLINE_VISIBILITY
1536 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1539 atomic_fetch_or(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1541 return __o->fetch_or(__op);
1544 template <class _Tp>
1545 inline _LIBCPP_INLINE_VISIBILITY
1548 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1551 atomic_fetch_or(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1553 return __o->fetch_or(__op);
1556 // atomic_fetch_or_explicit
// Same overloads with a caller-supplied memory_order.
1558 template <class _Tp>
1559 inline _LIBCPP_INLINE_VISIBILITY
1562 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1565 atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1567 return __o->fetch_or(__op, __m);
1570 template <class _Tp>
1571 inline _LIBCPP_INLINE_VISIBILITY
1574 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1577 atomic_fetch_or_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1579 return __o->fetch_or(__op, __m);
// atomic_fetch_xor: bitwise XOR, integral (non-bool) types only.  Returns
// the value held BEFORE the operation.
1584 template <class _Tp>
1585 inline _LIBCPP_INLINE_VISIBILITY
1588 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1591 atomic_fetch_xor(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1593 return __o->fetch_xor(__op);
1596 template <class _Tp>
1597 inline _LIBCPP_INLINE_VISIBILITY
1600 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1603 atomic_fetch_xor(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1605 return __o->fetch_xor(__op);
1608 // atomic_fetch_xor_explicit
// Same overloads with a caller-supplied memory_order.
1610 template <class _Tp>
1611 inline _LIBCPP_INLINE_VISIBILITY
1614 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1617 atomic_fetch_xor_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1619 return __o->fetch_xor(__op, __m);
1622 template <class _Tp>
1623 inline _LIBCPP_INLINE_VISIBILITY
1626 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1629 atomic_fetch_xor_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1631 return __o->fetch_xor(__op, __m);
1634 // flag type and operations
// std::atomic_flag: the one atomic type guaranteed to be lock-free.
// test_and_set is implemented as an atomic exchange(true) returning the
// prior value; clear is an atomic store(false).
1636 typedef struct atomic_flag
1640 _LIBCPP_INLINE_VISIBILITY
1641 bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1642 {return __c11_atomic_exchange(&__a_, true, __m);}
1643 _LIBCPP_INLINE_VISIBILITY
1644 bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT
1645 {return __c11_atomic_exchange(&__a_, true, __m);}
1646 _LIBCPP_INLINE_VISIBILITY
1647 void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1648 {__c11_atomic_store(&__a_, false, __m);}
1649 _LIBCPP_INLINE_VISIBILITY
1650 void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT
1651 {__c11_atomic_store(&__a_, false, __m);}
// Default-construct via = default where the compiler supports it,
// otherwise value-initialize the underlying flag member.
1653 _LIBCPP_INLINE_VISIBILITY
1654 #ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
1655 atomic_flag() _NOEXCEPT = default;
1657 atomic_flag() _NOEXCEPT : __a_() {}
1658 #endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
// Construction from bool supports ATOMIC_FLAG_INIT ({false}).
1660 _LIBCPP_INLINE_VISIBILITY
1661 atomic_flag(bool __b) _NOEXCEPT : __a_(__b) {}
// atomic_flag is non-copyable: deleted where supported, otherwise
// declared-but-not-defined (private-style suppression).
1663 #ifndef _LIBCPP_HAS_NO_DELETED_FUNCTIONS
1664 atomic_flag(const atomic_flag&) = delete;
1665 atomic_flag& operator=(const atomic_flag&) = delete;
1666 atomic_flag& operator=(const atomic_flag&) volatile = delete;
1667 #else // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
1669 atomic_flag(const atomic_flag&);
1670 atomic_flag& operator=(const atomic_flag&);
1671 atomic_flag& operator=(const atomic_flag&) volatile;
1672 #endif // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
// C-compatible free functions over atomic_flag, forwarding to the member
// operations.  (The bodies of the atomic_flag_clear* overloads are elided
// in this extract — presumably __o->clear(...); — TODO confirm against
// the full header.)
1675 inline _LIBCPP_INLINE_VISIBILITY
1677 atomic_flag_test_and_set(volatile atomic_flag* __o) _NOEXCEPT
1679 return __o->test_and_set();
1682 inline _LIBCPP_INLINE_VISIBILITY
1684 atomic_flag_test_and_set(atomic_flag* __o) _NOEXCEPT
1686 return __o->test_and_set();
1689 inline _LIBCPP_INLINE_VISIBILITY
1691 atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
1693 return __o->test_and_set(__m);
1696 inline _LIBCPP_INLINE_VISIBILITY
1698 atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
1700 return __o->test_and_set(__m);
1703 inline _LIBCPP_INLINE_VISIBILITY
1705 atomic_flag_clear(volatile atomic_flag* __o) _NOEXCEPT
1710 inline _LIBCPP_INLINE_VISIBILITY
1712 atomic_flag_clear(atomic_flag* __o) _NOEXCEPT
1717 inline _LIBCPP_INLINE_VISIBILITY
1719 atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
1724 inline _LIBCPP_INLINE_VISIBILITY
1726 atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
// Fences: atomic_thread_fence establishes inter-thread ordering without an
// associated atomic object; atomic_signal_fence orders only with respect
// to a signal handler on the same thread (compiler barrier, no hardware
// fence required).  Both forward to the corresponding __c11 builtins.
1733 inline _LIBCPP_INLINE_VISIBILITY
1735 atomic_thread_fence(memory_order __m) _NOEXCEPT
1737 __c11_atomic_thread_fence(__m);
1740 inline _LIBCPP_INLINE_VISIBILITY
1742 atomic_signal_fence(memory_order __m) _NOEXCEPT
1744 __c11_atomic_signal_fence(__m);
1747 // Atomics for standard typedef types
// Required convenience typedefs ([atomics.types.generic]): one atomic
// alias per builtin integer/character type and per <cstdint> typedef.
1749 typedef atomic<bool> atomic_bool;
1750 typedef atomic<char> atomic_char;
1751 typedef atomic<signed char> atomic_schar;
1752 typedef atomic<unsigned char> atomic_uchar;
1753 typedef atomic<short> atomic_short;
1754 typedef atomic<unsigned short> atomic_ushort;
1755 typedef atomic<int> atomic_int;
1756 typedef atomic<unsigned int> atomic_uint;
1757 typedef atomic<long> atomic_long;
1758 typedef atomic<unsigned long> atomic_ulong;
1759 typedef atomic<long long> atomic_llong;
1760 typedef atomic<unsigned long long> atomic_ullong;
1761 typedef atomic<char16_t> atomic_char16_t;
1762 typedef atomic<char32_t> atomic_char32_t;
1763 typedef atomic<wchar_t> atomic_wchar_t;
// least-width integer aliases
1765 typedef atomic<int_least8_t> atomic_int_least8_t;
1766 typedef atomic<uint_least8_t> atomic_uint_least8_t;
1767 typedef atomic<int_least16_t> atomic_int_least16_t;
1768 typedef atomic<uint_least16_t> atomic_uint_least16_t;
1769 typedef atomic<int_least32_t> atomic_int_least32_t;
1770 typedef atomic<uint_least32_t> atomic_uint_least32_t;
1771 typedef atomic<int_least64_t> atomic_int_least64_t;
1772 typedef atomic<uint_least64_t> atomic_uint_least64_t;
// fast-width integer aliases
1774 typedef atomic<int_fast8_t> atomic_int_fast8_t;
1775 typedef atomic<uint_fast8_t> atomic_uint_fast8_t;
1776 typedef atomic<int_fast16_t> atomic_int_fast16_t;
1777 typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
1778 typedef atomic<int_fast32_t> atomic_int_fast32_t;
1779 typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
1780 typedef atomic<int_fast64_t> atomic_int_fast64_t;
1781 typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
// pointer-sized / size-related aliases
1783 typedef atomic<intptr_t> atomic_intptr_t;
1784 typedef atomic<uintptr_t> atomic_uintptr_t;
1785 typedef atomic<size_t> atomic_size_t;
1786 typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
1787 typedef atomic<intmax_t> atomic_intmax_t;
1788 typedef atomic<uintmax_t> atomic_uintmax_t;
// Aggregate initializers: ATOMIC_FLAG_INIT works with atomic_flag's
// bool constructor; ATOMIC_VAR_INIT brace-initializes an atomic's value.
1790 #define ATOMIC_FLAG_INIT {false}
1791 #define ATOMIC_VAR_INIT(__v) {__v}
1793 // lock-free property
// Lock-free query macros delegate to the compiler's __GCC_ATOMIC_*
// predefines: 0 = never lock-free, 1 = sometimes, 2 = always.
1795 #define ATOMIC_BOOL_LOCK_FREE __GCC_ATOMIC_BOOL_LOCK_FREE
1796 #define ATOMIC_CHAR_LOCK_FREE __GCC_ATOMIC_CHAR_LOCK_FREE
1797 #define ATOMIC_CHAR16_T_LOCK_FREE __GCC_ATOMIC_CHAR16_T_LOCK_FREE
1798 #define ATOMIC_CHAR32_T_LOCK_FREE __GCC_ATOMIC_CHAR32_T_LOCK_FREE
1799 #define ATOMIC_WCHAR_T_LOCK_FREE __GCC_ATOMIC_WCHAR_T_LOCK_FREE
1800 #define ATOMIC_SHORT_LOCK_FREE __GCC_ATOMIC_SHORT_LOCK_FREE
1801 #define ATOMIC_INT_LOCK_FREE __GCC_ATOMIC_INT_LOCK_FREE
1802 #define ATOMIC_LONG_LOCK_FREE __GCC_ATOMIC_LONG_LOCK_FREE
1803 #define ATOMIC_LLONG_LOCK_FREE __GCC_ATOMIC_LLONG_LOCK_FREE
1804 #define ATOMIC_POINTER_LOCK_FREE __GCC_ATOMIC_POINTER_LOCK_FREE
1806 #endif // !__has_feature(cxx_atomic)
1808 _LIBCPP_END_NAMESPACE_STD
1810 #endif // !_LIBCPP_HAS_NO_THREADS
1812 #endif // _LIBCPP_ATOMIC