2 //===--------------------------- atomic -----------------------------------===//
4 // The LLVM Compiler Infrastructure
6 // This file is distributed under the University of Illinois Open Source
7 // License. See LICENSE.TXT for details.
9 //===----------------------------------------------------------------------===//
11 #ifndef _LIBCPP_ATOMIC
12 #define _LIBCPP_ATOMIC
22 #define __cpp_lib_atomic_is_always_lock_free // as specified by SG10
24 // order and consistency
26 typedef enum memory_order
29 memory_order_consume, // load-consume
30 memory_order_acquire, // load-acquire
31 memory_order_release, // store-release
32 memory_order_acq_rel, // store-release load-acquire
33 memory_order_seq_cst // store-release load-acquire
36 template <class T> T kill_dependency(T y) noexcept;
40 #define ATOMIC_BOOL_LOCK_FREE unspecified
41 #define ATOMIC_CHAR_LOCK_FREE unspecified
42 #define ATOMIC_CHAR16_T_LOCK_FREE unspecified
43 #define ATOMIC_CHAR32_T_LOCK_FREE unspecified
44 #define ATOMIC_WCHAR_T_LOCK_FREE unspecified
45 #define ATOMIC_SHORT_LOCK_FREE unspecified
46 #define ATOMIC_INT_LOCK_FREE unspecified
47 #define ATOMIC_LONG_LOCK_FREE unspecified
48 #define ATOMIC_LLONG_LOCK_FREE unspecified
49 #define ATOMIC_POINTER_LOCK_FREE unspecified
51 // flag type and operations
53 typedef struct atomic_flag
55 bool test_and_set(memory_order m = memory_order_seq_cst) volatile noexcept;
56 bool test_and_set(memory_order m = memory_order_seq_cst) noexcept;
57 void clear(memory_order m = memory_order_seq_cst) volatile noexcept;
58 void clear(memory_order m = memory_order_seq_cst) noexcept;
59 atomic_flag() noexcept = default;
60 atomic_flag(const atomic_flag&) = delete;
61 atomic_flag& operator=(const atomic_flag&) = delete;
62 atomic_flag& operator=(const atomic_flag&) volatile = delete;
66 atomic_flag_test_and_set(volatile atomic_flag* obj) noexcept;
69 atomic_flag_test_and_set(atomic_flag* obj) noexcept;
72 atomic_flag_test_and_set_explicit(volatile atomic_flag* obj,
73 memory_order m) noexcept;
76 atomic_flag_test_and_set_explicit(atomic_flag* obj, memory_order m) noexcept;
79 atomic_flag_clear(volatile atomic_flag* obj) noexcept;
82 atomic_flag_clear(atomic_flag* obj) noexcept;
85 atomic_flag_clear_explicit(volatile atomic_flag* obj, memory_order m) noexcept;
88 atomic_flag_clear_explicit(atomic_flag* obj, memory_order m) noexcept;
90 #define ATOMIC_FLAG_INIT see below
91 #define ATOMIC_VAR_INIT(value) see below
96 static constexpr bool is_always_lock_free;
97 bool is_lock_free() const volatile noexcept;
98 bool is_lock_free() const noexcept;
99 void store(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
100 void store(T desr, memory_order m = memory_order_seq_cst) noexcept;
101 T load(memory_order m = memory_order_seq_cst) const volatile noexcept;
102 T load(memory_order m = memory_order_seq_cst) const noexcept;
103 operator T() const volatile noexcept;
104 operator T() const noexcept;
105 T exchange(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
106 T exchange(T desr, memory_order m = memory_order_seq_cst) noexcept;
107 bool compare_exchange_weak(T& expc, T desr,
108 memory_order s, memory_order f) volatile noexcept;
109 bool compare_exchange_weak(T& expc, T desr, memory_order s, memory_order f) noexcept;
110 bool compare_exchange_strong(T& expc, T desr,
111 memory_order s, memory_order f) volatile noexcept;
112 bool compare_exchange_strong(T& expc, T desr,
113 memory_order s, memory_order f) noexcept;
114 bool compare_exchange_weak(T& expc, T desr,
115 memory_order m = memory_order_seq_cst) volatile noexcept;
116 bool compare_exchange_weak(T& expc, T desr,
117 memory_order m = memory_order_seq_cst) noexcept;
118 bool compare_exchange_strong(T& expc, T desr,
119 memory_order m = memory_order_seq_cst) volatile noexcept;
120 bool compare_exchange_strong(T& expc, T desr,
121 memory_order m = memory_order_seq_cst) noexcept;
123 atomic() noexcept = default;
124 constexpr atomic(T desr) noexcept;
125 atomic(const atomic&) = delete;
126 atomic& operator=(const atomic&) = delete;
127 atomic& operator=(const atomic&) volatile = delete;
128 T operator=(T) volatile noexcept;
129 T operator=(T) noexcept;
133 struct atomic<integral>
135 static constexpr bool is_always_lock_free;
136 bool is_lock_free() const volatile noexcept;
137 bool is_lock_free() const noexcept;
138 void store(integral desr, memory_order m = memory_order_seq_cst) volatile noexcept;
139 void store(integral desr, memory_order m = memory_order_seq_cst) noexcept;
140 integral load(memory_order m = memory_order_seq_cst) const volatile noexcept;
141 integral load(memory_order m = memory_order_seq_cst) const noexcept;
142 operator integral() const volatile noexcept;
143 operator integral() const noexcept;
144 integral exchange(integral desr,
145 memory_order m = memory_order_seq_cst) volatile noexcept;
146 integral exchange(integral desr, memory_order m = memory_order_seq_cst) noexcept;
147 bool compare_exchange_weak(integral& expc, integral desr,
148 memory_order s, memory_order f) volatile noexcept;
149 bool compare_exchange_weak(integral& expc, integral desr,
150 memory_order s, memory_order f) noexcept;
151 bool compare_exchange_strong(integral& expc, integral desr,
152 memory_order s, memory_order f) volatile noexcept;
153 bool compare_exchange_strong(integral& expc, integral desr,
154 memory_order s, memory_order f) noexcept;
155 bool compare_exchange_weak(integral& expc, integral desr,
156 memory_order m = memory_order_seq_cst) volatile noexcept;
157 bool compare_exchange_weak(integral& expc, integral desr,
158 memory_order m = memory_order_seq_cst) noexcept;
159 bool compare_exchange_strong(integral& expc, integral desr,
160 memory_order m = memory_order_seq_cst) volatile noexcept;
161 bool compare_exchange_strong(integral& expc, integral desr,
162 memory_order m = memory_order_seq_cst) noexcept;
165 fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
166 integral fetch_add(integral op, memory_order m = memory_order_seq_cst) noexcept;
168 fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
169 integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) noexcept;
171 fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
172 integral fetch_and(integral op, memory_order m = memory_order_seq_cst) noexcept;
174 fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
175 integral fetch_or(integral op, memory_order m = memory_order_seq_cst) noexcept;
177 fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
178 integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) noexcept;
180 atomic() noexcept = default;
181 constexpr atomic(integral desr) noexcept;
182 atomic(const atomic&) = delete;
183 atomic& operator=(const atomic&) = delete;
184 atomic& operator=(const atomic&) volatile = delete;
185 integral operator=(integral desr) volatile noexcept;
186 integral operator=(integral desr) noexcept;
188 integral operator++(int) volatile noexcept;
189 integral operator++(int) noexcept;
190 integral operator--(int) volatile noexcept;
191 integral operator--(int) noexcept;
192 integral operator++() volatile noexcept;
193 integral operator++() noexcept;
194 integral operator--() volatile noexcept;
195 integral operator--() noexcept;
196 integral operator+=(integral op) volatile noexcept;
197 integral operator+=(integral op) noexcept;
198 integral operator-=(integral op) volatile noexcept;
199 integral operator-=(integral op) noexcept;
200 integral operator&=(integral op) volatile noexcept;
201 integral operator&=(integral op) noexcept;
202 integral operator|=(integral op) volatile noexcept;
203 integral operator|=(integral op) noexcept;
204 integral operator^=(integral op) volatile noexcept;
205 integral operator^=(integral op) noexcept;
211 static constexpr bool is_always_lock_free;
212 bool is_lock_free() const volatile noexcept;
213 bool is_lock_free() const noexcept;
214 void store(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
215 void store(T* desr, memory_order m = memory_order_seq_cst) noexcept;
216 T* load(memory_order m = memory_order_seq_cst) const volatile noexcept;
217 T* load(memory_order m = memory_order_seq_cst) const noexcept;
218 operator T*() const volatile noexcept;
219 operator T*() const noexcept;
220 T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
221 T* exchange(T* desr, memory_order m = memory_order_seq_cst) noexcept;
222 bool compare_exchange_weak(T*& expc, T* desr,
223 memory_order s, memory_order f) volatile noexcept;
224 bool compare_exchange_weak(T*& expc, T* desr,
225 memory_order s, memory_order f) noexcept;
226 bool compare_exchange_strong(T*& expc, T* desr,
227 memory_order s, memory_order f) volatile noexcept;
228 bool compare_exchange_strong(T*& expc, T* desr,
229 memory_order s, memory_order f) noexcept;
230 bool compare_exchange_weak(T*& expc, T* desr,
231 memory_order m = memory_order_seq_cst) volatile noexcept;
232 bool compare_exchange_weak(T*& expc, T* desr,
233 memory_order m = memory_order_seq_cst) noexcept;
234 bool compare_exchange_strong(T*& expc, T* desr,
235 memory_order m = memory_order_seq_cst) volatile noexcept;
236 bool compare_exchange_strong(T*& expc, T* desr,
237 memory_order m = memory_order_seq_cst) noexcept;
238 T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
239 T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
240 T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
241 T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
243 atomic() noexcept = default;
244 constexpr atomic(T* desr) noexcept;
245 atomic(const atomic&) = delete;
246 atomic& operator=(const atomic&) = delete;
247 atomic& operator=(const atomic&) volatile = delete;
249 T* operator=(T*) volatile noexcept;
250 T* operator=(T*) noexcept;
251 T* operator++(int) volatile noexcept;
252 T* operator++(int) noexcept;
253 T* operator--(int) volatile noexcept;
254 T* operator--(int) noexcept;
255 T* operator++() volatile noexcept;
256 T* operator++() noexcept;
257 T* operator--() volatile noexcept;
258 T* operator--() noexcept;
259 T* operator+=(ptrdiff_t op) volatile noexcept;
260 T* operator+=(ptrdiff_t op) noexcept;
261 T* operator-=(ptrdiff_t op) volatile noexcept;
262 T* operator-=(ptrdiff_t op) noexcept;
268 atomic_is_lock_free(const volatile atomic<T>* obj) noexcept;
272 atomic_is_lock_free(const atomic<T>* obj) noexcept;
276 atomic_init(volatile atomic<T>* obj, T desr) noexcept;
280 atomic_init(atomic<T>* obj, T desr) noexcept;
284 atomic_store(volatile atomic<T>* obj, T desr) noexcept;
288 atomic_store(atomic<T>* obj, T desr) noexcept;
292 atomic_store_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
296 atomic_store_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
300 atomic_load(const volatile atomic<T>* obj) noexcept;
304 atomic_load(const atomic<T>* obj) noexcept;
308 atomic_load_explicit(const volatile atomic<T>* obj, memory_order m) noexcept;
312 atomic_load_explicit(const atomic<T>* obj, memory_order m) noexcept;
316 atomic_exchange(volatile atomic<T>* obj, T desr) noexcept;
320 atomic_exchange(atomic<T>* obj, T desr) noexcept;
324 atomic_exchange_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
328 atomic_exchange_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
332 atomic_compare_exchange_weak(volatile atomic<T>* obj, T* expc, T desr) noexcept;
336 atomic_compare_exchange_weak(atomic<T>* obj, T* expc, T desr) noexcept;
340 atomic_compare_exchange_strong(volatile atomic<T>* obj, T* expc, T desr) noexcept;
344 atomic_compare_exchange_strong(atomic<T>* obj, T* expc, T desr) noexcept;
348 atomic_compare_exchange_weak_explicit(volatile atomic<T>* obj, T* expc,
350 memory_order s, memory_order f) noexcept;
354 atomic_compare_exchange_weak_explicit(atomic<T>* obj, T* expc, T desr,
355 memory_order s, memory_order f) noexcept;
359 atomic_compare_exchange_strong_explicit(volatile atomic<T>* obj,
361 memory_order s, memory_order f) noexcept;
365 atomic_compare_exchange_strong_explicit(atomic<T>* obj, T* expc,
367 memory_order s, memory_order f) noexcept;
369 template <class Integral>
371 atomic_fetch_add(volatile atomic<Integral>* obj, Integral op) noexcept;
373 template <class Integral>
375 atomic_fetch_add(atomic<Integral>* obj, Integral op) noexcept;
377 template <class Integral>
379 atomic_fetch_add_explicit(volatile atomic<Integral>* obj, Integral op,
380 memory_order m) noexcept;
381 template <class Integral>
383 atomic_fetch_add_explicit(atomic<Integral>* obj, Integral op,
384 memory_order m) noexcept;
385 template <class Integral>
387 atomic_fetch_sub(volatile atomic<Integral>* obj, Integral op) noexcept;
389 template <class Integral>
391 atomic_fetch_sub(atomic<Integral>* obj, Integral op) noexcept;
393 template <class Integral>
395 atomic_fetch_sub_explicit(volatile atomic<Integral>* obj, Integral op,
396 memory_order m) noexcept;
397 template <class Integral>
399 atomic_fetch_sub_explicit(atomic<Integral>* obj, Integral op,
400 memory_order m) noexcept;
401 template <class Integral>
403 atomic_fetch_and(volatile atomic<Integral>* obj, Integral op) noexcept;
405 template <class Integral>
407 atomic_fetch_and(atomic<Integral>* obj, Integral op) noexcept;
409 template <class Integral>
411 atomic_fetch_and_explicit(volatile atomic<Integral>* obj, Integral op,
412 memory_order m) noexcept;
413 template <class Integral>
415 atomic_fetch_and_explicit(atomic<Integral>* obj, Integral op,
416 memory_order m) noexcept;
417 template <class Integral>
419 atomic_fetch_or(volatile atomic<Integral>* obj, Integral op) noexcept;
421 template <class Integral>
423 atomic_fetch_or(atomic<Integral>* obj, Integral op) noexcept;
425 template <class Integral>
427 atomic_fetch_or_explicit(volatile atomic<Integral>* obj, Integral op,
428 memory_order m) noexcept;
429 template <class Integral>
431 atomic_fetch_or_explicit(atomic<Integral>* obj, Integral op,
432 memory_order m) noexcept;
433 template <class Integral>
435 atomic_fetch_xor(volatile atomic<Integral>* obj, Integral op) noexcept;
437 template <class Integral>
439 atomic_fetch_xor(atomic<Integral>* obj, Integral op) noexcept;
441 template <class Integral>
443 atomic_fetch_xor_explicit(volatile atomic<Integral>* obj, Integral op,
444 memory_order m) noexcept;
445 template <class Integral>
447 atomic_fetch_xor_explicit(atomic<Integral>* obj, Integral op,
448 memory_order m) noexcept;
452 atomic_fetch_add(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
456 atomic_fetch_add(atomic<T*>* obj, ptrdiff_t op) noexcept;
460 atomic_fetch_add_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
461 memory_order m) noexcept;
464 atomic_fetch_add_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
468 atomic_fetch_sub(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
472 atomic_fetch_sub(atomic<T*>* obj, ptrdiff_t op) noexcept;
476 atomic_fetch_sub_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
477 memory_order m) noexcept;
480 atomic_fetch_sub_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
482 // Atomics for standard typedef types
484 typedef atomic<bool> atomic_bool;
485 typedef atomic<char> atomic_char;
486 typedef atomic<signed char> atomic_schar;
487 typedef atomic<unsigned char> atomic_uchar;
488 typedef atomic<short> atomic_short;
489 typedef atomic<unsigned short> atomic_ushort;
490 typedef atomic<int> atomic_int;
491 typedef atomic<unsigned int> atomic_uint;
492 typedef atomic<long> atomic_long;
493 typedef atomic<unsigned long> atomic_ulong;
494 typedef atomic<long long> atomic_llong;
495 typedef atomic<unsigned long long> atomic_ullong;
496 typedef atomic<char16_t> atomic_char16_t;
497 typedef atomic<char32_t> atomic_char32_t;
498 typedef atomic<wchar_t> atomic_wchar_t;
500 typedef atomic<int_least8_t> atomic_int_least8_t;
501 typedef atomic<uint_least8_t> atomic_uint_least8_t;
502 typedef atomic<int_least16_t> atomic_int_least16_t;
503 typedef atomic<uint_least16_t> atomic_uint_least16_t;
504 typedef atomic<int_least32_t> atomic_int_least32_t;
505 typedef atomic<uint_least32_t> atomic_uint_least32_t;
506 typedef atomic<int_least64_t> atomic_int_least64_t;
507 typedef atomic<uint_least64_t> atomic_uint_least64_t;
509 typedef atomic<int_fast8_t> atomic_int_fast8_t;
510 typedef atomic<uint_fast8_t> atomic_uint_fast8_t;
511 typedef atomic<int_fast16_t> atomic_int_fast16_t;
512 typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
513 typedef atomic<int_fast32_t> atomic_int_fast32_t;
514 typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
515 typedef atomic<int_fast64_t> atomic_int_fast64_t;
516 typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
518 typedef atomic<int8_t> atomic_int8_t;
519 typedef atomic<uint8_t> atomic_uint8_t;
520 typedef atomic<int16_t> atomic_int16_t;
521 typedef atomic<uint16_t> atomic_uint16_t;
522 typedef atomic<int32_t> atomic_int32_t;
523 typedef atomic<uint32_t> atomic_uint32_t;
524 typedef atomic<int64_t> atomic_int64_t;
525 typedef atomic<uint64_t> atomic_uint64_t;
527 typedef atomic<intptr_t> atomic_intptr_t;
528 typedef atomic<uintptr_t> atomic_uintptr_t;
529 typedef atomic<size_t> atomic_size_t;
530 typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
531 typedef atomic<intmax_t> atomic_intmax_t;
532 typedef atomic<uintmax_t> atomic_uintmax_t;
536 void atomic_thread_fence(memory_order m) noexcept;
537 void atomic_signal_fence(memory_order m) noexcept;
546 #include <type_traits>
548 #if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
549 #pragma GCC system_header
552 #ifdef _LIBCPP_HAS_NO_THREADS
553 #error <atomic> is not supported on this single threaded system
555 #if !defined(_LIBCPP_HAS_C_ATOMIC_IMP) && !defined(_LIBCPP_HAS_GCC_ATOMIC_IMP)
556 #error <atomic> is not implemented
559 #if _LIBCPP_STD_VER > 14
560 // FIXME: use the right feature test macro value as chose by SG10.
561 # define __cpp_lib_atomic_is_always_lock_free 201603L
564 _LIBCPP_BEGIN_NAMESPACE_STD
// Standard memory-ordering enumeration ([atomics.order]). Enumerators take
// the default values 0..5 in declaration order.
typedef enum memory_order
{
    memory_order_relaxed, memory_order_consume, memory_order_acquire,
    memory_order_release, memory_order_acq_rel, memory_order_seq_cst
} memory_order;
572 #if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP)
573 namespace __gcc_atomic {
574 template <typename _Tp>
575 struct __gcc_atomic_t {
578 static_assert(is_trivially_copyable<_Tp>::value,
579 "std::atomic<Tp> requires that 'Tp' be a trivially copyable type");
582 _LIBCPP_INLINE_VISIBILITY
583 #ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
584 __gcc_atomic_t() _NOEXCEPT = default;
586 __gcc_atomic_t() _NOEXCEPT : __a_value() {}
587 #endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
588 _LIBCPP_CONSTEXPR explicit __gcc_atomic_t(_Tp value) _NOEXCEPT
589 : __a_value(value) {}
592 #define _Atomic(x) __gcc_atomic::__gcc_atomic_t<x>
594 template <typename _Tp> _Tp __create();
596 template <typename _Tp, typename _Td>
597 typename enable_if<sizeof(_Tp()->__a_value = __create<_Td>()), char>::type
598 __test_atomic_assignable(int);
599 template <typename _Tp, typename _Up>
600 __two __test_atomic_assignable(...);
602 template <typename _Tp, typename _Td>
603 struct __can_assign {
604 static const bool value =
605 sizeof(__test_atomic_assignable<_Tp, _Td>(1)) == sizeof(char);
608 static inline _LIBCPP_CONSTEXPR int __to_gcc_order(memory_order __order) {
609 // Avoid switch statement to make this a constexpr.
610 return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
611 (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
612 (__order == memory_order_release ? __ATOMIC_RELEASE:
613 (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
614 (__order == memory_order_acq_rel ? __ATOMIC_ACQ_REL:
615 __ATOMIC_CONSUME))));
618 static inline _LIBCPP_CONSTEXPR int __to_gcc_failure_order(memory_order __order) {
619 // Avoid switch statement to make this a constexpr.
620 return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
621 (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
622 (__order == memory_order_release ? __ATOMIC_RELAXED:
623 (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
624 (__order == memory_order_acq_rel ? __ATOMIC_ACQUIRE:
625 __ATOMIC_CONSUME))));
628 } // namespace __gcc_atomic
630 template <typename _Tp>
633 __gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value>::type
634 __c11_atomic_init(volatile _Atomic(_Tp)* __a, _Tp __val) {
635 __a->__a_value = __val;
638 template <typename _Tp>
641 !__gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value &&
642 __gcc_atomic::__can_assign< _Atomic(_Tp)*, _Tp>::value>::type
643 __c11_atomic_init(volatile _Atomic(_Tp)* __a, _Tp __val) {
644 // [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because
645 // the default operator= in an object is not volatile, a byte-by-byte copy
647 volatile char* to = reinterpret_cast<volatile char*>(&__a->__a_value);
648 volatile char* end = to + sizeof(_Tp);
649 char* from = reinterpret_cast<char*>(&__val);
655 template <typename _Tp>
656 static inline void __c11_atomic_init(_Atomic(_Tp)* __a, _Tp __val) {
657 __a->__a_value = __val;
660 static inline void __c11_atomic_thread_fence(memory_order __order) {
661 __atomic_thread_fence(__gcc_atomic::__to_gcc_order(__order));
664 static inline void __c11_atomic_signal_fence(memory_order __order) {
665 __atomic_signal_fence(__gcc_atomic::__to_gcc_order(__order));
668 template <typename _Tp>
669 static inline void __c11_atomic_store(volatile _Atomic(_Tp)* __a, _Tp __val,
670 memory_order __order) {
671 return __atomic_store(&__a->__a_value, &__val,
672 __gcc_atomic::__to_gcc_order(__order));
675 template <typename _Tp>
676 static inline void __c11_atomic_store(_Atomic(_Tp)* __a, _Tp __val,
677 memory_order __order) {
678 __atomic_store(&__a->__a_value, &__val,
679 __gcc_atomic::__to_gcc_order(__order));
682 template <typename _Tp>
683 static inline _Tp __c11_atomic_load(volatile _Atomic(_Tp)* __a,
684 memory_order __order) {
686 __atomic_load(&__a->__a_value, &__ret,
687 __gcc_atomic::__to_gcc_order(__order));
691 template <typename _Tp>
692 static inline _Tp __c11_atomic_load(_Atomic(_Tp)* __a, memory_order __order) {
694 __atomic_load(&__a->__a_value, &__ret,
695 __gcc_atomic::__to_gcc_order(__order));
699 template <typename _Tp>
700 static inline _Tp __c11_atomic_exchange(volatile _Atomic(_Tp)* __a,
701 _Tp __value, memory_order __order) {
703 __atomic_exchange(&__a->__a_value, &__value, &__ret,
704 __gcc_atomic::__to_gcc_order(__order));
708 template <typename _Tp>
709 static inline _Tp __c11_atomic_exchange(_Atomic(_Tp)* __a, _Tp __value,
710 memory_order __order) {
712 __atomic_exchange(&__a->__a_value, &__value, &__ret,
713 __gcc_atomic::__to_gcc_order(__order));
717 template <typename _Tp>
718 static inline bool __c11_atomic_compare_exchange_strong(
719 volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
720 memory_order __success, memory_order __failure) {
721 return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
723 __gcc_atomic::__to_gcc_order(__success),
724 __gcc_atomic::__to_gcc_failure_order(__failure));
727 template <typename _Tp>
728 static inline bool __c11_atomic_compare_exchange_strong(
729 _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
730 memory_order __failure) {
731 return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
733 __gcc_atomic::__to_gcc_order(__success),
734 __gcc_atomic::__to_gcc_failure_order(__failure));
737 template <typename _Tp>
738 static inline bool __c11_atomic_compare_exchange_weak(
739 volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
740 memory_order __success, memory_order __failure) {
741 return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
743 __gcc_atomic::__to_gcc_order(__success),
744 __gcc_atomic::__to_gcc_failure_order(__failure));
747 template <typename _Tp>
748 static inline bool __c11_atomic_compare_exchange_weak(
749 _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
750 memory_order __failure) {
751 return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
753 __gcc_atomic::__to_gcc_order(__success),
754 __gcc_atomic::__to_gcc_failure_order(__failure));
// __skip_amt<_Tp>::value is the multiplier applied to fetch_add/fetch_sub
// deltas: 1 for arithmetic types, sizeof(_Tp) for _Tp* so that pointer
// atomics advance by whole objects.
template <typename _Tp>
struct __skip_amt { enum {value = 1}; };

template <typename _Tp>
struct __skip_amt<_Tp*> { enum {value = sizeof(_Tp)}; };

// FIXME: Haven't figured out what the spec says about using arrays with
// atomic_fetch_add. Force a failure rather than creating bad behavior.
template <typename _Tp>
struct __skip_amt<_Tp[]> { };
template <typename _Tp, int n>
struct __skip_amt<_Tp[n]> { };
770 template <typename _Tp, typename _Td>
771 static inline _Tp __c11_atomic_fetch_add(volatile _Atomic(_Tp)* __a,
772 _Td __delta, memory_order __order) {
773 return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
774 __gcc_atomic::__to_gcc_order(__order));
777 template <typename _Tp, typename _Td>
778 static inline _Tp __c11_atomic_fetch_add(_Atomic(_Tp)* __a, _Td __delta,
779 memory_order __order) {
780 return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
781 __gcc_atomic::__to_gcc_order(__order));
784 template <typename _Tp, typename _Td>
785 static inline _Tp __c11_atomic_fetch_sub(volatile _Atomic(_Tp)* __a,
786 _Td __delta, memory_order __order) {
787 return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
788 __gcc_atomic::__to_gcc_order(__order));
791 template <typename _Tp, typename _Td>
792 static inline _Tp __c11_atomic_fetch_sub(_Atomic(_Tp)* __a, _Td __delta,
793 memory_order __order) {
794 return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
795 __gcc_atomic::__to_gcc_order(__order));
798 template <typename _Tp>
799 static inline _Tp __c11_atomic_fetch_and(volatile _Atomic(_Tp)* __a,
800 _Tp __pattern, memory_order __order) {
801 return __atomic_fetch_and(&__a->__a_value, __pattern,
802 __gcc_atomic::__to_gcc_order(__order));
805 template <typename _Tp>
806 static inline _Tp __c11_atomic_fetch_and(_Atomic(_Tp)* __a,
807 _Tp __pattern, memory_order __order) {
808 return __atomic_fetch_and(&__a->__a_value, __pattern,
809 __gcc_atomic::__to_gcc_order(__order));
812 template <typename _Tp>
813 static inline _Tp __c11_atomic_fetch_or(volatile _Atomic(_Tp)* __a,
814 _Tp __pattern, memory_order __order) {
815 return __atomic_fetch_or(&__a->__a_value, __pattern,
816 __gcc_atomic::__to_gcc_order(__order));
819 template <typename _Tp>
820 static inline _Tp __c11_atomic_fetch_or(_Atomic(_Tp)* __a, _Tp __pattern,
821 memory_order __order) {
822 return __atomic_fetch_or(&__a->__a_value, __pattern,
823 __gcc_atomic::__to_gcc_order(__order));
826 template <typename _Tp>
827 static inline _Tp __c11_atomic_fetch_xor(volatile _Atomic(_Tp)* __a,
828 _Tp __pattern, memory_order __order) {
829 return __atomic_fetch_xor(&__a->__a_value, __pattern,
830 __gcc_atomic::__to_gcc_order(__order));
833 template <typename _Tp>
834 static inline _Tp __c11_atomic_fetch_xor(_Atomic(_Tp)* __a, _Tp __pattern,
835 memory_order __order) {
836 return __atomic_fetch_xor(&__a->__a_value, __pattern,
837 __gcc_atomic::__to_gcc_order(__order));
839 #endif // _LIBCPP_HAS_GCC_ATOMIC_IMP
842 inline _LIBCPP_INLINE_VISIBILITY
844 kill_dependency(_Tp __y) _NOEXCEPT
// Map the standard *_LOCK_FREE feature macros onto the compiler-predefined
// __GCC_ATOMIC_*_LOCK_FREE values (0 = never, 1 = sometimes, 2 = always
// lock-free).
#define ATOMIC_BOOL_LOCK_FREE __GCC_ATOMIC_BOOL_LOCK_FREE
#define ATOMIC_CHAR_LOCK_FREE __GCC_ATOMIC_CHAR_LOCK_FREE
#define ATOMIC_CHAR16_T_LOCK_FREE __GCC_ATOMIC_CHAR16_T_LOCK_FREE
#define ATOMIC_CHAR32_T_LOCK_FREE __GCC_ATOMIC_CHAR32_T_LOCK_FREE
#define ATOMIC_WCHAR_T_LOCK_FREE __GCC_ATOMIC_WCHAR_T_LOCK_FREE
#define ATOMIC_SHORT_LOCK_FREE __GCC_ATOMIC_SHORT_LOCK_FREE
#define ATOMIC_INT_LOCK_FREE __GCC_ATOMIC_INT_LOCK_FREE
#define ATOMIC_LONG_LOCK_FREE __GCC_ATOMIC_LONG_LOCK_FREE
#define ATOMIC_LLONG_LOCK_FREE __GCC_ATOMIC_LLONG_LOCK_FREE
#define ATOMIC_POINTER_LOCK_FREE __GCC_ATOMIC_POINTER_LOCK_FREE
862 template <class _Tp, bool = is_integral<_Tp>::value && !is_same<_Tp, bool>::value>
863 struct __atomic_base // false
865 mutable _Atomic(_Tp) __a_;
867 #if defined(__cpp_lib_atomic_is_always_lock_free)
868 static _LIBCPP_CONSTEXPR bool is_always_lock_free = __atomic_always_lock_free(sizeof(__a_), 0);
871 _LIBCPP_INLINE_VISIBILITY
872 bool is_lock_free() const volatile _NOEXCEPT
874 #if defined(_LIBCPP_HAS_C_ATOMIC_IMP)
875 return __c11_atomic_is_lock_free(sizeof(_Tp));
877 return __atomic_is_lock_free(sizeof(_Tp), 0);
880 _LIBCPP_INLINE_VISIBILITY
881 bool is_lock_free() const _NOEXCEPT
882 {return static_cast<__atomic_base const volatile*>(this)->is_lock_free();}
883 _LIBCPP_INLINE_VISIBILITY
884 void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
885 {__c11_atomic_store(&__a_, __d, __m);}
886 _LIBCPP_INLINE_VISIBILITY
887 void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
888 {__c11_atomic_store(&__a_, __d, __m);}
889 _LIBCPP_INLINE_VISIBILITY
890 _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
891 {return __c11_atomic_load(&__a_, __m);}
892 _LIBCPP_INLINE_VISIBILITY
893 _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
894 {return __c11_atomic_load(&__a_, __m);}
895 _LIBCPP_INLINE_VISIBILITY
896 operator _Tp() const volatile _NOEXCEPT {return load();}
897 _LIBCPP_INLINE_VISIBILITY
898 operator _Tp() const _NOEXCEPT {return load();}
899 _LIBCPP_INLINE_VISIBILITY
900 _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
901 {return __c11_atomic_exchange(&__a_, __d, __m);}
902 _LIBCPP_INLINE_VISIBILITY
903 _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
904 {return __c11_atomic_exchange(&__a_, __d, __m);}
905 _LIBCPP_INLINE_VISIBILITY
906 bool compare_exchange_weak(_Tp& __e, _Tp __d,
907 memory_order __s, memory_order __f) volatile _NOEXCEPT
908 {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
909 _LIBCPP_INLINE_VISIBILITY
910 bool compare_exchange_weak(_Tp& __e, _Tp __d,
911 memory_order __s, memory_order __f) _NOEXCEPT
912 {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
913 _LIBCPP_INLINE_VISIBILITY
914 bool compare_exchange_strong(_Tp& __e, _Tp __d,
915 memory_order __s, memory_order __f) volatile _NOEXCEPT
916 {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
917 _LIBCPP_INLINE_VISIBILITY
918 bool compare_exchange_strong(_Tp& __e, _Tp __d,
919 memory_order __s, memory_order __f) _NOEXCEPT
920 {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
921 _LIBCPP_INLINE_VISIBILITY
922 bool compare_exchange_weak(_Tp& __e, _Tp __d,
923 memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
924 {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
925 _LIBCPP_INLINE_VISIBILITY
926 bool compare_exchange_weak(_Tp& __e, _Tp __d,
927 memory_order __m = memory_order_seq_cst) _NOEXCEPT
928 {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
929 _LIBCPP_INLINE_VISIBILITY
930 bool compare_exchange_strong(_Tp& __e, _Tp __d,
931 memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
932 {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
933 _LIBCPP_INLINE_VISIBILITY
934 bool compare_exchange_strong(_Tp& __e, _Tp __d,
935 memory_order __m = memory_order_seq_cst) _NOEXCEPT
936 {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
// Constructors and copy control for __atomic_base<_Tp, false>.
// The default constructor is `= default` when the compiler supports
// defaulted functions, otherwise value-initializes __a_ by hand.
// NOTE(review): an `#else` line between the two default-constructor bodies
// (original line 941) appears to be missing from this extraction — verify
// against upstream before editing.
938     _LIBCPP_INLINE_VISIBILITY
939 #ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
940     __atomic_base() _NOEXCEPT = default;
942     __atomic_base() _NOEXCEPT : __a_() {}
943 #endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
945     _LIBCPP_INLINE_VISIBILITY
// Constexpr value constructor — allows constant (static) initialization.
946     _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {}
// Atomics are neither copyable nor copy-assignable; use `= delete` where
// available, otherwise declare-but-don't-define (link-time enforcement).
947 #ifndef _LIBCPP_HAS_NO_DELETED_FUNCTIONS
948     __atomic_base(const __atomic_base&) = delete;
949     __atomic_base& operator=(const __atomic_base&) = delete;
950     __atomic_base& operator=(const __atomic_base&) volatile = delete;
951 #else  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
953     __atomic_base(const __atomic_base&);
954     __atomic_base& operator=(const __atomic_base&);
955     __atomic_base& operator=(const __atomic_base&) volatile;
956 #endif  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
// Out-of-class definition of the C++17 static constexpr data member
// __atomic_base::is_always_lock_free (required to give it a definition
// before C++17 made constexpr statics implicitly inline).
// NOTE(review): the closing #endif for this #if block is not visible in
// this extraction — confirm it exists upstream.
959 #if defined(__cpp_lib_atomic_is_always_lock_free)
960 template <class _Tp, bool __b>
961 _LIBCPP_CONSTEXPR bool __atomic_base<_Tp, __b>::is_always_lock_free;
// Partial specialization selected for integral (non-bool) _Tp; layers the
// arithmetic/bitwise operations on top of the generic base.
// NOTE(review): the `template <class _Tp>` header and opening brace for
// this struct appear to have been dropped by the extraction.
967 struct __atomic_base<_Tp, true>
968     : public __atomic_base<_Tp, false>
970     typedef __atomic_base<_Tp, false> __base;
971     _LIBCPP_INLINE_VISIBILITY
972     __atomic_base() _NOEXCEPT _LIBCPP_DEFAULT
973     _LIBCPP_INLINE_VISIBILITY
974     _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}
// Read-modify-write operations for the integral specialization.  Each
// atomically applies the operation and returns the PREVIOUS value,
// forwarding to the corresponding __c11_atomic_fetch_* builtin.  All
// default to memory_order_seq_cst; every op has volatile and non-volatile
// overloads.
976     _LIBCPP_INLINE_VISIBILITY
977     _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
978         {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
979     _LIBCPP_INLINE_VISIBILITY
980     _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
981         {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
982     _LIBCPP_INLINE_VISIBILITY
983     _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
984         {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
985     _LIBCPP_INLINE_VISIBILITY
986     _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
987         {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
988     _LIBCPP_INLINE_VISIBILITY
989     _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
990         {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
991     _LIBCPP_INLINE_VISIBILITY
992     _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
993         {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
994     _LIBCPP_INLINE_VISIBILITY
995     _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
996         {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
997     _LIBCPP_INLINE_VISIBILITY
998     _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
999         {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
1000     _LIBCPP_INLINE_VISIBILITY
1001     _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1002         {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}
1003     _LIBCPP_INLINE_VISIBILITY
1004     _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1005         {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}
// Operator sugar over the fetch_* primitives (all seq_cst).  Post-inc/dec
// return the OLD value straight from fetch_add/fetch_sub; pre-inc/dec and
// the compound assignments re-apply the operation to the fetched old value
// so they return the NEW value without a second atomic access.
1007     _LIBCPP_INLINE_VISIBILITY
1008     _Tp operator++(int) volatile _NOEXCEPT      {return fetch_add(_Tp(1));}
1009     _LIBCPP_INLINE_VISIBILITY
1010     _Tp operator++(int) _NOEXCEPT               {return fetch_add(_Tp(1));}
1011     _LIBCPP_INLINE_VISIBILITY
1012     _Tp operator--(int) volatile _NOEXCEPT      {return fetch_sub(_Tp(1));}
1013     _LIBCPP_INLINE_VISIBILITY
1014     _Tp operator--(int) _NOEXCEPT               {return fetch_sub(_Tp(1));}
1015     _LIBCPP_INLINE_VISIBILITY
1016     _Tp operator++() volatile _NOEXCEPT         {return fetch_add(_Tp(1)) + _Tp(1);}
1017     _LIBCPP_INLINE_VISIBILITY
1018     _Tp operator++() _NOEXCEPT                  {return fetch_add(_Tp(1)) + _Tp(1);}
1019     _LIBCPP_INLINE_VISIBILITY
1020     _Tp operator--() volatile _NOEXCEPT         {return fetch_sub(_Tp(1)) - _Tp(1);}
1021     _LIBCPP_INLINE_VISIBILITY
1022     _Tp operator--() _NOEXCEPT                  {return fetch_sub(_Tp(1)) - _Tp(1);}
1023     _LIBCPP_INLINE_VISIBILITY
1024     _Tp operator+=(_Tp __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
1025     _LIBCPP_INLINE_VISIBILITY
1026     _Tp operator+=(_Tp __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
1027     _LIBCPP_INLINE_VISIBILITY
1028     _Tp operator-=(_Tp __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
1029     _LIBCPP_INLINE_VISIBILITY
1030     _Tp operator-=(_Tp __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
1031     _LIBCPP_INLINE_VISIBILITY
1032     _Tp operator&=(_Tp __op) volatile _NOEXCEPT {return fetch_and(__op) & __op;}
1033     _LIBCPP_INLINE_VISIBILITY
1034     _Tp operator&=(_Tp __op) _NOEXCEPT          {return fetch_and(__op) & __op;}
1035     _LIBCPP_INLINE_VISIBILITY
1036     _Tp operator|=(_Tp __op) volatile _NOEXCEPT {return fetch_or(__op) | __op;}
1037     _LIBCPP_INLINE_VISIBILITY
1038     _Tp operator|=(_Tp __op) _NOEXCEPT          {return fetch_or(__op) | __op;}
1039     _LIBCPP_INLINE_VISIBILITY
1040     _Tp operator^=(_Tp __op) volatile _NOEXCEPT {return fetch_xor(__op) ^ __op;}
1041     _LIBCPP_INLINE_VISIBILITY
1042     _Tp operator^=(_Tp __op) _NOEXCEPT          {return fetch_xor(__op) ^ __op;}
// Primary template atomic<T>: inherits everything from __atomic_base and
// adds assignment from _Tp, which stores (seq_cst) and returns the stored
// value — matching the standard's `T operator=(T)` contract.
// NOTE(review): the `struct atomic` line and the braces around this class
// body appear to have been dropped by the extraction.
1047 template <class _Tp>
1049     : public __atomic_base<_Tp>
1051     typedef __atomic_base<_Tp> __base;
1052     _LIBCPP_INLINE_VISIBILITY
1053     atomic() _NOEXCEPT _LIBCPP_DEFAULT
1054     _LIBCPP_INLINE_VISIBILITY
1055     _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}
1057     _LIBCPP_INLINE_VISIBILITY
1058     _Tp operator=(_Tp __d) volatile _NOEXCEPT
1059         {__base::store(__d); return __d;}
1060     _LIBCPP_INLINE_VISIBILITY
1061     _Tp operator=(_Tp __d) _NOEXCEPT
1062         {__base::store(__d); return __d;}
// Partial specialization atomic<T*>: pointer assignment plus (below)
// ptrdiff_t-based arithmetic.  Assignment stores (seq_cst) and returns the
// stored pointer.
// NOTE(review): the `struct atomic<_Tp*>` line and class braces appear to
// have been dropped by the extraction.
1067 template <class _Tp>
1069     : public __atomic_base<_Tp*>
1071     typedef __atomic_base<_Tp*> __base;
1072     _LIBCPP_INLINE_VISIBILITY
1073     atomic() _NOEXCEPT _LIBCPP_DEFAULT
1074     _LIBCPP_INLINE_VISIBILITY
1075     _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}
1077     _LIBCPP_INLINE_VISIBILITY
1078     _Tp* operator=(_Tp* __d) volatile _NOEXCEPT
1079         {__base::store(__d); return __d;}
1080     _LIBCPP_INLINE_VISIBILITY
1081     _Tp* operator=(_Tp* __d) _NOEXCEPT
1082         {__base::store(__d); return __d;}
// Pointer arithmetic RMW: add/subtract a ptrdiff_t element offset and
// return the PREVIOUS pointer, via the __c11_atomic_fetch_* builtins.
// NOTE(review): the `volatile _NOEXCEPT` continuation lines of the two
// volatile overloads (original 1086, 1093) appear to be missing from this
// extraction.
1084     _LIBCPP_INLINE_VISIBILITY
1085     _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
1087         {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
1088     _LIBCPP_INLINE_VISIBILITY
1089     _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1090         {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
1091     _LIBCPP_INLINE_VISIBILITY
1092     _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
1094         {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
1095     _LIBCPP_INLINE_VISIBILITY
1096     _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1097         {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
// Pointer operator sugar over fetch_add/fetch_sub (all seq_cst).
// Post-forms return the old pointer; pre-forms and compound assignments
// re-apply the offset to the fetched old pointer to yield the new value.
1099     _LIBCPP_INLINE_VISIBILITY
1100     _Tp* operator++(int) volatile _NOEXCEPT            {return fetch_add(1);}
1101     _LIBCPP_INLINE_VISIBILITY
1102     _Tp* operator++(int) _NOEXCEPT                     {return fetch_add(1);}
1103     _LIBCPP_INLINE_VISIBILITY
1104     _Tp* operator--(int) volatile _NOEXCEPT            {return fetch_sub(1);}
1105     _LIBCPP_INLINE_VISIBILITY
1106     _Tp* operator--(int) _NOEXCEPT                     {return fetch_sub(1);}
1107     _LIBCPP_INLINE_VISIBILITY
1108     _Tp* operator++() volatile _NOEXCEPT               {return fetch_add(1) + 1;}
1109     _LIBCPP_INLINE_VISIBILITY
1110     _Tp* operator++() _NOEXCEPT                        {return fetch_add(1) + 1;}
1111     _LIBCPP_INLINE_VISIBILITY
1112     _Tp* operator--() volatile _NOEXCEPT               {return fetch_sub(1) - 1;}
1113     _LIBCPP_INLINE_VISIBILITY
1114     _Tp* operator--() _NOEXCEPT                        {return fetch_sub(1) - 1;}
1115     _LIBCPP_INLINE_VISIBILITY
1116     _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
1117     _LIBCPP_INLINE_VISIBILITY
1118     _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
1119     _LIBCPP_INLINE_VISIBILITY
1120     _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
1121     _LIBCPP_INLINE_VISIBILITY
1122     _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
1125 // atomic_is_lock_free
// C-compatible free-function form; forwards to the member is_lock_free().
// NOTE(review): the standalone `bool` return-type lines and the function
// braces were dropped by this extraction.
1127 template <class _Tp>
1128 inline _LIBCPP_INLINE_VISIBILITY
1130 atomic_is_lock_free(const volatile atomic<_Tp>* __o) _NOEXCEPT
1132     return __o->is_lock_free();
1135 template <class _Tp>
1136 inline _LIBCPP_INLINE_VISIBILITY
1138 atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT
1140     return __o->is_lock_free();
// atomic_init: NON-atomic initialization of a default-constructed atomic.
// Bypasses the member interface and writes the internal __a_ member
// directly via __c11_atomic_init; concurrent access during this call is
// undefined behavior.
// NOTE(review): `void` return-type lines and braces were dropped by this
// extraction.
1145 template <class _Tp>
1146 inline _LIBCPP_INLINE_VISIBILITY
1148 atomic_init(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1150     __c11_atomic_init(&__o->__a_, __d);
1153 template <class _Tp>
1154 inline _LIBCPP_INLINE_VISIBILITY
1156 atomic_init(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1158     __c11_atomic_init(&__o->__a_, __d);
// atomic_store / atomic_store_explicit: free-function forms forwarding to
// the member store(); the non-explicit form uses the member's seq_cst
// default.
// NOTE(review): the bodies of the two non-explicit overloads (the
// `__o->store(__d);` statements and braces, original 1167-1170/1175-1178)
// were dropped by this extraction.
1163 template <class _Tp>
1164 inline _LIBCPP_INLINE_VISIBILITY
1166 atomic_store(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1171 template <class _Tp>
1172 inline _LIBCPP_INLINE_VISIBILITY
1174 atomic_store(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1179 // atomic_store_explicit
1181 template <class _Tp>
1182 inline _LIBCPP_INLINE_VISIBILITY
1184 atomic_store_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1186     __o->store(__d, __m);
1189 template <class _Tp>
1190 inline _LIBCPP_INLINE_VISIBILITY
1192 atomic_store_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1194     __o->store(__d, __m);
// atomic_load / atomic_load_explicit: free-function forms forwarding to
// the member load(); the non-explicit form uses the member's seq_cst
// default.
// NOTE(review): the `return __o->load();` bodies of the two non-explicit
// overloads were dropped by this extraction.
1199 template <class _Tp>
1200 inline _LIBCPP_INLINE_VISIBILITY
1202 atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT
1207 template <class _Tp>
1208 inline _LIBCPP_INLINE_VISIBILITY
1210 atomic_load(const atomic<_Tp>* __o) _NOEXCEPT
1215 // atomic_load_explicit
1217 template <class _Tp>
1218 inline _LIBCPP_INLINE_VISIBILITY
1220 atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
1222     return __o->load(__m);
1225 template <class _Tp>
1226 inline _LIBCPP_INLINE_VISIBILITY
1228 atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
1230     return __o->load(__m);
// atomic_exchange / atomic_exchange_explicit: atomically replace the value
// and return the previous one, forwarding to the member exchange().
1235 template <class _Tp>
1236 inline _LIBCPP_INLINE_VISIBILITY
1238 atomic_exchange(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1240     return __o->exchange(__d);
1243 template <class _Tp>
1244 inline _LIBCPP_INLINE_VISIBILITY
1246 atomic_exchange(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1248     return __o->exchange(__d);
1251 // atomic_exchange_explicit
1253 template <class _Tp>
1254 inline _LIBCPP_INLINE_VISIBILITY
1256 atomic_exchange_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1258     return __o->exchange(__d, __m);
1261 template <class _Tp>
1262 inline _LIBCPP_INLINE_VISIBILITY
1264 atomic_exchange_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1266     return __o->exchange(__d, __m);
1269 // atomic_compare_exchange_weak
// C-compatible CAS: the expected value is passed by POINTER (__e) rather
// than by reference; forwards to the member compare_exchange_* with the
// default seq_cst ordering.
1271 template <class _Tp>
1272 inline _LIBCPP_INLINE_VISIBILITY
1274 atomic_compare_exchange_weak(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1276     return __o->compare_exchange_weak(*__e, __d);
1279 template <class _Tp>
1280 inline _LIBCPP_INLINE_VISIBILITY
1282 atomic_compare_exchange_weak(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1284     return __o->compare_exchange_weak(*__e, __d);
1287 // atomic_compare_exchange_strong
1289 template <class _Tp>
1290 inline _LIBCPP_INLINE_VISIBILITY
1292 atomic_compare_exchange_strong(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1294     return __o->compare_exchange_strong(*__e, __d);
1297 template <class _Tp>
1298 inline _LIBCPP_INLINE_VISIBILITY
1300 atomic_compare_exchange_strong(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1302     return __o->compare_exchange_strong(*__e, __d);
1305 // atomic_compare_exchange_weak_explicit
// Explicit-order CAS forms: __s is the success order, __f the failure
// order, forwarded unchanged to the members.
// NOTE(review): parameter-list continuation lines carrying `_Tp __d` /
// `_Tp* __e, _Tp __d` (original 1311, 1332, 1342) were dropped by this
// extraction.
1307 template <class _Tp>
1308 inline _LIBCPP_INLINE_VISIBILITY
1310 atomic_compare_exchange_weak_explicit(volatile atomic<_Tp>* __o, _Tp* __e,
1312                                       memory_order __s, memory_order __f) _NOEXCEPT
1314     return __o->compare_exchange_weak(*__e, __d, __s, __f);
1317 template <class _Tp>
1318 inline _LIBCPP_INLINE_VISIBILITY
1320 atomic_compare_exchange_weak_explicit(atomic<_Tp>* __o, _Tp* __e, _Tp __d,
1321                                       memory_order __s, memory_order __f) _NOEXCEPT
1323     return __o->compare_exchange_weak(*__e, __d, __s, __f);
1326 // atomic_compare_exchange_strong_explicit
1328 template <class _Tp>
1329 inline _LIBCPP_INLINE_VISIBILITY
1331 atomic_compare_exchange_strong_explicit(volatile atomic<_Tp>* __o,
1333                                         memory_order __s, memory_order __f) _NOEXCEPT
1335     return __o->compare_exchange_strong(*__e, __d, __s, __f);
1338 template <class _Tp>
1339 inline _LIBCPP_INLINE_VISIBILITY
1341 atomic_compare_exchange_strong_explicit(atomic<_Tp>* __o, _Tp* __e,
1343                                         memory_order __s, memory_order __f) _NOEXCEPT
1345     return __o->compare_exchange_strong(*__e, __d, __s, __f);
// atomic_fetch_add family: non-member forms of fetch_add.  The integral
// overloads are SFINAE-constrained to integral types other than bool (the
// visible `is_integral && !is_same<_Tp,bool>` condition); separate
// unconstrained overloads take atomic<T*> with a ptrdiff_t offset.  All
// return the previous value; the *_explicit forms take the memory order.
// NOTE(review): the `typename enable_if<` / `_Tp>::type` wrapper lines
// around each constraint were dropped by this extraction.
1350 template <class _Tp>
1351 inline _LIBCPP_INLINE_VISIBILITY
1354     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1357 atomic_fetch_add(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1359     return __o->fetch_add(__op);
1362 template <class _Tp>
1363 inline _LIBCPP_INLINE_VISIBILITY
1366     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1369 atomic_fetch_add(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1371     return __o->fetch_add(__op);
1374 template <class _Tp>
1375 inline _LIBCPP_INLINE_VISIBILITY
1377 atomic_fetch_add(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
1379     return __o->fetch_add(__op);
1382 template <class _Tp>
1383 inline _LIBCPP_INLINE_VISIBILITY
1385 atomic_fetch_add(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
1387     return __o->fetch_add(__op);
1390 // atomic_fetch_add_explicit
1392 template <class _Tp>
1393 inline _LIBCPP_INLINE_VISIBILITY
1396     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1399 atomic_fetch_add_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1401     return __o->fetch_add(__op, __m);
1404 template <class _Tp>
1405 inline _LIBCPP_INLINE_VISIBILITY
1408     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1411 atomic_fetch_add_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1413     return __o->fetch_add(__op, __m);
1416 template <class _Tp>
1417 inline _LIBCPP_INLINE_VISIBILITY
1419 atomic_fetch_add_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
1420                           memory_order __m) _NOEXCEPT
1422     return __o->fetch_add(__op, __m);
1425 template <class _Tp>
1426 inline _LIBCPP_INLINE_VISIBILITY
1428 atomic_fetch_add_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
1430     return __o->fetch_add(__op, __m);
// atomic_fetch_sub family: mirror of atomic_fetch_add — integral overloads
// constrained to non-bool integral types, pointer overloads taking a
// ptrdiff_t offset; all forward to the member fetch_sub and return the
// previous value.
// NOTE(review): the `typename enable_if<` / `_Tp>::type` wrapper lines
// were dropped by this extraction.
1435 template <class _Tp>
1436 inline _LIBCPP_INLINE_VISIBILITY
1439     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1442 atomic_fetch_sub(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1444     return __o->fetch_sub(__op);
1447 template <class _Tp>
1448 inline _LIBCPP_INLINE_VISIBILITY
1451     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1454 atomic_fetch_sub(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1456     return __o->fetch_sub(__op);
1459 template <class _Tp>
1460 inline _LIBCPP_INLINE_VISIBILITY
1462 atomic_fetch_sub(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
1464     return __o->fetch_sub(__op);
1467 template <class _Tp>
1468 inline _LIBCPP_INLINE_VISIBILITY
1470 atomic_fetch_sub(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
1472     return __o->fetch_sub(__op);
1475 // atomic_fetch_sub_explicit
1477 template <class _Tp>
1478 inline _LIBCPP_INLINE_VISIBILITY
1481     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1484 atomic_fetch_sub_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1486     return __o->fetch_sub(__op, __m);
1489 template <class _Tp>
1490 inline _LIBCPP_INLINE_VISIBILITY
1493     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1496 atomic_fetch_sub_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1498     return __o->fetch_sub(__op, __m);
1501 template <class _Tp>
1502 inline _LIBCPP_INLINE_VISIBILITY
1504 atomic_fetch_sub_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
1505                           memory_order __m) _NOEXCEPT
1507     return __o->fetch_sub(__op, __m);
1510 template <class _Tp>
1511 inline _LIBCPP_INLINE_VISIBILITY
1513 atomic_fetch_sub_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
1515     return __o->fetch_sub(__op, __m);
// atomic_fetch_and family: bitwise AND RMW; integral non-bool types only
// (there is no pointer form for the bitwise operations).  Returns the
// previous value.
// NOTE(review): `typename enable_if<` / `_Tp>::type` wrapper lines were
// dropped by this extraction.
1520 template <class _Tp>
1521 inline _LIBCPP_INLINE_VISIBILITY
1524     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1527 atomic_fetch_and(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1529     return __o->fetch_and(__op);
1532 template <class _Tp>
1533 inline _LIBCPP_INLINE_VISIBILITY
1536     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1539 atomic_fetch_and(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1541     return __o->fetch_and(__op);
1544 // atomic_fetch_and_explicit
1546 template <class _Tp>
1547 inline _LIBCPP_INLINE_VISIBILITY
1550     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1553 atomic_fetch_and_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1555     return __o->fetch_and(__op, __m);
1558 template <class _Tp>
1559 inline _LIBCPP_INLINE_VISIBILITY
1562     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1565 atomic_fetch_and_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1567     return __o->fetch_and(__op, __m);
// atomic_fetch_or family: bitwise OR RMW; integral non-bool types only.
// Returns the previous value.
// NOTE(review): `typename enable_if<` / `_Tp>::type` wrapper lines were
// dropped by this extraction.
1572 template <class _Tp>
1573 inline _LIBCPP_INLINE_VISIBILITY
1576     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1579 atomic_fetch_or(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1581     return __o->fetch_or(__op);
1584 template <class _Tp>
1585 inline _LIBCPP_INLINE_VISIBILITY
1588     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1591 atomic_fetch_or(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1593     return __o->fetch_or(__op);
1596 // atomic_fetch_or_explicit
1598 template <class _Tp>
1599 inline _LIBCPP_INLINE_VISIBILITY
1602     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1605 atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1607     return __o->fetch_or(__op, __m);
1610 template <class _Tp>
1611 inline _LIBCPP_INLINE_VISIBILITY
1614     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1617 atomic_fetch_or_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1619     return __o->fetch_or(__op, __m);
// atomic_fetch_xor family: bitwise XOR RMW; integral non-bool types only.
// Returns the previous value.
// NOTE(review): `typename enable_if<` / `_Tp>::type` wrapper lines were
// dropped by this extraction.
1624 template <class _Tp>
1625 inline _LIBCPP_INLINE_VISIBILITY
1628     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1631 atomic_fetch_xor(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1633     return __o->fetch_xor(__op);
1636 template <class _Tp>
1637 inline _LIBCPP_INLINE_VISIBILITY
1640     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1643 atomic_fetch_xor(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1645     return __o->fetch_xor(__op);
1648 // atomic_fetch_xor_explicit
1650 template <class _Tp>
1651 inline _LIBCPP_INLINE_VISIBILITY
1654     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1657 atomic_fetch_xor_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1659     return __o->fetch_xor(__op, __m);
1662 template <class _Tp>
1663 inline _LIBCPP_INLINE_VISIBILITY
1666     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1669 atomic_fetch_xor_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1671     return __o->fetch_xor(__op, __m);
1674 // flag type and operations
// atomic_flag: the one type guaranteed lock-free.  test_and_set atomically
// sets the flag and returns its PREVIOUS state (via __c11_atomic_exchange
// with true); clear atomically stores false.
// NOTE(review): the struct's opening brace and the member declaration
// (presumably `_Atomic(bool) __a_;`, original 1677-1678), an `#else`
// between the two default constructors (1696), and the closing
// `} atomic_flag;` were dropped by this extraction — verify upstream.
1676 typedef struct atomic_flag
1680     _LIBCPP_INLINE_VISIBILITY
1681     bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1682         {return __c11_atomic_exchange(&__a_, true, __m);}
1683     _LIBCPP_INLINE_VISIBILITY
1684     bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT
1685         {return __c11_atomic_exchange(&__a_, true, __m);}
1686     _LIBCPP_INLINE_VISIBILITY
1687     void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1688         {__c11_atomic_store(&__a_, false, __m);}
1689     _LIBCPP_INLINE_VISIBILITY
1690     void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT
1691         {__c11_atomic_store(&__a_, false, __m);}
1693     _LIBCPP_INLINE_VISIBILITY
1694 #ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
1695     atomic_flag() _NOEXCEPT = default;
1697     atomic_flag() _NOEXCEPT : __a_() {}
1698 #endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
1700     _LIBCPP_INLINE_VISIBILITY
// libc++ extension: construct the flag in a chosen initial state.
1701     atomic_flag(bool __b) _NOEXCEPT : __a_(__b) {} // EXTENSION
// Non-copyable, matching atomic_flag's standard contract.
1703 #ifndef _LIBCPP_HAS_NO_DELETED_FUNCTIONS
1704     atomic_flag(const atomic_flag&) = delete;
1705     atomic_flag& operator=(const atomic_flag&) = delete;
1706     atomic_flag& operator=(const atomic_flag&) volatile = delete;
1707 #else  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
1709     atomic_flag(const atomic_flag&);
1710     atomic_flag& operator=(const atomic_flag&);
1711     atomic_flag& operator=(const atomic_flag&) volatile;
1712 #endif // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
// Free-function forms of atomic_flag::test_and_set; the *_explicit
// variants forward the caller's memory order, the plain variants use the
// member's seq_cst default.
1715 inline _LIBCPP_INLINE_VISIBILITY
1717 atomic_flag_test_and_set(volatile atomic_flag* __o) _NOEXCEPT
1719     return __o->test_and_set();
1722 inline _LIBCPP_INLINE_VISIBILITY
1724 atomic_flag_test_and_set(atomic_flag* __o) _NOEXCEPT
1726     return __o->test_and_set();
1729 inline _LIBCPP_INLINE_VISIBILITY
1731 atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
1733     return __o->test_and_set(__m);
1736 inline _LIBCPP_INLINE_VISIBILITY
1738 atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
1740     return __o->test_and_set(__m);
// Free-function forms of atomic_flag::clear.
// NOTE(review): all four function bodies (`__o->clear();` /
// `__o->clear(__m);` plus braces and `void` return-type lines) were
// dropped by this extraction — only the signatures remain.
1743 inline _LIBCPP_INLINE_VISIBILITY
1745 atomic_flag_clear(volatile atomic_flag* __o) _NOEXCEPT
1750 inline _LIBCPP_INLINE_VISIBILITY
1752 atomic_flag_clear(atomic_flag* __o) _NOEXCEPT
1757 inline _LIBCPP_INLINE_VISIBILITY
1759 atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
1764 inline _LIBCPP_INLINE_VISIBILITY
1766 atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
// Stand-alone fences.  atomic_thread_fence orders memory accesses across
// threads per __m; atomic_signal_fence is a compiler-only barrier ordering
// accesses between a thread and a signal handler on that thread.  Both
// forward to the corresponding __c11 builtin.
1773 inline _LIBCPP_INLINE_VISIBILITY
1775 atomic_thread_fence(memory_order __m) _NOEXCEPT
1777     __c11_atomic_thread_fence(__m);
1780 inline _LIBCPP_INLINE_VISIBILITY
1782 atomic_signal_fence(memory_order __m) _NOEXCEPT
1784     __c11_atomic_signal_fence(__m);
1787 // Atomics for standard typedef types
// Convenience aliases required by [atomics.syn]: one atomic_X typedef for
// each builtin integer/character type and each <cstdint> typedef.
1789 typedef atomic<bool>               atomic_bool;
1790 typedef atomic<char>               atomic_char;
1791 typedef atomic<signed char>        atomic_schar;
1792 typedef atomic<unsigned char>      atomic_uchar;
1793 typedef atomic<short>              atomic_short;
1794 typedef atomic<unsigned short>     atomic_ushort;
1795 typedef atomic<int>                atomic_int;
1796 typedef atomic<unsigned int>       atomic_uint;
1797 typedef atomic<long>               atomic_long;
1798 typedef atomic<unsigned long>      atomic_ulong;
1799 typedef atomic<long long>          atomic_llong;
1800 typedef atomic<unsigned long long> atomic_ullong;
1801 typedef atomic<char16_t>           atomic_char16_t;
1802 typedef atomic<char32_t>           atomic_char32_t;
1803 typedef atomic<wchar_t>            atomic_wchar_t;
// Least-width integer aliases.
1805 typedef atomic<int_least8_t>   atomic_int_least8_t;
1806 typedef atomic<uint_least8_t>  atomic_uint_least8_t;
1807 typedef atomic<int_least16_t>  atomic_int_least16_t;
1808 typedef atomic<uint_least16_t> atomic_uint_least16_t;
1809 typedef atomic<int_least32_t>  atomic_int_least32_t;
1810 typedef atomic<uint_least32_t> atomic_uint_least32_t;
1811 typedef atomic<int_least64_t>  atomic_int_least64_t;
1812 typedef atomic<uint_least64_t> atomic_uint_least64_t;
// Fast integer aliases.
1814 typedef atomic<int_fast8_t>   atomic_int_fast8_t;
1815 typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
1816 typedef atomic<int_fast16_t>  atomic_int_fast16_t;
1817 typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
1818 typedef atomic<int_fast32_t>  atomic_int_fast32_t;
1819 typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
1820 typedef atomic<int_fast64_t>  atomic_int_fast64_t;
1821 typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
// Exact-width integer aliases.
1823 typedef atomic< int8_t>  atomic_int8_t;
1824 typedef atomic<uint8_t>  atomic_uint8_t;
1825 typedef atomic< int16_t> atomic_int16_t;
1826 typedef atomic<uint16_t> atomic_uint16_t;
1827 typedef atomic< int32_t> atomic_int32_t;
1828 typedef atomic<uint32_t> atomic_uint32_t;
1829 typedef atomic< int64_t> atomic_int64_t;
1830 typedef atomic<uint64_t> atomic_uint64_t;
// Pointer-sized and maximum-width integer aliases.
1832 typedef atomic<intptr_t>  atomic_intptr_t;
1833 typedef atomic<uintptr_t> atomic_uintptr_t;
1834 typedef atomic<size_t>    atomic_size_t;
1835 typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
1836 typedef atomic<intmax_t>  atomic_intmax_t;
1837 typedef atomic<uintmax_t> atomic_uintmax_t;
// ATOMIC_FLAG_INIT initializes an atomic_flag to the clear (false) state;
// ATOMIC_VAR_INIT(v) performs C-compatible static initialization of an
// atomic object with value v.
1839 #define ATOMIC_FLAG_INIT {false}
1840 #define ATOMIC_VAR_INIT(__v) {__v}
1842 _LIBCPP_END_NAMESPACE_STD
1844 #endif // _LIBCPP_ATOMIC