2 //===--------------------------- atomic -----------------------------------===//
4 // The LLVM Compiler Infrastructure
6 // This file is distributed under the University of Illinois Open Source
7 // License. See LICENSE.TXT for details.
9 //===----------------------------------------------------------------------===//
11 #ifndef _LIBCPP_ATOMIC
12 #define _LIBCPP_ATOMIC
20 // order and consistency
22 typedef enum memory_order
25 memory_order_consume, // load-consume
26 memory_order_acquire, // load-acquire
27 memory_order_release, // store-release
28 memory_order_acq_rel, // store-release load-acquire
29     memory_order_seq_cst  // store-release load-acquire, plus a single total order over all seq_cst operations
32 template <class T> T kill_dependency(T y) noexcept;
36 #define ATOMIC_BOOL_LOCK_FREE unspecified
37 #define ATOMIC_CHAR_LOCK_FREE unspecified
38 #define ATOMIC_CHAR16_T_LOCK_FREE unspecified
39 #define ATOMIC_CHAR32_T_LOCK_FREE unspecified
40 #define ATOMIC_WCHAR_T_LOCK_FREE unspecified
41 #define ATOMIC_SHORT_LOCK_FREE unspecified
42 #define ATOMIC_INT_LOCK_FREE unspecified
43 #define ATOMIC_LONG_LOCK_FREE unspecified
44 #define ATOMIC_LLONG_LOCK_FREE unspecified
45 #define ATOMIC_POINTER_LOCK_FREE unspecified
47 // flag type and operations
49 typedef struct atomic_flag
51 bool test_and_set(memory_order m = memory_order_seq_cst) volatile noexcept;
52 bool test_and_set(memory_order m = memory_order_seq_cst) noexcept;
53 void clear(memory_order m = memory_order_seq_cst) volatile noexcept;
54 void clear(memory_order m = memory_order_seq_cst) noexcept;
55 atomic_flag() noexcept = default;
56 atomic_flag(const atomic_flag&) = delete;
57 atomic_flag& operator=(const atomic_flag&) = delete;
58 atomic_flag& operator=(const atomic_flag&) volatile = delete;
62 atomic_flag_test_and_set(volatile atomic_flag* obj) noexcept;
65 atomic_flag_test_and_set(atomic_flag* obj) noexcept;
68 atomic_flag_test_and_set_explicit(volatile atomic_flag* obj,
69 memory_order m) noexcept;
72 atomic_flag_test_and_set_explicit(atomic_flag* obj, memory_order m) noexcept;
75 atomic_flag_clear(volatile atomic_flag* obj) noexcept;
78 atomic_flag_clear(atomic_flag* obj) noexcept;
81 atomic_flag_clear_explicit(volatile atomic_flag* obj, memory_order m) noexcept;
84 atomic_flag_clear_explicit(atomic_flag* obj, memory_order m) noexcept;
86 #define ATOMIC_FLAG_INIT see below
87 #define ATOMIC_VAR_INIT(value) see below
92 bool is_lock_free() const volatile noexcept;
93 bool is_lock_free() const noexcept;
94 void store(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
95 void store(T desr, memory_order m = memory_order_seq_cst) noexcept;
96 T load(memory_order m = memory_order_seq_cst) const volatile noexcept;
97 T load(memory_order m = memory_order_seq_cst) const noexcept;
98 operator T() const volatile noexcept;
99 operator T() const noexcept;
100 T exchange(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
101 T exchange(T desr, memory_order m = memory_order_seq_cst) noexcept;
102 bool compare_exchange_weak(T& expc, T desr,
103 memory_order s, memory_order f) volatile noexcept;
104 bool compare_exchange_weak(T& expc, T desr, memory_order s, memory_order f) noexcept;
105 bool compare_exchange_strong(T& expc, T desr,
106 memory_order s, memory_order f) volatile noexcept;
107 bool compare_exchange_strong(T& expc, T desr,
108 memory_order s, memory_order f) noexcept;
109 bool compare_exchange_weak(T& expc, T desr,
110 memory_order m = memory_order_seq_cst) volatile noexcept;
111 bool compare_exchange_weak(T& expc, T desr,
112 memory_order m = memory_order_seq_cst) noexcept;
113 bool compare_exchange_strong(T& expc, T desr,
114 memory_order m = memory_order_seq_cst) volatile noexcept;
115 bool compare_exchange_strong(T& expc, T desr,
116 memory_order m = memory_order_seq_cst) noexcept;
118 atomic() noexcept = default;
119 constexpr atomic(T desr) noexcept;
120 atomic(const atomic&) = delete;
121 atomic& operator=(const atomic&) = delete;
122 atomic& operator=(const atomic&) volatile = delete;
123 T operator=(T) volatile noexcept;
124 T operator=(T) noexcept;
128 struct atomic<integral>
130 bool is_lock_free() const volatile noexcept;
131 bool is_lock_free() const noexcept;
132 void store(integral desr, memory_order m = memory_order_seq_cst) volatile noexcept;
133 void store(integral desr, memory_order m = memory_order_seq_cst) noexcept;
134 integral load(memory_order m = memory_order_seq_cst) const volatile noexcept;
135 integral load(memory_order m = memory_order_seq_cst) const noexcept;
136 operator integral() const volatile noexcept;
137 operator integral() const noexcept;
138 integral exchange(integral desr,
139 memory_order m = memory_order_seq_cst) volatile noexcept;
140 integral exchange(integral desr, memory_order m = memory_order_seq_cst) noexcept;
141 bool compare_exchange_weak(integral& expc, integral desr,
142 memory_order s, memory_order f) volatile noexcept;
143 bool compare_exchange_weak(integral& expc, integral desr,
144 memory_order s, memory_order f) noexcept;
145 bool compare_exchange_strong(integral& expc, integral desr,
146 memory_order s, memory_order f) volatile noexcept;
147 bool compare_exchange_strong(integral& expc, integral desr,
148 memory_order s, memory_order f) noexcept;
149 bool compare_exchange_weak(integral& expc, integral desr,
150 memory_order m = memory_order_seq_cst) volatile noexcept;
151 bool compare_exchange_weak(integral& expc, integral desr,
152 memory_order m = memory_order_seq_cst) noexcept;
153 bool compare_exchange_strong(integral& expc, integral desr,
154 memory_order m = memory_order_seq_cst) volatile noexcept;
155 bool compare_exchange_strong(integral& expc, integral desr,
156 memory_order m = memory_order_seq_cst) noexcept;
159 fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
160 integral fetch_add(integral op, memory_order m = memory_order_seq_cst) noexcept;
162 fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
163 integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) noexcept;
165 fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
166 integral fetch_and(integral op, memory_order m = memory_order_seq_cst) noexcept;
168 fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
169 integral fetch_or(integral op, memory_order m = memory_order_seq_cst) noexcept;
171 fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
172 integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) noexcept;
174 atomic() noexcept = default;
175 constexpr atomic(integral desr) noexcept;
176 atomic(const atomic&) = delete;
177 atomic& operator=(const atomic&) = delete;
178 atomic& operator=(const atomic&) volatile = delete;
179 integral operator=(integral desr) volatile noexcept;
180 integral operator=(integral desr) noexcept;
182 integral operator++(int) volatile noexcept;
183 integral operator++(int) noexcept;
184 integral operator--(int) volatile noexcept;
185 integral operator--(int) noexcept;
186 integral operator++() volatile noexcept;
187 integral operator++() noexcept;
188 integral operator--() volatile noexcept;
189 integral operator--() noexcept;
190 integral operator+=(integral op) volatile noexcept;
191 integral operator+=(integral op) noexcept;
192 integral operator-=(integral op) volatile noexcept;
193 integral operator-=(integral op) noexcept;
194 integral operator&=(integral op) volatile noexcept;
195 integral operator&=(integral op) noexcept;
196 integral operator|=(integral op) volatile noexcept;
197 integral operator|=(integral op) noexcept;
198 integral operator^=(integral op) volatile noexcept;
199 integral operator^=(integral op) noexcept;
205 bool is_lock_free() const volatile noexcept;
206 bool is_lock_free() const noexcept;
207 void store(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
208 void store(T* desr, memory_order m = memory_order_seq_cst) noexcept;
209 T* load(memory_order m = memory_order_seq_cst) const volatile noexcept;
210 T* load(memory_order m = memory_order_seq_cst) const noexcept;
211 operator T*() const volatile noexcept;
212 operator T*() const noexcept;
213 T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
214 T* exchange(T* desr, memory_order m = memory_order_seq_cst) noexcept;
215 bool compare_exchange_weak(T*& expc, T* desr,
216 memory_order s, memory_order f) volatile noexcept;
217 bool compare_exchange_weak(T*& expc, T* desr,
218 memory_order s, memory_order f) noexcept;
219 bool compare_exchange_strong(T*& expc, T* desr,
220 memory_order s, memory_order f) volatile noexcept;
221 bool compare_exchange_strong(T*& expc, T* desr,
222 memory_order s, memory_order f) noexcept;
223 bool compare_exchange_weak(T*& expc, T* desr,
224 memory_order m = memory_order_seq_cst) volatile noexcept;
225 bool compare_exchange_weak(T*& expc, T* desr,
226 memory_order m = memory_order_seq_cst) noexcept;
227 bool compare_exchange_strong(T*& expc, T* desr,
228 memory_order m = memory_order_seq_cst) volatile noexcept;
229 bool compare_exchange_strong(T*& expc, T* desr,
230 memory_order m = memory_order_seq_cst) noexcept;
231 T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
232 T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
233 T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
234 T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
236 atomic() noexcept = default;
237 constexpr atomic(T* desr) noexcept;
238 atomic(const atomic&) = delete;
239 atomic& operator=(const atomic&) = delete;
240 atomic& operator=(const atomic&) volatile = delete;
242 T* operator=(T*) volatile noexcept;
243 T* operator=(T*) noexcept;
244 T* operator++(int) volatile noexcept;
245 T* operator++(int) noexcept;
246 T* operator--(int) volatile noexcept;
247 T* operator--(int) noexcept;
248 T* operator++() volatile noexcept;
249 T* operator++() noexcept;
250 T* operator--() volatile noexcept;
251 T* operator--() noexcept;
252 T* operator+=(ptrdiff_t op) volatile noexcept;
253 T* operator+=(ptrdiff_t op) noexcept;
254 T* operator-=(ptrdiff_t op) volatile noexcept;
255 T* operator-=(ptrdiff_t op) noexcept;
261 atomic_is_lock_free(const volatile atomic<T>* obj) noexcept;
265 atomic_is_lock_free(const atomic<T>* obj) noexcept;
269 atomic_init(volatile atomic<T>* obj, T desr) noexcept;
273 atomic_init(atomic<T>* obj, T desr) noexcept;
277 atomic_store(volatile atomic<T>* obj, T desr) noexcept;
281 atomic_store(atomic<T>* obj, T desr) noexcept;
285 atomic_store_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
289 atomic_store_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
293 atomic_load(const volatile atomic<T>* obj) noexcept;
297 atomic_load(const atomic<T>* obj) noexcept;
301 atomic_load_explicit(const volatile atomic<T>* obj, memory_order m) noexcept;
305 atomic_load_explicit(const atomic<T>* obj, memory_order m) noexcept;
309 atomic_exchange(volatile atomic<T>* obj, T desr) noexcept;
313 atomic_exchange(atomic<T>* obj, T desr) noexcept;
317 atomic_exchange_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
321 atomic_exchange_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
325 atomic_compare_exchange_weak(volatile atomic<T>* obj, T* expc, T desr) noexcept;
329 atomic_compare_exchange_weak(atomic<T>* obj, T* expc, T desr) noexcept;
333 atomic_compare_exchange_strong(volatile atomic<T>* obj, T* expc, T desr) noexcept;
337 atomic_compare_exchange_strong(atomic<T>* obj, T* expc, T desr) noexcept;
341 atomic_compare_exchange_weak_explicit(volatile atomic<T>* obj, T* expc,
343 memory_order s, memory_order f) noexcept;
347 atomic_compare_exchange_weak_explicit(atomic<T>* obj, T* expc, T desr,
348 memory_order s, memory_order f) noexcept;
352 atomic_compare_exchange_strong_explicit(volatile atomic<T>* obj,
354 memory_order s, memory_order f) noexcept;
358 atomic_compare_exchange_strong_explicit(atomic<T>* obj, T* expc,
360 memory_order s, memory_order f) noexcept;
362 template <class Integral>
364 atomic_fetch_add(volatile atomic<Integral>* obj, Integral op) noexcept;
366 template <class Integral>
368 atomic_fetch_add(atomic<Integral>* obj, Integral op) noexcept;
370 template <class Integral>
372 atomic_fetch_add_explicit(volatile atomic<Integral>* obj, Integral op,
373 memory_order m) noexcept;
374 template <class Integral>
376 atomic_fetch_add_explicit(atomic<Integral>* obj, Integral op,
377 memory_order m) noexcept;
378 template <class Integral>
380 atomic_fetch_sub(volatile atomic<Integral>* obj, Integral op) noexcept;
382 template <class Integral>
384 atomic_fetch_sub(atomic<Integral>* obj, Integral op) noexcept;
386 template <class Integral>
388 atomic_fetch_sub_explicit(volatile atomic<Integral>* obj, Integral op,
389 memory_order m) noexcept;
390 template <class Integral>
392 atomic_fetch_sub_explicit(atomic<Integral>* obj, Integral op,
393 memory_order m) noexcept;
394 template <class Integral>
396 atomic_fetch_and(volatile atomic<Integral>* obj, Integral op) noexcept;
398 template <class Integral>
400 atomic_fetch_and(atomic<Integral>* obj, Integral op) noexcept;
402 template <class Integral>
404 atomic_fetch_and_explicit(volatile atomic<Integral>* obj, Integral op,
405 memory_order m) noexcept;
406 template <class Integral>
408 atomic_fetch_and_explicit(atomic<Integral>* obj, Integral op,
409 memory_order m) noexcept;
410 template <class Integral>
412 atomic_fetch_or(volatile atomic<Integral>* obj, Integral op) noexcept;
414 template <class Integral>
416 atomic_fetch_or(atomic<Integral>* obj, Integral op) noexcept;
418 template <class Integral>
420 atomic_fetch_or_explicit(volatile atomic<Integral>* obj, Integral op,
421 memory_order m) noexcept;
422 template <class Integral>
424 atomic_fetch_or_explicit(atomic<Integral>* obj, Integral op,
425 memory_order m) noexcept;
426 template <class Integral>
428 atomic_fetch_xor(volatile atomic<Integral>* obj, Integral op) noexcept;
430 template <class Integral>
432 atomic_fetch_xor(atomic<Integral>* obj, Integral op) noexcept;
434 template <class Integral>
436 atomic_fetch_xor_explicit(volatile atomic<Integral>* obj, Integral op,
437 memory_order m) noexcept;
438 template <class Integral>
440 atomic_fetch_xor_explicit(atomic<Integral>* obj, Integral op,
441 memory_order m) noexcept;
445 atomic_fetch_add(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
449 atomic_fetch_add(atomic<T*>* obj, ptrdiff_t op) noexcept;
453 atomic_fetch_add_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
454 memory_order m) noexcept;
457 atomic_fetch_add_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
461 atomic_fetch_sub(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
465 atomic_fetch_sub(atomic<T*>* obj, ptrdiff_t op) noexcept;
469 atomic_fetch_sub_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
470 memory_order m) noexcept;
473 atomic_fetch_sub_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
475 // Atomics for standard typedef types
477 typedef atomic<bool> atomic_bool;
478 typedef atomic<char> atomic_char;
479 typedef atomic<signed char> atomic_schar;
480 typedef atomic<unsigned char> atomic_uchar;
481 typedef atomic<short> atomic_short;
482 typedef atomic<unsigned short> atomic_ushort;
483 typedef atomic<int> atomic_int;
484 typedef atomic<unsigned int> atomic_uint;
485 typedef atomic<long> atomic_long;
486 typedef atomic<unsigned long> atomic_ulong;
487 typedef atomic<long long> atomic_llong;
488 typedef atomic<unsigned long long> atomic_ullong;
489 typedef atomic<char16_t> atomic_char16_t;
490 typedef atomic<char32_t> atomic_char32_t;
491 typedef atomic<wchar_t> atomic_wchar_t;
493 typedef atomic<int_least8_t> atomic_int_least8_t;
494 typedef atomic<uint_least8_t> atomic_uint_least8_t;
495 typedef atomic<int_least16_t> atomic_int_least16_t;
496 typedef atomic<uint_least16_t> atomic_uint_least16_t;
497 typedef atomic<int_least32_t> atomic_int_least32_t;
498 typedef atomic<uint_least32_t> atomic_uint_least32_t;
499 typedef atomic<int_least64_t> atomic_int_least64_t;
500 typedef atomic<uint_least64_t> atomic_uint_least64_t;
502 typedef atomic<int_fast8_t> atomic_int_fast8_t;
503 typedef atomic<uint_fast8_t> atomic_uint_fast8_t;
504 typedef atomic<int_fast16_t> atomic_int_fast16_t;
505 typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
506 typedef atomic<int_fast32_t> atomic_int_fast32_t;
507 typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
508 typedef atomic<int_fast64_t> atomic_int_fast64_t;
509 typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
511 typedef atomic<intptr_t> atomic_intptr_t;
512 typedef atomic<uintptr_t> atomic_uintptr_t;
513 typedef atomic<size_t> atomic_size_t;
514 typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
515 typedef atomic<intmax_t> atomic_intmax_t;
516 typedef atomic<uintmax_t> atomic_uintmax_t;
520 void atomic_thread_fence(memory_order m) noexcept;
521 void atomic_signal_fence(memory_order m) noexcept;
530 #include <type_traits>
532 #if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
533 #pragma GCC system_header
536 #ifdef _LIBCPP_HAS_NO_THREADS
537 #error <atomic> is not supported on this single threaded system
539 #if !defined(_LIBCPP_HAS_C_ATOMIC_IMP) && !defined(_LIBCPP_HAS_GCC_ATOMIC_IMP)
540 #error <atomic> is not implemented
543 _LIBCPP_BEGIN_NAMESPACE_STD
545 typedef enum memory_order
547 memory_order_relaxed, memory_order_consume, memory_order_acquire,
548 memory_order_release, memory_order_acq_rel, memory_order_seq_cst
551 #if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP)
552 namespace __gcc_atomic {
553 template <typename _Tp>
554 struct __gcc_atomic_t {
557 static_assert(is_trivially_copyable<_Tp>::value,
558 "std::atomic<Tp> requires that 'Tp' be a trivially copyable type");
561 _LIBCPP_INLINE_VISIBILITY
562 #ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
563 __gcc_atomic_t() _NOEXCEPT = default;
565 __gcc_atomic_t() _NOEXCEPT : __a_value() {}
566 #endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
567 _LIBCPP_CONSTEXPR explicit __gcc_atomic_t(_Tp value) _NOEXCEPT
568 : __a_value(value) {}
571 #define _Atomic(x) __gcc_atomic::__gcc_atomic_t<x>
573 template <typename _Tp> _Tp __create();
575 template <typename _Tp, typename _Td>
576 typename enable_if<sizeof(_Tp()->__a_value = __create<_Td>()), char>::type
577 __test_atomic_assignable(int);
578 template <typename _Tp, typename _Up>
579 __two __test_atomic_assignable(...);
581 template <typename _Tp, typename _Td>
582 struct __can_assign {
583 static const bool value =
584 sizeof(__test_atomic_assignable<_Tp, _Td>(1)) == sizeof(char);
587 static inline _LIBCPP_CONSTEXPR int __to_gcc_order(memory_order __order) {
588 // Avoid switch statement to make this a constexpr.
589 return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
590 (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
591 (__order == memory_order_release ? __ATOMIC_RELEASE:
592 (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
593 (__order == memory_order_acq_rel ? __ATOMIC_ACQ_REL:
594 __ATOMIC_CONSUME))));
597 static inline _LIBCPP_CONSTEXPR int __to_gcc_failure_order(memory_order __order) {
598 // Avoid switch statement to make this a constexpr.
599 return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
600 (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
601 (__order == memory_order_release ? __ATOMIC_RELAXED:
602 (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
603 (__order == memory_order_acq_rel ? __ATOMIC_ACQUIRE:
604 __ATOMIC_CONSUME))));
607 } // namespace __gcc_atomic
609 template <typename _Tp>
612 __gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value>::type
613 __c11_atomic_init(volatile _Atomic(_Tp)* __a, _Tp __val) {
614 __a->__a_value = __val;
617 template <typename _Tp>
620 !__gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value &&
621 __gcc_atomic::__can_assign< _Atomic(_Tp)*, _Tp>::value>::type
622 __c11_atomic_init(volatile _Atomic(_Tp)* __a, _Tp __val) {
623 // [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because
624 // the default operator= in an object is not volatile, a byte-by-byte copy
626 volatile char* to = reinterpret_cast<volatile char*>(&__a->__a_value);
627 volatile char* end = to + sizeof(_Tp);
628 char* from = reinterpret_cast<char*>(&__val);
634 template <typename _Tp>
635 static inline void __c11_atomic_init(_Atomic(_Tp)* __a, _Tp __val) {
636 __a->__a_value = __val;
639 static inline void __c11_atomic_thread_fence(memory_order __order) {
640 __atomic_thread_fence(__gcc_atomic::__to_gcc_order(__order));
643 static inline void __c11_atomic_signal_fence(memory_order __order) {
644 __atomic_signal_fence(__gcc_atomic::__to_gcc_order(__order));
647 template <typename _Tp>
648 static inline void __c11_atomic_store(volatile _Atomic(_Tp)* __a, _Tp __val,
649 memory_order __order) {
650 return __atomic_store(&__a->__a_value, &__val,
651 __gcc_atomic::__to_gcc_order(__order));
654 template <typename _Tp>
655 static inline void __c11_atomic_store(_Atomic(_Tp)* __a, _Tp __val,
656 memory_order __order) {
657 __atomic_store(&__a->__a_value, &__val,
658 __gcc_atomic::__to_gcc_order(__order));
661 template <typename _Tp>
662 static inline _Tp __c11_atomic_load(volatile _Atomic(_Tp)* __a,
663 memory_order __order) {
665 __atomic_load(&__a->__a_value, &__ret,
666 __gcc_atomic::__to_gcc_order(__order));
670 template <typename _Tp>
671 static inline _Tp __c11_atomic_load(_Atomic(_Tp)* __a, memory_order __order) {
673 __atomic_load(&__a->__a_value, &__ret,
674 __gcc_atomic::__to_gcc_order(__order));
678 template <typename _Tp>
679 static inline _Tp __c11_atomic_exchange(volatile _Atomic(_Tp)* __a,
680 _Tp __value, memory_order __order) {
682 __atomic_exchange(&__a->__a_value, &__value, &__ret,
683 __gcc_atomic::__to_gcc_order(__order));
687 template <typename _Tp>
688 static inline _Tp __c11_atomic_exchange(_Atomic(_Tp)* __a, _Tp __value,
689 memory_order __order) {
691 __atomic_exchange(&__a->__a_value, &__value, &__ret,
692 __gcc_atomic::__to_gcc_order(__order));
696 template <typename _Tp>
697 static inline bool __c11_atomic_compare_exchange_strong(
698 volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
699 memory_order __success, memory_order __failure) {
700 return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
702 __gcc_atomic::__to_gcc_order(__success),
703 __gcc_atomic::__to_gcc_failure_order(__failure));
706 template <typename _Tp>
707 static inline bool __c11_atomic_compare_exchange_strong(
708 _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
709 memory_order __failure) {
710 return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
712 __gcc_atomic::__to_gcc_order(__success),
713 __gcc_atomic::__to_gcc_failure_order(__failure));
716 template <typename _Tp>
717 static inline bool __c11_atomic_compare_exchange_weak(
718 volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
719 memory_order __success, memory_order __failure) {
720 return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
722 __gcc_atomic::__to_gcc_order(__success),
723 __gcc_atomic::__to_gcc_failure_order(__failure));
726 template <typename _Tp>
727 static inline bool __c11_atomic_compare_exchange_weak(
728 _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
729 memory_order __failure) {
730 return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
732 __gcc_atomic::__to_gcc_order(__success),
733 __gcc_atomic::__to_gcc_failure_order(__failure));
736 template <typename _Tp>
737 struct __skip_amt { enum {value = 1}; };
739 template <typename _Tp>
740 struct __skip_amt<_Tp*> { enum {value = sizeof(_Tp)}; };
742 // FIXME: Haven't figured out what the spec says about using arrays with
743 // atomic_fetch_add. Force a failure rather than creating bad behavior.
744 template <typename _Tp>
745 struct __skip_amt<_Tp[]> { };
746 template <typename _Tp, int n>
747 struct __skip_amt<_Tp[n]> { };
749 template <typename _Tp, typename _Td>
750 static inline _Tp __c11_atomic_fetch_add(volatile _Atomic(_Tp)* __a,
751 _Td __delta, memory_order __order) {
752 return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
753 __gcc_atomic::__to_gcc_order(__order));
756 template <typename _Tp, typename _Td>
757 static inline _Tp __c11_atomic_fetch_add(_Atomic(_Tp)* __a, _Td __delta,
758 memory_order __order) {
759 return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
760 __gcc_atomic::__to_gcc_order(__order));
763 template <typename _Tp, typename _Td>
764 static inline _Tp __c11_atomic_fetch_sub(volatile _Atomic(_Tp)* __a,
765 _Td __delta, memory_order __order) {
766 return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
767 __gcc_atomic::__to_gcc_order(__order));
770 template <typename _Tp, typename _Td>
771 static inline _Tp __c11_atomic_fetch_sub(_Atomic(_Tp)* __a, _Td __delta,
772 memory_order __order) {
773 return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
774 __gcc_atomic::__to_gcc_order(__order));
777 template <typename _Tp>
778 static inline _Tp __c11_atomic_fetch_and(volatile _Atomic(_Tp)* __a,
779 _Tp __pattern, memory_order __order) {
780 return __atomic_fetch_and(&__a->__a_value, __pattern,
781 __gcc_atomic::__to_gcc_order(__order));
784 template <typename _Tp>
785 static inline _Tp __c11_atomic_fetch_and(_Atomic(_Tp)* __a,
786 _Tp __pattern, memory_order __order) {
787 return __atomic_fetch_and(&__a->__a_value, __pattern,
788 __gcc_atomic::__to_gcc_order(__order));
791 template <typename _Tp>
792 static inline _Tp __c11_atomic_fetch_or(volatile _Atomic(_Tp)* __a,
793 _Tp __pattern, memory_order __order) {
794 return __atomic_fetch_or(&__a->__a_value, __pattern,
795 __gcc_atomic::__to_gcc_order(__order));
798 template <typename _Tp>
799 static inline _Tp __c11_atomic_fetch_or(_Atomic(_Tp)* __a, _Tp __pattern,
800 memory_order __order) {
801 return __atomic_fetch_or(&__a->__a_value, __pattern,
802 __gcc_atomic::__to_gcc_order(__order));
805 template <typename _Tp>
806 static inline _Tp __c11_atomic_fetch_xor(volatile _Atomic(_Tp)* __a,
807 _Tp __pattern, memory_order __order) {
808 return __atomic_fetch_xor(&__a->__a_value, __pattern,
809 __gcc_atomic::__to_gcc_order(__order));
812 template <typename _Tp>
813 static inline _Tp __c11_atomic_fetch_xor(_Atomic(_Tp)* __a, _Tp __pattern,
814 memory_order __order) {
815 return __atomic_fetch_xor(&__a->__a_value, __pattern,
816 __gcc_atomic::__to_gcc_order(__order));
818 #endif // _LIBCPP_HAS_GCC_ATOMIC_IMP
821 inline _LIBCPP_INLINE_VISIBILITY
823 kill_dependency(_Tp __y) _NOEXCEPT
830 template <class _Tp, bool = is_integral<_Tp>::value && !is_same<_Tp, bool>::value>
831 struct __atomic_base // false
833 mutable _Atomic(_Tp) __a_;
835 _LIBCPP_INLINE_VISIBILITY
836 bool is_lock_free() const volatile _NOEXCEPT
838 #if defined(_LIBCPP_HAS_C_ATOMIC_IMP)
839 return __c11_atomic_is_lock_free(sizeof(_Tp));
841 return __atomic_is_lock_free(sizeof(_Tp), 0);
844 _LIBCPP_INLINE_VISIBILITY
845 bool is_lock_free() const _NOEXCEPT
846 {return static_cast<__atomic_base const volatile*>(this)->is_lock_free();}
847 _LIBCPP_INLINE_VISIBILITY
848 void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
849 {__c11_atomic_store(&__a_, __d, __m);}
850 _LIBCPP_INLINE_VISIBILITY
851 void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
852 {__c11_atomic_store(&__a_, __d, __m);}
853 _LIBCPP_INLINE_VISIBILITY
854 _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
855 {return __c11_atomic_load(&__a_, __m);}
856 _LIBCPP_INLINE_VISIBILITY
857 _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
858 {return __c11_atomic_load(&__a_, __m);}
859 _LIBCPP_INLINE_VISIBILITY
860 operator _Tp() const volatile _NOEXCEPT {return load();}
861 _LIBCPP_INLINE_VISIBILITY
862 operator _Tp() const _NOEXCEPT {return load();}
863 _LIBCPP_INLINE_VISIBILITY
864 _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
865 {return __c11_atomic_exchange(&__a_, __d, __m);}
866 _LIBCPP_INLINE_VISIBILITY
867 _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
868 {return __c11_atomic_exchange(&__a_, __d, __m);}
869 _LIBCPP_INLINE_VISIBILITY
870 bool compare_exchange_weak(_Tp& __e, _Tp __d,
871 memory_order __s, memory_order __f) volatile _NOEXCEPT
872 {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
873 _LIBCPP_INLINE_VISIBILITY
874 bool compare_exchange_weak(_Tp& __e, _Tp __d,
875 memory_order __s, memory_order __f) _NOEXCEPT
876 {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
877 _LIBCPP_INLINE_VISIBILITY
878 bool compare_exchange_strong(_Tp& __e, _Tp __d,
879 memory_order __s, memory_order __f) volatile _NOEXCEPT
880 {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
881 _LIBCPP_INLINE_VISIBILITY
882 bool compare_exchange_strong(_Tp& __e, _Tp __d,
883 memory_order __s, memory_order __f) _NOEXCEPT
884 {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
885 _LIBCPP_INLINE_VISIBILITY
886 bool compare_exchange_weak(_Tp& __e, _Tp __d,
887 memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
888 {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
889 _LIBCPP_INLINE_VISIBILITY
890 bool compare_exchange_weak(_Tp& __e, _Tp __d,
891 memory_order __m = memory_order_seq_cst) _NOEXCEPT
892 {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
893 _LIBCPP_INLINE_VISIBILITY
894 bool compare_exchange_strong(_Tp& __e, _Tp __d,
895 memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
896 {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
897 _LIBCPP_INLINE_VISIBILITY
898 bool compare_exchange_strong(_Tp& __e, _Tp __d,
899 memory_order __m = memory_order_seq_cst) _NOEXCEPT
900 {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
902 _LIBCPP_INLINE_VISIBILITY
903 #ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
904 __atomic_base() _NOEXCEPT = default;
906 __atomic_base() _NOEXCEPT : __a_() {}
907 #endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
909 _LIBCPP_INLINE_VISIBILITY
910 _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {}
911 #ifndef _LIBCPP_HAS_NO_DELETED_FUNCTIONS
912 __atomic_base(const __atomic_base&) = delete;
913 __atomic_base& operator=(const __atomic_base&) = delete;
914 __atomic_base& operator=(const __atomic_base&) volatile = delete;
915 #else // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
917 __atomic_base(const __atomic_base&);
918 __atomic_base& operator=(const __atomic_base&);
919 __atomic_base& operator=(const __atomic_base&) volatile;
920 #endif // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
926 struct __atomic_base<_Tp, true>
927 : public __atomic_base<_Tp, false>
929 typedef __atomic_base<_Tp, false> __base;
930 _LIBCPP_INLINE_VISIBILITY
931 __atomic_base() _NOEXCEPT _LIBCPP_DEFAULT
932 _LIBCPP_INLINE_VISIBILITY
933 _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}
935 _LIBCPP_INLINE_VISIBILITY
936 _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
937 {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
938 _LIBCPP_INLINE_VISIBILITY
939 _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
940 {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
941 _LIBCPP_INLINE_VISIBILITY
942 _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
943 {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
944 _LIBCPP_INLINE_VISIBILITY
945 _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
946 {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
947 _LIBCPP_INLINE_VISIBILITY
948 _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
949 {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
950 _LIBCPP_INLINE_VISIBILITY
951 _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
952 {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
953 _LIBCPP_INLINE_VISIBILITY
954 _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
955 {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
956 _LIBCPP_INLINE_VISIBILITY
957 _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
958 {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
959 _LIBCPP_INLINE_VISIBILITY
960 _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
961 {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}
962 _LIBCPP_INLINE_VISIBILITY
963 _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
964 {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}
966 _LIBCPP_INLINE_VISIBILITY
967 _Tp operator++(int) volatile _NOEXCEPT {return fetch_add(_Tp(1));}
968 _LIBCPP_INLINE_VISIBILITY
969 _Tp operator++(int) _NOEXCEPT {return fetch_add(_Tp(1));}
970 _LIBCPP_INLINE_VISIBILITY
971 _Tp operator--(int) volatile _NOEXCEPT {return fetch_sub(_Tp(1));}
972 _LIBCPP_INLINE_VISIBILITY
973 _Tp operator--(int) _NOEXCEPT {return fetch_sub(_Tp(1));}
974 _LIBCPP_INLINE_VISIBILITY
975 _Tp operator++() volatile _NOEXCEPT {return fetch_add(_Tp(1)) + _Tp(1);}
976 _LIBCPP_INLINE_VISIBILITY
977 _Tp operator++() _NOEXCEPT {return fetch_add(_Tp(1)) + _Tp(1);}
978 _LIBCPP_INLINE_VISIBILITY
979 _Tp operator--() volatile _NOEXCEPT {return fetch_sub(_Tp(1)) - _Tp(1);}
980 _LIBCPP_INLINE_VISIBILITY
981 _Tp operator--() _NOEXCEPT {return fetch_sub(_Tp(1)) - _Tp(1);}
982 _LIBCPP_INLINE_VISIBILITY
983 _Tp operator+=(_Tp __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
984 _LIBCPP_INLINE_VISIBILITY
985 _Tp operator+=(_Tp __op) _NOEXCEPT {return fetch_add(__op) + __op;}
986 _LIBCPP_INLINE_VISIBILITY
987 _Tp operator-=(_Tp __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
988 _LIBCPP_INLINE_VISIBILITY
989 _Tp operator-=(_Tp __op) _NOEXCEPT {return fetch_sub(__op) - __op;}
990 _LIBCPP_INLINE_VISIBILITY
991 _Tp operator&=(_Tp __op) volatile _NOEXCEPT {return fetch_and(__op) & __op;}
992 _LIBCPP_INLINE_VISIBILITY
993 _Tp operator&=(_Tp __op) _NOEXCEPT {return fetch_and(__op) & __op;}
994 _LIBCPP_INLINE_VISIBILITY
995 _Tp operator|=(_Tp __op) volatile _NOEXCEPT {return fetch_or(__op) | __op;}
996 _LIBCPP_INLINE_VISIBILITY
997 _Tp operator|=(_Tp __op) _NOEXCEPT {return fetch_or(__op) | __op;}
998 _LIBCPP_INLINE_VISIBILITY
999 _Tp operator^=(_Tp __op) volatile _NOEXCEPT {return fetch_xor(__op) ^ __op;}
1000 _LIBCPP_INLINE_VISIBILITY
1001 _Tp operator^=(_Tp __op) _NOEXCEPT {return fetch_xor(__op) ^ __op;}
1006 template <class _Tp>
1008 : public __atomic_base<_Tp>
1010 typedef __atomic_base<_Tp> __base;
1011 _LIBCPP_INLINE_VISIBILITY
1012 atomic() _NOEXCEPT _LIBCPP_DEFAULT
1013 _LIBCPP_INLINE_VISIBILITY
1014 _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}
1016 _LIBCPP_INLINE_VISIBILITY
1017 _Tp operator=(_Tp __d) volatile _NOEXCEPT
1018 {__base::store(__d); return __d;}
1019 _LIBCPP_INLINE_VISIBILITY
1020 _Tp operator=(_Tp __d) _NOEXCEPT
1021 {__base::store(__d); return __d;}
1026 template <class _Tp>
1028 : public __atomic_base<_Tp*>
1030 typedef __atomic_base<_Tp*> __base;
1031 _LIBCPP_INLINE_VISIBILITY
1032 atomic() _NOEXCEPT _LIBCPP_DEFAULT
1033 _LIBCPP_INLINE_VISIBILITY
1034 _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}
1036 _LIBCPP_INLINE_VISIBILITY
1037 _Tp* operator=(_Tp* __d) volatile _NOEXCEPT
1038 {__base::store(__d); return __d;}
1039 _LIBCPP_INLINE_VISIBILITY
1040 _Tp* operator=(_Tp* __d) _NOEXCEPT
1041 {__base::store(__d); return __d;}
1043 _LIBCPP_INLINE_VISIBILITY
1044 _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
1046 {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
1047 _LIBCPP_INLINE_VISIBILITY
1048 _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1049 {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
1050 _LIBCPP_INLINE_VISIBILITY
1051 _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
1053 {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
1054 _LIBCPP_INLINE_VISIBILITY
1055 _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1056 {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
1058 _LIBCPP_INLINE_VISIBILITY
1059 _Tp* operator++(int) volatile _NOEXCEPT {return fetch_add(1);}
1060 _LIBCPP_INLINE_VISIBILITY
1061 _Tp* operator++(int) _NOEXCEPT {return fetch_add(1);}
1062 _LIBCPP_INLINE_VISIBILITY
1063 _Tp* operator--(int) volatile _NOEXCEPT {return fetch_sub(1);}
1064 _LIBCPP_INLINE_VISIBILITY
1065 _Tp* operator--(int) _NOEXCEPT {return fetch_sub(1);}
1066 _LIBCPP_INLINE_VISIBILITY
1067 _Tp* operator++() volatile _NOEXCEPT {return fetch_add(1) + 1;}
1068 _LIBCPP_INLINE_VISIBILITY
1069 _Tp* operator++() _NOEXCEPT {return fetch_add(1) + 1;}
1070 _LIBCPP_INLINE_VISIBILITY
1071 _Tp* operator--() volatile _NOEXCEPT {return fetch_sub(1) - 1;}
1072 _LIBCPP_INLINE_VISIBILITY
1073 _Tp* operator--() _NOEXCEPT {return fetch_sub(1) - 1;}
1074 _LIBCPP_INLINE_VISIBILITY
1075 _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
1076 _LIBCPP_INLINE_VISIBILITY
1077 _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT {return fetch_add(__op) + __op;}
1078 _LIBCPP_INLINE_VISIBILITY
1079 _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
1080 _LIBCPP_INLINE_VISIBILITY
1081 _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT {return fetch_sub(__op) - __op;}
1084 // atomic_is_lock_free
1086 template <class _Tp>
1087 inline _LIBCPP_INLINE_VISIBILITY
1089 atomic_is_lock_free(const volatile atomic<_Tp>* __o) _NOEXCEPT
1091 return __o->is_lock_free();
1094 template <class _Tp>
1095 inline _LIBCPP_INLINE_VISIBILITY
1097 atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT
1099 return __o->is_lock_free();
1104 template <class _Tp>
1105 inline _LIBCPP_INLINE_VISIBILITY
1107 atomic_init(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1109 __c11_atomic_init(&__o->__a_, __d);
1112 template <class _Tp>
1113 inline _LIBCPP_INLINE_VISIBILITY
1115 atomic_init(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1117 __c11_atomic_init(&__o->__a_, __d);
1122 template <class _Tp>
1123 inline _LIBCPP_INLINE_VISIBILITY
1125 atomic_store(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1130 template <class _Tp>
1131 inline _LIBCPP_INLINE_VISIBILITY
1133 atomic_store(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1138 // atomic_store_explicit
1140 template <class _Tp>
1141 inline _LIBCPP_INLINE_VISIBILITY
1143 atomic_store_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1145 __o->store(__d, __m);
1148 template <class _Tp>
1149 inline _LIBCPP_INLINE_VISIBILITY
1151 atomic_store_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1153 __o->store(__d, __m);
1158 template <class _Tp>
1159 inline _LIBCPP_INLINE_VISIBILITY
1161 atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT
1166 template <class _Tp>
1167 inline _LIBCPP_INLINE_VISIBILITY
1169 atomic_load(const atomic<_Tp>* __o) _NOEXCEPT
1174 // atomic_load_explicit
1176 template <class _Tp>
1177 inline _LIBCPP_INLINE_VISIBILITY
1179 atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
1181 return __o->load(__m);
1184 template <class _Tp>
1185 inline _LIBCPP_INLINE_VISIBILITY
1187 atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
1189 return __o->load(__m);
1194 template <class _Tp>
1195 inline _LIBCPP_INLINE_VISIBILITY
1197 atomic_exchange(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1199 return __o->exchange(__d);
1202 template <class _Tp>
1203 inline _LIBCPP_INLINE_VISIBILITY
1205 atomic_exchange(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1207 return __o->exchange(__d);
1210 // atomic_exchange_explicit
1212 template <class _Tp>
1213 inline _LIBCPP_INLINE_VISIBILITY
1215 atomic_exchange_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1217 return __o->exchange(__d, __m);
1220 template <class _Tp>
1221 inline _LIBCPP_INLINE_VISIBILITY
1223 atomic_exchange_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1225 return __o->exchange(__d, __m);
1228 // atomic_compare_exchange_weak
1230 template <class _Tp>
1231 inline _LIBCPP_INLINE_VISIBILITY
1233 atomic_compare_exchange_weak(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1235 return __o->compare_exchange_weak(*__e, __d);
1238 template <class _Tp>
1239 inline _LIBCPP_INLINE_VISIBILITY
1241 atomic_compare_exchange_weak(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1243 return __o->compare_exchange_weak(*__e, __d);
1246 // atomic_compare_exchange_strong
1248 template <class _Tp>
1249 inline _LIBCPP_INLINE_VISIBILITY
1251 atomic_compare_exchange_strong(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1253 return __o->compare_exchange_strong(*__e, __d);
1256 template <class _Tp>
1257 inline _LIBCPP_INLINE_VISIBILITY
1259 atomic_compare_exchange_strong(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1261 return __o->compare_exchange_strong(*__e, __d);
1264 // atomic_compare_exchange_weak_explicit
1266 template <class _Tp>
1267 inline _LIBCPP_INLINE_VISIBILITY
1269 atomic_compare_exchange_weak_explicit(volatile atomic<_Tp>* __o, _Tp* __e,
1271 memory_order __s, memory_order __f) _NOEXCEPT
1273 return __o->compare_exchange_weak(*__e, __d, __s, __f);
1276 template <class _Tp>
1277 inline _LIBCPP_INLINE_VISIBILITY
1279 atomic_compare_exchange_weak_explicit(atomic<_Tp>* __o, _Tp* __e, _Tp __d,
1280 memory_order __s, memory_order __f) _NOEXCEPT
1282 return __o->compare_exchange_weak(*__e, __d, __s, __f);
1285 // atomic_compare_exchange_strong_explicit
1287 template <class _Tp>
1288 inline _LIBCPP_INLINE_VISIBILITY
1290 atomic_compare_exchange_strong_explicit(volatile atomic<_Tp>* __o,
1292 memory_order __s, memory_order __f) _NOEXCEPT
1294 return __o->compare_exchange_strong(*__e, __d, __s, __f);
1297 template <class _Tp>
1298 inline _LIBCPP_INLINE_VISIBILITY
1300 atomic_compare_exchange_strong_explicit(atomic<_Tp>* __o, _Tp* __e,
1302 memory_order __s, memory_order __f) _NOEXCEPT
1304 return __o->compare_exchange_strong(*__e, __d, __s, __f);
1309 template <class _Tp>
1310 inline _LIBCPP_INLINE_VISIBILITY
1313 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1316 atomic_fetch_add(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1318 return __o->fetch_add(__op);
1321 template <class _Tp>
1322 inline _LIBCPP_INLINE_VISIBILITY
1325 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1328 atomic_fetch_add(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1330 return __o->fetch_add(__op);
1333 template <class _Tp>
1334 inline _LIBCPP_INLINE_VISIBILITY
1336 atomic_fetch_add(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
1338 return __o->fetch_add(__op);
1341 template <class _Tp>
1342 inline _LIBCPP_INLINE_VISIBILITY
1344 atomic_fetch_add(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
1346 return __o->fetch_add(__op);
1349 // atomic_fetch_add_explicit
1351 template <class _Tp>
1352 inline _LIBCPP_INLINE_VISIBILITY
1355 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1358 atomic_fetch_add_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1360 return __o->fetch_add(__op, __m);
1363 template <class _Tp>
1364 inline _LIBCPP_INLINE_VISIBILITY
1367 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1370 atomic_fetch_add_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1372 return __o->fetch_add(__op, __m);
1375 template <class _Tp>
1376 inline _LIBCPP_INLINE_VISIBILITY
1378 atomic_fetch_add_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
1379 memory_order __m) _NOEXCEPT
1381 return __o->fetch_add(__op, __m);
1384 template <class _Tp>
1385 inline _LIBCPP_INLINE_VISIBILITY
1387 atomic_fetch_add_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
1389 return __o->fetch_add(__op, __m);
1394 template <class _Tp>
1395 inline _LIBCPP_INLINE_VISIBILITY
1398 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1401 atomic_fetch_sub(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1403 return __o->fetch_sub(__op);
1406 template <class _Tp>
1407 inline _LIBCPP_INLINE_VISIBILITY
1410 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1413 atomic_fetch_sub(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1415 return __o->fetch_sub(__op);
1418 template <class _Tp>
1419 inline _LIBCPP_INLINE_VISIBILITY
1421 atomic_fetch_sub(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
1423 return __o->fetch_sub(__op);
1426 template <class _Tp>
1427 inline _LIBCPP_INLINE_VISIBILITY
1429 atomic_fetch_sub(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
1431 return __o->fetch_sub(__op);
1434 // atomic_fetch_sub_explicit
1436 template <class _Tp>
1437 inline _LIBCPP_INLINE_VISIBILITY
1440 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1443 atomic_fetch_sub_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1445 return __o->fetch_sub(__op, __m);
1448 template <class _Tp>
1449 inline _LIBCPP_INLINE_VISIBILITY
1452 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1455 atomic_fetch_sub_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1457 return __o->fetch_sub(__op, __m);
1460 template <class _Tp>
1461 inline _LIBCPP_INLINE_VISIBILITY
1463 atomic_fetch_sub_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
1464 memory_order __m) _NOEXCEPT
1466 return __o->fetch_sub(__op, __m);
1469 template <class _Tp>
1470 inline _LIBCPP_INLINE_VISIBILITY
1472 atomic_fetch_sub_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
1474 return __o->fetch_sub(__op, __m);
1479 template <class _Tp>
1480 inline _LIBCPP_INLINE_VISIBILITY
1483 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1486 atomic_fetch_and(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1488 return __o->fetch_and(__op);
1491 template <class _Tp>
1492 inline _LIBCPP_INLINE_VISIBILITY
1495 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1498 atomic_fetch_and(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1500 return __o->fetch_and(__op);
1503 // atomic_fetch_and_explicit
1505 template <class _Tp>
1506 inline _LIBCPP_INLINE_VISIBILITY
1509 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1512 atomic_fetch_and_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1514 return __o->fetch_and(__op, __m);
1517 template <class _Tp>
1518 inline _LIBCPP_INLINE_VISIBILITY
1521 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1524 atomic_fetch_and_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1526 return __o->fetch_and(__op, __m);
1531 template <class _Tp>
1532 inline _LIBCPP_INLINE_VISIBILITY
1535 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1538 atomic_fetch_or(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1540 return __o->fetch_or(__op);
1543 template <class _Tp>
1544 inline _LIBCPP_INLINE_VISIBILITY
1547 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1550 atomic_fetch_or(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1552 return __o->fetch_or(__op);
1555 // atomic_fetch_or_explicit
1557 template <class _Tp>
1558 inline _LIBCPP_INLINE_VISIBILITY
1561 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1564 atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1566 return __o->fetch_or(__op, __m);
1569 template <class _Tp>
1570 inline _LIBCPP_INLINE_VISIBILITY
1573 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1576 atomic_fetch_or_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1578 return __o->fetch_or(__op, __m);
1583 template <class _Tp>
1584 inline _LIBCPP_INLINE_VISIBILITY
1587 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1590 atomic_fetch_xor(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1592 return __o->fetch_xor(__op);
1595 template <class _Tp>
1596 inline _LIBCPP_INLINE_VISIBILITY
1599 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1602 atomic_fetch_xor(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1604 return __o->fetch_xor(__op);
1607 // atomic_fetch_xor_explicit
1609 template <class _Tp>
1610 inline _LIBCPP_INLINE_VISIBILITY
1613 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1616 atomic_fetch_xor_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1618 return __o->fetch_xor(__op, __m);
1621 template <class _Tp>
1622 inline _LIBCPP_INLINE_VISIBILITY
1625 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1628 atomic_fetch_xor_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1630 return __o->fetch_xor(__op, __m);
1633 // flag type and operations
1635 typedef struct atomic_flag
1639 _LIBCPP_INLINE_VISIBILITY
1640 bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1641 {return __c11_atomic_exchange(&__a_, true, __m);}
1642 _LIBCPP_INLINE_VISIBILITY
1643 bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT
1644 {return __c11_atomic_exchange(&__a_, true, __m);}
1645 _LIBCPP_INLINE_VISIBILITY
1646 void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1647 {__c11_atomic_store(&__a_, false, __m);}
1648 _LIBCPP_INLINE_VISIBILITY
1649 void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT
1650 {__c11_atomic_store(&__a_, false, __m);}
1652 _LIBCPP_INLINE_VISIBILITY
1653 #ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
1654 atomic_flag() _NOEXCEPT = default;
1656 atomic_flag() _NOEXCEPT : __a_() {}
1657 #endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
1659 _LIBCPP_INLINE_VISIBILITY
1660 atomic_flag(bool __b) _NOEXCEPT : __a_(__b) {}
1662 #ifndef _LIBCPP_HAS_NO_DELETED_FUNCTIONS
1663 atomic_flag(const atomic_flag&) = delete;
1664 atomic_flag& operator=(const atomic_flag&) = delete;
1665 atomic_flag& operator=(const atomic_flag&) volatile = delete;
1666 #else // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
1668 atomic_flag(const atomic_flag&);
1669 atomic_flag& operator=(const atomic_flag&);
1670 atomic_flag& operator=(const atomic_flag&) volatile;
1671 #endif // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
1674 inline _LIBCPP_INLINE_VISIBILITY
1676 atomic_flag_test_and_set(volatile atomic_flag* __o) _NOEXCEPT
1678 return __o->test_and_set();
1681 inline _LIBCPP_INLINE_VISIBILITY
1683 atomic_flag_test_and_set(atomic_flag* __o) _NOEXCEPT
1685 return __o->test_and_set();
1688 inline _LIBCPP_INLINE_VISIBILITY
1690 atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
1692 return __o->test_and_set(__m);
1695 inline _LIBCPP_INLINE_VISIBILITY
1697 atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
1699 return __o->test_and_set(__m);
1702 inline _LIBCPP_INLINE_VISIBILITY
1704 atomic_flag_clear(volatile atomic_flag* __o) _NOEXCEPT
1709 inline _LIBCPP_INLINE_VISIBILITY
1711 atomic_flag_clear(atomic_flag* __o) _NOEXCEPT
1716 inline _LIBCPP_INLINE_VISIBILITY
1718 atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
1723 inline _LIBCPP_INLINE_VISIBILITY
1725 atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
1732 inline _LIBCPP_INLINE_VISIBILITY
1734 atomic_thread_fence(memory_order __m) _NOEXCEPT
1736 __c11_atomic_thread_fence(__m);
1739 inline _LIBCPP_INLINE_VISIBILITY
1741 atomic_signal_fence(memory_order __m) _NOEXCEPT
1743 __c11_atomic_signal_fence(__m);
1746 // Atomics for standard typedef types
1748 typedef atomic<bool> atomic_bool;
1749 typedef atomic<char> atomic_char;
1750 typedef atomic<signed char> atomic_schar;
1751 typedef atomic<unsigned char> atomic_uchar;
1752 typedef atomic<short> atomic_short;
1753 typedef atomic<unsigned short> atomic_ushort;
1754 typedef atomic<int> atomic_int;
1755 typedef atomic<unsigned int> atomic_uint;
1756 typedef atomic<long> atomic_long;
1757 typedef atomic<unsigned long> atomic_ulong;
1758 typedef atomic<long long> atomic_llong;
1759 typedef atomic<unsigned long long> atomic_ullong;
1760 typedef atomic<char16_t> atomic_char16_t;
1761 typedef atomic<char32_t> atomic_char32_t;
1762 typedef atomic<wchar_t> atomic_wchar_t;
1764 typedef atomic<int_least8_t> atomic_int_least8_t;
1765 typedef atomic<uint_least8_t> atomic_uint_least8_t;
1766 typedef atomic<int_least16_t> atomic_int_least16_t;
1767 typedef atomic<uint_least16_t> atomic_uint_least16_t;
1768 typedef atomic<int_least32_t> atomic_int_least32_t;
1769 typedef atomic<uint_least32_t> atomic_uint_least32_t;
1770 typedef atomic<int_least64_t> atomic_int_least64_t;
1771 typedef atomic<uint_least64_t> atomic_uint_least64_t;
1773 typedef atomic<int_fast8_t> atomic_int_fast8_t;
1774 typedef atomic<uint_fast8_t> atomic_uint_fast8_t;
1775 typedef atomic<int_fast16_t> atomic_int_fast16_t;
1776 typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
1777 typedef atomic<int_fast32_t> atomic_int_fast32_t;
1778 typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
1779 typedef atomic<int_fast64_t> atomic_int_fast64_t;
1780 typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
1782 typedef atomic<intptr_t> atomic_intptr_t;
1783 typedef atomic<uintptr_t> atomic_uintptr_t;
1784 typedef atomic<size_t> atomic_size_t;
1785 typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
1786 typedef atomic<intmax_t> atomic_intmax_t;
1787 typedef atomic<uintmax_t> atomic_uintmax_t;
1789 #define ATOMIC_FLAG_INIT {false}
1790 #define ATOMIC_VAR_INIT(__v) {__v}
1792 #define ATOMIC_BOOL_LOCK_FREE __GCC_ATOMIC_BOOL_LOCK_FREE
1793 #define ATOMIC_CHAR_LOCK_FREE __GCC_ATOMIC_CHAR_LOCK_FREE
1794 #define ATOMIC_CHAR16_T_LOCK_FREE __GCC_ATOMIC_CHAR16_T_LOCK_FREE
1795 #define ATOMIC_CHAR32_T_LOCK_FREE __GCC_ATOMIC_CHAR32_T_LOCK_FREE
1796 #define ATOMIC_WCHAR_T_LOCK_FREE __GCC_ATOMIC_WCHAR_T_LOCK_FREE
1797 #define ATOMIC_SHORT_LOCK_FREE __GCC_ATOMIC_SHORT_LOCK_FREE
1798 #define ATOMIC_INT_LOCK_FREE __GCC_ATOMIC_INT_LOCK_FREE
1799 #define ATOMIC_LONG_LOCK_FREE __GCC_ATOMIC_LONG_LOCK_FREE
1800 #define ATOMIC_LLONG_LOCK_FREE __GCC_ATOMIC_LLONG_LOCK_FREE
1801 #define ATOMIC_POINTER_LOCK_FREE __GCC_ATOMIC_POINTER_LOCK_FREE
1803 _LIBCPP_END_NAMESPACE_STD
1805 #endif // _LIBCPP_ATOMIC