2 //===--------------------------- atomic -----------------------------------===//
4 // The LLVM Compiler Infrastructure
6 // This file is distributed under the University of Illinois Open Source
7 // License. See LICENSE.TXT for details.
9 //===----------------------------------------------------------------------===//
11 #ifndef _LIBCPP_ATOMIC
12 #define _LIBCPP_ATOMIC
20 // order and consistency
typedef enum memory_order
{
    memory_order_relaxed,
    memory_order_consume, // load-consume
    memory_order_acquire, // load-acquire
    memory_order_release, // store-release
    memory_order_acq_rel, // store-release load-acquire
    memory_order_seq_cst  // store-release load-acquire
} memory_order;
32 template <class T> T kill_dependency(T y) noexcept;
36 #define ATOMIC_BOOL_LOCK_FREE unspecified
37 #define ATOMIC_CHAR_LOCK_FREE unspecified
38 #define ATOMIC_CHAR16_T_LOCK_FREE unspecified
39 #define ATOMIC_CHAR32_T_LOCK_FREE unspecified
40 #define ATOMIC_WCHAR_T_LOCK_FREE unspecified
41 #define ATOMIC_SHORT_LOCK_FREE unspecified
42 #define ATOMIC_INT_LOCK_FREE unspecified
43 #define ATOMIC_LONG_LOCK_FREE unspecified
44 #define ATOMIC_LLONG_LOCK_FREE unspecified
45 #define ATOMIC_POINTER_LOCK_FREE unspecified
47 // flag type and operations
49 typedef struct atomic_flag
51 bool test_and_set(memory_order m = memory_order_seq_cst) volatile noexcept;
52 bool test_and_set(memory_order m = memory_order_seq_cst) noexcept;
53 void clear(memory_order m = memory_order_seq_cst) volatile noexcept;
54 void clear(memory_order m = memory_order_seq_cst) noexcept;
55 atomic_flag() noexcept = default;
56 atomic_flag(const atomic_flag&) = delete;
57 atomic_flag& operator=(const atomic_flag&) = delete;
58 atomic_flag& operator=(const atomic_flag&) volatile = delete;
62 atomic_flag_test_and_set(volatile atomic_flag* obj) noexcept;
65 atomic_flag_test_and_set(atomic_flag* obj) noexcept;
68 atomic_flag_test_and_set_explicit(volatile atomic_flag* obj,
69 memory_order m) noexcept;
72 atomic_flag_test_and_set_explicit(atomic_flag* obj, memory_order m) noexcept;
75 atomic_flag_clear(volatile atomic_flag* obj) noexcept;
78 atomic_flag_clear(atomic_flag* obj) noexcept;
81 atomic_flag_clear_explicit(volatile atomic_flag* obj, memory_order m) noexcept;
84 atomic_flag_clear_explicit(atomic_flag* obj, memory_order m) noexcept;
86 #define ATOMIC_FLAG_INIT see below
87 #define ATOMIC_VAR_INIT(value) see below
92 bool is_lock_free() const volatile noexcept;
93 bool is_lock_free() const noexcept;
94 void store(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
95 void store(T desr, memory_order m = memory_order_seq_cst) noexcept;
96 T load(memory_order m = memory_order_seq_cst) const volatile noexcept;
97 T load(memory_order m = memory_order_seq_cst) const noexcept;
98 operator T() const volatile noexcept;
99 operator T() const noexcept;
100 T exchange(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
101 T exchange(T desr, memory_order m = memory_order_seq_cst) noexcept;
102 bool compare_exchange_weak(T& expc, T desr,
103 memory_order s, memory_order f) volatile noexcept;
104 bool compare_exchange_weak(T& expc, T desr, memory_order s, memory_order f) noexcept;
105 bool compare_exchange_strong(T& expc, T desr,
106 memory_order s, memory_order f) volatile noexcept;
107 bool compare_exchange_strong(T& expc, T desr,
108 memory_order s, memory_order f) noexcept;
109 bool compare_exchange_weak(T& expc, T desr,
110 memory_order m = memory_order_seq_cst) volatile noexcept;
111 bool compare_exchange_weak(T& expc, T desr,
112 memory_order m = memory_order_seq_cst) noexcept;
113 bool compare_exchange_strong(T& expc, T desr,
114 memory_order m = memory_order_seq_cst) volatile noexcept;
115 bool compare_exchange_strong(T& expc, T desr,
116 memory_order m = memory_order_seq_cst) noexcept;
118 atomic() noexcept = default;
119 constexpr atomic(T desr) noexcept;
120 atomic(const atomic&) = delete;
121 atomic& operator=(const atomic&) = delete;
122 atomic& operator=(const atomic&) volatile = delete;
123 T operator=(T) volatile noexcept;
124 T operator=(T) noexcept;
128 struct atomic<integral>
130 bool is_lock_free() const volatile noexcept;
131 bool is_lock_free() const noexcept;
132 void store(integral desr, memory_order m = memory_order_seq_cst) volatile noexcept;
133 void store(integral desr, memory_order m = memory_order_seq_cst) noexcept;
134 integral load(memory_order m = memory_order_seq_cst) const volatile noexcept;
135 integral load(memory_order m = memory_order_seq_cst) const noexcept;
136 operator integral() const volatile noexcept;
137 operator integral() const noexcept;
138 integral exchange(integral desr,
139 memory_order m = memory_order_seq_cst) volatile noexcept;
140 integral exchange(integral desr, memory_order m = memory_order_seq_cst) noexcept;
141 bool compare_exchange_weak(integral& expc, integral desr,
142 memory_order s, memory_order f) volatile noexcept;
143 bool compare_exchange_weak(integral& expc, integral desr,
144 memory_order s, memory_order f) noexcept;
145 bool compare_exchange_strong(integral& expc, integral desr,
146 memory_order s, memory_order f) volatile noexcept;
147 bool compare_exchange_strong(integral& expc, integral desr,
148 memory_order s, memory_order f) noexcept;
149 bool compare_exchange_weak(integral& expc, integral desr,
150 memory_order m = memory_order_seq_cst) volatile noexcept;
151 bool compare_exchange_weak(integral& expc, integral desr,
152 memory_order m = memory_order_seq_cst) noexcept;
153 bool compare_exchange_strong(integral& expc, integral desr,
154 memory_order m = memory_order_seq_cst) volatile noexcept;
155 bool compare_exchange_strong(integral& expc, integral desr,
156 memory_order m = memory_order_seq_cst) noexcept;
159 fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
160 integral fetch_add(integral op, memory_order m = memory_order_seq_cst) noexcept;
162 fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
163 integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) noexcept;
165 fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
166 integral fetch_and(integral op, memory_order m = memory_order_seq_cst) noexcept;
168 fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
169 integral fetch_or(integral op, memory_order m = memory_order_seq_cst) noexcept;
171 fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
172 integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) noexcept;
174 atomic() noexcept = default;
175 constexpr atomic(integral desr) noexcept;
176 atomic(const atomic&) = delete;
177 atomic& operator=(const atomic&) = delete;
178 atomic& operator=(const atomic&) volatile = delete;
179 integral operator=(integral desr) volatile noexcept;
180 integral operator=(integral desr) noexcept;
182 integral operator++(int) volatile noexcept;
183 integral operator++(int) noexcept;
184 integral operator--(int) volatile noexcept;
185 integral operator--(int) noexcept;
186 integral operator++() volatile noexcept;
187 integral operator++() noexcept;
188 integral operator--() volatile noexcept;
189 integral operator--() noexcept;
190 integral operator+=(integral op) volatile noexcept;
191 integral operator+=(integral op) noexcept;
192 integral operator-=(integral op) volatile noexcept;
193 integral operator-=(integral op) noexcept;
194 integral operator&=(integral op) volatile noexcept;
195 integral operator&=(integral op) noexcept;
196 integral operator|=(integral op) volatile noexcept;
197 integral operator|=(integral op) noexcept;
198 integral operator^=(integral op) volatile noexcept;
199 integral operator^=(integral op) noexcept;
205 bool is_lock_free() const volatile noexcept;
206 bool is_lock_free() const noexcept;
207 void store(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
208 void store(T* desr, memory_order m = memory_order_seq_cst) noexcept;
209 T* load(memory_order m = memory_order_seq_cst) const volatile noexcept;
210 T* load(memory_order m = memory_order_seq_cst) const noexcept;
211 operator T*() const volatile noexcept;
212 operator T*() const noexcept;
213 T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
214 T* exchange(T* desr, memory_order m = memory_order_seq_cst) noexcept;
215 bool compare_exchange_weak(T*& expc, T* desr,
216 memory_order s, memory_order f) volatile noexcept;
217 bool compare_exchange_weak(T*& expc, T* desr,
218 memory_order s, memory_order f) noexcept;
219 bool compare_exchange_strong(T*& expc, T* desr,
220 memory_order s, memory_order f) volatile noexcept;
221 bool compare_exchange_strong(T*& expc, T* desr,
222 memory_order s, memory_order f) noexcept;
223 bool compare_exchange_weak(T*& expc, T* desr,
224 memory_order m = memory_order_seq_cst) volatile noexcept;
225 bool compare_exchange_weak(T*& expc, T* desr,
226 memory_order m = memory_order_seq_cst) noexcept;
227 bool compare_exchange_strong(T*& expc, T* desr,
228 memory_order m = memory_order_seq_cst) volatile noexcept;
229 bool compare_exchange_strong(T*& expc, T* desr,
230 memory_order m = memory_order_seq_cst) noexcept;
231 T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
232 T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
233 T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
234 T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
236 atomic() noexcept = default;
237 constexpr atomic(T* desr) noexcept;
238 atomic(const atomic&) = delete;
239 atomic& operator=(const atomic&) = delete;
240 atomic& operator=(const atomic&) volatile = delete;
242 T* operator=(T*) volatile noexcept;
243 T* operator=(T*) noexcept;
244 T* operator++(int) volatile noexcept;
245 T* operator++(int) noexcept;
246 T* operator--(int) volatile noexcept;
247 T* operator--(int) noexcept;
248 T* operator++() volatile noexcept;
249 T* operator++() noexcept;
250 T* operator--() volatile noexcept;
251 T* operator--() noexcept;
252 T* operator+=(ptrdiff_t op) volatile noexcept;
253 T* operator+=(ptrdiff_t op) noexcept;
254 T* operator-=(ptrdiff_t op) volatile noexcept;
255 T* operator-=(ptrdiff_t op) noexcept;
261 atomic_is_lock_free(const volatile atomic<T>* obj) noexcept;
265 atomic_is_lock_free(const atomic<T>* obj) noexcept;
269 atomic_init(volatile atomic<T>* obj, T desr) noexcept;
273 atomic_init(atomic<T>* obj, T desr) noexcept;
277 atomic_store(volatile atomic<T>* obj, T desr) noexcept;
281 atomic_store(atomic<T>* obj, T desr) noexcept;
285 atomic_store_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
289 atomic_store_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
293 atomic_load(const volatile atomic<T>* obj) noexcept;
297 atomic_load(const atomic<T>* obj) noexcept;
301 atomic_load_explicit(const volatile atomic<T>* obj, memory_order m) noexcept;
305 atomic_load_explicit(const atomic<T>* obj, memory_order m) noexcept;
309 atomic_exchange(volatile atomic<T>* obj, T desr) noexcept;
313 atomic_exchange(atomic<T>* obj, T desr) noexcept;
317 atomic_exchange_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
321 atomic_exchange_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
325 atomic_compare_exchange_weak(volatile atomic<T>* obj, T* expc, T desr) noexcept;
329 atomic_compare_exchange_weak(atomic<T>* obj, T* expc, T desr) noexcept;
333 atomic_compare_exchange_strong(volatile atomic<T>* obj, T* expc, T desr) noexcept;
337 atomic_compare_exchange_strong(atomic<T>* obj, T* expc, T desr) noexcept;
341 atomic_compare_exchange_weak_explicit(volatile atomic<T>* obj, T* expc,
343 memory_order s, memory_order f) noexcept;
347 atomic_compare_exchange_weak_explicit(atomic<T>* obj, T* expc, T desr,
348 memory_order s, memory_order f) noexcept;
352 atomic_compare_exchange_strong_explicit(volatile atomic<T>* obj,
354 memory_order s, memory_order f) noexcept;
358 atomic_compare_exchange_strong_explicit(atomic<T>* obj, T* expc,
360 memory_order s, memory_order f) noexcept;
362 template <class Integral>
364 atomic_fetch_add(volatile atomic<Integral>* obj, Integral op) noexcept;
366 template <class Integral>
368 atomic_fetch_add(atomic<Integral>* obj, Integral op) noexcept;
370 template <class Integral>
372 atomic_fetch_add_explicit(volatile atomic<Integral>* obj, Integral op,
373 memory_order m) noexcept;
374 template <class Integral>
376 atomic_fetch_add_explicit(atomic<Integral>* obj, Integral op,
377 memory_order m) noexcept;
378 template <class Integral>
380 atomic_fetch_sub(volatile atomic<Integral>* obj, Integral op) noexcept;
382 template <class Integral>
384 atomic_fetch_sub(atomic<Integral>* obj, Integral op) noexcept;
386 template <class Integral>
388 atomic_fetch_sub_explicit(volatile atomic<Integral>* obj, Integral op,
389 memory_order m) noexcept;
390 template <class Integral>
392 atomic_fetch_sub_explicit(atomic<Integral>* obj, Integral op,
393 memory_order m) noexcept;
394 template <class Integral>
396 atomic_fetch_and(volatile atomic<Integral>* obj, Integral op) noexcept;
398 template <class Integral>
400 atomic_fetch_and(atomic<Integral>* obj, Integral op) noexcept;
402 template <class Integral>
404 atomic_fetch_and_explicit(volatile atomic<Integral>* obj, Integral op,
405 memory_order m) noexcept;
406 template <class Integral>
408 atomic_fetch_and_explicit(atomic<Integral>* obj, Integral op,
409 memory_order m) noexcept;
410 template <class Integral>
412 atomic_fetch_or(volatile atomic<Integral>* obj, Integral op) noexcept;
414 template <class Integral>
416 atomic_fetch_or(atomic<Integral>* obj, Integral op) noexcept;
418 template <class Integral>
420 atomic_fetch_or_explicit(volatile atomic<Integral>* obj, Integral op,
421 memory_order m) noexcept;
422 template <class Integral>
424 atomic_fetch_or_explicit(atomic<Integral>* obj, Integral op,
425 memory_order m) noexcept;
426 template <class Integral>
428 atomic_fetch_xor(volatile atomic<Integral>* obj, Integral op) noexcept;
430 template <class Integral>
432 atomic_fetch_xor(atomic<Integral>* obj, Integral op) noexcept;
434 template <class Integral>
436 atomic_fetch_xor_explicit(volatile atomic<Integral>* obj, Integral op,
437 memory_order m) noexcept;
438 template <class Integral>
440 atomic_fetch_xor_explicit(atomic<Integral>* obj, Integral op,
441 memory_order m) noexcept;
445 atomic_fetch_add(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
449 atomic_fetch_add(atomic<T*>* obj, ptrdiff_t op) noexcept;
453 atomic_fetch_add_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
454 memory_order m) noexcept;
457 atomic_fetch_add_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
461 atomic_fetch_sub(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
465 atomic_fetch_sub(atomic<T*>* obj, ptrdiff_t op) noexcept;
469 atomic_fetch_sub_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
470 memory_order m) noexcept;
473 atomic_fetch_sub_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
475 // Atomics for standard typedef types
477 typedef atomic<bool> atomic_bool;
478 typedef atomic<char> atomic_char;
479 typedef atomic<signed char> atomic_schar;
480 typedef atomic<unsigned char> atomic_uchar;
481 typedef atomic<short> atomic_short;
482 typedef atomic<unsigned short> atomic_ushort;
483 typedef atomic<int> atomic_int;
484 typedef atomic<unsigned int> atomic_uint;
485 typedef atomic<long> atomic_long;
486 typedef atomic<unsigned long> atomic_ulong;
487 typedef atomic<long long> atomic_llong;
488 typedef atomic<unsigned long long> atomic_ullong;
489 typedef atomic<char16_t> atomic_char16_t;
490 typedef atomic<char32_t> atomic_char32_t;
491 typedef atomic<wchar_t> atomic_wchar_t;
493 typedef atomic<int_least8_t> atomic_int_least8_t;
494 typedef atomic<uint_least8_t> atomic_uint_least8_t;
495 typedef atomic<int_least16_t> atomic_int_least16_t;
496 typedef atomic<uint_least16_t> atomic_uint_least16_t;
497 typedef atomic<int_least32_t> atomic_int_least32_t;
498 typedef atomic<uint_least32_t> atomic_uint_least32_t;
499 typedef atomic<int_least64_t> atomic_int_least64_t;
500 typedef atomic<uint_least64_t> atomic_uint_least64_t;
502 typedef atomic<int_fast8_t> atomic_int_fast8_t;
503 typedef atomic<uint_fast8_t> atomic_uint_fast8_t;
504 typedef atomic<int_fast16_t> atomic_int_fast16_t;
505 typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
506 typedef atomic<int_fast32_t> atomic_int_fast32_t;
507 typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
508 typedef atomic<int_fast64_t> atomic_int_fast64_t;
509 typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
511 typedef atomic<intptr_t> atomic_intptr_t;
512 typedef atomic<uintptr_t> atomic_uintptr_t;
513 typedef atomic<size_t> atomic_size_t;
514 typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
515 typedef atomic<intmax_t> atomic_intmax_t;
516 typedef atomic<uintmax_t> atomic_uintmax_t;
520 void atomic_thread_fence(memory_order m) noexcept;
521 void atomic_signal_fence(memory_order m) noexcept;
530 #include <type_traits>
532 #if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
533 #pragma GCC system_header
536 #ifdef _LIBCPP_HAS_NO_THREADS
537 #error <atomic> is not supported on this single threaded system
538 #else // !_LIBCPP_HAS_NO_THREADS
540 _LIBCPP_BEGIN_NAMESPACE_STD
542 #if !__has_feature(cxx_atomic) && _GNUC_VER < 407
543 #error <atomic> is not implemented
// Real (compiled) definition of memory_order.  The enumerator order is
// significant: __gcc_atomic::__to_gcc_order maps these values onto the
// GCC __ATOMIC_* constants.
typedef enum memory_order
{
    memory_order_relaxed, memory_order_consume, memory_order_acquire,
    memory_order_release, memory_order_acq_rel, memory_order_seq_cst
} memory_order;
553 namespace __gcc_atomic {
554 template <typename _Tp>
555 struct __gcc_atomic_t {
556 __gcc_atomic_t() _NOEXCEPT {}
557 _LIBCPP_CONSTEXPR explicit __gcc_atomic_t(_Tp value) _NOEXCEPT
558 : __a_value(value) {}
561 #define _Atomic(x) __gcc_atomic::__gcc_atomic_t<x>
563 template <typename _Tp> _Tp __create();
565 template <typename _Tp, typename _Td>
566 typename enable_if<sizeof(_Tp()->__a_value = __create<_Td>()), char>::type
567 __test_atomic_assignable(int);
568 template <typename _Tp, typename _Up>
569 __two __test_atomic_assignable(...);
571 template <typename _Tp, typename _Td>
572 struct __can_assign {
573 static const bool value =
574 sizeof(__test_atomic_assignable<_Tp, _Td>(1)) == sizeof(char);
577 static inline constexpr int __to_gcc_order(memory_order __order) {
578 // Avoid switch statement to make this a constexpr.
579 return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
580 (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
581 (__order == memory_order_release ? __ATOMIC_RELEASE:
582 (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
583 (__order == memory_order_acq_rel ? __ATOMIC_ACQ_REL:
584 __ATOMIC_CONSUME))));
587 static inline constexpr int __to_gcc_failure_order(memory_order __order) {
588 // Avoid switch statement to make this a constexpr.
589 return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
590 (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
591 (__order == memory_order_release ? __ATOMIC_RELAXED:
592 (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
593 (__order == memory_order_acq_rel ? __ATOMIC_ACQUIRE:
594 __ATOMIC_CONSUME))));
597 } // namespace __gcc_atomic
599 template <typename _Tp>
602 __gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value>::type
603 __c11_atomic_init(volatile _Atomic(_Tp)* __a, _Tp __val) {
604 __a->__a_value = __val;
607 template <typename _Tp>
610 !__gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value &&
611 __gcc_atomic::__can_assign< _Atomic(_Tp)*, _Tp>::value>::type
612 __c11_atomic_init(volatile _Atomic(_Tp)* __a, _Tp __val) {
613 // [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because
614 // the default operator= in an object is not volatile, a byte-by-byte copy
616 volatile char* to = reinterpret_cast<volatile char*>(&__a->__a_value);
617 volatile char* end = to + sizeof(_Tp);
618 char* from = reinterpret_cast<char*>(&__val);
624 template <typename _Tp>
625 static inline void __c11_atomic_init(_Atomic(_Tp)* __a, _Tp __val) {
626 __a->__a_value = __val;
629 static inline void __c11_atomic_thread_fence(memory_order __order) {
630 __atomic_thread_fence(__gcc_atomic::__to_gcc_order(__order));
633 static inline void __c11_atomic_signal_fence(memory_order __order) {
634 __atomic_signal_fence(__gcc_atomic::__to_gcc_order(__order));
637 template <typename _Tp>
638 static inline void __c11_atomic_store(volatile _Atomic(_Tp)* __a, _Tp __val,
639 memory_order __order) {
640 return __atomic_store(&__a->__a_value, &__val,
641 __gcc_atomic::__to_gcc_order(__order));
644 template <typename _Tp>
645 static inline void __c11_atomic_store(_Atomic(_Tp)* __a, _Tp __val,
646 memory_order __order) {
647 __atomic_store(&__a->__a_value, &__val,
648 __gcc_atomic::__to_gcc_order(__order));
651 template <typename _Tp>
652 static inline _Tp __c11_atomic_load(volatile _Atomic(_Tp)* __a,
653 memory_order __order) {
655 __atomic_load(&__a->__a_value, &__ret,
656 __gcc_atomic::__to_gcc_order(__order));
660 template <typename _Tp>
661 static inline _Tp __c11_atomic_load(_Atomic(_Tp)* __a, memory_order __order) {
663 __atomic_load(&__a->__a_value, &__ret,
664 __gcc_atomic::__to_gcc_order(__order));
668 template <typename _Tp>
669 static inline _Tp __c11_atomic_exchange(volatile _Atomic(_Tp)* __a,
670 _Tp __value, memory_order __order) {
672 __atomic_exchange(&__a->__a_value, &__value, &__ret,
673 __gcc_atomic::__to_gcc_order(__order));
677 template <typename _Tp>
678 static inline _Tp __c11_atomic_exchange(_Atomic(_Tp)* __a, _Tp __value,
679 memory_order __order) {
681 __atomic_exchange(&__a->__a_value, &__value, &__ret,
682 __gcc_atomic::__to_gcc_order(__order));
686 template <typename _Tp>
687 static inline bool __c11_atomic_compare_exchange_strong(
688 volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
689 memory_order __success, memory_order __failure) {
690 return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
692 __gcc_atomic::__to_gcc_order(__success),
693 __gcc_atomic::__to_gcc_failure_order(__failure));
696 template <typename _Tp>
697 static inline bool __c11_atomic_compare_exchange_strong(
698 _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
699 memory_order __failure) {
700 return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
702 __gcc_atomic::__to_gcc_order(__success),
703 __gcc_atomic::__to_gcc_failure_order(__failure));
706 template <typename _Tp>
707 static inline bool __c11_atomic_compare_exchange_weak(
708 volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
709 memory_order __success, memory_order __failure) {
710 return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
712 __gcc_atomic::__to_gcc_order(__success),
713 __gcc_atomic::__to_gcc_failure_order(__failure));
716 template <typename _Tp>
717 static inline bool __c11_atomic_compare_exchange_weak(
718 _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
719 memory_order __failure) {
720 return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
722 __gcc_atomic::__to_gcc_order(__success),
723 __gcc_atomic::__to_gcc_failure_order(__failure));
// Scale factor applied to fetch_add/fetch_sub deltas: 1 for arithmetic
// types, sizeof(_Tp) for _Tp* so pointer atomics advance by whole objects.
template <typename _Tp>
struct __skip_amt { enum {value = 1}; };

template <typename _Tp>
struct __skip_amt<_Tp*> { enum {value = sizeof(_Tp)}; };

// FIXME: Haven't figured out what the spec says about using arrays with
// atomic_fetch_add. Force a failure rather than creating bad behavior.
template <typename _Tp>
struct __skip_amt<_Tp[]> { };
template <typename _Tp, int n>
struct __skip_amt<_Tp[n]> { };
739 template <typename _Tp, typename _Td>
740 static inline _Tp __c11_atomic_fetch_add(volatile _Atomic(_Tp)* __a,
741 _Td __delta, memory_order __order) {
742 return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
743 __gcc_atomic::__to_gcc_order(__order));
746 template <typename _Tp, typename _Td>
747 static inline _Tp __c11_atomic_fetch_add(_Atomic(_Tp)* __a, _Td __delta,
748 memory_order __order) {
749 return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
750 __gcc_atomic::__to_gcc_order(__order));
753 template <typename _Tp, typename _Td>
754 static inline _Tp __c11_atomic_fetch_sub(volatile _Atomic(_Tp)* __a,
755 _Td __delta, memory_order __order) {
756 return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
757 __gcc_atomic::__to_gcc_order(__order));
760 template <typename _Tp, typename _Td>
761 static inline _Tp __c11_atomic_fetch_sub(_Atomic(_Tp)* __a, _Td __delta,
762 memory_order __order) {
763 return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
764 __gcc_atomic::__to_gcc_order(__order));
767 template <typename _Tp>
768 static inline _Tp __c11_atomic_fetch_and(volatile _Atomic(_Tp)* __a,
769 _Tp __pattern, memory_order __order) {
770 return __atomic_fetch_and(&__a->__a_value, __pattern,
771 __gcc_atomic::__to_gcc_order(__order));
774 template <typename _Tp>
775 static inline _Tp __c11_atomic_fetch_and(_Atomic(_Tp)* __a,
776 _Tp __pattern, memory_order __order) {
777 return __atomic_fetch_and(&__a->__a_value, __pattern,
778 __gcc_atomic::__to_gcc_order(__order));
781 template <typename _Tp>
782 static inline _Tp __c11_atomic_fetch_or(volatile _Atomic(_Tp)* __a,
783 _Tp __pattern, memory_order __order) {
784 return __atomic_fetch_or(&__a->__a_value, __pattern,
785 __gcc_atomic::__to_gcc_order(__order));
788 template <typename _Tp>
789 static inline _Tp __c11_atomic_fetch_or(_Atomic(_Tp)* __a, _Tp __pattern,
790 memory_order __order) {
791 return __atomic_fetch_or(&__a->__a_value, __pattern,
792 __gcc_atomic::__to_gcc_order(__order));
795 template <typename _Tp>
796 static inline _Tp __c11_atomic_fetch_xor(volatile _Atomic(_Tp)* __a,
797 _Tp __pattern, memory_order __order) {
798 return __atomic_fetch_xor(&__a->__a_value, __pattern,
799 __gcc_atomic::__to_gcc_order(__order));
802 template <typename _Tp>
803 static inline _Tp __c11_atomic_fetch_xor(_Atomic(_Tp)* __a, _Tp __pattern,
804 memory_order __order) {
805 return __atomic_fetch_xor(&__a->__a_value, __pattern,
806 __gcc_atomic::__to_gcc_order(__order));
808 #endif // _GNUC_VER >= 407
811 inline _LIBCPP_INLINE_VISIBILITY
813 kill_dependency(_Tp __y) _NOEXCEPT
820 template <class _Tp, bool = is_integral<_Tp>::value && !is_same<_Tp, bool>::value>
821 struct __atomic_base // false
823 mutable _Atomic(_Tp) __a_;
825 _LIBCPP_INLINE_VISIBILITY
826 bool is_lock_free() const volatile _NOEXCEPT
828 #if __has_feature(cxx_atomic)
829 return __c11_atomic_is_lock_free(sizeof(_Tp));
831 return __atomic_is_lock_free(sizeof(_Tp), 0);
834 _LIBCPP_INLINE_VISIBILITY
835 bool is_lock_free() const _NOEXCEPT
836 {return static_cast<__atomic_base const volatile*>(this)->is_lock_free();}
837 _LIBCPP_INLINE_VISIBILITY
838 void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
839 {__c11_atomic_store(&__a_, __d, __m);}
840 _LIBCPP_INLINE_VISIBILITY
841 void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
842 {__c11_atomic_store(&__a_, __d, __m);}
843 _LIBCPP_INLINE_VISIBILITY
844 _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
845 {return __c11_atomic_load(&__a_, __m);}
846 _LIBCPP_INLINE_VISIBILITY
847 _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
848 {return __c11_atomic_load(&__a_, __m);}
849 _LIBCPP_INLINE_VISIBILITY
850 operator _Tp() const volatile _NOEXCEPT {return load();}
851 _LIBCPP_INLINE_VISIBILITY
852 operator _Tp() const _NOEXCEPT {return load();}
853 _LIBCPP_INLINE_VISIBILITY
854 _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
855 {return __c11_atomic_exchange(&__a_, __d, __m);}
856 _LIBCPP_INLINE_VISIBILITY
857 _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
858 {return __c11_atomic_exchange(&__a_, __d, __m);}
859 _LIBCPP_INLINE_VISIBILITY
860 bool compare_exchange_weak(_Tp& __e, _Tp __d,
861 memory_order __s, memory_order __f) volatile _NOEXCEPT
862 {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
863 _LIBCPP_INLINE_VISIBILITY
864 bool compare_exchange_weak(_Tp& __e, _Tp __d,
865 memory_order __s, memory_order __f) _NOEXCEPT
866 {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
867 _LIBCPP_INLINE_VISIBILITY
868 bool compare_exchange_strong(_Tp& __e, _Tp __d,
869 memory_order __s, memory_order __f) volatile _NOEXCEPT
870 {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
871 _LIBCPP_INLINE_VISIBILITY
872 bool compare_exchange_strong(_Tp& __e, _Tp __d,
873 memory_order __s, memory_order __f) _NOEXCEPT
874 {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
875 _LIBCPP_INLINE_VISIBILITY
876 bool compare_exchange_weak(_Tp& __e, _Tp __d,
877 memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
878 {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
879 _LIBCPP_INLINE_VISIBILITY
880 bool compare_exchange_weak(_Tp& __e, _Tp __d,
881 memory_order __m = memory_order_seq_cst) _NOEXCEPT
882 {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
883 _LIBCPP_INLINE_VISIBILITY
884 bool compare_exchange_strong(_Tp& __e, _Tp __d,
885 memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
886 {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
887 _LIBCPP_INLINE_VISIBILITY
888 bool compare_exchange_strong(_Tp& __e, _Tp __d,
889 memory_order __m = memory_order_seq_cst) _NOEXCEPT
890 {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
892 _LIBCPP_INLINE_VISIBILITY
893 #ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
894 __atomic_base() _NOEXCEPT = default;
896 __atomic_base() _NOEXCEPT : __a_() {}
897 #endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
899 _LIBCPP_INLINE_VISIBILITY
900 _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {}
901 #ifndef _LIBCPP_HAS_NO_DELETED_FUNCTIONS
902 __atomic_base(const __atomic_base&) = delete;
903 __atomic_base& operator=(const __atomic_base&) = delete;
904 __atomic_base& operator=(const __atomic_base&) volatile = delete;
905 #else // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
907 __atomic_base(const __atomic_base&);
908 __atomic_base& operator=(const __atomic_base&);
909 __atomic_base& operator=(const __atomic_base&) volatile;
910 #endif // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
// Partial specialization of __atomic_base selected when _Tp is integral.
// Adds the arithmetic/bitwise read-modify-write operations (fetch_add,
// fetch_sub, fetch_and, fetch_or, fetch_xor) and the derived ++/--/compound
// assignment operators on top of the generic base (load/store/exchange).
// All RMW operations forward to the __c11_atomic_* compiler builtins on the
// inherited __a_ member.
// NOTE(review): this excerpt omits interleaved original lines (e.g. the
// enclosing template header and braces), so the block is documented in
// place without restructuring.
916 struct __atomic_base<_Tp, true>
917 : public __atomic_base<_Tp, false>
919 typedef __atomic_base<_Tp, false> __base;
920 _LIBCPP_INLINE_VISIBILITY
921 __atomic_base() _NOEXCEPT _LIBCPP_DEFAULT
922 _LIBCPP_INLINE_VISIBILITY
923 _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}
// fetch_* pairs: one volatile-qualified and one plain overload each, both
// delegating to the corresponding __c11_atomic_fetch_* builtin, which
// performs the operation atomically and yields the value held *before*
// the operation.
925 _LIBCPP_INLINE_VISIBILITY
926 _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
927 {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
928 _LIBCPP_INLINE_VISIBILITY
929 _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
930 {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
931 _LIBCPP_INLINE_VISIBILITY
932 _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
933 {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
934 _LIBCPP_INLINE_VISIBILITY
935 _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
936 {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
937 _LIBCPP_INLINE_VISIBILITY
938 _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
939 {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
940 _LIBCPP_INLINE_VISIBILITY
941 _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
942 {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
943 _LIBCPP_INLINE_VISIBILITY
944 _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
945 {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
946 _LIBCPP_INLINE_VISIBILITY
947 _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
948 {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
949 _LIBCPP_INLINE_VISIBILITY
950 _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
951 {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}
952 _LIBCPP_INLINE_VISIBILITY
953 _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
954 {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}
// Post-increment/decrement: return the value fetched *before* the update.
956 _LIBCPP_INLINE_VISIBILITY
957 _Tp operator++(int) volatile _NOEXCEPT {return fetch_add(_Tp(1));}
958 _LIBCPP_INLINE_VISIBILITY
959 _Tp operator++(int) _NOEXCEPT {return fetch_add(_Tp(1));}
960 _LIBCPP_INLINE_VISIBILITY
961 _Tp operator--(int) volatile _NOEXCEPT {return fetch_sub(_Tp(1));}
962 _LIBCPP_INLINE_VISIBILITY
963 _Tp operator--(int) _NOEXCEPT {return fetch_sub(_Tp(1));}
// Pre-increment/decrement and compound assignment: recompute the *new*
// value locally from the fetched old value and the operand, instead of
// re-reading the atomic (which another thread may have changed meanwhile).
964 _LIBCPP_INLINE_VISIBILITY
965 _Tp operator++() volatile _NOEXCEPT {return fetch_add(_Tp(1)) + _Tp(1);}
966 _LIBCPP_INLINE_VISIBILITY
967 _Tp operator++() _NOEXCEPT {return fetch_add(_Tp(1)) + _Tp(1);}
968 _LIBCPP_INLINE_VISIBILITY
969 _Tp operator--() volatile _NOEXCEPT {return fetch_sub(_Tp(1)) - _Tp(1);}
970 _LIBCPP_INLINE_VISIBILITY
971 _Tp operator--() _NOEXCEPT {return fetch_sub(_Tp(1)) - _Tp(1);}
972 _LIBCPP_INLINE_VISIBILITY
973 _Tp operator+=(_Tp __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
974 _LIBCPP_INLINE_VISIBILITY
975 _Tp operator+=(_Tp __op) _NOEXCEPT {return fetch_add(__op) + __op;}
976 _LIBCPP_INLINE_VISIBILITY
977 _Tp operator-=(_Tp __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
978 _LIBCPP_INLINE_VISIBILITY
979 _Tp operator-=(_Tp __op) _NOEXCEPT {return fetch_sub(__op) - __op;}
980 _LIBCPP_INLINE_VISIBILITY
981 _Tp operator&=(_Tp __op) volatile _NOEXCEPT {return fetch_and(__op) & __op;}
982 _LIBCPP_INLINE_VISIBILITY
983 _Tp operator&=(_Tp __op) _NOEXCEPT {return fetch_and(__op) & __op;}
984 _LIBCPP_INLINE_VISIBILITY
985 _Tp operator|=(_Tp __op) volatile _NOEXCEPT {return fetch_or(__op) | __op;}
986 _LIBCPP_INLINE_VISIBILITY
987 _Tp operator|=(_Tp __op) _NOEXCEPT {return fetch_or(__op) | __op;}
988 _LIBCPP_INLINE_VISIBILITY
989 _Tp operator^=(_Tp __op) volatile _NOEXCEPT {return fetch_xor(__op) ^ __op;}
990 _LIBCPP_INLINE_VISIBILITY
991 _Tp operator^=(_Tp __op) _NOEXCEPT {return fetch_xor(__op) ^ __op;}
// Body of the primary std::atomic<_Tp> template (the `template <class _Tp>
// struct atomic` header line is outside this excerpt — presumably on the
// omitted original lines 996-997; confirm against the full file).  It
// inherits all operations from __atomic_base<_Tp> and adds assignment from
// _Tp, implemented as a seq_cst store; operator= returns the stored value.
998 : public __atomic_base<_Tp>
1000 typedef __atomic_base<_Tp> __base;
1001 _LIBCPP_INLINE_VISIBILITY
1002 atomic() _NOEXCEPT _LIBCPP_DEFAULT
1003 _LIBCPP_INLINE_VISIBILITY
1004 _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}
1006 _LIBCPP_INLINE_VISIBILITY
1007 _Tp operator=(_Tp __d) volatile _NOEXCEPT
1008 {__base::store(__d); return __d;}
1009 _LIBCPP_INLINE_VISIBILITY
1010 _Tp operator=(_Tp __d) _NOEXCEPT
1011 {__base::store(__d); return __d;}
// Partial specialization atomic<_Tp*> for pointer types.  Inherits the
// generic operations from __atomic_base<_Tp*> and adds pointer arithmetic:
// fetch_add/fetch_sub take a ptrdiff_t offset and forward to the
// __c11_atomic_fetch_* builtins; ++/--/+=/-= are built on top of them.
// NOTE(review): the excerpt omits some original lines (e.g. the struct
// header on line 1017 and qualifiers on lines 1035/1042), so the block is
// documented in place without restructuring.
1016 template <class _Tp>
1018 : public __atomic_base<_Tp*>
1020 typedef __atomic_base<_Tp*> __base;
1021 _LIBCPP_INLINE_VISIBILITY
1022 atomic() _NOEXCEPT _LIBCPP_DEFAULT
1023 _LIBCPP_INLINE_VISIBILITY
1024 _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}
// Assignment from a raw pointer: seq_cst store, returns the stored pointer.
1026 _LIBCPP_INLINE_VISIBILITY
1027 _Tp* operator=(_Tp* __d) volatile _NOEXCEPT
1028 {__base::store(__d); return __d;}
1029 _LIBCPP_INLINE_VISIBILITY
1030 _Tp* operator=(_Tp* __d) _NOEXCEPT
1031 {__base::store(__d); return __d;}
// fetch_add/fetch_sub: atomic pointer arithmetic by __op elements; the
// builtin yields the pointer value held before the update.
1033 _LIBCPP_INLINE_VISIBILITY
1034 _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
1036 {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
1037 _LIBCPP_INLINE_VISIBILITY
1038 _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1039 {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
1040 _LIBCPP_INLINE_VISIBILITY
1041 _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
1043 {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
1044 _LIBCPP_INLINE_VISIBILITY
1045 _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1046 {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
// Post-forms return the old pointer; pre-forms and +=/-= recompute the new
// pointer locally from the fetched old value.
1048 _LIBCPP_INLINE_VISIBILITY
1049 _Tp* operator++(int) volatile _NOEXCEPT {return fetch_add(1);}
1050 _LIBCPP_INLINE_VISIBILITY
1051 _Tp* operator++(int) _NOEXCEPT {return fetch_add(1);}
1052 _LIBCPP_INLINE_VISIBILITY
1053 _Tp* operator--(int) volatile _NOEXCEPT {return fetch_sub(1);}
1054 _LIBCPP_INLINE_VISIBILITY
1055 _Tp* operator--(int) _NOEXCEPT {return fetch_sub(1);}
1056 _LIBCPP_INLINE_VISIBILITY
1057 _Tp* operator++() volatile _NOEXCEPT {return fetch_add(1) + 1;}
1058 _LIBCPP_INLINE_VISIBILITY
1059 _Tp* operator++() _NOEXCEPT {return fetch_add(1) + 1;}
1060 _LIBCPP_INLINE_VISIBILITY
1061 _Tp* operator--() volatile _NOEXCEPT {return fetch_sub(1) - 1;}
1062 _LIBCPP_INLINE_VISIBILITY
1063 _Tp* operator--() _NOEXCEPT {return fetch_sub(1) - 1;}
1064 _LIBCPP_INLINE_VISIBILITY
1065 _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
1066 _LIBCPP_INLINE_VISIBILITY
1067 _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT {return fetch_add(__op) + __op;}
1068 _LIBCPP_INLINE_VISIBILITY
1069 _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
1070 _LIBCPP_INLINE_VISIBILITY
1071 _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT {return fetch_sub(__op) - __op;}
// C-compatible free-function interface, part 1: is_lock_free, init, store
// and load, each with a volatile and a non-volatile overload.  These are
// thin wrappers that forward to the corresponding atomic<_Tp> member
// function (or, for atomic_init, directly to the __c11_atomic_init builtin
// on the object's __a_ member).  The *_explicit variants take an explicit
// memory_order instead of the member functions' seq_cst default.
// NOTE(review): the excerpt omits the return-type lines, braces and some
// one-line bodies (e.g. atomic_store's `__o->store(__d);` on the missing
// original lines); documented in place without restructuring.
1074 // atomic_is_lock_free
1076 template <class _Tp>
1077 inline _LIBCPP_INLINE_VISIBILITY
1079 atomic_is_lock_free(const volatile atomic<_Tp>* __o) _NOEXCEPT
1081 return __o->is_lock_free();
1084 template <class _Tp>
1085 inline _LIBCPP_INLINE_VISIBILITY
1087 atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT
1089 return __o->is_lock_free();
// atomic_init: non-atomic initialization of the contained value.
1094 template <class _Tp>
1095 inline _LIBCPP_INLINE_VISIBILITY
1097 atomic_init(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1099 __c11_atomic_init(&__o->__a_, __d);
1102 template <class _Tp>
1103 inline _LIBCPP_INLINE_VISIBILITY
1105 atomic_init(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1107 __c11_atomic_init(&__o->__a_, __d);
// atomic_store: seq_cst store via the member function (bodies fall on
// omitted original lines 1117/1125).
1112 template <class _Tp>
1113 inline _LIBCPP_INLINE_VISIBILITY
1115 atomic_store(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1120 template <class _Tp>
1121 inline _LIBCPP_INLINE_VISIBILITY
1123 atomic_store(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1128 // atomic_store_explicit
1130 template <class _Tp>
1131 inline _LIBCPP_INLINE_VISIBILITY
1133 atomic_store_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1135 __o->store(__d, __m);
1138 template <class _Tp>
1139 inline _LIBCPP_INLINE_VISIBILITY
1141 atomic_store_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1143 __o->store(__d, __m);
// atomic_load / atomic_load_explicit: read the current value (plain load
// bodies fall on omitted original lines).
1148 template <class _Tp>
1149 inline _LIBCPP_INLINE_VISIBILITY
1151 atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT
1156 template <class _Tp>
1157 inline _LIBCPP_INLINE_VISIBILITY
1159 atomic_load(const atomic<_Tp>* __o) _NOEXCEPT
1164 // atomic_load_explicit
1166 template <class _Tp>
1167 inline _LIBCPP_INLINE_VISIBILITY
1169 atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
1171 return __o->load(__m);
1174 template <class _Tp>
1175 inline _LIBCPP_INLINE_VISIBILITY
1177 atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
1179 return __o->load(__m);
// C-compatible free-function interface, part 2: exchange and the four
// compare-exchange entry points (weak/strong x default/explicit ordering).
// Each forwards to the matching atomic<_Tp> member.  For compare-exchange,
// __e points at the caller's expected value; the member function updates
// *__e with the observed value on failure, matching C11/C++11 semantics.
// __s is the success ordering, __f the failure ordering in the *_explicit
// forms.
1184 template <class _Tp>
1185 inline _LIBCPP_INLINE_VISIBILITY
1187 atomic_exchange(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1189 return __o->exchange(__d);
1192 template <class _Tp>
1193 inline _LIBCPP_INLINE_VISIBILITY
1195 atomic_exchange(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1197 return __o->exchange(__d);
1200 // atomic_exchange_explicit
1202 template <class _Tp>
1203 inline _LIBCPP_INLINE_VISIBILITY
1205 atomic_exchange_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1207 return __o->exchange(__d, __m);
1210 template <class _Tp>
1211 inline _LIBCPP_INLINE_VISIBILITY
1213 atomic_exchange_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1215 return __o->exchange(__d, __m);
1218 // atomic_compare_exchange_weak
1220 template <class _Tp>
1221 inline _LIBCPP_INLINE_VISIBILITY
1223 atomic_compare_exchange_weak(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1225 return __o->compare_exchange_weak(*__e, __d);
1228 template <class _Tp>
1229 inline _LIBCPP_INLINE_VISIBILITY
1231 atomic_compare_exchange_weak(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1233 return __o->compare_exchange_weak(*__e, __d);
1236 // atomic_compare_exchange_strong
1238 template <class _Tp>
1239 inline _LIBCPP_INLINE_VISIBILITY
1241 atomic_compare_exchange_strong(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1243 return __o->compare_exchange_strong(*__e, __d);
1246 template <class _Tp>
1247 inline _LIBCPP_INLINE_VISIBILITY
1249 atomic_compare_exchange_strong(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1251 return __o->compare_exchange_strong(*__e, __d);
// The *_explicit overloads below reference __d and __e declared on omitted
// original lines of their parameter lists (excerpt gap).
1254 // atomic_compare_exchange_weak_explicit
1256 template <class _Tp>
1257 inline _LIBCPP_INLINE_VISIBILITY
1259 atomic_compare_exchange_weak_explicit(volatile atomic<_Tp>* __o, _Tp* __e,
1261 memory_order __s, memory_order __f) _NOEXCEPT
1263 return __o->compare_exchange_weak(*__e, __d, __s, __f);
1266 template <class _Tp>
1267 inline _LIBCPP_INLINE_VISIBILITY
1269 atomic_compare_exchange_weak_explicit(atomic<_Tp>* __o, _Tp* __e, _Tp __d,
1270 memory_order __s, memory_order __f) _NOEXCEPT
1272 return __o->compare_exchange_weak(*__e, __d, __s, __f);
1275 // atomic_compare_exchange_strong_explicit
1277 template <class _Tp>
1278 inline _LIBCPP_INLINE_VISIBILITY
1280 atomic_compare_exchange_strong_explicit(volatile atomic<_Tp>* __o,
1282 memory_order __s, memory_order __f) _NOEXCEPT
1284 return __o->compare_exchange_strong(*__e, __d, __s, __f);
1287 template <class _Tp>
1288 inline _LIBCPP_INLINE_VISIBILITY
1290 atomic_compare_exchange_strong_explicit(atomic<_Tp>* __o, _Tp* __e,
1292 memory_order __s, memory_order __f) _NOEXCEPT
1294 return __o->compare_exchange_strong(*__e, __d, __s, __f);
// C-compatible free-function interface, part 3: atomic_fetch_add and
// atomic_fetch_sub (+_explicit).  Two families per operation:
//  - integral overloads, SFINAE-constrained (the visible
//    `is_integral && !is_same<_Tp, bool>` condition is part of an
//    enable_if return type whose surrounding lines are omitted from this
//    excerpt) so they only participate for non-bool integral _Tp;
//  - pointer overloads taking a ptrdiff_t offset for atomic<_Tp*>.
// All return the value held before the update, via the member fetch_*.
1299 template <class _Tp>
1300 inline _LIBCPP_INLINE_VISIBILITY
1303 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1306 atomic_fetch_add(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1308 return __o->fetch_add(__op);
1311 template <class _Tp>
1312 inline _LIBCPP_INLINE_VISIBILITY
1315 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1318 atomic_fetch_add(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1320 return __o->fetch_add(__op);
// Pointer forms: advance by __op elements.
1323 template <class _Tp>
1324 inline _LIBCPP_INLINE_VISIBILITY
1326 atomic_fetch_add(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
1328 return __o->fetch_add(__op)
1331 template <class _Tp>
1332 inline _LIBCPP_INLINE_VISIBILITY
1334 atomic_fetch_add(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
1336 return __o->fetch_add(__op);
1339 // atomic_fetch_add_explicit
1341 template <class _Tp>
1342 inline _LIBCPP_INLINE_VISIBILITY
1345 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1348 atomic_fetch_add_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1350 return __o->fetch_add(__op, __m);
1353 template <class _Tp>
1354 inline _LIBCPP_INLINE_VISIBILITY
1357 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1360 atomic_fetch_add_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1362 return __o->fetch_add(__op, __m);
1365 template <class _Tp>
1366 inline _LIBCPP_INLINE_VISIBILITY
1368 atomic_fetch_add_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
1369 memory_order __m) _NOEXCEPT
1371 return __o->fetch_add(__op, __m);
1374 template <class _Tp>
1375 inline _LIBCPP_INLINE_VISIBILITY
1377 atomic_fetch_add_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
1379 return __o->fetch_add(__op, __m);
// atomic_fetch_sub: same structure as fetch_add, subtracting instead.
1384 template <class _Tp>
1385 inline _LIBCPP_INLINE_VISIBILITY
1388 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1391 atomic_fetch_sub(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1393 return __o->fetch_sub(__op);
1396 template <class _Tp>
1397 inline _LIBCPP_INLINE_VISIBILITY
1400 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1403 atomic_fetch_sub(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1405 return __o->fetch_sub(__op);
1408 template <class _Tp>
1409 inline _LIBCPP_INLINE_VISIBILITY
1411 atomic_fetch_sub(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
1413 return __o->fetch_sub(__op);
1416 template <class _Tp>
1417 inline _LIBCPP_INLINE_VISIBILITY
1419 atomic_fetch_sub(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
1421 return __o->fetch_sub(__op);
1424 // atomic_fetch_sub_explicit
1426 template <class _Tp>
1427 inline _LIBCPP_INLINE_VISIBILITY
1430 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1433 atomic_fetch_sub_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1435 return __o->fetch_sub(__op, __m);
1438 template <class _Tp>
1439 inline _LIBCPP_INLINE_VISIBILITY
1442 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1445 atomic_fetch_sub_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1447 return __o->fetch_sub(__op, __m);
1450 template <class _Tp>
1451 inline _LIBCPP_INLINE_VISIBILITY
1453 atomic_fetch_sub_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
1454 memory_order __m) _NOEXCEPT
1456 return __o->fetch_sub(__op, __m);
1459 template <class _Tp>
1460 inline _LIBCPP_INLINE_VISIBILITY
1462 atomic_fetch_sub_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
1464 return __o->fetch_sub(__op, __m);
// C-compatible free-function interface, part 4: the bitwise RMW functions
// atomic_fetch_and / _or / _xor and their *_explicit variants.  Integral
// only — the visible `is_integral && !is_same<_Tp, bool>` condition is part
// of an enable_if return type (surrounding lines omitted from this
// excerpt), so there are no pointer overloads here.  Each forwards to the
// matching member fetch_* and returns the value held before the update.
1469 template <class _Tp>
1470 inline _LIBCPP_INLINE_VISIBILITY
1473 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1476 atomic_fetch_and(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1478 return __o->fetch_and(__op);
1481 template <class _Tp>
1482 inline _LIBCPP_INLINE_VISIBILITY
1485 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1488 atomic_fetch_and(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1490 return __o->fetch_and(__op);
1493 // atomic_fetch_and_explicit
1495 template <class _Tp>
1496 inline _LIBCPP_INLINE_VISIBILITY
1499 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1502 atomic_fetch_and_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1504 return __o->fetch_and(__op, __m);
1507 template <class _Tp>
1508 inline _LIBCPP_INLINE_VISIBILITY
1511 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1514 atomic_fetch_and_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1516 return __o->fetch_and(__op, __m);
1521 template <class _Tp>
1522 inline _LIBCPP_INLINE_VISIBILITY
1525 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1528 atomic_fetch_or(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1530 return __o->fetch_or(__op);
1533 template <class _Tp>
1534 inline _LIBCPP_INLINE_VISIBILITY
1537 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1540 atomic_fetch_or(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1542 return __o->fetch_or(__op);
1545 // atomic_fetch_or_explicit
1547 template <class _Tp>
1548 inline _LIBCPP_INLINE_VISIBILITY
1551 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1554 atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1556 return __o->fetch_or(__op, __m);
1559 template <class _Tp>
1560 inline _LIBCPP_INLINE_VISIBILITY
1563 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1566 atomic_fetch_or_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1568 return __o->fetch_or(__op, __m);
1573 template <class _Tp>
1574 inline _LIBCPP_INLINE_VISIBILITY
1577 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1580 atomic_fetch_xor(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1582 return __o->fetch_xor(__op);
1585 template <class _Tp>
1586 inline _LIBCPP_INLINE_VISIBILITY
1589 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1592 atomic_fetch_xor(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1594 return __o->fetch_xor(__op);
1597 // atomic_fetch_xor_explicit
1599 template <class _Tp>
1600 inline _LIBCPP_INLINE_VISIBILITY
1603 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1606 atomic_fetch_xor_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1608 return __o->fetch_xor(__op, __m);
1611 template <class _Tp>
1612 inline _LIBCPP_INLINE_VISIBILITY
1615 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1618 atomic_fetch_xor_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1620 return __o->fetch_xor(__op, __m);
// std::atomic_flag: the one atomic type guaranteed lock-free.
// test_and_set atomically sets the flag via __c11_atomic_exchange(..., true)
// and returns the value it held before; clear atomically stores false.
// Both default to seq_cst ordering.  Copy construction and copy assignment
// are forbidden: deleted where the compiler supports `= delete`, otherwise
// declared but never defined (the #else branch).
// NOTE(review): the excerpt omits the struct's opening brace and the __a_
// data member declaration (original lines 1626-1628); documented in place.
1623 // flag type and operations
1625 typedef struct atomic_flag
1629 _LIBCPP_INLINE_VISIBILITY
1630 bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1631 {return __c11_atomic_exchange(&__a_, true, __m);}
1632 _LIBCPP_INLINE_VISIBILITY
1633 bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT
1634 {return __c11_atomic_exchange(&__a_, true, __m);}
1635 _LIBCPP_INLINE_VISIBILITY
1636 void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1637 {__c11_atomic_store(&__a_, false, __m);}
1638 _LIBCPP_INLINE_VISIBILITY
1639 void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT
1640 {__c11_atomic_store(&__a_, false, __m);}
// Default ctor: `= default` when available, else value-initialize __a_.
1642 _LIBCPP_INLINE_VISIBILITY
1643 #ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
1644 atomic_flag() _NOEXCEPT = default;
1646 atomic_flag() _NOEXCEPT : __a_() {}
1647 #endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
// Extension ctor used by ATOMIC_FLAG_INIT-style brace initialization.
1649 _LIBCPP_INLINE_VISIBILITY
1650 atomic_flag(bool __b) _NOEXCEPT : __a_(__b) {}
1652 #ifndef _LIBCPP_HAS_NO_DELETED_FUNCTIONS
1653 atomic_flag(const atomic_flag&) = delete;
1654 atomic_flag& operator=(const atomic_flag&) = delete;
1655 atomic_flag& operator=(const atomic_flag&) volatile = delete;
1656 #else // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
1658 atomic_flag(const atomic_flag&);
1659 atomic_flag& operator=(const atomic_flag&);
1660 atomic_flag& operator=(const atomic_flag&) volatile;
1661 #endif // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
// C-compatible free functions for atomic_flag, forwarding to the members:
// *_test_and_set returns the prior flag value; *_clear resets it (the
// one-line clear bodies fall on original lines omitted from this excerpt).
// The *_explicit forms pass the caller's memory_order instead of seq_cst.
1664 inline _LIBCPP_INLINE_VISIBILITY
1666 atomic_flag_test_and_set(volatile atomic_flag* __o) _NOEXCEPT
1668 return __o->test_and_set();
1671 inline _LIBCPP_INLINE_VISIBILITY
1673 atomic_flag_test_and_set(atomic_flag* __o) _NOEXCEPT
1675 return __o->test_and_set();
1678 inline _LIBCPP_INLINE_VISIBILITY
1680 atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
1682 return __o->test_and_set(__m);
1685 inline _LIBCPP_INLINE_VISIBILITY
1687 atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
1689 return __o->test_and_set(__m);
1692 inline _LIBCPP_INLINE_VISIBILITY
1694 atomic_flag_clear(volatile atomic_flag* __o) _NOEXCEPT
1699 inline _LIBCPP_INLINE_VISIBILITY
1701 atomic_flag_clear(atomic_flag* __o) _NOEXCEPT
1706 inline _LIBCPP_INLINE_VISIBILITY
1708 atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
1713 inline _LIBCPP_INLINE_VISIBILITY
1715 atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
// Fences: atomic_thread_fence orders memory operations across threads;
// atomic_signal_fence orders only with respect to a signal handler on the
// same thread (compiler barrier).  Both forward to the compiler builtins.
1722 inline _LIBCPP_INLINE_VISIBILITY
1724 atomic_thread_fence(memory_order __m) _NOEXCEPT
1726 __c11_atomic_thread_fence(__m);
1729 inline _LIBCPP_INLINE_VISIBILITY
1731 atomic_signal_fence(memory_order __m) _NOEXCEPT
1733 __c11_atomic_signal_fence(__m);
1736 // Atomics for standard typedef types
// Convenience aliases required by [atomics.types.generic]: one atomic<T>
// alias per fundamental / <cstdint> integer type.
1738 typedef atomic<bool> atomic_bool;
1739 typedef atomic<char> atomic_char;
1740 typedef atomic<signed char> atomic_schar;
1741 typedef atomic<unsigned char> atomic_uchar;
1742 typedef atomic<short> atomic_short;
1743 typedef atomic<unsigned short> atomic_ushort;
1744 typedef atomic<int> atomic_int;
1745 typedef atomic<unsigned int> atomic_uint;
1746 typedef atomic<long> atomic_long;
1747 typedef atomic<unsigned long> atomic_ulong;
1748 typedef atomic<long long> atomic_llong;
1749 typedef atomic<unsigned long long> atomic_ullong;
1750 typedef atomic<char16_t> atomic_char16_t;
1751 typedef atomic<char32_t> atomic_char32_t;
1752 typedef atomic<wchar_t> atomic_wchar_t;
// Least-width integer aliases.
1754 typedef atomic<int_least8_t> atomic_int_least8_t;
1755 typedef atomic<uint_least8_t> atomic_uint_least8_t;
1756 typedef atomic<int_least16_t> atomic_int_least16_t;
1757 typedef atomic<uint_least16_t> atomic_uint_least16_t;
1758 typedef atomic<int_least32_t> atomic_int_least32_t;
1759 typedef atomic<uint_least32_t> atomic_uint_least32_t;
1760 typedef atomic<int_least64_t> atomic_int_least64_t;
1761 typedef atomic<uint_least64_t> atomic_uint_least64_t;
// Fastest-width integer aliases.
1763 typedef atomic<int_fast8_t> atomic_int_fast8_t;
1764 typedef atomic<uint_fast8_t> atomic_uint_fast8_t;
1765 typedef atomic<int_fast16_t> atomic_int_fast16_t;
1766 typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
1767 typedef atomic<int_fast32_t> atomic_int_fast32_t;
1768 typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
1769 typedef atomic<int_fast64_t> atomic_int_fast64_t;
1770 typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
// Pointer-sized / size / difference / max-width aliases.
1772 typedef atomic<intptr_t> atomic_intptr_t;
1773 typedef atomic<uintptr_t> atomic_uintptr_t;
1774 typedef atomic<size_t> atomic_size_t;
1775 typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
1776 typedef atomic<intmax_t> atomic_intmax_t;
1777 typedef atomic<uintmax_t> atomic_uintmax_t;
// ATOMIC_FLAG_INIT initializes an atomic_flag to the clear (false) state;
// ATOMIC_VAR_INIT brace-initializes an atomic object with value __v.
1779 #define ATOMIC_FLAG_INIT {false}
1780 #define ATOMIC_VAR_INIT(__v) {__v}
1782 // lock-free property
// The ATOMIC_*_LOCK_FREE macros forward to the compiler-predefined
// __GCC_ATOMIC_*_LOCK_FREE values (0 = never, 1 = sometimes, 2 = always
// lock-free, per the C11/C++11 convention these predefines implement).
1784 #define ATOMIC_BOOL_LOCK_FREE __GCC_ATOMIC_BOOL_LOCK_FREE
1785 #define ATOMIC_CHAR_LOCK_FREE __GCC_ATOMIC_CHAR_LOCK_FREE
1786 #define ATOMIC_CHAR16_T_LOCK_FREE __GCC_ATOMIC_CHAR16_T_LOCK_FREE
1787 #define ATOMIC_CHAR32_T_LOCK_FREE __GCC_ATOMIC_CHAR32_T_LOCK_FREE
1788 #define ATOMIC_WCHAR_T_LOCK_FREE __GCC_ATOMIC_WCHAR_T_LOCK_FREE
1789 #define ATOMIC_SHORT_LOCK_FREE __GCC_ATOMIC_SHORT_LOCK_FREE
1790 #define ATOMIC_INT_LOCK_FREE __GCC_ATOMIC_INT_LOCK_FREE
1791 #define ATOMIC_LONG_LOCK_FREE __GCC_ATOMIC_LONG_LOCK_FREE
1792 #define ATOMIC_LLONG_LOCK_FREE __GCC_ATOMIC_LLONG_LOCK_FREE
1793 #define ATOMIC_POINTER_LOCK_FREE __GCC_ATOMIC_POINTER_LOCK_FREE
1795 #endif // !__has_feature(cxx_atomic)
1797 _LIBCPP_END_NAMESPACE_STD
1799 #endif // !_LIBCPP_HAS_NO_THREADS
1801 #endif // _LIBCPP_ATOMIC