2 //===--------------------------- atomic -----------------------------------===//
4 // The LLVM Compiler Infrastructure
6 // This file is distributed under the University of Illinois Open Source
7 // License. See LICENSE.TXT for details.
9 //===----------------------------------------------------------------------===//
11 #ifndef _LIBCPP_ATOMIC
12 #define _LIBCPP_ATOMIC
20 // order and consistency
22 typedef enum memory_order
25 memory_order_consume, // load-consume
26 memory_order_acquire, // load-acquire
27 memory_order_release, // store-release
28 memory_order_acq_rel, // store-release load-acquire
29 memory_order_seq_cst // store-release load-acquire
32 template <class T> T kill_dependency(T y) noexcept;
36 #define ATOMIC_BOOL_LOCK_FREE unspecified
37 #define ATOMIC_CHAR_LOCK_FREE unspecified
38 #define ATOMIC_CHAR16_T_LOCK_FREE unspecified
39 #define ATOMIC_CHAR32_T_LOCK_FREE unspecified
40 #define ATOMIC_WCHAR_T_LOCK_FREE unspecified
41 #define ATOMIC_SHORT_LOCK_FREE unspecified
42 #define ATOMIC_INT_LOCK_FREE unspecified
43 #define ATOMIC_LONG_LOCK_FREE unspecified
44 #define ATOMIC_LLONG_LOCK_FREE unspecified
45 #define ATOMIC_POINTER_LOCK_FREE unspecified
47 // flag type and operations
49 typedef struct atomic_flag
51 bool test_and_set(memory_order m = memory_order_seq_cst) volatile noexcept;
52 bool test_and_set(memory_order m = memory_order_seq_cst) noexcept;
53 void clear(memory_order m = memory_order_seq_cst) volatile noexcept;
54 void clear(memory_order m = memory_order_seq_cst) noexcept;
55 atomic_flag() noexcept = default;
56 atomic_flag(const atomic_flag&) = delete;
57 atomic_flag& operator=(const atomic_flag&) = delete;
58 atomic_flag& operator=(const atomic_flag&) volatile = delete;
62 atomic_flag_test_and_set(volatile atomic_flag* obj) noexcept;
65 atomic_flag_test_and_set(atomic_flag* obj) noexcept;
68 atomic_flag_test_and_set_explicit(volatile atomic_flag* obj,
69 memory_order m) noexcept;
72 atomic_flag_test_and_set_explicit(atomic_flag* obj, memory_order m) noexcept;
75 atomic_flag_clear(volatile atomic_flag* obj) noexcept;
78 atomic_flag_clear(atomic_flag* obj) noexcept;
81 atomic_flag_clear_explicit(volatile atomic_flag* obj, memory_order m) noexcept;
84 atomic_flag_clear_explicit(atomic_flag* obj, memory_order m) noexcept;
86 #define ATOMIC_FLAG_INIT see below
87 #define ATOMIC_VAR_INIT(value) see below
92 bool is_lock_free() const volatile noexcept;
93 bool is_lock_free() const noexcept;
94 void store(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
95 void store(T desr, memory_order m = memory_order_seq_cst) noexcept;
96 T load(memory_order m = memory_order_seq_cst) const volatile noexcept;
97 T load(memory_order m = memory_order_seq_cst) const noexcept;
98 operator T() const volatile noexcept;
99 operator T() const noexcept;
100 T exchange(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
101 T exchange(T desr, memory_order m = memory_order_seq_cst) noexcept;
102 bool compare_exchange_weak(T& expc, T desr,
103 memory_order s, memory_order f) volatile noexcept;
104 bool compare_exchange_weak(T& expc, T desr, memory_order s, memory_order f) noexcept;
105 bool compare_exchange_strong(T& expc, T desr,
106 memory_order s, memory_order f) volatile noexcept;
107 bool compare_exchange_strong(T& expc, T desr,
108 memory_order s, memory_order f) noexcept;
109 bool compare_exchange_weak(T& expc, T desr,
110 memory_order m = memory_order_seq_cst) volatile noexcept;
111 bool compare_exchange_weak(T& expc, T desr,
112 memory_order m = memory_order_seq_cst) noexcept;
113 bool compare_exchange_strong(T& expc, T desr,
114 memory_order m = memory_order_seq_cst) volatile noexcept;
115 bool compare_exchange_strong(T& expc, T desr,
116 memory_order m = memory_order_seq_cst) noexcept;
118 atomic() noexcept = default;
119 constexpr atomic(T desr) noexcept;
120 atomic(const atomic&) = delete;
121 atomic& operator=(const atomic&) = delete;
122 atomic& operator=(const atomic&) volatile = delete;
123 T operator=(T) volatile noexcept;
124 T operator=(T) noexcept;
128 struct atomic<integral>
130 bool is_lock_free() const volatile noexcept;
131 bool is_lock_free() const noexcept;
132 void store(integral desr, memory_order m = memory_order_seq_cst) volatile noexcept;
133 void store(integral desr, memory_order m = memory_order_seq_cst) noexcept;
134 integral load(memory_order m = memory_order_seq_cst) const volatile noexcept;
135 integral load(memory_order m = memory_order_seq_cst) const noexcept;
136 operator integral() const volatile noexcept;
137 operator integral() const noexcept;
138 integral exchange(integral desr,
139 memory_order m = memory_order_seq_cst) volatile noexcept;
140 integral exchange(integral desr, memory_order m = memory_order_seq_cst) noexcept;
141 bool compare_exchange_weak(integral& expc, integral desr,
142 memory_order s, memory_order f) volatile noexcept;
143 bool compare_exchange_weak(integral& expc, integral desr,
144 memory_order s, memory_order f) noexcept;
145 bool compare_exchange_strong(integral& expc, integral desr,
146 memory_order s, memory_order f) volatile noexcept;
147 bool compare_exchange_strong(integral& expc, integral desr,
148 memory_order s, memory_order f) noexcept;
149 bool compare_exchange_weak(integral& expc, integral desr,
150 memory_order m = memory_order_seq_cst) volatile noexcept;
151 bool compare_exchange_weak(integral& expc, integral desr,
152 memory_order m = memory_order_seq_cst) noexcept;
153 bool compare_exchange_strong(integral& expc, integral desr,
154 memory_order m = memory_order_seq_cst) volatile noexcept;
155 bool compare_exchange_strong(integral& expc, integral desr,
156 memory_order m = memory_order_seq_cst) noexcept;
159 fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
160 integral fetch_add(integral op, memory_order m = memory_order_seq_cst) noexcept;
162 fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
163 integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) noexcept;
165 fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
166 integral fetch_and(integral op, memory_order m = memory_order_seq_cst) noexcept;
168 fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
169 integral fetch_or(integral op, memory_order m = memory_order_seq_cst) noexcept;
171 fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
172 integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) noexcept;
174 atomic() noexcept = default;
175 constexpr atomic(integral desr) noexcept;
176 atomic(const atomic&) = delete;
177 atomic& operator=(const atomic&) = delete;
178 atomic& operator=(const atomic&) volatile = delete;
179 integral operator=(integral desr) volatile noexcept;
180 integral operator=(integral desr) noexcept;
182 integral operator++(int) volatile noexcept;
183 integral operator++(int) noexcept;
184 integral operator--(int) volatile noexcept;
185 integral operator--(int) noexcept;
186 integral operator++() volatile noexcept;
187 integral operator++() noexcept;
188 integral operator--() volatile noexcept;
189 integral operator--() noexcept;
190 integral operator+=(integral op) volatile noexcept;
191 integral operator+=(integral op) noexcept;
192 integral operator-=(integral op) volatile noexcept;
193 integral operator-=(integral op) noexcept;
194 integral operator&=(integral op) volatile noexcept;
195 integral operator&=(integral op) noexcept;
196 integral operator|=(integral op) volatile noexcept;
197 integral operator|=(integral op) noexcept;
198 integral operator^=(integral op) volatile noexcept;
199 integral operator^=(integral op) noexcept;
205 bool is_lock_free() const volatile noexcept;
206 bool is_lock_free() const noexcept;
207 void store(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
208 void store(T* desr, memory_order m = memory_order_seq_cst) noexcept;
209 T* load(memory_order m = memory_order_seq_cst) const volatile noexcept;
210 T* load(memory_order m = memory_order_seq_cst) const noexcept;
211 operator T*() const volatile noexcept;
212 operator T*() const noexcept;
213 T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
214 T* exchange(T* desr, memory_order m = memory_order_seq_cst) noexcept;
215 bool compare_exchange_weak(T*& expc, T* desr,
216 memory_order s, memory_order f) volatile noexcept;
217 bool compare_exchange_weak(T*& expc, T* desr,
218 memory_order s, memory_order f) noexcept;
219 bool compare_exchange_strong(T*& expc, T* desr,
220 memory_order s, memory_order f) volatile noexcept;
221 bool compare_exchange_strong(T*& expc, T* desr,
222 memory_order s, memory_order f) noexcept;
223 bool compare_exchange_weak(T*& expc, T* desr,
224 memory_order m = memory_order_seq_cst) volatile noexcept;
225 bool compare_exchange_weak(T*& expc, T* desr,
226 memory_order m = memory_order_seq_cst) noexcept;
227 bool compare_exchange_strong(T*& expc, T* desr,
228 memory_order m = memory_order_seq_cst) volatile noexcept;
229 bool compare_exchange_strong(T*& expc, T* desr,
230 memory_order m = memory_order_seq_cst) noexcept;
231 T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
232 T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
233 T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
234 T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
236 atomic() noexcept = default;
237 constexpr atomic(T* desr) noexcept;
238 atomic(const atomic&) = delete;
239 atomic& operator=(const atomic&) = delete;
240 atomic& operator=(const atomic&) volatile = delete;
242 T* operator=(T*) volatile noexcept;
243 T* operator=(T*) noexcept;
244 T* operator++(int) volatile noexcept;
245 T* operator++(int) noexcept;
246 T* operator--(int) volatile noexcept;
247 T* operator--(int) noexcept;
248 T* operator++() volatile noexcept;
249 T* operator++() noexcept;
250 T* operator--() volatile noexcept;
251 T* operator--() noexcept;
252 T* operator+=(ptrdiff_t op) volatile noexcept;
253 T* operator+=(ptrdiff_t op) noexcept;
254 T* operator-=(ptrdiff_t op) volatile noexcept;
255 T* operator-=(ptrdiff_t op) noexcept;
261 atomic_is_lock_free(const volatile atomic<T>* obj) noexcept;
265 atomic_is_lock_free(const atomic<T>* obj) noexcept;
269 atomic_init(volatile atomic<T>* obj, T desr) noexcept;
273 atomic_init(atomic<T>* obj, T desr) noexcept;
277 atomic_store(volatile atomic<T>* obj, T desr) noexcept;
281 atomic_store(atomic<T>* obj, T desr) noexcept;
285 atomic_store_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
289 atomic_store_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
293 atomic_load(const volatile atomic<T>* obj) noexcept;
297 atomic_load(const atomic<T>* obj) noexcept;
301 atomic_load_explicit(const volatile atomic<T>* obj, memory_order m) noexcept;
305 atomic_load_explicit(const atomic<T>* obj, memory_order m) noexcept;
309 atomic_exchange(volatile atomic<T>* obj, T desr) noexcept;
313 atomic_exchange(atomic<T>* obj, T desr) noexcept;
317 atomic_exchange_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
321 atomic_exchange_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
325 atomic_compare_exchange_weak(volatile atomic<T>* obj, T* expc, T desr) noexcept;
329 atomic_compare_exchange_weak(atomic<T>* obj, T* expc, T desr) noexcept;
333 atomic_compare_exchange_strong(volatile atomic<T>* obj, T* expc, T desr) noexcept;
337 atomic_compare_exchange_strong(atomic<T>* obj, T* expc, T desr) noexcept;
341 atomic_compare_exchange_weak_explicit(volatile atomic<T>* obj, T* expc,
343 memory_order s, memory_order f) noexcept;
347 atomic_compare_exchange_weak_explicit(atomic<T>* obj, T* expc, T desr,
348 memory_order s, memory_order f) noexcept;
352 atomic_compare_exchange_strong_explicit(volatile atomic<T>* obj,
354 memory_order s, memory_order f) noexcept;
358 atomic_compare_exchange_strong_explicit(atomic<T>* obj, T* expc,
360 memory_order s, memory_order f) noexcept;
362 template <class Integral>
364 atomic_fetch_add(volatile atomic<Integral>* obj, Integral op) noexcept;
366 template <class Integral>
368 atomic_fetch_add(atomic<Integral>* obj, Integral op) noexcept;
370 template <class Integral>
372 atomic_fetch_add_explicit(volatile atomic<Integral>* obj, Integral op,
373 memory_order m) noexcept;
374 template <class Integral>
376 atomic_fetch_add_explicit(atomic<Integral>* obj, Integral op,
377 memory_order m) noexcept;
378 template <class Integral>
380 atomic_fetch_sub(volatile atomic<Integral>* obj, Integral op) noexcept;
382 template <class Integral>
384 atomic_fetch_sub(atomic<Integral>* obj, Integral op) noexcept;
386 template <class Integral>
388 atomic_fetch_sub_explicit(volatile atomic<Integral>* obj, Integral op,
389 memory_order m) noexcept;
390 template <class Integral>
392 atomic_fetch_sub_explicit(atomic<Integral>* obj, Integral op,
393 memory_order m) noexcept;
394 template <class Integral>
396 atomic_fetch_and(volatile atomic<Integral>* obj, Integral op) noexcept;
398 template <class Integral>
400 atomic_fetch_and(atomic<Integral>* obj, Integral op) noexcept;
402 template <class Integral>
404 atomic_fetch_and_explicit(volatile atomic<Integral>* obj, Integral op,
405 memory_order m) noexcept;
406 template <class Integral>
408 atomic_fetch_and_explicit(atomic<Integral>* obj, Integral op,
409 memory_order m) noexcept;
410 template <class Integral>
412 atomic_fetch_or(volatile atomic<Integral>* obj, Integral op) noexcept;
414 template <class Integral>
416 atomic_fetch_or(atomic<Integral>* obj, Integral op) noexcept;
418 template <class Integral>
420 atomic_fetch_or_explicit(volatile atomic<Integral>* obj, Integral op,
421 memory_order m) noexcept;
422 template <class Integral>
424 atomic_fetch_or_explicit(atomic<Integral>* obj, Integral op,
425 memory_order m) noexcept;
426 template <class Integral>
428 atomic_fetch_xor(volatile atomic<Integral>* obj, Integral op) noexcept;
430 template <class Integral>
432 atomic_fetch_xor(atomic<Integral>* obj, Integral op) noexcept;
434 template <class Integral>
436 atomic_fetch_xor_explicit(volatile atomic<Integral>* obj, Integral op,
437 memory_order m) noexcept;
438 template <class Integral>
440 atomic_fetch_xor_explicit(atomic<Integral>* obj, Integral op,
441 memory_order m) noexcept;
445 atomic_fetch_add(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
449 atomic_fetch_add(atomic<T*>* obj, ptrdiff_t op) noexcept;
453 atomic_fetch_add_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
454 memory_order m) noexcept;
457 atomic_fetch_add_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
461 atomic_fetch_sub(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
465 atomic_fetch_sub(atomic<T*>* obj, ptrdiff_t op) noexcept;
469 atomic_fetch_sub_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
470 memory_order m) noexcept;
473 atomic_fetch_sub_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
475 // Atomics for standard typedef types
477 typedef atomic<bool> atomic_bool;
478 typedef atomic<char> atomic_char;
479 typedef atomic<signed char> atomic_schar;
480 typedef atomic<unsigned char> atomic_uchar;
481 typedef atomic<short> atomic_short;
482 typedef atomic<unsigned short> atomic_ushort;
483 typedef atomic<int> atomic_int;
484 typedef atomic<unsigned int> atomic_uint;
485 typedef atomic<long> atomic_long;
486 typedef atomic<unsigned long> atomic_ulong;
487 typedef atomic<long long> atomic_llong;
488 typedef atomic<unsigned long long> atomic_ullong;
489 typedef atomic<char16_t> atomic_char16_t;
490 typedef atomic<char32_t> atomic_char32_t;
491 typedef atomic<wchar_t> atomic_wchar_t;
493 typedef atomic<int_least8_t> atomic_int_least8_t;
494 typedef atomic<uint_least8_t> atomic_uint_least8_t;
495 typedef atomic<int_least16_t> atomic_int_least16_t;
496 typedef atomic<uint_least16_t> atomic_uint_least16_t;
497 typedef atomic<int_least32_t> atomic_int_least32_t;
498 typedef atomic<uint_least32_t> atomic_uint_least32_t;
499 typedef atomic<int_least64_t> atomic_int_least64_t;
500 typedef atomic<uint_least64_t> atomic_uint_least64_t;
502 typedef atomic<int_fast8_t> atomic_int_fast8_t;
503 typedef atomic<uint_fast8_t> atomic_uint_fast8_t;
504 typedef atomic<int_fast16_t> atomic_int_fast16_t;
505 typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
506 typedef atomic<int_fast32_t> atomic_int_fast32_t;
507 typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
508 typedef atomic<int_fast64_t> atomic_int_fast64_t;
509 typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
511 typedef atomic<intptr_t> atomic_intptr_t;
512 typedef atomic<uintptr_t> atomic_uintptr_t;
513 typedef atomic<size_t> atomic_size_t;
514 typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
515 typedef atomic<intmax_t> atomic_intmax_t;
516 typedef atomic<uintmax_t> atomic_uintmax_t;
520 void atomic_thread_fence(memory_order m) noexcept;
521 void atomic_signal_fence(memory_order m) noexcept;
530 #include <type_traits>
532 #if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
533 #pragma GCC system_header
536 #ifdef _LIBCPP_HAS_NO_THREADS
537 #error <atomic> is not supported on this single threaded system
538 #else // !_LIBCPP_HAS_NO_THREADS
540 _LIBCPP_BEGIN_NAMESPACE_STD
542 #if !__has_feature(cxx_atomic) && _GNUC_VER < 407
543 #error <atomic> is not implemented
// Fallback definition of std::memory_order for compilers without Clang's
// _Atomic support (the GCC-builtin path of this header).  Enumerator order
// matters: values 0..5 must line up with the C++11 enumeration.
typedef enum memory_order
{
    memory_order_relaxed, memory_order_consume, memory_order_acquire,
    memory_order_release, memory_order_acq_rel, memory_order_seq_cst
} memory_order;
553 namespace __gcc_atomic {
554 template <typename T>
555 struct __gcc_atomic_t {
556 __gcc_atomic_t() _NOEXCEPT {}
557 explicit __gcc_atomic_t(T value) _NOEXCEPT : __a_value(value) {}
560 #define _Atomic(x) __gcc_atomic::__gcc_atomic_t<x>
562 template <typename T> T __create();
564 template <typename __Tp, typename __Td>
565 typename enable_if<sizeof(__Tp()->__a_value = __create<__Td>()), char>::type
566 __test_atomic_assignable(int);
567 template <typename T, typename U>
568 __two __test_atomic_assignable(...);
570 template <typename __Tp, typename __Td>
571 struct __can_assign {
572 static const bool value =
573 sizeof(__test_atomic_assignable<__Tp, __Td>(1)) == sizeof(char);
576 static inline constexpr int __to_gcc_order(memory_order __order) {
577 // Avoid switch statement to make this a constexpr.
578 return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
579 (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
580 (__order == memory_order_release ? __ATOMIC_RELEASE:
581 (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
582 (__order == memory_order_acq_rel ? __ATOMIC_ACQ_REL:
583 __ATOMIC_CONSUME))));
586 } // namespace __gcc_atomic
588 template <typename _Tp>
591 __gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value>::type
592 __c11_atomic_init(volatile _Atomic(_Tp)* __a, _Tp __val) {
593 __a->__a_value = __val;
596 template <typename _Tp>
599 !__gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value &&
600 __gcc_atomic::__can_assign< _Atomic(_Tp)*, _Tp>::value>::type
601 __c11_atomic_init(volatile _Atomic(_Tp)* __a, _Tp __val) {
602 // [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because
603 // the default operator= in an object is not volatile, a byte-by-byte copy
605 volatile char* to = reinterpret_cast<volatile char*>(&__a->__a_value);
606 volatile char* end = to + sizeof(_Tp);
607 char* from = reinterpret_cast<char*>(&__val);
613 template <typename _Tp>
614 static inline void __c11_atomic_init(_Atomic(_Tp)* __a, _Tp __val) {
615 __a->__a_value = __val;
618 static inline void __c11_atomic_thread_fence(memory_order __order) {
619 __atomic_thread_fence(__gcc_atomic::__to_gcc_order(__order));
622 static inline void __c11_atomic_signal_fence(memory_order __order) {
623 __atomic_signal_fence(__gcc_atomic::__to_gcc_order(__order));
626 static inline bool __c11_atomic_is_lock_free(size_t __size) {
627 return __atomic_is_lock_free(__size, 0);
630 template <typename _Tp>
631 static inline void __c11_atomic_store(volatile _Atomic(_Tp)* __a, _Tp __val,
632 memory_order __order) {
633 return __atomic_store(&__a->__a_value, &__val,
634 __gcc_atomic::__to_gcc_order(__order));
637 template <typename _Tp>
638 static inline void __c11_atomic_store(_Atomic(_Tp)* __a, _Tp __val,
639 memory_order __order) {
640 return __atomic_store(&__a->__a_value, &__val,
641 __gcc_atomic::__to_gcc_order(__order));
644 template <typename _Tp>
645 static inline _Tp __c11_atomic_load(volatile _Atomic(_Tp)* __a,
646 memory_order __order) {
648 __atomic_load(&__a->__a_value, &__ret,
649 __gcc_atomic::__to_gcc_order(__order));
653 template <typename _Tp>
654 static inline _Tp __c11_atomic_load(_Atomic(_Tp)* __a, memory_order __order) {
656 __atomic_load(&__a->__a_value, &__ret,
657 __gcc_atomic::__to_gcc_order(__order));
661 template <typename _Tp>
662 static inline _Tp __c11_atomic_exchange(volatile _Atomic(_Tp)* __a,
663 _Tp __value, memory_order __order) {
665 __atomic_exchange(&__a->__a_value, &__value, &__ret,
666 __gcc_atomic::__to_gcc_order(__order));
670 template <typename _Tp>
671 static inline _Tp __c11_atomic_exchange(_Atomic(_Tp)* __a, _Tp __value,
672 memory_order __order) {
674 __atomic_exchange(&__a->__a_value, &__value, &__ret,
675 __gcc_atomic::__to_gcc_order(__order));
679 template <typename _Tp>
680 static inline bool __c11_atomic_compare_exchange_strong(
681 volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
682 memory_order __success, memory_order __failure) {
683 return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
685 __gcc_atomic::__to_gcc_order(__success),
686 __gcc_atomic::__to_gcc_order(__failure));
689 template <typename _Tp>
690 static inline bool __c11_atomic_compare_exchange_strong(
691 _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
692 memory_order __failure) {
693 return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
695 __gcc_atomic::__to_gcc_order(__success),
696 __gcc_atomic::__to_gcc_order(__failure));
699 template <typename _Tp>
700 static inline bool __c11_atomic_compare_exchange_weak(
701 volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
702 memory_order __success, memory_order __failure) {
703 return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
705 __gcc_atomic::__to_gcc_order(__success),
706 __gcc_atomic::__to_gcc_order(__failure));
709 template <typename _Tp>
710 static inline bool __c11_atomic_compare_exchange_weak(
711 _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
712 memory_order __failure) {
713 return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
715 __gcc_atomic::__to_gcc_order(__success),
716 __gcc_atomic::__to_gcc_order(__failure));
// Scale factor for fetch_add/fetch_sub: arithmetic on atomic<T*> must step
// by whole objects, so the pointer specialization scales the delta by
// sizeof(_Tp); everything else steps by 1.
template <typename _Tp>
struct __skip_amt { enum {value = 1}; };

template <typename _Tp>
struct __skip_amt<_Tp*> { enum {value = sizeof(_Tp)}; };

// FIXME: Haven't figured out what the spec says about using arrays with
// atomic_fetch_add. Force a failure rather than creating bad behavior.
template <typename _Tp>
struct __skip_amt<_Tp[]> { };
template <typename _Tp, int n>
struct __skip_amt<_Tp[n]> { };
732 template <typename _Tp, typename _Td>
733 static inline _Tp __c11_atomic_fetch_add(volatile _Atomic(_Tp)* __a,
734 _Td __delta, memory_order __order) {
735 return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
736 __gcc_atomic::__to_gcc_order(__order));
739 template <typename _Tp, typename _Td>
740 static inline _Tp __c11_atomic_fetch_add(_Atomic(_Tp)* __a, _Td __delta,
741 memory_order __order) {
742 return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
743 __gcc_atomic::__to_gcc_order(__order));
746 template <typename _Tp, typename _Td>
747 static inline _Tp __c11_atomic_fetch_sub(volatile _Atomic(_Tp)* __a,
748 _Td __delta, memory_order __order) {
749 return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
750 __gcc_atomic::__to_gcc_order(__order));
753 template <typename _Tp, typename _Td>
754 static inline _Tp __c11_atomic_fetch_sub(_Atomic(_Tp)* __a, _Td __delta,
755 memory_order __order) {
756 return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
757 __gcc_atomic::__to_gcc_order(__order));
760 template <typename _Tp>
761 static inline _Tp __c11_atomic_fetch_and(volatile _Atomic(_Tp)* __a,
762 _Tp __pattern, memory_order __order) {
763 return __atomic_fetch_and(&__a->__a_value, __pattern,
764 __gcc_atomic::__to_gcc_order(__order));
767 template <typename _Tp>
768 static inline _Tp __c11_atomic_fetch_and(_Atomic(_Tp)* __a,
769 _Tp __pattern, memory_order __order) {
770 return __atomic_fetch_and(&__a->__a_value, __pattern,
771 __gcc_atomic::__to_gcc_order(__order));
774 template <typename _Tp>
775 static inline _Tp __c11_atomic_fetch_or(volatile _Atomic(_Tp)* __a,
776 _Tp __pattern, memory_order __order) {
777 return __atomic_fetch_or(&__a->__a_value, __pattern,
778 __gcc_atomic::__to_gcc_order(__order));
781 template <typename _Tp>
782 static inline _Tp __c11_atomic_fetch_or(_Atomic(_Tp)* __a, _Tp __pattern,
783 memory_order __order) {
784 return __atomic_fetch_or(&__a->__a_value, __pattern,
785 __gcc_atomic::__to_gcc_order(__order));
788 template <typename _Tp>
789 static inline _Tp __c11_atomic_fetch_xor(volatile _Atomic(_Tp)* __a,
790 _Tp __pattern, memory_order __order) {
791 return __atomic_fetch_xor(&__a->__a_value, __pattern,
792 __gcc_atomic::__to_gcc_order(__order));
795 template <typename _Tp>
796 static inline _Tp __c11_atomic_fetch_xor(_Atomic(_Tp)* __a, _Tp __pattern,
797 memory_order __order) {
798 return __atomic_fetch_xor(&__a->__a_value, __pattern,
799 __gcc_atomic::__to_gcc_order(__order));
801 #endif // _GNUC_VER >= 407
804 inline _LIBCPP_INLINE_VISIBILITY
806 kill_dependency(_Tp __y) _NOEXCEPT
813 template <class _Tp, bool = is_integral<_Tp>::value && !is_same<_Tp, bool>::value>
814 struct __atomic_base // false
816 mutable _Atomic(_Tp) __a_;
818 _LIBCPP_INLINE_VISIBILITY
819 bool is_lock_free() const volatile _NOEXCEPT
820 {return __c11_atomic_is_lock_free(sizeof(_Tp));}
821 _LIBCPP_INLINE_VISIBILITY
822 bool is_lock_free() const _NOEXCEPT
823 {return __c11_atomic_is_lock_free(sizeof(_Tp));}
824 _LIBCPP_INLINE_VISIBILITY
825 void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
826 {__c11_atomic_store(&__a_, __d, __m);}
827 _LIBCPP_INLINE_VISIBILITY
828 void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
829 {__c11_atomic_store(&__a_, __d, __m);}
830 _LIBCPP_INLINE_VISIBILITY
831 _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
832 {return __c11_atomic_load(&__a_, __m);}
833 _LIBCPP_INLINE_VISIBILITY
834 _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
835 {return __c11_atomic_load(&__a_, __m);}
836 _LIBCPP_INLINE_VISIBILITY
837 operator _Tp() const volatile _NOEXCEPT {return load();}
838 _LIBCPP_INLINE_VISIBILITY
839 operator _Tp() const _NOEXCEPT {return load();}
840 _LIBCPP_INLINE_VISIBILITY
841 _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
842 {return __c11_atomic_exchange(&__a_, __d, __m);}
843 _LIBCPP_INLINE_VISIBILITY
844 _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
845 {return __c11_atomic_exchange(&__a_, __d, __m);}
846 _LIBCPP_INLINE_VISIBILITY
847 bool compare_exchange_weak(_Tp& __e, _Tp __d,
848 memory_order __s, memory_order __f) volatile _NOEXCEPT
849 {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
850 _LIBCPP_INLINE_VISIBILITY
851 bool compare_exchange_weak(_Tp& __e, _Tp __d,
852 memory_order __s, memory_order __f) _NOEXCEPT
853 {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
854 _LIBCPP_INLINE_VISIBILITY
855 bool compare_exchange_strong(_Tp& __e, _Tp __d,
856 memory_order __s, memory_order __f) volatile _NOEXCEPT
857 {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
858 _LIBCPP_INLINE_VISIBILITY
859 bool compare_exchange_strong(_Tp& __e, _Tp __d,
860 memory_order __s, memory_order __f) _NOEXCEPT
861 {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
862 _LIBCPP_INLINE_VISIBILITY
863 bool compare_exchange_weak(_Tp& __e, _Tp __d,
864 memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
865 {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
866 _LIBCPP_INLINE_VISIBILITY
867 bool compare_exchange_weak(_Tp& __e, _Tp __d,
868 memory_order __m = memory_order_seq_cst) _NOEXCEPT
869 {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
870 _LIBCPP_INLINE_VISIBILITY
871 bool compare_exchange_strong(_Tp& __e, _Tp __d,
872 memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
873 {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
874 _LIBCPP_INLINE_VISIBILITY
875 bool compare_exchange_strong(_Tp& __e, _Tp __d,
876 memory_order __m = memory_order_seq_cst) _NOEXCEPT
877 {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
879 _LIBCPP_INLINE_VISIBILITY
880 #ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
881 __atomic_base() _NOEXCEPT = default;
883 __atomic_base() _NOEXCEPT : __a_() {}
884 #endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
886 _LIBCPP_INLINE_VISIBILITY
887 _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {}
888 #ifndef _LIBCPP_HAS_NO_DELETED_FUNCTIONS
889 __atomic_base(const __atomic_base&) = delete;
890 __atomic_base& operator=(const __atomic_base&) = delete;
891 __atomic_base& operator=(const __atomic_base&) volatile = delete;
892 #else // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
894 __atomic_base(const __atomic_base&);
895 __atomic_base& operator=(const __atomic_base&);
896 __atomic_base& operator=(const __atomic_base&) volatile;
897 #endif // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
903 struct __atomic_base<_Tp, true>
// NOTE(review): this chunk is extraction-damaged — each line carries an
// embedded original line number (904, 906, ...) and structural lines are
// missing (here: the `template <class _Tp> struct __atomic_base<_Tp, true>`
// header and the closing `};`). Restore from upstream libc++ <atomic>
// before attempting to compile; code below is kept byte-identical.
//
// Specialization of __atomic_base for types that support arithmetic/bitwise
// atomic RMW operations. Inherits the load/store/exchange/CAS surface from
// the `false` base and adds fetch_* plus operator sugar, each in volatile
// and non-volatile flavors, implemented on the __c11_atomic_* builtins.
904 : public __atomic_base<_Tp, false>
906 typedef __atomic_base<_Tp, false> __base;
907 _LIBCPP_INLINE_VISIBILITY
908 __atomic_base() _NOEXCEPT _LIBCPP_DEFAULT
909 _LIBCPP_INLINE_VISIBILITY
910 _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}
// Atomic read-modify-write primitives; each returns the PREVIOUS value.
912 _LIBCPP_INLINE_VISIBILITY
913 _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
914 {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
915 _LIBCPP_INLINE_VISIBILITY
916 _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
917 {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
918 _LIBCPP_INLINE_VISIBILITY
919 _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
920 {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
921 _LIBCPP_INLINE_VISIBILITY
922 _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
923 {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
924 _LIBCPP_INLINE_VISIBILITY
925 _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
926 {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
927 _LIBCPP_INLINE_VISIBILITY
928 _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
929 {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
930 _LIBCPP_INLINE_VISIBILITY
931 _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
932 {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
933 _LIBCPP_INLINE_VISIBILITY
934 _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
935 {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
936 _LIBCPP_INLINE_VISIBILITY
937 _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
938 {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}
939 _LIBCPP_INLINE_VISIBILITY
940 _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
941 {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}
// Post-increment/decrement return the OLD value (fetch_* result as-is).
943 _LIBCPP_INLINE_VISIBILITY
944 _Tp operator++(int) volatile _NOEXCEPT {return fetch_add(_Tp(1));}
945 _LIBCPP_INLINE_VISIBILITY
946 _Tp operator++(int) _NOEXCEPT {return fetch_add(_Tp(1));}
947 _LIBCPP_INLINE_VISIBILITY
948 _Tp operator--(int) volatile _NOEXCEPT {return fetch_sub(_Tp(1));}
949 _LIBCPP_INLINE_VISIBILITY
950 _Tp operator--(int) _NOEXCEPT {return fetch_sub(_Tp(1));}
// Pre-increment/decrement and compound assignment return the NEW value,
// reconstructed locally from the old value (old op operand).
951 _LIBCPP_INLINE_VISIBILITY
952 _Tp operator++() volatile _NOEXCEPT {return fetch_add(_Tp(1)) + _Tp(1);}
953 _LIBCPP_INLINE_VISIBILITY
954 _Tp operator++() _NOEXCEPT {return fetch_add(_Tp(1)) + _Tp(1);}
955 _LIBCPP_INLINE_VISIBILITY
956 _Tp operator--() volatile _NOEXCEPT {return fetch_sub(_Tp(1)) - _Tp(1);}
957 _LIBCPP_INLINE_VISIBILITY
958 _Tp operator--() _NOEXCEPT {return fetch_sub(_Tp(1)) - _Tp(1);}
959 _LIBCPP_INLINE_VISIBILITY
960 _Tp operator+=(_Tp __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
961 _LIBCPP_INLINE_VISIBILITY
962 _Tp operator+=(_Tp __op) _NOEXCEPT {return fetch_add(__op) + __op;}
963 _LIBCPP_INLINE_VISIBILITY
964 _Tp operator-=(_Tp __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
965 _LIBCPP_INLINE_VISIBILITY
966 _Tp operator-=(_Tp __op) _NOEXCEPT {return fetch_sub(__op) - __op;}
967 _LIBCPP_INLINE_VISIBILITY
968 _Tp operator&=(_Tp __op) volatile _NOEXCEPT {return fetch_and(__op) & __op;}
969 _LIBCPP_INLINE_VISIBILITY
970 _Tp operator&=(_Tp __op) _NOEXCEPT {return fetch_and(__op) & __op;}
971 _LIBCPP_INLINE_VISIBILITY
972 _Tp operator|=(_Tp __op) volatile _NOEXCEPT {return fetch_or(__op) | __op;}
973 _LIBCPP_INLINE_VISIBILITY
974 _Tp operator|=(_Tp __op) _NOEXCEPT {return fetch_or(__op) | __op;}
975 _LIBCPP_INLINE_VISIBILITY
976 _Tp operator^=(_Tp __op) volatile _NOEXCEPT {return fetch_xor(__op) ^ __op;}
977 _LIBCPP_INLINE_VISIBILITY
978 _Tp operator^=(_Tp __op) _NOEXCEPT {return fetch_xor(__op) ^ __op;}
// NOTE(review): extraction-garbled fragment — the `template <class _Tp>
// struct atomic` header and closing `};` are missing; restore from upstream
// libc++ <atomic>. Code kept byte-identical below.
//
// Primary std::atomic<_Tp> template: a thin wrapper over __atomic_base that
// adds assignment from _Tp (sequentially-consistent store, returns __d).
985 : public __atomic_base<_Tp>
987 typedef __atomic_base<_Tp> __base;
988 _LIBCPP_INLINE_VISIBILITY
989 atomic() _NOEXCEPT _LIBCPP_DEFAULT
990 _LIBCPP_INLINE_VISIBILITY
991 _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}
993 _LIBCPP_INLINE_VISIBILITY
994 _Tp operator=(_Tp __d) volatile _NOEXCEPT
995 {__base::store(__d); return __d;}
996 _LIBCPP_INLINE_VISIBILITY
997 _Tp operator=(_Tp __d) _NOEXCEPT
998 {__base::store(__d); return __d;}
// NOTE(review): extraction-garbled fragment — the `struct atomic<_Tp*>` line
// after the template header, some _NOEXCEPT/volatile qualifier lines, and the
// closing `};` are missing; restore from upstream libc++ <atomic>.
//
// Partial specialization std::atomic<_Tp*>: pointer arithmetic via
// fetch_add/fetch_sub taking a ptrdiff_t byte^H^H element offset, plus the
// usual increment/decrement/compound-assignment operator sugar.
1003 template <class _Tp>
1005 : public __atomic_base<_Tp*>
1007 typedef __atomic_base<_Tp*> __base;
1008 _LIBCPP_INLINE_VISIBILITY
1009 atomic() _NOEXCEPT _LIBCPP_DEFAULT
1010 _LIBCPP_INLINE_VISIBILITY
1011 _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}
1013 _LIBCPP_INLINE_VISIBILITY
1014 _Tp* operator=(_Tp* __d) volatile _NOEXCEPT
1015 {__base::store(__d); return __d;}
1016 _LIBCPP_INLINE_VISIBILITY
1017 _Tp* operator=(_Tp* __d) _NOEXCEPT
1018 {__base::store(__d); return __d;}
// fetch_add/fetch_sub return the PREVIOUS pointer value.
// NOTE(review): the volatile overloads below appear to have lost their
// trailing `volatile _NOEXCEPT` line to extraction — confirm upstream.
1020 _LIBCPP_INLINE_VISIBILITY
1021 _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
1023 {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
1024 _LIBCPP_INLINE_VISIBILITY
1025 _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1026 {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
1027 _LIBCPP_INLINE_VISIBILITY
1028 _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
1030 {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
1031 _LIBCPP_INLINE_VISIBILITY
1032 _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1033 {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
// Post-inc/dec return the old pointer; pre-inc/dec and +=/-= return the new.
1035 _LIBCPP_INLINE_VISIBILITY
1036 _Tp* operator++(int) volatile _NOEXCEPT {return fetch_add(1);}
1037 _LIBCPP_INLINE_VISIBILITY
1038 _Tp* operator++(int) _NOEXCEPT {return fetch_add(1);}
1039 _LIBCPP_INLINE_VISIBILITY
1040 _Tp* operator--(int) volatile _NOEXCEPT {return fetch_sub(1);}
1041 _LIBCPP_INLINE_VISIBILITY
1042 _Tp* operator--(int) _NOEXCEPT {return fetch_sub(1);}
1043 _LIBCPP_INLINE_VISIBILITY
1044 _Tp* operator++() volatile _NOEXCEPT {return fetch_add(1) + 1;}
1045 _LIBCPP_INLINE_VISIBILITY
1046 _Tp* operator++() _NOEXCEPT {return fetch_add(1) + 1;}
1047 _LIBCPP_INLINE_VISIBILITY
1048 _Tp* operator--() volatile _NOEXCEPT {return fetch_sub(1) - 1;}
1049 _LIBCPP_INLINE_VISIBILITY
1050 _Tp* operator--() _NOEXCEPT {return fetch_sub(1) - 1;}
1051 _LIBCPP_INLINE_VISIBILITY
1052 _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
1053 _LIBCPP_INLINE_VISIBILITY
1054 _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT {return fetch_add(__op) + __op;}
1055 _LIBCPP_INLINE_VISIBILITY
1056 _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
1057 _LIBCPP_INLINE_VISIBILITY
1058 _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT {return fetch_sub(__op) - __op;}
// NOTE(review): extraction-garbled fragments — return-type lines (bool/void)
// and brace lines are missing from these free functions; restore from
// upstream libc++ <atomic>. Code kept byte-identical below.
//
// C-compatible free-function interface: atomic_is_lock_free forwards to the
// member is_lock_free(); atomic_init performs non-atomic initialization via
// the __c11_atomic_init builtin. Volatile and non-volatile overloads each.
1061 // atomic_is_lock_free
1063 template <class _Tp>
1064 inline _LIBCPP_INLINE_VISIBILITY
1066 atomic_is_lock_free(const volatile atomic<_Tp>* __o) _NOEXCEPT
1068 return __o->is_lock_free();
1071 template <class _Tp>
1072 inline _LIBCPP_INLINE_VISIBILITY
1074 atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT
1076 return __o->is_lock_free();
// atomic_init: NOT an atomic operation; initializes the contained value.
1081 template <class _Tp>
1082 inline _LIBCPP_INLINE_VISIBILITY
1084 atomic_init(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1086 __c11_atomic_init(&__o->__a_, __d);
1089 template <class _Tp>
1090 inline _LIBCPP_INLINE_VISIBILITY
1092 atomic_init(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1094 __c11_atomic_init(&__o->__a_, __d);
// NOTE(review): extraction-garbled — the bodies of atomic_store and
// atomic_load (the `__o->store(__d);` / `return __o->load();` lines, braces,
// and return-type lines) are missing entirely from this chunk; restore from
// upstream libc++ <atomic>. Code kept byte-identical below.
//
// Free-function store/load wrappers over the corresponding members, with
// _explicit variants taking an explicit memory_order.
1099 template <class _Tp>
1100 inline _LIBCPP_INLINE_VISIBILITY
1102 atomic_store(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1107 template <class _Tp>
1108 inline _LIBCPP_INLINE_VISIBILITY
1110 atomic_store(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1115 // atomic_store_explicit
1117 template <class _Tp>
1118 inline _LIBCPP_INLINE_VISIBILITY
1120 atomic_store_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1122 __o->store(__d, __m);
1125 template <class _Tp>
1126 inline _LIBCPP_INLINE_VISIBILITY
1128 atomic_store_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1130 __o->store(__d, __m);
// atomic_load — bodies missing here (see NOTE above).
1135 template <class _Tp>
1136 inline _LIBCPP_INLINE_VISIBILITY
1138 atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT
1143 template <class _Tp>
1144 inline _LIBCPP_INLINE_VISIBILITY
1146 atomic_load(const atomic<_Tp>* __o) _NOEXCEPT
1151 // atomic_load_explicit
1153 template <class _Tp>
1154 inline _LIBCPP_INLINE_VISIBILITY
1156 atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
1158 return __o->load(__m);
1161 template <class _Tp>
1162 inline _LIBCPP_INLINE_VISIBILITY
1164 atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
1166 return __o->load(__m);
// NOTE(review): extraction-garbled — return-type and brace lines missing;
// restore from upstream libc++ <atomic>. Code kept byte-identical below.
//
// atomic_exchange[_explicit]: free-function wrappers forwarding to the
// member exchange(); return the previous value.
1171 template <class _Tp>
1172 inline _LIBCPP_INLINE_VISIBILITY
1174 atomic_exchange(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1176 return __o->exchange(__d);
1179 template <class _Tp>
1180 inline _LIBCPP_INLINE_VISIBILITY
1182 atomic_exchange(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1184 return __o->exchange(__d);
1187 // atomic_exchange_explicit
1189 template <class _Tp>
1190 inline _LIBCPP_INLINE_VISIBILITY
1192 atomic_exchange_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1194 return __o->exchange(__d, __m);
1197 template <class _Tp>
1198 inline _LIBCPP_INLINE_VISIBILITY
1200 atomic_exchange_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1202 return __o->exchange(__d, __m);
// NOTE(review): extraction-garbled — return-type/brace lines and some
// parameter-list lines (e.g. the `_Tp __d,` continuation at original 1247 and
// `_Tp* __e, _Tp __d,` at 1268) are missing; restore from upstream libc++.
//
// atomic_compare_exchange_{weak,strong}[_explicit]: forward to the member
// compare_exchange_* with *__e as the expected-value reference. On failure
// the observed value is written back through __e. The _explicit forms take
// separate success (__s) and failure (__f) orderings.
1205 // atomic_compare_exchange_weak
1207 template <class _Tp>
1208 inline _LIBCPP_INLINE_VISIBILITY
1210 atomic_compare_exchange_weak(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1212 return __o->compare_exchange_weak(*__e, __d);
1215 template <class _Tp>
1216 inline _LIBCPP_INLINE_VISIBILITY
1218 atomic_compare_exchange_weak(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1220 return __o->compare_exchange_weak(*__e, __d);
1223 // atomic_compare_exchange_strong
1225 template <class _Tp>
1226 inline _LIBCPP_INLINE_VISIBILITY
1228 atomic_compare_exchange_strong(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1230 return __o->compare_exchange_strong(*__e, __d);
1233 template <class _Tp>
1234 inline _LIBCPP_INLINE_VISIBILITY
1236 atomic_compare_exchange_strong(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1238 return __o->compare_exchange_strong(*__e, __d);
1241 // atomic_compare_exchange_weak_explicit
1243 template <class _Tp>
1244 inline _LIBCPP_INLINE_VISIBILITY
1246 atomic_compare_exchange_weak_explicit(volatile atomic<_Tp>* __o, _Tp* __e,
1248 memory_order __s, memory_order __f) _NOEXCEPT
1250 return __o->compare_exchange_weak(*__e, __d, __s, __f);
1253 template <class _Tp>
1254 inline _LIBCPP_INLINE_VISIBILITY
1256 atomic_compare_exchange_weak_explicit(atomic<_Tp>* __o, _Tp* __e, _Tp __d,
1257 memory_order __s, memory_order __f) _NOEXCEPT
1259 return __o->compare_exchange_weak(*__e, __d, __s, __f);
1262 // atomic_compare_exchange_strong_explicit
1264 template <class _Tp>
1265 inline _LIBCPP_INLINE_VISIBILITY
1267 atomic_compare_exchange_strong_explicit(volatile atomic<_Tp>* __o,
1269 memory_order __s, memory_order __f) _NOEXCEPT
1271 return __o->compare_exchange_strong(*__e, __d, __s, __f);
1274 template <class _Tp>
1275 inline _LIBCPP_INLINE_VISIBILITY
1277 atomic_compare_exchange_strong_explicit(atomic<_Tp>* __o, _Tp* __e,
1279 memory_order __s, memory_order __f) _NOEXCEPT
1281 return __o->compare_exchange_strong(*__e, __d, __s, __f);
// NOTE(review): extraction-garbled — the `typename enable_if<` opener and
// `_Tp` result lines of the SFINAE return types, plus braces, are missing;
// restore from upstream libc++ <atomic>.
//
// atomic_fetch_add[_explicit]: integral overloads are constrained via
// enable_if to integral non-bool _Tp; separate pointer overloads take a
// ptrdiff_t offset. All forward to the member fetch_add.
1286 template <class _Tp>
1287 inline _LIBCPP_INLINE_VISIBILITY
1290 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1293 atomic_fetch_add(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1295 return __o->fetch_add(__op);
1298 template <class _Tp>
1299 inline _LIBCPP_INLINE_VISIBILITY
1302 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1305 atomic_fetch_add(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1307 return __o->fetch_add(__op);
// Pointer overloads: no SFINAE constraint needed.
1310 template <class _Tp>
1311 inline _LIBCPP_INLINE_VISIBILITY
1313 atomic_fetch_add(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
1315 return __o->fetch_add(__op);
1318 template <class _Tp>
1319 inline _LIBCPP_INLINE_VISIBILITY
1321 atomic_fetch_add(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
1323 return __o->fetch_add(__op);
1326 // atomic_fetch_add_explicit
1328 template <class _Tp>
1329 inline _LIBCPP_INLINE_VISIBILITY
1332 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1335 atomic_fetch_add_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1337 return __o->fetch_add(__op, __m);
1340 template <class _Tp>
1341 inline _LIBCPP_INLINE_VISIBILITY
1344 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1347 atomic_fetch_add_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1349 return __o->fetch_add(__op, __m);
1352 template <class _Tp>
1353 inline _LIBCPP_INLINE_VISIBILITY
1355 atomic_fetch_add_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
1356 memory_order __m) _NOEXCEPT
1358 return __o->fetch_add(__op, __m);
1361 template <class _Tp>
1362 inline _LIBCPP_INLINE_VISIBILITY
1364 atomic_fetch_add_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
1366 return __o->fetch_add(__op, __m);
// NOTE(review): extraction-garbled (same pattern as atomic_fetch_add above:
// SFINAE return-type opener lines and braces missing); restore from upstream.
//
// atomic_fetch_sub[_explicit]: integral non-bool overloads plus ptrdiff_t
// pointer overloads, all forwarding to the member fetch_sub.
1371 template <class _Tp>
1372 inline _LIBCPP_INLINE_VISIBILITY
1375 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1378 atomic_fetch_sub(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1380 return __o->fetch_sub(__op);
1383 template <class _Tp>
1384 inline _LIBCPP_INLINE_VISIBILITY
1387 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1390 atomic_fetch_sub(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1392 return __o->fetch_sub(__op);
1395 template <class _Tp>
1396 inline _LIBCPP_INLINE_VISIBILITY
1398 atomic_fetch_sub(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
1400 return __o->fetch_sub(__op);
1403 template <class _Tp>
1404 inline _LIBCPP_INLINE_VISIBILITY
1406 atomic_fetch_sub(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
1408 return __o->fetch_sub(__op);
1411 // atomic_fetch_sub_explicit
1413 template <class _Tp>
1414 inline _LIBCPP_INLINE_VISIBILITY
1417 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1420 atomic_fetch_sub_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1422 return __o->fetch_sub(__op, __m);
1425 template <class _Tp>
1426 inline _LIBCPP_INLINE_VISIBILITY
1429 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1432 atomic_fetch_sub_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1434 return __o->fetch_sub(__op, __m);
1437 template <class _Tp>
1438 inline _LIBCPP_INLINE_VISIBILITY
1440 atomic_fetch_sub_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
1441 memory_order __m) _NOEXCEPT
1443 return __o->fetch_sub(__op, __m);
1446 template <class _Tp>
1447 inline _LIBCPP_INLINE_VISIBILITY
1449 atomic_fetch_sub_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
1451 return __o->fetch_sub(__op, __m);
// NOTE(review): extraction-garbled (SFINAE opener lines and braces missing);
// restore from upstream libc++ <atomic>.
//
// atomic_fetch_and[_explicit] / atomic_fetch_or[_explicit]: bitwise RMW
// wrappers, constrained to integral non-bool _Tp (no pointer overloads —
// bitwise ops are meaningless on pointers). Forward to the members.
1456 template <class _Tp>
1457 inline _LIBCPP_INLINE_VISIBILITY
1460 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1463 atomic_fetch_and(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1465 return __o->fetch_and(__op);
1468 template <class _Tp>
1469 inline _LIBCPP_INLINE_VISIBILITY
1472 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1475 atomic_fetch_and(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1477 return __o->fetch_and(__op);
1480 // atomic_fetch_and_explicit
1482 template <class _Tp>
1483 inline _LIBCPP_INLINE_VISIBILITY
1486 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1489 atomic_fetch_and_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1491 return __o->fetch_and(__op, __m);
1494 template <class _Tp>
1495 inline _LIBCPP_INLINE_VISIBILITY
1498 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1501 atomic_fetch_and_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1503 return __o->fetch_and(__op, __m);
1508 template <class _Tp>
1509 inline _LIBCPP_INLINE_VISIBILITY
1512 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1515 atomic_fetch_or(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1517 return __o->fetch_or(__op);
1520 template <class _Tp>
1521 inline _LIBCPP_INLINE_VISIBILITY
1524 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1527 atomic_fetch_or(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1529 return __o->fetch_or(__op);
1532 // atomic_fetch_or_explicit
1534 template <class _Tp>
1535 inline _LIBCPP_INLINE_VISIBILITY
1538 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1541 atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1543 return __o->fetch_or(__op, __m);
1546 template <class _Tp>
1547 inline _LIBCPP_INLINE_VISIBILITY
1550 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1553 atomic_fetch_or_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1555 return __o->fetch_or(__op, __m);
// NOTE(review): extraction-garbled (SFINAE opener lines and braces missing);
// restore from upstream libc++ <atomic>.
//
// atomic_fetch_xor[_explicit]: integral non-bool overloads forwarding to the
// member fetch_xor; return the previous value.
1560 template <class _Tp>
1561 inline _LIBCPP_INLINE_VISIBILITY
1564 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1567 atomic_fetch_xor(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1569 return __o->fetch_xor(__op);
1572 template <class _Tp>
1573 inline _LIBCPP_INLINE_VISIBILITY
1576 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1579 atomic_fetch_xor(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1581 return __o->fetch_xor(__op);
1584 // atomic_fetch_xor_explicit
1586 template <class _Tp>
1587 inline _LIBCPP_INLINE_VISIBILITY
1590 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1593 atomic_fetch_xor_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1595 return __o->fetch_xor(__op, __m);
1598 template <class _Tp>
1599 inline _LIBCPP_INLINE_VISIBILITY
1602 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1605 atomic_fetch_xor_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1607 return __o->fetch_xor(__op, __m);
// NOTE(review): extraction-garbled — the `_Atomic(bool) __a_;` member line,
// the opening `{`, the `#else` of the defaulted-functions block, and the
// closing `} atomic_flag;` are missing; restore from upstream libc++.
//
// atomic_flag: the minimal lock-free boolean flag. test_and_set is an
// atomic exchange(true) returning the previous state; clear is an atomic
// store(false). Copy construction and assignment are deleted (or declared
// private and left undefined on compilers without =delete).
1610 // flag type and operations
1612 typedef struct atomic_flag
1616 _LIBCPP_INLINE_VISIBILITY
1617 bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1618 {return __c11_atomic_exchange(&__a_, true, __m);}
1619 _LIBCPP_INLINE_VISIBILITY
1620 bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT
1621 {return __c11_atomic_exchange(&__a_, true, __m);}
1622 _LIBCPP_INLINE_VISIBILITY
1623 void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1624 {__c11_atomic_store(&__a_, false, __m);}
1625 _LIBCPP_INLINE_VISIBILITY
1626 void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT
1627 {__c11_atomic_store(&__a_, false, __m);}
// Default ctor: =default where supported, otherwise value-initialize __a_.
// NOTE(review): the `#else` line between the two ctor forms is missing here.
1629 _LIBCPP_INLINE_VISIBILITY
1630 #ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
1631 atomic_flag() _NOEXCEPT = default;
1633 atomic_flag() _NOEXCEPT : __a_() {}
1634 #endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
1636 _LIBCPP_INLINE_VISIBILITY
1637 atomic_flag(bool __b) _NOEXCEPT : __a_(__b) {}
1639 #ifndef _LIBCPP_HAS_NO_DELETED_FUNCTIONS
1640 atomic_flag(const atomic_flag&) = delete;
1641 atomic_flag& operator=(const atomic_flag&) = delete;
1642 atomic_flag& operator=(const atomic_flag&) volatile = delete;
1643 #else // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
1645 atomic_flag(const atomic_flag&);
1646 atomic_flag& operator=(const atomic_flag&);
1647 atomic_flag& operator=(const atomic_flag&) volatile;
1648 #endif // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
// NOTE(review): extraction-garbled — return-type lines (bool/void), braces,
// and the clear()/clear(__m) call bodies of the atomic_flag_clear[_explicit]
// functions are missing; restore from upstream libc++ <atomic>.
//
// C-compatible free functions over atomic_flag, forwarding to the members.
1651 inline _LIBCPP_INLINE_VISIBILITY
1653 atomic_flag_test_and_set(volatile atomic_flag* __o) _NOEXCEPT
1655 return __o->test_and_set();
1658 inline _LIBCPP_INLINE_VISIBILITY
1660 atomic_flag_test_and_set(atomic_flag* __o) _NOEXCEPT
1662 return __o->test_and_set();
1665 inline _LIBCPP_INLINE_VISIBILITY
1667 atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
1669 return __o->test_and_set(__m);
1672 inline _LIBCPP_INLINE_VISIBILITY
1674 atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
1676 return __o->test_and_set(__m);
// atomic_flag_clear[_explicit] — bodies missing here (see NOTE above).
1679 inline _LIBCPP_INLINE_VISIBILITY
1681 atomic_flag_clear(volatile atomic_flag* __o) _NOEXCEPT
1686 inline _LIBCPP_INLINE_VISIBILITY
1688 atomic_flag_clear(atomic_flag* __o) _NOEXCEPT
1693 inline _LIBCPP_INLINE_VISIBILITY
1695 atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
1700 inline _LIBCPP_INLINE_VISIBILITY
1702 atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
// NOTE(review): extraction-garbled — `void` return-type lines and braces
// missing; restore from upstream libc++ <atomic>.
//
// atomic_thread_fence: establishes memory synchronization between threads
// per __m. atomic_signal_fence: compiler-only fence, orders with respect to
// a signal handler on the same thread; emits no hardware fence.
1709 inline _LIBCPP_INLINE_VISIBILITY
1711 atomic_thread_fence(memory_order __m) _NOEXCEPT
1713 __c11_atomic_thread_fence(__m);
1716 inline _LIBCPP_INLINE_VISIBILITY
1718 atomic_signal_fence(memory_order __m) _NOEXCEPT
1720 __c11_atomic_signal_fence(__m);
// Standard-mandated convenience aliases: one atomic_* typedef per builtin
// integer/character type, per <cstdint> least/fast width type, and per
// pointer-related integer type ([atomics.syn]).
1723 // Atomics for standard typedef types
1725 typedef atomic<bool> atomic_bool;
1726 typedef atomic<char> atomic_char;
1727 typedef atomic<signed char> atomic_schar;
1728 typedef atomic<unsigned char> atomic_uchar;
1729 typedef atomic<short> atomic_short;
1730 typedef atomic<unsigned short> atomic_ushort;
1731 typedef atomic<int> atomic_int;
1732 typedef atomic<unsigned int> atomic_uint;
1733 typedef atomic<long> atomic_long;
1734 typedef atomic<unsigned long> atomic_ulong;
1735 typedef atomic<long long> atomic_llong;
1736 typedef atomic<unsigned long long> atomic_ullong;
1737 typedef atomic<char16_t> atomic_char16_t;
1738 typedef atomic<char32_t> atomic_char32_t;
1739 typedef atomic<wchar_t> atomic_wchar_t;
// <cstdint> least-width aliases.
1741 typedef atomic<int_least8_t> atomic_int_least8_t;
1742 typedef atomic<uint_least8_t> atomic_uint_least8_t;
1743 typedef atomic<int_least16_t> atomic_int_least16_t;
1744 typedef atomic<uint_least16_t> atomic_uint_least16_t;
1745 typedef atomic<int_least32_t> atomic_int_least32_t;
1746 typedef atomic<uint_least32_t> atomic_uint_least32_t;
1747 typedef atomic<int_least64_t> atomic_int_least64_t;
1748 typedef atomic<uint_least64_t> atomic_uint_least64_t;
// <cstdint> fast-width aliases.
1750 typedef atomic<int_fast8_t> atomic_int_fast8_t;
1751 typedef atomic<uint_fast8_t> atomic_uint_fast8_t;
1752 typedef atomic<int_fast16_t> atomic_int_fast16_t;
1753 typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
1754 typedef atomic<int_fast32_t> atomic_int_fast32_t;
1755 typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
1756 typedef atomic<int_fast64_t> atomic_int_fast64_t;
1757 typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
// Pointer-sized / size-related aliases.
1759 typedef atomic<intptr_t> atomic_intptr_t;
1760 typedef atomic<uintptr_t> atomic_uintptr_t;
1761 typedef atomic<size_t> atomic_size_t;
1762 typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
1763 typedef atomic<intmax_t> atomic_intmax_t;
1764 typedef atomic<uintmax_t> atomic_uintmax_t;
// ATOMIC_FLAG_INIT initializes an atomic_flag to clear; ATOMIC_VAR_INIT
// wraps a value for aggregate initialization of an atomic object.
1766 #define ATOMIC_FLAG_INIT {false}
1767 #define ATOMIC_VAR_INIT(__v) {__v}
// Lock-free property macros: forwarded to the compiler-provided
// __GCC_ATOMIC_*_LOCK_FREE predefines (0 = never, 1 = sometimes, 2 = always).
1769 // lock-free property
1771 #define ATOMIC_BOOL_LOCK_FREE __GCC_ATOMIC_BOOL_LOCK_FREE
1772 #define ATOMIC_CHAR_LOCK_FREE __GCC_ATOMIC_CHAR_LOCK_FREE
1773 #define ATOMIC_CHAR16_T_LOCK_FREE __GCC_ATOMIC_CHAR16_T_LOCK_FREE
1774 #define ATOMIC_CHAR32_T_LOCK_FREE __GCC_ATOMIC_CHAR32_T_LOCK_FREE
1775 #define ATOMIC_WCHAR_T_LOCK_FREE __GCC_ATOMIC_WCHAR_T_LOCK_FREE
1776 #define ATOMIC_SHORT_LOCK_FREE __GCC_ATOMIC_SHORT_LOCK_FREE
1777 #define ATOMIC_INT_LOCK_FREE __GCC_ATOMIC_INT_LOCK_FREE
1778 #define ATOMIC_LONG_LOCK_FREE __GCC_ATOMIC_LONG_LOCK_FREE
1779 #define ATOMIC_LLONG_LOCK_FREE __GCC_ATOMIC_LLONG_LOCK_FREE
1780 #define ATOMIC_POINTER_LOCK_FREE __GCC_ATOMIC_POINTER_LOCK_FREE
1782 #endif // !__has_feature(cxx_atomic)
1784 _LIBCPP_END_NAMESPACE_STD
1786 #endif // !_LIBCPP_HAS_NO_THREADS
1788 #endif // _LIBCPP_ATOMIC