2 //===--------------------------- atomic -----------------------------------===//
4 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
5 // See https://llvm.org/LICENSE.txt for license information.
6 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
8 //===----------------------------------------------------------------------===//
10 #ifndef _LIBCPP_ATOMIC
11 #define _LIBCPP_ATOMIC
21 #define __cpp_lib_atomic_is_always_lock_free // as specified by SG10
23 // order and consistency
25 enum memory_order: unspecified // enum class in C++20
28 consume, // load-consume
29 acquire, // load-acquire
30 release, // store-release
31 acq_rel, // store-release load-acquire
32 seq_cst // store-release load-acquire
35 inline constexpr auto memory_order_relaxed = memory_order::relaxed;
36 inline constexpr auto memory_order_consume = memory_order::consume;
37 inline constexpr auto memory_order_acquire = memory_order::acquire;
38 inline constexpr auto memory_order_release = memory_order::release;
39 inline constexpr auto memory_order_acq_rel = memory_order::acq_rel;
40 inline constexpr auto memory_order_seq_cst = memory_order::seq_cst;
42 template <class T> T kill_dependency(T y) noexcept;
46 #define ATOMIC_BOOL_LOCK_FREE unspecified
47 #define ATOMIC_CHAR_LOCK_FREE unspecified
48 #define ATOMIC_CHAR16_T_LOCK_FREE unspecified
49 #define ATOMIC_CHAR32_T_LOCK_FREE unspecified
50 #define ATOMIC_WCHAR_T_LOCK_FREE unspecified
51 #define ATOMIC_SHORT_LOCK_FREE unspecified
52 #define ATOMIC_INT_LOCK_FREE unspecified
53 #define ATOMIC_LONG_LOCK_FREE unspecified
54 #define ATOMIC_LLONG_LOCK_FREE unspecified
55 #define ATOMIC_POINTER_LOCK_FREE unspecified
57 // flag type and operations
59 typedef struct atomic_flag
61 bool test_and_set(memory_order m = memory_order_seq_cst) volatile noexcept;
62 bool test_and_set(memory_order m = memory_order_seq_cst) noexcept;
63 void clear(memory_order m = memory_order_seq_cst) volatile noexcept;
64 void clear(memory_order m = memory_order_seq_cst) noexcept;
65 atomic_flag() noexcept = default;
66 atomic_flag(const atomic_flag&) = delete;
67 atomic_flag& operator=(const atomic_flag&) = delete;
68 atomic_flag& operator=(const atomic_flag&) volatile = delete;
72 atomic_flag_test_and_set(volatile atomic_flag* obj) noexcept;
75 atomic_flag_test_and_set(atomic_flag* obj) noexcept;
78 atomic_flag_test_and_set_explicit(volatile atomic_flag* obj,
79 memory_order m) noexcept;
82 atomic_flag_test_and_set_explicit(atomic_flag* obj, memory_order m) noexcept;
85 atomic_flag_clear(volatile atomic_flag* obj) noexcept;
88 atomic_flag_clear(atomic_flag* obj) noexcept;
91 atomic_flag_clear_explicit(volatile atomic_flag* obj, memory_order m) noexcept;
94 atomic_flag_clear_explicit(atomic_flag* obj, memory_order m) noexcept;
96 #define ATOMIC_FLAG_INIT see below
97 #define ATOMIC_VAR_INIT(value) see below
102 static constexpr bool is_always_lock_free;
103 bool is_lock_free() const volatile noexcept;
104 bool is_lock_free() const noexcept;
105 void store(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
106 void store(T desr, memory_order m = memory_order_seq_cst) noexcept;
107 T load(memory_order m = memory_order_seq_cst) const volatile noexcept;
108 T load(memory_order m = memory_order_seq_cst) const noexcept;
109 operator T() const volatile noexcept;
110 operator T() const noexcept;
111 T exchange(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
112 T exchange(T desr, memory_order m = memory_order_seq_cst) noexcept;
113 bool compare_exchange_weak(T& expc, T desr,
114 memory_order s, memory_order f) volatile noexcept;
115 bool compare_exchange_weak(T& expc, T desr, memory_order s, memory_order f) noexcept;
116 bool compare_exchange_strong(T& expc, T desr,
117 memory_order s, memory_order f) volatile noexcept;
118 bool compare_exchange_strong(T& expc, T desr,
119 memory_order s, memory_order f) noexcept;
120 bool compare_exchange_weak(T& expc, T desr,
121 memory_order m = memory_order_seq_cst) volatile noexcept;
122 bool compare_exchange_weak(T& expc, T desr,
123 memory_order m = memory_order_seq_cst) noexcept;
124 bool compare_exchange_strong(T& expc, T desr,
125 memory_order m = memory_order_seq_cst) volatile noexcept;
126 bool compare_exchange_strong(T& expc, T desr,
127 memory_order m = memory_order_seq_cst) noexcept;
129 atomic() noexcept = default;
130 constexpr atomic(T desr) noexcept;
131 atomic(const atomic&) = delete;
132 atomic& operator=(const atomic&) = delete;
133 atomic& operator=(const atomic&) volatile = delete;
134 T operator=(T) volatile noexcept;
135 T operator=(T) noexcept;
139 struct atomic<integral>
141 static constexpr bool is_always_lock_free;
142 bool is_lock_free() const volatile noexcept;
143 bool is_lock_free() const noexcept;
144 void store(integral desr, memory_order m = memory_order_seq_cst) volatile noexcept;
145 void store(integral desr, memory_order m = memory_order_seq_cst) noexcept;
146 integral load(memory_order m = memory_order_seq_cst) const volatile noexcept;
147 integral load(memory_order m = memory_order_seq_cst) const noexcept;
148 operator integral() const volatile noexcept;
149 operator integral() const noexcept;
150 integral exchange(integral desr,
151 memory_order m = memory_order_seq_cst) volatile noexcept;
152 integral exchange(integral desr, memory_order m = memory_order_seq_cst) noexcept;
153 bool compare_exchange_weak(integral& expc, integral desr,
154 memory_order s, memory_order f) volatile noexcept;
155 bool compare_exchange_weak(integral& expc, integral desr,
156 memory_order s, memory_order f) noexcept;
157 bool compare_exchange_strong(integral& expc, integral desr,
158 memory_order s, memory_order f) volatile noexcept;
159 bool compare_exchange_strong(integral& expc, integral desr,
160 memory_order s, memory_order f) noexcept;
161 bool compare_exchange_weak(integral& expc, integral desr,
162 memory_order m = memory_order_seq_cst) volatile noexcept;
163 bool compare_exchange_weak(integral& expc, integral desr,
164 memory_order m = memory_order_seq_cst) noexcept;
165 bool compare_exchange_strong(integral& expc, integral desr,
166 memory_order m = memory_order_seq_cst) volatile noexcept;
167 bool compare_exchange_strong(integral& expc, integral desr,
168 memory_order m = memory_order_seq_cst) noexcept;
171 fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
172 integral fetch_add(integral op, memory_order m = memory_order_seq_cst) noexcept;
174 fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
175 integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) noexcept;
177 fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
178 integral fetch_and(integral op, memory_order m = memory_order_seq_cst) noexcept;
180 fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
181 integral fetch_or(integral op, memory_order m = memory_order_seq_cst) noexcept;
183 fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
184 integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) noexcept;
186 atomic() noexcept = default;
187 constexpr atomic(integral desr) noexcept;
188 atomic(const atomic&) = delete;
189 atomic& operator=(const atomic&) = delete;
190 atomic& operator=(const atomic&) volatile = delete;
191 integral operator=(integral desr) volatile noexcept;
192 integral operator=(integral desr) noexcept;
194 integral operator++(int) volatile noexcept;
195 integral operator++(int) noexcept;
196 integral operator--(int) volatile noexcept;
197 integral operator--(int) noexcept;
198 integral operator++() volatile noexcept;
199 integral operator++() noexcept;
200 integral operator--() volatile noexcept;
201 integral operator--() noexcept;
202 integral operator+=(integral op) volatile noexcept;
203 integral operator+=(integral op) noexcept;
204 integral operator-=(integral op) volatile noexcept;
205 integral operator-=(integral op) noexcept;
206 integral operator&=(integral op) volatile noexcept;
207 integral operator&=(integral op) noexcept;
208 integral operator|=(integral op) volatile noexcept;
209 integral operator|=(integral op) noexcept;
210 integral operator^=(integral op) volatile noexcept;
211 integral operator^=(integral op) noexcept;
217 static constexpr bool is_always_lock_free;
218 bool is_lock_free() const volatile noexcept;
219 bool is_lock_free() const noexcept;
220 void store(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
221 void store(T* desr, memory_order m = memory_order_seq_cst) noexcept;
222 T* load(memory_order m = memory_order_seq_cst) const volatile noexcept;
223 T* load(memory_order m = memory_order_seq_cst) const noexcept;
224 operator T*() const volatile noexcept;
225 operator T*() const noexcept;
226 T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
227 T* exchange(T* desr, memory_order m = memory_order_seq_cst) noexcept;
228 bool compare_exchange_weak(T*& expc, T* desr,
229 memory_order s, memory_order f) volatile noexcept;
230 bool compare_exchange_weak(T*& expc, T* desr,
231 memory_order s, memory_order f) noexcept;
232 bool compare_exchange_strong(T*& expc, T* desr,
233 memory_order s, memory_order f) volatile noexcept;
234 bool compare_exchange_strong(T*& expc, T* desr,
235 memory_order s, memory_order f) noexcept;
236 bool compare_exchange_weak(T*& expc, T* desr,
237 memory_order m = memory_order_seq_cst) volatile noexcept;
238 bool compare_exchange_weak(T*& expc, T* desr,
239 memory_order m = memory_order_seq_cst) noexcept;
240 bool compare_exchange_strong(T*& expc, T* desr,
241 memory_order m = memory_order_seq_cst) volatile noexcept;
242 bool compare_exchange_strong(T*& expc, T* desr,
243 memory_order m = memory_order_seq_cst) noexcept;
244 T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
245 T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
246 T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
247 T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
249 atomic() noexcept = default;
250 constexpr atomic(T* desr) noexcept;
251 atomic(const atomic&) = delete;
252 atomic& operator=(const atomic&) = delete;
253 atomic& operator=(const atomic&) volatile = delete;
255 T* operator=(T*) volatile noexcept;
256 T* operator=(T*) noexcept;
257 T* operator++(int) volatile noexcept;
258 T* operator++(int) noexcept;
259 T* operator--(int) volatile noexcept;
260 T* operator--(int) noexcept;
261 T* operator++() volatile noexcept;
262 T* operator++() noexcept;
263 T* operator--() volatile noexcept;
264 T* operator--() noexcept;
265 T* operator+=(ptrdiff_t op) volatile noexcept;
266 T* operator+=(ptrdiff_t op) noexcept;
267 T* operator-=(ptrdiff_t op) volatile noexcept;
268 T* operator-=(ptrdiff_t op) noexcept;
274 atomic_is_lock_free(const volatile atomic<T>* obj) noexcept;
278 atomic_is_lock_free(const atomic<T>* obj) noexcept;
282 atomic_init(volatile atomic<T>* obj, T desr) noexcept;
286 atomic_init(atomic<T>* obj, T desr) noexcept;
290 atomic_store(volatile atomic<T>* obj, T desr) noexcept;
294 atomic_store(atomic<T>* obj, T desr) noexcept;
298 atomic_store_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
302 atomic_store_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
306 atomic_load(const volatile atomic<T>* obj) noexcept;
310 atomic_load(const atomic<T>* obj) noexcept;
314 atomic_load_explicit(const volatile atomic<T>* obj, memory_order m) noexcept;
318 atomic_load_explicit(const atomic<T>* obj, memory_order m) noexcept;
322 atomic_exchange(volatile atomic<T>* obj, T desr) noexcept;
326 atomic_exchange(atomic<T>* obj, T desr) noexcept;
330 atomic_exchange_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
334 atomic_exchange_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
338 atomic_compare_exchange_weak(volatile atomic<T>* obj, T* expc, T desr) noexcept;
342 atomic_compare_exchange_weak(atomic<T>* obj, T* expc, T desr) noexcept;
346 atomic_compare_exchange_strong(volatile atomic<T>* obj, T* expc, T desr) noexcept;
350 atomic_compare_exchange_strong(atomic<T>* obj, T* expc, T desr) noexcept;
354 atomic_compare_exchange_weak_explicit(volatile atomic<T>* obj, T* expc,
356 memory_order s, memory_order f) noexcept;
360 atomic_compare_exchange_weak_explicit(atomic<T>* obj, T* expc, T desr,
361 memory_order s, memory_order f) noexcept;
365 atomic_compare_exchange_strong_explicit(volatile atomic<T>* obj,
367 memory_order s, memory_order f) noexcept;
371 atomic_compare_exchange_strong_explicit(atomic<T>* obj, T* expc,
373 memory_order s, memory_order f) noexcept;
375 template <class Integral>
377 atomic_fetch_add(volatile atomic<Integral>* obj, Integral op) noexcept;
379 template <class Integral>
381 atomic_fetch_add(atomic<Integral>* obj, Integral op) noexcept;
383 template <class Integral>
385 atomic_fetch_add_explicit(volatile atomic<Integral>* obj, Integral op,
386 memory_order m) noexcept;
387 template <class Integral>
389 atomic_fetch_add_explicit(atomic<Integral>* obj, Integral op,
390 memory_order m) noexcept;
391 template <class Integral>
393 atomic_fetch_sub(volatile atomic<Integral>* obj, Integral op) noexcept;
395 template <class Integral>
397 atomic_fetch_sub(atomic<Integral>* obj, Integral op) noexcept;
399 template <class Integral>
401 atomic_fetch_sub_explicit(volatile atomic<Integral>* obj, Integral op,
402 memory_order m) noexcept;
403 template <class Integral>
405 atomic_fetch_sub_explicit(atomic<Integral>* obj, Integral op,
406 memory_order m) noexcept;
407 template <class Integral>
409 atomic_fetch_and(volatile atomic<Integral>* obj, Integral op) noexcept;
411 template <class Integral>
413 atomic_fetch_and(atomic<Integral>* obj, Integral op) noexcept;
415 template <class Integral>
417 atomic_fetch_and_explicit(volatile atomic<Integral>* obj, Integral op,
418 memory_order m) noexcept;
419 template <class Integral>
421 atomic_fetch_and_explicit(atomic<Integral>* obj, Integral op,
422 memory_order m) noexcept;
423 template <class Integral>
425 atomic_fetch_or(volatile atomic<Integral>* obj, Integral op) noexcept;
427 template <class Integral>
429 atomic_fetch_or(atomic<Integral>* obj, Integral op) noexcept;
431 template <class Integral>
433 atomic_fetch_or_explicit(volatile atomic<Integral>* obj, Integral op,
434 memory_order m) noexcept;
435 template <class Integral>
437 atomic_fetch_or_explicit(atomic<Integral>* obj, Integral op,
438 memory_order m) noexcept;
439 template <class Integral>
441 atomic_fetch_xor(volatile atomic<Integral>* obj, Integral op) noexcept;
443 template <class Integral>
445 atomic_fetch_xor(atomic<Integral>* obj, Integral op) noexcept;
447 template <class Integral>
449 atomic_fetch_xor_explicit(volatile atomic<Integral>* obj, Integral op,
450 memory_order m) noexcept;
451 template <class Integral>
453 atomic_fetch_xor_explicit(atomic<Integral>* obj, Integral op,
454 memory_order m) noexcept;
458 atomic_fetch_add(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
462 atomic_fetch_add(atomic<T*>* obj, ptrdiff_t op) noexcept;
466 atomic_fetch_add_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
467 memory_order m) noexcept;
470 atomic_fetch_add_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
474 atomic_fetch_sub(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
478 atomic_fetch_sub(atomic<T*>* obj, ptrdiff_t op) noexcept;
482 atomic_fetch_sub_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
483 memory_order m) noexcept;
486 atomic_fetch_sub_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
488 // Atomics for standard typedef types
490 typedef atomic<bool> atomic_bool;
491 typedef atomic<char> atomic_char;
492 typedef atomic<signed char> atomic_schar;
493 typedef atomic<unsigned char> atomic_uchar;
494 typedef atomic<short> atomic_short;
495 typedef atomic<unsigned short> atomic_ushort;
496 typedef atomic<int> atomic_int;
497 typedef atomic<unsigned int> atomic_uint;
498 typedef atomic<long> atomic_long;
499 typedef atomic<unsigned long> atomic_ulong;
500 typedef atomic<long long> atomic_llong;
501 typedef atomic<unsigned long long> atomic_ullong;
502 typedef atomic<char16_t> atomic_char16_t;
503 typedef atomic<char32_t> atomic_char32_t;
504 typedef atomic<wchar_t> atomic_wchar_t;
506 typedef atomic<int_least8_t> atomic_int_least8_t;
507 typedef atomic<uint_least8_t> atomic_uint_least8_t;
508 typedef atomic<int_least16_t> atomic_int_least16_t;
509 typedef atomic<uint_least16_t> atomic_uint_least16_t;
510 typedef atomic<int_least32_t> atomic_int_least32_t;
511 typedef atomic<uint_least32_t> atomic_uint_least32_t;
512 typedef atomic<int_least64_t> atomic_int_least64_t;
513 typedef atomic<uint_least64_t> atomic_uint_least64_t;
515 typedef atomic<int_fast8_t> atomic_int_fast8_t;
516 typedef atomic<uint_fast8_t> atomic_uint_fast8_t;
517 typedef atomic<int_fast16_t> atomic_int_fast16_t;
518 typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
519 typedef atomic<int_fast32_t> atomic_int_fast32_t;
520 typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
521 typedef atomic<int_fast64_t> atomic_int_fast64_t;
522 typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
524 typedef atomic<int8_t> atomic_int8_t;
525 typedef atomic<uint8_t> atomic_uint8_t;
526 typedef atomic<int16_t> atomic_int16_t;
527 typedef atomic<uint16_t> atomic_uint16_t;
528 typedef atomic<int32_t> atomic_int32_t;
529 typedef atomic<uint32_t> atomic_uint32_t;
530 typedef atomic<int64_t> atomic_int64_t;
531 typedef atomic<uint64_t> atomic_uint64_t;
533 typedef atomic<intptr_t> atomic_intptr_t;
534 typedef atomic<uintptr_t> atomic_uintptr_t;
535 typedef atomic<size_t> atomic_size_t;
536 typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
537 typedef atomic<intmax_t> atomic_intmax_t;
538 typedef atomic<uintmax_t> atomic_uintmax_t;
542 void atomic_thread_fence(memory_order m) noexcept;
543 void atomic_signal_fence(memory_order m) noexcept;
552 #include <type_traits>
// Mark this header as a system header so user builds don't see its warnings.
#if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
#pragma GCC system_header

// <atomic> requires a threading-capable platform.
#ifdef _LIBCPP_HAS_NO_THREADS
# error <atomic> is not supported on this single threaded system

// The toolchain must supply an atomic implementation (see the
// _LIBCPP_HAS_GCC_ATOMIC_IMP / _LIBCPP_HAS_C_ATOMIC_IMP branches below).
#ifdef _LIBCPP_HAS_NO_ATOMIC_HEADER
# error <atomic> is not implemented

// C's <stdatomic.h> defines kill_dependency as a macro, which conflicts with
// the std::kill_dependency declaration in this header.
#ifdef kill_dependency
# error C++ standard library is incompatible with <stdatomic.h>
// Diagnose (when the argument is a compile-time constant) memory orders that
// are invalid for a store: consume, acquire and acq_rel apply only to
// operations that read.
#define _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m) \
    _LIBCPP_DIAGNOSE_WARNING(__m == memory_order_consume || \
                             __m == memory_order_acquire || \
                             __m == memory_order_acq_rel, \
                             "memory order argument to atomic operation is invalid")
// Diagnose memory orders that are invalid for a load: release and acq_rel
// apply only to operations that write.
#define _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) \
    _LIBCPP_DIAGNOSE_WARNING(__m == memory_order_release || \
                             __m == memory_order_acq_rel, \
                             "memory order argument to atomic operation is invalid")
// Diagnose an invalid *failure* order for compare-exchange: the failure path
// performs only a load, so release and acq_rel are invalid there.
// NOTE(review): the success order __m is accepted unchecked by this macro —
// confirm that is intentional.
#define _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__m, __f) \
    _LIBCPP_DIAGNOSE_WARNING(__f == memory_order_release || \
                             __f == memory_order_acq_rel, \
                             "memory order argument to atomic operation is invalid")
585 _LIBCPP_BEGIN_NAMESPACE_STD
587 // Figure out what the underlying type for `memory_order` would be if it were
588 // declared as an unscoped enum (accounting for -fshort-enums). Use this result
589 // to pin the underlying type in C++20.
590 enum __legacy_memory_order {
// Underlying type derived from the legacy unscoped enum above; used so the
// representation of memory_order is identical across language modes.
typedef underlying_type<__legacy_memory_order>::type __memory_order_underlying_t;

#if _LIBCPP_STD_VER > 17

// C++20: memory_order is a scoped enum whose enumerators reuse the legacy
// values, so conversion to the underlying type yields the same integers as
// the pre-C++20 unscoped enum.
enum class memory_order : __memory_order_underlying_t {
  relaxed = __mo_relaxed,
  consume = __mo_consume,
  acquire = __mo_acquire,
  release = __mo_release,
  acq_rel = __mo_acq_rel,
  seq_cst = __mo_seq_cst

// Free-standing constants keep the pre-C++20 spellings
// (memory_order_relaxed, ...) usable with the scoped enum.
inline constexpr auto memory_order_relaxed = memory_order::relaxed;
inline constexpr auto memory_order_consume = memory_order::consume;
inline constexpr auto memory_order_acquire = memory_order::acquire;
inline constexpr auto memory_order_release = memory_order::release;
inline constexpr auto memory_order_acq_rel = memory_order::acq_rel;
inline constexpr auto memory_order_seq_cst = memory_order::seq_cst;

// Pre-C++20: memory_order is an unscoped enum carrying the same values.
typedef enum memory_order {
  memory_order_relaxed = __mo_relaxed,
  memory_order_consume = __mo_consume,
  memory_order_acquire = __mo_acquire,
  memory_order_release = __mo_release,
  memory_order_acq_rel = __mo_acq_rel,
  memory_order_seq_cst = __mo_seq_cst,

#endif // _LIBCPP_STD_VER > 17

// Guard against the two definitions diverging in representation.
static_assert((is_same<underlying_type<memory_order>::type, __memory_order_underlying_t>::value),
              "unexpected underlying type for std::memory_order");
#if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP) || \
    defined(_LIBCPP_ATOMIC_ONLY_USE_BUILTINS)

// [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because
// the default operator= in an object is not volatile, a byte-by-byte copy
// is used for the volatile overload below.

// Non-volatile overload: plain assignment suffices.
template <typename _Tp, typename _Tv> _LIBCPP_INLINE_VISIBILITY
typename enable_if<is_assignable<_Tp&, _Tv>::value>::type
__cxx_atomic_assign_volatile(_Tp& __a_value, _Tv const& __val) {

// Volatile overload: copy through volatile char* so every byte access is
// performed on a volatile lvalue.
template <typename _Tp, typename _Tv> _LIBCPP_INLINE_VISIBILITY
typename enable_if<is_assignable<_Tp&, _Tv>::value>::type
__cxx_atomic_assign_volatile(_Tp volatile& __a_value, _Tv volatile const& __val) {
  volatile char* __to = reinterpret_cast<volatile char*>(&__a_value);
  volatile char* __end = __to + sizeof(_Tp);
  volatile const char* __from = reinterpret_cast<volatile const char*>(&__val);
  while (__to != __end)
#if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP)

// Storage wrapper used by the GCC __atomic-builtin based implementation; the
// __cxx_atomic_* free functions below operate on its __a_value member.
template <typename _Tp>
struct __cxx_atomic_base_impl {

  _LIBCPP_INLINE_VISIBILITY
// Default constructor: defaulted where the language allows it; the C++03
// branch value-initializes __a_value explicitly instead.
#ifndef _LIBCPP_CXX03_LANG
    __cxx_atomic_base_impl() _NOEXCEPT = default;
    __cxx_atomic_base_impl() _NOEXCEPT : __a_value() {}
#endif // _LIBCPP_CXX03_LANG
  // Constexpr value constructor, required for constant-initializable atomics.
  _LIBCPP_CONSTEXPR explicit __cxx_atomic_base_impl(_Tp value) _NOEXCEPT
    : __a_value(value) {}
// Maps std::memory_order to the GCC/Clang __ATOMIC_* constants expected by
// the __atomic_* builtins.
_LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR int __to_gcc_order(memory_order __order) {
  // Avoid switch statement to make this a constexpr.
  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
          (__order == memory_order_release ? __ATOMIC_RELEASE:
           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
            (__order == memory_order_acq_rel ? __ATOMIC_ACQ_REL:
              __ATOMIC_CONSUME))));
// Maps a compare-exchange *failure* order to the GCC constants. The failure
// path is a pure load, so release semantics are stripped: release maps to
// relaxed and acq_rel maps to acquire.
_LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR int __to_gcc_failure_order(memory_order __order) {
  // Avoid switch statement to make this a constexpr.
  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
          (__order == memory_order_release ? __ATOMIC_RELAXED:
           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
            (__order == memory_order_acq_rel ? __ATOMIC_ACQUIRE:
              __ATOMIC_CONSUME))));
// Non-atomic initialization of the stored value. The volatile overload goes
// through __cxx_atomic_assign_volatile because default operator= is not
// volatile-qualified.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp __val) {
  __cxx_atomic_assign_volatile(__a->__a_value, __val);

// Non-volatile overload: plain assignment.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp>* __a, _Tp __val) {
  __a->__a_value = __val;
// Inter-thread fence (backs std::atomic_thread_fence).
_LIBCPP_INLINE_VISIBILITY inline
void __cxx_atomic_thread_fence(memory_order __order) {
  __atomic_thread_fence(__to_gcc_order(__order));

// Compiler-only fence for ordering with signal handlers on the same thread
// (backs std::atomic_signal_fence).
_LIBCPP_INLINE_VISIBILITY inline
void __cxx_atomic_signal_fence(memory_order __order) {
  __atomic_signal_fence(__to_gcc_order(__order));
// Atomic store via the generic __atomic_store builtin (takes the value by
// pointer, so it works for any trivially copyable _Tp, not just integers).
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp __val,
                        memory_order __order) {
  __atomic_store(&__a->__a_value, &__val,
                 __to_gcc_order(__order));

// Non-volatile overload; identical builtin call.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp>* __a, _Tp __val,
                        memory_order __order) {
  __atomic_store(&__a->__a_value, &__val,
                 __to_gcc_order(__order));
// Atomic load: __atomic_load writes the result into an out-parameter
// (__ret), which is then returned by value.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_load(const volatile __cxx_atomic_base_impl<_Tp>* __a,
                      memory_order __order) {
  __atomic_load(&__a->__a_value, &__ret,
                __to_gcc_order(__order));

// Non-volatile overload; identical builtin call.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_load(const __cxx_atomic_base_impl<_Tp>* __a, memory_order __order) {
  __atomic_load(&__a->__a_value, &__ret,
                __to_gcc_order(__order));
// Atomic exchange: stores __value and returns the previous value (delivered
// by the builtin through the __ret out-parameter).
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_exchange(volatile __cxx_atomic_base_impl<_Tp>* __a,
                          _Tp __value, memory_order __order) {
  __atomic_exchange(&__a->__a_value, &__value, &__ret,
                    __to_gcc_order(__order));

// Non-volatile overload; identical builtin call.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp>* __a, _Tp __value,
                          memory_order __order) {
  __atomic_exchange(&__a->__a_value, &__value, &__ret,
                    __to_gcc_order(__order));
// Strong compare-exchange (no spurious failure). The failure order is
// filtered through __to_gcc_failure_order to strip release semantics.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_strong(
    volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value,
    memory_order __success, memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   __to_gcc_order(__success),
                                   __to_gcc_failure_order(__failure));

// Non-volatile overload; identical builtin call.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_strong(
    __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success,
    memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   __to_gcc_order(__success),
                                   __to_gcc_failure_order(__failure));
// Weak compare-exchange: may fail spuriously even when *__expected matches,
// which permits cheaper code on LL/SC architectures.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_weak(
    volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value,
    memory_order __success, memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   __to_gcc_order(__success),
                                   __to_gcc_failure_order(__failure));

// Non-volatile overload; identical builtin call.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_weak(
    __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success,
    memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   __to_gcc_order(__success),
                                   __to_gcc_failure_order(__failure));
// Scale factor applied to fetch_add/fetch_sub deltas so that arithmetic on
// atomic<T*> advances by whole objects: pointers scale by sizeof(T), every
// other type scales by 1.
template <typename _Up>
struct __skip_amt {
  enum { value = 1 };
};

template <typename _Up>
struct __skip_amt<_Up*> {
  enum { value = sizeof(_Up) };
};

// FIXME: Haven't figured out what the spec says about using arrays with
// atomic_fetch_add. Force a failure rather than creating bad behavior.
template <typename _Up>
struct __skip_amt<_Up[]> {};

template <typename _Up, int _Np>
struct __skip_amt<_Up[_Np]> {};
// Atomic fetch-add; the delta is scaled by __skip_amt so pointer atomics
// advance by whole objects. Returns the prior value.
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_add(volatile __cxx_atomic_base_impl<_Tp>* __a,
                           _Td __delta, memory_order __order) {
  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __to_gcc_order(__order));

// Non-volatile overload; identical builtin call.
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta,
                           memory_order __order) {
  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __to_gcc_order(__order));
// Atomic fetch-sub; delta scaled by __skip_amt as for fetch-add. Returns the
// prior value.
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_sub(volatile __cxx_atomic_base_impl<_Tp>* __a,
                           _Td __delta, memory_order __order) {
  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __to_gcc_order(__order));

// Non-volatile overload; identical builtin call.
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta,
                           memory_order __order) {
  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __to_gcc_order(__order));
// Atomic fetch-AND (bitwise; no __skip_amt scaling). Returns the prior value.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_and(volatile __cxx_atomic_base_impl<_Tp>* __a,
                           _Tp __pattern, memory_order __order) {
  return __atomic_fetch_and(&__a->__a_value, __pattern,
                            __to_gcc_order(__order));

// Non-volatile overload; identical builtin call.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp>* __a,
                           _Tp __pattern, memory_order __order) {
  return __atomic_fetch_and(&__a->__a_value, __pattern,
                            __to_gcc_order(__order));
// Atomic fetch-OR (bitwise). Returns the prior value.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_or(volatile __cxx_atomic_base_impl<_Tp>* __a,
                          _Tp __pattern, memory_order __order) {
  return __atomic_fetch_or(&__a->__a_value, __pattern,
                           __to_gcc_order(__order));

// Non-volatile overload; identical builtin call.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern,
                          memory_order __order) {
  return __atomic_fetch_or(&__a->__a_value, __pattern,
                           __to_gcc_order(__order));
// Atomic fetch-XOR (bitwise). Returns the prior value.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_xor(volatile __cxx_atomic_base_impl<_Tp>* __a,
                           _Tp __pattern, memory_order __order) {
  return __atomic_fetch_xor(&__a->__a_value, __pattern,
                            __to_gcc_order(__order));

// Non-volatile overload; identical builtin call.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern,
                           memory_order __order) {
  return __atomic_fetch_xor(&__a->__a_value, __pattern,
                            __to_gcc_order(__order));
// Lock-freedom probe. Per the GCC __atomic builtin documentation, a null
// second argument means "assume typical alignment for an object of the given
// size" — TODO confirm against the toolchain docs in use.
#define __cxx_atomic_is_lock_free(__s) __atomic_is_lock_free(__s, 0)
#elif defined(_LIBCPP_HAS_C_ATOMIC_IMP)

// Storage wrapper for the Clang C11 (_Atomic / __c11_atomic_*) backend.
template <typename _Tp>
struct __cxx_atomic_base_impl {

  _LIBCPP_INLINE_VISIBILITY
// Default constructor: defaulted where the language allows it; the C++03
// branch value-initializes __a_value explicitly instead.
#ifndef _LIBCPP_CXX03_LANG
    __cxx_atomic_base_impl() _NOEXCEPT = default;
    __cxx_atomic_base_impl() _NOEXCEPT : __a_value() {}
#endif // _LIBCPP_CXX03_LANG
  // Constexpr value constructor, required for constant-initializable atomics.
  _LIBCPP_CONSTEXPR explicit __cxx_atomic_base_impl(_Tp value) _NOEXCEPT
    : __a_value(value) {}
  // The value itself carries the C11 _Atomic qualifier in this backend.
  _Atomic(_Tp) __a_value;

#define __cxx_atomic_is_lock_free(__s) __c11_atomic_is_lock_free(__s)
// C11-backend fences: memory_order is passed through as its underlying
// integral value (the __c11_* builtins take the raw integer).
_LIBCPP_INLINE_VISIBILITY inline
void __cxx_atomic_thread_fence(memory_order __order) _NOEXCEPT {
  __c11_atomic_thread_fence(static_cast<__memory_order_underlying_t>(__order));

// Compiler-only fence for signal-handler ordering on the same thread.
_LIBCPP_INLINE_VISIBILITY inline
void __cxx_atomic_signal_fence(memory_order __order) _NOEXCEPT {
  __c11_atomic_signal_fence(static_cast<__memory_order_underlying_t>(__order));
// Non-atomic initialization via __c11_atomic_init (volatile overload).
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __val) _NOEXCEPT {
  __c11_atomic_init(&__a->__a_value, __val);

// Non-volatile overload; identical builtin call.
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp> * __a, _Tp __val) _NOEXCEPT {
  __c11_atomic_init(&__a->__a_value, __val);

// Atomic store (volatile overload).
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __val, memory_order __order) _NOEXCEPT {
  __c11_atomic_store(&__a->__a_value, __val, static_cast<__memory_order_underlying_t>(__order));

// Non-volatile overload; identical builtin call.
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp> * __a, _Tp __val, memory_order __order) _NOEXCEPT {
  __c11_atomic_store(&__a->__a_value, __val, static_cast<__memory_order_underlying_t>(__order));
// Atomic load. __c11_atomic_load takes a pointer to non-const, so constness
// is cast away here; the load itself does not modify the object.
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const volatile* __a, memory_order __order) _NOEXCEPT {
  using __ptr_type = typename remove_const<decltype(__a->__a_value)>::type*;
  return __c11_atomic_load(const_cast<__ptr_type>(&__a->__a_value), static_cast<__memory_order_underlying_t>(__order));

// Non-volatile overload; same const_cast rationale.
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const* __a, memory_order __order) _NOEXCEPT {
  using __ptr_type = typename remove_const<decltype(__a->__a_value)>::type*;
  return __c11_atomic_load(const_cast<__ptr_type>(&__a->__a_value), static_cast<__memory_order_underlying_t>(__order));
// exchange and compare-exchange (strong/weak) for the C11-builtin backend,
// volatile and non-volatile overloads. The compare-exchange builtins take
// separate success/failure orders; on failure the builtin writes the observed
// value back through __expected.
// NOTE(review): template headers and closing braces are missing from this
// chunk (extraction dropped lines) — confirm upstream.
974 _LIBCPP_INLINE_VISIBILITY
975 _Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __value, memory_order __order) _NOEXCEPT {
976 return __c11_atomic_exchange(&__a->__a_value, __value, static_cast<__memory_order_underlying_t>(__order));
979 _LIBCPP_INLINE_VISIBILITY
980 _Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp> * __a, _Tp __value, memory_order __order) _NOEXCEPT {
981 return __c11_atomic_exchange(&__a->__a_value, __value, static_cast<__memory_order_underlying_t>(__order));
985 _LIBCPP_INLINE_VISIBILITY
986 bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
987 return __c11_atomic_compare_exchange_strong(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__failure));
990 _LIBCPP_INLINE_VISIBILITY
991 bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_base_impl<_Tp> * __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
992 return __c11_atomic_compare_exchange_strong(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__failure));
996 _LIBCPP_INLINE_VISIBILITY
997 bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
998 return __c11_atomic_compare_exchange_weak(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__failure));
1001 _LIBCPP_INLINE_VISIBILITY
1002 bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_base_impl<_Tp> * __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
1003 return __c11_atomic_compare_exchange_weak(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__failure));
// Read-modify-write operations for the C11-builtin backend. Arithmetic ops
// (add/sub) have extra overloads for _Tp* atomics taking a ptrdiff_t delta;
// bitwise ops (and/or/xor) are value overloads only. All return the PRIOR
// value, as the __c11_atomic_fetch_* builtins do.
// NOTE(review): template headers and closing braces are missing from this
// chunk (extraction dropped lines) — confirm upstream.
1007 _LIBCPP_INLINE_VISIBILITY
1008 _Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __delta, memory_order __order) _NOEXCEPT {
1009 return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1012 _LIBCPP_INLINE_VISIBILITY
1013 _Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp> * __a, _Tp __delta, memory_order __order) _NOEXCEPT {
1014 return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1018 _LIBCPP_INLINE_VISIBILITY
1019 _Tp* __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
1020 return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1023 _LIBCPP_INLINE_VISIBILITY
1024 _Tp* __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp*> * __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
1025 return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1029 _LIBCPP_INLINE_VISIBILITY
1030 _Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __delta, memory_order __order) _NOEXCEPT {
1031 return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1034 _LIBCPP_INLINE_VISIBILITY
1035 _Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp> * __a, _Tp __delta, memory_order __order) _NOEXCEPT {
1036 return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1039 _LIBCPP_INLINE_VISIBILITY
1040 _Tp* __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
1041 return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1044 _LIBCPP_INLINE_VISIBILITY
1045 _Tp* __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp*> * __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
1046 return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1050 _LIBCPP_INLINE_VISIBILITY
1051 _Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1052 return __c11_atomic_fetch_and(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1055 _LIBCPP_INLINE_VISIBILITY
1056 _Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1057 return __c11_atomic_fetch_and(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1061 _LIBCPP_INLINE_VISIBILITY
1062 _Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1063 return __c11_atomic_fetch_or(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1066 _LIBCPP_INLINE_VISIBILITY
1067 _Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1068 return __c11_atomic_fetch_or(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1072 _LIBCPP_INLINE_VISIBILITY
1073 _Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1074 return __c11_atomic_fetch_xor(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1077 _LIBCPP_INLINE_VISIBILITY
1078 _Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1079 return __c11_atomic_fetch_xor(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1082 #endif // _LIBCPP_HAS_GCC_ATOMIC_IMP, _LIBCPP_HAS_C_ATOMIC_IMP
// [atomics.order] kill_dependency: terminates a consume-ordering dependency
// chain on its argument. The function body is missing from this chunk;
// upstream it simply returns __y — confirm against the upstream header.
1084 template <class _Tp>
1085 _LIBCPP_INLINE_VISIBILITY
1086 _Tp kill_dependency(_Tp __y) _NOEXCEPT
// Define the standard ATOMIC_*_LOCK_FREE macros (values 0 = never, 1 =
// sometimes, 2 = always lock-free) from whichever compiler-predefined set
// is available: Clang's __CLANG_ATOMIC_* first, else GCC's __GCC_ATOMIC_*.
// NOTE(review): no fallback branch is visible here if neither is defined —
// this chunk is missing lines, so there may be one upstream; confirm.
1091 #if defined(__CLANG_ATOMIC_BOOL_LOCK_FREE)
1092 # define ATOMIC_BOOL_LOCK_FREE __CLANG_ATOMIC_BOOL_LOCK_FREE
1093 # define ATOMIC_CHAR_LOCK_FREE __CLANG_ATOMIC_CHAR_LOCK_FREE
1094 # define ATOMIC_CHAR16_T_LOCK_FREE __CLANG_ATOMIC_CHAR16_T_LOCK_FREE
1095 # define ATOMIC_CHAR32_T_LOCK_FREE __CLANG_ATOMIC_CHAR32_T_LOCK_FREE
1096 # define ATOMIC_WCHAR_T_LOCK_FREE __CLANG_ATOMIC_WCHAR_T_LOCK_FREE
1097 # define ATOMIC_SHORT_LOCK_FREE __CLANG_ATOMIC_SHORT_LOCK_FREE
1098 # define ATOMIC_INT_LOCK_FREE __CLANG_ATOMIC_INT_LOCK_FREE
1099 # define ATOMIC_LONG_LOCK_FREE __CLANG_ATOMIC_LONG_LOCK_FREE
1100 # define ATOMIC_LLONG_LOCK_FREE __CLANG_ATOMIC_LLONG_LOCK_FREE
1101 # define ATOMIC_POINTER_LOCK_FREE __CLANG_ATOMIC_POINTER_LOCK_FREE
1102 #elif defined(__GCC_ATOMIC_BOOL_LOCK_FREE)
1103 # define ATOMIC_BOOL_LOCK_FREE __GCC_ATOMIC_BOOL_LOCK_FREE
1104 # define ATOMIC_CHAR_LOCK_FREE __GCC_ATOMIC_CHAR_LOCK_FREE
1105 # define ATOMIC_CHAR16_T_LOCK_FREE __GCC_ATOMIC_CHAR16_T_LOCK_FREE
1106 # define ATOMIC_CHAR32_T_LOCK_FREE __GCC_ATOMIC_CHAR32_T_LOCK_FREE
1107 # define ATOMIC_WCHAR_T_LOCK_FREE __GCC_ATOMIC_WCHAR_T_LOCK_FREE
1108 # define ATOMIC_SHORT_LOCK_FREE __GCC_ATOMIC_SHORT_LOCK_FREE
1109 # define ATOMIC_INT_LOCK_FREE __GCC_ATOMIC_INT_LOCK_FREE
1110 # define ATOMIC_LONG_LOCK_FREE __GCC_ATOMIC_LONG_LOCK_FREE
1111 # define ATOMIC_LLONG_LOCK_FREE __GCC_ATOMIC_LLONG_LOCK_FREE
1112 # define ATOMIC_POINTER_LOCK_FREE __GCC_ATOMIC_POINTER_LOCK_FREE
1115 #ifdef _LIBCPP_ATOMIC_ONLY_USE_BUILTINS
// Fallback atomic representation used when _LIBCPP_ATOMIC_ONLY_USE_BUILTINS
// is defined and _Tp is not always lock-free: the value is guarded by a
// spinlock (__a_lock) implemented on an always-lock-free flag type.
// __a_lock is mutable so const operations (loads) can still take the lock.
// NOTE(review): this chunk is missing lines — the __a_value member
// declaration, parts of __read()'s bodies, and the struct's closing brace
// are not visible; confirm against the upstream header.
1117 template<typename _Tp>
1118 struct __cxx_atomic_lock_impl {
1120 _LIBCPP_INLINE_VISIBILITY
1121 __cxx_atomic_lock_impl() _NOEXCEPT
1122 : __a_value(), __a_lock(0) {}
1123 _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR explicit
1124 __cxx_atomic_lock_impl(_Tp value) _NOEXCEPT
1125 : __a_value(value), __a_lock(0) {}
1128 mutable __cxx_atomic_base_impl<_LIBCPP_ATOMIC_FLAG_TYPE> __a_lock;
// Spin until the exchange observes the lock previously clear; acquire order
// on lock, release order on unlock, forming the usual lock protocol.
1130 _LIBCPP_INLINE_VISIBILITY void __lock() const volatile {
1131 while(1 == __cxx_atomic_exchange(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(true), memory_order_acquire))
1134 _LIBCPP_INLINE_VISIBILITY void __lock() const {
1135 while(1 == __cxx_atomic_exchange(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(true), memory_order_acquire))
1138 _LIBCPP_INLINE_VISIBILITY void __unlock() const volatile {
1139 __cxx_atomic_store(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(false), memory_order_release);
1141 _LIBCPP_INLINE_VISIBILITY void __unlock() const {
1142 __cxx_atomic_store(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(false), memory_order_release);
// __read(): copy __a_value out under the lock (volatile variant uses
// __cxx_atomic_assign_volatile for the copy).
1144 _LIBCPP_INLINE_VISIBILITY _Tp __read() const volatile {
1147 __cxx_atomic_assign_volatile(__old, __a_value);
1151 _LIBCPP_INLINE_VISIBILITY _Tp __read() const {
1153 _Tp __old = __a_value;
// init / store / load / exchange for the spinlock-based fallback. The memory
// order parameters are deliberately ignored: the lock's acquire/release
// already provides the strongest (seq_cst-like) ordering per object.
// NOTE(review): __lock()/__unlock() calls and return statements around the
// visible lines are missing from this chunk (extraction dropped lines);
// confirm against the upstream header.
1159 template <typename _Tp>
1160 _LIBCPP_INLINE_VISIBILITY
1161 void __cxx_atomic_init(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp __val) {
// init is not required to be atomic, so no lock is taken here.
1162 __cxx_atomic_assign_volatile(__a->__a_value, __val);
1164 template <typename _Tp>
1165 _LIBCPP_INLINE_VISIBILITY
1166 void __cxx_atomic_init(__cxx_atomic_lock_impl<_Tp>* __a, _Tp __val) {
1167 __a->__a_value = __val;
1170 template <typename _Tp>
1171 _LIBCPP_INLINE_VISIBILITY
1172 void __cxx_atomic_store(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp __val, memory_order) {
1174 __cxx_atomic_assign_volatile(__a->__a_value, __val);
1177 template <typename _Tp>
1178 _LIBCPP_INLINE_VISIBILITY
1179 void __cxx_atomic_store(__cxx_atomic_lock_impl<_Tp>* __a, _Tp __val, memory_order) {
1181 __a->__a_value = __val;
1185 template <typename _Tp>
1186 _LIBCPP_INLINE_VISIBILITY
1187 _Tp __cxx_atomic_load(const volatile __cxx_atomic_lock_impl<_Tp>* __a, memory_order) {
// __read() takes and releases the lock internally.
1188 return __a->__read();
1190 template <typename _Tp>
1191 _LIBCPP_INLINE_VISIBILITY
1192 _Tp __cxx_atomic_load(const __cxx_atomic_lock_impl<_Tp>* __a, memory_order) {
1193 return __a->__read();
1196 template <typename _Tp>
1197 _LIBCPP_INLINE_VISIBILITY
1198 _Tp __cxx_atomic_exchange(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp __value, memory_order) {
1201 __cxx_atomic_assign_volatile(__old, __a->__a_value);
1202 __cxx_atomic_assign_volatile(__a->__a_value, __value);
1206 template <typename _Tp>
1207 _LIBCPP_INLINE_VISIBILITY
1208 _Tp __cxx_atomic_exchange(__cxx_atomic_lock_impl<_Tp>* __a, _Tp __value, memory_order) {
1210 _Tp __old = __a->__a_value;
1211 __a->__a_value = __value;
// Compare-exchange for the spinlock-based fallback. Under the lock, strong
// and weak are identical (no spurious failure is possible): compare the
// stored value to *__expected; on match store __value, otherwise write the
// observed value back through __expected. Returns whether the swap happened.
// NOTE(review): the lock/unlock calls, the if/else lines, and the returns
// are missing from this chunk (extraction dropped lines) — confirm upstream.
1216 template <typename _Tp>
1217 _LIBCPP_INLINE_VISIBILITY
1218 bool __cxx_atomic_compare_exchange_strong(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1219 _Tp* __expected, _Tp __value, memory_order, memory_order) {
1222 __cxx_atomic_assign_volatile(temp, __a->__a_value);
1223 bool __ret = temp == *__expected;
1225 __cxx_atomic_assign_volatile(__a->__a_value, __value);
1227 __cxx_atomic_assign_volatile(*__expected, __a->__a_value);
1231 template <typename _Tp>
1232 _LIBCPP_INLINE_VISIBILITY
1233 bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_lock_impl<_Tp>* __a,
1234 _Tp* __expected, _Tp __value, memory_order, memory_order) {
1236 bool __ret = __a->__a_value == *__expected;
1238 __a->__a_value = __value;
1240 *__expected = __a->__a_value;
1245 template <typename _Tp>
1246 _LIBCPP_INLINE_VISIBILITY
1247 bool __cxx_atomic_compare_exchange_weak(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1248 _Tp* __expected, _Tp __value, memory_order, memory_order) {
1251 __cxx_atomic_assign_volatile(temp, __a->__a_value);
1252 bool __ret = temp == *__expected;
1254 __cxx_atomic_assign_volatile(__a->__a_value, __value);
1256 __cxx_atomic_assign_volatile(*__expected, __a->__a_value);
1260 template <typename _Tp>
1261 _LIBCPP_INLINE_VISIBILITY
1262 bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_lock_impl<_Tp>* __a,
1263 _Tp* __expected, _Tp __value, memory_order, memory_order) {
1265 bool __ret = __a->__a_value == *__expected;
1267 __a->__a_value = __value;
1269 *__expected = __a->__a_value;
// fetch_add / fetch_sub for the spinlock-based fallback: read old value,
// write old +/- delta, return the old value — all under the lock (lock
// calls not visible in this chunk). _Td allows a delta type distinct from
// _Tp; the _Tp* overloads take ptrdiff_t and do pointer arithmetic.
// NOTE(review): lock/unlock and return lines are missing from this chunk
// (extraction dropped lines) — confirm against the upstream header.
1274 template <typename _Tp, typename _Td>
1275 _LIBCPP_INLINE_VISIBILITY
1276 _Tp __cxx_atomic_fetch_add(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1277 _Td __delta, memory_order) {
1280 __cxx_atomic_assign_volatile(__old, __a->__a_value);
1281 __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old + __delta));
1285 template <typename _Tp, typename _Td>
1286 _LIBCPP_INLINE_VISIBILITY
1287 _Tp __cxx_atomic_fetch_add(__cxx_atomic_lock_impl<_Tp>* __a,
1288 _Td __delta, memory_order) {
1290 _Tp __old = __a->__a_value;
1291 __a->__a_value += __delta;
1296 template <typename _Tp, typename _Td>
1297 _LIBCPP_INLINE_VISIBILITY
1298 _Tp* __cxx_atomic_fetch_add(volatile __cxx_atomic_lock_impl<_Tp*>* __a,
1299 ptrdiff_t __delta, memory_order) {
1302 __cxx_atomic_assign_volatile(__old, __a->__a_value);
1303 __cxx_atomic_assign_volatile(__a->__a_value, __old + __delta);
1307 template <typename _Tp, typename _Td>
1308 _LIBCPP_INLINE_VISIBILITY
1309 _Tp* __cxx_atomic_fetch_add(__cxx_atomic_lock_impl<_Tp*>* __a,
1310 ptrdiff_t __delta, memory_order) {
1312 _Tp* __old = __a->__a_value;
1313 __a->__a_value += __delta;
1318 template <typename _Tp, typename _Td>
1319 _LIBCPP_INLINE_VISIBILITY
1320 _Tp __cxx_atomic_fetch_sub(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1321 _Td __delta, memory_order) {
1324 __cxx_atomic_assign_volatile(__old, __a->__a_value);
1325 __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old - __delta));
1329 template <typename _Tp, typename _Td>
1330 _LIBCPP_INLINE_VISIBILITY
1331 _Tp __cxx_atomic_fetch_sub(__cxx_atomic_lock_impl<_Tp>* __a,
1332 _Td __delta, memory_order) {
1334 _Tp __old = __a->__a_value;
1335 __a->__a_value -= __delta;
// fetch_and / fetch_or / fetch_xor for the spinlock-based fallback: read
// old value, store old OP pattern, return the old value. The _Tp(...)
// casts in the volatile variants avoid narrowing from integer promotion.
// NOTE(review): lock/unlock and return lines are missing from this chunk
// (extraction dropped lines) — confirm against the upstream header.
1340 template <typename _Tp>
1341 _LIBCPP_INLINE_VISIBILITY
1342 _Tp __cxx_atomic_fetch_and(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1343 _Tp __pattern, memory_order) {
1346 __cxx_atomic_assign_volatile(__old, __a->__a_value);
1347 __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old & __pattern));
1351 template <typename _Tp>
1352 _LIBCPP_INLINE_VISIBILITY
1353 _Tp __cxx_atomic_fetch_and(__cxx_atomic_lock_impl<_Tp>* __a,
1354 _Tp __pattern, memory_order) {
1356 _Tp __old = __a->__a_value;
1357 __a->__a_value &= __pattern;
1362 template <typename _Tp>
1363 _LIBCPP_INLINE_VISIBILITY
1364 _Tp __cxx_atomic_fetch_or(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1365 _Tp __pattern, memory_order) {
1368 __cxx_atomic_assign_volatile(__old, __a->__a_value);
1369 __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old | __pattern));
1373 template <typename _Tp>
1374 _LIBCPP_INLINE_VISIBILITY
1375 _Tp __cxx_atomic_fetch_or(__cxx_atomic_lock_impl<_Tp>* __a,
1376 _Tp __pattern, memory_order) {
1378 _Tp __old = __a->__a_value;
1379 __a->__a_value |= __pattern;
1384 template <typename _Tp>
1385 _LIBCPP_INLINE_VISIBILITY
1386 _Tp __cxx_atomic_fetch_xor(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1387 _Tp __pattern, memory_order) {
1390 __cxx_atomic_assign_volatile(__old, __a->__a_value);
1391 __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old ^ __pattern));
1395 template <typename _Tp>
1396 _LIBCPP_INLINE_VISIBILITY
1397 _Tp __cxx_atomic_fetch_xor(__cxx_atomic_lock_impl<_Tp>* __a,
1398 _Tp __pattern, memory_order) {
1400 _Tp __old = __a->__a_value;
1401 __a->__a_value ^= __pattern;
1406 #ifdef __cpp_lib_atomic_is_always_lock_free
// Trait selecting between the builtin and spinlock atomic representations.
// When __cpp_lib_atomic_is_always_lock_free is available, ask the compiler
// directly via __atomic_always_lock_free; otherwise default to false and
// specialize per scalar type by comparing the C macro to 2 ("always").
// (The #else line separating the two definitions is missing from this chunk.)
1408 template<typename _Tp> struct __cxx_is_always_lock_free {
1409 enum { __value = __atomic_always_lock_free(sizeof(_Tp), 0) }; };
1413 template<typename _Tp> struct __cxx_is_always_lock_free { enum { __value = false }; };
1414 // Implementations must match the C ATOMIC_*_LOCK_FREE macro values.
1415 template<> struct __cxx_is_always_lock_free<bool> { enum { __value = 2 == ATOMIC_BOOL_LOCK_FREE }; };
1416 template<> struct __cxx_is_always_lock_free<char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; };
1417 template<> struct __cxx_is_always_lock_free<signed char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; };
1418 template<> struct __cxx_is_always_lock_free<unsigned char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; };
1419 template<> struct __cxx_is_always_lock_free<char16_t> { enum { __value = 2 == ATOMIC_CHAR16_T_LOCK_FREE }; };
1420 template<> struct __cxx_is_always_lock_free<char32_t> { enum { __value = 2 == ATOMIC_CHAR32_T_LOCK_FREE }; };
1421 template<> struct __cxx_is_always_lock_free<wchar_t> { enum { __value = 2 == ATOMIC_WCHAR_T_LOCK_FREE }; };
1422 template<> struct __cxx_is_always_lock_free<short> { enum { __value = 2 == ATOMIC_SHORT_LOCK_FREE }; };
1423 template<> struct __cxx_is_always_lock_free<unsigned short> { enum { __value = 2 == ATOMIC_SHORT_LOCK_FREE }; };
1424 template<> struct __cxx_is_always_lock_free<int> { enum { __value = 2 == ATOMIC_INT_LOCK_FREE }; };
1425 template<> struct __cxx_is_always_lock_free<unsigned int> { enum { __value = 2 == ATOMIC_INT_LOCK_FREE }; };
1426 template<> struct __cxx_is_always_lock_free<long> { enum { __value = 2 == ATOMIC_LONG_LOCK_FREE }; };
1427 template<> struct __cxx_is_always_lock_free<unsigned long> { enum { __value = 2 == ATOMIC_LONG_LOCK_FREE }; };
1428 template<> struct __cxx_is_always_lock_free<long long> { enum { __value = 2 == ATOMIC_LLONG_LOCK_FREE }; };
1429 template<> struct __cxx_is_always_lock_free<unsigned long long> { enum { __value = 2 == ATOMIC_LLONG_LOCK_FREE }; };
1430 template<typename _Tp> struct __cxx_is_always_lock_free<_Tp*> { enum { __value = 2 == ATOMIC_POINTER_LOCK_FREE }; };
1431 template<> struct __cxx_is_always_lock_free<std::nullptr_t> { enum { __value = 2 == ATOMIC_POINTER_LOCK_FREE }; };
1433 #endif //__cpp_lib_atomic_is_always_lock_free
// __cxx_atomic_impl: the storage type used by __atomic_base. Under
// _LIBCPP_ATOMIC_ONLY_USE_BUILTINS it picks the builtin representation for
// always-lock-free types and the spinlock fallback otherwise; without that
// macro it is always the builtin representation. (The #else line between the
// two template heads is missing from this chunk.)
1435 template <typename _Tp,
1436 typename _Base = typename conditional<__cxx_is_always_lock_free<_Tp>::__value,
1437 __cxx_atomic_base_impl<_Tp>,
1438 __cxx_atomic_lock_impl<_Tp> >::type>
1440 template <typename _Tp,
1441 typename _Base = __cxx_atomic_base_impl<_Tp> >
1442 #endif //_LIBCPP_ATOMIC_ONLY_USE_BUILTINS
1443 struct __cxx_atomic_impl : public _Base {
// Enforce the std::atomic<T> trivially-copyable requirement where the
// compiler supports the trait (guard uses libc++'s _GNUC_VER from __config).
1445 #if _GNUC_VER >= 501
1446 static_assert(is_trivially_copyable<_Tp>::value,
1447 "std::atomic<Tp> requires that 'Tp' be a trivially copyable type");
1450 _LIBCPP_INLINE_VISIBILITY __cxx_atomic_impl() _NOEXCEPT _LIBCPP_DEFAULT
1451 _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR explicit __cxx_atomic_impl(_Tp value) _NOEXCEPT
1455 // general atomic<T>
// __atomic_base<_Tp, false>: the non-arithmetic core shared by all
// std::atomic specializations — store/load/exchange/compare_exchange plus
// lock-free queries. Each operation has volatile and non-volatile overloads;
// the _LIBCPP_CHECK_*_MEMORY_ORDER macros diagnose orders the standard
// forbids for that operation. Copying is disabled (= delete in C++11,
// private declarations in C++03 — the #else for that split is not visible
// in this chunk).
1457 template <class _Tp, bool = is_integral<_Tp>::value && !is_same<_Tp, bool>::value>
1458 struct __atomic_base // false
1460 mutable __cxx_atomic_impl<_Tp> __a_;
1462 #if defined(__cpp_lib_atomic_is_always_lock_free)
1463 static _LIBCPP_CONSTEXPR bool is_always_lock_free = __atomic_always_lock_free(sizeof(__a_), 0);
1466 _LIBCPP_INLINE_VISIBILITY
1467 bool is_lock_free() const volatile _NOEXCEPT
1468 {return __cxx_atomic_is_lock_free(sizeof(_Tp));}
1469 _LIBCPP_INLINE_VISIBILITY
1470 bool is_lock_free() const _NOEXCEPT
// Non-volatile overload defers to the volatile one to keep one definition.
1471 {return static_cast<__atomic_base const volatile*>(this)->is_lock_free();}
1472 _LIBCPP_INLINE_VISIBILITY
1473 void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1474 _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
1475 {__cxx_atomic_store(&__a_, __d, __m);}
1476 _LIBCPP_INLINE_VISIBILITY
1477 void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1478 _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
1479 {__cxx_atomic_store(&__a_, __d, __m);}
1480 _LIBCPP_INLINE_VISIBILITY
1481 _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
1482 _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
1483 {return __cxx_atomic_load(&__a_, __m);}
1484 _LIBCPP_INLINE_VISIBILITY
1485 _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
1486 _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
1487 {return __cxx_atomic_load(&__a_, __m);}
1488 _LIBCPP_INLINE_VISIBILITY
1489 operator _Tp() const volatile _NOEXCEPT {return load();}
1490 _LIBCPP_INLINE_VISIBILITY
1491 operator _Tp() const _NOEXCEPT {return load();}
1492 _LIBCPP_INLINE_VISIBILITY
1493 _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1494 {return __cxx_atomic_exchange(&__a_, __d, __m);}
1495 _LIBCPP_INLINE_VISIBILITY
1496 _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1497 {return __cxx_atomic_exchange(&__a_, __d, __m);}
1498 _LIBCPP_INLINE_VISIBILITY
1499 bool compare_exchange_weak(_Tp& __e, _Tp __d,
1500 memory_order __s, memory_order __f) volatile _NOEXCEPT
1501 _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
1502 {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
1503 _LIBCPP_INLINE_VISIBILITY
1504 bool compare_exchange_weak(_Tp& __e, _Tp __d,
1505 memory_order __s, memory_order __f) _NOEXCEPT
1506 _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
1507 {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
1508 _LIBCPP_INLINE_VISIBILITY
1509 bool compare_exchange_strong(_Tp& __e, _Tp __d,
1510 memory_order __s, memory_order __f) volatile _NOEXCEPT
1511 _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
1512 {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
1513 _LIBCPP_INLINE_VISIBILITY
1514 bool compare_exchange_strong(_Tp& __e, _Tp __d,
1515 memory_order __s, memory_order __f) _NOEXCEPT
1516 _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
1517 {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
// Single-order compare_exchange forms reuse __m for both success and
// failure (note: this predates the C++17 rule that derives the failure
// order by weakening __m — confirm against the paper upstream tracks).
1518 _LIBCPP_INLINE_VISIBILITY
1519 bool compare_exchange_weak(_Tp& __e, _Tp __d,
1520 memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1521 {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
1522 _LIBCPP_INLINE_VISIBILITY
1523 bool compare_exchange_weak(_Tp& __e, _Tp __d,
1524 memory_order __m = memory_order_seq_cst) _NOEXCEPT
1525 {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
1526 _LIBCPP_INLINE_VISIBILITY
1527 bool compare_exchange_strong(_Tp& __e, _Tp __d,
1528 memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1529 {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
1530 _LIBCPP_INLINE_VISIBILITY
1531 bool compare_exchange_strong(_Tp& __e, _Tp __d,
1532 memory_order __m = memory_order_seq_cst) _NOEXCEPT
1533 {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
1535 _LIBCPP_INLINE_VISIBILITY
1536 __atomic_base() _NOEXCEPT _LIBCPP_DEFAULT
1538 _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR
1539 __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {}
1541 #ifndef _LIBCPP_CXX03_LANG
1542 __atomic_base(const __atomic_base&) = delete;
1543 __atomic_base& operator=(const __atomic_base&) = delete;
1544 __atomic_base& operator=(const __atomic_base&) volatile = delete;
// C++03 path: declared (presumably private) but not defined, to forbid use.
1547 __atomic_base(const __atomic_base&);
1548 __atomic_base& operator=(const __atomic_base&);
1549 __atomic_base& operator=(const __atomic_base&) volatile;
// Out-of-line definition of the static data member declared in
// __atomic_base: required when the member is odr-used, since pre-C++17
// in-class constexpr declarations are not definitions.
1553 #if defined(__cpp_lib_atomic_is_always_lock_free)
1554 template <class _Tp, bool __b>
1555 _LIBCPP_CONSTEXPR bool __atomic_base<_Tp, __b>::is_always_lock_free;
// __atomic_base<_Tp, true>: layered over the false specialization for
// integral types (excluding bool), adding fetch_add/sub/and/or/xor and the
// arithmetic/bitwise operators. Post-inc/dec return the OLD value; pre-forms
// and compound assignments recompute the NEW value from the fetched old one.
1560 template <class _Tp>
1561 struct __atomic_base<_Tp, true>
1562 : public __atomic_base<_Tp, false>
1564 typedef __atomic_base<_Tp, false> __base;
1565 _LIBCPP_INLINE_VISIBILITY
1566 __atomic_base() _NOEXCEPT _LIBCPP_DEFAULT
1567 _LIBCPP_INLINE_VISIBILITY
1568 _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}
1570 _LIBCPP_INLINE_VISIBILITY
1571 _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1572 {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
1573 _LIBCPP_INLINE_VISIBILITY
1574 _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1575 {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
1576 _LIBCPP_INLINE_VISIBILITY
1577 _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1578 {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
1579 _LIBCPP_INLINE_VISIBILITY
1580 _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1581 {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
1582 _LIBCPP_INLINE_VISIBILITY
1583 _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1584 {return __cxx_atomic_fetch_and(&this->__a_, __op, __m);}
1585 _LIBCPP_INLINE_VISIBILITY
1586 _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1587 {return __cxx_atomic_fetch_and(&this->__a_, __op, __m);}
1588 _LIBCPP_INLINE_VISIBILITY
1589 _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1590 {return __cxx_atomic_fetch_or(&this->__a_, __op, __m);}
1591 _LIBCPP_INLINE_VISIBILITY
1592 _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1593 {return __cxx_atomic_fetch_or(&this->__a_, __op, __m);}
1594 _LIBCPP_INLINE_VISIBILITY
1595 _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1596 {return __cxx_atomic_fetch_xor(&this->__a_, __op, __m);}
1597 _LIBCPP_INLINE_VISIBILITY
1598 _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1599 {return __cxx_atomic_fetch_xor(&this->__a_, __op, __m);}
1601 _LIBCPP_INLINE_VISIBILITY
1602 _Tp operator++(int) volatile _NOEXCEPT {return fetch_add(_Tp(1));}
1603 _LIBCPP_INLINE_VISIBILITY
1604 _Tp operator++(int) _NOEXCEPT {return fetch_add(_Tp(1));}
1605 _LIBCPP_INLINE_VISIBILITY
1606 _Tp operator--(int) volatile _NOEXCEPT {return fetch_sub(_Tp(1));}
1607 _LIBCPP_INLINE_VISIBILITY
1608 _Tp operator--(int) _NOEXCEPT {return fetch_sub(_Tp(1));}
1609 _LIBCPP_INLINE_VISIBILITY
1610 _Tp operator++() volatile _NOEXCEPT {return fetch_add(_Tp(1)) + _Tp(1);}
1611 _LIBCPP_INLINE_VISIBILITY
1612 _Tp operator++() _NOEXCEPT {return fetch_add(_Tp(1)) + _Tp(1);}
1613 _LIBCPP_INLINE_VISIBILITY
1614 _Tp operator--() volatile _NOEXCEPT {return fetch_sub(_Tp(1)) - _Tp(1);}
1615 _LIBCPP_INLINE_VISIBILITY
1616 _Tp operator--() _NOEXCEPT {return fetch_sub(_Tp(1)) - _Tp(1);}
1617 _LIBCPP_INLINE_VISIBILITY
1618 _Tp operator+=(_Tp __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
1619 _LIBCPP_INLINE_VISIBILITY
1620 _Tp operator+=(_Tp __op) _NOEXCEPT {return fetch_add(__op) + __op;}
1621 _LIBCPP_INLINE_VISIBILITY
1622 _Tp operator-=(_Tp __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
1623 _LIBCPP_INLINE_VISIBILITY
1624 _Tp operator-=(_Tp __op) _NOEXCEPT {return fetch_sub(__op) - __op;}
1625 _LIBCPP_INLINE_VISIBILITY
1626 _Tp operator&=(_Tp __op) volatile _NOEXCEPT {return fetch_and(__op) & __op;}
1627 _LIBCPP_INLINE_VISIBILITY
1628 _Tp operator&=(_Tp __op) _NOEXCEPT {return fetch_and(__op) & __op;}
1629 _LIBCPP_INLINE_VISIBILITY
1630 _Tp operator|=(_Tp __op) volatile _NOEXCEPT {return fetch_or(__op) | __op;}
1631 _LIBCPP_INLINE_VISIBILITY
1632 _Tp operator|=(_Tp __op) _NOEXCEPT {return fetch_or(__op) | __op;}
1633 _LIBCPP_INLINE_VISIBILITY
1634 _Tp operator^=(_Tp __op) volatile _NOEXCEPT {return fetch_xor(__op) ^ __op;}
1635 _LIBCPP_INLINE_VISIBILITY
1636 _Tp operator^=(_Tp __op) _NOEXCEPT {return fetch_xor(__op) ^ __op;}
// Primary std::atomic<_Tp> template: inherits all operations from
// __atomic_base and adds assignment from _Tp (seq_cst store, returns __d).
// NOTE(review): the "struct atomic" head line and the class-closing brace
// are missing from this chunk (extraction dropped lines) — confirm upstream.
1641 template <class _Tp>
1643 : public __atomic_base<_Tp>
1645 typedef __atomic_base<_Tp> __base;
1646 _LIBCPP_INLINE_VISIBILITY
1647 atomic() _NOEXCEPT _LIBCPP_DEFAULT
1648 _LIBCPP_INLINE_VISIBILITY
1649 _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}
1651 _LIBCPP_INLINE_VISIBILITY
1652 _Tp operator=(_Tp __d) volatile _NOEXCEPT
1653 {__base::store(__d); return __d;}
1654 _LIBCPP_INLINE_VISIBILITY
1655 _Tp operator=(_Tp __d) _NOEXCEPT
1656 {__base::store(__d); return __d;}
// std::atomic<_Tp*> partial specialization: assignment from _Tp*, plus
// pointer arithmetic (fetch_add/fetch_sub with ptrdiff_t) and the derived
// ++/--/+=/-= operators. Post-forms return the OLD pointer; pre-forms and
// compound assignments return the NEW one.
// NOTE(review): the "struct atomic<_Tp*>" head line, the _NOEXCEPT on two
// volatile fetch overloads, and the class-closing brace are missing from
// this chunk (extraction dropped lines) — confirm upstream.
1661 template <class _Tp>
1663 : public __atomic_base<_Tp*>
1665 typedef __atomic_base<_Tp*> __base;
1666 _LIBCPP_INLINE_VISIBILITY
1667 atomic() _NOEXCEPT _LIBCPP_DEFAULT
1668 _LIBCPP_INLINE_VISIBILITY
1669 _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}
1671 _LIBCPP_INLINE_VISIBILITY
1672 _Tp* operator=(_Tp* __d) volatile _NOEXCEPT
1673 {__base::store(__d); return __d;}
1674 _LIBCPP_INLINE_VISIBILITY
1675 _Tp* operator=(_Tp* __d) _NOEXCEPT
1676 {__base::store(__d); return __d;}
1678 _LIBCPP_INLINE_VISIBILITY
1679 _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
1681 {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
1682 _LIBCPP_INLINE_VISIBILITY
1683 _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1684 {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
1685 _LIBCPP_INLINE_VISIBILITY
1686 _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
1688 {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
1689 _LIBCPP_INLINE_VISIBILITY
1690 _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1691 {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
1693 _LIBCPP_INLINE_VISIBILITY
1694 _Tp* operator++(int) volatile _NOEXCEPT {return fetch_add(1);}
1695 _LIBCPP_INLINE_VISIBILITY
1696 _Tp* operator++(int) _NOEXCEPT {return fetch_add(1);}
1697 _LIBCPP_INLINE_VISIBILITY
1698 _Tp* operator--(int) volatile _NOEXCEPT {return fetch_sub(1);}
1699 _LIBCPP_INLINE_VISIBILITY
1700 _Tp* operator--(int) _NOEXCEPT {return fetch_sub(1);}
1701 _LIBCPP_INLINE_VISIBILITY
1702 _Tp* operator++() volatile _NOEXCEPT {return fetch_add(1) + 1;}
1703 _LIBCPP_INLINE_VISIBILITY
1704 _Tp* operator++() _NOEXCEPT {return fetch_add(1) + 1;}
1705 _LIBCPP_INLINE_VISIBILITY
1706 _Tp* operator--() volatile _NOEXCEPT {return fetch_sub(1) - 1;}
1707 _LIBCPP_INLINE_VISIBILITY
1708 _Tp* operator--() _NOEXCEPT {return fetch_sub(1) - 1;}
1709 _LIBCPP_INLINE_VISIBILITY
1710 _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
1711 _LIBCPP_INLINE_VISIBILITY
1712 _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT {return fetch_add(__op) + __op;}
1713 _LIBCPP_INLINE_VISIBILITY
1714 _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
1715 _LIBCPP_INLINE_VISIBILITY
1716 _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT {return fetch_sub(__op) - __op;}
1719 // atomic_is_lock_free
// C-compatible free-function interface ([atomics.nonmembers]): each function
// forwards to the corresponding member on the atomic object, with volatile
// and non-volatile overloads. Return-type lines and closing braces are
// missing from this chunk (extraction dropped lines) — confirm upstream.
1721 template <class _Tp>
1722 _LIBCPP_INLINE_VISIBILITY
1724 atomic_is_lock_free(const volatile atomic<_Tp>* __o) _NOEXCEPT
1726 return __o->is_lock_free();
1729 template <class _Tp>
1730 _LIBCPP_INLINE_VISIBILITY
1732 atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT
1734 return __o->is_lock_free();
// atomic_init: non-atomic initialization; reaches directly into __a_
// rather than going through store().
1739 template <class _Tp>
1740 _LIBCPP_INLINE_VISIBILITY
1742 atomic_init(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1744 __cxx_atomic_init(&__o->__a_, __d);
1747 template <class _Tp>
1748 _LIBCPP_INLINE_VISIBILITY
1750 atomic_init(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1752 __cxx_atomic_init(&__o->__a_, __d);
// atomic_store: seq_cst store (body lines not visible in this chunk;
// presumably __o->store(__d) — confirm upstream).
1757 template <class _Tp>
1758 _LIBCPP_INLINE_VISIBILITY
1760 atomic_store(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1765 template <class _Tp>
1766 _LIBCPP_INLINE_VISIBILITY
1768 atomic_store(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1773 // atomic_store_explicit
1775 template <class _Tp>
1776 _LIBCPP_INLINE_VISIBILITY
1778 atomic_store_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1779 _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
1781 __o->store(__d, __m);
1784 template <class _Tp>
1785 _LIBCPP_INLINE_VISIBILITY
1787 atomic_store_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1788 _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
1790 __o->store(__d, __m);
// atomic_load / atomic_load_explicit / atomic_exchange /
// atomic_exchange_explicit: free-function forwarders to the member
// operations. Return-type lines and closing braces, plus the bodies of the
// plain atomic_load overloads (presumably return __o->load()), are missing
// from this chunk (extraction dropped lines) — confirm upstream.
1795 template <class _Tp>
1796 _LIBCPP_INLINE_VISIBILITY
1798 atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT
1803 template <class _Tp>
1804 _LIBCPP_INLINE_VISIBILITY
1806 atomic_load(const atomic<_Tp>* __o) _NOEXCEPT
1811 // atomic_load_explicit
1813 template <class _Tp>
1814 _LIBCPP_INLINE_VISIBILITY
1816 atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
1817 _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
1819 return __o->load(__m);
1822 template <class _Tp>
1823 _LIBCPP_INLINE_VISIBILITY
1825 atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
1826 _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
1828 return __o->load(__m);
1833 template <class _Tp>
1834 _LIBCPP_INLINE_VISIBILITY
1836 atomic_exchange(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1838 return __o->exchange(__d);
1841 template <class _Tp>
1842 _LIBCPP_INLINE_VISIBILITY
1844 atomic_exchange(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1846 return __o->exchange(__d);
1849 // atomic_exchange_explicit
1851 template <class _Tp>
1852 _LIBCPP_INLINE_VISIBILITY
1854 atomic_exchange_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1856 return __o->exchange(__d, __m);
1859 template <class _Tp>
1860 _LIBCPP_INLINE_VISIBILITY
1862 atomic_exchange_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1864 return __o->exchange(__d, __m);
1867 // atomic_compare_exchange_weak
1869 template <class _Tp>
1870 _LIBCPP_INLINE_VISIBILITY
1872 atomic_compare_exchange_weak(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1874 return __o->compare_exchange_weak(*__e, __d);
1877 template <class _Tp>
1878 _LIBCPP_INLINE_VISIBILITY
1880 atomic_compare_exchange_weak(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1882 return __o->compare_exchange_weak(*__e, __d);
1885 // atomic_compare_exchange_strong
1887 template <class _Tp>
1888 _LIBCPP_INLINE_VISIBILITY
1890 atomic_compare_exchange_strong(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1892 return __o->compare_exchange_strong(*__e, __d);
1895 template <class _Tp>
1896 _LIBCPP_INLINE_VISIBILITY
1898 atomic_compare_exchange_strong(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1900 return __o->compare_exchange_strong(*__e, __d);
1903 // atomic_compare_exchange_weak_explicit
1905 template <class _Tp>
1906 _LIBCPP_INLINE_VISIBILITY
1908 atomic_compare_exchange_weak_explicit(volatile atomic<_Tp>* __o, _Tp* __e,
1910 memory_order __s, memory_order __f) _NOEXCEPT
1911 _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
1913 return __o->compare_exchange_weak(*__e, __d, __s, __f);
1916 template <class _Tp>
1917 _LIBCPP_INLINE_VISIBILITY
1919 atomic_compare_exchange_weak_explicit(atomic<_Tp>* __o, _Tp* __e, _Tp __d,
1920 memory_order __s, memory_order __f) _NOEXCEPT
1921 _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
1923 return __o->compare_exchange_weak(*__e, __d, __s, __f);
1926 // atomic_compare_exchange_strong_explicit
1928 template <class _Tp>
1929 _LIBCPP_INLINE_VISIBILITY
1931 atomic_compare_exchange_strong_explicit(volatile atomic<_Tp>* __o,
1933 memory_order __s, memory_order __f) _NOEXCEPT
1934 _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
1936 return __o->compare_exchange_strong(*__e, __d, __s, __f);
1939 template <class _Tp>
1940 _LIBCPP_INLINE_VISIBILITY
1942 atomic_compare_exchange_strong_explicit(atomic<_Tp>* __o, _Tp* __e,
1944 memory_order __s, memory_order __f) _NOEXCEPT
1945 _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
1947 return __o->compare_exchange_strong(*__e, __d, __s, __f);
1952 template <class _Tp>
1953 _LIBCPP_INLINE_VISIBILITY
1956 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1959 atomic_fetch_add(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1961 return __o->fetch_add(__op);
1964 template <class _Tp>
1965 _LIBCPP_INLINE_VISIBILITY
1968 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1971 atomic_fetch_add(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1973 return __o->fetch_add(__op);
1976 template <class _Tp>
1977 _LIBCPP_INLINE_VISIBILITY
1979 atomic_fetch_add(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
1981 return __o->fetch_add(__op);
1984 template <class _Tp>
1985 _LIBCPP_INLINE_VISIBILITY
1987 atomic_fetch_add(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
1989 return __o->fetch_add(__op);
1992 // atomic_fetch_add_explicit
1994 template <class _Tp>
1995 _LIBCPP_INLINE_VISIBILITY
1998 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2001 atomic_fetch_add_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
2003 return __o->fetch_add(__op, __m);
2006 template <class _Tp>
2007 _LIBCPP_INLINE_VISIBILITY
2010 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2013 atomic_fetch_add_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
2015 return __o->fetch_add(__op, __m);
2018 template <class _Tp>
2019 _LIBCPP_INLINE_VISIBILITY
2021 atomic_fetch_add_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
2022 memory_order __m) _NOEXCEPT
2024 return __o->fetch_add(__op, __m);
2027 template <class _Tp>
2028 _LIBCPP_INLINE_VISIBILITY
2030 atomic_fetch_add_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
2032 return __o->fetch_add(__op, __m);
2037 template <class _Tp>
2038 _LIBCPP_INLINE_VISIBILITY
2041 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2044 atomic_fetch_sub(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
2046 return __o->fetch_sub(__op);
2049 template <class _Tp>
2050 _LIBCPP_INLINE_VISIBILITY
2053 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2056 atomic_fetch_sub(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
2058 return __o->fetch_sub(__op);
2061 template <class _Tp>
2062 _LIBCPP_INLINE_VISIBILITY
2064 atomic_fetch_sub(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
2066 return __o->fetch_sub(__op);
2069 template <class _Tp>
2070 _LIBCPP_INLINE_VISIBILITY
2072 atomic_fetch_sub(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
2074 return __o->fetch_sub(__op);
2077 // atomic_fetch_sub_explicit
2079 template <class _Tp>
2080 _LIBCPP_INLINE_VISIBILITY
2083 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2086 atomic_fetch_sub_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
2088 return __o->fetch_sub(__op, __m);
2091 template <class _Tp>
2092 _LIBCPP_INLINE_VISIBILITY
2095 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2098 atomic_fetch_sub_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
2100 return __o->fetch_sub(__op, __m);
2103 template <class _Tp>
2104 _LIBCPP_INLINE_VISIBILITY
2106 atomic_fetch_sub_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
2107 memory_order __m) _NOEXCEPT
2109 return __o->fetch_sub(__op, __m);
2112 template <class _Tp>
2113 _LIBCPP_INLINE_VISIBILITY
2115 atomic_fetch_sub_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
2117 return __o->fetch_sub(__op, __m);
2122 template <class _Tp>
2123 _LIBCPP_INLINE_VISIBILITY
2126 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2129 atomic_fetch_and(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
2131 return __o->fetch_and(__op);
2134 template <class _Tp>
2135 _LIBCPP_INLINE_VISIBILITY
2138 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2141 atomic_fetch_and(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
2143 return __o->fetch_and(__op);
2146 // atomic_fetch_and_explicit
2148 template <class _Tp>
2149 _LIBCPP_INLINE_VISIBILITY
2152 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2155 atomic_fetch_and_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
2157 return __o->fetch_and(__op, __m);
2160 template <class _Tp>
2161 _LIBCPP_INLINE_VISIBILITY
2164 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2167 atomic_fetch_and_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
2169 return __o->fetch_and(__op, __m);
2174 template <class _Tp>
2175 _LIBCPP_INLINE_VISIBILITY
2178 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2181 atomic_fetch_or(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
2183 return __o->fetch_or(__op);
2186 template <class _Tp>
2187 _LIBCPP_INLINE_VISIBILITY
2190 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2193 atomic_fetch_or(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
2195 return __o->fetch_or(__op);
2198 // atomic_fetch_or_explicit
2200 template <class _Tp>
2201 _LIBCPP_INLINE_VISIBILITY
2204 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2207 atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
2209 return __o->fetch_or(__op, __m);
2212 template <class _Tp>
2213 _LIBCPP_INLINE_VISIBILITY
2216 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2219 atomic_fetch_or_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
2221 return __o->fetch_or(__op, __m);
2226 template <class _Tp>
2227 _LIBCPP_INLINE_VISIBILITY
2230 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2233 atomic_fetch_xor(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
2235 return __o->fetch_xor(__op);
2238 template <class _Tp>
2239 _LIBCPP_INLINE_VISIBILITY
2242 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2245 atomic_fetch_xor(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
2247 return __o->fetch_xor(__op);
2250 // atomic_fetch_xor_explicit
2252 template <class _Tp>
2253 _LIBCPP_INLINE_VISIBILITY
2256 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2259 atomic_fetch_xor_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
2261 return __o->fetch_xor(__op, __m);
2264 template <class _Tp>
2265 _LIBCPP_INLINE_VISIBILITY
2268 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2271 atomic_fetch_xor_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
2273 return __o->fetch_xor(__op, __m);
2276 // flag type and operations
2278 typedef struct atomic_flag
2280 __cxx_atomic_impl<_LIBCPP_ATOMIC_FLAG_TYPE> __a_;
2282 _LIBCPP_INLINE_VISIBILITY
2283 bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
2284 {return __cxx_atomic_exchange(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(true), __m);}
2285 _LIBCPP_INLINE_VISIBILITY
2286 bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT
2287 {return __cxx_atomic_exchange(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(true), __m);}
2288 _LIBCPP_INLINE_VISIBILITY
2289 void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
2290 {__cxx_atomic_store(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(false), __m);}
2291 _LIBCPP_INLINE_VISIBILITY
2292 void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT
2293 {__cxx_atomic_store(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(false), __m);}
2295 _LIBCPP_INLINE_VISIBILITY
2296 atomic_flag() _NOEXCEPT _LIBCPP_DEFAULT
2298 _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR
2299 atomic_flag(bool __b) _NOEXCEPT : __a_(__b) {} // EXTENSION
2301 #ifndef _LIBCPP_CXX03_LANG
2302 atomic_flag(const atomic_flag&) = delete;
2303 atomic_flag& operator=(const atomic_flag&) = delete;
2304 atomic_flag& operator=(const atomic_flag&) volatile = delete;
2307 atomic_flag(const atomic_flag&);
2308 atomic_flag& operator=(const atomic_flag&);
2309 atomic_flag& operator=(const atomic_flag&) volatile;
2313 inline _LIBCPP_INLINE_VISIBILITY
2315 atomic_flag_test_and_set(volatile atomic_flag* __o) _NOEXCEPT
2317 return __o->test_and_set();
2320 inline _LIBCPP_INLINE_VISIBILITY
2322 atomic_flag_test_and_set(atomic_flag* __o) _NOEXCEPT
2324 return __o->test_and_set();
2327 inline _LIBCPP_INLINE_VISIBILITY
2329 atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
2331 return __o->test_and_set(__m);
2334 inline _LIBCPP_INLINE_VISIBILITY
2336 atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
2338 return __o->test_and_set(__m);
2341 inline _LIBCPP_INLINE_VISIBILITY
2343 atomic_flag_clear(volatile atomic_flag* __o) _NOEXCEPT
2348 inline _LIBCPP_INLINE_VISIBILITY
2350 atomic_flag_clear(atomic_flag* __o) _NOEXCEPT
2355 inline _LIBCPP_INLINE_VISIBILITY
2357 atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
2362 inline _LIBCPP_INLINE_VISIBILITY
2364 atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
2371 inline _LIBCPP_INLINE_VISIBILITY
2373 atomic_thread_fence(memory_order __m) _NOEXCEPT
2375 __cxx_atomic_thread_fence(__m);
2378 inline _LIBCPP_INLINE_VISIBILITY
2380 atomic_signal_fence(memory_order __m) _NOEXCEPT
2382 __cxx_atomic_signal_fence(__m);
2385 // Atomics for standard typedef types
2387 typedef atomic<bool> atomic_bool;
2388 typedef atomic<char> atomic_char;
2389 typedef atomic<signed char> atomic_schar;
2390 typedef atomic<unsigned char> atomic_uchar;
2391 typedef atomic<short> atomic_short;
2392 typedef atomic<unsigned short> atomic_ushort;
2393 typedef atomic<int> atomic_int;
2394 typedef atomic<unsigned int> atomic_uint;
2395 typedef atomic<long> atomic_long;
2396 typedef atomic<unsigned long> atomic_ulong;
2397 typedef atomic<long long> atomic_llong;
2398 typedef atomic<unsigned long long> atomic_ullong;
2399 typedef atomic<char16_t> atomic_char16_t;
2400 typedef atomic<char32_t> atomic_char32_t;
2401 typedef atomic<wchar_t> atomic_wchar_t;
2403 typedef atomic<int_least8_t> atomic_int_least8_t;
2404 typedef atomic<uint_least8_t> atomic_uint_least8_t;
2405 typedef atomic<int_least16_t> atomic_int_least16_t;
2406 typedef atomic<uint_least16_t> atomic_uint_least16_t;
2407 typedef atomic<int_least32_t> atomic_int_least32_t;
2408 typedef atomic<uint_least32_t> atomic_uint_least32_t;
2409 typedef atomic<int_least64_t> atomic_int_least64_t;
2410 typedef atomic<uint_least64_t> atomic_uint_least64_t;
2412 typedef atomic<int_fast8_t> atomic_int_fast8_t;
2413 typedef atomic<uint_fast8_t> atomic_uint_fast8_t;
2414 typedef atomic<int_fast16_t> atomic_int_fast16_t;
2415 typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
2416 typedef atomic<int_fast32_t> atomic_int_fast32_t;
2417 typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
2418 typedef atomic<int_fast64_t> atomic_int_fast64_t;
2419 typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
2421 typedef atomic< int8_t> atomic_int8_t;
2422 typedef atomic<uint8_t> atomic_uint8_t;
2423 typedef atomic< int16_t> atomic_int16_t;
2424 typedef atomic<uint16_t> atomic_uint16_t;
2425 typedef atomic< int32_t> atomic_int32_t;
2426 typedef atomic<uint32_t> atomic_uint32_t;
2427 typedef atomic< int64_t> atomic_int64_t;
2428 typedef atomic<uint64_t> atomic_uint64_t;
2430 typedef atomic<intptr_t> atomic_intptr_t;
2431 typedef atomic<uintptr_t> atomic_uintptr_t;
2432 typedef atomic<size_t> atomic_size_t;
2433 typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
2434 typedef atomic<intmax_t> atomic_intmax_t;
2435 typedef atomic<uintmax_t> atomic_uintmax_t;
2437 #define ATOMIC_FLAG_INIT {false}
2438 #define ATOMIC_VAR_INIT(__v) {__v}
2440 _LIBCPP_END_NAMESPACE_STD
2442 #endif // _LIBCPP_ATOMIC