2 //===--------------------------- atomic -----------------------------------===//
4 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
5 // See https://llvm.org/LICENSE.txt for license information.
6 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
8 //===----------------------------------------------------------------------===//
10 #ifndef _LIBCPP_ATOMIC
11 #define _LIBCPP_ATOMIC
21 #define __cpp_lib_atomic_is_always_lock_free // as specified by SG10
23 // order and consistency
25 enum memory_order: unspecified // enum class in C++20
28 consume, // load-consume
29 acquire, // load-acquire
30 release, // store-release
31 acq_rel, // store-release load-acquire
32 seq_cst // store-release load-acquire
35 inline constexpr auto memory_order_relaxed = memory_order::relaxed;
36 inline constexpr auto memory_order_consume = memory_order::consume;
37 inline constexpr auto memory_order_acquire = memory_order::acquire;
38 inline constexpr auto memory_order_release = memory_order::release;
39 inline constexpr auto memory_order_acq_rel = memory_order::acq_rel;
40 inline constexpr auto memory_order_seq_cst = memory_order::seq_cst;
42 template <class T> T kill_dependency(T y) noexcept;
46 #define ATOMIC_BOOL_LOCK_FREE unspecified
47 #define ATOMIC_CHAR_LOCK_FREE unspecified
48 #define ATOMIC_CHAR16_T_LOCK_FREE unspecified
49 #define ATOMIC_CHAR32_T_LOCK_FREE unspecified
50 #define ATOMIC_WCHAR_T_LOCK_FREE unspecified
51 #define ATOMIC_SHORT_LOCK_FREE unspecified
52 #define ATOMIC_INT_LOCK_FREE unspecified
53 #define ATOMIC_LONG_LOCK_FREE unspecified
54 #define ATOMIC_LLONG_LOCK_FREE unspecified
55 #define ATOMIC_POINTER_LOCK_FREE unspecified
62 static constexpr bool is_always_lock_free;
63 bool is_lock_free() const volatile noexcept;
64 bool is_lock_free() const noexcept;
66 atomic() noexcept = default;
67 constexpr atomic(T desr) noexcept;
68 atomic(const atomic&) = delete;
69 atomic& operator=(const atomic&) = delete;
70 atomic& operator=(const atomic&) volatile = delete;
72 T load(memory_order m = memory_order_seq_cst) const volatile noexcept;
73 T load(memory_order m = memory_order_seq_cst) const noexcept;
74 operator T() const volatile noexcept;
75 operator T() const noexcept;
76 void store(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
77 void store(T desr, memory_order m = memory_order_seq_cst) noexcept;
78 T operator=(T) volatile noexcept;
79 T operator=(T) noexcept;
81 T exchange(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
82 T exchange(T desr, memory_order m = memory_order_seq_cst) noexcept;
83 bool compare_exchange_weak(T& expc, T desr,
84 memory_order s, memory_order f) volatile noexcept;
85 bool compare_exchange_weak(T& expc, T desr, memory_order s, memory_order f) noexcept;
86 bool compare_exchange_strong(T& expc, T desr,
87 memory_order s, memory_order f) volatile noexcept;
88 bool compare_exchange_strong(T& expc, T desr,
89 memory_order s, memory_order f) noexcept;
90 bool compare_exchange_weak(T& expc, T desr,
91 memory_order m = memory_order_seq_cst) volatile noexcept;
92 bool compare_exchange_weak(T& expc, T desr,
93 memory_order m = memory_order_seq_cst) noexcept;
94 bool compare_exchange_strong(T& expc, T desr,
95 memory_order m = memory_order_seq_cst) volatile noexcept;
96 bool compare_exchange_strong(T& expc, T desr,
97 memory_order m = memory_order_seq_cst) noexcept;
99 void wait(T, memory_order = memory_order::seq_cst) const volatile noexcept;
100 void wait(T, memory_order = memory_order::seq_cst) const noexcept;
101 void notify_one() volatile noexcept;
102 void notify_one() noexcept;
103 void notify_all() volatile noexcept;
104 void notify_all() noexcept;
108 struct atomic<integral>
110 using value_type = integral;
112 static constexpr bool is_always_lock_free;
113 bool is_lock_free() const volatile noexcept;
114 bool is_lock_free() const noexcept;
116 atomic() noexcept = default;
117 constexpr atomic(integral desr) noexcept;
118 atomic(const atomic&) = delete;
119 atomic& operator=(const atomic&) = delete;
120 atomic& operator=(const atomic&) volatile = delete;
122 integral load(memory_order m = memory_order_seq_cst) const volatile noexcept;
123 integral load(memory_order m = memory_order_seq_cst) const noexcept;
124 operator integral() const volatile noexcept;
125 operator integral() const noexcept;
126 void store(integral desr, memory_order m = memory_order_seq_cst) volatile noexcept;
127 void store(integral desr, memory_order m = memory_order_seq_cst) noexcept;
128 integral operator=(integral desr) volatile noexcept;
129 integral operator=(integral desr) noexcept;
131 integral exchange(integral desr,
132 memory_order m = memory_order_seq_cst) volatile noexcept;
133 integral exchange(integral desr, memory_order m = memory_order_seq_cst) noexcept;
134 bool compare_exchange_weak(integral& expc, integral desr,
135 memory_order s, memory_order f) volatile noexcept;
136 bool compare_exchange_weak(integral& expc, integral desr,
137 memory_order s, memory_order f) noexcept;
138 bool compare_exchange_strong(integral& expc, integral desr,
139 memory_order s, memory_order f) volatile noexcept;
140 bool compare_exchange_strong(integral& expc, integral desr,
141 memory_order s, memory_order f) noexcept;
142 bool compare_exchange_weak(integral& expc, integral desr,
143 memory_order m = memory_order_seq_cst) volatile noexcept;
144 bool compare_exchange_weak(integral& expc, integral desr,
145 memory_order m = memory_order_seq_cst) noexcept;
146 bool compare_exchange_strong(integral& expc, integral desr,
147 memory_order m = memory_order_seq_cst) volatile noexcept;
148 bool compare_exchange_strong(integral& expc, integral desr,
149 memory_order m = memory_order_seq_cst) noexcept;
151 integral fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
152 integral fetch_add(integral op, memory_order m = memory_order_seq_cst) noexcept;
153 integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
154 integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) noexcept;
155 integral fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
156 integral fetch_and(integral op, memory_order m = memory_order_seq_cst) noexcept;
157 integral fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
158 integral fetch_or(integral op, memory_order m = memory_order_seq_cst) noexcept;
159 integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
160 integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) noexcept;
162 integral operator++(int) volatile noexcept;
163 integral operator++(int) noexcept;
164 integral operator--(int) volatile noexcept;
165 integral operator--(int) noexcept;
166 integral operator++() volatile noexcept;
167 integral operator++() noexcept;
168 integral operator--() volatile noexcept;
169 integral operator--() noexcept;
170 integral operator+=(integral op) volatile noexcept;
171 integral operator+=(integral op) noexcept;
172 integral operator-=(integral op) volatile noexcept;
173 integral operator-=(integral op) noexcept;
174 integral operator&=(integral op) volatile noexcept;
175 integral operator&=(integral op) noexcept;
176 integral operator|=(integral op) volatile noexcept;
177 integral operator|=(integral op) noexcept;
178 integral operator^=(integral op) volatile noexcept;
179 integral operator^=(integral op) noexcept;
181 void wait(integral, memory_order = memory_order::seq_cst) const volatile noexcept;
182 void wait(integral, memory_order = memory_order::seq_cst) const noexcept;
183 void notify_one() volatile noexcept;
184 void notify_one() noexcept;
185 void notify_all() volatile noexcept;
186 void notify_all() noexcept;
192 using value_type = T*;
194 static constexpr bool is_always_lock_free;
195 bool is_lock_free() const volatile noexcept;
196 bool is_lock_free() const noexcept;
198 atomic() noexcept = default;
199 constexpr atomic(T* desr) noexcept;
200 atomic(const atomic&) = delete;
201 atomic& operator=(const atomic&) = delete;
202 atomic& operator=(const atomic&) volatile = delete;
204 T* load(memory_order m = memory_order_seq_cst) const volatile noexcept;
205 T* load(memory_order m = memory_order_seq_cst) const noexcept;
206 operator T*() const volatile noexcept;
207 operator T*() const noexcept;
208 void store(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
209 void store(T* desr, memory_order m = memory_order_seq_cst) noexcept;
210 T* operator=(T*) volatile noexcept;
211 T* operator=(T*) noexcept;
213 T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
214 T* exchange(T* desr, memory_order m = memory_order_seq_cst) noexcept;
215 bool compare_exchange_weak(T*& expc, T* desr,
216 memory_order s, memory_order f) volatile noexcept;
217 bool compare_exchange_weak(T*& expc, T* desr,
218 memory_order s, memory_order f) noexcept;
219 bool compare_exchange_strong(T*& expc, T* desr,
220 memory_order s, memory_order f) volatile noexcept;
221 bool compare_exchange_strong(T*& expc, T* desr,
222 memory_order s, memory_order f) noexcept;
223 bool compare_exchange_weak(T*& expc, T* desr,
224 memory_order m = memory_order_seq_cst) volatile noexcept;
225 bool compare_exchange_weak(T*& expc, T* desr,
226 memory_order m = memory_order_seq_cst) noexcept;
227 bool compare_exchange_strong(T*& expc, T* desr,
228 memory_order m = memory_order_seq_cst) volatile noexcept;
229 bool compare_exchange_strong(T*& expc, T* desr,
230 memory_order m = memory_order_seq_cst) noexcept;
231 T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
232 T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
233 T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
234 T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
236 T* operator++(int) volatile noexcept;
237 T* operator++(int) noexcept;
238 T* operator--(int) volatile noexcept;
239 T* operator--(int) noexcept;
240 T* operator++() volatile noexcept;
241 T* operator++() noexcept;
242 T* operator--() volatile noexcept;
243 T* operator--() noexcept;
244 T* operator+=(ptrdiff_t op) volatile noexcept;
245 T* operator+=(ptrdiff_t op) noexcept;
246 T* operator-=(ptrdiff_t op) volatile noexcept;
247 T* operator-=(ptrdiff_t op) noexcept;
249 void wait(T*, memory_order = memory_order::seq_cst) const volatile noexcept;
250 void wait(T*, memory_order = memory_order::seq_cst) const noexcept;
251 void notify_one() volatile noexcept;
252 void notify_one() noexcept;
253 void notify_all() volatile noexcept;
254 void notify_all() noexcept;
259 bool atomic_is_lock_free(const volatile atomic<T>* obj) noexcept;
262 bool atomic_is_lock_free(const atomic<T>* obj) noexcept;
265 void atomic_store(volatile atomic<T>* obj, T desr) noexcept;
268 void atomic_store(atomic<T>* obj, T desr) noexcept;
271 void atomic_store_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
274 void atomic_store_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
277 T atomic_load(const volatile atomic<T>* obj) noexcept;
280 T atomic_load(const atomic<T>* obj) noexcept;
283 T atomic_load_explicit(const volatile atomic<T>* obj, memory_order m) noexcept;
286 T atomic_load_explicit(const atomic<T>* obj, memory_order m) noexcept;
289 T atomic_exchange(volatile atomic<T>* obj, T desr) noexcept;
292 T atomic_exchange(atomic<T>* obj, T desr) noexcept;
295 T atomic_exchange_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
298 T atomic_exchange_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
301 bool atomic_compare_exchange_weak(volatile atomic<T>* obj, T* expc, T desr) noexcept;
304 bool atomic_compare_exchange_weak(atomic<T>* obj, T* expc, T desr) noexcept;
307 bool atomic_compare_exchange_strong(volatile atomic<T>* obj, T* expc, T desr) noexcept;
310 bool atomic_compare_exchange_strong(atomic<T>* obj, T* expc, T desr) noexcept;
313 bool atomic_compare_exchange_weak_explicit(volatile atomic<T>* obj, T* expc,
315 memory_order s, memory_order f) noexcept;
318 bool atomic_compare_exchange_weak_explicit(atomic<T>* obj, T* expc, T desr,
319 memory_order s, memory_order f) noexcept;
322 bool atomic_compare_exchange_strong_explicit(volatile atomic<T>* obj,
324 memory_order s, memory_order f) noexcept;
327 bool atomic_compare_exchange_strong_explicit(atomic<T>* obj, T* expc,
329 memory_order s, memory_order f) noexcept;
332 void atomic_wait(const volatile atomic<T>* obj, T old) noexcept;
335 void atomic_wait(const atomic<T>* obj, T old) noexcept;
338 void atomic_wait_explicit(const volatile atomic<T>* obj, T old, memory_order m) noexcept;
341 void atomic_wait_explicit(const atomic<T>* obj, T old, memory_order m) noexcept;
344 void atomic_notify_one(volatile atomic<T>* obj) noexcept;
347 void atomic_notify_one(atomic<T>* obj) noexcept;
350 void atomic_notify_all(volatile atomic<T>* obj) noexcept;
353 void atomic_notify_all(atomic<T>* obj) noexcept;
355 template <class Integral>
356 Integral atomic_fetch_add(volatile atomic<Integral>* obj, Integral op) noexcept;
358 template <class Integral>
359 Integral atomic_fetch_add(atomic<Integral>* obj, Integral op) noexcept;
361 template <class Integral>
362 Integral atomic_fetch_add_explicit(volatile atomic<Integral>* obj, Integral op,
363 memory_order m) noexcept;
364 template <class Integral>
365 Integral atomic_fetch_add_explicit(atomic<Integral>* obj, Integral op,
366 memory_order m) noexcept;
367 template <class Integral>
368 Integral atomic_fetch_sub(volatile atomic<Integral>* obj, Integral op) noexcept;
370 template <class Integral>
371 Integral atomic_fetch_sub(atomic<Integral>* obj, Integral op) noexcept;
373 template <class Integral>
374 Integral atomic_fetch_sub_explicit(volatile atomic<Integral>* obj, Integral op,
375 memory_order m) noexcept;
377 template <class Integral>
378 Integral atomic_fetch_sub_explicit(atomic<Integral>* obj, Integral op,
379 memory_order m) noexcept;
381 template <class Integral>
382 Integral atomic_fetch_and(volatile atomic<Integral>* obj, Integral op) noexcept;
384 template <class Integral>
385 Integral atomic_fetch_and(atomic<Integral>* obj, Integral op) noexcept;
387 template <class Integral>
388 Integral atomic_fetch_and_explicit(volatile atomic<Integral>* obj, Integral op,
389 memory_order m) noexcept;
391 template <class Integral>
392 Integral atomic_fetch_and_explicit(atomic<Integral>* obj, Integral op,
393 memory_order m) noexcept;
395 template <class Integral>
396 Integral atomic_fetch_or(volatile atomic<Integral>* obj, Integral op) noexcept;
398 template <class Integral>
399 Integral atomic_fetch_or(atomic<Integral>* obj, Integral op) noexcept;
401 template <class Integral>
402 Integral atomic_fetch_or_explicit(volatile atomic<Integral>* obj, Integral op,
403 memory_order m) noexcept;
405 template <class Integral>
406 Integral atomic_fetch_or_explicit(atomic<Integral>* obj, Integral op,
407 memory_order m) noexcept;
409 template <class Integral>
410 Integral atomic_fetch_xor(volatile atomic<Integral>* obj, Integral op) noexcept;
412 template <class Integral>
413 Integral atomic_fetch_xor(atomic<Integral>* obj, Integral op) noexcept;
415 template <class Integral>
416 Integral atomic_fetch_xor_explicit(volatile atomic<Integral>* obj, Integral op,
417 memory_order m) noexcept;
419 template <class Integral>
420 Integral atomic_fetch_xor_explicit(atomic<Integral>* obj, Integral op,
421 memory_order m) noexcept;
424 T* atomic_fetch_add(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
427 T* atomic_fetch_add(atomic<T*>* obj, ptrdiff_t op) noexcept;
430 T* atomic_fetch_add_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
431 memory_order m) noexcept;
434 T* atomic_fetch_add_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
437 T* atomic_fetch_sub(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
440 T* atomic_fetch_sub(atomic<T*>* obj, ptrdiff_t op) noexcept;
443 T* atomic_fetch_sub_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
444 memory_order m) noexcept;
447 T* atomic_fetch_sub_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
449 // Atomics for standard typedef types
451 typedef atomic<bool> atomic_bool;
452 typedef atomic<char> atomic_char;
453 typedef atomic<signed char> atomic_schar;
454 typedef atomic<unsigned char> atomic_uchar;
455 typedef atomic<short> atomic_short;
456 typedef atomic<unsigned short> atomic_ushort;
457 typedef atomic<int> atomic_int;
458 typedef atomic<unsigned int> atomic_uint;
459 typedef atomic<long> atomic_long;
460 typedef atomic<unsigned long> atomic_ulong;
461 typedef atomic<long long> atomic_llong;
462 typedef atomic<unsigned long long> atomic_ullong;
463 typedef atomic<char16_t> atomic_char16_t;
464 typedef atomic<char32_t> atomic_char32_t;
465 typedef atomic<wchar_t> atomic_wchar_t;
467 typedef atomic<int_least8_t> atomic_int_least8_t;
468 typedef atomic<uint_least8_t> atomic_uint_least8_t;
469 typedef atomic<int_least16_t> atomic_int_least16_t;
470 typedef atomic<uint_least16_t> atomic_uint_least16_t;
471 typedef atomic<int_least32_t> atomic_int_least32_t;
472 typedef atomic<uint_least32_t> atomic_uint_least32_t;
473 typedef atomic<int_least64_t> atomic_int_least64_t;
474 typedef atomic<uint_least64_t> atomic_uint_least64_t;
476 typedef atomic<int_fast8_t> atomic_int_fast8_t;
477 typedef atomic<uint_fast8_t> atomic_uint_fast8_t;
478 typedef atomic<int_fast16_t> atomic_int_fast16_t;
479 typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
480 typedef atomic<int_fast32_t> atomic_int_fast32_t;
481 typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
482 typedef atomic<int_fast64_t> atomic_int_fast64_t;
483 typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
485 typedef atomic<int8_t> atomic_int8_t;
486 typedef atomic<uint8_t> atomic_uint8_t;
487 typedef atomic<int16_t> atomic_int16_t;
488 typedef atomic<uint16_t> atomic_uint16_t;
489 typedef atomic<int32_t> atomic_int32_t;
490 typedef atomic<uint32_t> atomic_uint32_t;
491 typedef atomic<int64_t> atomic_int64_t;
492 typedef atomic<uint64_t> atomic_uint64_t;
494 typedef atomic<intptr_t> atomic_intptr_t;
495 typedef atomic<uintptr_t> atomic_uintptr_t;
496 typedef atomic<size_t> atomic_size_t;
497 typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
498 typedef atomic<intmax_t> atomic_intmax_t;
499 typedef atomic<uintmax_t> atomic_uintmax_t;
501 // flag type and operations
503 typedef struct atomic_flag
505 atomic_flag() noexcept = default;
506 atomic_flag(const atomic_flag&) = delete;
507 atomic_flag& operator=(const atomic_flag&) = delete;
508 atomic_flag& operator=(const atomic_flag&) volatile = delete;
510 bool test(memory_order m = memory_order_seq_cst) volatile noexcept;
511 bool test(memory_order m = memory_order_seq_cst) noexcept;
512 bool test_and_set(memory_order m = memory_order_seq_cst) volatile noexcept;
513 bool test_and_set(memory_order m = memory_order_seq_cst) noexcept;
514 void clear(memory_order m = memory_order_seq_cst) volatile noexcept;
515 void clear(memory_order m = memory_order_seq_cst) noexcept;
517 void wait(bool, memory_order = memory_order::seq_cst) const volatile noexcept;
518 void wait(bool, memory_order = memory_order::seq_cst) const noexcept;
519 void notify_one() volatile noexcept;
520 void notify_one() noexcept;
521 void notify_all() volatile noexcept;
522 void notify_all() noexcept;
525 bool atomic_flag_test(volatile atomic_flag* obj) noexcept;
526 bool atomic_flag_test(atomic_flag* obj) noexcept;
527 bool atomic_flag_test_explicit(volatile atomic_flag* obj,
528 memory_order m) noexcept;
529 bool atomic_flag_test_explicit(atomic_flag* obj, memory_order m) noexcept;
530 bool atomic_flag_test_and_set(volatile atomic_flag* obj) noexcept;
531 bool atomic_flag_test_and_set(atomic_flag* obj) noexcept;
532 bool atomic_flag_test_and_set_explicit(volatile atomic_flag* obj,
533 memory_order m) noexcept;
534 bool atomic_flag_test_and_set_explicit(atomic_flag* obj, memory_order m) noexcept;
535 void atomic_flag_clear(volatile atomic_flag* obj) noexcept;
536 void atomic_flag_clear(atomic_flag* obj) noexcept;
537 void atomic_flag_clear_explicit(volatile atomic_flag* obj, memory_order m) noexcept;
538 void atomic_flag_clear_explicit(atomic_flag* obj, memory_order m) noexcept;
540 void atomic_flag_wait(const volatile atomic_flag* obj, bool old) noexcept;
541 void atomic_flag_wait(const atomic_flag* obj, bool old) noexcept;
542 void atomic_flag_wait_explicit(const volatile atomic_flag* obj, bool old, memory_order m) noexcept;
543 void atomic_flag_wait_explicit(const atomic_flag* obj, bool old, memory_order m) noexcept;
544 void atomic_flag_notify_one(volatile atomic_flag* obj) noexcept;
545 void atomic_flag_notify_one(atomic_flag* obj) noexcept;
546 void atomic_flag_notify_all(volatile atomic_flag* obj) noexcept;
547 void atomic_flag_notify_all(atomic_flag* obj) noexcept;
551 void atomic_thread_fence(memory_order m) noexcept;
552 void atomic_signal_fence(memory_order m) noexcept;
557 void atomic_init(volatile atomic<T>* obj, typename atomic<T>::value_type desr) noexcept;
560 void atomic_init(atomic<T>* obj, typename atomic<T>::value_type desr) noexcept;
562 #define ATOMIC_VAR_INIT(value) see below
564 #define ATOMIC_FLAG_INIT see below
571 #include <__threading_support>
575 #include <type_traits>
578 #if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
579 #pragma GCC system_header
582 #ifdef _LIBCPP_HAS_NO_THREADS
583 # error <atomic> is not supported on this single threaded system
585 #ifdef _LIBCPP_HAS_NO_ATOMIC_HEADER
586 # error <atomic> is not implemented
588 #ifdef kill_dependency
589 # error C++ standard library is incompatible with <stdatomic.h>
// Compile-time diagnostics (emitted via _LIBCPP_DIAGNOSE_WARNING) for
// memory_order arguments that [atomics.order] forbids for the given
// operation. These macros only warn; they never change runtime behavior.
// A store may not use consume, acquire, or acq_rel ordering.
592 #define _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m) \
593 _LIBCPP_DIAGNOSE_WARNING(__m == memory_order_consume || \
594 __m == memory_order_acquire || \
595 __m == memory_order_acq_rel, \
596 "memory order argument to atomic operation is invalid")
// A load may not use release or acq_rel ordering.
598 #define _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) \
599 _LIBCPP_DIAGNOSE_WARNING(__m == memory_order_release || \
600 __m == memory_order_acq_rel, \
601 "memory order argument to atomic operation is invalid")
// For compare-exchange, only the *failure* order __f is constrained here:
// it performs a load on failure, so it may not be release or acq_rel.
// The success order __m is intentionally unchecked (any order is valid).
603 #define _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__m, __f) \
604 _LIBCPP_DIAGNOSE_WARNING(__f == memory_order_release || \
605 __f == memory_order_acq_rel, \
606 "memory order argument to atomic operation is invalid")
608 _LIBCPP_BEGIN_NAMESPACE_STD
610 // Figure out what the underlying type for `memory_order` would be if it were
611 // declared as an unscoped enum (accounting for -fshort-enums). Use this result
612 // to pin the underlying type in C++20.
613 enum __legacy_memory_order {
622 typedef underlying_type<__legacy_memory_order>::type __memory_order_underlying_t;
624 #if _LIBCPP_STD_VER > 17
626 enum class memory_order : __memory_order_underlying_t {
627 relaxed = __mo_relaxed,
628 consume = __mo_consume,
629 acquire = __mo_acquire,
630 release = __mo_release,
631 acq_rel = __mo_acq_rel,
632 seq_cst = __mo_seq_cst
635 inline constexpr auto memory_order_relaxed = memory_order::relaxed;
636 inline constexpr auto memory_order_consume = memory_order::consume;
637 inline constexpr auto memory_order_acquire = memory_order::acquire;
638 inline constexpr auto memory_order_release = memory_order::release;
639 inline constexpr auto memory_order_acq_rel = memory_order::acq_rel;
640 inline constexpr auto memory_order_seq_cst = memory_order::seq_cst;
644 typedef enum memory_order {
645 memory_order_relaxed = __mo_relaxed,
646 memory_order_consume = __mo_consume,
647 memory_order_acquire = __mo_acquire,
648 memory_order_release = __mo_release,
649 memory_order_acq_rel = __mo_acq_rel,
650 memory_order_seq_cst = __mo_seq_cst,
653 #endif // _LIBCPP_STD_VER > 17
655 template <typename _Tp> _LIBCPP_INLINE_VISIBILITY
656 bool __cxx_nonatomic_compare_equal(_Tp const& __lhs, _Tp const& __rhs) {
657 return memcmp(&__lhs, &__rhs, sizeof(_Tp)) == 0;
// Sanity check: in C++20 memory_order is declared with an explicit underlying
// type (__memory_order_underlying_t, derived from __legacy_memory_order above);
// verify the pre-C++20 unscoped enum ends up with that same underlying type so
// the ABI does not change between language modes.
660 static_assert((is_same<underlying_type<memory_order>::type, __memory_order_underlying_t>::value),
661 "unexpected underlying type for std::memory_order");
663 #if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP) || \
664 defined(_LIBCPP_ATOMIC_ONLY_USE_BUILTINS)
666 // [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because
667 // the default operator= in an object is not volatile, a byte-by-byte copy
669 template <typename _Tp, typename _Tv> _LIBCPP_INLINE_VISIBILITY
670 typename enable_if<is_assignable<_Tp&, _Tv>::value>::type
671 __cxx_atomic_assign_volatile(_Tp& __a_value, _Tv const& __val) {
674 template <typename _Tp, typename _Tv> _LIBCPP_INLINE_VISIBILITY
675 typename enable_if<is_assignable<_Tp&, _Tv>::value>::type
676 __cxx_atomic_assign_volatile(_Tp volatile& __a_value, _Tv volatile const& __val) {
677 volatile char* __to = reinterpret_cast<volatile char*>(&__a_value);
678 volatile char* __end = __to + sizeof(_Tp);
679 volatile const char* __from = reinterpret_cast<volatile const char*>(&__val);
680 while (__to != __end)
686 #if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP)
688 template <typename _Tp>
689 struct __cxx_atomic_base_impl {
691 _LIBCPP_INLINE_VISIBILITY
692 #ifndef _LIBCPP_CXX03_LANG
693 __cxx_atomic_base_impl() _NOEXCEPT = default;
695 __cxx_atomic_base_impl() _NOEXCEPT : __a_value() {}
696 #endif // _LIBCPP_CXX03_LANG
697 _LIBCPP_CONSTEXPR explicit __cxx_atomic_base_impl(_Tp value) _NOEXCEPT
698 : __a_value(value) {}
// Map a std::memory_order value to the corresponding GCC __ATOMIC_* constant
// for use with the __atomic_* builtins.
702 _LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR int __to_gcc_order(memory_order __order) {
703 // Avoid switch statement to make this a constexpr.
704 return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
705 (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
706 (__order == memory_order_release ? __ATOMIC_RELEASE:
707 (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
708 (__order == memory_order_acq_rel ? __ATOMIC_ACQ_REL:
709 __ATOMIC_CONSUME))));
// Map a std::memory_order to the GCC __ATOMIC_* constant usable as a
// compare-exchange *failure* order. The failure path is a load, so
// store-only semantics are demoted: release -> relaxed, acq_rel -> acquire.
712 _LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR int __to_gcc_failure_order(memory_order __order) {
713 // Avoid switch statement to make this a constexpr.
714 return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
715 (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
716 (__order == memory_order_release ? __ATOMIC_RELAXED:
717 (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
718 (__order == memory_order_acq_rel ? __ATOMIC_ACQUIRE:
719 __ATOMIC_CONSUME))));
722 template <typename _Tp>
723 _LIBCPP_INLINE_VISIBILITY
724 void __cxx_atomic_init(volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp __val) {
725 __cxx_atomic_assign_volatile(__a->__a_value, __val);
728 template <typename _Tp>
729 _LIBCPP_INLINE_VISIBILITY
730 void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp>* __a, _Tp __val) {
731 __a->__a_value = __val;
734 _LIBCPP_INLINE_VISIBILITY inline
735 void __cxx_atomic_thread_fence(memory_order __order) {
736 __atomic_thread_fence(__to_gcc_order(__order));
739 _LIBCPP_INLINE_VISIBILITY inline
740 void __cxx_atomic_signal_fence(memory_order __order) {
741 __atomic_signal_fence(__to_gcc_order(__order));
744 template <typename _Tp>
745 _LIBCPP_INLINE_VISIBILITY
746 void __cxx_atomic_store(volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp __val,
747 memory_order __order) {
748 __atomic_store(&__a->__a_value, &__val,
749 __to_gcc_order(__order));
752 template <typename _Tp>
753 _LIBCPP_INLINE_VISIBILITY
754 void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp>* __a, _Tp __val,
755 memory_order __order) {
756 __atomic_store(&__a->__a_value, &__val,
757 __to_gcc_order(__order));
760 template <typename _Tp>
761 _LIBCPP_INLINE_VISIBILITY
762 _Tp __cxx_atomic_load(const volatile __cxx_atomic_base_impl<_Tp>* __a,
763 memory_order __order) {
765 __atomic_load(&__a->__a_value, &__ret,
766 __to_gcc_order(__order));
770 template <typename _Tp>
771 _LIBCPP_INLINE_VISIBILITY
772 _Tp __cxx_atomic_load(const __cxx_atomic_base_impl<_Tp>* __a, memory_order __order) {
774 __atomic_load(&__a->__a_value, &__ret,
775 __to_gcc_order(__order));
779 template <typename _Tp>
780 _LIBCPP_INLINE_VISIBILITY
781 _Tp __cxx_atomic_exchange(volatile __cxx_atomic_base_impl<_Tp>* __a,
782 _Tp __value, memory_order __order) {
784 __atomic_exchange(&__a->__a_value, &__value, &__ret,
785 __to_gcc_order(__order));
789 template <typename _Tp>
790 _LIBCPP_INLINE_VISIBILITY
791 _Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp>* __a, _Tp __value,
792 memory_order __order) {
794 __atomic_exchange(&__a->__a_value, &__value, &__ret,
795 __to_gcc_order(__order));
799 template <typename _Tp>
800 _LIBCPP_INLINE_VISIBILITY
801 bool __cxx_atomic_compare_exchange_strong(
802 volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value,
803 memory_order __success, memory_order __failure) {
804 return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
806 __to_gcc_order(__success),
807 __to_gcc_failure_order(__failure));
810 template <typename _Tp>
811 _LIBCPP_INLINE_VISIBILITY
812 bool __cxx_atomic_compare_exchange_strong(
813 __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success,
814 memory_order __failure) {
815 return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
817 __to_gcc_order(__success),
818 __to_gcc_failure_order(__failure));
821 template <typename _Tp>
822 _LIBCPP_INLINE_VISIBILITY
823 bool __cxx_atomic_compare_exchange_weak(
824 volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value,
825 memory_order __success, memory_order __failure) {
826 return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
828 __to_gcc_order(__success),
829 __to_gcc_failure_order(__failure));
832 template <typename _Tp>
833 _LIBCPP_INLINE_VISIBILITY
834 bool __cxx_atomic_compare_exchange_weak(
835 __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success,
836 memory_order __failure) {
837 return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
839 __to_gcc_order(__success),
840 __to_gcc_failure_order(__failure));
// __skip_amt<_Tp>::value is the multiplier applied to fetch_add/fetch_sub
// deltas: 1 for arithmetic types, sizeof(_Tp) for _Tp* so that pointer
// arithmetic advances by whole objects (the __atomic builtins operate on
// raw byte offsets for pointers).
843 template <typename _Tp>
844 struct __skip_amt { enum {value = 1}; };
846 template <typename _Tp>
847 struct __skip_amt<_Tp*> { enum {value = sizeof(_Tp)}; };
849 // FIXME: Haven't figured out what the spec says about using arrays with
850 // atomic_fetch_add. Force a failure rather than creating bad behavior.
// Array specializations deliberately have no ::value member, so any use
// with an array type fails to compile.
851 template <typename _Tp>
852 struct __skip_amt<_Tp[]> { };
853 template <typename _Tp, int n>
854 struct __skip_amt<_Tp[n]> { };
// Atomic fetch-and-add via the GCC builtin; returns the value held before
// the addition. __delta is scaled by __skip_amt so pointer atomics advance
// by whole objects. Volatile overload.
856 template <typename _Tp, typename _Td>
857 _LIBCPP_INLINE_VISIBILITY
858 _Tp __cxx_atomic_fetch_add(volatile __cxx_atomic_base_impl<_Tp>* __a,
859 _Td __delta, memory_order __order) {
860 return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
861 __to_gcc_order(__order));
// Non-volatile overload; identical semantics to the volatile one above.
864 template <typename _Tp, typename _Td>
865 _LIBCPP_INLINE_VISIBILITY
866 _Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta,
867 memory_order __order) {
868 return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
869 __to_gcc_order(__order));
872 template <typename _Tp, typename _Td>
873 _LIBCPP_INLINE_VISIBILITY
874 _Tp __cxx_atomic_fetch_sub(volatile __cxx_atomic_base_impl<_Tp>* __a,
875 _Td __delta, memory_order __order) {
876 return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
877 __to_gcc_order(__order));
880 template <typename _Tp, typename _Td>
881 _LIBCPP_INLINE_VISIBILITY
882 _Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta,
883 memory_order __order) {
884 return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
885 __to_gcc_order(__order));
// Bitwise read-modify-write operations (and/or/xor) on top of the GCC
// __atomic_* builtins. Unlike fetch_add/fetch_sub there is no
// __skip_amt scaling: the pattern is applied directly, and these are
// only instantiated for types where bitwise ops make sense.
888 template <typename _Tp>
889 _LIBCPP_INLINE_VISIBILITY
890 _Tp __cxx_atomic_fetch_and(volatile __cxx_atomic_base_impl<_Tp>* __a,
891 _Tp __pattern, memory_order __order) {
892 return __atomic_fetch_and(&__a->__a_value, __pattern,
893 __to_gcc_order(__order));
896 template <typename _Tp>
897 _LIBCPP_INLINE_VISIBILITY
898 _Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp>* __a,
899 _Tp __pattern, memory_order __order) {
900 return __atomic_fetch_and(&__a->__a_value, __pattern,
901 __to_gcc_order(__order));
904 template <typename _Tp>
905 _LIBCPP_INLINE_VISIBILITY
906 _Tp __cxx_atomic_fetch_or(volatile __cxx_atomic_base_impl<_Tp>* __a,
907 _Tp __pattern, memory_order __order) {
908 return __atomic_fetch_or(&__a->__a_value, __pattern,
909 __to_gcc_order(__order));
912 template <typename _Tp>
913 _LIBCPP_INLINE_VISIBILITY
914 _Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern,
915 memory_order __order) {
916 return __atomic_fetch_or(&__a->__a_value, __pattern,
917 __to_gcc_order(__order));
920 template <typename _Tp>
921 _LIBCPP_INLINE_VISIBILITY
922 _Tp __cxx_atomic_fetch_xor(volatile __cxx_atomic_base_impl<_Tp>* __a,
923 _Tp __pattern, memory_order __order) {
924 return __atomic_fetch_xor(&__a->__a_value, __pattern,
925 __to_gcc_order(__order));
928 template <typename _Tp>
929 _LIBCPP_INLINE_VISIBILITY
930 _Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern,
931 memory_order __order) {
932 return __atomic_fetch_xor(&__a->__a_value, __pattern,
933 __to_gcc_order(__order));
// Lock-freedom query for the GCC backend. Per the GCC builtin
// documentation, the null second argument means "assume the object is
// typically aligned for its size".
936 #define __cxx_atomic_is_lock_free(__s) __atomic_is_lock_free(__s, 0)
938 #elif defined(_LIBCPP_HAS_C_ATOMIC_IMP)
// Storage for the Clang/C11 backend: the value lives in a C11
// _Atomic(_Tp) member so the __c11_atomic_* builtins can operate on it
// directly.
940 template <typename _Tp>
941 struct __cxx_atomic_base_impl {
943 _LIBCPP_INLINE_VISIBILITY
944 #ifndef _LIBCPP_CXX03_LANG
945 __cxx_atomic_base_impl() _NOEXCEPT = default;
// C++03 branch (the intervening #else line is not visible in this
// chunk -- TODO confirm): explicit value-init since "= default" is
// unavailable pre-C++11.
947 __cxx_atomic_base_impl() _NOEXCEPT : __a_value() {}
948 #endif // _LIBCPP_CXX03_LANG
949 _LIBCPP_CONSTEXPR explicit __cxx_atomic_base_impl(_Tp value) _NOEXCEPT
950 : __a_value(value) {}
// _LIBCPP_DISABLE_EXTENSION_WARNING: _Atomic is a C11 extension in C++.
951 _LIBCPP_DISABLE_EXTENSION_WARNING _Atomic(_Tp) __a_value;
// Lock-freedom query for the C11 backend.
954 #define __cxx_atomic_is_lock_free(__s) __c11_atomic_is_lock_free(__s)
// Thread and signal fences for the C11 backend. memory_order is cast
// to its underlying integer type before being handed to the builtin.
956 _LIBCPP_INLINE_VISIBILITY inline
957 void __cxx_atomic_thread_fence(memory_order __order) _NOEXCEPT {
958 __c11_atomic_thread_fence(static_cast<__memory_order_underlying_t>(__order));
// Signal fence: compiler-only barrier (orders with respect to a signal
// handler on the same thread).
961 _LIBCPP_INLINE_VISIBILITY inline
962 void __cxx_atomic_signal_fence(memory_order __order) _NOEXCEPT {
963 __c11_atomic_signal_fence(static_cast<__memory_order_underlying_t>(__order));
// Non-atomic initialization (C11 atomic_init semantics), volatile and
// non-volatile overloads.
967 _LIBCPP_INLINE_VISIBILITY
968 void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __val) _NOEXCEPT {
969 __c11_atomic_init(&__a->__a_value, __val);
972 _LIBCPP_INLINE_VISIBILITY
973 void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp> * __a, _Tp __val) _NOEXCEPT {
974 __c11_atomic_init(&__a->__a_value, __val);
// Atomic store with the requested memory order.
978 _LIBCPP_INLINE_VISIBILITY
979 void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __val, memory_order __order) _NOEXCEPT {
980 __c11_atomic_store(&__a->__a_value, __val, static_cast<__memory_order_underlying_t>(__order));
983 _LIBCPP_INLINE_VISIBILITY
984 void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp> * __a, _Tp __val, memory_order __order) _NOEXCEPT {
985 __c11_atomic_store(&__a->__a_value, __val, static_cast<__memory_order_underlying_t>(__order));
// Atomic load. The const_cast strips const from the _Atomic member
// because __c11_atomic_load takes a pointer to non-const; the load
// itself does not modify the object.
989 _LIBCPP_INLINE_VISIBILITY
990 _Tp __cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const volatile* __a, memory_order __order) _NOEXCEPT {
991 using __ptr_type = typename remove_const<decltype(__a->__a_value)>::type*;
992 return __c11_atomic_load(const_cast<__ptr_type>(&__a->__a_value), static_cast<__memory_order_underlying_t>(__order));
995 _LIBCPP_INLINE_VISIBILITY
996 _Tp __cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const* __a, memory_order __order) _NOEXCEPT {
997 using __ptr_type = typename remove_const<decltype(__a->__a_value)>::type*;
998 return __c11_atomic_load(const_cast<__ptr_type>(&__a->__a_value), static_cast<__memory_order_underlying_t>(__order));
// Atomic exchange: store __value, return the previous value.
1002 _LIBCPP_INLINE_VISIBILITY
1003 _Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __value, memory_order __order) _NOEXCEPT {
1004 return __c11_atomic_exchange(&__a->__a_value, __value, static_cast<__memory_order_underlying_t>(__order));
1007 _LIBCPP_INLINE_VISIBILITY
1008 _Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp> * __a, _Tp __value, memory_order __order) _NOEXCEPT {
1009 return __c11_atomic_exchange(&__a->__a_value, __value, static_cast<__memory_order_underlying_t>(__order));
// Compare-exchange, strong then weak, each in volatile/non-volatile
// pairs. On failure the builtin writes the observed value back through
// __expected; __success / __failure are the orders for the two
// outcomes.
1013 _LIBCPP_INLINE_VISIBILITY
1014 bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
1015 return __c11_atomic_compare_exchange_strong(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__failure));
1018 _LIBCPP_INLINE_VISIBILITY
1019 bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_base_impl<_Tp> * __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
1020 return __c11_atomic_compare_exchange_strong(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__failure));
// Weak variant: may fail spuriously, intended for use in retry loops.
1024 _LIBCPP_INLINE_VISIBILITY
1025 bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
1026 return __c11_atomic_compare_exchange_weak(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__failure));
1029 _LIBCPP_INLINE_VISIBILITY
1030 bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_base_impl<_Tp> * __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
1031 return __c11_atomic_compare_exchange_weak(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__failure));
// fetch_add / fetch_sub for the C11 backend. Arithmetic overloads take
// a _Tp delta; the separate _Tp* overloads take a ptrdiff_t measured in
// objects -- scaling to bytes is done by the __c11_atomic builtin
// itself, so no __skip_amt is needed here.
1035 _LIBCPP_INLINE_VISIBILITY
1036 _Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __delta, memory_order __order) _NOEXCEPT {
1037 return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1040 _LIBCPP_INLINE_VISIBILITY
1041 _Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp> * __a, _Tp __delta, memory_order __order) _NOEXCEPT {
1042 return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
// Pointer overloads: delta is in objects (ptrdiff_t).
1046 _LIBCPP_INLINE_VISIBILITY
1047 _Tp* __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
1048 return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1051 _LIBCPP_INLINE_VISIBILITY
1052 _Tp* __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp*> * __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
1053 return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
// fetch_sub mirrors fetch_add above.
1057 _LIBCPP_INLINE_VISIBILITY
1058 _Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __delta, memory_order __order) _NOEXCEPT {
1059 return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1062 _LIBCPP_INLINE_VISIBILITY
1063 _Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp> * __a, _Tp __delta, memory_order __order) _NOEXCEPT {
1064 return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1067 _LIBCPP_INLINE_VISIBILITY
1068 _Tp* __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
1069 return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1072 _LIBCPP_INLINE_VISIBILITY
1073 _Tp* __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp*> * __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
1074 return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
// Bitwise fetch_and / fetch_or / fetch_xor for the C11 backend,
// volatile and non-volatile overload pairs; the pattern is applied
// directly (no pointer overloads -- bitwise ops are integral-only).
1078 _LIBCPP_INLINE_VISIBILITY
1079 _Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1080 return __c11_atomic_fetch_and(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1083 _LIBCPP_INLINE_VISIBILITY
1084 _Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1085 return __c11_atomic_fetch_and(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1089 _LIBCPP_INLINE_VISIBILITY
1090 _Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1091 return __c11_atomic_fetch_or(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1094 _LIBCPP_INLINE_VISIBILITY
1095 _Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1096 return __c11_atomic_fetch_or(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1100 _LIBCPP_INLINE_VISIBILITY
1101 _Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1102 return __c11_atomic_fetch_xor(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1105 _LIBCPP_INLINE_VISIBILITY
1106 _Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1107 return __c11_atomic_fetch_xor(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1110 #endif // _LIBCPP_HAS_GCC_ATOMIC_IMP, _LIBCPP_HAS_C_ATOMIC_IMP
// std::kill_dependency ([atomics.order]): terminates a
// memory_order_consume dependency chain by returning its argument.
// (The function body line is not visible in this chunk.)
1112 template <class _Tp>
1113 _LIBCPP_INLINE_VISIBILITY
1114 _Tp kill_dependency(_Tp __y) _NOEXCEPT
1119 #if defined(__CLANG_ATOMIC_BOOL_LOCK_FREE)
1120 # define ATOMIC_BOOL_LOCK_FREE __CLANG_ATOMIC_BOOL_LOCK_FREE
1121 # define ATOMIC_CHAR_LOCK_FREE __CLANG_ATOMIC_CHAR_LOCK_FREE
1122 # define ATOMIC_CHAR16_T_LOCK_FREE __CLANG_ATOMIC_CHAR16_T_LOCK_FREE
1123 # define ATOMIC_CHAR32_T_LOCK_FREE __CLANG_ATOMIC_CHAR32_T_LOCK_FREE
1124 # define ATOMIC_WCHAR_T_LOCK_FREE __CLANG_ATOMIC_WCHAR_T_LOCK_FREE
1125 # define ATOMIC_SHORT_LOCK_FREE __CLANG_ATOMIC_SHORT_LOCK_FREE
1126 # define ATOMIC_INT_LOCK_FREE __CLANG_ATOMIC_INT_LOCK_FREE
1127 # define ATOMIC_LONG_LOCK_FREE __CLANG_ATOMIC_LONG_LOCK_FREE
1128 # define ATOMIC_LLONG_LOCK_FREE __CLANG_ATOMIC_LLONG_LOCK_FREE
1129 # define ATOMIC_POINTER_LOCK_FREE __CLANG_ATOMIC_POINTER_LOCK_FREE
1130 #elif defined(__GCC_ATOMIC_BOOL_LOCK_FREE)
1131 # define ATOMIC_BOOL_LOCK_FREE __GCC_ATOMIC_BOOL_LOCK_FREE
1132 # define ATOMIC_CHAR_LOCK_FREE __GCC_ATOMIC_CHAR_LOCK_FREE
1133 # define ATOMIC_CHAR16_T_LOCK_FREE __GCC_ATOMIC_CHAR16_T_LOCK_FREE
1134 # define ATOMIC_CHAR32_T_LOCK_FREE __GCC_ATOMIC_CHAR32_T_LOCK_FREE
1135 # define ATOMIC_WCHAR_T_LOCK_FREE __GCC_ATOMIC_WCHAR_T_LOCK_FREE
1136 # define ATOMIC_SHORT_LOCK_FREE __GCC_ATOMIC_SHORT_LOCK_FREE
1137 # define ATOMIC_INT_LOCK_FREE __GCC_ATOMIC_INT_LOCK_FREE
1138 # define ATOMIC_LONG_LOCK_FREE __GCC_ATOMIC_LONG_LOCK_FREE
1139 # define ATOMIC_LLONG_LOCK_FREE __GCC_ATOMIC_LLONG_LOCK_FREE
1140 # define ATOMIC_POINTER_LOCK_FREE __GCC_ATOMIC_POINTER_LOCK_FREE
1143 #ifdef _LIBCPP_ATOMIC_ONLY_USE_BUILTINS
// Lock-based fallback storage, used (under
// _LIBCPP_ATOMIC_ONLY_USE_BUILTINS) for types that are not always
// lock-free: a plain value guarded by a per-object spinlock flag.
// Several member lines (including the _Tp __a_value declaration and the
// lock/unlock loop bodies) are not visible in this chunk.
1145 template<typename _Tp>
1146 struct __cxx_atomic_lock_impl {
1148 _LIBCPP_INLINE_VISIBILITY
1149 __cxx_atomic_lock_impl() _NOEXCEPT
1150 : __a_value(), __a_lock(0) {}
1151 _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR explicit
1152 __cxx_atomic_lock_impl(_Tp value) _NOEXCEPT
1153 : __a_value(value), __a_lock(0) {}
// Spinlock flag; mutable so const accessors (e.g. __read) can lock.
1156 mutable __cxx_atomic_base_impl<_LIBCPP_ATOMIC_FLAG_TYPE> __a_lock;
// Acquire the spinlock: spin until the exchange observes the flag
// clear; acquire ordering pairs with the release store in __unlock.
1158 _LIBCPP_INLINE_VISIBILITY void __lock() const volatile {
1159 while(1 == __cxx_atomic_exchange(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(true), memory_order_acquire))
1162 _LIBCPP_INLINE_VISIBILITY void __lock() const {
1163 while(1 == __cxx_atomic_exchange(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(true), memory_order_acquire))
// Release the spinlock with a release store.
1166 _LIBCPP_INLINE_VISIBILITY void __unlock() const volatile {
1167 __cxx_atomic_store(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(false), memory_order_release);
1169 _LIBCPP_INLINE_VISIBILITY void __unlock() const {
1170 __cxx_atomic_store(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(false), memory_order_release);
// Copy the value out (presumably under the spinlock -- the __lock()/
// __unlock() call lines are not visible here; TODO confirm).
1172 _LIBCPP_INLINE_VISIBILITY _Tp __read() const volatile {
1175 __cxx_atomic_assign_volatile(__old, __a_value);
1179 _LIBCPP_INLINE_VISIBILITY _Tp __read() const {
1181 _Tp __old = __a_value;
// init/store/load/exchange for the lock-based fallback. Volatile
// overloads go through __cxx_atomic_assign_volatile; non-volatile ones
// use plain assignment. Store/exchange bodies presumably take the
// spinlock around the access (lock/unlock call lines are not visible in
// this chunk -- TODO confirm). The memory_order parameter is ignored:
// the lock provides the ordering.
1187 template <typename _Tp>
1188 _LIBCPP_INLINE_VISIBILITY
1189 void __cxx_atomic_init(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp __val) {
1190 __cxx_atomic_assign_volatile(__a->__a_value, __val);
1192 template <typename _Tp>
1193 _LIBCPP_INLINE_VISIBILITY
1194 void __cxx_atomic_init(__cxx_atomic_lock_impl<_Tp>* __a, _Tp __val) {
1195 __a->__a_value = __val;
1198 template <typename _Tp>
1199 _LIBCPP_INLINE_VISIBILITY
1200 void __cxx_atomic_store(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp __val, memory_order) {
1202 __cxx_atomic_assign_volatile(__a->__a_value, __val);
1205 template <typename _Tp>
1206 _LIBCPP_INLINE_VISIBILITY
1207 void __cxx_atomic_store(__cxx_atomic_lock_impl<_Tp>* __a, _Tp __val, memory_order) {
1209 __a->__a_value = __val;
// Load delegates to the struct's locked __read() helper.
1213 template <typename _Tp>
1214 _LIBCPP_INLINE_VISIBILITY
1215 _Tp __cxx_atomic_load(const volatile __cxx_atomic_lock_impl<_Tp>* __a, memory_order) {
1216 return __a->__read();
1218 template <typename _Tp>
1219 _LIBCPP_INLINE_VISIBILITY
1220 _Tp __cxx_atomic_load(const __cxx_atomic_lock_impl<_Tp>* __a, memory_order) {
1221 return __a->__read();
// Exchange: save the old value, write the new one, return the old.
1224 template <typename _Tp>
1225 _LIBCPP_INLINE_VISIBILITY
1226 _Tp __cxx_atomic_exchange(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp __value, memory_order) {
1229 __cxx_atomic_assign_volatile(__old, __a->__a_value);
1230 __cxx_atomic_assign_volatile(__a->__a_value, __value);
1234 template <typename _Tp>
1235 _LIBCPP_INLINE_VISIBILITY
1236 _Tp __cxx_atomic_exchange(__cxx_atomic_lock_impl<_Tp>* __a, _Tp __value, memory_order) {
1238 _Tp __old = __a->__a_value;
1239 __a->__a_value = __value;
// Compare-exchange for the lock-based fallback: compare under the lock,
// store on match, otherwise copy the observed value back into
// *__expected. Both memory_order parameters are ignored (the lock
// provides ordering). The weak variant below is implemented the same
// way as strong -- spurious failure is permitted but never produced.
1244 template <typename _Tp>
1245 _LIBCPP_INLINE_VISIBILITY
1246 bool __cxx_atomic_compare_exchange_strong(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1247 _Tp* __expected, _Tp __value, memory_order, memory_order) {
1250 __cxx_atomic_assign_volatile(__temp, __a->__a_value);
1251 bool __ret = __temp == *__expected;
1253 __cxx_atomic_assign_volatile(__a->__a_value, __value);
1255 __cxx_atomic_assign_volatile(*__expected, __a->__a_value);
1259 template <typename _Tp>
1260 _LIBCPP_INLINE_VISIBILITY
1261 bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_lock_impl<_Tp>* __a,
1262 _Tp* __expected, _Tp __value, memory_order, memory_order) {
1264 bool __ret = __a->__a_value == *__expected;
1266 __a->__a_value = __value;
1268 *__expected = __a->__a_value;
// Weak variant: identical body to strong (see note above).
1273 template <typename _Tp>
1274 _LIBCPP_INLINE_VISIBILITY
1275 bool __cxx_atomic_compare_exchange_weak(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1276 _Tp* __expected, _Tp __value, memory_order, memory_order) {
1279 __cxx_atomic_assign_volatile(__temp, __a->__a_value);
1280 bool __ret = __temp == *__expected;
1282 __cxx_atomic_assign_volatile(__a->__a_value, __value);
1284 __cxx_atomic_assign_volatile(*__expected, __a->__a_value);
1288 template <typename _Tp>
1289 _LIBCPP_INLINE_VISIBILITY
1290 bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_lock_impl<_Tp>* __a,
1291 _Tp* __expected, _Tp __value, memory_order, memory_order) {
1293 bool __ret = __a->__a_value == *__expected;
1295 __a->__a_value = __value;
1297 *__expected = __a->__a_value;
// fetch_add / fetch_sub for the lock-based fallback: read old, write
// old +/- delta, return old. The _Tp(...) casts re-narrow the result
// after integer promotion. The memory_order parameter is ignored; the
// lock provides ordering.
1302 template <typename _Tp, typename _Td>
1303 _LIBCPP_INLINE_VISIBILITY
1304 _Tp __cxx_atomic_fetch_add(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1305 _Td __delta, memory_order) {
1308 __cxx_atomic_assign_volatile(__old, __a->__a_value);
1309 __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old + __delta));
1313 template <typename _Tp, typename _Td>
1314 _LIBCPP_INLINE_VISIBILITY
1315 _Tp __cxx_atomic_fetch_add(__cxx_atomic_lock_impl<_Tp>* __a,
1316 _Td __delta, memory_order) {
1318 _Tp __old = __a->__a_value;
1319 __a->__a_value += __delta;
// Pointer overloads: delta is a ptrdiff_t measured in objects; the
// extra _Td template parameter is unused here.
1324 template <typename _Tp, typename _Td>
1325 _LIBCPP_INLINE_VISIBILITY
1326 _Tp* __cxx_atomic_fetch_add(volatile __cxx_atomic_lock_impl<_Tp*>* __a,
1327 ptrdiff_t __delta, memory_order) {
1330 __cxx_atomic_assign_volatile(__old, __a->__a_value);
1331 __cxx_atomic_assign_volatile(__a->__a_value, __old + __delta);
1335 template <typename _Tp, typename _Td>
1336 _LIBCPP_INLINE_VISIBILITY
1337 _Tp* __cxx_atomic_fetch_add(__cxx_atomic_lock_impl<_Tp*>* __a,
1338 ptrdiff_t __delta, memory_order) {
1340 _Tp* __old = __a->__a_value;
1341 __a->__a_value += __delta;
// fetch_sub mirrors fetch_add above.
1346 template <typename _Tp, typename _Td>
1347 _LIBCPP_INLINE_VISIBILITY
1348 _Tp __cxx_atomic_fetch_sub(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1349 _Td __delta, memory_order) {
1352 __cxx_atomic_assign_volatile(__old, __a->__a_value);
1353 __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old - __delta));
1357 template <typename _Tp, typename _Td>
1358 _LIBCPP_INLINE_VISIBILITY
1359 _Tp __cxx_atomic_fetch_sub(__cxx_atomic_lock_impl<_Tp>* __a,
1360 _Td __delta, memory_order) {
1362 _Tp __old = __a->__a_value;
1363 __a->__a_value -= __delta;
// Bitwise fetch_and / fetch_or / fetch_xor for the lock-based
// fallback: read old, write old OP pattern (re-narrowed via _Tp(...)),
// return old. The memory_order parameter is ignored; the lock provides
// ordering.
1368 template <typename _Tp>
1369 _LIBCPP_INLINE_VISIBILITY
1370 _Tp __cxx_atomic_fetch_and(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1371 _Tp __pattern, memory_order) {
1374 __cxx_atomic_assign_volatile(__old, __a->__a_value);
1375 __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old & __pattern));
1379 template <typename _Tp>
1380 _LIBCPP_INLINE_VISIBILITY
1381 _Tp __cxx_atomic_fetch_and(__cxx_atomic_lock_impl<_Tp>* __a,
1382 _Tp __pattern, memory_order) {
1384 _Tp __old = __a->__a_value;
1385 __a->__a_value &= __pattern;
1390 template <typename _Tp>
1391 _LIBCPP_INLINE_VISIBILITY
1392 _Tp __cxx_atomic_fetch_or(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1393 _Tp __pattern, memory_order) {
1396 __cxx_atomic_assign_volatile(__old, __a->__a_value);
1397 __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old | __pattern));
1401 template <typename _Tp>
1402 _LIBCPP_INLINE_VISIBILITY
1403 _Tp __cxx_atomic_fetch_or(__cxx_atomic_lock_impl<_Tp>* __a,
1404 _Tp __pattern, memory_order) {
1406 _Tp __old = __a->__a_value;
1407 __a->__a_value |= __pattern;
1412 template <typename _Tp>
1413 _LIBCPP_INLINE_VISIBILITY
1414 _Tp __cxx_atomic_fetch_xor(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1415 _Tp __pattern, memory_order) {
1418 __cxx_atomic_assign_volatile(__old, __a->__a_value);
1419 __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old ^ __pattern));
1423 template <typename _Tp>
1424 _LIBCPP_INLINE_VISIBILITY
1425 _Tp __cxx_atomic_fetch_xor(__cxx_atomic_lock_impl<_Tp>* __a,
1426 _Tp __pattern, memory_order) {
1428 _Tp __old = __a->__a_value;
1429 __a->__a_value ^= __pattern;
// Trait selecting between the lock-free and lock-based storage for
// __cxx_atomic_impl below. When the feature-test macro is available,
// ask the compiler directly via __atomic_always_lock_free (null pointer
// argument = assume typical alignment for the size).
1434 #ifdef __cpp_lib_atomic_is_always_lock_free
1436 template<typename _Tp> struct __cxx_is_always_lock_free {
1437 enum { __value = __atomic_always_lock_free(sizeof(_Tp), 0) }; };
// Fallback (the intervening #else lines are not visible in this chunk):
// default to false, then enumerate the types the C lock-free macros
// describe. ATOMIC_*_LOCK_FREE == 2 means "always lock-free" per the C
// macro semantics (0 = never, 1 = sometimes, 2 = always).
1441 template<typename _Tp> struct __cxx_is_always_lock_free { enum { __value = false }; };
1442 // Implementations must match the C ATOMIC_*_LOCK_FREE macro values.
1443 template<> struct __cxx_is_always_lock_free<bool> { enum { __value = 2 == ATOMIC_BOOL_LOCK_FREE }; };
1444 template<> struct __cxx_is_always_lock_free<char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; };
1445 template<> struct __cxx_is_always_lock_free<signed char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; };
1446 template<> struct __cxx_is_always_lock_free<unsigned char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; };
1447 template<> struct __cxx_is_always_lock_free<char16_t> { enum { __value = 2 == ATOMIC_CHAR16_T_LOCK_FREE }; };
1448 template<> struct __cxx_is_always_lock_free<char32_t> { enum { __value = 2 == ATOMIC_CHAR32_T_LOCK_FREE }; };
1449 template<> struct __cxx_is_always_lock_free<wchar_t> { enum { __value = 2 == ATOMIC_WCHAR_T_LOCK_FREE }; };
1450 template<> struct __cxx_is_always_lock_free<short> { enum { __value = 2 == ATOMIC_SHORT_LOCK_FREE }; };
1451 template<> struct __cxx_is_always_lock_free<unsigned short> { enum { __value = 2 == ATOMIC_SHORT_LOCK_FREE }; };
1452 template<> struct __cxx_is_always_lock_free<int> { enum { __value = 2 == ATOMIC_INT_LOCK_FREE }; };
1453 template<> struct __cxx_is_always_lock_free<unsigned int> { enum { __value = 2 == ATOMIC_INT_LOCK_FREE }; };
1454 template<> struct __cxx_is_always_lock_free<long> { enum { __value = 2 == ATOMIC_LONG_LOCK_FREE }; };
1455 template<> struct __cxx_is_always_lock_free<unsigned long> { enum { __value = 2 == ATOMIC_LONG_LOCK_FREE }; };
1456 template<> struct __cxx_is_always_lock_free<long long> { enum { __value = 2 == ATOMIC_LLONG_LOCK_FREE }; };
1457 template<> struct __cxx_is_always_lock_free<unsigned long long> { enum { __value = 2 == ATOMIC_LLONG_LOCK_FREE }; };
1458 template<typename _Tp> struct __cxx_is_always_lock_free<_Tp*> { enum { __value = 2 == ATOMIC_POINTER_LOCK_FREE }; };
1459 template<> struct __cxx_is_always_lock_free<std::nullptr_t> { enum { __value = 2 == ATOMIC_POINTER_LOCK_FREE }; };
1461 #endif //__cpp_lib_atomic_is_always_lock_free
// Final storage type used by __atomic_base: under
// _LIBCPP_ATOMIC_ONLY_USE_BUILTINS, pick lock-free storage when the
// type is always lock-free and the spinlock-guarded fallback otherwise;
// else always use the builtin-backed storage. (The #ifdef opening this
// conditional is above this span; L945 closes it.)
1463 template <typename _Tp,
1464 typename _Base = typename conditional<__cxx_is_always_lock_free<_Tp>::__value,
1465 __cxx_atomic_base_impl<_Tp>,
1466 __cxx_atomic_lock_impl<_Tp> >::type>
1468 template <typename _Tp,
1469 typename _Base = __cxx_atomic_base_impl<_Tp> >
1470 #endif //_LIBCPP_ATOMIC_ONLY_USE_BUILTINS
1471 struct __cxx_atomic_impl : public _Base {
// Guarded by compiler version -- presumably because older GCC lacks
// is_trivially_copyable (TODO confirm; _GNUC_VER is defined elsewhere).
1473 #if _GNUC_VER >= 501
1474 static_assert(is_trivially_copyable<_Tp>::value,
1475 "std::atomic<Tp> requires that 'Tp' be a trivially copyable type");
1478 _LIBCPP_INLINE_VISIBILITY __cxx_atomic_impl() _NOEXCEPT _LIBCPP_DEFAULT
1479 _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR explicit __cxx_atomic_impl(_Tp value) _NOEXCEPT
1484 using __cxx_contention_t = int32_t;
1486 using __cxx_contention_t = int64_t;
1489 #if _LIBCPP_STD_VER >= 11
1491 using __cxx_atomic_contention_t = __cxx_atomic_impl<__cxx_contention_t>;
1493 #ifndef _LIBCPP_HAS_NO_PLATFORM_WAIT
1495 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_one(void const volatile*);
1496 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_all(void const volatile*);
1497 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI __cxx_contention_t __libcpp_atomic_monitor(void const volatile*);
1498 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __libcpp_atomic_wait(void const volatile*, __cxx_contention_t);
1500 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_one(__cxx_atomic_contention_t const volatile*);
1501 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_all(__cxx_atomic_contention_t const volatile*);
1502 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI __cxx_contention_t __libcpp_atomic_monitor(__cxx_atomic_contention_t const volatile*);
1503 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __libcpp_atomic_wait(__cxx_atomic_contention_t const volatile*, __cxx_contention_t);
// Backoff policy for atomic waiting, invoked by
// __libcpp_thread_poll_with_backoff with the elapsed poll time:
// after 64us, register with the runtime monitor and block in
// __libcpp_atomic_wait; after 4us, yield the thread; below that,
// keep spinning (the remaining branch lines are not visible here).
1505 template <class _Atp, class _Fn>
1506 struct __libcpp_atomic_wait_backoff_impl {
1509 _LIBCPP_AVAILABILITY_SYNC
1510 _LIBCPP_INLINE_VISIBILITY bool operator()(chrono::nanoseconds __elapsed) const
1512 if(__elapsed > chrono::microseconds(64))
1514 auto const __monitor = __libcpp_atomic_monitor(__a);
1517 __libcpp_atomic_wait(__a, __monitor);
1519 else if(__elapsed > chrono::microseconds(4))
1520 __libcpp_thread_yield();
// Predicate-based wait entry point: poll __test_fn with the tiered
// backoff above until it returns true.
1527 template <class _Atp, class _Fn>
1528 _LIBCPP_AVAILABILITY_SYNC
1529 _LIBCPP_INLINE_VISIBILITY bool __cxx_atomic_wait(_Atp* __a, _Fn && __test_fn)
1531 __libcpp_atomic_wait_backoff_impl<_Atp, typename decay<_Fn>::type> __backoff_fn = {__a, __test_fn};
1532 return __libcpp_thread_poll_with_backoff(__test_fn, __backoff_fn);
1535 #else // _LIBCPP_HAS_NO_PLATFORM_WAIT
// Fallback when the platform provides no native wait support:
// notify_one/notify_all are no-ops, and waiting degrades to pure
// polling with the timed backoff policy.
1537 template <class _Tp>
1538 _LIBCPP_INLINE_VISIBILITY void __cxx_atomic_notify_all(__cxx_atomic_impl<_Tp> const volatile*) { }
1539 template <class _Tp>
1540 _LIBCPP_INLINE_VISIBILITY void __cxx_atomic_notify_one(__cxx_atomic_impl<_Tp> const volatile*) { }
1541 template <class _Atp, class _Fn>
1542 _LIBCPP_INLINE_VISIBILITY bool __cxx_atomic_wait(_Atp*, _Fn && __test_fn)
1544 return __libcpp_thread_poll_with_backoff(__test_fn, __libcpp_timed_backoff_policy());
1547 #endif // _LIBCPP_HAS_NO_PLATFORM_WAIT
// Predicate used by the value-based wait below: "wake" once the
// atomic's loaded value no longer compares equal to __val. (The member
// declarations for __a and __val are on lines not visible here.)
1549 template <class _Atp, class _Tp>
1550 struct __cxx_atomic_wait_test_fn_impl {
1553 memory_order __order;
1554 _LIBCPP_INLINE_VISIBILITY bool operator()() const
1556 return !__cxx_nonatomic_compare_equal(__cxx_atomic_load(__a, __order), __val);
// atomic::wait semantics: block while the value equals __val, by
// wrapping the comparison in the predicate-based __cxx_atomic_wait.
1560 template <class _Atp, class _Tp>
1561 _LIBCPP_AVAILABILITY_SYNC
1562 _LIBCPP_INLINE_VISIBILITY bool __cxx_atomic_wait(_Atp* __a, _Tp const __val, memory_order __order)
1564 __cxx_atomic_wait_test_fn_impl<_Atp, _Tp> __test_fn = {__a, __val, __order};
1565 return __cxx_atomic_wait(__a, __test_fn);
1568 #endif //_LIBCPP_STD_VER >= 11
1570 // general atomic<T>
// Primary template of the implementation behind std::atomic<T>: the
// non-arithmetic interface (load/store/exchange/CAS/wait/notify). The
// bool flag selects the integral specialization (further below) which
// layers fetch_* and operators on top; bool itself is excluded from
// that specialization.
1572 template <class _Tp, bool = is_integral<_Tp>::value && !is_same<_Tp, bool>::value>
1573 struct __atomic_base // false
1575 mutable __cxx_atomic_impl<_Tp> __a_;
1577 #if defined(__cpp_lib_atomic_is_always_lock_free)
1578 static _LIBCPP_CONSTEXPR bool is_always_lock_free = __atomic_always_lock_free(sizeof(__a_), 0);
1581 _LIBCPP_INLINE_VISIBILITY
1582 bool is_lock_free() const volatile _NOEXCEPT
1583 {return __cxx_atomic_is_lock_free(sizeof(_Tp));}
// Non-volatile overload forwards to the volatile one.
1584 _LIBCPP_INLINE_VISIBILITY
1585 bool is_lock_free() const _NOEXCEPT
1586 {return static_cast<__atomic_base const volatile*>(this)->is_lock_free();}
// store/load: the _LIBCPP_CHECK_*_MEMORY_ORDER macros diagnose orders
// the standard forbids for the operation (e.g. acquire on a store).
1587 _LIBCPP_INLINE_VISIBILITY
1588 void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1589 _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
1590 {__cxx_atomic_store(&__a_, __d, __m);}
1591 _LIBCPP_INLINE_VISIBILITY
1592 void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1593 _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
1594 {__cxx_atomic_store(&__a_, __d, __m);}
1595 _LIBCPP_INLINE_VISIBILITY
1596 _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
1597 _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
1598 {return __cxx_atomic_load(&__a_, __m);}
1599 _LIBCPP_INLINE_VISIBILITY
1600 _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
1601 _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
1602 {return __cxx_atomic_load(&__a_, __m);}
// Implicit conversion: a seq_cst load.
1603 _LIBCPP_INLINE_VISIBILITY
1604 operator _Tp() const volatile _NOEXCEPT {return load();}
1605 _LIBCPP_INLINE_VISIBILITY
1606 operator _Tp() const _NOEXCEPT {return load();}
1607 _LIBCPP_INLINE_VISIBILITY
1608 _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1609 {return __cxx_atomic_exchange(&__a_, __d, __m);}
1610 _LIBCPP_INLINE_VISIBILITY
1611 _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1612 {return __cxx_atomic_exchange(&__a_, __d, __m);}
// Two-order compare_exchange overloads (separate success/failure
// orders, checked by _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER).
1613 _LIBCPP_INLINE_VISIBILITY
1614 bool compare_exchange_weak(_Tp& __e, _Tp __d,
1615 memory_order __s, memory_order __f) volatile _NOEXCEPT
1616 _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
1617 {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
1618 _LIBCPP_INLINE_VISIBILITY
1619 bool compare_exchange_weak(_Tp& __e, _Tp __d,
1620 memory_order __s, memory_order __f) _NOEXCEPT
1621 _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
1622 {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
1623 _LIBCPP_INLINE_VISIBILITY
1624 bool compare_exchange_strong(_Tp& __e, _Tp __d,
1625 memory_order __s, memory_order __f) volatile _NOEXCEPT
1626 _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
1627 {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
1628 _LIBCPP_INLINE_VISIBILITY
1629 bool compare_exchange_strong(_Tp& __e, _Tp __d,
1630 memory_order __s, memory_order __f) _NOEXCEPT
1631 _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
1632 {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
// Single-order overloads: the same order __m is used for both the
// success and failure paths.
1633 _LIBCPP_INLINE_VISIBILITY
1634 bool compare_exchange_weak(_Tp& __e, _Tp __d,
1635 memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1636 {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
1637 _LIBCPP_INLINE_VISIBILITY
1638 bool compare_exchange_weak(_Tp& __e, _Tp __d,
1639 memory_order __m = memory_order_seq_cst) _NOEXCEPT
1640 {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
1641 _LIBCPP_INLINE_VISIBILITY
1642 bool compare_exchange_strong(_Tp& __e, _Tp __d,
1643 memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1644 {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
1645 _LIBCPP_INLINE_VISIBILITY
1646 bool compare_exchange_strong(_Tp& __e, _Tp __d,
1647 memory_order __m = memory_order_seq_cst) _NOEXCEPT
1648 {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
// C++20 wait/notify, forwarded to the __cxx_atomic_* machinery above;
// _LIBCPP_AVAILABILITY_SYNC gates these on runtime-library support.
1650 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void wait(_Tp __v, memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
1651 {__cxx_atomic_wait(&__a_, __v, __m);}
1652 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void wait(_Tp __v, memory_order __m = memory_order_seq_cst) const _NOEXCEPT
1653 {__cxx_atomic_wait(&__a_, __v, __m);}
1654 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_one() volatile _NOEXCEPT
1655 {__cxx_atomic_notify_one(&__a_);}
1656 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_one() _NOEXCEPT
1657 {__cxx_atomic_notify_one(&__a_);}
1658 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_all() volatile _NOEXCEPT
1659 {__cxx_atomic_notify_all(&__a_);}
1660 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_all() _NOEXCEPT
1661 {__cxx_atomic_notify_all(&__a_);}
1663 _LIBCPP_INLINE_VISIBILITY
1664 __atomic_base() _NOEXCEPT _LIBCPP_DEFAULT
1666 _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR
1667 __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {}
// atomic is non-copyable: deleted in C++11, declared-but-undefined in
// the C++03 branch below (the #else line is not visible in this chunk).
1669 #ifndef _LIBCPP_CXX03_LANG
1670 __atomic_base(const __atomic_base&) = delete;
1671 __atomic_base& operator=(const __atomic_base&) = delete;
1672 __atomic_base& operator=(const __atomic_base&) volatile = delete;
1675 _LIBCPP_INLINE_VISIBILITY
1676 __atomic_base(const __atomic_base&);
1677 _LIBCPP_INLINE_VISIBILITY
1678 __atomic_base& operator=(const __atomic_base&);
1679 _LIBCPP_INLINE_VISIBILITY
1680 __atomic_base& operator=(const __atomic_base&) volatile;
// Out-of-line definition of the static data member declared in-class
// above -- presumably required for ODR-use before C++17 inline
// variables (TODO confirm against the file's dialect support).
1684 #if defined(__cpp_lib_atomic_is_always_lock_free)
1685 template <class _Tp, bool __b>
1686 _LIBCPP_CONSTEXPR bool __atomic_base<_Tp, __b>::is_always_lock_free;
// Partial specialization selected for arithmetic-capable (integral) types:
// layers the read-modify-write operations (fetch_add/sub/and/or/xor) and
// the corresponding operator sugar on top of the generic
// __atomic_base<_Tp, false> load/store/exchange core.
1691 template <class _Tp>
1692 struct __atomic_base<_Tp, true>
1693 : public __atomic_base<_Tp, false>
1695 typedef __atomic_base<_Tp, false> __base;
1696 _LIBCPP_INLINE_VISIBILITY
1697 __atomic_base() _NOEXCEPT _LIBCPP_DEFAULT
1698 _LIBCPP_INLINE_VISIBILITY
1699 _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}
// Fetch operations: atomically apply the operation and return the PREVIOUS
// value; each forwards directly to the __cxx_atomic_* backend. volatile and
// non-volatile overloads are provided pairwise.
1701 _LIBCPP_INLINE_VISIBILITY
1702 _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1703 {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
1704 _LIBCPP_INLINE_VISIBILITY
1705 _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1706 {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
1707 _LIBCPP_INLINE_VISIBILITY
1708 _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1709 {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
1710 _LIBCPP_INLINE_VISIBILITY
1711 _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1712 {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
1713 _LIBCPP_INLINE_VISIBILITY
1714 _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1715 {return __cxx_atomic_fetch_and(&this->__a_, __op, __m);}
1716 _LIBCPP_INLINE_VISIBILITY
1717 _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1718 {return __cxx_atomic_fetch_and(&this->__a_, __op, __m);}
1719 _LIBCPP_INLINE_VISIBILITY
1720 _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1721 {return __cxx_atomic_fetch_or(&this->__a_, __op, __m);}
1722 _LIBCPP_INLINE_VISIBILITY
1723 _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1724 {return __cxx_atomic_fetch_or(&this->__a_, __op, __m);}
1725 _LIBCPP_INLINE_VISIBILITY
1726 _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1727 {return __cxx_atomic_fetch_xor(&this->__a_, __op, __m);}
1728 _LIBCPP_INLINE_VISIBILITY
1729 _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1730 {return __cxx_atomic_fetch_xor(&this->__a_, __op, __m);}
// Operator forms (all seq_cst): post-increment/decrement return the OLD
// value; the pre-forms and compound assignments return the NEW value,
// recomputed locally from the fetched old value (e.g. fetch_add(x) + x).
1732 _LIBCPP_INLINE_VISIBILITY
1733 _Tp operator++(int) volatile _NOEXCEPT {return fetch_add(_Tp(1));}
1734 _LIBCPP_INLINE_VISIBILITY
1735 _Tp operator++(int) _NOEXCEPT {return fetch_add(_Tp(1));}
1736 _LIBCPP_INLINE_VISIBILITY
1737 _Tp operator--(int) volatile _NOEXCEPT {return fetch_sub(_Tp(1));}
1738 _LIBCPP_INLINE_VISIBILITY
1739 _Tp operator--(int) _NOEXCEPT {return fetch_sub(_Tp(1));}
1740 _LIBCPP_INLINE_VISIBILITY
1741 _Tp operator++() volatile _NOEXCEPT {return fetch_add(_Tp(1)) + _Tp(1);}
1742 _LIBCPP_INLINE_VISIBILITY
1743 _Tp operator++() _NOEXCEPT {return fetch_add(_Tp(1)) + _Tp(1);}
1744 _LIBCPP_INLINE_VISIBILITY
1745 _Tp operator--() volatile _NOEXCEPT {return fetch_sub(_Tp(1)) - _Tp(1);}
1746 _LIBCPP_INLINE_VISIBILITY
1747 _Tp operator--() _NOEXCEPT {return fetch_sub(_Tp(1)) - _Tp(1);}
1748 _LIBCPP_INLINE_VISIBILITY
1749 _Tp operator+=(_Tp __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
1750 _LIBCPP_INLINE_VISIBILITY
1751 _Tp operator+=(_Tp __op) _NOEXCEPT {return fetch_add(__op) + __op;}
1752 _LIBCPP_INLINE_VISIBILITY
1753 _Tp operator-=(_Tp __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
1754 _LIBCPP_INLINE_VISIBILITY
1755 _Tp operator-=(_Tp __op) _NOEXCEPT {return fetch_sub(__op) - __op;}
1756 _LIBCPP_INLINE_VISIBILITY
1757 _Tp operator&=(_Tp __op) volatile _NOEXCEPT {return fetch_and(__op) & __op;}
1758 _LIBCPP_INLINE_VISIBILITY
1759 _Tp operator&=(_Tp __op) _NOEXCEPT {return fetch_and(__op) & __op;}
1760 _LIBCPP_INLINE_VISIBILITY
1761 _Tp operator|=(_Tp __op) volatile _NOEXCEPT {return fetch_or(__op) | __op;}
1762 _LIBCPP_INLINE_VISIBILITY
1763 _Tp operator|=(_Tp __op) _NOEXCEPT {return fetch_or(__op) | __op;}
1764 _LIBCPP_INLINE_VISIBILITY
1765 _Tp operator^=(_Tp __op) volatile _NOEXCEPT {return fetch_xor(__op) ^ __op;}
1766 _LIBCPP_INLINE_VISIBILITY
1767 _Tp operator^=(_Tp __op) _NOEXCEPT {return fetch_xor(__op) ^ __op;}
// Primary template atomic<_Tp>: inherits the full load/store/exchange (and,
// for integral _Tp, arithmetic) API from __atomic_base. Assignment from _Tp
// stores with the default seq_cst ordering and, per the standard, returns
// the stored value rather than a reference to *this.
1772 template <class _Tp>
1774 : public __atomic_base<_Tp>
1776 typedef __atomic_base<_Tp> __base;
1777 typedef _Tp value_type;
1778 _LIBCPP_INLINE_VISIBILITY
1779 atomic() _NOEXCEPT _LIBCPP_DEFAULT
1780 _LIBCPP_INLINE_VISIBILITY
1781 _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}
1783 _LIBCPP_INLINE_VISIBILITY
1784 _Tp operator=(_Tp __d) volatile _NOEXCEPT
1785 {__base::store(__d); return __d;}
1786 _LIBCPP_INLINE_VISIBILITY
1787 _Tp operator=(_Tp __d) _NOEXCEPT
1788 {__base::store(__d); return __d;}
// Partial specialization atomic<_Tp*> for pointers: adds fetch_add/fetch_sub
// taking a ptrdiff_t offset plus pointer-arithmetic operators. The offset is
// expected to be applied in units of _Tp (ordinary pointer arithmetic) by
// the __cxx_atomic_* backend.
1793 template <class _Tp>
1795 : public __atomic_base<_Tp*>
1797 typedef __atomic_base<_Tp*> __base;
1798 typedef _Tp* value_type;
1799 _LIBCPP_INLINE_VISIBILITY
1800 atomic() _NOEXCEPT _LIBCPP_DEFAULT
1801 _LIBCPP_INLINE_VISIBILITY
1802 _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}
// Assignment stores seq_cst and returns the stored pointer (not *this).
1804 _LIBCPP_INLINE_VISIBILITY
1805 _Tp* operator=(_Tp* __d) volatile _NOEXCEPT
1806 {__base::store(__d); return __d;}
1807 _LIBCPP_INLINE_VISIBILITY
1808 _Tp* operator=(_Tp* __d) _NOEXCEPT
1809 {__base::store(__d); return __d;}
// fetch_add/fetch_sub return the PREVIOUS pointer value.
1811 _LIBCPP_INLINE_VISIBILITY
1812 _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
1814 {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
1815 _LIBCPP_INLINE_VISIBILITY
1816 _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1817 {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
1818 _LIBCPP_INLINE_VISIBILITY
1819 _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
1821 {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
1822 _LIBCPP_INLINE_VISIBILITY
1823 _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1824 {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
// Operator forms: post-inc/dec return the OLD pointer; pre-forms and
// compound assignments return the NEW pointer, recomputed from the old one.
1826 _LIBCPP_INLINE_VISIBILITY
1827 _Tp* operator++(int) volatile _NOEXCEPT {return fetch_add(1);}
1828 _LIBCPP_INLINE_VISIBILITY
1829 _Tp* operator++(int) _NOEXCEPT {return fetch_add(1);}
1830 _LIBCPP_INLINE_VISIBILITY
1831 _Tp* operator--(int) volatile _NOEXCEPT {return fetch_sub(1);}
1832 _LIBCPP_INLINE_VISIBILITY
1833 _Tp* operator--(int) _NOEXCEPT {return fetch_sub(1);}
1834 _LIBCPP_INLINE_VISIBILITY
1835 _Tp* operator++() volatile _NOEXCEPT {return fetch_add(1) + 1;}
1836 _LIBCPP_INLINE_VISIBILITY
1837 _Tp* operator++() _NOEXCEPT {return fetch_add(1) + 1;}
1838 _LIBCPP_INLINE_VISIBILITY
1839 _Tp* operator--() volatile _NOEXCEPT {return fetch_sub(1) - 1;}
1840 _LIBCPP_INLINE_VISIBILITY
1841 _Tp* operator--() _NOEXCEPT {return fetch_sub(1) - 1;}
1842 _LIBCPP_INLINE_VISIBILITY
1843 _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
1844 _LIBCPP_INLINE_VISIBILITY
1845 _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT {return fetch_add(__op) + __op;}
1846 _LIBCPP_INLINE_VISIBILITY
1847 _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
1848 _LIBCPP_INLINE_VISIBILITY
1849 _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT {return fetch_sub(__op) - __op;}
1852 // atomic_is_lock_free
// C-compatible free function: forwards to the member is_lock_free().
1854 template <class _Tp>
1855 _LIBCPP_INLINE_VISIBILITY
1857 atomic_is_lock_free(const volatile atomic<_Tp>* __o) _NOEXCEPT
1859 return __o->is_lock_free();
1862 template <class _Tp>
1863 _LIBCPP_INLINE_VISIBILITY
1865 atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT
1867 return __o->is_lock_free();
// atomic_init
// Non-atomic initialization of an atomic object; writes the underlying
// representation directly via __cxx_atomic_init (no synchronization).
1872 template <class _Tp>
1873 _LIBCPP_INLINE_VISIBILITY
1875 atomic_init(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1877 __cxx_atomic_init(&__o->__a_, __d);
1880 template <class _Tp>
1881 _LIBCPP_INLINE_VISIBILITY
1883 atomic_init(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1885 __cxx_atomic_init(&__o->__a_, __d);
// atomic_store
// Free-function store; the _explicit variants take a memory_order and
// diagnose orders that are invalid for a store (the _LIBCPP_CHECK_* macros
// appear to emit a diagnostic for consume/acquire/acq_rel -- confirm against
// the macro definitions, which are outside this excerpt).
1890 template <class _Tp>
1891 _LIBCPP_INLINE_VISIBILITY
1893 atomic_store(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1898 template <class _Tp>
1899 _LIBCPP_INLINE_VISIBILITY
1901 atomic_store(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1906 // atomic_store_explicit
1908 template <class _Tp>
1909 _LIBCPP_INLINE_VISIBILITY
1911 atomic_store_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1912 _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
1914 __o->store(__d, __m);
1917 template <class _Tp>
1918 _LIBCPP_INLINE_VISIBILITY
1920 atomic_store_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1921 _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
1923 __o->store(__d, __m);
// atomic_load
// Free-function load; the _explicit variants similarly diagnose orders
// invalid for a load (release/acq_rel).
1928 template <class _Tp>
1929 _LIBCPP_INLINE_VISIBILITY
1931 atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT
1936 template <class _Tp>
1937 _LIBCPP_INLINE_VISIBILITY
1939 atomic_load(const atomic<_Tp>* __o) _NOEXCEPT
1944 // atomic_load_explicit
1946 template <class _Tp>
1947 _LIBCPP_INLINE_VISIBILITY
1949 atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
1950 _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
1952 return __o->load(__m);
1955 template <class _Tp>
1956 _LIBCPP_INLINE_VISIBILITY
1958 atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
1959 _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
1961 return __o->load(__m);
// atomic_exchange
// Atomically replaces the value and returns the PREVIOUS value; the
// non-explicit form uses the member's default seq_cst ordering.
1966 template <class _Tp>
1967 _LIBCPP_INLINE_VISIBILITY
1969 atomic_exchange(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1971 return __o->exchange(__d);
1974 template <class _Tp>
1975 _LIBCPP_INLINE_VISIBILITY
1977 atomic_exchange(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1979 return __o->exchange(__d);
1982 // atomic_exchange_explicit
1984 template <class _Tp>
1985 _LIBCPP_INLINE_VISIBILITY
1987 atomic_exchange_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1989 return __o->exchange(__d, __m);
1992 template <class _Tp>
1993 _LIBCPP_INLINE_VISIBILITY
1995 atomic_exchange_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1997 return __o->exchange(__d, __m);
2000 // atomic_compare_exchange_weak
// CAS free functions: *__e is the expected value, updated with the observed
// value on failure; __d is the desired value. The weak form may fail
// spuriously. All forward to the corresponding member function.
2002 template <class _Tp>
2003 _LIBCPP_INLINE_VISIBILITY
2005 atomic_compare_exchange_weak(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
2007 return __o->compare_exchange_weak(*__e, __d);
2010 template <class _Tp>
2011 _LIBCPP_INLINE_VISIBILITY
2013 atomic_compare_exchange_weak(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
2015 return __o->compare_exchange_weak(*__e, __d);
2018 // atomic_compare_exchange_strong
2020 template <class _Tp>
2021 _LIBCPP_INLINE_VISIBILITY
2023 atomic_compare_exchange_strong(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
2025 return __o->compare_exchange_strong(*__e, __d);
2028 template <class _Tp>
2029 _LIBCPP_INLINE_VISIBILITY
2031 atomic_compare_exchange_strong(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
2033 return __o->compare_exchange_strong(*__e, __d);
2036 // atomic_compare_exchange_weak_explicit
// __s is the success order, __f the failure order; the check macro
// presumably diagnoses failure orders that are invalid (e.g. stronger than
// success, or release/acq_rel) -- macro definition is outside this excerpt.
2038 template <class _Tp>
2039 _LIBCPP_INLINE_VISIBILITY
2041 atomic_compare_exchange_weak_explicit(volatile atomic<_Tp>* __o, _Tp* __e,
2043 memory_order __s, memory_order __f) _NOEXCEPT
2044 _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
2046 return __o->compare_exchange_weak(*__e, __d, __s, __f);
2049 template <class _Tp>
2050 _LIBCPP_INLINE_VISIBILITY
2052 atomic_compare_exchange_weak_explicit(atomic<_Tp>* __o, _Tp* __e, _Tp __d,
2053 memory_order __s, memory_order __f) _NOEXCEPT
2054 _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
2056 return __o->compare_exchange_weak(*__e, __d, __s, __f);
2059 // atomic_compare_exchange_strong_explicit
2061 template <class _Tp>
2062 _LIBCPP_INLINE_VISIBILITY
2064 atomic_compare_exchange_strong_explicit(volatile atomic<_Tp>* __o,
2066 memory_order __s, memory_order __f) _NOEXCEPT
2067 _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
2069 return __o->compare_exchange_strong(*__e, __d, __s, __f);
2072 template <class _Tp>
2073 _LIBCPP_INLINE_VISIBILITY
2075 atomic_compare_exchange_strong_explicit(atomic<_Tp>* __o, _Tp* __e,
2077 memory_order __s, memory_order __f) _NOEXCEPT
2078 _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
2080 return __o->compare_exchange_strong(*__e, __d, __s, __f);
// atomic_wait (C++20)
// Blocks until the atomic's value is observed to differ from __v; forwards
// to the member wait(). _LIBCPP_AVAILABILITY_SYNC gates availability on
// deployment targets that ship the waiting/notify runtime support.
2085 template <class _Tp>
2086 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2087 void atomic_wait(const volatile atomic<_Tp>* __o,
2088 typename atomic<_Tp>::value_type __v) _NOEXCEPT
2090 return __o->wait(__v);
2093 template <class _Tp>
2094 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2095 void atomic_wait(const atomic<_Tp>* __o,
2096 typename atomic<_Tp>::value_type __v) _NOEXCEPT
2098 return __o->wait(__v);
2101 // atomic_wait_explicit
// Same, with an explicit memory order for the internal loads; the check
// macro flags orders invalid for a load (release/acq_rel).
2103 template <class _Tp>
2104 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2105 void atomic_wait_explicit(const volatile atomic<_Tp>* __o,
2106 typename atomic<_Tp>::value_type __v,
2107 memory_order __m) _NOEXCEPT
2108 _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
2110 return __o->wait(__v, __m);
2113 template <class _Tp>
2114 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2115 void atomic_wait_explicit(const atomic<_Tp>* __o,
2116 typename atomic<_Tp>::value_type __v,
2117 memory_order __m) _NOEXCEPT
2118 _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
2120 return __o->wait(__v, __m);
2123 // atomic_notify_one
// Wakes at most one thread blocked in atomic_wait on *__o.
2125 template <class _Tp>
2126 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2127 void atomic_notify_one(volatile atomic<_Tp>* __o) _NOEXCEPT
2131 template <class _Tp>
2132 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2133 void atomic_notify_one(atomic<_Tp>* __o) _NOEXCEPT
2138 // atomic_notify_all
// (Section comment fixed: it previously repeated "atomic_notify_one".)
// Wakes all threads blocked in atomic_wait on *__o.
2140 template <class _Tp>
2141 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2142 void atomic_notify_all(volatile atomic<_Tp>* __o) _NOEXCEPT
2146 template <class _Tp>
2147 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2148 void atomic_notify_all(atomic<_Tp>* __o) _NOEXCEPT
// atomic_fetch_add
// Integral overloads are SFINAE-constrained (visible fragments:
// is_integral && !is_same<_Tp, bool>) so atomic<bool> is excluded; pointer
// overloads take a ptrdiff_t offset. All return the PREVIOUS value.
2155 template <class _Tp>
2156 _LIBCPP_INLINE_VISIBILITY
2159 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2162 atomic_fetch_add(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
2164 return __o->fetch_add(__op);
2167 template <class _Tp>
2168 _LIBCPP_INLINE_VISIBILITY
2171 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2174 atomic_fetch_add(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
2176 return __o->fetch_add(__op);
2179 template <class _Tp>
2180 _LIBCPP_INLINE_VISIBILITY
2182 atomic_fetch_add(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
2184 return __o->fetch_add(__op);
2187 template <class _Tp>
2188 _LIBCPP_INLINE_VISIBILITY
2190 atomic_fetch_add(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
2192 return __o->fetch_add(__op);
2195 // atomic_fetch_add_explicit
2197 template <class _Tp>
2198 _LIBCPP_INLINE_VISIBILITY
2201 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2204 atomic_fetch_add_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
2206 return __o->fetch_add(__op, __m);
2209 template <class _Tp>
2210 _LIBCPP_INLINE_VISIBILITY
2213 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2216 atomic_fetch_add_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
2218 return __o->fetch_add(__op, __m);
2221 template <class _Tp>
2222 _LIBCPP_INLINE_VISIBILITY
2224 atomic_fetch_add_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
2225 memory_order __m) _NOEXCEPT
2227 return __o->fetch_add(__op, __m);
2230 template <class _Tp>
2231 _LIBCPP_INLINE_VISIBILITY
2233 atomic_fetch_add_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
2235 return __o->fetch_add(__op, __m);
// atomic_fetch_sub
// Mirror of atomic_fetch_add: integral overloads constrained to non-bool
// integral types, pointer overloads take a ptrdiff_t; returns the PREVIOUS
// value. _explicit variants add a caller-chosen memory order.
2240 template <class _Tp>
2241 _LIBCPP_INLINE_VISIBILITY
2244 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2247 atomic_fetch_sub(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
2249 return __o->fetch_sub(__op);
2252 template <class _Tp>
2253 _LIBCPP_INLINE_VISIBILITY
2256 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2259 atomic_fetch_sub(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
2261 return __o->fetch_sub(__op);
2264 template <class _Tp>
2265 _LIBCPP_INLINE_VISIBILITY
2267 atomic_fetch_sub(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
2269 return __o->fetch_sub(__op);
2272 template <class _Tp>
2273 _LIBCPP_INLINE_VISIBILITY
2275 atomic_fetch_sub(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
2277 return __o->fetch_sub(__op);
2280 // atomic_fetch_sub_explicit
2282 template <class _Tp>
2283 _LIBCPP_INLINE_VISIBILITY
2286 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2289 atomic_fetch_sub_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
2291 return __o->fetch_sub(__op, __m);
2294 template <class _Tp>
2295 _LIBCPP_INLINE_VISIBILITY
2298 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2301 atomic_fetch_sub_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
2303 return __o->fetch_sub(__op, __m);
2306 template <class _Tp>
2307 _LIBCPP_INLINE_VISIBILITY
2309 atomic_fetch_sub_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
2310 memory_order __m) _NOEXCEPT
2312 return __o->fetch_sub(__op, __m);
2315 template <class _Tp>
2316 _LIBCPP_INLINE_VISIBILITY
2318 atomic_fetch_sub_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
2320 return __o->fetch_sub(__op, __m);
// atomic_fetch_and / atomic_fetch_or / atomic_fetch_xor
// Bitwise read-modify-write free functions; integral-only (non-bool) via
// the visible enable_if constraint -- no pointer overloads exist for the
// bitwise operations. Each returns the PREVIOUS value.
2325 template <class _Tp>
2326 _LIBCPP_INLINE_VISIBILITY
2329 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2332 atomic_fetch_and(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
2334 return __o->fetch_and(__op);
2337 template <class _Tp>
2338 _LIBCPP_INLINE_VISIBILITY
2341 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2344 atomic_fetch_and(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
2346 return __o->fetch_and(__op);
2349 // atomic_fetch_and_explicit
2351 template <class _Tp>
2352 _LIBCPP_INLINE_VISIBILITY
2355 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2358 atomic_fetch_and_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
2360 return __o->fetch_and(__op, __m);
2363 template <class _Tp>
2364 _LIBCPP_INLINE_VISIBILITY
2367 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2370 atomic_fetch_and_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
2372 return __o->fetch_and(__op, __m);
// atomic_fetch_or
2377 template <class _Tp>
2378 _LIBCPP_INLINE_VISIBILITY
2381 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2384 atomic_fetch_or(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
2386 return __o->fetch_or(__op);
2389 template <class _Tp>
2390 _LIBCPP_INLINE_VISIBILITY
2393 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2396 atomic_fetch_or(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
2398 return __o->fetch_or(__op);
2401 // atomic_fetch_or_explicit
2403 template <class _Tp>
2404 _LIBCPP_INLINE_VISIBILITY
2407 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2410 atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
2412 return __o->fetch_or(__op, __m);
2415 template <class _Tp>
2416 _LIBCPP_INLINE_VISIBILITY
2419 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2422 atomic_fetch_or_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
2424 return __o->fetch_or(__op, __m);
// atomic_fetch_xor
2429 template <class _Tp>
2430 _LIBCPP_INLINE_VISIBILITY
2433 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2436 atomic_fetch_xor(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
2438 return __o->fetch_xor(__op);
2441 template <class _Tp>
2442 _LIBCPP_INLINE_VISIBILITY
2445 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2448 atomic_fetch_xor(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
2450 return __o->fetch_xor(__op);
2453 // atomic_fetch_xor_explicit
2455 template <class _Tp>
2456 _LIBCPP_INLINE_VISIBILITY
2459 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2462 atomic_fetch_xor_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
2464 return __o->fetch_xor(__op, __m);
2467 template <class _Tp>
2468 _LIBCPP_INLINE_VISIBILITY
2471 is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2474 atomic_fetch_xor_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
2476 return __o->fetch_xor(__op, __m);
2479 // flag type and operations
// atomic_flag: the one type guaranteed lock-free by the standard. Backed by
// a __cxx_atomic_impl over _LIBCPP_ATOMIC_FLAG_TYPE; test() is the C++20
// addition, wait/notify are the C++20 blocking operations.
2481 typedef struct atomic_flag
2483 __cxx_atomic_impl<_LIBCPP_ATOMIC_FLAG_TYPE> __a_;
// test(): returns whether the flag is currently set (pure load, no
// modification). Compares the loaded representation against true.
2485 _LIBCPP_INLINE_VISIBILITY
2486 bool test(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
2487 {return _LIBCPP_ATOMIC_FLAG_TYPE(true) == __cxx_atomic_load(&__a_, __m);}
2488 _LIBCPP_INLINE_VISIBILITY
2489 bool test(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
2490 {return _LIBCPP_ATOMIC_FLAG_TYPE(true) == __cxx_atomic_load(&__a_, __m);}
// test_and_set(): atomically sets the flag and returns its PREVIOUS state
// (implemented as an exchange with true).
2492 _LIBCPP_INLINE_VISIBILITY
2493 bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
2494 {return __cxx_atomic_exchange(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(true), __m);}
2495 _LIBCPP_INLINE_VISIBILITY
2496 bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT
2497 {return __cxx_atomic_exchange(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(true), __m);}
// clear(): atomically resets the flag to false.
2498 _LIBCPP_INLINE_VISIBILITY
2499 void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
2500 {__cxx_atomic_store(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(false), __m);}
2501 _LIBCPP_INLINE_VISIBILITY
2502 void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT
2503 {__cxx_atomic_store(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(false), __m);}
// C++20 wait/notify: block until the observed value differs from __v; wake
// one/all blocked waiters. Availability-gated like the atomic<T> versions.
2505 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2506 void wait(bool __v, memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
2507 {__cxx_atomic_wait(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(__v), __m);}
2508 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2509 void wait(bool __v, memory_order __m = memory_order_seq_cst) const _NOEXCEPT
2510 {__cxx_atomic_wait(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(__v), __m);}
2511 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2512 void notify_one() volatile _NOEXCEPT
2513 {__cxx_atomic_notify_one(&__a_);}
2514 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2515 void notify_one() _NOEXCEPT
2516 {__cxx_atomic_notify_one(&__a_);}
2517 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2518 void notify_all() volatile _NOEXCEPT
2519 {__cxx_atomic_notify_all(&__a_);}
2520 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2521 void notify_all() _NOEXCEPT
2522 {__cxx_atomic_notify_all(&__a_);}
2524 _LIBCPP_INLINE_VISIBILITY
2525 atomic_flag() _NOEXCEPT _LIBCPP_DEFAULT
// libc++ extension: direct construction from a bool initial value.
2527 _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR
2528 atomic_flag(bool __b) _NOEXCEPT : __a_(__b) {} // EXTENSION
// Non-copyable: deleted in C++11 and later; in the C++03 branch below the
// copy operations are declared but never defined instead.
2530 #ifndef _LIBCPP_CXX03_LANG
2531 atomic_flag(const atomic_flag&) = delete;
2532 atomic_flag& operator=(const atomic_flag&) = delete;
2533 atomic_flag& operator=(const atomic_flag&) volatile = delete;
2536 _LIBCPP_INLINE_VISIBILITY
2537 atomic_flag(const atomic_flag&);
2538 _LIBCPP_INLINE_VISIBILITY
2539 atomic_flag& operator=(const atomic_flag&);
2540 _LIBCPP_INLINE_VISIBILITY
2541 atomic_flag& operator=(const atomic_flag&) volatile;
// atomic_flag_test / atomic_flag_test_explicit (C++20)
// C-compatible free functions; each forwards to the member of the same name.
2546 inline _LIBCPP_INLINE_VISIBILITY
2548 atomic_flag_test(const volatile atomic_flag* __o) _NOEXCEPT
2553 inline _LIBCPP_INLINE_VISIBILITY
2555 atomic_flag_test(const atomic_flag* __o) _NOEXCEPT
2560 inline _LIBCPP_INLINE_VISIBILITY
2562 atomic_flag_test_explicit(const volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
2564 return __o->test(__m);
2567 inline _LIBCPP_INLINE_VISIBILITY
2569 atomic_flag_test_explicit(const atomic_flag* __o, memory_order __m) _NOEXCEPT
2571 return __o->test(__m);
// atomic_flag_test_and_set / _explicit: set the flag, return its OLD state.
2574 inline _LIBCPP_INLINE_VISIBILITY
2576 atomic_flag_test_and_set(volatile atomic_flag* __o) _NOEXCEPT
2578 return __o->test_and_set();
2581 inline _LIBCPP_INLINE_VISIBILITY
2583 atomic_flag_test_and_set(atomic_flag* __o) _NOEXCEPT
2585 return __o->test_and_set();
2588 inline _LIBCPP_INLINE_VISIBILITY
2590 atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
2592 return __o->test_and_set(__m);
2595 inline _LIBCPP_INLINE_VISIBILITY
2597 atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
2599 return __o->test_and_set(__m);
// atomic_flag_clear / _explicit: reset the flag to false.
2602 inline _LIBCPP_INLINE_VISIBILITY
2604 atomic_flag_clear(volatile atomic_flag* __o) _NOEXCEPT
2609 inline _LIBCPP_INLINE_VISIBILITY
2611 atomic_flag_clear(atomic_flag* __o) _NOEXCEPT
2616 inline _LIBCPP_INLINE_VISIBILITY
2618 atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
2623 inline _LIBCPP_INLINE_VISIBILITY
2625 atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
// atomic_flag_wait / _explicit and notify_one/notify_all (C++20): blocking
// wait until the flag differs from __v, and the matching wake-ups.
2630 inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2632 atomic_flag_wait(const volatile atomic_flag* __o, bool __v) _NOEXCEPT
2637 inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2639 atomic_flag_wait(const atomic_flag* __o, bool __v) _NOEXCEPT
2644 inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2646 atomic_flag_wait_explicit(const volatile atomic_flag* __o,
2647 bool __v, memory_order __m) _NOEXCEPT
2649 __o->wait(__v, __m);
2652 inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2654 atomic_flag_wait_explicit(const atomic_flag* __o,
2655 bool __v, memory_order __m) _NOEXCEPT
2657 __o->wait(__v, __m);
2660 inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2662 atomic_flag_notify_one(volatile atomic_flag* __o) _NOEXCEPT
2667 inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2669 atomic_flag_notify_one(atomic_flag* __o) _NOEXCEPT
2674 inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2676 atomic_flag_notify_all(volatile atomic_flag* __o) _NOEXCEPT
2681 inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2683 atomic_flag_notify_all(atomic_flag* __o) _NOEXCEPT
// Fences: atomic_thread_fence orders memory operations across threads;
// atomic_signal_fence orders only between a thread and a signal handler
// running in it (compiler barrier). Both forward to the __cxx_atomic_* layer.
2690 inline _LIBCPP_INLINE_VISIBILITY
2692 atomic_thread_fence(memory_order __m) _NOEXCEPT
2694 __cxx_atomic_thread_fence(__m);
2697 inline _LIBCPP_INLINE_VISIBILITY
2699 atomic_signal_fence(memory_order __m) _NOEXCEPT
2701 __cxx_atomic_signal_fence(__m);
2704 // Atomics for standard typedef types
// Convenience aliases required by [atomics.syn]: one atomic_X typedef for
// each builtin integer/character type and each <cstdint> typedef.
2706 typedef atomic<bool> atomic_bool;
2707 typedef atomic<char> atomic_char;
2708 typedef atomic<signed char> atomic_schar;
2709 typedef atomic<unsigned char> atomic_uchar;
2710 typedef atomic<short> atomic_short;
2711 typedef atomic<unsigned short> atomic_ushort;
2712 typedef atomic<int> atomic_int;
2713 typedef atomic<unsigned int> atomic_uint;
2714 typedef atomic<long> atomic_long;
2715 typedef atomic<unsigned long> atomic_ulong;
2716 typedef atomic<long long> atomic_llong;
2717 typedef atomic<unsigned long long> atomic_ullong;
2718 typedef atomic<char16_t> atomic_char16_t;
2719 typedef atomic<char32_t> atomic_char32_t;
2720 typedef atomic<wchar_t> atomic_wchar_t;
// Least-width integer typedefs.
2722 typedef atomic<int_least8_t> atomic_int_least8_t;
2723 typedef atomic<uint_least8_t> atomic_uint_least8_t;
2724 typedef atomic<int_least16_t> atomic_int_least16_t;
2725 typedef atomic<uint_least16_t> atomic_uint_least16_t;
2726 typedef atomic<int_least32_t> atomic_int_least32_t;
2727 typedef atomic<uint_least32_t> atomic_uint_least32_t;
2728 typedef atomic<int_least64_t> atomic_int_least64_t;
2729 typedef atomic<uint_least64_t> atomic_uint_least64_t;
// Fastest-width integer typedefs.
2731 typedef atomic<int_fast8_t> atomic_int_fast8_t;
2732 typedef atomic<uint_fast8_t> atomic_uint_fast8_t;
2733 typedef atomic<int_fast16_t> atomic_int_fast16_t;
2734 typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
2735 typedef atomic<int_fast32_t> atomic_int_fast32_t;
2736 typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
2737 typedef atomic<int_fast64_t> atomic_int_fast64_t;
2738 typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
// Exact-width integer typedefs.
2740 typedef atomic< int8_t> atomic_int8_t;
2741 typedef atomic<uint8_t> atomic_uint8_t;
2742 typedef atomic< int16_t> atomic_int16_t;
2743 typedef atomic<uint16_t> atomic_uint16_t;
2744 typedef atomic< int32_t> atomic_int32_t;
2745 typedef atomic<uint32_t> atomic_uint32_t;
2746 typedef atomic< int64_t> atomic_int64_t;
2747 typedef atomic<uint64_t> atomic_uint64_t;
// Pointer-sized and max-width typedefs.
2749 typedef atomic<intptr_t> atomic_intptr_t;
2750 typedef atomic<uintptr_t> atomic_uintptr_t;
2751 typedef atomic<size_t> atomic_size_t;
2752 typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
2753 typedef atomic<intmax_t> atomic_intmax_t;
2754 typedef atomic<uintmax_t> atomic_uintmax_t;
2756 // atomic_*_lock_free : prefer the contention type most highly, then the largest lock-free type
// C++20 atomic_signed_lock_free / atomic_unsigned_lock_free: pick
// __cxx_contention_t (the type the platform wait/notify machinery uses)
// whenever it is statically known lock-free, otherwise fall back through
// the ATOMIC_*_LOCK_FREE == 2 (always lock-free) ladder, largest first.
2758 #ifdef __cpp_lib_atomic_is_always_lock_free
2759 # define _LIBCPP_CONTENTION_LOCK_FREE __atomic_always_lock_free(sizeof(__cxx_contention_t), 0)
2761 # define _LIBCPP_CONTENTION_LOCK_FREE false
2764 #if ATOMIC_LLONG_LOCK_FREE == 2
2765 typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, long long>::type __libcpp_signed_lock_free;
2766 typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned long long>::type __libcpp_unsigned_lock_free;
2767 #elif ATOMIC_INT_LOCK_FREE == 2
2768 typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, int>::type __libcpp_signed_lock_free;
2769 typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned int>::type __libcpp_unsigned_lock_free;
2770 #elif ATOMIC_SHORT_LOCK_FREE == 2
2771 typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, short>::type __libcpp_signed_lock_free;
2772 typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned short>::type __libcpp_unsigned_lock_free;
2773 #elif ATOMIC_CHAR_LOCK_FREE == 2
2774 typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, char>::type __libcpp_signed_lock_free;
2775 typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned char>::type __libcpp_unsigned_lock_free;
2777 // No signed/unsigned lock-free types
// If no type is always lock-free, __libcpp_*_lock_free stay undefined and
// the aliases below will fail to compile when used -- the standard makes
// these typedefs optional on such targets.
2780 typedef atomic<__libcpp_signed_lock_free> atomic_signed_lock_free;
2781 typedef atomic<__libcpp_unsigned_lock_free> atomic_unsigned_lock_free;
// C-compatibility initializer macros. Note ATOMIC_VAR_INIT is deprecated in
// C++20 (direct initialization works); kept for source compatibility.
2783 #define ATOMIC_FLAG_INIT {false}
2784 #define ATOMIC_VAR_INIT(__v) {__v}
2786 _LIBCPP_END_NAMESPACE_STD
2788 #endif // _LIBCPP_ATOMIC