2 * Copyright (c) 2011 Ed Schouten <ed@FreeBSD.org>
3 * David Chisnall <theraven@FreeBSD.org>
6 * Redistribution and use in source and binary forms, with or without
7 * modification, are permitted provided that the following conditions
9 * 1. Redistributions of source code must retain the above copyright
10 * notice, this list of conditions and the following disclaimer.
11 * 2. Redistributions in binary form must reproduce the above copyright
12 * notice, this list of conditions and the following disclaimer in the
13 * documentation and/or other materials provided with the distribution.
15 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
16 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
17 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
18 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
19 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
20 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
21 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
22 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
23 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
24 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
33 #include <sys/cdefs.h>
34 #include <sys/_types.h>
/*
 * Pick an implementation strategy for the atomic operations below:
 *   __CLANG_ATOMICS - clang provides the __c11_atomic_* builtins.
 *   __GNUC_ATOMICS  - GCC >= 4.7 provides the __atomic_* builtins.
 *   neither defined - an older GCC; the __sync_* builtins are used instead.
 * Compilers that are not GCC-compatible are rejected outright.
 * NOTE(review): the #endif terminating this conditional chain is not
 * visible in this excerpt - confirm it is present in the full file.
 */
36 #if __has_extension(c_atomic) || __has_extension(cxx_atomic)
37 #define __CLANG_ATOMICS
38 #elif __GNUC_PREREQ__(4, 7)
39 #define __GNUC_ATOMICS
40 #elif !defined(__GNUC__)
41 #error "stdatomic.h does not support your compiler"
/*
 * Without clang's native _Atomic() support, emulate the qualifier with a
 * one-member wrapper struct; the operation macros below then act on the
 * embedded __val field.  volatile keeps the compiler from caching the
 * value in a register across the emulated atomic operations.
 */
44 #if !defined(__CLANG_ATOMICS)
45 #define _Atomic(T) struct { volatile T __val; }
49 * 7.17.2 Initialization.
/*
 * With clang's builtins an atomic object holds a bare value, so static
 * initialization is the value itself and atomic_init() maps directly onto
 * __c11_atomic_init().  In the struct-based fallback the value lives in
 * the __val member; atomic_init() is specified by C11 to be non-atomic,
 * so a plain assignment to that field is sufficient.
 * NOTE(review): the #else/#endif lines of this conditional are not
 * visible in this excerpt - confirm they are present in the full file.
 */
52 #if defined(__CLANG_ATOMICS)
53 #define ATOMIC_VAR_INIT(value) (value)
54 #define atomic_init(obj, value) __c11_atomic_init(obj, value)
56 #define ATOMIC_VAR_INIT(value) { .__val = (value) }
57 #define atomic_init(obj, value) ((void)((obj)->__val = (value)))
61 * Clang and recent GCC both provide predefined macros for the memory
62 * orderings. If we are using a compiler that doesn't define them, use the
63 * clang values - these will be ignored in the fallback path.
/*
 * The numeric values 0..5 below match clang's predefined __ATOMIC_*
 * macros; they only matter when they are passed through to a compiler
 * builtin, which never happens in the __sync_* fallback path.
 * NOTE(review): each #ifndef below should be paired with an #endif; the
 * #endif lines are not visible in this excerpt - confirm in the full file.
 */
66 #ifndef __ATOMIC_RELAXED
67 #define __ATOMIC_RELAXED 0
69 #ifndef __ATOMIC_CONSUME
70 #define __ATOMIC_CONSUME 1
72 #ifndef __ATOMIC_ACQUIRE
73 #define __ATOMIC_ACQUIRE 2
75 #ifndef __ATOMIC_RELEASE
76 #define __ATOMIC_RELEASE 3
78 #ifndef __ATOMIC_ACQ_REL
79 #define __ATOMIC_ACQ_REL 4
81 #ifndef __ATOMIC_SEQ_CST
82 #define __ATOMIC_SEQ_CST 5
86 * 7.17.3 Order and consistency.
88 * The memory_order_* constants that denote the barrier behaviour of the
/*
 * The enumerators map one-to-one onto the compiler's __ATOMIC_* values so
 * that a memory_order argument can be passed straight to the builtins.
 * NOTE(review): the "typedef enum {" opener and the "} memory_order;"
 * closer of this enumeration are not visible in this excerpt - confirm
 * they are present in the full file.
 */
93 memory_order_relaxed = __ATOMIC_RELAXED,
94 memory_order_consume = __ATOMIC_CONSUME,
95 memory_order_acquire = __ATOMIC_ACQUIRE,
96 memory_order_release = __ATOMIC_RELEASE,
97 memory_order_acq_rel = __ATOMIC_ACQ_REL,
98 memory_order_seq_cst = __ATOMIC_SEQ_CST
/*
 * 7.17.4 Fences.
 *
 * Thread fence: a memory barrier with the given ordering.  The __sync_*
 * fallback ignores the order argument (evaluating it only for its side
 * effects) and always issues a full barrier, which is conservative but
 * correct for any requested ordering.
 * Signal fence: only needs to stop compiler reordering, not CPU
 * reordering, hence the empty asm with a "memory" clobber in the fallback.
 * NOTE(review): the #else and the remainder of the statement-expression
 * body (and its #endif) are not visible in this excerpt - confirm they
 * are present in the full file.
 */
105 #ifdef __CLANG_ATOMICS
106 #define atomic_thread_fence(order) __c11_atomic_thread_fence(order)
107 #define atomic_signal_fence(order) __c11_atomic_signal_fence(order)
108 #elif defined(__GNUC_ATOMICS)
109 #define atomic_thread_fence(order) __atomic_thread_fence(order)
110 #define atomic_signal_fence(order) __atomic_signal_fence(order)
112 #define atomic_thread_fence(order) ((void)(order), __sync_synchronize())
113 #define atomic_signal_fence(order) __extension__ ({ \
115 __asm volatile ("" ::: "memory"); \
121 * 7.17.5 Lock-free property.
/*
 * With builtin support, ask the compiler whether an object of this size
 * and address is lock-free.  The fallback uses a heuristic: anything no
 * larger than a pointer is assumed lock-free ((void)(obj) evaluates the
 * argument once for its side effects while discarding the value).
 */
124 #if defined(__CLANG_ATOMICS) || defined(__GNUC_ATOMICS)
125 #define atomic_is_lock_free(obj) \
126 __atomic_is_lock_free(sizeof((obj)->__val), &(obj)->__val)
128 #define atomic_is_lock_free(obj) \
129 ((void)(obj), sizeof((obj)->__val) <= sizeof(void *))
133 * 7.17.6 Atomic integer types.
/*
 * The full set of standard atomic integer typedefs, each built from the
 * _Atomic() qualifier/wrapper selected above.  The __-prefixed
 * implementation types (__char16_t, __int_least8_t, ...) presumably come
 * from <sys/_types.h>, included at the top of this file - confirm each is
 * declared there.
 */
136 typedef _Atomic(_Bool) atomic_bool;
137 typedef _Atomic(char) atomic_char;
138 typedef _Atomic(signed char) atomic_schar;
139 typedef _Atomic(unsigned char) atomic_uchar;
140 typedef _Atomic(short) atomic_short;
141 typedef _Atomic(unsigned short) atomic_ushort;
142 typedef _Atomic(int) atomic_int;
143 typedef _Atomic(unsigned int) atomic_uint;
144 typedef _Atomic(long) atomic_long;
145 typedef _Atomic(unsigned long) atomic_ulong;
146 typedef _Atomic(long long) atomic_llong;
147 typedef _Atomic(unsigned long long) atomic_ullong;
149 typedef _Atomic(__char16_t) atomic_char16_t;
150 typedef _Atomic(__char32_t) atomic_char32_t;
152 typedef _Atomic(__wchar_t) atomic_wchar_t;
153 typedef _Atomic(__int_least8_t) atomic_int_least8_t;
154 typedef _Atomic(__uint_least8_t) atomic_uint_least8_t;
155 typedef _Atomic(__int_least16_t) atomic_int_least16_t;
156 typedef _Atomic(__uint_least16_t) atomic_uint_least16_t;
157 typedef _Atomic(__int_least32_t) atomic_int_least32_t;
158 typedef _Atomic(__uint_least32_t) atomic_uint_least32_t;
159 typedef _Atomic(__int_least64_t) atomic_int_least64_t;
160 typedef _Atomic(__uint_least64_t) atomic_uint_least64_t;
161 typedef _Atomic(__int_fast8_t) atomic_int_fast8_t;
162 typedef _Atomic(__uint_fast8_t) atomic_uint_fast8_t;
163 typedef _Atomic(__int_fast16_t) atomic_int_fast16_t;
164 typedef _Atomic(__uint_fast16_t) atomic_uint_fast16_t;
165 typedef _Atomic(__int_fast32_t) atomic_int_fast32_t;
166 typedef _Atomic(__uint_fast32_t) atomic_uint_fast32_t;
167 typedef _Atomic(__int_fast64_t) atomic_int_fast64_t;
168 typedef _Atomic(__uint_fast64_t) atomic_uint_fast64_t;
169 typedef _Atomic(__intptr_t) atomic_intptr_t;
170 typedef _Atomic(__uintptr_t) atomic_uintptr_t;
171 typedef _Atomic(__size_t) atomic_size_t;
172 typedef _Atomic(__ptrdiff_t) atomic_ptrdiff_t;
173 typedef _Atomic(__intmax_t) atomic_intmax_t;
174 typedef _Atomic(__uintmax_t) atomic_uintmax_t;
177 * 7.17.7 Operations on atomic types.
181 * Compiler-specific operations.
/*
 * clang path: every _explicit operation maps one-to-one onto the
 * corresponding __c11_atomic_* builtin, which takes the atomic object
 * pointer directly and honours the memory_order argument(s).
 * NOTE(review): the continuation lines carrying the success/failure
 * arguments of the two compare-exchange macros are not visible in this
 * excerpt (each definition below ends in a line-continuation backslash) -
 * confirm them in the full file.
 */
184 #if defined(__CLANG_ATOMICS)
185 #define atomic_compare_exchange_strong_explicit(object, expected, \
186 desired, success, failure) \
187 __c11_atomic_compare_exchange_strong(object, expected, desired, \
189 #define atomic_compare_exchange_weak_explicit(object, expected, \
190 desired, success, failure) \
191 __c11_atomic_compare_exchange_weak(object, expected, desired, \
193 #define atomic_exchange_explicit(object, desired, order) \
194 __c11_atomic_exchange(object, desired, order)
195 #define atomic_fetch_add_explicit(object, operand, order) \
196 __c11_atomic_fetch_add(object, operand, order)
197 #define atomic_fetch_and_explicit(object, operand, order) \
198 __c11_atomic_fetch_and(object, operand, order)
199 #define atomic_fetch_or_explicit(object, operand, order) \
200 __c11_atomic_fetch_or(object, operand, order)
201 #define atomic_fetch_sub_explicit(object, operand, order) \
202 __c11_atomic_fetch_sub(object, operand, order)
203 #define atomic_fetch_xor_explicit(object, operand, order) \
204 __c11_atomic_fetch_xor(object, operand, order)
205 #define atomic_load_explicit(object, order) \
206 __c11_atomic_load(object, order)
207 #define atomic_store_explicit(object, desired, order) \
208 __c11_atomic_store(object, desired, order)
/*
 * GCC >= 4.7 path: operate on the wrapper struct's __val member via the
 * __atomic_*_n builtins.  In __atomic_compare_exchange_n the fourth
 * argument selects weak (1) vs. strong (0) compare-exchange; the builtins
 * honour the supplied memory_order values.
 */
209 #elif defined(__GNUC_ATOMICS)
210 #define atomic_compare_exchange_strong_explicit(object, expected, \
211 desired, success, failure) \
212 __atomic_compare_exchange_n(&(object)->__val, expected, \
213 desired, 0, success, failure)
214 #define atomic_compare_exchange_weak_explicit(object, expected, \
215 desired, success, failure) \
216 __atomic_compare_exchange_n(&(object)->__val, expected, \
217 desired, 1, success, failure)
218 #define atomic_exchange_explicit(object, desired, order) \
219 __atomic_exchange_n(&(object)->__val, desired, order)
220 #define atomic_fetch_add_explicit(object, operand, order) \
221 __atomic_fetch_add(&(object)->__val, operand, order)
222 #define atomic_fetch_and_explicit(object, operand, order) \
223 __atomic_fetch_and(&(object)->__val, operand, order)
224 #define atomic_fetch_or_explicit(object, operand, order) \
225 __atomic_fetch_or(&(object)->__val, operand, order)
226 #define atomic_fetch_sub_explicit(object, operand, order) \
227 __atomic_fetch_sub(&(object)->__val, operand, order)
228 #define atomic_fetch_xor_explicit(object, operand, order) \
229 __atomic_fetch_xor(&(object)->__val, operand, order)
230 #define atomic_load_explicit(object, order) \
231 __atomic_load_n(&(object)->__val, order)
232 #define atomic_store_explicit(object, desired, order) \
233 __atomic_store_n(&(object)->__val, desired, order)
/*
 * Legacy __sync_* fallback path (pre-4.7 GCC).  These builtins only
 * provide full-barrier semantics, so the memory_order arguments are
 * ignored (conservatively over-synchronizing).  Compare-exchange is built
 * from __sync_val_compare_and_swap; a load is emulated by adding 0.
 * NOTE(review): many interior lines of the statement-expression macros in
 * this path (variable declarations, stores through __e, the expressions
 * that yield the macro results, and several closing braces/#endif lines)
 * are not visible in this excerpt - confirm them in the full file before
 * relying on this transcription.
 */
235 #define atomic_compare_exchange_strong_explicit(object, expected, \
236 desired, success, failure) __extension__ ({ \
237 __typeof__((object)->__val) __v; \
238 __typeof__(expected) __e; \
243 __v = __sync_val_compare_and_swap(&(object)->__val, \
245 __r = (*__e == __v); \
/* Weak CAS is allowed to behave like strong CAS (no spurious failures). */
250 #define atomic_compare_exchange_weak_explicit(object, expected, \
251 desired, success, failure) \
252 atomic_compare_exchange_strong_explicit(object, expected, \
253 desired, success, failure)
254 #if __has_builtin(__sync_swap)
255 /* Clang provides a full-barrier atomic exchange - use it if available. */
256 #define atomic_exchange_explicit(object, desired, order) \
257 ((void)(order), __sync_swap(&(object)->__val, desired))
260 * __sync_lock_test_and_set() is only an acquire barrier in theory (although in
261 * practice it is usually a full barrier) so we need an explicit barrier before
/* Hence the __sync_synchronize() issued before the test-and-set below. */
264 #define atomic_exchange_explicit(object, desired, order) \
266 __typeof__(object) __o = (object); \
267 __typeof__(desired) __d = (desired); \
269 __sync_synchronize(); \
270 __sync_lock_test_and_set(&(__o)->__val, __d); \
273 #define atomic_fetch_add_explicit(object, operand, order) \
274 __sync_fetch_and_add(&(object)->__val, operand)
275 #define atomic_fetch_and_explicit(object, operand, order) \
276 __sync_fetch_and_and(&(object)->__val, operand)
277 #define atomic_fetch_or_explicit(object, operand, order) \
278 __sync_fetch_and_or(&(object)->__val, operand)
279 #define atomic_fetch_sub_explicit(object, operand, order) \
280 __sync_fetch_and_sub(&(object)->__val, operand)
281 #define atomic_fetch_xor_explicit(object, operand, order) \
282 __sync_fetch_and_xor(&(object)->__val, operand)
/* Load emulated as fetch-add of 0; note this writes, so it cannot be
 * applied to read-only memory. */
283 #define atomic_load_explicit(object, order) \
284 __sync_fetch_and_add(&(object)->__val, 0)
/* Store bracketed by full barriers to get seq_cst-like behaviour. */
285 #define atomic_store_explicit(object, desired, order) __extension__ ({ \
286 __typeof__(object) __o = (object); \
287 __typeof__(desired) __d = (desired); \
289 __sync_synchronize(); \
291 __sync_synchronize(); \
296 * Convenience functions.
/*
 * The non-_explicit forms required by C11: each simply invokes its
 * _explicit counterpart with memory_order_seq_cst, the strongest (and
 * default) ordering.
 */
299 #define atomic_compare_exchange_strong(object, expected, desired) \
300 atomic_compare_exchange_strong_explicit(object, expected, \
301 desired, memory_order_seq_cst, memory_order_seq_cst)
302 #define atomic_compare_exchange_weak(object, expected, desired) \
303 atomic_compare_exchange_weak_explicit(object, expected, \
304 desired, memory_order_seq_cst, memory_order_seq_cst)
305 #define atomic_exchange(object, desired) \
306 atomic_exchange_explicit(object, desired, memory_order_seq_cst)
307 #define atomic_fetch_add(object, operand) \
308 atomic_fetch_add_explicit(object, operand, memory_order_seq_cst)
309 #define atomic_fetch_and(object, operand) \
310 atomic_fetch_and_explicit(object, operand, memory_order_seq_cst)
311 #define atomic_fetch_or(object, operand) \
312 atomic_fetch_or_explicit(object, operand, memory_order_seq_cst)
313 #define atomic_fetch_sub(object, operand) \
314 atomic_fetch_sub_explicit(object, operand, memory_order_seq_cst)
315 #define atomic_fetch_xor(object, operand) \
316 atomic_fetch_xor_explicit(object, operand, memory_order_seq_cst)
317 #define atomic_load(object) \
318 atomic_load_explicit(object, memory_order_seq_cst)
319 #define atomic_store(object, desired) \
320 atomic_store_explicit(object, desired, memory_order_seq_cst)
323 * 7.17.8 Atomic flag type and operations.
/*
 * atomic_flag is implemented as an atomic_bool: 0 = clear, 1 = set.
 * Clearing is an atomic store of 0 with the requested ordering.
 */
326 typedef atomic_bool atomic_flag;
328 #define ATOMIC_FLAG_INIT ATOMIC_VAR_INIT(0)
330 #define atomic_flag_clear_explicit(object, order) \
331 atomic_store_explicit(object, 0, order)
/*
 * atomic_flag_test_and_set_explicit(): atomically set the flag and return
 * its previous state (true iff it was already set), per C11 7.17.8.
 *
 * Fix: the previous definition expanded to
 *	atomic_compare_exchange_strong_explicit(object, 0, 1, order, order)
 * which is broken twice over.  First, the second argument of a
 * compare-exchange must be a *pointer* to the expected value; the literal
 * 0 is a null pointer constant, so the compare-exchange builtins would
 * dereference NULL.  Second, even with a valid pointer the CAS returns
 * whether it *succeeded* (i.e. true when the flag was clear), which is
 * the inverse of the required return value (the flag's previous state).
 * An unconditional atomic exchange of 1 yields exactly the required
 * semantics and cannot fail.
 */
#define	atomic_flag_test_and_set_explicit(object, order)		\
	atomic_exchange_explicit(object, 1, order)
/*
 * Non-_explicit flag operations: defer to the _explicit forms with
 * memory_order_seq_cst, matching the convenience macros above.
 */
335 #define atomic_flag_clear(object) \
336 atomic_flag_clear_explicit(object, memory_order_seq_cst)
337 #define atomic_flag_test_and_set(object) \
338 atomic_flag_test_and_set_explicit(object, memory_order_seq_cst)
340 #endif /* !_STDATOMIC_H_ */