/* FreeBSD include/stdatomic.h — "Revert r231673 and r231682 for now, until we can run a full make" */
/*-
 * Copyright (c) 2011 Ed Schouten <ed@FreeBSD.org>
 *                    David Chisnall <theraven@FreeBSD.org>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD$
 */

30 #ifndef _STDATOMIC_H_
31 #define _STDATOMIC_H_
32
33 #include <sys/cdefs.h>
34 #include <sys/_types.h>
35
36 #if __has_feature(cxx_atomic)
37 #define __CLANG_ATOMICS
38 #elif __GNUC_PREREQ__(4, 7)
39 #define __GNUC_ATOMICS
40 #elif !defined(__GNUC__)
41 #error "stdatomic.h does not support your compiler"
42 #endif
43
44 #if !defined(__CLANG_ATOMICS)
45 #define _Atomic(T)                      struct { volatile T __val; }
46 #endif
47
/*
 * 7.17.2 Initialization.
 */

#if defined(__CLANG_ATOMICS)
#define	ATOMIC_VAR_INIT(value)		(value)
#define	atomic_init(obj, value)		__atomic_init(obj, value)
#else
/* Non-atomic initialization of the wrapped __val member. */
#define	ATOMIC_VAR_INIT(value)		{ .__val = (value) }
#define	atomic_init(obj, value) do {					\
	(obj)->__val = (value);						\
} while (0)
#endif

/*
 * Clang and recent GCC both predefine macros for the six C11 memory
 * orderings.  When building with a compiler that does not define them,
 * fall back to clang's values; the __sync-based implementation below
 * ignores the ordering argument anyway.
 */

#ifndef __ATOMIC_RELAXED
#define	__ATOMIC_RELAXED		0
#endif
#ifndef __ATOMIC_CONSUME
#define	__ATOMIC_CONSUME		1
#endif
#ifndef __ATOMIC_ACQUIRE
#define	__ATOMIC_ACQUIRE		2
#endif
#ifndef __ATOMIC_RELEASE
#define	__ATOMIC_RELEASE		3
#endif
#ifndef __ATOMIC_ACQ_REL
#define	__ATOMIC_ACQ_REL		4
#endif
#ifndef __ATOMIC_SEQ_CST
#define	__ATOMIC_SEQ_CST		5
#endif

/*
 * 7.17.3 Order and consistency.
 *
 * The memory_order_* constants denote the barrier behaviour of the
 * atomic operations; each maps directly onto the corresponding
 * compiler-level __ATOMIC_* value.
 */

enum memory_order {
	memory_order_relaxed = __ATOMIC_RELAXED,
	memory_order_consume = __ATOMIC_CONSUME,
	memory_order_acquire = __ATOMIC_ACQUIRE,
	memory_order_release = __ATOMIC_RELEASE,
	memory_order_acq_rel = __ATOMIC_ACQ_REL,
	memory_order_seq_cst = __ATOMIC_SEQ_CST
};

/*
 * 7.17.4 Fences.
 */

#if defined(__CLANG_ATOMICS) || defined(__GNUC_ATOMICS)
#define	atomic_thread_fence(order)	__atomic_thread_fence(order)
#define	atomic_signal_fence(order)	__atomic_signal_fence(order)
#else
/* The __sync fallback only has a full barrier; `order' is ignored. */
#define	atomic_thread_fence(order)	__sync_synchronize()
#define	atomic_signal_fence(order)	__asm volatile ("" : : : "memory")
#endif

/*
 * 7.17.5 Lock-free property.
 */

#if defined(__CLANG_ATOMICS)
#define	atomic_is_lock_free(obj) \
	__atomic_is_lock_free(sizeof(obj))
#elif defined(__GNUC_ATOMICS)
#define	atomic_is_lock_free(obj) \
	__atomic_is_lock_free(sizeof((obj)->__val))
#else
/* Heuristic: objects no wider than a pointer are assumed lock-free. */
#define	atomic_is_lock_free(obj) \
	(sizeof((obj)->__val) <= sizeof(void *))
#endif

130 /*
131  * 7.17.6 Atomic integer types.
132  */
133
134 typedef _Atomic(_Bool)                  atomic_bool;
135 typedef _Atomic(char)                   atomic_char;
136 typedef _Atomic(signed char)            atomic_schar;
137 typedef _Atomic(unsigned char)          atomic_uchar;
138 typedef _Atomic(short)                  atomic_short;
139 typedef _Atomic(unsigned short)         atomic_ushort;
140 typedef _Atomic(int)                    atomic_int;
141 typedef _Atomic(unsigned int)           atomic_uint;
142 typedef _Atomic(long)                   atomic_long;
143 typedef _Atomic(unsigned long)          atomic_ulong;
144 typedef _Atomic(long long)              atomic_llong;
145 typedef _Atomic(unsigned long long)     atomic_ullong;
146 #if 0
147 typedef _Atomic(__char16_t)             atomic_char16_t;
148 typedef _Atomic(__char32_t)             atomic_char32_t;
149 #endif
150 typedef _Atomic(__wchar_t)              atomic_wchar_t;
151 typedef _Atomic(__int_least8_t)         atomic_int_least8_t;
152 typedef _Atomic(__uint_least8_t)        atomic_uint_least8_t;
153 typedef _Atomic(__int_least16_t)        atomic_int_least16_t;
154 typedef _Atomic(__uint_least16_t)       atomic_uint_least16_t;
155 typedef _Atomic(__int_least32_t)        atomic_int_least32_t;
156 typedef _Atomic(__uint_least32_t)       atomic_uint_least32_t;
157 typedef _Atomic(__int_least64_t)        atomic_int_least64_t;
158 typedef _Atomic(__uint_least64_t)       atomic_uint_least64_t;
159 typedef _Atomic(__int_fast8_t)          atomic_int_fast8_t;
160 typedef _Atomic(__uint_fast8_t)         atomic_uint_fast8_t;
161 typedef _Atomic(__int_fast16_t)         atomic_int_fast16_t;
162 typedef _Atomic(__uint_fast16_t)        atomic_uint_fast16_t;
163 typedef _Atomic(__int_fast32_t)         atomic_int_fast32_t;
164 typedef _Atomic(__uint_fast32_t)        atomic_uint_fast32_t;
165 typedef _Atomic(__int_fast64_t)         atomic_int_fast64_t;
166 typedef _Atomic(__uint_fast64_t)        atomic_uint_fast64_t;
167 typedef _Atomic(__intptr_t)             atomic_intptr_t;
168 typedef _Atomic(__uintptr_t)            atomic_uintptr_t;
169 typedef _Atomic(__size_t)               atomic_size_t;
170 typedef _Atomic(__ptrdiff_t)            atomic_ptrdiff_t;
171 typedef _Atomic(__intmax_t)             atomic_intmax_t;
172 typedef _Atomic(__uintmax_t)            atomic_uintmax_t;
173
/*
 * 7.17.7 Operations on atomic types.
 */

/*
 * Compiler-specific operations.
 *
 * All macro arguments are parenthesized at their points of use so that
 * expression arguments (e.g. `a + b') expand safely.
 */

#if defined(__CLANG_ATOMICS)
#define	atomic_compare_exchange_strong_explicit(object, expected,	\
    desired, success, failure)						\
	__atomic_compare_exchange_strong((object), (expected),		\
	    (desired), (success), (failure))
#define	atomic_compare_exchange_weak_explicit(object, expected,		\
    desired, success, failure)						\
	__atomic_compare_exchange_weak((object), (expected),		\
	    (desired), (success), (failure))
#define	atomic_exchange_explicit(object, desired, order)		\
	__atomic_exchange((object), (desired), (order))
#define	atomic_fetch_add_explicit(object, operand, order)		\
	__atomic_fetch_add((object), (operand), (order))
#define	atomic_fetch_and_explicit(object, operand, order)		\
	__atomic_fetch_and((object), (operand), (order))
#define	atomic_fetch_or_explicit(object, operand, order)		\
	__atomic_fetch_or((object), (operand), (order))
#define	atomic_fetch_sub_explicit(object, operand, order)		\
	__atomic_fetch_sub((object), (operand), (order))
#define	atomic_fetch_xor_explicit(object, operand, order)		\
	__atomic_fetch_xor((object), (operand), (order))
#define	atomic_load_explicit(object, order)				\
	__atomic_load((object), (order))
#define	atomic_store_explicit(object, desired, order)			\
	__atomic_store((object), (desired), (order))
#elif defined(__GNUC_ATOMICS)
#define	atomic_compare_exchange_strong_explicit(object, expected,	\
    desired, success, failure)						\
	__atomic_compare_exchange_n(&(object)->__val, (expected),	\
	    (desired), 0, (success), (failure))
#define	atomic_compare_exchange_weak_explicit(object, expected,		\
    desired, success, failure)						\
	__atomic_compare_exchange_n(&(object)->__val, (expected),	\
	    (desired), 1, (success), (failure))
#define	atomic_exchange_explicit(object, desired, order)		\
	__atomic_exchange_n(&(object)->__val, (desired), (order))
#define	atomic_fetch_add_explicit(object, operand, order)		\
	__atomic_fetch_add(&(object)->__val, (operand), (order))
#define	atomic_fetch_and_explicit(object, operand, order)		\
	__atomic_fetch_and(&(object)->__val, (operand), (order))
#define	atomic_fetch_or_explicit(object, operand, order)		\
	__atomic_fetch_or(&(object)->__val, (operand), (order))
#define	atomic_fetch_sub_explicit(object, operand, order)		\
	__atomic_fetch_sub(&(object)->__val, (operand), (order))
#define	atomic_fetch_xor_explicit(object, operand, order)		\
	__atomic_fetch_xor(&(object)->__val, (operand), (order))
#define	atomic_load_explicit(object, order)				\
	__atomic_load_n(&(object)->__val, (order))
#define	atomic_store_explicit(object, desired, order)			\
	__atomic_store_n(&(object)->__val, (desired), (order))
#else
/*
 * __sync fallback: every operation acts as a full barrier and the
 * ordering arguments (success/failure/order) are ignored.  NOTE: these
 * macros may evaluate `object' and `expected' more than once.
 */
#define	atomic_compare_exchange_strong_explicit(object, expected,	\
    desired, success, failure) ({					\
	__typeof__((object)->__val) __v;				\
	_Bool __r;							\
	__v = __sync_val_compare_and_swap(&(object)->__val,		\
	    *(expected), (desired));					\
	__r = *(expected) == __v;					\
	*(expected) = __v;						\
	__r;								\
})

/* The __sync builtins have no weak CAS; reuse the strong form. */
#define	atomic_compare_exchange_weak_explicit(object, expected,		\
    desired, success, failure)						\
	atomic_compare_exchange_strong_explicit(object, expected,	\
		desired, success, failure)
#if __has_builtin(__sync_swap)
/* Clang provides a full-barrier atomic exchange - use it if available. */
#define	atomic_exchange_explicit(object, desired, order)		\
	__sync_swap(&(object)->__val, (desired))
#else
/*
 * __sync_lock_test_and_set() is only an acquire barrier in theory (although in
 * practice it is usually a full barrier) so we need an explicit barrier after
 * it.
 */
#define	atomic_exchange_explicit(object, desired, order) ({		\
	__typeof__((object)->__val) __v;				\
	__v = __sync_lock_test_and_set(&(object)->__val, (desired));	\
	__sync_synchronize();						\
	__v;								\
})
#endif
#define	atomic_fetch_add_explicit(object, operand, order)		\
	__sync_fetch_and_add(&(object)->__val, (operand))
#define	atomic_fetch_and_explicit(object, operand, order)		\
	__sync_fetch_and_and(&(object)->__val, (operand))
#define	atomic_fetch_or_explicit(object, operand, order)		\
	__sync_fetch_and_or(&(object)->__val, (operand))
#define	atomic_fetch_sub_explicit(object, operand, order)		\
	__sync_fetch_and_sub(&(object)->__val, (operand))
#define	atomic_fetch_xor_explicit(object, operand, order)		\
	__sync_fetch_and_xor(&(object)->__val, (operand))
/* Atomically adding zero returns the current value: a full-barrier load. */
#define	atomic_load_explicit(object, order)				\
	__sync_fetch_and_add(&(object)->__val, 0)
#define	atomic_store_explicit(object, desired, order) do {		\
	__sync_synchronize();						\
	(object)->__val = (desired);					\
	__sync_synchronize();						\
} while (0)
#endif

/*
 * Convenience functions: the implicitly sequentially-consistent forms
 * of the *_explicit operations defined above.
 */

#define	atomic_compare_exchange_strong(object, expected, desired)	\
	atomic_compare_exchange_strong_explicit(object, expected,	\
	    desired, memory_order_seq_cst, memory_order_seq_cst)
#define	atomic_compare_exchange_weak(object, expected, desired)		\
	atomic_compare_exchange_weak_explicit(object, expected,		\
	    desired, memory_order_seq_cst, memory_order_seq_cst)
#define	atomic_exchange(object, desired)				\
	atomic_exchange_explicit(object, desired, memory_order_seq_cst)
#define	atomic_fetch_add(object, operand)				\
	atomic_fetch_add_explicit(object, operand, memory_order_seq_cst)
#define	atomic_fetch_and(object, operand)				\
	atomic_fetch_and_explicit(object, operand, memory_order_seq_cst)
#define	atomic_fetch_or(object, operand)				\
	atomic_fetch_or_explicit(object, operand, memory_order_seq_cst)
#define	atomic_fetch_sub(object, operand)				\
	atomic_fetch_sub_explicit(object, operand, memory_order_seq_cst)
#define	atomic_fetch_xor(object, operand)				\
	atomic_fetch_xor_explicit(object, operand, memory_order_seq_cst)
#define	atomic_load(object)						\
	atomic_load_explicit(object, memory_order_seq_cst)
#define	atomic_store(object, desired)					\
	atomic_store_explicit(object, desired, memory_order_seq_cst)

311 /*
312  * 7.17.8 Atomic flag type and operations.
313  */
314
315 typedef atomic_bool                     atomic_flag;
316
317 #define ATOMIC_FLAG_INIT                ATOMIC_VAR_INIT(0)
318
319 #define atomic_flag_clear_explicit(object, order)                       \
320         atomic_store_explicit(object, 0, order)
321 #define atomic_flag_test_and_set_explicit(object, order)                \
322         atomic_compare_exchange_strong_explicit(object, 0, 1, order, order)
323
324 #define atomic_flag_clear(object)                                       \
325         atomic_flag_clear_explicit(object, memory_order_seq_cst)
326 #define atomic_flag_test_and_set(object)                                \
327         atomic_flag_test_and_set_explicit(object, memory_order_seq_cst)
328
329 #endif /* !_STDATOMIC_H_ */