/*-
 * Copyright (c) 2011 Ed Schouten <ed@FreeBSD.org>
 *                    David Chisnall <theraven@FreeBSD.org>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD$
 */

#ifndef _STDATOMIC_H_
#define _STDATOMIC_H_

#include <sys/cdefs.h>
#include <sys/_types.h>

#if __has_feature(cxx_atomic)
#define __CLANG_ATOMICS
#elif __GNUC_PREREQ__(4, 7)
#define __GNUC_ATOMICS
#elif !defined(__GNUC__)
#error "stdatomic.h does not support your compiler"
#endif

#if !defined(__CLANG_ATOMICS)
#define _Atomic(T)                      struct { volatile T __val; }
#endif

/*
 * 7.17.2 Initialization.
 */

#if defined(__CLANG_ATOMICS)
#define ATOMIC_VAR_INIT(value)          (value)
#define atomic_init(obj, value)         __c11_atomic_init(obj, value)
#else
#define ATOMIC_VAR_INIT(value)          { .__val = (value) }
#define atomic_init(obj, value) do {                                    \
        (obj)->__val = (value);                                         \
} while (0)
#endif
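
/*
 * Usage sketch (the identifiers below are illustrative and not part of
 * this header): ATOMIC_VAR_INIT() provides a static initializer, while
 * atomic_init() initializes an object obtained at run time and performs
 * no synchronization of its own.
 *
 *	static atomic_int counter = ATOMIC_VAR_INIT(0);
 *
 *	void
 *	counter_setup(atomic_int *c)
 *	{
 *
 *		atomic_init(c, 0);
 *	}
 */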

/*
 * Clang and recent GCC both provide predefined macros for the memory
 * orderings.  If we are using a compiler that doesn't define them, use the
 * clang values - these will be ignored in the fallback path.
 */

#ifndef __ATOMIC_RELAXED
#define __ATOMIC_RELAXED                0
#endif
#ifndef __ATOMIC_CONSUME
#define __ATOMIC_CONSUME                1
#endif
#ifndef __ATOMIC_ACQUIRE
#define __ATOMIC_ACQUIRE                2
#endif
#ifndef __ATOMIC_RELEASE
#define __ATOMIC_RELEASE                3
#endif
#ifndef __ATOMIC_ACQ_REL
#define __ATOMIC_ACQ_REL                4
#endif
#ifndef __ATOMIC_SEQ_CST
#define __ATOMIC_SEQ_CST                5
#endif

/*
 * 7.17.3 Order and consistency.
 *
 * The memory_order_* constants that denote the barrier behaviour of the
 * atomic operations.
 */

typedef enum memory_order {
        memory_order_relaxed = __ATOMIC_RELAXED,
        memory_order_consume = __ATOMIC_CONSUME,
        memory_order_acquire = __ATOMIC_ACQUIRE,
        memory_order_release = __ATOMIC_RELEASE,
        memory_order_acq_rel = __ATOMIC_ACQ_REL,
        memory_order_seq_cst = __ATOMIC_SEQ_CST
} memory_order;
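
/*
 * Usage sketch (nhits, ready, data and the functions below are illustrative
 * only): a plain event counter needs nothing more than atomicity, so
 * memory_order_relaxed suffices, while publishing data through a flag
 * requires a release store paired with an acquire load.
 *
 *	static atomic_uint nhits;
 *	static atomic_int ready;
 *	static int data;
 *
 *	void
 *	publish(void)
 *	{
 *
 *		atomic_fetch_add_explicit(&nhits, 1, memory_order_relaxed);
 *		data = 42;
 *		atomic_store_explicit(&ready, 1, memory_order_release);
 *	}
 *
 *	int
 *	consume(void)
 *	{
 *
 *		if (atomic_load_explicit(&ready, memory_order_acquire))
 *			return (data);
 *		return (-1);
 *	}
 */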

/*
 * 7.17.4 Fences.
 */

#ifdef __CLANG_ATOMICS
#define atomic_thread_fence(order)      __c11_atomic_thread_fence(order)
#define atomic_signal_fence(order)      __c11_atomic_signal_fence(order)
#elif defined(__GNUC_ATOMICS)
#define atomic_thread_fence(order)      __atomic_thread_fence(order)
#define atomic_signal_fence(order)      __atomic_signal_fence(order)
#else
#define atomic_thread_fence(order)      __sync_synchronize()
#define atomic_signal_fence(order)      __asm volatile ("" : : : "memory")
#endif
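
/*
 * Usage sketch (buf and done are illustrative only): a fence imposes an
 * ordering constraint without being tied to one particular atomic object,
 * e.g. to make several plain stores visible before a relaxed store of a
 * flag; the reader pairs this with a relaxed load of the flag followed by
 * atomic_thread_fence(memory_order_acquire).
 *
 *	buf[0] = 1;
 *	buf[1] = 2;
 *	atomic_thread_fence(memory_order_release);
 *	atomic_store_explicit(&done, 1, memory_order_relaxed);
 */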

/*
 * 7.17.5 Lock-free property.
 */

#if defined(__CLANG_ATOMICS)
#define atomic_is_lock_free(obj) \
        __c11_atomic_is_lock_free(sizeof(*(obj)))
#elif defined(__GNUC_ATOMICS)
#define atomic_is_lock_free(obj) \
        __atomic_is_lock_free(sizeof((obj)->__val), &(obj)->__val)
#else
#define atomic_is_lock_free(obj) \
        (sizeof((obj)->__val) <= sizeof(void *))
#endif
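
/*
 * Usage sketch: the argument is a pointer to an atomic object, and the
 * result can be used to fall back to a lock-based strategy (ptr and the
 * function name are illustrative only).
 *
 *	static atomic_uintptr_t ptr;
 *
 *	_Bool
 *	ptr_is_lock_free(void)
 *	{
 *
 *		return (atomic_is_lock_free(&ptr));
 *	}
 */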

/*
 * 7.17.6 Atomic integer types.
 */

typedef _Atomic(_Bool)                  atomic_bool;
typedef _Atomic(char)                   atomic_char;
typedef _Atomic(signed char)            atomic_schar;
typedef _Atomic(unsigned char)          atomic_uchar;
typedef _Atomic(short)                  atomic_short;
typedef _Atomic(unsigned short)         atomic_ushort;
typedef _Atomic(int)                    atomic_int;
typedef _Atomic(unsigned int)           atomic_uint;
typedef _Atomic(long)                   atomic_long;
typedef _Atomic(unsigned long)          atomic_ulong;
typedef _Atomic(long long)              atomic_llong;
typedef _Atomic(unsigned long long)     atomic_ullong;
#if 0
typedef _Atomic(__char16_t)             atomic_char16_t;
typedef _Atomic(__char32_t)             atomic_char32_t;
#endif
typedef _Atomic(__wchar_t)              atomic_wchar_t;
typedef _Atomic(__int_least8_t)         atomic_int_least8_t;
typedef _Atomic(__uint_least8_t)        atomic_uint_least8_t;
typedef _Atomic(__int_least16_t)        atomic_int_least16_t;
typedef _Atomic(__uint_least16_t)       atomic_uint_least16_t;
typedef _Atomic(__int_least32_t)        atomic_int_least32_t;
typedef _Atomic(__uint_least32_t)       atomic_uint_least32_t;
typedef _Atomic(__int_least64_t)        atomic_int_least64_t;
typedef _Atomic(__uint_least64_t)       atomic_uint_least64_t;
typedef _Atomic(__int_fast8_t)          atomic_int_fast8_t;
typedef _Atomic(__uint_fast8_t)         atomic_uint_fast8_t;
typedef _Atomic(__int_fast16_t)         atomic_int_fast16_t;
typedef _Atomic(__uint_fast16_t)        atomic_uint_fast16_t;
typedef _Atomic(__int_fast32_t)         atomic_int_fast32_t;
typedef _Atomic(__uint_fast32_t)        atomic_uint_fast32_t;
typedef _Atomic(__int_fast64_t)         atomic_int_fast64_t;
typedef _Atomic(__uint_fast64_t)        atomic_uint_fast64_t;
typedef _Atomic(__intptr_t)             atomic_intptr_t;
typedef _Atomic(__uintptr_t)            atomic_uintptr_t;
typedef _Atomic(__size_t)               atomic_size_t;
typedef _Atomic(__ptrdiff_t)            atomic_ptrdiff_t;
typedef _Atomic(__intmax_t)             atomic_intmax_t;
typedef _Atomic(__uintmax_t)            atomic_uintmax_t;

/*
 * 7.17.7 Operations on atomic types.
 */

/*
 * Compiler-specific operations.
 */

#if defined(__CLANG_ATOMICS)
#define atomic_compare_exchange_strong_explicit(object, expected,       \
    desired, success, failure)                                          \
        __c11_atomic_compare_exchange_strong(object, expected, desired, \
            success, failure)
#define atomic_compare_exchange_weak_explicit(object, expected,         \
    desired, success, failure)                                          \
        __c11_atomic_compare_exchange_weak(object, expected, desired,   \
            success, failure)
#define atomic_exchange_explicit(object, desired, order)                \
        __c11_atomic_exchange(object, desired, order)
#define atomic_fetch_add_explicit(object, operand, order)               \
        __c11_atomic_fetch_add(object, operand, order)
#define atomic_fetch_and_explicit(object, operand, order)               \
        __c11_atomic_fetch_and(object, operand, order)
#define atomic_fetch_or_explicit(object, operand, order)                \
        __c11_atomic_fetch_or(object, operand, order)
#define atomic_fetch_sub_explicit(object, operand, order)               \
        __c11_atomic_fetch_sub(object, operand, order)
#define atomic_fetch_xor_explicit(object, operand, order)               \
        __c11_atomic_fetch_xor(object, operand, order)
#define atomic_load_explicit(object, order)                             \
        __c11_atomic_load(object, order)
#define atomic_store_explicit(object, desired, order)                   \
        __c11_atomic_store(object, desired, order)
#elif defined(__GNUC_ATOMICS)
#define atomic_compare_exchange_strong_explicit(object, expected,       \
    desired, success, failure)                                          \
        __atomic_compare_exchange_n(&(object)->__val, expected,         \
            desired, 0, success, failure)
#define atomic_compare_exchange_weak_explicit(object, expected,         \
    desired, success, failure)                                          \
        __atomic_compare_exchange_n(&(object)->__val, expected,         \
            desired, 1, success, failure)
#define atomic_exchange_explicit(object, desired, order)                \
        __atomic_exchange_n(&(object)->__val, desired, order)
#define atomic_fetch_add_explicit(object, operand, order)               \
        __atomic_fetch_add(&(object)->__val, operand, order)
#define atomic_fetch_and_explicit(object, operand, order)               \
        __atomic_fetch_and(&(object)->__val, operand, order)
#define atomic_fetch_or_explicit(object, operand, order)                \
        __atomic_fetch_or(&(object)->__val, operand, order)
#define atomic_fetch_sub_explicit(object, operand, order)               \
        __atomic_fetch_sub(&(object)->__val, operand, order)
#define atomic_fetch_xor_explicit(object, operand, order)               \
        __atomic_fetch_xor(&(object)->__val, operand, order)
#define atomic_load_explicit(object, order)                             \
        __atomic_load_n(&(object)->__val, order)
#define atomic_store_explicit(object, desired, order)                   \
        __atomic_store_n(&(object)->__val, desired, order)
#else
#define atomic_compare_exchange_strong_explicit(object, expected,       \
    desired, success, failure) ({                                       \
        __typeof__((object)->__val) __v;                                \
        _Bool __r;                                                      \
        __v = __sync_val_compare_and_swap(&(object)->__val,             \
            *(expected), desired);                                      \
        __r = *(expected) == __v;                                       \
        *(expected) = __v;                                              \
        __r;                                                            \
})

#define atomic_compare_exchange_weak_explicit(object, expected,         \
    desired, success, failure)                                          \
        atomic_compare_exchange_strong_explicit(object, expected,       \
                desired, success, failure)
#if __has_builtin(__sync_swap)
/* Clang provides a full-barrier atomic exchange - use it if available. */
#define atomic_exchange_explicit(object, desired, order)                \
        __sync_swap(&(object)->__val, desired)
#else
/*
 * __sync_lock_test_and_set() is only an acquire barrier in theory (although in
 * practice it is usually a full barrier) so we need an explicit barrier after
 * it.
 */
#define atomic_exchange_explicit(object, desired, order) ({             \
        __typeof__((object)->__val) __v;                                \
        __v = __sync_lock_test_and_set(&(object)->__val, desired);      \
        __sync_synchronize();                                           \
        __v;                                                            \
})
#endif
#define atomic_fetch_add_explicit(object, operand, order)               \
        __sync_fetch_and_add(&(object)->__val, operand)
#define atomic_fetch_and_explicit(object, operand, order)               \
        __sync_fetch_and_and(&(object)->__val, operand)
#define atomic_fetch_or_explicit(object, operand, order)                \
        __sync_fetch_and_or(&(object)->__val, operand)
#define atomic_fetch_sub_explicit(object, operand, order)               \
        __sync_fetch_and_sub(&(object)->__val, operand)
#define atomic_fetch_xor_explicit(object, operand, order)               \
        __sync_fetch_and_xor(&(object)->__val, operand)
#define atomic_load_explicit(object, order)                             \
        __sync_fetch_and_add(&(object)->__val, 0)
#define atomic_store_explicit(object, desired, order) do {              \
        __sync_synchronize();                                           \
        (object)->__val = (desired);                                    \
        __sync_synchronize();                                           \
} while (0)
#endif
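
/*
 * Usage sketch (value and saturating_increment() are illustrative only):
 * on failure the compare-exchange operations store the value actually
 * found back into *expected, which keeps retry loops compact.
 *
 *	static atomic_uint value;
 *
 *	unsigned int
 *	saturating_increment(unsigned int max)
 *	{
 *		unsigned int old;
 *
 *		old = atomic_load_explicit(&value, memory_order_relaxed);
 *		do {
 *			if (old >= max)
 *				break;
 *		} while (!atomic_compare_exchange_weak_explicit(&value,
 *		    &old, old + 1, memory_order_acq_rel,
 *		    memory_order_relaxed));
 *		return (old);
 *	}
 */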

/*
 * Convenience functions.
 */

#define atomic_compare_exchange_strong(object, expected, desired)       \
        atomic_compare_exchange_strong_explicit(object, expected,       \
            desired, memory_order_seq_cst, memory_order_seq_cst)
#define atomic_compare_exchange_weak(object, expected, desired)         \
        atomic_compare_exchange_weak_explicit(object, expected,         \
            desired, memory_order_seq_cst, memory_order_seq_cst)
#define atomic_exchange(object, desired)                                \
        atomic_exchange_explicit(object, desired, memory_order_seq_cst)
#define atomic_fetch_add(object, operand)                               \
        atomic_fetch_add_explicit(object, operand, memory_order_seq_cst)
#define atomic_fetch_and(object, operand)                               \
        atomic_fetch_and_explicit(object, operand, memory_order_seq_cst)
#define atomic_fetch_or(object, operand)                                \
        atomic_fetch_or_explicit(object, operand, memory_order_seq_cst)
#define atomic_fetch_sub(object, operand)                               \
        atomic_fetch_sub_explicit(object, operand, memory_order_seq_cst)
#define atomic_fetch_xor(object, operand)                               \
        atomic_fetch_xor_explicit(object, operand, memory_order_seq_cst)
#define atomic_load(object)                                             \
        atomic_load_explicit(object, memory_order_seq_cst)
#define atomic_store(object, desired)                                   \
        atomic_store_explicit(object, desired, memory_order_seq_cst)
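
/*
 * Usage sketch (nbytes and the functions are illustrative only): the
 * convenience forms default to memory_order_seq_cst, so callers that do
 * not care about weaker orderings never mention an order at all.
 *
 *	static atomic_ulong nbytes;
 *
 *	void
 *	account(unsigned long n)
 *	{
 *
 *		atomic_fetch_add(&nbytes, n);
 *	}
 *
 *	unsigned long
 *	total(void)
 *	{
 *
 *		return (atomic_load(&nbytes));
 *	}
 */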

/*
 * 7.17.8 Atomic flag type and operations.
 */

typedef atomic_bool                     atomic_flag;

#define ATOMIC_FLAG_INIT                ATOMIC_VAR_INIT(0)

#define atomic_flag_clear_explicit(object, order)                       \
        atomic_store_explicit(object, 0, order)
#define atomic_flag_test_and_set_explicit(object, order)                \
        atomic_exchange_explicit(object, 1, order)

#define atomic_flag_clear(object)                                       \
        atomic_flag_clear_explicit(object, memory_order_seq_cst)
#define atomic_flag_test_and_set(object)                                \
        atomic_flag_test_and_set_explicit(object, memory_order_seq_cst)
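
/*
 * Usage sketch (lock, spin_lock() and spin_unlock() are illustrative
 * only): atomic_flag operations are the only ones guaranteed to be
 * lock-free, which makes the type sufficient for a minimal spinlock.
 *
 *	static atomic_flag lock = ATOMIC_FLAG_INIT;
 *
 *	void
 *	spin_lock(void)
 *	{
 *
 *		while (atomic_flag_test_and_set_explicit(&lock,
 *		    memory_order_acquire))
 *			;
 *	}
 *
 *	void
 *	spin_unlock(void)
 *	{
 *
 *		atomic_flag_clear_explicit(&lock, memory_order_release);
 *	}
 */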

#endif /* !_STDATOMIC_H_ */