/* FreeBSD include/stdatomic.h (repository-browser capture header removed) */
1 /*-
2  * Copyright (c) 2011 Ed Schouten <ed@FreeBSD.org>
3  *                    David Chisnall <theraven@FreeBSD.org>
4  * All rights reserved.
5  *
6  * Redistribution and use in source and binary forms, with or without
7  * modification, are permitted provided that the following conditions
8  * are met:
9  * 1. Redistributions of source code must retain the above copyright
10  *    notice, this list of conditions and the following disclaimer.
11  * 2. Redistributions in binary form must reproduce the above copyright
12  *    notice, this list of conditions and the following disclaimer in the
13  *    documentation and/or other materials provided with the distribution.
14  *
15  * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
16  * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
17  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
18  * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
19  * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
20  * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
21  * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
22  * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
23  * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
24  * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
25  * SUCH DAMAGE.
26  *
27  * $FreeBSD$
28  */
29
30 #ifndef _STDATOMIC_H_
31 #define _STDATOMIC_H_
32
33 #include <sys/cdefs.h>
34 #include <sys/_types.h>
35
/*
 * Select the compiler-specific implementation: Clang advertises its C1X
 * atomic support through __has_feature(cxx_atomic); otherwise fall back
 * to the GCC __sync_*() builtins.
 */
#ifndef __has_feature
/*
 * <sys/cdefs.h> normally provides a __has_feature() fallback, but be
 * defensive: a bare function-like use of an undefined macro inside #if
 * is a hard preprocessor error on compilers without the builtin.
 */
#define	__has_feature(x)	0
#endif
#if __has_feature(cxx_atomic)
#define __CLANG_ATOMICS
#elif defined(__GNUC__)
#define __GNUC_ATOMICS
#else
#error "stdatomic.h does not support your compiler"
#endif
43
#ifdef __GNUC_ATOMICS
/*
 * GCC has no native _Atomic() qualifier, so emulate it with a one-member
 * struct holding a volatile copy of the value.  The wrapper also keeps
 * callers from accidentally touching the value non-atomically.
 */
#define _Atomic(T)                              struct { volatile T __val; }
#endif
47
48 /*
49  * 7.17.2 Initialization.
50  */
51
52 #if defined(__CLANG_ATOMICS)
53 #define ATOMIC_VAR_INIT(value)                  (value)
54 #define atomic_init(obj, value)                 __atomic_init(obj, value)
55 #elif defined(__GNUC_ATOMICS)
56 #define ATOMIC_VAR_INIT(value)                  { .__val = (value) }
57 #define atomic_init(obj, value)                 (obj = ATOMIC_VAR_INIT(value))
58 #endif
59
60 /*
61  * Clang and recent GCC both provide predefined macros for the memory
62  * orderings.  If we are using a compiler that doesn't define them, use the
63  * clang values - these will be ignored in the fallback path.
64  */
65
66 #ifndef __ATOMIC_RELAXED
67 #define __ATOMIC_RELAXED                        0
68 #endif
69 #ifndef __ATOMIC_CONSUME
70 #define __ATOMIC_CONSUME                        1
71 #endif
72 #ifndef __ATOMIC_ACQUIRE
73 #define __ATOMIC_ACQUIRE                        2
74 #endif
75 #ifndef __ATOMIC_RELEASE
76 #define __ATOMIC_RELEASE                        3
77 #endif
78 #ifndef __ATOMIC_ACQ_REL
79 #define __ATOMIC_ACQ_REL                        4
80 #endif
81 #ifndef __ATOMIC_SEQ_CST
82 #define __ATOMIC_SEQ_CST                        5
83 #endif
84
85 /*
86  * 7.17.3 Order and consistency.
87  *
88  * The memory_order_* constants that denote the barrier behaviour of the
89  * atomic operations.  
90  */
91
92 enum memory_order {
93         memory_order_relaxed = __ATOMIC_RELAXED,
94         memory_order_consume = __ATOMIC_CONSUME,
95         memory_order_acquire = __ATOMIC_ACQUIRE,
96         memory_order_release = __ATOMIC_RELEASE,
97         memory_order_acq_rel = __ATOMIC_ACQ_REL,
98         memory_order_seq_cst = __ATOMIC_SEQ_CST
99 };
100
101 /*
102  * 7.17.4 Fences.
103  */
104
105 #if defined(__CLANG_ATOMICS)
106 #define atomic_thread_fence(order)      __atomic_thread_fence(order)
107 #elif defined(__GNUC_ATOMICS)
108 #define atomic_thread_fence(order)      __sync_synchronize()
109 #endif
110 #define atomic_signal_fence(order)      __asm volatile ("" : : : "memory");
111
112 /*
113  * 7.17.5 Lock-free property.
114  */
115
116 #if defined(__CLANG_ATOMICS)
117 #define atomic_is_lock_free(obj)        __atomic_is_lock_free(obj)
118 #elif defined(__GNUC_ATOMICS)
119 #define atomic_is_lock_free(obj)        (sizeof((obj->__val)) <= sizeof(void *))
120 #endif
121
122 /*
123  * 7.17.6 Atomic integer types.
124  */
125
126 typedef _Atomic(_Bool)                  atomic_bool;
127 typedef _Atomic(char)                   atomic_char;
128 typedef _Atomic(signed char)            atomic_schar;
129 typedef _Atomic(unsigned char)          atomic_uchar;
130 typedef _Atomic(short)                  atomic_short;
131 typedef _Atomic(unsigned short)         atomic_ushort;
132 typedef _Atomic(int)                    atomic_int;
133 typedef _Atomic(unsigned int)           atomic_uint;
134 typedef _Atomic(long)                   atomic_long;
135 typedef _Atomic(unsigned long)          atomic_ulong;
136 typedef _Atomic(long long)              atomic_llong;
137 typedef _Atomic(unsigned long long)     atomic_ullong;
138 #if 0
139 typedef _Atomic(__char16_t)             atomic_char16_t;
140 typedef _Atomic(__char32_t)             atomic_char32_t;
141 #endif
142 typedef _Atomic(__wchar_t)              atomic_wchar_t;
143 typedef _Atomic(__int_least8_t)         atomic_int_least8_t;
144 typedef _Atomic(__uint_least8_t)        atomic_uint_least8_t;
145 typedef _Atomic(__int_least16_t)        atomic_int_least16_t;
146 typedef _Atomic(__uint_least16_t)       atomic_uint_least16_t;
147 typedef _Atomic(__int_least32_t)        atomic_int_least32_t;
148 typedef _Atomic(__uint_least32_t)       atomic_uint_least32_t;
149 typedef _Atomic(__int_least64_t)        atomic_int_least64_t;
150 typedef _Atomic(__uint_least64_t)       atomic_uint_least64_t;
151 typedef _Atomic(__int_fast8_t)          atomic_int_fast8_t;
152 typedef _Atomic(__uint_fast8_t)         atomic_uint_fast8_t;
153 typedef _Atomic(__int_fast16_t)         atomic_int_fast16_t;
154 typedef _Atomic(__uint_fast16_t)        atomic_uint_fast16_t;
155 typedef _Atomic(__int_fast32_t)         atomic_int_fast32_t;
156 typedef _Atomic(__uint_fast32_t)        atomic_uint_fast32_t;
157 typedef _Atomic(__int_fast64_t)         atomic_int_fast64_t;
158 typedef _Atomic(__uint_fast64_t)        atomic_uint_fast64_t;
159 typedef _Atomic(__intptr_t)             atomic_intptr_t;
160 typedef _Atomic(__uintptr_t)            atomic_uintptr_t;
161 typedef _Atomic(__size_t)               atomic_size_t;
162 typedef _Atomic(__ptrdiff_t)            atomic_ptrdiff_t;
163 typedef _Atomic(__intmax_t)             atomic_intmax_t;
164 typedef _Atomic(__uintmax_t)            atomic_uintmax_t;
165
166 /*
167  * 7.17.7 Operations on atomic types.
168  */
169
170 /*
171  * Compiler-specific operations.
172  */
173
174 #if defined(__CLANG_ATOMICS)
175 #define atomic_compare_exchange_strong_explicit(object, expected,       \
176     desired, success, failure)                                          \
177         __atomic_compare_exchange_strong(object, expected, desired,     \
178             success, failure)
179 #define atomic_compare_exchange_weak_explicit(object, expected,         \
180     desired, success, failure)                                          \
181         __atomic_compare_exchange_weak(object, expected, desired,       \
182             success, failure)
183 #define atomic_exchange_explicit(object, desired, order)                \
184         __atomic_exchange(object, desired, order)
185 #define atomic_fetch_add_explicit(object, operand, order)               \
186         __atomic_fetch_add(object, operand, order)
187 #define atomic_fetch_and_explicit(object, operand, order)               \
188         __atomic_fetch_and(object, operand, order)
189 #define atomic_fetch_or_explicit(object, operand, order)                \
190         __atomic_fetch_or(object, operand, order)
191 #define atomic_fetch_sub_explicit(object, operand, order)               \
192         __atomic_fetch_sub(object, operand, order)
193 #define atomic_fetch_xor_explicit(object, operand, order)               \
194         __atomic_fetch_xor(object, operand, order)
195 #define atomic_load_explicit(object, order)                             \
196         __atomic_load(object, order)
197 #define atomic_store_explicit(object, desired, order)                   \
198         __atomic_store(object, desired, order)
199 #elif defined(__GNUC_ATOMICS)
200 #define atomic_compare_exchange_strong_explicit(object, expected,       \
201     desired, success, failure) ({                                       \
202         __typeof__((object)->__val) __v;                                \
203         __v =                                                           \
204         __sync_val_compare_and_swap((__typeof(&((object)->__val)))object,\
205                 *expected, desired);                                    \
206         *expected = __v;                                                \
207         (*expected == __v);                                             \
208         })
209
210 #define atomic_compare_exchange_weak_explicit(object, expected,         \
211     desired, success, failure)                                          \
212         atomic_compare_exchange_strong_explicit(object, expected,       \
213                 desired, success, failure)
214 #if __has_builtin(__sync_swap)
215 /* Clang provides a full-barrier atomic exchange - use it if available. */
216 #define atomic_exchange_explicit(object, desired, order)                \
217         __sync_swap(&(object)->value, desired)
218 #else
219 /*
220  * __sync_lock_test_and_set() is only an acquire barrier in theory (although in
221  * practice it is usually a full barrier) so we need an explicit barrier after
222  * it.
223  */
224 #define atomic_exchange_explicit(object, desired, order) ({             \
225         __typeof__((object)->__val) __v;                                \
226         __v = __sync_lock_test_and_set(object, desired);                \
227         __sync_synchronize();                                           \
228         __v;                                                            \
229 })
230 #endif
231 #define atomic_fetch_add_explicit(object, operand, order)               \
232         __sync_fetch_and_add(&(object)->__val, operand)
233 #define atomic_fetch_and_explicit(object, operand, order)               \
234         __sync_fetch_and_and(&(object)->__val, operand)
235 #define atomic_fetch_or_explicit(object, operand, order)                \
236         __sync_fetch_and_or(&(object)->__val, operand)
237 #define atomic_fetch_sub_explicit(object, operand, order)               \
238         __sync_fetch_and_sub(&(object)->__val, operand)
239 #define atomic_fetch_xor_explicit(object, operand, order)               \
240         __sync_fetch_and_xor(&(object)->__val, operand)
241 #define atomic_load_explicit(object, order)                             \
242         __sync_fetch_and_add(&(object)->__val, 0)
243 #define atomic_store_explicit(object, desired, order) do {              \
244         __sync_synchronize();                                           \
245         (object)->__val = (desired);                                    \
246         __sync_synchronize();                                           \
247 } while (0)
248 #endif
249
250 /*
251  * Convenience functions.
252  */
253
254 #define atomic_compare_exchange_strong(object, expected, desired)       \
255         atomic_compare_exchange_strong_explicit(object, expected,       \
256             desired, memory_order_seq_cst, memory_order_seq_cst)
257 #define atomic_compare_exchange_weak(object, expected, desired)         \
258         atomic_compare_exchange_weak_explicit(object, expected,         \
259             desired, memory_order_seq_cst, memory_order_seq_cst)
260 #define atomic_exchange(object, desired)                                \
261         atomic_exchange_explicit(object, desired, memory_order_seq_cst)
262 #define atomic_fetch_add(object, operand)                               \
263         atomic_fetch_add_explicit(object, operand, memory_order_seq_cst)
264 #define atomic_fetch_and(object, operand)                               \
265         atomic_fetch_and_explicit(object, operand, memory_order_seq_cst)
266 #define atomic_fetch_or(object, operand)                                \
267         atomic_fetch_or_explicit(object, operand, memory_order_seq_cst)
268 #define atomic_fetch_sub(object, operand)                               \
269         atomic_fetch_sub_explicit(object, operand, memory_order_seq_cst)
270 #define atomic_fetch_xor(object, operand)                               \
271         atomic_fetch_xor_explicit(object, operand, memory_order_seq_cst)
272 #define atomic_load(object)                                             \
273         atomic_load_explicit(object, memory_order_seq_cst)
274 #define atomic_store(object, desired)                                   \
275         atomic_store_explicit(object, desired, memory_order_seq_cst)
276
277 /*
278  * 7.17.8 Atomic flag type and operations.
279  */
280
281 typedef atomic_bool             atomic_flag;
282
283 #define ATOMIC_FLAG_INIT        ATOMIC_VAR_INIT(0)
284
285 #define atomic_flag_clear_explicit(object, order)                       \
286         atomic_store_explicit(object, 0, order)
287 #define atomic_flag_test_and_set_explicit(object, order)                \
288         atomic_compare_exchange_strong_explicit(object, 0, 1, order, order)
289
290 #define atomic_flag_clear(object)                                       \
291         atomic_flag_clear_explicit(object, 0, memory_order_seq_cst)
292 #define atomic_flag_test_and_set(object)                                \
293         atomic_flag_test_and_set_explicit(object, 0, 1,                 \
294             memory_order_seq_cst, memory_order_seq_cst)
295
296 #endif /* !_STDATOMIC_H_ */