/*
 * Copyright 2009-2015 Samy Al Bahra.
 * Copyright 2011 Devon H. O'Dell <devon.odell@gmail.com>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */
32 #error Do not include this file directly, use ck_pr.h
37 #include <ck_stdint.h>
40 * The following represent supported atomic operations.
41 * These operations may be emulated.
45 /* Minimum requirements for the CK_PR interface are met. */
49 #define CK_PR_LOCK_PREFIX
51 #define CK_PR_LOCK_PREFIX "lock "
55 * Prevent speculative execution in busy-wait loops (P4 <=)
56 * or "predefined delay".
58 CK_CC_INLINE static void
61 __asm__ __volatile__("pause" ::: "memory");
65 #define CK_PR_FENCE(T, I) \
66 CK_CC_INLINE static void \
67 ck_pr_fence_strict_##T(void) \
69 __asm__ __volatile__(I ::: "memory"); \
72 CK_PR_FENCE(atomic, "sfence")
73 CK_PR_FENCE(atomic_store, "sfence")
74 CK_PR_FENCE(atomic_load, "mfence")
75 CK_PR_FENCE(store_atomic, "sfence")
76 CK_PR_FENCE(load_atomic, "mfence")
77 CK_PR_FENCE(load, "lfence")
78 CK_PR_FENCE(load_store, "mfence")
79 CK_PR_FENCE(store, "sfence")
80 CK_PR_FENCE(store_load, "mfence")
81 CK_PR_FENCE(memory, "mfence")
82 CK_PR_FENCE(release, "mfence")
83 CK_PR_FENCE(acquire, "mfence")
84 CK_PR_FENCE(acqrel, "mfence")
85 CK_PR_FENCE(lock, "mfence")
86 CK_PR_FENCE(unlock, "mfence")
91 * Atomic fetch-and-store operations.
93 #define CK_PR_FAS(S, M, T, C, I) \
94 CK_CC_INLINE static T \
95 ck_pr_fas_##S(M *target, T v) \
97 __asm__ __volatile__(I " %0, %1" \
98 : "+m" (*(C *)target), \
105 CK_PR_FAS(ptr, void, void *, char, "xchgl")
107 #define CK_PR_FAS_S(S, T, I) CK_PR_FAS(S, T, T, T, I)
109 CK_PR_FAS_S(char, char, "xchgb")
110 CK_PR_FAS_S(uint, unsigned int, "xchgl")
111 CK_PR_FAS_S(int, int, "xchgl")
112 CK_PR_FAS_S(32, uint32_t, "xchgl")
113 CK_PR_FAS_S(16, uint16_t, "xchgw")
114 CK_PR_FAS_S(8, uint8_t, "xchgb")
119 #define CK_PR_LOAD(S, M, T, C, I) \
120 CK_CC_INLINE static T \
121 ck_pr_md_load_##S(const M *target) \
124 __asm__ __volatile__(I " %1, %0" \
126 : "m" (*(const C *)target) \
131 CK_PR_LOAD(ptr, void, void *, char, "movl")
133 #define CK_PR_LOAD_S(S, T, I) CK_PR_LOAD(S, T, T, T, I)
135 CK_PR_LOAD_S(char, char, "movb")
136 CK_PR_LOAD_S(uint, unsigned int, "movl")
137 CK_PR_LOAD_S(int, int, "movl")
138 CK_PR_LOAD_S(32, uint32_t, "movl")
139 CK_PR_LOAD_S(16, uint16_t, "movw")
140 CK_PR_LOAD_S(8, uint8_t, "movb")
145 #define CK_PR_STORE(S, M, T, C, I) \
146 CK_CC_INLINE static void \
147 ck_pr_md_store_##S(M *target, T v) \
149 __asm__ __volatile__(I " %1, %0" \
150 : "=m" (*(C *)target) \
151 : CK_CC_IMM "q" (v) \
156 CK_PR_STORE(ptr, void, const void *, char, "movl")
158 #define CK_PR_STORE_S(S, T, I) CK_PR_STORE(S, T, T, T, I)
160 CK_PR_STORE_S(char, char, "movb")
161 CK_PR_STORE_S(uint, unsigned int, "movl")
162 CK_PR_STORE_S(int, int, "movl")
163 CK_PR_STORE_S(32, uint32_t, "movl")
164 CK_PR_STORE_S(16, uint16_t, "movw")
165 CK_PR_STORE_S(8, uint8_t, "movb")
171 * Atomic fetch-and-add operations.
173 #define CK_PR_FAA(S, M, T, C, I) \
174 CK_CC_INLINE static T \
175 ck_pr_faa_##S(M *target, T d) \
177 __asm__ __volatile__(CK_PR_LOCK_PREFIX I " %1, %0" \
178 : "+m" (*(C *)target), \
185 CK_PR_FAA(ptr, void, uintptr_t, char, "xaddl")
187 #define CK_PR_FAA_S(S, T, I) CK_PR_FAA(S, T, T, T, I)
189 CK_PR_FAA_S(char, char, "xaddb")
190 CK_PR_FAA_S(uint, unsigned int, "xaddl")
191 CK_PR_FAA_S(int, int, "xaddl")
192 CK_PR_FAA_S(32, uint32_t, "xaddl")
193 CK_PR_FAA_S(16, uint16_t, "xaddw")
194 CK_PR_FAA_S(8, uint8_t, "xaddb")
200 * Atomic store-only unary operations.
202 #define CK_PR_UNARY(K, S, T, C, I) \
203 CK_PR_UNARY_R(K, S, T, C, I) \
204 CK_PR_UNARY_V(K, S, T, C, I)
206 #define CK_PR_UNARY_R(K, S, T, C, I) \
207 CK_CC_INLINE static void \
208 ck_pr_##K##_##S(T *target) \
210 __asm__ __volatile__(CK_PR_LOCK_PREFIX I " %0" \
211 : "+m" (*(C *)target) \
217 #define CK_PR_UNARY_V(K, S, T, C, I) \
218 CK_CC_INLINE static void \
219 ck_pr_##K##_##S##_zero(T *target, bool *r) \
221 __asm__ __volatile__(CK_PR_LOCK_PREFIX I " %0; setz %1" \
222 : "+m" (*(C *)target), \
230 #define CK_PR_UNARY_S(K, S, T, I) CK_PR_UNARY(K, S, T, T, I)
232 #define CK_PR_GENERATE(K) \
233 CK_PR_UNARY(K, ptr, void, char, #K "l") \
234 CK_PR_UNARY_S(K, char, char, #K "b") \
235 CK_PR_UNARY_S(K, int, int, #K "l") \
236 CK_PR_UNARY_S(K, uint, unsigned int, #K "l") \
237 CK_PR_UNARY_S(K, 32, uint32_t, #K "l") \
238 CK_PR_UNARY_S(K, 16, uint16_t, #K "w") \
239 CK_PR_UNARY_S(K, 8, uint8_t, #K "b")
245 /* not does not affect condition flags. */
247 #define CK_PR_UNARY_V(a, b, c, d, e)
250 #undef CK_PR_GENERATE
257 * Atomic store-only binary operations.
259 #define CK_PR_BINARY(K, S, M, T, C, I) \
260 CK_CC_INLINE static void \
261 ck_pr_##K##_##S(M *target, T d) \
263 __asm__ __volatile__(CK_PR_LOCK_PREFIX I " %1, %0" \
264 : "+m" (*(C *)target) \
265 : CK_CC_IMM "q" (d) \
270 #define CK_PR_BINARY_S(K, S, T, I) CK_PR_BINARY(K, S, T, T, T, I)
272 #define CK_PR_GENERATE(K) \
273 CK_PR_BINARY(K, ptr, void, uintptr_t, char, #K "l") \
274 CK_PR_BINARY_S(K, char, char, #K "b") \
275 CK_PR_BINARY_S(K, int, int, #K "l") \
276 CK_PR_BINARY_S(K, uint, unsigned int, #K "l") \
277 CK_PR_BINARY_S(K, 32, uint32_t, #K "l") \
278 CK_PR_BINARY_S(K, 16, uint16_t, #K "w") \
279 CK_PR_BINARY_S(K, 8, uint8_t, #K "b")
287 #undef CK_PR_GENERATE
288 #undef CK_PR_BINARY_S
292 * Atomic compare and swap.
294 #define CK_PR_CAS(S, M, T, C, I) \
295 CK_CC_INLINE static bool \
296 ck_pr_cas_##S(M *target, T compare, T set) \
299 __asm__ __volatile__(CK_PR_LOCK_PREFIX I " %2, %0; setz %1" \
300 : "+m" (*(C *)target), \
308 CK_PR_CAS(ptr, void, void *, char, "cmpxchgl")
310 #define CK_PR_CAS_S(S, T, I) CK_PR_CAS(S, T, T, T, I)
312 CK_PR_CAS_S(char, char, "cmpxchgb")
313 CK_PR_CAS_S(int, int, "cmpxchgl")
314 CK_PR_CAS_S(uint, unsigned int, "cmpxchgl")
315 CK_PR_CAS_S(32, uint32_t, "cmpxchgl")
316 CK_PR_CAS_S(16, uint16_t, "cmpxchgw")
317 CK_PR_CAS_S(8, uint8_t, "cmpxchgb")
323 * Compare and swap, set *v to old value of target.
325 #define CK_PR_CAS_O(S, M, T, C, I, R) \
326 CK_CC_INLINE static bool \
327 ck_pr_cas_##S##_value(M *target, T compare, T set, M *v) \
330 __asm__ __volatile__(CK_PR_LOCK_PREFIX "cmpxchg" I " %3, %0;" \
331 "mov %% " R ", %2;" \
333 : "+m" (*(C *)target), \
342 CK_PR_CAS_O(ptr, void, void *, char, "l", "eax")
344 #define CK_PR_CAS_O_S(S, T, I, R) \
345 CK_PR_CAS_O(S, T, T, T, I, R)
347 CK_PR_CAS_O_S(char, char, "b", "al")
348 CK_PR_CAS_O_S(int, int, "l", "eax")
349 CK_PR_CAS_O_S(uint, unsigned int, "l", "eax")
350 CK_PR_CAS_O_S(32, uint32_t, "l", "eax")
351 CK_PR_CAS_O_S(16, uint16_t, "w", "ax")
352 CK_PR_CAS_O_S(8, uint8_t, "b", "al")
358 * Atomic bit test operations.
360 #define CK_PR_BT(K, S, T, P, C, I) \
361 CK_CC_INLINE static bool \
362 ck_pr_##K##_##S(T *target, unsigned int b) \
365 __asm__ __volatile__(CK_PR_LOCK_PREFIX I "; setc %1" \
366 : "+m" (*(C *)target), \
373 #define CK_PR_BT_S(K, S, T, I) CK_PR_BT(K, S, T, T, T, I)
375 #define CK_PR_GENERATE(K) \
376 CK_PR_BT(K, ptr, void, uint32_t, char, #K "l %2, %0") \
377 CK_PR_BT_S(K, uint, unsigned int, #K "l %2, %0") \
378 CK_PR_BT_S(K, int, int, #K "l %2, %0") \
379 CK_PR_BT_S(K, 32, uint32_t, #K "l %2, %0") \
380 CK_PR_BT_S(K, 16, uint16_t, #K "w %w2, %0")
386 #undef CK_PR_GENERATE
389 #endif /* CK_PR_X86_H */