1 #ifndef JEMALLOC_INTERNAL_ATOMIC_GCC_ATOMIC_H
2 #define JEMALLOC_INTERNAL_ATOMIC_GCC_ATOMIC_H
4 #include "jemalloc/internal/assert.h"
6 #define ATOMIC_INIT(...) {__VA_ARGS__}
/*
 * Memory-order tokens used by the jemalloc atomics API; translated to the
 * corresponding __ATOMIC_* builtins by atomic_enum_to_builtin().
 */
typedef enum {
	atomic_memory_order_relaxed,
	atomic_memory_order_acquire,
	atomic_memory_order_release,
	atomic_memory_order_acq_rel,
	atomic_memory_order_seq_cst
} atomic_memory_order_t;
17 atomic_enum_to_builtin(atomic_memory_order_t mo) {
19 case atomic_memory_order_relaxed:
20 return __ATOMIC_RELAXED;
21 case atomic_memory_order_acquire:
22 return __ATOMIC_ACQUIRE;
23 case atomic_memory_order_release:
24 return __ATOMIC_RELEASE;
25 case atomic_memory_order_acq_rel:
26 return __ATOMIC_ACQ_REL;
27 case atomic_memory_order_seq_cst:
28 return __ATOMIC_SEQ_CST;
30 /* Can't happen; the switch is exhaustive. */
35 atomic_fence(atomic_memory_order_t mo) {
36 __atomic_thread_fence(atomic_enum_to_builtin(mo));
/*
 * Generate the type atomic_<short_type>_t (a struct wrapping a plain
 * `type`) together with load, store, exchange, and weak/strong
 * compare-exchange operations over it, implemented via the GCC
 * __atomic builtins.  lg_size is unused in this backend; it exists for
 * signature compatibility with the other atomics implementations.
 */
#define JEMALLOC_GENERATE_ATOMICS(type, short_type,			\
    /* unused */ lg_size)						\
typedef struct {							\
	type repr;							\
} atomic_##short_type##_t;						\
									\
ATOMIC_INLINE type							\
atomic_load_##short_type(const atomic_##short_type##_t *a,		\
    atomic_memory_order_t mo) {						\
	type result;							\
	__atomic_load(&a->repr, &result, atomic_enum_to_builtin(mo));	\
	return result;							\
}									\
									\
ATOMIC_INLINE void							\
atomic_store_##short_type(atomic_##short_type##_t *a, type val,		\
    atomic_memory_order_t mo) {						\
	__atomic_store(&a->repr, &val, atomic_enum_to_builtin(mo));	\
}									\
									\
ATOMIC_INLINE type							\
atomic_exchange_##short_type(atomic_##short_type##_t *a, type val,	\
    atomic_memory_order_t mo) {						\
	type result;							\
	__atomic_exchange(&a->repr, &val, &result,			\
	    atomic_enum_to_builtin(mo));				\
	return result;							\
}									\
									\
ATOMIC_INLINE bool							\
atomic_compare_exchange_weak_##short_type(atomic_##short_type##_t *a,	\
    type *expected, type desired, atomic_memory_order_t success_mo,	\
    atomic_memory_order_t failure_mo) {					\
	return __atomic_compare_exchange(&a->repr, expected, &desired,	\
	    true, atomic_enum_to_builtin(success_mo),			\
	    atomic_enum_to_builtin(failure_mo));			\
}									\
									\
ATOMIC_INLINE bool							\
atomic_compare_exchange_strong_##short_type(atomic_##short_type##_t *a,	\
    type *expected, type desired, atomic_memory_order_t success_mo,	\
    atomic_memory_order_t failure_mo) {					\
	return __atomic_compare_exchange(&a->repr, expected, &desired,	\
	    false,							\
	    atomic_enum_to_builtin(success_mo),				\
	    atomic_enum_to_builtin(failure_mo));			\
}
/*
 * Generate the full atomic API for an integer type: everything from
 * JEMALLOC_GENERATE_ATOMICS, plus the fetch-and-{add,sub,and,or,xor}
 * read-modify-write operations.  Each returns the value the atomic held
 * immediately before the operation, per the __atomic_fetch_* builtins.
 */
#define JEMALLOC_GENERATE_INT_ATOMICS(type, short_type,			\
    /* unused */ lg_size)						\
JEMALLOC_GENERATE_ATOMICS(type, short_type, /* unused */ lg_size)	\
									\
ATOMIC_INLINE type							\
atomic_fetch_add_##short_type(atomic_##short_type##_t *a, type val,	\
    atomic_memory_order_t mo) {						\
	return __atomic_fetch_add(&a->repr, val,			\
	    atomic_enum_to_builtin(mo));				\
}									\
									\
ATOMIC_INLINE type							\
atomic_fetch_sub_##short_type(atomic_##short_type##_t *a, type val,	\
    atomic_memory_order_t mo) {						\
	return __atomic_fetch_sub(&a->repr, val,			\
	    atomic_enum_to_builtin(mo));				\
}									\
									\
ATOMIC_INLINE type							\
atomic_fetch_and_##short_type(atomic_##short_type##_t *a, type val,	\
    atomic_memory_order_t mo) {						\
	return __atomic_fetch_and(&a->repr, val,			\
	    atomic_enum_to_builtin(mo));				\
}									\
									\
ATOMIC_INLINE type							\
atomic_fetch_or_##short_type(atomic_##short_type##_t *a, type val,	\
    atomic_memory_order_t mo) {						\
	return __atomic_fetch_or(&a->repr, val,				\
	    atomic_enum_to_builtin(mo));				\
}									\
									\
ATOMIC_INLINE type							\
atomic_fetch_xor_##short_type(atomic_##short_type##_t *a, type val,	\
    atomic_memory_order_t mo) {						\
	return __atomic_fetch_xor(&a->repr, val,			\
	    atomic_enum_to_builtin(mo));				\
}
127 #endif /* JEMALLOC_INTERNAL_ATOMIC_GCC_ATOMIC_H */