1 /******************************************************************************/
/*
 * Multi-phase jemalloc header: this file is included once per phase, the
 * phase being selected by the JEMALLOC_H_* macros.  The TYPES and STRUCTS
 * phases are empty for the atomic module.
 *
 * NOTE(review): gaps in the embedded line numbering indicate this excerpt
 * lost lines (braces, #endif markers, asm templates) during extraction —
 * verify all edits against the full file.
 */
2 #ifdef JEMALLOC_H_TYPES
4 #endif /* JEMALLOC_H_TYPES */
5 /******************************************************************************/
6 #ifdef JEMALLOC_H_STRUCTS
8 #endif /* JEMALLOC_H_STRUCTS */
9 /******************************************************************************/
10 #ifdef JEMALLOC_H_EXTERNS
/*
 * Atomic reads are expressed as "add zero": the add primitive returns the
 * current value while leaving *p unchanged.
 */
12 #define atomic_read_uint64(p) atomic_add_uint64(p, 0)
13 #define atomic_read_uint32(p) atomic_add_uint32(p, 0)
14 #define atomic_read_z(p) atomic_add_z(p, 0)
15 #define atomic_read_u(p) atomic_add_u(p, 0)
17 #endif /* JEMALLOC_H_EXTERNS */
18 /******************************************************************************/
19 #ifdef JEMALLOC_H_INLINES
/*
 * Prototypes, used when inlining is disabled; the implementations below
 * (also compiled when JEMALLOC_ATOMIC_C_ is defined) then get exactly one
 * out-of-line definition.  Contract for every add/sub below: return the
 * NEW (post-operation) value of *p, never the previous one.
 */
21 #ifndef JEMALLOC_ENABLE_INLINE
22 uint64_t atomic_add_uint64(uint64_t *p, uint64_t x);
23 uint64_t atomic_sub_uint64(uint64_t *p, uint64_t x);
24 uint32_t atomic_add_uint32(uint32_t *p, uint32_t x);
25 uint32_t atomic_sub_uint32(uint32_t *p, uint32_t x);
26 size_t atomic_add_z(size_t *p, size_t x);
27 size_t atomic_sub_z(size_t *p, size_t x);
28 unsigned atomic_add_u(unsigned *p, unsigned x);
29 unsigned atomic_sub_u(unsigned *p, unsigned x);
32 #if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_ATOMIC_C_))
33 /******************************************************************************/
34 /* 64-bit operations. */
/* Only compiled when pointers or ints are natively 64 bits wide. */
35 #if (LG_SIZEOF_PTR == 3 || LG_SIZEOF_INT == 3)
36 # ifdef __GCC_HAVE_SYNC_COMPARE_AND_SWAP_8
/* GCC __sync builtins: __sync_{add,sub}_and_fetch() return the new value. */
37 JEMALLOC_INLINE uint64_t
38 atomic_add_uint64(uint64_t *p, uint64_t x)
41 return (__sync_add_and_fetch(p, x));
44 JEMALLOC_INLINE uint64_t
45 atomic_sub_uint64(uint64_t *p, uint64_t x)
48 return (__sync_sub_and_fetch(p, x));
50 #elif (defined(_MSC_VER))
51 JEMALLOC_INLINE uint64_t
52 atomic_add_uint64(uint64_t *p, uint64_t x)
55 return (InterlockedExchangeAdd64(p, x));
58 JEMALLOC_INLINE uint64_t
59 atomic_sub_uint64(uint64_t *p, uint64_t x)
62 return (InterlockedExchangeAdd64(p, -((int64_t)x)));
64 #elif (defined(JEMALLOC_OSATOMIC))
/* Darwin libkern: OSAtomicAdd64() returns the new (post-add) value. */
65 JEMALLOC_INLINE uint64_t
66 atomic_add_uint64(uint64_t *p, uint64_t x)
69 return (OSAtomicAdd64((int64_t)x, (int64_t *)p));
72 JEMALLOC_INLINE uint64_t
73 atomic_sub_uint64(uint64_t *p, uint64_t x)
/* Subtraction expressed as addition of the negated operand. */
76 return (OSAtomicAdd64(-((int64_t)x), (int64_t *)p));
78 # elif (defined(__amd64__) || defined(__x86_64__))
/*
 * x86-64 inline-asm fallback.  The asm template line is not visible in
 * this excerpt (presumably "lock; xaddq %0, %1;" — confirm against the
 * full file); xadd leaves the OLD value in x, so the missing tail
 * presumably adds x back before returning.
 */
79 JEMALLOC_INLINE uint64_t
80 atomic_add_uint64(uint64_t *p, uint64_t x)
85 : "+r" (x), "=m" (*p) /* Outputs. */
86 : "m" (*p) /* Inputs. */
92 JEMALLOC_INLINE uint64_t
93 atomic_sub_uint64(uint64_t *p, uint64_t x)
/* Negate first so the xadd performs the subtraction as an addition. */
96 x = (uint64_t)(-(int64_t)x);
99 : "+r" (x), "=m" (*p) /* Outputs. */
100 : "m" (*p) /* Inputs. */
105 # elif (defined(JEMALLOC_ATOMIC9))
/*
 * FreeBSD <machine/atomic.h>: atomic_fetchadd_long() returns the OLD
 * value, hence the explicit "+ x" / "- x" to meet the new-value contract.
 */
106 JEMALLOC_INLINE uint64_t
107 atomic_add_uint64(uint64_t *p, uint64_t x)
111 * atomic_fetchadd_64() doesn't exist, but we only ever use this
112 * function on LP64 systems, so atomic_fetchadd_long() will do.
114 assert(sizeof(uint64_t) == sizeof(unsigned long));
116 return (atomic_fetchadd_long(p, (unsigned long)x) + x);
119 JEMALLOC_INLINE uint64_t
120 atomic_sub_uint64(uint64_t *p, uint64_t x)
123 assert(sizeof(uint64_t) == sizeof(unsigned long));
125 return (atomic_fetchadd_long(p, (unsigned long)(-(long)x)) - x);
127 # elif (defined(JE_FORCE_SYNC_COMPARE_AND_SWAP_8))
/* Configure-forced __sync builtins; return the new value. */
128 JEMALLOC_INLINE uint64_t
129 atomic_add_uint64(uint64_t *p, uint64_t x)
132 return (__sync_add_and_fetch(p, x));
135 JEMALLOC_INLINE uint64_t
136 atomic_sub_uint64(uint64_t *p, uint64_t x)
139 return (__sync_sub_and_fetch(p, x));
142 # error "Missing implementation for 64-bit atomic operations"
146 /******************************************************************************/
147 /* 32-bit operations. */
148 #ifdef __GCC_HAVE_SYNC_COMPARE_AND_SWAP_4
/* GCC __sync builtins: return the new value. */
149 JEMALLOC_INLINE uint32_t
150 atomic_add_uint32(uint32_t *p, uint32_t x)
153 return (__sync_add_and_fetch(p, x));
156 JEMALLOC_INLINE uint32_t
157 atomic_sub_uint32(uint32_t *p, uint32_t x)
160 return (__sync_sub_and_fetch(p, x));
162 #elif (defined(_MSC_VER))
163 JEMALLOC_INLINE uint32_t
164 atomic_add_uint32(uint32_t *p, uint32_t x)
167 return (InterlockedExchangeAdd(p, x));
170 JEMALLOC_INLINE uint32_t
171 atomic_sub_uint32(uint32_t *p, uint32_t x)
174 return (InterlockedExchangeAdd(p, -((int32_t)x)));
176 #elif (defined(JEMALLOC_OSATOMIC))
/* Darwin libkern: OSAtomicAdd32() returns the new (post-add) value. */
177 JEMALLOC_INLINE uint32_t
178 atomic_add_uint32(uint32_t *p, uint32_t x)
181 return (OSAtomicAdd32((int32_t)x, (int32_t *)p));
184 JEMALLOC_INLINE uint32_t
185 atomic_sub_uint32(uint32_t *p, uint32_t x)
/* Subtraction expressed as addition of the negated operand. */
188 return (OSAtomicAdd32(-((int32_t)x), (int32_t *)p));
190 #elif (defined(__i386__) || defined(__amd64__) || defined(__x86_64__))
/*
 * x86 inline-asm fallback using lock xaddl.  xadd leaves the OLD value in
 * x; lines appear missing from this excerpt, and the full file presumably
 * adds x back before returning — confirm.
 */
191 JEMALLOC_INLINE uint32_t
192 atomic_add_uint32(uint32_t *p, uint32_t x)
196 "lock; xaddl %0, %1;"
197 : "+r" (x), "=m" (*p) /* Outputs. */
198 : "m" (*p) /* Inputs. */
204 JEMALLOC_INLINE uint32_t
205 atomic_sub_uint32(uint32_t *p, uint32_t x)
/* Negate first so the xadd performs the subtraction as an addition. */
208 x = (uint32_t)(-(int32_t)x);
210 "lock; xaddl %0, %1;"
211 : "+r" (x), "=m" (*p) /* Outputs. */
212 : "m" (*p) /* Inputs. */
217 #elif (defined(JEMALLOC_ATOMIC9))
/*
 * FreeBSD: atomic_fetchadd_32() returns the OLD value, hence the
 * explicit "+ x" / "- x" to meet the new-value contract.
 */
218 JEMALLOC_INLINE uint32_t
219 atomic_add_uint32(uint32_t *p, uint32_t x)
222 return (atomic_fetchadd_32(p, x) + x);
225 JEMALLOC_INLINE uint32_t
226 atomic_sub_uint32(uint32_t *p, uint32_t x)
229 return (atomic_fetchadd_32(p, (uint32_t)(-(int32_t)x)) - x);
231 #elif (defined(JE_FORCE_SYNC_COMPARE_AND_SWAP_4))
/* Configure-forced __sync builtins; return the new value. */
232 JEMALLOC_INLINE uint32_t
233 atomic_add_uint32(uint32_t *p, uint32_t x)
236 return (__sync_add_and_fetch(p, x));
239 JEMALLOC_INLINE uint32_t
240 atomic_sub_uint32(uint32_t *p, uint32_t x)
243 return (__sync_sub_and_fetch(p, x));
246 # error "Missing implementation for 32-bit atomic operations"
249 /******************************************************************************/
250 /* size_t operations. */
/*
 * size_t is dispatched by pointer width (LG_SIZEOF_PTR) to the matching
 * fixed-width primitive.  The pointer casts rely on size_t sharing the
 * representation of uint64_t/uint32_t on the supported platforms.
 */
251 JEMALLOC_INLINE size_t
252 atomic_add_z(size_t *p, size_t x)
255 #if (LG_SIZEOF_PTR == 3)
256 return ((size_t)atomic_add_uint64((uint64_t *)p, (uint64_t)x));
257 #elif (LG_SIZEOF_PTR == 2)
258 return ((size_t)atomic_add_uint32((uint32_t *)p, (uint32_t)x));
/* Subtraction is implemented as addition of the two's-complement negation. */
262 JEMALLOC_INLINE size_t
263 atomic_sub_z(size_t *p, size_t x)
266 #if (LG_SIZEOF_PTR == 3)
267 return ((size_t)atomic_add_uint64((uint64_t *)p,
268 (uint64_t)-((int64_t)x)));
269 #elif (LG_SIZEOF_PTR == 2)
270 return ((size_t)atomic_add_uint32((uint32_t *)p,
271 (uint32_t)-((int32_t)x)));
275 /******************************************************************************/
276 /* unsigned operations. */
/*
 * unsigned is dispatched by int width (LG_SIZEOF_INT) to the matching
 * fixed-width primitive, mirroring the size_t wrappers above.
 */
277 JEMALLOC_INLINE unsigned
278 atomic_add_u(unsigned *p, unsigned x)
281 #if (LG_SIZEOF_INT == 3)
282 return ((unsigned)atomic_add_uint64((uint64_t *)p, (uint64_t)x));
283 #elif (LG_SIZEOF_INT == 2)
284 return ((unsigned)atomic_add_uint32((uint32_t *)p, (uint32_t)x));
/* Subtraction is implemented as addition of the two's-complement negation. */
288 JEMALLOC_INLINE unsigned
289 atomic_sub_u(unsigned *p, unsigned x)
292 #if (LG_SIZEOF_INT == 3)
293 return ((unsigned)atomic_add_uint64((uint64_t *)p,
294 (uint64_t)-((int64_t)x)));
295 #elif (LG_SIZEOF_INT == 2)
296 return ((unsigned)atomic_add_uint32((uint32_t *)p,
297 (uint32_t)-((int32_t)x)));
300 /******************************************************************************/
303 #endif /* JEMALLOC_H_INLINES */
304 /******************************************************************************/