]> CyberLeo.Net >> Repos - FreeBSD/FreeBSD.git/blob - sys/powerpc/include/atomic.h
This commit was generated by cvs2svn to compensate for changes in r172683,
[FreeBSD/FreeBSD.git] / sys / powerpc / include / atomic.h
1 /*-
2  * Copyright (c) 2001 Benno Rice
3  * Copyright (c) 2001 David E. O'Brien
4  * Copyright (c) 1998 Doug Rabson
5  * All rights reserved.
6  *
7  * Redistribution and use in source and binary forms, with or without
8  * modification, are permitted provided that the following conditions
9  * are met:
10  * 1. Redistributions of source code must retain the above copyright
11  *    notice, this list of conditions and the following disclaimer.
12  * 2. Redistributions in binary form must reproduce the above copyright
13  *    notice, this list of conditions and the following disclaimer in the
14  *    documentation and/or other materials provided with the distribution.
15  *
16  * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
17  * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
18  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
19  * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
20  * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
21  * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
22  * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
23  * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
24  * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
25  * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
26  * SUCH DAMAGE.
27  *
28  * $FreeBSD$
29  */
30
31 #ifndef _MACHINE_ATOMIC_H_
32 #define _MACHINE_ATOMIC_H_
33
34 #include <machine/cpufunc.h>
35
36 #ifndef _SYS_CDEFS_H_
37 #error this file needs sys/cdefs.h as a prerequisite
38 #endif
39
40 /*
41  * Various simple arithmetic on memory which is atomic in the presence
42  * of interrupts and SMP safe.
43  */
44
/*
 * 8- and 16-bit operations are declared only; there is no lwarx/stwcx.
 * sequence for sub-word accesses here.
 * NOTE(review): the implementations are not visible in this file —
 * presumably provided out of line elsewhere in the powerpc code; confirm.
 */
void	atomic_set_8(volatile uint8_t *, uint8_t);
void	atomic_clear_8(volatile uint8_t *, uint8_t);
void	atomic_add_8(volatile uint8_t *, uint8_t);
void	atomic_subtract_8(volatile uint8_t *, uint8_t);

void	atomic_set_16(volatile uint16_t *, uint16_t);
void	atomic_clear_16(volatile uint16_t *, uint16_t);
void	atomic_add_16(volatile uint16_t *, uint16_t);
void	atomic_subtract_16(volatile uint16_t *, uint16_t);
54
/*
 * Atomically OR 'v' into '*p' (*p |= v) using a load-reserved /
 * store-conditional (lwarx/stwcx.) retry loop.  No memory barrier is
 * implied; use the _acq/_rel variants below for ordering.
 */
static __inline void
atomic_set_32(volatile uint32_t *p, uint32_t v)
{
	uint32_t temp;

#ifdef __GNUCLIKE_ASM
	__asm __volatile (
		"1:\tlwarx %0, 0, %2\n\t"	/* load old value */
		"or %0, %3, %0\n\t"		/* calculate new value */
		"stwcx. %0, 0, %2\n\t"		/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		: "=&r" (temp), "=m" (*p)
		: "r" (p), "r" (v), "m" (*p)
		: "cc", "memory");
#endif
	/* NOTE(review): with __GNUCLIKE_ASM undefined this is a no-op. */
}
71
/*
 * Atomically clear the bits of 'v' in '*p' (*p &= ~v) via a
 * lwarx/stwcx. retry loop ("andc" = AND with complement).  No barrier.
 */
static __inline void
atomic_clear_32(volatile uint32_t *p, uint32_t v)
{
	uint32_t temp;

#ifdef __GNUCLIKE_ASM
	__asm __volatile (
		"1:\tlwarx %0, 0, %2\n\t"	/* load old value */
		"andc %0, %0, %3\n\t"		/* calculate new value */
		"stwcx. %0, 0, %2\n\t"		/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		: "=&r" (temp), "=m" (*p)
		: "r" (p), "r" (v), "m" (*p)
		: "cc", "memory");
#endif
	/* NOTE(review): with __GNUCLIKE_ASM undefined this is a no-op. */
}
88
/*
 * Atomically add 'v' to '*p' (*p += v) via a lwarx/stwcx. retry loop.
 * No barrier.
 */
static __inline void
atomic_add_32(volatile uint32_t *p, uint32_t v)
{
	uint32_t temp;

#ifdef __GNUCLIKE_ASM
	__asm __volatile (
		"1:\tlwarx %0, 0, %2\n\t"	/* load old value */
		"add %0, %3, %0\n\t"		/* calculate new value */
		"stwcx. %0, 0, %2\n\t"		/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		: "=&r" (temp), "=m" (*p)
		: "r" (p), "r" (v), "m" (*p)
		: "cc", "memory");
#endif
	/* NOTE(review): with __GNUCLIKE_ASM undefined this is a no-op. */
}
105
/*
 * Atomically subtract 'v' from '*p' (*p -= v) via a lwarx/stwcx. retry
 * loop ("subf a, b, c" computes c - b).  No barrier.
 */
static __inline void
atomic_subtract_32(volatile uint32_t *p, uint32_t v)
{
	uint32_t temp;

#ifdef __GNUCLIKE_ASM
	__asm __volatile (
		"1:\tlwarx %0, 0, %2\n\t"	/* load old value */
		"subf %0, %3, %0\n\t"		/* calculate new value */
		"stwcx. %0, 0, %2\n\t"		/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		: "=&r" (temp), "=m" (*p)
		: "r" (p), "r" (v), "m" (*p)
		: "cc", "memory");
#endif
	/* NOTE(review): with __GNUCLIKE_ASM undefined this is a no-op. */
}
122
/*
 * Atomically read '*addr' and replace it with zero, returning the old
 * value.  A leading "sync" drains prior stores before the
 * lwarx/stwcx. exchange loop.
 */
static __inline uint32_t
atomic_readandclear_32(volatile uint32_t *addr)
{
	uint32_t result,temp;

#ifdef __GNUCLIKE_ASM
	__asm __volatile (
		"\tsync\n"			/* drain writes */
		"1:\tlwarx %0, 0, %3\n\t"	/* load old value */
		"li %1, 0\n\t"			/* load new value */
		"stwcx. %1, 0, %3\n\t"		/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		: "=&r"(result), "=&r"(temp), "=m" (*addr)
		: "r" (addr), "m" (*addr)
		: "cc", "memory");
#endif
	/*
	 * NOTE(review): if __GNUCLIKE_ASM is not defined, 'result' is
	 * returned uninitialized.
	 */
	return (result);
}
142
#if 0

/*
 * So far I haven't found a way to implement atomic 64-bit ops on the
 * 32-bit PowerPC without involving major headaches.  If anyone has
 * any ideas, please let me know. =)
 *	- benno@FreeBSD.org
 */

/*
 * NOTE(review): everything in this #if 0 region is compiled out.  The
 * __asm statements below have EMPTY instruction templates (only operand
 * constraints are written) — they are non-functional placeholders and
 * must be given real instruction sequences before this region can ever
 * be enabled.
 */

static __inline void
atomic_set_64(volatile u_int64_t *p, u_int64_t v)
{
	u_int64_t temp;

	__asm __volatile (
		: "=&r" (temp), "=r" (*p)
		: "r" (*p), "r" (v)
		: "memory");
}

static __inline void
atomic_clear_64(volatile u_int64_t *p, u_int64_t v)
{
	u_int64_t temp;

	__asm __volatile (
		: "=&r" (temp), "=r" (*p)
		: "r" (*p), "r" (v)
		: "memory");
}

static __inline void
atomic_add_64(volatile u_int64_t *p, u_int64_t v)
{
	u_int64_t temp;

	__asm __volatile (
		: "=&r" (temp), "=r" (*p)
		: "r" (*p), "r" (v)
		: "memory");
}

static __inline void
atomic_subtract_64(volatile u_int64_t *p, u_int64_t v)
{
	u_int64_t temp;

	__asm __volatile (
		: "=&r" (temp), "=r" (*p)
		: "r" (*p), "r" (v)
		: "memory");
}

static __inline u_int64_t
atomic_readandclear_64(volatile u_int64_t *addr)
{
	u_int64_t result,temp;

	__asm __volatile (
		: "=&r"(result), "=&r"(temp), "=r" (*addr)
		: "r"(*addr)
		: "memory");

	return result;
}

#endif /* 0 */
210
/* Type-named aliases onto the width-named primitives above. */
#define	atomic_set_char			atomic_set_8
#define	atomic_clear_char		atomic_clear_8
#define	atomic_add_char			atomic_add_8
#define	atomic_subtract_char		atomic_subtract_8

#define	atomic_set_short		atomic_set_16
#define	atomic_clear_short		atomic_clear_16
#define	atomic_add_short		atomic_add_16
#define	atomic_subtract_short		atomic_subtract_16

#define	atomic_set_int			atomic_set_32
#define	atomic_clear_int		atomic_clear_32
#define	atomic_add_int			atomic_add_32
#define	atomic_subtract_int		atomic_subtract_32
#define	atomic_readandclear_int		atomic_readandclear_32
226
/*
 * 'long' operations map onto the 32-bit primitives (these mappings
 * assume long is 32 bits wide on this platform).  The function-like
 * macros parenthesize their arguments so that expression arguments
 * (e.g. "base + off") expand correctly, and cast through
 * "volatile uint32_t *" so the volatile qualifier is not dropped.
 */
#define	atomic_set_long			atomic_set_32
#define	atomic_clear_long		atomic_clear_32
#define	atomic_add_long(p, v)		atomic_add_32((volatile uint32_t *)(p), (uint32_t)(v))
#define	atomic_subtract_long(p, v)	atomic_subtract_32((volatile uint32_t *)(p), (uint32_t)(v))
#define	atomic_readandclear_long	atomic_readandclear_32
232
/* Pointer-width aliases (32-bit pointers on this platform). */
#define	atomic_set_ptr			atomic_set_32
#define	atomic_clear_ptr		atomic_clear_32
#define	atomic_add_ptr			atomic_add_32
#define	atomic_subtract_ptr		atomic_subtract_32
237
#if 0

/* See above: disabled until the 64-bit primitives exist. */

#define	atomic_set_long_long		atomic_set_64
#define	atomic_clear_long_long		atomic_clear_64
#define	atomic_add_long_long		atomic_add_64
#define	atomic_subtract_long_long	atomic_subtract_64
#define	atomic_readandclear_long_long	atomic_readandclear_64

#endif /* 0 */
249
/*
 * Generate acquire/release variants of an operation for both the
 * width-named (_8/_16/_32) and type-named (_char/_short/_int) spellings:
 * acquire = operation then powerpc_mb(); release = powerpc_mb() then
 * operation.  powerpc_mb() supplies the memory barrier.
 */
#define ATOMIC_ACQ_REL(NAME, WIDTH, TYPE)				\
static __inline void							\
atomic_##NAME##_acq_##WIDTH(volatile u_int##WIDTH##_t *p, u_int##WIDTH##_t v) \
{									\
	atomic_##NAME##_##WIDTH(p, v);					\
	powerpc_mb();							\
}									\
									\
static __inline void							\
atomic_##NAME##_rel_##WIDTH(volatile u_int##WIDTH##_t *p, u_int##WIDTH##_t v) \
{									\
	powerpc_mb();							\
	atomic_##NAME##_##WIDTH(p, v);					\
}									\
									\
static __inline void							\
atomic_##NAME##_acq_##TYPE(volatile u_int##WIDTH##_t *p, u_int##WIDTH##_t v) \
{									\
	atomic_##NAME##_##WIDTH(p, v);					\
	powerpc_mb();							\
}									\
									\
static __inline void							\
atomic_##NAME##_rel_##TYPE(volatile u_int##WIDTH##_t *p, u_int##WIDTH##_t v) \
{									\
	powerpc_mb();							\
	atomic_##NAME##_##WIDTH(p, v);					\
}
278
/* Instantiate acq/rel variants for every op at every width. */
ATOMIC_ACQ_REL(set, 8, char)
ATOMIC_ACQ_REL(clear, 8, char)
ATOMIC_ACQ_REL(add, 8, char)
ATOMIC_ACQ_REL(subtract, 8, char)
ATOMIC_ACQ_REL(set, 16, short)
ATOMIC_ACQ_REL(clear, 16, short)
ATOMIC_ACQ_REL(add, 16, short)
ATOMIC_ACQ_REL(subtract, 16, short)
ATOMIC_ACQ_REL(set, 32, int)
ATOMIC_ACQ_REL(clear, 32, int)
ATOMIC_ACQ_REL(add, 32, int)
ATOMIC_ACQ_REL(subtract, 32, int)
291
/* long- and pointer-width acq/rel aliases onto the 32-bit variants. */
#define	atomic_set_acq_long		atomic_set_acq_32
#define	atomic_set_rel_long		atomic_set_rel_32
#define	atomic_clear_acq_long		atomic_clear_acq_32
#define	atomic_clear_rel_long		atomic_clear_rel_32
#define	atomic_add_acq_long		atomic_add_acq_32
#define	atomic_add_rel_long		atomic_add_rel_32
#define	atomic_subtract_acq_long	atomic_subtract_acq_32
#define	atomic_subtract_rel_long	atomic_subtract_rel_32

#define	atomic_set_acq_ptr		atomic_set_acq_32
#define	atomic_set_rel_ptr		atomic_set_rel_32
#define	atomic_clear_acq_ptr		atomic_clear_acq_32
#define	atomic_clear_rel_ptr		atomic_clear_rel_32
#define	atomic_add_acq_ptr		atomic_add_acq_32
#define	atomic_add_rel_ptr		atomic_add_rel_32
#define	atomic_subtract_acq_ptr		atomic_subtract_acq_32
#define	atomic_subtract_rel_ptr		atomic_subtract_rel_32

#undef ATOMIC_ACQ_REL
311
/*
 * We assume that a = b will do atomic loads and stores.
 *
 * Generates load-acquire (plain load, then powerpc_mb()) and
 * store-release (powerpc_mb(), then plain store) for both the
 * width-named and type-named spellings.
 */
#define ATOMIC_STORE_LOAD(TYPE, WIDTH)				\
static __inline u_##TYPE					\
atomic_load_acq_##WIDTH(volatile u_##TYPE *p)			\
{								\
	u_##TYPE v;						\
								\
	v = *p;							\
	powerpc_mb();						\
	return (v);						\
}								\
								\
static __inline void						\
atomic_store_rel_##WIDTH(volatile u_##TYPE *p, u_##TYPE v)	\
{								\
	powerpc_mb();						\
	*p = v;							\
}								\
								\
static __inline u_##TYPE					\
atomic_load_acq_##TYPE(volatile u_##TYPE *p)			\
{								\
	u_##TYPE v;						\
								\
	v = *p;							\
	powerpc_mb();						\
	return (v);						\
}								\
								\
static __inline void						\
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)	\
{								\
	powerpc_mb();						\
	*p = v;							\
}
349
/* Instantiate load-acquire/store-release at each supported width. */
ATOMIC_STORE_LOAD(char,		8)
ATOMIC_STORE_LOAD(short,	16)
ATOMIC_STORE_LOAD(int,		32)

#define	atomic_load_acq_long	atomic_load_acq_32
#define	atomic_store_rel_long	atomic_store_rel_32

#define	atomic_load_acq_ptr	atomic_load_acq_32
#define	atomic_store_rel_ptr	atomic_store_rel_32

#undef ATOMIC_STORE_LOAD
361
/*
 * Atomically compare the value stored at *p with cmpval and if the
 * two values are equal, update the value of *p with newval. Returns
 * zero if the compare failed, nonzero otherwise.
 *
 * On compare failure the code still executes a stwcx. to clear the
 * outstanding reservation (per the inline comment, needed on 74xx);
 * its success or failure is irrelevant since %0 holds the value just
 * loaded.  No memory barrier is implied.
 */
static __inline uint32_t
atomic_cmpset_32(volatile uint32_t* p, uint32_t cmpval, uint32_t newval)
{
	uint32_t	ret;

#ifdef __GNUCLIKE_ASM
	__asm __volatile (
		"1:\tlwarx %0, 0, %2\n\t"	/* load old value */
		"cmplw %3, %0\n\t"		/* compare */
		"bne 2f\n\t"			/* exit if not equal */
		"stwcx. %4, 0, %2\n\t"		/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		"li %0, 1\n\t"			/* success - retval = 1 */
		"b 3f\n\t"			/* we've succeeded */
		"2:\n\t"
		"stwcx. %0, 0, %2\n\t"		/* clear reservation (74xx) */
		"li %0, 0\n\t"			/* failure - retval = 0 */
		"3:\n\t"
		: "=&r" (ret), "=m" (*p)
		: "r" (p), "r" (cmpval), "r" (newval), "m" (*p)
		: "cc", "memory");
#endif
	/*
	 * NOTE(review): if __GNUCLIKE_ASM is not defined, 'ret' is
	 * returned uninitialized.
	 */
	return (ret);
}
392
#if 0

/*
 * Atomically compare the value stored at *p with cmpval and if the
 * two values are equal, update the value of *p with newval. Returns
 * zero if the compare failed, nonzero otherwise.
 *
 * NOTE(review): compiled out, and the __asm template below is empty —
 * placeholder only; see the 64-bit block earlier in this file.
 */
static __inline u_int64_t
atomic_cmpset_64(volatile u_int64_t* p, u_int64_t cmpval, u_int64_t newval)
{
	u_int64_t ret;

	__asm __volatile (
		: "=&r" (ret), "=r" (*p)
		: "r" (cmpval), "r" (newval), "r" (*p)
		: "memory");

	return ret;
}

#endif /* 0 */
414
/* cmpset aliases: int, long and pointers all map onto the 32-bit op. */
#define	atomic_cmpset_int	atomic_cmpset_32
#define	atomic_cmpset_long	atomic_cmpset_32

#define	atomic_cmpset_ptr(dst, old, new)	\
    atomic_cmpset_32((volatile u_int *)(dst), (u_int)(old), (u_int)(new))

#if 0
#define	atomic_cmpset_long_long	atomic_cmpset_64
#endif /* 0 */
424
/*
 * atomic_cmpset_32 with acquire semantics: the compare-and-set runs
 * first, then powerpc_mb() orders it before subsequent accesses.
 * Returns the result of atomic_cmpset_32 (nonzero on success).
 */
static __inline uint32_t
atomic_cmpset_acq_32(volatile uint32_t *p, uint32_t cmpval, uint32_t newval)
{
	uint32_t retval;	/* was 'int'; match the return type */

	retval = atomic_cmpset_32(p, cmpval, newval);
	powerpc_mb();
	return (retval);
}
434
/*
 * atomic_cmpset_32 with release semantics: powerpc_mb() orders all
 * prior accesses before the compare-and-set is attempted.
 * Returns the result of atomic_cmpset_32 (nonzero on success).
 */
static __inline uint32_t
atomic_cmpset_rel_32(volatile uint32_t *p, uint32_t cmpval, uint32_t newval)
{
	uint32_t ret;

	powerpc_mb();		/* release barrier before the update */
	ret = atomic_cmpset_32(p, cmpval, newval);
	return (ret);
}
441
/* acq/rel cmpset aliases for int, long and pointer types. */
#define	atomic_cmpset_acq_int	atomic_cmpset_acq_32
#define	atomic_cmpset_rel_int	atomic_cmpset_rel_32
#define	atomic_cmpset_acq_long	atomic_cmpset_acq_32
#define	atomic_cmpset_rel_long	atomic_cmpset_rel_32

#define	atomic_cmpset_acq_ptr(dst, old, new)	\
    atomic_cmpset_acq_32((volatile u_int *)(dst), (u_int)(old), (u_int)(new))
#define	atomic_cmpset_rel_ptr(dst, old, new)	\
    atomic_cmpset_rel_32((volatile u_int *)(dst), (u_int)(old), (u_int)(new))
451
/*
 * Atomically add 'v' to '*p' and return the PREVIOUS value of *p.
 * Built as a compare-and-set retry loop: reread *p until the CAS
 * installs old + v without interference.
 */
static __inline uint32_t
atomic_fetchadd_32(volatile uint32_t *p, uint32_t v)
{
	uint32_t old;

	for (;;) {
		old = *p;
		if (atomic_cmpset_32(p, old, old + v))
			break;
	}
	return (old);
}
462
#define	atomic_fetchadd_int	atomic_fetchadd_32

#endif /* ! _MACHINE_ATOMIC_H_ */