]> CyberLeo.Net >> Repos - FreeBSD/FreeBSD.git/blob - sys/powerpc/include/atomic.h
This commit was generated by cvs2svn to compensate for changes in r174993,
[FreeBSD/FreeBSD.git] / sys / powerpc / include / atomic.h
1 /*-
2  * Copyright (c) 2001 Benno Rice
3  * Copyright (c) 2001 David E. O'Brien
4  * Copyright (c) 1998 Doug Rabson
5  * All rights reserved.
6  *
7  * Redistribution and use in source and binary forms, with or without
8  * modification, are permitted provided that the following conditions
9  * are met:
10  * 1. Redistributions of source code must retain the above copyright
11  *    notice, this list of conditions and the following disclaimer.
12  * 2. Redistributions in binary form must reproduce the above copyright
13  *    notice, this list of conditions and the following disclaimer in the
14  *    documentation and/or other materials provided with the distribution.
15  *
16  * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
17  * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
18  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
19  * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
20  * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
21  * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
22  * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
23  * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
24  * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
25  * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
26  * SUCH DAMAGE.
27  *
28  * $FreeBSD$
29  */
30
31 #ifndef _MACHINE_ATOMIC_H_
32 #define _MACHINE_ATOMIC_H_
33
34 #include <machine/cpufunc.h>
35
36 #ifndef _SYS_CDEFS_H_
37 #error this file needs sys/cdefs.h as a prerequisite
38 #endif
39
/*
 * Various simple arithmetic on memory which is atomic in the presence
 * of interrupts and SMP safe.
 */

/*
 * The 8- and 16-bit operations are only declared here, not implemented
 * inline in this file — presumably defined out of line elsewhere.
 * TODO(review): confirm where the definitions live.
 */
void	atomic_set_8(volatile uint8_t *, uint8_t);
void	atomic_clear_8(volatile uint8_t *, uint8_t);
void	atomic_add_8(volatile uint8_t *, uint8_t);
void	atomic_subtract_8(volatile uint8_t *, uint8_t);

void	atomic_set_16(volatile uint16_t *, uint16_t);
void	atomic_clear_16(volatile uint16_t *, uint16_t);
void	atomic_add_16(volatile uint16_t *, uint16_t);
void	atomic_subtract_16(volatile uint16_t *, uint16_t);
54
/*
 * Atomically OR the bits of 'v' into '*p' (*p |= v).
 *
 * Uses a lwarx/stwcx. (load-reserve/store-conditional) loop: if another
 * CPU's store invalidates the reservation, stwcx. fails and we retry.
 */
static __inline void
atomic_set_32(volatile uint32_t *p, uint32_t v)
{
	uint32_t temp;

#ifdef __GNUCLIKE_ASM
	__asm __volatile (
		"1:\tlwarx %0, 0, %2\n\t"	/* load old value */
		"or %0, %3, %0\n\t"		/* calculate new value */
		"stwcx. %0, 0, %2\n\t"		/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		: "=&r" (temp), "=m" (*p)
		: "r" (p), "r" (v), "m" (*p)
		: "cc", "memory");
	/* NOTE(review): with !__GNUCLIKE_ASM this silently does nothing. */
#endif
}
71
/*
 * Atomically clear the bits of 'v' in '*p' (*p &= ~v), via the same
 * lwarx/stwcx. retry loop; 'andc' computes old & ~v in one instruction.
 */
static __inline void
atomic_clear_32(volatile uint32_t *p, uint32_t v)
{
	uint32_t temp;

#ifdef __GNUCLIKE_ASM
	__asm __volatile (
		"1:\tlwarx %0, 0, %2\n\t"	/* load old value */
		"andc %0, %0, %3\n\t"		/* calculate new value */
		"stwcx. %0, 0, %2\n\t"		/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		: "=&r" (temp), "=m" (*p)
		: "r" (p), "r" (v), "m" (*p)
		: "cc", "memory");
	/* NOTE(review): with !__GNUCLIKE_ASM this silently does nothing. */
#endif
}
88
/*
 * Atomically add 'v' to '*p' (*p += v) via a lwarx/stwcx. retry loop.
 */
static __inline void
atomic_add_32(volatile uint32_t *p, uint32_t v)
{
	uint32_t temp;

#ifdef __GNUCLIKE_ASM
	__asm __volatile (
		"1:\tlwarx %0, 0, %2\n\t"	/* load old value */
		"add %0, %3, %0\n\t"		/* calculate new value */
		"stwcx. %0, 0, %2\n\t"		/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		: "=&r" (temp), "=m" (*p)
		: "r" (p), "r" (v), "m" (*p)
		: "cc", "memory");
	/* NOTE(review): with !__GNUCLIKE_ASM this silently does nothing. */
#endif
}
105
/*
 * Atomically subtract 'v' from '*p' (*p -= v) via a lwarx/stwcx. retry
 * loop; 'subf %0, %3, %0' computes %0 - %3 (i.e. old - v).
 */
static __inline void
atomic_subtract_32(volatile uint32_t *p, uint32_t v)
{
	uint32_t temp;

#ifdef __GNUCLIKE_ASM
	__asm __volatile (
		"1:\tlwarx %0, 0, %2\n\t"	/* load old value */
		"subf %0, %3, %0\n\t"		/* calculate new value */
		"stwcx. %0, 0, %2\n\t"		/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		: "=&r" (temp), "=m" (*p)
		: "r" (p), "r" (v), "m" (*p)
		: "cc", "memory");
	/* NOTE(review): with !__GNUCLIKE_ASM this silently does nothing. */
#endif
}
122
/*
 * Atomically replace '*addr' with zero and return the previous value.
 *
 * A leading 'sync' drains prior writes before the exchange; the
 * lwarx/stwcx. loop then swaps in the constant 0.
 *
 * NOTE(review): 'result' is returned uninitialized when
 * __GNUCLIKE_ASM is not defined — confirm that configuration is
 * impossible for supported compilers.
 */
static __inline uint32_t
atomic_readandclear_32(volatile uint32_t *addr)
{
	uint32_t result,temp;

#ifdef __GNUCLIKE_ASM
	__asm __volatile (
		"\tsync\n"			/* drain writes */
		"1:\tlwarx %0, 0, %3\n\t"	/* load old value */
		"li %1, 0\n\t"			/* load new value */
		"stwcx. %1, 0, %3\n\t"		/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		: "=&r"(result), "=&r"(temp), "=m" (*addr)
		: "r" (addr), "m" (*addr)
		: "cc", "memory");
#endif

	return (result);
}
142
#if 0	/* Disabled: no 64-bit atomics on 32-bit PowerPC (see below). */

/*
 * So far I haven't found a way to implement atomic 64-bit ops on the
 * 32-bit PowerPC without involving major headaches.  If anyone has
 * any ideas, please let me know. =)
 *	- benno@FreeBSD.org
 *
 * NOTE(review): the asm statements below have EMPTY instruction
 * templates (constraints only) and would not compile as written; they
 * are placeholders that are never built because of the #if 0.
 */

static __inline void
atomic_set_64(volatile u_int64_t *p, u_int64_t v)
{
	u_int64_t temp;

	__asm __volatile (
		: "=&r" (temp), "=r" (*p)
		: "r" (*p), "r" (v)
		: "memory");
}

static __inline void
atomic_clear_64(volatile u_int64_t *p, u_int64_t v)
{
	u_int64_t temp;

	__asm __volatile (
		: "=&r" (temp), "=r" (*p)
		: "r" (*p), "r" (v)
		: "memory");
}

static __inline void
atomic_add_64(volatile u_int64_t *p, u_int64_t v)
{
	u_int64_t temp;

	__asm __volatile (
		: "=&r" (temp), "=r" (*p)
		: "r" (*p), "r" (v)
		: "memory");
}

static __inline void
atomic_subtract_64(volatile u_int64_t *p, u_int64_t v)
{
	u_int64_t temp;

	__asm __volatile (
		: "=&r" (temp), "=r" (*p)
		: "r" (*p), "r" (v)
		: "memory");
}

static __inline u_int64_t
atomic_readandclear_64(volatile u_int64_t *addr)
{
	u_int64_t result,temp;

	__asm __volatile (
		: "=&r"(result), "=&r"(temp), "=r" (*addr)
		: "r"(*addr)
		: "memory");

	return result;
}

#endif /* 0 */
210
/*
 * Map the C-type spellings (char/short/int) used by machine-independent
 * code onto the fixed-width primitives above.
 */
#define	atomic_set_char			atomic_set_8
#define	atomic_clear_char		atomic_clear_8
#define	atomic_add_char			atomic_add_8
#define	atomic_subtract_char		atomic_subtract_8

#define	atomic_set_short		atomic_set_16
#define	atomic_clear_short		atomic_clear_16
#define	atomic_add_short		atomic_add_16
#define	atomic_subtract_short		atomic_subtract_16

#define	atomic_set_int			atomic_set_32
#define	atomic_clear_int		atomic_clear_32
#define	atomic_add_int			atomic_add_32
#define	atomic_subtract_int		atomic_subtract_32
#define	atomic_readandclear_int		atomic_readandclear_32
226
/*
 * 'long' is 32 bits wide here, so the long variants map onto the
 * 32-bit primitives.  The add/subtract wrappers need casts; the macro
 * arguments are fully parenthesized (the originals expanded bare 'p'
 * and 'v', which breaks for arguments like 'base + off'), and the
 * casts retain 'volatile' so no qualifier is silently discarded.
 */
#define	atomic_set_long			atomic_set_32
#define	atomic_clear_long		atomic_clear_32
#define	atomic_add_long(p, v)						\
	atomic_add_32((volatile uint32_t *)(p), (uint32_t)(v))
#define	atomic_subtract_long(p, v)					\
	atomic_subtract_32((volatile uint32_t *)(p), (uint32_t)(v))
#define	atomic_readandclear_long	atomic_readandclear_32
232
/* Pointers are 32 bits wide here, so ptr ops are the 32-bit ops. */
#define	atomic_set_ptr			atomic_set_32
#define	atomic_clear_ptr		atomic_clear_32
#define	atomic_add_ptr			atomic_add_32
#define	atomic_subtract_ptr		atomic_subtract_32
#define	atomic_readandclear_ptr		atomic_readandclear_32
238
#if 0	/* Disabled with the 64-bit implementations above. */

/* See above. */

#define	atomic_set_long_long		atomic_set_64
#define	atomic_clear_long_long		atomic_clear_64
#define	atomic_add_long_long		atomic_add_64
#define	atomic_subtract_long_long	atomic_subtract_64
#define	atomic_readandclear_long_long	atomic_readandclear_64

#endif /* 0 */
250
/*
 * ATOMIC_ACQ_REL(NAME, WIDTH, TYPE) generates acquire/release variants
 * of atomic_NAME_WIDTH under both the numeric-width and C-type names:
 *
 *   _acq_: do the operation, then a full barrier (powerpc_mb()), so
 *          later accesses cannot appear to happen before it;
 *   _rel_: full barrier first, so earlier accesses complete before
 *          the operation.
 *
 * The WIDTH and TYPE bodies are identical; both are emitted so either
 * spelling resolves to a real inline function.
 */
#define ATOMIC_ACQ_REL(NAME, WIDTH, TYPE)				\
static __inline void							\
atomic_##NAME##_acq_##WIDTH(volatile u_int##WIDTH##_t *p, u_int##WIDTH##_t v) \
{									\
	atomic_##NAME##_##WIDTH(p, v);					\
	powerpc_mb();							\
}									\
									\
static __inline void							\
atomic_##NAME##_rel_##WIDTH(volatile u_int##WIDTH##_t *p, u_int##WIDTH##_t v) \
{									\
	powerpc_mb();							\
	atomic_##NAME##_##WIDTH(p, v);					\
}									\
									\
static __inline void							\
atomic_##NAME##_acq_##TYPE(volatile u_int##WIDTH##_t *p, u_int##WIDTH##_t v) \
{									\
	atomic_##NAME##_##WIDTH(p, v);					\
	powerpc_mb();							\
}									\
									\
static __inline void							\
atomic_##NAME##_rel_##TYPE(volatile u_int##WIDTH##_t *p, u_int##WIDTH##_t v) \
{									\
	powerpc_mb();							\
	atomic_##NAME##_##WIDTH(p, v);					\
}
279
/* Instantiate acq/rel variants for every width/op combination. */
ATOMIC_ACQ_REL(set, 8, char)
ATOMIC_ACQ_REL(clear, 8, char)
ATOMIC_ACQ_REL(add, 8, char)
ATOMIC_ACQ_REL(subtract, 8, char)
ATOMIC_ACQ_REL(set, 16, short)
ATOMIC_ACQ_REL(clear, 16, short)
ATOMIC_ACQ_REL(add, 16, short)
ATOMIC_ACQ_REL(subtract, 16, short)
ATOMIC_ACQ_REL(set, 32, int)
ATOMIC_ACQ_REL(clear, 32, int)
ATOMIC_ACQ_REL(add, 32, int)
ATOMIC_ACQ_REL(subtract, 32, int)
292
/* long and pointer acq/rel variants map to the 32-bit instantiations. */
#define	atomic_set_acq_long		atomic_set_acq_32
#define	atomic_set_rel_long		atomic_set_rel_32
#define	atomic_clear_acq_long		atomic_clear_acq_32
#define	atomic_clear_rel_long		atomic_clear_rel_32
#define	atomic_add_acq_long		atomic_add_acq_32
#define	atomic_add_rel_long		atomic_add_rel_32
#define	atomic_subtract_acq_long	atomic_subtract_acq_32
#define	atomic_subtract_rel_long	atomic_subtract_rel_32

#define	atomic_set_acq_ptr		atomic_set_acq_32
#define	atomic_set_rel_ptr		atomic_set_rel_32
#define	atomic_clear_acq_ptr		atomic_clear_acq_32
#define	atomic_clear_rel_ptr		atomic_clear_rel_32
#define	atomic_add_acq_ptr		atomic_add_acq_32
#define	atomic_add_rel_ptr		atomic_add_rel_32
#define	atomic_subtract_acq_ptr		atomic_subtract_acq_32
#define	atomic_subtract_rel_ptr		atomic_subtract_rel_32

#undef ATOMIC_ACQ_REL
312
/*
 * We assume that a plain "a = b" load/store of these widths is itself
 * atomic; this macro only adds ordering around it:
 *
 *   load_acq:  load, then full barrier (powerpc_mb());
 *   store_rel: full barrier, then store.
 *
 * NOTE(review): confirm that powerpc_mb() in these positions provides
 * the intended acquire/release semantics on all supported CPUs.
 */
#define ATOMIC_STORE_LOAD(TYPE, WIDTH)				\
static __inline u_##TYPE					\
atomic_load_acq_##WIDTH(volatile u_##TYPE *p)			\
{								\
	u_##TYPE v;						\
								\
	v = *p;							\
	powerpc_mb();						\
	return (v);						\
}								\
								\
static __inline void						\
atomic_store_rel_##WIDTH(volatile u_##TYPE *p, u_##TYPE v)	\
{								\
	powerpc_mb();						\
	*p = v;							\
}								\
								\
static __inline u_##TYPE					\
atomic_load_acq_##TYPE(volatile u_##TYPE *p)			\
{								\
	u_##TYPE v;						\
								\
	v = *p;							\
	powerpc_mb();						\
	return (v);						\
}								\
								\
static __inline void						\
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)	\
{								\
	powerpc_mb();						\
	*p = v;							\
}
350
/* Instantiate load_acq/store_rel for each width; alias long and ptr. */
ATOMIC_STORE_LOAD(char,		8)
ATOMIC_STORE_LOAD(short,	16)
ATOMIC_STORE_LOAD(int,		32)

#define	atomic_load_acq_long	atomic_load_acq_32
#define	atomic_store_rel_long	atomic_store_rel_32

#define	atomic_load_acq_ptr	atomic_load_acq_32
#define	atomic_store_rel_ptr	atomic_store_rel_32

#undef ATOMIC_STORE_LOAD
362
/*
 * Atomically compare the value stored at *p with cmpval and if the
 * two values are equal, update the value of *p with newval. Returns
 * zero if the compare failed, nonzero otherwise.
 *
 * lwarx/stwcx. loop: on compare failure we branch to 2: and issue a
 * dummy stwcx. purely to drop the reservation (a 74xx-family
 * workaround, per the comment); its success/failure is ignored.
 */
static __inline uint32_t
atomic_cmpset_32(volatile uint32_t* p, uint32_t cmpval, uint32_t newval)
{
	uint32_t	ret;

#ifdef __GNUCLIKE_ASM
	__asm __volatile (
		"1:\tlwarx %0, 0, %2\n\t"	/* load old value */
		"cmplw %3, %0\n\t"		/* compare */
		"bne 2f\n\t"			/* exit if not equal */
		"stwcx. %4, 0, %2\n\t"		/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		"li %0, 1\n\t"			/* success - retval = 1 */
		"b 3f\n\t"			/* we've succeeded */
		"2:\n\t"
		"stwcx. %0, 0, %2\n\t"		/* clear reservation (74xx) */
		"li %0, 0\n\t"			/* failure - retval = 0 */
		"3:\n\t"
		: "=&r" (ret), "=m" (*p)
		: "r" (p), "r" (cmpval), "r" (newval), "m" (*p)
		: "cc", "memory");
#endif

	return (ret);
}
393
/*
 * Compare-and-set for u_long; same contract and same asm body as
 * atomic_cmpset_32.  The 32-bit lwarx/stwcx. covers the full value
 * because u_long is 32 bits wide on this platform — assumption baked
 * in here; TODO(review): revisit for any 64-bit configuration.
 */
static __inline u_long
atomic_cmpset_long(volatile u_long* p, u_long cmpval, u_long newval)
{
	uint32_t	ret;

#ifdef __GNUCLIKE_ASM
	__asm __volatile (
		"1:\tlwarx %0, 0, %2\n\t"	/* load old value */
		"cmplw %3, %0\n\t"		/* compare */
		"bne 2f\n\t"			/* exit if not equal */
		"stwcx. %4, 0, %2\n\t"		/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		"li %0, 1\n\t"			/* success - retval = 1 */
		"b 3f\n\t"			/* we've succeeded */
		"2:\n\t"
		"stwcx. %0, 0, %2\n\t"		/* clear reservation (74xx) */
		"li %0, 0\n\t"			/* failure - retval = 0 */
		"3:\n\t"
		: "=&r" (ret), "=m" (*p)
		: "r" (p), "r" (cmpval), "r" (newval), "m" (*p)
		: "cc", "memory");
#endif

	return (ret);
}
419
#if 0	/* Disabled with the other 64-bit placeholders above. */

/*
 * Atomically compare the value stored at *p with cmpval and if the
 * two values are equal, update the value of *p with newval. Returns
 * zero if the compare failed, nonzero otherwise.
 *
 * NOTE(review): empty asm template — placeholder, never built.
 */
static __inline u_int64_t
atomic_cmpset_64(volatile u_int64_t* p, u_int64_t cmpval, u_int64_t newval)
{
	u_int64_t ret;

	__asm __volatile (
		: "=&r" (ret), "=r" (*p)
		: "r" (cmpval), "r" (newval), "r" (*p)
		: "memory");

	return ret;
}

#endif /* 0 */
441
/* int and pointer cmpset map to the 32-bit implementation. */
#define	atomic_cmpset_int	atomic_cmpset_32

#define	atomic_cmpset_ptr(dst, old, new)	\
    atomic_cmpset_32((volatile u_int *)(dst), (u_int)(old), (u_int)(new))

#if 0	/* Disabled: depends on atomic_cmpset_64 above. */
#define	atomic_cmpset_long_long	atomic_cmpset_64
#endif /* 0 */
450
/*
 * Compare-and-set with acquire semantics: perform the cmpset, then a
 * full barrier so subsequent accesses cannot be ordered before it.
 * Returns nonzero on success, zero on failure (see atomic_cmpset_32).
 *
 * Fix: the result was held in an 'int' local even though the function
 * returns uint32_t; use uint32_t throughout for type consistency.
 */
static __inline uint32_t
atomic_cmpset_acq_32(volatile uint32_t *p, uint32_t cmpval, uint32_t newval)
{
	uint32_t retval;

	retval = atomic_cmpset_32(p, cmpval, newval);
	powerpc_mb();		/* acquire: barrier after the operation */
	return (retval);
}
460
/*
 * Compare-and-set with release semantics: issue a full barrier first
 * so all earlier accesses complete before the update is attempted.
 * Returns nonzero on success, zero on failure (see atomic_cmpset_32).
 */
static __inline uint32_t
atomic_cmpset_rel_32(volatile uint32_t *p, uint32_t cmpval, uint32_t newval)
{
	uint32_t done;

	powerpc_mb();		/* release: barrier before the operation */
	done = atomic_cmpset_32(p, cmpval, newval);
	return (done);
}
467
468 static __inline u_long
469 atomic_cmpset_acq_long(volatile u_long *p, u_long cmpval, u_long newval)
470 {
471         int retval;
472
473         retval = atomic_cmpset_long(p, cmpval, newval);
474         powerpc_mb();
475         return (retval);
476 }
477
478 static __inline uint32_t
479 atomic_cmpset_rel_long(volatile u_long *p, u_long cmpval, u_long newval)
480 {
481         powerpc_mb();
482         return (atomic_cmpset_long(p, cmpval, newval));
483 }
484
/* int and pointer acq/rel cmpset map to the 32-bit variants. */
#define	atomic_cmpset_acq_int	atomic_cmpset_acq_32
#define	atomic_cmpset_rel_int	atomic_cmpset_rel_32

#define	atomic_cmpset_acq_ptr(dst, old, new)	\
    atomic_cmpset_acq_32((volatile u_int *)(dst), (u_int)(old), (u_int)(new))
#define	atomic_cmpset_rel_ptr(dst, old, new)	\
    atomic_cmpset_rel_32((volatile u_int *)(dst), (u_int)(old), (u_int)(new))
492
/*
 * Atomically add 'v' to '*p' and return the value '*p' held before the
 * addition, implemented as a compare-and-set retry loop: snapshot the
 * current value and attempt to install snapshot + v; if another CPU
 * changed the word in between, the cmpset fails and we retry.
 */
static __inline uint32_t
atomic_fetchadd_32(volatile uint32_t *p, uint32_t v)
{
	uint32_t snapshot;

	do {
		snapshot = *p;
	} while (atomic_cmpset_32(p, snapshot, snapshot + v) == 0);

	return (snapshot);
}

#define	atomic_fetchadd_int	atomic_fetchadd_32
505
506 #endif /* ! _MACHINE_ATOMIC_H_ */