/*-
 * SPDX-License-Identifier: BSD-2-Clause-FreeBSD
 *
 * Copyright (c) 2008 Marcel Moolenaar
 * Copyright (c) 2001 Benno Rice
 * Copyright (c) 2001 David E. O'Brien
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD$
 */

#ifndef _MACHINE_ATOMIC_H_
#define _MACHINE_ATOMIC_H_

#ifndef _SYS_CDEFS_H_
#error this file needs sys/cdefs.h as a prerequisite
#endif

/*
 * The __ATOMIC_REL/ACQ() macros provide memory barriers only in conjunction
 * with the atomic lXarx/stXcx. sequences below. They are not exposed outside
 * of this file. See also Appendix B.2 of Book II of the architecture manual.
 *
 * Note that not all Book-E processors accept the light-weight sync variant.
 * In particular, early models of E500 cores are known to wedge. Assume that
 * all 64-bit capable CPUs accept lwsync properly and pessimize 32-bit CPUs
 * to use the heavier-weight sync.
 */

#ifdef __powerpc64__
#define mb()            __asm __volatile("sync" : : : "memory")
#define rmb()           __asm __volatile("lwsync" : : : "memory")
#define wmb()           __asm __volatile("lwsync" : : : "memory")
#define __ATOMIC_REL()  __asm __volatile("lwsync" : : : "memory")
#define __ATOMIC_ACQ()  __asm __volatile("isync" : : : "memory")
#else
#define mb()            __asm __volatile("sync" : : : "memory")
#define rmb()           __asm __volatile("sync" : : : "memory")
#define wmb()           __asm __volatile("sync" : : : "memory")
#define __ATOMIC_REL()  __asm __volatile("sync" : : : "memory")
#define __ATOMIC_ACQ()  __asm __volatile("isync" : : : "memory")
#endif

static __inline void
powerpc_lwsync(void)
{

#ifdef __powerpc64__
        __asm __volatile("lwsync" : : : "memory");
#else
        __asm __volatile("sync" : : : "memory");
#endif
}
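
/*
 * Illustrative sketch (editorial addition, not part of the API): an
 * acquire-flavored primitive built from these pieces, e.g.
 * atomic_add_acq_int(p, v), expands to roughly
 *
 *      1:      lwarx   t, 0, p         load word and reserve
 *              add     t, v, t
 *              stwcx.  t, 0, p         store conditionally on the reservation
 *              bne-    1b              lost the reservation: retry
 *              isync                   __ATOMIC_ACQ() acquire barrier
 *
 * while the _rel variants place the lwsync/sync of __ATOMIC_REL() before
 * the lXarx/stXcx. loop instead.
 */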

/*
 * atomic_add(p, v)
 * { *p += v; }
 */

#define __atomic_add_int(p, v, t)                               \
    __asm __volatile(                                           \
        "1:     lwarx   %0, 0, %2\n"                            \
        "       add     %0, %3, %0\n"                           \
        "       stwcx.  %0, 0, %2\n"                            \
        "       bne-    1b\n"                                   \
        : "=&r" (t), "=m" (*p)                                  \
        : "r" (p), "r" (v), "m" (*p)                            \
        : "cr0", "memory")                                      \
    /* __atomic_add_int */

#ifdef __powerpc64__
#define __atomic_add_long(p, v, t)                              \
    __asm __volatile(                                           \
        "1:     ldarx   %0, 0, %2\n"                            \
        "       add     %0, %3, %0\n"                           \
        "       stdcx.  %0, 0, %2\n"                            \
        "       bne-    1b\n"                                   \
        : "=&r" (t), "=m" (*p)                                  \
        : "r" (p), "r" (v), "m" (*p)                            \
        : "cr0", "memory")                                      \
    /* __atomic_add_long */
#else
#define __atomic_add_long(p, v, t)                              \
    __asm __volatile(                                           \
        "1:     lwarx   %0, 0, %2\n"                            \
        "       add     %0, %3, %0\n"                           \
        "       stwcx.  %0, 0, %2\n"                            \
        "       bne-    1b\n"                                   \
        : "=&r" (t), "=m" (*p)                                  \
        : "r" (p), "r" (v), "m" (*p)                            \
        : "cr0", "memory")                                      \
    /* __atomic_add_long */
#endif

#define _ATOMIC_ADD(type)                                       \
    static __inline void                                        \
    atomic_add_##type(volatile u_##type *p, u_##type v) {       \
        u_##type t;                                             \
        __atomic_add_##type(p, v, t);                           \
    }                                                           \
                                                                \
    static __inline void                                        \
    atomic_add_acq_##type(volatile u_##type *p, u_##type v) {   \
        u_##type t;                                             \
        __atomic_add_##type(p, v, t);                           \
        __ATOMIC_ACQ();                                         \
    }                                                           \
                                                                \
    static __inline void                                        \
    atomic_add_rel_##type(volatile u_##type *p, u_##type v) {   \
        u_##type t;                                             \
        __ATOMIC_REL();                                         \
        __atomic_add_##type(p, v, t);                           \
    }                                                           \
    /* _ATOMIC_ADD */

_ATOMIC_ADD(int)
_ATOMIC_ADD(long)

#define atomic_add_32           atomic_add_int
#define atomic_add_acq_32       atomic_add_acq_int
#define atomic_add_rel_32       atomic_add_rel_int

#ifdef __powerpc64__
#define atomic_add_64           atomic_add_long
#define atomic_add_acq_64       atomic_add_acq_long
#define atomic_add_rel_64       atomic_add_rel_long

#define atomic_add_ptr          atomic_add_long
#define atomic_add_acq_ptr      atomic_add_acq_long
#define atomic_add_rel_ptr      atomic_add_rel_long
#else
#define atomic_add_ptr          atomic_add_int
#define atomic_add_acq_ptr      atomic_add_acq_int
#define atomic_add_rel_ptr      atomic_add_rel_int
#endif
#undef _ATOMIC_ADD
#undef __atomic_add_long
#undef __atomic_add_int
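
/*
 * Usage sketch (editorial addition; the counter name is hypothetical):
 * bump a shared statistics counter from any context without a lock.
 *
 *      static volatile u_int pkt_count;
 *
 *      atomic_add_int(&pkt_count, 1);
 */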

/*
 * atomic_clear(p, v)
 * { *p &= ~v; }
 */

#define __atomic_clear_int(p, v, t)                             \
    __asm __volatile(                                           \
        "1:     lwarx   %0, 0, %2\n"                            \
        "       andc    %0, %0, %3\n"                           \
        "       stwcx.  %0, 0, %2\n"                            \
        "       bne-    1b\n"                                   \
        : "=&r" (t), "=m" (*p)                                  \
        : "r" (p), "r" (v), "m" (*p)                            \
        : "cr0", "memory")                                      \
    /* __atomic_clear_int */

#ifdef __powerpc64__
#define __atomic_clear_long(p, v, t)                            \
    __asm __volatile(                                           \
        "1:     ldarx   %0, 0, %2\n"                            \
        "       andc    %0, %0, %3\n"                           \
        "       stdcx.  %0, 0, %2\n"                            \
        "       bne-    1b\n"                                   \
        : "=&r" (t), "=m" (*p)                                  \
        : "r" (p), "r" (v), "m" (*p)                            \
        : "cr0", "memory")                                      \
    /* __atomic_clear_long */
#else
#define __atomic_clear_long(p, v, t)                            \
    __asm __volatile(                                           \
        "1:     lwarx   %0, 0, %2\n"                            \
        "       andc    %0, %0, %3\n"                           \
        "       stwcx.  %0, 0, %2\n"                            \
        "       bne-    1b\n"                                   \
        : "=&r" (t), "=m" (*p)                                  \
        : "r" (p), "r" (v), "m" (*p)                            \
        : "cr0", "memory")                                      \
    /* __atomic_clear_long */
#endif

#define _ATOMIC_CLEAR(type)                                     \
    static __inline void                                        \
    atomic_clear_##type(volatile u_##type *p, u_##type v) {     \
        u_##type t;                                             \
        __atomic_clear_##type(p, v, t);                         \
    }                                                           \
                                                                \
    static __inline void                                        \
    atomic_clear_acq_##type(volatile u_##type *p, u_##type v) { \
        u_##type t;                                             \
        __atomic_clear_##type(p, v, t);                         \
        __ATOMIC_ACQ();                                         \
    }                                                           \
                                                                \
    static __inline void                                        \
    atomic_clear_rel_##type(volatile u_##type *p, u_##type v) { \
        u_##type t;                                             \
        __ATOMIC_REL();                                         \
        __atomic_clear_##type(p, v, t);                         \
    }                                                           \
    /* _ATOMIC_CLEAR */

_ATOMIC_CLEAR(int)
_ATOMIC_CLEAR(long)

#define atomic_clear_32         atomic_clear_int
#define atomic_clear_acq_32     atomic_clear_acq_int
#define atomic_clear_rel_32     atomic_clear_rel_int

#ifdef __powerpc64__
#define atomic_clear_64         atomic_clear_long
#define atomic_clear_acq_64     atomic_clear_acq_long
#define atomic_clear_rel_64     atomic_clear_rel_long

#define atomic_clear_ptr        atomic_clear_long
#define atomic_clear_acq_ptr    atomic_clear_acq_long
#define atomic_clear_rel_ptr    atomic_clear_rel_long
#else
#define atomic_clear_ptr        atomic_clear_int
#define atomic_clear_acq_ptr    atomic_clear_acq_int
#define atomic_clear_rel_ptr    atomic_clear_rel_int
#endif
#undef _ATOMIC_CLEAR
#undef __atomic_clear_long
#undef __atomic_clear_int

/*
 * atomic_cmpset(p, o, n)
 */
/* TODO -- see below */

/*
 * atomic_load_acq(p)
 */
/* TODO -- see below */

/*
 * atomic_readandclear(p)
 */
/* TODO -- see below */

/*
 * atomic_set(p, v)
 * { *p |= v; }
 */

#define __atomic_set_int(p, v, t)                               \
    __asm __volatile(                                           \
        "1:     lwarx   %0, 0, %2\n"                            \
        "       or      %0, %3, %0\n"                           \
        "       stwcx.  %0, 0, %2\n"                            \
        "       bne-    1b\n"                                   \
        : "=&r" (t), "=m" (*p)                                  \
        : "r" (p), "r" (v), "m" (*p)                            \
        : "cr0", "memory")                                      \
    /* __atomic_set_int */

#ifdef __powerpc64__
#define __atomic_set_long(p, v, t)                              \
    __asm __volatile(                                           \
        "1:     ldarx   %0, 0, %2\n"                            \
        "       or      %0, %3, %0\n"                           \
        "       stdcx.  %0, 0, %2\n"                            \
        "       bne-    1b\n"                                   \
        : "=&r" (t), "=m" (*p)                                  \
        : "r" (p), "r" (v), "m" (*p)                            \
        : "cr0", "memory")                                      \
    /* __atomic_set_long */
#else
#define __atomic_set_long(p, v, t)                              \
    __asm __volatile(                                           \
        "1:     lwarx   %0, 0, %2\n"                            \
        "       or      %0, %3, %0\n"                           \
        "       stwcx.  %0, 0, %2\n"                            \
        "       bne-    1b\n"                                   \
        : "=&r" (t), "=m" (*p)                                  \
        : "r" (p), "r" (v), "m" (*p)                            \
        : "cr0", "memory")                                      \
    /* __atomic_set_long */
#endif

#define _ATOMIC_SET(type)                                       \
    static __inline void                                        \
    atomic_set_##type(volatile u_##type *p, u_##type v) {       \
        u_##type t;                                             \
        __atomic_set_##type(p, v, t);                           \
    }                                                           \
                                                                \
    static __inline void                                        \
    atomic_set_acq_##type(volatile u_##type *p, u_##type v) {   \
        u_##type t;                                             \
        __atomic_set_##type(p, v, t);                           \
        __ATOMIC_ACQ();                                         \
    }                                                           \
                                                                \
    static __inline void                                        \
    atomic_set_rel_##type(volatile u_##type *p, u_##type v) {   \
        u_##type t;                                             \
        __ATOMIC_REL();                                         \
        __atomic_set_##type(p, v, t);                           \
    }                                                           \
    /* _ATOMIC_SET */

_ATOMIC_SET(int)
_ATOMIC_SET(long)

#define atomic_set_32           atomic_set_int
#define atomic_set_acq_32       atomic_set_acq_int
#define atomic_set_rel_32       atomic_set_rel_int

#ifdef __powerpc64__
#define atomic_set_64           atomic_set_long
#define atomic_set_acq_64       atomic_set_acq_long
#define atomic_set_rel_64       atomic_set_rel_long

#define atomic_set_ptr          atomic_set_long
#define atomic_set_acq_ptr      atomic_set_acq_long
#define atomic_set_rel_ptr      atomic_set_rel_long
#else
#define atomic_set_ptr          atomic_set_int
#define atomic_set_acq_ptr      atomic_set_acq_int
#define atomic_set_rel_ptr      atomic_set_rel_int
#endif
#undef _ATOMIC_SET
#undef __atomic_set_long
#undef __atomic_set_int
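
/*
 * Usage sketch (editorial addition; the flag name and word are
 * hypothetical): atomic_set/atomic_clear manipulate individual bits
 * in a shared flags word.
 *
 *      #define F_BUSY  0x0001
 *      static volatile u_int flags;
 *
 *      atomic_set_acq_int(&flags, F_BUSY);     set with acquire semantics
 *      atomic_clear_rel_int(&flags, F_BUSY);   clear with release semantics
 */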

/*
 * atomic_subtract(p, v)
 * { *p -= v; }
 */

#define __atomic_subtract_int(p, v, t)                          \
    __asm __volatile(                                           \
        "1:     lwarx   %0, 0, %2\n"                            \
        "       subf    %0, %3, %0\n"                           \
        "       stwcx.  %0, 0, %2\n"                            \
        "       bne-    1b\n"                                   \
        : "=&r" (t), "=m" (*p)                                  \
        : "r" (p), "r" (v), "m" (*p)                            \
        : "cr0", "memory")                                      \
    /* __atomic_subtract_int */

#ifdef __powerpc64__
#define __atomic_subtract_long(p, v, t)                         \
    __asm __volatile(                                           \
        "1:     ldarx   %0, 0, %2\n"                            \
        "       subf    %0, %3, %0\n"                           \
        "       stdcx.  %0, 0, %2\n"                            \
        "       bne-    1b\n"                                   \
        : "=&r" (t), "=m" (*p)                                  \
        : "r" (p), "r" (v), "m" (*p)                            \
        : "cr0", "memory")                                      \
    /* __atomic_subtract_long */
#else
#define __atomic_subtract_long(p, v, t)                         \
    __asm __volatile(                                           \
        "1:     lwarx   %0, 0, %2\n"                            \
        "       subf    %0, %3, %0\n"                           \
        "       stwcx.  %0, 0, %2\n"                            \
        "       bne-    1b\n"                                   \
        : "=&r" (t), "=m" (*p)                                  \
        : "r" (p), "r" (v), "m" (*p)                            \
        : "cr0", "memory")                                      \
    /* __atomic_subtract_long */
#endif

#define _ATOMIC_SUBTRACT(type)                                          \
    static __inline void                                                \
    atomic_subtract_##type(volatile u_##type *p, u_##type v) {          \
        u_##type t;                                                     \
        __atomic_subtract_##type(p, v, t);                              \
    }                                                                   \
                                                                        \
    static __inline void                                                \
    atomic_subtract_acq_##type(volatile u_##type *p, u_##type v) {      \
        u_##type t;                                                     \
        __atomic_subtract_##type(p, v, t);                              \
        __ATOMIC_ACQ();                                                 \
    }                                                                   \
                                                                        \
    static __inline void                                                \
    atomic_subtract_rel_##type(volatile u_##type *p, u_##type v) {      \
        u_##type t;                                                     \
        __ATOMIC_REL();                                                 \
        __atomic_subtract_##type(p, v, t);                              \
    }                                                                   \
    /* _ATOMIC_SUBTRACT */

_ATOMIC_SUBTRACT(int)
_ATOMIC_SUBTRACT(long)

#define atomic_subtract_32      atomic_subtract_int
#define atomic_subtract_acq_32  atomic_subtract_acq_int
#define atomic_subtract_rel_32  atomic_subtract_rel_int

#ifdef __powerpc64__
#define atomic_subtract_64      atomic_subtract_long
#define atomic_subtract_acq_64  atomic_subtract_acq_long
#define atomic_subtract_rel_64  atomic_subtract_rel_long

#define atomic_subtract_ptr     atomic_subtract_long
#define atomic_subtract_acq_ptr atomic_subtract_acq_long
#define atomic_subtract_rel_ptr atomic_subtract_rel_long
#else
#define atomic_subtract_ptr     atomic_subtract_int
#define atomic_subtract_acq_ptr atomic_subtract_acq_int
#define atomic_subtract_rel_ptr atomic_subtract_rel_int
#endif
#undef _ATOMIC_SUBTRACT
#undef __atomic_subtract_long
#undef __atomic_subtract_int

/*
 * atomic_store_rel(p, v)
 */
/* TODO -- see below */

/*
 * Old/original implementations that still need revisiting.
 */

static __inline u_int
atomic_readandclear_int(volatile u_int *addr)
{
        u_int result, temp;

        __asm __volatile (
                "\tsync\n"                      /* drain writes */
                "1:\tlwarx %0, 0, %3\n\t"       /* load old value */
                "li %1, 0\n\t"                  /* load new value */
                "stwcx. %1, 0, %3\n\t"          /* attempt to store */
                "bne- 1b\n\t"                   /* spin if failed */
                : "=&r"(result), "=&r"(temp), "=m" (*addr)
                : "r" (addr), "m" (*addr)
                : "cr0", "memory");

        return (result);
}

#ifdef __powerpc64__
static __inline u_long
atomic_readandclear_long(volatile u_long *addr)
{
        u_long result, temp;

        __asm __volatile (
                "\tsync\n"                      /* drain writes */
                "1:\tldarx %0, 0, %3\n\t"       /* load old value */
                "li %1, 0\n\t"                  /* load new value */
                "stdcx. %1, 0, %3\n\t"          /* attempt to store */
                "bne- 1b\n\t"                   /* spin if failed */
                : "=&r"(result), "=&r"(temp), "=m" (*addr)
                : "r" (addr), "m" (*addr)
                : "cr0", "memory");

        return (result);
}
#endif

#define atomic_readandclear_32          atomic_readandclear_int

#ifdef __powerpc64__
#define atomic_readandclear_64          atomic_readandclear_long

#define atomic_readandclear_ptr         atomic_readandclear_long
#else
static __inline u_long
atomic_readandclear_long(volatile u_long *addr)
{

        return ((u_long)atomic_readandclear_int((volatile u_int *)addr));
}

#define atomic_readandclear_ptr         atomic_readandclear_int
#endif
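
/*
 * Usage sketch (editorial addition; "pending" and the handler are
 * hypothetical): drain a word of event bits in one shot, so a bit
 * posted concurrently by another CPU is either returned here or
 * remains set for the next drain, but is never lost.
 *
 *      static volatile u_int pending;
 *      u_int ev;
 *
 *      ev = atomic_readandclear_int(&pending);
 *      if (ev != 0)
 *              handle_events(ev);
 */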

/*
 * We assume that a = b will do atomic loads and stores.
 */
#define ATOMIC_STORE_LOAD(TYPE)                                 \
static __inline u_##TYPE                                        \
atomic_load_acq_##TYPE(volatile u_##TYPE *p)                    \
{                                                               \
        u_##TYPE v;                                             \
                                                                \
        v = *p;                                                 \
        mb();                                                   \
        return (v);                                             \
}                                                               \
                                                                \
static __inline void                                            \
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)       \
{                                                               \
                                                                \
        powerpc_lwsync();                                       \
        *p = v;                                                 \
}

ATOMIC_STORE_LOAD(int)

#define atomic_load_acq_32      atomic_load_acq_int
#define atomic_store_rel_32     atomic_store_rel_int

#ifdef __powerpc64__
ATOMIC_STORE_LOAD(long)

#define atomic_load_acq_64      atomic_load_acq_long
#define atomic_store_rel_64     atomic_store_rel_long

#define atomic_load_acq_ptr     atomic_load_acq_long
#define atomic_store_rel_ptr    atomic_store_rel_long
#else
static __inline u_long
atomic_load_acq_long(volatile u_long *addr)
{

        return ((u_long)atomic_load_acq_int((volatile u_int *)addr));
}

static __inline void
atomic_store_rel_long(volatile u_long *addr, u_long val)
{

        atomic_store_rel_int((volatile u_int *)addr, (u_int)val);
}

#define atomic_load_acq_ptr     atomic_load_acq_int
#define atomic_store_rel_ptr    atomic_store_rel_int
#endif
#undef ATOMIC_STORE_LOAD
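
/*
 * Usage sketch (editorial addition; "data" and "ready" are
 * hypothetical): the classic publish/consume pairing.  The release
 * store makes the data write visible before the flag; the acquire
 * load orders the flag read before the data read.
 *
 *      CPU 0 (producer):
 *              data = 42;
 *              atomic_store_rel_int(&ready, 1);
 *
 *      CPU 1 (consumer):
 *              while (atomic_load_acq_int(&ready) == 0)
 *                      ;
 *              v = data;               guaranteed to observe 42
 */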

/*
 * Atomically compare the value stored at *p with cmpval and, if the
 * two values are equal, update the value of *p with newval. Returns
 * zero if the compare failed, nonzero otherwise.
 */
static __inline int
atomic_cmpset_int(volatile u_int* p, u_int cmpval, u_int newval)
{
        int     ret;

        __asm __volatile (
                "1:\tlwarx %0, 0, %2\n\t"       /* load old value */
                "cmplw %3, %0\n\t"              /* compare */
                "bne 2f\n\t"                    /* exit if not equal */
                "stwcx. %4, 0, %2\n\t"          /* attempt to store */
                "bne- 1b\n\t"                   /* spin if failed */
                "li %0, 1\n\t"                  /* success - retval = 1 */
                "b 3f\n\t"                      /* we've succeeded */
                "2:\n\t"
                "stwcx. %0, 0, %2\n\t"          /* clear reservation (74xx) */
                "li %0, 0\n\t"                  /* failure - retval = 0 */
                "3:\n\t"
                : "=&r" (ret), "=m" (*p)
                : "r" (p), "r" (cmpval), "r" (newval), "m" (*p)
                : "cr0", "memory");

        return (ret);
}

static __inline int
atomic_cmpset_long(volatile u_long* p, u_long cmpval, u_long newval)
{
        int ret;

        __asm __volatile (
            #ifdef __powerpc64__
                "1:\tldarx %0, 0, %2\n\t"       /* load old value */
                "cmpld %3, %0\n\t"              /* compare */
                "bne 2f\n\t"                    /* exit if not equal */
                "stdcx. %4, 0, %2\n\t"          /* attempt to store */
            #else
                "1:\tlwarx %0, 0, %2\n\t"       /* load old value */
                "cmplw %3, %0\n\t"              /* compare */
                "bne 2f\n\t"                    /* exit if not equal */
                "stwcx. %4, 0, %2\n\t"          /* attempt to store */
            #endif
                "bne- 1b\n\t"                   /* spin if failed */
                "li %0, 1\n\t"                  /* success - retval = 1 */
                "b 3f\n\t"                      /* we've succeeded */
                "2:\n\t"
            #ifdef __powerpc64__
                "stdcx. %0, 0, %2\n\t"          /* clear reservation (74xx) */
            #else
                "stwcx. %0, 0, %2\n\t"          /* clear reservation (74xx) */
            #endif
                "li %0, 0\n\t"                  /* failure - retval = 0 */
                "3:\n\t"
                : "=&r" (ret), "=m" (*p)
                : "r" (p), "r" (cmpval), "r" (newval), "m" (*p)
                : "cr0", "memory");

        return (ret);
}

static __inline int
atomic_cmpset_acq_int(volatile u_int *p, u_int cmpval, u_int newval)
{
        int retval;

        retval = atomic_cmpset_int(p, cmpval, newval);
        __ATOMIC_ACQ();
        return (retval);
}

static __inline int
atomic_cmpset_rel_int(volatile u_int *p, u_int cmpval, u_int newval)
{
        __ATOMIC_REL();
        return (atomic_cmpset_int(p, cmpval, newval));
}

static __inline int
atomic_cmpset_acq_long(volatile u_long *p, u_long cmpval, u_long newval)
{
        int retval;

        retval = atomic_cmpset_long(p, cmpval, newval);
        __ATOMIC_ACQ();
        return (retval);
}

static __inline int
atomic_cmpset_rel_long(volatile u_long *p, u_long cmpval, u_long newval)
{
        __ATOMIC_REL();
        return (atomic_cmpset_long(p, cmpval, newval));
}

#define atomic_cmpset_32        atomic_cmpset_int
#define atomic_cmpset_acq_32    atomic_cmpset_acq_int
#define atomic_cmpset_rel_32    atomic_cmpset_rel_int

#ifdef __powerpc64__
#define atomic_cmpset_64        atomic_cmpset_long
#define atomic_cmpset_acq_64    atomic_cmpset_acq_long
#define atomic_cmpset_rel_64    atomic_cmpset_rel_long

#define atomic_cmpset_ptr       atomic_cmpset_long
#define atomic_cmpset_acq_ptr   atomic_cmpset_acq_long
#define atomic_cmpset_rel_ptr   atomic_cmpset_rel_long
#else
#define atomic_cmpset_ptr       atomic_cmpset_int
#define atomic_cmpset_acq_ptr   atomic_cmpset_acq_int
#define atomic_cmpset_rel_ptr   atomic_cmpset_rel_int
#endif
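
/*
 * Usage sketch (editorial addition; "lock" is a hypothetical word,
 * 0 = free, 1 = held): a minimal test-and-set spin loop built from
 * cmpset with acquire/release pairing.
 *
 *      static volatile u_int lock;
 *
 *      while (atomic_cmpset_acq_int(&lock, 0, 1) == 0)
 *              ;                               spin until we win the race
 *      ... critical section ...
 *      atomic_store_rel_int(&lock, 0);         drop the lock
 */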

/*
 * Atomically compare the value stored at *p with *cmpval and, if the
 * two values are equal, update the value of *p with newval. Returns
 * zero if the compare failed, in which case *cmpval is updated with
 * the value read from *p; returns nonzero otherwise.
 */
static __inline int
atomic_fcmpset_int(volatile u_int *p, u_int *cmpval, u_int newval)
{
        int     ret;

        __asm __volatile (
                "lwarx %0, 0, %3\n\t"           /* load old value */
                "cmplw %4, %0\n\t"              /* compare */
                "bne 1f\n\t"                    /* exit if not equal */
                "stwcx. %5, 0, %3\n\t"          /* attempt to store */
                "bne- 1f\n\t"                   /* exit if failed */
                "li %0, 1\n\t"                  /* success - retval = 1 */
                "b 2f\n\t"                      /* we've succeeded */
                "1:\n\t"
                "stwcx. %0, 0, %3\n\t"          /* clear reservation (74xx) */
                "stwx %0, 0, %7\n\t"            /* pass back observed value */
                "li %0, 0\n\t"                  /* failure - retval = 0 */
                "2:\n\t"
                : "=&r" (ret), "=m" (*p), "=m" (*cmpval)
                : "r" (p), "r" (*cmpval), "r" (newval), "m" (*p), "r" (cmpval)
                : "cr0", "memory");

        return (ret);
}

static __inline int
atomic_fcmpset_long(volatile u_long *p, u_long *cmpval, u_long newval)
{
        int ret;

        __asm __volatile (
            #ifdef __powerpc64__
                "ldarx %0, 0, %3\n\t"           /* load old value */
                "cmpld %4, %0\n\t"              /* compare */
                "bne 1f\n\t"                    /* exit if not equal */
                "stdcx. %5, 0, %3\n\t"          /* attempt to store */
            #else
                "lwarx %0, 0, %3\n\t"           /* load old value */
                "cmplw %4, %0\n\t"              /* compare */
                "bne 1f\n\t"                    /* exit if not equal */
                "stwcx. %5, 0, %3\n\t"          /* attempt to store */
            #endif
                "bne- 1f\n\t"                   /* exit if failed */
                "li %0, 1\n\t"                  /* success - retval = 1 */
                "b 2f\n\t"                      /* we've succeeded */
                "1:\n\t"
            #ifdef __powerpc64__
                "stdcx. %0, 0, %3\n\t"          /* clear reservation (74xx) */
                "stdx %0, 0, %7\n\t"            /* pass back observed value */
            #else
                "stwcx. %0, 0, %3\n\t"          /* clear reservation (74xx) */
                "stwx %0, 0, %7\n\t"            /* pass back observed value */
            #endif
                "li %0, 0\n\t"                  /* failure - retval = 0 */
                "2:\n\t"
                : "=&r" (ret), "=m" (*p), "=m" (*cmpval)
                : "r" (p), "r" (*cmpval), "r" (newval), "m" (*p), "r" (cmpval)
                : "cr0", "memory");

        return (ret);
}

static __inline int
atomic_fcmpset_acq_int(volatile u_int *p, u_int *cmpval, u_int newval)
{
        int retval;

        retval = atomic_fcmpset_int(p, cmpval, newval);
        __ATOMIC_ACQ();
        return (retval);
}

static __inline int
atomic_fcmpset_rel_int(volatile u_int *p, u_int *cmpval, u_int newval)
{
        __ATOMIC_REL();
        return (atomic_fcmpset_int(p, cmpval, newval));
}

static __inline int
atomic_fcmpset_acq_long(volatile u_long *p, u_long *cmpval, u_long newval)
{
        int retval;

        retval = atomic_fcmpset_long(p, cmpval, newval);
        __ATOMIC_ACQ();
        return (retval);
}

static __inline int
atomic_fcmpset_rel_long(volatile u_long *p, u_long *cmpval, u_long newval)
{
        __ATOMIC_REL();
        return (atomic_fcmpset_long(p, cmpval, newval));
}

#define atomic_fcmpset_32       atomic_fcmpset_int
#define atomic_fcmpset_acq_32   atomic_fcmpset_acq_int
#define atomic_fcmpset_rel_32   atomic_fcmpset_rel_int

#ifdef __powerpc64__
#define atomic_fcmpset_64       atomic_fcmpset_long
#define atomic_fcmpset_acq_64   atomic_fcmpset_acq_long
#define atomic_fcmpset_rel_64   atomic_fcmpset_rel_long

#define atomic_fcmpset_ptr      atomic_fcmpset_long
#define atomic_fcmpset_acq_ptr  atomic_fcmpset_acq_long
#define atomic_fcmpset_rel_ptr  atomic_fcmpset_rel_long
#else
#define atomic_fcmpset_ptr      atomic_fcmpset_int
#define atomic_fcmpset_acq_ptr  atomic_fcmpset_acq_int
#define atomic_fcmpset_rel_ptr  atomic_fcmpset_rel_int
#endif
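
/*
 * Usage sketch (editorial addition; "ceiling" is hypothetical): unlike
 * cmpset, a failed fcmpset hands the observed value back through
 * *cmpval, so a retry loop needs no explicit re-read.
 *
 *      u_int old;
 *
 *      old = *p;
 *      do {
 *              if (old >= ceiling)
 *                      break;                  already capped; give up
 *      } while (atomic_fcmpset_int(p, &old, old + 1) == 0);
 */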

static __inline u_int
atomic_fetchadd_int(volatile u_int *p, u_int v)
{
        u_int value;

        do {
                value = *p;
        } while (!atomic_cmpset_int(p, value, value + v));
        return (value);
}

static __inline u_long
atomic_fetchadd_long(volatile u_long *p, u_long v)
{
        u_long value;

        do {
                value = *p;
        } while (!atomic_cmpset_long(p, value, value + v));
        return (value);
}

static __inline u_int
atomic_swap_32(volatile u_int *p, u_int v)
{
        u_int prev;

        __asm __volatile(
        "1:     lwarx   %0,0,%2\n"
        "       stwcx.  %3,0,%2\n"
        "       bne-    1b\n"
        : "=&r" (prev), "+m" (*(volatile u_int *)p)
        : "r" (p), "r" (v)
        : "cr0", "memory");

        return (prev);
}

#ifdef __powerpc64__
static __inline u_long
atomic_swap_64(volatile u_long *p, u_long v)
{
        u_long prev;

        __asm __volatile(
        "1:     ldarx   %0,0,%2\n"
        "       stdcx.  %3,0,%2\n"
        "       bne-    1b\n"
        : "=&r" (prev), "+m" (*(volatile u_long *)p)
        : "r" (p), "r" (v)
        : "cr0", "memory");

        return (prev);
}
#endif

#define atomic_fetchadd_32      atomic_fetchadd_int
#define atomic_swap_int         atomic_swap_32

#ifdef __powerpc64__
#define atomic_fetchadd_64      atomic_fetchadd_long
#define atomic_swap_long        atomic_swap_64
#define atomic_swap_ptr         atomic_swap_64
#endif
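
/*
 * Usage sketch (editorial addition; "next" is hypothetical): fetchadd
 * returns the value held *before* the addition, which makes a ticket
 * dispenser a one-liner; atomic_swap_32() likewise returns the
 * previous contents of the word it overwrites.
 *
 *      static volatile u_int next;
 *      u_int my_ticket;
 *
 *      my_ticket = atomic_fetchadd_int(&next, 1);
 */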

#undef __ATOMIC_REL
#undef __ATOMIC_ACQ

static __inline void
atomic_thread_fence_acq(void)
{

        powerpc_lwsync();
}

static __inline void
atomic_thread_fence_rel(void)
{

        powerpc_lwsync();
}

static __inline void
atomic_thread_fence_acq_rel(void)
{

        powerpc_lwsync();
}

static __inline void
atomic_thread_fence_seq_cst(void)
{

        __asm __volatile("sync" : : : "memory");
}

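/*
 * Usage sketch (editorial addition; "buf" and "tail" are hypothetical):
 * the stand-alone fences order plain loads and stores that have no
 * associated atomic operation, e.g. when publishing a ring-buffer
 * index.
 *
 *      buf[tail] = v;
 *      atomic_thread_fence_rel();      buf write visible before tail
 *      tail = tail + 1;
 */
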
#endif /* ! _MACHINE_ATOMIC_H_ */