2 * SPDX-License-Identifier: BSD-2-Clause-FreeBSD
4 * Copyright (c) 2008 Marcel Moolenaar
5 * Copyright (c) 2001 Benno Rice
6 * Copyright (c) 2001 David E. O'Brien
7 * Copyright (c) 1998 Doug Rabson
10 * Redistribution and use in source and binary forms, with or without
11 * modification, are permitted provided that the following conditions
13 * 1. Redistributions of source code must retain the above copyright
14 * notice, this list of conditions and the following disclaimer.
15 * 2. Redistributions in binary form must reproduce the above copyright
16 * notice, this list of conditions and the following disclaimer in the
17 * documentation and/or other materials provided with the distribution.
19 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
20 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
21 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
22 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
23 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
24 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
25 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
26 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
27 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
28 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
34 #ifndef _MACHINE_ATOMIC_H_
35 #define _MACHINE_ATOMIC_H_
38 #error this file needs sys/cdefs.h as a prerequisite
41 #include <sys/atomic_common.h>
44 #include <sys/_atomic64e.h>
48 * The __ATOMIC_REL/ACQ() macros provide memory barriers only in conjunction
49 * with the atomic lXarx/stXcx. sequences below. They are not exposed outside
50 * of this file. See also Appendix B.2 of Book II of the architecture manual.
52 * Note that not all Book-E processors accept the light-weight sync variant.
53 * In particular, early models of E500 cores are known to wedge. Bank on all
54 * 64-bit capable CPUs to accept lwsync properly and pessimize 32-bit CPUs
55 * to use the heavier-weight sync.
/*
 * Memory barrier primitives.  All 64-bit capable CPUs handle lwsync;
 * some older 32-bit Book-E parts (early e500) are known to wedge on it,
 * so the 32-bit side falls back to the heavier full sync (see comment
 * above).  __ATOMIC_ACQ() pairs with the lXarx/stXcx. sequences below.
 */
#ifdef __powerpc64__
#define	mb()		__asm __volatile("sync" : : : "memory")
#define	rmb()		__asm __volatile("lwsync" : : : "memory")
#define	wmb()		__asm __volatile("lwsync" : : : "memory")
#define	__ATOMIC_REL()	__asm __volatile("lwsync" : : : "memory")
#define	__ATOMIC_ACQ()	__asm __volatile("isync" : : : "memory")
#else
#define	mb()		__asm __volatile("sync" : : : "memory")
#define	rmb()		__asm __volatile("sync" : : : "memory")
#define	wmb()		__asm __volatile("sync" : : : "memory")
#define	__ATOMIC_REL()	__asm __volatile("sync" : : : "memory")
#define	__ATOMIC_ACQ()	__asm __volatile("isync" : : : "memory")
#endif
77 __asm __volatile("lwsync" : : : "memory");
79 __asm __volatile("sync" : : : "memory");
88 #define __atomic_add_int(p, v, t) \
90 "1: lwarx %0, 0, %2\n" \
92 " stwcx. %0, 0, %2\n" \
94 : "=&r" (t), "=m" (*p) \
95 : "r" (p), "r" (v), "m" (*p) \
97 /* __atomic_add_int */
100 #define __atomic_add_long(p, v, t) \
102 "1: ldarx %0, 0, %2\n" \
103 " add %0, %3, %0\n" \
104 " stdcx. %0, 0, %2\n" \
106 : "=&r" (t), "=m" (*p) \
107 : "r" (p), "r" (v), "m" (*p) \
109 /* __atomic_add_long */
111 #define __atomic_add_long(p, v, t) \
113 "1: lwarx %0, 0, %2\n" \
114 " add %0, %3, %0\n" \
115 " stwcx. %0, 0, %2\n" \
117 : "=&r" (t), "=m" (*p) \
118 : "r" (p), "r" (v), "m" (*p) \
120 /* __atomic_add_long */
123 #define _ATOMIC_ADD(type) \
124 static __inline void \
125 atomic_add_##type(volatile u_##type *p, u_##type v) { \
127 __atomic_add_##type(p, v, t); \
130 static __inline void \
131 atomic_add_acq_##type(volatile u_##type *p, u_##type v) { \
133 __atomic_add_##type(p, v, t); \
137 static __inline void \
138 atomic_add_rel_##type(volatile u_##type *p, u_##type v) { \
141 __atomic_add_##type(p, v, t); \
148 #define atomic_add_32 atomic_add_int
149 #define atomic_add_acq_32 atomic_add_acq_int
150 #define atomic_add_rel_32 atomic_add_rel_int
/* 64-bit and pointer-width aliases; pointers are long on LP64, int-sized otherwise. */
#ifdef __powerpc64__
#define	atomic_add_64		atomic_add_long
#define	atomic_add_acq_64	atomic_add_acq_long
#define	atomic_add_rel_64	atomic_add_rel_long

#define	atomic_add_ptr		atomic_add_long
#define	atomic_add_acq_ptr	atomic_add_acq_long
#define	atomic_add_rel_ptr	atomic_add_rel_long
#else
#define	atomic_add_ptr		atomic_add_int
#define	atomic_add_acq_ptr	atomic_add_acq_int
#define	atomic_add_rel_ptr	atomic_add_rel_int
#endif
166 #undef __atomic_add_long
167 #undef __atomic_add_int
174 #define __atomic_clear_int(p, v, t) \
176 "1: lwarx %0, 0, %2\n" \
177 " andc %0, %0, %3\n" \
178 " stwcx. %0, 0, %2\n" \
180 : "=&r" (t), "=m" (*p) \
181 : "r" (p), "r" (v), "m" (*p) \
183 /* __atomic_clear_int */
186 #define __atomic_clear_long(p, v, t) \
188 "1: ldarx %0, 0, %2\n" \
189 " andc %0, %0, %3\n" \
190 " stdcx. %0, 0, %2\n" \
192 : "=&r" (t), "=m" (*p) \
193 : "r" (p), "r" (v), "m" (*p) \
195 /* __atomic_clear_long */
197 #define __atomic_clear_long(p, v, t) \
199 "1: lwarx %0, 0, %2\n" \
200 " andc %0, %0, %3\n" \
201 " stwcx. %0, 0, %2\n" \
203 : "=&r" (t), "=m" (*p) \
204 : "r" (p), "r" (v), "m" (*p) \
206 /* __atomic_clear_long */
209 #define _ATOMIC_CLEAR(type) \
210 static __inline void \
211 atomic_clear_##type(volatile u_##type *p, u_##type v) { \
213 __atomic_clear_##type(p, v, t); \
216 static __inline void \
217 atomic_clear_acq_##type(volatile u_##type *p, u_##type v) { \
219 __atomic_clear_##type(p, v, t); \
223 static __inline void \
224 atomic_clear_rel_##type(volatile u_##type *p, u_##type v) { \
227 __atomic_clear_##type(p, v, t); \
235 #define atomic_clear_32 atomic_clear_int
236 #define atomic_clear_acq_32 atomic_clear_acq_int
237 #define atomic_clear_rel_32 atomic_clear_rel_int
/* 64-bit and pointer-width aliases for the clear (and-not) primitives. */
#ifdef __powerpc64__
#define	atomic_clear_64		atomic_clear_long
#define	atomic_clear_acq_64	atomic_clear_acq_long
#define	atomic_clear_rel_64	atomic_clear_rel_long

#define	atomic_clear_ptr	atomic_clear_long
#define	atomic_clear_acq_ptr	atomic_clear_acq_long
#define	atomic_clear_rel_ptr	atomic_clear_rel_long
#else
#define	atomic_clear_ptr	atomic_clear_int
#define	atomic_clear_acq_ptr	atomic_clear_acq_int
#define	atomic_clear_rel_ptr	atomic_clear_rel_int
#endif
253 #undef __atomic_clear_long
254 #undef __atomic_clear_int
257 * atomic_cmpset(p, o, n)
259 /* TODO -- see below */
264 /* TODO -- see below */
267 * atomic_readandclear(p)
269 /* TODO -- see below */
276 #define __atomic_set_int(p, v, t) \
278 "1: lwarx %0, 0, %2\n" \
280 " stwcx. %0, 0, %2\n" \
282 : "=&r" (t), "=m" (*p) \
283 : "r" (p), "r" (v), "m" (*p) \
285 /* __atomic_set_int */
288 #define __atomic_set_long(p, v, t) \
290 "1: ldarx %0, 0, %2\n" \
292 " stdcx. %0, 0, %2\n" \
294 : "=&r" (t), "=m" (*p) \
295 : "r" (p), "r" (v), "m" (*p) \
297 /* __atomic_set_long */
299 #define __atomic_set_long(p, v, t) \
301 "1: lwarx %0, 0, %2\n" \
303 " stwcx. %0, 0, %2\n" \
305 : "=&r" (t), "=m" (*p) \
306 : "r" (p), "r" (v), "m" (*p) \
308 /* __atomic_set_long */
311 #define _ATOMIC_SET(type) \
312 static __inline void \
313 atomic_set_##type(volatile u_##type *p, u_##type v) { \
315 __atomic_set_##type(p, v, t); \
318 static __inline void \
319 atomic_set_acq_##type(volatile u_##type *p, u_##type v) { \
321 __atomic_set_##type(p, v, t); \
325 static __inline void \
326 atomic_set_rel_##type(volatile u_##type *p, u_##type v) { \
329 __atomic_set_##type(p, v, t); \
336 #define atomic_set_32 atomic_set_int
337 #define atomic_set_acq_32 atomic_set_acq_int
338 #define atomic_set_rel_32 atomic_set_rel_int
/* 64-bit and pointer-width aliases for the set (or) primitives. */
#ifdef __powerpc64__
#define	atomic_set_64		atomic_set_long
#define	atomic_set_acq_64	atomic_set_acq_long
#define	atomic_set_rel_64	atomic_set_rel_long

#define	atomic_set_ptr		atomic_set_long
#define	atomic_set_acq_ptr	atomic_set_acq_long
#define	atomic_set_rel_ptr	atomic_set_rel_long
#else
#define	atomic_set_ptr		atomic_set_int
#define	atomic_set_acq_ptr	atomic_set_acq_int
#define	atomic_set_rel_ptr	atomic_set_rel_int
#endif
354 #undef __atomic_set_long
355 #undef __atomic_set_int
358 * atomic_subtract(p, v)
362 #define __atomic_subtract_int(p, v, t) \
364 "1: lwarx %0, 0, %2\n" \
365 " subf %0, %3, %0\n" \
366 " stwcx. %0, 0, %2\n" \
368 : "=&r" (t), "=m" (*p) \
369 : "r" (p), "r" (v), "m" (*p) \
371 /* __atomic_subtract_int */
374 #define __atomic_subtract_long(p, v, t) \
376 "1: ldarx %0, 0, %2\n" \
377 " subf %0, %3, %0\n" \
378 " stdcx. %0, 0, %2\n" \
380 : "=&r" (t), "=m" (*p) \
381 : "r" (p), "r" (v), "m" (*p) \
383 /* __atomic_subtract_long */
385 #define __atomic_subtract_long(p, v, t) \
387 "1: lwarx %0, 0, %2\n" \
388 " subf %0, %3, %0\n" \
389 " stwcx. %0, 0, %2\n" \
391 : "=&r" (t), "=m" (*p) \
392 : "r" (p), "r" (v), "m" (*p) \
394 /* __atomic_subtract_long */
397 #define _ATOMIC_SUBTRACT(type) \
398 static __inline void \
399 atomic_subtract_##type(volatile u_##type *p, u_##type v) { \
401 __atomic_subtract_##type(p, v, t); \
404 static __inline void \
405 atomic_subtract_acq_##type(volatile u_##type *p, u_##type v) { \
407 __atomic_subtract_##type(p, v, t); \
411 static __inline void \
412 atomic_subtract_rel_##type(volatile u_##type *p, u_##type v) { \
415 __atomic_subtract_##type(p, v, t); \
417 /* _ATOMIC_SUBTRACT */
419 _ATOMIC_SUBTRACT(int)
420 _ATOMIC_SUBTRACT(long)
422 #define atomic_subtract_32 atomic_subtract_int
423 #define atomic_subtract_acq_32 atomic_subtract_acq_int
424 #define atomic_subtract_rel_32 atomic_subtract_rel_int
/* 64-bit and pointer-width aliases for the subtract primitives. */
#ifdef __powerpc64__
#define	atomic_subtract_64	atomic_subtract_long
/* NB: fixed typo — this previously aliased the nonexistent "atomic_subract_acq_long". */
#define	atomic_subtract_acq_64	atomic_subtract_acq_long
#define	atomic_subtract_rel_64	atomic_subtract_rel_long

#define	atomic_subtract_ptr	atomic_subtract_long
#define	atomic_subtract_acq_ptr	atomic_subtract_acq_long
#define	atomic_subtract_rel_ptr	atomic_subtract_rel_long
#else
#define	atomic_subtract_ptr	atomic_subtract_int
#define	atomic_subtract_acq_ptr	atomic_subtract_acq_int
#define	atomic_subtract_rel_ptr	atomic_subtract_rel_int
#endif
439 #undef _ATOMIC_SUBTRACT
440 #undef __atomic_subtract_long
441 #undef __atomic_subtract_int
444 * atomic_store_rel(p, v)
446 /* TODO -- see below */
449 * Old/original implementations that still need revisiting.
452 static __inline u_int
453 atomic_readandclear_int(volatile u_int *addr)
458 "\tsync\n" /* drain writes */
459 "1:\tlwarx %0, 0, %3\n\t" /* load old value */
460 "li %1, 0\n\t" /* load new value */
461 "stwcx. %1, 0, %3\n\t" /* attempt to store */
462 "bne- 1b\n\t" /* spin if failed */
463 : "=&r"(result), "=&r"(temp), "=m" (*addr)
464 : "r" (addr), "m" (*addr)
471 static __inline u_long
472 atomic_readandclear_long(volatile u_long *addr)
477 "\tsync\n" /* drain writes */
478 "1:\tldarx %0, 0, %3\n\t" /* load old value */
479 "li %1, 0\n\t" /* load new value */
480 "stdcx. %1, 0, %3\n\t" /* attempt to store */
481 "bne- 1b\n\t" /* spin if failed */
482 : "=&r"(result), "=&r"(temp), "=m" (*addr)
483 : "r" (addr), "m" (*addr)
490 #define atomic_readandclear_32 atomic_readandclear_int
493 #define atomic_readandclear_64 atomic_readandclear_long
495 #define atomic_readandclear_ptr atomic_readandclear_long
497 static __inline u_long
498 atomic_readandclear_long(volatile u_long *addr)
501 return ((u_long)atomic_readandclear_int((volatile u_int *)addr));
504 #define atomic_readandclear_ptr atomic_readandclear_int
508 * We assume that a = b will do atomic loads and stores.
510 #define ATOMIC_STORE_LOAD(TYPE) \
511 static __inline u_##TYPE \
512 atomic_load_acq_##TYPE(volatile u_##TYPE *p) \
521 static __inline void \
522 atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v) \
529 ATOMIC_STORE_LOAD(int)
531 #define atomic_load_acq_32 atomic_load_acq_int
532 #define atomic_store_rel_32 atomic_store_rel_int
535 ATOMIC_STORE_LOAD(long)
537 #define atomic_load_acq_64 atomic_load_acq_long
538 #define atomic_store_rel_64 atomic_store_rel_long
540 #define atomic_load_acq_ptr atomic_load_acq_long
541 #define atomic_store_rel_ptr atomic_store_rel_long
543 static __inline u_long
544 atomic_load_acq_long(volatile u_long *addr)
547 return ((u_long)atomic_load_acq_int((volatile u_int *)addr));
551 atomic_store_rel_long(volatile u_long *addr, u_long val)
554 atomic_store_rel_int((volatile u_int *)addr, (u_int)val);
557 #define atomic_load_acq_ptr atomic_load_acq_int
558 #define atomic_store_rel_ptr atomic_store_rel_int
560 #undef ATOMIC_STORE_LOAD
563 * Atomically compare the value stored at *p with cmpval and if the
564 * two values are equal, update the value of *p with newval. Returns
565 * zero if the compare failed, nonzero otherwise.
567 #ifdef ISA_206_ATOMICS
569 atomic_cmpset_char(volatile u_char *p, u_char cmpval, u_char newval)
574 "1:\tlbarx %0, 0, %2\n\t" /* load old value */
575 "cmplw %3, %0\n\t" /* compare */
576 "bne- 2f\n\t" /* exit if not equal */
577 "stbcx. %4, 0, %2\n\t" /* attempt to store */
578 "bne- 1b\n\t" /* spin if failed */
579 "li %0, 1\n\t" /* success - retval = 1 */
580 "b 3f\n\t" /* we've succeeded */
582 "stbcx. %0, 0, %2\n\t" /* clear reservation (74xx) */
583 "li %0, 0\n\t" /* failure - retval = 0 */
585 : "=&r" (ret), "=m" (*p)
586 : "r" (p), "r" (cmpval), "r" (newval), "m" (*p)
593 atomic_cmpset_short(volatile u_short *p, u_short cmpval, u_short newval)
598 "1:\tlharx %0, 0, %2\n\t" /* load old value */
599 "cmplw %3, %0\n\t" /* compare */
600 "bne- 2f\n\t" /* exit if not equal */
601 "sthcx. %4, 0, %2\n\t" /* attempt to store */
602 "bne- 1b\n\t" /* spin if failed */
603 "li %0, 1\n\t" /* success - retval = 1 */
604 "b 3f\n\t" /* we've succeeded */
606 "sthcx. %0, 0, %2\n\t" /* clear reservation (74xx) */
607 "li %0, 0\n\t" /* failure - retval = 0 */
609 : "=&r" (ret), "=m" (*p)
610 : "r" (p), "r" (cmpval), "r" (newval), "m" (*p)
617 atomic_cmpset_masked(uint32_t *p, uint32_t cmpval, uint32_t newval,
624 "1:\tlwarx %2, 0, %3\n\t" /* load old value */
626 "cmplw %4, %0\n\t" /* compare */
627 "bne- 2f\n\t" /* exit if not equal */
628 "andc %2, %2, %7\n\t"
630 "stwcx. %2, 0, %3\n\t" /* attempt to store */
631 "bne- 1b\n\t" /* spin if failed */
632 "li %0, 1\n\t" /* success - retval = 1 */
633 "b 3f\n\t" /* we've succeeded */
635 "stwcx. %2, 0, %3\n\t" /* clear reservation (74xx) */
636 "li %0, 0\n\t" /* failure - retval = 0 */
638 : "=&r" (ret), "=m" (*p), "+&r" (tmp)
639 : "r" (p), "r" (cmpval), "r" (newval), "m" (*p),
646 #define _atomic_cmpset_masked_word(a,o,v,m) atomic_cmpset_masked(a, o, v, m)
650 atomic_cmpset_int(volatile u_int* p, u_int cmpval, u_int newval)
655 "1:\tlwarx %0, 0, %2\n\t" /* load old value */
656 "cmplw %3, %0\n\t" /* compare */
657 "bne 2f\n\t" /* exit if not equal */
658 "stwcx. %4, 0, %2\n\t" /* attempt to store */
659 "bne- 1b\n\t" /* spin if failed */
660 "li %0, 1\n\t" /* success - retval = 1 */
661 "b 3f\n\t" /* we've succeeded */
663 "stwcx. %0, 0, %2\n\t" /* clear reservation (74xx) */
664 "li %0, 0\n\t" /* failure - retval = 0 */
666 : "=&r" (ret), "=m" (*p)
667 : "r" (p), "r" (cmpval), "r" (newval), "m" (*p)
673 atomic_cmpset_long(volatile u_long* p, u_long cmpval, u_long newval)
679 "1:\tldarx %0, 0, %2\n\t" /* load old value */
680 "cmpld %3, %0\n\t" /* compare */
681 "bne 2f\n\t" /* exit if not equal */
682 "stdcx. %4, 0, %2\n\t" /* attempt to store */
684 "1:\tlwarx %0, 0, %2\n\t" /* load old value */
685 "cmplw %3, %0\n\t" /* compare */
686 "bne 2f\n\t" /* exit if not equal */
687 "stwcx. %4, 0, %2\n\t" /* attempt to store */
689 "bne- 1b\n\t" /* spin if failed */
690 "li %0, 1\n\t" /* success - retval = 1 */
691 "b 3f\n\t" /* we've succeeded */
694 "stdcx. %0, 0, %2\n\t" /* clear reservation (74xx) */
696 "stwcx. %0, 0, %2\n\t" /* clear reservation (74xx) */
698 "li %0, 0\n\t" /* failure - retval = 0 */
700 : "=&r" (ret), "=m" (*p)
701 : "r" (p), "r" (cmpval), "r" (newval), "m" (*p)
707 #define ATOMIC_CMPSET_ACQ_REL(type) \
708 static __inline int \
709 atomic_cmpset_acq_##type(volatile u_##type *p, \
710 u_##type cmpval, u_##type newval)\
713 retval = atomic_cmpset_##type(p, cmpval, newval);\
717 static __inline int \
718 atomic_cmpset_rel_##type(volatile u_##type *p, \
719 u_##type cmpval, u_##type newval)\
722 return (atomic_cmpset_##type(p, cmpval, newval));\
726 ATOMIC_CMPSET_ACQ_REL(int);
727 ATOMIC_CMPSET_ACQ_REL(long);
730 #define atomic_cmpset_8 atomic_cmpset_char
731 #define atomic_cmpset_acq_8 atomic_cmpset_acq_char
732 #define atomic_cmpset_rel_8 atomic_cmpset_rel_char
734 #define atomic_cmpset_16 atomic_cmpset_short
735 #define atomic_cmpset_acq_16 atomic_cmpset_acq_short
736 #define atomic_cmpset_rel_16 atomic_cmpset_rel_short
738 #define atomic_cmpset_32 atomic_cmpset_int
739 #define atomic_cmpset_acq_32 atomic_cmpset_acq_int
740 #define atomic_cmpset_rel_32 atomic_cmpset_rel_int
/* 64-bit and pointer-width aliases for compare-and-set. */
#ifdef __powerpc64__
#define	atomic_cmpset_64	atomic_cmpset_long
#define	atomic_cmpset_acq_64	atomic_cmpset_acq_long
#define	atomic_cmpset_rel_64	atomic_cmpset_rel_long

#define	atomic_cmpset_ptr	atomic_cmpset_long
#define	atomic_cmpset_acq_ptr	atomic_cmpset_acq_long
#define	atomic_cmpset_rel_ptr	atomic_cmpset_rel_long
#else
#define	atomic_cmpset_ptr	atomic_cmpset_int
#define	atomic_cmpset_acq_ptr	atomic_cmpset_acq_int
#define	atomic_cmpset_rel_ptr	atomic_cmpset_rel_int
#endif
757 * Atomically compare the value stored at *p with *cmpval and if the
758 * two values are equal, update the value of *p with newval. Returns
759 * zero if the compare failed and sets *cmpval to the read value from *p,
762 #ifdef ISA_206_ATOMICS
764 atomic_fcmpset_char(volatile u_char *p, u_char *cmpval, u_char newval)
769 "lbarx %0, 0, %3\n\t" /* load old value */
770 "cmplw %4, %0\n\t" /* compare */
771 "bne- 1f\n\t" /* exit if not equal */
772 "stbcx. %5, 0, %3\n\t" /* attempt to store */
773 "bne- 1f\n\t" /* exit if failed */
774 "li %0, 1\n\t" /* success - retval = 1 */
775 "b 2f\n\t" /* we've succeeded */
777 "stbcx. %0, 0, %3\n\t" /* clear reservation (74xx) */
779 "li %0, 0\n\t" /* failure - retval = 0 */
781 : "=&r" (ret), "=m" (*p), "=m" (*cmpval)
782 : "r" (p), "r" (*cmpval), "r" (newval), "m" (*p), "r"(cmpval)
789 atomic_fcmpset_short(volatile u_short *p, u_short *cmpval, u_short newval)
794 "lharx %0, 0, %3\n\t" /* load old value */
795 "cmplw %4, %0\n\t" /* compare */
796 "bne- 1f\n\t" /* exit if not equal */
797 "sthcx. %5, 0, %3\n\t" /* attempt to store */
798 "bne- 1f\n\t" /* exit if failed */
799 "li %0, 1\n\t" /* success - retval = 1 */
800 "b 2f\n\t" /* we've succeeded */
802 "sthcx. %0, 0, %3\n\t" /* clear reservation (74xx) */
804 "li %0, 0\n\t" /* failure - retval = 0 */
806 : "=&r" (ret), "=m" (*p), "=m" (*cmpval)
807 : "r" (p), "r" (*cmpval), "r" (newval), "m" (*p), "r"(cmpval)
812 #endif /* ISA_206_ATOMICS */
815 atomic_fcmpset_int(volatile u_int *p, u_int *cmpval, u_int newval)
820 "lwarx %0, 0, %3\n\t" /* load old value */
821 "cmplw %4, %0\n\t" /* compare */
822 "bne 1f\n\t" /* exit if not equal */
823 "stwcx. %5, 0, %3\n\t" /* attempt to store */
824 "bne- 1f\n\t" /* exit if failed */
825 "li %0, 1\n\t" /* success - retval = 1 */
826 "b 2f\n\t" /* we've succeeded */
828 "stwcx. %0, 0, %3\n\t" /* clear reservation (74xx) */
830 "li %0, 0\n\t" /* failure - retval = 0 */
832 : "=&r" (ret), "=m" (*p), "=m" (*cmpval)
833 : "r" (p), "r" (*cmpval), "r" (newval), "m" (*p), "r"(cmpval)
839 atomic_fcmpset_long(volatile u_long *p, u_long *cmpval, u_long newval)
845 "ldarx %0, 0, %3\n\t" /* load old value */
846 "cmpld %4, %0\n\t" /* compare */
847 "bne 1f\n\t" /* exit if not equal */
848 "stdcx. %5, 0, %3\n\t" /* attempt to store */
850 "lwarx %0, 0, %3\n\t" /* load old value */
851 "cmplw %4, %0\n\t" /* compare */
852 "bne 1f\n\t" /* exit if not equal */
853 "stwcx. %5, 0, %3\n\t" /* attempt to store */
855 "bne- 1f\n\t" /* exit if failed */
856 "li %0, 1\n\t" /* success - retval = 1 */
857 "b 2f\n\t" /* we've succeeded */
860 "stdcx. %0, 0, %3\n\t" /* clear reservation (74xx) */
863 "stwcx. %0, 0, %3\n\t" /* clear reservation (74xx) */
866 "li %0, 0\n\t" /* failure - retval = 0 */
868 : "=&r" (ret), "=m" (*p), "=m" (*cmpval)
869 : "r" (p), "r" (*cmpval), "r" (newval), "m" (*p), "r"(cmpval)
875 #define ATOMIC_FCMPSET_ACQ_REL(type) \
876 static __inline int \
877 atomic_fcmpset_acq_##type(volatile u_##type *p, \
878 u_##type *cmpval, u_##type newval)\
881 retval = atomic_fcmpset_##type(p, cmpval, newval);\
885 static __inline int \
886 atomic_fcmpset_rel_##type(volatile u_##type *p, \
887 u_##type *cmpval, u_##type newval)\
890 return (atomic_fcmpset_##type(p, cmpval, newval));\
894 ATOMIC_FCMPSET_ACQ_REL(int);
895 ATOMIC_FCMPSET_ACQ_REL(long);
897 #define atomic_fcmpset_8 atomic_fcmpset_char
898 #define atomic_fcmpset_acq_8 atomic_fcmpset_acq_char
899 #define atomic_fcmpset_rel_8 atomic_fcmpset_rel_char
901 #define atomic_fcmpset_16 atomic_fcmpset_short
902 #define atomic_fcmpset_acq_16 atomic_fcmpset_acq_short
903 #define atomic_fcmpset_rel_16 atomic_fcmpset_rel_short
905 #define atomic_fcmpset_32 atomic_fcmpset_int
906 #define atomic_fcmpset_acq_32 atomic_fcmpset_acq_int
907 #define atomic_fcmpset_rel_32 atomic_fcmpset_rel_int
/* 64-bit and pointer-width aliases for fetch-compare-and-set. */
#ifdef __powerpc64__
#define	atomic_fcmpset_64	atomic_fcmpset_long
#define	atomic_fcmpset_acq_64	atomic_fcmpset_acq_long
#define	atomic_fcmpset_rel_64	atomic_fcmpset_rel_long

#define	atomic_fcmpset_ptr	atomic_fcmpset_long
#define	atomic_fcmpset_acq_ptr	atomic_fcmpset_acq_long
#define	atomic_fcmpset_rel_ptr	atomic_fcmpset_rel_long
#else
#define	atomic_fcmpset_ptr	atomic_fcmpset_int
#define	atomic_fcmpset_acq_ptr	atomic_fcmpset_acq_int
#define	atomic_fcmpset_rel_ptr	atomic_fcmpset_rel_int
#endif
923 static __inline u_int
924 atomic_fetchadd_int(volatile u_int *p, u_int v)
930 } while (!atomic_cmpset_int(p, value, value + v));
934 static __inline u_long
935 atomic_fetchadd_long(volatile u_long *p, u_long v)
941 } while (!atomic_cmpset_long(p, value, value + v));
945 static __inline u_int
946 atomic_swap_32(volatile u_int *p, u_int v)
954 : "=&r" (prev), "+m" (*(volatile u_int *)p)
962 static __inline u_long
963 atomic_swap_64(volatile u_long *p, u_long v)
971 : "=&r" (prev), "+m" (*(volatile u_long *)p)
979 #define atomic_fetchadd_32 atomic_fetchadd_int
980 #define atomic_swap_int atomic_swap_32
#ifdef __powerpc64__
#define	atomic_fetchadd_64	atomic_fetchadd_long
#define	atomic_swap_long	atomic_swap_64
#define	atomic_swap_ptr		atomic_swap_64
#else
/* 32-bit CPUs: longs and pointers are 32 bits, use the 32-bit swap. */
#define	atomic_swap_long(p,v)	atomic_swap_32((volatile u_int *)(p), v)
#define	atomic_swap_ptr(p,v)	atomic_swap_32((volatile u_int *)(p), v)
#endif
992 atomic_thread_fence_acq(void)
999 atomic_thread_fence_rel(void)
1005 static __inline void
1006 atomic_thread_fence_acq_rel(void)
1012 static __inline void
1013 atomic_thread_fence_seq_cst(void)
1016 __asm __volatile("sync" : : : "memory");
1019 #ifndef ISA_206_ATOMICS
1020 #include <sys/_atomic_subword.h>
1023 /* These need sys/_atomic_subword.h on non-ISA-2.06-atomic platforms. */
1024 ATOMIC_CMPSET_ACQ_REL(char);
1025 ATOMIC_CMPSET_ACQ_REL(short);
1027 ATOMIC_FCMPSET_ACQ_REL(char);
1028 ATOMIC_FCMPSET_ACQ_REL(short);
1033 #endif /* ! _MACHINE_ATOMIC_H_ */