2 * SPDX-License-Identifier: BSD-2-Clause-FreeBSD
4 * Copyright (c) 2008 Marcel Moolenaar
5 * Copyright (c) 2001 Benno Rice
6 * Copyright (c) 2001 David E. O'Brien
7 * Copyright (c) 1998 Doug Rabson
10 * Redistribution and use in source and binary forms, with or without
11 * modification, are permitted provided that the following conditions
13 * 1. Redistributions of source code must retain the above copyright
14 * notice, this list of conditions and the following disclaimer.
15 * 2. Redistributions in binary form must reproduce the above copyright
16 * notice, this list of conditions and the following disclaimer in the
17 * documentation and/or other materials provided with the distribution.
19 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
20 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
21 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
22 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
23 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
24 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
25 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
26 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
27 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
28 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
34 #ifndef _MACHINE_ATOMIC_H_
35 #define _MACHINE_ATOMIC_H_
38 #error this file needs sys/cdefs.h as a prerequisite
41 #include <sys/atomic_common.h>
44 #include <sys/_atomic64e.h>
48 * The __ATOMIC_REL/ACQ() macros provide memory barriers only in conjunction
49 * with the atomic lXarx/stXcx. sequences below. They are not exposed outside
50 * of this file. See also Appendix B.2 of Book II of the architecture manual.
52 * Note that not all Book-E processors accept the light-weight sync variant.
53 * In particular, early models of E500 cores are known to wedge. Bank on all
54 * 64-bit capable CPUs to accept lwsync properly and pessimize 32-bit CPUs
55 * to use the heavier-weight sync.
/*
 * Memory barrier macros.  A full barrier (mb) is always the heavyweight
 * "sync"; on this first path the read/write barriers and the release
 * fence use the cheaper "lwsync", with "isync" serving as the acquire
 * fence after a successful lXarx/stXcx. sequence (see comment above).
 * NOTE(review): the second, sync-only set of definitions below appears
 * to be the alternate branch of a preprocessor conditional that is not
 * visible in this chunk (presumably #ifdef __powerpc64__ / #else) --
 * confirm against the full file.
 */
59 #define mb() __asm __volatile("sync" : : : "memory")
60 #define rmb() __asm __volatile("lwsync" : : : "memory")
61 #define wmb() __asm __volatile("lwsync" : : : "memory")
62 #define __ATOMIC_REL() __asm __volatile("lwsync" : : : "memory")
63 #define __ATOMIC_ACQ() __asm __volatile("isync" : : : "memory")
/*
 * Fallback set: per the comment above, some Book-E parts (early e500
 * cores) wedge on lwsync, so every barrier here degrades to the full
 * "sync"; the acquire fence remains "isync".
 */
65 #define mb() __asm __volatile("sync" : : : "memory")
66 #define rmb() __asm __volatile("sync" : : : "memory")
67 #define wmb() __asm __volatile("sync" : : : "memory")
68 #define __ATOMIC_REL() __asm __volatile("sync" : : : "memory")
69 #define __ATOMIC_ACQ() __asm __volatile("isync" : : : "memory")
77 __asm __volatile("lwsync" : : : "memory");
79 __asm __volatile("sync" : : : "memory");
88 #define __atomic_add_int(p, v, t) \
90 "1: lwarx %0, 0, %2\n" \
92 " stwcx. %0, 0, %2\n" \
94 : "=&r" (t), "=m" (*p) \
95 : "r" (p), "r" (v), "m" (*p) \
97 /* __atomic_add_int */
100 #define __atomic_add_long(p, v, t) \
102 "1: ldarx %0, 0, %2\n" \
103 " add %0, %3, %0\n" \
104 " stdcx. %0, 0, %2\n" \
106 : "=&r" (t), "=m" (*p) \
107 : "r" (p), "r" (v), "m" (*p) \
109 /* __atomic_add_long */
111 #define __atomic_add_long(p, v, t) \
113 "1: lwarx %0, 0, %2\n" \
114 " add %0, %3, %0\n" \
115 " stwcx. %0, 0, %2\n" \
117 : "=&r" (t), "=m" (*p) \
118 : "r" (p), "r" (v), "m" (*p) \
120 /* __atomic_add_long */
123 #define _ATOMIC_ADD(type) \
124 static __inline void \
125 atomic_add_##type(volatile u_##type *p, u_##type v) { \
127 __atomic_add_##type(p, v, t); \
130 static __inline void \
131 atomic_add_acq_##type(volatile u_##type *p, u_##type v) { \
133 __atomic_add_##type(p, v, t); \
137 static __inline void \
138 atomic_add_rel_##type(volatile u_##type *p, u_##type v) { \
141 __atomic_add_##type(p, v, t); \
148 #define atomic_add_32 atomic_add_int
149 #define atomic_add_acq_32 atomic_add_acq_int
150 #define atomic_add_rel_32 atomic_add_rel_int
153 #define atomic_add_64 atomic_add_long
154 #define atomic_add_acq_64 atomic_add_acq_long
155 #define atomic_add_rel_64 atomic_add_rel_long
157 #define atomic_add_ptr atomic_add_long
158 #define atomic_add_acq_ptr atomic_add_acq_long
159 #define atomic_add_rel_ptr atomic_add_rel_long
161 #define atomic_add_ptr atomic_add_int
162 #define atomic_add_acq_ptr atomic_add_acq_int
163 #define atomic_add_rel_ptr atomic_add_rel_int
166 #undef __atomic_add_long
167 #undef __atomic_add_int
174 #define __atomic_clear_int(p, v, t) \
176 "1: lwarx %0, 0, %2\n" \
177 " andc %0, %0, %3\n" \
178 " stwcx. %0, 0, %2\n" \
180 : "=&r" (t), "=m" (*p) \
181 : "r" (p), "r" (v), "m" (*p) \
183 /* __atomic_clear_int */
186 #define __atomic_clear_long(p, v, t) \
188 "1: ldarx %0, 0, %2\n" \
189 " andc %0, %0, %3\n" \
190 " stdcx. %0, 0, %2\n" \
192 : "=&r" (t), "=m" (*p) \
193 : "r" (p), "r" (v), "m" (*p) \
195 /* __atomic_clear_long */
197 #define __atomic_clear_long(p, v, t) \
199 "1: lwarx %0, 0, %2\n" \
200 " andc %0, %0, %3\n" \
201 " stwcx. %0, 0, %2\n" \
203 : "=&r" (t), "=m" (*p) \
204 : "r" (p), "r" (v), "m" (*p) \
206 /* __atomic_clear_long */
209 #define _ATOMIC_CLEAR(type) \
210 static __inline void \
211 atomic_clear_##type(volatile u_##type *p, u_##type v) { \
213 __atomic_clear_##type(p, v, t); \
216 static __inline void \
217 atomic_clear_acq_##type(volatile u_##type *p, u_##type v) { \
219 __atomic_clear_##type(p, v, t); \
223 static __inline void \
224 atomic_clear_rel_##type(volatile u_##type *p, u_##type v) { \
227 __atomic_clear_##type(p, v, t); \
234 #define atomic_clear_32 atomic_clear_int
235 #define atomic_clear_acq_32 atomic_clear_acq_int
236 #define atomic_clear_rel_32 atomic_clear_rel_int
239 #define atomic_clear_64 atomic_clear_long
240 #define atomic_clear_acq_64 atomic_clear_acq_long
241 #define atomic_clear_rel_64 atomic_clear_rel_long
243 #define atomic_clear_ptr atomic_clear_long
244 #define atomic_clear_acq_ptr atomic_clear_acq_long
245 #define atomic_clear_rel_ptr atomic_clear_rel_long
247 #define atomic_clear_ptr atomic_clear_int
248 #define atomic_clear_acq_ptr atomic_clear_acq_int
249 #define atomic_clear_rel_ptr atomic_clear_rel_int
252 #undef __atomic_clear_long
253 #undef __atomic_clear_int
256 * atomic_cmpset(p, o, n)
258 /* TODO -- see below */
263 /* TODO -- see below */
266 * atomic_readandclear(p)
268 /* TODO -- see below */
275 #define __atomic_set_int(p, v, t) \
277 "1: lwarx %0, 0, %2\n" \
279 " stwcx. %0, 0, %2\n" \
281 : "=&r" (t), "=m" (*p) \
282 : "r" (p), "r" (v), "m" (*p) \
284 /* __atomic_set_int */
287 #define __atomic_set_long(p, v, t) \
289 "1: ldarx %0, 0, %2\n" \
291 " stdcx. %0, 0, %2\n" \
293 : "=&r" (t), "=m" (*p) \
294 : "r" (p), "r" (v), "m" (*p) \
296 /* __atomic_set_long */
298 #define __atomic_set_long(p, v, t) \
300 "1: lwarx %0, 0, %2\n" \
302 " stwcx. %0, 0, %2\n" \
304 : "=&r" (t), "=m" (*p) \
305 : "r" (p), "r" (v), "m" (*p) \
307 /* __atomic_set_long */
310 #define _ATOMIC_SET(type) \
311 static __inline void \
312 atomic_set_##type(volatile u_##type *p, u_##type v) { \
314 __atomic_set_##type(p, v, t); \
317 static __inline void \
318 atomic_set_acq_##type(volatile u_##type *p, u_##type v) { \
320 __atomic_set_##type(p, v, t); \
324 static __inline void \
325 atomic_set_rel_##type(volatile u_##type *p, u_##type v) { \
328 __atomic_set_##type(p, v, t); \
335 #define atomic_set_32 atomic_set_int
336 #define atomic_set_acq_32 atomic_set_acq_int
337 #define atomic_set_rel_32 atomic_set_rel_int
340 #define atomic_set_64 atomic_set_long
341 #define atomic_set_acq_64 atomic_set_acq_long
342 #define atomic_set_rel_64 atomic_set_rel_long
344 #define atomic_set_ptr atomic_set_long
345 #define atomic_set_acq_ptr atomic_set_acq_long
346 #define atomic_set_rel_ptr atomic_set_rel_long
348 #define atomic_set_ptr atomic_set_int
349 #define atomic_set_acq_ptr atomic_set_acq_int
350 #define atomic_set_rel_ptr atomic_set_rel_int
353 #undef __atomic_set_long
354 #undef __atomic_set_int
357 * atomic_subtract(p, v)
361 #define __atomic_subtract_int(p, v, t) \
363 "1: lwarx %0, 0, %2\n" \
364 " subf %0, %3, %0\n" \
365 " stwcx. %0, 0, %2\n" \
367 : "=&r" (t), "=m" (*p) \
368 : "r" (p), "r" (v), "m" (*p) \
370 /* __atomic_subtract_int */
373 #define __atomic_subtract_long(p, v, t) \
375 "1: ldarx %0, 0, %2\n" \
376 " subf %0, %3, %0\n" \
377 " stdcx. %0, 0, %2\n" \
379 : "=&r" (t), "=m" (*p) \
380 : "r" (p), "r" (v), "m" (*p) \
382 /* __atomic_subtract_long */
384 #define __atomic_subtract_long(p, v, t) \
386 "1: lwarx %0, 0, %2\n" \
387 " subf %0, %3, %0\n" \
388 " stwcx. %0, 0, %2\n" \
390 : "=&r" (t), "=m" (*p) \
391 : "r" (p), "r" (v), "m" (*p) \
393 /* __atomic_subtract_long */
396 #define _ATOMIC_SUBTRACT(type) \
397 static __inline void \
398 atomic_subtract_##type(volatile u_##type *p, u_##type v) { \
400 __atomic_subtract_##type(p, v, t); \
403 static __inline void \
404 atomic_subtract_acq_##type(volatile u_##type *p, u_##type v) { \
406 __atomic_subtract_##type(p, v, t); \
410 static __inline void \
411 atomic_subtract_rel_##type(volatile u_##type *p, u_##type v) { \
414 __atomic_subtract_##type(p, v, t); \
416 /* _ATOMIC_SUBTRACT */
418 _ATOMIC_SUBTRACT(int)
419 _ATOMIC_SUBTRACT(long)
421 #define atomic_subtract_32 atomic_subtract_int
422 #define atomic_subtract_acq_32 atomic_subtract_acq_int
423 #define atomic_subtract_rel_32 atomic_subtract_rel_int
426 #define atomic_subtract_64 atomic_subtract_long
/* Fix typo: alias previously expanded to nonexistent "atomic_subract_acq_long",
 * breaking any caller of atomic_subtract_acq_64. */
427 #define atomic_subtract_acq_64 atomic_subtract_acq_long
428 #define atomic_subtract_rel_64 atomic_subtract_rel_long
430 #define atomic_subtract_ptr atomic_subtract_long
431 #define atomic_subtract_acq_ptr atomic_subtract_acq_long
432 #define atomic_subtract_rel_ptr atomic_subtract_rel_long
434 #define atomic_subtract_ptr atomic_subtract_int
435 #define atomic_subtract_acq_ptr atomic_subtract_acq_int
436 #define atomic_subtract_rel_ptr atomic_subtract_rel_int
438 #undef _ATOMIC_SUBTRACT
439 #undef __atomic_subtract_long
440 #undef __atomic_subtract_int
443 * atomic_store_rel(p, v)
445 /* TODO -- see below */
448 * Old/original implementations that still need revisiting.
451 static __inline u_int
452 atomic_readandclear_int(volatile u_int *addr)
457 "\tsync\n" /* drain writes */
458 "1:\tlwarx %0, 0, %3\n\t" /* load old value */
459 "li %1, 0\n\t" /* load new value */
460 "stwcx. %1, 0, %3\n\t" /* attempt to store */
461 "bne- 1b\n\t" /* spin if failed */
462 : "=&r"(result), "=&r"(temp), "=m" (*addr)
463 : "r" (addr), "m" (*addr)
470 static __inline u_long
471 atomic_readandclear_long(volatile u_long *addr)
476 "\tsync\n" /* drain writes */
477 "1:\tldarx %0, 0, %3\n\t" /* load old value */
478 "li %1, 0\n\t" /* load new value */
479 "stdcx. %1, 0, %3\n\t" /* attempt to store */
480 "bne- 1b\n\t" /* spin if failed */
481 : "=&r"(result), "=&r"(temp), "=m" (*addr)
482 : "r" (addr), "m" (*addr)
489 #define atomic_readandclear_32 atomic_readandclear_int
492 #define atomic_readandclear_64 atomic_readandclear_long
494 #define atomic_readandclear_ptr atomic_readandclear_long
496 static __inline u_long
497 atomic_readandclear_long(volatile u_long *addr)
500 return ((u_long)atomic_readandclear_int((volatile u_int *)addr));
503 #define atomic_readandclear_ptr atomic_readandclear_int
507 * We assume that a = b will do atomic loads and stores.
509 #define ATOMIC_STORE_LOAD(TYPE) \
510 static __inline u_##TYPE \
511 atomic_load_acq_##TYPE(volatile u_##TYPE *p) \
520 static __inline void \
521 atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v) \
528 ATOMIC_STORE_LOAD(int)
530 #define atomic_load_acq_32 atomic_load_acq_int
531 #define atomic_store_rel_32 atomic_store_rel_int
534 ATOMIC_STORE_LOAD(long)
536 #define atomic_load_acq_64 atomic_load_acq_long
537 #define atomic_store_rel_64 atomic_store_rel_long
539 #define atomic_load_acq_ptr atomic_load_acq_long
540 #define atomic_store_rel_ptr atomic_store_rel_long
542 static __inline u_long
543 atomic_load_acq_long(volatile u_long *addr)
546 return ((u_long)atomic_load_acq_int((volatile u_int *)addr));
550 atomic_store_rel_long(volatile u_long *addr, u_long val)
553 atomic_store_rel_int((volatile u_int *)addr, (u_int)val);
556 #define atomic_load_acq_ptr atomic_load_acq_int
557 #define atomic_store_rel_ptr atomic_store_rel_int
559 #undef ATOMIC_STORE_LOAD
562 * Atomically compare the value stored at *p with cmpval and if the
563 * two values are equal, update the value of *p with newval. Returns
564 * zero if the compare failed, nonzero otherwise.
566 #ifdef ISA_206_ATOMICS
568 atomic_cmpset_char(volatile u_char *p, u_char cmpval, u_char newval)
573 "1:\tlbarx %0, 0, %2\n\t" /* load old value */
574 "cmplw %3, %0\n\t" /* compare */
575 "bne- 2f\n\t" /* exit if not equal */
576 "stbcx. %4, 0, %2\n\t" /* attempt to store */
577 "bne- 1b\n\t" /* spin if failed */
578 "li %0, 1\n\t" /* success - retval = 1 */
579 "b 3f\n\t" /* we've succeeded */
581 "stbcx. %0, 0, %2\n\t" /* clear reservation (74xx) */
582 "li %0, 0\n\t" /* failure - retval = 0 */
584 : "=&r" (ret), "=m" (*p)
585 : "r" (p), "r" (cmpval), "r" (newval), "m" (*p)
592 atomic_cmpset_short(volatile u_short *p, u_short cmpval, u_short newval)
597 "1:\tlharx %0, 0, %2\n\t" /* load old value */
598 "cmplw %3, %0\n\t" /* compare */
599 "bne- 2f\n\t" /* exit if not equal */
600 "sthcx. %4, 0, %2\n\t" /* attempt to store */
601 "bne- 1b\n\t" /* spin if failed */
602 "li %0, 1\n\t" /* success - retval = 1 */
603 "b 3f\n\t" /* we've succeeded */
605 "sthcx. %0, 0, %2\n\t" /* clear reservation (74xx) */
606 "li %0, 0\n\t" /* failure - retval = 0 */
608 : "=&r" (ret), "=m" (*p)
609 : "r" (p), "r" (cmpval), "r" (newval), "m" (*p)
616 atomic_cmpset_masked(uint32_t *p, uint32_t cmpval, uint32_t newval,
623 "1:\tlwarx %2, 0, %3\n\t" /* load old value */
625 "cmplw %4, %0\n\t" /* compare */
626 "bne- 2f\n\t" /* exit if not equal */
627 "andc %2, %2, %7\n\t"
629 "stwcx. %2, 0, %3\n\t" /* attempt to store */
630 "bne- 1b\n\t" /* spin if failed */
631 "li %0, 1\n\t" /* success - retval = 1 */
632 "b 3f\n\t" /* we've succeeded */
634 "stwcx. %2, 0, %3\n\t" /* clear reservation (74xx) */
635 "li %0, 0\n\t" /* failure - retval = 0 */
637 : "=&r" (ret), "=m" (*p), "+&r" (tmp)
638 : "r" (p), "r" (cmpval), "r" (newval), "m" (*p),
645 #define _atomic_cmpset_masked_word(a,o,v,m) atomic_cmpset_masked(a, o, v, m)
649 atomic_cmpset_int(volatile u_int* p, u_int cmpval, u_int newval)
654 "1:\tlwarx %0, 0, %2\n\t" /* load old value */
655 "cmplw %3, %0\n\t" /* compare */
656 "bne- 2f\n\t" /* exit if not equal */
657 "stwcx. %4, 0, %2\n\t" /* attempt to store */
658 "bne- 1b\n\t" /* spin if failed */
659 "li %0, 1\n\t" /* success - retval = 1 */
660 "b 3f\n\t" /* we've succeeded */
662 "stwcx. %0, 0, %2\n\t" /* clear reservation (74xx) */
663 "li %0, 0\n\t" /* failure - retval = 0 */
665 : "=&r" (ret), "=m" (*p)
666 : "r" (p), "r" (cmpval), "r" (newval), "m" (*p)
672 atomic_cmpset_long(volatile u_long* p, u_long cmpval, u_long newval)
678 "1:\tldarx %0, 0, %2\n\t" /* load old value */
679 "cmpld %3, %0\n\t" /* compare */
680 "bne- 2f\n\t" /* exit if not equal */
681 "stdcx. %4, 0, %2\n\t" /* attempt to store */
683 "1:\tlwarx %0, 0, %2\n\t" /* load old value */
684 "cmplw %3, %0\n\t" /* compare */
685 "bne- 2f\n\t" /* exit if not equal */
686 "stwcx. %4, 0, %2\n\t" /* attempt to store */
688 "bne- 1b\n\t" /* spin if failed */
689 "li %0, 1\n\t" /* success - retval = 1 */
690 "b 3f\n\t" /* we've succeeded */
693 "stdcx. %0, 0, %2\n\t" /* clear reservation (74xx) */
695 "stwcx. %0, 0, %2\n\t" /* clear reservation (74xx) */
697 "li %0, 0\n\t" /* failure - retval = 0 */
699 : "=&r" (ret), "=m" (*p)
700 : "r" (p), "r" (cmpval), "r" (newval), "m" (*p)
706 #define ATOMIC_CMPSET_ACQ_REL(type) \
707 static __inline int \
708 atomic_cmpset_acq_##type(volatile u_##type *p, \
709 u_##type cmpval, u_##type newval)\
712 retval = atomic_cmpset_##type(p, cmpval, newval);\
716 static __inline int \
717 atomic_cmpset_rel_##type(volatile u_##type *p, \
718 u_##type cmpval, u_##type newval)\
721 return (atomic_cmpset_##type(p, cmpval, newval));\
725 ATOMIC_CMPSET_ACQ_REL(int);
726 ATOMIC_CMPSET_ACQ_REL(long);
728 #ifdef ISA_206_ATOMICS
729 #define atomic_cmpset_8 atomic_cmpset_char
731 #define atomic_cmpset_acq_8 atomic_cmpset_acq_char
732 #define atomic_cmpset_rel_8 atomic_cmpset_rel_char
734 #ifdef ISA_206_ATOMICS
735 #define atomic_cmpset_16 atomic_cmpset_short
737 #define atomic_cmpset_acq_16 atomic_cmpset_acq_short
738 #define atomic_cmpset_rel_16 atomic_cmpset_rel_short
740 #define atomic_cmpset_32 atomic_cmpset_int
741 #define atomic_cmpset_acq_32 atomic_cmpset_acq_int
742 #define atomic_cmpset_rel_32 atomic_cmpset_rel_int
745 #define atomic_cmpset_64 atomic_cmpset_long
746 #define atomic_cmpset_acq_64 atomic_cmpset_acq_long
747 #define atomic_cmpset_rel_64 atomic_cmpset_rel_long
749 #define atomic_cmpset_ptr atomic_cmpset_long
750 #define atomic_cmpset_acq_ptr atomic_cmpset_acq_long
751 #define atomic_cmpset_rel_ptr atomic_cmpset_rel_long
753 #define atomic_cmpset_ptr atomic_cmpset_int
754 #define atomic_cmpset_acq_ptr atomic_cmpset_acq_int
755 #define atomic_cmpset_rel_ptr atomic_cmpset_rel_int
759 * Atomically compare the value stored at *p with *cmpval and if the
760 * two values are equal, update the value of *p with newval. Returns
761 * zero if the compare failed and sets *cmpval to the read value from *p,
764 #ifdef ISA_206_ATOMICS
766 atomic_fcmpset_char(volatile u_char *p, u_char *cmpval, u_char newval)
771 "lbarx %0, 0, %3\n\t" /* load old value */
772 "cmplw %4, %0\n\t" /* compare */
773 "bne- 1f\n\t" /* exit if not equal */
774 "stbcx. %5, 0, %3\n\t" /* attempt to store */
775 "bne- 1f\n\t" /* exit if failed */
776 "li %0, 1\n\t" /* success - retval = 1 */
777 "b 2f\n\t" /* we've succeeded */
779 "stbcx. %0, 0, %3\n\t" /* clear reservation (74xx) */
781 "li %0, 0\n\t" /* failure - retval = 0 */
783 : "=&r" (ret), "=m" (*p), "=m" (*cmpval)
784 : "r" (p), "r" (*cmpval), "r" (newval), "m" (*p), "r"(cmpval)
791 atomic_fcmpset_short(volatile u_short *p, u_short *cmpval, u_short newval)
796 "lharx %0, 0, %3\n\t" /* load old value */
797 "cmplw %4, %0\n\t" /* compare */
798 "bne- 1f\n\t" /* exit if not equal */
799 "sthcx. %5, 0, %3\n\t" /* attempt to store */
800 "bne- 1f\n\t" /* exit if failed */
801 "li %0, 1\n\t" /* success - retval = 1 */
802 "b 2f\n\t" /* we've succeeded */
804 "sthcx. %0, 0, %3\n\t" /* clear reservation (74xx) */
806 "li %0, 0\n\t" /* failure - retval = 0 */
808 : "=&r" (ret), "=m" (*p), "=m" (*cmpval)
809 : "r" (p), "r" (*cmpval), "r" (newval), "m" (*p), "r"(cmpval)
814 #endif /* ISA_206_ATOMICS */
817 atomic_fcmpset_int(volatile u_int *p, u_int *cmpval, u_int newval)
822 "lwarx %0, 0, %3\n\t" /* load old value */
823 "cmplw %4, %0\n\t" /* compare */
824 "bne- 1f\n\t" /* exit if not equal */
825 "stwcx. %5, 0, %3\n\t" /* attempt to store */
826 "bne- 1f\n\t" /* exit if failed */
827 "li %0, 1\n\t" /* success - retval = 1 */
828 "b 2f\n\t" /* we've succeeded */
830 "stwcx. %0, 0, %3\n\t" /* clear reservation (74xx) */
832 "li %0, 0\n\t" /* failure - retval = 0 */
834 : "=&r" (ret), "=m" (*p), "=m" (*cmpval)
835 : "r" (p), "r" (*cmpval), "r" (newval), "m" (*p), "r"(cmpval)
841 atomic_fcmpset_long(volatile u_long *p, u_long *cmpval, u_long newval)
847 "ldarx %0, 0, %3\n\t" /* load old value */
848 "cmpld %4, %0\n\t" /* compare */
849 "bne- 1f\n\t" /* exit if not equal */
850 "stdcx. %5, 0, %3\n\t" /* attempt to store */
852 "lwarx %0, 0, %3\n\t" /* load old value */
853 "cmplw %4, %0\n\t" /* compare */
854 "bne- 1f\n\t" /* exit if not equal */
855 "stwcx. %5, 0, %3\n\t" /* attempt to store */
857 "bne- 1f\n\t" /* exit if failed */
858 "li %0, 1\n\t" /* success - retval = 1 */
859 "b 2f\n\t" /* we've succeeded */
862 "stdcx. %0, 0, %3\n\t" /* clear reservation (74xx) */
865 "stwcx. %0, 0, %3\n\t" /* clear reservation (74xx) */
868 "li %0, 0\n\t" /* failure - retval = 0 */
870 : "=&r" (ret), "=m" (*p), "=m" (*cmpval)
871 : "r" (p), "r" (*cmpval), "r" (newval), "m" (*p), "r"(cmpval)
877 #define ATOMIC_FCMPSET_ACQ_REL(type) \
878 static __inline int \
879 atomic_fcmpset_acq_##type(volatile u_##type *p, \
880 u_##type *cmpval, u_##type newval)\
883 retval = atomic_fcmpset_##type(p, cmpval, newval);\
887 static __inline int \
888 atomic_fcmpset_rel_##type(volatile u_##type *p, \
889 u_##type *cmpval, u_##type newval)\
892 return (atomic_fcmpset_##type(p, cmpval, newval));\
896 ATOMIC_FCMPSET_ACQ_REL(int);
897 ATOMIC_FCMPSET_ACQ_REL(long);
899 #ifdef ISA_206_ATOMICS
900 #define atomic_fcmpset_8 atomic_fcmpset_char
902 #define atomic_fcmpset_acq_8 atomic_fcmpset_acq_char
903 #define atomic_fcmpset_rel_8 atomic_fcmpset_rel_char
905 #ifdef ISA_206_ATOMICS
906 #define atomic_fcmpset_16 atomic_fcmpset_short
908 #define atomic_fcmpset_acq_16 atomic_fcmpset_acq_short
909 #define atomic_fcmpset_rel_16 atomic_fcmpset_rel_short
911 #define atomic_fcmpset_32 atomic_fcmpset_int
912 #define atomic_fcmpset_acq_32 atomic_fcmpset_acq_int
913 #define atomic_fcmpset_rel_32 atomic_fcmpset_rel_int
916 #define atomic_fcmpset_64 atomic_fcmpset_long
917 #define atomic_fcmpset_acq_64 atomic_fcmpset_acq_long
918 #define atomic_fcmpset_rel_64 atomic_fcmpset_rel_long
920 #define atomic_fcmpset_ptr atomic_fcmpset_long
921 #define atomic_fcmpset_acq_ptr atomic_fcmpset_acq_long
922 #define atomic_fcmpset_rel_ptr atomic_fcmpset_rel_long
924 #define atomic_fcmpset_ptr atomic_fcmpset_int
925 #define atomic_fcmpset_acq_ptr atomic_fcmpset_acq_int
926 #define atomic_fcmpset_rel_ptr atomic_fcmpset_rel_int
929 static __inline u_int
930 atomic_fetchadd_int(volatile u_int *p, u_int v)
936 } while (!atomic_cmpset_int(p, value, value + v));
940 static __inline u_long
941 atomic_fetchadd_long(volatile u_long *p, u_long v)
947 } while (!atomic_cmpset_long(p, value, value + v));
951 static __inline u_int
952 atomic_swap_32(volatile u_int *p, u_int v)
960 : "=&r" (prev), "+m" (*(volatile u_int *)p)
968 static __inline u_long
969 atomic_swap_64(volatile u_long *p, u_long v)
977 : "=&r" (prev), "+m" (*(volatile u_long *)p)
985 #define atomic_fetchadd_32 atomic_fetchadd_int
986 #define atomic_swap_int atomic_swap_32
989 #define atomic_fetchadd_64 atomic_fetchadd_long
990 #define atomic_swap_long atomic_swap_64
991 #define atomic_swap_ptr atomic_swap_64
993 #define atomic_swap_long(p,v) atomic_swap_32((volatile u_int *)(p), v)
994 #define atomic_swap_ptr(p,v) atomic_swap_32((volatile u_int *)(p), v)
998 atomic_thread_fence_acq(void)
1004 static __inline void
1005 atomic_thread_fence_rel(void)
1011 static __inline void
1012 atomic_thread_fence_acq_rel(void)
1018 static __inline void
1019 atomic_thread_fence_seq_cst(void)
1022 __asm __volatile("sync" : : : "memory");
1025 #ifndef ISA_206_ATOMICS
1026 #include <sys/_atomic_subword.h>
1027 #define atomic_cmpset_char atomic_cmpset_8
1028 #define atomic_cmpset_short atomic_cmpset_16
1029 #define atomic_fcmpset_char atomic_fcmpset_8
1030 #define atomic_fcmpset_short atomic_fcmpset_16
1033 /* These need sys/_atomic_subword.h on non-ISA-2.06-atomic platforms. */
1034 ATOMIC_CMPSET_ACQ_REL(char);
1035 ATOMIC_CMPSET_ACQ_REL(short);
1037 ATOMIC_FCMPSET_ACQ_REL(char);
1038 ATOMIC_FCMPSET_ACQ_REL(short);
1043 #endif /* ! _MACHINE_ATOMIC_H_ */