2 * SPDX-License-Identifier: BSD-2-Clause-FreeBSD
4 * Copyright (c) 2008 Marcel Moolenaar
5 * Copyright (c) 2001 Benno Rice
6 * Copyright (c) 2001 David E. O'Brien
7 * Copyright (c) 1998 Doug Rabson
10 * Redistribution and use in source and binary forms, with or without
11 * modification, are permitted provided that the following conditions
13 * 1. Redistributions of source code must retain the above copyright
14 * notice, this list of conditions and the following disclaimer.
15 * 2. Redistributions in binary form must reproduce the above copyright
16 * notice, this list of conditions and the following disclaimer in the
17 * documentation and/or other materials provided with the distribution.
19 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
20 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
21 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
22 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
23 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
24 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
25 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
26 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
27 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
28 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
34 #ifndef _MACHINE_ATOMIC_H_
35 #define _MACHINE_ATOMIC_H_
38 #error this file needs sys/cdefs.h as a prerequisite
41 #include <sys/atomic_common.h>
44 * The __ATOMIC_REL/ACQ() macros provide memory barriers only in conjunction
45 * with the atomic lXarx/stXcx. sequences below. They are not exposed outside
46 * of this file. See also Appendix B.2 of Book II of the architecture manual.
48 * Note that not all Book-E processors accept the light-weight sync variant.
49 * In particular, early models of E500 cores are known to wedge. Bank on all
50 * 64-bit capable CPUs to accept lwsync properly and pessimize 32-bit CPUs
51 * to use the heavier-weight sync.
/*
 * NOTE(review): the two groups below look like alternative branches of an
 * elided #ifdef (64-bit capable CPUs take the light-weight lwsync path,
 * older Book-E parts the full sync path, per the comment above) — confirm
 * against the unelided file; as shown here, mb()/rmb()/wmb() and the
 * __ATOMIC_* helpers would simply be redefined.
 */
/* Light-weight variant: lwsync for load/store ordering, isync for acquire. */
55 #define mb() __asm __volatile("sync" : : : "memory")
56 #define rmb() __asm __volatile("lwsync" : : : "memory")
57 #define wmb() __asm __volatile("lwsync" : : : "memory")
58 #define __ATOMIC_REL() __asm __volatile("lwsync" : : : "memory")
59 #define __ATOMIC_ACQ() __asm __volatile("isync" : : : "memory")
/* Heavy-weight variant: full "sync" everywhere except the acquire isync. */
61 #define mb() __asm __volatile("sync" : : : "memory")
62 #define rmb() __asm __volatile("sync" : : : "memory")
63 #define wmb() __asm __volatile("sync" : : : "memory")
64 #define __ATOMIC_REL() __asm __volatile("sync" : : : "memory")
65 #define __ATOMIC_ACQ() __asm __volatile("isync" : : : "memory")
73 __asm __volatile("lwsync" : : : "memory");
75 __asm __volatile("sync" : : : "memory");
84 #define __atomic_add_int(p, v, t) \
86 "1: lwarx %0, 0, %2\n" \
88 " stwcx. %0, 0, %2\n" \
90 : "=&r" (t), "=m" (*p) \
91 : "r" (p), "r" (v), "m" (*p) \
93 /* __atomic_add_int */
96 #define __atomic_add_long(p, v, t) \
98 "1: ldarx %0, 0, %2\n" \
100 " stdcx. %0, 0, %2\n" \
102 : "=&r" (t), "=m" (*p) \
103 : "r" (p), "r" (v), "m" (*p) \
105 /* __atomic_add_long */
107 #define __atomic_add_long(p, v, t) \
109 "1: lwarx %0, 0, %2\n" \
110 " add %0, %3, %0\n" \
111 " stwcx. %0, 0, %2\n" \
113 : "=&r" (t), "=m" (*p) \
114 : "r" (p), "r" (v), "m" (*p) \
116 /* __atomic_add_long */
119 #define _ATOMIC_ADD(type) \
120 static __inline void \
121 atomic_add_##type(volatile u_##type *p, u_##type v) { \
123 __atomic_add_##type(p, v, t); \
126 static __inline void \
127 atomic_add_acq_##type(volatile u_##type *p, u_##type v) { \
129 __atomic_add_##type(p, v, t); \
133 static __inline void \
134 atomic_add_rel_##type(volatile u_##type *p, u_##type v) { \
137 __atomic_add_##type(p, v, t); \
144 #define atomic_add_32 atomic_add_int
145 #define atomic_add_acq_32 atomic_add_acq_int
146 #define atomic_add_rel_32 atomic_add_rel_int
149 #define atomic_add_64 atomic_add_long
150 #define atomic_add_acq_64 atomic_add_acq_long
151 #define atomic_add_rel_64 atomic_add_rel_long
153 #define atomic_add_ptr atomic_add_long
154 #define atomic_add_acq_ptr atomic_add_acq_long
155 #define atomic_add_rel_ptr atomic_add_rel_long
157 #define atomic_add_ptr atomic_add_int
158 #define atomic_add_acq_ptr atomic_add_acq_int
159 #define atomic_add_rel_ptr atomic_add_rel_int
162 #undef __atomic_add_long
163 #undef __atomic_add_int
170 #define __atomic_clear_int(p, v, t) \
172 "1: lwarx %0, 0, %2\n" \
173 " andc %0, %0, %3\n" \
174 " stwcx. %0, 0, %2\n" \
176 : "=&r" (t), "=m" (*p) \
177 : "r" (p), "r" (v), "m" (*p) \
179 /* __atomic_clear_int */
182 #define __atomic_clear_long(p, v, t) \
184 "1: ldarx %0, 0, %2\n" \
185 " andc %0, %0, %3\n" \
186 " stdcx. %0, 0, %2\n" \
188 : "=&r" (t), "=m" (*p) \
189 : "r" (p), "r" (v), "m" (*p) \
191 /* __atomic_clear_long */
193 #define __atomic_clear_long(p, v, t) \
195 "1: lwarx %0, 0, %2\n" \
196 " andc %0, %0, %3\n" \
197 " stwcx. %0, 0, %2\n" \
199 : "=&r" (t), "=m" (*p) \
200 : "r" (p), "r" (v), "m" (*p) \
202 /* __atomic_clear_long */
205 #define _ATOMIC_CLEAR(type) \
206 static __inline void \
207 atomic_clear_##type(volatile u_##type *p, u_##type v) { \
209 __atomic_clear_##type(p, v, t); \
212 static __inline void \
213 atomic_clear_acq_##type(volatile u_##type *p, u_##type v) { \
215 __atomic_clear_##type(p, v, t); \
219 static __inline void \
220 atomic_clear_rel_##type(volatile u_##type *p, u_##type v) { \
223 __atomic_clear_##type(p, v, t); \
231 #define atomic_clear_32 atomic_clear_int
232 #define atomic_clear_acq_32 atomic_clear_acq_int
233 #define atomic_clear_rel_32 atomic_clear_rel_int
236 #define atomic_clear_64 atomic_clear_long
237 #define atomic_clear_acq_64 atomic_clear_acq_long
238 #define atomic_clear_rel_64 atomic_clear_rel_long
240 #define atomic_clear_ptr atomic_clear_long
241 #define atomic_clear_acq_ptr atomic_clear_acq_long
242 #define atomic_clear_rel_ptr atomic_clear_rel_long
244 #define atomic_clear_ptr atomic_clear_int
245 #define atomic_clear_acq_ptr atomic_clear_acq_int
246 #define atomic_clear_rel_ptr atomic_clear_rel_int
249 #undef __atomic_clear_long
250 #undef __atomic_clear_int
253 * atomic_cmpset(p, o, n)
255 /* TODO -- see below */
260 /* TODO -- see below */
263 * atomic_readandclear(p)
265 /* TODO -- see below */
272 #define __atomic_set_int(p, v, t) \
274 "1: lwarx %0, 0, %2\n" \
276 " stwcx. %0, 0, %2\n" \
278 : "=&r" (t), "=m" (*p) \
279 : "r" (p), "r" (v), "m" (*p) \
281 /* __atomic_set_int */
284 #define __atomic_set_long(p, v, t) \
286 "1: ldarx %0, 0, %2\n" \
288 " stdcx. %0, 0, %2\n" \
290 : "=&r" (t), "=m" (*p) \
291 : "r" (p), "r" (v), "m" (*p) \
293 /* __atomic_set_long */
295 #define __atomic_set_long(p, v, t) \
297 "1: lwarx %0, 0, %2\n" \
299 " stwcx. %0, 0, %2\n" \
301 : "=&r" (t), "=m" (*p) \
302 : "r" (p), "r" (v), "m" (*p) \
304 /* __atomic_set_long */
307 #define _ATOMIC_SET(type) \
308 static __inline void \
309 atomic_set_##type(volatile u_##type *p, u_##type v) { \
311 __atomic_set_##type(p, v, t); \
314 static __inline void \
315 atomic_set_acq_##type(volatile u_##type *p, u_##type v) { \
317 __atomic_set_##type(p, v, t); \
321 static __inline void \
322 atomic_set_rel_##type(volatile u_##type *p, u_##type v) { \
325 __atomic_set_##type(p, v, t); \
332 #define atomic_set_32 atomic_set_int
333 #define atomic_set_acq_32 atomic_set_acq_int
334 #define atomic_set_rel_32 atomic_set_rel_int
337 #define atomic_set_64 atomic_set_long
338 #define atomic_set_acq_64 atomic_set_acq_long
339 #define atomic_set_rel_64 atomic_set_rel_long
341 #define atomic_set_ptr atomic_set_long
342 #define atomic_set_acq_ptr atomic_set_acq_long
343 #define atomic_set_rel_ptr atomic_set_rel_long
345 #define atomic_set_ptr atomic_set_int
346 #define atomic_set_acq_ptr atomic_set_acq_int
347 #define atomic_set_rel_ptr atomic_set_rel_int
350 #undef __atomic_set_long
351 #undef __atomic_set_int
354 * atomic_subtract(p, v)
358 #define __atomic_subtract_int(p, v, t) \
360 "1: lwarx %0, 0, %2\n" \
361 " subf %0, %3, %0\n" \
362 " stwcx. %0, 0, %2\n" \
364 : "=&r" (t), "=m" (*p) \
365 : "r" (p), "r" (v), "m" (*p) \
367 /* __atomic_subtract_int */
370 #define __atomic_subtract_long(p, v, t) \
372 "1: ldarx %0, 0, %2\n" \
373 " subf %0, %3, %0\n" \
374 " stdcx. %0, 0, %2\n" \
376 : "=&r" (t), "=m" (*p) \
377 : "r" (p), "r" (v), "m" (*p) \
379 /* __atomic_subtract_long */
381 #define __atomic_subtract_long(p, v, t) \
383 "1: lwarx %0, 0, %2\n" \
384 " subf %0, %3, %0\n" \
385 " stwcx. %0, 0, %2\n" \
387 : "=&r" (t), "=m" (*p) \
388 : "r" (p), "r" (v), "m" (*p) \
390 /* __atomic_subtract_long */
393 #define _ATOMIC_SUBTRACT(type) \
394 static __inline void \
395 atomic_subtract_##type(volatile u_##type *p, u_##type v) { \
397 __atomic_subtract_##type(p, v, t); \
400 static __inline void \
401 atomic_subtract_acq_##type(volatile u_##type *p, u_##type v) { \
403 __atomic_subtract_##type(p, v, t); \
407 static __inline void \
408 atomic_subtract_rel_##type(volatile u_##type *p, u_##type v) { \
411 __atomic_subtract_##type(p, v, t); \
413 /* _ATOMIC_SUBTRACT */
415 _ATOMIC_SUBTRACT(int)
416 _ATOMIC_SUBTRACT(long)
418 #define atomic_subtract_32 atomic_subtract_int
419 #define atomic_subtract_acq_32 atomic_subtract_acq_int
420 #define atomic_subtract_rel_32 atomic_subtract_rel_int
423 #define atomic_subtract_64 atomic_subtract_long
424 #define atomic_subtract_acq_64 atomic_subtract_acq_long /* was mistyped "atomic_subract_acq_long" — that symbol does not exist */
425 #define atomic_subtract_rel_64 atomic_subtract_rel_long
427 #define atomic_subtract_ptr atomic_subtract_long
428 #define atomic_subtract_acq_ptr atomic_subtract_acq_long
429 #define atomic_subtract_rel_ptr atomic_subtract_rel_long
431 #define atomic_subtract_ptr atomic_subtract_int
432 #define atomic_subtract_acq_ptr atomic_subtract_acq_int
433 #define atomic_subtract_rel_ptr atomic_subtract_rel_int
435 #undef _ATOMIC_SUBTRACT
436 #undef __atomic_subtract_long
437 #undef __atomic_subtract_int
440 * atomic_store_rel(p, v)
442 /* TODO -- see below */
445 * Old/original implementations that still need revisiting.
448 static __inline u_int
449 atomic_readandclear_int(volatile u_int *addr)
454 "\tsync\n" /* drain writes */
455 "1:\tlwarx %0, 0, %3\n\t" /* load old value */
456 "li %1, 0\n\t" /* load new value */
457 "stwcx. %1, 0, %3\n\t" /* attempt to store */
458 "bne- 1b\n\t" /* spin if failed */
459 : "=&r"(result), "=&r"(temp), "=m" (*addr)
460 : "r" (addr), "m" (*addr)
467 static __inline u_long
468 atomic_readandclear_long(volatile u_long *addr)
473 "\tsync\n" /* drain writes */
474 "1:\tldarx %0, 0, %3\n\t" /* load old value */
475 "li %1, 0\n\t" /* load new value */
476 "stdcx. %1, 0, %3\n\t" /* attempt to store */
477 "bne- 1b\n\t" /* spin if failed */
478 : "=&r"(result), "=&r"(temp), "=m" (*addr)
479 : "r" (addr), "m" (*addr)
486 #define atomic_readandclear_32 atomic_readandclear_int
489 #define atomic_readandclear_64 atomic_readandclear_long
491 #define atomic_readandclear_ptr atomic_readandclear_long
493 static __inline u_long
494 atomic_readandclear_long(volatile u_long *addr)
497 return ((u_long)atomic_readandclear_int((volatile u_int *)addr));
500 #define atomic_readandclear_ptr atomic_readandclear_int
504 * We assume that a = b will do atomic loads and stores.
506 #define ATOMIC_STORE_LOAD(TYPE) \
507 static __inline u_##TYPE \
508 atomic_load_acq_##TYPE(volatile u_##TYPE *p) \
517 static __inline void \
518 atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v) \
525 ATOMIC_STORE_LOAD(int)
527 #define atomic_load_acq_32 atomic_load_acq_int
528 #define atomic_store_rel_32 atomic_store_rel_int
531 ATOMIC_STORE_LOAD(long)
533 #define atomic_load_acq_64 atomic_load_acq_long
534 #define atomic_store_rel_64 atomic_store_rel_long
536 #define atomic_load_acq_ptr atomic_load_acq_long
537 #define atomic_store_rel_ptr atomic_store_rel_long
539 static __inline u_long
540 atomic_load_acq_long(volatile u_long *addr)
543 return ((u_long)atomic_load_acq_int((volatile u_int *)addr));
547 atomic_store_rel_long(volatile u_long *addr, u_long val)
550 atomic_store_rel_int((volatile u_int *)addr, (u_int)val);
553 #define atomic_load_acq_ptr atomic_load_acq_int
554 #define atomic_store_rel_ptr atomic_store_rel_int
556 #undef ATOMIC_STORE_LOAD
559 * Atomically compare the value stored at *p with cmpval and if the
560 * two values are equal, update the value of *p with newval. Returns
561 * zero if the compare failed, nonzero otherwise.
564 atomic_cmpset_int(volatile u_int* p, u_int cmpval, u_int newval)
569 "1:\tlwarx %0, 0, %2\n\t" /* load old value */
570 "cmplw %3, %0\n\t" /* compare */
571 "bne 2f\n\t" /* exit if not equal */
572 "stwcx. %4, 0, %2\n\t" /* attempt to store */
573 "bne- 1b\n\t" /* spin if failed */
574 "li %0, 1\n\t" /* success - retval = 1 */
575 "b 3f\n\t" /* we've succeeded */
577 "stwcx. %0, 0, %2\n\t" /* clear reservation (74xx) */
578 "li %0, 0\n\t" /* failure - retval = 0 */
580 : "=&r" (ret), "=m" (*p)
581 : "r" (p), "r" (cmpval), "r" (newval), "m" (*p)
587 atomic_cmpset_long(volatile u_long* p, u_long cmpval, u_long newval)
593 "1:\tldarx %0, 0, %2\n\t" /* load old value */
594 "cmpld %3, %0\n\t" /* compare */
595 "bne 2f\n\t" /* exit if not equal */
596 "stdcx. %4, 0, %2\n\t" /* attempt to store */
598 "1:\tlwarx %0, 0, %2\n\t" /* load old value */
599 "cmplw %3, %0\n\t" /* compare */
600 "bne 2f\n\t" /* exit if not equal */
601 "stwcx. %4, 0, %2\n\t" /* attempt to store */
603 "bne- 1b\n\t" /* spin if failed */
604 "li %0, 1\n\t" /* success - retval = 1 */
605 "b 3f\n\t" /* we've succeeded */
608 "stdcx. %0, 0, %2\n\t" /* clear reservation (74xx) */
610 "stwcx. %0, 0, %2\n\t" /* clear reservation (74xx) */
612 "li %0, 0\n\t" /* failure - retval = 0 */
614 : "=&r" (ret), "=m" (*p)
615 : "r" (p), "r" (cmpval), "r" (newval), "m" (*p)
622 atomic_cmpset_acq_int(volatile u_int *p, u_int cmpval, u_int newval)
626 retval = atomic_cmpset_int(p, cmpval, newval);
632 atomic_cmpset_rel_int(volatile u_int *p, u_int cmpval, u_int newval)
635 return (atomic_cmpset_int(p, cmpval, newval));
639 atomic_cmpset_acq_long(volatile u_long *p, u_long cmpval, u_long newval)
643 retval = atomic_cmpset_long(p, cmpval, newval);
649 atomic_cmpset_rel_long(volatile u_long *p, u_long cmpval, u_long newval)
652 return (atomic_cmpset_long(p, cmpval, newval));
655 #define atomic_cmpset_32 atomic_cmpset_int
656 #define atomic_cmpset_acq_32 atomic_cmpset_acq_int
657 #define atomic_cmpset_rel_32 atomic_cmpset_rel_int
660 #define atomic_cmpset_64 atomic_cmpset_long
661 #define atomic_cmpset_acq_64 atomic_cmpset_acq_long
662 #define atomic_cmpset_rel_64 atomic_cmpset_rel_long
664 #define atomic_cmpset_ptr atomic_cmpset_long
665 #define atomic_cmpset_acq_ptr atomic_cmpset_acq_long
666 #define atomic_cmpset_rel_ptr atomic_cmpset_rel_long
668 #define atomic_cmpset_ptr atomic_cmpset_int
669 #define atomic_cmpset_acq_ptr atomic_cmpset_acq_int
670 #define atomic_cmpset_rel_ptr atomic_cmpset_rel_int
674 * Atomically compare the value stored at *p with *cmpval and if the
675 * two values are equal, update the value of *p with newval. Returns
676 * zero if the compare failed and sets *cmpval to the read value from *p,
680 atomic_fcmpset_int(volatile u_int *p, u_int *cmpval, u_int newval)
685 "lwarx %0, 0, %3\n\t" /* load old value */
686 "cmplw %4, %0\n\t" /* compare */
687 "bne 1f\n\t" /* exit if not equal */
688 "stwcx. %5, 0, %3\n\t" /* attempt to store */
689 "bne- 1f\n\t" /* exit if failed */
690 "li %0, 1\n\t" /* success - retval = 1 */
691 "b 2f\n\t" /* we've succeeded */
693 "stwcx. %0, 0, %3\n\t" /* clear reservation (74xx) */
695 "li %0, 0\n\t" /* failure - retval = 0 */
697 : "=&r" (ret), "=m" (*p), "=m" (*cmpval)
698 : "r" (p), "r" (*cmpval), "r" (newval), "m" (*p), "r"(cmpval)
704 atomic_fcmpset_long(volatile u_long *p, u_long *cmpval, u_long newval)
710 "ldarx %0, 0, %3\n\t" /* load old value */
711 "cmpld %4, %0\n\t" /* compare */
712 "bne 1f\n\t" /* exit if not equal */
713 "stdcx. %5, 0, %3\n\t" /* attempt to store */
715 "lwarx %0, 0, %3\n\t" /* load old value */
716 "cmplw %4, %0\n\t" /* compare */
717 "bne 1f\n\t" /* exit if not equal */
718 "stwcx. %5, 0, %3\n\t" /* attempt to store */
720 "bne- 1f\n\t" /* exit if failed */
721 "li %0, 1\n\t" /* success - retval = 1 */
722 "b 2f\n\t" /* we've succeeded */
725 "stdcx. %0, 0, %3\n\t" /* clear reservation (74xx) */
728 "stwcx. %0, 0, %3\n\t" /* clear reservation (74xx) */
731 "li %0, 0\n\t" /* failure - retval = 0 */
733 : "=&r" (ret), "=m" (*p), "=m" (*cmpval)
734 : "r" (p), "r" (*cmpval), "r" (newval), "m" (*p), "r"(cmpval)
741 atomic_fcmpset_acq_int(volatile u_int *p, u_int *cmpval, u_int newval)
745 retval = atomic_fcmpset_int(p, cmpval, newval);
751 atomic_fcmpset_rel_int(volatile u_int *p, u_int *cmpval, u_int newval)
754 return (atomic_fcmpset_int(p, cmpval, newval));
758 atomic_fcmpset_acq_long(volatile u_long *p, u_long *cmpval, u_long newval)
762 retval = atomic_fcmpset_long(p, cmpval, newval);
768 atomic_fcmpset_rel_long(volatile u_long *p, u_long *cmpval, u_long newval)
771 return (atomic_fcmpset_long(p, cmpval, newval));
774 #define atomic_fcmpset_32 atomic_fcmpset_int
775 #define atomic_fcmpset_acq_32 atomic_fcmpset_acq_int
776 #define atomic_fcmpset_rel_32 atomic_fcmpset_rel_int
779 #define atomic_fcmpset_64 atomic_fcmpset_long
780 #define atomic_fcmpset_acq_64 atomic_fcmpset_acq_long
781 #define atomic_fcmpset_rel_64 atomic_fcmpset_rel_long
783 #define atomic_fcmpset_ptr atomic_fcmpset_long
784 #define atomic_fcmpset_acq_ptr atomic_fcmpset_acq_long
785 #define atomic_fcmpset_rel_ptr atomic_fcmpset_rel_long
787 #define atomic_fcmpset_ptr atomic_fcmpset_int
788 #define atomic_fcmpset_acq_ptr atomic_fcmpset_acq_int
789 #define atomic_fcmpset_rel_ptr atomic_fcmpset_rel_int
792 static __inline u_int
793 atomic_fetchadd_int(volatile u_int *p, u_int v)
799 } while (!atomic_cmpset_int(p, value, value + v));
803 static __inline u_long
804 atomic_fetchadd_long(volatile u_long *p, u_long v)
810 } while (!atomic_cmpset_long(p, value, value + v));
814 static __inline u_int
815 atomic_swap_32(volatile u_int *p, u_int v)
823 : "=&r" (prev), "+m" (*(volatile u_int *)p)
831 static __inline u_long
832 atomic_swap_64(volatile u_long *p, u_long v)
840 : "=&r" (prev), "+m" (*(volatile u_long *)p)
848 #define atomic_fetchadd_32 atomic_fetchadd_int
849 #define atomic_swap_int atomic_swap_32
852 #define atomic_fetchadd_64 atomic_fetchadd_long
853 #define atomic_swap_long atomic_swap_64
854 #define atomic_swap_ptr atomic_swap_64
856 #define atomic_swap_long(p,v) atomic_swap_32((volatile u_int *)(p), v)
857 #define atomic_swap_ptr(p,v) atomic_swap_32((volatile u_int *)(p), v)
864 atomic_thread_fence_acq(void)
871 atomic_thread_fence_rel(void)
878 atomic_thread_fence_acq_rel(void)
885 atomic_thread_fence_seq_cst(void)
888 __asm __volatile("sync" : : : "memory");
891 #endif /* ! _MACHINE_ATOMIC_H_ */