/*-
 * SPDX-License-Identifier: BSD-2-Clause-FreeBSD
 *
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD$
 */
#ifndef _MACHINE_ATOMIC_H_
#define	_MACHINE_ATOMIC_H_

#ifndef _SYS_CDEFS_H_
#error this file needs sys/cdefs.h as a prerequisite
#endif

#include <sys/atomic_common.h>

#ifdef _KERNEL
#include <machine/md_var.h>
#include <machine/specialreg.h>
#endif

#ifndef __OFFSETOF_MONITORBUF
/*
 * __OFFSETOF_MONITORBUF == __pcpu_offset(pc_monitorbuf).
 *
 * The open-coded number is used instead of the symbolic expression to
 * avoid a dependency on sys/pcpu.h in machine/atomic.h consumers.
 * An assertion in i386/vm_machdep.c ensures that the value is correct.
 */
#define	__OFFSETOF_MONITORBUF	0x80

static __inline void
__mbk(void)
{

	__asm __volatile("lock; addl $0,%%fs:%0"
	    : "+m" (*(u_int *)__OFFSETOF_MONITORBUF) : : "memory", "cc");
}

static __inline void
__mbu(void)
{

	__asm __volatile("lock; addl $0,(%%esp)" : : : "memory", "cc");
}
#endif

/*
 * Various simple operations on memory, each of which is atomic in the
 * presence of interrupts and multiple processors.
 *
 * atomic_set_char(P, V)	(*(u_char *)(P) |= (V))
 * atomic_clear_char(P, V)	(*(u_char *)(P) &= ~(V))
 * atomic_add_char(P, V)	(*(u_char *)(P) += (V))
 * atomic_subtract_char(P, V)	(*(u_char *)(P) -= (V))
 *
 * atomic_set_short(P, V)	(*(u_short *)(P) |= (V))
 * atomic_clear_short(P, V)	(*(u_short *)(P) &= ~(V))
 * atomic_add_short(P, V)	(*(u_short *)(P) += (V))
 * atomic_subtract_short(P, V)	(*(u_short *)(P) -= (V))
 *
 * atomic_set_int(P, V)		(*(u_int *)(P) |= (V))
 * atomic_clear_int(P, V)	(*(u_int *)(P) &= ~(V))
 * atomic_add_int(P, V)		(*(u_int *)(P) += (V))
 * atomic_subtract_int(P, V)	(*(u_int *)(P) -= (V))
 * atomic_swap_int(P, V)	(return (*(u_int *)(P)); *(u_int *)(P) = (V);)
 * atomic_readandclear_int(P)	(return (*(u_int *)(P)); *(u_int *)(P) = 0;)
 *
 * atomic_set_long(P, V)	(*(u_long *)(P) |= (V))
 * atomic_clear_long(P, V)	(*(u_long *)(P) &= ~(V))
 * atomic_add_long(P, V)	(*(u_long *)(P) += (V))
 * atomic_subtract_long(P, V)	(*(u_long *)(P) -= (V))
 * atomic_swap_long(P, V)	(return (*(u_long *)(P)); *(u_long *)(P) = (V);)
 * atomic_readandclear_long(P)	(return (*(u_long *)(P)); *(u_long *)(P) = 0;)
 */
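
/*
 * Illustrative sketch (editorial example; the variables are hypothetical
 * and not defined by this header): the plain variants above guarantee
 * atomicity but impose no ordering, which is enough for simple counters
 * and flag words.
 *
 *	static volatile u_int intr_count;
 *	static volatile u_int flag_word;
 *
 *	atomic_add_int(&intr_count, 1);		(increment atomically)
 *	atomic_set_int(&flag_word, 0x04);	(set bit 2)
 *	atomic_clear_int(&flag_word, 0x04);	(clear bit 2)
 */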

/*
 * The above functions are expanded inline in the statically-linked
 * kernel.  Lock prefixes are generated if an SMP kernel is being
 * built, or if user code is using these functions.
 *
 * Kernel modules call real functions which are built into the kernel.
 * This allows kernel modules to be portable between UP and SMP systems.
 */
#if defined(KLD_MODULE) || !defined(__GNUCLIKE_ASM)
#define	ATOMIC_ASM(NAME, TYPE, OP, CONS, V)			\
void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v);	\
void atomic_##NAME##_barr_##TYPE(volatile u_##TYPE *p, u_##TYPE v)

int	atomic_cmpset_char(volatile u_char *dst, u_char expect, u_char src);
int	atomic_cmpset_short(volatile u_short *dst, u_short expect, u_short src);
int	atomic_cmpset_int(volatile u_int *dst, u_int expect, u_int src);
int	atomic_fcmpset_char(volatile u_char *dst, u_char *expect, u_char src);
int	atomic_fcmpset_short(volatile u_short *dst, u_short *expect,
	    u_short src);
int	atomic_fcmpset_int(volatile u_int *dst, u_int *expect, u_int src);
u_int	atomic_fetchadd_int(volatile u_int *p, u_int v);
int	atomic_testandset_int(volatile u_int *p, u_int v);
int	atomic_testandclear_int(volatile u_int *p, u_int v);
void	atomic_thread_fence_acq(void);
void	atomic_thread_fence_acq_rel(void);
void	atomic_thread_fence_rel(void);
void	atomic_thread_fence_seq_cst(void);

#define	ATOMIC_LOAD(TYPE)					\
u_##TYPE	atomic_load_acq_##TYPE(volatile u_##TYPE *p)
#define	ATOMIC_STORE(TYPE)					\
void		atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)

int		atomic_cmpset_64(volatile uint64_t *, uint64_t, uint64_t);
uint64_t	atomic_load_acq_64(volatile uint64_t *);
void		atomic_store_rel_64(volatile uint64_t *, uint64_t);
uint64_t	atomic_swap_64(volatile uint64_t *, uint64_t);
uint64_t	atomic_fetchadd_64(volatile uint64_t *, uint64_t);
void		atomic_add_64(volatile uint64_t *, uint64_t);
void		atomic_subtract_64(volatile uint64_t *, uint64_t);

#else /* !KLD_MODULE && __GNUCLIKE_ASM */

/*
 * For userland, always use lock prefixes so that the binaries will run
 * on both SMP and !SMP systems.
 */
#if defined(SMP) || !defined(_KERNEL) || defined(KLD_MODULE)
#define	MPLOCKED	"lock ; "
#else
#define	MPLOCKED
#endif

/*
 * The assembly is volatilized to avoid code chunk removal by the compiler.
 * GCC aggressively reorders operations and memory clobbering is necessary
 * in order to avoid that for memory barriers.
 */
#define	ATOMIC_ASM(NAME, TYPE, OP, CONS, V)		\
static __inline void					\
atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile(MPLOCKED OP			\
	: "+m" (*p)					\
	: CONS (V)					\
	: "cc");					\
}							\
							\
static __inline void					\
atomic_##NAME##_barr_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile(MPLOCKED OP			\
	: "+m" (*p)					\
	: CONS (V)					\
	: "memory", "cc");				\
}							\
struct __hack

/*
 * Atomic compare and set, used by the mutex functions.
 *
 * cmpset:
 *	if (*dst == expect)
 *		*dst = src
 *
 * fcmpset:
 *	if (*dst == *expect)
 *		*dst = src
 *	else
 *		*expect = *dst
 *
 * Returns 0 on failure, non-zero on success.
 */
#define	ATOMIC_CMPSET(TYPE, CONS)			\
static __inline int					\
atomic_cmpset_##TYPE(volatile u_##TYPE *dst, u_##TYPE expect, u_##TYPE src) \
{							\
	u_char res;					\
							\
	__asm __volatile(				\
	"	" MPLOCKED "		"		\
	"	cmpxchg	%3,%1 ;		"		\
	"	sete	%0 ;		"		\
	"# atomic_cmpset_" #TYPE "	"		\
	: "=q" (res),			/* 0 */		\
	  "+m" (*dst),			/* 1 */		\
	  "+a" (expect)			/* 2 */		\
	: CONS (src)			/* 3 */		\
	: "memory", "cc");				\
	return (res);					\
}							\
							\
static __inline int					\
atomic_fcmpset_##TYPE(volatile u_##TYPE *dst, u_##TYPE *expect, u_##TYPE src) \
{							\
	u_char res;					\
							\
	__asm __volatile(				\
	"	" MPLOCKED "		"		\
	"	cmpxchg	%3,%1 ;		"		\
	"	sete	%0 ;		"		\
	"# atomic_fcmpset_" #TYPE "	"		\
	: "=q" (res),			/* 0 */		\
	  "+m" (*dst),			/* 1 */		\
	  "+a" (*expect)		/* 2 */		\
	: CONS (src)			/* 3 */		\
	: "memory", "cc");				\
	return (res);					\
}

ATOMIC_CMPSET(char, "q");
ATOMIC_CMPSET(short, "r");
ATOMIC_CMPSET(int, "r");
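
/*
 * Illustrative sketch (editorial example; example_set_bits and the flag
 * word are hypothetical): lock-free updates retry cmpset until no other
 * CPU raced in between the read and the write.  fcmpset folds the reload
 * into the failure path, since a failed fcmpset writes the current value
 * of *dst back into *expect.
 *
 *	static __inline void
 *	example_set_bits(volatile u_int *w, u_int bits)
 *	{
 *		u_int old;
 *
 *		old = *w;
 *		while (!atomic_fcmpset_int(w, &old, old | bits))
 *			continue;	(old was refreshed by the failure)
 *	}
 */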

/*
 * Atomically add the value of v to the integer pointed to by p and return
 * the previous value of *p.
 */
static __inline u_int
atomic_fetchadd_int(volatile u_int *p, u_int v)
{

	__asm __volatile(
	"	" MPLOCKED "		"
	"	xaddl	%0,%1 ;		"
	"# atomic_fetchadd_int"
	: "+r" (v),			/* 0 */
	  "+m" (*p)			/* 1 */
	: : "cc");
	return (v);
}

static __inline int
atomic_testandset_int(volatile u_int *p, u_int v)
{
	u_char res;

	__asm __volatile(
	"	" MPLOCKED "		"
	"	btsl	%2,%1 ;		"
	"	setc	%0 ;		"
	"# atomic_testandset_int"
	: "=q" (res),			/* 0 */
	  "+m" (*p)			/* 1 */
	: "Ir" (v & 0x1f)		/* 2 */
	: "cc");
	return (res);
}

static __inline int
atomic_testandclear_int(volatile u_int *p, u_int v)
{
	u_char res;

	__asm __volatile(
	"	" MPLOCKED "		"
	"	btrl	%2,%1 ;		"
	"	setc	%0 ;		"
	"# atomic_testandclear_int"
	: "=q" (res),			/* 0 */
	  "+m" (*p)			/* 1 */
	: "Ir" (v & 0x1f)		/* 2 */
	: "cc");
	return (res);
}

/*
 * We assume that a = b will do atomic loads and stores.  Due to the
 * IA32 memory model, a simple store guarantees release semantics.
 *
 * However, a load may pass a store if they are performed on distinct
 * addresses, so we need a Store/Load barrier for sequentially
 * consistent fences in SMP kernels.  We use "lock addl $0,mem" for a
 * Store/Load barrier, as recommended by the AMD Software Optimization
 * Guide, and not mfence.  In the kernel, we use a private per-cpu
 * cache line for "mem", to avoid introducing false data
 * dependencies.  In user space, we use the word at the top of the
 * stack.
 *
 * For UP kernels, however, the memory of the single processor is
 * always consistent, so we only need to stop the compiler from
 * reordering accesses in a way that violates the semantics of acquire
 * and release.
 */
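
/*
 * Illustrative pairing (editorial sketch; data, ready and consume() are
 * hypothetical): a release store publishes data to a matching acquire
 * load, without needing a full Store/Load barrier.
 *
 *	Producer:
 *		data = 42;
 *		atomic_store_rel_int(&ready, 1);
 *
 *	Consumer:
 *		while (atomic_load_acq_int(&ready) == 0)
 *			continue;
 *		consume(data);	(data is now stable)
 */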

#if defined(_KERNEL)
#if defined(SMP) || defined(KLD_MODULE)
#define	__storeload_barrier()	__mbk()
#else /* _KERNEL && UP */
#define	__storeload_barrier()	__compiler_membar()
#endif /* SMP */
#else /* !_KERNEL */
#define	__storeload_barrier()	__mbu()
#endif /* _KERNEL */

#define	ATOMIC_LOAD(TYPE)					\
static __inline u_##TYPE					\
atomic_load_acq_##TYPE(volatile u_##TYPE *p)			\
{								\
	u_##TYPE res;						\
								\
	res = *p;						\
	__compiler_membar();					\
	return (res);						\
}								\
struct __hack

#define	ATOMIC_STORE(TYPE)					\
static __inline void						\
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)	\
{								\
								\
	__compiler_membar();					\
	*p = v;							\
}								\
struct __hack

static __inline void
atomic_thread_fence_acq(void)
{

	__compiler_membar();
}

static __inline void
atomic_thread_fence_rel(void)
{

	__compiler_membar();
}

static __inline void
atomic_thread_fence_acq_rel(void)
{

	__compiler_membar();
}

static __inline void
atomic_thread_fence_seq_cst(void)
{

	__storeload_barrier();
}

#ifdef _KERNEL

#ifdef WANT_FUNCTIONS
int		atomic_cmpset_64_i386(volatile uint64_t *, uint64_t, uint64_t);
int		atomic_cmpset_64_i586(volatile uint64_t *, uint64_t, uint64_t);
uint64_t	atomic_load_acq_64_i386(volatile uint64_t *);
uint64_t	atomic_load_acq_64_i586(volatile uint64_t *);
void		atomic_store_rel_64_i386(volatile uint64_t *, uint64_t);
void		atomic_store_rel_64_i586(volatile uint64_t *, uint64_t);
uint64_t	atomic_swap_64_i386(volatile uint64_t *, uint64_t);
uint64_t	atomic_swap_64_i586(volatile uint64_t *, uint64_t);
#endif

/* I486 does not support SMP or CMPXCHG8B. */
static __inline int
atomic_cmpset_64_i386(volatile uint64_t *dst, uint64_t expect, uint64_t src)
{
	volatile uint32_t *p;
	u_char res;

	p = (volatile uint32_t *)dst;
	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	xorl	%1,%%eax ;	"
	"	xorl	%2,%%edx ;	"
	"	orl	%%edx,%%eax ;	"
	"	jne	1f ;		"
	"	movl	%4,%1 ;		"
	"	movl	%5,%2 ;		"
	"1:				"
	"	sete	%3 ;		"
	"	popfl"
	: "+A" (expect),		/* 0 */
	  "+m" (*p),			/* 1 */
	  "+m" (*(p + 1)),		/* 2 */
	  "=q" (res)			/* 3 */
	: "r" ((uint32_t)src),		/* 4 */
	  "r" ((uint32_t)(src >> 32))	/* 5 */
	: "memory", "cc");
	return (res);
}

static __inline uint64_t
atomic_load_acq_64_i386(volatile uint64_t *p)
{
	volatile uint32_t *q;
	uint64_t res;

	q = (volatile uint32_t *)p;
	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	movl	%1,%%eax ;	"
	"	movl	%2,%%edx ;	"
	"	popfl"
	: "=&A" (res)			/* 0 */
	: "m" (*q),			/* 1 */
	  "m" (*(q + 1))		/* 2 */
	: "memory");
	return (res);
}

static __inline void
atomic_store_rel_64_i386(volatile uint64_t *p, uint64_t v)
{
	volatile uint32_t *q;

	q = (volatile uint32_t *)p;
	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	movl	%%eax,%0 ;	"
	"	movl	%%edx,%1 ;	"
	"	popfl"
	: "=m" (*q),			/* 0 */
	  "=m" (*(q + 1))		/* 1 */
	: "A" (v)			/* 2 */
	: "memory");
}

static __inline uint64_t
atomic_swap_64_i386(volatile uint64_t *p, uint64_t v)
{
	volatile uint32_t *q;
	uint64_t res;

	q = (volatile uint32_t *)p;
	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	movl	%1,%%eax ;	"
	"	movl	%2,%%edx ;	"
	"	movl	%4,%2 ;		"
	"	movl	%3,%1 ;		"
	"	popfl"
	: "=&A" (res),			/* 0 */
	  "+m" (*q),			/* 1 */
	  "+m" (*(q + 1))		/* 2 */
	: "r" ((uint32_t)v),		/* 3 */
	  "r" ((uint32_t)(v >> 32)));	/* 4 */
	return (res);
}

static __inline int
atomic_cmpset_64_i586(volatile uint64_t *dst, uint64_t expect, uint64_t src)
{
	u_char res;

	__asm __volatile(
	"	" MPLOCKED "		"
	"	cmpxchg8b %1 ;		"
	"	sete	%0"
	: "=q" (res),			/* 0 */
	  "+m" (*dst),			/* 1 */
	  "+A" (expect)			/* 2 */
	: "b" ((uint32_t)src),		/* 3 */
	  "c" ((uint32_t)(src >> 32))	/* 4 */
	: "memory", "cc");
	return (res);
}

static __inline uint64_t
atomic_load_acq_64_i586(volatile uint64_t *p)
{
	uint64_t res;

	__asm __volatile(
	"	movl	%%ebx,%%eax ;	"
	"	movl	%%ecx,%%edx ;	"
	"	" MPLOCKED "		"
	"	cmpxchg8b %1"
	: "=&A" (res),			/* 0 */
	  "+m" (*p)			/* 1 */
	: : "memory", "cc");
	return (res);
}

static __inline void
atomic_store_rel_64_i586(volatile uint64_t *p, uint64_t v)
{

	__asm __volatile(
	"	movl	%%eax,%%ebx ;	"
	"	movl	%%edx,%%ecx ;	"
	"1:				"
	"	" MPLOCKED "		"
	"	cmpxchg8b %0 ;		"
	"	jne	1b"
	: "+m" (*p),			/* 0 */
	  "+A" (v)			/* 1 */
	: : "ebx", "ecx", "memory", "cc");
}

static __inline uint64_t
atomic_swap_64_i586(volatile uint64_t *p, uint64_t v)
{

	__asm __volatile(
	"	movl	%%eax,%%ebx ;	"
	"	movl	%%edx,%%ecx ;	"
	"1:				"
	"	" MPLOCKED "		"
	"	cmpxchg8b %0 ;		"
	"	jne	1b"
	: "+m" (*p),			/* 0 */
	  "+A" (v)			/* 1 */
	: : "ebx", "ecx", "memory", "cc");
	return (v);
}

static __inline int
atomic_cmpset_64(volatile uint64_t *dst, uint64_t expect, uint64_t src)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		return (atomic_cmpset_64_i386(dst, expect, src));
	else
		return (atomic_cmpset_64_i586(dst, expect, src));
}

static __inline uint64_t
atomic_load_acq_64(volatile uint64_t *p)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		return (atomic_load_acq_64_i386(p));
	else
		return (atomic_load_acq_64_i586(p));
}

static __inline void
atomic_store_rel_64(volatile uint64_t *p, uint64_t v)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		atomic_store_rel_64_i386(p, v);
	else
		atomic_store_rel_64_i586(p, v);
}

static __inline uint64_t
atomic_swap_64(volatile uint64_t *p, uint64_t v)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		return (atomic_swap_64_i386(p, v));
	else
		return (atomic_swap_64_i586(p, v));
}

static __inline uint64_t
atomic_fetchadd_64(volatile uint64_t *p, uint64_t v)
{
	uint64_t t;

	for (;;) {
		t = *p;
		if (atomic_cmpset_64(p, t, t + v))
			break;
	}
	return (t);
}

static __inline void
atomic_add_64(volatile uint64_t *p, uint64_t v)
{
	uint64_t t;

	for (;;) {
		t = *p;
		if (atomic_cmpset_64(p, t, t + v))
			break;
	}
}

static __inline void
atomic_subtract_64(volatile uint64_t *p, uint64_t v)
{
	uint64_t t;

	for (;;) {
		t = *p;
		if (atomic_cmpset_64(p, t, t - v))
			break;
	}
}
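
/*
 * Illustrative sketch (editorial example; the counter is hypothetical):
 * the cmpset-based loops above make 64-bit read-modify-write operations
 * work even on CPUs that lack cmpxchg8b.
 *
 *	static volatile uint64_t events;
 *
 *	atomic_add_64(&events, 1);
 *	snapshot = atomic_load_acq_64(&events);
 */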

#endif /* _KERNEL */

#endif /* KLD_MODULE || !__GNUCLIKE_ASM */

ATOMIC_ASM(set,	     char,  "orb %b1,%0",  "iq",  v);
ATOMIC_ASM(clear,    char,  "andb %b1,%0", "iq", ~v);
ATOMIC_ASM(add,	     char,  "addb %b1,%0", "iq",  v);
ATOMIC_ASM(subtract, char,  "subb %b1,%0", "iq",  v);

ATOMIC_ASM(set,	     short, "orw %w1,%0",  "ir",  v);
ATOMIC_ASM(clear,    short, "andw %w1,%0", "ir", ~v);
ATOMIC_ASM(add,	     short, "addw %w1,%0", "ir",  v);
ATOMIC_ASM(subtract, short, "subw %w1,%0", "ir",  v);

ATOMIC_ASM(set,	     int,   "orl %1,%0",   "ir",  v);
ATOMIC_ASM(clear,    int,   "andl %1,%0",  "ir", ~v);
ATOMIC_ASM(add,	     int,   "addl %1,%0",  "ir",  v);
ATOMIC_ASM(subtract, int,   "subl %1,%0",  "ir",  v);

ATOMIC_ASM(set,	     long,  "orl %1,%0",   "ir",  v);
ATOMIC_ASM(clear,    long,  "andl %1,%0",  "ir", ~v);
ATOMIC_ASM(add,	     long,  "addl %1,%0",  "ir",  v);
ATOMIC_ASM(subtract, long,  "subl %1,%0",  "ir",  v);

#define	ATOMIC_LOADSTORE(TYPE)				\
	ATOMIC_LOAD(TYPE);				\
	ATOMIC_STORE(TYPE)

ATOMIC_LOADSTORE(char);
ATOMIC_LOADSTORE(short);
ATOMIC_LOADSTORE(int);
ATOMIC_LOADSTORE(long);

#undef ATOMIC_ASM
#undef ATOMIC_LOAD
#undef ATOMIC_STORE
#undef ATOMIC_LOADSTORE

#ifndef WANT_FUNCTIONS

static __inline int
atomic_cmpset_long(volatile u_long *dst, u_long expect, u_long src)
{

	return (atomic_cmpset_int((volatile u_int *)dst, (u_int)expect,
	    (u_int)src));
}

static __inline int
atomic_fcmpset_long(volatile u_long *dst, u_long *expect, u_long src)
{

	return (atomic_fcmpset_int((volatile u_int *)dst, (u_int *)expect,
	    (u_int)src));
}

static __inline u_long
atomic_fetchadd_long(volatile u_long *p, u_long v)
{

	return (atomic_fetchadd_int((volatile u_int *)p, (u_int)v));
}

static __inline int
atomic_testandset_long(volatile u_long *p, u_int v)
{

	return (atomic_testandset_int((volatile u_int *)p, v));
}

static __inline int
atomic_testandclear_long(volatile u_long *p, u_int v)
{

	return (atomic_testandclear_int((volatile u_int *)p, v));
}

/* Read the current value and store a new value in the destination. */
#ifdef __GNUCLIKE_ASM

static __inline u_int
atomic_swap_int(volatile u_int *p, u_int v)
{

	__asm __volatile(
	"	xchgl	%1,%0 ;		"
	"# atomic_swap_int"
	: "+r" (v),			/* 0 */
	  "+m" (*p));			/* 1 */
	return (v);
}

static __inline u_long
atomic_swap_long(volatile u_long *p, u_long v)
{

	return (atomic_swap_int((volatile u_int *)p, (u_int)v));
}

#else /* !__GNUCLIKE_ASM */

u_int	atomic_swap_int(volatile u_int *p, u_int v);
u_long	atomic_swap_long(volatile u_long *p, u_long v);

#endif /* __GNUCLIKE_ASM */

#define	atomic_set_acq_char		atomic_set_barr_char
#define	atomic_set_rel_char		atomic_set_barr_char
#define	atomic_clear_acq_char		atomic_clear_barr_char
#define	atomic_clear_rel_char		atomic_clear_barr_char
#define	atomic_add_acq_char		atomic_add_barr_char
#define	atomic_add_rel_char		atomic_add_barr_char
#define	atomic_subtract_acq_char	atomic_subtract_barr_char
#define	atomic_subtract_rel_char	atomic_subtract_barr_char
#define	atomic_cmpset_acq_char		atomic_cmpset_char
#define	atomic_cmpset_rel_char		atomic_cmpset_char
#define	atomic_fcmpset_acq_char		atomic_fcmpset_char
#define	atomic_fcmpset_rel_char		atomic_fcmpset_char

#define	atomic_set_acq_short		atomic_set_barr_short
#define	atomic_set_rel_short		atomic_set_barr_short
#define	atomic_clear_acq_short		atomic_clear_barr_short
#define	atomic_clear_rel_short		atomic_clear_barr_short
#define	atomic_add_acq_short		atomic_add_barr_short
#define	atomic_add_rel_short		atomic_add_barr_short
#define	atomic_subtract_acq_short	atomic_subtract_barr_short
#define	atomic_subtract_rel_short	atomic_subtract_barr_short
#define	atomic_cmpset_acq_short		atomic_cmpset_short
#define	atomic_cmpset_rel_short		atomic_cmpset_short
#define	atomic_fcmpset_acq_short	atomic_fcmpset_short
#define	atomic_fcmpset_rel_short	atomic_fcmpset_short

#define	atomic_set_acq_int		atomic_set_barr_int
#define	atomic_set_rel_int		atomic_set_barr_int
#define	atomic_clear_acq_int		atomic_clear_barr_int
#define	atomic_clear_rel_int		atomic_clear_barr_int
#define	atomic_add_acq_int		atomic_add_barr_int
#define	atomic_add_rel_int		atomic_add_barr_int
#define	atomic_subtract_acq_int		atomic_subtract_barr_int
#define	atomic_subtract_rel_int		atomic_subtract_barr_int
#define	atomic_cmpset_acq_int		atomic_cmpset_int
#define	atomic_cmpset_rel_int		atomic_cmpset_int
#define	atomic_fcmpset_acq_int		atomic_fcmpset_int
#define	atomic_fcmpset_rel_int		atomic_fcmpset_int

#define	atomic_set_acq_long		atomic_set_barr_long
#define	atomic_set_rel_long		atomic_set_barr_long
#define	atomic_clear_acq_long		atomic_clear_barr_long
#define	atomic_clear_rel_long		atomic_clear_barr_long
#define	atomic_add_acq_long		atomic_add_barr_long
#define	atomic_add_rel_long		atomic_add_barr_long
#define	atomic_subtract_acq_long	atomic_subtract_barr_long
#define	atomic_subtract_rel_long	atomic_subtract_barr_long
#define	atomic_cmpset_acq_long		atomic_cmpset_long
#define	atomic_cmpset_rel_long		atomic_cmpset_long
#define	atomic_fcmpset_acq_long		atomic_fcmpset_long
#define	atomic_fcmpset_rel_long		atomic_fcmpset_long

#define	atomic_readandclear_int(p)	atomic_swap_int(p, 0)
#define	atomic_readandclear_long(p)	atomic_swap_long(p, 0)
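
/*
 * Illustrative sketch (editorial example; event_bits is hypothetical):
 * readandclear, built on swap with zero, atomically drains a word so
 * each pending bit is observed exactly once.
 *
 *	static volatile u_int event_bits;
 *
 *	u_int pending = atomic_readandclear_int(&event_bits);
 */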

/* Operations on 8-bit bytes. */
#define	atomic_set_8		atomic_set_char
#define	atomic_set_acq_8	atomic_set_acq_char
#define	atomic_set_rel_8	atomic_set_rel_char
#define	atomic_clear_8		atomic_clear_char
#define	atomic_clear_acq_8	atomic_clear_acq_char
#define	atomic_clear_rel_8	atomic_clear_rel_char
#define	atomic_add_8		atomic_add_char
#define	atomic_add_acq_8	atomic_add_acq_char
#define	atomic_add_rel_8	atomic_add_rel_char
#define	atomic_subtract_8	atomic_subtract_char
#define	atomic_subtract_acq_8	atomic_subtract_acq_char
#define	atomic_subtract_rel_8	atomic_subtract_rel_char
#define	atomic_load_acq_8	atomic_load_acq_char
#define	atomic_store_rel_8	atomic_store_rel_char
#define	atomic_cmpset_8		atomic_cmpset_char
#define	atomic_cmpset_acq_8	atomic_cmpset_acq_char
#define	atomic_cmpset_rel_8	atomic_cmpset_rel_char
#define	atomic_fcmpset_8	atomic_fcmpset_char
#define	atomic_fcmpset_acq_8	atomic_fcmpset_acq_char
#define	atomic_fcmpset_rel_8	atomic_fcmpset_rel_char

/* Operations on 16-bit words. */
#define	atomic_set_16		atomic_set_short
#define	atomic_set_acq_16	atomic_set_acq_short
#define	atomic_set_rel_16	atomic_set_rel_short
#define	atomic_clear_16		atomic_clear_short
#define	atomic_clear_acq_16	atomic_clear_acq_short
#define	atomic_clear_rel_16	atomic_clear_rel_short
#define	atomic_add_16		atomic_add_short
#define	atomic_add_acq_16	atomic_add_acq_short
#define	atomic_add_rel_16	atomic_add_rel_short
#define	atomic_subtract_16	atomic_subtract_short
#define	atomic_subtract_acq_16	atomic_subtract_acq_short
#define	atomic_subtract_rel_16	atomic_subtract_rel_short
#define	atomic_load_acq_16	atomic_load_acq_short
#define	atomic_store_rel_16	atomic_store_rel_short
#define	atomic_cmpset_16	atomic_cmpset_short
#define	atomic_cmpset_acq_16	atomic_cmpset_acq_short
#define	atomic_cmpset_rel_16	atomic_cmpset_rel_short
#define	atomic_fcmpset_16	atomic_fcmpset_short
#define	atomic_fcmpset_acq_16	atomic_fcmpset_acq_short
#define	atomic_fcmpset_rel_16	atomic_fcmpset_rel_short

/* Operations on 32-bit double words. */
#define	atomic_set_32		atomic_set_int
#define	atomic_set_acq_32	atomic_set_acq_int
#define	atomic_set_rel_32	atomic_set_rel_int
#define	atomic_clear_32		atomic_clear_int
#define	atomic_clear_acq_32	atomic_clear_acq_int
#define	atomic_clear_rel_32	atomic_clear_rel_int
#define	atomic_add_32		atomic_add_int
#define	atomic_add_acq_32	atomic_add_acq_int
#define	atomic_add_rel_32	atomic_add_rel_int
#define	atomic_subtract_32	atomic_subtract_int
#define	atomic_subtract_acq_32	atomic_subtract_acq_int
#define	atomic_subtract_rel_32	atomic_subtract_rel_int
#define	atomic_load_acq_32	atomic_load_acq_int
#define	atomic_store_rel_32	atomic_store_rel_int
#define	atomic_cmpset_32	atomic_cmpset_int
#define	atomic_cmpset_acq_32	atomic_cmpset_acq_int
#define	atomic_cmpset_rel_32	atomic_cmpset_rel_int
#define	atomic_fcmpset_32	atomic_fcmpset_int
#define	atomic_fcmpset_acq_32	atomic_fcmpset_acq_int
#define	atomic_fcmpset_rel_32	atomic_fcmpset_rel_int
#define	atomic_swap_32		atomic_swap_int
#define	atomic_readandclear_32	atomic_readandclear_int
#define	atomic_fetchadd_32	atomic_fetchadd_int
#define	atomic_testandset_32	atomic_testandset_int
#define	atomic_testandclear_32	atomic_testandclear_int

/* Operations on 64-bit quad words. */
#define	atomic_cmpset_acq_64	atomic_cmpset_64
#define	atomic_cmpset_rel_64	atomic_cmpset_64
#define	atomic_fetchadd_acq_64	atomic_fetchadd_64
#define	atomic_fetchadd_rel_64	atomic_fetchadd_64
#define	atomic_add_acq_64	atomic_add_64
#define	atomic_add_rel_64	atomic_add_64
#define	atomic_subtract_acq_64	atomic_subtract_64
#define	atomic_subtract_rel_64	atomic_subtract_64

/* Operations on pointers. */
#define	atomic_set_ptr(p, v) \
	atomic_set_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_set_acq_ptr(p, v) \
	atomic_set_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_set_rel_ptr(p, v) \
	atomic_set_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_ptr(p, v) \
	atomic_clear_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_acq_ptr(p, v) \
	atomic_clear_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_rel_ptr(p, v) \
	atomic_clear_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_ptr(p, v) \
	atomic_add_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_acq_ptr(p, v) \
	atomic_add_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_rel_ptr(p, v) \
	atomic_add_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_ptr(p, v) \
	atomic_subtract_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_acq_ptr(p, v) \
	atomic_subtract_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_rel_ptr(p, v) \
	atomic_subtract_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_load_acq_ptr(p) \
	atomic_load_acq_int((volatile u_int *)(p))
#define	atomic_store_rel_ptr(p, v) \
	atomic_store_rel_int((volatile u_int *)(p), (v))
#define	atomic_cmpset_ptr(dst, old, new) \
	atomic_cmpset_int((volatile u_int *)(dst), (u_int)(old), (u_int)(new))
#define	atomic_cmpset_acq_ptr(dst, old, new) \
	atomic_cmpset_acq_int((volatile u_int *)(dst), (u_int)(old), \
	    (u_int)(new))
#define	atomic_cmpset_rel_ptr(dst, old, new) \
	atomic_cmpset_rel_int((volatile u_int *)(dst), (u_int)(old), \
	    (u_int)(new))
#define	atomic_fcmpset_ptr(dst, old, new) \
	atomic_fcmpset_int((volatile u_int *)(dst), (u_int *)(old), (u_int)(new))
#define	atomic_fcmpset_acq_ptr(dst, old, new) \
	atomic_fcmpset_acq_int((volatile u_int *)(dst), (u_int *)(old), \
	    (u_int)(new))
#define	atomic_fcmpset_rel_ptr(dst, old, new) \
	atomic_fcmpset_rel_int((volatile u_int *)(dst), (u_int *)(old), \
	    (u_int)(new))
#define	atomic_swap_ptr(p, v) \
	atomic_swap_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_readandclear_ptr(p) \
	atomic_readandclear_int((volatile u_int *)(p))

#endif /* !WANT_FUNCTIONS */

#if defined(_KERNEL)
#define	mb()	__mbk()
#define	wmb()	__mbk()
#define	rmb()	__mbk()
#else
#define	mb()	__mbu()
#define	wmb()	__mbu()
#define	rmb()	__mbu()
#endif

#endif /* !_MACHINE_ATOMIC_H_ */