1 /* $NetBSD: atomic.h,v 1.1 2002/10/19 12:22:34 bsh Exp $ */
4 * Copyright (C) 2003-2004 Olivier Houchard
5 * Copyright (C) 1994-1997 Mark Brinicombe
6 * Copyright (C) 1994 Brini
9 * This code is derived from software written for Brini by Mark Brinicombe
11 * Redistribution and use in source and binary forms, with or without
12 * modification, are permitted provided that the following conditions
14 * 1. Redistributions of source code must retain the above copyright
15 * notice, this list of conditions and the following disclaimer.
16 * 2. Redistributions in binary form must reproduce the above copyright
17 * notice, this list of conditions and the following disclaimer in the
18 * documentation and/or other materials provided with the distribution.
19 * 3. All advertising materials mentioning features or use of this software
20 * must display the following acknowledgement:
21 * This product includes software developed by Brini.
22 * 4. The name of Brini may not be used to endorse or promote products
23 * derived from this software without specific prior written permission.
25 * THIS SOFTWARE IS PROVIDED BY BRINI ``AS IS'' AND ANY EXPRESS OR
26 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
27 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
28 * IN NO EVENT SHALL BRINI BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
29 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
30 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
31 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
32 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
33 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
39 #ifndef _MACHINE_ATOMIC_H_
40 #define _MACHINE_ATOMIC_H_
42 #include <sys/types.h>
43 #include <machine/armreg.h>
46 #include <machine/sysarch.h>
48 #include <machine/cpuconf.h>
/*
 * Memory barrier primitives.  ARMv7 has dedicated ISB/DSB/DMB instructions;
 * ARMv6 expresses the same barriers as CP15 c7 operations.  Older cores have
 * no data memory barrier at all, so dmb() falls back to the drain-write-buffer
 * operation (dsb) there.
 */
#if defined (__ARM_ARCH_7__) || defined (__ARM_ARCH_7A__)
#define isb()  __asm __volatile("isb" : : : "memory")
#define dsb()  __asm __volatile("dsb" : : : "memory")
#define dmb()  __asm __volatile("dmb" : : : "memory")
#elif defined (__ARM_ARCH_6__) || defined (__ARM_ARCH_6J__) || \
  defined (__ARM_ARCH_6K__) || defined (__ARM_ARCH_6T2__) || \
  defined (__ARM_ARCH_6Z__) || defined (__ARM_ARCH_6ZK__)
#define isb()  __asm __volatile("mcr p15, 0, %0, c7, c5, 4" : : "r" (0) : "memory")
#define dsb()  __asm __volatile("mcr p15, 0, %0, c7, c10, 4" : : "r" (0) : "memory")
#define dmb()  __asm __volatile("mcr p15, 0, %0, c7, c10, 5" : : "r" (0) : "memory")
#else
#define isb()  __asm __volatile("mcr p15, 0, %0, c7, c5, 4" : : "r" (0) : "memory")
#define dsb()  __asm __volatile("mcr p15, 0, %0, c7, c10, 4" : : "r" (0) : "memory")
#define dmb()  dsb()
#endif
/*
 * It would be nice to use _HAVE_ARMv6_INSTRUCTIONS from machine/asm.h
 * here, but that header can't be included here because this is C
 * code.  I would like to move the _HAVE_ARMv6_INSTRUCTIONS definition
 * out of asm.h so it can be used in both asm and C code. - kientzle@
 */
/*
 * This #if opens the ARMv6/ARMv7 implementation section (closed by the
 * matching "#endif / * Arch >= v6 * /" much further down).  All of these
 * architecture levels have LDREXD/STREXD, so 64-bit atomics are available.
 */
#if defined (__ARM_ARCH_7__) || \
defined (__ARM_ARCH_7A__) || \
defined (__ARM_ARCH_6__) || \
defined (__ARM_ARCH_6J__) || \
defined (__ARM_ARCH_6K__) || \
defined (__ARM_ARCH_6T2__) || \
defined (__ARM_ARCH_6Z__) || \
defined (__ARM_ARCH_6ZK__)
#define ARM_HAVE_ATOMIC64
/*
 * __do_dmb(): full data memory barrier used to build the _acq/_rel atomic
 * variants below.  ARMv7 has the DMB instruction; ARMv6 uses the equivalent
 * CP15 c7,c10,5 operation.
 */
static __inline void
__do_dmb(void)
{

#if defined (__ARM_ARCH_7__) || defined (__ARM_ARCH_7A__)
	__asm __volatile("dmb" : : : "memory");
#else
	__asm __volatile("mcr p15, 0, r0, c7, c10, 5" : : : "memory");
#endif
}
/*
 * Generate the _acq and _rel variants of an atomic op from its plain form:
 * acquire = op then barrier (later accesses can't move before the op),
 * release = barrier then op (earlier accesses can't move after the op).
 */
#define ATOMIC_ACQ_REL_LONG(NAME)					\
static __inline void							\
atomic_##NAME##_acq_long(__volatile u_long *p, u_long v)		\
{									\
	atomic_##NAME##_long(p, v);					\
	__do_dmb();							\
}									\
									\
static __inline void							\
atomic_##NAME##_rel_long(__volatile u_long *p, u_long v)		\
{									\
	__do_dmb();							\
	atomic_##NAME##_long(p, v);					\
}

#define ATOMIC_ACQ_REL(NAME, WIDTH)					\
static __inline void							\
atomic_##NAME##_acq_##WIDTH(__volatile uint##WIDTH##_t *p, uint##WIDTH##_t v)\
{									\
	atomic_##NAME##_##WIDTH(p, v);					\
	__do_dmb();							\
}									\
									\
static __inline void							\
atomic_##NAME##_rel_##WIDTH(__volatile uint##WIDTH##_t *p, uint##WIDTH##_t v)\
{									\
	__do_dmb();							\
	atomic_##NAME##_##WIDTH(p, v);					\
}
131 atomic_set_32(volatile uint32_t *address, uint32_t setmask)
133 uint32_t tmp = 0, tmp2 = 0;
135 __asm __volatile("1: ldrex %0, [%2]\n"
137 "strex %1, %0, [%2]\n"
141 : "=&r" (tmp), "+r" (tmp2)
142 , "+r" (address), "+r" (setmask) : : "cc", "memory");
147 atomic_set_64(volatile uint64_t *p, uint64_t val)
154 " ldrexd %[tmp], [%[ptr]]\n"
155 " orr %Q[tmp], %Q[val]\n"
156 " orr %R[tmp], %R[val]\n"
157 " strexd %[exf], %[tmp], [%[ptr]]\n"
161 : [exf] "=&r" (exflag),
169 atomic_set_long(volatile u_long *address, u_long setmask)
171 u_long tmp = 0, tmp2 = 0;
173 __asm __volatile("1: ldrex %0, [%2]\n"
175 "strex %1, %0, [%2]\n"
179 : "=&r" (tmp), "+r" (tmp2)
180 , "+r" (address), "+r" (setmask) : : "cc", "memory");
185 atomic_clear_32(volatile uint32_t *address, uint32_t setmask)
187 uint32_t tmp = 0, tmp2 = 0;
189 __asm __volatile("1: ldrex %0, [%2]\n"
191 "strex %1, %0, [%2]\n"
195 : "=&r" (tmp), "+r" (tmp2)
196 ,"+r" (address), "+r" (setmask) : : "cc", "memory");
200 atomic_clear_64(volatile uint64_t *p, uint64_t val)
207 " ldrexd %[tmp], [%[ptr]]\n"
208 " bic %Q[tmp], %Q[val]\n"
209 " bic %R[tmp], %R[val]\n"
210 " strexd %[exf], %[tmp], [%[ptr]]\n"
214 : [exf] "=&r" (exflag),
222 atomic_clear_long(volatile u_long *address, u_long setmask)
224 u_long tmp = 0, tmp2 = 0;
226 __asm __volatile("1: ldrex %0, [%2]\n"
228 "strex %1, %0, [%2]\n"
232 : "=&r" (tmp), "+r" (tmp2)
233 ,"+r" (address), "+r" (setmask) : : "cc", "memory");
236 static __inline u_int32_t
237 atomic_cmpset_32(volatile u_int32_t *p, volatile u_int32_t cmpval, volatile u_int32_t newval)
241 __asm __volatile("1: ldrex %0, [%1]\n"
246 "strex %0, %3, [%1]\n"
253 ,"+r" (p), "+r" (cmpval), "+r" (newval) : : "cc",
259 atomic_cmpset_64(volatile uint64_t *p, uint64_t cmpval, uint64_t newval)
266 " ldrexd %[tmp], [%[ptr]]\n"
267 " teq %Q[tmp], %Q[cmpval]\n"
269 " teqeq %R[tmp], %R[cmpval]\n"
270 " movne %[ret], #0\n"
272 " strexd %[ret], %[newval], [%[ptr]]\n"
281 [cmpval] "r" (cmpval),
282 [newval] "r" (newval)
287 static __inline u_long
288 atomic_cmpset_long(volatile u_long *p, volatile u_long cmpval, volatile u_long newval)
292 __asm __volatile("1: ldrex %0, [%1]\n"
297 "strex %0, %3, [%1]\n"
304 ,"+r" (p), "+r" (cmpval), "+r" (newval) : : "cc",
309 static __inline u_int32_t
310 atomic_cmpset_acq_32(volatile u_int32_t *p, volatile u_int32_t cmpval, volatile u_int32_t newval)
312 u_int32_t ret = atomic_cmpset_32(p, cmpval, newval);
318 static __inline uint64_t
319 atomic_cmpset_acq_64(volatile uint64_t *p, volatile uint64_t cmpval, volatile uint64_t newval)
321 uint64_t ret = atomic_cmpset_64(p, cmpval, newval);
327 static __inline u_long
328 atomic_cmpset_acq_long(volatile u_long *p, volatile u_long cmpval, volatile u_long newval)
330 u_long ret = atomic_cmpset_long(p, cmpval, newval);
336 static __inline u_int32_t
337 atomic_cmpset_rel_32(volatile u_int32_t *p, volatile u_int32_t cmpval, volatile u_int32_t newval)
341 return (atomic_cmpset_32(p, cmpval, newval));
344 static __inline uint64_t
345 atomic_cmpset_rel_64(volatile uint64_t *p, volatile uint64_t cmpval, volatile uint64_t newval)
349 return (atomic_cmpset_64(p, cmpval, newval));
352 static __inline u_long
353 atomic_cmpset_rel_long(volatile u_long *p, volatile u_long cmpval, volatile u_long newval)
357 return (atomic_cmpset_long(p, cmpval, newval));
362 atomic_add_32(volatile u_int32_t *p, u_int32_t val)
364 uint32_t tmp = 0, tmp2 = 0;
366 __asm __volatile("1: ldrex %0, [%2]\n"
368 "strex %1, %0, [%2]\n"
372 : "=&r" (tmp), "+r" (tmp2)
373 ,"+r" (p), "+r" (val) : : "cc", "memory");
377 atomic_add_64(volatile uint64_t *p, uint64_t val)
384 " ldrexd %[tmp], [%[ptr]]\n"
385 " adds %Q[tmp], %Q[val]\n"
386 " adc %R[tmp], %R[val]\n"
387 " strexd %[exf], %[tmp], [%[ptr]]\n"
391 : [exf] "=&r" (exflag),
399 atomic_add_long(volatile u_long *p, u_long val)
401 u_long tmp = 0, tmp2 = 0;
403 __asm __volatile("1: ldrex %0, [%2]\n"
405 "strex %1, %0, [%2]\n"
409 : "=&r" (tmp), "+r" (tmp2)
410 ,"+r" (p), "+r" (val) : : "cc", "memory");
414 atomic_subtract_32(volatile u_int32_t *p, u_int32_t val)
416 uint32_t tmp = 0, tmp2 = 0;
418 __asm __volatile("1: ldrex %0, [%2]\n"
420 "strex %1, %0, [%2]\n"
424 : "=&r" (tmp), "+r" (tmp2)
425 ,"+r" (p), "+r" (val) : : "cc", "memory");
429 atomic_subtract_64(volatile uint64_t *p, uint64_t val)
436 " ldrexd %[tmp], [%[ptr]]\n"
437 " subs %Q[tmp], %Q[val]\n"
438 " sbc %R[tmp], %R[val]\n"
439 " strexd %[exf], %[tmp], [%[ptr]]\n"
443 : [exf] "=&r" (exflag),
451 atomic_subtract_long(volatile u_long *p, u_long val)
453 u_long tmp = 0, tmp2 = 0;
455 __asm __volatile("1: ldrex %0, [%2]\n"
457 "strex %1, %0, [%2]\n"
461 : "=&r" (tmp), "+r" (tmp2)
462 ,"+r" (p), "+r" (val) : : "cc", "memory");
/*
 * Instantiate the _acq/_rel wrappers for every set/clear/add/subtract op at
 * 32-bit, 64-bit and u_long widths, then retire the generator macros.
 */
ATOMIC_ACQ_REL(clear, 32)
ATOMIC_ACQ_REL(add, 32)
ATOMIC_ACQ_REL(subtract, 32)
ATOMIC_ACQ_REL(set, 32)
ATOMIC_ACQ_REL(clear, 64)
ATOMIC_ACQ_REL(add, 64)
ATOMIC_ACQ_REL(subtract, 64)
ATOMIC_ACQ_REL(set, 64)
ATOMIC_ACQ_REL_LONG(clear)
ATOMIC_ACQ_REL_LONG(add)
ATOMIC_ACQ_REL_LONG(subtract)
ATOMIC_ACQ_REL_LONG(set)

#undef ATOMIC_ACQ_REL
#undef ATOMIC_ACQ_REL_LONG
/*
 * atomic_fetchadd_32: *p += val atomically, returning the PREVIOUS value.
 * atomic_readandclear_32: swap 0 into *p, returning the previous value.
 * atomic_load_acq_32 / atomic_store_rel_32: plain access plus barrier on
 * the appropriate side (load-then-barrier / barrier-then-store).
 */
static __inline uint32_t
atomic_fetchadd_32(volatile uint32_t *p, uint32_t val)
{
	uint32_t tmp = 0, tmp2 = 0, ret = 0;

	__asm __volatile("1: ldrex %0, [%3]\n"
			 "add %1, %0, %4\n"
			 "strex %2, %1, [%3]\n"
			 "cmp %2, #0\n"
			 "bne 1b\n"
			 : "+r" (ret), "=&r" (tmp), "+r" (tmp2)
			 ,"+r" (p), "+r" (val) : : "cc", "memory");
	return (ret);
}

static __inline uint32_t
atomic_readandclear_32(volatile u_int32_t *p)
{
	uint32_t ret, tmp = 0, tmp2 = 0;

	__asm __volatile("1: ldrex %0, [%3]\n"
			 "mov %1, #0\n"
			 "strex %2, %1, [%3]\n"
			 "cmp %2, #0\n"
			 "bne 1b\n"
			 : "=r" (ret), "=&r" (tmp), "+r" (tmp2)
			 ,"+r" (p) : : "cc", "memory");
	return (ret);
}

static __inline uint32_t
atomic_load_acq_32(volatile uint32_t *p)
{
	uint32_t v;

	v = *p;
	__do_dmb();
	return (v);
}

static __inline void
atomic_store_rel_32(volatile uint32_t *p, uint32_t v)
{

	__do_dmb();
	*p = v;
}
/*
 * 64-bit fetch-add, read-and-clear, and atomic load using LDREXD/STREXD.
 */
static __inline uint64_t
atomic_fetchadd_64(volatile uint64_t *p, uint64_t val)
{
	uint64_t ret, tmp;
	uint32_t exflag;

	__asm __volatile(
		"1:\n"
		"   ldrexd %[ret], [%[ptr]]\n"
		"   adds   %Q[tmp], %Q[ret], %Q[val]\n"
		"   adc    %R[tmp], %R[ret], %R[val]\n"
		"   strexd %[exf], %[tmp], [%[ptr]]\n"
		"   teq    %[exf], #0\n"
		"   bne    1b\n"
		: [ret] "=&r" (ret),
		  [exf] "=&r" (exflag),
		  [tmp] "=&r" (tmp)
		: [ptr] "r" (p),
		  [val] "r" (val)
		: "cc", "memory");
	return (ret);
}

static __inline uint64_t
atomic_readandclear_64(volatile uint64_t *p)
{
	uint64_t ret, tmp;
	uint32_t exflag;

	__asm __volatile(
		"1:\n"
		"   ldrexd %[ret], [%[ptr]]\n"
		"   mov    %Q[tmp], #0\n"
		"   mov    %R[tmp], #0\n"
		"   strexd %[exf], %[tmp], [%[ptr]]\n"
		"   teq    %[exf], #0\n"
		"   bne    1b\n"
		: [ret] "=&r" (ret),
		  [exf] "=&r" (exflag),
		  [tmp] "=&r" (tmp)
		: [ptr] "r" (p)
		: "cc", "memory");
	return (ret);
}

static __inline uint64_t
atomic_load_64(volatile uint64_t *p)
{
	uint64_t ret;

	/*
	 * The only way to atomically load 64 bits is with LDREXD which puts the
	 * exclusive monitor into the exclusive state, so reset it to open state
	 * with CLREX because we don't actually need to store anything.
	 */
	__asm __volatile(
		"1:\n"
		"   ldrexd %[ret], [%[ptr]]\n"
		"   clrex\n"
		: [ret] "=&r" (ret)
		: [ptr] "r" (p)
		: "cc", "memory");
	return (ret);
}
/*
 * Acquire load, atomic store, and release store for 64-bit values.
 */
static __inline uint64_t
atomic_load_acq_64(volatile uint64_t *p)
{
	uint64_t ret;

	ret = atomic_load_64(p);
	__do_dmb();
	return (ret);
}

static __inline void
atomic_store_64(volatile uint64_t *p, uint64_t val)
{
	uint64_t tmp;
	uint32_t exflag;

	/*
	 * The only way to atomically store 64 bits is with STREXD, which will
	 * succeed only if paired up with a preceeding LDREXD using the same
	 * address, so we read and discard the existing value before storing.
	 */
	__asm __volatile(
		"1:\n"
		"   ldrexd %[tmp], [%[ptr]]\n"
		"   strexd %[exf], %[val], [%[ptr]]\n"
		"   teq    %[exf], #0\n"
		"   bne    1b\n"
		: [tmp] "=&r" (tmp),
		  [exf] "=&r" (exflag)
		: [ptr] "r" (p),
		  [val] "r" (val)
		: "cc", "memory");
}

static __inline void
atomic_store_rel_64(volatile uint64_t *p, uint64_t val)
{

	__do_dmb();
	atomic_store_64(p, val);
}
641 static __inline u_long
642 atomic_fetchadd_long(volatile u_long *p, u_long val)
644 u_long tmp = 0, tmp2 = 0, ret = 0;
646 __asm __volatile("1: ldrex %0, [%3]\n"
648 "strex %2, %1, [%3]\n"
652 : "+r" (ret), "=&r" (tmp), "+r" (tmp2)
653 ,"+r" (p), "+r" (val) : : "cc", "memory");
657 static __inline u_long
658 atomic_readandclear_long(volatile u_long *p)
660 u_long ret, tmp = 0, tmp2 = 0;
662 __asm __volatile("1: ldrex %0, [%3]\n"
664 "strex %2, %1, [%3]\n"
668 : "=r" (ret), "=&r" (tmp), "+r" (tmp2)
669 ,"+r" (p) : : "cc", "memory");
673 static __inline u_long
674 atomic_load_acq_long(volatile u_long *p)
684 atomic_store_rel_long(volatile u_long *p, u_long v)
/*
 * Run `expr` with IRQ and FIQ masked: save CPSR, set the I and F bits,
 * evaluate the expression, then restore the saved CPSR.  Pre-ARMv6 kernel
 * atomics below are built on this.
 */
#define __with_interrupts_disabled(expr) \
	do {						\
		u_int cpsr_save, tmp;			\
							\
		__asm __volatile(			\
			"mrs  %0, cpsr;"		\
			"orr  %1, %0, %2;"		\
			"msr  cpsr_fsxc, %1;"		\
			: "=r" (cpsr_save), "=r" (tmp)	\
			: "I" (PSR_I | PSR_F)		\
			: "cc" );			\
		(expr);					\
		__asm __volatile(			\
			"msr  cpsr_fsxc, %0"		\
			: /* no output */		\
			: "r" (cpsr_save)		\
			: "cc" );			\
	} while(0)
/*
 * __swp: atomically exchange `val` with *ptr using the pre-ARMv6 SWP
 * instruction; returns the previous value of *ptr.
 */
static __inline uint32_t
__swp(uint32_t val, volatile uint32_t *ptr)
{
	__asm __volatile("swp %0, %2, [%3]"
	    : "=&r" (val), "=m" (*ptr)
	    : "r" (val), "r" (ptr), "m" (*ptr)
	    : "memory");
	return (val);
}
723 #define ARM_HAVE_ATOMIC64
726 atomic_set_32(volatile uint32_t *address, uint32_t setmask)
728 __with_interrupts_disabled(*address |= setmask);
732 atomic_set_64(volatile uint64_t *address, uint64_t setmask)
734 __with_interrupts_disabled(*address |= setmask);
738 atomic_clear_32(volatile uint32_t *address, uint32_t clearmask)
740 __with_interrupts_disabled(*address &= ~clearmask);
744 atomic_clear_64(volatile uint64_t *address, uint64_t clearmask)
746 __with_interrupts_disabled(*address &= ~clearmask);
749 static __inline u_int32_t
750 atomic_cmpset_32(volatile u_int32_t *p, volatile u_int32_t cmpval, volatile u_int32_t newval)
754 __with_interrupts_disabled(
766 static __inline u_int64_t
767 atomic_cmpset_64(volatile u_int64_t *p, volatile u_int64_t cmpval, volatile u_int64_t newval)
771 __with_interrupts_disabled(
784 atomic_add_32(volatile u_int32_t *p, u_int32_t val)
786 __with_interrupts_disabled(*p += val);
790 atomic_add_64(volatile u_int64_t *p, u_int64_t val)
792 __with_interrupts_disabled(*p += val);
796 atomic_subtract_32(volatile u_int32_t *p, u_int32_t val)
798 __with_interrupts_disabled(*p -= val);
802 atomic_subtract_64(volatile u_int64_t *p, u_int64_t val)
804 __with_interrupts_disabled(*p -= val);
807 static __inline uint32_t
808 atomic_fetchadd_32(volatile uint32_t *p, uint32_t v)
812 __with_interrupts_disabled(
820 static __inline uint64_t
821 atomic_fetchadd_64(volatile uint64_t *p, uint64_t v)
825 __with_interrupts_disabled(
833 static __inline uint64_t
834 atomic_load_64(volatile uint64_t *p)
838 __with_interrupts_disabled(value = *p);
843 atomic_store_64(volatile uint64_t *p, uint64_t value)
845 __with_interrupts_disabled(*p = value);
850 static __inline u_int32_t
851 atomic_cmpset_32(volatile u_int32_t *p, volatile u_int32_t cmpval, volatile u_int32_t newval)
853 register int done, ras_start = ARM_RAS_START;
855 __asm __volatile("1:\n"
866 "mov %1, #0xffffffff\n"
870 : "+r" (ras_start), "=r" (done)
871 ,"+r" (p), "+r" (cmpval), "+r" (newval) : : "cc", "memory");
876 atomic_add_32(volatile u_int32_t *p, u_int32_t val)
878 int start, ras_start = ARM_RAS_START;
880 __asm __volatile("1:\n"
891 "mov %1, #0xffffffff\n"
893 : "+r" (ras_start), "=r" (start), "+r" (p), "+r" (val)
898 atomic_subtract_32(volatile u_int32_t *p, u_int32_t val)
900 int start, ras_start = ARM_RAS_START;
902 __asm __volatile("1:\n"
913 "mov %1, #0xffffffff\n"
916 : "+r" (ras_start), "=r" (start), "+r" (p), "+r" (val)
921 atomic_set_32(volatile uint32_t *address, uint32_t setmask)
923 int start, ras_start = ARM_RAS_START;
925 __asm __volatile("1:\n"
936 "mov %1, #0xffffffff\n"
939 : "+r" (ras_start), "=r" (start), "+r" (address), "+r" (setmask)
944 atomic_clear_32(volatile uint32_t *address, uint32_t clearmask)
946 int start, ras_start = ARM_RAS_START;
948 __asm __volatile("1:\n"
959 "mov %1, #0xffffffff\n"
961 : "+r" (ras_start), "=r" (start), "+r" (address), "+r" (clearmask)
966 static __inline uint32_t
967 atomic_fetchadd_32(volatile uint32_t *p, uint32_t v)
969 uint32_t start, tmp, ras_start = ARM_RAS_START;
971 __asm __volatile("1:\n"
983 "mov %2, #0xffffffff\n"
985 : "+r" (ras_start), "=r" (start), "=r" (tmp), "+r" (p), "+r" (v)
993 static __inline uint32_t
994 atomic_readandclear_32(volatile u_int32_t *p)
997 return (__swp(0, p));
/*
 * On pre-ARMv6 there are no ordering-only barriers in these implementations,
 * so the _acq/_rel variants alias the plain operations.
 */
#define atomic_cmpset_rel_32 atomic_cmpset_32
#define atomic_cmpset_acq_32 atomic_cmpset_32
#define atomic_set_rel_32 atomic_set_32
#define atomic_set_acq_32 atomic_set_32
#define atomic_clear_rel_32 atomic_clear_32
#define atomic_clear_acq_32 atomic_clear_32
#define atomic_add_rel_32 atomic_add_32
#define atomic_add_acq_32 atomic_add_32
#define atomic_subtract_rel_32 atomic_subtract_32
#define atomic_subtract_acq_32 atomic_subtract_32
#define atomic_store_rel_32 atomic_store_32
#define atomic_store_rel_long atomic_store_long
#define atomic_load_acq_32 atomic_load_32
#define atomic_load_acq_long atomic_load_long
#define atomic_add_acq_long atomic_add_long
#define atomic_add_rel_long atomic_add_long
#define atomic_subtract_acq_long atomic_subtract_long
#define atomic_subtract_rel_long atomic_subtract_long
#define atomic_clear_acq_long atomic_clear_long
#define atomic_clear_rel_long atomic_clear_long
#define atomic_set_acq_long atomic_set_long
#define atomic_set_rel_long atomic_set_long
#define atomic_cmpset_acq_long atomic_cmpset_long
#define atomic_cmpset_rel_long atomic_cmpset_long
/* NOTE(review): atomic_load_acq_long is defined identically above as well;
 * the duplicate is harmless (identical redefinition) but could be dropped. */
#define atomic_load_acq_long atomic_load_long
#undef __with_interrupts_disabled
1027 static __inline void
1028 atomic_add_long(volatile u_long *p, u_long v)
1031 atomic_add_32((volatile uint32_t *)p, v);
1034 static __inline void
1035 atomic_clear_long(volatile u_long *p, u_long v)
1038 atomic_clear_32((volatile uint32_t *)p, v);
1042 atomic_cmpset_long(volatile u_long *dst, u_long old, u_long newe)
1045 return (atomic_cmpset_32((volatile uint32_t *)dst, old, newe));
1048 static __inline u_long
1049 atomic_fetchadd_long(volatile u_long *p, u_long v)
1052 return (atomic_fetchadd_32((volatile uint32_t *)p, v));
1055 static __inline void
1056 atomic_readandclear_long(volatile u_long *p)
1059 atomic_readandclear_32((volatile uint32_t *)p);
1062 static __inline void
1063 atomic_set_long(volatile u_long *p, u_long v)
1066 atomic_set_32((volatile uint32_t *)p, v);
1069 static __inline void
1070 atomic_subtract_long(volatile u_long *p, u_long v)
1073 atomic_subtract_32((volatile uint32_t *)p, v);
1078 #endif /* Arch >= v6 */
1081 atomic_load_32(volatile uint32_t *v)
1087 static __inline void
1088 atomic_store_32(volatile uint32_t *dst, uint32_t src)
1094 atomic_load_long(volatile u_long *v)
1100 static __inline void
1101 atomic_store_long(volatile u_long *dst, u_long src)
/*
 * Pointer-width and int-width operations: on 32-bit ARM both are the same
 * size as uint32_t, so they alias the 32-bit implementations.
 */
#define atomic_clear_ptr atomic_clear_32
#define atomic_set_ptr atomic_set_32
#define atomic_cmpset_ptr atomic_cmpset_32
#define atomic_cmpset_rel_ptr atomic_cmpset_rel_32
#define atomic_cmpset_acq_ptr atomic_cmpset_acq_32
#define atomic_store_ptr atomic_store_32
#define atomic_store_rel_ptr atomic_store_rel_32

#define atomic_add_int atomic_add_32
#define atomic_add_acq_int atomic_add_acq_32
#define atomic_add_rel_int atomic_add_rel_32
#define atomic_subtract_int atomic_subtract_32
#define atomic_subtract_acq_int atomic_subtract_acq_32
#define atomic_subtract_rel_int atomic_subtract_rel_32
#define atomic_clear_int atomic_clear_32
#define atomic_clear_acq_int atomic_clear_acq_32
#define atomic_clear_rel_int atomic_clear_rel_32
#define atomic_set_int atomic_set_32
#define atomic_set_acq_int atomic_set_acq_32
#define atomic_set_rel_int atomic_set_rel_32
#define atomic_cmpset_int atomic_cmpset_32
#define atomic_cmpset_acq_int atomic_cmpset_acq_32
#define atomic_cmpset_rel_int atomic_cmpset_rel_32
#define atomic_fetchadd_int atomic_fetchadd_32
#define atomic_readandclear_int atomic_readandclear_32
#define atomic_load_acq_int atomic_load_acq_32
#define atomic_store_rel_int atomic_store_rel_32
1134 #endif /* _MACHINE_ATOMIC_H_ */