/*-
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD$
 */

#ifndef _MACHINE_ATOMIC_H_
#define _MACHINE_ATOMIC_H_

#include <machine/alpha_cpu.h>

/*
 * Various simple arithmetic operations on memory, atomic in the
 * presence of interrupts and safe on SMP systems.
 */

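/*
 * Alpha has no byte or word load-locked/store-conditional instructions,
 * so the 8- and 16-bit variants are implemented out of line and only
 * declared here.
 *
 * Each inline 32- and 64-bit operation below is built from the same
 * load-locked/store-conditional retry loop (ldl_l/stl_c or
 * ldq_l/stq_c), roughly equivalent to this sketch:
 *
 *	do {
 *		temp = *p;		(load-locked: load and set lock flag)
 *		temp = temp OP v;	(compute the new value)
 *	} while (store-conditional of temp to *p failed);
 *	mb;				(drain the new value to memory)
 *
 * The store-conditional stores nothing and writes 0 to its source
 * register if the location was written by another processor since the
 * load-locked; the sequence then branches back and retries, so the
 * whole read-modify-write is atomic.
 */
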
void atomic_set_8(volatile u_int8_t *, u_int8_t);
void atomic_clear_8(volatile u_int8_t *, u_int8_t);
void atomic_add_8(volatile u_int8_t *, u_int8_t);
void atomic_subtract_8(volatile u_int8_t *, u_int8_t);

void atomic_set_16(volatile u_int16_t *, u_int16_t);
void atomic_clear_16(volatile u_int16_t *, u_int16_t);
void atomic_add_16(volatile u_int16_t *, u_int16_t);
void atomic_subtract_16(volatile u_int16_t *, u_int16_t);

static __inline void atomic_set_32(volatile u_int32_t *p, u_int32_t v)
{
        u_int32_t temp;

#ifdef __GNUC__
        __asm __volatile (
                "1:\tldl_l %0, %1\n\t"          /* load old value */
                "bis %0, %3, %0\n\t"            /* calculate new value */
                "stl_c %0, %1\n\t"              /* attempt to store */
                "beq %0, 2f\n\t"                /* spin if failed */
                "mb\n\t"                        /* drain to memory */
                ".section .text3,\"ax\"\n"      /* improve branch prediction */
                "2:\tbr 1b\n"                   /* try again */
                ".previous\n"
                : "=&r" (temp), "+m" (*p)
                : "r" (v)
                : "memory");
#endif
}
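
/*
 * Note the failure branch in these sequences: the "beq" falls through
 * on the common, successful path, and its rarely taken target lives in
 * the separate .text3 section.  Keeping the retry branch out of line
 * keeps the hot path straight, which is what the "improve branch
 * prediction" comments refer to.
 */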

static __inline void atomic_clear_32(volatile u_int32_t *p, u_int32_t v)
{
        u_int32_t temp;

#ifdef __GNUC__
        __asm __volatile (
                "1:\tldl_l %0, %1\n\t"          /* load old value */
                "bic %0, %2, %0\n\t"            /* calculate new value */
                "stl_c %0, %1\n\t"              /* attempt to store */
                "beq %0, 2f\n\t"                /* spin if failed */
                "mb\n\t"                        /* drain to memory */
                ".section .text3,\"ax\"\n"      /* improve branch prediction */
                "2:\tbr 1b\n"                   /* try again */
                ".previous\n"
                : "=&r" (temp), "+m" (*p)
                : "r" (v)
                : "memory");
#endif
}

static __inline void atomic_add_32(volatile u_int32_t *p, u_int32_t v)
{
        u_int32_t temp;

#ifdef __GNUC__
        __asm __volatile (
                "1:\tldl_l %0, %1\n\t"          /* load old value */
                "addl %0, %2, %0\n\t"           /* calculate new value */
                "stl_c %0, %1\n\t"              /* attempt to store */
                "beq %0, 2f\n\t"                /* spin if failed */
                "mb\n\t"                        /* drain to memory */
                ".section .text3,\"ax\"\n"      /* improve branch prediction */
                "2:\tbr 1b\n"                   /* try again */
                ".previous\n"
                : "=&r" (temp), "+m" (*p)
                : "r" (v)
                : "memory");
#endif
}

static __inline void atomic_subtract_32(volatile u_int32_t *p, u_int32_t v)
{
        u_int32_t temp;

#ifdef __GNUC__
        __asm __volatile (
                "1:\tldl_l %0, %1\n\t"          /* load old value */
                "subl %0, %2, %0\n\t"           /* calculate new value */
                "stl_c %0, %1\n\t"              /* attempt to store */
                "beq %0, 2f\n\t"                /* spin if failed */
                "mb\n\t"                        /* drain to memory */
                ".section .text3,\"ax\"\n"      /* improve branch prediction */
                "2:\tbr 1b\n"                   /* try again */
                ".previous\n"
                : "=&r" (temp), "+m" (*p)
                : "r" (v)
                : "memory");
#endif
}

static __inline u_int32_t atomic_readandclear_32(volatile u_int32_t *addr)
{
        u_int32_t result, temp;

#ifdef __GNUC__
        __asm __volatile (
                "wmb\n"                 /* ensure pending writes have drained */
                "1:\tldl_l %0,%2\n\t"   /* load current value, asserting lock */
                "ldiq %1,0\n\t"         /* value to store */
                "stl_c %1,%2\n\t"       /* attempt to store */
                "beq %1,2f\n\t"         /* if the store failed, spin */
                "br 3f\n"               /* it worked, exit */
                "2:\tbr 1b\n"           /* *addr not updated, loop */
                "3:\tmb\n"              /* it worked */
                : "=&r"(result), "=&r"(temp), "+m" (*addr)
                :
                : "memory");
#endif

        return result;
}
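
/*
 * atomic_readandclear_32() atomically exchanges *addr with zero and
 * returns its previous contents; the leading wmb ensures this
 * processor's earlier writes have drained before the exchange.  A
 * typical (hypothetical) use is draining a word of pending-event bits:
 *
 *	mask = atomic_readandclear_32(&pending);
 *	... handle each bit set in mask ...
 */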

static __inline void atomic_set_64(volatile u_int64_t *p, u_int64_t v)
{
        u_int64_t temp;

#ifdef __GNUC__
        __asm __volatile (
                "1:\tldq_l %0, %1\n\t"          /* load old value */
                "bis %0, %2, %0\n\t"            /* calculate new value */
                "stq_c %0, %1\n\t"              /* attempt to store */
                "beq %0, 2f\n\t"                /* spin if failed */
                "mb\n\t"                        /* drain to memory */
                ".section .text3,\"ax\"\n"      /* improve branch prediction */
                "2:\tbr 1b\n"                   /* try again */
                ".previous\n"
                : "=&r" (temp), "+m" (*p)
                : "r" (v)
                : "memory");
#endif
}

static __inline void atomic_clear_64(volatile u_int64_t *p, u_int64_t v)
{
        u_int64_t temp;

#ifdef __GNUC__
        __asm __volatile (
                "1:\tldq_l %0, %1\n\t"          /* load old value */
                "bic %0, %2, %0\n\t"            /* calculate new value */
                "stq_c %0, %1\n\t"              /* attempt to store */
                "beq %0, 2f\n\t"                /* spin if failed */
                "mb\n\t"                        /* drain to memory */
                ".section .text3,\"ax\"\n"      /* improve branch prediction */
                "2:\tbr 1b\n"                   /* try again */
                ".previous\n"
                : "=&r" (temp), "+m" (*p)
                : "r" (v)
                : "memory");
#endif
}

static __inline void atomic_add_64(volatile u_int64_t *p, u_int64_t v)
{
        u_int64_t temp;

#ifdef __GNUC__
        __asm __volatile (
                "1:\tldq_l %0, %1\n\t"          /* load old value */
                "addq %0, %2, %0\n\t"           /* calculate new value */
                "stq_c %0, %1\n\t"              /* attempt to store */
                "beq %0, 2f\n\t"                /* spin if failed */
                "mb\n\t"                        /* drain to memory */
                ".section .text3,\"ax\"\n"      /* improve branch prediction */
                "2:\tbr 1b\n"                   /* try again */
                ".previous\n"
                : "=&r" (temp), "+m" (*p)
                : "r" (v)
                : "memory");
#endif
}

static __inline void atomic_subtract_64(volatile u_int64_t *p, u_int64_t v)
{
        u_int64_t temp;

#ifdef __GNUC__
        __asm __volatile (
                "1:\tldq_l %0, %1\n\t"          /* load old value */
                "subq %0, %2, %0\n\t"           /* calculate new value */
                "stq_c %0, %1\n\t"              /* attempt to store */
                "beq %0, 2f\n\t"                /* spin if failed */
                "mb\n\t"                        /* drain to memory */
                ".section .text3,\"ax\"\n"      /* improve branch prediction */
                "2:\tbr 1b\n"                   /* try again */
                ".previous\n"
                : "=&r" (temp), "+m" (*p)
                : "r" (v)
                : "memory");
#endif
}

static __inline u_int64_t atomic_readandclear_64(volatile u_int64_t *addr)
{
        u_int64_t result, temp;

#ifdef __GNUC__
        __asm __volatile (
                "wmb\n"                 /* ensure pending writes have drained */
                "1:\tldq_l %0,%2\n\t"   /* load current value, asserting lock */
                "ldiq %1,0\n\t"         /* value to store */
                "stq_c %1,%2\n\t"       /* attempt to store */
                "beq %1,2f\n\t"         /* if the store failed, spin */
                "br 3f\n"               /* it worked, exit */
                "2:\tbr 1b\n"           /* *addr not updated, loop */
                "3:\tmb\n"              /* it worked */
                : "=&r"(result), "=&r"(temp), "+m" (*addr)
                :
                : "memory");
#endif

        return result;
}

#define atomic_set_char         atomic_set_8
#define atomic_clear_char       atomic_clear_8
#define atomic_add_char         atomic_add_8
#define atomic_subtract_char    atomic_subtract_8

#define atomic_set_short        atomic_set_16
#define atomic_clear_short      atomic_clear_16
#define atomic_add_short        atomic_add_16
#define atomic_subtract_short   atomic_subtract_16

#define atomic_set_int          atomic_set_32
#define atomic_clear_int        atomic_clear_32
#define atomic_add_int          atomic_add_32
#define atomic_subtract_int     atomic_subtract_32
#define atomic_readandclear_int atomic_readandclear_32

#define atomic_set_long         atomic_set_64
#define atomic_clear_long       atomic_clear_64
#define atomic_add_long         atomic_add_64
#define atomic_subtract_long    atomic_subtract_64
#define atomic_readandclear_long        atomic_readandclear_64

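/*
 * Generate _acq (acquire) and _rel (release) variants of each
 * operation, named both by width and by C type.  The inline sequences
 * above already end in an "mb", which is presumably why the trailing
 * barrier in the _acq variants can stay commented out; the _rel
 * variants issue alpha_mb() first so that all earlier accesses are
 * performed before the operation itself.
 */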
#define ATOMIC_ACQ_REL(NAME, WIDTH, TYPE)                               \
static __inline void                                                    \
atomic_##NAME##_acq_##WIDTH(volatile u_int##WIDTH##_t *p, u_int##WIDTH##_t v)\
{                                                                       \
        atomic_##NAME##_##WIDTH(p, v);                                  \
        /* alpha_mb(); */                                               \
}                                                                       \
                                                                        \
static __inline void                                                    \
atomic_##NAME##_rel_##WIDTH(volatile u_int##WIDTH##_t *p, u_int##WIDTH##_t v)\
{                                                                       \
        alpha_mb();                                                     \
        atomic_##NAME##_##WIDTH(p, v);                                  \
}                                                                       \
                                                                        \
static __inline void                                                    \
atomic_##NAME##_acq_##TYPE(volatile u_int##WIDTH##_t *p, u_int##WIDTH##_t v)\
{                                                                       \
        atomic_##NAME##_##WIDTH(p, v);                                  \
        /* alpha_mb(); */                                               \
}                                                                       \
                                                                        \
static __inline void                                                    \
atomic_##NAME##_rel_##TYPE(volatile u_int##WIDTH##_t *p, u_int##WIDTH##_t v)\
{                                                                       \
        alpha_mb();                                                     \
        atomic_##NAME##_##WIDTH(p, v);                                  \
}

ATOMIC_ACQ_REL(set, 8, char)
ATOMIC_ACQ_REL(clear, 8, char)
ATOMIC_ACQ_REL(add, 8, char)
ATOMIC_ACQ_REL(subtract, 8, char)
ATOMIC_ACQ_REL(set, 16, short)
ATOMIC_ACQ_REL(clear, 16, short)
ATOMIC_ACQ_REL(add, 16, short)
ATOMIC_ACQ_REL(subtract, 16, short)
ATOMIC_ACQ_REL(set, 32, int)
ATOMIC_ACQ_REL(clear, 32, int)
ATOMIC_ACQ_REL(add, 32, int)
ATOMIC_ACQ_REL(subtract, 32, int)
ATOMIC_ACQ_REL(set, 64, long)
ATOMIC_ACQ_REL(clear, 64, long)
ATOMIC_ACQ_REL(add, 64, long)
ATOMIC_ACQ_REL(subtract, 64, long)

#undef ATOMIC_ACQ_REL

/*
 * We assume that a = b will do atomic loads and stores.
 */
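/*
 * Since Alpha's memory system is weakly ordered, the barrier is what
 * supplies the acquire/release semantics: a plain load followed by
 * alpha_mb() keeps later accesses from being performed before the
 * load, and an alpha_mb() ahead of a plain store keeps earlier
 * accesses from being delayed past the store.
 */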
#define ATOMIC_STORE_LOAD(TYPE, WIDTH)                  \
static __inline u_##TYPE                                \
atomic_load_acq_##WIDTH(volatile u_##TYPE *p)           \
{                                                       \
        u_##TYPE v;                                     \
                                                        \
        v = *p;                                         \
        alpha_mb();                                     \
        return (v);                                     \
}                                                       \
                                                        \
static __inline void                                    \
atomic_store_rel_##WIDTH(volatile u_##TYPE *p, u_##TYPE v)\
{                                                       \
        alpha_mb();                                     \
        *p = v;                                         \
}                                                       \
static __inline u_##TYPE                                \
atomic_load_acq_##TYPE(volatile u_##TYPE *p)            \
{                                                       \
        u_##TYPE v;                                     \
                                                        \
        v = *p;                                         \
        alpha_mb();                                     \
        return (v);                                     \
}                                                       \
                                                        \
static __inline void                                    \
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{                                                       \
        alpha_mb();                                     \
        *p = v;                                         \
}

ATOMIC_STORE_LOAD(char,         8)
ATOMIC_STORE_LOAD(short,        16)
ATOMIC_STORE_LOAD(int,          32)
ATOMIC_STORE_LOAD(long,         64)

#undef ATOMIC_STORE_LOAD

/*
 * Atomically compare the value stored at *p with cmpval and if the
 * two values are equal, update the value of *p with newval. Returns
 * zero if the compare failed, nonzero otherwise.
 */
static __inline u_int32_t
atomic_cmpset_32(volatile u_int32_t* p, u_int32_t cmpval, u_int32_t newval)
{
        u_int32_t ret;

#ifdef __GNUC__
        __asm __volatile (
                "1:\tldl_l %0, %1\n\t"          /* load old value */
                "cmpeq %0, %2, %0\n\t"          /* compare */
                "beq %0, 2f\n\t"                /* exit if not equal */
                "mov %3, %0\n\t"                /* value to store */
                "stl_c %0, %1\n\t"              /* attempt to store */
                "beq %0, 3f\n\t"                /* if it failed, spin */
                "mb\n\t"                        /* drain to memory */
                "2:\n"                          /* done */
                ".section .text3,\"ax\"\n"      /* improve branch prediction */
                "3:\tbr 1b\n"                   /* try again */
                ".previous\n"
                : "=&r" (ret), "+m" (*p)
                : "r" ((long)(int)cmpval), "r" (newval)
                : "memory");
#endif

        return ret;
}
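
/*
 * The (long)(int) cast above sign-extends cmpval: ldl_l sign-extends
 * the 32-bit value it loads into a 64-bit register, so cmpval must be
 * extended the same way for cmpeq to compare like with like.
 *
 * cmpset is the usual building block for lock-free updates; for
 * example, an atomic increment can be written (hypothetically) as a
 * compare-and-swap loop:
 *
 *	do {
 *		old = *counter;
 *	} while (atomic_cmpset_32(counter, old, old + 1) == 0);
 */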

/*
 * Atomically compare the value stored at *p with cmpval and if the
 * two values are equal, update the value of *p with newval. Returns
 * zero if the compare failed, nonzero otherwise.
 */
static __inline u_int64_t
atomic_cmpset_64(volatile u_int64_t* p, u_int64_t cmpval, u_int64_t newval)
{
        u_int64_t ret;

#ifdef __GNUC__
        __asm __volatile (
                "1:\tldq_l %0, %1\n\t"          /* load old value */
                "cmpeq %0, %2, %0\n\t"          /* compare */
                "beq %0, 2f\n\t"                /* exit if not equal */
                "mov %3, %0\n\t"                /* value to store */
                "stq_c %0, %1\n\t"              /* attempt to store */
                "beq %0, 3f\n\t"                /* if it failed, spin */
                "mb\n\t"                        /* drain to memory */
                "2:\n"                          /* done */
                ".section .text3,\"ax\"\n"      /* improve branch prediction */
                "3:\tbr 1b\n"                   /* try again */
                ".previous\n"
                : "=&r" (ret), "+m" (*p)
                : "r" (cmpval), "r" (newval)
                : "memory");
#endif

        return ret;
}

#define atomic_cmpset_int       atomic_cmpset_32
#define atomic_cmpset_long      atomic_cmpset_64

static __inline int
atomic_cmpset_ptr(volatile void *dst, void *exp, void *src)
{

        return (atomic_cmpset_long((volatile u_long *)dst, (u_long)exp,
            (u_long)src));
}

static __inline u_int32_t
atomic_cmpset_acq_32(volatile u_int32_t *p, u_int32_t cmpval, u_int32_t newval)
{
        u_int32_t retval;

        retval = atomic_cmpset_32(p, cmpval, newval);
        alpha_mb();
        return (retval);
}

static __inline u_int32_t
atomic_cmpset_rel_32(volatile u_int32_t *p, u_int32_t cmpval, u_int32_t newval)
{
        alpha_mb();
        return (atomic_cmpset_32(p, cmpval, newval));
}

static __inline u_int64_t
atomic_cmpset_acq_64(volatile u_int64_t *p, u_int64_t cmpval, u_int64_t newval)
{
        u_int64_t retval;

        retval = atomic_cmpset_64(p, cmpval, newval);
        alpha_mb();
        return (retval);
}

static __inline u_int64_t
atomic_cmpset_rel_64(volatile u_int64_t *p, u_int64_t cmpval, u_int64_t newval)
{
        alpha_mb();
        return (atomic_cmpset_64(p, cmpval, newval));
}

#define atomic_cmpset_acq_int   atomic_cmpset_acq_32
#define atomic_cmpset_rel_int   atomic_cmpset_rel_32
#define atomic_cmpset_acq_long  atomic_cmpset_acq_64
#define atomic_cmpset_rel_long  atomic_cmpset_rel_64

static __inline int
atomic_cmpset_acq_ptr(volatile void *dst, void *exp, void *src)
{

        return (atomic_cmpset_acq_long((volatile u_long *)dst, (u_long)exp,
            (u_long)src));
}

static __inline int
atomic_cmpset_rel_ptr(volatile void *dst, void *exp, void *src)
{

        return (atomic_cmpset_rel_long((volatile u_long *)dst, (u_long)exp,
            (u_long)src));
}

static __inline void *
atomic_load_acq_ptr(volatile void *p)
{
        return (void *)atomic_load_acq_long((volatile u_long *)p);
}

static __inline void
atomic_store_rel_ptr(volatile void *p, void *v)
{
        atomic_store_rel_long((volatile u_long *)p, (u_long)v);
}

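/*
 * The pointer variants simply forward to the 64-bit (long) operations,
 * since pointers, longs, and u_longs are all 64 bits wide on Alpha.
 */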
#define ATOMIC_PTR(NAME)                                \
static __inline void                                    \
atomic_##NAME##_ptr(volatile void *p, uintptr_t v)      \
{                                                       \
        atomic_##NAME##_long((volatile u_long *)p, v);  \
}                                                       \
                                                        \
static __inline void                                    \
atomic_##NAME##_acq_ptr(volatile void *p, uintptr_t v)  \
{                                                       \
        atomic_##NAME##_acq_long((volatile u_long *)p, v);\
}                                                       \
                                                        \
static __inline void                                    \
atomic_##NAME##_rel_ptr(volatile void *p, uintptr_t v)  \
{                                                       \
        atomic_##NAME##_rel_long((volatile u_long *)p, v);\
}

ATOMIC_PTR(set)
ATOMIC_PTR(clear)
ATOMIC_PTR(add)
ATOMIC_PTR(subtract)

#undef ATOMIC_PTR

#endif /* ! _MACHINE_ATOMIC_H_ */