/* ===-------- intrin.h ---------------------------------------------------===
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 *
 *===-----------------------------------------------------------------------===
 */

/* Only include this if we're compiling for the windows platform. */
#ifndef _MSC_VER
#include_next <intrin.h>
#else

#ifndef __INTRIN_H
#define __INTRIN_H
/* First include the standard intrinsics. */
#if defined(__i386__) || defined(__x86_64__)
#include <x86intrin.h>
#endif

/* For the definition of jmp_buf. */
#if __STDC_HOSTED__
#include <setjmp.h>
#endif

/* Define the default attributes for the functions in this file. */
#define __DEFAULT_FN_ATTRS __attribute__((__always_inline__, __nodebug__))

#ifdef __cplusplus
extern "C" {
#endif
/* And the random ones that aren't in those files. */
__m64 _m_from_float(float);
float _m_to_float(__m64);
/* Other assorted instruction intrinsics. */
void __addfsbyte(unsigned long, unsigned char);
void __addfsdword(unsigned long, unsigned long);
void __addfsword(unsigned long, unsigned short);
void __code_seg(const char *);
static __inline__
void __cpuid(int[4], int);
static __inline__
void __cpuidex(int[4], int, int);
__int64 __emul(int, int);
unsigned __int64 __emulu(unsigned int, unsigned int);
void __cdecl __fastfail(unsigned int);
unsigned int __getcallerseflags(void);
static __inline__
void __halt(void);
unsigned char __inbyte(unsigned short);
void __inbytestring(unsigned short, unsigned char *, unsigned long);
void __incfsbyte(unsigned long);
void __incfsdword(unsigned long);
void __incfsword(unsigned long);
unsigned long __indword(unsigned short);
void __indwordstring(unsigned short, unsigned long *, unsigned long);
void __invlpg(void *);
unsigned short __inword(unsigned short);
void __inwordstring(unsigned short, unsigned short *, unsigned long);
unsigned __int64 __ll_lshift(unsigned __int64, int);
__int64 __ll_rshift(__int64, int);
void __llwpcb(void *);
unsigned char __lwpins32(unsigned int, unsigned int, unsigned int);
void __lwpval32(unsigned int, unsigned int, unsigned int);
unsigned int __lzcnt(unsigned int);
unsigned short __lzcnt16(unsigned short);
static __inline__
void __movsb(unsigned char *, unsigned char const *, size_t);
static __inline__
void __movsd(unsigned long *, unsigned long const *, size_t);
static __inline__
void __movsw(unsigned short *, unsigned short const *, size_t);
static __inline__
void __nop(void);
void __nvreg_restore_fence(void);
void __nvreg_save_fence(void);
void __outbyte(unsigned short, unsigned char);
void __outbytestring(unsigned short, unsigned char *, unsigned long);
void __outdword(unsigned short, unsigned long);
void __outdwordstring(unsigned short, unsigned long *, unsigned long);
void __outword(unsigned short, unsigned short);
void __outwordstring(unsigned short, unsigned short *, unsigned long);
unsigned long __readcr0(void);
unsigned long __readcr2(void);
static __inline__
unsigned long __readcr3(void);
unsigned long __readcr4(void);
unsigned long __readcr8(void);
unsigned int __readdr(unsigned int);
#ifdef __i386__
static __inline__
unsigned char __readfsbyte(unsigned long);
static __inline__
unsigned __int64 __readfsqword(unsigned long);
static __inline__
unsigned short __readfsword(unsigned long);
#endif
static __inline__
unsigned __int64 __readmsr(unsigned long);
unsigned __int64 __readpmc(unsigned long);
unsigned long __segmentlimit(unsigned long);
void *__slwpcb(void);
void __stosb(unsigned char *, unsigned char, size_t);
static __inline__
void __stosd(unsigned long *, unsigned long, size_t);
static __inline__
void __stosw(unsigned short *, unsigned short, size_t);
void __svm_clgi(void);
void __svm_invlpga(void *, int);
void __svm_skinit(int);
void __svm_stgi(void);
void __svm_vmload(size_t);
void __svm_vmrun(size_t);
void __svm_vmsave(size_t);
unsigned __int64 __ull_rshift(unsigned __int64, int);
void __vmx_off(void);
void __vmx_vmptrst(unsigned __int64 *);
void __writecr0(unsigned int);
static __inline__
void __writecr3(unsigned int);
void __writecr4(unsigned int);
void __writecr8(unsigned int);
void __writedr(unsigned int, unsigned int);
void __writefsbyte(unsigned long, unsigned char);
void __writefsdword(unsigned long, unsigned long);
void __writefsqword(unsigned long, unsigned __int64);
void __writefsword(unsigned long, unsigned short);
void __writemsr(unsigned long, unsigned __int64);

void *_AddressOfReturnAddress(void);
unsigned char _BitScanForward(unsigned long *_Index, unsigned long _Mask);
unsigned char _BitScanReverse(unsigned long *_Index, unsigned long _Mask);
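
/* Usage sketch (illustrative, not part of the original header): _BitScanForward
 * stores the index of the lowest set bit through _Index and returns nonzero
 * only when _Mask has a bit set. The names below are hypothetical.
 *
 *   unsigned long _Index;
 *   if (_BitScanForward(&_Index, 0x50ul))
 *     use_index(_Index);               // 0x50 = 0b1010000, so _Index == 4
 */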
static __inline__
unsigned char _bittest(long const *, long);
static __inline__
unsigned char _bittestandcomplement(long *, long);
static __inline__
unsigned char _bittestandreset(long *, long);
static __inline__
unsigned char _bittestandset(long *, long);
void __cdecl _disable(void);
void __cdecl _enable(void);
long _InterlockedAddLargeStatistic(__int64 volatile *_Addend, long _Value);
unsigned char _interlockedbittestandreset(long volatile *, long);
static __inline__
unsigned char _interlockedbittestandset(long volatile *, long);
long _InterlockedCompareExchange_HLEAcquire(long volatile *, long, long);
long _InterlockedCompareExchange_HLERelease(long volatile *, long, long);
__int64 _InterlockedCompareExchange64_HLEAcquire(__int64 volatile *, __int64,
                                                 __int64);
__int64 _InterlockedCompareExchange64_HLERelease(__int64 volatile *, __int64,
                                                 __int64);
void *_InterlockedCompareExchangePointer_HLEAcquire(void *volatile *, void *,
                                                    void *);
void *_InterlockedCompareExchangePointer_HLERelease(void *volatile *, void *,
                                                    void *);
long _InterlockedExchangeAdd_HLEAcquire(long volatile *, long);
long _InterlockedExchangeAdd_HLERelease(long volatile *, long);
__int64 _InterlockedExchangeAdd64_HLEAcquire(__int64 volatile *, __int64);
__int64 _InterlockedExchangeAdd64_HLERelease(__int64 volatile *, __int64);
void __cdecl _invpcid(unsigned int, void *);
static __inline__ void
__attribute__((__deprecated__("use other intrinsics or C++11 atomics instead")))
_ReadBarrier(void);
static __inline__ void
__attribute__((__deprecated__("use other intrinsics or C++11 atomics instead")))
_ReadWriteBarrier(void);
unsigned int _rorx_u32(unsigned int, const unsigned int);
int _sarx_i32(int, unsigned int);
#if __STDC_HOSTED__
int __cdecl _setjmp(jmp_buf);
#endif
unsigned int _shlx_u32(unsigned int, unsigned int);
unsigned int _shrx_u32(unsigned int, unsigned int);
void _Store_HLERelease(long volatile *, long);
void _Store64_HLERelease(__int64 volatile *, __int64);
void _StorePointer_HLERelease(void *volatile *, void *);
static __inline__ void
__attribute__((__deprecated__("use other intrinsics or C++11 atomics instead")))
_WriteBarrier(void);
unsigned __int32 _xbegin(void);
void _xend(void);
#define _XCR_XFEATURE_ENABLED_MASK 0
static __inline__
unsigned __int64 __cdecl _xgetbv(unsigned int);
void __cdecl _xsetbv(unsigned int, unsigned __int64);
/* These additional intrinsics are turned on in x64/amd64/x86_64 mode. */
#ifdef __x86_64__
void __addgsbyte(unsigned long, unsigned char);
void __addgsdword(unsigned long, unsigned long);
void __addgsqword(unsigned long, unsigned __int64);
void __addgsword(unsigned long, unsigned short);
void __faststorefence(void);
void __incgsbyte(unsigned long);
void __incgsdword(unsigned long);
void __incgsqword(unsigned long);
void __incgsword(unsigned long);
unsigned char __lwpins64(unsigned __int64, unsigned int, unsigned int);
void __lwpval64(unsigned __int64, unsigned int, unsigned int);
unsigned __int64 __lzcnt64(unsigned __int64);
static __inline__
void __movsq(unsigned long long *, unsigned long long const *, size_t);
static __inline__
unsigned char __readgsbyte(unsigned long);
static __inline__
unsigned long __readgsdword(unsigned long);
static __inline__
unsigned __int64 __readgsqword(unsigned long);
static __inline__
unsigned short __readgsword(unsigned long);
unsigned __int64 __shiftleft128(unsigned __int64 _LowPart,
                                unsigned __int64 _HighPart,
                                unsigned char _Shift);
unsigned __int64 __shiftright128(unsigned __int64 _LowPart,
                                 unsigned __int64 _HighPart,
                                 unsigned char _Shift);
static __inline__
void __stosq(unsigned __int64 *, unsigned __int64, size_t);
unsigned char __vmx_on(unsigned __int64 *);
unsigned char __vmx_vmclear(unsigned __int64 *);
unsigned char __vmx_vmlaunch(void);
unsigned char __vmx_vmptrld(unsigned __int64 *);
unsigned char __vmx_vmread(size_t, size_t *);
unsigned char __vmx_vmresume(void);
unsigned char __vmx_vmwrite(size_t, size_t);
void __writegsbyte(unsigned long, unsigned char);
void __writegsdword(unsigned long, unsigned long);
void __writegsqword(unsigned long, unsigned __int64);
void __writegsword(unsigned long, unsigned short);
unsigned char _BitScanForward64(unsigned long *_Index, unsigned __int64 _Mask);
unsigned char _BitScanReverse64(unsigned long *_Index, unsigned __int64 _Mask);
static __inline__
unsigned char _bittest64(__int64 const *, __int64);
static __inline__
unsigned char _bittestandcomplement64(__int64 *, __int64);
static __inline__
unsigned char _bittestandreset64(__int64 *, __int64);
static __inline__
unsigned char _bittestandset64(__int64 *, __int64);
long _InterlockedAnd_np(long volatile *_Value, long _Mask);
short _InterlockedAnd16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedAnd64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedAnd8_np(char volatile *_Value, char _Mask);
unsigned char _interlockedbittestandreset64(__int64 volatile *, __int64);
static __inline__
unsigned char _interlockedbittestandset64(__int64 volatile *, __int64);
long _InterlockedCompareExchange_np(long volatile *_Destination, long _Exchange,
                                    long _Comparand);
unsigned char _InterlockedCompareExchange128(__int64 volatile *_Destination,
                                             __int64 _ExchangeHigh,
                                             __int64 _ExchangeLow,
                                             __int64 *_ComparandResult);
unsigned char _InterlockedCompareExchange128_np(__int64 volatile *_Destination,
                                                __int64 _ExchangeHigh,
                                                __int64 _ExchangeLow,
                                                __int64 *_ComparandResult);
short _InterlockedCompareExchange16_np(short volatile *_Destination,
                                       short _Exchange, short _Comparand);
__int64 _InterlockedCompareExchange64_HLEAcquire(__int64 volatile *, __int64,
                                                 __int64);
__int64 _InterlockedCompareExchange64_HLERelease(__int64 volatile *, __int64,
                                                 __int64);
__int64 _InterlockedCompareExchange64_np(__int64 volatile *_Destination,
                                         __int64 _Exchange, __int64 _Comparand);
void *_InterlockedCompareExchangePointer_np(void *volatile *_Destination,
                                            void *_Exchange, void *_Comparand);
long _InterlockedOr_np(long volatile *_Value, long _Mask);
short _InterlockedOr16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedOr64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedOr8_np(char volatile *_Value, char _Mask);
long _InterlockedXor_np(long volatile *_Value, long _Mask);
short _InterlockedXor16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedXor64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedXor8_np(char volatile *_Value, char _Mask);
unsigned __int64 _rorx_u64(unsigned __int64, const unsigned int);
__int64 _sarx_i64(__int64, unsigned int);
unsigned __int64 _shlx_u64(unsigned __int64, unsigned int);
unsigned __int64 _shrx_u64(unsigned __int64, unsigned int);
__int64 __mulh(__int64, __int64);
unsigned __int64 __umulh(unsigned __int64, unsigned __int64);
__int64 _mul128(__int64, __int64, __int64*);
unsigned __int64 _umul128(unsigned __int64,
                          unsigned __int64,
                          unsigned __int64*);
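
/* Usage sketch (illustrative, not part of the original header): __umulh and
 * _umul128 expose the full 128-bit product of two 64-bit operands; _umul128
 * returns the low half and stores the high half. Names are hypothetical.
 *
 *   unsigned __int64 __hi;
 *   unsigned __int64 __lo = _umul128(0x100000000ULL, 0x100000000ULL, &__hi);
 *   // The product is 2^64, so __lo == 0 and __hi == 1.
 */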
#endif /* __x86_64__ */

#if defined(__x86_64__) || defined(__arm__)

__int64 _InterlockedDecrement64(__int64 volatile *_Addend);
__int64 _InterlockedExchange64(__int64 volatile *_Target, __int64 _Value);
__int64 _InterlockedExchangeAdd64(__int64 volatile *_Addend, __int64 _Value);
__int64 _InterlockedExchangeSub64(__int64 volatile *_Subend, __int64 _Value);
__int64 _InterlockedIncrement64(__int64 volatile *_Addend);
__int64 _InterlockedOr64(__int64 volatile *_Value, __int64 _Mask);
__int64 _InterlockedXor64(__int64 volatile *_Value, __int64 _Mask);
__int64 _InterlockedAnd64(__int64 volatile *_Value, __int64 _Mask);

#endif /* __x86_64__ || __arm__ */
/*----------------------------------------------------------------------------*\
|* Bit Counting and Testing
\*----------------------------------------------------------------------------*/
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittest(long const *_BitBase, long _BitPos) {
  return (*_BitBase >> _BitPos) & 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandcomplement(long *_BitBase, long _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase ^ (1 << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandreset(long *_BitBase, long _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase & ~(1 << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandset(long *_BitBase, long _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase | (1 << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset(long volatile *_BitBase, long _BitPos) {
  long _PrevVal = __atomic_fetch_or(_BitBase, 1l << _BitPos, __ATOMIC_SEQ_CST);
  return (_PrevVal >> _BitPos) & 1;
}
#if defined(__arm__) || defined(__aarch64__)
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset_acq(long volatile *_BitBase, long _BitPos) {
  long _PrevVal = __atomic_fetch_or(_BitBase, 1l << _BitPos, __ATOMIC_ACQUIRE);
  return (_PrevVal >> _BitPos) & 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset_nf(long volatile *_BitBase, long _BitPos) {
  long _PrevVal = __atomic_fetch_or(_BitBase, 1l << _BitPos, __ATOMIC_RELAXED);
  return (_PrevVal >> _BitPos) & 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset_rel(long volatile *_BitBase, long _BitPos) {
  long _PrevVal = __atomic_fetch_or(_BitBase, 1l << _BitPos, __ATOMIC_RELEASE);
  return (_PrevVal >> _BitPos) & 1;
}
#endif
#ifdef __x86_64__
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittest64(__int64 const *_BitBase, __int64 _BitPos) {
  return (*_BitBase >> _BitPos) & 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandcomplement64(__int64 *_BitBase, __int64 _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase ^ (1ll << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandreset64(__int64 *_BitBase, __int64 _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase & ~(1ll << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandset64(__int64 *_BitBase, __int64 _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase | (1ll << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset64(__int64 volatile *_BitBase, __int64 _BitPos) {
  __int64 _PrevVal =
      __atomic_fetch_or(_BitBase, 1ll << _BitPos, __ATOMIC_SEQ_CST);
  return (_PrevVal >> _BitPos) & 1;
}
#endif
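
/* Usage sketch (illustrative, not part of the original header): because
 * _interlockedbittestandset returns the *previous* state of the bit, it can
 * serve as a one-bit try-lock. The variable names are hypothetical.
 *
 *   static volatile long __flags;
 *   if (!_interlockedbittestandset(&__flags, 0))
 *     do_initialization();             // bit 0 was clear; we set it first
 */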
/*----------------------------------------------------------------------------*\
|* Interlocked Exchange Add
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd8_acq(char volatile *_Addend, char _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_ACQUIRE);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd8_nf(char volatile *_Addend, char _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELAXED);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd8_rel(char volatile *_Addend, char _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELEASE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd16_acq(short volatile *_Addend, short _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd16_nf(short volatile *_Addend, short _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd16_rel(short volatile *_Addend, short _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd_acq(long volatile *_Addend, long _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd_nf(long volatile *_Addend, long _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd_rel(long volatile *_Addend, long _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd64_acq(__int64 volatile *_Addend, __int64 _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd64_nf(__int64 volatile *_Addend, __int64 _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd64_rel(__int64 volatile *_Addend, __int64 _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELEASE);
}
#endif
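
/* Usage sketch (illustrative, not part of the original header): the _acq, _rel,
 * and _nf ("no fence") suffixes select acquire, release, or relaxed ordering on
 * ARM. Dropping a reference typically pairs a release decrement with the zero
 * check; names below are hypothetical.
 *
 *   if (_InterlockedExchangeAdd_rel(&__refcount, -1) == 1)
 *     destroy_object();                // we released the last reference
 */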
/*----------------------------------------------------------------------------*\
|* Interlocked Increment
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedIncrement16_acq(short volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedIncrement16_nf(short volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedIncrement16_rel(short volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedIncrement_acq(long volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedIncrement_nf(long volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedIncrement_rel(long volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedIncrement64_acq(__int64 volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedIncrement64_nf(__int64 volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedIncrement64_rel(__int64 volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELEASE);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Decrement
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedDecrement16_acq(short volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedDecrement16_nf(short volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedDecrement16_rel(short volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedDecrement_acq(long volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedDecrement_nf(long volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedDecrement_rel(long volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedDecrement64_acq(__int64 volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedDecrement64_nf(__int64 volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedDecrement64_rel(__int64 volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELEASE);
}
#endif
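
/* Usage sketch (illustrative, not part of the original header): unlike
 * _InterlockedExchangeAdd, the increment/decrement families return the *new*
 * value. Names below are hypothetical.
 *
 *   long __n = _InterlockedIncrement_acq(&__counter);  // was 0: __n == 1
 *   if (_InterlockedDecrement_rel(&__counter) == 0)
 *     ;                                                // back to zero
 */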
/*----------------------------------------------------------------------------*\
|* Interlocked And
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedAnd8_acq(char volatile *_Value, char _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedAnd8_nf(char volatile *_Value, char _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedAnd8_rel(char volatile *_Value, char _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedAnd16_acq(short volatile *_Value, short _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedAnd16_nf(short volatile *_Value, short _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedAnd16_rel(short volatile *_Value, short _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedAnd_acq(long volatile *_Value, long _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedAnd_nf(long volatile *_Value, long _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedAnd_rel(long volatile *_Value, long _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedAnd64_acq(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedAnd64_nf(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedAnd64_rel(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELEASE);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Or
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedOr8_acq(char volatile *_Value, char _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedOr8_nf(char volatile *_Value, char _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedOr8_rel(char volatile *_Value, char _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedOr16_acq(short volatile *_Value, short _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedOr16_nf(short volatile *_Value, short _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedOr16_rel(short volatile *_Value, short _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedOr_acq(long volatile *_Value, long _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedOr_nf(long volatile *_Value, long _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedOr_rel(long volatile *_Value, long _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedOr64_acq(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedOr64_nf(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedOr64_rel(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELEASE);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Xor
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedXor8_acq(char volatile *_Value, char _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedXor8_nf(char volatile *_Value, char _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedXor8_rel(char volatile *_Value, char _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedXor16_acq(short volatile *_Value, short _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedXor16_nf(short volatile *_Value, short _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedXor16_rel(short volatile *_Value, short _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedXor_acq(long volatile *_Value, long _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedXor_nf(long volatile *_Value, long _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedXor_rel(long volatile *_Value, long _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedXor64_acq(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedXor64_nf(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedXor64_rel(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELEASE);
}
#endif
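
/* Usage sketch (illustrative, not part of the original header): the And/Or/Xor
 * families return the value the target held *before* the operation, so the old
 * state of a flag can be tested as it is changed. Names are hypothetical.
 *
 *   long __prev = _InterlockedOr_acq(&__flags, 0x4);
 *   if (__prev & 0x4)
 *     ;                                    // flag was already set
 *   _InterlockedAnd_rel(&__flags, ~0x4l);  // clear it again
 */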
/*----------------------------------------------------------------------------*\
|* Interlocked Exchange
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchange8_acq(char volatile *_Target, char _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_ACQUIRE);
  return _Value;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchange8_nf(char volatile *_Target, char _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELAXED);
  return _Value;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchange8_rel(char volatile *_Target, char _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELEASE);
  return _Value;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchange16_acq(short volatile *_Target, short _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_ACQUIRE);
  return _Value;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchange16_nf(short volatile *_Target, short _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELAXED);
  return _Value;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchange16_rel(short volatile *_Target, short _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELEASE);
  return _Value;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchange_acq(long volatile *_Target, long _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_ACQUIRE);
  return _Value;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchange_nf(long volatile *_Target, long _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELAXED);
  return _Value;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchange_rel(long volatile *_Target, long _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELEASE);
  return _Value;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchange64_acq(__int64 volatile *_Target, __int64 _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_ACQUIRE);
  return _Value;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchange64_nf(__int64 volatile *_Target, __int64 _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELAXED);
  return _Value;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchange64_rel(__int64 volatile *_Target, __int64 _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELEASE);
  return _Value;
}
#endif
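
/* Usage sketch (illustrative, not part of the original header): a minimal
 * test-and-set spinlock built from the acquire/release exchanges. Names are
 * hypothetical.
 *
 *   while (_InterlockedExchange_acq(&__lock, 1))
 *     ;                                  // spin until the old value was 0
 *   ...critical section...
 *   _InterlockedExchange_rel(&__lock, 0);
 */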
/*----------------------------------------------------------------------------*\
|* Interlocked Compare Exchange
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedCompareExchange8_acq(char volatile *_Destination,
                                 char _Exchange, char _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_ACQUIRE);
  return _Comparand;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedCompareExchange8_nf(char volatile *_Destination,
                                char _Exchange, char _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedCompareExchange8_rel(char volatile *_Destination,
                                 char _Exchange, char _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELEASE);
  return _Comparand;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedCompareExchange16_acq(short volatile *_Destination,
                                  short _Exchange, short _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_ACQUIRE);
  return _Comparand;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedCompareExchange16_nf(short volatile *_Destination,
                                 short _Exchange, short _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedCompareExchange16_rel(short volatile *_Destination,
                                  short _Exchange, short _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELEASE);
  return _Comparand;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedCompareExchange_acq(long volatile *_Destination,
                                long _Exchange, long _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_ACQUIRE);
  return _Comparand;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedCompareExchange_nf(long volatile *_Destination,
                               long _Exchange, long _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedCompareExchange_rel(long volatile *_Destination,
                                long _Exchange, long _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELEASE);
  return _Comparand;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedCompareExchange64_acq(__int64 volatile *_Destination,
                                  __int64 _Exchange, __int64 _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_ACQUIRE);
  return _Comparand;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedCompareExchange64_nf(__int64 volatile *_Destination,
                                 __int64 _Exchange, __int64 _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedCompareExchange64_rel(__int64 volatile *_Destination,
                                  __int64 _Exchange, __int64 _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELEASE);
  return _Comparand;
}
#endif
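
/* Usage sketch (illustrative, not part of the original header): these return
 * the value actually read from _Destination, so a compare-and-swap loop retries
 * until that value matches the comparand it started from. Names are
 * hypothetical.
 *
 *   long __old, __new;
 *   do {
 *     __old = __counter;
 *     __new = __old * 2;
 *   } while (_InterlockedCompareExchange_acq(&__counter, __new, __old) != __old);
 */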
/*----------------------------------------------------------------------------*\
|* readfs, readgs
|* (Pointers in address space #256 and #257 are relative to the GS and FS
|* segment registers, respectively.)
\*----------------------------------------------------------------------------*/
#define __ptr_to_addr_space(__addr_space_nbr, __type, __offset)              \
    ((volatile __type __attribute__((__address_space__(__addr_space_nbr)))*) \
    (__offset))

#ifdef __i386__
static __inline__ unsigned char __DEFAULT_FN_ATTRS
__readfsbyte(unsigned long __offset) {
  return *__ptr_to_addr_space(257, unsigned char, __offset);
}
static __inline__ unsigned short __DEFAULT_FN_ATTRS
__readfsword(unsigned long __offset) {
  return *__ptr_to_addr_space(257, unsigned short, __offset);
}
static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
__readfsqword(unsigned long __offset) {
  return *__ptr_to_addr_space(257, unsigned __int64, __offset);
}
#endif
#ifdef __x86_64__
static __inline__ unsigned char __DEFAULT_FN_ATTRS
__readgsbyte(unsigned long __offset) {
  return *__ptr_to_addr_space(256, unsigned char, __offset);
}
static __inline__ unsigned short __DEFAULT_FN_ATTRS
__readgsword(unsigned long __offset) {
  return *__ptr_to_addr_space(256, unsigned short, __offset);
}
static __inline__ unsigned long __DEFAULT_FN_ATTRS
__readgsdword(unsigned long __offset) {
  return *__ptr_to_addr_space(256, unsigned long, __offset);
}
static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
__readgsqword(unsigned long __offset) {
  return *__ptr_to_addr_space(256, unsigned __int64, __offset);
}
#endif
#undef __ptr_to_addr_space
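
/* Usage sketch (illustrative, not part of the original header): the documented
 * NT_TIB layout stores a pointer to the current TEB at offset 0x30 from GS on
 * x64 (and at offset 0x18 from FS on x86), so the usual "current TEB" idiom is:
 *
 *   void *__teb = (void *)__readgsqword(0x30);   // x64 only
 */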
/*----------------------------------------------------------------------------*\
|* movs, stos
\*----------------------------------------------------------------------------*/
#if defined(__i386__) || defined(__x86_64__)
static __inline__ void __DEFAULT_FN_ATTRS
__movsb(unsigned char *__dst, unsigned char const *__src, size_t __n) {
  __asm__("rep movsb" : : "D"(__dst), "S"(__src), "c"(__n));
}
static __inline__ void __DEFAULT_FN_ATTRS
__movsd(unsigned long *__dst, unsigned long const *__src, size_t __n) {
  __asm__("rep movsl" : : "D"(__dst), "S"(__src), "c"(__n));
}
static __inline__ void __DEFAULT_FN_ATTRS
__movsw(unsigned short *__dst, unsigned short const *__src, size_t __n) {
  __asm__("rep movsw" : : "D"(__dst), "S"(__src), "c"(__n));
}
static __inline__ void __DEFAULT_FN_ATTRS
__stosd(unsigned long *__dst, unsigned long __x, size_t __n) {
  __asm__("rep stosl" : : "D"(__dst), "a"(__x), "c"(__n));
}
static __inline__ void __DEFAULT_FN_ATTRS
__stosw(unsigned short *__dst, unsigned short __x, size_t __n) {
  __asm__("rep stosw" : : "D"(__dst), "a"(__x), "c"(__n));
}
#endif
#ifdef __x86_64__
static __inline__ void __DEFAULT_FN_ATTRS
__movsq(unsigned long long *__dst, unsigned long long const *__src, size_t __n) {
  __asm__("rep movsq" : : "D"(__dst), "S"(__src), "c"(__n));
}
static __inline__ void __DEFAULT_FN_ATTRS
__stosq(unsigned __int64 *__dst, unsigned __int64 __x, size_t __n) {
  __asm__("rep stosq" : : "D"(__dst), "a"(__x), "c"(__n));
}
#endif
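
/* Usage sketch (illustrative, not part of the original header): these lower to
 * rep-prefixed string instructions, and the count is in elements, not bytes.
 * Names are hypothetical.
 *
 *   unsigned long __src[64], __dst[64];
 *   __stosd(__src, 0xDEADBEEFul, 64);   // fill 64 dwords
 *   __movsd(__dst, __src, 64);          // copy 64 dwords
 */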
/*----------------------------------------------------------------------------*\
|* misc
\*----------------------------------------------------------------------------*/
#if defined(__i386__) || defined(__x86_64__)
static __inline__ void __DEFAULT_FN_ATTRS
__cpuid(int __info[4], int __level) {
  __asm__ ("cpuid" : "=a"(__info[0]), "=b" (__info[1]), "=c"(__info[2]), "=d"(__info[3])
                   : "a"(__level));
}
static __inline__ void __DEFAULT_FN_ATTRS
__cpuidex(int __info[4], int __level, int __ecx) {
  __asm__ ("cpuid" : "=a"(__info[0]), "=b" (__info[1]), "=c"(__info[2]), "=d"(__info[3])
                   : "a"(__level), "c"(__ecx));
}
static __inline__ unsigned __int64 __cdecl __DEFAULT_FN_ATTRS
_xgetbv(unsigned int __xcr_no) {
  unsigned int __eax, __edx;
  __asm__ ("xgetbv" : "=a" (__eax), "=d" (__edx) : "c" (__xcr_no));
  return ((unsigned __int64)__edx << 32) | __eax;
}
static __inline__ void __DEFAULT_FN_ATTRS
__halt(void) {
  __asm__ volatile ("hlt");
}
static __inline__ void __DEFAULT_FN_ATTRS
__nop(void) {
  __asm__ volatile ("nop");
}
#endif
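
/* Usage sketch (illustrative, not part of the original header): the standard
 * AVX detection sequence combines __cpuid with _xgetbv. CPUID.1:ECX bit 28
 * reports AVX, bit 27 reports OSXSAVE, and XCR0 bits 1-2 (0x6) confirm that the
 * OS saves XMM/YMM state. Names are hypothetical.
 *
 *   int __info[4];
 *   __cpuid(__info, 1);
 *   int __avx = (__info[2] & (1 << 28)) && (__info[2] & (1 << 27)) &&
 *               ((_xgetbv(_XCR_XFEATURE_ENABLED_MASK) & 0x6) == 0x6);
 */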
/*----------------------------------------------------------------------------*\
|* Privileged intrinsics
\*----------------------------------------------------------------------------*/
#if defined(__i386__) || defined(__x86_64__)
static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
__readmsr(unsigned long __register) {
  // Loads the contents of a 64-bit model specific register (MSR) specified in
  // the ECX register into registers EDX:EAX. The EDX register is loaded with
  // the high-order 32 bits of the MSR and the EAX register is loaded with the
  // low-order 32 bits. If less than 64 bits are implemented in the MSR being
  // read, the values returned to EDX:EAX in unimplemented bit locations are
  // undefined.
  unsigned long __edx;
  unsigned long __eax;
  __asm__ ("rdmsr" : "=d"(__edx), "=a"(__eax) : "c"(__register));
  return (((unsigned __int64)__edx) << 32) | (unsigned __int64)__eax;
}

static __inline__ unsigned long __DEFAULT_FN_ATTRS
__readcr3(void) {
  unsigned long __cr3_val;
  __asm__ __volatile__ ("mov %%cr3, %0" : "=q"(__cr3_val) : : "memory");
  return __cr3_val;
}

static __inline__ void __DEFAULT_FN_ATTRS
__writecr3(unsigned int __cr3_val) {
  __asm__ ("mov %0, %%cr3" : : "q"(__cr3_val) : "memory");
}
#endif
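
/* Usage sketch (illustrative, not part of the original header): rdmsr and
 * control-register moves fault outside ring 0, so these are only usable from
 * kernel-mode code. The MSR number below is the architectural IA32_APIC_BASE.
 *
 *   unsigned __int64 __apic_base = __readmsr(0x1B);
 */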
#ifdef __cplusplus
}
#endif

#undef __DEFAULT_FN_ATTRS

#endif /* __INTRIN_H */
#endif /* _MSC_VER */