/* ===-------- intrin.h ---------------------------------------------------===
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 *
 *===-----------------------------------------------------------------------===
 */
/* Only include this if we're compiling for the windows platform. */
#ifndef _MSC_VER
#include_next <intrin.h>
#else

#ifndef __INTRIN_H
#define __INTRIN_H

/* First include the standard intrinsics. */
#if defined(__i386__) || defined(__x86_64__)
#include <x86intrin.h>
#endif

/* For the definition of jmp_buf. */
#include <setjmp.h>

/* Define the default attributes for the functions in this file. */
#define __DEFAULT_FN_ATTRS __attribute__((__always_inline__, __nodebug__))

#ifdef __cplusplus
extern "C" {
#endif
#if defined(__MMX__)
/* And the random ones that aren't in those files. */
__m64 _m_from_float(float);
float _m_to_float(__m64);
#endif
/* Other assorted instruction intrinsics. */
void __addfsbyte(unsigned long, unsigned char);
void __addfsdword(unsigned long, unsigned long);
void __addfsword(unsigned long, unsigned short);
void __code_seg(const char *);
static __inline__
void __cpuid(int[4], int);
static __inline__
void __cpuidex(int[4], int, int);
void __debugbreak(void);
__int64 __emul(int, int);
unsigned __int64 __emulu(unsigned int, unsigned int);
void __cdecl __fastfail(unsigned int);
unsigned int __getcallerseflags(void);
static __inline__
void __halt(void);
unsigned char __inbyte(unsigned short);
void __inbytestring(unsigned short, unsigned char *, unsigned long);
void __incfsbyte(unsigned long);
void __incfsdword(unsigned long);
void __incfsword(unsigned long);
unsigned long __indword(unsigned short);
void __indwordstring(unsigned short, unsigned long *, unsigned long);
void __invlpg(void *);
unsigned short __inword(unsigned short);
void __inwordstring(unsigned short, unsigned short *, unsigned long);
unsigned __int64 __ll_lshift(unsigned __int64, int);
__int64 __ll_rshift(__int64, int);
void __llwpcb(void *);
unsigned char __lwpins32(unsigned int, unsigned int, unsigned int);
void __lwpval32(unsigned int, unsigned int, unsigned int);
unsigned int __lzcnt(unsigned int);
unsigned short __lzcnt16(unsigned short);
static __inline__
void __movsb(unsigned char *, unsigned char const *, size_t);
static __inline__
void __movsd(unsigned long *, unsigned long const *, size_t);
static __inline__
void __movsw(unsigned short *, unsigned short const *, size_t);
static __inline__
void __nop(void);
void __nvreg_restore_fence(void);
void __nvreg_save_fence(void);
void __outbyte(unsigned short, unsigned char);
void __outbytestring(unsigned short, unsigned char *, unsigned long);
void __outdword(unsigned short, unsigned long);
void __outdwordstring(unsigned short, unsigned long *, unsigned long);
void __outword(unsigned short, unsigned short);
void __outwordstring(unsigned short, unsigned short *, unsigned long);
unsigned int __popcnt(unsigned int);
unsigned short __popcnt16(unsigned short);
unsigned long __readcr0(void);
unsigned long __readcr2(void);
static __inline__
unsigned long __readcr3(void);
unsigned long __readcr4(void);
unsigned long __readcr8(void);
unsigned int __readdr(unsigned int);
#ifdef __i386__
static __inline__
unsigned char __readfsbyte(unsigned long);
static __inline__
unsigned long __readfsdword(unsigned long);
static __inline__
unsigned __int64 __readfsqword(unsigned long);
static __inline__
unsigned short __readfsword(unsigned long);
#endif
static __inline__
unsigned __int64 __readmsr(unsigned long);
unsigned __int64 __readpmc(unsigned long);
unsigned long __segmentlimit(unsigned long);
void *__slwpcb(void);
void __stosb(unsigned char *, unsigned char, size_t);
static __inline__
void __stosd(unsigned long *, unsigned long, size_t);
static __inline__
void __stosw(unsigned short *, unsigned short, size_t);
void __svm_clgi(void);
void __svm_invlpga(void *, int);
void __svm_skinit(int);
void __svm_stgi(void);
void __svm_vmload(size_t);
void __svm_vmrun(size_t);
void __svm_vmsave(size_t);
unsigned __int64 __ull_rshift(unsigned __int64, int);
void __vmx_off(void);
void __vmx_vmptrst(unsigned __int64 *);
void __writecr0(unsigned int);
static __inline__
void __writecr3(unsigned int);
void __writecr4(unsigned int);
void __writecr8(unsigned int);
void __writedr(unsigned int, unsigned int);
void __writefsbyte(unsigned long, unsigned char);
void __writefsdword(unsigned long, unsigned long);
void __writefsqword(unsigned long, unsigned __int64);
void __writefsword(unsigned long, unsigned short);
void __writemsr(unsigned long, unsigned __int64);
void *_AddressOfReturnAddress(void);
unsigned char _BitScanForward(unsigned long *_Index, unsigned long _Mask);
unsigned char _BitScanReverse(unsigned long *_Index, unsigned long _Mask);
static __inline__
unsigned char _bittest(long const *, long);
static __inline__
unsigned char _bittestandcomplement(long *, long);
static __inline__
unsigned char _bittestandreset(long *, long);
static __inline__
unsigned char _bittestandset(long *, long);
unsigned __int64 __cdecl _byteswap_uint64(unsigned __int64);
unsigned long __cdecl _byteswap_ulong(unsigned long);
unsigned short __cdecl _byteswap_ushort(unsigned short);
void __cdecl _disable(void);
void __cdecl _enable(void);
long _InterlockedAddLargeStatistic(__int64 volatile *_Addend, long _Value);
long _InterlockedAnd(long volatile *_Value, long _Mask);
short _InterlockedAnd16(short volatile *_Value, short _Mask);
char _InterlockedAnd8(char volatile *_Value, char _Mask);
unsigned char _interlockedbittestandreset(long volatile *, long);
static __inline__
unsigned char _interlockedbittestandset(long volatile *, long);
long __cdecl _InterlockedCompareExchange(long volatile *_Destination,
                                         long _Exchange, long _Comparand);
long _InterlockedCompareExchange_HLEAcquire(long volatile *, long, long);
long _InterlockedCompareExchange_HLERelease(long volatile *, long, long);
short _InterlockedCompareExchange16(short volatile *_Destination,
                                    short _Exchange, short _Comparand);
__int64 _InterlockedCompareExchange64(__int64 volatile *_Destination,
                                      __int64 _Exchange, __int64 _Comparand);
__int64 _InterlockedCompareExchange64_HLEAcquire(__int64 volatile *, __int64,
                                                 __int64);
__int64 _InterlockedCompareExchange64_HLERelease(__int64 volatile *, __int64,
                                                 __int64);
char _InterlockedCompareExchange8(char volatile *_Destination, char _Exchange,
                                  char _Comparand);
void *_InterlockedCompareExchangePointer_HLEAcquire(void *volatile *, void *,
                                                    void *);
void *_InterlockedCompareExchangePointer_HLERelease(void *volatile *, void *,
                                                    void *);
long __cdecl _InterlockedDecrement(long volatile *_Addend);
short _InterlockedDecrement16(short volatile *_Addend);
long _InterlockedExchange(long volatile *_Target, long _Value);
short _InterlockedExchange16(short volatile *_Target, short _Value);
char _InterlockedExchange8(char volatile *_Target, char _Value);
long __cdecl _InterlockedExchangeAdd(long volatile *_Addend, long _Value);
long _InterlockedExchangeAdd_HLEAcquire(long volatile *, long);
long _InterlockedExchangeAdd_HLERelease(long volatile *, long);
short _InterlockedExchangeAdd16(short volatile *_Addend, short _Value);
__int64 _InterlockedExchangeAdd64_HLEAcquire(__int64 volatile *, __int64);
__int64 _InterlockedExchangeAdd64_HLERelease(__int64 volatile *, __int64);
char _InterlockedExchangeAdd8(char volatile *_Addend, char _Value);
long __cdecl _InterlockedIncrement(long volatile *_Addend);
short _InterlockedIncrement16(short volatile *_Addend);
long _InterlockedOr(long volatile *_Value, long _Mask);
short _InterlockedOr16(short volatile *_Value, short _Mask);
char _InterlockedOr8(char volatile *_Value, char _Mask);
long _InterlockedXor(long volatile *_Value, long _Mask);
short _InterlockedXor16(short volatile *_Value, short _Mask);
char _InterlockedXor8(char volatile *_Value, char _Mask);
void __cdecl _invpcid(unsigned int, void *);
unsigned long __cdecl _lrotl(unsigned long, int);
unsigned long __cdecl _lrotr(unsigned long, int);
static __inline__ void
__attribute__((__deprecated__("use other intrinsics or C++11 atomics instead")))
_ReadBarrier(void);
static __inline__ void
__attribute__((__deprecated__("use other intrinsics or C++11 atomics instead")))
_ReadWriteBarrier(void);
void *_ReturnAddress(void);
unsigned int _rorx_u32(unsigned int, const unsigned int);
unsigned int __cdecl _rotl(unsigned int _Value, int _Shift);
unsigned short _rotl16(unsigned short _Value, unsigned char _Shift);
unsigned __int64 __cdecl _rotl64(unsigned __int64 _Value, int _Shift);
unsigned char _rotl8(unsigned char _Value, unsigned char _Shift);
unsigned int __cdecl _rotr(unsigned int _Value, int _Shift);
unsigned short _rotr16(unsigned short _Value, unsigned char _Shift);
unsigned __int64 __cdecl _rotr64(unsigned __int64 _Value, int _Shift);
unsigned char _rotr8(unsigned char _Value, unsigned char _Shift);
int _sarx_i32(int, unsigned int);
int __cdecl _setjmp(jmp_buf);
unsigned int _shlx_u32(unsigned int, unsigned int);
unsigned int _shrx_u32(unsigned int, unsigned int);
void _Store_HLERelease(long volatile *, long);
void _Store64_HLERelease(__int64 volatile *, __int64);
void _StorePointer_HLERelease(void *volatile *, void *);
static __inline__ void
__attribute__((__deprecated__("use other intrinsics or C++11 atomics instead")))
_WriteBarrier(void);
unsigned __int32 _xbegin(void);
void _xend(void);
#define _XCR_XFEATURE_ENABLED_MASK 0
static __inline__
unsigned __int64 __cdecl _xgetbv(unsigned int);
void __cdecl _xsetbv(unsigned int, unsigned __int64);
/* These additional intrinsics are turned on in x64/amd64/x86_64 mode. */
#ifdef __x86_64__
void __addgsbyte(unsigned long, unsigned char);
void __addgsdword(unsigned long, unsigned long);
void __addgsqword(unsigned long, unsigned __int64);
void __addgsword(unsigned long, unsigned short);
static __inline__
void __faststorefence(void);
void __incgsbyte(unsigned long);
void __incgsdword(unsigned long);
void __incgsqword(unsigned long);
void __incgsword(unsigned long);
unsigned char __lwpins64(unsigned __int64, unsigned int, unsigned int);
void __lwpval64(unsigned __int64, unsigned int, unsigned int);
unsigned __int64 __lzcnt64(unsigned __int64);
static __inline__
void __movsq(unsigned long long *, unsigned long long const *, size_t);
unsigned __int64 __popcnt64(unsigned __int64);
static __inline__
unsigned char __readgsbyte(unsigned long);
static __inline__
unsigned long __readgsdword(unsigned long);
static __inline__
unsigned __int64 __readgsqword(unsigned long);
static __inline__
unsigned short __readgsword(unsigned long);
unsigned __int64 __shiftleft128(unsigned __int64 _LowPart,
                                unsigned __int64 _HighPart,
                                unsigned char _Shift);
unsigned __int64 __shiftright128(unsigned __int64 _LowPart,
                                 unsigned __int64 _HighPart,
                                 unsigned char _Shift);
static __inline__
void __stosq(unsigned __int64 *, unsigned __int64, size_t);
unsigned char __vmx_on(unsigned __int64 *);
unsigned char __vmx_vmclear(unsigned __int64 *);
unsigned char __vmx_vmlaunch(void);
unsigned char __vmx_vmptrld(unsigned __int64 *);
unsigned char __vmx_vmread(size_t, size_t *);
unsigned char __vmx_vmresume(void);
unsigned char __vmx_vmwrite(size_t, size_t);
void __writegsbyte(unsigned long, unsigned char);
void __writegsdword(unsigned long, unsigned long);
void __writegsqword(unsigned long, unsigned __int64);
void __writegsword(unsigned long, unsigned short);
unsigned char _BitScanForward64(unsigned long *_Index, unsigned __int64 _Mask);
unsigned char _BitScanReverse64(unsigned long *_Index, unsigned __int64 _Mask);
static __inline__
unsigned char _bittest64(__int64 const *, __int64);
static __inline__
unsigned char _bittestandcomplement64(__int64 *, __int64);
static __inline__
unsigned char _bittestandreset64(__int64 *, __int64);
static __inline__
unsigned char _bittestandset64(__int64 *, __int64);
unsigned __int64 __cdecl _byteswap_uint64(unsigned __int64);
long _InterlockedAnd_np(long volatile *_Value, long _Mask);
short _InterlockedAnd16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedAnd64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedAnd8_np(char volatile *_Value, char _Mask);
unsigned char _interlockedbittestandreset64(__int64 volatile *, __int64);
static __inline__
unsigned char _interlockedbittestandset64(__int64 volatile *, __int64);
long _InterlockedCompareExchange_np(long volatile *_Destination, long _Exchange,
                                    long _Comparand);
unsigned char _InterlockedCompareExchange128(__int64 volatile *_Destination,
                                             __int64 _ExchangeHigh,
                                             __int64 _ExchangeLow,
                                             __int64 *_ComparandResult);
unsigned char _InterlockedCompareExchange128_np(__int64 volatile *_Destination,
                                                __int64 _ExchangeHigh,
                                                __int64 _ExchangeLow,
                                                __int64 *_ComparandResult);
short _InterlockedCompareExchange16_np(short volatile *_Destination,
                                       short _Exchange, short _Comparand);
__int64 _InterlockedCompareExchange64_HLEAcquire(__int64 volatile *, __int64,
                                                 __int64);
__int64 _InterlockedCompareExchange64_HLERelease(__int64 volatile *, __int64,
                                                 __int64);
__int64 _InterlockedCompareExchange64_np(__int64 volatile *_Destination,
                                         __int64 _Exchange, __int64 _Comparand);
void *_InterlockedCompareExchangePointer(void *volatile *_Destination,
                                         void *_Exchange, void *_Comparand);
void *_InterlockedCompareExchangePointer_np(void *volatile *_Destination,
                                            void *_Exchange, void *_Comparand);
void *_InterlockedExchangePointer(void *volatile *_Target, void *_Value);
long _InterlockedOr_np(long volatile *_Value, long _Mask);
short _InterlockedOr16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedOr64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedOr8_np(char volatile *_Value, char _Mask);
long _InterlockedXor_np(long volatile *_Value, long _Mask);
short _InterlockedXor16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedXor64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedXor8_np(char volatile *_Value, char _Mask);
unsigned __int64 _rorx_u64(unsigned __int64, const unsigned int);
__int64 _sarx_i64(__int64, unsigned int);
int __cdecl _setjmpex(jmp_buf);
unsigned __int64 _shlx_u64(unsigned __int64, unsigned int);
unsigned __int64 _shrx_u64(unsigned __int64, unsigned int);
__int64 __mulh(__int64, __int64);
unsigned __int64 __umulh(unsigned __int64, unsigned __int64);
__int64 _mul128(__int64, __int64, __int64 *);
unsigned __int64 _umul128(unsigned __int64,
                          unsigned __int64,
                          unsigned __int64 *);
#endif /* __x86_64__ */

#if defined(__x86_64__) || defined(__arm__)
static __inline__
__int64 _InterlockedDecrement64(__int64 volatile *_Addend);
static __inline__
__int64 _InterlockedExchange64(__int64 volatile *_Target, __int64 _Value);
static __inline__
__int64 _InterlockedExchangeAdd64(__int64 volatile *_Addend, __int64 _Value);
static __inline__
__int64 _InterlockedExchangeSub64(__int64 volatile *_Subend, __int64 _Value);
static __inline__
__int64 _InterlockedIncrement64(__int64 volatile *_Addend);
static __inline__
__int64 _InterlockedOr64(__int64 volatile *_Value, __int64 _Mask);
static __inline__
__int64 _InterlockedXor64(__int64 volatile *_Value, __int64 _Mask);
static __inline__
__int64 _InterlockedAnd64(__int64 volatile *_Value, __int64 _Mask);

#endif
/*----------------------------------------------------------------------------*\
|* Bit Counting and Testing
\*----------------------------------------------------------------------------*/
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittest(long const *_BitBase, long _BitPos) {
  return (*_BitBase >> _BitPos) & 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandcomplement(long *_BitBase, long _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase ^ (1 << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandreset(long *_BitBase, long _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase & ~(1 << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandset(long *_BitBase, long _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase | (1 << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset(long volatile *_BitBase, long _BitPos) {
  long _PrevVal = __atomic_fetch_or(_BitBase, 1l << _BitPos, __ATOMIC_SEQ_CST);
  return (_PrevVal >> _BitPos) & 1;
}
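/* Usage sketch (illustrative only, compiled out): claiming slot 5 in a flag
   word. The plain _bittestandset is safe only for thread-local data; the
   interlocked form is safe under concurrency because the read-modify-write
   happens as one atomic operation. The names below are hypothetical. */
#if 0
static long __example_flags = 0;
static int __example_claim_slot5(void) {
  /* Returns nonzero if this caller set the bit (it was previously clear). */
  return _interlockedbittestandset(&__example_flags, 5) == 0;
}
#endif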
#if defined(__arm__) || defined(__aarch64__)
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset_acq(long volatile *_BitBase, long _BitPos) {
  long _PrevVal = __atomic_fetch_or(_BitBase, 1l << _BitPos, __ATOMIC_ACQUIRE);
  return (_PrevVal >> _BitPos) & 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset_nf(long volatile *_BitBase, long _BitPos) {
  long _PrevVal = __atomic_fetch_or(_BitBase, 1l << _BitPos, __ATOMIC_RELAXED);
  return (_PrevVal >> _BitPos) & 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset_rel(long volatile *_BitBase, long _BitPos) {
  long _PrevVal = __atomic_fetch_or(_BitBase, 1l << _BitPos, __ATOMIC_RELEASE);
  return (_PrevVal >> _BitPos) & 1;
}
#endif
#ifdef __x86_64__
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittest64(__int64 const *_BitBase, __int64 _BitPos) {
  return (*_BitBase >> _BitPos) & 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandcomplement64(__int64 *_BitBase, __int64 _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase ^ (1ll << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandreset64(__int64 *_BitBase, __int64 _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase & ~(1ll << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandset64(__int64 *_BitBase, __int64 _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase | (1ll << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset64(__int64 volatile *_BitBase, __int64 _BitPos) {
  long long _PrevVal =
      __atomic_fetch_or(_BitBase, 1ll << _BitPos, __ATOMIC_SEQ_CST);
  return (_PrevVal >> _BitPos) & 1;
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Exchange Add
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd8_acq(char volatile *_Addend, char _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_ACQUIRE);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd8_nf(char volatile *_Addend, char _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELAXED);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd8_rel(char volatile *_Addend, char _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELEASE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd16_acq(short volatile *_Addend, short _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd16_nf(short volatile *_Addend, short _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd16_rel(short volatile *_Addend, short _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd_acq(long volatile *_Addend, long _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd_nf(long volatile *_Addend, long _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd_rel(long volatile *_Addend, long _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd64_acq(__int64 volatile *_Addend, __int64 _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd64_nf(__int64 volatile *_Addend, __int64 _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd64_rel(__int64 volatile *_Addend, __int64 _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELEASE);
}
#endif
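/* Usage sketch (illustrative only, compiled out): the _acq/_rel/_nf suffixes
   map to C11-style acquire, release, and relaxed ("no fence") orderings, and
   each function returns the value the addend held *before* the addition.
   A hypothetical release-ordered reference drop: */
#if 0
static long __example_refcount = 1;
static void __example_release_ref(void) {
  /* _rel ordering publishes this thread's prior writes before the
     decremented count becomes visible; previous value 1 means this
     was the last reference. */
  if (_InterlockedExchangeAdd_rel(&__example_refcount, -1) == 1) {
    /* last reference dropped */
  }
}
#endif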
/*----------------------------------------------------------------------------*\
|* Interlocked Increment
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedIncrement16_acq(short volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedIncrement16_nf(short volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedIncrement16_rel(short volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedIncrement_acq(long volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedIncrement_nf(long volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedIncrement_rel(long volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedIncrement64_acq(__int64 volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedIncrement64_nf(__int64 volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedIncrement64_rel(__int64 volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELEASE);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Decrement
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedDecrement16_acq(short volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedDecrement16_nf(short volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedDecrement16_rel(short volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedDecrement_acq(long volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedDecrement_nf(long volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedDecrement_rel(long volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedDecrement64_acq(__int64 volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedDecrement64_nf(__int64 volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedDecrement64_rel(__int64 volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELEASE);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked And
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedAnd8_acq(char volatile *_Value, char _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedAnd8_nf(char volatile *_Value, char _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedAnd8_rel(char volatile *_Value, char _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedAnd16_acq(short volatile *_Value, short _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedAnd16_nf(short volatile *_Value, short _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedAnd16_rel(short volatile *_Value, short _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedAnd_acq(long volatile *_Value, long _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedAnd_nf(long volatile *_Value, long _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedAnd_rel(long volatile *_Value, long _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedAnd64_acq(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedAnd64_nf(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedAnd64_rel(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELEASE);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Or
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedOr8_acq(char volatile *_Value, char _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedOr8_nf(char volatile *_Value, char _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedOr8_rel(char volatile *_Value, char _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedOr16_acq(short volatile *_Value, short _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedOr16_nf(short volatile *_Value, short _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedOr16_rel(short volatile *_Value, short _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedOr_acq(long volatile *_Value, long _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedOr_nf(long volatile *_Value, long _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedOr_rel(long volatile *_Value, long _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedOr64_acq(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedOr64_nf(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedOr64_rel(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELEASE);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Xor
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedXor8_acq(char volatile *_Value, char _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedXor8_nf(char volatile *_Value, char _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedXor8_rel(char volatile *_Value, char _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedXor16_acq(short volatile *_Value, short _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedXor16_nf(short volatile *_Value, short _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedXor16_rel(short volatile *_Value, short _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedXor_acq(long volatile *_Value, long _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedXor_nf(long volatile *_Value, long _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedXor_rel(long volatile *_Value, long _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedXor64_acq(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedXor64_nf(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedXor64_rel(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELEASE);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Exchange
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchange8_acq(char volatile *_Target, char _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_ACQUIRE);
  return _Value;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchange8_nf(char volatile *_Target, char _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELAXED);
  return _Value;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchange8_rel(char volatile *_Target, char _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELEASE);
  return _Value;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchange16_acq(short volatile *_Target, short _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_ACQUIRE);
  return _Value;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchange16_nf(short volatile *_Target, short _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELAXED);
  return _Value;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchange16_rel(short volatile *_Target, short _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELEASE);
  return _Value;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchange_acq(long volatile *_Target, long _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_ACQUIRE);
  return _Value;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchange_nf(long volatile *_Target, long _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELAXED);
  return _Value;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchange_rel(long volatile *_Target, long _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELEASE);
  return _Value;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchange64_acq(__int64 volatile *_Target, __int64 _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_ACQUIRE);
  return _Value;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchange64_nf(__int64 volatile *_Target, __int64 _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELAXED);
  return _Value;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchange64_rel(__int64 volatile *_Target, __int64 _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELEASE);
  return _Value;
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Compare Exchange
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedCompareExchange8_acq(char volatile *_Destination,
                                 char _Exchange, char _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_ACQUIRE);
  return _Comparand;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedCompareExchange8_nf(char volatile *_Destination,
                                char _Exchange, char _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedCompareExchange8_rel(char volatile *_Destination,
                                 char _Exchange, char _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELEASE);
  return _Comparand;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedCompareExchange16_acq(short volatile *_Destination,
                                  short _Exchange, short _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_ACQUIRE);
  return _Comparand;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedCompareExchange16_nf(short volatile *_Destination,
                                 short _Exchange, short _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedCompareExchange16_rel(short volatile *_Destination,
                                  short _Exchange, short _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELEASE);
  return _Comparand;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedCompareExchange_acq(long volatile *_Destination,
                                long _Exchange, long _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_ACQUIRE);
  return _Comparand;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedCompareExchange_nf(long volatile *_Destination,
                               long _Exchange, long _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedCompareExchange_rel(long volatile *_Destination,
                                long _Exchange, long _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELEASE);
  return _Comparand;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedCompareExchange64_acq(__int64 volatile *_Destination,
                                  __int64 _Exchange, __int64 _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_ACQUIRE);
  return _Comparand;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedCompareExchange64_nf(__int64 volatile *_Destination,
                                 __int64 _Exchange, __int64 _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedCompareExchange64_rel(__int64 volatile *_Destination,
                                  __int64 _Exchange, __int64 _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELEASE);
  return _Comparand;
}
#endif
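/* Usage sketch (illustrative only, compiled out): each wrapper above returns
   the value observed at _Destination. __atomic_compare_exchange stores the
   observed value back into _Comparand on failure, and on success _Comparand
   already equals it, so returning _Comparand matches the MSVC contract.
   A hypothetical spinlock built on the acquire-ordered long variant: */
#if 0
static long __example_lock = 0;
static void __example_acquire(void) {
  /* A returned old value of 0 means we transitioned the lock from
     free to held; anything else means another thread holds it. */
  while (_InterlockedCompareExchange_acq(&__example_lock, 1, 0) != 0)
    ;
}
#endif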
/*----------------------------------------------------------------------------*\
|* readfs, readgs
|* (Pointers in address space #256 and #257 are relative to the GS and FS
|* segment registers, respectively.)
\*----------------------------------------------------------------------------*/
#define __ptr_to_addr_space(__addr_space_nbr, __type, __offset)              \
    ((volatile __type __attribute__((__address_space__(__addr_space_nbr)))*) \
        __offset)

#ifdef __i386__
static __inline__ unsigned char __DEFAULT_FN_ATTRS
__readfsbyte(unsigned long __offset) {
  return *__ptr_to_addr_space(257, unsigned char, __offset);
}
static __inline__ unsigned short __DEFAULT_FN_ATTRS
__readfsword(unsigned long __offset) {
  return *__ptr_to_addr_space(257, unsigned short, __offset);
}
static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
__readfsqword(unsigned long __offset) {
  return *__ptr_to_addr_space(257, unsigned __int64, __offset);
}
#endif
#ifdef __x86_64__
static __inline__ unsigned char __DEFAULT_FN_ATTRS
__readgsbyte(unsigned long __offset) {
  return *__ptr_to_addr_space(256, unsigned char, __offset);
}
static __inline__ unsigned short __DEFAULT_FN_ATTRS
__readgsword(unsigned long __offset) {
  return *__ptr_to_addr_space(256, unsigned short, __offset);
}
static __inline__ unsigned long __DEFAULT_FN_ATTRS
__readgsdword(unsigned long __offset) {
  return *__ptr_to_addr_space(256, unsigned long, __offset);
}
static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
__readgsqword(unsigned long __offset) {
  return *__ptr_to_addr_space(256, unsigned __int64, __offset);
}
#endif
#undef __ptr_to_addr_space
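/* Usage sketch (illustrative only, compiled out): on Windows the GS segment
   (x64) and FS segment (x86) point at the current thread's TEB, so these
   readers can pull TEB fields by offset. Offset 0x30 on x64 is the TEB's
   self-pointer (NtTib.Self); the helper name is hypothetical. */
#if 0
#ifdef __x86_64__
static void *__example_teb_self(void) {
  return (void *)__readgsqword(0x30);
}
#endif
#endif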
/*----------------------------------------------------------------------------*\
|* movs, stos
\*----------------------------------------------------------------------------*/
#if defined(__i386__) || defined(__x86_64__)
static __inline__ void __DEFAULT_FN_ATTRS
__movsb(unsigned char *__dst, unsigned char const *__src, size_t __n) {
  __asm__("rep movsb" : : "D"(__dst), "S"(__src), "c"(__n));
}
static __inline__ void __DEFAULT_FN_ATTRS
__movsd(unsigned long *__dst, unsigned long const *__src, size_t __n) {
  __asm__("rep movsl" : : "D"(__dst), "S"(__src), "c"(__n));
}
static __inline__ void __DEFAULT_FN_ATTRS
__movsw(unsigned short *__dst, unsigned short const *__src, size_t __n) {
  __asm__("rep movsw" : : "D"(__dst), "S"(__src), "c"(__n));
}
static __inline__ void __DEFAULT_FN_ATTRS
__stosd(unsigned long *__dst, unsigned long __x, size_t __n) {
  __asm__("rep stosl" : : "D"(__dst), "a"(__x), "c"(__n));
}
static __inline__ void __DEFAULT_FN_ATTRS
__stosw(unsigned short *__dst, unsigned short __x, size_t __n) {
  __asm__("rep stosw" : : "D"(__dst), "a"(__x), "c"(__n));
}
#endif
#ifdef __x86_64__
static __inline__ void __DEFAULT_FN_ATTRS
__movsq(unsigned long long *__dst, unsigned long long const *__src, size_t __n) {
  __asm__("rep movsq" : : "D"(__dst), "S"(__src), "c"(__n));
}
static __inline__ void __DEFAULT_FN_ATTRS
__stosq(unsigned __int64 *__dst, unsigned __int64 __x, size_t __n) {
  __asm__("rep stosq" : : "D"(__dst), "a"(__x), "c"(__n));
}
#endif
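/* Usage sketch (illustrative only, compiled out): __stosd behaves like a
   4-byte-element memset and __movsd like a 4-byte-element memcpy, with counts
   given in elements rather than bytes. The buffer names are hypothetical. */
#if 0
static void __example_fill_and_copy(void) {
  unsigned long __buf[16];
  unsigned long __copy[16];
  __stosd(__buf, 0xDEADBEEFUL, 16); /* fill 16 dwords, not 16 bytes */
  __movsd(__copy, __buf, 16);       /* copy 16 dwords */
}
#endif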
/*----------------------------------------------------------------------------*\
|* Misc
\*----------------------------------------------------------------------------*/
#if defined(__i386__) || defined(__x86_64__)
static __inline__ void __DEFAULT_FN_ATTRS
__cpuid(int __info[4], int __level) {
  __asm__ ("cpuid" : "=a"(__info[0]), "=b"(__info[1]), "=c"(__info[2]), "=d"(__info[3])
                   : "a"(__level));
}
static __inline__ void __DEFAULT_FN_ATTRS
__cpuidex(int __info[4], int __level, int __ecx) {
  __asm__ ("cpuid" : "=a"(__info[0]), "=b"(__info[1]), "=c"(__info[2]), "=d"(__info[3])
                   : "a"(__level), "c"(__ecx));
}
static __inline__ unsigned __int64 __cdecl __DEFAULT_FN_ATTRS
_xgetbv(unsigned int __xcr_no) {
  unsigned int __eax, __edx;
  __asm__ ("xgetbv" : "=a" (__eax), "=d" (__edx) : "c" (__xcr_no));
  return ((unsigned __int64)__edx << 32) | __eax;
}
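/* Usage sketch (illustrative only, compiled out): leaf 0 returns the CPU
   vendor string in EBX:EDX:ECX, and _xgetbv(_XCR_XFEATURE_ENABLED_MASK)
   reads XCR0, which is only meaningful once CPUID has reported OSXSAVE
   support. The function name below is hypothetical. */
#if 0
#include <string.h>
static void __example_cpu_vendor(char __vendor[13]) {
  int __info[4];
  __cpuid(__info, 0);
  memcpy(__vendor + 0, &__info[1], 4); /* EBX */
  memcpy(__vendor + 4, &__info[3], 4); /* EDX */
  memcpy(__vendor + 8, &__info[2], 4); /* ECX */
  __vendor[12] = '\0';
}
#endif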
static __inline__ void __DEFAULT_FN_ATTRS
__halt(void) {
  __asm__ volatile ("hlt");
}
static __inline__ void __DEFAULT_FN_ATTRS
__nop(void) {
  __asm__ volatile ("nop");
}
#endif
/*----------------------------------------------------------------------------*\
|* Privileged intrinsics
\*----------------------------------------------------------------------------*/
#if defined(__i386__) || defined(__x86_64__)
static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
__readmsr(unsigned long __register) {
  // Loads the contents of a 64-bit model specific register (MSR) specified in
  // the ECX register into registers EDX:EAX. The EDX register is loaded with
  // the high-order 32 bits of the MSR and the EAX register is loaded with the
  // low-order 32 bits. If less than 64 bits are implemented in the MSR being
  // read, the values returned to EDX:EAX in unimplemented bit locations are
  // undefined.
  unsigned long __edx;
  unsigned long __eax;
  __asm__ ("rdmsr" : "=d"(__edx), "=a"(__eax) : "c"(__register));
  return (((unsigned __int64)__edx) << 32) | (unsigned __int64)__eax;
}
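/* Usage sketch (illustrative only, compiled out): RDMSR is a privileged
   instruction, so this is meaningful only in ring-0 code such as a kernel
   driver. MSR 0xC0000082 (IA32_LSTAR, the 64-bit syscall entry point) is one
   commonly inspected register; the function name here is hypothetical. */
#if 0
static unsigned __int64 __example_read_lstar(void) {
  return __readmsr(0xC0000082);
}
#endif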
static __inline__ unsigned long __DEFAULT_FN_ATTRS
__readcr3(void) {
  unsigned long __cr3_val;
  __asm__ __volatile__ ("mov %%cr3, %0" : "=q"(__cr3_val) : : "memory");
  return __cr3_val;
}

static __inline__ void __DEFAULT_FN_ATTRS
__writecr3(unsigned int __cr3_val) {
  __asm__ ("mov %0, %%cr3" : : "q"(__cr3_val) : "memory");
}
#endif

#ifdef __cplusplus
}
#endif

#undef __DEFAULT_FN_ATTRS

#endif /* __INTRIN_H */
#endif /* _MSC_VER */