#ifndef KJK_INTRIN_X86_H_
#define KJK_INTRIN_X86_H_
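/*
 * GCC-compatible definitions of the MSVC x86 compiler intrinsics, implemented
 * with GNU extended inline assembly and compiler builtins. Each definition is
 * guarded with HAS_BUILTIN so that compilers which already provide the
 * intrinsic as a builtin keep their own version.
 */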
#define _ReturnAddress() (__builtin_return_address(0))
#define _AddressOfReturnAddress() (&(((void **)(__builtin_frame_address(0)))[1]))
#if !HAS_BUILTIN(_ReadWriteBarrier)
__INTRIN_INLINE void _ReadWriteBarrier(void)
{
    __asm__ __volatile__("" : : : "memory");
}
#endif

/* GCC only supports full barriers */
#define _ReadBarrier _ReadWriteBarrier
#define _WriteBarrier _ReadWriteBarrier
#if !HAS_BUILTIN(_mm_mfence)
__INTRIN_INLINE void _mm_mfence(void)
{
    __asm__ __volatile__("mfence" : : : "memory");
}
#endif
#if !HAS_BUILTIN(_mm_lfence)
__INTRIN_INLINE void _mm_lfence(void)
{
    _ReadBarrier();
    __asm__ __volatile__("lfence");
    _ReadBarrier();
}
#endif
#if defined(__x86_64__) && !HAS_BUILTIN(__faststorefence)
__INTRIN_INLINE void __faststorefence(void)
{
    long local;
    __asm__ __volatile__("lock; orl $0, %0;" : : "m"(local));
}
#endif
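/*
 * Atomic (Interlocked*) operations. On GCC 4.1.2 and later they are built on
 * the __sync_* atomic builtins; older compilers fall back to the lock-prefixed
 * inline-assembly implementations further below.
 */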
#if (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__) > 40100

#if !HAS_BUILTIN(_InterlockedCompareExchange8)
__INTRIN_INLINE char _InterlockedCompareExchange8(volatile char * Destination, char Exchange, char Comperand)
{
    return __sync_val_compare_and_swap(Destination, Comperand, Exchange);
}
#endif

#if !HAS_BUILTIN(_InterlockedCompareExchange16)
__INTRIN_INLINE short _InterlockedCompareExchange16(volatile short * Destination, short Exchange, short Comperand)
{
    return __sync_val_compare_and_swap(Destination, Comperand, Exchange);
}
#endif

#if !HAS_BUILTIN(_InterlockedCompareExchange)
__INTRIN_INLINE long _InterlockedCompareExchange(volatile long * Destination, long Exchange, long Comperand)
{
    return __sync_val_compare_and_swap(Destination, Comperand, Exchange);
}
#endif

#if !HAS_BUILTIN(_InterlockedCompareExchangePointer)
__INTRIN_INLINE void * _InterlockedCompareExchangePointer(void * volatile * Destination, void * Exchange, void * Comperand)
{
    return (void *)__sync_val_compare_and_swap(Destination, Comperand, Exchange);
}
#endif
#if !HAS_BUILTIN(_InterlockedExchange8)
__INTRIN_INLINE char _InterlockedExchange8(volatile char * Target, char Value)
{
    /* NOTE: __sync_lock_test_and_set is only an acquire barrier, so force a full barrier */
    __sync_synchronize();
    return __sync_lock_test_and_set(Target, Value);
}
#endif

#if !HAS_BUILTIN(_InterlockedExchange16)
__INTRIN_INLINE short _InterlockedExchange16(volatile short * Target, short Value)
{
    __sync_synchronize();
    return __sync_lock_test_and_set(Target, Value);
}
#endif

#if !HAS_BUILTIN(_InterlockedExchange)
__INTRIN_INLINE long _InterlockedExchange(volatile long * Target, long Value)
{
    __sync_synchronize();
    return __sync_lock_test_and_set(Target, Value);
}
#endif

#if !HAS_BUILTIN(_InterlockedExchangePointer)
__INTRIN_INLINE void * _InterlockedExchangePointer(void * volatile * Target, void * Value)
{
    __sync_synchronize();
    return (void *)__sync_lock_test_and_set(Target, Value);
}
#endif

#if defined(__x86_64__) && !HAS_BUILTIN(_InterlockedExchange64)
__INTRIN_INLINE long long _InterlockedExchange64(volatile long long * Target, long long Value)
{
    __sync_synchronize();
    return __sync_lock_test_and_set(Target, Value);
}
#endif

#if !HAS_BUILTIN(_InterlockedExchangeAdd8)
__INTRIN_INLINE char _InterlockedExchangeAdd8(char volatile * Addend, char Value)
{
    return __sync_fetch_and_add(Addend, Value);
}
#endif

#if !HAS_BUILTIN(_InterlockedExchangeAdd16)
__INTRIN_INLINE short _InterlockedExchangeAdd16(volatile short * Addend, short Value)
{
    return __sync_fetch_and_add(Addend, Value);
}
#endif

#if !HAS_BUILTIN(_InterlockedExchangeAdd)
__INTRIN_INLINE long _InterlockedExchangeAdd(volatile long * Addend, long Value)
{
    return __sync_fetch_and_add(Addend, Value);
}
#endif

#if defined(__x86_64__) && !HAS_BUILTIN(_InterlockedExchangeAdd64)
__INTRIN_INLINE long long _InterlockedExchangeAdd64(volatile long long * Addend, long long Value)
{
    return __sync_fetch_and_add(Addend, Value);
}
#endif
#if !HAS_BUILTIN(_InterlockedAnd8)
__INTRIN_INLINE char _InterlockedAnd8(volatile char * value, char mask)
{
    return __sync_fetch_and_and(value, mask);
}
#endif

#if !HAS_BUILTIN(_InterlockedAnd16)
__INTRIN_INLINE short _InterlockedAnd16(volatile short * value, short mask)
{
    return __sync_fetch_and_and(value, mask);
}
#endif

#if !HAS_BUILTIN(_InterlockedAnd)
__INTRIN_INLINE long _InterlockedAnd(volatile long * value, long mask)
{
    return __sync_fetch_and_and(value, mask);
}
#endif

#if defined(__x86_64__) && !HAS_BUILTIN(_InterlockedAnd64)
__INTRIN_INLINE long long _InterlockedAnd64(volatile long long * value, long long mask)
{
    return __sync_fetch_and_and(value, mask);
}
#endif
#if !HAS_BUILTIN(_InterlockedOr8)
__INTRIN_INLINE char _InterlockedOr8(volatile char * value, char mask)
{
    return __sync_fetch_and_or(value, mask);
}
#endif

#if !HAS_BUILTIN(_InterlockedOr16)
__INTRIN_INLINE short _InterlockedOr16(volatile short * value, short mask)
{
    return __sync_fetch_and_or(value, mask);
}
#endif

#if !HAS_BUILTIN(_InterlockedOr)
__INTRIN_INLINE long _InterlockedOr(volatile long * value, long mask)
{
    return __sync_fetch_and_or(value, mask);
}
#endif

#if defined(__x86_64__) && !HAS_BUILTIN(_InterlockedOr64)
__INTRIN_INLINE long long _InterlockedOr64(volatile long long * value, long long mask)
{
    return __sync_fetch_and_or(value, mask);
}
#endif
#if !HAS_BUILTIN(_InterlockedXor8)
__INTRIN_INLINE char _InterlockedXor8(volatile char * value, char mask)
{
    return __sync_fetch_and_xor(value, mask);
}
#endif

#if !HAS_BUILTIN(_InterlockedXor16)
__INTRIN_INLINE short _InterlockedXor16(volatile short * value, short mask)
{
    return __sync_fetch_and_xor(value, mask);
}
#endif

#if !HAS_BUILTIN(_InterlockedXor)
__INTRIN_INLINE long _InterlockedXor(volatile long * value, long mask)
{
    return __sync_fetch_and_xor(value, mask);
}
#endif

#if defined(__x86_64__) && !HAS_BUILTIN(_InterlockedXor64)
__INTRIN_INLINE long long _InterlockedXor64(volatile long long * value, long long mask)
{
    return __sync_fetch_and_xor(value, mask);
}
#endif
#if !HAS_BUILTIN(_InterlockedDecrement)
__INTRIN_INLINE long _InterlockedDecrement(volatile long * lpAddend)
{
    return __sync_sub_and_fetch(lpAddend, 1);
}
#endif

#if !HAS_BUILTIN(_InterlockedIncrement)
__INTRIN_INLINE long _InterlockedIncrement(volatile long * lpAddend)
{
    return __sync_add_and_fetch(lpAddend, 1);
}
#endif

#if !HAS_BUILTIN(_InterlockedDecrement16)
__INTRIN_INLINE short _InterlockedDecrement16(volatile short * lpAddend)
{
    return __sync_sub_and_fetch(lpAddend, 1);
}
#endif

#if !HAS_BUILTIN(_InterlockedIncrement16)
__INTRIN_INLINE short _InterlockedIncrement16(volatile short * lpAddend)
{
    return __sync_add_and_fetch(lpAddend, 1);
}
#endif
#if defined(__x86_64__)
#if !HAS_BUILTIN(_InterlockedDecrement64)
__INTRIN_INLINE long long _InterlockedDecrement64(volatile long long * lpAddend)
{
    return __sync_sub_and_fetch(lpAddend, 1);
}
#endif

#if !HAS_BUILTIN(_InterlockedIncrement64)
__INTRIN_INLINE long long _InterlockedIncrement64(volatile long long * lpAddend)
{
    return __sync_add_and_fetch(lpAddend, 1);
}
#endif
#endif /* defined(__x86_64__) */

#else /* (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__) > 40100 */
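/* Fallback for older GCC: the same Interlocked* operations implemented with
   lock-prefixed inline assembly. */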
#if !HAS_BUILTIN(_InterlockedCompareExchange8)
__INTRIN_INLINE char _InterlockedCompareExchange8(volatile char * Destination, char Exchange, char Comperand)
{
    char retval = Comperand;
    __asm__("lock; cmpxchgb %b[Exchange], %[Destination]" :
            [retval] "+a" (retval) :
            [Destination] "m" (*Destination), [Exchange] "q" (Exchange) :
            "memory");
    return retval;
}
#endif

#if !HAS_BUILTIN(_InterlockedCompareExchange16)
__INTRIN_INLINE short _InterlockedCompareExchange16(volatile short * Destination, short Exchange, short Comperand)
{
    short retval = Comperand;
    __asm__("lock; cmpxchgw %w[Exchange], %[Destination]" :
            [retval] "+a" (retval) :
            [Destination] "m" (*Destination), [Exchange] "q" (Exchange) :
            "memory");
    return retval;
}
#endif

#if !HAS_BUILTIN(_InterlockedCompareExchange)
__INTRIN_INLINE long _InterlockedCompareExchange(volatile long * Destination, long Exchange, long Comperand)
{
    long retval = Comperand;
    __asm__("lock; cmpxchgl %k[Exchange], %[Destination]" :
            [retval] "+a" (retval) :
            [Destination] "m" (*Destination), [Exchange] "q" (Exchange) :
            "memory");
    return retval;
}
#endif

#if !HAS_BUILTIN(_InterlockedCompareExchangePointer)
__INTRIN_INLINE void * _InterlockedCompareExchangePointer(void * volatile * Destination, void * Exchange, void * Comperand)
{
    void * retval = (void *)Comperand;
    __asm__("lock; cmpxchgl %k[Exchange], %[Destination]" :
            [retval] "=a" (retval) :
            "[retval]" (retval), [Destination] "m" (*Destination), [Exchange] "q" (Exchange) :
            "memory");
    return retval;
}
#endif
#if !HAS_BUILTIN(_InterlockedExchange8)
__INTRIN_INLINE char _InterlockedExchange8(volatile char * Target, char Value)
{
    char retval = Value;
    __asm__("xchgb %[retval], %[Target]" : [retval] "+r" (retval) : [Target] "m" (*Target) : "memory");
    return retval;
}
#endif

#if !HAS_BUILTIN(_InterlockedExchange16)
__INTRIN_INLINE short _InterlockedExchange16(volatile short * Target, short Value)
{
    short retval = Value;
    __asm__("xchgw %[retval], %[Target]" : [retval] "+r" (retval) : [Target] "m" (*Target) : "memory");
    return retval;
}
#endif

#if !HAS_BUILTIN(_InterlockedExchange)
__INTRIN_INLINE long _InterlockedExchange(volatile long * Target, long Value)
{
    long retval = Value;
    __asm__("xchgl %[retval], %[Target]" : [retval] "+r" (retval) : [Target] "m" (*Target) : "memory");
    return retval;
}
#endif

#if !HAS_BUILTIN(_InterlockedExchangePointer)
__INTRIN_INLINE void * _InterlockedExchangePointer(void * volatile * Target, void * Value)
{
    void * retval = Value;
    __asm__("xchgl %[retval], %[Target]" : [retval] "+r" (retval) : [Target] "m" (*Target) : "memory");
    return retval;
}
#endif

#if !HAS_BUILTIN(_InterlockedExchangeAdd8)
__INTRIN_INLINE char _InterlockedExchangeAdd8(char volatile * Addend, char Value)
{
    char retval = Value;
    __asm__("lock; xaddb %[retval], %[Addend]" : [retval] "+r" (retval) : [Addend] "m" (*Addend) : "memory");
    return retval;
}
#endif

#if !HAS_BUILTIN(_InterlockedExchangeAdd16)
__INTRIN_INLINE short _InterlockedExchangeAdd16(volatile short * Addend, short Value)
{
    short retval = Value;
    __asm__("lock; xaddw %[retval], %[Addend]" : [retval] "+r" (retval) : [Addend] "m" (*Addend) : "memory");
    return retval;
}
#endif

#if !HAS_BUILTIN(_InterlockedExchangeAdd)
__INTRIN_INLINE long _InterlockedExchangeAdd(volatile long * Addend, long Value)
{
    long retval = Value;
    __asm__("lock; xaddl %[retval], %[Addend]" : [retval] "+r" (retval) : [Addend] "m" (*Addend) : "memory");
    return retval;
}
#endif
#if !HAS_BUILTIN(_InterlockedAnd8)
#if !HAS_BUILTIN(_InterlockedAnd16)
#if !HAS_BUILTIN(_InterlockedAnd)
#if !HAS_BUILTIN(_InterlockedOr8)
#if !HAS_BUILTIN(_InterlockedOr16)
#if !HAS_BUILTIN(_InterlockedOr)
#if !HAS_BUILTIN(_InterlockedXor8)
#if !HAS_BUILTIN(_InterlockedXor16)
#if !HAS_BUILTIN(_InterlockedXor)
#if !HAS_BUILTIN(_InterlockedDecrement)
__INTRIN_INLINE long _InterlockedDecrement(volatile long * lpAddend)
{
    return _InterlockedExchangeAdd(lpAddend, -1) - 1;
}
#endif

#if !HAS_BUILTIN(_InterlockedIncrement)
__INTRIN_INLINE long _InterlockedIncrement(volatile long * lpAddend)
{
    return _InterlockedExchangeAdd(lpAddend, 1) + 1;
}
#endif

#if !HAS_BUILTIN(_InterlockedDecrement16)
__INTRIN_INLINE short _InterlockedDecrement16(volatile short * lpAddend)
{
    return _InterlockedExchangeAdd16(lpAddend, -1) - 1;
}
#endif

#if !HAS_BUILTIN(_InterlockedIncrement16)
__INTRIN_INLINE short _InterlockedIncrement16(volatile short * lpAddend)
{
    return _InterlockedExchangeAdd16(lpAddend, 1) + 1;
}
#endif
#if defined(__x86_64__)
#if !HAS_BUILTIN(_InterlockedDecrement64)
__INTRIN_INLINE long long _InterlockedDecrement64(volatile long long * lpAddend)
{
    return _InterlockedExchangeAdd64(lpAddend, -1) - 1;
}
#endif

#if !HAS_BUILTIN(_InterlockedIncrement64)
__INTRIN_INLINE long long _InterlockedIncrement64(volatile long long * lpAddend)
{
    return _InterlockedExchangeAdd64(lpAddend, 1) + 1;
}
#endif
#endif /* defined(__x86_64__) */

#endif /* (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__) > 40100 */
#if !HAS_BUILTIN(_InterlockedCompareExchange64)
#if (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__) > 40100 && defined(__x86_64__)
__INTRIN_INLINE long long _InterlockedCompareExchange64(volatile long long * Destination, long long Exchange, long long Comperand)
{
    return __sync_val_compare_and_swap(Destination, Comperand, Exchange);
}
#else
__INTRIN_INLINE long long _InterlockedCompareExchange64(volatile long long * Destination, long long Exchange, long long Comperand)
{
    long long retval = Comperand;
    __asm__
    (
        "lock; cmpxchg8b %[Destination]" :
        [retval] "+A" (retval) :
        [Destination] "m" (*Destination),
        "b" ((unsigned long)((Exchange >> 0) & 0xFFFFFFFF)),
        "c" ((unsigned long)((Exchange >> 32) & 0xFFFFFFFF)) :
        "memory"
    );
    return retval;
}
#endif
#endif
#if defined(__x86_64__) && !HAS_BUILTIN(_InterlockedCompareExchange128)
__INTRIN_INLINE unsigned char _InterlockedCompareExchange128(volatile __int64 * Destination, __int64 ExchangeHigh, __int64 ExchangeLow, __int64 * ComparandResult)
{
    __int64 xchg[2] = { ExchangeLow, ExchangeHigh };
    return __sync_bool_compare_and_swap((__uint128_t *)Destination, *((__uint128_t *)ComparandResult), *((__uint128_t *)xchg));
}
#endif
__INTRIN_INLINE long _InterlockedAddLargeStatistic(volatile long long * const Addend, const long Value)
{
    __asm__
    (
        "lock; addl %[Value], %[Lo32];"
        "jae LABEL%=;"
        "lock; adcl $0, %[Hi32];"
        "LABEL%=:;" :
        [Lo32] "+m" (*((volatile long *)(Addend) + 0)), [Hi32] "+m" (*((volatile long *)(Addend) + 1)) :
        [Value] "ir" (Value) :
        "memory"
    );
    return Value;
}
#if !HAS_BUILTIN(_interlockedbittestandreset)
__INTRIN_INLINE unsigned char _interlockedbittestandreset(volatile long * a, long b)
{
    unsigned char retval;
    __asm__("lock; btrl %[b], %[a]; setb %b[retval]" : [retval] "=q" (retval), [a] "+m" (*a) : [b] "Ir" (b) : "memory");
    return retval;
}
#endif

#if defined(__x86_64__) && !HAS_BUILTIN(_interlockedbittestandreset64)
__INTRIN_INLINE unsigned char _interlockedbittestandreset64(volatile long long * a, long long b)
{
    unsigned char retval;
    __asm__("lock; btrq %[b], %[a]; setb %b[retval]" : [retval] "=r" (retval), [a] "+m" (*a) : [b] "Ir" (b) : "memory");
    return retval;
}
#endif

#if !HAS_BUILTIN(_interlockedbittestandset)
__INTRIN_INLINE unsigned char _interlockedbittestandset(volatile long * a, long b)
{
    unsigned char retval;
    __asm__("lock; btsl %[b], %[a]; setc %b[retval]" : [retval] "=q" (retval), [a] "+m" (*a) : [b] "Ir" (b) : "memory");
    return retval;
}
#endif

#if defined(__x86_64__) && !HAS_BUILTIN(_interlockedbittestandset64)
__INTRIN_INLINE unsigned char _interlockedbittestandset64(volatile long long * a, long long b)
{
    unsigned char retval;
    __asm__("lock; btsq %[b], %[a]; setc %b[retval]" : [retval] "=r" (retval), [a] "+m" (*a) : [b] "Ir" (b) : "memory");
    return retval;
}
#endif
#if !HAS_BUILTIN(__stosb)
__INTRIN_INLINE void __stosb(unsigned char * Dest, unsigned char Data, size_t Count)
{
    __asm__ __volatile__
    (
        "rep; stosb" :
        [Dest] "=D" (Dest), [Count] "=c" (Count) :
        "[Dest]" (Dest), "a" (Data), "[Count]" (Count)
    );
}
#endif

__INTRIN_INLINE void __stosw(unsigned short * Dest, unsigned short Data, size_t Count)
{
    __asm__ __volatile__
    (
        "rep; stosw" :
        [Dest] "=D" (Dest), [Count] "=c" (Count) :
        "[Dest]" (Dest), "a" (Data), "[Count]" (Count)
    );
}

__INTRIN_INLINE void __stosd(unsigned long * Dest, unsigned long Data, size_t Count)
{
    __asm__ __volatile__
    (
        "rep; stosl" :
        [Dest] "=D" (Dest), [Count] "=c" (Count) :
        "[Dest]" (Dest), "a" (Data), "[Count]" (Count)
    );
}
#if defined(__x86_64__)
__INTRIN_INLINE void __stosq(unsigned long long * Dest, unsigned long long Data, size_t Count)
{
    __asm__ __volatile__
    (
        "rep; stosq" :
        [Dest] "=D" (Dest), [Count] "=c" (Count) :
        "[Dest]" (Dest), "a" (Data), "[Count]" (Count)
    );
}
#endif
#if defined(__x86_64__)

#if !HAS_BUILTIN(__readgsbyte)
#if !HAS_BUILTIN(__readgsword)
    unsigned short value;
#if !HAS_BUILTIN(__readgsdword)
#if !HAS_BUILTIN(__readgsqword)
    unsigned long long value;
#if !HAS_BUILTIN(__readfsbyte)
#if !HAS_BUILTIN(__readfsword)
    unsigned short value;
#if !HAS_BUILTIN(__readfsdword)
    if(!__builtin_constant_p(Offset))
        __asm__ __volatile__("addb %b[Offset], %%fs:%a[Offset]" : : [Offset] "r" (Offset) : "memory");

    if(!__builtin_constant_p(Offset))
        __asm__ __volatile__("addw %w[Offset], %%fs:%a[Offset]" : : [Offset] "r" (Offset) : "memory");

    if(!__builtin_constant_p(Offset))
        __asm__ __volatile__("addl %k[Offset], %%fs:%a[Offset]" : : [Offset] "r" (Offset) : "memory");
#if !HAS_BUILTIN(_BitScanForward)
__INTRIN_INLINE unsigned char _BitScanForward(unsigned long * Index, unsigned long Mask)
{
    *Index = __builtin_ctzl(Mask);
    return Mask ? 1 : 0;
}
#endif

#if !HAS_BUILTIN(_BitScanReverse)
__INTRIN_INLINE unsigned char _BitScanReverse(unsigned long * Index, unsigned long Mask)
{
    *Index = 31 - __builtin_clzl(Mask);
    return Mask ? 1 : 0;
}
#endif
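/* Bit test intrinsics (bt/btc/btr/bts). */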
#if !HAS_BUILTIN(_bittest)
__INTRIN_INLINE unsigned char _bittest(const long * a, long b)
{
    unsigned char retval;

    if(__builtin_constant_p(b))
        __asm__("bt %[b], %[a]; setb %b[retval]" : [retval] "=q" (retval) : [a] "mr" (*(a + (b / 32))), [b] "Ir" (b % 32));
    else
        __asm__("bt %[b], %[a]; setb %b[retval]" : [retval] "=q" (retval) : [a] "m" (*a), [b] "r" (b));

    return retval;
}
#endif
#if !HAS_BUILTIN(_BitScanForward64)
__INTRIN_INLINE unsigned char _BitScanForward64(unsigned long * Index, unsigned long long Mask)
{
    unsigned long long Index64;
    __asm__("bsfq %[Mask], %[Index]" : [Index] "=r" (Index64) : [Mask] "mr" (Mask));
    *Index = Index64;
    return Mask ? 1 : 0;
}
#endif

#if !HAS_BUILTIN(_BitScanReverse64)
__INTRIN_INLINE unsigned char _BitScanReverse64(unsigned long * Index, unsigned long long Mask)
{
    unsigned long long Index64;
    __asm__("bsrq %[Mask], %[Index]" : [Index] "=r" (Index64) : [Mask] "mr" (Mask));
    *Index = Index64;
    return Mask ? 1 : 0;
}
#endif
#if !HAS_BUILTIN(_bittest64)
__INTRIN_INLINE unsigned char _bittest64(const long long * a, long long b)
{
    unsigned char retval;

    if(__builtin_constant_p(b))
        __asm__("bt %[b], %[a]; setb %b[retval]" : [retval] "=q" (retval) : [a] "mr" (*(a + (b / 64))), [b] "Ir" (b % 64));
    else
        __asm__("bt %[b], %[a]; setb %b[retval]" : [retval] "=q" (retval) : [a] "m" (*a), [b] "r" (b));

    return retval;
}
#endif
#if !HAS_BUILTIN(_bittestandcomplement)
__INTRIN_INLINE unsigned char _bittestandcomplement(long * a, long b)
{
    unsigned char retval;

    if(__builtin_constant_p(b))
        __asm__("btc %[b], %[a]; setb %b[retval]" : [a] "+mr" (*(a + (b / 32))), [retval] "=q" (retval) : [b] "Ir" (b % 32));
    else
        __asm__("btc %[b], %[a]; setb %b[retval]" : [a] "+m" (*a), [retval] "=q" (retval) : [b] "r" (b));

    return retval;
}
#endif

#if !HAS_BUILTIN(_bittestandreset)
__INTRIN_INLINE unsigned char _bittestandreset(long * a, long b)
{
    unsigned char retval;

    if(__builtin_constant_p(b))
        __asm__("btr %[b], %[a]; setb %b[retval]" : [a] "+mr" (*(a + (b / 32))), [retval] "=q" (retval) : [b] "Ir" (b % 32));
    else
        __asm__("btr %[b], %[a]; setb %b[retval]" : [a] "+m" (*a), [retval] "=q" (retval) : [b] "r" (b));

    return retval;
}
#endif

#if !HAS_BUILTIN(_bittestandset)
__INTRIN_INLINE unsigned char _bittestandset(long * a, long b)
{
    unsigned char retval;

    if(__builtin_constant_p(b))
        __asm__("bts %[b], %[a]; setb %b[retval]" : [a] "+mr" (*(a + (b / 32))), [retval] "=q" (retval) : [b] "Ir" (b % 32));
    else
        __asm__("bts %[b], %[a]; setb %b[retval]" : [a] "+m" (*a), [retval] "=q" (retval) : [b] "r" (b));

    return retval;
}
#endif
#if !HAS_BUILTIN(_bittestandset64)
__INTRIN_INLINE unsigned char _bittestandset64(long long * a, long long b)
{
    unsigned char retval;

    if(__builtin_constant_p(b))
        __asm__("btsq %[b], %[a]; setb %b[retval]" : [a] "+mr" (*(a + (b / 64))), [retval] "=q" (retval) : [b] "Ir" (b % 64));
    else
        __asm__("btsq %[b], %[a]; setb %b[retval]" : [a] "+m" (*a), [retval] "=q" (retval) : [b] "r" (b));

    return retval;
}
#endif

#if !HAS_BUILTIN(_bittestandreset64)
__INTRIN_INLINE unsigned char _bittestandreset64(long long * a, long long b)
{
    unsigned char retval;

    if(__builtin_constant_p(b))
        __asm__("btrq %[b], %[a]; setb %b[retval]" : [a] "+mr" (*(a + (b / 64))), [retval] "=q" (retval) : [b] "Ir" (b % 64));
    else
        __asm__("btrq %[b], %[a]; setb %b[retval]" : [a] "+m" (*a), [retval] "=q" (retval) : [b] "r" (b));

    return retval;
}
#endif
#if !HAS_BUILTIN(_bittestandcomplement64)
__INTRIN_INLINE unsigned char _bittestandcomplement64(long long * a, long long b)
{
    unsigned char retval;

    if(__builtin_constant_p(b))
        __asm__("btcq %[b], %[a]; setb %b[retval]" : [a] "+mr" (*(a + (b / 64))), [retval] "=q" (retval) : [b] "Ir" (b % 64));
    else
        __asm__("btcq %[b], %[a]; setb %b[retval]" : [a] "+m" (*a), [retval] "=q" (retval) : [b] "r" (b));

    return retval;
}
#endif
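/* Rotation intrinsics. */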
#if !HAS_BUILTIN(_rotl8)
__INTRIN_INLINE unsigned char __cdecl _rotl8(unsigned char value, unsigned char shift)
{
    unsigned char retval;
    __asm__("rolb %b[shift], %b[retval]" : [retval] "=rm" (retval) : "[retval]" (value), [shift] "Nc" (shift));
    return retval;
}
#endif

#if !HAS_BUILTIN(_rotl16)
__INTRIN_INLINE unsigned short __cdecl _rotl16(unsigned short value, unsigned char shift)
{
    unsigned short retval;
    __asm__("rolw %b[shift], %w[retval]" : [retval] "=rm" (retval) : "[retval]" (value), [shift] "Nc" (shift));
    return retval;
}
#endif

#if !HAS_BUILTIN(_rotl)
__INTRIN_INLINE unsigned int __cdecl _rotl(unsigned int value, int shift)
{
    unsigned int retval;
    __asm__("roll %b[shift], %k[retval]" : [retval] "=rm" (retval) : "[retval]" (value), [shift] "Nc" (shift));
    return retval;
}
#endif
#if !HAS_BUILTIN(_rotl64)
#ifdef __x86_64__
__INTRIN_INLINE unsigned long long __cdecl _rotl64(unsigned long long value, int shift)
{
    unsigned long long retval;
    __asm__("rolq %b[shift], %k[retval]" : [retval] "=rm" (retval) : "[retval]" (value), [shift] "Nc" (shift));
    return retval;
}
#endif /* __x86_64__ */
#endif
#if !HAS_BUILTIN(_rotr)
__INTRIN_INLINE unsigned int __cdecl _rotr(unsigned int value, int shift)
{
    unsigned int retval;
    __asm__("rorl %b[shift], %k[retval]" : [retval] "=rm" (retval) : "[retval]" (value), [shift] "Nc" (shift));
    return retval;
}
#endif

#if !HAS_BUILTIN(_rotr8)
__INTRIN_INLINE unsigned char __cdecl _rotr8(unsigned char value, unsigned char shift)
{
    unsigned char retval;
    __asm__("rorb %b[shift], %b[retval]" : [retval] "=qm" (retval) : "[retval]" (value), [shift] "Nc" (shift));
    return retval;
}
#endif

#if !HAS_BUILTIN(_rotr16)
__INTRIN_INLINE unsigned short __cdecl _rotr16(unsigned short value, unsigned char shift)
{
    unsigned short retval;
    __asm__("rorw %b[shift], %w[retval]" : [retval] "=rm" (retval) : "[retval]" (value), [shift] "Nc" (shift));
    return retval;
}
#endif
#if !HAS_BUILTIN(_rotr64)
#ifdef __x86_64__
__INTRIN_INLINE unsigned long long __cdecl _rotr64(unsigned long long value, int shift)
{
    unsigned long long retval;
    __asm__("rorq %b[shift], %k[retval]" : [retval] "=rm" (retval) : "[retval]" (value), [shift] "Nc" (shift));
    return retval;
}
#endif /* __x86_64__ */
#endif
#if !HAS_BUILTIN(_lrotl)
__INTRIN_INLINE unsigned long __cdecl _lrotl(unsigned long value, int shift)
{
    unsigned long retval;
    __asm__("roll %b[shift], %k[retval]" : [retval] "=rm" (retval) : "[retval]" (value), [shift] "Nc" (shift));
    return retval;
}
#endif

#if !HAS_BUILTIN(_lrotr)
__INTRIN_INLINE unsigned long __cdecl _lrotr(unsigned long value, int shift)
{
    unsigned long retval;
    __asm__("rorl %b[shift], %k[retval]" : [retval] "=rm" (retval) : "[retval]" (value), [shift] "Nc" (shift));
    return retval;
}
#endif
    unsigned long long retval;
    unsigned char shift = Bit & 0x3F;

    unsigned char shift = Bit & 0x3F;

    unsigned char shift = Bit & 0x3F;
__INTRIN_INLINE unsigned long long __ll_lshift(unsigned long long Mask, int Bit)
{
    unsigned long long retval = Mask;

    __asm__
    (
        "shldl %b[Bit], %%eax, %%edx; sall %b[Bit], %%eax" :
        "+A" (retval) :
        [Bit] "Nc" ((unsigned char)((unsigned long)Bit) & 0xFF)
    );

    return retval;
}

__INTRIN_INLINE long long __ll_rshift(long long Mask, int Bit)
{
    long long retval = Mask;

    __asm__
    (
        "shrdl %b[Bit], %%edx, %%eax; sarl %b[Bit], %%edx" :
        "+A" (retval) :
        [Bit] "Nc" ((unsigned char)((unsigned long)Bit) & 0xFF)
    );

    return retval;
}

__INTRIN_INLINE unsigned long long __ull_rshift(unsigned long long Mask, int Bit)
{
    unsigned long long retval = Mask;

    __asm__
    (
        "shrdl %b[Bit], %%edx, %%eax; shrl %b[Bit], %%edx" :
        "+A" (retval) :
        [Bit] "Nc" ((unsigned char)((unsigned long)Bit) & 0xFF)
    );

    return retval;
}
__INTRIN_INLINE unsigned short __cdecl _byteswap_ushort(unsigned short value)
{
    unsigned short retval;
    __asm__("rorw $8, %w[retval]" : [retval] "=rm" (retval) : "[retval]" (value));
    return retval;
}

__INTRIN_INLINE unsigned long __cdecl _byteswap_ulong(unsigned long value)
{
    unsigned long retval;
    __asm__("bswapl %[retval]" : [retval] "=r" (retval) : "[retval]" (value));
    return retval;
}
#ifdef __x86_64__
__INTRIN_INLINE unsigned long long __cdecl _byteswap_uint64(unsigned long long value)
{
    unsigned long long retval;
    __asm__("bswapq %[retval]" : [retval] "=r" (retval) : "[retval]" (value));
    return retval;
}
#else
__INTRIN_INLINE unsigned long long __cdecl _byteswap_uint64(unsigned long long value)
{
    union {
        unsigned long long int64part;
        struct {
            unsigned long lowpart;
            unsigned long hipart;
        };
    } retval;

    retval.int64part = value;
    __asm__("bswapl %[lowpart]\n"
            "bswapl %[hipart]\n" :
            [lowpart] "=r" (retval.hipart), [hipart] "=r" (retval.lowpart) :
            "[lowpart]" (retval.lowpart), "[hipart]" (retval.hipart));
    return retval.int64part;
}
#endif
#if !HAS_BUILTIN(__lzcnt)
__INTRIN_INLINE unsigned int __lzcnt(unsigned int value)
{
    return __builtin_clz(value);
}
#endif

#if !HAS_BUILTIN(__lzcnt16)
__INTRIN_INLINE unsigned short __lzcnt16(unsigned short value)
{
    return __builtin_clz(value);
}
#endif

#if !HAS_BUILTIN(__popcnt)
__INTRIN_INLINE unsigned int __popcnt(unsigned int value)
{
    return __builtin_popcount(value);
}
#endif

#if !HAS_BUILTIN(__popcnt16)
__INTRIN_INLINE unsigned short __popcnt16(unsigned short value)
{
    return __builtin_popcount(value);
}
#endif
#if !HAS_BUILTIN(__lzcnt64)
__INTRIN_INLINE unsigned long long __lzcnt64(unsigned long long value)
{
    return __builtin_clzll(value);
}
#endif

#if !HAS_BUILTIN(__popcnt64)
__INTRIN_INLINE unsigned long long __popcnt64(unsigned long long value)
{
    return __builtin_popcountll(value);
}
#endif
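/* Widening multiply helpers: __emul/__emulu return the full 64-bit product of
   32-bit operands; __mulh/__umulh return the high half of a 64-bit multiply. */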
#if !HAS_BUILTIN(__emul)
__INTRIN_INLINE long long __emul(int a, int b)
{
    long long retval;
    __asm__("imull %[b]" : "=A" (retval) : [a] "a" (a), [b] "rm" (b));
    return retval;
}
#endif

#if !HAS_BUILTIN(__emulu)
__INTRIN_INLINE unsigned long long __emulu(unsigned int a, unsigned int b)
{
    unsigned long long retval;
    __asm__("mull %[b]" : "=A" (retval) : [a] "a" (a), [b] "rm" (b));
    return retval;
}
#endif
#if !HAS_BUILTIN(__mulh)
__INTRIN_INLINE long long __mulh(long long a, long long b)
{
    long long retval;
    __asm__("imulq %[b]" : "=d" (retval) : [a] "a" (a), [b] "rm" (b));
    return retval;
}
#endif
#if !HAS_BUILTIN(__umulh)
__INTRIN_INLINE unsigned long long __umulh(unsigned long long a, unsigned long long b)
{
    unsigned long long retval;
    __asm__("mulq %[b]" : "=d" (retval) : [a] "a" (a), [b] "rm" (b));
    return retval;
}
#endif
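/* Port I/O intrinsics. */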
__INTRIN_INLINE unsigned char __inbyte(unsigned short Port)
{
    unsigned char byte;
    __asm__ __volatile__("inb %w[Port], %b[byte]" : [byte] "=a" (byte) : [Port] "Nd" (Port));
    return byte;
}

__INTRIN_INLINE unsigned short __inword(unsigned short Port)
{
    unsigned short word;
    __asm__ __volatile__("inw %w[Port], %w[word]" : [word] "=a" (word) : [Port] "Nd" (Port));
    return word;
}

__INTRIN_INLINE unsigned long __indword(unsigned short Port)
{
    unsigned long dword;
    __asm__ __volatile__("inl %w[Port], %k[dword]" : [dword] "=a" (dword) : [Port] "Nd" (Port));
    return dword;
}
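/* CPUID, time stamp counter, EFLAGS access and other system-level intrinsics. */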
__INTRIN_INLINE void __cpuid(int CPUInfo[4], int InfoType)
{
    __asm__ __volatile__("cpuid" : "=a" (CPUInfo[0]), "=b" (CPUInfo[1]), "=c" (CPUInfo[2]), "=d" (CPUInfo[3]) : "a" (InfoType));
}

__INTRIN_INLINE void __cpuidex(int CPUInfo[4], int InfoType, int ECXValue)
{
    __asm__ __volatile__("cpuid" : "=a" (CPUInfo[0]), "=b" (CPUInfo[1]), "=c" (CPUInfo[2]), "=d" (CPUInfo[3]) : "a" (InfoType), "c" (ECXValue));
}
#if !HAS_BUILTIN(__rdtsc)
__INTRIN_INLINE unsigned long long __rdtsc(void)
{
#ifdef __x86_64__
    unsigned long long low, high;
    __asm__ __volatile__("rdtsc" : "=a"(low), "=d"(high));
    return low | (high << 32);
#else
    unsigned long long retval;
    __asm__ __volatile__("rdtsc" : "=A"(retval));
    return retval;
#endif
}
#endif
__INTRIN_INLINE void __writeeflags(uintptr_t Value)
{
    __asm__ __volatile__("push %0\n popf" : : "rim"(Value));
}

__INTRIN_INLINE uintptr_t __readeflags(void)
{
    uintptr_t retval;
    __asm__ __volatile__("pushf\n pop %0" : "=rm"(retval));
    return retval;
}
#if !HAS_BUILTIN(__debugbreak)
__INTRIN_INLINE void __cdecl __debugbreak(void)
{
    __asm__("int $3");
}
#endif

#if !HAS_BUILTIN(__ud2)
__INTRIN_INLINE void __ud2(void)
{
    __asm__("ud2");
}
#endif

#if !HAS_BUILTIN(__int2c)
__INTRIN_INLINE void __int2c(void)
{
    __asm__("int $0x2c");
}
#endif
__INTRIN_INLINE void __cdecl _disable(void)
{
    __asm__("cli" : : : "memory");
}

__INTRIN_INLINE void __cdecl _enable(void)
{
    __asm__("sti" : : : "memory");
}

__INTRIN_INLINE void __halt(void)
{
    __asm__("hlt" : : : "memory");
}
#if !HAS_BUILTIN(__fastfail)
__declspec(noreturn) __INTRIN_INLINE void __fastfail(unsigned int Code)
{
    /* Raise the fast-fail interrupt with the failure code in ECX */
    __asm__("int $0x29" : : "c"(Code) : "memory");
    __builtin_unreachable();
}
#endif
    unsigned long long value;
    unsigned long long value;
    unsigned long long value;
    unsigned long long value;
    unsigned long long value;
    unsigned long value;
    unsigned long value;
    unsigned long value;
    unsigned long value;
    unsigned long long value;
__INTRIN_INLINE unsigned long long __readmsr(unsigned long reg)
{
#ifdef __x86_64__
    unsigned long low, high;
    __asm__ __volatile__("rdmsr" : "=a" (low), "=d" (high) : "c" (reg));
    return ((unsigned long long)high << 32) | low;
#else
    unsigned long long retval;
    __asm__ __volatile__("rdmsr" : "=A" (retval) : "c" (reg));
    return retval;
#endif
}

__INTRIN_INLINE void __writemsr(unsigned long Register, unsigned long long Value)
{
#ifdef __x86_64__
    __asm__ __volatile__("wrmsr" : : "a" (Value), "d" (Value >> 32), "c" (Register));
#else
    __asm__ __volatile__("wrmsr" : : "A" (Value), "c" (Register));
#endif
}
__INTRIN_INLINE unsigned long long __readpmc(unsigned long counter)
{
    unsigned long long retval;
    __asm__ __volatile__("rdpmc" : "=A" (retval) : "c" (counter));
    return retval;
}

__INTRIN_INLINE unsigned long __segmentlimit(unsigned long a)
{
    unsigned long retval;
    __asm__ __volatile__("lsl %[a], %[retval]" : [retval] "=r" (retval) : [a] "rm" (a));
    return retval;
}

__INTRIN_INLINE void __wbinvd(void)
{
    __asm__ __volatile__("wbinvd" : : : "memory");
}
#if !HAS_BUILTIN(_mm_pause)
__INTRIN_INLINE void _mm_pause(void)
{
    __asm__ __volatile__("pause" : : : "memory");
}
#endif

#endif /* KJK_INTRIN_X86_H_ */