/* ===-------- Intrin.h ---------------------------------------------------===
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 *
 *===-----------------------------------------------------------------------===
 */

/* Only include this if we're compiling for the windows platform. */
#ifndef _MSC_VER
#include_next <Intrin.h>
#else

#ifndef __INTRIN_H
#define __INTRIN_H

/* First include the standard intrinsics. */
#if defined(__i386__) || defined(__x86_64__)
#include <x86intrin.h>
#endif

/* For the definition of jmp_buf. */
#if __STDC_HOSTED__
#include <setjmp.h>
#endif

#ifdef __cplusplus
extern "C" {
#endif

#if defined(__MMX__)
/* And the random ones that aren't in those files. */
__m64 _m_from_float(float);
__m64 _m_from_int(int _l);
void _m_prefetch(void *);
float _m_to_float(__m64);
int _m_to_int(__m64 _M);
#endif

/* Other assorted instruction intrinsics. */
void __addfsbyte(unsigned long, unsigned char);
void __addfsdword(unsigned long, unsigned long);
void __addfsword(unsigned long, unsigned short);
void __code_seg(const char *);
static __inline__
void __cpuid(int[4], int);
static __inline__
void __cpuidex(int[4], int, int);
void __debugbreak(void);
__int64 __emul(int, int);
unsigned __int64 __emulu(unsigned int, unsigned int);
void __cdecl __fastfail(unsigned int);
unsigned int __getcallerseflags(void);
static __inline__
void __halt(void);
unsigned char __inbyte(unsigned short);
void __inbytestring(unsigned short, unsigned char *, unsigned long);
void __incfsbyte(unsigned long);
void __incfsdword(unsigned long);
void __incfsword(unsigned long);
unsigned long __indword(unsigned short);
void __indwordstring(unsigned short, unsigned long *, unsigned long);
void __int2c(void);
void __invlpg(void *);
unsigned short __inword(unsigned short);
void __inwordstring(unsigned short, unsigned short *, unsigned long);
void __lidt(void *);
unsigned __int64 __ll_lshift(unsigned __int64, int);
__int64 __ll_rshift(__int64, int);
void __llwpcb(void *);
unsigned char __lwpins32(unsigned int, unsigned int, unsigned int);
void __lwpval32(unsigned int, unsigned int, unsigned int);
unsigned int __lzcnt(unsigned int);
unsigned short __lzcnt16(unsigned short);
static __inline__
void __movsb(unsigned char *, unsigned char const *, size_t);
static __inline__
void __movsd(unsigned long *, unsigned long const *, size_t);
static __inline__
void __movsw(unsigned short *, unsigned short const *, size_t);
void __nop(void);
void __nvreg_restore_fence(void);
void __nvreg_save_fence(void);
void __outbyte(unsigned short, unsigned char);
void __outbytestring(unsigned short, unsigned char *, unsigned long);
void __outdword(unsigned short, unsigned long);
void __outdwordstring(unsigned short, unsigned long *, unsigned long);
void __outword(unsigned short, unsigned short);
void __outwordstring(unsigned short, unsigned short *, unsigned long);
static __inline__
unsigned int __popcnt(unsigned int);
static __inline__
unsigned short __popcnt16(unsigned short);
unsigned long __readcr0(void);
unsigned long __readcr2(void);
static __inline__
unsigned long __readcr3(void);
unsigned long __readcr4(void);
unsigned long __readcr8(void);
unsigned int __readdr(unsigned int);
#ifdef __i386__
static __inline__
unsigned char __readfsbyte(unsigned long);
static __inline__
unsigned long __readfsdword(unsigned long);
static __inline__
unsigned __int64 __readfsqword(unsigned long);
static __inline__
unsigned short __readfsword(unsigned long);
#endif
static __inline__
unsigned __int64 __readmsr(unsigned long);
unsigned __int64 __readpmc(unsigned long);
unsigned long __segmentlimit(unsigned long);
void __sidt(void *);
void *__slwpcb(void);
static __inline__
void __stosb(unsigned char *, unsigned char, size_t);
static __inline__
void __stosd(unsigned long *, unsigned long, size_t);
static __inline__
void __stosw(unsigned short *, unsigned short, size_t);
void __svm_clgi(void);
void __svm_invlpga(void *, int);
void __svm_skinit(int);
void __svm_stgi(void);
void __svm_vmload(size_t);
void __svm_vmrun(size_t);
void __svm_vmsave(size_t);
void __ud2(void);
unsigned __int64 __ull_rshift(unsigned __int64, int);
void __vmx_off(void);
void __vmx_vmptrst(unsigned __int64 *);
void __wbinvd(void);
void __writecr0(unsigned int);
static __inline__
void __writecr3(unsigned int);
void __writecr4(unsigned int);
void __writecr8(unsigned int);
void __writedr(unsigned int, unsigned int);
void __writefsbyte(unsigned long, unsigned char);
void __writefsdword(unsigned long, unsigned long);
void __writefsqword(unsigned long, unsigned __int64);
void __writefsword(unsigned long, unsigned short);
void __writemsr(unsigned long, unsigned __int64);
static __inline__
void *_AddressOfReturnAddress(void);
static __inline__
unsigned char _BitScanForward(unsigned long *_Index, unsigned long _Mask);
static __inline__
unsigned char _BitScanReverse(unsigned long *_Index, unsigned long _Mask);
static __inline__
unsigned char _bittest(long const *, long);
static __inline__
unsigned char _bittestandcomplement(long *, long);
static __inline__
unsigned char _bittestandreset(long *, long);
static __inline__
unsigned char _bittestandset(long *, long);
unsigned __int64 __cdecl _byteswap_uint64(unsigned __int64);
unsigned long __cdecl _byteswap_ulong(unsigned long);
unsigned short __cdecl _byteswap_ushort(unsigned short);
void __cdecl _disable(void);
void __cdecl _enable(void);
void __cdecl _fxrstor(void const *);
void __cdecl _fxsave(void *);
long _InterlockedAddLargeStatistic(__int64 volatile *_Addend, long _Value);
static __inline__
long _InterlockedAnd(long volatile *_Value, long _Mask);
static __inline__
short _InterlockedAnd16(short volatile *_Value, short _Mask);
static __inline__
char _InterlockedAnd8(char volatile *_Value, char _Mask);
unsigned char _interlockedbittestandreset(long volatile *, long);
static __inline__
unsigned char _interlockedbittestandset(long volatile *, long);
static __inline__
long __cdecl _InterlockedCompareExchange(long volatile *_Destination,
                                         long _Exchange, long _Comparand);
long _InterlockedCompareExchange_HLEAcquire(long volatile *, long, long);
long _InterlockedCompareExchange_HLERelease(long volatile *, long, long);
static __inline__
short _InterlockedCompareExchange16(short volatile *_Destination,
                                    short _Exchange, short _Comparand);
static __inline__
__int64 _InterlockedCompareExchange64(__int64 volatile *_Destination,
                                      __int64 _Exchange, __int64 _Comparand);
__int64 _InterlockedCompareExchange64_HLEAcquire(__int64 volatile *, __int64,
                                                 __int64);
__int64 _InterlockedCompareExchange64_HLERelease(__int64 volatile *, __int64,
                                                 __int64);
static __inline__
char _InterlockedCompareExchange8(char volatile *_Destination, char _Exchange,
                                  char _Comparand);
void *_InterlockedCompareExchangePointer_HLEAcquire(void *volatile *, void *,
                                                    void *);
void *_InterlockedCompareExchangePointer_HLERelease(void *volatile *, void *,
                                                    void *);
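/* Note (added for clarity, not part of the original header): the _HLEAcquire
 * and _HLERelease variants correspond to Intel TSX Hardware Lock Elision.
 * They behave like the plain interlocked operation, but additionally carry an
 * XACQUIRE/XRELEASE hint on processors that support lock elision. */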
static __inline__
long __cdecl _InterlockedDecrement(long volatile *_Addend);
static __inline__
short _InterlockedDecrement16(short volatile *_Addend);
long _InterlockedExchange(long volatile *_Target, long _Value);
static __inline__
short _InterlockedExchange16(short volatile *_Target, short _Value);
static __inline__
char _InterlockedExchange8(char volatile *_Target, char _Value);
static __inline__
long __cdecl _InterlockedExchangeAdd(long volatile *_Addend, long _Value);
long _InterlockedExchangeAdd_HLEAcquire(long volatile *, long);
long _InterlockedExchangeAdd_HLERelease(long volatile *, long);
static __inline__
short _InterlockedExchangeAdd16(short volatile *_Addend, short _Value);
__int64 _InterlockedExchangeAdd64_HLEAcquire(__int64 volatile *, __int64);
__int64 _InterlockedExchangeAdd64_HLERelease(__int64 volatile *, __int64);
static __inline__
char _InterlockedExchangeAdd8(char volatile *_Addend, char _Value);
static __inline__
long __cdecl _InterlockedIncrement(long volatile *_Addend);
static __inline__
short _InterlockedIncrement16(short volatile *_Addend);
static __inline__
long _InterlockedOr(long volatile *_Value, long _Mask);
static __inline__
short _InterlockedOr16(short volatile *_Value, short _Mask);
static __inline__
char _InterlockedOr8(char volatile *_Value, char _Mask);
static __inline__
long _InterlockedXor(long volatile *_Value, long _Mask);
static __inline__
short _InterlockedXor16(short volatile *_Value, short _Mask);
static __inline__
char _InterlockedXor8(char volatile *_Value, char _Mask);
void __cdecl _invpcid(unsigned int, void *);
static __inline__
unsigned long __cdecl _lrotl(unsigned long, int);
static __inline__
unsigned long __cdecl _lrotr(unsigned long, int);
static __inline__
void _ReadBarrier(void);
static __inline__
void _ReadWriteBarrier(void);
static __inline__
void *_ReturnAddress(void);
unsigned int _rorx_u32(unsigned int, const unsigned int);
static __inline__
unsigned int __cdecl _rotl(unsigned int _Value, int _Shift);
static __inline__
unsigned short _rotl16(unsigned short _Value, unsigned char _Shift);
static __inline__
unsigned __int64 __cdecl _rotl64(unsigned __int64 _Value, int _Shift);
static __inline__
unsigned char _rotl8(unsigned char _Value, unsigned char _Shift);
static __inline__
unsigned int __cdecl _rotr(unsigned int _Value, int _Shift);
static __inline__
unsigned short _rotr16(unsigned short _Value, unsigned char _Shift);
static __inline__
unsigned __int64 __cdecl _rotr64(unsigned __int64 _Value, int _Shift);
static __inline__
unsigned char _rotr8(unsigned char _Value, unsigned char _Shift);
int _sarx_i32(int, unsigned int);
#if __STDC_HOSTED__
int __cdecl _setjmp(jmp_buf);
#endif
unsigned int _shlx_u32(unsigned int, unsigned int);
unsigned int _shrx_u32(unsigned int, unsigned int);
void _Store_HLERelease(long volatile *, long);
void _Store64_HLERelease(__int64 volatile *, __int64);
void _StorePointer_HLERelease(void *volatile *, void *);
static __inline__
void _WriteBarrier(void);
unsigned __int32 _xbegin(void);
void _xend(void);
static __inline__
unsigned __int64 __cdecl _xgetbv(unsigned int);
void __cdecl _xrstor(void const *, unsigned __int64);
void __cdecl _xsave(void *, unsigned __int64);
void __cdecl _xsaveopt(void *, unsigned __int64);
void __cdecl _xsetbv(unsigned int, unsigned __int64);

/* These additional intrinsics are turned on in x64/amd64/x86_64 mode. */
#ifdef __x86_64__
void __addgsbyte(unsigned long, unsigned char);
void __addgsdword(unsigned long, unsigned long);
void __addgsqword(unsigned long, unsigned __int64);
void __addgsword(unsigned long, unsigned short);
static __inline__
void __faststorefence(void);
void __incgsbyte(unsigned long);
void __incgsdword(unsigned long);
void __incgsqword(unsigned long);
void __incgsword(unsigned long);
unsigned char __lwpins64(unsigned __int64, unsigned int, unsigned int);
void __lwpval64(unsigned __int64, unsigned int, unsigned int);
unsigned __int64 __lzcnt64(unsigned __int64);
static __inline__
void __movsq(unsigned long long *, unsigned long long const *, size_t);
__int64 __mulh(__int64, __int64);
static __inline__
unsigned __int64 __popcnt64(unsigned __int64);
static __inline__
unsigned char __readgsbyte(unsigned long);
static __inline__
unsigned long __readgsdword(unsigned long);
static __inline__
unsigned __int64 __readgsqword(unsigned long);
static __inline__
unsigned short __readgsword(unsigned long);
unsigned __int64 __shiftleft128(unsigned __int64 _LowPart,
                                unsigned __int64 _HighPart,
                                unsigned char _Shift);
unsigned __int64 __shiftright128(unsigned __int64 _LowPart,
                                 unsigned __int64 _HighPart,
                                 unsigned char _Shift);
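/* Illustrative note (not in the original header): __shiftleft128 shifts the
 * 128-bit value _HighPart:_LowPart left by _Shift (taken modulo 64, as SHLD
 * does) and returns the new high 64 bits; __shiftright128 shifts right and
 * returns the new low 64 bits. For example, __shiftleft128(0, 1, 4) == 0x10. */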
static __inline__
void __stosq(unsigned __int64 *, unsigned __int64, size_t);
unsigned char __vmx_on(unsigned __int64 *);
unsigned char __vmx_vmclear(unsigned __int64 *);
unsigned char __vmx_vmlaunch(void);
unsigned char __vmx_vmptrld(unsigned __int64 *);
unsigned char __vmx_vmread(size_t, size_t *);
unsigned char __vmx_vmresume(void);
unsigned char __vmx_vmwrite(size_t, size_t);
void __writegsbyte(unsigned long, unsigned char);
void __writegsdword(unsigned long, unsigned long);
void __writegsqword(unsigned long, unsigned __int64);
void __writegsword(unsigned long, unsigned short);
static __inline__
unsigned char _BitScanForward64(unsigned long *_Index, unsigned __int64 _Mask);
static __inline__
unsigned char _BitScanReverse64(unsigned long *_Index, unsigned __int64 _Mask);
static __inline__
unsigned char _bittest64(__int64 const *, __int64);
static __inline__
unsigned char _bittestandcomplement64(__int64 *, __int64);
static __inline__
unsigned char _bittestandreset64(__int64 *, __int64);
static __inline__
unsigned char _bittestandset64(__int64 *, __int64);
unsigned __int64 __cdecl _byteswap_uint64(unsigned __int64);
void __cdecl _fxrstor64(void const *);
void __cdecl _fxsave64(void *);
long _InterlockedAnd_np(long volatile *_Value, long _Mask);
short _InterlockedAnd16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedAnd64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedAnd8_np(char volatile *_Value, char _Mask);
unsigned char _interlockedbittestandreset64(__int64 volatile *, __int64);
static __inline__
unsigned char _interlockedbittestandset64(__int64 volatile *, __int64);
long _InterlockedCompareExchange_np(long volatile *_Destination, long _Exchange,
                                    long _Comparand);
unsigned char _InterlockedCompareExchange128(__int64 volatile *_Destination,
                                             __int64 _ExchangeHigh,
                                             __int64 _ExchangeLow,
                                             __int64 *_ComparandResult);
unsigned char _InterlockedCompareExchange128_np(__int64 volatile *_Destination,
                                                __int64 _ExchangeHigh,
                                                __int64 _ExchangeLow,
                                                __int64 *_ComparandResult);
short _InterlockedCompareExchange16_np(short volatile *_Destination,
                                       short _Exchange, short _Comparand);
__int64 _InterlockedCompareExchange64_HLEAcquire(__int64 volatile *, __int64,
                                                 __int64);
__int64 _InterlockedCompareExchange64_HLERelease(__int64 volatile *, __int64,
                                                 __int64);
__int64 _InterlockedCompareExchange64_np(__int64 volatile *_Destination,
                                         __int64 _Exchange, __int64 _Comparand);
void *_InterlockedCompareExchangePointer(void *volatile *_Destination,
                                         void *_Exchange, void *_Comparand);
void *_InterlockedCompareExchangePointer_np(void *volatile *_Destination,
                                            void *_Exchange, void *_Comparand);
static __inline__
__int64 _InterlockedDecrement64(__int64 volatile *_Addend);
static __inline__
__int64 _InterlockedExchange64(__int64 volatile *_Target, __int64 _Value);
static __inline__
__int64 _InterlockedExchangeAdd64(__int64 volatile *_Addend, __int64 _Value);
void *_InterlockedExchangePointer(void *volatile *_Target, void *_Value);
static __inline__
__int64 _InterlockedIncrement64(__int64 volatile *_Addend);
long _InterlockedOr_np(long volatile *_Value, long _Mask);
short _InterlockedOr16_np(short volatile *_Value, short _Mask);
static __inline__
__int64 _InterlockedOr64(__int64 volatile *_Value, __int64 _Mask);
__int64 _InterlockedOr64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedOr8_np(char volatile *_Value, char _Mask);
long _InterlockedXor_np(long volatile *_Value, long _Mask);
short _InterlockedXor16_np(short volatile *_Value, short _Mask);
static __inline__
__int64 _InterlockedXor64(__int64 volatile *_Value, __int64 _Mask);
__int64 _InterlockedXor64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedXor8_np(char volatile *_Value, char _Mask);
static __inline__
__int64 _mul128(__int64 _Multiplier, __int64 _Multiplicand,
                __int64 *_HighProduct);
unsigned __int64 _rorx_u64(unsigned __int64, const unsigned int);
__int64 _sarx_i64(__int64, unsigned int);
#if __STDC_HOSTED__
int __cdecl _setjmpex(jmp_buf);
#endif
unsigned __int64 _shlx_u64(unsigned __int64, unsigned int);
unsigned __int64 _shrx_u64(unsigned __int64, unsigned int);
/*
 * Multiply two 64-bit integers and obtain a 128-bit result.
 * The low half is returned directly and the high half is in an out parameter.
 */
static __inline__ unsigned __int64 __attribute__((__always_inline__, __nodebug__))
_umul128(unsigned __int64 _Multiplier, unsigned __int64 _Multiplicand,
         unsigned __int64 *_HighProduct) {
  unsigned __int128 _FullProduct =
      (unsigned __int128)_Multiplier * (unsigned __int128)_Multiplicand;
  *_HighProduct = _FullProduct >> 64;
  return _FullProduct;
}
static __inline__ unsigned __int64 __attribute__((__always_inline__, __nodebug__))
__umulh(unsigned __int64 _Multiplier, unsigned __int64 _Multiplicand) {
  unsigned __int128 _FullProduct =
      (unsigned __int128)_Multiplier * (unsigned __int128)_Multiplicand;
  return _FullProduct >> 64;
}
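/* Worked example (illustrative, not part of the original header):
 *   unsigned __int64 __hi;
 *   unsigned __int64 __lo = _umul128(0xFFFFFFFFFFFFFFFFULL, 2, &__hi);
 * yields __lo == 0xFFFFFFFFFFFFFFFEULL and __hi == 1, since the full 128-bit
 * product is 0x1FFFFFFFFFFFFFFFE. __umulh returns only the high half. */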
void __cdecl _xrstor64(void const *, unsigned __int64);
void __cdecl _xsave64(void *, unsigned __int64);
void __cdecl _xsaveopt64(void *, unsigned __int64);

#endif /* __x86_64__ */

/*----------------------------------------------------------------------------*\
|* Bit Twiddling
\*----------------------------------------------------------------------------*/
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
_rotl8(unsigned char _Value, unsigned char _Shift) {
  _Shift &= 0x7;
  return _Shift ? (_Value << _Shift) | (_Value >> (8 - _Shift)) : _Value;
}
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
_rotr8(unsigned char _Value, unsigned char _Shift) {
  _Shift &= 0x7;
  return _Shift ? (_Value >> _Shift) | (_Value << (8 - _Shift)) : _Value;
}
static __inline__ unsigned short __attribute__((__always_inline__, __nodebug__))
_rotl16(unsigned short _Value, unsigned char _Shift) {
  _Shift &= 0xf;
  return _Shift ? (_Value << _Shift) | (_Value >> (16 - _Shift)) : _Value;
}
static __inline__ unsigned short __attribute__((__always_inline__, __nodebug__))
_rotr16(unsigned short _Value, unsigned char _Shift) {
  _Shift &= 0xf;
  return _Shift ? (_Value >> _Shift) | (_Value << (16 - _Shift)) : _Value;
}
static __inline__ unsigned int __attribute__((__always_inline__, __nodebug__))
_rotl(unsigned int _Value, int _Shift) {
  _Shift &= 0x1f;
  return _Shift ? (_Value << _Shift) | (_Value >> (32 - _Shift)) : _Value;
}
static __inline__ unsigned int __attribute__((__always_inline__, __nodebug__))
_rotr(unsigned int _Value, int _Shift) {
  _Shift &= 0x1f;
  return _Shift ? (_Value >> _Shift) | (_Value << (32 - _Shift)) : _Value;
}
static __inline__ unsigned long __attribute__((__always_inline__, __nodebug__))
_lrotl(unsigned long _Value, int _Shift) {
  _Shift &= 0x1f;
  return _Shift ? (_Value << _Shift) | (_Value >> (32 - _Shift)) : _Value;
}
static __inline__ unsigned long __attribute__((__always_inline__, __nodebug__))
_lrotr(unsigned long _Value, int _Shift) {
  _Shift &= 0x1f;
  return _Shift ? (_Value >> _Shift) | (_Value << (32 - _Shift)) : _Value;
}
static __inline__ unsigned __int64 __attribute__((__always_inline__, __nodebug__))
_rotl64(unsigned __int64 _Value, int _Shift) {
  _Shift &= 0x3f;
  return _Shift ? (_Value << _Shift) | (_Value >> (64 - _Shift)) : _Value;
}
static __inline__ unsigned __int64 __attribute__((__always_inline__, __nodebug__))
_rotr64(unsigned __int64 _Value, int _Shift) {
  _Shift &= 0x3f;
  return _Shift ? (_Value >> _Shift) | (_Value << (64 - _Shift)) : _Value;
}
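/* Worked example (illustrative, not part of the original header): the shift
 * count is masked to the operand width, and a zero count returns the value
 * unchanged, avoiding the undefined full-width shift in C:
 *   _rotl8(0x81, 1)      == 0x03
 *   _rotr(0x00000001, 1) == 0x80000000 */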
/*----------------------------------------------------------------------------*\
|* Bit Counting and Testing
\*----------------------------------------------------------------------------*/
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
_BitScanForward(unsigned long *_Index, unsigned long _Mask) {
  if (!_Mask)
    return 0;
  *_Index = __builtin_ctzl(_Mask);
  return 1;
}
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
_BitScanReverse(unsigned long *_Index, unsigned long _Mask) {
  if (!_Mask)
    return 0;
  *_Index = 31 - __builtin_clzl(_Mask);
  return 1;
}
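/* Usage sketch (illustrative, not part of the original header): the return
 * value says whether any bit was set; the index is only valid when it is 1.
 *   unsigned long __idx;
 *   if (_BitScanForward(&__idx, 0x10))  // __idx == 4, lowest set bit
 *     ...
 *   if (_BitScanReverse(&__idx, 0x30))  // __idx == 5, highest set bit
 *     ...
 */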
static __inline__ unsigned short __attribute__((__always_inline__, __nodebug__))
__popcnt16(unsigned short value) {
  return __builtin_popcount((int)value);
}
static __inline__ unsigned int __attribute__((__always_inline__, __nodebug__))
__popcnt(unsigned int value) {
  return __builtin_popcount(value);
}
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
_bittest(long const *a, long b) {
  return (*a >> b) & 1;
}
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
_bittestandcomplement(long *a, long b) {
  unsigned char x = (*a >> b) & 1;
  *a = *a ^ (1 << b);
  return x;
}
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
_bittestandreset(long *a, long b) {
  unsigned char x = (*a >> b) & 1;
  *a = *a & ~(1 << b);
  return x;
}
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
_bittestandset(long *a, long b) {
  unsigned char x = (*a >> b) & 1;
  *a = *a | (1 << b);
  return x;
}
#if defined(__i386__) || defined(__x86_64__)
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
_interlockedbittestandset(long volatile *__BitBase, long __BitPos) {
  unsigned char __Res;
  __asm__ ("xor %0, %0\n"
           "lock bts %2, %1\n"
           "setc %0\n"
           : "=r" (__Res), "+m"(*__BitBase)
           : "Ir"(__BitPos));
  return __Res;
}
#endif
#ifdef __x86_64__
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
_BitScanForward64(unsigned long *_Index, unsigned __int64 _Mask) {
  if (!_Mask)
    return 0;
  *_Index = __builtin_ctzll(_Mask);
  return 1;
}
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
_BitScanReverse64(unsigned long *_Index, unsigned __int64 _Mask) {
  if (!_Mask)
    return 0;
  *_Index = 63 - __builtin_clzll(_Mask);
  return 1;
}
static __inline__
unsigned __int64 __attribute__((__always_inline__, __nodebug__))
__popcnt64(unsigned __int64 value) {
  return __builtin_popcountll(value);
}
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
_bittest64(__int64 const *a, __int64 b) {
  return (*a >> b) & 1;
}
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
_bittestandcomplement64(__int64 *a, __int64 b) {
  unsigned char x = (*a >> b) & 1;
  *a = *a ^ (1ll << b);
  return x;
}
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
_bittestandreset64(__int64 *a, __int64 b) {
  unsigned char x = (*a >> b) & 1;
  *a = *a & ~(1ll << b);
  return x;
}
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
_bittestandset64(__int64 *a, __int64 b) {
  unsigned char x = (*a >> b) & 1;
  *a = *a | (1ll << b);
  return x;
}
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
_interlockedbittestandset64(__int64 volatile *__BitBase, __int64 __BitPos) {
  unsigned char __Res;
  __asm__ ("xor %0, %0\n"
           "lock bts %2, %1\n"
           "setc %0\n"
           : "=r" (__Res), "+m"(*__BitBase)
           : "Ir"(__BitPos));
  return __Res;
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Exchange Add
\*----------------------------------------------------------------------------*/
static __inline__ char __attribute__((__always_inline__, __nodebug__))
_InterlockedExchangeAdd8(char volatile *_Addend, char _Value) {
  return __atomic_add_fetch(_Addend, _Value, 0) - _Value;
}
static __inline__ short __attribute__((__always_inline__, __nodebug__))
_InterlockedExchangeAdd16(short volatile *_Addend, short _Value) {
  return __atomic_add_fetch(_Addend, _Value, 0) - _Value;
}
#ifdef __x86_64__
static __inline__ __int64 __attribute__((__always_inline__, __nodebug__))
_InterlockedExchangeAdd64(__int64 volatile *_Addend, __int64 _Value) {
  return __atomic_add_fetch(_Addend, _Value, 0) - _Value;
}
#endif
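/* Note (added for clarity, not part of the original header): the interlocked
 * exchange-add family returns the value the addend held *before* the
 * addition, which is why the implementations subtract _Value from the
 * __atomic_add_fetch result. E.g. with *__p == 5,
 * _InterlockedExchangeAdd8(__p, 3) returns 5 and leaves *__p == 8. */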
/*----------------------------------------------------------------------------*\
|* Interlocked Exchange Sub
\*----------------------------------------------------------------------------*/
static __inline__ char __attribute__((__always_inline__, __nodebug__))
_InterlockedExchangeSub8(char volatile *_Subend, char _Value) {
  return __atomic_sub_fetch(_Subend, _Value, 0) + _Value;
}
static __inline__ short __attribute__((__always_inline__, __nodebug__))
_InterlockedExchangeSub16(short volatile *_Subend, short _Value) {
  return __atomic_sub_fetch(_Subend, _Value, 0) + _Value;
}
static __inline__ long __attribute__((__always_inline__, __nodebug__))
_InterlockedExchangeSub(long volatile *_Subend, long _Value) {
  return __atomic_sub_fetch(_Subend, _Value, 0) + _Value;
}
#ifdef __x86_64__
static __inline__ __int64 __attribute__((__always_inline__, __nodebug__))
_InterlockedExchangeSub64(__int64 volatile *_Subend, __int64 _Value) {
  return __atomic_sub_fetch(_Subend, _Value, 0) + _Value;
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Increment
\*----------------------------------------------------------------------------*/
static __inline__ short __attribute__((__always_inline__, __nodebug__))
_InterlockedIncrement16(short volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, 0);
}
#ifdef __x86_64__
static __inline__ __int64 __attribute__((__always_inline__, __nodebug__))
_InterlockedIncrement64(__int64 volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, 0);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Decrement
\*----------------------------------------------------------------------------*/
static __inline__ short __attribute__((__always_inline__, __nodebug__))
_InterlockedDecrement16(short volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, 0);
}
#ifdef __x86_64__
static __inline__ __int64 __attribute__((__always_inline__, __nodebug__))
_InterlockedDecrement64(__int64 volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, 0);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked And
\*----------------------------------------------------------------------------*/
static __inline__ char __attribute__((__always_inline__, __nodebug__))
_InterlockedAnd8(char volatile *_Value, char _Mask) {
  return __atomic_and_fetch(_Value, _Mask, 0);
}
static __inline__ short __attribute__((__always_inline__, __nodebug__))
_InterlockedAnd16(short volatile *_Value, short _Mask) {
  return __atomic_and_fetch(_Value, _Mask, 0);
}
static __inline__ long __attribute__((__always_inline__, __nodebug__))
_InterlockedAnd(long volatile *_Value, long _Mask) {
  return __atomic_and_fetch(_Value, _Mask, 0);
}
#ifdef __x86_64__
static __inline__ __int64 __attribute__((__always_inline__, __nodebug__))
_InterlockedAnd64(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_and_fetch(_Value, _Mask, 0);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Or
\*----------------------------------------------------------------------------*/
static __inline__ char __attribute__((__always_inline__, __nodebug__))
_InterlockedOr8(char volatile *_Value, char _Mask) {
  return __atomic_or_fetch(_Value, _Mask, 0);
}
static __inline__ short __attribute__((__always_inline__, __nodebug__))
_InterlockedOr16(short volatile *_Value, short _Mask) {
  return __atomic_or_fetch(_Value, _Mask, 0);
}
static __inline__ long __attribute__((__always_inline__, __nodebug__))
_InterlockedOr(long volatile *_Value, long _Mask) {
  return __atomic_or_fetch(_Value, _Mask, 0);
}
#ifdef __x86_64__
static __inline__ __int64 __attribute__((__always_inline__, __nodebug__))
_InterlockedOr64(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_or_fetch(_Value, _Mask, 0);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Xor
\*----------------------------------------------------------------------------*/
static __inline__ char __attribute__((__always_inline__, __nodebug__))
_InterlockedXor8(char volatile *_Value, char _Mask) {
  return __atomic_xor_fetch(_Value, _Mask, 0);
}
static __inline__ short __attribute__((__always_inline__, __nodebug__))
_InterlockedXor16(short volatile *_Value, short _Mask) {
  return __atomic_xor_fetch(_Value, _Mask, 0);
}
static __inline__ long __attribute__((__always_inline__, __nodebug__))
_InterlockedXor(long volatile *_Value, long _Mask) {
  return __atomic_xor_fetch(_Value, _Mask, 0);
}
#ifdef __x86_64__
static __inline__ __int64 __attribute__((__always_inline__, __nodebug__))
_InterlockedXor64(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_xor_fetch(_Value, _Mask, 0);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Exchange
\*----------------------------------------------------------------------------*/
static __inline__ char __attribute__((__always_inline__, __nodebug__))
_InterlockedExchange8(char volatile *_Target, char _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, 0);
  return _Value;
}
static __inline__ short __attribute__((__always_inline__, __nodebug__))
_InterlockedExchange16(short volatile *_Target, short _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, 0);
  return _Value;
}
#ifdef __x86_64__
static __inline__ __int64 __attribute__((__always_inline__, __nodebug__))
_InterlockedExchange64(__int64 volatile *_Target, __int64 _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, 0);
  return _Value;
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Compare Exchange
\*----------------------------------------------------------------------------*/
static __inline__ char __attribute__((__always_inline__, __nodebug__))
_InterlockedCompareExchange8(char volatile *_Destination,
                             char _Exchange, char _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0, 0, 0);
  return _Comparand;
}
static __inline__ short __attribute__((__always_inline__, __nodebug__))
_InterlockedCompareExchange16(short volatile *_Destination,
                              short _Exchange, short _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0, 0, 0);
  return _Comparand;
}
static __inline__ __int64 __attribute__((__always_inline__, __nodebug__))
_InterlockedCompareExchange64(__int64 volatile *_Destination,
                              __int64 _Exchange, __int64 _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0, 0, 0);
  return _Comparand;
}
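/* Usage sketch (illustrative, not part of the original header):
 * compare-exchange returns the value previously stored at _Destination;
 * the swap happened iff that value equals _Comparand:
 *   short __old = _InterlockedCompareExchange16(&__x, __new, __expected);
 *   if (__old == __expected) { ... swap succeeded ... } */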
/*----------------------------------------------------------------------------*\
|* Barriers
\*----------------------------------------------------------------------------*/
#if defined(__i386__) || defined(__x86_64__)
static __inline__ void __attribute__((__always_inline__, __nodebug__))
__attribute__((deprecated("use other intrinsics or C++11 atomics instead")))
_ReadWriteBarrier(void) {
  __asm__ volatile ("" : : : "memory");
}
static __inline__ void __attribute__((__always_inline__, __nodebug__))
__attribute__((deprecated("use other intrinsics or C++11 atomics instead")))
_ReadBarrier(void) {
  __asm__ volatile ("" : : : "memory");
}
static __inline__ void __attribute__((__always_inline__, __nodebug__))
__attribute__((deprecated("use other intrinsics or C++11 atomics instead")))
_WriteBarrier(void) {
  __asm__ volatile ("" : : : "memory");
}
#endif
#ifdef __x86_64__
static __inline__ void __attribute__((__always_inline__, __nodebug__))
__faststorefence(void) {
  __asm__ volatile("lock orq $0, (%%rsp)" : : : "memory");
}
#endif
/*----------------------------------------------------------------------------*\
|* readfs, readgs
|* (Pointers in address space #256 and #257 are relative to the GS and FS
|* segment registers, respectively.)
\*----------------------------------------------------------------------------*/
#define __ptr_to_addr_space(__addr_space_nbr, __type, __offset)              \
    ((volatile __type __attribute__((__address_space__(__addr_space_nbr)))*) \
    (__offset))

#ifdef __i386__
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
__readfsbyte(unsigned long __offset) {
  return *__ptr_to_addr_space(257, unsigned char, __offset);
}
static __inline__ unsigned __int64 __attribute__((__always_inline__, __nodebug__))
__readfsqword(unsigned long __offset) {
  return *__ptr_to_addr_space(257, unsigned __int64, __offset);
}
static __inline__ unsigned short __attribute__((__always_inline__, __nodebug__))
__readfsword(unsigned long __offset) {
  return *__ptr_to_addr_space(257, unsigned short, __offset);
}
#endif
#ifdef __x86_64__
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
__readgsbyte(unsigned long __offset) {
  return *__ptr_to_addr_space(256, unsigned char, __offset);
}
static __inline__ unsigned long __attribute__((__always_inline__, __nodebug__))
__readgsdword(unsigned long __offset) {
  return *__ptr_to_addr_space(256, unsigned long, __offset);
}
static __inline__ unsigned __int64 __attribute__((__always_inline__, __nodebug__))
__readgsqword(unsigned long __offset) {
  return *__ptr_to_addr_space(256, unsigned __int64, __offset);
}
static __inline__ unsigned short __attribute__((__always_inline__, __nodebug__))
__readgsword(unsigned long __offset) {
  return *__ptr_to_addr_space(256, unsigned short, __offset);
}
#endif
#undef __ptr_to_addr_space
/*----------------------------------------------------------------------------*\
|* movs, stos
\*----------------------------------------------------------------------------*/
#if defined(__i386__) || defined(__x86_64__)
static __inline__ void __attribute__((__always_inline__, __nodebug__))
__movsb(unsigned char *__dst, unsigned char const *__src, size_t __n) {
  __asm__("rep movsb" : : "D"(__dst), "S"(__src), "c"(__n)
          : "%edi", "%esi", "%ecx");
}
static __inline__ void __attribute__((__always_inline__, __nodebug__))
__movsd(unsigned long *__dst, unsigned long const *__src, size_t __n) {
  __asm__("rep movsl" : : "D"(__dst), "S"(__src), "c"(__n)
          : "%edi", "%esi", "%ecx");
}
static __inline__ void __attribute__((__always_inline__, __nodebug__))
__movsw(unsigned short *__dst, unsigned short const *__src, size_t __n) {
  __asm__("rep movsw" : : "D"(__dst), "S"(__src), "c"(__n)
          : "%edi", "%esi", "%ecx");
}
static __inline__ void __attribute__((__always_inline__, __nodebug__))
__stosb(unsigned char *__dst, unsigned char __x, size_t __n) {
  __asm__("rep stosb" : : "D"(__dst), "a"(__x), "c"(__n)
          : "%edi", "%ecx");
}
static __inline__ void __attribute__((__always_inline__, __nodebug__))
__stosd(unsigned long *__dst, unsigned long __x, size_t __n) {
  __asm__("rep stosl" : : "D"(__dst), "a"(__x), "c"(__n)
          : "%edi", "%ecx");
}
static __inline__ void __attribute__((__always_inline__, __nodebug__))
__stosw(unsigned short *__dst, unsigned short __x, size_t __n) {
  __asm__("rep stosw" : : "D"(__dst), "a"(__x), "c"(__n)
          : "%edi", "%ecx");
}
#endif
#ifdef __x86_64__
static __inline__ void __attribute__((__always_inline__, __nodebug__))
__movsq(unsigned long long *__dst, unsigned long long const *__src, size_t __n) {
  __asm__("rep movsq" : : "D"(__dst), "S"(__src), "c"(__n)
          : "%edi", "%esi", "%ecx");
}
static __inline__ void __attribute__((__always_inline__, __nodebug__))
__stosq(unsigned __int64 *__dst, unsigned __int64 __x, size_t __n) {
  __asm__("rep stosq" : : "D"(__dst), "a"(__x), "c"(__n)
          : "%edi", "%ecx");
}
#endif
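/* Usage sketch (illustrative, not part of the original header): these map to
 * the x86 rep string instructions, and the counts are in elements, not bytes.
 * __stosb(__buf, 0, __n) behaves like memset(__buf, 0, __n), while
 * __movsd(__dst, __src, __n) copies __n 32-bit longs, i.e. 4 * __n bytes. */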
/*----------------------------------------------------------------------------*\
|* Misc
\*----------------------------------------------------------------------------*/
static __inline__ void * __attribute__((__always_inline__, __nodebug__))
_AddressOfReturnAddress(void) {
  return (void*)((char*)__builtin_frame_address(0) + sizeof(void*));
}
static __inline__ void * __attribute__((__always_inline__, __nodebug__))
_ReturnAddress(void) {
  return __builtin_return_address(0);
}
#if defined(__i386__) || defined(__x86_64__)
static __inline__ void __attribute__((__always_inline__, __nodebug__))
__cpuid(int __info[4], int __level) {
  __asm__ ("cpuid" : "=a"(__info[0]), "=b"(__info[1]), "=c"(__info[2]), "=d"(__info[3])
           : "a"(__level));
}
static __inline__ void __attribute__((__always_inline__, __nodebug__))
__cpuidex(int __info[4], int __level, int __ecx) {
  __asm__ ("cpuid" : "=a"(__info[0]), "=b"(__info[1]), "=c"(__info[2]), "=d"(__info[3])
           : "a"(__level), "c"(__ecx));
}
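/* Usage sketch (illustrative, not part of the original header):
 *   int __info[4];
 *   __cpuid(__info, 0);      // leaf 0: max leaf in __info[0]; vendor string
 *                            // in __info[1], __info[3], __info[2] (EBX,EDX,ECX)
 *   __cpuidex(__info, 7, 0); // leaf 7, subleaf 0: extended feature flags */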
static __inline__ unsigned __int64 __cdecl __attribute__((__always_inline__, __nodebug__))
_xgetbv(unsigned int __xcr_no) {
  unsigned int __eax, __edx;
  __asm__ ("xgetbv" : "=a" (__eax), "=d" (__edx) : "c" (__xcr_no));
  return ((unsigned __int64)__edx << 32) | __eax;
}
static __inline__ void __attribute__((__always_inline__, __nodebug__))
__halt(void) {
  __asm__ volatile ("hlt");
}
#endif

/*----------------------------------------------------------------------------*\
|* Privileged intrinsics
\*----------------------------------------------------------------------------*/
#if defined(__i386__) || defined(__x86_64__)
static __inline__ unsigned __int64 __attribute__((__always_inline__, __nodebug__))
__readmsr(unsigned long __register) {
  // Loads the contents of a 64-bit model specific register (MSR) specified in
  // the ECX register into registers EDX:EAX. The EDX register is loaded with
  // the high-order 32 bits of the MSR and the EAX register is loaded with the
  // low-order 32 bits. If less than 64 bits are implemented in the MSR being
  // read, the values returned to EDX:EAX in unimplemented bit locations are
  // undefined.
  unsigned long __edx;
  unsigned long __eax;
  __asm__ ("rdmsr" : "=d"(__edx), "=a"(__eax) : "c"(__register));
  return (((unsigned __int64)__edx) << 32) | (unsigned __int64)__eax;
}

static __inline__ unsigned long __attribute__((__always_inline__, __nodebug__))
__readcr3(void) {
  unsigned long __cr3_val;
  __asm__ __volatile__ ("mov %%cr3, %0" : "=q"(__cr3_val) : : "memory");
  return __cr3_val;
}

static __inline__ void __attribute__((__always_inline__, __nodebug__))
__writecr3(unsigned int __cr3_val) {
  __asm__ ("mov %0, %%cr3" : : "q"(__cr3_val) : "memory");
}
#endif

#ifdef __cplusplus
}
#endif

#endif /* __INTRIN_H */
#endif /* _MSC_VER */