#ifndef _INTRIN_INTERNAL_
#define _INTRIN_INTERNAL_

FORCEINLINE
VOID
KeSetCurrentIrql(KIRQL Irql)
{
    /* On amd64 the current IRQL lives in CR8 (the task priority register) */
    __writecr8(Irql);
}

FORCEINLINE
PKGDTENTRY64
KiGetGdtEntry(PVOID pGdt, USHORT Selector)
{
    /* Mask off the RPL bits; the rest of the selector is the byte offset into the GDT */
    return (PKGDTENTRY64)((ULONG64)pGdt + (Selector & ~RPL_MASK));
}

FORCEINLINE
PVOID
KiGetGdtDescriptorBase(PKGDTENTRY Entry)
{
    /* Reassemble the 64-bit base address from the descriptor's split fields */
    return (PVOID)((ULONG64)Entry->BaseLow |
                   (ULONG64)Entry->Bytes.BaseMiddle << 16 |
                   (ULONG64)Entry->Bytes.BaseHigh << 24 |
                   (ULONG64)Entry->BaseUpper << 32);
}

FORCEINLINE
VOID
KiSetGdtDescriptorBase(PKGDTENTRY Entry, ULONG64 Base)
{
    /* Scatter the 64-bit base address into the descriptor's split fields */
    Entry->BaseLow = Base & 0xffff;
    Entry->Bits.BaseMiddle = (Base >> 16) & 0xff;
    Entry->Bits.BaseHigh = (Base >> 24) & 0xff;
    Entry->BaseUpper = Base >> 32;
}

FORCEINLINE
VOID
KiSetGdtDescriptorLimit(PKGDTENTRY Entry, ULONG Limit)
{
    /* The limit is split into a 16-bit low part and a 4-bit high part */
    Entry->LimitLow = Limit & 0xffff;
    Entry->Bits.LimitHigh = (Limit >> 16) & 0xf;
}

FORCEINLINE
VOID
KiInitGdtEntry(PKGDTENTRY64 Entry, ULONG64 Base, ULONG Size, UCHAR Type, UCHAR Dpl)
{
    /* Set up a byte-granular, present descriptor; the limit is Size - 1 */
    KiSetGdtDescriptorBase(Entry, Base);
    KiSetGdtDescriptorLimit(Entry, Size - 1);
    Entry->Bits.Type = Type;
    Entry->Bits.Dpl = Dpl;
    Entry->Bits.Present = 1;
    Entry->Bits.System = 0;
    Entry->Bits.LongMode = 0;
    Entry->Bits.DefaultBig = 0;
    Entry->Bits.Granularity = 0;
    Entry->MustBeZero = 0;
}

#if defined(__GNUC__)

static __inline__ __attribute__((always_inline)) void __lgdt(void *Source)
{
    /* Source points to the 10-byte GDT pseudo-descriptor (limit + base) */
    __asm__ __volatile__("lgdt %0" : : "m"(*(short*)Source));
}

static __inline__ __attribute__((always_inline)) void __sgdt(void *Destination)
{
    /* The operand is written, so it must be an output constraint; keep the
       "memory" clobber because the full 10-byte pseudo-descriptor is stored */
    __asm__ __volatile__("sgdt %0" : "=m"(*(short*)Destination) : : "memory");
}

static __inline__ __attribute__((always_inline)) void __lldt(unsigned short Value)
{
    __asm__ __volatile__("lldt %0" : : "rm"(Value));
}

static __inline__ __attribute__((always_inline)) void __sldt(void *Destination)
{
    /* sldt stores the 16-bit LDT selector */
    __asm__ __volatile__("sldt %0" : "=m"(*(short*)Destination) : : "memory");
}

static __inline__ __attribute__((always_inline)) void __ldmxcsr(unsigned long *Source)
{
    __asm__ __volatile__("ldmxcsr %0" : : "m"(*Source));
}

static __inline__ __attribute__((always_inline)) void __stmxcsr(unsigned long *Destination)
{
    /* stmxcsr stores the 32-bit MXCSR register */
    __asm__ __volatile__("stmxcsr %0" : "=m"(*Destination) : : "memory");
}

static __inline__ __attribute__((always_inline)) void __ltr(unsigned short Source)
{
    __asm__ __volatile__("ltr %0" : : "rm"(Source));
}

static __inline__ __attribute__((always_inline)) void __str(unsigned short *Destination)
{
    /* str stores the 16-bit task register selector */
    __asm__ __volatile__("str %0" : "=m"(*Destination) : : "memory");
}

#elif defined(_MSC_VER)

/* MSVC has no amd64 inline assembler; these are provided out of line */

void __lgdt(void *Source);

void __sgdt(void *Destination);

void __lldt(unsigned short Value);

void __sldt(void *Destination);

void __ltr(unsigned short Source);

void __str(unsigned short *Destination);

#else
#error Unknown compiler for inline assembler
#endif

#endif /* _INTRIN_INTERNAL_ */

/* EOF */