1 /*
2 * Copyright (c) 2018-2021 Maxime Villard, m00nbsd.net
3 * All rights reserved.
4 *
5 * This code is part of the NVMM hypervisor.
6 *
7 * Redistribution and use in source and binary forms, with or without
8 * modification, are permitted provided that the following conditions
9 * are met:
10 * 1. Redistributions of source code must retain the above copyright
11 * notice, this list of conditions and the following disclaimer.
12 * 2. Redistributions in binary form must reproduce the above copyright
13 * notice, this list of conditions and the following disclaimer in the
14 * documentation and/or other materials provided with the distribution.
15 *
16 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
17 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
18 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
19 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
20 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
21 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
22 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
23 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
24 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
25 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
26 * SUCH DAMAGE.
27 */
28
29 #ifndef _NVMM_X86_H_
30 #define _NVMM_X86_H_
31
32 #if defined(__NetBSD__)
33 #include <x86/specialreg.h>
34 #elif defined(__DragonFly__)
35 #include <machine/specialreg.h>
36 #endif
37
38 /* -------------------------------------------------------------------------- */
39
40 #ifndef ASM_NVMM
41
/* Exit state for a guest memory access the hypervisor could not satisfy. */
struct nvmm_x86_exit_memory {
	int prot;		/* access permissions of the faulting access
				 * (presumably PROT_* bits — verify in emulator) */
	gpaddr_t gpa;		/* guest-physical address of the access */
	uint8_t inst_len;	/* number of valid bytes in inst_bytes */
	uint8_t inst_bytes[15];	/* raw bytes of the faulting instruction
				 * (15 = max x86 instruction length) */
};
48
/* Exit state for an I/O port access (IN/OUT instruction family). */
struct nvmm_x86_exit_io {
	bool in;		/* true for IN (port read), false for OUT */
	uint16_t port;		/* I/O port number */
	int8_t seg;		/* segment involved (NVMM_X64_SEG_* index;
				 * negative presumably means none — verify) */
	uint8_t address_size;	/* address size of the operation, in bytes */
	uint8_t operand_size;	/* operand size of the operation, in bytes */
	bool rep;		/* REP prefix present */
	bool str;		/* string variant (INS/OUTS) */
	uint64_t npc;		/* next PC, to resume at after emulation */
};
59
/* Exit state for a RDMSR instruction. */
struct nvmm_x86_exit_rdmsr {
	uint32_t msr;		/* MSR number being read */
	uint64_t npc;		/* next PC, to resume at after emulation */
};
64
/* Exit state for a WRMSR instruction. */
struct nvmm_x86_exit_wrmsr {
	uint32_t msr;		/* MSR number being written */
	uint64_t val;		/* value being written */
	uint64_t npc;		/* next PC, to resume at after emulation */
};
70
/* Exit state for an intercepted instruction with no extra payload. */
struct nvmm_x86_exit_insn {
	uint64_t npc;		/* next PC, to resume at after emulation */
};
74
/* Exit state for an exit the hypervisor could not classify. */
struct nvmm_x86_exit_invalid {
	uint64_t hwcode;	/* raw hardware exit code, for debugging */
};
78
79 /* Generic. */
80 #define NVMM_VCPU_EXIT_NONE 0x0000000000000000ULL
81 #define NVMM_VCPU_EXIT_INVALID 0xFFFFFFFFFFFFFFFFULL
82 /* x86: operations. */
83 #define NVMM_VCPU_EXIT_MEMORY 0x0000000000000001ULL
84 #define NVMM_VCPU_EXIT_IO 0x0000000000000002ULL
85 /* x86: changes in VCPU state. */
86 #define NVMM_VCPU_EXIT_SHUTDOWN 0x0000000000001000ULL
87 #define NVMM_VCPU_EXIT_INT_READY 0x0000000000001001ULL
88 #define NVMM_VCPU_EXIT_NMI_READY 0x0000000000001002ULL
89 #define NVMM_VCPU_EXIT_HALTED 0x0000000000001003ULL
90 #define NVMM_VCPU_EXIT_TPR_CHANGED 0x0000000000001004ULL
91 /* x86: instructions. */
92 #define NVMM_VCPU_EXIT_RDMSR 0x0000000000002000ULL
93 #define NVMM_VCPU_EXIT_WRMSR 0x0000000000002001ULL
94 #define NVMM_VCPU_EXIT_MONITOR 0x0000000000002002ULL
95 #define NVMM_VCPU_EXIT_MWAIT 0x0000000000002003ULL
96 #define NVMM_VCPU_EXIT_CPUID 0x0000000000002004ULL
97
/*
 * VCPU exit structure: the exit reason (NVMM_VCPU_EXIT_*), a
 * reason-specific payload, and a snapshot of VCPU state commonly
 * needed to handle the exit without a full state fetch.
 */
struct nvmm_x86_exit {
	uint64_t reason;			/* NVMM_VCPU_EXIT_* value */
	union {					/* payload, selected by reason */
		struct nvmm_x86_exit_memory mem;
		struct nvmm_x86_exit_io io;
		struct nvmm_x86_exit_rdmsr rdmsr;
		struct nvmm_x86_exit_wrmsr wrmsr;
		struct nvmm_x86_exit_insn insn;
		struct nvmm_x86_exit_invalid inv;
	} u;
	struct {
		uint64_t rflags;		/* guest RFLAGS */
		uint64_t cr8;			/* guest CR8 (TPR) */
		uint64_t int_shadow:1;		/* in an interrupt shadow */
		uint64_t int_window_exiting:1;	/* intr-window exit armed */
		uint64_t nmi_window_exiting:1;	/* NMI-window exit armed */
		uint64_t evt_pending:1;		/* an event awaits injection */
		uint64_t rsvd:60;
	} exitstate;
};
#define nvmm_vcpu_exit		nvmm_x86_exit
119
120 #define NVMM_VCPU_EVENT_EXCP 0
121 #define NVMM_VCPU_EVENT_INTR 1
122
/* Event to inject into the guest: an exception or an interrupt. */
struct nvmm_x86_event {
	u_int type;		/* NVMM_VCPU_EVENT_EXCP or _INTR */
	uint8_t vector;		/* exception/interrupt vector number */
	union {
		struct {
			uint64_t error;	/* error code, for exceptions
					 * that push one */
		} excp;
	} u;
};
#define nvmm_vcpu_event		nvmm_x86_event
133
/* Machine-dependent (x86) capability information. */
struct nvmm_cap_md {
	uint64_t mach_conf_support;	/* supported machine conf ops, bitmap */

	uint64_t vcpu_conf_support;	/* supported VCPU conf ops, bitmap */
#define NVMM_CAP_ARCH_VCPU_CONF_CPUID	__BIT(0)
#define NVMM_CAP_ARCH_VCPU_CONF_TPR	__BIT(1)

	uint64_t xcr0_mask;		/* XCR0 bits available to the guest */
	uint32_t mxcsr_mask;		/* valid bits in MXCSR */
	uint32_t conf_cpuid_maxops;	/* max CPUID conf entries per VCPU */
	uint64_t rsvd[6];		/* reserved for future use */
};
146
147 #endif /* ASM_NVMM */
148
149 /* -------------------------------------------------------------------------- */
150
/*
 * State indexes. We use "X64" as the naming convention, to avoid
 * confusion with "X86", which historically implied 32bit.
 */
155
156 /* Segments. */
157 #define NVMM_X64_SEG_ES 0
158 #define NVMM_X64_SEG_CS 1
159 #define NVMM_X64_SEG_SS 2
160 #define NVMM_X64_SEG_DS 3
161 #define NVMM_X64_SEG_FS 4
162 #define NVMM_X64_SEG_GS 5
163 #define NVMM_X64_SEG_GDT 6
164 #define NVMM_X64_SEG_IDT 7
165 #define NVMM_X64_SEG_LDT 8
166 #define NVMM_X64_SEG_TR 9
167 #define NVMM_X64_NSEG 10
168
169 /* General Purpose Registers. */
170 #define NVMM_X64_GPR_RAX 0
171 #define NVMM_X64_GPR_RCX 1
172 #define NVMM_X64_GPR_RDX 2
173 #define NVMM_X64_GPR_RBX 3
174 #define NVMM_X64_GPR_RSP 4
175 #define NVMM_X64_GPR_RBP 5
176 #define NVMM_X64_GPR_RSI 6
177 #define NVMM_X64_GPR_RDI 7
178 #define NVMM_X64_GPR_R8 8
179 #define NVMM_X64_GPR_R9 9
180 #define NVMM_X64_GPR_R10 10
181 #define NVMM_X64_GPR_R11 11
182 #define NVMM_X64_GPR_R12 12
183 #define NVMM_X64_GPR_R13 13
184 #define NVMM_X64_GPR_R14 14
185 #define NVMM_X64_GPR_R15 15
186 #define NVMM_X64_GPR_RIP 16
187 #define NVMM_X64_GPR_RFLAGS 17
188 #define NVMM_X64_NGPR 18
189
190 /* Control Registers. */
191 #define NVMM_X64_CR_CR0 0
192 #define NVMM_X64_CR_CR2 1
193 #define NVMM_X64_CR_CR3 2
194 #define NVMM_X64_CR_CR4 3
195 #define NVMM_X64_CR_CR8 4
196 #define NVMM_X64_CR_XCR0 5
197 #define NVMM_X64_NCR 6
198
199 /* Debug Registers. */
200 #define NVMM_X64_DR_DR0 0
201 #define NVMM_X64_DR_DR1 1
202 #define NVMM_X64_DR_DR2 2
203 #define NVMM_X64_DR_DR3 3
204 #define NVMM_X64_DR_DR6 4
205 #define NVMM_X64_DR_DR7 5
206 #define NVMM_X64_NDR 6
207
208 /* MSRs. */
209 #define NVMM_X64_MSR_EFER 0
210 #define NVMM_X64_MSR_STAR 1
211 #define NVMM_X64_MSR_LSTAR 2
212 #define NVMM_X64_MSR_CSTAR 3
213 #define NVMM_X64_MSR_SFMASK 4
214 #define NVMM_X64_MSR_KERNELGSBASE 5
215 #define NVMM_X64_MSR_SYSENTER_CS 6
216 #define NVMM_X64_MSR_SYSENTER_ESP 7
217 #define NVMM_X64_MSR_SYSENTER_EIP 8
218 #define NVMM_X64_MSR_PAT 9
219 #define NVMM_X64_MSR_TSC 10
220 #define NVMM_X64_NMSR 11
221
222 #ifndef ASM_NVMM
223
224 #include <sys/types.h>
225 #include <sys/bitops.h>
226 #if defined(__DragonFly__)
227 #ifdef __x86_64__
228 #undef __BIT
229 #define __BIT(__n) __BIT64(__n)
230 #undef __BITS
231 #define __BITS(__m, __n) __BITS64(__m, __n)
232 #endif /* __x86_64__ */
233 #endif
234
235 /* Segment state. */
/* Segment state: the selector plus the hidden descriptor cache. */
struct nvmm_x64_state_seg {
	uint16_t selector;
	struct { /* hidden */
		uint16_t type:4;	/* segment type */
		uint16_t s:1;		/* 0 = system, 1 = code/data */
		uint16_t dpl:2;		/* descriptor privilege level */
		uint16_t p:1;		/* present */
		uint16_t avl:1;		/* available to software */
		uint16_t l:1;		/* 64-bit (long mode) code segment */
		uint16_t def:1;		/* default operand size */
		uint16_t g:1;		/* granularity (limit in 4K units) */
		uint16_t rsvd:4;
	} attrib;
	uint32_t limit; /* hidden */
	uint64_t base;  /* hidden */
};
252
253 /* Interrupt state. */
/* Interrupt state of a VCPU. */
struct nvmm_x64_state_intr {
	uint64_t int_shadow:1;		/* VCPU is in an interrupt shadow */
	uint64_t int_window_exiting:1;	/* exit when INTs become injectable */
	uint64_t nmi_window_exiting:1;	/* exit when NMIs become injectable */
	uint64_t evt_pending:1;		/* an event is pending injection */
	uint64_t rsvd:60;
};
261
262 /* FPU state structures. */
/* FPU instruction/data pointer, in 64-bit or 32-bit (FXSAVE) format. */
union nvmm_x64_state_fpu_addr {
	uint64_t fa_64;			/* 64-bit linear address */
	struct {
		uint32_t fa_off;	/* 32-bit offset */
		uint16_t fa_seg;	/* segment selector */
		uint16_t fa_opcode;	/* opcode field (FXSAVE layout) */
	} fa_32;
};
CTASSERT(sizeof(union nvmm_x64_state_fpu_addr) == 8);
/* One x87 register in 80-bit extended precision, padded to 16 bytes. */
struct nvmm_x64_state_fpu_mmreg {
	uint64_t mm_significand;	/* fraction (significand) */
	uint16_t mm_exp_sign;		/* exponent and sign bit */
	uint8_t mm_rsvd[6];		/* padding up to 16 bytes */
};
CTASSERT(sizeof(struct nvmm_x64_state_fpu_mmreg) == 16);
/* One 128-bit XMM register. */
struct nvmm_x64_state_fpu_xmmreg {
	uint8_t xmm_bytes[16];
};
CTASSERT(sizeof(struct nvmm_x64_state_fpu_xmmreg) == 16);
282
283 /* FPU state (x87 + SSE). */
/*
 * FPU state (x87 + SSE). Mirrors the 512-byte FXSAVE area layout,
 * as enforced by the CTASSERT below.
 */
struct nvmm_x64_state_fpu {
	uint16_t fx_cw;		/* Control Word */
	uint16_t fx_sw;		/* Status Word */
	uint8_t fx_tw;		/* Tag Word */
	uint8_t fx_zero;	/* reserved byte in the FXSAVE layout */
	uint16_t fx_opcode;	/* last x87 opcode */
	union nvmm_x64_state_fpu_addr fx_ip;	/* Instruction Pointer */
	union nvmm_x64_state_fpu_addr fx_dp;	/* Data pointer */
	uint32_t fx_mxcsr;	/* SSE control/status register */
	uint32_t fx_mxcsr_mask;	/* valid bits in MXCSR */
	struct nvmm_x64_state_fpu_mmreg fx_87_ac[8];	/* x87 registers */
	struct nvmm_x64_state_fpu_xmmreg fx_xmm[16];	/* XMM registers */
	uint8_t fx_rsvd[96];	/* rest of the 512-byte area */
} __aligned(16);
CTASSERT(sizeof(struct nvmm_x64_state_fpu) == 512);
299
300 /* Flags. */
301 #define NVMM_X64_STATE_SEGS 0x01
302 #define NVMM_X64_STATE_GPRS 0x02
303 #define NVMM_X64_STATE_CRS 0x04
304 #define NVMM_X64_STATE_DRS 0x08
305 #define NVMM_X64_STATE_MSRS 0x10
306 #define NVMM_X64_STATE_INTR 0x20
307 #define NVMM_X64_STATE_FPU 0x40
308 #define NVMM_X64_STATE_ALL \
309 (NVMM_X64_STATE_SEGS | NVMM_X64_STATE_GPRS | NVMM_X64_STATE_CRS | \
310 NVMM_X64_STATE_DRS | NVMM_X64_STATE_MSRS | NVMM_X64_STATE_INTR | \
311 NVMM_X64_STATE_FPU)
312
/*
 * Full VCPU state. Arrays are indexed by the NVMM_X64_SEG_*,
 * NVMM_X64_GPR_*, NVMM_X64_CR_*, NVMM_X64_DR_* and NVMM_X64_MSR_*
 * defines above; the NVMM_X64_STATE_* flags select which parts to
 * transfer.
 */
struct nvmm_x64_state {
	struct nvmm_x64_state_seg segs[NVMM_X64_NSEG];	/* segment registers */
	uint64_t gprs[NVMM_X64_NGPR];	/* GPRs, incl. RIP and RFLAGS */
	uint64_t crs[NVMM_X64_NCR];	/* control registers, incl. XCR0 */
	uint64_t drs[NVMM_X64_NDR];	/* debug registers */
	uint64_t msrs[NVMM_X64_NMSR];	/* MSRs */
	struct nvmm_x64_state_intr intr;	/* interrupt state */
	struct nvmm_x64_state_fpu fpu;	/* FPU state */
};
#define nvmm_vcpu_state		nvmm_x64_state
323
324 /* -------------------------------------------------------------------------- */
325
326 #define NVMM_VCPU_CONF_CPUID NVMM_VCPU_CONF_MD_BEGIN
327 #define NVMM_VCPU_CONF_TPR (NVMM_VCPU_CONF_MD_BEGIN + 1)
328
/*
 * Per-VCPU CPUID configuration for one leaf: either mask the host
 * results with the given bits, or exit to the emulator on that leaf.
 * Exactly one of 'mask'/'exit' is expected to be set — verify with
 * the conf validation code.
 */
struct nvmm_vcpu_conf_cpuid {
	/* The options. */
	uint32_t mask:1;	/* apply u.mask to the CPUID result */
	uint32_t exit:1;	/* exit to userland on this leaf */
	uint32_t rsvd:30;

	/* The leaf. */
	uint32_t leaf;		/* CPUID leaf (EAX input) this applies to */

	/* The params. */
	union {
		struct {
			struct {	/* bits to force-set in the result */
				uint32_t eax;
				uint32_t ebx;
				uint32_t ecx;
				uint32_t edx;
			} set;
			struct {	/* bits to clear from the result */
				uint32_t eax;
				uint32_t ebx;
				uint32_t ecx;
				uint32_t edx;
			} del;
		} mask;
	} u;
};
356
/* Per-VCPU TPR configuration. */
struct nvmm_vcpu_conf_tpr {
	uint32_t exit_changed:1;	/* exit when the guest changes the TPR
					 * (NVMM_VCPU_EXIT_TPR_CHANGED) */
	uint32_t rsvd:31;
};
361
362 /* -------------------------------------------------------------------------- */
363
364 /*
365 * CPUID defines.
366 */
367
368 /* Fn0000_0001:EBX */
369 #define CPUID_0_01_EBX_BRAND_INDEX __BITS(7,0)
370 #define CPUID_0_01_EBX_CLFLUSH_SIZE __BITS(15,8)
371 #define CPUID_0_01_EBX_HTT_CORES __BITS(23,16)
372 #define CPUID_0_01_EBX_LOCAL_APIC_ID __BITS(31,24)
373 /* Fn0000_0001:ECX */
374 #define CPUID_0_01_ECX_SSE3 __BIT(0)
375 #define CPUID_0_01_ECX_PCLMULQDQ __BIT(1)
376 #define CPUID_0_01_ECX_DTES64 __BIT(2)
377 #define CPUID_0_01_ECX_MONITOR __BIT(3)
378 #define CPUID_0_01_ECX_DS_CPL __BIT(4)
379 #define CPUID_0_01_ECX_VMX __BIT(5)
380 #define CPUID_0_01_ECX_SMX __BIT(6)
381 #define CPUID_0_01_ECX_EIST __BIT(7)
382 #define CPUID_0_01_ECX_TM2 __BIT(8)
383 #define CPUID_0_01_ECX_SSSE3 __BIT(9)
384 #define CPUID_0_01_ECX_CNXTID __BIT(10)
385 #define CPUID_0_01_ECX_SDBG __BIT(11)
386 #define CPUID_0_01_ECX_FMA __BIT(12)
387 #define CPUID_0_01_ECX_CX16 __BIT(13)
388 #define CPUID_0_01_ECX_XTPR __BIT(14)
389 #define CPUID_0_01_ECX_PDCM __BIT(15)
390 #define CPUID_0_01_ECX_PCID __BIT(17)
391 #define CPUID_0_01_ECX_DCA __BIT(18)
392 #define CPUID_0_01_ECX_SSE41 __BIT(19)
393 #define CPUID_0_01_ECX_SSE42 __BIT(20)
394 #define CPUID_0_01_ECX_X2APIC __BIT(21)
395 #define CPUID_0_01_ECX_MOVBE __BIT(22)
396 #define CPUID_0_01_ECX_POPCNT __BIT(23)
397 #define CPUID_0_01_ECX_TSC_DEADLINE __BIT(24)
398 #define CPUID_0_01_ECX_AESNI __BIT(25)
399 #define CPUID_0_01_ECX_XSAVE __BIT(26)
400 #define CPUID_0_01_ECX_OSXSAVE __BIT(27)
401 #define CPUID_0_01_ECX_AVX __BIT(28)
402 #define CPUID_0_01_ECX_F16C __BIT(29)
403 #define CPUID_0_01_ECX_RDRAND __BIT(30)
404 #define CPUID_0_01_ECX_RAZ __BIT(31)
405 /* Fn0000_0001:EDX */
406 #define CPUID_0_01_EDX_FPU __BIT(0)
407 #define CPUID_0_01_EDX_VME __BIT(1)
408 #define CPUID_0_01_EDX_DE __BIT(2)
409 #define CPUID_0_01_EDX_PSE __BIT(3)
410 #define CPUID_0_01_EDX_TSC __BIT(4)
411 #define CPUID_0_01_EDX_MSR __BIT(5)
412 #define CPUID_0_01_EDX_PAE __BIT(6)
413 #define CPUID_0_01_EDX_MCE __BIT(7)
414 #define CPUID_0_01_EDX_CX8 __BIT(8)
415 #define CPUID_0_01_EDX_APIC __BIT(9)
416 #define CPUID_0_01_EDX_SEP __BIT(11)
417 #define CPUID_0_01_EDX_MTRR __BIT(12)
418 #define CPUID_0_01_EDX_PGE __BIT(13)
419 #define CPUID_0_01_EDX_MCA __BIT(14)
420 #define CPUID_0_01_EDX_CMOV __BIT(15)
421 #define CPUID_0_01_EDX_PAT __BIT(16)
422 #define CPUID_0_01_EDX_PSE36 __BIT(17)
423 #define CPUID_0_01_EDX_PSN __BIT(18)
424 #define CPUID_0_01_EDX_CLFSH __BIT(19)
425 #define CPUID_0_01_EDX_DS __BIT(21)
426 #define CPUID_0_01_EDX_ACPI __BIT(22)
427 #define CPUID_0_01_EDX_MMX __BIT(23)
428 #define CPUID_0_01_EDX_FXSR __BIT(24)
429 #define CPUID_0_01_EDX_SSE __BIT(25)
430 #define CPUID_0_01_EDX_SSE2 __BIT(26)
431 #define CPUID_0_01_EDX_SS __BIT(27)
432 #define CPUID_0_01_EDX_HTT __BIT(28)
433 #define CPUID_0_01_EDX_TM __BIT(29)
434 #define CPUID_0_01_EDX_PBE __BIT(31)
435
436 /* Fn0000_0004:EAX (Intel Deterministic Cache Parameter Leaf) */
437 #define CPUID_0_04_EAX_CACHETYPE __BITS(4, 0)
438 #define CPUID_0_04_EAX_CACHETYPE_NULL 0
439 #define CPUID_0_04_EAX_CACHETYPE_DATA 1
440 #define CPUID_0_04_EAX_CACHETYPE_INSN 2
441 #define CPUID_0_04_EAX_CACHETYPE_UNIFIED 3
442 #define CPUID_0_04_EAX_CACHELEVEL __BITS(7, 5)
443 #define CPUID_0_04_EAX_SELFINITCL __BIT(8)
444 #define CPUID_0_04_EAX_FULLASSOC __BIT(9)
445 #define CPUID_0_04_EAX_SHARING __BITS(25, 14)
446 #define CPUID_0_04_EAX_CORE_P_PKG __BITS(31, 26)
447
448 /* [ECX=0] Fn0000_0007:EBX (Structured Extended Features) */
449 #define CPUID_0_07_EBX_FSGSBASE __BIT(0)
450 #define CPUID_0_07_EBX_TSC_ADJUST __BIT(1)
451 #define CPUID_0_07_EBX_SGX __BIT(2)
452 #define CPUID_0_07_EBX_BMI1 __BIT(3)
453 #define CPUID_0_07_EBX_HLE __BIT(4)
454 #define CPUID_0_07_EBX_AVX2 __BIT(5)
455 #define CPUID_0_07_EBX_FDPEXONLY __BIT(6)
456 #define CPUID_0_07_EBX_SMEP __BIT(7)
457 #define CPUID_0_07_EBX_BMI2 __BIT(8)
458 #define CPUID_0_07_EBX_ERMS __BIT(9)
459 #define CPUID_0_07_EBX_INVPCID __BIT(10)
460 #define CPUID_0_07_EBX_RTM __BIT(11)
461 #define CPUID_0_07_EBX_QM __BIT(12)
462 #define CPUID_0_07_EBX_FPUCSDS __BIT(13)
463 #define CPUID_0_07_EBX_MPX __BIT(14)
464 #define CPUID_0_07_EBX_PQE __BIT(15)
465 #define CPUID_0_07_EBX_AVX512F __BIT(16)
466 #define CPUID_0_07_EBX_AVX512DQ __BIT(17)
467 #define CPUID_0_07_EBX_RDSEED __BIT(18)
468 #define CPUID_0_07_EBX_ADX __BIT(19)
469 #define CPUID_0_07_EBX_SMAP __BIT(20)
470 #define CPUID_0_07_EBX_AVX512_IFMA __BIT(21)
471 #define CPUID_0_07_EBX_CLFLUSHOPT __BIT(23)
472 #define CPUID_0_07_EBX_CLWB __BIT(24)
473 #define CPUID_0_07_EBX_PT __BIT(25)
474 #define CPUID_0_07_EBX_AVX512PF __BIT(26)
475 #define CPUID_0_07_EBX_AVX512ER __BIT(27)
476 #define CPUID_0_07_EBX_AVX512CD __BIT(28)
477 #define CPUID_0_07_EBX_SHA __BIT(29)
478 #define CPUID_0_07_EBX_AVX512BW __BIT(30)
479 #define CPUID_0_07_EBX_AVX512VL __BIT(31)
480 /* [ECX=0] Fn0000_0007:ECX (Structured Extended Features) */
481 #define CPUID_0_07_ECX_PREFETCHWT1 __BIT(0)
482 #define CPUID_0_07_ECX_AVX512_VBMI __BIT(1)
483 #define CPUID_0_07_ECX_UMIP __BIT(2)
484 #define CPUID_0_07_ECX_PKU __BIT(3)
485 #define CPUID_0_07_ECX_OSPKE __BIT(4)
486 #define CPUID_0_07_ECX_WAITPKG __BIT(5)
487 #define CPUID_0_07_ECX_AVX512_VBMI2 __BIT(6)
488 #define CPUID_0_07_ECX_CET_SS __BIT(7)
489 #define CPUID_0_07_ECX_GFNI __BIT(8)
490 #define CPUID_0_07_ECX_VAES __BIT(9)
491 #define CPUID_0_07_ECX_VPCLMULQDQ __BIT(10)
492 #define CPUID_0_07_ECX_AVX512_VNNI __BIT(11)
493 #define CPUID_0_07_ECX_AVX512_BITALG __BIT(12)
494 #define CPUID_0_07_ECX_AVX512_VPOPCNTDQ __BIT(14)
495 #define CPUID_0_07_ECX_LA57 __BIT(16)
496 #define CPUID_0_07_ECX_MAWAU __BITS(21, 17)
497 #define CPUID_0_07_ECX_RDPID __BIT(22)
498 #define CPUID_0_07_ECX_KL __BIT(23)
499 #define CPUID_0_07_ECX_CLDEMOTE __BIT(25)
500 #define CPUID_0_07_ECX_MOVDIRI __BIT(27)
501 #define CPUID_0_07_ECX_MOVDIR64B __BIT(28)
502 #define CPUID_0_07_ECX_SGXLC __BIT(30)
503 #define CPUID_0_07_ECX_PKS __BIT(31)
504 /* [ECX=0] Fn0000_0007:EDX (Structured Extended Features) */
505 #define CPUID_0_07_EDX_AVX512_4VNNIW __BIT(2)
506 #define CPUID_0_07_EDX_AVX512_4FMAPS __BIT(3)
507 #define CPUID_0_07_EDX_FSREP_MOV __BIT(4)
508 #define CPUID_0_07_EDX_AVX512_VP2INTERSECT __BIT(8)
509 #define CPUID_0_07_EDX_SRBDS_CTRL __BIT(9)
510 #define CPUID_0_07_EDX_MD_CLEAR __BIT(10)
511 #define CPUID_0_07_EDX_TSX_FORCE_ABORT __BIT(13)
512 #define CPUID_0_07_EDX_SERIALIZE __BIT(14)
513 #define CPUID_0_07_EDX_HYBRID __BIT(15)
514 #define CPUID_0_07_EDX_TSXLDTRK __BIT(16)
515 #define CPUID_0_07_EDX_CET_IBT __BIT(20)
516 #define CPUID_0_07_EDX_IBRS __BIT(26)
517 #define CPUID_0_07_EDX_STIBP __BIT(27)
518 #define CPUID_0_07_EDX_L1D_FLUSH __BIT(28)
519 #define CPUID_0_07_EDX_ARCH_CAP __BIT(29)
520 #define CPUID_0_07_EDX_CORE_CAP __BIT(30)
521 #define CPUID_0_07_EDX_SSBD __BIT(31)
522
523 /* Fn0000_000B:EAX (Extended Topology Enumeration) */
524 #define CPUID_0_0B_EAX_SHIFTNUM __BITS(4, 0)
525 /* Fn0000_000B:ECX (Extended Topology Enumeration) */
526 #define CPUID_0_0B_ECX_LVLNUM __BITS(7, 0)
527 #define CPUID_0_0B_ECX_LVLTYPE __BITS(15, 8)
528 #define CPUID_0_0B_ECX_LVLTYPE_INVAL 0
529 #define CPUID_0_0B_ECX_LVLTYPE_SMT 1
530 #define CPUID_0_0B_ECX_LVLTYPE_CORE 2
531
532 /* [ECX=1] Fn0000_000D:EAX (Processor Extended State Enumeration) */
533 #define CPUID_0_0D_ECX1_EAX_XSAVEOPT __BIT(0)
534 #define CPUID_0_0D_ECX1_EAX_XSAVEC __BIT(1)
535 #define CPUID_0_0D_ECX1_EAX_XGETBV __BIT(2)
536 #define CPUID_0_0D_ECX1_EAX_XSAVES __BIT(3)
537
538 /* Fn8000_0001:ECX */
539 #define CPUID_8_01_ECX_LAHF __BIT(0)
540 #define CPUID_8_01_ECX_CMPLEGACY __BIT(1)
541 #define CPUID_8_01_ECX_SVM __BIT(2)
542 #define CPUID_8_01_ECX_EAPIC __BIT(3)
543 #define CPUID_8_01_ECX_ALTMOVCR8 __BIT(4)
544 #define CPUID_8_01_ECX_ABM __BIT(5)
545 #define CPUID_8_01_ECX_SSE4A __BIT(6)
546 #define CPUID_8_01_ECX_MISALIGNSSE __BIT(7)
547 #define CPUID_8_01_ECX_3DNOWPF __BIT(8)
548 #define CPUID_8_01_ECX_OSVW __BIT(9)
549 #define CPUID_8_01_ECX_IBS __BIT(10)
550 #define CPUID_8_01_ECX_XOP __BIT(11)
551 #define CPUID_8_01_ECX_SKINIT __BIT(12)
552 #define CPUID_8_01_ECX_WDT __BIT(13)
553 #define CPUID_8_01_ECX_LWP __BIT(15)
554 #define CPUID_8_01_ECX_FMA4 __BIT(16)
555 #define CPUID_8_01_ECX_TCE __BIT(17)
556 #define CPUID_8_01_ECX_NODEID __BIT(19)
557 #define CPUID_8_01_ECX_TBM __BIT(21)
558 #define CPUID_8_01_ECX_TOPOEXT __BIT(22)
559 #define CPUID_8_01_ECX_PCEC __BIT(23)
560 #define CPUID_8_01_ECX_PCENB __BIT(24)
561 #define CPUID_8_01_ECX_DBE __BIT(26)
562 #define CPUID_8_01_ECX_PERFTSC __BIT(27)
563 #define CPUID_8_01_ECX_PERFEXTLLC __BIT(28)
564 #define CPUID_8_01_ECX_MWAITX __BIT(29)
565 /* Fn8000_0001:EDX */
566 #define CPUID_8_01_EDX_FPU __BIT(0)
567 #define CPUID_8_01_EDX_VME __BIT(1)
568 #define CPUID_8_01_EDX_DE __BIT(2)
569 #define CPUID_8_01_EDX_PSE __BIT(3)
570 #define CPUID_8_01_EDX_TSC __BIT(4)
571 #define CPUID_8_01_EDX_MSR __BIT(5)
572 #define CPUID_8_01_EDX_PAE __BIT(6)
573 #define CPUID_8_01_EDX_MCE __BIT(7)
574 #define CPUID_8_01_EDX_CX8 __BIT(8)
575 #define CPUID_8_01_EDX_APIC __BIT(9)
576 #define CPUID_8_01_EDX_SYSCALL __BIT(11)
577 #define CPUID_8_01_EDX_MTRR __BIT(12)
578 #define CPUID_8_01_EDX_PGE __BIT(13)
579 #define CPUID_8_01_EDX_MCA __BIT(14)
580 #define CPUID_8_01_EDX_CMOV __BIT(15)
581 #define CPUID_8_01_EDX_PAT __BIT(16)
582 #define CPUID_8_01_EDX_PSE36 __BIT(17)
583 #define CPUID_8_01_EDX_XD __BIT(20)
584 #define CPUID_8_01_EDX_MMXEXT __BIT(22)
585 #define CPUID_8_01_EDX_MMX __BIT(23)
586 #define CPUID_8_01_EDX_FXSR __BIT(24)
587 #define CPUID_8_01_EDX_FFXSR __BIT(25)
588 #define CPUID_8_01_EDX_PAGE1GB __BIT(26)
589 #define CPUID_8_01_EDX_RDTSCP __BIT(27)
590 #define CPUID_8_01_EDX_LM __BIT(29)
591 #define CPUID_8_01_EDX_3DNOWEXT __BIT(30)
592 #define CPUID_8_01_EDX_3DNOW __BIT(31)
593
594 /* Fn8000_0007:EDX (Advanced Power Management) */
595 #define CPUID_8_07_EDX_TS __BIT(0)
596 #define CPUID_8_07_EDX_FID __BIT(1)
597 #define CPUID_8_07_EDX_VID __BIT(2)
598 #define CPUID_8_07_EDX_TTP __BIT(3)
599 #define CPUID_8_07_EDX_TM __BIT(4)
600 #define CPUID_8_07_EDX_100MHzSteps __BIT(6)
601 #define CPUID_8_07_EDX_HwPstate __BIT(7)
602 #define CPUID_8_07_EDX_TscInvariant __BIT(8)
603 #define CPUID_8_07_EDX_CPB __BIT(9)
604 #define CPUID_8_07_EDX_EffFreqRO __BIT(10)
605 #define CPUID_8_07_EDX_ProcFeedbackIntf __BIT(11)
606 #define CPUID_8_07_EDX_ProcPowerReport __BIT(12)
607
608 /* Fn8000_0008:EBX */
609 #define CPUID_8_08_EBX_CLZERO __BIT(0)
610 #define CPUID_8_08_EBX_InstRetCntMsr __BIT(1)
611 #define CPUID_8_08_EBX_RstrFpErrPtrs __BIT(2)
612 #define CPUID_8_08_EBX_INVLPGB __BIT(3)
613 #define CPUID_8_08_EBX_RDPRU __BIT(4)
614 #define CPUID_8_08_EBX_MCOMMIT __BIT(8)
615 #define CPUID_8_08_EBX_WBNOINVD __BIT(9)
616 #define CPUID_8_08_EBX_IBPB __BIT(12)
617 #define CPUID_8_08_EBX_INT_WBINVD __BIT(13)
618 #define CPUID_8_08_EBX_IBRS __BIT(14)
619 #define CPUID_8_08_EBX_STIBP __BIT(15)
620 #define CPUID_8_08_EBX_IBRS_ALWAYSON __BIT(16)
621 #define CPUID_8_08_EBX_STIBP_ALWAYSON __BIT(17)
622 #define CPUID_8_08_EBX_PREFER_IBRS __BIT(18)
623 #define CPUID_8_08_EBX_EferLmsleUnsupp __BIT(20)
624 #define CPUID_8_08_EBX_INVLPGBnestedPg __BIT(21)
625 #define CPUID_8_08_EBX_SSBD __BIT(24)
626 #define CPUID_8_08_EBX_VIRT_SSBD __BIT(25)
627 #define CPUID_8_08_EBX_SSB_NO __BIT(26)
628 /* Fn8000_0008:ECX */
629 #define CPUID_8_08_ECX_NC __BITS(7,0)
630 #define CPUID_8_08_ECX_ApicIdSize __BITS(15,12)
631 #define CPUID_8_08_ECX_PerfTscSize __BITS(17,16)
632
633 /* Fn8000_000A:EAX (SVM features) */
634 #define CPUID_8_0A_EAX_SvmRev __BITS(7,0)
635 /* Fn8000_000A:EDX (SVM features) */
636 #define CPUID_8_0A_EDX_NP __BIT(0)
637 #define CPUID_8_0A_EDX_LbrVirt __BIT(1)
638 #define CPUID_8_0A_EDX_SVML __BIT(2)
639 #define CPUID_8_0A_EDX_NRIPS __BIT(3)
640 #define CPUID_8_0A_EDX_TscRateMsr __BIT(4)
641 #define CPUID_8_0A_EDX_VmcbClean __BIT(5)
642 #define CPUID_8_0A_EDX_FlushByASID __BIT(6)
643 #define CPUID_8_0A_EDX_DecodeAssists __BIT(7)
644 #define CPUID_8_0A_EDX_PauseFilter __BIT(10)
645 #define CPUID_8_0A_EDX_PFThreshold __BIT(12)
646 #define CPUID_8_0A_EDX_AVIC __BIT(13)
647 #define CPUID_8_0A_EDX_VMSAVEvirt __BIT(15)
648 #define CPUID_8_0A_EDX_VGIF __BIT(16)
649 #define CPUID_8_0A_EDX_GMET __BIT(17)
650 #define CPUID_8_0A_EDX_SSSCheck __BIT(19)
651 #define CPUID_8_0A_EDX_SpecCtrl __BIT(20)
652 #define CPUID_8_0A_EDX_TlbiCtl __BIT(24)
653
654 /* -------------------------------------------------------------------------- */
655
656 /*
657 * Register defines. We mainly rely on the already-existing OS definitions.
658 */
659
660 #if defined(__DragonFly__)
661
662 #define XCR0_X87 CPU_XFEATURE_X87 /* 0x00000001 */
663 #define XCR0_SSE CPU_XFEATURE_SSE /* 0x00000002 */
664
665 #define MSR_MISC_ENABLE MSR_IA32_MISC_ENABLE /* 0x1a0 */
666 #define MSR_CR_PAT MSR_PAT /* 0x277 */
667 #define MSR_SFMASK MSR_SF_MASK /* 0xc0000084 */
668 #define MSR_KERNELGSBASE MSR_KGSBASE /* 0xc0000102 */
669 #define MSR_NB_CFG MSR_AMD_NB_CFG /* 0xc001001f */
670 #define MSR_IC_CFG MSR_AMD_IC_CFG /* 0xc0011021 */
671 #define MSR_DE_CFG MSR_AMD_DE_CFG /* 0xc0011029 */
672 #define MSR_UCODE_AMD_PATCHLEVEL MSR_AMD_PATCH_LEVEL /* 0x0000008b */
673
674 /* MSR_IA32_ARCH_CAPABILITIES (0x10a) */
675 #define IA32_ARCH_RDCL_NO IA32_ARCH_CAP_RDCL_NO
676 #define IA32_ARCH_IBRS_ALL IA32_ARCH_CAP_IBRS_ALL
677 #define IA32_ARCH_RSBA IA32_ARCH_CAP_RSBA
678 #define IA32_ARCH_SKIP_L1DFL_VMENTRY IA32_ARCH_CAP_SKIP_L1DFL_VMENTRY
679 #define IA32_ARCH_SSB_NO IA32_ARCH_CAP_SSB_NO
680 #define IA32_ARCH_MDS_NO IA32_ARCH_CAP_MDS_NO
681 #define IA32_ARCH_IF_PSCHANGE_MC_NO IA32_ARCH_CAP_IF_PSCHANGE_MC_NO
682 #define IA32_ARCH_TSX_CTRL IA32_ARCH_CAP_TSX_CTRL
683 #define IA32_ARCH_TAA_NO IA32_ARCH_CAP_TAA_NO
684
685 /* MSR_IA32_FLUSH_CMD (0x10b) */
686 #define IA32_FLUSH_CMD_L1D_FLUSH IA32_FLUSH_CMD_L1D
687
688 #endif /* __DragonFly__ */
689
690 /* -------------------------------------------------------------------------- */
691
692 #ifdef _KERNEL
693 #define NVMM_X86_MACH_NCONF 0
694 #define NVMM_X86_VCPU_NCONF 2
695
/* Per-register bitmask applied to CPUID results (one mask per output). */
struct nvmm_x86_cpuid_mask {
	uint32_t eax;
	uint32_t ebx;
	uint32_t ecx;
	uint32_t edx;
};
702
703 /* FPU area + XSAVE header. */
/* FPU area + XSAVE header: 512-byte legacy area + 64-byte header. */
struct nvmm_x86_xsave {
	struct nvmm_x64_state_fpu fpu;	/* legacy FXSAVE area (512 bytes) */
	uint64_t xstate_bv;		/* XSTATE_BV: saved components */
	uint64_t xcomp_bv;		/* XCOMP_BV: compaction bitmap */
	uint8_t rsvd0[8];		/* reserved part of the header */
	uint8_t rsvd[40];		/* rest of the 64-byte header */
};
CTASSERT(sizeof(struct nvmm_x86_xsave) == 512 + 64);
712
713 extern const struct nvmm_x64_state nvmm_x86_reset_state;
714 extern const struct nvmm_x86_cpuid_mask nvmm_cpuid_00000001;
715 extern const struct nvmm_x86_cpuid_mask nvmm_cpuid_00000007;
716 extern const struct nvmm_x86_cpuid_mask nvmm_cpuid_80000001;
717 extern const struct nvmm_x86_cpuid_mask nvmm_cpuid_80000007;
718 extern const struct nvmm_x86_cpuid_mask nvmm_cpuid_80000008;
719
720 bool nvmm_x86_pat_validate(uint64_t);
721 uint32_t nvmm_x86_xsave_size(uint64_t);
722
723 /* -------------------------------------------------------------------------- */
724
725 /*
726 * ASM defines. We mainly rely on the already-existing OS definitions.
727 */
728
729 #if defined(__NetBSD__)
730 #include <x86/cpufunc.h>
731 #include <x86/fpu.h>
732 #elif defined(__DragonFly__)
733 #include <machine/cpufunc.h>
734 #include <machine/npx.h>
735 #endif
736
737 /* CPUID. */
/* CPUID result registers, filled by the x86_get_cpuid*() wrappers. */
typedef struct {
	uint32_t eax, ebx, ecx, edx;
} cpuid_desc_t;
741
742 #if defined(__NetBSD__)
743 #define x86_get_cpuid(l, d) x86_cpuid(l, (uint32_t *)d)
744 #define x86_get_cpuid2(l, c, d) x86_cpuid2(l, c, (uint32_t *)d)
745 #elif defined(__DragonFly__)
746 #define x86_get_cpuid(l, d) do_cpuid(l, (uint32_t *)d)
747 #define x86_get_cpuid2(l, c, d) cpuid_count(l, c, (uint32_t *)d)
748 #endif
749
750 /* Control registers. */
751 #if defined(__NetBSD__)
752 #define x86_get_cr0() rcr0()
753 #define x86_get_cr2() rcr2()
754 #define x86_get_cr3() rcr3()
755 #define x86_get_cr4() rcr4()
756 #define x86_set_cr0(v) lcr0(v)
757 #define x86_set_cr2(v) lcr2(v)
758 #define x86_set_cr4(v) lcr4(v)
759 #elif defined(__DragonFly__)
760 #define x86_get_cr0() rcr0()
761 #define x86_get_cr2() rcr2()
762 #define x86_get_cr3() rcr3()
763 #define x86_get_cr4() rcr4()
764 #define x86_set_cr0(v) load_cr0(v)
765 #define x86_set_cr2(v) load_cr2(v)
766 #define x86_set_cr4(v) load_cr4(v)
767 #endif
768
769 /* Debug registers. */
770 #if defined(__NetBSD__)
771 #include <x86/dbregs.h>
/* Save the current thread's debug registers via the NetBSD MI layer. */
static inline void
x86_curthread_save_dbregs(uint64_t *drs __unused)
{
	x86_dbregs_save(curlwp);
}
/* Restore the current thread's debug registers via the NetBSD MI layer. */
static inline void
x86_curthread_restore_dbregs(uint64_t *drs __unused)
{
	x86_dbregs_restore(curlwp);
}
782 #define x86_get_dr0() rdr0()
783 #define x86_get_dr1() rdr1()
784 #define x86_get_dr2() rdr2()
785 #define x86_get_dr3() rdr3()
786 #define x86_get_dr6() rdr6()
787 #define x86_get_dr7() rdr7()
788 #define x86_set_dr0(v) ldr0(v)
789 #define x86_set_dr1(v) ldr1(v)
790 #define x86_set_dr2(v) ldr2(v)
791 #define x86_set_dr3(v) ldr3(v)
792 #define x86_set_dr6(v) ldr6(v)
793 #define x86_set_dr7(v) ldr7(v)
794 #elif defined(__DragonFly__)
795 #include <sys/proc.h> /* struct lwp */
/*
 * Save the host debug registers into drs[], indexed by NVMM_X64_DR_*.
 * Skipped in the common case where the current thread does not use
 * debug registers (PCB_DBREGS clear), as there is then nothing of the
 * thread's to preserve.
 */
static inline void
x86_curthread_save_dbregs(uint64_t *drs)
{
	struct pcb *pcb = curthread->td_lwp->lwp_thread->td_pcb;

	if (__predict_true(!(pcb->pcb_flags & PCB_DBREGS)))
		return;

	drs[NVMM_X64_DR_DR0] = rdr0();
	drs[NVMM_X64_DR_DR1] = rdr1();
	drs[NVMM_X64_DR_DR2] = rdr2();
	drs[NVMM_X64_DR_DR3] = rdr3();
	drs[NVMM_X64_DR_DR6] = rdr6();
	drs[NVMM_X64_DR_DR7] = rdr7();
}
/*
 * Reload the host debug registers from drs[] (as filled by
 * x86_curthread_save_dbregs). No-op unless the current thread uses
 * debug registers (PCB_DBREGS set).
 */
static inline void
x86_curthread_restore_dbregs(uint64_t *drs)
{
	struct pcb *pcb = curthread->td_lwp->lwp_thread->td_pcb;

	if (__predict_true(!(pcb->pcb_flags & PCB_DBREGS)))
		return;

	load_dr0(drs[NVMM_X64_DR_DR0]);
	load_dr1(drs[NVMM_X64_DR_DR1]);
	load_dr2(drs[NVMM_X64_DR_DR2]);
	load_dr3(drs[NVMM_X64_DR_DR3]);
	load_dr6(drs[NVMM_X64_DR_DR6]);
	load_dr7(drs[NVMM_X64_DR_DR7]);
}
826 #define x86_get_dr0() rdr0()
827 #define x86_get_dr1() rdr1()
828 #define x86_get_dr2() rdr2()
829 #define x86_get_dr3() rdr3()
830 #define x86_get_dr6() rdr6()
831 #define x86_get_dr7() rdr7()
832 #define x86_set_dr0(v) load_dr0(v)
833 #define x86_set_dr1(v) load_dr1(v)
834 #define x86_set_dr2(v) load_dr2(v)
835 #define x86_set_dr3(v) load_dr3(v)
836 #define x86_set_dr6(v) load_dr6(v)
837 #define x86_set_dr7(v) load_dr7(v)
838 #endif
839
840 /* FPU. */
841 #if defined(__NetBSD__)
842 #define x86_curthread_save_fpu() fpu_kern_enter()
843 #define x86_curthread_restore_fpu() fpu_kern_leave()
844 #define x86_save_fpu(a, m) fpu_area_save(a, m, true)
845 #define x86_restore_fpu(a, m) fpu_area_restore(a, m, true)
846 #elif defined(__DragonFly__)
847 #define x86_curthread_save_fpu() /* TODO */
848 #define x86_curthread_restore_fpu() /* TODO */
849 #define x86_save_fpu(a, m) \
850 ({ \
851 fpusave((union savefpu *)(a), m); \
852 load_cr0(rcr0() | CR0_TS); \
853 })
854 #define x86_restore_fpu(a, m) \
855 ({ \
856 __asm volatile("clts" ::: "memory"); \
857 fpurstor((union savefpu *)(a), m); \
858 })
859 #endif
860
861 /* XCRs. */
/*
 * Read extended control register 'xcr' with the XGETBV instruction.
 * XGETBV returns the low half in EAX and the high half in EDX.
 */
static inline uint64_t
x86_get_xcr(uint32_t xcr)
{
	uint32_t eax, edx;

	__asm volatile (
		"xgetbv"
		: "=a" (eax), "=d" (edx)
		: "c" (xcr)
	);

	return (((uint64_t)edx << 32) | eax);
}
875
/*
 * Write 'val' into extended control register 'xcr' with the XSETBV
 * instruction, which takes the low half in EAX and the high half in
 * EDX. Privileged; must run in ring 0.
 */
static inline void
x86_set_xcr(uint32_t xcr, uint64_t val)
{
	__asm volatile (
		"xsetbv"
		:
		: "a" ((uint32_t)val), "d" ((uint32_t)(val >> 32)), "c" (xcr)
		: "memory"
	);
}
890
891 #if defined(__DragonFly__)
892 #define x86_xsave_features npx_xcr0_mask
893 #define x86_fpu_mxcsr_mask npx_mxcsr_mask
894 #endif
895
896 #endif /* _KERNEL */
897
898 #endif /* ASM_NVMM */
899
900 #endif /* _NVMM_X86_H_ */
901