Lines Matching refs:hvf

84 wreg(cpu->hvf->fd, HV_X86_TPR, tpr); in vmx_update_tpr()
86 wvmcs(cpu->hvf->fd, VMCS_TPR_THRESHOLD, 0); in vmx_update_tpr()
88 wvmcs(cpu->hvf->fd, VMCS_TPR_THRESHOLD, (irr > tpr) ? tpr >> 4 : in vmx_update_tpr()
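The three writes at 84-88 are the TPR-shadow update: the guest's task priority is mirrored into HV_X86_TPR, and VMCS_TPR_THRESHOLD is programmed so the vCPU exits as soon as a pending interrupt outranks the shadowed TPR. A minimal sketch of how they fit together, assuming the tpr/irr inputs come from QEMU's APIC helpers (cpu_get_apic_tpr / apic_get_highest_priority_irr):

    static void vmx_update_tpr(CPUState *cpu)
    {
        X86CPU *x86_cpu = X86_CPU(cpu);
        /* The APIC priority class is 4 bits; the CR8/TPR encoding shifts it. */
        int tpr = cpu_get_apic_tpr(x86_cpu->apic_state) << 4;
        int irr = apic_get_highest_priority_irr(x86_cpu->apic_state);

        wreg(cpu->hvf->fd, HV_X86_TPR, tpr);                    /* line 84 */
        if (irr == -1) {
            /* Nothing pending: never exit on guest TPR writes. */
            wvmcs(cpu->hvf->fd, VMCS_TPR_THRESHOLD, 0);         /* line 86 */
        } else {
            /* Exit once TPR drops below the pending priority. */
            wvmcs(cpu->hvf->fd, VMCS_TPR_THRESHOLD,
                  (irr > tpr) ? tpr >> 4 : irr >> 4);           /* line 88 */
        }
    }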
96 int tpr = rreg(cpu->hvf->fd, HV_X86_TPR) >> 4; in update_apic_tpr()
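Line 96 is the inverse path after a VM exit: the hardware TPR is read back and propagated into the emulated APIC. A sketch, assuming cpu_set_apic_tpr() as the write-back helper:

    static void update_apic_tpr(CPUState *cpu)
    {
        X86CPU *x86_cpu = X86_CPU(cpu);
        int tpr = rreg(cpu->hvf->fd, HV_X86_TPR) >> 4;          /* line 96 */

        cpu_set_apic_tpr(x86_cpu->apic_state, tpr);
    }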
258 wvmcs(cpu->hvf->fd, VMCS_PIN_BASED_CTLS, in hvf_arch_init_vcpu()
263 wvmcs(cpu->hvf->fd, VMCS_PRI_PROC_BASED_CTLS, in hvf_arch_init_vcpu()
270 wvmcs(cpu->hvf->fd, VMCS_SEC_PROC_BASED_CTLS, in hvf_arch_init_vcpu()
274 wvmcs(cpu->hvf->fd, VMCS_ENTRY_CTLS, cap2ctrl(hvf_state->hvf_caps->vmx_cap_entry, in hvf_arch_init_vcpu()
276 wvmcs(cpu->hvf->fd, VMCS_EXCEPTION_BITMAP, 0); /* Double fault */ in hvf_arch_init_vcpu()
278 wvmcs(cpu->hvf->fd, VMCS_TPR_THRESHOLD, 0); in hvf_arch_init_vcpu()
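Lines 258-278 program the VMCS execution-control fields during vCPU setup. Each requested control mask is filtered through the host's reported VMX capabilities via the cap2ctrl() helper visible at line 274: the low half of a capability MSR gives the must-be-one bits, the high half the may-be-one bits. A sketch of that helper plus one representative call; the vmx_cap_pinbased field name and the exact control-bit set are assumptions, only vmx_cap_entry appears in the listing:

    /* Combine requested controls with a VMX capability MSR:
     * low 32 bits = allowed-0 settings (forced on),
     * high 32 bits = allowed-1 settings (mask of what may be on). */
    static uint32_t cap2ctrl(uint64_t cap, uint32_t ctrl)
    {
        return (ctrl | (cap & 0xffffffff)) & (cap >> 32);
    }

    wvmcs(cpu->hvf->fd, VMCS_PIN_BASED_CTLS,
          cap2ctrl(hvf_state->hvf_caps->vmx_cap_pinbased,
                   VMCS_PIN_BASED_CTLS_EXTINT | VMCS_PIN_BASED_CTLS_NMI));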
290 hv_vcpu_enable_native_msr(cpu->hvf->fd, MSR_STAR, 1); in hvf_arch_init_vcpu()
291 hv_vcpu_enable_native_msr(cpu->hvf->fd, MSR_LSTAR, 1); in hvf_arch_init_vcpu()
292 hv_vcpu_enable_native_msr(cpu->hvf->fd, MSR_CSTAR, 1); in hvf_arch_init_vcpu()
293 hv_vcpu_enable_native_msr(cpu->hvf->fd, MSR_FMASK, 1); in hvf_arch_init_vcpu()
294 hv_vcpu_enable_native_msr(cpu->hvf->fd, MSR_FSBASE, 1); in hvf_arch_init_vcpu()
295 hv_vcpu_enable_native_msr(cpu->hvf->fd, MSR_GSBASE, 1); in hvf_arch_init_vcpu()
296 hv_vcpu_enable_native_msr(cpu->hvf->fd, MSR_KERNELGSBASE, 1); in hvf_arch_init_vcpu()
297 hv_vcpu_enable_native_msr(cpu->hvf->fd, MSR_TSC_AUX, 1); in hvf_arch_init_vcpu()
298 hv_vcpu_enable_native_msr(cpu->hvf->fd, MSR_IA32_TSC, 1); in hvf_arch_init_vcpu()
299 hv_vcpu_enable_native_msr(cpu->hvf->fd, MSR_IA32_SYSENTER_CS, 1); in hvf_arch_init_vcpu()
300 hv_vcpu_enable_native_msr(cpu->hvf->fd, MSR_IA32_SYSENTER_EIP, 1); in hvf_arch_init_vcpu()
301 hv_vcpu_enable_native_msr(cpu->hvf->fd, MSR_IA32_SYSENTER_ESP, 1); in hvf_arch_init_vcpu()
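Lines 290-301 whitelist the MSRs the guest may touch without trapping; hv_vcpu_enable_native_msr(vcpu, msr, 1) is the Hypervisor.framework call that enables pass-through for one MSR. The twelve calls could equally be written table-driven; the array below is an illustrative consolidation, not the original code:

    static const uint32_t native_msrs[] = {
        MSR_STAR, MSR_LSTAR, MSR_CSTAR, MSR_FMASK,
        MSR_FSBASE, MSR_GSBASE, MSR_KERNELGSBASE, MSR_TSC_AUX,
        MSR_IA32_TSC, MSR_IA32_SYSENTER_CS,
        MSR_IA32_SYSENTER_EIP, MSR_IA32_SYSENTER_ESP,
    };

    for (size_t i = 0; i < ARRAY_SIZE(native_msrs); i++) {
        hv_vcpu_enable_native_msr(cpu->hvf->fd, native_msrs[i], 1);
    }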
342 env->error_code = rvmcs(cpu->hvf->fd, VMCS_IDT_VECTORING_ERROR); in hvf_store_events()
345 if ((rvmcs(cpu->hvf->fd, VMCS_GUEST_INTERRUPTIBILITY) & in hvf_store_events()
351 if (rvmcs(cpu->hvf->fd, VMCS_GUEST_INTERRUPTIBILITY) & in hvf_store_events()
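Lines 342-351 capture pending-event state when the vCPU stops: the IDT-vectoring error code is saved into env, and the guest-interruptibility field is read twice, once for NMI blocking and once for the STI/MOV-SS interrupt shadow. A sketch of how those two reads typically map onto QEMU's hflags, assuming the standard VMCS_INTERRUPTIBILITY_* bit names:

    if (rvmcs(cpu->hvf->fd, VMCS_GUEST_INTERRUPTIBILITY) &
        VMCS_INTERRUPTIBILITY_NMI_BLOCKING) {                   /* line 345 */
        env->hflags2 |= HF2_NMI_MASK;
    } else {
        env->hflags2 &= ~HF2_NMI_MASK;
    }

    if (rvmcs(cpu->hvf->fd, VMCS_GUEST_INTERRUPTIBILITY) &
        (VMCS_INTERRUPTIBILITY_STI_BLOCKING |
         VMCS_INTERRUPTIBILITY_MOVSS_BLOCKING)) {               /* line 351 */
        env->hflags |= HF_INHIBIT_IRQ_MASK;
    } else {
        env->hflags &= ~HF_INHIBIT_IRQ_MASK;
    }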
430 hv_return_t r = hv_vcpu_run(cpu->hvf->fd); in hvf_vcpu_exec()
434 uint64_t exit_reason = rvmcs(cpu->hvf->fd, VMCS_EXIT_REASON); in hvf_vcpu_exec()
435 uint64_t exit_qual = rvmcs(cpu->hvf->fd, VMCS_EXIT_QUALIFICATION); in hvf_vcpu_exec()
436 uint32_t ins_len = (uint32_t)rvmcs(cpu->hvf->fd, in hvf_vcpu_exec()
439 uint64_t idtvec_info = rvmcs(cpu->hvf->fd, VMCS_IDT_VECTORING_INFO); in hvf_vcpu_exec()
442 rip = rreg(cpu->hvf->fd, HV_X86_RIP); in hvf_vcpu_exec()
443 env->eflags = rreg(cpu->hvf->fd, HV_X86_RFLAGS); in hvf_vcpu_exec()
473 uint64_t gpa = rvmcs(cpu->hvf->fd, VMCS_GUEST_PHYSICAL_ADDRESS); in hvf_vcpu_exec()
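Lines 430-473 are the core of the exit loop: run the vCPU, then pull the exit reason, qualification, instruction length and IDT-vectoring info out of the VMCS before dispatching, with the guest-physical address read separately for memory faults (line 473). A sketch of that sequence, assuming VMCS_EXIT_INSTRUCTION_LENGTH is the field truncated at line 436 and assert_hvf_ok() is the error check:

    hv_return_t r = hv_vcpu_run(cpu->hvf->fd);                  /* line 430 */
    assert_hvf_ok(r);

    /* Decode why the guest exited before dispatching on it. */
    uint64_t exit_reason = rvmcs(cpu->hvf->fd, VMCS_EXIT_REASON);
    uint64_t exit_qual   = rvmcs(cpu->hvf->fd, VMCS_EXIT_QUALIFICATION);
    uint32_t ins_len     = (uint32_t)rvmcs(cpu->hvf->fd,
                                           VMCS_EXIT_INSTRUCTION_LENGTH);
    uint64_t idtvec_info = rvmcs(cpu->hvf->fd, VMCS_IDT_VECTORING_INFO);

    uint64_t rip = rreg(cpu->hvf->fd, HV_X86_RIP);
    env->eflags  = rreg(cpu->hvf->fd, HV_X86_RFLAGS);

    if (exit_reason == EXIT_REASON_EPT_FAULT) {
        /* Memory faults carry the faulting guest-physical address;
         * gpa plus exit_qual decide MMIO vs. slot-backed RAM. */
        uint64_t gpa = rvmcs(cpu->hvf->fd, VMCS_GUEST_PHYSICAL_ADDRESS);
        (void)gpa;  /* handled by the EPT-fault path, elided here */
    }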
518 RAX(env) = rreg(cpu->hvf->fd, HV_X86_RAX); in hvf_vcpu_exec()
534 uint32_t rax = (uint32_t)rreg(cpu->hvf->fd, HV_X86_RAX); in hvf_vcpu_exec()
535 uint32_t rbx = (uint32_t)rreg(cpu->hvf->fd, HV_X86_RBX); in hvf_vcpu_exec()
536 uint32_t rcx = (uint32_t)rreg(cpu->hvf->fd, HV_X86_RCX); in hvf_vcpu_exec()
537 uint32_t rdx = (uint32_t)rreg(cpu->hvf->fd, HV_X86_RDX); in hvf_vcpu_exec()
541 env->cr[4] = rvmcs(cpu->hvf->fd, VMCS_GUEST_CR4); in hvf_vcpu_exec()
545 wreg(cpu->hvf->fd, HV_X86_RAX, rax); in hvf_vcpu_exec()
546 wreg(cpu->hvf->fd, HV_X86_RBX, rbx); in hvf_vcpu_exec()
547 wreg(cpu->hvf->fd, HV_X86_RCX, rcx); in hvf_vcpu_exec()
548 wreg(cpu->hvf->fd, HV_X86_RDX, rdx); in hvf_vcpu_exec()
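Lines 534-548 bracket the CPUID exit handler: the four input GPRs are read, the leaf is resolved by QEMU's CPUID emulation, and the results are written back before resuming the guest past the instruction (line 518 is the same read pattern in the I/O path). A sketch, assuming cpu_x86_cpuid() as the leaf resolver and macvm_set_rip() to skip the exiting instruction:

    case EXIT_REASON_CPUID: {
        uint32_t rax = (uint32_t)rreg(cpu->hvf->fd, HV_X86_RAX);
        uint32_t rbx = (uint32_t)rreg(cpu->hvf->fd, HV_X86_RBX);
        uint32_t rcx = (uint32_t)rreg(cpu->hvf->fd, HV_X86_RCX);
        uint32_t rdx = (uint32_t)rreg(cpu->hvf->fd, HV_X86_RDX);

        if (rax == 1) {
            /* CPUID.1:ECX.OSXSAVE must reflect CR4.OSXSAVE (line 541). */
            env->cr[4] = rvmcs(cpu->hvf->fd, VMCS_GUEST_CR4);
        }
        cpu_x86_cpuid(env, rax, rcx, &rax, &rbx, &rcx, &rdx);

        wreg(cpu->hvf->fd, HV_X86_RAX, rax);
        wreg(cpu->hvf->fd, HV_X86_RBX, rbx);
        wreg(cpu->hvf->fd, HV_X86_RCX, rcx);
        wreg(cpu->hvf->fd, HV_X86_RDX, rdx);

        macvm_set_rip(cpu, rip + ins_len);
        break;
    }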
556 uint32_t eax = (uint32_t)rreg(cpu->hvf->fd, HV_X86_RAX); in hvf_vcpu_exec()
557 uint32_t ecx = (uint32_t)rreg(cpu->hvf->fd, HV_X86_RCX); in hvf_vcpu_exec()
558 uint32_t edx = (uint32_t)rreg(cpu->hvf->fd, HV_X86_RDX); in hvf_vcpu_exec()
565 wreg(cpu->hvf->fd, HV_X86_XCR0, env->xcr0 | 1); in hvf_vcpu_exec()
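Lines 556-565 are the XSETBV exit: EDX:EAX supply the new XCR0 value and ECX selects the XCR, of which only XCR0 exists. Bit 0 (x87 state) is architecturally always 1, hence the `| 1` when the value is forwarded to the host at line 565. A sketch under those assumptions (macvm_set_rip() again assumed for skipping the instruction):

    case EXIT_REASON_XSETBV: {
        uint32_t eax = (uint32_t)rreg(cpu->hvf->fd, HV_X86_RAX);
        uint32_t ecx = (uint32_t)rreg(cpu->hvf->fd, HV_X86_RCX);
        uint32_t edx = (uint32_t)rreg(cpu->hvf->fd, HV_X86_RDX);

        if (ecx) {
            /* Only XCR0 is defined; ignore writes to other XCRs. */
            macvm_set_rip(cpu, rip + ins_len);
            break;
        }
        env->xcr0 = ((uint64_t)edx << 32) | eax;
        /* Keep the always-set x87 bit when programming the host. */
        wreg(cpu->hvf->fd, HV_X86_XCR0, env->xcr0 | 1);         /* line 565 */
        macvm_set_rip(cpu, rip + ins_len);
        break;
    }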
604 macvm_set_cr0(cpu->hvf->fd, RRX(env, reg)); in hvf_vcpu_exec()
608 macvm_set_cr4(cpu->hvf->fd, RRX(env, reg)); in hvf_vcpu_exec()
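Lines 604-608 handle mov-to-CR exits: the exit qualification encodes which control register was written and which GPR holds the new value, and macvm_set_cr0()/macvm_set_cr4() apply the VMX fixed-bit masks before updating the VMCS. A sketch, assuming RRX() indexes the GPR saved in env by its hardware number and load_regs() refreshes that state first:

    case EXIT_REASON_CR_ACCESS: {
        load_regs(cpu);
        int cr  = exit_qual & 15;         /* which control register */
        int reg = (exit_qual >> 8) & 15;  /* which GPR holds the value */

        switch (cr) {
        case 0:
            macvm_set_cr0(cpu->hvf->fd, RRX(env, reg));         /* line 604 */
            break;
        case 4:
            macvm_set_cr4(cpu->hvf->fd, RRX(env, reg));         /* line 608 */
            break;
        default:
            /* CR8 and others handled elsewhere; elided here. */
            break;
        }
        break;
    }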
644 uint64_t vinfo = rvmcs(cpu->hvf->fd, VMCS_IDT_VECTORING_INFO); in hvf_vcpu_exec()
657 wreg(cpu->hvf->fd, HV_X86_RAX, 0); in hvf_vcpu_exec()
658 wreg(cpu->hvf->fd, HV_X86_RDX, 0); in hvf_vcpu_exec()
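Line 644 reads the IDT-vectoring info on a task-switch exit so a pending event can be re-injected, and the paired zero writes at 657-658 look like a stubbed performance-counter read returning 0 in RAX:RDX, i.e. the RDPMC pattern. A sketch of the latter, under that assumption:

    case EXIT_REASON_RDPMC:
        /* No PMU is exposed to the guest: report zeroed counters. */
        wreg(cpu->hvf->fd, HV_X86_RAX, 0);                      /* line 657 */
        wreg(cpu->hvf->fd, HV_X86_RDX, 0);                      /* line 658 */
        macvm_set_rip(cpu, rip + ins_len);
        break;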