Searched refs:vcpu (Results 1 – 25 of 491) sorted by relevance

/linux/arch/arm64/include/asm/
kvm_emulate.h
43 bool kvm_condition_valid32(const struct kvm_vcpu *vcpu);
44 void kvm_skip_instr32(struct kvm_vcpu *vcpu);
46 void kvm_inject_undefined(struct kvm_vcpu *vcpu);
47 void kvm_inject_vabt(struct kvm_vcpu *vcpu);
48 void kvm_inject_dabt(struct kvm_vcpu *vcpu, unsigned long addr);
49 void kvm_inject_pabt(struct kvm_vcpu *vcpu, unsigned long addr);
50 void kvm_inject_size_fault(struct kvm_vcpu *vcpu);
52 void kvm_vcpu_wfi(struct kvm_vcpu *vcpu);
54 void kvm_emulate_nested_eret(struct kvm_vcpu *vcpu);
55 int kvm_inject_nested_sync(struct kvm_vcpu *vcpu, u6
59 vcpu_el1_is_32bit(struct kvm_vcpu * vcpu) vcpu_el1_is_32bit() argument
64 vcpu_el1_is_32bit(struct kvm_vcpu * vcpu) vcpu_el1_is_32bit() argument
70 vcpu_reset_hcr(struct kvm_vcpu * vcpu) vcpu_reset_hcr() argument
107 vcpu_hcr(struct kvm_vcpu * vcpu) vcpu_hcr() argument
112 vcpu_clear_wfx_traps(struct kvm_vcpu * vcpu) vcpu_clear_wfx_traps() argument
122 vcpu_set_wfx_traps(struct kvm_vcpu * vcpu) vcpu_set_wfx_traps() argument
128 vcpu_ptrauth_enable(struct kvm_vcpu * vcpu) vcpu_ptrauth_enable() argument
133 vcpu_ptrauth_disable(struct kvm_vcpu * vcpu) vcpu_ptrauth_disable() argument
138 vcpu_get_vsesr(struct kvm_vcpu * vcpu) vcpu_get_vsesr() argument
143 vcpu_set_vsesr(struct kvm_vcpu * vcpu,u64 vsesr) vcpu_set_vsesr() argument
148 vcpu_pc(const struct kvm_vcpu * vcpu) vcpu_pc() argument
153 vcpu_cpsr(const struct kvm_vcpu * vcpu) vcpu_cpsr() argument
158 vcpu_mode_is_32bit(const struct kvm_vcpu * vcpu) vcpu_mode_is_32bit() argument
163 kvm_condition_valid(const struct kvm_vcpu * vcpu) kvm_condition_valid() argument
171 vcpu_set_thumb(struct kvm_vcpu * vcpu) vcpu_set_thumb() argument
181 vcpu_get_reg(const struct kvm_vcpu * vcpu,u8 reg_num) vcpu_get_reg() argument
187 vcpu_set_reg(struct kvm_vcpu * vcpu,u8 reg_num,unsigned long val) vcpu_set_reg() argument
205 vcpu_is_el2(const struct kvm_vcpu * vcpu) vcpu_is_el2() argument
216 vcpu_el2_e2h_is_set(const struct kvm_vcpu * vcpu) vcpu_el2_e2h_is_set() argument
226 vcpu_el2_tge_is_set(const struct kvm_vcpu * vcpu) vcpu_el2_tge_is_set() argument
246 is_hyp_ctxt(const struct kvm_vcpu * vcpu) is_hyp_ctxt() argument
283 vcpu_mode_priv(const struct kvm_vcpu * vcpu) vcpu_mode_priv() argument
297 kvm_vcpu_get_esr(const struct kvm_vcpu * vcpu) kvm_vcpu_get_esr() argument
302 kvm_vcpu_get_condition(const struct kvm_vcpu * vcpu) kvm_vcpu_get_condition() argument
312 kvm_vcpu_get_hfar(const struct kvm_vcpu * vcpu) kvm_vcpu_get_hfar() argument
317 kvm_vcpu_get_fault_ipa(const struct kvm_vcpu * vcpu) kvm_vcpu_get_fault_ipa() argument
322 kvm_vcpu_get_disr(const struct kvm_vcpu * vcpu) kvm_vcpu_get_disr() argument
327 kvm_vcpu_hvc_get_imm(const struct kvm_vcpu * vcpu) kvm_vcpu_hvc_get_imm() argument
332 kvm_vcpu_dabt_isvalid(const struct kvm_vcpu * vcpu) kvm_vcpu_dabt_isvalid() argument
337 kvm_vcpu_dabt_iss_nisv_sanitized(const struct kvm_vcpu * vcpu) kvm_vcpu_dabt_iss_nisv_sanitized() argument
342 kvm_vcpu_dabt_issext(const struct kvm_vcpu * vcpu) kvm_vcpu_dabt_issext() argument
347 kvm_vcpu_dabt_issf(const struct kvm_vcpu * vcpu) kvm_vcpu_dabt_issf() argument
352 kvm_vcpu_dabt_get_rd(const struct kvm_vcpu * vcpu) kvm_vcpu_dabt_get_rd() argument
357 kvm_vcpu_abt_iss1tw(const struct kvm_vcpu * vcpu) kvm_vcpu_abt_iss1tw() argument
363 kvm_vcpu_dabt_iswrite(const struct kvm_vcpu * vcpu) kvm_vcpu_dabt_iswrite() argument
368 kvm_vcpu_dabt_is_cm(const struct kvm_vcpu * vcpu) kvm_vcpu_dabt_is_cm() argument
373 kvm_vcpu_dabt_get_as(const struct kvm_vcpu * vcpu) kvm_vcpu_dabt_get_as() argument
379 kvm_vcpu_trap_il_is32bit(const struct kvm_vcpu * vcpu) kvm_vcpu_trap_il_is32bit() argument
384 kvm_vcpu_trap_get_class(const struct kvm_vcpu * vcpu) kvm_vcpu_trap_get_class() argument
389 kvm_vcpu_trap_is_iabt(const struct kvm_vcpu * vcpu) kvm_vcpu_trap_is_iabt() argument
394 kvm_vcpu_trap_is_exec_fault(const struct kvm_vcpu * vcpu) kvm_vcpu_trap_is_exec_fault() argument
399 kvm_vcpu_trap_get_fault(const struct kvm_vcpu * vcpu) kvm_vcpu_trap_get_fault() argument
405 kvm_vcpu_trap_is_permission_fault(const struct kvm_vcpu * vcpu) kvm_vcpu_trap_is_permission_fault() argument
411 kvm_vcpu_trap_is_translation_fault(const struct kvm_vcpu * vcpu) kvm_vcpu_trap_is_translation_fault() argument
417 kvm_vcpu_trap_get_perm_fault_granule(const struct kvm_vcpu * vcpu) kvm_vcpu_trap_get_perm_fault_granule() argument
425 kvm_vcpu_abt_issea(const struct kvm_vcpu * vcpu) kvm_vcpu_abt_issea() argument
438 kvm_vcpu_sys_get_rt(struct kvm_vcpu * vcpu) kvm_vcpu_sys_get_rt() argument
444 kvm_is_write_fault(struct kvm_vcpu * vcpu) kvm_is_write_fault() argument
468 kvm_vcpu_get_mpidr_aff(struct kvm_vcpu * vcpu) kvm_vcpu_get_mpidr_aff() argument
473 kvm_vcpu_set_be(struct kvm_vcpu * vcpu) kvm_vcpu_set_be() argument
484 kvm_vcpu_is_be(struct kvm_vcpu * vcpu) kvm_vcpu_is_be() argument
495 vcpu_data_guest_to_host(struct kvm_vcpu * vcpu,unsigned long data,unsigned int len) vcpu_data_guest_to_host() argument
526 vcpu_data_host_to_guest(struct kvm_vcpu * vcpu,unsigned long data,unsigned int len) vcpu_data_host_to_guest() argument
557 kvm_incr_pc(struct kvm_vcpu * vcpu) kvm_incr_pc() argument
578 kvm_get_reset_cptr_el2(struct kvm_vcpu * vcpu) kvm_get_reset_cptr_el2() argument
608 kvm_reset_cptr_el2(struct kvm_vcpu * vcpu) kvm_reset_cptr_el2() argument
[all...]
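
The kvm_emulate.h hits above cover the arm64 vcpu helper API: fault injection (kvm_inject_undefined, kvm_inject_dabt, kvm_inject_pabt), general-purpose register access (vcpu_get_reg, vcpu_set_reg), syndrome decoding (the kvm_vcpu_dabt_* helpers) and PC advancement (kvm_incr_pc). As a rough sketch only, the snippet below shows how an MMIO-read emulation path could combine these helpers; my_dev_read() is a hypothetical device-model callback, everything else is taken from the declarations listed above.

	/* Sketch: finish a trapped guest load from emulated MMIO on arm64.
	 * my_dev_read() is a hypothetical device-model read callback. */
	static int sketch_handle_mmio_read(struct kvm_vcpu *vcpu, gpa_t fault_ipa)
	{
		int rd = kvm_vcpu_dabt_get_rd(vcpu);           /* destination GPR from the syndrome */
		unsigned int len = kvm_vcpu_dabt_get_as(vcpu); /* access size in bytes */
		unsigned long data;

		if (!kvm_vcpu_dabt_isvalid(vcpu))
			return -EINVAL;                        /* no decodable syndrome */

		data = my_dev_read(fault_ipa, len);            /* hypothetical device read */
		/* convert to the guest's endianness, then write the register back */
		vcpu_set_reg(vcpu, rd, vcpu_data_host_to_guest(vcpu, data, len));
		kvm_incr_pc(vcpu);                             /* step past the faulting instruction */
		return 0;
	}

Write emulation is symmetric: vcpu_get_reg() plus vcpu_data_guest_to_host() produce the value handed to the device model.
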
/linux/arch/s390/kvm/
priv.c
34 vcpu->stat.instruction_ri++; in handle_ri()
39 kvm_s390_retry_instr(vcpu); in handle_ri()
48 return handle_ri(vcpu); in kvm_s390_handle_aa()
66 vcpu->arch.gs_enabled = 1; in handle_gs()
78 return handle_gs(vcpu); in kvm_s390_handle_e3()
444 wait_event(vcpu->kvm->arch.ipte_wq, !ipte_lock_held(vcpu->kvm)); in handle_ipte_interlock()
464 return kvm_s390_inject_prog_irq(vcpu, &vcpu->arch.pgm); in handle_test_block()
495 inti = kvm_s390_get_io_int(vcpu->kvm, vcpu->arch.sie_block->gcr[6], 0); in handle_tpi()
574 vcpu->run->s390_tsch.ipb = vcpu->arch.sie_block->ipb; in handle_tsch()
1023 vcpu->run->s.regs.gprs[reg1] |= vcpu->arch.sie_block->gpsw.mask >> 32; in handle_epsw()
[all …]
intercept.c
216 if (!IS_TE_ENABLED(vcpu) || !IS_ITDB_VALID(vcpu)) in handle_itdb()
229 #define per_event(vcpu) (vcpu->arch.sie_block->iprcc & PGM_PER) argument
233 if (!guestdbg_enabled(vcpu) || !per_event(vcpu)) in should_handle_per_event()
269 trace_kvm_s390_intercept_prog(vcpu, vcpu->arch.sie_block->iprcc); in handle_prog()
278 rc = handle_itdb(vcpu); in handle_prog()
366 rc = guest_translate_address_with_key(vcpu, vcpu->run->s.regs.gprs[reg2], in handle_mvpg_pei()
375 rc = guest_translate_address_with_key(vcpu, vcpu->run->s.regs.gprs[reg1], in handle_mvpg_pei()
390 vcpu->stat.exit_pei++; in handle_partial_execution()
468 trace_kvm_s390_handle_operexc(vcpu, vcpu->arch.sie_block->ipa, in handle_operexc()
474 if (vcpu->arch.sie_block->ipa == 0 && vcpu->kvm->arch.user_instr0) in handle_operexc()
[all …]
diag.c
25 start = vcpu->run->s.regs.gprs[(vcpu->arch.sie_block->ipa & 0xf0) >> 4]; in diag_release_pages()
26 end = vcpu->run->s.regs.gprs[vcpu->arch.sie_block->ipa & 0xf] + PAGE_SIZE; in diag_release_pages()
76 vcpu->run->s.regs.gprs[rx]); in __diag_page_ref_service()
80 rc = read_guest(vcpu, vcpu->run->s.regs.gprs[rx], rx, &parm, sizeof(parm)); in __diag_page_ref_service()
149 kvm_vcpu_on_spin(vcpu, true); in __diag_time_slice_end()
172 tid = vcpu->run->s.regs.gprs[(vcpu->arch.sie_block->ipa & 0xf0) >> 4]; in __diag_time_slice_end_directed()
176 if (tid == vcpu->vcpu_id) in __diag_time_slice_end_directed()
194 VCPU_EVENT(vcpu, 5, in __diag_time_slice_end_directed()
197 vcpu->stat.diag_9c_forward++; in __diag_time_slice_end_directed()
208 vcpu->stat.diag_9c_ignored++; in __diag_time_slice_end_directed()
[all …]
guestdbg.c
132 vcpu->arch.guestdbg.cr0 = vcpu->arch.sie_block->gcr[0]; in kvm_s390_backup_guest_per_regs()
133 vcpu->arch.guestdbg.cr9 = vcpu->arch.sie_block->gcr[9]; in kvm_s390_backup_guest_per_regs()
134 vcpu->arch.guestdbg.cr10 = vcpu->arch.sie_block->gcr[10]; in kvm_s390_backup_guest_per_regs()
135 vcpu->arch.guestdbg.cr11 = vcpu->arch.sie_block->gcr[11]; in kvm_s390_backup_guest_per_regs()
140 vcpu->arch.sie_block->gcr[0] = vcpu->arch.guestdbg.cr0; in kvm_s390_restore_guest_per_regs()
141 vcpu->arch.sie_block->gcr[9] = vcpu->arch.guestdbg.cr9; in kvm_s390_restore_guest_per_regs()
142 vcpu->arch.sie_block->gcr[10] = vcpu->arch.guestdbg.cr10; in kvm_s390_restore_guest_per_regs()
143 vcpu->arch.sie_block->gcr[11] = vcpu->arch.guestdbg.cr11; in kvm_s390_restore_guest_per_regs()
590 if (debug_exit_required(vcpu, vcpu->arch.sie_block->perc, in kvm_s390_handle_per_event()
614 (pssec(vcpu) || hssec(vcpu))) in kvm_s390_handle_per_event()
[all …]
/linux/arch/riscv/kvm/
vcpu.c
45 static void kvm_riscv_reset_vcpu(struct kvm_vcpu *vcpu) in kvm_riscv_reset_vcpu() argument
47 struct kvm_vcpu_csr *csr = &vcpu->arch.guest_csr; in kvm_riscv_reset_vcpu()
48 struct kvm_vcpu_csr *reset_csr = &vcpu->arch.guest_reset_csr; in kvm_riscv_reset_vcpu()
49 struct kvm_cpu_context *cntx = &vcpu->arch.guest_context; in kvm_riscv_reset_vcpu()
50 struct kvm_cpu_context *reset_cntx = &vcpu->arch.guest_reset_context; in kvm_riscv_reset_vcpu()
59 loaded = (vcpu->cpu != -1); in kvm_riscv_reset_vcpu()
61 kvm_arch_vcpu_put(vcpu); in kvm_riscv_reset_vcpu()
63 vcpu->arch.last_exit_cpu = -1; in kvm_riscv_reset_vcpu()
67 spin_lock(&vcpu->arch.reset_cntx_lock); in kvm_riscv_reset_vcpu()
69 spin_unlock(&vcpu in kvm_riscv_reset_vcpu()
99 kvm_arch_vcpu_create(struct kvm_vcpu * vcpu) kvm_arch_vcpu_create() argument
158 kvm_arch_vcpu_postcreate(struct kvm_vcpu * vcpu) kvm_arch_vcpu_postcreate() argument
169 kvm_arch_vcpu_destroy(struct kvm_vcpu * vcpu) kvm_arch_vcpu_destroy() argument
186 kvm_cpu_has_pending_timer(struct kvm_vcpu * vcpu) kvm_cpu_has_pending_timer() argument
191 kvm_arch_vcpu_blocking(struct kvm_vcpu * vcpu) kvm_arch_vcpu_blocking() argument
196 kvm_arch_vcpu_unblocking(struct kvm_vcpu * vcpu) kvm_arch_vcpu_unblocking() argument
201 kvm_arch_vcpu_runnable(struct kvm_vcpu * vcpu) kvm_arch_vcpu_runnable() argument
207 kvm_arch_vcpu_should_kick(struct kvm_vcpu * vcpu) kvm_arch_vcpu_should_kick() argument
212 kvm_arch_vcpu_in_kernel(struct kvm_vcpu * vcpu) kvm_arch_vcpu_in_kernel() argument
217 kvm_arch_vcpu_fault(struct kvm_vcpu * vcpu,struct vm_fault * vmf) kvm_arch_vcpu_fault() argument
225 struct kvm_vcpu *vcpu = filp->private_data; kvm_arch_vcpu_async_ioctl() local
246 struct kvm_vcpu *vcpu = filp->private_data; kvm_arch_vcpu_ioctl() local
290 kvm_arch_vcpu_ioctl_get_sregs(struct kvm_vcpu * vcpu,struct kvm_sregs * sregs) kvm_arch_vcpu_ioctl_get_sregs() argument
296 kvm_arch_vcpu_ioctl_set_sregs(struct kvm_vcpu * vcpu,struct kvm_sregs * sregs) kvm_arch_vcpu_ioctl_set_sregs() argument
302 kvm_arch_vcpu_ioctl_get_fpu(struct kvm_vcpu * vcpu,struct kvm_fpu * fpu) kvm_arch_vcpu_ioctl_get_fpu() argument
307 kvm_arch_vcpu_ioctl_set_fpu(struct kvm_vcpu * vcpu,struct kvm_fpu * fpu) kvm_arch_vcpu_ioctl_set_fpu() argument
312 kvm_arch_vcpu_ioctl_translate(struct kvm_vcpu * vcpu,struct kvm_translation * tr) kvm_arch_vcpu_ioctl_translate() argument
318 kvm_arch_vcpu_ioctl_get_regs(struct kvm_vcpu * vcpu,struct kvm_regs * regs) kvm_arch_vcpu_ioctl_get_regs() argument
323 kvm_arch_vcpu_ioctl_set_regs(struct kvm_vcpu * vcpu,struct kvm_regs * regs) kvm_arch_vcpu_ioctl_set_regs() argument
328 kvm_riscv_vcpu_flush_interrupts(struct kvm_vcpu * vcpu) kvm_riscv_vcpu_flush_interrupts() argument
345 kvm_riscv_vcpu_sync_interrupts(struct kvm_vcpu * vcpu) kvm_riscv_vcpu_sync_interrupts() argument
375 kvm_riscv_vcpu_set_interrupt(struct kvm_vcpu * vcpu,unsigned int irq) kvm_riscv_vcpu_set_interrupt() argument
397 kvm_riscv_vcpu_unset_interrupt(struct kvm_vcpu * vcpu,unsigned int irq) kvm_riscv_vcpu_unset_interrupt() argument
417 kvm_riscv_vcpu_has_interrupts(struct kvm_vcpu * vcpu,u64 mask) kvm_riscv_vcpu_has_interrupts() argument
432 kvm_riscv_vcpu_power_off(struct kvm_vcpu * vcpu) kvm_riscv_vcpu_power_off() argument
439 kvm_riscv_vcpu_power_on(struct kvm_vcpu * vcpu) kvm_riscv_vcpu_power_on() argument
445 kvm_arch_vcpu_ioctl_get_mpstate(struct kvm_vcpu * vcpu,struct kvm_mp_state * mp_state) kvm_arch_vcpu_ioctl_get_mpstate() argument
456 kvm_arch_vcpu_ioctl_set_mpstate(struct kvm_vcpu * vcpu,struct kvm_mp_state * mp_state) kvm_arch_vcpu_ioctl_set_mpstate() argument
475 kvm_arch_vcpu_ioctl_set_guest_debug(struct kvm_vcpu * vcpu,struct kvm_guest_debug * dbg) kvm_arch_vcpu_ioctl_set_guest_debug() argument
482 kvm_riscv_vcpu_setup_config(struct kvm_vcpu * vcpu) kvm_riscv_vcpu_setup_config() argument
510 kvm_arch_vcpu_load(struct kvm_vcpu * vcpu,int cpu) kvm_arch_vcpu_load() argument
551 kvm_arch_vcpu_put(struct kvm_vcpu * vcpu) kvm_arch_vcpu_put() argument
579 kvm_riscv_check_vcpu_requests(struct kvm_vcpu * vcpu) kvm_riscv_check_vcpu_requests() argument
627 kvm_riscv_update_hvip(struct kvm_vcpu * vcpu) kvm_riscv_update_hvip() argument
635 kvm_riscv_vcpu_swap_in_guest_state(struct kvm_vcpu * vcpu) kvm_riscv_vcpu_swap_in_guest_state() argument
648 kvm_riscv_vcpu_swap_in_host_state(struct kvm_vcpu * vcpu) kvm_riscv_vcpu_swap_in_host_state() argument
668 kvm_riscv_vcpu_enter_exit(struct kvm_vcpu * vcpu) kvm_riscv_vcpu_enter_exit() argument
678 kvm_arch_vcpu_ioctl_run(struct kvm_vcpu * vcpu) kvm_arch_vcpu_ioctl_run() argument
[all...]
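
vcpu.c above implements the generic kvm_arch_vcpu_* callbacks for RISC-V together with the per-vcpu interrupt plumbing (kvm_riscv_vcpu_set_interrupt, kvm_riscv_vcpu_unset_interrupt, kvm_riscv_vcpu_has_interrupts). A minimal sketch of that interrupt API follows, assuming IRQ_VS_TIMER is the VS-level timer interrupt number the port uses; only the set/unset helpers come from the listing above.

	/* Sketch: raise and clear the guest timer interrupt on a RISC-V vcpu.
	 * IRQ_VS_TIMER is an assumed constant for illustration. */
	static void sketch_guest_timer_fired(struct kvm_vcpu *vcpu)
	{
		/* marks the interrupt pending so it is injected before the next guest entry */
		kvm_riscv_vcpu_set_interrupt(vcpu, IRQ_VS_TIMER);
	}

	static void sketch_guest_timer_cleared(struct kvm_vcpu *vcpu)
	{
		kvm_riscv_vcpu_unset_interrupt(vcpu, IRQ_VS_TIMER);
	}
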
/linux/arch/arm64/kvm/
debug.c
111 if (vcpu->guest_debug) in kvm_arm_setup_mdcr_el2()
140 kvm_arm_setup_mdcr_el2(vcpu); in kvm_arm_vcpu_init_debug()
151 vcpu->arch.debug_ptr = &vcpu->arch.vcpu_debug_state; in kvm_arm_reset_debug_ptr()
174 trace_kvm_arm_setup_debug(vcpu, vcpu->guest_debug); in kvm_arm_setup_debug()
176 kvm_arm_setup_mdcr_el2(vcpu); in kvm_arm_setup_debug()
179 if (vcpu->guest_debug || kvm_vcpu_os_lock_enabled(vcpu)) { in kvm_arm_setup_debug()
181 save_guest_debug_regs(vcpu); in kvm_arm_setup_debug()
241 vcpu->arch.debug_ptr = &vcpu->arch.external_debug_state; in kvm_arm_setup_debug()
267 BUG_ON(!vcpu->guest_debug && in kvm_arm_setup_debug()
268 vcpu->arch.debug_ptr != &vcpu->arch.vcpu_debug_state); in kvm_arm_setup_debug()
[all …]
inject_fault.c
21 if (likely(!vcpu_has_nv(vcpu))) { in pend_sync_exception()
42 if (vcpu_el2_tge_is_set(vcpu)) in pend_sync_exception()
63 pend_sync_exception(vcpu); in inject_abt64()
99 pend_sync_exception(vcpu); in inject_undef64()
168 if (vcpu_el1_is_32bit(vcpu)) in kvm_inject_dabt()
184 if (vcpu_el1_is_32bit(vcpu)) in kvm_inject_pabt()
198 kvm_inject_pabt(vcpu, addr); in kvm_inject_size_fault()
227 if (vcpu_el1_is_32bit(vcpu)) in kvm_inject_undefined()
228 inject_undef32(vcpu); in kvm_inject_undefined()
230 inject_undef64(vcpu); in kvm_inject_undefined()
[all …]
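
inject_fault.c is the arm64 back end for the kvm_inject_* declarations from kvm_emulate.h: it builds the architectural exception state for the injected fault, choosing the 32-bit or 64-bit path based on vcpu_el1_is_32bit(). A hedged sketch of a typical call site that gives up on emulating a guest memory access:

	/* Sketch: report a failed emulated access back to the guest as an abort.
	 * fault_va is whatever virtual address the emulated access used. */
	static void sketch_fail_guest_access(struct kvm_vcpu *vcpu,
					     unsigned long fault_va, bool is_fetch)
	{
		if (is_fetch)
			kvm_inject_pabt(vcpu, fault_va);   /* instruction (prefetch) abort */
		else
			kvm_inject_dabt(vcpu, fault_va);   /* data abort at fault_va */
	}

An instruction that cannot be decoded or is not permitted for the guest is reported with kvm_inject_undefined(vcpu) instead.
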
handle_exit.c
31 static void kvm_handle_guest_serror(struct kvm_vcpu *vcpu, u64 esr) in kvm_handle_guest_serror() argument
34 kvm_inject_vabt(vcpu); in kvm_handle_guest_serror()
37 static int handle_hvc(struct kvm_vcpu *vcpu) in handle_hvc() argument
39 trace_kvm_hvc_arm64(*vcpu_pc(vcpu), vcpu_get_reg(vcpu, 0), in handle_hvc()
40 kvm_vcpu_hvc_get_imm(vcpu)); in handle_hvc()
41 vcpu->stat.hvc_exit_stat++; in handle_hvc()
44 if (vcpu_has_nv(vcpu)) { in handle_hvc()
45 if (vcpu_read_sys_reg(vcpu, HCR_EL2) & HCR_HCD) in handle_hvc()
46 kvm_inject_undefined(vcpu); in handle_hvc()
56 handle_smc(struct kvm_vcpu * vcpu) handle_smc() argument
93 handle_no_fpsimd(struct kvm_vcpu * vcpu) handle_no_fpsimd() argument
114 kvm_handle_wfx(struct kvm_vcpu * vcpu) kvm_handle_wfx() argument
166 kvm_handle_guest_debug(struct kvm_vcpu * vcpu) kvm_handle_guest_debug() argument
188 kvm_handle_unknown_ec(struct kvm_vcpu * vcpu) kvm_handle_unknown_ec() argument
203 handle_sve(struct kvm_vcpu * vcpu) handle_sve() argument
214 kvm_handle_ptrauth(struct kvm_vcpu * vcpu) kvm_handle_ptrauth() argument
220 kvm_handle_eret(struct kvm_vcpu * vcpu) kvm_handle_eret() argument
244 handle_svc(struct kvm_vcpu * vcpu) handle_svc() argument
283 kvm_get_exit_handler(struct kvm_vcpu * vcpu) kvm_get_exit_handler() argument
297 handle_trap_exceptions(struct kvm_vcpu * vcpu) handle_trap_exceptions() argument
322 handle_exit(struct kvm_vcpu * vcpu,int exception_index) handle_exit() argument
366 handle_exit_early(struct kvm_vcpu * vcpu,int exception_index) handle_exit_early() argument
388 nvhe_hyp_panic_handler(u64 esr,u64 spsr,u64 elr_virt,u64 elr_phys,u64 par,uintptr_t vcpu,u64 far,u64 hpfar) nvhe_hyp_panic_handler() argument
[all...]
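
handle_exit.c dispatches each guest exit to a per-exception-class handler (handle_hvc, handle_smc, kvm_handle_wfx, kvm_handle_guest_debug, ...), and the handler's return value decides whether to re-enter the guest or return to userspace. The dispatch shape, condensed into a sketch: the typedef name and table contents here are illustrative, only kvm_vcpu_trap_get_class() is taken from the headers above.

	/* Sketch of the exit dispatch: index a handler table by the exception
	 * class decoded from the syndrome register.  Illustrative only. */
	typedef int (*sketch_exit_handler_fn)(struct kvm_vcpu *vcpu);

	static int sketch_dispatch_exit(struct kvm_vcpu *vcpu,
					const sketch_exit_handler_fn *handlers)
	{
		u8 ec = kvm_vcpu_trap_get_class(vcpu);   /* exception class from ESR_EL2 */
		sketch_exit_handler_fn fn = handlers[ec];

		/* convention: > 0 resume the guest, 0 exit to userspace, < 0 error */
		return fn ? fn(vcpu) : -EINVAL;
	}
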
reset.c
76 static void kvm_vcpu_enable_sve(struct kvm_vcpu *vcpu) in kvm_vcpu_enable_sve() argument
78 vcpu->arch.sve_max_vl = kvm_sve_max_vl; in kvm_vcpu_enable_sve()
85 vcpu_set_flag(vcpu, GUEST_HAS_SVE); in kvm_vcpu_enable_sve()
89 * Finalize vcpu's maximum SVE vector length, allocating
90 * vcpu->arch.sve_state as necessary.
92 static int kvm_vcpu_finalize_sve(struct kvm_vcpu *vcpu) in kvm_vcpu_finalize_sve() argument
99 vl = vcpu->arch.sve_max_vl; in kvm_vcpu_finalize_sve()
110 reg_sz = vcpu_sve_state_size(vcpu); in kvm_vcpu_finalize_sve()
121 vcpu->arch.sve_state = buf; in kvm_vcpu_finalize_sve()
122 vcpu_set_flag(vcpu, VCPU_SVE_FINALIZE in kvm_vcpu_finalize_sve()
126 kvm_arm_vcpu_finalize(struct kvm_vcpu * vcpu,int feature) kvm_arm_vcpu_finalize() argument
142 kvm_arm_vcpu_is_finalized(struct kvm_vcpu * vcpu) kvm_arm_vcpu_is_finalized() argument
150 kvm_arm_vcpu_destroy(struct kvm_vcpu * vcpu) kvm_arm_vcpu_destroy() argument
162 kvm_vcpu_reset_sve(struct kvm_vcpu * vcpu) kvm_vcpu_reset_sve() argument
168 kvm_vcpu_enable_ptrauth(struct kvm_vcpu * vcpu) kvm_vcpu_enable_ptrauth() argument
191 kvm_reset_vcpu(struct kvm_vcpu * vcpu) kvm_reset_vcpu() argument
[all...]
psci.c
49 kvm_vcpu_wfi(vcpu); in kvm_psci_vcpu_suspend()
64 struct kvm_vcpu *vcpu = NULL; in kvm_psci_vcpu_on() local
78 if (!vcpu) in kvm_psci_vcpu_on()
114 kvm_vcpu_wake_up(vcpu); in kvm_psci_vcpu_on()
128 struct kvm *kvm = vcpu->kvm; in kvm_psci_vcpu_affinity_info()
185 memset(&vcpu->run->system_event, 0, sizeof(vcpu->run->system_event)); in kvm_prepare_system_event()
226 vcpu_set_reg(vcpu, i, lower_32_bits(vcpu_get_reg(vcpu, i))); in kvm_psci_narrow_to_32bit()
283 kvm_psci_system_off(vcpu); in kvm_psci_0_2_call()
298 kvm_psci_system_reset(vcpu); in kvm_psci_0_2_call()
319 struct kvm *kvm = vcpu->kvm; in kvm_psci_1_x_call()
[all …]
/linux/arch/powerpc/kvm/
book3s_emulate.c
92 vcpu->arch.ppr_tm = vcpu->arch.ppr; in kvmppc_copyto_vcpu_tm()
94 vcpu->arch.amr_tm = vcpu->arch.amr; in kvmppc_copyto_vcpu_tm()
96 vcpu->arch.tar_tm = vcpu->arch.tar; in kvmppc_copyto_vcpu_tm()
111 vcpu->arch.ppr = vcpu->arch.ppr_tm; in kvmppc_copyfrom_vcpu_tm()
113 vcpu->arch.amr = vcpu->arch.amr_tm; in kvmppc_copyfrom_vcpu_tm()
115 vcpu->arch.tar = vcpu->arch.tar_tm; in kvmppc_copyfrom_vcpu_tm()
337 vcpu->arch.mmu.mtsrin(vcpu, in kvmppc_core_emulate_op_pr()
342 vcpu->arch.mmu.mtsrin(vcpu, in kvmppc_core_emulate_op_pr()
388 vcpu->arch.mmu.slbmte(vcpu, in kvmppc_core_emulate_op_pr()
396 vcpu->arch.mmu.slbie(vcpu, in kvmppc_core_emulate_op_pr()
[all …]
booke.c
513 set_guest_srr(vcpu, vcpu->arch.regs.nip, in kvmppc_booke_irqprio_deliver()
517 set_guest_csrr(vcpu, vcpu->arch.regs.nip, in kvmppc_booke_irqprio_deliver()
521 set_guest_dsrr(vcpu, vcpu->arch.regs.nip, in kvmppc_booke_irqprio_deliver()
525 set_guest_mcsrr(vcpu, vcpu->arch.regs.nip, in kvmppc_booke_irqprio_deliver()
530 vcpu->arch.regs.nip = vcpu->arch.ivpr | in kvmppc_booke_irqprio_deliver()
805 vcpu->arch.pgdir = vcpu->kvm->mm->pgd; in kvmppc_vcpu_run()
1894 vcpu->arch.dec = vcpu->arch.decar; in kvmppc_decrementer_func()
2140 vcpu->arch.shared->pir = vcpu->vcpu_id; in kvmppc_core_vcpu_create()
2160 vcpu->kvm->arch.kvm_ops->vcpu_free(vcpu); in kvmppc_core_vcpu_create()
2167 vcpu->kvm->arch.kvm_ops->vcpu_free(vcpu); in kvmppc_core_vcpu_free()
[all …]
booke_emulate.c
26 vcpu->arch.regs.nip = vcpu->arch.shared->srr0; in kvmppc_emul_rfi()
27 kvmppc_set_msr(vcpu, vcpu->arch.shared->srr1); in kvmppc_emul_rfi()
32 vcpu->arch.regs.nip = vcpu->arch.dsrr0; in kvmppc_emul_rfdi()
33 kvmppc_set_msr(vcpu, vcpu->arch.dsrr1); in kvmppc_emul_rfdi()
38 vcpu->arch.regs.nip = vcpu->arch.csrr0; in kvmppc_emul_rfci()
39 kvmppc_set_msr(vcpu, vcpu->arch.csrr1); in kvmppc_emul_rfci()
53 kvmppc_emul_rfi(vcpu); in kvmppc_booke_emulate_op()
80 kvmppc_set_gpr(vcpu, rt, vcpu->arch.shared->msr); in kvmppc_booke_emulate_op()
86 kvmppc_set_msr(vcpu, kvmppc_get_gpr(vcpu, rs)); in kvmppc_booke_emulate_op()
90 vcpu->arch.shared->msr = (vcpu->arch.shared->msr & ~MSR_EE) in kvmppc_booke_emulate_op()
[all …]
book3s_pr.c
53 static int kvmppc_handle_ext(struct kvm_vcpu *vcpu, unsigned int exit_nr,
56 static int kvmppc_handle_fac(struct kvm_vcpu *vcpu, ulong fac);
67 static bool kvmppc_is_split_real(struct kvm_vcpu *vcpu) in kvmppc_is_split_real() argument
69 ulong msr = kvmppc_get_msr(vcpu); in kvmppc_is_split_real()
73 static void kvmppc_fixup_split_real(struct kvm_vcpu *vcpu) in kvmppc_fixup_split_real() argument
75 ulong msr = kvmppc_get_msr(vcpu); in kvmppc_fixup_split_real()
76 ulong pc = kvmppc_get_pc(vcpu); in kvmppc_fixup_split_real()
83 if (vcpu->arch.hflags & BOOK3S_HFLAG_SPLIT_HACK) in kvmppc_fixup_split_real()
90 vcpu->arch.hflags |= BOOK3S_HFLAG_SPLIT_HACK; in kvmppc_fixup_split_real()
91 kvmppc_set_pc(vcpu, p in kvmppc_fixup_split_real()
94 kvmppc_unfixup_split_real(struct kvm_vcpu * vcpu) kvmppc_unfixup_split_real() argument
107 kvmppc_inject_interrupt_pr(struct kvm_vcpu * vcpu,int vec,u64 srr1_flags) kvmppc_inject_interrupt_pr() argument
132 kvmppc_core_vcpu_load_pr(struct kvm_vcpu * vcpu,int cpu) kvmppc_core_vcpu_load_pr() argument
161 kvmppc_core_vcpu_put_pr(struct kvm_vcpu * vcpu) kvmppc_core_vcpu_put_pr() argument
192 kvmppc_copy_to_svcpu(struct kvm_vcpu * vcpu) kvmppc_copy_to_svcpu() argument
231 kvmppc_recalc_shadow_msr(struct kvm_vcpu * vcpu) kvmppc_recalc_shadow_msr() argument
264 kvmppc_copy_from_svcpu(struct kvm_vcpu * vcpu) kvmppc_copy_from_svcpu() argument
343 kvmppc_save_tm_sprs(struct kvm_vcpu * vcpu) kvmppc_save_tm_sprs() argument
352 kvmppc_restore_tm_sprs(struct kvm_vcpu * vcpu) kvmppc_restore_tm_sprs() argument
364 kvmppc_handle_lost_math_exts(struct kvm_vcpu * vcpu) kvmppc_handle_lost_math_exts() argument
383 kvmppc_save_tm_pr(struct kvm_vcpu * vcpu) kvmppc_save_tm_pr() argument
398 kvmppc_restore_tm_pr(struct kvm_vcpu * vcpu) kvmppc_restore_tm_pr() argument
422 kvmppc_core_check_requests_pr(struct kvm_vcpu * vcpu) kvmppc_core_check_requests_pr() argument
438 struct kvm_vcpu *vcpu; do_kvm_unmap_gfn() local
472 kvmppc_set_msr_pr(struct kvm_vcpu * vcpu,u64 msr) kvmppc_set_msr_pr() argument
556 kvmppc_set_pvr_pr(struct kvm_vcpu * vcpu,u32 pvr) kvmppc_set_pvr_pr() argument
646 kvmppc_patch_dcbz(struct kvm_vcpu * vcpu,struct kvmppc_pte * pte) kvmppc_patch_dcbz() argument
673 kvmppc_visible_gpa(struct kvm_vcpu * vcpu,gpa_t gpa) kvmppc_visible_gpa() argument
688 kvmppc_handle_pagefault(struct kvm_vcpu * vcpu,ulong eaddr,int vec) kvmppc_handle_pagefault() argument
805 kvmppc_giveup_ext(struct kvm_vcpu * vcpu,ulong msr) kvmppc_giveup_ext() argument
848 kvmppc_giveup_fac(struct kvm_vcpu * vcpu,ulong fac) kvmppc_giveup_fac() argument
867 kvmppc_handle_ext(struct kvm_vcpu * vcpu,unsigned int exit_nr,ulong msr) kvmppc_handle_ext() argument
938 kvmppc_handle_lost_ext(struct kvm_vcpu * vcpu) kvmppc_handle_lost_ext() argument
967 kvmppc_trigger_fac_interrupt(struct kvm_vcpu * vcpu,ulong fac) kvmppc_trigger_fac_interrupt() argument
975 kvmppc_emulate_fac(struct kvm_vcpu * vcpu,ulong fac) kvmppc_emulate_fac() argument
989 kvmppc_handle_fac(struct kvm_vcpu * vcpu,ulong fac) kvmppc_handle_fac() argument
1044 kvmppc_set_fscr(struct kvm_vcpu * vcpu,u64 fscr) kvmppc_set_fscr() argument
1063 kvmppc_setup_debug(struct kvm_vcpu * vcpu) kvmppc_setup_debug() argument
1072 kvmppc_clear_debug(struct kvm_vcpu * vcpu) kvmppc_clear_debug() argument
1081 kvmppc_exit_pr_progint(struct kvm_vcpu * vcpu,unsigned int exit_nr) kvmppc_exit_pr_progint() argument
1143 kvmppc_handle_exit_pr(struct kvm_vcpu * vcpu,unsigned int exit_nr) kvmppc_handle_exit_pr() argument
1463 kvm_arch_vcpu_ioctl_get_sregs_pr(struct kvm_vcpu * vcpu,struct kvm_sregs * sregs) kvm_arch_vcpu_ioctl_get_sregs_pr() argument
1490 kvm_arch_vcpu_ioctl_set_sregs_pr(struct kvm_vcpu * vcpu,struct kvm_sregs * sregs) kvm_arch_vcpu_ioctl_set_sregs_pr() argument
1536 kvmppc_get_one_reg_pr(struct kvm_vcpu * vcpu,u64 id,union kvmppc_one_reg * val) kvmppc_get_one_reg_pr() argument
1636 kvmppc_set_lpcr_pr(struct kvm_vcpu * vcpu,u64 new_lpcr) kvmppc_set_lpcr_pr() argument
1644 kvmppc_set_one_reg_pr(struct kvm_vcpu * vcpu,u64 id,union kvmppc_one_reg * val) kvmppc_set_one_reg_pr() argument
1735 kvmppc_core_vcpu_create_pr(struct kvm_vcpu * vcpu) kvmppc_core_vcpu_create_pr() argument
1804 kvmppc_core_vcpu_free_pr(struct kvm_vcpu * vcpu) kvmppc_core_vcpu_free_pr() argument
1816 kvmppc_vcpu_run_pr(struct kvm_vcpu * vcpu) kvmppc_vcpu_run_pr() argument
1875 struct kvm_vcpu *vcpu; kvm_vm_ioctl_get_dirty_log_pr() local
1937 struct kvm_vcpu *vcpu; kvm_vm_ioctl_get_smmu_info_pr() local
[all...]
emulate_loadstore.c
31 kvmppc_core_queue_fpunavail(vcpu, kvmppc_get_msr(vcpu) & SRR1_PREFIXED); in kvmppc_check_fp_disabled()
43 kvmppc_core_queue_vsx_unavail(vcpu, kvmppc_get_msr(vcpu) & SRR1_PREFIXED); in kvmppc_check_vsx_disabled()
95 vcpu->arch.regs.msr = kvmppc_get_msr(vcpu); in kvmppc_emulate_loadstore()
114 kvmppc_set_gpr(vcpu, op.update_reg, vcpu->arch.vaddr_accessed); in kvmppc_emulate_loadstore()
134 kvmppc_set_gpr(vcpu, op.update_reg, vcpu->arch.vaddr_accessed); in kvmppc_emulate_loadstore()
233 kvmppc_set_gpr(vcpu, op.update_reg, vcpu->arch.vaddr_accessed); in kvmppc_emulate_loadstore()
247 vcpu->kvm->arch.kvm_ops->giveup_ext(vcpu, in kvmppc_emulate_loadstore()
257 kvmppc_set_gpr(vcpu, op.update_reg, vcpu->arch.vaddr_accessed); in kvmppc_emulate_loadstore()
271 vcpu->kvm->arch.kvm_ops->giveup_ext(vcpu, in kvmppc_emulate_loadstore()
316 vcpu->kvm->arch.kvm_ops->giveup_ext(vcpu, in kvmppc_emulate_loadstore()
[all …]
book3s_hv_tm.c
29 vcpu->arch.tfiar = tfiar; in emulate_tx_failure()
31 vcpu->arch.texasr = (vcpu->arch.texasr & 0x3ffffff) | texasr; in emulate_tx_failure()
56 vcpu->arch.regs.nip -= 4; in kvmhv_p9_tm_emulation()
79 vcpu->arch.cfar = vcpu->arch.regs.nip; in kvmhv_p9_tm_emulation()
80 vcpu->arch.regs.nip = vcpu->arch.shregs.srr0; in kvmhv_p9_tm_emulation()
103 bescr = vcpu->arch.bescr; in kvmhv_p9_tm_emulation()
113 vcpu->arch.cfar = vcpu->arch.regs.nip; in kvmhv_p9_tm_emulation()
114 vcpu->arch.regs.nip = vcpu->arch.ebbrr; in kvmhv_p9_tm_emulation()
156 vcpu->arch.regs.ccr = (vcpu->arch.regs.ccr & 0x0fffffff) | in kvmhv_p9_tm_emulation()
203 vcpu->arch.regs.ccr = (vcpu->arch.regs.ccr & 0x0fffffff) | in kvmhv_p9_tm_emulation()
[all …]
book3s.c
98 static inline void kvmppc_update_int_pending(struct kvm_vcpu *vcpu, in kvmppc_update_int_pending() argument
101 if (is_kvmppc_hv_enabled(vcpu->kvm)) in kvmppc_update_int_pending()
104 kvmppc_set_int_pending(vcpu, 1); in kvmppc_update_int_pending()
106 kvmppc_set_int_pending(vcpu, 0); in kvmppc_update_int_pending()
109 static inline bool kvmppc_critical_section(struct kvm_vcpu *vcpu) in kvmppc_critical_section() argument
115 if (is_kvmppc_hv_enabled(vcpu->kvm)) in kvmppc_critical_section()
118 crit_raw = kvmppc_get_critical(vcpu); in kvmppc_critical_section()
119 crit_r1 = kvmppc_get_gpr(vcpu, 1); in kvmppc_critical_section()
122 if (!(kvmppc_get_msr(vcpu) & MSR_SF)) { in kvmppc_critical_section()
130 crit = crit && !(kvmppc_get_msr(vcpu) in kvmppc_critical_section()
135 kvmppc_inject_interrupt(struct kvm_vcpu * vcpu,int vec,u64 flags) kvmppc_inject_interrupt() argument
167 kvmppc_book3s_dequeue_irqprio(struct kvm_vcpu * vcpu,unsigned int vec) kvmppc_book3s_dequeue_irqprio() argument
179 kvmppc_book3s_queue_irqprio(struct kvm_vcpu * vcpu,unsigned int vec) kvmppc_book3s_queue_irqprio() argument
191 kvmppc_core_queue_machine_check(struct kvm_vcpu * vcpu,ulong srr1_flags) kvmppc_core_queue_machine_check() argument
198 kvmppc_core_queue_syscall(struct kvm_vcpu * vcpu) kvmppc_core_queue_syscall() argument
204 kvmppc_core_queue_program(struct kvm_vcpu * vcpu,ulong srr1_flags) kvmppc_core_queue_program() argument
211 kvmppc_core_queue_fpunavail(struct kvm_vcpu * vcpu,ulong srr1_flags) kvmppc_core_queue_fpunavail() argument
217 kvmppc_core_queue_vec_unavail(struct kvm_vcpu * vcpu,ulong srr1_flags) kvmppc_core_queue_vec_unavail() argument
223 kvmppc_core_queue_vsx_unavail(struct kvm_vcpu * vcpu,ulong srr1_flags) kvmppc_core_queue_vsx_unavail() argument
229 kvmppc_core_queue_dec(struct kvm_vcpu * vcpu) kvmppc_core_queue_dec() argument
235 kvmppc_core_pending_dec(struct kvm_vcpu * vcpu) kvmppc_core_pending_dec() argument
241 kvmppc_core_dequeue_dec(struct kvm_vcpu * vcpu) kvmppc_core_dequeue_dec() argument
247 kvmppc_core_queue_external(struct kvm_vcpu * vcpu,struct kvm_interrupt * irq) kvmppc_core_queue_external() argument
276 kvmppc_core_dequeue_external(struct kvm_vcpu * vcpu) kvmppc_core_dequeue_external() argument
281 kvmppc_core_queue_data_storage(struct kvm_vcpu * vcpu,ulong srr1_flags,ulong dar,ulong dsisr) kvmppc_core_queue_data_storage() argument
290 kvmppc_core_queue_inst_storage(struct kvm_vcpu * vcpu,ulong srr1_flags) kvmppc_core_queue_inst_storage() argument
296 kvmppc_book3s_irqprio_deliver(struct kvm_vcpu * vcpu,unsigned int priority) kvmppc_book3s_irqprio_deliver() argument
376 clear_irqprio(struct kvm_vcpu * vcpu,unsigned int priority) clear_irqprio() argument
398 kvmppc_core_prepare_to_enter(struct kvm_vcpu * vcpu) kvmppc_core_prepare_to_enter() argument
428 kvmppc_gpa_to_pfn(struct kvm_vcpu * vcpu,gpa_t gpa,bool writing,bool * writable) kvmppc_gpa_to_pfn() argument
454 kvmppc_xlate(struct kvm_vcpu * vcpu,ulong eaddr,enum xlate_instdata xlid,enum xlate_readwrite xlrw,struct kvmppc_pte * pte) kvmppc_xlate() argument
492 kvmppc_load_last_inst(struct kvm_vcpu * vcpu,enum instruction_fetch_type type,unsigned long * inst) kvmppc_load_last_inst() argument
523 kvmppc_subarch_vcpu_init(struct kvm_vcpu * vcpu) kvmppc_subarch_vcpu_init() argument
528 kvmppc_subarch_vcpu_uninit(struct kvm_vcpu * vcpu) kvmppc_subarch_vcpu_uninit() argument
532 kvm_arch_vcpu_ioctl_get_sregs(struct kvm_vcpu * vcpu,struct kvm_sregs * sregs) kvm_arch_vcpu_ioctl_get_sregs() argument
544 kvm_arch_vcpu_ioctl_set_sregs(struct kvm_vcpu * vcpu,struct kvm_sregs * sregs) kvm_arch_vcpu_ioctl_set_sregs() argument
556 kvm_arch_vcpu_ioctl_get_regs(struct kvm_vcpu * vcpu,struct kvm_regs * regs) kvm_arch_vcpu_ioctl_get_regs() argument
584 kvm_arch_vcpu_ioctl_set_regs(struct kvm_vcpu * vcpu,struct kvm_regs * regs) kvm_arch_vcpu_ioctl_set_regs() argument
611 kvm_arch_vcpu_ioctl_get_fpu(struct kvm_vcpu * vcpu,struct kvm_fpu * fpu) kvm_arch_vcpu_ioctl_get_fpu() argument
616 kvm_arch_vcpu_ioctl_set_fpu(struct kvm_vcpu * vcpu,struct kvm_fpu * fpu) kvm_arch_vcpu_ioctl_set_fpu() argument
621 kvmppc_get_one_reg(struct kvm_vcpu * vcpu,u64 id,union kvmppc_one_reg * val) kvmppc_get_one_reg() argument
709 kvmppc_set_one_reg(struct kvm_vcpu * vcpu,u64 id,union kvmppc_one_reg * val) kvmppc_set_one_reg() argument
794 kvmppc_core_vcpu_load(struct kvm_vcpu * vcpu,int cpu) kvmppc_core_vcpu_load() argument
799 kvmppc_core_vcpu_put(struct kvm_vcpu * vcpu) kvmppc_core_vcpu_put() argument
804 kvmppc_set_msr(struct kvm_vcpu * vcpu,u64 msr) kvmppc_set_msr() argument
810 kvmppc_vcpu_run(struct kvm_vcpu * vcpu) kvmppc_vcpu_run() argument
815 kvm_arch_vcpu_ioctl_translate(struct kvm_vcpu * vcpu,struct kvm_translation * tr) kvm_arch_vcpu_ioctl_translate() argument
821 kvm_arch_vcpu_ioctl_set_guest_debug(struct kvm_vcpu * vcpu,struct kvm_guest_debug * dbg) kvm_arch_vcpu_ioctl_set_guest_debug() argument
830 kvmppc_decrementer_func(struct kvm_vcpu * vcpu) kvmppc_decrementer_func() argument
836 kvmppc_core_vcpu_create(struct kvm_vcpu * vcpu) kvmppc_core_vcpu_create() argument
841 kvmppc_core_vcpu_free(struct kvm_vcpu * vcpu) kvmppc_core_vcpu_free() argument
846 kvmppc_core_check_requests(struct kvm_vcpu * vcpu) kvmppc_core_check_requests() argument
942 kvmppc_h_logical_ci_load(struct kvm_vcpu * vcpu) kvmppc_h_logical_ci_load() argument
984 kvmppc_h_logical_ci_store(struct kvm_vcpu * vcpu) kvmppc_h_logical_ci_store() argument
[all...]
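
book3s.c centralizes interrupt delivery for Book3S guests: emulation and MMU code queue an interrupt with the kvmppc_core_queue_* helpers shown above, and kvmppc_core_prepare_to_enter() delivers the highest-priority pending one before the guest is re-entered. A small sketch of a caller, using only the signatures listed; the srr1/dsisr flag values are left as symbolic parameters.

	/* Sketch: queue guest interrupts for delivery on the next entry.
	 * srr1_flags/dsisr stand in for the architecture-defined bits a real
	 * caller would compute. */
	static void sketch_queue_guest_interrupts(struct kvm_vcpu *vcpu, bool dec_expired,
						  bool bad_data_access, ulong eaddr,
						  ulong srr1_flags, ulong dsisr)
	{
		if (dec_expired)
			kvmppc_core_queue_dec(vcpu);   /* decrementer exception */
		if (bad_data_access)
			kvmppc_core_queue_data_storage(vcpu, srr1_flags, eaddr, dsisr);
		/* both are delivered via kvmppc_core_prepare_to_enter() */
	}
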
book3s_paired_singles.c
152 kvm_cvt_df(&VCPU_FPR(vcpu, rt), &vcpu->arch.qpr[rt]); in kvmppc_sync_qpr()
759 VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_rb); in kvmppc_emulate_paired_single()
761 vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rb]; in kvmppc_emulate_paired_single()
770 VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_rb); in kvmppc_emulate_paired_single()
771 vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rb]; in kvmppc_emulate_paired_single()
779 VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_rb); in kvmppc_emulate_paired_single()
781 vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rb]; in kvmppc_emulate_paired_single()
786 VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_rb); in kvmppc_emulate_paired_single()
788 vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rb]; in kvmppc_emulate_paired_single()
793 VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_ra); in kvmppc_emulate_paired_single()
[all …]
/linux/arch/mips/kvm/
emulate.c
248 err = kvm_compute_return_epc(vcpu, vcpu->arch.pc, in update_pc()
1003 vcpu->arch.pc, vcpu->arch.host_cp0_badvaddr, in kvm_mips_emulate_store()
1013 vcpu->arch.pc, vcpu->arch.host_cp0_badvaddr, in kvm_mips_emulate_store()
1022 vcpu->arch.pc, vcpu->arch.host_cp0_badvaddr, in kvm_mips_emulate_store()
1031 vcpu->arch.pc, vcpu->arch.host_cp0_badvaddr, in kvm_mips_emulate_store()
1061 vcpu->arch.pc, vcpu->arch.host_cp0_badvaddr, in kvm_mips_emulate_store()
1091 vcpu->arch.pc, vcpu->arch.host_cp0_badvaddr, in kvm_mips_emulate_store()
1139 vcpu->arch.pc, vcpu->arch.host_cp0_badvaddr, in kvm_mips_emulate_store()
1186 vcpu->arch.pc, vcpu->arch.host_cp0_badvaddr, in kvm_mips_emulate_store()
1290 vcpu->arch.io_pc = vcpu->arch.pc; in kvm_mips_emulate_load()
[all …]
/linux/arch/x86/kvm/
hyperv.h
66 return vcpu->arch.hyperv; in to_hv_vcpu()
80 return hv_vcpu->vcpu; in hv_synic_to_vcpu()
100 return vcpu->arch.hyperv_enabled && to_kvm_hv(vcpu->kvm)->hv_guest_os_id; in kvm_hv_hypercall_enabled()
112 return to_hv_vcpu(vcpu) && test_bit(vector, to_hv_synic(vcpu)->vec_bitmap); in kvm_hv_synic_has_vector()
117 return to_hv_vcpu(vcpu) && in kvm_hv_synic_auto_eoi_set()
138 return hv_vcpu->vcpu; in hv_stimer_to_vcpu()
210 if (!to_hv_vcpu(vcpu) || !kvm_check_request(KVM_REQ_HV_TLB_FLUSH, vcpu)) in kvm_hv_vcpu_purge_flush_tlb()
213 tlb_flush_fifo = kvm_hv_get_tlb_flush_fifo(vcpu, is_guest_mode(vcpu)); in kvm_hv_vcpu_purge_flush_tlb()
234 code = is_64_bit_hypercall(vcpu) ? kvm_rcx_read(vcpu) : in kvm_hv_is_tlb_flush_hcall()
245 if (!to_hv_vcpu(vcpu)) in kvm_hv_verify_vp_assist()
[all …]
x86.c
106 ((struct kvm_vcpu *)(ctxt)->vcpu)
128 static void update_cr8_intercept(struct kvm_vcpu *vcpu);
129 static void process_nmi(struct kvm_vcpu *vcpu);
130 static void __kvm_set_rflags(struct kvm_vcpu *vcpu, unsigned long rflags);
131 static void store_regs(struct kvm_vcpu *vcpu);
132 static int sync_regs(struct kvm_vcpu *vcpu);
133 static int kvm_vcpu_do_singlestep(struct kvm_vcpu *vcpu);
135 static int __set_sregs2(struct kvm_vcpu *vcpu, struct kvm_sregs2 *sregs2);
136 static void __get_sregs2(struct kvm_vcpu *vcpu, struct kvm_sregs2 *sregs2);
358 static inline void kvm_async_pf_hash_reset(struct kvm_vcpu *vcpu) in kvm_async_pf_hash_reset()
355 kvm_async_pf_hash_reset(struct kvm_vcpu * vcpu) kvm_async_pf_hash_reset() argument
474 kvm_get_apic_base(struct kvm_vcpu * vcpu) kvm_get_apic_base() argument
479 kvm_get_apic_mode(struct kvm_vcpu * vcpu) kvm_get_apic_mode() argument
485 kvm_set_apic_base(struct kvm_vcpu * vcpu,struct msr_data * msr_info) kvm_set_apic_base() argument
573 kvm_deliver_exception_payload(struct kvm_vcpu * vcpu,struct kvm_queued_exception * ex) kvm_deliver_exception_payload() argument
625 kvm_queue_exception_vmexit(struct kvm_vcpu * vcpu,unsigned int vector,bool has_error_code,u32 error_code,bool has_payload,unsigned long payload) kvm_queue_exception_vmexit() argument
641 kvm_leave_nested(struct kvm_vcpu * vcpu) kvm_leave_nested() argument
646 kvm_multiple_exception(struct kvm_vcpu * vcpu,unsigned nr,bool has_error,u32 error_code,bool has_payload,unsigned long payload,bool reinject) kvm_multiple_exception() argument
731 kvm_queue_exception(struct kvm_vcpu * vcpu,unsigned nr) kvm_queue_exception() argument
737 kvm_requeue_exception(struct kvm_vcpu * vcpu,unsigned nr) kvm_requeue_exception() argument
743 kvm_queue_exception_p(struct kvm_vcpu * vcpu,unsigned nr,unsigned long payload) kvm_queue_exception_p() argument
750 kvm_queue_exception_e_p(struct kvm_vcpu * vcpu,unsigned nr,u32 error_code,unsigned long payload) kvm_queue_exception_e_p() argument
757 kvm_complete_insn_gp(struct kvm_vcpu * vcpu,int err) kvm_complete_insn_gp() argument
768 complete_emulated_insn_gp(struct kvm_vcpu * vcpu,int err) complete_emulated_insn_gp() argument
779 kvm_inject_page_fault(struct kvm_vcpu * vcpu,struct x86_exception * fault) kvm_inject_page_fault() argument
796 kvm_inject_emulated_page_fault(struct kvm_vcpu * vcpu,struct x86_exception * fault) kvm_inject_emulated_page_fault() argument
818 kvm_inject_nmi(struct kvm_vcpu * vcpu) kvm_inject_nmi() argument
824 kvm_queue_exception_e(struct kvm_vcpu * vcpu,unsigned nr,u32 error_code) kvm_queue_exception_e() argument
830 kvm_requeue_exception_e(struct kvm_vcpu * vcpu,unsigned nr,u32 error_code) kvm_requeue_exception_e() argument
840 kvm_require_cpl(struct kvm_vcpu * vcpu,int required_cpl) kvm_require_cpl() argument
848 kvm_require_dr(struct kvm_vcpu * vcpu,int dr) kvm_require_dr() argument
858 pdptr_rsvd_bits(struct kvm_vcpu * vcpu) pdptr_rsvd_bits() argument
866 load_pdptrs(struct kvm_vcpu * vcpu,unsigned long cr3) load_pdptrs() argument
913 kvm_is_valid_cr0(struct kvm_vcpu * vcpu,unsigned long cr0) kvm_is_valid_cr0() argument
929 kvm_post_set_cr0(struct kvm_vcpu * vcpu,unsigned long old_cr0,unsigned long cr0) kvm_post_set_cr0() argument
971 kvm_set_cr0(struct kvm_vcpu * vcpu,unsigned long cr0) kvm_set_cr0() argument
1012 kvm_lmsw(struct kvm_vcpu * vcpu,unsigned long msw) kvm_lmsw() argument
1018 kvm_load_guest_xsave_state(struct kvm_vcpu * vcpu) kvm_load_guest_xsave_state() argument
1041 kvm_load_host_xsave_state(struct kvm_vcpu * vcpu) kvm_load_host_xsave_state() argument
1068 kvm_guest_supported_xfd(struct kvm_vcpu * vcpu) kvm_guest_supported_xfd() argument
1074 __kvm_set_xcr(struct kvm_vcpu * vcpu,u32 index,u64 xcr) __kvm_set_xcr() argument
1119 kvm_emulate_xsetbv(struct kvm_vcpu * vcpu) kvm_emulate_xsetbv() argument
1132 __kvm_is_valid_cr4(struct kvm_vcpu * vcpu,unsigned long cr4) __kvm_is_valid_cr4() argument
1144 kvm_is_valid_cr4(struct kvm_vcpu * vcpu,unsigned long cr4) kvm_is_valid_cr4() argument
1150 kvm_post_set_cr4(struct kvm_vcpu * vcpu,unsigned long old_cr4,unsigned long cr4) kvm_post_set_cr4() argument
1192 kvm_set_cr4(struct kvm_vcpu * vcpu,unsigned long cr4) kvm_set_cr4() argument
1223 kvm_invalidate_pcid(struct kvm_vcpu * vcpu,unsigned long pcid) kvm_invalidate_pcid() argument
1266 kvm_set_cr3(struct kvm_vcpu * vcpu,unsigned long cr3) kvm_set_cr3() argument
1315 kvm_set_cr8(struct kvm_vcpu * vcpu,unsigned long cr8) kvm_set_cr8() argument
1327 kvm_get_cr8(struct kvm_vcpu * vcpu) kvm_get_cr8() argument
1336 kvm_update_dr0123(struct kvm_vcpu * vcpu) kvm_update_dr0123() argument
1346 kvm_update_dr7(struct kvm_vcpu * vcpu) kvm_update_dr7() argument
1361 kvm_dr6_fixed(struct kvm_vcpu * vcpu) kvm_dr6_fixed() argument
1373 kvm_set_dr(struct kvm_vcpu * vcpu,int dr,unsigned long val) kvm_set_dr() argument
1402 kvm_get_dr(struct kvm_vcpu * vcpu,int dr) kvm_get_dr() argument
1419 kvm_emulate_rdpmc(struct kvm_vcpu * vcpu) kvm_emulate_rdpmc() argument
1702 do_get_msr_feature(struct kvm_vcpu * vcpu,unsigned index,u64 * data) do_get_msr_feature() argument
1720 __kvm_valid_efer(struct kvm_vcpu * vcpu,u64 efer) __kvm_valid_efer() argument
1741 kvm_valid_efer(struct kvm_vcpu * vcpu,u64 efer) kvm_valid_efer() argument
1750 set_efer(struct kvm_vcpu * vcpu,struct msr_data * msr_info) set_efer() argument
1793 kvm_msr_allowed(struct kvm_vcpu * vcpu,u32 index,u32 type) kvm_msr_allowed() argument
1842 __kvm_set_msr(struct kvm_vcpu * vcpu,u32 index,u64 data,bool host_initiated) __kvm_set_msr() argument
1904 kvm_set_msr_ignored_check(struct kvm_vcpu * vcpu,u32 index,u64 data,bool host_initiated) kvm_set_msr_ignored_check() argument
1922 __kvm_get_msr(struct kvm_vcpu * vcpu,u32 index,u64 * data,bool host_initiated) __kvm_get_msr() argument
1949 kvm_get_msr_ignored_check(struct kvm_vcpu * vcpu,u32 index,u64 * data,bool host_initiated) kvm_get_msr_ignored_check() argument
1964 kvm_get_msr_with_filter(struct kvm_vcpu * vcpu,u32 index,u64 * data) kvm_get_msr_with_filter() argument
1971 kvm_set_msr_with_filter(struct kvm_vcpu * vcpu,u32 index,u64 data) kvm_set_msr_with_filter() argument
1978 kvm_get_msr(struct kvm_vcpu * vcpu,u32 index,u64 * data) kvm_get_msr() argument
1984 kvm_set_msr(struct kvm_vcpu * vcpu,u32 index,u64 data) kvm_set_msr() argument
1990 complete_userspace_rdmsr(struct kvm_vcpu * vcpu) complete_userspace_rdmsr() argument
1998 complete_emulated_msr_access(struct kvm_vcpu * vcpu) complete_emulated_msr_access() argument
2003 complete_emulated_rdmsr(struct kvm_vcpu * vcpu) complete_emulated_rdmsr() argument
2009 complete_fast_msr_access(struct kvm_vcpu * vcpu) complete_fast_msr_access() argument
2014 complete_fast_rdmsr(struct kvm_vcpu * vcpu) complete_fast_rdmsr() argument
2032 kvm_msr_user_space(struct kvm_vcpu * vcpu,u32 index,u32 exit_reason,u64 data,int (* completion)(struct kvm_vcpu * vcpu),int r) kvm_msr_user_space() argument
2034 kvm_msr_user_space(struct kvm_vcpu * vcpu,u32 index,u32 exit_reason,u64 data,int (* completion)(struct kvm_vcpu * vcpu),int r) kvm_msr_user_space() argument
2054 kvm_emulate_rdmsr(struct kvm_vcpu * vcpu) kvm_emulate_rdmsr() argument
2079 kvm_emulate_wrmsr(struct kvm_vcpu * vcpu) kvm_emulate_wrmsr() argument
2104 kvm_emulate_as_nop(struct kvm_vcpu * vcpu) kvm_emulate_as_nop() argument
2109 kvm_emulate_invd(struct kvm_vcpu * vcpu) kvm_emulate_invd() argument
2116 kvm_handle_invalid_op(struct kvm_vcpu * vcpu) kvm_handle_invalid_op() argument
2124 kvm_emulate_monitor_mwait(struct kvm_vcpu * vcpu,const char * insn) kvm_emulate_monitor_mwait() argument
2133 kvm_emulate_mwait(struct kvm_vcpu * vcpu) kvm_emulate_mwait() argument
2139 kvm_emulate_monitor(struct kvm_vcpu * vcpu) kvm_emulate_monitor() argument
2145 kvm_vcpu_exit_request(struct kvm_vcpu * vcpu) kvm_vcpu_exit_request() argument
2159 handle_fastpath_set_x2apic_icr_irqoff(struct kvm_vcpu * vcpu,u64 data) handle_fastpath_set_x2apic_icr_irqoff() argument
2173 handle_fastpath_set_tscdeadline(struct kvm_vcpu * vcpu,u64 data) handle_fastpath_set_tscdeadline() argument
2182 handle_fastpath_set_msr_irqoff(struct kvm_vcpu * vcpu) handle_fastpath_set_msr_irqoff() argument
2221 do_get_msr(struct kvm_vcpu * vcpu,unsigned index,u64 * data) do_get_msr() argument
2226 do_set_msr(struct kvm_vcpu * vcpu,unsigned index,u64 * data) do_set_msr() argument
2354 kvm_write_system_time(struct kvm_vcpu * vcpu,gpa_t system_time,bool old_msr,bool host_initiated) kvm_write_system_time() argument
2429 set_tsc_khz(struct kvm_vcpu * vcpu,u32 user_tsc_khz,bool scale) set_tsc_khz() argument
2465 kvm_set_tsc_khz(struct kvm_vcpu * vcpu,u32 user_tsc_khz) kvm_set_tsc_khz() argument
2499 compute_guest_tsc(struct kvm_vcpu * vcpu,s64 kernel_ns) compute_guest_tsc() argument
2515 kvm_track_tsc_matching(struct kvm_vcpu * vcpu,bool new_generation) kvm_track_tsc_matching() argument
2571 kvm_compute_l1_tsc_offset(struct kvm_vcpu * vcpu,u64 target_tsc) kvm_compute_l1_tsc_offset() argument
2580 kvm_read_l1_tsc(struct kvm_vcpu * vcpu,u64 host_tsc) kvm_read_l1_tsc() argument
2612 kvm_vcpu_write_tsc_offset(struct kvm_vcpu * vcpu,u64 l1_offset) kvm_vcpu_write_tsc_offset() argument
2636 kvm_vcpu_write_tsc_multiplier(struct kvm_vcpu * vcpu,u64 l1_multiplier) kvm_vcpu_write_tsc_multiplier() argument
2670 __kvm_synchronize_tsc(struct kvm_vcpu * vcpu,u64 offset,u64 tsc,u64 ns,bool matched) __kvm_synchronize_tsc() argument
2717 kvm_synchronize_tsc(struct kvm_vcpu * vcpu,u64 * user_value) kvm_synchronize_tsc() argument
2787 adjust_tsc_offset_guest(struct kvm_vcpu * vcpu,s64 adjustment) adjust_tsc_offset_guest() argument
2794 adjust_tsc_offset_host(struct kvm_vcpu * vcpu,s64 adjustment) adjust_tsc_offset_host() argument
3069 struct kvm_vcpu *vcpu; kvm_end_pvclock_update() local
3166 struct kvm_vcpu_arch *vcpu = &v->arch; kvm_setup_guest_pvclock() local
3219 struct kvm_vcpu_arch *vcpu = &v->arch; kvm_guest_time_update() local
3426 struct kvm_vcpu *vcpu; kvmclock_update_fn() local
3470 can_set_mci_status(struct kvm_vcpu * vcpu) can_set_mci_status() argument
3479 set_msr_mce(struct kvm_vcpu * vcpu,struct msr_data * msr_info) set_msr_mce() argument
3551 kvm_pv_async_pf_enabled(struct kvm_vcpu * vcpu) kvm_pv_async_pf_enabled() argument
3558 kvm_pv_enable_async_pf(struct kvm_vcpu * vcpu,u64 data) kvm_pv_enable_async_pf() argument
3597 kvm_pv_enable_async_pf_int(struct kvm_vcpu * vcpu,u64 data) kvm_pv_enable_async_pf_int() argument
3613 kvmclock_reset(struct kvm_vcpu * vcpu) kvmclock_reset() argument
3619 kvm_vcpu_flush_tlb_all(struct kvm_vcpu * vcpu) kvm_vcpu_flush_tlb_all() argument
3628 kvm_vcpu_flush_tlb_guest(struct kvm_vcpu * vcpu) kvm_vcpu_flush_tlb_guest() argument
3653 kvm_vcpu_flush_tlb_current(struct kvm_vcpu * vcpu) kvm_vcpu_flush_tlb_current() argument
3665 kvm_service_local_tlb_flush_requests(struct kvm_vcpu * vcpu) kvm_service_local_tlb_flush_requests() argument
3675 record_steal_time(struct kvm_vcpu * vcpu) record_steal_time() argument
3785 kvm_set_msr_common(struct kvm_vcpu * vcpu,struct msr_data * msr_info) kvm_set_msr_common() argument
4180 get_msr_mce(struct kvm_vcpu * vcpu,u32 msr,u64 * pdata,bool host) get_msr_mce() argument
4230 kvm_get_msr_common(struct kvm_vcpu * vcpu,struct msr_data * msr_info) kvm_get_msr_common() argument
4546 __msr_io(struct kvm_vcpu * vcpu,struct kvm_msrs * msrs,struct kvm_msr_entry * entries,int (* do_msr)(struct kvm_vcpu * vcpu,unsigned index,u64 * data)) __msr_io() argument
4548 __msr_io(struct kvm_vcpu * vcpu,struct kvm_msrs * msrs,struct kvm_msr_entry * entries,int (* do_msr)(struct kvm_vcpu * vcpu,unsigned index,u64 * data)) __msr_io() argument
4565 msr_io(struct kvm_vcpu * vcpu,struct kvm_msrs __user * user_msrs,int (* do_msr)(struct kvm_vcpu * vcpu,unsigned index,u64 * data),int writeback) msr_io() argument
4566 msr_io(struct kvm_vcpu * vcpu,struct kvm_msrs __user * user_msrs,int (* do_msr)(struct kvm_vcpu * vcpu,unsigned index,u64 * data),int writeback) msr_io() argument
4608 kvm_ioctl_get_supported_hv_cpuid(struct kvm_vcpu * vcpu,struct kvm_cpuid2 __user * cpuid_arg) kvm_ioctl_get_supported_hv_cpuid() argument
5006 need_emulate_wbinvd(struct kvm_vcpu * vcpu) need_emulate_wbinvd() argument
5011 kvm_arch_vcpu_load(struct kvm_vcpu * vcpu,int cpu) kvm_arch_vcpu_load() argument
5064 kvm_steal_time_set_preempted(struct kvm_vcpu * vcpu) kvm_steal_time_set_preempted() argument
5111 kvm_arch_vcpu_put(struct kvm_vcpu * vcpu) kvm_arch_vcpu_put() argument
5134 kvm_vcpu_ioctl_get_lapic(struct kvm_vcpu * vcpu,struct kvm_lapic_state * s) kvm_vcpu_ioctl_get_lapic() argument
5142 kvm_vcpu_ioctl_set_lapic(struct kvm_vcpu * vcpu,struct kvm_lapic_state * s) kvm_vcpu_ioctl_set_lapic() argument
5155 kvm_cpu_accept_dm_intr(struct kvm_vcpu * vcpu) kvm_cpu_accept_dm_intr() argument
5171 kvm_vcpu_ready_for_interrupt_injection(struct kvm_vcpu * vcpu) kvm_vcpu_ready_for_interrupt_injection() argument
5186 kvm_vcpu_ioctl_interrupt(struct kvm_vcpu * vcpu,struct kvm_interrupt * irq) kvm_vcpu_ioctl_interrupt() argument
5213 kvm_vcpu_ioctl_nmi(struct kvm_vcpu * vcpu) kvm_vcpu_ioctl_nmi() argument
5220 vcpu_ioctl_tpr_access_reporting(struct kvm_vcpu * vcpu,struct kvm_tpr_access_ctl * tac) vcpu_ioctl_tpr_access_reporting() argument
5229 kvm_vcpu_ioctl_x86_setup_mce(struct kvm_vcpu * vcpu,u64 mcg_cap) kvm_vcpu_ioctl_x86_setup_mce() argument
5276 kvm_vcpu_x86_set_ucna(struct kvm_vcpu * vcpu,struct kvm_x86_mce * mce,u64 * banks) kvm_vcpu_x86_set_ucna() argument
5295 kvm_vcpu_ioctl_x86_set_mce(struct kvm_vcpu * vcpu,struct kvm_x86_mce * mce) kvm_vcpu_ioctl_x86_set_mce() argument
5348 kvm_vcpu_ioctl_x86_get_vcpu_events(struct kvm_vcpu * vcpu,struct kvm_vcpu_events * events) kvm_vcpu_ioctl_x86_get_vcpu_events() argument
5441 kvm_vcpu_ioctl_x86_set_vcpu_events(struct kvm_vcpu * vcpu,struct kvm_vcpu_events * events) kvm_vcpu_ioctl_x86_set_vcpu_events() argument
5560 kvm_vcpu_ioctl_x86_get_debugregs(struct kvm_vcpu * vcpu,struct kvm_debugregs * dbgregs) kvm_vcpu_ioctl_x86_get_debugregs() argument
5575 kvm_vcpu_ioctl_x86_set_debugregs(struct kvm_vcpu * vcpu,struct kvm_debugregs * dbgregs) kvm_vcpu_ioctl_x86_set_debugregs() argument
5600 kvm_vcpu_ioctl_x86_get_xsave2(struct kvm_vcpu * vcpu,u8 * state,unsigned int size) kvm_vcpu_ioctl_x86_get_xsave2() argument
5625 kvm_vcpu_ioctl_x86_get_xsave(struct kvm_vcpu * vcpu,struct kvm_xsave * guest_xsave) kvm_vcpu_ioctl_x86_get_xsave() argument
5632 kvm_vcpu_ioctl_x86_set_xsave(struct kvm_vcpu * vcpu,struct kvm_xsave * guest_xsave) kvm_vcpu_ioctl_x86_set_xsave() argument
5644 kvm_vcpu_ioctl_x86_get_xcrs(struct kvm_vcpu * vcpu,struct kvm_xcrs * guest_xcrs) kvm_vcpu_ioctl_x86_get_xcrs() argument
5658 kvm_vcpu_ioctl_x86_set_xcrs(struct kvm_vcpu * vcpu,struct kvm_xcrs * guest_xcrs) kvm_vcpu_ioctl_x86_set_xcrs() argument
5687 kvm_set_guest_paused(struct kvm_vcpu * vcpu) kvm_set_guest_paused() argument
5696 kvm_arch_tsc_has_attr(struct kvm_vcpu * vcpu,struct kvm_device_attr * attr) kvm_arch_tsc_has_attr() argument
5712 kvm_arch_tsc_get_attr(struct kvm_vcpu * vcpu,struct kvm_device_attr * attr) kvm_arch_tsc_get_attr() argument
5735 kvm_arch_tsc_set_attr(struct kvm_vcpu * vcpu,struct kvm_device_attr * attr) kvm_arch_tsc_set_attr() argument
5778 kvm_vcpu_ioctl_device_attr(struct kvm_vcpu * vcpu,unsigned int ioctl,void __user * argp) kvm_vcpu_ioctl_device_attr() argument
5806 kvm_vcpu_ioctl_enable_cap(struct kvm_vcpu * vcpu,struct kvm_enable_cap * cap) kvm_vcpu_ioctl_enable_cap() argument
5865 struct kvm_vcpu *vcpu = filp->private_data; kvm_arch_vcpu_ioctl() local
6306 kvm_arch_vcpu_fault(struct kvm_vcpu * vcpu,struct vm_fault * vmf) kvm_arch_vcpu_fault() argument
6480 struct kvm_vcpu *vcpu; kvm_arch_sync_dirty_log() local
6901 struct kvm_vcpu *vcpu; kvm_arch_suspend_notifier() local
7468 vcpu_mmio_write(struct kvm_vcpu * vcpu,gpa_t addr,int len,const void * v) vcpu_mmio_write() argument
7489 vcpu_mmio_read(struct kvm_vcpu * vcpu,gpa_t addr,int len,void * v) vcpu_mmio_read() argument
7511 kvm_set_segment(struct kvm_vcpu * vcpu,struct kvm_segment * var,int seg) kvm_set_segment() argument
7517 kvm_get_segment(struct kvm_vcpu * vcpu,struct kvm_segment * var,int seg) kvm_get_segment() argument
7523 translate_nested_gpa(struct kvm_vcpu * vcpu,gpa_t gpa,u64 access,struct x86_exception * exception) translate_nested_gpa() argument
7538 kvm_mmu_gva_to_gpa_read(struct kvm_vcpu * vcpu,gva_t gva,struct x86_exception * exception) kvm_mmu_gva_to_gpa_read() argument
7548 kvm_mmu_gva_to_gpa_write(struct kvm_vcpu * vcpu,gva_t gva,struct x86_exception * exception) kvm_mmu_gva_to_gpa_write() argument
7560 kvm_mmu_gva_to_gpa_system(struct kvm_vcpu * vcpu,gva_t gva,struct x86_exception * exception) kvm_mmu_gva_to_gpa_system() argument
7569 kvm_read_guest_virt_helper(gva_t addr,void * val,unsigned int bytes,struct kvm_vcpu * vcpu,u64 access,struct x86_exception * exception) kvm_read_guest_virt_helper() argument
7604 struct kvm_vcpu *vcpu = emul_to_vcpu(ctxt); kvm_fetch_guest_virt() local
7627 kvm_read_guest_virt(struct kvm_vcpu * vcpu,gva_t addr,void * val,unsigned int bytes,struct x86_exception * exception) kvm_read_guest_virt() argument
7649 struct kvm_vcpu *vcpu = emul_to_vcpu(ctxt); emulator_read_std() local
7661 kvm_write_guest_virt_helper(gva_t addr,void * val,unsigned int bytes,struct kvm_vcpu * vcpu,u64 access,struct x86_exception * exception) kvm_write_guest_virt_helper() argument
7694 struct kvm_vcpu *vcpu = emul_to_vcpu(ctxt); emulator_write_std() local
7706 kvm_write_guest_virt_system(struct kvm_vcpu * vcpu,gva_t addr,void * val,unsigned int bytes,struct x86_exception * exception) kvm_write_guest_virt_system() argument
7717 kvm_check_emulate_insn(struct kvm_vcpu * vcpu,int emul_type,void * insn,int insn_len) kvm_check_emulate_insn() argument
7724 handle_ud(struct kvm_vcpu * vcpu) handle_ud() argument
7751 vcpu_is_mmio_gpa(struct kvm_vcpu * vcpu,unsigned long gva,gpa_t gpa,bool write) vcpu_is_mmio_gpa() argument
7766 vcpu_mmio_gva_to_gpa(struct kvm_vcpu * vcpu,unsigned long gva,gpa_t * gpa,struct x86_exception * exception,bool write) vcpu_mmio_gva_to_gpa() argument
7796 emulator_write_phys(struct kvm_vcpu * vcpu,gpa_t gpa,const void * val,int bytes) emulator_write_phys() argument
7820 read_prepare(struct kvm_vcpu * vcpu,void * val,int bytes) read_prepare() argument
7832 read_emulate(struct kvm_vcpu * vcpu,gpa_t gpa,void * val,int bytes) read_emulate() argument
7838 write_emulate(struct kvm_vcpu * vcpu,gpa_t gpa,void * val,int bytes) write_emulate() argument
7844 write_mmio(struct kvm_vcpu * vcpu,gpa_t gpa,int bytes,void * val) write_mmio() argument
7850 read_exit_mmio(struct kvm_vcpu * vcpu,gpa_t gpa,void * val,int bytes) read_exit_mmio() argument
7857 write_exit_mmio(struct kvm_vcpu * vcpu,gpa_t gpa,void * val,int bytes) write_exit_mmio() argument
7883 emulator_read_write_onepage(unsigned long addr,void * val,unsigned int bytes,struct x86_exception * exception,struct kvm_vcpu * vcpu,const struct read_write_emulator_ops * ops) emulator_read_write_onepage() argument
7937 struct kvm_vcpu *vcpu = emul_to_vcpu(ctxt); emulator_read_write() local
8015 struct kvm_vcpu *vcpu = emul_to_vcpu(ctxt); emulator_cmpxchg_emulated() local
8091 emulator_pio_in_out(struct kvm_vcpu * vcpu,int size,unsigned short port,void * data,unsigned int count,bool in) emulator_pio_in_out() argument
8142 emulator_pio_in(struct kvm_vcpu * vcpu,int size,unsigned short port,void * val,unsigned int count) emulator_pio_in() argument
8152 complete_emulator_pio_in(struct kvm_vcpu * vcpu,void * val) complete_emulator_pio_in() argument
8165 struct kvm_vcpu *vcpu = emul_to_vcpu(ctxt); emulator_pio_in_emulated() local
8181 emulator_pio_out(struct kvm_vcpu * vcpu,int size,unsigned short port,const void * val,unsigned int count) emulator_pio_out() argument
8196 get_segment_base(struct kvm_vcpu * vcpu,int seg) get_segment_base() argument
8206 kvm_emulate_wbinvd_noskip(struct kvm_vcpu * vcpu) kvm_emulate_wbinvd_noskip() argument
8224 kvm_emulate_wbinvd(struct kvm_vcpu * vcpu) kvm_emulate_wbinvd() argument
8257 struct kvm_vcpu *vcpu = emul_to_vcpu(ctxt); emulator_get_cr() local
8286 struct kvm_vcpu *vcpu = emul_to_vcpu(ctxt); emulator_set_cr() local
8384 struct kvm_vcpu *vcpu = emul_to_vcpu(ctxt); emulator_set_segment() local
8413 struct kvm_vcpu *vcpu = emul_to_vcpu(ctxt); emulator_get_msr_with_filter() local
8436 struct kvm_vcpu *vcpu = emul_to_vcpu(ctxt); emulator_set_msr_with_filter() local
8615 toggle_interruptibility(struct kvm_vcpu * vcpu,u32 mask) toggle_interruptibility() argument
8634 inject_emulated_exception(struct kvm_vcpu * vcpu) inject_emulated_exception() argument
8647 alloc_emulate_ctxt(struct kvm_vcpu * vcpu) alloc_emulate_ctxt() argument
8664 init_emulate_ctxt(struct kvm_vcpu * vcpu) init_emulate_ctxt() argument
8690 kvm_inject_realmode_interrupt(struct kvm_vcpu * vcpu,int irq,int inc_eip) kvm_inject_realmode_interrupt() argument
8712 prepare_emulation_failure_exit(struct kvm_vcpu * vcpu,u64 * data,u8 ndata,u8 * insn_bytes,u8 insn_size) prepare_emulation_failure_exit() argument
8763 prepare_emulation_ctxt_failure_exit(struct kvm_vcpu * vcpu) prepare_emulation_ctxt_failure_exit() argument
8771 __kvm_prepare_emulation_failure_exit(struct kvm_vcpu * vcpu,u64 * data,u8 ndata) __kvm_prepare_emulation_failure_exit() argument
8778 kvm_prepare_emulation_failure_exit(struct kvm_vcpu * vcpu) kvm_prepare_emulation_failure_exit() argument
8784 handle_emulation_failure(struct kvm_vcpu * vcpu,int emulation_type) handle_emulation_failure() argument
8812 reexecute_instruction(struct kvm_vcpu * vcpu,gpa_t cr2_or_gpa,int emulation_type) reexecute_instruction() argument
8882 struct kvm_vcpu *vcpu = emul_to_vcpu(ctxt); retry_instruction() local
8945 kvm_vcpu_do_singlestep(struct kvm_vcpu * vcpu) kvm_vcpu_do_singlestep() argument
8960 kvm_skip_emulated_instruction(struct kvm_vcpu * vcpu) kvm_skip_emulated_instruction() argument
8985 kvm_is_code_breakpoint_inhibited(struct kvm_vcpu * vcpu) kvm_is_code_breakpoint_inhibited() argument
9002 kvm_vcpu_check_code_breakpoint(struct kvm_vcpu * vcpu,int emulation_type,int * r) kvm_vcpu_check_code_breakpoint() argument
9101 x86_decode_emulated_instruction(struct kvm_vcpu * vcpu,int emulation_type,void * insn,int insn_len) x86_decode_emulated_instruction() argument
9118 x86_emulate_instruction(struct kvm_vcpu * vcpu,gpa_t cr2_or_gpa,int emulation_type,void * insn,int insn_len) x86_emulate_instruction() argument
9306 kvm_emulate_instruction(struct kvm_vcpu * vcpu,int emulation_type) kvm_emulate_instruction() argument
9312 kvm_emulate_instruction_from_buffer(struct kvm_vcpu * vcpu,void * insn,int insn_len) kvm_emulate_instruction_from_buffer() argument
9319 complete_fast_pio_out_port_0x7e(struct kvm_vcpu * vcpu) complete_fast_pio_out_port_0x7e() argument
9325 complete_fast_pio_out(struct kvm_vcpu * vcpu) complete_fast_pio_out() argument
9335 kvm_fast_pio_out(struct kvm_vcpu * vcpu,int size,unsigned short port) kvm_fast_pio_out() argument
9360 complete_fast_pio_in(struct kvm_vcpu * vcpu) complete_fast_pio_in() argument
9381 kvm_fast_pio_in(struct kvm_vcpu * vcpu,int size,unsigned short port) kvm_fast_pio_in() argument
9402 kvm_fast_pio(struct kvm_vcpu * vcpu,int size,unsigned short port,int in) kvm_fast_pio() argument
9469 struct kvm_vcpu *vcpu; __kvmclock_cpufreq_notifier() local
9600 struct kvm_vcpu *vcpu; pvclock_gtod_update_fn() local
9867 __kvm_emulate_halt(struct kvm_vcpu * vcpu,int state,int reason) __kvm_emulate_halt() argument
9886 kvm_emulate_halt_noskip(struct kvm_vcpu * vcpu) kvm_emulate_halt_noskip() argument
9892 kvm_emulate_halt(struct kvm_vcpu * vcpu) kvm_emulate_halt() argument
9903 kvm_emulate_ap_reset_hold(struct kvm_vcpu * vcpu) kvm_emulate_ap_reset_hold() argument
9913 kvm_pv_clock_pairing(struct kvm_vcpu * vcpu,gpa_t paddr,unsigned long clock_type) kvm_pv_clock_pairing() argument
9976 kvm_vcpu_apicv_activated(struct kvm_vcpu * vcpu) kvm_vcpu_apicv_activated() argument
10009 kvm_sched_yield(struct kvm_vcpu * vcpu,unsigned long dest_id) kvm_sched_yield() argument
10043 complete_hypercall_exit(struct kvm_vcpu * vcpu) complete_hypercall_exit() argument
10054 kvm_emulate_hypercall(struct kvm_vcpu * vcpu) kvm_emulate_hypercall() argument
10161 struct kvm_vcpu *vcpu = emul_to_vcpu(ctxt); emulator_fix_hypercall() local
10182 dm_request_for_irq_injection(struct kvm_vcpu * vcpu) dm_request_for_irq_injection() argument
10189 post_kvm_run_save(struct kvm_vcpu * vcpu) post_kvm_run_save() argument
10205 update_cr8_intercept(struct kvm_vcpu * vcpu) update_cr8_intercept() argument
10232 kvm_check_nested_events(struct kvm_vcpu * vcpu) kvm_check_nested_events() argument
10242 kvm_inject_exception(struct kvm_vcpu * vcpu) kvm_inject_exception() argument
10300 kvm_check_and_inject_events(struct kvm_vcpu * vcpu,bool * req_immediate_exit) kvm_check_and_inject_events() argument
10505 process_nmi(struct kvm_vcpu * vcpu) process_nmi() argument
10543 kvm_get_nr_pending_nmis(struct kvm_vcpu * vcpu) kvm_get_nr_pending_nmis() argument
10560 __kvm_vcpu_update_apicv(struct kvm_vcpu * vcpu) __kvm_vcpu_update_apicv() argument
10597 kvm_vcpu_update_apicv(struct kvm_vcpu * vcpu) kvm_vcpu_update_apicv() argument
10673 vcpu_scan_ioapic(struct kvm_vcpu * vcpu) vcpu_scan_ioapic() argument
10694 vcpu_load_eoi_exitmap(struct kvm_vcpu * vcpu) vcpu_load_eoi_exitmap() argument
10719 kvm_vcpu_reload_apic_access_page(struct kvm_vcpu * vcpu) kvm_vcpu_reload_apic_access_page() argument
10733 vcpu_enter_guest(struct kvm_vcpu * vcpu) vcpu_enter_guest() argument
11121 vcpu_block(struct kvm_vcpu * vcpu) vcpu_block() argument
11188 kvm_vcpu_running(struct kvm_vcpu * vcpu) kvm_vcpu_running() argument
11195 vcpu_run(struct kvm_vcpu * vcpu) vcpu_run() argument
11246 complete_emulated_io(struct kvm_vcpu * vcpu) complete_emulated_io() argument
11251 complete_emulated_pio(struct kvm_vcpu * vcpu) complete_emulated_pio() argument
11276 complete_emulated_mmio(struct kvm_vcpu * vcpu) complete_emulated_mmio() argument
11322 kvm_load_guest_fpu(struct kvm_vcpu * vcpu) kvm_load_guest_fpu() argument
11330 kvm_put_guest_fpu(struct kvm_vcpu * vcpu) kvm_put_guest_fpu() argument
11337 kvm_arch_vcpu_ioctl_run(struct kvm_vcpu * vcpu) kvm_arch_vcpu_ioctl_run() argument
11451 __get_regs(struct kvm_vcpu * vcpu,struct kvm_regs * regs) __get_regs() argument
11487 kvm_arch_vcpu_ioctl_get_regs(struct kvm_vcpu * vcpu,struct kvm_regs * regs) kvm_arch_vcpu_ioctl_get_regs() argument
11495 __set_regs(struct kvm_vcpu * vcpu,struct kvm_regs * regs) __set_regs() argument
11528 kvm_arch_vcpu_ioctl_set_regs(struct kvm_vcpu * vcpu,struct kvm_regs * regs) kvm_arch_vcpu_ioctl_set_regs() argument
11536 __get_sregs_common(struct kvm_vcpu * vcpu,struct kvm_sregs * sregs) __get_sregs_common() argument
11571 __get_sregs(struct kvm_vcpu * vcpu,struct kvm_sregs * sregs) __get_sregs() argument
11583 __get_sregs2(struct kvm_vcpu * vcpu,struct kvm_sregs2 * sregs2) __get_sregs2() argument
11599 kvm_arch_vcpu_ioctl_get_sregs(struct kvm_vcpu * vcpu,struct kvm_sregs * sregs) kvm_arch_vcpu_ioctl_get_sregs() argument
11608 kvm_arch_vcpu_ioctl_get_mpstate(struct kvm_vcpu * vcpu,struct kvm_mp_state * mp_state) kvm_arch_vcpu_ioctl_get_mpstate() argument
11636 kvm_arch_vcpu_ioctl_set_mpstate(struct kvm_vcpu * vcpu,struct kvm_mp_state * mp_state) kvm_arch_vcpu_ioctl_set_mpstate() argument
11684 kvm_task_switch(struct kvm_vcpu * vcpu,u16 tss_selector,int idt_index,int reason,bool has_error_code,u32 error_code) kvm_task_switch() argument
11707 kvm_is_valid_sregs(struct kvm_vcpu * vcpu,struct kvm_sregs * sregs) kvm_is_valid_sregs() argument
11732 __set_sregs_common(struct kvm_vcpu * vcpu,struct kvm_sregs * sregs,int * mmu_reset_needed,bool update_pdptrs) __set_sregs_common() argument
11804 __set_sregs(struct kvm_vcpu * vcpu,struct kvm_sregs * sregs) __set_sregs() argument
11830 __set_sregs2(struct kvm_vcpu * vcpu,struct kvm_sregs2 * sregs2) __set_sregs2() argument
11864 kvm_arch_vcpu_ioctl_set_sregs(struct kvm_vcpu * vcpu,struct kvm_sregs * sregs) kvm_arch_vcpu_ioctl_set_sregs() argument
11878 struct kvm_vcpu *vcpu; kvm_arch_vcpu_guestdbg_update_apicv_inhibit() local
11896 kvm_arch_vcpu_ioctl_set_guest_debug(struct kvm_vcpu * vcpu,struct kvm_guest_debug * dbg) kvm_arch_vcpu_ioctl_set_guest_debug() argument
11960 kvm_arch_vcpu_ioctl_translate(struct kvm_vcpu * vcpu,struct kvm_translation * tr) kvm_arch_vcpu_ioctl_translate() argument
11981 kvm_arch_vcpu_ioctl_get_fpu(struct kvm_vcpu * vcpu,struct kvm_fpu * fpu) kvm_arch_vcpu_ioctl_get_fpu() argument
12004 kvm_arch_vcpu_ioctl_set_fpu(struct kvm_vcpu * vcpu,struct kvm_fpu * fpu) kvm_arch_vcpu_ioctl_set_fpu() argument
12028 store_regs(struct kvm_vcpu * vcpu) store_regs() argument
12043 sync_regs(struct kvm_vcpu * vcpu) sync_regs() argument
12086 kvm_arch_vcpu_create(struct kvm_vcpu * vcpu) kvm_arch_vcpu_create() argument
12186 kvm_arch_vcpu_postcreate(struct kvm_vcpu * vcpu) kvm_arch_vcpu_postcreate() argument
12206 kvm_arch_vcpu_destroy(struct kvm_vcpu * vcpu) kvm_arch_vcpu_destroy() argument
12231 kvm_vcpu_reset(struct kvm_vcpu * vcpu,bool init_event) kvm_vcpu_reset() argument
12381 kvm_vcpu_deliver_sipi_vector(struct kvm_vcpu * vcpu,u8 vector) kvm_vcpu_deliver_sipi_vector() argument
12396 struct kvm_vcpu *vcpu; kvm_arch_hardware_enable() local
12495 kvm_vcpu_is_reset_bsp(struct kvm_vcpu * vcpu) kvm_vcpu_is_reset_bsp() argument
12500 kvm_vcpu_is_bsp(struct kvm_vcpu * vcpu) kvm_vcpu_is_bsp() argument
12505 kvm_arch_sched_in(struct kvm_vcpu * vcpu,int cpu) kvm_arch_sched_in() argument
12594 kvm_unload_vcpu_mmu(struct kvm_vcpu * vcpu) kvm_unload_vcpu_mmu() argument
12604 struct kvm_vcpu *vcpu; kvm_unload_vcpu_mmus() local
12846 struct kvm_vcpu *vcpu; kvm_arch_memslots_updated() local
13045 kvm_guest_apic_has_interrupt(struct kvm_vcpu * vcpu) kvm_guest_apic_has_interrupt() argument
13051 kvm_vcpu_has_events(struct kvm_vcpu * vcpu) kvm_vcpu_has_events() argument
13100 kvm_arch_vcpu_runnable(struct kvm_vcpu * vcpu) kvm_arch_vcpu_runnable() argument
13105 kvm_arch_dy_has_pending_interrupt(struct kvm_vcpu * vcpu) kvm_arch_dy_has_pending_interrupt() argument
13111 kvm_arch_vcpu_preempted_in_kernel(struct kvm_vcpu * vcpu) kvm_arch_vcpu_preempted_in_kernel() argument
13116 kvm_arch_dy_runnable(struct kvm_vcpu * vcpu) kvm_arch_dy_runnable() argument
13131 kvm_arch_vcpu_in_kernel(struct kvm_vcpu * vcpu) kvm_arch_vcpu_in_kernel() argument
13139 kvm_arch_vcpu_get_ip(struct kvm_vcpu * vcpu) kvm_arch_vcpu_get_ip() argument
13144 kvm_arch_vcpu_should_kick(struct kvm_vcpu * vcpu) kvm_arch_vcpu_should_kick() argument
13149 kvm_arch_interrupt_allowed(struct kvm_vcpu * vcpu) kvm_arch_interrupt_allowed() argument
13154 kvm_get_linear_rip(struct kvm_vcpu * vcpu) kvm_get_linear_rip() argument
13167 kvm_is_linear_rip(struct kvm_vcpu * vcpu,unsigned long linear_rip) kvm_is_linear_rip() argument
13173 kvm_get_rflags(struct kvm_vcpu * vcpu) kvm_get_rflags() argument
13184 __kvm_set_rflags(struct kvm_vcpu * vcpu,unsigned long rflags) __kvm_set_rflags() argument
13192 kvm_set_rflags(struct kvm_vcpu * vcpu,unsigned long rflags) kvm_set_rflags() argument
13211 kvm_add_async_pf_gfn(struct kvm_vcpu * vcpu,gfn_t gfn) kvm_add_async_pf_gfn() argument
13221 kvm_async_pf_gfn_slot(struct kvm_vcpu * vcpu,gfn_t gfn) kvm_async_pf_gfn_slot() argument
13234 kvm_find_async_pf_gfn(struct kvm_vcpu * vcpu,gfn_t gfn) kvm_find_async_pf_gfn() argument
13239 kvm_del_async_pf_gfn(struct kvm_vcpu * vcpu,gfn_t gfn) kvm_del_async_pf_gfn() argument
13266 apf_put_user_notpresent(struct kvm_vcpu * vcpu) apf_put_user_notpresent() argument
13274 apf_put_user_ready(struct kvm_vcpu * vcpu,u32 token) apf_put_user_ready() argument
13282 apf_pageready_slot_free(struct kvm_vcpu * vcpu) apf_pageready_slot_free() argument
13294 kvm_can_deliver_async_pf(struct kvm_vcpu * vcpu) kvm_can_deliver_async_pf() argument
13320 kvm_can_do_async_pf(struct kvm_vcpu * vcpu) kvm_can_do_async_pf() argument
13337 kvm_arch_async_page_not_present(struct kvm_vcpu * vcpu,struct kvm_async_pf * work) kvm_arch_async_page_not_present() argument
13369 kvm_arch_async_page_present(struct kvm_vcpu * vcpu,struct kvm_async_pf * work) kvm_arch_async_page_present() argument
13394 kvm_arch_async_page_present_queued(struct kvm_vcpu * vcpu) kvm_arch_async_page_present_queued() argument
13401 kvm_arch_can_dequeue_async_page_present(struct kvm_vcpu * vcpu) kvm_arch_can_dequeue_async_page_present() argument
13528 kvm_arch_no_poll(struct kvm_vcpu * vcpu) kvm_arch_no_poll() argument
13561 kvm_fixup_and_inject_pf_error(struct kvm_vcpu * vcpu,gva_t gva,u16 error_code) kvm_fixup_and_inject_pf_error() argument
13591 kvm_handle_memory_failure(struct kvm_vcpu * vcpu,int r,struct x86_exception * e) kvm_handle_memory_failure() argument
13615 kvm_handle_invpcid(struct kvm_vcpu * vcpu,unsigned long type,gva_t gva) kvm_handle_invpcid() argument
13679 complete_sev_es_emulated_mmio(struct kvm_vcpu * vcpu) complete_sev_es_emulated_mmio() argument
13725 kvm_sev_es_mmio_write(struct kvm_vcpu * vcpu,gpa_t gpa,unsigned int bytes,void * data) kvm_sev_es_mmio_write() argument
13764 kvm_sev_es_mmio_read(struct kvm_vcpu * vcpu,gpa_t gpa,unsigned int bytes,void * data) kvm_sev_es_mmio_read() argument
13802 advance_sev_es_emulated_pio(struct kvm_vcpu * vcpu,unsigned count,int size) advance_sev_es_emulated_pio() argument
13811 complete_sev_es_emulated_outs(struct kvm_vcpu * vcpu) complete_sev_es_emulated_outs() argument
13822 kvm_sev_es_outs(struct kvm_vcpu * vcpu,unsigned int size,unsigned int port) kvm_sev_es_outs() argument
13847 complete_sev_es_emulated_ins(struct kvm_vcpu * vcpu) complete_sev_es_emulated_ins() argument
13860 kvm_sev_es_ins(struct kvm_vcpu * vcpu,unsigned int size,unsigned int port) kvm_sev_es_ins() argument
13879 kvm_sev_es_string_io(struct kvm_vcpu * vcpu,unsigned int size,unsigned int port,void * data,unsigned int count,int in) kvm_sev_es_string_io() argument
[all...]
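Most of the x86.c hits above sit on the instruction-emulation and port-I/O exit paths (emulator_pio_in/out, kvm_fast_pio, complete_fast_pio_*, kvm_arch_vcpu_ioctl_run). For orientation only, here is a minimal sketch of the userspace side of those exits: a KVM_RUN loop consuming KVM_EXIT_IO. It is a skeleton under the assumption that guest memory and registers were already configured (not shown), and it is not code from this tree.

/* Hedged sketch: userspace loop for the KVM_EXIT_IO exits produced by the
 * in-kernel PIO emulation paths listed above.  Skeleton only: a real guest
 * additionally needs KVM_SET_USER_MEMORY_REGION and register setup. */
#include <fcntl.h>
#include <linux/kvm.h>
#include <stdint.h>
#include <stdio.h>
#include <sys/ioctl.h>
#include <sys/mman.h>

int main(void)
{
    int kvm = open("/dev/kvm", O_RDWR | O_CLOEXEC);
    int vm = ioctl(kvm, KVM_CREATE_VM, 0UL);
    int vcpu = ioctl(vm, KVM_CREATE_VCPU, 0UL);

    /* Exit information is reported through the shared kvm_run mapping. */
    int run_sz = ioctl(kvm, KVM_GET_VCPU_MMAP_SIZE, 0UL);
    struct kvm_run *run = mmap(NULL, run_sz, PROT_READ | PROT_WRITE,
                               MAP_SHARED, vcpu, 0);

    for (;;) {
        if (ioctl(vcpu, KVM_RUN, 0UL) < 0)
            break;
        if (run->exit_reason == KVM_EXIT_IO &&
            run->io.direction == KVM_EXIT_IO_OUT) {
            /* OUT data lives at io.data_offset inside the run mapping. */
            const uint8_t *data = (const uint8_t *)run + run->io.data_offset;
            printf("out port %#x, %u byte(s), first byte %#x\n",
                   run->io.port, run->io.size * run->io.count, data[0]);
        } else {
            break;  /* any other exit: stop the sketch */
        }
    }
    return 0;
}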
H A Dkvm_cache_regs.h103 return vcpu->arch.regs[reg]; in kvm_register_read_raw()
112 vcpu->arch.regs[reg] = val; in kvm_register_write_raw()
113 kvm_register_mark_dirty(vcpu, reg); in kvm_register_write_raw()
157 return vcpu->arch.cr0 & mask; in kvm_read_cr0_bits()
179 return vcpu->arch.cr4 & mask; in kvm_read_cr4_bits()
194 return vcpu->arch.cr3; in kvm_read_cr3()
204 return (kvm_rax_read(vcpu) & -1u) in kvm_read_edx_eax()
210 vcpu->arch.hflags |= HF_GUEST_MASK; in enter_guest_mode()
211 vcpu->stat.guest_mode = 1; in enter_guest_mode()
216 vcpu->arch.hflags &= ~HF_GUEST_MASK; in leave_guest_mode()
[all …]
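The kvm_cache_regs.h hits are the register-cache accessors: the read path returns vcpu->arch.regs[reg] once the value is known to be available, and the write path stores the value and marks the register dirty so it is flushed back before the next VM entry. Below is a self-contained toy sketch of that avail/dirty bitmap pattern; every name in it (toy_vcpu, toy_register_read, ...) is hypothetical and stands in for, rather than reproduces, the kernel's structures.

/* Hedged illustration of the avail/dirty register-cache pattern behind
 * kvm_register_read_raw()/kvm_register_write_raw().  All types are toys. */
#include <stdint.h>
#include <stdio.h>

enum { TOY_RAX, TOY_RCX, TOY_RDX, TOY_NR_REGS };

struct toy_vcpu {
    uint64_t regs[TOY_NR_REGS];
    uint32_t regs_avail;   /* bit set: cached copy is valid        */
    uint32_t regs_dirty;   /* bit set: must be written back later  */
};

static uint64_t toy_register_read(struct toy_vcpu *v, int reg)
{
    if (!(v->regs_avail & (1u << reg))) {
        /* The kernel would fetch from the VMCS/VMCB here; the toy
         * just invents a value. */
        v->regs[reg] = 0;
        v->regs_avail |= 1u << reg;
    }
    return v->regs[reg];
}

static void toy_register_write(struct toy_vcpu *v, int reg, uint64_t val)
{
    v->regs[reg] = val;
    v->regs_avail |= 1u << reg;
    v->regs_dirty |= 1u << reg;   /* flush before the next "VM entry" */
}

int main(void)
{
    struct toy_vcpu v = { 0 };

    toy_register_write(&v, TOY_RAX, 0x1234);
    printf("rax=%#llx dirty=%#x\n",
           (unsigned long long)toy_register_read(&v, TOY_RAX),
           (unsigned)v.regs_dirty);
    return 0;
}

In the real accessors the intent of the dirty bit is the same: emulated code can update registers repeatedly in the cached array, and only state that actually lives in a hardware structure is written back at entry time.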
H A Dx86.h94 void kvm_service_local_tlb_flush_requests(struct kvm_vcpu *vcpu);
95 int kvm_check_nested_events(struct kvm_vcpu *vcpu);
97 static inline bool kvm_vcpu_has_run(struct kvm_vcpu *vcpu) in kvm_vcpu_has_run()
99 return vcpu->arch.last_vmentry_cpu != -1;
102 static inline bool kvm_is_exception_pending(struct kvm_vcpu *vcpu) in kvm_is_exception_pending()
104 return vcpu->arch.exception.pending || in kvm_is_exception_pending()
105 vcpu->arch.exception_vmexit.pending || in kvm_is_exception_pending()
106 kvm_test_request(KVM_REQ_TRIPLE_FAULT, vcpu);
109 static inline void kvm_clear_exception_queue(struct kvm_vcpu *vcpu) in kvm_clear_exception_queue()
111 vcpu in kvm_clear_exception_queue()
100 kvm_is_exception_pending(struct kvm_vcpu * vcpu) kvm_is_exception_pending() argument
107 kvm_clear_exception_queue(struct kvm_vcpu * vcpu) kvm_clear_exception_queue() argument
114 kvm_queue_interrupt(struct kvm_vcpu * vcpu,u8 vector,bool soft) kvm_queue_interrupt() argument
122 kvm_clear_interrupt_queue(struct kvm_vcpu * vcpu) kvm_clear_interrupt_queue() argument
127 kvm_event_needs_reinjection(struct kvm_vcpu * vcpu) kvm_event_needs_reinjection() argument
138 is_protmode(struct kvm_vcpu * vcpu) is_protmode() argument
143 is_long_mode(struct kvm_vcpu * vcpu) is_long_mode() argument
152 is_64_bit_mode(struct kvm_vcpu * vcpu) is_64_bit_mode() argument
164 is_64_bit_hypercall(struct kvm_vcpu * vcpu) is_64_bit_hypercall() argument
183 mmu_is_nested(struct kvm_vcpu * vcpu) mmu_is_nested() argument
188 is_pae(struct kvm_vcpu * vcpu) is_pae() argument
193 is_pse(struct kvm_vcpu * vcpu) is_pse() argument
198 is_paging(struct kvm_vcpu * vcpu) is_paging() argument
203 is_pae_paging(struct kvm_vcpu * vcpu) is_pae_paging() argument
208 vcpu_virt_addr_bits(struct kvm_vcpu * vcpu) vcpu_virt_addr_bits() argument
213 is_noncanonical_address(u64 la,struct kvm_vcpu * vcpu) is_noncanonical_address() argument
218 vcpu_cache_mmio_info(struct kvm_vcpu * vcpu,gva_t gva,gfn_t gfn,unsigned access) vcpu_cache_mmio_info() argument
236 vcpu_match_mmio_gen(struct kvm_vcpu * vcpu) vcpu_match_mmio_gen() argument
247 vcpu_clear_mmio_info(struct kvm_vcpu * vcpu,gva_t gva) vcpu_clear_mmio_info() argument
255 vcpu_match_mmio_gva(struct kvm_vcpu * vcpu,unsigned long gva) vcpu_match_mmio_gva() argument
264 vcpu_match_mmio_gpa(struct kvm_vcpu * vcpu,gpa_t gpa) vcpu_match_mmio_gpa() argument
273 kvm_register_read(struct kvm_vcpu * vcpu,int reg) kvm_register_read() argument
280 kvm_register_write(struct kvm_vcpu * vcpu,int reg,unsigned long val) kvm_register_write() argument
378 kvm_pr_unimpl_wrmsr(struct kvm_vcpu * vcpu,u32 msr,u64 data) kvm_pr_unimpl_wrmsr() argument
384 kvm_pr_unimpl_rdmsr(struct kvm_vcpu * vcpu,u32 msr) kvm_pr_unimpl_rdmsr() argument
390 nsec_to_cycles(struct kvm_vcpu * vcpu,u64 nsec) nsec_to_cycles() argument
435 kvm_before_interrupt(struct kvm_vcpu * vcpu,enum kvm_intr_type intr) kvm_before_interrupt() argument
441 kvm_after_interrupt(struct kvm_vcpu * vcpu) kvm_after_interrupt() argument
446 kvm_handling_nmi_from_guest(struct kvm_vcpu * vcpu) kvm_handling_nmi_from_guest() argument
[all...]
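Most of the x86.h hits are one-line mode predicates. The is_noncanonical_address()/vcpu_virt_addr_bits() pair, for instance, reduces to a sign-extension check on the upper bits of a linear address (57 implemented bits with LA57, 48 otherwise). A standalone sketch of that check, using plain integers instead of the kernel's types:

/* Hedged sketch of the canonical-address test: an address is canonical
 * when bits [63:vaddr_bits-1] are all copies of bit (vaddr_bits - 1). */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

static uint64_t canonicalize(uint64_t la, int vaddr_bits)
{
    int shift = 64 - vaddr_bits;
    /* Shift left as unsigned, then arithmetic-shift right as signed
     * to sign-extend from bit (vaddr_bits - 1). */
    return (uint64_t)((int64_t)(la << shift) >> shift);
}

static bool is_noncanonical(uint64_t la, int vaddr_bits)
{
    return canonicalize(la, vaddr_bits) != la;
}

int main(void)
{
    /* With 48-bit virtual addresses the non-canonical hole spans
     * 0x0000800000000000 .. 0xffff7fffffffffff. */
    printf("%d\n", is_noncanonical(0x0000700000000000ull, 48));  /* 0 */
    printf("%d\n", is_noncanonical(0x0000800000000000ull, 48));  /* 1 */
    printf("%d\n", is_noncanonical(0xffff800000000000ull, 48));  /* 0 */
    return 0;
}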
/linux/arch/powerpc/include/asm/
H A Dkvm_book3s.h389 return vcpu->arch.book3s; in to_book3s()
415 vcpu->arch.regs.ccr = val; in kvmppc_set_cr()
422 return vcpu->arch.regs.ccr; in kvmppc_get_cr()
427 vcpu->arch.regs.xer = val; in kvmppc_set_xer()
434 return vcpu->arch.regs.xer; in kvmppc_get_xer()
439 vcpu->arch.regs.ctr = val; in kvmppc_set_ctr()
446 return vcpu->arch.regs.ctr; in kvmppc_get_ctr()
463 vcpu->arch.regs.nip = val; in kvmppc_set_pc()
504 vcpu->arch.fp.fpscr = val; in kvmppc_set_fpscr()
526 *v = vcpu->arch.vr.vr[i]; in kvmppc_get_vsx_vr()
[all …]
