Lines Matching refs:mtrr_state

39 return &vcpu->arch.mtrr_state.var_ranges[index]; in var_mtrr_msr_to_range()
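
Line 39 maps an MSR number back to its variable-range slot. Architecturally the variable-range MSRs come in base/mask pairs starting at MTRRphysBase0 (0x200), so the slot index is half the offset from that base. A standalone sketch of that mapping (the helper name below is illustrative, not the kernel's):

#include <stdint.h>

#define MSR_MTRR_PHYS_BASE0 0x200u	/* MTRRphysBase0; pairs continue as base, mask, base, mask, ... */

/* 0x200/0x201 -> slot 0, 0x202/0x203 -> slot 1, and so on. */
static unsigned int var_mtrr_slot(uint32_t msr)
{
	return (msr - MSR_MTRR_PHYS_BASE0) / 2;
}
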
104 static bool mtrr_is_enabled(struct kvm_mtrr *mtrr_state) in mtrr_is_enabled() argument
106 return !!(mtrr_state->deftype & IA32_MTRR_DEF_TYPE_E); in mtrr_is_enabled()
109 static bool fixed_mtrr_is_enabled(struct kvm_mtrr *mtrr_state) in fixed_mtrr_is_enabled() argument
111 return !!(mtrr_state->deftype & IA32_MTRR_DEF_TYPE_FE); in fixed_mtrr_is_enabled()
114 static u8 mtrr_default_type(struct kvm_mtrr *mtrr_state) in mtrr_default_type() argument
116 return mtrr_state->deftype & IA32_MTRR_DEF_TYPE_TYPE_MASK; in mtrr_default_type()
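
The three helpers at 104..116 only decode the guest's IA32_MTRR_DEF_TYPE value: default memory type in bits 7:0, fixed-range enable in bit 10, global enable in bit 11. A standalone sketch of the same decoding against <stdint.h> (macro and function names here are illustrative, not the kernel's):

#include <stdbool.h>
#include <stdint.h>

#define MTRR_DEF_TYPE_TYPE_MASK	0xffULL		/* bits 7:0  - default memory type */
#define MTRR_DEF_TYPE_FE	(1ULL << 10)	/* bit 10    - fixed-range enable  */
#define MTRR_DEF_TYPE_E		(1ULL << 11)	/* bit 11    - global MTRR enable  */

static bool deftype_mtrrs_enabled(uint64_t deftype)
{
	return deftype & MTRR_DEF_TYPE_E;
}

static bool deftype_fixed_enabled(uint64_t deftype)
{
	return deftype & MTRR_DEF_TYPE_FE;
}

static uint8_t deftype_default_type(uint64_t deftype)
{
	return deftype & MTRR_DEF_TYPE_TYPE_MASK;
}
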
320 struct kvm_mtrr *mtrr_state = &vcpu->arch.mtrr_state; in update_mtrr() local
326 if (!mtrr_is_enabled(mtrr_state) && msr != MSR_MTRRdefType) in update_mtrr()
331 if (!fixed_mtrr_is_enabled(mtrr_state)) in update_mtrr()
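
The two checks at 326 and 331 gate how far update_mtrr() has to go after an MSR write: nothing needs to change while MTRRs are globally off (unless the write is to MSR_MTRRdefType itself, which can switch them on), and fixed-range writes are ignored while the fixed ranges are disabled. A compressed sketch of that gating, with all names assumed:

#include <stdbool.h>
#include <stdint.h>

/* true when an MTRR MSR write can actually change guest cache attributes */
static bool mtrr_write_matters(uint64_t deftype, bool is_deftype_msr,
			       bool is_fixed_msr)
{
	if (!(deftype & (1ULL << 11)) && !is_deftype_msr)
		return false;			/* MTRRs off, write can't matter   */
	if (is_fixed_msr && !(deftype & (1ULL << 10)))
		return false;			/* fixed ranges currently disabled */
	return true;
}
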
351 struct kvm_mtrr *mtrr_state = &vcpu->arch.mtrr_state; in set_var_mtrr_msr() local
371 list_for_each_entry(tmp, &mtrr_state->head, node) in set_var_mtrr_msr()
387 *(u64 *)&vcpu->arch.mtrr_state.fixed_ranges[index] = data; in kvm_mtrr_set_msr()
389 vcpu->arch.mtrr_state.deftype = data; in kvm_mtrr_set_msr()
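
The store at 387 works because fixed_ranges is an array of one-byte memory types and each fixed MTRR MSR carries eight of them, so the whole 64-bit payload lands in one shot; 389 simply keeps the raw deftype value. A standalone illustration of the fixed-range write (memcpy instead of the pointer cast; the array size and index meaning are assumptions):

#include <stdint.h>
#include <string.h>

static uint8_t fixed_ranges[88];	/* 11 fixed MTRR MSRs x 8 types each */

/* Write one fixed MTRR MSR: its 64-bit value carries eight one-byte types. */
static void set_fixed_msr(unsigned int msr_slot, uint64_t data)
{
	memcpy(&fixed_ranges[msr_slot * 8], &data, sizeof(data));
}
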
418 *pdata = *(u64 *)&vcpu->arch.mtrr_state.fixed_ranges[index]; in kvm_mtrr_get_msr()
420 *pdata = vcpu->arch.mtrr_state.deftype; in kvm_mtrr_get_msr()
436 INIT_LIST_HEAD(&vcpu->arch.mtrr_state.head); in kvm_vcpu_mtrr_init()
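
Taken together, lines 39, 387, 389, 418, 420 and 436 outline the per-vCPU state everything here operates on: variable ranges that double as nodes on a sorted list, 88 one-byte fixed-range types, and the raw deftype MSR. A reading-aid sketch of that shape (field names, and sizes beyond what the casts imply, are assumptions):

#include <stdint.h>

struct list_node {			/* stand-in for the kernel's struct list_head */
	struct list_node *next, *prev;
};

struct var_range {			/* one variable-range MTRR: base/mask pair */
	uint64_t base;
	uint64_t mask;
	struct list_node node;		/* linked into the sorted list below */
};

struct mtrr_state_sketch {
	struct var_range var_ranges[8];		/* indexed by the MSR-to-slot mapping */
	uint8_t          fixed_ranges[88];	/* 11 fixed MSRs x 8 types each       */
	uint64_t         deftype;		/* raw IA32_MTRR_DEF_TYPE value       */
	struct list_node head;			/* var ranges kept sorted by base     */
};

/* Mirror of kvm_vcpu_mtrr_init() at 436: start with an empty range list. */
static void mtrr_state_init(struct mtrr_state_sketch *s)
{
	s->head.next = s->head.prev = &s->head;
}
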
441 struct kvm_mtrr *mtrr_state; member
475 if (!fixed_mtrr_is_enabled(iter->mtrr_state)) in mtrr_lookup_fixed_start()
515 struct kvm_mtrr *mtrr_state = iter->mtrr_state; in __mtrr_lookup_var_next() local
517 list_for_each_entry_continue(iter->range, &mtrr_state->head, node) in __mtrr_lookup_var_next()
527 struct kvm_mtrr *mtrr_state = iter->mtrr_state; in mtrr_lookup_var_start() local
532 iter->range = list_prepare_entry(iter->range, &mtrr_state->head, node); in mtrr_lookup_var_start()
549 if (iter->index >= ARRAY_SIZE(iter->mtrr_state->fixed_ranges)) in mtrr_lookup_fixed_next()
564 if (!mtrr_is_enabled(iter->mtrr_state)) { in mtrr_lookup_start()
574 struct kvm_mtrr *mtrr_state, u64 start, u64 end) in mtrr_lookup_init() argument
576 iter->mtrr_state = mtrr_state; in mtrr_lookup_init()
590 iter->mem_type = iter->mtrr_state->fixed_ranges[iter->index]; in mtrr_lookup_okay()
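
Lines 441..590 are the lookup iterator behind mtrr_for_each_mem_type(): it starts in the fixed ranges when they are enabled and applicable, then continues through the variable ranges hanging off mtrr_state->head, producing one mem_type per step. A compressed sketch of that two-phase walk (the structure and names below are assumptions, not the kernel's):

#include <stdbool.h>
#include <stdint.h>

struct lookup_iter {
	int     fixed_index;	/* next fixed entry, or -1 to skip the fixed phase */
	int     var_index;	/* next matching variable range                    */
	uint8_t mem_type;	/* type produced by the most recent step           */
};

/* One iteration step: fixed ranges first, then overlapping variable ranges. */
static bool lookup_next(struct lookup_iter *it,
			const uint8_t *fixed, int nr_fixed,
			const uint8_t *var_types, int nr_var)
{
	if (it->fixed_index >= 0 && it->fixed_index < nr_fixed) {
		it->mem_type = fixed[it->fixed_index++];
		return true;
	}
	if (it->var_index < nr_var) {
		it->mem_type = var_types[it->var_index++];
		return true;
	}
	return false;
}
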
616 struct kvm_mtrr *mtrr_state = &vcpu->arch.mtrr_state; in kvm_mtrr_get_guest_memory_type() local
626 mtrr_for_each_mem_type(&iter, mtrr_state, start, end) { in kvm_mtrr_get_guest_memory_type()
678 return mtrr_default_type(mtrr_state); in kvm_mtrr_get_guest_memory_type()
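
The loop at 626 visits the type of every MTRR overlapping the gfn, and 678 falls back to the default type when nothing overlapped. When overlapping ranges disagree, the architectural precedence is that UC always wins and a WT/WB conflict resolves to WT; a small sketch of that combination rule (enum values follow the SDM encodings):

/* MTRR memory-type encodings per the Intel SDM. */
enum { MTRR_UC = 0, MTRR_WC = 1, MTRR_WT = 4, MTRR_WP = 5, MTRR_WB = 6 };

/*
 * Combine the types of two overlapping ranges: UC dominates, WT/WB
 * resolves to WT, and other conflicts have no defined result (-1 here).
 */
static int combine_mtrr_types(int a, int b)
{
	if (a == b)
		return a;
	if (a == MTRR_UC || b == MTRR_UC)
		return MTRR_UC;
	if ((a == MTRR_WT && b == MTRR_WB) || (a == MTRR_WB && b == MTRR_WT))
		return MTRR_WT;
	return -1;
}
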
693 struct kvm_mtrr *mtrr_state = &vcpu->arch.mtrr_state; in kvm_mtrr_check_gfn_range_consistency() local
700 mtrr_for_each_mem_type(&iter, mtrr_state, start, end) { in kvm_mtrr_check_gfn_range_consistency()
719 return type == mtrr_default_type(mtrr_state); in kvm_mtrr_check_gfn_range_consistency()
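
The consistency check ending at 719 walks a whole gfn range and succeeds only if every overlapping MTRR reports one and the same type; when nothing overlaps at all, that candidate type has to equal the default type, which is exactly the final comparison. A compact sketch of that shape, with all names assumed:

#include <stdbool.h>

/* true when all observed types match `type`; with none observed,
 * `type` must equal the default type for the range to be consistent. */
static bool range_type_consistent(const int *seen, int nr_seen,
				  int type, int default_type)
{
	if (nr_seen == 0)
		return type == default_type;
	for (int i = 0; i < nr_seen; i++)
		if (seen[i] != type)
			return false;
	return true;
}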