Lines matching references to msr (KVM MTRR handling, arch/x86/kvm/mtrr.c):

27 static bool msr_mtrr_valid(unsigned msr)  in msr_mtrr_valid()  argument
29 switch (msr) { in msr_mtrr_valid()
54 bool kvm_mtrr_valid(struct kvm_vcpu *vcpu, u32 msr, u64 data) in kvm_mtrr_valid() argument
59 if (!msr_mtrr_valid(msr)) in kvm_mtrr_valid()
62 if (msr == MSR_IA32_CR_PAT) { in kvm_mtrr_valid()
64 } else if (msr == MSR_MTRRdefType) { in kvm_mtrr_valid()
68 } else if (msr >= MSR_MTRRfix64K_00000 && msr <= MSR_MTRRfix4K_F8000) { in kvm_mtrr_valid()
76 WARN_ON(!(msr >= 0x200 && msr < 0x200 + 2 * KVM_NR_VAR_MTRR)); in kvm_mtrr_valid()
79 if ((msr & 1) == 0) { in kvm_mtrr_valid()
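
These first hits are the validity checks: msr_mtrr_valid() accepts the variable-range pair space starting at 0x200, the eleven fixed-range MSRs, MSR_MTRRdefType and MSR_IA32_CR_PAT, and kvm_mtrr_valid() then checks the written value, telling a PHYSBASE write from a PHYSMASK write by the low bit of the MSR number (the (msr & 1) == 0 test at line 79). Below is a minimal, self-contained sketch of that classification using the architectural MSR numbers; the enum and function names are illustrative, not KVM's.

  #include <stdint.h>
  #include <stdio.h>

  enum mtrr_msr_kind {
          MTRR_MSR_INVALID, MTRR_MSR_VAR_BASE, MTRR_MSR_VAR_MASK,
          MTRR_MSR_FIXED, MTRR_MSR_DEFTYPE, MTRR_MSR_PAT,
  };

  /* Architectural MSR numbers; 8 variable ranges, as KVM_NR_VAR_MTRR. */
  static enum mtrr_msr_kind classify_mtrr_msr(uint32_t msr)
  {
          if (msr >= 0x200 && msr < 0x200 + 2 * 8)
                  return (msr & 1) ? MTRR_MSR_VAR_MASK : MTRR_MSR_VAR_BASE;
          if (msr == 0x250 ||                      /* MTRRfix64K_00000 */
              (msr >= 0x258 && msr <= 0x259) ||    /* MTRRfix16K_*     */
              (msr >= 0x268 && msr <= 0x26f))      /* MTRRfix4K_*      */
                  return MTRR_MSR_FIXED;
          if (msr == 0x2ff)                        /* MTRRdefType      */
                  return MTRR_MSR_DEFTYPE;
          if (msr == 0x277)                        /* IA32_CR_PAT      */
                  return MTRR_MSR_PAT;
          return MTRR_MSR_INVALID;
  }

  int main(void)
  {
          /* 0x200/0x201 are MTRRphysBase0/MTRRphysMask0. */
          printf("%d %d\n", classify_mtrr_msr(0x200), classify_mtrr_msr(0x201));
          return 0;
  }
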
182 static bool fixed_msr_to_seg_unit(u32 msr, int *seg, int *unit) in fixed_msr_to_seg_unit() argument
184 switch (msr) { in fixed_msr_to_seg_unit()
192 msr - MSR_MTRRfix16K_80000, in fixed_msr_to_seg_unit()
198 msr - MSR_MTRRfix4K_C0000, in fixed_msr_to_seg_unit()
238 static bool fixed_msr_to_range(u32 msr, u64 *start, u64 *end) in fixed_msr_to_range() argument
242 if (!fixed_msr_to_seg_unit(msr, &seg, &unit)) in fixed_msr_to_range()
249 static int fixed_msr_to_range_index(u32 msr) in fixed_msr_to_range_index() argument
253 if (!fixed_msr_to_seg_unit(msr, &seg, &unit)) in fixed_msr_to_range_index()
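
Lines 182-253 are the fixed-range helpers: fixed_msr_to_seg_unit() maps a fixed MSR onto one of three segments (the 64K, 16K, and 4K groups) plus a unit within that segment, and fixed_msr_to_range()/fixed_msr_to_range_index() turn that into a physical span and a starting slot in the 88-entry fixed range array, since every fixed MSR packs eight one-byte memory types. The sketch below follows the architectural layout; fixed_msr_to_span() is a made-up name, not a kernel helper.

  #include <stdint.h>
  #include <stdbool.h>
  #include <stdio.h>

  /*
   * Fixed-range layout (Intel SDM): MSR 0x250 covers 0x00000-0x7ffff in 64K
   * steps, 0x258-0x259 cover 0x80000-0xbffff in 16K steps, and 0x268-0x26f
   * cover 0xc0000-0xfffff in 4K steps; each MSR holds eight one-byte types.
   */
  static bool fixed_msr_to_span(uint32_t msr, uint64_t *start, uint64_t *end,
                                int *first_index)
  {
          uint64_t unit_size;

          if (msr == 0x250) {
                  *start = 0x00000;
                  *first_index = 0;
                  unit_size = 0x10000;
          } else if (msr >= 0x258 && msr <= 0x259) {
                  *start = 0x80000 + (msr - 0x258) * 8 * 0x4000;
                  *first_index = 8 + (msr - 0x258) * 8;
                  unit_size = 0x4000;
          } else if (msr >= 0x268 && msr <= 0x26f) {
                  *start = 0xc0000 + (msr - 0x268) * 8 * 0x1000;
                  *first_index = 24 + (msr - 0x268) * 8;
                  unit_size = 0x1000;
          } else {
                  return false;
          }
          *end = *start + 8 * unit_size;           /* eight sub-ranges per MSR */
          return true;
  }

  int main(void)
  {
          uint64_t start, end;
          int idx;

          if (fixed_msr_to_span(0x268, &start, &end, &idx))  /* MTRRfix4K_C0000 */
                  printf("0x268 -> [%#llx, %#llx), first index %d\n",
                         (unsigned long long)start, (unsigned long long)end, idx);
          return 0;
  }

fixed_msr_to_range_index() in the listing returns that first index, so a single fixed MSR store updates eight consecutive entries of the guest's fixed range array.
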
306 static void update_mtrr(struct kvm_vcpu *vcpu, u32 msr) in update_mtrr() argument
312 if (msr == MSR_IA32_CR_PAT || !tdp_enabled || in update_mtrr()
316 if (!mtrr_is_enabled(mtrr_state) && msr != MSR_MTRRdefType) in update_mtrr()
320 if (fixed_msr_to_range(msr, &start, &end)) { in update_mtrr()
323 } else if (msr == MSR_MTRRdefType) { in update_mtrr()
328 index = (msr - 0x200) / 2; in update_mtrr()
340 static void set_var_mtrr_msr(struct kvm_vcpu *vcpu, u32 msr, u64 data) in set_var_mtrr_msr() argument
346 index = (msr - 0x200) / 2; in set_var_mtrr_msr()
347 is_mtrr_mask = msr - 0x200 - 2 * index; in set_var_mtrr_msr()
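
update_mtrr() and set_var_mtrr_msr() both recover the variable-range slot arithmetically: the pair space starts at 0x200 (MTRRphysBase0), so index = (msr - 0x200) / 2 selects the base/mask pair, and the remainder, written as msr - 0x200 - 2 * index in the listing, says whether the PHYSBASE or the PHYSMASK half was written. A small worked decode, with hypothetical names:

  #include <stdint.h>
  #include <stdbool.h>
  #include <stdio.h>

  /* Hypothetical helper; 0x200 is MTRRphysBase0, pairs are adjacent. */
  static void decode_var_mtrr(uint32_t msr, int *index, bool *is_mask)
  {
          *index   = (msr - 0x200) / 2;
          *is_mask = (msr - 0x200) & 1;    /* same as msr - 0x200 - 2 * index */
  }

  int main(void)
  {
          int index;
          bool is_mask;

          decode_var_mtrr(0x200, &index, &is_mask);   /* MTRRphysBase0 */
          printf("0x200 -> var%d %s\n", index, is_mask ? "mask" : "base");

          decode_var_mtrr(0x20b, &index, &is_mask);   /* MTRRphysMask5 */
          printf("0x20b -> var%d %s\n", index, is_mask ? "mask" : "base");
          return 0;
  }
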
372 int kvm_mtrr_set_msr(struct kvm_vcpu *vcpu, u32 msr, u64 data) in kvm_mtrr_set_msr() argument
376 if (!kvm_mtrr_valid(vcpu, msr, data)) in kvm_mtrr_set_msr()
379 index = fixed_msr_to_range_index(msr); in kvm_mtrr_set_msr()
382 else if (msr == MSR_MTRRdefType) in kvm_mtrr_set_msr()
384 else if (msr == MSR_IA32_CR_PAT) in kvm_mtrr_set_msr()
387 set_var_mtrr_msr(vcpu, msr, data); in kvm_mtrr_set_msr()
389 update_mtrr(vcpu, msr); in kvm_mtrr_set_msr()
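
kvm_mtrr_set_msr() validates the write and then dispatches it: into the fixed range array when fixed_msr_to_range_index() recognizes the MSR, otherwise to the default type register, the PAT, or a variable-range pair via set_var_mtrr_msr(), and finally calls update_mtrr(), which refreshes mappings over the affected range when guest memory types are being honoured. Roughly, the write path looks like the sketch below; the struct and helpers are simplified stand-ins, not KVM's kvm_mtrr state, and the validation step is omitted.

  #include <stdint.h>

  #define NR_VAR_MTRR 8

  struct guest_mtrr_state {                  /* simplified stand-in, not KVM's */
          uint64_t fixed[11];                /* the 11 fixed-range MSRs        */
          uint64_t var_base[NR_VAR_MTRR];
          uint64_t var_mask[NR_VAR_MTRR];
          uint64_t deftype;
          uint64_t pat;
  };

  static int fixed_msr_slot(uint32_t msr)
  {
          if (msr == 0x250)
                  return 0;
          if (msr >= 0x258 && msr <= 0x259)
                  return 1 + (msr - 0x258);
          if (msr >= 0x268 && msr <= 0x26f)
                  return 3 + (msr - 0x268);
          return -1;
  }

  /* Mirrors the dispatch order in the listing; validation is omitted here. */
  static int mtrr_store(struct guest_mtrr_state *s, uint32_t msr, uint64_t data)
  {
          int slot = fixed_msr_slot(msr);

          if (slot >= 0) {
                  s->fixed[slot] = data;
          } else if (msr == 0x2ff) {                 /* MTRRdefType */
                  s->deftype = data;
          } else if (msr == 0x277) {                 /* IA32_PAT    */
                  s->pat = data;
          } else if (msr >= 0x200 && msr < 0x200 + 2 * NR_VAR_MTRR) {
                  int index = (msr - 0x200) / 2;

                  if ((msr - 0x200) & 1)
                          s->var_mask[index] = data;
                  else
                          s->var_base[index] = data;
          } else {
                  return 1;                          /* not an MTRR MSR */
          }

          /* KVM follows every accepted store with update_mtrr(). */
          return 0;
  }

  int main(void)
  {
          struct guest_mtrr_state s = { 0 };

          /* 0xc06: E and FE set, default type write-back (6). */
          return mtrr_store(&s, 0x2ff, 0xc06);
  }
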
393 int kvm_mtrr_get_msr(struct kvm_vcpu *vcpu, u32 msr, u64 *pdata) in kvm_mtrr_get_msr() argument
398 if (msr == MSR_MTRRcap) { in kvm_mtrr_get_msr()
409 if (!msr_mtrr_valid(msr)) in kvm_mtrr_get_msr()
412 index = fixed_msr_to_range_index(msr); in kvm_mtrr_get_msr()
415 else if (msr == MSR_MTRRdefType) in kvm_mtrr_get_msr()
417 else if (msr == MSR_IA32_CR_PAT) in kvm_mtrr_get_msr()
422 index = (msr - 0x200) / 2; in kvm_mtrr_get_msr()
423 is_mtrr_mask = msr - 0x200 - 2 * index; in kvm_mtrr_get_msr()
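
kvm_mtrr_get_msr() mirrors the same decode on the read side, with one special case up front: MSR_MTRRcap (line 398) is read-only and synthesized rather than stored, with the variable-range count in bits 7:0, fixed-range support in bit 8, and write-combining support in bit 10. A hedged sketch of composing and inspecting such a value; the macro and function names are illustrative, not the kernel's.

  #include <stdint.h>
  #include <stdio.h>

  #define MTRRCAP_VCNT_MASK 0xffull       /* bits 7:0 - variable range count    */
  #define MTRRCAP_FIX       (1ull << 8)   /* bit 8    - fixed ranges supported  */
  #define MTRRCAP_WC        (1ull << 10)  /* bit 10   - write-combining support */

  static uint64_t make_mtrrcap(unsigned int var_count)
  {
          /* KVM reports fixed-range and WC support plus its variable count. */
          return MTRRCAP_FIX | MTRRCAP_WC | (var_count & MTRRCAP_VCNT_MASK);
  }

  int main(void)
  {
          uint64_t cap = make_mtrrcap(8);  /* 0x508 for eight variable ranges */

          printf("MTRRcap=%#llx vcnt=%llu fix=%d wc=%d\n",
                 (unsigned long long)cap,
                 (unsigned long long)(cap & MTRRCAP_VCNT_MASK),
                 !!(cap & MTRRCAP_FIX), !!(cap & MTRRCAP_WC));
          return 0;
  }
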