Lines matching references to "msr" in KVM's MTRR emulation (arch/x86/kvm/mtrr.c). Each entry gives the source line number, the matching code, and the enclosing function.

28 static bool msr_mtrr_valid(unsigned msr) in msr_mtrr_valid()
30 switch (msr) { in msr_mtrr_valid()
55 bool kvm_mtrr_valid(struct kvm_vcpu *vcpu, u32 msr, u64 data) in kvm_mtrr_valid()
60 if (!msr_mtrr_valid(msr)) in kvm_mtrr_valid()
63 if (msr == MSR_IA32_CR_PAT) { in kvm_mtrr_valid()
65 } else if (msr == MSR_MTRRdefType) { in kvm_mtrr_valid()
69 } else if (msr >= MSR_MTRRfix64K_00000 && msr <= MSR_MTRRfix4K_F8000) { in kvm_mtrr_valid()
77 WARN_ON(!(msr >= 0x200 && msr < 0x200 + 2 * KVM_NR_VAR_MTRR)); in kvm_mtrr_valid()
80 if ((msr & 1) == 0) { in kvm_mtrr_valid()
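
The checks above funnel every MTRR-related write through one validity test: IA32_PAT and MTRRdefType are handled by name, the fixed-range registers are matched against their MSR numbers, and anything else must land in the variable-range window at 0x200, where even offsets are PhysBase and odd offsets are PhysMask (hence the (msr & 1) test on line 80). A minimal user-space sketch of that classification, using the architectural MSR numbers instead of the kernel's #defines; the helper name is ours, not KVM's:

#include <stdbool.h>
#include <stdint.h>

#define NR_VAR_MTRR 8   /* mirrors KVM_NR_VAR_MTRR */

/* Which MSR numbers the validity check accepts (hypothetical helper,
 * written against the architectural MSR numbering). */
static bool is_mtrr_msr(uint32_t msr)
{
    if (msr == 0x277 || msr == 0x2ff)           /* IA32_PAT, MTRRdefType   */
        return true;
    if (msr == 0x250 ||                         /* MTRRfix64K_00000        */
        msr == 0x258 || msr == 0x259 ||         /* MTRRfix16K_80000/A0000  */
        (msr >= 0x268 && msr <= 0x26f))         /* MTRRfix4K_C0000..F8000  */
        return true;
    /* Variable ranges: PhysBase/PhysMask pairs starting at 0x200. */
    return msr >= 0x200 && msr < 0x200 + 2 * NR_VAR_MTRR;
}
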
183 static bool fixed_msr_to_seg_unit(u32 msr, int *seg, int *unit) in fixed_msr_to_seg_unit()
185 switch (msr) { in fixed_msr_to_seg_unit()
193 msr - MSR_MTRRfix16K_80000, in fixed_msr_to_seg_unit()
199 msr - MSR_MTRRfix4K_C0000, in fixed_msr_to_seg_unit()
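
fixed_msr_to_seg_unit() places a fixed-range MSR into one of three segments (one 64 KiB register, two 16 KiB registers, eight 4 KiB registers) and records its position within that segment, which is what the subtractions on lines 193 and 199 compute. A sketch of that mapping under the architectural MSR numbering (0x250, 0x258-0x259, 0x268-0x26F); the function name and layout are illustrative, not KVM's exact code:

#include <stdbool.h>
#include <stdint.h>

/* Map a fixed-range MTRR MSR to (segment, unit). */
static bool fixed_msr_to_seg_unit_sketch(uint32_t msr, int *seg, int *unit)
{
    if (msr == 0x250) {                        /* MTRRfix64K_00000        */
        *seg = 0; *unit = 0;
    } else if (msr == 0x258 || msr == 0x259) { /* MTRRfix16K_80000/A0000  */
        *seg = 1; *unit = msr - 0x258;
    } else if (msr >= 0x268 && msr <= 0x26f) { /* MTRRfix4K_C0000..F8000  */
        *seg = 2; *unit = msr - 0x268;
    } else {
        return false;
    }
    return true;
}
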
239 static bool fixed_msr_to_range(u32 msr, u64 *start, u64 *end) in fixed_msr_to_range()
243 if (!fixed_msr_to_seg_unit(msr, &seg, &unit)) in fixed_msr_to_range()
250 static int fixed_msr_to_range_index(u32 msr) in fixed_msr_to_range_index()
254 if (!fixed_msr_to_seg_unit(msr, &seg, &unit)) in fixed_msr_to_range_index()
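
From a (seg, unit) pair, fixed_msr_to_range() and fixed_msr_to_range_index() recover the guest physical span the register describes and its slot in the fixed-range table. Each fixed MSR carries eight one-byte type fields, so one register covers eight consecutive ranges. The sketch below assumes the usual 88-entry table (8 + 16 + 64 ranges); the segment geometry is architectural, while the first_index values are our assumption about how that table is laid out:

#include <stdint.h>

struct fixed_seg {
    uint64_t base;        /* first guest physical address of the segment */
    uint64_t unit_len;    /* bytes described by one type field           */
    int      first_index; /* first slot in the fixed-range table         */
};

static const struct fixed_seg segs[3] = {
    { 0x00000, 64 * 1024,  0 },  /* MTRRfix64K_00000          */
    { 0x80000, 16 * 1024,  8 },  /* MTRRfix16K_80000 / _A0000 */
    { 0xc0000,  4 * 1024, 24 },  /* MTRRfix4K_C0000 .. _F8000 */
};

/* One register packs eight type fields, so it covers eight ranges and
 * eight * unit_len bytes. */
static void fixed_unit_to_range(int seg, int unit,
                                uint64_t *start, uint64_t *end, int *index)
{
    *start = segs[seg].base + (uint64_t)unit * 8 * segs[seg].unit_len;
    *end   = *start + 8 * segs[seg].unit_len;
    *index = segs[seg].first_index + unit * 8;
}

/* Example: MSR 0x26b (MTRRfix4K_D8000) -> seg 2, unit 3 ->
 * start 0xD8000, end 0xE0000 (exclusive), range index 48. */
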
307 static void update_mtrr(struct kvm_vcpu *vcpu, u32 msr) in update_mtrr()
313 if (msr == MSR_IA32_CR_PAT || !tdp_enabled || in update_mtrr()
317 if (!mtrr_is_enabled(mtrr_state) && msr != MSR_MTRRdefType) in update_mtrr()
321 if (fixed_msr_to_range(msr, &start, &end)) { in update_mtrr()
324 } else if (msr == MSR_MTRRdefType) { in update_mtrr()
329 index = (msr - 0x200) / 2; in update_mtrr()
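
update_mtrr() only needs to know which guest physical span a changed register governs so the corresponding mappings can be zapped; fixed registers get it from fixed_msr_to_range(), and for a variable range it follows from the base/mask semantics (an address belongs to the range when (addr & mask) == (base & mask)). A hedged sketch of that computation; KVM's own helper may differ in detail, and the explicit physical-address-width handling here is our assumption:

#include <stdint.h>

#define PAGE_MASK (~0xfffULL)

/* Span covered by one variable MTRR, from its raw PhysBase/PhysMask values.
 * "width" is the guest physical address width (MAXPHYADDR); bits above it
 * are reserved in both registers, as are the type/valid bits below bit 12. */
static void var_mtrr_span(uint64_t base, uint64_t mask, unsigned width,
                          uint64_t *start, uint64_t *end)
{
    uint64_t phys_mask = (width < 64 ? (1ULL << width) : 0) - 1;

    *start = base & PAGE_MASK;
    mask   = mask & PAGE_MASK & phys_mask;
    /* With a properly aligned base the matching addresses are contiguous. */
    *end   = (*start | (~mask & phys_mask)) + 1;   /* exclusive end */
}

/* Example with width = 36: base 0x080000006 (2 GiB, type WB in bits 7:0),
 * mask 0xFC0000800 (valid bit 11 set) -> span [0x080000000, 0x0C0000000),
 * i.e. one 1 GiB range. */
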
341 static void set_var_mtrr_msr(struct kvm_vcpu *vcpu, u32 msr, u64 data) in set_var_mtrr_msr()
347 index = (msr - 0x200) / 2; in set_var_mtrr_msr()
348 is_mtrr_mask = msr - 0x200 - 2 * index; in set_var_mtrr_msr()
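
Lines 347-348 (and the identical pair at 423-424 in the read path) turn a variable-range MSR number back into a range index and a base/mask selector. A small user-space program reproducing that arithmetic for the eight ranges:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

int main(void)
{
    /* Variable MTRR MSRs start at 0x200 and alternate PhysBase/PhysMask. */
    for (uint32_t msr = 0x200; msr < 0x210; msr++) {
        uint32_t index   = (msr - 0x200) / 2;          /* which range        */
        bool     is_mask = (msr - 0x200) - 2 * index;  /* same as line 348   */
        printf("MSR 0x%03x -> MTRRphys%s%u\n",
               msr, is_mask ? "Mask" : "Base", index);
    }
    return 0;
}
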
373 int kvm_mtrr_set_msr(struct kvm_vcpu *vcpu, u32 msr, u64 data) in kvm_mtrr_set_msr()
377 if (!kvm_mtrr_valid(vcpu, msr, data)) in kvm_mtrr_set_msr()
380 index = fixed_msr_to_range_index(msr); in kvm_mtrr_set_msr()
383 else if (msr == MSR_MTRRdefType) in kvm_mtrr_set_msr()
385 else if (msr == MSR_IA32_CR_PAT) in kvm_mtrr_set_msr()
388 set_var_mtrr_msr(vcpu, msr, data); in kvm_mtrr_set_msr()
390 update_mtrr(vcpu, msr); in kvm_mtrr_set_msr()
394 int kvm_mtrr_get_msr(struct kvm_vcpu *vcpu, u32 msr, u64 *pdata) in kvm_mtrr_get_msr()
399 if (msr == MSR_MTRRcap) { in kvm_mtrr_get_msr()
410 if (!msr_mtrr_valid(msr)) in kvm_mtrr_get_msr()
413 index = fixed_msr_to_range_index(msr); in kvm_mtrr_get_msr()
416 else if (msr == MSR_MTRRdefType) in kvm_mtrr_get_msr()
418 else if (msr == MSR_IA32_CR_PAT) in kvm_mtrr_get_msr()
423 index = (msr - 0x200) / 2; in kvm_mtrr_get_msr()
424 is_mtrr_mask = msr - 0x200 - 2 * index; in kvm_mtrr_get_msr()
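
The read path also answers MSR_MTRRcap (line 399), a read-only register whose architectural layout is VCNT in bits 7:0, FIX in bit 8, and WC in bit 10. The sketch below decodes the value 0x508 (fixed ranges present, write-combining supported, eight variable ranges), which is what KVM has traditionally advertised; treat the constant as an assumption rather than a quote of the current code:

#include <stdint.h>
#include <stdio.h>

/* Architectural IA32_MTRRCAP layout (read-only MSR 0xFE). */
#define MTRRCAP_VCNT(cap)  ((cap) & 0xff)        /* number of variable ranges */
#define MTRRCAP_FIX(cap)   (((cap) >> 8) & 1)    /* fixed-range MTRRs present */
#define MTRRCAP_WC(cap)    (((cap) >> 10) & 1)   /* write-combining supported */

int main(void)
{
    uint64_t cap = 0x508;   /* FIX | WC | VCNT = 8 (assumed guest value) */

    printf("VCNT=%llu FIX=%llu WC=%llu\n",
           (unsigned long long)MTRRCAP_VCNT(cap),
           (unsigned long long)MTRRCAP_FIX(cap),
           (unsigned long long)MTRRCAP_WC(cap));
    return 0;
}
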