/hypervisor/arch/x86/guest/
vmsr.c
     332  if ((msr <= 0x1FFFU) || ((msr >= 0xc0000000U) && (msr <= 0xc0001fffU))) {  in enable_msr_interception()
     364  uint32_t msr;  in intercept_x2apic_msrs() (local)
     366  for (msr = 0x800U; msr < 0x900U; msr++) {  in intercept_x2apic_msrs()
     497  uint32_t msr;  in init_intercepted_cat_msr_list() (local)
     500  for (msr = MSR_IA32_L2_MASK_BASE; msr < (MSR_IA32_L2_MASK_BASE + NUM_CAT_L2_MSRS); msr++) {  in init_intercepted_cat_msr_list()
     505  for (msr = MSR_IA32_L3_MASK_BASE; msr < (MSR_IA32_L3_MASK_BASE + NUM_CAT_L3_MSRS); msr++) {  in init_intercepted_cat_msr_list()
     545  for (msr = MSR_IA32_L3_MASK_BASE; msr < MSR_IA32_BNDCFGS; msr++) {  in init_msr_emulation()
     646  uint32_t msr;  in rdmsr_vmexit_handler() (local)
     653  switch (msr) {  in rdmsr_vmexit_handler()
    1040  uint32_t msr;  in wrmsr_vmexit_handler() (local)
    [all …]
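The range check at vmsr.c:332 corresponds to the two MSR ranges covered by the VMX MSR bitmap. Below is a standalone sketch (not ACRN's actual enable_msr_interception()) of how an MSR index maps into the 4 KiB bitmap as described in the Intel SDM: bytes 0-1023 hold the read bitmap for low MSRs (0x0-0x1FFF), bytes 1024-2047 the read bitmap for high MSRs (0xC0000000-0xC0001FFF), with the write bitmaps following at offsets 2048 and 3072.

```c
#include <stdint.h>
#include <stdbool.h>

/* Sketch of VMX MSR-bitmap indexing; layout per the Intel SDM, not ACRN code. */
static bool set_msr_bitmap_bit(uint8_t *bitmap, uint32_t msr, bool write, bool intercept)
{
	uint32_t base;
	uint32_t offset;

	/* Same range check as the one visible at vmsr.c:332. */
	if (msr <= 0x1FFFU) {
		base = write ? 2048U : 0U;
		offset = msr;
	} else if ((msr >= 0xc0000000U) && (msr <= 0xc0001fffU)) {
		base = write ? 3072U : 1024U;
		offset = msr - 0xc0000000U;
	} else {
		return false;	/* outside the bitmap: such MSRs always cause a VM exit */
	}

	if (intercept) {
		bitmap[base + (offset >> 3U)] |= (uint8_t)(1U << (offset & 0x7U));
	} else {
		bitmap[base + (offset >> 3U)] &= (uint8_t)~(1U << (offset & 0x7U));
	}
	return true;
}
```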
|
vmtrr.c
      25  uint32_t msr;  (struct member)
      51  static uint32_t get_index_of_fixed_mtrr(uint32_t msr)  in get_index_of_fixed_mtrr() (argument)
      56  if (fixed_mtrr_map[i].msr == msr) {  in get_index_of_fixed_mtrr()
     210  if (msr == MSR_IA32_MTRR_DEF_TYPE) {  in write_vmtrr()
     240  index = get_index_of_fixed_mtrr(msr);  in write_vmtrr()
     244  pr_err("Write to unexpected MSR: 0x%x", msr);  in write_vmtrr()
     250  uint64_t read_vmtrr(const struct acrn_vcpu *vcpu, uint32_t msr)  in read_vmtrr() (argument)
     256  if (msr == MSR_IA32_MTRR_CAP) {  in read_vmtrr()
     258  } else if (msr == MSR_IA32_MTRR_DEF_TYPE) {  in read_vmtrr()
     261  index = get_index_of_fixed_mtrr(msr);  in read_vmtrr()
    [all …]
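The references at vmtrr.c:25/51/56 show a table keyed by the fixed-range MTRR MSR number that is scanned to find a per-MSR bookkeeping index. A hypothetical reconstruction of that lookup pattern follows; the MSR numbers are the architectural fixed-range MTRRs, while the field names and table contents are illustrative rather than ACRN's exact definitions.

```c
#include <stdint.h>

#define FIXED_MTRR_INVALID_INDEX	0xFFFFFFFFU

/* Illustrative fixed-range MTRR map; entry layout mirrors the .msr member at vmtrr.c:25. */
static const struct {
	uint32_t msr;		/* fixed-range MTRR MSR number */
	uint32_t sub_ranges;	/* each fixed MTRR covers 8 sub-ranges */
} fixed_mtrr_map[] = {
	{ 0x250U, 8U },		/* MSR_IA32_MTRR_FIX64K_00000 */
	{ 0x258U, 8U },		/* MSR_IA32_MTRR_FIX16K_80000 */
	{ 0x259U, 8U },		/* MSR_IA32_MTRR_FIX16K_A0000 */
	{ 0x268U, 8U },		/* MSR_IA32_MTRR_FIX4K_C0000, and so on */
};

static uint32_t get_index_of_fixed_mtrr(uint32_t msr)
{
	uint32_t i;

	/* Linear scan, as suggested by the comparison at vmtrr.c:56. */
	for (i = 0U; i < (sizeof(fixed_mtrr_map) / sizeof(fixed_mtrr_map[0])); i++) {
		if (fixed_mtrr_map[i].msr == msr) {
			return i;
		}
	}
	return FIXED_MTRR_INVALID_INDEX;	/* caller reports "unexpected MSR" */
}
```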
|
hyperv.c
     150  hyperv_wrmsr(struct acrn_vcpu *vcpu, uint32_t msr, uint64_t wval)  in hyperv_wrmsr() (argument)
     154  switch (msr) {  in hyperv_wrmsr()
     179  pr_err("hv: %s: unexpected MSR[0x%x] write", __func__, msr);  in hyperv_wrmsr()
     185  __func__, msr, wval, vcpu->vcpu_id, vcpu->vm->vm_id);  in hyperv_wrmsr()
     191  hyperv_rdmsr(struct acrn_vcpu *vcpu, uint32_t msr, uint64_t *rval)  in hyperv_rdmsr() (argument)
     195  switch (msr) {  in hyperv_rdmsr()
     219  pr_err("hv: %s: unexpected MSR[0x%x] read", __func__, msr);  in hyperv_rdmsr()
     225  __func__, msr, *rval, vcpu->vcpu_id, vcpu->vm->vm_id);  in hyperv_rdmsr()
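hyperv_wrmsr()/hyperv_rdmsr() dispatch on the synthetic Hyper-V MSR index and report anything unhandled as unexpected. The sketch below shows that switch-based pattern; the MSR numbers follow the Hyper-V TLFS, but which ones ACRN actually emulates, and how, is an assumption here.

```c
#include <stdint.h>

#define HV_X64_MSR_GUEST_OS_ID	0x40000000U	/* TLFS-defined synthetic MSRs */
#define HV_X64_MSR_HYPERCALL	0x40000001U
#define HV_X64_MSR_VP_INDEX	0x40000002U

struct hv_ctx {
	uint64_t guest_os_id;
	uint64_t hypercall;
	uint32_t vp_index;
};

/* Illustrative write handler; error handling and MSR set are assumptions. */
static int32_t hv_wrmsr_sketch(struct hv_ctx *hv, uint32_t msr, uint64_t wval)
{
	int32_t err = 0;

	switch (msr) {
	case HV_X64_MSR_GUEST_OS_ID:
		hv->guest_os_id = wval;
		break;
	case HV_X64_MSR_HYPERCALL:
		hv->hypercall = wval;
		break;
	case HV_X64_MSR_VP_INDEX:
		err = -1;	/* treated here as read-only from the guest */
		break;
	default:
		err = -1;	/* the "unexpected MSR write" path */
		break;
	}
	return err;
}
```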
|
vmcs.c
     217  static uint32_t check_vmx_ctrl(uint32_t msr, uint32_t ctrl_req)  in check_vmx_ctrl() (argument)
     223  vmx_msr = msr_read(msr);  in check_vmx_ctrl()
     237  msr, ctrl, ctrl_req);  in check_vmx_ctrl()
     244  static uint32_t check_vmx_ctrl_64(uint32_t msr, uint64_t ctrl_req)  in check_vmx_ctrl_64() (argument)
     249  vmx_msr = msr_read(msr);  in check_vmx_ctrl_64()
     256  msr, ctrl, ctrl_req);  in check_vmx_ctrl_64()
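check_vmx_ctrl() reads an IA32_VMX_* capability MSR and reconciles the requested control bits against it. A minimal sketch of that adjustment is shown below, assuming the SDM convention for these MSRs: the low 32 bits are the allowed-0 settings (bits that must be 1 in the control) and the high 32 bits are the allowed-1 settings (bits that may be 1). In vmcs.c the 64-bit value comes from msr_read(); here it is passed in directly to keep the sketch self-contained.

```c
#include <stdint.h>

/* Reconcile a requested VMX control value with a capability MSR value. */
static uint32_t adjust_vmx_ctrl_sketch(uint64_t cap_msr_value, uint32_t ctrl_req)
{
	uint32_t allowed_0 = (uint32_t)cap_msr_value;		/* must-be-one bits  */
	uint32_t allowed_1 = (uint32_t)(cap_msr_value >> 32U);	/* may-be-one bits   */
	uint32_t ctrl = ctrl_req;

	ctrl |= allowed_0;	/* force required bits on */
	ctrl &= allowed_1;	/* clear bits the CPU does not allow */

	/* Any requested bit cleared here is unsupported on this CPU,
	 * which is what the pr_*() message at vmcs.c:237 would flag. */
	return ctrl;
}
```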
|
nested.c
      29  bool is_vmx_msr(uint32_t msr)  in is_vmx_msr() (argument)
      35  if (msr == vmx_msrs[i]) {  in is_vmx_msr()
      44  static uint64_t adjust_vmx_ctrls(uint32_t msr, uint64_t request_bits)  in adjust_vmx_ctrls() (argument)
      56  msr_val.full = msr_read(msr);  in adjust_vmx_ctrls()
     200  int32_t read_vmx_msr(struct acrn_vcpu *vcpu, uint32_t msr, uint64_t *val)  in read_vmx_msr() (argument)
     206  switch (msr) {  in read_vmx_msr()
     225  v = vcpu_get_guest_msr(vcpu, msr);  in read_vmx_msr()
|
vlapic.c
    2018  static inline uint32_t x2apic_msr_to_regoff(uint32_t msr)  in x2apic_msr_to_regoff() (argument)
    2021  return (((msr - 0x800U) & 0x3FFU) << 4U);  in x2apic_msr_to_regoff()
    2110  int32_t vlapic_x2apic_read(struct acrn_vcpu *vcpu, uint32_t msr, uint64_t *val)  in vlapic_x2apic_read() (argument)
    2123  switch (msr) {  in vlapic_x2apic_read()
    2127  offset = x2apic_msr_to_regoff(msr);  in vlapic_x2apic_read()
    2131  pr_err("%s: unexpected MSR[0x%x] read with lapic_pt", __func__, msr);  in vlapic_x2apic_read()
    2135  offset = x2apic_msr_to_regoff(msr);  in vlapic_x2apic_read()
    2158  switch (msr) {  in vlapic_x2apic_write()
    2163  offset = x2apic_msr_to_regoff(msr);  in vlapic_x2apic_write()
    2167  pr_err("%s: unexpected MSR[0x%x] write with lapic_pt", __func__, msr);  in vlapic_x2apic_write()
    [all …]
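The conversion at vlapic.c:2021 maps an x2APIC MSR in the 0x800-0x8FF range to the corresponding xAPIC MMIO register offset: drop the 0x800 base and multiply by 16, since MMIO LAPIC registers are 16 bytes apart. A small self-checking example using the architectural register numbers:

```c
#include <stdint.h>
#include <assert.h>

/* Same expression as vlapic.c:2021. */
static inline uint32_t x2apic_msr_to_regoff(uint32_t msr)
{
	return (((msr - 0x800U) & 0x3FFU) << 4U);
}

int main(void)
{
	assert(x2apic_msr_to_regoff(0x802U) == 0x20U);	/* APIC ID register */
	assert(x2apic_msr_to_regoff(0x808U) == 0x80U);	/* TPR              */
	assert(x2apic_msr_to_regoff(0x80BU) == 0xB0U);	/* EOI register     */
	return 0;
}
```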
|
vcpu.c
     135  uint64_t vcpu_get_guest_msr(const struct acrn_vcpu *vcpu, uint32_t msr)  in vcpu_get_guest_msr() (argument)
     137  uint32_t index = vmsr_get_guest_msr_index(msr);  in vcpu_get_guest_msr()
     147  void vcpu_set_guest_msr(struct acrn_vcpu *vcpu, uint32_t msr, uint64_t val)  in vcpu_set_guest_msr() (argument)
     149  uint32_t index = vmsr_get_guest_msr_index(msr);  in vcpu_set_guest_msr()
|
/hypervisor/arch/x86/
cpu.c
     624  struct msr_data_struct *msr = (struct msr_data_struct *)data;  in smpcall_write_msr_func() (local)
     626  msr_write(msr->msr_index, msr->write_val);  in smpcall_write_msr_func()
     631  struct msr_data_struct msr = {0};  in msr_write_pcpu() (local)
     637  msr.msr_index = msr_index;  in msr_write_pcpu()
     638  msr.write_val = value64;  in msr_write_pcpu()
     640  smp_call_function(mask, smpcall_write_msr_func, &msr);  in msr_write_pcpu()
     648  msr->read_val = msr_read(msr->msr_index);  in smpcall_read_msr_func()
     653  struct msr_data_struct msr = {0};  in msr_read_pcpu() (local)
     660  msr.msr_index = msr_index;  in msr_read_pcpu()
     662  smp_call_function(mask, smpcall_read_msr_func, &msr);  in msr_read_pcpu()
    [all …]
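msr_write_pcpu() illustrates the cross-CPU MSR write pattern: the MSR index and value are packed into a small struct and handed to an SMP-call callback that performs the WRMSR on each targeted pCPU. The sketch below follows the snippets above; the smp_call_function()/msr_write() prototypes and the mask type are assumptions, not ACRN's exact signatures.

```c
#include <stdint.h>

struct msr_data_struct {
	uint32_t msr_index;
	uint64_t write_val;
	uint64_t read_val;
};

/* Assumed prototypes, matching how the calls appear in the snippets above. */
extern void msr_write(uint32_t msr_index, uint64_t value64);
extern void smp_call_function(uint64_t mask, void (*func)(void *data), void *data);

static void smpcall_write_msr_func(void *data)
{
	struct msr_data_struct *msr = (struct msr_data_struct *)data;

	msr_write(msr->msr_index, msr->write_val);	/* runs on the target pCPU */
}

void msr_write_pcpu_sketch(uint32_t msr_index, uint64_t value64, uint16_t pcpu_id)
{
	struct msr_data_struct msr = {0};
	uint64_t mask = (1UL << pcpu_id);	/* single-CPU destination mask (assumed encoding) */

	msr.msr_index = msr_index;
	msr.write_val = value64;
	smp_call_function(mask, smpcall_write_msr_func, &msr);
}
```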
|
cpu_caps.c
      45  uint32_t msr;  (struct member)
     478  static bool is_vmx_cap_supported(uint32_t msr, uint32_t bits)  in is_vmx_cap_supported() (argument)
     483  vmx_msr = msr_read(msr);  in is_vmx_cap_supported()
     507  if (!is_vmx_cap_supported(vmx_caps[i].msr, vmx_caps[i].bits)) {  in check_essential_vmx_caps()
     509  vmx_caps[i].msr, msr_read(vmx_caps[i].msr), vmx_caps[i].bits);  in check_essential_vmx_caps()
|
/hypervisor/dm/
vuart.c
     246  uint8_t msr;  in get_modem_status() (local)
     253  msr = 0U;  in get_modem_status()
     255  msr |= MSR_CTS;  in get_modem_status()
     258  msr |= MSR_DSR;  in get_modem_status()
     261  msr |= MSR_RI;  in get_modem_status()
     264  msr |= MSR_DCD;  in get_modem_status()
     271  msr = MSR_DCD | MSR_DSR;  in get_modem_status()
     273  return msr;  in get_modem_status()
     307  uint8_t msr;  in write_reg() (local)
     364  vu->msr = update_modem_status(msr, vu->msr);  in write_reg()
    [all …]
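get_modem_status() ORs the 16550 modem-status bits in one by one, and the fallback at line 271 reports "carrier detect + data set ready" so the guest driver always sees a connected line. The sketch below reproduces that shape; the MSR bit values follow the 16550 register layout, while the conditions that drive each bit (the peer_line_state mapping) are illustrative assumptions.

```c
#include <stdint.h>
#include <stdbool.h>
#include <stddef.h>

#define MSR_CTS	0x10U	/* clear to send */
#define MSR_DSR	0x20U	/* data set ready */
#define MSR_RI	0x40U	/* ring indicator */
#define MSR_DCD	0x80U	/* data carrier detect */

/* Hypothetical peer line state; ACRN derives these bits differently. */
struct peer_line_state {
	bool rts;	/* peer request-to-send      -> our CTS     */
	bool dtr;	/* peer data-terminal-ready  -> our DSR/DCD */
	bool ring;
};

static uint8_t get_modem_status_sketch(const struct peer_line_state *peer)
{
	uint8_t msr;

	if (peer != NULL) {
		msr = 0U;
		if (peer->rts) {
			msr |= MSR_CTS;
		}
		if (peer->dtr) {
			msr |= MSR_DSR | MSR_DCD;
		}
		if (peer->ring) {
			msr |= MSR_RI;
		}
	} else {
		msr = MSR_DCD | MSR_DSR;	/* no peer: report an always-connected line */
	}
	return msr;
}
```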
|
/hypervisor/include/arch/x86/asm/guest/
vmtrr.h
      64  void write_vmtrr(struct acrn_vcpu *vcpu, uint32_t msr, uint64_t value);
      73  uint64_t read_vmtrr(const struct acrn_vcpu *vcpu, uint32_t msr);
|
hyperv.h
      62  int32_t hyperv_wrmsr(struct acrn_vcpu *vcpu, uint32_t msr, uint64_t wval);
      63  int32_t hyperv_rdmsr(struct acrn_vcpu *vcpu, uint32_t msr, uint64_t *rval);
|
nested.h
     353  bool is_vmx_msr(uint32_t msr);
     355  int32_t read_vmx_msr(__unused struct acrn_vcpu *vcpu, uint32_t msr, uint64_t *val);
     365  static inline bool is_vmx_msr(__unused uint32_t msr)  in is_vmx_msr() (argument)
     377  __unused uint32_t msr, __unused uint64_t *val)  in read_vmx_msr() (argument)
|
vlapic.h
     132  int32_t vlapic_x2apic_read(struct acrn_vcpu *vcpu, uint32_t msr, uint64_t *val);
     133  int32_t vlapic_x2apic_write(struct acrn_vcpu *vcpu, uint32_t msr, uint64_t val);
|
vcpu.h
     500  uint64_t vcpu_get_guest_msr(const struct acrn_vcpu *vcpu, uint32_t msr);
     511  void vcpu_set_guest_msr(struct acrn_vcpu *vcpu, uint32_t msr, uint64_t val);
|
/hypervisor/include/arch/x86/asm/
msr.h
     615  static inline bool is_x2apic_msr(uint32_t msr)  in is_x2apic_msr() (argument)
     620  return ((msr >= 0x800U) && (msr < 0x900U));  in is_x2apic_msr()
     627  uint32_t vmsr_get_guest_msr_index(uint32_t msr);
|
/hypervisor/include/dm/
vuart.h
      88  uint8_t msr;  /**< Modem status register (R/W). */  (struct member)
|