| /arch/arm64/kvm/hyp/vhe/ |
| sysreg-sr.c |
    90   write_sysreg(__vcpu_sys_reg(vcpu, PAR_EL1), par_el1);  in __sysreg_restore_vel2_state()
    91   write_sysreg(__vcpu_sys_reg(vcpu, TPIDR_EL1), tpidr_el1);  in __sysreg_restore_vel2_state()
    94   write_sysreg(__vcpu_sys_reg(vcpu, MPIDR_EL1), vmpidr_el2);  in __sysreg_restore_vel2_state()
    95   write_sysreg_el1(__vcpu_sys_reg(vcpu, MAIR_EL2), SYS_MAIR);  in __sysreg_restore_vel2_state()
    96   write_sysreg_el1(__vcpu_sys_reg(vcpu, VBAR_EL2), SYS_VBAR);  in __sysreg_restore_vel2_state()
    130  write_sysreg_el1(__vcpu_sys_reg(vcpu, PIR_EL2), SYS_PIR);  in __sysreg_restore_vel2_state()
    135  write_sysreg_el1(__vcpu_sys_reg(vcpu, POR_EL2), SYS_POR);  in __sysreg_restore_vel2_state()
    138  write_sysreg_el1(__vcpu_sys_reg(vcpu, ESR_EL2), SYS_ESR);  in __sysreg_restore_vel2_state()
    141  write_sysreg_el1(__vcpu_sys_reg(vcpu, FAR_EL2), SYS_FAR);  in __sysreg_restore_vel2_state()
    142  write_sysreg(__vcpu_sys_reg(vcpu, SP_EL2), sp_el1);  in __sysreg_restore_vel2_state()
    [all …]
|
| switch.c |
    84   guest_hcr = __vcpu_sys_reg(vcpu, HCR_EL2);  in __compute_hcr()
    119  val = __vcpu_sys_reg(vcpu, CNTP_CVAL_EL0);  in __activate_traps()
    121  val = __vcpu_sys_reg(vcpu, CNTHP_CVAL_EL2);  in __activate_traps()
    232  cval = __vcpu_sys_reg(vcpu, CNTP_CVAL_EL0);  in compute_emulated_cntx_ctl_el0()
    233  ctl = __vcpu_sys_reg(vcpu, CNTP_CTL_EL0);  in compute_emulated_cntx_ctl_el0()
    237  cval = __vcpu_sys_reg(vcpu, CNTV_CVAL_EL0);  in compute_emulated_cntx_ctl_el0()
    238  ctl = __vcpu_sys_reg(vcpu, CNTV_CTL_EL0);  in compute_emulated_cntx_ctl_el0()
    279  val = __vcpu_sys_reg(vcpu, CNTP_CVAL_EL0);  in kvm_hyp_handle_timer()
    288  val = __vcpu_sys_reg(vcpu, CNTP_CVAL_EL0);  in kvm_hyp_handle_timer()
    305  val = __vcpu_sys_reg(vcpu, CNTV_CVAL_EL0);  in kvm_hyp_handle_timer()
    [all …]
|
| /arch/arm64/kvm/vgic/ |
| vgic-v3-nested.c |
    120  xmo = __vcpu_sys_reg(vcpu, HCR_EL2) & (HCR_IMO | HCR_FMO);  in vgic_state_is_nested()
    146  u64 lr = __vcpu_sys_reg(vcpu, ICH_LRN(i));  in vgic_compute_mi_state()
    181  hcr = __vcpu_sys_reg(vcpu, ICH_HCR_EL2);  in vgic_v3_get_misr()
    182  vmcr = __vcpu_sys_reg(vcpu, ICH_VMCR_EL2);  in vgic_v3_get_misr()
    189  if (__vcpu_sys_reg(vcpu, ICH_HCR_EL2) & ICH_HCR_EL2_UIE) {  in vgic_v3_get_misr()
    254  u64 lr = __vcpu_sys_reg(vcpu, ICH_LRN(i));  in vgic_v3_create_shadow_lr()
    274  u64 lr = __vcpu_sys_reg(vcpu, ICH_LRN(i));  in vgic_v3_sync_nested()
    313  s_cpu_if->vgic_hcr = __vcpu_sys_reg(vcpu, ICH_HCR_EL2) | val;  in vgic_v3_create_shadow_state()
    314  s_cpu_if->vgic_vmcr = __vcpu_sys_reg(vcpu, ICH_VMCR_EL2);  in vgic_v3_create_shadow_state()
    361  val = __vcpu_sys_reg(vcpu, ICH_HCR_EL2);  in vgic_v3_put_nested()
    [all …]
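The ICH_LRN(i) references above (lines 146, 254, 274) iterate the guest hypervisor's shadow list registers. A minimal standalone sketch of that scan pattern follows; the bit masks and the plain array standing in for the vcpu sysreg file are simplifying assumptions, not the kernel's layout:

```c
#include <stdint.h>
#include <stdio.h>

#define NR_LIST_REGS	16
#define LR_STATE_MASK	(3ULL << 62)	/* assumed: state field of a list register */
#define LR_EOI		(1ULL << 41)	/* assumed: EOI (maintenance) bit */

/* Hypothetical stand-in for the vCPU's shadow copies of ICH_LR<n>_EL2;
 * in the kernel these are fetched with __vcpu_sys_reg(vcpu, ICH_LRN(i)). */
static uint64_t shadow_lr[NR_LIST_REGS];

/* Scan the guest hypervisor's list registers, the same shape as the
 * nested vgic code deciding whether a maintenance interrupt is due. */
static int any_lr_needs_eoi(void)
{
	for (int i = 0; i < NR_LIST_REGS; i++) {
		uint64_t lr = shadow_lr[i];	/* ~ __vcpu_sys_reg(vcpu, ICH_LRN(i)) */

		/* invalid (state == 0) LR with EOI set -> maintenance needed */
		if (!(lr & LR_STATE_MASK) && (lr & LR_EOI))
			return 1;
	}
	return 0;
}

int main(void)
{
	shadow_lr[3] = LR_EOI;
	printf("maintenance needed: %d\n", any_lr_needs_eoi());
	return 0;
}
```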
|
| /arch/arm64/kvm/ |
| pauth.c |
    40   mod = __vcpu_sys_reg(vcpu, SP_EL2);  in compute_pac()
    170  ikey.lo = __vcpu_sys_reg(vcpu, APIBKEYLO_EL1);  in kvm_auth_eretax()
    171  ikey.hi = __vcpu_sys_reg(vcpu, APIBKEYHI_EL1);  in kvm_auth_eretax()
    176  ikey.lo = __vcpu_sys_reg(vcpu, APIAKEYLO_EL1);  in kvm_auth_eretax()
    177  ikey.hi = __vcpu_sys_reg(vcpu, APIAKEYHI_EL1);  in kvm_auth_eretax()
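The kvm_auth_eretax() lines reassemble a 128-bit pointer-authentication instruction key from its LO/HI sysreg halves. A standalone sketch of that selection; the flat variables standing in for the shadow registers and the select_ikey() helper are hypothetical:

```c
#include <stdint.h>
#include <stdio.h>

struct ptrauth_key {
	uint64_t lo, hi;	/* one 128-bit key, stored as two 64-bit sysregs */
};

/* Hypothetical stand-ins for __vcpu_sys_reg(vcpu, APIAKEYLO_EL1) etc. */
static uint64_t apia_lo, apia_hi, apib_lo, apib_hi;

/* Pick the A or B instruction key, mirroring the shape of the listing:
 * the key is rebuilt from its halves before the PAC is recomputed. */
static struct ptrauth_key select_ikey(int use_b_key)
{
	struct ptrauth_key ikey;

	if (use_b_key) {
		ikey.lo = apib_lo;	/* ~ __vcpu_sys_reg(vcpu, APIBKEYLO_EL1) */
		ikey.hi = apib_hi;	/* ~ __vcpu_sys_reg(vcpu, APIBKEYHI_EL1) */
	} else {
		ikey.lo = apia_lo;	/* ~ __vcpu_sys_reg(vcpu, APIAKEYLO_EL1) */
		ikey.hi = apia_hi;	/* ~ __vcpu_sys_reg(vcpu, APIAKEYHI_EL1) */
	}
	return ikey;
}

int main(void)
{
	apib_lo = 0x1111;
	apib_hi = 0x2222;
	struct ptrauth_key k = select_ikey(1);
	printf("ikey = %#llx:%#llx\n",
	       (unsigned long long)k.hi, (unsigned long long)k.lo);
	return 0;
}
```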
|
| pmu-emul.c |
    101   return __vcpu_sys_reg(vcpu, MDCR_EL2) & MDCR_EL2_HLP;  in kvm_pmc_has_64bit_overflow()
    134   counter = __vcpu_sys_reg(vcpu, reg);  in kvm_pmu_get_pmc_value()
    177   val = __vcpu_sys_reg(vcpu, reg) & GENMASK(63, 32);  in kvm_pmu_set_pmc_value()
    375   u64 reg = __vcpu_sys_reg(vcpu, PMOVSSET_EL0);  in kvm_pmu_overflow_status()
    377   reg &= __vcpu_sys_reg(vcpu, PMINTENSET_EL1);  in kvm_pmu_overflow_status()
    490   mask &= __vcpu_sys_reg(vcpu, PMCNTENSET_EL0);  in kvm_pmu_counter_increment()
    625   unsigned int mdcr = __vcpu_sys_reg(vcpu, MDCR_EL2);  in kvm_pmu_counter_is_enabled()
    657   u64 mdcr = __vcpu_sys_reg(vcpu, MDCR_EL2);  in kvm_pmc_counts_at_el2()
    1038  u64 val = __vcpu_sys_reg(vcpu, MDCR_EL2);  in kvm_arm_set_nr_counters()
    1302  u64 pmcr = __vcpu_sys_reg(vcpu, PMCR_EL0);  in kvm_vcpu_read_pmcr()
    [all …]
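kvm_pmu_overflow_status() (lines 375 and 377) ANDs the overflow-set and interrupt-enable-set shadow registers: an overflowed counter only contributes a pending interrupt if its overflow interrupt is enabled. A minimal model of that computation, with plain variables assumed in place of the vcpu sysreg file:

```c
#include <stdint.h>
#include <stdio.h>

/* Hypothetical shadow values for the two PMU registers in the listing. */
static uint64_t pmovsset_el0;	/* ~ __vcpu_sys_reg(vcpu, PMOVSSET_EL0)   */
static uint64_t pmintenset_el1;	/* ~ __vcpu_sys_reg(vcpu, PMINTENSET_EL1) */

/* One bit per counter: pending = overflowed AND interrupt-enabled. */
static uint64_t pmu_overflow_status(void)
{
	uint64_t reg = pmovsset_el0;

	reg &= pmintenset_el1;
	return reg;
}

int main(void)
{
	pmovsset_el0   = 0x0f;	/* counters 0-3 overflowed        */
	pmintenset_el1 = 0x05;	/* interrupts enabled for 0 and 2 */
	printf("pending overflow IRQs: %#llx\n",
	       (unsigned long long)pmu_overflow_status());
	return 0;
}
```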
|
| inject_fault.c |
    84   !(__vcpu_sys_reg(vcpu, HCRX_EL2) & HCRX_EL2_SCTLR2En))  in __effective_sctlr2_bit()
    212  if (__vcpu_sys_reg(vcpu, HCR_EL2) & (HCR_TGE | HCR_TEA))  in kvm_sea_target_is_el2()
    219  (__vcpu_sys_reg(vcpu, HCRX_EL2) & HCRX_EL2_TMEA);  in kvm_sea_target_is_el2()
    283  if (!(__vcpu_sys_reg(vcpu, HCRX_EL2) & HCRX_EL2_TMEA))  in kvm_serror_target_is_el2()
|
| handle_exit.c |
    315  u64 hcrx = __vcpu_sys_reg(vcpu, HCRX_EL2);  in handle_other()
    349  fwd &= (__vcpu_sys_reg(vcpu, HFGITR2_EL2) & HFGITR2_EL2_TSBCSYNC);  in handle_other()
    353  fwd &= (__vcpu_sys_reg(vcpu, HFGITR_EL2) & HFGITR_EL2_PSBCSYNC);  in handle_other()
|
| at.c |
    63   return (__vcpu_sys_reg(vcpu, HCRX_EL2) & HCRX_EL2_TCR2En) &&  in s1pie_enabled()
    64   (__vcpu_sys_reg(vcpu, TCR2_EL1) & TCR2_EL1_PIE);  in s1pie_enabled()
    87   if (__vcpu_sys_reg(vcpu, HCRX_EL2) & HCRX_EL2_TCR2En) {  in compute_s1poe()
    92   val = __vcpu_sys_reg(vcpu, TCR2_EL1);  in compute_s1poe()
    105  hcr = __vcpu_sys_reg(vcpu, HCR_EL2);  in setup_s1_walk()
    686  if (__vcpu_sys_reg(vcpu, HCR_EL2) & HCR_FWB) {  in compute_par_s12()
    740  if ((__vcpu_sys_reg(vcpu, HCR_EL2) & HCR_CD) &&  in compute_par_s12()
    770  (__vcpu_sys_reg(vcpu, HCR_EL2) & HCR_DC)) {  in compute_par_s1()
    876  wxn = (__vcpu_sys_reg(vcpu, SCTLR_EL1) & SCTLR_ELx_WXN);  in compute_s1_direct_permissions()
|
| arch_timer.c |
    73   return __vcpu_sys_reg(vcpu, CNTV_CTL_EL0);  in timer_get_ctl()
    75   return __vcpu_sys_reg(vcpu, CNTP_CTL_EL0);  in timer_get_ctl()
    77   return __vcpu_sys_reg(vcpu, CNTHV_CTL_EL2);  in timer_get_ctl()
    79   return __vcpu_sys_reg(vcpu, CNTHP_CTL_EL2);  in timer_get_ctl()
    92   return __vcpu_sys_reg(vcpu, CNTV_CVAL_EL0);  in timer_get_cval()
    94   return __vcpu_sys_reg(vcpu, CNTP_CVAL_EL0);  in timer_get_cval()
    96   return __vcpu_sys_reg(vcpu, CNTHV_CVAL_EL2);  in timer_get_cval()
    98   return __vcpu_sys_reg(vcpu, CNTHP_CVAL_EL2);  in timer_get_cval()
    834  u64 val = __vcpu_sys_reg(vcpu, CNTHCTL_EL2);  in timer_set_traps()
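timer_get_ctl() and timer_get_cval() dispatch on the timer index to the shadow register backing that timer. A self-contained sketch of the same dispatch shape; the enum values and the flat sys_regs array are simplifying assumptions:

```c
#include <stdint.h>
#include <stdio.h>

/* Hypothetical timer indices and sysreg indices modelling the
 * timer_get_ctl() dispatch in the listing. */
enum timer_index { TIMER_VTIMER, TIMER_PTIMER, TIMER_HVTIMER, TIMER_HPTIMER };
enum vcpu_sysreg {
	CNTV_CTL_EL0, CNTP_CTL_EL0, CNTHV_CTL_EL2, CNTHP_CTL_EL2,
	NR_SYS_REGS,
};

static uint64_t sys_regs[NR_SYS_REGS];	/* ~ the vcpu's shadow sysreg file */

/* Map a timer to its shadow CTL register, the same shape as
 * timer_get_ctl() returning __vcpu_sys_reg(vcpu, CNT*_CTL_EL*). */
static uint64_t timer_get_ctl(enum timer_index t)
{
	switch (t) {
	case TIMER_VTIMER:  return sys_regs[CNTV_CTL_EL0];
	case TIMER_PTIMER:  return sys_regs[CNTP_CTL_EL0];
	case TIMER_HVTIMER: return sys_regs[CNTHV_CTL_EL2];
	case TIMER_HPTIMER: return sys_regs[CNTHP_CTL_EL2];
	}
	return 0;	/* unreachable for valid indices */
}

int main(void)
{
	sys_regs[CNTP_CTL_EL0] = 1;	/* ENABLE bit */
	printf("ptimer ctl = %#llx\n",
	       (unsigned long long)timer_get_ctl(TIMER_PTIMER));
	return 0;
}
```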
|
| sys_regs.h |
    141  return __vcpu_sys_reg(vcpu, r->reg);  in reset_unknown()
    149  return __vcpu_sys_reg(vcpu, r->reg);  in reset_val()
|
| sys_regs.c |
    211   return __vcpu_sys_reg(vcpu, reg);  in vcpu_read_sys_reg()
    593   p->regval = __vcpu_sys_reg(vcpu, r->reg);  in trap_oslsr_el1()
    801   return __vcpu_sys_reg(vcpu, r->reg);  in reset_pmu_reg()
    809   return __vcpu_sys_reg(vcpu, r->reg);  in reset_pmevcntr()
    821   return __vcpu_sys_reg(vcpu, r->reg);  in reset_pmevtyper()
    829   return __vcpu_sys_reg(vcpu, r->reg);  in reset_pmselr()
    845   return __vcpu_sys_reg(vcpu, r->reg);  in reset_pmcr()
    1074  p->regval = __vcpu_sys_reg(vcpu, reg);  in access_pmu_evtyper()
    2234  return __vcpu_sys_reg(vcpu, r->reg);  in reset_clidr()
    2479  return __vcpu_sys_reg(vcpu, r->reg);  in reset_hcr()
    [all …]
|
| trace_arm.h |
    352  __entry->hcr_el2 = __vcpu_sys_reg(vcpu, HCR_EL2);
    382  __entry->hcr_el2 = __vcpu_sys_reg(vcpu, HCR_EL2);
|
| emulate-nested.c |
    493   u64 val = __vcpu_sys_reg(vcpu, CNTHCTL_EL2);
    521   val = __vcpu_sys_reg(vcpu, HCR_EL2);  in is_nested_nv2_guest()
    545   u64 val = __vcpu_sys_reg(vcpu, CPTR_EL2);  in check_cptr_tta()
    570   __vcpu_sys_reg(vcpu, PMSELR_EL0));  in check_mdcr_hpmn()
    2379  val = __vcpu_sys_reg(vcpu, tb->index);  in get_behaviour()
    2446  val = __vcpu_sys_reg(vcpu, sr);  in check_fgt_bit()
    2539  tmp = __vcpu_sys_reg(vcpu, HCRX_EL2);  in triage_sysreg_trap()
    2608  (__vcpu_sys_reg(vcpu, reg) & control_bit)) {  in __forward_traps()
    2701  elr = __vcpu_sys_reg(vcpu, ELR_EL2);  in kvm_emulate_nested_eret()
    2823  !(__vcpu_sys_reg(vcpu, HCR_EL2) & HCR_IMO))  in kvm_inject_nested_irq()
    [all …]
|
| vgic-sys-reg-v3.c |
    310  *val = __vcpu_sys_reg(vcpu, r->reg);  in get_gic_ich_reg()
|
| nested.c |
    745   if (__vcpu_sys_reg(vcpu, HCR_EL2) & HCR_NV)  in kvm_vcpu_load_hw_mmu()
    1172  return (u64)sign_extend64(__vcpu_sys_reg(vcpu, VNCR_EL2), 48);  in read_vncr_el2()
|
| arm.c |
    534  val = __vcpu_sys_reg(vcpu, HCR_EL2);  in vcpu_set_pauth_traps()
|
| /arch/arm64/kvm/hyp/include/hyp/ |
| switch.h |
    244  u64 hfg = __vcpu_sys_reg(vcpu, reg); \
    440  u64 val = __vcpu_sys_reg(vcpu, HCRX_EL2);  in __activate_traps_common()
    500  vsesr = __vcpu_sys_reg(vcpu, VSESR_EL2);  in ___activate_traps()
    570  sve_cond_update_zcr_vq(__vcpu_sys_reg(vcpu, ZCR_EL2), SYS_ZCR_EL2);  in __hyp_sve_restore_guest()
    572  write_sysreg_el1(__vcpu_sys_reg(vcpu, vcpu_sve_zcr_elx(vcpu)), SYS_ZCR);  in __hyp_sve_restore_guest()
    596  zcr_el2 = __vcpu_sys_reg(vcpu, ZCR_EL2);  in fpsimd_lazy_switch_to_guest()
    602  zcr_el1 = __vcpu_sys_reg(vcpu, vcpu_sve_zcr_elx(vcpu));  in fpsimd_lazy_switch_to_guest()
    718  write_sysreg_s(__vcpu_sys_reg(vcpu, FPMR), SYS_FPMR);  in kvm_hyp_handle_fpsimd()
    722  write_sysreg(__vcpu_sys_reg(vcpu, FPEXC32_EL2), fpexc32_el2);  in kvm_hyp_handle_fpsimd()
    833  val = __vcpu_sys_reg(vcpu, CNTHCTL_EL2);  in kvm_handle_cntxct()
    [all …]
|
| sysreg-sr.h |
    370  write_sysreg(__vcpu_sys_reg(vcpu, DACR32_EL2), dacr32_el2);  in __sysreg32_restore_state()
    371  write_sysreg(__vcpu_sys_reg(vcpu, IFSR32_EL2), ifsr32_el2);  in __sysreg32_restore_state()
    374  write_sysreg(__vcpu_sys_reg(vcpu, DBGVCR32_EL2), dbgvcr32_el2);  in __sysreg32_restore_state()
|
| /arch/arm64/include/asm/ |
| kvm_emulate.h |
    213  (__vcpu_sys_reg(vcpu, HCR_EL2) & HCR_E2H));  in vcpu_el2_e2h_is_set()
    234  hcr = __vcpu_sys_reg(vcpu, HCR_EL2);  in is_hyp_ctxt()
    266  (__vcpu_sys_reg(vcpu, HCRX_EL2) & HCRX_EL2_TMEA);  in vserror_state_is_nested()
    324  u64 hcr_el2 = __vcpu_sys_reg(vcpu, HCR_EL2);  in guest_hyp_wfx_traps_enabled()
    506  return __vcpu_sys_reg(vcpu, MPIDR_EL1) & MPIDR_HWID_BITMASK;  in kvm_vcpu_get_mpidr_aff()
    612  u64 cptr = __vcpu_sys_reg(vcpu, CPTR_EL2);  in vcpu_sanitised_cptr_el2()
|
| kvm_host.h |
    1154  #define __vcpu_sys_reg(v,r) \  (macro definition)
    1485  (!!(__vcpu_sys_reg(vcpu, OSLSR_EL1) & OSLSR_EL1_OSLK))
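The macro body at kvm_host.h:1154 is truncated by the cross-referencer. As a rough model only (the real expansion goes through the vCPU's sysreg context and varies across kernel versions), an array-backed read accessor with the same shape, reusing the OSLSR_EL1 check from line 1485:

```c
#include <stdint.h>
#include <stdio.h>

enum vcpu_sysreg { MPIDR_EL1, HCR_EL2, OSLSR_EL1, NR_SYS_REGS };
#define OSLSR_EL1_OSLK	(1ULL << 1)	/* OS Lock status bit */

struct kvm_vcpu {
	uint64_t sys_regs[NR_SYS_REGS];	/* simplified stand-in for the real context */
};

/* Rough model: index the per-vCPU shadow register file. The real macro's
 * expansion is truncated in the listing above and is not reproduced here. */
#define __vcpu_sys_reg(v, r)	((v)->sys_regs[(r)])

/* Mirrors the kvm_host.h:1485 usage: test the OS Lock bit in the
 * shadow OSLSR_EL1 value. */
static int os_lock_held(struct kvm_vcpu *vcpu)
{
	return !!(__vcpu_sys_reg(vcpu, OSLSR_EL1) & OSLSR_EL1_OSLK);
}

int main(void)
{
	struct kvm_vcpu vcpu = { .sys_regs = { [OSLSR_EL1] = OSLSR_EL1_OSLK } };

	printf("OS lock held: %d\n", os_lock_held(&vcpu));
	return 0;
}
```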
|
| /arch/arm64/kvm/hyp/ |
| vgic-v3-sr.c |
    1091  ich_hcr = __vcpu_sys_reg(vcpu, ICH_HCR_EL2);  in __vgic_v3_check_trap_forwarding()
    1096  (__vcpu_sys_reg(vcpu, HFGRTR_EL2) & HFGRTR_EL2_ICC_IGRPENn_EL1))  in __vgic_v3_check_trap_forwarding()
    1100  (__vcpu_sys_reg(vcpu, HFGWTR_EL2) & HFGWTR_EL2_ICC_IGRPENn_EL1))  in __vgic_v3_check_trap_forwarding()
    1117  (__vcpu_sys_reg(vcpu, HFGRTR_EL2) & HFGRTR_EL2_ICC_IGRPENn_EL1))  in __vgic_v3_check_trap_forwarding()
    1121  (__vcpu_sys_reg(vcpu, HFGWTR_EL2) & HFGWTR_EL2_ICC_IGRPENn_EL1))  in __vgic_v3_check_trap_forwarding()
|
| exception.c |
    33  return __vcpu_sys_reg(vcpu, reg);  in __vcpu_read_sys_reg()
|