Lines Matching refs:esr in arch/arm64/kvm/handle_exit.c
31 static void kvm_handle_guest_serror(struct kvm_vcpu *vcpu, u64 esr) in kvm_handle_guest_serror() argument
33 if (!arm64_is_ras_serror(esr) || arm64_is_fatal_ras_serror(NULL, esr)) in kvm_handle_guest_serror()
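These two hits are the whole of the SError filter: an SError that either carries no RAS syndrome or is a fatal RAS error cannot be handled by the host, so it is reflected back into the guest as a virtual abort. A minimal sketch of the full function, with the injection call (kvm_inject_vabt(), not visible in the hits) filled in from the mainline source:

static void kvm_handle_guest_serror(struct kvm_vcpu *vcpu, u64 esr)
{
        /* Reflect non-RAS and fatal-RAS SErrors into the guest as a vSError. */
        if (!arm64_is_ras_serror(esr) || arm64_is_fatal_ras_serror(NULL, esr))
                kvm_inject_vabt(vcpu);
}
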
128 u64 esr = kvm_vcpu_get_esr(vcpu); in kvm_handle_wfx() local
130 if (esr & ESR_ELx_WFx_ISS_WFE) { in kvm_handle_wfx()
138 if (esr & ESR_ELx_WFx_ISS_WFxT) { in kvm_handle_wfx()
139 if (esr & ESR_ELx_WFx_ISS_RV) { in kvm_handle_wfx()
149 esr &= ~ESR_ELx_WFx_ISS_WFxT; in kvm_handle_wfx()
153 if (esr & ESR_ELx_WFx_ISS_WFE) { in kvm_handle_wfx()
156 if (esr & ESR_ELx_WFx_ISS_WFxT) in kvm_handle_wfx()
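Taken together, the kvm_handle_wfx() hits show a three-step decode of a trapped WFI/WFE/WFxT: classify WFE vs. WFI for the exit statistics, handle the WFxT timeout variant (where a clear ESR_ELx_WFx_ISS_RV bit means the deadline register is invalid and WFxT is degraded to plain WFx by clearing the WFxT bit), and finally either spin (WFE) or block (WFI/WFIT). A condensed sketch with the trace calls omitted; the helper names (kvm_vcpu_on_spin(), kvm_vcpu_wfi(), kvm_incr_pc(), the timer accessors, the IN_WFIT flag) are not in the hits and are taken from the mainline source:

static int kvm_handle_wfx(struct kvm_vcpu *vcpu)
{
        u64 esr = kvm_vcpu_get_esr(vcpu);

        /* A single ISS bit distinguishes WFE from WFI. */
        if (esr & ESR_ELx_WFx_ISS_WFE)
                vcpu->stat.wfe_exit_stat++;
        else
                vcpu->stat.wfi_exit_stat++;

        if (esr & ESR_ELx_WFx_ISS_WFxT) {
                if (esr & ESR_ELx_WFx_ISS_RV) {
                        u64 val, now;

                        /* WFIT/WFET carry a deadline in a GPR; bail out if it has passed. */
                        now = kvm_arm_timer_get_reg(vcpu, KVM_REG_ARM_TIMER_CNT);
                        val = vcpu_get_reg(vcpu, kvm_vcpu_sys_get_rt(vcpu));

                        if (now >= val)
                                goto out;
                } else {
                        /* Treat WFxT as WFx if the register field is invalid. */
                        esr &= ~ESR_ELx_WFx_ISS_WFxT;
                }
        }

        if (esr & ESR_ELx_WFx_ISS_WFE) {
                kvm_vcpu_on_spin(vcpu, vcpu_mode_priv(vcpu));
        } else {
                if (esr & ESR_ELx_WFx_ISS_WFxT)
                        vcpu_set_flag(vcpu, IN_WFIT);

                kvm_vcpu_wfi(vcpu);
        }
out:
        kvm_incr_pc(vcpu);

        return 1;
}
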
181 u64 esr = kvm_vcpu_get_esr(vcpu); in kvm_handle_guest_debug() local
184 run->debug.arch.hsr = lower_32_bits(esr); in kvm_handle_guest_debug()
185 run->debug.arch.hsr_high = upper_32_bits(esr); in kvm_handle_guest_debug()
188 switch (ESR_ELx_EC(esr)) { in kvm_handle_guest_debug()
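The kvm_handle_guest_debug() hits show how the 64-bit ESR is handed to userspace: the legacy debug.arch.hsr field is only 32 bits wide, so the upper half goes into a separate hsr_high field, and the exception class is then switched on for per-class extras. A sketch; the exit-reason setup and the two switch arms are filled in from the mainline source:

static int kvm_handle_guest_debug(struct kvm_vcpu *vcpu)
{
        struct kvm_run *run = vcpu->run;
        u64 esr = kvm_vcpu_get_esr(vcpu);

        run->exit_reason = KVM_EXIT_DEBUG;
        /* ESR_ELx is 64 bits but the legacy UAPI field is 32; split it. */
        run->debug.arch.hsr = lower_32_bits(esr);
        run->debug.arch.hsr_high = upper_32_bits(esr);
        run->flags = KVM_DEBUG_ARCH_HSR_HIGH_VALID;

        switch (ESR_ELx_EC(esr)) {
        case ESR_ELx_EC_WATCHPT_LOW:
                /* Watchpoints also report the faulting address. */
                run->debug.arch.far = vcpu->arch.fault.far_el2;
                break;
        case ESR_ELx_EC_SOFTSTP_LOW:
                vcpu_clear_flag(vcpu, DBG_SS_ACTIVE_PENDING);
                break;
        }

        /* Return 0 to exit to userspace. */
        return 0;
}
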
202 u64 esr = kvm_vcpu_get_esr(vcpu); in kvm_handle_unknown_ec() local
205 esr, esr_get_class_string(esr)); in kvm_handle_unknown_ec()
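kvm_handle_unknown_ec() just logs the unhandled class; the second hit is the continuation line of the printk's argument list. Reconstructed below; the format string and the UNDEF injection (kvm_pr_unimpl(), kvm_inject_undefined()) are not in the hits and are taken from the mainline source:

static int kvm_handle_unknown_ec(struct kvm_vcpu *vcpu)
{
        u64 esr = kvm_vcpu_get_esr(vcpu);

        /* Log the unhandled exception class, then inject UNDEF into the guest. */
        kvm_pr_unimpl("Unknown exception class: esr: %#016llx -- %s\n",
                      esr, esr_get_class_string(esr));

        kvm_inject_undefined(vcpu);
        return 1;
}
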
270 u64 esr = kvm_vcpu_get_esr(vcpu); in kvm_get_exit_handler() local
271 u8 esr_ec = ESR_ELx_EC(esr); in kvm_get_exit_handler()
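kvm_get_exit_handler() is where the ESR actually steers exit handling: the exception class field (ESR_ELx[31:26], extracted by ESR_ELx_EC()) indexes a table of per-class handlers. A sketch; the table name (arm_exit_handlers) is taken from the mainline source:

static exit_handle_fn kvm_get_exit_handler(struct kvm_vcpu *vcpu)
{
        u64 esr = kvm_vcpu_get_esr(vcpu);
        u8 esr_ec = ESR_ELx_EC(esr);

        /* The EC field selects the handler for this exit. */
        return arm_exit_handlers[esr_ec];
}
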
371 void __noreturn __cold nvhe_hyp_panic_handler(u64 esr, u64 spsr, in nvhe_hyp_panic_handler() argument
382 } else if (ESR_ELx_EC(esr) == ESR_ELx_EC_BRK64 && in nvhe_hyp_panic_handler()
383 (esr & ESR_ELx_BRK64_ISS_COMMENT_MASK) == BUG_BRK_IMM) { in nvhe_hyp_panic_handler()
418 spsr, elr_virt, esr, far, hpfar, par, vcpu); in nvhe_hyp_panic_handler()
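In nvhe_hyp_panic_handler() the ESR saved at EL2 is used twice: once to recognise a hyp-side BUG() (a BRK64 exception whose comment immediate matches BUG_BRK_IMM), and once in the final panic string alongside FAR/HPFAR/PAR. A heavily condensed sketch: the bug-table lookup and backtrace dump are omitted, and the mode check, kvm_err() reporting, and address translation are paraphrased from the mainline source:

void __noreturn __cold nvhe_hyp_panic_handler(u64 esr, u64 spsr,
                                              u64 elr_virt, u64 elr_phys,
                                              u64 par, uintptr_t vcpu,
                                              u64 far, u64 hpfar)
{
        u64 mode = spsr & PSR_MODE_MASK;
        u64 panic_addr = __phys_to_kimg(elr_phys);

        if (mode != PSR_MODE_EL2t && mode != PSR_MODE_EL2h) {
                kvm_err("Invalid host exception to nVHE hyp!\n");
        } else if (ESR_ELx_EC(esr) == ESR_ELx_EC_BRK64 &&
                   (esr & ESR_ELx_BRK64_ISS_COMMENT_MASK) == BUG_BRK_IMM) {
                /* BRK64 with the BUG() comment immediate: a hyp BUG fired. */
                kvm_err("nVHE hyp BUG at: [<%016llx>]!\n", panic_addr);
        } else {
                kvm_err("nVHE hyp panic at: [<%016llx>]!\n", panic_addr);
        }

        /* Re-panic in the host with the register state saved at EL2. */
        panic("HYP panic:\nPS:%08llx PC:%016llx ESR:%016llx\nFAR:%016llx HPFAR:%016llx PAR:%016llx\nVCPU:%016lx\n",
              spsr, elr_virt, esr, far, hpfar, par, vcpu);
}
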