/linux-6.3-rc2/arch/arm64/mm/
  fault.c
      71  if (esr & ESR_ELx_ISV) {    in data_abort_decode()
      95  ESR_ELx_EC(esr), esr_get_class_string(esr),    in mem_abort_decode()
     107  data_abort_decode(esr);    in mem_abort_decode()
     248  if (!is_el1_data_abort(esr) && !is_el1_instruction_abort(esr))    in is_el1_permission_fault()
     303  mem_abort_decode(esr);    in die_kernel_fault()
     306  die("Oops", regs, esr);    in die_kernel_fault()
     456  esr = 0;    in set_thread_esr()
     526  return (esr & ESR_ELx_WNR) && !(esr & ESR_ELx_CM);    in is_write_abort()
     579  addr, esr, regs);    in do_page_fault()
     583  addr, esr, regs);    in do_page_fault()
     [all …]
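The fault.c hits above decode the data-abort ISS bits of ESR_ELx; for example, is_write_abort() at line 526 treats a fault as a write only when WnR is set and it was not caused by a cache-maintenance instruction. A minimal standalone sketch of those checks, with the architectural bit positions (ISV bit 24, CM bit 8, WnR bit 6) written out as local defines rather than taken from asm/esr.h:

    /* Sketch of the data-abort ISS checks used in fault.c; bit positions
     * follow the Arm ARM ESR_ELx data-abort ISS encoding. */
    #include <stdbool.h>
    #include <stdint.h>

    #define ESR_ELx_ISV	(1UL << 24)	/* instruction syndrome valid */
    #define ESR_ELx_CM	(1UL << 8)	/* cache maintenance operation */
    #define ESR_ELx_WNR	(1UL << 6)	/* write, not read */

    /* Mirrors is_write_abort(): a "write" only if WnR is set and the fault
     * was not raised by a cache-maintenance instruction. */
    static bool is_write_abort(unsigned long esr)
    {
    	return (esr & ESR_ELx_WNR) && !(esr & ESR_ELx_CM);
    }

    /* data_abort_decode() only trusts the rest of the syndrome when ISV=1. */
    static bool syndrome_valid(unsigned long esr)
    {
    	return esr & ESR_ELx_ISV;
    }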
/linux-6.3-rc2/arch/arm64/kernel/
  entry-common.c
     434  el1_pc(regs, esr);    in el1h_64_sync_handler()
     441  el1_bti(regs, esr);    in el1h_64_sync_handler()
     447  el1_dbg(regs, esr);    in el1h_64_sync_handler()
     658  el0_da(regs, esr);    in el0t_64_sync_handler()
     661  el0_ia(regs, esr);    in el0t_64_sync_handler()
     677  el0_sys(regs, esr);    in el0t_64_sync_handler()
     680  el0_sp(regs, esr);    in el0t_64_sync_handler()
     683  el0_pc(regs, esr);    in el0t_64_sync_handler()
     786  el0_da(regs, esr);    in el0t_32_sync_handler()
     789  el0_ia(regs, esr);    in el0t_32_sync_handler()
     [all …]
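entry-common.c fans each synchronous exception out to el0_da()/el0_ia()/el0_sys() and friends based on the exception class in ESR_ELx. A rough sketch of that dispatch shape; the EC constants below are the architectural values for the classes named above, while the switch body is a stand-in for the real handler calls:

    #include <stdint.h>
    #include <stdio.h>

    #define ESR_ELx_EC_SHIFT	26
    #define ESR_ELx_EC_MASK		(0x3FUL << ESR_ELx_EC_SHIFT)
    #define ESR_ELx_EC(esr)		(((esr) & ESR_ELx_EC_MASK) >> ESR_ELx_EC_SHIFT)

    /* Architectural exception-class values for the handlers listed above. */
    #define EC_SYS64	0x18	/* MSR/MRS/system instruction trap -> el0_sys() */
    #define EC_IABT_LOW	0x20	/* instruction abort from EL0      -> el0_ia()  */
    #define EC_PC_ALIGN	0x22	/* PC alignment fault              -> el0_pc()  */
    #define EC_DABT_LOW	0x24	/* data abort from EL0             -> el0_da()  */
    #define EC_SP_ALIGN	0x26	/* SP alignment fault              -> el0_sp()  */

    static void el0_sync_dispatch(uint64_t esr)
    {
    	switch (ESR_ELx_EC(esr)) {
    	case EC_DABT_LOW:	puts("el0_da");  break;
    	case EC_IABT_LOW:	puts("el0_ia");  break;
    	case EC_SYS64:		puts("el0_sys"); break;
    	case EC_SP_ALIGN:	puts("el0_sp");  break;
    	case EC_PC_ALIGN:	puts("el0_pc");  break;
    	default:		puts("el0_unknown"); break;
    	}
    }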
  traps.c
     257  if (esr)    in arm64_show_signal()
     258  pr_cont("%s, ESR 0x%016lx, ", esr_get_class_string(esr), esr);    in arm64_show_signal()
     758  do_el0_undef(regs, esr);    in do_el0_cp15()
     773  do_el0_undef(regs, esr);    in do_el0_cp15()
     792  do_el0_undef(regs, esr);    in do_el0_sys()
     875  pr_emerg("ESR: 0x%016lx -- %s\n", esr, esr_get_class_string(esr));    in panic_bad_stack()
     901  smp_processor_id(), esr, esr_get_class_string(esr));    in arm64_serror_panic()
     946  if (!arm64_is_ras_serror(esr) || arm64_is_fatal_ras_serror(regs, esr))    in do_serror()
    1041  #define KASAN_ESR_SIZE(esr) (1 << ((esr) & KASAN_ESR_SIZE_MASK))    argument
    1085  die(report_ubsan_failure(regs, esr & UBSAN_BRK_MASK), regs, esr);    in ubsan_handler()
     [all …]
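arm64_show_signal() and panic_bad_stack() both print the raw ESR together with esr_get_class_string(), which is essentially a table lookup keyed on the EC field. A hedged sketch of that idea; the class names below are illustrative, not the kernel's exact strings:

    #include <stdint.h>

    #define ESR_ELx_EC_SHIFT	26
    #define ESR_ELx_EC(esr)		(((esr) >> ESR_ELx_EC_SHIFT) & 0x3F)

    /* Tiny stand-in for esr_get_class_string(): map EC to a readable name,
     * falling back to a fixed string for classes not listed here. */
    static const char *esr_class_string(unsigned long esr)
    {
    	static const struct { unsigned int ec; const char *name; } classes[] = {
    		{ 0x00, "Unknown/Uncategorized" },
    		{ 0x15, "SVC (AArch64)" },
    		{ 0x18, "MSR/MRS (AArch64)" },
    		{ 0x24, "DABT (lower EL)" },
    		{ 0x25, "DABT (current EL)" },
    		{ 0x2F, "SError" },
    		{ 0x3C, "BRK (AArch64)" },
    	};
    	unsigned long ec = ESR_ELx_EC(esr);

    	for (unsigned int i = 0; i < sizeof(classes) / sizeof(classes[0]); i++)
    		if (classes[i].ec == ec)
    			return classes[i].name;
    	return "UNRECOGNIZED EC";
    }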
  debug-monitors.c
     205  static int call_step_hook(struct pt_regs *regs, unsigned long esr)    in call_step_hook() argument
     218  retval = hook->fn(regs, esr);    in call_step_hook()
     241  static int single_step_handler(unsigned long unused, unsigned long esr,    in single_step_handler() argument
     253  if (!handler_found && call_step_hook(regs, esr) == DBG_HOOK_HANDLED)    in single_step_handler()
     302  static int call_break_hook(struct pt_regs *regs, unsigned long esr)    in call_break_hook() argument
     306  int (*fn)(struct pt_regs *regs, unsigned long esr) = NULL;    in call_break_hook()
     315  unsigned long comment = esr & ESR_ELx_BRK64_ISS_COMMENT_MASK;    in call_break_hook()
     321  return fn ? fn(regs, esr) : DBG_HOOK_ERROR;    in call_break_hook()
     325  static int brk_handler(unsigned long unused, unsigned long esr,    in brk_handler() argument
     328  if (call_break_hook(regs, esr) == DBG_HOOK_HANDLED)    in brk_handler()
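call_break_hook() at line 302 picks a handler by masking the BRK immediate out of the ISS; ESR_ELx_BRK64_ISS_COMMENT_MASK covers the 16-bit imm16 of the BRK instruction. A simplified, lock-free sketch of that registry pattern, assuming a flat array instead of the kernel's hook lists and using local stand-in return codes:

    #include <stddef.h>
    #include <stdint.h>

    #define ESR_ELx_BRK64_ISS_COMMENT_MASK	0xFFFFUL	/* BRK #imm16 lives in ISS[15:0] */

    #define DBG_HOOK_HANDLED	0
    #define DBG_HOOK_ERROR		1

    struct pt_regs;		/* opaque here; the real layout is arch-specific */

    struct break_hook {
    	uint16_t imm;		/* BRK immediate this hook owns */
    	uint16_t mask;		/* immediate bits to ignore when comparing */
    	int (*fn)(struct pt_regs *regs, unsigned long esr);
    };

    /* Walk a (hypothetical) static table instead of the kernel's hook list
     * and call the first hook whose immediate matches the comment field. */
    static int call_break_hook(struct break_hook *hooks, size_t n,
    			   struct pt_regs *regs, unsigned long esr)
    {
    	unsigned long comment = esr & ESR_ELx_BRK64_ISS_COMMENT_MASK;

    	for (size_t i = 0; i < n; i++)
    		if ((comment & ~(unsigned long)hooks[i].mask) == hooks[i].imm)
    			return hooks[i].fn(regs, esr);
    	return DBG_HOOK_ERROR;
    }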
  fpsimd.c
    1440  void do_sve_acc(unsigned long esr, struct pt_regs *regs)    in do_sve_acc() argument
    1482  void do_sme_acc(unsigned long esr, struct pt_regs *regs)    in do_sme_acc() argument
    1494  if (ESR_ELx_ISS(esr) != ESR_ELx_SME_ISS_SME_DISABLED) {    in do_sme_acc()
    1526  void do_fpsimd_acc(unsigned long esr, struct pt_regs *regs)    in do_fpsimd_acc() argument
    1535  void do_fpsimd_exc(unsigned long esr, struct pt_regs *regs)    in do_fpsimd_exc() argument
    1539  if (esr & ESR_ELx_FP_EXC_TFV) {    in do_fpsimd_exc()
    1540  if (esr & FPEXC_IOF)    in do_fpsimd_exc()
    1542  else if (esr & FPEXC_DZF)    in do_fpsimd_exc()
    1544  else if (esr & FPEXC_OFF)    in do_fpsimd_exc()
    1546  else if (esr & FPEXC_UFF)    in do_fpsimd_exc()
     [all …]
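do_fpsimd_exc() at line 1535 only trusts the individual trap flags when TFV is set, then walks IOF/DZF/OFF/UFF in priority order to choose a signal code. A standalone sketch of that classification, with the FPSCR-style flag bits written out locally (TFV at bit 23, cumulative exception flags in bits 0-4) and plain strings standing in for the kernel's FPE_* codes:

    #include <stdint.h>

    #define ESR_ELx_FP_EXC_TFV	(1UL << 23)	/* trapped-fault-valid */
    #define FPEXC_IOF		(1UL << 0)	/* invalid operation */
    #define FPEXC_DZF		(1UL << 1)	/* divide by zero */
    #define FPEXC_OFF		(1UL << 2)	/* overflow */
    #define FPEXC_UFF		(1UL << 3)	/* underflow */
    #define FPEXC_IXF		(1UL << 4)	/* inexact */

    /* Classify a trapped FP exception in the same priority order as
     * do_fpsimd_exc(); "unknown" stands in for the kernel's default code. */
    static const char *classify_fp_exc(unsigned long esr)
    {
    	if (!(esr & ESR_ELx_FP_EXC_TFV))
    		return "unknown (flags not valid)";
    	if (esr & FPEXC_IOF)
    		return "invalid operation";
    	if (esr & FPEXC_DZF)
    		return "divide by zero";
    	if (esr & FPEXC_OFF)
    		return "overflow";
    	if (esr & FPEXC_UFF)
    		return "underflow";
    	if (esr & FPEXC_IXF)
    		return "inexact";
    	return "unknown";
    }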
  kgdb.c
     235  static int kgdb_brk_fn(struct pt_regs *regs, unsigned long esr)    in kgdb_brk_fn() argument
     242  static int kgdb_compiled_brk_fn(struct pt_regs *regs, unsigned long esr)    in NOKPROBE_SYMBOL()
     251  static int kgdb_step_brk_fn(struct pt_regs *regs, unsigned long esr)    in kgdb_step_brk_fn() argument
/linux-6.3-rc2/arch/arm64/include/asm/
  exception.h
      24  unsigned long esr = ESR_ELx_EC_SERROR << ESR_ELx_EC_SHIFT;    in disr_to_esr() local
      27  esr |= (disr & DISR_EL1_ESR_MASK);    in disr_to_esr()
      29  esr |= (disr & ESR_ELx_ISS_MASK);    in disr_to_esr()
      31  return esr;    in disr_to_esr()
      61  void do_el0_undef(struct pt_regs *regs, unsigned long esr);
      64  void do_el1_bti(struct pt_regs *regs, unsigned long esr);
      68  void do_sve_acc(unsigned long esr, struct pt_regs *regs);
      69  void do_sme_acc(unsigned long esr, struct pt_regs *regs);
      71  void do_el0_sys(unsigned long esr, struct pt_regs *regs);
      74  void do_el0_cp15(unsigned long esr, struct pt_regs *regs);
     [all …]
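disr_to_esr() at line 24 manufactures a SError-class ESR from a deferred DISR_EL1 value: it plants the SError exception class in EC and copies the syndrome bits across. A self-contained sketch of the same construction; it always takes the non-RAS branch (ESR_ELx_ISS_MASK), whereas the kernel selects DISR_EL1_ESR_MASK or ESR_ELx_ISS_MASK depending on whether the RAS extension is present:

    #include <stdint.h>

    #define ESR_ELx_EC_SHIFT	26
    #define ESR_ELx_EC_SERROR	0x2FUL		/* SError exception class */
    #define ESR_ELx_ISS_MASK	0x01FFFFFFUL	/* ISS = bits [24:0] */

    /* Build a synthetic ESR describing a deferred SError, in the spirit of
     * disr_to_esr(); only the non-RAS masking branch is modelled here. */
    static uint64_t disr_to_esr(uint64_t disr)
    {
    	uint64_t esr = ESR_ELx_EC_SERROR << ESR_ELx_EC_SHIFT;

    	esr |= disr & ESR_ELx_ISS_MASK;
    	return esr;
    }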
  traps.h
      59  static inline bool arm64_is_ras_serror(unsigned long esr)    in arm64_is_ras_serror() argument
      63  if (esr & ESR_ELx_IDS)    in arm64_is_ras_serror()
      79  static inline unsigned long arm64_ras_serror_get_severity(unsigned long esr)    in arm64_ras_serror_get_severity() argument
      81  unsigned long aet = esr & ESR_ELx_AET;    in arm64_ras_serror_get_severity()
      83  if (!arm64_is_ras_serror(esr)) {    in arm64_ras_serror_get_severity()
      92  if ((esr & ESR_ELx_FSC) != ESR_ELx_FSC_SERROR) {    in arm64_ras_serror_get_severity()
     100  bool arm64_is_fatal_ras_serror(struct pt_regs *regs, unsigned long esr);
     101  void __noreturn arm64_serror_panic(struct pt_regs *regs, unsigned long esr);
  esr.h
      74  #define ESR_ELx_EC(esr) (((esr) & ESR_ELx_EC_MASK) >> ESR_ELx_EC_SHIFT)    argument
      79  #define ESR_ELx_ISS(esr) ((esr) & ESR_ELx_ISS_MASK)    argument
     204  #define ESR_ELx_SYS64_ISS_RT(esr) \    argument
     205  (((esr) & ESR_ELx_SYS64_ISS_RT_MASK) >> ESR_ELx_SYS64_ISS_RT_SHIFT)
     362  static inline bool esr_is_data_abort(unsigned long esr)    in esr_is_data_abort() argument
     364  const unsigned long ec = ESR_ELx_EC(esr);    in esr_is_data_abort()
     369  const char *esr_get_class_string(unsigned long esr);
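esr.h defines the basic accessors: EC in bits [31:26], the IL bit at 25, and everything below as the ISS. A compile-alone restatement of those macros plus a tiny decoder for poking at ESR values dumped in oopses; the constants mirror the architectural layout rather than including asm/esr.h:

    #include <stdint.h>
    #include <stdio.h>

    #define ESR_ELx_EC_SHIFT	26
    #define ESR_ELx_EC_MASK		(0x3FUL << ESR_ELx_EC_SHIFT)
    #define ESR_ELx_EC(esr)		(((esr) & ESR_ELx_EC_MASK) >> ESR_ELx_EC_SHIFT)
    #define ESR_ELx_IL		(1UL << 25)
    #define ESR_ELx_ISS_MASK	0x01FFFFFFUL
    #define ESR_ELx_ISS(esr)	((esr) & ESR_ELx_ISS_MASK)

    #define ESR_ELx_EC_DABT_LOW	0x24UL
    #define ESR_ELx_EC_DABT_CUR	0x25UL

    /* Same shape as esr_is_data_abort(): a data abort taken from a lower
     * or the current exception level. */
    static int esr_is_data_abort(unsigned long esr)
    {
    	unsigned long ec = ESR_ELx_EC(esr);

    	return ec == ESR_ELx_EC_DABT_LOW || ec == ESR_ELx_EC_DABT_CUR;
    }

    int main(void)
    {
    	/* A typical oops ESR: data abort at the current EL, write,
    	 * level-1 translation fault. */
    	unsigned long esr = 0x96000045UL;

    	printf("EC=0x%02lx IL=%d ISS=0x%07lx data-abort=%d\n",
    	       ESR_ELx_EC(esr), (int)!!(esr & ESR_ELx_IL),
    	       ESR_ELx_ISS(esr), esr_is_data_abort(esr));
    	return 0;
    }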
  debug-monitors.h
      67  int (*fn)(struct pt_regs *regs, unsigned long esr);
      78  int (*fn)(struct pt_regs *regs, unsigned long esr);
  kvm_emulate.h
     301  u64 esr = kvm_vcpu_get_esr(vcpu);    in kvm_vcpu_get_condition() local
     303  if (esr & ESR_ELx_CV)    in kvm_vcpu_get_condition()
     304  return (esr & ESR_ELx_COND_MASK) >> ESR_ELx_COND_SHIFT;    in kvm_vcpu_get_condition()
     432  u64 esr = kvm_vcpu_get_esr(vcpu);    in kvm_vcpu_sys_get_rt() local
     433  return ESR_ELx_SYS64_ISS_RT(esr);    in kvm_vcpu_sys_get_rt()
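kvm_vcpu_get_condition() and kvm_vcpu_sys_get_rt() pull two ISS sub-fields out of the trapped instruction's syndrome: the condition code, only valid when CV (bit 24) is set, and the Rt register number for MSR/MRS traps. A standalone restatement with the field offsets spelled out (COND in ISS[23:20], Rt in ISS[9:5]); how the invalid-condition case is reported below is this sketch's own choice:

    #include <stdint.h>

    #define ESR_ELx_CV			(1UL << 24)	/* condition code valid */
    #define ESR_ELx_COND_SHIFT		20
    #define ESR_ELx_COND_MASK		(0xFUL << ESR_ELx_COND_SHIFT)
    #define ESR_ELx_SYS64_ISS_RT_SHIFT	5
    #define ESR_ELx_SYS64_ISS_RT_MASK	(0x1FUL << ESR_ELx_SYS64_ISS_RT_SHIFT)

    /* Condition field of a trapped conditional instruction; falls back to
     * 0xE (the AArch64 "always" condition) when CV says it is not valid. */
    static int esr_condition(uint64_t esr)
    {
    	if (esr & ESR_ELx_CV)
    		return (esr & ESR_ELx_COND_MASK) >> ESR_ELx_COND_SHIFT;
    	return 0xE;
    }

    /* General-purpose register index named by a trapped MSR/MRS access. */
    static unsigned int esr_sys64_rt(uint64_t esr)
    {
    	return (esr & ESR_ELx_SYS64_ISS_RT_MASK) >> ESR_ELx_SYS64_ISS_RT_SHIFT;
    }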
  kvm_ras.h
      17  static inline int kvm_handle_guest_sea(phys_addr_t addr, u64 esr)    in kvm_handle_guest_sea() argument
/linux-6.3-rc2/arch/arm64/kvm/
  inject_fault.c
      61  u64 esr = 0;    in inject_abt64() local
      70  esr |= ESR_ELx_IL;    in inject_abt64()
      84  esr |= ESR_ELx_FSC_EXTABT;    in inject_abt64()
      88  vcpu_write_sys_reg(vcpu, esr, ESR_EL1);    in inject_abt64()
      91  vcpu_write_sys_reg(vcpu, esr, ESR_EL2);    in inject_abt64()
     106  esr |= ESR_ELx_IL;    in inject_undef64()
     109  vcpu_write_sys_reg(vcpu, esr, ESR_EL1);    in inject_undef64()
     111  vcpu_write_sys_reg(vcpu, esr, ESR_EL2);    in inject_undef64()
     192  unsigned long addr, esr;    in kvm_inject_size_fault() local
     213  esr = vcpu_read_sys_reg(vcpu, ESR_EL1);    in kvm_inject_size_fault()
     [all …]
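inject_abt64() builds the syndrome for the abort it injects into the guest: set the instruction-length bit, pick an abort class, and mark the fault status as a synchronous external abort before writing the result to ESR_EL1 or ESR_EL2. A condensed sketch of that construction; the EC selection is simplified to the lower-EL case, whereas the real code also distinguishes where the guest was running and whether it is 32-bit:

    #include <stdbool.h>
    #include <stdint.h>

    #define ESR_ELx_EC_SHIFT	26
    #define ESR_ELx_EC_IABT_LOW	0x20UL	/* instruction abort from a lower EL */
    #define ESR_ELx_EC_DABT_LOW	0x24UL	/* data abort from a lower EL */
    #define ESR_ELx_IL		(1UL << 25)
    #define ESR_ELx_FSC_EXTABT	0x10UL	/* synchronous external abort */

    /* Build the ESR an injected external abort would carry; the caller
     * decides whether it lands in ESR_EL1 or ESR_EL2. */
    static uint64_t make_injected_abort_esr(bool is_iabt)
    {
    	uint64_t esr = 0;

    	esr |= (is_iabt ? ESR_ELx_EC_IABT_LOW : ESR_ELx_EC_DABT_LOW)
    		<< ESR_ELx_EC_SHIFT;
    	esr |= ESR_ELx_IL;		/* 32-bit trapped instruction */
    	esr |= ESR_ELx_FSC_EXTABT;	/* DFSC/IFSC = external abort */
    	return esr;
    }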
  handle_exit.c
      33  if (!arm64_is_ras_serror(esr) || arm64_is_fatal_ras_serror(NULL, esr))    in kvm_handle_guest_serror()
     128  u64 esr = kvm_vcpu_get_esr(vcpu);    in kvm_handle_wfx() local
     130  if (esr & ESR_ELx_WFx_ISS_WFE) {    in kvm_handle_wfx()
     138  if (esr & ESR_ELx_WFx_ISS_WFxT) {    in kvm_handle_wfx()
     139  if (esr & ESR_ELx_WFx_ISS_RV) {    in kvm_handle_wfx()
     149  esr &= ~ESR_ELx_WFx_ISS_WFxT;    in kvm_handle_wfx()
     153  if (esr & ESR_ELx_WFx_ISS_WFE) {    in kvm_handle_wfx()
     156  if (esr & ESR_ELx_WFx_ISS_WFxT)    in kvm_handle_wfx()
     188  switch (ESR_ELx_EC(esr)) {    in kvm_handle_guest_debug()
     205  esr, esr_get_class_string(esr));    in kvm_handle_unknown_ec()
     [all …]
  sys_regs.h
      32  .Op1 = ((esr) >> 14) & 0x7, \
      33  .CRn = ((esr) >> 10) & 0xf, \
      34  .CRm = ((esr) >> 1) & 0xf, \
      35  .Op2 = ((esr) >> 17) & 0x7, \
      36  .is_write = !((esr) & 1) })
      38  #define esr_cp1x_32_to_params(esr) \    argument
      39  ((struct sys_reg_params){ .Op1 = ((esr) >> 14) & 0x7, \
      40  .CRn = ((esr) >> 10) & 0xf, \
      41  .CRm = ((esr) >> 1) & 0xf, \
      42  .Op2 = ((esr) >> 17) & 0x7, \
     [all …]
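The esr_cp1x_32_to_params() macro shown above unpacks a trapped 32-bit CP15/CP14 access into its Op1/CRn/CRm/Op2 coordinates plus a read/write flag, using fixed shifts into the ISS. The same unpacking as a plain function over a local struct, with the field offsets copied from the macro body:

    #include <stdbool.h>
    #include <stdint.h>

    struct cp_access {
    	unsigned int Op1, CRn, CRm, Op2;
    	bool is_write;
    };

    /* Decode a trapped MCR/MRC syndrome the way esr_cp1x_32_to_params()
     * does: Op2 in ISS[19:17], Op1 in ISS[16:14], CRn in ISS[13:10],
     * CRm in ISS[4:1], direction in ISS[0] (1 = read). */
    static struct cp_access decode_cp_access(uint64_t esr)
    {
    	return (struct cp_access){
    		.Op1		= (esr >> 14) & 0x7,
    		.CRn		= (esr >> 10) & 0xf,
    		.CRm		= (esr >> 1) & 0xf,
    		.Op2		= (esr >> 17) & 0x7,
    		.is_write	= !(esr & 1),
    	};
    }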
/linux-6.3-rc2/arch/alpha/kernel/
  core_lca.c
     341  ((esr & ESR_CEE) ? "Correctable" :    in mem_error()
     342  (esr & ESR_UEE) ? "Uncorrectable" : "A"),    in mem_error()
     343  (esr & ESR_WRE) ? "write" : "read",    in mem_error()
     346  if (esr & ESR_CTE) {    in mem_error()
     349  if (esr & ESR_MSE) {    in mem_error()
     352  if (esr & ESR_MHE) {    in mem_error()
     355  if (esr & ESR_NXM) {    in mem_error()
     435  if (el.s->esr & ESR_EAV) {    in lca_machine_check()
     436  mem_error(el.s->esr, el.s->ear);    in lca_machine_check()
     450  if (el.l->esr & ESR_EAV) {    in lca_machine_check()
     [all …]
/linux-6.3-rc2/arch/sh/boards/mach-dreamcast/
  irq.c
      90  __u32 esr = ESR_BASE + (LEVEL(irq) << 2);    in mask_ack_systemasic_irq() local
      92  outl((1 << EVENT_BIT(irq)), esr);    in mask_ack_systemasic_irq()
     107  __u32 emr, esr, status, level;    in systemasic_irq_demux() local
     124  esr = ESR_BASE + (level << 2);    in systemasic_irq_demux()
     127  status = inl(esr);    in systemasic_irq_demux()
/linux-6.3-rc2/arch/arm64/kvm/hyp/include/hyp/
  fault.h
      44  static inline bool __get_fault_info(u64 esr, struct kvm_vcpu_fault_info *fault)    in __get_fault_info() argument
      61  if (!(esr & ESR_ELx_S1PTW) &&    in __get_fault_info()
      63  (esr & ESR_ELx_FSC_TYPE) == ESR_ELx_FSC_PERM)) {    in __get_fault_info()
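__get_fault_info() treats one case specially: a stage-2 permission fault that did not occur during a stage-1 page-table walk, where HPFAR may not be valid and the hypervisor has to translate the faulting address itself. A sketch of just that predicate, with the S1PTW bit and fault-status-code masks written out (S1PTW at ISS bit 7, FSC in bits [5:0], permission faults grouped under 0x0C once the level bits are masked off):

    #include <stdbool.h>
    #include <stdint.h>

    #define ESR_ELx_S1PTW		(1UL << 7)	/* fault during a stage-1 walk */
    #define ESR_ELx_FSC_TYPE	0x3CUL		/* FSC with the level bits masked off */
    #define ESR_ELx_FSC_PERM	0x0CUL		/* permission fault, any level */

    /* True when the abort is a permission fault on the access itself (not
     * on the stage-1 walk), the case where __get_fault_info() may need to
     * retranslate the faulting address. */
    static bool is_perm_fault_on_access(uint64_t esr)
    {
    	return !(esr & ESR_ELx_S1PTW) &&
    	       (esr & ESR_ELx_FSC_TYPE) == ESR_ELx_FSC_PERM;
    }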
/linux-6.3-rc2/drivers/net/ethernet/ibm/emac/
  mal.c
     220  u32 esr = get_mal_dcrn(mal, MAL_ESR);    in mal_serr() local
     223  set_mal_dcrn(mal, MAL_ESR, esr);    in mal_serr()
     225  MAL_DBG(mal, "SERR %08x" NL, esr);    in mal_serr()
     227  if (esr & MAL_ESR_EVB) {    in mal_serr()
     228  if (esr & MAL_ESR_DE) {    in mal_serr()
     235  if (esr & MAL_ESR_PEIN) {    in mal_serr()
     243  mal->index, esr);    in mal_serr()
     253  mal->index, esr);    in mal_serr()
     355  if (esr & MAL_ESR_EVB) {    in mal_int()
     357  if (esr & MAL_ESR_DE) {    in mal_int()
     [all …]
/linux-6.3-rc2/sound/soc/fsl/
  fsl_esai.c
     102  u32 esr;    in esai_isr() local
     121  if (esr & ESAI_ESR_RFF_MASK)    in esai_isr()
     124  if (esr & ESAI_ESR_TFE_MASK)    in esai_isr()
     127  if (esr & ESAI_ESR_TLS_MASK)    in esai_isr()
     130  if (esr & ESAI_ESR_TDE_MASK)    in esai_isr()
     133  if (esr & ESAI_ESR_TED_MASK)    in esai_isr()
     136  if (esr & ESAI_ESR_TD_MASK)    in esai_isr()
     139  if (esr & ESAI_ESR_RLS_MASK)    in esai_isr()
     142  if (esr & ESAI_ESR_RDE_MASK)    in esai_isr()
     145  if (esr & ESAI_ESR_RED_MASK)    in esai_isr()
     [all …]
/linux-6.3-rc2/arch/powerpc/platforms/44x/
  machine_check.c
      14  unsigned long reason = regs->esr;    in machine_check_440A()
      51  unsigned long reason = regs->esr;    in machine_check_47x()
/linux-6.3-rc2/arch/arm64/kvm/hyp/nvhe/
  sys_regs.c
      37  u64 esr = (ESR_ELx_EC_UNKNOWN << ESR_ELx_EC_SHIFT);    in inject_undef64() local
      46  write_sysreg_el1(esr, SYS_ESR);    in inject_undef64()
     485  unsigned long esr = kvm_vcpu_get_esr(vcpu);    in kvm_handle_pvm_sysreg() local
     488  params = esr_sys64_to_params(esr);    in kvm_handle_pvm_sysreg()
/linux-6.3-rc2/arch/mips/include/asm/octeon/
  cvmx-sli-defs.h
     107  __BITFIELD_FIELD(uint64_t esr:2,
     119  __BITFIELD_FIELD(uint64_t esr:2,
/linux-6.3-rc2/arch/arm64/kernel/probes/
  uprobes.c
     169  unsigned long esr)    in uprobe_breakpoint_handler() argument
     178  unsigned long esr)    in uprobe_single_step_handler() argument
/linux-6.3-rc2/arch/arm64/kvm/hyp/
  vgic-v3-sr.c
     476  u64 esr = kvm_vcpu_get_esr(vcpu);    in __vgic_v3_get_group() local
     477  u8 crm = (esr & ESR_ELx_SYS64_ISS_CRM_MASK) >> ESR_ELx_SYS64_ISS_CRM_SHIFT;    in __vgic_v3_get_group()
    1019  u64 esr;    in __vgic_v3_perform_cpuif_access() local
    1025  esr = kvm_vcpu_get_esr(vcpu);    in __vgic_v3_perform_cpuif_access()
    1032  sysreg = esr_cp15_to_sysreg(esr);    in __vgic_v3_perform_cpuif_access()
    1034  sysreg = esr_sys64_to_sysreg(esr);    in __vgic_v3_perform_cpuif_access()
    1037  is_read = (esr & ESR_ELx_SYS64_ISS_DIR_MASK) == ESR_ELx_SYS64_ISS_DIR_READ;    in __vgic_v3_perform_cpuif_access()
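__vgic_v3_perform_cpuif_access() works out which GIC system register the guest touched and whether the access was a read, from the same SYS64/CP15 syndrome fields used elsewhere: CRm sits in ISS[4:1] and the direction bit is ISS[0]. A minimal restatement of those two extractions:

    #include <stdbool.h>
    #include <stdint.h>

    #define ESR_ELx_SYS64_ISS_CRM_SHIFT	1
    #define ESR_ELx_SYS64_ISS_CRM_MASK	(0xFUL << ESR_ELx_SYS64_ISS_CRM_SHIFT)
    #define ESR_ELx_SYS64_ISS_DIR_MASK	0x1UL
    #define ESR_ELx_SYS64_ISS_DIR_READ	0x1UL	/* MRS / MRC */

    /* CRm operand of the trapped system-register access, as used by
     * __vgic_v3_get_group(). */
    static uint8_t esr_sys_crm(uint64_t esr)
    {
    	return (esr & ESR_ELx_SYS64_ISS_CRM_MASK) >> ESR_ELx_SYS64_ISS_CRM_SHIFT;
    }

    /* True for a register read (MRS/MRC), false for a write (MSR/MCR). */
    static bool esr_sys_is_read(uint64_t esr)
    {
    	return (esr & ESR_ELx_SYS64_ISS_DIR_MASK) == ESR_ELx_SYS64_ISS_DIR_READ;
    }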