/arch/x86/kvm/svm/svm_ops.h
     12      _ASM_EXTABLE(1b, %l[fault]) \
     13      ::: clobber : fault); \
     15  fault: \
     22      _ASM_EXTABLE(1b, %l[fault]) \
     23      :: op1 : clobber : fault); \
     25  fault: \
     32      _ASM_EXTABLE(1b, %l[fault]) \
     33      :: op1, op2 : clobber : fault); \
     35  fault: \
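These svm_ops.h hits are the tail end of KVM's asm-goto wrappers for SVM instructions: the instruction sits at local label 1, and _ASM_EXTABLE() adds an exception-table entry that diverts a faulting execution to the C-level fault label instead of oopsing. A minimal sketch of the zero-operand variant, assuming the handler is kvm_spurious_fault() as elsewhere in KVM (the wrapper name and surrounding helpers here are illustrative):

    /* Used from a void helper; "return" leaves that helper early. */
    #define svm_asm(insn, clobber...)                              \
    do {                                                           \
            asm goto("1: " __stringify(insn) "\n\t"                \
                     /* if 1b faults, resume at the C label */     \
                     _ASM_EXTABLE(1b, %l[fault])                   \
                     ::: clobber : fault);                         \
            return;                                                \
    fault:                                                         \
            kvm_spurious_fault();                                  \
    } while (0)

The op1/op2 variants visible at lines 23 and 33 differ only in feeding one or two inputs into the asm statement.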
|
/arch/nios2/kernel/misaligned.c
     72  unsigned int fault;                             in handle_unaligned_c() local
     85  fault = 0;                                      in handle_unaligned_c()
     98  fault |= __get_user(d0, (u8 *)(addr+0));        in handle_unaligned_c()
     99  fault |= __get_user(d1, (u8 *)(addr+1));        in handle_unaligned_c()
    116  fault |= __get_user(d0, (u8 *)(addr+0));        in handle_unaligned_c()
    117  fault |= __get_user(d1, (u8 *)(addr+1));        in handle_unaligned_c()
    140  fault |= __get_user(d0, (u8 *)(addr+0));        in handle_unaligned_c()
    141  fault |= __get_user(d1, (u8 *)(addr+1));        in handle_unaligned_c()
    142  fault |= __get_user(d2, (u8 *)(addr+2));        in handle_unaligned_c()
    143  fault |= __get_user(d3, (u8 *)(addr+3));        in handle_unaligned_c()
    [all …]
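The shape of these hits: handle_unaligned_c() emulates a misaligned access as individual byte loads, OR-ing every __get_user() return value into fault so a single test catches any failing byte. A hedged sketch of the halfword case (the helper name and error handling are illustrative, not the nios2 code itself):

    static int emulate_lhu_sketch(unsigned long addr, unsigned long *val)
    {
            u8 d0, d1;
            unsigned int fault = 0;

            /* each __get_user() returns 0 or -EFAULT; accumulate */
            fault |= __get_user(d0, (u8 *)(addr + 0));
            fault |= __get_user(d1, (u8 *)(addr + 1));
            if (fault)
                    return -EFAULT; /* caller raises the signal */

            *val = d0 | (d1 << 8); /* reassemble, little-endian */
            return 0;
    }

Accumulating into one flag keeps the sequence branch-free until the end, at the cost of attempting every byte even after an early failure.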
|
/arch/x86/kvm/mmu/mmu_internal.h
    343      struct kvm_page_fault *fault)                 in kvm_mmu_prepare_memory_fault_exit() argument
    346      PAGE_SIZE, fault->write, fault->exec,         in kvm_mmu_prepare_memory_fault_exit()
    347      fault->is_private);                           in kvm_mmu_prepare_memory_fault_exit()
    354  struct kvm_page_fault fault = {                   in kvm_mmu_do_page_fault() local
    382  fault.gfn = gpa_to_gfn(fault.addr) & ~kvm_gfn_direct_bits(vcpu->kvm);   in kvm_mmu_do_page_fault()
    383  fault.slot = kvm_vcpu_gfn_to_memslot(vcpu, fault.gfn);                  in kvm_mmu_do_page_fault()
    391  r = kvm_tdp_page_fault(vcpu, &fault);             in kvm_mmu_do_page_fault()
    393  r = vcpu->arch.mmu->page_fault(vcpu, &fault);     in kvm_mmu_do_page_fault()
    400  if (r == RET_PF_EMULATE && fault.is_private) {    in kvm_mmu_do_page_fault()
    402  kvm_mmu_prepare_memory_fault_exit(vcpu, &fault);  in kvm_mmu_do_page_fault()
    [all …]
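Read together, the kvm_mmu_do_page_fault() hits outline the dispatch: build a kvm_page_fault on the stack, resolve the gfn and memslot, route the fault to the TDP or the per-MMU handler, and turn an emulation request on private memory into a userspace memory-fault exit. A simplified reconstruction; anything not visible in the hits above (the is_tdp test, the -EFAULT return) is an assumption:

    static int page_fault_sketch(struct kvm_vcpu *vcpu, gpa_t gpa)
    {
            struct kvm_page_fault fault = {
                    .addr = gpa,    /* remaining fields elided */
            };
            int r;

            fault.gfn = gpa_to_gfn(fault.addr) & ~kvm_gfn_direct_bits(vcpu->kvm);
            fault.slot = kvm_vcpu_gfn_to_memslot(vcpu, fault.gfn);

            /* direct call for the common TDP case (assumed predicate) */
            if (fault.is_tdp)
                    r = kvm_tdp_page_fault(vcpu, &fault);
            else
                    r = vcpu->arch.mmu->page_fault(vcpu, &fault);

            /* private memory cannot be emulated; let userspace decide */
            if (r == RET_PF_EMULATE && fault.is_private) {
                    kvm_mmu_prepare_memory_fault_exit(vcpu, &fault);
                    return -EFAULT;
            }
            return r;
    }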
|
/arch/x86/kvm/mmu/paging_tmpl.h
     92  struct x86_exception fault;                       member
    516  walker->fault.address = addr;                     in FNAME()
    621  gfn_t base_gfn = fault->gfn;                      in FNAME()
    700  if (fault->write && table_gfn == fault->gfn)      in FNAME()
    743  base_gfn, fault->pfn, fault);                     in FNAME()
    770  WARN_ON_ONCE(fault->is_tdp);                      in FNAME()
    784  if (!fault->prefetch)                             in FNAME()
    790  fault->gfn = walker.gfn;                          in FNAME()
    792  fault->slot = kvm_vcpu_gfn_to_memslot(vcpu, fault->gfn);        in FNAME()
    815  !is_cr0_wp(vcpu->arch.mmu) && !fault->user && fault->slot) {    in FNAME()
    [all …]
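The "in FNAME()" context above is paging_tmpl.h's template trick: the header is compiled once per guest paging mode with PTTYPE predefined, and FNAME() pastes the mode into every symbol so the different walkers coexist in one object file. Roughly (a sketch of the idiom, not the exact file):

    #if PTTYPE == 64
    #define FNAME(name) paging64_##name
    #elif PTTYPE == 32
    #define FNAME(name) paging32_##name
    #endif

    /* FNAME(page_fault) expands to paging64_page_fault, etc. */
    static int FNAME(page_fault)(struct kvm_vcpu *vcpu,
                                 struct kvm_page_fault *fault);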
|
/arch/mips/kernel/unaligned.c
    175  goto fault;                                     in emulate_load_store_insn()
    184  goto fault;                                     in emulate_load_store_insn()
    299  goto fault;                                     in emulate_load_store_insn()
    314  goto fault;                                     in emulate_load_store_insn()
    329  goto fault;                                     in emulate_load_store_insn()
    348  goto fault;                                     in emulate_load_store_insn()
    371  goto fault;                                     in emulate_load_store_insn()
    584  fault:                                          in emulate_load_store_insn()
    656  goto fault;                                     in emulate_load_store_microMIPS()
   1211  fault:                                          in emulate_load_store_microMIPS()
    [all …]
|
/arch/arc/kernel/unaligned.c
     51  goto fault; \
     66  goto fault; \
     93  goto fault; \
    126  goto fault; \
    160  fault: state->fault = 1;                        in fixup_load()
    180  goto fault;                                     in fixup_store()
    192  fault: state->fault = 1;                        in fixup_store()
    226  goto fault;                                     in misaligned_fixup()
    230  goto fault;                                     in misaligned_fixup()
    238  goto fault;                                     in misaligned_fixup()
    [all …]
|
/arch/mips/loongson64/cop2-ex.c
     76  goto fault;                                     in loongson_cu2_call()
     80  goto fault;                                     in loongson_cu2_call()
     92  goto fault;                                     in loongson_cu2_call()
     96  goto fault;                                     in loongson_cu2_call()
    118  goto fault;                                     in loongson_cu2_call()
    123  goto fault;                                     in loongson_cu2_call()
    135  goto fault;                                     in loongson_cu2_call()
    141  goto fault;                                     in loongson_cu2_call()
    165  goto fault;                                     in loongson_cu2_call()
    176  goto fault;                                     in loongson_cu2_call()
    [all …]
|
/arch/powerpc/mm/fault.c
    141      vm_fault_t fault)                           in do_sigbus() argument
    154  if (fault & VM_FAULT_HWPOISON_LARGE)            in do_sigbus()
    156  if (fault & VM_FAULT_HWPOISON)                  in do_sigbus()
    169      vm_fault_t fault)                           in mm_fault_error() argument
    179  if (fault & VM_FAULT_OOM) {                     in mm_fault_error()
    191  else if (fault & VM_FAULT_SIGSEGV)              in mm_fault_error()
    423  vm_fault_t fault, major = 0;                    in ___do_page_fault() local
    509  if (!(fault & VM_FAULT_RETRY)) {                in ___do_page_fault()
    514  if (fault & VM_FAULT_MAJOR)                     in ___do_page_fault()
    550  major |= fault & VM_FAULT_MAJOR;                in ___do_page_fault()
    [all …]
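do_sigbus() and mm_fault_error() perform the decode step that most handlers below repeat: translate the VM_FAULT_* bits returned by handle_mm_fault() into the OOM path or a signal. A generic sketch of that mapping (signal codes and the hwpoison details vary by architecture; this is not the powerpc code verbatim):

    static void mm_fault_error_sketch(struct pt_regs *regs,
                                      unsigned long addr, vm_fault_t fault)
    {
            if (fault & VM_FAULT_OOM) {
                    /* defer to the OOM killer rather than SIGKILL */
                    pagefault_out_of_memory();
            } else if (fault & VM_FAULT_SIGSEGV) {
                    force_sig_fault(SIGSEGV, SEGV_MAPERR, (void __user *)addr);
            } else if (fault & (VM_FAULT_SIGBUS | VM_FAULT_HWPOISON |
                                VM_FAULT_HWPOISON_LARGE)) {
                    force_sig_fault(SIGBUS, BUS_ADRERR, (void __user *)addr);
            }
    }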
|
/arch/m68k/mm/fault.c
     75  vm_fault_t fault;                                        in do_page_fault() local
    141  fault = handle_mm_fault(vma, address, flags, regs);      in do_page_fault()
    142  pr_debug("handle_mm_fault returns %x\n", fault);         in do_page_fault()
    144  if (fault_signal_pending(fault, regs)) {                 in do_page_fault()
    151  if (fault & VM_FAULT_COMPLETED)                          in do_page_fault()
    154  if (unlikely(fault & VM_FAULT_ERROR)) {                  in do_page_fault()
    155  if (fault & VM_FAULT_OOM)                                in do_page_fault()
    157  else if (fault & VM_FAULT_SIGSEGV)                       in do_page_fault()
    159  else if (fault & VM_FAULT_SIGBUS)                        in do_page_fault()
    164  if (fault & VM_FAULT_RETRY) {                            in do_page_fault()
|
/arch/powerpc/lib/checksum_32.S
    240  fault:                                          label
    244  EX_TABLE(70b, fault);
    245  EX_TABLE(71b, fault);
    246  EX_TABLE(72b, fault);
    247  EX_TABLE(73b, fault);
    248  EX_TABLE(54b, fault);
    269  EX_TABLE(30b, fault);
    270  EX_TABLE(31b, fault);
    271  EX_TABLE(40b, fault);
    272  EX_TABLE(41b, fault);
    [all …]
|
/arch/hexagon/mm/vm_fault.c
     43  vm_fault_t fault;                                        in do_page_fault() local
     84  fault = handle_mm_fault(vma, address, flags, regs);      in do_page_fault()
     86  if (fault_signal_pending(fault, regs)) {                 in do_page_fault()
     93  if (fault & VM_FAULT_COMPLETED)                          in do_page_fault()
     97  if (likely(!(fault & VM_FAULT_ERROR))) {                 in do_page_fault()
     98  if (fault & VM_FAULT_RETRY) {                            in do_page_fault()
    113  if (fault & VM_FAULT_OOM) {                              in do_page_fault()
    121  if (fault & VM_FAULT_SIGBUS) {                           in do_page_fault()
|
/arch/alpha/mm/fault.c
     92  vm_fault_t fault;                                        in do_page_fault() local
    145  fault = handle_mm_fault(vma, address, flags, regs);      in do_page_fault()
    147  if (fault_signal_pending(fault, regs)) {                 in do_page_fault()
    154  if (fault & VM_FAULT_COMPLETED)                          in do_page_fault()
    157  if (unlikely(fault & VM_FAULT_ERROR)) {                  in do_page_fault()
    158  if (fault & VM_FAULT_OOM)                                in do_page_fault()
    160  else if (fault & VM_FAULT_SIGSEGV)                       in do_page_fault()
    162  else if (fault & VM_FAULT_SIGBUS)                        in do_page_fault()
    167  if (fault & VM_FAULT_RETRY) {                            in do_page_fault()
|
/arch/x86/hyperv/nested.c
     29  goto fault;                                     in hyperv_flush_guest_mapping()
     37  goto fault;                                     in hyperv_flush_guest_mapping()
     50  fault:                                          in hyperv_flush_guest_mapping()
     97  goto fault;                                     in hyperv_flush_guest_mapping_range()
    105  goto fault;                                     in hyperv_flush_guest_mapping_range()
    114  goto fault;                                     in hyperv_flush_guest_mapping_range()
    126  fault:                                          in hyperv_flush_guest_mapping_range()
|
/arch/riscv/mm/fault.c
    124  if (fault & VM_FAULT_OOM) {                              in mm_fault_error()
    135  } else if (fault & VM_FAULT_SIGSEGV) {                   in mm_fault_error()
    286  vm_fault_t fault;                                        in handle_page_fault() local
    366  if (!(fault & VM_FAULT_RETRY)) {                         in handle_page_fault()
    371  if (fault & VM_FAULT_MAJOR)                              in handle_page_fault()
    374  if (fault_signal_pending(fault, regs)) {                 in handle_page_fault()
    413  if (fault_signal_pending(fault, regs)) {                 in handle_page_fault()
    420  if (fault & VM_FAULT_COMPLETED)                          in handle_page_fault()
    423  if (unlikely(fault & VM_FAULT_RETRY)) {                  in handle_page_fault()
    437  if (unlikely(fault & VM_FAULT_ERROR)) {                  in handle_page_fault()
    [all …]
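The riscv hits also show the retry protocol shared by these handlers: VM_FAULT_RETRY means handle_mm_fault() dropped the mmap lock mid-fault, so the handler re-takes it and tries once more with FAULT_FLAG_TRIED, while VM_FAULT_COMPLETED means the fault finished and the lock is already gone. A condensed sketch with the vma validation and accounting omitted:

    static void fault_retry_sketch(struct mm_struct *mm, unsigned long addr,
                                   struct pt_regs *regs)
    {
            unsigned int flags = FAULT_FLAG_DEFAULT;
            struct vm_area_struct *vma;
            vm_fault_t fault;

    retry:
            mmap_read_lock(mm);
            vma = find_vma(mm, addr);       /* validity checks omitted */

            fault = handle_mm_fault(vma, addr, flags, regs);

            /* a fatal signal arrived; the lock was already dropped */
            if (fault_signal_pending(fault, regs))
                    return;

            /* handle_mm_fault() released the lock itself */
            if (fault & VM_FAULT_COMPLETED)
                    return;

            if (unlikely(fault & VM_FAULT_RETRY)) {
                    /* lock dropped for I/O; retry, this time to completion */
                    flags |= FAULT_FLAG_TRIED;
                    goto retry;
            }

            mmap_read_unlock(mm);
    }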
|
/arch/microblaze/mm/fault.c
     93  vm_fault_t fault;                                        in do_page_fault() local
    221  fault = handle_mm_fault(vma, address, flags, regs);      in do_page_fault()
    223  if (fault_signal_pending(fault, regs)) {                 in do_page_fault()
    230  if (fault & VM_FAULT_COMPLETED)                          in do_page_fault()
    233  if (unlikely(fault & VM_FAULT_ERROR)) {                  in do_page_fault()
    234  if (fault & VM_FAULT_OOM)                                in do_page_fault()
    236  else if (fault & VM_FAULT_SIGSEGV)                       in do_page_fault()
    238  else if (fault & VM_FAULT_SIGBUS)                        in do_page_fault()
    243  if (fault & VM_FAULT_RETRY) {                            in do_page_fault()
|
/arch/nios2/mm/fault.c
     50  vm_fault_t fault;                                        in do_page_fault() local
    124  fault = handle_mm_fault(vma, address, flags, regs);      in do_page_fault()
    126  if (fault_signal_pending(fault, regs)) {                 in do_page_fault()
    133  if (fault & VM_FAULT_COMPLETED)                          in do_page_fault()
    136  if (unlikely(fault & VM_FAULT_ERROR)) {                  in do_page_fault()
    137  if (fault & VM_FAULT_OOM)                                in do_page_fault()
    139  else if (fault & VM_FAULT_SIGSEGV)                       in do_page_fault()
    141  else if (fault & VM_FAULT_SIGBUS)                        in do_page_fault()
    146  if (fault & VM_FAULT_RETRY) {                            in do_page_fault()
|
/arch/arc/mm/fault.c
     80  vm_fault_t fault = VM_FAULT_SIGSEGV; /* handle_mm_fault() output */   in do_page_fault() local
    135  fault = handle_mm_fault(vma, address, flags, regs);      in do_page_fault()
    138  if (fault_signal_pending(fault, regs)) {                 in do_page_fault()
    145  if (fault & VM_FAULT_COMPLETED)                          in do_page_fault()
    151  if (unlikely(fault & VM_FAULT_RETRY)) {                  in do_page_fault()
    164  if (likely(!(fault & VM_FAULT_ERROR)))                   in do_page_fault()
    171  if (fault & VM_FAULT_OOM) {                              in do_page_fault()
    176  if (fault & VM_FAULT_SIGBUS) {                           in do_page_fault()
|
/arch/openrisc/mm/fault.c
     53  vm_fault_t fault;                                        in do_page_fault() local
    166  fault = handle_mm_fault(vma, address, flags, regs);      in do_page_fault()
    168  if (fault_signal_pending(fault, regs)) {                 in do_page_fault()
    175  if (fault & VM_FAULT_COMPLETED)                          in do_page_fault()
    178  if (unlikely(fault & VM_FAULT_ERROR)) {                  in do_page_fault()
    179  if (fault & VM_FAULT_OOM)                                in do_page_fault()
    181  else if (fault & VM_FAULT_SIGSEGV)                       in do_page_fault()
    183  else if (fault & VM_FAULT_SIGBUS)                        in do_page_fault()
    189  if (fault & VM_FAULT_RETRY) {                            in do_page_fault()
|
/arch/csky/mm/fault.c
     73  static inline void mm_fault_error(struct pt_regs *regs, unsigned long addr, vm_fault_t fault)   in mm_fault_error() argument
     77  if (fault & VM_FAULT_OOM) {                              in mm_fault_error()
     88  } else if (fault & VM_FAULT_SIGBUS) {                    in mm_fault_error()
    195  vm_fault_t fault;                                        in do_page_fault() local
    263  fault = handle_mm_fault(vma, addr, flags, regs);         in do_page_fault()
    270  if (fault_signal_pending(fault, regs)) {                 in do_page_fault()
    277  if (fault & VM_FAULT_COMPLETED)                          in do_page_fault()
    280  if (unlikely((fault & VM_FAULT_RETRY) && (flags & FAULT_FLAG_ALLOW_RETRY))) {   in do_page_fault()
    293  if (unlikely(fault & VM_FAULT_ERROR)) {                  in do_page_fault()
    294  mm_fault_error(regs, addr, fault);                       in do_page_fault()
|
/arch/xtensa/mm/fault.c
     97  vm_fault_t fault;                                        in do_page_fault() local
    159  fault = handle_mm_fault(vma, address, flags, regs);      in do_page_fault()
    161  if (fault_signal_pending(fault, regs)) {                 in do_page_fault()
    168  if (fault & VM_FAULT_COMPLETED)                          in do_page_fault()
    171  if (unlikely(fault & VM_FAULT_ERROR)) {                  in do_page_fault()
    172  if (fault & VM_FAULT_OOM)                                in do_page_fault()
    174  else if (fault & VM_FAULT_SIGSEGV)                       in do_page_fault()
    176  else if (fault & VM_FAULT_SIGBUS)                        in do_page_fault()
    181  if (fault & VM_FAULT_RETRY) {                            in do_page_fault()
|
/arch/parisc/mm/fault.c
    274  vm_fault_t fault = 0;                                    in do_page_fault() local
    318  fault = handle_mm_fault(vma, address, flags, regs);      in do_page_fault()
    320  if (fault_signal_pending(fault, regs)) {                 in do_page_fault()
    329  if (fault & VM_FAULT_COMPLETED)                          in do_page_fault()
    332  if (unlikely(fault & VM_FAULT_ERROR)) {                  in do_page_fault()
    338  if (fault & VM_FAULT_OOM)                                in do_page_fault()
    340  else if (fault & VM_FAULT_SIGSEGV)                       in do_page_fault()
    342  else if (fault & (VM_FAULT_SIGBUS|VM_FAULT_HWPOISON|     in do_page_fault()
    347  if (fault & VM_FAULT_RETRY) {                            in do_page_fault()
    415  if (fault & VM_FAULT_HWPOISON_LARGE)                     in do_page_fault()
    [all …]
|
/arch/arm64/kvm/hyp/include/hyp/fault.h
     82  static inline bool __get_fault_info(u64 esr, struct kvm_vcpu_fault_info *fault)   in __get_fault_info() argument
     86  fault->far_el2 = read_sysreg_el2(SYS_FAR);                    in __get_fault_info()
     87  fault->hpfar_el2 = 0;                                         in __get_fault_info()
     93  else if (!__translate_far_to_hpfar(fault->far_el2, &hpfar))   in __get_fault_info()
    100  fault->hpfar_el2 = hpfar | HPFAR_EL2_NS;                      in __get_fault_info()
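These hits outline __get_fault_info() on arm64: record the faulting VA from FAR_EL2 and, when the hardware did not latch a usable HPFAR_EL2, recover the IPA by translating the FAR in software. A reconstruction from the lines shown; the ESR checks between them are elided by the search output and only hinted at here:

    static inline bool get_fault_info_sketch(u64 esr,
                                             struct kvm_vcpu_fault_info *fault)
    {
            u64 hpfar;

            fault->far_el2 = read_sysreg_el2(SYS_FAR);
            fault->hpfar_el2 = 0;

            /* ... esr-based checks decide whether HPFAR_EL2 is valid ... */

            /* fall back to walking the guest translation in software */
            if (!__translate_far_to_hpfar(fault->far_el2, &hpfar))
                    return false;

            fault->hpfar_el2 = hpfar | HPFAR_EL2_NS;
            return true;
    }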
|
/arch/s390/mm/fault.c
    264  vm_fault_t fault;                                        in do_exception() local
    300  if (!(fault & VM_FAULT_RETRY)) {                         in do_exception()
    305  if (fault & VM_FAULT_MAJOR)                              in do_exception()
    308  if (fault_signal_pending(fault, regs)) {                 in do_exception()
    320  fault = handle_mm_fault(vma, address, flags, regs);      in do_exception()
    321  if (fault_signal_pending(fault, regs)) {                 in do_exception()
    327  if (fault & VM_FAULT_COMPLETED)                          in do_exception()
    329  if (fault & VM_FAULT_RETRY) {                            in do_exception()
    335  if (!(fault & VM_FAULT_ERROR))                           in do_exception()
    337  if (fault & VM_FAULT_OOM) {                              in do_exception()
    [all …]
|
/arch/sh/mm/fault.c
    314      unsigned long address, vm_fault_t fault)             in mm_fault_error() argument
    320  if (fault_signal_pending(fault, regs)) {                 in mm_fault_error()
    327  if (!(fault & VM_FAULT_RETRY))                           in mm_fault_error()
    330  if (!(fault & VM_FAULT_ERROR))                           in mm_fault_error()
    333  if (fault & VM_FAULT_OOM) {                              in mm_fault_error()
    347  if (fault & VM_FAULT_SIGBUS)                             in mm_fault_error()
    349  else if (fault & VM_FAULT_SIGSEGV)                       in mm_fault_error()
    397  vm_fault_t fault;                                        in do_page_fault() local
    469  fault = handle_mm_fault(vma, address, flags, regs);      in do_page_fault()
    476  if (fault & VM_FAULT_COMPLETED)                          in do_page_fault()
    [all …]
|
/arch/arm/mm/fault.c
    269  vm_fault_t fault;                                        in do_page_fault() local
    324  fault = 0;                                               in do_page_fault()
    332  if (!(fault & VM_FAULT_RETRY)) {                         in do_page_fault()
    337  if (fault & VM_FAULT_MAJOR)                              in do_page_fault()
    351  fault = 0;                                               in do_page_fault()
    362  fault = 0;                                               in do_page_fault()
    380  if (fault & VM_FAULT_COMPLETED)                          in do_page_fault()
    383  if (!(fault & VM_FAULT_ERROR)) {                         in do_page_fault()
    384  if (fault & VM_FAULT_RETRY) {                            in do_page_fault()
    406  if (fault & VM_FAULT_OOM) {                              in do_page_fault()
    [all …]
|