
Searched refs:vmf (Results 1 – 25 of 43) sorted by relevance

/arch/x86/entry/vdso/
vma.c 53 struct vm_area_struct *vma, struct vm_fault *vmf) in vdso_fault() argument
57 if (!image || (vmf->pgoff << PAGE_SHIFT) >= image->size) in vdso_fault()
60 vmf->page = virt_to_page(image->data + (vmf->pgoff << PAGE_SHIFT)); in vdso_fault()
61 get_page(vmf->page); in vdso_fault()
92 struct vm_area_struct *vma, struct vm_fault *vmf) in vvar_vclock_fault() argument
94 switch (vmf->pgoff) { in vvar_vclock_fault()
102 return vmf_insert_pfn_prot(vma, vmf->address, in vvar_vclock_fault()
113 return vmf_insert_pfn(vma, vmf->address, pfn); in vvar_vclock_fault()
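Taken together, the vdso hits above show the two common shapes of a fault handler that consumes a struct vm_fault: vdso_fault() hands a refcounted page back through vmf->page, while vvar_vclock_fault() installs a raw PFN with vmf_insert_pfn(). A minimal sketch of the page-backed variant for a generic vm_operations_struct .fault callback follows; struct my_image and my_fault() are hypothetical stand-ins, only the vmf usage mirrors the results.

struct my_image {			/* hypothetical backing object */
	void *data;			/* kernel virtual address of the backing store */
	unsigned long size;		/* size in bytes */
};

/* Sketch of a page-backed .fault handler (not from the results above). */
static vm_fault_t my_fault(struct vm_fault *vmf)
{
	struct my_image *image = vmf->vma->vm_private_data;

	/* Reject faults past the end of the backing image. */
	if (!image || (vmf->pgoff << PAGE_SHIFT) >= image->size)
		return VM_FAULT_SIGBUS;

	/* Hand the page to the core MM with a reference held for it. */
	vmf->page = virt_to_page(image->data + (vmf->pgoff << PAGE_SHIFT));
	get_page(vmf->page);
	return 0;
}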
/arch/x86/kernel/cpu/sgx/
virt.c 74 static vm_fault_t sgx_vepc_fault(struct vm_fault *vmf) in sgx_vepc_fault() argument
76 struct vm_area_struct *vma = vmf->vma; in sgx_vepc_fault()
81 ret = __sgx_vepc_fault(vepc, vma, vmf->address); in sgx_vepc_fault()
87 if (ret == -EBUSY && (vmf->flags & FAULT_FLAG_ALLOW_RETRY)) { in sgx_vepc_fault()
encl.c 430 static vm_fault_t sgx_vma_fault(struct vm_fault *vmf) in sgx_vma_fault() argument
432 unsigned long addr = (unsigned long)vmf->address; in sgx_vma_fault()
433 struct vm_area_struct *vma = vmf->vma; in sgx_vma_fault()
/arch/powerpc/platforms/cell/spufs/
file.c 230 spufs_mem_mmap_fault(struct vm_fault *vmf) in spufs_mem_mmap_fault() argument
232 struct vm_area_struct *vma = vmf->vma; in spufs_mem_mmap_fault()
237 offset = vmf->pgoff << PAGE_SHIFT; in spufs_mem_mmap_fault()
242 vmf->address, offset); in spufs_mem_mmap_fault()
254 ret = vmf_insert_pfn(vma, vmf->address, pfn); in spufs_mem_mmap_fault()
353 ret = vmf_insert_pfn(vmf->vma, vmf->address, in spufs_ps_fault()
1016 spufs_signal1_mmap_fault(struct vm_fault *vmf) in spufs_signal1_mmap_fault() argument
1150 spufs_signal2_mmap_fault(struct vm_fault *vmf) in spufs_signal2_mmap_fault() argument
1277 spufs_mss_mmap_fault(struct vm_fault *vmf) in spufs_mss_mmap_fault() argument
1338 spufs_psmap_mmap_fault(struct vm_fault *vmf) in spufs_psmap_mmap_fault() argument
[all …]
/arch/hexagon/include/asm/
cacheflush.h 61 static inline void update_mmu_cache_range(struct vm_fault *vmf, in update_mmu_cache_range() argument
/arch/microblaze/include/asm/
tlbflush.h 36 #define update_mmu_cache_range(vmf, vma, addr, ptep, nr) do { } while (0) argument
/arch/csky/abiv1/
cacheflush.c 44 void update_mmu_cache_range(struct vm_fault *vmf, struct vm_area_struct *vma, in update_mmu_cache_range() argument
/arch/powerpc/kvm/
book3s_xive_native.c 228 static vm_fault_t xive_native_esb_fault(struct vm_fault *vmf) in xive_native_esb_fault() argument
230 struct vm_area_struct *vma = vmf->vma; in xive_native_esb_fault()
246 page_offset = vmf->pgoff - vma->vm_pgoff; in xive_native_esb_fault()
280 vmf_insert_pfn(vma, vmf->address, page >> PAGE_SHIFT); in xive_native_esb_fault()
288 static vm_fault_t xive_native_tima_fault(struct vm_fault *vmf) in xive_native_tima_fault() argument
290 struct vm_area_struct *vma = vmf->vma; in xive_native_tima_fault()
292 switch (vmf->pgoff - vma->vm_pgoff) { in xive_native_tima_fault()
297 vmf_insert_pfn(vma, vmf->address, xive_tima_os >> PAGE_SHIFT); in xive_native_tima_fault()
book3s_64_vio.c 229 static vm_fault_t kvm_spapr_tce_fault(struct vm_fault *vmf) in kvm_spapr_tce_fault() argument
231 struct kvmppc_spapr_tce_table *stt = vmf->vma->vm_file->private_data; in kvm_spapr_tce_fault()
234 if (vmf->pgoff >= kvmppc_tce_pages(stt->size)) in kvm_spapr_tce_fault()
237 page = kvm_spapr_get_tce_page(stt, vmf->pgoff); in kvm_spapr_tce_fault()
242 vmf->page = page; in kvm_spapr_tce_fault()
book3s_hv_uvmem.c 998 static vm_fault_t kvmppc_uvmem_migrate_to_ram(struct vm_fault *vmf) in kvmppc_uvmem_migrate_to_ram() argument
1000 struct kvmppc_uvmem_page_pvt *pvt = vmf->page->zone_device_data; in kvmppc_uvmem_migrate_to_ram()
1002 if (kvmppc_svm_page_out(vmf->vma, vmf->address, in kvmppc_uvmem_migrate_to_ram()
1003 vmf->address + PAGE_SIZE, PAGE_SHIFT, in kvmppc_uvmem_migrate_to_ram()
1004 pvt->kvm, pvt->gpa, vmf->page)) in kvmppc_uvmem_migrate_to_ram()
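The spufs and XIVE results take the PFN route instead: they derive an offset from vmf->pgoff relative to vma->vm_pgoff and install the mapping directly with vmf_insert_pfn(), which itself returns a vm_fault_t. A hedged sketch of that pattern; my_mmio_fault() and my_mmio_base are hypothetical, not code from the files listed.

static unsigned long my_mmio_base;	/* hypothetical device physical base address */

/* Sketch of a PFN-backed fault handler in the style of the hits above. */
static vm_fault_t my_mmio_fault(struct vm_fault *vmf)
{
	struct vm_area_struct *vma = vmf->vma;
	unsigned long offset = (vmf->pgoff - vma->vm_pgoff) << PAGE_SHIFT;

	/* vmf_insert_pfn() maps the page and reports VM_FAULT_NOPAGE on success. */
	return vmf_insert_pfn(vma, vmf->address,
			      (my_mmio_base + offset) >> PAGE_SHIFT);
}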
/arch/csky/abiv2/
cacheflush.c 10 void update_mmu_cache_range(struct vm_fault *vmf, struct vm_area_struct *vma, in update_mmu_cache_range() argument
/arch/m68k/include/asm/
pgtable_mm.h 139 static inline void update_mmu_cache_range(struct vm_fault *vmf, in update_mmu_cache_range() argument
/arch/sh/include/asm/
pgtable.h 105 static inline void update_mmu_cache_range(struct vm_fault *vmf, in update_mmu_cache_range() argument
/arch/powerpc/platforms/book3s/
vas-api.c 395 static vm_fault_t vas_mmap_fault(struct vm_fault *vmf) in vas_mmap_fault() argument
397 struct vm_area_struct *vma = vmf->vma; in vas_mmap_fault()
423 if (txwin->task_ref.vma != vmf->vma) { in vas_mmap_fault()
/arch/arc/include/asm/
pgtable-bits-arcv2.h 104 void update_mmu_cache_range(struct vm_fault *vmf, struct vm_area_struct *vma,
/arch/powerpc/include/asm/
pgtable.h 146 static inline void update_mmu_cache_range(struct vm_fault *vmf, in update_mmu_cache_range() argument
/arch/arm/include/asm/
tlbflush.h 625 void update_mmu_cache_range(struct vm_fault *vmf, struct vm_area_struct *vma,
628 static inline void update_mmu_cache_range(struct vm_fault *vmf, in update_mmu_cache_range() argument
/arch/arm/mm/
fault-armv.c 187 void update_mmu_cache_range(struct vm_fault *vmf, struct vm_area_struct *vma, in update_mmu_cache_range() argument
/arch/xtensa/mm/
cache.c 216 void update_mmu_cache_range(struct vm_fault *vmf, struct vm_area_struct *vma, in update_mmu_cache_range() argument
/arch/csky/include/asm/
pgtable.h 261 void update_mmu_cache_range(struct vm_fault *vmf, struct vm_area_struct *vma,
/arch/nios2/include/asm/
pgtable.h 282 void update_mmu_cache_range(struct vm_fault *vmf, struct vm_area_struct *vma,
/arch/nios2/mm/
cacheflush.c 209 void update_mmu_cache_range(struct vm_fault *vmf, struct vm_area_struct *vma, in update_mmu_cache_range() argument
/arch/um/include/asm/
pgtable.h 293 #define update_mmu_cache_range(vmf, vma, address, ptep, nr) do {} while (0) argument
/arch/openrisc/include/asm/
pgtable.h 381 static inline void update_mmu_cache_range(struct vm_fault *vmf, in update_mmu_cache_range() argument
/arch/alpha/include/asm/
pgtable.h 299 static inline void update_mmu_cache_range(struct vm_fault *vmf, in update_mmu_cache_range() argument
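Most of the remaining hits are per-architecture definitions of update_mmu_cache_range(), which the core MM calls after installing a run of nr contiguous PTEs so the architecture can refresh its caches or software TLB; where nothing is needed it collapses to a no-op, as the microblaze and um results show. A stub sketch of the shared signature, for a hypothetical architecture with no cache maintenance to do:

/* No-op stub with the signature shared by the hits above (sketch only). */
static inline void update_mmu_cache_range(struct vm_fault *vmf,
		struct vm_area_struct *vma, unsigned long address,
		pte_t *ptep, unsigned int nr)
{
	/* Nothing to do on this hypothetical architecture. */
}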

Completed in 52 milliseconds
