/linux/include/trace/events/fs_dax.h
    11: TP_PROTO(struct inode *inode, struct vm_fault *vmf,
    56: TP_PROTO(struct inode *inode, struct vm_fault *vmf, \
    64: TP_PROTO(struct inode *inode, struct vm_fault *vmf,
    98: TP_PROTO(struct inode *inode, struct vm_fault *vmf, \
    106: TP_PROTO(struct inode *inode, struct vm_fault *vmf,
    147: TP_PROTO(struct inode *inode, struct vm_fault *vmf, \
    154: TP_PROTO(struct inode *inode, struct vm_fault *vmf, int result),
    188: TP_PROTO(struct inode *inode, struct vm_fault *vmf, int result), \
    198: TP_PROTO(struct inode *inode, struct vm_fault *vmf, void *radix_entry),
|
/linux/include/linux/huge_mm.h
    11: vm_fault_t do_huge_pmd_anonymous_page(struct vm_fault *vmf);
    15: void huge_pmd_set_accessed(struct vm_fault *vmf);
    21: void huge_pud_set_accessed(struct vm_fault *vmf, pud_t orig_pud);
    23: static inline void huge_pud_set_accessed(struct vm_fault *vmf, pud_t orig_pud) in huge_pud_set_accessed()
    28: vm_fault_t do_huge_pmd_wp_page(struct vm_fault *vmf);
    41: vm_fault_t vmf_insert_pfn_pmd(struct vm_fault *vmf, pfn_t pfn, bool write);
    42: vm_fault_t vmf_insert_pfn_pud(struct vm_fault *vmf, pfn_t pfn, bool write);
    456: vm_fault_t do_huge_pmd_numa_page(struct vm_fault *vmf);
    602: static inline vm_fault_t do_huge_pmd_numa_page(struct vm_fault *vmf) in do_huge_pmd_numa_page()
|
/linux/include/linux/mm.h
    547: struct vm_fault { struct
    615: vm_fault_t (*fault)(struct vm_fault *vmf);
    617: vm_fault_t (*map_pages)(struct vm_fault *vmf,
    623: vm_fault_t (*page_mkwrite)(struct vm_fault *vmf);
    626: vm_fault_t (*pfn_mkwrite)(struct vm_fault *vmf);
    789: static inline void release_fault_lock(struct vm_fault *vmf) in release_fault_lock()
    1348: vm_fault_t finish_fault(struct vm_fault *vmf);
    3404: extern vm_fault_t filemap_fault(struct vm_fault *vmf);
    3405: extern vm_fault_t filemap_map_pages(struct vm_fault *vmf,
    3595: if (vm_fault & VM_FAULT_OOM) in vm_fault_to_errno()
    [all …]
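
The fault, map_pages, page_mkwrite and pfn_mkwrite members listed above are the vm_operations_struct hooks that receive a struct vm_fault. As a rough, hedged illustration (not taken from any file in this listing; my_fault, my_vm_ops and MY_NR_PAGES are invented names), a minimal .fault implementation has this shape:

#include <linux/mm.h>
#include <linux/gfp.h>

#define MY_NR_PAGES 16  /* hypothetical size of the backing object, in pages */

static vm_fault_t my_fault(struct vm_fault *vmf)
{
        struct page *page;

        /* vmf->pgoff is the offset of the faulting page within the mapping. */
        if (vmf->pgoff >= MY_NR_PAGES)
                return VM_FAULT_SIGBUS;

        page = alloc_page(GFP_KERNEL | __GFP_ZERO);
        if (!page)
                return VM_FAULT_OOM;

        /* Return the page to the fault core; finish_fault() installs the PTE. */
        vmf->page = page;
        return 0;
}

static const struct vm_operations_struct my_vm_ops = {
        .fault = my_fault,
};

A real driver normally returns a page it already owns a reference to, or uses one of the vmf_insert_* helpers, rather than allocating a fresh page on every fault.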
|
/linux/include/linux/mempolicy.h
    170: int mpol_misplaced(struct folio *folio, struct vm_fault *vmf,
    286: struct vm_fault *vmf, in mpol_misplaced()
|
/linux/drivers/dax/device.c
    76: static void dax_set_mapping(struct vm_fault *vmf, pfn_t pfn, in dax_set_mapping()
    104: struct vm_fault *vmf) in __dev_dax_pte_fault()
    137: struct vm_fault *vmf) in __dev_dax_pmd_fault()
    181: struct vm_fault *vmf) in __dev_dax_pud_fault()
    225: struct vm_fault *vmf) in __dev_dax_pud_fault()
    231: static vm_fault_t dev_dax_huge_fault(struct vm_fault *vmf, unsigned int order) in dev_dax_huge_fault()
    257: static vm_fault_t dev_dax_fault(struct vm_fault *vmf) in dev_dax_fault()
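
dev_dax_huge_fault() above shows the usual shape of a .huge_fault handler: it is invoked with the fault order and either services the fault at that size or returns VM_FAULT_FALLBACK so the core retries with smaller pages; dev_dax_fault() at line 257 is essentially the same path invoked with order 0. A hedged sketch of that dispatch (my_huge_fault and the per-size helpers are invented; only the order/VM_FAULT_FALLBACK convention comes from the kernel):

#include <linux/mm.h>

/* Hypothetical per-size helpers, assumed to be provided by the driver. */
static vm_fault_t my_pte_fault(struct vm_fault *vmf);
static vm_fault_t my_pmd_fault(struct vm_fault *vmf);

static vm_fault_t my_huge_fault(struct vm_fault *vmf, unsigned int order)
{
        if (order == 0)
                return my_pte_fault(vmf);
        if (order == PMD_SHIFT - PAGE_SHIFT)
                return my_pmd_fault(vmf);

        /* Any other size: let the core fall back to smaller mappings. */
        return VM_FAULT_FALLBACK;
}

The handler is wired up as .huge_fault in the vm_operations_struct; .fault still covers the order-0 case for callers that never attempt huge mappings.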
|
/linux/include/drm/ttm/ttm_bo.h
    425: struct vm_fault *vmf);
    426: vm_fault_t ttm_bo_vm_fault_reserved(struct vm_fault *vmf,
    429: vm_fault_t ttm_bo_vm_fault(struct vm_fault *vmf);
    434: vm_fault_t ttm_bo_vm_dummy_page(struct vm_fault *vmf, pgprot_t prot);
|
/linux/drivers/gpu/drm/ttm/ttm_bo_vm.c
    42: struct vm_fault *vmf) in ttm_bo_vm_fault_idle()
    117: struct vm_fault *vmf) in ttm_bo_vm_reserve()
    181: vm_fault_t ttm_bo_vm_fault_reserved(struct vm_fault *vmf, in ttm_bo_vm_fault_reserved()
    291: vm_fault_t ttm_bo_vm_dummy_page(struct vm_fault *vmf, pgprot_t prot) in ttm_bo_vm_dummy_page()
    321: vm_fault_t ttm_bo_vm_fault(struct vm_fault *vmf) in ttm_bo_vm_fault()
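
ttm_bo_vm_fault() is exported so TTM-based DRM drivers can reuse it rather than open-coding the reserve-and-populate sequence (ttm_bo_vm_reserve() + ttm_bo_vm_fault_reserved() above). A driver with no special requirements can wire it straight into its vm_operations_struct, roughly as below; my_ttm_vm_ops is an invented name, and the open/close/access helpers are the companions TTM exports alongside the fault handler:

#include <drm/ttm/ttm_bo.h>

static const struct vm_operations_struct my_ttm_vm_ops = {
        .fault  = ttm_bo_vm_fault,      /* reserve the BO, then populate the PTEs */
        .open   = ttm_bo_vm_open,       /* take a reference on the BO for the new VMA */
        .close  = ttm_bo_vm_close,      /* drop that reference */
        .access = ttm_bo_vm_access,     /* access_process_vm() support (ptrace etc.) */
};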
|
/linux/tools/testing/vma/vma_internal.h
    290: struct vm_fault {}; struct
    309: vm_fault_t (*fault)(struct vm_fault *vmf);
    310: vm_fault_t (*huge_fault)(struct vm_fault *vmf, unsigned int order);
    311: vm_fault_t (*map_pages)(struct vm_fault *vmf,
    317: vm_fault_t (*page_mkwrite)(struct vm_fault *vmf);
    320: vm_fault_t (*pfn_mkwrite)(struct vm_fault *vmf);
|
/linux/mm/memory.c
    107: static vm_fault_t do_fault(struct vm_fault *vmf);
    109: static bool vmf_pte_changed(struct vm_fault *vmf);
    3034: struct vm_fault *vmf) in __wp_page_copy_user()
    3296: vm_fault_t __vmf_anon_prepare(struct vm_fault *vmf) in __vmf_anon_prepare()
    3655: static vm_fault_t do_wp_page(struct vm_fault *vmf) in do_wp_page()
    4200: vm_fault_t do_swap_page(struct vm_fault *vmf) in do_swap_page()
    4855: static vm_fault_t __do_fault(struct vm_fault *vmf) in __do_fault()
    5048: static bool vmf_pte_changed(struct vm_fault *vmf) in vmf_pte_changed()
    5071: vm_fault_t finish_fault(struct vm_fault *vmf) in finish_fault()
    5401: static vm_fault_t do_fault(struct vm_fault *vmf) in do_fault()
    [all …]
|
/linux/mm/swap.h
    77: struct vm_fault *vmf);
    139: struct vm_fault *vmf) in swapin_readahead()
|
/linux/arch/arc/include/asm/pgtable-bits-arcv2.h
    103: struct vm_fault;
    104: void update_mmu_cache_range(struct vm_fault *vmf, struct vm_area_struct *vma,
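
update_mmu_cache_range() is the per-architecture hook the fault path calls after installing one or more PTEs, which is why its first argument is the struct vm_fault. Architectures whose caches are coherent with the MMU stub it out; a sketch of such a stub is below (the trailing address/ptep/nr parameters are my reading of the cross-architecture prototype and may differ between kernel versions):

static inline void update_mmu_cache_range(struct vm_fault *vmf,
                struct vm_area_struct *vma, unsigned long address,
                pte_t *ptep, unsigned int nr)
{
        /* Nothing to do: no software-managed cache/TLB maintenance needed. */
}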
|
/linux/fs/ocfs2/mmap.c
    31: static vm_fault_t ocfs2_fault(struct vm_fault *vmf) in ocfs2_fault()
    113: static vm_fault_t ocfs2_page_mkwrite(struct vm_fault *vmf) in ocfs2_page_mkwrite()
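
ocfs2_page_mkwrite() implements the .page_mkwrite hook, which runs when a page already mapped read-only in a shared mapping takes its first write fault, giving the filesystem a chance to validate and dirty the folio before the PTE becomes writable. This is not ocfs2's code, just a hedged sketch of the common shape of such a handler:

#include <linux/mm.h>
#include <linux/pagemap.h>

static vm_fault_t my_page_mkwrite(struct vm_fault *vmf)
{
        struct folio *folio = page_folio(vmf->page);
        struct inode *inode = file_inode(vmf->vma->vm_file);

        folio_lock(folio);

        /* The folio may have been truncated while the fault was in flight. */
        if (folio->mapping != inode->i_mapping ||
            folio_pos(folio) >= i_size_read(inode)) {
                folio_unlock(folio);
                return VM_FAULT_NOPAGE;
        }

        folio_mark_dirty(folio);
        folio_wait_stable(folio);

        /* Tell the core the folio is returned locked. */
        return VM_FAULT_LOCKED;
}

Real filesystems additionally bracket this in sb_start_pagefault()/sb_end_pagefault() and call file_update_time() on the write fault.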
|
/linux/fs/dax.c
    869: static void *dax_insert_entry(struct xa_state *xas, struct vm_fault *vmf, in dax_insert_entry()
    1186: static vm_fault_t dax_load_hole(struct xa_state *xas, struct vm_fault *vmf, in dax_load_hole()
    1202: static vm_fault_t dax_pmd_load_hole(struct xa_state *xas, struct vm_fault *vmf, in dax_pmd_load_hole()
    1618: static vm_fault_t dax_fault_cow_page(struct vm_fault *vmf, in dax_fault_cow_page()
    1657: static vm_fault_t dax_fault_iter(struct vm_fault *vmf, in dax_fault_iter()
    1712: static vm_fault_t dax_iomap_pte_fault(struct vm_fault *vmf, pfn_t *pfnp, in dax_iomap_pte_fault()
    1822: static vm_fault_t dax_iomap_pmd_fault(struct vm_fault *vmf, pfn_t *pfnp, in dax_iomap_pmd_fault()
    1902: static vm_fault_t dax_iomap_pmd_fault(struct vm_fault *vmf, pfn_t *pfnp, in dax_iomap_pmd_fault()
    1922: vm_fault_t dax_iomap_fault(struct vm_fault *vmf, unsigned int order, in dax_iomap_fault()
    1944: dax_insert_pfn_mkwrite(struct vm_fault *vmf, pfn_t pfn, unsigned int order) in dax_insert_pfn_mkwrite()
    [all …]
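
dax_iomap_fault() at line 1922 is the entry point a DAX-capable filesystem delegates to from its own fault handlers: the filesystem supplies its iomap_ops and fs/dax.c performs the PTE- or PMD-sized mapping via the dax_iomap_pte_fault()/dax_iomap_pmd_fault() helpers listed above. A hedged sketch of that delegation (my_filemap_huge_fault and my_iomap_ops are invented; the trailing pfn/error parameters reflect my reading of the current prototype and may differ by kernel version):

#include <linux/dax.h>
#include <linux/iomap.h>
#include <linux/pfn_t.h>

/* Hypothetical iomap_ops provided elsewhere by the filesystem. */
extern const struct iomap_ops my_iomap_ops;

static vm_fault_t my_filemap_huge_fault(struct vm_fault *vmf, unsigned int order)
{
        pfn_t pfn;

        /* The DAX core walks the iomap and installs the mapping for us. */
        return dax_iomap_fault(vmf, order, &pfn, NULL, &my_iomap_ops);
}

Write faults also take sb_start_pagefault()/sb_end_pagefault(), and MAP_SYNC mappings follow up with dax_finish_sync_fault(), much as the xfs_file.c handlers later in this listing do.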
|
/linux/fs/bcachefs/fs-io-pagecache.h
    167: vm_fault_t bch2_page_fault(struct vm_fault *);
    168: vm_fault_t bch2_page_mkwrite(struct vm_fault *);
|
/linux/arch/hexagon/mm/Makefile
    6: obj-y := init.o uaccess.o vm_fault.o cache.o
|
/linux/drivers/video/fbdev/core/fb_defio.c
    129: static vm_fault_t fb_deferred_io_fault(struct vm_fault *vmf) in fb_deferred_io_fault()
    230: static vm_fault_t fb_deferred_io_page_mkwrite(struct fb_info *info, struct vm_fault *vmf) in fb_deferred_io_page_mkwrite()
    241: static vm_fault_t fb_deferred_io_mkwrite(struct vm_fault *vmf) in fb_deferred_io_mkwrite()
|
/linux/drivers/gpu/drm/nouveau/nvkm/subdev/fb/nv50.c
    121: static const struct nvkm_enum vm_fault[] = { variable
    174: re = nvkm_enum_find(vm_fault , st1); in nv50_fb_intr()
|
/linux/drivers/gpu/drm/amd/amdgpu/gmc_v11_0.c
    166: adev->gmc.vm_fault.num_types = 1; in gmc_v11_0_set_irq_funcs()
    167: adev->gmc.vm_fault.funcs = &gmc_v11_0_irq_funcs; in gmc_v11_0_set_irq_funcs()
    638: return amdgpu_irq_get(adev, &adev->gmc.vm_fault, 0); in gmc_v11_0_late_init()
    775: &adev->gmc.vm_fault); in gmc_v11_0_sw_init()
    782: &adev->gmc.vm_fault); in gmc_v11_0_sw_init()
    953: amdgpu_irq_put(adev, &adev->gmc.vm_fault, 0); in gmc_v11_0_hw_fini()
|
/linux/drivers/gpu/drm/amd/amdgpu/gmc_v12_0.c
    159: adev->gmc.vm_fault.num_types = 1; in gmc_v12_0_set_irq_funcs()
    160: adev->gmc.vm_fault.funcs = &gmc_v12_0_irq_funcs; in gmc_v12_0_set_irq_funcs()
    640: return amdgpu_irq_get(adev, &adev->gmc.vm_fault, 0); in gmc_v12_0_late_init()
    771: &adev->gmc.vm_fault); in gmc_v12_0_sw_init()
    778: &adev->gmc.vm_fault); in gmc_v12_0_sw_init()
    937: amdgpu_irq_put(adev, &adev->gmc.vm_fault, 0); in gmc_v12_0_hw_fini()
|
/linux/drivers/gpu/drm/amd/amdgpu/gmc_v6_0.c
    780: return amdgpu_irq_get(adev, &adev->gmc.vm_fault, 0); in gmc_v6_0_late_init()
    818: r = amdgpu_irq_add_id(adev, AMDGPU_IRQ_CLIENTID_LEGACY, 146, &adev->gmc.vm_fault); in gmc_v6_0_sw_init()
    822: r = amdgpu_irq_add_id(adev, AMDGPU_IRQ_CLIENTID_LEGACY, 147, &adev->gmc.vm_fault); in gmc_v6_0_sw_init()
    921: amdgpu_irq_put(adev, &adev->gmc.vm_fault, 0); in gmc_v6_0_hw_fini()
    1137: adev->gmc.vm_fault.num_types = 1; in gmc_v6_0_set_irq_funcs()
    1138: adev->gmc.vm_fault.funcs = &gmc_v6_0_irq_funcs; in gmc_v6_0_set_irq_funcs()
|
/linux/drivers/gpu/drm/amd/amdgpu/gmc_v10_0.c
    197: adev->gmc.vm_fault.num_types = 1; in gmc_v10_0_set_irq_funcs()
    198: adev->gmc.vm_fault.funcs = &gmc_v10_0_irq_funcs; in gmc_v10_0_set_irq_funcs()
    667: return amdgpu_irq_get(adev, &adev->gmc.vm_fault, 0); in gmc_v10_0_late_init()
    846: &adev->gmc.vm_fault); in gmc_v10_0_sw_init()
    853: &adev->gmc.vm_fault); in gmc_v10_0_sw_init()
    1047: amdgpu_irq_put(adev, &adev->gmc.vm_fault, 0); in gmc_v10_0_hw_fini()
|
/linux/fs/xfs/xfs_file.c
    1400: struct vm_fault *vmf, in xfs_dax_fault_locked()
    1422: struct vm_fault *vmf, in xfs_dax_read_fault()
    1437: struct vm_fault *vmf, in xfs_write_fault()
    1482: struct vm_fault *vmf, in __xfs_filemap_fault()
    1499: struct vm_fault *vmf) in xfs_is_write_fault()
    1507: struct vm_fault *vmf) in xfs_filemap_fault()
    1517: struct vm_fault *vmf, in xfs_filemap_huge_fault()
    1530: struct vm_fault *vmf) in xfs_filemap_page_mkwrite()
    1542: struct vm_fault *vmf) in xfs_filemap_pfn_mkwrite()
|
/linux/arch/hexagon/include/asm/cacheflush.h
    61: static inline void update_mmu_cache_range(struct vm_fault *vmf, in update_mmu_cache_range()
|
/linux/arch/csky/abiv1/cacheflush.c
    44: void update_mmu_cache_range(struct vm_fault *vmf, struct vm_area_struct *vma, in update_mmu_cache_range()
|
/linux/arch/csky/abiv2/cacheflush.c
    10: void update_mmu_cache_range(struct vm_fault *vmf, struct vm_area_struct *vma, in update_mmu_cache_range()
|