/mm/

pagewalk.c
     63  struct mm_walk *walk) in walk_pte_range()
     98  struct mm_walk *walk) in walk_pmd_range()
    168  struct mm_walk *walk) in walk_pud_range()
    233  struct mm_walk *walk) in walk_p4d_range()
    363  struct mm_walk *walk) in walk_page_test()
    389  struct mm_walk *walk) in __walk_page_range()
    462  struct mm_walk walk = { in walk_page_range_mm() local
    610  struct mm_walk walk = { in walk_kernel_page_table_range() local
    655  struct mm_walk walk = { in walk_page_range_debug() local
    689  struct mm_walk walk = { in walk_page_range_vma() local
    [all …]
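Most of the files in this index are consumers of the generic walker implemented here: they register mm_walk_ops callbacks and hand them to walk_page_range() or one of its variants. Below is a minimal sketch of that pattern, assuming the <linux/pagewalk.h> API; count_ctx, count_pte_entry() and count_present_ptes() are hypothetical names, and details such as ptep_get() and the walk_lock field vary between kernel versions.

```c
/* Hypothetical sketch of the mm_walk_ops pattern used by the files below. */
#include <linux/pagewalk.h>
#include <linux/mm.h>

struct count_ctx {
	unsigned long nr_present;
};

/* Invoked for each PTE in the walked range (see walk_pte_range() above). */
static int count_pte_entry(pte_t *pte, unsigned long addr,
			   unsigned long next, struct mm_walk *walk)
{
	struct count_ctx *ctx = walk->private;

	if (pte_present(ptep_get(pte)))	/* older kernels read *pte directly */
		ctx->nr_present++;
	return 0;			/* non-zero aborts the walk */
}

static const struct mm_walk_ops count_ops = {
	.pte_entry	= count_pte_entry,
	.walk_lock	= PGWALK_RDLOCK,	/* newer kernels only */
};

/* Caller is expected to hold mmap_lock (read) for @mm across the walk. */
static unsigned long count_present_ptes(struct mm_struct *mm,
					unsigned long start, unsigned long end)
{
	struct count_ctx ctx = { 0 };

	walk_page_range(mm, start, end, &count_ops, &ctx);
	return ctx.nr_present;
}
```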
|
ptdump.c
     17  static inline int note_kasan_page_table(struct mm_walk *walk, in note_kasan_page_table()
     31  unsigned long next, struct mm_walk *walk) in ptdump_pgd_entry()
     54  unsigned long next, struct mm_walk *walk) in ptdump_p4d_entry()
     77  unsigned long next, struct mm_walk *walk) in ptdump_pud_entry()
    100  unsigned long next, struct mm_walk *walk) in ptdump_pmd_entry()
    121  unsigned long next, struct mm_walk *walk) in ptdump_pte_entry()
    135  int depth, struct mm_walk *walk) in ptdump_hole()
|
mapping_dirty_helpers.c
     35  struct mm_walk *walk) in wp_pte()
     90  unsigned long end, struct mm_walk *walk) in clean_record_pte()
    127  struct mm_walk *walk) in wp_clean_pmd_entry()
    149  struct mm_walk *walk) in wp_clean_pud_entry()
    170  struct mm_walk *walk) in wp_clean_pre_vma()
    198  static void wp_clean_post_vma(struct mm_walk *walk) in wp_clean_post_vma()
    219  struct mm_walk *walk) in wp_clean_test_walk()
|
hmm.c
     74  unsigned int required_fault, struct mm_walk *walk) in hmm_vma_fault()
    158  __always_unused int depth, struct mm_walk *walk) in hmm_vma_walk_hole()
    197  static int hmm_vma_handle_pmd(struct mm_walk *walk, unsigned long addr, in hmm_vma_handle_pmd()
    235  static int hmm_vma_handle_pte(struct mm_walk *walk, unsigned long addr, in hmm_vma_handle_pte()
    332  struct mm_walk *walk) in hmm_vma_walk_pmd()
    420  struct mm_walk *walk) in hmm_vma_walk_pud()
    480  struct mm_walk *walk) in hmm_vma_walk_hugetlb_entry()
    533  struct mm_walk *walk) in hmm_vma_walk_test()
|
hugetlb_vmemmap.c
     52  struct vmemmap_remap_walk *walk) in vmemmap_split_pmd()
     98  unsigned long next, struct mm_walk *walk) in vmemmap_pmd_entry()
    139  unsigned long next, struct mm_walk *walk) in vmemmap_pte_entry()
    162  struct vmemmap_remap_walk *walk) in vmemmap_remap_range()
    208  struct vmemmap_remap_walk *walk) in vmemmap_remap_pte()
    256  struct vmemmap_remap_walk *walk) in vmemmap_restore_pte()
    292  struct vmemmap_remap_walk walk = { in vmemmap_remap_split() local
    324  struct vmemmap_remap_walk walk = { in vmemmap_remap_free() local
    427  struct vmemmap_remap_walk walk = { in vmemmap_remap_alloc() local
|
mincore.c
     27  unsigned long end, struct mm_walk *walk) in mincore_hugetlb()
     97  struct mm_walk *walk) in mincore_unmapped_range()
    105  struct mm_walk *walk) in mincore_pte_range()
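The three walkers above implement mincore(2). A hedged userspace illustration of the call they service follows; the mapping size and touch pattern are arbitrary.

```c
/* Userspace illustration: mincore(2) is answered by the walkers above. */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/mman.h>
#include <unistd.h>

int main(void)
{
	long page = sysconf(_SC_PAGESIZE);
	size_t len = 16 * page;
	unsigned char *vec = malloc(len / page);
	char *buf = mmap(NULL, len, PROT_READ | PROT_WRITE,
			 MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);

	if (buf == MAP_FAILED || !vec)
		return 1;

	memset(buf, 0, 4 * page);	/* fault in the first four pages only */

	if (mincore(buf, len, vec) == 0) {
		for (size_t i = 0; i < len / page; i++)
			printf("page %zu: %s\n", i,
			       (vec[i] & 1) ? "resident" : "not resident");
	}

	free(vec);
	munmap(buf, len);
	return 0;
}
```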
|
madvise.c
    188  unsigned long end, struct mm_walk *walk) in swapin_walk_pmd_entry()
    358  struct mm_walk *walk) in madvise_cold_or_pageout_pte_range()
    656  unsigned long end, struct mm_walk *walk) in madvise_free_pte_range()
   1079  unsigned long next, struct mm_walk *walk) in guard_install_pud_entry()
   1088  unsigned long next, struct mm_walk *walk) in guard_install_pmd_entry()
   1097  unsigned long next, struct mm_walk *walk) in guard_install_pte_entry()
   1114  pte_t *ptep, struct mm_walk *walk) in guard_install_set_pte()
   1200  unsigned long next, struct mm_walk *walk) in guard_remove_pud_entry()
   1212  unsigned long next, struct mm_walk *walk) in guard_remove_pmd_entry()
   1224  unsigned long next, struct mm_walk *walk) in guard_remove_pte_entry()
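madvise_cold_or_pageout_pte_range() and madvise_free_pte_range() above are reached from madvise(2). A hedged userspace sketch of those requests; advise_cold() and advise_free() are hypothetical helpers, and MADV_COLD needs Linux 5.4 or later.

```c
/* Userspace sketch of madvise(2) requests served by the walkers above. */
#include <sys/mman.h>
#include <stddef.h>

static int advise_cold(void *addr, size_t len)
{
	/* Deactivate the pages so they are reclaimed first under memory
	 * pressure; handled by madvise_cold_or_pageout_pte_range(). */
	return madvise(addr, len, MADV_COLD);
}

static int advise_free(void *addr, size_t len)
{
	/* Mark clean anonymous pages as lazily freeable; handled by
	 * madvise_free_pte_range(). */
	return madvise(addr, len, MADV_FREE);
}
```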
|
migrate_device.c
     22  struct mm_walk *walk) in migrate_vma_collect_skip()
     38  struct mm_walk *walk) in migrate_vma_collect_hole()
     60  struct mm_walk *walk) in migrate_vma_collect_pmd()
|
vmscan.c
   3328  static void reset_batch_size(struct lru_gen_mm_walk *walk) in reset_batch_size()
   3357  struct lru_gen_mm_walk *walk = args->private; in should_skip_vma() local
   3532  struct lru_gen_mm_walk *walk = args->private; in walk_pte_range() local
   3608  struct lru_gen_mm_walk *walk = args->private; in walk_pmd_range_locked() local
   3698  struct lru_gen_mm_walk *walk = args->private; in walk_pmd_range() local
   3768  struct lru_gen_mm_walk *walk = args->private; in walk_pud_range() local
   3843  struct lru_gen_mm_walk *walk = current->reclaim_state->mm_walk; in set_mm_walk() local
   3862  struct lru_gen_mm_walk *walk = current->reclaim_state->mm_walk; in clear_mm_walk() local
   4052  struct lru_gen_mm_walk *walk; in try_to_inc_max_seq() local
   4234  struct lru_gen_mm_walk *walk; in lru_gen_look_around() local
   [all …]
|
mprotect.c
    725  unsigned long next, struct mm_walk *walk) in prot_none_pte_entry()
    734  struct mm_walk *walk) in prot_none_hugetlb_entry()
    742  struct mm_walk *walk) in prot_none_test()
|
mempolicy.c
    643  static void queue_folios_pmd(pmd_t *pmd, struct mm_walk *walk) in queue_folios_pmd()
    676  unsigned long end, struct mm_walk *walk) in queue_folios_pte_range()
    762  struct mm_walk *walk) in queue_folios_hugetlb()
    836  struct mm_walk *walk) in queue_pages_test_walk()
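The queue_folios_*() walkers above gather folios on behalf of mbind(2) and related memory-policy syscalls. A hedged userspace sketch, assuming libnuma's <numaif.h> wrapper; bind_to_node0() is a hypothetical helper and the nodemask handling is simplified.

```c
/* Userspace sketch: mbind(2) with MPOL_MF_MOVE drives queue_folios_*() above. */
#include <numaif.h>	/* mbind(); link with -lnuma, or use syscall(2) */
#include <stddef.h>

static long bind_to_node0(void *addr, size_t len)
{
	unsigned long nodemask = 1UL << 0;	/* NUMA node 0 only */

	/* Bind the range to node 0 and migrate already-mapped pages there. */
	return mbind(addr, len, MPOL_BIND, &nodemask,
		     8 * sizeof(nodemask), MPOL_MF_MOVE);
}
```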
|
mlock.c
    351  unsigned long end, struct mm_walk *walk) in mlock_pte_range()
|
memory-failure.c
    803  unsigned long end, struct mm_walk *walk) in hwpoison_pte_range()
    837  struct mm_walk *walk) in hwpoison_hugetlb_range()
|
/mm/damon/
vaddr.c
    307  unsigned long next, struct mm_walk *walk) in damon_mkold_pmd_entry()
    373  struct mm_walk *walk) in damon_mkold_hugetlb_entry()
    442  unsigned long next, struct mm_walk *walk) in damon_young_pmd_entry()
    507  struct mm_walk *walk) in damon_young_hugetlb_entry()
    727  unsigned long next, struct mm_walk *walk) in damos_va_migrate_pmd_entry()
    767  unsigned long next, struct mm_walk *walk) in damos_va_migrate_pte_entry()
|