
Searched refs:range (Results 1 – 25 of 27) sorted by relevance


/mm/
memremap.c
41 static void pgmap_array_delete(struct range *range) in pgmap_array_delete() argument
50 struct range *range = &pgmap->ranges[range_id]; in pfn_first() local
63 struct range *range = &pgmap->ranges[i]; in pgmap_pfn_valid() local
75 const struct range *range = &pgmap->ranges[range_id]; in pfn_end() local
77 return (range->start + range_len(range)) >> PAGE_SHIFT; in pfn_end()
88 struct range *range = &pgmap->ranges[range_id]; in pageunmap_range() local
102 arch_remove_memory(range->start, range_len(range), in pageunmap_range()
108 pfnmap_untrack(PHYS_PFN(range->start), range_len(range)); in pageunmap_range()
148 struct range *range = &pgmap->ranges[range_id]; in pagemap_range() local
170 is_ram = region_intersects(range->start, range_len(range), in pagemap_range()
[all …]
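Note: the memremap.c matches show the usual conversion between a byte-granular struct range and page frame numbers. A minimal sketch of that idiom, assuming the struct range / range_len() definitions from include/linux/range.h (the helper names here are hypothetical):

    #include <linux/range.h>	/* struct range, range_len() */
    #include <linux/mm.h>	/* PAGE_SHIFT, PHYS_PFN() */

    /* struct range stores inclusive byte addresses, so the first PFN past
     * the end is (start + len) >> PAGE_SHIFT, exactly as in pfn_end() above. */
    static unsigned long range_end_pfn(const struct range *range)
    {
            return (range->start + range_len(range)) >> PAGE_SHIFT;
    }

    static unsigned long range_start_pfn(const struct range *range)
    {
            return PHYS_PFN(range->start);
    }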
hmm.c
100 struct hmm_range *range = hmm_vma_walk->range; in hmm_pte_need_fault() local
135 struct hmm_range *range = hmm_vma_walk->range; in hmm_range_need_fault() local
144 if (!((range->default_flags | range->pfn_flags_mask) & in hmm_range_need_fault()
161 struct hmm_range *range = hmm_vma_walk->range; in hmm_vma_walk_hole() local
202 struct hmm_range *range = hmm_vma_walk->range; in hmm_vma_handle_pmd() local
240 struct hmm_range *range = hmm_vma_walk->range; in hmm_vma_handle_pte() local
335 struct hmm_range *range = hmm_vma_walk->range; in hmm_vma_walk_pmd() local
423 struct hmm_range *range = hmm_vma_walk->range; in hmm_vma_walk_pud() local
484 struct hmm_range *range = hmm_vma_walk->range; in hmm_vma_walk_hugetlb_entry() local
536 struct hmm_range *range = hmm_vma_walk->range; in hmm_vma_walk_test() local
[all …]
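Note: every hmm.c hit dereferences hmm_vma_walk->range, i.e. the struct hmm_range a driver hands to hmm_range_fault(). A hedged sketch of the consumer side (addr, npages, pfns, and the registered interval_sub notifier are assumed to exist; the retry-on-invalidation loop is elided):

    #include <linux/hmm.h>

    /* pfns is a caller-allocated array of npages entries; interval_sub is
     * an already-registered struct mmu_interval_notifier (assumption). */
    struct hmm_range range = {
            .notifier      = &interval_sub,
            .start         = addr,
            .end           = addr + npages * PAGE_SIZE,
            .hmm_pfns      = pfns,
            .default_flags = HMM_PFN_REQ_FAULT,	/* fault in missing pages */
    };

    ret = hmm_range_fault(&range);	/* fills pfns[] on success */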
madvise.c
167 range->start, range->end, anon_name); in madvise_update_vma()
170 range->start, range->end, new_flags); in madvise_update_vma()
826 range.start, range.end); in madvise_free_single_vma()
834 walk_page_range_vma(vma, range.start, range.end, in madvise_free_single_vma()
871 range->end - range->start, &details); in madvise_dontneed_single_vma()
916 if (range->start == range->end) in madvise_dontneed_free()
955 if (range->start == range->end) in madvise_dontneed_free()
957 VM_WARN_ON(range->start > range->end); in madvise_dontneed_free()
1177 PHYS_PFN(range->end - range->start); in madvise_guard_install()
1188 range->end - range->start, NULL); in madvise_guard_install()
[all …]
execmem.c
34 unsigned long start = range->start; in execmem_vmalloc()
35 unsigned long end = range->end; in execmem_vmalloc()
47 if (!p && range->fallback_start) { in execmem_vmalloc()
48 start = range->fallback_start; in execmem_vmalloc()
49 end = range->fallback_end; in execmem_vmalloc()
74 range->start, range->end, NUMA_NO_NODE, in execmem_vmap()
76 if (!area && range->fallback_start) in execmem_vmap()
78 range->fallback_start, range->fallback_end, in execmem_vmap()
221 if (addr >= range->start && addr + size < range->end) in within_range()
224 if (range->fallback_start && in within_range()
[all …]
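Note: execmem.c shows a two-window allocation pattern: try the preferred [start, end) first, then retry in a fallback window if one is configured (fallback_start != 0 doubles as the "has fallback" flag). Shape of that logic, with alloc_in() as a hypothetical stand-in for the underlying vmalloc-family call:

    static void *alloc_in(unsigned long start, unsigned long end);	/* stand-in */

    static void *alloc_with_fallback(const struct execmem_range *range)
    {
            void *p = alloc_in(range->start, range->end);

            /* fallback_start == 0 means no fallback window exists */
            if (!p && range->fallback_start)
                    p = alloc_in(range->fallback_start, range->fallback_end);
            return p;
    }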
mmu_notifier.c
97 const struct mmu_notifier_range *range, in mn_itree_inv_start_range() argument
106 range->end - 1); in mn_itree_inv_start_range()
120 const struct mmu_notifier_range *range) in mn_itree_inv_next() argument
125 range->start, range->end - 1); in mn_itree_inv_next()
266 struct mmu_notifier_range range = { in mn_itree_release() local
462 struct mmu_notifier_range *range) in mn_hlist_invalidate_range_start() argument
513 range); in mn_hlist_invalidate_range_start()
524 range->mm->notifier_subscriptions; in __mmu_notifier_invalidate_range_start()
539 struct mmu_notifier_range *range) in mn_hlist_invalidate_end() argument
551 range); in mn_hlist_invalidate_end()
[all …]
ptdump.c
177 const struct ptdump_range *range = st->range; in ptdump_walk_pgd() local
181 while (range->start != range->end) { in ptdump_walk_pgd()
182 walk_page_range_debug(mm, range->start, range->end, in ptdump_walk_pgd()
184 range++; in ptdump_walk_pgd()
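Note: ptdump_walk_pgd() iterates a sentinel-terminated array of ranges, stopping at the entry whose start equals end. For example (addresses are illustrative):

    /* The walk visits each entry until the {0, 0} sentinel. */
    static const struct ptdump_range ranges[] = {
            { 0xffff800000000000UL, 0xffff808000000000UL },
            { 0, 0 },	/* start == end terminates ptdump_walk_pgd() */
    };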
rmap.c
1028 struct mmu_notifier_range range; in page_vma_mkclean_one() local
1885 struct mmu_notifier_range range; in try_to_unmap_one() local
1910 address, range.end); in try_to_unmap_one()
1917 &range.end); in try_to_unmap_one()
1996 flush_cache_range(vma, range.start, range.end); in try_to_unmap_one()
2015 range.start, range.end); in try_to_unmap_one()
2288 struct mmu_notifier_range range; in try_to_migrate_one() local
2312 address, range.end); in try_to_migrate_one()
2319 &range.end); in try_to_migrate_one()
2381 flush_cache_range(vma, range.start, range.end); in try_to_migrate_one()
[all …]
mapping_dirty_helpers.c
18 struct mmu_notifier_range range; member
177 mmu_notifier_range_init(&wpwalk->range, MMU_NOTIFY_PROTECTION_PAGE, 0, in wp_clean_pre_vma()
179 mmu_notifier_invalidate_range_start(&wpwalk->range); in wp_clean_pre_vma()
203 flush_tlb_range(walk->vma, wpwalk->range.start, in wp_clean_post_vma()
204 wpwalk->range.end); in wp_clean_post_vma()
209 mmu_notifier_invalidate_range_end(&wpwalk->range); in wp_clean_post_vma()
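Note: mapping_dirty_helpers.c (and most files below) use the canonical mmu_notifier bracket: initialise a range, publish invalidate_range_start, change the PTEs and flush the TLB, then publish invalidate_range_end. A minimal sketch of the bracket, assuming vma, start and end come from the caller:

    #include <linux/mmu_notifier.h>

    struct mmu_notifier_range range;

    mmu_notifier_range_init(&range, MMU_NOTIFY_PROTECTION_PAGE, 0,
                            vma->vm_mm, start, end);
    mmu_notifier_invalidate_range_start(&range);
    /* ... write-protect or clear PTEs in [start, end),
     *     then flush_tlb_range(vma, start, end) ... */
    mmu_notifier_invalidate_range_end(&range);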
migrate_device.c
312 struct mmu_notifier_range range; in migrate_vma_collect() local
319 mmu_notifier_range_init_owner(&range, MMU_NOTIFY_MIGRATE, 0, in migrate_vma_collect()
322 mmu_notifier_invalidate_range_start(&range); in migrate_vma_collect()
327 mmu_notifier_invalidate_range_end(&range); in migrate_vma_collect()
706 struct mmu_notifier_range range; in __migrate_device_pages() local
738 mmu_notifier_range_init_owner(&range, in __migrate_device_pages()
742 mmu_notifier_invalidate_range_start(&range); in __migrate_device_pages()
788 mmu_notifier_invalidate_range_end(&range); in __migrate_device_pages()
util.c
371 unsigned long randomize_page(unsigned long start, unsigned long range) in randomize_page() argument
374 range -= PAGE_ALIGN(start) - start; in randomize_page()
378 if (start > ULONG_MAX - range) in randomize_page()
379 range = ULONG_MAX - start; in randomize_page()
381 range >>= PAGE_SHIFT; in randomize_page()
383 if (range == 0) in randomize_page()
386 return start + (get_random_long() % range << PAGE_SHIFT); in randomize_page()
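Note: the util.c hits cover nearly all of randomize_page(); the visible lines reconstruct to the flow below (the guard lines not shown in the results, the PAGE_ALIGNED check and the early return, are inferred):

    /* Pick a page-aligned address uniformly in [start, start + range). */
    unsigned long randomize_page(unsigned long start, unsigned long range)
    {
            if (!PAGE_ALIGNED(start)) {
                    range -= PAGE_ALIGN(start) - start;	/* eat alignment slack */
                    start = PAGE_ALIGN(start);
            }

            /* clamp so start + range cannot wrap past ULONG_MAX */
            if (start > ULONG_MAX - range)
                    range = ULONG_MAX - start;

            range >>= PAGE_SHIFT;	/* bytes -> pages */

            if (range == 0)
                    return start;

            return start + (get_random_long() % range << PAGE_SHIFT);
    }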
hugetlb.c
5562 struct mmu_notifier_range range; in copy_hugetlb_page_range() local
5790 struct mmu_notifier_range range; in move_hugetlb_page_tables() local
5795 adjust_range_if_pmd_sharing_possible(vma, &range.start, &range.end); in move_hugetlb_page_tables()
5800 flush_cache_range(vma, range.start, range.end); in move_hugetlb_page_tables()
5832 flush_hugetlb_tlb_range(vma, range.start, range.end); in move_hugetlb_page_tables()
6055 struct mmu_notifier_range range; in unmap_hugepage_range() local
6060 adjust_range_if_pmd_sharing_possible(vma, &range.start, &range.end); in unmap_hugepage_range()
6145 struct mmu_notifier_range range; in hugetlb_wp() local
7110 adjust_range_if_pmd_sharing_possible(vma, &range.start, &range.end); in hugetlb_change_protection()
7113 flush_cache_range(vma, range.start, range.end); in hugetlb_change_protection()
[all …]
mprotect.c
585 struct mmu_notifier_range range; in change_pud_range() local
590 range.start = 0; in change_pud_range()
606 if (!range.start) { in change_pud_range()
607 mmu_notifier_range_init(&range, in change_pud_range()
610 mmu_notifier_invalidate_range_start(&range); in change_pud_range()
634 if (range.start) in change_pud_range()
635 mmu_notifier_invalidate_range_end(&range); in change_pud_range()
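Note: change_pud_range() arms the notifier lazily: range.start is zeroed up front and doubles as an "initialised" flag, so invalidate_range_start fires only once the first PUD actually needs changing, and invalidate_range_end fires only if it did. A rough sketch of that shape (loop body condensed):

    struct mmu_notifier_range range;

    range.start = 0;	/* doubles as the "notifier armed" flag */
    /* for each pud in [addr, end) that needs changing: */ {
            if (!range.start) {
                    mmu_notifier_range_init(&range, MMU_NOTIFY_PROTECTION_VMA,
                                            0, mm, addr, end);
                    mmu_notifier_invalidate_range_start(&range);
            }
            /* ... change protections for this pud ... */
    }
    if (range.start)
            mmu_notifier_invalidate_range_end(&range);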
oom_kill.c
544 struct mmu_notifier_range range; in __oom_reap_task_mm() local
547 mmu_notifier_range_init(&range, MMU_NOTIFY_UNMAP, 0, in __oom_reap_task_mm()
551 if (mmu_notifier_invalidate_range_start_nonblock(&range)) { in __oom_reap_task_mm()
556 unmap_page_range(&tlb, vma, range.start, range.end, NULL); in __oom_reap_task_mm()
557 mmu_notifier_invalidate_range_end(&range); in __oom_reap_task_mm()
khugepaged.c
1120 struct mmu_notifier_range range; in collapse_huge_page() local
1184 mmu_notifier_invalidate_range_start(&range); in collapse_huge_page()
1197 mmu_notifier_invalidate_range_end(&range); in collapse_huge_page()
1508 struct mmu_notifier_range range; in collapse_pte_mapped_thp() local
1602 mmu_notifier_range_init(&range, MMU_NOTIFY_CLEAR, 0, mm, in collapse_pte_mapped_thp()
1604 mmu_notifier_invalidate_range_start(&range); in collapse_pte_mapped_thp()
1694 mmu_notifier_invalidate_range_end(&range); in collapse_pte_mapped_thp()
1718 mmu_notifier_invalidate_range_end(&range); in collapse_pte_mapped_thp()
1731 struct mmu_notifier_range range; in retract_page_tables() local
1771 mmu_notifier_invalidate_range_start(&range); in retract_page_tables()
[all …]
huge_memory.c
969 char *token, *range, *policy, *subtoken; in setup_thp_anon() local
984 range = strsep(&token, ":"); in setup_thp_anon()
1870 struct mmu_notifier_range range; in do_huge_zero_wp_pmd() local
1880 mmu_notifier_invalidate_range_start(&range); in do_huge_zero_wp_pmd()
1894 mmu_notifier_invalidate_range_end(&range); in do_huge_zero_wp_pmd()
2547 struct mmu_notifier_range range; in move_pages_huge_pmd() local
2662 mmu_notifier_invalidate_range_end(&range); in move_pages_huge_pmd()
2774 struct mmu_notifier_range range; in __split_huge_pud() local
2787 mmu_notifier_invalidate_range_end(&range); in __split_huge_pud()
3070 struct mmu_notifier_range range; in __split_huge_pmd() local
[all …]
memory.c
1369 struct mmu_notifier_range range; in copy_page_range() local
1419 mmu_notifier_invalidate_range_end(&range); in copy_page_range()
1962 struct mmu_notifier_range range; in unmap_vmas() local
1981 mmu_notifier_invalidate_range_end(&range); in unmap_vmas()
2000 struct mmu_notifier_range range; in zap_page_range_single_batched() local
2006 hugetlb_zap_begin(vma, &range.start, &range.end); in zap_page_range_single_batched()
2014 mmu_notifier_invalidate_range_end(&range); in zap_page_range_single_batched()
3539 struct mmu_notifier_range range; in wp_page_copy() local
3659 mmu_notifier_invalidate_range_end(&range); in wp_page_copy()
4151 struct mmu_notifier_range range; in remove_device_exclusive_entry() local
[all …]
ksm.c
1257 struct mmu_notifier_range range; in write_protect_page() local
1268 mmu_notifier_range_init(&range, MMU_NOTIFY_CLEAR, 0, mm, pvmw.address, in write_protect_page()
1270 mmu_notifier_invalidate_range_start(&range); in write_protect_page()
1336 mmu_notifier_invalidate_range_end(&range); in write_protect_page()
1363 struct mmu_notifier_range range; in replace_page() local
1381 mmu_notifier_range_init(&range, MMU_NOTIFY_CLEAR, 0, mm, addr, in replace_page()
1383 mmu_notifier_invalidate_range_start(&range); in replace_page()
1439 mmu_notifier_invalidate_range_end(&range); in replace_page()
memory_hotplug.c
1698 struct range __weak arch_get_mappable_range(void) in arch_get_mappable_range()
1700 struct range mhp_range = { in arch_get_mappable_range()
1707 struct range mhp_get_pluggable_range(bool need_mapping) in mhp_get_pluggable_range()
1710 struct range mhp_range; in mhp_get_pluggable_range()
1729 struct range mhp_range = mhp_get_pluggable_range(need_mapping); in mhp_range_allowed()
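Note: memory_hotplug.c models the architecture's pluggable window as an inclusive struct range. mhp_range_allowed() boils down to a containment check like this hypothetical helper (mirroring the excerpt; struct range.end is inclusive):

    #include <linux/memory_hotplug.h>

    static bool fits_pluggable_range(u64 start, u64 size, bool need_mapping)
    {
            struct range mhp_range = mhp_get_pluggable_range(need_mapping);

            /* candidate [start, start + size - 1] must lie inside the window */
            return mhp_range.start <= start &&
                   start + size - 1 <= mhp_range.end;
    }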
mremap.c
789 struct mmu_notifier_range range; in move_page_tables() local
808 mmu_notifier_range_init(&range, MMU_NOTIFY_UNMAP, 0, mm, in move_page_tables()
810 mmu_notifier_invalidate_range_start(&range); in move_page_tables()
867 mmu_notifier_invalidate_range_end(&range); in move_page_tables()
userfaultfd.c
1196 struct mmu_notifier_range range; in move_pages_pte() local
1200 mmu_notifier_range_init(&range, MMU_NOTIFY_CLEAR, 0, mm, in move_pages_pte()
1202 mmu_notifier_invalidate_range_start(&range); in move_pages_pte()
1460 mmu_notifier_invalidate_range_end(&range); in move_pages_pte()
Kconfig
177 range 4 16
714 range 0 6
730 Enable bounce buffers for devices that cannot access the full range of
1017 range 1 10 if ARCH_FORCE_MAX_ORDER = 0
1019 range 1 ARCH_FORCE_MAX_ORDER if ARCH_FORCE_MAX_ORDER != 0
1056 range 8 2048
1149 # Helpers to mirror range of the CPU page tables of a process into device page
1212 range of user-space addresses. These pages are either pinned via
/mm/damon/
sysfs-common.c
22 struct damon_sysfs_ul_range *range = kmalloc(sizeof(*range), in damon_sysfs_ul_range_alloc() local
25 if (!range) in damon_sysfs_ul_range_alloc()
27 range->kobj = (struct kobject){}; in damon_sysfs_ul_range_alloc()
28 range->min = min; in damon_sysfs_ul_range_alloc()
29 range->max = max; in damon_sysfs_ul_range_alloc()
31 return range; in damon_sysfs_ul_range_alloc()
37 struct damon_sysfs_ul_range *range = container_of(kobj, in min_show() local
40 return sysfs_emit(buf, "%lu\n", range->min); in min_show()
55 range->min = min; in min_store()
65 return sysfs_emit(buf, "%lu\n", range->max); in max_show()
[all …]
core.c
231 struct damon_addr_range *range; in damon_set_regions() local
233 range = &ranges[i]; in damon_set_regions()
236 if (damon_intersect(r, range)) { in damon_set_regions()
241 if (r->ar.start >= range->end) in damon_set_regions()
247 ALIGN_DOWN(range->start, in damon_set_regions()
249 ALIGN(range->end, DAMON_MIN_REGION)); in damon_set_regions()
255 first->ar.start = ALIGN_DOWN(range->start, in damon_set_regions()
257 last->ar.end = ALIGN(range->end, DAMON_MIN_REGION); in damon_set_regions()
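Note: damon_set_regions() snaps each requested range outward to DAMON_MIN_REGION granularity: start is rounded down, end is rounded up. For example, with DAMON_MIN_REGION == 4096 (illustrative value):

    /* requested [5000, 9000) becomes the region [4096, 12288) */
    first->ar.start = ALIGN_DOWN(range->start, DAMON_MIN_REGION);	/* 4096  */
    last->ar.end    = ALIGN(range->end, DAMON_MIN_REGION);		/* 12288 */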
sysfs-schemes.c
1587 struct damon_sysfs_ul_range *range = damon_sysfs_ul_range_alloc(0, 0); in damon_sysfs_access_pattern_add_range_dir() local
1590 if (!range) in damon_sysfs_access_pattern_add_range_dir()
1592 err = kobject_init_and_add(&range->kobj, &damon_sysfs_ul_range_ktype, in damon_sysfs_access_pattern_add_range_dir()
1595 kobject_put(&range->kobj); in damon_sysfs_access_pattern_add_range_dir()
1597 *range_dir_ptr = range; in damon_sysfs_access_pattern_add_range_dir()
/mm/damon/tests/
core-kunit.h
299 struct damon_addr_range range = {.start = 8, .end = 28}; in damon_test_set_regions() local
306 damon_set_regions(t, &range, 1); in damon_test_set_regions()

Completed in 118 milliseconds
