
Searched refs:target (Results 1 – 15 of 15) sorted by relevance

/mm/damon/
modules-common.c
    21  struct damon_target *target;  in damon_modules_new_paddr_ctx_target() local
    32  target = damon_new_target();  in damon_modules_new_paddr_ctx_target()
    33  if (!target) {  in damon_modules_new_paddr_ctx_target()
    37  damon_add_target(ctx, target);  in damon_modules_new_paddr_ctx_target()
    40  *targetp = target;  in damon_modules_new_paddr_ctx_target()
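
Taken together, these five hits outline the shared module helper: allocate a DAMON context for the physical address space, allocate one monitoring target, link the two, and hand both back to the caller. A minimal sketch of that pattern, reconstructed from the hits above (the damon_select_ops() call and the error unwinding are assumptions, not shown in the results):

    int damon_modules_new_paddr_ctx_target(struct damon_ctx **ctxp,
                    struct damon_target **targetp)
    {
            struct damon_ctx *ctx;
            struct damon_target *target;

            ctx = damon_new_ctx();
            if (!ctx)
                    return -ENOMEM;

            /* assumed: switch the context to physical-address monitoring */
            if (damon_select_ops(ctx, DAMON_OPS_PADDR)) {
                    damon_destroy_ctx(ctx);
                    return -EINVAL;
            }

            target = damon_new_target();            /* hit at line 32 */
            if (!target) {                          /* hit at line 33 */
                    damon_destroy_ctx(ctx);
                    return -ENOMEM;
            }
            damon_add_target(ctx, target);          /* hit at line 37 */

            *ctxp = ctx;
            *targetp = target;                      /* hit at line 40 */
            return 0;
    }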

stat.c
    145  struct damon_target *target;  in damon_stat_build_ctx() local
    180  target = damon_new_target();  in damon_stat_build_ctx()
    181  if (!target)  in damon_stat_build_ctx()
    183  damon_add_target(ctx, target);  in damon_stat_build_ctx()
    184  if (damon_set_region_biggest_system_ram_default(target, &start, &end))  in damon_stat_build_ctx()
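
stat.c follows the same allocate-and-add pattern, then pins the new target's initial monitoring region to the largest contiguous System RAM block. A hedged sketch of that tail of damon_stat_build_ctx() (the unwind label and surrounding variables are assumptions):

            unsigned long start, end;

            target = damon_new_target();            /* hit at line 180 */
            if (!target)                            /* hit at line 181 */
                    goto free_ctx;                  /* assumed unwind label */
            damon_add_target(ctx, target);          /* hit at line 183 */
            /* default the region to the biggest System RAM block */
            if (damon_set_region_biggest_system_ram_default(target,
                            &start, &end))          /* hit at line 184 */
                    goto free_ctx;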

sysfs.c
    231  &target->kobj, "regions");  in damon_sysfs_target_add_dirs()
    235  target->regions = regions;  in damon_sysfs_target_add_dirs()
    242  kobject_put(&target->regions->kobj);  in damon_sysfs_target_rm_dirs()
    251  return sysfs_emit(buf, "%d\n", target->pid);  in pid_target_show()
    259  int err = kstrtoint(buf, 0, &target->pid);  in pid_target_store()
    332  target = damon_sysfs_target_alloc();  in damon_sysfs_targets_add_dirs()
    333  if (!target) {  in damon_sysfs_targets_add_dirs()
    338  err = kobject_init_and_add(&target->kobj,  in damon_sysfs_targets_add_dirs()
    344  err = damon_sysfs_target_add_dirs(target);  in damon_sysfs_targets_add_dirs()
    348  targets_arr[i] = target;  in damon_sysfs_targets_add_dirs()
    [all …]
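
The pid hits at lines 251 and 259 are the standard sysfs show/store pair for a target's pid attribute. A sketch of the two callbacks, assuming the struct damon_sysfs_target layout implied by the hits (the container_of() plumbing is an assumption):

    static ssize_t pid_target_show(struct kobject *kobj,
                    struct kobj_attribute *attr, char *buf)
    {
            struct damon_sysfs_target *target = container_of(kobj,
                            struct damon_sysfs_target, kobj);

            return sysfs_emit(buf, "%d\n", target->pid);    /* line 251 */
    }

    static ssize_t pid_target_store(struct kobject *kobj,
                    struct kobj_attribute *attr, const char *buf, size_t count)
    {
            struct damon_sysfs_target *target = container_of(kobj,
                            struct damon_sysfs_target, kobj);
            int err = kstrtoint(buf, 0, &target->pid);      /* line 259 */

            return err ? err : count;
    }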

vaddr.c
    683  unsigned int target;  in damos_va_migrate_dests_add() local
    707  target = ilx % weight_total;  in damos_va_migrate_dests_add()
    709  if (target < dests->weight_arr[i])  in damos_va_migrate_dests_add()
    711  target -= dests->weight_arr[i];  in damos_va_migrate_dests_add()
    818  static unsigned long damos_madvise(struct damon_target *target,  in damos_madvise() argument
    824  static unsigned long damos_madvise(struct damon_target *target,  in damos_madvise() argument
    832  mm = damon_get_mm(target);  in damos_madvise()
    843  static unsigned long damos_va_migrate(struct damon_target *target,  in damos_va_migrate() argument
    873  mm = damon_get_mm(target);  in damos_va_migrate()
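
Lines 707-711 are a classic weighted pick: reduce an interleave index modulo the total weight, then walk the weight array subtracting until the remainder falls inside one bucket. A self-contained model of the technique (plain C with hypothetical names, not the kernel code itself):

    #include <stddef.h>

    /* Pick the bucket that interleave index ilx lands in, where bucket i
     * should receive weight_arr[i] out of every weight_total picks.
     */
    static size_t weighted_pick(const unsigned int *weight_arr, size_t nr,
                                unsigned long ilx)
    {
            unsigned long weight_total = 0, target;
            size_t i;

            for (i = 0; i < nr; i++)
                    weight_total += weight_arr[i];

            target = ilx % weight_total;            /* cf. line 707 */
            for (i = 0; i < nr; i++) {
                    if (target < weight_arr[i])     /* cf. line 709 */
                            break;
                    target -= weight_arr[i];        /* cf. line 711 */
            }
            return i;
    }

With weights {3, 1}, picks 0-2 of every four land in bucket 0 and pick 3 in bucket 1, so the long-run ratio matches the weights exactly.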

reclaim.c
    154  static struct damon_target *target;  variable
    330  int err = damon_modules_new_paddr_ctx_target(&ctx, &target);  in damon_reclaim_init()

lru_sort.c
    146  static struct damon_target *target;  variable
    326  int err = damon_modules_new_paddr_ctx_target(&ctx, &target);  in damon_lru_sort_init()
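
Both file-scope hits (reclaim.c line 154, lru_sort.c line 146) exist so each module can keep the context/target pair returned by the shared helper at init time. A sketch of the call site, assuming the usual module_init() error handling:

    static struct damon_ctx *ctx;
    static struct damon_target *target;

    static int __init damon_reclaim_init(void)
    {
            int err = damon_modules_new_paddr_ctx_target(&ctx, &target);

            if (err)
                    return err;
            /* module-specific scheme and tuning setup would follow */
            return 0;
    }
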
/mm/
memory-tiers.c
    332  int target;  in next_demotion_node() local
    359  target = node_random(&nd->preferred);  in next_demotion_node()
    362  return target;  in next_demotion_node()
    418  int target = NUMA_NO_NODE, node;  in establish_demotion_targets() local
    455  target = find_next_best_node(node, &tier_nodes);  in establish_demotion_targets()
    456  if (target == NUMA_NO_NODE)  in establish_demotion_targets()
    459  distance = node_distance(node, target);  in establish_demotion_targets()
    462  node_set(target, nd->preferred);  in establish_demotion_targets()
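
These hits are two halves of one mechanism: establish_demotion_targets() collects the nearest lower-tier nodes into each node's preferred mask, and next_demotion_node() later spreads demotions by picking a random member of that mask. A heavily condensed sketch (the loop structure and best_distance bookkeeping are assumptions; the real function also handles tier iteration and locking):

            /* establish_demotion_targets(), per source node */
            best_distance = -1;
            do {
                    target = find_next_best_node(node, &tier_nodes); /* 455 */
                    if (target == NUMA_NO_NODE)                      /* 456 */
                            break;
                    distance = node_distance(node, target);          /* 459 */
                    if (best_distance == -1 || distance == best_distance)
                            best_distance = distance;
                    else
                            break;  /* farther nodes are not preferred */
                    node_set(target, nd->preferred);                 /* 462 */
            } while (1);

            /* next_demotion_node(): randomize among equally-near targets */
            target = node_random(&nd->preferred);                    /* 359 */
            return target;                                           /* 362 */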

vma.c
    740  vma = vmg->target;  in commit_merge()
    901  vmg->target = next;  in vma_merge_existing_range()
    906  vmg->target = prev;  in vma_merge_existing_range()
    978  return vmg->target;  in vma_merge_existing_range()
    1051  VM_WARN_ON_VMG(vmg->target, vmg);  in vma_merge_new_range()
    1067  vmg->target = next;  in vma_merge_new_range()
    1073  vmg->target = prev;  in vma_merge_new_range()
    1098  return vmg->target;  in vma_merge_new_range()
    1124  struct vm_area_struct *target = vmg->target;  in vma_expand() local
    1127  VM_WARN_ON_VMG(!target, vmg);  in vma_expand()
    [all …]
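
The vma.c hits share one convention: the merge helpers decide which existing VMA will absorb the range under consideration, record it in vmg->target, and commit_merge()/vma_expand() then operate only on that VMA. In pseudo-C (merge_left/merge_right are hypothetical stand-ins for the real compatibility and adjacency checks):

            if (merge_left)
                    vmg->target = prev;     /* prev expands forward (906, 1073) */
            else if (merge_right)
                    vmg->target = next;     /* next expands backward (901, 1067) */
            else
                    return NULL;            /* no merge; caller maps a new VMA */

            return vmg->target;             /* cf. 978, 1098 */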

vma_exec.c
    57  vmg.target = vma;  in relocate_vma_down()

mempolicy.c
    2133  unsigned int target, nr_nodes;  in weighted_interleave_nid() local
    2155  target = ilx % weight_total;  in weighted_interleave_nid()
    2157  while (target) {  in weighted_interleave_nid()
    2160  if (target < weight)  in weighted_interleave_nid()
    2162  target -= weight;  in weighted_interleave_nid()
    2177  unsigned int target, nnodes;  in interleave_nid() local
    2184  target = ilx % nnodes;  in interleave_nid()
    2186  for (i = 0; i < target; i++)  in interleave_nid()
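
weighted_interleave_nid() repeats the subtract-the-weights reduction already shown for vaddr.c above, while interleave_nid() is the unweighted variant: reduce modulo the node count, then advance to the target-th set node. A self-contained model of that walk over a 64-bit mask (hypothetical helper, not the kernel nodemask API):

    #include <stdint.h>

    /* Return the (ilx % popcount)-th set bit of a nonempty "nodemask". */
    static int nth_interleave_node(uint64_t nodes, unsigned long ilx)
    {
            unsigned int target = ilx % __builtin_popcountll(nodes);
            int nid = -1;
            unsigned int i;

            for (i = 0; i <= target; i++) {         /* cf. line 2186 */
                    nid = __builtin_ctzll(nodes);   /* next set node id */
                    nodes &= nodes - 1;             /* clear it, move on */
            }
            return nid;
    }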

memcontrol-v1.c
    528  enum mem_cgroup_events_target target)  in memcg1_event_ratelimit() argument
    533  next = __this_cpu_read(memcg->events_percpu->targets[target]);  in memcg1_event_ratelimit()
    536  switch (target) {  in memcg1_event_ratelimit()
    546  __this_cpu_write(memcg->events_percpu->targets[target], next);  in memcg1_event_ratelimit()
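
memcg1_event_ratelimit() is a per-CPU threshold scheme: each event target keeps a precomputed "next" counter value, and the expensive work runs only when the running event count passes it. A single-threaded model of the technique (userspace C; the threshold constant is an assumed stand-in for the per-target values chosen in the switch at line 536):

    #include <stdbool.h>

    #define EVENTS_TARGET 1024      /* assumed threshold granularity */

    static unsigned long nr_events;  /* models the per-CPU event counter */
    static unsigned long next_event; /* models targets[target] */

    /* Returns true roughly once per EVENTS_TARGET events, cf. 533/546. */
    static bool event_ratelimit(void)
    {
            nr_events++;
            if ((long)(next_event - nr_events) < 0) {
                    next_event = nr_events + EVENTS_TARGET;
                    return true;    /* caller does the expensive work */
            }
            return false;
    }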

debug.c
    258  vmg->prev, vmg->middle, vmg->next, vmg->target,  in dump_vmg()

vma.h
    88  struct vm_area_struct *target;  member

Kconfig
    241  Many kernel heap attacks try to target slab cache metadata and
    254  target object. To avoid sharing these allocation buckets,

page_alloc.c
    7228  struct page *target, int low, int high,  in break_down_buddy_pages() argument
    7238  if (target >= &page[size]) {  in break_down_buddy_pages()
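
break_down_buddy_pages() walks an order-high buddy block down to order low, at each step keeping the half that contains the target page and freeing the other half; the "target >= &page[size]" test at line 7238 is that direction choice. A userspace model using page indexes instead of struct page pointers (free_block() is a hypothetical stub):

    #include <stdio.h>

    static void free_block(unsigned long start, int order)
    {
            printf("free block at %lu, order %d\n", start, order);
    }

    /* Split [page, page + 2^high) down to order low around target. */
    static void break_down(unsigned long page, unsigned long target,
                           int low, int high)
    {
            unsigned long size = 1UL << high;

            while (high > low) {
                    high--;
                    size >>= 1;
                    if (target >= page + size) {    /* cf. line 7238 */
                            free_block(page, high); /* lower half goes back */
                            page += size;           /* descend into upper half */
                    } else {
                            free_block(page + size, high);
                    }
            }
    }

For example, break_down(0, 5, 0, 3) frees the order-2 block at 0, the order-1 block at 6, and the order-0 page at 4, leaving page 5 isolated at order 0.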

Completed in 60 milliseconds