
Searched refs:prev (Results 1 – 25 of 25) sorted by relevance

/mm/
vma.c
62 .prev = (map_)->prev, \
423 return vmg->prev && vmg->prev->vm_end == vmg->start && in can_vma_merge_left()
453 prev = vmg->prev; in can_vma_merge_right()
811 struct vm_area_struct *prev = vmg->prev; in vma_merge_existing_range() local
824 VM_WARN_ON_VMG(prev && start <= prev->vm_start, vmg); in vma_merge_existing_range()
1044 struct vm_area_struct *prev = vmg->prev; in vma_merge_new_range() local
1985 if (prev) in find_mergeable_anon_vma()
1986 anon_vma = reusable_anon_vma(prev, prev, vma); in find_mergeable_anon_vma()
2367 map->prev = vms->prev; in __mmap_prepare()
3089 if (prev) { in expand_downwards()
[all …]
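The vma.c hits above are the VMA-merge adjacency tests: can_vma_merge_left() only considers a merge when a previous VMA exists and ends exactly where the new range starts. A minimal standalone sketch of that shape, using an illustrative range struct (the real check also compares the file, offset, anon_vma and policy of the two VMAs):

#include <stdbool.h>

/* Illustrative stand-in for a VMA: an address range plus flags. */
struct range {
        unsigned long start, end;       /* [start, end) */
        unsigned long flags;
};

/* Merge-with-left is only possible when prev exists, is byte-adjacent,
 * and is compatible with the new mapping. */
bool can_merge_left(const struct range *prev,
                    unsigned long start, unsigned long flags)
{
        return prev && prev->end == start && prev->flags == flags;
}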
interval_tree.c
29 struct vm_area_struct *prev, in vma_interval_tree_insert_after() argument
36 VM_BUG_ON_VMA(vma_start_pgoff(node) != vma_start_pgoff(prev), node); in vma_interval_tree_insert_after()
38 if (!prev->shared.rb.rb_right) { in vma_interval_tree_insert_after()
39 parent = prev; in vma_interval_tree_insert_after()
40 link = &prev->shared.rb.rb_right; in vma_interval_tree_insert_after()
42 parent = rb_entry(prev->shared.rb.rb_right, in vma_interval_tree_insert_after()
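vma_interval_tree_insert_after() links the new node directly behind prev in the underlying rbtree: if prev has no right child the node becomes that right child, otherwise it becomes the left child of the leftmost node in prev's right subtree. A hedged sketch of that ordering rule on a plain binary tree (the augmented-rbtree bookkeeping and rebalancing are omitted):

#include <stddef.h>

struct node {
        struct node *left, *right, *parent;
};

/* Insert "node" as the in-order successor of "prev". */
void insert_after(struct node *prev, struct node *node)
{
        struct node *parent, **link;

        if (!prev->right) {
                /* No right subtree: node slots in directly after prev. */
                parent = prev;
                link = &prev->right;
        } else {
                /* Otherwise it goes before everything in the right subtree. */
                parent = prev->right;
                while (parent->left)
                        parent = parent->left;
                link = &parent->left;
        }
        node->left = node->right = NULL;
        node->parent = parent;
        *link = node;
}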
vma.h
37 struct vm_area_struct *prev; /* vma before the munmap area */ member
84 struct vm_area_struct *prev; member
182 .prev = prev_, \
284 struct vm_area_struct *prev, struct vm_area_struct *next);
289 struct vm_area_struct *prev, struct vm_area_struct *vma,
296 struct vm_area_struct *prev,
305 struct vm_area_struct *prev,
313 struct vm_area_struct *prev,
542 struct vm_area_struct *prev = vma_prev(vmi); in vma_iter_next_rewind() local
552 if (prev) in vma_iter_next_rewind()
[all …]
mmap.c
693 struct vm_area_struct *vma, *prev; in generic_get_unmapped_area() local
708 (!prev || addr >= vm_end_gap(prev))) in generic_get_unmapped_area()
741 struct vm_area_struct *vma, *prev; in generic_get_unmapped_area_topdown() local
759 (!prev || addr >= vm_end_gap(prev))) in generic_get_unmapped_area_topdown()
970 if (!prev) in find_extend_vma_locked()
974 if (prev->vm_flags & VM_LOCKED) in find_extend_vma_locked()
975 populate_vma_page_range(prev, addr, prev->vm_end, NULL); in find_extend_vma_locked()
976 return prev; in find_extend_vma_locked()
1039 if (prev && !vma_expand_up(prev, addr)) { in expand_stack()
1040 vma = prev; in expand_stack()
[all …]
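Both generic_get_unmapped_area() variants accept a hinted address only if it leaves room before the following VMA and sits at or above the end of the previous one, where vm_end_gap() extends that end by the stack guard gap. A simplified standalone test along the same lines (the guard field is a stand-in for vm_end_gap(); overflow checks are omitted):

#include <stdbool.h>

struct range {
        unsigned long start, end;       /* [start, end) */
        unsigned long guard;            /* guard gap required above the range */
};

/* Can [addr, addr + len) be used between prev and next? */
bool addr_fits(unsigned long addr, unsigned long len,
               const struct range *prev, const struct range *next)
{
        if (next && addr + len > next->start)
                return false;           /* would run into the next mapping */
        if (prev && addr < prev->end + prev->guard)
                return false;           /* inside prev or its guard gap */
        return true;
}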
mseal.c
58 struct vm_area_struct *vma, *prev; in mseal_apply() local
64 prev = vma_prev(&vmi); in mseal_apply()
66 prev = vma; in mseal_apply()
72 vma = vma_modify_flags(&vmi, prev, vma, in mseal_apply()
80 prev = vma; in mseal_apply()
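mseal_apply() shows the walk that mlock.c, mprotect.c, madvise.c and userfaultfd.c repeat below: take the VMA in front of the range once, and after every modification let the (possibly merged) result become the new prev, so the next step can merge with its freshly updated left neighbour. A toy userspace analogue over a singly linked list of flagged regions; splitting at the range boundaries, which the kernel also does, is left out, and all names here are illustrative:

#include <stdlib.h>

struct region {
        unsigned long start, end, flags;
        struct region *next;
};

/* Stand-in for vma_modify_flags(): set flags on cur and, if it is now
 * contiguous and identical with prev, fold it into prev. */
static struct region *modify_flags(struct region *prev, struct region *cur,
                                   unsigned long flags)
{
        cur->flags = flags;
        if (prev && prev->end == cur->start && prev->flags == cur->flags) {
                prev->end = cur->end;
                prev->next = cur->next;
                free(cur);
                return prev;
        }
        return cur;
}

/* The shared walk pattern: prev trails the cursor through the range. */
void apply_flags(struct region *prev, struct region *first,
                 unsigned long end, unsigned long flags)
{
        struct region *cur = first;

        while (cur && cur->start < end) {
                cur = modify_flags(prev, cur, flags);
                prev = cur;             /* merged/updated region becomes prev */
                cur = cur->next;
        }
}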
mlock.c
467 struct vm_area_struct **prev, unsigned long start, in mlock_fixup() argument
481 vma = vma_modify_flags(vmi, *prev, vma, start, end, newflags); in mlock_fixup()
510 *prev = vma; in mlock_fixup()
518 struct vm_area_struct *vma, *prev; in apply_vma_lock_flags() local
532 prev = vma_prev(&vmi); in apply_vma_lock_flags()
534 prev = vma; in apply_vma_lock_flags()
551 error = mlock_fixup(&vmi, vma, &prev, nstart, tmp, newflags); in apply_vma_lock_flags()
707 struct vm_area_struct *vma, *prev = NULL; in apply_mlockall_flags() local
734 error = mlock_fixup(&vmi, vma, &prev, vma->vm_start, vma->vm_end, in apply_mlockall_flags()
738 prev = vma; in apply_mlockall_flags()
userfaultfd.c
1931 struct vm_area_struct *prev, in userfaultfd_clear_vma() argument
1950 ret = vma_modify_flags_uffd(vmi, prev, vma, start, end, in userfaultfd_clear_vma()
1973 struct vm_area_struct *prev = vma_prev(&vmi); in userfaultfd_register_range() local
1978 prev = vma; in userfaultfd_register_range()
2001 vma = vma_modify_flags_uffd(&vmi, prev, vma, start, vma_end, in userfaultfd_register_range()
2019 prev = vma; in userfaultfd_register_range()
2044 struct vm_area_struct *vma, *prev; in userfaultfd_release_all() local
2059 prev = NULL; in userfaultfd_release_all()
2065 prev = vma; in userfaultfd_release_all()
2069 vma = userfaultfd_clear_vma(&vmi, prev, vma, in userfaultfd_release_all()
[all …]
madvise.c
79 struct vm_area_struct *prev; member
166 vma = vma_modify_name(&vmi, madv_behavior->prev, vma, in madvise_update_vma()
169 vma = vma_modify_flags(&vmi, madv_behavior->prev, vma, in madvise_update_vma()
1627 struct vm_area_struct *prev, *vma; in madvise_walk_vmas() local
1640 vma = find_vma_prev(mm, range->start, &prev); in madvise_walk_vmas()
1642 prev = vma; in madvise_walk_vmas()
1667 madv_behavior->prev = prev; in madvise_walk_vmas()
1674 prev = NULL; in madvise_walk_vmas()
1679 prev = vma; in madvise_walk_vmas()
debug.c
258 vmg->prev, vmg->middle, vmg->next, vmg->target, in dump_vmg()
279 if (vmg->prev) { in dump_vmg()
281 dump_vma(vmg->prev); in dump_vmg()
mempolicy.c
972 *prev = vma; in mbind_range()
979 *prev = vma; in mbind_range()
987 *prev = vma; in mbind_range()
1416 struct vm_area_struct *vma, *prev; in do_mbind() local
1483 prev = vma_prev(&vmi); in do_mbind()
1689 struct vm_area_struct *vma, *prev; in SYSCALL_DEFINE4() local
1719 prev = vma_prev(&vmi); in SYSCALL_DEFINE4()
1728 prev = vma; in SYSCALL_DEFINE4()
2835 struct rb_node *prev = rb_prev(n); in sp_lookup() local
2836 if (!prev) in sp_lookup()
[all …]
mprotect.c
865 struct vm_area_struct *vma, *prev; in do_mprotect_pkey() local
927 prev = vma_prev(&vmi); in do_mprotect_pkey()
929 prev = vma; in do_mprotect_pkey()
990 error = mprotect_fixup(&vmi, &tlb, vma, &prev, nstart, tmp, newflags); in do_mprotect_pkey()
memcontrol.c
1002 struct mem_cgroup *prev, in mem_cgroup_iter() argument
1031 if (!prev) in mem_cgroup_iter()
1038 pos = prev; in mem_cgroup_iter()
1075 if (!prev) in mem_cgroup_iter()
1082 if (prev && prev != root) in mem_cgroup_iter()
1083 css_put(&prev->css); in mem_cgroup_iter()
1094 struct mem_cgroup *prev) in mem_cgroup_iter_break() argument
1098 if (prev && prev != root) in mem_cgroup_iter_break()
1099 css_put(&prev->css); in mem_cgroup_iter_break()
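mem_cgroup_iter() hands ownership of a css reference back and forth with its caller: the caller passes in the previously returned memcg, the iterator drops that reference (the css_put() above) and returns the next position with a new one, and mem_cgroup_iter_break() releases the reference when a walk stops early. A sketch of the caller-side loop this implies; it is kernel-context code rather than a standalone program, the reclaim cookie is omitted, and should_stop() is a hypothetical placeholder:

struct mem_cgroup *memcg;

/* Walk the whole hierarchy below root; each step consumes the reference
 * on the previous position and takes one on the next. */
for (memcg = mem_cgroup_iter(root, NULL, NULL);
     memcg;
     memcg = mem_cgroup_iter(root, memcg, NULL)) {
        if (should_stop(memcg)) {
                /* Leaving early: drop the reference still held for us. */
                mem_cgroup_iter_break(root, memcg);
                break;
        }
        /* ... use memcg ... */
}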
migrate.c
1335 struct list_head *prev; in migrate_folio_move() local
1338 prev = dst->lru.prev; in migrate_folio_move()
1396 list_add(&dst->lru, prev); in migrate_folio_move()
zsmalloc.c
1093 static bool can_merge(struct size_class *prev, int pages_per_zspage, in can_merge() argument
1096 if (prev->pages_per_zspage == pages_per_zspage && in can_merge()
1097 prev->objs_per_zspage == objs_per_zspage) in can_merge()
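can_merge() collapses neighbouring zsmalloc size classes whose zspages would be laid out identically: same pages per zspage and same objects per zspage means the later class can simply reuse the earlier one. A rough standalone sketch of the idea, with a hypothetical fixed zspage of 4 pages (the real code computes pages_per_zspage per class):

#include <stdbool.h>
#include <stdio.h>

#define PAGE_SIZE        4096UL
#define PAGES_PER_ZSPAGE 4UL    /* assumed fixed here for illustration */

struct size_class {
        unsigned long size;
        unsigned long pages_per_zspage;
        unsigned long objs_per_zspage;
};

/* Two classes can share one pool structure if their zspage layout matches. */
bool can_merge(const struct size_class *prev,
               unsigned long pages_per_zspage, unsigned long objs_per_zspage)
{
        return prev->pages_per_zspage == pages_per_zspage &&
               prev->objs_per_zspage == objs_per_zspage;
}

int main(void)
{
        struct size_class prev = { 0, 0, 0 };
        unsigned long size;

        for (size = 32; size <= 1024; size += 16) {
                unsigned long objs = PAGES_PER_ZSPAGE * PAGE_SIZE / size;

                if (prev.size && can_merge(&prev, PAGES_PER_ZSPAGE, objs))
                        printf("%lu: merged into class of size %lu\n",
                               size, prev.size);
                else
                        prev = (struct size_class){ size, PAGES_PER_ZSPAGE, objs };
        }
        return 0;
}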
mremap.c
1155 struct vm_area_struct *prev = vma_prev(&vmi); in unmap_source_vma() local
1157 vm_flags_set(prev, VM_ACCOUNT); /* Acquires VMA lock. */ in unmap_source_vma()
vmscan.c
188 if ((_folio)->lru.prev != _base) { \
189 struct folio *prev; \
191 prev = lru_to_folio(&(_folio->lru)); \
192 prefetchw(&prev->_field); \
2980 mm_state->head = mm_state->head->prev; in lru_gen_del_mm()
3986 int prev, next; in inc_max_seq() local
4019 prev = lru_gen_from_seq(lrugen->max_seq - 1); in inc_max_seq()
4025 long delta = lrugen->nr_pages[prev][type][zone] - in inc_max_seq()
ksm.c
235 #define STABLE_NODE_DUP_HEAD ((struct list_head *)&migrate_nodes.prev)
376 static unsigned long ewma(unsigned long prev, unsigned long curr) in ewma() argument
378 return ((100 - EWMA_WEIGHT) * prev + EWMA_WEIGHT * curr) / 100; in ewma()
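ewma() in ksm.c is a plain integer exponentially weighted moving average: the newest sample contributes EWMA_WEIGHT percent and the previous value keeps the remainder. A self-contained copy with a small demo loop; the weight of 30 used here is an assumption for illustration, not ksm's actual constant:

#include <stdio.h>

#define EWMA_WEIGHT 30  /* percentage given to the newest sample (assumed) */

static unsigned long ewma(unsigned long prev, unsigned long curr)
{
        return ((100 - EWMA_WEIGHT) * prev + EWMA_WEIGHT * curr) / 100;
}

int main(void)
{
        unsigned long avg = 0;
        unsigned long samples[] = { 100, 100, 40, 40, 40 };

        for (unsigned int i = 0; i < sizeof(samples) / sizeof(samples[0]); i++) {
                avg = ewma(avg, samples[i]);
                printf("sample %lu -> average %lu\n", samples[i], avg);
        }
        return 0;
}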
huge_memory.c
4155 struct folio *folio, *next, *prev = NULL; in deferred_split_scan() local
4222 swap(folio, prev); in deferred_split_scan()
4233 if (prev) in deferred_split_scan()
4234 folio_put(prev); in deferred_split_scan()
cma.c
352 __list_add(&mrp->list, mlp->list.prev, &mlp->list); in list_insert_sorted()
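The single cma.c hit is a sorted insertion: once the node that should follow the new element has been found, __list_add() splices the new element in using that node's ->prev as the left neighbour. A standalone sketch of the same insert-before-position step, with the list primitives written out rather than taken from <linux/list.h>:

#include <stddef.h>

struct list_head {
        struct list_head *prev, *next;
};

/* Splice "new" between two known neighbours, as __list_add() does. */
static void __list_add(struct list_head *new,
                       struct list_head *prev, struct list_head *next)
{
        next->prev = new;
        new->next = next;
        new->prev = prev;
        prev->next = new;
}

/* Insert "new" immediately before "pos": pos->prev is the left neighbour. */
void list_insert_before(struct list_head *new, struct list_head *pos)
{
        __list_add(new, pos->prev, pos);
}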
vmalloc.c
1238 head = head->prev; in __link_va()
1474 if (next->prev != head) { in __merge_or_add_vmap_area()
1475 sibling = list_entry(next->prev, struct vmap_area, list); in __merge_or_add_vmap_area()
swapfile.c
2265 unsigned int prev) in find_next_to_unuse() argument
2276 for (i = prev + 1; i < si->max; i++) { in find_next_to_unuse()
hugetlb.c
635 rg = iter->link.prev; in add_reservation_in_range()
643 add += hugetlb_resv_map_add(resv, iter->link.prev, in add_reservation_in_range()
655 rg = head->prev; in add_reservation_in_range()
/mm/damon/
vaddr.c
125 struct vm_area_struct *vma, *prev = NULL; in __damon_va_three_regions() local
137 if (!prev) { in __damon_va_three_regions()
141 gap = vma->vm_start - prev->vm_end; in __damon_va_three_regions()
145 first_gap.start = prev->vm_end; in __damon_va_three_regions()
148 second_gap.start = prev->vm_end; in __damon_va_three_regions()
152 prev = vma; in __damon_va_three_regions()
169 regions[2].end = ALIGN(prev->vm_end, DAMON_MIN_REGION); in __damon_va_three_regions()
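__damon_va_three_regions() walks the VMAs once with a trailing prev pointer, measuring the gap between each pair of neighbours and remembering the two largest; those gaps then delimit DAMON's three initial monitoring regions. A standalone sketch of the gap-tracking part over a sorted array of ranges (region alignment and the final three-region split are omitted):

#include <stddef.h>

struct range {
        unsigned long start, end;       /* [start, end), sorted, non-overlapping */
};

struct gap {
        unsigned long start, end;
};

static unsigned long gap_size(const struct gap *g)
{
        return g->end - g->start;
}

/* Record the two largest gaps between consecutive ranges. */
void find_two_biggest_gaps(const struct range *r, size_t n,
                           struct gap *first, struct gap *second)
{
        const struct range *prev = NULL;

        *first = *second = (struct gap){ 0, 0 };
        for (size_t i = 0; i < n; i++) {
                if (prev) {
                        struct gap g = { prev->end, r[i].start };

                        if (gap_size(&g) > gap_size(first)) {
                                *second = *first;
                                *first = g;
                        } else if (gap_size(&g) > gap_size(second)) {
                                *second = g;
                        }
                }
                prev = &r[i];
        }
}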
core.c
2212 struct damon_region *r, *prev = NULL, *next; in damon_merge_regions_of() local
2220 if (prev && prev->ar.end == r->ar.start && in damon_merge_regions_of()
2221 abs(prev->nr_accesses - r->nr_accesses) <= thres && in damon_merge_regions_of()
2222 damon_sz_region(prev) + damon_sz_region(r) <= sz_limit) in damon_merge_regions_of()
2223 damon_merge_two_regions(t, prev, r); in damon_merge_regions_of()
2225 prev = r; in damon_merge_regions_of()
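damon_merge_regions_of() folds a region into its predecessor when the two are contiguous, their access counts differ by at most the threshold, and the combined size stays under the limit; otherwise the region becomes the new prev. A standalone sketch of that rule over a singly linked list (the real code also averages the access counts of the merged pair):

#include <stdlib.h>

struct region {
        unsigned long start, end;
        unsigned int nr_accesses;
        struct region *next;
};

static unsigned long region_sz(const struct region *r)
{
        return r->end - r->start;
}

/* Merge r into prev and free r; prev absorbs r's address range. */
static void merge_two(struct region *prev, struct region *r)
{
        prev->end = r->end;
        prev->next = r->next;
        free(r);
}

void merge_regions(struct region *head, unsigned int thres, unsigned long sz_limit)
{
        struct region *prev = NULL, *r, *next;

        for (r = head; r; r = next) {
                next = r->next;
                if (prev && prev->end == r->start &&
                    abs((int)prev->nr_accesses - (int)r->nr_accesses) <= (int)thres &&
                    region_sz(prev) + region_sz(r) <= sz_limit)
                        merge_two(prev, r);
                else
                        prev = r;
        }
}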
/mm/damon/tests/
core-kunit.h
417 KUNIT_EXPECT_PTR_EQ(test, filter->list.prev, &filter->list); in damos_test_new_filter()
