Lines Matching refs:vmap_area
845 static DEFINE_PER_CPU(struct vmap_area *, ne_fit_preload_node);
971 va_size(struct vmap_area *va) in va_size()
979 struct vmap_area *va; in get_subtree_max_size()
981 va = rb_entry_safe(node, struct vmap_area, rb_node); in get_subtree_max_size()
986 struct vmap_area, rb_node, unsigned long, subtree_max_size, va_size)
1000 static struct vmap_area *__find_vmap_area(unsigned long addr, struct rb_root *root) in __find_vmap_area()
1007 struct vmap_area *va; in __find_vmap_area()
1009 va = rb_entry(n, struct vmap_area, rb_node); in __find_vmap_area()
1022 static struct vmap_area *
1025 struct vmap_area *va = NULL; in __find_vmap_area_exceed_addr()
1031 struct vmap_area *tmp; in __find_vmap_area_exceed_addr()
1033 tmp = rb_entry(n, struct vmap_area, rb_node); in __find_vmap_area_exceed_addr()
1055 find_vmap_area_exceed_addr_lock(unsigned long addr, struct vmap_area **va) in find_vmap_area_exceed_addr_lock()
1104 find_va_links(struct vmap_area *va, in find_va_links()
1108 struct vmap_area *tmp_va; in find_va_links()
1127 tmp_va = rb_entry(*link, struct vmap_area, rb_node); in find_va_links()
1164 list = &rb_entry(parent, struct vmap_area, rb_node)->list; in get_va_next_sibling()
1169 __link_va(struct vmap_area *va, struct rb_root *root, in __link_va()
1178 head = &rb_entry(parent, struct vmap_area, rb_node)->list; in __link_va()
1209 link_va(struct vmap_area *va, struct rb_root *root, in link_va()
1217 link_va_augment(struct vmap_area *va, struct rb_root *root, in link_va_augment()
1225 __unlink_va(struct vmap_area *va, struct rb_root *root, bool augment) in __unlink_va()
1241 unlink_va(struct vmap_area *va, struct rb_root *root) in unlink_va()
1247 unlink_va_augment(struct vmap_area *va, struct rb_root *root) in unlink_va_augment()
1257 compute_subtree_max_size(struct vmap_area *va) in compute_subtree_max_size()
1267 struct vmap_area *va; in augment_tree_propagate_check()
1307 augment_tree_propagate_from(struct vmap_area *va) in augment_tree_propagate_from()
1322 insert_vmap_area(struct vmap_area *va, in insert_vmap_area()
1334 insert_vmap_area_augment(struct vmap_area *va, in insert_vmap_area_augment()
1363 static __always_inline struct vmap_area *
1364 __merge_or_add_vmap_area(struct vmap_area *va, in __merge_or_add_vmap_area()
1367 struct vmap_area *sibling; in __merge_or_add_vmap_area()
1396 sibling = list_entry(next, struct vmap_area, list); in __merge_or_add_vmap_area()
1417 sibling = list_entry(next->prev, struct vmap_area, list); in __merge_or_add_vmap_area()
1447 static __always_inline struct vmap_area *
1448 merge_or_add_vmap_area(struct vmap_area *va, in merge_or_add_vmap_area()
1454 static __always_inline struct vmap_area *
1455 merge_or_add_vmap_area_augment(struct vmap_area *va, in merge_or_add_vmap_area_augment()
1466 is_within_this_va(struct vmap_area *va, unsigned long size, in is_within_this_va()
1491 static __always_inline struct vmap_area *
1495 struct vmap_area *va; in find_vmap_lowest_match()
1506 va = rb_entry(node, struct vmap_area, rb_node); in find_vmap_lowest_match()
1532 va = rb_entry(node, struct vmap_area, rb_node); in find_vmap_lowest_match()
1558 static struct vmap_area *
1562 struct vmap_area *va; in find_vmap_lowest_linear_match()
1578 struct vmap_area *va_1, *va_2; in find_vmap_lowest_match_check()
1603 classify_va_fit_type(struct vmap_area *va, in classify_va_fit_type()
1630 struct vmap_area *va, unsigned long nva_start_addr, in va_clip()
1633 struct vmap_area *lva = NULL; in va_clip()
1729 va_alloc(struct vmap_area *va, in va_alloc()
1765 struct vmap_area *va; in __alloc_vmap_area()
1797 static void free_vmap_area(struct vmap_area *va) in free_vmap_area()
1819 struct vmap_area *va = NULL, *tmp; in preload_this_cpu_lock()
1852 node_pool_add_va(struct vmap_node *n, struct vmap_area *va) in node_pool_add_va()
1868 static struct vmap_area *
1873 struct vmap_area *va = NULL; in node_pool_del_va()
1883 va = list_first_entry(&vp->head, struct vmap_area, list); in node_pool_del_va()
1910 static struct vmap_area *
1915 struct vmap_area *va; in node_alloc()
1939 struct vmap_area *va, unsigned long flags, const void *caller) in setup_vmalloc_vm()
1952 static struct vmap_area *alloc_vmap_area(unsigned long size, in alloc_vmap_area()
1959 struct vmap_area *va; in alloc_vmap_area()
2119 struct vmap_area *va, *n; in reclaim_list_global()
2136 struct vmap_area *va, *nva; in decay_va_pool_node()
2190 struct vmap_area *va, *n_va; in purge_vmap_node()
2258 struct vmap_area, list)->va_start); in __purge_vmap_area_lazy()
2261 struct vmap_area, list)->va_end); in __purge_vmap_area_lazy()
2331 static void free_vmap_area_noflush(struct vmap_area *va) in free_vmap_area_noflush()
2366 static void free_unmap_vmap_area(struct vmap_area *va) in free_unmap_vmap_area()
2376 struct vmap_area *find_vmap_area(unsigned long addr) in find_vmap_area()
2379 struct vmap_area *va; in find_vmap_area()
2413 static struct vmap_area *find_unlink_vmap_area(unsigned long addr) in find_unlink_vmap_area()
2416 struct vmap_area *va; in find_unlink_vmap_area()
2493 struct vmap_area *va;
2593 struct vmap_area *va; in new_vmap_block()
2919 struct vmap_area *va; in vm_unmap_ram()
2970 struct vmap_area *va; in vm_map_ram()
3093 struct vmap_area *va; in __get_vm_area_node()
3184 struct vmap_area *va; in find_vm_area()
3205 struct vmap_area *va; in remove_vm_area()
4323 struct vmap_area *va; in vread_iter()
4517 static struct vmap_area *node_to_va(struct rb_node *n) in node_to_va()
4519 return rb_entry_safe(n, struct vmap_area, rb_node); in node_to_va()
4531 static struct vmap_area *
4534 struct vmap_area *va, *tmp; in pvm_find_va_enclose_addr()
4541 tmp = rb_entry(n, struct vmap_area, rb_node); in pvm_find_va_enclose_addr()
4567 pvm_determine_end_from_reverse(struct vmap_area **va, unsigned long align) in pvm_determine_end_from_reverse()
4614 struct vmap_area **vas, *va; in pcpu_get_vm_areas()
4869 struct vmap_area *va; in vmalloc_dump_obj()
4927 struct vmap_area *va; in show_purge_info()
4946 struct vmap_area *va; in vmalloc_info_show()
5032 struct vmap_area *free; in vmap_init_free_space()
5155 struct vmap_area *va; in vmalloc_init()
5163 vmap_area_cachep = KMEM_CACHE(vmap_area, SLAB_PANIC); in vmalloc_init()
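
For orientation, the fields the matches above keep touching (va_start, va_end, rb_node, list, subtree_max_size, vm) all belong to struct vmap_area. The sketch below approximates its layout in recent kernels and is not a verbatim copy of the definition in include/linux/vmalloc.h; it also restates the trivial va_size() helper matched at line 971.

    /*
     * Simplified sketch of struct vmap_area -- an approximation of the
     * layout in recent kernels, not the authoritative definition; see
     * include/linux/vmalloc.h for the real one.
     */
    #include <linux/rbtree.h>   /* struct rb_node   */
    #include <linux/list.h>     /* struct list_head */

    struct vmap_area {
            unsigned long va_start;     /* start of the virtual range */
            unsigned long va_end;       /* end of the virtual range   */

            struct rb_node rb_node;     /* node in an address-sorted rbtree */
            struct list_head list;      /* entry in the address-sorted list */

            /*
             * An area lives either in the "free" tree, where the augmented
             * subtree_max_size is maintained (see the callbacks declared at
             * line 986 above), or in the "busy" tree, where it points to its
             * owning vm_struct.
             */
            union {
                    unsigned long subtree_max_size; /* in "free" tree */
                    struct vm_struct *vm;           /* in "busy" tree */
            };
            unsigned long flags;
    };

    /* Helper matched at line 971: size of the range covered by @va. */
    static inline unsigned long va_size(struct vmap_area *va)
    {
            return va->va_end - va->va_start;
    }

The subtree_max_size/vm union reflects that a vmap_area is tracked in exactly one of the two trees at a time, which is why the free-tree code paths above (find_vmap_lowest_match, va_clip, merge_or_add_vmap_area_augment) maintain the augmented size while the busy-tree paths (find_vmap_area, remove_vm_area) resolve the area back to its vm_struct.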