Lines matching references to va in mm/vmalloc.c (the vmap_area allocator: augmented free-area rbtree helpers, allocation and lazy-purge paths, and the /proc reporting code)
763 va_size(struct vmap_area *va) in va_size() argument
765 return (va->va_end - va->va_start); in va_size()
771 struct vmap_area *va; in get_subtree_max_size() local
773 va = rb_entry_safe(node, struct vmap_area, rb_node); in get_subtree_max_size()
774 return va ? va->subtree_max_size : 0; in get_subtree_max_size()
781 compute_subtree_max_size(struct vmap_area *va) in compute_subtree_max_size() argument
783 return max3(va_size(va), in compute_subtree_max_size()
784 get_subtree_max_size(va->rb_node.rb_left), in compute_subtree_max_size()
785 get_subtree_max_size(va->rb_node.rb_right)); in compute_subtree_max_size()
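The three helpers above are the augmented part of the free-area rbtree: every node caches the largest free-range size found anywhere in its subtree, so a search can skip subtrees that cannot satisfy a request. A minimal user-space sketch of that invariant, with illustrative names rather than the kernel types:

	#include <stddef.h>

	/* Toy stand-in for struct vmap_area: a free range plus rbtree-style children. */
	struct area {
		unsigned long start, end;	/* free range is [start, end) */
		unsigned long subtree_max;	/* largest range size in this subtree */
		struct area *left, *right;
	};

	static unsigned long area_size(const struct area *a)
	{
		return a->end - a->start;
	}

	/* A missing child contributes 0, mirroring get_subtree_max_size(). */
	static unsigned long child_max(const struct area *a)
	{
		return a ? a->subtree_max : 0;
	}

	/* Recompute one node's cached value, as compute_subtree_max_size() does. */
	static unsigned long recompute_subtree_max(const struct area *a)
	{
		unsigned long m = area_size(a);

		if (child_max(a->left) > m)
			m = child_max(a->left);
		if (child_max(a->right) > m)
			m = child_max(a->right);
		return m;
	}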
804 struct vmap_area *va = NULL; in find_vmap_area_exceed_addr() local
812 va = tmp; in find_vmap_area_exceed_addr()
821 return va; in find_vmap_area_exceed_addr()
829 struct vmap_area *va; in __find_vmap_area() local
831 va = rb_entry(n, struct vmap_area, rb_node); in __find_vmap_area()
832 if (addr < va->va_start) in __find_vmap_area()
834 else if (addr >= va->va_end) in __find_vmap_area()
837 return va; in __find_vmap_area()
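__find_vmap_area() above is a plain binary-search-tree walk keyed on the address range. The same lookup over the toy struct area from the previous sketch (illustrative only):

	/* Go left below the range, right above it, stop when addr falls inside. */
	static struct area *find_area(struct area *root, unsigned long addr)
	{
		struct area *a = root;

		while (a) {
			if (addr < a->start)
				a = a->left;
			else if (addr >= a->end)
				a = a->right;
			else
				return a;	/* addr is within [start, end) */
		}
		return NULL;
	}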
852 find_va_links(struct vmap_area *va, in find_va_links() argument
882 if (va->va_start < tmp_va->va_end && in find_va_links()
883 va->va_end <= tmp_va->va_start) in find_va_links()
885 else if (va->va_end > tmp_va->va_start && in find_va_links()
886 va->va_start >= tmp_va->va_end) in find_va_links()
890 va->va_start, va->va_end, tmp_va->va_start, tmp_va->va_end); in find_va_links()
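find_va_links() decides the descent direction by checking whether the new area lies entirely below or entirely above the node; anything else is an overlap, which the kernel reports with the WARN above and refuses to link. For non-empty ranges the two-sided tests in the snippet reduce to the one-sided checks in this scalar sketch (hypothetical helper name):

	/* -1: entirely below the node, descend left.
	 * +1: entirely above the node, descend right.
	 *  0: ranges overlap, treated as a vmalloc bug. */
	static int area_order(unsigned long new_start, unsigned long new_end,
			      unsigned long node_start, unsigned long node_end)
	{
		if (new_end <= node_start)
			return -1;
		if (new_start >= node_end)
			return 1;
		return 0;
	}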
919 link_va(struct vmap_area *va, struct rb_root *root, in link_va() argument
933 rb_link_node(&va->rb_node, parent, link); in link_va()
946 rb_insert_augmented(&va->rb_node, in link_va()
948 va->subtree_max_size = 0; in link_va()
950 rb_insert_color(&va->rb_node, root); in link_va()
954 list_add(&va->list, head); in link_va()
958 unlink_va(struct vmap_area *va, struct rb_root *root) in unlink_va() argument
960 if (WARN_ON(RB_EMPTY_NODE(&va->rb_node))) in unlink_va()
964 rb_erase_augmented(&va->rb_node, in unlink_va()
967 rb_erase(&va->rb_node, root); in unlink_va()
969 list_del(&va->list); in unlink_va()
970 RB_CLEAR_NODE(&va->rb_node); in unlink_va()
977 struct vmap_area *va; in augment_tree_propagate_check() local
980 list_for_each_entry(va, &free_vmap_area_list, list) { in augment_tree_propagate_check()
981 computed_size = compute_subtree_max_size(va); in augment_tree_propagate_check()
982 if (computed_size != va->subtree_max_size) in augment_tree_propagate_check()
984 va_size(va), va->subtree_max_size); in augment_tree_propagate_check()
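augment_tree_propagate_check() is a debug-only consistency pass: it recomputes every node's subtree maximum and compares it with the cached field. The same idea over the toy tree, reusing struct area and recompute_subtree_max() from the first sketch (illustrative name):

	/* Returns 0 when every cached subtree_max matches the recomputed value. */
	static int check_subtree_max(const struct area *a)
	{
		if (!a)
			return 0;
		if (a->subtree_max != recompute_subtree_max(a))
			return -1;
		if (check_subtree_max(a->left))
			return -1;
		return check_subtree_max(a->right);
	}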
1017 augment_tree_propagate_from(struct vmap_area *va) in augment_tree_propagate_from() argument
1024 free_vmap_area_rb_augment_cb_propagate(&va->rb_node, NULL); in augment_tree_propagate_from()
1032 insert_vmap_area(struct vmap_area *va, in insert_vmap_area() argument
1038 link = find_va_links(va, root, NULL, &parent); in insert_vmap_area()
1040 link_va(va, root, parent, link, head); in insert_vmap_area()
1044 insert_vmap_area_augment(struct vmap_area *va, in insert_vmap_area_augment() argument
1052 link = find_va_links(va, NULL, from, &parent); in insert_vmap_area_augment()
1054 link = find_va_links(va, root, NULL, &parent); in insert_vmap_area_augment()
1057 link_va(va, root, parent, link, head); in insert_vmap_area_augment()
1058 augment_tree_propagate_from(va); in insert_vmap_area_augment()
1074 merge_or_add_vmap_area(struct vmap_area *va, in merge_or_add_vmap_area() argument
1087 link = find_va_links(va, root, NULL, &parent); in merge_or_add_vmap_area()
1107 if (sibling->va_start == va->va_end) { in merge_or_add_vmap_area()
1108 sibling->va_start = va->va_start; in merge_or_add_vmap_area()
1111 kmem_cache_free(vmap_area_cachep, va); in merge_or_add_vmap_area()
1114 va = sibling; in merge_or_add_vmap_area()
1128 if (sibling->va_end == va->va_start) { in merge_or_add_vmap_area()
1137 unlink_va(va, root); in merge_or_add_vmap_area()
1139 sibling->va_end = va->va_end; in merge_or_add_vmap_area()
1142 kmem_cache_free(vmap_area_cachep, va); in merge_or_add_vmap_area()
1145 va = sibling; in merge_or_add_vmap_area()
1152 link_va(va, root, parent, link, head); in merge_or_add_vmap_area()
1154 return va; in merge_or_add_vmap_area()
1158 merge_or_add_vmap_area_augment(struct vmap_area *va, in merge_or_add_vmap_area_augment() argument
1161 va = merge_or_add_vmap_area(va, root, head); in merge_or_add_vmap_area_augment()
1162 if (va) in merge_or_add_vmap_area_augment()
1163 augment_tree_propagate_from(va); in merge_or_add_vmap_area_augment()
1165 return va; in merge_or_add_vmap_area_augment()
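merge_or_add_vmap_area() returns a freed range to the free tree by coalescing it with an exactly adjacent neighbour where possible: grow the next area downwards, or the previous area upwards, and only insert a fresh node when neither side touches. A simplified sketch of that step (illustrative names; the kernel additionally collapses all three into one node when the freed range touches both neighbours, and frees the now-unused vmap_area back to its slab cache):

	struct range { unsigned long start, end; };

	/* Returns 1 when merged into a neighbour, 0 when the caller must insert. */
	static int try_merge(struct range *prev, struct range *next,
			     unsigned long freed_start, unsigned long freed_end)
	{
		if (next && next->start == freed_end) {
			next->start = freed_start;	/* grow next downwards */
			return 1;
		}
		if (prev && prev->end == freed_start) {
			prev->end = freed_end;		/* grow prev upwards */
			return 1;
		}
		return 0;
	}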
1169 is_within_this_va(struct vmap_area *va, unsigned long size, in is_within_this_va() argument
1174 if (va->va_start > vstart) in is_within_this_va()
1175 nva_start_addr = ALIGN(va->va_start, align); in is_within_this_va()
1184 return (nva_start_addr + size <= va->va_end); in is_within_this_va()
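is_within_this_va() computes where an aligned request would start inside a candidate free area and checks that it still ends before va_end. A compact sketch of that test (hypothetical names; the kernel also rejects the case where the aligned start wraps below vstart):

	/* Align up; a must be a power of two. */
	#define ALIGN_UP(x, a)	(((x) + ((a) - 1)) & ~((a) - 1))

	static int request_fits(unsigned long area_start, unsigned long area_end,
				unsigned long size, unsigned long align,
				unsigned long vstart)
	{
		unsigned long start = area_start > vstart ? area_start : vstart;

		start = ALIGN_UP(start, align);

		/* Overflow check first, then the upper-bound check. */
		return start + size > start && start + size <= area_end;
	}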
1196 struct vmap_area *va; in find_vmap_lowest_match() local
1203 va = rb_entry(node, struct vmap_area, rb_node); in find_vmap_lowest_match()
1206 vstart < va->va_start) { in find_vmap_lowest_match()
1209 if (is_within_this_va(va, size, align, vstart)) in find_vmap_lowest_match()
1210 return va; in find_vmap_lowest_match()
1229 va = rb_entry(node, struct vmap_area, rb_node); in find_vmap_lowest_match()
1230 if (is_within_this_va(va, size, align, vstart)) in find_vmap_lowest_match()
1231 return va; in find_vmap_lowest_match()
1234 vstart <= va->va_start) { in find_vmap_lowest_match()
1241 vstart = va->va_start + 1; in find_vmap_lowest_match()
1259 struct vmap_area *va; in find_vmap_lowest_linear_match() local
1261 list_for_each_entry(va, &free_vmap_area_list, list) { in find_vmap_lowest_linear_match()
1262 if (!is_within_this_va(va, size, align, vstart)) in find_vmap_lowest_linear_match()
1265 return va; in find_vmap_lowest_linear_match()
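find_vmap_lowest_linear_match() simply walks the address-sorted free list and returns the first area that fits; the kernel uses it as a debug cross-check against the augmented-tree search. The equivalent walk over an address-sorted array, reusing struct range and request_fits() from the sketches above:

	static const struct range *lowest_linear_match(const struct range *free,
						       unsigned long nr,
						       unsigned long size,
						       unsigned long align,
						       unsigned long vstart)
	{
		for (unsigned long i = 0; i < nr; i++) {
			if (request_fits(free[i].start, free[i].end,
					 size, align, vstart))
				return &free[i];
		}
		return 0;
	}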
1299 classify_va_fit_type(struct vmap_area *va, in classify_va_fit_type() argument
1305 if (nva_start_addr < va->va_start || in classify_va_fit_type()
1306 nva_start_addr + size > va->va_end) in classify_va_fit_type()
1310 if (va->va_start == nva_start_addr) { in classify_va_fit_type()
1311 if (va->va_end == nva_start_addr + size) in classify_va_fit_type()
1315 } else if (va->va_end == nva_start_addr + size) { in classify_va_fit_type()
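classify_va_fit_type() answers one question: where does the requested block sit inside the chosen free range? A standalone sketch with descriptive stand-ins for the kernel's fit-type constants:

	enum fit {
		NO_FIT,		/* request not fully inside the free range */
		FULL_FIT,	/* request covers the whole free range */
		LEFT_FIT,	/* request starts at the left edge */
		RIGHT_FIT,	/* request ends at the right edge */
		MIDDLE_FIT,	/* free space remains on both sides */
	};

	static enum fit classify_fit(unsigned long area_start, unsigned long area_end,
				     unsigned long req_start, unsigned long req_size)
	{
		if (req_start < area_start || req_start + req_size > area_end)
			return NO_FIT;

		if (req_start == area_start)
			return req_start + req_size == area_end ? FULL_FIT : LEFT_FIT;
		if (req_start + req_size == area_end)
			return RIGHT_FIT;
		return MIDDLE_FIT;
	}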
1325 adjust_va_to_fit_type(struct vmap_area *va, in adjust_va_to_fit_type() argument
1339 unlink_va(va, &free_vmap_area_root); in adjust_va_to_fit_type()
1340 kmem_cache_free(vmap_area_cachep, va); in adjust_va_to_fit_type()
1349 va->va_start += size; in adjust_va_to_fit_type()
1358 va->va_end = nva_start_addr; in adjust_va_to_fit_type()
1402 lva->va_start = va->va_start; in adjust_va_to_fit_type()
1408 va->va_start = nva_start_addr + size; in adjust_va_to_fit_type()
1414 augment_tree_propagate_from(va); in adjust_va_to_fit_type()
1417 insert_vmap_area_augment(lva, &va->rb_node, in adjust_va_to_fit_type()
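adjust_va_to_fit_type() then carves the request out of the free area: remove the node entirely, shrink it from the left or right edge, or split it into two nodes (the lva in the snippet). A simplified version using struct range and enum fit from the sketches above; the kernel additionally unlinks and frees the node on a full fit and re-propagates the cached subtree sizes afterwards:

	/* On MIDDLE_FIT the left remainder is written to *split and the original
	 * range keeps the right remainder; the caller owns inserting *split. */
	static int carve_request(struct range *area, struct range *split,
				 unsigned long req_start, unsigned long req_size,
				 enum fit type)
	{
		switch (type) {
		case FULL_FIT:
			area->start = area->end = 0;	/* kernel: unlink and free the node */
			return 0;
		case LEFT_FIT:
			area->start += req_size;
			return 0;
		case RIGHT_FIT:
			area->end = req_start;
			return 0;
		case MIDDLE_FIT:
			split->start = area->start;		/* left remainder */
			split->end = req_start;
			area->start = req_start + req_size;	/* right remainder */
			return 0;
		default:
			return -1;
		}
	}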
1433 struct vmap_area *va; in __alloc_vmap_area() local
1437 va = find_vmap_lowest_match(size, align, vstart); in __alloc_vmap_area()
1438 if (unlikely(!va)) in __alloc_vmap_area()
1441 if (va->va_start > vstart) in __alloc_vmap_area()
1442 nva_start_addr = ALIGN(va->va_start, align); in __alloc_vmap_area()
1451 type = classify_va_fit_type(va, nva_start_addr, size); in __alloc_vmap_area()
1456 ret = adjust_va_to_fit_type(va, nva_start_addr, size, type); in __alloc_vmap_area()
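The __alloc_vmap_area() lines above show the allocation fast path: find the lowest free area that can hold the request, align the start, classify the fit, and carve the block out. A toy end-to-end run of the sketches above with hypothetical values and a single free range:

	#include <stdio.h>

	int main(void)
	{
		struct range area = { 0x1000, 0x9000 };	/* one 32 KiB free range */
		struct range split = { 0, 0 };
		unsigned long size = 0x2000, align = 0x4000;
		unsigned long start = ALIGN_UP(area.start, align);
		enum fit type = classify_fit(area.start, area.end, start, size);

		if (type == NO_FIT || carve_request(&area, &split, start, size, type))
			return 1;

		/* Prints: allocated [0x4000, 0x6000), with free remainders
		 * [0x1000, 0x4000) and [0x6000, 0x9000). */
		printf("allocated [%#lx, %#lx), free left [%#lx, %#lx) and [%#lx, %#lx)\n",
		       start, start + size,
		       split.start, split.end, area.start, area.end);
		return 0;
	}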
1470 static void free_vmap_area(struct vmap_area *va) in free_vmap_area() argument
1476 unlink_va(va, &vmap_area_root); in free_vmap_area()
1483 merge_or_add_vmap_area_augment(va, &free_vmap_area_root, &free_vmap_area_list); in free_vmap_area()
1490 struct vmap_area *va = NULL; in preload_this_cpu_lock() local
1502 va = kmem_cache_alloc_node(vmap_area_cachep, gfp_mask, node); in preload_this_cpu_lock()
1506 if (va && __this_cpu_cmpxchg(ne_fit_preload_node, NULL, va)) in preload_this_cpu_lock()
1507 kmem_cache_free(vmap_area_cachep, va); in preload_this_cpu_lock()
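preload_this_cpu_lock() allocates a spare vmap_area with the lock dropped and parks it in a per-CPU slot with a cmpxchg, freeing the extra object if the slot was already filled. A user-space analogue of that pattern with C11 atomics; the single global slot and the names are illustrative (the kernel keeps one slot per CPU):

	#include <stdatomic.h>
	#include <stdlib.h>

	static _Atomic(void *) spare_slot;

	static void preload_spare(size_t objsize)
	{
		void *obj;
		void *expected = NULL;

		if (atomic_load(&spare_slot))		/* already preloaded */
			return;

		obj = malloc(objsize);
		/* Keep the object only if the slot is still empty; otherwise drop it. */
		if (obj && !atomic_compare_exchange_strong(&spare_slot, &expected, obj))
			free(obj);
	}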
1519 struct vmap_area *va; in alloc_vmap_area() local
1535 va = kmem_cache_alloc_node(vmap_area_cachep, gfp_mask, node); in alloc_vmap_area()
1536 if (unlikely(!va)) in alloc_vmap_area()
1543 kmemleak_scan_area(&va->rb_node, SIZE_MAX, gfp_mask); in alloc_vmap_area()
1557 va->va_start = addr; in alloc_vmap_area()
1558 va->va_end = addr + size; in alloc_vmap_area()
1559 va->vm = NULL; in alloc_vmap_area()
1562 insert_vmap_area(va, &vmap_area_root, &vmap_area_list); in alloc_vmap_area()
1565 BUG_ON(!IS_ALIGNED(va->va_start, align)); in alloc_vmap_area()
1566 BUG_ON(va->va_start < vstart); in alloc_vmap_area()
1567 BUG_ON(va->va_end > vend); in alloc_vmap_area()
1571 free_vmap_area(va); in alloc_vmap_area()
1575 return va; in alloc_vmap_area()
1596 kmem_cache_free(vmap_area_cachep, va); in alloc_vmap_area()
1667 struct vmap_area *va, *n_va; in __purge_vmap_area_lazy() local
1691 list_for_each_entry_safe(va, n_va, &local_pure_list, list) { in __purge_vmap_area_lazy()
1692 unsigned long nr = (va->va_end - va->va_start) >> PAGE_SHIFT; in __purge_vmap_area_lazy()
1693 unsigned long orig_start = va->va_start; in __purge_vmap_area_lazy()
1694 unsigned long orig_end = va->va_end; in __purge_vmap_area_lazy()
1701 va = merge_or_add_vmap_area_augment(va, &free_vmap_area_root, in __purge_vmap_area_lazy()
1704 if (!va) in __purge_vmap_area_lazy()
1709 va->va_start, va->va_end); in __purge_vmap_area_lazy()
1748 static void free_vmap_area_noflush(struct vmap_area *va) in free_vmap_area_noflush() argument
1753 unlink_va(va, &vmap_area_root); in free_vmap_area_noflush()
1756 nr_lazy = atomic_long_add_return((va->va_end - va->va_start) >> in free_vmap_area_noflush()
1763 merge_or_add_vmap_area(va, in free_vmap_area_noflush()
1775 static void free_unmap_vmap_area(struct vmap_area *va) in free_unmap_vmap_area() argument
1777 flush_cache_vunmap(va->va_start, va->va_end); in free_unmap_vmap_area()
1778 vunmap_range_noflush(va->va_start, va->va_end); in free_unmap_vmap_area()
1780 flush_tlb_kernel_range(va->va_start, va->va_end); in free_unmap_vmap_area()
1782 free_vmap_area_noflush(va); in free_unmap_vmap_area()
1787 struct vmap_area *va; in find_vmap_area() local
1790 va = __find_vmap_area(addr); in find_vmap_area()
1793 return va; in find_vmap_area()
1833 struct vmap_area *va; member
1886 struct vmap_area *va; in new_vmap_block() local
1898 va = alloc_vmap_area(VMAP_BLOCK_SIZE, VMAP_BLOCK_SIZE, in new_vmap_block()
1901 if (IS_ERR(va)) { in new_vmap_block()
1903 return ERR_CAST(va); in new_vmap_block()
1906 vaddr = vmap_block_vaddr(va->va_start, 0); in new_vmap_block()
1908 vb->va = va; in new_vmap_block()
1917 vb_idx = addr_to_vb_idx(va->va_start); in new_vmap_block()
1921 free_vmap_area(va); in new_vmap_block()
1938 tmp = xa_erase(&vmap_blocks, addr_to_vb_idx(vb->va->va_start)); in free_vmap_block()
1941 free_vmap_area_noflush(vb->va); in free_vmap_block()
2019 vaddr = vmap_block_vaddr(vb->va->va_start, pages_off); in vb_alloc()
2093 unsigned long va_start = vb->va->va_start; in _vm_unmap_aliases()
2147 struct vmap_area *va; in vm_unmap_ram() local
2163 va = find_vmap_area(addr); in vm_unmap_ram()
2164 BUG_ON(!va); in vm_unmap_ram()
2165 debug_check_no_locks_freed((void *)va->va_start, in vm_unmap_ram()
2166 (va->va_end - va->va_start)); in vm_unmap_ram()
2167 free_unmap_vmap_area(va); in vm_unmap_ram()
2197 struct vmap_area *va; in vm_map_ram() local
2198 va = alloc_vmap_area(size, PAGE_SIZE, in vm_map_ram()
2200 if (IS_ERR(va)) in vm_map_ram()
2203 addr = va->va_start; in vm_map_ram()
2340 struct vmap_area *va; in vmalloc_init() local
2363 va = kmem_cache_zalloc(vmap_area_cachep, GFP_NOWAIT); in vmalloc_init()
2364 if (WARN_ON_ONCE(!va)) in vmalloc_init()
2367 va->va_start = (unsigned long)tmp->addr; in vmalloc_init()
2368 va->va_end = va->va_start + tmp->size; in vmalloc_init()
2369 va->vm = tmp; in vmalloc_init()
2370 insert_vmap_area(va, &vmap_area_root, &vmap_area_list); in vmalloc_init()
2381 struct vmap_area *va, unsigned long flags, const void *caller) in setup_vmalloc_vm_locked() argument
2384 vm->addr = (void *)va->va_start; in setup_vmalloc_vm_locked()
2385 vm->size = va->va_end - va->va_start; in setup_vmalloc_vm_locked()
2387 va->vm = vm; in setup_vmalloc_vm_locked()
2390 static void setup_vmalloc_vm(struct vm_struct *vm, struct vmap_area *va, in setup_vmalloc_vm() argument
2394 setup_vmalloc_vm_locked(vm, va, flags, caller); in setup_vmalloc_vm()
2414 struct vmap_area *va; in __get_vm_area_node() local
2434 va = alloc_vmap_area(size, align, start, end, node, gfp_mask); in __get_vm_area_node()
2435 if (IS_ERR(va)) { in __get_vm_area_node()
2440 kasan_unpoison_vmalloc((void *)va->va_start, requested_size); in __get_vm_area_node()
2442 setup_vmalloc_vm(area, va, flags, caller); in __get_vm_area_node()
2494 struct vmap_area *va; in find_vm_area() local
2496 va = find_vmap_area((unsigned long)addr); in find_vm_area()
2497 if (!va) in find_vm_area()
2500 return va->vm; in find_vm_area()
2515 struct vmap_area *va; in remove_vm_area() local
2520 va = __find_vmap_area((unsigned long)addr); in remove_vm_area()
2521 if (va && va->vm) { in remove_vm_area()
2522 struct vm_struct *vm = va->vm; in remove_vm_area()
2524 va->vm = NULL; in remove_vm_area()
2528 free_unmap_vmap_area(va); in remove_vm_area()
3358 struct vmap_area *va; in vread() local
3369 va = find_vmap_area_exceed_addr((unsigned long)addr); in vread()
3370 if (!va) in vread()
3374 if ((unsigned long)addr + count <= va->va_start) in vread()
3377 list_for_each_entry_from(va, &vmap_area_list, list) { in vread()
3381 if (!va->vm) in vread()
3384 vm = va->vm; in vread()
3532 struct vmap_area *va, *tmp; in pvm_find_va_enclose_addr() local
3536 va = NULL; in pvm_find_va_enclose_addr()
3541 va = tmp; in pvm_find_va_enclose_addr()
3551 return va; in pvm_find_va_enclose_addr()
3565 pvm_determine_end_from_reverse(struct vmap_area **va, unsigned long align) in pvm_determine_end_from_reverse() argument
3570 if (likely(*va)) { in pvm_determine_end_from_reverse()
3571 list_for_each_entry_from_reverse((*va), in pvm_determine_end_from_reverse()
3573 addr = min((*va)->va_end & ~(align - 1), vmalloc_end); in pvm_determine_end_from_reverse()
3574 if ((*va)->va_start < addr) in pvm_determine_end_from_reverse()
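pvm_determine_end_from_reverse() rounds candidate end addresses down to the requested alignment with the va_end & ~(align - 1) expression above; the same pattern as a tiny helper (illustrative name, align must be a power of two):

	static unsigned long align_down(unsigned long addr, unsigned long align)
	{
		return addr & ~(align - 1);
	}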
3612 struct vmap_area **vas, *va; in pcpu_get_vm_areas() local
3666 va = pvm_find_va_enclose_addr(vmalloc_end); in pcpu_get_vm_areas()
3667 base = pvm_determine_end_from_reverse(&va, align) - end; in pcpu_get_vm_areas()
3680 if (va == NULL) in pcpu_get_vm_areas()
3687 if (base + end > va->va_end) { in pcpu_get_vm_areas()
3688 base = pvm_determine_end_from_reverse(&va, align) - end; in pcpu_get_vm_areas()
3696 if (base + start < va->va_start) { in pcpu_get_vm_areas()
3697 va = node_to_va(rb_prev(&va->rb_node)); in pcpu_get_vm_areas()
3698 base = pvm_determine_end_from_reverse(&va, align) - end; in pcpu_get_vm_areas()
3713 va = pvm_find_va_enclose_addr(base + end); in pcpu_get_vm_areas()
3723 va = pvm_find_va_enclose_addr(start); in pcpu_get_vm_areas()
3724 if (WARN_ON_ONCE(va == NULL)) in pcpu_get_vm_areas()
3728 type = classify_va_fit_type(va, start, size); in pcpu_get_vm_areas()
3733 ret = adjust_va_to_fit_type(va, start, size, type); in pcpu_get_vm_areas()
3738 va = vas[area]; in pcpu_get_vm_areas()
3739 va->va_start = start; in pcpu_get_vm_areas()
3740 va->va_end = start + size; in pcpu_get_vm_areas()
3777 va = merge_or_add_vmap_area_augment(vas[area], &free_vmap_area_root, in pcpu_get_vm_areas()
3779 if (va) in pcpu_get_vm_areas()
3781 va->va_start, va->va_end); in pcpu_get_vm_areas()
3827 va = merge_or_add_vmap_area_augment(vas[area], &free_vmap_area_root, in pcpu_get_vm_areas()
3829 if (va) in pcpu_get_vm_areas()
3831 va->va_start, va->va_end); in pcpu_get_vm_areas()
3923 struct vmap_area *va; in show_purge_info() local
3926 list_for_each_entry(va, &purge_vmap_area_list, list) { in show_purge_info()
3928 (void *)va->va_start, (void *)va->va_end, in show_purge_info()
3929 va->va_end - va->va_start); in show_purge_info()
3936 struct vmap_area *va; in s_show() local
3939 va = list_entry(p, struct vmap_area, list); in s_show()
3945 if (!va->vm) { in s_show()
3947 (void *)va->va_start, (void *)va->va_end, in s_show()
3948 va->va_end - va->va_start); in s_show()
3953 v = va->vm; in s_show()
3992 if (list_is_last(&va->list, &vmap_area_list)) in s_show()