Lines matching refs:gpuvm — cross-reference listing; each entry gives the source line number, the matching line, and the enclosing function, with 'argument'/'local' marking how gpuvm is bound there.
759 __restore_vm_bo_list(struct drm_gpuvm *gpuvm, spinlock_t *lock, in __restore_vm_bo_list() argument
801 __drm_gpuvm_bo_list_add(struct drm_gpuvm *gpuvm, spinlock_t *lock, in __drm_gpuvm_bo_list_add() argument
826 __drm_gpuvm_bo_list_del(struct drm_gpuvm *gpuvm, spinlock_t *lock, in __drm_gpuvm_bo_list_del() argument
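The three helpers above implement drm_gpuvm's internal list handling for the extobj and evict lists: add/del take the per-list spinlock only when the caller passes one (resv-protected VMs rely on the dma-resv lock instead), and __restore_vm_bo_list() splices back entries that the iteration macros drained onto a local list so the spinlock could be dropped while each element was processed. A hedged sketch of the add/del pattern (the example_* names are illustrative, not the kernel helpers):

    static void example_bo_list_add(spinlock_t *lock, struct list_head *entry,
                                    struct list_head *list)
    {
            if (lock)
                    spin_lock(lock);
            if (list_empty(entry))          /* idempotent add */
                    list_add_tail(entry, list);
            if (lock)
                    spin_unlock(lock);
    }

    static void example_bo_list_del(spinlock_t *lock, struct list_head *entry)
    {
            if (lock)
                    spin_lock(lock);
            list_del_init(entry);           /* safe when not on a list */
            if (lock)
                    spin_unlock(lock);
    }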
881 static int __drm_gpuva_insert(struct drm_gpuvm *gpuvm,
894 drm_gpuvm_warn_check_overflow(struct drm_gpuvm *gpuvm, u64 addr, u64 range) in drm_gpuvm_warn_check_overflow() argument
896 return drm_WARN(gpuvm->drm, drm_gpuvm_check_overflow(addr, range), in drm_gpuvm_warn_check_overflow()
901 drm_gpuvm_in_mm_range(struct drm_gpuvm *gpuvm, u64 addr, u64 range) in drm_gpuvm_in_mm_range() argument
904 u64 mm_start = gpuvm->mm_start; in drm_gpuvm_in_mm_range()
905 u64 mm_end = mm_start + gpuvm->mm_range; in drm_gpuvm_in_mm_range()
911 drm_gpuvm_in_kernel_node(struct drm_gpuvm *gpuvm, u64 addr, u64 range) in drm_gpuvm_in_kernel_node() argument
914 u64 kstart = gpuvm->kernel_alloc_node.va.addr; in drm_gpuvm_in_kernel_node()
915 u64 krange = gpuvm->kernel_alloc_node.va.range; in drm_gpuvm_in_kernel_node()
933 drm_gpuvm_range_valid(struct drm_gpuvm *gpuvm, in drm_gpuvm_range_valid() argument
937 drm_gpuvm_in_mm_range(gpuvm, addr, range) && in drm_gpuvm_range_valid()
938 !drm_gpuvm_in_kernel_node(gpuvm, addr, range); in drm_gpuvm_range_valid()
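Taken together, these predicates define what drm_gpuvm_range_valid() accepts: the request must not wrap around U64_MAX, must lie inside [mm_start, mm_start + mm_range), and must not intersect the kernel_alloc_node reserved at init time. A hedged restatement of that logic (example_range_valid is illustrative; the kernel uses the static helpers above):

    static bool example_range_valid(struct drm_gpuvm *gpuvm, u64 addr, u64 range)
    {
            const struct drm_gpuva *kn = &gpuvm->kernel_alloc_node;
            u64 end;

            if (check_add_overflow(addr, range, &end))
                    return false;   /* wraps around U64_MAX */

            if (addr < gpuvm->mm_start ||
                end > gpuvm->mm_start + gpuvm->mm_range)
                    return false;   /* outside the managed area */

            if (kn->va.range &&
                addr < kn->va.addr + kn->va.range && end > kn->va.addr)
                    return false;   /* overlaps the reserved kernel node */

            return true;
    }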
998 drm_gpuvm_init(struct drm_gpuvm *gpuvm, const char *name, in drm_gpuvm_init() argument
1006 gpuvm->rb.tree = RB_ROOT_CACHED; in drm_gpuvm_init()
1007 INIT_LIST_HEAD(&gpuvm->rb.list); in drm_gpuvm_init()
1009 INIT_LIST_HEAD(&gpuvm->extobj.list); in drm_gpuvm_init()
1010 spin_lock_init(&gpuvm->extobj.lock); in drm_gpuvm_init()
1012 INIT_LIST_HEAD(&gpuvm->evict.list); in drm_gpuvm_init()
1013 spin_lock_init(&gpuvm->evict.lock); in drm_gpuvm_init()
1015 kref_init(&gpuvm->kref); in drm_gpuvm_init()
1017 gpuvm->name = name ? name : "unknown"; in drm_gpuvm_init()
1018 gpuvm->flags = flags; in drm_gpuvm_init()
1019 gpuvm->ops = ops; in drm_gpuvm_init()
1020 gpuvm->drm = drm; in drm_gpuvm_init()
1021 gpuvm->r_obj = r_obj; in drm_gpuvm_init()
1025 drm_gpuvm_warn_check_overflow(gpuvm, start_offset, range); in drm_gpuvm_init()
1026 gpuvm->mm_start = start_offset; in drm_gpuvm_init()
1027 gpuvm->mm_range = range; in drm_gpuvm_init()
1029 memset(&gpuvm->kernel_alloc_node, 0, sizeof(struct drm_gpuva)); in drm_gpuvm_init()
1031 gpuvm->kernel_alloc_node.va.addr = reserve_offset; in drm_gpuvm_init()
1032 gpuvm->kernel_alloc_node.va.range = reserve_range; in drm_gpuvm_init()
1034 if (likely(!drm_gpuvm_warn_check_overflow(gpuvm, reserve_offset, in drm_gpuvm_init()
1036 __drm_gpuva_insert(gpuvm, &gpuvm->kernel_alloc_node); in drm_gpuvm_init()
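drm_gpuvm_init() wires up the trees, lists and refcount and, when reserve_range is non-zero, inserts the kernel_alloc_node to keep that window out of reach of regular mappings. A hedged setup sketch; my_vm, my_vm_create and resv_obj are hypothetical driver names, and vm_free is set because drm_gpuvm_free() warns when it is missing (see below):

    struct my_vm {
            struct drm_gpuvm base;
    };

    static void my_vm_free(struct drm_gpuvm *gpuvm)
    {
            kfree(container_of(gpuvm, struct my_vm, base));
    }

    static const struct drm_gpuvm_ops my_gpuvm_ops = {
            .vm_free = my_vm_free,
    };

    int my_vm_create(struct drm_device *drm, struct drm_gem_object *resv_obj,
                     struct my_vm **pvm)
    {
            struct my_vm *vm = kzalloc(sizeof(*vm), GFP_KERNEL);

            if (!vm)
                    return -ENOMEM;

            /* manage [0, 1 << 48); reserve the top 4 KiB for the kernel */
            drm_gpuvm_init(&vm->base, "my-vm", 0, drm, resv_obj,
                           0, 1ULL << 48,
                           (1ULL << 48) - SZ_4K, SZ_4K,
                           &my_gpuvm_ops);
            *pvm = vm;
            return 0;
    }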
1042 drm_gpuvm_fini(struct drm_gpuvm *gpuvm) in drm_gpuvm_fini() argument
1044 gpuvm->name = NULL; in drm_gpuvm_fini()
1046 if (gpuvm->kernel_alloc_node.va.range) in drm_gpuvm_fini()
1047 __drm_gpuva_remove(&gpuvm->kernel_alloc_node); in drm_gpuvm_fini()
1049 drm_WARN(gpuvm->drm, !RB_EMPTY_ROOT(&gpuvm->rb.tree.rb_root), in drm_gpuvm_fini()
1052 drm_WARN(gpuvm->drm, !list_empty(&gpuvm->extobj.list), in drm_gpuvm_fini()
1054 drm_WARN(gpuvm->drm, !list_empty(&gpuvm->evict.list), in drm_gpuvm_fini()
1057 drm_gem_object_put(gpuvm->r_obj); in drm_gpuvm_fini()
1063 struct drm_gpuvm *gpuvm = container_of(kref, struct drm_gpuvm, kref); in drm_gpuvm_free() local
1065 drm_gpuvm_fini(gpuvm); in drm_gpuvm_free()
1067 if (drm_WARN_ON(gpuvm->drm, !gpuvm->ops->vm_free)) in drm_gpuvm_free()
1070 gpuvm->ops->vm_free(gpuvm); in drm_gpuvm_free()
1082 drm_gpuvm_put(struct drm_gpuvm *gpuvm) in drm_gpuvm_put() argument
1084 if (gpuvm) in drm_gpuvm_put()
1085 kref_put(&gpuvm->kref, drm_gpuvm_free); in drm_gpuvm_put()
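Teardown is reference-driven: kref_init() in drm_gpuvm_init() provides the initial reference, drm_gpuva_insert() takes one per mapping (see below), and the final drm_gpuvm_put() runs drm_gpuvm_fini() and then ops->vm_free via drm_gpuvm_free(). A minimal usage fragment:

    struct drm_gpuvm *ref = drm_gpuvm_get(gpuvm);   /* +1, returns gpuvm */

    /* ... use the VM ... */

    drm_gpuvm_put(ref);     /* -1 */
    drm_gpuvm_put(gpuvm);   /* drop the initial reference; may call vm_free */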
1112 drm_gpuvm_prepare_vm(struct drm_gpuvm *gpuvm, in drm_gpuvm_prepare_vm() argument
1116 return exec_prepare_obj(exec, gpuvm->r_obj, num_fences); in drm_gpuvm_prepare_vm()
1121 __drm_gpuvm_prepare_objects(struct drm_gpuvm *gpuvm, in __drm_gpuvm_prepare_objects() argument
1129 for_each_vm_bo_in_list(gpuvm, extobj, &extobjs, vm_bo) { in __drm_gpuvm_prepare_objects()
1136 restore_vm_bo_list(gpuvm, extobj); in __drm_gpuvm_prepare_objects()
1142 drm_gpuvm_prepare_objects_locked(struct drm_gpuvm *gpuvm, in drm_gpuvm_prepare_objects_locked() argument
1149 drm_gpuvm_resv_assert_held(gpuvm); in drm_gpuvm_prepare_objects_locked()
1150 list_for_each_entry(vm_bo, &gpuvm->extobj.list, list.entry.extobj) { in drm_gpuvm_prepare_objects_locked()
1186 drm_gpuvm_prepare_objects(struct drm_gpuvm *gpuvm, in drm_gpuvm_prepare_objects() argument
1190 if (drm_gpuvm_resv_protected(gpuvm)) in drm_gpuvm_prepare_objects()
1191 return drm_gpuvm_prepare_objects_locked(gpuvm, exec, in drm_gpuvm_prepare_objects()
1194 return __drm_gpuvm_prepare_objects(gpuvm, exec, num_fences); in drm_gpuvm_prepare_objects()
1213 drm_gpuvm_prepare_range(struct drm_gpuvm *gpuvm, struct drm_exec *exec, in drm_gpuvm_prepare_range() argument
1220 drm_gpuvm_for_each_va_range(va, gpuvm, addr, end) { in drm_gpuvm_prepare_range()
1249 struct drm_gpuvm *gpuvm = vm_exec->vm; in drm_gpuvm_exec_lock() local
1257 ret = drm_gpuvm_prepare_vm(gpuvm, exec, num_fences); in drm_gpuvm_exec_lock()
1262 ret = drm_gpuvm_prepare_objects(gpuvm, exec, num_fences); in drm_gpuvm_exec_lock()
1341 struct drm_gpuvm *gpuvm = vm_exec->vm; in drm_gpuvm_exec_lock_range() local
1348 ret = drm_gpuvm_prepare_range(gpuvm, exec, addr, range, in drm_gpuvm_exec_lock_range()
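The drm_gpuvm_exec wrappers bundle the steps above: drm_gpuvm_exec_lock() drives a drm_exec retry loop over drm_gpuvm_prepare_vm() and drm_gpuvm_prepare_objects(), so the VM resv and all external object resvs end up locked together. A hedged submission-path sketch, close to the example in the drm_gpuvm kernel-doc (my_vm_lock_and_fence and the fence handling are illustrative):

    static int my_vm_lock_and_fence(struct drm_gpuvm *gpuvm,
                                    struct dma_fence *fence)
    {
            struct drm_gpuvm_exec vm_exec = {
                    .vm = gpuvm,
                    .flags = DRM_EXEC_INTERRUPTIBLE_WAIT,
                    .num_fences = 1,
            };
            int ret;

            ret = drm_gpuvm_exec_lock(&vm_exec);    /* VM resv + all extobjs */
            if (ret)
                    return ret;

            ret = drm_gpuvm_exec_validate(&vm_exec);        /* evicted BOs */
            if (ret)
                    goto unlock;

            /* ... submit the job that produces @fence ... */

            drm_gpuvm_exec_resv_add_fence(&vm_exec, fence,
                                          DMA_RESV_USAGE_BOOKKEEP,
                                          DMA_RESV_USAGE_BOOKKEEP);
    unlock:
            drm_gpuvm_exec_unlock(&vm_exec);
            return ret;
    }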
1364 __drm_gpuvm_validate(struct drm_gpuvm *gpuvm, struct drm_exec *exec) in __drm_gpuvm_validate() argument
1366 const struct drm_gpuvm_ops *ops = gpuvm->ops; in __drm_gpuvm_validate()
1371 for_each_vm_bo_in_list(gpuvm, evict, &evict, vm_bo) { in __drm_gpuvm_validate()
1378 restore_vm_bo_list(gpuvm, evict); in __drm_gpuvm_validate()
1384 drm_gpuvm_validate_locked(struct drm_gpuvm *gpuvm, struct drm_exec *exec) in drm_gpuvm_validate_locked() argument
1386 const struct drm_gpuvm_ops *ops = gpuvm->ops; in drm_gpuvm_validate_locked()
1390 drm_gpuvm_resv_assert_held(gpuvm); in drm_gpuvm_validate_locked()
1392 list_for_each_entry_safe(vm_bo, next, &gpuvm->evict.list, in drm_gpuvm_validate_locked()
1417 drm_gpuvm_validate(struct drm_gpuvm *gpuvm, struct drm_exec *exec) in drm_gpuvm_validate() argument
1419 const struct drm_gpuvm_ops *ops = gpuvm->ops; in drm_gpuvm_validate()
1424 if (drm_gpuvm_resv_protected(gpuvm)) in drm_gpuvm_validate()
1425 return drm_gpuvm_validate_locked(gpuvm, exec); in drm_gpuvm_validate()
1427 return __drm_gpuvm_validate(gpuvm, exec); in drm_gpuvm_validate()
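drm_gpuvm_validate() picks the locked walk for DRM_GPUVM_RESV_PROTECTED VMs and the spinlock/local-list walk otherwise; both funnel each evicted vm_bo into the driver's vm_bo_validate hook, which is expected to make the object resident again (e.g. via ttm_bo_validate()). A hedged wiring sketch; my_bo_validate is a hypothetical driver helper:

    static int my_vm_bo_validate(struct drm_gpuvm_bo *vm_bo,
                                 struct drm_exec *exec)
    {
            /* make vm_bo->obj resident again; hypothetical helper */
            return my_bo_validate(vm_bo->obj, exec);
    }

    static const struct drm_gpuvm_ops my_gpuvm_ops = {
            .vm_free        = my_vm_free,
            .vm_bo_validate = my_vm_bo_validate,
    };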
1441 drm_gpuvm_resv_add_fence(struct drm_gpuvm *gpuvm, in drm_gpuvm_resv_add_fence() argument
1453 drm_gpuvm_is_extobj(gpuvm, obj) ? in drm_gpuvm_resv_add_fence()
1470 drm_gpuvm_bo_create(struct drm_gpuvm *gpuvm, in drm_gpuvm_bo_create() argument
1473 const struct drm_gpuvm_ops *ops = gpuvm->ops; in drm_gpuvm_bo_create()
1484 vm_bo->vm = drm_gpuvm_get(gpuvm); in drm_gpuvm_bo_create()
1504 struct drm_gpuvm *gpuvm = vm_bo->vm; in drm_gpuvm_bo_destroy() local
1505 const struct drm_gpuvm_ops *ops = gpuvm->ops; in drm_gpuvm_bo_destroy()
1507 bool lock = !drm_gpuvm_resv_protected(gpuvm); in drm_gpuvm_bo_destroy()
1510 drm_gpuvm_resv_assert_held(gpuvm); in drm_gpuvm_bo_destroy()
1523 drm_gpuvm_put(gpuvm); in drm_gpuvm_bo_destroy()
1555 __drm_gpuvm_bo_find(struct drm_gpuvm *gpuvm, in __drm_gpuvm_bo_find() argument
1562 if (vm_bo->vm == gpuvm) in __drm_gpuvm_bo_find()
1581 drm_gpuvm_bo_find(struct drm_gpuvm *gpuvm, in drm_gpuvm_bo_find() argument
1584 struct drm_gpuvm_bo *vm_bo = __drm_gpuvm_bo_find(gpuvm, obj); in drm_gpuvm_bo_find()
1606 drm_gpuvm_bo_obtain(struct drm_gpuvm *gpuvm, in drm_gpuvm_bo_obtain() argument
1611 vm_bo = drm_gpuvm_bo_find(gpuvm, obj); in drm_gpuvm_bo_obtain()
1615 vm_bo = drm_gpuvm_bo_create(gpuvm, obj); in drm_gpuvm_bo_obtain()
1645 struct drm_gpuvm *gpuvm = __vm_bo->vm; in drm_gpuvm_bo_obtain_prealloc() local
1649 vm_bo = drm_gpuvm_bo_find(gpuvm, obj); in drm_gpuvm_bo_obtain_prealloc()
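drm_gpuvm_bo_obtain() returns the existing vm_bo connecting this VM and GEM object, or creates one, so the extobj and evict machinery sees each (VM, BO) pair exactly once; drm_gpuvm_bo_obtain_prealloc() is the variant for paths that must allocate before taking locks. A hedged map-path fragment (the caller holds the required resv locks):

    struct drm_gpuvm_bo *vm_bo;

    vm_bo = drm_gpuvm_bo_obtain(gpuvm, obj);
    if (IS_ERR(vm_bo))
            return PTR_ERR(vm_bo);

    drm_gpuva_link(va, vm_bo);      /* the va takes its own vm_bo reference */
    drm_gpuvm_bo_put(vm_bo);        /* drop the reference obtain() returned */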
1674 struct drm_gpuvm *gpuvm = vm_bo->vm; in drm_gpuvm_bo_extobj_add() local
1675 bool lock = !drm_gpuvm_resv_protected(gpuvm); in drm_gpuvm_bo_extobj_add()
1678 drm_gpuvm_resv_assert_held(gpuvm); in drm_gpuvm_bo_extobj_add()
1680 if (drm_gpuvm_is_extobj(gpuvm, vm_bo->obj)) in drm_gpuvm_bo_extobj_add()
1696 struct drm_gpuvm *gpuvm = vm_bo->vm; in drm_gpuvm_bo_evict() local
1698 bool lock = !drm_gpuvm_resv_protected(gpuvm); in drm_gpuvm_bo_evict()
1707 if (drm_gpuvm_is_extobj(gpuvm, obj) && !lock) in drm_gpuvm_bo_evict()
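drm_gpuvm_bo_evict() moves a vm_bo on or off its VM's evict list; as the check above shows, external objects on resv-protected VMs are flagged instead of listed directly. A hedged move-notify sketch; my_bo_move_notify is hypothetical, and the iteration assumes the GEM object's gpuva list lock is held:

    static void my_bo_move_notify(struct drm_gem_object *obj, bool evicted)
    {
            struct drm_gpuvm_bo *vm_bo;

            drm_gem_for_each_gpuvm_bo(vm_bo, obj)
                    drm_gpuvm_bo_evict(vm_bo, evicted);
    }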
1718 __drm_gpuva_insert(struct drm_gpuvm *gpuvm, in __drm_gpuva_insert() argument
1724 if (drm_gpuva_it_iter_first(&gpuvm->rb.tree, in __drm_gpuva_insert()
1729 va->vm = gpuvm; in __drm_gpuva_insert()
1731 drm_gpuva_it_insert(va, &gpuvm->rb.tree); in __drm_gpuva_insert()
1737 head = &gpuvm->rb.list; in __drm_gpuva_insert()
1759 drm_gpuva_insert(struct drm_gpuvm *gpuvm, in drm_gpuva_insert() argument
1766 if (unlikely(!drm_gpuvm_range_valid(gpuvm, addr, range))) in drm_gpuva_insert()
1769 ret = __drm_gpuva_insert(gpuvm, va); in drm_gpuva_insert()
1776 drm_gpuvm_get(gpuvm); in drm_gpuva_insert()
1802 struct drm_gpuvm *gpuvm = va->vm; in drm_gpuva_remove() local
1804 if (unlikely(va == &gpuvm->kernel_alloc_node)) { in drm_gpuva_remove()
1805 drm_WARN(gpuvm->drm, 1, in drm_gpuva_remove()
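drm_gpuva_insert() validates the range, inserts the va into the interval tree and takes a VM reference; drm_gpuva_remove() undoes that, refusing (with the warning above) to remove the kernel_alloc_node. A hedged fragment for a manual map path:

    va->va.addr = addr;
    va->va.range = range;
    va->gem.obj = obj;
    va->gem.offset = offset;

    /* -EINVAL on an invalid range, -EEXIST on overlap */
    ret = drm_gpuva_insert(gpuvm, va);
    if (ret)
            return ret;

    /* ... later, on unmap: */
    drm_gpuva_remove(va);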
1834 struct drm_gpuvm *gpuvm = va->vm; in drm_gpuva_link() local
1839 drm_WARN_ON(gpuvm->drm, obj != vm_bo->obj); in drm_gpuva_link()
1892 drm_gpuva_find_first(struct drm_gpuvm *gpuvm, in drm_gpuva_find_first() argument
1897 return drm_gpuva_it_iter_first(&gpuvm->rb.tree, addr, last); in drm_gpuva_find_first()
1910 drm_gpuva_find(struct drm_gpuvm *gpuvm, in drm_gpuva_find() argument
1915 va = drm_gpuva_find_first(gpuvm, addr, range); in drm_gpuva_find()
1943 drm_gpuva_find_prev(struct drm_gpuvm *gpuvm, u64 start) in drm_gpuva_find_prev() argument
1945 if (!drm_gpuvm_range_valid(gpuvm, start - 1, 1)) in drm_gpuva_find_prev()
1948 return drm_gpuva_it_iter_first(&gpuvm->rb.tree, start - 1, start); in drm_gpuva_find_prev()
1965 drm_gpuva_find_next(struct drm_gpuvm *gpuvm, u64 end) in drm_gpuva_find_next() argument
1967 if (!drm_gpuvm_range_valid(gpuvm, end, 1)) in drm_gpuva_find_next()
1970 return drm_gpuva_it_iter_first(&gpuvm->rb.tree, end, end + 1); in drm_gpuva_find_next()
1984 drm_gpuvm_interval_empty(struct drm_gpuvm *gpuvm, u64 addr, u64 range) in drm_gpuvm_interval_empty() argument
1986 return !drm_gpuva_find_first(gpuvm, addr, range); in drm_gpuvm_interval_empty()
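The lookup helpers cover the common cases: drm_gpuva_find() matches addr and range exactly, drm_gpuva_find_first() returns any mapping overlapping the interval, find_prev()/find_next() probe the VAs immediately adjacent to a boundary, and drm_gpuvm_interval_empty() is simply the negation of find_first(). A short fragment:

    struct drm_gpuva *va;
    bool empty;

    /* exact match on both addr and range */
    va = drm_gpuva_find(gpuvm, addr, range);
    if (!va) {
            /* fall back to any mapping overlapping the interval */
            va = drm_gpuva_find_first(gpuvm, addr, range);
    }

    /* true iff nothing is mapped in [addr, addr + range) */
    empty = drm_gpuvm_interval_empty(gpuvm, addr, range);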
2000 drm_gpuva_map(struct drm_gpuvm *gpuvm, in drm_gpuva_map() argument
2005 drm_gpuva_insert(gpuvm, va); in drm_gpuva_map()
2025 struct drm_gpuvm *gpuvm = va->vm; in drm_gpuva_remap() local
2031 drm_gpuva_insert(gpuvm, prev); in drm_gpuva_remap()
2036 drm_gpuva_insert(gpuvm, next); in drm_gpuva_remap()
2103 __drm_gpuvm_sm_map(struct drm_gpuvm *gpuvm, in __drm_gpuvm_sm_map() argument
2112 if (unlikely(!drm_gpuvm_range_valid(gpuvm, req_addr, req_range))) in __drm_gpuvm_sm_map()
2115 drm_gpuvm_for_each_va_range_safe(va, next, gpuvm, req_addr, req_end) { in __drm_gpuvm_sm_map()
2245 __drm_gpuvm_sm_unmap(struct drm_gpuvm *gpuvm, in __drm_gpuvm_sm_unmap() argument
2253 if (unlikely(!drm_gpuvm_range_valid(gpuvm, req_addr, req_range))) in __drm_gpuvm_sm_unmap()
2256 drm_gpuvm_for_each_va_range_safe(va, next, gpuvm, req_addr, req_end) { in __drm_gpuvm_sm_unmap()
2335 drm_gpuvm_sm_map(struct drm_gpuvm *gpuvm, void *priv, in drm_gpuvm_sm_map() argument
2339 const struct drm_gpuvm_ops *ops = gpuvm->ops; in drm_gpuvm_sm_map()
2346 return __drm_gpuvm_sm_map(gpuvm, ops, priv, in drm_gpuvm_sm_map()
2380 drm_gpuvm_sm_unmap(struct drm_gpuvm *gpuvm, void *priv, in drm_gpuvm_sm_unmap() argument
2383 const struct drm_gpuvm_ops *ops = gpuvm->ops; in drm_gpuvm_sm_unmap()
2389 return __drm_gpuvm_sm_unmap(gpuvm, ops, priv, in drm_gpuvm_sm_unmap()
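drm_gpuvm_sm_map()/drm_gpuvm_sm_unmap() drive the split/merge logic in "immediate mode": instead of building an op list, each step is fed to the driver's sm_step_* callbacks with @priv passed through. A hedged sketch, assuming a hypothetical my_ctx that carries preallocated VAs and the target vm_bo (signatures as in this listing; some kernel versions pass the map request as a struct drm_gpuvm_map_req instead of discrete arguments):

    static int my_sm_step_map(struct drm_gpuva_op *op, void *priv)
    {
            struct my_ctx *ctx = priv;
            struct drm_gpuva *va = my_ctx_take_prealloc_va(ctx);

            drm_gpuva_map(ctx->vm, va, &op->map);
            drm_gpuva_link(va, ctx->vm_bo);
            return 0;
    }

    static int my_sm_step_remap(struct drm_gpuva_op *op, void *priv)
    {
            struct my_ctx *ctx = priv;
            struct drm_gpuva *prev = NULL, *next = NULL;

            if (op->remap.prev)
                    prev = my_ctx_take_prealloc_va(ctx);
            if (op->remap.next)
                    next = my_ctx_take_prealloc_va(ctx);

            drm_gpuva_remap(prev, next, &op->remap);
            if (prev)
                    drm_gpuva_link(prev, ctx->vm_bo);
            if (next)
                    drm_gpuva_link(next, ctx->vm_bo);
            drm_gpuva_unlink(op->remap.unmap->va);
            return 0;
    }

    static int my_sm_step_unmap(struct drm_gpuva_op *op, void *priv)
    {
            struct drm_gpuva *va = op->unmap.va;

            drm_gpuva_unmap(&op->unmap);
            drm_gpuva_unlink(va);   /* free or recycle va once flushed */
            return 0;
    }

    static const struct drm_gpuvm_ops my_gpuvm_ops = {
            .sm_step_map    = my_sm_step_map,
            .sm_step_remap  = my_sm_step_remap,
            .sm_step_unmap  = my_sm_step_unmap,
    };

    /* bind path, resv locks held: */
    ret = drm_gpuvm_sm_map(ctx->vm, ctx, req_addr, req_range,
                           req_obj, req_offset);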
2479 drm_gpuvm_sm_map_exec_lock(struct drm_gpuvm *gpuvm, in drm_gpuvm_sm_map_exec_lock() argument
2490 return __drm_gpuvm_sm_map(gpuvm, &lock_ops, exec, in drm_gpuvm_sm_map_exec_lock()
2512 drm_gpuvm_sm_unmap_exec_lock(struct drm_gpuvm *gpuvm, struct drm_exec *exec, in drm_gpuvm_sm_unmap_exec_lock() argument
2515 return __drm_gpuvm_sm_unmap(gpuvm, &lock_ops, exec, in drm_gpuvm_sm_unmap_exec_lock()
2521 gpuva_op_alloc(struct drm_gpuvm *gpuvm) in gpuva_op_alloc() argument
2523 const struct drm_gpuvm_ops *fn = gpuvm->ops; in gpuva_op_alloc()
2538 gpuva_op_free(struct drm_gpuvm *gpuvm, in gpuva_op_free() argument
2541 const struct drm_gpuvm_ops *fn = gpuvm->ops; in gpuva_op_free()
2557 struct drm_gpuvm *gpuvm = args->vm; in drm_gpuva_sm_step() local
2561 op = gpuva_op_alloc(gpuvm); in drm_gpuva_sm_step()
2600 gpuva_op_free(gpuvm, op); in drm_gpuva_sm_step()
2644 drm_gpuvm_sm_map_ops_create(struct drm_gpuvm *gpuvm, in drm_gpuvm_sm_map_ops_create() argument
2661 args.vm = gpuvm; in drm_gpuvm_sm_map_ops_create()
2664 ret = __drm_gpuvm_sm_map(gpuvm, &gpuvm_list_ops, &args, in drm_gpuvm_sm_map_ops_create()
2673 drm_gpuva_ops_free(gpuvm, ops); in drm_gpuvm_sm_map_ops_create()
2708 drm_gpuvm_sm_unmap_ops_create(struct drm_gpuvm *gpuvm, in drm_gpuvm_sm_unmap_ops_create() argument
2724 args.vm = gpuvm; in drm_gpuvm_sm_unmap_ops_create()
2727 ret = __drm_gpuvm_sm_unmap(gpuvm, &gpuvm_list_ops, &args, in drm_gpuvm_sm_unmap_ops_create()
2735 drm_gpuva_ops_free(gpuvm, ops); in drm_gpuvm_sm_unmap_ops_create()
2759 drm_gpuvm_prefetch_ops_create(struct drm_gpuvm *gpuvm, in drm_gpuvm_prefetch_ops_create() argument
2774 drm_gpuvm_for_each_va_range(va, gpuvm, addr, end) { in drm_gpuvm_prefetch_ops_create()
2775 op = gpuva_op_alloc(gpuvm); in drm_gpuvm_prefetch_ops_create()
2789 drm_gpuva_ops_free(gpuvm, ops); in drm_gpuvm_prefetch_ops_create()
2857 drm_gpuva_ops_free(struct drm_gpuvm *gpuvm, in drm_gpuva_ops_free() argument
2871 gpuva_op_free(gpuvm, op); in drm_gpuva_ops_free()
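The *_ops_create() variants snapshot the same split/merge steps into a driver-iterable list instead of calling back immediately, which suits drivers that build page-table updates from asynchronous jobs; every list, including the prefetch one, is released with drm_gpuva_ops_free(). A hedged usage sketch (discrete map arguments as in this listing):

    struct drm_gpuva_ops *ops;
    struct drm_gpuva_op *op;
    int ret = 0;

    ops = drm_gpuvm_sm_map_ops_create(gpuvm, req_addr, req_range,
                                      req_obj, req_offset);
    if (IS_ERR(ops))
            return PTR_ERR(ops);

    drm_gpuva_for_each_op(op, ops) {
            switch (op->op) {
            case DRM_GPUVA_OP_MAP:
                    /* program PTEs for op->map */
                    break;
            case DRM_GPUVA_OP_REMAP:
                    /* split: op->remap.prev / .next / .unmap */
                    break;
            case DRM_GPUVA_OP_UNMAP:
                    /* tear down op->unmap.va */
                    break;
            case DRM_GPUVA_OP_PREFETCH:
                    /* only from drm_gpuvm_prefetch_ops_create() */
                    break;
            default:
                    ret = -EINVAL;
                    break;
            }
            if (ret)
                    break;
    }

    drm_gpuva_ops_free(gpuvm, ops);
    return ret;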