Lines matching refs:range in drivers/gpu/drm/drm_gpuvm.c. Each entry gives the source line number, the matching line, and the enclosing function, with the hit tagged as an argument or a local.

872 #define GPUVA_LAST(node) ((node)->va.addr + (node)->va.range - 1)
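
The GPUVA_LAST() macro at line 872 computes the last byte covered by a VA node as addr + range - 1, i.e. an inclusive end address, which is the form the interval lookups further down expect. A minimal standalone sketch of that arithmetic (the struct and names below are illustrative stand-ins, not the kernel types):

    #include <assert.h>
    #include <stdint.h>

    /* Toy stand-in for the addr/range pair stored in a VA node. */
    struct toy_va {
        uint64_t addr;
        uint64_t range;
    };

    /* Inclusive last address, mirroring GPUVA_LAST(): addr + range - 1. */
    static uint64_t toy_va_last(const struct toy_va *va)
    {
        return va->addr + va->range - 1;
    }

    int main(void)
    {
        struct toy_va va = { .addr = 0x1000, .range = 0x2000 };

        /* [0x1000, 0x3000) covers bytes 0x1000..0x2fff inclusive. */
        assert(toy_va_last(&va) == 0x2fff);
        return 0;
    }

One practical property of the inclusive form: a range ending exactly at the top of the 64-bit address space still has a representable last address.
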
886 drm_gpuvm_check_overflow(u64 addr, u64 range) in drm_gpuvm_check_overflow() argument
890 return check_add_overflow(addr, range, &end); in drm_gpuvm_check_overflow()
894 drm_gpuvm_warn_check_overflow(struct drm_gpuvm *gpuvm, u64 addr, u64 range) in drm_gpuvm_warn_check_overflow() argument
896 return drm_WARN(gpuvm->drm, drm_gpuvm_check_overflow(addr, range), in drm_gpuvm_warn_check_overflow()
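
drm_gpuvm_check_overflow() at line 886 only asks whether addr + range wraps around u64; check_add_overflow() is the kernel's wrapper for the compiler's overflow builtins. A user-space approximation using __builtin_add_overflow directly (GCC/Clang assumed):

    #include <assert.h>
    #include <stdbool.h>
    #include <stdint.h>

    /* Rough user-space equivalent of drm_gpuvm_check_overflow():
     * true if addr + range does not fit in a u64. */
    static bool toy_check_overflow(uint64_t addr, uint64_t range)
    {
        uint64_t end;

        return __builtin_add_overflow(addr, range, &end);
    }

    int main(void)
    {
        assert(!toy_check_overflow(0x1000, 0x2000));
        assert(toy_check_overflow(UINT64_MAX, 2));
        return 0;
    }
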
901 drm_gpuvm_in_mm_range(struct drm_gpuvm *gpuvm, u64 addr, u64 range) in drm_gpuvm_in_mm_range() argument
903 u64 end = addr + range; in drm_gpuvm_in_mm_range()
911 drm_gpuvm_in_kernel_node(struct drm_gpuvm *gpuvm, u64 addr, u64 range) in drm_gpuvm_in_kernel_node() argument
913 u64 end = addr + range; in drm_gpuvm_in_kernel_node()
915 u64 krange = gpuvm->kernel_alloc_node.va.range; in drm_gpuvm_in_kernel_node()
934 u64 addr, u64 range) in drm_gpuvm_range_valid() argument
936 return !drm_gpuvm_check_overflow(addr, range) && in drm_gpuvm_range_valid()
937 drm_gpuvm_in_mm_range(gpuvm, addr, range) && in drm_gpuvm_range_valid()
938 !drm_gpuvm_in_kernel_node(gpuvm, addr, range); in drm_gpuvm_range_valid()
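
Lines 901-938 combine three tests into drm_gpuvm_range_valid(): the request must not overflow, must lie entirely inside [mm_start, mm_start + mm_range), and must not intersect the reserved kernel_alloc_node, whose non-zero range is what marks it as present. A standalone sketch of the same predicate (toy_vm and its fields are hypothetical stand-ins for the drm_gpuvm fields involved):

    #include <assert.h>
    #include <stdbool.h>
    #include <stdint.h>

    struct toy_vm {
        uint64_t mm_start, mm_range;   /* usable VA space              */
        uint64_t kstart, krange;       /* reserved kernel node, if any */
    };

    static bool toy_in_mm_range(const struct toy_vm *vm, uint64_t addr, uint64_t range)
    {
        uint64_t end = addr + range;
        uint64_t mm_end = vm->mm_start + vm->mm_range;

        return addr >= vm->mm_start && end <= mm_end;
    }

    static bool toy_in_kernel_node(const struct toy_vm *vm, uint64_t addr, uint64_t range)
    {
        uint64_t end = addr + range;
        uint64_t kend = vm->kstart + vm->krange;

        /* A zero-sized kernel node means nothing is reserved. */
        return vm->krange && addr < kend && vm->kstart < end;
    }

    static bool toy_range_valid(const struct toy_vm *vm, uint64_t addr, uint64_t range)
    {
        uint64_t end;

        return !__builtin_add_overflow(addr, range, &end) &&
               toy_in_mm_range(vm, addr, range) &&
               !toy_in_kernel_node(vm, addr, range);
    }

    int main(void)
    {
        struct toy_vm vm = {
            .mm_start = 0x0, .mm_range = 1ull << 40,
            .kstart = 0x0, .krange = 0x1000,        /* first page reserved */
        };

        assert(!toy_range_valid(&vm, 0x0, 0x1000));        /* hits kernel node */
        assert(toy_range_valid(&vm, 0x1000, 0x1000));      /* fine             */
        assert(!toy_range_valid(&vm, 1ull << 40, 0x1000)); /* outside VA space */
        return 0;
    }
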
1002 u64 start_offset, u64 range, in drm_gpuvm_init() argument
1025 drm_gpuvm_warn_check_overflow(gpuvm, start_offset, range); in drm_gpuvm_init()
1027 gpuvm->mm_range = range; in drm_gpuvm_init()
1032 gpuvm->kernel_alloc_node.va.range = reserve_range; in drm_gpuvm_init()
1046 if (gpuvm->kernel_alloc_node.va.range) in drm_gpuvm_fini()
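
drm_gpuvm_init() at line 1002 warns if the managed window itself overflows, records it as mm_start/mm_range, and, when a reserve range is given, sets kernel_alloc_node to cover it; drm_gpuvm_fini() at line 1046 only tears that node down when its range is non-zero. A toy version of that bookkeeping, not the kernel API (whose full parameter list is longer than what the listing shows):

    #include <assert.h>
    #include <stdint.h>
    #include <string.h>

    /* Hypothetical mirror of the fields drm_gpuvm_init()/_fini() touch. */
    struct toy_vm {
        uint64_t mm_start, mm_range;   /* managed VA window        */
        uint64_t kstart, krange;       /* optional reserved region */
    };

    static void toy_vm_init(struct toy_vm *vm, uint64_t start, uint64_t range,
                            uint64_t reserve_start, uint64_t reserve_range)
    {
        memset(vm, 0, sizeof(*vm));
        vm->mm_start = start;
        vm->mm_range = range;

        /* Like kernel_alloc_node: only meaningful if reserve_range != 0. */
        if (reserve_range) {
            vm->kstart = reserve_start;
            vm->krange = reserve_range;
        }
    }

    static void toy_vm_fini(struct toy_vm *vm)
    {
        /* Mirrors the line-1046 check: only drop a reservation that exists. */
        if (vm->krange)
            vm->krange = 0;
    }

    int main(void)
    {
        struct toy_vm vm;

        toy_vm_init(&vm, 0, 1ull << 48, 0, 0x1000);
        assert(vm.krange == 0x1000);
        toy_vm_fini(&vm);
        assert(vm.krange == 0);
        return 0;
    }
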
1214 u64 addr, u64 range, unsigned int num_fences) in drm_gpuvm_prepare_range() argument
1217 u64 end = addr + range; in drm_gpuvm_prepare_range()
1339 u64 addr, u64 range) in drm_gpuvm_exec_lock_range() argument
1348 ret = drm_gpuvm_prepare_range(gpuvm, exec, addr, range, in drm_gpuvm_exec_lock_range()
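
drm_gpuvm_prepare_range() at line 1214 turns the (addr, range) pair into an exclusive end and walks every mapping intersecting it so the backing GEM objects can be prepared for the requested number of fences; drm_gpuvm_exec_lock_range() at line 1339 essentially runs that walk inside a drm_exec locking loop. A toy sketch of the walk over a plain array instead of the kernel's interval tree:

    #include <assert.h>
    #include <stdint.h>
    #include <stdio.h>

    struct toy_va {
        uint64_t addr, range;
        int obj_id;                    /* stand-in for the backing GEM object */
    };

    /* Visit every mapping that intersects [addr, addr + range), the same
     * bound computation used at line 1217. */
    static int toy_prepare_range(const struct toy_va *vas, int n,
                                 uint64_t addr, uint64_t range)
    {
        uint64_t end = addr + range;
        int prepared = 0;

        for (int i = 0; i < n; i++) {
            uint64_t va_end = vas[i].addr + vas[i].range;

            if (vas[i].addr < end && va_end > addr) {
                /* Roughly where the kernel prepares the mapping's GEM object. */
                printf("prepare obj %d\n", vas[i].obj_id);
                prepared++;
            }
        }
        return prepared;
    }

    int main(void)
    {
        const struct toy_va vas[] = {
            { 0x0000, 0x1000, 1 },
            { 0x2000, 0x1000, 2 },
            { 0x4000, 0x1000, 3 },
        };

        /* Only the mappings overlapping [0x1800, 0x4800) are prepared. */
        assert(toy_prepare_range(vas, 3, 0x1800, 0x3000) == 2);
        return 0;
    }
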
1763 u64 range = va->va.range; in drm_gpuva_insert() local
1766 if (unlikely(!drm_gpuvm_range_valid(gpuvm, addr, range))) in drm_gpuva_insert()
1893 u64 addr, u64 range) in drm_gpuva_find_first() argument
1895 u64 last = addr + range - 1; in drm_gpuva_find_first()
1911 u64 addr, u64 range) in drm_gpuva_find() argument
1915 va = drm_gpuva_find_first(gpuvm, addr, range); in drm_gpuva_find()
1920 va->va.range != range) in drm_gpuva_find()
1984 drm_gpuvm_interval_empty(struct drm_gpuvm *gpuvm, u64 addr, u64 range) in drm_gpuvm_interval_empty() argument
1986 return !drm_gpuva_find_first(gpuvm, addr, range); in drm_gpuvm_interval_empty()
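
Lookups share the same conventions: drm_gpuva_find_first() at line 1893 returns the first mapping overlapping the inclusive interval [addr, addr + range - 1], drm_gpuva_find() at line 1911 additionally insists on an exact addr and range match, and drm_gpuvm_interval_empty() at line 1984 is just the negation of find_first. drm_gpuva_insert() at line 1763 sits on the other side, refusing to add anything that fails the validity check above. A flat-array sketch of the lookup trio (a stand-in for the kernel's interval tree):

    #include <assert.h>
    #include <stdbool.h>
    #include <stddef.h>
    #include <stdint.h>

    struct toy_va { uint64_t addr, range; };

    struct toy_vm {
        struct toy_va va[8];
        int count;
    };

    /* First mapping overlapping [addr, addr + range - 1]; NULL if none. */
    static const struct toy_va *toy_find_first(const struct toy_vm *vm,
                                               uint64_t addr, uint64_t range)
    {
        uint64_t last = addr + range - 1;

        for (int i = 0; i < vm->count; i++) {
            uint64_t va_last = vm->va[i].addr + vm->va[i].range - 1;

            if (vm->va[i].addr <= last && va_last >= addr)
                return &vm->va[i];
        }
        return NULL;
    }

    /* Exact-match lookup: the hit must start at addr and span exactly range. */
    static const struct toy_va *toy_find(const struct toy_vm *vm,
                                         uint64_t addr, uint64_t range)
    {
        const struct toy_va *va = toy_find_first(vm, addr, range);

        if (va && va->addr == addr && va->range == range)
            return va;
        return NULL;
    }

    static bool toy_interval_empty(const struct toy_vm *vm,
                                   uint64_t addr, uint64_t range)
    {
        return !toy_find_first(vm, addr, range);
    }

    int main(void)
    {
        struct toy_vm vm = { .va = { { 0x1000, 0x2000 } }, .count = 1 };

        assert(toy_find(&vm, 0x1000, 0x2000));          /* exact hit       */
        assert(!toy_find(&vm, 0x1000, 0x1000));         /* partial: no hit */
        assert(!toy_interval_empty(&vm, 0x2000, 0x10)); /* overlaps        */
        assert(toy_interval_empty(&vm, 0x4000, 0x10));  /* free            */
        return 0;
    }
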
2057 u64 addr, u64 range, in op_map_cb() argument
2064 op.map.va.range = range; in op_map_cb()
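
op_map_cb() at line 2057 packages the requested mapping (address, range, GEM object, offset) into a map operation and hands it to the driver's map step callback. A mock of that plumbing with hypothetical types, just to show the shape of the hand-off:

    #include <assert.h>
    #include <stdint.h>

    /* Illustrative op layout; the kernel's op structure carries more state. */
    struct toy_map_op {
        uint64_t va_addr, va_range;
        uint64_t gem_offset;
        void *gem_obj;
    };

    typedef int (*toy_step_map_fn)(const struct toy_map_op *op, void *priv);

    /* Mirror of op_map_cb(): build the op, forward it to the driver. */
    static int toy_op_map_cb(toy_step_map_fn fn, void *priv,
                             uint64_t addr, uint64_t range,
                             void *obj, uint64_t offset)
    {
        struct toy_map_op op = {
            .va_addr = addr,
            .va_range = range,
            .gem_obj = obj,
            .gem_offset = offset,
        };

        return fn(&op, priv);
    }

    static int record_map(const struct toy_map_op *op, void *priv)
    {
        *(struct toy_map_op *)priv = *op;
        return 0;
    }

    int main(void)
    {
        struct toy_map_op seen = { 0 };
        int dummy_obj;

        assert(!toy_op_map_cb(record_map, &seen, 0x1000, 0x2000, &dummy_obj, 0));
        assert(seen.va_range == 0x2000);
        return 0;
    }
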
2119 u64 range = va->va.range; in __drm_gpuvm_sm_map() local
2120 u64 end = addr + range; in __drm_gpuvm_sm_map()
2144 .va.range = range - req_range, in __drm_gpuvm_sm_map()
2162 .va.range = ls_range, in __drm_gpuvm_sm_map()
2189 .va.range = end - req_end, in __drm_gpuvm_sm_map()
2222 .va.range = end - req_end, in __drm_gpuvm_sm_map()
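
__drm_gpuvm_sm_map() at lines 2119-2222 walks each existing mapping that intersects the map request [req_addr, req_addr + req_range) and computes which pieces survive: a front remainder of ls_range = req_addr - addr bytes and/or a tail remainder of end - req_end bytes (range - req_range is the same quantity when the request is front-aligned). The tail's GEM offset shifts by however many bytes were trimmed from the front of the original mapping. A standalone sketch of just that remainder arithmetic, ignoring the merge cases the real code also handles:

    #include <assert.h>
    #include <stdint.h>

    struct toy_piece {
        uint64_t addr, range;
        uint64_t gem_offset;
    };

    /* Given an existing mapping [addr, addr+range) backed at gem_offset and a
     * map request [req_addr, req_addr+req_range) overlapping it, compute the
     * surviving front and tail pieces.  Returns how many pieces remain. */
    static int toy_split_for_map(uint64_t addr, uint64_t range, uint64_t gem_offset,
                                 uint64_t req_addr, uint64_t req_range,
                                 struct toy_piece *front, struct toy_piece *tail)
    {
        uint64_t end = addr + range;
        uint64_t req_end = req_addr + req_range;
        int n = 0;

        if (addr < req_addr) {
            front->addr = addr;
            front->range = req_addr - addr;            /* "ls_range" */
            front->gem_offset = gem_offset;
            n++;
        }
        if (end > req_end) {
            tail->addr = req_end;
            tail->range = end - req_end;               /* == range - req_range
                                                          when addr == req_addr */
            tail->gem_offset = gem_offset + (req_end - addr);
            n++;
        }
        return n;   /* 0: fully replaced, 1 or 2: remap with remainders */
    }

    int main(void)
    {
        struct toy_piece front, tail;

        /* Existing [0x1000, 0x5000), request [0x2000, 0x4000): both ends survive. */
        assert(toy_split_for_map(0x1000, 0x4000, 0, 0x2000, 0x2000,
                                 &front, &tail) == 2);
        assert(front.range == 0x1000 && tail.addr == 0x4000 && tail.range == 0x1000);
        assert(tail.gem_offset == 0x3000);
        return 0;
    }
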
2262 u64 range = va->va.range; in __drm_gpuvm_sm_unmap() local
2263 u64 end = addr + range; in __drm_gpuvm_sm_unmap()
2267 prev.va.range = req_addr - addr; in __drm_gpuvm_sm_unmap()
2276 next.va.range = end - req_end; in __drm_gpuvm_sm_unmap()
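
__drm_gpuvm_sm_unmap() at lines 2262-2276 applies the same arithmetic to unmap requests: prev keeps req_addr - addr bytes when the request starts inside a mapping, next keeps end - req_end bytes when it ends inside one, and a mapping with at least one remainder becomes a remap while a fully covered one becomes a plain unmap. A compact sketch of that classification (names are mine, and the mapping is assumed to overlap the request, as it does inside the kernel's loop):

    #include <assert.h>
    #include <stdint.h>

    enum toy_unmap_kind { TOY_UNMAP, TOY_REMAP };

    /* Classify what happens to mapping [addr, addr+range) when the caller
     * unmaps [req_addr, req_addr+req_range); remainders are returned via
     * prev_range/next_range (0 if that side is fully removed). */
    static enum toy_unmap_kind
    toy_unmap_one(uint64_t addr, uint64_t range,
                  uint64_t req_addr, uint64_t req_range,
                  uint64_t *prev_range, uint64_t *next_range)
    {
        uint64_t end = addr + range;
        uint64_t req_end = req_addr + req_range;

        *prev_range = addr < req_addr ? req_addr - addr : 0;
        *next_range = end > req_end ? end - req_end : 0;

        return (*prev_range || *next_range) ? TOY_REMAP : TOY_UNMAP;
    }

    int main(void)
    {
        uint64_t prev, next;

        /* Punch a hole in the middle: both remainders survive -> remap. */
        assert(toy_unmap_one(0x1000, 0x4000, 0x2000, 0x1000, &prev, &next) == TOY_REMAP);
        assert(prev == 0x1000 && next == 0x2000);

        /* Request covers the whole mapping -> plain unmap. */
        assert(toy_unmap_one(0x2000, 0x1000, 0x1000, 0x4000, &prev, &next) == TOY_UNMAP);
        return 0;
    }
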
2760 u64 addr, u64 range) in drm_gpuvm_prefetch_ops_create() argument
2765 u64 end = addr + range; in drm_gpuvm_prefetch_ops_create()
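
drm_gpuvm_prefetch_ops_create() at line 2760 converts (addr, range) into an exclusive end the same way and emits one prefetch operation per mapping found in that window. A toy version that records the window covered by each would-be op (the kernel keeps richer per-op state):

    #include <assert.h>
    #include <stdint.h>

    struct toy_va { uint64_t addr, range; };
    struct toy_prefetch_op { uint64_t addr, range; };

    /* Emit one prefetch op per mapping intersecting [addr, addr + range). */
    static int toy_prefetch_ops(const struct toy_va *vas, int n,
                                uint64_t addr, uint64_t range,
                                struct toy_prefetch_op *ops)
    {
        uint64_t end = addr + range;
        int count = 0;

        for (int i = 0; i < n; i++) {
            if (vas[i].addr < end && vas[i].addr + vas[i].range > addr) {
                ops[count].addr = vas[i].addr;
                ops[count].range = vas[i].range;
                count++;
            }
        }
        return count;
    }

    int main(void)
    {
        const struct toy_va vas[] = { { 0x1000, 0x1000 }, { 0x3000, 0x1000 } };
        struct toy_prefetch_op ops[2];

        assert(toy_prefetch_ops(vas, 2, 0x0, 0x2000, ops) == 1);
        assert(ops[0].addr == 0x1000);
        return 0;
    }
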