
Searched refs:vm_bo (results 1–17 of 17, sorted by relevance)

/drivers/gpu/drm/
drm_gpuvm.c:1479 vm_bo = kzalloc(sizeof(*vm_bo), GFP_KERNEL); in drm_gpuvm_bo_create()
1495 return vm_bo; in drm_gpuvm_bo_create()
1521 kfree(vm_bo); in drm_gpuvm_bo_destroy()
1547 if (vm_bo) in drm_gpuvm_bo_put()
1586 return vm_bo ? drm_gpuvm_bo_get(vm_bo) : NULL; in drm_gpuvm_bo_find()
1612 if (vm_bo) in drm_gpuvm_bo_obtain()
1616 if (!vm_bo) in drm_gpuvm_bo_obtain()
1622 return vm_bo; in drm_gpuvm_bo_obtain()
1650 if (vm_bo) { in drm_gpuvm_bo_obtain_prealloc()
1841 va->vm_bo = drm_gpuvm_bo_get(vm_bo); in drm_gpuva_link()
[all …]
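
The drm_gpuvm.c hits above span the whole drm_gpuvm_bo lifecycle: allocation (drm_gpuvm_bo_create), lookup with refcounting (drm_gpuvm_bo_find, drm_gpuvm_bo_obtain, drm_gpuvm_bo_get/_put) and VA linking (drm_gpuva_link, which line 1841 shows taking its own reference). A minimal sketch of how a driver strings these together on a bind, modeled on the msm and xe call sites further down; my_bind and its parameters are hypothetical names, and the dma-resv/gpuva locking the real API expects is reduced to a comment:

    /* Hedged sketch of a typical vm_bo bind path; not any one driver's code.
     * Assumes the caller holds the GEM object's dma-resv / gpuva lock. */
    static int my_bind(struct drm_gpuvm *vm, struct drm_gem_object *obj,
                       struct drm_gpuva *va)
    {
            struct drm_gpuvm_bo *vm_bo;

            vm_bo = drm_gpuvm_bo_obtain(vm, obj);  /* find or create, returns a ref */
            if (IS_ERR(vm_bo))
                    return PTR_ERR(vm_bo);

            drm_gpuvm_bo_extobj_add(vm_bo);  /* track as extobj if obj has its own resv */
            drm_gpuva_link(va, vm_bo);       /* the link holds its own vm_bo reference */

            /* Safe to drop the obtain() reference: the linked VA keeps vm_bo alive. */
            drm_gpuvm_bo_put(vm_bo);
            return 0;
    }
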
/drivers/gpu/drm/amd/amdgpu/
amdgpu_vm.c:183 vm_bo->moved = true; in amdgpu_vm_bo_evicted()
202 list_move(&vm_bo->vm_status, &vm_bo->vm->moved); in amdgpu_vm_bo_moved()
217 list_move(&vm_bo->vm_status, &vm_bo->vm->idle); in amdgpu_vm_bo_idle()
233 list_move(&vm_bo->vm_status, &vm_bo->vm->invalidated); in amdgpu_vm_bo_invalidated()
247 vm_bo->moved = true; in amdgpu_vm_bo_evicted_user()
249 list_move(&vm_bo->vm_status, &vm_bo->vm->evicted_user); in amdgpu_vm_bo_evicted_user()
265 list_move(&vm_bo->vm_status, &vm_bo->vm->relocated); in amdgpu_vm_bo_relocated()
283 list_move(&vm_bo->vm_status, &vm_bo->vm->done); in amdgpu_vm_bo_done()
307 list_move(&vm_bo->vm_status, &vm_bo->vm->moved); in amdgpu_vm_bo_reset_state_machine()
309 list_move(&vm_bo->vm_status, &vm_bo->vm->relocated); in amdgpu_vm_bo_reset_state_machine()
[all …]
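
Every amdgpu_vm.c hit is the same state-machine idiom: an amdgpu_vm_bo_base's state is simply which per-VM list (evicted, moved, idle, invalidated, evicted_user, relocated, done) its vm_status entry sits on, so each transition is a single list_move(). A reduced sketch of that pattern, using the amdgpu types from amdgpu_vm.h; the per-VM status lock amdgpu takes around these moves is elided:

    /* Sketch of one state transition; state == list membership. */
    static void my_vm_bo_set_moved(struct amdgpu_vm_bo_base *vm_bo)
    {
            vm_bo->moved = true;                             /* as in line 183 */
            list_move(&vm_bo->vm_status, &vm_bo->vm->moved); /* as in line 202 */
    }
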
amdgpu_vm_pt.c:143 return parent->vm_bo; in amdgpu_vm_pt_parent()
541 entry->bo->vm_bo = NULL; in amdgpu_vm_pt_free()
amdgpu_object.h:111 struct amdgpu_vm_bo_base *vm_bo; member
amdgpu_dma_buf.c:412 for (bo_base = bo->vm_bo; bo_base; bo_base = bo_base->next) { in amdgpu_dma_buf_move_notify()
amdgpu_object.c:668 bo->vm_bo = NULL; in amdgpu_bo_create()
1291 WARN_ON(abo->vm_bo); in amdgpu_bo_release_notify()
amdgpu_gem.c:1037 for (base = robj->vm_bo; base; base = base->next) in amdgpu_gem_op_ioctl()
amdgpu_amdkfd_gpuvm.c:2205 bo, bo->vm_bo->vm->process_info->eviction_fence); in amdgpu_amdkfd_map_gtt_bo_to_gart()
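
The remaining amdgpu hits walk a second structure: a BO chains all of its per-VM bases on a singly linked list rooted at bo->vm_bo (the amdgpu_object.h member above), which amdgpu_dma_buf.c and amdgpu_gem.c traverse identically. A sketch of that walk; my_visit() is a hypothetical callback:

    /* Sketch: visit every VM that has a base for this BO. */
    static void my_for_each_vm(struct amdgpu_bo *bo)
    {
            struct amdgpu_vm_bo_base *bo_base;

            for (bo_base = bo->vm_bo; bo_base; bo_base = bo_base->next)
                    my_visit(bo_base->vm);  /* hypothetical per-VM action */
    }
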
/drivers/gpu/drm/msm/
msm_gem_shrinker.c:55 struct drm_gpuvm_bo *vm_bo, *last_locked = NULL; in with_vm_locks() local
58 drm_gem_for_each_gpuvm_bo (vm_bo, obj) { in with_vm_locks()
59 struct dma_resv *resv = drm_gpuvm_resv(vm_bo->vm); in with_vm_locks()
87 drm_gpuvm_bo_get(vm_bo); in with_vm_locks()
89 last_locked = vm_bo; in with_vm_locks()
96 drm_gem_for_each_gpuvm_bo (vm_bo, obj) { in with_vm_locks()
97 struct dma_resv *resv = drm_gpuvm_resv(vm_bo->vm); in with_vm_locks()
105 drm_gpuvm_bo_put(vm_bo); in with_vm_locks()
107 if (last_locked == vm_bo) in with_vm_locks()
msm_gem.c:405 struct drm_gpuvm_bo *vm_bo; in lookup_vma() local
409 drm_gem_for_each_gpuvm_bo (vm_bo, obj) { in lookup_vma()
412 drm_gpuvm_bo_for_each_va (vma, vm_bo) { in lookup_vma()
437 struct drm_gpuvm_bo *vm_bo, *tmp; in put_iova_spaces() local
444 if (vm && vm_bo->vm != vm) in put_iova_spaces()
447 drm_gpuvm_bo_get(vm_bo); in put_iova_spaces()
455 drm_gpuvm_bo_put(vm_bo); in put_iova_spaces()
995 struct drm_gpuvm_bo *vm_bo; in msm_gem_describe() local
999 drm_gem_for_each_gpuvm_bo (vm_bo, obj) { in msm_gem_describe()
1091 struct drm_gpuvm_bo *vm_bo; in msm_gem_free_object() local
[all …]
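
lookup_vma() and put_iova_spaces() above show the two-level walk GPUVM provides: drm_gem_for_each_gpuvm_bo iterates an object's vm_bos, and drm_gpuvm_bo_for_each_va iterates the VAs linked to each one. A hedged sketch of that walk, filtering for a single VM the way both msm helpers do (locking elided):

    /* Sketch: find the VA a GEM object has in one particular VM. */
    static struct drm_gpuva *my_lookup_vma(struct drm_gem_object *obj,
                                           struct drm_gpuvm *vm)
    {
            struct drm_gpuvm_bo *vm_bo;

            drm_gem_for_each_gpuvm_bo(vm_bo, obj) {
                    struct drm_gpuva *vma;

                    if (vm_bo->vm != vm)
                            continue;

                    drm_gpuvm_bo_for_each_va(vma, vm_bo)
                            return vma;  /* first (typically only) mapping */
            }

            return NULL;
    }
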
msm_gem_vma.c:375 struct drm_gpuvm_bo *vm_bo; in msm_gem_vma_new() local
413 if (IS_ERR(vm_bo)) { in msm_gem_vma_new()
414 ret = PTR_ERR(vm_bo); in msm_gem_vma_new()
418 drm_gpuvm_bo_extobj_add(vm_bo); in msm_gem_vma_new()
419 drm_gpuva_link(&vma->base, vm_bo); in msm_gem_vma_new()
420 GEM_WARN_ON(drm_gpuvm_bo_put(vm_bo)); in msm_gem_vma_new()
528 struct drm_gpuvm_bo *vm_bo = orig_vma->vm_bo; in msm_gem_vm_sm_step_remap() local
564 if (vm_bo) in msm_gem_vm_sm_step_remap()
565 drm_gpuvm_bo_get(vm_bo); in msm_gem_vm_sm_step_remap()
596 drm_gpuvm_bo_evict(vm_bo, true); in msm_gem_vm_sm_step_remap()
[all …]
msm_gem_submit.c:366 submit->bos[i].vm_bo = drm_gpuvm_bo_get(vma->vm_bo); in submit_pin_objects()
533 struct drm_gpuvm_bo *vm_bo = submit->bos[i].vm_bo; in msm_submit_retire() local
536 drm_gpuvm_bo_put(vm_bo); in msm_submit_retire()
msm_gem.h:477 struct drm_gpuvm_bo *vm_bo; member
/drivers/gpu/drm/panthor/
panthor_mmu.c:200 struct drm_gpuvm_bo *vm_bo; member
1059 struct drm_gpuvm *vm = vm_bo->vm; in panthor_vm_bo_put()
1078 unpin = drm_gpuvm_bo_put(vm_bo); in panthor_vm_bo_put()
1108 if (op_ctx->map.vm_bo) in panthor_vm_cleanup_op_ctx()
1116 panthor_vm_bo_put(vma->base.vm_bo); in panthor_vm_cleanup_op_ctx()
2002 struct drm_gpuvm_bo *vm_bo) in panthor_vma_link() argument
2007 drm_gpuva_link(&vma->base, vm_bo); in panthor_vma_link()
2016 struct drm_gpuvm_bo *vm_bo = drm_gpuvm_bo_get(vma->base.vm_bo); in panthor_vma_unlink() local
2026 vma->base.vm_bo = vm_bo; in panthor_vma_unlink()
2064 op_ctx->map.vm_bo = NULL; in panthor_gpuva_sm_step_map()
[all …]
/drivers/gpu/drm/nouveau/
nouveau_uvmm.c:65 struct drm_gpuvm_bo *vm_bo; member
1123 struct drm_gpuvm_bo *vm_bo = bop->vm_bo; in bind_link_gpuvas() local
1212 if (IS_ERR(op->vm_bo)) in nouveau_uvmm_bind_job_submit()
1213 return PTR_ERR(op->vm_bo); in nouveau_uvmm_bind_job_submit()
1215 drm_gpuvm_bo_extobj_add(op->vm_bo); in nouveau_uvmm_bind_job_submit()
1521 if (!IS_ERR_OR_NULL(op->vm_bo)) { in nouveau_uvmm_bind_job_cleanup()
1523 drm_gpuvm_bo_put(op->vm_bo); in nouveau_uvmm_bind_job_cleanup()
1759 struct drm_gpuvm_bo *vm_bo; in nouveau_uvmm_bo_map_all() local
1765 drm_gpuvm_bo_for_each_va(va, vm_bo) { in nouveau_uvmm_bo_map_all()
1778 struct drm_gpuvm_bo *vm_bo; in nouveau_uvmm_bo_unmap_all() local
[all …]
/drivers/gpu/drm/xe/
xe_vm.c:400 vm_bo->evicted = false; in xe_gpuvm_validate()
1229 struct drm_gpuvm_bo *vm_bo; in xe_vma_create() local
1234 if (IS_ERR(vm_bo)) { in xe_vma_create()
1236 return ERR_CAST(vm_bo); in xe_vma_create()
1239 drm_gpuvm_bo_extobj_add(vm_bo); in xe_vma_create()
1242 drm_gpuva_link(&vma->gpuva, vm_bo); in xe_vma_create()
1243 drm_gpuvm_bo_put(vm_bo); in xe_vma_create()
2306 struct drm_gpuvm_bo *vm_bo; in vm_bind_ioctl_ops_create() local
2337 if (IS_ERR(vm_bo)) { in vm_bind_ioctl_ops_create()
2339 return ERR_CAST(vm_bo); in vm_bind_ioctl_ops_create()
[all …]
xe_bo.c:620 struct drm_gpuvm_bo *vm_bo; in xe_bo_trigger_rebind() local
634 drm_gem_for_each_gpuvm_bo(vm_bo, obj) { in xe_bo_trigger_rebind()
635 struct xe_vm *vm = gpuvm_to_vm(vm_bo->vm); in xe_bo_trigger_rebind()
639 drm_gpuvm_bo_evict(vm_bo, true); in xe_bo_trigger_rebind()
663 drm_gpuvm_bo_for_each_va(gpuva, vm_bo) { in xe_bo_trigger_rebind()
1021 struct drm_gpuvm_bo *vm_bo; in xe_bo_eviction_valuable() local
1029 drm_gem_for_each_gpuvm_bo(vm_bo, &bo->base) { in xe_bo_eviction_valuable()
1030 if (xe_vm_is_validating(gpuvm_to_vm(vm_bo->vm))) in xe_bo_eviction_valuable()
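
xe_bo_trigger_rebind() and xe_bo_eviction_valuable() both fan out from the BO with drm_gem_for_each_gpuvm_bo, marking each vm_bo evicted or querying per-VM state. A minimal sketch of the eviction side, following the xe snippet above; the resv locking xe performs around the walk is assumed held:

    /* Sketch: flag every VM mapping this BO as evicted. */
    static void my_bo_notify_evict(struct drm_gem_object *obj)
    {
            struct drm_gpuvm_bo *vm_bo;

            drm_gem_for_each_gpuvm_bo(vm_bo, obj)
                    drm_gpuvm_bo_evict(vm_bo, true);  /* moves vm_bo to the VM's evicted list */
    }
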
