Lines Matching refs:tbo

183 if (bo->tbo.type == ttm_bo_type_kernel) in amdgpu_vm_bo_evicted()
292 if (bo->tbo.base.resv != vm->root.bo->tbo.base.resv) in amdgpu_vm_bo_base_init()
295 dma_resv_assert_held(vm->root.bo->tbo.base.resv); in amdgpu_vm_bo_base_init()
297 ttm_bo_set_bulk_move(&bo->tbo, &vm->lru_bulk_move); in amdgpu_vm_bo_base_init()
298 if (bo->tbo.type == ttm_bo_type_kernel && bo->parent) in amdgpu_vm_bo_base_init()
304 amdgpu_mem_type_to_domain(bo->tbo.resource->mem_type)) in amdgpu_vm_bo_base_init()
330 entry->tv.bo = &vm->root.bo->tbo; in amdgpu_vm_get_pd_bo()
395 if (bo->tbo.type != ttm_bo_type_kernel) { in amdgpu_vm_validate_pt_bos()
994 resv = vm->root.bo->tbo.base.resv; in amdgpu_vm_bo_update()
996 struct drm_gem_object *obj = &bo->tbo.base; in amdgpu_vm_bo_update()
998 resv = bo->tbo.base.resv; in amdgpu_vm_bo_update()
1004 if (abo->tbo.resource->mem_type == TTM_PL_VRAM) in amdgpu_vm_bo_update()
1007 mem = bo->tbo.resource; in amdgpu_vm_bo_update()
1010 pages_addr = bo->tbo.ttm->dma_address; in amdgpu_vm_bo_update()
1016 flags = amdgpu_ttm_tt_pte_flags(adev, bo->tbo.ttm, mem); in amdgpu_vm_bo_update()
1021 bo_adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_vm_bo_update()
1028 if (clear || (bo && bo->tbo.base.resv == in amdgpu_vm_bo_update()
1029 vm->root.bo->tbo.base.resv)) in amdgpu_vm_bo_update()
1071 if (bo && bo->tbo.base.resv == vm->root.bo->tbo.base.resv) { in amdgpu_vm_bo_update()
1072 uint32_t mem_type = bo->tbo.resource->mem_type; in amdgpu_vm_bo_update()
1209 struct dma_resv *resv = vm->root.bo->tbo.base.resv; in amdgpu_vm_prt_fini()
1239 struct dma_resv *resv = vm->root.bo->tbo.base.resv; in amdgpu_vm_clear_freed()
1313 resv = bo_va->base.bo->tbo.base.resv; in amdgpu_vm_handle_moved()
1370 dma_resv_assert_held(bo->tbo.base.resv); in amdgpu_vm_bo_add()
1404 if (bo && bo->tbo.base.resv == vm->root.bo->tbo.base.resv && in amdgpu_vm_bo_insert_map()
1738 if (dma_resv_locking_ctx(bo->tbo.base.resv) != in amdgpu_vm_bo_trace_cs()
1765 dma_resv_assert_held(vm->root.bo->tbo.base.resv); in amdgpu_vm_bo_del()
1768 dma_resv_assert_held(bo->tbo.base.resv); in amdgpu_vm_bo_del()
1769 if (bo->tbo.base.resv == vm->root.bo->tbo.base.resv) in amdgpu_vm_bo_del()
1770 ttm_bo_set_bulk_move(&bo->tbo, NULL); in amdgpu_vm_bo_del()
1824 if (!dma_resv_test_signaled(bo->tbo.base.resv, DMA_RESV_USAGE_BOOKKEEP)) in amdgpu_vm_evictable()
1863 if (evicted && bo->tbo.base.resv == vm->root.bo->tbo.base.resv) { in amdgpu_vm_bo_invalidate()
1872 if (bo->tbo.type == ttm_bo_type_kernel) in amdgpu_vm_bo_invalidate()
1874 else if (bo->tbo.base.resv == vm->root.bo->tbo.base.resv) in amdgpu_vm_bo_invalidate()
2004 timeout = dma_resv_wait_timeout(vm->root.bo->tbo.base.resv, in amdgpu_vm_wait_idle()
2089 r = dma_resv_reserve_fences(root_bo->tbo.base.resv, 1); in amdgpu_vm_init()
2508 r = dma_resv_reserve_fences(root->tbo.base.resv, 1); in amdgpu_vm_handle_fault()
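The comparison that recurs throughout these hits, a BO's reservation object against that of the VM's root page-directory BO, is the check for a per-VM BO (one sharing the VM's dma_resv and handled under the VM lock) versus an independently reserved BO. Below is a minimal sketch of that check using only the fields shown above; the helper name bo_is_per_vm is made up for illustration and does not exist in amdgpu_vm.c, and the sketch assumes the usual amdgpu driver context (struct amdgpu_bo embedding a ttm_buffer_object as ->tbo).

	/*
	 * Hypothetical helper, not part of the driver: returns true when @bo
	 * shares the reservation object of @vm's root page-directory BO,
	 * i.e. it is a per-VM BO rather than one reserved on its own.
	 * Mirrors the test seen at lines 292, 1028, 1071, 1404, 1769 etc.
	 */
	static bool bo_is_per_vm(struct amdgpu_bo *bo, struct amdgpu_vm *vm)
	{
		return bo->tbo.base.resv == vm->root.bo->tbo.base.resv;
	}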