Lines matching refs: tbo in drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c (tbo is the struct ttm_buffer_object embedded in struct amdgpu_bo)
58 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_dma_buf_attach()
91 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_dma_buf_detach()
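The two hits above are the attach/detach callbacks, which touch tbo only to reach the owning device: amdgpu_ttm_adev() walks from the TTM device pointer stored in bo->tbo.bdev back to the amdgpu_device that embeds it. A minimal sketch of that helper, assuming the upstream layout in which struct amdgpu_device embeds its TTM device at mman.bdev (the container_of walk is the standard upstream pattern; everything around it is abbreviated):

	/* Recover the amdgpu_device from a TTM device pointer.
	 * bo->tbo.bdev points at adev->mman.bdev, so container_of()
	 * steps from the embedded member back to the containing struct.
	 */
	static inline struct amdgpu_device *amdgpu_ttm_adev(struct ttm_device *bdev)
	{
		return container_of(bdev, struct amdgpu_device, mman.bdev);
	}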
115 if (bo->tbo.moving) { in amdgpu_dma_buf_pin()
116 r = dma_fence_wait(bo->tbo.moving, true); in amdgpu_dma_buf_pin()
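Lines 115-116 are the only tbo accesses in the pin path: after the buffer is pinned into GTT, the code waits interruptibly on any still-running move fence so the exporter never hands a buffer out mid-migration. A condensed sketch of that flow, assuming a kernel generation that still carries ttm_buffer_object::moving (later kernels track move fences in the reservation object instead); amdgpu_bo_pin()/amdgpu_bo_unpin() are real helpers, but the body is reconstructed and abbreviated:

	static int amdgpu_dma_buf_pin(struct dma_buf_attachment *attach)
	{
		struct drm_gem_object *obj = attach->dmabuf->priv;
		struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj);
		int r;

		/* Pin the BO into GTT so peers get a stable mapping. */
		r = amdgpu_bo_pin(bo, AMDGPU_GEM_DOMAIN_GTT);
		if (r)
			return r;

		if (bo->tbo.moving) {
			/* Interruptible wait for an in-flight move to finish. */
			r = dma_fence_wait(bo->tbo.moving, true);
			if (r) {
				amdgpu_bo_unpin(bo);
				return r;
			}
		}
		return 0;
	}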
159 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_dma_buf_map()
163 if (!bo->tbo.pin_count) { in amdgpu_dma_buf_map()
174 r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in amdgpu_dma_buf_map()
178 } else if (!(amdgpu_mem_type_to_domain(bo->tbo.resource->mem_type) & in amdgpu_dma_buf_map()
183 switch (bo->tbo.resource->mem_type) { in amdgpu_dma_buf_map()
186 bo->tbo.ttm->pages, in amdgpu_dma_buf_map()
187 bo->tbo.ttm->num_pages); in amdgpu_dma_buf_map()
197 r = amdgpu_vram_mgr_alloc_sgt(adev, bo->tbo.resource, 0, in amdgpu_dma_buf_map()
198 bo->tbo.base.size, attach->dev, in amdgpu_dma_buf_map()
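The cluster of hits in amdgpu_dma_buf_map() is the heart of the exporter: an unpinned BO is first validated into a domain the importer can reach (line 174), a pinned BO is only accepted if it already sits in GTT (line 178), and the scatter/gather table is then built per memory type (lines 183-198). A condensed sketch of that control flow, reconstructed around the matched lines; the exact domain selection, flag handling, and error unwinding vary between kernel versions and are abbreviated here:

	static struct sg_table *amdgpu_dma_buf_map(struct dma_buf_attachment *attach,
						   enum dma_data_direction dir)
	{
		struct drm_gem_object *obj = attach->dmabuf->priv;
		struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj);
		struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);
		struct sg_table *sgt;
		int r;

		if (!bo->tbo.pin_count) {
			/* Dynamic attachment: migrate the BO somewhere mappable. */
			struct ttm_operation_ctx ctx = { false, false };
			unsigned int domains = AMDGPU_GEM_DOMAIN_GTT;

			if (attach->peer2peer &&
			    bo->preferred_domains & AMDGPU_GEM_DOMAIN_VRAM)
				domains |= AMDGPU_GEM_DOMAIN_VRAM;

			amdgpu_bo_placement_from_domain(bo, domains);
			r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
			if (r)
				return ERR_PTR(r);
		} else if (!(amdgpu_mem_type_to_domain(bo->tbo.resource->mem_type) &
			     AMDGPU_GEM_DOMAIN_GTT)) {
			/* Pinned outside of GTT: cannot hand out a mapping. */
			return ERR_PTR(-EBUSY);
		}

		switch (bo->tbo.resource->mem_type) {
		case TTM_PL_TT:
			/* System pages backing the TT placement -> sg table. */
			sgt = drm_prime_pages_to_sg(obj->dev, bo->tbo.ttm->pages,
						    bo->tbo.ttm->num_pages);
			if (IS_ERR(sgt))
				return sgt;
			if (dma_map_sgtable(attach->dev, sgt, dir,
					    DMA_ATTR_SKIP_CPU_SYNC)) {
				sg_free_table(sgt);
				kfree(sgt);
				return ERR_PTR(-EBUSY);
			}
			break;
		case TTM_PL_VRAM:
			/* VRAM: let the VRAM manager build a peer-mappable sgt. */
			r = amdgpu_vram_mgr_alloc_sgt(adev, bo->tbo.resource, 0,
						      bo->tbo.base.size, attach->dev,
						      dir, &sgt);
			if (r)
				return ERR_PTR(r);
			break;
		default:
			return ERR_PTR(-EINVAL);
		}
		return sgt;
	}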
253 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_dma_buf_begin_cpu_access()
268 if (!bo->tbo.pin_count && in amdgpu_dma_buf_begin_cpu_access()
271 ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in amdgpu_dma_buf_begin_cpu_access()
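Lines 253-271 implement the CPU-access hook: before the CPU reads through the dma-buf, an unpinned BO that is allowed in GTT is revalidated there so its pages are CPU-visible. A condensed sketch of that path; the display-domain check and reservation handling follow the upstream shape but are reconstructed from memory:

	static int amdgpu_dma_buf_begin_cpu_access(struct dma_buf *dma_buf,
						   enum dma_data_direction direction)
	{
		struct amdgpu_bo *bo = gem_to_amdgpu_bo(dma_buf->priv);
		struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);
		struct ttm_operation_ctx ctx = { true, false };
		u32 domain = amdgpu_display_supported_domains(adev, bo->flags);
		bool reads = (direction == DMA_BIDIRECTIONAL ||
			      direction == DMA_FROM_DEVICE);
		int ret;

		if (!reads || !(domain & AMDGPU_GEM_DOMAIN_GTT))
			return 0;

		ret = amdgpu_bo_reserve(bo, false);
		if (unlikely(ret != 0))
			return ret;

		/* Only migrate if the BO is unpinned and GTT is permitted. */
		if (!bo->tbo.pin_count &&
		    (bo->allowed_domains & AMDGPU_GEM_DOMAIN_GTT)) {
			amdgpu_bo_placement_from_domain(bo, AMDGPU_GEM_DOMAIN_GTT);
			ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
		}

		amdgpu_bo_unreserve(bo);
		return ret;
	}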
308 if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm) || in amdgpu_gem_prime_export()
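Line 308 is the export-time guard: a BO whose tbo.ttm is backed by a userptr mapping (amdgpu_ttm_tt_get_usermm() returns the owning mm_struct, or NULL) must not be exported, since its pages belong to a user address space. A sketch of the check in context; the VM_ALWAYS_VALID test alongside it matches the upstream shape but is reproduced from memory:

	struct dma_buf *amdgpu_gem_prime_export(struct drm_gem_object *gobj,
						int flags)
	{
		struct amdgpu_bo *bo = gem_to_amdgpu_bo(gobj);
		struct dma_buf *buf;

		/* Refuse to export userptr BOs and per-VM always-valid BOs. */
		if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm) ||
		    bo->flags & AMDGPU_GEM_CREATE_VM_ALWAYS_VALID)
			return ERR_PTR(-EPERM);

		buf = drm_gem_prime_export(gobj, flags);
		if (!IS_ERR(buf))
			buf->ops = &amdgpu_dmabuf_ops;

		return buf;
	}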
381 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_dma_buf_move_notify()
387 if (!bo->tbo.resource || bo->tbo.resource->mem_type == TTM_PL_SYSTEM) in amdgpu_dma_buf_move_notify()
390 r = ttm_bo_validate(&bo->tbo, &placement, &ctx); in amdgpu_dma_buf_move_notify()
398 struct dma_resv *resv = vm->root.bo->tbo.base.resv; in amdgpu_dma_buf_move_notify()
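The move_notify hits (lines 381-398) are the importer side: when the exporter moves the buffer, the import is evicted by validating against an empty placement (line 390), and every VM mapping the BO is then updated under the page-table root's reservation object (line 398). A condensed sketch of that eviction step; the real function uses the caller's ww_acquire_ctx for locking and fuller error handling, both simplified away here:

	static void amdgpu_dma_buf_move_notify(struct dma_buf_attachment *attach)
	{
		struct drm_gem_object *obj = attach->importer_priv;
		struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj);
		struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);
		struct ttm_operation_ctx ctx = { false, false };
		struct ttm_placement placement = {};
		struct amdgpu_vm_bo_base *bo_base;
		int r;

		/* Nothing to do if the import is not currently placed anywhere. */
		if (!bo->tbo.resource || bo->tbo.resource->mem_type == TTM_PL_SYSTEM)
			return;

		/* An empty placement list forces TTM to evict the buffer. */
		r = ttm_bo_validate(&bo->tbo, &placement, &ctx);
		if (r) {
			DRM_ERROR("Failed to invalidate DMA-buf import (%d)\n", r);
			return;
		}

		/* Walk every VM mapping this BO; page-table updates happen
		 * under the root page directory's reservation object. */
		for (bo_base = bo->vm_bo; bo_base; bo_base = bo_base->next) {
			struct amdgpu_vm *vm = bo_base->vm;
			struct dma_resv *resv = vm->root.bo->tbo.base.resv;

			r = dma_resv_lock(resv, NULL);
			if (r)
				continue;
			r = amdgpu_vm_clear_freed(adev, vm, NULL);
			if (!r)
				r = amdgpu_vm_handle_moved(adev, vm);
			dma_resv_unlock(resv);
			if (r)
				DRM_ERROR("Failed to invalidate VM page tables (%d)\n", r);
		}
	}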
491 struct drm_gem_object *obj = &bo->tbo.base; in amdgpu_dmabuf_is_xgmi_accessible()
505 if (amdgpu_xgmi_same_hive(adev, amdgpu_ttm_adev(bo->tbo.bdev)) && in amdgpu_dmabuf_is_xgmi_accessible()
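The last two hits are the XGMI check: &bo->tbo.base recovers the GEM object embedded in the TTM BO (line 491) so the code can tell whether the BO was imported, and amdgpu_ttm_adev(bo->tbo.bdev) identifies the exporting device so the two GPUs can be compared for hive membership (line 505). A sketch of the whole predicate, reconstructed around those lines:

	bool amdgpu_dmabuf_is_xgmi_accessible(struct amdgpu_device *adev,
					      struct amdgpu_bo *bo)
	{
		struct drm_gem_object *obj = &bo->tbo.base;

		if (obj->import_attach) {
			struct dma_buf *dma_buf = obj->import_attach->dmabuf;

			/* Only amdgpu-exported buffers can take the XGMI path. */
			if (dma_buf->ops != &amdgpu_dmabuf_ops)
				return false;

			/* Chase the import back to the exporter's BO. */
			bo = gem_to_amdgpu_bo(dma_buf->priv);
		}

		/* Same XGMI hive and VRAM-preferred: peer access works. */
		if (amdgpu_xgmi_same_hive(adev, amdgpu_ttm_adev(bo->tbo.bdev)) &&
		    (bo->preferred_domains & AMDGPU_GEM_DOMAIN_VRAM))
			return true;

		return false;
	}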