Lines Matching refs:tbo (all hits below are in drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c; the left column is the source line number)

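Every match below dereferences the same embedding chain: struct amdgpu_bo embeds a TTM buffer object as its tbo member, and TTM in turn embeds the GEM object as tbo.base. That is why bo->tbo.base.resv reaches the buffer's dma_resv reservation object, bo->tbo.ttm its system-memory page state, and bo->tbo.bdev the owning TTM device (from which amdgpu_ttm_adev() recovers the amdgpu_device). A simplified sketch of that chain, with fields elided and reordered (the real definitions live in include/drm/drm_gem.h, include/drm/ttm/ttm_bo.h and amdgpu_object.h):

    struct drm_gem_object {
            struct dma_resv *resv;               /* bo->tbo.base.resv     */
            struct drm_vma_offset_node vma_node; /* bo->tbo.base.vma_node */
            size_t size;                         /* bo->tbo.base.size     */
            /* ... */
    };

    struct ttm_buffer_object {
            struct drm_gem_object base;          /* GEM object, embedded  */
            struct ttm_device *bdev;             /* bo->tbo.bdev          */
            enum ttm_bo_type type;               /* bo->tbo.type          */
            struct ttm_resource *resource;       /* bo->tbo.resource      */
            struct ttm_tt *ttm;                  /* bo->tbo.ttm           */
            struct sg_table *sg;                 /* bo->tbo.sg            */
            unsigned pin_count;                  /* bo->tbo.pin_count     */
            /* ... */
    };

    struct amdgpu_bo {
            struct ttm_buffer_object tbo;        /* every match goes through this */
            struct ttm_placement placement;
            /* ... */
    };
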
234 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_amdkfd_release_notify()
263 ret = amdgpu_gem_object_create(adev, mem->bo->tbo.base.size, align, in create_dmamap_sg_bo()
265 ttm_bo_type_sg, mem->bo->tbo.base.resv, &gem_obj); in create_dmamap_sg_bo()
300 dma_resv_replace_fences(bo->tbo.base.resv, ef->base.context, in amdgpu_amdkfd_remove_eviction_fence()
334 BUG_ON(!dma_resv_trylock(bo->tbo.base.resv)); in amdgpu_amdkfd_remove_fence_on_pt_pd_bos()
336 dma_resv_unlock(bo->tbo.base.resv); in amdgpu_amdkfd_remove_fence_on_pt_pd_bos()
348 if (WARN(amdgpu_ttm_tt_get_usermm(bo->tbo.ttm), in amdgpu_amdkfd_bo_validate()
354 ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in amdgpu_amdkfd_bo_validate()
379 struct amdgpu_device *adev = amdgpu_ttm_adev(pd->tbo.bdev); in vm_validate_pt_pd_bos()
396 struct amdgpu_device *adev = amdgpu_ttm_adev(pd->tbo.bdev); in vm_update_pds()
461 struct ttm_tt *src_ttm = mem->bo->tbo.ttm; in kfd_mem_dmamap_userptr()
462 struct ttm_tt *ttm = bo->tbo.ttm; in kfd_mem_dmamap_userptr()
488 ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in kfd_mem_dmamap_userptr()
512 return ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in kfd_mem_dmamap_dmabuf()
548 struct ttm_tt *ttm = bo->tbo.ttm; in kfd_mem_dmamap_sg_bo()
563 dma_addr = mem->bo->tbo.sg->sgl->dma_address; in kfd_mem_dmamap_sg_bo()
564 pr_debug("%d BO size: %d\n", mmio, mem->bo->tbo.sg->sgl->length); in kfd_mem_dmamap_sg_bo()
567 mem->bo->tbo.sg->sgl->length, dir, DMA_ATTR_SKIP_CPU_SYNC); in kfd_mem_dmamap_sg_bo()
573 ttm->sg = create_sg_table(dma_addr, mem->bo->tbo.sg->sgl->length); in kfd_mem_dmamap_sg_bo()
580 ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in kfd_mem_dmamap_sg_bo()
591 dma_unmap_resource(adev->dev, dma_addr, mem->bo->tbo.sg->sgl->length, in kfd_mem_dmamap_sg_bo()
625 struct ttm_tt *ttm = bo->tbo.ttm; in kfd_mem_dmaunmap_userptr()
631 ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in kfd_mem_dmaunmap_userptr()
646 ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in kfd_mem_dmaunmap_dmabuf()
672 struct ttm_tt *ttm = bo->tbo.ttm; in kfd_mem_dmaunmap_sg_bo()
681 ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in kfd_mem_dmaunmap_sg_bo()
690 bo->tbo.sg = NULL; in kfd_mem_dmaunmap_sg_bo()
722 mem->dmabuf = amdgpu_gem_prime_export(&mem->bo->tbo.base, in kfd_mem_attach_dmabuf()
758 struct amdgpu_device *bo_adev = amdgpu_ttm_adev(mem->bo->tbo.bdev); in kfd_mem_attach()
759 unsigned long bo_size = mem->bo->tbo.base.size; in kfd_mem_attach()
800 (amdgpu_ttm_tt_get_usermm(mem->bo->tbo.ttm) && adev->ram_is_direct_mapped) || in kfd_mem_attach()
808 drm_gem_object_get(&bo[i]->tbo.base); in kfd_mem_attach()
813 drm_gem_object_get(&bo[i]->tbo.base); in kfd_mem_attach()
814 } else if (amdgpu_ttm_tt_get_usermm(mem->bo->tbo.ttm)) { in kfd_mem_attach()
821 } else if (mem->bo->tbo.type == ttm_bo_type_sg) { in kfd_mem_attach()
878 drm_gem_object_put(&bo[i]->tbo.base); in kfd_mem_attach()
891 drm_gem_object_put(&bo->tbo.base); in kfd_mem_detach()
905 entry->bo = &bo->tbo; in add_kgd_mem_to_kfd_bo_list()
948 ret = amdgpu_ttm_tt_set_userptr(&bo->tbo, user_addr, 0); in init_user_pages()
975 ret = amdgpu_ttm_tt_get_user_pages(bo, bo->tbo.ttm->pages, &range); in init_user_pages()
987 ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in init_user_pages()
993 amdgpu_ttm_tt_get_user_pages_done(bo->tbo.ttm, range); in init_user_pages()
1050 ctx->kfd_bo.tv.bo = &bo->tbo; in reserve_bo_and_vm()
1113 ctx->kfd_bo.tv.bo = &bo->tbo; in reserve_bo_and_cond_vms()
1269 ret = amdgpu_sync_resv(NULL, sync, pd->tbo.base.resv, in process_sync_pds_resv()
1346 ret = dma_resv_reserve_fences(vm->root.bo->tbo.base.resv, 1); in init_kfd_vm()
1349 dma_resv_add_fence(vm->root.bo->tbo.base.resv, in init_kfd_vm()
1536 struct amdgpu_device *adev = amdgpu_ttm_adev(pd->tbo.bdev); in amdgpu_amdkfd_gpuvm_get_process_page_dir()
1688 bo->tbo.sg = sg; in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1689 bo->tbo.ttm->sg = sg; in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1751 unsigned long bo_size = mem->bo->tbo.base.size; in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1789 if (amdgpu_ttm_tt_get_usermm(mem->bo->tbo.ttm)) { in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1792 amdgpu_ttm_tt_discard_user_pages(mem->bo->tbo.ttm, mem->range); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1821 if (mem->bo->tbo.sg) { in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1822 sg_free_table(mem->bo->tbo.sg); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1823 kfree(mem->bo->tbo.sg); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1838 drm_vma_node_revoke(&mem->bo->tbo.base.vma_node, drm_priv); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1847 drm_gem_object_put(&mem->bo->tbo.base); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1888 if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm)) { in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1897 bo_size = bo->tbo.base.size; in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1919 if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm) && in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1920 bo->tbo.resource->mem_type == TTM_PL_SYSTEM) in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1928 !amdgpu_ttm_tt_get_usermm(bo->tbo.ttm)) { in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1966 if (!amdgpu_ttm_tt_get_usermm(bo->tbo.ttm) && !bo->tbo.pin_count) in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1967 dma_resv_add_fence(bo->tbo.base.resv, in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1987 unsigned long bo_size = mem->bo->tbo.base.size; in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
2031 !amdgpu_ttm_tt_get_usermm(mem->bo->tbo.ttm) && in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
2032 !mem->bo->tbo.pin_count) in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
2084 ret = amdgpu_ttm_alloc_gart(&bo->tbo); in amdgpu_amdkfd_map_gtt_bo_to_gart()
2127 if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm)) { in amdgpu_amdkfd_gpuvm_map_gtt_bo_to_kernel()
2253 drm_gem_object_get(&bo->tbo.base); in amdgpu_amdkfd_gpuvm_import_dmabuf()
2344 amdgpu_ttm_tt_discard_user_pages(bo->tbo.ttm, mem->range); in update_invalid_user_pages()
2355 if (bo->tbo.resource->mem_type != TTM_PL_SYSTEM) { in update_invalid_user_pages()
2359 ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in update_invalid_user_pages()
2369 ret = amdgpu_ttm_tt_get_user_pages(bo, bo->tbo.ttm->pages, in update_invalid_user_pages()
2470 if (bo->tbo.ttm->pages[0]) { in validate_invalid_user_pages()
2472 ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in validate_invalid_user_pages()
2529 mem->bo->tbo.ttm, mem->range); in confirm_valid_user_pages_locked()
2731 dma_resv_for_each_fence(&cursor, bo->tbo.base.resv, in amdgpu_amdkfd_gpuvm_restore_process_bos()
2785 if (mem->bo->tbo.pin_count) in amdgpu_amdkfd_gpuvm_restore_process_bos()
2788 dma_resv_add_fence(mem->bo->tbo.base.resv, in amdgpu_amdkfd_gpuvm_restore_process_bos()
2797 dma_resv_add_fence(bo->tbo.base.resv, in amdgpu_amdkfd_gpuvm_restore_process_bos()
2850 ret = dma_resv_reserve_fences(gws_bo->tbo.base.resv, 1); in amdgpu_amdkfd_add_gws_to_process()
2853 dma_resv_add_fence(gws_bo->tbo.base.resv, in amdgpu_amdkfd_add_gws_to_process()
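Two patterns dominate the listing: migration/population via ttm_bo_validate(&bo->tbo, &bo->placement, &ctx) once the desired placement has been set, and fence attachment to bo->tbo.base.resv only after a slot has been reserved with dma_resv_reserve_fences() (the 1346/1349 and 2850/2853 pairs above). A minimal sketch combining both, assuming a recent kernel; example_validate_and_fence() is a hypothetical helper, the reservation object is assumed to be locked by the caller, and the GTT domain and BOOKKEEP usage are illustrative choices, not what every call site above uses:

    #include <linux/dma-resv.h>
    #include <drm/ttm/ttm_bo.h>    /* header path as of recent kernels */
    #include "amdgpu_object.h"     /* struct amdgpu_bo, placement helper */

    static int example_validate_and_fence(struct amdgpu_bo *bo,
                                          struct dma_fence *fence)
    {
            struct ttm_operation_ctx ctx = { .interruptible = true };
            int ret;

            /* Pick placements for the domain, then let TTM (re)place the BO. */
            amdgpu_bo_placement_from_domain(bo, AMDGPU_GEM_DOMAIN_GTT);
            ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
            if (ret)
                    return ret;

            /* dma_resv_add_fence() must not fail or allocate, so a
             * fence slot has to be reserved up front. */
            ret = dma_resv_reserve_fences(bo->tbo.base.resv, 1);
            if (ret)
                    return ret;

            dma_resv_add_fence(bo->tbo.base.resv, fence,
                               DMA_RESV_USAGE_BOOKKEEP);
            return 0;
    }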