Lines matching refs:ttm (all hits below appear to come from amdgpu's TTM backend, drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c)

71 				   struct ttm_tt *ttm,
74 struct ttm_tt *ttm);
250 flags = amdgpu_ttm_tt_pte_flags(adev, bo->ttm, mem); in amdgpu_ttm_map_buffer()
259 dma_addr = &bo->ttm->dma_address[mm_cur->start >> PAGE_SHIFT]; in amdgpu_ttm_map_buffer()
464 r = amdgpu_ttm_backend_bind(bo->bdev, bo->ttm, new_mem); in amdgpu_bo_move()
477 bo->ttm == NULL)) { in amdgpu_bo_move()
494 amdgpu_ttm_backend_unbind(bo->bdev, bo->ttm); in amdgpu_bo_move()
631 struct ttm_tt ttm; member
640 #define ttm_to_amdgpu_ttm_tt(ptr) container_of(ptr, struct amdgpu_ttm_tt, ttm)
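Note: the two hits above (631, 640) define the wrapper object and its downcast helper. struct amdgpu_ttm_tt embeds struct ttm_tt as its first member, and ttm_to_amdgpu_ttm_tt() recovers the wrapper from a plain struct ttm_tt pointer via container_of(). A minimal sketch of that pattern follows; every field besides .ttm is an assumption inferred from other hits in this listing (gtt->gobj at 994, gtt->offset at 822/835, gtt->userptr at 724). The raw (void *)ttm casts seen at 703 and 1142 are only equivalent to the macro because .ttm happens to be the first member; container_of() itself works for any member offset.

	#include <linux/container_of.h>
	#include <linux/types.h>
	#include <drm/drm_gem.h>
	#include <drm/ttm/ttm_tt.h>

	struct amdgpu_ttm_tt {
		struct ttm_tt		ttm;	/* embedded base, first member */
		struct drm_gem_object	*gobj;	/* assumed from the hit at 994 */
		u64			offset;	/* GART offset, hits at 822/835 */
		u64			userptr;	/* user VA, hit at 724 */
	};

	#define ttm_to_amdgpu_ttm_tt(ptr) \
		container_of(ptr, struct amdgpu_ttm_tt, ttm)
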
653 struct ttm_tt *ttm = bo->tbo.ttm; in amdgpu_ttm_tt_get_user_pages() local
654 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_get_user_pages()
685 readonly = amdgpu_ttm_tt_is_readonly(ttm); in amdgpu_ttm_tt_get_user_pages()
686 r = amdgpu_hmm_range_get_pages(&bo->notifier, start, ttm->num_pages, in amdgpu_ttm_tt_get_user_pages()
700 void amdgpu_ttm_tt_discard_user_pages(struct ttm_tt *ttm, in amdgpu_ttm_tt_discard_user_pages() argument
703 struct amdgpu_ttm_tt *gtt = (void *)ttm; in amdgpu_ttm_tt_discard_user_pages()
715 bool amdgpu_ttm_tt_get_user_pages_done(struct ttm_tt *ttm, in amdgpu_ttm_tt_get_user_pages_done() argument
718 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_get_user_pages_done()
724 gtt->userptr, ttm->num_pages); in amdgpu_ttm_tt_get_user_pages_done()
739 void amdgpu_ttm_tt_set_user_pages(struct ttm_tt *ttm, struct page **pages) in amdgpu_ttm_tt_set_user_pages() argument
743 for (i = 0; i < ttm->num_pages; ++i) in amdgpu_ttm_tt_set_user_pages()
744 ttm->pages[i] = pages ? pages[i] : NULL; in amdgpu_ttm_tt_set_user_pages()
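The loop at 743-744 is essentially the whole body of amdgpu_ttm_tt_set_user_pages(): passing NULL drops every page pointer, a non-NULL array installs the caller's pages. Reconstructed below from the matched lines; only the loop-variable declaration is filled in.

	void amdgpu_ttm_tt_set_user_pages(struct ttm_tt *ttm, struct page **pages)
	{
		unsigned long i;

		/* NULL discards all pointers, non-NULL installs the array */
		for (i = 0; i < ttm->num_pages; ++i)
			ttm->pages[i] = pages ? pages[i] : NULL;
	}
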
753 struct ttm_tt *ttm) in amdgpu_ttm_tt_pin_userptr() argument
756 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_pin_userptr()
763 r = sg_alloc_table_from_pages(ttm->sg, ttm->pages, ttm->num_pages, 0, in amdgpu_ttm_tt_pin_userptr()
764 (u64)ttm->num_pages << PAGE_SHIFT, in amdgpu_ttm_tt_pin_userptr()
770 r = dma_map_sgtable(adev->dev, ttm->sg, direction, 0); in amdgpu_ttm_tt_pin_userptr()
775 drm_prime_sg_to_dma_addr_array(ttm->sg, gtt->ttm.dma_address, in amdgpu_ttm_tt_pin_userptr()
776 ttm->num_pages); in amdgpu_ttm_tt_pin_userptr()
781 kfree(ttm->sg); in amdgpu_ttm_tt_pin_userptr()
782 ttm->sg = NULL; in amdgpu_ttm_tt_pin_userptr()
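Hits 753-782 outline the userptr pin path: wrap the already-gathered user pages in a scatter/gather table, DMA-map it, then mirror the mapping into the flat per-page dma_address array that the GART bind path consumes. A sketch under those hits; the DMA direction choice and the error-label name are assumptions, and the surrounding amdgpu headers (amdgpu.h, linux/dma-mapping.h, drm/drm_prime.h) are taken as given.

	static int pin_userptr_sketch(struct ttm_device *bdev, struct ttm_tt *ttm)
	{
		struct amdgpu_device *adev = amdgpu_ttm_adev(bdev);
		struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm);
		enum dma_data_direction direction = amdgpu_ttm_tt_is_readonly(ttm) ?
			DMA_TO_DEVICE : DMA_BIDIRECTIONAL;	/* assumed policy */
		int r;

		/* wrap the gathered user pages in an sg table (hits 763-764) */
		r = sg_alloc_table_from_pages(ttm->sg, ttm->pages, ttm->num_pages, 0,
					      (u64)ttm->num_pages << PAGE_SHIFT,
					      GFP_KERNEL);
		if (r)
			goto release_sg;

		/* map for DMA, then mirror into dma_address[] (770, 775-776) */
		r = dma_map_sgtable(adev->dev, ttm->sg, direction, 0);
		if (r)
			goto release_sg;

		drm_prime_sg_to_dma_addr_array(ttm->sg, gtt->ttm.dma_address,
					       ttm->num_pages);
		return 0;

	release_sg:
		kfree(ttm->sg);
		ttm->sg = NULL;	/* hits 781-782: free exactly once */
		return r;
	}
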
790 struct ttm_tt *ttm) in amdgpu_ttm_tt_unpin_userptr() argument
793 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_unpin_userptr()
799 if (!ttm->sg || !ttm->sg->sgl) in amdgpu_ttm_tt_unpin_userptr()
803 dma_unmap_sgtable(adev->dev, ttm->sg, direction, 0); in amdgpu_ttm_tt_unpin_userptr()
804 sg_free_table(ttm->sg); in amdgpu_ttm_tt_unpin_userptr()
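The unpin path (hits 790-804) is the mirror image, and the guard at 799 makes a double unpin harmless. Short sketch; the direction recomputation mirrors the pin path and is an assumption.

	static void unpin_userptr_sketch(struct ttm_device *bdev, struct ttm_tt *ttm)
	{
		struct amdgpu_device *adev = amdgpu_ttm_adev(bdev);
		enum dma_data_direction direction = amdgpu_ttm_tt_is_readonly(ttm) ?
			DMA_TO_DEVICE : DMA_BIDIRECTIONAL;	/* assumed, as above */

		/* hit at 799: bail out if the table was never built or
		 * was already freed */
		if (!ttm->sg || !ttm->sg->sgl)
			return;

		dma_unmap_sgtable(adev->dev, ttm->sg, direction, 0);
		sg_free_table(ttm->sg);
	}
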
812 struct ttm_tt *ttm = tbo->ttm; in amdgpu_ttm_gart_bind() local
813 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_gart_bind()
822 gtt->ttm.dma_address, flags); in amdgpu_ttm_gart_bind()
832 ttm->num_pages - page_idx, in amdgpu_ttm_gart_bind()
833 &(gtt->ttm.dma_address[page_idx]), flags); in amdgpu_ttm_gart_bind()
835 amdgpu_gart_bind(adev, gtt->offset, ttm->num_pages, in amdgpu_ttm_gart_bind()
836 gtt->ttm.dma_address, flags); in amdgpu_ttm_gart_bind()
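Hits 812-836 show amdgpu_ttm_gart_bind() taking two shapes: a split bind that programs the first page_idx pages separately from the tail (so the tail can carry adjusted flags), and a plain bind of the whole range. The condition selecting the split path is not visible in the hits, so the sketch below takes it as a hypothetical split_first_pages parameter rather than the file's actual test.

	static void gart_bind_sketch(struct amdgpu_device *adev,
				     struct ttm_buffer_object *tbo,
				     uint64_t flags, bool split_first_pages)
	{
		struct ttm_tt *ttm = tbo->ttm;
		struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm);

		if (split_first_pages) {	/* hypothetical stand-in condition */
			uint64_t page_idx = 1;

			/* first page(s) keep the caller's flags ... */
			amdgpu_gart_bind(adev, gtt->offset, page_idx,
					 gtt->ttm.dma_address, flags);
			/* ... the tail is bound from page_idx on (832-833) */
			amdgpu_gart_bind(adev,
					 gtt->offset + (page_idx << PAGE_SHIFT),
					 ttm->num_pages - page_idx,
					 &gtt->ttm.dma_address[page_idx], flags);
		} else {
			amdgpu_gart_bind(adev, gtt->offset, ttm->num_pages,
					 gtt->ttm.dma_address, flags);
		}
	}
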
847 struct ttm_tt *ttm, in amdgpu_ttm_backend_bind() argument
851 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_backend_bind()
862 r = amdgpu_ttm_tt_pin_userptr(bdev, ttm); in amdgpu_ttm_backend_bind()
867 } else if (ttm->page_flags & TTM_TT_FLAG_EXTERNAL) { in amdgpu_ttm_backend_bind()
868 if (!ttm->sg) { in amdgpu_ttm_backend_bind()
877 ttm->sg = sgt; in amdgpu_ttm_backend_bind()
880 drm_prime_sg_to_dma_addr_array(ttm->sg, gtt->ttm.dma_address, in amdgpu_ttm_backend_bind()
881 ttm->num_pages); in amdgpu_ttm_backend_bind()
884 if (!ttm->num_pages) { in amdgpu_ttm_backend_bind()
886 ttm->num_pages, bo_mem, ttm); in amdgpu_ttm_backend_bind()
896 flags = amdgpu_ttm_tt_pte_flags(adev, ttm, bo_mem); in amdgpu_ttm_backend_bind()
900 amdgpu_gart_bind(adev, gtt->offset, ttm->num_pages, in amdgpu_ttm_backend_bind()
901 gtt->ttm.dma_address, flags); in amdgpu_ttm_backend_bind()
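Hits 847-901 give the overall bind flow: userptr BOs pin their pages first, imported (TTM_TT_FLAG_EXTERNAL) BOs mirror the exporter's sg table, then the placement is translated into PTE flags and the range is bound into the GART. A condensed sketch; the gtt->offset setup is an assumption, and the dma-buf re-mapping branch when ttm->sg is missing (hits 868-877) is omitted.

	static int backend_bind_sketch(struct ttm_device *bdev, struct ttm_tt *ttm,
				       struct ttm_resource *bo_mem)
	{
		struct amdgpu_device *adev = amdgpu_ttm_adev(bdev);
		struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm);
		uint64_t flags;
		int r;

		if (gtt->userptr) {
			/* pin pages and build the sg table first (hit at 862) */
			r = amdgpu_ttm_tt_pin_userptr(bdev, ttm);
			if (r)
				return r;
		} else if ((ttm->page_flags & TTM_TT_FLAG_EXTERNAL) && ttm->sg) {
			/* import: mirror the exporter's sg table (880-881) */
			drm_prime_sg_to_dma_addr_array(ttm->sg,
						       gtt->ttm.dma_address,
						       ttm->num_pages);
		}

		if (!ttm->num_pages)
			WARN(1, "nothing to bind %u pages for mreg %p back %p!\n",
			     ttm->num_pages, bo_mem, ttm);

		/* assumed setup: remember where in the GART this tt lands */
		gtt->offset = (u64)bo_mem->start << PAGE_SHIFT;

		/* translate placement into PTE flags, then bind (896-901) */
		flags = amdgpu_ttm_tt_pte_flags(adev, ttm, bo_mem);
		amdgpu_gart_bind(adev, gtt->offset, ttm->num_pages,
				 gtt->ttm.dma_address, flags);
		return 0;
	}
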
918 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(bo->ttm); in amdgpu_ttm_alloc_gart()
949 flags = amdgpu_ttm_tt_pte_flags(adev, bo->ttm, tmp); in amdgpu_ttm_alloc_gart()
972 if (!tbo->ttm) in amdgpu_ttm_recover_gart()
975 flags = amdgpu_ttm_tt_pte_flags(adev, tbo->ttm, tbo->resource); in amdgpu_ttm_recover_gart()
986 struct ttm_tt *ttm) in amdgpu_ttm_backend_unbind() argument
989 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_backend_unbind()
993 amdgpu_ttm_tt_unpin_userptr(bdev, ttm); in amdgpu_ttm_backend_unbind()
994 } else if (ttm->sg && gtt->gobj->import_attach) { in amdgpu_ttm_backend_unbind()
998 dma_buf_unmap_attachment(attach, ttm->sg, DMA_BIDIRECTIONAL); in amdgpu_ttm_backend_unbind()
999 ttm->sg = NULL; in amdgpu_ttm_backend_unbind()
1009 amdgpu_gart_unbind(adev, gtt->offset, ttm->num_pages); in amdgpu_ttm_backend_unbind()
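Unbind (hits 986-1009) undoes the bind cases in the same order: userptr BOs unpin their pages, imported BOs hand the sg table back to the exporter through dma_buf_unmap_attachment(), and finally the GART range is cleared. Sketch; the attachment lookup through gtt->gobj->import_attach follows the hit at 994, and any bound-state bookkeeping the real function does is omitted.

	static void backend_unbind_sketch(struct ttm_device *bdev,
					  struct ttm_tt *ttm)
	{
		struct amdgpu_device *adev = amdgpu_ttm_adev(bdev);
		struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm);

		if (gtt->userptr) {
			amdgpu_ttm_tt_unpin_userptr(bdev, ttm);
		} else if (ttm->sg && gtt->gobj->import_attach) {
			struct dma_buf_attachment *attach;

			/* return the sg table to the dma-buf exporter (998-999) */
			attach = gtt->gobj->import_attach;
			dma_buf_unmap_attachment(attach, ttm->sg,
						 DMA_BIDIRECTIONAL);
			ttm->sg = NULL;
		}

		/* clear the whole previously bound range (hit at 1009) */
		amdgpu_gart_unbind(adev, gtt->offset, ttm->num_pages);
	}
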
1014 struct ttm_tt *ttm) in amdgpu_ttm_backend_destroy() argument
1016 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_backend_destroy()
1021 ttm_tt_fini(&gtt->ttm); in amdgpu_ttm_backend_destroy()
1052 if (ttm_sg_tt_init(&gtt->ttm, bo, page_flags, caching)) { in amdgpu_ttm_tt_create()
1056 return &gtt->ttm; in amdgpu_ttm_tt_create()
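tt_create (hits 1052-1056) allocates the wrapper and initializes the embedded ttm_tt with ttm_sg_tt_init(), the sg variant that sizes the dma_address array alongside the page array; on init failure the wrapper is freed and NULL returned. Sketch assuming kzalloc for the wrapper and a caching policy chosen by the caller.

	static struct ttm_tt *tt_create_sketch(struct ttm_buffer_object *bo,
					       uint32_t page_flags)
	{
		struct amdgpu_ttm_tt *gtt;
		enum ttm_caching caching = ttm_cached;	/* assumed policy */

		gtt = kzalloc(sizeof(*gtt), GFP_KERNEL);
		if (!gtt)
			return NULL;

		gtt->gobj = &bo->base;	/* assumed, consistent with hit at 994 */

		/* sg variant: allocates dma_address[] next to pages[] (1052) */
		if (ttm_sg_tt_init(&gtt->ttm, bo, page_flags, caching)) {
			kfree(gtt);
			return NULL;
		}
		return &gtt->ttm;	/* hit at 1056 */
	}
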
1066 struct ttm_tt *ttm, in amdgpu_ttm_tt_populate() argument
1070 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_populate()
1076 ttm->sg = kzalloc(sizeof(struct sg_table), GFP_KERNEL); in amdgpu_ttm_tt_populate()
1077 if (!ttm->sg) in amdgpu_ttm_tt_populate()
1082 if (ttm->page_flags & TTM_TT_FLAG_EXTERNAL) in amdgpu_ttm_tt_populate()
1085 ret = ttm_pool_alloc(&adev->mman.bdev.pool, ttm, ctx); in amdgpu_ttm_tt_populate()
1089 for (i = 0; i < ttm->num_pages; ++i) in amdgpu_ttm_tt_populate()
1090 ttm->pages[i]->mapping = bdev->dev_mapping; in amdgpu_ttm_tt_populate()
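Populate (hits 1066-1090) has three cases: userptr BOs only need an empty sg_table allocated, since the pages arrive later through the HMM path above; external/imported BOs are skipped entirely; ordinary BOs get pages from the TTM pool, after which every page is tagged with the device's address_space. Sketch from those hits; the -ENOMEM return follows the NULL check at 1077.

	static int tt_populate_sketch(struct ttm_device *bdev,
				      struct ttm_tt *ttm,
				      struct ttm_operation_ctx *ctx)
	{
		struct amdgpu_device *adev = amdgpu_ttm_adev(bdev);
		struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm);
		pgoff_t i;
		int ret;

		/* userptr: pages are filled in later, just provide the table */
		if (gtt->userptr) {
			ttm->sg = kzalloc(sizeof(struct sg_table), GFP_KERNEL);
			if (!ttm->sg)
				return -ENOMEM;
			return 0;
		}

		/* imported BOs bring their own backing storage (hit at 1082) */
		if (ttm->page_flags & TTM_TT_FLAG_EXTERNAL)
			return 0;

		ret = ttm_pool_alloc(&adev->mman.bdev.pool, ttm, ctx);
		if (ret)
			return ret;

		/* tag every page with the device mapping (1089-1090) */
		for (i = 0; i < ttm->num_pages; ++i)
			ttm->pages[i]->mapping = bdev->dev_mapping;
		return 0;
	}
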
1102 struct ttm_tt *ttm) in amdgpu_ttm_tt_unpopulate() argument
1104 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_unpopulate()
1108 amdgpu_ttm_backend_unbind(bdev, ttm); in amdgpu_ttm_tt_unpopulate()
1111 amdgpu_ttm_tt_set_user_pages(ttm, NULL); in amdgpu_ttm_tt_unpopulate()
1112 kfree(ttm->sg); in amdgpu_ttm_tt_unpopulate()
1113 ttm->sg = NULL; in amdgpu_ttm_tt_unpopulate()
1117 if (ttm->page_flags & TTM_TT_FLAG_EXTERNAL) in amdgpu_ttm_tt_unpopulate()
1120 for (i = 0; i < ttm->num_pages; ++i) in amdgpu_ttm_tt_unpopulate()
1121 ttm->pages[i]->mapping = NULL; in amdgpu_ttm_tt_unpopulate()
1124 return ttm_pool_free(&adev->mman.bdev.pool, ttm); in amdgpu_ttm_tt_unpopulate()
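Unpopulate (hits 1102-1124) is the mirror image: unbind first, then for userptr BOs drop the page pointers and free the sg_table, skip external BOs, and return everything else to the pool after clearing each page's mapping. Sketch reconstructed from those hits.

	static void tt_unpopulate_sketch(struct ttm_device *bdev,
					 struct ttm_tt *ttm)
	{
		struct amdgpu_device *adev = amdgpu_ttm_adev(bdev);
		struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm);
		pgoff_t i;

		amdgpu_ttm_backend_unbind(bdev, ttm);	/* hit at 1108 */

		if (gtt->userptr) {
			/* drop page pointers, free the table (1111-1113) */
			amdgpu_ttm_tt_set_user_pages(ttm, NULL);
			kfree(ttm->sg);
			ttm->sg = NULL;
			return;
		}

		if (ttm->page_flags & TTM_TT_FLAG_EXTERNAL)
			return;

		for (i = 0; i < ttm->num_pages; ++i)
			ttm->pages[i]->mapping = NULL;

		ttm_pool_free(&adev->mman.bdev.pool, ttm);	/* hit at 1124 */
	}
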
1139 if (!tbo->ttm) in amdgpu_ttm_tt_get_userptr()
1142 gtt = (void *)tbo->ttm; in amdgpu_ttm_tt_get_userptr()
1164 if (!bo->ttm) { in amdgpu_ttm_tt_set_userptr()
1166 bo->ttm = amdgpu_ttm_tt_create(bo, 0); in amdgpu_ttm_tt_set_userptr()
1167 if (bo->ttm == NULL) in amdgpu_ttm_tt_set_userptr()
1172 bo->ttm->page_flags |= TTM_TT_FLAG_EXTERNAL; in amdgpu_ttm_tt_set_userptr()
1174 gtt = ttm_to_amdgpu_ttm_tt(bo->ttm); in amdgpu_ttm_tt_set_userptr()
1189 struct mm_struct *amdgpu_ttm_tt_get_usermm(struct ttm_tt *ttm) in amdgpu_ttm_tt_get_usermm() argument
1191 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_get_usermm()
1207 bool amdgpu_ttm_tt_affect_userptr(struct ttm_tt *ttm, unsigned long start, in amdgpu_ttm_tt_affect_userptr() argument
1210 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_affect_userptr()
1219 size = (unsigned long)gtt->ttm.num_pages * PAGE_SIZE; in amdgpu_ttm_tt_affect_userptr()
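affect_userptr (hits 1207-1219) answers whether an invalidation range [start, end) overlaps the BO's user mapping; the hit at 1219 computes the mapping size in bytes from the page count. A sketch of the interval-overlap test those hits imply; the early return for non-userptr BOs is an assumption.

	bool affect_userptr_sketch(struct ttm_tt *ttm, unsigned long start,
				   unsigned long end)
	{
		struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm);
		unsigned long size;

		/* assumed guard: only userptr BOs have a user range at all */
		if (!gtt || !gtt->userptr)
			return false;

		/* size of the user range backing this ttm_tt (hit at 1219) */
		size = (unsigned long)gtt->ttm.num_pages * PAGE_SIZE;

		/* no overlap if the range ends before or starts after us */
		if (gtt->userptr > end || gtt->userptr + size <= start)
			return false;

		return true;
	}
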
1231 bool amdgpu_ttm_tt_is_userptr(struct ttm_tt *ttm) in amdgpu_ttm_tt_is_userptr() argument
1233 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_is_userptr()
1244 bool amdgpu_ttm_tt_is_readonly(struct ttm_tt *ttm) in amdgpu_ttm_tt_is_readonly() argument
1246 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_is_readonly()
1262 uint64_t amdgpu_ttm_tt_pde_flags(struct ttm_tt *ttm, struct ttm_resource *mem) in amdgpu_ttm_tt_pde_flags() argument
1273 if (ttm->caching == ttm_cached) in amdgpu_ttm_tt_pde_flags()
1293 uint64_t amdgpu_ttm_tt_pte_flags(struct amdgpu_device *adev, struct ttm_tt *ttm, in amdgpu_ttm_tt_pte_flags() argument
1296 uint64_t flags = amdgpu_ttm_tt_pde_flags(ttm, mem); in amdgpu_ttm_tt_pte_flags()
1301 if (!amdgpu_ttm_tt_is_readonly(ttm)) in amdgpu_ttm_tt_pte_flags()
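The last two hits split PTE computation into two layers: amdgpu_ttm_tt_pde_flags() derives the bits that are valid even for page directory entries (the caching test at 1273 likely selects snooping for cached ttm_tt), and amdgpu_ttm_tt_pte_flags() layers the leaf-only bits on top, withholding write permission from read-only userptr BOs (hit at 1301). A sketch of that layering; the specific flag names are assumptions drawn from amdgpu's PTE bit vocabulary, not from the hits themselves.

	uint64_t pte_flags_sketch(struct amdgpu_device *adev, struct ttm_tt *ttm,
				  struct ttm_resource *mem)
	{
		/* directory-safe bits first: caching, placement (1262-1273) */
		uint64_t flags = amdgpu_ttm_tt_pde_flags(ttm, mem);

		/* ... then the leaf-only bits */
		flags |= AMDGPU_PTE_VALID;		/* assumed bit name */
		if (!amdgpu_ttm_tt_is_readonly(ttm))	/* hit at 1301 */
			flags |= AMDGPU_PTE_WRITEABLE;	/* assumed bit name */

		return flags;
	}
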