Lines matching refs:ttm (source line number, matched code, enclosing function)
69 struct ttm_tt *ttm,
72 struct ttm_tt *ttm);
244 flags = amdgpu_ttm_tt_pte_flags(adev, bo->ttm, mem); in amdgpu_ttm_map_buffer()
253 dma_addr = &bo->ttm->dma_address[mm_cur->start >> PAGE_SHIFT]; in amdgpu_ttm_map_buffer()
502 r = amdgpu_ttm_backend_bind(bo->bdev, bo->ttm, new_mem); in amdgpu_bo_move()
511 bo->ttm == NULL)) { in amdgpu_bo_move()
530 amdgpu_ttm_backend_unbind(bo->bdev, bo->ttm); in amdgpu_bo_move()
678 struct ttm_tt ttm; member
688 #define ttm_to_amdgpu_ttm_tt(ptr) container_of(ptr, struct amdgpu_ttm_tt, ttm)
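The two hits above show the wrapper pattern used throughout this file: struct amdgpu_ttm_tt embeds struct ttm_tt as a member, and ttm_to_amdgpu_ttm_tt() recovers the wrapper from a pointer to that member via container_of(). A minimal compilable sketch of the relationship; the field set and the offsetof()-based expansion are illustrative, not the kernel's exact definitions:

    #include <stddef.h>

    struct ttm_tt { unsigned long num_pages; };   /* stand-in for the TTM type */

    struct amdgpu_ttm_tt {
        struct ttm_tt ttm;                        /* embedded member (source line 678) */
        /* driver-private fields (userptr, offset, ...) elided */
    };

    /* container_of() reduces to pointer arithmetic on the member offset */
    #define ttm_to_amdgpu_ttm_tt(ptr) \
        ((struct amdgpu_ttm_tt *)((char *)(ptr) - offsetof(struct amdgpu_ttm_tt, ttm)))

This is why several functions below can take a plain struct ttm_tt * and still reach driver-private state such as gtt->userptr.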
701 struct ttm_tt *ttm = bo->tbo.ttm; in amdgpu_ttm_tt_get_user_pages() local
702 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_get_user_pages()
733 readonly = amdgpu_ttm_tt_is_readonly(ttm); in amdgpu_ttm_tt_get_user_pages()
734 r = amdgpu_hmm_range_get_pages(&bo->notifier, start, ttm->num_pages, in amdgpu_ttm_tt_get_user_pages()
748 void amdgpu_ttm_tt_discard_user_pages(struct ttm_tt *ttm, in amdgpu_ttm_tt_discard_user_pages() argument
751 struct amdgpu_ttm_tt *gtt = (void *)ttm; in amdgpu_ttm_tt_discard_user_pages()
763 bool amdgpu_ttm_tt_get_user_pages_done(struct ttm_tt *ttm, in amdgpu_ttm_tt_get_user_pages_done() argument
766 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_get_user_pages_done()
772 gtt->userptr, ttm->num_pages); in amdgpu_ttm_tt_get_user_pages_done()
787 void amdgpu_ttm_tt_set_user_pages(struct ttm_tt *ttm, struct page **pages) in amdgpu_ttm_tt_set_user_pages() argument
791 for (i = 0; i < ttm->num_pages; ++i) in amdgpu_ttm_tt_set_user_pages()
792 ttm->pages[i] = pages ? pages[i] : NULL; in amdgpu_ttm_tt_set_user_pages()
801 struct ttm_tt *ttm) in amdgpu_ttm_tt_pin_userptr() argument
804 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_pin_userptr()
811 r = sg_alloc_table_from_pages(ttm->sg, ttm->pages, ttm->num_pages, 0, in amdgpu_ttm_tt_pin_userptr()
812 (u64)ttm->num_pages << PAGE_SHIFT, in amdgpu_ttm_tt_pin_userptr()
818 r = dma_map_sgtable(adev->dev, ttm->sg, direction, 0); in amdgpu_ttm_tt_pin_userptr()
823 drm_prime_sg_to_dma_addr_array(ttm->sg, gtt->ttm.dma_address, in amdgpu_ttm_tt_pin_userptr()
824 ttm->num_pages); in amdgpu_ttm_tt_pin_userptr()
829 sg_free_table(ttm->sg); in amdgpu_ttm_tt_pin_userptr()
831 kfree(ttm->sg); in amdgpu_ttm_tt_pin_userptr()
832 ttm->sg = NULL; in amdgpu_ttm_tt_pin_userptr()
840 struct ttm_tt *ttm) in amdgpu_ttm_tt_unpin_userptr() argument
843 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_unpin_userptr()
849 if (!ttm->sg || !ttm->sg->sgl) in amdgpu_ttm_tt_unpin_userptr()
853 dma_unmap_sgtable(adev->dev, ttm->sg, direction, 0); in amdgpu_ttm_tt_unpin_userptr()
854 sg_free_table(ttm->sg); in amdgpu_ttm_tt_unpin_userptr()
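amdgpu_ttm_tt_pin_userptr() and amdgpu_ttm_tt_unpin_userptr() above pair an sg_table built from the pinned user pages with a DMA mapping of that table. A hedged sketch of the pin side, with the driver's locking, goto labels, and HMM interaction elided (userptr_map_sketch is a made-up name):

    #include <linux/dma-mapping.h>
    #include <linux/scatterlist.h>
    #include <linux/slab.h>
    #include <drm/ttm/ttm_tt.h>

    static int userptr_map_sketch(struct device *dev, struct ttm_tt *ttm,
                                  enum dma_data_direction dir)
    {
        int r;

        /* Wrap the already-pinned user pages in a scatter/gather table. */
        r = sg_alloc_table_from_pages(ttm->sg, ttm->pages, ttm->num_pages, 0,
                                      (u64)ttm->num_pages << PAGE_SHIFT,
                                      GFP_KERNEL);
        if (r)
            return r;

        /* DMA-map it for the device; on failure release the table again. */
        r = dma_map_sgtable(dev, ttm->sg, dir, 0);
        if (r) {
            sg_free_table(ttm->sg);
            kfree(ttm->sg);
            ttm->sg = NULL;
        }
        return r;
    }

The unpin side mirrors this with dma_unmap_sgtable() followed by sg_free_table(); the ttm->sg && ttm->sg->sgl check at line 849 makes unpin a no-op when the pin never happened.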
865 struct ttm_tt *ttm, uint64_t flags) in amdgpu_ttm_gart_bind_gfx9_mqd() argument
867 struct amdgpu_ttm_tt *gtt = (void *)ttm; in amdgpu_ttm_gart_bind_gfx9_mqd()
868 uint64_t total_pages = ttm->num_pages; in amdgpu_ttm_gart_bind_gfx9_mqd()
881 1, &gtt->ttm.dma_address[page_idx], flags); in amdgpu_ttm_gart_bind_gfx9_mqd()
889 &gtt->ttm.dma_address[page_idx + 1], in amdgpu_ttm_gart_bind_gfx9_mqd()
899 struct ttm_tt *ttm = tbo->ttm; in amdgpu_ttm_gart_bind() local
900 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_gart_bind()
906 amdgpu_ttm_gart_bind_gfx9_mqd(adev, ttm, flags); in amdgpu_ttm_gart_bind()
908 amdgpu_gart_bind(adev, gtt->offset, ttm->num_pages, in amdgpu_ttm_gart_bind()
909 gtt->ttm.dma_address, flags); in amdgpu_ttm_gart_bind()
921 struct ttm_tt *ttm, in amdgpu_ttm_backend_bind() argument
925 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_backend_bind()
936 r = amdgpu_ttm_tt_pin_userptr(bdev, ttm); in amdgpu_ttm_backend_bind()
941 } else if (ttm->page_flags & TTM_TT_FLAG_EXTERNAL) { in amdgpu_ttm_backend_bind()
942 if (!ttm->sg) { in amdgpu_ttm_backend_bind()
951 ttm->sg = sgt; in amdgpu_ttm_backend_bind()
954 drm_prime_sg_to_dma_addr_array(ttm->sg, gtt->ttm.dma_address, in amdgpu_ttm_backend_bind()
955 ttm->num_pages); in amdgpu_ttm_backend_bind()
958 if (!ttm->num_pages) { in amdgpu_ttm_backend_bind()
960 ttm->num_pages, bo_mem, ttm); in amdgpu_ttm_backend_bind()
970 flags = amdgpu_ttm_tt_pte_flags(adev, ttm, bo_mem); in amdgpu_ttm_backend_bind()
974 amdgpu_gart_bind(adev, gtt->offset, ttm->num_pages, in amdgpu_ttm_backend_bind()
975 gtt->ttm.dma_address, flags); in amdgpu_ttm_backend_bind()
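amdgpu_ttm_backend_bind() dispatches on the TT type before touching the GART: userptr TTs get pinned and DMA-mapped, imported buffers (TTM_TT_FLAG_EXTERNAL) get their dma-buf sg_table converted into the per-page DMA-address array, and both paths end in amdgpu_gart_bind(). A hedged control-flow sketch using the names from the hits above; error handling and the attachment-mapping branch (lines 942-951) are abbreviated:

    if (gtt->userptr) {
        /* pin the user pages, then build and DMA-map the sg_table */
        r = amdgpu_ttm_tt_pin_userptr(bdev, ttm);
        if (r)
            return r;
    } else if (ttm->page_flags & TTM_TT_FLAG_EXTERNAL) {
        /* imported dma-buf: ttm->sg comes from the exporter (mapped at
         * lines 942-951 when not already present) */
        drm_prime_sg_to_dma_addr_array(ttm->sg, gtt->ttm.dma_address,
                                       ttm->num_pages);
    }

    flags = amdgpu_ttm_tt_pte_flags(adev, ttm, bo_mem);   /* PTE bits for this placement */
    amdgpu_gart_bind(adev, gtt->offset, ttm->num_pages,
                     gtt->ttm.dma_address, flags);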
992 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(bo->ttm); in amdgpu_ttm_alloc_gart()
1019 flags = amdgpu_ttm_tt_pte_flags(adev, bo->ttm, tmp); in amdgpu_ttm_alloc_gart()
1042 if (!tbo->ttm) in amdgpu_ttm_recover_gart()
1045 flags = amdgpu_ttm_tt_pte_flags(adev, tbo->ttm, tbo->resource); in amdgpu_ttm_recover_gart()
1056 struct ttm_tt *ttm) in amdgpu_ttm_backend_unbind() argument
1059 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_backend_unbind()
1063 amdgpu_ttm_tt_unpin_userptr(bdev, ttm); in amdgpu_ttm_backend_unbind()
1064 } else if (ttm->sg && drm_gem_is_imported(gtt->gobj)) { in amdgpu_ttm_backend_unbind()
1068 dma_buf_unmap_attachment(attach, ttm->sg, DMA_BIDIRECTIONAL); in amdgpu_ttm_backend_unbind()
1069 ttm->sg = NULL; in amdgpu_ttm_backend_unbind()
1079 amdgpu_gart_unbind(adev, gtt->offset, ttm->num_pages); in amdgpu_ttm_backend_unbind()
1084 struct ttm_tt *ttm) in amdgpu_ttm_backend_destroy() argument
1086 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_backend_destroy()
1091 ttm_tt_fini(&gtt->ttm); in amdgpu_ttm_backend_destroy()
1127 if (ttm_sg_tt_init(&gtt->ttm, bo, page_flags, caching)) { in amdgpu_ttm_tt_create()
1131 return &gtt->ttm; in amdgpu_ttm_tt_create()
1141 struct ttm_tt *ttm, in amdgpu_ttm_tt_populate() argument
1145 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_populate()
1152 ttm->sg = kzalloc(sizeof(struct sg_table), GFP_KERNEL); in amdgpu_ttm_tt_populate()
1153 if (!ttm->sg) in amdgpu_ttm_tt_populate()
1158 if (ttm->page_flags & TTM_TT_FLAG_EXTERNAL) in amdgpu_ttm_tt_populate()
1165 ret = ttm_pool_alloc(pool, ttm, ctx); in amdgpu_ttm_tt_populate()
1169 for (i = 0; i < ttm->num_pages; ++i) in amdgpu_ttm_tt_populate()
1170 ttm->pages[i]->mapping = bdev->dev_mapping; in amdgpu_ttm_tt_populate()
1182 struct ttm_tt *ttm) in amdgpu_ttm_tt_unpopulate() argument
1184 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_unpopulate()
1189 amdgpu_ttm_backend_unbind(bdev, ttm); in amdgpu_ttm_tt_unpopulate()
1192 amdgpu_ttm_tt_set_user_pages(ttm, NULL); in amdgpu_ttm_tt_unpopulate()
1193 kfree(ttm->sg); in amdgpu_ttm_tt_unpopulate()
1194 ttm->sg = NULL; in amdgpu_ttm_tt_unpopulate()
1198 if (ttm->page_flags & TTM_TT_FLAG_EXTERNAL) in amdgpu_ttm_tt_unpopulate()
1201 for (i = 0; i < ttm->num_pages; ++i) in amdgpu_ttm_tt_unpopulate()
1202 ttm->pages[i]->mapping = NULL; in amdgpu_ttm_tt_unpopulate()
1211 return ttm_pool_free(pool, ttm); in amdgpu_ttm_tt_unpopulate()
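Populate and unpopulate are symmetric around the TTM page pool: userptr TTs only need an empty sg_table container (filled later by the pin path), TTs marked TTM_TT_FLAG_EXTERNAL are backed by their exporter, and everything else goes through ttm_pool_alloc()/ttm_pool_free(). A condensed, hedged sketch of the populate side:

    /* Populate (sketch): two early-return cases, then the page pool. */
    if (gtt->userptr) {
        ttm->sg = kzalloc(sizeof(struct sg_table), GFP_KERNEL);
        return ttm->sg ? 0 : -ENOMEM;
    }
    if (ttm->page_flags & TTM_TT_FLAG_EXTERNAL)
        return 0;
    ret = ttm_pool_alloc(pool, ttm, ctx);   /* then set page->mapping (line 1170) */

Unpopulate walks the same cases in reverse: unbind, drop the user pages and free ttm->sg for userptr, skip external TTs, clear page->mapping, and return the pages to the pool.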
1226 if (!tbo->ttm) in amdgpu_ttm_tt_get_userptr()
1229 gtt = (void *)tbo->ttm; in amdgpu_ttm_tt_get_userptr()
1251 if (!bo->ttm) { in amdgpu_ttm_tt_set_userptr()
1253 bo->ttm = amdgpu_ttm_tt_create(bo, 0); in amdgpu_ttm_tt_set_userptr()
1254 if (bo->ttm == NULL) in amdgpu_ttm_tt_set_userptr()
1259 bo->ttm->page_flags |= TTM_TT_FLAG_EXTERNAL; in amdgpu_ttm_tt_set_userptr()
1261 gtt = ttm_to_amdgpu_ttm_tt(bo->ttm); in amdgpu_ttm_tt_set_userptr()
1276 struct mm_struct *amdgpu_ttm_tt_get_usermm(struct ttm_tt *ttm) in amdgpu_ttm_tt_get_usermm() argument
1278 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_get_usermm()
1294 bool amdgpu_ttm_tt_affect_userptr(struct ttm_tt *ttm, unsigned long start, in amdgpu_ttm_tt_affect_userptr() argument
1297 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_affect_userptr()
1306 size = (unsigned long)gtt->ttm.num_pages * PAGE_SIZE; in amdgpu_ttm_tt_affect_userptr()
1318 bool amdgpu_ttm_tt_is_userptr(struct ttm_tt *ttm) in amdgpu_ttm_tt_is_userptr() argument
1320 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_is_userptr()
1331 bool amdgpu_ttm_tt_is_readonly(struct ttm_tt *ttm) in amdgpu_ttm_tt_is_readonly() argument
1333 struct amdgpu_ttm_tt *gtt = ttm_to_amdgpu_ttm_tt(ttm); in amdgpu_ttm_tt_is_readonly()
1349 uint64_t amdgpu_ttm_tt_pde_flags(struct ttm_tt *ttm, struct ttm_resource *mem) in amdgpu_ttm_tt_pde_flags() argument
1361 if (ttm->caching == ttm_cached) in amdgpu_ttm_tt_pde_flags()
1381 uint64_t amdgpu_ttm_tt_pte_flags(struct amdgpu_device *adev, struct ttm_tt *ttm, in amdgpu_ttm_tt_pte_flags() argument
1384 uint64_t flags = amdgpu_ttm_tt_pde_flags(ttm, mem); in amdgpu_ttm_tt_pte_flags()
1389 if (!amdgpu_ttm_tt_is_readonly(ttm)) in amdgpu_ttm_tt_pte_flags()
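The flag helpers split into two layers: amdgpu_ttm_tt_pde_flags() derives the placement- and caching-dependent bits (note the ttm->caching check at line 1361), and amdgpu_ttm_tt_pte_flags() adds access permissions on top. A hedged sketch of the derivation; flag names follow amdgpu's GPU VM definitions, and hardware-specific bits are elided:

    uint64_t flags = amdgpu_ttm_tt_pde_flags(ttm, mem);

    flags |= AMDGPU_PTE_READABLE;             /* reads always allowed */
    if (!amdgpu_ttm_tt_is_readonly(ttm))      /* read-only userptr stays RO */
        flags |= AMDGPU_PTE_WRITEABLE;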