Lines Matching refs:ttm

68 				   struct ttm_tt *ttm,
71 struct ttm_tt *ttm);
242 flags = amdgpu_ttm_tt_pte_flags(adev, bo->ttm, mem); in amdgpu_ttm_map_buffer()
251 dma_addr = &bo->ttm->dma_address[mm_cur->start >> PAGE_SHIFT]; in amdgpu_ttm_map_buffer()
475 r = amdgpu_ttm_backend_bind(bo->bdev, bo->ttm, new_mem); in amdgpu_bo_move()
487 if (old_mem->mem_type == TTM_PL_SYSTEM && bo->ttm == NULL) { in amdgpu_bo_move()
504 amdgpu_ttm_backend_unbind(bo->bdev, bo->ttm); in amdgpu_bo_move()
641 struct ttm_tt ttm; member
663 struct ttm_tt *ttm = bo->tbo.ttm; in amdgpu_ttm_tt_get_user_pages() local
664 struct amdgpu_ttm_tt *gtt = (void *)ttm; in amdgpu_ttm_tt_get_user_pages()
696 readonly = amdgpu_ttm_tt_is_readonly(ttm); in amdgpu_ttm_tt_get_user_pages()
698 ttm->num_pages, &gtt->range, readonly, in amdgpu_ttm_tt_get_user_pages()
716 bool amdgpu_ttm_tt_get_user_pages_done(struct ttm_tt *ttm) in amdgpu_ttm_tt_get_user_pages_done() argument
718 struct amdgpu_ttm_tt *gtt = (void *)ttm; in amdgpu_ttm_tt_get_user_pages_done()
725 gtt->userptr, ttm->num_pages); in amdgpu_ttm_tt_get_user_pages_done()
750 void amdgpu_ttm_tt_set_user_pages(struct ttm_tt *ttm, struct page **pages) in amdgpu_ttm_tt_set_user_pages() argument
754 for (i = 0; i < ttm->num_pages; ++i) in amdgpu_ttm_tt_set_user_pages()
755 ttm->pages[i] = pages ? pages[i] : NULL; in amdgpu_ttm_tt_set_user_pages()
764 struct ttm_tt *ttm) in amdgpu_ttm_tt_pin_userptr() argument
767 struct amdgpu_ttm_tt *gtt = (void *)ttm; in amdgpu_ttm_tt_pin_userptr()
774 r = sg_alloc_table_from_pages(ttm->sg, ttm->pages, ttm->num_pages, 0, in amdgpu_ttm_tt_pin_userptr()
775 (u64)ttm->num_pages << PAGE_SHIFT, in amdgpu_ttm_tt_pin_userptr()
781 r = dma_map_sgtable(adev->dev, ttm->sg, direction, 0); in amdgpu_ttm_tt_pin_userptr()
786 drm_prime_sg_to_dma_addr_array(ttm->sg, gtt->ttm.dma_address, in amdgpu_ttm_tt_pin_userptr()
787 ttm->num_pages); in amdgpu_ttm_tt_pin_userptr()
792 kfree(ttm->sg); in amdgpu_ttm_tt_pin_userptr()
793 ttm->sg = NULL; in amdgpu_ttm_tt_pin_userptr()
801 struct ttm_tt *ttm) in amdgpu_ttm_tt_unpin_userptr() argument
804 struct amdgpu_ttm_tt *gtt = (void *)ttm; in amdgpu_ttm_tt_unpin_userptr()
810 if (!ttm->sg || !ttm->sg->sgl) in amdgpu_ttm_tt_unpin_userptr()
814 dma_unmap_sgtable(adev->dev, ttm->sg, direction, 0); in amdgpu_ttm_tt_unpin_userptr()
815 sg_free_table(ttm->sg); in amdgpu_ttm_tt_unpin_userptr()
821 for (i = 0; i < ttm->num_pages; i++) { in amdgpu_ttm_tt_unpin_userptr()
822 if (ttm->pages[i] != in amdgpu_ttm_tt_unpin_userptr()
827 WARN((i == ttm->num_pages), "Missing get_user_page_done\n"); in amdgpu_ttm_tt_unpin_userptr()
837 struct ttm_tt *ttm = tbo->ttm; in amdgpu_ttm_gart_bind() local
838 struct amdgpu_ttm_tt *gtt = (void *)ttm; in amdgpu_ttm_gart_bind()
848 gtt->ttm.dma_address, flags); in amdgpu_ttm_gart_bind()
861 ttm->num_pages - page_idx, in amdgpu_ttm_gart_bind()
862 &(gtt->ttm.dma_address[page_idx]), flags); in amdgpu_ttm_gart_bind()
864 r = amdgpu_gart_bind(adev, gtt->offset, ttm->num_pages, in amdgpu_ttm_gart_bind()
865 gtt->ttm.dma_address, flags); in amdgpu_ttm_gart_bind()
871 ttm->num_pages, gtt->offset); in amdgpu_ttm_gart_bind()
883 struct ttm_tt *ttm, in amdgpu_ttm_backend_bind() argument
887 struct amdgpu_ttm_tt *gtt = (void*)ttm; in amdgpu_ttm_backend_bind()
898 r = amdgpu_ttm_tt_pin_userptr(bdev, ttm); in amdgpu_ttm_backend_bind()
903 } else if (ttm->page_flags & TTM_TT_FLAG_EXTERNAL) { in amdgpu_ttm_backend_bind()
904 if (!ttm->sg) { in amdgpu_ttm_backend_bind()
913 ttm->sg = sgt; in amdgpu_ttm_backend_bind()
916 drm_prime_sg_to_dma_addr_array(ttm->sg, gtt->ttm.dma_address, in amdgpu_ttm_backend_bind()
917 ttm->num_pages); in amdgpu_ttm_backend_bind()
920 if (!ttm->num_pages) { in amdgpu_ttm_backend_bind()
922 ttm->num_pages, bo_mem, ttm); in amdgpu_ttm_backend_bind()
937 flags = amdgpu_ttm_tt_pte_flags(adev, ttm, bo_mem); in amdgpu_ttm_backend_bind()
941 r = amdgpu_gart_bind(adev, gtt->offset, ttm->num_pages, in amdgpu_ttm_backend_bind()
942 gtt->ttm.dma_address, flags); in amdgpu_ttm_backend_bind()
946 ttm->num_pages, gtt->offset); in amdgpu_ttm_backend_bind()
963 struct amdgpu_ttm_tt *gtt = (void *)bo->ttm; in amdgpu_ttm_alloc_gart()
994 flags = amdgpu_ttm_tt_pte_flags(adev, bo->ttm, tmp); in amdgpu_ttm_alloc_gart()
1023 if (!tbo->ttm) in amdgpu_ttm_recover_gart()
1026 flags = amdgpu_ttm_tt_pte_flags(adev, tbo->ttm, tbo->resource); in amdgpu_ttm_recover_gart()
1039 struct ttm_tt *ttm) in amdgpu_ttm_backend_unbind() argument
1042 struct amdgpu_ttm_tt *gtt = (void *)ttm; in amdgpu_ttm_backend_unbind()
1047 amdgpu_ttm_tt_unpin_userptr(bdev, ttm); in amdgpu_ttm_backend_unbind()
1048 } else if (ttm->sg && gtt->gobj->import_attach) { in amdgpu_ttm_backend_unbind()
1052 dma_buf_unmap_attachment(attach, ttm->sg, DMA_BIDIRECTIONAL); in amdgpu_ttm_backend_unbind()
1053 ttm->sg = NULL; in amdgpu_ttm_backend_unbind()
1063 r = amdgpu_gart_unbind(adev, gtt->offset, ttm->num_pages); in amdgpu_ttm_backend_unbind()
1066 gtt->ttm.num_pages, gtt->offset); in amdgpu_ttm_backend_unbind()
1071 struct ttm_tt *ttm) in amdgpu_ttm_backend_destroy() argument
1073 struct amdgpu_ttm_tt *gtt = (void *)ttm; in amdgpu_ttm_backend_destroy()
1078 ttm_tt_fini(&gtt->ttm); in amdgpu_ttm_backend_destroy()
1109 if (ttm_sg_tt_init(&gtt->ttm, bo, page_flags, caching)) { in amdgpu_ttm_tt_create()
1113 return &gtt->ttm; in amdgpu_ttm_tt_create()
1123 struct ttm_tt *ttm, in amdgpu_ttm_tt_populate() argument
1127 struct amdgpu_ttm_tt *gtt = (void *)ttm; in amdgpu_ttm_tt_populate()
1133 ttm->sg = kzalloc(sizeof(struct sg_table), GFP_KERNEL); in amdgpu_ttm_tt_populate()
1134 if (!ttm->sg) in amdgpu_ttm_tt_populate()
1139 if (ttm->page_flags & TTM_TT_FLAG_EXTERNAL) in amdgpu_ttm_tt_populate()
1142 ret = ttm_pool_alloc(&adev->mman.bdev.pool, ttm, ctx); in amdgpu_ttm_tt_populate()
1146 for (i = 0; i < ttm->num_pages; ++i) in amdgpu_ttm_tt_populate()
1147 ttm->pages[i]->mapping = bdev->dev_mapping; in amdgpu_ttm_tt_populate()
1159 struct ttm_tt *ttm) in amdgpu_ttm_tt_unpopulate() argument
1161 struct amdgpu_ttm_tt *gtt = (void *)ttm; in amdgpu_ttm_tt_unpopulate()
1165 amdgpu_ttm_backend_unbind(bdev, ttm); in amdgpu_ttm_tt_unpopulate()
1168 amdgpu_ttm_tt_set_user_pages(ttm, NULL); in amdgpu_ttm_tt_unpopulate()
1169 kfree(ttm->sg); in amdgpu_ttm_tt_unpopulate()
1170 ttm->sg = NULL; in amdgpu_ttm_tt_unpopulate()
1174 if (ttm->page_flags & TTM_TT_FLAG_EXTERNAL) in amdgpu_ttm_tt_unpopulate()
1177 for (i = 0; i < ttm->num_pages; ++i) in amdgpu_ttm_tt_unpopulate()
1178 ttm->pages[i]->mapping = NULL; in amdgpu_ttm_tt_unpopulate()
1181 return ttm_pool_free(&adev->mman.bdev.pool, ttm); in amdgpu_ttm_tt_unpopulate()
1200 if (!bo->ttm) { in amdgpu_ttm_tt_set_userptr()
1202 bo->ttm = amdgpu_ttm_tt_create(bo, 0); in amdgpu_ttm_tt_set_userptr()
1203 if (bo->ttm == NULL) in amdgpu_ttm_tt_set_userptr()
1208 bo->ttm->page_flags |= TTM_TT_FLAG_EXTERNAL; in amdgpu_ttm_tt_set_userptr()
1210 gtt = (void *)bo->ttm; in amdgpu_ttm_tt_set_userptr()
1225 struct mm_struct *amdgpu_ttm_tt_get_usermm(struct ttm_tt *ttm) in amdgpu_ttm_tt_get_usermm() argument
1227 struct amdgpu_ttm_tt *gtt = (void *)ttm; in amdgpu_ttm_tt_get_usermm()
1243 bool amdgpu_ttm_tt_affect_userptr(struct ttm_tt *ttm, unsigned long start, in amdgpu_ttm_tt_affect_userptr() argument
1246 struct amdgpu_ttm_tt *gtt = (void *)ttm; in amdgpu_ttm_tt_affect_userptr()
1255 size = (unsigned long)gtt->ttm.num_pages * PAGE_SIZE; in amdgpu_ttm_tt_affect_userptr()
1267 bool amdgpu_ttm_tt_is_userptr(struct ttm_tt *ttm) in amdgpu_ttm_tt_is_userptr() argument
1269 struct amdgpu_ttm_tt *gtt = (void *)ttm; in amdgpu_ttm_tt_is_userptr()
1280 bool amdgpu_ttm_tt_is_readonly(struct ttm_tt *ttm) in amdgpu_ttm_tt_is_readonly() argument
1282 struct amdgpu_ttm_tt *gtt = (void *)ttm; in amdgpu_ttm_tt_is_readonly()
1298 uint64_t amdgpu_ttm_tt_pde_flags(struct ttm_tt *ttm, struct ttm_resource *mem) in amdgpu_ttm_tt_pde_flags() argument
1309 if (ttm->caching == ttm_cached) in amdgpu_ttm_tt_pde_flags()
1329 uint64_t amdgpu_ttm_tt_pte_flags(struct amdgpu_device *adev, struct ttm_tt *ttm, in amdgpu_ttm_tt_pte_flags() argument
1332 uint64_t flags = amdgpu_ttm_tt_pde_flags(ttm, mem); in amdgpu_ttm_tt_pte_flags()
1337 if (!amdgpu_ttm_tt_is_readonly(ttm)) in amdgpu_ttm_tt_pte_flags()
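
The matches above repeatedly show the cast "struct amdgpu_ttm_tt *gtt = (void *)ttm;" together with the "struct ttm_tt ttm; member" hit at line 641. What follows is a minimal sketch, assuming only what those matched lines show: the driver embeds its struct ttm_tt as the first member of struct amdgpu_ttm_tt, so the ttm_tt pointer that TTM passes around can be converted back to the driver's private wrapper with a plain cast. The *_example names and the exact field layout below are illustrative stand-ins, not the kernel definitions (userptr is taken from the gtt->userptr hits above, but its type here is assumed).

struct ttm_tt_example {                 /* stand-in for struct ttm_tt */
	unsigned long num_pages;
};

struct amdgpu_ttm_tt_example {          /* stand-in for struct amdgpu_ttm_tt */
	struct ttm_tt_example ttm;      /* embedded base, kept as the first member */
	unsigned long userptr;          /* illustrative private state */
};

static unsigned long example_userptr(struct ttm_tt_example *ttm)
{
	/* Same pattern as the matched driver lines: the cast is valid
	 * because the embedded base object sits at offset zero of the
	 * wrapper, so the two pointers alias the same address. */
	struct amdgpu_ttm_tt_example *gtt = (void *)ttm;

	return gtt->userptr;
}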