
Searched refs:ttm (Results 1 – 25 of 95) sorted by relevance


/drivers/gpu/drm/ttm/
ttm_tt.c
74 if (bo->ttm) in ttm_tt_create()
117 ttm->pages = kvcalloc(ttm->num_pages, sizeof(void*), GFP_KERNEL); in ttm_tt_alloc_page_directory()
118 if (!ttm->pages) in ttm_tt_alloc_page_directory()
126 ttm->pages = kvcalloc(ttm->num_pages, sizeof(*ttm->pages) + in ttm_dma_tt_alloc_page_directory()
128 if (!ttm->pages) in ttm_dma_tt_alloc_page_directory()
131 ttm->dma_address = (void *)(ttm->pages + ttm->num_pages); in ttm_dma_tt_alloc_page_directory()
137 ttm->dma_address = kvcalloc(ttm->num_pages, sizeof(*ttm->dma_address), in ttm_sg_tt_alloc_page_directory()
161 ttm->sg = bo->sg; in ttm_tt_init_fields()
191 if (ttm->backup) { in ttm_tt_fini()
196 if (ttm->pages) in ttm_tt_fini()
[all …]
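
The ttm_dma_tt_alloc_page_directory() hits (ttm_tt.c lines 126-131) show a space-saving idiom: a single kvcalloc() sized for both the page-pointer array and the dma_addr_t array, with ttm->dma_address then pointed just past the end of ttm->pages, so one allocation and one free cover both. A minimal userspace sketch of the same carve-one-allocation idiom, assuming an LP64 target where the trailing array stays naturally aligned; the struct and names are illustrative stand-ins, not TTM's:

    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    struct page_dir {
            void     **pages;       /* num_pages pointers */
            uint64_t  *dma_address; /* num_pages addresses, same allocation */
    };

    static int page_dir_alloc(struct page_dir *d, size_t num_pages)
    {
            /* One calloc() covers both arrays; like kvcalloc() in the
             * kernel, calloc() checks the count * size multiplication
             * for overflow. */
            d->pages = calloc(num_pages,
                              sizeof(*d->pages) + sizeof(*d->dma_address));
            if (!d->pages)
                    return -1;
            /* The second array starts right after the first; this mirrors
             * ttm->dma_address = (void *)(ttm->pages + ttm->num_pages). */
            d->dma_address = (uint64_t *)(d->pages + num_pages);
            return 0;
    }

    int main(void)
    {
            struct page_dir d;

            if (page_dir_alloc(&d, 8))
                    return 1;
            d.dma_address[7] = 0x1000;      /* both arrays usable */
            printf("%p %lx\n", d.pages[0], (unsigned long)d.dma_address[7]);
            free(d.pages);                  /* one free() releases both */
            return 0;
    }
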
ttm_agp_backend.c
46 struct ttm_tt ttm; member
53 struct ttm_agp_backend *agp_be = container_of(ttm, struct ttm_agp_backend, ttm); in ttm_agp_bind()
67 for (i = 0; i < ttm->num_pages; i++) { in ttm_agp_bind()
68 struct page *page = ttm->pages[i]; in ttm_agp_bind()
90 struct ttm_agp_backend *agp_be = container_of(ttm, struct ttm_agp_backend, ttm); in ttm_agp_unbind()
105 struct ttm_agp_backend *agp_be = container_of(ttm, struct ttm_agp_backend, ttm); in ttm_agp_is_bound()
107 if (!ttm) in ttm_agp_is_bound()
116 struct ttm_agp_backend *agp_be = container_of(ttm, struct ttm_agp_backend, ttm); in ttm_agp_destroy()
119 ttm_agp_unbind(ttm); in ttm_agp_destroy()
120 ttm_tt_fini(ttm); in ttm_agp_destroy()
[all …]
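
ttm_agp_backend.c embeds struct ttm_tt as a member and recovers its wrapper with container_of(), so TTM core code can pass around a bare struct ttm_tt * while the driver keeps private state alongside it. A self-contained sketch of the pattern; the types are stand-ins and container_of() is redefined for userspace:

    #include <stddef.h>
    #include <stdio.h>

    /* Userspace stand-in for the kernel's container_of() macro. */
    #define container_of(ptr, type, member) \
            ((type *)((char *)(ptr) - offsetof(type, member)))

    struct ttm_tt {
            unsigned long num_pages;
    };

    /* Driver-private wrapper, as in ttm_agp_backend.c: the base object
     * is embedded by value, not pointed to. */
    struct agp_backend {
            struct ttm_tt ttm;
            int bound;
    };

    static void agp_bind(struct ttm_tt *ttm)
    {
            /* Recover the wrapper from the embedded member pointer. */
            struct agp_backend *be = container_of(ttm, struct agp_backend, ttm);

            be->bound = 1;
    }

    int main(void)
    {
            struct agp_backend be = { .ttm = { .num_pages = 4 } };

            agp_bind(&be.ttm);      /* TTM core only ever sees &be.ttm */
            printf("bound=%d\n", be.bound);
            return 0;
    }
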
ttm_bo_util.c
153 struct ttm_tt *ttm = bo->ttm; in ttm_bo_move_memcpy() local
168 if (ttm && ((ttm->page_flags & TTM_TT_FLAG_SWAPPED) || in ttm_bo_move_memcpy()
190 if (!(clear && ttm && !(ttm->page_flags & TTM_TT_FLAG_ZERO_ALLOC))) in ttm_bo_move_memcpy()
350 struct ttm_tt *ttm = bo->ttm; in ttm_bo_kmap_ttm() local
356 BUG_ON(!ttm); in ttm_bo_kmap_ttm()
537 struct ttm_tt *ttm = bo->ttm; in ttm_bo_vmap() local
550 vaddr = vmap(ttm->pages, ttm->num_pages, 0, prot); in ttm_bo_vmap()
775 ttm = bo->ttm; in ttm_bo_pipeline_gutting()
776 bo->ttm = NULL; in ttm_bo_pipeline_gutting()
778 swap(bo->ttm, ttm); in ttm_bo_pipeline_gutting()
[all …]
/drivers/gpu/drm/radeon/
radeon_ttm.c
361 r = sg_alloc_table_from_pages(ttm->sg, ttm->pages, ttm->num_pages, 0, in radeon_ttm_tt_pin_userptr()
371 drm_prime_sg_to_dma_addr_array(ttm->sg, gtt->ttm.dma_address, in radeon_ttm_tt_pin_userptr()
377 kfree(ttm->sg); in radeon_ttm_tt_pin_userptr()
395 if (!ttm->sg || !ttm->sg->sgl) in radeon_ttm_tt_unpin_userptr()
441 ttm->num_pages, bo_mem, ttm); in radeon_ttm_backend_bind()
446 ttm->pages, gtt->ttm.dma_address, flags); in radeon_ttm_backend_bind()
522 if (!ttm) in radeon_ttm_tt_to_gtt()
524 return container_of(ttm, struct radeon_ttm_tt, ttm); in radeon_ttm_tt_to_gtt()
537 if (!ttm->sg) in radeon_ttm_tt_populate()
545 drm_prime_sg_to_dma_addr_array(ttm->sg, gtt->ttm.dma_address, in radeon_ttm_tt_populate()
[all …]
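
The radeon userptr hits outline a three-step pin path: wrap the already-faulted user pages in an sg_table, DMA-map it, then flatten the mapping into a per-page dma_addr_t array for the GART code. A hedged, compressed sketch of that flow for a kernel-module context (not radeon's exact code; the real driver additionally handles read-only userptrs and partial mappings):

    #include <linux/dma-mapping.h>
    #include <linux/scatterlist.h>
    #include <drm/drm_prime.h>
    #include <drm/ttm/ttm_tt.h>

    static int pin_userptr_sketch(struct device *dev, struct ttm_tt *ttm,
                                  dma_addr_t *dma_address)
    {
            int r;

            /* Wrap the user pages in a scatter/gather table. */
            r = sg_alloc_table_from_pages(ttm->sg, ttm->pages, ttm->num_pages,
                                          0, (u64)ttm->num_pages << PAGE_SHIFT,
                                          GFP_KERNEL);
            if (r)
                    return r;

            /* Make the table visible to the device. */
            r = dma_map_sgtable(dev, ttm->sg, DMA_BIDIRECTIONAL, 0);
            if (r)
                    goto release_sg;

            /* Flatten into the per-page address array the GART bind uses. */
            drm_prime_sg_to_dma_addr_array(ttm->sg, dma_address,
                                           ttm->num_pages);
            return 0;

    release_sg:
            sg_free_table(ttm->sg);
            return r;
    }
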
/drivers/gpu/drm/i915/gem/
i915_gem_ttm.c
50 struct ttm_tt ttm; member
187 struct i915_ttm_tt *i915_tt = container_of(ttm, typeof(*i915_tt), ttm); in i915_ttm_tt_shmem_populate()
242 struct i915_ttm_tt *i915_tt = container_of(ttm, typeof(*i915_tt), ttm); in i915_ttm_tt_shmem_unpopulate()
350 ttm_tt_fini(ttm); in i915_ttm_tt_destroy()
367 if (bo->ttm && bo->ttm->page_flags & TTM_TT_FLAG_EXTERNAL) in i915_ttm_eviction_valuable()
420 container_of(bo->ttm, typeof(*i915_tt), ttm); in i915_ttm_purge()
460 container_of(bo->ttm, typeof(*i915_tt), ttm); in i915_ttm_shrink()
530 ttm->pages, ttm->num_pages, in i915_ttm_tt_get_st()
810 if (bo->ttm && !ttm_tt_is_populated(bo->ttm)) { in __i915_ttm_get_pages()
927 container_of(bo->ttm, typeof(*i915_tt), ttm); in i915_ttm_adjust_lru()
[all …]
i915_gem_ttm_move.c
53 struct ttm_tt *ttm) in i915_ttm_cache_level() argument
57 ttm->caching == ttm_cached) ? I915_CACHE_LLC : in i915_ttm_cache_level()
125 bo->ttm); in i915_ttm_adjust_gem_after_move()
198 struct ttm_tt *src_ttm = bo->ttm; in i915_ttm_accel_move()
332 &obj->ttm.cached_io_rsgt->table, in i915_ttm_memcpy_init()
579 struct ttm_tt *ttm = bo->ttm; in i915_ttm_move() local
626 if (ttm && (dst_man->use_tt || (ttm->page_flags & TTM_TT_FLAG_SWAPPED))) { in i915_ttm_move()
636 clear = !i915_ttm_cpu_maps_iomem(bo->resource) && (!ttm || !ttm_tt_is_populated(ttm)); in i915_ttm_move()
638 if (!(clear && ttm && !((ttm->page_flags & TTM_TT_FLAG_ZERO_ALLOC) && !prealloc_bo))) { in i915_ttm_move()
678 obj->ttm.cached_io_rsgt = dst_rsgt; in i915_ttm_move()
[all …]
i915_gem_ttm_pm.c
24 if (obj->ttm.backup) { in i915_ttm_backup_free()
25 i915_gem_object_put(obj->ttm.backup); in i915_ttm_backup_free()
26 obj->ttm.backup = NULL; in i915_ttm_backup_free()
56 if (!i915_ttm_cpu_maps_iomem(bo->resource) || obj->ttm.backup) in i915_ttm_backup()
106 obj->ttm.backup = backup; in i915_ttm_backup()
173 struct drm_i915_gem_object *backup = obj->ttm.backup; in i915_ttm_restore()
199 obj->ttm.backup = NULL; in i915_ttm_restore()
/drivers/gpu/drm/xe/
xe_bo.c
393 struct ttm_tt *tt = bo->ttm.ttm; in xe_bo_sg()
795 struct ttm_tt *ttm = ttm_bo->ttm; in xe_bo_move() local
1343 backup->ttm.resource, bo->ttm.resource, in xe_bo_restore_pinned()
2298 if (bo->ttm.ttm && ttm_tt_is_populated(bo->ttm.ttm)) in xe_bo_pin_external()
2299 xe_ttm_tt_account_subtract(xe, bo->ttm.ttm); in xe_bo_pin_external()
2346 if (bo->ttm.ttm && ttm_tt_is_populated(bo->ttm.ttm)) in xe_bo_pin()
2382 if (bo->ttm.ttm && ttm_tt_is_populated(bo->ttm.ttm)) in xe_bo_unpin_external()
2383 xe_ttm_tt_account_add(xe, bo->ttm.ttm); in xe_bo_unpin_external()
2414 if (bo->ttm.ttm && ttm_tt_is_populated(bo->ttm.ttm)) in xe_bo_unpin()
2415 xe_ttm_tt_account_add(xe, bo->ttm.ttm); in xe_bo_unpin()
[all …]
xe_bo.h
132 return container_of(bo, struct xe_bo, ttm); in ttm_to_xe_bo()
137 return container_of(obj, struct xe_bo, ttm.base); in gem_to_xe_bo()
140 #define xe_bo_device(bo) ttm_to_xe_device((bo)->ttm.bdev)
145 drm_gem_object_get(&bo->ttm.base); in xe_bo_get()
177 ttm_bo_set_bulk_move(&bo->ttm, NULL); in __xe_bo_unset_bulk_move()
183 dma_resv_assert_held((bo)->ttm.base.resv); in xe_bo_assert_held()
197 dma_resv_unlock(bo->ttm.base.resv); in xe_bo_unlock_vm_held()
209 return bo->ttm.pin_count; in xe_bo_is_pinned()
248 return bo->ttm.base.size; in xe_bo_size()
318 if ((bo->ttm.ttm && ttm_tt_is_populated(bo->ttm.ttm)) || in xe_bo_has_pages()
[all …]
xe_dma_buf.c
110 switch (bo->ttm.resource->mem_type) { in xe_dma_buf_map()
113 bo->ttm.ttm->pages, in xe_dma_buf_map()
114 bo->ttm.ttm->num_pages); in xe_dma_buf_map()
126 bo->ttm.resource, 0, in xe_dma_buf_map()
127 bo->ttm.base.size, attach->dev, in xe_dma_buf_map()
224 return &bo->ttm.base; in xe_dma_buf_init_obj()
296 attach = dma_buf_dynamic_attach(dma_buf, dev->dev, attach_ops, &bo->ttm.base); in xe_gem_prime_import()
xe_drm_client.c
155 struct xe_device *xe = ttm_to_xe_device(bo->ttm.bdev); in xe_drm_client_remove_bo()
158 xe_assert(xe, !kref_read(&bo->ttm.base.refcount)); in xe_drm_client_remove_bo()
171 u32 mem_type = bo->ttm.resource->mem_type; in bo_meminfo()
175 if (drm_gem_object_is_shared_for_memory_stats(&bo->ttm.base)) in bo_meminfo()
183 if (!dma_resv_test_signaled(bo->ttm.base.resv, in bo_meminfo()
195 struct ttm_device *bdev = &xef->xe->ttm; in show_meminfo()
211 if (dma_resv_trylock(bo->ttm.base.resv)) { in show_meminfo()
231 if (!kref_get_unless_zero(&bo->ttm.base.refcount)) in show_meminfo()
234 if (dma_resv_trylock(bo->ttm.base.resv)) { in show_meminfo()
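
The show_meminfo() hits (lines 211-234) use the standard non-blocking walk idiom: pin each object with kref_get_unless_zero() so a dying BO is skipped, then take its reservation with dma_resv_trylock() so the stats pass never sleeps. A sketch of the idiom under those assumptions (hypothetical helper name, not the driver's code; it assumes the xe driver's xe_bo type and xe_bo_put()):

    #include <linux/dma-resv.h>
    #include <linux/kref.h>

    #include "xe_bo.h"

    static bool bo_stats_sample(struct xe_bo *bo)
    {
            /* Skip objects whose last reference is already gone. */
            if (!kref_get_unless_zero(&bo->ttm.base.refcount))
                    return false;

            /* Never sleep in the stats pass: only sample BOs whose
             * reservation can be taken without blocking. */
            if (dma_resv_trylock(bo->ttm.base.resv)) {
                    /* bo->ttm.resource and friends are stable here */
                    dma_resv_unlock(bo->ttm.base.resv);
            }

            xe_bo_put(bo);  /* drop the temporary reference */
            return true;
    }
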
/drivers/gpu/drm/amd/amdgpu/
amdgpu_ttm.c
701 struct ttm_tt *ttm = bo->tbo.ttm; in amdgpu_ttm_tt_get_user_pages() local
811 r = sg_alloc_table_from_pages(ttm->sg, ttm->pages, ttm->num_pages, 0, in amdgpu_ttm_tt_pin_userptr()
823 drm_prime_sg_to_dma_addr_array(ttm->sg, gtt->ttm.dma_address, in amdgpu_ttm_tt_pin_userptr()
831 kfree(ttm->sg); in amdgpu_ttm_tt_pin_userptr()
832 ttm->sg = NULL; in amdgpu_ttm_tt_pin_userptr()
849 if (!ttm->sg || !ttm->sg->sgl) in amdgpu_ttm_tt_unpin_userptr()
899 struct ttm_tt *ttm = tbo->ttm; in amdgpu_ttm_gart_bind() local
960 ttm->num_pages, bo_mem, ttm); in amdgpu_ttm_backend_bind()
1042 if (!tbo->ttm) in amdgpu_ttm_recover_gart()
1153 if (!ttm->sg) in amdgpu_ttm_tt_populate()
[all …]
amdgpu_ttm.h
194 void amdgpu_ttm_tt_discard_user_pages(struct ttm_tt *ttm,
196 bool amdgpu_ttm_tt_get_user_pages_done(struct ttm_tt *ttm,
205 static inline void amdgpu_ttm_tt_discard_user_pages(struct ttm_tt *ttm, in amdgpu_ttm_tt_discard_user_pages() argument
209 static inline bool amdgpu_ttm_tt_get_user_pages_done(struct ttm_tt *ttm, in amdgpu_ttm_tt_get_user_pages_done() argument
216 void amdgpu_ttm_tt_set_user_pages(struct ttm_tt *ttm, struct page **pages);
221 bool amdgpu_ttm_tt_has_userptr(struct ttm_tt *ttm);
222 struct mm_struct *amdgpu_ttm_tt_get_usermm(struct ttm_tt *ttm);
223 bool amdgpu_ttm_tt_affect_userptr(struct ttm_tt *ttm, unsigned long start,
225 bool amdgpu_ttm_tt_userptr_invalidated(struct ttm_tt *ttm,
227 bool amdgpu_ttm_tt_is_userptr(struct ttm_tt *ttm);
[all …]
amdgpu_amdkfd_gpuvm.c
553 struct ttm_tt *ttm = bo->tbo.ttm; in kfd_mem_dmamap_userptr() local
559 ttm->sg = kmalloc(sizeof(*ttm->sg), GFP_KERNEL); in kfd_mem_dmamap_userptr()
588 kfree(ttm->sg); in kfd_mem_dmamap_userptr()
589 ttm->sg = NULL; in kfd_mem_dmamap_userptr()
636 struct ttm_tt *ttm = bo->tbo.ttm; in kfd_mem_dmamap_sg_bo() local
676 kfree(ttm->sg); in kfd_mem_dmamap_sg_bo()
677 ttm->sg = NULL; in kfd_mem_dmamap_sg_bo()
713 struct ttm_tt *ttm = bo->tbo.ttm; in kfd_mem_dmaunmap_userptr() local
723 kfree(ttm->sg); in kfd_mem_dmaunmap_userptr()
724 ttm->sg = NULL; in kfd_mem_dmaunmap_userptr()
[all …]
/drivers/gpu/drm/vmwgfx/
vmwgfx_ttm_buffer.c
301 vmw_mob_create(ttm->num_pages); in vmw_ttm_bind()
321 struct ttm_tt *ttm) in vmw_ttm_unbind() argument
354 ttm_tt_fini(ttm); in vmw_ttm_destroy()
367 if (ttm_tt_is_populated(ttm)) in vmw_ttm_populate()
370 if (external && ttm->sg) in vmw_ttm_populate()
372 ttm->dma_address, in vmw_ttm_populate()
373 ttm->num_pages); in vmw_ttm_populate()
379 struct ttm_tt *ttm) in vmw_ttm_unpopulate() argument
388 vmw_ttm_unbind(bdev, ttm); in vmw_ttm_unpopulate()
397 ttm_pool_free(&bdev->pool, ttm); in vmw_ttm_unpopulate()
[all …]
vmwgfx_blit.c
571 if (!ttm_tt_is_populated(dst->ttm)) { in vmw_bo_cpu_blit()
577 if (!ttm_tt_is_populated(src->ttm)) { in vmw_bo_cpu_blit()
588 if (!src->ttm->pages && src->ttm->sg) { in vmw_bo_cpu_blit()
589 src_pages = kvmalloc_array(src->ttm->num_pages, in vmw_bo_cpu_blit()
593 ret = drm_prime_sg_to_page_array(src->ttm->sg, src_pages, in vmw_bo_cpu_blit()
594 src->ttm->num_pages); in vmw_bo_cpu_blit()
598 if (!dst->ttm->pages && dst->ttm->sg) { in vmw_bo_cpu_blit()
599 dst_pages = kvmalloc_array(dst->ttm->num_pages, in vmw_bo_cpu_blit()
606 dst->ttm->num_pages); in vmw_bo_cpu_blit()
615 d.dst_pages = dst->ttm->pages ? dst->ttm->pages : dst_pages; in vmw_bo_cpu_blit()
[all …]
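
The vmw_bo_cpu_blit() hits (lines 588-606) handle TTs that arrived through dma-buf import and therefore carry only an sg table, no page array: the driver builds a temporary page array with drm_prime_sg_to_page_array() so the CPU copy loop can index pages directly. A condensed sketch of that fallback (hypothetical helper name; the real code does this inline for src and dst separately):

    #include <linux/mm.h>
    #include <drm/drm_prime.h>
    #include <drm/ttm/ttm_tt.h>

    static struct page **pages_from_sg_sketch(struct ttm_tt *ttm)
    {
            struct page **pages;

            if (ttm->pages)
                    return ttm->pages;      /* nothing to do */

            pages = kvmalloc_array(ttm->num_pages, sizeof(*pages),
                                   GFP_KERNEL);
            if (!pages)
                    return NULL;

            if (drm_prime_sg_to_page_array(ttm->sg, pages, ttm->num_pages)) {
                    kvfree(pages);
                    return NULL;
            }
            return pages;   /* caller must kvfree() unless it was ttm->pages */
    }
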
/drivers/gpu/drm/nouveau/
nouveau_sgdma.c
15 struct ttm_tt ttm; member
20 nouveau_sgdma_destroy(struct ttm_device *bdev, struct ttm_tt *ttm) in nouveau_sgdma_destroy() argument
22 struct nouveau_sgdma_be *nvbe = (struct nouveau_sgdma_be *)ttm; in nouveau_sgdma_destroy()
24 if (ttm) { in nouveau_sgdma_destroy()
25 ttm_tt_fini(&nvbe->ttm); in nouveau_sgdma_destroy()
33 struct nouveau_sgdma_be *nvbe = (struct nouveau_sgdma_be *)ttm; in nouveau_sgdma_bind()
41 ret = nouveau_mem_host(reg, &nvbe->ttm); in nouveau_sgdma_bind()
58 nouveau_sgdma_unbind(struct ttm_device *bdev, struct ttm_tt *ttm) in nouveau_sgdma_unbind() argument
60 struct nouveau_sgdma_be *nvbe = (struct nouveau_sgdma_be *)ttm; in nouveau_sgdma_unbind()
86 if (ttm_sg_tt_init(&nvbe->ttm, bo, page_flags, caching)) { in nouveau_sgdma_create_ttm()
[all …]
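
nouveau_sgdma.c takes a different route from ttm_agp_backend.c above: because struct ttm_tt is the first member of nouveau_sgdma_be, the driver downcasts with a plain cast instead of container_of(). That stays correct only while the member stays first; a self-contained userspace demonstration with stand-in types:

    #include <assert.h>

    struct ttm_tt {
            unsigned long num_pages;
    };

    struct sgdma_be {
            struct ttm_tt ttm;      /* must stay the FIRST member */
            void *mem;
    };

    int main(void)
    {
            struct sgdma_be be;
            struct ttm_tt *ttm = &be.ttm;

            /* Legal only while offsetof(struct sgdma_be, ttm) == 0;
             * container_of() would also tolerate a non-first member. */
            struct sgdma_be *nvbe = (struct sgdma_be *)ttm;

            assert(nvbe == &be);
            return 0;
    }
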
nouveau_ttm.c
170 drm->ttm.type_host[!!kind] = typei; in nouveau_ttm_init_host()
176 drm->ttm.type_ncoh[!!kind] = typei; in nouveau_ttm_init_host()
191 ttm_resource_manager_init(man, &drm->ttm.bdev, in nouveau_ttm_init_vram()
251 ttm_range_man_fini(&drm->ttm.bdev, TTM_PL_TT); in nouveau_ttm_fini_gtt()
290 drm->ttm.type_vram = typei; in nouveau_ttm_init()
292 drm->ttm.type_vram = -1; in nouveau_ttm_init()
340 mutex_init(&drm->ttm.io_reserve_mutex); in nouveau_ttm_init()
341 INIT_LIST_HEAD(&drm->ttm.io_reserve_lru); in nouveau_ttm_init()
356 ttm_device_fini(&drm->ttm.bdev); in nouveau_ttm_fini()
358 arch_phys_wc_del(drm->ttm.mtrr); in nouveau_ttm_fini()
[all …]
nouveau_bo.c
878 ttm_agp_unbind(ttm); in nouveau_ttm_tt_unbind()
1048 &drm->ttm.copy); in nouveau_bo_move_init()
1057 drm->ttm.chan = chan; in nouveau_bo_move_init()
1176 !bo->ttm)) { in nouveau_bo_move()
1196 if (drm->ttm.move) { in nouveau_bo_move()
1429 if (slave && ttm->sg) { in nouveau_ttm_tt_populate()
1431 ttm->num_pages); in nouveau_ttm_tt_populate()
1437 return ttm_pool_alloc(&drm->ttm.bdev.pool, ttm, ctx); in nouveau_ttm_tt_populate()
1442 struct ttm_tt *ttm) in nouveau_ttm_tt_unpopulate() argument
1454 return ttm_pool_free(&drm->ttm.bdev.pool, ttm); in nouveau_ttm_tt_unpopulate()
[all …]
nouveau_ttm.h
8 return container_of(bd, struct nouveau_drm, ttm.bdev); in nouveau_bdev()
24 int nouveau_sgdma_bind(struct ttm_device *bdev, struct ttm_tt *ttm, struct ttm_resource *reg);
25 void nouveau_sgdma_unbind(struct ttm_device *bdev, struct ttm_tt *ttm);
26 void nouveau_sgdma_destroy(struct ttm_device *bdev, struct ttm_tt *ttm);
/drivers/gpu/drm/xe/tests/
xe_bo.c
29 struct ttm_tt *ttm; in ccs_test_migrate() local
84 ttm = bo->ttm.ttm; in ccs_test_migrate()
85 if (!ttm || !ttm_tt_is_populated(ttm)) { in ccs_test_migrate()
91 if (ccs_page >= ttm->num_pages) { in ccs_test_migrate()
96 page = ttm->pages[ccs_page]; in ccs_test_migrate()
400 ttm_bo_vunmap(&bo->ttm, &map); in shrink_test_fill_random()
433 ttm_bo_vunmap(&bo->ttm, &map); in shrink_test_verify()
510 xe_tt = container_of(bo->ttm.ttm, typeof(*xe_tt), ttm); in shrink_test_run_device()
520 bo->ttm.priority = 0; in shrink_test_run_device()
545 bo->ttm.resource->mem_type); in shrink_test_run_device()
[all …]
/drivers/gpu/drm/qxl/
qxl_ttm.c
100 static void qxl_ttm_backend_destroy(struct ttm_device *bdev, struct ttm_tt *ttm) in qxl_ttm_backend_destroy() argument
102 ttm_tt_fini(ttm); in qxl_ttm_backend_destroy()
103 kfree(ttm); in qxl_ttm_backend_destroy()
109 struct ttm_tt *ttm; in qxl_ttm_tt_create() local
111 ttm = kzalloc(sizeof(struct ttm_tt), GFP_KERNEL); in qxl_ttm_tt_create()
112 if (ttm == NULL) in qxl_ttm_tt_create()
114 if (ttm_tt_init(ttm, bo, page_flags, ttm_cached, 0)) { in qxl_ttm_tt_create()
115 kfree(ttm); in qxl_ttm_tt_create()
118 return ttm; in qxl_ttm_tt_create()
161 if (old_mem->mem_type == TTM_PL_SYSTEM && bo->ttm == NULL) { in qxl_bo_move()
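
The qxl hits amount to the smallest possible TT backend: a bare struct ttm_tt, ttm_tt_init() with ttm_cached and no extra pages, and a destroy hook that mirrors it with ttm_tt_fini() plus kfree(). Rearranged as a template (a sketch assembled from the hits above, not a verbatim copy of qxl_ttm.c):

    #include <linux/slab.h>
    #include <drm/ttm/ttm_bo.h>
    #include <drm/ttm/ttm_device.h>
    #include <drm/ttm/ttm_tt.h>

    static struct ttm_tt *minimal_tt_create(struct ttm_buffer_object *bo,
                                            uint32_t page_flags)
    {
            struct ttm_tt *ttm = kzalloc(sizeof(*ttm), GFP_KERNEL);

            if (!ttm)
                    return NULL;
            /* Last argument is extra_pages; plain cached system memory
             * needs none. */
            if (ttm_tt_init(ttm, bo, page_flags, ttm_cached, 0)) {
                    kfree(ttm);
                    return NULL;
            }
            return ttm;
    }

    static void minimal_tt_destroy(struct ttm_device *bdev, struct ttm_tt *ttm)
    {
            ttm_tt_fini(ttm);
            kfree(ttm);
    }
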
/drivers/gpu/drm/ttm/tests/
ttm_kunit_helpers.c
58 static void ttm_tt_simple_destroy(struct ttm_device *bdev, struct ttm_tt *ttm) in ttm_tt_simple_destroy() argument
60 kfree(ttm); in ttm_tt_simple_destroy()
70 if (!old_mem || (old_mem->mem_type == TTM_PL_SYSTEM && !bo->ttm)) { in mock_move()
119 struct ttm_device *ttm, in ttm_device_kunit_init_with_funcs() argument
127 err = ttm_device_init(ttm, funcs, drm->dev, in ttm_device_kunit_init_with_funcs()
145 struct ttm_device *ttm, in ttm_device_kunit_init() argument
149 return ttm_device_kunit_init_with_funcs(priv, ttm, use_dma_alloc, in ttm_device_kunit_init()
164 struct ttm_device *ttm, in ttm_device_kunit_init_bad_evict() argument
168 return ttm_device_kunit_init_with_funcs(priv, ttm, use_dma_alloc, in ttm_device_kunit_init_bad_evict()
/drivers/gpu/drm/xe/display/
intel_fb_bo.c
32 struct xe_device *xe = to_xe_device(bo->ttm.base.dev); in intel_fb_bo_framebuffer_init()
45 ret = ttm_bo_reserve(&bo->ttm, true, false, NULL); in intel_fb_bo_framebuffer_init()
57 ttm_bo_unreserve(&bo->ttm); in intel_fb_bo_framebuffer_init()
63 ttm_bo_unreserve(&bo->ttm); in intel_fb_bo_framebuffer_init()
86 bo->ttm.type != ttm_bo_type_sg) { in intel_fb_bo_lookup_valid_bo()
intel_fbdev_fb.c
69 fb = intel_framebuffer_create(&obj->ttm.base, in intel_fbdev_fb_alloc()
79 drm_gem_object_put(&obj->ttm.base); in intel_fbdev_fb_alloc()
101 info->fix.smem_len = obj->ttm.base.size; in intel_fbdev_fb_fill_info()
105 info->fix.smem_len = obj->ttm.base.size; in intel_fbdev_fb_fill_info()
110 info->screen_size = obj->ttm.base.size; in intel_fbdev_fb_fill_info()
