Searched refs:ttm (Results 1 – 25 of 55) sorted by relevance

/linux-6.3-rc2/drivers/gpu/drm/ttm/
ttm_tt.c
67 if (bo->ttm) in ttm_tt_create()
100 ttm->pages = kvcalloc(ttm->num_pages, sizeof(void*), GFP_KERNEL); in ttm_tt_alloc_page_directory()
101 if (!ttm->pages) in ttm_tt_alloc_page_directory()
109 ttm->pages = kvcalloc(ttm->num_pages, sizeof(*ttm->pages) + in ttm_dma_tt_alloc_page_directory()
111 if (!ttm->pages) in ttm_dma_tt_alloc_page_directory()
114 ttm->dma_address = (void *)(ttm->pages + ttm->num_pages); in ttm_dma_tt_alloc_page_directory()
120 ttm->dma_address = kvcalloc(ttm->num_pages, sizeof(*ttm->dma_address), in ttm_sg_tt_alloc_page_directory()
144 ttm->sg = bo->sg; in ttm_tt_init_fields()
170 if (ttm->pages) in ttm_tt_fini()
174 ttm->pages = NULL; in ttm_tt_fini()
[all …]
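Aside: the ttm_dma_tt_alloc_page_directory() hits at lines 109–114 show TTM's co-allocation trick, where a single kvcalloc() carries both the page-pointer array and the dma_addr_t array. A minimal sketch of that pattern, assuming only the struct ttm_tt fields visible above (the helper name is invented):

#include <linux/slab.h>
#include <drm/ttm/ttm_tt.h>

/* Sketch of the single-allocation page directory seen above. */
static int example_dma_tt_alloc_page_directory(struct ttm_tt *ttm)
{
	/*
	 * One buffer sized for num_pages page pointers plus num_pages
	 * DMA addresses; a single kvfree(ttm->pages) later frees both.
	 */
	ttm->pages = kvcalloc(ttm->num_pages,
			      sizeof(*ttm->pages) + sizeof(*ttm->dma_address),
			      GFP_KERNEL);
	if (!ttm->pages)
		return -ENOMEM;

	/* The dma_addr_t array lives immediately after the page array. */
	ttm->dma_address = (void *)(ttm->pages + ttm->num_pages);
	return 0;
}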
ttm_agp_backend.c
45 struct ttm_tt ttm; member
52 struct ttm_agp_backend *agp_be = container_of(ttm, struct ttm_agp_backend, ttm); in ttm_agp_bind()
66 for (i = 0; i < ttm->num_pages; i++) { in ttm_agp_bind()
67 struct page *page = ttm->pages[i]; in ttm_agp_bind()
89 struct ttm_agp_backend *agp_be = container_of(ttm, struct ttm_agp_backend, ttm); in ttm_agp_unbind()
104 struct ttm_agp_backend *agp_be = container_of(ttm, struct ttm_agp_backend, ttm); in ttm_agp_is_bound()
106 if (!ttm) in ttm_agp_is_bound()
115 struct ttm_agp_backend *agp_be = container_of(ttm, struct ttm_agp_backend, ttm); in ttm_agp_destroy()
118 ttm_agp_unbind(ttm); in ttm_agp_destroy()
119 ttm_tt_fini(ttm); in ttm_agp_destroy()
[all …]
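The ttm_agp_backend.c hits illustrate the standard TTM backend idiom: embed struct ttm_tt in a driver wrapper and recover the wrapper with container_of() in every callback. A sketch of the shape (names beyond the line-45/52 hits are invented; keeping the ttm member first also permits the plain-cast variant nouveau_sgdma.c uses further down):

#include <linux/container_of.h>
#include <drm/ttm/ttm_tt.h>

struct example_agp_backend {
	struct ttm_tt ttm;	/* embedded base, kept as first member */
	void *priv;		/* hypothetical driver-private state */
};

static void example_agp_callback(struct ttm_tt *ttm)
{
	/* Recover the wrapper from the embedded ttm_tt, as line 52 does. */
	struct example_agp_backend *agp_be =
		container_of(ttm, struct example_agp_backend, ttm);

	/* ... bind/unbind/destroy logic acting on agp_be ... */
	(void)agp_be;
}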
ttm_bo_util.c
149 struct ttm_tt *ttm = bo->ttm; in ttm_bo_move_memcpy() local
164 if (ttm && ((ttm->page_flags & TTM_TT_FLAG_SWAPPED) || in ttm_bo_move_memcpy()
339 struct ttm_tt *ttm = bo->ttm; in ttm_bo_kmap_ttm() local
343 BUG_ON(!ttm); in ttm_bo_kmap_ttm()
497 struct ttm_tt *ttm = bo->ttm; in ttm_bo_vmap() local
510 vaddr = vmap(ttm->pages, ttm->num_pages, 0, prot); in ttm_bo_vmap()
597 bo->ttm = NULL; in ttm_bo_move_to_ghost()
742 ttm = bo->ttm; in ttm_bo_pipeline_gutting()
743 bo->ttm = NULL; in ttm_bo_pipeline_gutting()
745 swap(bo->ttm, ttm); in ttm_bo_pipeline_gutting()
[all …]
Makefile
5 ttm-y := ttm_tt.o ttm_bo.o ttm_bo_util.o ttm_bo_vm.o ttm_module.o \
8 ttm-$(CONFIG_AGP) += ttm_agp_backend.o
10 obj-$(CONFIG_DRM_TTM) += ttm.o
ttm_bo_vm.c
152 if (bo->ttm && (bo->ttm->page_flags & TTM_TT_FLAG_EXTERNAL)) { in ttm_bo_vm_reserve()
153 if (!(bo->ttm->page_flags & TTM_TT_FLAG_EXTERNAL_MAPPABLE)) { in ttm_bo_vm_reserve()
191 struct ttm_tt *ttm = NULL; in ttm_bo_vm_fault_reserved() local
226 ttm = bo->ttm; in ttm_bo_vm_fault_reserved()
227 if (ttm_tt_populate(bdev, bo->ttm, &ctx)) in ttm_bo_vm_fault_reserved()
242 page = ttm->pages[page_offset]; in ttm_bo_vm_fault_reserved()
/linux-6.3-rc2/drivers/gpu/drm/radeon/
radeon_ttm.c
373 r = sg_alloc_table_from_pages(ttm->sg, ttm->pages, ttm->num_pages, 0, in radeon_ttm_tt_pin_userptr()
383 drm_prime_sg_to_dma_addr_array(ttm->sg, gtt->ttm.dma_address, in radeon_ttm_tt_pin_userptr()
389 kfree(ttm->sg); in radeon_ttm_tt_pin_userptr()
407 if (!ttm->sg || !ttm->sg->sgl) in radeon_ttm_tt_unpin_userptr()
453 ttm->num_pages, bo_mem, ttm); in radeon_ttm_backend_bind()
458 ttm->pages, gtt->ttm.dma_address, flags); in radeon_ttm_backend_bind()
534 if (!ttm) in radeon_ttm_tt_to_gtt()
536 return container_of(ttm, struct radeon_ttm_tt, ttm); in radeon_ttm_tt_to_gtt()
549 if (!ttm->sg) in radeon_ttm_tt_populate()
557 drm_prime_sg_to_dma_addr_array(ttm->sg, gtt->ttm.dma_address, in radeon_ttm_tt_populate()
[all …]
radeon_prime.c
41 return drm_prime_pages_to_sg(obj->dev, bo->tbo.ttm->pages, in radeon_gem_prime_get_sg_table()
42 bo->tbo.ttm->num_pages); in radeon_gem_prime_get_sg_table()
109 if (radeon_ttm_tt_has_userptr(bo->rdev, bo->tbo.ttm)) in radeon_gem_prime_export()
radeon_mn.c
57 if (!bo->tbo.ttm || !radeon_ttm_tt_is_bound(bo->tbo.bdev, bo->tbo.ttm)) in radeon_mn_invalidate()
/linux-6.3-rc2/include/drm/ttm/
ttm_tt.h
150 int ttm_tt_init(struct ttm_tt *ttm, struct ttm_buffer_object *bo,
163 void ttm_tt_fini(struct ttm_tt *ttm);
173 void ttm_tt_destroy(struct ttm_device *bdev, struct ttm_tt *ttm);
182 int ttm_tt_swapin(struct ttm_tt *ttm);
183 int ttm_tt_swapout(struct ttm_device *bdev, struct ttm_tt *ttm,
195 int ttm_tt_populate(struct ttm_device *bdev, struct ttm_tt *ttm,
216 static inline void ttm_tt_mark_for_clear(struct ttm_tt *ttm) in ttm_tt_mark_for_clear() argument
218 ttm->page_flags |= TTM_TT_FLAG_ZERO_ALLOC; in ttm_tt_mark_for_clear()
245 void ttm_agp_unbind(struct ttm_tt *ttm);
246 void ttm_agp_destroy(struct ttm_tt *ttm);
[all …]
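Taken together, the declarations above give the ttm_tt lifecycle: ttm_tt_init() builds the page directory, ttm_tt_populate() backs it with pages, and teardown goes through ttm_tt_unpopulate()/ttm_tt_fini() (or ttm_tt_destroy(), which dispatches to the driver hook in ttm_device.h below). A minimal sketch, assuming a caller that owns the ttm_tt and a zeroed ttm_operation_ctx, and that ttm_tt_unpopulate() is the counterpart declared alongside the populate hit:

#include <drm/ttm/ttm_bo.h>
#include <drm/ttm/ttm_tt.h>

static int example_tt_lifecycle(struct ttm_device *bdev,
				struct ttm_buffer_object *bo,
				struct ttm_tt *ttm)
{
	struct ttm_operation_ctx ctx = {};
	int ret;

	/* Cached CPU mapping, no page flags, no extra pages (6.3 signature). */
	ret = ttm_tt_init(ttm, bo, 0, ttm_cached, 0);
	if (ret)
		return ret;

	/* Back the page directory with real pages. */
	ret = ttm_tt_populate(bdev, ttm, &ctx);
	if (ret)
		goto out_fini;

	/* ... ttm->pages is now valid ... */

	ttm_tt_unpopulate(bdev, ttm);
out_fini:
	ttm_tt_fini(ttm);
	return ret;
}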
ttm_device.h
86 struct ttm_tt *ttm,
97 struct ttm_tt *ttm);
108 void (*ttm_tt_destroy)(struct ttm_device *bdev, struct ttm_tt *ttm);
/linux-6.3-rc2/drivers/gpu/drm/i915/gem/
i915_gem_ttm.c
50 struct ttm_tt ttm; member
194 struct i915_ttm_tt *i915_tt = container_of(ttm, typeof(*i915_tt), ttm); in i915_ttm_tt_shmem_populate()
249 struct i915_ttm_tt *i915_tt = container_of(ttm, typeof(*i915_tt), ttm); in i915_ttm_tt_shmem_unpopulate()
357 ttm_tt_fini(ttm); in i915_ttm_tt_destroy()
374 if (bo->ttm && bo->ttm->page_flags & TTM_TT_FLAG_EXTERNAL) in i915_ttm_eviction_valuable()
427 container_of(bo->ttm, typeof(*i915_tt), ttm); in i915_ttm_purge()
467 container_of(bo->ttm, typeof(*i915_tt), ttm); in i915_ttm_shrink()
531 ttm->pages, ttm->num_pages, in i915_ttm_tt_get_st()
808 if (bo->ttm && !ttm_tt_is_populated(bo->ttm)) { in __i915_ttm_get_pages()
927 container_of(bo->ttm, typeof(*i915_tt), ttm); in i915_ttm_adjust_lru()
[all …]
i915_gem_ttm_pm.c
24 if (obj->ttm.backup) { in i915_ttm_backup_free()
25 i915_gem_object_put(obj->ttm.backup); in i915_ttm_backup_free()
26 obj->ttm.backup = NULL; in i915_ttm_backup_free()
56 if (bo->resource->mem_type == I915_PL_SYSTEM || obj->ttm.backup) in i915_ttm_backup()
93 err = ttm_tt_populate(backup_bo->bdev, backup_bo->ttm, &ctx); in i915_ttm_backup()
106 obj->ttm.backup = backup; in i915_ttm_backup()
174 struct drm_i915_gem_object *backup = obj->ttm.backup; in i915_ttm_restore()
190 err = ttm_tt_populate(backup_bo->bdev, backup_bo->ttm, &ctx); in i915_ttm_restore()
197 obj->ttm.backup = NULL; in i915_ttm_restore()
i915_gem_ttm_move.c
53 struct ttm_tt *ttm) in i915_ttm_cache_level() argument
57 ttm->caching == ttm_cached) ? I915_CACHE_LLC : in i915_ttm_cache_level()
125 bo->ttm); in i915_ttm_adjust_gem_after_move()
198 struct ttm_tt *src_ttm = bo->ttm; in i915_ttm_accel_move()
329 &obj->ttm.cached_io_rsgt->table, in i915_ttm_memcpy_init()
574 struct ttm_tt *ttm = bo->ttm; in i915_ttm_move() local
621 if (ttm && (dst_man->use_tt || (ttm->page_flags & TTM_TT_FLAG_SWAPPED))) { in i915_ttm_move()
631 clear = !i915_ttm_cpu_maps_iomem(bo->resource) && (!ttm || !ttm_tt_is_populated(ttm)); in i915_ttm_move()
632 if (!(clear && ttm && !(ttm->page_flags & TTM_TT_FLAG_ZERO_ALLOC))) { in i915_ttm_move()
672 obj->ttm.cached_io_rsgt = dst_rsgt; in i915_ttm_move()
[all …]
/linux-6.3-rc2/drivers/gpu/drm/nouveau/
nouveau_sgdma.c
15 struct ttm_tt ttm; member
20 nouveau_sgdma_destroy(struct ttm_device *bdev, struct ttm_tt *ttm) in nouveau_sgdma_destroy() argument
22 struct nouveau_sgdma_be *nvbe = (struct nouveau_sgdma_be *)ttm; in nouveau_sgdma_destroy()
24 if (ttm) { in nouveau_sgdma_destroy()
25 ttm_tt_fini(&nvbe->ttm); in nouveau_sgdma_destroy()
33 struct nouveau_sgdma_be *nvbe = (struct nouveau_sgdma_be *)ttm; in nouveau_sgdma_bind()
41 ret = nouveau_mem_host(reg, &nvbe->ttm); in nouveau_sgdma_bind()
58 nouveau_sgdma_unbind(struct ttm_device *bdev, struct ttm_tt *ttm) in nouveau_sgdma_unbind() argument
60 struct nouveau_sgdma_be *nvbe = (struct nouveau_sgdma_be *)ttm; in nouveau_sgdma_unbind()
86 if (ttm_sg_tt_init(&nvbe->ttm, bo, page_flags, caching)) { in nouveau_sgdma_create_ttm()
[all …]
nouveau_ttm.c
170 drm->ttm.type_host[!!kind] = typei; in nouveau_ttm_init_host()
176 drm->ttm.type_ncoh[!!kind] = typei; in nouveau_ttm_init_host()
191 ttm_resource_manager_init(man, &drm->ttm.bdev, in nouveau_ttm_init_vram()
251 ttm_range_man_fini(&drm->ttm.bdev, TTM_PL_TT); in nouveau_ttm_fini_gtt()
291 drm->ttm.type_vram = typei; in nouveau_ttm_init()
293 drm->ttm.type_vram = -1; in nouveau_ttm_init()
344 mutex_init(&drm->ttm.io_reserve_mutex); in nouveau_ttm_init()
345 INIT_LIST_HEAD(&drm->ttm.io_reserve_lru); in nouveau_ttm_init()
360 ttm_device_fini(&drm->ttm.bdev); in nouveau_ttm_fini()
362 arch_phys_wc_del(drm->ttm.mtrr); in nouveau_ttm_fini()
[all …]
nouveau_bo.c
738 ttm_agp_unbind(ttm); in nouveau_ttm_tt_unbind()
901 &drm->ttm.copy); in nouveau_bo_move_init()
910 drm->ttm.chan = chan; in nouveau_bo_move_init()
1029 !bo->ttm)) { in nouveau_bo_move()
1049 if (drm->ttm.move) { in nouveau_bo_move()
1284 if (slave && ttm->sg) { in nouveau_ttm_tt_populate()
1286 ttm->num_pages); in nouveau_ttm_tt_populate()
1292 return ttm_pool_alloc(&drm->ttm.bdev.pool, ttm, ctx); in nouveau_ttm_tt_populate()
1297 struct ttm_tt *ttm) in nouveau_ttm_tt_unpopulate() argument
1309 return ttm_pool_free(&drm->ttm.bdev.pool, ttm); in nouveau_ttm_tt_unpopulate()
[all …]
nouveau_ttm.h
8 return container_of(bd, struct nouveau_drm, ttm.bdev); in nouveau_bdev()
24 int nouveau_sgdma_bind(struct ttm_device *bdev, struct ttm_tt *ttm, struct ttm_resource *reg);
25 void nouveau_sgdma_unbind(struct ttm_device *bdev, struct ttm_tt *ttm);
26 void nouveau_sgdma_destroy(struct ttm_device *bdev, struct ttm_tt *ttm);
nouveau_mem.c
100 type = drm->ttm.type_ncoh[!!mem->kind]; in nouveau_mem_host()
102 type = drm->ttm.type_host[0]; in nouveau_mem_host()
139 drm->ttm.type_vram, page, size, in nouveau_mem_vram()
147 drm->ttm.type_vram, page, size, in nouveau_mem_vram()
/linux-6.3-rc2/drivers/gpu/drm/amd/amdgpu/
amdgpu_ttm.c
653 struct ttm_tt *ttm = bo->tbo.ttm; in amdgpu_ttm_tt_get_user_pages() local
763 r = sg_alloc_table_from_pages(ttm->sg, ttm->pages, ttm->num_pages, 0, in amdgpu_ttm_tt_pin_userptr()
775 drm_prime_sg_to_dma_addr_array(ttm->sg, gtt->ttm.dma_address, in amdgpu_ttm_tt_pin_userptr()
781 kfree(ttm->sg); in amdgpu_ttm_tt_pin_userptr()
782 ttm->sg = NULL; in amdgpu_ttm_tt_pin_userptr()
799 if (!ttm->sg || !ttm->sg->sgl) in amdgpu_ttm_tt_unpin_userptr()
812 struct ttm_tt *ttm = tbo->ttm; in amdgpu_ttm_gart_bind() local
886 ttm->num_pages, bo_mem, ttm); in amdgpu_ttm_backend_bind()
972 if (!tbo->ttm) in amdgpu_ttm_recover_gart()
1077 if (!ttm->sg) in amdgpu_ttm_tt_populate()
[all …]
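radeon_ttm.c (line 373) and amdgpu_ttm.c (line 763) share the same userptr pinning shape: wrap the pinned user pages in an sg_table, DMA-map it, then scatter the mapped addresses back into the flat per-page array that GART binding consumes, freeing ttm->sg on failure. A hedged sketch of that shape, with the dma_map_sgtable() step elided and the function name invented:

#include <linux/scatterlist.h>
#include <drm/drm_prime.h>
#include <drm/ttm/ttm_tt.h>

static int example_pin_userptr(struct ttm_tt *ttm, dma_addr_t *dma_address)
{
	int r;

	ttm->sg = kmalloc(sizeof(*ttm->sg), GFP_KERNEL);
	if (!ttm->sg)
		return -ENOMEM;

	/* Build one sg_table covering all of the pinned user pages. */
	r = sg_alloc_table_from_pages(ttm->sg, ttm->pages, ttm->num_pages, 0,
				      (u64)ttm->num_pages << PAGE_SHIFT,
				      GFP_KERNEL);
	if (r)
		goto release_sg;

	/* ... dma_map_sgtable() on the table would happen here ... */

	/* Fan the mapped addresses out to the flat array GART binding uses. */
	drm_prime_sg_to_dma_addr_array(ttm->sg, dma_address, ttm->num_pages);
	return 0;

release_sg:
	/* Mirror the error path at lines 781–782 above. */
	kfree(ttm->sg);
	ttm->sg = NULL;
	return r;
}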
amdgpu_ttm.h
162 void amdgpu_ttm_tt_discard_user_pages(struct ttm_tt *ttm,
164 bool amdgpu_ttm_tt_get_user_pages_done(struct ttm_tt *ttm,
173 static inline void amdgpu_ttm_tt_discard_user_pages(struct ttm_tt *ttm, in amdgpu_ttm_tt_discard_user_pages() argument
177 static inline bool amdgpu_ttm_tt_get_user_pages_done(struct ttm_tt *ttm, in amdgpu_ttm_tt_get_user_pages_done() argument
184 void amdgpu_ttm_tt_set_user_pages(struct ttm_tt *ttm, struct page **pages);
189 bool amdgpu_ttm_tt_has_userptr(struct ttm_tt *ttm);
190 struct mm_struct *amdgpu_ttm_tt_get_usermm(struct ttm_tt *ttm);
191 bool amdgpu_ttm_tt_affect_userptr(struct ttm_tt *ttm, unsigned long start,
193 bool amdgpu_ttm_tt_userptr_invalidated(struct ttm_tt *ttm,
195 bool amdgpu_ttm_tt_is_userptr(struct ttm_tt *ttm);
[all …]
amdgpu_amdkfd_gpuvm.c
462 struct ttm_tt *ttm = bo->tbo.ttm; in kfd_mem_dmamap_userptr() local
468 ttm->sg = kmalloc(sizeof(*ttm->sg), GFP_KERNEL); in kfd_mem_dmamap_userptr()
484 drm_prime_sg_to_dma_addr_array(ttm->sg, ttm->dma_address, in kfd_mem_dmamap_userptr()
500 kfree(ttm->sg); in kfd_mem_dmamap_userptr()
501 ttm->sg = NULL; in kfd_mem_dmamap_userptr()
548 struct ttm_tt *ttm = bo->tbo.ttm; in kfd_mem_dmamap_sg_bo() local
588 kfree(ttm->sg); in kfd_mem_dmamap_sg_bo()
589 ttm->sg = NULL; in kfd_mem_dmamap_sg_bo()
625 struct ttm_tt *ttm = bo->tbo.ttm; in kfd_mem_dmaunmap_userptr() local
635 kfree(ttm->sg); in kfd_mem_dmaunmap_userptr()
[all …]
/linux-6.3-rc2/drivers/gpu/drm/vmwgfx/
vmwgfx_ttm_buffer.c
403 ttm->num_pages, vmw_be->gmr_id); in vmw_ttm_bind()
408 vmw_mob_create(ttm->num_pages); in vmw_ttm_bind()
414 &vmw_be->vsgt, ttm->num_pages, in vmw_ttm_bind()
428 struct ttm_tt *ttm) in vmw_ttm_unbind() argument
461 ttm_tt_fini(ttm); in vmw_ttm_destroy()
475 if (ttm_tt_is_populated(ttm)) in vmw_ttm_populate()
478 ret = ttm_pool_alloc(&bdev->pool, ttm, ctx); in vmw_ttm_populate()
484 struct ttm_tt *ttm) in vmw_ttm_unpopulate() argument
489 vmw_ttm_unbind(bdev, ttm); in vmw_ttm_unpopulate()
498 ttm_pool_free(&bdev->pool, ttm); in vmw_ttm_unpopulate()
[all …]
vmwgfx_blit.c
468 if (!ttm_tt_is_populated(dst->ttm)) { in vmw_bo_cpu_blit()
469 ret = dst->bdev->funcs->ttm_tt_populate(dst->bdev, dst->ttm, &ctx); in vmw_bo_cpu_blit()
474 if (!ttm_tt_is_populated(src->ttm)) { in vmw_bo_cpu_blit()
475 ret = src->bdev->funcs->ttm_tt_populate(src->bdev, src->ttm, &ctx); in vmw_bo_cpu_blit()
484 d.dst_pages = dst->ttm->pages; in vmw_bo_cpu_blit()
485 d.src_pages = src->ttm->pages; in vmw_bo_cpu_blit()
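vmw_bo_cpu_blit() applies the same populate-on-demand guard to both source and destination before touching pages with the CPU, dispatching through the per-device hook from ttm_device.h above. Factored into a sketch (the helper name is invented):

#include <drm/ttm/ttm_bo.h>
#include <drm/ttm/ttm_device.h>
#include <drm/ttm/ttm_tt.h>

static int example_ensure_populated(struct ttm_buffer_object *bo,
				    struct ttm_operation_ctx *ctx)
{
	/* Nothing to do if the TT already has backing pages. */
	if (ttm_tt_is_populated(bo->ttm))
		return 0;

	/* Same dispatch the blit code uses at lines 469/475 above. */
	return bo->bdev->funcs->ttm_tt_populate(bo->bdev, bo->ttm, ctx);
}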
/linux-6.3-rc2/drivers/gpu/drm/qxl/
qxl_ttm.c
102 static void qxl_ttm_backend_destroy(struct ttm_device *bdev, struct ttm_tt *ttm) in qxl_ttm_backend_destroy() argument
104 ttm_tt_fini(ttm); in qxl_ttm_backend_destroy()
105 kfree(ttm); in qxl_ttm_backend_destroy()
111 struct ttm_tt *ttm; in qxl_ttm_tt_create() local
113 ttm = kzalloc(sizeof(struct ttm_tt), GFP_KERNEL); in qxl_ttm_tt_create()
114 if (ttm == NULL) in qxl_ttm_tt_create()
116 if (ttm_tt_init(ttm, bo, page_flags, ttm_cached, 0)) { in qxl_ttm_tt_create()
117 kfree(ttm); in qxl_ttm_tt_create()
120 return ttm; in qxl_ttm_tt_create()
152 if (old_mem->mem_type == TTM_PL_SYSTEM && bo->ttm == NULL) { in qxl_bo_move()
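The qxl hits are close to the smallest possible ttm_tt backend: a bare struct ttm_tt, no driver wrapper at all. Reassembled from lines 102–120 (the elided lines filled in as a best guess), they amount to:

static void qxl_ttm_backend_destroy(struct ttm_device *bdev,
				    struct ttm_tt *ttm)
{
	ttm_tt_fini(ttm);
	kfree(ttm);
}

static struct ttm_tt *qxl_ttm_tt_create(struct ttm_buffer_object *bo,
					uint32_t page_flags)
{
	struct ttm_tt *ttm;

	ttm = kzalloc(sizeof(struct ttm_tt), GFP_KERNEL);
	if (ttm == NULL)
		return NULL;
	if (ttm_tt_init(ttm, bo, page_flags, ttm_cached, 0)) {
		kfree(ttm);
		return NULL;
	}
	return ttm;
}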
/linux-6.3-rc2/arch/powerpc/perf/
ppc970-pmu.c
264 unsigned int ttm, grp; in p970_compute_mmcr() local
321 ttm = unitmap[i]; in p970_compute_mmcr()
322 ++ttmuse[(ttm >> 2) & 1]; in p970_compute_mmcr()
323 mmcr1 |= (unsigned long)(ttm & ~4) << MMCR1_TTM1SEL_SH; in p970_compute_mmcr()
335 ttm = (unitmap[unit] >> 2) & 1; in p970_compute_mmcr()
337 ttm = 2; in p970_compute_mmcr()
339 ttm = 3; in p970_compute_mmcr()
343 mmcr1 |= (unsigned long)ttm in p970_compute_mmcr()

Completed in 41 milliseconds
