/linux-6.3-rc2/drivers/staging/media/atomisp/pci/hmm/

hmm_bo.c
    59  memset(bo, 0, sizeof(*bo));  in __bo_init()
   259  bo->pgnr = bo->pgnr - pgnr;  in __bo_break_up()
   276  if (!bo->prev && !bo->next) {  in __bo_take_off_handling()
   283  } else if (!bo->prev && bo->next) {  in __bo_take_off_handling()
   293  } else if (bo->prev && !bo->next) {  in __bo_take_off_handling()
   301  } else if (bo->prev && bo->next) {  in __bo_take_off_handling()
   302  bo->next->prev = bo->prev;  in __bo_take_off_handling()
   303  bo->prev->next = bo->next;  in __bo_take_off_handling()
   485  bo = __bo_merge(prev_bo, bo);  in hmm_bo_release()
   492  bo = __bo_merge(bo, next_bo);  in hmm_bo_release()
  [all …]
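The __bo_take_off_handling() hits above are the textbook four-case unlink from a doubly linked free list. A minimal, self-contained sketch of that pattern, assuming a made-up struct node (the real function also updates the driver's free-block bookkeeping, which the hits don't show):

struct node {
	struct node *prev, *next;
};

/* Unlink n from the list rooted at *head -- the same four cases the
 * hits show at lines 276, 283, 293 and 301. */
static void take_off(struct node **head, struct node *n)
{
	if (!n->prev && !n->next) {		/* only element */
		*head = NULL;
	} else if (!n->prev && n->next) {	/* head of the list */
		n->next->prev = NULL;
		*head = n->next;
	} else if (n->prev && !n->next) {	/* tail of the list */
		n->prev->next = NULL;
	} else {				/* middle: splice out */
		n->next->prev = n->prev;
		n->prev->next = n->next;
	}
	n->prev = n->next = NULL;
}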
hmm.c
    72  hmm_bo_type_string[bo->type], bo->pgnr);  in bo_show()
    74  total[bo->type] += bo->pgnr;  in bo_show()
   188  if (!bo) {  in __hmm_alloc()
   210  return bo->start;  in __hmm_alloc()
   241  if (!bo) {  in hmm_free()
   255  if (!bo) {  in hmm_check_bo()
   528  if (!bo) {  in hmm_virt_to_phys()
   546  if (!bo) {  in hmm_mmap()
   563  if (!bo) {  in hmm_vmap()
   583  if (!bo) {  in hmm_flush_vmap()
  [all …]
/linux-6.3-rc2/drivers/accel/ivpu/

ivpu_gem.c
   200  ret = bo->ops->alloc_pages(bo);  in ivpu_bo_alloc_and_map_pages_locked()
   206  ret = bo->ops->map_pages(bo);  in ivpu_bo_alloc_and_map_pages_locked()
   214  bo->ops->free_pages(bo);  in ivpu_bo_alloc_and_map_pages_locked()
   223  bo->ops->unmap_pages(bo);  in ivpu_bo_unmap_and_free_pages()
   225  bo->ops->free_pages(bo);  in ivpu_bo_unmap_and_free_pages()
   247  bo->ctx->id, bo->handle);  in ivpu_bo_pin()
   306  ctx->id, bo->vpu_addr, (bool)bo->sgt, bo->mmu_mapped);  in ivpu_bo_free_vpu_addr()
   406  bo->ctx->id, bo->vpu_addr, (bool)bo->sgt, bo->mmu_mapped);  in ivpu_bo_free()
   436  bo->ctx->id, bo->handle, bo->vpu_addr, bo->base.size, bo->ops->name);  in ivpu_bo_mmap()
   551  file_priv->ctx.id, bo->vpu_addr, bo->base.size, bo->flags);  in ivpu_bo_create_ioctl()
  [all …]
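The ivpu_gem.c hits show a per-type ops table: each BO backend supplies alloc_pages/map_pages/free_pages callbacks, and the allocation path rolls back on a mapping failure. The shape suggested by hits 200, 206 and 214 (locking, prints and the exact error paths are elided; a sketch, not the verbatim function):

static int ivpu_bo_alloc_and_map_pages_locked(struct ivpu_bo *bo)
{
	int ret;

	ret = bo->ops->alloc_pages(bo);		/* hit at line 200 */
	if (ret)
		return ret;

	ret = bo->ops->map_pages(bo);		/* hit at line 206 */
	if (ret)
		goto err_free_pages;

	return 0;

err_free_pages:
	bo->ops->free_pages(bo);		/* roll back the allocation (line 214) */
	return ret;
}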
ivpu_gem.h
    47  int (*map_pages)(struct ivpu_bo *bo);
    51  int ivpu_bo_pin(struct ivpu_bo *bo);
    73  if (offset > bo->base.size || !bo->pages)  in ivpu_bo_get_page()
    91  if (bo->flags & DRM_IVPU_BO_WC)  in ivpu_bo_pgprot()
   102  return to_ivpu_device(bo->base.dev);  in ivpu_bo_to_vdev()
   107  if (vpu_addr < bo->vpu_addr)  in ivpu_to_cpu_addr()
   110  if (vpu_addr >= (bo->vpu_addr + bo->base.size))  in ivpu_to_cpu_addr()
   113  return bo->kvaddr + (vpu_addr - bo->vpu_addr);  in ivpu_to_cpu_addr()
   118  if (cpu_addr < bo->kvaddr)  in cpu_to_vpu_addr()
   121  if (cpu_addr >= (bo->kvaddr + bo->base.size))  in cpu_to_vpu_addr()
  [all …]
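Lines 107-113 give essentially the whole VPU-to-CPU translation helper: bounds-check the VPU address against the BO's window, then apply the same offset to the kernel mapping. Reassembled from the hits (the parameter type and the NULL returns are inferred):

static inline void *ivpu_to_cpu_addr(struct ivpu_bo *bo, u32 vpu_addr)
{
	if (vpu_addr < bo->vpu_addr)			/* below the BO's window */
		return NULL;

	if (vpu_addr >= (bo->vpu_addr + bo->base.size))	/* past its end */
		return NULL;

	return bo->kvaddr + (vpu_addr - bo->vpu_addr);
}

cpu_to_vpu_addr() (hits 118 and 121) is the mirror image, translating a kernel pointer back into the VPU address space with the same two bounds checks.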
/linux-6.3-rc2/drivers/gpu/drm/ttm/

ttm_bo.c
   180  bo->bdev->funcs->delete_mem_notify(bo);  in ttm_bo_cleanup_memtype_use()
   183  ttm_resource_free(bo, &bo->resource);  in ttm_bo_cleanup_memtype_use()
   190  if (bo->base.resv == &bo->base._resv)  in ttm_bo_individualize_resv()
   206  bo->base.resv = &bo->base._resv;  in ttm_bo_individualize_resv()
   343  bo->bdev->funcs->release_notify(bo);  in ttm_bo_release()
   383  bo->destroy(bo);  in ttm_bo_release()
   607  bo = res->bo;  in ttm_mem_evict_first()
   970  bo->base.resv = &bo->base._resv;  in ttm_bo_init_reserved()
  1192  bo->bdev->funcs->swap_notify(bo);  in ttm_bo_swapout()
  1213  ttm_tt_unpopulate(bo->bdev, bo->ttm);  in ttm_bo_tt_destroy()
  [all …]
ttm_bo_util.c
   271  ttm_bo_get(bo);  in ttm_buffer_object_transfer()
   272  fbo->bo = bo;  in ttm_buffer_object_transfer()
   395  map->bo = bo;  in ttm_bo_kmap()
   401  ret = ttm_mem_io_reserve(bo->bdev, bo->resource);  in ttm_bo_kmap()
   440  ttm_mem_io_free(map->bo->bdev, map->bo->resource);  in ttm_bo_kunmap()
   544  ttm_mem_io_free(bo->bdev, bo->resource);  in ttm_bo_vunmap()
   562  ttm_resource_free(bo, &bo->resource);  in ttm_bo_wait_free_node()
   623  ttm_resource_free(bo, &bo->resource);  in ttm_bo_move_pipeline_evict()
   725  ttm_tt_unpopulate(bo->bdev, bo->ttm);  in ttm_bo_pipeline_gutting()
   729  ttm_resource_free(bo, &bo->resource);  in ttm_bo_pipeline_gutting()
  [all …]
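ttm_bo_kmap()/ttm_bo_kunmap() (hits 395-440) bracket CPU access to a BO's backing store. A hedged usage sketch against the public TTM API -- the caller must already hold the BO's reservation, and copy_first_page() is a hypothetical helper, not a kernel function:

#include <linux/io.h>
#include <linux/string.h>
#include <drm/ttm/ttm_bo.h>

static int copy_first_page(struct ttm_buffer_object *bo, void *out)
{
	struct ttm_bo_kmap_obj map;
	bool is_iomem;
	void *virt;
	int ret;

	ret = ttm_bo_kmap(bo, 0, 1, &map);	/* map page 0 only */
	if (ret)
		return ret;

	virt = ttm_kmap_obj_virtual(&map, &is_iomem);
	if (is_iomem)				/* e.g. VRAM: use the io accessor */
		memcpy_fromio(out, (void __iomem *)virt, PAGE_SIZE);
	else
		memcpy(out, virt, PAGE_SIZE);

	ttm_bo_kunmap(&map);			/* releases the io reservation (hit 440) */
	return 0;
}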
ttm_bo_vm.c
    61  ttm_bo_get(bo);  in ttm_bo_vm_fault_idle()
    67  ttm_bo_put(bo);  in ttm_bo_vm_fault_idle()
   133  ttm_bo_get(bo);  in ttm_bo_vm_reserve()
   138  ttm_bo_put(bo);  in ttm_bo_vm_reserve()
   152  if (bo->ttm && (bo->ttm->page_flags & TTM_TT_FLAG_EXTERNAL)) {  in ttm_bo_vm_reserve()
   218  prot = ttm_io_prot(bo, bo->resource, prot);  in ttm_bo_vm_fault_reserved()
   226  ttm = bo->ttm;  in ttm_bo_vm_fault_reserved()
   349  ttm_bo_get(bo);  in ttm_bo_vm_open()
   357  ttm_bo_put(bo);  in ttm_bo_vm_close()
   431  ttm_bo_unreserve(bo);  in ttm_bo_vm_access()
  [all …]
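The paired ttm_bo_get()/ttm_bo_put() hits at lines 349 and 357 encode the VMA lifetime rule: every mapping holds one BO reference. The two callbacks are short enough to reconstruct approximately (a sanity WARN_ON in the real open path is elided):

void ttm_bo_vm_open(struct vm_area_struct *vma)
{
	struct ttm_buffer_object *bo = vma->vm_private_data;

	ttm_bo_get(bo);			/* new VMA, new reference (line 349) */
}

void ttm_bo_vm_close(struct vm_area_struct *vma)
{
	struct ttm_buffer_object *bo = vma->vm_private_data;

	ttm_bo_put(bo);			/* dropped when the VMA goes away (line 357) */
	vma->vm_private_data = NULL;
}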
/linux-6.3-rc2/drivers/gpu/drm/qxl/

qxl_object.c
    49  kfree(bo);  in qxl_ttm_bo_destroy()
   130  kfree(bo);  in qxl_bo_create()
   176  r = ttm_bo_vmap(&bo->tbo, &bo->map);  in qxl_bo_vmap_locked()
   185  bo->kptr = (void *)bo->map.vaddr_iomem;  in qxl_bo_vmap_locked()
   187  bo->kptr = bo->map.vaddr;  in qxl_bo_vmap_locked()
   250  ttm_bo_vunmap(&bo->tbo, &bo->map);  in qxl_bo_vunmap_locked()
   286  *bo = NULL;  in qxl_bo_unref()
   292  return bo;  in qxl_bo_ref()
   305  qxl_ttm_placement_from_domain(bo, bo->type);  in __qxl_bo_pin()
   306  r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);  in __qxl_bo_pin()
  [all …]
qxl_object.h
    30  static inline int qxl_bo_reserve(struct qxl_bo *bo)  in qxl_bo_reserve() argument
    34  r = ttm_bo_reserve(&bo->tbo, true, false, NULL);  in qxl_bo_reserve()
    37  struct drm_device *ddev = bo->tbo.base.dev;  in qxl_bo_reserve()
    39  dev_err(ddev->dev, "%p reserve failed\n", bo);  in qxl_bo_reserve()
    48  ttm_bo_unreserve(&bo->tbo);  in qxl_bo_unreserve()
    53  return bo->tbo.base.size;  in qxl_bo_size()
    64  int qxl_bo_vunmap(struct qxl_bo *bo);
    65  void qxl_bo_vunmap_locked(struct qxl_bo *bo);
    69  extern void qxl_bo_unref(struct qxl_bo **bo);
    70  extern int qxl_bo_pin(struct qxl_bo *bo);
  [all …]
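Hits 30-39 cover nearly all of qxl_bo_reserve(): a thin wrapper over ttm_bo_reserve() that logs hard failures. Stitched back together -- the -ERESTARTSYS filter is inferred from the sibling radeon helper, so treat this as an approximation rather than the verbatim header:

static inline int qxl_bo_reserve(struct qxl_bo *bo)
{
	int r;

	r = ttm_bo_reserve(&bo->tbo, true, false, NULL);	/* interruptible */
	if (unlikely(r != 0)) {
		if (r != -ERESTARTSYS) {	/* signal interruption isn't worth a print */
			struct drm_device *ddev = bo->tbo.base.dev;

			dev_err(ddev->dev, "%p reserve failed\n", bo);
		}
		return r;
	}
	return 0;
}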
qxl_release.c
   128  bo = to_qxl_bo(entry->tv.bo);  in qxl_release_free_list()
   176  if (entry->tv.bo == &bo->tbo)  in qxl_release_list_add()
   184  qxl_bo_ref(bo);  in qxl_release_list_add()
   185  entry->tv.bo = &bo->tbo;  in qxl_release_list_add()
   197  qxl_ttm_placement_from_domain(bo, bo->type);  in qxl_release_validate_bo()
   198  ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);  in qxl_release_validate_bo()
   208  ret = qxl_bo_check_id(to_qxl(bo->tbo.base.dev), bo);  in qxl_release_validate_bo()
   230  struct qxl_bo *bo = to_qxl_bo(entry->tv.bo);  in qxl_release_reserve_list() local
   342  *rbo = bo;  in qxl_alloc_release_reserved()
   418  bdev = bo->bdev;  in qxl_release_fence_buffer_objects()
  [all …]
/linux-6.3-rc2/drivers/gpu/drm/radeon/

radeon_object.c
    64  drm_prime_gem_destroy(&bo->tbo.base, bo->tbo.sg);  in radeon_ttm_bo_destroy()
    66  kfree(bo);  in radeon_ttm_bo_destroy()
   235  r = ttm_bo_kmap(&bo->tbo, 0, PFN_UP(bo->tbo.base.size), &bo->kmap);  in radeon_bo_kmap()
   262  return bo;  in radeon_bo_ref()
   326  bo->rdev->vram_pin_size += radeon_bo_size(bo);  in radeon_bo_pin_restricted()
   328  bo->rdev->gart_pin_size += radeon_bo_size(bo);  in radeon_bo_pin_restricted()
   330  dev_err(bo->rdev->dev, "%p pin failed\n", bo);  in radeon_bo_pin_restricted()
   345  bo->rdev->vram_pin_size -= radeon_bo_size(bo);  in radeon_bo_unpin()
   347  bo->rdev->gart_pin_size -= radeon_bo_size(bo);  in radeon_bo_unpin()
   380  &bo->tbo.base, bo, (unsigned long)bo->tbo.base.size,  in radeon_bo_force_delete()
  [all …]
radeon_object.h
    68  r = ttm_bo_reserve(&bo->tbo, !no_intr, false, NULL);  in radeon_bo_reserve()
    71  dev_err(bo->rdev->dev, "%p reserve failed\n", bo);  in radeon_bo_reserve()
    79  ttm_bo_unreserve(&bo->tbo);  in radeon_bo_unreserve()
    96  rdev = radeon_get_rdev(bo->tbo.bdev);  in radeon_bo_gpu_offset()
    98  switch (bo->tbo.resource->mem_type) {  in radeon_bo_gpu_offset()
   107  return (bo->tbo.resource->start << PAGE_SHIFT) + start;  in radeon_bo_gpu_offset()
   112  return bo->tbo.base.size;  in radeon_bo_size()
   117  return bo->tbo.base.size / RADEON_GPU_PAGE_SIZE;  in radeon_bo_ngpu_pages()
   143  extern void radeon_bo_kunmap(struct radeon_bo *bo);
   145  extern void radeon_bo_unref(struct radeon_bo **bo);
  [all …]
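radeon_bo_gpu_offset() (hits 96, 98 and 107) turns a TTM placement into a GPU address: pick a base for wherever the BO currently lives, then add its page offset. A sketch; the mc.vram_start/mc.gtt_start base fields come from the surrounding radeon code, not from the hits themselves:

static inline u64 radeon_bo_gpu_offset(struct radeon_bo *bo)
{
	struct radeon_device *rdev = radeon_get_rdev(bo->tbo.bdev);
	u64 start = 0;

	switch (bo->tbo.resource->mem_type) {
	case TTM_PL_VRAM:
		start = rdev->mc.vram_start;	/* assumed VRAM aperture base */
		break;
	case TTM_PL_TT:
		start = rdev->mc.gtt_start;	/* assumed GTT aperture base */
		break;
	}

	return (bo->tbo.resource->start << PAGE_SHIFT) + start;
}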
radeon_prime.c
    42  bo->tbo.ttm->num_pages);  in radeon_gem_prime_get_sg_table()
    51  struct radeon_bo *bo;  in radeon_gem_prime_import_sg_table() local
    67  bo->prime_shared_count = 1;  in radeon_gem_prime_import_sg_table()
    68  return &bo->tbo.base;  in radeon_gem_prime_import_sg_table()
    83  bo->prime_shared_count++;  in radeon_gem_prime_pin()
    85  radeon_bo_unreserve(bo);  in radeon_gem_prime_pin()
    98  radeon_bo_unpin(bo);  in radeon_gem_prime_unpin()
    99  if (bo->prime_shared_count)  in radeon_gem_prime_unpin()
   100  bo->prime_shared_count--;  in radeon_gem_prime_unpin()
   101  radeon_bo_unreserve(bo);  in radeon_gem_prime_unpin()
  [all …]
radeon_mn.c
    57  if (!bo->tbo.ttm || !radeon_ttm_tt_is_bound(bo->tbo.bdev, bo->tbo.ttm))  in radeon_mn_invalidate()
    63  r = radeon_bo_reserve(bo, true);  in radeon_mn_invalidate()
    74  radeon_ttm_placement_from_domain(bo, RADEON_GEM_DOMAIN_CPU);  in radeon_mn_invalidate()
    75  r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);  in radeon_mn_invalidate()
    79  radeon_bo_unreserve(bo);  in radeon_mn_invalidate()
   101  radeon_bo_size(bo), &radeon_mn_ops);  in radeon_mn_register()
   111  mmu_interval_read_begin(&bo->notifier);  in radeon_mn_register()
   122  void radeon_mn_unregister(struct radeon_bo *bo)  in radeon_mn_unregister() argument
   124  if (!bo->notifier.mm)  in radeon_mn_unregister()
   126  mmu_interval_notifier_remove(&bo->notifier);  in radeon_mn_unregister()
  [all …]
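The radeon_mn.c hits trace the MMU-notifier eviction dance for userptr BOs: when the backing range changes, reserve the BO and force it out of the GPU domain by re-validating it for CPU placement. In outline (fence waiting, range checks and the notifier plumbing are elided; the _sketch suffix marks this as illustrative, not the real callback):

static bool radeon_mn_invalidate_sketch(struct radeon_bo *bo)
{
	struct ttm_operation_ctx ctx = { .interruptible = false };
	int r;

	if (!bo->tbo.ttm || !radeon_ttm_tt_is_bound(bo->tbo.bdev, bo->tbo.ttm))
		return true;			/* not bound: nothing to tear down (line 57) */

	r = radeon_bo_reserve(bo, true);	/* line 63 */
	if (r)
		return true;

	radeon_ttm_placement_from_domain(bo, RADEON_GEM_DOMAIN_CPU);
	r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);	/* unbinds the GPU pages */

	radeon_bo_unreserve(bo);		/* line 79 */
	return true;
}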
/linux-6.3-rc2/drivers/gpu/drm/tegra/

gem.c
    71  map->bo = host1x_bo_get(bo);  in tegra_bo_pin()
   232  bo->mm = kzalloc(sizeof(*bo->mm), GFP_KERNEL);  in tegra_bo_iommu_map()
   239  bo->mm, bo->gem.size, PAGE_SIZE, 0, 0);  in tegra_bo_iommu_map()
   246  bo->iova = bo->mm->start;  in tegra_bo_iommu_map()
   248  bo->size = iommu_map_sgtable(tegra->domain, bo->iova, bo->sgt, prot);  in tegra_bo_iommu_map()
   273  iommu_unmap(tegra->domain, bo->iova, bo->size);  in tegra_bo_iommu_unmap()
   294  bo = kzalloc(sizeof(*bo), GFP_KERNEL);  in tegra_bo_alloc_object()
   328  dma_free_wc(drm->dev, bo->gem.size, bo->vaddr, bo->iova);  in tegra_bo_free()
   336  bo->pages = drm_gem_get_pages(&bo->gem);  in tegra_bo_get_pages()
   340  bo->num_pages = bo->gem.size >> PAGE_SHIFT;  in tegra_bo_get_pages()
  [all …]
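tegra_bo_iommu_map() (hits 232-248) is the by-hand IOVA assignment pattern: carve a node out of the domain's drm_mm address space, then map the BO's sg table there. Condensed from the hits -- the driver's mm_lock mutex and error prints are elided, and the _sketch suffix marks the shortened form:

static int tegra_bo_iommu_map_sketch(struct tegra_drm *tegra, struct tegra_bo *bo)
{
	int prot = IOMMU_READ | IOMMU_WRITE;
	int err;

	bo->mm = kzalloc(sizeof(*bo->mm), GFP_KERNEL);		/* line 232 */
	if (!bo->mm)
		return -ENOMEM;

	err = drm_mm_insert_node_generic(&tegra->mm, bo->mm,
					 bo->gem.size, PAGE_SIZE, 0, 0);
	if (err < 0)
		goto free;

	bo->iova = bo->mm->start;				/* line 246 */

	bo->size = iommu_map_sgtable(tegra->domain, bo->iova, bo->sgt, prot);
	if (!bo->size) {					/* mapped nothing */
		err = -ENOMEM;
		goto remove;
	}

	return 0;

remove:
	drm_mm_remove_node(bo->mm);
free:
	kfree(bo->mm);
	return err;
}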
/linux-6.3-rc2/drivers/gpu/drm/amd/amdgpu/

amdgpu_object.c
    63  drm_prime_gem_destroy(&bo->tbo.base, bo->tbo.sg);  in amdgpu_bo_destroy()
    66  kvfree(bo);  in amdgpu_bo_destroy()
   569  bo->allowed_domains = bo->preferred_domains;  in amdgpu_bo_create()
   623  *bo_ptr = bo;  in amdgpu_bo_create()
   780  r = ttm_bo_kmap(&bo->tbo, 0, PFN_UP(bo->tbo.base.size), &bo->kmap);  in amdgpu_bo_kmap()
   814  if (bo->kmap.bo)  in amdgpu_bo_kunmap()
   833  return bo;  in amdgpu_bo_ref()
   851  *bo = NULL;  in amdgpu_bo_unref()
   950  r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);  in amdgpu_bo_pin_restricted()
  1312  && bo->base.resv != &bo->base._resv);  in amdgpu_bo_release_notify()
  [all …]
amdgpu_object.h
   114  struct amdgpu_bo bo;  member
   123  struct amdgpu_bo bo;  member
   186  ttm_bo_unreserve(&bo->tbo);  in amdgpu_bo_unreserve()
   191  return bo->tbo.base.size;  in amdgpu_bo_size()
   226  amdgpu_res_first(bo->tbo.resource, 0, amdgpu_bo_size(bo), &cursor);  in amdgpu_bo_in_cpu_visible_vram()
   266  if (bo->tbo.type == ttm_bo_type_kernel)  in amdgpu_bo_shadowed()
   267  return to_amdgpu_bo_vm(bo)->shadow;  in amdgpu_bo_shadowed()
   298  void *amdgpu_bo_kptr(struct amdgpu_bo *bo);
   299  void amdgpu_bo_kunmap(struct amdgpu_bo *bo);
   301  void amdgpu_bo_unref(struct amdgpu_bo **bo);
  [all …]
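Several amdgpu_object.h hits are one-line wrappers that forward to the embedded TTM object; lines 186 and 191 reassemble to:

static inline void amdgpu_bo_unreserve(struct amdgpu_bo *bo)
{
	ttm_bo_unreserve(&bo->tbo);		/* line 186 */
}

static inline unsigned long amdgpu_bo_size(struct amdgpu_bo *bo)
{
	return bo->tbo.base.size;		/* line 191 */
}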
amdgpu_dma_buf.c
   123  amdgpu_bo_unpin(bo);  in amdgpu_dma_buf_unpin()
   149  if (!bo->tbo.pin_count) {  in amdgpu_dma_buf_map()
   160  r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);  in amdgpu_dma_buf_map()
   172  bo->tbo.ttm->pages,  in amdgpu_dma_buf_map()
   254  if (!bo->tbo.pin_count &&  in amdgpu_dma_buf_begin_cpu_access()
   257  ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);  in amdgpu_dma_buf_begin_cpu_access()
   260  amdgpu_bo_unreserve(bo);  in amdgpu_dma_buf_begin_cpu_access()
   323  struct amdgpu_bo *bo;  in amdgpu_dma_buf_create_obj() local
   343  bo = gem_to_amdgpu_bo(gobj);  in amdgpu_dma_buf_create_obj()
   375  if (!bo->tbo.resource || bo->tbo.resource->mem_type == TTM_PL_SYSTEM)  in amdgpu_dma_buf_move_notify()
  [all …]
amdgpu_amdkfd_gpuvm.c
   807  bo[i] = mem->bo;  in kfd_mem_attach()
   812  bo[i] = bo[0];  in kfd_mem_attach()
   905  entry->bo = &bo->tbo;  in add_kgd_mem_to_kfd_bo_list()
  1050  ctx->kfd_bo.tv.bo = &bo->tbo;  in reserve_bo_and_vm()
  1113  ctx->kfd_bo.tv.bo = &bo->tbo;  in reserve_bo_and_cond_vms()
  1692  (*mem)->bo = bo;  in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
  1872  bo = mem->bo;  in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
  2095  bo = amdgpu_bo_ref(bo);  in amdgpu_amdkfd_map_gtt_bo_to_gart()
  2254  (*mem)->bo = bo;  in amdgpu_amdkfd_gpuvm_import_dmabuf()
  2342  bo = mem->bo;  in update_invalid_user_pages()
  [all …]
/linux-6.3-rc2/drivers/gpu/drm/vc4/

vc4_bo.c
   380  if (bo)  in vc4_bo_get_from_cache()
   383  return bo;  in vc4_bo_get_from_cache()
   402  bo = kzalloc(sizeof(*bo), GFP_KERNEL);  in vc4_create_object()
   403  if (!bo)  in vc4_create_object()
   438  if (bo) {  in vc4_bo_create()
   440  memset(bo->base.vaddr, 0, bo->base.base.size);  in vc4_bo_create()
   441  return bo;  in vc4_bo_create()
   488  return bo;  in vc4_bo_create()
   507  if (IS_ERR(bo))  in vc4_bo_dumb_create()
   798  if (IS_ERR(bo))  in vc4_create_bo_ioctl()
  [all …]
/linux-6.3-rc2/include/drm/ttm/

ttm_bo.h
   160  struct ttm_buffer_object *bo;  member
   195  kref_get(&bo->kref);  in ttm_bo_get()
   213  return bo;  in ttm_bo_get_unless_zero()
   293  spin_lock(&bo->bdev->lru_lock);  in ttm_bo_move_to_lru_tail_unlocked()
   294  ttm_bo_move_to_lru_tail(bo);  in ttm_bo_move_to_lru_tail_unlocked()
   295  spin_unlock(&bo->bdev->lru_lock);  in ttm_bo_move_to_lru_tail_unlocked()
   301  WARN_ON(bo->resource);  in ttm_bo_assign_mem()
   302  bo->resource = new_mem;  in ttm_bo_assign_mem()
   315  ttm_resource_free(bo, &bo->resource);  in ttm_bo_move_null()
   316  ttm_bo_assign_mem(bo, new_mem);  in ttm_bo_move_null()
  [all …]
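Hits 293-316 reconstruct three of the header's inlines almost verbatim: the unlocked LRU mover brackets ttm_bo_move_to_lru_tail() with the device's lru_lock, and ttm_bo_move_null() frees the old resource before assigning the new one:

static inline void ttm_bo_move_to_lru_tail_unlocked(struct ttm_buffer_object *bo)
{
	spin_lock(&bo->bdev->lru_lock);		/* lines 293-295 */
	ttm_bo_move_to_lru_tail(bo);
	spin_unlock(&bo->bdev->lru_lock);
}

static inline void ttm_bo_assign_mem(struct ttm_buffer_object *bo,
				     struct ttm_resource *new_mem)
{
	WARN_ON(bo->resource);			/* must not leak the old one (line 301) */
	bo->resource = new_mem;
}

static inline void ttm_bo_move_null(struct ttm_buffer_object *bo,
				    struct ttm_resource *new_mem)
{
	ttm_resource_free(bo, &bo->resource);	/* line 315 */
	ttm_bo_assign_mem(bo, new_mem);		/* line 316 */
}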
/linux-6.3-rc2/drivers/gpu/drm/vmwgfx/

vmwgfx_bo.c
    99  ttm_bo_unreserve(bo);  in vmw_bo_pin_in_placement()
   214  bo->resource->start < PFN_UP(bo->resource->size) &&  in vmw_bo_pin_in_start_of_vram()
   324  ttm_bo_pin(bo);  in vmw_bo_pin_reserved()
   326  ttm_bo_unpin(bo);  in vmw_bo_pin_reserved()
   355  ret = ttm_bo_kmap(bo, 0, PFN_UP(bo->base.size), &vbo->map);  in vmw_bo_map_and_cache()
   399  kfree(bo);  in vmw_bo_default_destroy()
   424  bo = kzalloc(sizeof(*bo), GFP_KERNEL);  in vmw_bo_create_kernel()
   425  if (unlikely(!bo))  in vmw_bo_create_kernel()
   438  ttm_bo_pin(bo);  in vmw_bo_create_kernel()
   440  *p_bo = bo;  in vmw_bo_create_kernel()
  [all …]
/linux-6.3-rc2/drivers/gpu/drm/lima/

lima_gem.c
    27  size_t new_size = bo->heap_size ? bo->heap_size * 2 :  in lima_heap_alloc()
    32  if (bo->heap_size >= bo->base.base.size)  in lima_heap_alloc()
    39  if (bo->base.pages) {  in lima_heap_alloc()
    72  if (bo->base.sgt) {  in lima_heap_alloc()
    76  bo->base.sgt = kmalloc(sizeof(*bo->base.sgt), GFP_KERNEL);  in lima_heap_alloc()
   110  struct lima_bo *bo;  in lima_gem_create_handle() local
   180  if (bo->heap_size)  in lima_gem_pin()
   190  if (bo->heap_size)  in lima_gem_vmap()
   224  bo = kzalloc(sizeof(*bo), GFP_KERNEL);  in lima_gem_create_object()
   225  if (!bo)  in lima_gem_create_object()
  [all …]
/linux-6.3-rc2/drivers/gpu/drm/panfrost/

panfrost_gem.c
    38  if (bo->sgts) {  in panfrost_gem_free_object()
    43  if (bo->sgts[i].sgl) {  in panfrost_gem_free_object()
    49  kvfree(bo->sgts);  in panfrost_gem_free_object()
    52  drm_gem_shmem_free(&bo->base);  in panfrost_gem_free_object()
   131  mapping->obj = bo;  in panfrost_gem_open()
   139  if (!bo->noexec)  in panfrost_gem_open()
   152  if (!bo->is_heap) {  in panfrost_gem_open()
   192  if (bo->is_heap)  in panfrost_gem_pin()
   255  return bo;  in panfrost_gem_create()
   270  bo = to_panfrost_bo(obj);  in panfrost_gem_prime_import_sg_table()
  [all …]
/linux-6.3-rc2/drivers/gpu/drm/v3d/

v3d_bo.c
    36  v3d_mmu_remove_ptes(bo);  in v3d_free_object()
    70  struct v3d_bo *bo;  in v3d_create_object() local
    76  bo = kzalloc(sizeof(*bo), GFP_KERNEL);  in v3d_create_object()
    77  if (!bo)  in v3d_create_object()
    79  obj = &bo->base.base;  in v3d_create_object()
    85  return &bo->base.base;  in v3d_create_object()
   130  struct v3d_bo *bo;  in v3d_bo_create() local
   142  return bo;  in v3d_bo_create()
   183  if (IS_ERR(bo))  in v3d_create_bo_ioctl()
   184  return PTR_ERR(bo);  in v3d_create_bo_ioctl()
  [all …]