/linux/drivers/staging/media/atomisp/pci/hmm/

hmm_bo.c
    59  memset(bo, 0, sizeof(*bo));  in __bo_init()
   259  bo->pgnr = bo->pgnr - pgnr;  in __bo_break_up()
   276  if (!bo->prev && !bo->next) {  in __bo_take_off_handling()
   283  } else if (!bo->prev && bo->next) {  in __bo_take_off_handling()
   293  } else if (bo->prev && !bo->next) {  in __bo_take_off_handling()
   301  } else if (bo->prev && bo->next) {  in __bo_take_off_handling()
   302  bo->next->prev = bo->prev;  in __bo_take_off_handling()
   303  bo->prev->next = bo->next;  in __bo_take_off_handling()
   485  bo = __bo_merge(prev_bo, bo);  in hmm_bo_release()
   492  bo = __bo_merge(bo, next_bo);  in hmm_bo_release()
  [all …]
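
The __bo_take_off_handling() hits above enumerate the four prev/next cases of a doubly-linked-list unlink. A minimal userspace sketch of that pattern, with a hypothetical list-head parameter and a simplified struct rather than the driver's actual buffer-object type:

    #include <stddef.h>

    /* Simplified stand-in for the driver's buffer-object type. */
    struct bo {
            struct bo *prev;
            struct bo *next;
    };

    /* Unlink @bo, covering the same four prev/next cases as the
     * matches above; the list head is an assumed simplification. */
    static void bo_take_off(struct bo **head, struct bo *bo)
    {
            if (!bo->prev && !bo->next) {
                    *head = NULL;              /* only node: list is now empty */
            } else if (!bo->prev && bo->next) {
                    *head = bo->next;          /* head node: successor takes over */
                    bo->next->prev = NULL;
            } else if (bo->prev && !bo->next) {
                    bo->prev->next = NULL;     /* tail node: cut at predecessor */
            } else {
                    bo->next->prev = bo->prev; /* middle node: bridge neighbours */
                    bo->prev->next = bo->next;
            }
            bo->prev = bo->next = NULL;
    }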
|
hmm.c
    72  hmm_bo_type_string[bo->type], bo->pgnr);  in bo_show()
    74  total[bo->type] += bo->pgnr;  in bo_show()
   188  if (!bo) {  in __hmm_alloc()
   207  return bo->start;  in __hmm_alloc()
   236  if (!bo) {  in hmm_free()
   250  if (!bo) {  in hmm_check_bo()
   523  if (!bo) {  in hmm_virt_to_phys()
   541  if (!bo) {  in hmm_mmap()
   558  if (!bo) {  in hmm_vmap()
   578  if (!bo) {  in hmm_flush_vmap()
  [all …]
|
/linux/drivers/gpu/drm/ttm/

ttm_bo.c
   180  bo->bdev->funcs->delete_mem_notify(bo);  in ttm_bo_cleanup_memtype_use()
   183  ttm_resource_free(bo, &bo->resource);  in ttm_bo_cleanup_memtype_use()
   190  if (bo->base.resv == &bo->base._resv)  in ttm_bo_individualize_resv()
   206  bo->base.resv = &bo->base._resv;  in ttm_bo_individualize_resv()
   267  bo->bdev->funcs->release_notify(bo);  in ttm_bo_release()
   315  bo->destroy(bo);  in ttm_bo_release()
   459  bo = res->bo;  in ttm_bo_evict_first()
   904  bo->base.resv = &bo->base._resv;  in ttm_bo_init_reserved()
  1074  if (bo->pin_count || !bo->bdev->funcs->eviction_valuable(bo, &place)) {  in ttm_bo_swapout_cb()
  1129  bo->bdev->funcs->swap_notify(bo);  in ttm_bo_swapout_cb()
  [all …]
|
ttm_bo_util.c
   272  fbo->bo = bo;  in ttm_buffer_object_transfer()
   347  ttm_manager_type(bo->bdev, bo->resource->mem_type);  in ttm_bo_kmap_ttm()
   404  map->bo = bo;  in ttm_bo_kmap()
   410  ret = ttm_mem_io_reserve(bo->bdev, bo->resource);  in ttm_bo_kmap()
   449  ttm_mem_io_free(map->bo->bdev, map->bo->resource);  in ttm_bo_kunmap()
   553  ttm_mem_io_free(bo->bdev, bo->resource);  in ttm_bo_vunmap()
   571  ttm_resource_free(bo, &bo->resource);  in ttm_bo_wait_free_node()
   632  ttm_resource_free(bo, &bo->resource);  in ttm_bo_move_pipeline_evict()
   728  ttm_tt_unpopulate(bo->bdev, bo->ttm);  in ttm_bo_pipeline_gutting()
   732  ttm_resource_free(bo, &bo->resource);  in ttm_bo_pipeline_gutting()
  [all …]
|
ttm_bo_vm.c
    61  ttm_bo_get(bo);  in ttm_bo_vm_fault_idle()
    67  ttm_bo_put(bo);  in ttm_bo_vm_fault_idle()
   133  ttm_bo_get(bo);  in ttm_bo_vm_reserve()
   138  ttm_bo_put(bo);  in ttm_bo_vm_reserve()
   152  if (bo->ttm && (bo->ttm->page_flags & TTM_TT_FLAG_EXTERNAL)) {  in ttm_bo_vm_reserve()
   218  prot = ttm_io_prot(bo, bo->resource, prot);  in ttm_bo_vm_fault_reserved()
   226  ttm = bo->ttm;  in ttm_bo_vm_fault_reserved()
   356  ttm_bo_get(bo);  in ttm_bo_vm_open()
   364  ttm_bo_put(bo);  in ttm_bo_vm_close()
   438  ttm_bo_unreserve(bo);  in ttm_bo_vm_access()
  [all …]
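
The paired ttm_bo_get()/ttm_bo_put() hits in ttm_bo_vm_open() and ttm_bo_vm_close() reflect the usual rule that every VMA mapping a buffer object holds one reference for its lifetime. A rough standalone sketch of that pairing, with a plain atomic counter standing in for the kref the real object embeds:

    #include <stdatomic.h>

    struct bo {
            atomic_int kref;   /* stands in for the embedded struct kref */
    };

    static void bo_get(struct bo *bo)
    {
            atomic_fetch_add(&bo->kref, 1);
    }

    static void bo_put(struct bo *bo)
    {
            if (atomic_fetch_sub(&bo->kref, 1) == 1) {
                    /* last reference gone; the driver releases the BO here */
            }
    }

    /* mmap lifecycle hooks: each new VMA takes a reference, each
     * teardown drops one — the pairing matched above. */
    static void vm_open(struct bo *bo)  { bo_get(bo); }
    static void vm_close(struct bo *bo) { bo_put(bo); }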
|
/linux/drivers/accel/ivpu/

ivpu_gem.c
    29  action, bo, bo->vpu_addr, ivpu_bo_size(bo), bo->ctx ? bo->ctx->id : 0,  in ivpu_dbg_bo()
    30  (bool)bo->base.pages, (bool)bo->base.sgt, bo->mmu_mapped, bo->base.map_wc,  in ivpu_dbg_bo()
    90  bo->vpu_addr = bo->mm_node.start;  in ivpu_bo_alloc_vpu_addr()
   114  ivpu_mmu_context_unmap_sgt(vdev, bo->ctx, bo->vpu_addr, bo->base.sgt);  in ivpu_bo_unbind_locked()
   119  ivpu_mmu_context_remove_node(bo->ctx, &bo->mm_node);  in ivpu_bo_unbind_locked()
   162  bo = kzalloc(sizeof(*bo), GFP_KERNEL);  in ivpu_gem_create_object()
   163  if (!bo)  in ivpu_gem_create_object()
   200  return bo;  in ivpu_bo_alloc()
   329  return bo;  in ivpu_bo_create()
   409  bo, bo->ctx->id, bo->vpu_addr, bo->base.base.size,  in ivpu_bo_print_info()
  [all …]
|
ivpu_gem.h
    27  int ivpu_bo_pin(struct ivpu_bo *bo);
    34  void ivpu_bo_free(struct ivpu_bo *bo);
    50  return bo->base.vaddr;  in ivpu_bo_vaddr()
    55  return bo->base.base.size;  in ivpu_bo_size()
    65  return to_ivpu_device(bo->base.base.dev);  in ivpu_bo_to_vdev()
    78  if (vpu_addr < bo->vpu_addr)  in ivpu_to_cpu_addr()
    81  if (vpu_addr >= (bo->vpu_addr + ivpu_bo_size(bo)))  in ivpu_to_cpu_addr()
    84  return ivpu_bo_vaddr(bo) + (vpu_addr - bo->vpu_addr);  in ivpu_to_cpu_addr()
    89  if (cpu_addr < ivpu_bo_vaddr(bo))  in cpu_to_vpu_addr()
    92  if (cpu_addr >= (ivpu_bo_vaddr(bo) + ivpu_bo_size(bo)))  in cpu_to_vpu_addr()
  [all …]
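
The ivpu_to_cpu_addr()/cpu_to_vpu_addr() hits spell out a bounds-checked linear translation between the device's address space and the CPU mapping of the same buffer. A self-contained sketch of the same arithmetic; the struct layout and the out-of-range return conventions are simplified assumptions:

    #include <stdint.h>
    #include <stddef.h>

    /* Hypothetical stand-in for struct ivpu_bo: one contiguous mapping. */
    struct bo_map {
            uint64_t vpu_addr;   /* device-visible base address */
            void    *vaddr;      /* CPU mapping of the same pages */
            size_t   size;
    };

    /* Device -> CPU, with the same two range checks as the matches. */
    static void *vpu_to_cpu(const struct bo_map *bo, uint64_t vpu_addr)
    {
            if (vpu_addr < bo->vpu_addr)
                    return NULL;
            if (vpu_addr >= bo->vpu_addr + bo->size)
                    return NULL;
            return (char *)bo->vaddr + (vpu_addr - bo->vpu_addr);
    }

    /* CPU -> device: the mirror-image checks and offset. */
    static uint64_t cpu_to_vpu(const struct bo_map *bo, const void *cpu_addr)
    {
            const char *p = cpu_addr, *base = bo->vaddr;

            if (p < base || p >= base + bo->size)
                    return 0;   /* out of range; the driver signals this differently */
            return bo->vpu_addr + (uint64_t)(p - base);
    }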
|
/linux/drivers/gpu/drm/qxl/

qxl_object.c
    46  kfree(bo);  in qxl_ttm_bo_destroy()
   125  kfree(bo);  in qxl_bo_create()
   167  r = ttm_bo_vmap(&bo->tbo, &bo->map);  in qxl_bo_vmap_locked()
   176  bo->kptr = (void *)bo->map.vaddr_iomem;  in qxl_bo_vmap_locked()
   178  bo->kptr = bo->map.vaddr;  in qxl_bo_vmap_locked()
   249  ttm_bo_vunmap(&bo->tbo, &bo->map);  in qxl_bo_vunmap_locked()
   285  *bo = NULL;  in qxl_bo_unref()
   291  return bo;  in qxl_bo_ref()
   306  qxl_ttm_placement_from_domain(bo, bo->type);  in qxl_bo_pin_locked()
   307  r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);  in qxl_bo_pin_locked()
  [all …]
|
qxl_object.h
    34  r = ttm_bo_reserve(&bo->tbo, true, false, NULL);  in qxl_bo_reserve()
    37  struct drm_device *ddev = bo->tbo.base.dev;  in qxl_bo_reserve()
    39  dev_err(ddev->dev, "%p reserve failed\n", bo);  in qxl_bo_reserve()
    48  ttm_bo_unreserve(&bo->tbo);  in qxl_bo_unreserve()
    53  return bo->tbo.base.size;  in qxl_bo_size()
    64  int qxl_bo_vunmap_and_unpin(struct qxl_bo *bo);
    65  void qxl_bo_vunmap_locked(struct qxl_bo *bo);
    69  extern void qxl_bo_unref(struct qxl_bo **bo);
    70  extern int qxl_bo_pin_locked(struct qxl_bo *bo);
    72  extern int qxl_bo_pin(struct qxl_bo *bo);
  [all …]
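
qxl_bo_reserve() wraps ttm_bo_reserve() (interruptibly, per its first argument) and logs which object failed before handing the error back. A loose userspace analogue of that lock/log/unlock discipline, with a pthread mutex standing in for the TTM reservation object:

    #include <pthread.h>
    #include <stdio.h>

    struct bo {
            pthread_mutex_t lock;   /* stands in for the reservation lock */
    };

    static int bo_reserve(struct bo *bo)
    {
            int r = pthread_mutex_lock(&bo->lock);

            if (r)   /* the driver prints the failing object, then returns r */
                    fprintf(stderr, "%p reserve failed\n", (void *)bo);
            return r;
    }

    static void bo_unreserve(struct bo *bo)
    {
            pthread_mutex_unlock(&bo->lock);
    }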
|
/linux/drivers/gpu/drm/radeon/

radeon_object.c
    64  drm_prime_gem_destroy(&bo->tbo.base, bo->tbo.sg);  in radeon_ttm_bo_destroy()
    66  kfree(bo);  in radeon_ttm_bo_destroy()
   234  r = ttm_bo_kmap(&bo->tbo, 0, PFN_UP(bo->tbo.base.size), &bo->kmap);  in radeon_bo_kmap()
   261  return bo;  in radeon_bo_ref()
   322  bo->rdev->vram_pin_size += radeon_bo_size(bo);  in radeon_bo_pin_restricted()
   324  bo->rdev->gart_pin_size += radeon_bo_size(bo);  in radeon_bo_pin_restricted()
   326  dev_err(bo->rdev->dev, "%p pin failed\n", bo);  in radeon_bo_pin_restricted()
   341  bo->rdev->vram_pin_size -= radeon_bo_size(bo);  in radeon_bo_unpin()
   343  bo->rdev->gart_pin_size -= radeon_bo_size(bo);  in radeon_bo_unpin()
   376  &bo->tbo.base, bo, (unsigned long)bo->tbo.base.size,  in radeon_bo_force_delete()
  [all …]
|
radeon_object.h
    68  r = ttm_bo_reserve(&bo->tbo, !no_intr, false, NULL);  in radeon_bo_reserve()
    71  dev_err(bo->rdev->dev, "%p reserve failed\n", bo);  in radeon_bo_reserve()
    79  ttm_bo_unreserve(&bo->tbo);  in radeon_bo_unreserve()
    96  rdev = radeon_get_rdev(bo->tbo.bdev);  in radeon_bo_gpu_offset()
    98  switch (bo->tbo.resource->mem_type) {  in radeon_bo_gpu_offset()
   107  return (bo->tbo.resource->start << PAGE_SHIFT) + start;  in radeon_bo_gpu_offset()
   112  return bo->tbo.base.size;  in radeon_bo_size()
   117  return bo->tbo.base.size / RADEON_GPU_PAGE_SIZE;  in radeon_bo_ngpu_pages()
   143  extern void radeon_bo_kunmap(struct radeon_bo *bo);
   145  extern void radeon_bo_unref(struct radeon_bo **bo);
  [all …]
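
radeon_bo_gpu_offset() turns the resource's page-granular start into bytes and adds an aperture base picked by the mem_type switch. The arithmetic, reduced to a standalone helper; the 4 KiB PAGE_SHIFT is an assumption for illustration:

    #include <stdint.h>

    #define PAGE_SHIFT 12   /* assumed 4 KiB pages */

    /* resource_start counts pages; aperture_base (VRAM or GTT start,
     * chosen by mem_type in the matches above) is already in bytes. */
    static uint64_t bo_gpu_offset(uint64_t resource_start,
                                  uint64_t aperture_base)
    {
            return (resource_start << PAGE_SHIFT) + aperture_base;
    }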
|
/linux/drivers/gpu/drm/xe/

xe_bo.h
   119  #define xe_bo_device(bo) ttm_to_xe_device((bo)->ttm.bdev)  argument
   123  if (bo)  in xe_bo_get()
   126  return bo;  in xe_bo_get()
   133  if (bo)  in __xe_bo_unset_bulk_move()
   139  if (bo)  in xe_bo_assert_held()
   149  if (bo) {  in xe_bo_unlock_vm_held()
   150  XE_WARN_ON(bo->vm && bo->ttm.base.resv != xe_vm_resv(bo->vm));  in xe_bo_unlock_vm_held()
   151  if (bo->vm)  in xe_bo_unlock_vm_held()
   176  xe_bo_put(bo);  in xe_bo_unpin_map_no_vm()
   196  XE_WARN_ON(bo->ggtt_node->base.size > bo->size);  in xe_bo_ggtt_addr()
  [all …]
|
xe_bo.c
   772  if (xe_bo_is_pinned(bo) && !xe_bo_is_user(bo)) {  in xe_bo_move()
   897  bo->ttm.ttm = xe_ttm_tt_create(&bo->ttm, 0);  in xe_bo_evict_pinned()
   954  if (WARN_ON(!bo->ttm.ttm && !xe_bo_is_stolen(bo)))  in xe_bo_restore_pinned()
  1117  if (bo->ggtt_node && bo->ggtt_node->base.size)  in xe_ttm_bo_destroy()
  1118  xe_ggtt_remove_bo(bo->tile->mem.ggtt, bo);  in xe_ttm_bo_destroy()
  1125  if (bo->vm && xe_bo_is_user(bo))  in xe_ttm_bo_destroy()
  1160  if (bo->vm && !xe_vm_in_fault_mode(bo->vm)) {  in xe_gem_object_close()
  1161  xe_assert(xe_bo_device(bo), xe_bo_is_user(bo));  in xe_gem_object_close()
  1248  if (!bo)  in xe_bo_alloc()
  1936  ret = ttm_bo_kmap(&bo->ttm, 0, bo->size >> PAGE_SHIFT, &bo->kmap);  in xe_bo_vmap()
  [all …]
|
xe_bo_evict.c
    30  struct xe_bo *bo;  in xe_bo_evict_all()  local
    66  if (!bo)  in xe_bo_evict_all()
    68  xe_bo_get(bo);  in xe_bo_evict_all()
    75  xe_bo_put(bo);  in xe_bo_evict_all()
   100  if (!bo)  in xe_bo_evict_all()
   102  xe_bo_get(bo);  in xe_bo_evict_all()
   109  xe_bo_put(bo);  in xe_bo_evict_all()
   140  if (!bo)  in xe_bo_restore_kernel()
   142  xe_bo_get(bo);  in xe_bo_restore_kernel()
   168  xe_bo_put(bo);  in xe_bo_restore_kernel()
  [all …]
|
/linux/drivers/gpu/drm/tegra/

gem.c
    72  map->bo = host1x_bo_get(bo);  in tegra_bo_pin()
   243  bo->mm = kzalloc(sizeof(*bo->mm), GFP_KERNEL);  in tegra_bo_iommu_map()
   250  bo->mm, bo->gem.size, PAGE_SIZE, 0, 0);  in tegra_bo_iommu_map()
   257  bo->iova = bo->mm->start;  in tegra_bo_iommu_map()
   259  bo->size = iommu_map_sgtable(tegra->domain, bo->iova, bo->sgt, prot);  in tegra_bo_iommu_map()
   284  iommu_unmap(tegra->domain, bo->iova, bo->size);  in tegra_bo_iommu_unmap()
   305  bo = kzalloc(sizeof(*bo), GFP_KERNEL);  in tegra_bo_alloc_object()
   339  dma_free_wc(drm->dev, bo->gem.size, bo->vaddr, bo->iova);  in tegra_bo_free()
   347  bo->pages = drm_gem_get_pages(&bo->gem);  in tegra_bo_get_pages()
   351  bo->num_pages = bo->gem.size >> PAGE_SHIFT;  in tegra_bo_get_pages()
  [all …]
|
/linux/drivers/gpu/drm/amd/amdgpu/

amdgpu_object.c
    64  drm_prime_gem_destroy(&bo->tbo.base, bo->tbo.sg);  in amdgpu_bo_destroy()
    67  kvfree(bo);  in amdgpu_bo_destroy()
   572  bo->allowed_domains = bo->preferred_domains;  in amdgpu_bo_create()
   631  *bo_ptr = bo;  in amdgpu_bo_create()
   739  r = ttm_bo_kmap(&bo->tbo, 0, PFN_UP(bo->tbo.base.size), &bo->kmap);  in amdgpu_bo_kmap()
   773  if (bo->kmap.bo)  in amdgpu_bo_kunmap()
   792  return bo;  in amdgpu_bo_ref()
   807  *bo = NULL;  in amdgpu_bo_unref()
   881  r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);  in amdgpu_bo_pin()
  1251  && bo->base.resv != &bo->base._resv);  in amdgpu_bo_release_notify()
  [all …]
|
amdgpu_dma_buf.c
    94  amdgpu_bo_unpin(bo);  in amdgpu_dma_buf_unpin()
   120  if (!bo->tbo.pin_count) {  in amdgpu_dma_buf_map()
   131  r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);  in amdgpu_dma_buf_map()
   142  bo->tbo.ttm->pages,  in amdgpu_dma_buf_map()
   224  if (!bo->tbo.pin_count &&  in amdgpu_dma_buf_begin_cpu_access()
   227  ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);  in amdgpu_dma_buf_begin_cpu_access()
   230  amdgpu_bo_unreserve(bo);  in amdgpu_dma_buf_begin_cpu_access()
   292  struct amdgpu_bo *bo;  in amdgpu_dma_buf_create_obj()  local
   313  bo = gem_to_amdgpu_bo(gobj);  in amdgpu_dma_buf_create_obj()
   349  if (!bo->tbo.resource || bo->tbo.resource->mem_type == TTM_PL_SYSTEM)  in amdgpu_dma_buf_move_notify()
  [all …]
|
amdgpu_object.h
    58  void (*destroy)(struct ttm_buffer_object *bo);
   129  struct amdgpu_bo bo;  member
   138  struct amdgpu_bo bo;  member
   228  ttm_bo_unreserve(&bo->tbo);  in amdgpu_bo_unreserve()
   233  return bo->tbo.base.size;  in amdgpu_bo_size()
   302  void *amdgpu_bo_kptr(struct amdgpu_bo *bo);
   303  void amdgpu_bo_kunmap(struct amdgpu_bo *bo);
   305  void amdgpu_bo_unref(struct amdgpu_bo **bo);
   307  void amdgpu_bo_unpin(struct amdgpu_bo *bo);
   328  u64 amdgpu_bo_gpu_offset(struct amdgpu_bo *bo);
  [all …]
|
/linux/drivers/gpu/drm/vc4/

vc4_bo.c
   380  if (bo)  in vc4_bo_get_from_cache()
   383  return bo;  in vc4_bo_get_from_cache()
   402  bo = kzalloc(sizeof(*bo), GFP_KERNEL);  in vc4_create_object()
   403  if (!bo)  in vc4_create_object()
   438  if (bo) {  in vc4_bo_create()
   440  memset(bo->base.vaddr, 0, bo->base.base.size);  in vc4_bo_create()
   441  return bo;  in vc4_bo_create()
   488  return bo;  in vc4_bo_create()
   507  if (IS_ERR(bo))  in vc4_bo_dumb_create()
   798  if (IS_ERR(bo))  in vc4_create_bo_ioctl()
  [all …]
|
/linux/include/drm/ttm/

ttm_bo.h
   164  struct ttm_buffer_object *bo;  member
   235  kref_get(&bo->kref);  in ttm_bo_get()
   253  return bo;  in ttm_bo_get_unless_zero()
   333  spin_lock(&bo->bdev->lru_lock);  in ttm_bo_move_to_lru_tail_unlocked()
   334  ttm_bo_move_to_lru_tail(bo);  in ttm_bo_move_to_lru_tail_unlocked()
   335  spin_unlock(&bo->bdev->lru_lock);  in ttm_bo_move_to_lru_tail_unlocked()
   341  WARN_ON(bo->resource);  in ttm_bo_assign_mem()
   342  bo->resource = new_mem;  in ttm_bo_assign_mem()
   355  ttm_resource_free(bo, &bo->resource);  in ttm_bo_move_null()
   356  ttm_bo_assign_mem(bo, new_mem);  in ttm_bo_move_null()
  [all …]
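
Two of the inline helpers matched here encode the resource handoff: ttm_bo_assign_mem() warns if the BO still owns a resource, and ttm_bo_move_null() frees the old resource before assigning the new one. A compact model of that handoff, with free() standing in for ttm_resource_free():

    #include <assert.h>
    #include <stdlib.h>

    struct resource { int placeholder; };  /* stands in for struct ttm_resource */
    struct bo       { struct resource *resource; };

    static void bo_assign_mem(struct bo *bo, struct resource *new_mem)
    {
            assert(!bo->resource);   /* mirrors the WARN_ON(bo->resource) above */
            bo->resource = new_mem;
    }

    static void bo_move_null(struct bo *bo, struct resource *new_mem)
    {
            free(bo->resource);      /* ttm_resource_free() also NULLs the pointer */
            bo->resource = NULL;
            bo_assign_mem(bo, new_mem);
    }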
|
/linux/drivers/gpu/drm/lima/

lima_gem.c
    27  size_t new_size = bo->heap_size ? bo->heap_size * 2 :  in lima_heap_alloc()
    32  if (bo->heap_size >= bo->base.base.size)  in lima_heap_alloc()
    39  if (bo->base.pages) {  in lima_heap_alloc()
    72  if (bo->base.sgt) {  in lima_heap_alloc()
    76  bo->base.sgt = kmalloc(sizeof(*bo->base.sgt), GFP_KERNEL);  in lima_heap_alloc()
   115  struct lima_bo *bo;  in lima_gem_create_handle()  local
   185  if (bo->heap_size)  in lima_gem_pin()
   195  if (bo->heap_size)  in lima_gem_vmap()
   229  bo = kzalloc(sizeof(*bo), GFP_KERNEL);  in lima_gem_create_object()
   230  if (!bo)  in lima_gem_create_object()
  [all …]
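
lima_heap_alloc() grows the heap geometrically: double the current backing, starting from an initial size that the match truncates away, and give up once the heap already covers the whole object. A sketch of that policy; the initial-size constant and the clamp to the object size are assumptions:

    #include <stddef.h>
    #include <errno.h>

    #define HEAP_INIT_SIZE (64 * 1024)   /* hypothetical initial chunk */

    /* Next heap size, or a negative errno value once the heap has
     * already reached the object's full size. */
    static long heap_next_size(size_t heap_size, size_t object_size)
    {
            size_t new_size = heap_size ? heap_size * 2 : HEAP_INIT_SIZE;

            if (heap_size >= object_size)
                    return -ENOMEM;
            return (long)(new_size < object_size ? new_size : object_size);
    }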
|
/linux/drivers/gpu/drm/ttm/tests/

ttm_bo_test.c
   301  ttm_bo_pin(bo);  in ttm_bo_unreserve_pinned()
   406  bo->resource = res;  in ttm_bo_put_basic()
   413  ttm_bo_put(bo);  in ttm_bo_put_basic()
   466  ttm_bo_put(bo);  in ttm_bo_put_shared_resv()
   488  ttm_bo_pin(bo);  in ttm_bo_pin_basic()
   527  ttm_bo_pin(bo);  in ttm_bo_pin_unpin_resource()
   537  ttm_bo_unpin(bo);  in ttm_bo_pin_unpin_resource()
   580  ttm_bo_pin(bo);  in ttm_bo_multiple_pin_one_unpin()
   581  ttm_bo_pin(bo);  in ttm_bo_multiple_pin_one_unpin()
   592  ttm_bo_unpin(bo);  in ttm_bo_multiple_pin_one_unpin()
  [all …]
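
The test names matched here (ttm_bo_pin_basic, ttm_bo_multiple_pin_one_unpin) exercise the rule that pins nest: two pins need two unpins before the object can be evicted again, cf. the pin_count check in ttm_bo_swapout_cb() earlier in this listing. The counting model, reduced to its core:

    struct bo { unsigned int pin_count; };

    static void bo_pin(struct bo *bo)   { bo->pin_count++; }
    static void bo_unpin(struct bo *bo) { if (bo->pin_count) bo->pin_count--; }

    /* A BO is a candidate for eviction/swapout only at zero pins. */
    static int bo_evictable(const struct bo *bo) { return bo->pin_count == 0; }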
|
/linux/drivers/gpu/drm/panfrost/

panfrost_gem.c
    38  if (bo->sgts) {  in panfrost_gem_free_object()
    43  if (bo->sgts[i].sgl) {  in panfrost_gem_free_object()
    49  kvfree(bo->sgts);  in panfrost_gem_free_object()
   131  mapping->obj = bo;  in panfrost_gem_open()
   139  if (!bo->noexec)  in panfrost_gem_open()
   152  if (!bo->is_heap) {  in panfrost_gem_open()
   192  if (bo->is_heap)  in panfrost_gem_pin()
   203  if (bo->base.base.import_attach || bo->base.pages)  in panfrost_gem_status()
   216  if (bo->is_heap) {  in panfrost_gem_rss()
   285  return bo;  in panfrost_gem_create()
  [all …]
|
/linux/drivers/gpu/drm/xe/display/

intel_fb_bo.c
    18  xe_bo_lock(bo, false);  in intel_fb_bo_framebuffer_fini()
    19  xe_bo_unpin(bo);  in intel_fb_bo_framebuffer_fini()
    20  xe_bo_unlock(bo);  in intel_fb_bo_framebuffer_fini()
    22  xe_bo_put(bo);  in intel_fb_bo_framebuffer_fini()
    26  struct xe_bo *bo,  in intel_fb_bo_framebuffer_init()  argument
    40  xe_bo_get(bo);  in intel_fb_bo_framebuffer_init()
    54  ttm_bo_unreserve(&bo->ttm);  in intel_fb_bo_framebuffer_init()
    60  ttm_bo_unreserve(&bo->ttm);  in intel_fb_bo_framebuffer_init()
    64  xe_bo_put(bo);  in intel_fb_bo_framebuffer_init()
    78  bo = gem_to_xe_bo(gem);  in intel_fb_bo_lookup_valid_bo()
  [all …]
|
/linux/drivers/gpu/drm/panthor/

panthor_gem.c
    23  drm_gem_shmem_free(&bo->base);  in panthor_gem_free_object()
    37  if (IS_ERR_OR_NULL(bo))  in panthor_kernel_bo_destroy()
    40  vm = bo->vm;  in panthor_kernel_bo_destroy()
    41  panthor_kernel_bo_vunmap(bo);  in panthor_kernel_bo_destroy()
    47  ret = panthor_vm_unmap_range(vm, bo->va_node.start, bo->va_node.size);  in panthor_kernel_bo_destroy()
    52  drm_gem_object_put(bo->obj);  in panthor_kernel_bo_destroy()
    56  kfree(bo);  in panthor_kernel_bo_destroy()
    98  bo->flags = bo_flags;  in panthor_kernel_bo_create()
   118  bo->base.base.resv = bo->exclusive_vm_root_gem->resv;  in panthor_kernel_bo_create()
   217  bo->flags = flags;  in panthor_gem_create_with_handle()
  [all …]
|