/drivers/staging/media/atomisp/pci/hmm/

hmm_bo.c

| Line | Match | Context |
|-----:|-------|---------|
|   46 | memset(bo, 0, sizeof(*bo)); | __bo_init() |
|  246 | bo->pgnr = bo->pgnr - pgnr; | __bo_break_up() |
|  263 | if (!bo->prev && !bo->next) { | __bo_take_off_handling() |
|  270 | } else if (!bo->prev && bo->next) { | __bo_take_off_handling() |
|  280 | } else if (bo->prev && !bo->next) { | __bo_take_off_handling() |
|  288 | } else if (bo->prev && bo->next) { | __bo_take_off_handling() |
|  289 | bo->next->prev = bo->prev; | __bo_take_off_handling() |
|  290 | bo->prev->next = bo->next; | __bo_take_off_handling() |
|  473 | bo = __bo_merge(prev_bo, bo); | hmm_bo_release() |
|  480 | bo = __bo_merge(bo, next_bo); | hmm_bo_release() |

[all …]
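
The four branches in __bo_take_off_handling() are the standard doubly linked list unlink cases, enumerated by the NULL-ness of prev and next. A minimal user-space sketch of the same shape (hypothetical node type, not the driver's hmm_buffer_object):

```c
#include <stddef.h>

/* Hypothetical node standing in for the driver's buffer object;
 * only the list linkage matters for this sketch. */
struct node {
        struct node *prev;
        struct node *next;
};

/* Unlink a node, spelling out the same four prev/next combinations
 * the __bo_take_off_handling() hits above suggest. */
static void take_off(struct node *n, struct node **head, struct node **tail)
{
        if (!n->prev && !n->next) {             /* only element */
                *head = NULL;
                *tail = NULL;
        } else if (!n->prev && n->next) {       /* head of the list */
                n->next->prev = NULL;
                *head = n->next;
        } else if (n->prev && !n->next) {       /* tail of the list */
                n->prev->next = NULL;
                *tail = n->prev;
        } else {                                /* interior node */
                n->next->prev = n->prev;
                n->prev->next = n->next;
        }
        n->prev = NULL;
        n->next = NULL;
}

int main(void)
{
        struct node a = { NULL, NULL }, b = { NULL, NULL };
        struct node *head = &a, *tail = &b;

        a.next = &b;
        b.prev = &a;
        take_off(&a, &head, &tail);     /* exercises the "head" branch */
        return (head == &b && tail == &b) ? 0 : 1;
}
```

The interior case is exactly what lines 289-290 show; the other three branches exist only to patch up the list head and tail.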
|
hmm.c

| Line | Match | Context |
|-----:|-------|---------|
|   86 | if (!bo) { | __hmm_alloc() |
|  105 | return bo->start; | __hmm_alloc() |
|  110 | hmm_bo_unref(bo); | __hmm_alloc() |
|  134 | if (!bo) { | hmm_free() |
|  143 | hmm_bo_unref(bo); | hmm_free() |
|  148 | if (!bo) { | hmm_check_bo() |
|  421 | if (!bo) { | hmm_virt_to_phys() |
|  439 | if (!bo) { | hmm_mmap() |
|  456 | if (!bo) { | hmm_vmap() |
|  476 | if (!bo) { | hmm_flush_vmap() |

[all …]
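
The repeated `if (!bo)` guards suggest the same shape in every entry point: resolve an address to a buffer object, bail out when the lookup misses, and drop the reference once done. A sketch of that discipline; bo_lookup() and bo_unref() are hypothetical stand-ins, not the real atomisp helpers:

```c
#include <stdio.h>

struct bo { unsigned long start; };

/* Hypothetical stand-ins for the driver's lookup/refcount helpers. */
static struct bo *bo_lookup(unsigned long addr) { (void)addr; return NULL; }
static void bo_unref(struct bo *bo) { (void)bo; }

static int virt_to_start(unsigned long addr, unsigned long *out)
{
        struct bo *bo = bo_lookup(addr);

        if (!bo) {              /* every entry point guards the lookup */
                fprintf(stderr, "no bo found for %#lx\n", addr);
                return -1;
        }
        *out = bo->start;
        bo_unref(bo);           /* balance the reference the lookup took */
        return 0;
}

int main(void)
{
        unsigned long start;

        return virt_to_start(0x1000, &start) ? 0 : 1;
}
```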
|
/drivers/gpu/drm/ttm/

ttm_bo.c

| Line | Match | Context |
|-----:|-------|---------|
|  183 | bo->bdev->funcs->delete_mem_notify(bo); | ttm_bo_cleanup_memtype_use() |
|  186 | ttm_resource_free(bo, &bo->resource); | ttm_bo_cleanup_memtype_use() |
|  193 | if (bo->base.resv == &bo->base._resv) | ttm_bo_individualize_resv() |
|  209 | bo->base.resv = &bo->base._resv; | ttm_bo_individualize_resv() |
|  270 | bo->bdev->funcs->release_notify(bo); | ttm_bo_release() |
|  318 | bo->destroy(bo); | ttm_bo_release() |
|  463 | bo = res->bo; | ttm_bo_evict_first() |
|  632 | if (!bo->pin_count++ && bo->resource) | ttm_bo_pin() |
|  947 | bo->base.resv = &bo->base._resv; | ttm_bo_init_reserved() |
| 1175 | bo->bdev->funcs->swap_notify(bo); | ttm_bo_swapout_cb() |

[all …]
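
The hit at line 632, `if (!bo->pin_count++ && bo->resource)`, leans on post-increment returning the old value: the guarded work runs only on the 0 to 1 transition. A compact sketch of that counting (simplified struct, no locking; the real calls run under the buffer's reservation lock):

```c
#include <assert.h>

/* Minimal pin-count sketch: the post-increment test fires the
 * "first pin" work exactly once, on the 0 -> 1 transition. */
struct obj {
        unsigned int pin_count;
        int moves_blocked;
};

static void pin(struct obj *o)
{
        if (!o->pin_count++)    /* old value was 0: first pin */
                o->moves_blocked = 1;
}

static void unpin(struct obj *o)
{
        assert(o->pin_count > 0);
        if (!--o->pin_count)    /* new value is 0: last unpin */
                o->moves_blocked = 0;
}

int main(void)
{
        struct obj o = { 0, 0 };

        pin(&o);
        pin(&o);
        unpin(&o);
        assert(o.pin_count == 1 && o.moves_blocked);
        unpin(&o);
        assert(o.pin_count == 0 && !o.moves_blocked);
        return 0;
}
```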
|
ttm_bo_util.c

| Line | Match | Context |
|-----:|-------|---------|
|  277 | fbo->bo = bo; | ttm_buffer_object_transfer() |
|  404 | if (!bo->resource->bus.is_iomem && bo->ttm->pages && bo->ttm->pages[page]) | ttm_bo_kmap_try_from_panic() |
|  435 | map->bo = bo; | ttm_bo_kmap() |
|  584 | ttm_mem_io_free(bo->bdev, bo->resource); | ttm_bo_vunmap() |
|  602 | ttm_resource_free(bo, &bo->resource); | ttm_bo_wait_free_node() |
|  663 | ttm_resource_free(bo, &bo->resource); | ttm_bo_move_pipeline_evict() |
|  759 | ttm_tt_unpopulate(bo->bdev, bo->ttm); | ttm_bo_pipeline_gutting() |
|  763 | ttm_resource_free(bo, &bo->resource); | ttm_bo_pipeline_gutting() |
|  982 | bo = res->bo; | __ttm_bo_lru_cursor_next() |
| 1006 | curs->bo = bo; | __ttm_bo_lru_cursor_next() |

[all …]
|
ttm_bo_vm.c

| Line | Match | Context |
|-----:|-------|---------|
|   63 | drm_gem_object_get(&bo->base); | ttm_bo_vm_fault_idle() |
|   68 | dma_resv_unlock(bo->base.resv); | ttm_bo_vm_fault_idle() |
|   69 | drm_gem_object_put(&bo->base); | ttm_bo_vm_fault_idle() |
|  154 | if (bo->ttm && (bo->ttm->page_flags & TTM_TT_FLAG_EXTERNAL)) { | ttm_bo_vm_reserve() |
|  220 | prot = ttm_io_prot(bo, bo->resource, prot); | ttm_bo_vm_fault_reserved() |
|  227 | ttm = bo->ttm; | ttm_bo_vm_fault_reserved() |
|  357 | drm_gem_object_get(&bo->base); | ttm_bo_vm_open() |
|  365 | drm_gem_object_put(&bo->base); | ttm_bo_vm_close() |
|  451 | ttm_bo_unreserve(bo); | ttm_bo_access() |
|  490 | drm_gem_object_get(&bo->base); | ttm_bo_mmap_obj() |

[all …]
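
Lines 63-69 pair drm_gem_object_get() with dma_resv_unlock() and a later drm_gem_object_put(): the fault path takes a reference before dropping the lock so the object cannot disappear while the thread waits. A user-space analogue of that ordering (hypothetical obj type with a manual refcount, not the DRM API):

```c
#include <pthread.h>
#include <stdlib.h>

/* Hypothetical refcounted object; mirrors the ordering, not DRM.
 * refcnt is protected by lock in this sketch. */
struct obj {
        int refcnt;
        pthread_mutex_t lock;
};

static void obj_get(struct obj *o) { o->refcnt++; }

static void obj_put(struct obj *o)
{
        if (--o->refcnt == 0)
                free(o);        /* last reference: release */
}

static void wait_idle(struct obj *o)
{
        /* Entered with o->lock held. Pin the object first so it
         * survives the window where we sleep without the lock. */
        obj_get(o);
        pthread_mutex_unlock(&o->lock);
        /* ... block on the hardware here ... */
        pthread_mutex_lock(&o->lock);
        obj_put(o);
}

int main(void)
{
        struct obj *o = malloc(sizeof(*o));

        if (!o)
                return 1;
        o->refcnt = 1;                  /* main's own reference */
        pthread_mutex_init(&o->lock, NULL);

        pthread_mutex_lock(&o->lock);
        wait_idle(o);                   /* briefly holds a second ref */
        pthread_mutex_unlock(&o->lock);

        pthread_mutex_destroy(&o->lock);
        obj_put(o);                     /* frees o */
        return 0;
}
```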
|
/drivers/accel/ivpu/

ivpu_gem.c

| Line | Match | Context |
|-----:|-------|---------|
|   31 | action, bo, bo->vpu_addr, ivpu_bo_size(bo), bo->ctx_id, | ivpu_dbg_bo() |
|   32 | (bool)bo->base.pages, (bool)bo->base.sgt, bo->mmu_mapped, bo->base.map_wc, | ivpu_dbg_bo() |
|  102 | bo->vpu_addr = bo->mm_node.start; | ivpu_bo_alloc_vpu_addr() |
|  124 | ivpu_mmu_context_unmap_sgt(vdev, bo->ctx, bo->vpu_addr, bo->base.sgt); | ivpu_bo_unbind_locked() |
|  129 | ivpu_mmu_context_remove_node(bo->ctx, &bo->mm_node); | ivpu_bo_unbind_locked() |
|  170 | bo = kzalloc(sizeof(*bo), GFP_KERNEL); | ivpu_gem_create_object() |
|  171 | if (!bo) | ivpu_gem_create_object() |
|  251 | return bo; | ivpu_bo_alloc() |
|  387 | return bo; | ivpu_bo_create() |
|  470 | bo, bo->ctx_id, bo->vpu_addr, bo->base.base.size, | ivpu_bo_print_info() |

[all …]
|
/drivers/gpu/drm/radeon/

radeon_object.c

| Line | Match | Context |
|-----:|-------|---------|
|   64 | drm_prime_gem_destroy(&bo->tbo.base, bo->tbo.sg); | radeon_ttm_bo_destroy() |
|   66 | kfree(bo); | radeon_ttm_bo_destroy() |
|  234 | r = ttm_bo_kmap(&bo->tbo, 0, PFN_UP(bo->tbo.base.size), &bo->kmap); | radeon_bo_kmap() |
|  261 | return bo; | radeon_bo_ref() |
|  322 | bo->rdev->vram_pin_size += radeon_bo_size(bo); | radeon_bo_pin_restricted() |
|  324 | bo->rdev->gart_pin_size += radeon_bo_size(bo); | radeon_bo_pin_restricted() |
|  326 | dev_err(bo->rdev->dev, "%p pin failed\n", bo); | radeon_bo_pin_restricted() |
|  341 | bo->rdev->vram_pin_size -= radeon_bo_size(bo); | radeon_bo_unpin() |
|  343 | bo->rdev->gart_pin_size -= radeon_bo_size(bo); | radeon_bo_unpin() |
|  376 | &bo->tbo.base, bo, (unsigned long)bo->tbo.base.size, | radeon_bo_force_delete() |

[all …]
|
radeon_object.h

| Line | Match | Context |
|-----:|-------|---------|
|   68 | r = ttm_bo_reserve(&bo->tbo, !no_intr, false, NULL); | radeon_bo_reserve() |
|   71 | dev_err(bo->rdev->dev, "%p reserve failed\n", bo); | radeon_bo_reserve() |
|   79 | ttm_bo_unreserve(&bo->tbo); | radeon_bo_unreserve() |
|   96 | rdev = radeon_get_rdev(bo->tbo.bdev); | radeon_bo_gpu_offset() |
|   98 | switch (bo->tbo.resource->mem_type) { | radeon_bo_gpu_offset() |
|  107 | return (bo->tbo.resource->start << PAGE_SHIFT) + start; | radeon_bo_gpu_offset() |
|  112 | return bo->tbo.base.size; | radeon_bo_size() |
|  117 | return bo->tbo.base.size / RADEON_GPU_PAGE_SIZE; | radeon_bo_ngpu_pages() |
|  143 | extern void radeon_bo_kunmap(struct radeon_bo *bo); | |
|  145 | extern void radeon_bo_unref(struct radeon_bo **bo); | |

[all …]
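
Line 107 converts resource->start, which TTM keeps in pages, into bytes and adds a placement-dependent base picked by the switch at line 98. A sketch of the arithmetic with made-up aperture bases (the real ones come from the device):

```c
#include <inttypes.h>
#include <stdio.h>

#define PAGE_SHIFT 12   /* assuming 4 KiB pages */

enum mem_type { MEM_VRAM, MEM_GTT };

/* Hypothetical aperture bases; placeholders for this sketch only. */
static uint64_t aperture_base(enum mem_type type)
{
        return type == MEM_VRAM ? 0x0ULL : 0x100000000ULL;
}

/* resource->start is kept in pages: shift into bytes, add the base. */
static uint64_t gpu_offset(enum mem_type type, uint64_t start_page)
{
        return (start_page << PAGE_SHIFT) + aperture_base(type);
}

int main(void)
{
        printf("%#" PRIx64 "\n", gpu_offset(MEM_GTT, 42));
        return 0;
}
```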
|
/drivers/gpu/drm/qxl/

qxl_object.c

| Line | Match | Context |
|-----:|-------|---------|
|   46 | kfree(bo); | qxl_ttm_bo_destroy() |
|  125 | kfree(bo); | qxl_bo_create() |
|  167 | r = ttm_bo_vmap(&bo->tbo, &bo->map); | qxl_bo_vmap_locked() |
|  176 | bo->kptr = (void *)bo->map.vaddr_iomem; | qxl_bo_vmap_locked() |
|  178 | bo->kptr = bo->map.vaddr; | qxl_bo_vmap_locked() |
|  249 | ttm_bo_vunmap(&bo->tbo, &bo->map); | qxl_bo_vunmap_locked() |
|  285 | *bo = NULL; | qxl_bo_unref() |
|  291 | return bo; | qxl_bo_ref() |
|  306 | qxl_ttm_placement_from_domain(bo, bo->type); | qxl_bo_pin_locked() |
|  307 | r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); | qxl_bo_pin_locked() |

[all …]
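
Lines 176-178 pick the kernel pointer from either map.vaddr_iomem or map.vaddr, depending on where ttm_bo_vmap() placed the mapping. A simplified sketch of that two-flavour map (plain pointers only; in the kernel the iomem member carries an __iomem annotation, which is why line 176 needs a cast):

```c
#include <stdbool.h>
#include <stddef.h>

/* Simplified stand-in for the kernel's iosys_map idea: one struct,
 * two pointer flavours, and a flag saying which member is live. */
struct map {
        bool is_iomem;
        union {
                void *vaddr;            /* system memory mapping */
                void *vaddr_iomem;      /* I/O memory mapping */
        };
};

static void *kptr_from_map(const struct map *m)
{
        return m->is_iomem ? m->vaddr_iomem : m->vaddr;
}

int main(void)
{
        struct map m = { .is_iomem = false, .vaddr = NULL };

        return kptr_from_map(&m) == NULL ? 0 : 1;
}
```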
|
/drivers/gpu/drm/xe/

xe_bo.h

| Line | Match | Context |
|-----:|-------|---------|
|  140 | #define xe_bo_device(bo) ttm_to_xe_device((bo)->ttm.bdev) | (argument) |
|  144 | if (bo) | xe_bo_get() |
|  147 | return bo; | xe_bo_get() |
|  171 | return bo; | xe_bo_get_unless_zero() |
|  176 | if (bo) | __xe_bo_unset_bulk_move() |
|  182 | if (bo) | xe_bo_assert_held() |
|  192 | if (bo) { | xe_bo_unlock_vm_held() |
|  193 | XE_WARN_ON(bo->vm && bo->ttm.base.resv != xe_vm_resv(bo->vm)); | xe_bo_unlock_vm_held() |
|  194 | if (bo->vm) | xe_bo_unlock_vm_held() |
|  267 | xe_assert(xe_bo_device(bo), bo->tile); | xe_bo_ggtt_addr() |

[all …]
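
The `if (bo)` / `return bo` pair at lines 144-147 is the NULL-tolerant get idiom: take a reference only when the pointer is non-NULL, and hand the pointer back so calls compose. A sketch with a hypothetical manual refcount (xe really bumps the embedded GEM object's count):

```c
#include <stddef.h>

struct bo { int refcnt; };      /* hypothetical; xe wraps a GEM object */

/* NULL-tolerant get: safe on a maybe-NULL pointer, and it returns
 * its argument so it composes, e.g. use_bo(bo_get(bo)). */
static struct bo *bo_get(struct bo *bo)
{
        if (bo)
                bo->refcnt++;
        return bo;
}

int main(void)
{
        struct bo b = { 1 };

        return (bo_get(NULL) == NULL && bo_get(&b)->refcnt == 2) ? 0 : 1;
}
```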
|
xe_bo.c

| Line | Match | Context |
|-----:|-------|---------|
|  100 | resource_is_stolen_vram(xe_bo_device(bo), bo->ttm.resource); | xe_bo_is_vram() |
| 1641 | if (bo->vm && xe_bo_is_user(bo)) | xe_ttm_bo_destroy() |
| 1652 | kfree(bo); | xe_ttm_bo_destroy() |
| 1679 | if (bo->vm && !xe_vm_in_fault_mode(bo->vm)) { | xe_gem_object_close() |
| 1680 | xe_assert(xe_bo_device(bo), xe_bo_is_user(bo)); | xe_gem_object_close() |
| 1804 | struct xe_bo *bo = kzalloc(sizeof(*bo), GFP_KERNEL); | xe_bo_alloc() (local) |
| 1806 | if (!bo) | xe_bo_alloc() |
| 1809 | return bo; | xe_bo_alloc() |
| 2484 | if (!xe_bo_is_vram(bo) && !xe_bo_is_stolen(bo)) { | __xe_bo_addr() |
| 2529 | ret = ttm_bo_kmap(&bo->ttm, 0, xe_bo_size(bo) >> PAGE_SHIFT, &bo->kmap); | xe_bo_vmap() |

[all …]
|
/drivers/gpu/drm/tegra/

gem.c

| Line | Match | Context |
|-----:|-------|---------|
|   71 | map->bo = host1x_bo_get(bo); | tegra_bo_pin() |
|  242 | bo->mm = kzalloc(sizeof(*bo->mm), GFP_KERNEL); | tegra_bo_iommu_map() |
|  249 | bo->mm, bo->gem.size, PAGE_SIZE, 0, 0); | tegra_bo_iommu_map() |
|  256 | bo->iova = bo->mm->start; | tegra_bo_iommu_map() |
|  258 | bo->size = iommu_map_sgtable(tegra->domain, bo->iova, bo->sgt, prot); | tegra_bo_iommu_map() |
|  283 | iommu_unmap(tegra->domain, bo->iova, bo->size); | tegra_bo_iommu_unmap() |
|  304 | bo = kzalloc(sizeof(*bo), GFP_KERNEL); | tegra_bo_alloc_object() |
|  338 | dma_free_wc(drm->dev, bo->gem.size, bo->vaddr, bo->iova); | tegra_bo_free() |
|  346 | bo->pages = drm_gem_get_pages(&bo->gem); | tegra_bo_get_pages() |
|  350 | bo->num_pages = bo->gem.size >> PAGE_SHIFT; | tegra_bo_get_pages() |

[all …]
|
/drivers/gpu/drm/amd/amdgpu/

amdgpu_object.c

| Line | Match | Context |
|-----:|-------|---------|
|   67 | drm_prime_gem_destroy(&bo->tbo.base, bo->tbo.sg); | amdgpu_bo_destroy() |
|   70 | kvfree(bo); | amdgpu_bo_destroy() |
|  385 | if (!(*bo)) { | amdgpu_bo_create_isp_user() |
|  671 | bo->allowed_domains = bo->preferred_domains; | amdgpu_bo_create() |
|  720 | r = amdgpu_ttm_clear_buffer(bo, bo->tbo.base.resv, &fence); | amdgpu_bo_create() |
|  730 | *bo_ptr = bo; | amdgpu_bo_create() |
|  838 | r = ttm_bo_kmap(&bo->tbo, 0, PFN_UP(bo->tbo.base.size), &bo->kmap); | amdgpu_bo_kmap() |
|  872 | if (bo->kmap.bo) | amdgpu_bo_kunmap() |
|  891 | return bo; | amdgpu_bo_ref() |
|  906 | *bo = NULL; | amdgpu_bo_unref() |

[all …]
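
Line 838 maps the whole object by passing PFN_UP(size) pages to ttm_bo_kmap(). PFN_UP() is plain round-up division of a byte count into pages; a self-checking sketch, assuming 4 KiB pages:

```c
#include <assert.h>

#define PAGE_SHIFT 12                   /* assuming 4 KiB pages */
#define PAGE_SIZE  (1UL << PAGE_SHIFT)

/* PFN_UP as the kernel defines it: bytes rounded up to whole pages.
 * This is the page count handed to ttm_bo_kmap() above. */
#define PFN_UP(x)  (((x) + PAGE_SIZE - 1) >> PAGE_SHIFT)

int main(void)
{
        assert(PFN_UP(1) == 1);
        assert(PFN_UP(PAGE_SIZE) == 1);
        assert(PFN_UP(PAGE_SIZE + 1) == 2);
        return 0;
}
```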
|
amdgpu_dma_buf.c

| Line | Match | Context |
|-----:|-------|---------|
|   89 | amdgpu_vm_bo_update_shared(bo); | amdgpu_dma_buf_attach() |
|  146 | amdgpu_bo_unpin(bo); | amdgpu_dma_buf_unpin() |
|  172 | if (!bo->tbo.pin_count) { | amdgpu_dma_buf_map() |
|  183 | r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); | amdgpu_dma_buf_map() |
|  191 | bo->tbo.ttm->pages, | amdgpu_dma_buf_map() |
|  278 | if (!bo->tbo.pin_count && | amdgpu_dma_buf_begin_cpu_access() |
|  281 | ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); | amdgpu_dma_buf_begin_cpu_access() |
|  284 | amdgpu_bo_unreserve(bo); | amdgpu_dma_buf_begin_cpu_access() |
|  346 | struct amdgpu_bo *bo; | amdgpu_dma_buf_create_obj() (local) |
|  367 | bo = gem_to_amdgpu_bo(gobj); | amdgpu_dma_buf_create_obj() |

[all …]
|
amdgpu_object.h

| Line | Match | Context |
|-----:|-------|---------|
|   58 | void (*destroy)(struct ttm_buffer_object *bo); | |
|  129 | struct amdgpu_bo bo; | (member) |
|  138 | struct amdgpu_bo bo; | (member) |
|  201 | ttm_bo_unreserve(&bo->tbo); | amdgpu_bo_unreserve() |
|  206 | return bo->tbo.base.size; | amdgpu_bo_size() |
|  265 | struct amdgpu_bo **bo, | |
|  280 | void *amdgpu_bo_kptr(struct amdgpu_bo *bo); | |
|  281 | void amdgpu_bo_kunmap(struct amdgpu_bo *bo); | |
|  283 | void amdgpu_bo_unref(struct amdgpu_bo **bo); | |
|  285 | void amdgpu_bo_unpin(struct amdgpu_bo *bo); | |

[all …]
|
/drivers/gpu/drm/vc4/

vc4_bo.c

| Line | Match | Context |
|-----:|-------|---------|
|  380 | if (bo) | vc4_bo_get_from_cache() |
|  383 | return bo; | vc4_bo_get_from_cache() |
|  402 | bo = kzalloc(sizeof(*bo), GFP_KERNEL); | vc4_create_object() |
|  403 | if (!bo) | vc4_create_object() |
|  438 | if (bo) { | vc4_bo_create() |
|  440 | memset(bo->base.vaddr, 0, bo->base.base.size); | vc4_bo_create() |
|  441 | return bo; | vc4_bo_create() |
|  488 | return bo; | vc4_bo_create() |
|  507 | if (IS_ERR(bo)) | vc4_bo_dumb_create() |
|  798 | if (IS_ERR(bo)) | vc4_create_bo_ioctl() |

[all …]
|
/drivers/gpu/drm/panfrost/

panfrost_gem.c

| Line | Match | Context |
|-----:|-------|---------|
|   73 | if (bo->sgts) { | panfrost_gem_free_object() |
|   84 | kvfree(bo->sgts); | panfrost_gem_free_object() |
|  166 | mapping->obj = bo; | panfrost_gem_open() |
|  174 | if (!bo->noexec) | panfrost_gem_open() |
|  187 | if (!bo->is_heap) { | panfrost_gem_open() |
|  227 | if (bo->is_heap) | panfrost_gem_pin() |
|  238 | if (drm_gem_is_imported(&bo->base.base) \|\| bo->base.pages) | panfrost_gem_status() |
|  251 | if (bo->is_heap) { | panfrost_gem_rss() |
|  323 | return bo; | panfrost_gem_create() |
|  339 | bo->noexec = true; | panfrost_gem_prime_import_sg_table() |

[all …]
|
/drivers/gpu/drm/panthor/

panthor_gem.c

| Line | Match | Context |
|-----:|-------|---------|
|   95 | vm = bo->vm; | panthor_kernel_bo_destroy() |
|  102 | ret = panthor_vm_unmap_range(vm, bo->va_node.start, bo->va_node.size); | panthor_kernel_bo_destroy() |
|  111 | kfree(bo); | panthor_kernel_bo_destroy() |
|  155 | bo->flags = bo_flags; | panthor_kernel_bo_create() |
|  181 | bo->base.base.resv = bo->exclusive_vm_root_gem->resv; | panthor_kernel_bo_create() |
|  210 | if (drm_gem_is_imported(&bo->base.base) \|\| bo->base.pages) | panthor_gem_status() |
|  284 | bo->flags = flags; | panthor_gem_create_with_handle() |
|  289 | bo->base.base.resv = bo->exclusive_vm_root_gem->resv; | panthor_gem_create_with_handle() |
|  328 | if (drm_WARN_ON(bo->obj->dev, bo->obj->handle_count > 0)) | panthor_gem_kernel_bo_set_label() |
|  397 | resident_size = bo->base.pages ? bo->base.base.size : 0; | panthor_gem_debugfs_bo_print() |

[all …]
|
/drivers/gpu/drm/ttm/tests/

ttm_bo_test.c

| Line | Match | Context |
|-----:|-------|---------|
|  301 | ttm_bo_pin(bo); | ttm_bo_unreserve_pinned() |
|  406 | bo->resource = res; | ttm_bo_put_basic() |
|  413 | ttm_bo_put(bo); | ttm_bo_put_basic() |
|  466 | ttm_bo_put(bo); | ttm_bo_put_shared_resv() |
|  488 | ttm_bo_pin(bo); | ttm_bo_pin_basic() |
|  527 | ttm_bo_pin(bo); | ttm_bo_pin_unpin_resource() |
|  537 | ttm_bo_unpin(bo); | ttm_bo_pin_unpin_resource() |
|  580 | ttm_bo_pin(bo); | ttm_bo_multiple_pin_one_unpin() |
|  581 | ttm_bo_pin(bo); | ttm_bo_multiple_pin_one_unpin() |
|  592 | ttm_bo_unpin(bo); | ttm_bo_multiple_pin_one_unpin() |

[all …]
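
Judging by the hits at lines 580-592, ttm_bo_multiple_pin_one_unpin() checks that pins nest: two pins followed by one unpin must leave the object pinned. A hedged reconstruction of that case; the KUnit macros and ttm_bo_pin()/ttm_bo_unpin() are the real interfaces, while make_test_bo() and the elided reservation locking are placeholders:

```c
/* Sketch only: make_test_bo() is a hypothetical setup helper, and
 * the dma_resv locking that ttm_bo_pin()/ttm_bo_unpin() require is
 * elided for brevity. */
#include <kunit/test.h>
#include <drm/ttm/ttm_bo.h>

static void bo_multiple_pin_one_unpin(struct kunit *test)
{
        struct ttm_buffer_object *bo = make_test_bo(test);

        ttm_bo_pin(bo);
        ttm_bo_pin(bo);

        ttm_bo_unpin(bo);
        KUNIT_EXPECT_EQ(test, bo->pin_count, 1);        /* still pinned */

        ttm_bo_unpin(bo);
        KUNIT_EXPECT_EQ(test, bo->pin_count, 0);
}
```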
|
ttm_bo_validate_test.c

| Line | Match | Context |
|-----:|-------|---------|
|  116 | bo = kunit_kzalloc(test, sizeof(*bo), GFP_KERNEL); | ttm_bo_init_reserved_sys_man() |
|  146 | ttm_resource_free(bo, &bo->resource); | ttm_bo_init_reserved_sys_man() |
|  165 | bo = kunit_kzalloc(test, sizeof(*bo), GFP_KERNEL); | ttm_bo_init_reserved_mock_man() |
|  188 | ttm_resource_free(bo, &bo->resource); | ttm_bo_init_reserved_mock_man() |
|  205 | bo = kunit_kzalloc(test, sizeof(*bo), GFP_KERNEL); | ttm_bo_init_reserved_resv() |
|  223 | ttm_resource_free(bo, &bo->resource); | ttm_bo_init_reserved_resv() |
|  244 | bo = kunit_kzalloc(test, sizeof(*bo), GFP_KERNEL); | ttm_bo_validate_basic() |
|  390 | bo = kunit_kzalloc(test, sizeof(*bo), GFP_KERNEL); | ttm_bo_validate_same_placement() |
|  431 | bo = kunit_kzalloc(test, sizeof(*bo), GFP_KERNEL); | ttm_bo_validate_busy_placement() |
|  478 | bo = kunit_kzalloc(test, sizeof(*bo), GFP_KERNEL); | ttm_bo_validate_multihop() |

[all …]
|
ttm_tt_test.c

| Line | Match | Context |
|-----:|-------|---------|
|   42 | struct ttm_buffer_object *bo; | ttm_tt_init_basic() (local) |
|   69 | struct ttm_buffer_object *bo; | ttm_tt_init_misaligned() (local) |
|   82 | bo->base.size += 1; | ttm_tt_init_misaligned() |
|   92 | struct ttm_buffer_object *bo; | ttm_tt_fini_basic() (local) |
|  112 | struct ttm_buffer_object *bo; | ttm_tt_fini_sg() (local) |
|  132 | struct ttm_buffer_object *bo; | ttm_tt_fini_shmem() (local) |
|  155 | struct ttm_buffer_object *bo; | ttm_tt_create_basic() (local) |
|  169 | kfree(bo->ttm); | ttm_tt_create_basic() |
|  174 | struct ttm_buffer_object *bo; | ttm_tt_create_invalid_bo_type() (local) |
|  190 | struct ttm_buffer_object *bo; | ttm_tt_create_ttm_exists() (local) |

[all …]
|
/drivers/gpu/drm/lima/

lima_gem.c

| Line | Match | Context |
|-----:|-------|---------|
|   27 | size_t new_size = bo->heap_size ? bo->heap_size * 2 : | lima_heap_alloc() |
|   32 | if (bo->heap_size >= bo->base.base.size) | lima_heap_alloc() |
|   39 | if (bo->base.pages) { | lima_heap_alloc() |
|   72 | if (bo->base.sgt) { | lima_heap_alloc() |
|   76 | bo->base.sgt = kmalloc(sizeof(*bo->base.sgt), GFP_KERNEL); | lima_heap_alloc() |
|  115 | struct lima_bo *bo; | lima_gem_create_handle() (local) |
|  185 | if (bo->heap_size) | lima_gem_pin() |
|  195 | if (bo->heap_size) | lima_gem_vmap() |
|  229 | bo = kzalloc(sizeof(*bo), GFP_KERNEL); | lima_gem_create_object() |
|  230 | if (!bo) | lima_gem_create_object() |

[all …]
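
Line 27 grows the heap geometrically: double the current backing, or start from an initial size, and line 32 bails out once the full BO size has been reached. A sketch of the size progression, with a made-up minimum:

```c
#include <assert.h>
#include <stddef.h>

#define MIN_HEAP (64 * 1024UL)  /* hypothetical initial size */

/* Geometric growth as lines 27-32 suggest: double the current
 * backing (or start from a minimum), clamped to the BO's full size. */
static size_t next_heap_size(size_t cur, size_t total)
{
        size_t next = cur ? cur * 2 : MIN_HEAP;

        return next < total ? next : total;
}

int main(void)
{
        assert(next_heap_size(0, 1UL << 20) == MIN_HEAP);
        assert(next_heap_size(512UL << 10, 1UL << 20) == (1UL << 20));
        assert(next_heap_size(1UL << 20, 1UL << 20) == (1UL << 20));
        return 0;
}
```

Doubling keeps the number of refill faults logarithmic in the final heap size, at the cost of up to 2x over-allocation before the clamp kicks in.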
|
/drivers/gpu/drm/xe/tests/

xe_bo.c

| Line | Match | Context |
|-----:|-------|---------|
|   46 | fence = xe_migrate_clear(tile->migrate, bo, bo->ttm.resource, | ccs_test_migrate() |
|   84 | ttm = bo->ttm.ttm; | ccs_test_migrate() |
|  129 | struct xe_bo *bo; | ccs_test_run_tile() (local) |
|  167 | xe_bo_put(bo); | ccs_test_run_tile() |
|  297 | err = xe_bo_validate(bo, bo->vm, false); | evict_test_run_tile() |
|  324 | xe_bo_put(bo); | evict_test_run_tile() |
|  337 | xe_bo_put(bo); | evict_test_run_tile() |
|  502 | if (bo != ERR_PTR(-ENOMEM) && bo != ERR_PTR(-ENOSPC) && | shrink_test_run_device() |
|  503 | bo != ERR_PTR(-EINTR) && bo != ERR_PTR(-ERESTARTSYS)) | shrink_test_run_device() |
|  540 | link->bo = bo; | shrink_test_run_device() |

[all …]
|
/drivers/gpu/drm/virtio/

virtgpu_prime.c

| Line | Match | Context |
|-----:|-------|---------|
|  113 | bool blob = bo->host3d_blob \|\| bo->guest_blob; | virtgpu_gem_prime_export() |
|  182 | bo->sgt = sgt; | virtgpu_dma_buf_import_sgt() |
|  192 | if (bo->created) { | virtgpu_dma_buf_unmap() |
|  195 | if (bo->sgt) | virtgpu_dma_buf_unmap() |
|  199 | bo->sgt = NULL; | virtgpu_dma_buf_unmap() |
|  220 | if (bo->created) { | virtgpu_dma_buf_free_obj() |
|  262 | bo->guest_blob = true; | virtgpu_dma_buf_init_obj() |
|  318 | bo = kzalloc(sizeof(*bo), GFP_KERNEL); | virtgpu_gem_prime_import() |
|  319 | if (!bo) | virtgpu_gem_prime_import() |
|  322 | obj = &bo->base.base; | virtgpu_gem_prime_import() |

[all …]
|
/drivers/gpu/drm/v3d/

v3d_bo.c

| Line | Match | Context |
|-----:|-------|---------|
|   29 | if (bo->base.pages) | v3d_gem_status() |
|   43 | if (bo->vaddr) | v3d_free_object() |
|   81 | struct v3d_bo *bo; | v3d_create_object() (local) |
|   87 | bo = kzalloc(sizeof(*bo), GFP_KERNEL); | v3d_create_object() |
|   88 | if (!bo) | v3d_create_object() |
|  152 | struct v3d_bo *bo; | v3d_bo_create() (local) |
|  160 | bo->vaddr = NULL; | v3d_bo_create() |
|  166 | return bo; | v3d_bo_create() |
|  204 | vunmap(bo->vaddr); | v3d_put_bo_vaddr() |
|  205 | bo->vaddr = NULL; | v3d_put_bo_vaddr() |

[all …]
|