Lines matching refs:bo
Cross-reference hits for the identifier "bo" in the amdgpu buffer-object code (amdgpu_object.c). The leading numbers are line numbers in that source file; the trailing "local"/"argument" tags mark whether "bo" is bound as a local variable or a parameter on that line.
62 struct amdgpu_bo *bo = ttm_to_amdgpu_bo(tbo); in amdgpu_bo_destroy() local
64 amdgpu_bo_kunmap(bo); in amdgpu_bo_destroy()
66 if (drm_gem_is_imported(&bo->tbo.base)) in amdgpu_bo_destroy()
67 drm_prime_gem_destroy(&bo->tbo.base, bo->tbo.sg); in amdgpu_bo_destroy()
68 drm_gem_object_release(&bo->tbo.base); in amdgpu_bo_destroy()
69 amdgpu_bo_unref(&bo->parent); in amdgpu_bo_destroy()
70 kvfree(bo); in amdgpu_bo_destroy()
75 struct amdgpu_bo *bo = ttm_to_amdgpu_bo(tbo); in amdgpu_bo_user_destroy() local
78 ubo = to_amdgpu_bo_user(bo); in amdgpu_bo_user_destroy()
93 bool amdgpu_bo_is_amdgpu_bo(struct ttm_buffer_object *bo) in amdgpu_bo_is_amdgpu_bo() argument
95 if (bo->destroy == &amdgpu_bo_destroy || in amdgpu_bo_is_amdgpu_bo()
96 bo->destroy == &amdgpu_bo_user_destroy) in amdgpu_bo_is_amdgpu_bo()
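amdgpu_bo_is_amdgpu_bo() identifies an amdgpu BO purely by comparing the TTM destroy callback against the driver's two destructors, so it is safe to call on any ttm_buffer_object. A minimal sketch of the usual downcast pattern (the wrapping function is hypothetical):

    /* Sketch: only downcast TTM objects that this driver created. */
    static void my_inspect_ttm_bo(struct ttm_buffer_object *tbo)
    {
            struct amdgpu_bo *abo;

            if (!amdgpu_bo_is_amdgpu_bo(tbo))
                    return;         /* BO belongs to another TTM user */

            abo = ttm_to_amdgpu_bo(tbo);
            /* ... amdgpu_bo fields are now safe to use ... */
    }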
376 struct dma_buf *dma_buf, u32 domain, struct amdgpu_bo **bo, in amdgpu_bo_create_isp_user() argument
384 *bo = gem_to_amdgpu_bo(gem_obj); in amdgpu_bo_create_isp_user()
385 if (!(*bo)) { in amdgpu_bo_create_isp_user()
390 r = amdgpu_bo_reserve(*bo, false); in amdgpu_bo_create_isp_user()
396 r = amdgpu_bo_pin(*bo, domain); in amdgpu_bo_create_isp_user()
402 r = amdgpu_ttm_alloc_gart(&(*bo)->tbo); in amdgpu_bo_create_isp_user()
404 dev_err(adev->dev, "%p bind failed\n", *bo); in amdgpu_bo_create_isp_user()
409 *gpu_addr = amdgpu_bo_gpu_offset(*bo); in amdgpu_bo_create_isp_user()
411 amdgpu_bo_unreserve(*bo); in amdgpu_bo_create_isp_user()
416 amdgpu_bo_unpin(*bo); in amdgpu_bo_create_isp_user()
418 amdgpu_bo_unreserve(*bo); in amdgpu_bo_create_isp_user()
419 amdgpu_bo_unref(bo); in amdgpu_bo_create_isp_user()
503 void amdgpu_bo_free_kernel(struct amdgpu_bo **bo, u64 *gpu_addr, in amdgpu_bo_free_kernel() argument
506 if (*bo == NULL) in amdgpu_bo_free_kernel()
509 WARN_ON(amdgpu_ttm_adev((*bo)->tbo.bdev)->in_suspend); in amdgpu_bo_free_kernel()
511 if (likely(amdgpu_bo_reserve(*bo, true) == 0)) { in amdgpu_bo_free_kernel()
513 amdgpu_bo_kunmap(*bo); in amdgpu_bo_free_kernel()
515 amdgpu_bo_unpin(*bo); in amdgpu_bo_free_kernel()
516 amdgpu_bo_unreserve(*bo); in amdgpu_bo_free_kernel()
518 amdgpu_bo_unref(bo); in amdgpu_bo_free_kernel()
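amdgpu_bo_free_kernel() is the teardown half of the kernel-BO helpers: it warns if called during suspend, then kunmaps, unpins, unreserves and drops the last reference, leaving *bo NULL. A minimal lifecycle sketch, assuming the matching amdgpu_bo_create_kernel() helper (local names are illustrative):

    struct amdgpu_bo *bo = NULL;
    u64 gpu_addr;
    void *cpu_addr;
    int r;

    /* Pinned, CPU-mapped scratch buffer in GTT. */
    r = amdgpu_bo_create_kernel(adev, PAGE_SIZE, PAGE_SIZE,
                                AMDGPU_GEM_DOMAIN_GTT,
                                &bo, &gpu_addr, &cpu_addr);
    if (r)
            return r;

    /* ... use cpu_addr for CPU writes, gpu_addr in command buffers ... */

    amdgpu_bo_free_kernel(&bo, &gpu_addr, &cpu_addr);  /* bo is NULL afterwards */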
537 void amdgpu_bo_free_isp_user(struct amdgpu_bo *bo) in amdgpu_bo_free_isp_user() argument
539 if (bo == NULL) in amdgpu_bo_free_isp_user()
542 if (amdgpu_bo_reserve(bo, true) == 0) { in amdgpu_bo_free_isp_user()
543 amdgpu_bo_unpin(bo); in amdgpu_bo_free_isp_user()
544 amdgpu_bo_unreserve(bo); in amdgpu_bo_free_isp_user()
546 amdgpu_bo_unref(&bo); in amdgpu_bo_free_isp_user()
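The ISP helpers pair up: amdgpu_bo_create_isp_user() imports a dma-buf as a GEM object, reserves and pins it into the requested domain, binds it to GART and reports the GPU address, while amdgpu_bo_free_isp_user() unpins and drops the reference. A hedged sketch, assuming the create helper's full parameter list is (adev, dma_buf, domain, bo, gpu_addr) as the fragments above suggest:

    struct amdgpu_bo *bo;
    u64 gpu_addr;
    int r;

    r = amdgpu_bo_create_isp_user(adev, dma_buf, AMDGPU_GEM_DOMAIN_GTT,
                                  &bo, &gpu_addr);
    if (r)
            return r;

    /* ... hand gpu_addr to the ISP firmware/hardware ... */

    amdgpu_bo_free_isp_user(bo);    /* unpin + unref; NULL is tolerated */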
637 struct amdgpu_bo *bo; in amdgpu_bo_create() local
663 bo = kvzalloc(bp->bo_ptr_size, GFP_KERNEL); in amdgpu_bo_create()
664 if (bo == NULL) in amdgpu_bo_create()
666 drm_gem_private_object_init(adev_to_drm(adev), &bo->tbo.base, size); in amdgpu_bo_create()
667 bo->tbo.base.funcs = &amdgpu_gem_object_funcs; in amdgpu_bo_create()
668 bo->vm_bo = NULL; in amdgpu_bo_create()
669 bo->preferred_domains = bp->preferred_domain ? bp->preferred_domain : in amdgpu_bo_create()
671 bo->allowed_domains = bo->preferred_domains; in amdgpu_bo_create()
674 bo->allowed_domains == AMDGPU_GEM_DOMAIN_VRAM) in amdgpu_bo_create()
675 bo->allowed_domains |= AMDGPU_GEM_DOMAIN_GTT; in amdgpu_bo_create()
677 bo->flags = bp->flags; in amdgpu_bo_create()
681 bo->xcp_id = bp->xcp_id_plus1 - 1; in amdgpu_bo_create()
684 bo->xcp_id = 0; in amdgpu_bo_create()
686 if (!amdgpu_bo_support_uswc(bo->flags)) in amdgpu_bo_create()
687 bo->flags &= ~AMDGPU_GEM_CREATE_CPU_GTT_USWC; in amdgpu_bo_create()
689 bo->tbo.bdev = &adev->mman.bdev; in amdgpu_bo_create()
692 amdgpu_bo_placement_from_domain(bo, AMDGPU_GEM_DOMAIN_CPU); in amdgpu_bo_create()
694 amdgpu_bo_placement_from_domain(bo, bp->domain); in amdgpu_bo_create()
696 bo->tbo.priority = 2; in amdgpu_bo_create()
698 bo->tbo.priority = 1; in amdgpu_bo_create()
703 r = ttm_bo_init_reserved(&adev->mman.bdev, &bo->tbo, bp->type, in amdgpu_bo_create()
704 &bo->placement, page_align, &ctx, NULL, in amdgpu_bo_create()
710 amdgpu_res_cpu_visible(adev, bo->tbo.resource)) in amdgpu_bo_create()
717 bo->tbo.resource->mem_type == TTM_PL_VRAM) { in amdgpu_bo_create()
720 r = amdgpu_ttm_clear_buffer(bo, bo->tbo.base.resv, &fence); in amdgpu_bo_create()
724 dma_resv_add_fence(bo->tbo.base.resv, fence, in amdgpu_bo_create()
729 amdgpu_bo_unreserve(bo); in amdgpu_bo_create()
730 *bo_ptr = bo; in amdgpu_bo_create()
732 trace_amdgpu_bo_create(bo); in amdgpu_bo_create()
736 bo->flags &= ~AMDGPU_GEM_CREATE_CPU_ACCESS_REQUIRED; in amdgpu_bo_create()
742 dma_resv_unlock(bo->tbo.base.resv); in amdgpu_bo_create()
743 amdgpu_bo_unref(&bo); in amdgpu_bo_create()
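amdgpu_bo_create() is the central allocator: it kvzalloc()s the wrapper, initializes the embedded GEM object, derives allowed_domains from the preferred ones (adding GTT as a fallback for plain VRAM requests), drops USWC when unsupported, builds the placement and hands the BO to ttm_bo_init_reserved(); freshly allocated VRAM may additionally be cleared under a fence before the BO is unreserved and returned. A caller sketch using struct amdgpu_bo_param (field values are illustrative):

    struct amdgpu_bo_param bp;
    struct amdgpu_bo *bo;
    int r;

    memset(&bp, 0, sizeof(bp));
    bp.size = size;
    bp.byte_align = PAGE_SIZE;
    bp.domain = AMDGPU_GEM_DOMAIN_VRAM;
    bp.flags = AMDGPU_GEM_CREATE_VRAM_CONTIGUOUS;
    bp.type = ttm_bo_type_kernel;
    bp.resv = NULL;                            /* use the BO's own reservation */
    bp.bo_ptr_size = sizeof(struct amdgpu_bo); /* see the kvzalloc above */

    r = amdgpu_bo_create(adev, &bp, &bo);
    if (r)
            return r;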
818 int amdgpu_bo_kmap(struct amdgpu_bo *bo, void **ptr) in amdgpu_bo_kmap() argument
823 if (bo->flags & AMDGPU_GEM_CREATE_NO_CPU_ACCESS) in amdgpu_bo_kmap()
826 r = dma_resv_wait_timeout(bo->tbo.base.resv, DMA_RESV_USAGE_KERNEL, in amdgpu_bo_kmap()
831 kptr = amdgpu_bo_kptr(bo); in amdgpu_bo_kmap()
838 r = ttm_bo_kmap(&bo->tbo, 0, PFN_UP(bo->tbo.base.size), &bo->kmap); in amdgpu_bo_kmap()
843 *ptr = amdgpu_bo_kptr(bo); in amdgpu_bo_kmap()
857 void *amdgpu_bo_kptr(struct amdgpu_bo *bo) in amdgpu_bo_kptr() argument
861 return ttm_kmap_obj_virtual(&bo->kmap, &is_iomem); in amdgpu_bo_kptr()
870 void amdgpu_bo_kunmap(struct amdgpu_bo *bo) in amdgpu_bo_kunmap() argument
872 if (bo->kmap.bo) in amdgpu_bo_kunmap()
873 ttm_bo_kunmap(&bo->kmap); in amdgpu_bo_kunmap()
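amdgpu_bo_kmap() rejects BOs created with AMDGPU_GEM_CREATE_NO_CPU_ACCESS, waits for kernel fences on the reservation, and returns an existing mapping via amdgpu_bo_kptr() before falling back to ttm_bo_kmap(); amdgpu_bo_kunmap() is a no-op when nothing is mapped. A short sketch, assuming a GTT-backed BO (so the mapping is ordinary memory) and the caller taking the reservation:

    void *ptr;
    int r;

    r = amdgpu_bo_reserve(bo, false);
    if (r)
            return r;

    r = amdgpu_bo_kmap(bo, &ptr);
    if (!r) {
            memset(ptr, 0, amdgpu_bo_size(bo));   /* CPU access to the BO */
            amdgpu_bo_kunmap(bo);
    }
    amdgpu_bo_unreserve(bo);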
885 struct amdgpu_bo *amdgpu_bo_ref(struct amdgpu_bo *bo) in amdgpu_bo_ref() argument
887 if (bo == NULL) in amdgpu_bo_ref()
890 drm_gem_object_get(&bo->tbo.base); in amdgpu_bo_ref()
891 return bo; in amdgpu_bo_ref()
900 void amdgpu_bo_unref(struct amdgpu_bo **bo) in amdgpu_bo_unref() argument
902 if ((*bo) == NULL) in amdgpu_bo_unref()
905 drm_gem_object_put(&(*bo)->tbo.base); in amdgpu_bo_unref()
906 *bo = NULL; in amdgpu_bo_unref()
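Reference counting is delegated to the embedded GEM object: amdgpu_bo_ref() is drm_gem_object_get() behind a NULL check, and amdgpu_bo_unref() drops the reference and clears the caller's pointer so stale uses trip immediately. For example:

    struct amdgpu_bo *extra = amdgpu_bo_ref(bo);   /* take an extra reference */
    /* ... keep the BO alive across some asynchronous work ... */
    amdgpu_bo_unref(&extra);                       /* drop it; extra is now NULL */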
925 int amdgpu_bo_pin(struct amdgpu_bo *bo, u32 domain) in amdgpu_bo_pin() argument
927 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_bo_pin()
931 if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm)) in amdgpu_bo_pin()
935 if (bo->preferred_domains & domain) in amdgpu_bo_pin()
936 domain = bo->preferred_domains & domain; in amdgpu_bo_pin()
939 if (drm_gem_is_imported(&bo->tbo.base)) { in amdgpu_bo_pin()
946 if (bo->tbo.pin_count) { in amdgpu_bo_pin()
947 uint32_t mem_type = bo->tbo.resource->mem_type; in amdgpu_bo_pin()
948 uint32_t mem_flags = bo->tbo.resource->placement; in amdgpu_bo_pin()
954 (bo->flags & AMDGPU_GEM_CREATE_VRAM_CONTIGUOUS) && in amdgpu_bo_pin()
958 ttm_bo_pin(&bo->tbo); in amdgpu_bo_pin()
967 if (drm_gem_is_imported(&bo->tbo.base)) in amdgpu_bo_pin()
968 dma_buf_pin(bo->tbo.base.import_attach); in amdgpu_bo_pin()
971 if (!(bo->flags & AMDGPU_GEM_CREATE_NO_CPU_ACCESS)) in amdgpu_bo_pin()
972 bo->flags |= AMDGPU_GEM_CREATE_CPU_ACCESS_REQUIRED; in amdgpu_bo_pin()
973 amdgpu_bo_placement_from_domain(bo, domain); in amdgpu_bo_pin()
974 for (i = 0; i < bo->placement.num_placement; i++) { in amdgpu_bo_pin()
975 if (bo->flags & AMDGPU_GEM_CREATE_VRAM_CONTIGUOUS && in amdgpu_bo_pin()
976 bo->placements[i].mem_type == TTM_PL_VRAM) in amdgpu_bo_pin()
977 bo->placements[i].flags |= TTM_PL_FLAG_CONTIGUOUS; in amdgpu_bo_pin()
980 r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in amdgpu_bo_pin()
982 dev_err(adev->dev, "%p pin failed\n", bo); in amdgpu_bo_pin()
986 ttm_bo_pin(&bo->tbo); in amdgpu_bo_pin()
988 if (bo->tbo.resource->mem_type == TTM_PL_VRAM) { in amdgpu_bo_pin()
989 atomic64_add(amdgpu_bo_size(bo), &adev->vram_pin_size); in amdgpu_bo_pin()
990 atomic64_add(amdgpu_vram_mgr_bo_visible_size(bo), in amdgpu_bo_pin()
992 } else if (bo->tbo.resource->mem_type == TTM_PL_TT) { in amdgpu_bo_pin()
993 atomic64_add(amdgpu_bo_size(bo), &adev->gart_pin_size); in amdgpu_bo_pin()
1010 void amdgpu_bo_unpin(struct amdgpu_bo *bo) in amdgpu_bo_unpin() argument
1012 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_bo_unpin()
1014 ttm_bo_unpin(&bo->tbo); in amdgpu_bo_unpin()
1015 if (bo->tbo.pin_count) in amdgpu_bo_unpin()
1018 if (drm_gem_is_imported(&bo->tbo.base)) in amdgpu_bo_unpin()
1019 dma_buf_unpin(bo->tbo.base.import_attach); in amdgpu_bo_unpin()
1021 if (bo->tbo.resource->mem_type == TTM_PL_VRAM) { in amdgpu_bo_unpin()
1022 atomic64_sub(amdgpu_bo_size(bo), &adev->vram_pin_size); in amdgpu_bo_unpin()
1023 atomic64_sub(amdgpu_vram_mgr_bo_visible_size(bo), in amdgpu_bo_unpin()
1025 } else if (bo->tbo.resource->mem_type == TTM_PL_TT) { in amdgpu_bo_unpin()
1026 atomic64_sub(amdgpu_bo_size(bo), &adev->gart_pin_size); in amdgpu_bo_unpin()
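amdgpu_bo_pin() narrows the requested domain by the BO's preferred domains, refuses userptr BOs and domain changes on imported dma-bufs, short-circuits when the BO is already pinned in a compatible placement, and otherwise validates into the new placement (forcing contiguous VRAM when requested) before bumping the pin accounting; amdgpu_bo_unpin() reverses the accounting once the pin count drops to zero. A sketch of the usual pin/query/unpin sequence, with the reservation held around each step:

    u64 addr;
    int r;

    r = amdgpu_bo_reserve(bo, false);
    if (r)
            return r;

    r = amdgpu_bo_pin(bo, AMDGPU_GEM_DOMAIN_VRAM);
    if (r) {
            amdgpu_bo_unreserve(bo);
            return r;
    }
    addr = amdgpu_bo_gpu_offset(bo);   /* stable while the BO stays pinned */
    amdgpu_bo_unreserve(bo);

    /* ... later, when the address is no longer in use ... */
    if (amdgpu_bo_reserve(bo, true) == 0) {
            amdgpu_bo_unpin(bo);
            amdgpu_bo_unreserve(bo);
    }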
1115 int amdgpu_bo_set_tiling_flags(struct amdgpu_bo *bo, u64 tiling_flags) in amdgpu_bo_set_tiling_flags() argument
1117 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_bo_set_tiling_flags()
1120 BUG_ON(bo->tbo.type == ttm_bo_type_kernel); in amdgpu_bo_set_tiling_flags()
1125 ubo = to_amdgpu_bo_user(bo); in amdgpu_bo_set_tiling_flags()
1138 void amdgpu_bo_get_tiling_flags(struct amdgpu_bo *bo, u64 *tiling_flags) in amdgpu_bo_get_tiling_flags() argument
1142 BUG_ON(bo->tbo.type == ttm_bo_type_kernel); in amdgpu_bo_get_tiling_flags()
1143 dma_resv_assert_held(bo->tbo.base.resv); in amdgpu_bo_get_tiling_flags()
1144 ubo = to_amdgpu_bo_user(bo); in amdgpu_bo_get_tiling_flags()
1163 int amdgpu_bo_set_metadata(struct amdgpu_bo *bo, void *metadata, in amdgpu_bo_set_metadata() argument
1169 BUG_ON(bo->tbo.type == ttm_bo_type_kernel); in amdgpu_bo_set_metadata()
1170 ubo = to_amdgpu_bo_user(bo); in amdgpu_bo_set_metadata()
1210 int amdgpu_bo_get_metadata(struct amdgpu_bo *bo, void *buffer, in amdgpu_bo_get_metadata() argument
1219 BUG_ON(bo->tbo.type == ttm_bo_type_kernel); in amdgpu_bo_get_metadata()
1220 ubo = to_amdgpu_bo_user(bo); in amdgpu_bo_get_metadata()
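The tiling and metadata accessors apply only to user BOs (note the BUG_ON for ttm_bo_type_kernel) and reach the extra fields through to_amdgpu_bo_user(); the tiling getter additionally asserts that the reservation is held. A hedged sketch of round-tripping tiling flags (the metadata calls follow the same reserve/access pattern; the AMDGPU_TILING_SET() value is illustrative):

    u64 tiling;
    int r;

    r = amdgpu_bo_reserve(bo, false);
    if (r)
            return r;

    r = amdgpu_bo_set_tiling_flags(bo, AMDGPU_TILING_SET(SWIZZLE_MODE, 1));
    if (!r)
            amdgpu_bo_get_tiling_flags(bo, &tiling);

    amdgpu_bo_unreserve(bo);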
1248 void amdgpu_bo_move_notify(struct ttm_buffer_object *bo, in amdgpu_bo_move_notify() argument
1252 struct ttm_resource *old_mem = bo->resource; in amdgpu_bo_move_notify()
1255 if (!amdgpu_bo_is_amdgpu_bo(bo)) in amdgpu_bo_move_notify()
1258 abo = ttm_to_amdgpu_bo(bo); in amdgpu_bo_move_notify()
1279 void amdgpu_bo_release_notify(struct ttm_buffer_object *bo) in amdgpu_bo_release_notify() argument
1281 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->bdev); in amdgpu_bo_release_notify()
1286 if (!amdgpu_bo_is_amdgpu_bo(bo)) in amdgpu_bo_release_notify()
1289 abo = ttm_to_amdgpu_bo(bo); in amdgpu_bo_release_notify()
1302 if (WARN_ON_ONCE(!dma_resv_trylock(&bo->base._resv))) in amdgpu_bo_release_notify()
1307 if (!bo->resource || bo->resource->mem_type != TTM_PL_VRAM || in amdgpu_bo_release_notify()
1312 r = dma_resv_reserve_fences(&bo->base._resv, 1); in amdgpu_bo_release_notify()
1316 r = amdgpu_fill_buffer(abo, 0, &bo->base._resv, &fence, true); in amdgpu_bo_release_notify()
1320 amdgpu_vram_mgr_set_cleared(bo->resource); in amdgpu_bo_release_notify()
1321 dma_resv_add_fence(&bo->base._resv, fence, DMA_RESV_USAGE_KERNEL); in amdgpu_bo_release_notify()
1325 dma_resv_unlock(&bo->base._resv); in amdgpu_bo_release_notify()
1339 vm_fault_t amdgpu_bo_fault_reserve_notify(struct ttm_buffer_object *bo) in amdgpu_bo_fault_reserve_notify() argument
1341 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->bdev); in amdgpu_bo_fault_reserve_notify()
1343 struct amdgpu_bo *abo = ttm_to_amdgpu_bo(bo); in amdgpu_bo_fault_reserve_notify()
1349 if (amdgpu_res_cpu_visible(adev, bo->resource)) in amdgpu_bo_fault_reserve_notify()
1364 r = ttm_bo_validate(bo, &abo->placement, &ctx); in amdgpu_bo_fault_reserve_notify()
1371 if (bo->resource->mem_type == TTM_PL_VRAM && in amdgpu_bo_fault_reserve_notify()
1372 !amdgpu_res_cpu_visible(adev, bo->resource)) in amdgpu_bo_fault_reserve_notify()
1375 ttm_bo_move_to_lru_tail_unlocked(bo); in amdgpu_bo_fault_reserve_notify()
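amdgpu_bo_fault_reserve_notify() services CPU page faults on BOs whose backing store is not CPU-visible: if the current resource is already visible it returns early, otherwise it re-validates the BO into a CPU-accessible placement and bumps it on the LRU. It is invoked from the driver's GEM fault handler between reserving the BO and populating the PTEs; a simplified sketch of that call-site shape (the real handler additionally deals with drm_dev_enter and VM_FAULT_RETRY flags):

    static vm_fault_t my_gem_fault(struct vm_fault *vmf)   /* simplified */
    {
            struct ttm_buffer_object *bo = vmf->vma->vm_private_data;
            vm_fault_t ret;

            ret = ttm_bo_vm_reserve(bo, vmf);
            if (ret)
                    return ret;

            ret = amdgpu_bo_fault_reserve_notify(bo);
            if (ret)
                    goto unlock;

            ret = ttm_bo_vm_fault_reserved(vmf, vmf->vma->vm_page_prot,
                                           TTM_BO_VM_NUM_PREFAULT);
    unlock:
            dma_resv_unlock(bo->base.resv);
            return ret;
    }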
1387 void amdgpu_bo_fence(struct amdgpu_bo *bo, struct dma_fence *fence, in amdgpu_bo_fence() argument
1390 struct dma_resv *resv = bo->tbo.base.resv; in amdgpu_bo_fence()
1442 int amdgpu_bo_sync_wait(struct amdgpu_bo *bo, void *owner, bool intr) in amdgpu_bo_sync_wait() argument
1444 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_bo_sync_wait()
1446 return amdgpu_bo_sync_wait_resv(adev, bo->tbo.base.resv, in amdgpu_bo_sync_wait()
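amdgpu_bo_fence() adds a fence to the BO's reservation object (the final bool selects shared vs. exclusive usage), and amdgpu_bo_sync_wait() forwards to amdgpu_bo_sync_wait_resv() to wait for the reservation's fences according to the owner rules. A sketch, assuming a dma_fence obtained from previously submitted GPU work and the reservation held:

    int r;

    /* Publish the GPU work that touches the BO ... */
    amdgpu_bo_fence(bo, fence, true /* shared */);

    /* ... and later make kernel code wait for all of it. */
    r = amdgpu_bo_sync_wait(bo, AMDGPU_FENCE_OWNER_UNDEFINED,
                            false /* uninterruptible */);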
1460 u64 amdgpu_bo_gpu_offset(struct amdgpu_bo *bo) in amdgpu_bo_gpu_offset() argument
1462 WARN_ON_ONCE(bo->tbo.resource->mem_type == TTM_PL_SYSTEM); in amdgpu_bo_gpu_offset()
1463 WARN_ON_ONCE(!dma_resv_is_locked(bo->tbo.base.resv) && in amdgpu_bo_gpu_offset()
1464 !bo->tbo.pin_count && bo->tbo.type != ttm_bo_type_kernel); in amdgpu_bo_gpu_offset()
1465 WARN_ON_ONCE(bo->tbo.resource->start == AMDGPU_BO_INVALID_OFFSET); in amdgpu_bo_gpu_offset()
1466 WARN_ON_ONCE(bo->tbo.resource->mem_type == TTM_PL_VRAM && in amdgpu_bo_gpu_offset()
1467 !(bo->flags & AMDGPU_GEM_CREATE_VRAM_CONTIGUOUS)); in amdgpu_bo_gpu_offset()
1469 return amdgpu_bo_gpu_offset_no_check(bo); in amdgpu_bo_gpu_offset()
1479 u64 amdgpu_bo_fb_aper_addr(struct amdgpu_bo *bo) in amdgpu_bo_fb_aper_addr() argument
1481 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_bo_fb_aper_addr()
1484 WARN_ON_ONCE(bo->tbo.resource->mem_type != TTM_PL_VRAM); in amdgpu_bo_fb_aper_addr()
1488 offset = (bo->tbo.resource->start << PAGE_SHIFT) + fb_base; in amdgpu_bo_fb_aper_addr()
1499 u64 amdgpu_bo_gpu_offset_no_check(struct amdgpu_bo *bo) in amdgpu_bo_gpu_offset_no_check() argument
1501 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_bo_gpu_offset_no_check()
1504 if (bo->tbo.resource->mem_type == TTM_PL_TT) in amdgpu_bo_gpu_offset_no_check()
1505 offset = amdgpu_gmc_agp_addr(&bo->tbo); in amdgpu_bo_gpu_offset_no_check()
1508 offset = (bo->tbo.resource->start << PAGE_SHIFT) + in amdgpu_bo_gpu_offset_no_check()
1509 amdgpu_ttm_domain_start(adev, bo->tbo.resource->mem_type); in amdgpu_bo_gpu_offset_no_check()
1526 uint32_t amdgpu_bo_mem_stats_placement(struct amdgpu_bo *bo) in amdgpu_bo_mem_stats_placement() argument
1528 uint32_t domain = bo->preferred_domains & AMDGPU_GEM_DOMAIN_MASK; in amdgpu_bo_mem_stats_placement()
1574 #define amdgpu_bo_print_flag(m, bo, flag) \ argument
1576 if (bo->flags & (AMDGPU_GEM_CREATE_ ## flag)) { \
1593 u64 amdgpu_bo_print_info(int id, struct amdgpu_bo *bo, struct seq_file *m) in amdgpu_bo_print_info() argument
1595 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_bo_print_info()
1602 if (dma_resv_trylock(bo->tbo.base.resv)) { in amdgpu_bo_print_info()
1603 if (!bo->tbo.resource) { in amdgpu_bo_print_info()
1606 switch (bo->tbo.resource->mem_type) { in amdgpu_bo_print_info()
1608 if (amdgpu_res_cpu_visible(adev, bo->tbo.resource)) in amdgpu_bo_print_info()
1637 dma_resv_unlock(bo->tbo.base.resv); in amdgpu_bo_print_info()
1642 size = amdgpu_bo_size(bo); in amdgpu_bo_print_info()
1646 pin_count = READ_ONCE(bo->tbo.pin_count); in amdgpu_bo_print_info()
1650 dma_buf = READ_ONCE(bo->tbo.base.dma_buf); in amdgpu_bo_print_info()
1651 attachment = READ_ONCE(bo->tbo.base.import_attach); in amdgpu_bo_print_info()
1658 amdgpu_bo_print_flag(m, bo, CPU_ACCESS_REQUIRED); in amdgpu_bo_print_info()
1659 amdgpu_bo_print_flag(m, bo, NO_CPU_ACCESS); in amdgpu_bo_print_info()
1660 amdgpu_bo_print_flag(m, bo, CPU_GTT_USWC); in amdgpu_bo_print_info()
1661 amdgpu_bo_print_flag(m, bo, VRAM_CLEARED); in amdgpu_bo_print_info()
1662 amdgpu_bo_print_flag(m, bo, VRAM_CONTIGUOUS); in amdgpu_bo_print_info()
1663 amdgpu_bo_print_flag(m, bo, VM_ALWAYS_VALID); in amdgpu_bo_print_info()
1664 amdgpu_bo_print_flag(m, bo, EXPLICIT_SYNC); in amdgpu_bo_print_info()
1666 if (dma_resv_trylock(bo->tbo.base.resv)) { in amdgpu_bo_print_info()
1667 dma_resv_describe(bo->tbo.base.resv, m); in amdgpu_bo_print_info()
1668 dma_resv_unlock(bo->tbo.base.resv); in amdgpu_bo_print_info()
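amdgpu_bo_print_info() trylocks the reservation to classify the current placement, then prints size, pin count, dma-buf state and the creation flags via the amdgpu_bo_print_flag() macro above, returning the BO size so callers can accumulate totals. A sketch of a debugfs-style consumer (the wrapper struct and list are hypothetical):

    struct my_bo_entry {                 /* hypothetical bookkeeping wrapper */
            struct list_head node;
            struct amdgpu_bo *bo;
    };

    static int my_bo_list_show(struct seq_file *m, void *unused)
    {
            struct list_head *head = m->private;   /* hypothetical BO list */
            struct my_bo_entry *e;
            u64 total = 0;
            int id = 0;

            list_for_each_entry(e, head, node)
                    total += amdgpu_bo_print_info(id++, e->bo, m);

            seq_printf(m, "Total requested size: %llu bytes\n", total);
            return 0;
    }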