Lines matching refs:mgr (all hits are in drivers/gpu/drm/amd/amdgpu/amdgpu_vram_mgr.c)

50 to_amdgpu_device(struct amdgpu_vram_mgr *mgr)  in to_amdgpu_device()  argument
52 return container_of(mgr, struct amdgpu_device, mman.vram_mgr); in to_amdgpu_device()
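The two hits above (file lines 50 and 52) are the owning-device lookup: the VRAM
manager is embedded in struct amdgpu_device as mman.vram_mgr, so container_of()
walks from the manager pointer back to the enclosing device. A sketch of the
helper as the listing implies it; only the static inline qualifier is an
assumption:

static inline struct amdgpu_device *
to_amdgpu_device(struct amdgpu_vram_mgr *mgr)
{
	/* mgr is the mman.vram_mgr member, so step back to the device */
	return container_of(mgr, struct amdgpu_device, mman.vram_mgr);
}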
297 struct amdgpu_vram_mgr *mgr = to_vram_mgr(man); in amdgpu_vram_mgr_do_reserve() local
298 struct amdgpu_device *adev = to_amdgpu_device(mgr); in amdgpu_vram_mgr_do_reserve()
299 struct drm_buddy *mm = &mgr->mm; in amdgpu_vram_mgr_do_reserve()
304 list_for_each_entry_safe(rsv, temp, &mgr->reservations_pending, blocks) { in amdgpu_vram_mgr_do_reserve()
318 atomic64_add(vis_usage, &mgr->vis_usage); in amdgpu_vram_mgr_do_reserve()
322 list_move(&rsv->blocks, &mgr->reserved_pages); in amdgpu_vram_mgr_do_reserve()
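File lines 297-322 belong to amdgpu_vram_mgr_do_reserve(), which retries every
entry on mgr->reservations_pending: each reserved range is carved out of the
drm_buddy allocator, its CPU-visible share is added to mgr->vis_usage, and the
entry is moved to mgr->reserved_pages. A hedged sketch, assuming struct
amdgpu_vram_reservation carries start/size/allocated fields and that
amdgpu_vram_mgr_first_block() and amdgpu_vram_mgr_vis_size() are the driver's
internal helpers for picking the first buddy block and sizing its visible part:

static void amdgpu_vram_mgr_do_reserve(struct ttm_resource_manager *man)
{
	struct amdgpu_vram_mgr *mgr = to_vram_mgr(man);
	struct amdgpu_device *adev = to_amdgpu_device(mgr);
	struct drm_buddy *mm = &mgr->mm;
	struct amdgpu_vram_reservation *rsv, *temp;
	struct drm_buddy_block *block;
	uint64_t vis_usage;

	list_for_each_entry_safe(rsv, temp, &mgr->reservations_pending, blocks) {
		/* Try to claim the exact range; on failure the entry stays
		 * on reservations_pending and is retried on a later call.
		 */
		if (drm_buddy_alloc_blocks(mm, rsv->start,
					   rsv->start + rsv->size, rsv->size,
					   mm->chunk_size, &rsv->allocated,
					   DRM_BUDDY_RANGE_ALLOCATION))
			continue;

		block = amdgpu_vram_mgr_first_block(&rsv->allocated);
		if (!block)
			continue;

		/* Account the CPU-visible part (line 318) and park the
		 * reservation on reserved_pages (line 322).
		 */
		vis_usage = amdgpu_vram_mgr_vis_size(adev, block);
		atomic64_add(vis_usage, &mgr->vis_usage);
		list_move(&rsv->blocks, &mgr->reserved_pages);
	}
}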
335 int amdgpu_vram_mgr_reserve_range(struct amdgpu_vram_mgr *mgr, in amdgpu_vram_mgr_reserve_range() argument
350 mutex_lock(&mgr->lock); in amdgpu_vram_mgr_reserve_range()
351 list_add_tail(&rsv->blocks, &mgr->reservations_pending); in amdgpu_vram_mgr_reserve_range()
352 amdgpu_vram_mgr_do_reserve(&mgr->manager); in amdgpu_vram_mgr_reserve_range()
353 mutex_unlock(&mgr->lock); in amdgpu_vram_mgr_reserve_range()
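File lines 335-353 are the public entry point for reserving a VRAM range: it
queues a reservation on reservations_pending under mgr->lock and immediately
tries to satisfy it via amdgpu_vram_mgr_do_reserve(). A sketch; the start/size
parameters and the kzalloc(GFP_KERNEL) allocation are assumptions, while the
locking and list handling follow the listing:

int amdgpu_vram_mgr_reserve_range(struct amdgpu_vram_mgr *mgr,
				  uint64_t start, uint64_t size)
{
	struct amdgpu_vram_reservation *rsv;

	rsv = kzalloc(sizeof(*rsv), GFP_KERNEL);
	if (!rsv)
		return -ENOMEM;

	INIT_LIST_HEAD(&rsv->allocated);
	INIT_LIST_HEAD(&rsv->blocks);

	rsv->start = start;
	rsv->size = size;

	mutex_lock(&mgr->lock);				/* line 350 */
	list_add_tail(&rsv->blocks, &mgr->reservations_pending);
	amdgpu_vram_mgr_do_reserve(&mgr->manager);	/* line 352 */
	mutex_unlock(&mgr->lock);

	return 0;
}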
369 int amdgpu_vram_mgr_query_page_status(struct amdgpu_vram_mgr *mgr, in amdgpu_vram_mgr_query_page_status() argument
375 mutex_lock(&mgr->lock); in amdgpu_vram_mgr_query_page_status()
377 list_for_each_entry(rsv, &mgr->reservations_pending, blocks) { in amdgpu_vram_mgr_query_page_status()
385 list_for_each_entry(rsv, &mgr->reserved_pages, blocks) { in amdgpu_vram_mgr_query_page_status()
395 mutex_unlock(&mgr->lock); in amdgpu_vram_mgr_query_page_status()
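File lines 369-395 look up the reservation state of a single address under
mgr->lock: a hit on reservations_pending means the reservation has not been
satisfied yet, a hit on reserved_pages means it has, and no hit means the
address was never reserved. A sketch assuming a start parameter and the
conventional return codes (-EBUSY / 0 / -ENOENT):

int amdgpu_vram_mgr_query_page_status(struct amdgpu_vram_mgr *mgr,
				      uint64_t start)
{
	struct amdgpu_vram_reservation *rsv;
	int ret;

	mutex_lock(&mgr->lock);

	ret = -EBUSY;		/* reservation still pending */
	list_for_each_entry(rsv, &mgr->reservations_pending, blocks) {
		if (rsv->start <= start && start < rsv->start + rsv->size)
			goto out;
	}

	ret = 0;		/* reserved and backed by buddy blocks */
	list_for_each_entry(rsv, &mgr->reserved_pages, blocks) {
		if (rsv->start <= start && start < rsv->start + rsv->size)
			goto out;
	}

	ret = -ENOENT;		/* never reserved */
out:
	mutex_unlock(&mgr->lock);
	return ret;
}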
453 struct amdgpu_vram_mgr *mgr = to_vram_mgr(man); in amdgpu_vram_mgr_new() local
454 struct amdgpu_device *adev = to_amdgpu_device(mgr); in amdgpu_vram_mgr_new()
460 struct drm_buddy *mm = &mgr->mm; in amdgpu_vram_mgr_new()
511 if (fpfn || lpfn != mgr->mm.size) in amdgpu_vram_mgr_new()
529 mutex_lock(&mgr->lock); in amdgpu_vram_mgr_new()
534 min_block_size = mgr->default_page_size; in amdgpu_vram_mgr_new()
586 mutex_unlock(&mgr->lock); in amdgpu_vram_mgr_new()
615 atomic64_add(vis_usage, &mgr->vis_usage); in amdgpu_vram_mgr_new()
621 mutex_unlock(&mgr->lock); in amdgpu_vram_mgr_new()
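File lines 453-621 are the TTM alloc callback: it wraps a ttm_resource in an
amdgpu_vram_mgr_resource, turns the placement into a drm_buddy allocation under
mgr->lock (range-restricted when the placement does not cover all of VRAM, see
line 511), and finally accounts the CPU-visible share. The sketch below is
heavily condensed: contiguous/cleared handling, huge-page block sizing and most
error paths are omitted, and amdgpu_vram_mgr_vis_size() is again assumed to be
the driver's per-block visible-size helper:

static int amdgpu_vram_mgr_new(struct ttm_resource_manager *man,
			       struct ttm_buffer_object *tbo,
			       const struct ttm_place *place,
			       struct ttm_resource **res)
{
	struct amdgpu_vram_mgr *mgr = to_vram_mgr(man);
	struct amdgpu_device *adev = to_amdgpu_device(mgr);
	struct drm_buddy *mm = &mgr->mm;
	struct amdgpu_vram_mgr_resource *vres;
	struct drm_buddy_block *block;
	u64 fpfn, lpfn, min_block_size, vis_usage = 0;
	int r;

	fpfn = (u64)place->fpfn << PAGE_SHIFT;
	lpfn = place->lpfn ? (u64)place->lpfn << PAGE_SHIFT : man->size;

	vres = kzalloc(sizeof(*vres), GFP_KERNEL);
	if (!vres)
		return -ENOMEM;

	ttm_resource_init(tbo, place, &vres->base);
	INIT_LIST_HEAD(&vres->blocks);

	/* Anything short of the whole aperture becomes a range allocation
	 * (compare line 511: if (fpfn || lpfn != mgr->mm.size)).
	 */
	if (fpfn || lpfn != mgr->mm.size)
		vres->flags |= DRM_BUDDY_RANGE_ALLOCATION;

	mutex_lock(&mgr->lock);					/* line 529 */
	min_block_size = mgr->default_page_size;		/* line 534 */
	r = drm_buddy_alloc_blocks(mm, fpfn, lpfn, vres->base.size,
				   min_block_size, &vres->blocks,
				   vres->flags);
	mutex_unlock(&mgr->lock);
	if (r)
		goto err_free;

	/* Sum the CPU-visible share of every block (line 615). */
	list_for_each_entry(block, &vres->blocks, link)
		vis_usage += amdgpu_vram_mgr_vis_size(adev, block);
	atomic64_add(vis_usage, &mgr->vis_usage);

	*res = &vres->base;
	return 0;

err_free:
	ttm_resource_fini(man, &vres->base);
	kfree(vres);
	return r;
}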
641 struct amdgpu_vram_mgr *mgr = to_vram_mgr(man); in amdgpu_vram_mgr_del() local
642 struct amdgpu_device *adev = to_amdgpu_device(mgr); in amdgpu_vram_mgr_del()
643 struct drm_buddy *mm = &mgr->mm; in amdgpu_vram_mgr_del()
647 mutex_lock(&mgr->lock); in amdgpu_vram_mgr_del()
653 mutex_unlock(&mgr->lock); in amdgpu_vram_mgr_del()
655 atomic64_sub(vis_usage, &mgr->vis_usage); in amdgpu_vram_mgr_del()
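File lines 641-655 are the matching free callback: the visible-VRAM usage of
every buddy block is summed under mgr->lock, the blocks go back to the
allocator, and the total is subtracted from mgr->vis_usage. A sketch under the
same assumptions as above; the pending-reservation retry and the
drm_buddy_free_list() flags argument follow the other call sites shown in this
listing:

static void amdgpu_vram_mgr_del(struct ttm_resource_manager *man,
				struct ttm_resource *res)
{
	struct amdgpu_vram_mgr_resource *vres = to_amdgpu_vram_mgr_resource(res);
	struct amdgpu_vram_mgr *mgr = to_vram_mgr(man);
	struct amdgpu_device *adev = to_amdgpu_device(mgr);
	struct drm_buddy *mm = &mgr->mm;
	struct drm_buddy_block *block;
	uint64_t vis_usage = 0;

	mutex_lock(&mgr->lock);					/* line 647 */
	list_for_each_entry(block, &vres->blocks, link)
		vis_usage += amdgpu_vram_mgr_vis_size(adev, block);

	/* Freed space may let a pending reservation succeed now. */
	amdgpu_vram_mgr_do_reserve(man);

	drm_buddy_free_list(mm, &vres->blocks, vres->flags);
	mutex_unlock(&mgr->lock);				/* line 653 */

	atomic64_sub(vis_usage, &mgr->vis_usage);		/* line 655 */

	ttm_resource_fini(man, res);
	kfree(vres);
}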
779 uint64_t amdgpu_vram_mgr_vis_usage(struct amdgpu_vram_mgr *mgr) in amdgpu_vram_mgr_vis_usage() argument
781 return atomic64_read(&mgr->vis_usage); in amdgpu_vram_mgr_vis_usage()
793 struct amdgpu_vram_mgr *mgr = &adev->mman.vram_mgr; in amdgpu_vram_mgr_clear_reset_blocks() local
794 struct drm_buddy *mm = &mgr->mm; in amdgpu_vram_mgr_clear_reset_blocks()
796 mutex_lock(&mgr->lock); in amdgpu_vram_mgr_clear_reset_blocks()
798 mutex_unlock(&mgr->lock); in amdgpu_vram_mgr_clear_reset_blocks()
816 struct amdgpu_vram_mgr_resource *mgr = to_amdgpu_vram_mgr_resource(res); in amdgpu_vram_mgr_intersects() local
820 list_for_each_entry(block, &mgr->blocks, link) { in amdgpu_vram_mgr_intersects()
849 struct amdgpu_vram_mgr_resource *mgr = to_amdgpu_vram_mgr_resource(res); in amdgpu_vram_mgr_compatible() local
853 list_for_each_entry(block, &mgr->blocks, link) { in amdgpu_vram_mgr_compatible()
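Note that in file lines 816-853 the local named mgr is not the manager at all
but the per-resource wrapper (struct amdgpu_vram_mgr_resource), whose blocks
list chains the drm_buddy blocks backing one buffer. Both the intersects and
compatible hooks walk that list and compare each block against the requested
placement window. A sketch of the intersects variant, assuming the buddy chunk
size is PAGE_SIZE (as set up at line 941) so a block's size is
PAGE_SIZE << order:

static bool amdgpu_vram_mgr_intersects(struct ttm_resource_manager *man,
				       struct ttm_resource *res,
				       const struct ttm_place *place,
				       size_t size)
{
	struct amdgpu_vram_mgr_resource *mgr = to_amdgpu_vram_mgr_resource(res);
	struct drm_buddy_block *block;

	/* Check each buddy block backing the resource individually. */
	list_for_each_entry(block, &mgr->blocks, link) {
		unsigned long fpfn =
			drm_buddy_block_offset(block) >> PAGE_SHIFT;
		unsigned long lpfn = fpfn +
			(((u64)PAGE_SIZE << drm_buddy_block_order(block)) >> PAGE_SHIFT);

		if (place->fpfn < lpfn && place->lpfn > fpfn)
			return true;
	}

	return false;
}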
878 struct amdgpu_vram_mgr *mgr = to_vram_mgr(man); in amdgpu_vram_mgr_debug() local
879 struct drm_buddy *mm = &mgr->mm; in amdgpu_vram_mgr_debug()
883 amdgpu_vram_mgr_vis_usage(mgr)); in amdgpu_vram_mgr_debug()
885 mutex_lock(&mgr->lock); in amdgpu_vram_mgr_debug()
887 mgr->default_page_size >> 10); in amdgpu_vram_mgr_debug()
892 list_for_each_entry(rsv, &mgr->reserved_pages, blocks) in amdgpu_vram_mgr_debug()
895 mutex_unlock(&mgr->lock); in amdgpu_vram_mgr_debug()
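File lines 878-895 are the TTM debug hook: it prints the visible-VRAM usage,
the default page size, the buddy allocator state and the currently reserved
ranges, all under mgr->lock. A sketch; the exact format strings are
assumptions, and drm_buddy_print() is the stock helper for dumping allocator
state into a drm_printer:

static void amdgpu_vram_mgr_debug(struct ttm_resource_manager *man,
				  struct drm_printer *printer)
{
	struct amdgpu_vram_mgr *mgr = to_vram_mgr(man);
	struct drm_buddy *mm = &mgr->mm;
	struct amdgpu_vram_reservation *rsv;

	drm_printf(printer, "  vis usage:%llu\n",
		   amdgpu_vram_mgr_vis_usage(mgr));		/* line 883 */

	mutex_lock(&mgr->lock);
	drm_printf(printer, "default_page_size: %lluKiB\n",
		   mgr->default_page_size >> 10);		/* line 887 */

	drm_buddy_print(mm, printer);

	drm_printf(printer, "reserved:\n");
	list_for_each_entry(rsv, &mgr->reserved_pages, blocks)	/* line 892 */
		drm_printf(printer, "%#018llx-%#018llx: %llu\n",
			   rsv->start, rsv->start + rsv->size, rsv->size);
	mutex_unlock(&mgr->lock);
}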
923 struct amdgpu_vram_mgr *mgr = &adev->mman.vram_mgr; in amdgpu_vram_mgr_init() local
924 struct ttm_resource_manager *man = &mgr->manager; in amdgpu_vram_mgr_init()
933 mutex_init(&mgr->lock); in amdgpu_vram_mgr_init()
934 INIT_LIST_HEAD(&mgr->reservations_pending); in amdgpu_vram_mgr_init()
935 INIT_LIST_HEAD(&mgr->reserved_pages); in amdgpu_vram_mgr_init()
936 mgr->default_page_size = PAGE_SIZE; in amdgpu_vram_mgr_init()
941 err = drm_buddy_init(&mgr->mm, man->size, PAGE_SIZE); in amdgpu_vram_mgr_init()
949 ttm_set_driver_manager(&adev->mman.bdev, TTM_PL_VRAM, &mgr->manager); in amdgpu_vram_mgr_init()
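File lines 923-949 show the bring-up order: the TTM resource manager embedded
in mgr is initialized, the lock, both reservation lists and the default page
size are set up, the buddy allocator is created with PAGE_SIZE chunks over the
whole VRAM size, and the manager is registered as TTM_PL_VRAM. A sketch; using
adev->gmc.real_vram_size for the manager size, wiring man->func to the driver's
ops table and the final ttm_resource_manager_set_used() call are assumptions:

int amdgpu_vram_mgr_init(struct amdgpu_device *adev)
{
	struct amdgpu_vram_mgr *mgr = &adev->mman.vram_mgr;
	struct ttm_resource_manager *man = &mgr->manager;
	int err;

	ttm_resource_manager_init(man, &adev->mman.bdev,
				  adev->gmc.real_vram_size);

	mutex_init(&mgr->lock);					/* line 933 */
	INIT_LIST_HEAD(&mgr->reservations_pending);
	INIT_LIST_HEAD(&mgr->reserved_pages);
	mgr->default_page_size = PAGE_SIZE;			/* line 936 */

	/* One buddy allocator spanning all of VRAM, PAGE_SIZE chunks. */
	err = drm_buddy_init(&mgr->mm, man->size, PAGE_SIZE);	/* line 941 */
	if (err)
		return err;

	man->func = &amdgpu_vram_mgr_func;
	ttm_set_driver_manager(&adev->mman.bdev, TTM_PL_VRAM, &mgr->manager);
	ttm_resource_manager_set_used(man, true);
	return 0;
}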
964 struct amdgpu_vram_mgr *mgr = &adev->mman.vram_mgr; in amdgpu_vram_mgr_fini() local
965 struct ttm_resource_manager *man = &mgr->manager; in amdgpu_vram_mgr_fini()
975 mutex_lock(&mgr->lock); in amdgpu_vram_mgr_fini()
976 list_for_each_entry_safe(rsv, temp, &mgr->reservations_pending, blocks) in amdgpu_vram_mgr_fini()
979 list_for_each_entry_safe(rsv, temp, &mgr->reserved_pages, blocks) { in amdgpu_vram_mgr_fini()
980 drm_buddy_free_list(&mgr->mm, &rsv->allocated, 0); in amdgpu_vram_mgr_fini()
984 drm_buddy_fini(&mgr->mm); in amdgpu_vram_mgr_fini()
985 mutex_unlock(&mgr->lock); in amdgpu_vram_mgr_fini()
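File lines 964-985 tear everything down in reverse order: the manager is
disabled and evicted first, then under mgr->lock any still-pending reservations
are freed outright, satisfied reservations give their buddy blocks back via
drm_buddy_free_list() (line 980), and the allocator itself is destroyed. A
sketch; the set_used/evict_all/cleanup calls around the locked section are
assumptions based on the usual TTM manager teardown sequence:

void amdgpu_vram_mgr_fini(struct amdgpu_device *adev)
{
	struct amdgpu_vram_mgr *mgr = &adev->mman.vram_mgr;
	struct ttm_resource_manager *man = &mgr->manager;
	struct amdgpu_vram_reservation *rsv, *temp;
	int ret;

	ttm_resource_manager_set_used(man, false);
	ret = ttm_resource_manager_evict_all(&adev->mman.bdev, man);
	if (ret)
		return;

	mutex_lock(&mgr->lock);					/* line 975 */
	list_for_each_entry_safe(rsv, temp, &mgr->reservations_pending, blocks)
		kfree(rsv);

	list_for_each_entry_safe(rsv, temp, &mgr->reserved_pages, blocks) {
		drm_buddy_free_list(&mgr->mm, &rsv->allocated, 0);
		kfree(rsv);
	}
	drm_buddy_fini(&mgr->mm);				/* line 984 */
	mutex_unlock(&mgr->lock);

	ttm_resource_manager_cleanup(man);
	ttm_set_driver_manager(&adev->mman.bdev, TTM_PL_VRAM, NULL);
}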