Lines Matching refs:resv in drivers/gpu/drm/ttm/ttm_bo.c

76 dma_resv_assert_held(bo->base.resv); in ttm_bo_move_to_lru_tail()
99 dma_resv_assert_held(bo->base.resv); in ttm_bo_set_bulk_move()
148 ret = dma_resv_reserve_fences(bo->base.resv, 1); in ttm_bo_handle_move_mem()
190 if (bo->base.resv == &bo->base._resv) in ttm_bo_individualize_resv()
195 r = dma_resv_copy_fences(&bo->base._resv, bo->base.resv); in ttm_bo_individualize_resv()
206 bo->base.resv = &bo->base._resv; in ttm_bo_individualize_resv()
215 struct dma_resv *resv = &bo->base._resv; in ttm_bo_flush_all_fences() local
219 dma_resv_iter_begin(&cursor, resv, DMA_RESV_USAGE_BOOKKEEP); in ttm_bo_flush_all_fences()
245 struct dma_resv *resv = &bo->base._resv; in ttm_bo_cleanup_refs() local
248 if (dma_resv_test_signaled(resv, DMA_RESV_USAGE_BOOKKEEP)) in ttm_bo_cleanup_refs()
257 dma_resv_unlock(bo->base.resv); in ttm_bo_cleanup_refs()
260 lret = dma_resv_wait_timeout(resv, DMA_RESV_USAGE_BOOKKEEP, in ttm_bo_cleanup_refs()
270 if (unlock_resv && !dma_resv_trylock(bo->base.resv)) { in ttm_bo_cleanup_refs()
287 dma_resv_unlock(bo->base.resv); in ttm_bo_cleanup_refs()
296 dma_resv_unlock(bo->base.resv); in ttm_bo_cleanup_refs()
313 dma_resv_wait_timeout(bo->base.resv, DMA_RESV_USAGE_BOOKKEEP, false, in ttm_bo_delayed_delete()
315 dma_resv_lock(bo->base.resv, NULL); in ttm_bo_delayed_delete()
317 dma_resv_unlock(bo->base.resv); in ttm_bo_delayed_delete()
337 dma_resv_wait_timeout(bo->base.resv, in ttm_bo_release()
348 if (!dma_resv_test_signaled(bo->base.resv, in ttm_bo_release()
350 !dma_resv_trylock(bo->base.resv)) { in ttm_bo_release()
379 dma_resv_unlock(bo->base.resv); in ttm_bo_release()
435 dma_resv_assert_held(bo->base.resv); in ttm_bo_evict()
493 dma_resv_assert_held(bo->base.resv); in ttm_bo_eviction_valuable()
521 if (bo->base.resv == ctx->resv) { in ttm_bo_evict_swapout_allowable()
522 dma_resv_assert_held(bo->base.resv); in ttm_bo_evict_swapout_allowable()
529 ret = dma_resv_trylock(bo->base.resv); in ttm_bo_evict_swapout_allowable()
539 dma_resv_unlock(bo->base.resv); in ttm_bo_evict_swapout_allowable()
566 r = dma_resv_lock_interruptible(busy_bo->base.resv, in ttm_mem_evict_wait_busy()
569 r = dma_resv_lock(busy_bo->base.resv, ticket); in ttm_mem_evict_wait_busy()
577 dma_resv_unlock(busy_bo->base.resv); in ttm_mem_evict_wait_busy()
601 dma_resv_locking_ctx(res->bo->base.resv)) in ttm_mem_evict_first()
611 dma_resv_unlock(res->bo->base.resv); in ttm_mem_evict_first()
652 dma_resv_assert_held(bo->base.resv); in ttm_bo_pin()
670 dma_resv_assert_held(bo->base.resv); in ttm_bo_unpin()
708 dma_resv_add_fence(bo->base.resv, fence, DMA_RESV_USAGE_KERNEL); in ttm_bo_add_move_fence()
710 ret = dma_resv_reserve_fences(bo->base.resv, 1); in ttm_bo_add_move_fence()
730 ticket = dma_resv_locking_ctx(bo->base.resv); in ttm_bo_mem_force_space()
773 ret = dma_resv_reserve_fences(bo->base.resv, 1); in ttm_bo_mem_space()
839 dma_resv_assert_held(bo->base.resv); in ttm_bo_move_buffer()
889 dma_resv_assert_held(bo->base.resv); in ttm_bo_validate()
953 struct sg_table *sg, struct dma_resv *resv, in ttm_bo_init_reserved() argument
967 if (resv) in ttm_bo_init_reserved()
968 bo->base.resv = resv; in ttm_bo_init_reserved()
970 bo->base.resv = &bo->base._resv; in ttm_bo_init_reserved()
993 if (!resv) in ttm_bo_init_reserved()
994 WARN_ON(!dma_resv_trylock(bo->base.resv)); in ttm_bo_init_reserved()
996 dma_resv_assert_held(resv); in ttm_bo_init_reserved()
1005 if (!resv) in ttm_bo_init_reserved()
1006 dma_resv_unlock(bo->base.resv); in ttm_bo_init_reserved()
1053 struct sg_table *sg, struct dma_resv *resv, in ttm_bo_init_validate() argument
1060 sg, resv, destroy); in ttm_bo_init_validate()
1064 if (!resv) in ttm_bo_init_validate()
1104 if (dma_resv_test_signaled(bo->base.resv, in ttm_bo_wait_ctx()
1111 ret = dma_resv_wait_timeout(bo->base.resv, DMA_RESV_USAGE_BOOKKEEP, in ttm_bo_wait_ctx()
1144 dma_resv_unlock(bo->base.resv); in ttm_bo_swapout()
1203 dma_resv_unlock(bo->base.resv); in ttm_bo_swapout()
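
The hits above follow TTM's reservation-object discipline: take or assert the resv lock, reserve a fence slot, publish the fence, then drop the lock. Below is a minimal sketch of that publish sequence; the helper name (example_publish_fence) and caller context are hypothetical and not taken from ttm_bo.c, only the dma_resv calls themselves come from the listing.

/*
 * Minimal sketch (not from ttm_bo.c): the usual "publish a fence"
 * sequence against a buffer's reservation object, mirroring the
 * dma_resv_reserve_fences()/dma_resv_add_fence() calls seen above
 * (e.g. lines 148, 708 and 710). Illustrative only.
 */
#include <linux/dma-resv.h>
#include <linux/dma-fence.h>

static int example_publish_fence(struct dma_resv *resv,
                                 struct dma_fence *fence)
{
        int ret;

        /* The resv ww_mutex must already be held by the caller. */
        dma_resv_assert_held(resv);

        /* Reserve a fence slot first; this step may allocate and fail. */
        ret = dma_resv_reserve_fences(resv, 1);
        if (ret)
                return ret;

        /* Adding the fence cannot fail once a slot has been reserved. */
        dma_resv_add_fence(resv, fence, DMA_RESV_USAGE_KERNEL);
        return 0;
}

dma_resv_reserve_fences() is the step that can fail (it may allocate), while dma_resv_add_fence() cannot, so a slot has to be reserved at some earlier point on the same locked path; that appears to be what the dma_resv_reserve_fences() calls in the listing provide for.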