Matches for the identifier `ticket` (a struct ww_acquire_ctx, the ww-mutex acquire context used for dma_resv deadlock avoidance) under drivers/gpu/drm, grouped by directory and file. The number before each excerpt is the source line of the match; some result lists are truncated ("[all …]").

/drivers/gpu/drm/ttm/
ttm_execbuf_util.c
     59: if (ticket)   in ttm_eu_backoff_reservation()
     60: ww_acquire_fini(ticket);   in ttm_eu_backoff_reservation()
     86: if (ticket)   in ttm_eu_reserve_buffers()
     87: ww_acquire_init(ticket, &reservation_ww_class);   in ttm_eu_reserve_buffers()
     93: ret = ttm_bo_reserve(bo, intr, (ticket == NULL), ticket);   in ttm_eu_reserve_buffers()
    117: ret = ttm_bo_reserve_slowpath(bo, intr, ticket);   in ttm_eu_reserve_buffers()
    125: if (ticket) {   in ttm_eu_reserve_buffers()
    126: ww_acquire_done(ticket);   in ttm_eu_reserve_buffers()
    127: ww_acquire_fini(ticket);   in ttm_eu_reserve_buffers()
    160: if (ticket)   in ttm_eu_fence_buffer_objects()
    [all …]
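Taken together, these hits trace TTM's execbuf helper flow: ttm_eu_reserve_buffers() opens the caller's acquire context with ww_acquire_init(), reserves each buffer through ttm_bo_reserve() (falling back to ttm_bo_reserve_slowpath() on contention), and the context is closed again either on backoff or after fences are attached. A minimal caller sketch, assuming a pre-filled list of struct ttm_validate_buffer entries; the function and variable names here are illustrative, not taken from any driver:

#include <drm/ttm/ttm_execbuf_util.h>

/* Hypothetical submit path: reserve all BOs, emit work, fence, unreserve.
 * `list` holds struct ttm_validate_buffer entries filled in by the caller. */
static int demo_submit(struct list_head *list, struct dma_fence *fence)
{
        struct ww_acquire_ctx ticket;
        int ret;

        /* Opens the acquire context and reserves every BO on the list,
         * resolving contention internally via the reserve slowpath. */
        ret = ttm_eu_reserve_buffers(&ticket, list, true, NULL);
        if (ret)
                return ret;

        /* ... emit the job that will signal `fence` ... */

        /* Attaches the fence to each BO, unreserves them all, and ends
         * the acquire context. */
        ttm_eu_fence_buffer_objects(&ticket, list, fence);
        return 0;
}

On an error between reserve and fence, ttm_eu_backoff_reservation(&ticket, list) drops the reservations instead, which is exactly the pattern vmwgfx wraps further down.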
ttm_bo.c
    562: struct ww_acquire_ctx *ticket,   in ttm_bo_evict_alloc() (argument)
    571: .ticket = ticket,   in ttm_bo_evict_alloc()
    589: if (lret || !ticket)   in ttm_bo_evict_alloc()
    600: evict_walk.walk.arg.ticket = ticket;   in ttm_bo_evict_alloc()
    717: struct ww_acquire_ctx *ticket;   in ttm_bo_alloc_resource() (local)
    720: ticket = dma_resv_locking_ctx(bo->base.resv);   in ttm_bo_alloc_resource()
    752: ticket, res, limit_pool);   in ttm_bo_alloc_resource()
ttm_bo_util.c
    831: ret = dma_resv_lock_interruptible(resv, arg->ticket);   in ttm_lru_walk_ticketlock()
    833: ret = dma_resv_lock(resv, arg->ticket);   in ttm_lru_walk_ticketlock()
    843: arg->ticket = NULL;   in ttm_lru_walk_ticketlock()
    985: else if (!arg->ticket || arg->ctx->no_wait_gpu || arg->trylock_only)   in __ttm_bo_lru_cursor_next()
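ttm_bo.c and ttm_bo_util.c show the opposite direction: instead of receiving a ticket, ttm_bo_alloc_resource() recovers it from the BO's already-held reservation lock with dma_resv_locking_ctx() and threads it into the eviction walk, which then locks victim BOs under the same context (or only trylocks when no ticket is available). A hedged sketch of that recover-and-relock idiom; demo_lock_sibling() and both parameter names are illustrative:

#include <linux/errno.h>
#include <linux/dma-resv.h>

/* Locks `other` under whatever acquire context already holds `owner`,
 * so an eviction path participates in ww-mutex deadlock avoidance. */
static int demo_lock_sibling(struct dma_resv *owner, struct dma_resv *other)
{
        struct ww_acquire_ctx *ticket = dma_resv_locking_ctx(owner);

        /* No context: only an opportunistic trylock is safe. */
        if (!ticket)
                return dma_resv_trylock(other) ? 0 : -EBUSY;

        /* -EDEADLK from here tells the caller to back off and retry. */
        return dma_resv_lock(other, ticket);
}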
/drivers/gpu/drm/msm/
msm_gem_shrinker.c
     47: with_vm_locks(struct ww_acquire_ctx *ticket,   in with_vm_locks() (argument)
     64: ret = dma_resv_lock(resv, ticket);   in with_vm_locks()
    116: purge(struct drm_gem_object *obj, struct ww_acquire_ctx *ticket)   in purge() (argument)
    124: return with_vm_locks(ticket, msm_gem_purge, obj);   in purge()
    128: evict(struct drm_gem_object *obj, struct ww_acquire_ctx *ticket)   in evict() (argument)
    136: return with_vm_locks(ticket, msm_gem_evict, obj);   in evict()
    147: active_purge(struct drm_gem_object *obj, struct ww_acquire_ctx *ticket)   in active_purge() (argument)
    152: return purge(obj, ticket);   in active_purge()
    161: return evict(obj, ticket);   in active_evict()
    168: struct ww_acquire_ctx ticket;   in msm_gem_shrinker_scan() (local)
    [all …]
/drivers/gpu/drm/
drm_exec.c
    110: ww_acquire_fini(&exec->ticket);   in drm_exec_fini()
    126: ww_acquire_done(&exec->ticket);   in drm_exec_cleanup()
    132: ww_acquire_init(&exec->ticket, &reservation_ww_class);   in drm_exec_cleanup()
    176: &exec->ticket);   in drm_exec_lock_contended()
    180: dma_resv_lock_slow(obj->resv, &exec->ticket);   in drm_exec_lock_contended()
    224: ret = dma_resv_lock_interruptible(obj->resv, &exec->ticket);   in drm_exec_lock_obj()
    226: ret = dma_resv_lock(obj->resv, &exec->ticket);   in drm_exec_lock_obj()
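drm_exec wraps the entire init/lock/slowpath/done/fini dance around a ticket embedded in struct drm_exec, so drivers only write the loop body. A sketch of the documented usage pattern; the two object parameters are illustrative, and the three-argument drm_exec_init() matches recent kernels (older ones lack the preallocation count):

#include <drm/drm_exec.h>

/* Locks two GEM objects; drm_exec_until_all_locked() re-runs the block
 * whenever a contended lock forces drm_exec to back off and lock slow. */
static int demo_exec_lock(struct drm_gem_object *a, struct drm_gem_object *b)
{
        struct drm_exec exec;
        int ret;

        drm_exec_init(&exec, DRM_EXEC_INTERRUPTIBLE_WAIT, 0);
        drm_exec_until_all_locked(&exec) {
                ret = drm_exec_lock_obj(&exec, a);
                drm_exec_retry_on_contention(&exec);
                if (ret)
                        goto out;

                ret = drm_exec_lock_obj(&exec, b);
                drm_exec_retry_on_contention(&exec);
                if (ret)
                        goto out;
        }

        /* ... both reservation locks are held here; do the work ... */
out:
        drm_exec_fini(&exec);   /* unlocks everything, finishes the ticket */
        return ret;
}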
drm_gem.c
    1492: bool (*shrink)(struct drm_gem_object *obj, struct ww_acquire_ctx *ticket),   in drm_gem_lru_scan() (argument)
    1493: struct ww_acquire_ctx *ticket)   in drm_gem_lru_scan()
    1526: if (ticket)   in drm_gem_lru_scan()
    1527: ww_acquire_init(ticket, &reservation_ww_class);   in drm_gem_lru_scan()
    1534: if (!ww_mutex_trylock(&obj->resv->lock, ticket)) {   in drm_gem_lru_scan()
    1539: if (shrink(obj, ticket)) {   in drm_gem_lru_scan()
    1553: if (ticket)   in drm_gem_lru_scan()
    1554: ww_acquire_fini(ticket);   in drm_gem_lru_scan()
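drm_gem_lru_scan() owns the ticket lifecycle itself: it calls ww_acquire_init() when a context is supplied, trylocks each object's resv against it, and passes the ticket on so the shrink callback can take further reservation locks in the same context (which is what msm's purge/evict helpers above do). A caller sketch assuming the ticketed signature shown in the excerpt; the callback body and all `demo_` names are illustrative:

#include <drm/drm_gem.h>

/* Shrink callback: invoked with obj->resv already held; `ticket` lets
 * the driver lock additional resvs (e.g. VMs) in the same context. */
static bool demo_shrink(struct drm_gem_object *obj,
                        struct ww_acquire_ctx *ticket)
{
        /* ... purge or evict the object's backing storage ... */
        return true;    /* report the object as reclaimed */
}

static unsigned long demo_scan(struct drm_gem_lru *lru, unsigned int nr)
{
        struct ww_acquire_ctx ticket;   /* initialized by the scan itself */
        unsigned long remaining = 0;

        return drm_gem_lru_scan(lru, nr, &remaining, demo_shrink, &ticket);
}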
/drivers/gpu/drm/vmwgfx/
vmwgfx_validation.h
     62: struct ww_acquire_ctx ticket;   (member)
    122: return ttm_eu_reserve_buffers(&ctx->ticket, &ctx->bo_list, intr,   in vmw_validation_bo_reserve()
    138: ttm_eu_fence_buffer_objects(&ctx->ticket, &ctx->bo_list,   in vmw_validation_bo_fence()
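vmwgfx embeds the ticket in a long-lived validation context rather than a stack variable, so reservation can begin in one function and end, by fencing or backoff, in another. A minimal sketch of that split, with hypothetical struct and helper names standing in for vmw_validation_context and its wrappers:

#include <drm/ttm/ttm_execbuf_util.h>

/* Validation context owning the ticket across reserve/fence/backoff. */
struct demo_val_ctx {
        struct ww_acquire_ctx ticket;
        struct list_head bo_list;       /* ttm_validate_buffer entries */
};

static int demo_reserve(struct demo_val_ctx *ctx, bool intr)
{
        return ttm_eu_reserve_buffers(&ctx->ticket, &ctx->bo_list, intr, NULL);
}

static void demo_fence(struct demo_val_ctx *ctx, struct dma_fence *fence)
{
        ttm_eu_fence_buffer_objects(&ctx->ticket, &ctx->bo_list, fence);
}

static void demo_backoff(struct demo_val_ctx *ctx)
{
        ttm_eu_backoff_reservation(&ctx->ticket, &ctx->bo_list);
}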
vmwgfx_resource.c
    517: vmw_resource_check_buffer(struct ww_acquire_ctx *ticket,   in vmw_resource_check_buffer() (argument)
    538: ret = ttm_eu_reserve_buffers(ticket, &val_list, interruptible, NULL);   in vmw_resource_check_buffer()
    558: ttm_eu_backoff_reservation(ticket, &val_list);   in vmw_resource_check_buffer()
    611: vmw_resource_backoff_reservation(struct ww_acquire_ctx *ticket,   in vmw_resource_backoff_reservation() (argument)
    621: ttm_eu_backoff_reservation(ticket, &val_list);   in vmw_resource_backoff_reservation()
    634: static int vmw_resource_do_evict(struct ww_acquire_ctx *ticket,   in vmw_resource_do_evict() (argument)
    645: ret = vmw_resource_check_buffer(ticket, res, interruptible, &val_buf);   in vmw_resource_do_evict()
    660: vmw_resource_backoff_reservation(ticket, &val_buf);   in vmw_resource_do_evict()
    901: struct ww_acquire_ctx ticket;   in vmw_resource_evict_type() (local)
    916: ret = vmw_resource_do_evict(&ticket, evict_res, false);   in vmw_resource_do_evict()
vmwgfx_validation.c
    845: ttm_eu_backoff_reservation(&ctx->ticket, &ctx->bo_list);   in vmw_validation_bo_backoff()
/drivers/gpu/drm/etnaviv/
etnaviv_gem_submit.c
    121: struct ww_acquire_ctx *ticket)   in submit_lock_objects() (argument)
    135: ret = dma_resv_lock_interruptible(obj->resv, ticket);   in submit_lock_objects()
    145: ww_acquire_done(ticket);   in submit_lock_objects()
    162: ret = dma_resv_lock_slow_interruptible(obj->resv, ticket);   in submit_lock_objects()
    423: struct ww_acquire_ctx ticket;   in etnaviv_ioctl_gem_submit() (local)
    515: ww_acquire_init(&ticket, &reservation_ww_class);   in etnaviv_ioctl_gem_submit()
    580: ret = submit_lock_objects(submit, &ticket);   in etnaviv_ioctl_gem_submit()
    624: ww_acquire_fini(&ticket);   in etnaviv_ioctl_gem_submit()
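etnaviv hand-rolls what drm_exec automates: an interruptible lock pass over every submit BO, with the -EDEADLK loser unlocking everything and sleeping on the contended lock via dma_resv_lock_slow_interruptible() before retrying. A condensed sketch of that protocol over an illustrative array of resvs (etnaviv walks its submit's BO list instead, and also handles -EALREADY duplicates, elided here):

#include <linux/dma-resv.h>

/* Locks resvs[0..n) under one acquire context. On success all locks
 * are held; the caller later unlocks each and calls ww_acquire_fini(). */
static int demo_lock_all(struct dma_resv **resvs, unsigned int n,
                         struct ww_acquire_ctx *ticket)
{
        int contended = -1, slow_locked = -1, ret;
        unsigned int i;

        ww_acquire_init(ticket, &reservation_ww_class);
retry:
        for (i = 0; i < n; i++) {
                if ((int)i == slow_locked) {
                        slow_locked = -1;       /* taken by the slow path */
                        continue;
                }
                ret = dma_resv_lock_interruptible(resvs[i], ticket);
                if (ret) {
                        contended = i;
                        goto fail;
                }
        }
        ww_acquire_done(ticket);        /* no further locks will be taken */
        return 0;

fail:
        /* Drop everything locked so far, including any lock still held
         * from a previous slow-path acquisition. */
        while (i--)
                dma_resv_unlock(resvs[i]);
        if (slow_locked >= 0)
                dma_resv_unlock(resvs[slow_locked]);

        if (ret == -EDEADLK) {
                /* We lost the ww-mutex race: sleep on the contended
                 * lock with everything else dropped, then retry. */
                ret = dma_resv_lock_slow_interruptible(resvs[contended],
                                                       ticket);
                if (!ret) {
                        slow_locked = contended;
                        goto retry;
                }
        }
        ww_acquire_fini(ticket);
        return ret;
}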
/drivers/gpu/drm/amd/amdgpu/
amdgpu_dma_buf.c
    391: struct ww_acquire_ctx *ticket = dma_resv_locking_ctx(obj->resv);   in amdgpu_dma_buf_move_notify() (local)
    416: if (ticket) {   in amdgpu_dma_buf_move_notify()
    421: r = dma_resv_lock(resv, ticket);   in amdgpu_dma_buf_move_notify()
    439: r = amdgpu_vm_handle_moved(adev, vm, ticket);   in amdgpu_dma_buf_move_notify()
amdgpu_vm.h
    497: struct ww_acquire_ctx *ticket,
    508: struct ww_acquire_ctx *ticket);
    554: void amdgpu_vm_bo_trace_cs(struct amdgpu_vm *vm, struct ww_acquire_ctx *ticket);
amdgpu_userq.c
    628: struct ww_acquire_ctx *ticket;   in amdgpu_userq_validate_bos() (local)
    670: ticket = &exec.ticket;   in amdgpu_userq_validate_bos()
    689: } else if (dma_resv_locking_ctx(resv) == ticket) {   in amdgpu_userq_validate_bos()
amdgpu_vm.c
    573: struct ww_acquire_ctx *ticket,   in amdgpu_vm_validate() (argument)
    612: while (ticket && !list_empty(&vm->evicted_user)) {   in amdgpu_vm_validate()
    620: if (dma_resv_locking_ctx(bo->tbo.base.resv) != ticket) {   in amdgpu_vm_validate()
    1594: struct ww_acquire_ctx *ticket)   in amdgpu_vm_handle_moved() (argument)
    1625: } else if (ticket && dma_resv_locking_ctx(resv) == ticket) {   in amdgpu_vm_handle_moved()
    2123: void amdgpu_vm_bo_trace_cs(struct amdgpu_vm *vm, struct ww_acquire_ctx *ticket)   in amdgpu_vm_bo_trace_cs() (argument)
    2137: ticket)   in amdgpu_vm_bo_trace_cs()
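The recurring amdgpu test is ownership of a reservation lock: dma_resv_locking_ctx() returns the acquire context a resv is currently locked under, and comparing it with the submission's exec ticket separates "this CS already holds the lock" from "locked elsewhere, skip or defer". The check reduces to a one-liner (helper name illustrative):

#include <linux/dma-resv.h>

/* True iff `resv` is locked under `ticket`, i.e. the current
 * submission reserved this BO itself and may update it in place. */
static bool demo_locked_by_us(struct dma_resv *resv,
                              struct ww_acquire_ctx *ticket)
{
        return ticket && dma_resv_locking_ctx(resv) == ticket;
}

As the hits above and in amdgpu_cs.c below show, amdgpu_vm_handle_moved() and amdgpu_cs_find_mapping() both hinge on this comparison.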
amdgpu_amdkfd_gpuvm.c
    466: struct ww_acquire_ctx *ticket)   in vm_validate_pt_pd_bos() (argument)
    472: ret = amdgpu_vm_validate(adev, vm, ticket,   in vm_validate_pt_pd_bos()
    1314: struct ww_acquire_ctx *ticket)   in process_validate_vms() (argument)
    1321: ret = vm_validate_pt_pd_bos(peer_vm, ticket);   in process_validate_vms()
    2942: ret = process_validate_vms(process_info, &exec.ticket);   in amdgpu_amdkfd_gpuvm_restore_process_bos()
    2972: ret = amdgpu_vm_handle_moved(adev, peer_vm, &exec.ticket);   in amdgpu_amdkfd_gpuvm_restore_process_bos()
amdgpu_cs.c
    1191: r = amdgpu_vm_handle_moved(adev, vm, &p->exec.ticket);   in amdgpu_cs_vm_handling()
    1394: amdgpu_vm_bo_trace_cs(&fpriv->vm, &p->exec.ticket);   in amdgpu_cs_submit()
    1831: if (dma_resv_locking_ctx((*bo)->tbo.base.resv) != &parser->exec.ticket)   in amdgpu_cs_find_mapping()
/drivers/gpu/drm/nouveau/
nouveau_gem.c
    407: struct ww_acquire_ctx ticket;   (member)
    452: ww_acquire_fini(&op->ticket);   in validate_fini()
    468: ww_acquire_init(&op->ticket, &reservation_ww_class);   in validate_init()
    501: ret = ttm_bo_reserve(&nvbo->bo, true, false, &op->ticket);   in validate_init()
    509: &op->ticket);   in validate_init()
    556: ww_acquire_done(&op->ticket);   in validate_init()
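nouveau predates drm_exec and drives the same protocol through TTM primitives: ttm_bo_reserve() under the op's ticket, and on -EDEADLK a switch to ttm_bo_reserve_slowpath() once the other reservations are dropped. A sketch of the per-BO step (the unlock-all-and-restart bookkeeping from validate_init() is elided; names are illustrative):

#include <drm/ttm/ttm_bo.h>

/* Reserves one BO under `ticket`; interruptible, not a trylock.
 * The slowpath may only run after the caller has dropped every
 * other reservation it holds, as nouveau's validate_init() does. */
static int demo_reserve_bo(struct ttm_buffer_object *bo,
                           struct ww_acquire_ctx *ticket)
{
        int ret = ttm_bo_reserve(bo, true, false, ticket);

        if (ret == -EDEADLK)
                ret = ttm_bo_reserve_slowpath(bo, true, ticket);
        return ret;
}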
/drivers/gpu/drm/virtio/
virtgpu_gem.c
    226: &objs->ticket);   in virtio_gpu_array_lock_resv()
    247: &objs->ticket);   in virtio_gpu_array_unlock_resv()
virtgpu_drv.h
    123: struct ww_acquire_ctx ticket;   (member)
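virtio-gpu keeps the ticket inside its object-array wrapper (the `member` hit above) and leaves the actual acquire and release to the drm_gem helpers, as the virtgpu_gem.c hits suggest. A sketch of that shape; the wrapper struct and helper names are hypothetical stand-ins for virtio_gpu_object_array and its lock/unlock functions:

#include <drm/drm_gem.h>

/* Object-array wrapper owning the acquire context. */
struct demo_obj_array {
        struct ww_acquire_ctx ticket;
        unsigned int nents;
        struct drm_gem_object **objs;
};

static int demo_array_lock(struct demo_obj_array *a)
{
        /* Locks every object's resv under the array's ticket. */
        return drm_gem_lock_reservations(a->objs, a->nents, &a->ticket);
}

static void demo_array_unlock(struct demo_obj_array *a)
{
        drm_gem_unlock_reservations(a->objs, a->nents, &a->ticket);
}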