/drivers/dma-buf/

dma-fence.c
     161  fence = kzalloc(sizeof(*fence), GFP_KERNEL);  in dma_fence_allocate_private_stub()
     171  &fence->flags);  in dma_fence_allocate_private_stub()
     175  return fence;  in dma_fence_allocate_private_stub()
     520  ret = fence->ops->wait(fence, intr, timeout);  in dma_fence_wait_timeout()
     559  fence->context, fence->seqno);  in dma_fence_release()
     577  fence->ops->release(fence);  in dma_fence_release()
     611  if (!fence->ops->enable_signaling(fence)) {  in __dma_fence_enable_signaling()
     988  if (fence->ops->set_deadline && !dma_fence_is_signaled(fence))  in dma_fence_set_deadline()
     989  fence->ops->set_deadline(fence, deadline);  in dma_fence_set_deadline()
    1112  return fence->ops->get_driver_name(fence);  in dma_fence_driver_name()
    [all …]

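The matches above trace the core lifecycle: allocation, the optional ops->wait, ops->release and ops->enable_signaling hooks, and deadline propagation. For orientation, a minimal fence provider looks roughly like the sketch below; my_fence and its helpers are made-up names, while dma_fence_init() and the two name callbacks are the real core API (all other dma_fence_ops hooks are optional and fall back to core defaults):

    #include <linux/dma-fence.h>
    #include <linux/slab.h>
    #include <linux/spinlock.h>

    /* hypothetical driver wrapper embedding struct dma_fence */
    struct my_fence {
            struct dma_fence base;
            spinlock_t lock;        /* guards signaling, passed to dma_fence_init() */
    };

    static const char *my_fence_get_driver_name(struct dma_fence *f)
    {
            return "my_driver";
    }

    static const char *my_fence_get_timeline_name(struct dma_fence *f)
    {
            return "my_timeline";
    }

    /* the two name callbacks are the only mandatory ops */
    static const struct dma_fence_ops my_fence_ops = {
            .get_driver_name = my_fence_get_driver_name,
            .get_timeline_name = my_fence_get_timeline_name,
    };

    static struct dma_fence *my_fence_create(u64 context, u64 seqno)
    {
            struct my_fence *f = kzalloc(sizeof(*f), GFP_KERNEL);

            if (!f)
                    return NULL;
            spin_lock_init(&f->lock);
            dma_fence_init(&f->base, &my_fence_ops, &f->lock, context, seqno);
            /* signal with dma_fence_signal(), drop with dma_fence_put() */
            return &f->base;
    }

A timeline context is normally allocated once with dma_fence_context_alloc(1), with seqnos increasing monotonically per emitted fence.
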
dma-fence-chain.c
      44  chain = to_dma_fence_chain(fence);  in dma_fence_chain_walk()
      46  dma_fence_put(fence);  in dma_fence_chain_walk()
      74  dma_fence_put(fence);  in dma_fence_chain_walk()
     155  dma_fence_put(fence);  in dma_fence_chain_enable_signaling()
     166  dma_fence_chain_for_each(fence, fence) {  in dma_fence_chain_signaled()
     170  dma_fence_put(fence);  in dma_fence_chain_signaled()
     205  dma_fence_put(chain->fence);  in dma_fence_chain_release()
     206  dma_fence_free(fence);  in dma_fence_chain_release()
     213  dma_fence_chain_for_each(fence, fence) {  in dma_fence_chain_set_deadline()
     242  struct dma_fence *fence,  in dma_fence_chain_init() argument
    [all …]

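Nearly every hit here is reference bookkeeping: dma_fence_chain_walk() and the dma_fence_chain_for_each() iterator take and drop a reference per link. A hedged sketch of the usual producer pattern, where my_timeline_append is a hypothetical helper and the chain calls are the real API:

    #include <linux/dma-fence-chain.h>
    #include <linux/errno.h>
    #include <linux/printk.h>

    /* append @fence as point @seqno behind the newest link *@head,
     * then walk the whole timeline */
    static int my_timeline_append(struct dma_fence **head,
                                  struct dma_fence *fence, u64 seqno)
    {
            struct dma_fence_chain *chain = dma_fence_chain_alloc();
            struct dma_fence *iter;

            if (!chain)
                    return -ENOMEM;

            /* consumes the caller's references on *head and @fence */
            dma_fence_chain_init(chain, *head, fence, seqno);
            *head = &chain->base;

            /* the iterator handles get/put itself; line 166 above even
             * reuses one variable as both head and cursor */
            dma_fence_chain_for_each(iter, *head)
                    pr_debug("point %llu signaled=%d\n", iter->seqno,
                             dma_fence_is_signaled(iter));
            return 0;
    }
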
sync_file.c
      73  sync_file->fence = dma_fence_get(fence);  in sync_file_create()
     112  fence = dma_fence_get(sync_file->fence);  in sync_file_get_fence()
     115  return fence;  in sync_file_get_fence()
     137  struct dma_fence *fence = sync_file->fence;  in sync_file_get_name() local
     148  fence->seqno);  in sync_file_get_name()
     175  fence = dma_fence_unwrap_merge(a->fence, b->fence);  in sync_file_merge()
     176  if (!fence) {  in sync_file_merge()
     180  sync_file->fence = fence;  in sync_file_merge()
     247  data.fence = fd;  in sync_file_ioctl_merge()
     313  dma_fence_unwrap_for_each(fence, &iter, sync_file->fence)  in sync_file_ioctl_fence_info()
    [all …]

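sync_file is the fd transport for fences: sync_file_create() takes its own reference (line 73) and sync_file_get_fence() resolves an fd back to a referenced fence. A hedged driver-side sketch; the fd plumbing is the generic anon-fd pattern, and my_export_fence/my_import_fence are made-up names:

    #include <linux/sync_file.h>
    #include <linux/file.h>
    #include <linux/fcntl.h>

    static int my_export_fence(struct dma_fence *fence)
    {
            struct sync_file *sync_file;
            int fd = get_unused_fd_flags(O_CLOEXEC);

            if (fd < 0)
                    return fd;
            sync_file = sync_file_create(fence);    /* grabs its own ref */
            if (!sync_file) {
                    put_unused_fd(fd);
                    return -ENOMEM;
            }
            fd_install(fd, sync_file->file);
            return fd;
    }

    static struct dma_fence *my_import_fence(int fd)
    {
            /* returns a referenced fence, or NULL for a bad fd */
            return sync_file_get_fence(fd);
    }
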
dma-resv.c
     129  dma_fence_put(fence);  in dma_resv_list_free()
     246  dma_fence_put(fence);  in dma_resv_reserve_fences()
     294  dma_fence_get(fence);  in dma_resv_add_fence()
     396  cursor->fence = dma_fence_get_rcu(cursor->fence);  in dma_resv_iter_walk_unlocked()
     397  if (!cursor->fence) {  in dma_resv_iter_walk_unlocked()
     429  return cursor->fence;  in dma_resv_iter_first_unlocked()
     458  return cursor->fence;  in dma_resv_iter_next_unlocked()
     482  return fence;  in dma_resv_iter_first()
     510  return fence;  in dma_resv_iter_next()
     640  *fence = NULL;  in dma_resv_get_singleton()
    [all …]

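The reservation object aggregates the fences attached to a buffer. The typical publish-then-enumerate flow, as a sketch (my_publish_fence is hypothetical; the dma_resv calls and usage flags are the real API):

    #include <linux/dma-resv.h>
    #include <linux/printk.h>

    static int my_publish_fence(struct dma_resv *resv, struct dma_fence *fence)
    {
            struct dma_resv_iter cursor;
            struct dma_fence *f;
            int ret;

            ret = dma_resv_lock(resv, NULL);
            if (ret)
                    return ret;

            /* slots must be reserved before adding (see line 246 context) */
            ret = dma_resv_reserve_fences(resv, 1);
            if (!ret)
                    dma_resv_add_fence(resv, fence, DMA_RESV_USAGE_WRITE);
            dma_resv_unlock(resv);
            if (ret)
                    return ret;

            /* lockless read-back; the cursor restarts on concurrent change,
             * which is what dma_resv_iter_walk_unlocked() above handles */
            dma_resv_iter_begin(&cursor, resv, DMA_RESV_USAGE_WRITE);
            dma_resv_for_each_fence_unlocked(&cursor, f)
                    pr_debug("would wait on %llu:%llu\n", f->context, f->seqno);
            dma_resv_iter_end(&cursor);
            return 0;
    }
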
st-dma-fence-chain.c
     194  dma_fence_put(fence);  in find_seqno()
     203  dma_fence_put(fence);  in find_seqno()
     216  dma_fence_get(fence);  in find_seqno()
     218  dma_fence_put(fence);  in find_seqno()
     229  dma_fence_get(fence);  in find_seqno()
     231  dma_fence_put(fence);  in find_seqno()
     273  dma_fence_put(fence);  in find_signaled()
     279  if (fence && fence != fc.chains[0]) {  in find_signaled()
     311  dma_fence_put(fence);  in find_out_of_order()
     326  fence ? fence->seqno : 0);  in find_out_of_order()
    [all …]

sw_sync.c
      52  __s32 fence; /* fd of new fence */  member
      87  if (fence->ops != &timeline_fence_ops)  in dma_fence_to_sync_pt()
     158  spin_lock_irqsave(fence->lock, flags);  in timeline_fence_release()
     166  dma_fence_free(fence);  in timeline_fence_release()
     173  return !__dma_fence_is_later(fence, fence->seqno, parent->value);  in timeline_fence_signaled()
     373  data.fence = fd;  in sw_sync_ioctl_create_fence()
     409  struct dma_fence *fence;  in sw_sync_ioctl_get_deadline() local
     421  if (!fence)  in sw_sync_ioctl_get_deadline()
     424  pt = dma_fence_to_sync_pt(fence);  in sw_sync_ioctl_get_deadline()
     438  dma_fence_put(fence);  in sw_sync_ioctl_get_deadline()
    [all …]

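sw_sync is the debugfs test timeline: userspace creates fences at timeline points and signals them by incrementing the counter. There is no exported uapi header, so the struct (whose fence member is the hit at line 52) and the ioctl numbers below mirror the driver source and should be treated as illustrative:

    /* userspace test sketch */
    #include <fcntl.h>
    #include <stdint.h>
    #include <sys/ioctl.h>
    #include <unistd.h>

    struct sw_sync_create_fence_data {
            uint32_t value;         /* timeline point the fence signals at */
            char     name[32];
            int32_t  fence;         /* out: fd of the new fence */
    };

    #define SW_SYNC_IOC_MAGIC        'W'
    #define SW_SYNC_IOC_CREATE_FENCE _IOWR(SW_SYNC_IOC_MAGIC, 0, \
                                           struct sw_sync_create_fence_data)
    #define SW_SYNC_IOC_INC          _IOW(SW_SYNC_IOC_MAGIC, 1, uint32_t)

    int main(void)
    {
            int tl = open("/sys/kernel/debug/sync/sw_sync", O_RDWR);
            struct sw_sync_create_fence_data data = { .value = 1, .name = "test" };
            uint32_t inc = 1;

            if (tl < 0)
                    return 1;
            ioctl(tl, SW_SYNC_IOC_CREATE_FENCE, &data); /* fence at point 1 */
            ioctl(tl, SW_SYNC_IOC_INC, &inc);           /* advance: signals it */
            close(data.fence);
            close(tl);
            return 0;
    }
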
/drivers/gpu/drm/i915/

i915_sw_fence.c
     131  return fence->fn(fence, state);  in __i915_sw_fence_notify()
     245  fence->fn = fn;  in __i915_sw_fence_init()
     247  fence->flags = 0;  in __i915_sw_fence_init()
     258  fence->error = 0;  in i915_sw_fence_reinit()
     379  wq->private = fence;  in __i915_sw_fence_await_sw_fence()
     436  fence = xchg(&cb->base.fence, NULL);  in timer_i915_sw_fence_wake()
     437  if (!fence)  in timer_i915_sw_fence_wake()
     461  fence = xchg(&cb->base.fence, NULL);  in dma_i915_sw_fence_wake_timer()
     462  if (fence) {  in dma_i915_sw_fence_wake_timer()
     514  cb->fence = fence;  in i915_sw_fence_await_dma_fence()
    [all …]

i915_sw_fence.h
      42  void __i915_sw_fence_init(struct i915_sw_fence *fence,
      47  #define i915_sw_fence_init(fence, fn) \  argument
      52  __i915_sw_fence_init((fence), (fn), #fence, &__key); \
      55  #define i915_sw_fence_init(fence, fn) \  argument
      58  __i915_sw_fence_init((fence), (fn), NULL, NULL); \
      65  void i915_sw_fence_fini(struct i915_sw_fence *fence);
      81  struct i915_sw_fence *fence;  member
     103  return atomic_read(&fence->pending) <= 0;  in i915_sw_fence_signaled()
     108  return atomic_read(&fence->pending) < 0;  in i915_sw_fence_done()
     113  wait_event(fence->wait, i915_sw_fence_done(fence));  in i915_sw_fence_wait()
    [all …]

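i915_sw_fence is an i915-internal collective fence: a pending count that signals once every awaited fence (dma_fence or another sw_fence) has completed, with a notify callback for the FENCE_COMPLETE and FENCE_FREE states. A hedged sketch based on this header and the selftest helpers further down; my_notify/my_wait_all are made-up names, and the API is not available outside i915:

    #include <linux/notifier.h>
    #include "i915_sw_fence.h"

    static int my_notify(struct i915_sw_fence *fence,
                         enum i915_sw_fence_notify state)
    {
            switch (state) {
            case FENCE_COMPLETE:    /* all awaited fences signaled */
                    break;
            case FENCE_FREE:        /* last reference dropped */
                    break;
            }
            return NOTIFY_DONE;
    }

    static int my_wait_all(struct dma_fence **fences, int count)
    {
            struct i915_sw_fence wait;
            int i, err = 0;

            i915_sw_fence_init(&wait, my_notify);
            for (i = 0; i < count; i++) {
                    /* <0 error, 0 already signaled, >0 queued */
                    err = i915_sw_fence_await_dma_fence(&wait, fences[i],
                                                        0, GFP_KERNEL);
                    if (err < 0)
                            break;
            }
            i915_sw_fence_commit(&wait);    /* drop init ref; may signal */
            if (err >= 0)
                    i915_sw_fence_wait(&wait);
            i915_sw_fence_fini(&wait);
            return err < 0 ? err : 0;
    }
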
i915_request.c
    1078  fence = NULL;  in i915_request_await_start()
    1108  fence = &prev->fence;  in i915_request_await_start()
    1111  if (!fence)  in i915_request_await_start()
    1117  fence, 0,  in i915_request_await_start()
    1421  fence = *child++;  in i915_request_await_execution()
    1425  if (fence->context == rq->fence.context)  in i915_request_await_execution()
    1523  fence = *child++;  in i915_request_await_dma_fence()
    1532  if (fence->context == rq->fence.context)  in i915_request_await_dma_fence()
    1538  fence))  in i915_request_await_dma_fence()
    1554  fence);  in i915_request_await_dma_fence()
    [all …]

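The context comparisons at lines 1425 and 1532 implement the standard dedup rule: fences on one dma_fence context are totally ordered by seqno, so a request never needs to await a fence from its own timeline. In isolation the rule looks like this (illustrative helper, not i915 API):

    #include <linux/dma-fence.h>

    /* return the later of two fences on one timeline, NULL if unordered */
    static struct dma_fence *my_later_of(struct dma_fence *a,
                                         struct dma_fence *b)
    {
            if (a->context != b->context)
                    return NULL;    /* different timelines: keep both */
            return dma_fence_is_later(a, b) ? a : b;
    }
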
/drivers/gpu/drm/i915/gt/

intel_ggtt_fencing.c
    (note: the "fences" here are i915 GGTT fence registers, struct i915_fence_reg, used for detiling; they are unrelated to dma_fence)
      82  val = fence->start + fence->size - I965_FENCE_PAGE;  in i965_write_fence_reg()
     248  GEM_BUG_ON(old->fence != fence);  in fence_update()
     276  vma->fence = fence;  in fence_update()
     293  struct i915_fence_reg *fence = vma->fence;  in i915_vma_revoke_fence() local
     297  if (!fence)  in i915_vma_revoke_fence()
     326  return fence->vma && i915_vma_is_active(fence->vma);  in fence_is_active()
     336  GEM_BUG_ON(fence->vma && fence->vma->fence != fence);  in fence_find()
     353  return fence;  in fence_find()
     374  fence = vma->fence;  in __i915_vma_pin_fence()
     397  GEM_BUG_ON(vma->fence != (set ? fence : NULL));  in __i915_vma_pin_fence()
    [all …]

/drivers/gpu/drm/nouveau/

nouveau_fence.c
      53  list_del(&fence->head);  in nouveau_fence_signal()
     151  fence = list_first_entry_or_null(&fctx->pending, typeof(*fence), head);  in nouveau_fence_uevent_work()
     152  if (fence) {  in nouveau_fence_uevent_work()
     224  ret = fctx->emit(fence);  in nouveau_fence_emit()
     371  struct dma_fence *fence;  in nouveau_fence_sync() local
     375  fence) {  in nouveau_fence_sync()
     426  fence = kzalloc(sizeof(*fence), GFP_KERNEL);  in nouveau_fence_create()
     427  if (!fence)  in nouveau_fence_create()
     430  fence->channel = chan;  in nouveau_fence_create()
     432  *pfence = fence;  in nouveau_fence_create()
    [all …]

/drivers/gpu/drm/scheduler/

sched_fence.c
      52  struct dma_fence *fence)  in drm_sched_fence_set_parent() argument
     103  if (!WARN_ON_ONCE(!fence))  in drm_sched_fence_free_rcu()
     118  if (!WARN_ON_ONCE(fence->sched))  in drm_sched_fence_free()
     134  dma_fence_put(fence->parent);  in drm_sched_fence_release_scheduled()
     149  dma_fence_put(&fence->scheduled);  in drm_sched_fence_release_finished()
     168  fence->deadline = deadline;  in drm_sched_fence_set_deadline_finished()
     215  if (fence == NULL)  in drm_sched_fence_alloc()
     218  fence->owner = owner;  in drm_sched_fence_alloc()
     220  spin_lock_init(&fence->lock);  in drm_sched_fence_alloc()
     222  return fence;  in drm_sched_fence_alloc()
    [all …]

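A drm_sched_fence bundles two dma_fences for one job: scheduled (the job was picked up and handed to the ring) and finished (the hardware fence, stored as parent via drm_sched_fence_set_parent() above, completed). A hedged sketch of inspecting one from a generic dma_fence; my_describe is hypothetical, while to_drm_sched_fence() is the real helper and returns NULL for foreign fences:

    #include <drm/gpu_scheduler.h>
    #include <linux/printk.h>

    static void my_describe(struct dma_fence *f)
    {
            struct drm_sched_fence *s_fence = to_drm_sched_fence(f);

            if (!s_fence)
                    return;         /* not a scheduler fence */

            pr_info("scheduled=%d finished=%d\n",
                    dma_fence_is_signaled(&s_fence->scheduled),
                    dma_fence_is_signaled(&s_fence->finished));
    }
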
/drivers/gpu/drm/vmwgfx/

vmwgfx_fence.c
      77  fence->destroy(fence);  in vmw_fence_obj_destroy()
     254  fence = kzalloc(sizeof(*fence), GFP_KERNEL);  in vmw_fence_create()
     255  if (unlikely(!fence))  in vmw_fence_create()
     262  *p_fence = fence;  in vmw_fence_create()
     266  kfree(fence);  in vmw_fence_create()
     274  container_of(fence, struct vmw_user_fence, fence);  in vmw_user_fence_destroy()
     284  struct vmw_fence_obj *fence = &ufence->fence;  in vmw_user_fence_base_release() local
     453  fence = &(container_of(base, struct vmw_user_fence, base)->fence);  in vmw_fence_obj_wait_ioctl()
     492  fence = &(container_of(base, struct vmw_user_fence, base)->fence);  in vmw_fence_obj_signaled_ioctl()
     704  if (!fence) {  in vmw_fence_event_ioctl()
    [all …]

/drivers/gpu/drm/amd/amdgpu/

amdgpu_amdkfd_fence.c
      69  fence = kzalloc(sizeof(*fence), GFP_KERNEL);  in amdgpu_amdkfd_fence_create()
      70  if (fence == NULL)  in amdgpu_amdkfd_fence_create()
      75  fence->mm = mm;  in amdgpu_amdkfd_fence_create()
      79  dma_fence_init(&fence->base, &amdkfd_fence_ops, &fence->lock,  in amdgpu_amdkfd_fence_create()
      82  return fence;  in amdgpu_amdkfd_fence_create()
      94  return fence;  in to_amdgpu_amdkfd_fence()
     123  if (!fence)  in amdkfd_fence_enable_signaling()
     154  if (WARN_ON(!fence))  in amdkfd_fence_release()
     157  mmdrop(fence->mm);  in amdkfd_fence_release()
     175  if (!fence)  in amdkfd_fence_check_mm()
    [all …]

amdgpu_userq_fence.c
     167  dma_fence_put(fence);  in amdgpu_userq_fence_driver_process()
     186  f = &fence->base;  in amdgpu_userq_fence_driver_destroy()
     246  dma_fence_get(fence);  in amdgpu_userq_fence_create()
     282  dma_fence_put(fence);  in amdgpu_userq_fence_create()
     286  *f = fence;  in amdgpu_userq_fence_create()
     408  dma_fence_put(fence);  in amdgpu_userq_fence_cleanup()
     568  dma_fence_put(fence);  in amdgpu_userq_signal_ioctl()
     707  &fence);  in amdgpu_userq_wait_ioctl()
     725  &fence);  in amdgpu_userq_wait_ioctl()
     817  &fence);  in amdgpu_userq_wait_ioctl()
    [all …]

/drivers/gpu/drm/vgem/

vgem_fence.c
      50  struct vgem_fence *fence = container_of(base, typeof(*fence), base);  in vgem_fence_release() local
      64  struct vgem_fence *fence = timer_container_of(fence, t, timer);  in vgem_fence_timeout() local
      74  fence = kzalloc(sizeof(*fence), GFP_KERNEL);  in vgem_fence_create()
      75  if (!fence)  in vgem_fence_create()
      79  dma_fence_init(&fence->base, &vgem_fence_ops, &fence->lock,  in vgem_fence_create()
      87  return &fence->base;  in vgem_fence_create()
     133  if (!fence) {  in vgem_fence_attach_ioctl()
     203  fence = idr_replace(&vfile->fence_idr, NULL, arg->fence);  in vgem_fence_signal_ioctl()
     205  if (!fence)  in vgem_fence_signal_ioctl()
     207  if (IS_ERR(fence))  in vgem_fence_signal_ioctl()
    [all …]

/drivers/gpu/drm/xe/

xe_gt_tlb_invalidation.c
      42  if (WARN_ON_ONCE(!fence->gt))  in xe_gt_tlb_invalidation_fence_fini()
      56  dma_fence_signal(&fence->base);  in __invalidation_fence_signal()
      58  dma_fence_put(&fence->base);  in __invalidation_fence_signal()
      64  list_del(&fence->link);  in invalidation_fence_signal()
      70  if (WARN_ON_ONCE(!fence->gt))  in xe_gt_tlb_invalidation_fence_signal()
      73  __invalidation_fence_signal(gt_to_xe(fence->gt), fence);  in xe_gt_tlb_invalidation_fence_signal()
      98  fence->base.error = -ETIME;  in xe_gt_tlb_fence_timeout()
     198  xe_gt_assert(gt, fence);  in send_tlb_invalidation()
     208  fence->seqno = seqno;  in send_tlb_invalidation()
     401  xe_gt_assert(gt, fence);  in xe_gt_tlb_invalidation_range()
    [all …]

xe_hw_fence.c
      45  struct xe_hw_fence *fence =  in fence_free() local
      48  if (!WARN_ON_ONCE(!fence))  in fence_free()
      55  struct xe_hw_fence *fence, *next;  in hw_fence_irq_run_cb() local
      66  trace_xe_hw_fence_signal(fence);  in hw_fence_irq_run_cb()
      67  list_del_init(&fence->irq_link);  in hw_fence_irq_run_cb()
      86  struct xe_hw_fence *fence, *next;  in xe_hw_fence_irq_finish() local
      95  list_del_init(&fence->irq_link);  in xe_hw_fence_irq_finish()
      97  dma_fence_put(&fence->dma);  in xe_hw_fence_irq_finish()
     161  return fence->name;  in xe_hw_fence_get_timeline_name()
     167  struct xe_device *xe = fence->xe;  in xe_hw_fence_signaled() local
    [all …]

xe_sched_job.c
     125  if (IS_ERR(fence)) {  in xe_sched_job_create()
     126  err = PTR_ERR(fence);  in xe_sched_job_create()
     129  job->ptrs[i].lrc_fence = fence;  in xe_sched_job_create()
     177  dma_fence_put(job->fence);  in xe_sched_job_destroy()
     220  struct dma_fence *fence = dma_fence_chain_contained(job->fence);  in xe_sched_job_started() local
     230  struct dma_fence *fence = dma_fence_chain_contained(job->fence);  in xe_sched_job_completed() local
     246  struct dma_fence *fence, *prev;  in xe_sched_job_arm() local
     270  fence = job->ptrs[i].lrc_fence;  in xe_sched_job_arm()
     283  fence = &chain->base;  in xe_sched_job_arm()
     286  job->fence = dma_fence_get(fence);  /* Pairs with put in scheduler */  in xe_sched_job_arm()
    [all …]

/drivers/gpu/drm/i915/selftests/

lib_sw_fence.c
      42  atomic_set(&fence->pending, 1);  in __onstack_fence_init()
      43  fence->error = 0;  in __onstack_fence_init()
      44  fence->fn = nop_fence_notify;  in __onstack_fence_init()
      49  if (!fence->fn)  in onstack_fence_fini()
      52  i915_sw_fence_commit(fence);  in onstack_fence_fini()
      53  i915_sw_fence_fini(fence);  in onstack_fence_fini()
      85  struct i915_sw_fence fence;  member
      95  struct heap_fence *h = container_of(fence, typeof(*h), fence);  in heap_fence_notify()
     102  heap_fence_put(&h->fence);  in heap_fence_notify()
     119  return &h->fence;  in heap_fence_create()
    [all …]

/drivers/gpu/drm/radeon/

radeon_fence.c
     177  seq = atomic64_read(&fence->rdev->fence_drv[fence->ring].last_seq);  in radeon_fence_check_signaled()
     180  radeon_irq_kms_sw_irq_put(fence->rdev, fence->ring);  in radeon_fence_check_signaled()
     428  if (!fence)  in radeon_fence_signaled()
     431  if (radeon_fence_seq_signaled(fence->rdev, fence->seq, fence->ring)) {  in radeon_fence_signaled()
     547  seq[fence->ring] = fence->seq;  in radeon_fence_wait_timeout()
     648  return fence;  in radeon_fence_ref()
     662  *fence = NULL;  in radeon_fence_unref()
     709  if (!fence)  in radeon_fence_need_sync()
     717  if (fence->seq <= fdrv->sync_seq[fence->ring])  in radeon_fence_need_sync()
     737  if (!fence)  in radeon_fence_note_sync()
    [all …]

radeon_sync.c
      63  struct radeon_fence *fence)  in radeon_sync_fence() argument
      67  if (!fence)  in radeon_sync_fence()
      70  other = sync->sync_to[fence->ring];  in radeon_sync_fence()
      71  sync->sync_to[fence->ring] = radeon_fence_later(fence, other);  in radeon_sync_fence()
      73  if (fence->is_vm_update) {  in radeon_sync_fence()
      95  struct radeon_fence *fence;  in radeon_sync_resv() local
     100  fence = to_radeon_fence(f);  in radeon_sync_resv()
     101  if (fence && fence->rdev == rdev)  in radeon_sync_resv()
     102  radeon_sync_fence(sync, fence);  in radeon_sync_resv()
     181  radeon_fence_note_sync(fence, ring);  in radeon_sync_rings()
    [all …]

/drivers/gpu/drm/

drm_syncobj.c
     286  wait->fence = fence;  in drm_syncobj_fence_add_wait()
     379  if (fence)  in drm_syncobj_replace_fence()
     386  rcu_assign_pointer(syncobj->fence, fence);  in drm_syncobj_replace_fence()
     508  *fence = wait.fence;  in drm_syncobj_find_fence()
     577  if (fence)  in drm_syncobj_create()
    1042  fence = rcu_dereference_protected(syncobj->fence,  in syncobj_wait_syncobj_func()
    1051  wait->fence = fence;  in syncobj_wait_syncobj_func()
    1119  entries[i].fence = fence;  in drm_syncobj_array_wait_timeout()
    1151  fence = entries[i].fence;  in drm_syncobj_array_wait_timeout()
    1163  fence = entries[i].fence;  in drm_syncobj_array_wait_timeout()
    [all …]

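A syncobj is a mutable userspace handle whose current payload is a dma_fence (the RCU-protected pointer replaced at line 386). The usual driver-side lookup and replace, sketched with hypothetical helpers around the real drm_syncobj calls:

    #include <drm/drm_syncobj.h>
    #include <drm/drm_file.h>

    /* wait on whatever fence the handle currently carries */
    static int my_wait_syncobj(struct drm_file *file, u32 handle)
    {
            struct dma_fence *fence;
            int ret;

            /* point 0, no flags: the binary (non-timeline) case */
            ret = drm_syncobj_find_fence(file, handle, 0, 0, &fence);
            if (ret)
                    return ret;
            ret = dma_fence_wait(fence, true);      /* interruptible */
            dma_fence_put(fence);
            return ret;
    }

    /* make the handle carry @fence from now on */
    static int my_signal_syncobj(struct drm_file *file, u32 handle,
                                 struct dma_fence *fence)
    {
            struct drm_syncobj *syncobj = drm_syncobj_find(file, handle);

            if (!syncobj)
                    return -ENOENT;
            drm_syncobj_replace_fence(syncobj, fence);  /* takes its own ref */
            drm_syncobj_put(syncobj);
            return 0;
    }
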
/drivers/gpu/drm/i915/gem/

i915_gem_wait.c
      26  if (dma_fence_is_i915(fence))  in i915_gem_object_wait_fence()
      29  return dma_fence_wait_timeout(fence,  in i915_gem_object_wait_fence()
      38  struct dma_fence *fence;  in i915_gem_object_boost() local
      59  if (dma_fence_is_i915(fence) &&  in i915_gem_object_boost()
      71  struct dma_fence *fence;  in i915_gem_object_wait_reservation() local
      97  if (dma_fence_is_signaled(fence) || !dma_fence_is_i915(fence))  in fence_set_priority()
     100  rq = to_request(fence);  in fence_set_priority()
     112  if (dma_fence_is_signaled(fence))  in i915_gem_fence_wait_priority()
     118  if (dma_fence_is_array(fence)) {  in i915_gem_fence_wait_priority()
     135  fence_set_priority(fence, attr);  in i915_gem_fence_wait_priority()
    [all …]

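The dma_fence_is_array() branch at line 118 is the generic pattern for looking through a fence container before acting on the leaves. Sketched in isolation (my_for_each_leaf is hypothetical; the array helpers are the real API from linux/dma-fence-array.h):

    #include <linux/dma-fence-array.h>
    #include <linux/printk.h>

    static void my_for_each_leaf(struct dma_fence *fence)
    {
            struct dma_fence_array *array = to_dma_fence_array(fence);
            unsigned int i;

            if (!array) {
                    /* plain fence: it is its own only leaf */
                    pr_debug("leaf %llu:%llu\n", fence->context, fence->seqno);
                    return;
            }
            for (i = 0; i < array->num_fences; i++)
                    pr_debug("leaf %llu:%llu\n",
                             array->fences[i]->context,
                             array->fences[i]->seqno);
    }
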
/drivers/gpu/host1x/

intr.c
      15  struct host1x_syncpt_fence *fence)  in host1x_intr_add_fence_to_list() argument
      22  list_add(&fence->list, &fence_in_list->list);  in host1x_intr_add_fence_to_list()
      28  list_add(&fence->list, &list->list);  in host1x_intr_add_fence_to_list()
      33  struct host1x_syncpt_fence *fence;  in host1x_intr_update_hw_state() local
      49  INIT_LIST_HEAD(&fence->list);  in host1x_intr_add_fence_locked()
      52  host1x_intr_update_hw_state(host, fence->sp);  in host1x_intr_add_fence_locked()
      62  if (list_empty(&fence->list)) {  in host1x_intr_remove_fence()
      67  list_del_init(&fence->list);  in host1x_intr_remove_fence()
      78  struct host1x_syncpt_fence *fence, *tmp;  in host1x_intr_handle_interrupt() local
      91  list_del_init(&fence->list);  in host1x_intr_handle_interrupt()
    [all …]