| /linux/drivers/dma-buf/ |
| A D | dma-fence.c |
    161 fence = kzalloc(sizeof(*fence), GFP_KERNEL); in dma_fence_allocate_private_stub()
    415 if (!fence) in dma_fence_signal_timestamp()
    516 ret = fence->ops->wait(fence, intr, timeout); in dma_fence_wait_timeout()
    541 fence->ops->get_driver_name(fence), in dma_fence_release()
    542 fence->ops->get_timeline_name(fence), in dma_fence_release()
    543 fence->context, fence->seqno)) { in dma_fence_release()
    560 fence->ops->release(fence); in dma_fence_release()
    594 if (!fence->ops->enable_signaling(fence)) { in __dma_fence_enable_signaling()
    972 fence->ops->set_deadline(fence, deadline); in dma_fence_set_deadline()
    986 fence->ops->get_driver_name(fence), in dma_fence_describe()
    [all …]
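
The hits above are the core dma_fence machinery: dma_fence_release() and __dma_fence_enable_signaling() dispatch through the driver-supplied dma_fence_ops. Below is a minimal sketch of a driver fence built on this API; the example_* names are hypothetical, and only get_driver_name/get_timeline_name are mandatory ops.

```c
#include <linux/dma-fence.h>
#include <linux/slab.h>
#include <linux/spinlock.h>

static const char *example_get_driver_name(struct dma_fence *fence)
{
	return "example";
}

static const char *example_get_timeline_name(struct dma_fence *fence)
{
	return "example-timeline";
}

static const struct dma_fence_ops example_fence_ops = {
	.get_driver_name   = example_get_driver_name,
	.get_timeline_name = example_get_timeline_name,
	/* .enable_signaling, .signaled, .release, ... are optional */
};

static DEFINE_SPINLOCK(example_fence_lock);

static struct dma_fence *example_fence_create(u64 context, u64 seqno)
{
	struct dma_fence *fence = kzalloc(sizeof(*fence), GFP_KERNEL);

	if (!fence)
		return NULL;

	/* context would normally come from dma_fence_context_alloc(1) */
	dma_fence_init(fence, &example_fence_ops, &example_fence_lock,
		       context, seqno);
	return fence;
}
```

Once the backing work completes, the producer calls dma_fence_signal() and drops its reference with dma_fence_put().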
|
| A D | sync_file.c |
    73 sync_file->fence = dma_fence_get(fence); in sync_file_create()
    112 fence = dma_fence_get(sync_file->fence); in sync_file_get_fence()
    115 return fence; in sync_file_get_fence()
    137 struct dma_fence *fence = sync_file->fence; in sync_file_get_name() local
    140 fence->ops->get_driver_name(fence), in sync_file_get_name()
    141 fence->ops->get_timeline_name(fence), in sync_file_get_name()
    169 fence = dma_fence_unwrap_merge(a->fence, b->fence); in sync_file_merge()
    170 if (!fence) { in sync_file_merge()
    174 sync_file->fence = fence; in sync_file_merge()
    297 dma_fence_unwrap_for_each(fence, &iter, sync_file->fence) in sync_file_ioctl_fence_info()
    [all …]
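
sync_file.c is the bridge between in-kernel fences and file descriptors. A hedged sketch of the two directions follows, with the fd-handling error paths trimmed; the example_* names are hypothetical.

```c
#include <linux/dma-fence.h>
#include <linux/fcntl.h>
#include <linux/file.h>
#include <linux/sync_file.h>

/* Export a fence to userspace as a sync_file fd. */
static int example_fence_to_fd(struct dma_fence *fence)
{
	struct sync_file *sync_file;
	int fd = get_unused_fd_flags(O_CLOEXEC);

	if (fd < 0)
		return fd;

	sync_file = sync_file_create(fence);	/* takes its own fence reference */
	if (!sync_file) {
		put_unused_fd(fd);
		return -ENOMEM;
	}

	fd_install(fd, sync_file->file);
	return fd;
}

/* Resolve a sync_file fd back to a fence reference (caller must put it). */
static struct dma_fence *example_fd_to_fence(int fd)
{
	return sync_file_get_fence(fd);	/* NULL if fd is not a sync_file */
}
```

The merge path listed above (sync_file_merge()) relies on dma_fence_unwrap_merge() to flatten fence arrays and chains from both inputs into a single combined fence.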
|
| A D | dma-fence-chain.c |
    44 chain = to_dma_fence_chain(fence); in dma_fence_chain_walk()
    46 dma_fence_put(fence); in dma_fence_chain_walk()
    74 dma_fence_put(fence); in dma_fence_chain_walk()
    155 dma_fence_put(fence); in dma_fence_chain_enable_signaling()
    166 dma_fence_chain_for_each(fence, fence) { in dma_fence_chain_signaled()
    170 dma_fence_put(fence); in dma_fence_chain_signaled()
    205 dma_fence_put(chain->fence); in dma_fence_chain_release()
    206 dma_fence_free(fence); in dma_fence_chain_release()
    213 dma_fence_chain_for_each(fence, fence) { in dma_fence_chain_set_deadline()
    243 struct dma_fence *fence, in dma_fence_chain_init() argument
    [all …]
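
dma-fence-chain.c implements the timeline container used by syncobj timelines: each node wraps one fence plus a link to the previous node. Below is a sketch of appending a point and walking a chain, assuming the reference-ownership rules documented in include/linux/dma-fence-chain.h; the example_* names are hypothetical.

```c
#include <linux/dma-fence-chain.h>
#include <linux/printk.h>

/* Append @fence to the timeline at @seqno; consumes the references to
 * @prev (old head, may be NULL) and @fence, returns the new head. */
static struct dma_fence *example_chain_add(struct dma_fence *prev,
					   struct dma_fence *fence,
					   u64 seqno)
{
	struct dma_fence_chain *chain = dma_fence_chain_alloc();

	if (!chain)
		return NULL;

	dma_fence_chain_init(chain, prev, fence, seqno);
	return &chain->base;
}

/* Walk every node; the iterator handles the get/put dance internally. */
static void example_chain_dump(struct dma_fence *head)
{
	struct dma_fence *iter;

	dma_fence_chain_for_each(iter, head)
		pr_info("point %llu signaled=%d\n", iter->seqno,
			dma_fence_is_signaled(iter));
}
```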
|
| A D | dma-resv.c |
    129 dma_fence_put(fence); in dma_resv_list_free()
    246 dma_fence_put(fence); in dma_resv_reserve_fences()
    294 dma_fence_get(fence); in dma_resv_add_fence()
    395 cursor->fence = dma_fence_get_rcu(cursor->fence); in dma_resv_iter_walk_unlocked()
    396 if (!cursor->fence) { in dma_resv_iter_walk_unlocked()
    428 return cursor->fence; in dma_resv_iter_first_unlocked()
    457 return cursor->fence; in dma_resv_iter_next_unlocked()
    481 return fence; in dma_resv_iter_first()
    509 return fence; in dma_resv_iter_next()
    639 *fence = NULL; in dma_resv_get_singleton()
    [all …]
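
dma-resv.c manages the per-buffer collection of fences behind a ww_mutex, with RCU-safe cursors for the unlocked iterators hit above. A sketch of the usual producer and consumer sides; error handling is minimal and the example_* names are hypothetical.

```c
#include <linux/dma-resv.h>
#include <linux/printk.h>

/* Producer: publish a fence on a buffer under the reservation lock. */
static int example_resv_add(struct dma_resv *obj, struct dma_fence *fence)
{
	int ret;

	ret = dma_resv_lock(obj, NULL);
	if (ret)
		return ret;

	ret = dma_resv_reserve_fences(obj, 1);	/* slots must be reserved first */
	if (!ret)
		dma_resv_add_fence(obj, fence, DMA_RESV_USAGE_WRITE);

	dma_resv_unlock(obj);
	return ret;
}

/* Consumer: unlocked (RCU) walk over all fences up to READ usage. */
static void example_resv_dump(struct dma_resv *obj)
{
	struct dma_resv_iter cursor;
	struct dma_fence *fence;

	dma_resv_iter_begin(&cursor, obj, DMA_RESV_USAGE_READ);
	dma_resv_for_each_fence_unlocked(&cursor, fence)
		pr_info("fence %llu:%llu\n", fence->context, fence->seqno);
	dma_resv_iter_end(&cursor);
}
```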
|
| A D | st-dma-fence-chain.c |
    194 dma_fence_put(fence); in find_seqno()
    203 dma_fence_put(fence); in find_seqno()
    216 dma_fence_get(fence); in find_seqno()
    218 dma_fence_put(fence); in find_seqno()
    229 dma_fence_get(fence); in find_seqno()
    231 dma_fence_put(fence); in find_seqno()
    273 dma_fence_put(fence); in find_signaled()
    279 if (fence && fence != fc.chains[0]) { in find_signaled()
    311 dma_fence_put(fence); in find_out_of_order()
    326 fence ? fence->seqno : 0); in find_out_of_order()
    [all …]
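
The find_seqno/find_signaled selftests exercise dma_fence_chain_find_seqno(), which replaces a chain-head reference with the node covering a requested timeline point. A hedged sketch of how a caller might use it, with semantics as documented in dma-fence-chain.c; the example_* name is hypothetical.

```c
#include <linux/dma-fence-chain.h>
#include <linux/err.h>

static struct dma_fence *example_find_point(struct dma_fence *head, u64 seqno)
{
	struct dma_fence *fence = dma_fence_get(head);
	int err;

	/* On success, fence now points at the node to wait on, or is NULL
	 * if the requested point has already signaled. */
	err = dma_fence_chain_find_seqno(&fence, seqno);
	if (err) {
		dma_fence_put(fence);	/* seqno lies beyond the chain */
		return ERR_PTR(err);
	}
	return fence;
}
```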
|
| A D | sw_sync.c |
    52 __s32 fence; /* fd of new fence */ member
    87 if (fence->ops != &timeline_fence_ops) in dma_fence_to_sync_pt()
    158 spin_lock_irqsave(fence->lock, flags); in timeline_fence_release()
    166 dma_fence_free(fence); in timeline_fence_release()
    173 return !__dma_fence_is_later(fence->seqno, parent->value, fence->ops); in timeline_fence_signaled()
    200 spin_lock_irqsave(fence->lock, flags); in timeline_fence_set_deadline()
    395 data.fence = fd; in sw_sync_ioctl_create_fence()
    431 struct dma_fence *fence; in sw_sync_ioctl_get_deadline() local
    443 if (!fence) in sw_sync_ioctl_get_deadline()
    446 pt = dma_fence_to_sync_pt(fence); in sw_sync_ioctl_get_deadline()
    [all …]
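
sw_sync is the software-only test timeline exposed through debugfs when CONFIG_SW_SYNC is enabled. Userspace drives it with two ioctls that are defined only inside sw_sync.c (there is no exported uapi header), so the struct and ioctl numbers in this userspace sketch are copied on the assumption that they still match the file listed above.

```c
/* Userspace sketch: create a fence fd at point 1, then signal it. */
#include <fcntl.h>
#include <stdint.h>
#include <sys/ioctl.h>
#include <unistd.h>

struct sw_sync_create_fence_data {
	uint32_t value;		/* timeline point the fence signals at */
	char	 name[32];	/* debug name */
	int32_t	 fence;		/* out: fd of new fence */
};

#define SW_SYNC_IOC_MAGIC	 'W'
#define SW_SYNC_IOC_CREATE_FENCE _IOWR(SW_SYNC_IOC_MAGIC, 0, struct sw_sync_create_fence_data)
#define SW_SYNC_IOC_INC		 _IOW(SW_SYNC_IOC_MAGIC, 1, uint32_t)

int main(void)
{
	struct sw_sync_create_fence_data data = { .value = 1, .name = "test" };
	uint32_t inc = 1;
	int timeline = open("/sys/kernel/debug/sync/sw_sync", O_RDWR);

	if (timeline < 0)
		return 1;

	ioctl(timeline, SW_SYNC_IOC_CREATE_FENCE, &data); /* data.fence is a sync_file fd */
	ioctl(timeline, SW_SYNC_IOC_INC, &inc);           /* advance timeline -> fence signals */

	close(data.fence);
	close(timeline);
	return 0;
}
```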
|
| /linux/include/linux/ |
| A D | dma-fence.h |
    296 if (fence) in dma_fence_put()
    308 if (fence) in dma_fence_get()
    310 return fence; in dma_fence_get()
    323 return fence; in dma_fence_get_rcu()
    351 if (!fence) in dma_fence_get_rcu_safe()
    374 dma_fence_put(fence); in dma_fence_get_rcu_safe()
    425 if (fence->ops->signaled && fence->ops->signaled(fence)) { in dma_fence_is_signaled_locked()
    455 if (fence->ops->signaled && fence->ops->signaled(fence)) { in dma_fence_is_signaled()
    584 fence->error = error; in dma_fence_set_error()
    603 return fence->timestamp; in dma_fence_timestamp()
    [all …]
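
dma-fence.h provides the inline reference-counting and query helpers hit above. A consumer-side sketch of the usual wait pattern; the 1-second timeout is an arbitrary choice for illustration.

```c
#include <linux/dma-fence.h>
#include <linux/jiffies.h>

static int example_wait_on(struct dma_fence *fence)
{
	long timeout;

	if (dma_fence_is_signaled(fence))
		return 0;

	/* Hold our own reference across the sleep. */
	dma_fence_get(fence);
	timeout = dma_fence_wait_timeout(fence, true /* interruptible */,
					 msecs_to_jiffies(1000));
	dma_fence_put(fence);

	if (timeout < 0)
		return timeout;		/* e.g. -ERESTARTSYS */
	return timeout ? 0 : -ETIMEDOUT;
}
```

On the producer side, dma_fence_set_error() (the helper hit at line 584) must be called before dma_fence_signal() if the backing work failed.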
|
| /linux/drivers/gpu/drm/i915/ |
| A D | i915_sw_fence.c |
    131 return fence->fn(fence, state); in __i915_sw_fence_notify()
    245 fence->fn = fn; in __i915_sw_fence_init()
    247 fence->flags = 0; in __i915_sw_fence_init()
    258 fence->error = 0; in i915_sw_fence_reinit()
    379 wq->private = fence; in __i915_sw_fence_await_sw_fence()
    433 fence = xchg(&cb->base.fence, NULL); in timer_i915_sw_fence_wake()
    434 if (!fence) in timer_i915_sw_fence_wake()
    454 fence = xchg(&cb->base.fence, NULL); in dma_i915_sw_fence_wake_timer()
    455 if (fence) { in dma_i915_sw_fence_wake_timer()
    507 cb->fence = fence; in i915_sw_fence_await_dma_fence()
    [all …]
|
| A D | i915_sw_fence.h |
    42 void __i915_sw_fence_init(struct i915_sw_fence *fence,
    47 #define i915_sw_fence_init(fence, fn) \ argument
    52 __i915_sw_fence_init((fence), (fn), #fence, &__key); \
    55 #define i915_sw_fence_init(fence, fn) \ argument
    58 __i915_sw_fence_init((fence), (fn), NULL, NULL); \
    65 void i915_sw_fence_fini(struct i915_sw_fence *fence);
    81 struct i915_sw_fence *fence; member
    103 return atomic_read(&fence->pending) <= 0; in i915_sw_fence_signaled()
    108 return atomic_read(&fence->pending) < 0; in i915_sw_fence_done()
    113 wait_event(fence->wait, i915_sw_fence_done(fence)); in i915_sw_fence_wait()
    [all …]
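
i915_sw_fence is an i915-internal "collective" fence: it completes once every awaited fence (dma_fence or other sw_fence) has signaled and then invokes a notify callback. The sketch below is only the approximate shape inferred from the declarations listed above and current i915 sources, not a stable API; names and exact signatures should be treated as assumptions.

```c
#include <linux/notifier.h>
#include "i915_sw_fence.h"

static int example_notify(struct i915_sw_fence *fence,
			  enum i915_sw_fence_notify state)
{
	if (state == FENCE_COMPLETE)
		pr_info("all awaited fences have signaled\n");
	return NOTIFY_DONE;
}

static void example_await(struct dma_fence *dma)
{
	struct i915_sw_fence fence;

	i915_sw_fence_init(&fence, example_notify);
	/* Queue a dependency on an external dma_fence, then commit. */
	i915_sw_fence_await_dma_fence(&fence, dma, 0, GFP_KERNEL);
	i915_sw_fence_commit(&fence);

	i915_sw_fence_wait(&fence);	/* wait_event() on fence.wait */
	i915_sw_fence_fini(&fence);
}
```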
|
| A D | i915_request.c |
    1078 fence = NULL; in i915_request_await_start()
    1108 fence = &prev->fence; in i915_request_await_start()
    1111 if (!fence) in i915_request_await_start()
    1117 fence, 0, in i915_request_await_start()
    1421 fence = *child++; in i915_request_await_execution()
    1425 if (fence->context == rq->fence.context) in i915_request_await_execution()
    1523 fence = *child++; in i915_request_await_dma_fence()
    1532 if (fence->context == rq->fence.context) in i915_request_await_dma_fence()
    1538 fence)) in i915_request_await_dma_fence()
    1554 fence); in i915_request_await_dma_fence()
    [all …]
|
| /linux/drivers/gpu/drm/i915/gt/ |
| A D | intel_ggtt_fencing.c |
    82 val = fence->start + fence->size - I965_FENCE_PAGE; in i965_write_fence_reg()
    248 GEM_BUG_ON(old->fence != fence); in fence_update()
    276 vma->fence = fence; in fence_update()
    293 struct i915_fence_reg *fence = vma->fence; in i915_vma_revoke_fence() local
    297 if (!fence) in i915_vma_revoke_fence()
    326 return fence->vma && i915_vma_is_active(fence->vma); in fence_is_active()
    335 GEM_BUG_ON(fence->vma && fence->vma->fence != fence); in fence_find()
    352 return fence; in fence_find()
    373 fence = vma->fence; in __i915_vma_pin_fence()
    396 GEM_BUG_ON(vma->fence != (set ? fence : NULL)); in __i915_vma_pin_fence()
    [all …]
|
| /linux/drivers/gpu/drm/scheduler/ |
| A D | sched_fence.c |
    50 struct dma_fence *fence) in drm_sched_fence_set_parent() argument
    101 if (!WARN_ON_ONCE(!fence)) in drm_sched_fence_free_rcu()
    116 if (!WARN_ON_ONCE(fence->sched)) in drm_sched_fence_free()
    132 dma_fence_put(fence->parent); in drm_sched_fence_release_scheduled()
    147 dma_fence_put(&fence->scheduled); in drm_sched_fence_release_finished()
    166 fence->deadline = deadline; in drm_sched_fence_set_deadline_finished()
    212 if (fence == NULL) in drm_sched_fence_alloc()
    215 fence->owner = owner; in drm_sched_fence_alloc()
    216 spin_lock_init(&fence->lock); in drm_sched_fence_alloc()
    218 return fence; in drm_sched_fence_alloc()
    [all …]
|
| /linux/drivers/gpu/drm/nouveau/ |
| A D | nouveau_fence.c |
    59 list_del(&fence->head); in nouveau_fence_signal()
    80 return from_fence(fence); in nouveau_local_fence()
    91 fence = list_entry(fctx->pending.next, typeof(*fence), head); in nouveau_fence_context_kill()
    138 fence = list_entry(fctx->pending.next, typeof(*fence), head); in nouveau_fence_update()
    162 fence = list_entry(fctx->pending.next, typeof(*fence), head); in nouveau_fence_uevent_work()
    238 ret = fctx->emit(fence); in nouveau_fence_emit()
    378 fence) { in nouveau_fence_sync()
    427 fence = kzalloc(sizeof(*fence), GFP_KERNEL); in nouveau_fence_create()
    428 if (!fence) in nouveau_fence_create()
    431 fence->channel = chan; in nouveau_fence_create()
    [all …]
|
| /linux/drivers/gpu/drm/vgem/ |
| A D | vgem_fence.c |
    50 struct vgem_fence *fence = container_of(base, typeof(*fence), base); in vgem_fence_release() local
    65 dma_fence_is_signaled(fence) ? fence->seqno : 0); in vgem_fence_timeline_value_str()
    79 struct vgem_fence *fence = from_timer(fence, t, timer); in vgem_fence_timeout() local
    89 fence = kzalloc(sizeof(*fence), GFP_KERNEL); in vgem_fence_create()
    90 if (!fence) in vgem_fence_create()
    94 dma_fence_init(&fence->base, &vgem_fence_ops, &fence->lock, in vgem_fence_create()
    102 return &fence->base; in vgem_fence_create()
    148 if (!fence) { in vgem_fence_attach_ioctl()
    218 fence = idr_replace(&vfile->fence_idr, NULL, arg->fence); in vgem_fence_signal_ioctl()
    220 if (!fence) in vgem_fence_signal_ioctl()
    [all …]
|
| /linux/drivers/gpu/drm/amd/amdgpu/ |
| A D | amdgpu_amdkfd_fence.c |
    69 fence = kzalloc(sizeof(*fence), GFP_KERNEL); in amdgpu_amdkfd_fence_create()
    70 if (fence == NULL) in amdgpu_amdkfd_fence_create()
    75 fence->mm = mm; in amdgpu_amdkfd_fence_create()
    79 dma_fence_init(&fence->base, &amdkfd_fence_ops, &fence->lock, in amdgpu_amdkfd_fence_create()
    82 return fence; in amdgpu_amdkfd_fence_create()
    94 return fence; in to_amdgpu_amdkfd_fence()
    123 if (!fence) in amdkfd_fence_enable_signaling()
    154 if (WARN_ON(!fence)) in amdkfd_fence_release()
    157 mmdrop(fence->mm); in amdkfd_fence_release()
    175 if (!fence) in amdkfd_fence_check_mm()
    [all …]
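
amdgpu_amdkfd_fence.c shows the common embedding pattern also visible in sw_sync and vgem above: the driver wraps struct dma_fence in a larger object and downcasts with an ops check. A generic sketch of that pattern, with hypothetical example_* names and the ops table elided (it would look like the one in the dma-fence.c sketch earlier).

```c
#include <linux/dma-fence.h>
#include <linux/slab.h>

struct example_fence {
	struct dma_fence base;
	spinlock_t lock;
	void *owner;			/* driver-private payload */
};

extern const struct dma_fence_ops example_fence_ops;	/* elided */

static struct example_fence *to_example_fence(struct dma_fence *f)
{
	if (f->ops != &example_fence_ops)
		return NULL;		/* not one of ours */

	return container_of(f, struct example_fence, base);
}

static struct example_fence *example_fence_create(void *owner)
{
	struct example_fence *fence = kzalloc(sizeof(*fence), GFP_KERNEL);

	if (!fence)
		return NULL;

	fence->owner = owner;
	spin_lock_init(&fence->lock);
	dma_fence_init(&fence->base, &example_fence_ops, &fence->lock,
		       dma_fence_context_alloc(1), 0);
	return fence;
}
```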
|
| /linux/drivers/gpu/drm/i915/selftests/ |
| A D | lib_sw_fence.c |
    42 atomic_set(&fence->pending, 1); in __onstack_fence_init()
    43 fence->error = 0; in __onstack_fence_init()
    44 fence->fn = nop_fence_notify; in __onstack_fence_init()
    49 if (!fence->fn) in onstack_fence_fini()
    52 i915_sw_fence_commit(fence); in onstack_fence_fini()
    53 i915_sw_fence_fini(fence); in onstack_fence_fini()
    85 struct i915_sw_fence fence; member
    95 struct heap_fence *h = container_of(fence, typeof(*h), fence); in heap_fence_notify()
    102 heap_fence_put(&h->fence); in heap_fence_notify()
    119 return &h->fence; in heap_fence_create()
    [all …]
|
| /linux/include/trace/events/ |
| A D | dma_fence.h |
    16 TP_ARGS(fence),
    19 __string(driver, fence->ops->get_driver_name(fence))
    20 __string(timeline, fence->ops->get_timeline_name(fence))
    29 __entry->seqno = fence->seqno;
    41 TP_ARGS(fence)
    48 TP_ARGS(fence)
    55 TP_ARGS(fence)
    62 TP_ARGS(fence)
    69 TP_ARGS(fence)
    76 TP_ARGS(fence)
    [all …]
|
| /linux/drivers/gpu/drm/vmwgfx/ |
| A D | vmwgfx_fence.c |
    133 fence->destroy(fence); in vmw_fence_obj_destroy()
    544 fence = kzalloc(sizeof(*fence), GFP_KERNEL); in vmw_fence_create()
    553 *p_fence = fence; in vmw_fence_create()
    557 kfree(fence); in vmw_fence_create()
    565 container_of(fence, struct vmw_user_fence, fence); in vmw_user_fence_destroy()
    575 struct vmw_fence_obj *fence = &ufence->fence; in vmw_user_fence_base_release() local
    749 fence = &(container_of(base, struct vmw_user_fence, base)->fence); in vmw_fence_obj_wait_ioctl()
    789 fence = &(container_of(base, struct vmw_user_fence, base)->fence); in vmw_fence_obj_signaled_ioctl()
    957 eaction->fence = vmw_fence_obj_reference(fence); in vmw_event_fence_action_queue()
    1072 if (!fence) { in vmw_fence_event_ioctl()
    [all …]
|
| /linux/drivers/gpu/drm/xe/ |
| A D | xe_hw_fence.c |
    45 struct xe_hw_fence *fence = in fence_free() local
    48 if (!WARN_ON_ONCE(!fence)) in fence_free()
    55 struct xe_hw_fence *fence, *next; in hw_fence_irq_run_cb() local
    66 trace_xe_hw_fence_signal(fence); in hw_fence_irq_run_cb()
    67 list_del_init(&fence->irq_link); in hw_fence_irq_run_cb()
    86 struct xe_hw_fence *fence, *next; in xe_hw_fence_irq_finish() local
    95 list_del_init(&fence->irq_link); in xe_hw_fence_irq_finish()
    97 dma_fence_put(&fence->dma); in xe_hw_fence_irq_finish()
    158 return fence->name; in xe_hw_fence_get_timeline_name()
    164 struct xe_device *xe = fence->xe; in xe_hw_fence_signaled()
    [all …]
|
| A D | xe_gt_tlb_invalidation.c |
    42 if (WARN_ON_ONCE(!fence->gt)) in xe_gt_tlb_invalidation_fence_fini()
    56 dma_fence_signal(&fence->base); in __invalidation_fence_signal()
    58 dma_fence_put(&fence->base); in __invalidation_fence_signal()
    64 list_del(&fence->link); in invalidation_fence_signal()
    90 fence->base.error = -ETIME; in xe_gt_tlb_fence_timeout()
    182 xe_gt_assert(gt, fence); in send_tlb_invalidation()
    192 fence->seqno = seqno; in send_tlb_invalidation()
    209 list_add_tail(&fence->link, in send_tlb_invalidation()
    331 xe_gt_assert(gt, fence); in xe_gt_tlb_invalidation_range()
    522 INIT_LIST_HEAD(&fence->link); in xe_gt_tlb_invalidation_fence_init()
    [all …]
|
| /linux/drivers/gpu/drm/i915/gem/ |
| A D | i915_gem_wait.c |
    27 if (dma_fence_is_i915(fence)) in i915_gem_object_wait_fence()
    30 return dma_fence_wait_timeout(fence, in i915_gem_object_wait_fence()
    39 struct dma_fence *fence; in i915_gem_object_boost() local
    60 if (dma_fence_is_i915(fence) && in i915_gem_object_boost()
    72 struct dma_fence *fence; in i915_gem_object_wait_reservation() local
    98 if (dma_fence_is_signaled(fence) || !dma_fence_is_i915(fence)) in fence_set_priority()
    101 rq = to_request(fence); in fence_set_priority()
    118 if (dma_fence_is_signaled(fence)) in i915_gem_fence_wait_priority()
    124 if (dma_fence_is_array(fence)) { in i915_gem_fence_wait_priority()
    141 fence_set_priority(fence, attr); in i915_gem_fence_wait_priority()
    [all …]
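
i915_gem_fence_wait_priority() has to handle composite fences, which is why it checks dma_fence_is_array() above. A small sketch of walking a possibly-composite fence with the dma_fence_array helpers; the example_* name is hypothetical.

```c
#include <linux/dma-fence-array.h>
#include <linux/printk.h>

static void example_for_each_leaf(struct dma_fence *fence)
{
	if (dma_fence_is_array(fence)) {
		struct dma_fence_array *array = to_dma_fence_array(fence);
		unsigned int i;

		for (i = 0; i < array->num_fences; i++)
			pr_info("leaf %llu:%llu\n",
				array->fences[i]->context,
				array->fences[i]->seqno);
	} else {
		pr_info("single %llu:%llu\n", fence->context, fence->seqno);
	}
}
```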
|
| /linux/drivers/gpu/drm/radeon/ |
| A D | radeon_fence.c |
    177 seq = atomic64_read(&fence->rdev->fence_drv[fence->ring].last_seq); in radeon_fence_check_signaled()
    180 radeon_irq_kms_sw_irq_put(fence->rdev, fence->ring); in radeon_fence_check_signaled()
    428 if (!fence) in radeon_fence_signaled()
    431 if (radeon_fence_seq_signaled(fence->rdev, fence->seq, fence->ring)) { in radeon_fence_signaled()
    547 seq[fence->ring] = fence->seq; in radeon_fence_wait_timeout()
    690 return fence; in radeon_fence_ref()
    704 *fence = NULL; in radeon_fence_unref()
    751 if (!fence) in radeon_fence_need_sync()
    759 if (fence->seq <= fdrv->sync_seq[fence->ring]) in radeon_fence_need_sync()
    779 if (!fence) in radeon_fence_note_sync()
    [all …]
|
| A D | radeon_sync.c |
    63 struct radeon_fence *fence) in radeon_sync_fence() argument
    67 if (!fence) in radeon_sync_fence()
    70 other = sync->sync_to[fence->ring]; in radeon_sync_fence()
    71 sync->sync_to[fence->ring] = radeon_fence_later(fence, other); in radeon_sync_fence()
    73 if (fence->is_vm_update) { in radeon_sync_fence()
    95 struct radeon_fence *fence; in radeon_sync_resv() local
    100 fence = to_radeon_fence(f); in radeon_sync_resv()
    101 if (fence && fence->rdev == rdev) in radeon_sync_resv()
    102 radeon_sync_fence(sync, fence); in radeon_sync_resv()
    181 radeon_fence_note_sync(fence, ring); in radeon_sync_rings()
    [all …]
|
| /linux/drivers/gpu/drm/ |
| A D | drm_syncobj.c |
    285 wait->fence = fence; in drm_syncobj_fence_add_wait()
    378 if (fence) in drm_syncobj_replace_fence()
    385 rcu_assign_pointer(syncobj->fence, fence); in drm_syncobj_replace_fence()
    507 *fence = wait.fence; in drm_syncobj_find_fence()
    576 if (fence) in drm_syncobj_create()
    1017 fence = rcu_dereference_protected(syncobj->fence, in syncobj_wait_syncobj_func()
    1026 wait->fence = fence; in syncobj_wait_syncobj_func()
    1094 entries[i].fence = fence; in drm_syncobj_array_wait_timeout()
    1126 fence = entries[i].fence; in drm_syncobj_array_wait_timeout()
    1138 fence = entries[i].fence; in drm_syncobj_array_wait_timeout()
    [all …]
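
drm_syncobj.c stores a replaceable fence behind a userspace handle. A hedged sketch of how a driver ioctl might resolve a handle to a fence and later install a new one, assuming file_priv and handle arrive from the ioctl arguments; the example_* names are hypothetical.

```c
#include <drm/drm_syncobj.h>
#include <linux/dma-fence.h>

static int example_wait_syncobj(struct drm_file *file_priv, u32 handle)
{
	struct dma_fence *fence;
	int ret;

	/* Point 0 and no flags: treat the syncobj as a binary one. */
	ret = drm_syncobj_find_fence(file_priv, handle, 0, 0, &fence);
	if (ret)
		return ret;

	ret = dma_fence_wait(fence, true);
	dma_fence_put(fence);
	return ret;
}

static int example_signal_syncobj(struct drm_file *file_priv, u32 handle,
				  struct dma_fence *fence)
{
	struct drm_syncobj *syncobj = drm_syncobj_find(file_priv, handle);

	if (!syncobj)
		return -ENOENT;

	drm_syncobj_replace_fence(syncobj, fence);
	drm_syncobj_put(syncobj);
	return 0;
}
```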
|
| /linux/drivers/gpu/host1x/ |
| A D | intr.c |
    15 struct host1x_syncpt_fence *fence) in host1x_intr_add_fence_to_list() argument
    22 list_add(&fence->list, &fence_in_list->list); in host1x_intr_add_fence_to_list()
    28 list_add(&fence->list, &list->list); in host1x_intr_add_fence_to_list()
    33 struct host1x_syncpt_fence *fence; in host1x_intr_update_hw_state() local
    49 INIT_LIST_HEAD(&fence->list); in host1x_intr_add_fence_locked()
    52 host1x_intr_update_hw_state(host, fence->sp); in host1x_intr_add_fence_locked()
    62 if (list_empty(&fence->list)) { in host1x_intr_remove_fence()
    67 list_del_init(&fence->list); in host1x_intr_remove_fence()
    78 struct host1x_syncpt_fence *fence, *tmp; in host1x_intr_handle_interrupt() local
    91 list_del_init(&fence->list); in host1x_intr_handle_interrupt()
    [all …]
|