
Searched refs:fences (Results 1 – 25 of 55) sorted by relevance


/drivers/gpu/drm/i915/
i915_deps.c
38 if (deps->fences != &deps->single) in i915_deps_reset_fences()
39 kfree(deps->fences); in i915_deps_reset_fences()
42 deps->fences = &deps->single; in i915_deps_reset_fences()
52 deps->fences = NULL; in i915_deps_init()
69 dma_fence_put(deps->fences[i]); in i915_deps_fini()
71 if (deps->fences != &deps->single) in i915_deps_fini()
72 kfree(deps->fences); in i915_deps_fini()
89 memcpy(new_fences, deps->fences, in i915_deps_grow()
91 swap(new_fences, deps->fences); in i915_deps_grow()
134 struct dma_fence **fences = deps->fences; in i915_deps_sync() local
[all …]
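
Note on the pattern above: i915_deps keeps deps->fences pointing at an embedded single-entry slot (&deps->single) and only switches to a heap array once more than one fence must be tracked, which is why reset and fini free the array only when it is not the embedded slot. A minimal sketch of that shape, using an illustrative struct rather than the real i915 one:

    #include <linux/dma-fence.h>
    #include <linux/slab.h>

    struct deps {                        /* illustrative, not the i915 struct */
        struct dma_fence **fences;       /* points at &single until it grows */
        struct dma_fence *single;        /* embedded one-entry storage */
        unsigned int num_deps;
        unsigned int fences_size;
    };

    static void deps_reset(struct deps *deps)
    {
        unsigned int i;

        for (i = 0; i < deps->num_deps; i++)
            dma_fence_put(deps->fences[i]);
        if (deps->fences != &deps->single)
            kfree(deps->fences);         /* free only a real heap array */
        deps->fences = &deps->single;
        deps->fences_size = 1;
        deps->num_deps = 0;
    }
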
/drivers/dma-buf/
dma-resv.c
270 if (fences) in dma_resv_reset_max_fences()
271 fences->max_fences = fences->num_fences; in dma_resv_reset_max_fences()
372 if (cursor->fences) in dma_resv_iter_restart_unlocked()
380 if (!cursor->fences) in dma_resv_iter_walk_unlocked()
502 if (!cursor->fences || in dma_resv_iter_next()
576 *fences = NULL; in dma_resv_get_fences()
595 kfree(*fences); in dma_resv_get_fences()
596 *fences = NULL; in dma_resv_get_fences()
645 *fence = fences[0]; in dma_resv_get_singleton()
646 kfree(fences); in dma_resv_get_singleton()
[all …]
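
dma_resv_get_fences() snapshots the fences of a reservation object into a freshly allocated array, which is why the error path above frees *fences and resets it to NULL. A hedged call-site sketch; the wait policy here is just an example:

    #include <linux/dma-resv.h>
    #include <linux/slab.h>

    static int wait_all_readers(struct dma_resv *resv)
    {
        struct dma_fence **fences;
        unsigned int i, num_fences;
        int ret;

        /* Snapshot kernel, write and read fences into a new array. */
        ret = dma_resv_get_fences(resv, DMA_RESV_USAGE_READ,
                                  &num_fences, &fences);
        if (ret)
            return ret;

        for (i = 0; i < num_fences; i++) {
            dma_fence_wait(fences[i], false);  /* uninterruptible */
            dma_fence_put(fences[i]);          /* drop the snapshot's ref */
        }
        kfree(fences);                         /* the array itself is ours */
        return 0;
    }
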
dma-fence-array.c
89 int error = array->fences[i]->error; in dma_fence_array_enable_signaling()
146 dma_fence_put(array->fences[i]); in dma_fence_array_release()
148 kfree(array->fences); in dma_fence_array_release()
199 int num_fences, struct dma_fence **fences, in dma_fence_array_init() argument
203 WARN_ON(!num_fences || !fences); in dma_fence_array_init()
213 array->fences = fences; in dma_fence_array_init()
253 struct dma_fence **fences, in dma_fence_array_create() argument
263 dma_fence_array_init(array, num_fences, fences, in dma_fence_array_create()
288 if (array->fences[i]->context != context) in dma_fence_match_context()
310 return array->fences[0]; in dma_fence_array_first()
[all …]
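
dma_fence_array_create() bundles several fences into one composite fence; on success the array object takes ownership of both the fences[] buffer and the references stored in it, which matches the kfree(array->fences) seen in the release path above. A sketch, assuming the caller already owns fences f1 and f2:

    struct dma_fence **fences;
    struct dma_fence_array *array;

    fences = kmalloc_array(2, sizeof(*fences), GFP_KERNEL);
    if (!fences)
        return ERR_PTR(-ENOMEM);
    fences[0] = f1;                      /* references move to the array */
    fences[1] = f2;

    array = dma_fence_array_create(2, fences,
                                   dma_fence_context_alloc(1), 1,
                                   false); /* signal when *all* signal */
    if (!array) {
        kfree(fences);                   /* failed: ownership never moved */
        return ERR_PTR(-ENOMEM);
    }
    return &array->base;                 /* composite fence to wait on */
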
dma-fence-unwrap.c
96 int dma_fence_dedup_array(struct dma_fence **fences, int num_fences) in dma_fence_dedup_array() argument
100 sort(fences, num_fences, sizeof(*fences), fence_cmp, NULL); in dma_fence_dedup_array()
107 if (fences[i]->context == fences[j]->context) in dma_fence_dedup_array()
108 dma_fence_put(fences[i]); in dma_fence_dedup_array()
110 fences[++j] = fences[i]; in dma_fence_dedup_array()
119 struct dma_fence **fences, in __dma_fence_unwrap_merge() argument
130 dma_fence_unwrap_for_each(tmp, &iter[i], fences[i]) { in __dma_fence_unwrap_merge()
164 dma_fence_unwrap_for_each(tmp, &iter[i], fences[i]) { in __dma_fence_unwrap_merge()
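
dma_fence_dedup_array() sorts the array with fence_cmp so fences sharing a context become adjacent, then compacts in place so only one fence per context survives, dropping references to the duplicates. Reconstructed around the lines shown, as a sketch:

    #include <linux/sort.h>

    sort(fences, num_fences, sizeof(*fences), fence_cmp, NULL);

    /* After the sort, the first fence seen for each context is the
     * one to keep; later hits for that context are dropped. */
    j = 0;
    for (i = 1; i < num_fences; i++) {
        if (fences[i]->context == fences[j]->context)
            dma_fence_put(fences[i]);    /* duplicate context, drop it */
        else
            fences[++j] = fences[i];     /* first fence of a new context */
    }
    return ++j;                          /* deduplicated count */
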
st-dma-fence-chain.c
102 struct dma_fence **fences; member
124 fc->fences = kvmalloc_array(count, sizeof(*fc->fences), in fence_chains_init()
126 if (!fc->fences) { in fence_chains_init()
133 fc->fences[i] = mock_fence(); in fence_chains_init()
134 if (!fc->fences[i]) { in fence_chains_init()
140 fc->fences[i], in fence_chains_init()
157 dma_fence_put(fc->fences[i]); in fence_chains_init()
160 kvfree(fc->fences); in fence_chains_init()
172 dma_fence_put(fc->fences[i]); in fence_chains_fini()
174 kvfree(fc->fences); in fence_chains_fini()
[all …]
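
The chain selftest above allocates an array of mock fences and, when a mid-loop allocation fails, unwinds only the entries created so far before freeing the array. The generic partial-failure cleanup shape, with mock_fence() standing in as a placeholder allocator:

    fc->fences = kvmalloc_array(count, sizeof(*fc->fences),
                                GFP_KERNEL | __GFP_ZERO);
    if (!fc->fences)
        return -ENOMEM;

    for (i = 0; i < count; i++) {
        fc->fences[i] = mock_fence();    /* placeholder allocator */
        if (!fc->fences[i])
            goto unwind;
    }
    return 0;

    unwind:
    while (i--)                          /* release only what exists */
        dma_fence_put(fc->fences[i]);
    kvfree(fc->fences);
    return -ENOMEM;
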
st-dma-resv.c
228 cursor.fences = (void*)~0; in test_for_each_unlocked()
247 struct dma_fence *f, **fences = NULL; in test_get_fences() local
274 r = dma_resv_get_fences(&resv, usage, &i, &fences); in test_get_fences()
280 if (i != 1 || fences[0] != f) { in test_get_fences()
288 dma_fence_put(fences[i]); in test_get_fences()
289 kfree(fences); in test_get_fences()
st-dma-fence-unwrap.c
53 struct dma_fence **fences; in mock_array() local
57 fences = kcalloc(num_fences, sizeof(*fences), GFP_KERNEL); in mock_array()
58 if (!fences) in mock_array()
63 fences[i] = va_arg(valist, typeof(*fences)); in mock_array()
66 array = dma_fence_array_create(num_fences, fences, in mock_array()
74 kfree(fences); in mock_array()
79 dma_fence_put(va_arg(valist, typeof(*fences))); in mock_array()
dma-fence.c
828 dma_fence_test_signaled_any(struct dma_fence **fences, uint32_t count, in dma_fence_test_signaled_any() argument
834 struct dma_fence *fence = fences[i]; in dma_fence_test_signaled_any()
865 dma_fence_wait_any_timeout(struct dma_fence **fences, uint32_t count, in dma_fence_wait_any_timeout() argument
872 if (WARN_ON(!fences || !count || timeout < 0)) in dma_fence_wait_any_timeout()
877 if (dma_fence_is_signaled(fences[i])) { in dma_fence_wait_any_timeout()
893 struct dma_fence *fence = fences[i]; in dma_fence_wait_any_timeout()
911 if (dma_fence_test_signaled_any(fences, count, idx)) in dma_fence_wait_any_timeout()
924 dma_fence_remove_callback(fences[i], &cb[i].base); in dma_fence_wait_any_timeout()
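
dma_fence_wait_any_timeout() blocks until any one fence in the array signals, storing that fence's index through the last parameter; the hits above show its fast path scanning for an already-signaled fence before installing callbacks. A hedged call-site sketch:

    uint32_t first;
    signed long t;

    t = dma_fence_wait_any_timeout(fences, count,
                                   true,                   /* interruptible */
                                   msecs_to_jiffies(1000), /* 1s budget */
                                   &first);
    if (t < 0)
        return t;                        /* e.g. -ERESTARTSYS */
    if (t == 0)
        return -ETIMEDOUT;               /* nothing signaled in time */
    /* fences[first] has signaled; t holds the remaining jiffies. */
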
st-dma-fence.c
446 struct dma_fence __rcu **fences; member
477 rcu_assign_pointer(t->fences[t->id], f1); in thread_signal_callback()
482 f2 = dma_fence_get_rcu_safe(&t->fences[!t->id]); in thread_signal_callback()
514 rcu_assign_pointer(t->fences[t->id], NULL); in thread_signal_callback()
538 t[i].fences = f; in race_signal_callback()
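
The st-dma-fence hits exercise the RCU handoff pattern for fence pointers: a writer publishes with rcu_assign_pointer() while a reader takes a counted reference with dma_fence_get_rcu_safe(), which retries internally if the pointer is replaced mid-read. The two sides in sketch form:

    struct dma_fence __rcu *slot;        /* shared, RCU-protected pointer */

    /* Writer: install a fence; the slot owns this reference. */
    rcu_assign_pointer(slot, fence);

    /* Reader: take a counted reference that is safe against the fence
     * being swapped out or freed concurrently. */
    struct dma_fence *snap;

    rcu_read_lock();
    snap = dma_fence_get_rcu_safe(&slot);
    rcu_read_unlock();

    if (snap) {
        /* ... use snap ... */
        dma_fence_put(snap);
    }
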
/drivers/gpu/host1x/
intr.c
35 if (!list_empty(&sp->fences.list)) { in host1x_intr_update_hw_state()
36 fence = list_first_entry(&sp->fences.list, struct host1x_syncpt_fence, list); in host1x_intr_update_hw_state()
47 struct host1x_fence_list *fence_list = &fence->sp->fences; in host1x_intr_add_fence_locked()
57 struct host1x_fence_list *fence_list = &fence->sp->fences; in host1x_intr_remove_fence()
83 spin_lock(&sp->fences.lock); in host1x_intr_handle_interrupt()
85 list_for_each_entry_safe(fence, tmp, &sp->fences.list, list) { in host1x_intr_handle_interrupt()
98 spin_unlock(&sp->fences.lock); in host1x_intr_handle_interrupt()
110 spin_lock_init(&syncpt->fences.lock); in host1x_intr_init()
111 INIT_LIST_HEAD(&syncpt->fences.list); in host1x_intr_init()
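
host1x keeps pending fences on a per-syncpoint list guarded by a plain spinlock; the interrupt handler walks it with list_for_each_entry_safe() so entries can be unlinked during iteration. The generic shape, with placeholder predicate and completion helpers:

    spin_lock(&sp->fences.lock);
    list_for_each_entry_safe(fence, tmp, &sp->fences.list, list) {
        if (!fence_has_expired(fence))   /* placeholder predicate */
            break;                       /* list is kept in order */
        list_del_init(&fence->list);     /* safe under the _safe iterator */
        complete_fence(fence);           /* placeholder completion hook */
    }
    spin_unlock(&sp->fences.lock);
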
debug.c
96 spin_lock_irqsave(&m->syncpt[i].fences.lock, irqflags); in show_syncpts()
97 list_for_each(pos, &m->syncpt[i].fences.list) in show_syncpts()
99 spin_unlock_irqrestore(&m->syncpt[i].fences.lock, irqflags); in show_syncpts()
/drivers/gpu/drm/amd/amdgpu/
amdgpu_sync.c
54 hash_init(sync->fences); in amdgpu_sync_create()
137 hash_for_each_possible(sync->fences, e, node, f->context) { in amdgpu_sync_add_later()
176 hash_add(sync->fences, &e->node, f->context); in amdgpu_sync_fence()
323 hash_for_each_safe(sync->fences, i, tmp, e, node) { in amdgpu_sync_peek_fence()
363 hash_for_each_safe(sync->fences, i, tmp, e, node) { in amdgpu_sync_get_fence()
394 hash_for_each_safe(source->fences, i, tmp, e, node) { in amdgpu_sync_clone()
423 for (i = 0; i < HASH_SIZE(src->fences); ++i) in amdgpu_sync_move()
424 hlist_move_list(&src->fences[i], &dst->fences[i]); in amdgpu_sync_move()
441 hash_for_each_safe(sync->fences, i, tmp, e, node) { in amdgpu_sync_push_to_job()
464 hash_for_each_safe(sync->fences, i, tmp, e, node) { in amdgpu_sync_wait()
[all …]
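
amdgpu_sync deduplicates dependencies by hashing fences on their context: hash_for_each_possible() looks up an existing entry for the same context and keeps only the later of the two fences, so at most one fence per context is tracked. A sketch of that add path; the entry type and helper name here are illustrative:

    #include <linux/hashtable.h>

    struct sync {
        DECLARE_HASHTABLE(fences, 4);    /* buckets keyed by fence context */
    };

    struct sync_entry {
        struct hlist_node node;
        struct dma_fence *fence;
    };

    static bool sync_add_later(struct sync *sync, struct dma_fence *f)
    {
        struct sync_entry *e;

        hash_for_each_possible(sync->fences, e, node, f->context) {
            if (e->fence->context != f->context)
                continue;                /* bucket collision, keep looking */
            if (dma_fence_is_later(f, e->fence)) {
                dma_fence_put(e->fence);
                e->fence = dma_fence_get(f);
            }
            return true;                 /* context already tracked */
        }
        return false;                    /* caller allocates a new entry */
    }
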
amdgpu_userq_fence.c
90 INIT_LIST_HEAD(&fence_drv->fences); in amdgpu_userq_fence_driver_alloc()
768 fences = kmalloc_array(wait_info->num_fences, sizeof(*fences), GFP_KERNEL); in amdgpu_userq_wait_ioctl()
769 if (!fences) { in amdgpu_userq_wait_ioctl()
786 fences[num_fences++] = fence; in amdgpu_userq_wait_ioctl()
828 fences[num_fences++] = f; in amdgpu_userq_wait_ioctl()
851 fences[num_fences++] = fence; in amdgpu_userq_wait_ioctl()
879 dma_fence_put(fences[i]); in amdgpu_userq_wait_ioctl()
883 dma_fence_put(fences[i]); in amdgpu_userq_wait_ioctl()
905 dma_fence_put(fences[i]); in amdgpu_userq_wait_ioctl()
918 kfree(fences); in amdgpu_userq_wait_ioctl()
[all …]
amdgpu_ids.c
204 struct dma_fence **fences; in amdgpu_vmid_grab_idle() local
212 fences = kmalloc_array(id_mgr->num_ids, sizeof(void *), GFP_NOWAIT); in amdgpu_vmid_grab_idle()
213 if (!fences) in amdgpu_vmid_grab_idle()
223 fences[i] = amdgpu_sync_peek_fence(&(*idle)->active, r); in amdgpu_vmid_grab_idle()
224 if (!fences[i]) in amdgpu_vmid_grab_idle()
238 dma_fence_get(fences[j]); in amdgpu_vmid_grab_idle()
240 array = dma_fence_array_create(i, fences, fence_context, in amdgpu_vmid_grab_idle()
244 dma_fence_put(fences[j]); in amdgpu_vmid_grab_idle()
245 kfree(fences); in amdgpu_vmid_grab_idle()
254 kfree(fences); in amdgpu_vmid_grab_idle()
amdgpu_fence.c
148 ptr = &ring->fence_drv.fences[seq & ring->fence_drv.num_fences_mask]; in amdgpu_fence_emit()
263 ptr = &drv->fences[last_seq]; in amdgpu_fence_process()
322 ptr = &ring->fence_drv.fences[seq & ring->fence_drv.num_fences_mask]; in amdgpu_fence_wait_empty()
399 fence = drv->fences[last_seq]; in amdgpu_fence_last_unsignaled_time_us()
423 fence = drv->fences[seq]; in amdgpu_fence_update_start_timestamp()
502 if (!ring->fence_drv.fences) in amdgpu_fence_driver_init_ring()
636 dma_fence_put(ring->fence_drv.fences[j]); in amdgpu_fence_driver_sw_fini()
637 kfree(ring->fence_drv.fences); in amdgpu_fence_driver_sw_fini()
638 ring->fence_drv.fences = NULL; in amdgpu_fence_driver_sw_fini()
685 ptr = &ring->fence_drv.fences[i]; in amdgpu_fence_driver_clear_job_fences()
[all …]
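
The amdgpu fence driver stores emitted fences in a power-of-two ring of slots indexed by seq & num_fences_mask, so a new fence replaces the occupant that is a full ring of sequence numbers older. The indexing idea as a sketch; the real driver also waits for the old fence if it has not signaled yet:

    struct fence_ring {                  /* illustrative driver state */
        struct dma_fence **fences;       /* power-of-two slot count */
        u64 num_fences_mask;             /* slot count - 1 */
    };

    static void ring_store(struct fence_ring *drv, u64 seq,
                           struct dma_fence *fence)
    {
        struct dma_fence **ptr = &drv->fences[seq & drv->num_fences_mask];

        /* The previous occupant is a full ring older and is assumed
         * signaled here; drop our reference and install the new fence. */
        dma_fence_put(*ptr);
        *ptr = dma_fence_get(fence);
    }
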
amdgpu_ctx.c
198 res = ktime_add(res, amdgpu_ctx_fence_time(centity->fences[i])); in amdgpu_ctx_entity_time()
215 entity = kzalloc(struct_size(entity, fences, amdgpu_sched_jobs), in amdgpu_ctx_init_entity()
282 res = ktime_add(res, amdgpu_ctx_fence_time(entity->fences[i])); in amdgpu_ctx_fini_entity()
283 dma_fence_put(entity->fences[i]); in amdgpu_ctx_fini_entity()
767 other = centity->fences[idx]; in amdgpu_ctx_add_fence()
773 centity->fences[idx] = fence; in amdgpu_ctx_add_fence()
807 fence = dma_fence_get(centity->fences[seq & (amdgpu_sched_jobs - 1)]); in amdgpu_ctx_get_fence()
869 other = dma_fence_get(centity->fences[idx]); in amdgpu_ctx_wait_prev_fence()
amdgpu_cs.c
1659 struct drm_amdgpu_fence *fences) in amdgpu_cs_wait_all_fences() argument
1669 fence = amdgpu_cs_get_fence(adev, filp, &fences[i]); in amdgpu_cs_wait_all_fences()
1704 struct drm_amdgpu_fence *fences) in amdgpu_cs_wait_any_fence() argument
1722 fence = amdgpu_cs_get_fence(adev, filp, &fences[i]); in amdgpu_cs_wait_any_fence()
1772 struct drm_amdgpu_fence *fences; in amdgpu_cs_wait_fences_ioctl() local
1778 if (fences == NULL) in amdgpu_cs_wait_fences_ioctl()
1781 fences_user = u64_to_user_ptr(wait->in.fences); in amdgpu_cs_wait_fences_ioctl()
1782 if (copy_from_user(fences, fences_user, in amdgpu_cs_wait_fences_ioctl()
1789 r = amdgpu_cs_wait_all_fences(adev, filp, wait, fences); in amdgpu_cs_wait_fences_ioctl()
1791 r = amdgpu_cs_wait_any_fence(adev, filp, wait, fences); in amdgpu_cs_wait_fences_ioctl()
[all …]
amdgpu_debugfs.c
1815 struct dma_fence **fences) in amdgpu_ib_preempt_fences_swap() argument
1831 ptr = &drv->fences[last_seq]; in amdgpu_ib_preempt_fences_swap()
1839 fences[last_seq] = fence; in amdgpu_ib_preempt_fences_swap()
1851 fence = fences[i]; in amdgpu_ib_preempt_signal_fences()
1892 ptr = &drv->fences[preempt_seq]; in amdgpu_ib_preempt_mark_partial_job()
1916 struct dma_fence **fences = NULL; in amdgpu_debugfs_ib_preempt() local
1933 fences = kcalloc(length, sizeof(void *), GFP_KERNEL); in amdgpu_debugfs_ib_preempt()
1934 if (!fences) in amdgpu_debugfs_ib_preempt()
1961 amdgpu_ib_preempt_fences_swap(ring, fences); in amdgpu_debugfs_ib_preempt()
1972 amdgpu_ib_preempt_signal_fences(fences, length); in amdgpu_debugfs_ib_preempt()
[all …]
/drivers/gpu/drm/i915/selftests/
i915_sw_fence.c
453 struct i915_sw_fence **fences; in test_chain() local
457 fences = kmalloc_array(nfences, sizeof(*fences), GFP_KERNEL); in test_chain()
458 if (!fences) in test_chain()
462 fences[i] = alloc_fence(); in test_chain()
463 if (!fences[i]) { in test_chain()
471 fences[i - 1], in test_chain()
478 i915_sw_fence_commit(fences[i]); in test_chain()
484 if (i915_sw_fence_done(fences[i])) { in test_chain()
490 i915_sw_fence_commit(fences[0]); in test_chain()
500 free_fence(fences[i]); in test_chain()
[all …]
/drivers/gpu/drm/
drm_suballoc.c
227 struct dma_fence **fences, in drm_suballoc_next_hole() argument
250 fences[i] = NULL; in drm_suballoc_next_hole()
259 fences[i] = sa->fence; in drm_suballoc_next_hole()
318 struct dma_fence *fences[DRM_SUBALLOC_MAX_QUEUES]; in drm_suballoc_new() local
355 } while (drm_suballoc_next_hole(sa_manager, fences, tries)); in drm_suballoc_new()
358 if (fences[i]) in drm_suballoc_new()
359 fences[count++] = dma_fence_get(fences[i]); in drm_suballoc_new()
365 t = dma_fence_wait_any_timeout(fences, count, intr, in drm_suballoc_new()
369 dma_fence_put(fences[i]); in drm_suballoc_new()
/drivers/gpu/drm/xe/
xe_sync.c
284 struct dma_fence **fences = NULL; in xe_sync_in_fence_get() local
306 fences = kmalloc_array(num_in_fence + 1, sizeof(*fences), GFP_KERNEL); in xe_sync_in_fence_get()
307 if (!fences) in xe_sync_in_fence_get()
312 fences[current_fence++] = sync[i].fence; in xe_sync_in_fence_get()
315 fences[current_fence++] = xe_exec_queue_last_fence_get(q, vm); in xe_sync_in_fence_get()
316 cf = dma_fence_array_create(num_in_fence, fences, in xe_sync_in_fence_get()
329 dma_fence_put(fences[--current_fence]); in xe_sync_in_fence_get()
330 kfree(fences); in xe_sync_in_fence_get()
xe_lmtt.c
228 struct xe_gt_tlb_invalidation_fence fences[XE_MAX_GT_PER_TILE]; in lmtt_invalidate_hw() local
229 struct xe_gt_tlb_invalidation_fence *fence = fences; in lmtt_invalidate_hw()
243 lmtt_debug(lmtt, "num_fences=%d err=%d\n", (int)(fence - fences), result); in lmtt_invalidate_hw()
250 fence = fences; in lmtt_invalidate_hw()
/drivers/gpu/drm/i915/gem/
i915_gem_execbuffer.c
2795 kvfree(fences); in __free_fence_array()
2833 eb->fences = f; in add_timeline_fence_array()
2962 eb->fences = f; in add_fence_array()
3008 if (fences) in put_fence_array()
3206 fences = kmalloc_array(eb->num_batches, sizeof(*fences), GFP_KERNEL); in eb_composite_fence_create()
3207 if (!fences) in eb_composite_fence_create()
3217 fences, in eb_composite_fence_create()
3222 kfree(fences); in eb_composite_fence_create()
3269 if (eb->fences) { in eb_fences_add()
3386 eb.fences = NULL; in i915_gem_do_execbuffer()
[all …]
/drivers/gpu/drm/virtio/
virtgpu_fence.c
95 list_add_tail(&fence->node, &drv->fences); in virtio_gpu_fence_emit()
120 list_for_each_entry_safe(curr, tmp, &drv->fences, node) { in virtio_gpu_fence_event_process()
130 list_for_each_entry_safe(curr, tmp, &drv->fences, node) { in virtio_gpu_fence_event_process()
/drivers/gpu/drm/radeon/
radeon_trace.h
36 __field(u32, fences)
42 __entry->fences = radeon_fence_count_emitted(
47 __entry->fences)
