Home
last modified time | relevance | path

Searched refs:fences (Results 1 – 25 of 62) sorted by relevance

123

/linux-6.3-rc2/drivers/gpu/drm/i915/
A D i915_deps.c 38 if (deps->fences != &deps->single) in i915_deps_reset_fences()
39 kfree(deps->fences); in i915_deps_reset_fences()
42 deps->fences = &deps->single; in i915_deps_reset_fences()
52 deps->fences = NULL; in i915_deps_init()
69 dma_fence_put(deps->fences[i]); in i915_deps_fini()
71 if (deps->fences != &deps->single) in i915_deps_fini()
72 kfree(deps->fences); in i915_deps_fini()
89 memcpy(new_fences, deps->fences, in i915_deps_grow()
91 swap(new_fences, deps->fences); in i915_deps_grow()
134 struct dma_fence **fences = deps->fences; in i915_deps_sync() local
[all …]
/linux-6.3-rc2/drivers/dma-buf/
A D dma-resv.c 263 if (fences) in dma_resv_reset_max_fences()
264 fences->max_fences = fences->num_fences; in dma_resv_reset_max_fences()
364 if (cursor->fences) in dma_resv_iter_restart_unlocked()
372 if (!cursor->fences) in dma_resv_iter_walk_unlocked()
494 if (!cursor->fences || in dma_resv_iter_next()
568 *fences = NULL; in dma_resv_get_fences()
582 *fences = krealloc_array(*fences, count, in dma_resv_get_fences()
585 if (count && !*fences) { in dma_resv_get_fences()
632 *fence = fences[0]; in dma_resv_get_singleton()
633 kfree(fences); in dma_resv_get_singleton()
[all …]
A D dma-fence-unwrap.c 64 struct dma_fence **fences, in __dma_fence_unwrap_merge() argument
74 dma_fence_unwrap_for_each(tmp, &iter[i], fences[i]) in __dma_fence_unwrap_merge()
93 fences[i] = dma_fence_unwrap_first(fences[i], &iter[i]); in __dma_fence_unwrap_merge()
104 while (fences[i] && dma_fence_is_signaled(fences[i])) in __dma_fence_unwrap_merge()
105 fences[i] = dma_fence_unwrap_next(&iter[i]); in __dma_fence_unwrap_merge()
107 next = fences[i]; in __dma_fence_unwrap_merge()
126 fences[i] = dma_fence_unwrap_next(&iter[i]); in __dma_fence_unwrap_merge()
129 fences[sel] = dma_fence_unwrap_next(&iter[sel]); in __dma_fence_unwrap_merge()
136 fences[sel] = dma_fence_unwrap_next(&iter[sel]); in __dma_fence_unwrap_merge()
A D dma-fence-array.c 87 if (dma_fence_add_callback(array->fences[i], &cb[i].cb, in dma_fence_array_enable_signaling()
89 int error = array->fences[i]->error; in dma_fence_array_enable_signaling()
120 dma_fence_put(array->fences[i]); in dma_fence_array_release()
122 kfree(array->fences); in dma_fence_array_release()
155 struct dma_fence **fences, in dma_fence_array_create() argument
162 WARN_ON(!num_fences || !fences); in dma_fence_array_create()
177 array->fences = fences; in dma_fence_array_create()
193 WARN_ON(dma_fence_is_container(fences[num_fences])); in dma_fence_array_create()
217 if (array->fences[i]->context != context) in dma_fence_match_context()
239 return array->fences[0]; in dma_fence_array_first()
[all …]
A D st-dma-fence-chain.c 102 struct dma_fence **fences; member
124 fc->fences = kvmalloc_array(count, sizeof(*fc->fences), in fence_chains_init()
126 if (!fc->fences) { in fence_chains_init()
133 fc->fences[i] = mock_fence(); in fence_chains_init()
134 if (!fc->fences[i]) { in fence_chains_init()
140 fc->fences[i], in fence_chains_init()
157 dma_fence_put(fc->fences[i]); in fence_chains_init()
160 kvfree(fc->fences); in fence_chains_init()
172 dma_fence_put(fc->fences[i]); in fence_chains_fini()
174 kvfree(fc->fences); in fence_chains_fini()
[all …]
A D st-dma-fence-unwrap.c 49 struct dma_fence **fences; in mock_array() local
53 fences = kcalloc(num_fences, sizeof(*fences), GFP_KERNEL); in mock_array()
54 if (!fences) in mock_array()
59 fences[i] = va_arg(valist, typeof(*fences)); in mock_array()
62 array = dma_fence_array_create(num_fences, fences, in mock_array()
70 kfree(fences); in mock_array()
75 dma_fence_put(va_arg(valist, typeof(*fences))); in mock_array()
A D st-dma-resv.c 228 cursor.fences = (void*)~0; in test_for_each_unlocked()
247 struct dma_fence *f, **fences = NULL; in test_get_fences() local
274 r = dma_resv_get_fences(&resv, usage, &i, &fences); in test_get_fences()
280 if (i != 1 || fences[0] != f) { in test_get_fences()
288 dma_fence_put(fences[i]); in test_get_fences()
289 kfree(fences); in test_get_fences()
A D dma-fence.c 810 dma_fence_test_signaled_any(struct dma_fence **fences, uint32_t count, in dma_fence_test_signaled_any() argument
816 struct dma_fence *fence = fences[i]; in dma_fence_test_signaled_any()
847 dma_fence_wait_any_timeout(struct dma_fence **fences, uint32_t count, in dma_fence_wait_any_timeout() argument
854 if (WARN_ON(!fences || !count || timeout < 0)) in dma_fence_wait_any_timeout()
859 if (dma_fence_is_signaled(fences[i])) { in dma_fence_wait_any_timeout()
875 struct dma_fence *fence = fences[i]; in dma_fence_wait_any_timeout()
893 if (dma_fence_test_signaled_any(fences, count, idx)) in dma_fence_wait_any_timeout()
906 dma_fence_remove_callback(fences[i], &cb[i].base); in dma_fence_wait_any_timeout()
A D st-dma-fence.c 446 struct dma_fence __rcu **fences; member
477 rcu_assign_pointer(t->fences[t->id], f1); in thread_signal_callback()
482 f2 = dma_fence_get_rcu_safe(&t->fences[!t->id]); in thread_signal_callback()
514 rcu_assign_pointer(t->fences[t->id], NULL); in thread_signal_callback()
538 t[i].fences = f; in race_signal_callback()
/linux-6.3-rc2/drivers/gpu/host1x/
A D intr.c 35 if (!list_empty(&sp->fences.list)) { in host1x_intr_update_hw_state()
36 fence = list_first_entry(&sp->fences.list, struct host1x_syncpt_fence, list); in host1x_intr_update_hw_state()
47 struct host1x_fence_list *fence_list = &fence->sp->fences; in host1x_intr_add_fence_locked()
57 struct host1x_fence_list *fence_list = &fence->sp->fences; in host1x_intr_remove_fence()
83 spin_lock(&sp->fences.lock); in host1x_intr_handle_interrupt()
85 list_for_each_entry_safe(fence, tmp, &sp->fences.list, list) { in host1x_intr_handle_interrupt()
98 spin_unlock(&sp->fences.lock); in host1x_intr_handle_interrupt()
110 spin_lock_init(&syncpt->fences.lock); in host1x_intr_init()
111 INIT_LIST_HEAD(&syncpt->fences.list); in host1x_intr_init()
A D debug.c 96 spin_lock_irqsave(&m->syncpt[i].fences.lock, irqflags); in show_syncpts()
97 list_for_each(pos, &m->syncpt[i].fences.list) in show_syncpts()
99 spin_unlock_irqrestore(&m->syncpt[i].fences.lock, irqflags); in show_syncpts()
/linux-6.3-rc2/Documentation/driver-api/
A D sync_file.rst 9 the fences(struct dma_fence) that are needed to synchronize between drivers or
29 in-fences and out-fences
33 the driver to userspace we call the fences it contains 'out-fences'. They are
37 Out-fences are fences that the driver creates.
40 userspace we call these fence(s) 'in-fences'. Receiving in-fences means that
42 the in-fences.
72 of the Sync File to the kernel. The kernel can then retrieve the fences
A D dma-buf.rst 153 :doc: DMA fences overview
228 batch DMA fences for memory management instead of context preemption DMA
229 fences which get reattached when the compute job is rescheduled.
232 fences and controls when they fire. Mixing indefinite fences with normal
239 * Only userspace knows about all dependencies in indefinite fences and when
244 dependent upon DMA fences. If the kernel also support indefinite fences in the
255 userspace [label="userspace controlled fences"]
270 fences in the kernel. This means:
272 * No future fences, proxy fences or userspace fences imported as DMA fences,
284 implications for DMA fences.
[all …]
/linux-6.3-rc2/drivers/gpu/drm/amd/amdgpu/
A D amdgpu_sync.c 54 hash_init(sync->fences); in amdgpu_sync_create()
137 hash_for_each_possible(sync->fences, e, node, f->context) { in amdgpu_sync_add_later()
169 hash_add(sync->fences, &e->node, f->context); in amdgpu_sync_fence()
286 hash_for_each_safe(sync->fences, i, tmp, e, node) { in amdgpu_sync_peek_fence()
326 hash_for_each_safe(sync->fences, i, tmp, e, node) { in amdgpu_sync_get_fence()
357 hash_for_each_safe(source->fences, i, tmp, e, node) { in amdgpu_sync_clone()
385 hash_for_each_safe(sync->fences, i, tmp, e, node) { in amdgpu_sync_push_to_job()
408 hash_for_each_safe(sync->fences, i, tmp, e, node) { in amdgpu_sync_wait()
432 hash_for_each_safe(sync->fences, i, tmp, e, node) in amdgpu_sync_free()
A D amdgpu_sa.c 207 struct dma_fence **fences, in amdgpu_sa_bo_next_hole() argument
229 fences[i] = NULL; in amdgpu_sa_bo_next_hole()
238 fences[i] = sa_bo->fence; in amdgpu_sa_bo_next_hole()
279 struct dma_fence *fences[AMDGPU_SA_NUM_FENCE_LISTS]; in amdgpu_sa_bo_new() local
314 } while (amdgpu_sa_bo_next_hole(sa_manager, fences, tries)); in amdgpu_sa_bo_new()
317 if (fences[i]) in amdgpu_sa_bo_new()
318 fences[count++] = dma_fence_get(fences[i]); in amdgpu_sa_bo_new()
322 t = dma_fence_wait_any_timeout(fences, count, false, in amdgpu_sa_bo_new()
326 dma_fence_put(fences[i]); in amdgpu_sa_bo_new()
A D amdgpu_ids.c 207 struct dma_fence **fences; in amdgpu_vmid_grab_idle() local
215 fences = kmalloc_array(id_mgr->num_ids, sizeof(void *), GFP_KERNEL); in amdgpu_vmid_grab_idle()
216 if (!fences) in amdgpu_vmid_grab_idle()
226 fences[i] = amdgpu_sync_peek_fence(&(*idle)->active, r); in amdgpu_vmid_grab_idle()
227 if (!fences[i]) in amdgpu_vmid_grab_idle()
241 dma_fence_get(fences[j]); in amdgpu_vmid_grab_idle()
243 array = dma_fence_array_create(i, fences, fence_context, in amdgpu_vmid_grab_idle()
247 dma_fence_put(fences[j]); in amdgpu_vmid_grab_idle()
248 kfree(fences); in amdgpu_vmid_grab_idle()
257 kfree(fences); in amdgpu_vmid_grab_idle()
A D amdgpu_fence.c 187 ptr = &ring->fence_drv.fences[seq & ring->fence_drv.num_fences_mask]; in amdgpu_fence_emit()
301 ptr = &drv->fences[last_seq]; in amdgpu_fence_process()
352 ptr = &ring->fence_drv.fences[seq & ring->fence_drv.num_fences_mask]; in amdgpu_fence_wait_empty()
432 fence = drv->fences[last_seq]; in amdgpu_fence_last_unsignaled_time_us()
456 fence = drv->fences[seq]; in amdgpu_fence_update_start_timestamp()
532 ring->fence_drv.fences = kcalloc(ring->num_hw_submission * 2, sizeof(void *), in amdgpu_fence_driver_init_ring()
535 if (!ring->fence_drv.fences) in amdgpu_fence_driver_init_ring()
631 dma_fence_put(ring->fence_drv.fences[j]); in amdgpu_fence_driver_sw_fini()
632 kfree(ring->fence_drv.fences); in amdgpu_fence_driver_sw_fini()
633 ring->fence_drv.fences = NULL; in amdgpu_fence_driver_sw_fini()
[all …]
A D amdgpu_jpeg.c 78 unsigned int fences = 0; in amdgpu_jpeg_idle_work_handler() local
85 fences += amdgpu_fence_count_emitted(&adev->jpeg.inst[i].ring_dec); in amdgpu_jpeg_idle_work_handler()
88 if (!fences && !atomic_read(&adev->jpeg.total_submission_cnt)) in amdgpu_jpeg_idle_work_handler()
/linux-6.3-rc2/drivers/gpu/drm/i915/selftests/
A D i915_sw_fence.c 453 struct i915_sw_fence **fences; in test_chain() local
457 fences = kmalloc_array(nfences, sizeof(*fences), GFP_KERNEL); in test_chain()
458 if (!fences) in test_chain()
462 fences[i] = alloc_fence(); in test_chain()
463 if (!fences[i]) { in test_chain()
471 fences[i - 1], in test_chain()
478 i915_sw_fence_commit(fences[i]); in test_chain()
484 if (i915_sw_fence_done(fences[i])) { in test_chain()
490 i915_sw_fence_commit(fences[0]); in test_chain()
500 free_fence(fences[i]); in test_chain()
[all …]
/linux-6.3-rc2/drivers/gpu/drm/radeon/
A D radeon_sa.c 248 struct radeon_fence **fences, in radeon_sa_bo_next_hole() argument
270 fences[i] = NULL; in radeon_sa_bo_next_hole()
280 fences[i] = sa_bo->fence; in radeon_sa_bo_next_hole()
319 struct radeon_fence *fences[RADEON_NUM_RINGS]; in radeon_sa_bo_new() local
350 } while (radeon_sa_bo_next_hole(sa_manager, fences, tries)); in radeon_sa_bo_new()
353 radeon_fence_ref(fences[i]); in radeon_sa_bo_new()
356 r = radeon_fence_wait_any(rdev, fences, false); in radeon_sa_bo_new()
358 radeon_fence_unref(&fences[i]); in radeon_sa_bo_new()
A D radeon_trace.h 36 __field(u32, fences)
42 __entry->fences = radeon_fence_count_emitted(
47 __entry->fences)
/linux-6.3-rc2/include/linux/
A D dma-fence-array.h 43 struct dma_fence **fences; member
80 struct dma_fence **fences,
A D dma-resv.h 178 struct dma_resv_list __rcu *fences; member
210 struct dma_resv_list *fences; member
476 unsigned int *num_fences, struct dma_fence ***fences);
/linux-6.3-rc2/drivers/gpu/drm/i915/gem/
A D i915_gem_execbuffer.c 2782 kvfree(fences); in __free_fence_array()
2820 eb->fences = f; in add_timeline_fence_array()
2949 eb->fences = f; in add_fence_array()
2995 if (fences) in put_fence_array()
3193 fences = kmalloc_array(eb->num_batches, sizeof(*fences), GFP_KERNEL); in eb_composite_fence_create()
3194 if (!fences) in eb_composite_fence_create()
3204 fences, in eb_composite_fence_create()
3209 kfree(fences); in eb_composite_fence_create()
3256 if (eb->fences) { in eb_fences_add()
3373 eb.fences = NULL; in i915_gem_do_execbuffer()
[all …]
/linux-6.3-rc2/drivers/gpu/drm/virtio/
A D virtgpu_fence.c 111 list_add_tail(&fence->node, &drv->fences); in virtio_gpu_fence_emit()
136 list_for_each_entry_safe(curr, tmp, &drv->fences, node) { in virtio_gpu_fence_event_process()
146 list_for_each_entry_safe(curr, tmp, &drv->fences, node) { in virtio_gpu_fence_event_process()

Completed in 48 milliseconds

123