Searched refs:hw_fence (Results 1 – 15 of 15) sorted by relevance

/drivers/gpu/drm/scheduler/tests/
mock_scheduler.c:67 dma_fence_signal_locked(&job->hw_fence); in drm_mock_sched_job_complete()
143 container_of(fence, typeof(*job), hw_fence); in drm_mock_sched_hw_fence_timeline_name()
151 container_of(fence, typeof(*job), hw_fence); in drm_mock_sched_hw_fence_release()
170 dma_fence_init(&job->hw_fence, in mock_sched_run_job()
200 return &job->hw_fence; in mock_sched_run_job()
230 dma_fence_set_error(&job->hw_fence, -ETIMEDOUT); in mock_sched_timedout_job()
231 dma_fence_signal_locked(&job->hw_fence); in mock_sched_timedout_job()
235 dma_fence_put(&job->hw_fence); in mock_sched_timedout_job()
246 dma_fence_put(&job->hw_fence); in mock_sched_free_job()
263 dma_fence_set_error(&job->hw_fence, -ECANCELED); in mock_sched_cancel_job()
[all …]
sched_tests.h:109 struct dma_fence hw_fence; member
tests_basic.c:235 KUNIT_ASSERT_EQ(test, job->hw_fence.error, -ECANCELED); in drm_sched_basic_cancel()
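
These test hits show the simplest shape of the pattern: the mock job embeds a bare struct dma_fence named hw_fence (sched_tests.h:109), run_job initializes and returns it, and the completion, timeout, and cancel paths signal it, setting an error code first when the job did not finish cleanly. A minimal sketch of that lifecycle follows; only the dma-fence calls are the real kernel API, while the my_* names are illustrative stand-ins for the test code. Note that dma_fence_set_error() must come before the signal, since a fence's error is frozen once it signals.

	#include <linux/dma-fence.h>
	#include <linux/errno.h>
	#include <linux/spinlock.h>

	struct my_mock_job {
		spinlock_t lock;		/* fence lock, held while signaling */
		struct dma_fence hw_fence;	/* embedded; recovered via container_of() */
	};

	static const char *my_driver_name(struct dma_fence *fence)
	{
		return "mock";
	}

	static const char *my_timeline_name(struct dma_fence *fence)
	{
		/* callbacks receive the embedded fence; container_of() yields the job */
		struct my_mock_job *job = container_of(fence, typeof(*job), hw_fence);

		return "mock-timeline";	/* the real code derives this from job state */
	}

	static const struct dma_fence_ops my_fence_ops = {
		.get_driver_name = my_driver_name,
		.get_timeline_name = my_timeline_name,
	};

	/* run_job(): arm the fence and hand it back to the scheduler */
	static struct dma_fence *my_run_job(struct my_mock_job *job,
					    u64 context, u64 seqno)
	{
		spin_lock_init(&job->lock);
		dma_fence_init(&job->hw_fence, &my_fence_ops, &job->lock,
			       context, seqno);
		return &job->hw_fence;
	}

	/* hardware completion: signal under the fence lock */
	static void my_complete(struct my_mock_job *job)
	{
		spin_lock(&job->lock);
		dma_fence_signal_locked(&job->hw_fence);
		spin_unlock(&job->lock);
	}

	/* timeout path: record the error before signaling, then drop the ref */
	static void my_timedout_job(struct my_mock_job *job)
	{
		spin_lock(&job->lock);
		dma_fence_set_error(&job->hw_fence, -ETIMEDOUT);
		dma_fence_signal_locked(&job->hw_fence);
		spin_unlock(&job->lock);
		dma_fence_put(&job->hw_fence);
	}
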
/drivers/gpu/drm/xe/
xe_hw_fence.c:223 struct xe_hw_fence *hw_fence = fence_alloc(); in xe_hw_fence_alloc() local
225 if (!hw_fence) in xe_hw_fence_alloc()
228 return &hw_fence->dma; in xe_hw_fence_alloc()
256 struct xe_hw_fence *hw_fence = in xe_hw_fence_init() local
257 container_of(fence, typeof(*hw_fence), dma); in xe_hw_fence_init()
259 hw_fence->xe = gt_to_xe(ctx->gt); in xe_hw_fence_init()
260 snprintf(hw_fence->name, sizeof(hw_fence->name), "%s", ctx->name); in xe_hw_fence_init()
261 hw_fence->seqno_map = seqno_map; in xe_hw_fence_init()
262 INIT_LIST_HEAD(&hw_fence->irq_link); in xe_hw_fence_init()
267 trace_xe_hw_fence_create(hw_fence); in xe_hw_fence_init()
xe_gpu_scheduler.h:58 struct dma_fence *hw_fence = s_fence->parent; in xe_sched_resubmit_jobs() local
60 if (hw_fence && !dma_fence_is_signaled(hw_fence)) in xe_sched_resubmit_jobs()
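
xe wraps the dma_fence inside its own xe_hw_fence object (the ->dma member above) and reaches it from the scheduler side through s_fence->parent. The resubmit hit shows the useful idiom: after a reset, only jobs whose hardware fence never signaled get re-run. A minimal sketch of that check, assuming a hypothetical pending list and job type in place of the drm_sched internals:

	#include <linux/dma-fence.h>
	#include <linux/list.h>

	struct my_job {
		struct list_head link;
		struct dma_fence *parent;	/* hw fence returned by run_job() */
	};

	static void my_resubmit_jobs(struct list_head *pending,
				     void (*run_job)(struct my_job *job))
	{
		struct my_job *job;

		list_for_each_entry(job, pending, link) {
			struct dma_fence *hw_fence = job->parent;

			/* a signaled hw fence means the hardware finished; skip it */
			if (hw_fence && !dma_fence_is_signaled(hw_fence))
				run_job(job);
		}
	}
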
/drivers/gpu/drm/amd/amdgpu/
amdgpu_job.c:140 r = amdgpu_ring_reset(ring, job->vmid, &job->hw_fence); in amdgpu_job_timedout()
256 else if (job->hw_fence.base.ops) in amdgpu_job_free_resources()
257 f = &job->hw_fence.base; in amdgpu_job_free_resources()
274 if (!job->hw_fence.base.ops) in amdgpu_job_free_cb()
277 dma_fence_put(&job->hw_fence.base); in amdgpu_job_free_cb()
306 if (!job->hw_fence.base.ops) in amdgpu_job_free()
309 dma_fence_put(&job->hw_fence.base); in amdgpu_job_free()
amdgpu_fence.c:694 job = container_of(old, struct amdgpu_job, hw_fence.base); in amdgpu_fence_driver_clear_job_fences()
836 struct amdgpu_job *job = container_of(f, struct amdgpu_job, hw_fence.base); in amdgpu_job_fence_get_timeline_name()
866 struct amdgpu_job *job = container_of(f, struct amdgpu_job, hw_fence.base); in amdgpu_job_fence_enable_signaling()
901 kfree(container_of(f, struct amdgpu_job, hw_fence.base)); in amdgpu_job_fence_free()
amdgpu_job.h:51 struct amdgpu_fence hw_fence; member
amdgpu_ib.c:157 af = &job->hw_fence; in amdgpu_ib_schedule()
amdgpu_debugfs.c:1905 if (preempted && (&job->hw_fence.base) == fence) in amdgpu_ib_preempt_mark_partial_job()
amdgpu_device.c:6480 if (job && dma_fence_is_signaled(&job->hw_fence.base)) { in amdgpu_device_gpu_recover()
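
amdgpu nests one level deeper: the job embeds a struct amdgpu_fence (amdgpu_job.h:51) which itself embeds the struct dma_fence as .base, so callbacks recover the job via container_of() on hw_fence.base, and the free paths test base.ops to tell an armed fence from one that run_job never initialized. A sketch of those two idioms, with hypothetical my_* types in place of the amdgpu ones:

	#include <linux/dma-fence.h>
	#include <linux/slab.h>

	struct my_drv_fence {
		struct dma_fence base;		/* dma_fence nested one level down */
		/* driver bookkeeping (ring, context, ...) would live here */
	};

	struct my_drv_job {
		struct my_drv_fence hw_fence;	/* embedded, hence the two-level member path */
	};

	/* fence callback (wired into the fence's dma_fence_ops in real code) */
	static const char *my_timeline_name(struct dma_fence *f)
	{
		struct my_drv_job *job =
			container_of(f, struct my_drv_job, hw_fence.base);

		return "drv-ring";	/* the real code returns the ring's name */
	}

	/* free path: base.ops doubles as "was this fence ever initialized?" */
	static void my_job_free(struct my_drv_job *job)
	{
		if (!job->hw_fence.base.ops)
			kfree(job);				/* run_job never armed it */
		else
			dma_fence_put(&job->hw_fence.base);	/* fence release frees the job */
	}
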
/drivers/gpu/drm/msm/
msm_gem_submit.c:52 submit->hw_fence = msm_fence_alloc(); in submit_create()
53 if (IS_ERR(submit->hw_fence)) { in submit_create()
54 ret = PTR_ERR(submit->hw_fence); in submit_create()
62 kfree(submit->hw_fence); in submit_create()
115 if (kref_read(&submit->hw_fence->refcount) == 0) { in __msm_gem_submit_destroy()
116 kfree(submit->hw_fence); in __msm_gem_submit_destroy()
118 dma_fence_put(submit->hw_fence); in __msm_gem_submit_destroy()
msm_ringbuffer.c:23 msm_fence_init(submit->hw_fence, fctx); in msm_job_run()
49 return dma_fence_get(submit->hw_fence); in msm_job_run()
msm_gem.h:445 struct dma_fence *hw_fence; member
msm_gpu.c:833 if (submit && dma_fence_is_signaled(submit->hw_fence)) { in retire_submits()
875 submit->seqno = submit->hw_fence->seqno; in msm_gpu_submit()
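
msm is the only driver here that keeps hw_fence as a pointer (msm_gem.h:445): it is allocated at submit creation so the run_job path cannot fail on allocation, initialized only in msm_job_run(), and freed either with a bare kfree() when the refcount is still zero (meaning dma_fence_init() never ran) or with dma_fence_put() once it has been armed. A sketch of that lifetime rule, with hypothetical my_* names and a fence_alloc callback standing in for msm_fence_alloc():

	#include <linux/dma-fence.h>
	#include <linux/err.h>
	#include <linux/kref.h>
	#include <linux/slab.h>

	struct my_submit {
		struct dma_fence *hw_fence;	/* pointer, not embedded */
	};

	static struct my_submit *my_submit_create(struct dma_fence *(*fence_alloc)(void))
	{
		struct my_submit *submit = kzalloc(sizeof(*submit), GFP_KERNEL);

		if (!submit)
			return ERR_PTR(-ENOMEM);

		/* allocate up front so the later run_job path cannot hit -ENOMEM */
		submit->hw_fence = fence_alloc();
		if (IS_ERR(submit->hw_fence)) {
			struct dma_fence *fence = submit->hw_fence;

			kfree(submit);
			return ERR_CAST(fence);
		}
		return submit;
	}

	static void my_submit_destroy(struct my_submit *submit)
	{
		if (kref_read(&submit->hw_fence->refcount) == 0) {
			/* dma_fence_init() never ran; no fence refcounting yet */
			kfree(submit->hw_fence);
		} else {
			dma_fence_put(submit->hw_fence);
		}
		kfree(submit);
	}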
