/linux-6.3-rc2/drivers/gpu/drm/scheduler/ |
A D | sched_main.c |
  262  struct drm_sched_fence *s_fence = s_job->s_fence;    in drm_sched_job_done() local
  270  dma_fence_get(&s_fence->finished);    in drm_sched_job_done()
  271  drm_sched_fence_finished(s_fence);    in drm_sched_job_done()
  272  dma_fence_put(&s_fence->finished);    in drm_sched_job_done()
  460  if (s_job->s_fence->parent &&    in drm_sched_stop()
  464  s_job->s_fence->parent = NULL;    in drm_sched_stop()
  578  struct drm_sched_fence *s_fence = s_job->s_fence;    in drm_sched_resubmit_jobs() local
  594  s_job->s_fence->parent = NULL;    in drm_sched_resubmit_jobs()
  634  if (!job->s_fence)    in drm_sched_job_init()
  806  job->s_fence = NULL;    in drm_sched_job_cleanup()
  [all …]
|
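Note: the sched_main.c hits above trace the core signaling path when a job completes: drm_sched_job_done() takes an extra reference on the finished fence, signals it through drm_sched_fence_finished(), and drops the reference again, while drm_sched_stop() and drm_sched_resubmit_jobs() detach the hardware fence by clearing s_fence->parent. A minimal sketch of the completion step, assuming a caller that already holds the drm_sched_job (the helper name is illustrative, not the kernel's):

    #include <drm/gpu_scheduler.h>
    #include <linux/dma-fence.h>

    /* Mirrors the drm_sched_job_done() hits above. */
    static void example_signal_job_finished(struct drm_sched_job *s_job)
    {
            struct drm_sched_fence *s_fence = s_job->s_fence;

            /* Keep the finished fence alive across the signal. */
            dma_fence_get(&s_fence->finished);
            drm_sched_fence_finished(s_fence);      /* signals s_fence->finished */
            dma_fence_put(&s_fence->finished);
    }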
A D | sched_entity.c |
  147  drm_sched_fence_finished(job->s_fence);    in drm_sched_entity_kill_jobs_work()
  148  WARN_ON(job->s_fence->parent);    in drm_sched_entity_kill_jobs_work()
  196  struct drm_sched_fence *s_fence = job->s_fence;    in drm_sched_entity_kill() local
  198  dma_fence_set_error(&s_fence->finished, -ESRCH);    in drm_sched_entity_kill()
  200  dma_fence_get(&s_fence->finished);    in drm_sched_entity_kill()
  205  prev = &s_fence->finished;    in drm_sched_entity_kill()
  352  struct drm_sched_fence *s_fence;    in drm_sched_entity_add_dependency_cb() local
  365  s_fence = to_drm_sched_fence(fence);    in drm_sched_entity_add_dependency_cb()
  366  if (s_fence && s_fence->sched == sched &&    in drm_sched_entity_add_dependency_cb()
  373  fence = dma_fence_get(&s_fence->scheduled);    in drm_sched_entity_add_dependency_cb()
  [all …]
|
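Note: the drm_sched_entity_add_dependency_cb() hits above show a pipelining shortcut: when a dependency fence is itself a drm_sched_fence produced by the same scheduler, the entity only needs to wait for its scheduled fence rather than finished, since the ring keeps later jobs ordered behind it. A rough sketch of that check, assuming the caller passes a referenced dependency fence and the target scheduler (the helper name is illustrative):

    #include <drm/gpu_scheduler.h>
    #include <linux/dma-fence.h>

    static struct dma_fence *example_trim_dependency(struct dma_fence *fence,
                                                     struct drm_gpu_scheduler *sched)
    {
            struct drm_sched_fence *s_fence = to_drm_sched_fence(fence);

            if (s_fence && s_fence->sched == sched) {
                    /* Same scheduler: waiting until the dependency is merely
                     * scheduled is enough for correct ordering. */
                    struct dma_fence *scheduled = dma_fence_get(&s_fence->scheduled);

                    dma_fence_put(fence);
                    return scheduled;
            }
            return fence;
    }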
A D | gpu_scheduler_trace.h | 50 __entry->fence = &sched_job->s_fence->finished;
|
/linux-6.3-rc2/drivers/gpu/drm/amd/amdgpu/ |
A D | amdgpu_job.c |
  165  if (job->base.s_fence && job->base.s_fence->finished.ops)    in amdgpu_job_free_resources()
  166  f = &job->base.s_fence->finished;    in amdgpu_job_free_resources()
  214  if (job->gang_submit != &job->base.s_fence->scheduled)    in amdgpu_job_free()
  228  f = dma_fence_get(&job->base.s_fence->finished);    in amdgpu_job_submit()
  280  finished = &job->base.s_fence->finished;    in amdgpu_job_run()
  320  struct drm_sched_fence *s_fence = s_job->s_fence;    in amdgpu_job_stop_all_jobs_on_sched() local
  322  dma_fence_signal(&s_fence->scheduled);    in amdgpu_job_stop_all_jobs_on_sched()
  324  dma_fence_signal(&s_fence->finished);    in amdgpu_job_stop_all_jobs_on_sched()
  332  struct drm_sched_fence *s_fence = s_job->s_fence;    in amdgpu_job_stop_all_jobs_on_sched() local
  334  dma_fence_set_error(&s_fence->finished, -EHWPOISON);    in amdgpu_job_stop_all_jobs_on_sched()
  [all …]
|
A D | amdgpu_sync.c |
  68  struct drm_sched_fence *s_fence = to_drm_sched_fence(f);    in amdgpu_sync_same_dev() local
  70  if (s_fence) {    in amdgpu_sync_same_dev()
  89  struct drm_sched_fence *s_fence;    in amdgpu_sync_get_owner() local
  95  s_fence = to_drm_sched_fence(f);    in amdgpu_sync_get_owner()
  96  if (s_fence)    in amdgpu_sync_get_owner()
  97  return s_fence->owner;    in amdgpu_sync_get_owner()
  288  struct drm_sched_fence *s_fence = to_drm_sched_fence(f);    in amdgpu_sync_peek_fence() local
  294  if (ring && s_fence) {    in amdgpu_sync_peek_fence()
  298  if (s_fence->sched == &ring->sched) {    in amdgpu_sync_peek_fence()
  299  if (dma_fence_is_signaled(&s_fence->scheduled))    in amdgpu_sync_peek_fence()
  [all …]
|
A D | amdgpu_trace.h |
  36  job->base.s_fence->finished.ops->get_timeline_name(&job->base.s_fence->finished)
  182  __entry->context = job->base.s_fence->finished.context;
  183  __entry->seqno = job->base.s_fence->finished.seqno;
  207  __entry->context = job->base.s_fence->finished.context;
  208  __entry->seqno = job->base.s_fence->finished.seqno;
|
A D | amdgpu_ctx.c |
  169  struct drm_sched_fence *s_fence;    in amdgpu_ctx_fence_time() local
  175  s_fence = to_drm_sched_fence(fence);    in amdgpu_ctx_fence_time()
  176  if (!test_bit(DMA_FENCE_FLAG_TIMESTAMP_BIT, &s_fence->scheduled.flags))    in amdgpu_ctx_fence_time()
  180  if (!test_bit(DMA_FENCE_FLAG_TIMESTAMP_BIT, &s_fence->finished.flags))    in amdgpu_ctx_fence_time()
  181  return ktime_sub(ktime_get(), s_fence->scheduled.timestamp);    in amdgpu_ctx_fence_time()
  183  return ktime_sub(s_fence->finished.timestamp,    in amdgpu_ctx_fence_time()
  184  s_fence->scheduled.timestamp);    in amdgpu_ctx_fence_time()
|
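Note: amdgpu_ctx_fence_time() above turns the two fence timestamps into per-job GPU time: the scheduled fence is stamped when the job reaches the hardware and the finished fence when it completes. A condensed sketch of that accounting, assuming the caller has already resolved the drm_sched_fence:

    #include <drm/gpu_scheduler.h>
    #include <linux/dma-fence.h>
    #include <linux/ktime.h>

    static ktime_t example_fence_time(struct drm_sched_fence *s_fence)
    {
            /* Never scheduled: no GPU time consumed yet. */
            if (!test_bit(DMA_FENCE_FLAG_TIMESTAMP_BIT, &s_fence->scheduled.flags))
                    return ns_to_ktime(0);

            /* Still running: account the time spent so far. */
            if (!test_bit(DMA_FENCE_FLAG_TIMESTAMP_BIT, &s_fence->finished.flags))
                    return ktime_sub(ktime_get(), s_fence->scheduled.timestamp);

            /* Completed: scheduled-to-finished interval. */
            return ktime_sub(s_fence->finished.timestamp,
                             s_fence->scheduled.timestamp);
    }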
A D | amdgpu_ids.c |
  314  r = amdgpu_sync_fence(&(*id)->active, &job->base.s_fence->finished);    in amdgpu_vmid_grab_reserved()
  375  &job->base.s_fence->finished);    in amdgpu_vmid_grab_used()
  427  &job->base.s_fence->finished);    in amdgpu_vmid_grab()
|
A D | amdgpu_cs.c |
  428  struct drm_sched_fence *s_fence;    in amdgpu_cs_p2_dependencies() local
  431  s_fence = to_drm_sched_fence(fence);    in amdgpu_cs_p2_dependencies()
  432  fence = dma_fence_get(&s_fence->scheduled);    in amdgpu_cs_p2_dependencies()
  1217  struct drm_sched_fence *s_fence = to_drm_sched_fence(fence);    in amdgpu_cs_sync_rings() local
  1225  if (!s_fence || s_fence->sched != sched) {    in amdgpu_cs_sync_rings()
  1274  fence = &p->jobs[i]->base.s_fence->scheduled;    in amdgpu_cs_submit()
  1309  p->fence = dma_fence_get(&leader->base.s_fence->finished);    in amdgpu_cs_submit()
  1318  &p->jobs[i]->base.s_fence->finished,    in amdgpu_cs_submit()
  [all …]
|
A D | amdgpu_ib.c |
  151  fence_ctx = job->base.s_fence ?    in amdgpu_ib_schedule()
  152  job->base.s_fence->scheduled.context : 0;    in amdgpu_ib_schedule()
|
A D | amdgpu_debugfs.c | 1704 if (dma_fence_is_signaled(&s_job->s_fence->finished)) { in amdgpu_ib_preempt_mark_partial_job()
|
/linux-6.3-rc2/drivers/gpu/drm/lima/ |
A D | lima_trace.h |
  25  __entry->context = task->base.s_fence->finished.context;
  26  __entry->seqno = task->base.s_fence->finished.seqno;
|
A D | lima_sched.c |
  173  struct dma_fence *fence = dma_fence_get(&task->base.s_fence->finished);    in lima_sched_context_queue_task()
  211  if (job->s_fence->finished.error < 0)    in lima_sched_run_job()
|
/linux-6.3-rc2/drivers/gpu/drm/etnaviv/ |
A D | etnaviv_sched.c |
  25  if (likely(!sched_job->s_fence->finished.error))    in etnaviv_sched_run_job()
  112  submit->out_fence = dma_fence_get(&submit->sched_job.s_fence->finished);    in etnaviv_sched_push_job()
|
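Note: the lima and etnaviv hits above (and the v3d, panfrost and msm hits further down) show the two places a driver typically touches s_fence. At submit time it takes a reference on the finished fence and hands it back to userspace as the out-fence; in its run_job callback it checks finished.error and skips hardware submission for jobs that were already cancelled or had a failed dependency. A rough sketch with purely illustrative driver types and helper names:

    #include <drm/gpu_scheduler.h>
    #include <linux/dma-fence.h>

    struct example_job {
            struct drm_sched_job base;
            struct dma_fence *out_fence;    /* handed back to userspace */
    };

    /* Hypothetical hardware kick-off, stands in for the real driver backend. */
    static struct dma_fence *example_start_hw(struct example_job *job);

    static void example_push(struct example_job *job)
    {
            /* Grab the out-fence before the scheduler can retire the job. */
            job->out_fence = dma_fence_get(&job->base.s_fence->finished);
            drm_sched_entity_push_job(&job->base);
    }

    static struct dma_fence *example_run_job(struct drm_sched_job *sched_job)
    {
            struct example_job *job = container_of(sched_job, struct example_job, base);

            /* Cancelled or dependency error: leave the hardware alone. */
            if (unlikely(sched_job->s_fence->finished.error))
                    return NULL;

            return example_start_hw(job);
    }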
/linux-6.3-rc2/drivers/gpu/drm/v3d/ |
A D | v3d_sched.c |
  83  if (unlikely(job->base.base.s_fence->finished.error))    in v3d_bin_job_run()
  137  if (unlikely(job->base.base.s_fence->finished.error))    in v3d_render_job_run()
|
A D | v3d_gem.c | 479 job->done_fence = dma_fence_get(&job->base.s_fence->finished); in v3d_push_job()
|
/linux-6.3-rc2/include/drm/ |
A D | gpu_scheduler.h | 331 struct drm_sched_fence *s_fence; member
|
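Note: the gpu_scheduler.h hit above is the declaration all of these sites go through: each drm_sched_job carries a drm_sched_fence, which embeds the scheduled and finished dma_fences referenced throughout this listing. Roughly, with unrelated fields omitted (see include/drm/gpu_scheduler.h for the full definitions):

    struct drm_sched_fence {
            struct dma_fence scheduled;       /* signaled when the job starts on the HW ring */
            struct dma_fence finished;        /* signaled when the job has completed */
            struct dma_fence *parent;         /* hardware fence returned by run_job() */
            struct drm_gpu_scheduler *sched;  /* scheduler that owns this fence */
            /* ... */
    };

    struct drm_sched_job {
            struct drm_sched_fence *s_fence;  /* the member indexed above */
            /* ... */
    };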
/linux-6.3-rc2/drivers/gpu/drm/panfrost/ |
A D | panfrost_job.c |
  289  job->render_done_fence = dma_fence_get(&job->base.s_fence->finished);    in panfrost_job_push()
  364  if (unlikely(job->base.s_fence->finished.error))    in panfrost_job_run()
|
/linux-6.3-rc2/drivers/gpu/drm/msm/ |
A D | msm_gem_submit.c | 894 submit->user_fence = dma_fence_get(&submit->base.s_fence->finished); in msm_ioctl_gem_submit()
|