
Searched refs:sched_job (Results 1 – 19 of 19) sorted by relevance

/linux/drivers/gpu/drm/etnaviv/
etnaviv_sched.c
22 struct etnaviv_gem_submit *submit = to_etnaviv_submit(sched_job); in etnaviv_sched_run_job()
25 if (likely(!sched_job->s_fence->finished.error)) in etnaviv_sched_run_job()
34 *sched_job) in etnaviv_sched_timedout_job()
64 drm_sched_stop(&gpu->sched, sched_job); in etnaviv_sched_timedout_job()
66 if(sched_job) in etnaviv_sched_timedout_job()
67 drm_sched_increase_karma(sched_job); in etnaviv_sched_timedout_job()
79 list_add(&sched_job->list, &sched_job->sched->pending_list); in etnaviv_sched_timedout_job()
87 drm_sched_job_cleanup(sched_job); in etnaviv_sched_free_job()
110 drm_sched_job_arm(&submit->sched_job); in etnaviv_sched_push_job()
117 drm_sched_job_cleanup(&submit->sched_job); in etnaviv_sched_push_job()
[all …]
etnaviv_sched.h
14 struct etnaviv_gem_submit *to_etnaviv_submit(struct drm_sched_job *sched_job) in to_etnaviv_submit() argument
16 return container_of(sched_job, struct etnaviv_gem_submit, sched_job); in to_etnaviv_submit()
etnaviv_gem_submit.c
189 ret = drm_sched_job_add_implicit_dependencies(&submit->sched_job, in submit_fence_sync()
536 ret = drm_sched_job_init(&submit->sched_job, in etnaviv_ioctl_gem_submit()
560 ret = drm_sched_job_add_dependency(&submit->sched_job, in etnaviv_ioctl_gem_submit()
620 drm_sched_job_cleanup(&submit->sched_job); in etnaviv_ioctl_gem_submit()
etnaviv_gem.h
90 struct drm_sched_job sched_job; member
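
Taken together, the etnaviv hits show the embedding pattern used by every driver in this listing: the driver's submit object contains a struct drm_sched_job by value, and the scheduler callbacks recover the driver type with container_of(), exactly as to_etnaviv_submit() does above. A minimal sketch of the same idea; all foo_* names here and in the later sketches are invented for illustration:

    #include <drm/gpu_scheduler.h>
    #include <linux/container_of.h>

    /* Hypothetical driver job: the scheduler job is embedded, not pointed
     * to, so one allocation covers both and their lifetimes are shared. */
    struct foo_submit {
            struct drm_sched_job sched_job; /* must live until free_job */
            /* driver-private state follows */
    };

    static inline struct foo_submit *to_foo_submit(struct drm_sched_job *sched_job)
    {
            return container_of(sched_job, struct foo_submit, sched_job);
    }
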
/linux/drivers/gpu/drm/scheduler/
gpu_scheduler_trace.h
37 TP_ARGS(sched_job, entity),
41 __string(name, sched_job->sched->name)
49 __entry->id = sched_job->id;
50 __entry->fence = &sched_job->s_fence->finished;
54 &sched_job->sched->credit_count);
64 TP_ARGS(sched_job, entity)
69 TP_ARGS(sched_job, entity)
86 TP_PROTO(struct drm_sched_job *sched_job, struct dma_fence *fence),
87 TP_ARGS(sched_job, fence),
89 __string(name, sched_job->sched->name)
[all …]
sched_entity.c
33 #define to_drm_sched_job(sched_job) \ argument
34 container_of((sched_job), struct drm_sched_job, queue_node)
478 struct drm_sched_job *sched_job; in drm_sched_entity_pop_job() local
481 if (!sched_job) in drm_sched_entity_pop_job()
485 drm_sched_job_dependency(sched_job, entity))) { in drm_sched_entity_pop_job()
498 dma_fence_get(&sched_job->s_fence->finished)); in drm_sched_entity_pop_job()
525 sched_job->entity = NULL; in drm_sched_entity_pop_job()
527 return sched_job; in drm_sched_entity_pop_job()
584 struct drm_sched_entity *entity = sched_job->entity; in drm_sched_entity_push_job()
588 trace_drm_sched_job(sched_job, entity); in drm_sched_entity_push_job()
[all …]
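
drm_sched_entity_pop_job() and drm_sched_entity_push_job() are the two ends of the entity queue, and the push side imposes a fixed ordering on drivers, visible in the etnaviv_ioctl_gem_submit() hits above: init, add dependencies, arm, push. A hedged sketch of that sequence, reusing the hypothetical foo_submit from the etnaviv section; the four-argument drm_sched_job_init() matches the credit-based scheduler this listing reflects (note the credit_count field in the trace header), while older kernels take three arguments:

    static int foo_submit_job(struct foo_submit *submit,
                              struct drm_sched_entity *entity,
                              struct dma_fence *in_fence)
    {
            int ret;

            /* Bind the job to an entity; credits = 1 for a unit-cost job. */
            ret = drm_sched_job_init(&submit->sched_job, entity, 1, NULL);
            if (ret)
                    return ret;

            /* Dependencies must be recorded before arming. This consumes
             * the in_fence reference in both the success and error cases. */
            ret = drm_sched_job_add_dependency(&submit->sched_job, in_fence);
            if (ret)
                    goto err_cleanup;

            /* Arming allocates the scheduled/finished fences; after this
             * point the job must be pushed, not merely cleaned up. */
            drm_sched_job_arm(&submit->sched_job);
            drm_sched_entity_push_job(&submit->sched_job);
            return 0;

    err_cleanup:
            drm_sched_job_cleanup(&submit->sched_job);
            return ret;
    }
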
sched_main.c
96 #define to_drm_sched_job(sched_job) \ argument
1175 struct drm_sched_job *sched_job; in drm_sched_run_job_work() local
1186 sched_job = drm_sched_entity_pop_job(entity); in drm_sched_run_job_work()
1187 if (!sched_job) { in drm_sched_run_job_work()
1193 s_fence = sched_job->s_fence; in drm_sched_run_job_work()
1196 drm_sched_job_begin(sched_job); in drm_sched_run_job_work()
1198 trace_drm_run_job(sched_job, entity); in drm_sched_run_job_work()
1199 fence = sched->ops->run_job(sched_job); in drm_sched_run_job_work()
1207 r = dma_fence_add_callback(fence, &sched_job->cb, in drm_sched_run_job_work()
1210 drm_sched_job_done(sched_job, fence->error); in drm_sched_run_job_work()
[all …]
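
drm_sched_run_job_work() is the consuming side of the run_job contract: once dependencies are met it calls ops->run_job() and attaches its completion callback to whatever fence comes back. The producing side, sketched here for the hypothetical foo driver (foo_hw_submit() is invented), only has to hand over a reference to the hardware fence:

    static struct dma_fence *foo_run_job(struct drm_sched_job *sched_job)
    {
            struct foo_submit *submit = to_foo_submit(sched_job);

            /* Skip the hardware if an earlier job in the dependency chain
             * already failed; etnaviv does the same check above. */
            if (unlikely(sched_job->s_fence->finished.error))
                    return NULL;

            /* Queue the job on the hardware. The returned fence signals on
             * completion and its reference is consumed by the scheduler. */
            return foo_hw_submit(submit);
    }
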
/linux/drivers/gpu/drm/v3d/
v3d_sched.c
33 to_v3d_job(struct drm_sched_job *sched_job) in to_v3d_job() argument
39 to_bin_job(struct drm_sched_job *sched_job) in to_bin_job() argument
45 to_render_job(struct drm_sched_job *sched_job) in to_render_job() argument
51 to_tfu_job(struct drm_sched_job *sched_job) in to_tfu_job() argument
57 to_csd_job(struct drm_sched_job *sched_job) in to_csd_job() argument
63 to_cpu_job(struct drm_sched_job *sched_job) in to_cpu_job() argument
71 struct v3d_job *job = to_v3d_job(sched_job); in v3d_sched_job_free()
636 struct v3d_job *job = to_v3d_job(sched_job); in v3d_cache_clean_job_run()
659 if (sched_job) in v3d_gpu_reset_for_timeout()
660 drm_sched_increase_karma(sched_job); in v3d_gpu_reset_for_timeout()
[all …]
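
v3d layers several queue-specific job types over one common base, which is why it needs six to_*_job() helpers: each is a container_of() at a different level. A sketch of the two-level layout with hypothetical names:

    struct foo_job {
            struct drm_sched_job base;      /* shared scheduler plumbing */
    };

    struct foo_bin_job {
            struct foo_job base;            /* common driver state first */
            u32 start, end;                 /* queue-specific payload */
    };

    static struct foo_job *to_foo_job(struct drm_sched_job *sched_job)
    {
            return container_of(sched_job, struct foo_job, base);
    }

    static struct foo_bin_job *to_foo_bin_job(struct drm_sched_job *sched_job)
    {
            /* Only valid for jobs that were submitted on the bin queue. */
            return container_of(to_foo_job(sched_job), struct foo_bin_job, base);
    }
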
/linux/include/drm/
gpu_scheduler.h
408 struct dma_fence *(*prepare_job)(struct drm_sched_job *sched_job,
417 struct dma_fence *(*run_job)(struct drm_sched_job *sched_job);
461 enum drm_gpu_sched_stat (*timedout_job)(struct drm_sched_job *sched_job);
467 void (*free_job)(struct drm_sched_job *sched_job);
480 u32 (*update_job_credits)(struct drm_sched_job *sched_job);
608 void drm_sched_entity_push_job(struct drm_sched_job *sched_job);
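
gpu_scheduler.h declares every callback seen in the driver hits above; a driver ties them together in one drm_sched_backend_ops table passed at scheduler init. A minimal wiring sketch, assuming the hypothetical foo_* callbacks from the other sketches; run_job and free_job are the essential pair, the rest may be left NULL:

    static const struct drm_sched_backend_ops foo_sched_ops = {
            .prepare_job  = foo_prepare_job,  /* optional extra dependencies */
            .run_job      = foo_run_job,      /* hand the job to the hardware */
            .timedout_job = foo_timedout_job, /* recover from a hang */
            .free_job     = foo_free_job,     /* final per-job teardown */
            /* .update_job_credits is optional; it lets a driver report a
             * varying credit cost per job, as declared above. */
    };
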
/linux/drivers/gpu/drm/nouveau/
nouveau_sched.c
361 nouveau_sched_run_job(struct drm_sched_job *sched_job) in nouveau_sched_run_job() argument
363 struct nouveau_job *job = to_nouveau_job(sched_job); in nouveau_sched_run_job()
369 nouveau_sched_timedout_job(struct drm_sched_job *sched_job) in nouveau_sched_timedout_job() argument
371 struct drm_gpu_scheduler *sched = sched_job->sched; in nouveau_sched_timedout_job()
372 struct nouveau_job *job = to_nouveau_job(sched_job); in nouveau_sched_timedout_job()
375 drm_sched_stop(sched, sched_job); in nouveau_sched_timedout_job()
388 nouveau_sched_free_job(struct drm_sched_job *sched_job) in nouveau_sched_free_job() argument
390 struct nouveau_job *job = to_nouveau_job(sched_job); in nouveau_sched_free_job()
nouveau_sched.h
13 #define to_nouveau_job(sched_job) \ argument
14 container_of((sched_job), struct nouveau_job, base)
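
nouveau_sched_free_job() follows the rule visible in nearly every free_job hit in this listing: call drm_sched_job_cleanup() exactly once, then release driver state. A sketch for the hypothetical foo driver:

    #include <linux/slab.h>

    static void foo_free_job(struct drm_sched_job *sched_job)
    {
            struct foo_submit *submit = to_foo_submit(sched_job);

            /* Releases scheduler-side resources: fences, dependencies. */
            drm_sched_job_cleanup(sched_job);

            /* Only now is it safe to free the embedding structure. */
            kfree(submit);
    }
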
/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_job.c
347 amdgpu_job_prepare_job(struct drm_sched_job *sched_job, in amdgpu_job_prepare_job() argument
351 struct amdgpu_job *job = to_amdgpu_job(sched_job); in amdgpu_job_prepare_job()
377 static struct dma_fence *amdgpu_job_run(struct drm_sched_job *sched_job) in amdgpu_job_run() argument
379 struct amdgpu_ring *ring = to_amdgpu_ring(sched_job->sched); in amdgpu_job_run()
385 job = to_amdgpu_job(sched_job); in amdgpu_job_run()
414 #define to_drm_sched_job(sched_job) \ argument
415 container_of((sched_job), struct drm_sched_job, queue_node)
amdgpu_job.h
39 #define to_amdgpu_job(sched_job) \ argument
40 container_of((sched_job), struct amdgpu_job, base)
amdgpu_trace.h
518 TP_PROTO(struct amdgpu_job *sched_job, struct dma_fence *fence),
519 TP_ARGS(sched_job, fence),
521 __string(ring, sched_job->base.sched->name)
530 __entry->id = sched_job->base.id;
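
amdgpu_job_prepare_job() illustrates the optional prepare_job hook: the scheduler calls it before run_job and waits on any fence it returns, calling again once that fence signals, until the hook returns NULL. A hedged sketch; foo_get_vm_fence() is invented and stands in for whatever late dependency the driver discovers:

    static struct dma_fence *
    foo_prepare_job(struct drm_sched_job *sched_job,
                    struct drm_sched_entity *entity)
    {
            struct foo_submit *submit = to_foo_submit(sched_job);

            /* Return a not-yet-signaled fence to delay the job, or NULL
             * once nothing further blocks it. */
            return foo_get_vm_fence(submit);
    }
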
/linux/drivers/gpu/drm/panfrost/
panfrost_job.c
43 to_panfrost_job(struct drm_sched_job *sched_job) in to_panfrost_job() argument
45 return container_of(sched_job, struct panfrost_job, base); in to_panfrost_job()
365 static void panfrost_job_free(struct drm_sched_job *sched_job) in panfrost_job_free() argument
367 struct panfrost_job *job = to_panfrost_job(sched_job); in panfrost_job_free()
369 drm_sched_job_cleanup(sched_job); in panfrost_job_free()
374 static struct dma_fence *panfrost_job_run(struct drm_sched_job *sched_job) in panfrost_job_run() argument
376 struct panfrost_job *job = to_panfrost_job(sched_job); in panfrost_job_run()
741 *sched_job) in panfrost_job_timedout()
743 struct panfrost_job *job = to_panfrost_job(sched_job); in panfrost_job_timedout()
776 sched_job); in panfrost_job_timedout()
[all …]
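
panfrost_job_timedout(), like the etnaviv and v3d handlers above, wraps a device reset in the standard recovery dance: stop the scheduler, blame the hung job, reset, restart. A sketch under the caveat that these helper signatures have shifted across kernel versions; foo_hw_reset() is invented:

    static enum drm_gpu_sched_stat
    foo_timedout_job(struct drm_sched_job *sched_job)
    {
            struct drm_gpu_scheduler *sched = sched_job->sched;

            /* Park the scheduler and detach it from the hung job. */
            drm_sched_stop(sched, sched_job);

            /* Blame the hung job so repeated hangs get its context banned. */
            drm_sched_increase_karma(sched_job);

            foo_hw_reset(sched);

            /* Unpark; pending jobs are resubmitted or completed in error. */
            drm_sched_start(sched, true);

            return DRM_GPU_SCHED_STAT_NOMINAL;
    }
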
/linux/drivers/gpu/drm/panthor/
panthor_mmu.c
2180 panthor_vm_bind_run_job(struct drm_sched_job *sched_job) in panthor_vm_bind_run_job() argument
2214 void panthor_vm_bind_job_put(struct drm_sched_job *sched_job) in panthor_vm_bind_job_put() argument
2217 container_of(sched_job, struct panthor_vm_bind_job, base); in panthor_vm_bind_job_put()
2219 if (sched_job) in panthor_vm_bind_job_put()
2224 panthor_vm_bind_free_job(struct drm_sched_job *sched_job) in panthor_vm_bind_free_job() argument
2227 container_of(sched_job, struct panthor_vm_bind_job, base); in panthor_vm_bind_free_job()
2229 drm_sched_job_cleanup(sched_job); in panthor_vm_bind_free_job()
2238 panthor_vm_bind_timedout_job(struct drm_sched_job *sched_job) in panthor_vm_bind_timedout_job() argument
2499 struct drm_sched_job *sched_job) in panthor_vm_bind_job_prepare_resvs() argument
2525 struct drm_sched_job *sched_job) in panthor_vm_bind_job_update_resvs() argument
[all …]
panthor_sched.c
2833 queue_run_job(struct drm_sched_job *sched_job) in queue_run_job() argument
2974 queue_timedout_job(struct drm_sched_job *sched_job) in queue_timedout_job() argument
3008 static void queue_free_job(struct drm_sched_job *sched_job) in queue_free_job() argument
3010 drm_sched_job_cleanup(sched_job); in queue_free_job()
3011 panthor_job_put(sched_job); in queue_free_job()
3350 struct drm_sched_job *panthor_job_get(struct drm_sched_job *sched_job) in panthor_job_get() argument
3352 if (sched_job) { in panthor_job_get()
3358 return sched_job; in panthor_job_get()
3361 void panthor_job_put(struct drm_sched_job *sched_job) in panthor_job_put() argument
3365 if (sched_job) in panthor_job_put()
[all …]
panthor_sched.h
33 struct panthor_vm *panthor_job_vm(struct drm_sched_job *sched_job);
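
panthor_job_get()/panthor_job_put() add reference counting on top of the embedding pattern, since a panthor job can be referenced from more than one place. A kref-based sketch with hypothetical names:

    #include <linux/kref.h>
    #include <linux/slab.h>

    struct foo_refjob {
            struct drm_sched_job base;
            struct kref refcount;
    };

    static void foo_refjob_release(struct kref *ref)
    {
            kfree(container_of(ref, struct foo_refjob, refcount));
    }

    struct drm_sched_job *foo_job_get(struct drm_sched_job *sched_job)
    {
            if (sched_job) {
                    struct foo_refjob *job =
                            container_of(sched_job, struct foo_refjob, base);

                    kref_get(&job->refcount);
            }
            return sched_job;
    }

    void foo_job_put(struct drm_sched_job *sched_job)
    {
            if (sched_job) {
                    struct foo_refjob *job =
                            container_of(sched_job, struct foo_refjob, base);

                    kref_put(&job->refcount, foo_refjob_release);
            }
    }
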
/linux/drivers/gpu/drm/imagination/
pvr_queue.c
495 pvr_queue_prepare_job(struct drm_sched_job *sched_job, in pvr_queue_prepare_job() argument
498 struct pvr_job *job = container_of(sched_job, struct pvr_job, base); in pvr_queue_prepare_job()
686 static struct dma_fence *pvr_queue_run_job(struct drm_sched_job *sched_job) in pvr_queue_run_job() argument
688 struct pvr_job *job = container_of(sched_job, struct pvr_job, base); in pvr_queue_run_job()
854 static void pvr_queue_free_job(struct drm_sched_job *sched_job) in pvr_queue_free_job() argument
856 struct pvr_job *job = container_of(sched_job, struct pvr_job, base); in pvr_queue_free_job()
858 drm_sched_job_cleanup(sched_job); in pvr_queue_free_job()
