
Searched refs:job (Results 1 – 25 of 366) sorted by relevance


/linux/drivers/gpu/drm/nouveau/
nouveau_sched.c
45 if (job->sync) in nouveau_job_init()
58 if (job->sync) { in nouveau_job_init()
72 job->out_sync.objs = kcalloc(job->out_sync.count, in nouveau_job_init()
80 job->out_sync.chains = kcalloc(job->out_sync.count, in nouveau_job_init()
115 job->ops->free(job); in nouveau_job_fini()
302 ret = job->ops->submit(job, &vm_exec); in nouveau_job_submit()
309 list_add(&job->entry, &sched->job.list.head); in nouveau_job_submit()
314 if (job->sync) in nouveau_job_submit()
318 job->ops->armed_submit(job, &vm_exec); in nouveau_job_submit()
351 fence = job->ops->run(job); in nouveau_job_run()
[all …]
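
The nouveau_sched.c hits above all route the job through an ops table (job->ops->submit, job->ops->run, job->ops->free). As a rough illustration of that shape only, here is a minimal standalone C sketch; struct sketch_job, sketch_job_ops and the demo_* callbacks are invented names and not the nouveau or DRM scheduler API.

/*
 * Minimal userspace sketch of the ops-table pattern suggested by the
 * nouveau_sched.c matches above: a job delegates submit/run/free to
 * per-type callbacks.  All names here are hypothetical.
 */
#include <stdio.h>
#include <stdlib.h>

struct sketch_job;

struct sketch_job_ops {
	int  (*submit)(struct sketch_job *job);
	void (*run)(struct sketch_job *job);
	void (*free)(struct sketch_job *job);
};

struct sketch_job {
	const struct sketch_job_ops *ops;
	int sync;	/* mirrors the job->sync checks seen above */
};

static int demo_submit(struct sketch_job *job)
{
	printf("submit (sync=%d)\n", job->sync);
	return 0;
}

static void demo_run(struct sketch_job *job)
{
	printf("run job %p\n", (void *)job);
}

static void demo_free(struct sketch_job *job)
{
	free(job);
}

static const struct sketch_job_ops demo_ops = {
	.submit = demo_submit,
	.run    = demo_run,
	.free   = demo_free,
};

int main(void)
{
	struct sketch_job *job = calloc(1, sizeof(*job));

	if (!job)
		return 1;
	job->ops = &demo_ops;
	job->sync = 1;

	if (!job->ops->submit(job))	/* cf. job->ops->submit(job, ...) */
		job->ops->run(job);	/* cf. fence = job->ops->run(job) */
	job->ops->free(job);		/* cf. job->ops->free(job) */
	return 0;
}
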
nouveau_exec.c
122 job->resv_usage, job->resv_usage); in nouveau_exec_job_armed_submit()
168 nouveau_job_done(job); in nouveau_exec_job_free()
169 nouveau_job_free(job); in nouveau_exec_job_free()
203 struct nouveau_exec_job *job; in nouveau_exec_job_init() local
218 job = *pjob = kzalloc(sizeof(*job), GFP_KERNEL); in nouveau_exec_job_init()
219 if (!job) in nouveau_exec_job_init()
228 if (!job->push.s) { in nouveau_exec_job_init()
235 job->chan = __args->chan; in nouveau_exec_job_init()
257 kfree(job->push.s); in nouveau_exec_job_init()
259 kfree(job); in nouveau_exec_job_init()
[all …]
/linux/drivers/gpu/host1x/
job.c
52 if (!job) in host1x_job_alloc()
70 job->reloc_addr_phys = job->addr_phys; in host1x_job_alloc()
71 job->gather_addr_phys = &job->addr_phys[num_relocs]; in host1x_job_alloc()
73 return job; in host1x_job_alloc()
80 return job; in host1x_job_get()
89 job->release(job); in job_free()
200 job->addr_phys[job->num_unpins] = map->phys; in pin_job()
201 job->unpins[job->num_unpins].map = map; in pin_job()
263 job->addr_phys[job->num_unpins] = map->phys; in pin_job()
264 job->unpins[job->num_unpins].map = map; in pin_job()
[all …]
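
The job.c hits carve one addr_phys array into two views: reloc_addr_phys at the start and gather_addr_phys beginning at index num_relocs. The short sketch below reproduces just that split in plain C; struct sketch_job and sketch_job_alloc_addrs are invented for illustration and are not the host1x API.

/*
 * Userspace sketch of the single-array split seen in the host1x job.c
 * matches above: one allocation, viewed as a relocation part followed by
 * a gather part.  Hypothetical names throughout.
 */
#include <stdio.h>
#include <stdlib.h>

struct sketch_job {
	unsigned long *addr_phys;
	unsigned long *reloc_addr_phys;
	unsigned long *gather_addr_phys;
};

static int sketch_job_alloc_addrs(struct sketch_job *job,
				  unsigned int num_relocs,
				  unsigned int num_gathers)
{
	job->addr_phys = calloc(num_relocs + num_gathers,
				sizeof(*job->addr_phys));
	if (!job->addr_phys)
		return -1;

	/* cf. job->reloc_addr_phys = job->addr_phys */
	job->reloc_addr_phys = job->addr_phys;
	/* cf. job->gather_addr_phys = &job->addr_phys[num_relocs] */
	job->gather_addr_phys = &job->addr_phys[num_relocs];
	return 0;
}

int main(void)
{
	struct sketch_job job = { 0 };

	if (sketch_job_alloc_addrs(&job, 2, 3))
		return 1;
	job.reloc_addr_phys[0] = 0x1000;	/* first relocation slot */
	job.gather_addr_phys[0] = 0x2000;	/* first gather slot */
	printf("relocs at %p, gathers at %p\n",
	       (void *)job.reloc_addr_phys, (void *)job.gather_addr_phys);
	free(job.addr_phys);
	return 0;
}
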
cdma.c
326 if (job->timeout) in update_cdma_locked()
340 if (job->num_slots) { in update_cdma_locked()
349 list_del(&job->list); in update_cdma_locked()
350 host1x_job_put(job); in update_cdma_locked()
398 job = NULL; in host1x_cdma_update_sync_queue()
413 if (!job) in host1x_cdma_update_sync_queue()
422 job->timeout = 0; in host1x_cdma_update_sync_queue()
432 job->num_slots); in host1x_cdma_update_sync_queue()
470 unsigned int next_job = (job->first_get/8 + job->num_slots) in host1x_cdma_update_sync_queue()
564 if (job->timeout) { in host1x_cdma_begin()
[all …]
/linux/drivers/gpu/drm/xe/
xe_sched_job.c
100 struct xe_sched_job *job; in xe_sched_job_create() local
109 if (!job) in xe_sched_job_create()
112 job->q = q; in xe_sched_job_create()
113 kref_init(&job->refcount); in xe_sched_job_create()
114 xe_exec_queue_get(job->q); in xe_sched_job_create()
150 return job; in xe_sched_job_create()
157 job_free(job); in xe_sched_job_create()
178 job_free(job); in xe_sched_job_destroy()
283 job->fence = fence; in xe_sched_job_arm()
289 xe_sched_job_get(job); in xe_sched_job_push()
[all …]
xe_sched_job.h
33 kref_get(&job->refcount); in xe_sched_job_get()
34 return job; in xe_sched_job_get()
46 kref_put(&job->refcount, xe_sched_job_destroy); in xe_sched_job_put()
52 return job->fence->error < 0; in xe_sched_job_is_error()
55 bool xe_sched_job_started(struct xe_sched_job *job);
56 bool xe_sched_job_completed(struct xe_sched_job *job);
58 void xe_sched_job_arm(struct xe_sched_job *job);
59 void xe_sched_job_push(struct xe_sched_job *job);
73 return job->fence ? job->fence->seqno : 0; in xe_sched_job_seqno()
78 return job->lrc_seqno; in xe_sched_job_lrc_seqno()
[all …]
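
The xe_sched_job.h hits show the usual kref lifetime pattern: kref_init at creation, kref_get in xe_sched_job_get(), and kref_put with a destroy callback in xe_sched_job_put(). Below is a small userspace sketch of that pattern using C11 atomics instead of struct kref; sketch_job and the sketch_* helpers are hypothetical and only mimic the shape of the kernel code.

/*
 * Userspace sketch of the kref-style get/put lifetime visible in the
 * xe_sched_job.h matches above.  Plain C11 atomics stand in for the
 * kernel's struct kref; all names are hypothetical.
 */
#include <stdatomic.h>
#include <stdio.h>
#include <stdlib.h>

struct sketch_job {
	atomic_int refcount;
	int seqno;
};

static struct sketch_job *sketch_job_get(struct sketch_job *job)
{
	atomic_fetch_add(&job->refcount, 1);	/* cf. kref_get(&job->refcount) */
	return job;
}

static void sketch_job_destroy(struct sketch_job *job)
{
	printf("destroying job %d\n", job->seqno);
	free(job);
}

static void sketch_job_put(struct sketch_job *job)
{
	/* cf. kref_put(&job->refcount, xe_sched_job_destroy) */
	if (atomic_fetch_sub(&job->refcount, 1) == 1)
		sketch_job_destroy(job);
}

int main(void)
{
	struct sketch_job *job = calloc(1, sizeof(*job));

	if (!job)
		return 1;
	atomic_init(&job->refcount, 1);	/* cf. kref_init(&job->refcount) */
	job->seqno = 42;

	sketch_job_get(job);	/* e.g. the scheduler takes a reference */
	sketch_job_put(job);	/* scheduler side drops it */
	sketch_job_put(job);	/* creator's reference; frees the job */
	return 0;
}
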
xe_ring_ops.c
262 if (job->user_fence.used) { in __emit_job_gen12_simple()
265 job->user_fence.value, in __emit_job_gen12_simple()
325 if (job->user_fence.used) { in __emit_job_gen12_video()
328 job->user_fence.value, in __emit_job_gen12_video()
376 if (job->user_fence.used) in __emit_job_gen12_render_compute()
378 job->user_fence.value, in __emit_job_gen12_render_compute()
434 __emit_job_gen12_simple(job, job->q->lrc[0], in emit_job_gen12_gsc()
444 emit_migration_job_gen12(job, job->q->lrc[0], in emit_job_gen12_copy()
450 __emit_job_gen12_simple(job, job->q->lrc[i], in emit_job_gen12_copy()
461 __emit_job_gen12_video(job, job->q->lrc[i], in emit_job_gen12_video()
[all …]
/linux/drivers/md/
dm-kcopyd.c
434 job->master_job->write_offset += job->source.count; in pop_io_job()
460 return job; in pop()
500 if (job->pages && job->pages != &zero_page_list) in run_complete_job()
506 if (job->master_job == job) { in run_complete_job()
574 job->write_err = job->master_job->write_err; in run_io_job()
596 push(&job->kc->io_jobs, job); in run_pages_job()
715 if ((!job->read_err && !job->write_err) || in segment_complete()
829 memset(&job->source, 0, sizeof(job->source)); in dm_kcopyd_copy()
830 job->source.count = job->dests[0].count; in dm_kcopyd_copy()
846 job->master_job = job; in dm_kcopyd_copy()
[all …]
/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_job.c
195 if (!*job) in amdgpu_job_alloc()
231 kfree(*job); in amdgpu_job_alloc_with_ib()
261 if (job->base.s_fence && job->base.s_fence->finished.ops) in amdgpu_job_free_resources()
282 kfree(job); in amdgpu_job_free_cb()
310 if (job->gang_submit != &job->base.s_fence->scheduled) in amdgpu_job_free()
314 kfree(job); in amdgpu_job_free()
337 r = amdgpu_ib_schedule(ring, job->num_ibs, job->ibs, job, fence); in amdgpu_job_submit_direct()
362 while (!fence && job->vm && !job->vmid) { in amdgpu_job_prepare_job()
363 r = amdgpu_vmid_grab(job->vm, ring, job, &fence); in amdgpu_job_prepare_job()
392 (job->job_run_counter && job->gang_submit)) in amdgpu_job_run()
[all …]
amdgpu_ib.c
151 if (job) { in amdgpu_ib_schedule()
152 vm = job->vm; in amdgpu_ib_schedule()
153 fence_ctx = job->base.s_fence ? in amdgpu_ib_schedule()
155 shadow_va = job->shadow_va; in amdgpu_ib_schedule()
156 csa_va = job->csa_va; in amdgpu_ib_schedule()
157 gds_va = job->gds_va; in amdgpu_ib_schedule()
158 init_shadow = job->init_shadow; in amdgpu_ib_schedule()
216 if (job) { in amdgpu_ib_schedule()
277 if (job && job->uf_addr) { in amdgpu_ib_schedule()
278 amdgpu_ring_emit_fence(ring, job->uf_addr, job->uf_sequence, in amdgpu_ib_schedule()
[all …]
amdgpu_ids.c
169 struct amdgpu_job *job) in amdgpu_vmid_gds_switch_needed() argument
176 id->oa_size != job->oa_size; in amdgpu_vmid_gds_switch_needed()
181 struct amdgpu_job *job) in amdgpu_vmid_compatible() argument
305 if (!job->gang_submit || in amdgpu_vmid_grab_reserved()
351 struct amdgpu_job *job, in amdgpu_vmid_grab_used() argument
446 job->vm_needs_flush = true; in amdgpu_vmid_grab()
452 job->gds_switch_needed = amdgpu_vmid_gds_switch_needed(id, job); in amdgpu_vmid_grab()
453 if (job->vm_needs_flush) { in amdgpu_vmid_grab()
459 job->pasid = vm->pasid; in amdgpu_vmid_grab()
465 id->oa_base = job->oa_base; in amdgpu_vmid_grab()
[all …]
amdgpu_job.h
42 #define AMDGPU_JOB_GET_VMID(job) ((job) ? (job)->vmid : 0) argument
86 static inline struct amdgpu_ring *amdgpu_job_ring(struct amdgpu_job *job) in amdgpu_job_ring() argument
88 return to_amdgpu_ring(job->base.entity->rq->sched); in amdgpu_job_ring()
93 unsigned int num_ibs, struct amdgpu_job **job);
97 struct amdgpu_job **job);
98 void amdgpu_job_set_resources(struct amdgpu_job *job, struct amdgpu_bo *gds,
100 void amdgpu_job_free_resources(struct amdgpu_job *job);
101 void amdgpu_job_set_gang_leader(struct amdgpu_job *job,
103 void amdgpu_job_free(struct amdgpu_job *job);
104 struct dma_fence *amdgpu_job_submit(struct amdgpu_job *job);
[all …]
/linux/drivers/gpu/host1x/hw/
channel_hw.c
233 submit_wait(job, job->syncpt->id, fence, job->class); in channel_program_cdma()
236 job->syncpt_end = host1x_syncpt_incr_max(sp, job->syncpt_incrs); in channel_program_cdma()
237 submit_gathers(job, job->syncpt_end - job->syncpt_incrs); in channel_program_cdma()
245 submit_wait(job, job->syncpt->id, fence, job->class); in channel_program_cdma()
268 if (job->class) in channel_program_cdma()
273 job->syncpt_end = host1x_syncpt_incr_max(sp, job->syncpt_incrs); in channel_program_cdma()
275 submit_gathers(job, job->syncpt_end - job->syncpt_incrs); in channel_program_cdma()
297 job->num_cmds, job->num_relocs, in channel_submit()
298 job->syncpt->id, job->syncpt_incrs); in channel_submit()
328 job->fence = NULL; in channel_submit()
[all …]
debug_hw.c
196 struct host1x_job *job; in show_channel_gathers() local
202 job->syncpt->id, job->syncpt_end, job->timeout, in show_channel_gathers()
203 job->num_slots, job->num_unpins); in show_channel_gathers()
205 show_gather(o, pb->dma + job->first_get, job->num_slots * 2, cdma, in show_channel_gathers()
206 pb->dma + job->first_get, pb->mapped + job->first_get); in show_channel_gathers()
208 for (i = 0; i < job->num_cmds; i++) { in show_channel_gathers()
212 if (job->cmds[i].is_wait) in show_channel_gathers()
215 g = &job->cmds[i].gather; in show_channel_gathers()
217 if (job->gather_copy_mapped) in show_channel_gathers()
218 mapped = (u32 *)job->gather_copy_mapped; in show_channel_gathers()
[all …]
/linux/drivers/gpu/drm/imagination/
pvr_job.c
27 xa_erase(&job->pvr_dev->job_ids, job->id); in pvr_job_release()
37 kfree(job->cmd); in pvr_job_release()
38 kfree(job); in pvr_job_release()
48 if (job) in pvr_job_put()
74 if (!job->cmd) in pvr_job_process_stream()
425 job = kzalloc(sizeof(*job), GFP_KERNEL); in create_job()
426 if (!job) in create_job()
460 return job; in create_job()
548 struct pvr_job *job = job_data[i].job; in jobs_lock_all_objs() local
637 struct pvr_job *job = job_data[i - 1].job; in get_last_queued_job_scheduled_fence() local
[all …]
pvr_queue.c
415 queue->cccb_fence_ctx.job = pvr_job_get(job); in pvr_queue_get_job_cccb_fence()
669 pvr_cccb_write_command_with_header(cccb, job->fw_ccb_cmd_type, job->cmd_len, job->cmd, in pvr_queue_submit_job_to_cccb()
670 job->id, job->id); in pvr_queue_submit_job_to_cccb()
710 job->hwrt != job->paired_job->hwrt || in pvr_queue_run_job()
711 job->ctx != job->paired_job->ctx))) in pvr_queue_run_job()
939 job = queue->cccb_fence_ctx.job; in pvr_queue_check_job_waiting_for_cccb_space()
940 if (!job) in pvr_queue_check_job_waiting_for_cccb_space()
947 job = NULL; in pvr_queue_check_job_waiting_for_cccb_space()
954 job = NULL; in pvr_queue_check_job_waiting_for_cccb_space()
964 job = NULL; in pvr_queue_check_job_waiting_for_cccb_space()
[all …]
pvr_job.h
104 pvr_job_get(struct pvr_job *job) in pvr_job_get() argument
106 if (job) in pvr_job_get()
107 kref_get(&job->ref_count); in pvr_job_get()
109 return job; in pvr_job_get()
112 void pvr_job_put(struct pvr_job *job);
121 if (job->has_pm_ref) { in pvr_job_release_pm_ref()
122 pvr_power_put(job->pvr_dev); in pvr_job_release_pm_ref()
123 job->has_pm_ref = false; in pvr_job_release_pm_ref()
140 if (job->has_pm_ref) in pvr_job_get_pm_ref()
143 err = pvr_power_get(job->pvr_dev); in pvr_job_get_pm_ref()
[all …]
/linux/drivers/gpu/drm/v3d/
v3d_sched.c
73 v3d_job_cleanup(job); in v3d_sched_job_free()
126 if (job->perfmon && v3d->active_perfmon != job->perfmon) in v3d_switch_perfmon()
192 v3d->bin_job = job; in v3d_bin_job_run()
210 job->start, job->end); in v3d_bin_job_run()
218 if (job->qma) { in v3d_bin_job_run()
222 if (job->qts) { in v3d_bin_job_run()
225 job->qts); in v3d_bin_job_run()
262 job->start, job->end); in v3d_render_job_run()
625 cpu_job_function[job->job_type](job); in v3d_cpu_job_run()
707 &job->timedout_ctca, &job->timedout_ctra); in v3d_bin_job_timedout()
[all …]
v3d_submit.c
83 job->bo_count, &job->bo); in v3d_lookup_bos()
104 kfree(job); in v3d_job_free()
123 if (!job) in v3d_job_cleanup()
132 if (!job) in v3d_job_put()
135 kref_put(&job->refcount, job->free); in v3d_job_put()
238 dma_resv_add_fence(job->bo[i]->resv, job->done_fence, in v3d_attach_fences_and_unlock_reservation()
277 ret = v3d_job_allocate((void *)job, sizeof(**job)); in v3d_setup_csd_jobs_and_bos()
416 if (!job) { in v3d_get_cpu_indirect_csd_params()
459 if (!job) { in v3d_get_cpu_timestamp_query_params()
1083 ret = v3d_job_allocate((void *)&job, sizeof(*job)); in v3d_submit_tfu_ioctl()
[all …]
/linux/block/
bsg-lib.c
31 struct bsg_job *job; in bsg_transport_sg_io_fn() local
50 reply = job->reply; in bsg_transport_sg_io_fn()
51 memset(job, 0, sizeof(*job)); in bsg_transport_sg_io_fn()
52 job->reply = reply; in bsg_transport_sg_io_fn()
54 job->dd_data = job + 1; in bsg_transport_sg_io_fn()
76 job->bidi_bio = job->bidi_rq->bio; in bsg_transport_sg_io_fn()
140 if (job->bidi_rq) in bsg_transport_sg_io_fn()
143 if (job->bidi_rq) in bsg_transport_sg_io_fn()
245 ret = bsg_map_buffer(&job->reply_payload, job->bidi_rq); in bsg_prepare_job()
249 job->dev = dev; in bsg_prepare_job()
[all …]
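
One bsg-lib.c hit sets job->dd_data = job + 1, i.e. the driver's private data lives directly behind the generic job in the same allocation. The following standalone sketch shows that layout trick in isolation; sketch_bsg_job, sketch_driver_data and sketch_job_alloc are made-up names, not the block layer API.

/*
 * Userspace sketch of the "driver data follows the job" trick seen in the
 * bsg-lib.c match above: one allocation holds the generic job header plus
 * an opaque per-driver area right behind it.  Hypothetical names and sizes.
 */
#include <stdio.h>
#include <stdlib.h>

struct sketch_bsg_job {
	int result;
	void *dd_data;		/* points just past this struct */
};

struct sketch_driver_data {
	int tag;
};

static struct sketch_bsg_job *sketch_job_alloc(size_t dd_size)
{
	struct sketch_bsg_job *job = calloc(1, sizeof(*job) + dd_size);

	if (!job)
		return NULL;
	job->dd_data = job + 1;		/* cf. job->dd_data = job + 1 */
	return job;
}

int main(void)
{
	struct sketch_bsg_job *job =
		sketch_job_alloc(sizeof(struct sketch_driver_data));
	struct sketch_driver_data *dd;

	if (!job)
		return 1;
	dd = job->dd_data;
	dd->tag = 7;
	printf("driver tag %d stored at %p, header at %p\n",
	       dd->tag, (void *)dd, (void *)job);
	free(job);
	return 0;
}
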
/linux/drivers/gpu/drm/panfrost/
panfrost_job.c
161 WARN_ON(!job); in panfrost_dequeue_job()
175 return job; in panfrost_dequeue_job()
298 ret = drm_gem_lock_reservations(job->bos, job->bo_count, in panfrost_job_push()
308 ret = panfrost_acquire_object_fences(job->bos, job->bo_count, in panfrost_job_push()
321 panfrost_attach_object_fences(job->bos, job->bo_count, in panfrost_job_push()
357 kfree(job); in panfrost_job_cleanup()
387 if (!job->jc) in panfrost_job_run()
456 job->jc = 0; in panfrost_job_handle_err()
463 job->jc = 0; in panfrost_job_handle_err()
486 job->jc = 0; in panfrost_job_handle_done()
[all …]
/linux/drivers/accel/ivpu/
ivpu_job.c
428 job->job_id, job->file_priv->ctx.id, job->engine_idx); in ivpu_job_destroy()
436 kfree(job); in ivpu_job_destroy()
445 job = kzalloc(struct_size(job, bos, bo_count), GFP_KERNEL); in ivpu_job_create()
446 if (!job) in ivpu_job_create()
461 return job; in ivpu_job_create()
464 kfree(job); in ivpu_job_create()
482 return job; in ivpu_job_remove_from_submitted_jobs()
490 if (!job) in ivpu_job_signal_and_destroy()
500 job->job_id, job->file_priv->ctx.id, job->engine_idx, job_status); in ivpu_job_signal_and_destroy()
570 job->job_id, file_priv->ctx.id, job->engine_idx, priority, in ivpu_job_submit()
[all …]
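
The ivpu_job.c hit allocates with struct_size(job, bos, bo_count), i.e. the job header and its variable-length BO array share one allocation. Here is a hedged userspace sketch of that flexible-array pattern, sizing the allocation by hand since struct_size() is kernel-only; all sketch_* names are hypothetical.

/*
 * Userspace sketch of the flexible-array allocation seen in the ivpu_job.c
 * match above: one calloc covers the fixed header plus bo_count trailing
 * array slots.  Hypothetical names throughout.
 */
#include <stdio.h>
#include <stdlib.h>

struct sketch_bo {
	unsigned long handle;
};

struct sketch_job {
	unsigned int bo_count;
	struct sketch_bo *bos[];	/* flexible array member */
};

static struct sketch_job *sketch_job_create(unsigned int bo_count)
{
	/* cf. kzalloc(struct_size(job, bos, bo_count), GFP_KERNEL) */
	struct sketch_job *job =
		calloc(1, sizeof(*job) + bo_count * sizeof(job->bos[0]));

	if (!job)
		return NULL;
	job->bo_count = bo_count;
	return job;
}

int main(void)
{
	struct sketch_job *job = sketch_job_create(4);

	if (!job)
		return 1;
	printf("job with room for %u buffer objects\n", job->bo_count);
	free(job);
	return 0;
}
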
/linux/drivers/gpu/drm/
drm_writeback.c
325 job->prepared = true; in drm_writeback_prepare_job()
352 struct drm_writeback_job *job; in drm_writeback_queue_job() local
373 if (job->fb) in drm_writeback_cleanup_job()
374 drm_framebuffer_put(job->fb); in drm_writeback_cleanup_job()
376 if (job->out_fence) in drm_writeback_cleanup_job()
379 kfree(job); in drm_writeback_cleanup_job()
427 if (job) in drm_writeback_signal_completion()
428 list_del(&job->list_entry); in drm_writeback_signal_completion()
432 if (WARN_ON(!job)) in drm_writeback_signal_completion()
435 out_fence = job->out_fence; in drm_writeback_signal_completion()
[all …]
/linux/drivers/scsi/lpfc/
lpfc_bsg.c
309 if (job) { in lpfc_bsg_send_mgmt_cmd_cmp()
332 if (job) { in lpfc_bsg_send_mgmt_cmd_cmp()
366 if (job) { in lpfc_bsg_send_mgmt_cmd_cmp()
572 if (job) { in lpfc_bsg_rport_els_cmp()
594 if (job) { in lpfc_bsg_rport_els_cmp()
629 if (job) { in lpfc_bsg_rport_els_cmp()
1374 if (job) { in lpfc_issue_ct_rsp_cmp()
1394 if (job) { in lpfc_issue_ct_rsp_cmp()
1426 if (job) { in lpfc_issue_ct_rsp_cmp()
3646 if (!job) in lpfc_bsg_issue_read_mbox_ext_cmpl()
[all …]
/linux/drivers/gpu/drm/tegra/
submit.c
399 struct host1x_job *job; in submit_create_job() local
413 if (!job) { in submit_create_job()
425 job->serialize = true; in submit_create_job()
481 host1x_job_put(job); in submit_create_job()
482 job = ERR_PTR(err); in submit_create_job()
487 return job; in submit_create_job()
577 if (IS_ERR(job)) { in tegra_drm_ioctl_channel_submit()
578 err = PTR_ERR(job); in tegra_drm_ioctl_channel_submit()
630 job->timeout = 10000; in tegra_drm_ioctl_channel_submit()
664 host1x_job_unpin(job); in tegra_drm_ioctl_channel_submit()
[all …]

Completed in 724 milliseconds
