
Searched refs:rq (Results 1 – 25 of 534) sorted by relevance


/drivers/gpu/drm/i915/
i915_request.c
383 rq->ring->head = rq->postfix; in i915_request_retire()
404 rq->engine->remove_active_request(rq); in i915_request_retire()
556 if (rq->infix == rq->postfix) in __i915_request_skip()
559 RQ_TRACE(rq, "error: %d\n", rq->fence.error); in __i915_request_skip()
567 rq->infix = rq->postfix; in __i915_request_skip()
596 rq = i915_request_get(rq); in i915_request_mark_eio()
1000 rq->head = rq->ring->emit; in __i915_request_create()
1002 ret = rq->engine->request_alloc(rq); in __i915_request_create()
1811 rq->postfix = intel_ring_offset(rq, cs); in __i915_request_commit()
2228 rq->fence.context, rq->fence.seqno, in i915_request_show()
[all …]
i915_request.h
64 #define RQ_TRACE(rq, fmt, ...) do { \ argument
414 dma_fence_put(&rq->fence); in i915_request_put()
509 seqno = __hwsp_seqno(rq); in hwsp_seqno()
517 return i915_seqno_passed(__hwsp_seqno(rq), rq->fence.seqno - 1); in __i915_request_has_started()
550 if (i915_request_signaled(rq)) in i915_request_started()
575 if (!i915_request_is_active(rq)) in i915_request_is_running()
579 result = __i915_request_has_started(rq) && i915_request_is_active(rq); in i915_request_is_running()
603 return !list_empty(&rq->sched.link); in i915_request_is_ready()
608 return i915_seqno_passed(__hwsp_seqno(rq), rq->fence.seqno); in __i915_request_is_complete()
615 if (i915_request_signaled(rq)) in i915_request_completed()
[all …]
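The started and completed checks above all reduce to one wraparound-safe seqno comparison. A minimal standalone sketch of the idiom, modeled on i915_seqno_passed() as used at lines 517 and 608 (plain stdint types substituted for the kernel's):

```c
#include <stdbool.h>
#include <stdint.h>

/*
 * Sketch: seqnos increase monotonically but live in 32 bits, so they
 * wrap. Comparing via signed subtraction treats anything less than
 * 2^31 ahead as "not yet passed", which survives the wraparound.
 */
static inline bool seqno_passed(uint32_t seq1, uint32_t seq2)
{
	return (int32_t)(seq1 - seq2) >= 0;
}
```

__i915_request_is_complete() asks whether the seqno last written to the hardware status page has passed the request's own fence seqno; the started check at line 517 applies the same helper to rq->fence.seqno - 1, i.e. the breadcrumb of the previous request on that timeline.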
/drivers/scsi/fnic/
vnic_rq.c
23 if (!rq->bufs[i]) { in vnic_rq_alloc_bufs()
30 buf = rq->bufs[i]; in vnic_rq_alloc_bufs()
47 rq->to_use = rq->to_clean = rq->bufs[0]; in vnic_rq_alloc_bufs()
48 rq->buf_index = 0; in vnic_rq_alloc_bufs()
58 vdev = rq->vdev; in vnic_rq_free()
67 rq->ctrl = NULL; in vnic_rq_free()
76 rq->vdev = vdev; in vnic_rq_alloc()
79 if (!rq->ctrl) { in vnic_rq_alloc()
108 iowrite32(rq->ring.desc_count, &rq->ctrl->ring_size); in vnic_rq_init()
117 rq->to_use = rq->to_clean = in vnic_rq_init()
[all …]
vnic_rq.h
96 return rq->ring.desc_avail; in vnic_rq_desc_avail()
102 return rq->ring.desc_count - rq->ring.desc_avail - 1; in vnic_rq_desc_used()
107 return rq->to_use->desc; in vnic_rq_next_desc()
112 return rq->to_use->index; in vnic_rq_next_index()
117 return rq->buf_index++; in vnic_rq_next_buf_index()
132 rq->to_use = buf; in vnic_rq_post()
133 rq->ring.desc_avail--; in vnic_rq_post()
177 buf = rq->to_clean; in vnic_rq_service()
185 rq->ring.desc_avail++; in vnic_rq_service()
192 buf = rq->to_clean; in vnic_rq_service()
[all …]
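These fnic accessors (and the near-identical enic copies below) implement the classic one-slot-reserved ring: descriptors in use are desc_count - desc_avail - 1, so a full ring never looks the same as an empty one. A self-contained sketch of that accounting, with generic names in place of the driver's structs:

```c
#include <stdio.h>

/* Hypothetical ring with the same accounting as vnic_rq above. */
struct ring {
	unsigned int desc_count; /* total descriptors in the ring */
	unsigned int desc_avail; /* descriptors free for posting  */
};

/* One slot stays reserved, so "full" and "empty" stay distinguishable. */
static unsigned int ring_desc_used(const struct ring *r)
{
	return r->desc_count - r->desc_avail - 1;
}

static void ring_post(struct ring *r)    { r->desc_avail--; } /* cf. vnic_rq_post()    */
static void ring_service(struct ring *r) { r->desc_avail++; } /* cf. vnic_rq_service() */

int main(void)
{
	struct ring r = { .desc_count = 64, .desc_avail = 63 }; /* empty */

	ring_post(&r);
	ring_post(&r);
	printf("used=%u avail=%u\n", ring_desc_used(&r), r.desc_avail); /* used=2 avail=61 */
	ring_service(&r);
	printf("used=%u avail=%u\n", ring_desc_used(&r), r.desc_avail); /* used=1 avail=62 */
	return 0;
}
```

The reservation is why vnic_rq_clean() in the enic copy resets desc_avail to desc_count - 1 rather than desc_count (line 185 below).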
/drivers/net/ethernet/cisco/enic/
vnic_rq.c
26 if (!rq->bufs[i]) in vnic_rq_alloc_bufs()
31 buf = rq->bufs[i]; in vnic_rq_alloc_bufs()
48 rq->to_use = rq->to_clean = rq->bufs[0]; in vnic_rq_alloc_bufs()
58 vdev = rq->vdev; in vnic_rq_free()
69 rq->ctrl = NULL; in vnic_rq_free()
77 rq->index = index; in vnic_rq_alloc()
78 rq->vdev = vdev; in vnic_rq_alloc()
81 if (!rq->ctrl) { in vnic_rq_alloc()
120 rq->to_use = rq->to_clean = in vnic_rq_init_start()
185 rq->ring.desc_avail = rq->ring.desc_count - 1; in vnic_rq_clean()
[all …]
vnic_rq.h
89 return rq->ring.desc_avail; in vnic_rq_desc_avail()
95 return rq->ring.desc_count - rq->ring.desc_avail - 1; in vnic_rq_desc_used()
100 return rq->to_use->desc; in vnic_rq_next_desc()
105 return rq->to_use->index; in vnic_rq_next_index()
122 rq->to_use = buf; in vnic_rq_post()
123 rq->ring.desc_avail--; in vnic_rq_post()
162 buf = rq->to_clean; in vnic_rq_service()
170 rq->ring.desc_avail++; in vnic_rq_service()
172 rq->to_clean = buf->next; in vnic_rq_service()
177 buf = rq->to_clean; in vnic_rq_service()
[all …]
/drivers/gpu/drm/i915/gt/
gen8_engine_cs.c
46 if (IS_KABYLAKE(rq->i915) && IS_GRAPHICS_STEP(rq->i915, 0, STEP_C0)) in gen8_emit_flush_rcs()
227 IS_DG2(rq->i915)) { in mtl_dummy_pipe_control()
474 rq->infix = intel_ring_offset(rq, cs); in gen8_emit_init_breadcrumb()
605 GEM_BUG_ON(intel_ring_direction(ring, rq->wa_tail, rq->head) <= 0); in assert_request_valid()
618 rq->wa_tail = intel_ring_offset(rq, cs); in gen8_emit_wa_tail()
651 rq->tail = intel_ring_offset(rq, cs); in gen8_emit_fini_breadcrumb_tail()
652 assert_ring_tail_valid(rq->ring, rq->tail); in gen8_emit_fini_breadcrumb_tail()
659 return gen8_emit_ggtt_write(cs, rq->fence.seqno, hwsp_offset(rq), 0); in emit_xcs_breadcrumb()
664 return gen8_emit_fini_breadcrumb_tail(rq, emit_xcs_breadcrumb(rq, cs)); in gen8_emit_fini_breadcrumb_xcs()
799 rq->tail = intel_ring_offset(rq, cs); in gen12_emit_fini_breadcrumb_tail()
[all …]
selftest_execlists.c
266 GEM_BUG_ON(rq[1]->postfix <= rq[0]->postfix); in live_unlite_restore()
289 GEM_BUG_ON(rq[0]->postfix > rq[1]->postfix); in live_unlite_restore()
873 err = rq->engine->emit_init_breadcrumb(rq); in semaphore_queue()
2082 intel_context_ban(rq->context, rq); in __cancel_active0()
2141 intel_context_ban(rq[1]->context, rq[1]); in __cancel_active1()
2224 intel_context_ban(rq[2]->context, rq[2]); in __cancel_queued()
2293 intel_context_ban(rq->context, rq); in __cancel_hostile()
2591 ring_size = rq->wa_tail - rq->head; in live_chain_preempt()
2772 err = rq->engine->emit_bb_start(rq, in create_gang()
3188 err = rq->engine->emit_bb_start(rq, in create_gpr_client()
[all …]
gen6_engine_cs.c
167 *cs++ = rq->fence.seqno; in gen6_emit_breadcrumb_rcs()
172 rq->tail = intel_ring_offset(rq, cs); in gen6_emit_breadcrumb_rcs()
173 assert_ring_tail_valid(rq->ring, rq->tail); in gen6_emit_breadcrumb_rcs()
337 gen7_stall_cs(rq); in gen7_emit_flush_rcs()
364 *cs++ = rq->fence.seqno; in gen7_emit_breadcrumb_rcs()
369 rq->tail = intel_ring_offset(rq, cs); in gen7_emit_breadcrumb_rcs()
370 assert_ring_tail_valid(rq->ring, rq->tail); in gen7_emit_breadcrumb_rcs()
386 rq->tail = intel_ring_offset(rq, cs); in gen6_emit_breadcrumb_xcs()
387 assert_ring_tail_valid(rq->ring, rq->tail); in gen6_emit_breadcrumb_xcs()
418 rq->tail = intel_ring_offset(rq, cs); in gen7_emit_breadcrumb_xcs()
[all …]
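Every gen6/gen7 breadcrumb emitter above finishes the same way: write the fence seqno into the command stream, record rq->tail via intel_ring_offset(), and validate it with assert_ring_tail_valid(). A hedged sketch of the surrounding begin/emit/advance shape; the helpers are the ones visible in these hits, but the command dwords are illustrative rather than the exact gen6 sequence:

```c
/*
 * Sketch only: intel_ring_begin() reserves space in the request's ring
 * and hands back a dword cursor; the emitter writes through the cursor;
 * the tail is recorded and sanity-checked before submission.
 */
static int emit_breadcrumb_sketch(struct i915_request *rq)
{
	u32 *cs;

	cs = intel_ring_begin(rq, 4);	/* reserve 4 dwords */
	if (IS_ERR(cs))
		return PTR_ERR(cs);

	*cs++ = MI_STORE_DWORD_INDEX;	/* placeholder store-seqno command */
	*cs++ = I915_GEM_HWS_SEQNO_ADDR;
	*cs++ = rq->fence.seqno;	/* the breadcrumb the CPU polls for */
	*cs++ = MI_USER_INTERRUPT;	/* wake any waiters */

	rq->tail = intel_ring_offset(rq, cs);
	assert_ring_tail_valid(rq->ring, rq->tail);
	intel_ring_advance(rq, cs);
	return 0;
}
```

Reserving the dwords up front is what lets these emitters write through a bare cursor with no bounds checks on the hot path.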
intel_breadcrumbs.c
113 if (rq->context != ce) in check_signal_order()
221 &rq->fence.flags)) in signal_irq_work()
255 rq->engine->sched_engine->retire_inflight_request_prio(rq); in signal_irq_work()
257 spin_lock(&rq->lock); in signal_irq_work()
263 i915_request_put(rq); in signal_irq_work()
343 i915_request_get(rq); in irq_signal_request()
395 i915_request_get(rq); in insert_breadcrumb()
456 i915_request_put(rq); in i915_request_cancel_breadcrumb()
474 &rq->fence.flags)) in intel_context_remove_breadcrumbs()
479 i915_request_put(rq); in intel_context_remove_breadcrumbs()
[all …]
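The get/put pairs in the breadcrumbs hits pin a request while its signaling interrupt is armed. Per the i915_request.h hit at line 414 above, that refcount is simply the embedded dma_fence's; a minimal sketch of the pairing, assuming a to_request() helper that recovers the request from its fence:

```c
/*
 * Sketch: a request's lifetime rides on &rq->fence. insert_breadcrumb()
 * takes a reference before arming the interrupt; signal_irq_work() and
 * i915_request_cancel_breadcrumb() drop it once signaling is resolved.
 */
static inline struct i915_request *i915_request_get(struct i915_request *rq)
{
	return to_request(dma_fence_get(&rq->fence));
}

static inline void i915_request_put(struct i915_request *rq)
{
	dma_fence_put(&rq->fence);
}
```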
selftest_timeline.c
521 return rq; in checked_tl_write()
899 struct i915_request *rq = fetch_and_zero(&w->rq); in check_watcher() local
975 return rq; in wrap_timeline()
980 return rq; in wrap_timeline()
1084 switch_tl_lock(rq, watcher[0].rq); in live_hwsp_read()
1090 switch_tl_lock(watcher[0].rq, rq); in live_hwsp_read()
1098 switch_tl_lock(rq, watcher[1].rq); in live_hwsp_read()
1104 switch_tl_lock(watcher[1].rq, rq); in live_hwsp_read()
1115 rq = wrap_timeline(rq); in live_hwsp_read()
1227 GEM_BUG_ON(rq[2]->fence.seqno > rq[0]->fence.seqno); in live_hwsp_rollover_kernel()
[all …]
intel_execlists_submission.c
444 rq->fence.context, rq->fence.seqno); in reset_active()
699 rq->tail = rq->wa_tail; in execlists_update_context()
737 rq->fence.context, rq->fence.seqno, in dump_port()
926 rq ? execlists_update_context(rq) : 0, in execlists_submit_ports()
1152 if (!rq || __i915_request_is_complete(rq)) in needs_timeslice()
1975 rq->head, rq->tail, in process_csb()
2339 cap->rq = active_request(cap->rq->context->timeline, cap->rq); in execlists_capture()
2340 cap->rq = i915_request_get_rcu(cap->rq); in execlists_capture()
3048 rq = active_request(ce->timeline, rq); in execlists_reset_active()
3342 rq->fence.context, rq->fence.seqno, in kick_execlists()
[all …]
gen2_engine_cs.c
38 intel_ring_advance(rq, cs); in gen2_emit_flush()
79 if (IS_G4X(rq->i915) || GRAPHICS_VER(rq->i915) == 5) in gen4_emit_flush_rcs()
87 cs = intel_ring_begin(rq, i); in gen4_emit_flush_rcs()
124 intel_ring_advance(rq, cs); in gen4_emit_flush_rcs()
139 intel_ring_advance(rq, cs); in gen4_emit_flush_vcs()
147 GEM_BUG_ON(i915_request_active_timeline(rq)->hwsp_ggtt != rq->engine->status_page.vma); in __gen2_emit_breadcrumb()
155 *cs++ = rq->fence.seqno; in __gen2_emit_breadcrumb()
161 *cs++ = rq->fence.seqno; in __gen2_emit_breadcrumb()
166 rq->tail = intel_ring_offset(rq, cs); in __gen2_emit_breadcrumb()
167 assert_ring_tail_valid(rq->ring, rq->tail); in __gen2_emit_breadcrumb()
[all …]
intel_engine_heartbeat.c
25 struct i915_request *rq; in next_heartbeat() local
41 if (rq && rq->sched.attr.priority >= I915_PRIORITY_BARRIER && in next_heartbeat()
76 return rq; in heartbeat_create()
90 idle_pulse(rq->engine, rq); in heartbeat_commit()
102 if (!rq) { in show_heartbeat()
111 rq->fence.seqno, in show_heartbeat()
149 if (rq && i915_request_completed(rq)) { in heartbeat()
224 if (IS_ERR(rq)) in heartbeat()
286 if (IS_ERR(rq)) in __intel_engine_pulse()
399 if (IS_ERR(rq)) { in intel_engine_flush_barriers()
[all …]
selftest_hangcheck.c
229 err = rq->engine->emit_init_breadcrumb(rq); in hang_create_request()
776 if (rq) { in __igt_reset_engine()
786 if (rq) in __igt_reset_engine()
872 if (!rq) in active_request_put()
1096 if (rq) { in __igt_reset_engines()
1107 if (rq) { in __igt_reset_engines()
1112 rq->fence.seqno, rq->context->guc_id.id); in __igt_reset_engines()
1335 __func__, rq->fence.seqno, hws_seqno(&h, rq)); in igt_reset_wait()
1524 __func__, rq->fence.seqno, hws_seqno(&h, rq)); in __igt_reset_evict_vma()
1847 __func__, rq->fence.seqno, hws_seqno(&h, rq)); in igt_handle_error()
[all …]
/drivers/net/ethernet/mellanox/mlx5/core/
en_rx.c
340 return &rq->wqe.frags[ix << rq->wqe.info.log_num_frags]; in get_frag()
832 .umr.rq = rq, in mlx5e_alloc_rx_mpwqe()
991 struct mlx5e_rq *rq = &c->rq; in mlx5e_handle_shampo_hd_umr() local
1133 rq->mpwqe.umr_in_progress += rq->mpwqe.umr_last_bulk; in mlx5e_post_rx_mpwqes()
1672 mxbuf->rq = rq; in mlx5e_fill_mxbuf()
1747 rq->buff.frame0_sz, rq->buff.map_dir); in mlx5e_skb_from_cqe_nonlinear()
1787 rq, mxbuf->xdp.data_hard_start, rq->buff.frame0_sz, in mlx5e_skb_from_cqe_nonlinear()
1818 mlx5e_dump_error_cqe(&rq->cq, rq->rqn, err_cqe); in trigger_report()
2503 rq, cqe); in mlx5e_rx_cq_process_enhanced_cqe_comp()
2538 rq, cqe); in mlx5e_rx_cq_process_basic_cqe_comp()
[all …]
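get_frag() at line 340 above turns a WQE index into its first fragment with a shift, which works because mlx5e stores the per-WQE fragment count as a log2. A small standalone sketch of the idiom, with shortened field names:

```c
struct frag_sketch { int page, offset; /* per-fragment RX state */ };

struct rq_sketch {
	struct frag_sketch *frags;  /* num_wqes << log_num_frags entries */
	unsigned int log_num_frags; /* log2(fragments per WQE)           */
};

/* First fragment of WQE `ix`: ix * num_frags, computed as a shift. */
static struct frag_sketch *get_frag_sketch(struct rq_sketch *rq,
					   unsigned int ix)
{
	return &rq->frags[ix << rq->log_num_frags];
}
```

Keeping the log2 rather than the raw count avoids a multiply on the RX hot path and pins the per-WQE stride to a power of two.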
/drivers/scsi/esas2r/
esas2r_disc.c
359 rq->interrupt_cx = dc; in esas2r_disc_start_port()
529 rq, in esas2r_disc_block_dev_scan()
538 rq->timeout = 30000; in esas2r_disc_block_dev_scan()
539 rq->interrupt_cx = dc; in esas2r_disc_block_dev_scan()
604 rq, in esas2r_disc_raid_grp_info()
615 rq->interrupt_cx = dc; in esas2r_disc_raid_grp_info()
666 rq->req_stat); in esas2r_disc_raid_grp_info_cb()
715 rq, in esas2r_disc_part_info()
730 rq->interrupt_cx = dc; in esas2r_disc_part_info()
807 rq, in esas2r_disc_passthru_dev_info()
[all …]
esas2r_vda.c
93 clear_vda_request(rq); in esas2r_process_vda_ioctl()
97 rq->interrupt_cx = vi; in esas2r_process_vda_ioctl()
200 rq->vrq->mgt.dev_index = in esas2r_process_vda_ioctl()
355 clear_vda_request(rq); in esas2r_build_flash_req()
382 clear_vda_request(rq); in esas2r_build_mgt_req()
424 clear_vda_request(rq); in esas2r_build_ae_req()
435 rq->vrq_md->phys_addr + in esas2r_build_ae_req()
442 rq->vrq_md->phys_addr + in esas2r_build_ae_req()
455 clear_vda_request(rq); in esas2r_build_ioctl_req()
473 clear_vda_request(rq); in esas2r_build_cfg_req()
[all …]
esas2r_int.c
264 if (unlikely(rq == NULL || rq->vrq->scsi.handle != handle)) { in esas2r_get_outbound_responses()
403 if (rq->interrupt_cx) { in esas2r_process_adapter_reset()
418 rq->interrupt_cx = NULL; in esas2r_process_adapter_reset()
419 rq->interrupt_cb = NULL; in esas2r_process_adapter_reset()
766 rq, length); in esas2r_ae_complete()
890 if (rq->sense_buf) in esas2r_check_req_rsp_sense()
891 memcpy(rq->sense_buf, rq->data_buf, snslen); in esas2r_check_req_rsp_sense()
893 rq->sense_buf = (u8 *)rq->data_buf; in esas2r_check_req_rsp_sense()
915 rq->sense_len = snslen; in esas2r_check_req_rsp_sense()
929 (*rq->interrupt_cb)(a, rq); in esas2r_complete_request()
[all …]
esas2r_io.c
55 rq->req_stat = RS_SEL2; in esas2r_start_request()
57 rq->req_stat = RS_DEGRADED; in esas2r_start_request()
63 rq->req_stat = RS_SEL; in esas2r_start_request()
75 rq->req_stat = RS_SEL; in esas2r_start_request()
84 esas2r_trace("rq=%p", rq); in esas2r_start_request()
123 esas2r_trace("rq=%p", rq); in esas2r_local_start_request()
143 rq->req_stat = RS_STARTED; in esas2r_start_vda_request()
288 rq->vda_req_sz = in esas2r_build_sg_list_sge()
660 rq->vda_req_sz = reqsize; in esas2r_build_sg_list_prd()
869 rq->req_stat = RS_BUSY; in esas2r_ioreq_aborted()
[all …]
/drivers/net/ethernet/mellanox/mlx5/core/en/xsk/
rx.c
30 if (unlikely(!xsk_buff_can_alloc(rq->xsk_pool, rq->mpwqe.pages_per_wqe))) in mlx5e_xsk_alloc_rx_mpwqe()
62 mxbuf->rq = rq; in mlx5e_xsk_alloc_rx_mpwqe()
70 .key = rq->mkey_be, in mlx5e_xsk_alloc_rx_mpwqe()
73 mxbuf->rq = rq; in mlx5e_xsk_alloc_rx_mpwqe()
83 .key = rq->mkey_be, in mlx5e_xsk_alloc_rx_mpwqe()
87 .key = rq->mkey_be, in mlx5e_xsk_alloc_rx_mpwqe()
91 .key = rq->mkey_be, in mlx5e_xsk_alloc_rx_mpwqe()
95 .key = rq->mkey_be, in mlx5e_xsk_alloc_rx_mpwqe()
98 mxbuf->rq = rq; in mlx5e_xsk_alloc_rx_mpwqe()
119 mxbuf->rq = rq; in mlx5e_xsk_alloc_rx_mpwqe()
[all …]
/drivers/gpu/drm/i915/selftests/
i915_perf.c
225 if (IS_ERR(rq)) { in live_noa_delay()
226 err = PTR_ERR(rq); in live_noa_delay()
231 err = rq->engine->emit_init_breadcrumb(rq); in live_noa_delay()
244 err = rq->engine->emit_bb_start(rq, in live_noa_delay()
258 i915_request_get(rq); in live_noa_delay()
259 i915_request_add(rq); in live_noa_delay()
283 i915_request_put(rq); in live_noa_delay()
320 if (IS_ERR(rq)) { in live_noa_gpr()
321 err = PTR_ERR(rq); in live_noa_gpr()
327 err = rq->engine->emit_init_breadcrumb(rq); in live_noa_gpr()
[all …]
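Both perf selftests above walk the standard request lifecycle: create against a context, emit the init breadcrumb, emit the batch-buffer start, then get/add and eventually put. A hedged sketch of that flow with the error handling trimmed; the vfuncs are the ones shown in the hits, while the context and batch parameters are placeholders:

```c
/* Sketch only: assumes an intel_context `ce` and a batch address
 * `batch`; waits and flags are elided. */
static int submit_batch_sketch(struct intel_context *ce, u64 batch, u32 len)
{
	struct i915_request *rq;
	int err;

	rq = i915_request_create(ce);
	if (IS_ERR(rq))
		return PTR_ERR(rq);

	/* seed the per-request breadcrumb before any user payload */
	err = rq->engine->emit_init_breadcrumb(rq);
	if (err == 0)
		err = rq->engine->emit_bb_start(rq, batch, len, 0);

	i915_request_get(rq);	/* keep rq alive past submission ...   */
	i915_request_add(rq);	/* ... which hands it to the scheduler */

	/* callers typically i915_request_wait(rq, ...) here */

	i915_request_put(rq);
	return err;
}
```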
/drivers/net/ethernet/huawei/hinic/
hinic_hw_qp.c
62 #define RQ_MASKED_IDX(rq, idx) ((idx) & (rq)->wq->mask) argument
162 wq = rq->wq; in hinic_rq_prepare_ctxt()
326 if (!rq->cqe) in alloc_rq_cqe()
346 dma_free_coherent(&pdev->dev, sizeof(*rq->cqe[j]), rq->cqe[j], in alloc_rq_cqe()
352 vfree(rq->cqe); in alloc_rq_cqe()
368 dma_free_coherent(&pdev->dev, sizeof(*rq->cqe[i]), rq->cqe[i], in free_rq_cqe()
372 vfree(rq->cqe); in free_rq_cqe()
391 rq->hwif = hwif; in hinic_init_rq()
393 rq->wq = wq; in hinic_init_rq()
424 free_rq_cqe(rq); in hinic_init_rq()
[all …]
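RQ_MASKED_IDX() at line 62 above wraps free-running ring indices by masking, which equals a modulo whenever the work-queue size is a power of two (hinic keeps size - 1 in wq->mask). A minimal standalone sketch:

```c
#include <stdio.h>

#define RING_SIZE 256u             /* must be a power of two */
#define RING_MASK (RING_SIZE - 1u) /* the hinic wq->mask equivalent */

/* Equivalent of RQ_MASKED_IDX(rq, idx): idx & mask == idx % size. */
static unsigned int masked_idx(unsigned int idx)
{
	return idx & RING_MASK;
}

int main(void)
{
	/* producer/consumer indices run free; wrap only at use sites */
	printf("%u\n", masked_idx(255)); /* 255 */
	printf("%u\n", masked_idx(256)); /* 0   */
	printf("%u\n", masked_idx(300)); /* 44  */
	return 0;
}
```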
/drivers/net/ethernet/mellanox/mlx5/core/en/
reporter_rx.c
82 struct mlx5e_rq *rq; in mlx5e_rx_reporter_err_icosq_cqe_recover() local
91 rq = &icosq->channel->rq; in mlx5e_rx_reporter_err_icosq_cqe_recover()
163 if (rq->channel) in mlx5e_rx_reporter_err_rq_cqe_recover()
177 rq = ctx; in mlx5e_rx_reporter_timeout_recover()
178 priv = rq->priv; in mlx5e_rx_reporter_timeout_recover()
182 eq = rq->cq.mcq.eq; in mlx5e_rx_reporter_timeout_recover()
253 err = mlx5e_query_rq_state(rq->mdev, rq->rqn, &hw_state); in mlx5e_rx_reporter_build_diagnose_output_rq_common()
270 if (rq->icosq) { in mlx5e_rx_reporter_build_diagnose_output_rq_common()
565 struct mlx5e_rq *rq = &priv->channels.c[i]->rq; in mlx5e_rx_reporter_dump_all_rqs() local
603 err_ctx.ctx = rq; in mlx5e_reporter_rx_timeout()
[all …]
/drivers/gpu/drm/i915/gt/uc/
intel_gsc_uc_heci_cmd_submit.c
47 struct i915_request *rq; in intel_gsc_uc_heci_cmd_submit_packet() local
60 if (IS_ERR(rq)) in intel_gsc_uc_heci_cmd_submit_packet()
61 return PTR_ERR(rq); in intel_gsc_uc_heci_cmd_submit_packet()
77 i915_request_get(rq); in intel_gsc_uc_heci_cmd_submit_packet()
82 i915_request_add(rq); in intel_gsc_uc_heci_cmd_submit_packet()
96 i915_request_put(rq); in intel_gsc_uc_heci_cmd_submit_packet()
159 if (IS_ERR(rq)) { in intel_gsc_uc_heci_cmd_submit_nonpriv()
160 err = PTR_ERR(rq); in intel_gsc_uc_heci_cmd_submit_nonpriv()
190 i915_request_get(rq); in intel_gsc_uc_heci_cmd_submit_nonpriv()
195 i915_request_add(rq); in intel_gsc_uc_heci_cmd_submit_nonpriv()
[all …]

Completed in 698 milliseconds
