Searched refs:sqe (Results 1 – 25 of 41) sorted by relevance

/drivers/crypto/hisilicon/zip/
zip_crypto.c
170 sqe->dw9 = val; in hisi_zip_fill_buf_type()
179 sqe->dw9 = val; in hisi_zip_fill_req_type()
184 sqe->dw26 = req->req_id; in hisi_zip_fill_tag()
193 sqe->dw7 = val; in hisi_zip_fill_sqe_type()
203 ops->fill_addr(sqe, req); in hisi_zip_fill_sqe()
204 ops->fill_buf_size(sqe, req); in hisi_zip_fill_sqe()
207 ops->fill_tag(sqe, req); in hisi_zip_fill_sqe()
270 return sqe->dw26; in hisi_zip_get_tag()
280 return sqe->produced; in hisi_zip_get_dstlen()
291 u32 tag = ops->get_tag(sqe); in hisi_zip_acomp_cb()
[all …]
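
The hisi_zip hits above show hisi_zip_fill_sqe() building a hardware submission-queue entry through a table of fill_* callbacks (fill_buf_type, fill_req_type, fill_tag, fill_addr), with the values packed into dw7/dw9/dw26 words. The following is a minimal user-space sketch of that callback-dispatch pattern only; every struct, field and ops name here (zip_sqe_demo, zip_sqe_ops_demo, and so on) is an illustrative stand-in, not the driver's actual definition.

    #include <stdint.h>
    #include <stdio.h>

    /* Illustrative stand-in for a hardware SQE; the dw* fields mimic the
     * "sqe->dw7/dw9/dw26" accesses seen in the search hits. */
    struct zip_sqe_demo {
        uint32_t dw7;   /* request/SQE type bits     */
        uint32_t dw9;   /* buffer/request type bits  */
        uint32_t dw26;  /* software tag (request id) */
    };

    struct zip_req_demo {
        uint32_t req_id;
        uint32_t req_type;
    };

    /* Generation-specific fill callbacks, dispatched through an ops table. */
    struct zip_sqe_ops_demo {
        void (*fill_req_type)(struct zip_sqe_demo *sqe, struct zip_req_demo *req);
        void (*fill_tag)(struct zip_sqe_demo *sqe, struct zip_req_demo *req);
    };

    static void fill_req_type_v1(struct zip_sqe_demo *sqe, struct zip_req_demo *req)
    {
        sqe->dw9 = req->req_type;   /* pack the request type into dw9 */
    }

    static void fill_tag_v1(struct zip_sqe_demo *sqe, struct zip_req_demo *req)
    {
        sqe->dw26 = req->req_id;    /* tag so the completion path can find the request */
    }

    static const struct zip_sqe_ops_demo ops_v1 = {
        .fill_req_type = fill_req_type_v1,
        .fill_tag      = fill_tag_v1,
    };

    /* One entry point, fields filled through the ops table, mirroring the
     * shape of hisi_zip_fill_sqe() in the hits above. */
    static void fill_sqe_demo(const struct zip_sqe_ops_demo *ops,
                              struct zip_sqe_demo *sqe, struct zip_req_demo *req)
    {
        ops->fill_req_type(sqe, req);
        ops->fill_tag(sqe, req);
    }

    int main(void)
    {
        struct zip_req_demo req = { .req_id = 7, .req_type = 2 };
        struct zip_sqe_demo sqe = { 0 };

        fill_sqe_demo(&ops_v1, &sqe, &req);
        printf("dw9=%u dw26=%u\n", (unsigned)sqe.dw9, (unsigned)sqe.dw26);
        return 0;
    }

Keeping the per-generation differences behind an ops table lets a single fill routine serve multiple hardware revisions, which matches the structure suggested by these snippets.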
/drivers/infiniband/sw/siw/
siw_qp.c
251 wqe->sqe.flags = 0; in siw_qp_mpa_rts()
252 wqe->sqe.num_sge = 1; in siw_qp_mpa_rts()
260 wqe->sqe.rkey = 1; in siw_qp_mpa_rts()
261 wqe->sqe.raddr = 0; in siw_qp_mpa_rts()
842 rreq->id = sqe->id; in siw_read_to_orq()
854 struct siw_sqe *sqe; in siw_activate_tx_from_sq() local
859 if (!sqe) in siw_activate_tx_from_sq()
866 memcpy(&wqe->sqe, sqe, sizeof(*sqe)); in siw_activate_tx_from_sq()
882 wqe->sqe.sge[0].laddr = (uintptr_t)&wqe->sqe.sge[1]; in siw_activate_tx_from_sq()
974 wqe->sqe.flags = 0; in siw_activate_tx()
[all …]
siw_qp_tx.c
52 struct siw_sge *sge = &wqe->sqe.sge[0]; in siw_try_1seg()
62 memcpy(paddr, &wqe->sqe.sge[1], bytes); in siw_try_1seg()
137 cpu_to_be64(wqe->sqe.sge[0].laddr); in siw_qp_prepare_tx()
244 cpu_to_be64(wqe->sqe.raddr); in siw_qp_prepare_tx()
653 sge = &wqe->sqe.sge[c_tx->sge_idx]; in siw_tx_hdt()
809 wqe->sqe.num_sge = 1; in siw_qp_sq_proc_tx()
837 wqe->sqe.sge[0].laddr = in siw_qp_sq_proc_tx()
838 (u64)(uintptr_t)&wqe->sqe.sge[1]; in siw_qp_sq_proc_tx()
860 wqe->sqe.id); in siw_qp_sq_proc_tx()
967 mem->stag = sqe->rkey; in siw_fastreg_mr()
[all …]
siw_verbs.c
655 void *kbuf = &sqe->sge[1]; in siw_copy_inline_sgl()
659 sqe->sge[0].lkey = 0; in siw_copy_inline_sgl()
690 struct siw_sqe sqe = {}; in siw_sq_flush_wr() local
697 sqe.opcode = SIW_OP_READ; in siw_sq_flush_wr()
722 sqe.id = wr->wr_id; in siw_sq_flush_wr()
829 if (sqe->flags) { in siw_post_send()
839 sqe->id = wr->wr_id; in siw_post_send()
865 sqe->num_sge = 1; in siw_post_send()
894 sqe->num_sge = 1; in siw_post_send()
914 sqe->num_sge = 1; in siw_post_send()
[all …]
siw.h
189 struct siw_sqe sqe; member
475 #define tx_type(wqe) ((wqe)->sqe.opcode)
477 #define tx_flags(wqe) ((wqe)->sqe.flags)
521 void siw_read_to_orq(struct siw_sqe *rreq, struct siw_sqe *sqe);
522 int siw_sqe_complete(struct siw_qp *qp, struct siw_sqe *sqe, u32 bytes,
626 struct siw_sqe *sqe = &qp->sendq[qp->sq_get % qp->attrs.sq_size]; in siw_sq_empty() local
628 return READ_ONCE(sqe->flags) == 0; in siw_sq_empty()
633 struct siw_sqe *sqe = &qp->sendq[qp->sq_get % qp->attrs.sq_size]; in sq_get_next() local
635 if (READ_ONCE(sqe->flags) & SIW_WQE_VALID) in sq_get_next()
636 return sqe; in sq_get_next()
siw_qp_rx.c
176 srx->ddp_stag = wqe->sqe.sge[0].lkey; in siw_rresp_check_ntoh()
177 srx->ddp_to = wqe->sqe.sge[0].laddr; in siw_rresp_check_ntoh()
691 resp = &tx_work->sqe; in siw_init_rresp()
754 wqe->sqe.id = orqe->id; in siw_orqe_start_rx()
755 wqe->sqe.opcode = orqe->opcode; in siw_orqe_start_rx()
756 wqe->sqe.sge[0].laddr = orqe->sge[0].laddr; in siw_orqe_start_rx()
757 wqe->sqe.sge[0].lkey = orqe->sge[0].lkey; in siw_orqe_start_rx()
759 wqe->sqe.flags = orqe->flags; in siw_orqe_start_rx()
760 wqe->sqe.num_sge = 1; in siw_orqe_start_rx()
823 sge = wqe->sqe.sge; /* there is only one */ in siw_proc_rresp()
[all …]
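
Several of the siw hits (siw_sq_empty(), sq_get_next(), siw_activate_tx_from_sq()) point at one mechanism: the send queue is a ring of persistent siw_sqe slots, a slot counts as posted while its flags word carries a valid bit, and the driver memcpy()s the claimed entry into the in-progress WQE before processing it. Below is a simplified, single-threaded user-space sketch of that claim-and-copy scheme; the names and the single VALID bit are stand-ins, the READ_ONCE()/WRITE_ONCE() accessors the kernel uses on the flags word are omitted, and the point at which the slot is released back to the producer is simplified.

    #include <stdint.h>
    #include <string.h>
    #include <stdio.h>

    #define SQE_VALID  (1u << 0)   /* stand-in for a SIW_WQE_VALID-style bit */
    #define SQ_SIZE    8

    struct demo_sqe {
        uint64_t id;
        uint32_t flags;            /* non-zero while the slot holds a posted request */
    };

    struct demo_sq {
        struct demo_sqe sendq[SQ_SIZE];
        unsigned int sq_get;       /* consumer index */
        unsigned int sq_put;       /* producer index */
    };

    /* Post: fill the slot first, set the VALID flag last. */
    static int sq_post(struct demo_sq *sq, uint64_t id)
    {
        struct demo_sqe *sqe = &sq->sendq[sq->sq_put % SQ_SIZE];

        if (sqe->flags & SQE_VALID)
            return -1;             /* ring full */
        sqe->id = id;
        sqe->flags = SQE_VALID;
        sq->sq_put++;
        return 0;
    }

    /* Consume: claim the next valid slot, copy it into the caller's working
     * WQE, then clear the slot so the producer can reuse it. */
    static int sq_activate_next(struct demo_sq *sq, struct demo_sqe *wqe)
    {
        struct demo_sqe *sqe = &sq->sendq[sq->sq_get % SQ_SIZE];

        if (!(sqe->flags & SQE_VALID))
            return 0;              /* queue empty */
        memcpy(wqe, sqe, sizeof(*sqe));
        sqe->flags = 0;
        sq->sq_get++;
        return 1;
    }

    int main(void)
    {
        struct demo_sq sq = { 0 };
        struct demo_sqe wqe;

        sq_post(&sq, 42);
        while (sq_activate_next(&sq, &wqe))
            printf("processing wr id %llu\n", (unsigned long long)wqe.id);
        return 0;
    }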
/drivers/net/ethernet/qlogic/qed/
qed_nvmetcp_fw_funcs.c
68 if (!task_params->sqe) in init_sqe()
71 memset(task_params->sqe, 0, sizeof(*task_params->sqe)); in init_sqe()
72 task_params->sqe->task_id = cpu_to_le16(task_params->itid); in init_sqe()
79 SET_FIELD(task_params->sqe->contlen_cdbsize, in init_sqe()
81 SET_FIELD(task_params->sqe->flags, NVMETCP_WQE_WQE_TYPE, in init_sqe()
94 SET_FIELD(task_params->sqe->flags, NVMETCP_WQE_NUM_SGES, num_sges); in init_sqe()
99 SET_FIELD(task_params->sqe->flags, NVMETCP_WQE_WQE_TYPE, in init_sqe()
101 SET_FIELD(task_params->sqe->contlen_cdbsize, in init_sqe()
106 SET_FIELD(task_params->sqe->flags, NVMETCP_WQE_WQE_TYPE, in init_sqe()
112 SET_FIELD(task_params->sqe->flags, NVMETCP_WQE_NUM_SGES, in init_sqe()
[all …]
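
The qed_nvmetcp init_sqe() hits (and the qedi/qedf ones further down) all follow the same recipe: check that a send-queue slot was supplied, zero it, store the task id, then pack WQE type, SGE count and CDB size into shared words with SET_FIELD() mask/shift macros. The sketch below reproduces only that packing pattern; the SET_FIELD definition is modeled on the kernel's mask/shift convention, and the field masks and names are invented for the example rather than taken from the firmware interface headers.

    #include <stdint.h>
    #include <string.h>
    #include <stdio.h>

    /* Mask/shift packing macro in the style of SET_FIELD(): NAME_MASK gives
     * the field width, NAME_SHIFT its position within the word. */
    #define SET_FIELD(word, name, val)                                  \
        do {                                                            \
            (word) &= ~((uint32_t)name##_MASK << name##_SHIFT);         \
            (word) |= ((uint32_t)(val) & name##_MASK) << name##_SHIFT;  \
        } while (0)

    /* Illustrative field layout, not the real firmware definition. */
    #define DEMO_WQE_TYPE_MASK      0xF
    #define DEMO_WQE_TYPE_SHIFT     0
    #define DEMO_WQE_NUM_SGES_MASK  0xFF
    #define DEMO_WQE_NUM_SGES_SHIFT 4

    struct demo_wqe {
        uint16_t task_id;
        uint32_t flags;          /* packed WQE type + SGE count */
    };

    struct demo_task_params {
        struct demo_wqe *sqe;    /* slot in the hardware send queue */
        uint16_t itid;
    };

    /* Mirrors the shape of init_sqe(): bail out without a slot, zero it,
     * then pack the fields the firmware expects. */
    static int demo_init_sqe(struct demo_task_params *p, uint8_t type, uint8_t num_sges)
    {
        if (!p->sqe)
            return -1;

        memset(p->sqe, 0, sizeof(*p->sqe));
        p->sqe->task_id = p->itid;
        SET_FIELD(p->sqe->flags, DEMO_WQE_TYPE, type);
        SET_FIELD(p->sqe->flags, DEMO_WQE_NUM_SGES, num_sges);
        return 0;
    }

    int main(void)
    {
        struct demo_wqe wqe;
        struct demo_task_params params = { .sqe = &wqe, .itid = 0x12 };

        demo_init_sqe(&params, 1, 3);
        printf("task_id=0x%x flags=0x%x\n", wqe.task_id, (unsigned)wqe.flags);
        return 0;
    }

Zeroing the slot before any SET_FIELD() call matters because the macro only ever clears its own field; stale bits from a previous request would otherwise survive in the shared words.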
/drivers/scsi/qedf/
drv_fcoe_fw_funcs.c
13 memset(task_params->sqe, 0, sizeof(*(task_params->sqe))); in init_common_sqe()
14 SET_FIELD(task_params->sqe->flags, FCOE_WQE_REQ_TYPE, in init_common_sqe()
16 task_params->sqe->task_id = task_params->itid; in init_common_sqe()
167 task_params->sqe->additional_info_union.burst_length = in init_initiator_midpath_unsolicited_fcoe_task()
169 SET_FIELD(task_params->sqe->flags, in init_initiator_midpath_unsolicited_fcoe_task()
171 SET_FIELD(task_params->sqe->flags, FCOE_WQE_SGL_MODE, in init_initiator_midpath_unsolicited_fcoe_task()
193 task_params->sqe->additional_info_union.seq_rec_updated_offset = in init_initiator_sequence_recovery_fcoe_task()
qedf_io.c
592 struct fcoe_wqe *sqe) in qedf_init_task() argument
628 io_req->task_params->sqe = sqe; in qedf_init_task()
707 io_req->task_params->sqe = sqe; in qedf_init_mp_task()
858 struct fcoe_wqe *sqe; in qedf_post_io_req() local
905 sqe = &fcport->sq[sqe_idx]; in qedf_post_io_req()
1860 struct fcoe_wqe *sqe; in qedf_initiate_abts() local
1942 sqe = &fcport->sq[sqe_idx]; in qedf_initiate_abts()
1944 io_req->task_params->sqe = sqe; in qedf_initiate_abts()
2157 struct fcoe_wqe *sqe; in qedf_initiate_cleanup() local
2229 io_req->task_params->sqe = sqe; in qedf_initiate_cleanup()
[all …]
qedf_els.c
23 struct fcoe_wqe *sqe; in qedf_initiate_els() local
120 sqe = &fcport->sq[sqe_idx]; in qedf_initiate_els()
121 memset(sqe, 0, sizeof(struct fcoe_wqe)); in qedf_initiate_els()
125 qedf_init_mp_task(els_req, task, sqe); in qedf_initiate_els()
702 struct fcoe_wqe *sqe; in qedf_initiate_seq_cleanup() local
732 sqe = &fcport->sq[sqe_idx]; in qedf_initiate_seq_cleanup()
733 memset(sqe, 0, sizeof(struct fcoe_wqe)); in qedf_initiate_seq_cleanup()
734 orig_io_req->task_params->sqe = sqe; in qedf_initiate_seq_cleanup()
drv_fcoe_fw_funcs.h
16 struct fcoe_wqe *sqe; member
/drivers/net/ethernet/intel/libeth/
tx.c
24 void libeth_tx_complete_any(struct libeth_sqe *sqe, struct libeth_cq_pp *cp) in libeth_tx_complete_any() argument
26 if (sqe->type >= __LIBETH_SQE_XDP_START) in libeth_tx_complete_any()
27 __libeth_xdp_complete_tx(sqe, cp, static_call(bulk), in libeth_tx_complete_any()
30 libeth_tx_complete(sqe, cp); in libeth_tx_complete_any()
/drivers/scsi/qedi/
qedi_fw_api.c
98 if (!task_params->sqe) in init_sqe()
101 memset(task_params->sqe, 0, sizeof(*task_params->sqe)); in init_sqe()
104 SET_FIELD(task_params->sqe->flags, ISCSI_WQE_WQE_TYPE, in init_sqe()
115 init_dif_context_flags(&task_params->sqe->prot_flags, in init_sqe()
118 SET_FIELD(task_params->sqe->flags, ISCSI_WQE_WQE_TYPE, in init_sqe()
134 SET_FIELD(task_params->sqe->flags, ISCSI_WQE_NUM_SGES, in init_sqe()
141 SET_FIELD(task_params->sqe->contlen_cdbsize, in init_sqe()
147 SET_FIELD(task_params->sqe->flags, ISCSI_WQE_WQE_TYPE, in init_sqe()
152 SET_FIELD(task_params->sqe->contlen_cdbsize, in init_sqe()
178 SET_FIELD(task_params->sqe->flags, ISCSI_WQE_RESPONSE, in init_sqe()
[all …]
qedi_fw.c
1042 task_params.sqe = &ep->sq[sq_idx]; in qedi_send_iscsi_login()
1044 memset(task_params.sqe, 0, sizeof(struct iscsi_wqe)); in qedi_send_iscsi_login()
1116 task_params.sqe = &ep->sq[sq_idx]; in qedi_send_iscsi_logout()
1117 memset(task_params.sqe, 0, sizeof(struct iscsi_wqe)); in qedi_send_iscsi_logout()
1490 task_params.sqe = &ep->sq[sq_idx]; in send_iscsi_tmf()
1492 memset(task_params.sqe, 0, sizeof(struct iscsi_wqe)); in send_iscsi_tmf()
1614 task_params.sqe = &ep->sq[sq_idx]; in qedi_send_iscsi_text()
1616 memset(task_params.sqe, 0, sizeof(struct iscsi_wqe)); in qedi_send_iscsi_text()
1731 task_params.sqe = &ep->sq[sq_idx]; in qedi_send_iscsi_nopout()
2093 task_params.sqe = &ep->sq[sq_idx]; in qedi_iscsi_send_ioreq()
[all …]
/drivers/infiniband/hw/bnxt_re/
qplib_fp.c
1953 sqe->flags = wqe->flags; in bnxt_qplib_post_send()
1954 sqe->wqe_size = wqe_sz; in bnxt_qplib_post_send()
1971 sqe->wqe_type = wqe->type; in bnxt_qplib_post_send()
1972 sqe->flags = wqe->flags; in bnxt_qplib_post_send()
1973 sqe->wqe_size = wqe_sz; in bnxt_qplib_post_send()
2003 sqe->flags = wqe->flags; in bnxt_qplib_post_send()
2004 sqe->wqe_size = wqe_sz; in bnxt_qplib_post_send()
2023 sqe->flags = wqe->flags; in bnxt_qplib_post_send()
2040 sqe->flags = wqe->flags; in bnxt_qplib_post_send()
2051 sqe->flags = wqe->flags; in bnxt_qplib_post_send()
[all …]
/drivers/infiniband/hw/cxgb4/
restrack.c
96 struct t4_swsqe *sqe) in fill_swsqe() argument
100 if (rdma_nl_put_driver_u32(msg, "opcode", sqe->opcode)) in fill_swsqe()
102 if (rdma_nl_put_driver_u32(msg, "complete", sqe->complete)) in fill_swsqe()
104 if (sqe->complete && in fill_swsqe()
105 rdma_nl_put_driver_u32(msg, "cqe_status", CQE_STATUS(&sqe->cqe))) in fill_swsqe()
107 if (rdma_nl_put_driver_u32(msg, "signaled", sqe->signaled)) in fill_swsqe()
109 if (rdma_nl_put_driver_u32(msg, "flushed", sqe->flushed)) in fill_swsqe()
/drivers/scsi/bnx2i/
bnx2i.h
498 struct sqe { struct
634 struct sqe *sq_virt;
638 struct sqe *sq_prod_qe;
639 struct sqe *sq_cons_qe;
640 struct sqe *sq_first_qe;
641 struct sqe *sq_last_qe;
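
The bnx2i.h hit declares the send queue as a block of struct sqe elements tracked by first/last bounds plus producer and consumer cursors (sq_virt, sq_first_qe, sq_last_qe, sq_prod_qe, sq_cons_qe). Here is a minimal sketch of that pointer-based ring bookkeeping; the element contents, the wrap helper and the absence of a full-queue check are simplifications, and all names are stand-ins.

    #include <stdio.h>

    struct demo_sqe {
        unsigned int opcode;
    };

    /* Ring described by first/last bounds plus producer and consumer cursors,
     * in the spirit of the sq_first_qe/sq_last_qe/sq_prod_qe/sq_cons_qe fields. */
    struct demo_sq {
        struct demo_sqe *first;   /* start of the ring        */
        struct demo_sqe *last;    /* last element of the ring */
        struct demo_sqe *prod;    /* next slot to fill        */
        struct demo_sqe *cons;    /* next slot to consume     */
    };

    static struct demo_sqe *ring_next(struct demo_sq *sq, struct demo_sqe *qe)
    {
        return (qe == sq->last) ? sq->first : qe + 1;   /* wrap at the end */
    }

    static void sq_produce(struct demo_sq *sq, unsigned int opcode)
    {
        sq->prod->opcode = opcode;                      /* no full check: sketch only */
        sq->prod = ring_next(sq, sq->prod);
    }

    static int sq_consume(struct demo_sq *sq, unsigned int *opcode)
    {
        if (sq->cons == sq->prod)
            return 0;                                   /* empty */
        *opcode = sq->cons->opcode;
        sq->cons = ring_next(sq, sq->cons);
        return 1;
    }

    int main(void)
    {
        struct demo_sqe ring[4];
        struct demo_sq sq = { ring, &ring[3], ring, ring };
        unsigned int op;

        sq_produce(&sq, 0x10);
        sq_produce(&sq, 0x11);
        while (sq_consume(&sq, &op))
            printf("consumed opcode 0x%x\n", op);
        return 0;
    }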
/drivers/nvme/host/
rdma.c
65 struct nvme_rdma_qe sqe; member
290 kfree(req->sqe.data); in nvme_rdma_exit_request()
304 if (!req->sqe.data) in nvme_rdma_init_request()
314 nvme_req(rq)->cmd = req->sqe.data; in nvme_rdma_init_request()
1669 struct nvme_command *cmd = sqe->data; in nvme_rdma_submit_async_event()
1681 sqe->cqe.done = nvme_rdma_async_done; in nvme_rdma_submit_async_event()
1999 struct nvme_rdma_qe *sqe = &req->sqe; in nvme_rdma_queue_rq() local
2013 req->sqe.dma = ib_dma_map_single(dev, req->sqe.data, in nvme_rdma_queue_rq()
2020 ib_dma_sync_single_for_cpu(dev, sqe->dma, in nvme_rdma_queue_rq()
2045 sqe->cqe.done = nvme_rdma_send_done; in nvme_rdma_queue_rq()
[all …]
fc.c
1900 struct nvme_command *sqe = &op->cmd_iu.sqe; in nvme_fc_fcpio_done() local
2014 sqe->common.command_id, in nvme_fc_fcpio_done()
2131 struct nvme_command *sqe; in nvme_fc_init_aen_ops() local
2145 sqe = &cmdiu->sqe; in nvme_fc_init_aen_ops()
2157 memset(sqe, 0, sizeof(*sqe)); in nvme_fc_init_aen_ops()
2539 struct nvme_command *sqe = &cmdiu->sqe; in nvme_fc_timeout() local
2548 ctrl->cnum, qnum, sqe->common.opcode, sqe->fabrics.fctype, in nvme_fc_timeout()
2550 sqe->common.cdw10, sqe->common.cdw11); in nvme_fc_timeout()
2643 struct nvme_command *sqe = &cmdiu->sqe; in nvme_fc_start_fcp_op() local
2681 WARN_ON_ONCE(sqe->common.metadata); in nvme_fc_start_fcp_op()
[all …]
/drivers/infiniband/hw/erdma/
erdma_qp.c
403 static void init_send_sqe_rc(struct erdma_qp *qp, struct erdma_send_sqe_rc *sqe, in init_send_sqe_rc() argument
410 sqe->imm_data = wr->ex.imm_data; in init_send_sqe_rc()
413 sqe->invalid_stag = cpu_to_le32(wr->ex.invalidate_rkey); in init_send_sqe_rc()
419 static void init_send_sqe_ud(struct erdma_qp *qp, struct erdma_send_sqe_ud *sqe, in init_send_sqe_ud() argument
428 sqe->imm_data = wr->ex.imm_data; in init_send_sqe_ud()
433 sqe->ahn = cpu_to_le32(ah->ahn); in init_send_sqe_ud()
434 sqe->dst_qpn = cpu_to_le32(uwr->remote_qpn); in init_send_sqe_ud()
437 sqe->qkey = cpu_to_le32(qp->attrs.rocev2.qkey); in init_send_sqe_ud()
439 sqe->qkey = cpu_to_le32(uwr->remote_qkey); in init_send_sqe_ud()
/drivers/dma/
hisi_dma.c
141 struct hisi_dma_sqe sqe; member
492 desc->sqe.length = cpu_to_le32(len); in hisi_dma_prep_dma_memcpy()
493 desc->sqe.src_addr = cpu_to_le64(src); in hisi_dma_prep_dma_memcpy()
494 desc->sqe.dst_addr = cpu_to_le64(dst); in hisi_dma_prep_dma_memcpy()
508 struct hisi_dma_sqe *sqe = chan->sq + chan->sq_tail; in hisi_dma_start_transfer() local
522 memcpy(sqe, &desc->sqe, sizeof(struct hisi_dma_sqe)); in hisi_dma_start_transfer()
525 sqe->dw0 = cpu_to_le32(FIELD_PREP(OPCODE_MASK, OPCODE_M2M)); in hisi_dma_start_transfer()
526 sqe->dw0 |= cpu_to_le32(LOCAL_IRQ_EN); in hisi_dma_start_transfer()
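
The hisi_dma hits split submission into two steps: hisi_dma_prep_dma_memcpy() fills the software descriptor's embedded sqe (length, source, destination), and hisi_dma_start_transfer() later copies that sqe into the hardware ring slot at sq_tail and sets the opcode and interrupt-enable bits in dw0. The user-space sketch below mirrors only that prepare-then-issue split; the field layout and bit values are invented, and plain assignments stand in for the FIELD_PREP()/cpu_to_le32() conversions the driver performs.

    #include <stdint.h>
    #include <string.h>
    #include <stdio.h>

    #define SQ_DEPTH        4
    #define OPCODE_M2M      0x0u       /* stand-in opcode: memory-to-memory copy */
    #define LOCAL_IRQ_EN    (1u << 8)  /* stand-in "raise an interrupt" bit      */

    /* Illustrative SQE layout; the real driver also byte-swaps with
     * cpu_to_le*() because the hardware expects little-endian fields. */
    struct demo_dma_sqe {
        uint32_t dw0;        /* opcode + control bits */
        uint32_t length;
        uint64_t src_addr;
        uint64_t dst_addr;
    };

    struct demo_dma_desc {
        struct demo_dma_sqe sqe;   /* filled at prep time, issued later */
    };

    struct demo_dma_chan {
        struct demo_dma_sqe sq[SQ_DEPTH];  /* "hardware" submission queue */
        unsigned int sq_tail;
    };

    /* Prep step: describe the copy in the software descriptor only. */
    static void demo_prep_memcpy(struct demo_dma_desc *desc,
                                 uint64_t dst, uint64_t src, uint32_t len)
    {
        desc->sqe.length   = len;
        desc->sqe.src_addr = src;
        desc->sqe.dst_addr = dst;
    }

    /* Issue step: copy the descriptor into the ring slot at sq_tail and only
     * then fill in the opcode/interrupt bits, as start_transfer does. */
    static void demo_start_transfer(struct demo_dma_chan *chan,
                                    struct demo_dma_desc *desc)
    {
        struct demo_dma_sqe *sqe = &chan->sq[chan->sq_tail];

        memcpy(sqe, &desc->sqe, sizeof(*sqe));
        sqe->dw0 = OPCODE_M2M | LOCAL_IRQ_EN;
        chan->sq_tail = (chan->sq_tail + 1) % SQ_DEPTH;
    }

    int main(void)
    {
        struct demo_dma_chan chan = { 0 };
        struct demo_dma_desc desc;

        demo_prep_memcpy(&desc, 0x2000, 0x1000, 256);
        demo_start_transfer(&chan, &desc);
        printf("slot 0: dw0=0x%x len=%u\n",
               (unsigned)chan.sq[0].dw0, (unsigned)chan.sq[0].length);
        return 0;
    }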
/drivers/crypto/hisilicon/hpre/
hpre_crypto.c
325 struct hpre_sqe *sqe = &req->req; in hpre_hw_data_clr_all() local
328 tmp = le64_to_cpu(sqe->in); in hpre_hw_data_clr_all()
339 tmp = le64_to_cpu(sqe->out); in hpre_hw_data_clr_all()
365 id = (int)le16_to_cpu(sqe->tag); in hpre_alg_res_post_hf()
485 struct hpre_sqe *sqe = resp; in hpre_alg_cb() local
1456 struct hpre_sqe *sqe = &req->req; in hpre_ecdh_hw_data_clr_all() local
1459 dma = le64_to_cpu(sqe->in); in hpre_ecdh_hw_data_clr_all()
1466 dma = le64_to_cpu(sqe->out); in hpre_ecdh_hw_data_clr_all()
1777 struct hpre_sqe *sqe = &req->req; in hpre_curve25519_hw_data_clr_all() local
1780 dma = le64_to_cpu(sqe->in); in hpre_curve25519_hw_data_clr_all()
[all …]
/drivers/crypto/hisilicon/
debugfs.c
325 void *sqe; in qm_sq_dump() local
332 sqe = kzalloc(qm->sqe_size, GFP_KERNEL); in qm_sq_dump()
333 if (!sqe) in qm_sq_dump()
337 memcpy(sqe, qp->sqe + sqe_id * qm->sqe_size, qm->sqe_size); in qm_sq_dump()
338 memset(sqe + qm->debug.sqe_mask_offset, QM_SQE_ADDR_MASK, in qm_sq_dump()
341 dump_show(qm, sqe, qm->sqe_size, name); in qm_sq_dump()
343 kfree(sqe); in qm_sq_dump()
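
The debugfs hit shows qm_sq_dump() copying one SQE out of the live queue into a scratch buffer, overwriting the region at sqe_mask_offset with QM_SQE_ADDR_MASK bytes, and only then printing it, so the dump shows a sanitized copy while the live queue entry is never modified. A small user-space sketch of that copy-mask-dump sequence follows; the SQE size, mask offset, mask length and mask value are made up for the example.

    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    #define DEMO_SQE_SIZE    32
    #define DEMO_MASK_OFFSET 8     /* where the masked fields start (invented) */
    #define DEMO_MASK_LEN    16    /* how many bytes get masked (invented)     */
    #define DEMO_ADDR_MASK   0xFF  /* fill byte, like QM_SQE_ADDR_MASK         */

    static void dump_show(const uint8_t *buf, size_t size, const char *name)
    {
        printf("%s:", name);
        for (size_t i = 0; i < size; i++)
            printf(" %02x", buf[i]);
        printf("\n");
    }

    /* Copy the selected SQE out of the queue, blank its masked bytes,
     * then dump the sanitized copy; the live queue entry is untouched. */
    static int demo_sq_dump(const uint8_t *queue, unsigned int sqe_id)
    {
        uint8_t *sqe = calloc(1, DEMO_SQE_SIZE);

        if (!sqe)
            return -1;

        memcpy(sqe, queue + (size_t)sqe_id * DEMO_SQE_SIZE, DEMO_SQE_SIZE);
        memset(sqe + DEMO_MASK_OFFSET, DEMO_ADDR_MASK, DEMO_MASK_LEN);
        dump_show(sqe, DEMO_SQE_SIZE, "SQE");

        free(sqe);
        return 0;
    }

    int main(void)
    {
        uint8_t queue[2 * DEMO_SQE_SIZE];

        for (size_t i = 0; i < sizeof(queue); i++)
            queue[i] = (uint8_t)i;
        return demo_sq_dump(queue, 1);
    }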
/drivers/net/ethernet/marvell/octeontx2/nic/
otx2_txrx.c
54 sq->sqe_base = sq->sqe->base; in otx2_sq_set_sqe_base()
848 struct sk_buff *skb, int sqe, int hdr_len) in otx2_dma_map_tso_skb() argument
851 struct sg_list *sg = &sq->sg[sqe]; in otx2_dma_map_tso_skb()
881 u64 seg_addr, int hdr_len, int sqe) in otx2_tso_frag_dma_addr() argument
883 struct sg_list *sg = &sq->sg[sqe]; in otx2_tso_frag_dma_addr()
1392 int sq_idx, sqe; in otx2_free_pending_sqe() local
1396 for (sqe = 0; sqe < sq->sqe_cnt; sqe++) { in otx2_free_pending_sqe()
1397 sg = &sq->sg[sqe]; in otx2_free_pending_sqe()
/drivers/scsi/lpfc/
lpfc_nvme.c
1016 cid = cp->sqe.common.command_id; in lpfc_nvme_io_cmd_cmpl()
1085 cp->sqe.common.opcode, in lpfc_nvme_io_cmd_cmpl()
1086 cp->sqe.common.command_id, in lpfc_nvme_io_cmd_cmpl()
1098 cp->sqe.common.opcode, in lpfc_nvme_io_cmd_cmpl()
1099 cp->sqe.common.command_id, in lpfc_nvme_io_cmd_cmpl()
1214 struct nvme_common_command *sqe; in lpfc_nvme_prep_io_cmd() local
1274 sqe = &((struct nvme_fc_cmd_iu *) in lpfc_nvme_prep_io_cmd()
1275 nCmd->cmdaddr)->sqe.common; in lpfc_nvme_prep_io_cmd()
1541 struct nvme_common_command *sqe; in lpfc_nvme_fcp_io_submit() local
1630 sqe = &((struct nvme_fc_cmd_iu *) in lpfc_nvme_fcp_io_submit()
[all …]

Completed in 113 milliseconds
