/drivers/infiniband/hw/erdma/
erdma_qp.c
  338  while (i < send_wr->num_sge) {                    in fill_inline_data()
  339  bytes += send_wr->sg_list[i].length;              in fill_inline_data()
  385  while (i < send_wr->num_sge) {                    in fill_sgl()
  391  bytes += send_wr->sg_list[i].length;              in fill_sgl()
  473  flags = send_wr->send_flags;                      in erdma_push_one_sqe()
  509  if (unlikely(send_wr->num_sge != 1))              in erdma_push_one_sqe()
  558  mr = to_emr(reg_wr(send_wr)->mr);                 in erdma_push_one_sqe()
  637  send_wr->num_sge);                                in erdma_push_one_sqe()
  665  const struct ib_send_wr *wr = send_wr;            in erdma_post_send()
  669  if (!send_wr)                                     in erdma_post_send()
  [all …]
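The fill_inline_data() and fill_sgl() hits show the provider walking the WR's scatter list to total its payload. A minimal sketch of the inline-data variant, assuming a hypothetical device limit max_inline_len (not erdma's actual constant); the fill_sgl() path would write SGE descriptors into the WQE instead of copying bytes:

#include <rdma/ib_verbs.h>
#include <linux/string.h>

/* Sketch only: sum and copy the SGEs of a send WR into an inline
 * WQE buffer. max_inline_len is a hypothetical device limit. */
static int sketch_fill_inline_data(u8 *wqe_buf, u32 max_inline_len,
                                   const struct ib_send_wr *send_wr)
{
        u32 bytes = 0;
        int i = 0;

        while (i < send_wr->num_sge) {
                u32 len = send_wr->sg_list[i].length;

                if (bytes + len > max_inline_len)
                        return -EINVAL;
                /* With IB_SEND_INLINE, sge.addr is a CPU-addressable
                 * buffer, not a DMA address. */
                memcpy(wqe_buf + bytes,
                       (void *)(uintptr_t)send_wr->sg_list[i].addr, len);
                bytes += len;
                i++;
        }
        return bytes;
}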

erdma_verbs.h
  464  int erdma_post_send(struct ib_qp *ibqp, const struct ib_send_wr *send_wr,

/drivers/infiniband/ulp/isert/
ib_isert.c
   783  struct ib_send_wr send_wr;                        in isert_login_post_send()  local
   791  send_wr.next = NULL;                              in isert_login_post_send()
   795  send_wr.opcode = IB_WR_SEND;                      in isert_login_post_send()
   864  struct ib_send_wr *send_wr)                       in isert_init_send_wr()  argument
   875  send_wr->opcode = IB_WR_SEND;                     in isert_init_send_wr()
  1763  struct ib_send_wr *send_wr = &isert_cmd->tx_desc.send_wr;  in isert_put_response()  local
  1851  struct ib_send_wr *send_wr = &isert_cmd->tx_desc.send_wr;  in isert_put_nopin()  local
  1870  struct ib_send_wr *send_wr = &isert_cmd->tx_desc.send_wr;  in isert_put_logout_rsp()  local
  1888  struct ib_send_wr *send_wr = &isert_cmd->tx_desc.send_wr;  in isert_put_tm_rsp()  local
  1906  struct ib_send_wr *send_wr = &isert_cmd->tx_desc.send_wr;  in isert_put_reject()  local
  [all …]
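Every isert_put_*() response reuses the ib_send_wr embedded in the command's transmit descriptor (the member in ib_isert.h below). A sketch of that shape, with an illustrative descriptor layout rather than isert's exact one:

#include <rdma/ib_verbs.h>

/* Illustrative tx descriptor: one SGE and one send WR per response. */
struct sketch_tx_desc {
        struct ib_sge tx_sg;
        struct ib_send_wr send_wr;
};

static void sketch_init_send_wr(struct sketch_tx_desc *tx_desc,
                                struct ib_cqe *cqe)
{
        struct ib_send_wr *send_wr = &tx_desc->send_wr;

        send_wr->next = NULL;
        send_wr->wr_cqe = cqe;
        send_wr->sg_list = &tx_desc->tx_sg;
        send_wr->num_sge = 1;
        send_wr->opcode = IB_WR_SEND;
        send_wr->send_flags = IB_SEND_SIGNALED;
}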

ib_isert.h
  132  struct ib_send_wr send_wr;  member

/drivers/infiniband/core/
mad.c
   62  struct ib_ud_wr *wr = &mad_send_wr->send_wr;       in create_mad_addr_info()
  634  struct ib_ud_wr *send_wr = &mad_send_wr->send_wr;  in handle_outgoing_dr_smp()  local
  643  port_num = send_wr->port_num;                      in handle_outgoing_dr_smp()
  716  send_wr->wr.wr_cqe, drslid,                        in handle_outgoing_dr_smp()
  717  send_wr->pkey_index,                               in handle_outgoing_dr_smp()
  718  send_wr->port_num, &mad_wc);                       in handle_outgoing_dr_smp()
  826  pad = send_wr->pad;                                in alloc_send_rmpp_list()
  832  free_send_rmpp_list(send_wr);                      in alloc_send_rmpp_list()
  848  send_wr->cur_seg = container_of(send_wr->rmpp_list.next,  in alloc_send_rmpp_list()
  850  send_wr->last_ack_seg = send_wr->cur_seg;          in alloc_send_rmpp_list()
  [all …]
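Here the MAD layer keeps a struct ib_ud_wr (the mad_priv.h member below); its embedded wr field is what actually gets posted, while UD-specific addressing lives in the outer struct. A sketch of building a UD send this way, independent of the MAD code:

#include <rdma/ib_verbs.h>

/* ib_ud_wr extends the generic send WR with UD addressing; the
 * embedded wr member is what ib_post_send() consumes. qp must be
 * an IB_QPT_UD queue pair. */
static int sketch_post_ud_send(struct ib_qp *qp, struct ib_ah *ah,
                               u32 remote_qpn, u32 remote_qkey,
                               struct ib_sge *sge, struct ib_cqe *cqe)
{
        struct ib_ud_wr ud_wr = {};

        ud_wr.wr.wr_cqe = cqe;
        ud_wr.wr.sg_list = sge;
        ud_wr.wr.num_sge = 1;
        ud_wr.wr.opcode = IB_WR_SEND;
        ud_wr.wr.send_flags = IB_SEND_SIGNALED;
        ud_wr.ah = ah;
        ud_wr.remote_qpn = remote_qpn;
        ud_wr.remote_qkey = remote_qkey;

        return ib_post_send(qp, &ud_wr.wr, NULL);
}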

cma_trace.h
  186  __field(u32, send_wr)
  198  __entry->send_wr = qp_init_attr->cap.max_send_wr;
  218  rdma_show_qp_type(__entry->qp_type), __entry->send_wr,
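In cma_trace.h, send_wr is a tracepoint field rather than a work request: it captures cap.max_send_wr, the send-queue depth requested in ib_qp_init_attr. A sketch of where that value originates, with arbitrary depths:

#include <rdma/ib_verbs.h>
#include <rdma/rdma_cm.h>

/* Sketch: the send_wr value traced above is the send-queue depth a
 * ULP asks for when creating the QP. The depths here are arbitrary. */
static int sketch_create_qp(struct rdma_cm_id *cm_id, struct ib_pd *pd,
                            struct ib_cq *cq)
{
        struct ib_qp_init_attr attr = {};

        attr.qp_type = IB_QPT_RC;
        attr.send_cq = cq;
        attr.recv_cq = cq;
        attr.cap.max_send_wr = 128;     /* traced as __entry->send_wr */
        attr.cap.max_recv_wr = 128;
        attr.cap.max_send_sge = 1;
        attr.cap.max_recv_sge = 1;

        return rdma_create_qp(cm_id, pd, &attr);
}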

agent.c
  138  mad_send_wr->send_wr.port_num = port_num;  in agent_send_response()

mad_priv.h
  157  struct ib_ud_wr send_wr;  member

/drivers/infiniband/ulp/iser/

iscsi_iser.h
  144  #define ISER_GET_MAX_XMIT_CMDS(send_wr) ((send_wr \  argument
  247  struct ib_send_wr send_wr;  member
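The truncated macro at line 144 sizes the command window from the send-queue depth. A sketch of the idea it appears to implement, reserving a few WRs for control PDUs before dividing the rest among commands; the constants are illustrative, not iser's:

/* Sketch of the sizing behind ISER_GET_MAX_XMIT_CMDS: from the
 * negotiated send-queue depth, reserve WRs for control PDUs and
 * divide the remainder among commands. Reserve counts are
 * illustrative, not iser's actual constants. */
#define SKETCH_TX_MISC_PDUS     2       /* e.g. nop-out, logout */
#define SKETCH_RX_MISC_PDUS     2
#define SKETCH_WRS_PER_CMD      1

static inline unsigned int sketch_max_xmit_cmds(unsigned int max_send_wr)
{
        return (max_send_wr - SKETCH_TX_MISC_PDUS - SKETCH_RX_MISC_PDUS) /
               SKETCH_WRS_PER_CMD;
}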

iser_memory.c
  281  wr->wr.next = &tx_desc->send_wr;  in iser_reg_sig_mr()
  328  wr->wr.next = &tx_desc->send_wr;  in iser_fast_reg_mr()
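Both hits chain a memory-registration WR in front of the descriptor's send WR via .next, so a single post issues registration and send in order. A minimal sketch of that chaining (the ib_map_mr_sg() page-list setup is elided):

#include <rdma/ib_verbs.h>

/* Sketch: chain an IB_WR_REG_MR ahead of the data send, as the iser
 * lines above do, so one ib_post_send() issues both. */
static int sketch_post_reg_then_send(struct ib_qp *qp, struct ib_mr *mr,
                                     struct ib_send_wr *send_wr)
{
        struct ib_reg_wr reg = {};

        reg.wr.opcode = IB_WR_REG_MR;
        reg.wr.num_sge = 0;
        reg.mr = mr;
        reg.key = mr->rkey;
        reg.access = IB_ACCESS_LOCAL_WRITE | IB_ACCESS_REMOTE_READ;

        /* Registration completes before the send that uses the MR. */
        reg.wr.next = send_wr;

        return ib_post_send(qp, &reg.wr, NULL);
}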

iser_verbs.c
  859  struct ib_send_wr *wr = &tx_desc->send_wr;  in iser_post_send()

/drivers/nvme/target/

rdma.c
   63  struct ib_send_wr send_wr;  member
  420  r->send_wr.wr_cqe = &r->send_cqe;                  in nvmet_rdma_alloc_rsp()
  421  r->send_wr.sg_list = &r->send_sge;                 in nvmet_rdma_alloc_rsp()
  422  r->send_wr.num_sge = 1;                            in nvmet_rdma_alloc_rsp()
  423  r->send_wr.send_flags = IB_SEND_SIGNALED;          in nvmet_rdma_alloc_rsp()
  717  rsp->send_wr.opcode = IB_WR_SEND_WITH_INV;         in nvmet_rdma_queue_response()
  718  rsp->send_wr.ex.invalidate_rkey = rsp->invalidate_rkey;  in nvmet_rdma_queue_response()
  720  rsp->send_wr.opcode = IB_WR_SEND;                  in nvmet_rdma_queue_response()
  729  cm_id->port_num, NULL, &rsp->send_wr);             in nvmet_rdma_queue_response()
  731  first_wr = &rsp->send_wr;                          in nvmet_rdma_queue_response()
  [all …]
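nvmet-rdma fills the invariant send WR fields once at allocation (lines 420-423), then at queue time chooses SEND vs SEND_WITH_INV and lets the rdma_rw layer prepend any data-transfer WRs ahead of the response send. A condensed sketch; the sketch_rsp layout is illustrative:

#include <rdma/ib_verbs.h>
#include <rdma/rw.h>

/* Illustrative per-response state, not nvmet's actual struct. */
struct sketch_rsp {
        struct ib_send_wr send_wr;
        struct rdma_rw_ctx rw;
        u32 invalidate_rkey;
        bool invalidate;
        unsigned int n_rdma;
};

static int sketch_queue_response(struct sketch_rsp *rsp, struct ib_qp *qp,
                                 u32 port_num)
{
        struct ib_send_wr *first_wr;

        if (rsp->invalidate) {
                rsp->send_wr.opcode = IB_WR_SEND_WITH_INV;
                rsp->send_wr.ex.invalidate_rkey = rsp->invalidate_rkey;
        } else {
                rsp->send_wr.opcode = IB_WR_SEND;
        }

        /* rdma_rw_ctx_wrs() returns the head of the chain: any data
         * WRs first, with rsp->send_wr linked last. */
        if (rsp->n_rdma)
                first_wr = rdma_rw_ctx_wrs(&rsp->rw, qp, port_num,
                                           NULL, &rsp->send_wr);
        else
                first_wr = &rsp->send_wr;

        return ib_post_send(qp, first_wr, NULL);
}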

/drivers/infiniband/hw/mlx5/

wr.c
  416  static int set_sig_data_segment(const struct ib_send_wr *send_wr,  in set_sig_data_segment()  argument
  557  static int set_pi_umr_wr(const struct ib_send_wr *send_wr,  in set_pi_umr_wr()  argument
  561  const struct ib_reg_wr *wr = reg_wr(send_wr);      in set_pi_umr_wr()
  569  if (unlikely(send_wr->num_sge != 0) ||             in set_pi_umr_wr()
  599  ret = set_sig_data_segment(send_wr, wr->mr, sig_attrs, qp, seg, size,  in set_pi_umr_wr()
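Line 561 uses reg_wr(), one of the container_of()-based helpers in ib_verbs.h that recover the opcode-specific WR from the generic one. A sketch of a provider validating a registration WR in that style, with the device programming elided:

#include <rdma/ib_verbs.h>

/* Sketch: downcast the generic WR with the reg_wr() helper and
 * validate it, in the spirit of set_pi_umr_wr() above. */
static int sketch_handle_reg_wr(const struct ib_send_wr *send_wr)
{
        const struct ib_reg_wr *wr = reg_wr(send_wr);

        /* Memory-registration WRs carry no data SGEs of their own. */
        if (unlikely(send_wr->num_sge != 0))
                return -EINVAL;

        if (!wr->mr)
                return -EINVAL;

        /* ... program the device's registration segment from wr->mr,
         * wr->key and wr->access here ... */
        return 0;
}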

/drivers/infiniband/ulp/srpt/

ib_srpt.c
  2875  struct ib_send_wr send_wr, *first_wr = &send_wr;  in srpt_queue_response()  local
  2938  send_wr.next = NULL;                              in srpt_queue_response()
  2939  send_wr.wr_cqe = &ioctx->ioctx.cqe;               in srpt_queue_response()
  2940  send_wr.sg_list = &sge;                           in srpt_queue_response()
  2941  send_wr.num_sge = 1;                              in srpt_queue_response()
  2942  send_wr.opcode = IB_WR_SEND;                      in srpt_queue_response()
  2943  send_wr.send_flags = IB_SEND_SIGNALED;            in srpt_queue_response()
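srpt_queue_response() shows the canonical on-stack pattern: every relevant field of a single-SGE signaled SEND is filled in before posting. A self-contained sketch of the same shape (the DMA address, length and lkey would come from a mapped response buffer):

#include <rdma/ib_verbs.h>

/* Sketch of the on-stack pattern above: build one signaled SEND
 * over a single SGE and post it. */
static int sketch_post_response(struct ib_qp *qp, struct ib_cqe *cqe,
                                u64 dma_addr, u32 len, u32 lkey)
{
        struct ib_sge sge = {
                .addr   = dma_addr,
                .length = len,
                .lkey   = lkey,
        };
        struct ib_send_wr send_wr, *first_wr = &send_wr;

        send_wr.next = NULL;
        send_wr.wr_cqe = cqe;
        send_wr.sg_list = &sge;
        send_wr.num_sge = 1;
        send_wr.opcode = IB_WR_SEND;
        send_wr.send_flags = IB_SEND_SIGNALED;

        return ib_post_send(qp, first_wr, NULL);
}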

/drivers/infiniband/hw/bnxt_re/

ib_verbs.h
  239  int bnxt_re_post_send(struct ib_qp *qp, const struct ib_send_wr *send_wr,

/drivers/infiniband/hw/hns/

hns_roce_hw_v2.c
  3442  struct ib_send_wr *send_wr;                       in free_mr_post_send_lp_wqe()  local
  3445  send_wr = &rdma_wr.wr;                            in free_mr_post_send_lp_wqe()
  3446  send_wr->opcode = IB_WR_RDMA_WRITE;               in free_mr_post_send_lp_wqe()
  3448  ret = hns_roce_v2_post_send(&hr_qp->ibqp, send_wr, &bad_wr);  in free_mr_post_send_lp_wqe()
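As with ib_ud_wr and ib_reg_wr above, an RDMA WRITE travels as a struct ib_rdma_wr wrapping the generic WR. A sketch of the pattern, posted through the core ib_post_send() rather than the driver-internal hns_roce_v2_post_send(); the remote address and rkey are placeholders:

#include <rdma/ib_verbs.h>

/* Sketch: an RDMA WRITE carries the remote address and rkey in the
 * outer ib_rdma_wr; the embedded wr is what gets posted. */
static int sketch_post_rdma_write(struct ib_qp *qp, struct ib_sge *sge,
                                  u64 remote_addr, u32 rkey)
{
        struct ib_rdma_wr rdma_wr = {};
        struct ib_send_wr *send_wr = &rdma_wr.wr;
        const struct ib_send_wr *bad_wr;

        send_wr->opcode = IB_WR_RDMA_WRITE;
        send_wr->sg_list = sge;
        send_wr->num_sge = 1;
        rdma_wr.remote_addr = remote_addr;
        rdma_wr.rkey = rkey;

        return ib_post_send(qp, send_wr, &bad_wr);
}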