Identifier search results for max_send_wr in linux-6.3-rc2 (kernel source line numbers and enclosing functions shown per file).

/linux-6.3-rc2/drivers/infiniband/hw/mlx5/

  gsi.c
     56  wr = &gsi->outstanding_wrs[index % gsi->cap.max_send_wr];  in generate_completions()
    112  kcalloc(attr->cap.max_send_wr, sizeof(*gsi->outstanding_wrs),  in mlx5_ib_create_gsi()
    131  gsi->cq = ib_alloc_cq(pd->device, gsi, attr->cap.max_send_wr, 0,  in mlx5_ib_create_gsi()
    143  hw_init_attr.cap.max_send_wr = 0;  in mlx5_ib_create_gsi()
    208  .max_send_wr = gsi->cap.max_send_wr,  in create_gsi_ud_qp()
    366  if (gsi->outstanding_pi == gsi->outstanding_ci + gsi->cap.max_send_wr) {  in mlx5_ib_add_outstanding_wr()
    372  gsi->cap.max_send_wr];  in mlx5_ib_add_outstanding_wr()

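The gsi.c hits above revolve around one pattern: the driver keeps a software ring of outstanding send WRs sized by cap.max_send_wr, indexed modulo the ring depth and guarded by a producer/consumer index pair. A minimal sketch of that bookkeeping, using hypothetical demo_* names rather than the actual mlx5 structures:

    /* Hypothetical bookkeeping for a software send queue of depth max_send_wr,
     * mirroring the producer/consumer + modulo indexing seen in gsi.c. */
    #include <linux/errno.h>
    #include <linux/types.h>

    struct demo_wr_ring {
            u32 max_send_wr;        /* ring depth, fixed at QP create time */
            u32 pi;                 /* producer index, bumped on post */
            u32 ci;                 /* consumer index, bumped on completion */
    };

    /* Returns a slot index, or -ENOMEM when max_send_wr WRs are already outstanding. */
    static int demo_ring_reserve(struct demo_wr_ring *r)
    {
            if (r->pi == r->ci + r->max_send_wr)
                    return -ENOMEM;                 /* ring full */
            return r->pi++ % r->max_send_wr;        /* use old pi, then advance */
    }

    static void demo_ring_complete(struct demo_wr_ring *r)
    {
            r->ci++;
    }

The check at gsi.c:366 is exactly this "ring full" test: the producer may run at most max_send_wr ahead of the consumer.
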
/linux-6.3-rc2/drivers/infiniband/ulp/iser/

  iser_verbs.c
    242  unsigned int max_send_wr, cq_size;  in iser_create_ib_conn_res() (local)
    251  max_send_wr = ISER_QP_SIG_MAX_REQ_DTOS + 1;  in iser_create_ib_conn_res()
    253  max_send_wr = ISER_QP_MAX_REQ_DTOS + 1;  in iser_create_ib_conn_res()
    254  max_send_wr = min_t(unsigned int, max_send_wr,  in iser_create_ib_conn_res()
    257  cq_size = max_send_wr + ISER_QP_MAX_RECV_DTOS;  in iser_create_ib_conn_res()
    277  init_attr.cap.max_send_wr = max_send_wr;  in iser_create_ib_conn_res()
    280  iser_conn->max_cmds = ISER_GET_MAX_XMIT_CMDS(max_send_wr - 1);  in iser_create_ib_conn_res()
    288  ib_conn->cma_id, ib_conn->cma_id->qp, max_send_wr);  in iser_create_ib_conn_res()

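iser_verbs.c shows the usual ULP sizing dance: pick a send depth from protocol constants, clamp it to the device's max_qp_wr, and size a single CQ to cover both send and receive completions. A sketch of that pattern, assuming placeholder DEMO_* budgets (ib_alloc_cq() and struct ib_qp_init_attr are the in-kernel verbs API; everything else here is illustrative):

    #include <rdma/ib_verbs.h>

    #define DEMO_MAX_REQ_DTOS  512  /* assumed per-connection send budget */
    #define DEMO_MAX_RECV_DTOS 512  /* assumed per-connection recv budget */

    static struct ib_cq *demo_alloc_conn_cq(struct ib_device *ib_dev,
                                            struct ib_qp_init_attr *init_attr,
                                            void *ctx)
    {
            unsigned int max_send_wr, cq_size;

            /* protocol budget plus one extra WR (e.g. for a login/drain post) */
            max_send_wr = DEMO_MAX_REQ_DTOS + 1;
            max_send_wr = min_t(unsigned int, max_send_wr,
                                (unsigned int)ib_dev->attrs.max_qp_wr);

            /* one CQ entry per posted WR in either direction */
            cq_size = max_send_wr + DEMO_MAX_RECV_DTOS;

            init_attr->cap.max_send_wr = max_send_wr;
            init_attr->cap.max_recv_wr = DEMO_MAX_RECV_DTOS;

            return ib_alloc_cq(ib_dev, ctx, cq_size, 0, IB_POLL_SOFTIRQ);
    }
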
/linux-6.3-rc2/drivers/infiniband/core/

  uverbs_std_types_qp.c
     68  attr->cap.max_send_wr = cap->max_send_wr;  in set_caps()
     74  cap->max_send_wr = attr->cap.max_send_wr;  in set_caps()
    167  if (cap.max_send_wr) {  in UVERBS_HANDLER()

  uverbs_marshall.c
    110  dst->max_send_wr = src->cap.max_send_wr;  in ib_copy_qp_attr_to_user()

  rw.c
    677  attr->cap.max_send_wr += factor * attr->cap.max_rdma_ctxs;  in rdma_rw_init_qp()
    683  attr->cap.max_send_wr =  in rdma_rw_init_qp()
    684  min_t(u32, attr->cap.max_send_wr, dev->attrs.max_qp_wr);  in rdma_rw_init_qp()

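rw.c adjusts a caller's QP caps before creation: when max_rdma_ctxs is requested, rdma_rw_init_qp() reserves additional send WRs for those contexts and then clamps the total to the device's max_qp_wr. A simplified sketch of that adjustment (the real per-context factor depends on MR and signature requirements; the value below is illustrative only):

    #include <rdma/ib_verbs.h>

    static void demo_account_rdma_ctxs(struct ib_device *dev,
                                       struct ib_qp_init_attr *attr)
    {
            u32 factor = 1;         /* assumed WRs consumed per R/W context */

            attr->cap.max_send_wr += factor * attr->cap.max_rdma_ctxs;
            attr->cap.max_send_wr = min_t(u32, attr->cap.max_send_wr,
                                          dev->attrs.max_qp_wr);
    }
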
/linux-6.3-rc2/drivers/infiniband/hw/vmw_pvrdma/

  pvrdma_misc.c
    226  dst->max_send_wr = src->max_send_wr;  in pvrdma_qp_cap_to_ib()
    235  dst->max_send_wr = src->max_send_wr;  in ib_qp_cap_to_pvrdma()

  pvrdma_dev_api.h
    559  u32 max_send_wr;  (member)
    578  u32 max_send_wr;  (member)
    589  u32 max_send_wr;  (member)

  pvrdma_qp.c
    159  if (req_cap->max_send_wr > dev->dsr->caps.max_qp_wr ||  in pvrdma_set_sq_size()
    165  qp->sq.wqe_cnt = roundup_pow_of_two(max(1U, req_cap->max_send_wr));  in pvrdma_set_sq_size()
    169  req_cap->max_send_wr = qp->sq.wqe_cnt;  in pvrdma_set_sq_size()
    365  cmd->max_send_wr = init_attr->cap.max_send_wr;  in pvrdma_create_qp()
    380  cmd->max_send_wr, cmd->max_recv_wr, cmd->max_send_sge,  in pvrdma_create_qp()

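pvrdma_set_sq_size() is a typical provider-side negotiation: reject a request above the device cap, round the work queue up to a power of two, and write the depth actually allocated back into the cap so the consumer sees what it really got. A hedged sketch of that flow, with hypothetical names:

    #include <linux/errno.h>
    #include <linux/log2.h>
    #include <rdma/ib_verbs.h>

    static int demo_set_sq_size(u32 dev_max_qp_wr, struct ib_qp_cap *req_cap,
                                u32 *wqe_cnt)
    {
            if (req_cap->max_send_wr > dev_max_qp_wr)
                    return -EINVAL;

            /* hardware rings are power-of-two sized; never allocate zero */
            *wqe_cnt = roundup_pow_of_two(max(1U, req_cap->max_send_wr));

            /* report the depth actually allocated back to the caller */
            req_cap->max_send_wr = *wqe_cnt;
            return 0;
    }
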
/linux-6.3-rc2/net/sunrpc/xprtrdma/

  frwr_ops.c
    237  ep->re_attr.cap.max_send_wr = ep->re_max_requests * depth;  in frwr_query_device()
    238  if (ep->re_attr.cap.max_send_wr > max_qp_wr) {  in frwr_query_device()
    242  ep->re_attr.cap.max_send_wr = ep->re_max_requests * depth;  in frwr_query_device()
    244  ep->re_attr.cap.max_send_wr += RPCRDMA_BACKWARD_WRS;  in frwr_query_device()
    245  ep->re_attr.cap.max_send_wr += 1; /* for ib_drain_sq */  in frwr_query_device()

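frwr_query_device() budgets the send queue as credits times WRs-per-request, retries with fewer credits if that overshoots the device's max_qp_wr, then reserves extra slots for the backchannel and one more for ib_drain_sq(). A simplified sketch of the arithmetic (constants and names are placeholders, not RPC-over-RDMA's):

    #include <linux/types.h>

    #define DEMO_BACKWARD_WRS 8     /* assumed backchannel reservation */

    static u32 demo_sq_depth(u32 max_qp_wr, u32 *credits, u32 depth)
    {
            u32 max_send_wr = *credits * depth;

            if (max_send_wr > max_qp_wr) {
                    /* fewer in-flight requests, then recompute the budget */
                    *credits = max_qp_wr / depth;
                    max_send_wr = *credits * depth;
            }

            max_send_wr += DEMO_BACKWARD_WRS;
            max_send_wr += 1;       /* for ib_drain_sq() */
            return max_send_wr;
    }
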
  svc_rdma_transport.c
    446  qp_attr.cap.max_send_wr = newxprt->sc_sq_depth - ctxts;  in svc_rdma_accept()
    457  qp_attr.cap.max_send_wr, qp_attr.cap.max_recv_wr);  in svc_rdma_accept()

/linux-6.3-rc2/drivers/infiniband/ulp/rtrs/

  rtrs.c
    254  u32 max_send_wr, u32 max_recv_wr, u32 max_sge)  in create_qp() (argument)
    260  init_attr.cap.max_send_wr = max_send_wr;  in create_qp()
    295  u32 max_send_wr, u32 max_recv_wr,  in rtrs_cq_qp_create() (argument)
    304  err = create_qp(con, path->dev->ib_pd, max_send_wr, max_recv_wr,  in rtrs_cq_qp_create()

  rtrs-srv.c
   1635  u32 cq_num, max_send_wr, max_recv_wr, wr_limit;  in create_con() (local)
   1657  max_send_wr = min_t(int, wr_limit,  in create_con()
   1659  max_recv_wr = max_send_wr;  in create_con()
   1665  max_send_wr =  in create_con()
   1669  max_send_wr =  in create_con()
   1675  cq_num = max_send_wr + max_recv_wr;  in create_con()
   1676  atomic_set(&con->c.sq_wr_avail, max_send_wr);  in create_con()
   1681  max_send_wr, max_recv_wr,  in create_con()

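rtrs-srv.c sizes its per-connection queues similarly but also publishes the result: the CQ must hold one entry per posted WR in either direction, and the chosen send depth seeds an atomic counter that the sender decrements to throttle itself. A loose sketch of that setup, with hypothetical names and a simplified clamp:

    #include <linux/atomic.h>
    #include <rdma/ib_verbs.h>

    struct demo_con {
            atomic_t sq_wr_avail;   /* free send-queue slots */
            u32 cq_num;             /* CQ depth for this connection */
    };

    static void demo_size_con(struct demo_con *con, struct ib_device *ib_dev,
                              u32 wanted_send_wr, u32 wanted_recv_wr)
    {
            u32 max_send_wr, max_recv_wr;

            max_send_wr = min_t(u32, wanted_send_wr, ib_dev->attrs.max_qp_wr);
            max_recv_wr = min_t(u32, wanted_recv_wr, ib_dev->attrs.max_qp_wr);

            con->cq_num = max_send_wr + max_recv_wr;  /* one CQE per posted WR */
            atomic_set(&con->sq_wr_avail, max_send_wr);
    }
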
/linux-6.3-rc2/include/uapi/linux/

  rds.h
    249  __u32 max_send_wr;  (member)
    265  __u32 max_send_wr;  (member)

/linux-6.3-rc2/include/uapi/rdma/

  qedr-abi.h
     69  __u32 max_send_wr;  (member)

  ib_user_verbs.h
    564  __u32 max_send_wr;  (member)
    593  __u32 max_send_wr;  (member)
    619  __u32 max_send_wr;  (member)
    648  __u32 max_send_wr;  (member)
    692  __u32 max_send_wr;  (member)

  ib_user_ioctl_verbs.h
    227  __u32 max_send_wr;  (member)

/linux-6.3-rc2/drivers/infiniband/sw/rxe/

  rxe_qp.c
     21  if (cap->max_send_wr > rxe->attr.max_qp_wr) {  in rxe_qp_chk_cap()
     23  cap->max_send_wr, rxe->attr.max_qp_wr);  in rxe_qp_chk_cap()
    204  qp->sq.max_wr = init->cap.max_send_wr;  in rxe_qp_init_req()
    366  init->cap.max_send_wr = qp->sq.max_wr;  in rxe_qp_to_init()
    727  attr->cap.max_send_wr = qp->sq.max_wr;  in rxe_qp_to_attr()

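rxe_qp_chk_cap() is the soft-RoCE instance of the cap check that every provider performs before creating a QP: compare each requested value against the advertised device attributes and bail out otherwise. A minimal sketch of such a check (not the rxe code itself; it only uses the generic struct ib_device_attr / struct ib_qp_cap fields):

    #include <linux/errno.h>
    #include <rdma/ib_verbs.h>

    static int demo_chk_cap(const struct ib_device_attr *attr,
                            const struct ib_qp_cap *cap)
    {
            if (cap->max_send_wr > attr->max_qp_wr)
                    return -EINVAL;
            if (cap->max_recv_wr > attr->max_qp_wr)
                    return -EINVAL;
            if (cap->max_send_sge > attr->max_send_sge ||
                cap->max_recv_sge > attr->max_recv_sge)
                    return -EINVAL;
            return 0;
    }
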
/linux-6.3-rc2/drivers/infiniband/hw/qedr/

  qedr_roce_cm.c
    170  if (attrs->cap.max_send_wr > QEDR_GSI_MAX_SEND_WR) {  in qedr_check_gsi_qp_attrs()
    173  attrs->cap.max_send_wr, QEDR_GSI_MAX_SEND_WR);  in qedr_check_gsi_qp_attrs()
    282  data.input.tx_num_desc = attrs->cap.max_send_wr;  in qedr_ll2_start()
    340  qp->sq.max_wr = attrs->cap.max_send_wr;  in qedr_create_gsi_qp()

/linux-6.3-rc2/net/rds/

  ib.c
    322  iinfo->max_send_wr = ic->i_send_ring.w_nr;  in rds_ib_conn_info_visitor()
    359  iinfo6->max_send_wr = ic->i_send_ring.w_nr;  in rds6_ib_conn_info_visitor()

/linux-6.3-rc2/drivers/net/ethernet/mellanox/mlx5/core/steering/

  dr_send.c
     48  u32 max_send_wr;  (member)
    267  dr_qp->sq.wqe_cnt = roundup_pow_of_two(attr->max_send_wr);  in dr_create_rc_qp()
   1076  init_attr.max_send_wr = QUEUE_SIZE;  in mlx5dr_send_ring_alloc()
   1093  dmn->info.max_send_wr = QUEUE_SIZE;  in mlx5dr_send_ring_alloc()
   1097  dmn->send_ring->signal_th = dmn->info.max_send_wr /  in mlx5dr_send_ring_alloc()

/linux-6.3-rc2/drivers/infiniband/hw/efa/

  efa.h
    115  u32 max_send_wr;  (member)

  efa_verbs.c
    325  qp_attr->cap.max_send_wr = qp->max_send_wr;  in efa_query_qp()
    559  if (init_attr->cap.max_send_wr > dev->dev_attr.max_sq_depth) {  in efa_qp_validate_cap()
    562  init_attr->cap.max_send_wr,  in efa_qp_validate_cap()
    692  create_qp_params.sq_depth = init_attr->cap.max_send_wr;  in efa_create_qp()
    730  qp->max_send_wr = init_attr->cap.max_send_wr;  in efa_create_qp()

/linux-6.3-rc2/drivers/infiniband/hw/erdma/

  erdma_verbs.c
    274  attr->max_qp_wr = min(dev->attrs.max_send_wr, dev->attrs.max_recv_wr);  in erdma_query_device()
    400  if ((attrs->cap.max_send_wr > dev->attrs.max_send_wr) ||  in erdma_qp_validate_cap()
    405  !attrs->cap.max_send_wr || !attrs->cap.max_recv_wr) {  in erdma_qp_validate_cap()
    721  qp->attrs.sq_size = roundup_pow_of_two(attrs->cap.max_send_wr *  in erdma_create_qp()
   1331  qp_attr->cap.max_send_wr = qp->attrs.sq_size;  in erdma_query_qp()

/linux-6.3-rc2/drivers/infiniband/hw/hns/

  hns_roce_qp.c
    685  if (!cap->max_send_wr || cap->max_send_wr > hr_dev->caps.max_wqes ||  in set_kernel_sq_size()
    691  cnt = roundup_pow_of_two(max(cap->max_send_wr, hr_dev->caps.min_wqes));  in set_kernel_sq_size()
    704  cap->max_send_wr = cnt;  in set_kernel_sq_size()
    712  if (attr->qp_type == IB_QPT_XRC_TGT || !attr->cap.max_send_wr)  in hns_roce_qp_has_sq()

/linux-6.3-rc2/drivers/infiniband/hw/mana/

  qp.c
    295  if (attr->cap.max_send_wr > MAX_SEND_BUFFERS_PER_QUEUE) {  in mana_ib_create_qp_raw()
    298  attr->cap.max_send_wr);  in mana_ib_create_qp_raw()