/drivers/infiniband/hw/mana/

  wr.c
     23  if (wr->num_sge > MAX_WR_SGL_NUM)    in mana_ib_post_recv_ud()
     26  for (i = 0; i < wr->num_sge; ++i) {    in mana_ib_post_recv_ud()
     31  wqe_req.num_sge = wr->num_sge;    in mana_ib_post_recv_ud()
    100  if (wr->wr.num_sge > MAX_WR_SGL_NUM)    in mana_ib_post_send_ud()
    106  for (i = 0; i < wr->wr.num_sge; ++i) {    in mana_ib_post_send_ud()
    112  wqe_req.num_sge = wr->wr.num_sge + 1;    in mana_ib_post_send_ud()

/drivers/vfio/pci/pds/

  cmds.c
    221  lm_file->num_sge = lm_file->sg_table.nents;    in pds_vfio_dma_map_lm_file()
    224  sgl_size = lm_file->num_sge * sizeof(struct pds_lm_sg_elem);    in pds_vfio_dma_map_lm_file()
    254  lm_file->num_sge = 0;    in pds_vfio_dma_map_lm_file()
    269  lm_file->num_sge * sizeof(*lm_file->sgl),    in pds_vfio_dma_unmap_lm_file()
    274  lm_file->num_sge = 0;    in pds_vfio_dma_unmap_lm_file()
    307  cmd.lm_save.num_sge = cpu_to_le32(lm_file->num_sge);    in pds_vfio_get_lm_state_cmd()
    346  cmd.lm_restore.num_sge = cpu_to_le32(lm_file->num_sge);    in pds_vfio_set_lm_state_cmd()
    480  u64 sgl_dma, u16 num_sge, u32 offset,    in pds_vfio_dirty_seq_ack_cmd() argument
    490  .num_sge = cpu_to_le16(num_sge),    in pds_vfio_dirty_seq_ack_cmd()

  dirty.c
    119  region->num_sge * sizeof(struct pds_lm_sg_elem),    in __pds_vfio_dirty_free_sgl()
    123  region->num_sge = 0;    in __pds_vfio_dirty_free_sgl()
    168  region->num_sge = max_sge;    in pds_vfio_dirty_alloc_sgl()
    384  u16 num_sge;    in pds_vfio_dirty_seq_ack() local
    430  num_sge = sg_table.nents;    in pds_vfio_dirty_seq_ack()
    431  size = num_sge * sizeof(struct pds_lm_sg_elem);    in pds_vfio_dirty_seq_ack()
    434  err = pds_vfio_dirty_seq_ack_cmd(pds_vfio, region->sgl_addr, num_sge,    in pds_vfio_dirty_seq_ack()
    440  num_sge, region->sgl_addr, ERR_PTR(err));    in pds_vfio_dirty_seq_ack()

/drivers/infiniband/sw/rxe/

  rxe_odp.c
    414  u32 num_sge;    member
    433  WARN_ON(!work->num_sge);    in rxe_ib_prefetch_mr_work()
    434  for (i = 0; i < work->num_sge; ++i) {    in rxe_ib_prefetch_mr_work()
    460  u32 num_sge)    in rxe_ib_prefetch_sg_list() argument
    466  for (i = 0; i < num_sge; ++i) {    in rxe_ib_prefetch_sg_list()
    506  u32 num_sge)    in rxe_ib_advise_mr_prefetch() argument
    523  num_sge);    in rxe_ib_advise_mr_prefetch()
    532  work->num_sge = num_sge;    in rxe_ib_advise_mr_prefetch()
    534  for (i = 0; i < num_sge; ++i) {    in rxe_ib_advise_mr_prefetch()
    568  u32 num_sge,    in rxe_ib_advise_mr() argument
    [all …]

  rxe_verbs.c
    689  int num_sge = ibwr->num_sge;    in validate_send_wr() local
    703  if (num_sge > sq->max_sge) {    in validate_send_wr()
    709  for (i = 0; i < ibwr->num_sge; i++)    in validate_send_wr()
    845  int num_sge = ibwr->num_sge;    in init_send_wqe() local
    863  num_sge * sizeof(struct ib_sge));    in init_send_wqe()
    870  wqe->dma.num_sge = num_sge;    in init_send_wqe()
    975  int num_sge = ibwr->num_sge;    in post_one_recv() local
    986  if (unlikely(num_sge > rq->max_sge)) {    in post_one_recv()
    993  for (i = 0; i < num_sge; i++)    in post_one_recv()
   1007  recv_wqe->dma.num_sge = num_sge;    in post_one_recv()
    [all …]

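The rxe_verbs.c entries above (validate_send_wr(), post_one_recv()) show the pattern shared by most posting paths in this listing: the work request's num_sge is bounds-checked against the queue's max_sge, and sg_list is then copied into the driver-owned WQE together with the count. The sketch below illustrates only that pattern; the structs are simplified stand-ins modelled on include/rdma/ib_verbs.h rather than the rxe data structures, and MAX_RQ_SGE is a hypothetical per-queue limit.

```c
#include <errno.h>
#include <stdint.h>
#include <string.h>

/* Simplified stand-ins; field names mirror include/rdma/ib_verbs.h. */
struct ib_sge {
	uint64_t addr;
	uint32_t length;
	uint32_t lkey;
};

struct ib_recv_wr {
	uint64_t wr_id;
	struct ib_sge *sg_list;
	int num_sge;
};

#define MAX_RQ_SGE 4	/* hypothetical per-queue limit (rq->max_sge) */

struct recv_wqe {
	uint64_t wr_id;
	int num_sge;
	struct ib_sge sge[MAX_RQ_SGE];
};

/* Reject oversized scatter lists, then copy the SGL into the queue entry. */
int post_one_recv(struct recv_wqe *wqe, const struct ib_recv_wr *wr)
{
	if (wr->num_sge < 0 || wr->num_sge > MAX_RQ_SGE)
		return -EINVAL;

	wqe->wr_id = wr->wr_id;
	wqe->num_sge = wr->num_sge;
	memcpy(wqe->sge, wr->sg_list, wr->num_sge * sizeof(struct ib_sge));
	return 0;
}
```

The drivers below differ mainly in where the copy lands (rxe's wqe->dma, pvrdma's wqe_hdr) and in the limit they compare against.
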
/drivers/infiniband/sw/siw/

  siw_verbs.c
    656  int num_sge = core_wr->num_sge, bytes = 0;    in siw_copy_inline_sgl() local
    661  while (num_sge--) {    in siw_copy_inline_sgl()
    856  wr->num_sge);    in siw_post_send()
    857  sqe->num_sge = wr->num_sge;    in siw_post_send()
    865  sqe->num_sge = 1;    in siw_post_send()
    894  sqe->num_sge = 1;    in siw_post_send()
    905  wr->num_sge);    in siw_post_send()
    906  sqe->num_sge = wr->num_sge;    in siw_post_send()
    914  sqe->num_sge = 1;    in siw_post_send()
   1086  rqe->num_sge = wr->num_sge;    in siw_post_receive()
    [all …]

  siw_mem.h
     32  static inline void siw_unref_mem_sgl(struct siw_mem **mem, unsigned int num_sge)    in siw_unref_mem_sgl() argument
     34  while (num_sge) {    in siw_unref_mem_sgl()
     41  num_sge--;    in siw_unref_mem_sgl()

  siw_qp_rx.c
    352  int num_sge = rqe->num_sge;    in siw_rqe_get() local
    354  if (likely(num_sge <= SIW_MAX_SGE)) {    in siw_rqe_get()
    364  wqe->rqe.num_sge = num_sge;    in siw_rqe_get()
    366  while (i < num_sge) {    in siw_rqe_get()
    377  siw_dbg_qp(qp, "too many sge's: %d\n", rqe->num_sge);    in siw_rqe_get()
    578  wqe->rqe.num_sge = 1;    in siw_proc_write()
    710  resp->num_sge = length ? 1 : 0;    in siw_init_rresp()
    760  wqe->sqe.num_sge = 1;    in siw_orqe_start_rx()

  siw_verbs.h
     25  int num_sge)    in siw_copy_sgl() argument
     27  while (num_sge--) {    in siw_copy_sgl()

/drivers/net/ethernet/huawei/hinic3/

  hinic3_tx.c
    404  u32 offload, u16 num_sge, u16 *curr_pi)    in hinic3_set_wqe_combo() argument
    410  if (!offload && num_sge == 1) {    in hinic3_set_wqe_combo()
    425  if (num_sge > 1) {    in hinic3_set_wqe_combo()
    429  hinic3_wq_get_multi_wqebbs(&txq->sq->wq, num_sge - 1, &tmp_pi,    in hinic3_set_wqe_combo()
    439  num_sge + !!offload);    in hinic3_set_wqe_combo()
    487  u16 wqebb_cnt, num_sge;    in hinic3_send_one_skb() local
    500  num_sge = skb_shinfo(skb)->nr_frags + 1;    in hinic3_send_one_skb()
    502  wqebb_cnt = num_sge + 1;    in hinic3_send_one_skb()
    520  if (unlikely(num_sge == 1 &&    in hinic3_send_one_skb()
    528  owner = hinic3_set_wqe_combo(txq, &wqe_combo, offload, num_sge, &pi);    in hinic3_send_one_skb()
    [all …]

/drivers/infiniband/sw/rdmavt/

  trace_tx.h
     68  __field(int, num_sge)
     88  __entry->num_sge = wqe->wr.num_sge;
    109  __entry->num_sge,

  qp.c
    863  qp->r_sge.num_sge = 0;    in rvt_init_qp()
   1826  wqe->num_sge = wr->num_sge;    in rvt_post_recv()
   2044  if (wr->num_sge) {    in rvt_post_one_wr()
   2063  wqe->wr.num_sge = j;    in rvt_post_one_wr()
   2233  wqe->num_sge = wr->num_sge;    in rvt_post_srq_recv()
   2292  ss->num_sge = j;    in init_sge()
   2302  ss->num_sge = 0;    in init_sge()
   2980  sqp->s_sge.num_sge = wqe->wr.num_sge;    in rvt_ruc_loopback()
   3048  qp->r_sge.num_sge = 1;    in rvt_ruc_loopback()
   3065  qp->r_sge.num_sge = wqe->wr.num_sge;    in rvt_ruc_loopback()
    [all …]

  rc.c
    166  ss->num_sge = wqe->wr.num_sge;    in rvt_restart_sge()

  srq.c
    202  p->num_sge = wqe->num_sge;    in rvt_modify_srq()
    203  for (i = 0; i < wqe->num_sge; i++)    in rvt_modify_srq()

/drivers/infiniband/hw/mlx5/

  wr.c
    272  for (i = 0; i < wr->num_sge; i++) {    in set_data_inl_seg()
    569  if (unlikely(send_wr->num_sge != 0) ||    in set_pi_umr_wr()
    935  u8 next_fence, int *num_sge)    in handle_qpt_rc() argument
    955  *num_sge = 0;    in handle_qpt_rc()
    962  *num_sge = 0;    in handle_qpt_rc()
    971  *num_sge = 0;    in handle_qpt_rc()
   1064  int num_sge;    in mlx5_ib_post_send() local
   1090  num_sge = wr->num_sge;    in mlx5_ib_post_send()
   1131  next_fence, &num_sge);    in mlx5_ib_post_send()
   1170  for (i = 0; i < num_sge; i++) {    in mlx5_ib_post_send()
    [all …]

  odp.c
   1936  u32 num_sge;    member
   1948  for (i = 0; i < work->num_sge; ++i)    in destroy_prefetch_work()
   2002  WARN_ON(!work->num_sge);    in mlx5_ib_prefetch_mr_work()
   2003  for (i = 0; i < work->num_sge; ++i) {    in mlx5_ib_prefetch_mr_work()
   2018  struct ib_sge *sg_list, u32 num_sge)    in init_prefetch_work() argument
   2025  for (i = 0; i < num_sge; ++i) {    in init_prefetch_work()
   2030  work->num_sge = i;    in init_prefetch_work()
   2037  work->num_sge = num_sge;    in init_prefetch_work()
   2044  u32 num_sge)    in mlx5_ib_prefetch_sg_list() argument
   2050  for (i = 0; i < num_sge; ++i) {    in mlx5_ib_prefetch_sg_list()
    [all …]

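In odp.c above, work->num_sge doubles as a progress counter: init_prefetch_work() stores the loop index on a failed lookup (line 2030) and the full count only when every SGE resolved (line 2037), so destroy_prefetch_work() drops exactly the references that were taken (line 1948). The rxe_odp.c entries earlier show the same shape. Below is a minimal sketch of that partial-initialization cleanup pattern; take_ref()/put_ref() and the array bound are hypothetical stand-ins for the MR lookup and release, not the mlx5 API.

```c
#include <stdlib.h>

#define MAX_PREFETCH_SGE 16	/* illustrative bound, not an mlx5 constant */

struct prefetch_work {
	unsigned int num_sge;		/* how many entries hold a reference */
	void *mr[MAX_PREFETCH_SGE];
};

/* Hypothetical stand-ins for the per-SGE MR lookup and its refcount drop. */
void *take_ref(unsigned int key) { (void)key; return malloc(1); }
void put_ref(void *mr) { free(mr); }

void destroy_prefetch_work(struct prefetch_work *w)
{
	/* Walk only the entries that init managed to fill. */
	for (unsigned int i = 0; i < w->num_sge; ++i)
		put_ref(w->mr[i]);
	free(w);
}

struct prefetch_work *init_prefetch_work(const unsigned int *keys,
					 unsigned int num_sge)
{
	struct prefetch_work *w;

	if (num_sge > MAX_PREFETCH_SGE)
		return NULL;
	w = calloc(1, sizeof(*w));
	if (!w)
		return NULL;

	for (unsigned int i = 0; i < num_sge; ++i) {
		w->mr[i] = take_ref(keys[i]);
		if (!w->mr[i]) {
			w->num_sge = i;		/* record partial progress */
			destroy_prefetch_work(w);
			return NULL;
		}
	}
	w->num_sge = num_sge;
	return w;
}
```
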
/drivers/infiniband/hw/erdma/

  erdma_qp.c
    338  while (i < send_wr->num_sge) {    in fill_inline_data()
    379  if (send_wr->num_sge > qp->dev->attrs.max_send_sge)    in fill_sgl()
    385  while (i < send_wr->num_sge) {    in fill_sgl()
    509  if (unlikely(send_wr->num_sge != 1))    in erdma_push_one_sqe()
    533  send_wr->num_sge * sizeof(struct ib_sge);    in erdma_push_one_sqe()
    635  wqe_size += send_wr->num_sge * sizeof(struct ib_sge);    in erdma_push_one_sqe()
    637  send_wr->num_sge);    in erdma_push_one_sqe()
    711  if (recv_wr->num_sge == 0) {    in erdma_post_recv_one()
    713  } else if (recv_wr->num_sge == 1) {    in erdma_post_recv_one()

/drivers/infiniband/hw/hfi1/

  uc.c
    114  qp->s_sge.num_sge = wqe->wr.num_sge;    in hfi1_make_uc_req()
    293  qp->r_sge.num_sge = 0;    in hfi1_uc_rcv()
    458  qp->r_sge.num_sge = 1;    in hfi1_uc_rcv()
    460  qp->r_sge.num_sge = 0;    in hfi1_uc_rcv()
    535  qp->r_sge.num_sge = 0;    in hfi1_uc_rcv()

/drivers/infiniband/ulp/rtrs/

  rtrs.c
     90  .num_sge = 1,    in rtrs_iu_post_recv()
    145  .num_sge = 1,    in rtrs_iu_post_send()
    155  struct ib_sge *sge, unsigned int num_sge,    in rtrs_iu_post_rdma_write_imm() argument
    167  .wr.num_sge = num_sge,    in rtrs_iu_post_rdma_write_imm()
    179  for (i = 0; i < num_sge; i++)    in rtrs_iu_post_rdma_write_imm()

/drivers/infiniband/core/

  uverbs_std_types_mr.c
     55  int num_sge;    in UVERBS_HANDLER() local
     72  num_sge = uverbs_attr_ptr_get_array_size(    in UVERBS_HANDLER()
     74  if (num_sge <= 0)    in UVERBS_HANDLER()
     75  return num_sge;    in UVERBS_HANDLER()
     79  return ib_dev->ops.advise_mr(pd, advice, flags, sg_list, num_sge,    in UVERBS_HANDLER()

  rw.c
    160  reg->wr.wr.num_sge = 1;    in rdma_rw_init_mr_wrs()
    224  rdma_wr->wr.num_sge = nr_sge;    in rdma_rw_init_map_wrs()
    269  rdma_wr->wr.num_sge = 1;    in rdma_rw_init_single_wr()
    428  ctx->reg->reg_wr.wr.num_sge = 0;    in rdma_rw_ctx_signature_init()
    444  rdma_wr->wr.num_sge = 1;    in rdma_rw_ctx_signature_init()

/drivers/infiniband/hw/vmw_pvrdma/

  pvrdma_qp.c
    704  if (unlikely(wr->num_sge > qp->sq.max_sg || wr->num_sge < 0)) {    in pvrdma_post_send()
    753  wqe_hdr->num_sge = wr->num_sge;    in pvrdma_post_send()
    836  for (i = 0; i < wr->num_sge; i++) {    in pvrdma_post_send()
    904  if (unlikely(wr->num_sge > qp->rq.max_sg ||    in pvrdma_post_recv()
    905  wr->num_sge < 0)) {    in pvrdma_post_recv()
    924  wqe_hdr->num_sge = wr->num_sge;    in pvrdma_post_recv()
    928  for (i = 0; i < wr->num_sge; i++) {    in pvrdma_post_recv()

/drivers/infiniband/hw/cxgb4/

  qp.c
    423  for (i = 0; i < wr->num_sge; i++) {    in build_immd()
    455  int num_sge, u32 *plenp)    in build_isgl() argument
    467  for (i = 0; i < num_sge; i++) {    in build_isgl()
    482  isglp->nsge = cpu_to_be16(num_sge);    in build_isgl()
    496  if (wr->num_sge > T4_MAX_SEND_SGE)    in build_rdma_send()
    525  if (wr->num_sge) {    in build_rdma_send()
    537  wr->sg_list, wr->num_sge, &plen);    in build_rdma_send()
    563  if (wr->num_sge > T4_MAX_SEND_SGE)    in build_rdma_write()
    576  if (wr->num_sge) {    in build_rdma_write()
    588  wr->sg_list, wr->num_sge, &plen);    in build_rdma_write()
    [all …]

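build_isgl() above walks the caller's sg_list, converts each entry to the adapter's byte order, accumulates the payload length in *plenp, and stores the entry count last (line 482); the callers first cap wr->num_sge at T4_MAX_SEND_SGE (lines 496 and 563). The sketch below shows only that shape; struct hw_sge is a made-up layout, not the real T4 firmware SGL.

```c
#include <endian.h>
#include <stdint.h>

/* Fictional hardware SGL entry; the real T4 descriptor layout differs. */
struct hw_sge {
	uint32_t length_be;
	uint32_t lkey_be;
	uint64_t addr_be;
};

struct ib_sge {
	uint64_t addr;
	uint32_t length;
	uint32_t lkey;
};

/* Copy num_sge entries into the device SGL and return the payload total. */
uint32_t build_isgl(struct hw_sge *isgl, const struct ib_sge *sg_list,
		    int num_sge)
{
	uint32_t plen = 0;

	for (int i = 0; i < num_sge; i++) {
		isgl[i].length_be = htobe32(sg_list[i].length);
		isgl[i].lkey_be = htobe32(sg_list[i].lkey);
		isgl[i].addr_be = htobe64(sg_list[i].addr);
		plen += sg_list[i].length;
	}
	return plen;
}
```

The ocrdma_build_sges() entries at the end of this listing follow the same loop against that driver's own descriptor format.
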
/drivers/infiniband/hw/qedr/

  qedr_roce_cm.c
    406  for (i = 0; i < swr->num_sge; ++i)    in qedr_gsi_build_header()
    530  packet->n_seg = swr->num_sge;    in qedr_gsi_build_packet()
    558  if (wr->num_sge > RDMA_MAX_SGE_PER_SQ_WQE) {    in qedr_gsi_post_send()
    560  wr->num_sge, RDMA_MAX_SGE_PER_SQ_WQE);    in qedr_gsi_post_send()
    631  if (wr->num_sge > QEDR_GSI_MAX_RECV_SGE) {    in qedr_gsi_post_recv()
    634  wr->num_sge, QEDR_GSI_MAX_RECV_SGE);    in qedr_gsi_post_recv()

/drivers/infiniband/hw/ocrdma/

  ocrdma_verbs.c
   1897  struct ocrdma_sge *sge, int num_sge,    in ocrdma_build_sges() argument
   1902  for (i = 0; i < num_sge; i++) {    in ocrdma_build_sges()
   1909  if (num_sge == 0)    in ocrdma_build_sges()
   1917  for (i = 0; i < num_sge; i++)    in ocrdma_sglist_len()
   1940  for (i = 0; i < wr->num_sge; i++) {    in ocrdma_build_inline_sges()
   1953  if (wr->num_sge)    in ocrdma_build_inline_sges()
   2006  ocrdma_build_sges(hdr, sge, wr->num_sge, wr->sg_list);    in ocrdma_build_read()
   2114  wr->num_sge > qp->sq.max_sges) {    in ocrdma_post_send()
   2207  if (wr->num_sge)    in ocrdma_build_rqe()
   2239  wr->num_sge > qp->rq.max_sges) {    in ocrdma_post_recv()
    [all …]