/drivers/infiniband/hw/irdma/
uk.c
      62  u32 wqe_idx;                                       in irdma_nop_1() (local)
      97  u32 wqe_idx;                                       in irdma_clr_wqes() (local)
     102  if (wqe_idx)                                       in irdma_clr_wqes()
     183  if (!*wqe_idx)                                     in irdma_qp_get_next_send_wqe()
     218  if (!*wqe_idx)                                     in irdma_qp_get_next_recv_wqe()
     336  u32 wqe_idx;                                       in irdma_uk_rdma_read() (local)
     660  u32 wqe_idx;                                       in irdma_uk_inline_rdma_write() (local)
     728  u32 wqe_idx;                                       in irdma_uk_inline_send() (local)
     801  u32 wqe_idx;                                       in irdma_uk_stag_local_invalidate() (local)
     977  u32 wqe_idx;                                       in irdma_uk_cq_poll_cmpl() (local)
          [all …]
|
puda.c
      90  wqe = qp->qp_uk.rq_base[wqe_idx].elem;             in irdma_puda_post_recvbuf()
     192  u32 *wqe_idx)                                      in irdma_puda_get_next_send_wqe() (argument)
     197  if (!*wqe_idx)                                     in irdma_puda_get_next_send_wqe()
     203  return qp->sq_base[*wqe_idx].elem;                 in irdma_puda_get_next_send_wqe()
     363  qp->rq_wrid_array[info.wqe_idx];                   in irdma_puda_poll_cmpl()
     374  info.wqe_idx);                                     in irdma_puda_poll_cmpl()
     383  rsrc->compl_rxwqe_idx = info.wqe_idx;              in irdma_puda_poll_cmpl()
     430  u32 wqe_idx;                                       in irdma_puda_send() (local)
    1124  wqe = qp->qp_uk.rq_base[wqe_idx].elem;             in irdma_ilq_putback_rcvbuf()
    1679  u32 wqe_idx = ieq->compl_rxwqe_idx;                in irdma_ieq_receive() (local)
          [all …]
|
utils.c
     534  u32 i, pending_work, wqe_idx;                      in irdma_cleanup_pending_cqp_op() (local)
     537  wqe_idx = IRDMA_RING_CURRENT_TAIL(cqp->sc_cqp.sq_ring);      in irdma_cleanup_pending_cqp_op()
     540  cqp->scratch_array[wqe_idx];                       in irdma_cleanup_pending_cqp_op()
     543  wqe_idx = (wqe_idx + 1) % IRDMA_RING_SIZE(cqp->sc_cqp.sq_ring);      in irdma_cleanup_pending_cqp_op()
    2340  u32 wqe_idx;                                       in irdma_generate_flush_completions() (local)
    2357  wqe_idx = sq_ring->tail;                           in irdma_generate_flush_completions()
    2360  cmpl->cpi.wr_id = qp->sq_wrtrk_array[wqe_idx].wrid;      in irdma_generate_flush_completions()
    2361  sw_wqe = qp->sq_base[wqe_idx].elem;                in irdma_generate_flush_completions()
    2401  wqe_idx = rq_ring->tail;                           in irdma_generate_flush_completions()
    2404  cmpl->cpi.wr_id = qp->rq_wrid_array[wqe_idx];      in irdma_generate_flush_completions()
          [all …]
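The utils.c hits above show irdma deriving wqe_idx directly from a ring position: irdma_cleanup_pending_cqp_op() starts at the CQP SQ ring tail and steps through pending entries modulo the ring size, and irdma_generate_flush_completions() reads the wr_id tracked for the WQE at the SQ/RQ tail. Below is a minimal standalone sketch of that tail walk; the ring struct, sizes, and helper names are hypothetical stand-ins, not the driver's actual types.

```c
#include <stdint.h>
#include <stdio.h>

#define RING_SIZE 8   /* hypothetical; irdma uses IRDMA_RING_SIZE(ring) */

struct fake_ring {
	uint32_t head;
	uint32_t tail;
	void *scratch_array[RING_SIZE];   /* models cqp->scratch_array */
};

/* Walk every pending entry from tail toward head, wrapping modulo the
 * ring size, the way irdma_cleanup_pending_cqp_op() visits the scratch
 * entry saved for each posted CQP WQE. */
static void walk_pending(struct fake_ring *ring)
{
	uint32_t pending = (ring->head + RING_SIZE - ring->tail) % RING_SIZE;
	uint32_t wqe_idx = ring->tail;

	for (uint32_t i = 0; i < pending; i++) {
		void *scratch = ring->scratch_array[wqe_idx];

		printf("wqe_idx %u -> scratch %p\n", wqe_idx, scratch);
		wqe_idx = (wqe_idx + 1) % RING_SIZE;
	}
}

int main(void)
{
	struct fake_ring ring = { .head = 2, .tail = 6 };

	ring.scratch_array[6] = &ring;   /* one pretend pending op */
	walk_pending(&ring);             /* visits slots 6, 7, 0, 1 */
	return 0;
}
```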
|
user.h
     398  __le64 *irdma_qp_get_next_send_wqe(struct irdma_qp_uk *qp, u32 *wqe_idx,
     401  __le64 *irdma_qp_get_next_recv_wqe(struct irdma_qp_uk *qp, u32 *wqe_idx);
|
ctrl.c
    1297  u32 wqe_idx;                                       in irdma_sc_mr_fast_register() (local)
    1316  irdma_clr_wqes(&qp->qp_uk, wqe_idx);               in irdma_sc_mr_fast_register()
    1320  info->wr_id, wqe_idx,                              in irdma_sc_mr_fast_register()
    1321  &qp->qp_uk.sq_wrtrk_array[wqe_idx].wrid);          in irdma_sc_mr_fast_register()
    3261  u32 *wqe_idx)                                      in irdma_sc_cqp_get_next_send_wqe_idx() (argument)
    3278  if (!*wqe_idx)                                     in irdma_sc_cqp_get_next_send_wqe_idx()
    3280  wqe = cqp->sq_base[*wqe_idx].elem;                 in irdma_sc_cqp_get_next_send_wqe_idx()
    3281  cqp->scratch_array[*wqe_idx] = scratch;            in irdma_sc_cqp_get_next_send_wqe_idx()
    3351  u32 wqe_idx;                                       in irdma_sc_ccq_get_cqe_info() (local)
    3382  info->scratch = cqp->scratch_array[wqe_idx];       in irdma_sc_ccq_get_cqe_info()
          [all …]
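The ctrl.c entries for irdma_sc_cqp_get_next_send_wqe_idx() and irdma_sc_ccq_get_cqe_info() show the round trip: the index of the CQP WQE handed out is used to file the caller's scratch cookie, and the completion path pulls that cookie back out with the same wqe_idx. A rough standalone model of that pairing, with made-up types and depth, looks like this:

```c
#include <stdint.h>
#include <stdio.h>

#define SQ_DEPTH 4   /* hypothetical CQP SQ depth */

struct fake_cqp {
	uint32_t next;                       /* next free slot */
	uint64_t scratch_array[SQ_DEPTH];    /* models cqp->scratch_array */
};

/* Hand out the next WQE slot and remember the caller's cookie there,
 * as irdma_sc_cqp_get_next_send_wqe_idx() does with 'scratch'. */
static uint32_t get_next_send_wqe_idx(struct fake_cqp *cqp, uint64_t scratch)
{
	uint32_t wqe_idx = cqp->next++ % SQ_DEPTH;

	cqp->scratch_array[wqe_idx] = scratch;
	return wqe_idx;
}

/* Completion side: the CQE reports which slot finished, so the saved
 * cookie can be returned (the irdma_sc_ccq_get_cqe_info() pattern). */
static uint64_t complete_wqe(struct fake_cqp *cqp, uint32_t wqe_idx)
{
	return cqp->scratch_array[wqe_idx];
}

int main(void)
{
	struct fake_cqp cqp = { 0 };
	uint32_t idx = get_next_send_wqe_idx(&cqp, 0xabcd);

	printf("scratch back: 0x%llx\n",
	       (unsigned long long)complete_wqe(&cqp, idx));
	return 0;
}
```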
|
type.h
     949  u16 wqe_idx;                                       (member)
    1481  u32 *wqe_idx);
    1490  u32 wqe_idx;                                       in irdma_sc_cqp_get_next_send_wqe() (local)
    1492  return irdma_sc_cqp_get_next_send_wqe_idx(cqp, scratch, &wqe_idx);      in irdma_sc_cqp_get_next_send_wqe()
|
puda.h
      38  u32 wqe_idx;                                       (member)
|
hw.c
     399  ctx_info->roce_info->err_rq_idx = info->wqe_idx;   in irdma_process_aeq()
     409  ctx_info->iwarp_info->err_rq_idx = info->wqe_idx;  in irdma_process_aeq()
|
/drivers/infiniband/hw/erdma/
erdma_cq.c
     133  u16 wqe_idx, depth;                                in erdma_poll_one_cqe() (local)
     148  wqe_idx = be32_to_cpu(cqe->qe_idx);                in erdma_poll_one_cqe()
     164  wqe_hdr = get_queue_entry(qp->kern_qp.sq_buf, wqe_idx,      in erdma_poll_one_cqe()
     168  wqe_idx + 1;                                       in erdma_poll_one_cqe()
     173  wc->wr_id = id_table[wqe_idx & (depth - 1)];       in erdma_poll_one_cqe()
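The erdma_poll_one_cqe() hits show the driver reading the WQE index out of the big-endian CQE (be32_to_cpu(cqe->qe_idx)) and masking it with depth - 1 to look up the posted wr_id, which relies on the queue depth being a power of two. A small self-contained illustration of that mask follows; the names here are invented, not erdma's:

```c
#include <stdint.h>
#include <stdio.h>

/* Map a free-running WQE index reported in a CQE back to a slot in a
 * wr_id table, assuming the queue depth is a power of two so that
 * 'idx & (depth - 1)' is equivalent to 'idx % depth'. */
static uint64_t lookup_wr_id(const uint64_t *id_table, uint16_t depth,
			     uint16_t wqe_idx)
{
	return id_table[wqe_idx & (depth - 1)];
}

int main(void)
{
	uint64_t id_table[8] = { 0 };
	uint16_t depth = 8;

	id_table[5] = 0x1234;   /* wr_id posted at slot 5 */
	/* a wrapped index of 13 lands back on slot 5 */
	printf("wr_id = 0x%llx\n",
	       (unsigned long long)lookup_wr_id(id_table, depth, 13));
	return 0;
}
```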
|
erdma_qp.c
     326  const struct ib_send_wr *send_wr, u16 wqe_idx,     in fill_inline_data() (argument)
     333  wqe_idx += (sgl_offset >> SQEBB_SHIFT);            in fill_inline_data()
     335  data = get_queue_entry(qp->kern_qp.sq_buf, wqe_idx, qp->attrs.sq_size,      in fill_inline_data()
     356  wqe_idx += (sgl_offset >> SQEBB_SHIFT);            in fill_inline_data()
     359  data = get_queue_entry(qp->kern_qp.sq_buf, wqe_idx,      in fill_inline_data()
     373  u16 wqe_idx, u32 sgl_offset, __le32 *length_field) in fill_sgl() (argument)
     386  wqe_idx += (sgl_offset >> SQEBB_SHIFT);            in fill_sgl()
     388  sgl = get_queue_entry(qp->kern_qp.sq_buf, wqe_idx, in fill_sgl()
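In fill_inline_data() and fill_sgl() the hits advance wqe_idx by sgl_offset >> SQEBB_SHIFT: once the inline payload or SGL spills past the current send-queue element basic block, the index moves to the block it continues in and the buffer pointer is re-fetched. Below is a sketch of that advance under an assumed 32-byte basic block; the constant value and helper are placeholders, not erdma's definitions.

```c
#include <stdint.h>
#include <stdio.h>

#define SQEBB_SHIFT 5                    /* assumed 32-byte basic blocks */
#define SQEBB_SIZE  (1u << SQEBB_SHIFT)

/* Given how far into the WQE the data area currently is, compute which
 * basic block (SQEBB) the next byte lands in, mirroring the
 * 'wqe_idx += sgl_offset >> SQEBB_SHIFT' lines in the listing. */
static uint16_t advance_to_block(uint16_t wqe_idx, uint32_t sgl_offset)
{
	return wqe_idx + (sgl_offset >> SQEBB_SHIFT);
}

int main(void)
{
	uint16_t wqe_idx = 2;                      /* WQE starts at SQEBB 2 */
	uint32_t sgl_offset = 3 * SQEBB_SIZE + 4;  /* data already spans 3 blocks */

	printf("data continues in SQEBB %u\n",
	       (unsigned)advance_to_block(wqe_idx, sgl_offset));  /* 2 + 3 = 5 */
	return 0;
}
```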
|
/drivers/infiniband/hw/ocrdma/
ocrdma_verbs.c
    1599  u32 qpn = 0, wqe_idx = 0;                          in ocrdma_discard_cqes() (local)
    1635  BUG_ON(wqe_idx < 1);                               in ocrdma_discard_cqes()
    2405  u32 wqe_idx)                                       in ocrdma_update_wc() (argument)
    2566  u32 wqe_idx;                                       in ocrdma_poll_success_scqe() (local)
    2577  wqe_idx = (le32_to_cpu(cqe->wq.wqeidx) &           in ocrdma_poll_success_scqe()
    2579  if (tail != wqe_idx)                               in ocrdma_poll_success_scqe()
    2635  u32 wqe_idx;                                       in ocrdma_update_free_srq_cqe() (local)
    2638  wqe_idx = (le32_to_cpu(cqe->rq.buftag_qpn) >>      in ocrdma_update_free_srq_cqe()
    2640  BUG_ON(wqe_idx < 1);                               in ocrdma_update_free_srq_cqe()
    2642  ibwc->wr_id = srq->rqe_wr_id_tbl[wqe_idx];         in ocrdma_update_free_srq_cqe()
          [all …]
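The ocrdma hits extract wqe_idx from a little-endian CQE word via le32_to_cpu() plus a mask/shift, and both ocrdma_discard_cqes() and ocrdma_update_free_srq_cqe() check BUG_ON(wqe_idx < 1), which suggests the index the hardware reports is expected to start at 1 before it indexes rqe_wr_id_tbl. A standalone sketch of that extraction, using an invented field layout rather than the driver's real masks:

```c
#include <stdint.h>
#include <stdio.h>
#include <assert.h>

/* Invented layout: the low 16 bits of the (already byte-swapped) CQE
 * word carry the SRQ WQE index.  The real driver masks and shifts
 * cqe->rq.buftag_qpn with its own macros. */
#define FAKE_SRQ_WQE_IDX_MASK 0xffffu

static uint32_t srq_wqe_idx_from_cqe(uint32_t cqe_word_host_order)
{
	uint32_t wqe_idx = cqe_word_host_order & FAKE_SRQ_WQE_IDX_MASK;

	assert(wqe_idx >= 1);   /* never 0, as the BUG_ON()s above imply */
	return wqe_idx;
}

int main(void)
{
	uint64_t rqe_wr_id_tbl[16] = { 0 };

	rqe_wr_id_tbl[3] = 0x77;   /* wr_id recorded when the RQE was posted */
	printf("wr_id = 0x%llx\n",
	       (unsigned long long)rqe_wr_id_tbl[srq_wqe_idx_from_cqe(0xbeef0003)]);
	return 0;
}
```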
|
/drivers/net/ethernet/huawei/hinic3/
hinic3_rx.c
      63  static void rq_wqe_buf_set(struct hinic3_io_queue *rq, uint32_t wqe_idx,      in rq_wqe_buf_set() (argument)
      68  rq_wqe = get_q_element(&rq->wq.qpages, wqe_idx, NULL);      in rq_wqe_buf_set()
|
/drivers/infiniband/hw/hns/
hns_roce_hw_v2.c
     699  unsigned int wqe_idx;                              in hns_roce_v2_post_send() (local)
     733  qp->sq.wrid[wqe_idx] = wr->wr_id;                  in hns_roce_v2_post_send()
     808  u32 wqe_idx, u32 max_sge)                          in fill_rq_wqe() (argument)
     826  u32 wqe_idx, nreq, max_sge;                        in hns_roce_v2_post_recv() (local)
     932  *wqe_idx = pos;                                    in get_srq_wqe_idx()
     945  *buf = cpu_to_le32(wqe_idx);                       in fill_wqe_idx()
     970  u32 wqe_idx;                                       in hns_roce_v2_post_srq_recv() (local)
     992  fill_wqe_idx(srq, wqe_idx);                        in hns_roce_v2_post_srq_recv()
     993  srq->wrid[wqe_idx] = wr->wr_id;                    in hns_roce_v2_post_srq_recv()
    3952  u16 wqe_idx;                                       in hns_roce_v2_poll_one() (local)
          [all …]
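The hns_roce_v2_post_srq_recv() hits pair three steps: get_srq_wqe_idx() picks a free slot, fill_wqe_idx() writes cpu_to_le32(wqe_idx) into a little-endian buffer the hardware reads, and srq->wrid[wqe_idx] records the caller's wr_id for the eventual completion. A simplified userspace model of that bookkeeping is below; the types, depth, and helper names are made up for illustration.

```c
#include <stdint.h>
#include <stdio.h>

#define SRQ_DEPTH 16   /* hypothetical, power of two */

struct fake_srq {
	uint32_t head;                 /* next free slot */
	uint64_t wrid[SRQ_DEPTH];      /* wr_id remembered per posted WQE */
	uint32_t idx_buf[SRQ_DEPTH];   /* models the index buffer written LE */
};

/* Post one receive: claim a slot, publish its index for the hardware,
 * and remember the wr_id so the poll path can hand it back later. */
static uint32_t post_srq_recv(struct fake_srq *srq, uint64_t wr_id)
{
	uint32_t wqe_idx = srq->head & (SRQ_DEPTH - 1);

	srq->head++;
	srq->idx_buf[wqe_idx] = wqe_idx;   /* fill_wqe_idx(): cpu_to_le32() in the driver */
	srq->wrid[wqe_idx] = wr_id;        /* srq->wrid[wqe_idx] = wr->wr_id */
	return wqe_idx;
}

int main(void)
{
	struct fake_srq srq = { 0 };
	uint32_t idx = post_srq_recv(&srq, 0x42);

	printf("completion for wqe_idx %u returns wr_id 0x%llx\n",
	       idx, (unsigned long long)srq.wrid[idx]);
	return 0;
}
```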
|
/drivers/infiniband/hw/bnxt_re/
qplib_fp.c
    1881  u32 wqe_idx, slots, idx;                           in bnxt_qplib_post_send() (local)
    1907  swq = bnxt_qplib_get_swqe(sq, &wqe_idx);           in bnxt_qplib_post_send()
    2105  bnxt_qplib_swq_mod_start(sq, wqe_idx);             in bnxt_qplib_post_send()
    2142  u32 wqe_idx, idx;                                  in bnxt_qplib_post_recv() (local)
    2162  swq = bnxt_qplib_get_swqe(rq, &wqe_idx);           in bnxt_qplib_post_recv()
    2191  base_hdr->wr_id[0] = cpu_to_le32(wqe_idx);         in bnxt_qplib_post_recv()
    2193  bnxt_qplib_swq_mod_start(rq, wqe_idx);             in bnxt_qplib_post_recv()
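In bnxt_qplib_post_recv() the listing shows wqe_idx obtained from bnxt_qplib_get_swqe(), written into the hardware RQ WQE header as cpu_to_le32(wqe_idx) so the completion can point back at the right software slot, and then the software queue entry is marked started. A compact model of that post path, with stand-in structures rather than the driver's types:

```c
#include <stdint.h>
#include <stdio.h>

#define RQ_DEPTH 32   /* hypothetical queue depth */

struct fake_swq { uint64_t wr_id; int in_use; };

struct fake_rq {
	uint32_t prod;                    /* producer index */
	struct fake_swq swq[RQ_DEPTH];    /* software tracking entries */
	uint32_t hw_wqe_tag[RQ_DEPTH];    /* models base_hdr->wr_id[0] */
};

/* Post a receive: reserve a software queue entry and tag the hardware
 * WQE with its index so the CQE can lead back to the right swq slot. */
static uint32_t post_recv(struct fake_rq *rq, uint64_t wr_id)
{
	uint32_t wqe_idx = rq->prod++ % RQ_DEPTH;   /* bnxt_qplib_get_swqe() */

	rq->swq[wqe_idx].wr_id = wr_id;
	rq->hw_wqe_tag[wqe_idx] = wqe_idx;          /* cpu_to_le32() in the driver */
	rq->swq[wqe_idx].in_use = 1;                /* bnxt_qplib_swq_mod_start() */
	return wqe_idx;
}

int main(void)
{
	struct fake_rq rq = { 0 };
	uint32_t idx = post_recv(&rq, 0xfeed);

	printf("slot %u holds wr_id 0x%llx\n",
	       idx, (unsigned long long)rq.swq[idx].wr_id);
	return 0;
}
```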
|