Searched refs:mlx5_wqe_data_seg (Results 1 – 20 of 20) sorted by relevance
153 struct mlx5_wqe_data_seg *dseg = in mlx5e_xdp_mpwqe_add_dseg()
154 (struct mlx5_wqe_data_seg *)session->wqe + session->ds_count; in mlx5e_xdp_mpwqe_add_dseg()
396 struct mlx5_wqe_data_seg *dseg; in mlx5e_xmit_xdp_frame()
790 int sz = sizeof(struct mlx5_wqe_data_seg) * ndsegs; in mlx5e_get_rqwq_log_stride()
222 srq->msrq.max_gs * sizeof(struct mlx5_wqe_data_seg); in mlx5_ib_create_srq()
232 sizeof(struct mlx5_wqe_data_seg); in mlx5_ib_create_srq()
407 struct mlx5_wqe_data_seg *scat; in mlx5_ib_post_srq_recv()
440 scat = (struct mlx5_wqe_data_seg *)(next + 1); in mlx5_ib_post_srq_recv()
87 struct mlx5_wqe_data_seg data_seg;
111 static void set_data_ptr_seg(struct mlx5_wqe_data_seg *dseg, struct ib_sge *sg) in set_data_ptr_seg()
217 static void set_reg_data_seg(struct mlx5_wqe_data_seg *dseg, in set_reg_data_seg()
686 *seg += sizeof(struct mlx5_wqe_data_seg); in set_reg_wr()
687 *size += (sizeof(struct mlx5_wqe_data_seg) / 16); in set_reg_wr()
1177 (struct mlx5_wqe_data_seg *)seg, in mlx5_ib_post_send()
1179 size += sizeof(struct mlx5_wqe_data_seg) / 16; in mlx5_ib_post_send()
1180 seg += sizeof(struct mlx5_wqe_data_seg); in mlx5_ib_post_send()
1210 struct mlx5_wqe_data_seg *scat; in mlx5_ib_post_recv()
233 sizeof(struct mlx5_wqe_data_seg); in mlx5r_umr_post_send()
552 mlx5r_umr_set_update_xlt_data_seg(struct mlx5_wqe_data_seg *data_seg, in mlx5r_umr_set_update_xlt_data_seg()
454 sizeof(struct mlx5_wqe_data_seg) < in set_rq_size()
459 sizeof(struct mlx5_wqe_data_seg) - in set_rq_size()
466 wqe_size += cap->max_recv_sge * sizeof(struct mlx5_wqe_data_seg); in set_rq_size()
481 sizeof(struct mlx5_wqe_data_seg) - in set_rq_size()
556 size += attr->cap.max_send_sge * sizeof(struct mlx5_wqe_data_seg); in calc_send_wqe()
572 sizeof(struct mlx5_wqe_data_seg); in get_send_sge()
578 sizeof(struct mlx5_wqe_data_seg); in get_send_sge()
581 sizeof(struct mlx5_wqe_data_seg); in get_send_sge()
584 sizeof(struct mlx5_wqe_data_seg)); in get_send_sge()
1000 struct mlx5_wqe_data_seg *dseg = wqe; in pagefault_data_segments()
958 sizeof(struct mlx5_wqe_data_seg); in mlx5_ib_query_device()
962 sizeof(struct mlx5_wqe_data_seg); in mlx5_ib_query_device()
112 struct mlx5_wqe_data_seg data[];
179 struct mlx5_wqe_data_seg *dseg) in mlx5e_txwqe_build_dsegs()
420 struct mlx5_wqe_data_seg *dseg; in mlx5e_sq_xmit_wqe()
542 struct mlx5_wqe_data_seg *dseg; in mlx5e_tx_mpwqe_add_dseg()
544 dseg = (struct mlx5_wqe_data_seg *)session->wqe + session->ds_count; in mlx5e_tx_mpwqe_add_dseg()
968 struct mlx5_wqe_data_seg *dseg; in mlx5i_sq_xmit()
241 struct mlx5_wqe_data_seg data[];
246 struct mlx5_wqe_data_seg data[];
250 DECLARE_FLEX_ARRAY(struct mlx5_wqe_data_seg, data);
1842 struct mlx5_wqe_data_seg *dseg; in mlx5e_open_xdpsq()
1852 dseg = (struct mlx5_wqe_data_seg *)cseg + (ds_cnt - 1); in mlx5e_open_xdpsq()
360 struct mlx5_wqe_data_seg { struct
11 struct mlx5_wqe_data_seg data;
716 struct mlx5_wqe_data_seg *dseg; in tx_post_resync_dump()
102 struct mlx5_wqe_data_seg *data; in mlx5_fpga_conn_post_recv()
145 struct mlx5_wqe_data_seg *data; in mlx5_fpga_conn_post_send()
371 struct mlx5_wqe_data_seg *wq_dseg; in dr_rdma_segments()
1150 struct mlx5_wqe_data_seg *data; in mlx5vf_post_recv()
390 num_ent * sizeof(struct mlx5_wqe_data_seg), &vqp->frag_buf, in rq_buf_alloc()
Completed in 66 milliseconds