Lines Matching refs:wqe_req

112 tp->wqe_req.sgl[0].address = ash->dma_handle[0]; in mana_map_skb()
113 tp->wqe_req.sgl[0].mem_key = gd->gpa_mkey; in mana_map_skb()
114 tp->wqe_req.sgl[0].size = ash->size[0]; in mana_map_skb()
127 tp->wqe_req.sgl[i + 1].address = ash->dma_handle[i + 1]; in mana_map_skb()
128 tp->wqe_req.sgl[i + 1].mem_key = gd->gpa_mkey; in mana_map_skb()
129 tp->wqe_req.sgl[i + 1].size = ash->size[i + 1]; in mana_map_skb()
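The mana_map_skb() hits above fill one scatter/gather entry per skb segment: entry 0 covers the linear head, entries 1..nr_frags cover the page fragments, and every entry reuses the device's GPA memory key. Below is a minimal sketch of that pattern, assuming the usual dma_map_single()/skb_frag_dma_map() mapping calls; the function signature, the ash shadow-state derivation, and the error unwinding are simplified assumptions, not the driver's exact code.

/* Hypothetical reconstruction of the mapping around lines 112-129; the
 * real function also unwinds earlier mappings on failure. */
static int mana_map_skb_sketch(struct sk_buff *skb, struct device *dev,
			       struct gdma_dev *gd, struct mana_tx_package *tp)
{
	/* Assumed: per-skb shadow state holding the dma handles/sizes
	 * referenced as ash-> in the lines above. */
	struct mana_skb_head *ash = (struct mana_skb_head *)skb->head;
	skb_frag_t *frag;
	dma_addr_t da;
	int i;

	/* SGE 0: the linear part of the skb. */
	da = dma_map_single(dev, skb->data, skb_headlen(skb), DMA_TO_DEVICE);
	if (dma_mapping_error(dev, da))
		return -ENOMEM;

	ash->dma_handle[0] = da;
	ash->size[0] = skb_headlen(skb);

	tp->wqe_req.sgl[0].address = ash->dma_handle[0];
	tp->wqe_req.sgl[0].mem_key = gd->gpa_mkey;	/* one mkey for all SGEs */
	tp->wqe_req.sgl[0].size = ash->size[0];

	/* SGEs 1..nr_frags: one entry per page fragment. */
	for (i = 0; i < skb_shinfo(skb)->nr_frags; i++) {
		frag = &skb_shinfo(skb)->frags[i];
		da = skb_frag_dma_map(dev, frag, 0, skb_frag_size(frag),
				      DMA_TO_DEVICE);
		if (dma_mapping_error(dev, da))
			return -ENOMEM;	/* real code unmaps what was mapped */

		ash->dma_handle[i + 1] = da;
		ash->size[i + 1] = skb_frag_size(frag);

		tp->wqe_req.sgl[i + 1].address = ash->dma_handle[i + 1];
		tp->wqe_req.sgl[i + 1].mem_key = gd->gpa_mkey;
		tp->wqe_req.sgl[i + 1].size = ash->size[i + 1];
	}

	return 0;
}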
183 pkg.wqe_req.inline_oob_size = sizeof(struct mana_tx_short_oob); in mana_start_xmit()
185 pkg.wqe_req.inline_oob_size = sizeof(struct mana_tx_oob); in mana_start_xmit()
187 pkg.wqe_req.inline_oob_data = &pkg.tx_oob; in mana_start_xmit()
188 pkg.wqe_req.flags = 0; in mana_start_xmit()
189 pkg.wqe_req.client_data_unit = 0; in mana_start_xmit()
191 pkg.wqe_req.num_sge = 1 + skb_shinfo(skb)->nr_frags; in mana_start_xmit()
192 WARN_ON_ONCE(pkg.wqe_req.num_sge > MAX_TX_WQE_SGL_ENTRIES); in mana_start_xmit()
194 if (pkg.wqe_req.num_sge <= ARRAY_SIZE(pkg.sgl_array)) { in mana_start_xmit()
195 pkg.wqe_req.sgl = pkg.sgl_array; in mana_start_xmit()
197 pkg.sgl_ptr = kmalloc_array(pkg.wqe_req.num_sge, in mana_start_xmit()
203 pkg.wqe_req.sgl = pkg.sgl_ptr; in mana_start_xmit()
219 pkg.wqe_req.client_data_unit = skb_shinfo(skb)->gso_size; in mana_start_xmit()
220 pkg.wqe_req.flags = GDMA_WR_OOB_IN_SGL | GDMA_WR_PAD_BY_SGE0; in mana_start_xmit()
265 err = mana_gd_post_work_request(gdma_sq, &pkg.wqe_req, in mana_start_xmit()
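The mana_start_xmit() hits show the transmit request being assembled: the inline OOB size depends on whether the short or full TX OOB is needed, num_sge counts the linear head plus every fragment, a heap SGL is allocated only when the on-stack array is too small, and LSO packets carry the MSS in client_data_unit with the OOB folded into SGE 0. Below is a condensed sketch of that flow; the helper name, the long_oob parameter, and the kmalloc_array() element size and gfp flags (elided in the listing) are assumptions, and OOB field population plus error unwinding are omitted.

/* Hypothetical helper condensing the wqe_req setup matched above. */
static int mana_build_tx_wqe_req_sketch(struct sk_buff *skb,
					struct mana_tx_package *pkg,
					bool long_oob)
{
	/* Short inline OOB unless the packet needs the long TX OOB. */
	if (long_oob)
		pkg->wqe_req.inline_oob_size = sizeof(struct mana_tx_oob);
	else
		pkg->wqe_req.inline_oob_size = sizeof(struct mana_tx_short_oob);

	pkg->wqe_req.inline_oob_data = &pkg->tx_oob;
	pkg->wqe_req.flags = 0;
	pkg->wqe_req.client_data_unit = 0;

	/* One SGE for the linear head plus one per page fragment. */
	pkg->wqe_req.num_sge = 1 + skb_shinfo(skb)->nr_frags;
	WARN_ON_ONCE(pkg->wqe_req.num_sge > MAX_TX_WQE_SGL_ENTRIES);

	if (pkg->wqe_req.num_sge <= ARRAY_SIZE(pkg->sgl_array)) {
		/* Common case: the embedded SGL array is big enough. */
		pkg->wqe_req.sgl = pkg->sgl_array;
	} else {
		/* Oversized SGL: fall back to a heap allocation (element
		 * size and gfp flags assumed here). */
		pkg->sgl_ptr = kmalloc_array(pkg->wqe_req.num_sge,
					     sizeof(struct gdma_sge),
					     GFP_ATOMIC);
		if (!pkg->sgl_ptr)
			return -ENOMEM;
		pkg->wqe_req.sgl = pkg->sgl_ptr;
	}

	if (skb_is_gso(skb)) {
		/* LSO: pass the MSS and let SGE 0 carry the inline OOB. */
		pkg->wqe_req.client_data_unit = skb_shinfo(skb)->gso_size;
		pkg->wqe_req.flags = GDMA_WR_OOB_IN_SGL | GDMA_WR_PAD_BY_SGE0;
	}

	return 0;
}

mana_map_skb() (lines 112-129 above) then fills the SGL entries before the finished request is handed to mana_gd_post_work_request() at line 265.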
1135 err = mana_gd_post_and_ring(rxq->gdma_rq, &recv_buf_oob->wqe_req, in mana_post_pkt_rxq()
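The single hit in mana_post_pkt_rxq() is the refill path: once a received buffer has been processed, its pre-built wqe_req is posted straight back onto the receive queue and the doorbell is rung. A minimal sketch follows; the wqe_inf field stands in for the third argument elided in the listing.

/* Hypothetical shape of the repost path; the wqe_req built once per
 * receive buffer is reused every time the buffer goes back on the queue. */
static void mana_post_pkt_rxq_sketch(struct mana_rxq *rxq,
				     struct mana_recv_buf_oob *recv_buf_oob)
{
	int err;

	err = mana_gd_post_and_ring(rxq->gdma_rq, &recv_buf_oob->wqe_req,
				    &recv_buf_oob->wqe_inf);
	WARN_ON_ONCE(err);
}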
1670 rx_oob->wqe_req.sgl = rx_oob->sgl; in mana_alloc_rx_wqe()
1671 rx_oob->wqe_req.num_sge = rx_oob->num_sge; in mana_alloc_rx_wqe()
1672 rx_oob->wqe_req.inline_oob_size = 0; in mana_alloc_rx_wqe()
1673 rx_oob->wqe_req.inline_oob_data = NULL; in mana_alloc_rx_wqe()
1674 rx_oob->wqe_req.flags = 0; in mana_alloc_rx_wqe()
1675 rx_oob->wqe_req.client_data_unit = 0; in mana_alloc_rx_wqe()
1694 err = mana_gd_post_and_ring(rxq->gdma_rq, &rx_oob->wqe_req, in mana_push_wqe()
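Finally, the mana_alloc_rx_wqe() and mana_push_wqe() hits show the receive request being built once per buffer and then pushed to the hardware queue: the SGL and SGE count come from the pre-mapped receive buffer, receive requests carry no inline OOB, and each finished request is posted with the doorbell rung. A sketch under those assumptions follows; the rx_oobs array, num_rx_buf count, and wqe_inf field are hypothetical names for details not shown above.

/* Per receive buffer, set up once as in the mana_alloc_rx_wqe() lines: */
static void mana_init_rx_wqe_req_sketch(struct mana_recv_buf_oob *rx_oob)
{
	rx_oob->wqe_req.sgl = rx_oob->sgl;	/* pre-mapped buffer SGEs */
	rx_oob->wqe_req.num_sge = rx_oob->num_sge;
	rx_oob->wqe_req.inline_oob_size = 0;	/* RX carries no inline OOB */
	rx_oob->wqe_req.inline_oob_data = NULL;
	rx_oob->wqe_req.flags = 0;
	rx_oob->wqe_req.client_data_unit = 0;
}

/* Every prepared request is then posted, as in the mana_push_wqe() line: */
static int mana_push_wqe_sketch(struct mana_rxq *rxq)
{
	struct mana_recv_buf_oob *rx_oob;
	u32 buf_idx;
	int err;

	for (buf_idx = 0; buf_idx < rxq->num_rx_buf; buf_idx++) {
		rx_oob = &rxq->rx_oobs[buf_idx];

		err = mana_gd_post_and_ring(rxq->gdma_rq, &rx_oob->wqe_req,
					    &rx_oob->wqe_inf);
		if (err)
			return err;
	}

	return 0;
}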