Lines matching refs:tx_buf (references to struct ice_tx_buf in the ice driver's Tx path)
36 struct ice_tx_buf *tx_buf, *first; in ice_prgm_fdir_fltr() local
68 first = &tx_ring->tx_buf[i]; in ice_prgm_fdir_fltr()
75 tx_buf = &tx_ring->tx_buf[i]; in ice_prgm_fdir_fltr()
80 memset(tx_buf, 0, sizeof(*tx_buf)); in ice_prgm_fdir_fltr()
81 dma_unmap_len_set(tx_buf, len, ICE_FDIR_MAX_RAW_PKT_SIZE); in ice_prgm_fdir_fltr()
82 dma_unmap_addr_set(tx_buf, dma, dma); in ice_prgm_fdir_fltr()
88 tx_buf->type = ICE_TX_BUF_DUMMY; in ice_prgm_fdir_fltr()
89 tx_buf->raw_buf = raw_packet; in ice_prgm_fdir_fltr()
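
The matches above are the Flow Director programming path: ice_prgm_fdir_fltr() claims ring slots, records the DMA address and length on the buffer, and tags it as a driver-owned dummy raw packet so the control-queue cleanup later knows how to free it. Below is a minimal sketch of that bookkeeping, assuming a kernel build context; struct demo_tx_buf, its enum, and the helper are illustrative stand-ins for struct ice_tx_buf, not the driver's actual definitions. The later sketches in this listing reuse these demo types and includes.

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/skbuff.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <net/xdp.h>

/* Cut-down, illustrative stand-in for struct ice_tx_buf. */
enum demo_tx_buf_type {
        DEMO_TX_BUF_EMPTY,      /* slot holds nothing to free */
        DEMO_TX_BUF_DUMMY,      /* driver-allocated raw packet (FDIR) */
        DEMO_TX_BUF_SKB,        /* regular stack skb */
        DEMO_TX_BUF_XDP_TX,     /* page-frag backed XDP_TX buffer */
        DEMO_TX_BUF_XDP_XMIT,   /* redirected xdp_frame */
};

struct demo_tx_buf {
        void *next_to_watch;            /* descriptor that completes the frame */
        union {
                void *raw_buf;
                struct sk_buff *skb;
                struct xdp_frame *xdpf;
        };
        enum demo_tx_buf_type type;
        DEFINE_DMA_UNMAP_ADDR(dma);     /* kept only on arches that need it */
        DEFINE_DMA_UNMAP_LEN(len);
};

/* Record a freshly mapped FDIR raw packet on a ring slot. */
static void demo_record_fdir_buf(struct demo_tx_buf *tx_buf,
                                 dma_addr_t dma_addr, u32 size,
                                 void *raw_packet)
{
        memset(tx_buf, 0, sizeof(*tx_buf));
        dma_unmap_len_set(tx_buf, len, size);
        dma_unmap_addr_set(tx_buf, dma, dma_addr);
        tx_buf->type = DEMO_TX_BUF_DUMMY;
        tx_buf->raw_buf = raw_packet;
}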
113 ice_unmap_and_free_tx_buf(struct ice_tx_ring *ring, struct ice_tx_buf *tx_buf) in ice_unmap_and_free_tx_buf() argument
115 if (dma_unmap_len(tx_buf, len)) in ice_unmap_and_free_tx_buf()
117 dma_unmap_addr(tx_buf, dma), in ice_unmap_and_free_tx_buf()
118 dma_unmap_len(tx_buf, len), in ice_unmap_and_free_tx_buf()
121 switch (tx_buf->type) { in ice_unmap_and_free_tx_buf()
123 devm_kfree(ring->dev, tx_buf->raw_buf); in ice_unmap_and_free_tx_buf()
126 dev_kfree_skb_any(tx_buf->skb); in ice_unmap_and_free_tx_buf()
129 page_frag_free(tx_buf->raw_buf); in ice_unmap_and_free_tx_buf()
132 xdp_return_frame(tx_buf->xdpf); in ice_unmap_and_free_tx_buf()
136 tx_buf->next_to_watch = NULL; in ice_unmap_and_free_tx_buf()
137 tx_buf->type = ICE_TX_BUF_EMPTY; in ice_unmap_and_free_tx_buf()
138 dma_unmap_len_set(tx_buf, len, 0); in ice_unmap_and_free_tx_buf()
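
ice_unmap_and_free_tx_buf() is the common release path: unmap whatever DMA the slot recorded, free the payload according to the buffer type, then mark the slot empty. A sketch of that shape, reusing the demo types above; the case labels mirror the frees visible in the matches, and the dma_unmap_single() call is an assumption, since the listing does not show which dma_unmap_* variant the driver uses.

/* Unmap (if mapped) and free one slot, dispatching on the buffer type. */
static void demo_unmap_and_free_buf(struct device *dev,
                                    struct demo_tx_buf *tx_buf)
{
        if (dma_unmap_len(tx_buf, len))
                dma_unmap_single(dev,
                                 dma_unmap_addr(tx_buf, dma),
                                 dma_unmap_len(tx_buf, len),
                                 DMA_TO_DEVICE);

        switch (tx_buf->type) {
        case DEMO_TX_BUF_DUMMY:
                devm_kfree(dev, tx_buf->raw_buf);       /* FDIR raw packet */
                break;
        case DEMO_TX_BUF_SKB:
                dev_kfree_skb_any(tx_buf->skb);         /* stack traffic */
                break;
        case DEMO_TX_BUF_XDP_TX:
                page_frag_free(tx_buf->raw_buf);        /* XDP_TX page frag */
                break;
        case DEMO_TX_BUF_XDP_XMIT:
                xdp_return_frame(tx_buf->xdpf);         /* redirected frame */
                break;
        default:
                break;
        }

        tx_buf->next_to_watch = NULL;
        tx_buf->type = DEMO_TX_BUF_EMPTY;
        dma_unmap_len_set(tx_buf, len, 0);
}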
162 if (!tx_ring->tx_buf) in ice_clean_tx_ring()
167 ice_unmap_and_free_tx_buf(tx_ring, &tx_ring->tx_buf[i]); in ice_clean_tx_ring()
170 memset(tx_ring->tx_buf, 0, sizeof(*tx_ring->tx_buf) * tx_ring->count); in ice_clean_tx_ring()
198 devm_kfree(tx_ring->dev, tx_ring->tx_buf); in ice_free_tx_ring()
199 tx_ring->tx_buf = NULL; in ice_free_tx_ring()
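
ice_clean_tx_ring() walks every slot through the release helper and zeroes the bookkeeping array, and ice_free_tx_ring() then hands the array back to devres and clears the pointer. The sketch below folds both steps into one helper for brevity (the driver keeps them separate) and adds an illustrative ring container, demo_tx_ring, which the remaining sketches also use.

/* Illustrative ring container, not the driver's struct ice_tx_ring. */
struct demo_tx_ring {
        struct device *dev;
        struct demo_tx_buf *tx_buf;     /* one entry per descriptor */
        u16 count;                      /* number of descriptors/slots */
};

/* Release every buffer, then release the bookkeeping array itself. */
static void demo_clean_and_free_ring(struct demo_tx_ring *ring)
{
        u16 i;

        if (!ring->tx_buf)
                return;

        for (i = 0; i < ring->count; i++)
                demo_unmap_and_free_buf(ring->dev, &ring->tx_buf[i]);
        memset(ring->tx_buf, 0, sizeof(*ring->tx_buf) * ring->count);

        devm_kfree(ring->dev, ring->tx_buf);
        ring->tx_buf = NULL;
}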
224 struct ice_tx_buf *tx_buf; in ice_clean_tx_irq() local
229 tx_buf = &tx_ring->tx_buf[i]; in ice_clean_tx_irq()
236 struct ice_tx_desc *eop_desc = tx_buf->next_to_watch; in ice_clean_tx_irq()
243 prefetchw(&tx_buf->skb->users); in ice_clean_tx_irq()
247 ice_trace(clean_tx_irq, tx_ring, tx_desc, tx_buf); in ice_clean_tx_irq()
254 tx_buf->next_to_watch = NULL; in ice_clean_tx_irq()
257 total_bytes += tx_buf->bytecount; in ice_clean_tx_irq()
258 total_pkts += tx_buf->gso_segs; in ice_clean_tx_irq()
261 napi_consume_skb(tx_buf->skb, napi_budget); in ice_clean_tx_irq()
265 dma_unmap_addr(tx_buf, dma), in ice_clean_tx_irq()
266 dma_unmap_len(tx_buf, len), in ice_clean_tx_irq()
270 tx_buf->type = ICE_TX_BUF_EMPTY; in ice_clean_tx_irq()
271 dma_unmap_len_set(tx_buf, len, 0); in ice_clean_tx_irq()
275 ice_trace(clean_tx_irq_unmap, tx_ring, tx_desc, tx_buf); in ice_clean_tx_irq()
276 tx_buf++; in ice_clean_tx_irq()
281 tx_buf = tx_ring->tx_buf; in ice_clean_tx_irq()
286 if (dma_unmap_len(tx_buf, len)) { in ice_clean_tx_irq()
288 dma_unmap_addr(tx_buf, dma), in ice_clean_tx_irq()
289 dma_unmap_len(tx_buf, len), in ice_clean_tx_irq()
291 dma_unmap_len_set(tx_buf, len, 0); in ice_clean_tx_irq()
294 ice_trace(clean_tx_irq_unmap_eop, tx_ring, tx_desc, tx_buf); in ice_clean_tx_irq()
297 tx_buf++; in ice_clean_tx_irq()
302 tx_buf = tx_ring->tx_buf; in ice_clean_tx_irq()
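
The ice_clean_tx_irq() matches show the completion walk: per-packet byte and segment counters plus napi_consume_skb() on the head buffer, a DMA unmap for every fragment slot whose length is set, and a cursor that snaps back to the start of the tx_buf array whenever it steps past the last ring entry. That wrap is the distinctive idiom; here is a sketch of it as an illustrative helper rather than the driver's actual loop (which also advances a descriptor cursor in lockstep, as the ice_trace() matches suggest).

/* Advance the completion cursor one slot, wrapping at the end of the ring. */
static struct demo_tx_buf *demo_next_buf(struct demo_tx_ring *ring, u16 *i,
                                         struct demo_tx_buf *tx_buf)
{
        tx_buf++;
        (*i)++;
        if (*i == ring->count) {
                *i = 0;
                tx_buf = ring->tx_buf;  /* wrap back to the ring base */
        }
        return tx_buf;
}

In the completion path this would run after handling the head buffer (bytecount/gso_segs accounting and napi_consume_skb()) and again after each fragment unmap, until the descriptor recorded in next_to_watch has been reached.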
350 WARN_ON(tx_ring->tx_buf); in ice_setup_tx_ring()
351 tx_ring->tx_buf = in ice_setup_tx_ring()
352 devm_kcalloc(dev, sizeof(*tx_ring->tx_buf), tx_ring->count, in ice_setup_tx_ring()
354 if (!tx_ring->tx_buf) in ice_setup_tx_ring()
374 devm_kfree(dev, tx_ring->tx_buf); in ice_setup_tx_ring()
375 tx_ring->tx_buf = NULL; in ice_setup_tx_ring()
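
ice_setup_tx_ring() allocates one tx_buf element per descriptor as a device-managed array, warning if an existing array would be overwritten and leaked; the later matches (devm_kfree(dev, tx_ring->tx_buf); tx_ring->tx_buf = NULL;) are the mirror-image error path taken when a subsequent allocation in setup fails. A sketch of the allocation with the illustrative demo_tx_ring:

/* Allocate the per-descriptor bookkeeping array for a ring. */
static int demo_setup_ring_bufs(struct demo_tx_ring *ring)
{
        /* warn if we are about to overwrite (and leak) an existing array */
        WARN_ON(ring->tx_buf);
        ring->tx_buf = devm_kcalloc(ring->dev, ring->count,
                                    sizeof(*ring->tx_buf), GFP_KERNEL);
        if (!ring->tx_buf)
                return -ENOMEM;

        /* ...descriptor memory and ring state would be set up here... */

        return 0;
}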
647 struct ice_tx_buf *tx_buf; in ice_xdp_xmit() local
670 tx_buf = &xdp_ring->tx_buf[xdp_ring->next_to_use]; in ice_xdp_xmit()
681 tx_buf->rs_idx = ice_set_rs_bit(xdp_ring); in ice_xdp_xmit()
1648 struct ice_tx_buf *tx_buf; in ice_tx_map() local
1672 tx_buf = first; in ice_tx_map()
1681 dma_unmap_len_set(tx_buf, len, size); in ice_tx_map()
1682 dma_unmap_addr_set(tx_buf, dma, dma); in ice_tx_map()
1731 tx_buf = &tx_ring->tx_buf[i]; in ice_tx_map()
1732 tx_buf->type = ICE_TX_BUF_FRAG; in ice_tx_map()
1774 tx_buf = &tx_ring->tx_buf[i]; in ice_tx_map()
1775 ice_unmap_and_free_tx_buf(tx_ring, tx_buf); in ice_tx_map()
1776 if (tx_buf == first) in ice_tx_map()
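
In ice_tx_map(), the first slot of a frame carries the skb while every following slot is tagged ICE_TX_BUF_FRAG and records its own DMA mapping; when a mapping fails part-way through, the driver walks back over the slots it already claimed and releases them until it reaches the first buffer again. A sketch of that unwind, reusing the demo helpers above:

/* Undo a partially mapped frame: free every claimed slot back to 'first'. */
static void demo_unwind_frame(struct demo_tx_ring *ring, u16 i,
                              struct demo_tx_buf *first)
{
        for (;;) {
                struct demo_tx_buf *tx_buf = &ring->tx_buf[i];

                demo_unmap_and_free_buf(ring->dev, tx_buf);
                if (tx_buf == first)
                        break;          /* whole frame has been undone */
                if (i == 0)
                        i = ring->count;
                i--;
        }
}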
2377 first = &tx_ring->tx_buf[tx_ring->next_to_use]; in ice_xmit_frame_ring()
2510 struct ice_tx_buf *tx_buf; in ice_clean_ctrl_tx_irq() local
2512 tx_buf = &tx_ring->tx_buf[i]; in ice_clean_ctrl_tx_irq()
2517 struct ice_tx_desc *eop_desc = tx_buf->next_to_watch; in ice_clean_ctrl_tx_irq()
2532 tx_buf->next_to_watch = NULL; in ice_clean_ctrl_tx_irq()
2537 tx_buf++; in ice_clean_ctrl_tx_irq()
2542 tx_buf = tx_ring->tx_buf; in ice_clean_ctrl_tx_irq()
2547 if (dma_unmap_len(tx_buf, len)) in ice_clean_ctrl_tx_irq()
2549 dma_unmap_addr(tx_buf, dma), in ice_clean_ctrl_tx_irq()
2550 dma_unmap_len(tx_buf, len), in ice_clean_ctrl_tx_irq()
2552 if (tx_buf->type == ICE_TX_BUF_DUMMY) in ice_clean_ctrl_tx_irq()
2553 devm_kfree(tx_ring->dev, tx_buf->raw_buf); in ice_clean_ctrl_tx_irq()
2556 tx_buf->type = ICE_TX_BUF_EMPTY; in ice_clean_ctrl_tx_irq()
2557 tx_buf->tx_flags = 0; in ice_clean_ctrl_tx_irq()
2558 tx_buf->next_to_watch = NULL; in ice_clean_ctrl_tx_irq()
2559 dma_unmap_len_set(tx_buf, len, 0); in ice_clean_ctrl_tx_irq()
2564 tx_buf++; in ice_clean_ctrl_tx_irq()
2569 tx_buf = tx_ring->tx_buf; in ice_clean_ctrl_tx_irq()
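
ice_clean_ctrl_tx_irq() is the completion side of the Flow Director path from the top of this listing: on the control queue each completed buffer is unmapped, its ICE_TX_BUF_DUMMY raw packet handed back to devres, and the slot reset (type, tx_flags, next_to_watch, unmap length) before the same wrap-around cursor walk continues. A compact sketch of that dummy-only release, again with the demo types (tx_flags is omitted because the stand-in struct does not model it):

/* Release one completed control-queue (FDIR) buffer and reset the slot. */
static void demo_clean_ctrl_buf(struct demo_tx_ring *ring,
                                struct demo_tx_buf *tx_buf)
{
        if (dma_unmap_len(tx_buf, len))
                dma_unmap_single(ring->dev,
                                 dma_unmap_addr(tx_buf, dma),
                                 dma_unmap_len(tx_buf, len),
                                 DMA_TO_DEVICE);
        if (tx_buf->type == DEMO_TX_BUF_DUMMY)
                devm_kfree(ring->dev, tx_buf->raw_buf);

        tx_buf->type = DEMO_TX_BUF_EMPTY;
        tx_buf->next_to_watch = NULL;
        dma_unmap_len_set(tx_buf, len, 0);
}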