Lines matching refs:tx_q in drivers/net/ethernet/stmicro/stmmac/stmmac_main.c

363 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[queue]; in stmmac_tx_avail() local
366 if (tx_q->dirty_tx > tx_q->cur_tx) in stmmac_tx_avail()
367 avail = tx_q->dirty_tx - tx_q->cur_tx - 1; in stmmac_tx_avail()
369 avail = priv->dma_conf.dma_tx_size - tx_q->cur_tx + tx_q->dirty_tx - 1; in stmmac_tx_avail()
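The two branches above are the standard circular-ring occupancy test: dirty_tx is the consumer (cleanup) index, cur_tx the producer index, and one slot is deliberately sacrificed so a full ring can be told apart from an empty one (cur_tx == dirty_tx always means empty). A minimal user-space model of the arithmetic, with ring_size standing in for priv->dma_conf.dma_tx_size:

    #include <assert.h>
    #include <stdio.h>

    /* Free slots in a ring with producer index cur and consumer index
     * dirty. One slot is reserved so cur == dirty always means "empty". */
    static unsigned int tx_avail(unsigned int dirty, unsigned int cur,
                                 unsigned int ring_size)
    {
            if (dirty > cur)
                    return dirty - cur - 1;
            return ring_size - cur + dirty - 1;
    }

    int main(void)
    {
            assert(tx_avail(0, 0, 512) == 511); /* empty ring */
            assert(tx_avail(5, 4, 512) == 0);   /* full: producer one behind */
            assert(tx_avail(0, 511, 512) == 0); /* full across the wrap */
            printf("avail after 3 posts: %u\n", tx_avail(0, 3, 512)); /* 508 */
            return 0;
    }

The stmmac_enable_eee_mode() matches just below rely on the same index pair: dirty_tx == cur_tx means the ring has fully drained, so the MAC may enter its low-power state.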
415 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[queue]; in stmmac_enable_eee_mode() local
417 if (tx_q->dirty_tx != tx_q->cur_tx) in stmmac_enable_eee_mode()
1283 struct stmmac_tx_queue *tx_q = &dma_conf->tx_queue[queue]; in stmmac_display_tx_rings() local
1288 head_tx = (void *)tx_q->dma_etx; in stmmac_display_tx_rings()
1290 } else if (tx_q->tbs & STMMAC_TBS_AVAIL) { in stmmac_display_tx_rings()
1291 head_tx = (void *)tx_q->dma_entx; in stmmac_display_tx_rings()
1294 head_tx = (void *)tx_q->dma_tx; in stmmac_display_tx_rings()
1299 tx_q->dma_tx_phy, desc_size); in stmmac_display_tx_rings()
1372 struct stmmac_tx_queue *tx_q = &dma_conf->tx_queue[queue]; in stmmac_clear_tx_descriptors() local
1381 p = &tx_q->dma_etx[i].basic; in stmmac_clear_tx_descriptors()
1382 else if (tx_q->tbs & STMMAC_TBS_AVAIL) in stmmac_clear_tx_descriptors()
1383 p = &tx_q->dma_entx[i].basic; in stmmac_clear_tx_descriptors()
1385 p = &tx_q->dma_tx[i]; in stmmac_clear_tx_descriptors()
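stmmac keeps three possible TX descriptor layouts per queue, and both functions above repeat the same three-way pick that recurs throughout this listing: extended descriptors (dma_etx) when priv->extend_desc is set, enhanced descriptors (dma_entx) when Time-Based Scheduling is available (tx_q->tbs & STMMAC_TBS_AVAIL), and basic descriptors (dma_tx) otherwise. A compilable sketch of that pick, with stand-in struct layouts and a hypothetical get_tx_desc() helper (the real driver open-codes the test at each site):

    #include <stdbool.h>
    #include <stdint.h>

    /* Stand-ins only: the real layouts live in the driver's descs.h. */
    struct dma_desc { uint32_t des0, des1, des2, des3; };
    struct dma_extended_desc { struct dma_desc basic; struct dma_desc ext; };
    struct dma_edesc { struct dma_desc basic; uint32_t et[4]; };

    #define STMMAC_TBS_AVAIL (1 << 0)

    struct stmmac_tx_queue {
            struct dma_extended_desc *dma_etx; /* extended descriptors */
            struct dma_edesc *dma_entx;        /* enhanced (TBS-capable) */
            struct dma_desc *dma_tx;           /* basic descriptors */
            int tbs;
    };

    static struct dma_desc *get_tx_desc(struct stmmac_tx_queue *tx_q,
                                        bool extend_desc, unsigned int i)
    {
            if (extend_desc)
                    return &tx_q->dma_etx[i].basic;
            if (tx_q->tbs & STMMAC_TBS_AVAIL)
                    return &tx_q->dma_entx[i].basic;
            return &tx_q->dma_tx[i];
    }

All three layouts begin with the basic descriptor, which is why every branch can hand back a struct dma_desc * and the rest of the code stays layout-agnostic.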
1497 struct stmmac_tx_queue *tx_q = &dma_conf->tx_queue[queue]; in stmmac_free_tx_buffer() local
1499 if (tx_q->tx_skbuff_dma[i].buf && in stmmac_free_tx_buffer()
1500 tx_q->tx_skbuff_dma[i].buf_type != STMMAC_TXBUF_T_XDP_TX) { in stmmac_free_tx_buffer()
1501 if (tx_q->tx_skbuff_dma[i].map_as_page) in stmmac_free_tx_buffer()
1503 tx_q->tx_skbuff_dma[i].buf, in stmmac_free_tx_buffer()
1504 tx_q->tx_skbuff_dma[i].len, in stmmac_free_tx_buffer()
1508 tx_q->tx_skbuff_dma[i].buf, in stmmac_free_tx_buffer()
1509 tx_q->tx_skbuff_dma[i].len, in stmmac_free_tx_buffer()
1513 if (tx_q->xdpf[i] && in stmmac_free_tx_buffer()
1514 (tx_q->tx_skbuff_dma[i].buf_type == STMMAC_TXBUF_T_XDP_TX || in stmmac_free_tx_buffer()
1515 tx_q->tx_skbuff_dma[i].buf_type == STMMAC_TXBUF_T_XDP_NDO)) { in stmmac_free_tx_buffer()
1516 xdp_return_frame(tx_q->xdpf[i]); in stmmac_free_tx_buffer()
1517 tx_q->xdpf[i] = NULL; in stmmac_free_tx_buffer()
1520 if (tx_q->tx_skbuff_dma[i].buf_type == STMMAC_TXBUF_T_XSK_TX) in stmmac_free_tx_buffer()
1521 tx_q->xsk_frames_done++; in stmmac_free_tx_buffer()
1523 if (tx_q->tx_skbuff[i] && in stmmac_free_tx_buffer()
1524 tx_q->tx_skbuff_dma[i].buf_type == STMMAC_TXBUF_T_SKB) { in stmmac_free_tx_buffer()
1525 dev_kfree_skb_any(tx_q->tx_skbuff[i]); in stmmac_free_tx_buffer()
1526 tx_q->tx_skbuff[i] = NULL; in stmmac_free_tx_buffer()
1529 tx_q->tx_skbuff_dma[i].buf = 0; in stmmac_free_tx_buffer()
1530 tx_q->tx_skbuff_dma[i].map_as_page = false; in stmmac_free_tx_buffer()
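stmmac_free_tx_buffer() dispatches on the per-entry buf_type: SKB and NDO-redirected XDP buffers were DMA-mapped by the driver and must be unmapped (page vs. single mapping chosen by map_as_page), XDP_TX buffers are only returned as frames, and XSK buffers are merely counted so the AF_XDP completion ring can be credited in one batch later. A condensed kernel-style sketch of the dispatch (not standalone-compilable; dma abbreviates &tx_q->tx_skbuff_dma[i], whose type is the driver's struct stmmac_tx_info):

    struct stmmac_tx_info *dma = &tx_q->tx_skbuff_dma[i];

    if (dma->buf && dma->buf_type != STMMAC_TXBUF_T_XDP_TX) {
            if (dma->map_as_page)
                    dma_unmap_page(priv->device, dma->buf, dma->len,
                                   DMA_TO_DEVICE);
            else
                    dma_unmap_single(priv->device, dma->buf, dma->len,
                                     DMA_TO_DEVICE);
    }

    if (tx_q->xdpf[i] &&
        (dma->buf_type == STMMAC_TXBUF_T_XDP_TX ||
         dma->buf_type == STMMAC_TXBUF_T_XDP_NDO)) {
            xdp_return_frame(tx_q->xdpf[i]); /* back to its memory model */
            tx_q->xdpf[i] = NULL;
    }

    if (dma->buf_type == STMMAC_TXBUF_T_XSK_TX)
            tx_q->xsk_frames_done++; /* batched via xsk_tx_completed() */

    if (tx_q->tx_skbuff[i] && dma->buf_type == STMMAC_TXBUF_T_SKB) {
            dev_kfree_skb_any(tx_q->tx_skbuff[i]); /* any-context free */
            tx_q->tx_skbuff[i] = NULL;
    }

    dma->buf = 0;
    dma->map_as_page = false;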
1762 struct stmmac_tx_queue *tx_q = &dma_conf->tx_queue[queue]; in __init_dma_tx_desc_rings() local
1767 (u32)tx_q->dma_tx_phy); in __init_dma_tx_desc_rings()
1772 stmmac_mode_init(priv, tx_q->dma_etx, in __init_dma_tx_desc_rings()
1773 tx_q->dma_tx_phy, in __init_dma_tx_desc_rings()
1775 else if (!(tx_q->tbs & STMMAC_TBS_AVAIL)) in __init_dma_tx_desc_rings()
1776 stmmac_mode_init(priv, tx_q->dma_tx, in __init_dma_tx_desc_rings()
1777 tx_q->dma_tx_phy, in __init_dma_tx_desc_rings()
1781 tx_q->xsk_pool = stmmac_get_xsk_pool(priv, queue); in __init_dma_tx_desc_rings()
1787 p = &((tx_q->dma_etx + i)->basic); in __init_dma_tx_desc_rings()
1788 else if (tx_q->tbs & STMMAC_TBS_AVAIL) in __init_dma_tx_desc_rings()
1789 p = &((tx_q->dma_entx + i)->basic); in __init_dma_tx_desc_rings()
1791 p = tx_q->dma_tx + i; in __init_dma_tx_desc_rings()
1795 tx_q->tx_skbuff_dma[i].buf = 0; in __init_dma_tx_desc_rings()
1796 tx_q->tx_skbuff_dma[i].map_as_page = false; in __init_dma_tx_desc_rings()
1797 tx_q->tx_skbuff_dma[i].len = 0; in __init_dma_tx_desc_rings()
1798 tx_q->tx_skbuff_dma[i].last_segment = false; in __init_dma_tx_desc_rings()
1799 tx_q->tx_skbuff[i] = NULL; in __init_dma_tx_desc_rings()
1860 struct stmmac_tx_queue *tx_q = &dma_conf->tx_queue[queue]; in dma_free_tx_skbufs() local
1863 tx_q->xsk_frames_done = 0; in dma_free_tx_skbufs()
1868 if (tx_q->xsk_pool && tx_q->xsk_frames_done) { in dma_free_tx_skbufs()
1869 xsk_tx_completed(tx_q->xsk_pool, tx_q->xsk_frames_done); in dma_free_tx_skbufs()
1870 tx_q->xsk_frames_done = 0; in dma_free_tx_skbufs()
1871 tx_q->xsk_pool = NULL; in dma_free_tx_skbufs()
1948 struct stmmac_tx_queue *tx_q = &dma_conf->tx_queue[queue]; in __free_dma_tx_desc_resources() local
1957 addr = tx_q->dma_etx; in __free_dma_tx_desc_resources()
1958 } else if (tx_q->tbs & STMMAC_TBS_AVAIL) { in __free_dma_tx_desc_resources()
1960 addr = tx_q->dma_entx; in __free_dma_tx_desc_resources()
1963 addr = tx_q->dma_tx; in __free_dma_tx_desc_resources()
1968 dma_free_coherent(priv->device, size, addr, tx_q->dma_tx_phy); in __free_dma_tx_desc_resources()
1970 kfree(tx_q->tx_skbuff_dma); in __free_dma_tx_desc_resources()
1971 kfree(tx_q->tx_skbuff); in __free_dma_tx_desc_resources()
2105 struct stmmac_tx_queue *tx_q = &dma_conf->tx_queue[queue]; in __alloc_dma_tx_desc_resources() local
2109 tx_q->queue_index = queue; in __alloc_dma_tx_desc_resources()
2110 tx_q->priv_data = priv; in __alloc_dma_tx_desc_resources()
2112 tx_q->tx_skbuff_dma = kcalloc(dma_conf->dma_tx_size, in __alloc_dma_tx_desc_resources()
2113 sizeof(*tx_q->tx_skbuff_dma), in __alloc_dma_tx_desc_resources()
2115 if (!tx_q->tx_skbuff_dma) in __alloc_dma_tx_desc_resources()
2118 tx_q->tx_skbuff = kcalloc(dma_conf->dma_tx_size, in __alloc_dma_tx_desc_resources()
2121 if (!tx_q->tx_skbuff) in __alloc_dma_tx_desc_resources()
2126 else if (tx_q->tbs & STMMAC_TBS_AVAIL) in __alloc_dma_tx_desc_resources()
2134 &tx_q->dma_tx_phy, GFP_KERNEL); in __alloc_dma_tx_desc_resources()
2139 tx_q->dma_etx = addr; in __alloc_dma_tx_desc_resources()
2140 else if (tx_q->tbs & STMMAC_TBS_AVAIL) in __alloc_dma_tx_desc_resources()
2141 tx_q->dma_entx = addr; in __alloc_dma_tx_desc_resources()
2143 tx_q->dma_tx = addr; in __alloc_dma_tx_desc_resources()
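The allocation path pairs two kinds of memory per queue: plain kernel arrays for software bookkeeping (tx_skbuff_dma[] and tx_skbuff[], via kcalloc()), and one coherent, physically contiguous block for the hardware-visible descriptor ring (dma_alloc_coherent(), which also returns the bus address kept in dma_tx_phy and later programmed into the DMA engine). A kernel-style sketch of the shape, with error unwinding elided:

    size_t size;
    void *addr;

    /* CPU-only bookkeeping: one slot per descriptor. */
    tx_q->tx_skbuff_dma = kcalloc(dma_conf->dma_tx_size,
                                  sizeof(*tx_q->tx_skbuff_dma), GFP_KERNEL);
    tx_q->tx_skbuff = kcalloc(dma_conf->dma_tx_size,
                              sizeof(struct sk_buff *), GFP_KERNEL);

    /* Descriptor width depends on the queue's mode (see the
     * three-way pick sketched earlier). */
    if (priv->extend_desc)
            size = sizeof(struct dma_extended_desc);
    else if (tx_q->tbs & STMMAC_TBS_AVAIL)
            size = sizeof(struct dma_edesc);
    else
            size = sizeof(struct dma_desc);
    size *= dma_conf->dma_tx_size;

    /* Hardware-shared ring: dma_tx_phy is the bus address the DMA
     * engine will be given as its ring base. */
    addr = dma_alloc_coherent(priv->device, size,
                              &tx_q->dma_tx_phy, GFP_KERNEL);
    if (priv->extend_desc)
            tx_q->dma_etx = addr;
    else if (tx_q->tbs & STMMAC_TBS_AVAIL)
            tx_q->dma_entx = addr;
    else
            tx_q->dma_tx = addr;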
2411 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[queue]; in stmmac_xdp_xmit_zc() local
2412 struct xsk_buff_pool *pool = tx_q->xsk_pool; in stmmac_xdp_xmit_zc()
2413 unsigned int entry = tx_q->cur_tx; in stmmac_xdp_xmit_zc()
2440 tx_desc = (struct dma_desc *)(tx_q->dma_etx + entry); in stmmac_xdp_xmit_zc()
2441 else if (tx_q->tbs & STMMAC_TBS_AVAIL) in stmmac_xdp_xmit_zc()
2442 tx_desc = &tx_q->dma_entx[entry].basic; in stmmac_xdp_xmit_zc()
2444 tx_desc = tx_q->dma_tx + entry; in stmmac_xdp_xmit_zc()
2449 tx_q->tx_skbuff_dma[entry].buf_type = STMMAC_TXBUF_T_XSK_TX; in stmmac_xdp_xmit_zc()
2455 tx_q->tx_skbuff_dma[entry].buf = 0; in stmmac_xdp_xmit_zc()
2456 tx_q->xdpf[entry] = NULL; in stmmac_xdp_xmit_zc()
2458 tx_q->tx_skbuff_dma[entry].map_as_page = false; in stmmac_xdp_xmit_zc()
2459 tx_q->tx_skbuff_dma[entry].len = xdp_desc.len; in stmmac_xdp_xmit_zc()
2460 tx_q->tx_skbuff_dma[entry].last_segment = true; in stmmac_xdp_xmit_zc()
2461 tx_q->tx_skbuff_dma[entry].is_jumbo = false; in stmmac_xdp_xmit_zc()
2465 tx_q->tx_count_frames++; in stmmac_xdp_xmit_zc()
2469 else if (tx_q->tx_count_frames % priv->tx_coal_frames[queue] == 0) in stmmac_xdp_xmit_zc()
2475 tx_q->tx_count_frames = 0; in stmmac_xdp_xmit_zc()
2486 tx_q->cur_tx = STMMAC_GET_ENTRY(tx_q->cur_tx, priv->dma_conf.dma_tx_size); in stmmac_xdp_xmit_zc()
2487 entry = tx_q->cur_tx; in stmmac_xdp_xmit_zc()
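stmmac_xdp_xmit_zc() drains the AF_XDP TX ring straight into descriptors with no copy: xsk_tx_peek_desc() pops the next user frame, xsk_buff_raw_get_dma() translates its UMEM offset to a pre-mapped bus address, the entry is tagged STMMAC_TXBUF_T_XSK_TX (with buf = 0, since there is nothing to unmap), and cur_tx advances with the usual wrap macro. A trimmed kernel-style sketch of one loop pass; budget accounting, the descriptor fill, and the interrupt-on-completion decision are elided:

    struct xdp_desc xdp_desc;
    dma_addr_t dma_addr;

    while (budget-- > 0) {
            if (!xsk_tx_peek_desc(pool, &xdp_desc)) /* TX ring empty */
                    break;

            dma_addr = xsk_buff_raw_get_dma(pool, xdp_desc.addr);
            /* UMEM is mapped once at bind time: sync, don't map. */
            xsk_buff_raw_dma_sync_for_device(pool, dma_addr, xdp_desc.len);

            tx_q->tx_skbuff_dma[entry].buf_type = STMMAC_TXBUF_T_XSK_TX;
            tx_q->tx_skbuff_dma[entry].buf = 0;  /* nothing to unmap */
            tx_q->tx_skbuff_dma[entry].len = xdp_desc.len;
            tx_q->tx_skbuff_dma[entry].last_segment = true;
            tx_q->xdpf[entry] = NULL;

            /* ... program tx_desc with dma_addr/len, hand it to HW ... */

            tx_q->cur_tx = STMMAC_GET_ENTRY(tx_q->cur_tx,
                                            priv->dma_conf.dma_tx_size);
            entry = tx_q->cur_tx;
    }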
2527 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[queue]; in stmmac_tx_clean() local
2535 tx_q->xsk_frames_done = 0; in stmmac_tx_clean()
2537 entry = tx_q->dirty_tx; in stmmac_tx_clean()
2540 while ((entry != tx_q->cur_tx) && count < priv->dma_conf.dma_tx_size) { in stmmac_tx_clean()
2546 if (tx_q->tx_skbuff_dma[entry].buf_type == STMMAC_TXBUF_T_XDP_TX || in stmmac_tx_clean()
2547 tx_q->tx_skbuff_dma[entry].buf_type == STMMAC_TXBUF_T_XDP_NDO) { in stmmac_tx_clean()
2548 xdpf = tx_q->xdpf[entry]; in stmmac_tx_clean()
2550 } else if (tx_q->tx_skbuff_dma[entry].buf_type == STMMAC_TXBUF_T_SKB) { in stmmac_tx_clean()
2552 skb = tx_q->tx_skbuff[entry]; in stmmac_tx_clean()
2559 p = (struct dma_desc *)(tx_q->dma_etx + entry); in stmmac_tx_clean()
2560 else if (tx_q->tbs & STMMAC_TBS_AVAIL) in stmmac_tx_clean()
2561 p = &tx_q->dma_entx[entry].basic; in stmmac_tx_clean()
2563 p = tx_q->dma_tx + entry; in stmmac_tx_clean()
2594 if (likely(tx_q->tx_skbuff_dma[entry].buf && in stmmac_tx_clean()
2595 tx_q->tx_skbuff_dma[entry].buf_type != STMMAC_TXBUF_T_XDP_TX)) { in stmmac_tx_clean()
2596 if (tx_q->tx_skbuff_dma[entry].map_as_page) in stmmac_tx_clean()
2598 tx_q->tx_skbuff_dma[entry].buf, in stmmac_tx_clean()
2599 tx_q->tx_skbuff_dma[entry].len, in stmmac_tx_clean()
2603 tx_q->tx_skbuff_dma[entry].buf, in stmmac_tx_clean()
2604 tx_q->tx_skbuff_dma[entry].len, in stmmac_tx_clean()
2606 tx_q->tx_skbuff_dma[entry].buf = 0; in stmmac_tx_clean()
2607 tx_q->tx_skbuff_dma[entry].len = 0; in stmmac_tx_clean()
2608 tx_q->tx_skbuff_dma[entry].map_as_page = false; in stmmac_tx_clean()
2611 stmmac_clean_desc3(priv, tx_q, p); in stmmac_tx_clean()
2613 tx_q->tx_skbuff_dma[entry].last_segment = false; in stmmac_tx_clean()
2614 tx_q->tx_skbuff_dma[entry].is_jumbo = false; in stmmac_tx_clean()
2617 tx_q->tx_skbuff_dma[entry].buf_type == STMMAC_TXBUF_T_XDP_TX) { in stmmac_tx_clean()
2619 tx_q->xdpf[entry] = NULL; in stmmac_tx_clean()
2623 tx_q->tx_skbuff_dma[entry].buf_type == STMMAC_TXBUF_T_XDP_NDO) { in stmmac_tx_clean()
2625 tx_q->xdpf[entry] = NULL; in stmmac_tx_clean()
2628 if (tx_q->tx_skbuff_dma[entry].buf_type == STMMAC_TXBUF_T_XSK_TX) in stmmac_tx_clean()
2629 tx_q->xsk_frames_done++; in stmmac_tx_clean()
2631 if (tx_q->tx_skbuff_dma[entry].buf_type == STMMAC_TXBUF_T_SKB) { in stmmac_tx_clean()
2636 tx_q->tx_skbuff[entry] = NULL; in stmmac_tx_clean()
2644 tx_q->dirty_tx = entry; in stmmac_tx_clean()
2658 if (tx_q->xsk_pool) { in stmmac_tx_clean()
2661 if (tx_q->xsk_frames_done) in stmmac_tx_clean()
2662 xsk_tx_completed(tx_q->xsk_pool, tx_q->xsk_frames_done); in stmmac_tx_clean()
2664 if (xsk_uses_need_wakeup(tx_q->xsk_pool)) in stmmac_tx_clean()
2665 xsk_set_tx_need_wakeup(tx_q->xsk_pool); in stmmac_tx_clean()
2687 if (tx_q->dirty_tx != tx_q->cur_tx) in stmmac_tx_clean()
2688 hrtimer_start(&tx_q->txtimer, in stmmac_tx_clean()
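stmmac_tx_clean() is the consumer side of the ring model sketched earlier: dirty_tx walks toward cur_tx, each completed entry is undone exactly as the submit path built it (unmap SKB/NDO buffers, return XDP_TX frames, count XSK frames for one batched xsk_tx_completed() call), and the coalescing hrtimer is re-armed if descriptors remain in flight. A runnable user-space model of just the index walk:

    #include <stdio.h>

    #define RING_SIZE 512
    /* STMMAC_GET_ENTRY for power-of-two ring sizes */
    #define NEXT(x) (((x) + 1) & (RING_SIZE - 1))

    /* Consume completed entries: dirty chases cur, never past it. */
    static unsigned int tx_clean(unsigned int *dirty, unsigned int cur)
    {
            unsigned int count = 0;

            while (*dirty != cur && count < RING_SIZE) {
                    /* real driver: check OWN bit, unmap, free here */
                    *dirty = NEXT(*dirty);
                    count++;
            }
            return count;
    }

    int main(void)
    {
            unsigned int dirty = 510, cur = 2; /* in flight across the wrap */
            printf("reclaimed %u entries\n", tx_clean(&dirty, cur)); /* 4 */
            return 0;
    }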
2707 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[chan]; in stmmac_tx_err() local
2716 tx_q->dma_tx_phy, chan); in stmmac_tx_err()
2775 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[chan]; in stmmac_napi_check() local
2782 tx_napi = tx_q->xsk_pool ? &ch->rxtx_napi : &ch->tx_napi; in stmmac_napi_check()
2908 struct stmmac_tx_queue *tx_q; in stmmac_init_dma_engine() local
2955 tx_q = &priv->dma_conf.tx_queue[chan]; in stmmac_init_dma_engine()
2958 tx_q->dma_tx_phy, chan); in stmmac_init_dma_engine()
2960 tx_q->tx_tail_addr = tx_q->dma_tx_phy; in stmmac_init_dma_engine()
2962 tx_q->tx_tail_addr, chan); in stmmac_init_dma_engine()
2970 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[queue]; in stmmac_tx_timer_arm() local
2972 hrtimer_start(&tx_q->txtimer, in stmmac_tx_timer_arm()
2985 struct stmmac_tx_queue *tx_q = container_of(t, struct stmmac_tx_queue, txtimer); in stmmac_tx_timer() local
2986 struct stmmac_priv *priv = tx_q->priv_data; in stmmac_tx_timer()
2990 ch = &priv->channel[tx_q->queue_index]; in stmmac_tx_timer()
2991 napi = tx_q->xsk_pool ? &ch->rxtx_napi : &ch->tx_napi; in stmmac_tx_timer()
3020 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[chan]; in stmmac_init_coalesce() local
3025 hrtimer_init(&tx_q->txtimer, CLOCK_MONOTONIC, HRTIMER_MODE_REL); in stmmac_init_coalesce()
3026 tx_q->txtimer.function = stmmac_tx_timer; in stmmac_init_coalesce()
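TX completion interrupts are coalesced on two axes: only every tx_coal_frames-th descriptor requests an interrupt (the tx_count_frames % priv->tx_coal_frames[queue] test seen at several matches above), and a per-queue hrtimer is the fallback that reclaims the tail of a burst. The init shown here is the classic pre-hrtimer_setup() style. A condensed kernel-style sketch (not standalone-compilable):

    /* One relative, monotonic timer per TX queue. */
    hrtimer_init(&tx_q->txtimer, CLOCK_MONOTONIC, HRTIMER_MODE_REL);
    tx_q->txtimer.function = stmmac_tx_timer;

    /* Arming (stmmac_tx_timer_arm) converts the per-queue usec
     * setting; a late expiry only delays reclaim, never breaks it. */
    hrtimer_start(&tx_q->txtimer,
                  STMMAC_COAL_TIMER(priv->tx_coal_timer[queue]),
                  HRTIMER_MODE_REL);

    /* The handler itself only kicks NAPI; cleanup runs in softirq
     * context, on the rxtx NAPI when an XSK pool is attached. */
    static enum hrtimer_restart stmmac_tx_timer(struct hrtimer *t)
    {
            struct stmmac_tx_queue *tx_q =
                    container_of(t, struct stmmac_tx_queue, txtimer);
            /* ... napi_schedule() on the matching channel ... */
            return HRTIMER_NORESTART;
    }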
3382 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[chan]; in stmmac_hw_setup() local
3385 if (tx_q->tbs & STMMAC_TBS_AVAIL) in stmmac_hw_setup()
3404 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[chan]; in stmmac_hw_setup() local
3405 int enable = tx_q->tbs & STMMAC_TBS_AVAIL; in stmmac_hw_setup()
3746 struct stmmac_tx_queue *tx_q = &dma_conf->tx_queue[chan]; in stmmac_setup_dma_desc() local
3750 tx_q->tbs |= tbs_en ? STMMAC_TBS_AVAIL : 0; in stmmac_setup_dma_desc()
3951 struct stmmac_tx_queue *tx_q) in stmmac_vlan_insert() argument
3968 if (tx_q->tbs & STMMAC_TBS_AVAIL) in stmmac_vlan_insert()
3969 p = &tx_q->dma_entx[tx_q->cur_tx].basic; in stmmac_vlan_insert()
3971 p = &tx_q->dma_tx[tx_q->cur_tx]; in stmmac_vlan_insert()
3977 tx_q->cur_tx = STMMAC_GET_ENTRY(tx_q->cur_tx, priv->dma_conf.dma_tx_size); in stmmac_vlan_insert()
3995 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[queue]; in stmmac_tso_allocator() local
4005 tx_q->cur_tx = STMMAC_GET_ENTRY(tx_q->cur_tx, in stmmac_tso_allocator()
4007 WARN_ON(tx_q->tx_skbuff[tx_q->cur_tx]); in stmmac_tso_allocator()
4009 if (tx_q->tbs & STMMAC_TBS_AVAIL) in stmmac_tso_allocator()
4010 desc = &tx_q->dma_entx[tx_q->cur_tx].basic; in stmmac_tso_allocator()
4012 desc = &tx_q->dma_tx[tx_q->cur_tx]; in stmmac_tso_allocator()
4034 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[queue]; in stmmac_flush_tx_descriptors() local
4039 else if (tx_q->tbs & STMMAC_TBS_AVAIL) in stmmac_flush_tx_descriptors()
4050 tx_q->tx_tail_addr = tx_q->dma_tx_phy + (tx_q->cur_tx * desc_size); in stmmac_flush_tx_descriptors()
4051 stmmac_set_tx_tail_ptr(priv, priv->ioaddr, tx_q->tx_tail_addr, queue); in stmmac_flush_tx_descriptors()
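The doorbell written by stmmac_set_tx_tail_ptr() is a byte address, not an index: the ring's bus base (dma_tx_phy) plus cur_tx scaled by whichever descriptor size is active, so one formula serves all three layouts. A runnable check of the arithmetic, with an assumed example base address:

    #include <assert.h>
    #include <stdint.h>

    int main(void)
    {
            uint64_t dma_tx_phy = 0x80000000ULL; /* example bus base */
            unsigned int cur_tx = 7;
            unsigned int desc_size = 16; /* sizeof basic dma_desc: 4 x u32 */

            /* Tail = first byte after the last posted descriptor. */
            uint64_t tx_tail_addr = dma_tx_phy + (uint64_t)cur_tx * desc_size;
            assert(tx_tail_addr == 0x80000070ULL);
            return 0;
    }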
4089 struct stmmac_tx_queue *tx_q; in stmmac_tso_xmit() local
4096 tx_q = &priv->dma_conf.tx_queue[queue]; in stmmac_tso_xmit()
4097 first_tx = tx_q->cur_tx; in stmmac_tso_xmit()
4127 if (mss != tx_q->mss) { in stmmac_tso_xmit()
4128 if (tx_q->tbs & STMMAC_TBS_AVAIL) in stmmac_tso_xmit()
4129 mss_desc = &tx_q->dma_entx[tx_q->cur_tx].basic; in stmmac_tso_xmit()
4131 mss_desc = &tx_q->dma_tx[tx_q->cur_tx]; in stmmac_tso_xmit()
4134 tx_q->mss = mss; in stmmac_tso_xmit()
4135 tx_q->cur_tx = STMMAC_GET_ENTRY(tx_q->cur_tx, in stmmac_tso_xmit()
4137 WARN_ON(tx_q->tx_skbuff[tx_q->cur_tx]); in stmmac_tso_xmit()
4148 has_vlan = stmmac_vlan_insert(priv, skb, tx_q); in stmmac_tso_xmit()
4150 first_entry = tx_q->cur_tx; in stmmac_tso_xmit()
4151 WARN_ON(tx_q->tx_skbuff[first_entry]); in stmmac_tso_xmit()
4153 if (tx_q->tbs & STMMAC_TBS_AVAIL) in stmmac_tso_xmit()
4154 desc = &tx_q->dma_entx[first_entry].basic; in stmmac_tso_xmit()
4156 desc = &tx_q->dma_tx[first_entry]; in stmmac_tso_xmit()
4168 tx_q->tx_skbuff_dma[first_entry].buf = des; in stmmac_tso_xmit()
4169 tx_q->tx_skbuff_dma[first_entry].len = skb_headlen(skb); in stmmac_tso_xmit()
4170 tx_q->tx_skbuff_dma[first_entry].map_as_page = false; in stmmac_tso_xmit()
4171 tx_q->tx_skbuff_dma[first_entry].buf_type = STMMAC_TXBUF_T_SKB; in stmmac_tso_xmit()
4204 tx_q->tx_skbuff_dma[tx_q->cur_tx].buf = des; in stmmac_tso_xmit()
4205 tx_q->tx_skbuff_dma[tx_q->cur_tx].len = skb_frag_size(frag); in stmmac_tso_xmit()
4206 tx_q->tx_skbuff_dma[tx_q->cur_tx].map_as_page = true; in stmmac_tso_xmit()
4207 tx_q->tx_skbuff_dma[tx_q->cur_tx].buf_type = STMMAC_TXBUF_T_SKB; in stmmac_tso_xmit()
4210 tx_q->tx_skbuff_dma[tx_q->cur_tx].last_segment = true; in stmmac_tso_xmit()
4213 tx_q->tx_skbuff[tx_q->cur_tx] = skb; in stmmac_tso_xmit()
4214 tx_q->tx_skbuff_dma[tx_q->cur_tx].buf_type = STMMAC_TXBUF_T_SKB; in stmmac_tso_xmit()
4217 tx_packets = (tx_q->cur_tx + 1) - first_tx; in stmmac_tso_xmit()
4218 tx_q->tx_count_frames += tx_packets; in stmmac_tso_xmit()
4226 else if ((tx_q->tx_count_frames % in stmmac_tso_xmit()
4233 if (tx_q->tbs & STMMAC_TBS_AVAIL) in stmmac_tso_xmit()
4234 desc = &tx_q->dma_entx[tx_q->cur_tx].basic; in stmmac_tso_xmit()
4236 desc = &tx_q->dma_tx[tx_q->cur_tx]; in stmmac_tso_xmit()
4238 tx_q->tx_count_frames = 0; in stmmac_tso_xmit()
4248 tx_q->cur_tx = STMMAC_GET_ENTRY(tx_q->cur_tx, priv->dma_conf.dma_tx_size); in stmmac_tso_xmit()
4276 1, tx_q->tx_skbuff_dma[first_entry].last_segment, in stmmac_tso_xmit()
4292 __func__, tx_q->cur_tx, tx_q->dirty_tx, first_entry, in stmmac_tso_xmit()
4293 tx_q->cur_tx, first, nfrags); in stmmac_tso_xmit()
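In the TSO path the MSS is programmed out-of-band: tx_q->mss caches the value last written to hardware, and only when a packet's gso_size differs does the driver spend one ring slot on a context descriptor, placed ahead of the first data descriptor, carrying the new MSS. A condensed kernel-style sketch of that check (stmmac_set_mss() is the driver's real hwif op):

    u32 mss = skb_shinfo(skb)->gso_size;
    struct dma_desc *mss_desc = NULL;

    if (mss != tx_q->mss) {
            /* MSS changed: emit a context descriptor first. */
            if (tx_q->tbs & STMMAC_TBS_AVAIL)
                    mss_desc = &tx_q->dma_entx[tx_q->cur_tx].basic;
            else
                    mss_desc = &tx_q->dma_tx[tx_q->cur_tx];

            stmmac_set_mss(priv, mss_desc, mss);
            tx_q->mss = mss;
            tx_q->cur_tx = STMMAC_GET_ENTRY(tx_q->cur_tx,
                                            priv->dma_conf.dma_tx_size);
    }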
4331 struct stmmac_tx_queue *tx_q; in stmmac_xmit() local
4336 tx_q = &priv->dma_conf.tx_queue[queue]; in stmmac_xmit()
4337 first_tx = tx_q->cur_tx; in stmmac_xmit()
4363 has_vlan = stmmac_vlan_insert(priv, skb, tx_q); in stmmac_xmit()
4365 entry = tx_q->cur_tx; in stmmac_xmit()
4367 WARN_ON(tx_q->tx_skbuff[first_entry]); in stmmac_xmit()
4372 desc = (struct dma_desc *)(tx_q->dma_etx + entry); in stmmac_xmit()
4373 else if (tx_q->tbs & STMMAC_TBS_AVAIL) in stmmac_xmit()
4374 desc = &tx_q->dma_entx[entry].basic; in stmmac_xmit()
4376 desc = tx_q->dma_tx + entry; in stmmac_xmit()
4389 entry = stmmac_jumbo_frm(priv, tx_q, skb, csum_insertion); in stmmac_xmit()
4400 WARN_ON(tx_q->tx_skbuff[entry]); in stmmac_xmit()
4403 desc = (struct dma_desc *)(tx_q->dma_etx + entry); in stmmac_xmit()
4404 else if (tx_q->tbs & STMMAC_TBS_AVAIL) in stmmac_xmit()
4405 desc = &tx_q->dma_entx[entry].basic; in stmmac_xmit()
4407 desc = tx_q->dma_tx + entry; in stmmac_xmit()
4414 tx_q->tx_skbuff_dma[entry].buf = des; in stmmac_xmit()
4418 tx_q->tx_skbuff_dma[entry].map_as_page = true; in stmmac_xmit()
4419 tx_q->tx_skbuff_dma[entry].len = len; in stmmac_xmit()
4420 tx_q->tx_skbuff_dma[entry].last_segment = last_segment; in stmmac_xmit()
4421 tx_q->tx_skbuff_dma[entry].buf_type = STMMAC_TXBUF_T_SKB; in stmmac_xmit()
4429 tx_q->tx_skbuff[entry] = skb; in stmmac_xmit()
4430 tx_q->tx_skbuff_dma[entry].buf_type = STMMAC_TXBUF_T_SKB; in stmmac_xmit()
4438 tx_q->tx_count_frames += tx_packets; in stmmac_xmit()
4446 else if ((tx_q->tx_count_frames % in stmmac_xmit()
4454 desc = &tx_q->dma_etx[entry].basic; in stmmac_xmit()
4455 else if (tx_q->tbs & STMMAC_TBS_AVAIL) in stmmac_xmit()
4456 desc = &tx_q->dma_entx[entry].basic; in stmmac_xmit()
4458 desc = &tx_q->dma_tx[entry]; in stmmac_xmit()
4460 tx_q->tx_count_frames = 0; in stmmac_xmit()
4471 tx_q->cur_tx = entry; in stmmac_xmit()
4476 __func__, tx_q->cur_tx, tx_q->dirty_tx, first_entry, in stmmac_xmit()
4508 tx_q->tx_skbuff_dma[first_entry].buf = des; in stmmac_xmit()
4509 tx_q->tx_skbuff_dma[first_entry].buf_type = STMMAC_TXBUF_T_SKB; in stmmac_xmit()
4510 tx_q->tx_skbuff_dma[first_entry].map_as_page = false; in stmmac_xmit()
4514 tx_q->tx_skbuff_dma[first_entry].len = nopaged_len; in stmmac_xmit()
4515 tx_q->tx_skbuff_dma[first_entry].last_segment = last_segment; in stmmac_xmit()
4530 if (tx_q->tbs & STMMAC_TBS_EN) { in stmmac_xmit()
4533 tbs_desc = &tx_q->dma_entx[first_entry]; in stmmac_xmit()
4699 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[queue]; in stmmac_xdp_xmit_xdpf() local
4700 unsigned int entry = tx_q->cur_tx; in stmmac_xdp_xmit_xdpf()
4709 tx_desc = (struct dma_desc *)(tx_q->dma_etx + entry); in stmmac_xdp_xmit_xdpf()
4710 else if (tx_q->tbs & STMMAC_TBS_AVAIL) in stmmac_xdp_xmit_xdpf()
4711 tx_desc = &tx_q->dma_entx[entry].basic; in stmmac_xdp_xmit_xdpf()
4713 tx_desc = tx_q->dma_tx + entry; in stmmac_xdp_xmit_xdpf()
4721 tx_q->tx_skbuff_dma[entry].buf_type = STMMAC_TXBUF_T_XDP_NDO; in stmmac_xdp_xmit_xdpf()
4730 tx_q->tx_skbuff_dma[entry].buf_type = STMMAC_TXBUF_T_XDP_TX; in stmmac_xdp_xmit_xdpf()
4733 tx_q->tx_skbuff_dma[entry].buf = dma_addr; in stmmac_xdp_xmit_xdpf()
4734 tx_q->tx_skbuff_dma[entry].map_as_page = false; in stmmac_xdp_xmit_xdpf()
4735 tx_q->tx_skbuff_dma[entry].len = xdpf->len; in stmmac_xdp_xmit_xdpf()
4736 tx_q->tx_skbuff_dma[entry].last_segment = true; in stmmac_xdp_xmit_xdpf()
4737 tx_q->tx_skbuff_dma[entry].is_jumbo = false; in stmmac_xdp_xmit_xdpf()
4739 tx_q->xdpf[entry] = xdpf; in stmmac_xdp_xmit_xdpf()
4747 tx_q->tx_count_frames++; in stmmac_xdp_xmit_xdpf()
4749 if (tx_q->tx_count_frames % priv->tx_coal_frames[queue] == 0) in stmmac_xdp_xmit_xdpf()
4755 tx_q->tx_count_frames = 0; in stmmac_xdp_xmit_xdpf()
4763 tx_q->cur_tx = entry; in stmmac_xdp_xmit_xdpf()
5845 struct stmmac_tx_queue *tx_q = (struct stmmac_tx_queue *)data; in stmmac_msi_intr_tx() local
5847 int chan = tx_q->queue_index; in stmmac_msi_intr_tx()
5851 dma_conf = container_of(tx_q, struct stmmac_dma_conf, tx_queue[chan]); in stmmac_msi_intr_tx()
6114 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[queue]; in stmmac_rings_status_show() local
6120 sysfs_display_ring((void *)tx_q->dma_etx, in stmmac_rings_status_show()
6121 priv->dma_conf.dma_tx_size, 1, seq, tx_q->dma_tx_phy); in stmmac_rings_status_show()
6122 } else if (!(tx_q->tbs & STMMAC_TBS_AVAIL)) { in stmmac_rings_status_show()
6124 sysfs_display_ring((void *)tx_q->dma_tx, in stmmac_rings_status_show()
6125 priv->dma_conf.dma_tx_size, 0, seq, tx_q->dma_tx_phy); in stmmac_rings_status_show()
6527 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[queue]; in stmmac_enable_tx_queue() local
6549 tx_q->dma_tx_phy, tx_q->queue_index); in stmmac_enable_tx_queue()
6551 if (tx_q->tbs & STMMAC_TBS_AVAIL) in stmmac_enable_tx_queue()
6552 stmmac_enable_tbs(priv, priv->ioaddr, 1, tx_q->queue_index); in stmmac_enable_tx_queue()
6554 tx_q->tx_tail_addr = tx_q->dma_tx_phy; in stmmac_enable_tx_queue()
6556 tx_q->tx_tail_addr, tx_q->queue_index); in stmmac_enable_tx_queue()
6605 struct stmmac_tx_queue *tx_q; in stmmac_xdp_open() local
6663 tx_q = &priv->dma_conf.tx_queue[chan]; in stmmac_xdp_open()
6666 tx_q->dma_tx_phy, chan); in stmmac_xdp_open()
6668 tx_q->tx_tail_addr = tx_q->dma_tx_phy; in stmmac_xdp_open()
6670 tx_q->tx_tail_addr, chan); in stmmac_xdp_open()
6672 hrtimer_init(&tx_q->txtimer, CLOCK_MONOTONIC, HRTIMER_MODE_REL); in stmmac_xdp_open()
6673 tx_q->txtimer.function = stmmac_tx_timer; in stmmac_xdp_open()
6709 struct stmmac_tx_queue *tx_q; in stmmac_xsk_wakeup() local
6724 tx_q = &priv->dma_conf.tx_queue[queue]; in stmmac_xsk_wakeup()
6727 if (!rx_q->xsk_pool && !tx_q->xsk_pool) in stmmac_xsk_wakeup()
7477 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[queue]; in stmmac_reset_tx_queue() local
7479 tx_q->cur_tx = 0; in stmmac_reset_tx_queue()
7480 tx_q->dirty_tx = 0; in stmmac_reset_tx_queue()
7481 tx_q->mss = 0; in stmmac_reset_tx_queue()