Searched refs:txr (Results 1 – 16 of 16) sorted by relevance

/drivers/net/ethernet/broadcom/bnxt/
bnxt_xdp.c
45 prod = txr->tx_prod; in bnxt_xmit_bd()
66 WRITE_ONCE(txr->tx_prod, prod); in bnxt_xmit_bd()
90 WRITE_ONCE(txr->tx_prod, prod); in bnxt_xmit_bd()
128 u16 tx_cons = txr->tx_cons; in bnxt_tx_int_xdp()
229 struct bnxt_tx_ring_info *txr; in bnxt_rx_xdp() local
245 txr = rxr->bnapi->tx_ring[0]; in bnxt_rx_xdp()
337 struct bnxt_tx_ring_info *txr; in bnxt_xdp_xmit() local
349 txr = &bp->tx_ring[ring]; in bnxt_xdp_xmit()
355 spin_lock(&txr->xdp_tx_lock); in bnxt_xdp_xmit()
360 if (!bnxt_tx_avail(bp, txr)) in bnxt_xdp_xmit()
[all …]
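The bnxt_xdp.c hits above show the usual XDP_REDIRECT transmit shape: pick a TX ring, take its xdp_tx_lock (several CPUs may redirect into the same ring), check descriptor availability with bnxt_tx_avail(), and only then post frames. A minimal kernel-style sketch of that pattern, assuming a made-up my_xdp_ring rather than bnxt's real structures:

```c
#include <linux/spinlock.h>
#include <linux/types.h>

/* Hypothetical ring modelling the pattern in the hits above, not bnxt's. */
struct my_xdp_ring {
	spinlock_t	xdp_tx_lock;	/* several CPUs may redirect to this ring */
	u16		prod;
	u16		cons;
	u16		size;
};

static u16 my_tx_avail(const struct my_xdp_ring *r)
{
	return r->size - (u16)(r->prod - r->cons);
}

/* Queue up to @num frames; returns how many descriptors were claimed. */
static int my_xdp_xmit(struct my_xdp_ring *r, int num)
{
	int nxmit = 0;

	spin_lock(&r->xdp_tx_lock);
	while (nxmit < num && my_tx_avail(r)) {
		/* a real driver maps the frame and fills the descriptor here */
		r->prod++;
		nxmit++;
	}
	spin_unlock(&r->xdp_tx_lock);
	return nxmit;
}
```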
bnxt.c
416 txr->txq_index, txr->tx_hw_cons, in bnxt_sched_reset_txr()
417 txr->tx_cons, txr->tx_prod, curr); in bnxt_sched_reset_txr()
793 txr->tx_buf_ring[RING_TX(bp, txr->tx_prod)].is_ts_pkt = 0; in bnxt_start_xmit()
800 bnxt_txr_db_kick(bp, txr, txr->tx_prod); in bnxt_start_xmit()
801 txr->tx_buf_ring[RING_TX(bp, txr->tx_prod)].skb = NULL; in bnxt_start_xmit()
894 if (txr->tx_hw_cons != RING_TX(bp, txr->tx_cons)) in bnxt_tx_int()
3038 tx_freed = (txr->tx_hw_cons - txr->tx_cons) & in __bnxt_poll_work()
3953 txr->tx_push, txr->tx_push_mapping); in bnxt_free_tx_rings()
5522 bnapi2->tx_ring[txr->tx_napi_idx] = txr; in bnxt_alloc_mem()
11470 bnxt_free_one_tx_ring_skbs(bp, txr, txr->txq_index); in bnxt_tx_queue_stop()
[all …]
bnxt_xdp.h
16 struct bnxt_tx_ring_info *txr,
bnxt.h
73 #define SET_TX_OPAQUE(bp, txr, idx, bds) \ argument
74 (((txr)->tx_napi_idx << TX_OPAQUE_RING_SHIFT) | \
1209 #define bnxt_for_each_napi_tx(iter, bnapi, txr) \ argument
1210 for (iter = 0, txr = (bnapi)->tx_ring[0]; txr; \
1211 txr = (iter < BNXT_MAX_TXR_PER_NAPI - 1) ? \
2802 const struct bnxt_tx_ring_info *txr) in bnxt_tx_avail() argument
2804 u32 used = READ_ONCE(txr->tx_prod) - READ_ONCE(txr->tx_cons); in bnxt_tx_avail()
2925 void bnxt_sched_reset_txr(struct bnxt *bp, struct bnxt_tx_ring_info *txr,
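The bnxt_tx_avail() hit at line 2804 is the classic lock-free producer/consumer trick: both indices are free-running and only ever grow, so unsigned subtraction gives the ring occupancy even across wraparound, and the reader needs READ_ONCE() rather than a lock. A self-contained C model of just the arithmetic (the concurrency annotations are omitted; names are illustrative, not the driver's):

```c
#include <assert.h>
#include <stdint.h>

/*
 * Free-running 32-bit producer/consumer indices for a ring of `size`
 * entries.  Occupancy is simply prod - cons even after either counter
 * wraps, because unsigned subtraction is modulo 2^32.
 */
static uint32_t ring_used(uint32_t prod, uint32_t cons)
{
	return prod - cons;
}

static uint32_t ring_avail(uint32_t prod, uint32_t cons, uint32_t size)
{
	return size - ring_used(prod, cons);
}

int main(void)
{
	/* Wraparound case: the producer has wrapped past 0, the consumer has not. */
	uint32_t prod = 5, cons = 0xfffffffbu, size = 512;

	assert(ring_used(prod, cons) == 10);
	assert(ring_avail(prod, cons, size) == 502);
	return 0;
}
```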
bnxt_ptp.c
864 struct bnxt_tx_ring_info *txr; in bnxt_tx_ts_cmp() local
869 txr = bnapi->tx_ring[TX_OPAQUE_RING(opaque)]; in bnxt_tx_ts_cmp()
872 tx_buf = &txr->tx_buf_ring[RING_TX(bp, cons)]; in bnxt_tx_ts_cmp()
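bnxt stamps each TX descriptor with an opaque cookie (SET_TX_OPAQUE above) so a completion handler such as bnxt_tx_ts_cmp() can recover which per-NAPI ring and which buffer the completion refers to via TX_OPAQUE_RING()/RING_TX(). A toy pack/unpack in plain C; the field widths below are invented for illustration and do not match the driver's TX_OPAQUE_* layout:

```c
#include <assert.h>
#include <stdint.h>

/* Illustrative layout only: low 16 bits = buffer index, bits above = ring. */
#define OPQ_RING_SHIFT	16
#define OPQ_IDX_MASK	0xffffu

static uint32_t opq_pack(uint32_t ring, uint32_t idx)
{
	return (ring << OPQ_RING_SHIFT) | (idx & OPQ_IDX_MASK);
}

static uint32_t opq_ring(uint32_t opaque) { return opaque >> OPQ_RING_SHIFT; }
static uint32_t opq_idx(uint32_t opaque)  { return opaque & OPQ_IDX_MASK; }

int main(void)
{
	/* Descriptor posted on ring 2 at index 0x1ab ... */
	uint32_t opaque = opq_pack(2, 0x1ab);

	/* ... and recovered from the completion's opaque field. */
	assert(opq_ring(opaque) == 2);
	assert(opq_idx(opaque) == 0x1ab);
	return 0;
}
```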
bnxt_ethtool.c
4937 struct bnxt_tx_ring_info *txr = &bp->tx_ring[0]; in bnxt_run_loopback() local
4968 bnxt_xmit_bd(bp, txr, map, pkt_size, NULL); in bnxt_run_loopback()
4973 bnxt_db_write(bp, &txr->tx_db, txr->tx_prod); in bnxt_run_loopback()
/drivers/net/ethernet/qualcomm/
qca_spi.c
276 if (qca->txr.skb[qca->txr.head] == NULL) in qcaspi_transmit()
289 while (qca->txr.skb[qca->txr.head]) { in qcaspi_transmit()
305 n_stats->tx_bytes += qca->txr.skb[qca->txr.head]->len; in qcaspi_transmit()
313 dev_kfree_skb(qca->txr.skb[qca->txr.head]); in qcaspi_transmit()
314 qca->txr.skb[qca->txr.head] = NULL; in qcaspi_transmit()
450 if (txr->skb[txr->tail]) in qcaspi_tx_ring_has_space()
591 !qca->txr.skb[qca->txr.head]) in qcaspi_spi_thread()
598 qca->txr.skb[qca->txr.head]); in qcaspi_spi_thread()
743 if (qca->txr.skb[qca->txr.tail]) { in qcaspi_netdev_xmit()
783 qca->txr.skb[qca->txr.tail] = skb; in qcaspi_netdev_xmit()
[all …]
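qca_spi keeps its TX queue as a small array of skb pointers plus head/tail counters: a NULL slot means free, the xmit path fills the tail slot, and the SPI thread transmits and frees from the head (the dev_kfree_skb() at hit 313). A self-contained C model of that "NULL slot == free" ring; the length and names are illustrative:

```c
#include <assert.h>
#include <stddef.h>

#define RING_LEN 10

struct skb_ring {
	void	*skb[RING_LEN];	/* NULL slot == free */
	int	head;		/* next entry to transmit/complete */
	int	tail;		/* next entry the xmit path will fill */
};

static int ring_has_space(const struct skb_ring *r)
{
	return r->skb[r->tail] == NULL;	/* is the tail slot still free? */
}

static int ring_enqueue(struct skb_ring *r, void *skb)
{
	if (!ring_has_space(r))
		return -1;
	r->skb[r->tail] = skb;
	r->tail = (r->tail + 1) % RING_LEN;
	return 0;
}

static void *ring_complete(struct skb_ring *r)
{
	void *skb = r->skb[r->head];

	if (!skb)
		return NULL;		/* nothing pending */
	r->skb[r->head] = NULL;		/* mark the slot free again */
	r->head = (r->head + 1) % RING_LEN;
	return skb;			/* caller frees it */
}

int main(void)
{
	struct skb_ring r = { 0 };
	int pkt = 42;

	assert(ring_enqueue(&r, &pkt) == 0);
	assert(ring_complete(&r) == &pkt);
	assert(ring_complete(&r) == NULL);
	return 0;
}
```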
qca_debug.c
69 if (qca->txr.skb[qca->txr.head] == NULL) in qcaspi_info_show()
71 else if (qca->txr.skb[qca->txr.tail]) in qcaspi_info_show()
79 qca->txr.size); in qcaspi_info_show()
245 ring->tx_pending = qca->txr.count; in qcaspi_get_ringparam()
263 qca->txr.count = max_t(u32, ring->tx_pending, QCASPI_TX_RING_MIN_LEN); in qcaspi_set_ringparam()
264 qca->txr.count = min_t(u16, qca->txr.count, QCASPI_TX_RING_MAX_LEN); in qcaspi_set_ringparam()
qca_spi.h
74 struct tx_ring txr; member
/drivers/net/ethernet/meta/fbnic/
fbnic_txrx.c
1228 struct fbnic_ring *txr) in fbnic_aggregate_ring_tx_counters() argument
1247 struct fbnic_ring *txr) in fbnic_remove_tx_ring() argument
1255 WARN_ON(fbn->tx[txr->q_idx] && fbn->tx[txr->q_idx] != txr); in fbnic_remove_tx_ring()
1256 fbn->tx[txr->q_idx] = NULL; in fbnic_remove_tx_ring()
1545 struct fbnic_ring *txr) in fbnic_alloc_tx_ring_desc() argument
1553 txr->desc = dma_alloc_coherent(dev, size, &txr->dma, in fbnic_alloc_tx_ring_desc()
1555 if (!txr->desc) in fbnic_alloc_tx_ring_desc()
1560 txr->size = size; in fbnic_alloc_tx_ring_desc()
1567 size_t size = array_size(sizeof(*txr->tx_buf), txr->size_mask + 1); in fbnic_alloc_tx_ring_buffer()
1571 return txr->tx_buf ? 0 : -ENOMEM; in fbnic_alloc_tx_ring_buffer()
[all …]
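fbnic sets a ring up with two separate allocations, as most NIC drivers do: the descriptor array comes from dma_alloc_coherent() so the device can see it, while the per-descriptor software state (tx_buf) is ordinary kernel memory sized with array_size(). A hedged kernel-style sketch of that split; struct my_ring, the kvzalloc() choice, and the GFP flags are assumptions, not fbnic's actual code:

```c
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/overflow.h>
#include <linux/slab.h>

struct my_ring {
	void		*desc;		/* device-visible descriptor array */
	dma_addr_t	dma;		/* its bus address, programmed into HW */
	void		**tx_buf;	/* host-only per-descriptor state */
	size_t		size;		/* descriptor array size in bytes */
	unsigned int	count;		/* number of descriptors */
};

static int my_alloc_tx_ring(struct device *dev, struct my_ring *r,
			    unsigned int count, size_t desc_sz)
{
	r->count = count;
	r->size = array_size(desc_sz, count);

	/* Coherent memory: both CPU and NIC read/write descriptors here. */
	r->desc = dma_alloc_coherent(dev, r->size, &r->dma, GFP_KERNEL);
	if (!r->desc)
		return -ENOMEM;

	/* Software-only shadow array; kvzalloc tolerates large rings. */
	r->tx_buf = kvzalloc(array_size(sizeof(*r->tx_buf), count), GFP_KERNEL);
	if (!r->tx_buf) {
		dma_free_coherent(dev, r->size, r->desc, r->dma);
		return -ENOMEM;
	}
	return 0;
}
```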
fbnic_netdev.c
438 struct fbnic_ring *txr = fbn->tx[i]; in fbnic_get_stats64() local
440 if (!txr) in fbnic_get_stats64()
443 stats = &txr->stats; in fbnic_get_stats64()
568 struct fbnic_ring *txr = fbn->tx[idx]; in fbnic_get_queue_stats_tx() local
574 if (!txr) in fbnic_get_queue_stats_tx()
577 stats = &txr->stats; in fbnic_get_queue_stats_tx()
fbnic_txrx.h
150 struct fbnic_ring *txr);
/drivers/net/ethernet/broadcom/
bnx2.c
253 diff = READ_ONCE(txr->tx_prod) - READ_ONCE(txr->tx_cons); in bnx2_tx_avail()
755 txr->tx_desc_ring = in bnx2_alloc_tx_mem()
5121 txr->tx_cons = 0; in bnx2_clear_ring_states()
5184 txr->tx_prod = 0; in bnx2_init_tx_ring()
5847 txbd = &txr->tx_desc_ring[BNX2_TX_RING_IDX(txr->tx_prod)]; in bnx2_run_loopback()
5855 txr->tx_prod = BNX2_NEXT_TX_BD(txr->tx_prod); in bnx2_run_loopback()
5858 BNX2_WR16(bp, txr->tx_bidx_addr, txr->tx_prod); in bnx2_run_loopback()
5859 BNX2_WR(bp, txr->tx_bseq_addr, txr->tx_prod_bseq); in bnx2_run_loopback()
6601 prod = txr->tx_prod; in bnx2_start_xmit()
6702 BNX2_WR(bp, txr->tx_bseq_addr, txr->tx_prod_bseq); in bnx2_start_xmit()
[all …]
bcmsysport.c
1108 struct bcm_sysport_tx_ring *txr; in bcm_sysport_rx_isr() local
1143 txr = &priv->tx_rings[ring]; in bcm_sysport_rx_isr()
1145 if (likely(napi_schedule_prep(&txr->napi))) { in bcm_sysport_rx_isr()
1147 __napi_schedule(&txr->napi); in bcm_sysport_rx_isr()
1159 struct bcm_sysport_tx_ring *txr; in bcm_sysport_tx_isr() local
1175 txr = &priv->tx_rings[ring]; in bcm_sysport_tx_isr()
1177 if (likely(napi_schedule_prep(&txr->napi))) { in bcm_sysport_tx_isr()
1179 __napi_schedule_irqoff(&txr->napi); in bcm_sysport_tx_isr()
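Both bcm_sysport ISRs above follow the standard split-interrupt idiom: the hard IRQ handler only works out which ring fired and hands the real processing to that ring's NAPI context. A hedged sketch of the idiom; the ring type is a placeholder and the per-ring interrupt-mask step is only indicated as a comment:

```c
#include <linux/interrupt.h>
#include <linux/netdevice.h>

struct my_tx_ring {
	struct napi_struct	napi;
	unsigned int		index;
};

static irqreturn_t my_tx_isr(int irq, void *dev_id)
{
	struct my_tx_ring *ring = dev_id;

	/* napi_schedule_prep() fails if this NAPI is already scheduled. */
	if (likely(napi_schedule_prep(&ring->napi))) {
		/* mask this ring's interrupt source here, e.g. in an intr block */
		__napi_schedule_irqoff(&ring->napi);
	}
	return IRQ_HANDLED;
}
```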
/drivers/net/ethernet/sgi/
ioc3-eth.c
86 struct ioc3_etxd *txr; member
615 desc = &ip->txr[entry]; in ioc3_tx_unmap()
642 ip->txr[i].cmd = 0; in ioc3_clean_tx_ring()
902 ip->txr = PTR_ALIGN(ip->tx_ring, SZ_16K); in ioc3eth_probe()
1039 desc = &ip->txr[produce]; in ioc3_start_xmit()
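ioc3 wants its TX descriptor ring on a 16 KiB boundary, so the probe path keeps the raw allocation in ip->tx_ring and uses the PTR_ALIGN()ed view in ip->txr. A hedged sketch of that over-allocate-and-align idiom; how the backing buffer is obtained (plain kzalloc() here) is an assumption:

```c
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/sizes.h>
#include <linux/slab.h>

struct my_priv {
	void *tx_ring;	/* raw allocation, kept so it can be kfree()d later */
	void *txr;	/* 16 KiB-aligned view actually used for descriptors */
};

static int my_alloc_aligned_ring(struct my_priv *ip, size_t ring_bytes)
{
	/* Over-allocate by the alignment so the aligned ring still fits. */
	ip->tx_ring = kzalloc(ring_bytes + SZ_16K, GFP_KERNEL);
	if (!ip->tx_ring)
		return -ENOMEM;

	ip->txr = PTR_ALIGN(ip->tx_ring, SZ_16K);
	return 0;
}
```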
/drivers/net/ethernet/amazon/ena/
ena_netdev.c
187 struct ena_ring *txr, *rxr; in ena_init_io_rings() local
193 txr = &adapter->tx_ring[i]; in ena_init_io_rings()
197 ena_init_io_rings_common(adapter, txr, i); in ena_init_io_rings()
200 txr->ring_size = adapter->requested_tx_ring_size; in ena_init_io_rings()
201 txr->tx_max_header_size = ena_dev->tx_max_header_size; in ena_init_io_rings()
202 txr->tx_mem_queue_type = ena_dev->tx_mem_queue_type; in ena_init_io_rings()
203 txr->sgl_size = adapter->max_tx_sgl_size; in ena_init_io_rings()
204 txr->smoothed_interval = in ena_init_io_rings()
207 spin_lock_init(&txr->xdp_tx_lock); in ena_init_io_rings()
3303 struct ena_ring *txr; in ena_restore_device() local
[all …]
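ena_init_io_rings() above is the usual probe-time loop that copies adapter- and device-wide limits into every per-queue TX ring and initialises the lock later used to serialise XDP transmitters. A hedged sketch of that shape with made-up adapter fields:

```c
#include <linux/spinlock.h>
#include <linux/types.h>

#define MY_MAX_QUEUES 8

struct my_tx_ring {
	u32		ring_size;
	u16		sgl_size;
	spinlock_t	xdp_tx_lock;
};

struct my_adapter {
	struct my_tx_ring	tx_ring[MY_MAX_QUEUES];
	u32			requested_tx_ring_size;
	u16			max_tx_sgl_size;
	int			num_io_queues;
};

static void my_init_io_rings(struct my_adapter *adapter)
{
	int i;

	for (i = 0; i < adapter->num_io_queues; i++) {
		struct my_tx_ring *txr = &adapter->tx_ring[i];

		/* Per-ring copies of adapter/device-wide limits. */
		txr->ring_size = adapter->requested_tx_ring_size;
		txr->sgl_size = adapter->max_tx_sgl_size;
		spin_lock_init(&txr->xdp_tx_lock);
	}
}
```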
