Lines matching refs:ex
356 struct ring_desc_ex *ex; member
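The references below only ever touch four 32-bit fields of this descriptor: bufhigh, buflow, txvlan and flaglen. A minimal model of that layout, useful for reading the snippets that follow; the field order is an assumption, and the driver declares the fields as little-endian __le32 rather than plain uint32_t:

#include <stdint.h>

/* Illustrative stand-in for the extended descriptor referenced below.
 * Field order is assumed; the driver uses __le32 for each field. */
struct ring_desc_ex {
	uint32_t bufhigh;  /* upper 32 bits of the buffer DMA address */
	uint32_t buflow;   /* lower 32 bits of the buffer DMA address */
	uint32_t txvlan;   /* VLAN tag inserted on transmit */
	uint32_t flaglen;  /* ownership/status flags plus buffer length */
};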
1030 if (np->rx_ring.ex) in free_rings()
1035 np->rx_ring.ex, np->ring_addr); in free_rings()
1854 less_rx = np->get_rx.ex; in nv_alloc_rx_optimized()
1855 if (less_rx-- == np->rx_ring.ex) in nv_alloc_rx_optimized()
1856 less_rx = np->last_rx.ex; in nv_alloc_rx_optimized()
1858 while (np->put_rx.ex != less_rx) { in nv_alloc_rx_optimized()
1872 np->put_rx.ex->bufhigh = cpu_to_le32(dma_high(np->put_rx_ctx->dma)); in nv_alloc_rx_optimized()
1873 np->put_rx.ex->buflow = cpu_to_le32(dma_low(np->put_rx_ctx->dma)); in nv_alloc_rx_optimized()
1875 np->put_rx.ex->flaglen = cpu_to_le32(np->rx_buf_sz | NV_RX2_AVAIL); in nv_alloc_rx_optimized()
1876 if (unlikely(np->put_rx.ex++ == np->last_rx.ex)) in nv_alloc_rx_optimized()
1877 np->put_rx.ex = np->rx_ring.ex; in nv_alloc_rx_optimized()
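The nv_alloc_rx_optimized() lines above show the RX refill pattern: stop one slot short of the consumer (less_rx), split the 64-bit DMA address into bufhigh/buflow, hand the slot to the NIC by setting NV_RX2_AVAIL in flaglen, and wrap put_rx at the end of the ring. A standalone, index-based sketch of that loop, assuming an illustrative alloc_buf callback; the NV_RX2_AVAIL value is assumed and cpu_to_le32 conversions are omitted:

#include <stdint.h>

struct ring_desc_ex { uint32_t bufhigh, buflow, txvlan, flaglen; };

#define NV_RX2_AVAIL 0x80000000u  /* flag value assumed for illustration */

static uint32_t dma_high(uint64_t a) { return (uint32_t)(a >> 32); }
static uint32_t dma_low(uint64_t a)  { return (uint32_t)a; }

/* Refill free RX slots, stopping one slot short of the consumer so the
 * ring never appears completely full to the hardware. */
static void refill_rx(struct ring_desc_ex *ring, int size,
		      int *put_rx, int get_rx,
		      uint64_t (*alloc_buf)(uint32_t len), uint32_t buf_sz)
{
	int stop = (get_rx == 0) ? size - 1 : get_rx - 1;

	while (*put_rx != stop) {
		uint64_t dma = alloc_buf(buf_sz);

		if (!dma)
			break;                           /* allocation failed, retry later */
		ring[*put_rx].bufhigh = dma_high(dma);
		ring[*put_rx].buflow  = dma_low(dma);
		ring[*put_rx].flaglen = buf_sz | NV_RX2_AVAIL; /* hand slot to the NIC */
		*put_rx = (*put_rx + 1 == size) ? 0 : *put_rx + 1;
	}
}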
1911 np->last_rx.ex = &np->rx_ring.ex[np->rx_ring_size-1]; in nv_init_rx()
1921 np->rx_ring.ex[i].flaglen = 0; in nv_init_rx()
1922 np->rx_ring.ex[i].txvlan = 0; in nv_init_rx()
1923 np->rx_ring.ex[i].bufhigh = 0; in nv_init_rx()
1924 np->rx_ring.ex[i].buflow = 0; in nv_init_rx()
1942 np->last_tx.ex = &np->tx_ring.ex[np->tx_ring_size-1]; in nv_init_tx()
1957 np->tx_ring.ex[i].flaglen = 0; in nv_init_tx()
1958 np->tx_ring.ex[i].txvlan = 0; in nv_init_tx()
1959 np->tx_ring.ex[i].bufhigh = 0; in nv_init_tx()
1960 np->tx_ring.ex[i].buflow = 0; in nv_init_tx()
2020 np->tx_ring.ex[i].flaglen = 0; in nv_drain_tx()
2021 np->tx_ring.ex[i].txvlan = 0; in nv_drain_tx()
2022 np->tx_ring.ex[i].bufhigh = 0; in nv_drain_tx()
2023 np->tx_ring.ex[i].buflow = 0; in nv_drain_tx()
2051 np->rx_ring.ex[i].flaglen = 0; in nv_drain_rx()
2052 np->rx_ring.ex[i].txvlan = 0; in nv_drain_rx()
2053 np->rx_ring.ex[i].bufhigh = 0; in nv_drain_rx()
2054 np->rx_ring.ex[i].buflow = 0; in nv_drain_rx()
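nv_init_rx()/nv_init_tx() and nv_drain_rx()/nv_drain_tx() all end up clearing the same four fields of every descriptor, so no slot looks valid or available to the hardware, and the init paths record the final ring entry (last_rx/last_tx) as the wrap point. A sketch of that shared clearing loop:

#include <stdint.h>

struct ring_desc_ex { uint32_t bufhigh, buflow, txvlan, flaglen; };

/* Clear every descriptor field so the NIC owns nothing in the ring. */
static void clear_ring(struct ring_desc_ex *ring, int size)
{
	for (int i = 0; i < size; i++) {
		ring[i].flaglen = 0;
		ring[i].txvlan  = 0;
		ring[i].bufhigh = 0;
		ring[i].buflow  = 0;
	}
}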
2410 start_tx = put_tx = np->put_tx.ex; in nv_start_xmit_optimized()
2440 if (unlikely(put_tx++ == np->last_tx.ex)) in nv_start_xmit_optimized()
2441 put_tx = np->tx_ring.ex; in nv_start_xmit_optimized()
2489 if (unlikely(put_tx++ == np->last_tx.ex)) in nv_start_xmit_optimized()
2490 put_tx = np->tx_ring.ex; in nv_start_xmit_optimized()
2496 if (unlikely(put_tx == np->tx_ring.ex)) in nv_start_xmit_optimized()
2497 prev_tx = np->last_tx.ex; in nv_start_xmit_optimized()
2554 np->put_tx.ex = put_tx; in nv_start_xmit_optimized()
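nv_start_xmit_optimized() works on a local copy of the put pointer (start_tx/put_tx), fills one descriptor per buffer fragment, wraps at last_tx, and only writes np->put_tx.ex back once the whole frame has been described. A simplified, index-based sketch of that pattern; the flag value, the per-fragment flaglen encoding and the VLAN handling are simplified assumptions:

#include <stdint.h>

struct ring_desc_ex { uint32_t bufhigh, buflow, txvlan, flaglen; };

#define NV_TX2_VALID 0x80000000u  /* flag value assumed for illustration */

struct frag { uint64_t dma; uint32_t len; };

/* Describe one frame (nfrags buffers) starting at *put_tx, publishing
 * the advanced put pointer only after every fragment is in place. */
static void xmit_frame(struct ring_desc_ex *ring, int size, int *put_tx,
		       const struct frag *frags, int nfrags, uint32_t vlan)
{
	int pt = *put_tx;                        /* local copy, like start_tx/put_tx */

	for (int i = 0; i < nfrags; i++) {
		ring[pt].bufhigh = (uint32_t)(frags[i].dma >> 32);
		ring[pt].buflow  = (uint32_t)frags[i].dma;
		ring[pt].txvlan  = (i == 0) ? vlan : 0;
		ring[pt].flaglen = (frags[i].len - 1) | NV_TX2_VALID;
		pt = (pt + 1 == size) ? 0 : pt + 1; /* wrap at the last descriptor */
	}
	*put_tx = pt;                            /* publish the completed frame */
}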
2667 struct ring_desc_ex *orig_get_tx = np->get_tx.ex; in nv_tx_done_optimized()
2670 while ((np->get_tx.ex != np->put_tx.ex) && in nv_tx_done_optimized()
2671 !((flags = le32_to_cpu(np->get_tx.ex->flaglen)) & NV_TX2_VALID) && in nv_tx_done_optimized()
2704 if (unlikely(np->get_tx.ex++ == np->last_tx.ex)) in nv_tx_done_optimized()
2705 np->get_tx.ex = np->tx_ring.ex; in nv_tx_done_optimized()
2712 if (unlikely((np->tx_stop == 1) && (np->get_tx.ex != orig_get_tx))) { in nv_tx_done_optimized()
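nv_tx_done_optimized() reclaims completed slots by walking from get_tx toward put_tx and stopping at the first descriptor the NIC still owns (NV_TX2_VALID still set); the orig_get_tx comparison afterwards decides whether a stopped queue can be woken. A sketch of that reclaim walk, with the flag value assumed:

#include <stdint.h>

struct ring_desc_ex { uint32_t bufhigh, buflow, txvlan, flaglen; };

#define NV_TX2_VALID 0x80000000u  /* flag value assumed for illustration */

/* Walk completed TX descriptors; return how many were reclaimed so the
 * caller can wake a stopped queue if progress was made. */
static int reap_tx(const struct ring_desc_ex *ring, int size,
		   int *get_tx, int put_tx)
{
	int reclaimed = 0;

	while (*get_tx != put_tx &&
	       !(ring[*get_tx].flaglen & NV_TX2_VALID)) {
		/* unmap the buffer / free the skb for this slot here */
		*get_tx = (*get_tx + 1 == size) ? 0 : *get_tx + 1;
		reclaimed++;
	}
	return reclaimed;
}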
2775 le32_to_cpu(np->tx_ring.ex[i].bufhigh), in nv_tx_timeout()
2776 le32_to_cpu(np->tx_ring.ex[i].buflow), in nv_tx_timeout()
2777 le32_to_cpu(np->tx_ring.ex[i].flaglen), in nv_tx_timeout()
2778 le32_to_cpu(np->tx_ring.ex[i+1].bufhigh), in nv_tx_timeout()
2779 le32_to_cpu(np->tx_ring.ex[i+1].buflow), in nv_tx_timeout()
2780 le32_to_cpu(np->tx_ring.ex[i+1].flaglen), in nv_tx_timeout()
2781 le32_to_cpu(np->tx_ring.ex[i+2].bufhigh), in nv_tx_timeout()
2782 le32_to_cpu(np->tx_ring.ex[i+2].buflow), in nv_tx_timeout()
2783 le32_to_cpu(np->tx_ring.ex[i+2].flaglen), in nv_tx_timeout()
2784 le32_to_cpu(np->tx_ring.ex[i+3].bufhigh), in nv_tx_timeout()
2785 le32_to_cpu(np->tx_ring.ex[i+3].buflow), in nv_tx_timeout()
2786 le32_to_cpu(np->tx_ring.ex[i+3].flaglen)); in nv_tx_timeout()
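The nv_tx_timeout() lines above are a diagnostic dump of the TX ring, four descriptors per output line, printing bufhigh/buflow/flaglen for each. A userspace sketch of the same dump; the format string is illustrative and the le32_to_cpu conversion is omitted:

#include <stdint.h>
#include <stdio.h>

struct ring_desc_ex { uint32_t bufhigh, buflow, txvlan, flaglen; };

/* Print four descriptors per line: bufhigh, buflow, flaglen for each. */
static void dump_tx_ring(const struct ring_desc_ex *ring, int size)
{
	for (int i = 0; i + 3 < size; i += 4)
		printf("%03x: %08x %08x %08x // %08x %08x %08x // "
		       "%08x %08x %08x // %08x %08x %08x\n", i,
		       ring[i].bufhigh,   ring[i].buflow,   ring[i].flaglen,
		       ring[i+1].bufhigh, ring[i+1].buflow, ring[i+1].flaglen,
		       ring[i+2].bufhigh, ring[i+2].buflow, ring[i+2].flaglen,
		       ring[i+3].bufhigh, ring[i+3].buflow, ring[i+3].flaglen);
}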
2807 put_tx.ex = np->tx_change_owner->first_tx_desc; in nv_tx_timeout()
2987 while ((np->get_rx.ex != np->put_rx.ex) && in nv_rx_process_optimized()
2988 !((flags = le32_to_cpu(np->get_rx.ex->flaglen)) & NV_RX2_AVAIL) && in nv_rx_process_optimized()
3034 vlanflags = le32_to_cpu(np->get_rx.ex->buflow); in nv_rx_process_optimized()
3056 if (unlikely(np->get_rx.ex++ == np->last_rx.ex)) in nv_rx_process_optimized()
3057 np->get_rx.ex = np->rx_ring.ex; in nv_rx_process_optimized()
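nv_rx_process_optimized() mirrors the TX reclaim loop on the receive side: consume descriptors until the producer is reached or a slot the NIC still owns (NV_RX2_AVAIL still set) is found, read the VLAN information back out of buflow, and wrap get_rx at the end of the ring. A sketch with packet delivery reduced to a callback and the flag value assumed:

#include <stdint.h>

struct ring_desc_ex { uint32_t bufhigh, buflow, txvlan, flaglen; };

#define NV_RX2_AVAIL 0x80000000u  /* flag value assumed for illustration */

/* Consume filled RX descriptors up to the producer, stopping at the
 * first slot the NIC still owns; deliver() stands in for building and
 * passing up the skb. VLAN data comes back in buflow on this format. */
static int process_rx(const struct ring_desc_ex *ring, int size, int *get_rx,
		      int put_rx, int budget,
		      void (*deliver)(uint32_t flaglen, uint32_t vlanflags))
{
	int done = 0;

	while (*get_rx != put_rx &&
	       !(ring[*get_rx].flaglen & NV_RX2_AVAIL) &&
	       done < budget) {
		deliver(ring[*get_rx].flaglen, ring[*get_rx].buflow);
		*get_rx = (*get_rx + 1 == size) ? 0 : *get_rx + 1;
		done++;
	}
	return done;
}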
4741 np->rx_ring.ex = (struct ring_desc_ex *)rxtx_ring; in nv_set_ringparam()
4742 np->tx_ring.ex = &np->rx_ring.ex[np->rx_ring_size]; in nv_set_ringparam()
5186 np->tx_ring.ex[0].bufhigh = cpu_to_le32(dma_high(test_dma_addr)); in nv_loopback_test()
5187 np->tx_ring.ex[0].buflow = cpu_to_le32(dma_low(test_dma_addr)); in nv_loopback_test()
5188 np->tx_ring.ex[0].flaglen = cpu_to_le32((pkt_len-1) | np->tx_flags | tx_flags_extra); in nv_loopback_test()
5201 flags = le32_to_cpu(np->rx_ring.ex[0].flaglen); in nv_loopback_test()
5202 len = nv_descr_getlength_ex(&np->rx_ring.ex[0], np->desc_ver); in nv_loopback_test()
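The loopback test describes one test buffer directly in tx_ring.ex[0] and later recovers the received length from rx_ring.ex[0].flaglen via nv_descr_getlength_ex(). A sketch of those two steps; LEN_MASK is a placeholder rather than the driver's actual length mask, and endianness conversion is omitted:

#include <stdint.h>

struct ring_desc_ex { uint32_t bufhigh, buflow, txvlan, flaglen; };

#define LEN_MASK 0x00003fffu   /* placeholder for the driver's length mask */

/* Describe the single loopback test buffer in TX slot 0. tx_flags is
 * assumed to already carry the descriptor-valid bit, as in the driver. */
static void setup_loopback_tx(struct ring_desc_ex *tx, uint64_t dma,
			      uint32_t pkt_len, uint32_t tx_flags)
{
	tx[0].bufhigh = (uint32_t)(dma >> 32);
	tx[0].buflow  = (uint32_t)dma;
	tx[0].flaglen = (pkt_len - 1) | tx_flags;
}

/* Recover the received length from RX slot 0 after the loopback. */
static uint32_t loopback_rx_len(const struct ring_desc_ex *rx)
{
	return rx[0].flaglen & LEN_MASK;
}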
5848 np->rx_ring.ex = dma_alloc_coherent(&pci_dev->dev, in nv_probe()
5853 if (!np->rx_ring.ex) in nv_probe()
5855 np->tx_ring.ex = &np->rx_ring.ex[np->rx_ring_size]; in nv_probe()
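nv_probe() (like nv_set_ringparam() above) carves both rings out of a single coherent DMA allocation: rx_ring.ex points at the start and tx_ring.ex begins right after the last RX descriptor, which is also why free_rings() only has to check and free rx_ring.ex. A kernel-context sketch of that allocate/split/free lifecycle; dma_alloc_coherent()/dma_free_coherent() are the real kernel APIs, while the surrounding function and parameter names are illustrative:

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/gfp.h>
#include <linux/types.h>

struct ring_desc_ex { __le32 bufhigh, buflow, txvlan, flaglen; };

/* One coherent block holds rx_ring_size RX descriptors followed by
 * tx_ring_size TX descriptors; only the base pointer is ever freed. */
static int alloc_rxtx_rings(struct device *dev, int rx_ring_size, int tx_ring_size,
			    struct ring_desc_ex **rx_ring,
			    struct ring_desc_ex **tx_ring, dma_addr_t *ring_addr)
{
	size_t sz = sizeof(struct ring_desc_ex) * (rx_ring_size + tx_ring_size);

	*rx_ring = dma_alloc_coherent(dev, sz, ring_addr, GFP_KERNEL);
	if (!*rx_ring)
		return -ENOMEM;
	*tx_ring = &(*rx_ring)[rx_ring_size];   /* TX descriptors follow RX */
	return 0;
}

static void free_rxtx_rings(struct device *dev, int rx_ring_size, int tx_ring_size,
			    struct ring_desc_ex *rx_ring, dma_addr_t ring_addr)
{
	if (rx_ring)
		dma_free_coherent(dev,
				  sizeof(struct ring_desc_ex) * (rx_ring_size + tx_ring_size),
				  rx_ring, ring_addr);
}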