Lines Matching refs:dma_conf
77 #define STMMAC_TX_THRESH(x) ((x)->dma_conf.dma_tx_size / 4)
78 #define STMMAC_RX_THRESH(x) ((x)->dma_conf.dma_rx_size / 4)
237 rx_q = &priv->dma_conf.rx_queue[queue]; in stmmac_disable_all_queues()
363 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[queue]; in stmmac_tx_avail()
369 avail = priv->dma_conf.dma_tx_size - tx_q->cur_tx + tx_q->dirty_tx - 1; in stmmac_tx_avail()
381 struct stmmac_rx_queue *rx_q = &priv->dma_conf.rx_queue[queue]; in stmmac_rx_dirty()
387 dirty = priv->dma_conf.dma_rx_size - rx_q->dirty_rx + rx_q->cur_rx; in stmmac_rx_dirty()
415 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[queue]; in stmmac_enable_eee_mode()
1246 struct stmmac_dma_conf *dma_conf) in stmmac_display_rx_rings() argument
1255 struct stmmac_rx_queue *rx_q = &dma_conf->rx_queue[queue]; in stmmac_display_rx_rings()
1268 stmmac_display_ring(priv, head_rx, dma_conf->dma_rx_size, true, in stmmac_display_rx_rings()
1274 struct stmmac_dma_conf *dma_conf) in stmmac_display_tx_rings() argument
1283 struct stmmac_tx_queue *tx_q = &dma_conf->tx_queue[queue]; in stmmac_display_tx_rings()
1298 stmmac_display_ring(priv, head_tx, dma_conf->dma_tx_size, false, in stmmac_display_tx_rings()
1304 struct stmmac_dma_conf *dma_conf) in stmmac_display_rings() argument
1307 stmmac_display_rx_rings(priv, dma_conf); in stmmac_display_rings()
1310 stmmac_display_tx_rings(priv, dma_conf); in stmmac_display_rings()
1340 struct stmmac_dma_conf *dma_conf, in stmmac_clear_rx_descriptors() argument
1343 struct stmmac_rx_queue *rx_q = &dma_conf->rx_queue[queue]; in stmmac_clear_rx_descriptors()
1347 for (i = 0; i < dma_conf->dma_rx_size; i++) in stmmac_clear_rx_descriptors()
1351 (i == dma_conf->dma_rx_size - 1), in stmmac_clear_rx_descriptors()
1352 dma_conf->dma_buf_sz); in stmmac_clear_rx_descriptors()
1356 (i == dma_conf->dma_rx_size - 1), in stmmac_clear_rx_descriptors()
1357 dma_conf->dma_buf_sz); in stmmac_clear_rx_descriptors()
1369 struct stmmac_dma_conf *dma_conf, in stmmac_clear_tx_descriptors() argument
1372 struct stmmac_tx_queue *tx_q = &dma_conf->tx_queue[queue]; in stmmac_clear_tx_descriptors()
1376 for (i = 0; i < dma_conf->dma_tx_size; i++) { in stmmac_clear_tx_descriptors()
1377 int last = (i == (dma_conf->dma_tx_size - 1)); in stmmac_clear_tx_descriptors()
1399 struct stmmac_dma_conf *dma_conf) in stmmac_clear_descriptors() argument
1407 stmmac_clear_rx_descriptors(priv, dma_conf, queue); in stmmac_clear_descriptors()
1411 stmmac_clear_tx_descriptors(priv, dma_conf, queue); in stmmac_clear_descriptors()
1426 struct stmmac_dma_conf *dma_conf, in stmmac_init_rx_buffers() argument
1430 struct stmmac_rx_queue *rx_q = &dma_conf->rx_queue[queue]; in stmmac_init_rx_buffers()
1459 if (dma_conf->dma_buf_sz == BUF_SIZE_16KiB) in stmmac_init_rx_buffers()
1494 struct stmmac_dma_conf *dma_conf, in stmmac_free_tx_buffer() argument
1497 struct stmmac_tx_queue *tx_q = &dma_conf->tx_queue[queue]; in stmmac_free_tx_buffer()
1540 struct stmmac_dma_conf *dma_conf, in dma_free_rx_skbufs() argument
1543 struct stmmac_rx_queue *rx_q = &dma_conf->rx_queue[queue]; in dma_free_rx_skbufs()
1546 for (i = 0; i < dma_conf->dma_rx_size; i++) in dma_free_rx_skbufs()
1551 struct stmmac_dma_conf *dma_conf, in stmmac_alloc_rx_buffers() argument
1554 struct stmmac_rx_queue *rx_q = &dma_conf->rx_queue[queue]; in stmmac_alloc_rx_buffers()
1557 for (i = 0; i < dma_conf->dma_rx_size; i++) { in stmmac_alloc_rx_buffers()
1566 ret = stmmac_init_rx_buffers(priv, dma_conf, p, i, flags, in stmmac_alloc_rx_buffers()
1584 struct stmmac_dma_conf *dma_conf, in dma_free_rx_xskbufs() argument
1587 struct stmmac_rx_queue *rx_q = &dma_conf->rx_queue[queue]; in dma_free_rx_xskbufs()
1590 for (i = 0; i < dma_conf->dma_rx_size; i++) { in dma_free_rx_xskbufs()
1602 struct stmmac_dma_conf *dma_conf, in stmmac_alloc_rx_buffers_zc() argument
1605 struct stmmac_rx_queue *rx_q = &dma_conf->rx_queue[queue]; in stmmac_alloc_rx_buffers_zc()
1608 for (i = 0; i < dma_conf->dma_rx_size; i++) { in stmmac_alloc_rx_buffers_zc()
1651 struct stmmac_dma_conf *dma_conf, in __init_dma_rx_desc_rings() argument
1654 struct stmmac_rx_queue *rx_q = &dma_conf->rx_queue[queue]; in __init_dma_rx_desc_rings()
1661 stmmac_clear_rx_descriptors(priv, dma_conf, queue); in __init_dma_rx_desc_rings()
1688 stmmac_alloc_rx_buffers_zc(priv, dma_conf, queue); in __init_dma_rx_desc_rings()
1690 ret = stmmac_alloc_rx_buffers(priv, dma_conf, queue, flags); in __init_dma_rx_desc_rings()
1700 dma_conf->dma_rx_size, 1); in __init_dma_rx_desc_rings()
1704 dma_conf->dma_rx_size, 0); in __init_dma_rx_desc_rings()
1711 struct stmmac_dma_conf *dma_conf, in init_dma_rx_desc_rings() argument
1724 ret = __init_dma_rx_desc_rings(priv, dma_conf, queue, flags); in init_dma_rx_desc_rings()
1733 struct stmmac_rx_queue *rx_q = &dma_conf->rx_queue[queue]; in init_dma_rx_desc_rings()
1736 dma_free_rx_xskbufs(priv, dma_conf, queue); in init_dma_rx_desc_rings()
1738 dma_free_rx_skbufs(priv, dma_conf, queue); in init_dma_rx_desc_rings()
1759 struct stmmac_dma_conf *dma_conf, in __init_dma_tx_desc_rings() argument
1762 struct stmmac_tx_queue *tx_q = &dma_conf->tx_queue[queue]; in __init_dma_tx_desc_rings()
1774 dma_conf->dma_tx_size, 1); in __init_dma_tx_desc_rings()
1778 dma_conf->dma_tx_size, 0); in __init_dma_tx_desc_rings()
1783 for (i = 0; i < dma_conf->dma_tx_size; i++) { in __init_dma_tx_desc_rings()
1806 struct stmmac_dma_conf *dma_conf) in init_dma_tx_desc_rings() argument
1815 __init_dma_tx_desc_rings(priv, dma_conf, queue); in init_dma_tx_desc_rings()
1830 struct stmmac_dma_conf *dma_conf, in init_dma_desc_rings() argument
1836 ret = init_dma_rx_desc_rings(dev, dma_conf, flags); in init_dma_desc_rings()
1840 ret = init_dma_tx_desc_rings(dev, dma_conf); in init_dma_desc_rings()
1842 stmmac_clear_descriptors(priv, dma_conf); in init_dma_desc_rings()
1845 stmmac_display_rings(priv, dma_conf); in init_dma_desc_rings()
1857 struct stmmac_dma_conf *dma_conf, in dma_free_tx_skbufs() argument
1860 struct stmmac_tx_queue *tx_q = &dma_conf->tx_queue[queue]; in dma_free_tx_skbufs()
1865 for (i = 0; i < dma_conf->dma_tx_size; i++) in dma_free_tx_skbufs()
1866 stmmac_free_tx_buffer(priv, dma_conf, queue, i); in dma_free_tx_skbufs()
1885 dma_free_tx_skbufs(priv, &priv->dma_conf, queue); in stmmac_free_tx_skbufs()
1895 struct stmmac_dma_conf *dma_conf, in __free_dma_rx_desc_resources() argument
1898 struct stmmac_rx_queue *rx_q = &dma_conf->rx_queue[queue]; in __free_dma_rx_desc_resources()
1902 dma_free_rx_xskbufs(priv, dma_conf, queue); in __free_dma_rx_desc_resources()
1904 dma_free_rx_skbufs(priv, dma_conf, queue); in __free_dma_rx_desc_resources()
1911 dma_free_coherent(priv->device, dma_conf->dma_rx_size * in __free_dma_rx_desc_resources()
1915 dma_free_coherent(priv->device, dma_conf->dma_rx_size * in __free_dma_rx_desc_resources()
1928 struct stmmac_dma_conf *dma_conf) in free_dma_rx_desc_resources() argument
1935 __free_dma_rx_desc_resources(priv, dma_conf, queue); in free_dma_rx_desc_resources()
1945 struct stmmac_dma_conf *dma_conf, in __free_dma_tx_desc_resources() argument
1948 struct stmmac_tx_queue *tx_q = &dma_conf->tx_queue[queue]; in __free_dma_tx_desc_resources()
1953 dma_free_tx_skbufs(priv, dma_conf, queue); in __free_dma_tx_desc_resources()
1966 size *= dma_conf->dma_tx_size; in __free_dma_tx_desc_resources()
1975 struct stmmac_dma_conf *dma_conf) in free_dma_tx_desc_resources() argument
1982 __free_dma_tx_desc_resources(priv, dma_conf, queue); in free_dma_tx_desc_resources()
1996 struct stmmac_dma_conf *dma_conf, in __alloc_dma_rx_desc_resources() argument
1999 struct stmmac_rx_queue *rx_q = &dma_conf->rx_queue[queue]; in __alloc_dma_rx_desc_resources()
2011 pp_params.pool_size = dma_conf->dma_rx_size; in __alloc_dma_rx_desc_resources()
2012 num_pages = DIV_ROUND_UP(dma_conf->dma_buf_sz, PAGE_SIZE); in __alloc_dma_rx_desc_resources()
2027 rx_q->buf_pool = kcalloc(dma_conf->dma_rx_size, in __alloc_dma_rx_desc_resources()
2035 dma_conf->dma_rx_size * in __alloc_dma_rx_desc_resources()
2044 dma_conf->dma_rx_size * in __alloc_dma_rx_desc_resources()
2070 struct stmmac_dma_conf *dma_conf) in alloc_dma_rx_desc_resources() argument
2078 ret = __alloc_dma_rx_desc_resources(priv, dma_conf, queue); in alloc_dma_rx_desc_resources()
2086 free_dma_rx_desc_resources(priv, dma_conf); in alloc_dma_rx_desc_resources()
2102 struct stmmac_dma_conf *dma_conf, in __alloc_dma_tx_desc_resources() argument
2105 struct stmmac_tx_queue *tx_q = &dma_conf->tx_queue[queue]; in __alloc_dma_tx_desc_resources()
2112 tx_q->tx_skbuff_dma = kcalloc(dma_conf->dma_tx_size, in __alloc_dma_tx_desc_resources()
2118 tx_q->tx_skbuff = kcalloc(dma_conf->dma_tx_size, in __alloc_dma_tx_desc_resources()
2131 size *= dma_conf->dma_tx_size; in __alloc_dma_tx_desc_resources()
2149 struct stmmac_dma_conf *dma_conf) in alloc_dma_tx_desc_resources() argument
2157 ret = __alloc_dma_tx_desc_resources(priv, dma_conf, queue); in alloc_dma_tx_desc_resources()
2165 free_dma_tx_desc_resources(priv, dma_conf); in alloc_dma_tx_desc_resources()
2179 struct stmmac_dma_conf *dma_conf) in alloc_dma_desc_resources() argument
2182 int ret = alloc_dma_rx_desc_resources(priv, dma_conf); in alloc_dma_desc_resources()
2187 ret = alloc_dma_tx_desc_resources(priv, dma_conf); in alloc_dma_desc_resources()
2198 struct stmmac_dma_conf *dma_conf) in free_dma_desc_resources() argument
2201 free_dma_tx_desc_resources(priv, dma_conf); in free_dma_desc_resources()
2206 free_dma_rx_desc_resources(priv, dma_conf); in free_dma_desc_resources()
2380 struct stmmac_rx_queue *rx_q = &priv->dma_conf.rx_queue[chan]; in stmmac_dma_operation_mode()
2395 priv->dma_conf.dma_buf_sz, in stmmac_dma_operation_mode()
2411 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[queue]; in stmmac_xdp_xmit_zc()
2486 tx_q->cur_tx = STMMAC_GET_ENTRY(tx_q->cur_tx, priv->dma_conf.dma_tx_size); in stmmac_xdp_xmit_zc()
2527 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[queue]; in stmmac_tx_clean()
2540 while ((entry != tx_q->cur_tx) && count < priv->dma_conf.dma_tx_size) { in stmmac_tx_clean()
2642 entry = STMMAC_GET_ENTRY(entry, priv->dma_conf.dma_tx_size); in stmmac_tx_clean()
2707 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[chan]; in stmmac_tx_err()
2712 dma_free_tx_skbufs(priv, &priv->dma_conf, chan); in stmmac_tx_err()
2713 stmmac_clear_tx_descriptors(priv, &priv->dma_conf, chan); in stmmac_tx_err()
2774 struct stmmac_rx_queue *rx_q = &priv->dma_conf.rx_queue[chan]; in stmmac_napi_check()
2775 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[chan]; in stmmac_napi_check()
2941 rx_q = &priv->dma_conf.rx_queue[chan]; in stmmac_init_dma_engine()
2955 tx_q = &priv->dma_conf.tx_queue[chan]; in stmmac_init_dma_engine()
2970 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[queue]; in stmmac_tx_timer_arm()
3020 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[chan]; in stmmac_init_coalesce()
3042 (priv->dma_conf.dma_tx_size - 1), chan); in stmmac_set_rings_length()
3047 (priv->dma_conf.dma_rx_size - 1), chan); in stmmac_set_rings_length()
3382 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[chan]; in stmmac_hw_setup()
3404 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[chan]; in stmmac_hw_setup()
3448 free_irq(priv->tx_irq[j], &priv->dma_conf.tx_queue[j]); in stmmac_free_irq()
3457 free_irq(priv->rx_irq[j], &priv->dma_conf.rx_queue[j]); in stmmac_free_irq()
3592 0, int_name, &priv->dma_conf.rx_queue[i]); in stmmac_request_irq_multi_msi()
3617 0, int_name, &priv->dma_conf.tx_queue[i]); in stmmac_request_irq_multi_msi()
3715 struct stmmac_dma_conf *dma_conf; in stmmac_setup_dma_desc() local
3718 dma_conf = kzalloc(sizeof(*dma_conf), GFP_KERNEL); in stmmac_setup_dma_desc()
3719 if (!dma_conf) { in stmmac_setup_dma_desc()
3732 dma_conf->dma_buf_sz = bfsize; in stmmac_setup_dma_desc()
3736 dma_conf->dma_tx_size = priv->dma_conf.dma_tx_size; in stmmac_setup_dma_desc()
3737 dma_conf->dma_rx_size = priv->dma_conf.dma_rx_size; in stmmac_setup_dma_desc()
3739 if (!dma_conf->dma_tx_size) in stmmac_setup_dma_desc()
3740 dma_conf->dma_tx_size = DMA_DEFAULT_TX_SIZE; in stmmac_setup_dma_desc()
3741 if (!dma_conf->dma_rx_size) in stmmac_setup_dma_desc()
3742 dma_conf->dma_rx_size = DMA_DEFAULT_RX_SIZE; in stmmac_setup_dma_desc()
3746 struct stmmac_tx_queue *tx_q = &dma_conf->tx_queue[chan]; in stmmac_setup_dma_desc()
3753 ret = alloc_dma_desc_resources(priv, dma_conf); in stmmac_setup_dma_desc()
3760 ret = init_dma_desc_rings(priv->dev, dma_conf, GFP_KERNEL); in stmmac_setup_dma_desc()
3767 return dma_conf; in stmmac_setup_dma_desc()
3770 free_dma_desc_resources(priv, dma_conf); in stmmac_setup_dma_desc()
3772 kfree(dma_conf); in stmmac_setup_dma_desc()
3787 struct stmmac_dma_conf *dma_conf) in __stmmac_open() argument
3817 buf_sz = dma_conf->dma_buf_sz; in __stmmac_open()
3818 memcpy(&priv->dma_conf, dma_conf, sizeof(*dma_conf)); in __stmmac_open()
3857 hrtimer_cancel(&priv->dma_conf.tx_queue[chan].txtimer); in __stmmac_open()
3861 free_dma_desc_resources(priv, &priv->dma_conf); in __stmmac_open()
3871 struct stmmac_dma_conf *dma_conf; in stmmac_open() local
3874 dma_conf = stmmac_setup_dma_desc(priv, dev->mtu); in stmmac_open()
3875 if (IS_ERR(dma_conf)) in stmmac_open()
3876 return PTR_ERR(dma_conf); in stmmac_open()
3878 ret = __stmmac_open(dev, dma_conf); in stmmac_open()
3879 kfree(dma_conf); in stmmac_open()
3913 hrtimer_cancel(&priv->dma_conf.tx_queue[chan].txtimer); in stmmac_release()
3929 free_dma_desc_resources(priv, &priv->dma_conf); in stmmac_release()
3977 tx_q->cur_tx = STMMAC_GET_ENTRY(tx_q->cur_tx, priv->dma_conf.dma_tx_size); in stmmac_vlan_insert()
3995 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[queue]; in stmmac_tso_allocator()
4006 priv->dma_conf.dma_tx_size); in stmmac_tso_allocator()
4034 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[queue]; in stmmac_flush_tx_descriptors()
4096 tx_q = &priv->dma_conf.tx_queue[queue]; in stmmac_tso_xmit()
4136 priv->dma_conf.dma_tx_size); in stmmac_tso_xmit()
4248 tx_q->cur_tx = STMMAC_GET_ENTRY(tx_q->cur_tx, priv->dma_conf.dma_tx_size); in stmmac_tso_xmit()
4336 tx_q = &priv->dma_conf.tx_queue[queue]; in stmmac_xmit()
4399 entry = STMMAC_GET_ENTRY(entry, priv->dma_conf.dma_tx_size); in stmmac_xmit()
4470 entry = STMMAC_GET_ENTRY(entry, priv->dma_conf.dma_tx_size); in stmmac_xmit()
4585 struct stmmac_rx_queue *rx_q = &priv->dma_conf.rx_queue[queue]; in stmmac_rx_refill()
4639 entry = STMMAC_GET_ENTRY(entry, priv->dma_conf.dma_rx_size); in stmmac_rx_refill()
4667 return priv->dma_conf.dma_buf_sz; in stmmac_rx_buf1_len()
4672 return min_t(unsigned int, priv->dma_conf.dma_buf_sz, plen); in stmmac_rx_buf1_len()
4688 return priv->dma_conf.dma_buf_sz; in stmmac_rx_buf2_len()
4699 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[queue]; in stmmac_xdp_xmit_xdpf()
4762 entry = STMMAC_GET_ENTRY(entry, priv->dma_conf.dma_tx_size); in stmmac_xdp_xmit_xdpf()
4936 struct stmmac_rx_queue *rx_q = &priv->dma_conf.rx_queue[queue]; in stmmac_rx_refill_zc()
4979 entry = STMMAC_GET_ENTRY(entry, priv->dma_conf.dma_rx_size); in stmmac_rx_refill_zc()
4994 struct stmmac_rx_queue *rx_q = &priv->dma_conf.rx_queue[queue]; in stmmac_rx_zc()
5016 stmmac_display_ring(priv, rx_head, priv->dma_conf.dma_rx_size, true, in stmmac_rx_zc()
5063 priv->dma_conf.dma_rx_size); in stmmac_rx_zc()
5176 struct stmmac_rx_queue *rx_q = &priv->dma_conf.rx_queue[queue]; in stmmac_rx()
5189 buf_sz = DIV_ROUND_UP(priv->dma_conf.dma_buf_sz, PAGE_SIZE) * PAGE_SIZE; in stmmac_rx()
5203 stmmac_display_ring(priv, rx_head, priv->dma_conf.dma_rx_size, true, in stmmac_rx()
5247 priv->dma_conf.dma_rx_size); in stmmac_rx()
5374 priv->dma_conf.dma_buf_sz); in stmmac_rx()
5386 priv->dma_conf.dma_buf_sz); in stmmac_rx()
5570 struct stmmac_dma_conf *dma_conf; in stmmac_change_mtu() local
5593 dma_conf = stmmac_setup_dma_desc(priv, mtu); in stmmac_change_mtu()
5594 if (IS_ERR(dma_conf)) { in stmmac_change_mtu()
5597 return PTR_ERR(dma_conf); in stmmac_change_mtu()
5602 ret = __stmmac_open(dev, dma_conf); in stmmac_change_mtu()
5603 kfree(dma_conf); in stmmac_change_mtu()
5846 struct stmmac_dma_conf *dma_conf; in stmmac_msi_intr_tx() local
5851 dma_conf = container_of(tx_q, struct stmmac_dma_conf, tx_queue[chan]); in stmmac_msi_intr_tx()
5852 priv = container_of(dma_conf, struct stmmac_priv, dma_conf); in stmmac_msi_intr_tx()
5878 struct stmmac_dma_conf *dma_conf; in stmmac_msi_intr_rx() local
5882 dma_conf = container_of(rx_q, struct stmmac_dma_conf, rx_queue[chan]); in stmmac_msi_intr_rx()
5883 priv = container_of(dma_conf, struct stmmac_priv, dma_conf); in stmmac_msi_intr_rx()
5914 stmmac_msi_intr_rx(0, &priv->dma_conf.rx_queue[i]); in stmmac_poll_controller()
5917 stmmac_msi_intr_tx(0, &priv->dma_conf.tx_queue[i]); in stmmac_poll_controller()
6098 struct stmmac_rx_queue *rx_q = &priv->dma_conf.rx_queue[queue]; in stmmac_rings_status_show()
6105 priv->dma_conf.dma_rx_size, 1, seq, rx_q->dma_rx_phy); in stmmac_rings_status_show()
6109 priv->dma_conf.dma_rx_size, 0, seq, rx_q->dma_rx_phy); in stmmac_rings_status_show()
6114 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[queue]; in stmmac_rings_status_show()
6121 priv->dma_conf.dma_tx_size, 1, seq, tx_q->dma_tx_phy); in stmmac_rings_status_show()
6125 priv->dma_conf.dma_tx_size, 0, seq, tx_q->dma_tx_phy); in stmmac_rings_status_show()
6459 __free_dma_rx_desc_resources(priv, &priv->dma_conf, queue); in stmmac_disable_rx_queue()
6464 struct stmmac_rx_queue *rx_q = &priv->dma_conf.rx_queue[queue]; in stmmac_enable_rx_queue()
6470 ret = __alloc_dma_rx_desc_resources(priv, &priv->dma_conf, queue); in stmmac_enable_rx_queue()
6476 ret = __init_dma_rx_desc_rings(priv, &priv->dma_conf, queue, GFP_KERNEL); in stmmac_enable_rx_queue()
6478 __free_dma_rx_desc_resources(priv, &priv->dma_conf, queue); in stmmac_enable_rx_queue()
6484 stmmac_clear_rx_descriptors(priv, &priv->dma_conf, queue); in stmmac_enable_rx_queue()
6501 priv->dma_conf.dma_buf_sz, in stmmac_enable_rx_queue()
6522 __free_dma_tx_desc_resources(priv, &priv->dma_conf, queue); in stmmac_disable_tx_queue()
6527 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[queue]; in stmmac_enable_tx_queue()
6532 ret = __alloc_dma_tx_desc_resources(priv, &priv->dma_conf, queue); in stmmac_enable_tx_queue()
6538 ret = __init_dma_tx_desc_rings(priv, &priv->dma_conf, queue); in stmmac_enable_tx_queue()
6540 __free_dma_tx_desc_resources(priv, &priv->dma_conf, queue); in stmmac_enable_tx_queue()
6546 stmmac_clear_tx_descriptors(priv, &priv->dma_conf, queue); in stmmac_enable_tx_queue()
6577 hrtimer_cancel(&priv->dma_conf.tx_queue[chan].txtimer); in stmmac_xdp_release()
6586 free_dma_desc_resources(priv, &priv->dma_conf); in stmmac_xdp_release()
6611 ret = alloc_dma_desc_resources(priv, &priv->dma_conf); in stmmac_xdp_open()
6618 ret = init_dma_desc_rings(dev, &priv->dma_conf, GFP_KERNEL); in stmmac_xdp_open()
6636 rx_q = &priv->dma_conf.rx_queue[chan]; in stmmac_xdp_open()
6654 priv->dma_conf.dma_buf_sz, in stmmac_xdp_open()
6663 tx_q = &priv->dma_conf.tx_queue[chan]; in stmmac_xdp_open()
6696 hrtimer_cancel(&priv->dma_conf.tx_queue[chan].txtimer); in stmmac_xdp_open()
6700 free_dma_desc_resources(priv, &priv->dma_conf); in stmmac_xdp_open()
6723 rx_q = &priv->dma_conf.rx_queue[queue]; in stmmac_xsk_wakeup()
6724 tx_q = &priv->dma_conf.tx_queue[queue]; in stmmac_xsk_wakeup()
6977 priv->dma_conf.dma_rx_size = rx_size; in stmmac_reinit_ringparam()
6978 priv->dma_conf.dma_tx_size = tx_size; in stmmac_reinit_ringparam()
7418 hrtimer_cancel(&priv->dma_conf.tx_queue[chan].txtimer); in stmmac_suspend()
7469 struct stmmac_rx_queue *rx_q = &priv->dma_conf.rx_queue[queue]; in stmmac_reset_rx_queue()
7477 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[queue]; in stmmac_reset_tx_queue()
7560 stmmac_clear_descriptors(priv, &priv->dma_conf); in stmmac_resume()
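
Taken together, the references above touch only a handful of fields: dma_rx_size, dma_tx_size, dma_buf_sz, and the per-queue rx_queue[]/tx_queue[] arrays. As a rough sketch, reconstructed from these references rather than quoted from the driver headers (the MTL_MAX_RX_QUEUES/MTL_MAX_TX_QUEUES array bounds are an assumption based on the stmmac MTL queue limits), struct stmmac_dma_conf looks approximately like:

    struct stmmac_dma_conf {
            unsigned int dma_buf_sz;        /* RX buffer size (dma_conf->dma_buf_sz above) */

            /* RX rings, indexed per queue as in dma_conf->rx_queue[queue] */
            struct stmmac_rx_queue rx_queue[MTL_MAX_RX_QUEUES];
            unsigned int dma_rx_size;       /* RX descriptors per ring */

            /* TX rings, indexed per queue as in dma_conf->tx_queue[queue] */
            struct stmmac_tx_queue tx_queue[MTL_MAX_TX_QUEUES];
            unsigned int dma_tx_size;       /* TX descriptors per ring */
    };

Grouping the ring state this way is what lets stmmac_setup_dma_desc() (lines 3715-3772) build a complete replacement configuration off to the side, and lets __stmmac_open() adopt it with a single memcpy() into priv->dma_conf (line 3818) before the caller frees the temporary copy, as seen in stmmac_open() and stmmac_change_mtu().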