Searched refs: PKTSIZE_ALIGN (results 1 – 25 of 36), sorted by relevance

/u-boot/drivers/net/
ethoc.c
278 addr += PKTSIZE_ALIGN; in ethoc_init_ring()
291 addr += PKTSIZE_ALIGN; in ethoc_init_ring()
299 (ulong)net_rx_packets[i] + PKTSIZE_ALIGN); in ethoc_init_ring()
413 *packetp = priv->packet + entry * PKTSIZE_ALIGN; in ethoc_rx_common()
480 void *p = priv->packet + entry * PKTSIZE_ALIGN; in ethoc_send_common()
528 src = priv->packet + entry * PKTSIZE_ALIGN; in ethoc_free_pkt_common()
533 (ulong)src + PKTSIZE_ALIGN); in ethoc_free_pkt_common()
707 (1 + PKTBUFSRX) * PKTSIZE_ALIGN); in ethoc_probe()
mvgbe.c
403 p_rx_desc->buf_size = PKTSIZE_ALIGN; in mvgbe_init_rx_desc_ring()
405 p_rx_desc->buf_ptr = dmvgbe->p_rxbuf + i * PKTSIZE_ALIGN; in mvgbe_init_rx_desc_ring()
517 if (datasize > PKTSIZE_ALIGN) { in __mvgbe_send()
625 p_rxdesc_curr->buf_size = PKTSIZE_ALIGN; in __mvgbe_recv()
670 RINGSZ * PKTSIZE_ALIGN + 1); in mvgbe_alloc_buffers()
674 dmvgbe->p_aligned_txbuf = memalign(8, PKTSIZE_ALIGN); in mvgbe_alloc_buffers()
zynq_gem.c
411 + (i * PKTSIZE_ALIGN))); in zynq_gem_init()
415 + (i * PKTSIZE_ALIGN))); in zynq_gem_init()
643 invalidate_dcache_range(addr, addr + roundup(PKTSIZE_ALIGN, ARCH_DMA_MINALIGN)); in zynq_gem_recv()
676 flush_dcache_range(addr, addr + roundup(PKTSIZE_ALIGN, in zynq_gem_free_pkt()
794 priv->rxbuffers = memalign(ARCH_DMA_MINALIGN, RX_BUF * PKTSIZE_ALIGN); in zynq_gem_probe()
798 memset(priv->rxbuffers, 0, RX_BUF * PKTSIZE_ALIGN); in zynq_gem_probe()
800 flush_dcache_range(addr, addr + roundup(RX_BUF * PKTSIZE_ALIGN, ARCH_DMA_MINALIGN)); in zynq_gem_probe()
xilinx_axi_mrmac.c
202 priv->rx_bd[0]->cntrl = PKTSIZE_ALIGN; in axi_mrmac_start()
203 priv->rx_bd[1]->cntrl = PKTSIZE_ALIGN; in axi_mrmac_start()
252 if (len > PKTSIZE_ALIGN) in axi_mrmac_send()
253 len = PKTSIZE_ALIGN; in axi_mrmac_send()
altera_tse.c
367 writel(PKTSIZE_ALIGN, &desc->len); in altera_tse_free_pkt_msgdma()
503 invalidate_dcache_range(rx_buf, rx_buf + PKTSIZE_ALIGN); in altera_tse_free_pkt()
525 altera_tse_free_pkt(dev, priv->rx_buf, PKTSIZE_ALIGN); in altera_tse_start()
529 writel(PKTSIZE_ALIGN, &mac_dev->max_frame_length); in altera_tse_start()
658 priv->rx_buf = malloc_cache_aligned(PKTSIZE_ALIGN); in altera_tse_probe()
mtk_eth.c
34 #define TX_TOTAL_BUF_SIZE (NUM_TX_DESC * PKTSIZE_ALIGN)
35 #define RX_TOTAL_BUF_SIZE (NUM_RX_DESC * PKTSIZE_ALIGN)
1175 pkt_base += PKTSIZE_ALIGN; in mtk_eth_fifo_init()
1184 rxd->rxd2 = PDMA_V2_RXD2_PLEN0_SET(PKTSIZE_ALIGN); in mtk_eth_fifo_init()
1186 rxd->rxd2 = PDMA_V1_RXD2_PLEN0_SET(PKTSIZE_ALIGN); in mtk_eth_fifo_init()
1188 pkt_base += PKTSIZE_ALIGN; in mtk_eth_fifo_init()
1342 rxd->rxd2 = PDMA_V2_RXD2_PLEN0_SET(PKTSIZE_ALIGN); in mtk_eth_free_pkt()
1344 rxd->rxd2 = PDMA_V1_RXD2_PLEN0_SET(PKTSIZE_ALIGN); in mtk_eth_free_pkt()
mt7620-eth.c
814 memset(priv->pkt_buf, 0, (NUM_TX_DESC + NUM_RX_DESC) * PKTSIZE_ALIGN); in mt7620_eth_fifo_init()
826 pkt_base += PKTSIZE_ALIGN; in mt7620_eth_fifo_init()
830 priv->rx_ring_noc[i].rxd_info2.PLEN0 = PKTSIZE_ALIGN; in mt7620_eth_fifo_init()
832 pkt_base += PKTSIZE_ALIGN; in mt7620_eth_fifo_init()
949 priv->rx_ring_noc[idx].rxd_info2.PLEN0 = PKTSIZE_ALIGN; in mt7620_eth_free_pkt()
983 (NUM_TX_DESC + NUM_RX_DESC) * PKTSIZE_ALIGN); in mt7620_eth_alloc_rings_pkts()
npcm750_eth.c
25 ((CFG_TX_DESCR_NUM + 1) * PKTSIZE_ALIGN + PKTALIGN)
27 ((CFG_RX_DESCR_NUM + 1) * PKTSIZE_ALIGN + PKTALIGN)
359 desc_p->buffer = (u32)&txbuffs[idx * PKTSIZE_ALIGN]; in npcm750_tx_descs_init()
389 desc_p->buffer = (u32)&rxbuffs[idx * PKTSIZE_ALIGN]; in npcm750_rx_descs_init()
xilinx_axi_mrmac.h
57 #define RX_BUFF_TOTAL_SIZE (RX_DESC * PKTSIZE_ALIGN)
ravb.c
120 u8 packet[PKTSIZE_ALIGN];
214 desc->data.ctrl = RAVB_DESC_DT_FEMPTY | RAVB_DESC_DS(PKTSIZE_ALIGN); in ravb_free_pkt()
289 RAVB_DESC_DS(PKTSIZE_ALIGN); in ravb_rx_desc_init()
fsl_enetc.h
75 #define ENETC_RX_MAXFRM_SIZE PKTSIZE_ALIGN
eepro100.c
112 u8 data[PKTSIZE_ALIGN];
359 rx_ring[i].count = cpu_to_le32(PKTSIZE_ALIGN << 16); in init_rx_ring()
707 desc->count = cpu_to_le32(PKTSIZE_ALIGN << 16); in eepro100_free_pkt_common()
xilinx_axi_emac.c
89 static u8 rxframe[PKTSIZE_ALIGN] __attribute((aligned(DMAALIGN)));
664 if (len > PKTSIZE_ALIGN) in axiemac_send()
665 len = PKTSIZE_ALIGN; in axiemac_send()
fsl_mcdmafec.c
279 info->rxbd[i].cbd_datlen = PKTSIZE_ALIGN; in fec_init()
462 prbd->cbd_datlen = PKTSIZE_ALIGN; in mcdmafec_recv()
rswitch.c
268 u8 packet[PKTSIZE_ALIGN];
576 priv->rx_desc[i].data.info_ds = PKTSIZE_ALIGN; in rswitch_rx_desc_init()
899 desc->data.info_ds = PKTSIZE_ALIGN; in rswitch_free_pkt()
ftmac100.c
120 rxdes[i].rxdes1 |= FTMAC100_RXDES1_RXBUF_SIZE (PKTSIZE_ALIGN); in _ftmac100_init()
bcm6348-eth.c
201 PKTSIZE_ALIGN); in bcm6348_eth_start()
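A pattern running through the drivers/net hits above: the driver allocates one contiguous packet pool sized as a descriptor count times PKTSIZE_ALIGN, then hands descriptor i the slot at offset i * PKTSIZE_ALIGN (ethoc.c, mvgbe.c, mtk_eth.c, mt7620-eth.c, npcm750_eth.c). The following is a minimal standalone C sketch of that arithmetic; the 1536-byte value is copied from the include/net.h hit below, while NUM_RX_DESC, the 64-byte alignment and the variable names are illustrative stand-ins rather than code from any one driver.

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#define PKTSIZE_ALIGN	1536	/* mirrors include/net.h line 446 (non-DSA case) */
#define NUM_RX_DESC	4	/* illustrative descriptor count, not from U-Boot */

int main(void)
{
	/* One contiguous pool, one PKTSIZE_ALIGN-sized slot per RX descriptor. */
	uint8_t *pool = aligned_alloc(64, NUM_RX_DESC * PKTSIZE_ALIGN);
	uint8_t *slot[NUM_RX_DESC];
	int i;

	if (!pool)
		return 1;

	for (i = 0; i < NUM_RX_DESC; i++) {
		/* Same offset math as "priv->packet + entry * PKTSIZE_ALIGN". */
		slot[i] = pool + i * PKTSIZE_ALIGN;
		printf("desc %d -> pool offset %d\n", i, i * PKTSIZE_ALIGN);
	}

	free(pool);
	return 0;
}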
/u-boot/lib/efi_loader/
efi_net.c
511 if (buffer_size > PKTSIZE_ALIGN) { in efi_net_transmit()
693 if (len > PKTSIZE_ALIGN) in efi_net_push()
877 transmit_buffer = calloc(1, PKTSIZE_ALIGN + PKTALIGN); in efi_net_register()
888 receive_buffer[i] = malloc(PKTSIZE_ALIGN); in efi_net_register()
/u-boot/drivers/net/ti/
am65-cpsw-nuss.c
135 #ifdef PKTSIZE_ALIGN
136 #define UDMA_RX_BUF_SIZE PKTSIZE_ALIGN
355 writel(PKTSIZE_ALIGN, port0->port_base + AM65_CPSW_PN_RX_MAXLEN_REG); in am65_cpsw_start()
379 writel(PKTSIZE_ALIGN, port->port_base + AM65_CPSW_PN_RX_MAXLEN_REG); in am65_cpsw_start()
/u-boot/drivers/usb/eth/
lan7x.h
101 LAN7X_MAC_RX_MAX_SIZE(PKTSIZE_ALIGN + 4 /* VLAN */ + 4 /* CRC */)
/u-boot/net/
arp.c
28 static uchar arp_tx_packet_buf[PKTSIZE_ALIGN + PKTALIGN];
dsa-uclass.c
141 uchar dsa_packet_tmp[PKTSIZE_ALIGN]; in dsa_port_send()
146 if (length + head + tail > PKTSIZE_ALIGN) in dsa_port_send()
ndisc.c
27 static uchar net_nd_packet_buf[PKTSIZE_ALIGN + PKTALIGN];
/u-boot/include/
net.h
446 #define PKTSIZE_ALIGN 1536 macro
450 #define PKTSIZE_ALIGN (1536 + DSA_MAX_OVR) macro
tsec.h
81 #define MRBLR_INIT_SETTINGS PKTSIZE_ALIGN
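The include/net.h hits are the definition itself: PKTSIZE_ALIGN is the per-packet buffer size, 1536 bytes (a full Ethernet frame rounded up to an alignment-friendly size), plus DSA_MAX_OVR when DSA tagging overhead must fit. The net/ and efi_loader hits (arp.c, ndisc.c, efi_net.c) size their buffers as PKTSIZE_ALIGN + PKTALIGN so the packet payload can start on an aligned boundary inside the allocation. Below is a small standalone sketch of that headroom trick, assuming a 32-byte PKTALIGN purely for illustration; U-Boot derives the real value from its DMA/cache alignment.

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#define PKTSIZE_ALIGN	1536	/* from include/net.h, non-DSA case */
#define PKTALIGN	32	/* assumed value, for illustration only */

int main(void)
{
	/* Oversize by PKTALIGN, as arp.c/ndisc.c/efi_net.c do, so the
	 * payload can be rounded up to an aligned start address. */
	uint8_t *buf = malloc(PKTSIZE_ALIGN + PKTALIGN);
	uintptr_t payload;

	if (!buf)
		return 1;

	payload = ((uintptr_t)buf + PKTALIGN - 1) & ~(uintptr_t)(PKTALIGN - 1);
	printf("raw buffer %p, aligned payload %p\n", (void *)buf, (void *)payload);

	free(buf);
	return 0;
}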
