Lines Matching refs:tx_q

305 writel(upper_32_bits(adpt->tx_q.tpd.dma_addr), in emac_mac_dma_rings_config()
308 writel(lower_32_bits(adpt->tx_q.tpd.dma_addr), in emac_mac_dma_rings_config()
311 writel(adpt->tx_q.tpd.count & TPD_RING_SIZE_BMSK, in emac_mac_dma_rings_config()
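These three writes program the TPD ring into the MAC: the 64-bit descriptor base address is split across two 32-bit registers and the ring size is masked down to its field width. A minimal C sketch of that pattern, assuming only a generic MMIO write and a placeholder mask (mmio_write32, RING_SIZE_MASK, and ring_program are illustrative names, not EMAC API):

        #include <stdint.h>

        #define RING_SIZE_MASK 0xffffu  /* placeholder width, not TPD_RING_SIZE_BMSK */

        /* Hypothetical MMIO helper standing in for writel(). */
        static void mmio_write32(volatile uint32_t *reg, uint32_t val)
        {
                *reg = val;
        }

        /* Program a ring's base address and size the way the three writes
         * above do: the 64-bit DMA address is split across two 32-bit
         * registers and the ring size is masked to its field width. */
        static void ring_program(volatile uint32_t *hi, volatile uint32_t *lo,
                                 volatile uint32_t *size_reg,
                                 uint64_t dma_addr, uint32_t count)
        {
                mmio_write32(hi, (uint32_t)(dma_addr >> 32)); /* upper_32_bits() */
                mmio_write32(lo, (uint32_t)dma_addr);         /* lower_32_bits() */
                mmio_write32(size_reg, count & RING_SIZE_MASK);
        }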
587 struct emac_tx_queue *tx_q = &adpt->tx_q; in emac_tx_q_descs_free() local
592 if (!tx_q->tpd.tpbuff) in emac_tx_q_descs_free()
595 for (i = 0; i < tx_q->tpd.count; i++) { in emac_tx_q_descs_free()
596 struct emac_buffer *tpbuf = GET_TPD_BUFFER(tx_q, i); in emac_tx_q_descs_free()
610 size = sizeof(struct emac_buffer) * tx_q->tpd.count; in emac_tx_q_descs_free()
611 memset(tx_q->tpd.tpbuff, 0, size); in emac_tx_q_descs_free()
614 memset(tx_q->tpd.v_addr, 0, tx_q->tpd.size); in emac_tx_q_descs_free()
616 tx_q->tpd.consume_idx = 0; in emac_tx_q_descs_free()
617 tx_q->tpd.produce_idx = 0; in emac_tx_q_descs_free()
662 struct emac_tx_queue *tx_q = &adpt->tx_q; in emac_tx_q_bufs_free() local
666 kfree(tx_q->tpd.tpbuff); in emac_tx_q_bufs_free()
667 tx_q->tpd.tpbuff = NULL; in emac_tx_q_bufs_free()
668 tx_q->tpd.v_addr = NULL; in emac_tx_q_bufs_free()
669 tx_q->tpd.dma_addr = 0; in emac_tx_q_bufs_free()
670 tx_q->tpd.size = 0; in emac_tx_q_bufs_free()
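emac_tx_q_bufs_free() releases the per-descriptor bookkeeping array and clears every handle so later teardown cannot touch stale memory; the descriptor memory itself belongs to the shared ring header and is not freed here. A small sketch of the same free-and-clear pattern, with ring_state and ring_bufs_free as illustrative names:

        #include <stdint.h>
        #include <stdlib.h>

        struct ring_state {
                void *tpbuff;      /* per-descriptor bookkeeping array */
                void *v_addr;      /* CPU view of the descriptor memory */
                uint64_t dma_addr; /* device view of the descriptor memory */
                size_t size;
        };

        /* Free the bookkeeping array and poison every handle, mirroring
         * the kfree()+NULL pattern above; only the pointers are cleared,
         * since the descriptor memory is owned by the shared block. */
        static void ring_bufs_free(struct ring_state *r)
        {
                free(r->tpbuff);
                r->tpbuff = NULL;
                r->v_addr = NULL;
                r->dma_addr = 0;
                r->size = 0;
        }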
675 struct emac_tx_queue *tx_q) in emac_tx_q_desc_alloc() argument
681 size = sizeof(struct emac_buffer) * tx_q->tpd.count; in emac_tx_q_desc_alloc()
682 tx_q->tpd.tpbuff = kzalloc_node(size, GFP_KERNEL, node); in emac_tx_q_desc_alloc()
683 if (!tx_q->tpd.tpbuff) in emac_tx_q_desc_alloc()
686 tx_q->tpd.size = tx_q->tpd.count * (adpt->tpd_size * 4); in emac_tx_q_desc_alloc()
687 tx_q->tpd.dma_addr = ring_header->dma_addr + ring_header->used; in emac_tx_q_desc_alloc()
688 tx_q->tpd.v_addr = ring_header->v_addr + ring_header->used; in emac_tx_q_desc_alloc()
689 ring_header->used += ALIGN(tx_q->tpd.size, 8); in emac_tx_q_desc_alloc()
690 tx_q->tpd.produce_idx = 0; in emac_tx_q_desc_alloc()
691 tx_q->tpd.consume_idx = 0; in emac_tx_q_desc_alloc()
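emac_tx_q_desc_alloc() carves the TPD ring out of a preallocated shared block: the CPU and DMA views share one running offset, and `used` advances by the 8-byte-aligned ring size so the next ring stays aligned. A sketch under those assumptions (ring_header_sketch, ring_carve, and ALIGN_UP stand in for the kernel's ring_header and ALIGN()):

        #include <stddef.h>
        #include <stdint.h>

        #define ALIGN_UP(x, a) (((x) + (a) - 1) & ~((size_t)(a) - 1))

        struct ring_header_sketch {
                void *v_addr;      /* CPU view of the shared DMA block */
                uint64_t dma_addr; /* device view of the shared DMA block */
                size_t used;       /* bytes already handed out */
        };

        /* Carve one ring out of the shared block, as the allocation above
         * does: both views use the same offset, and the cursor advances by
         * the size rounded up to 8 bytes. */
        static void ring_carve(struct ring_header_sketch *hdr, size_t size,
                               void **v_addr, uint64_t *dma_addr)
        {
                *dma_addr = hdr->dma_addr + hdr->used;
                *v_addr = (char *)hdr->v_addr + hdr->used;
                hdr->used += ALIGN_UP(size, 8);
        }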
757 adpt->tx_q.tpd.count = adpt->tx_desc_cnt; in emac_mac_rx_tx_rings_alloc_all()
780 ret = emac_tx_q_desc_alloc(adpt, &adpt->tx_q); in emac_mac_rx_tx_rings_alloc_all()
831 adpt->tx_q.tpd.produce_idx = 0; in emac_mac_rx_tx_ring_reset_all()
832 adpt->tx_q.tpd.consume_idx = 0; in emac_mac_rx_tx_ring_reset_all()
833 for (i = 0; i < adpt->tx_q.tpd.count; i++) in emac_mac_rx_tx_ring_reset_all()
834 adpt->tx_q.tpd.tpbuff[i].dma_addr = 0; in emac_mac_rx_tx_ring_reset_all()
1026 struct emac_tx_queue *tx_q, struct emac_tpd *tpd) in emac_tx_tpd_create() argument
1030 tx_q->tpd.last_produce_idx = tx_q->tpd.produce_idx; in emac_tx_tpd_create()
1031 hw_tpd = EMAC_TPD(tx_q, adpt->tpd_size, tx_q->tpd.produce_idx); in emac_tx_tpd_create()
1033 if (++tx_q->tpd.produce_idx == tx_q->tpd.count) in emac_tx_tpd_create()
1034 tx_q->tpd.produce_idx = 0; in emac_tx_tpd_create()
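emac_tx_tpd_create() writes the descriptor at produce_idx, first saving that slot as last_produce_idx so it can be revisited later, then wraps the producer at count. A sketch of the wraparound advance (ring_produce_advance is an illustrative name):

        /* Advance a circular producer index, remembering the slot that was
         * just written so it can be revisited later (emac_tx_tpd_create()
         * saves it as last_produce_idx so emac_tx_tpd_mark_last() can flag
         * the final descriptor of a packet). */
        static unsigned int ring_produce_advance(unsigned int *produce_idx,
                                                 unsigned int count)
        {
                unsigned int written = *produce_idx;

                if (++(*produce_idx) == count)
                        *produce_idx = 0;
                return written;
        }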
1044 struct emac_tx_queue *tx_q) in emac_tx_tpd_mark_last() argument
1047 EMAC_TPD(tx_q, adpt->tpd_size, tx_q->tpd.last_produce_idx); in emac_tx_tpd_mark_last()
1168 static unsigned int emac_tpd_num_free_descs(struct emac_tx_queue *tx_q) in emac_tpd_num_free_descs() argument
1170 u32 produce_idx = tx_q->tpd.produce_idx; in emac_tpd_num_free_descs()
1171 u32 consume_idx = tx_q->tpd.consume_idx; in emac_tpd_num_free_descs()
1175 (tx_q->tpd.count + consume_idx - produce_idx - 1); in emac_tpd_num_free_descs()
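emac_tpd_num_free_descs() computes free slots in a ring that keeps one slot empty, so produce == consume unambiguously means "empty". Only the wrapped branch appears in the listing (kernel line 1175); a sketch with the other branch inferred from that shape:

        /* Free slots in a circular ring that sacrifices one slot to tell
         * "full" apart from "empty". The wrapped branch matches kernel
         * line 1175 above; the non-wrapping branch is inferred. */
        static unsigned int ring_free_descs(unsigned int produce_idx,
                                            unsigned int consume_idx,
                                            unsigned int count)
        {
                return (consume_idx > produce_idx) ?
                        (consume_idx - produce_idx - 1) :
                        (count + consume_idx - produce_idx - 1);
        }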
1179 void emac_mac_tx_process(struct emac_adapter *adpt, struct emac_tx_queue *tx_q) in emac_mac_tx_process() argument
1181 u32 reg = readl_relaxed(adpt->base + tx_q->consume_reg); in emac_mac_tx_process()
1185 hw_consume_idx = (reg & tx_q->consume_mask) >> tx_q->consume_shift; in emac_mac_tx_process()
1187 while (tx_q->tpd.consume_idx != hw_consume_idx) { in emac_mac_tx_process()
1188 tpbuf = GET_TPD_BUFFER(tx_q, tx_q->tpd.consume_idx); in emac_mac_tx_process()
1203 if (++tx_q->tpd.consume_idx == tx_q->tpd.count) in emac_mac_tx_process()
1204 tx_q->tpd.consume_idx = 0; in emac_mac_tx_process()
1210 if (emac_tpd_num_free_descs(tx_q) > (MAX_SKB_FRAGS + 1)) in emac_mac_tx_process()
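emac_mac_tx_process() extracts the hardware consumer index from the consume register, walks completed slots up to it, and wakes the queue once more than MAX_SKB_FRAGS + 1 descriptors are free. A sketch of the reclaim loop, with release() standing in for the per-buffer DMA-unmap and skb-free work:

        /* Walk the ring from the driver's consume index up to the index
         * the hardware reports, releasing each completed slot; shaped
         * like the loop in emac_mac_tx_process(). */
        static void ring_reclaim(unsigned int *consume_idx, unsigned int count,
                                 unsigned int hw_consume_idx,
                                 void (*release)(unsigned int slot))
        {
                while (*consume_idx != hw_consume_idx) {
                        release(*consume_idx);
                        if (++(*consume_idx) == count)
                                *consume_idx = 0;
                }
        }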
1235 adpt->tx_q.produce_reg = EMAC_MAILBOX_15; in emac_mac_rx_tx_ring_init_all()
1236 adpt->tx_q.produce_mask = NTPD_PROD_IDX_BMSK; in emac_mac_rx_tx_ring_init_all()
1237 adpt->tx_q.produce_shift = NTPD_PROD_IDX_SHFT; in emac_mac_rx_tx_ring_init_all()
1239 adpt->tx_q.consume_reg = EMAC_MAILBOX_2; in emac_mac_rx_tx_ring_init_all()
1240 adpt->tx_q.consume_mask = NTPD_CONS_IDX_BMSK; in emac_mac_rx_tx_ring_init_all()
1241 adpt->tx_q.consume_shift = NTPD_CONS_IDX_SHFT; in emac_mac_rx_tx_ring_init_all()
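The producer and consumer indices live as bitfields inside shared mailbox registers, which is why each queue records a register offset plus a mask and a shift. A sketch of the field access implied by kernel lines 1185 and 1474-1475 (field_get and field_put are illustrative names):

        #include <stdint.h>

        /* Extract a queue-index bitfield from a shared mailbox register,
         * as kernel line 1185 does with consume_mask/consume_shift. */
        static uint32_t field_get(uint32_t reg, uint32_t mask, uint32_t shift)
        {
                return (reg & mask) >> shift;
        }

        /* Place an index into its bitfield for writing back, as kernel
         * lines 1474-1475 do with produce_shift/produce_mask. */
        static uint32_t field_put(uint32_t idx, uint32_t mask, uint32_t shift)
        {
                return (idx << shift) & mask;
        }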
1246 struct emac_tx_queue *tx_q, in emac_tso_csum() argument
1296 emac_tx_tpd_create(adpt, tx_q, &extra_tpd); in emac_tso_csum()
1328 struct emac_tx_queue *tx_q, struct sk_buff *skb, in emac_tx_fill_tpd() argument
1332 unsigned int first = tx_q->tpd.produce_idx; in emac_tx_fill_tpd()
1344 tpbuf = GET_TPD_BUFFER(tx_q, tx_q->tpd.produce_idx); in emac_tx_fill_tpd()
1359 emac_tx_tpd_create(adpt, tx_q, tpd); in emac_tx_fill_tpd()
1364 tpbuf = GET_TPD_BUFFER(tx_q, tx_q->tpd.produce_idx); in emac_tx_fill_tpd()
1380 emac_tx_tpd_create(adpt, tx_q, tpd); in emac_tx_fill_tpd()
1387 tpbuf = GET_TPD_BUFFER(tx_q, tx_q->tpd.produce_idx); in emac_tx_fill_tpd()
1400 emac_tx_tpd_create(adpt, tx_q, tpd); in emac_tx_fill_tpd()
1406 emac_tx_tpd_mark_last(adpt, tx_q); in emac_tx_fill_tpd()
1417 tx_q->tpd.produce_idx = first; in emac_tx_fill_tpd()
1420 tpbuf = GET_TPD_BUFFER(tx_q, first); in emac_tx_fill_tpd()
1426 if (++first == tx_q->tpd.count) in emac_tx_fill_tpd()
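On a DMA-mapping failure, emac_tx_fill_tpd() rewinds produce_idx to the saved `first` slot and releases everything mapped so far, wrapping with the same increment-and-compare as the fill loop. A sketch of that unwind, with unmap() as a placeholder for the dma_unmap and bookkeeping-clear work:

        /* Error unwind for a partially filled TX ring, shaped like the
         * tail of emac_tx_fill_tpd(): rewind the producer to the packet's
         * first slot, then release each slot already mapped. */
        static void ring_unwind(unsigned int *produce_idx, unsigned int first,
                                unsigned int mapped, unsigned int ring_count,
                                void (*unmap)(unsigned int slot))
        {
                *produce_idx = first;   /* hardware never sees this packet */

                while (mapped--) {
                        unmap(first);
                        if (++first == ring_count)
                                first = 0;
                }
        }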
1435 struct emac_tx_queue *tx_q, in emac_mac_tx_buf_send() argument
1444 if (emac_tso_csum(adpt, tx_q, skb, &tpd) != 0) { in emac_mac_tx_buf_send()
1461 emac_tx_fill_tpd(adpt, tx_q, skb, &tpd); in emac_mac_tx_buf_send()
1470 if (emac_tpd_num_free_descs(tx_q) < (MAX_SKB_FRAGS + 3)) in emac_mac_tx_buf_send()
1474 prod_idx = (tx_q->tpd.produce_idx << tx_q->produce_shift) & in emac_mac_tx_buf_send()
1475 tx_q->produce_mask; in emac_mac_tx_buf_send()
1476 emac_reg_update32(adpt->base + tx_q->produce_reg, in emac_mac_tx_buf_send()
1477 tx_q->produce_mask, prod_idx); in emac_mac_tx_buf_send()
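The doorbell at the end of emac_mac_tx_buf_send() publishes the new producer index with a masked read-modify-write, so the other field sharing the same mailbox register is untouched; the queue is also stopped early when fewer than MAX_SKB_FRAGS + 3 descriptors remain. A sketch of what emac_reg_update32() is assumed to do here (the helper body is inferred, not taken from the source):

        #include <stdint.h>

        /* Read-modify-write of one bitfield in a shared doorbell register:
         * only the bits under mask change, preserving any other queue's
         * field in the same register. */
        static void reg_update32(volatile uint32_t *reg, uint32_t mask,
                                 uint32_t val)
        {
                uint32_t v = *reg;

                v &= ~mask;
                v |= (val & mask);
                *reg = v;
        }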