| /linux/drivers/net/ethernet/fungible/funeth/ |
| A D | funeth_tx.c |
|  349  unsigned int ndesc;  in fun_start_xmit() local
|  357  ndesc = write_pkt_desc(skb, q, tls_len);  in fun_start_xmit()
|  358  if (unlikely(!ndesc)) {  in fun_start_xmit()
|  363  q->prod_cnt += ndesc;  in fun_start_xmit()
|  448  ndesc += pkt_desc;  in fun_txq_reclaim()
|  454  q->cons_cnt += ndesc;  in fun_txq_reclaim()
|  505  ndesc += pkt_desc;  in fun_xdpq_clean()
|  510  q->cons_cnt += ndesc;  in fun_xdpq_clean()
|  566  q->prod_cnt += ndesc;  in fun_xdp_tx()
|  625  unsigned int ndesc,  in fun_txq_create_sw() argument
|  [all …]
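The funeth matches show the free-running-counter flavour of ring accounting: `ndesc` is the per-packet descriptor count, and `prod_cnt`/`cons_cnt` only ever grow. A minimal sketch of that bookkeeping, assuming a power-of-two ring; the `struct txq` and helper names below are illustrative, not the driver's:

    #include <stdint.h>

    /* Illustrative ring: the counters only grow, and 32-bit unsigned
     * subtraction (which wraps) gives the number of descriptors in flight. */
    struct txq {
            uint32_t prod_cnt;   /* total descriptors ever produced */
            uint32_t cons_cnt;   /* total descriptors ever consumed */
            uint32_t mask;       /* ring size - 1, ring size a power of two */
    };

    static inline uint32_t txq_in_use(const struct txq *q)
    {
            return q->prod_cnt - q->cons_cnt;
    }

    static inline uint32_t txq_free(const struct txq *q)
    {
            return (q->mask + 1) - txq_in_use(q);
    }

    static inline uint32_t txq_prod_slot(const struct txq *q)
    {
            return q->prod_cnt & q->mask;   /* where the next descriptor lands */
    }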
|
| /linux/drivers/net/ethernet/altera/ |
| A D | altera_sgdma.c |
|   13  struct sgdma_descrip __iomem *ndesc,
|  173  struct sgdma_descrip __iomem *ndesc = &descbase[1];  in sgdma_tx_buffer() local
|  180  ndesc, /* next descriptor */  in sgdma_tx_buffer()
|  181  sgdma_txphysaddr(priv, ndesc),  in sgdma_tx_buffer()
|  294  struct sgdma_descrip __iomem *ndesc,  in sgdma_setup_descrip() argument
|  305  u32 ctrl = csrrd8(ndesc, sgdma_descroffs(control));  in sgdma_setup_descrip()
|  307  csrwr8(ctrl, ndesc, sgdma_descroffs(control));  in sgdma_setup_descrip()
|  342  struct sgdma_descrip __iomem *ndesc = &descbase[1];  in sgdma_async_read() local
|  353  ndesc, /* next descriptor */  in sgdma_async_read()
|  354  sgdma_rxphysaddr(priv, ndesc),  in sgdma_async_read()
|
| /linux/drivers/net/ethernet/socionext/ |
| A D | sni_ave.c |
|   690  ndesc = priv->tx.ndesc;  in ave_tx_complete()
|   723  done_idx = (done_idx + 1) % ndesc;  in ave_tx_complete()
|   750  u32 ndesc, cmdsts;  in ave_rx_receive() local
|   754  ndesc = priv->rx.ndesc;  in ave_rx_receive()
|   755  restpkt = ((proc_idx + ndesc - 1) - done_idx) % ndesc;  in ave_rx_receive()
|   794  proc_idx = (proc_idx + 1) % ndesc;  in ave_rx_receive()
|   809  done_idx = (done_idx + 1) % ndesc;  in ave_rx_receive()
|  1409  ndesc = priv->tx.ndesc;  in ave_start_xmit()
|  1410  freepkt = ((done_idx + ndesc - 1) - proc_idx) % ndesc;  in ave_start_xmit()
|  1636  priv->tx.ndesc = AVE_NR_TXDESC;  in ave_probe()
|  [all …]
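Lines 755 and 1410 are the classic "one slot kept empty" ring arithmetic. A sketch of the underlying formula, with illustrative names:

    /* Number of slots strictly between 'from' and 'to' when walking forward
     * around a ring of ndesc entries.  Adding (ndesc - 1) before the modulo
     * keeps the unsigned subtraction from going negative; the net "- 1" is
     * the slot left empty so a full ring can be told apart from an empty one. */
    static unsigned int ring_distance(unsigned int to, unsigned int from,
                                      unsigned int ndesc)
    {
            return ((to + ndesc - 1) - from) % ndesc;
    }

Under this reading, ave_start_xmit()'s freepkt corresponds to ring_distance(done_idx, proc_idx, ndesc) and ave_rx_receive()'s restpkt to ring_distance(proc_idx, done_idx, ndesc).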
|
| /linux/drivers/net/wireless/mediatek/mt76/ |
| A D | dma.c |
|  195  Q_WRITE(q, ring_size, q->ndesc);  in mt76_dma_sync_idx()
|  203  if (!q || !q->ndesc)  in __mt76_dma_queue_reset()
|  210  for (i = 0; i < q->ndesc; i++)  in __mt76_dma_queue_reset()
|  303  next = (q->head + 1) % q->ndesc;  in mt76_dma_add_buf()
|  387  if (!q || !q->ndesc)  in mt76_dma_tx_cleanup()
|  640  if (!q->ndesc)  in mt76_dma_rx_fill()
|  693  q->ndesc = n_desc;  in mt76_dma_alloc_queue()
|  709  for (i = 0; i < q->ndesc; i++) {  in mt76_dma_alloc_queue()
|  747  if (!q->ndesc)  in mt76_dma_rx_cleanup()
|  776  if (!q->ndesc)  in mt76_dma_rx_reset()
|  [all …]
|
| A D | usb.c |
|  648  for (i = 0; i < q->ndesc; i++) {  in mt76u_submit_rx_buffers()
|  677  q->ndesc = MT_NUM_RX_ENTRIES;  in mt76u_alloc_rx_queue()
|  680  for (i = 0; i < q->ndesc; i++) {  in mt76u_alloc_rx_queue()
|  700  for (i = 0; i < q->ndesc; i++) {  in mt76u_free_rx_queue()
|  731  for (j = 0; j < q->ndesc; j++)  in mt76u_stop_rx()
|  745  for (j = 0; j < q->ndesc; j++)  in mt76u_resume_rx()
|  864  if (q->queued == q->ndesc)  in mt76u_tx_queue_skb()
|  962  q->ndesc = MT_NUM_TX_ENTRIES;  in mt76u_alloc_tx()
|  963  for (j = 0; j < q->ndesc; j++) {  in mt76u_alloc_tx()
|  987  for (j = 0; j < q->ndesc; j++) {  in mt76u_free_tx()
|  [all …]
|
| A D | sdio.c |
|  315  q->ndesc = MT76S_NUM_RX_ENTRIES;  in mt76s_alloc_rx_queue()
|  338  q->ndesc = MT76S_NUM_TX_ENTRIES;  in mt76s_alloc_tx_queue()
|  374  q->tail = (q->tail + 1) % q->ndesc;  in mt76s_get_next_rx_entry()
|  529  if (q->queued == q->ndesc)  in mt76s_tx_queue_skb()
|  543  q->head = (q->head + 1) % q->ndesc;  in mt76s_tx_queue_skb()
|  562  if (q->queued == q->ndesc) {  in mt76s_tx_queue_skb_raw()
|  574  q->head = (q->head + 1) % q->ndesc;  in mt76s_tx_queue_skb_raw()
|  622  for (j = 0; j < q->ndesc; j++) {  in mt76s_deinit()
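Across mt76's dma, usb and sdio back ends, `q->ndesc` is the ring size, `q->queued == q->ndesc` means the ring is full, and head/tail indices wrap with a modulo. A minimal sketch of the producer-side bookkeeping; the struct and function names are illustrative:

    struct ring {
            int head;    /* next slot the producer will fill */
            int queued;  /* occupied slots */
            int ndesc;   /* ring size */
    };

    /* Returns the slot to fill, or -1 when every descriptor is in use. */
    static int ring_get_slot(struct ring *q)
    {
            int slot;

            if (q->queued == q->ndesc)
                    return -1;                   /* full */

            slot = q->head;
            q->head = (q->head + 1) % q->ndesc;  /* wrap at the ring size */
            q->queued++;
            return slot;
    }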
|
| A D | wed.c |
|  102  if (!q || !q->ndesc)  in mt76_wed_dma_setup()
|  142  q->head = q->ndesc - 1;  in mt76_wed_dma_setup()
|  149  q->head = q->ndesc - 1;  in mt76_wed_dma_setup()
|
| A D | sdio_txrx.c |
|  119  int index = (q->head + i) % q->ndesc;  in mt76s_rx_run_queue()
|  133  if (q->queued + i + 1 == q->ndesc)  in mt76s_rx_run_queue()
|  142  q->head = (q->head + i) % q->ndesc;  in mt76s_rx_run_queue()
|  292  q->first = (q->first + 1) % q->ndesc;  in mt76s_tx_run_queue()
|
| A D | debugfs.c | 83 queued = mt76_is_usb(dev) ? q->ndesc - q->queued : q->queued; in mt76_rx_queues_read()
|
| /linux/drivers/net/ethernet/chelsio/inline_crypto/ch_ktls/ |
| A D | chcr_ktls.c |
|   852  ndesc = DIV_ROUND_UP(len, 64);  in chcr_ktls_xmit_tcb_cpls()
|   922  ndesc = DIV_ROUND_UP(len, 64);  in chcr_ktls_xmit_tcb_cpls()
|   995  unsigned int ndesc;  in chcr_ktls_write_tcp_options() local
|  1016  ndesc = DIV_ROUND_UP(len16, 4);  in chcr_ktls_write_tcp_options()
|  1079  chcr_txq_advance(&q->q, ndesc);  in chcr_ktls_write_tcp_options()
|  1246  chcr_txq_advance(&q->q, ndesc);  in chcr_ktls_xmit_wr_complete()
|  1280  unsigned int flits = 0, ndesc;  in chcr_ktls_xmit_wr_short() local
|  1436  chcr_txq_advance(&q->q, ndesc);  in chcr_ktls_xmit_wr_short()
|  1465  unsigned int flits = 0, ndesc;  in chcr_ktls_tx_plaintxt() local
|  1484  ndesc = DIV_ROUND_UP(flits, 8);  in chcr_ktls_tx_plaintxt()
|  [all …]
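The three divisors in the chcr_ktls matches appear to be the same 64-byte Chelsio transmit descriptor measured in different units: bytes (64), 16-byte `len16` units (4), and 8-byte flits (8). A sketch of the round-up conversions, reusing the kernel's `DIV_ROUND_UP` definition; the helper names are illustrative:

    #define DIV_ROUND_UP(n, d)   (((n) + (d) - 1) / (d))

    static unsigned int bytes_to_ndesc(unsigned int len)
    {
            return DIV_ROUND_UP(len, 64);  /* 64 bytes per descriptor */
    }

    static unsigned int len16_to_ndesc(unsigned int len16)
    {
            return DIV_ROUND_UP(len16, 4); /* four 16-byte units per descriptor */
    }

    static unsigned int flits_to_ndesc(unsigned int flits)
    {
            return DIV_ROUND_UP(flits, 8); /* eight 8-byte flits per descriptor */
    }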
|
| /linux/drivers/net/ethernet/chelsio/inline_crypto/ch_ipsec/ |
| A D | chcr_ipsec.c |
|  576  u16 ndesc;  in ch_ipsec_crypto_wreq() local
|  585  ndesc = DIV_ROUND_UP(flits, 2);  in ch_ipsec_crypto_wreq()
|  601  wr_mid = FW_CRYPTO_LOOKASIDE_WR_LEN16_V(ndesc);  in ch_ipsec_crypto_wreq()
|  614  wr->req.ulptx.len = htonl(ndesc - 1);  in ch_ipsec_crypto_wreq()
|  711  unsigned int last_desc, ndesc, flits = 0;  in ch_ipsec_xmit() local
|  741  ndesc = flits_to_desc(flits);  in ch_ipsec_xmit()
|  742  credits = txq_avail(&q->q) - ndesc;  in ch_ipsec_xmit()
|  748  dev->name, qidx, credits, ndesc, txq_avail(&q->q),  in ch_ipsec_xmit()
|  753  last_desc = q->q.pidx + ndesc - 1;  in ch_ipsec_xmit()
|  791  txq_advance(&q->q, ndesc);  in ch_ipsec_xmit()
|  [all …]
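Lines 741-791 show the usual Chelsio admission pattern: convert the work-request size to descriptors, check that the queue has room, remember the last descriptor for completion handling, then advance the producer index. A condensed sketch of that flow; all names below are stand-ins, not the driver's:

    struct tx_ring {
            unsigned int size;    /* total descriptors in the ring */
            unsigned int in_use;  /* descriptors currently outstanding */
            unsigned int pidx;    /* producer index */
    };

    /* Returns the credits left after queuing, or -1 if the request does not
     * fit (the caller would stop the netdev queue and retry later). */
    static int reserve_descriptors(struct tx_ring *q, unsigned int ndesc)
    {
            int credits = (int)(q->size - q->in_use) - (int)ndesc;

            if (credits < 0)
                    return -1;

            q->in_use += ndesc;
            q->pidx += ndesc;
            if (q->pidx >= q->size)      /* wrap the producer index */
                    q->pidx -= q->size;
            return credits;
    }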
|
| /linux/drivers/net/ethernet/ti/ |
| A D | netcp_core.c |
|   113  *ndesc = le32_to_cpu(desc->next_desc);  in get_pkt_info()
|   155  desc->next_desc = cpu_to_le32(ndesc);  in set_pkt_info()
|   580  struct knav_dma_desc *ndesc;  in netcp_free_rx_desc_chain() local
|   590  if (unlikely(!ndesc)) {  in netcp_free_rx_desc_chain()
|   643  struct knav_dma_desc *desc, *ndesc;  in netcp_process_one_rx_packet() local
|   694  if (unlikely(!ndesc)) {  in netcp_process_one_rx_packet()
|   980  while (ndesc) {  in netcp_free_tx_desc_chain()
|   991  ndesc = NULL;  in netcp_free_tx_desc_chain()
|   995  if (!ndesc)  in netcp_free_tx_desc_chain()
|  1131  if (IS_ERR_OR_NULL(ndesc)) {  in netcp_tx_map_skb()
|  [all …]
|
| /linux/drivers/usb/mon/ |
| A D | mon_bin.c |
|  451  struct urb *urb, unsigned int ndesc)  in mon_bin_collate_isodesc() argument
|  458  while (ndesc-- != 0) {  in mon_bin_collate_isodesc()
|  475  while (ndesc-- != 0) {  in mon_bin_get_isodesc()
|  498  unsigned int ndesc, lendesc;  in mon_bin_event() local
|  516  ndesc = 0;  in mon_bin_event()
|  518  ndesc = ISODESC_MAX;  in mon_bin_event()
|  520  ndesc = urb->number_of_packets;  in mon_bin_event()
|  525  ndesc = 0;  in mon_bin_event()
|  527  lendesc = ndesc*sizeof(struct mon_bin_isodesc);  in mon_bin_event()
|  598  if (ndesc != 0) {  in mon_bin_event()
|  [all …]
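In mon_bin_event() the descriptor count is taken from `urb->number_of_packets`, capped at ISODESC_MAX, and then turned into a byte length for the copy. A sketch of that clamp-and-size step; the struct layout and constant value here are placeholders, not mon_bin's definitions:

    #include <stddef.h>

    #define ISODESC_MAX 128          /* placeholder cap, just for the sketch */

    struct iso_rec {                 /* stand-in for struct mon_bin_isodesc */
            unsigned int iso_status;
            unsigned int iso_off;
            unsigned int iso_len;
    };

    static size_t iso_payload_len(unsigned int number_of_packets)
    {
            unsigned int ndesc = number_of_packets;

            if (ndesc > ISODESC_MAX)
                    ndesc = ISODESC_MAX; /* record at most ISODESC_MAX descriptors */

            return ndesc * sizeof(struct iso_rec);
    }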
|
| A D | mon_text.c |
|  200  int i, ndesc;  in mon_text_event() local
|  233  if ((ndesc = urb->number_of_packets) > ISODESC_MAX)  in mon_text_event()
|  234  ndesc = ISODESC_MAX;  in mon_text_event()
|  237  for (i = 0; i < ndesc; i++) {  in mon_text_event()
|  591  int ndesc; /* Display this many */  in mon_text_read_isodesc() local
|  597  ndesc = ep->numdesc;  in mon_text_read_isodesc()
|  598  if (ndesc > ISODESC_MAX)  in mon_text_read_isodesc()
|  599  ndesc = ISODESC_MAX;  in mon_text_read_isodesc()
|  600  if (ndesc < 0)  in mon_text_read_isodesc()
|  601  ndesc = 0;  in mon_text_read_isodesc()
|  [all …]
|
| /linux/drivers/net/ethernet/chelsio/cxgb4/ |
| A D | sge.c |
|  1719  txq_advance(&q->q, ndesc);  in cxgb4_eth_xmit()
|  2122  while (ndesc--) {  in cxgb4_eosw_txq_free_desc()
|  2263  u8 flits, ndesc;  in ethofld_hard_xmit() local
|  2500  pidx += eosw_txq->ndesc;  in eosw_txq_flush_pending_skbs()
|  2659  u32 ctrl0, ndesc, flits;  in cxgb4_selftest_lb_pkt() local
|  2711  txq_advance(&q->q, ndesc);  in cxgb4_selftest_lb_pkt()
|  2737  unsigned int ndesc;  in ctrl_xmit() local
|  2759  txq_advance(&q->q, ndesc);  in ctrl_xmit()
|  2790  written += ndesc;  in restart_ctrlq()
|  3023  written += ndesc;  in service_ofldq()
|  [all …]
|
| /linux/drivers/net/ethernet/chelsio/cxgb3/ |
| A D | sge.c |
|  1105  if (likely(ndesc == 1)) {  in write_wr_hdr_sgl()
|  1128  ndesc--;  in write_wr_hdr_sgl()
|  1158  WARN_ON(ndesc != 0);  in write_wr_hdr_sgl()
|  1306  q->in_use += ndesc;  in t3_eth_xmit()
|  1318  q->unacked += ndesc;  in t3_eth_xmit()
|  1322  q->pidx += ndesc;  in t3_eth_xmit()
|  1720  q->in_use += ndesc;  in ofld_xmit()
|  1722  q->pidx += ndesc;  in ofld_xmit()
|  1773  q->in_use += ndesc;  in restart_offloadq()
|  1775  q->pidx += ndesc;  in restart_offloadq()
|  [all …]
|
| /linux/drivers/net/ethernet/mediatek/ |
| A D | airoha_eth.c |
|   743  int ndesc;  member
|  1571  q->ndesc = ndesc;  in airoha_qdma_init_rx_queue()
|  1678  if (!q->ndesc)  in airoha_qdma_tx_napi_poll()
|  1747  q->ndesc = size;  in airoha_qdma_init_tx_queue()
|  1949  if (!qdma->q_tx[i].ndesc)  in airoha_qdma_hw_init()
|  1975  if (!qdma->q_rx[i].ndesc)  in airoha_qdma_hw_init()
|  2008  if (!qdma->q_rx[i].ndesc)  in airoha_irq_handler()
|  2123  if (!qdma->q_rx[i].ndesc)  in airoha_hw_cleanup()
|  2139  if (!qdma->q_tx[i].ndesc)  in airoha_hw_cleanup()
|  2154  if (!qdma->q_rx[i].ndesc)  in airoha_qdma_start_napi()
|  [all …]
|
| /linux/drivers/dma/ |
| A D | idma64.c |
|  195  static struct idma64_desc *idma64_alloc_desc(unsigned int ndesc)  in idma64_alloc_desc() argument
|  203  desc->hw = kcalloc(ndesc, sizeof(*desc->hw), GFP_NOWAIT);  in idma64_alloc_desc()
|  217  if (desc->ndesc) {  in idma64_desc_free()
|  218  unsigned int i = desc->ndesc;  in idma64_desc_free()
|  280  unsigned int i = desc->ndesc;  in idma64_desc_fill()
|  320  desc->ndesc = i;  in idma64_prep_slave_sg()
|  329  desc->ndesc = sg_len;  in idma64_prep_slave_sg()
|  362  } while (++i < desc->ndesc);  in idma64_active_desc_size()
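idma64 keeps one software descriptor per transfer and lets it own an array of `ndesc` hardware descriptors; `desc->ndesc` then records how many entries were actually filled so teardown walks exactly that many. A userspace-flavoured sketch of the allocation step, with plain `calloc()` standing in for `kcalloc(..., GFP_NOWAIT)` and an invented hardware-descriptor layout:

    #include <stdlib.h>

    struct hw_lli {                  /* invented stand-in for the hardware descriptor */
            unsigned long long sar, dar, llp;
            unsigned int ctl;
    };

    struct sw_desc {
            struct hw_lli *hw;       /* array of ndesc hardware descriptors */
            unsigned int ndesc;      /* how many entries of hw[] are in use */
    };

    static struct sw_desc *alloc_desc(unsigned int ndesc)
    {
            struct sw_desc *desc = calloc(1, sizeof(*desc));

            if (!desc)
                    return NULL;

            desc->hw = calloc(ndesc, sizeof(*desc->hw));
            if (!desc->hw) {
                    free(desc);
                    return NULL;
            }
            /* desc->ndesc is set later, once the entries are filled in */
            return desc;
    }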
|
| A D | idma64.h | 117 unsigned int ndesc; member
|
| /linux/drivers/net/wireless/ath/ath9k/ |
| A D | init.c |
|  285  int nbuf, int ndesc, bool is_tx)  in ath_descdma_setup() argument
|  292  name, nbuf, ndesc);  in ath_descdma_setup()
|  308  dd->dd_desc_len = desc_len * nbuf * ndesc;  in ath_descdma_setup()
|  348  for (i = 0; i < nbuf; i++, bf++, ds += (desc_len * ndesc)) {  in ath_descdma_setup()
|  364  ds += (desc_len * ndesc);  in ath_descdma_setup()
|  379  for (i = 0; i < nbuf; i++, bf++, ds += (desc_len * ndesc)) {  in ath_descdma_setup()
|  395  ds += (desc_len * ndesc);  in ath_descdma_setup()
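In ath_descdma_setup() each of the `nbuf` buffers owns `ndesc` consecutive hardware descriptors, so the DMA area is `desc_len * nbuf * ndesc` bytes and consecutive buffers start `desc_len * ndesc` bytes apart. A small sketch of that layout arithmetic; the function names are illustrative:

    #include <stddef.h>

    static size_t descdma_area_len(size_t desc_len, unsigned int nbuf,
                                   unsigned int ndesc)
    {
            return desc_len * nbuf * ndesc;             /* whole coherent DMA area */
    }

    static size_t descdma_buf_offset(size_t desc_len, unsigned int ndesc,
                                     unsigned int buf_idx)
    {
            return (size_t)buf_idx * desc_len * ndesc;  /* start of buffer buf_idx */
    }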
|
| /linux/drivers/infiniband/hw/qib/ |
| A D | qib_verbs.c |
|  143  u32 ndesc = 1; /* count the header */  in qib_count_sge() local
|  150  ndesc = 0;  in qib_count_sge()
|  153  ndesc++;  in qib_count_sge()
|  173  return ndesc;  in qib_count_sge()
|  782  u32 ndesc;  in qib_verbs_send_dma() local
|  817  ndesc = qib_count_sge(ss, len);  in qib_verbs_send_dma()
|  818  if (ndesc >= ppd->sdma_descq_cnt)  in qib_verbs_send_dma()
|  819  ndesc = 0;  in qib_verbs_send_dma()
|  821  ndesc = 1;  in qib_verbs_send_dma()
|  822  if (ndesc) {  in qib_verbs_send_dma()
|  [all …]
|
| A D | qib_user_sdma.c |
|   803  int *maxpkts, int *ndesc)  in qib_user_sdma_queue_pkts() argument
|  1023  *ndesc += pkt->naddr;  in qib_user_sdma_queue_pkts()
|  1409  int ndesc = 0;  in qib_user_sdma_writev() local
|  1412  iov, dim, &list, &mxp, &ndesc);  in qib_user_sdma_writev()
|  1425  if (qib_sdma_descq_freecnt(ppd) < ndesc) {  in qib_user_sdma_writev()
|
| /linux/drivers/infiniband/ulp/srp/ |
| A D | ib_srp.h | 347 unsigned int ndesc; member
|
| /linux/drivers/net/ethernet/chelsio/cxgb4vf/ |
| A D | sge.c |
|  1162  unsigned int flits, ndesc;  in t4vf_eth_xmit() local
|  1213  ndesc = flits_to_desc(flits);  in t4vf_eth_xmit()
|  1214  credits = txq_avail(&txq->q) - ndesc;  in t4vf_eth_xmit()
|  1364  ndesc, credits, txq->q.pidx, skb->len, ssi->nr_frags);  in t4vf_eth_xmit()
|  1435  last_desc = tq->pidx + ndesc - 1;  in t4vf_eth_xmit()
|  1446  txq_advance(&txq->q, ndesc);  in t4vf_eth_xmit()
|  1448  ring_tx_db(adapter, &txq->q, ndesc);  in t4vf_eth_xmit()
|
| /linux/drivers/net/ethernet/atheros/ |
| A D | ag71xx.c |
|  1447  int i, ring_mask, ndesc, split;  in ag71xx_fill_dma_desc() local
|  1451  ndesc = 0;  in ag71xx_fill_dma_desc()
|  1460  i = (ring->curr + ndesc) & ring_mask;  in ag71xx_fill_dma_desc()
|  1484  if (!ndesc)  in ag71xx_fill_dma_desc()
|  1488  ndesc++;  in ag71xx_fill_dma_desc()
|  1491  return ndesc;  in ag71xx_fill_dma_desc()
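ag71xx keeps its ring sizes at powers of two, so the wrap in line 1460 is a cheap AND with `ring_mask` rather than the `% ndesc` used by drivers with arbitrary ring sizes; here `ndesc` counts the descriptors already filled for the current skb and serves as an offset from `ring->curr`. A sketch of the masked wrap, with illustrative names:

    /* Wrap an index into a ring whose size is a power of two: the AND is
     * equivalent to "% ring_size" but avoids the division. */
    static unsigned int ring_slot(unsigned int curr, unsigned int offset,
                                  unsigned int ring_size)
    {
            unsigned int ring_mask = ring_size - 1;  /* requires power-of-two size */

            return (curr + offset) & ring_mask;
    }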
|