/drivers/dma/xilinx/

  xdma.c
    100  u32 desc_num;  member
    283  sw_desc->desc_num = desc_num;  in xdma_alloc_desc()
    608  u32 desc_num = 0, i;  in xdma_prep_device_sg()  local
    632  desc_num = 0;  in xdma_prep_device_sg()
    635  desc_num += xdma_fill_descs(sw_desc, *src, *dst, sg_dma_len(sg), desc_num);  in xdma_prep_device_sg()
    672  u32 desc_num;  in xdma_prep_dma_cyclic()  local
    709  desc_num = 0;  in xdma_prep_dma_cyclic()
    711  desc_num += xdma_fill_descs(sw_desc, *src, *dst, period_size, desc_num);  in xdma_prep_dma_cyclic()
    739  u32 desc_num = 0, period_size = 0;  in xdma_prep_interleaved_dma()  local
    757  desc_num = 0;  in xdma_prep_interleaved_dma()
    [all …]
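The xdma_prep_device_sg()/xdma_prep_dma_cyclic() hits above follow one accumulation pattern: a segment or period may need more than one hardware descriptor, and desc_num keeps the running total so the next fill knows its starting slot. Below is a minimal userspace sketch of that arithmetic; MAX_DESC_LEN and fill_descs() are invented for the example, not the driver's values or helpers.

```c
/* Sketch: counting hardware descriptors across segments, assuming each
 * descriptor can cover at most MAX_DESC_LEN bytes (invented limit). */
#include <stdio.h>

#define MAX_DESC_LEN 0x100000u /* assumed per-descriptor byte limit */

/* Descriptors needed to cover one segment of 'len' bytes. */
static unsigned int fill_descs(unsigned int len)
{
	return (len + MAX_DESC_LEN - 1) / MAX_DESC_LEN;
}

int main(void)
{
	unsigned int segments[] = { 0x180000, 0x40000, 0x200000 };
	unsigned int desc_num = 0;

	for (unsigned int i = 0; i < sizeof(segments) / sizeof(segments[0]); i++)
		desc_num += fill_descs(segments[i]);

	printf("total descriptors: %u\n", desc_num); /* 2 + 1 + 2 = 5 */
	return 0;
}
```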
|
/drivers/net/ethernet/hisilicon/hns3/hns3_common/

  hclge_comm_cmd.c
     20  reg_val |= ring->desc_num >> HCLGE_COMM_NIC_CMQ_DESC_NUM_S;  in hclge_comm_cmd_config_regs()
     29  reg_val = ring->desc_num >> HCLGE_COMM_NIC_CMQ_DESC_NUM_S;  in hclge_comm_cmd_config_regs()
    106  int size = ring->desc_num * sizeof(struct hclge_desc);  in hclge_comm_free_cmd_desc()
    118  int size = ring->desc_num * sizeof(struct hclge_desc);  in hclge_comm_alloc_cmd_desc()
    290  int used = (ntu - ntc + ring->desc_num) % ring->desc_num;  in hclge_comm_ring_space()
    292  return ring->desc_num - used - 1;  in hclge_comm_ring_space()
    305  if (hw->cmq.csq.next_to_use >= hw->cmq.csq.desc_num)  in hclge_comm_cmd_copy_desc()
    343  clean = (head - csq->next_to_clean + csq->desc_num) % csq->desc_num;  in hclge_comm_cmd_csq_clean()
    422  if (ntc >= hw->cmq.csq.desc_num)  in hclge_comm_cmd_check_retval()
    574  cmdq->csq.desc_num = HCLGE_COMM_NIC_CMQ_DESC_NUM;  in hclge_comm_cmd_queue_init()
    [all …]
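The hclge_comm_ring_space() and hclge_comm_cmd_csq_clean() hits show the usual producer/consumer ring arithmetic: occupancy is (ntu - ntc + desc_num) % desc_num, and one slot is kept unused so a full ring can be told apart from an empty one. A small standalone sketch of that calculation, using bare integers rather than the driver's ring structure:

```c
/* Sketch of command-queue free-space arithmetic with wrapping indices. */
#include <assert.h>
#include <stdio.h>

static int ring_space(int ntu, int ntc, int desc_num)
{
	int used = (ntu - ntc + desc_num) % desc_num;

	return desc_num - used - 1;
}

int main(void)
{
	/* 1024 descriptors, producer at 10, consumer at 1000 (wrapped). */
	printf("free slots: %d\n", ring_space(10, 1000, 1024)); /* 989 */
	assert(ring_space(0, 0, 1024) == 1023); /* empty ring keeps one slot free */
	return 0;
}
```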
|
/drivers/net/wireless/ath/wcn36xx/

  dxe.c
     62  for (i = 0; i < ch->desc_num && ctl; i++) {  in wcn36xx_dxe_free_ctl_block()
     76  for (i = 0; i < ch->desc_num; i++) {  in wcn36xx_dxe_allocate_ctl_block()
     85  } else if (ch->desc_num - 1 == i) {  in wcn36xx_dxe_allocate_ctl_block()
    110  wcn->dxe_tx_l_ch.desc_num = WCN36XX_DXE_CH_DESC_NUMB_TX_L;  in wcn36xx_dxe_alloc_ctl_blks()
    111  wcn->dxe_tx_h_ch.desc_num = WCN36XX_DXE_CH_DESC_NUMB_TX_H;  in wcn36xx_dxe_alloc_ctl_blks()
    186  for (i = 0; i < wcn_ch->desc_num; i++) {  in wcn36xx_dxe_init_descs()
    211  } else if ((0 < i) && (i < wcn_ch->desc_num - 1)) {  in wcn36xx_dxe_init_descs()
    214  } else if (i == (wcn_ch->desc_num - 1)) {  in wcn36xx_dxe_init_descs()
    244  for (i = 0; i < ch->desc_num; i++) {  in wcn36xx_dxe_init_tx_bd()
    324  for (i = 0; i < wcn_ch->desc_num; i++) {  in wcn36xx_dxe_ch_alloc_skb()
    [all …]
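wcn36xx_dxe_allocate_ctl_block() and wcn36xx_dxe_init_descs() treat index 0, the middle indices, and desc_num - 1 differently because the control blocks form a circular chain: the last entry links back to the first. A rough userspace reconstruction of that shape, with invented struct and function names:

```c
/* Sketch: building a circular list of desc_num control blocks. */
#include <stdio.h>
#include <stdlib.h>

struct ctl_block {
	struct ctl_block *next;
	int index;
};

static struct ctl_block *alloc_ctl_ring(int desc_num)
{
	struct ctl_block *head = NULL, *prev = NULL;

	for (int i = 0; i < desc_num; i++) {
		struct ctl_block *cur = calloc(1, sizeof(*cur));

		if (!cur)
			return NULL; /* a real driver would unwind the partial chain */
		cur->index = i;
		if (i == 0)
			head = cur;       /* first entry starts the ring */
		else
			prev->next = cur; /* middle entries chain forward */
		if (i == desc_num - 1)
			cur->next = head; /* last entry closes the ring */
		prev = cur;
	}
	return head;
}

int main(void)
{
	struct ctl_block *ring = alloc_ctl_ring(4);
	struct ctl_block *c = ring;

	if (!ring)
		return 1;
	/* Walk once around the ring: 0 1 2 3, then back to 0. */
	do {
		printf("%d ", c->index);
		c = c->next;
	} while (c != ring);
	printf("\n");
	return 0;
}
```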
|
/drivers/net/ethernet/hisilicon/hns/

  hnae.h
    279  u16 desc_num; /* total number of desc */  member
    301  ((ring)->p = ((ring)->p + 1) % (ring)->desc_num)
    303  ((ring)->p = ((ring)->p - 1 + (ring)->desc_num) % (ring)->desc_num)
    311  assert((idx) >= 0 && (idx) < (ring)->desc_num)
    321  return (end - begin + ring->desc_num) % ring->desc_num;  in ring_dist()
    326  return ring->desc_num -  in ring_space()
    666  for (j = 0; j < ring->desc_num; j++)  in hnae_reinit_all_ring_desc()
    681  for (j = 0; j < ring->desc_num; j++) {  in hnae_reinit_all_ring_page_off()
|
  hnae.c
    125  for (i = 0; i < ring->desc_num; i++)  in hnae_free_buffers()
    134  for (i = 0; i < ring->desc_num; i++) {  in hnae_alloc_buffers()
    152  ring->desc_num * sizeof(ring->desc[0]),  in hnae_free_desc()
    162  int size = ring->desc_num * sizeof(ring->desc[0]);  in hnae_alloc_desc()
    199  if (ring->desc_num <= 0 || ring->buf_size <= 0)  in hnae_init_ring()
    211  ring->desc_cb = kcalloc(ring->desc_num, sizeof(ring->desc_cb[0]),  in hnae_init_ring()
|
  hns_dsaf_rcb.c
    379  hns_rcb_set_port_desc_cnt(rcb_common, i, rcb_common->desc_num);  in hns_rcb_common_init_hw()
    437  u16 desc_num, mdnum_ppkt;  in hns_rcb_ring_get_cfg()  local
    457  desc_num = rcb_common->dsaf_dev->desc_num;  in hns_rcb_ring_get_cfg()
    466  ring->desc_num = desc_num;  in hns_rcb_ring_get_cfg()
    671  if (coalesced_frames >= rcb_common->desc_num ||  in hns_rcb_set_rx_coalesced_frames()
    790  rcb_common->desc_num = dsaf_dev->desc_num;  in hns_rcb_common_get_cfg()
|
  hns_ae_adapt.c
    605  (dsaf_dev->desc_num - 1 > HNS_RCB_TX_FRAMES_HIGH) ?  in hns_ae_get_coalesce_range()
    606  HNS_RCB_TX_FRAMES_HIGH : dsaf_dev->desc_num - 1;  in hns_ae_get_coalesce_range()
    610  *rx_frames_high = (dsaf_dev->desc_num - 1 > HNS_RCB_RX_FRAMES_HIGH) ?  in hns_ae_get_coalesce_range()
    611  HNS_RCB_RX_FRAMES_HIGH : dsaf_dev->desc_num - 1;  in hns_ae_get_coalesce_range()
|
  hns_dsaf_rcb.h
    109  u32 desc_num; /* desc num per queue*/  member
|
  hns_dsaf_main.c
     44  u32 desc_num;  in hns_dsaf_get_cfg()  local
    164  ret = device_property_read_u32(dsaf_dev->dev, "desc-num", &desc_num);  in hns_dsaf_get_cfg()
    165  if (ret < 0 || desc_num < HNS_DSAF_MIN_DESC_CNT ||  in hns_dsaf_get_cfg()
    166  desc_num > HNS_DSAF_MAX_DESC_CNT) {  in hns_dsaf_get_cfg()
    168  desc_num, ret);  in hns_dsaf_get_cfg()
    171  dsaf_dev->desc_num = desc_num;  in hns_dsaf_get_cfg()
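hns_dsaf_get_cfg() reads the "desc-num" firmware property and rejects values outside [HNS_DSAF_MIN_DESC_CNT, HNS_DSAF_MAX_DESC_CNT] before storing it. A tiny sketch of that bounded-configuration check, with made-up limits standing in for the driver's constants:

```c
/* Sketch: accept a configured descriptor count only inside [min, max].
 * The 16/1024 bounds are invented for the example. */
#include <stdio.h>

#define MIN_DESC_CNT 16
#define MAX_DESC_CNT 1024

static int validate_desc_num(unsigned int desc_num)
{
	if (desc_num < MIN_DESC_CNT || desc_num > MAX_DESC_CNT) {
		fprintf(stderr, "bad desc-num %u (valid: %d..%d)\n",
			desc_num, MIN_DESC_CNT, MAX_DESC_CNT);
		return -1;
	}
	return 0;
}

int main(void)
{
	printf("%d\n", validate_desc_num(256)); /* 0: accepted */
	printf("%d\n", validate_desc_num(4));   /* -1: below minimum */
	return 0;
}
```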
|
/drivers/usb/dwc2/

  hcd.h
    686  struct dwc2_hcd_urb *dwc2_urb, int desc_num, u32 offset,  in dwc2_hcd_urb_set_iso_desc_params()  argument
    689  dwc2_urb->iso_descs[desc_num].offset = offset;  in dwc2_hcd_urb_set_iso_desc_params()
    690  dwc2_urb->iso_descs[desc_num].length = length;  in dwc2_hcd_urb_set_iso_desc_params()
    694  struct dwc2_hcd_urb *dwc2_urb, int desc_num)  in dwc2_hcd_urb_get_iso_desc_status()  argument
    696  return dwc2_urb->iso_descs[desc_num].status;  in dwc2_hcd_urb_get_iso_desc_status()
    700  struct dwc2_hcd_urb *dwc2_urb, int desc_num)  in dwc2_hcd_urb_get_iso_desc_actual_length()  argument
    702  return dwc2_urb->iso_descs[desc_num].actual_length;  in dwc2_hcd_urb_get_iso_desc_actual_length()
|
  hcd_ddma.c
    1129  int desc_num,  in dwc2_process_non_isoc_desc()  argument
    1145  qh->desc_list_dma + (desc_num *  in dwc2_process_non_isoc_desc()
    1150  dma_desc = &qh->desc_list[desc_num];  in dwc2_process_non_isoc_desc()
    1151  n_bytes = qh->n_bytes[desc_num];  in dwc2_process_non_isoc_desc()
    1154  qtd, urb, desc_num, dma_desc, n_bytes);  in dwc2_process_non_isoc_desc()
    1181  } else if (desc_num + 1 == qtd->n_desc) {  in dwc2_process_non_isoc_desc()
    1207  int desc_num = 0;  in dwc2_complete_non_isoc_xfer_ddma()  local
    1225  desc_num, halt_status,  in dwc2_complete_non_isoc_xfer_ddma()
    1231  desc_num++;  in dwc2_complete_non_isoc_xfer_ddma()
|
/drivers/dma/ti/

  cppi41.c
    258  u32 desc_num;  in desc_to_chan()  local
    268  BUG_ON(desc_num >= ALLOC_DECS_NUM);  in desc_to_chan()
    269  c = cdd->chan_busy[desc_num];  in desc_to_chan()
    270  cdd->chan_busy[desc_num] = NULL;  in desc_to_chan()
    434  u32 desc_num;  in push_desc_queue()  local
    467  WARN_ON(cdd->chan_busy[desc_num]);  in push_desc_queue()
    468  cdd->chan_busy[desc_num] = c;  in push_desc_queue()
    740  u32 desc_num;  in cppi41_stop_chan()  local
    746  if (!cdd->chan_busy[desc_num]) {  in cppi41_stop_chan()
    767  WARN_ON(!cdd->chan_busy[desc_num]);  in cppi41_stop_chan()
    [all …]
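The cppi41.c hits revolve around a chan_busy[] table indexed by descriptor number: push_desc_queue() claims the slot for a channel, desc_to_chan() recovers the channel on completion and clears the slot, and cppi41_stop_chan() checks it during teardown. A simplified userspace sketch of that bookkeeping with invented types and a made-up pool size:

```c
/* Sketch: busy-slot table keyed by descriptor number. */
#include <assert.h>
#include <stddef.h>
#include <stdio.h>

#define ALLOC_DESC_NUM 32 /* assumed pool size, not the driver's constant */

struct chan {
	int id;
};

static struct chan *chan_busy[ALLOC_DESC_NUM];

static void push_desc(unsigned int desc_num, struct chan *c)
{
	assert(desc_num < ALLOC_DESC_NUM);
	assert(!chan_busy[desc_num]);   /* slot must be free when queuing */
	chan_busy[desc_num] = c;
}

static struct chan *complete_desc(unsigned int desc_num)
{
	struct chan *c;

	assert(desc_num < ALLOC_DESC_NUM);
	c = chan_busy[desc_num];
	chan_busy[desc_num] = NULL;     /* release the slot on completion */
	return c;
}

int main(void)
{
	struct chan ch = { .id = 3 };

	push_desc(7, &ch);
	printf("completed on channel %d\n", complete_desc(7)->id);
	return 0;
}
```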
|
/drivers/net/ethernet/ti/

  davinci_cpdma.c
    116  u32 desc_num;  member
    680  int rx, int desc_num,  in cpdma_chan_set_descs()  argument
    684  int desc_cnt = desc_num;  in cpdma_chan_set_descs()
    688  if (!desc_num)  in cpdma_chan_set_descs()
    705  chan->desc_num = (chan->weight * desc_num) / 100;  in cpdma_chan_set_descs()
    707  chan->desc_num = per_ch_desc;  in cpdma_chan_set_descs()
    709  desc_cnt -= chan->desc_num;  in cpdma_chan_set_descs()
    712  most_dnum = chan->desc_num;  in cpdma_chan_set_descs()
    938  int desc_num;  in cpdma_chan_get_rx_buf_num()  local
    941  desc_num = chan->desc_num;  in cpdma_chan_get_rx_buf_num()
    [all …]
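cpdma_chan_set_descs() splits one descriptor pool across channels: a channel with a weight takes that percentage of the pool, and whatever remains is divided evenly among the unweighted channels. A small sketch of that two-pass split under those assumptions; the field and function names below are simplified, not the driver's:

```c
/* Sketch: weight-based partitioning of a shared descriptor pool. */
#include <stdio.h>

struct chan {
	int weight;   /* percent of the pool, or 0 for "no weight" */
	int desc_num; /* computed share */
};

static void set_descs(struct chan *chans, int n, int desc_num)
{
	int desc_cnt = desc_num;
	int unweighted = 0;

	/* First pass: weighted channels take their percentage. */
	for (int i = 0; i < n; i++) {
		if (chans[i].weight) {
			chans[i].desc_num = (chans[i].weight * desc_num) / 100;
			desc_cnt -= chans[i].desc_num;
		} else {
			unweighted++;
		}
	}

	/* Second pass: leftover descriptors split evenly. */
	for (int i = 0; i < n; i++)
		if (!chans[i].weight)
			chans[i].desc_num = unweighted ? desc_cnt / unweighted : 0;
}

int main(void)
{
	struct chan chans[3] = { { .weight = 50 }, { .weight = 0 }, { .weight = 0 } };

	set_descs(chans, 3, 128);
	for (int i = 0; i < 3; i++)
		printf("chan%d: %d descriptors\n", i, chans[i].desc_num); /* 64, 32, 32 */
	return 0;
}
```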
|
/drivers/dma/

  loongson2-apb-dma.c
    107  u32 desc_num;  member
    110  struct ls2x_dma_sg sg[] __counted_by(desc_num);
    175  for (i = 0; i < desc->desc_num; i++) {  in ls2x_dma_desc_free()
    342  desc->desc_num = sg_len;  in ls2x_dma_prep_slave_sg()
    352  desc->desc_num = i;  in ls2x_dma_prep_slave_sg()
    407  desc->desc_num = num_periods;  in ls2x_dma_prep_dma_cyclic()
    418  desc->desc_num = i;  in ls2x_dma_prep_dma_cyclic()
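The loongson2-apb-dma.c descriptor ends in a flexible array, `struct ls2x_dma_sg sg[] __counted_by(desc_num)`, so one allocation holds the header plus desc_num trailing scatter entries, and the annotation tells the compiler the array's runtime bound. A plain-C sketch of the same layout without the kernel annotations; in kernel code the allocation size would typically be computed with struct_size() rather than open-coded as below:

```c
/* Sketch: flexible array member sized by a desc_num field. */
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

struct dma_sg {
	uint64_t addr;
	uint32_t len;
};

struct dma_desc {
	uint32_t desc_num;  /* number of valid sg[] entries */
	struct dma_sg sg[]; /* flexible array member */
};

static struct dma_desc *alloc_desc(uint32_t sg_len)
{
	struct dma_desc *desc;

	/* One allocation covers the header plus sg_len trailing entries. */
	desc = calloc(1, sizeof(*desc) + sg_len * sizeof(desc->sg[0]));
	if (!desc)
		return NULL;
	desc->desc_num = sg_len;
	return desc;
}

int main(void)
{
	struct dma_desc *desc = alloc_desc(4);

	if (!desc)
		return 1;
	for (uint32_t i = 0; i < desc->desc_num; i++)
		desc->sg[i].len = 4096;
	printf("allocated %u sg entries\n", desc->desc_num);
	free(desc);
	return 0;
}
```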
|
  mmp_tdma.c
    109  int desc_num;  member
    359  int size = tdmac->desc_num * sizeof(struct mmp_tdma_desc);  in mmp_tdma_free_descriptor()
    411  int size = tdmac->desc_num * sizeof(struct mmp_tdma_desc);  in mmp_tdma_alloc_descriptor()
    450  tdmac->desc_num = num_periods;  in mmp_tdma_prep_dma_cyclic()
|
  k3dma.c
     72  size_t desc_num;  member
    409  for (; index < ds->desc_num; index++) {  in k3_dma_tx_status()
    449  if (num != ds->desc_num - 1)  in k3_dma_fill_desc()
    484  ds->desc_num = num;  in k3_dma_alloc_desc_resource()
|
/drivers/dma/lgm/

  lgm-dma.c
    217  u32 desc_num;  member
    636  int desc_num)  in ldma_chan_desc_hw_cfg()  argument
    652  writel(desc_num, d->base + DMA_CDLEN);  in ldma_chan_desc_hw_cfg()
    666  if (!desc_num) {  in ldma_chan_desc_cfg()
    672  if (desc_num > DMA_MAX_DESC_NUM) {  in ldma_chan_desc_cfg()
    674  c->nr, desc_num);  in ldma_chan_desc_cfg()
    681  c->desc_cnt = desc_num;  in ldma_chan_desc_cfg()
    980  if (num > c->desc_num) {  in dma_alloc_desc_resource()
    1282  return c->desc_num;  in ldma_alloc_chan_resources()
    1293  return c->desc_num;  in ldma_alloc_chan_resources()
    [all …]
|
/drivers/spi/

  spi-amlogic-spisg.c
    492  int desc_num = 1, descs_len;  in aml_spisg_transfer_one_message()  local
    504  desc_num++;  in aml_spisg_transfer_one_message()
    507  descs = kcalloc(desc_num, sizeof(*desc) + sizeof(*exdesc),  in aml_spisg_transfer_one_message()
    514  descs_len = sizeof(*desc) * desc_num;  in aml_spisg_transfer_one_message()
    515  exdescs = (struct spisg_descriptor_extra *)(descs + desc_num);  in aml_spisg_transfer_one_message()
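aml_spisg_transfer_one_message() makes a single kcalloc() sized for desc_num (descriptor + extra descriptor) pairs, then locates the extra array by stepping a pointer past the desc_num main descriptors. A standalone sketch of that combined-allocation layout; the struct contents are invented and chosen so both arrays stay naturally aligned:

```c
/* Sketch: one allocation, two back-to-back arrays of desc_num entries. */
#include <stdio.h>
#include <stdlib.h>

struct descriptor {
	unsigned int cfg;
};

struct descriptor_extra {
	unsigned int xfer_len;
};

int main(void)
{
	int desc_num = 3;
	struct descriptor *descs;
	struct descriptor_extra *exdescs;

	/* One zeroed block big enough for both arrays. */
	descs = calloc(desc_num, sizeof(*descs) + sizeof(*exdescs));
	if (!descs)
		return 1;

	/* The extra descriptors start right after the last main descriptor. */
	exdescs = (struct descriptor_extra *)(descs + desc_num);

	for (int i = 0; i < desc_num; i++) {
		descs[i].cfg = 1u << i;
		exdescs[i].xfer_len = 64u * (i + 1);
	}

	printf("descs at %p, exdescs at %p\n", (void *)descs, (void *)exdescs);
	free(descs);
	return 0;
}
```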
|
/drivers/dma/sh/

  shdma-base.c
    233  schan->desc_num = NR_DESCS_PER_CHANNEL;  in shdma_alloc_chan_resources()
    479  schan->desc_num = 0;  in shdma_free_chan_resources()
    649  BUG_ON(!schan->desc_num);  in shdma_prep_memcpy()
    674  BUG_ON(!schan->desc_num);  in shdma_prep_slave_sg()
    709  BUG_ON(!schan->desc_num);  in shdma_prep_dma_cyclic()
|
/drivers/net/

  virtio_net.c
    4551  u32 desc_num[3];  member
    4638  ctx->desc_num[queue_type] += ARRAY_SIZE(virtnet_stats_cvq_desc);  in virtnet_stats_ctx_init()
    4672  ctx->desc_num[queue_type] += ARRAY_SIZE(virtnet_stats_tx_gso_desc);  in virtnet_stats_ctx_init()
    4710  num_cq = ctx->desc_num[VIRTNET_Q_TYPE_CQ];  in virtnet_fill_total_fields()
    4711  num_rx = ctx->desc_num[VIRTNET_Q_TYPE_RX];  in virtnet_fill_total_fields()
    4712  num_tx = ctx->desc_num[VIRTNET_Q_TYPE_TX];  in virtnet_fill_total_fields()
    4847  num_cq = ctx->desc_num[VIRTNET_Q_TYPE_CQ];  in virtnet_fill_stats()
    4848  num_rx = ctx->desc_num[VIRTNET_Q_TYPE_RX];  in virtnet_fill_stats()
    4849  num_tx = ctx->desc_num[VIRTNET_Q_TYPE_TX];  in virtnet_fill_stats()
    5104  pair_count = ctx.desc_num[VIRTNET_Q_TYPE_RX] + ctx.desc_num[VIRTNET_Q_TYPE_TX];  in virtnet_get_sset_count()
    [all …]
|
/drivers/net/ethernet/hisilicon/hns3/

  hns3_enet.c
    1986  for (i = 0; i < ring->desc_num; i++) {  in hns3_clear_desc()
    2069  ring->desc_num;  in hns3_tx_push_bd()
    2342  (ring->desc_num - 1);  in hns3_nic_net_xmit()
    3474  for (i = 0; i < ring->desc_num; i++)  in hns3_free_buffers()
    3618  if (++ntc == ring->desc_num)  in hns3_nic_reclaim_desc()
    3683  return ring->desc_num;  in hns3_desc_unused()
    4279  (ring->desc_num - 1);  in hns3_handle_bdinfo()
    4910  int desc_num;  in hns3_ring_get_cfg()  local
    4933  ring->desc_num = desc_num;  in hns3_ring_get_cfg()
    5101  ring->desc_num / 8 - 1);  in hns3_init_ring_hw()
    [all …]
|
  hnae3.h
    192  ((ring)->p = ((ring)->p + 1) % (ring)->desc_num)
    194  ((ring)->p = ((ring)->p - 1 + (ring)->desc_num) % (ring)->desc_num)
|
  hclge_mbx.h
    248  (crq->next_to_use = (crq->next_to_use + 1) % crq->desc_num)
|
  hns3_enet.h
    482  u16 desc_num; /* total number of desc */  member
    634  return ((end >= begin) ? (ring->desc_num - end + begin) :  in ring_space()
|
  hns3_ethtool.c
     684  param->tx_pending = priv->ring[0].desc_num;  in hns3_get_ringparam()
     685  param->rx_pending = priv->ring[rx_queue_index].desc_num;  in hns3_get_ringparam()
    1087  priv->ring[i].desc_num = tx_desc_num;  in hns3_change_all_ring_bd_num()
    1088  priv->ring[i + h->kinfo.num_tqps].desc_num = rx_desc_num;  in hns3_change_all_ring_bd_num()
    1163  old_ringparam->tx_desc_num = priv->ring[0].desc_num;  in hns3_is_ringparam_changed()
    1164  old_ringparam->rx_desc_num = priv->ring[queue_num].desc_num;  in hns3_is_ringparam_changed()
|