
Searched refs: txqs (Results 1 – 25 of 54) sorted by relevance

/drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/
tx.c
173 memset(tfd, 0, trans_pcie->txqs.tfd.size); in iwl_pcie_txq_build_tfd()
180 trans_pcie->txqs.tfd.max_tbs); in iwl_pcie_txq_build_tfd()
460 trans_pcie->txqs.tfd.size * in iwl_pcie_txq_free()
490 memset(trans_pcie->txqs.queue_stopped, 0, in iwl_pcie_tx_start()
492 memset(trans_pcie->txqs.queue_used, 0, in iwl_pcie_tx_start()
627 memset(trans_pcie->txqs.queue_used, 0, in iwl_pcie_tx_stop()
652 memset(trans_pcie->txqs.queue_used, 0, in iwl_pcie_tx_free()
661 trans_pcie->txqs.txq[txq_id] = NULL; in iwl_pcie_tx_free()
1932 trans_pcie->txqs.tfd.size, in iwl_fill_data_tbs_amsdu()
2128 txq = trans_pcie->txqs.txq[txq_id]; in iwl_trans_pcie_tx()
[all …]
tx-gen2.c
625 trans_pcie->txqs.tfd.max_tbs); in iwl_txq_gen2_set_tb()
870 dma_pool_free(trans_pcie->txqs.bc_pool, in iwl_txq_gen2_free_memory()
893 txq = trans_pcie->txqs.txq[txq_id]; in iwl_txq_gen2_free()
910 trans_pcie->txqs.txq[txq_id] = NULL; in iwl_txq_gen2_free()
923 WARN_ON(!trans_pcie->txqs.bc_tbl_size); in iwl_txq_dyn_alloc_dma()
1001 trans_pcie->txqs.txq[qid] = txq; in iwl_pcie_txq_alloc_response()
1135 memset(trans_pcie->txqs.queue_used, 0, in iwl_txq_gen2_tx_free()
1140 if (!trans_pcie->txqs.txq[i]) in iwl_txq_gen2_tx_free()
1154 if (!trans_pcie->txqs.txq[txq_id]) { in iwl_txq_gen2_init()
1160 trans_pcie->txqs.txq[txq_id] = queue; in iwl_txq_gen2_init()
[all …]
trans.c
1991 if (trans_pcie->txqs.tso_hdr_page) { in iwl_trans_pcie_free()
2527 txq = trans_pcie->txqs.txq[txq_idx]; in iwl_trans_pcie_wait_txq_empty()
3584 u16 tfd_size = trans_pcie->txqs.tfd.size; in iwl_trans_pcie_dump_data()
3741 trans_pcie->txqs.tfd.addr_size = 64; in iwl_trans_pcie_alloc()
3745 trans_pcie->txqs.tfd.addr_size = 36; in iwl_trans_pcie_alloc()
3757 if (!trans_pcie->txqs.tso_hdr_page) { in iwl_trans_pcie_alloc()
3769 trans_pcie->txqs.bc_tbl_size = in iwl_trans_pcie_alloc()
3777 trans_pcie->txqs.bc_pool = in iwl_trans_pcie_alloc()
3779 trans_pcie->txqs.bc_tbl_size, in iwl_trans_pcie_alloc()
3781 if (!trans_pcie->txqs.bc_pool) { in iwl_trans_pcie_alloc()
[all …]
trans-gen2.c
369 memset(trans_pcie->txqs.queue_stopped, 0, in iwl_trans_pcie_gen2_fw_alive()
370 sizeof(trans_pcie->txqs.queue_stopped)); in iwl_trans_pcie_gen2_fw_alive()
371 memset(trans_pcie->txqs.queue_used, 0, in iwl_trans_pcie_gen2_fw_alive()
372 sizeof(trans_pcie->txqs.queue_used)); in iwl_trans_pcie_gen2_fw_alive()
internal.h
501 struct iwl_pcie_txqs txqs; member
586 #define IWL_TRANS_PCIE_MAX_FRAGS(trans_pcie) ((trans_pcie)->txqs.tfd.max_tbs - 3)
669 return (u8 *)txq->tfds + trans_pcie->txqs.tfd.size * idx; in iwl_txq_get_tfd()
688 if (!test_and_set_bit(txq->id, trans_pcie->txqs.queue_stopped)) { in iwl_txq_stop()
728 if (test_and_clear_bit(txq->id, trans_pcie->txqs.queue_stopped)) { in iwl_trans_pcie_wake_queue()
790 unsigned long txqs, bool freeze);
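
The internal.h hits above show how the iwlwifi PCIe transport does software flow control: per-queue state lives in the queue_used/queue_stopped bitmaps, and stop/wake transitions are gated on atomic test_and_set_bit()/test_and_clear_bit() so each transition fires exactly once even with concurrent callers. A minimal sketch of that idiom, using hypothetical my_dev/MY_MAX_QUEUES names rather than the driver's real structures:

#include <linux/bitops.h>	/* test_and_set_bit(), test_and_clear_bit() */
#include <linux/printk.h>

#define MY_MAX_QUEUES 32	/* hypothetical queue count */

struct my_dev {
	unsigned long queue_stopped[BITS_TO_LONGS(MY_MAX_QUEUES)];
};

/* Stop a queue exactly once: the atomic test-and-set means a second
 * concurrent caller sees the bit already set and does nothing. */
static void my_queue_stop(struct my_dev *dev, int qid)
{
	if (!test_and_set_bit(qid, dev->queue_stopped))
		pr_debug("queue %d stopped\n", qid);	/* e.g. ieee80211_stop_queue() */
}

/* Wake a queue only if it was actually stopped. */
static void my_queue_wake(struct my_dev *dev, int qid)
{
	if (test_and_clear_bit(qid, dev->queue_stopped))
		pr_debug("queue %d woken\n", qid);	/* e.g. ieee80211_wake_queue() */
}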
/drivers/net/ethernet/huawei/hinic/
hinic_main.c
134 if (nic_dev->txqs) in create_txqs()
139 if (!nic_dev->txqs) in create_txqs()
165 hinic_clean_txq(&nic_dev->txqs[i]); in create_txqs()
169 hinic_clean_txq(&nic_dev->txqs[j]); in create_txqs()
175 nic_dev->txqs = NULL; in create_txqs()
185 napi_enable(&nic_dev->txqs[i].napi); in enable_txqs_napi()
194 napi_disable(&nic_dev->txqs[i].napi); in disable_txqs_napi()
206 if (!nic_dev->txqs) in free_txqs()
211 hinic_clean_txq(&nic_dev->txqs[i]); in free_txqs()
217 nic_dev->txqs = NULL; in free_txqs()
[all …]
hinic_dev.h
97 struct hinic_txq *txqs; member
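
create_txqs()/free_txqs() above follow the usual allocate-then-unwind idiom: kcalloc() one txqs array sized by the queue count, initialize each entry, and on failure tear down only the entries already set up before freeing the array. A hedged sketch of that shape with made-up my_txq names, not the hinic types:

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/slab.h>

struct my_txq { unsigned int id; };	/* stand-in for per-queue state */

static int my_init_txq(struct my_txq *txq, unsigned int i)
{
	txq->id = i;			/* hypothetical per-queue setup */
	return 0;
}

static void my_clean_txq(struct my_txq *txq) { }

static struct my_txq *my_create_txqs(unsigned int num)
{
	struct my_txq *txqs;
	unsigned int i, j;

	txqs = kcalloc(num, sizeof(*txqs), GFP_KERNEL);
	if (!txqs)
		return ERR_PTR(-ENOMEM);

	for (i = 0; i < num; i++) {
		if (my_init_txq(&txqs[i], i)) {
			/* unwind only the queues initialized so far */
			for (j = 0; j < i; j++)
				my_clean_txq(&txqs[j]);
			kfree(txqs);
			return ERR_PTR(-ENOMEM);
		}
	}
	return txqs;
}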
/drivers/net/ethernet/fungible/funeth/
funeth_main.c
355 for (i = start; i < nqs && txqs[i]; i++) in free_txqs()
356 txqs[i] = funeth_txq_free(txqs[i], state); in free_txqs()
369 state, &txqs[i]); in alloc_txqs()
453 qset->txqs = fp->txqs; in fun_free_rings()
465 fp->txqs = NULL; in fun_free_rings()
481 struct funeth_txq **xdpqs = NULL, **txqs; in fun_alloc_rings() local
514 qset->txqs = txqs; in fun_alloc_rings()
796 fp->txqs = qset->txqs; in fun_up()
1630 .txqs = fp->txqs, in fun_change_num_queues()
1659 memcpy(newqs.txqs, fp->txqs, keep_tx * sizeof(*fp->txqs)); in fun_change_num_queues()
[all …]
funeth.h
67 struct funeth_txq **txqs; member
88 struct funeth_txq **txqs; member
/drivers/net/wireless/ath/ath5k/
mac80211-ops.c
69 ath5k_tx_queue(hw, skb, &ah->txqs[qnum], control); in ath5k_tx()
749 *tx = ah->txqs[AR5K_TX_QUEUE_ID_DATA_MIN].txq_max; in ath5k_get_ringparam()
769 for (qnum = 0; qnum < ARRAY_SIZE(ah->txqs); qnum++) { in ath5k_set_ringparam()
770 if (!ah->txqs[qnum].setup) in ath5k_set_ringparam()
772 if (ah->txqs[qnum].qnum < AR5K_TX_QUEUE_ID_DATA_MIN || in ath5k_set_ringparam()
773 ah->txqs[qnum].qnum > AR5K_TX_QUEUE_ID_DATA_MAX) in ath5k_set_ringparam()
776 ah->txqs[qnum].txq_max = tx; in ath5k_set_ringparam()
777 if (ah->txqs[qnum].txq_len >= ah->txqs[qnum].txq_max) in ath5k_set_ringparam()
778 ieee80211_stop_queue(hw, ah->txqs[qnum].qnum); in ath5k_set_ringparam()
base.c
1055 txq = &ah->txqs[qnum]; in ath5k_txq_setup()
1067 return &ah->txqs[qnum]; in ath5k_txq_setup()
1161 for (i = 0; i < ARRAY_SIZE(ah->txqs); i++) { in ath5k_drain_tx_buffs()
1162 if (ah->txqs[i].setup) { in ath5k_drain_tx_buffs()
1163 txq = &ah->txqs[i]; in ath5k_drain_tx_buffs()
1186 struct ath5k_txq *txq = ah->txqs; in ath5k_txq_release()
1189 for (i = 0; i < ARRAY_SIZE(ah->txqs); i++, txq++) in ath5k_txq_release()
1839 ath5k_tx_processq(ah, &ah->txqs[i]); in ath5k_tasklet_tx()
2519 for (i = 0; i < ARRAY_SIZE(ah->txqs); i++) { in ath5k_tx_complete_poll_work()
2520 if (ah->txqs[i].setup) { in ath5k_tx_complete_poll_work()
[all …]
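
ath5k takes the opposite approach to the drivers above: txqs[] is a fixed-size array embedded in the device structure, entries are flagged in use via a setup field, and every walk is a full ARRAY_SIZE() loop that skips unconfigured slots. A small sketch of that pattern (hypothetical names):

#include <linux/kernel.h>	/* ARRAY_SIZE() */
#include <linux/types.h>

struct my_txq { bool setup; int qnum; };

struct my_hw { struct my_txq txqs[10]; };

/* Visit only the queues that were set up; unused slots stay in the
 * array and are skipped rather than freed. */
static void my_for_each_txq(struct my_hw *ah, void (*fn)(struct my_txq *))
{
	unsigned int i;

	for (i = 0; i < ARRAY_SIZE(ah->txqs); i++)
		if (ah->txqs[i].setup)
			fn(&ah->txqs[i]);
}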
/drivers/infiniband/hw/hfi1/
ipoib_tx.c
697 priv->txqs = kcalloc_node(dev->num_tx_queues, in hfi1_ipoib_txreq_init()
701 if (!priv->txqs) in hfi1_ipoib_txreq_init()
705 struct hfi1_ipoib_txq *txq = &priv->txqs[i]; in hfi1_ipoib_txreq_init()
756 struct hfi1_ipoib_txq *txq = &priv->txqs[i]; in hfi1_ipoib_txreq_init()
765 kfree(priv->txqs); in hfi1_ipoib_txreq_init()
766 priv->txqs = NULL; in hfi1_ipoib_txreq_init()
799 struct hfi1_ipoib_txq *txq = &priv->txqs[i]; in hfi1_ipoib_txreq_deinit()
812 kfree(priv->txqs); in hfi1_ipoib_txreq_deinit()
813 priv->txqs = NULL; in hfi1_ipoib_txreq_deinit()
822 struct hfi1_ipoib_txq *txq = &priv->txqs[i]; in hfi1_ipoib_napi_tx_enable()
[all …]
ipoib.h
125 struct hfi1_ipoib_txq *txqs; member
/drivers/net/ethernet/huawei/hinic3/
hinic3_tx.c
32 nic_dev->txqs = kcalloc(num_txqs, sizeof(*nic_dev->txqs), GFP_KERNEL); in hinic3_alloc_txqs()
33 if (!nic_dev->txqs) in hinic3_alloc_txqs()
37 txq = &nic_dev->txqs[q_id]; in hinic3_alloc_txqs()
52 kfree(nic_dev->txqs); in hinic3_free_txqs()
574 return hinic3_send_one_skb(skb, netdev, &nic_dev->txqs[q_id]); in hinic3_xmit_frame()
617 err = hinic3_stop_sq(&nic_dev->txqs[qid]); in hinic3_flush_txqs()
hinic3_nic_dev.h
67 struct hinic3_txq *txqs; member
/drivers/net/ethernet/intel/idpf/
idpf_txrx.c
998 kfree(txq_grp->txqs[j]); in idpf_txq_group_rel()
999 txq_grp->txqs[j] = NULL; in idpf_txq_group_rel()
1095 kfree(vport->txqs); in idpf_vport_queues_rel()
1096 vport->txqs = NULL; in idpf_vport_queues_rel()
1115 vport->txqs = kcalloc(vport->num_txq, sizeof(*vport->txqs), in idpf_vport_init_fast_path_txqs()
1118 if (!vport->txqs) in idpf_vport_init_fast_path_txqs()
1125 vport->txqs[k] = tx_grp->txqs[j]; in idpf_vport_init_fast_path_txqs()
1126 vport->txqs[k]->idx = k; in idpf_vport_init_fast_path_txqs()
1377 tx_qgrp->txqs[j] = kzalloc(sizeof(*tx_qgrp->txqs[j]), in idpf_txq_group_alloc()
1379 if (!tx_qgrp->txqs[j]) in idpf_txq_group_alloc()
[all …]
idpf_virtchnl.c
731 idpf_queue_set(SW_MARKER, vport->txqs[i]); in idpf_wait_for_marker_event()
739 idpf_queue_clear(POLL_MODE, vport->txqs[i]); in idpf_wait_for_marker_event()
1241 tx_qgrp->txqs[j]->tail = in __idpf_queue_reg_init()
1592 cpu_to_le32(tx_qgrp->txqs[j]->q_id); in idpf_send_config_tx_queues_msg()
1598 cpu_to_le16(tx_qgrp->txqs[j]->desc_count); in idpf_send_config_tx_queues_msg()
1600 cpu_to_le64(tx_qgrp->txqs[j]->dma); in idpf_send_config_tx_queues_msg()
1602 struct idpf_tx_queue *q = tx_qgrp->txqs[j]; in idpf_send_config_tx_queues_msg()
2021 cpu_to_le16(tx_qgrp->txqs[j]->q_vector->v_idx); in idpf_send_map_unmap_queue_vector_msg()
2023 cpu_to_le32(tx_qgrp->txqs[j]->q_vector->tx_itr_idx); in idpf_send_map_unmap_queue_vector_msg()
2138 idpf_queue_set(POLL_MODE, vport->txqs[i]); in idpf_send_disable_queues_msg()
[all …]
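
idpf keeps its transmit queues inside queue groups, and idpf_vport_init_fast_path_txqs() above flattens the per-group pointers into one vport->txqs[] array so the hot path can index a queue directly instead of walking groups. A sketch of that flattening step, with hypothetical my_* types standing in for the idpf structures:

#include <linux/errno.h>
#include <linux/slab.h>

struct my_txq { unsigned int idx; };

struct my_txq_group {
	unsigned int num_txq;
	struct my_txq *txqs[8];		/* queues owned by this group */
};

struct my_vport {
	unsigned int num_groups, num_txq;
	struct my_txq_group *groups;
	struct my_txq **txqs;		/* flat fast-path view */
};

/* Build the flat array once at setup; afterwards the data path does
 * vport->txqs[qid] without touching the group structures. */
static int my_init_fast_path_txqs(struct my_vport *vport)
{
	unsigned int i, j, k = 0;

	vport->txqs = kcalloc(vport->num_txq, sizeof(*vport->txqs),
			      GFP_KERNEL);
	if (!vport->txqs)
		return -ENOMEM;

	for (i = 0; i < vport->num_groups; i++) {
		struct my_txq_group *grp = &vport->groups[i];

		for (j = 0; j < grp->num_txq; j++, k++) {
			vport->txqs[k] = grp->txqs[j];
			vport->txqs[k]->idx = k;
		}
	}
	return 0;
}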
/drivers/net/ethernet/netronome/nfp/abm/
main.c
85 unsigned int txqs; in nfp_abm_spawn_repr() local
90 txqs = 1; in nfp_abm_spawn_repr()
93 txqs = alink->vnic->max_rx_rings; in nfp_abm_spawn_repr()
96 netdev = nfp_repr_alloc_mqs(app, txqs, 1); in nfp_abm_spawn_repr()
/drivers/net/ethernet/intel/ice/
ice_base.c
894 u8 buf_len = struct_size(qg_buf, txqs, 1); in ice_vsi_cfg_txq()
910 qg_buf->txqs[0].txq_id = cpu_to_le16(pf_q); in ice_vsi_cfg_txq()
911 ice_pack_txq_ctx(&tlan_ctx, &qg_buf->txqs[0].txq_ctx); in ice_vsi_cfg_txq()
946 txq = &qg_buf->txqs[0]; in ice_vsi_cfg_txq()
956 DEFINE_RAW_FLEX(struct ice_aqc_add_tx_qgrp, qg_buf, txqs, 1); in ice_vsi_cfg_single_txq()
978 DEFINE_RAW_FLEX(struct ice_aqc_add_tx_qgrp, qg_buf, txqs, 1); in ice_vsi_cfg_txqs()
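
The ice hits show the admin-queue buffer for adding Tx queues being sized with struct_size(qg_buf, txqs, 1): the command struct ends in a flexible array member, and only as many trailing txqs[] elements are allocated as the command carries (DEFINE_RAW_FLEX is the newer helper for the on-stack variant). A sketch of the struct_size() half with hypothetical types:

#include <linux/overflow.h>	/* struct_size() */
#include <linux/slab.h>
#include <linux/types.h>

struct my_txq_entry { __le16 txq_id; };

struct my_add_txq_cmd {
	u8 num_txqs;
	struct my_txq_entry txqs[];	/* flexible array member */
};

static struct my_add_txq_cmd *my_build_cmd(unsigned int n)
{
	/* struct_size() computes sizeof(*cmd) + n * sizeof(cmd->txqs[0])
	 * with overflow checking that open-coded arithmetic lacks. */
	struct my_add_txq_cmd *cmd = kzalloc(struct_size(cmd, txqs, n),
					     GFP_KERNEL);

	if (cmd)
		cmd->num_txqs = n;
	return cmd;
}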
/drivers/net/wireless/intel/iwlwifi/
iwl-trans.c
730 int iwl_trans_wait_tx_queues_empty(struct iwl_trans *trans, u32 txqs) in iwl_trans_wait_tx_queues_empty() argument
736 return iwl_trans_pcie_wait_txqs_empty(trans, txqs); in iwl_trans_wait_tx_queues_empty()
741 unsigned long txqs, bool freeze) in iwl_trans_freeze_txq_timer() argument
747 iwl_pcie_freeze_txq_timer(trans, txqs, freeze); in iwl_trans_freeze_txq_timer()
/drivers/net/ethernet/netronome/nfp/
nfp_net_repr.h
102 nfp_repr_alloc_mqs(struct nfp_app *app, unsigned int txqs, unsigned int rxqs);
nfp_net_repr.c
424 nfp_repr_alloc_mqs(struct nfp_app *app, unsigned int txqs, unsigned int rxqs) in nfp_repr_alloc_mqs() argument
429 netdev = alloc_etherdev_mqs(sizeof(*repr), txqs, rxqs); in nfp_repr_alloc_mqs()
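
nfp_repr_alloc_mqs() is a thin wrapper around alloc_etherdev_mqs(), the standard way to allocate a net_device with explicit Tx and Rx queue counts (alloc_candev_mqs() in the CAN results below is the analogous CAN-layer entry point). A hedged usage sketch with a hypothetical private struct:

#include <linux/etherdevice.h>	/* alloc_etherdev_mqs() */

struct my_repr_priv { int id; };	/* hypothetical per-device data */

static struct net_device *my_alloc_repr(unsigned int txqs, unsigned int rxqs)
{
	/* sizeof_priv is reserved behind the net_device and reached via
	 * netdev_priv(); txqs/rxqs fix the queue counts at allocation. */
	return alloc_etherdev_mqs(sizeof(struct my_repr_priv), txqs, rxqs);
}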
/drivers/net/can/dev/
dev.c
254 unsigned int txqs, unsigned int rxqs) in alloc_candev_mqs() argument
281 txqs, rxqs); in alloc_candev_mqs()
/drivers/net/wan/
farsync.c
437 int txqs; /* index to get next buffer to tx */ member
1305 txq_length = port->txqe - port->txqs; in do_bottom_half_tx()
1317 skb = port->txq[port->txqs]; in do_bottom_half_tx()
1318 port->txqs++; in do_bottom_half_tx()
1319 if (port->txqs == FST_TXQ_DEPTH) in do_bottom_half_tx()
1320 port->txqs = 0; in do_bottom_half_tx()
2098 port->txqs = 0; in fst_openport()
2233 txq_length = port->txqe - port->txqs; in fst_start_xmit()
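
In farsync the txqs field is not an array at all: the driver's software Tx queue is a fixed ring where txqs is the consumer index, txqe the producer index, both wrapping at FST_TXQ_DEPTH, and the fill level is the wrapped difference txqe - txqs. A sketch of that ring arithmetic with hypothetical names:

#define MY_TXQ_DEPTH 16		/* ring size, like FST_TXQ_DEPTH */

struct my_port {
	void *txq[MY_TXQ_DEPTH];
	int txqs;		/* next buffer to transmit (consumer) */
	int txqe;		/* next free slot (producer) */
};

/* Fill level: the raw difference goes negative once the producer has
 * wrapped past the end of the array, so fold it back into range. */
static int my_txq_length(const struct my_port *port)
{
	int len = port->txqe - port->txqs;

	return len < 0 ? len + MY_TXQ_DEPTH : len;
}

/* Consume one entry, wrapping the index at the ring boundary the same
 * way do_bottom_half_tx() does above. */
static void *my_txq_pop(struct my_port *port)
{
	void *buf = port->txq[port->txqs];

	if (++port->txqs == MY_TXQ_DEPTH)
		port->txqs = 0;
	return buf;
}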
/drivers/net/ethernet/marvell/
mvneta.c
511 struct mvneta_tx_queue *txqs; member
1260 struct mvneta_tx_queue *txq = &pp->txqs[queue]; in mvneta_port_up()
1864 return &pp->txqs[queue]; in mvneta_tx_done_policy()
2188 txq = &pp->txqs[cpu % txq_number]; in mvneta_xdp_xmit_back()
2229 txq = &pp->txqs[cpu % txq_number]; in mvneta_xdp_xmit()
2918 struct mvneta_tx_queue *txq = &pp->txqs[txq_id]; in mvneta_tx()
3429 mvneta_txq_done_force(pp, &pp->txqs[queue]); in mvneta_tx_reset()
3650 mvneta_txq_deinit(pp, &pp->txqs[queue]); in mvneta_cleanup_txqs()
4738 struct mvneta_tx_queue *txq = &pp->txqs[queue]; in mvneta_ethtool_set_coalesce()
5375 pp->txqs = devm_kcalloc(dev, txq_number, sizeof(*pp->txqs), GFP_KERNEL); in mvneta_init()
[all …]
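
mvneta selects the XDP transmit queue as &pp->txqs[cpu % txq_number], spreading concurrent senders across the hardware queues by CPU id. A minimal sketch of that selection (hypothetical names; the modulo means CPUs beyond the queue count share queues, so per-queue locking is still required):

#include <linux/smp.h>		/* smp_processor_id() */

#define MY_TXQ_COUNT 8		/* hypothetical number of hardware queues */

struct my_txq { int id; };

static struct my_txq my_txqs[MY_TXQ_COUNT];

/* Map the executing CPU onto a queue; callers must have preemption
 * disabled for smp_processor_id() to be stable. */
static struct my_txq *my_pick_txq(void)
{
	return &my_txqs[smp_processor_id() % MY_TXQ_COUNT];
}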
