/linux-6.3-rc2/drivers/net/wireless/intel/iwlwifi/

  iwl-trans.c
      46  trans->txqs.tfd.addr_size = 64;  in iwl_trans_alloc()
      50  trans->txqs.tfd.addr_size = 36;  in iwl_trans_alloc()
      51  trans->txqs.tfd.max_tbs = IWL_NUM_OF_TBS;  in iwl_trans_alloc()
      82  trans->txqs.bc_tbl_size =  in iwl_trans_init()
      85  trans->txqs.bc_tbl_size =  in iwl_trans_init()
      96  trans->txqs.bc_tbl_size,  in iwl_trans_init()
      98  if (!trans->txqs.bc_pool)  in iwl_trans_init()
     103  WARN_ON(trans->txqs.tfd.addr_size !=  in iwl_trans_init()
     116  if (!trans->txqs.tso_hdr_page) {  in iwl_trans_init()
     131  if (trans->txqs.tso_hdr_page) {  in iwl_trans_free()
     [all …]

  iwl-trans.h
     170  #define IWL_TRANS_MAX_FRAGS(trans) ((trans)->txqs.tfd.max_tbs - 3)
     588  void (*freeze_txq_timer)(struct iwl_trans *trans, unsigned long txqs,
    1061  struct iwl_trans_txqs txqs;  member
    1318  unsigned long txqs,  in iwl_trans_freeze_txq_timer() argument
    1327  trans->ops->freeze_txq_timer(trans, txqs, freeze);  in iwl_trans_freeze_txq_timer()
    1343  u32 txqs)  in iwl_trans_wait_tx_queues_empty() argument
    1354  return trans->ops->wait_tx_queues_empty(trans, txqs);  in iwl_trans_wait_tx_queues_empty()

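All of the iwl-trans.c/iwl-trans.h hits above read or write fields of the per-transport trans->txqs state (the struct iwl_trans_txqs member at iwl-trans.h:1061). As a rough map of that state, here is a simplified, hypothetical mirror of the layout: the field names come straight from the matches above, but the element types, the queue-count bound and everything not visible in the matches are assumptions, not the real iwlwifi definition.

    /*
     * Simplified sketch only -- NOT the real struct iwl_trans_txqs.
     * Field names follow the matches above; sizes, types and the
     * SKETCH_MAX_TXQS bound are assumptions.
     */
    #include <linux/types.h>

    #define SKETCH_MAX_TXQS 512                  /* assumed upper bound on queue ids */

    struct iwl_txq;                              /* per-queue state, opaque here */

    struct sketch_trans_txqs {
            struct iwl_txq *txq[SKETCH_MAX_TXQS];            /* queues indexed by id    */
            DECLARE_BITMAP(queue_used, SKETCH_MAX_TXQS);     /* ids currently allocated */
            DECLARE_BITMAP(queue_stopped, SKETCH_MAX_TXQS);  /* ids under flow control  */
            struct {
                    u8 q_id;                     /* command queue id (cmd.q_id)      */
                    u8 fifo;                     /* HW FIFO behind the command queue */
                    unsigned int wdg_timeout;    /* command queue watchdog           */
            } cmd;
            struct {
                    u8 max_tbs;                  /* TBs per TFD, cf. IWL_TRANS_MAX_FRAGS() */
                    u8 addr_size;                /* TB DMA address width: 36 or 64 bits    */
                    u16 size;                    /* bytes per TFD                          */
            } tfd;
            size_t bc_tbl_size;                  /* byte-count table size                  */
            bool bc_table_dword;                 /* byte counts stored as dwords           */
    };
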
/linux-6.3-rc2/drivers/net/wireless/intel/iwlwifi/pcie/

  tx.c
     167  trans->txqs.tfd.max_tbs);  in iwl_pcie_txq_build_tfd()
     274  trans->txqs.tfd.size *  in iwl_pcie_txq_free()
     306  memset(trans->txqs.queue_used, 0, sizeof(trans->txqs.queue_used));  in iwl_pcie_tx_start()
     329  trans->txqs.cmd.fifo,  in iwl_pcie_tx_start()
     330  trans->txqs.cmd.wdg_timeout);  in iwl_pcie_tx_start()
     443  memset(trans->txqs.queue_used, 0, sizeof(trans->txqs.queue_used));  in iwl_pcie_tx_stop()
     467  memset(trans->txqs.queue_used, 0, sizeof(trans->txqs.queue_used));  in iwl_pcie_tx_free()
     904  struct iwl_txq *txq = trans->txqs.txq[trans->txqs.cmd.q_id];  in iwl_pcie_enqueue_hcmd()
    1185  struct iwl_txq *txq = trans->txqs.txq[trans->txqs.cmd.q_id];  in iwl_pcie_hcmd_complete()
    1313  trans->txqs.tfd.size,  in iwl_fill_data_tbs_amsdu()
    [all …]

  trans-gen2.c
     240  if (iwl_txq_gen2_init(trans, trans->txqs.cmd.q_id, queue_size))  in iwl_pcie_gen2_nic_init()
     326  memset(trans->txqs.queue_stopped, 0,  in iwl_trans_pcie_gen2_fw_alive()
     327  sizeof(trans->txqs.queue_stopped));  in iwl_trans_pcie_gen2_fw_alive()
     328  memset(trans->txqs.queue_used, 0, sizeof(trans->txqs.queue_used));  in iwl_trans_pcie_gen2_fw_alive()

  tx-gen2.c
      31  struct iwl_txq *txq = trans->txqs.txq[trans->txqs.cmd.q_id];  in iwl_pcie_gen2_enqueue_hcmd()
     143  cpu_to_le16(QUEUE_TO_SEQ(trans->txqs.cmd.q_id) |  in iwl_pcie_gen2_enqueue_hcmd()
     191  cmd_size, txq->write_ptr, idx, trans->txqs.cmd.q_id);  in iwl_pcie_gen2_enqueue_hcmd()

  trans.c
    1956  trans->txqs.cmd.q_id = trans_cfg->cmd_queue;  in iwl_trans_pcie_configure()
    1957  trans->txqs.cmd.fifo = trans_cfg->cmd_fifo;  in iwl_trans_pcie_configure()
    2308  struct iwl_txq *txq = trans->txqs.txq[i];  in iwl_trans_pcie_block_txq_ptrs()
    2310  if (i == trans->txqs.cmd.q_id)  in iwl_trans_pcie_block_txq_ptrs()
    2362  txq = trans->txqs.txq[txq_idx];  in iwl_trans_pcie_wait_txq_empty()
    2418  if (cnt == trans->txqs.cmd.q_id)  in iwl_trans_pcie_wait_txqs_empty()
    2420  if (!test_bit(cnt, trans->txqs.queue_used))  in iwl_trans_pcie_wait_txqs_empty()
    2608  if (state->pos == trans->txqs.cmd.q_id)  in iwl_dbgfs_tx_queue_seq_show()
    3299  struct iwl_txq *cmdq = trans->txqs.txq[trans->txqs.cmd.q_id];  in iwl_trans_pcie_dump_data()
    3368  u16 tfd_size = trans->txqs.tfd.size;  in iwl_trans_pcie_dump_data()
    [all …]

  ctxt-info.c
     221  cpu_to_le64(trans->txqs.txq[trans->txqs.cmd.q_id]->dma_addr);  in iwl_pcie_ctxt_info_init()

  ctxt-info-gen3.c
     190  cpu_to_le64(trans->txqs.txq[trans->txqs.cmd.q_id]->dma_addr);  in iwl_pcie_ctxt_info_gen3_init()

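The trans.c matches at 2418-2420 show the canonical walk over these queues: skip the command queue, consult the queue_used bitmap, and only then wait on (or otherwise touch) the queue. A minimal sketch of that loop shape, assuming an iwl_trans_pcie_wait_txq_empty()-style helper that takes a queue index and returns 0 on success; the helper name, the n_queues bound and the error handling are assumptions:

    /* Driver-internal context: iwl-trans.h provides struct iwl_trans. */
    static int sketch_wait_all_data_txqs_empty(struct iwl_trans *trans,
                                               int n_queues)
    {
            int cnt, ret;

            for (cnt = 0; cnt < n_queues; cnt++) {
                    if (cnt == trans->txqs.cmd.q_id)             /* never the command queue */
                            continue;
                    if (!test_bit(cnt, trans->txqs.queue_used))  /* id not in use */
                            continue;

                    ret = iwl_trans_pcie_wait_txq_empty(trans, cnt);
                    if (ret)
                            return ret;
            }

            return 0;
    }
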
/linux-6.3-rc2/drivers/net/wireless/intel/iwlwifi/queue/

  tx.c
      49  WARN_ON(trans->txqs.bc_table_dword);  in iwl_pcie_gen2_update_byte_tbl()
     169  trans->txqs.tfd.max_tbs);  in iwl_txq_gen2_set_tb()
     874  txq = trans->txqs.txq[txq_id];  in iwl_txq_gen2_free()
     891  trans->txqs.txq[txq_id] = NULL;  in iwl_txq_gen2_free()
    1171  trans->txqs.txq[qid] = txq;  in iwl_txq_alloc_response()
    1283  memset(trans->txqs.queue_used, 0, sizeof(trans->txqs.queue_used));  in iwl_txq_gen2_tx_free()
    1287  if (!trans->txqs.txq[i])  in iwl_txq_gen2_tx_free()
    1300  if (!trans->txqs.txq[txq_id]) {  in iwl_txq_gen2_init()
    1447  if (trans->txqs.bc_table_dword)  in iwl_txq_gen1_update_byte_cnt_tbl()
    1652  trans->txqs.dev_cmd_offs);  in iwl_txq_reclaim()
    [all …]

  tx.h
      32  if (test_and_clear_bit(txq->id, trans->txqs.queue_stopped)) {  in iwl_wake_queue()
      44  return (u8 *)txq->tfds + trans->txqs.tfd.size * idx;  in iwl_txq_get_tfd()
      63  if (!test_and_set_bit(txq->id, trans->txqs.queue_stopped)) {  in iwl_txq_stop()
     178  void iwl_trans_txq_freeze_timer(struct iwl_trans *trans, unsigned long txqs,

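tx.h:32 and tx.h:63 are the two halves of the transmit flow-control handshake: the queue_stopped bitmap is flipped with atomic test-and-set/test-and-clear so the op-mode is told about a full queue exactly once and woken exactly once. A trimmed sketch of that pairing, assuming the op-mode callbacks as the notification path; surrounding locking and debug prints are omitted:

    /* Sketch of the stop/wake pairing from queue/tx.h:63 and :32. */
    static void sketch_txq_stop(struct iwl_trans *trans, struct iwl_txq *txq)
    {
            /* Atomically mark the queue stopped; notify only on 0 -> 1. */
            if (!test_and_set_bit(txq->id, trans->txqs.queue_stopped))
                    iwl_op_mode_queue_full(trans->op_mode, txq->id);
    }

    static void sketch_wake_queue(struct iwl_trans *trans, struct iwl_txq *txq)
    {
            /* Atomically clear the flag; notify only if it was set. */
            if (test_and_clear_bit(txq->id, trans->txqs.queue_stopped))
                    iwl_op_mode_queue_not_full(trans->op_mode, txq->id);
    }
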
/linux-6.3-rc2/drivers/net/wireless/ath/ath5k/

  mac80211-ops.c
      69  ath5k_tx_queue(hw, skb, &ah->txqs[qnum], control);  in ath5k_tx()
     746  *tx = ah->txqs[AR5K_TX_QUEUE_ID_DATA_MIN].txq_max;  in ath5k_get_ringparam()
     766  for (qnum = 0; qnum < ARRAY_SIZE(ah->txqs); qnum++) {  in ath5k_set_ringparam()
     767  if (!ah->txqs[qnum].setup)  in ath5k_set_ringparam()
     769  if (ah->txqs[qnum].qnum < AR5K_TX_QUEUE_ID_DATA_MIN ||  in ath5k_set_ringparam()
     770  ah->txqs[qnum].qnum > AR5K_TX_QUEUE_ID_DATA_MAX)  in ath5k_set_ringparam()
     773  ah->txqs[qnum].txq_max = tx;  in ath5k_set_ringparam()
     774  if (ah->txqs[qnum].txq_len >= ah->txqs[qnum].txq_max)  in ath5k_set_ringparam()
     775  ieee80211_stop_queue(hw, ah->txqs[qnum].qnum);  in ath5k_set_ringparam()

  base.c
    1055  txq = &ah->txqs[qnum];  in ath5k_txq_setup()
    1067  return &ah->txqs[qnum];  in ath5k_txq_setup()
    1161  for (i = 0; i < ARRAY_SIZE(ah->txqs); i++) {  in ath5k_drain_tx_buffs()
    1162  if (ah->txqs[i].setup) {  in ath5k_drain_tx_buffs()
    1163  txq = &ah->txqs[i];  in ath5k_drain_tx_buffs()
    1186  struct ath5k_txq *txq = ah->txqs;  in ath5k_txq_release()
    1189  for (i = 0; i < ARRAY_SIZE(ah->txqs); i++, txq++)  in ath5k_txq_release()
    1839  ath5k_tx_processq(ah, &ah->txqs[i]);  in ath5k_tasklet_tx()
    2519  for (i = 0; i < ARRAY_SIZE(ah->txqs); i++) {  in ath5k_tx_complete_poll_work()
    2520  if (ah->txqs[i].setup) {  in ath5k_tx_complete_poll_work()
    [all …]

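ath5k takes the opposite approach to iwlwifi: ah->txqs is a fixed-size array and each slot carries a .setup flag, so every walk in base.c and mac80211-ops.c is an ARRAY_SIZE() loop that skips slots never configured by ath5k_txq_setup(). A generic sketch of that walk, with the per-queue work factored out as a callback; the callback shape is invented for illustration:

    /* Sketch of the ah->txqs[] iteration pattern (cf. base.c:1161, :2519). */
    static void sketch_for_each_setup_txq(struct ath5k_hw *ah,
                                          void (*fn)(struct ath5k_hw *ah,
                                                     struct ath5k_txq *txq))
    {
            unsigned int i;

            for (i = 0; i < ARRAY_SIZE(ah->txqs); i++) {
                    if (!ah->txqs[i].setup)      /* slot was never configured */
                            continue;
                    fn(ah, &ah->txqs[i]);
            }
    }
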
/linux-6.3-rc2/drivers/net/ethernet/fungible/funeth/

  funeth_main.c
     355  for (i = start; i < nqs && txqs[i]; i++)  in free_txqs()
     356  txqs[i] = funeth_txq_free(txqs[i], state);  in free_txqs()
     369  state, &txqs[i]);  in alloc_txqs()
     453  qset->txqs = fp->txqs;  in fun_free_rings()
     465  fp->txqs = NULL;  in fun_free_rings()
     481  struct funeth_txq **xdpqs = NULL, **txqs;  in fun_alloc_rings() local
     514  qset->txqs = txqs;  in fun_alloc_rings()
     796  fp->txqs = qset->txqs;  in fun_up()
    1630  .txqs = fp->txqs,  in fun_change_num_queues()
    1659  memcpy(newqs.txqs, fp->txqs, keep_tx * sizeof(*fp->txqs));  in fun_change_num_queues()
    [all …]

  funeth.h
      67  struct funeth_txq **txqs;  member
      88  struct funeth_txq **txqs;  member

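funeth keeps its TX queues as an array of pointers (struct funeth_txq **txqs), which is what makes the free_txqs() loop at funeth_main.c:355 work: alloc_txqs() fills the array front to back, so cleanup can stop at the first NULL entry and the same helper serves both the error path and a full teardown. A sketch of that shape, assuming a funeth_txq_free()-style helper that releases a queue and returns the value to store back:

    /* Sketch of the partial-cleanup loop at funeth_main.c:355-356; the
     * helper signature and the 'state' argument are assumptions. */
    static void sketch_free_txqs(struct funeth_txq **txqs, unsigned int nqs,
                                 unsigned int start, int state)
    {
            unsigned int i;

            for (i = start; i < nqs && txqs[i]; i++)
                    txqs[i] = funeth_txq_free(txqs[i], state);
    }
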
/linux-6.3-rc2/drivers/net/ethernet/huawei/hinic/

  hinic_main.c
     134  if (nic_dev->txqs)  in create_txqs()
     139  if (!nic_dev->txqs)  in create_txqs()
     165  hinic_clean_txq(&nic_dev->txqs[i]);  in create_txqs()
     169  hinic_clean_txq(&nic_dev->txqs[j]);  in create_txqs()
     184  napi_enable(&nic_dev->txqs[i].napi);  in enable_txqs_napi()
     193  napi_disable(&nic_dev->txqs[i].napi);  in disable_txqs_napi()
     205  if (!nic_dev->txqs)  in free_txqs()
     210  hinic_clean_txq(&nic_dev->txqs[i]);  in free_txqs()
     216  nic_dev->txqs = NULL;  in free_txqs()
     834  nic_dev->txqs[q_id].napi.state);  in hinic_tx_timeout()
    [all …]

  hinic_dev.h
      97  struct hinic_txq *txqs;  member

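hinic attaches one NAPI context to every entry of nic_dev->txqs, and enable_txqs_napi()/disable_txqs_napi() (hinic_main.c:184/193) are plain loops over them. A minimal sketch, assuming the driver's queue count is passed in as num_qps:

    /* Sketch of per-TX-queue NAPI toggling (cf. hinic_main.c:184, :193). */
    static void sketch_enable_txqs_napi(struct hinic_dev *nic_dev,
                                        unsigned int num_qps)
    {
            unsigned int i;

            for (i = 0; i < num_qps; i++)
                    napi_enable(&nic_dev->txqs[i].napi);
    }

    static void sketch_disable_txqs_napi(struct hinic_dev *nic_dev,
                                         unsigned int num_qps)
    {
            unsigned int i;

            for (i = 0; i < num_qps; i++)
                    napi_disable(&nic_dev->txqs[i].napi);
    }
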
/linux-6.3-rc2/drivers/infiniband/hw/hfi1/

  ipoib_tx.c
     696  priv->txqs = kcalloc_node(dev->num_tx_queues,  in hfi1_ipoib_txreq_init()
     700  if (!priv->txqs)  in hfi1_ipoib_txreq_init()
     704  struct hfi1_ipoib_txq *txq = &priv->txqs[i];  in hfi1_ipoib_txreq_init()
     752  struct hfi1_ipoib_txq *txq = &priv->txqs[i];  in hfi1_ipoib_txreq_init()
     761  kfree(priv->txqs);  in hfi1_ipoib_txreq_init()
     762  priv->txqs = NULL;  in hfi1_ipoib_txreq_init()
     795  struct hfi1_ipoib_txq *txq = &priv->txqs[i];  in hfi1_ipoib_txreq_deinit()
     808  kfree(priv->txqs);  in hfi1_ipoib_txreq_deinit()
     809  priv->txqs = NULL;  in hfi1_ipoib_txreq_deinit()
     818  struct hfi1_ipoib_txq *txq = &priv->txqs[i];  in hfi1_ipoib_napi_tx_enable()
    [all …]

  ipoib.h
     125  struct hfi1_ipoib_txq *txqs;  member

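hfi1's IPoIB path sizes its txqs array by dev->num_tx_queues and allocates it NUMA-locally with kcalloc_node() (ipoib_tx.c:696), then resets the pointer to NULL after kfree() so later teardown calls are harmless. A minimal sketch of that allocate/teardown pairing; the node-id parameter and the error code are assumptions:

    /* Sketch of the per-queue array lifecycle in ipoib_tx.c. */
    static int sketch_txreq_init(struct hfi1_ipoib_dev_priv *priv,
                                 struct net_device *dev, int node)
    {
            priv->txqs = kcalloc_node(dev->num_tx_queues, sizeof(*priv->txqs),
                                      GFP_KERNEL, node);
            if (!priv->txqs)
                    return -ENOMEM;

            /* ... per-queue initialization would go here ... */
            return 0;
    }

    static void sketch_txreq_deinit(struct hfi1_ipoib_dev_priv *priv)
    {
            kfree(priv->txqs);
            priv->txqs = NULL;       /* makes repeated teardown harmless */
    }
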
/linux-6.3-rc2/net/

  devres.c
      22  unsigned int txqs, unsigned int rxqs)  in devm_alloc_etherdev_mqs() argument
      30  dr->ndev = alloc_etherdev_mqs(sizeof_priv, txqs, rxqs);  in devm_alloc_etherdev_mqs()

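devm_alloc_etherdev_mqs() is the device-managed wrapper around alloc_etherdev_mqs(): the net_device is released automatically when the owning struct device goes away, so probe error paths need no explicit free_netdev(). A minimal usage sketch; the private struct, queue counts and platform-driver context are invented for illustration:

    #include <linux/etherdevice.h>
    #include <linux/platform_device.h>

    struct sketch_priv {
            struct net_device *ndev;
    };

    static int sketch_probe(struct platform_device *pdev)
    {
            struct net_device *ndev;
            struct sketch_priv *priv;

            /* 4 TX and 4 RX queues; memory is freed automatically with pdev->dev. */
            ndev = devm_alloc_etherdev_mqs(&pdev->dev, sizeof(*priv), 4, 4);
            if (!ndev)
                    return -ENOMEM;

            priv = netdev_priv(ndev);
            priv->ndev = ndev;
            SET_NETDEV_DEV(ndev, &pdev->dev);

            return register_netdev(ndev);
    }

Only the allocation is device-managed; unregistering the netdev on the remove path is still the driver's job.
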
/linux-6.3-rc2/drivers/net/ethernet/netronome/nfp/abm/

  main.c
      85  unsigned int txqs;  in nfp_abm_spawn_repr() local
      90  txqs = 1;  in nfp_abm_spawn_repr()
      93  txqs = alink->vnic->max_rx_rings;  in nfp_abm_spawn_repr()
      96  netdev = nfp_repr_alloc_mqs(app, txqs, 1);  in nfp_abm_spawn_repr()

/linux-6.3-rc2/drivers/net/ethernet/netronome/nfp/

  nfp_net_repr.h
     102  nfp_repr_alloc_mqs(struct nfp_app *app, unsigned int txqs, unsigned int rxqs);

/linux-6.3-rc2/drivers/net/ethernet/intel/ice/

  ice_base.c
     773  u8 buf_len = struct_size(qg_buf, txqs, 1);  in ice_vsi_cfg_txq()
     789  qg_buf->txqs[0].txq_id = cpu_to_le16(pf_q);  in ice_vsi_cfg_txq()
     790  ice_set_ctx(hw, (u8 *)&tlan_ctx, qg_buf->txqs[0].txq_ctx,  in ice_vsi_cfg_txq()
     833  txq = &qg_buf->txqs[0];  in ice_vsi_cfg_txq()

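ice_base.c:773 shows the standard way to size a buffer whose last member is a flexible array: struct_size(qg_buf, txqs, 1) from <linux/overflow.h> computes sizeof(*qg_buf) plus one txqs[] element, with overflow checking. A generic sketch of the same pattern; the struct below is illustrative, not the ice driver's ice_aqc_add_tx_qgrp:

    #include <linux/overflow.h>
    #include <linux/slab.h>
    #include <linux/types.h>

    struct sketch_txq_entry {
            __le16 txq_id;
            u8 txq_ctx[22];                  /* opaque per-queue context blob */
    };

    struct sketch_txq_group {
            u8 num_txqs;
            struct sketch_txq_entry txqs[];  /* flexible array member */
    };

    static struct sketch_txq_group *sketch_alloc_txq_group(u8 n)
    {
            struct sketch_txq_group *grp;

            /* struct_size() = sizeof(*grp) + n * sizeof(grp->txqs[0]), overflow-checked. */
            grp = kzalloc(struct_size(grp, txqs, n), GFP_KERNEL);
            if (grp)
                    grp->num_txqs = n;
            return grp;
    }
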
/linux-6.3-rc2/include/linux/

  etherdevice.h
      56  struct net_device *alloc_etherdev_mqs(int sizeof_priv, unsigned int txqs,
      62  unsigned int txqs,

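alloc_etherdev_mqs() is the multi-queue variant of alloc_etherdev(): sizeof_priv reserves the driver-private area reachable via netdev_priv(), and txqs/rxqs set the TX and RX queue counts. A minimal usage sketch with invented queue counts; the allocation must be balanced with free_netdev() if registration fails or after the device is unregistered:

    #include <linux/etherdevice.h>
    #include <linux/netdevice.h>

    struct sketch_eth_priv {
            int example_field;
    };

    static struct net_device *sketch_create_eth(void)
    {
            struct net_device *ndev;

            ndev = alloc_etherdev_mqs(sizeof(struct sketch_eth_priv),
                                      8 /* txqs */, 8 /* rxqs */);
            if (!ndev)
                    return NULL;

            if (register_netdev(ndev)) {
                    free_netdev(ndev);      /* balance the allocation on failure */
                    return NULL;
            }

            return ndev;
    }
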
/linux-6.3-rc2/drivers/net/can/dev/

  dev.c
     229  unsigned int txqs, unsigned int rxqs)  in alloc_candev_mqs() argument
     256  txqs, rxqs);  in alloc_candev_mqs()

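alloc_candev_mqs() plays the same role for CAN devices. The matches only show the trailing txqs/rxqs arguments, so the leading sizeof_priv and echo_skb_max parameters in the sketch below follow my reading of <linux/can/dev.h> and should be treated as an assumption:

    #include <linux/can/dev.h>

    struct sketch_can_priv {
            struct can_priv can;     /* can_priv must stay the first member */
    };

    static struct net_device *sketch_alloc_can(void)
    {
            /* 4 echo skbs, 2 TX queues, 1 RX queue -- illustrative values. */
            return alloc_candev_mqs(sizeof(struct sketch_can_priv), 4, 2, 1);
    }
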
/linux-6.3-rc2/net/ethernet/

  eth.c
     391  struct net_device *alloc_etherdev_mqs(int sizeof_priv, unsigned int txqs,  in alloc_etherdev_mqs() argument
     395  ether_setup, txqs, rxqs);  in alloc_etherdev_mqs()
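
eth.c:391 shows that alloc_etherdev_mqs() is simply alloc_netdev_mqs() with ether_setup as the setup callback. A driver that wants Ethernet defaults plus some tweaks can call alloc_netdev_mqs() directly with its own setup function; the name template, flag tweak and queue counts below are illustrative:

    #include <linux/etherdevice.h>
    #include <linux/netdevice.h>

    static void sketch_setup(struct net_device *dev)
    {
            ether_setup(dev);                 /* start from Ethernet defaults */
            dev->flags &= ~IFF_MULTICAST;     /* ...then adjust as needed     */
    }

    static struct net_device *sketch_alloc_custom(void)
    {
            return alloc_netdev_mqs(0, "sketch%d", NET_NAME_UNKNOWN,
                                    sketch_setup, 1 /* txqs */, 1 /* rxqs */);
    }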