
Searched refs:txq (Results 1 – 14 of 14) sorted by relevance

/include/trace/events/
qdisc.h  16 TP_PROTO(struct Qdisc *qdisc, const struct netdev_queue *txq,
19 TP_ARGS(qdisc, txq, packets, skb),
23 __field(const struct netdev_queue *, txq )
35 __entry->txq = txq;
38 __entry->ifindex = txq->dev ? txq->dev->ifindex : 0;
41 __entry->txq_state = txq->state;
51 TP_PROTO(struct Qdisc *qdisc, const struct netdev_queue *txq, struct sk_buff *skb),
53 TP_ARGS(qdisc, txq, skb),
57 __field(const struct netdev_queue *, txq)
66 __entry->txq = txq;
[all …]
/include/net/
netdev_queues.h  176 #define netif_txq_try_stop(txq, get_desc, start_thrs) \ argument
180 netif_tx_stop_queue(txq); \
192 netif_tx_start_queue(txq); \
223 _res = netif_txq_try_stop(txq, get_desc, start_thrs); \
260 #define __netif_txq_completed_wake(txq, pkts, bytes, \ argument
270 netdev_txq_completed_mb(txq, pkts, bytes); \
275 if (unlikely(netif_tx_queue_stopped(txq)) && \
277 netif_tx_wake_queue(txq); \
300 struct netdev_queue *txq; in netif_subqueue_sent() local
302 txq = netdev_get_tx_queue(dev, idx); in netif_subqueue_sent()
[all …]
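
A minimal sketch (not from the tree) of how a driver might use the netif_txq_maybe_stop()/netif_txq_completed_wake() helpers matched above for BQL-aware queue stop/wake. struct my_ring, my_rings[], MY_STOP_THRS and MY_WAKE_THRS are hypothetical; the netif_ and netdev_ calls are the real helpers from netdev_queues.h and netdevice.h.

/* Hypothetical driver sketch: stop the TX queue when the hardware ring is
 * nearly full, wake it from the completion path once space is back.
 */
#include <linux/netdevice.h>
#include <net/netdev_queues.h>

struct my_ring {
	unsigned int qid;		/* TX queue index */
	unsigned int free_descs;	/* free hardware descriptors */
	struct net_device *dev;
};

static struct my_ring my_rings[8];	/* hypothetical per-queue state */

#define MY_STOP_THRS	8		/* stop when this few descriptors remain */
#define MY_WAKE_THRS	32		/* wake once this many are free again */

static netdev_tx_t my_xmit(struct sk_buff *skb, struct net_device *dev)
{
	struct my_ring *ring = &my_rings[skb_get_queue_mapping(skb)];
	struct netdev_queue *txq = netdev_get_tx_queue(dev, ring->qid);

	/* ...post skb to the hardware ring, consuming descriptors... */

	netdev_tx_sent_queue(txq, skb->len);	/* BQL accounting */

	/* Stop the queue if the ring is nearly full; the helper re-checks
	 * availability after stopping to avoid racing with completions.
	 */
	netif_txq_maybe_stop(txq, ring->free_descs,
			     MY_STOP_THRS, MY_WAKE_THRS);
	return NETDEV_TX_OK;
}

static void my_clean_tx(struct my_ring *ring, unsigned int pkts,
			unsigned int bytes)
{
	struct netdev_queue *txq = netdev_get_tx_queue(ring->dev, ring->qid);

	/* Report completed work to BQL and wake the queue once enough
	 * descriptors are free again (pairs with the stop above).
	 */
	netif_txq_completed_wake(txq, pkts, bytes,
				 ring->free_descs, MY_WAKE_THRS);
}
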
sch_generic.h  241 static inline int qdisc_avail_bulklimit(const struct netdev_queue *txq) in qdisc_avail_bulklimit() argument
243 return netdev_queue_dql_avail(txq); in qdisc_avail_bulklimit()
779 struct netdev_queue *txq = netdev_get_tx_queue(dev, i); in qdisc_all_tx_empty() local
780 const struct Qdisc *q = rcu_dereference(txq->qdisc); in qdisc_all_tx_empty()
797 struct netdev_queue *txq = netdev_get_tx_queue(dev, i); in qdisc_tx_changing() local
799 if (rcu_access_pointer(txq->qdisc) != in qdisc_tx_changing()
800 rcu_access_pointer(txq->qdisc_sleeping)) in qdisc_tx_changing()
807 static inline bool qdisc_txq_has_no_queue(const struct netdev_queue *txq) in qdisc_txq_has_no_queue() argument
809 struct Qdisc *qdisc = rcu_access_pointer(txq->qdisc); in qdisc_txq_has_no_queue()
820 struct netdev_queue *txq = netdev_get_tx_queue(dev, i); in qdisc_tx_is_noop() local
[all …]
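
A minimal sketch of the per-queue walk shared by the sch_generic.h helpers matched above (qdisc_all_tx_empty(), qdisc_tx_changing(), qdisc_tx_is_noop()): visit every TX queue of a device and inspect its attached qdisc under RCU. The function name is hypothetical; the accessors are the real ones.

#include <linux/netdevice.h>
#include <net/sch_generic.h>

static bool example_any_txq_backlogged(struct net_device *dev)
{
	unsigned int i;
	bool busy = false;

	rcu_read_lock();
	for (i = 0; i < dev->num_tx_queues; i++) {
		struct netdev_queue *txq = netdev_get_tx_queue(dev, i);
		const struct Qdisc *q = rcu_dereference(txq->qdisc);

		/* A queue counts as busy if a qdisc is attached and holds packets. */
		if (q && !qdisc_is_empty(q)) {
			busy = true;
			break;
		}
	}
	rcu_read_unlock();

	return busy;
}
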
mac80211.h  2052 struct ieee80211_txq *txq; member
2542 struct ieee80211_txq *txq[IEEE80211_NUM_TIDS + 1]; member
4824 struct ieee80211_txq *txq);
7431 struct ieee80211_txq *txq);
7447 struct ieee80211_txq *txq) in ieee80211_tx_dequeue_ni() argument
7452 skb = ieee80211_tx_dequeue(hw, txq); in ieee80211_tx_dequeue_ni()
7468 struct ieee80211_txq *txq);
7499 struct ieee80211_txq *txq, bool force);
7516 __ieee80211_schedule_txq(hw, txq, true); in ieee80211_schedule_txq()
7533 __ieee80211_schedule_txq(hw, txq, force); in ieee80211_return_txq()
[all …]
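
A minimal sketch of the mac80211 TXQ scheduling API the matches above belong to, loosely following the round-robin pattern documented in mac80211.h: fetch schedulable txqs for one access category, dequeue frames, and hand each txq back to the scheduler. The my_hw_* helpers are hypothetical; the ieee80211_* calls are real.

#include <net/mac80211.h>

/* Hypothetical hardware helpers, declared only so the sketch is complete. */
bool my_hw_has_room(struct ieee80211_hw *hw, u8 ac);
void my_hw_push_frame(struct ieee80211_hw *hw, struct sk_buff *skb);

static void my_serve_ac(struct ieee80211_hw *hw, u8 ac)
{
	struct ieee80211_txq *txq;

	ieee80211_txq_schedule_start(hw, ac);
	while ((txq = ieee80211_next_txq(hw, ac))) {
		struct sk_buff *skb;

		/* Pull frames from this txq while the hardware has room. */
		while (my_hw_has_room(hw, ac) &&
		       (skb = ieee80211_tx_dequeue(hw, txq)))
			my_hw_push_frame(hw, skb);

		/* Hand the txq back; mac80211 re-schedules it if frames
		 * remain queued (force = false).
		 */
		ieee80211_return_txq(hw, txq, false);
	}
	ieee80211_txq_schedule_end(hw, ac);
}
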
pkt_sched.h  118 struct net_device *dev, struct netdev_queue *txq,
xdp.h  87 struct xdp_txq_info *txq; member
/include/linux/
netdevice.h  3571 netif_tx_start_queue(txq); in netif_tx_start_all_queues()
3595 netif_tx_wake_queue(txq); in netif_tx_wake_all_queues()
3680 return dql_avail(&txq->dql); in netdev_queue_dql_avail()
3926 netif_tx_start_queue(txq); in netif_start_subqueue()
3939 netif_tx_stop_queue(txq); in netif_stop_subqueue()
3981 netif_tx_wake_queue(txq); in netif_wake_subqueue()
4730 txq_trans_cond_update(txq); in netif_trans_update()
4766 __netif_tx_acquire(txq))
4787 __netif_tx_lock(txq, cpu); in netif_tx_disable()
4788 netif_tx_stop_queue(txq); in netif_tx_disable()
[all …]
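
A minimal sketch of a device-reset path built from the netdevice.h helpers matched above. my_reset() and my_hw_reinit() are hypothetical; netif_tx_disable() and netif_tx_wake_all_queues() are the real helpers.

#include <linux/netdevice.h>

void my_hw_reinit(struct net_device *dev);	/* hypothetical */

static void my_reset(struct net_device *dev)
{
	/* Take each TX queue's lock, mark it stopped and block further
	 * ndo_start_xmit() calls while the hardware is reinitialised.
	 */
	netif_tx_disable(dev);

	my_hw_reinit(dev);

	/* Rings are clean again: wake every TX queue. */
	netif_tx_wake_all_queues(dev);
}
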
netpoll.h  54 struct sk_buff_head txq; member
ti_wilink_st.h  142 struct sk_buff_head txq, tx_waitq; member
/include/net/mana/
hw_channel.h  190 struct hwc_wq *txq; member
mana.h  285 struct mana_txq *txq; member
363 struct mana_txq txq; member
/include/linux/usb/
usbnet.h  54 struct sk_buff_head txq; member
/include/net/libeth/
xdp.h  116 static inline u32 libeth_xdpsq_num(u32 rxq, u32 txq, u32 max) in libeth_xdpsq_num() argument
118 return min(max(nr_cpu_ids, rxq), max - txq); in libeth_xdpsq_num()
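
A quick worked example of the arithmetic in libeth_xdpsq_num() above (illustrative values only): with nr_cpu_ids = 8, rxq = 16, txq = 16 and max = 128, the helper returns min(max(8, 16), 128 - 16) = min(16, 112) = 16, i.e. one XDP send queue per Rx queue (there are more Rx queues than CPUs here), capped by the queues left over after the regular Tx queues.
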
/include/linux/avf/
virtchnl.h  421 struct virtchnl_txq_info txq; member
