| /net/sched/ |
| sch_sfq.c |
| 200 | int qlen = slot->qlen; | in sfq_link() (local) |
| 203 | n = q->dep[qlen].next; | in sfq_link() |
| 303 | sch->q.qlen--; | in sfq_drop() |
| 346 | sfq_index x, qlen; | in sfq_enqueue() (local) |
| 460 | qlen = slot->qlen; | in sfq_enqueue() |
| 465 | if (qlen != slot->qlen) { | in sfq_enqueue() |
| 498 | sch->q.qlen--; | in sfq_dequeue() |
| 545 | if (!slot->qlen) | in sfq_rehash() |
| 732 | qlen = sch->q.qlen; | in sfq_change() |
| 740 | qdisc_tree_reduce_backlog(sch, qlen - sch->q.qlen, dropped); | in sfq_change() |
| [all …] |
|
| sch_sfb.c |
| 133 | b[hash].qlen++; | in increment_one_qlen() |
| 161 | if (b[hash].qlen > 0) | in decrement_one_qlen() |
| 162 | b[hash].qlen--; | in decrement_one_qlen() |
| 205 | if (qlen < b->qlen) | in sfb_compute_qlen() |
| 206 | qlen = b->qlen; | in sfb_compute_qlen() |
| 214 | return qlen; | in sfb_compute_qlen() |
| 336 | if (b->qlen == 0) | in sfb_enqueue() |
| 341 | minqlen = b->qlen; | in sfb_enqueue() |
| 369 | if (b->qlen == 0) | in sfb_enqueue() |
| 410 | sch->q.qlen++; | in sfb_enqueue() |
| [all …] |
|
| sch_codel.c |
| 106 | unsigned int qlen, dropped = 0; | in codel_change() (local) |
| 145 | qlen = sch->q.qlen; | in codel_change() |
| 146 | while (sch->q.qlen > sch->limit) { | in codel_change() |
| 153 | qdisc_tree_reduce_backlog(sch, qlen - sch->q.qlen, dropped); | in codel_change() |
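
The entries for sch_sfq.c, sch_codel.c, sch_choke.c, sch_hhf.c, sch_pie.c and sch_fq_codel.c all end in the same change-path idiom: after the limit is tightened, packets are dropped until sch->q.qlen fits again, and qdisc_tree_reduce_backlog() tells the parent qdiscs how many packets and bytes disappeared. A minimal sketch of that idiom follows; it is illustrative only, and my_dequeue_one() stands in for whatever qdisc-specific routine pulls a packet off the internal queue (codel uses __qdisc_dequeue_head(), hhf and fq_codel use their own dequeue helpers).

    /* Sketch of the drain-after-limit-change idiom seen in codel_change(),
     * pie_change(), hhf_change(), choke_change() and friends.
     * my_dequeue_one() is assumed to unlink one skb and decrement sch->q.qlen.
     */
    static void my_change_drain(struct Qdisc *sch)
    {
        unsigned int qlen = sch->q.qlen;    /* length before draining */
        unsigned int dropped = 0;           /* bytes freed */

        while (sch->q.qlen > sch->limit) {
            struct sk_buff *skb = my_dequeue_one(sch);  /* hypothetical */

            if (!skb)
                break;
            dropped += qdisc_pkt_len(skb);
            qdisc_qstats_backlog_dec(sch, skb);
            rtnl_qdisc_drop(skb, sch);
        }
        /* Report the packets/bytes that vanished to ancestor qdiscs. */
        qdisc_tree_reduce_backlog(sch, qlen - sch->q.qlen, dropped);
    }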
|
| sch_choke.c |
| 126 | --sch->q.qlen; | in choke_drop_by_idx() |
| 220 | q->vars.qavg = red_calc_qavg(p, &q->vars, sch->q.qlen); | in choke_enqueue() |
| 267 | if (sch->q.qlen < q->limit) { | in choke_enqueue() |
| 270 | ++sch->q.qlen; | in choke_enqueue() |
| 297 | --sch->q.qlen; | in choke_dequeue() |
| 380 | unsigned int oqlen = sch->q.qlen, tail = 0; | in choke_change() |
| 395 | --sch->q.qlen; | in choke_change() |
| 398 | qdisc_tree_reduce_backlog(sch, oqlen - sch->q.qlen, dropped); | in choke_change() |
|
| sch_hhf.c |
| 362 | sch->q.qlen--; | in hhf_drop() |
| 402 | if (++sch->q.qlen <= sch->limit) | in hhf_enqueue() |
| 445 | sch->q.qlen--; | in hhf_dequeue() |
| 513 | unsigned int qlen, prev_backlog; | in hhf_change() (local) |
| 564 | qlen = sch->q.qlen; | in hhf_change() |
| 566 | while (sch->q.qlen > sch->limit) { | in hhf_change() |
| 571 | qdisc_tree_reduce_backlog(sch, qlen - sch->q.qlen, | in hhf_change() |
|
| sch_drr.c |
| 267 | __u32 qlen = qdisc_qlen_sum(cl->qdisc); | in drr_dump_class_stats() (local) |
| 272 | if (qlen) | in drr_dump_class_stats() |
| 277 | gnet_stats_copy_queue(d, cl_q->cpu_qstats, &cl_q->qstats, qlen) < 0) | in drr_dump_class_stats() |
| 369 | sch->q.qlen++; | in drr_enqueue() |
| 396 | if (cl->qdisc->q.qlen == 0) | in drr_dequeue() |
| 402 | sch->q.qlen--; | in drr_dequeue() |
| 437 | if (cl->qdisc->q.qlen) | in drr_reset_qdisc() |
|
| sch_fq.c |
| 321 | if (sch->q.qlen != 0) { | in fq_fastpath_check() |
| 333 | if (q->internal.qlen >= 8) | in fq_fastpath_check() |
| 499 | sch->q.qlen--; | in fq_dequeue_skb() |
| 592 | if (f->qlen == 0) | in fq_enqueue() |
| 596 | f->qlen++; | in fq_enqueue() |
| 601 | sch->q.qlen++; | in fq_enqueue() |
| 660 | if (!sch->q.qlen) | in fq_dequeue() |
| 665 | q->internal.qlen--; | in fq_dequeue() |
| 718 | if (--f->qlen == 0) | in fq_dequeue() |
| 796 | flow->qlen = 0; | in fq_flow_purge() |
| [all …] |
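
sch_fq, like sch_fq_pie and sch_fq_codel further down, keeps two packet counters in lock-step: the per-flow qlen and the qdisc-wide sch->q.qlen (plus the byte backlog in sch->qstats.backlog). A rough, illustrative sketch of that double bookkeeping on enqueue and dequeue, with flow lookup and scheduling omitted and my_flow standing in for the real flow structure:

    /* Both counters must move together; if they drift apart the scheduler's
     * idea of pending work no longer matches the flow lists.
     */
    struct my_flow {                    /* stand-in for struct fq_flow */
        struct sk_buff *head, *tail;
        int qlen;                       /* packets queued in this flow */
    };

    static void my_flow_enqueue(struct Qdisc *sch, struct my_flow *f,
                                struct sk_buff *skb)
    {
        /* ...append skb to the flow's list (elided)... */
        f->qlen++;
        sch->q.qlen++;
        sch->qstats.backlog += qdisc_pkt_len(skb);
    }

    static struct sk_buff *my_flow_dequeue(struct Qdisc *sch, struct my_flow *f)
    {
        struct sk_buff *skb = f->head;  /* ...unlink from the list (elided)... */

        if (!skb)
            return NULL;
        f->qlen--;
        sch->q.qlen--;
        sch->qstats.backlog -= qdisc_pkt_len(skb);
        return skb;
    }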
|
| sch_fq_pie.c |
| 49 | u32 qlen; | (member) |
| 188 | sch->q.qlen++; | in fq_pie_qdisc_enqueue() |
| 194 | sel_flow->qlen = 0; | in fq_pie_qdisc_enqueue() |
| 197 | sel_flow->qlen++; | in fq_pie_qdisc_enqueue() |
| 266 | sch->q.qlen--; | in fq_pie_qdisc_dequeue() |
| 279 | flow->qlen--; | in fq_pie_qdisc_dequeue() |
| 368 | while (sch->q.qlen > sch->limit) { | in fq_pie_change() |
|
| sch_pie.c |
| 146 | unsigned int qlen, dropped = 0; | in pie_change() (local) |
| 196 | qlen = sch->q.qlen; | in pie_change() |
| 197 | while (sch->q.qlen > sch->limit) { | in pie_change() |
| 204 | qdisc_tree_reduce_backlog(sch, qlen - sch->q.qlen, dropped); | in pie_change() |
|
| sch_skbprio.c |
| 84 | if (sch->q.qlen < READ_ONCE(sch->limit)) { | in skbprio_enqueue() |
| 96 | sch->q.qlen++; | in skbprio_enqueue() |
| 148 | sch->q.qlen--; | in skbprio_dequeue() |
| 254 | q->qstats[cl - 1].qlen) < 0) | in skbprio_dump_class_stats() |
|
| sch_fq_codel.c |
| 181 | sch->q.qlen -= i; | in fq_codel_drop() |
| 218 | if (++sch->q.qlen <= sch->limit && !memory_limited) | in fq_codel_enqueue() |
| 222 | prev_qlen = sch->q.qlen; | in fq_codel_enqueue() |
| 233 | prev_qlen -= sch->q.qlen; | in fq_codel_enqueue() |
| 268 | sch->q.qlen--; | in dequeue_func() |
| 442 | while (sch->q.qlen > sch->limit || | in fq_codel_change() |
| 673 | qs.qlen++; | in fq_codel_dump_class_stats() |
| 681 | if (gnet_stats_copy_queue(d, NULL, &qs, qs.qlen) < 0) | in fq_codel_dump_class_stats() |
|
| sch_generic.c |
| 96 | q->q.qlen--; | in __skb_dequeue_bad_txq() |
| 137 | q->q.qlen++; | in qdisc_enqueue_skb_bad_txq() |
| 166 | q->q.qlen++; | in dev_requeue_skb() |
| 269 | q->q.qlen--; | in dequeue_skb() |
| 362 | dev->name, ret, q->q.qlen); | in sch_direct_xmit() |
| 673 | .qlen = 0, | |
| 679 | .qlen = 0, | |
| 836 | q->qlen = 0; | in pfifo_fast_reset() |
| 862 | if (!qlen) | in pfifo_fast_init() |
| 869 | err = skb_array_init(q, qlen, GFP_KERNEL); | in pfifo_fast_init() |
| [all …] |
|
| sch_mqprio.c |
| 562 | sch->q.qlen = 0; | in mqprio_dump() |
| 579 | sch->q.qlen += qdisc_qlen(qdisc); | in mqprio_dump() |
| 668 | __u32 qlen; | in mqprio_dump_class_stats() (local) |
| 693 | sch->q.qlen += qdisc_qlen(qdisc); | in mqprio_dump_class_stats() |
| 697 | qlen = qdisc_qlen(sch) + qstats.qlen; | in mqprio_dump_class_stats() |
| 703 | gnet_stats_copy_queue(d, NULL, &qstats, qlen) < 0) | in mqprio_dump_class_stats() |
|
| sch_hfsc.c |
| 756 | if (cl->qdisc->q.qlen == 0 && cl->cl_flags & HFSC_FSC) | in update_vf() |
| 984 | if (cl->qdisc->q.qlen != 0) | in hfsc_change_class() |
| 990 | if (cl->qdisc->q.qlen != 0) { | in hfsc_change_class() |
| 1338 | __u32 qlen; | in hfsc_dump_class_stats() (local) |
| 1348 | gnet_stats_copy_queue(d, NULL, &cl->qstats, qlen) < 0) | in hfsc_dump_class_stats() |
| 1553 | first = !cl->qdisc->q.qlen; | in hfsc_enqueue() |
| 1564 | sch->q.qlen++; | in hfsc_enqueue() |
| 1594 | if (sch->q.qlen == 0) | in hfsc_dequeue() |
| 1632 | if (cl->qdisc->q.qlen != 0) { | in hfsc_dequeue() |
| 1639 | if (cl->qdisc->q.qlen != 0) { | in hfsc_dequeue() |
| [all …] |
|
| sch_ets.c |
| 300 | if (!ets_class_is_strict(q, cl) && sch->q.qlen) | in ets_class_qlen_notify() |
| 448 | sch->q.qlen++; | in ets_qdisc_enqueue() |
| 457 | sch->q.qlen--; | in ets_qdisc_dequeue_skb() |
| 493 | if (cl->qdisc->q.qlen == 0) | in ets_qdisc_dequeue() |
| 655 | if (i >= q->nstrict && q->classes[i].qdisc->q.qlen) | in ets_qdisc_change() |
| 662 | if (q->classes[i].qdisc->q.qlen) { | in ets_qdisc_change() |
| 719 | if (q->classes[band].qdisc->q.qlen) | in ets_qdisc_reset() |
|
| sch_etf.c |
| 192 | sch->q.qlen++; | in etf_enqueue_timesortedlist() |
| 225 | sch->q.qlen--; | in timesortedlist_drop() |
| 250 | sch->q.qlen--; | in timesortedlist_remove() |
| 429 | sch->q.qlen--; | in timesortedlist_clear() |
|
| sch_teql.c |
| 81 | if (q->q.qlen < READ_ONCE(dev->tx_queue_len)) { | in teql_enqueue() |
| 110 | sch->q.qlen = dat->q.qlen + q->q.qlen; | in teql_dequeue() |
|
| sch_qfq.c |
| 327 | if (cl->qdisc->q.qlen > 0) { /* adding an active class */ | in qfq_add_to_agg() |
| 376 | if (cl->qdisc->q.qlen > 0) /* class is active */ | in qfq_deact_rm_from_agg() |
| 1000 | if (cl->qdisc->q.qlen == 0) /* no more packets, remove from list */ | in agg_dequeue() |
| 1138 | } else if (sch->q.qlen == 0) { /* no aggregate to serve */ | in qfq_dequeue() |
| 1153 | sch->q.qlen--; | in qfq_dequeue() |
| 1158 | sch->q.qlen++; | in qfq_dequeue() |
| 1266 | ++sch->q.qlen; | in qfq_enqueue() |
| 1484 | if (cl->qdisc->q.qlen > 0) | in qfq_reset_qdisc() |
|
| sch_htb.c |
| 595 | WARN_ON(cl->level || !cl->leaf.q || !cl->leaf.q->q.qlen); | in htb_activate() |
| 629 | if (q->direct_queue.qlen < q->direct_qlen) { | in htb_enqueue() |
| 654 | sch->q.qlen++; | in htb_enqueue() |
| 895 | if (unlikely(cl->leaf.q->q.qlen == 0)) { | in htb_dequeue_tree() |
| 933 | if (!cl->leaf.q->q.qlen) | in htb_dequeue_tree() |
| 954 | sch->q.qlen--; | in htb_dequeue() |
| 958 | if (!sch->q.qlen) | in htb_dequeue() |
| 1331 | __u32 qlen = 0; | in htb_dump_class_stats() (local) |
| 1334 | qdisc_qstats_qlen_backlog(cl->leaf.q, &qlen, &qs.backlog); | in htb_dump_class_stats() |
| 1357 | gnet_stats_copy_queue(d, NULL, &qs, qlen) < 0) | in htb_dump_class_stats() |
|
| /net/core/ |
| gen_stats.c |
| 348 | qstats->qlen += qcpu->qlen; | in gnet_stats_add_queue_cpu() |
| 363 | qstats->qlen += q->qlen; | in gnet_stats_add_queue() |
| 389 | struct gnet_stats_queue *q, __u32 qlen) | in gnet_stats_copy_queue() (argument) |
| 394 | qstats.qlen = qlen; | in gnet_stats_copy_queue() |
| 398 | d->tc_stats.qlen = qstats.qlen; | in gnet_stats_copy_queue() |
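
gen_stats.c is where per-CPU and plain queue counters converge: gnet_stats_add_queue_cpu() sums the per-CPU copies, gnet_stats_add_queue() folds in a plain struct, and gnet_stats_copy_queue() overrides the qlen field with the caller-supplied value before dumping it to userspace. The class dump handlers listed above (drr, mqprio, hfsc, htb, fq_codel) are typical callers. A hedged sketch of such a caller; struct my_class and its members are placeholders, not a real kernel type:

    /* Illustrative dump_class_stats handler in the style of
     * drr_dump_class_stats() above.
     */
    static int my_dump_class_stats(struct Qdisc *sch, unsigned long arg,
                                   struct gnet_dump *d)
    {
        struct my_class *cl = (struct my_class *)arg;   /* hypothetical */
        struct Qdisc *cl_q = cl->qdisc;
        __u32 qlen = qdisc_qlen_sum(cl_q);      /* also sums per-CPU qlen */

        /* cpu_qstats is only non-NULL for lockless (per-CPU) children;
         * gnet_stats_copy_queue() copes with either case.
         */
        if (gnet_stats_copy_queue(d, cl_q->cpu_qstats, &cl_q->qstats, qlen) < 0)
            return -1;
        return 0;
    }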
|
| request_sock.c |
| 38 | queue->fastopenq.qlen = 0; | in reqsk_queue_alloc() |
| 98 | fastopenq->qlen--; | in reqsk_fastopen_remove() |
| 126 | fastopenq->qlen++; | in reqsk_fastopen_remove() |
|
| /net/nfc/hci/ |
| llc_shdlc.c |
| 322 | if (shdlc->send_q.qlen == 0) { | in llc_shdlc_rcv_s_frame() |
| 462 | if (shdlc->rcv_q.qlen) | in llc_shdlc_handle_rcv_queue() |
| 463 | pr_debug("rcvQlen=%d\n", shdlc->rcv_q.qlen); | in llc_shdlc_handle_rcv_queue() |
| 518 | if (shdlc->send_q.qlen) | in llc_shdlc_handle_send_queue() |
| 520 | shdlc->send_q.qlen, shdlc->ns, shdlc->dnr, | in llc_shdlc_handle_send_queue() |
| 523 | shdlc->ack_pending_q.qlen); | in llc_shdlc_handle_send_queue() |
| 525 | while (shdlc->send_q.qlen && shdlc->ack_pending_q.qlen < shdlc->w && | in llc_shdlc_handle_send_queue() |
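
In the SHDLC LLC the send_q, rcv_q and ack_pending_q fields are ordinary sk_buff_head queues, so qlen here is simply the number of skbs currently linked in; line 525 uses it as a sliding-window check against shdlc->w. A tiny illustrative sketch of that kind of window gate, using the standard skb queue helpers (the function and its parameters are made up for the example):

    /* Keep sending while there is work queued and fewer than "w" frames
     * are still waiting to be acknowledged.
     */
    static void my_shdlc_pump(struct sk_buff_head *send_q,
                              struct sk_buff_head *ack_pending_q,
                              unsigned int w)
    {
        while (skb_queue_len(send_q) && skb_queue_len(ack_pending_q) < w) {
            struct sk_buff *skb = skb_dequeue(send_q);

            /* ...frame it and pass it to the lower layer (elided)... */
            skb_queue_tail(ack_pending_q, skb);     /* park until acked */
        }
    }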
|
| /net/kcm/ |
| kcmproc.c |
| 119 | kcm->sk.sk_receive_queue.qlen, | in kcm_format_sock() |
| 121 | kcm->sk.sk_write_queue.qlen, | in kcm_format_sock() |
| 149 | psock->sk->sk_receive_queue.qlen, | in kcm_format_psock() |
| 151 | psock->sk->sk_write_queue.qlen, | in kcm_format_psock() |
| 167 | if (psock->sk->sk_receive_queue.qlen) { | in kcm_format_psock() |
|
| /net/netfilter/ |
| nfnetlink_log.c |
| 65 | unsigned int qlen; /* number of nlmsgs in skb */ | (member) |
| 363 | if (inst->qlen > 1) { | in __nfulnl_send() |
| 376 | inst->qlen = 0; | in __nfulnl_send() |
| 803 | inst->qlen++; | in nfulnl_log_packet() |
| 809 | if (inst->qlen >= qthreshold) | in nfulnl_log_packet() |
| 1101 | inst->peer_portid, inst->qlen, | in seq_show() |
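
Here qlen is a plain counter of netlink messages batched into the instance's pending skb: nfulnl_log_packet() increments it per logged packet and triggers a flush once the configured threshold is reached, and __nfulnl_send() resets it to zero after the batch goes out. A rough sketch of that accumulate-and-flush pattern; the struct and helper names below are placeholders, not the real nfnetlink_log symbols:

    struct my_log_instance {        /* stand-in for struct nfulnl_instance */
        struct sk_buff *skb;        /* batch buffer being filled */
        unsigned int qlen;          /* number of log messages in skb */
        unsigned int qthreshold;    /* flush once this many are queued */
    };

    static void my_flush(struct my_log_instance *inst)
    {
        /* ...hand inst->skb to its consumer (elided)... */
        inst->skb = NULL;
        inst->qlen = 0;
    }

    static void my_log_one(struct my_log_instance *inst)
    {
        /* ...append one log message to inst->skb (elided)... */
        inst->qlen++;

        if (inst->qlen >= inst->qthreshold)
            my_flush(inst);
    }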
|
| /net/caif/ |
| caif_dev.c |
| 170 | int err, high = 0, qlen = 0; | in transmit() (local) |
| 203 | if (!(sch->flags & TCQ_F_NOLOCK) && likely(sch->q.qlen < high)) | in transmit() |
| 223 | qlen, high); | in transmit() |
|