Searched refs:send_queue (Results 1 – 12 of 12) sorted by relevance
/drivers/net/ethernet/mellanox/mlx5/core/steering/hws/
debug.c
    223  struct mlx5hws_send_engine *send_queue;  in hws_debug_dump_context_send_engine() (local)
    230  send_queue = &ctx->send_queue[i];  in hws_debug_dump_context_send_engine()
    235  send_queue->used_entries,  in hws_debug_dump_context_send_engine()
    236  send_queue->num_entries,  in hws_debug_dump_context_send_engine()
    238  send_queue->num_entries,  in hws_debug_dump_context_send_engine()
    239  send_queue->err,  in hws_debug_dump_context_send_engine()
    240  send_queue->completed.ci,  in hws_debug_dump_context_send_engine()
    241  send_queue->completed.pi,  in hws_debug_dump_context_send_engine()
    242  send_queue->completed.mask);  in hws_debug_dump_context_send_engine()
    244  send_ring = &send_queue->send_ring;  in hws_debug_dump_context_send_engine()
    [all …]
send.c
    678   return hws_send_engine_poll(&ctx->send_queue[queue_id], res, res_nb);  in mlx5hws_send_queue_poll()
    1067  mlx5hws_send_queue_close(&ctx->send_queue[queues]);  in __hws_send_queues_close()
    1091  kfree(ctx->send_queue);  in mlx5hws_send_queues_close()
    1146  ctx->send_queue = kcalloc(ctx->queues, sizeof(*ctx->send_queue), GFP_KERNEL);  in mlx5hws_send_queues_open()
    1147  if (!ctx->send_queue) {  in mlx5hws_send_queues_open()
    1159  err = mlx5hws_send_queue_open(ctx, &ctx->send_queue[i], queue_size);  in mlx5hws_send_queues_open()
    1169  kfree(ctx->send_queue);  in mlx5hws_send_queues_open()
    1186  queue = &ctx->send_queue[queue_id];  in mlx5hws_send_queue_action()
    1308  queue_id = queue - ctx->send_queue;  in mlx5hws_send_stes_fw()
rule.c
    312  queue = &ctx->send_queue[attr->queue_id];  in hws_rule_create_hws()
    417  queue = &ctx->send_queue[attr->queue_id];  in hws_rule_destroy_failed_hws()
    455  queue = &ctx->send_queue[attr->queue_id];  in hws_rule_destroy_hws()
    528  if (unlikely(mlx5hws_send_engine_full(&ctx->send_queue[attr->queue_id])))  in hws_rule_enqueue_precheck()
    622  queue = &ctx->send_queue[attr->queue_id];  in mlx5hws_rule_move_hws_add()
context.h
    49  struct mlx5hws_send_engine *send_queue;  (member)
bwc.c
    15   return min(ctx->send_queue[queue_id].num_entries / 2,  in hws_bwc_get_burst_th()
    123  queue = &ctx->send_queue[rule_attr.queue_id];  in hws_bwc_matcher_move_all_simple()
    396  queue_full = mlx5hws_send_engine_full(&ctx->send_queue[queue_id]);  in mlx5hws_bwc_queue_poll()
pat_arg.c
    376  queue = &ctx->send_queue[ctx->queues - 1];  in mlx5hws_arg_write_inline_arg_data()
action.c
    1711  queue = &ctx->send_queue[ctx->queues - 1];  in hws_action_create_dest_match_range_fill_table()
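The hws hits above all revolve around one layout: the context owns a flat array of send engines (ctx->send_queue), allocated with kcalloc() in mlx5hws_send_queues_open(), freed in mlx5hws_send_queues_close(), and indexed by a queue id everywhere else. Below is a minimal userspace sketch of that allocate/index/free shape; struct send_engine, ctx_queues_open(), ctx_queues_close() and ctx_get_queue() are placeholder names, not the driver's API.

#include <stdlib.h>

/* Placeholder for the per-queue state (mlx5hws_send_engine in the driver). */
struct send_engine {
    unsigned int num_entries;
    unsigned int used_entries;
    int err;
};

struct ctx {
    struct send_engine *send_queue;  /* one engine per queue */
    size_t queues;                   /* number of array entries */
};

/* Allocate the whole array, as mlx5hws_send_queues_open() does with kcalloc(). */
static int ctx_queues_open(struct ctx *ctx, size_t queues, unsigned int queue_size)
{
    ctx->send_queue = calloc(queues, sizeof(*ctx->send_queue));
    if (!ctx->send_queue)
        return -1;
    ctx->queues = queues;
    for (size_t i = 0; i < queues; i++)
        ctx->send_queue[i].num_entries = queue_size;
    return 0;
}

/* Free the array in one go, as mlx5hws_send_queues_close() does with kfree(). */
static void ctx_queues_close(struct ctx *ctx)
{
    free(ctx->send_queue);
    ctx->send_queue = NULL;
    ctx->queues = 0;
}

/* Callers pick an engine by id, mirroring &ctx->send_queue[attr->queue_id]. */
static struct send_engine *ctx_get_queue(struct ctx *ctx, size_t queue_id)
{
    return &ctx->send_queue[queue_id];
}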
/drivers/net/wireless/ath/ath6kl/
htc_pipe.c
    315  INIT_LIST_HEAD(&send_queue);  in htc_try_send()
    357  list_splice_tail_init(txq, &send_queue);  in htc_try_send()
    373  list_move_tail(&packet->list, &send_queue);  in htc_try_send()
    398  &send_queue);  in htc_try_send()
    402  if (list_empty(&send_queue)) {  in htc_try_send()
    418  if (!list_empty(&send_queue)) {  in htc_try_send()
    420  list_splice_tail_init(&send_queue, &ep->txq);  in htc_try_send()
    421  if (!list_empty(&send_queue)) {  in htc_try_send()
    426  INIT_LIST_HEAD(&send_queue);  in htc_try_send()
    470  if (get_queue_depth(&send_queue) == 0) {  in htc_try_send()
    [all …]
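In htc_try_send() the ath6kl code builds a local send_queue list head, splices or moves packets into it from the endpoint's txq, and splices everything back onto ep->txq when the send cannot proceed. A rough userspace analogue of that splice-out/splice-back flow, using <sys/queue.h> TAILQ macros in place of the kernel list helpers; struct packet and the credit check are stand-ins, not ath6kl code.

#include <sys/queue.h>

struct packet {
    int len;
    TAILQ_ENTRY(packet) list;
};

TAILQ_HEAD(pkt_queue, packet);

/* Move every packet from src to the tail of dst and leave src empty;
 * the kernel does this in one step with list_splice_tail_init(). */
static void splice_tail_init(struct pkt_queue *dst, struct pkt_queue *src)
{
    struct packet *p;

    while ((p = TAILQ_FIRST(src)) != NULL) {
        TAILQ_REMOVE(src, p, list);
        TAILQ_INSERT_TAIL(dst, p, list);
    }
}

/* Stand-in for the HTC credit/resource check; pretend we cannot send. */
static int have_tx_credits(void)
{
    return 0;
}

static void try_send(struct pkt_queue *txq)
{
    struct pkt_queue send_queue;

    TAILQ_INIT(&send_queue);             /* INIT_LIST_HEAD(&send_queue) */
    splice_tail_init(&send_queue, txq);  /* pull pending packets off the endpoint */

    if (TAILQ_EMPTY(&send_queue))
        return;

    if (!have_tx_credits()) {
        /* Abort: hand the packets back to the endpoint queue, mirroring
         * list_splice_tail_init(&send_queue, &ep->txq) in htc_try_send(). */
        splice_tail_init(txq, &send_queue);
        return;
    }

    /* Otherwise each packet would be dequeued here and handed to the HIF layer. */
}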
/drivers/infiniband/core/
mad.c
    1059  if (qp_info->send_queue.count < qp_info->send_queue.max_active) {  in ib_send_mad()
    1063  list = &qp_info->send_queue.list;  in ib_send_mad()
    1070  qp_info->send_queue.count++;  in ib_send_mad()
    2508  struct ib_mad_queue *send_queue;  in ib_mad_send_done() (local)
    2523  send_queue = mad_list->mad_queue;  in ib_mad_send_done()
    2524  qp_info = send_queue->qp_info;  in ib_mad_send_done()
    2537  spin_lock_irqsave(&send_queue->lock, flags);  in ib_mad_send_done()
    2541  if (send_queue->count-- > send_queue->max_active) {  in ib_mad_send_done()
    2549  spin_unlock_irqrestore(&send_queue->lock, flags);  in ib_mad_send_done()
    3142  init_mad_queue(qp_info, &qp_info->send_queue);  in init_mad_qp()
    [all …]
mad_priv.h
    256  struct ib_mad_queue send_queue;  (member)
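The mad.c hits show a throttled send queue: ib_send_mad() compares send_queue.count against send_queue.max_active to decide which list a work request goes on, and ib_mad_send_done() drops the count under send_queue->lock and checks the limit again on completion. Below is a simplified sketch of that count/max_active throttle using a pthread mutex and a generic backlog list; it is only the pattern, not the ib_mad logic itself, and the names post_send(), send_or_backlog() and send_done() are placeholders.

#include <pthread.h>
#include <sys/queue.h>

struct work {
    TAILQ_ENTRY(work) list;
};

TAILQ_HEAD(work_list, work);

struct send_queue {
    pthread_mutex_t lock;
    unsigned int count;       /* items currently posted */
    unsigned int max_active;  /* posting limit */
    struct work_list backlog; /* items waiting for room */
};

/* Stand-in for posting a work request to the hardware queue. */
static void post_send(struct work *w)
{
    (void)w;
}

/* Post if there is room, otherwise park the item on the backlog
 * (a simplified take on the count < max_active check in ib_send_mad()). */
static void send_or_backlog(struct send_queue *sq, struct work *w)
{
    pthread_mutex_lock(&sq->lock);
    if (sq->count < sq->max_active) {
        sq->count++;
        pthread_mutex_unlock(&sq->lock);
        post_send(w);
        return;
    }
    TAILQ_INSERT_TAIL(&sq->backlog, w, list);
    pthread_mutex_unlock(&sq->lock);
}

/* Completion path: drop the count and, if something is backlogged and
 * there is room again, post it (the shape of ib_mad_send_done()). */
static void send_done(struct send_queue *sq)
{
    struct work *next = NULL;

    pthread_mutex_lock(&sq->lock);
    sq->count--;
    if (sq->count < sq->max_active && !TAILQ_EMPTY(&sq->backlog)) {
        next = TAILQ_FIRST(&sq->backlog);
        TAILQ_REMOVE(&sq->backlog, next, list);
        sq->count++;
    }
    pthread_mutex_unlock(&sq->lock);

    if (next)
        post_send(next);
}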
/drivers/net/hamradio/
yam.c
    125   struct sk_buff_head send_queue;  /* Packets awaiting transmission */  (member)
    588   skb_queue_tail(&yp->send_queue, skb);  in yam_send_packet()
    608   skb_queue_empty(&yp->send_queue))  in yam_arbitrate()
    658   if (!(skb = skb_dequeue(&yp->send_queue))) {  in yam_tx_byte()
    702   if (skb_queue_empty(&yp->send_queue)) {  in yam_tx_byte()
    913   while ((skb = skb_dequeue(&yp->send_queue)))  in yam_close()
    1098  skb_queue_head_init(&yp->send_queue);  in yam_setup()
/drivers/net/
virtio_net.c
    302   struct send_queue {  (struct)
    396   struct send_queue *sq;
    1120  struct send_queue *sq)  in tx_may_stop()
    1612  struct send_queue *sq;  in virtnet_xsk_wakeup()
    1729  struct send_queue *sq;  in virtnet_xdp_xmit()
    3103  struct send_queue *sq;  in virtnet_poll()
    3241  struct send_queue *sq = container_of(napi, struct send_queue, napi);  in virtnet_poll_tx()
    4165  struct send_queue *sq;  in virtnet_set_ringparam()
    5915  struct send_queue *sq;  in virtnet_xsk_pool_enable()
    5997  struct send_queue *sq;  in virtnet_xsk_pool_disable()
    [all …]
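virtio_net defines its own struct send_queue, one per transmit queue, and embeds the NAPI context in it; virtnet_poll_tx() recovers the queue from the napi pointer with container_of(napi, struct send_queue, napi). A compact, runnable sketch of that embedded-member lookup; struct napi here is only a stand-in for the kernel's napi_struct, and the container_of macro is the usual offsetof form rather than the kernel's type-checked version.

#include <stddef.h>

/* Recover the enclosing structure from a pointer to one of its members. */
#define container_of(ptr, type, member) \
    ((type *)((char *)(ptr) - offsetof(type, member)))

struct napi {
    int weight;
};

/* One instance per TX queue, with the NAPI context embedded,
 * as in virtio_net's struct send_queue. */
struct send_queue {
    int qindex;
    struct napi napi;
};

/* The poll callback only receives the napi pointer; walk back to the
 * owning queue, the way virtnet_poll_tx() does. */
static int poll_tx(struct napi *napi)
{
    struct send_queue *sq = container_of(napi, struct send_queue, napi);

    return sq->qindex;
}

int main(void)
{
    struct send_queue sq = { .qindex = 3 };

    return poll_tx(&sq.napi) == 3 ? 0 : 1;
}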
Completed in 51 milliseconds