
Searched refs:backlog (Results 1 – 25 of 70) sorted by relevance

/drivers/crypto/ccp/
ccp-crypto-main.c
58 struct list_head *backlog; member
98 *backlog = NULL; in ccp_crypto_cmd_complete()
117 if (req_queue.backlog != &req_queue.cmds) { in ccp_crypto_cmd_complete()
122 *backlog = container_of(req_queue.backlog, in ccp_crypto_cmd_complete()
124 req_queue.backlog = req_queue.backlog->next; in ccp_crypto_cmd_complete()
163 if (backlog) { in ccp_crypto_complete()
164 backlog->ret = -EINPROGRESS; in ccp_crypto_complete()
195 if (backlog) { in ccp_crypto_complete()
196 backlog->ret = -EINPROGRESS; in ccp_crypto_complete()
245 req_queue.backlog = &crypto_cmd->entry; in ccp_crypto_enqueue_cmd()
[all …]
ccp-dev.c
315 list_add_tail(&cmd->entry, &ccp->backlog); in ccp_enqueue_cmd()
378 struct ccp_cmd *backlog = NULL; in ccp_dequeue_cmd() local
403 if (!list_empty(&ccp->backlog)) { in ccp_dequeue_cmd()
404 backlog = list_first_entry(&ccp->backlog, struct ccp_cmd, in ccp_dequeue_cmd()
406 list_del(&backlog->entry); in ccp_dequeue_cmd()
411 if (backlog) { in ccp_dequeue_cmd()
412 INIT_WORK(&backlog->work, ccp_do_cmd_backlog); in ccp_dequeue_cmd()
413 schedule_work(&backlog->work); in ccp_dequeue_cmd()
488 INIT_LIST_HEAD(&ccp->backlog); in ccp_alloc_struct()
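
The ccp hits above trace one pattern: when the command queue is full, ccp_enqueue_cmd() parks the command on ccp->backlog, and ccp_dequeue_cmd() later pops the oldest parked command and reposts it from a work item; the same list-based parking recurs in the nitrox and ccree hits further down. A minimal kernel-style sketch of that flow follows; the struct layout and lock are assumptions, only the list/work calls mirror the hits:

    /* Sketch only: field names beyond 'backlog' are assumptions. */
    #include <linux/list.h>
    #include <linux/spinlock.h>
    #include <linux/workqueue.h>

    struct ccp_cmd {
        struct list_head entry;
        struct work_struct work;
    };

    struct ccp_device {
        spinlock_t cmd_lock;
        struct list_head backlog;  /* INIT_LIST_HEAD in ccp_alloc_struct() */
    };

    static void ccp_do_cmd_backlog(struct work_struct *work)
    {
        struct ccp_cmd *cmd = container_of(work, struct ccp_cmd, work);

        (void)cmd;  /* the real work handler re-submits the command here */
    }

    /* Queue full: park the command (cf. ccp-dev.c line 315). */
    static void ccp_backlog_cmd(struct ccp_device *ccp, struct ccp_cmd *cmd)
    {
        spin_lock_bh(&ccp->cmd_lock);
        list_add_tail(&cmd->entry, &ccp->backlog);
        spin_unlock_bh(&ccp->cmd_lock);
    }

    /* A slot freed up: pop the oldest parked command and repost it from
     * process context (cf. ccp-dev.c lines 403-413). */
    static void ccp_kick_backlog(struct ccp_device *ccp)
    {
        struct ccp_cmd *backlog = NULL;

        spin_lock_bh(&ccp->cmd_lock);
        if (!list_empty(&ccp->backlog)) {
            backlog = list_first_entry(&ccp->backlog, struct ccp_cmd, entry);
            list_del(&backlog->entry);
        }
        spin_unlock_bh(&ccp->cmd_lock);

        if (backlog) {
            INIT_WORK(&backlog->work, ccp_do_cmd_backlog);
            schedule_work(&backlog->work);
        }
    }
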
/drivers/crypto/intel/qat/qat_common/
qat_algs_send.c
24 void qat_alg_send_backlog(struct qat_instance_backlog *backlog) in qat_alg_send_backlog() argument
28 spin_lock_bh(&backlog->lock); in qat_alg_send_backlog()
29 list_for_each_entry_safe(req, tmp, &backlog->list, list) { in qat_alg_send_backlog()
40 spin_unlock_bh(&backlog->lock); in qat_alg_send_backlog()
45 struct qat_instance_backlog *backlog = req->backlog; in qat_alg_try_enqueue() local
50 if (!list_empty(&backlog->list)) in qat_alg_try_enqueue()
67 struct qat_instance_backlog *backlog = req->backlog; in qat_alg_send_message_maybacklog() local
73 spin_lock_bh(&backlog->lock); in qat_alg_send_message_maybacklog()
75 list_add_tail(&req->list, &backlog->list); in qat_alg_send_message_maybacklog()
78 spin_unlock_bh(&backlog->lock); in qat_alg_send_message_maybacklog()
qat_comp_algs.c
52 alg_req->backlog = &inst->backlog; in qat_alg_send_dc_message()
124 struct qat_instance_backlog *backlog = qat_req->alg_req.backlog; in qat_comp_alg_callback() local
128 qat_alg_send_backlog(backlog); in qat_comp_alg_callback()
qat_algs_send.h
19 struct qat_instance_backlog *backlog; member
23 void qat_alg_send_backlog(struct qat_instance_backlog *backlog);
qat_compression.h
21 struct qat_instance_backlog backlog; member
qat_crypto.c
232 INIT_LIST_HEAD(&inst->backlog.list); in qat_crypto_create_instances()
233 spin_lock_init(&inst->backlog.lock); in qat_crypto_create_instances()
qat_crypto.h
24 struct qat_instance_backlog backlog; member
qat_compression.c
182 INIT_LIST_HEAD(&inst->backlog.list); in qat_compression_create_instances()
183 spin_lock_init(&inst->backlog.lock); in qat_compression_create_instances()
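
The qat hits show a per-instance backlog: a spinlock-protected list set up at instance creation (qat_crypto.c, qat_compression.c), an enqueue path that parks requests when the ring is busy, and a completion callback that drains the list in order via qat_alg_send_backlog(). A sketch under assumed names; try_send() stands in for the real ring-submit helper:

    #include <linux/errno.h>
    #include <linux/list.h>
    #include <linux/spinlock.h>

    struct qat_instance_backlog {
        struct list_head list;
        spinlock_t lock;
    };

    struct qat_alg_req {
        struct list_head list;
        struct qat_instance_backlog *backlog;
    };

    /* cf. qat_crypto.c lines 232-233, qat_compression.c lines 182-183 */
    static void backlog_init(struct qat_instance_backlog *backlog)
    {
        INIT_LIST_HEAD(&backlog->list);
        spin_lock_init(&backlog->lock);
    }

    /* Stands in for the real ring-submit helper (assumption). */
    static bool try_send(struct qat_alg_req *req)
    {
        return false;  /* pretend the ring is full */
    }

    /* Ring busy: park the request and report -EBUSY so the caller can
     * wait (cf. qat_algs_send.c lines 73-78). */
    static int send_maybacklog(struct qat_alg_req *req)
    {
        struct qat_instance_backlog *backlog = req->backlog;
        int ret = -EINPROGRESS;

        spin_lock_bh(&backlog->lock);
        if (!try_send(req)) {
            list_add_tail(&req->list, &backlog->list);
            ret = -EBUSY;
        }
        spin_unlock_bh(&backlog->lock);
        return ret;
    }

    /* Completion path: retry parked requests in FIFO order, stopping at
     * the first that still does not fit (cf. qat_algs_send.c lines 24-40). */
    static void send_backlog(struct qat_instance_backlog *backlog)
    {
        struct qat_alg_req *req, *tmp;

        spin_lock_bh(&backlog->lock);
        list_for_each_entry_safe(req, tmp, &backlog->list, list) {
            if (!try_send(req))
                break;
            list_del(&req->list);
        }
        spin_unlock_bh(&backlog->lock);
    }
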
/drivers/net/ethernet/mellanox/mlxsw/
spectrum_qdisc.c
79 u64 backlog; member
200 tmp->stats_base.backlog -= mlxsw_sp_qdisc->stats_base.backlog; in mlxsw_sp_qdisc_reduce_parent_backlog()
569 backlog -= stats_base->backlog; in mlxsw_sp_qdisc_update_stats()
575 stats_base->backlog += backlog; in mlxsw_sp_qdisc_update_stats()
751 u64 backlog; in mlxsw_sp_qdisc_leaf_unoffload() local
755 qstats->backlog -= backlog; in mlxsw_sp_qdisc_leaf_unoffload()
1298 u64 backlog; in mlxsw_sp_qdisc_walk_cb_clean_stats() local
1301 backlog = mlxsw_sp_qdisc->stats_base.backlog; in mlxsw_sp_qdisc_walk_cb_clean_stats()
1305 mlxsw_sp_qdisc->stats_base.backlog = backlog; in mlxsw_sp_qdisc_walk_cb_clean_stats()
1422 u64 backlog; in __mlxsw_sp_qdisc_ets_unoffload() local
[all …]
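
In the mlxsw hits, backlog is a statistic rather than a queue: the hardware counter only grows, so spectrum_qdisc.c keeps a stats_base.backlog snapshot, reports the delta, and on unoffload subtracts the accumulated backlog from the software qstats. A sketch of that baseline-delta accounting; the struct name here is an assumption:

    #include <linux/types.h>

    struct stats_base_sketch {
        u64 backlog;
    };

    /* Report only the growth since the last read, then advance the
     * baseline (cf. spectrum_qdisc.c lines 569-575). */
    static u64 backlog_delta(struct stats_base_sketch *base, u64 hw_backlog)
    {
        u64 delta = hw_backlog - base->backlog;

        base->backlog = hw_backlog;
        return delta;
    }

    /* On unoffload, back the hardware-accumulated backlog out of the
     * software queue stats (cf. spectrum_qdisc.c line 755). */
    static void backlog_unoffload(u64 *qstats_backlog, u64 hw_backlog)
    {
        *qstats_backlog -= hw_backlog;
    }
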
/drivers/crypto/qce/
core.c
91 struct crypto_async_request *async_req, *backlog; in qce_handle_queue() local
102 backlog = crypto_get_backlog(&qce->queue); in qce_handle_queue()
111 if (backlog) { in qce_handle_queue()
113 crypto_request_complete(backlog, -EINPROGRESS); in qce_handle_queue()
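
qce, and likewise the cesa, mxs-dcp, and safexcel hits below, all use the generic crypto_queue backlog handshake: crypto_get_backlog() peeks the oldest backlogged request before crypto_dequeue_request(), and the driver completes it with -EINPROGRESS so a sender blocked on the backlog knows its request is now in flight. A sketch of the common handler loop; the locking is an assumption, the helpers are the <crypto/algapi.h> API seen in the hits:

    #include <crypto/algapi.h>
    #include <linux/errno.h>
    #include <linux/spinlock.h>

    static void handle_queue(struct crypto_queue *queue, spinlock_t *lock)
    {
        struct crypto_async_request *async_req, *backlog;

        spin_lock_bh(lock);
        backlog = crypto_get_backlog(queue);  /* oldest backlogged req, or NULL */
        async_req = crypto_dequeue_request(queue);
        spin_unlock_bh(lock);

        if (!async_req)
            return;

        /* Wake the backlogged request's owner (cf. core.c line 113). */
        if (backlog)
            crypto_request_complete(backlog, -EINPROGRESS);

        /* submit async_req to the hardware here */
    }
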
/drivers/net/ipvlan/
ipvlan_core.c
243 spin_lock_bh(&port->backlog.lock); in ipvlan_process_multicast()
244 skb_queue_splice_tail_init(&port->backlog, &list); in ipvlan_process_multicast()
245 spin_unlock_bh(&port->backlog.lock); in ipvlan_process_multicast()
579 spin_lock(&port->backlog.lock); in ipvlan_multicast_enqueue()
580 if (skb_queue_len(&port->backlog) < IPVLAN_QBACKLOG_LIMIT) { in ipvlan_multicast_enqueue()
582 __skb_queue_tail(&port->backlog, skb); in ipvlan_multicast_enqueue()
583 spin_unlock(&port->backlog.lock); in ipvlan_multicast_enqueue()
586 spin_unlock(&port->backlog.lock); in ipvlan_multicast_enqueue()
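
The ipvlan hits show a bounded skb backlog for multicast: the enqueue path drops once the queue reaches IPVLAN_QBACKLOG_LIMIT, and the worker splices the entire queue out under the lock so the actual processing runs without it. A sketch; the limit value and the drop/consume handling are assumptions:

    #include <linux/skbuff.h>

    #define QBACKLOG_LIMIT 1000  /* stands in for IPVLAN_QBACKLOG_LIMIT */

    /* Enqueue with a hard cap (cf. ipvlan_core.c lines 579-586). */
    static bool backlog_enqueue(struct sk_buff_head *backlog, struct sk_buff *skb)
    {
        bool queued = false;

        spin_lock(&backlog->lock);
        if (skb_queue_len(backlog) < QBACKLOG_LIMIT) {
            __skb_queue_tail(backlog, skb);
            queued = true;
        }
        spin_unlock(&backlog->lock);

        if (!queued)
            kfree_skb(skb);  /* over the limit: drop */
        return queued;
    }

    /* Worker: splice everything out in one shot, then process without
     * holding the lock (cf. ipvlan_core.c lines 243-245). */
    static void backlog_process(struct sk_buff_head *backlog)
    {
        struct sk_buff_head list;
        struct sk_buff *skb;

        __skb_queue_head_init(&list);
        spin_lock_bh(&backlog->lock);
        skb_queue_splice_tail_init(backlog, &list);
        spin_unlock_bh(&backlog->lock);

        while ((skb = __skb_dequeue(&list)) != NULL)
            consume_skb(skb);  /* the real worker forwards the skb here */
    }
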
/drivers/crypto/marvell/cesa/
cesa.c
39 struct crypto_async_request **backlog) in mv_cesa_dequeue_req_locked() argument
43 *backlog = crypto_get_backlog(&engine->queue); in mv_cesa_dequeue_req_locked()
54 struct crypto_async_request *req = NULL, *backlog = NULL; in mv_cesa_rearm_engine() local
60 req = mv_cesa_dequeue_req_locked(engine, &backlog); in mv_cesa_rearm_engine()
68 if (backlog) in mv_cesa_rearm_engine()
69 crypto_request_complete(backlog, -EINPROGRESS); in mv_cesa_rearm_engine()
tdma.c
147 struct crypto_async_request *backlog = NULL; in mv_cesa_tdma_process() local
160 &backlog); in mv_cesa_tdma_process()
181 if (backlog) in mv_cesa_tdma_process()
182 crypto_request_complete(backlog, -EINPROGRESS); in mv_cesa_tdma_process()
/drivers/crypto/
mxs-dcp.c
415 struct crypto_async_request *backlog; in dcp_chan_thread_aes() local
424 backlog = crypto_get_backlog(&sdcp->queue[chan]); in dcp_chan_thread_aes()
428 if (!backlog && !arq) { in dcp_chan_thread_aes()
435 if (backlog) in dcp_chan_thread_aes()
436 crypto_request_complete(backlog, -EINPROGRESS); in dcp_chan_thread_aes()
746 struct crypto_async_request *backlog; in dcp_chan_thread_sha() local
754 backlog = crypto_get_backlog(&sdcp->queue[chan]); in dcp_chan_thread_sha()
758 if (!backlog && !arq) { in dcp_chan_thread_sha()
765 if (backlog) in dcp_chan_thread_sha()
766 crypto_request_complete(backlog, -EINPROGRESS); in dcp_chan_thread_sha()
/drivers/infiniband/hw/irdma/
cm.h
255 int backlog; member
337 int backlog; member
397 int irdma_create_listen(struct iw_cm_id *cm_id, int backlog);
/drivers/crypto/cavium/nitrox/
nitrox_reqmgr.c
232 INIT_LIST_HEAD(&sr->backlog); in backlog_list_add()
235 list_add_tail(&sr->backlog, &cmdq->backlog_head); in backlog_list_add()
329 list_for_each_entry_safe(sr, tmp, &cmdq->backlog_head, backlog) { in post_backlog_cmds()
336 list_del(&sr->backlog); in post_backlog_cmds()
/drivers/crypto/ccree/
cc_request_mgr.c
34 struct list_head backlog; member
129 INIT_LIST_HEAD(&req_mgr_h->backlog); in cc_req_mgr_init()
337 list_add_tail(&bli->list, &mgr->backlog); in cc_enqueue_backlog()
356 bli = list_first_entry(&mgr->backlog, struct cc_bl_item, list); in cc_proc_backlog()
/drivers/crypto/hisilicon/sec2/
sec.h
115 struct sec_instance_backlog *backlog; member
175 struct sec_instance_backlog backlog; member
/drivers/infiniband/hw/qedr/
qedr_iw_cm.h
37 int qedr_iw_create_listen(struct iw_cm_id *cm_id, int backlog);
/drivers/infiniband/core/
ucma.c
91 atomic_t backlog; member
304 if (!atomic_add_unless(&listen_ctx->backlog, -1, 0)) in ucma_connect_event_handler()
329 atomic_inc(&listen_ctx->backlog); in ucma_connect_event_handler()
415 atomic_inc(&uevent->ctx->backlog); in ucma_get_event()
1101 if (cmd.backlog <= 0 || cmd.backlog > max_backlog) in ucma_listen()
1102 cmd.backlog = max_backlog; in ucma_listen()
1103 atomic_set(&ctx->backlog, cmd.backlog); in ucma_listen()
1106 ret = rdma_listen(ctx->cm_id, cmd.backlog); in ucma_listen()
iwcm.c
564 int iw_cm_listen(struct iw_cm_id *cm_id, int backlog) in iw_cm_listen() argument
572 if (!backlog) in iw_cm_listen()
573 backlog = default_backlog; in iw_cm_listen()
575 ret = alloc_work_entries(cm_id_priv, backlog); in iw_cm_listen()
587 backlog); in iw_cm_listen()
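
In the InfiniBand hits, backlog is the listen-queue depth from listen(2) rather than a request list: irdma, qedr, and pvcalls pass it through their listen entry points, iwcm.c sizes its work-entry pool from it (falling back to default_backlog when it is 0), and ucma.c clamps the user-supplied value and counts slots down atomically as connect events arrive. A sketch of the ucma-style accounting; MAX_BACKLOG stands in for the real max_backlog parameter:

    #include <linux/atomic.h>

    #define MAX_BACKLOG 1024  /* assumed cap; a module parameter in ucma */

    struct listen_ctx {
        atomic_t backlog;
    };

    /* Clamp and arm the counter (cf. ucma.c lines 1101-1103). */
    static void listen_set_backlog(struct listen_ctx *ctx, int backlog)
    {
        if (backlog <= 0 || backlog > MAX_BACKLOG)
            backlog = MAX_BACKLOG;
        atomic_set(&ctx->backlog, backlog);
    }

    /* Take a slot per pending connect request, refusing at zero
     * (cf. ucma.c line 304)... */
    static bool listen_take_slot(struct listen_ctx *ctx)
    {
        return atomic_add_unless(&ctx->backlog, -1, 0);
    }

    /* ...and return the slot once the event reaches userspace
     * (cf. ucma.c lines 329 and 415). */
    static void listen_put_slot(struct listen_ctx *ctx)
    {
        atomic_inc(&ctx->backlog);
    }
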
/drivers/xen/
pvcalls-front.h
12 int pvcalls_front_listen(struct socket *sock, int backlog);
/drivers/net/ethernet/mellanox/mlx5/core/fpga/
conn.h
75 struct list_head backlog; member
/drivers/crypto/inside-secure/
safexcel.c
822 struct crypto_async_request *req, *backlog; in safexcel_dequeue() local
830 backlog = priv->ring[ring].backlog; in safexcel_dequeue()
836 backlog = crypto_get_backlog(&priv->ring[ring].queue); in safexcel_dequeue()
842 priv->ring[ring].backlog = NULL; in safexcel_dequeue()
852 if (backlog) in safexcel_dequeue()
853 crypto_request_complete(backlog, -EINPROGRESS); in safexcel_dequeue()
872 priv->ring[ring].backlog = backlog; in safexcel_dequeue()
