Searched refs: async_req (Results 1 – 25 of 27) sorted by relevance

/drivers/crypto/qce/
core.c:71 static int qce_handle_request(struct crypto_async_request *async_req) in qce_handle_request() argument
75 u32 type = crypto_tfm_alg_type(async_req->tfm); in qce_handle_request()
81 ret = ops->async_req_handle(async_req); in qce_handle_request()
91 struct crypto_async_request *async_req, *backlog; in qce_handle_queue() local
103 async_req = crypto_dequeue_request(&qce->queue); in qce_handle_queue()
104 if (async_req) in qce_handle_queue()
105 qce->req = async_req; in qce_handle_queue()
108 if (!async_req) in qce_handle_queue()
116 err = qce_handle_request(async_req); in qce_handle_queue()
common.c:149 struct ahash_request *req = ahash_request_cast(async_req); in qce_setup_regs_ahash()
150 struct crypto_ahash *ahash = __crypto_ahash_cast(async_req->tfm); in qce_setup_regs_ahash()
152 struct qce_alg_template *tmpl = to_ahash_tmpl(async_req->tfm); in qce_setup_regs_ahash()
316 struct skcipher_request *req = skcipher_request_cast(async_req); in qce_setup_regs_skcipher()
318 struct qce_cipher_ctx *ctx = crypto_tfm_ctx(async_req->tfm); in qce_setup_regs_skcipher()
422 struct aead_request *req = aead_request_cast(async_req); in qce_setup_regs_aead()
424 struct qce_aead_ctx *ctx = crypto_tfm_ctx(async_req->tfm); in qce_setup_regs_aead()
544 int qce_start(struct crypto_async_request *async_req, u32 type) in qce_start() argument
549 return qce_setup_regs_skcipher(async_req); in qce_start()
553 return qce_setup_regs_ahash(async_req); in qce_start()
[all …]
sha.c:38 struct crypto_async_request *async_req = data; in qce_ahash_done() local
39 struct ahash_request *req = ahash_request_cast(async_req); in qce_ahash_done()
42 struct qce_alg_template *tmpl = to_ahash_tmpl(async_req->tfm); in qce_ahash_done()
75 static int qce_ahash_async_req_handle(struct crypto_async_request *async_req) in qce_ahash_async_req_handle() argument
77 struct ahash_request *req = ahash_request_cast(async_req); in qce_ahash_async_req_handle()
79 struct qce_sha_ctx *ctx = crypto_tfm_ctx(async_req->tfm); in qce_ahash_async_req_handle()
80 struct qce_alg_template *tmpl = to_ahash_tmpl(async_req->tfm); in qce_ahash_async_req_handle()
112 &rctx->result_sg, 1, qce_ahash_done, async_req); in qce_ahash_async_req_handle()
118 ret = qce_start(async_req, tmpl->crypto_alg_type); in qce_ahash_async_req_handle()
skcipher.c:29 struct crypto_async_request *async_req = data; in qce_skcipher_done() local
30 struct skcipher_request *req = skcipher_request_cast(async_req); in qce_skcipher_done()
64 qce_skcipher_async_req_handle(struct crypto_async_request *async_req) in qce_skcipher_async_req_handle() argument
66 struct skcipher_request *req = skcipher_request_cast(async_req); in qce_skcipher_async_req_handle()
146 qce_skcipher_done, async_req); in qce_skcipher_async_req_handle()
152 ret = qce_start(async_req, tmpl->crypto_alg_type); in qce_skcipher_async_req_handle()
aead.c:25 struct crypto_async_request *async_req = data; in qce_aead_done() local
26 struct aead_request *req = aead_request_cast(async_req); in qce_aead_done()
28 struct qce_aead_ctx *ctx = crypto_tfm_ctx(async_req->tfm); in qce_aead_done()
408 qce_aead_async_req_handle(struct crypto_async_request *async_req) in qce_aead_async_req_handle() argument
410 struct aead_request *req = aead_request_cast(async_req); in qce_aead_async_req_handle()
413 struct qce_aead_ctx *ctx = crypto_tfm_ctx(async_req->tfm); in qce_aead_async_req_handle()
472 qce_aead_done, async_req); in qce_aead_async_req_handle()
478 ret = qce_start(async_req, tmpl->crypto_alg_type); in qce_aead_async_req_handle()
core.h:61 int (*async_req_handle)(struct crypto_async_request *async_req);
common.h:102 int qce_start(struct crypto_async_request *async_req, u32 type);
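
The qce hits above (core.c:91-116) show the self-managed request queue that several drivers in this listing share: enqueue under a spinlock, pull the next request with crypto_dequeue_request(), signal any backlogged submitter, then dispatch. Below is a minimal sketch of that shape, not the driver itself: names with an _example suffix are hypothetical, and crypto_request_complete() assumes a v6.3+ kernel (older code called backlog->complete() directly).

/*
 * Hedged sketch of the qce_handle_queue() pattern; only the queue
 * helpers are real crypto API, everything *_example is made up.
 */
#include <linux/spinlock.h>
#include <crypto/algapi.h>

struct engine_example {
	spinlock_t lock;
	struct crypto_queue queue;
	struct crypto_async_request *req;	/* request in flight */
};

static int handle_queue_example(struct engine_example *e,
				struct crypto_async_request *new_req)
{
	struct crypto_async_request *async_req = NULL, *backlog = NULL;
	unsigned long flags;
	int ret = 0;

	spin_lock_irqsave(&e->lock, flags);
	if (new_req)
		ret = crypto_enqueue_request(&e->queue, new_req);

	/* Dequeue only when the engine is idle. */
	if (!e->req) {
		backlog = crypto_get_backlog(&e->queue);
		async_req = crypto_dequeue_request(&e->queue);
		if (async_req)
			e->req = async_req;
	}
	spin_unlock_irqrestore(&e->lock, flags);

	if (!async_req)
		return ret;

	/* Tell a backlogged submitter its request is now queued. */
	if (backlog)
		crypto_request_complete(backlog, -EINPROGRESS);

	/* The real driver now dispatches via ops->async_req_handle(). */
	return ret;
}

The s5p-sss.c, img-hash.c, atmel-tdes.c and hifn_795x.c hits further down follow the same pattern.
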
/drivers/crypto/ccp/
ccp-crypto-aes.c:23 static int ccp_aes_complete(struct crypto_async_request *async_req, int ret) in ccp_aes_complete() argument
25 struct skcipher_request *req = skcipher_request_cast(async_req); in ccp_aes_complete()
133 static int ccp_aes_rfc3686_complete(struct crypto_async_request *async_req, in ccp_aes_rfc3686_complete() argument
136 struct skcipher_request *req = skcipher_request_cast(async_req); in ccp_aes_rfc3686_complete()
142 return ccp_aes_complete(async_req, ret); in ccp_aes_rfc3686_complete()
ccp-crypto-des3.c:22 static int ccp_des3_complete(struct crypto_async_request *async_req, int ret) in ccp_des3_complete() argument
24 struct skcipher_request *req = skcipher_request_cast(async_req); in ccp_des3_complete()
ccp-crypto-aes-xts.c:62 static int ccp_aes_xts_complete(struct crypto_async_request *async_req, int ret) in ccp_aes_xts_complete() argument
64 struct skcipher_request *req = skcipher_request_cast(async_req); in ccp_aes_xts_complete()
ccp-crypto-rsa.c:44 static int ccp_rsa_complete(struct crypto_async_request *async_req, int ret) in ccp_rsa_complete() argument
46 struct akcipher_request *req = akcipher_request_cast(async_req); in ccp_rsa_complete()
ccp-crypto-aes-cmac.c:23 static int ccp_aes_cmac_complete(struct crypto_async_request *async_req, in ccp_aes_cmac_complete() argument
26 struct ahash_request *req = ahash_request_cast(async_req); in ccp_aes_cmac_complete()
ccp-crypto-sha.c:27 static int ccp_sha_complete(struct crypto_async_request *async_req, int ret) in ccp_sha_complete() argument
29 struct ahash_request *req = ahash_request_cast(async_req); in ccp_sha_complete()
ccp-crypto-aes-galois.c:24 static int ccp_aes_gcm_complete(struct crypto_async_request *async_req, int ret) in ccp_aes_gcm_complete() argument
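
Every ccp completion handler above makes the same first move: cast the generic crypto_async_request back to the concrete request type (skcipher_request_cast(), akcipher_request_cast(), ahash_request_cast()) before touching per-request state. A hedged sketch of that idiom; the function and the rctx type named in the comment are illustrative, only the cast helper is real API.

/*
 * Sketch of the ccp_*_complete() cast idiom.
 */
#include <crypto/internal/skcipher.h>

static int complete_example(struct crypto_async_request *async_req, int ret)
{
	/* container_of() back to the outer skcipher request */
	struct skcipher_request *req = skcipher_request_cast(async_req);

	if (ret)
		return ret;

	/*
	 * Per-request driver state lives in the request context, e.g. a
	 * hypothetical struct rctx_example obtained with
	 * skcipher_request_ctx(req); the ccp handlers copy the next IV
	 * out of it at this point.
	 */
	(void)req;
	return 0;
}
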
/drivers/crypto/allwinner/sun8i-ce/
sun8i-ce-cipher.c:134 static int sun8i_ce_cipher_prepare(struct crypto_engine *engine, void *async_req) in sun8i_ce_cipher_prepare() argument
136 struct skcipher_request *areq = container_of(async_req, struct skcipher_request, base); in sun8i_ce_cipher_prepare()
319 void *async_req) in sun8i_ce_cipher_unprepare() argument
321 struct skcipher_request *areq = container_of(async_req, struct skcipher_request, base); in sun8i_ce_cipher_unprepare()
/drivers/net/usb/
rtl8150.c:149 struct async_req { struct
177 struct async_req *req = (struct async_req *)urb->context; in async_set_reg_cb()
190 struct async_req *req; in async_set_registers()
192 req = kmalloc(sizeof(struct async_req), GFP_ATOMIC); in async_set_registers()
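
The rtl8150 hit is the odd one out: async_req here is the driver's own private struct, not the crypto API type. It packages a USB control-register write that must not sleep, travels in urb->context, and is freed by the completion callback. A sketch under those assumptions; the _example names and the vendor request value are illustrative, the flow (GFP_ATOMIC allocation, context pointer, free in the completion) mirrors the hits.

#include <linux/usb.h>
#include <linux/slab.h>

struct async_req_example {
	struct urb *urb;
	struct usb_ctrlrequest dr;	/* must outlive submission */
	__le32 value;
};

static void async_cb_example(struct urb *urb)
{
	struct async_req_example *req = urb->context;

	if (urb->status)
		dev_dbg(&urb->dev->dev, "async write failed: %d\n",
			urb->status);
	usb_free_urb(req->urb);
	kfree(req);
}

static int async_write_example(struct usb_device *udev, u16 indx,
			       u32 value, u16 size)
{
	struct async_req_example *req;
	int ret;

	req = kmalloc(sizeof(*req), GFP_ATOMIC);	/* may run in irq ctx */
	if (!req)
		return -ENOMEM;
	req->value = cpu_to_le32(value);

	req->dr.bRequestType = USB_DIR_OUT | USB_TYPE_VENDOR | USB_RECIP_DEVICE;
	req->dr.bRequest = 0x05;	/* hypothetical vendor request */
	req->dr.wValue = cpu_to_le16(indx);
	req->dr.wIndex = 0;
	req->dr.wLength = cpu_to_le16(size);

	req->urb = usb_alloc_urb(0, GFP_ATOMIC);
	if (!req->urb) {
		kfree(req);
		return -ENOMEM;
	}
	usb_fill_control_urb(req->urb, udev, usb_sndctrlpipe(udev, 0),
			     (unsigned char *)&req->dr, &req->value, size,
			     async_cb_example, req);
	ret = usb_submit_urb(req->urb, GFP_ATOMIC);
	if (ret) {
		usb_free_urb(req->urb);
		kfree(req);
	}
	return ret;
}
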
/drivers/crypto/
s5p-sss.c:1352 struct crypto_async_request *async_req, *backlog; in s5p_hash_handle_queue() local
1368 async_req = crypto_dequeue_request(&dd->hash_queue); in s5p_hash_handle_queue()
1369 if (async_req) in s5p_hash_handle_queue()
1374 if (!async_req) in s5p_hash_handle_queue()
1380 req = ahash_request_cast(async_req); in s5p_hash_handle_queue()
1953 struct crypto_async_request *async_req, *backlog; in s5p_tasklet_cb() local
1959 async_req = crypto_dequeue_request(&dev->queue); in s5p_tasklet_cb()
1961 if (!async_req) { in s5p_tasklet_cb()
1971 dev->req = skcipher_request_cast(async_req); in s5p_tasklet_cb()
img-hash.c:504 struct crypto_async_request *async_req, *backlog; in img_hash_handle_queue() local
520 async_req = crypto_dequeue_request(&hdev->queue); in img_hash_handle_queue()
521 if (async_req) in img_hash_handle_queue()
526 if (!async_req) in img_hash_handle_queue()
532 req = ahash_request_cast(async_req); in img_hash_handle_queue()
atmel-tdes.c:558 struct crypto_async_request *async_req, *backlog; in atmel_tdes_handle_queue() local
572 async_req = crypto_dequeue_request(&dd->queue); in atmel_tdes_handle_queue()
573 if (async_req) in atmel_tdes_handle_queue()
577 if (!async_req) in atmel_tdes_handle_queue()
583 req = skcipher_request_cast(async_req); in atmel_tdes_handle_queue()
sahara.c:988 struct crypto_async_request *async_req = areq; in sahara_do_one_request() local
991 if (crypto_tfm_alg_type(async_req->tfm) == CRYPTO_ALG_TYPE_AHASH) { in sahara_do_one_request()
992 struct ahash_request *req = ahash_request_cast(async_req); in sahara_do_one_request()
999 struct skcipher_request *req = skcipher_request_cast(async_req); in sahara_do_one_request()
1001 err = sahara_aes_process(skcipher_request_cast(async_req)); in sahara_do_one_request()
hifn_795x.c:1898 struct crypto_async_request *async_req; in hifn_flush() local
1914 while ((async_req = crypto_dequeue_request(&dev->queue))) { in hifn_flush()
1915 req = skcipher_request_cast(async_req); in hifn_flush()
2025 struct crypto_async_request *async_req, *backlog; in hifn_process_queue() local
2033 async_req = crypto_dequeue_request(&dev->queue); in hifn_process_queue()
2036 if (!async_req) in hifn_process_queue()
2042 req = skcipher_request_cast(async_req); in hifn_process_queue()
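
sahara.c:988-1001 (like qce's core.c at the top) also shows how one handler serves several algorithm types: the type is read from the request's tfm and the request is cast accordingly. A minimal sketch; the *_process_example() stubs stand in for the drivers' real per-type handlers.

/*
 * Hedged sketch of the dispatch-by-type step seen in
 * sahara_do_one_request() and qce_handle_request().
 */
#include <linux/errno.h>
#include <linux/crypto.h>
#include <crypto/hash.h>
#include <crypto/internal/skcipher.h>

static int ahash_process_example(struct ahash_request *req)
{
	return 0;	/* hypothetical hash path */
}

static int skcipher_process_example(struct skcipher_request *req)
{
	return 0;	/* hypothetical cipher path */
}

static int dispatch_example(struct crypto_async_request *async_req)
{
	switch (crypto_tfm_alg_type(async_req->tfm)) {
	case CRYPTO_ALG_TYPE_AHASH:
		return ahash_process_example(ahash_request_cast(async_req));
	case CRYPTO_ALG_TYPE_SKCIPHER:
		return skcipher_process_example(
				skcipher_request_cast(async_req));
	default:
		return -EINVAL;
	}
}
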
/drivers/nvme/host/
tcp.c:198 struct nvme_tcp_request async_req; member
294 return req == &req->queue->ctrl->async_req; in nvme_tcp_async_req()
2017 if (to_tcp_ctrl(ctrl)->async_req.pdu) { in nvme_tcp_free_admin_queue()
2020 to_tcp_ctrl(ctrl)->async_req.pdu = NULL; in nvme_tcp_free_admin_queue()
2614 ctrl->async_req.state = NVME_TCP_SEND_CMD_PDU; in nvme_tcp_submit_async_event()
2615 ctrl->async_req.offset = 0; in nvme_tcp_submit_async_event()
2616 ctrl->async_req.curr_bio = NULL; in nvme_tcp_submit_async_event()
2617 ctrl->async_req.data_len = 0; in nvme_tcp_submit_async_event()
2618 init_llist_node(&ctrl->async_req.lentry); in nvme_tcp_submit_async_event()
2619 INIT_LIST_HEAD(&ctrl->async_req.entry); in nvme_tcp_submit_async_event()
[all …]
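
The nvme/tcp hits use the name differently again: the controller embeds one pre-allocated request for the Asynchronous Event command, so tcp.c:294 can identify it by address alone instead of carrying a flag. Reduced to its essence, with illustrative struct skeletons:

/*
 * Hedged sketch of the embedded-AER-request idiom from tcp.c; the
 * _example structs are hypothetical skeletons of the real ones.
 */
#include <linux/types.h>

struct ctrl_example;

struct request_example {
	struct ctrl_example *ctrl;
	/* ... per-command state ... */
};

struct ctrl_example {
	struct request_example async_req;	/* the single AER slot */
};

/* True iff this request is the controller's async event request. */
static inline bool is_async_example(struct request_example *req)
{
	return req == &req->ctrl->async_req;
}
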
/drivers/crypto/amcc/
crypto4xx_core.h:70 struct crypto_async_request *async_req; /* base crypto request member
crypto4xx_core.c:514 req = skcipher_request_cast(pd_uinfo->async_req); in crypto4xx_cipher_done()
544 struct aead_request *aead_req = container_of(pd_uinfo->async_req, in crypto4xx_aead_done()
609 switch (crypto_tfm_alg_type(pd_uinfo->async_req->tfm)) { in crypto4xx_pd_done()
779 pd_uinfo->async_req = req; in crypto4xx_build_pd()
/drivers/crypto/rockchip/
rk3288_crypto_skcipher.c:299 static int rk_cipher_run(struct crypto_engine *engine, void *async_req) in rk_cipher_run() argument
301 struct skcipher_request *areq = container_of(async_req, struct skcipher_request, base); in rk_cipher_run()
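
Finally, the sun8i-ce and rk3288 hits show the crypto_engine calling convention: the engine hands the driver a void pointer to the request's base member, and container_of() recovers the skcipher request. A sketch; the _example function is hypothetical and its body is a placeholder.

/*
 * Hedged sketch of the crypto_engine callback shape used by
 * sun8i_ce_cipher_prepare() and rk_cipher_run().
 */
#include <linux/errno.h>
#include <linux/container_of.h>
#include <crypto/engine.h>
#include <crypto/skcipher.h>

static int cipher_run_example(struct crypto_engine *engine, void *async_req)
{
	struct skcipher_request *areq =
		container_of(async_req, struct skcipher_request, base);

	if (!areq->cryptlen)
		return -EINVAL;

	/*
	 * Program the hardware from areq->src/dst/iv here, then report
	 * completion with crypto_finalize_skcipher_request(engine, areq, err).
	 */
	return 0;
}
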
