
Searched refs:out_sg (Results 1 – 23 of 23) sorted by relevance

/drivers/crypto/nx/
nx-aes-xcbc.c
62 struct nx_sg *in_sg, *out_sg; in nx_xcbc_empty() local
86 out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *) keys, &len, in nx_xcbc_empty()
93 nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg); in nx_xcbc_empty()
114 out_sg = nx_build_sg_list(nx_ctx->out_sg, out, &len, in nx_xcbc_empty()
121 nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg); in nx_xcbc_empty()
172 struct nx_sg *out_sg; in nx_xcbc_update() local
194 out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *)sctx->state, in nx_xcbc_update()
202 nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg); in nx_xcbc_update()
250 struct nx_sg *in_sg, *out_sg; in nx_xcbc_finup() local
284 out_sg = nx_build_sg_list(nx_ctx->out_sg, out, &len, in nx_xcbc_finup()
[all …]
nx-sha256.c
69 struct nx_sg *out_sg; in nx_sha256_update() local
87 out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *)sctx->state, in nx_sha256_update()
89 nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg); in nx_sha256_update()
147 struct nx_sg *in_sg, *out_sg; in nx_sha256_finup() local
179 out_sg = nx_build_sg_list(nx_ctx->out_sg, out, &len, max_sg_len); in nx_sha256_finup()
187 nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg); in nx_sha256_finup()
nx-sha512.c
70 struct nx_sg *out_sg; in nx_sha512_update() local
88 out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *)sctx->state, in nx_sha512_update()
90 nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg); in nx_sha512_update()
150 struct nx_sg *in_sg, *out_sg; in nx_sha512_finup() local
187 out_sg = nx_build_sg_list(nx_ctx->out_sg, out, &len, in nx_sha512_finup()
191 nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg); in nx_sha512_finup()
nx.c
246 struct nx_sg *nx_outsg = nx_ctx->out_sg; in nx_build_sg_lists()
271 nx_ctx->op.outlen = trim_sg_list(nx_ctx->out_sg, nx_outsg, delta, nbytes); in nx_build_sg_lists()
291 nx_ctx->op.out = __pa(nx_ctx->out_sg); in nx_ctx_init()
299 nx_ctx->op_aead.out = __pa(nx_ctx->out_sg); in nx_ctx_init()
654 nx_ctx->out_sg = (struct nx_sg *)((u8 *)nx_ctx->in_sg + NX_PAGE_SIZE); in nx_crypto_ctx_init()
658 (struct nx_csbcpb *)((u8 *)nx_ctx->out_sg + in nx_crypto_ctx_init()
730 nx_ctx->out_sg = NULL; in nx_crypto_ctx_exit()
nx-aes-gcm.c
246 struct nx_sg *in_sg, *out_sg; in gcm_empty() local
273 out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *) out, &len, in gcm_empty()
280 nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg); in gcm_empty()
nx.h
131 struct nx_sg *out_sg; /* aligned pointer into kmem to an sg list */ member
nx-aes-ccm.c
162 struct nx_sg *nx_outsg = nx_ctx->out_sg; in generate_pat()
249 nx_ctx->op.outlen = (nx_ctx->out_sg - nx_outsg) * in generate_pat()
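Note: every nx/ hit above follows one pattern: the output scatter/gather list is built into the preallocated nx_ctx->out_sg region with nx_build_sg_list(), and op.outlen records how many bytes of that region were filled (nx_ctx_init() has already set op.out to __pa(nx_ctx->out_sg), per the nx.c hits). The fragment below is only a sketch of that pattern, assuming nx_build_sg_list() (declared in drivers/crypto/nx/nx.h) returns a cursor one nx_sg entry past the last slot it filled; the helper name and digest parameters are hypothetical.

    #include <linux/errno.h>
    #include <linux/types.h>
    #include "nx.h"	/* struct nx_crypto_ctx, struct nx_sg, nx_build_sg_list() */

    /* Hypothetical helper: describe 'digest' as the coprocessor output buffer. */
    static int nx_sketch_build_out(struct nx_crypto_ctx *nx_ctx, u8 *digest,
                                   unsigned int digest_len, u32 max_sg_len)
    {
            unsigned int len = digest_len;
            struct nx_sg *out_end;

            out_end = nx_build_sg_list(nx_ctx->out_sg, digest, &len, max_sg_len);
            if (len != digest_len)
                    return -EINVAL;	/* buffer did not fit in max_sg_len entries */

            /* op.out already holds __pa(nx_ctx->out_sg); op.outlen is the byte
             * size of the portion of the sg array just filled (assumption:
             * out_end is one entry past the last slot used). */
            nx_ctx->op.outlen = (out_end - nx_ctx->out_sg) * sizeof(struct nx_sg);
            return 0;
    }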
/drivers/crypto/
omap-des.c
145 struct scatterlist *out_sg; member
477 dma_unmap_sg(dd->dev, dd->out_sg, dd->out_sg_len, in omap_des_crypt_dma_start()
530 dd->out_sg = req->dst; in omap_des_prepare_req()
594 dma_unmap_sg(dd->dev, dd->out_sg, dd->out_sg_len, in omap_des_done_task()
863 BUG_ON(!dd->out_sg); in omap_des_irq()
865 BUG_ON(dd->out_sg_offset > dd->out_sg->length); in omap_des_irq()
867 dst = sg_virt(dd->out_sg) + dd->out_sg_offset; in omap_des_irq()
872 if (dd->out_sg_offset == dd->out_sg->length) { in omap_des_irq()
873 dd->out_sg = sg_next(dd->out_sg); in omap_des_irq()
874 if (dd->out_sg) { in omap_des_irq()
[all …]
omap-aes-gcm.c
37 dd->out_sg = NULL; in omap_aes_gcm_finish_req()
56 dma_sync_sg_for_device(dd->dev, dd->out_sg, dd->out_sg_len, in omap_aes_gcm_done_task()
59 dma_unmap_sg(dd->dev, dd->out_sg, dd->out_sg_len, DMA_FROM_DEVICE); in omap_aes_gcm_done_task()
62 omap_crypto_cleanup(dd->out_sg, dd->orig_out, in omap_aes_gcm_done_task()
152 dd->out_sg = req->dst; in omap_aes_gcm_copy_buffers()
155 dd->out_sg = scatterwalk_ffwd(sg_arr, req->dst, req->assoclen); in omap_aes_gcm_copy_buffers()
158 if (req->src == req->dst || dd->out_sg == sg_arr) in omap_aes_gcm_copy_buffers()
162 ret = omap_crypto_align_sg(&dd->out_sg, cryptlen, in omap_aes_gcm_copy_buffers()
171 dd->out_sg_len = sg_nents_for_len(dd->out_sg, clen); in omap_aes_gcm_copy_buffers()
omap-aes.c
266 struct scatterlist *out_sg, in omap_aes_crypt_dma() argument
388 dma_unmap_sg(dd->dev, dd->out_sg, dd->out_sg_len, in omap_aes_crypt_dma_start()
439 dd->out_sg = req->dst; in omap_aes_prepare_req()
507 dma_unmap_sg(dd->dev, dd->out_sg, dd->out_sg_len, in omap_aes_done_task()
900 BUG_ON(!dd->out_sg); in omap_aes_irq()
902 BUG_ON(dd->out_sg_offset > dd->out_sg->length); in omap_aes_irq()
904 dst = sg_virt(dd->out_sg) + dd->out_sg_offset; in omap_aes_irq()
909 if (dd->out_sg_offset == dd->out_sg->length) { in omap_aes_irq()
910 dd->out_sg = sg_next(dd->out_sg); in omap_aes_irq()
911 if (dd->out_sg) { in omap_aes_irq()
[all …]
atmel-tdes.c
108 struct scatterlist *out_sg; member
294 dma_unmap_sg(dd->dev, dd->out_sg, 1, DMA_FROM_DEVICE); in atmel_tdes_crypt_pdc_stop()
463 IS_ALIGNED(dd->out_sg->length, dd->ctx->block_size); in atmel_tdes_crypt_start()
466 if (sg_dma_len(dd->in_sg) != sg_dma_len(dd->out_sg)) in atmel_tdes_crypt_start()
473 count = min_t(size_t, count, sg_dma_len(dd->out_sg)); in atmel_tdes_crypt_start()
481 err = dma_map_sg(dd->dev, dd->out_sg, 1, in atmel_tdes_crypt_start()
491 addr_out = sg_dma_address(dd->out_sg); in atmel_tdes_crypt_start()
515 dma_unmap_sg(dd->dev, dd->out_sg, 1, DMA_TO_DEVICE); in atmel_tdes_crypt_start()
591 dd->out_sg = req->dst; in atmel_tdes_handle_queue()
868 dd->out_sg = sg_next(dd->out_sg); in atmel_tdes_done_task()
[all …]
sahara.c
214 struct scatterlist *out_sg; member
463 dev->nb_out_sg = sg_nents_for_len(dev->out_sg, dev->total); in sahara_hw_descriptor_create()
481 ret = dma_map_sg(dev->device, dev->out_sg, dev->nb_out_sg, in sahara_hw_descriptor_create()
506 sg = dev->out_sg; in sahara_hw_descriptor_create()
572 dev->out_sg = req->dst; in sahara_aes_process()
603 dma_unmap_sg(dev->device, dev->out_sg, dev->nb_out_sg, in sahara_aes_process()
omap-aes.h
180 struct scatterlist *out_sg; member
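Note: the omap-*, atmel-tdes and sahara hits share the usual skcipher DMA shape: out_sg points at req->dst, the driver counts the entries covering the payload, maps them for DMA_FROM_DEVICE, and on the PIO/IRQ path walks the list with sg_virt()/sg_next(). The sketch below uses only the generic scatterlist/DMA API; the function names and the dev/dst/total parameters stand in for the drivers' private fields.

    #include <linux/dma-mapping.h>
    #include <linux/scatterlist.h>
    #include <linux/types.h>

    /* Map the destination list for device-to-memory DMA, as the
     * omap/atmel/sahara prepare paths above do with dd->out_sg. */
    static int sketch_map_out_sg(struct device *dev, struct scatterlist *dst,
                                 unsigned int total)
    {
            int nents = sg_nents_for_len(dst, total);	/* entries covering 'total' bytes */

            if (nents < 0)
                    return nents;
            if (!dma_map_sg(dev, dst, nents, DMA_FROM_DEVICE))
                    return -ENOMEM;

            /* ... start DMA; completion path later calls
             * dma_unmap_sg(dev, dst, nents, DMA_FROM_DEVICE); */
            return nents;
    }

    /* PIO path: step through the output list entry by entry, as the
     * omap_*_irq() handlers above do with out_sg/out_sg_offset. */
    static void sketch_pio_walk(struct scatterlist *out_sg)
    {
            while (out_sg) {
                    u8 *dst = sg_virt(out_sg);	/* CPU address of this chunk */

                    /* real drivers copy up to out_sg->length result bytes here */
                    (void)dst;
                    out_sg = sg_next(out_sg);
            }
    }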
/drivers/crypto/stm32/
stm32-cryp.c
215 struct scatterlist *out_sg; member
937 kfree(cryp->out_sg); in stm32_cryp_dma_callback()
942 kfree(cryp->out_sg); in stm32_cryp_dma_callback()
1567 ret = stm32_cryp_truncate_sg(&cryp->out_sg, &cryp->out_sg_len, out_sg, 0, in stm32_cryp_cipher_prepare()
1577 cryp->out_sg = out_sg; in stm32_cryp_cipher_prepare()
1602 cryp->out_sg = NULL; in stm32_cryp_aead_prepare()
1641 ret = stm32_cryp_truncate_sg(&cryp->out_sg, &cryp->out_sg_len, out_sg, skip, align_size); in stm32_cryp_aead_prepare()
1654 kfree(cryp->out_sg); in stm32_cryp_aead_prepare()
1655 cryp->out_sg = NULL; in stm32_cryp_aead_prepare()
1698 out_sg = req->dst; in stm32_cryp_prepare_req()
[all …]
/drivers/crypto/gemini/
sl3516-ce-cipher.c
31 struct scatterlist *out_sg; in sl3516_ce_need_fallback() local
88 out_sg = areq->dst; in sl3516_ce_need_fallback()
89 while (in_sg && out_sg) { in sl3516_ce_need_fallback()
90 if (in_sg->length != out_sg->length) { in sl3516_ce_need_fallback()
95 out_sg = sg_next(out_sg); in sl3516_ce_need_fallback()
97 if (in_sg || out_sg) in sl3516_ce_need_fallback()
/drivers/crypto/allwinner/sun8i-ss/
sun8i-ss-cipher.c
29 struct scatterlist *out_sg = areq->dst; in sun8i_ss_need_fallback() local
77 out_sg = areq->dst; in sun8i_ss_need_fallback()
78 while (in_sg && out_sg) { in sun8i_ss_need_fallback()
79 if (in_sg->length != out_sg->length) in sun8i_ss_need_fallback()
82 out_sg = sg_next(out_sg); in sun8i_ss_need_fallback()
84 if (in_sg || out_sg) in sun8i_ss_need_fallback()
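Note: sl3516-ce-cipher.c and sun8i-ss-cipher.c above use the same fallback test: walk the source and destination lists in lockstep and bail out to the software fallback if the chunk lengths ever differ or one list ends before the other. A small sketch of that check, using only the generic scatterlist API (the function name is hypothetical):

    #include <linux/scatterlist.h>
    #include <linux/types.h>

    /* Return true when src/dst scatterlists are not shaped identically and
     * the request should go to the software fallback, as in both drivers. */
    static bool sketch_sg_shapes_differ(struct scatterlist *in_sg,
                                        struct scatterlist *out_sg)
    {
            while (in_sg && out_sg) {
                    if (in_sg->length != out_sg->length)
                            return true;	/* mismatched chunk sizes */
                    in_sg = sg_next(in_sg);
                    out_sg = sg_next(out_sg);
            }
            /* one list longer than the other also forces the fallback */
            return in_sg || out_sg;
    }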
/drivers/crypto/allwinner/sun4i-ss/
sun4i-ss-cipher.c
183 struct scatterlist *out_sg = areq->dst; in sun4i_ss_cipher_poll() local
230 while (out_sg && no_chunk == 1) { in sun4i_ss_cipher_poll()
231 if ((out_sg->length | out_sg->offset) & 3u) in sun4i_ss_cipher_poll()
233 out_sg = sg_next(out_sg); in sun4i_ss_cipher_poll()
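Note: sun4i_ss_cipher_poll() above additionally refuses the no-bounce path unless every destination chunk has a 4-byte-aligned offset and length, since the driver feeds the hardware FIFO in 32-bit words. Sketch of that test (hypothetical wrapper name):

    #include <linux/scatterlist.h>
    #include <linux/types.h>

    /* True if every entry of the output list is word-sized and word-aligned,
     * mirroring the ((out_sg->length | out_sg->offset) & 3u) test above. */
    static bool sketch_out_sg_word_aligned(struct scatterlist *out_sg)
    {
            for (; out_sg; out_sg = sg_next(out_sg))
                    if ((out_sg->length | out_sg->offset) & 3u)
                            return false;
            return true;
    }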
/drivers/crypto/starfive/
jh7110-rsa.c
278 sg_copy_buffer(rctx->out_sg, sg_nents(rctx->out_sg), in starfive_rsa_enc_core()
310 rctx->out_sg = req->dst; in starfive_rsa_enc()
341 rctx->out_sg = req->dst; in starfive_rsa_dec()
jh7110-aes.c
313 scatterwalk_map_and_copy(cryp->tag_out, rctx->out_sg, in starfive_aes_read_authtag()
585 rctx->out_sg = req->dst; in starfive_aes_do_one_req()
598 ret = starfive_aes_map_sg(cryp, rctx->in_sg, rctx->out_sg); in starfive_aes_do_one_req()
654 rctx->out_sg = rctx->in_sg; in starfive_aes_aead_do_one_req()
656 rctx->out_sg = scatterwalk_ffwd(_dst, req->dst, cryp->assoclen); in starfive_aes_aead_do_one_req()
710 ret = starfive_aes_map_sg(cryp, rctx->in_sg, rctx->out_sg); in starfive_aes_aead_do_one_req()
jh7110-cryp.h
218 struct scatterlist *out_sg; member
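Note: the jh7110-aes.c hits show the usual AEAD destination handling: for in-place requests out_sg simply reuses the (already forwarded) input list, otherwise scatterwalk_ffwd() skips the associated data in req->dst, and scatterwalk_map_and_copy() moves the authentication tag between the destination list and a linear buffer. A hedged sketch of those two steps; the function names and the fwd_in_sg/tag parameters are placeholders, not the driver's own code.

    #include <crypto/aead.h>
    #include <crypto/scatterwalk.h>

    /* Choose the output list for an AEAD request: reuse the forwarded input
     * list when the request is in-place, otherwise skip the associated data
     * in req->dst.  '_dst' must outlive any use of the returned list. */
    static struct scatterlist *sketch_aead_out_sg(struct aead_request *req,
                                                  struct scatterlist *fwd_in_sg,
                                                  struct scatterlist _dst[2])
    {
            if (req->src == req->dst)
                    return fwd_in_sg;
            return scatterwalk_ffwd(_dst, req->dst, req->assoclen);
    }

    /* On encryption, place the computed tag right after the ciphertext in the
     * (assoclen-skipped) output list; the final '1' selects buffer-to-sg. */
    static void sketch_write_tag(struct scatterlist *out_sg, void *tag,
                                 unsigned int authsize, unsigned int cryptlen)
    {
            scatterwalk_map_and_copy(tag, out_sg, cryptlen, authsize, 1);
    }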
/drivers/crypto/aspeed/
aspeed-hace-crypto.c
155 struct scatterlist *out_sg; in aspeed_sk_transfer() local
161 out_sg = req->dst; in aspeed_sk_transfer()
164 nbytes = sg_copy_from_buffer(out_sg, rctx->dst_nents, in aspeed_sk_transfer()
aspeed-acry.c
303 struct scatterlist *out_sg = req->dst; in aspeed_acry_rsa_transfer() local
334 scatterwalk_map_and_copy(dram_buffer, out_sg, 0, result_nbytes, in aspeed_acry_rsa_transfer()
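Note: both aspeed hits copy a result out of a driver-owned linear buffer into the caller's destination list, aspeed-hace-crypto.c with sg_copy_from_buffer() and aspeed-acry.c with scatterwalk_map_and_copy(). Minimal sketch of the first variant (the result/len names are placeholders):

    #include <linux/scatterlist.h>

    /* Copy 'len' result bytes from a linear bounce buffer back into the
     * request's destination scatterlist, as aspeed_sk_transfer() does above. */
    static int sketch_copy_result(struct scatterlist *out_sg,
                                  unsigned int dst_nents,
                                  const void *result, size_t len)
    {
            size_t copied = sg_copy_from_buffer(out_sg, dst_nents, result, len);

            return copied == len ? 0 : -EINVAL;	/* short copy: dst too small */
    }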
/drivers/virtio/
virtio_rtc_driver.c
416 struct scatterlist out_sg[1]; in viortc_msg_xfer() local
424 sgs[0] = out_sg; in viortc_msg_xfer()
427 sg_init_one(out_sg, msg->req, msg->req_size); in viortc_msg_xfer()
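Note: virtio_rtc_driver.c uses out_sg in the virtio sense: a single-entry scatterlist wrapping the request message, initialised with sg_init_one() and handed to the virtqueue as the driver-to-device ("out") buffer. A hedged sketch of that idiom; the response handling and function name are simplified placeholders, and req/resp are assumed to be DMA-able (e.g. kmalloc'd) memory.

    #include <linux/gfp.h>
    #include <linux/scatterlist.h>
    #include <linux/virtio.h>

    /* Queue one request/response pair: out_sg carries the device-readable
     * request, in_sg the device-writable response, as in viortc_msg_xfer(). */
    static int sketch_vq_xfer(struct virtqueue *vq, void *req, size_t req_size,
                              void *resp, size_t resp_size, void *token)
    {
            struct scatterlist out_sg[1], in_sg[1];
            struct scatterlist *sgs[2] = { out_sg, in_sg };
            int ret;

            sg_init_one(out_sg, req, req_size);	/* driver -> device */
            sg_init_one(in_sg, resp, resp_size);	/* device -> driver */

            /* one "out" sg list followed by one "in" sg list */
            ret = virtqueue_add_sgs(vq, sgs, 1, 1, token, GFP_KERNEL);
            if (ret)
                    return ret;

            virtqueue_kick(vq);
            return 0;
    }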
