/drivers/crypto/nx/
nx-aes-xcbc.c
    62  struct nx_sg *in_sg, *out_sg;  [in nx_xcbc_empty(), local]
    86  out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *) keys, &len,  [in nx_xcbc_empty()]
    93  nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);  [in nx_xcbc_empty()]
   114  out_sg = nx_build_sg_list(nx_ctx->out_sg, out, &len,  [in nx_xcbc_empty()]
   121  nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);  [in nx_xcbc_empty()]
   172  struct nx_sg *out_sg;  [in nx_xcbc_update(), local]
   194  out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *)sctx->state,  [in nx_xcbc_update()]
   202  nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);  [in nx_xcbc_update()]
   250  struct nx_sg *in_sg, *out_sg;  [in nx_xcbc_finup(), local]
   284  out_sg = nx_build_sg_list(nx_ctx->out_sg, out, &len,  [in nx_xcbc_finup()]
  [all …]
|
nx-sha256.c
    69  struct nx_sg *out_sg;  [in nx_sha256_update(), local]
    87  out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *)sctx->state,  [in nx_sha256_update()]
    89  nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);  [in nx_sha256_update()]
   147  struct nx_sg *in_sg, *out_sg;  [in nx_sha256_finup(), local]
   179  out_sg = nx_build_sg_list(nx_ctx->out_sg, out, &len, max_sg_len);  [in nx_sha256_finup()]
   187  nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);  [in nx_sha256_finup()]
|
nx-sha512.c
    70  struct nx_sg *out_sg;  [in nx_sha512_update(), local]
    88  out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *)sctx->state,  [in nx_sha512_update()]
    90  nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);  [in nx_sha512_update()]
   150  struct nx_sg *in_sg, *out_sg;  [in nx_sha512_finup(), local]
   187  out_sg = nx_build_sg_list(nx_ctx->out_sg, out, &len,  [in nx_sha512_finup()]
   191  nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);  [in nx_sha512_finup()]
|
nx.c
   246  struct nx_sg *nx_outsg = nx_ctx->out_sg;  [in nx_build_sg_lists()]
   271  nx_ctx->op.outlen = trim_sg_list(nx_ctx->out_sg, nx_outsg, delta, nbytes);  [in nx_build_sg_lists()]
   291  nx_ctx->op.out = __pa(nx_ctx->out_sg);  [in nx_ctx_init()]
   299  nx_ctx->op_aead.out = __pa(nx_ctx->out_sg);  [in nx_ctx_init()]
   654  nx_ctx->out_sg = (struct nx_sg *)((u8 *)nx_ctx->in_sg + NX_PAGE_SIZE);  [in nx_crypto_ctx_init()]
   658  (struct nx_csbcpb *)((u8 *)nx_ctx->out_sg +  [in nx_crypto_ctx_init()]
   730  nx_ctx->out_sg = NULL;  [in nx_crypto_ctx_exit()]
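
The pattern repeated across the nx files in this directory: build a hardware sg list over a kernel buffer, then size op.outlen from how far the builder advanced past the list base. Below is a minimal sketch of that contract with a simplified stand-in descriptor and helper; the real struct nx_sg and nx_build_sg_list() live in nx.h and nx.c, and the signature here is only inferred from the call sites listed above (the real helper also honors page boundaries and carries more bookkeeping).

    #include <linux/kernel.h>
    #include <linux/mm.h>

    /* Simplified, hypothetical stand-in for the descriptor in nx.h. */
    struct demo_nx_sg {
            u64 addr;       /* physical address of the chunk */
            u32 rsvd;
            u32 len;        /* chunk length in bytes */
    } __packed;

    /*
     * Fill descriptors for a virtually contiguous buffer, one page-sized
     * chunk at a time, and return a pointer one past the last entry
     * written. On return, *len holds the number of bytes described.
     */
    static struct demo_nx_sg *demo_build_sg_list(struct demo_nx_sg *sg,
                                                 u8 *buf, unsigned int *len,
                                                 u32 sgmax)
    {
            unsigned int remain = *len;

            while (remain && sgmax--) {
                    unsigned int chunk = min_t(unsigned int, remain, PAGE_SIZE);

                    sg->addr = __pa(buf);   /* assumes a lowmem buffer */
                    sg->len = chunk;
                    sg++;
                    buf += chunk;
                    remain -= chunk;
            }
            *len -= remain;
            return sg;
    }

With that contract, the op.outlen assignments above reduce to the pointer difference between the list base and the returned end, scaled by sizeof(struct nx_sg): the byte size of the descriptor table handed to the coprocessor.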
|
nx-aes-gcm.c
   246  struct nx_sg *in_sg, *out_sg;  [in gcm_empty(), local]
   273  out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *) out, &len,  [in gcm_empty()]
   280  nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);  [in gcm_empty()]
|
nx.h
   131  struct nx_sg *out_sg; /* aligned pointer into kmem to an sg list */  [member]
|
nx-aes-ccm.c
   162  struct nx_sg *nx_outsg = nx_ctx->out_sg;  [in generate_pat()]
   249  nx_ctx->op.outlen = (nx_ctx->out_sg - nx_outsg) *  [in generate_pat()]
|
/drivers/crypto/
omap-des.c
   145  struct scatterlist *out_sg;  [member]
   477  dma_unmap_sg(dd->dev, dd->out_sg, dd->out_sg_len,  [in omap_des_crypt_dma_start()]
   530  dd->out_sg = req->dst;  [in omap_des_prepare_req()]
   594  dma_unmap_sg(dd->dev, dd->out_sg, dd->out_sg_len,  [in omap_des_done_task()]
   863  BUG_ON(!dd->out_sg);  [in omap_des_irq()]
   865  BUG_ON(dd->out_sg_offset > dd->out_sg->length);  [in omap_des_irq()]
   867  dst = sg_virt(dd->out_sg) + dd->out_sg_offset;  [in omap_des_irq()]
   872  if (dd->out_sg_offset == dd->out_sg->length) {  [in omap_des_irq()]
   873  dd->out_sg = sg_next(dd->out_sg);  [in omap_des_irq()]
   874  if (dd->out_sg) {  [in omap_des_irq()]
  [all …]
|
omap-aes-gcm.c
    37  dd->out_sg = NULL;  [in omap_aes_gcm_finish_req()]
    56  dma_sync_sg_for_device(dd->dev, dd->out_sg, dd->out_sg_len,  [in omap_aes_gcm_done_task()]
    59  dma_unmap_sg(dd->dev, dd->out_sg, dd->out_sg_len, DMA_FROM_DEVICE);  [in omap_aes_gcm_done_task()]
    62  omap_crypto_cleanup(dd->out_sg, dd->orig_out,  [in omap_aes_gcm_done_task()]
   152  dd->out_sg = req->dst;  [in omap_aes_gcm_copy_buffers()]
   155  dd->out_sg = scatterwalk_ffwd(sg_arr, req->dst, req->assoclen);  [in omap_aes_gcm_copy_buffers()]
   158  if (req->src == req->dst || dd->out_sg == sg_arr)  [in omap_aes_gcm_copy_buffers()]
   162  ret = omap_crypto_align_sg(&dd->out_sg, cryptlen,  [in omap_aes_gcm_copy_buffers()]
   171  dd->out_sg_len = sg_nents_for_len(dd->out_sg, clen);  [in omap_aes_gcm_copy_buffers()]
|
omap-aes.c
   266  struct scatterlist *out_sg,  [in omap_aes_crypt_dma(), argument]
   388  dma_unmap_sg(dd->dev, dd->out_sg, dd->out_sg_len,  [in omap_aes_crypt_dma_start()]
   439  dd->out_sg = req->dst;  [in omap_aes_prepare_req()]
   507  dma_unmap_sg(dd->dev, dd->out_sg, dd->out_sg_len,  [in omap_aes_done_task()]
   900  BUG_ON(!dd->out_sg);  [in omap_aes_irq()]
   902  BUG_ON(dd->out_sg_offset > dd->out_sg->length);  [in omap_aes_irq()]
   904  dst = sg_virt(dd->out_sg) + dd->out_sg_offset;  [in omap_aes_irq()]
   909  if (dd->out_sg_offset == dd->out_sg->length) {  [in omap_aes_irq()]
   910  dd->out_sg = sg_next(dd->out_sg);  [in omap_aes_irq()]
   911  if (dd->out_sg) {  [in omap_aes_irq()]
  [all …]
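
Both omap IRQ handlers above drain the result registers in PIO mode by keeping a (scatterlist, offset) cursor over dd->out_sg: sg_virt() plus the offset yields the CPU address for the next 32-bit word, and once an entry is exhausted the cursor hops to sg_next(). A sketch of that walk with hypothetical names (pio_cursor, pio_next_dst_word); it assumes, as the drivers do, that every entry is kernel-mapped and a multiple of 4 bytes long:

    #include <linux/scatterlist.h>

    /* Hypothetical cursor mirroring dd->out_sg / dd->out_sg_offset. */
    struct pio_cursor {
            struct scatterlist *sg;
            unsigned int offset;
    };

    /*
     * Return the CPU address of the next output word, then advance the
     * cursor, hopping to the next scatterlist entry once the current
     * one is full (c->sg becomes NULL at the end of the list).
     */
    static u32 *pio_next_dst_word(struct pio_cursor *c)
    {
            u32 *dst = (u32 *)(sg_virt(c->sg) + c->offset);

            c->offset += sizeof(u32);
            if (c->offset == c->sg->length) {
                    c->sg = sg_next(c->sg);
                    c->offset = 0;
            }
            return dst;
    }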
|
atmel-tdes.c
   108  struct scatterlist *out_sg;  [member]
   294  dma_unmap_sg(dd->dev, dd->out_sg, 1, DMA_FROM_DEVICE);  [in atmel_tdes_crypt_pdc_stop()]
   463  IS_ALIGNED(dd->out_sg->length, dd->ctx->block_size);  [in atmel_tdes_crypt_start()]
   466  if (sg_dma_len(dd->in_sg) != sg_dma_len(dd->out_sg))  [in atmel_tdes_crypt_start()]
   473  count = min_t(size_t, count, sg_dma_len(dd->out_sg));  [in atmel_tdes_crypt_start()]
   481  err = dma_map_sg(dd->dev, dd->out_sg, 1,  [in atmel_tdes_crypt_start()]
   491  addr_out = sg_dma_address(dd->out_sg);  [in atmel_tdes_crypt_start()]
   515  dma_unmap_sg(dd->dev, dd->out_sg, 1, DMA_TO_DEVICE);  [in atmel_tdes_crypt_start()]
   591  dd->out_sg = req->dst;  [in atmel_tdes_handle_queue()]
   868  dd->out_sg = sg_next(dd->out_sg);  [in atmel_tdes_done_task()]
  [all …]
|
sahara.c
   214  struct scatterlist *out_sg;  [member]
   463  dev->nb_out_sg = sg_nents_for_len(dev->out_sg, dev->total);  [in sahara_hw_descriptor_create()]
   481  ret = dma_map_sg(dev->device, dev->out_sg, dev->nb_out_sg,  [in sahara_hw_descriptor_create()]
   506  sg = dev->out_sg;  [in sahara_hw_descriptor_create()]
   572  dev->out_sg = req->dst;  [in sahara_aes_process()]
   603  dma_unmap_sg(dev->device, dev->out_sg, dev->nb_out_sg,  [in sahara_aes_process()]
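
sahara brackets each hardware pass the same way atmel-tdes does above, just over the whole list instead of one entry at a time: count the entries covering the payload with sg_nents_for_len(), dma_map_sg() them for the device, and dma_unmap_sg() with the same count and direction when the operation completes. A hedged sketch of that bracket (the helper name is illustrative):

    #include <linux/dma-mapping.h>
    #include <linux/errno.h>
    #include <linux/scatterlist.h>

    /*
     * Map the entries of out_sg that cover `total` bytes for the device.
     * The caller must later dma_unmap_sg() with the same nents and
     * direction, as sahara_aes_process() does above.
     */
    static int demo_map_dst(struct device *dev, struct scatterlist *out_sg,
                            unsigned int total, int *nb_out_sg)
    {
            int nents = sg_nents_for_len(out_sg, total);

            if (nents < 0)
                    return nents;   /* list shorter than `total` bytes */

            if (!dma_map_sg(dev, out_sg, nents, DMA_FROM_DEVICE))
                    return -ENOMEM; /* dma_map_sg() returns 0 on failure */

            *nb_out_sg = nents;
            return 0;
    }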
|
omap-aes.h
   180  struct scatterlist *out_sg;  [member]
|
/drivers/crypto/stm32/
stm32-cryp.c
   215  struct scatterlist *out_sg;  [member]
   937  kfree(cryp->out_sg);  [in stm32_cryp_dma_callback()]
   942  kfree(cryp->out_sg);  [in stm32_cryp_dma_callback()]
  1567  ret = stm32_cryp_truncate_sg(&cryp->out_sg, &cryp->out_sg_len, out_sg, 0,  [in stm32_cryp_cipher_prepare()]
  1577  cryp->out_sg = out_sg;  [in stm32_cryp_cipher_prepare()]
  1602  cryp->out_sg = NULL;  [in stm32_cryp_aead_prepare()]
  1641  ret = stm32_cryp_truncate_sg(&cryp->out_sg, &cryp->out_sg_len, out_sg, skip, align_size);  [in stm32_cryp_aead_prepare()]
  1654  kfree(cryp->out_sg);  [in stm32_cryp_aead_prepare()]
  1655  cryp->out_sg = NULL;  [in stm32_cryp_aead_prepare()]
  1698  out_sg = req->dst;  [in stm32_cryp_prepare_req()]
  [all …]
|
/drivers/crypto/gemini/
sl3516-ce-cipher.c
    31  struct scatterlist *out_sg;  [in sl3516_ce_need_fallback(), local]
    88  out_sg = areq->dst;  [in sl3516_ce_need_fallback()]
    89  while (in_sg && out_sg) {  [in sl3516_ce_need_fallback()]
    90  if (in_sg->length != out_sg->length) {  [in sl3516_ce_need_fallback()]
    95  out_sg = sg_next(out_sg);  [in sl3516_ce_need_fallback()]
    97  if (in_sg || out_sg)  [in sl3516_ce_need_fallback()]
|
/drivers/crypto/allwinner/sun8i-ss/
sun8i-ss-cipher.c
    29  struct scatterlist *out_sg = areq->dst;  [in sun8i_ss_need_fallback(), local]
    77  out_sg = areq->dst;  [in sun8i_ss_need_fallback()]
    78  while (in_sg && out_sg) {  [in sun8i_ss_need_fallback()]
    79  if (in_sg->length != out_sg->length)  [in sun8i_ss_need_fallback()]
    82  out_sg = sg_next(out_sg);  [in sun8i_ss_need_fallback()]
    84  if (in_sg || out_sg)  [in sun8i_ss_need_fallback()]
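
sl3516_ce_need_fallback() and sun8i_ss_need_fallback() above apply the same lockstep test: these engines require src and dst scatterlists whose entries pair up with equal lengths, and leftover entries on either side mean the lists diverged, forcing the software fallback. Condensed into a standalone helper (the name is illustrative):

    #include <linux/scatterlist.h>

    /*
     * True when both lists have pairwise-equal entry lengths and end
     * together; otherwise the request must take the fallback path.
     */
    static bool demo_sg_lists_match(struct scatterlist *in_sg,
                                    struct scatterlist *out_sg)
    {
            while (in_sg && out_sg) {
                    if (in_sg->length != out_sg->length)
                            return false;
                    in_sg = sg_next(in_sg);
                    out_sg = sg_next(out_sg);
            }
            return !in_sg && !out_sg;       /* leftovers => mismatch */
    }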
|
/drivers/crypto/allwinner/sun4i-ss/
sun4i-ss-cipher.c
   183  struct scatterlist *out_sg = areq->dst;  [in sun4i_ss_cipher_poll(), local]
   230  while (out_sg && no_chunk == 1) {  [in sun4i_ss_cipher_poll()]
   231  if ((out_sg->length | out_sg->offset) & 3u)  [in sun4i_ss_cipher_poll()]
   233  out_sg = sg_next(out_sg);  [in sun4i_ss_cipher_poll()]
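
sun4i_ss_cipher_poll() above checks alignment rather than pairwise lengths: its FIFO is fed in 32-bit words, so every destination entry must start and end on a 4-byte boundary, and the `(length | offset) & 3u` trick tests both fields at once. Condensed into a helper (the name is illustrative; the driver folds this into its no_chunk bookkeeping):

    #include <linux/scatterlist.h>

    /* True when every entry's offset and length are multiples of 4. */
    static bool demo_sg_word_aligned(struct scatterlist *sg)
    {
            for (; sg; sg = sg_next(sg))
                    if ((sg->length | sg->offset) & 3u)
                            return false;
            return true;
    }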
|
/drivers/crypto/starfive/
jh7110-rsa.c
   278  sg_copy_buffer(rctx->out_sg, sg_nents(rctx->out_sg),  [in starfive_rsa_enc_core()]
   310  rctx->out_sg = req->dst;  [in starfive_rsa_enc()]
   341  rctx->out_sg = req->dst;  [in starfive_rsa_dec()]
|
jh7110-aes.c
   313  scatterwalk_map_and_copy(cryp->tag_out, rctx->out_sg,  [in starfive_aes_read_authtag()]
   585  rctx->out_sg = req->dst;  [in starfive_aes_do_one_req()]
   598  ret = starfive_aes_map_sg(cryp, rctx->in_sg, rctx->out_sg);  [in starfive_aes_do_one_req()]
   654  rctx->out_sg = rctx->in_sg;  [in starfive_aes_aead_do_one_req()]
   656  rctx->out_sg = scatterwalk_ffwd(_dst, req->dst, cryp->assoclen);  [in starfive_aes_aead_do_one_req()]
   710  ret = starfive_aes_map_sg(cryp, rctx->in_sg, rctx->out_sg);  [in starfive_aes_aead_do_one_req()]
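
The AEAD path above combines two scatterwalk helpers: scatterwalk_ffwd() produces a view of req->dst that skips the associated data, and scatterwalk_map_and_copy() moves the authentication tag between that view and a linear buffer (its last argument selects direction: 0 copies sg to buffer, 1 copies buffer to sg). A sketch of the read side; the helper name and exact offsets are illustrative, not the driver's code:

    #include <crypto/aead.h>
    #include <crypto/scatterwalk.h>

    /* Read the tag the device appended right after the ciphertext. */
    static void demo_read_authtag(struct aead_request *req,
                                  unsigned int cryptlen,
                                  unsigned int authsize, u8 *tag_out)
    {
            struct scatterlist _dst[2];
            struct scatterlist *out_sg;

            /* skip req->assoclen bytes of associated data in req->dst */
            out_sg = scatterwalk_ffwd(_dst, req->dst, req->assoclen);

            /* 0 = copy from the scatterlist into tag_out */
            scatterwalk_map_and_copy(tag_out, out_sg, cryptlen, authsize, 0);
    }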
|
jh7110-cryp.h
   218  struct scatterlist *out_sg;  [member]
|
/drivers/crypto/aspeed/
aspeed-hace-crypto.c
   155  struct scatterlist *out_sg;  [in aspeed_sk_transfer(), local]
   161  out_sg = req->dst;  [in aspeed_sk_transfer()]
   164  nbytes = sg_copy_from_buffer(out_sg, rctx->dst_nents,  [in aspeed_sk_transfer()]
|
aspeed-acry.c
   303  struct scatterlist *out_sg = req->dst;  [in aspeed_acry_rsa_transfer(), local]
   334  scatterwalk_map_and_copy(dram_buffer, out_sg, 0, result_nbytes,  [in aspeed_acry_rsa_transfer()]
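
Both aspeed paths above resolve out_sg without DMA-mapping it: the engine leaves its result in a linear buffer, and the driver copies that buffer back into the request's destination scatterlist, via sg_copy_from_buffer() in the hace driver and scatterwalk_map_and_copy() in acry. A sketch of the copy-out step (the helper name is illustrative; dst_nents would come from sg_nents() or a cached count as above):

    #include <linux/errno.h>
    #include <linux/scatterlist.h>

    /* Copy `len` result bytes from a bounce buffer into the dst list;
     * sg_copy_from_buffer() returns the number of bytes it copied. */
    static int demo_copy_result_out(struct scatterlist *out_sg,
                                    unsigned int dst_nents,
                                    const void *buf, size_t len)
    {
            return sg_copy_from_buffer(out_sg, dst_nents, buf, len) == len
                    ? 0 : -EINVAL;
    }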
|
/drivers/virtio/
virtio_rtc_driver.c
   416  struct scatterlist out_sg[1];  [in viortc_msg_xfer(), local]
   424  sgs[0] = out_sg;  [in viortc_msg_xfer()]
   427  sg_init_one(out_sg, msg->req, msg->req_size);  [in viortc_msg_xfer()]
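
Here out_sg is not a chain at all: viortc_msg_xfer() wraps a single request buffer with sg_init_one() and hands it to the virtqueue as the driver-to-device ("out") part of the message. A sketch of that handoff with a matching device-to-driver ("in") buffer; names, sizes, and error handling are illustrative:

    #include <linux/gfp.h>
    #include <linux/scatterlist.h>
    #include <linux/virtio.h>

    /* Queue one request/response pair; `token` is handed back by
     * virtqueue_get_buf() when the device completes the message. */
    static int demo_queue_msg(struct virtqueue *vq,
                              void *req, size_t req_size,
                              void *resp, size_t resp_size, void *token)
    {
            struct scatterlist out_sg[1], in_sg[1];
            struct scatterlist *sgs[2] = { out_sg, in_sg };
            int ret;

            sg_init_one(out_sg, req, req_size);
            sg_init_one(in_sg, resp, resp_size);

            /* one "out" sg list, one "in" sg list */
            ret = virtqueue_add_sgs(vq, sgs, 1, 1, token, GFP_ATOMIC);
            if (ret)
                    return ret;

            virtqueue_kick(vq);
            return 0;
    }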
|