Cross-reference hits for the identifier sg_dst in linux-6.3-rc2, grouped by directory and file. Each entry gives the line number, a source excerpt, the enclosing function, and how sg_dst is declared there (local variable, struct member, or function argument).

/linux-6.3-rc2/arch/arm/crypto/

  aes-ce-glue.c
    273   struct scatterlist sg_src[2], sg_dst[2];                   in cts_cbc_encrypt() (local)
    303   dst = scatterwalk_ffwd(sg_dst, req->dst,                   in cts_cbc_encrypt()
    331   struct scatterlist sg_src[2], sg_dst[2];                   in cts_cbc_decrypt() (local)
    361   dst = scatterwalk_ffwd(sg_dst, req->dst,                   in cts_cbc_decrypt()
    450   struct scatterlist sg_src[2], sg_dst[2];                   in xts_encrypt() (local)
    498   dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);   in xts_encrypt()
    522   struct scatterlist sg_src[2], sg_dst[2];                   in xts_decrypt() (local)
    570   dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);   in xts_decrypt()

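In every hit above, sg_dst is a two-entry scratch scatterlist handed to scatterwalk_ffwd() so the CTS/XTS code can address only the tail of the request without modifying the caller's sg chain. The sketch below is a minimal, hypothetical illustration of that idiom; the function name and the tail_offset parameter are invented, and it is not the aes-ce-glue.c code itself.

    #include <crypto/skcipher.h>
    #include <crypto/scatterwalk.h>
    #include <linux/scatterlist.h>

    /*
     * Hedged sketch of the sg_src[2]/sg_dst[2] + scatterwalk_ffwd() idiom
     * seen in the listing above.
     */
    static int example_process_tail(struct skcipher_request *req,
                                    unsigned int tail_offset)
    {
            /*
             * Two-entry scratch lists: scatterwalk_ffwd() may have to split
             * the sg entry containing tail_offset and chain to the rest, so
             * it needs room for two entries that stay in scope while the
             * returned pointers are in use.
             */
            struct scatterlist sg_src[2], sg_dst[2];
            struct scatterlist *src, *dst;

            src = scatterwalk_ffwd(sg_src, req->src, tail_offset);
            dst = src;
            if (req->dst != req->src)
                    dst = scatterwalk_ffwd(sg_dst, req->dst, tail_offset);

            /*
             * src/dst now describe only the final partial block(s); a real
             * CTS/XTS implementation would run its tail operation on them
             * here, before sg_src/sg_dst go out of scope.
             */
            return 0;
    }
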
/linux-6.3-rc2/arch/arm64/crypto/

  aes-glue.c
    284   struct scatterlist sg_src[2], sg_dst[2];                   in cts_cbc_encrypt() (local)
    313   dst = scatterwalk_ffwd(sg_dst, req->dst,                   in cts_cbc_encrypt()
    341   struct scatterlist sg_src[2], sg_dst[2];                   in cts_cbc_decrypt() (local)
    370   dst = scatterwalk_ffwd(sg_dst, req->dst,                   in cts_cbc_decrypt()
    547   struct scatterlist sg_src[2], sg_dst[2];                   in xts_encrypt() (local)
    595   dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);   in xts_encrypt()
    619   struct scatterlist sg_src[2], sg_dst[2];                   in xts_decrypt() (local)
    667   dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);   in xts_decrypt()

  sm4-ce-glue.c
    217   struct scatterlist sg_src[2], sg_dst[2];                   in sm4_cbc_cts_crypt() (local)
    246   dst = scatterwalk_ffwd(sg_dst, req->dst,                   in sm4_cbc_cts_crypt()
    416   struct scatterlist sg_src[2], sg_dst[2];                   in sm4_xts_crypt() (local)
    480   dst = scatterwalk_ffwd(sg_dst, req->dst, subreq.cryptlen); in sm4_xts_crypt()

  aes-neonbs-glue.c
    272   struct scatterlist sg_src[2], sg_dst[2];                   in __xts_crypt() (local)
    346   dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);   in __xts_crypt()

/linux-6.3-rc2/drivers/crypto/

  s5p-sss.c
    312   struct scatterlist *sg_dst;                                (member)
    507   dma_unmap_sg(dev->dev, dev->sg_dst, 1, DMA_FROM_DEVICE);   in s5p_unset_outdata()
    549   dev->sg_dst = sg;                                          in s5p_set_outdata()
    580   if (!sg_is_last(dev->sg_dst)) {                            in s5p_aes_tx()
    581   ret = s5p_set_outdata(dev, sg_next(dev->sg_dst));          in s5p_aes_tx()
    689   if (sg_is_last(dev->sg_dst))                               in s5p_aes_interrupt()
    745   s5p_set_dma_outdata(dev, dev->sg_dst);                     in s5p_aes_interrupt()
    1956  s5p_set_dma_outdata(dev, dev->sg_dst);                     in s5p_aes_crypt_start()

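Here sg_dst is not scratch space but a cursor: the driver keeps a pointer to the current output scatterlist entry, unmaps each entry as its DMA completes, and advances with sg_next() until sg_is_last() is true. The sketch below is a simplified, hypothetical rendering of that bookkeeping; only the sg_dst field name follows the listing, everything else is invented for illustration.

    #include <linux/device.h>
    #include <linux/dma-mapping.h>
    #include <linux/errno.h>
    #include <linux/scatterlist.h>

    /* Illustrative device state; only the sg_dst cursor mirrors the listing. */
    struct example_aes_dev {
            struct device *dev;
            struct scatterlist *sg_dst;     /* current output sg entry */
    };

    /* Drop the DMA mapping of the entry that just completed. */
    static void example_unset_outdata(struct example_aes_dev *d)
    {
            dma_unmap_sg(d->dev, d->sg_dst, 1, DMA_FROM_DEVICE);
    }

    /* Map the next entry and make it the new cursor. */
    static int example_set_outdata(struct example_aes_dev *d,
                                   struct scatterlist *sg)
    {
            if (!sg->length)
                    return -EINVAL;
            if (!dma_map_sg(d->dev, sg, 1, DMA_FROM_DEVICE))
                    return -ENOMEM;
            d->sg_dst = sg;
            return 0;
    }

    /* Advance after each completed block, as the tx/interrupt paths above do. */
    static int example_advance_outdata(struct example_aes_dev *d)
    {
            example_unset_outdata(d);
            if (sg_is_last(d->sg_dst))
                    return 0;       /* whole destination list consumed */
            return example_set_outdata(d, sg_next(d->sg_dst));
    }
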
/linux-6.3-rc2/arch/x86/crypto/

  aesni-intel_glue.c
    386   struct scatterlist sg_src[2], sg_dst[2];                   in cts_cbc_encrypt() (local)
    415   dst = scatterwalk_ffwd(sg_dst, req->dst,                   in cts_cbc_encrypt()
    442   struct scatterlist sg_src[2], sg_dst[2];                   in cts_cbc_decrypt() (local)
    471   dst = scatterwalk_ffwd(sg_dst, req->dst,                   in cts_cbc_decrypt()
    970   struct scatterlist sg_src[2], sg_dst[2];                   in xts_crypt() (local)
    975   dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);   in xts_crypt()

/linux-6.3-rc2/crypto/

  hctr2.c
    65    struct scatterlist sg_dst[2];                                    (member)
    285   rctx->bulk_part_dst = scatterwalk_ffwd(rctx->sg_dst, req->dst,   in hctr2_crypt()

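hctr2.c uses the same scatterwalk_ffwd() trick, but its sg_dst[2] scratch lives in the per-request context rather than on the stack, presumably because the forwarded pointer is handed to a sub-request that may complete asynchronously and so must outlive the function that set it up. A hypothetical sketch of that layout follows; apart from sg_dst and bulk_part_dst, the names are invented.

    #include <crypto/scatterwalk.h>
    #include <linux/scatterlist.h>

    /*
     * Illustrative per-request context: the two-entry scratch list is
     * embedded here so the pointer returned by scatterwalk_ffwd() stays
     * valid for as long as the request context itself does.
     */
    struct example_request_ctx {
            struct scatterlist sg_dst[2];           /* scratch for the split entry */
            struct scatterlist *bulk_part_dst;      /* req->dst minus the leading block */
    };

    static void example_skip_header(struct example_request_ctx *rctx,
                                    struct scatterlist *req_dst,
                                    unsigned int header_len)
    {
            rctx->bulk_part_dst = scatterwalk_ffwd(rctx->sg_dst, req_dst,
                                                   header_len);
    }
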
/linux-6.3-rc2/drivers/dma/

  ste_dma40.c
    2110  struct scatterlist *sg_src, struct scatterlist *sg_dst,    in d40_prep_sg_log() (argument)
    2126  ret = d40_log_sg_to_lli(sg_dst, sg_len,                    in d40_prep_sg_log()
    2138  struct scatterlist *sg_src, struct scatterlist *sg_dst,    in d40_prep_sg_phy() (argument)
    2157  ret = d40_phy_sg_to_lli(sg_dst, sg_len, dst_dev_addr,      in d40_prep_sg_phy()
    2209  struct scatterlist *sg_dst, unsigned int sg_len,           in d40_prep_sg() (argument)
    2243  ret = d40_prep_sg_log(chan, desc, sg_src, sg_dst,          in d40_prep_sg()
    2246  ret = d40_prep_sg_phy(chan, desc, sg_src, sg_dst,          in d40_prep_sg()

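In the DMA driver, sg_src and sg_dst are function arguments: source and destination scatterlists with the same entry count, translated into hardware link items. The sketch below only illustrates that lockstep walk over two scatterlists; the lli structure and the function are invented for the example and are not the ste_dma40 code.

    #include <linux/errno.h>
    #include <linux/scatterlist.h>
    #include <linux/types.h>

    /* Invented link-item layout: one descriptor per src/dst entry pair. */
    struct example_lli {
            dma_addr_t src;
            dma_addr_t dst;
            u32 len;
    };

    /*
     * Walk the (already DMA-mapped) source and destination lists in
     * lockstep and emit one link item per pair of entries.
     */
    static int example_sg_to_lli(struct scatterlist *sg_src,
                                 struct scatterlist *sg_dst,
                                 unsigned int sg_len,
                                 struct example_lli *lli)
    {
            unsigned int i;

            for (i = 0; i < sg_len; i++) {
                    if (!sg_src || !sg_dst)
                            return -EINVAL;
                    lli[i].src = sg_dma_address(sg_src);
                    lli[i].dst = sg_dma_address(sg_dst);
                    lli[i].len = sg_dma_len(sg_src);
                    sg_src = sg_next(sg_src);
                    sg_dst = sg_next(sg_dst);
            }
            return 0;
    }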