/linux-6.3-rc2/arch/arm/crypto/

aes-ce-glue.c
    273  struct scatterlist sg_src[2], sg_dst[2];                            in cts_cbc_encrypt()  local
    301  dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);    in cts_cbc_encrypt()
    331  struct scatterlist sg_src[2], sg_dst[2];                            in cts_cbc_decrypt()  local
    359  dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);    in cts_cbc_decrypt()
    450  struct scatterlist sg_src[2], sg_dst[2];                            in xts_encrypt()  local
    496  dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);      in xts_encrypt()
    522  struct scatterlist sg_src[2], sg_dst[2];                            in xts_decrypt()  local
    568  dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);      in xts_decrypt()
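All of these hits are the same tail-handling idiom: a two-entry scatterlist array on the stack is handed to scatterwalk_ffwd(), which returns a scatterlist describing the request data past the bytes already processed, so the final ciphertext-stealing or XTS tail step can run on just that remainder. A minimal sketch of the idiom, assuming an skcipher request; crypt_tail() and 'processed' are illustrative names, not taken from aes-ce-glue.c:

#include <crypto/scatterwalk.h>
#include <crypto/skcipher.h>

/* Sketch: build short src/dst scatterlists covering only the unprocessed tail. */
static int crypt_tail(struct skcipher_request *req, unsigned int processed)
{
	struct scatterlist sg_src[2], sg_dst[2];	/* ffwd splices a head entry plus a chain entry */
	struct scatterlist *src, *dst;

	/* Skip the 'processed' bytes handled earlier in the request. */
	src = scatterwalk_ffwd(sg_src, req->src, processed);
	dst = src;					/* common in-place case */
	if (req->dst != req->src)
		dst = scatterwalk_ffwd(sg_dst, req->dst, processed);

	/* ... run the final CBC-CTS or XTS step from 'src' to 'dst' ... */
	return 0;
}
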
/linux-6.3-rc2/drivers/crypto/nx/

nx.c
    150  struct scatterlist *sg_src,               in nx_walk_and_build()  argument
    161  scatterwalk_start(&walk, sg_src);         in nx_walk_and_build()
    163  if (start < offset + sg_src->length)      in nx_walk_and_build()
    166  offset += sg_src->length;                 in nx_walk_and_build()
    167  sg_src = sg_next(sg_src);                 in nx_walk_and_build()
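nx_walk_and_build() shows another common pattern: stepping through a scatterlist entry by entry until the entry containing a given byte offset is reached, then starting a scatter walk there. A sketch of that skip loop, modelled directly on the lines above; skip_to_offset() is an illustrative name:

#include <linux/scatterlist.h>
#include <crypto/scatterwalk.h>

/* Sketch: advance sg_src to the entry that contains byte offset 'start'. */
static struct scatterlist *skip_to_offset(struct scatterlist *sg_src,
					  unsigned int start,
					  struct scatter_walk *walk)
{
	unsigned int offset = 0;

	for (;;) {
		scatterwalk_start(walk, sg_src);

		/* 'start' falls inside the current entry: stop here. */
		if (start < offset + sg_src->length)
			break;

		offset += sg_src->length;
		sg_src = sg_next(sg_src);
	}

	return sg_src;
}
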
/linux-6.3-rc2/arch/arm64/crypto/

aes-glue.c
    284  struct scatterlist sg_src[2], sg_dst[2];                            in cts_cbc_encrypt()  local
    311  dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);    in cts_cbc_encrypt()
    341  struct scatterlist sg_src[2], sg_dst[2];                            in cts_cbc_decrypt()  local
    368  dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);    in cts_cbc_decrypt()
    547  struct scatterlist sg_src[2], sg_dst[2];                            in xts_encrypt()  local
    593  dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);      in xts_encrypt()
    619  struct scatterlist sg_src[2], sg_dst[2];                            in xts_decrypt()  local
    665  dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);      in xts_decrypt()

sm4-ce-glue.c
    217  struct scatterlist sg_src[2], sg_dst[2];                            in sm4_cbc_cts_crypt()  local
    244  dst = src = scatterwalk_ffwd(sg_src, src, subreq.cryptlen);         in sm4_cbc_cts_crypt()
    416  struct scatterlist sg_src[2], sg_dst[2];                            in sm4_xts_crypt()  local
    478  dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);    in sm4_xts_crypt()

aes-neonbs-glue.c
    272  struct scatterlist sg_src[2], sg_dst[2];                            in __xts_crypt()  local
    344  dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);      in __xts_crypt()
/linux-6.3-rc2/lib/crypto/

chacha20poly1305-selftest.c
    8891  struct scatterlist sg_src[3];                        in chacha20poly1305_selftest()  local
    8926  sg_init_one(sg_src, computed_output,                 in chacha20poly1305_selftest()
    8928  ret = chacha20poly1305_encrypt_sg_inplace(sg_src,    in chacha20poly1305_selftest()
    8968  sg_init_one(sg_src, computed_output,                 in chacha20poly1305_selftest()
    8970  ret = chacha20poly1305_decrypt_sg_inplace(sg_src,    in chacha20poly1305_selftest()
    9032  sg_init_table(sg_src, 3);                            in chacha20poly1305_selftest()
    9034  sg_set_buf(&sg_src[k++], input, i);                  in chacha20poly1305_selftest()
    9036  sg_set_buf(&sg_src[k++], input + i, j - i);          in chacha20poly1305_selftest()
    9039  sg_init_marker(sg_src, k);                           in chacha20poly1305_selftest()
    9043  if (!chacha20poly1305_encrypt_sg_inplace(sg_src,     in chacha20poly1305_selftest()
    [all …]
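The selftest exercises the in-place scatterlist entry points of the chacha20poly1305 library: a contiguous buffer is described with sg_init_one(), and a deliberately fragmented buffer with sg_init_table() plus sg_set_buf() and sg_init_marker(). A sketch of the contiguous case, assuming the buffer has room for the 16-byte tag after the plaintext; encrypt_one_buffer() and its parameters are illustrative:

#include <linux/scatterlist.h>
#include <crypto/chacha20poly1305.h>

/* Sketch: encrypt 'len' bytes of 'buf' in place; the tag is appended after them. */
static bool encrypt_one_buffer(u8 *buf, size_t len,
			       const u8 *ad, size_t ad_len, u64 nonce,
			       const u8 key[CHACHA20POLY1305_KEY_SIZE])
{
	struct scatterlist sg_src[1];

	/* The scatterlist must cover the plaintext plus the authentication tag. */
	sg_init_one(sg_src, buf, len + CHACHA20POLY1305_AUTHTAG_SIZE);

	return chacha20poly1305_encrypt_sg_inplace(sg_src, len, ad, ad_len,
						   nonce, key);
}
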
/linux-6.3-rc2/arch/x86/crypto/

aegis128-aesni-glue.c
    70   struct aegis_state *state, struct scatterlist *sg_src,    in crypto_aegis128_aesni_process_ad()  argument
    77   scatterwalk_start(&walk, sg_src);                         in crypto_aegis128_aesni_process_ad()

aesni-intel_glue.c
    386  struct scatterlist sg_src[2], sg_dst[2];                            in cts_cbc_encrypt()  local
    413  dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);    in cts_cbc_encrypt()
    442  struct scatterlist sg_src[2], sg_dst[2];                            in cts_cbc_decrypt()  local
    469  dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);    in cts_cbc_decrypt()
    970  struct scatterlist sg_src[2], sg_dst[2];                            in xts_crypt()  local
    973  dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);      in xts_crypt()
/linux-6.3-rc2/drivers/crypto/

s5p-sss.c
    311   struct scatterlist *sg_src;                              member
    512   dma_unmap_sg(dev->dev, dev->sg_src, 1, DMA_TO_DEVICE);   in s5p_unset_indata()
    562   dev->sg_src = sg;                                        in s5p_set_indata()
    602   if (!sg_is_last(dev->sg_src)) {                          in s5p_aes_rx()
    603   ret = s5p_set_indata(dev, sg_next(dev->sg_src));         in s5p_aes_rx()
    747   s5p_set_dma_indata(dev, dev->sg_src);                    in s5p_aes_interrupt()
    1955  s5p_set_dma_indata(dev, dev->sg_src);                    in s5p_aes_crypt_start()
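The s5p-sss hits outline a typical driver-side pattern: the device context keeps a cursor (dev->sg_src) into the request's source scatterlist, DMA-maps one entry at a time, and advances with sg_next() until sg_is_last() says the list is drained. A hedged sketch of that pattern, not the actual s5p-sss.c code; struct my_dev, set_indata() and next_indata() are hypothetical:

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

struct my_dev {				/* hypothetical driver context */
	struct device *dev;
	struct scatterlist *sg_src;	/* current source entry */
};

/* Map one scatterlist entry for the engine and remember it as the cursor. */
static int set_indata(struct my_dev *dd, struct scatterlist *sg)
{
	if (!dma_map_sg(dd->dev, sg, 1, DMA_TO_DEVICE))
		return -ENOMEM;

	dd->sg_src = sg;
	return 0;
}

/* Unmap the drained entry and step the cursor to the next one, if any. */
static int next_indata(struct my_dev *dd)
{
	dma_unmap_sg(dd->dev, dd->sg_src, 1, DMA_TO_DEVICE);

	if (sg_is_last(dd->sg_src))
		return -ENODATA;

	return set_indata(dd, sg_next(dd->sg_src));
}
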
/linux-6.3-rc2/crypto/

hctr2.c
    64   struct scatterlist sg_src[2];                                     member
    283  rctx->bulk_part_src = scatterwalk_ffwd(rctx->sg_src, req->src,    in hctr2_crypt()

aegis128-core.c
    277  struct scatterlist *sg_src,          in crypto_aegis128_process_ad()  argument
    285  scatterwalk_start(&walk, sg_src);    in crypto_aegis128_process_ad()
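crypto_aegis128_process_ad() (like its AES-NI counterpart above) uses the scatterwalk helpers to absorb associated data of any scatterlist shape: map each piece, feed it to the cipher state, advance, repeat. A generic sketch of that loop; process_ad() and the absorb() callback are illustrative stand-ins for the algorithm-specific update step:

#include <crypto/scatterwalk.h>

/* Sketch: walk 'assoclen' bytes of associated data, absorbing each mapped chunk. */
static void process_ad(struct scatterlist *sg_src, unsigned int assoclen,
		       void (*absorb)(const u8 *src, unsigned int len))
{
	struct scatter_walk walk;

	scatterwalk_start(&walk, sg_src);
	while (assoclen != 0) {
		unsigned int size = scatterwalk_clamp(&walk, assoclen);
		void *mapped = scatterwalk_map(&walk);

		absorb(mapped, size);

		assoclen -= size;
		scatterwalk_unmap(mapped);
		scatterwalk_advance(&walk, size);
		scatterwalk_done(&walk, 0, assoclen);
	}
}
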
/linux-6.3-rc2/drivers/dma/

ste_dma40.c
    2110  struct scatterlist *sg_src, struct scatterlist *sg_dst,           in d40_prep_sg_log()  argument
    2119  ret = d40_log_sg_to_lli(sg_src, sg_len,                           in d40_prep_sg_log()
    2138  struct scatterlist *sg_src, struct scatterlist *sg_dst,           in d40_prep_sg_phy()  argument
    2151  ret = d40_phy_sg_to_lli(sg_src, sg_len, src_dev_addr,             in d40_prep_sg_phy()
    2208  d40_prep_sg(struct dma_chan *dchan, struct scatterlist *sg_src,   in d40_prep_sg()  argument
    2228  desc = d40_prep_desc(chan, sg_src, sg_len, dma_flags);            in d40_prep_sg()
    2232  if (sg_next(&sg_src[sg_len - 1]) == sg_src)                       in d40_prep_sg()
    2243  ret = d40_prep_sg_log(chan, desc, sg_src, sg_dst,                 in d40_prep_sg()
    2246  ret = d40_prep_sg_phy(chan, desc, sg_src, sg_dst,                 in d40_prep_sg()
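In ste_dma40.c the sg_src/sg_dst pair reaches the driver through its dmaengine prep callbacks. From the client side, such a scatterlist is handed over with the standard dmaengine API; a short sketch of that hand-off, with start_tx() as an illustrative client helper and error handling trimmed:

#include <linux/dmaengine.h>
#include <linux/scatterlist.h>

/* Sketch: submit a memory-to-device slave transfer described by 'sgl'. */
static int start_tx(struct dma_chan *chan, struct scatterlist *sgl,
		    unsigned int sg_len)
{
	struct dma_async_tx_descriptor *desc;

	desc = dmaengine_prep_slave_sg(chan, sgl, sg_len, DMA_MEM_TO_DEV,
				       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!desc)
		return -EINVAL;

	dmaengine_submit(desc);
	dma_async_issue_pending(chan);
	return 0;
}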