/crypto/
scatterwalk.c
    119  if (walk.src.virt.addr != walk.dst.virt.addr)  in memcpy_sglist()
    157  walk->buffer = walk->page;  in skcipher_next_slow()
    172  walk->in.__addr = walk->out.addr;  in skcipher_next_slow()
    185  memcpy(tmp, walk->in.addr, walk->nbytes);  in skcipher_next_copy()
    207  walk->in.__addr = walk->out.__addr;  in skcipher_next_fast()
    236  if (unlikely((walk->in.offset | walk->out.offset) & walk->alignmask)) {  in skcipher_walk_next()
    269  walk->iv = memcpy(iv, walk->iv, walk->ivsize);  in skcipher_copy_iv()
    333  memcpy(walk->out.addr, walk->page, n);  in skcipher_walk_done()
    371  if (walk->iv != walk->oiv)  in skcipher_walk_done()
    372  memcpy(walk->oiv, walk->iv, walk->ivsize);  in skcipher_walk_done()
    [all …]
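The hits above are from the core scatterwalk helpers. As a rough illustration of the consumer-side pattern they support (also visible in the chacha20poly1305.c and aegis128-core.c hits below), here is a minimal sketch of reading a scatterlist source chunk by chunk, assuming the scatterwalk_start()/scatterwalk_next()/scatterwalk_done_src() calls shown in these results; consume_bytes() is a hypothetical placeholder, not a kernel function.

```c
#include <crypto/scatterwalk.h>

/*
 * Sketch: read 'total' bytes from a scatterlist through a scatter_walk,
 * one mapped chunk at a time.  consume_bytes() is a hypothetical consumer.
 */
static void sketch_read_sg(struct scatterlist *sg, unsigned int total,
			   void (*consume_bytes)(const u8 *, unsigned int))
{
	struct scatter_walk walk;

	scatterwalk_start(&walk, sg);
	while (total) {
		unsigned int n = scatterwalk_next(&walk, total);

		consume_bytes(walk.addr, n);	/* walk.addr maps the current chunk */
		scatterwalk_done_src(&walk, n);	/* unmap and advance by n bytes */
		total -= n;
	}
}
```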
|
pcbc.c
     27  u8 *dst = walk->dst.virt.addr;  in crypto_pcbc_encrypt_segment()
     28  u8 * const iv = walk->iv;  in crypto_pcbc_encrypt_segment()
     49  u8 * const iv = walk->iv;  in crypto_pcbc_encrypt_inplace()
     68  struct skcipher_walk walk;  in crypto_pcbc_encrypt() local
     74  while (walk.nbytes) {  in crypto_pcbc_encrypt()
     75  if (walk.src.virt.addr == walk.dst.virt.addr)  in crypto_pcbc_encrypt()
     95  u8 * const iv = walk->iv;  in crypto_pcbc_decrypt_segment()
    116  u8 * const iv = walk->iv;  in crypto_pcbc_decrypt_inplace()
    135  struct skcipher_walk walk;  in crypto_pcbc_decrypt() local
    141  while (walk.nbytes) {  in crypto_pcbc_decrypt()
    [all …]
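pcbc.c (like xctr.c and ctr.c below) dispatches each walk step to a "_segment" or "_inplace" helper depending on whether the walk mapped source and destination to the same buffer. A minimal sketch of that dispatch, assuming the skcipher_walk API shown in these hits; do_segment()/do_inplace() are hypothetical stand-ins for the per-mode helpers.

```c
#include <crypto/internal/skcipher.h>

/* Hypothetical per-mode helpers: return the number of bytes left unprocessed. */
static unsigned int do_segment(struct skcipher_request *req,
			       struct skcipher_walk *walk);
static unsigned int do_inplace(struct skcipher_request *req,
			       struct skcipher_walk *walk);

/* Sketch: the in-place vs. out-of-place dispatch used by the pcbc/ctr/xctr loops. */
static int sketch_mode_crypt(struct skcipher_request *req)
{
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = do_inplace(req, &walk);	/* src and dst map the same data */
		else
			nbytes = do_segment(req, &walk);	/* distinct src and dst buffers */
		/* report the unprocessed tail of this chunk back to the walk */
		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}
```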
|
xctr.c
     38  const u8 *src = walk->src.virt.addr;  in crypto_xctr_crypt_final()
     39  u8 *dst = walk->dst.virt.addr;  in crypto_xctr_crypt_final()
     40  unsigned int nbytes = walk->nbytes;  in crypto_xctr_crypt_final()
     55  u8 *dst = walk->dst.virt.addr;  in crypto_xctr_crypt_segment()
     56  unsigned int nbytes = walk->nbytes;  in crypto_xctr_crypt_segment()
     80  unsigned int nbytes = walk->nbytes;  in crypto_xctr_crypt_inplace()
     81  u8 *data = walk->dst.virt.addr;  in crypto_xctr_crypt_inplace()
    104  struct skcipher_walk walk;  in crypto_xctr_crypt() local
    112  if (walk.src.virt.addr == walk.dst.virt.addr)  in crypto_xctr_crypt()
    119  byte_ctr += walk.nbytes - nbytes;  in crypto_xctr_crypt()
    [all …]
|
acompress.c
    476  walk->slen -= used;  in acomp_walk_done_src()
    489  walk->dlen -= used;  in acomp_walk_done_dst()
    508  walk->in.__addr = (void *)(((u8 *)walk->in.sg) +  in acomp_walk_next_src()
    509  walk->in.offset);  in acomp_walk_next_src()
    525  walk->out.__addr = (void *)(((u8 *)walk->out.sg) +  in acomp_walk_next_dst()
    540  walk->slen = req->slen;  in acomp_walk_virt()
    541  walk->dlen = req->dlen;  in acomp_walk_virt()
    543  if (!walk->slen || !walk->dlen)  in acomp_walk_virt()
    546  walk->flags = 0;  in acomp_walk_virt()
    556  walk->in.offset = 0;  in acomp_walk_virt()
    [all …]
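acompress.c provides the acomp_walk helpers that the zstd.c and deflate.c drivers below use to stream through the source and destination scatterlists of a compression request. A rough sketch of the consumer loop, assuming the acomp_walk_virt()/acomp_walk_next_src()/acomp_walk_next_dst()/acomp_walk_done_src()/acomp_walk_done_dst() calls and the walk.slen field visible in these hits; process_window() is a hypothetical per-window transform, and the simple "consume everything mapped" bookkeeping is a simplification of what the real drivers do.

```c
#include <crypto/internal/acompress.h>

/* Hypothetical per-window transform: returns 0 on success or a -errno. */
static int process_window(const u8 *src, unsigned int slen,
			  u8 *dst, unsigned int dlen);

/* Sketch: walk an acomp_req one mapped (src, dst) window pair at a time. */
static int sketch_acomp_stream(struct acomp_req *req)
{
	struct acomp_walk walk;
	int err;

	err = acomp_walk_virt(&walk, req, true);
	if (err)
		return err;

	do {
		unsigned int dcur = acomp_walk_next_dst(&walk);	/* map next dst window */
		unsigned int scur;

		if (!dcur)
			return -ENOSPC;				/* destination exhausted */

		scur = acomp_walk_next_src(&walk);		/* map next src window */
		err = process_window(walk.src.virt.addr, scur,
				     walk.dst.virt.addr, dcur);

		acomp_walk_done_src(&walk, scur);		/* scur source bytes consumed */
		acomp_walk_done_dst(&walk, dcur);		/* dcur destination bytes used */
	} while (!err && walk.slen);				/* walk.slen: source bytes left */

	return err;
}
```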
|
zstd.c
    109  struct acomp_walk walk;  in zstd_compress() local
    130  dcur = acomp_walk_next_dst(&walk);  in zstd_compress()
    141  scur = acomp_walk_next_src(&walk);  in zstd_compress()
    145  acomp_walk_done_src(&walk, scur);  in zstd_compress()
    146  acomp_walk_done_dst(&walk, dcur);  in zstd_compress()
    152  inbuf.src = walk.src.virt.addr;  in zstd_compress()
    174  acomp_walk_done_dst(&walk, dcur);  in zstd_compress()
    219  struct acomp_walk walk;  in zstd_decompress() local
    239  scur = acomp_walk_next_src(&walk);  in zstd_decompress()
    243  inbuf.src = walk.src.virt.addr;  in zstd_decompress()
    [all …]
|
deflate.c
     65  struct acomp_walk walk;  in deflate_compress_one() local
     75  dcur = acomp_walk_next_dst(&walk);  in deflate_compress_one()
     80  stream->next_out = walk.dst.virt.addr;  in deflate_compress_one()
     89  scur = acomp_walk_next_src(&walk);  in deflate_compress_one()
    101  acomp_walk_done_src(&walk, scur);  in deflate_compress_one()
    105  acomp_walk_done_dst(&walk, dcur);  in deflate_compress_one()
    145  struct acomp_walk walk;  in deflate_decompress_one() local
    158  scur = acomp_walk_next_src(&walk);  in deflate_decompress_one()
    167  dcur = acomp_walk_next_dst(&walk);  in deflate_decompress_one()
    179  acomp_walk_done_dst(&walk, dcur);  in deflate_decompress_one()
    [all …]
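As the stream->next_out hit shows, the deflate driver points the zlib z_stream buffers straight at the mapped walk windows. A reduced, single-window sketch of that plumbing, assuming <linux/zlib.h> and the acomp_walk calls above; the stream is assumed to be initialised elsewhere, and the outer retry loop and detailed error handling are trimmed.

```c
#include <linux/zlib.h>
#include <crypto/internal/acompress.h>

/*
 * Sketch: feed one acomp_walk window pair into zlib_deflate(), in the
 * spirit of deflate_compress_one().  'stream' is assumed to have been
 * set up with zlib_deflateInit2() beforehand.
 */
static int sketch_deflate_window(struct z_stream_s *stream,
				 struct acomp_walk *walk)
{
	unsigned int dcur, scur;
	int ret;

	dcur = acomp_walk_next_dst(walk);
	if (!dcur)
		return -ENOSPC;

	stream->next_out = walk->dst.virt.addr;	/* compress straight into the dst window */
	stream->avail_out = dcur;

	scur = acomp_walk_next_src(walk);
	stream->next_in = walk->src.virt.addr;	/* read straight from the src window */
	stream->avail_in = scur;

	ret = zlib_deflate(stream, Z_FINISH);

	acomp_walk_done_src(walk, scur - stream->avail_in);	/* bytes actually consumed */
	acomp_walk_done_dst(walk, dcur);

	return ret == Z_STREAM_END ? 0 : -EINVAL;	/* simplification for the sketch */
}
```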
|
ctr.c
     33  u8 *ctrblk = walk->iv;  in crypto_ctr_crypt_final()
     37  u8 *dst = walk->dst.virt.addr;  in crypto_ctr_crypt_final()
     38  unsigned int nbytes = walk->nbytes;  in crypto_ctr_crypt_final()
     52  u8 *ctrblk = walk->iv;  in crypto_ctr_crypt_segment()
     54  u8 *dst = walk->dst.virt.addr;  in crypto_ctr_crypt_segment()
     80  u8 *dst = walk->dst.virt.addr;  in crypto_ctr_crypt_inplace()
     81  u8 *ctrblk = walk->iv;  in crypto_ctr_crypt_inplace()
    104  struct skcipher_walk walk;  in crypto_ctr_crypt() local
    110  while (walk.nbytes >= bsize) {  in crypto_ctr_crypt()
    111  if (walk.src.virt.addr == walk.dst.virt.addr)  in crypto_ctr_crypt()
    [all …]
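The ctr.c hits show that the running counter block lives in walk->iv and that each walk step XORs keystream over walk->nbytes bytes of the mapped chunk. A minimal sketch of that keystream step, using the crypto_cipher single-block API plus the crypto_xor_cpy() and crypto_inc() helpers; those two helpers are my assumption here (they are not part of the hits shown), and the fixed 16-byte keystream buffer is a sketch simplification.

```c
#include <linux/minmax.h>
#include <crypto/algapi.h>
#include <crypto/utils.h>
#include <crypto/internal/cipher.h>
#include <crypto/internal/skcipher.h>

/*
 * Sketch: generate keystream from the counter block in walk->iv and XOR
 * it over the current walk chunk, advancing the counter per full block.
 */
static void sketch_ctr_xor(struct skcipher_walk *walk,
			   struct crypto_cipher *cipher)
{
	unsigned int bsize = crypto_cipher_blocksize(cipher);
	u8 keystream[16];		/* sketch assumption: bsize <= 16 */
	u8 *ctrblk = walk->iv;		/* running counter block kept in the walk IV */
	const u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	while (nbytes) {
		unsigned int n = min(nbytes, bsize);

		crypto_cipher_encrypt_one(cipher, keystream, ctrblk);
		crypto_xor_cpy(dst, keystream, src, n);	/* dst = keystream ^ src */
		crypto_inc(ctrblk, bsize);		/* big-endian counter increment */

		src += n;
		dst += n;
		nbytes -= n;
	}
}
```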
|
ahash.c
     79  walk->data = kmap_local_page(walk->pg);  in hash_walk_next()
     89  sg = walk->sg;  in hash_walk_new_entry()
     91  walk->pg = nth_page(sg_page(walk->sg), (walk->offset >> PAGE_SHIFT));  in hash_walk_new_entry()
     92  walk->offset = offset_in_page(walk->offset);  in hash_walk_new_entry()
     95  if (walk->entrylen > walk->total)  in hash_walk_new_entry()
     96  walk->entrylen = walk->total;  in hash_walk_new_entry()
     97  walk->total -= walk->entrylen;  in hash_walk_new_entry()
    108  if (!walk->total)  in crypto_hash_walk_first()
    130  walk->data -= walk->offset;  in crypto_hash_walk_done()
    140  walk->pg++;  in crypto_hash_walk_done()
    [all …]
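ahash.c implements the crypto_hash_walk helpers that map an ahash request's scatterlist one page-sized chunk at a time. The canonical consumer loop looks roughly like the sketch below, assuming the crypto_hash_walk_first()/crypto_hash_walk_done() calls and walk.data pointer shown in the hits; crypto_shash_update() is used here purely for illustration of a per-chunk update.

```c
#include <crypto/internal/hash.h>

/*
 * Sketch: iterate over the data of an ahash request one mapped chunk at
 * a time and feed it to a software hash.  crypto_hash_walk_done() takes
 * the result of the previous step; a negative value aborts the walk and
 * the loop ends with either 0 (done) or that error.
 */
static int sketch_hash_update(struct ahash_request *req,
			      struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
```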
|
skcipher.c
     48  walk->total = req->cryptlen;  in skcipher_walk_virt()
     49  walk->nbytes = 0;  in skcipher_walk_virt()
     50  walk->iv = req->iv;  in skcipher_walk_virt()
     51  walk->oiv = req->iv;  in skcipher_walk_virt()
     55  if (unlikely(!walk->total))  in skcipher_walk_virt()
     68  walk->stride = alg->walksize;  in skcipher_walk_virt()
     80  walk->nbytes = 0;  in skcipher_walk_aead_common()
     81  walk->iv = req->iv;  in skcipher_walk_aead_common()
     82  walk->oiv = req->iv;  in skcipher_walk_aead_common()
     86  if (unlikely(!walk->total))  in skcipher_walk_aead_common()
    [all …]
|
aegis128-core.c
    281  struct scatter_walk walk;  in crypto_aegis128_process_ad() local
    288  const u8 *src = walk.addr;  in crypto_aegis128_process_ad()
    321  struct skcipher_walk *walk,  in crypto_aegis128_process_crypt() argument
    329  while (walk->nbytes) {  in crypto_aegis128_process_crypt()
    332  if (nbytes < walk->total)  in crypto_aegis128_process_crypt()
    335  crypt(state, walk->dst.virt.addr, walk->src.virt.addr, nbytes);  in crypto_aegis128_process_crypt()
    337  err = skcipher_walk_done(walk, walk->nbytes - nbytes);  in crypto_aegis128_process_crypt()
    393  struct skcipher_walk walk;  in crypto_aegis128_encrypt_generic() local
    416  struct skcipher_walk walk;  in crypto_aegis128_decrypt_generic() local
    454  struct skcipher_walk walk;  in crypto_aegis128_encrypt_simd() local
    [all …]
|
chacha.c
     52  struct skcipher_walk walk;  in chacha_stream_xor() local
     56  err = skcipher_walk_virt(&walk, req, false);  in chacha_stream_xor()
     60  while (walk.nbytes > 0) {  in chacha_stream_xor()
     61  unsigned int nbytes = walk.nbytes;  in chacha_stream_xor()
     63  if (nbytes < walk.total)  in chacha_stream_xor()
     67  chacha_crypt(&state, walk.dst.virt.addr,  in chacha_stream_xor()
     68  walk.src.virt.addr, nbytes, ctx->nrounds);  in chacha_stream_xor()
     70  chacha_crypt_generic(&state, walk.dst.virt.addr,  in chacha_stream_xor()
     71  walk.src.virt.addr, nbytes,  in chacha_stream_xor()
     73  err = skcipher_walk_done(&walk, walk.nbytes - nbytes);  in chacha_stream_xor()
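The chacha.c hits show the canonical skcipher_walk loop for a stream cipher (aegis128-core.c above uses the same shape): map a chunk, round a non-final chunk down to the walk stride, encrypt, and report the unprocessed remainder to skcipher_walk_done(). A condensed sketch of that loop, assuming the skcipher_walk API visible in these hits; stream_xor_chunk() is a hypothetical stand-in for the per-chunk cipher call such as chacha_crypt().

```c
#include <linux/math.h>
#include <crypto/internal/skcipher.h>

/* Hypothetical per-chunk stream-cipher step (e.g. a chacha_crypt() call). */
static void stream_xor_chunk(void *state, u8 *dst, const u8 *src,
			     unsigned int nbytes);

/* Sketch: standard skcipher_walk loop for a stream cipher with a fixed stride. */
static int sketch_stream_xor(struct skcipher_request *req, void *state,
			     unsigned int stride)
{
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes > 0) {
		unsigned int nbytes = walk.nbytes;

		if (nbytes < walk.total)
			nbytes = round_down(nbytes, stride);	/* keep partial blocks for the final chunk */

		stream_xor_chunk(state, walk.dst.virt.addr,
				 walk.src.virt.addr, nbytes);

		/* report how many bytes of this chunk were *not* processed */
		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
	}

	return err;
}
```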
|
chacha20poly1305.c
    136  struct scatter_walk walk;  in poly_hash() local
    156  scatterwalk_start(&walk, sg);  in poly_hash()
    160  unsigned int n = scatterwalk_next(&walk, total);  in poly_hash()
    162  poly1305_update(&desc, walk.addr, n);  in poly_hash()
    163  scatterwalk_done_src(&walk, n);  in poly_hash()
    170  scatterwalk_skip(&walk, req->assoclen - rctx->assoclen);  in poly_hash()
    174  unsigned int n = scatterwalk_next(&walk, total);  in poly_hash()
    176  poly1305_update(&desc, walk.addr, n);  in poly_hash()
    177  scatterwalk_done_src(&walk, n);  in poly_hash()
    193  memcpy_to_scatterwalk(&walk, rctx->tag, sizeof(rctx->tag));  in poly_hash()
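Besides hashing the mapped chunks, poly_hash() uses the same scatter_walk to skip over data it has already handled and to copy the computed tag back into the scatterlist. A minimal sketch of that tail step, assuming the scatterwalk_start()/scatterwalk_skip()/memcpy_to_scatterwalk() calls shown in the hits; the offset parameter and 16-byte tag buffer here are illustrative, not the driver's actual bookkeeping.

```c
#include <crypto/scatterwalk.h>

/*
 * Sketch: position a scatter_walk 'offset' bytes into 'sg' and write a
 * 16-byte tag there, in the style of the poly_hash() tail above.
 */
static void sketch_write_tag(struct scatterlist *sg, unsigned int offset,
			     const u8 tag[16])
{
	struct scatter_walk walk;

	scatterwalk_start(&walk, sg);
	scatterwalk_skip(&walk, offset);	/* advance past data already handled */
	memcpy_to_scatterwalk(&walk, tag, 16);	/* copy the tag into the scatterlist */
}
```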
|
lskcipher.c
    168  struct skcipher_walk walk;  in crypto_lskcipher_crypt_sg() local
    185  err = skcipher_walk_virt(&walk, req, false);  in crypto_lskcipher_crypt_sg()
    187  while (walk.nbytes) {  in crypto_lskcipher_crypt_sg()
    188  err = crypt(tfm, walk.src.virt.addr, walk.dst.virt.addr,  in crypto_lskcipher_crypt_sg()
    189  walk.nbytes, ivs,  in crypto_lskcipher_crypt_sg()
    190  flags & ~(walk.nbytes == walk.total ?  in crypto_lskcipher_crypt_sg()
    192  err = skcipher_walk_done(&walk, err);  in crypto_lskcipher_crypt_sg()
|