
Searched refs:rctx (Results 1 – 12 of 12) sorted by relevance

/crypto/
chacha20poly1305.c
     84  u8 tag[sizeof(rctx->tag)];   in poly_verify_tag()
    107  if (rctx->cryptlen == 0)   in chacha_decrypt()
    121  rctx->cryptlen, creq->iv);   in chacha_decrypt()
    158  total = rctx->assoclen;   in poly_hash()
    172  total = rctx->cryptlen;   in poly_hash()
    193  memcpy_to_scatterwalk(&walk, rctx->tag, sizeof(rctx->tag));   in poly_hash()
    210  rctx->assoclen = req->assoclen;   in poly_genkey()
    213  if (rctx->assoclen < 8)   in poly_genkey()
    215  rctx->assoclen -= 8;   in poly_genkey()
    218  memset(rctx->key, 0, sizeof(rctx->key));   in poly_genkey()
    [all …]
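
The poly_genkey() hits illustrate the common AEAD-template pattern: per-request state lives in the context block that the crypto core reserves behind each aead_request, retrieved with aead_request_ctx(). Below is a minimal sketch of just that pattern; my_req_ctx and my_crypt are hypothetical names, not the real chacha20poly1305.c types.

#include <linux/errno.h>
#include <crypto/internal/aead.h>

/* Hypothetical per-request context for illustration only. */
struct my_req_ctx {
	unsigned int assoclen;	/* AAD length, possibly trimmed */
	unsigned int cryptlen;	/* payload length */
};

static int my_crypt(struct aead_request *req)
{
	struct my_req_ctx *rctx = aead_request_ctx(req);

	/* Stash the lengths so later asynchronous stages can reuse them. */
	rctx->assoclen = req->assoclen;
	rctx->cryptlen = req->cryptlen;

	/* An rfc7539esp-style wrapper strips the 8-byte ESP IV from the AAD. */
	if (rctx->assoclen < 8)
		return -EINVAL;
	rctx->assoclen -= 8;

	return 0;
}
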
adiantum.c
    290  if (!rctx->enc)   in adiantum_finish()
    292  rctx->rbuf.bytes);   in adiantum_finish()
    300  le128_sub(&rctx->rbuf.bignum, &rctx->rbuf.bignum, &rctx->header_hash);   in adiantum_finish()
    312  le128_sub(&rctx->rbuf.bignum, &rctx->rbuf.bignum, &digest);   in adiantum_finish()
    321  le128_sub(&rctx->rbuf.bignum, &rctx->rbuf.bignum, &digest);   in adiantum_finish()
    353  rctx->enc = enc;   in adiantum_crypt()
    378  le128_add(&rctx->rbuf.bignum, &rctx->rbuf.bignum, &rctx->header_hash);   in adiantum_crypt()
    379  le128_add(&rctx->rbuf.bignum, &rctx->rbuf.bignum, &digest);   in adiantum_crypt()
    384  rctx->rbuf.bytes);   in adiantum_crypt()
    390  rctx->rbuf.words[5] = 0;   in adiantum_crypt()
    [all …]
xts.c
     90  le128 t = rctx->t;   in xts_xor_tweak()
     94  req = &rctx->subreq;   in xts_xor_tweak()
    113  rctx->t = t;   in xts_xor_tweak()
    118  gf128mul_x_ble(&rctx->t, &t);   in xts_xor_tweak()
    152  le128_xor(&b, &rctx->t, &b);   in xts_cts_done()
    166  struct skcipher_request *subreq = &rctx->subreq;   in xts_cts_final()
    171  rctx->tail = scatterwalk_ffwd(rctx->sg, req->dst,   in xts_cts_final()
    178  le128_xor(b, &rctx->t, b);   in xts_cts_final()
    185  skcipher_request_set_crypt(subreq, rctx->tail, rctx->tail,   in xts_cts_final()
    193  le128_xor(b, &rctx->t, b);   in xts_cts_final()
    [all …]
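
The xts.c hits (and, in the same spirit, the le128_add/le128_sub lines in adiantum.c and the be128 tweak in lrw.c) show another common use of the request context: carrying a running tweak across successive chunks of the walk. A simplified sketch of that idea, operating on a flat array of blocks instead of the real scatterlist walk; my_xts_req_ctx and my_xor_tweak are made-up names.

#include <crypto/b128ops.h>
#include <crypto/gf128mul.h>

struct my_xts_req_ctx {
	le128 t;	/* running tweak, advanced block by block */
};

static void my_xor_tweak(struct my_xts_req_ctx *rctx, le128 *blocks,
			 unsigned int nblocks)
{
	le128 t = rctx->t;
	unsigned int i;

	for (i = 0; i < nblocks; i++) {
		le128_xor(&blocks[i], &t, &blocks[i]);	/* data ^= tweak */
		gf128mul_x_ble(&t, &t);			/* tweak *= x */
	}

	/* Remember where we stopped so the next chunk continues the series. */
	rctx->t = t;
}
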
hctr2.c
     94  struct hctr2_request_ctx *rctx)   in hctr2_hashed_tweak() argument
     96  return (u8 *)rctx + tctx->hashed_tweak_offset;   in hctr2_hashed_tweak()
    175  struct shash_desc *hash_desc = &rctx->u.hash_desc;   in hctr2_hash_tweak()
    198  struct shash_desc *hash_desc = &rctx->u.hash_desc;   in hctr2_hash_message()
    235  struct shash_desc *hash_desc = &rctx->u.hash_desc;   in hctr2_finish()
    283  rctx->bulk_part_src = scatterwalk_ffwd(rctx->sg_src, req->src,   in hctr2_crypt()
    285  rctx->bulk_part_dst = scatterwalk_ffwd(rctx->sg_dst, req->dst,   in hctr2_crypt()
    314  skcipher_request_set_crypt(&rctx->u.xctr_req, rctx->bulk_part_src,   in hctr2_crypt()
    315  rctx->bulk_part_dst, bulk_len,   in hctr2_crypt()
    316  rctx->xctr_iv);   in hctr2_crypt()
    [all …]
cts.c
     67  struct crypto_cts_reqctx *rctx = skcipher_request_ctx(req);   in crypto_cts_reqctx_space() local
    100  struct crypto_cts_reqctx *rctx = skcipher_request_ctx(req);   in cts_cbc_encrypt() local
    102  struct skcipher_request *subreq = &rctx->subreq;   in cts_cbc_encrypt()
    109  offset = rctx->offset;   in cts_cbc_encrypt()
    112  sg = scatterwalk_ffwd(rctx->sg, req->dst, offset - bsize);   in cts_cbc_encrypt()
    148  struct skcipher_request *subreq = &rctx->subreq;   in crypto_cts_encrypt()
    168  rctx->offset = offset;   in crypto_cts_encrypt()
    183  struct skcipher_request *subreq = &rctx->subreq;   in cts_cbc_decrypt()
    191  offset = rctx->offset;   in cts_cbc_decrypt()
    242  struct skcipher_request *subreq = &rctx->subreq;   in crypto_cts_decrypt()
    [all …]
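
The cts.c lines show why the request context also carries a small scatterlist and an embedded subrequest: scatterwalk_ffwd() skips over the part already handled, and the subrequest then covers only the tail. A hedged sketch of that step with a hypothetical my_cts_req_ctx standing in for the real crypto_cts_reqctx layout:

#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>

struct my_cts_req_ctx {
	struct scatterlist sg[2];
	unsigned int offset;
	struct skcipher_request subreq;	/* keep last: variable-size ctx */
};

static void my_cts_setup_tail(struct skcipher_request *req,
			      unsigned int tail_len, u8 *iv)
{
	struct my_cts_req_ctx *rctx = skcipher_request_ctx(req);
	struct skcipher_request *subreq = &rctx->subreq;
	struct scatterlist *sg;

	/* Skip the bulk already processed; sg now points at the tail. */
	sg = scatterwalk_ffwd(rctx->sg, req->dst, rctx->offset);

	/* Run the final one or two blocks in place through the subrequest. */
	skcipher_request_set_crypt(subreq, sg, sg, tail_len, iv);
}
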
lrw.c
    145  struct lrw_request_ctx *rctx = skcipher_request_ctx(req);   in lrw_xor_tweak() local
    146  be128 t = rctx->t;   in lrw_xor_tweak()
    153  req = &rctx->subreq;   in lrw_xor_tweak()
    215  rctx->subreq.base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;   in lrw_crypt_done()
    226  struct lrw_request_ctx *rctx = skcipher_request_ctx(req);   in lrw_init_crypt() local
    227  struct skcipher_request *subreq = &rctx->subreq;   in lrw_init_crypt()
    237  memcpy(&rctx->t, req->iv, sizeof(rctx->t));   in lrw_init_crypt()
    240  gf128mul_64k_bbe(&rctx->t, ctx->table);   in lrw_init_crypt()
    245  struct lrw_request_ctx *rctx = skcipher_request_ctx(req);   in lrw_encrypt() local
    246  struct skcipher_request *subreq = &rctx->subreq;   in lrw_encrypt()
    [all …]
essiv.c
    177  kfree(rctx->assoc);   in essiv_aead_done()
    199  rctx->assoc = NULL;   in essiv_aead_crypt()
    219  sg_init_table(rctx->sg, 4);   in essiv_aead_crypt()
    226  rctx->assoc = kmalloc(ssize, GFP_ATOMIC);   in essiv_aead_crypt()
    227  if (!rctx->assoc)   in essiv_aead_crypt()
    232  sg_set_buf(rctx->sg, rctx->assoc, ssize);   in essiv_aead_crypt()
    238  sg_set_buf(rctx->sg + 1, iv, ivsize);   in essiv_aead_crypt()
    240  if (sg != rctx->sg + 2)   in essiv_aead_crypt()
    241  sg_chain(rctx->sg, 3, sg);   in essiv_aead_crypt()
    243  src = rctx->sg;   in essiv_aead_crypt()
    [all …]
cryptd.c
    250  struct skcipher_request *subreq = &rctx->req;   in cryptd_skcipher_prepare()
    497  rctx->complete = req->base.complete;   in cryptd_hash_enqueue()
    498  rctx->data = req->base.data;   in cryptd_hash_enqueue()
    510  req->base.complete = rctx->complete;   in cryptd_hash_prepare()
    511  req->base.data = rctx->data;   in cryptd_hash_prepare()
    516  return &rctx->desc;   in cryptd_hash_prepare()
    748  struct cryptd_aead_request_ctx *rctx;   in cryptd_aead_crypt() local
    754  rctx = aead_request_ctx(req);   in cryptd_aead_crypt()
    755  subreq = &rctx->req;   in cryptd_aead_crypt()
    820  struct aead_request *subreq = &rctx->req;   in cryptd_aead_enqueue()
    [all …]
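
cryptd.c uses the request context for yet another purpose: before a request is queued to the worker, the caller's completion callback and data are parked in rctx and put back when the worker picks the request up again. A sketch of just that save/restore step, with hypothetical my_hash_* names rather than the real cryptd helpers:

#include <linux/crypto.h>
#include <crypto/internal/hash.h>

struct my_hash_req_ctx {
	crypto_completion_t complete;	/* caller's completion callback */
	void *data;			/* caller's completion argument */
	struct shash_desc desc;		/* keep last: variable-size ctx */
};

static void my_hash_save_callback(struct ahash_request *req)
{
	struct my_hash_req_ctx *rctx = ahash_request_ctx(req);

	/* Remember who to notify once the async worker has finished. */
	rctx->complete = req->base.complete;
	rctx->data = req->base.data;
}

static void my_hash_restore_callback(struct ahash_request *req)
{
	struct my_hash_req_ctx *rctx = ahash_request_ctx(req);

	/* Put the caller's completion back before signalling completion. */
	req->base.complete = rctx->complete;
	req->base.data = rctx->data;
}
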
rmd160.c
    256  rctx->byte_count = 0;   in rmd160_init()
    258  rctx->state[0] = RMD_H0;   in rmd160_init()
    259  rctx->state[1] = RMD_H1;   in rmd160_init()
    260  rctx->state[2] = RMD_H2;   in rmd160_init()
    261  rctx->state[3] = RMD_H3;   in rmd160_init()
    262  rctx->state[4] = RMD_H4;   in rmd160_init()
    274  rctx->byte_count += len - remain;   in rmd160_update()
    278  rmd160_transform(rctx->state, buffer);   in rmd160_update()
    301  rctx->byte_count += len;   in rmd160_finup()
    310  rmd160_transform(rctx->state,   in rmd160_finup()
    [all …]
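
The rmd160.c matches are a different flavour of rctx: here it is simply the hash's own descriptor context (chaining state plus byte count), not a wrapper around an inner request. A sketch of that shape; my_hash_ctx/my_hash_init are illustrative, and the initial words below are the standard RIPEMD-160 constants that the RMD_H0..RMD_H4 macros above refer to.

#include <crypto/hash.h>
#include <crypto/internal/hash.h>

struct my_hash_ctx {
	u64 byte_count;		/* total bytes fed to the hash so far */
	u32 state[5];		/* chaining value, updated per 64-byte block */
};

static int my_hash_init(struct shash_desc *desc)
{
	struct my_hash_ctx *rctx = shash_desc_ctx(desc);

	rctx->byte_count = 0;
	rctx->state[0] = 0x67452301UL;	/* RMD_H0 */
	rctx->state[1] = 0xefcdab89UL;	/* RMD_H1 */
	rctx->state[2] = 0x98badcfeUL;	/* RMD_H2 */
	rctx->state[3] = 0x10325476UL;	/* RMD_H3 */
	rctx->state[4] = 0xc3d2e1f0UL;	/* RMD_H4 */
	return 0;
}
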
ccm.c
    601  struct aead_request *subreq = &rctx->subreq;   in crypto_rfc4309_crypt()
    617  sg_init_table(rctx->src, 3);   in crypto_rfc4309_crypt()
    618  sg_set_buf(rctx->src, iv + 16, req->assoclen - 8);   in crypto_rfc4309_crypt()
    620  if (sg != rctx->src + 1)   in crypto_rfc4309_crypt()
    621  sg_chain(rctx->src, 2, sg);   in crypto_rfc4309_crypt()
    624  sg_init_table(rctx->dst, 3);   in crypto_rfc4309_crypt()
    625  sg_set_buf(rctx->dst, iv + 16, req->assoclen - 8);   in crypto_rfc4309_crypt()
    627  if (sg != rctx->dst + 1)   in crypto_rfc4309_crypt()
    628  sg_chain(rctx->dst, 2, sg);   in crypto_rfc4309_crypt()
    634  aead_request_set_crypt(subreq, rctx->src,   in crypto_rfc4309_crypt()
    [all …]
gcm.c
    716  struct crypto_rfc4106_req_ctx *rctx = aead_request_ctx(req);   in crypto_rfc4106_crypt() local
    719  struct aead_request *subreq = &rctx->subreq;   in crypto_rfc4106_crypt()
    730  sg_init_table(rctx->src, 3);   in crypto_rfc4106_crypt()
    733  if (sg != rctx->src + 1)   in crypto_rfc4106_crypt()
    734  sg_chain(rctx->src, 2, sg);   in crypto_rfc4106_crypt()
    737  sg_init_table(rctx->dst, 3);   in crypto_rfc4106_crypt()
    740  if (sg != rctx->dst + 1)   in crypto_rfc4106_crypt()
    741  sg_chain(rctx->dst, 2, sg);   in crypto_rfc4106_crypt()
    747  aead_request_set_crypt(subreq, rctx->src,   in crypto_rfc4106_crypt()
    748  req->src == req->dst ? rctx->src : rctx->dst,   in crypto_rfc4106_crypt()
    [all …]
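
The ccm.c, gcm.c and essiv.c hits all build the same structure: a short scatterlist inside the request context holding rewritten associated data, with the caller's original list chained on behind it. A minimal sketch of that chaining, with hypothetical names (my_rfc_req_ctx, my_build_src) and a fixed-size header buffer standing in for the real IV handling:

#include <linux/bug.h>
#include <linux/scatterlist.h>
#include <crypto/internal/aead.h>
#include <crypto/scatterwalk.h>

struct my_rfc_req_ctx {
	struct scatterlist src[3];
	u8 assoc_hdr[24];	/* rewritten head of the AAD lives here */
};

static struct scatterlist *my_build_src(struct aead_request *req,
					unsigned int hdr_len)
{
	struct my_rfc_req_ctx *rctx = aead_request_ctx(req);
	struct scatterlist *sg;

	if (WARN_ON(hdr_len > sizeof(rctx->assoc_hdr)))
		return NULL;

	sg_init_table(rctx->src, 3);
	/* Entry 0: the rewritten AAD head kept inside the request ctx. */
	sg_set_buf(rctx->src, rctx->assoc_hdr, hdr_len);

	/* Fast-forward past the original AAD in the caller's list ... */
	sg = scatterwalk_ffwd(rctx->src + 1, req->src, req->assoclen);
	/* ... and chain it on unless it already landed in our table. */
	if (sg != rctx->src + 1)
		sg_chain(rctx->src, 2, sg);

	return rctx->src;
}
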
ctr.c
    196  struct crypto_rfc3686_req_ctx *rctx =   in crypto_rfc3686_crypt() local
    198  struct skcipher_request *subreq = &rctx->subreq;   in crypto_rfc3686_crypt()
    199  u8 *iv = rctx->iv;   in crypto_rfc3686_crypt()
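
Finally, the ctr.c hits show the overall shape most of these templates share: the request context embeds a subrequest plus a private IV buffer, which the outer algorithm fills in before delegating to the inner one. A hedged sketch of an rfc3686-like wrapper; the names and sizes below are illustrative, not the exact crypto_rfc3686_req_ctx definition.

#include <linux/string.h>
#include <asm/byteorder.h>
#include <crypto/internal/skcipher.h>

#define MY_NONCE_SIZE	4
#define MY_IV_SIZE	8
#define MY_CTRBLK_SIZE	16

struct my_ctr_req_ctx {
	u8 iv[MY_CTRBLK_SIZE];
	struct skcipher_request subreq;	/* keep last: variable-size ctx */
};

static int my_ctr_crypt(struct skcipher_request *req, const u8 *nonce,
			struct crypto_skcipher *child)
{
	struct my_ctr_req_ctx *rctx = skcipher_request_ctx(req);
	struct skcipher_request *subreq = &rctx->subreq;
	u8 *iv = rctx->iv;

	/* Counter block = 4-byte nonce || 8-byte per-request IV || ctr = 1. */
	memcpy(iv, nonce, MY_NONCE_SIZE);
	memcpy(iv + MY_NONCE_SIZE, req->iv, MY_IV_SIZE);
	*(__be32 *)(iv + MY_NONCE_SIZE + MY_IV_SIZE) = cpu_to_be32(1);

	/* Hand everything to the inner ctr-mode implementation. */
	skcipher_request_set_tfm(subreq, child);
	skcipher_request_set_callback(subreq, req->base.flags,
				      req->base.complete, req->base.data);
	skcipher_request_set_crypt(subreq, req->src, req->dst,
				   req->cryptlen, iv);

	return crypto_skcipher_encrypt(subreq);
}
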
