Lines matching refs: cryptlen (Tegra security-engine AES/GCM/CCM driver)

60 	unsigned int cryptlen;  (struct member declaration)
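
For orientation, the references below reach this member through a per-request context (rctx). A minimal sketch of the fields involved; the three member names are taken from the listing itself, while the struct name is hypothetical:

        /* Hypothetical layout; only the field names are confirmed below. */
        struct tegra_aead_reqctx_sketch {
                unsigned int assoclen;  /* associated-data length (AEAD) */
                unsigned int authsize;  /* authentication-tag length (AEAD) */
                unsigned int cryptlen;  /* payload (plaintext/ciphertext) bytes */
        };
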
109 offset = req->cryptlen - ctx->ivsize; in tegra_cbc_iv_copyback()
124 num = req->cryptlen / ctx->ivsize; in tegra_aes_update_iv()
125 if (req->cryptlen % ctx->ivsize) in tegra_aes_update_iv()
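
Both IV helpers are driven by cryptlen: CBC chains across requests by saving the last ciphertext block as the next IV, so the copy starts ivsize bytes from the end, while the counter-style update charges one counter increment per block, with a partial tail block still costing one. A sketch, assuming the IV destination buffer and the final counter advance, neither of which is in the listing:

        /* tegra_cbc_iv_copyback() sketch: save the final ciphertext block
         * so the next request can chain from it. */
        unsigned int offset = req->cryptlen - ctx->ivsize;
        scatterwalk_map_and_copy(iv, req->dst, offset, ctx->ivsize, 0);

        /* tegra_aes_update_iv() sketch: counter modes consume one counter
         * value per block; a partial tail block still costs one. */
        unsigned int num = req->cryptlen / ctx->ivsize;
        if (req->cryptlen % ctx->ivsize)
                num++;
        /* ...then advance the counter held in the IV by num (assumed). */
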
273 rctx->len = req->cryptlen; in tegra_aes_do_one_req()
291 scatterwalk_map_and_copy(rctx->datbuf.buf, req->src, 0, req->cryptlen, 0); in tegra_aes_do_one_req()
322 scatterwalk_map_and_copy(rctx->datbuf.buf, req->dst, 0, req->cryptlen, 1); in tegra_aes_do_one_req()
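
These two calls bracket the hardware pass in tegra_aes_do_one_req(): with the out flag 0, scatterwalk_map_and_copy() gathers req->cryptlen bytes from the source scatterlist into a linear DMA bounce buffer, and with out = 1 it scatters the engine's output back. Annotated:

        /* Gather: scatterlist -> linear bounce buffer (out = 0). */
        scatterwalk_map_and_copy(rctx->datbuf.buf, req->src, 0, req->cryptlen, 0);

        /* ...the security engine processes rctx->datbuf in place... */

        /* Scatter: linear buffer -> destination scatterlist (out = 1). */
        scatterwalk_map_and_copy(rctx->datbuf.buf, req->dst, 0, req->cryptlen, 1);
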
489 if (!IS_ALIGNED(req->cryptlen, crypto_skcipher_blocksize(tfm))) { in tegra_aes_crypt()
490 dev_dbg(ctx->se->dev, "invalid length (%d)", req->cryptlen); in tegra_aes_crypt()
493 } else if (req->cryptlen < XTS_BLOCK_SIZE) { in tegra_aes_crypt()
494 dev_dbg(ctx->se->dev, "invalid length (%d)", req->cryptlen); in tegra_aes_crypt()
498 if (!req->cryptlen) in tegra_aes_crypt()
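
Lines 489-498 are tegra_aes_crypt()'s length policy: block modes must receive a whole number of blocks, XTS needs at least one full block, and an empty request succeeds without touching the hardware. A hedged reconstruction; the listing's "} else if" implies a mode split, so the outer guard below is an assumption while the checks themselves are verbatim:

        if (block_mode) {       /* hypothetical guard, e.g. ECB/CBC */
                if (!IS_ALIGNED(req->cryptlen, crypto_skcipher_blocksize(tfm))) {
                        dev_dbg(ctx->se->dev, "invalid length (%d)", req->cryptlen);
                        return -EINVAL;
                }
        } else if (req->cryptlen < XTS_BLOCK_SIZE) {
                dev_dbg(ctx->se->dev, "invalid length (%d)", req->cryptlen);
                return -EINVAL;         /* XTS: at least one full block */
        }

        if (!req->cryptlen)
                return 0;               /* nothing to encrypt or decrypt */
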
653 data_count = (rctx->cryptlen / AES_BLOCK_SIZE); in tegra_gcm_crypt_prep_cmd()
654 res_bits = (rctx->cryptlen % AES_BLOCK_SIZE) * 8; in tegra_gcm_crypt_prep_cmd()
688 SE_ADDR_HI_SZ(rctx->cryptlen); in tegra_gcm_crypt_prep_cmd()
693 SE_ADDR_HI_SZ(rctx->cryptlen); in tegra_gcm_crypt_prep_cmd()
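
tegra_gcm_crypt_prep_cmd() programs the payload as whole AES blocks plus a residual bit count, which is how a non-block-aligned GCM payload is described to the engine; the source and destination DMA descriptors also carry cryptlen in their high address words. The arithmetic, annotated:

        /* Whole 16-byte blocks first, then the leftover bytes in bits. */
        data_count = (rctx->cryptlen / AES_BLOCK_SIZE);
        res_bits   = (rctx->cryptlen % AES_BLOCK_SIZE) * 8;
        /* Lines 688/693: both src and dst descriptors encode the byte
         * count via SE_ADDR_HI_SZ(rctx->cryptlen). */
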
718 if (!rctx->assoclen && !rctx->cryptlen) in tegra_gcm_prep_final_cmd()
726 cpuvaddr[i++] = rctx->cryptlen * 8; in tegra_gcm_prep_final_cmd()
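
tegra_gcm_prep_final_cmd() builds GHASH's trailing length block: GCM hashes len(AAD) and len(payload) in bits, hence the multiply by 8, and line 718 special-cases the fully empty message. A sketch; the companion assoclen word is an assumption, only the cryptlen word appears in the listing:

        /* GCM's final GHASH block holds the bit lengths of AAD and payload. */
        cpuvaddr[i++] = rctx->assoclen * 8;     /* len(AAD) in bits (assumed) */
        cpuvaddr[i++] = rctx->cryptlen * 8;     /* len(payload) in bits */
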
780 rctx->assoclen, rctx->cryptlen, 0); in tegra_gcm_do_crypt()
794 rctx->assoclen, rctx->cryptlen, 1); in tegra_gcm_do_crypt()
817 offset = rctx->assoclen + rctx->cryptlen; in tegra_gcm_do_final()
830 offset = rctx->assoclen + rctx->cryptlen; in tegra_gcm_do_verify()
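
Lines 780-830 all rely on the AEAD buffer layout: AAD first, then payload, then tag. The crypt step copies cryptlen bytes in and out (lines 780/794), and the final/verify steps seek to assoclen + cryptlen, exactly where the tag begins. A sketch of the verify side; the tag buffers are assumptions:

        /* The received tag sits immediately after AAD + ciphertext. */
        offset = rctx->assoclen + rctx->cryptlen;
        scatterwalk_map_and_copy(mac, req->src, offset, rctx->authsize, 0);

        /* Constant-time compare against the tag the engine computed. */
        if (crypto_memneq(computed_mac, mac, rctx->authsize))
                return -EBADMSG;
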
985 return tegra_ccm_set_msg_len(q_ptr, rctx->cryptlen, q); in tegra_ccm_format_nonce()
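
tegra_ccm_format_nonce() finishes the B0 block by encoding the message length big-endian into its last q bytes (q = 15 - L in RFC 3610 terms). The helper's body is not in the listing; a sketch modeled on set_msg_len() from the kernel's generic crypto/ccm.c, which the Tegra helper presumably mirrors:

        static int tegra_ccm_set_msg_len_sketch(u8 *block, unsigned int msglen,
                                                int csize)
        {
                __be32 data;

                memset(block, 0, csize);
                block += csize;

                if (csize >= 4)
                        csize = 4;
                else if (msglen > (1 << (8 * csize)))
                        return -EOVERFLOW;      /* length won't fit in q bytes */

                data = cpu_to_be32(msglen);
                memcpy(block - csize, (u8 *)&data + 4 - csize, csize);
                return 0;
        }
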
1076 rctx->assoclen, rctx->cryptlen, 1); in tegra_ccm_ctr_result()
1080 rctx->assoclen + rctx->cryptlen, in tegra_ccm_ctr_result()
1103 rctx->cryptlen, 0); in tegra_ccm_compute_auth()
1104 offset += rctx->cryptlen; in tegra_ccm_compute_auth()
1105 offset += tegra_ccm_add_padding(rctx->inbuf.buf + offset, rctx->cryptlen); in tegra_ccm_compute_auth()
1132 rctx->assoclen + rctx->cryptlen, in tegra_ccm_do_ctr()
1139 if (rctx->cryptlen) { in tegra_ccm_do_ctr()
1141 rctx->assoclen, rctx->cryptlen, 0); in tegra_ccm_do_ctr()
1142 offset += rctx->cryptlen; in tegra_ccm_do_ctr()
1143 offset += tegra_ccm_add_padding(rctx->inbuf.buf + offset, rctx->cryptlen); in tegra_ccm_do_ctr()
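
tegra_ccm_compute_auth() (lines 1103-1105) and tegra_ccm_do_ctr() (lines 1139-1143) stage data the same way: copy cryptlen payload bytes into the linear buffer, then zero-pad to the next AES block boundary, since both CBC-MAC and the engine's CTR pass operate on whole blocks. The padding helper evidently returns the pad count so the offset keeps advancing; a hypothetical body under that assumption:

        /* Hypothetical tegra_ccm_add_padding(): zero-fill to the next
         * 16-byte boundary and report how many bytes were added. */
        static int tegra_ccm_add_padding_sketch(u8 *buf, unsigned int len)
        {
                unsigned int padlen = round_up(len, AES_BLOCK_SIZE) - len;

                memset(buf, 0, padlen);         /* no-op when already aligned */
                return padlen;
        }
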
1170 rctx->cryptlen = req->cryptlen; in tegra_ccm_crypt_init()
1172 rctx->cryptlen = req->cryptlen - rctx->authsize; in tegra_ccm_crypt_init()
1209 rctx->inbuf.size = rctx->assoclen + rctx->authsize + rctx->cryptlen + 100; in tegra_ccm_do_one_req()
1215 rctx->outbuf.size = rctx->assoclen + rctx->authsize + rctx->cryptlen + 100; in tegra_ccm_do_one_req()
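
tegra_ccm_crypt_init() records the true payload length: on encrypt, req->cryptlen is the plaintext length as-is, while on decrypt it includes the tag, so authsize is subtracted (lines 1170/1172). tegra_ccm_do_one_req() then sizes both bounce buffers for AAD + tag + payload plus a fixed 100-byte slack, presumably headroom for B0, the formatted counter blocks, and block padding. Condensed, with the encrypt flag as an assumed parameter:

        rctx->cryptlen = encrypt ? req->cryptlen
                                 : req->cryptlen - rctx->authsize;

        rctx->inbuf.size  = rctx->assoclen + rctx->authsize + rctx->cryptlen + 100;
        rctx->outbuf.size = rctx->assoclen + rctx->authsize + rctx->cryptlen + 100;
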
1283 rctx->cryptlen = req->cryptlen; in tegra_gcm_do_one_req()
1285 rctx->cryptlen = req->cryptlen - ctx->authsize; in tegra_gcm_do_one_req()
1293 rctx->inbuf.size = rctx->assoclen + rctx->authsize + rctx->cryptlen; in tegra_gcm_do_one_req()
1301 rctx->outbuf.size = rctx->assoclen + rctx->authsize + rctx->cryptlen; in tegra_gcm_do_one_req()
1324 if (rctx->cryptlen) { in tegra_gcm_do_one_req()
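
tegra_gcm_do_one_req() follows the same pattern as the CCM path: decrypt subtracts the tag from req->cryptlen (lines 1283/1285), the buffers are sized exactly with no slack (lines 1293/1301), and line 1324 skips the bulk encrypt/decrypt pass entirely when the payload is empty, leaving the AAD-plus-tag (GMAC-style) flow. Condensed sketch, with the encrypt flag again assumed:

        rctx->cryptlen = encrypt ? req->cryptlen
                                 : req->cryptlen - ctx->authsize;

        rctx->inbuf.size  = rctx->assoclen + rctx->authsize + rctx->cryptlen;
        rctx->outbuf.size = rctx->assoclen + rctx->authsize + rctx->cryptlen;

        if (rctx->cryptlen) {
                /* run the GCTR encrypt/decrypt pass; pure-AAD requests
                 * proceed straight to the final GHASH/tag step */
        }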