/drivers/crypto/tegra/
tegra-se-aes.c
     283  rctx->datbuf.size = rctx->len;  in tegra_aes_do_one_req()
    1083  memcpy(rctx->authdata, rctx->outbuf.buf, rctx->authsize);  in tegra_ccm_ctr_result()
    1099  sg = rctx->encrypt ? rctx->src_sg : rctx->dst_sg;  in tegra_ccm_compute_auth()
    1129  memcpy(rctx->inbuf.buf, rctx->authdata, rctx->authsize);  in tegra_ccm_do_ctr()
    1209  rctx->inbuf.size = rctx->assoclen + rctx->authsize + rctx->cryptlen + 100;  in tegra_ccm_do_one_req()
    1215  rctx->outbuf.size = rctx->assoclen + rctx->authsize + rctx->cryptlen + 100;  in tegra_ccm_do_one_req()
    1293  rctx->inbuf.size = rctx->assoclen + rctx->authsize + rctx->cryptlen;  in tegra_gcm_do_one_req()
    1301  rctx->outbuf.size = rctx->assoclen + rctx->authsize + rctx->cryptlen;  in tegra_gcm_do_one_req()
    1617  memcpy(rctx->datbuf.buf, rctx->residue.buf, rctx->residue.size);  in tegra_cmac_do_update()
    1668  memcpy(rctx->datbuf.buf, rctx->residue.buf, rctx->residue.size);  in tegra_cmac_do_final()
    [all …]
|
tegra-se-hash.c
     440  memcpy(rctx->datbuf.buf, rctx->residue.buf, rctx->residue.size);  in tegra_sha_do_update()
     450  rctx->total_len += rctx->datbuf.size;  in tegra_sha_do_update()
     452  rctx->config = tegra_sha_get_config(rctx->alg) |  in tegra_sha_do_update()
     459  rctx->datbuf.buf, rctx->datbuf.addr);  in tegra_sha_do_update()
     481  memcpy(rctx->datbuf.buf, rctx->residue.buf, rctx->residue.size);  in tegra_sha_do_final()
     484  rctx->datbuf.size = rctx->residue.size;  in tegra_sha_do_final()
     485  rctx->total_len += rctx->residue.size;  in tegra_sha_do_final()
     501  rctx->datbuf.buf, rctx->datbuf.addr);  in tegra_sha_do_final()
     504  rctx->residue.buf, rctx->residue.addr);  in tegra_sha_do_final()
     727  memcpy(out, rctx, sizeof(*rctx));  in tegra_sha_export()
    [all …]
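
Most of the tegra-se-hash.c hits are one pattern: update() calls rarely arrive block-aligned, so the request context carries a residue buffer whose contents get prepended to the next chunk before the engine sees it, and final() flushes whatever is left (lines 481-485). A minimal userspace sketch of that bookkeeping, assuming a 64-byte block; `struct req_ctx` and `engine_process()` are invented stand-ins, not the driver's types, and the real driver keeps the data in DMA-able buffers (`datbuf.addr`), which is omitted here:

```c
#include <stddef.h>
#include <stdint.h>
#include <string.h>

#define BLOCK_SIZE 64 /* SHA-256 block size, for illustration */

struct req_ctx {
	uint8_t  residue[BLOCK_SIZE]; /* partial block carried between calls */
	size_t   residue_size;
	uint64_t total_len;           /* block-aligned bytes fed so far */
};

/* Stand-in for handing a block-aligned buffer to the hardware. */
static void engine_process(struct req_ctx *rctx, const uint8_t *buf, size_t len)
{
	(void)buf;
	rctx->total_len += len;
}

static void hash_update(struct req_ctx *rctx, const uint8_t *data, size_t len)
{
	size_t tail;

	/* Top the residue up to a full block first, if we can. */
	if (rctx->residue_size) {
		size_t fill = BLOCK_SIZE - rctx->residue_size;

		if (len < fill) {
			memcpy(rctx->residue + rctx->residue_size, data, len);
			rctx->residue_size += len;
			return;
		}
		memcpy(rctx->residue + rctx->residue_size, data, fill);
		engine_process(rctx, rctx->residue, BLOCK_SIZE);
		data += fill;
		len -= fill;
		rctx->residue_size = 0;
	}

	/* Feed whole blocks straight through, then park the tail. */
	tail = len % BLOCK_SIZE;
	if (len - tail)
		engine_process(rctx, data, len - tail);
	memcpy(rctx->residue, data + len - tail, tail);
	rctx->residue_size = tail;
}
```

The same `memcpy(rctx->datbuf.buf, rctx->residue.buf, ...)` prologue shows up in the CMAC path of tegra-se-aes.c above (lines 1617/1668), and variants of it recur in the ccp, stm32, and sahara listings below.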
|
/drivers/crypto/qce/
aead.c
     174  rctx->dst_sg = rctx->dst_tbl.sgl;  in qce_aead_prepare_dst_buf()
     249  sg_init_one(&rctx->adata_sg, rctx->adata, rctx->assoclen);  in qce_aead_ccm_prepare_buf_assoclen()
     277  rctx->src_sg = rctx->src_tbl.sgl;  in qce_aead_ccm_prepare_buf_assoclen()
     291  rctx->dst_nents = rctx->src_nents;  in qce_aead_ccm_prepare_buf_assoclen()
     292  rctx->dst_sg = rctx->src_sg;  in qce_aead_ccm_prepare_buf_assoclen()
     324  rctx->src_sg = rctx->dst_sg;  in qce_aead_prepare_buf()
     354  rctx->dst_nents = rctx->src_nents;  in qce_aead_ccm_prepare_buf()
     355  rctx->dst_sg = rctx->src_sg;  in qce_aead_ccm_prepare_buf()
     367  if (!rctx || !rctx->iv)  in qce_aead_create_ccm_nonce()
     396  memcpy(&rctx->ccm_nonce[0], rctx->iv, rctx->ivsize);  in qce_aead_create_ccm_nonce()
    [all …]
|
sha.c
     139  memset(rctx, 0, sizeof(*rctx));  in qce_ahash_init()
     143  memcpy(rctx->digest, std_iv, sizeof(rctx->digest));  in qce_ahash_init()
     153  memcpy(export_state->pending_buf, rctx->buf, rctx->buflen);  in qce_ahash_export()
     170  memset(rctx, 0, sizeof(*rctx));  in qce_ahash_import()
     177  memcpy(rctx->buf, import_state->pending_buf, rctx->buflen);  in qce_ahash_import()
     202  scatterwalk_map_and_copy(rctx->buf + rctx->buflen, req->src,  in qce_ahash_update()
     216  if (rctx->buflen)  in qce_ahash_update()
     217  memcpy(rctx->tmpbuf, rctx->buf, rctx->buflen);  in qce_ahash_update()
     265  sg_set_buf(rctx->sg, rctx->tmpbuf, rctx->buflen);  in qce_ahash_update()
     294  memcpy(rctx->tmpbuf, rctx->buf, rctx->buflen);  in qce_ahash_final()
    [all …]
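
The `qce_ahash_export()`/`qce_ahash_import()` hits show the crypto API's export/import contract: everything needed to resume a hash (the intermediate digest, the byte count, and any buffered partial block) is serialized into a flat state blob, so the operation can be suspended and resumed on a different request. A minimal model of that pairing; all struct and field names here are invented for illustration:

```c
#include <stddef.h>
#include <stdint.h>
#include <string.h>

struct hash_rctx {
	uint32_t digest[8];  /* running intermediate digest */
	uint8_t  buf[64];    /* not-yet-hashed partial block */
	size_t   buflen;
	uint64_t count;      /* total bytes consumed so far */
};

struct hash_state {     /* the flat, self-contained export blob */
	uint32_t digest[8];
	uint8_t  pending_buf[64];
	size_t   buflen;
	uint64_t count;
};

static void ahash_export(const struct hash_rctx *rctx, struct hash_state *out)
{
	memcpy(out->digest, rctx->digest, sizeof(out->digest));
	memcpy(out->pending_buf, rctx->buf, rctx->buflen);
	out->buflen = rctx->buflen;
	out->count = rctx->count;
}

static void ahash_import(struct hash_rctx *rctx, const struct hash_state *in)
{
	memset(rctx, 0, sizeof(*rctx)); /* start clean, as line 170 above does */
	memcpy(rctx->digest, in->digest, sizeof(rctx->digest));
	memcpy(rctx->buf, in->pending_buf, in->buflen);
	rctx->buflen = in->buflen;
	rctx->count = in->count;
}
```

Some drivers in this listing (tegra line 727, keembay line 738) take the shortcut of exporting the whole rctx with one `memcpy(out, rctx, sizeof(*rctx))`, which only works when the context holds no pointers into itself or into DMA memory.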
|
skcipher.c
      50  dma_unmap_sg(qce->dev, rctx->src_sg, rctx->src_nents, dir_src);  in qce_skcipher_done()
      51  dma_unmap_sg(qce->dev, rctx->dst_sg, rctx->dst_nents, dir_dst);  in qce_skcipher_done()
      59  memcpy(rctx->iv, result_buf->encr_cntr_iv, rctx->ivsize);  in qce_skcipher_done()
      77  rctx->iv = req->iv;  in qce_skcipher_async_req_handle()
      89  rctx->dst_nents = rctx->src_nents;  in qce_skcipher_async_req_handle()
     104  ret = sg_alloc_table(&rctx->dst_tbl, rctx->dst_nents, gfp);  in qce_skcipher_async_req_handle()
     116  sg = qce_sgtable_add(&rctx->dst_tbl, &rctx->result_sg,  in qce_skcipher_async_req_handle()
     124  rctx->dst_sg = rctx->dst_tbl.sgl;  in qce_skcipher_async_req_handle()
     140  rctx->src_sg = rctx->dst_sg;  in qce_skcipher_async_req_handle()
     164  dma_unmap_sg(qce->dev, rctx->dst_sg, rctx->dst_nents, dir_dst);  in qce_skcipher_async_req_handle()
    [all …]
|
common.c
     175  auth_cfg = qce_auth_cfg(rctx->flags, rctx->authklen, digestsize);  in qce_setup_regs_ahash()
     178  if (IS_SHA_HMAC(rctx->flags) || IS_CMAC(rctx->flags)) {  in qce_setup_regs_ahash()
     181  qce_cpu_to_be32p_array(mackey, rctx->authkey, rctx->authklen);  in qce_setup_regs_ahash()
     189  if (rctx->first_blk)  in qce_setup_regs_ahash()
     194  iv_words = (IS_SHA1(rctx->flags) || IS_SHA1_HMAC(rctx->flags)) ? 5 : 8;  in qce_setup_regs_ahash()
     197  if (rctx->first_blk)  in qce_setup_regs_ahash()
     205  if (rctx->last_blk)  in qce_setup_regs_ahash()
     210  if (rctx->first_blk)  in qce_setup_regs_ahash()
     514  totallen = rctx->cryptlen + rctx->assoclen;  in qce_setup_regs_aead()
     517  if (IS_CCM(rctx->flags) && IS_DECRYPT(rctx->flags))  in qce_setup_regs_aead()
    [all …]
|
/drivers/crypto/aspeed/
aspeed-hace-hash.c
      73  memcpy(out, rctx->digest, rctx->ivsize);  in aspeed_sham_export()
      94  memcpy(rctx->digest, in, rctx->ivsize);  in aspeed_sham_import()
     170  length = rctx->total - rctx->offset;  in aspeed_ahash_dma_prepare()
     333  if (rctx->total - rctx->offset >= rctx->block_size ||  in aspeed_ahash_complete()
     334  (rctx->total != rctx->offset && rctx->flags & SHA_FLAGS_FINUP))  in aspeed_ahash_complete()
     343  rctx->total - rctx->offset);  in aspeed_ahash_complete()
     392  if (rctx->flags & SHA_FLAGS_FINUP && rctx->total == rctx->offset)  in aspeed_ahash_update_resume_sg()
     444  rctx->total - rctx->offset);  in aspeed_ahash_fallback()
     554  memcpy(rctx->digest, sha1_iv, rctx->ivsize);  in aspeed_sham_init()
     562  memcpy(rctx->digest, sha224_iv, rctx->ivsize);  in aspeed_sham_init()
    [all …]
|
/drivers/crypto/intel/keembay/
keembay-ocs-hcu-core.c
     119  return rctx->sg_data_total + rctx->buf_cnt;  in kmb_get_total_data()
     127  if (rctx->sg_data_total > (sizeof(rctx->buffer) - rctx->buf_cnt)) {  in flush_sg_to_ocs_buffer()
     141  if (rctx->sg_data_offset == rctx->sg->length) {  in flush_sg_to_ocs_buffer()
     142  rctx->sg = sg_next(rctx->sg);  in flush_sg_to_ocs_buffer()
     155  rctx->sg, rctx->sg_data_offset,  in flush_sg_to_ocs_buffer()
     289  rctx->buf_dma_count = rctx->buf_cnt;  in kmb_ocs_dma_prepare()
     412  rctx->buf_cnt = rctx->blk_sz;  in prepare_ipad()
     578  memset(rctx, 0, sizeof(*rctx));  in kmb_ocs_hcu_init()
     650  if (rctx->sg_data_total <= (sizeof(rctx->buffer) - rctx->buf_cnt))  in kmb_ocs_hcu_update()
     738  memcpy(out, rctx, sizeof(*rctx));  in kmb_ocs_hcu_export()
    [all …]
|
keembay-ocs-aes-core.c
     243  memset(rctx, 0, sizeof(*rctx));  in ocs_aes_init_rctx()
     418  if (rctx->cts_swap && rctx->instruction == OCS_DECRYPT)  in kmb_ocs_sk_prepare_inplace()
     433  rctx->dst_dma_count, &rctx->dst_dll,  in kmb_ocs_sk_prepare_inplace()
     442  rctx->src_dll.dma_addr = rctx->dst_dll.dma_addr;  in kmb_ocs_sk_prepare_inplace()
     468  rctx->src_dma_count, &rctx->src_dll,  in kmb_ocs_sk_prepare_notinplace()
     483  rctx->dst_dma_count, &rctx->dst_dll,  in kmb_ocs_sk_prepare_notinplace()
     535  rctx->cts_swap = (rctx->mode == OCS_MODE_CTS &&  in kmb_ocs_sk_run()
     550  rctx->dst_dll.dma_addr, rctx->src_dll.dma_addr,  in kmb_ocs_sk_run()
     809  rctx->dst_dma_count, &rctx->dst_dll,  in kmb_ocs_aead_dma_prepare()
     823  rctx->src_dll.dma_addr = rctx->dst_dll.dma_addr;  in kmb_ocs_aead_dma_prepare()
    [all …]
|
/drivers/crypto/ccp/
ccp-crypto-aes-cmac.c
      38  scatterwalk_map_and_copy(rctx->buf, rctx->src,  in ccp_aes_cmac_complete()
      40  rctx->buf_count = rctx->hash_rem;  in ccp_aes_cmac_complete()
      90  rctx->hash_cnt = len - rctx->hash_rem;  in ccp_do_cmac_update()
     102  sg_init_one(&rctx->iv_sg, rctx->iv, sizeof(rctx->iv));  in ccp_do_cmac_update()
     116  sg_init_one(&rctx->buf_sg, rctx->buf, rctx->buf_count);  in ccp_do_cmac_update()
     137  memset(rctx->pad, 0, sizeof(rctx->pad));  in ccp_do_cmac_update()
     156  memset(&rctx->cmd, 0, sizeof(rctx->cmd));  in ccp_do_cmac_update()
     164  rctx->cmd.u.aes.iv = &rctx->iv_sg;  in ccp_do_cmac_update()
     167  rctx->cmd.u.aes.src_len = rctx->hash_cnt;  in ccp_do_cmac_update()
     187  memset(rctx, 0, sizeof(*rctx));  in ccp_aes_cmac_init()
    [all …]
|
ccp-crypto-sha.c
      43  rctx->buf_count = rctx->hash_rem;  in ccp_sha_complete()
      87  rctx->hash_cnt = len - rctx->hash_rem;  in ccp_do_sha_update()
      95  sg_init_one(&rctx->ctx_sg, rctx->ctx, sizeof(rctx->ctx));  in ccp_do_sha_update()
     109  sg_init_one(&rctx->buf_sg, rctx->buf, rctx->buf_count);  in ccp_do_sha_update()
     124  sg_init_one(&rctx->buf_sg, rctx->buf, rctx->buf_count);  in ccp_do_sha_update()
     136  rctx->cmd.u.sha.type = rctx->type;  in ccp_do_sha_update()
     137  rctx->cmd.u.sha.ctx = &rctx->ctx_sg;  in ccp_do_sha_update()
     166  rctx->cmd.u.sha.first = rctx->first;  in ccp_do_sha_update()
     167  rctx->cmd.u.sha.final = rctx->final;  in ccp_do_sha_update()
     192  memset(rctx, 0, sizeof(*rctx));  in ccp_sha_init()
    [all …]
|
ccp-crypto-aes-xts.c
      70  memcpy(req->iv, rctx->iv, AES_BLOCK_SIZE);  in ccp_aes_xts_complete()
     165  sg_init_one(&rctx->iv_sg, rctx->iv, AES_BLOCK_SIZE);  in ccp_aes_xts_crypt()
     167  memset(&rctx->cmd, 0, sizeof(rctx->cmd));  in ccp_aes_xts_crypt()
     168  INIT_LIST_HEAD(&rctx->cmd.entry);  in ccp_aes_xts_crypt()
     170  rctx->cmd.u.xts.type = CCP_AES_TYPE_128;  in ccp_aes_xts_crypt()
     173  rctx->cmd.u.xts.unit_size = unit_size;  in ccp_aes_xts_crypt()
     176  rctx->cmd.u.xts.iv = &rctx->iv_sg;  in ccp_aes_xts_crypt()
     177  rctx->cmd.u.xts.iv_len = AES_BLOCK_SIZE;  in ccp_aes_xts_crypt()
     178  rctx->cmd.u.xts.src = req->src;  in ccp_aes_xts_crypt()
     179  rctx->cmd.u.xts.src_len = req->cryptlen;  in ccp_aes_xts_crypt()
    [all …]
|
ccp-crypto-aes-galois.c
     107  rctx->iv[i + GCM_AES_IV_SIZE] = 0;  in ccp_aes_gcm_crypt()
     108  rctx->iv[AES_BLOCK_SIZE - 1] = 1;  in ccp_aes_gcm_crypt()
     111  iv_sg = &rctx->iv_sg;  in ccp_aes_gcm_crypt()
     116  memset(&rctx->cmd, 0, sizeof(rctx->cmd));  in ccp_aes_gcm_crypt()
     117  INIT_LIST_HEAD(&rctx->cmd.entry);  in ccp_aes_gcm_crypt()
     118  rctx->cmd.engine = CCP_ENGINE_AES;  in ccp_aes_gcm_crypt()
     122  rctx->cmd.u.aes.action = encrypt;  in ccp_aes_gcm_crypt()
     125  rctx->cmd.u.aes.iv = iv_sg;  in ccp_aes_gcm_crypt()
     126  rctx->cmd.u.aes.iv_len = iv_len;  in ccp_aes_gcm_crypt()
     127  rctx->cmd.u.aes.src = req->src;  in ccp_aes_gcm_crypt()
    [all …]
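
Lines 107-108 above are the standard GCM counter-block construction: with a 96-bit nonce, the initial counter block J0 is IV || 0x00000001 (NIST SP 800-38D), i.e. the 12 IV bytes, three zero bytes, and a final byte of 1. A self-contained sketch of just that step; `GCM_AES_IV_SIZE` (12) and `AES_BLOCK_SIZE` (16) match the kernel's constants, while `gcm_build_j0()` is a name invented here:

```c
#include <stdint.h>
#include <string.h>

#define GCM_AES_IV_SIZE 12
#define AES_BLOCK_SIZE  16

static void gcm_build_j0(uint8_t j0[AES_BLOCK_SIZE],
			 const uint8_t iv[GCM_AES_IV_SIZE])
{
	memcpy(j0, iv, GCM_AES_IV_SIZE);                /* 96-bit nonce */
	memset(j0 + GCM_AES_IV_SIZE, 0,
	       AES_BLOCK_SIZE - GCM_AES_IV_SIZE - 1);   /* zero padding */
	j0[AES_BLOCK_SIZE - 1] = 1;  /* 32-bit big-endian counter = 1 */
}
```

The remaining matches are the same descriptor-building routine seen in the XTS and CMAC files: zero `rctx->cmd`, init its list head, then fill in engine, action, IV, and source before queueing it.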
|
/drivers/crypto/inside-secure/eip93/
eip93-common.c
     208  u32 totlen_src = rctx->assoclen + rctx->textsize;  in check_valid_request()
     209  u32 totlen_dst = rctx->assoclen + rctx->textsize;  in check_valid_request()
     522  u32 datalen = rctx->assoclen + rctx->textsize;  in eip93_send_req()
     643  u32 len = rctx->assoclen + rctx->textsize;  in eip93_unmap_dma()
     649  if (rctx->sg_src == rctx->sg_dst) {  in eip93_unmap_dma()
     650  dma_unmap_sg(eip93->dev, rctx->sg_dst, rctx->dst_nents,  in eip93_unmap_dma()
     655  dma_unmap_sg(eip93->dev, rctx->sg_src, rctx->src_nents,  in eip93_unmap_dma()
     659  eip93_free_sg_copy(len + rctx->authsize, &rctx->sg_src);  in eip93_unmap_dma()
     661  dma_unmap_sg(eip93->dev, rctx->sg_dst, rctx->dst_nents,  in eip93_unmap_dma()
     680  eip93_free_sg_copy(len + rctx->authsize, &rctx->sg_dst);  in eip93_unmap_dma()
    [all …]
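
The `eip93_unmap_dma()` hits (lines 649-661) show the in-place/out-of-place unmap split that several drivers in this listing share (see also the qce skcipher and aead aliasing above): if the request was in-place, src and dst are the same scatterlist, mapped once, so it must be unmapped exactly once; otherwise each list is unmapped with its own direction. A userspace model with stub types; the exact direction flags used by the driver are not visible in the excerpt, so bidirectional for the aliased case is an assumption based on the usual DMA-API convention:

```c
#include <stdio.h>

enum dma_dir { DMA_TO_DEVICE, DMA_FROM_DEVICE, DMA_BIDIRECTIONAL };

struct dma_req {
	void *sg_src, *sg_dst; /* stand-ins for scatterlist pointers */
	int src_nents, dst_nents;
};

/* Stub for dma_unmap_sg(); just records what would happen. */
static void unmap_sg(void *sg, int nents, enum dma_dir dir)
{
	printf("unmap %p nents=%d dir=%d\n", sg, nents, (int)dir);
}

static void req_unmap(struct dma_req *r)
{
	if (r->sg_src == r->sg_dst) {
		/* mapped once bidirectionally -> unmapped once */
		unmap_sg(r->sg_dst, r->dst_nents, DMA_BIDIRECTIONAL);
		return;
	}
	unmap_sg(r->sg_src, r->src_nents, DMA_TO_DEVICE);
	unmap_sg(r->sg_dst, r->dst_nents, DMA_FROM_DEVICE);
}
```

Unmapping an aliased list twice, or with the wrong direction, is the classic bug this structure guards against.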
|
eip93-hash.c
      37  if (rctx->finalize)  in eip93_hash_free_data_blocks()
      39  rctx->data_used,  in eip93_hash_free_data_blocks()
      79  if (rctx->finalize || rctx->partial_hash) {  in eip93_hash_handle_result()
     147  if (!rctx->len)  in eip93_hash_export_sa_state()
     201  rctx->len = 0;  in __eip93_hash_init()
     243  if (rctx->finalize && !rctx->partial_hash) {  in eip93_send_hash_req()
     494  ret = eip93_send_hash_req(async, rctx->data, &rctx->data_dma,  in __eip93_hash_final()
     613  if (rctx->len)  in eip93_hash_import()
     614  memcpy(rctx->data, state->data, rctx->data_used);  in eip93_hash_import()
     625  if (rctx->len)  in eip93_hash_export()
    [all …]
|
/drivers/crypto/cavium/nitrox/
nitrox_aead.c
     166  creq->gph.param1 = cpu_to_be16(rctx->cryptlen + rctx->assoclen);  in nitrox_set_creq()
     167  creq->gph.param2 = cpu_to_be16(rctx->ivsize + rctx->assoclen);  in nitrox_set_creq()
     175  ret = alloc_src_sglist(&rctx->nkreq, rctx->src, rctx->iv, rctx->ivsize,  in nitrox_set_creq()
     180  ret = alloc_dst_sglist(&rctx->nkreq, rctx->dst, rctx->ivsize,  in nitrox_set_creq()
     230  rctx->dstlen = rctx->srclen + aead->authsize;  in nitrox_aes_gcm_enc()
     235  rctx->src = areq->src;  in nitrox_aes_gcm_enc()
     236  rctx->dst = areq->dst;  in nitrox_aes_gcm_enc()
     264  rctx->dstlen = rctx->srclen - aead->authsize;  in nitrox_aes_gcm_dec()
     405  sg_set_buf(rctx->src, rctx->assoc, assoclen);  in nitrox_rfc4106_set_aead_rctx_sglist()
     412  sg_set_buf(rctx->dst, rctx->assoc, assoclen);  in nitrox_rfc4106_set_aead_rctx_sglist()
    [all …]
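
Two details from the nitrox matches are worth pulling out. Lines 230/264 encode the AEAD sizing rule: encryption appends the auth tag to the output, decryption consumes it from the input. Lines 166-167 pack total lengths big-endian into the command header. A sketch of both, where `struct gcm_hdr` is a hypothetical stand-in for the driver's `creq->gph` and `to_be16()` models `cpu_to_be16()` on a little-endian host:

```c
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

/* AEAD output sizing: tag appended on encrypt, stripped on decrypt. */
static size_t aead_dst_len(size_t src_len, size_t authsize, bool encrypt)
{
	return encrypt ? src_len + authsize : src_len - authsize;
}

struct gcm_hdr { uint16_t param1, param2; };

static uint16_t to_be16(uint16_t v) /* byte swap, little-endian host */
{
	return (uint16_t)((v << 8) | (v >> 8));
}

static void fill_gcm_hdr(struct gcm_hdr *h, uint16_t cryptlen,
			 uint16_t assoclen, uint16_t ivsize)
{
	h->param1 = to_be16(cryptlen + assoclen); /* line 166's sum */
	h->param2 = to_be16(ivsize + assoclen);   /* line 167's sum */
}
```

What exactly the hardware does with param1/param2 is not recoverable from the excerpt; only the arithmetic and the byte order are.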
|
/drivers/crypto/starfive/
jh7110-rsa.c
      75  struct starfive_cryp_request_ctx *rctx = ctx->rctx;  in starfive_rsa_montgomery_form() local
      82  rctx->csr.pka.v = 0;  in starfive_rsa_montgomery_form()
      90  rctx->csr.pka.v = 0;  in starfive_rsa_montgomery_form()
     112  rctx->csr.pka.v = 0;  in starfive_rsa_montgomery_form()
     172  struct starfive_cryp_request_ctx *rctx = ctx->rctx;  in starfive_rsa_cpu_start() local
     251  struct starfive_cryp_request_ctx *rctx = ctx->rctx;  in starfive_rsa_enc_core() local
     262  rctx->total = sg_copy_to_buffer(rctx->in_sg, sg_nents(rctx->in_sg),  in starfive_rsa_enc_core()
     263  rctx->rsa_data + shift, rctx->total);  in starfive_rsa_enc_core()
     278  sg_copy_buffer(rctx->out_sg, sg_nents(rctx->out_sg),  in starfive_rsa_enc_core()
     312  ctx->rctx = rctx;  in starfive_rsa_enc()
    [all …]
|
jh7110-hash.c
      66  struct starfive_cryp_request_ctx *rctx = ctx->rctx;  in starfive_hash_hmac_key() local
      74  rctx->csr.hash.hmac = 1;  in starfive_hash_hmac_key()
      75  rctx->csr.hash.key_flag = 1;  in starfive_hash_hmac_key()
     200  struct starfive_cryp_request_ctx *rctx = ctx->rctx;  in starfive_hash_one_request() local
     210  rctx->csr.hash.v = 0;  in starfive_hash_one_request()
     218  rctx->csr.hash.start = 1;  in starfive_hash_one_request()
     224  if (!rctx->total)  in starfive_hash_one_request()
     229  for_each_sg(rctx->in_sg, tsg, rctx->in_sg_len, i) {  in starfive_hash_one_request()
     334  rctx->in_sg = req->src;  in starfive_hash_digest()
     337  rctx->in_sg_len = sg_nents_for_len(rctx->in_sg, rctx->total);  in starfive_hash_digest()
    [all …]
|
/drivers/crypto/bcm/
cipher.c
     345  rctx->total_sent = rctx->src_sent;  in handle_skcipher_req()
     348  rctx->src_nents = spu_sg_count(rctx->src_sg, rctx->src_skip, chunksize);  in handle_skcipher_req()
     349  rctx->dst_nents = spu_sg_count(rctx->dst_sg, rctx->dst_skip, chunksize);  in handle_skcipher_req()
     696  rctx->total_todo, rctx->total_sent);  in handle_ahash_req()
     769  rctx->src_nents = spu_sg_count(rctx->src_sg, rctx->src_skip,  in handle_ahash_req()
     787  if ((rctx->total_sent == rctx->total_todo) && rctx->is_final)  in handle_ahash_req()
    1013  if (rctx->is_final && (rctx->total_sent == rctx->total_todo))  in handle_ahash_resp()
    2649  rctx->dst_sg = rctx->src_sg;  in aead_enqueue()
    2650  rctx->dst_skip = rctx->src_skip;  in aead_enqueue()
    2683  rctx->src_sg, rctx->src_skip);  in aead_enqueue()
    [all …]
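
The bcm `total_todo`/`total_sent` pair is the bookkeeping for splitting one crypto request into several hardware-sized chunks: each submission advances `total_sent`, and the completion handler re-queues until the two counters meet (lines 787 and 1013). A toy version of that carve-off step, with invented names and an assumed per-chunk hardware limit:

```c
#include <stddef.h>

struct chunk_ctx {
	size_t total_todo; /* bytes the request wants processed */
	size_t total_sent; /* bytes already queued to the engine */
};

/* Carve off the next chunk, capped at what the hardware accepts. */
static size_t next_chunk(struct chunk_ctx *c, size_t max_chunk)
{
	size_t left = c->total_todo - c->total_sent;
	size_t chunksize = left < max_chunk ? left : max_chunk;

	c->total_sent += chunksize;
	return chunksize;
}
```

A driver loop would call `next_chunk()` until it returns 0, mirroring the `total_sent == total_todo` completion tests in the listing; the `spu_sg_count()` calls then size the scatterlist slice for each chunk.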
|
/drivers/crypto/allwinner/sun8i-ss/
sun8i-ss-hash.c
     290  int flow = rctx->flow;  in sun8i_ss_run_hash_task()
     304  v |= rctx->method;  in sun8i_ss_run_hash_task()
     320  rctx->t_src[i].len, rctx->t_dst[i].len,  in sun8i_ss_run_hash_task()
     321  rctx->method, rctx->t_src[i].addr, rctx->t_dst[i].addr);  in sun8i_ss_run_hash_task()
     411  rctx->flow = e;  in sun8i_ss_hash_digest()
     516  rctx->t_dst[i].len = 0;  in sun8i_ss_hash_run()
     579  rctx->t_src[k + 1].addr = rctx->t_src[k].addr;  in sun8i_ss_hash_run()
     580  rctx->t_src[k + 1].len = rctx->t_src[k].len;  in sun8i_ss_hash_run()
     581  rctx->t_dst[k + 1].addr = rctx->t_dst[k].addr;  in sun8i_ss_hash_run()
     582  rctx->t_dst[k + 1].len = rctx->t_dst[k].len;  in sun8i_ss_hash_run()
    [all …]
|
sun8i-ss-cipher.c
     113  if (rctx->op_dir & SS_DECRYPTION)  in sun8i_ss_cipher_fallback()
     135  rctx->ivlen = ivsize;  in sun8i_ss_setup_ivs()
     157  rctx->p_iv[i] = a;  in sun8i_ss_setup_ivs()
     168  rctx->niv = i;  in sun8i_ss_setup_ivs()
     215  rctx->keylen = op->keylen;  in sun8i_ss_cipher()
     261  rctx->t_src[i].len = todo / 4;  in sun8i_ss_cipher()
     283  rctx->t_dst[i].len = todo / 4;  in sun8i_ss_cipher()
     354  rctx->op_dir = SS_DECRYPTION;  in sun8i_ss_skdecrypt()
     361  rctx->flow = e;  in sun8i_ss_skdecrypt()
     375  rctx->op_dir = SS_ENCRYPTION;  in sun8i_ss_skencrypt()
    [all …]
|
/drivers/crypto/rockchip/
rk3288_crypto_ahash.c
      55  ahash_request_set_callback(&rctx->fallback_req,  in rk_ahash_digest_fb()
      61  return crypto_ahash_digest(&rctx->fallback_req);  in rk_ahash_digest_fb()
     221  rctx->dev = dev;  in rk_ahash_digest()
     239  struct rk_crypto_info *rkc = rctx->dev;  in rk_hash_prepare()
     246  rctx->nrsg = ret;  in rk_hash_prepare()
     255  struct rk_crypto_info *rkc = rctx->dev;  in rk_hash_unprepare()
     268  struct rk_crypto_info *rkc = rctx->dev;  in rk_hash_run()
     281  rctx->mode = 0;  in rk_hash_run()
     288  rctx->mode = RK_CRYPTO_HASH_SHA1;  in rk_hash_run()
     291  rctx->mode = RK_CRYPTO_HASH_SHA256;  in rk_hash_run()
    [all …]
|
rk3288_crypto_skcipher.c
      84  if (rctx->mode & RK_CRYPTO_DEC)  in rk_cipher_fallback()
     103  rctx->dev = rkc;  in rk_cipher_handle_req()
     159  rctx->mode = RK_CRYPTO_AES_ECB_MODE;  in rk_aes_ecb_encrypt()
     175  rctx->mode = RK_CRYPTO_AES_CBC_MODE;  in rk_aes_cbc_encrypt()
     191  rctx->mode = 0;  in rk_des_ecb_encrypt()
     199  rctx->mode = RK_CRYPTO_DEC;  in rk_des_ecb_decrypt()
     223  rctx->mode = RK_CRYPTO_TDES_SELECT;  in rk_des3_ede_ecb_encrypt()
     326  if (rctx->mode & RK_CRYPTO_DEC) {  in rk_cipher_run()
     342  if (rctx->mode & RK_CRYPTO_DEC) {  in rk_cipher_run()
     392  if (rctx->mode & RK_CRYPTO_DEC) {  in rk_cipher_run()
    [all …]
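
The rk3288 skcipher entry points differ only in the mode word they stash in the request context; one engine routine (`rk_cipher_run()` above) then tests the `RK_CRYPTO_DEC` bit to pick the direction. A compact model of that dispatch; the flag values below are invented for illustration, not the hardware's register bits:

```c
#include <stdbool.h>
#include <stdint.h>

#define MODE_AES_ECB  (1u << 0)
#define MODE_AES_CBC  (1u << 1)
#define MODE_DEC      (1u << 16) /* direction bit OR'd into any mode */

/* Each algorithm entry point reduces to picking one mode word. */
static uint32_t aes_mode(bool cbc, bool decrypt)
{
	uint32_t mode = cbc ? MODE_AES_CBC : MODE_AES_ECB;

	if (decrypt)
		mode |= MODE_DEC;
	return mode;
}
```

Folding the direction into the same word the hardware consumes keeps the per-algorithm wrappers one line long, which is exactly the shape of the `rk_*_encrypt()`/`rk_*_decrypt()` matches above.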
|
/drivers/crypto/stm32/
stm32-hash.c
     358  count = min(rctx->sg->length - rctx->offset, rctx->total);  in stm32_hash_append_sg()
     363  rctx->sg = sg_next(rctx->sg);  in stm32_hash_append_sg()
     371  rctx->sg, rctx->offset, count, 0);  in stm32_hash_append_sg()
     377  if (rctx->offset == rctx->sg->length) {  in stm32_hash_append_sg()
     378  rctx->sg = sg_next(rctx->sg);  in stm32_hash_append_sg()
     682  for_each_sg(rctx->sg, tsg, rctx->nents, i) {  in stm32_hash_dma_send()
     899  rctx->total, rctx->digcnt);  in stm32_hash_update_req()
    1125  rctx->sg = rctx->sgl;  in stm32_hash_copy_sgs()
    1154  rctx->sg = rctx->sgl;  in stm32_hash_align_sgs()
    1245  rctx->sg = rctx->sgl;  in stm32_hash_align_sgs()
    [all …]
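
The `stm32_hash_append_sg()` hits are the canonical scatter-gather consume loop: copy min(what this segment still holds, what the request still owes), advance the in-segment offset, and hop to the next segment once the offset reaches the segment length (the keembay `flush_sg_to_ocs_buffer()` above is the same loop). A userspace model where `struct seg` stands in for a scatterlist entry and the chain ends with a NULL-buffer sentinel playing the role of `sg_next()` returning NULL; all names are invented:

```c
#include <stddef.h>
#include <stdint.h>
#include <string.h>

struct seg { const uint8_t *buf; size_t length; };

struct sg_walk {
	const struct seg *sg; /* current segment */
	size_t offset;        /* bytes already consumed within *sg */
	size_t total;         /* bytes still owed by the request */
};

/* Copy up to room bytes out of the segment chain into dst. */
static size_t append_sg(struct sg_walk *w, uint8_t *dst, size_t room)
{
	size_t copied = 0;

	while (w->total && room && w->sg && w->sg->buf) {
		/* min(rest of segment, rest of request, rest of dst) */
		size_t count = w->sg->length - w->offset;

		if (count > w->total)
			count = w->total;
		if (count > room)
			count = room;

		memcpy(dst + copied, w->sg->buf + w->offset, count);
		copied += count;
		room -= count;
		w->offset += count;
		w->total -= count;

		if (w->offset == w->sg->length) { /* segment exhausted */
			w->sg++;                  /* i.e. sg_next() */
			w->offset = 0;
		}
	}
	return copied;
}
```

Leaving `offset` mid-segment when the destination fills up is the point of the pattern: the next call resumes exactly where this one stopped, which is why the cursor lives in the request context.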
|
/drivers/crypto/
sahara.c
     747  if (rctx->last)  in sahara_sha_init_hdr()
     887  scatterwalk_map_and_copy(rctx->buf + rctx->buf_cnt, req->src,  in sahara_sha_prepare_request()
     896  memcpy(rctx->rembuf, rctx->buf, rctx->buf_cnt);  in sahara_sha_prepare_request()
     911  sg_set_buf(rctx->in_sg_chain, rctx->rembuf, rctx->buf_cnt);  in sahara_sha_prepare_request()
     913  rctx->in_sg = rctx->in_sg_chain;  in sahara_sha_prepare_request()
     916  rctx->in_sg = rctx->in_sg_chain;  in sahara_sha_prepare_request()
     917  sg_init_one(rctx->in_sg, rctx->rembuf, rctx->buf_cnt);  in sahara_sha_prepare_request()
     978  memcpy(rctx->context, dev->context_base, rctx->context_size);  in sahara_sha_process()
     981  memcpy(req->result, rctx->context, rctx->digest_size);  in sahara_sha_process()
    1028  memset(rctx, 0, sizeof(*rctx));  in sahara_sha_init()
    [all …]
|