
Searched refs:cryptlen (Results 1 – 25 of 66) sorted by relevance
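Most of these hits read cryptlen from struct skcipher_request or struct aead_request, where it is the number of bytes to encrypt or decrypt, set by the caller via skcipher_request_set_crypt() or aead_request_set_crypt(); for AEAD decryption the value also covers the authentication tag, which is why several drivers below subtract ctx->authsize. A minimal sketch of how a caller fills it in (example_encrypt and its arguments are hypothetical, not taken from the results):

    #include <crypto/skcipher.h>
    #include <linux/scatterlist.h>

    static int example_encrypt(struct crypto_skcipher *tfm,
                               struct scatterlist *src, struct scatterlist *dst,
                               unsigned int len, u8 *iv)
    {
            DECLARE_CRYPTO_WAIT(wait);
            struct skcipher_request *req;
            int ret;

            req = skcipher_request_alloc(tfm, GFP_KERNEL);
            if (!req)
                    return -ENOMEM;

            /* Run synchronously even if the driver completes asynchronously. */
            skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
                                               CRYPTO_TFM_REQ_MAY_SLEEP,
                                          crypto_req_done, &wait);

            /* 'len' is stored as req->cryptlen; the drivers listed here read it
             * back to check zero length, block alignment, fallback thresholds, etc. */
            skcipher_request_set_crypt(req, src, dst, len, iv);

            ret = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
            skcipher_request_free(req);
            return ret;
    }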


/drivers/crypto/allwinner/sun8i-ce/
sun8i-ce-cipher.c
49 if (areq->cryptlen == 0) { in sun8i_ce_cipher_need_fallback()
56 if (areq->cryptlen % 16) { in sun8i_ce_cipher_need_fallback()
63 len = areq->cryptlen; in sun8i_ce_cipher_need_fallback()
83 len = areq->cryptlen; in sun8i_ce_cipher_need_fallback()
126 areq->cryptlen, areq->iv); in sun8i_ce_cipher_fallback()
159 areq->cryptlen, in sun8i_ce_cipher_prepare()
249 len = areq->cryptlen; in sun8i_ce_cipher_prepare()
264 len = areq->cryptlen; in sun8i_ce_cipher_prepare()
279 chan->timeout = areq->cryptlen; in sun8i_ce_cipher_prepare()
301 offset = areq->cryptlen - ivsize; in sun8i_ce_cipher_prepare()
[all …]
/drivers/crypto/qce/
aead.c
71 totallen = req->cryptlen + req->assoclen; in qce_aead_done()
124 totallen = rctx->cryptlen + assoclen; in qce_aead_prepare_dst_buf()
198 cryptlen = rctx->cryptlen + ctx->authsize; in qce_aead_ccm_prepare_buf_assoclen()
200 cryptlen = rctx->cryptlen; in qce_aead_ccm_prepare_buf_assoclen()
201 totallen = cryptlen + req->assoclen; in qce_aead_ccm_prepare_buf_assoclen()
278 totallen = cryptlen + rctx->assoclen; in qce_aead_ccm_prepare_buf_assoclen()
336 unsigned int cryptlen; in qce_aead_ccm_prepare_buf() local
344 cryptlen = rctx->cryptlen + ctx->authsize; in qce_aead_ccm_prepare_buf()
514 rctx->cryptlen = req->cryptlen; in qce_aead_crypt()
516 rctx->cryptlen = req->cryptlen - ctx->authsize; in qce_aead_crypt()
[all …]
skcipher.c
79 rctx->cryptlen = req->cryptlen; in qce_skcipher_async_req_handle()
85 rctx->src_nents = sg_nents_for_len(req->src, req->cryptlen); in qce_skcipher_async_req_handle()
87 rctx->dst_nents = sg_nents_for_len(req->dst, req->cryptlen); in qce_skcipher_async_req_handle()
110 sg = qce_sgtable_add(&rctx->dst_tbl, req->dst, req->cryptlen); in qce_skcipher_async_req_handle()
271 if (!req->cryptlen) in qce_skcipher_crypt()
279 if (!IS_ALIGNED(req->cryptlen, blocksize)) in qce_skcipher_crypt()
292 (IS_XTS(rctx->flags) && ((req->cryptlen <= aes_sw_max_len) || in qce_skcipher_crypt()
293 (req->cryptlen > QCE_SECTOR_SIZE && in qce_skcipher_crypt()
294 req->cryptlen % QCE_SECTOR_SIZE))))) { in qce_skcipher_crypt()
301 req->dst, req->cryptlen, req->iv); in qce_skcipher_crypt()
common.c
299 unsigned int enckeylen, unsigned int cryptlen) in qce_xtskey() argument
311 qce_write(qce, REG_ENCR_XTS_DU_SIZE, cryptlen); in qce_xtskey()
352 rctx->cryptlen); in qce_setup_regs_skcipher()
373 qce_write(qce, REG_ENCR_SEG_SIZE, rctx->cryptlen); in qce_setup_regs_skcipher()
383 qce_write(qce, REG_SEG_SIZE, rctx->cryptlen); in qce_setup_regs_skcipher()
514 totallen = rctx->cryptlen + rctx->assoclen; in qce_setup_regs_aead()
518 qce_write(qce, REG_ENCR_SEG_SIZE, rctx->cryptlen + ctx->authsize); in qce_setup_regs_aead()
520 qce_write(qce, REG_ENCR_SEG_SIZE, rctx->cryptlen); in qce_setup_regs_aead()
/drivers/crypto/allwinner/sun8i-ss/
sun8i-ss-cipher.c
33 if (areq->cryptlen == 0 || areq->cryptlen % 16) { in sun8i_ss_need_fallback()
39 sg_nents_for_len(areq->dst, areq->cryptlen) > 8) { in sun8i_ss_need_fallback()
44 len = areq->cryptlen; in sun8i_ss_need_fallback()
59 len = areq->cryptlen; in sun8i_ss_need_fallback()
112 areq->cryptlen, areq->iv); in sun8i_ss_cipher_fallback()
128 unsigned int len = areq->cryptlen; in sun8i_ss_setup_ivs()
137 offset = areq->cryptlen - ivsize; in sun8i_ss_setup_ivs()
205 areq->cryptlen, in sun8i_ss_cipher()
253 len = areq->cryptlen; in sun8i_ss_cipher()
275 len = areq->cryptlen; in sun8i_ss_cipher()
[all …]
/drivers/crypto/
omap-aes-gcm.c
93 int alen, clen, cryptlen, assoclen, ret; in omap_aes_gcm_copy_buffers() local
101 cryptlen = req->cryptlen; in omap_aes_gcm_copy_buffers()
107 cryptlen -= authlen; in omap_aes_gcm_copy_buffers()
110 clen = ALIGN(cryptlen, AES_BLOCK_SIZE); in omap_aes_gcm_copy_buffers()
112 nsg = !!(assoclen && cryptlen); in omap_aes_gcm_copy_buffers()
130 if (cryptlen) { in omap_aes_gcm_copy_buffers()
136 ret = omap_crypto_align_sg(&tmp, cryptlen, in omap_aes_gcm_copy_buffers()
148 dd->total = cryptlen; in omap_aes_gcm_copy_buffers()
161 if (cryptlen) { in omap_aes_gcm_copy_buffers()
162 ret = omap_crypto_align_sg(&dd->out_sg, cryptlen, in omap_aes_gcm_copy_buffers()
[all …]
talitos.c
995 unsigned int cryptlen = areq->cryptlen - (encrypt ? 0 : authsize); in ipsec_esp_unmap() local
1101 int padding = ALIGN(cryptlen, align) - cryptlen; in sg_to_link_tbl_offset()
1113 if (len > cryptlen) in sg_to_link_tbl_offset()
1114 len = cryptlen; in sg_to_link_tbl_offset()
1128 cryptlen -= len; in sg_to_link_tbl_offset()
1204 unsigned int cryptlen = areq->cryptlen - (encrypt ? 0 : authsize); in ipsec_esp() local
1424 unsigned int cryptlen = areq->cryptlen - (encrypt ? 0 : authsize); in aead_edesc_alloc() local
1576 unsigned int cryptlen = areq->cryptlen; in common_nonsnoop() local
1596 cryptlen); in common_nonsnoop()
1659 if (!areq->cryptlen) in skcipher_encrypt()
[all …]
/drivers/crypto/allwinner/sun4i-ss/
sun4i-ss-cipher.c
30 unsigned int ileft = areq->cryptlen; in sun4i_ss_opti_poll()
31 unsigned int oleft = areq->cryptlen; in sun4i_ss_opti_poll()
41 if (!areq->cryptlen) in sun4i_ss_opti_poll()
57 algt->stat_bytes += areq->cryptlen; in sun4i_ss_opti_poll()
74 ileft = areq->cryptlen / 4; in sun4i_ss_opti_poll()
75 oleft = areq->cryptlen / 4; in sun4i_ss_opti_poll()
166 areq->cryptlen, areq->iv); in sun4i_ss_cipher_poll_fallback()
196 unsigned int ileft = areq->cryptlen; in sun4i_ss_cipher_poll()
209 if (!areq->cryptlen) in sun4i_ss_cipher_poll()
265 ileft = areq->cryptlen; in sun4i_ss_cipher_poll()
[all …]
/drivers/crypto/tegra/
tegra-se-aes.c
60 unsigned int cryptlen; member
273 rctx->len = req->cryptlen; in tegra_aes_do_one_req()
498 if (!req->cryptlen) in tegra_aes_crypt()
1103 rctx->cryptlen, 0); in tegra_ccm_compute_auth()
1104 offset += rctx->cryptlen; in tegra_ccm_compute_auth()
1139 if (rctx->cryptlen) { in tegra_ccm_do_ctr()
1170 rctx->cryptlen = req->cryptlen; in tegra_ccm_crypt_init()
1172 rctx->cryptlen = req->cryptlen - rctx->authsize; in tegra_ccm_crypt_init()
1283 rctx->cryptlen = req->cryptlen; in tegra_gcm_do_one_req()
1285 rctx->cryptlen = req->cryptlen - ctx->authsize; in tegra_gcm_do_one_req()
[all …]
/drivers/crypto/cavium/nitrox/
nitrox_aead.c
165 creq->gph.param0 = cpu_to_be16(rctx->cryptlen); in nitrox_set_creq()
166 creq->gph.param1 = cpu_to_be16(rctx->cryptlen + rctx->assoclen); in nitrox_set_creq()
227 rctx->cryptlen = areq->cryptlen; in nitrox_aes_gcm_enc()
229 rctx->srclen = areq->assoclen + areq->cryptlen; in nitrox_aes_gcm_enc()
261 rctx->cryptlen = areq->cryptlen - aead->authsize; in nitrox_aes_gcm_dec()
263 rctx->srclen = areq->cryptlen + areq->assoclen; in nitrox_aes_gcm_dec()
449 aead_rctx->cryptlen = areq->cryptlen; in nitrox_rfc4106_enc()
451 aead_rctx->srclen = aead_rctx->assoclen + aead_rctx->cryptlen; in nitrox_rfc4106_enc()
481 aead_rctx->cryptlen = areq->cryptlen - aead->authsize; in nitrox_rfc4106_dec()
484 areq->cryptlen - GCM_RFC4106_IV_SIZE + areq->assoclen; in nitrox_rfc4106_dec()
/drivers/crypto/intel/keembay/
keembay-ocs-aes-core.c
289 if (req->cryptlen < AES_BLOCK_SIZE) in kmb_ocs_sk_validate_input()
326 req->cryptlen, req->iv); in kmb_ocs_sk_common()
434 req->cryptlen, 0); in kmb_ocs_sk_prepare_inplace()
469 req->cryptlen, 0); in kmb_ocs_sk_prepare_notinplace()
484 req->cryptlen, 0); in kmb_ocs_sk_prepare_notinplace()
551 req->cryptlen, req->iv, iv_size); in kmb_ocs_sk_run()
571 req->cryptlen - iv_size, in kmb_ocs_sk_run()
580 req->cryptlen - iv_size, in kmb_ocs_sk_run()
652 req->cryptlen, req->iv); in kmb_ocs_aead_common()
745 in_size = req->cryptlen - tag_size; in kmb_ocs_aead_dma_prepare()
[all …]
/drivers/crypto/gemini/
sl3516-ce-cipher.c
34 if (areq->cryptlen == 0 || areq->cryptlen % 16) { in sl3516_ce_need_fallback()
119 areq->cryptlen, areq->iv); in sl3516_ce_cipher_fallback()
147 areq->cryptlen, in sl3516_ce_cipher()
179 len = areq->cryptlen; in sl3516_ce_cipher()
189 areq->cryptlen, i, rctx->t_src[i].len, sg->offset, todo); in sl3516_ce_cipher()
196 dev_err(ce->dev, "remaining len %d/%u nr_sgs=%d\n", len, areq->cryptlen, nr_sgs); in sl3516_ce_cipher()
201 len = areq->cryptlen; in sl3516_ce_cipher()
211 areq->cryptlen, i, rctx->t_dst[i].len, sg->offset, todo); in sl3516_ce_cipher()
234 ecb->cipher.algorithm_len = areq->cryptlen; in sl3516_ce_cipher()
/drivers/crypto/aspeed/
aspeed-hace-crypto.c
38 areq->cryptlen, areq->iv); in aspeed_crypto_do_fallback()
52 if (areq->cryptlen == 0) in aspeed_crypto_need_fallback()
56 !IS_ALIGNED(areq->cryptlen, DES_BLOCK_SIZE)) in aspeed_crypto_need_fallback()
60 !IS_ALIGNED(areq->cryptlen, AES_BLOCK_SIZE)) in aspeed_crypto_need_fallback()
168 "nbytes", nbytes, "cryptlen", req->cryptlen); in aspeed_sk_transfer()
193 crypto_engine->cipher_addr, req->cryptlen); in aspeed_sk_start()
202 "nbytes", nbytes, "cryptlen", req->cryptlen); in aspeed_sk_start()
269 total = req->cryptlen; in aspeed_sk_start_sg()
300 total = req->cryptlen; in aspeed_sk_start_sg()
417 if (!IS_ALIGNED(req->cryptlen, DES_BLOCK_SIZE)) in aspeed_des_crypt()
[all …]
/drivers/crypto/xilinx/
zynqmp-aes-gcm.c
93 dma_size = req->cryptlen + ZYNQMP_AES_KEY_SIZE in zynqmp_aes_aead_cipher()
96 dma_size = req->cryptlen + GCM_AES_IV_SIZE; in zynqmp_aes_aead_cipher()
109 data_size = req->cryptlen; in zynqmp_aes_aead_cipher()
110 scatterwalk_map_and_copy(kbuf, req->src, 0, req->cryptlen, 0); in zynqmp_aes_aead_cipher()
191 req->cryptlen < ZYNQMP_AES_MIN_INPUT_BLK_SIZE) { in zynqmp_fallback_check()
194 if ((req->cryptlen % ZYNQMP_AES_WORD_LEN) != 0) in zynqmp_fallback_check()
198 req->cryptlen <= ZYNQMP_AES_AUTH_SIZE) { in zynqmp_fallback_check()
224 areq->cryptlen, areq->iv); in zynqmp_handle_aes_req()
/drivers/crypto/virtio/
virtio_crypto_skcipher_algs.c
341 src_nents = sg_nents_for_len(req->src, req->cryptlen); in __virtio_crypto_skcipher_do_req()
383 cpu_to_le32(req->cryptlen); in __virtio_crypto_skcipher_do_req()
392 dst_len = min_t(unsigned int, req->cryptlen, dst_len); in __virtio_crypto_skcipher_do_req()
394 req->cryptlen, dst_len); in __virtio_crypto_skcipher_do_req()
396 if (unlikely(req->cryptlen + dst_len + ivsize + in __virtio_crypto_skcipher_do_req()
425 req->cryptlen - AES_BLOCK_SIZE, in __virtio_crypto_skcipher_do_req()
475 if (!req->cryptlen) in virtio_crypto_skcipher_encrypt()
477 if (req->cryptlen % AES_BLOCK_SIZE) in virtio_crypto_skcipher_encrypt()
498 if (!req->cryptlen) in virtio_crypto_skcipher_decrypt()
500 if (req->cryptlen % AES_BLOCK_SIZE) in virtio_crypto_skcipher_decrypt()
[all …]
/drivers/crypto/stm32/
stm32-cryp.c
1238 if (req->cryptlen == 0) in stm32_cryp_aes_ecb_encrypt()
1249 if (req->cryptlen == 0) in stm32_cryp_aes_ecb_decrypt()
1260 if (req->cryptlen == 0) in stm32_cryp_aes_cbc_encrypt()
1271 if (req->cryptlen == 0) in stm32_cryp_aes_cbc_decrypt()
1279 if (req->cryptlen == 0) in stm32_cryp_aes_ctr_encrypt()
1287 if (req->cryptlen == 0) in stm32_cryp_aes_ctr_decrypt()
1339 if (req->cryptlen == 0) in stm32_cryp_des_ecb_encrypt()
1350 if (req->cryptlen == 0) in stm32_cryp_des_ecb_decrypt()
1361 if (req->cryptlen == 0) in stm32_cryp_des_cbc_encrypt()
1372 if (req->cryptlen == 0) in stm32_cryp_des_cbc_decrypt()
[all …]
/drivers/crypto/amlogic/
amlogic-gxl-cipher.c
30 if (areq->cryptlen == 0) in meson_cipher_need_fallback()
75 areq->cryptlen, areq->iv); in meson_cipher_do_fallback()
108 areq->cryptlen, in meson_cipher()
131 if (ivsize > areq->cryptlen) { in meson_cipher()
132 dev_err(mc->dev, "invalid ivsize=%d vs len=%d\n", ivsize, areq->cryptlen); in meson_cipher()
144 offset = areq->cryptlen - ivsize; in meson_cipher()
205 len = areq->cryptlen; in meson_cipher()
250 areq->cryptlen - ivsize, in meson_cipher()
/drivers/crypto/chelsio/
chcr_algo.c
187 req->cryptlen - authsize); in chcr_verify_tag()
701 req->cryptlen, iv); in chcr_cipher_fallback()
1173 if (req->cryptlen == reqctx->processed) { in chcr_handle_cipher_resp()
1262 (req->cryptlen == 0) || in process_cipher()
1292 bytes = IV + req->cryptlen; in process_cipher()
1307 bytes = req->cryptlen; in process_cipher()
2467 if (req->cryptlen == 0) in create_authenc_wr()
2600 src_len = req->assoclen + req->cryptlen; in chcr_aead_dma_map()
2727 temp = req->assoclen + req->cryptlen + in chcr_add_aead_dst_ent()
2923 req->cryptlen - m : req->cryptlen, l); in generate_b0()
[all …]
/drivers/crypto/marvell/octeontx2/
otx2_cptvf_algs.c
145 start = sreq->cryptlen - ivsize; in output_iv_copyback()
229 u32 start = req->cryptlen - ivsize; in create_ctx_hdr()
255 req_info->req.param1 = req->cryptlen; in create_ctx_hdr()
341 req->dst, req->cryptlen, req->iv); in skcipher_do_fallback()
360 if (req->cryptlen == 0) in cpt_enc_dec()
366 if (req->cryptlen > OTX2_CPT_MAX_REQ_SIZE) in cpt_enc_dec()
1028 req_info->req.param1 = req->cryptlen; in create_aead_ctx_hdr()
1088 u32 inputlen = req->cryptlen + req->assoclen; in create_aead_input_list()
1124 inputlen = req->cryptlen + req->assoclen; in create_aead_null_input_list()
1144 inputlen = req->cryptlen + req->assoclen; in create_aead_null_output_list()
[all …]
/drivers/crypto/amcc/
crypto4xx_alg.c
74 if (check_blocksize && !IS_ALIGNED(req->cryptlen, AES_BLOCK_SIZE)) in crypto4xx_crypt()
81 req->cryptlen, iv, ivlen, decrypt ? ctx->sa_in : ctx->sa_out, in crypto4xx_crypt()
216 req->cryptlen, iv, AES_IV_SIZE, in crypto4xx_rfc3686_encrypt()
231 req->cryptlen, iv, AES_IV_SIZE, in crypto4xx_rfc3686_decrypt()
242 unsigned int nblks = ALIGN(req->cryptlen, AES_BLOCK_SIZE) / in crypto4xx_ctr_crypt()
259 req->cryptlen, req->iv); in crypto4xx_ctr_crypt()
342 aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen, in crypto4xx_aead_fallback()
432 unsigned int len = req->cryptlen; in crypto4xx_crypt_aes_ccm()
576 unsigned int len = req->cryptlen; in crypto4xx_crypt_aes_gcm()
/drivers/crypto/inside-secure/
safexcel_cipher.c
174 cryptlen -= digestsize; in safexcel_aead_token()
208 cbcmaciv[14] = cryptlen >> 8; in safexcel_aead_token()
209 cbcmaciv[15] = cryptlen & 255; in safexcel_aead_token()
243 if (likely(cryptlen)) { in safexcel_aead_token()
306 atoken->packet_length = cryptlen; in safexcel_aead_token()
323 cryptlen &= 15; in safexcel_aead_token()
611 unsigned int cryptlen, in safexcel_handle_req_result() argument
664 (cryptlen - in safexcel_handle_req_result()
825 sreq->direction, cryptlen, in safexcel_send_req()
829 cryptlen); in safexcel_send_req()
[all …]
/drivers/crypto/marvell/cesa/
cipher.c
44 mv_cesa_req_dma_iter_init(&iter->base, req->cryptlen); in mv_cesa_skcipher_req_iter_init()
83 atomic_sub(req->cryptlen, &engine->load); in mv_cesa_skcipher_cleanup()
91 size_t len = min_t(size_t, req->cryptlen - sreq->offset, in mv_cesa_skcipher_std_step()
139 if (sreq->offset < req->cryptlen) in mv_cesa_skcipher_std_process()
432 if (!IS_ALIGNED(req->cryptlen, blksize)) in mv_cesa_skcipher_req_init()
435 creq->src_nents = sg_nents_for_len(req->src, req->cryptlen); in mv_cesa_skcipher_req_init()
440 creq->dst_nents = sg_nents_for_len(req->dst, req->cryptlen); in mv_cesa_skcipher_req_init()
464 if (!req->cryptlen) in mv_cesa_skcipher_queue_req()
471 engine = mv_cesa_select_engine(req->cryptlen); in mv_cesa_skcipher_queue_req()
/drivers/crypto/ccree/
cc_aead.c
739 areq_ctx->cryptlen > 0) in cc_set_assoc_desc()
749 areq_ctx->cryptlen > 0) in cc_set_assoc_desc()
786 areq_ctx->cryptlen, NS_BIT); in cc_proc_authen_desc()
836 if (areq_ctx->cryptlen == 0) in cc_proc_cipher_desc()
972 if (req_ctx->cryptlen == 0) in cc_proc_cipher()
1319 (req->cryptlen - ctx->authsize) : req->cryptlen; in validate_data_size()
1322 req->cryptlen < ctx->authsize) in validate_data_size()
1488 if (req_ctx->cryptlen) in cc_ccm()
1549 req->cryptlen : in config_ccm_adata()
1822 if (req_ctx->cryptlen) in cc_gcm()
[all …]
/drivers/crypto/marvell/octeontx/
otx_cptvf_algs.c
162 start = sreq->cryptlen - ivsize; in output_iv_copyback()
243 u32 start = req->cryptlen - ivsize; in create_ctx_hdr()
269 req_info->req.param1 = req->cryptlen; in create_ctx_hdr()
351 if (req->cryptlen > OTX_CPT_MAX_REQ_SIZE) in cpt_enc_dec()
939 req_info->req.param1 = req->cryptlen; in create_aead_ctx_hdr()
943 req_info->req.param1 = req->cryptlen - mac_len; in create_aead_ctx_hdr()
1001 u32 inputlen = req->cryptlen + req->assoclen; in create_aead_input_list()
1023 outputlen = req->cryptlen + req->assoclen - mac_len; in create_aead_output_list()
1039 inputlen = req->cryptlen + req->assoclen; in create_aead_null_input_list()
1041 inputlen = req->cryptlen + req->assoclen - mac_len; in create_aead_null_input_list()
[all …]
/drivers/crypto/hisilicon/sec2/
sec_crypto.c
1638 unsigned int cryptlen; in sec_update_iv() local
1645 cryptlen = sk_req->cryptlen; in sec_update_iv()
1649 cryptlen = aead_req->cryptlen; in sec_update_iv()
1654 cryptlen - iv_size); in sec_update_iv()
1658 sz = (cryptlen + iv_size - 1) / iv_size; in sec_update_iv()
1687 u32 data_size = aead_req->cryptlen; in set_aead_auth_iv()
2220 u32 cryptlen = sreq->c_req.sk_req->cryptlen; in sec_skcipher_cryptlen_check() local
2260 if (sk_req->cryptlen > MAX_INPUT_DATA_LEN) in sec_skcipher_param_check()
2263 sreq->c_req.c_len = sk_req->cryptlen; in sec_skcipher_param_check()
2304 sreq->cryptlen, sreq->iv); in sec_skcipher_soft_crypto()
[all …]

