| /drivers/crypto/nx/ |
| A D | nx-aes-ccm.c | 147 if (assoclen) in generate_b0() 185 if (!assoclen) { in generate_pat() 187 } else if (assoclen <= 14) { in generate_pat() 193 iauth_len = assoclen; in generate_pat() 218 if (assoclen <= 65280) { in generate_pat() 219 *(u16 *)b1 = assoclen; in generate_pat() 223 *(u32 *)&b1[2] = assoclen; in generate_pat() 229 if (!assoclen) { in generate_pat() 231 } else if (assoclen <= 14) { in generate_pat() 326 unsigned int assoclen) in ccm_nx_decrypt() argument [all …]
|
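The nx-aes-ccm.c hits above are the CCM header construction from RFC 3610 / NIST SP 800-38C: generate_b0() sets the Adata flag when associated data is present, and generate_pat() prepends a 2- or 6-byte length field before the AAD (hence the 65280 threshold); the assoclen <= 14 branch appears to pack short AAD into a single 16-byte block alongside the 2-byte prefix. A minimal userspace sketch of that encoding, independent of the NX driver's buffer handling (names are illustrative, not the driver's):

```c
#include <stdint.h>
#include <stddef.h>

/*
 * Flags octet of the CCM B0 block (RFC 3610): bit 6 signals that
 * associated data follows, bits 3..5 encode (taglen - 2) / 2 and
 * bits 0..2 encode the length-field size minus one.
 */
static uint8_t ccm_b0_flags(size_t assoclen, unsigned int taglen,
			    unsigned int lfield)
{
	return (assoclen ? 1 << 6 : 0) |
	       (((taglen - 2) / 2) << 3) |
	       (lfield - 1);
}

/*
 * Length prefix placed in front of the AAD: two octets for lengths
 * below 2^16 - 2^8, otherwise the 0xff 0xfe escape plus four octets.
 * Returns how many prefix bytes were written.
 */
static size_t ccm_encode_adata_len(uint8_t *out, uint32_t assoclen)
{
	if (assoclen < 0xff00) {
		out[0] = assoclen >> 8;
		out[1] = assoclen & 0xff;
		return 2;
	}
	out[0] = 0xff;
	out[1] = 0xfe;
	out[2] = assoclen >> 24;
	out[3] = (assoclen >> 16) & 0xff;
	out[4] = (assoclen >> 8) & 0xff;
	out[5] = assoclen & 0xff;
	return 6;
}
```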
| A D | nx-aes-gcm.c | 102 unsigned int assoclen) in nx_gca() argument 107 unsigned int nbytes = assoclen; in nx_gca() 173 unsigned int nbytes = assoclen; in gmac() 305 unsigned int assoclen) in gcm_aes_nx_crypt() argument 322 if (assoclen == 0) in gcm_aes_nx_crypt() 334 if (assoclen) { in gcm_aes_nx_crypt() 336 assoclen); in gcm_aes_nx_crypt() 356 processed + req->assoclen, in gcm_aes_nx_crypt() 392 req->dst, req->assoclen + nbytes, in gcm_aes_nx_crypt() 442 if (req->assoclen < 8) in gcm4106_aes_nx_encrypt() [all …]
|
| /drivers/crypto/ |
| A D | omap-aes-gcm.c | 63 dd->aead_req->assoclen, dd->total, in omap_aes_gcm_done_task() 100 assoclen = req->assoclen; in omap_aes_gcm_copy_buffers() 104 assoclen -= 8; in omap_aes_gcm_copy_buffers() 109 alen = ALIGN(assoclen, AES_BLOCK_SIZE); in omap_aes_gcm_copy_buffers() 112 nsg = !!(assoclen && cryptlen); in omap_aes_gcm_copy_buffers() 117 if (assoclen) { in omap_aes_gcm_copy_buffers() 149 dd->assoc_len = assoclen; in omap_aes_gcm_copy_buffers() 245 int err, assoclen; in omap_aes_gcm_crypt() local 255 assoclen = req->assoclen - 8; in omap_aes_gcm_crypt() 257 assoclen = req->assoclen; in omap_aes_gcm_crypt() [all …]
|
| A D | atmel-aes.c | 1364 if (likely(req->assoclen != 0 || ctx->textlen != 0)) in atmel_aes_gcm_process() 1385 atmel_aes_write(dd, AES_AADLENR, req->assoclen); in atmel_aes_gcm_length() 1389 if (unlikely(req->assoclen == 0)) { in atmel_aes_gcm_length() 1395 padlen = atmel_aes_padlen(req->assoclen, AES_BLOCK_SIZE); in atmel_aes_gcm_length() 1396 if (unlikely(req->assoclen + padlen > dd->buflen)) in atmel_aes_gcm_length() 1402 dd->datalen = req->assoclen + padlen; in atmel_aes_gcm_length() 1473 data[0] = cpu_to_be64(req->assoclen * 8); in atmel_aes_gcm_tag_init() 1514 offset = req->assoclen + ctx->textlen; in atmel_aes_gcm_finalize() 1815 req->src, req->assoclen, in atmel_aes_authenc_init() 1890 offs = req->assoclen + rctx->textlen; in atmel_aes_authenc_final() [all …]
|
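In the atmel-aes.c entry, AES_AADLENR is written with req->assoclen and atmel_aes_gcm_tag_init() builds cpu_to_be64(req->assoclen * 8); that is the standard final GHASH block of GCM, the 64-bit big-endian bit lengths of the AAD and the ciphertext (NIST SP 800-38D). A small stand-alone sketch of that block:

```c
#include <stdint.h>

/*
 * Final GHASH input block for GCM: len(A) || len(C), each a 64-bit
 * big-endian count of *bits*, which is why the driver multiplies
 * req->assoclen by 8 before byte-swapping it.
 */
static void gcm_length_block(uint8_t block[16],
			     uint64_t assoclen, uint64_t cryptlen)
{
	uint64_t abits = assoclen * 8;
	uint64_t cbits = cryptlen * 8;

	for (int i = 0; i < 8; i++) {
		block[i]     = (uint8_t)(abits >> (56 - 8 * i));
		block[8 + i] = (uint8_t)(cbits >> (56 - 8 * i));
	}
}
```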
| A D | atmel-authenc.h | 41 struct scatterlist *assoc, unsigned int assoclen,
|
| /drivers/crypto/cavium/nitrox/ |
| A D | nitrox_aead.c | 207 if (assoclen <= 512) in nitrox_aes_gcm_assoclen_supported() 228 rctx->assoclen = areq->assoclen; in nitrox_aes_gcm_enc() 229 rctx->srclen = areq->assoclen + areq->cryptlen; in nitrox_aes_gcm_enc() 262 rctx->assoclen = areq->assoclen; in nitrox_aes_gcm_dec() 263 rctx->srclen = areq->cryptlen + areq->assoclen; in nitrox_aes_gcm_dec() 397 unsigned int assoclen = areq->assoclen - GCM_RFC4106_IV_SIZE; in nitrox_rfc4106_set_aead_rctx_sglist() local 400 if (areq->assoclen != 16 && areq->assoclen != 20) in nitrox_rfc4106_set_aead_rctx_sglist() 405 sg_set_buf(rctx->src, rctx->assoc, assoclen); in nitrox_rfc4106_set_aead_rctx_sglist() 412 sg_set_buf(rctx->dst, rctx->assoc, assoclen); in nitrox_rfc4106_set_aead_rctx_sglist() 450 aead_rctx->assoclen = areq->assoclen - GCM_RFC4106_IV_SIZE; in nitrox_rfc4106_enc() [all …]
|
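nitrox_rfc4106_set_aead_rctx_sglist() enforces the RFC 4106 (GCM-ESP) convention that recurs throughout this directory: the request's AAD region carries the ESP header (8 or 12 bytes of SPI/sequence data) with the 8-byte explicit IV appended, so req->assoclen must be 16 or 20 and the length handed to the GCM engine is assoclen - GCM_RFC4106_IV_SIZE. The generic crypto_ipsec_check_assoclen() helper performs the same 16/20 check (the octeontx2 entry further down uses it). A hedged, self-contained restatement:

```c
#include <stdbool.h>

#define GCM_RFC4106_IV_SIZE 8	/* explicit IV carried at the end of the AAD */

/*
 * RFC 4106 requests: assoclen covers SPI + (extended) sequence number
 * plus the appended 8-byte IV, so only 16 and 20 are valid and the
 * AAD actually authenticated is assoclen - 8 bytes long.
 */
static bool rfc4106_check_assoclen(unsigned int assoclen,
				   unsigned int *aadlen)
{
	if (assoclen != 16 && assoclen != 20)
		return false;
	*aadlen = assoclen - GCM_RFC4106_IV_SIZE;
	return true;
}
```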
| /drivers/crypto/qce/ |
| A D | aead.c | 54 if (req->assoclen) { in qce_aead_done() 120 unsigned int assoclen = req->assoclen; in qce_aead_prepare_dst_buf() local 146 rctx->assoclen); in qce_aead_prepare_dst_buf() 191 unsigned int assoclen = rctx->assoclen; in qce_aead_ccm_prepare_buf_assoclen() local 230 assoclen) != assoclen) in qce_aead_ccm_prepare_buf_assoclen() 234 rctx->assoclen = ALIGN(assoclen + adata_header_len, 16); in qce_aead_ccm_prepare_buf_assoclen() 251 rctx->assoclen); in qce_aead_ccm_prepare_buf_assoclen() 338 if (rctx->assoclen) in qce_aead_ccm_prepare_buf() 397 if (rctx->assoclen) in qce_aead_create_ccm_nonce() 432 rctx->assoclen = req->assoclen - 8; in qce_aead_async_req_handle() [all …]
|
| A D | aead.h | 39 unsigned int assoclen; member
|
| /drivers/crypto/starfive/ |
| A D | jh7110-aes.c | 211 if (cryp->assoclen) in starfive_aes_ccm_init() 378 total_len = cryp->assoclen; in starfive_aes_ccm_write_adata() 581 cryp->assoclen = 0; in starfive_aes_do_one_req() 649 cryp->assoclen = req->assoclen; in starfive_aes_aead_do_one_req() 665 cryp->total_in + cryp->assoclen, in starfive_aes_aead_do_one_req() 669 if (cryp->assoclen) { in starfive_aes_aead_do_one_req() 676 rctx->adata, cryp->assoclen) != cryp->assoclen) in starfive_aes_aead_do_one_req() 691 if (!cryp->assoclen) in starfive_aes_aead_do_one_req() 820 aead_request_set_ad(subreq, req->assoclen); in starfive_aes_aead_do_fallback() 841 src = scatterwalk_ffwd(_src, req->src, req->assoclen); in starfive_aes_aead_crypt() [all …]
|
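The starfive entry shows two idioms that almost every driver in this listing repeats: scatterwalk_ffwd(_src, req->src, req->assoclen) to skip the AAD region the crypto API places at the front of both the source and destination scatterlists, and aead_request_set_ad(subreq, req->assoclen) when falling back to a software implementation. A kernel-context sketch of the first idiom, assuming only the in-tree scatterwalk helper (the skip_aad wrapper name is mine):

```c
#include <crypto/aead.h>
#include <crypto/scatterwalk.h>

/*
 * src and dst scatterlists of an aead_request begin with assoclen
 * bytes of associated data; the payload to encrypt or decrypt starts
 * right after it, so fast-forward both lists past the AAD.
 */
static void skip_aad(struct aead_request *req,
		     struct scatterlist src_tmp[2],
		     struct scatterlist dst_tmp[2],
		     struct scatterlist **src,
		     struct scatterlist **dst)
{
	*src = scatterwalk_ffwd(src_tmp, req->src, req->assoclen);
	*dst = (req->src == req->dst) ? *src :
	       scatterwalk_ffwd(dst_tmp, req->dst, req->assoclen);
}
```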
| /drivers/crypto/chelsio/ |
| A D | chcr_algo.c | 2919 if (req->assoclen) in generate_b0() 2956 if (assoclen) in ccm_format_packet() 2980 unsigned int assoclen; in fill_sec_cpl_for_aead() local 2985 assoclen = req->assoclen - 8; in fill_sec_cpl_for_aead() 2987 assoclen = req->assoclen; in fill_sec_cpl_for_aead() 3032 if (req->assoclen != 16 && req->assoclen != 20) { in aead_ccm_validate_input() 3054 unsigned int sub_type, assoclen = req->assoclen; in create_aead_ccm_wr() local 3064 assoclen -= 8; in create_aead_ccm_wr() 3152 unsigned int dst_size = 0, temp = 0, kctx_len, assoclen = req->assoclen; in create_gcm_wr() local 3163 assoclen = req->assoclen - 8; in create_gcm_wr() [all …]
|
| /drivers/crypto/ccree/ |
| A D | cc_aead.c | 1317 unsigned int assoclen = areq_ctx->assoclen; in validate_data_size() local 1568 if (req_ctx->assoclen > 0) in config_ccm_adata() 1818 if (req_ctx->assoclen > 0) in cc_gcm() 2029 areq_ctx->assoclen = req->assoclen; in cc_aead_encrypt() 2053 areq_ctx->assoclen = req->assoclen - CCM_BLOCK_IV_SIZE; in cc_rfc4309_ccm_encrypt() 2073 areq_ctx->assoclen = req->assoclen; in cc_aead_decrypt() 2095 areq_ctx->assoclen = req->assoclen - CCM_BLOCK_IV_SIZE; in cc_rfc4309_ccm_decrypt() 2207 areq_ctx->assoclen = req->assoclen - GCM_BLOCK_RFC4_IV_SIZE; in cc_rfc4106_gcm_encrypt() 2234 areq_ctx->assoclen = req->assoclen; in cc_rfc4543_gcm_encrypt() 2258 areq_ctx->assoclen = req->assoclen - GCM_BLOCK_RFC4_IV_SIZE; in cc_rfc4106_gcm_decrypt() [all …]
|
| A D | cc_buffer_mgr.c | 56 u32 skip = req->assoclen + req->cryptlen; in cc_copy_mac() 291 unsigned int assoclen) in cc_set_aead_conf_buf() argument 307 if (assoclen > 0) { in cc_set_aead_conf_buf() 519 areq_ctx->assoclen, req->cryptlen); in cc_unmap_aead_request() 599 if (areq_ctx->assoclen == 0) { in cc_aead_chain_assoc() 643 areq_ctx->assoclen, 0, is_last, in cc_aead_chain_assoc() 802 unsigned int size_for_map = req->assoclen + req->cryptlen; in cc_aead_chain_data() 804 u32 size_to_skip = req->assoclen; in cc_aead_chain_data() 841 size_for_map = req->assoclen + req->cryptlen; in cc_aead_chain_data() 1003 &sg_data, areq_ctx->assoclen); in cc_map_aead_request() [all …]
|
| /drivers/crypto/tegra/ |
| A D | tegra-se-aes.c | 59 unsigned int assoclen; member 632 SE_ADDR_HI_SZ(rctx->assoclen); in tegra_gmac_prep_cmd() 662 if (!rctx->assoclen) in tegra_gcm_crypt_prep_cmd() 978 if (rctx->assoclen) in tegra_ccm_format_nonce() 1033 if (rctx->assoclen) { in tegra_ccm_format_blocks() 1041 offset += rctx->assoclen; in tegra_ccm_format_blocks() 1043 rctx->assoclen + alen); in tegra_ccm_format_blocks() 1102 sg, rctx->assoclen, in tegra_ccm_compute_auth() 1166 rctx->assoclen = req->assoclen; in tegra_ccm_crypt_init() 1279 rctx->assoclen = req->assoclen; in tegra_gcm_do_one_req() [all …]
|
| /drivers/crypto/inside-secure/eip93/ |
| A D | eip93-aead.c | 181 req->assoclen / sizeof(u32)); in eip93_aead_setassoc() 183 ctx->assoclen = req->assoclen; in eip93_aead_setassoc() 202 rctx->assoclen = req->assoclen; in eip93_aead_crypt() 228 if (req->assoclen != ctx->assoclen) { in eip93_aead_encrypt() 252 if (req->assoclen != ctx->assoclen) { in eip93_aead_decrypt()
|
| A D | eip93-cipher.h | 22 unsigned int assoclen; member 33 unsigned int assoclen; member
|
| A D | eip93-common.c | 208 u32 totlen_src = rctx->assoclen + rctx->textsize; in check_valid_request() 209 u32 totlen_dst = rctx->assoclen + rctx->textsize; in check_valid_request() 522 u32 datalen = rctx->assoclen + rctx->textsize; in eip93_send_req() 643 u32 len = rctx->assoclen + rctx->textsize; in eip93_unmap_dma()
|
| /drivers/crypto/intel/keembay/ |
| A D | keembay-ocs-aes-core.c | 653 aead_request_set_ad(subreq, req->assoclen); in kmb_ocs_aead_common() 731 req->assoclen + req->cryptlen); in kmb_ocs_aead_dma_prepare() 751 dst_size = req->assoclen + out_size; in kmb_ocs_aead_dma_prepare() 759 tag_size, req->assoclen + in_size); in kmb_ocs_aead_dma_prepare() 802 &rctx->aad_dst_dll, req->assoclen, in kmb_ocs_aead_dma_prepare() 810 out_size, req->assoclen); in kmb_ocs_aead_dma_prepare() 836 req->assoclen, 0); in kmb_ocs_aead_dma_prepare() 842 req->assoclen); in kmb_ocs_aead_dma_prepare() 862 req->assoclen, 0); in kmb_ocs_aead_dma_prepare() 870 req->assoclen); in kmb_ocs_aead_dma_prepare() [all …]
|
| /drivers/crypto/marvell/octeontx2/ |
| A D | otx2_cptvf_algs.c | 979 rctx->ctrl_word.e.enc_data_offset = req->assoclen; in create_aead_ctx_hdr() 983 if (req->assoclen > 248 || !IS_ALIGNED(req->assoclen, 8)) in create_aead_ctx_hdr() 1003 if (crypto_ipsec_check_assoclen(req->assoclen)) in create_aead_ctx_hdr() 1029 req_info->req.param2 = req->cryptlen + req->assoclen; in create_aead_ctx_hdr() 1088 u32 inputlen = req->cryptlen + req->assoclen; in create_aead_input_list() 1110 outputlen = req->cryptlen + req->assoclen - mac_len; in create_aead_output_list() 1124 inputlen = req->cryptlen + req->assoclen; in create_aead_null_input_list() 1126 inputlen = req->cryptlen + req->assoclen - mac_len; in create_aead_null_input_list() 1144 inputlen = req->cryptlen + req->assoclen; in create_aead_null_output_list() 1146 inputlen = req->cryptlen + req->assoclen - mac_len; in create_aead_null_output_list() [all …]
|
| /drivers/crypto/inside-secure/ |
| A D | safexcel_cipher.c | 164 u32 cryptlen, u32 assoclen, u32 digestsize) in safexcel_aead_token() argument 826 assoclen, digestsize); in safexcel_send_req() 844 if (assoclen) { in safexcel_send_req() 845 if (assoclen >= len) { in safexcel_send_req() 846 assoclen -= len; in safexcel_send_req() 851 assoclen, in safexcel_send_req() 852 len - assoclen); in safexcel_send_req() 853 assoclen = 0; in safexcel_send_req() 1087 req->cryptlen, req->assoclen, in safexcel_aead_send() 3558 if (req->assoclen != 16 && req->assoclen != 20) in safexcel_rfc4309_ccm_encrypt() [all …]
|
| /drivers/crypto/marvell/octeontx/ |
| A D | otx_cptvf_algs.c | 896 rctx->ctrl_word.e.enc_data_offset = req->assoclen; in create_aead_ctx_hdr() 924 rctx->ctrl_word.e.iv_offset = req->assoclen - AES_GCM_IV_OFFSET; in create_aead_ctx_hdr() 940 req_info->req.param2 = req->cryptlen + req->assoclen; in create_aead_ctx_hdr() 944 req_info->req.param2 = req->cryptlen + req->assoclen - mac_len; in create_aead_ctx_hdr() 1001 u32 inputlen = req->cryptlen + req->assoclen; in create_aead_input_list() 1021 outputlen = req->cryptlen + req->assoclen + mac_len; in create_aead_output_list() 1023 outputlen = req->cryptlen + req->assoclen - mac_len; in create_aead_output_list() 1039 inputlen = req->cryptlen + req->assoclen; in create_aead_null_input_list() 1041 inputlen = req->cryptlen + req->assoclen - mac_len; in create_aead_null_input_list() 1061 inputlen = req->cryptlen + req->assoclen; in create_aead_null_output_list() [all …]
|
| /drivers/crypto/amcc/ |
| A D | crypto4xx_alg.c | 324 if (req->assoclen & 0x3 || req->assoclen > 1020) in crypto4xx_aead_need_fallback() 344 aead_request_set_ad(subreq, req->assoclen); in crypto4xx_aead_fallback() 453 sa, ctx->sa_len, req->assoclen, rctx->dst); in crypto4xx_crypt_aes_ccm() 590 ctx->sa_len, req->assoclen, rctx->dst); in crypto4xx_crypt_aes_gcm()
|
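crypto4xx_aead_fallback() (and the similar paths in the bcm, starfive, and keembay entries) hands requests the hardware cannot process to a software AEAD: the sub-request gets the same src/dst/cryptlen/iv and, crucially, the same assoclen via aead_request_set_ad(). A hedged sketch of that pattern; the fallback_tfm parameter and function name are placeholders, not the driver's actual names:

```c
#include <crypto/aead.h>

/*
 * Forward an AEAD request to a software fallback transform. The
 * sub-request must mirror the original request exactly, including
 * req->assoclen, or the fallback will authenticate the wrong bytes.
 */
static int aead_fallback_crypt(struct aead_request *req,
			       struct crypto_aead *fallback_tfm,
			       struct aead_request *subreq,
			       bool decrypt)
{
	aead_request_set_tfm(subreq, fallback_tfm);
	aead_request_set_callback(subreq, req->base.flags,
				  req->base.complete, req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst,
			       req->cryptlen, req->iv);
	aead_request_set_ad(subreq, req->assoclen);

	return decrypt ? crypto_aead_decrypt(subreq)
		       : crypto_aead_encrypt(subreq);
}
```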
| A D | crypto4xx_core.c | 571 aead_req->assoclen + aead_req->cryptlen - in crypto4xx_aead_done() 650 const unsigned int assoclen, in crypto4xx_build_pd() argument 686 tmp = sg_nents_for_len(src, assoclen + datalen); in crypto4xx_build_pd() 695 if (assoclen) { in crypto4xx_build_pd() 696 nbytes += assoclen; in crypto4xx_build_pd() 697 dst = scatterwalk_ffwd(_dst, dst, assoclen); in crypto4xx_build_pd() 787 sa->sa_command_1.bf.hash_crypto_offset = (assoclen >> 2); in crypto4xx_build_pd() 885 pd->pd_ctl_len.w = 0x00400000 | (assoclen + datalen); in crypto4xx_build_pd()
|
| /drivers/crypto/hisilicon/sec2/ |
| A D | sec_crypto.c | 973 copy_size = aead_req->cryptlen + aead_req->assoclen; in GEN_SEC_SETKEY_FUNC() 1017 copy_size = c_req->c_len + aead_req->assoclen; in sec_cipher_pbuf_unmap() 1703 if (aead_req->assoclen) in set_aead_auth_iv() 1766 sec_sqe->type2.alen_ivllen = cpu_to_le32(aq->assoclen); in sec_auth_bd_fill_xcm() 1794 sqe3->a_len_key = cpu_to_le32(aq->assoclen); in sec_auth_bd_fill_xcm_v3() 1796 sqe3->cipher_src_offset = cpu_to_le16((u16)aq->assoclen); in sec_auth_bd_fill_xcm_v3() 1883 sqe3->cipher_src_offset = cpu_to_le16((u16)aq->assoclen); in sec_auth_bd_fill_ex_v3() 1924 authsize, a_req->cryptlen + a_req->assoclen); in sec_aead_callback() 2453 req->assoclen > SEC_MAX_AAD_LEN)) in sec_aead_spec_check() 2457 if (unlikely(req->assoclen > SEC_MAX_CCM_AAD_LEN)) in sec_aead_spec_check() [all …]
|
| /drivers/crypto/bcm/ |
| A D | cipher.c | 1320 aead_parms.assoc_size = req->assoclen; in handle_aead_req() 1422 req->assoclen + rctx->total_sent - in handle_aead_req() 1547 if (req->assoclen) in handle_aead_resp() 1549 req->assoclen); in handle_aead_resp() 2522 (req->assoclen == 0)) { in aead_need_fallback() 2546 (req->assoclen == 0)) { in aead_need_fallback() 2559 req->assoclen != 16 && req->assoclen != 20) { in aead_need_fallback() 2567 payload_len += req->assoclen; in aead_need_fallback() 2596 aead_request_set_ad(subreq, req->assoclen); in aead_do_fallback() 2611 if (req->assoclen > MAX_ASSOC_SIZE) { in aead_enqueue() [all …]
|
| /drivers/crypto/caam/ |
| A D | caamalg.c | 1101 append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen, in init_aead_job() 1124 req->assoclen + req->cryptlen + authsize, in init_aead_job() 1128 req->assoclen + req->cryptlen - authsize, in init_aead_job() 1144 append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen); in init_gcm_job() 1148 if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen)) in init_gcm_job() 1168 unsigned int assoclen = req->assoclen; in init_chachapoly_job() local 1182 assoclen -= ivsize; in init_chachapoly_job() 1185 append_math_add_imm_u32(desc, REG3, ZERO, IMM, assoclen); in init_chachapoly_job() 1235 append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen); in init_authenc_job() 1322 src_len = req->assoclen + req->cryptlen; in aead_edesc_alloc() [all …]
|
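The two lengths visible in caamalg.c's init_aead_job() above encode the general AEAD output-size rule: on encryption the authentication tag is appended after the ciphertext, while on decryption req->cryptlen already includes the tag, so the written payload shrinks by authsize; in both cases the destination also spans the assoclen-byte AAD region at the front. A small sketch of that arithmetic (the helper name is illustrative):

```c
#include <stdbool.h>

/* Bytes the engine writes through the destination scatterlist,
 * counting the untouched AAD region at the front. */
static unsigned int aead_dst_len(unsigned int assoclen,
				 unsigned int cryptlen,
				 unsigned int authsize,
				 bool encrypt)
{
	return encrypt ? assoclen + cryptlen + authsize
		       : assoclen + cryptlen - authsize;
}
```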