Lines Matching refs:req

70 static inline u64 safexcel_queued_len(struct safexcel_ahash_req *req) in safexcel_queued_len() argument
72 return req->len - req->processed; in safexcel_queued_len()
109 struct safexcel_ahash_req *req, in safexcel_context_control() argument
123 if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM)) { in safexcel_context_control()
124 if (req->xcbcmac) in safexcel_context_control()
127 memcpy(ctx->base.ctxr->data, req->state, req->state_sz); in safexcel_context_control()
129 if (!req->finish && req->xcbcmac) in safexcel_context_control()
134 CONTEXT_CONTROL_SIZE(req->state_sz / in safexcel_context_control()
140 CONTEXT_CONTROL_SIZE(req->state_sz / in safexcel_context_control()
143 } else if (!req->processed) { in safexcel_context_control()
145 if (req->finish) in safexcel_context_control()
146 cdesc->control_data.control0 |= req->digest | in safexcel_context_control()
152 cdesc->control_data.control0 |= req->digest | in safexcel_context_control()
162 memcpy(ctx->base.ctxr->data, req->state, req->state_sz); in safexcel_context_control()
164 if (req->finish) { in safexcel_context_control()
166 if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) || in safexcel_context_control()
167 req->hmac_zlen || (req->processed != req->block_sz)) { in safexcel_context_control()
168 count = req->processed / EIP197_COUNTER_BLOCK_SIZE; in safexcel_context_control()
182 if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) || in safexcel_context_control()
184 req->hmac_zlen || in safexcel_context_control()
186 (req->processed != req->block_sz)) { in safexcel_context_control()
189 CONTEXT_CONTROL_SIZE((req->state_sz >> 2) + 1) | in safexcel_context_control()
193 if (req->hmac_zlen) in safexcel_context_control()
198 ctx->base.ctxr->data[req->state_sz >> 2] = in safexcel_context_control()
200 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED; in safexcel_context_control()
203 req->hmac_zlen = false; in safexcel_context_control()
206 memcpy(ctx->base.ctxr->data + (req->state_sz >> 2), in safexcel_context_control()
207 &ctx->base.opad, req->state_sz); in safexcel_context_control()
211 CONTEXT_CONTROL_SIZE(req->state_sz >> 1) | in safexcel_context_control()
217 CONTEXT_CONTROL_SIZE(req->state_sz >> 2) | in safexcel_context_control()
311 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq); in safexcel_ahash_send_req() local
321 queued = safexcel_queued_len(req); in safexcel_ahash_send_req()
327 if (!req->finish && !req->last_req) { in safexcel_ahash_send_req()
340 req->cache_next, extra, in safexcel_ahash_send_req()
354 if (unlikely(req->xcbcmac && req->processed > AES_BLOCK_SIZE)) { in safexcel_ahash_send_req()
367 req->cache + cache_len, in safexcel_ahash_send_req()
371 memset(req->cache + cache_len + skip, 0, extra); in safexcel_ahash_send_req()
374 req->cache[cache_len + skip] = 0x80; in safexcel_ahash_send_req()
377 u32 *cache = (void *)req->cache; in safexcel_ahash_send_req()
390 crypto_xor(req->cache, (const u8 *)req->state, AES_BLOCK_SIZE); in safexcel_ahash_send_req()
396 req->cache_dma = dma_map_single(priv->dev, req->cache, in safexcel_ahash_send_req()
398 if (dma_mapping_error(priv->dev, req->cache_dma)) in safexcel_ahash_send_req()
401 req->cache_sz = cache_len; in safexcel_ahash_send_req()
404 req->cache_dma, cache_len, in safexcel_ahash_send_req()
419 req->nents = dma_map_sg(priv->dev, areq->src, in safexcel_ahash_send_req()
423 if (!req->nents) { in safexcel_ahash_send_req()
428 for_each_sg(areq->src, sg, req->nents, i) { in safexcel_ahash_send_req()
463 safexcel_context_control(ctx, req, first_cdesc); in safexcel_ahash_send_req()
466 safexcel_hash_token(first_cdesc, len, req->digest_sz, ctx->cbcmac); in safexcel_ahash_send_req()
468 req->result_dma = dma_map_single(priv->dev, req->state, req->digest_sz, in safexcel_ahash_send_req()
470 if (dma_mapping_error(priv->dev, req->result_dma)) { in safexcel_ahash_send_req()
476 rdesc = safexcel_add_rdesc(priv, ring, 1, 1, req->result_dma, in safexcel_ahash_send_req()
477 req->digest_sz); in safexcel_ahash_send_req()
485 req->processed += len - extra; in safexcel_ahash_send_req()
492 dma_unmap_single(priv->dev, req->result_dma, req->digest_sz, in safexcel_ahash_send_req()
495 if (req->nents) { in safexcel_ahash_send_req()
499 req->nents = 0; in safexcel_ahash_send_req()
505 if (req->cache_dma) { in safexcel_ahash_send_req()
506 dma_unmap_single(priv->dev, req->cache_dma, req->cache_sz, in safexcel_ahash_send_req()
508 req->cache_dma = 0; in safexcel_ahash_send_req()
509 req->cache_sz = 0; in safexcel_ahash_send_req()
570 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq); in safexcel_handle_result() local
573 BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && req->needs_inv); in safexcel_handle_result()
575 if (req->needs_inv) { in safexcel_handle_result()
576 req->needs_inv = false; in safexcel_handle_result()
609 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq); in safexcel_ahash_send() local
612 if (req->needs_inv) in safexcel_ahash_send()
624 EIP197_REQUEST_ON_STACK(req, ahash, EIP197_AHASH_REQ_SIZE); in safexcel_ahash_exit_inv()
625 struct safexcel_ahash_req *rctx = ahash_request_ctx_dma(req); in safexcel_ahash_exit_inv()
630 memset(req, 0, EIP197_AHASH_REQ_SIZE); in safexcel_ahash_exit_inv()
634 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, in safexcel_ahash_exit_inv()
637 ahash_request_set_tfm(req, __crypto_ahash_cast(tfm)); in safexcel_ahash_exit_inv()
638 ctx = crypto_tfm_ctx(req->base.tfm); in safexcel_ahash_exit_inv()
643 crypto_enqueue_request(&priv->ring[ring].queue, &req->base); in safexcel_ahash_exit_inv()
664 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq); in safexcel_ahash_cache() local
670 cache_len = safexcel_queued_len(req); in safexcel_ahash_cache()
678 req->cache + cache_len, in safexcel_ahash_cache()
690 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq); in safexcel_ahash_enqueue() local
694 req->needs_inv = false; in safexcel_ahash_enqueue()
699 ((req->not_first && !req->xcbcmac) || in safexcel_ahash_enqueue()
701 memcmp(ctx->base.ctxr->data, req->state, req->state_sz) || in safexcel_ahash_enqueue()
703 (req->finish && req->hmac && in safexcel_ahash_enqueue()
704 memcmp(ctx->base.ctxr->data + (req->state_sz>>2), in safexcel_ahash_enqueue()
705 &ctx->base.opad, req->state_sz)))) in safexcel_ahash_enqueue()
716 req->needs_inv = true; in safexcel_ahash_enqueue()
726 req->not_first = true; in safexcel_ahash_enqueue()
742 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq); in safexcel_ahash_update() local
753 req->len += areq->nbytes; in safexcel_ahash_update()
759 if ((ret && !req->finish) || req->last_req) in safexcel_ahash_update()
767 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq); in safexcel_ahash_final() local
770 req->finish = true; in safexcel_ahash_final()
772 if (unlikely(!req->len && !areq->nbytes)) { in safexcel_ahash_final()
802 } else if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM && in safexcel_ahash_final()
804 req->len == sizeof(u32) && !areq->nbytes)) { in safexcel_ahash_final()
808 } else if (unlikely(ctx->cbcmac && req->len == AES_BLOCK_SIZE && in safexcel_ahash_final()
813 } else if (unlikely(req->xcbcmac && req->len == AES_BLOCK_SIZE && in safexcel_ahash_final()
827 } else if (unlikely(req->hmac && in safexcel_ahash_final()
828 (req->len == req->block_sz) && in safexcel_ahash_final()
838 memset(req->cache, 0, req->block_sz); in safexcel_ahash_final()
840 req->cache[0] = 0x80; in safexcel_ahash_final()
842 if (req->len_is_le) { in safexcel_ahash_final()
844 req->cache[req->block_sz-8] = (req->block_sz << 3) & in safexcel_ahash_final()
846 req->cache[req->block_sz-7] = (req->block_sz >> 5); in safexcel_ahash_final()
849 req->cache[req->block_sz-2] = (req->block_sz >> 5); in safexcel_ahash_final()
850 req->cache[req->block_sz-1] = (req->block_sz << 3) & in safexcel_ahash_final()
854 req->len += req->block_sz; /* plus 1 hash block */ in safexcel_ahash_final()
857 req->hmac_zlen = true; in safexcel_ahash_final()
860 req->digest = CONTEXT_CONTROL_DIGEST_HMAC; in safexcel_ahash_final()
861 } else if (req->hmac) { in safexcel_ahash_final()
863 req->digest = CONTEXT_CONTROL_DIGEST_HMAC; in safexcel_ahash_final()
871 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq); in safexcel_ahash_finup() local
873 req->finish = true; in safexcel_ahash_finup()
881 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq); in safexcel_ahash_export() local
884 export->len = req->len; in safexcel_ahash_export()
885 export->processed = req->processed; in safexcel_ahash_export()
887 export->digest = req->digest; in safexcel_ahash_export()
889 memcpy(export->state, req->state, req->state_sz); in safexcel_ahash_export()
890 memcpy(export->cache, req->cache, HASH_CACHE_SIZE); in safexcel_ahash_export()
897 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq); in safexcel_ahash_import() local
905 req->len = export->len; in safexcel_ahash_import()
906 req->processed = export->processed; in safexcel_ahash_import()
908 req->digest = export->digest; in safexcel_ahash_import()
910 memcpy(req->cache, export->cache, HASH_CACHE_SIZE); in safexcel_ahash_import()
911 memcpy(req->state, export->state, req->state_sz); in safexcel_ahash_import()
936 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq); in safexcel_sha1_init() local
938 memset(req, 0, sizeof(*req)); in safexcel_sha1_init()
941 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED; in safexcel_sha1_init()
942 req->state_sz = SHA1_DIGEST_SIZE; in safexcel_sha1_init()
943 req->digest_sz = SHA1_DIGEST_SIZE; in safexcel_sha1_init()
944 req->block_sz = SHA1_BLOCK_SIZE; in safexcel_sha1_init()
1013 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq); in safexcel_hmac_sha1_init() local
1015 memset(req, 0, sizeof(*req)); in safexcel_hmac_sha1_init()
1018 memcpy(req->state, &ctx->base.ipad, SHA1_DIGEST_SIZE); in safexcel_hmac_sha1_init()
1020 req->len = SHA1_BLOCK_SIZE; in safexcel_hmac_sha1_init()
1021 req->processed = SHA1_BLOCK_SIZE; in safexcel_hmac_sha1_init()
1024 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED; in safexcel_hmac_sha1_init()
1025 req->state_sz = SHA1_DIGEST_SIZE; in safexcel_hmac_sha1_init()
1026 req->digest_sz = SHA1_DIGEST_SIZE; in safexcel_hmac_sha1_init()
1027 req->block_sz = SHA1_BLOCK_SIZE; in safexcel_hmac_sha1_init()
1028 req->hmac = true; in safexcel_hmac_sha1_init()
1090 struct safexcel_ahash_req *req; in safexcel_hmac_init_iv() local
1104 req = ahash_request_ctx_dma(areq); in safexcel_hmac_init_iv()
1105 req->hmac = true; in safexcel_hmac_init_iv()
1106 req->last_req = true; in safexcel_hmac_init_iv()
1239 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq); in safexcel_sha256_init() local
1241 memset(req, 0, sizeof(*req)); in safexcel_sha256_init()
1244 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED; in safexcel_sha256_init()
1245 req->state_sz = SHA256_DIGEST_SIZE; in safexcel_sha256_init()
1246 req->digest_sz = SHA256_DIGEST_SIZE; in safexcel_sha256_init()
1247 req->block_sz = SHA256_BLOCK_SIZE; in safexcel_sha256_init()
1296 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq); in safexcel_sha224_init() local
1298 memset(req, 0, sizeof(*req)); in safexcel_sha224_init()
1301 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED; in safexcel_sha224_init()
1302 req->state_sz = SHA256_DIGEST_SIZE; in safexcel_sha224_init()
1303 req->digest_sz = SHA256_DIGEST_SIZE; in safexcel_sha224_init()
1304 req->block_sz = SHA256_BLOCK_SIZE; in safexcel_sha224_init()
1360 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq); in safexcel_hmac_sha224_init() local
1362 memset(req, 0, sizeof(*req)); in safexcel_hmac_sha224_init()
1365 memcpy(req->state, &ctx->base.ipad, SHA256_DIGEST_SIZE); in safexcel_hmac_sha224_init()
1367 req->len = SHA256_BLOCK_SIZE; in safexcel_hmac_sha224_init()
1368 req->processed = SHA256_BLOCK_SIZE; in safexcel_hmac_sha224_init()
1371 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED; in safexcel_hmac_sha224_init()
1372 req->state_sz = SHA256_DIGEST_SIZE; in safexcel_hmac_sha224_init()
1373 req->digest_sz = SHA256_DIGEST_SIZE; in safexcel_hmac_sha224_init()
1374 req->block_sz = SHA256_BLOCK_SIZE; in safexcel_hmac_sha224_init()
1375 req->hmac = true; in safexcel_hmac_sha224_init()
1432 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq); in safexcel_hmac_sha256_init() local
1434 memset(req, 0, sizeof(*req)); in safexcel_hmac_sha256_init()
1437 memcpy(req->state, &ctx->base.ipad, SHA256_DIGEST_SIZE); in safexcel_hmac_sha256_init()
1439 req->len = SHA256_BLOCK_SIZE; in safexcel_hmac_sha256_init()
1440 req->processed = SHA256_BLOCK_SIZE; in safexcel_hmac_sha256_init()
1443 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED; in safexcel_hmac_sha256_init()
1444 req->state_sz = SHA256_DIGEST_SIZE; in safexcel_hmac_sha256_init()
1445 req->digest_sz = SHA256_DIGEST_SIZE; in safexcel_hmac_sha256_init()
1446 req->block_sz = SHA256_BLOCK_SIZE; in safexcel_hmac_sha256_init()
1447 req->hmac = true; in safexcel_hmac_sha256_init()
1497 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq); in safexcel_sha512_init() local
1499 memset(req, 0, sizeof(*req)); in safexcel_sha512_init()
1502 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED; in safexcel_sha512_init()
1503 req->state_sz = SHA512_DIGEST_SIZE; in safexcel_sha512_init()
1504 req->digest_sz = SHA512_DIGEST_SIZE; in safexcel_sha512_init()
1505 req->block_sz = SHA512_BLOCK_SIZE; in safexcel_sha512_init()
1554 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq); in safexcel_sha384_init() local
1556 memset(req, 0, sizeof(*req)); in safexcel_sha384_init()
1559 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED; in safexcel_sha384_init()
1560 req->state_sz = SHA512_DIGEST_SIZE; in safexcel_sha384_init()
1561 req->digest_sz = SHA512_DIGEST_SIZE; in safexcel_sha384_init()
1562 req->block_sz = SHA512_BLOCK_SIZE; in safexcel_sha384_init()
1618 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq); in safexcel_hmac_sha512_init() local
1620 memset(req, 0, sizeof(*req)); in safexcel_hmac_sha512_init()
1623 memcpy(req->state, &ctx->base.ipad, SHA512_DIGEST_SIZE); in safexcel_hmac_sha512_init()
1625 req->len = SHA512_BLOCK_SIZE; in safexcel_hmac_sha512_init()
1626 req->processed = SHA512_BLOCK_SIZE; in safexcel_hmac_sha512_init()
1629 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED; in safexcel_hmac_sha512_init()
1630 req->state_sz = SHA512_DIGEST_SIZE; in safexcel_hmac_sha512_init()
1631 req->digest_sz = SHA512_DIGEST_SIZE; in safexcel_hmac_sha512_init()
1632 req->block_sz = SHA512_BLOCK_SIZE; in safexcel_hmac_sha512_init()
1633 req->hmac = true; in safexcel_hmac_sha512_init()
1690 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq); in safexcel_hmac_sha384_init() local
1692 memset(req, 0, sizeof(*req)); in safexcel_hmac_sha384_init()
1695 memcpy(req->state, &ctx->base.ipad, SHA512_DIGEST_SIZE); in safexcel_hmac_sha384_init()
1697 req->len = SHA512_BLOCK_SIZE; in safexcel_hmac_sha384_init()
1698 req->processed = SHA512_BLOCK_SIZE; in safexcel_hmac_sha384_init()
1701 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED; in safexcel_hmac_sha384_init()
1702 req->state_sz = SHA512_DIGEST_SIZE; in safexcel_hmac_sha384_init()
1703 req->digest_sz = SHA512_DIGEST_SIZE; in safexcel_hmac_sha384_init()
1704 req->block_sz = SHA512_BLOCK_SIZE; in safexcel_hmac_sha384_init()
1705 req->hmac = true; in safexcel_hmac_sha384_init()
1755 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq); in safexcel_md5_init() local
1757 memset(req, 0, sizeof(*req)); in safexcel_md5_init()
1760 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED; in safexcel_md5_init()
1761 req->state_sz = MD5_DIGEST_SIZE; in safexcel_md5_init()
1762 req->digest_sz = MD5_DIGEST_SIZE; in safexcel_md5_init()
1763 req->block_sz = MD5_HMAC_BLOCK_SIZE; in safexcel_md5_init()
1812 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq); in safexcel_hmac_md5_init() local
1814 memset(req, 0, sizeof(*req)); in safexcel_hmac_md5_init()
1817 memcpy(req->state, &ctx->base.ipad, MD5_DIGEST_SIZE); in safexcel_hmac_md5_init()
1819 req->len = MD5_HMAC_BLOCK_SIZE; in safexcel_hmac_md5_init()
1820 req->processed = MD5_HMAC_BLOCK_SIZE; in safexcel_hmac_md5_init()
1823 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED; in safexcel_hmac_md5_init()
1824 req->state_sz = MD5_DIGEST_SIZE; in safexcel_hmac_md5_init()
1825 req->digest_sz = MD5_DIGEST_SIZE; in safexcel_hmac_md5_init()
1826 req->block_sz = MD5_HMAC_BLOCK_SIZE; in safexcel_hmac_md5_init()
1827 req->len_is_le = true; /* MD5 is little endian! ... */ in safexcel_hmac_md5_init()
1828 req->hmac = true; in safexcel_hmac_md5_init()
1885 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq); in safexcel_cbcmac_init() local
1887 memset(req, 0, sizeof(*req)); in safexcel_cbcmac_init()
1890 memcpy(req->state, &ctx->base.ipad, ctx->key_sz); in safexcel_cbcmac_init()
1892 req->len = AES_BLOCK_SIZE; in safexcel_cbcmac_init()
1893 req->processed = AES_BLOCK_SIZE; in safexcel_cbcmac_init()
1895 req->digest = CONTEXT_CONTROL_DIGEST_XCM; in safexcel_cbcmac_init()
1896 req->state_sz = ctx->key_sz; in safexcel_cbcmac_init()
1897 req->digest_sz = AES_BLOCK_SIZE; in safexcel_cbcmac_init()
1898 req->block_sz = AES_BLOCK_SIZE; in safexcel_cbcmac_init()
1899 req->xcbcmac = true; in safexcel_cbcmac_init()
2145 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq); in safexcel_sm3_init() local
2147 memset(req, 0, sizeof(*req)); in safexcel_sm3_init()
2150 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED; in safexcel_sm3_init()
2151 req->state_sz = SM3_DIGEST_SIZE; in safexcel_sm3_init()
2152 req->digest_sz = SM3_DIGEST_SIZE; in safexcel_sm3_init()
2153 req->block_sz = SM3_BLOCK_SIZE; in safexcel_sm3_init()
2209 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq); in safexcel_hmac_sm3_init() local
2211 memset(req, 0, sizeof(*req)); in safexcel_hmac_sm3_init()
2214 memcpy(req->state, &ctx->base.ipad, SM3_DIGEST_SIZE); in safexcel_hmac_sm3_init()
2216 req->len = SM3_BLOCK_SIZE; in safexcel_hmac_sm3_init()
2217 req->processed = SM3_BLOCK_SIZE; in safexcel_hmac_sm3_init()
2220 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED; in safexcel_hmac_sm3_init()
2221 req->state_sz = SM3_DIGEST_SIZE; in safexcel_hmac_sm3_init()
2222 req->digest_sz = SM3_DIGEST_SIZE; in safexcel_hmac_sm3_init()
2223 req->block_sz = SM3_BLOCK_SIZE; in safexcel_hmac_sm3_init()
2224 req->hmac = true; in safexcel_hmac_sm3_init()
2275 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq); in safexcel_sha3_224_init() local
2277 memset(req, 0, sizeof(*req)); in safexcel_sha3_224_init()
2280 req->digest = CONTEXT_CONTROL_DIGEST_INITIAL; in safexcel_sha3_224_init()
2281 req->state_sz = SHA3_224_DIGEST_SIZE; in safexcel_sha3_224_init()
2282 req->digest_sz = SHA3_224_DIGEST_SIZE; in safexcel_sha3_224_init()
2283 req->block_sz = SHA3_224_BLOCK_SIZE; in safexcel_sha3_224_init()
2289 static int safexcel_sha3_fbcheck(struct ahash_request *req) in safexcel_sha3_fbcheck() argument
2291 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); in safexcel_sha3_fbcheck()
2293 struct ahash_request *subreq = ahash_request_ctx_dma(req); in safexcel_sha3_fbcheck()
2298 ahash_request_set_callback(subreq, req->base.flags, in safexcel_sha3_fbcheck()
2299 req->base.complete, req->base.data); in safexcel_sha3_fbcheck()
2300 ahash_request_set_crypt(subreq, req->src, req->result, in safexcel_sha3_fbcheck()
2301 req->nbytes); in safexcel_sha3_fbcheck()
2326 static int safexcel_sha3_update(struct ahash_request *req) in safexcel_sha3_update() argument
2328 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); in safexcel_sha3_update()
2330 struct ahash_request *subreq = ahash_request_ctx_dma(req); in safexcel_sha3_update()
2333 return safexcel_sha3_fbcheck(req) ?: crypto_ahash_update(subreq); in safexcel_sha3_update()
2336 static int safexcel_sha3_final(struct ahash_request *req) in safexcel_sha3_final() argument
2338 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); in safexcel_sha3_final()
2340 struct ahash_request *subreq = ahash_request_ctx_dma(req); in safexcel_sha3_final()
2343 return safexcel_sha3_fbcheck(req) ?: crypto_ahash_final(subreq); in safexcel_sha3_final()
2346 static int safexcel_sha3_finup(struct ahash_request *req) in safexcel_sha3_finup() argument
2348 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); in safexcel_sha3_finup()
2350 struct ahash_request *subreq = ahash_request_ctx_dma(req); in safexcel_sha3_finup()
2352 ctx->do_fallback |= !req->nbytes; in safexcel_sha3_finup()
2355 return safexcel_sha3_fbcheck(req) ?: in safexcel_sha3_finup()
2358 return safexcel_ahash_finup(req); in safexcel_sha3_finup()
2361 static int safexcel_sha3_digest_fallback(struct ahash_request *req) in safexcel_sha3_digest_fallback() argument
2363 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); in safexcel_sha3_digest_fallback()
2365 struct ahash_request *subreq = ahash_request_ctx_dma(req); in safexcel_sha3_digest_fallback()
2369 return safexcel_sha3_fbcheck(req) ?: crypto_ahash_finup(subreq); in safexcel_sha3_digest_fallback()
2372 static int safexcel_sha3_224_digest(struct ahash_request *req) in safexcel_sha3_224_digest() argument
2374 if (req->nbytes) in safexcel_sha3_224_digest()
2375 return safexcel_sha3_224_init(req) ?: safexcel_ahash_finup(req); in safexcel_sha3_224_digest()
2378 return safexcel_sha3_digest_fallback(req); in safexcel_sha3_224_digest()
2381 static int safexcel_sha3_export(struct ahash_request *req, void *out) in safexcel_sha3_export() argument
2383 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); in safexcel_sha3_export()
2385 struct ahash_request *subreq = ahash_request_ctx_dma(req); in safexcel_sha3_export()
2388 return safexcel_sha3_fbcheck(req) ?: crypto_ahash_export(subreq, out); in safexcel_sha3_export()
2391 static int safexcel_sha3_import(struct ahash_request *req, const void *in) in safexcel_sha3_import() argument
2393 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); in safexcel_sha3_import()
2395 struct ahash_request *subreq = ahash_request_ctx_dma(req); in safexcel_sha3_import()
2398 return safexcel_sha3_fbcheck(req) ?: crypto_ahash_import(subreq, in); in safexcel_sha3_import()
2469 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq); in safexcel_sha3_256_init() local
2471 memset(req, 0, sizeof(*req)); in safexcel_sha3_256_init()
2474 req->digest = CONTEXT_CONTROL_DIGEST_INITIAL; in safexcel_sha3_256_init()
2475 req->state_sz = SHA3_256_DIGEST_SIZE; in safexcel_sha3_256_init()
2476 req->digest_sz = SHA3_256_DIGEST_SIZE; in safexcel_sha3_256_init()
2477 req->block_sz = SHA3_256_BLOCK_SIZE; in safexcel_sha3_256_init()
2483 static int safexcel_sha3_256_digest(struct ahash_request *req) in safexcel_sha3_256_digest() argument
2485 if (req->nbytes) in safexcel_sha3_256_digest()
2486 return safexcel_sha3_256_init(req) ?: safexcel_ahash_finup(req); in safexcel_sha3_256_digest()
2489 return safexcel_sha3_digest_fallback(req); in safexcel_sha3_256_digest()
2527 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq); in safexcel_sha3_384_init() local
2529 memset(req, 0, sizeof(*req)); in safexcel_sha3_384_init()
2532 req->digest = CONTEXT_CONTROL_DIGEST_INITIAL; in safexcel_sha3_384_init()
2533 req->state_sz = SHA3_384_DIGEST_SIZE; in safexcel_sha3_384_init()
2534 req->digest_sz = SHA3_384_DIGEST_SIZE; in safexcel_sha3_384_init()
2535 req->block_sz = SHA3_384_BLOCK_SIZE; in safexcel_sha3_384_init()
2541 static int safexcel_sha3_384_digest(struct ahash_request *req) in safexcel_sha3_384_digest() argument
2543 if (req->nbytes) in safexcel_sha3_384_digest()
2544 return safexcel_sha3_384_init(req) ?: safexcel_ahash_finup(req); in safexcel_sha3_384_digest()
2547 return safexcel_sha3_digest_fallback(req); in safexcel_sha3_384_digest()
2585 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq); in safexcel_sha3_512_init() local
2587 memset(req, 0, sizeof(*req)); in safexcel_sha3_512_init()
2590 req->digest = CONTEXT_CONTROL_DIGEST_INITIAL; in safexcel_sha3_512_init()
2591 req->state_sz = SHA3_512_DIGEST_SIZE; in safexcel_sha3_512_init()
2592 req->digest_sz = SHA3_512_DIGEST_SIZE; in safexcel_sha3_512_init()
2593 req->block_sz = SHA3_512_BLOCK_SIZE; in safexcel_sha3_512_init()
2599 static int safexcel_sha3_512_digest(struct ahash_request *req) in safexcel_sha3_512_digest() argument
2601 if (req->nbytes) in safexcel_sha3_512_digest()
2602 return safexcel_sha3_512_init(req) ?: safexcel_ahash_finup(req); in safexcel_sha3_512_digest()
2605 return safexcel_sha3_digest_fallback(req); in safexcel_sha3_512_digest()
2735 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq); in safexcel_hmac_sha3_224_init() local
2737 memset(req, 0, sizeof(*req)); in safexcel_hmac_sha3_224_init()
2740 memcpy(req->state, &ctx->base.ipad, SHA3_224_BLOCK_SIZE / 2); in safexcel_hmac_sha3_224_init()
2742 req->len = SHA3_224_BLOCK_SIZE; in safexcel_hmac_sha3_224_init()
2743 req->processed = SHA3_224_BLOCK_SIZE; in safexcel_hmac_sha3_224_init()
2745 req->digest = CONTEXT_CONTROL_DIGEST_HMAC; in safexcel_hmac_sha3_224_init()
2746 req->state_sz = SHA3_224_BLOCK_SIZE / 2; in safexcel_hmac_sha3_224_init()
2747 req->digest_sz = SHA3_224_DIGEST_SIZE; in safexcel_hmac_sha3_224_init()
2748 req->block_sz = SHA3_224_BLOCK_SIZE; in safexcel_hmac_sha3_224_init()
2749 req->hmac = true; in safexcel_hmac_sha3_224_init()
2755 static int safexcel_hmac_sha3_224_digest(struct ahash_request *req) in safexcel_hmac_sha3_224_digest() argument
2757 if (req->nbytes) in safexcel_hmac_sha3_224_digest()
2758 return safexcel_hmac_sha3_224_init(req) ?: in safexcel_hmac_sha3_224_digest()
2759 safexcel_ahash_finup(req); in safexcel_hmac_sha3_224_digest()
2762 return safexcel_sha3_digest_fallback(req); in safexcel_hmac_sha3_224_digest()
2806 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq); in safexcel_hmac_sha3_256_init() local
2808 memset(req, 0, sizeof(*req)); in safexcel_hmac_sha3_256_init()
2811 memcpy(req->state, &ctx->base.ipad, SHA3_256_BLOCK_SIZE / 2); in safexcel_hmac_sha3_256_init()
2813 req->len = SHA3_256_BLOCK_SIZE; in safexcel_hmac_sha3_256_init()
2814 req->processed = SHA3_256_BLOCK_SIZE; in safexcel_hmac_sha3_256_init()
2816 req->digest = CONTEXT_CONTROL_DIGEST_HMAC; in safexcel_hmac_sha3_256_init()
2817 req->state_sz = SHA3_256_BLOCK_SIZE / 2; in safexcel_hmac_sha3_256_init()
2818 req->digest_sz = SHA3_256_DIGEST_SIZE; in safexcel_hmac_sha3_256_init()
2819 req->block_sz = SHA3_256_BLOCK_SIZE; in safexcel_hmac_sha3_256_init()
2820 req->hmac = true; in safexcel_hmac_sha3_256_init()
2826 static int safexcel_hmac_sha3_256_digest(struct ahash_request *req) in safexcel_hmac_sha3_256_digest() argument
2828 if (req->nbytes) in safexcel_hmac_sha3_256_digest()
2829 return safexcel_hmac_sha3_256_init(req) ?: in safexcel_hmac_sha3_256_digest()
2830 safexcel_ahash_finup(req); in safexcel_hmac_sha3_256_digest()
2833 return safexcel_sha3_digest_fallback(req); in safexcel_hmac_sha3_256_digest()
2877 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq); in safexcel_hmac_sha3_384_init() local
2879 memset(req, 0, sizeof(*req)); in safexcel_hmac_sha3_384_init()
2882 memcpy(req->state, &ctx->base.ipad, SHA3_384_BLOCK_SIZE / 2); in safexcel_hmac_sha3_384_init()
2884 req->len = SHA3_384_BLOCK_SIZE; in safexcel_hmac_sha3_384_init()
2885 req->processed = SHA3_384_BLOCK_SIZE; in safexcel_hmac_sha3_384_init()
2887 req->digest = CONTEXT_CONTROL_DIGEST_HMAC; in safexcel_hmac_sha3_384_init()
2888 req->state_sz = SHA3_384_BLOCK_SIZE / 2; in safexcel_hmac_sha3_384_init()
2889 req->digest_sz = SHA3_384_DIGEST_SIZE; in safexcel_hmac_sha3_384_init()
2890 req->block_sz = SHA3_384_BLOCK_SIZE; in safexcel_hmac_sha3_384_init()
2891 req->hmac = true; in safexcel_hmac_sha3_384_init()
2897 static int safexcel_hmac_sha3_384_digest(struct ahash_request *req) in safexcel_hmac_sha3_384_digest() argument
2899 if (req->nbytes) in safexcel_hmac_sha3_384_digest()
2900 return safexcel_hmac_sha3_384_init(req) ?: in safexcel_hmac_sha3_384_digest()
2901 safexcel_ahash_finup(req); in safexcel_hmac_sha3_384_digest()
2904 return safexcel_sha3_digest_fallback(req); in safexcel_hmac_sha3_384_digest()
2948 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq); in safexcel_hmac_sha3_512_init() local
2950 memset(req, 0, sizeof(*req)); in safexcel_hmac_sha3_512_init()
2953 memcpy(req->state, &ctx->base.ipad, SHA3_512_BLOCK_SIZE / 2); in safexcel_hmac_sha3_512_init()
2955 req->len = SHA3_512_BLOCK_SIZE; in safexcel_hmac_sha3_512_init()
2956 req->processed = SHA3_512_BLOCK_SIZE; in safexcel_hmac_sha3_512_init()
2958 req->digest = CONTEXT_CONTROL_DIGEST_HMAC; in safexcel_hmac_sha3_512_init()
2959 req->state_sz = SHA3_512_BLOCK_SIZE / 2; in safexcel_hmac_sha3_512_init()
2960 req->digest_sz = SHA3_512_DIGEST_SIZE; in safexcel_hmac_sha3_512_init()
2961 req->block_sz = SHA3_512_BLOCK_SIZE; in safexcel_hmac_sha3_512_init()
2962 req->hmac = true; in safexcel_hmac_sha3_512_init()
2968 static int safexcel_hmac_sha3_512_digest(struct ahash_request *req) in safexcel_hmac_sha3_512_digest() argument
2970 if (req->nbytes) in safexcel_hmac_sha3_512_digest()
2971 return safexcel_hmac_sha3_512_init(req) ?: in safexcel_hmac_sha3_512_digest()
2972 safexcel_ahash_finup(req); in safexcel_hmac_sha3_512_digest()
2975 return safexcel_sha3_digest_fallback(req); in safexcel_hmac_sha3_512_digest()