Lines matching refs: edesc (extended-descriptor usage in the DPAA2 CAAM crypto driver)
367 struct aead_edesc *edesc; in aead_edesc_alloc() local
376 edesc = qi_cache_zalloc(flags); in aead_edesc_alloc()
377 if (unlikely(!edesc)) { in aead_edesc_alloc()
390 qi_cache_free(edesc); in aead_edesc_alloc()
398 qi_cache_free(edesc); in aead_edesc_alloc()
407 qi_cache_free(edesc); in aead_edesc_alloc()
421 qi_cache_free(edesc); in aead_edesc_alloc()
435 qi_cache_free(edesc); in aead_edesc_alloc()
443 qi_cache_free(edesc); in aead_edesc_alloc()
473 sg_table = &edesc->sgt[0]; in aead_edesc_alloc()
481 qi_cache_free(edesc); in aead_edesc_alloc()
496 qi_cache_free(edesc); in aead_edesc_alloc()
501 edesc->src_nents = src_nents; in aead_edesc_alloc()
502 edesc->dst_nents = dst_nents; in aead_edesc_alloc()
503 edesc->iv_dma = iv_dma; in aead_edesc_alloc()
511 edesc->assoclen = cpu_to_caam32(req->assoclen - ivsize); in aead_edesc_alloc()
513 edesc->assoclen = cpu_to_caam32(req->assoclen); in aead_edesc_alloc()
514 edesc->assoclen_dma = dma_map_single(dev, &edesc->assoclen, 4, in aead_edesc_alloc()
516 if (dma_mapping_error(dev, edesc->assoclen_dma)) { in aead_edesc_alloc()
520 qi_cache_free(edesc); in aead_edesc_alloc()
524 dma_to_qm_sg_one(sg_table, edesc->assoclen_dma, 4, 0); in aead_edesc_alloc()
539 dma_unmap_single(dev, edesc->assoclen_dma, 4, DMA_TO_DEVICE); in aead_edesc_alloc()
542 qi_cache_free(edesc); in aead_edesc_alloc()
546 edesc->qm_sg_dma = qm_sg_dma; in aead_edesc_alloc()
547 edesc->qm_sg_bytes = qm_sg_bytes; in aead_edesc_alloc()
588 return edesc; in aead_edesc_alloc()
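The repeated qi_cache_free() calls above are the error-unwind spine of aead_edesc_alloc(): the extended descriptor is zero-allocated from the QI cache once, and every failing step afterwards releases it before returning. A minimal sketch of that shape, assuming the driver's internal struct aead_edesc and only the fields visible in this listing; aead_edesc_alloc_sketch() is a hypothetical name and the cleanup of earlier DMA mappings is omitted.

/* Sketch only: the real allocator also unmaps src/dst scatterlists, the
 * IV and the S/G table on each later failure path before freeing. */
static struct aead_edesc *aead_edesc_alloc_sketch(struct device *dev,
						  struct aead_request *req,
						  gfp_t flags)
{
	struct aead_edesc *edesc;

	edesc = qi_cache_zalloc(flags);
	if (unlikely(!edesc))
		return ERR_PTR(-ENOMEM);

	/* assoclen is stored via cpu_to_caam32() and mapped as a
	 * standalone 4-byte buffer so the S/G table can point at it. */
	edesc->assoclen = cpu_to_caam32(req->assoclen);
	edesc->assoclen_dma = dma_map_single(dev, &edesc->assoclen, 4,
					     DMA_TO_DEVICE);
	if (dma_mapping_error(dev, edesc->assoclen_dma)) {
		qi_cache_free(edesc);	/* unwind: release the cache entry */
		return ERR_PTR(-ENOMEM);
	}

	return edesc;
}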
1124 struct skcipher_edesc *edesc; in skcipher_edesc_alloc() local
1196 edesc = qi_cache_zalloc(flags); in skcipher_edesc_alloc()
1197 if (unlikely(!edesc)) { in skcipher_edesc_alloc()
1205 sg_table = &edesc->sgt[0]; in skcipher_edesc_alloc()
1214 qi_cache_free(edesc); in skcipher_edesc_alloc()
1218 edesc->src_nents = src_nents; in skcipher_edesc_alloc()
1219 edesc->dst_nents = dst_nents; in skcipher_edesc_alloc()
1220 edesc->iv_dma = iv_dma; in skcipher_edesc_alloc()
1221 edesc->qm_sg_bytes = qm_sg_bytes; in skcipher_edesc_alloc()
1232 edesc->qm_sg_dma = dma_map_single(dev, sg_table, edesc->qm_sg_bytes, in skcipher_edesc_alloc()
1234 if (dma_mapping_error(dev, edesc->qm_sg_dma)) { in skcipher_edesc_alloc()
1238 qi_cache_free(edesc); in skcipher_edesc_alloc()
1248 dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma); in skcipher_edesc_alloc()
1253 dpaa2_fl_set_addr(out_fle, edesc->qm_sg_dma + in skcipher_edesc_alloc()
1256 dpaa2_fl_set_addr(out_fle, edesc->qm_sg_dma + dst_sg_idx * in skcipher_edesc_alloc()
1259 return edesc; in skcipher_edesc_alloc()
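In skcipher_edesc_alloc() the listing shows one detail worth calling out: the S/G table lives inside the edesc itself (&edesc->sgt[0]), it is mapped with a single dma_map_single(), and both frame-list entries are pointed into that one mapping, with the output entry offset by dst_sg_idx entries in the out-of-place case. A rough sketch of just that step, assuming qm_sg_bytes and dst_sg_idx were computed earlier as in the driver; skcipher_map_sg_sketch() is a hypothetical helper.

/* Map the S/G table embedded in the edesc and aim the DPAA2 frame-list
 * entries at it.  Field names follow the listing; the rest is simplified. */
static int skcipher_map_sg_sketch(struct device *dev,
				  struct skcipher_edesc *edesc,
				  struct dpaa2_fl_entry *in_fle,
				  struct dpaa2_fl_entry *out_fle,
				  int dst_sg_idx)
{
	struct dpaa2_sg_entry *sg_table = &edesc->sgt[0];

	edesc->qm_sg_dma = dma_map_single(dev, sg_table, edesc->qm_sg_bytes,
					  DMA_TO_DEVICE);
	if (dma_mapping_error(dev, edesc->qm_sg_dma)) {
		qi_cache_free(edesc);
		return -ENOMEM;
	}

	/* Input always starts at the head of the table ... */
	dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma);
	/* ... output points dst_sg_idx entries further in. */
	dpaa2_fl_set_addr(out_fle, edesc->qm_sg_dma +
			  dst_sg_idx * sizeof(*sg_table));

	return 0;
}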
1262 static void aead_unmap(struct device *dev, struct aead_edesc *edesc, in aead_unmap() argument
1268 caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents, in aead_unmap()
1269 edesc->iv_dma, ivsize, DMA_TO_DEVICE, edesc->qm_sg_dma, in aead_unmap()
1270 edesc->qm_sg_bytes); in aead_unmap()
1271 dma_unmap_single(dev, edesc->assoclen_dma, 4, DMA_TO_DEVICE); in aead_unmap()
1274 static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc, in skcipher_unmap() argument
1280 caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents, in skcipher_unmap()
1281 edesc->iv_dma, ivsize, DMA_BIDIRECTIONAL, edesc->qm_sg_dma, in skcipher_unmap()
1282 edesc->qm_sg_bytes); in skcipher_unmap()
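The two unmap helpers differ only where the allocation paths differed: skcipher_unmap() unmaps the IV DMA_BIDIRECTIONAL (the next IV is read back after the operation), while aead_unmap() uses DMA_TO_DEVICE and additionally drops the 4-byte assoclen mapping that only the AEAD allocator created. For reference, the AEAD variant condensed from the lines above; ivsize is passed in here rather than derived from the tfm, purely to keep the sketch short.

static void aead_unmap_sketch(struct device *dev, struct aead_edesc *edesc,
			      struct aead_request *req, int ivsize)
{
	/* Shared teardown of src/dst scatterlists, IV and S/G table ... */
	caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents,
		   edesc->iv_dma, ivsize, DMA_TO_DEVICE, edesc->qm_sg_dma,
		   edesc->qm_sg_bytes);
	/* ... plus the assoclen word that only the AEAD path mapped. */
	dma_unmap_single(dev, edesc->assoclen_dma, 4, DMA_TO_DEVICE);
}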
1291 struct aead_edesc *edesc = req_ctx->edesc; in aead_encrypt_done() local
1301 aead_unmap(ctx->dev, edesc, req); in aead_encrypt_done()
1302 qi_cache_free(edesc); in aead_encrypt_done()
1312 struct aead_edesc *edesc = req_ctx->edesc; in aead_decrypt_done() local
1322 aead_unmap(ctx->dev, edesc, req); in aead_decrypt_done()
1323 qi_cache_free(edesc); in aead_decrypt_done()
1329 struct aead_edesc *edesc; in aead_encrypt() local
1336 edesc = aead_edesc_alloc(req, true); in aead_encrypt()
1337 if (IS_ERR(edesc)) in aead_encrypt()
1338 return PTR_ERR(edesc); in aead_encrypt()
1344 caam_req->edesc = edesc; in aead_encrypt()
1348 aead_unmap(ctx->dev, edesc, req); in aead_encrypt()
1349 qi_cache_free(edesc); in aead_encrypt()
1357 struct aead_edesc *edesc; in aead_decrypt() local
1364 edesc = aead_edesc_alloc(req, false); in aead_decrypt()
1365 if (IS_ERR(edesc)) in aead_decrypt()
1366 return PTR_ERR(edesc); in aead_decrypt()
1372 caam_req->edesc = edesc; in aead_decrypt()
1376 aead_unmap(ctx->dev, edesc, req); in aead_decrypt()
1377 qi_cache_free(edesc); in aead_decrypt()
1400 struct skcipher_edesc *edesc = req_ctx->edesc; in skcipher_encrypt_done() local
1411 edesc->src_nents > 1 ? 100 : ivsize, 1); in skcipher_encrypt_done()
1414 edesc->dst_nents > 1 ? 100 : req->cryptlen, 1); in skcipher_encrypt_done()
1416 skcipher_unmap(ctx->dev, edesc, req); in skcipher_encrypt_done()
1424 memcpy(req->iv, (u8 *)&edesc->sgt[0] + edesc->qm_sg_bytes, in skcipher_encrypt_done()
1427 qi_cache_free(edesc); in skcipher_encrypt_done()
1438 struct skcipher_edesc *edesc = req_ctx->edesc; in skcipher_decrypt_done() local
1449 edesc->src_nents > 1 ? 100 : ivsize, 1); in skcipher_decrypt_done()
1452 edesc->dst_nents > 1 ? 100 : req->cryptlen, 1); in skcipher_decrypt_done()
1454 skcipher_unmap(ctx->dev, edesc, req); in skcipher_decrypt_done()
1462 memcpy(req->iv, (u8 *)&edesc->sgt[0] + edesc->qm_sg_bytes, in skcipher_decrypt_done()
1465 qi_cache_free(edesc); in skcipher_decrypt_done()
1479 struct skcipher_edesc *edesc; in skcipher_encrypt() local
1508 edesc = skcipher_edesc_alloc(req); in skcipher_encrypt()
1509 if (IS_ERR(edesc)) in skcipher_encrypt()
1510 return PTR_ERR(edesc); in skcipher_encrypt()
1516 caam_req->edesc = edesc; in skcipher_encrypt()
1520 skcipher_unmap(ctx->dev, edesc, req); in skcipher_encrypt()
1521 qi_cache_free(edesc); in skcipher_encrypt()
1529 struct skcipher_edesc *edesc; in skcipher_decrypt() local
1558 edesc = skcipher_edesc_alloc(req); in skcipher_decrypt()
1559 if (IS_ERR(edesc)) in skcipher_decrypt()
1560 return PTR_ERR(edesc); in skcipher_decrypt()
1566 caam_req->edesc = edesc; in skcipher_decrypt()
1570 skcipher_unmap(ctx->dev, edesc, req); in skcipher_decrypt()
1571 qi_cache_free(edesc); in skcipher_decrypt()
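All four symmetric-cipher entry points follow the same ownership hand-off: allocate the edesc, park it in caam_req->edesc so the completion callback can find it, and enqueue; the callback unmaps and qi_cache_free()s it, and a synchronous enqueue failure makes the entry point do that cleanup itself. The skcipher callbacks additionally copy the chained IV out of the area just past the S/G table ((u8 *)&edesc->sgt[0] + edesc->qm_sg_bytes) before freeing. A schematic of the encrypt path under those assumptions; dpaa2_caam_enqueue() is assumed as the driver's enqueue helper, and the -EBUSY/backlog handling of the real code is left out.

/* Schematic only: error handling is reduced to the ownership hand-off. */
static int skcipher_encrypt_sketch(struct skcipher_request *req)
{
	struct caam_ctx *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	struct caam_request *caam_req = skcipher_request_ctx(req);
	struct skcipher_edesc *edesc;
	int ret;

	edesc = skcipher_edesc_alloc(req);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	caam_req->edesc = edesc;	/* completion callback will find it here */

	ret = dpaa2_caam_enqueue(ctx->dev, caam_req);
	if (ret != -EINPROGRESS) {
		/* Synchronous failure: undo what the allocator mapped. */
		skcipher_unmap(ctx->dev, edesc, req);
		qi_cache_free(edesc);
	}

	return ret;
}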
3368 static inline void ahash_unmap(struct device *dev, struct ahash_edesc *edesc, in ahash_unmap() argument
3373 if (edesc->src_nents) in ahash_unmap()
3374 dma_unmap_sg(dev, req->src, edesc->src_nents, DMA_TO_DEVICE); in ahash_unmap()
3376 if (edesc->qm_sg_bytes) in ahash_unmap()
3377 dma_unmap_single(dev, edesc->qm_sg_dma, edesc->qm_sg_bytes, in ahash_unmap()
3388 struct ahash_edesc *edesc, in ahash_unmap_ctx() argument
3397 ahash_unmap(dev, edesc, req); in ahash_unmap_ctx()
3406 struct ahash_edesc *edesc = state->caam_req.edesc; in ahash_done() local
3416 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_FROM_DEVICE); in ahash_done()
3418 qi_cache_free(edesc); in ahash_done()
3433 struct ahash_edesc *edesc = state->caam_req.edesc; in ahash_done_bi() local
3442 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_BIDIRECTIONAL); in ahash_done_bi()
3443 qi_cache_free(edesc); in ahash_done_bi()
3471 struct ahash_edesc *edesc = state->caam_req.edesc; in ahash_done_ctx_src() local
3481 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_BIDIRECTIONAL); in ahash_done_ctx_src()
3483 qi_cache_free(edesc); in ahash_done_ctx_src()
3498 struct ahash_edesc *edesc = state->caam_req.edesc; in ahash_done_ctx_dst() local
3507 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_FROM_DEVICE); in ahash_done_ctx_dst()
3508 qi_cache_free(edesc); in ahash_done_ctx_dst()
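On the hash side the teardown is conditional: ahash_unmap() only unmaps what a given request actually mapped (a source scatterlist, an S/G table), and ahash_unmap_ctx() layers the running-context unmap on top, with each completion callback choosing DMA_FROM_DEVICE or DMA_BIDIRECTIONAL to match how the context was mapped before freeing the edesc. The base helper, condensed from the lines above.

static void ahash_unmap_sketch(struct device *dev, struct ahash_edesc *edesc,
			       struct ahash_request *req)
{
	/* Source pages were only mapped when the request had any. */
	if (edesc->src_nents)
		dma_unmap_sg(dev, req->src, edesc->src_nents, DMA_TO_DEVICE);

	/* Likewise the S/G table, whose byte count doubles as a "mapped" flag. */
	if (edesc->qm_sg_bytes)
		dma_unmap_single(dev, edesc->qm_sg_dma, edesc->qm_sg_bytes,
				 DMA_TO_DEVICE);
}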
3545 struct ahash_edesc *edesc; in ahash_update_ctx() local
3573 edesc = qi_cache_zalloc(flags); in ahash_update_ctx()
3574 if (!edesc) { in ahash_update_ctx()
3580 edesc->src_nents = src_nents; in ahash_update_ctx()
3584 sg_table = &edesc->sgt[0]; in ahash_update_ctx()
3603 edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, in ahash_update_ctx()
3605 if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) { in ahash_update_ctx()
3610 edesc->qm_sg_bytes = qm_sg_bytes; in ahash_update_ctx()
3615 dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma); in ahash_update_ctx()
3625 req_ctx->edesc = edesc; in ahash_update_ctx()
3644 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_BIDIRECTIONAL); in ahash_update_ctx()
3645 qi_cache_free(edesc); in ahash_update_ctx()
3662 struct ahash_edesc *edesc; in ahash_final_ctx() local
3667 edesc = qi_cache_zalloc(flags); in ahash_final_ctx()
3668 if (!edesc) in ahash_final_ctx()
3672 sg_table = &edesc->sgt[0]; in ahash_final_ctx()
3685 edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, qm_sg_bytes, in ahash_final_ctx()
3687 if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) { in ahash_final_ctx()
3692 edesc->qm_sg_bytes = qm_sg_bytes; in ahash_final_ctx()
3697 dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma); in ahash_final_ctx()
3707 req_ctx->edesc = edesc; in ahash_final_ctx()
3715 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_BIDIRECTIONAL); in ahash_final_ctx()
3716 qi_cache_free(edesc); in ahash_final_ctx()
3734 struct ahash_edesc *edesc; in ahash_finup_ctx() local
3756 edesc = qi_cache_zalloc(flags); in ahash_finup_ctx()
3757 if (!edesc) { in ahash_finup_ctx()
3762 edesc->src_nents = src_nents; in ahash_finup_ctx()
3766 sg_table = &edesc->sgt[0]; in ahash_finup_ctx()
3779 edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, qm_sg_bytes, in ahash_finup_ctx()
3781 if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) { in ahash_finup_ctx()
3786 edesc->qm_sg_bytes = qm_sg_bytes; in ahash_finup_ctx()
3791 dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma); in ahash_finup_ctx()
3801 req_ctx->edesc = edesc; in ahash_finup_ctx()
3809 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_BIDIRECTIONAL); in ahash_finup_ctx()
3810 qi_cache_free(edesc); in ahash_finup_ctx()
3826 struct ahash_edesc *edesc; in ahash_digest() local
3849 edesc = qi_cache_zalloc(flags); in ahash_digest()
3850 if (!edesc) { in ahash_digest()
3855 edesc->src_nents = src_nents; in ahash_digest()
3860 struct dpaa2_sg_entry *sg_table = &edesc->sgt[0]; in ahash_digest()
3864 edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, in ahash_digest()
3866 if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) { in ahash_digest()
3870 edesc->qm_sg_bytes = qm_sg_bytes; in ahash_digest()
3872 dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma); in ahash_digest()
3897 req_ctx->edesc = edesc; in ahash_digest()
3904 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_FROM_DEVICE); in ahash_digest()
3905 qi_cache_free(edesc); in ahash_digest()
3922 struct ahash_edesc *edesc; in ahash_final_no_ctx() local
3926 edesc = qi_cache_zalloc(flags); in ahash_final_no_ctx()
3927 if (!edesc) in ahash_final_no_ctx()
3969 req_ctx->edesc = edesc; in ahash_final_no_ctx()
3977 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_FROM_DEVICE); in ahash_final_no_ctx()
3978 qi_cache_free(edesc); in ahash_final_no_ctx()
3997 struct ahash_edesc *edesc; in ahash_update_no_ctx() local
4025 edesc = qi_cache_zalloc(flags); in ahash_update_no_ctx()
4026 if (!edesc) { in ahash_update_no_ctx()
4032 edesc->src_nents = src_nents; in ahash_update_no_ctx()
4035 sg_table = &edesc->sgt[0]; in ahash_update_no_ctx()
4043 edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, in ahash_update_no_ctx()
4045 if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) { in ahash_update_no_ctx()
4050 edesc->qm_sg_bytes = qm_sg_bytes; in ahash_update_no_ctx()
4065 dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma); in ahash_update_no_ctx()
4075 req_ctx->edesc = edesc; in ahash_update_no_ctx()
4098 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_TO_DEVICE); in ahash_update_no_ctx()
4099 qi_cache_free(edesc); in ahash_update_no_ctx()
4116 struct ahash_edesc *edesc; in ahash_finup_no_ctx() local
4138 edesc = qi_cache_zalloc(flags); in ahash_finup_no_ctx()
4139 if (!edesc) { in ahash_finup_no_ctx()
4144 edesc->src_nents = src_nents; in ahash_finup_no_ctx()
4146 sg_table = &edesc->sgt[0]; in ahash_finup_no_ctx()
4154 edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, qm_sg_bytes, in ahash_finup_no_ctx()
4156 if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) { in ahash_finup_no_ctx()
4161 edesc->qm_sg_bytes = qm_sg_bytes; in ahash_finup_no_ctx()
4176 dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma); in ahash_finup_no_ctx()
4186 req_ctx->edesc = edesc; in ahash_finup_no_ctx()
4194 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_FROM_DEVICE); in ahash_finup_no_ctx()
4195 qi_cache_free(edesc); in ahash_finup_no_ctx()
4214 struct ahash_edesc *edesc; in ahash_update_first() local
4243 edesc = qi_cache_zalloc(flags); in ahash_update_first()
4244 if (!edesc) { in ahash_update_first()
4250 edesc->src_nents = src_nents; in ahash_update_first()
4251 sg_table = &edesc->sgt[0]; in ahash_update_first()
4263 edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, in ahash_update_first()
4266 if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) { in ahash_update_first()
4271 edesc->qm_sg_bytes = qm_sg_bytes; in ahash_update_first()
4273 dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma); in ahash_update_first()
4297 req_ctx->edesc = edesc; in ahash_update_first()
4323 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_TO_DEVICE); in ahash_update_first()
4324 qi_cache_free(edesc); in ahash_update_first()
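Every ahash request builder above (ahash_update_ctx(), ahash_finup_ctx(), ahash_digest(), ahash_update_no_ctx(), ahash_update_first(), ...) repeats one spine: zero-allocate the edesc, record src_nents, build the S/G table in edesc->sgt[0], map it with dma_map_single(), store qm_sg_dma/qm_sg_bytes, aim the input frame-list entry at it with dpaa2_fl_set_addr(), and hang the edesc on req_ctx->edesc so the done callback can unmap and free it. A compressed sketch of that common part, with the S/G population and length bookkeeping elided; ahash_build_sketch() is a hypothetical name and struct caam_request is assumed as the per-request context type.

/* Compressed sketch of the mapping step shared by the ahash builders;
 * qm_sg_bytes and the S/G entries are assumed to be filled in by the
 * caller. */
static int ahash_build_sketch(struct device *dev, struct caam_request *req_ctx,
			      struct ahash_edesc *edesc,
			      struct dpaa2_fl_entry *in_fle,
			      int qm_sg_bytes, int src_nents)
{
	struct dpaa2_sg_entry *sg_table = &edesc->sgt[0];

	edesc->src_nents = src_nents;

	edesc->qm_sg_dma = dma_map_single(dev, sg_table, qm_sg_bytes,
					  DMA_TO_DEVICE);
	if (dma_mapping_error(dev, edesc->qm_sg_dma)) {
		qi_cache_free(edesc);
		return -ENOMEM;
	}
	edesc->qm_sg_bytes = qm_sg_bytes;

	dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma);
	req_ctx->edesc = edesc;		/* done callback unmaps and frees it */

	return 0;
}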