Lines matching refs:edesc (CAAM/QI crypto glue code, drivers/crypto/caam/caamalg_qi.c)
894 struct aead_edesc *edesc, in aead_unmap() argument
900 caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents, in aead_unmap()
901 edesc->iv_dma, ivsize, DMA_TO_DEVICE, edesc->qm_sg_dma, in aead_unmap()
902 edesc->qm_sg_bytes); in aead_unmap()
903 dma_unmap_single(dev, edesc->assoclen_dma, 4, DMA_TO_DEVICE); in aead_unmap()
906 static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc, in skcipher_unmap() argument
912 caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents, in skcipher_unmap()
913 edesc->iv_dma, ivsize, DMA_BIDIRECTIONAL, edesc->qm_sg_dma, in skcipher_unmap()
914 edesc->qm_sg_bytes); in skcipher_unmap()
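
The two helpers above are near-mirror images; a minimal reconstructed sketch is below. The caam_unmap() calls are verbatim from the matches at 900-903 and 912-914; deriving ivsize through crypto_aead_ivsize()/crypto_skcipher_ivsize() is an assumption about the lines not shown.

static void aead_unmap(struct device *dev, struct aead_edesc *edesc,
                       struct aead_request *req)
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        int ivsize = crypto_aead_ivsize(aead);

        /* Drop the src/dst scatterlist mappings, the IV and the HW S/G table */
        caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents,
                   edesc->iv_dma, ivsize, DMA_TO_DEVICE, edesc->qm_sg_dma,
                   edesc->qm_sg_bytes);
        /* assoclen is a separately mapped 4-byte field embedded in the edesc */
        dma_unmap_single(dev, edesc->assoclen_dma, 4, DMA_TO_DEVICE);
}

static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc,
                           struct skcipher_request *req)
{
        struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
        int ivsize = crypto_skcipher_ivsize(skcipher);

        /* Same pattern, but the IV is mapped bidirectionally so the updated
         * IV can be copied back to req->iv in skcipher_done() */
        caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents,
                   edesc->iv_dma, ivsize, DMA_BIDIRECTIONAL, edesc->qm_sg_dma,
                   edesc->qm_sg_bytes);
}
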
920 struct aead_edesc *edesc; in aead_done() local
931 edesc = container_of(drv_req, typeof(*edesc), drv_req); in aead_done()
932 aead_unmap(qidev, edesc, aead_req); in aead_done()
935 qi_cache_free(edesc); in aead_done()
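
The callback referenced at 920-935 recovers the edesc from the embedded drv_req with container_of(), unmaps everything mapped at allocation time, completes the crypto request and returns the edesc to the qi_cache. A hedged sketch follows; the context accessor and the status-decoding call vary between kernel versions and are assumptions here.

static void aead_done(struct caam_drv_req *drv_req, u32 status)
{
        struct aead_request *aead_req = drv_req->app_ctx;
        struct crypto_aead *aead = crypto_aead_reqtfm(aead_req);
        struct caam_ctx *caam_ctx = crypto_aead_ctx(aead);     /* accessor name varies by version */
        struct device *qidev = caam_ctx->qidev;
        struct aead_edesc *edesc;
        int ecode = 0;

        if (unlikely(status))
                ecode = caam_jr_strstatus(qidev, status);      /* decode CAAM status word */

        /* drv_req is embedded in the edesc, so recover the container */
        edesc = container_of(drv_req, typeof(*edesc), drv_req);
        aead_unmap(qidev, edesc, aead_req);

        aead_request_complete(aead_req, ecode);
        qi_cache_free(edesc);
}
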
953 struct aead_edesc *edesc; in aead_edesc_alloc() local
967 edesc = qi_cache_alloc(flags); in aead_edesc_alloc()
968 if (unlikely(!edesc)) { in aead_edesc_alloc()
981 qi_cache_free(edesc); in aead_edesc_alloc()
989 qi_cache_free(edesc); in aead_edesc_alloc()
1000 qi_cache_free(edesc); in aead_edesc_alloc()
1008 qi_cache_free(edesc); in aead_edesc_alloc()
1017 qi_cache_free(edesc); in aead_edesc_alloc()
1032 qi_cache_free(edesc); in aead_edesc_alloc()
1064 sg_table = &edesc->sgt[0]; in aead_edesc_alloc()
1072 qi_cache_free(edesc); in aead_edesc_alloc()
1087 qi_cache_free(edesc); in aead_edesc_alloc()
1092 edesc->src_nents = src_nents; in aead_edesc_alloc()
1093 edesc->dst_nents = dst_nents; in aead_edesc_alloc()
1094 edesc->iv_dma = iv_dma; in aead_edesc_alloc()
1095 edesc->drv_req.app_ctx = req; in aead_edesc_alloc()
1096 edesc->drv_req.cbk = aead_done; in aead_edesc_alloc()
1097 edesc->drv_req.drv_ctx = drv_ctx; in aead_edesc_alloc()
1099 edesc->assoclen = cpu_to_caam32(req->assoclen); in aead_edesc_alloc()
1100 edesc->assoclen_dma = dma_map_single(qidev, &edesc->assoclen, 4, in aead_edesc_alloc()
1102 if (dma_mapping_error(qidev, edesc->assoclen_dma)) { in aead_edesc_alloc()
1106 qi_cache_free(edesc); in aead_edesc_alloc()
1110 dma_to_qm_sg_one(sg_table, edesc->assoclen_dma, 4, 0); in aead_edesc_alloc()
1125 dma_unmap_single(qidev, edesc->assoclen_dma, 4, DMA_TO_DEVICE); in aead_edesc_alloc()
1128 qi_cache_free(edesc); in aead_edesc_alloc()
1132 edesc->qm_sg_dma = qm_sg_dma; in aead_edesc_alloc()
1133 edesc->qm_sg_bytes = qm_sg_bytes; in aead_edesc_alloc()
1139 fd_sgt = &edesc->drv_req.fd_sgt[0]; in aead_edesc_alloc()
1158 return edesc; in aead_edesc_alloc()
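
aead_edesc_alloc() (953-1158) creates every resource that aead_unmap()/aead_done() later release: the qi_cache object holding the edesc and its HW S/G table, plus the DMA mappings for src/dst, the IV, the 4-byte assoclen and the S/G table itself, with each failure path ending in qi_cache_free(edesc) as in the matches above. Below is a heavily condensed sketch of the tail of the function; aead_edesc_alloc_tail() and its iv_dma/src_nents/dst_nents/qm_sg_bytes parameters are illustrative stand-ins for values the real function computes earlier, and the scatterlist/IV mapping plus the fd_sgt setup at 1139 are elided.

static struct aead_edesc *
aead_edesc_alloc_tail(struct aead_request *req, struct device *qidev,
                      struct caam_drv_ctx *drv_ctx, gfp_t flags,
                      dma_addr_t iv_dma, int src_nents, int dst_nents,
                      int qm_sg_bytes)
{
        struct aead_edesc *edesc;
        struct qm_sg_entry *sg_table;
        dma_addr_t qm_sg_dma;

        /* edesc, HW S/G table and assoclen all live in one qi_cache object */
        edesc = qi_cache_alloc(flags);
        if (unlikely(!edesc))
                return ERR_PTR(-ENOMEM);

        sg_table = &edesc->sgt[0];

        edesc->src_nents = src_nents;
        edesc->dst_nents = dst_nents;
        edesc->iv_dma = iv_dma;
        edesc->drv_req.app_ctx = req;
        edesc->drv_req.cbk = aead_done;
        edesc->drv_req.drv_ctx = drv_ctx;

        /* assoclen is handed to the CAAM through a 4-byte DMA buffer */
        edesc->assoclen = cpu_to_caam32(req->assoclen);
        edesc->assoclen_dma = dma_map_single(qidev, &edesc->assoclen, 4,
                                             DMA_TO_DEVICE);
        if (dma_mapping_error(qidev, edesc->assoclen_dma)) {
                qi_cache_free(edesc);
                return ERR_PTR(-ENOMEM);
        }
        dma_to_qm_sg_one(sg_table, edesc->assoclen_dma, 4, 0);

        /* ...IV and src/dst scatterlist entries are appended to sg_table here... */

        qm_sg_dma = dma_map_single(qidev, sg_table, qm_sg_bytes, DMA_TO_DEVICE);
        if (dma_mapping_error(qidev, qm_sg_dma)) {
                dma_unmap_single(qidev, edesc->assoclen_dma, 4, DMA_TO_DEVICE);
                qi_cache_free(edesc);
                return ERR_PTR(-ENOMEM);
        }
        edesc->qm_sg_dma = qm_sg_dma;
        edesc->qm_sg_bytes = qm_sg_bytes;

        /* fd_sgt[0]/fd_sgt[1] (output/input frame S/G entries) are pointed
         * into the mapped table here; see the skcipher sketch further down */
        return edesc;
}
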
1163 struct aead_edesc *edesc; in aead_crypt() local
1172 edesc = aead_edesc_alloc(req, encrypt); in aead_crypt()
1173 if (IS_ERR(edesc)) in aead_crypt()
1174 return PTR_ERR(edesc); in aead_crypt()
1177 ret = caam_qi_enqueue(ctx->qidev, &edesc->drv_req); in aead_crypt()
1181 aead_unmap(ctx->qidev, edesc, req); in aead_crypt()
1182 qi_cache_free(edesc); in aead_crypt()
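
aead_crypt() (1163-1182) is the submit path: on successful enqueue the request stays in flight and ownership of the edesc passes to aead_done(); on enqueue failure it is unwound on the spot. A sketch, with the same caveat about the version-dependent ctx accessor:

static inline int aead_crypt(struct aead_request *req, bool encrypt)
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct caam_ctx *ctx = crypto_aead_ctx(aead);  /* accessor name varies by version */
        struct aead_edesc *edesc;
        int ret;

        /* Allocate the extended descriptor; this maps all DMA resources */
        edesc = aead_edesc_alloc(req, encrypt);
        if (IS_ERR(edesc))
                return PTR_ERR(edesc);

        /* Hand the embedded driver request to the QI backend */
        ret = caam_qi_enqueue(ctx->qidev, &edesc->drv_req);
        if (!ret) {
                ret = -EINPROGRESS;     /* aead_done() frees the edesc later */
        } else {
                aead_unmap(ctx->qidev, edesc, req);
                qi_cache_free(edesc);
        }

        return ret;
}
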
1210 static inline u8 *skcipher_edesc_iv(struct skcipher_edesc *edesc) in skcipher_edesc_iv() argument
1212 return PTR_ALIGN((u8 *)&edesc->sgt[0] + edesc->qm_sg_bytes, in skcipher_edesc_iv()
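
skcipher_edesc_iv() (1210-1212) encodes a layout decision: the skcipher IV buffer lives in the same qi_cache allocation as the edesc, immediately after the HW S/G table, rounded up to the DMA cache alignment so it can be DMA-mapped without sharing a cache line with CPU-written fields. The accessor as referenced, with that rationale spelled out as a comment (the rationale is an inference, not part of the matches):

static inline u8 *skcipher_edesc_iv(struct skcipher_edesc *edesc)
{
        /*
         * The IV is carved out of the edesc allocation itself: it starts
         * right after the HW S/G table (sgt[0] .. sgt[0] + qm_sg_bytes) and
         * is aligned so that mapping it for DMA on its own is safe.
         */
        return PTR_ALIGN((u8 *)&edesc->sgt[0] + edesc->qm_sg_bytes,
                         dma_get_cache_alignment());
}
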
1218 struct skcipher_edesc *edesc; in skcipher_done() local
1228 edesc = container_of(drv_req, typeof(*edesc), drv_req); in skcipher_done()
1235 edesc->src_nents > 1 ? 100 : ivsize, 1); in skcipher_done()
1238 edesc->dst_nents > 1 ? 100 : req->cryptlen, 1); in skcipher_done()
1240 skcipher_unmap(qidev, edesc, req); in skcipher_done()
1248 memcpy(req->iv, skcipher_edesc_iv(edesc), ivsize); in skcipher_done()
1250 qi_cache_free(edesc); in skcipher_done()
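
skcipher_done() (1218-1250) mirrors aead_done() with one extra step: after unmapping, the output IV produced by the hardware is copied from the in-edesc IV buffer back into req->iv (the crypto API expects this, e.g. the last ciphertext block for CBC), and only then is the edesc freed. A hedged sketch, with the hex-dump debug calls at 1235/1238 omitted and the usual version-dependent accessor caveat:

static void skcipher_done(struct caam_drv_req *drv_req, u32 status)
{
        struct skcipher_request *req = drv_req->app_ctx;
        struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
        struct caam_ctx *caam_ctx = crypto_skcipher_ctx(skcipher);  /* accessor name varies */
        struct device *qidev = caam_ctx->qidev;
        int ivsize = crypto_skcipher_ivsize(skcipher);
        struct skcipher_edesc *edesc;
        int ecode = 0;

        edesc = container_of(drv_req, typeof(*edesc), drv_req);

        if (unlikely(status))
                ecode = caam_jr_strstatus(qidev, status);

        skcipher_unmap(qidev, edesc, req);

        /*
         * The crypto API expects req->iv to hold the output IV (e.g. the
         * last ciphertext block for CBC). The hardware wrote it into the
         * IV buffer inside the edesc, so copy it out before freeing.
         */
        if (!ecode)
                memcpy(req->iv, skcipher_edesc_iv(edesc), ivsize);

        qi_cache_free(edesc);
        skcipher_request_complete(req, ecode);
}
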
1263 struct skcipher_edesc *edesc; in skcipher_edesc_alloc() local
1345 edesc = qi_cache_alloc(flags); in skcipher_edesc_alloc()
1346 if (unlikely(!edesc)) { in skcipher_edesc_alloc()
1353 edesc->src_nents = src_nents; in skcipher_edesc_alloc()
1354 edesc->dst_nents = dst_nents; in skcipher_edesc_alloc()
1355 edesc->qm_sg_bytes = qm_sg_bytes; in skcipher_edesc_alloc()
1356 edesc->drv_req.app_ctx = req; in skcipher_edesc_alloc()
1357 edesc->drv_req.cbk = skcipher_done; in skcipher_edesc_alloc()
1358 edesc->drv_req.drv_ctx = drv_ctx; in skcipher_edesc_alloc()
1361 sg_table = &edesc->sgt[0]; in skcipher_edesc_alloc()
1362 iv = skcipher_edesc_iv(edesc); in skcipher_edesc_alloc()
1370 qi_cache_free(edesc); in skcipher_edesc_alloc()
1374 edesc->iv_dma = iv_dma; in skcipher_edesc_alloc()
1385 edesc->qm_sg_dma = dma_map_single(qidev, sg_table, edesc->qm_sg_bytes, in skcipher_edesc_alloc()
1387 if (dma_mapping_error(qidev, edesc->qm_sg_dma)) { in skcipher_edesc_alloc()
1391 qi_cache_free(edesc); in skcipher_edesc_alloc()
1395 fd_sgt = &edesc->drv_req.fd_sgt[0]; in skcipher_edesc_alloc()
1397 dma_to_qm_sg_one_last_ext(&fd_sgt[1], edesc->qm_sg_dma, in skcipher_edesc_alloc()
1401 dma_to_qm_sg_one_ext(&fd_sgt[0], edesc->qm_sg_dma + in skcipher_edesc_alloc()
1405 dma_to_qm_sg_one_ext(&fd_sgt[0], edesc->qm_sg_dma + dst_sg_idx * in skcipher_edesc_alloc()
1409 return edesc; in skcipher_edesc_alloc()
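
The matches at 1385-1409 are the last stage of skcipher_edesc_alloc(): the assembled S/G table (IV + source, plus destination entries when src != dst) is DMA-mapped as one block, and the two frame-descriptor S/G entries in drv_req.fd_sgt are pointed at its input and output portions. A condensed sketch; skcipher_edesc_finish(), dst_sg_idx and out_len are illustrative names for values the real function computes earlier, and the exact offsets/lengths in the fd_sgt entries are assumptions:

static struct skcipher_edesc *
skcipher_edesc_finish(struct skcipher_request *req, struct device *qidev,
                      struct skcipher_edesc *edesc,
                      struct qm_sg_entry *sg_table, dma_addr_t iv_dma,
                      int ivsize, int src_nents, int dst_nents,
                      int dst_sg_idx, int out_len)
{
        struct qm_sg_entry *fd_sgt;

        /* Map the whole HW S/G table (IV + src [+ dst]) as one block */
        edesc->qm_sg_dma = dma_map_single(qidev, sg_table, edesc->qm_sg_bytes,
                                          DMA_TO_DEVICE);
        if (dma_mapping_error(qidev, edesc->qm_sg_dma)) {
                caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents,
                           iv_dma, ivsize, DMA_BIDIRECTIONAL, 0, 0);
                qi_cache_free(edesc);
                return ERR_PTR(-ENOMEM);
        }

        fd_sgt = &edesc->drv_req.fd_sgt[0];

        /* Input frame entry: the IV followed by the source data */
        dma_to_qm_sg_one_last_ext(&fd_sgt[1], edesc->qm_sg_dma,
                                  ivsize + req->cryptlen, 0);

        /* Output frame entry: in-place operation reuses the source entries
         * (skipping the IV), otherwise it starts at the destination entries */
        if (req->src == req->dst)
                dma_to_qm_sg_one_ext(&fd_sgt[0],
                                     edesc->qm_sg_dma + sizeof(*sg_table),
                                     out_len, 0);
        else
                dma_to_qm_sg_one_ext(&fd_sgt[0],
                                     edesc->qm_sg_dma +
                                     dst_sg_idx * sizeof(*sg_table),
                                     out_len, 0);

        return edesc;
}
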
1422 struct skcipher_edesc *edesc; in skcipher_crypt() local
1456 edesc = skcipher_edesc_alloc(req, encrypt); in skcipher_crypt()
1457 if (IS_ERR(edesc)) in skcipher_crypt()
1458 return PTR_ERR(edesc); in skcipher_crypt()
1460 ret = caam_qi_enqueue(ctx->qidev, &edesc->drv_req); in skcipher_crypt()
1464 skcipher_unmap(ctx->qidev, edesc, req); in skcipher_crypt()
1465 qi_cache_free(edesc); in skcipher_crypt()
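
skcipher_crypt() (1422-1465) follows the same enqueue-or-unwind pattern as aead_crypt(). For context, a hedged caller-side example of how that -EINPROGRESS flow is consumed through the generic crypto API; "cbc(aes)" only exercises this driver when the crypto core selects the CAAM/QI implementation, and the function name below is made up for illustration:

#include <crypto/skcipher.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

static int caam_qi_cbc_aes_smoke_test(void)
{
        u8 key[16] = {}, iv[16] = {};
        struct crypto_skcipher *tfm;
        struct skcipher_request *req;
        DECLARE_CRYPTO_WAIT(wait);
        struct scatterlist sg;
        u8 *buf;
        int err;

        tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        err = crypto_skcipher_setkey(tfm, key, sizeof(key));
        if (err)
                goto out_free_tfm;

        req = skcipher_request_alloc(tfm, GFP_KERNEL);
        buf = kzalloc(64, GFP_KERNEL);  /* data buffers must be DMA-able, not on the stack */
        if (!req || !buf) {
                err = -ENOMEM;
                goto out_free;
        }

        sg_init_one(&sg, buf, 64);
        skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
                                      CRYPTO_TFM_REQ_MAY_SLEEP,
                                      crypto_req_done, &wait);
        skcipher_request_set_crypt(req, &sg, &sg, 64, iv);

        /* crypto_wait_req() turns -EINPROGRESS plus the completion callback
         * (skcipher_done() in this driver) into a synchronous return code */
        err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

out_free:
        kfree(buf);
        skcipher_request_free(req);
out_free_tfm:
        crypto_free_skcipher(tfm);
        return err;
}
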