Lines matching refs: edesc (cross-reference listing). The leading number on each entry is the line number in the driver source; the trailing annotation names the enclosing function or tags the hit as a member, argument, or local.

132 struct skcipher_edesc *edesc; member
137 struct aead_edesc *edesc; member
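
For orientation, a sketch of the skcipher extended descriptor, reconstructed only from the fields this listing touches; the authoritative definition (and the analogous aead_edesc, which lacks iv_dma) is in the driver source, so field order and comments here are assumptions:

struct skcipher_edesc {
	int src_nents;			/* S/G entries in req->src */
	int dst_nents;			/* S/G entries in req->dst */
	int mapped_src_nents;		/* entries actually DMA-mapped */
	int mapped_dst_nents;
	bool bklog;			/* submitted via crypto-engine backlog */
	dma_addr_t iv_dma;		/* DMA address of the bounced IV */
	int sec4_sg_bytes;		/* size of the hardware S/G table */
	dma_addr_t sec4_sg_dma;		/* DMA address of that table */
	struct sec4_sg_entry *sec4_sg;	/* CPU pointer to the table */
	u32 hw_desc[];			/* CAAM job descriptor; the S/G table
					 * and IV follow in the same allocation */
};
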
959 struct aead_edesc *edesc, in aead_unmap() argument
963 edesc->src_nents, edesc->dst_nents, 0, 0, in aead_unmap()
964 edesc->sec4_sg_dma, edesc->sec4_sg_bytes); in aead_unmap()
967 static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc, in skcipher_unmap() argument
974 edesc->src_nents, edesc->dst_nents, in skcipher_unmap()
975 edesc->iv_dma, ivsize, in skcipher_unmap()
976 edesc->sec4_sg_dma, edesc->sec4_sg_bytes); in skcipher_unmap()
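
aead_unmap() and skcipher_unmap() above funnel the same arguments into one shared teardown. A minimal sketch of what that teardown has to undo, assuming the usual in-place/out-of-place DMA direction split (the helper name and exact directions are assumptions):

#include <linux/dma-mapping.h>

static void edesc_unmap_sketch(struct device *dev, struct scatterlist *src,
			       struct scatterlist *dst, int src_nents,
			       int dst_nents, dma_addr_t iv_dma, int ivsize,
			       dma_addr_t sec4_sg_dma, int sec4_sg_bytes)
{
	if (dst != src) {
		if (src_nents)
			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
		if (dst_nents)
			dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
	} else {
		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
	}

	if (iv_dma)
		dma_unmap_single(dev, iv_dma, ivsize, DMA_BIDIRECTIONAL);
	if (sec4_sg_bytes)
		dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
				 DMA_TO_DEVICE);
}
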
985 struct aead_edesc *edesc; in aead_crypt_done() local
991 edesc = rctx->edesc; in aead_crypt_done()
992 has_bklog = edesc->bklog; in aead_crypt_done()
997 aead_unmap(jrdev, edesc, req); in aead_crypt_done()
999 kfree(edesc); in aead_crypt_done()
1011 static inline u8 *skcipher_edesc_iv(struct skcipher_edesc *edesc) in skcipher_edesc_iv() argument
1014 return PTR_ALIGN((u8 *)edesc->sec4_sg + edesc->sec4_sg_bytes, in skcipher_edesc_iv()
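
The alignment argument of the PTR_ALIGN() is cut off in the listing. Its purpose: the IV is carved out of the same allocation, directly after the S/G table, rounded up so that mapping it for DMA does not land in the same cacheline as the table. A sketch with 'align' standing in for the elided argument:

/* edesc memory layout implied by skcipher_edesc_iv():
 *
 *   [struct skcipher_edesc][hw_desc[]][sec4_sg table][pad][IV]
 */
static u8 *edesc_iv_sketch(struct skcipher_edesc *edesc, unsigned long align)
{
	return PTR_ALIGN((u8 *)edesc->sec4_sg + edesc->sec4_sg_bytes, align);
}
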
1022 struct skcipher_edesc *edesc; in skcipher_crypt_done() local
1032 edesc = rctx->edesc; in skcipher_crypt_done()
1033 has_bklog = edesc->bklog; in skcipher_crypt_done()
1037 skcipher_unmap(jrdev, edesc, req); in skcipher_crypt_done()
1045 memcpy(req->iv, skcipher_edesc_iv(edesc), ivsize); in skcipher_crypt_done()
1054 edesc->dst_nents > 1 ? 100 : req->cryptlen, 1); in skcipher_crypt_done()
1056 kfree(edesc); in skcipher_crypt_done()
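
skcipher_crypt_done(), reassembled from the fragments above: unmap, copy the chained IV back into req->iv, free the edesc, then complete either directly or through the crypto engine when the request was backlogged. A sketch; the request-context type, the engine lookup, and the error decoding are assumptions:

static void crypt_done_sketch(struct device *jrdev, u32 *desc, u32 err,
			      void *context)
{
	struct skcipher_request *req = context;
	struct my_req_ctx *rctx = skcipher_request_ctx(req);	/* hypothetical type */
	struct skcipher_edesc *edesc = rctx->edesc;
	struct crypto_engine *engine = get_engine(jrdev);	/* hypothetical lookup */
	int ivsize = crypto_skcipher_ivsize(crypto_skcipher_reqtfm(req));
	bool has_bklog = edesc->bklog;
	int ecode = err ? -EIO : 0;	/* real driver decodes CAAM status bits */

	skcipher_unmap(jrdev, edesc, req);

	/* hand back the next chaining value: last ciphertext block for
	 * CBC, bumped counter for CTR */
	if (ivsize)
		memcpy(req->iv, skcipher_edesc_iv(edesc), ivsize);

	kfree(edesc);

	if (!has_bklog)
		skcipher_request_complete(req, ecode);
	else
		crypto_finalize_skcipher_request(engine, req, ecode);
}
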
1072 struct aead_edesc *edesc, in init_aead_job() argument
1078 u32 *desc = edesc->hw_desc; in init_aead_job()
1092 src_dma = edesc->mapped_src_nents ? sg_dma_address(req->src) : in init_aead_job()
1096 src_dma = edesc->sec4_sg_dma; in init_aead_job()
1097 sec4_sg_index += edesc->mapped_src_nents; in init_aead_job()
1108 if (!edesc->mapped_dst_nents) { in init_aead_job()
1111 } else if (edesc->mapped_dst_nents == 1) { in init_aead_job()
1115 dst_dma = edesc->sec4_sg_dma + in init_aead_job()
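
The source/destination selection inside init_aead_job() (source lines 1092-1115), reassembled: a single mapped segment is programmed into the descriptor directly, anything else points at the sec4 table, and a multi-segment destination resumes at the slot just past the source entries. A sketch; the zero fallbacks are assumptions:

static void aead_pick_ptrs_sketch(struct aead_request *req,
				  struct aead_edesc *edesc, bool all_contig,
				  dma_addr_t *src_dma, dma_addr_t *dst_dma)
{
	int sec4_sg_index = 0;

	if (all_contig) {
		*src_dma = edesc->mapped_src_nents ?
			   sg_dma_address(req->src) : 0;
	} else {
		*src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->mapped_src_nents;
	}

	if (!edesc->mapped_dst_nents)
		*dst_dma = 0;			/* no output mapping */
	else if (edesc->mapped_dst_nents == 1)
		*dst_dma = sg_dma_address(req->dst);
	else
		*dst_dma = edesc->sec4_sg_dma +
			   sec4_sg_index * sizeof(struct sec4_sg_entry);
}
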
1133 struct aead_edesc *edesc, in init_gcm_job() argument
1139 u32 *desc = edesc->hw_desc; in init_gcm_job()
1143 init_aead_job(req, edesc, all_contig, encrypt); in init_gcm_job()
1163 struct aead_edesc *edesc, bool all_contig, in init_chachapoly_job() argument
1169 u32 *desc = edesc->hw_desc; in init_chachapoly_job()
1172 init_aead_job(req, edesc, all_contig, encrypt); in init_chachapoly_job()
1197 struct aead_edesc *edesc, in init_authenc_job() argument
1210 u32 *desc = edesc->hw_desc; in init_authenc_job()
1228 init_aead_job(req, edesc, all_contig, encrypt); in init_authenc_job()
1250 struct skcipher_edesc *edesc, in init_skcipher_job() argument
1257 u32 *desc = edesc->hw_desc; in init_skcipher_job()
1266 (int)edesc->src_nents > 1 ? 100 : req->cryptlen, req->cryptlen); in init_skcipher_job()
1270 edesc->src_nents > 1 ? 100 : req->cryptlen, 1); in init_skcipher_job()
1278 if (ivsize || edesc->mapped_src_nents > 1) { in init_skcipher_job()
1279 src_dma = edesc->sec4_sg_dma; in init_skcipher_job()
1280 sec4_sg_index = edesc->mapped_src_nents + !!ivsize; in init_skcipher_job()
1291 } else if (!ivsize && edesc->mapped_dst_nents == 1) { in init_skcipher_job()
1294 dst_dma = edesc->sec4_sg_dma + sec4_sg_index * in init_skcipher_job()
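
init_skcipher_job() differs because the IV occupies the first table slot: the table is used whenever there is an IV or more than one source segment, and the output run starts at mapped_src_nents + !!ivsize (source lines 1278-1294). The same selection, sketched:

static void skcipher_pick_ptrs_sketch(struct skcipher_request *req,
				      struct skcipher_edesc *edesc, int ivsize,
				      dma_addr_t *src_dma, dma_addr_t *dst_dma)
{
	int sec4_sg_index = 0;

	/* input side of the table: [IV?][src segments...] */
	if (ivsize || edesc->mapped_src_nents > 1) {
		*src_dma = edesc->sec4_sg_dma;
		sec4_sg_index = edesc->mapped_src_nents + !!ivsize;
	} else {
		*src_dma = sg_dma_address(req->src);	/* single flat buffer */
	}

	/* output is flat only when no IV has to be written back */
	if (!ivsize && edesc->mapped_dst_nents == 1)
		*dst_dma = sg_dma_address(req->dst);
	else
		*dst_dma = edesc->sec4_sg_dma +
			   sec4_sg_index * sizeof(struct sec4_sg_entry);
}
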
1317 struct aead_edesc *edesc; in aead_edesc_alloc() local
1399 edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes, flags); in aead_edesc_alloc()
1400 if (!edesc) { in aead_edesc_alloc()
1406 edesc->src_nents = src_nents; in aead_edesc_alloc()
1407 edesc->dst_nents = dst_nents; in aead_edesc_alloc()
1408 edesc->mapped_src_nents = mapped_src_nents; in aead_edesc_alloc()
1409 edesc->mapped_dst_nents = mapped_dst_nents; in aead_edesc_alloc()
1410 edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) + in aead_edesc_alloc()
1413 rctx->edesc = edesc; in aead_edesc_alloc()
1420 edesc->sec4_sg + sec4_sg_index, 0); in aead_edesc_alloc()
1425 edesc->sec4_sg + sec4_sg_index, 0); in aead_edesc_alloc()
1429 return edesc; in aead_edesc_alloc()
1431 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg, in aead_edesc_alloc()
1433 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) { in aead_edesc_alloc()
1435 aead_unmap(jrdev, edesc, req); in aead_edesc_alloc()
1436 kfree(edesc); in aead_edesc_alloc()
1440 edesc->sec4_sg_bytes = sec4_sg_bytes; in aead_edesc_alloc()
1442 return edesc; in aead_edesc_alloc()
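
The point of the single kzalloc() in aead_edesc_alloc(): struct, job descriptor, and S/G table share one block, so every error and completion path unwinds with a single kfree(). Condensed fragment from source lines 1399-1440 (flags handling and the all-contiguous early return elided):

	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes, flags);
	if (!edesc)
		return ERR_PTR(-ENOMEM);

	edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) + desc_bytes;
	/* ... fill the table with sg_to_sec4_sg() ... */

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}
	edesc->sec4_sg_bytes = sec4_sg_bytes;
	return edesc;
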
1449 struct aead_edesc *edesc = rctx->edesc; in aead_enqueue_req() local
1450 u32 *desc = edesc->hw_desc; in aead_enqueue_req()
1465 aead_unmap(jrdev, edesc, req); in aead_enqueue_req()
1466 kfree(rctx->edesc); in aead_enqueue_req()
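
The enqueue step behind aead_enqueue_req() (source lines 1449-1466), sketched: submit the prebuilt descriptor to the job ring and unwind on anything that is not an in-flight status. The exact set of accepted return codes varies across kernel versions, so the check here is an assumption:

	ret = caam_jr_enqueue(jrdev, desc, aead_crypt_done, req);
	if (ret != -EINPROGRESS && ret != -EBUSY) {
		aead_unmap(jrdev, edesc, req);
		kfree(rctx->edesc);
	}
	return ret;
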
1474 struct aead_edesc *edesc; in chachapoly_crypt() local
1481 edesc = aead_edesc_alloc(req, CHACHAPOLY_DESC_JOB_IO_LEN, &all_contig, in chachapoly_crypt()
1483 if (IS_ERR(edesc)) in chachapoly_crypt()
1484 return PTR_ERR(edesc); in chachapoly_crypt()
1486 desc = edesc->hw_desc; in chachapoly_crypt()
1488 init_chachapoly_job(req, edesc, all_contig, encrypt); in chachapoly_crypt()
1508 struct aead_edesc *edesc; in aead_crypt() local
1515 edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN, in aead_crypt()
1517 if (IS_ERR(edesc)) in aead_crypt()
1518 return PTR_ERR(edesc); in aead_crypt()
1521 init_authenc_job(req, edesc, all_contig, encrypt); in aead_crypt()
1524 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, in aead_crypt()
1525 desc_bytes(edesc->hw_desc), 1); in aead_crypt()
1545 u32 *desc = rctx->edesc->hw_desc; in aead_do_one_req()
1548 rctx->edesc->bklog = true; in aead_do_one_req()
1556 aead_unmap(ctx->jrdev, rctx->edesc, req); in aead_do_one_req()
1557 kfree(rctx->edesc); in aead_do_one_req()
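
aead_do_one_req() reassembled: setting bklog before enqueueing is what steers aead_crypt_done() toward crypto-engine finalization instead of direct completion. A sketch; the ctx/rctx types and the return-code handling are assumptions:

static int do_one_req_sketch(struct crypto_engine *engine, void *areq)
{
	struct aead_request *req = aead_request_cast(areq);
	struct my_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));	/* hypothetical */
	struct my_req_ctx *rctx = aead_request_ctx(req);		/* hypothetical */
	u32 *desc = rctx->edesc->hw_desc;
	int ret;

	rctx->edesc->bklog = true;	/* completion must go via the engine */

	ret = caam_jr_enqueue(ctx->jrdev, desc, aead_crypt_done, req);
	if (ret != -EINPROGRESS && ret != -EBUSY) {
		aead_unmap(ctx->jrdev, rctx->edesc, req);
		kfree(rctx->edesc);
	} else {
		ret = 0;	/* accepted: finalized later from the done callback */
	}

	return ret;
}
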
1567 struct aead_edesc *edesc; in gcm_crypt() local
1574 edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, in gcm_crypt()
1576 if (IS_ERR(edesc)) in gcm_crypt()
1577 return PTR_ERR(edesc); in gcm_crypt()
1580 init_gcm_job(req, edesc, all_contig, encrypt); in gcm_crypt()
1583 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, in gcm_crypt()
1584 desc_bytes(edesc->hw_desc), 1); in gcm_crypt()
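
chachapoly_crypt(), aead_crypt(), and gcm_crypt() share one shape: allocate the edesc (which also maps the buffers and builds the table), specialize the job descriptor, dump it, enqueue. Sketched with the GCM names from the listing; the dump prefix string and the trailing call into aead_enqueue_req() are inferred:

	edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, encrypt);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	init_gcm_job(req, edesc, all_contig, encrypt);

	print_hex_dump_debug("jobdesc@" __stringify(__LINE__) ": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
			     desc_bytes(edesc->hw_desc), 1);

	return aead_enqueue_req(jrdev, req);
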
1622 struct skcipher_edesc *edesc; in skcipher_edesc_alloc() local
1701 aligned_size = sizeof(*edesc) + desc_bytes + sec4_sg_bytes; in skcipher_edesc_alloc()
1706 edesc = kzalloc(aligned_size, flags); in skcipher_edesc_alloc()
1707 if (!edesc) { in skcipher_edesc_alloc()
1714 edesc->src_nents = src_nents; in skcipher_edesc_alloc()
1715 edesc->dst_nents = dst_nents; in skcipher_edesc_alloc()
1716 edesc->mapped_src_nents = mapped_src_nents; in skcipher_edesc_alloc()
1717 edesc->mapped_dst_nents = mapped_dst_nents; in skcipher_edesc_alloc()
1718 edesc->sec4_sg_bytes = sec4_sg_bytes; in skcipher_edesc_alloc()
1719 edesc->sec4_sg = (struct sec4_sg_entry *)((u8 *)edesc->hw_desc + in skcipher_edesc_alloc()
1721 rctx->edesc = edesc; in skcipher_edesc_alloc()
1725 iv = skcipher_edesc_iv(edesc); in skcipher_edesc_alloc()
1733 kfree(edesc); in skcipher_edesc_alloc()
1737 dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0); in skcipher_edesc_alloc()
1740 sg_to_sec4_sg(req->src, req->cryptlen, edesc->sec4_sg + in skcipher_edesc_alloc()
1744 sg_to_sec4_sg(req->dst, req->cryptlen, edesc->sec4_sg + in skcipher_edesc_alloc()
1748 dma_to_sec4_sg_one(edesc->sec4_sg + dst_sg_idx + in skcipher_edesc_alloc()
1752 sg_to_sec4_set_last(edesc->sec4_sg + dst_sg_idx + in skcipher_edesc_alloc()
1756 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg, in skcipher_edesc_alloc()
1759 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) { in skcipher_edesc_alloc()
1763 kfree(edesc); in skcipher_edesc_alloc()
1768 edesc->iv_dma = iv_dma; in skcipher_edesc_alloc()
1771 DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg, in skcipher_edesc_alloc()
1774 return edesc; in skcipher_edesc_alloc()
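
The table fill in source lines 1737-1752, reassembled for the out-of-place case: slot 0 carries the input IV, the source segments follow, the destination run starts at dst_sg_idx, and one final slot takes the output IV plus the LAST marker that tells the hardware where the table ends. A sketch using the helper names from the listing; the index bookkeeping is simplified:

	dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);	/* IV in */
	sg_to_sec4_sg(req->src, req->cryptlen, edesc->sec4_sg + 1, 0);

	sg_to_sec4_sg(req->dst, req->cryptlen, edesc->sec4_sg + dst_sg_idx, 0);
	dma_to_sec4_sg_one(edesc->sec4_sg + dst_sg_idx + edesc->mapped_dst_nents,
			   iv_dma, ivsize, 0);			/* IV out */
	sg_to_sec4_set_last(edesc->sec4_sg + dst_sg_idx +
			    edesc->mapped_dst_nents);		/* close the table */
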
1782 u32 *desc = rctx->edesc->hw_desc; in skcipher_do_one_req()
1785 rctx->edesc->bklog = true; in skcipher_do_one_req()
1793 skcipher_unmap(ctx->jrdev, rctx->edesc, req); in skcipher_do_one_req()
1794 kfree(rctx->edesc); in skcipher_do_one_req()
1812 struct skcipher_edesc *edesc; in skcipher_crypt() local
1846 edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ); in skcipher_crypt()
1847 if (IS_ERR(edesc)) in skcipher_crypt()
1848 return PTR_ERR(edesc); in skcipher_crypt()
1851 init_skcipher_job(req, edesc, encrypt); in skcipher_crypt()
1854 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, in skcipher_crypt()
1855 desc_bytes(edesc->hw_desc), 1); in skcipher_crypt()
1857 desc = edesc->hw_desc; in skcipher_crypt()
1870 skcipher_unmap(jrdev, edesc, req); in skcipher_crypt()
1871 kfree(edesc); in skcipher_crypt()
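
Finally, the tail of skcipher_crypt() (source lines 1846-1871), condensed into one sequence; as on the AEAD side, the enqueue return-code check is version-dependent and written here as an assumption:

	edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	init_skcipher_job(req, edesc, encrypt);

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, skcipher_crypt_done, req);
	if (ret != -EINPROGRESS && ret != -EBUSY) {
		skcipher_unmap(jrdev, edesc, req);
		kfree(edesc);
	}
	return ret;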