Lines matching refs:jrdev
123 struct device *jrdev; member
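
Every hit below hangs off this one context field. A minimal sketch of the surrounding per-tfm context, assuming the layout implied by the DMA calls in this listing (only jrdev is confirmed by the hit at line 123; the other fields, their order, and their sizes are assumptions):

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/err.h>
/* struct alginfo comes from the driver-local desc_constr.h */

/* Hypothetical stand-in for the driver's real context struct. The
 * first three fields form one contiguous region that is DMA-mapped
 * once at init time, which is why the listing shows
 * dma_sync_single_for_device() rather than fresh mappings on every
 * descriptor or key update. */
struct caam_ctx_sketch {
	u32 sh_desc_enc[64];		/* encrypt-side shared descriptor */
	u32 sh_desc_dec[64];		/* decrypt-side shared descriptor */
	u8 key[128];			/* (split) key material */
	dma_addr_t sh_desc_enc_dma;	/* device addresses of the above */
	dma_addr_t sh_desc_dec_dma;
	dma_addr_t key_dma;
	struct alginfo adata;		/* auth algo info (keylen_pad etc.) */
	struct alginfo cdata;		/* cipher algo info */
	enum dma_data_direction dir;
	struct device *jrdev;		/* the job-ring device from line 123 */
};

The later sketches in this listing reuse this struct.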
143 struct device *jrdev = ctx->jrdev; in aead_null_set_sh_desc() local
144 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent); in aead_null_set_sh_desc()
165 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, in aead_null_set_sh_desc()
184 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, in aead_null_set_sh_desc()
197 struct device *jrdev = ctx->jrdev; in aead_set_sh_desc() local
198 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent); in aead_set_sh_desc()
268 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, in aead_set_sh_desc()
290 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, in aead_set_sh_desc()
314 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, in aead_set_sh_desc()
335 struct device *jrdev = ctx->jrdev; in gcm_set_sh_desc() local
359 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, in gcm_set_sh_desc()
376 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, in gcm_set_sh_desc()
400 struct device *jrdev = ctx->jrdev; in rfc4106_set_sh_desc() local
425 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, in rfc4106_set_sh_desc()
443 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, in rfc4106_set_sh_desc()
468 struct device *jrdev = ctx->jrdev; in rfc4543_set_sh_desc() local
493 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, in rfc4543_set_sh_desc()
511 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, in rfc4543_set_sh_desc()
534 struct device *jrdev = ctx->jrdev; in chachapoly_set_sh_desc() local
544 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, in chachapoly_set_sh_desc()
550 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, in chachapoly_set_sh_desc()
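
Every *_set_sh_desc() hit above follows one shape: rebuild a shared descriptor in the CPU-side buffer, then push it to the device with dma_sync_single_for_device(). A hedged sketch of that shape (the descriptor-constructor call is elided, and sizeof() stands in for whatever exact length the real code syncs):

/* Shape common to the aead/gcm/rfc4106/rfc4543/chachapoly
 * set_sh_desc() hits above. */
static void set_sh_desc_sketch(struct caam_ctx_sketch *ctx)
{
	struct device *jrdev = ctx->jrdev;

	/* ... a cnstr_shdsc_*() helper fills ctx->sh_desc_enc here ... */

	/* Descriptor memory was mapped once at init time, so an
	 * update only needs a sync, not a new mapping. */
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   sizeof(ctx->sh_desc_enc), ctx->dir);

	/* ... same again for ctx->sh_desc_dec / sh_desc_dec_dma ... */
}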
589 struct device *jrdev = ctx->jrdev; in aead_setkey() local
590 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent); in aead_setkey()
597 dev_dbg(jrdev, "keylen %d enckeylen %d authkeylen %d\n", in aead_setkey()
618 dma_sync_single_for_device(jrdev, ctx->key_dma, in aead_setkey()
624 ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey, in aead_setkey()
633 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad + in aead_setkey()
670 struct device *jrdev = ctx->jrdev; in gcm_setkey() local
681 dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, ctx->dir); in gcm_setkey()
691 struct device *jrdev = ctx->jrdev; in rfc4106_setkey() local
708 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen, in rfc4106_setkey()
717 struct device *jrdev = ctx->jrdev; in rfc4543_setkey() local
734 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen, in rfc4543_setkey()
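
The aead_setkey() hits (597-633) add one step before the sync: the raw authentication key is turned into an HMAC split key by a small job submitted on the same ring. A sketch, assuming the gen_split_key() signature from the driver's key_gen.h:

static int aead_setkey_sketch(struct caam_ctx_sketch *ctx,
			      const u8 *authkey, unsigned int authkeylen,
			      const u8 *enckey, unsigned int enckeylen)
{
	struct device *jrdev = ctx->jrdev;
	int ret;

	dev_dbg(jrdev, "keylen %d enckeylen %d authkeylen %d\n",
		authkeylen + enckeylen, enckeylen, authkeylen);

	/* Derive the ipad/opad split key via a job-ring request
	 * (the hit at line 624). */
	ret = gen_split_key(jrdev, ctx->key, &ctx->adata, authkey,
			    authkeylen, sizeof(ctx->key));
	if (ret)
		return ret;

	/* The raw cipher key sits right behind the padded split key;
	 * sync the whole region to the device (the hit at line 633). */
	memcpy(ctx->key + ctx->adata.keylen_pad, enckey, enckeylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma,
				   ctx->adata.keylen_pad + enckeylen,
				   ctx->dir);
	return 0;
}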
746 struct device *jrdev = ctx->jrdev; in skcipher_setkey() local
762 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, in skcipher_setkey()
769 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, in skcipher_setkey()
846 struct device *jrdev = ctx->jrdev; in xts_skcipher_setkey() local
847 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent); in xts_skcipher_setkey()
853 dev_dbg(jrdev, "key size mismatch\n"); in xts_skcipher_setkey()
873 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, in xts_skcipher_setkey()
879 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, in xts_skcipher_setkey()
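
The skcipher/xts setkey hits (746-879) validate the key, then rebuild and sync both shared descriptors. The XTS length check below is a sketch; the real driver may route through xts_verify_key() instead:

#include <crypto/aes.h>

static int xts_setkey_sketch(struct caam_ctx_sketch *ctx,
			     const u8 *key, unsigned int keylen)
{
	struct device *jrdev = ctx->jrdev;

	/* XTS needs two full AES keys (otherwise the "key size
	 * mismatch" debug print at line 853 fires). */
	if (keylen != 2 * AES_KEYSIZE_128 &&
	    keylen != 2 * AES_KEYSIZE_256) {
		dev_dbg(jrdev, "key size mismatch\n");
		return -EINVAL;
	}

	ctx->cdata.keylen = keylen;
	/* ... rebuild ctx->sh_desc_enc and ctx->sh_desc_dec ... */

	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   sizeof(ctx->sh_desc_enc), ctx->dir);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   sizeof(ctx->sh_desc_dec), ctx->dir);
	return 0;
}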
979 static void aead_crypt_done(struct device *jrdev, u32 *desc, u32 err, in aead_crypt_done() argument
984 struct caam_drv_private_jr *jrp = dev_get_drvdata(jrdev); in aead_crypt_done()
989 dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err); in aead_crypt_done()
995 ecode = caam_jr_strstatus(jrdev, err); in aead_crypt_done()
997 aead_unmap(jrdev, edesc, req); in aead_crypt_done()
1018 static void skcipher_crypt_done(struct device *jrdev, u32 *desc, u32 err, in skcipher_crypt_done() argument
1025 struct caam_drv_private_jr *jrp = dev_get_drvdata(jrdev); in skcipher_crypt_done()
1030 dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err); in skcipher_crypt_done()
1035 ecode = caam_jr_strstatus(jrdev, err); in skcipher_crypt_done()
1037 skcipher_unmap(jrdev, edesc, req); in skcipher_crypt_done()
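
Both done callbacks (979-1037) share one shape: decode the job-ring status word, unmap the extended descriptor, complete the crypto request. A sketch of the AEAD side; caam_jr_strstatus() returning an errno matches current trees but is worth double-checking on older ones:

#include <crypto/internal/aead.h>

static void crypt_done_sketch(struct device *jrdev, u32 *desc, u32 err,
			      void *context)
{
	struct aead_request *req = context;
	int ecode = 0;

	dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);

	if (err)
		ecode = caam_jr_strstatus(jrdev, err);	/* hit at 995 */

	/* ... aead_unmap(jrdev, edesc, req); free the edesc ... */

	aead_request_complete(req, ecode);
}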
1206 struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent); in init_authenc_job()
1255 struct device *jrdev = ctx->jrdev; in init_skcipher_job() local
1265 dev_dbg(jrdev, "asked=%d, cryptlen%d\n", in init_skcipher_job()
1311 struct device *jrdev = ctx->jrdev; in aead_edesc_alloc() local
1327 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n", in aead_edesc_alloc()
1334 dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n", in aead_edesc_alloc()
1344 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n", in aead_edesc_alloc()
1351 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents, in aead_edesc_alloc()
1354 dev_err(jrdev, "unable to map source\n"); in aead_edesc_alloc()
1360 mapped_src_nents = dma_map_sg(jrdev, req->src, in aead_edesc_alloc()
1363 dev_err(jrdev, "unable to map source\n"); in aead_edesc_alloc()
1372 mapped_dst_nents = dma_map_sg(jrdev, req->dst, in aead_edesc_alloc()
1376 dev_err(jrdev, "unable to map destination\n"); in aead_edesc_alloc()
1377 dma_unmap_sg(jrdev, req->src, src_nents, in aead_edesc_alloc()
1401 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0, in aead_edesc_alloc()
1431 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg, in aead_edesc_alloc()
1433 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) { in aead_edesc_alloc()
1434 dev_err(jrdev, "unable to map S/G table\n"); in aead_edesc_alloc()
1435 aead_unmap(jrdev, edesc, req); in aead_edesc_alloc()
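
The aead_edesc_alloc() hits show the canonical scatterlist mapping dance with explicit unwinding: map src, map dst, and if dst fails, unmap src before bailing out. A simplified sketch (the real code also handles the in-place src == dst case with DMA_BIDIRECTIONAL, elided here):

static int map_req_sg_sketch(struct device *jrdev,
			     struct aead_request *req,
			     int src_nents, int dst_nents)
{
	int mapped_src, mapped_dst;

	mapped_src = dma_map_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
	if (unlikely(!mapped_src)) {
		dev_err(jrdev, "unable to map source\n");
		return -ENOMEM;
	}

	mapped_dst = dma_map_sg(jrdev, req->dst, dst_nents,
				DMA_FROM_DEVICE);
	if (unlikely(!mapped_dst)) {
		dev_err(jrdev, "unable to map destination\n");
		/* Unwind the source mapping, as the hit at 1377 does. */
		dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
		return -ENOMEM;
	}
	return 0;
}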
1445 static int aead_enqueue_req(struct device *jrdev, struct aead_request *req) in aead_enqueue_req() argument
1447 struct caam_drv_private_jr *jrpriv = dev_get_drvdata(jrdev); in aead_enqueue_req()
1462 ret = caam_jr_enqueue(jrdev, desc, aead_crypt_done, req); in aead_enqueue_req()
1465 aead_unmap(jrdev, edesc, req); in aead_enqueue_req()
1477 struct device *jrdev = ctx->jrdev; in chachapoly_crypt() local
1493 return aead_enqueue_req(jrdev, req); in chachapoly_crypt()
1511 struct device *jrdev = ctx->jrdev; in aead_crypt() local
1527 return aead_enqueue_req(jrdev, req); in aead_crypt()
1550 ret = caam_jr_enqueue(ctx->jrdev, desc, aead_crypt_done, req); in aead_do_one_req()
1556 aead_unmap(ctx->jrdev, rctx->edesc, req); in aead_do_one_req()
1570 struct device *jrdev = ctx->jrdev; in gcm_crypt() local
1586 return aead_enqueue_req(jrdev, req); in gcm_crypt()
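
All the enqueue hits (1462, 1550, 1787, 1867) submit an already-built job descriptor with a completion callback and only clean up inline when submission itself fails. A sketch of that convention (-EINPROGRESS means the callback owns cleanup; the -EBUSY backlog case some trees also accept is elided):

static int enqueue_sketch(struct device *jrdev, u32 *desc,
			  struct aead_request *req)
{
	int ret;

	ret = caam_jr_enqueue(jrdev, desc, crypt_done_sketch, req);
	if (ret != -EINPROGRESS) {
		/* Submission failed, so the callback will never run:
		 * unmap and free here instead (the hit at 1465). */
		/* aead_unmap(jrdev, edesc, req); kfree(edesc); */
	}
	return ret;
}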
1618 struct device *jrdev = ctx->jrdev; in skcipher_edesc_alloc() local
1631 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n", in skcipher_edesc_alloc()
1639 dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n", in skcipher_edesc_alloc()
1646 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents, in skcipher_edesc_alloc()
1649 dev_err(jrdev, "unable to map source\n"); in skcipher_edesc_alloc()
1653 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents, in skcipher_edesc_alloc()
1656 dev_err(jrdev, "unable to map source\n"); in skcipher_edesc_alloc()
1659 mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents, in skcipher_edesc_alloc()
1662 dev_err(jrdev, "unable to map destination\n"); in skcipher_edesc_alloc()
1663 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE); in skcipher_edesc_alloc()
1708 dev_err(jrdev, "could not allocate extended descriptor\n"); in skcipher_edesc_alloc()
1709 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0, in skcipher_edesc_alloc()
1728 iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_BIDIRECTIONAL); in skcipher_edesc_alloc()
1729 if (dma_mapping_error(jrdev, iv_dma)) { in skcipher_edesc_alloc()
1730 dev_err(jrdev, "unable to map IV\n"); in skcipher_edesc_alloc()
1731 caam_unmap(jrdev, req->src, req->dst, src_nents, in skcipher_edesc_alloc()
1756 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg, in skcipher_edesc_alloc()
1759 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) { in skcipher_edesc_alloc()
1760 dev_err(jrdev, "unable to map S/G table\n"); in skcipher_edesc_alloc()
1761 caam_unmap(jrdev, req->src, req->dst, src_nents, in skcipher_edesc_alloc()
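
skcipher_edesc_alloc() repeats the src/dst mapping from the AEAD path and adds one skcipher-specific mapping: the IV is copied into driver-owned memory and mapped DMA_BIDIRECTIONAL (hits 1728-1731), since the CAAM both reads the input IV and writes back the output IV for chaining. A sketch:

#include <crypto/skcipher.h>

static dma_addr_t map_iv_sketch(struct device *jrdev,
				struct skcipher_request *req,
				u8 *iv, unsigned int ivsize)
{
	dma_addr_t iv_dma;

	memcpy(iv, req->iv, ivsize);
	iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_BIDIRECTIONAL);
	if (dma_mapping_error(jrdev, iv_dma)) {
		dev_err(jrdev, "unable to map IV\n");
		/* Caller unwinds the src/dst SG mappings, as at 1731. */
		return DMA_MAPPING_ERROR;
	}
	return iv_dma;
}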
1787 ret = caam_jr_enqueue(ctx->jrdev, desc, skcipher_crypt_done, req); in skcipher_do_one_req()
1793 skcipher_unmap(ctx->jrdev, rctx->edesc, req); in skcipher_do_one_req()
1815 struct device *jrdev = ctx->jrdev; in skcipher_crypt() local
1816 struct caam_drv_private_jr *jrpriv = dev_get_drvdata(jrdev); in skcipher_crypt()
1817 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent); in skcipher_crypt()
1867 ret = caam_jr_enqueue(jrdev, desc, skcipher_crypt_done, req); in skcipher_crypt()
1870 skcipher_unmap(jrdev, edesc, req); in skcipher_crypt()
3583 ctx->jrdev = caam_jr_alloc(); in caam_init_common()
3584 if (IS_ERR(ctx->jrdev)) { in caam_init_common()
3586 return PTR_ERR(ctx->jrdev); in caam_init_common()
3589 priv = dev_get_drvdata(ctx->jrdev->parent); in caam_init_common()
3595 dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc, in caam_init_common()
3600 if (dma_mapping_error(ctx->jrdev, dma_addr)) { in caam_init_common()
3601 dev_err(ctx->jrdev, "unable to map key, shared descriptors\n"); in caam_init_common()
3602 caam_jr_free(ctx->jrdev); in caam_init_common()
3669 dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma, in caam_exit_common()
3673 caam_jr_free(ctx->jrdev); in caam_exit_common()
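
caam_init_common()/caam_exit_common() bracket everything above: allocate a job ring, map the descriptor-plus-key region once with DMA_ATTR_SKIP_CPU_SYNC (which is what makes the explicit dma_sync_single_for_device() calls elsewhere in this listing necessary), and tear both down on exit. A sketch against the hypothetical struct from the top of the listing; the offsetof() sizing trick is an assumption modeled on the in-tree driver:

static int init_common_sketch(struct caam_ctx_sketch *ctx)
{
	dma_addr_t dma_addr;

	ctx->jrdev = caam_jr_alloc();		/* hit at 3583 */
	if (IS_ERR(ctx->jrdev))
		return PTR_ERR(ctx->jrdev);

	/* One mapping covers sh_desc_enc, sh_desc_dec and key, i.e.
	 * everything up to the first non-DMA field of the sketch. */
	dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc,
					offsetof(struct caam_ctx_sketch,
						 sh_desc_enc_dma),
					ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
	if (dma_mapping_error(ctx->jrdev, dma_addr)) {
		dev_err(ctx->jrdev,
			"unable to map key, shared descriptors\n");
		caam_jr_free(ctx->jrdev);
		return -ENOMEM;
	}

	/* sh_desc_enc is the first field, so member offsets double
	 * as offsets into the single mapping. */
	ctx->sh_desc_enc_dma = dma_addr;
	ctx->sh_desc_dec_dma = dma_addr +
			       offsetof(struct caam_ctx_sketch, sh_desc_dec);
	ctx->key_dma = dma_addr + offsetof(struct caam_ctx_sketch, key);
	return 0;
}

static void exit_common_sketch(struct caam_ctx_sketch *ctx)
{
	dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma,
			       offsetof(struct caam_ctx_sketch,
					sh_desc_enc_dma),
			       ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
	caam_jr_free(ctx->jrdev);		/* hit at 3673 */
}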