Lines matching refs:eip93 (each entry shows the file line number, the matched source line, and the enclosing function; "argument" and "local" mark lines where the identifier is declared)
25 int eip93_parse_ctrl_stat_err(struct eip93_device *eip93, int err) in eip93_parse_ctrl_stat_err() argument
42 dev_err(eip93->dev, "Unhandled error 0x%08x\n", err); in eip93_parse_ctrl_stat_err()
63 dev_err(eip93->dev, "Unhandled ext error 0x%08x\n", ext_err); in eip93_parse_ctrl_stat_err()
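The first group of matches (file lines 25-63) is the control/status error decoder. A minimal sketch of that shape, assuming invented EIP93-style error names since the real bit definitions are not part of this listing: decode the hardware error word into an errno and log anything unrecognized.

        #include <linux/device.h>
        #include <linux/errno.h>
        #include <linux/types.h>

        /* Illustrative only: the real error bits live in the driver headers. */
        static int ex_parse_ctrl_stat_err(struct device *dev, u32 err)
        {
                if (!err)
                        return 0;

                switch (err) {
                /* case EX_ERR_AUTH_FAILED: return -EBADMSG; (invented) */
                default:
                        dev_err(dev, "Unhandled error 0x%08x\n", err);
                        return -EINVAL;
                }
        }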
68 static void *eip93_ring_next_wptr(struct eip93_device *eip93, in eip93_ring_next_wptr() argument
85 static void *eip93_ring_next_rptr(struct eip93_device *eip93, in eip93_ring_next_rptr() argument
101 int eip93_put_descriptor(struct eip93_device *eip93, in eip93_put_descriptor() argument
107 rdesc = eip93_ring_next_wptr(eip93, &eip93->ring->rdr); in eip93_put_descriptor()
111 cdesc = eip93_ring_next_wptr(eip93, &eip93->ring->cdr); in eip93_put_descriptor()
122 void *eip93_get_descriptor(struct eip93_device *eip93) in eip93_get_descriptor() argument
127 cdesc = eip93_ring_next_rptr(eip93, &eip93->ring->cdr); in eip93_get_descriptor()
133 ptr = eip93_ring_next_rptr(eip93, &eip93->ring->rdr); in eip93_get_descriptor()
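File lines 68-133 cover the descriptor-ring plumbing: eip93_ring_next_wptr()/_rptr() return the current slot and advance a pointer with wraparound, and eip93_put_descriptor() claims one slot in the result ring (rdr) and one in the command ring (cdr) per operation. A self-contained sketch under assumed ex_* names; full-ring and ownership checks, which the real helpers presumably perform, are omitted for brevity.

        #include <linux/string.h>
        #include <linux/types.h>

        struct ex_desc { u32 word[8]; };        /* placeholder descriptor */

        struct ex_desc_ring {
                struct ex_desc *base, *base_end;  /* first and last slot */
                struct ex_desc *write, *read;     /* producer/consumer */
        };

        struct ex_rings { struct ex_desc_ring cdr, rdr; };

        static struct ex_desc *ex_ring_next_wptr(struct ex_desc_ring *ring)
        {
                struct ex_desc *ptr = ring->write;

                /* advance the write pointer, wrapping at the ring's end */
                ring->write = (ring->write == ring->base_end) ?
                              ring->base : ring->write + 1;
                return ptr;
        }

        static int ex_put_descriptor(struct ex_rings *ring, struct ex_desc *cdesc)
        {
                struct ex_desc *rdesc = ex_ring_next_wptr(&ring->rdr);
                struct ex_desc *cslot = ex_ring_next_wptr(&ring->cdr);

                memset(rdesc, 0, sizeof(*rdesc));       /* result slot starts clean */
                memcpy(cslot, cdesc, sizeof(*cdesc));   /* queue the command */
                return 0;
        }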
380 static int eip93_scatter_combine(struct eip93_device *eip93, in eip93_scatter_combine() argument
497 scoped_guard(spinlock_irqsave, &eip93->ring->write_lock) in eip93_scatter_combine()
498 err = eip93_put_descriptor(eip93, cdesc); in eip93_scatter_combine()
505 writel(1, eip93->base + EIP93_REG_PE_CD_COUNT); in eip93_scatter_combine()
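File lines 497-505 show the enqueue step in eip93_scatter_combine(): insert the command descriptor under the ring's write lock via scoped_guard(), then kick the packet engine by writing 1 to the CD_COUNT register. A sketch of that pattern with invented minimal types and a placeholder register offset; only the register name is taken from the match.

        #include <linux/cleanup.h>
        #include <linux/io.h>
        #include <linux/spinlock.h>

        struct ex_ring { spinlock_t write_lock; };
        struct ex_dev { void __iomem *base; struct ex_ring *ring; };
        #define EX_REG_PE_CD_COUNT 0x0          /* placeholder offset */

        int ex_put_descriptor(struct ex_dev *ex, void *cdesc);  /* as sketched above */

        static int ex_enqueue(struct ex_dev *ex, void *cdesc)
        {
                int err;

                /* fill one command-ring slot under the ring's write lock */
                scoped_guard(spinlock_irqsave, &ex->ring->write_lock)
                        err = ex_put_descriptor(ex, cdesc);
                if (err)
                        return err;

                /* tell the engine one more command descriptor is ready */
                writel(1, ex->base + EX_REG_PE_CD_COUNT);
                return 0;
        }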
515 struct eip93_device *eip93 = ctx->eip93; in eip93_send_req() local
574 rctx->sa_state_ctr_base = dma_map_single(eip93->dev, rctx->sa_state_ctr, in eip93_send_req()
577 err = dma_mapping_error(eip93->dev, rctx->sa_state_ctr_base); in eip93_send_req()
583 rctx->sa_state_base = dma_map_single(eip93->dev, rctx->sa_state, in eip93_send_req()
585 err = dma_mapping_error(eip93->dev, rctx->sa_state_base); in eip93_send_req()
596 scoped_guard(spinlock_bh, &eip93->ring->idr_lock) in eip93_send_req()
597 crypto_async_idr = idr_alloc(&eip93->ring->crypto_async_idr, async, 0, in eip93_send_req()
608 if (!dma_map_sg(eip93->dev, dst, rctx->dst_nents, DMA_BIDIRECTIONAL)) { in eip93_send_req()
614 !dma_map_sg(eip93->dev, src, rctx->src_nents, DMA_TO_DEVICE)) { in eip93_send_req()
619 return eip93_scatter_combine(eip93, rctx, datalen, split, offsetin); in eip93_send_req()
622 dma_unmap_sg(eip93->dev, dst, rctx->dst_nents, DMA_BIDIRECTIONAL); in eip93_send_req()
625 dma_unmap_single(eip93->dev, rctx->sa_state_ctr_base, in eip93_send_req()
631 dma_unmap_single(eip93->dev, rctx->sa_state_base, in eip93_send_req()
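File lines 515-631 are the eip93_send_req() setup path: map the SA state buffers with dma_map_single(), verify each with dma_mapping_error(), track the async request via idr_alloc() under scoped_guard(spinlock_bh, ...), map the scatterlists, and unwind the mappings in reverse order on failure. A sketch of the DMA portion under an assumed request-context shape (field names follow the matches; the idr step is not repeated here):

        #include <linux/dma-mapping.h>
        #include <linux/errno.h>
        #include <linux/scatterlist.h>

        struct ex_rctx {
                void *sa_state;
                dma_addr_t sa_state_base;
                struct scatterlist *sg_src, *sg_dst;
                int src_nents, dst_nents;
                size_t sa_state_len;
        };

        static int ex_map_for_send(struct device *dev, struct ex_rctx *rctx)
        {
                /* map the SA state for the engine and verify the mapping */
                rctx->sa_state_base = dma_map_single(dev, rctx->sa_state,
                                                     rctx->sa_state_len,
                                                     DMA_TO_DEVICE);
                if (dma_mapping_error(dev, rctx->sa_state_base))
                        return -ENOMEM;

                /* destination is written by the engine, hence bidirectional */
                if (!dma_map_sg(dev, rctx->sg_dst, rctx->dst_nents,
                                DMA_BIDIRECTIONAL))
                        goto err_unmap_sa;

                /* a distinct source scatterlist is read-only for the device */
                if (rctx->sg_src != rctx->sg_dst &&
                    !dma_map_sg(dev, rctx->sg_src, rctx->src_nents,
                                DMA_TO_DEVICE))
                        goto err_unmap_dst;

                return 0;

        err_unmap_dst:
                dma_unmap_sg(dev, rctx->sg_dst, rctx->dst_nents,
                             DMA_BIDIRECTIONAL);
        err_unmap_sa:
                dma_unmap_single(dev, rctx->sa_state_base, rctx->sa_state_len,
                                 DMA_TO_DEVICE);
                return -ENOMEM;
        }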
640 void eip93_unmap_dma(struct eip93_device *eip93, struct eip93_cipher_reqctx *rctx, in eip93_unmap_dma() argument
650 dma_unmap_sg(eip93->dev, rctx->sg_dst, rctx->dst_nents, in eip93_unmap_dma()
655 dma_unmap_sg(eip93->dev, rctx->sg_src, rctx->src_nents, in eip93_unmap_dma()
661 dma_unmap_sg(eip93->dev, rctx->sg_dst, rctx->dst_nents, in eip93_unmap_dma()
684 void eip93_handle_result(struct eip93_device *eip93, struct eip93_cipher_reqctx *rctx, in eip93_handle_result() argument
688 dma_unmap_single(eip93->dev, rctx->sa_state_ctr_base, in eip93_handle_result()
693 dma_unmap_single(eip93->dev, rctx->sa_state_base, in eip93_handle_result()
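The final matches (file lines 640-693) are the teardown side: eip93_unmap_dma() releases the scatterlist mappings and eip93_handle_result() releases the SA state buffers mapped in the send path. A sketch of the combined completion flow, reusing the same invented ex_rctx shape as the send-path sketch above:

        #include <linux/dma-mapping.h>
        #include <linux/scatterlist.h>

        struct ex_rctx {
                dma_addr_t sa_state_base;
                struct scatterlist *sg_src, *sg_dst;
                int src_nents, dst_nents;
                size_t sa_state_len;
        };

        static void ex_handle_result(struct device *dev, struct ex_rctx *rctx)
        {
                /* release the scatterlist mappings, source first if distinct */
                if (rctx->sg_src != rctx->sg_dst)
                        dma_unmap_sg(dev, rctx->sg_src, rctx->src_nents,
                                     DMA_TO_DEVICE);
                dma_unmap_sg(dev, rctx->sg_dst, rctx->dst_nents,
                             DMA_BIDIRECTIONAL);

                /* then release the SA state mapped in the send path */
                dma_unmap_single(dev, rctx->sa_state_base, rctx->sa_state_len,
                                 DMA_TO_DEVICE);
        }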