Lines matching refs:sreq in drivers/crypto/inside-secure/safexcel_cipher.c (Linux kernel)

507 struct safexcel_cipher_req *sreq, in safexcel_context_control() argument
529 (sreq->direction == SAFEXCEL_ENCRYPT ? in safexcel_context_control()
544 if (sreq->direction == SAFEXCEL_ENCRYPT && in safexcel_context_control()
549 else if (sreq->direction == SAFEXCEL_ENCRYPT) in safexcel_context_control()
559 if (sreq->direction == SAFEXCEL_ENCRYPT) in safexcel_context_control()
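The safexcel_context_control() matches above show the request direction driving the per-request control words. For orientation, a minimal stand-alone sketch of that dispatch, with a stub request struct whose field set is inferred from this listing (the real definitions live in the driver's headers) and hypothetical control tokens; the later sketches below reuse this stub struct:

#include <linux/types.h>

/* Stub of the request state this listing revolves around; field set
 * inferred from the matches, not copied from the driver. */
enum safexcel_cipher_direction {
	SAFEXCEL_ENCRYPT,
	SAFEXCEL_DECRYPT,
};

struct safexcel_cipher_req {
	enum safexcel_cipher_direction direction;
	unsigned int rdescs;		/* result descriptors to reap */
	bool needs_inv;			/* context invalidation pending */
	int nr_src, nr_dst;		/* mapped scatterlist entries */
};

#define CTRL_CRYPTO_OUT	0x1		/* hypothetical encrypt token */
#define CTRL_CRYPTO_IN	0x2		/* hypothetical decrypt token */

static void context_control_sketch(struct safexcel_cipher_req *sreq,
				   u32 *control0)
{
	/* Direction selects outbound (encrypt) vs. inbound (decrypt)
	 * processing in the first context-control word. */
	if (sreq->direction == SAFEXCEL_ENCRYPT)
		*control0 = CTRL_CRYPTO_OUT;
	else
		*control0 = CTRL_CRYPTO_IN;
}
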
612 struct safexcel_cipher_req *sreq, in safexcel_handle_req_result() argument
623 if (unlikely(!sreq->rdescs)) in safexcel_handle_req_result()
626 while (sreq->rdescs--) { in safexcel_handle_req_result()
644 if (sreq->nr_src > 0) in safexcel_handle_req_result()
645 dma_unmap_sg(priv->dev, src, sreq->nr_src, in safexcel_handle_req_result()
648 if (sreq->nr_src > 0) in safexcel_handle_req_result()
649 dma_unmap_sg(priv->dev, src, sreq->nr_src, in safexcel_handle_req_result()
651 if (sreq->nr_dst > 0) in safexcel_handle_req_result()
652 dma_unmap_sg(priv->dev, dst, sreq->nr_dst, in safexcel_handle_req_result()
660 (sreq->direction == SAFEXCEL_ENCRYPT)) { in safexcel_handle_req_result()
662 sg_pcopy_to_buffer(dst, sreq->nr_dst, areq->iv, in safexcel_handle_req_result()
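In safexcel_handle_req_result() the matches trace the completion path: reap sreq->rdescs result descriptors, unmap the scatterlists (one bidirectional mapping when the request was in-place, separate to-device/from-device mappings otherwise), and, on encryption, copy the last output block back into areq->iv so CBC-style chaining sees the right IV. A simplified sketch, reusing the stub struct above and eliding the ring bookkeeping:

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>
#include <linux/types.h>

static int handle_req_result_sketch(struct device *dev,
				    struct safexcel_cipher_req *sreq,
				    struct scatterlist *src,
				    struct scatterlist *dst,
				    u8 *iv, unsigned int ivsize,
				    unsigned int cryptlen)
{
	if (unlikely(!sreq->rdescs))
		return 0;		/* nothing completed yet */

	while (sreq->rdescs--)
		;			/* per-descriptor ring bookkeeping elided */

	if (src == dst) {		/* in-place request */
		if (sreq->nr_src > 0)
			dma_unmap_sg(dev, src, sreq->nr_src,
				     DMA_BIDIRECTIONAL);
	} else {
		if (sreq->nr_src > 0)
			dma_unmap_sg(dev, src, sreq->nr_src,
				     DMA_TO_DEVICE);
		if (sreq->nr_dst > 0)
			dma_unmap_sg(dev, dst, sreq->nr_dst,
				     DMA_FROM_DEVICE);
	}

	/* On encrypt, the next chaining IV is the last ciphertext
	 * block: copy it from the end of the output back into the
	 * request IV (assumed CBC-style mode). */
	if (sreq->direction == SAFEXCEL_ENCRYPT)
		sg_pcopy_to_buffer(dst, sreq->nr_dst, iv, ivsize,
				   cryptlen - ivsize);
	return 0;
}
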
674 struct safexcel_cipher_req *sreq, in safexcel_send_req() argument
696 sreq->nr_src = sg_nents_for_len(src, totlen_src); in safexcel_send_req()
703 if (sreq->direction == SAFEXCEL_DECRYPT) in safexcel_send_req()
715 (sreq->direction == SAFEXCEL_DECRYPT)) { in safexcel_send_req()
721 sg_pcopy_to_buffer(src, sreq->nr_src, areq->iv, in safexcel_send_req()
727 sreq->nr_dst = sg_nents_for_len(dst, totlen_dst); in safexcel_send_req()
737 sreq->nr_src = max(sreq->nr_src, sreq->nr_dst); in safexcel_send_req()
738 sreq->nr_dst = sreq->nr_src; in safexcel_send_req()
740 (sreq->nr_src <= 0))) { in safexcel_send_req()
745 if (sreq->nr_src > 0) in safexcel_send_req()
746 dma_map_sg(priv->dev, src, sreq->nr_src, in safexcel_send_req()
749 if (unlikely(totlen_src && (sreq->nr_src <= 0))) { in safexcel_send_req()
755 if (sreq->nr_src > 0) in safexcel_send_req()
756 dma_map_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE); in safexcel_send_req()
758 if (unlikely(totlen_dst && (sreq->nr_dst <= 0))) { in safexcel_send_req()
765 if (sreq->nr_dst > 0) in safexcel_send_req()
766 dma_map_sg(priv->dev, dst, sreq->nr_dst, in safexcel_send_req()
792 for_each_sg(src, sg, sreq->nr_src, i) { in safexcel_send_req()
819 safexcel_context_control(ctx, base, sreq, first_cdesc); in safexcel_send_req()
822 sreq->direction, cryptlen, in safexcel_send_req()
829 for_each_sg(dst, sg, sreq->nr_dst, i) { in safexcel_send_req()
830 bool last = (i == sreq->nr_dst - 1); in safexcel_send_req()
899 if (sreq->nr_src > 0) in safexcel_send_req()
900 dma_unmap_sg(priv->dev, src, sreq->nr_src, in safexcel_send_req()
903 if (sreq->nr_src > 0) in safexcel_send_req()
904 dma_unmap_sg(priv->dev, src, sreq->nr_src, in safexcel_send_req()
906 if (sreq->nr_dst > 0) in safexcel_send_req()
907 dma_unmap_sg(priv->dev, dst, sreq->nr_dst, in safexcel_send_req()
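safexcel_send_req() does the bulk of the sreq bookkeeping: count scatterlist entries with sg_nents_for_len(); on decrypt, save the last source block into areq->iv before processing overwrites it (the inverse of the encrypt-side copy above); for in-place requests force nr_src == nr_dst and map one bidirectional list, otherwise map source and destination separately with per-direction error checks; then walk both lists with for_each_sg() to emit command and result descriptors (the `last = (i == sreq->nr_dst - 1)` match marks the final result descriptor), with the unwind matches at the end of the function unmapping whatever was mapped on failure. A condensed sketch of the counting and mapping half, with descriptor construction elided:

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/minmax.h>
#include <linux/scatterlist.h>
#include <linux/types.h>

static int send_req_mapping_sketch(struct device *dev,
				   struct safexcel_cipher_req *sreq,
				   struct scatterlist *src,
				   struct scatterlist *dst,
				   unsigned int totlen_src,
				   unsigned int totlen_dst,
				   u8 *iv, unsigned int ivsize)
{
	sreq->nr_src = sg_nents_for_len(src, totlen_src);
	sreq->nr_dst = sg_nents_for_len(dst, totlen_dst);

	/* On decrypt the next chaining IV is the last *input* block;
	 * save it now, before in-place processing destroys it. */
	if (sreq->direction == SAFEXCEL_DECRYPT)
		sg_pcopy_to_buffer(src, sreq->nr_src, iv, ivsize,
				   totlen_src - ivsize);

	if (src == dst) {
		/* In-place: one bidirectional mapping serves both
		 * directions, so the two counts must agree. */
		sreq->nr_src = max(sreq->nr_src, sreq->nr_dst);
		sreq->nr_dst = sreq->nr_src;
		if (unlikely((totlen_src || totlen_dst) &&
			     sreq->nr_src <= 0))
			return -EINVAL;
		if (sreq->nr_src > 0)
			dma_map_sg(dev, src, sreq->nr_src,
				   DMA_BIDIRECTIONAL);
	} else {
		if (unlikely(totlen_src && sreq->nr_src <= 0))
			return -EINVAL;
		if (sreq->nr_src > 0)
			dma_map_sg(dev, src, sreq->nr_src,
				   DMA_TO_DEVICE);
		if (unlikely(totlen_dst && sreq->nr_dst <= 0)) {
			if (sreq->nr_src > 0)	/* unwind the source */
				dma_unmap_sg(dev, src, sreq->nr_src,
					     DMA_TO_DEVICE);
			return -EINVAL;
		}
		if (sreq->nr_dst > 0)
			dma_map_sg(dev, dst, sreq->nr_dst,
				   DMA_FROM_DEVICE);
	}
	return 0;
}
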
917 struct safexcel_cipher_req *sreq, in safexcel_handle_inv_result() argument
926 if (unlikely(!sreq->rdescs)) in safexcel_handle_inv_result()
929 while (sreq->rdescs--) { in safexcel_handle_inv_result()
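safexcel_handle_inv_result() reaps sreq->rdescs the same way as the normal result path; what differs is the tail: instead of unmapping user buffers, it completes the invalidation so the waiter in the exit path (further below) can proceed. A minimal sketch of that shape, assuming a plain completion as the hand-off:

#include <linux/completion.h>

static int handle_inv_result_sketch(struct safexcel_cipher_req *sreq,
				    struct completion *done)
{
	if (unlikely(!sreq->rdescs))
		return 0;

	while (sreq->rdescs--)
		;			/* ring bookkeeping elided */

	complete(done);			/* wake the exit_inv waiter */
	return 0;
}
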
979 struct safexcel_cipher_req *sreq = skcipher_request_ctx(req); in safexcel_skcipher_handle_result() local
982 if (sreq->needs_inv) { in safexcel_skcipher_handle_result()
983 sreq->needs_inv = false; in safexcel_skcipher_handle_result()
984 err = safexcel_handle_inv_result(priv, ring, async, sreq, in safexcel_skcipher_handle_result()
988 req->dst, req->cryptlen, sreq, in safexcel_skcipher_handle_result()
1002 struct safexcel_cipher_req *sreq = aead_request_ctx(req); in safexcel_aead_handle_result() local
1005 if (sreq->needs_inv) { in safexcel_aead_handle_result()
1006 sreq->needs_inv = false; in safexcel_aead_handle_result()
1007 err = safexcel_handle_inv_result(priv, ring, async, sreq, in safexcel_aead_handle_result()
1013 sreq, should_complete, ret); in safexcel_aead_handle_result()
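Both result callbacks share one dispatch shape and differ only in how sreq is recovered (skcipher_request_ctx() vs. aead_request_ctx()). The needs_inv flag is one-shot: it is consumed before routing to the invalidation path. A sketch of the common pattern, with the two result paths as stubs:

/* The two result paths; the sketches above flesh these out. */
static int inv_result_path(struct safexcel_cipher_req *sreq);
static int req_result_path(struct safexcel_cipher_req *sreq);

static int handle_result_dispatch_sketch(struct safexcel_cipher_req *sreq)
{
	if (sreq->needs_inv) {
		sreq->needs_inv = false;	/* consume the flag */
		return inv_result_path(sreq);
	}
	return req_result_path(sreq);
}
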
1041 struct safexcel_cipher_req *sreq = skcipher_request_ctx(req); in safexcel_skcipher_send() local
1045 BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv); in safexcel_skcipher_send()
1047 if (sreq->needs_inv) { in safexcel_skcipher_send()
1059 ret = safexcel_send_req(async, ring, sreq, req->src, in safexcel_skcipher_send()
1064 sreq->rdescs = *results; in safexcel_skcipher_send()
1074 struct safexcel_cipher_req *sreq = aead_request_ctx(req); in safexcel_aead_send() local
1078 BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv); in safexcel_aead_send()
1080 if (sreq->needs_inv) in safexcel_aead_send()
1083 ret = safexcel_send_req(async, ring, sreq, req->src, req->dst, in safexcel_aead_send()
1087 sreq->rdescs = *results; in safexcel_aead_send()
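The send side mirrors that dispatch. The BUG_ON() documents an invariant: a needs_inv request can only exist when the engine has a transform record cache (the EIP197_TRC_CACHE flag in the driver), since that is the only state worth invalidating. Whichever path runs, the resulting descriptor count is stored in sreq->rdescs for the completion handler. A sketch with hypothetical send helpers:

#include <linux/bug.h>

static void send_inv_command(int *commands, int *results);	/* stub */
static void send_cipher_request(int *commands, int *results);	/* stub */

static int send_dispatch_sketch(bool has_trc_cache,
				struct safexcel_cipher_req *sreq,
				int *commands, int *results)
{
	/* Invalidation only makes sense when there is a transform
	 * record cache to invalidate. */
	BUG_ON(!has_trc_cache && sreq->needs_inv);

	if (sreq->needs_inv)
		send_inv_command(commands, results);
	else
		send_cipher_request(commands, results);

	sreq->rdescs = *results;	/* reaped at completion time */
	return 0;
}
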
1093 struct safexcel_cipher_req *sreq, in safexcel_cipher_exit_inv() argument
1103 sreq->needs_inv = true; in safexcel_cipher_exit_inv()
1127 struct safexcel_cipher_req *sreq = skcipher_request_ctx(req); in safexcel_skcipher_exit_inv() local
1136 return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result); in safexcel_skcipher_exit_inv()
1142 struct safexcel_cipher_req *sreq = aead_request_ctx(req); in safexcel_aead_exit_inv() local
1151 return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result); in safexcel_aead_exit_inv()
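Teardown of a transform with a live hardware context goes through safexcel_cipher_exit_inv(): mark the request as an invalidation, queue it, and block until the invalidation result path completes it. A sketch assuming a completion-based result object and a hypothetical queue_request() helper standing in for the ring enqueue:

#include <linux/completion.h>

struct safexcel_inv_result_sketch {
	struct completion completion;
	int error;
};

static void queue_request(struct safexcel_cipher_req *sreq);	/* stub */

static int cipher_exit_inv_sketch(struct safexcel_cipher_req *sreq,
				  struct safexcel_inv_result_sketch *result)
{
	init_completion(&result->completion);
	sreq->needs_inv = true;		/* route to the inv paths above */
	queue_request(sreq);
	wait_for_completion(&result->completion);
	return result->error;
}
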
1155 struct safexcel_cipher_req *sreq, in safexcel_queue_req() argument
1162 sreq->needs_inv = false; in safexcel_queue_req()
1163 sreq->direction = dir; in safexcel_queue_req()
1167 sreq->needs_inv = true; in safexcel_queue_req()
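Finally, safexcel_queue_req() initializes the per-request state: needs_inv defaults to false and the direction is recorded, but if the transform's cached hardware context is stale (typically after a setkey while a context record is live), the request is turned into an invalidation first. A sketch, with the staleness test reduced to a hypothetical flag:

static int queue_req_sketch(struct safexcel_cipher_req *sreq,
			    enum safexcel_cipher_direction dir,
			    bool ctx_stale)
{
	sreq->needs_inv = false;
	sreq->direction = dir;

	/* ctx_stale stands in for the driver's real check on its
	 * context record state. */
	if (ctx_stale)
		sreq->needs_inv = true;

	queue_request(sreq);		/* stub from the sketch above */
	return 0;
}
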