Lines Matching defs:sreq

507 struct safexcel_cipher_req *sreq,
529 (sreq->direction == SAFEXCEL_ENCRYPT ?
544 if (sreq->direction == SAFEXCEL_ENCRYPT &&
549 else if (sreq->direction == SAFEXCEL_ENCRYPT)
559 if (sreq->direction == SAFEXCEL_ENCRYPT)
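The matches at 529-559 sit in the function whose parameter list starts at 507 (apparently safexcel_context_control(), given the call at 807): the per-request direction stored in sreq selects the encrypt or decrypt variant of the command descriptor's control words. An illustrative fragment of that pattern, assuming the CONTEXT_CONTROL_TYPE_CRYPTO_OUT/_IN constants from the driver's safexcel.h; other control bits are omitted:

	/* Direction is chosen per request, not per transform, so one
	 * tfm can serve encrypt and decrypt callers concurrently. */
	if (sreq->direction == SAFEXCEL_ENCRYPT)
		cdesc->control_data.control0 = CONTEXT_CONTROL_TYPE_CRYPTO_OUT;
	else
		cdesc->control_data.control0 = CONTEXT_CONTROL_TYPE_CRYPTO_IN;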
612 struct safexcel_cipher_req *sreq,
623 if (unlikely(!sreq->rdescs))
626 while (sreq->rdescs--) {
644 dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
646 dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
647 dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
654 (sreq->direction == SAFEXCEL_ENCRYPT)) {
656 sg_pcopy_to_buffer(dst, sreq->nr_dst, areq->iv,
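The 612-656 cluster is the completion side (safexcel_handle_req_result(), going by the call continuation at 970): DMA mappings are released, and for CBC encryption the final ciphertext block is copied back into the request IV so a chained request starts from the right state. A condensed sketch; cbc_mode, ivsize and cryptlen are stand-ins for the driver's context-mode check and crypto_skcipher_ivsize()/request length:

	if (src == dst) {
		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
		dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
	}

	if (cbc_mode && sreq->direction == SAFEXCEL_ENCRYPT)
		/* Output IV for chaining: the last ciphertext block. */
		sg_pcopy_to_buffer(dst, sreq->nr_dst, areq->iv, ivsize,
				   cryptlen - ivsize);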
668 struct safexcel_cipher_req *sreq,
690 sreq->nr_src = sg_nents_for_len(src, totlen_src);
697 if (sreq->direction == SAFEXCEL_DECRYPT)
709 (sreq->direction == SAFEXCEL_DECRYPT)) {
715 sg_pcopy_to_buffer(src, sreq->nr_src, areq->iv,
721 sreq->nr_dst = sg_nents_for_len(dst, totlen_dst);
731 sreq->nr_src = max(sreq->nr_src, sreq->nr_dst);
732 sreq->nr_dst = sreq->nr_src;
734 (sreq->nr_src <= 0))) {
739 dma_map_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
741 if (unlikely(totlen_src && (sreq->nr_src <= 0))) {
746 dma_map_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
748 if (unlikely(totlen_dst && (sreq->nr_dst <= 0))) {
751 dma_unmap_sg(priv->dev, src, sreq->nr_src,
755 dma_map_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
780 for_each_sg(src, sg, sreq->nr_src, i) {
807 safexcel_context_control(ctx, base, sreq, first_cdesc);
810 sreq->direction, cryptlen,
817 for_each_sg(dst, sg, sreq->nr_dst, i) {
818 bool last = (i == sreq->nr_dst - 1);
887 dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
889 dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
890 dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
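Lines 668-890 are the submission path (safexcel_send_req()). Its scatterlist accounting reconstructs cleanly from the matches above: count entries with sg_nents_for_len(), collapse to a single bidirectional mapping when the request is in place, and otherwise map source and destination separately, unwinding the source mapping if the destination list turns out to be unusable. Reassembled, with the driver's dev_err() reporting condensed to bare returns:

	sreq->nr_src = sg_nents_for_len(src, totlen_src);
	sreq->nr_dst = sg_nents_for_len(dst, totlen_dst);

	if (src == dst) {
		/* In place: one list, mapped for both directions. */
		sreq->nr_src = max(sreq->nr_src, sreq->nr_dst);
		sreq->nr_dst = sreq->nr_src;
		if (unlikely((totlen_src || totlen_dst) &&
			     (sreq->nr_src <= 0)))
			return -EINVAL;
		dma_map_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
	} else {
		if (unlikely(totlen_src && (sreq->nr_src <= 0)))
			return -EINVAL;
		dma_map_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);

		if (unlikely(totlen_dst && (sreq->nr_dst <= 0))) {
			dma_unmap_sg(priv->dev, src, sreq->nr_src,
				     DMA_TO_DEVICE);
			return -EINVAL;
		}
		dma_map_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
	}

The command and result descriptors are then built by walking the mapped lists (the for_each_sg loops at 780 and 817), with the last destination entry flagged (818) so the engine knows where output ends; the unmaps at 887-890 are the error path that undoes the mapping above.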
899 struct safexcel_cipher_req *sreq,
908 if (unlikely(!sreq->rdescs))
911 while (sreq->rdescs--) {
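899-911 mirror 623-626: both result handlers first bail out if nothing was queued for this request (!sreq->rdescs), then consume exactly sreq->rdescs entries from the result descriptor ring. A sketch of that drain loop, assuming the driver's safexcel_ring_next_rptr() helper and ring layout:

	if (unlikely(!sreq->rdescs))
		return 0;	/* request never reached the ring */

	while (sreq->rdescs--) {
		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
		if (IS_ERR(rdesc))
			break;	/* the real code records the error and keeps counting */
		ndesc++;
	}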
961 struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
964 if (sreq->needs_inv) {
965 sreq->needs_inv = false;
966 err = safexcel_handle_inv_result(priv, ring, async, sreq,
970 req->dst, req->cryptlen, sreq,
984 struct safexcel_cipher_req *sreq = aead_request_ctx(req);
987 if (sreq->needs_inv) {
988 sreq->needs_inv = false;
989 err = safexcel_handle_inv_result(priv, ring, async, sreq,
995 sreq, should_complete, ret);
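The two completion dispatchers (961-970 for skcipher, 984-995 for aead) share one shape, reassembled directly from the fragments above: a request flagged needs_inv is routed to the invalidation handler exactly once, everything else to the normal result handler:

	if (sreq->needs_inv) {
		sreq->needs_inv = false;
		err = safexcel_handle_inv_result(priv, ring, async, sreq,
						 should_complete, ret);
	} else {
		err = safexcel_handle_req_result(priv, ring, async, req->src,
						 req->dst, req->cryptlen, sreq,
						 should_complete, ret);
	}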
1023 struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
1027 BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
1029 if (sreq->needs_inv) {
1041 ret = safexcel_send_req(async, ring, sreq, req->src,
1046 sreq->rdescs = *results;
1056 struct safexcel_cipher_req *sreq = aead_request_ctx(req);
1060 BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
1062 if (sreq->needs_inv)
1065 ret = safexcel_send_req(async, ring, sreq, req->src, req->dst,
1069 sreq->rdescs = *results;
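The send wrappers (1023-1046 and 1056-1069) repeat that split on the submit side: assert that an invalidation request can only exist when the engine has a transform record cache (EIP197_TRC_CACHE), submit either the invalidation or the regular request, and record how many result descriptors to expect at completion. Sketch of the skcipher variant; the safexcel_cipher_send_inv() name and the trailing safexcel_send_req() arguments are not visible in the matches and are assumed from the upstream driver, so check them against your tree:

	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);

	if (sreq->needs_inv)
		ret = safexcel_cipher_send_inv(async, ring, commands, results);
	else
		ret = safexcel_send_req(async, ring, sreq, req->src, req->dst,
					req->cryptlen, 0, 0, req->iv,
					commands, results);

	sreq->rdescs = *results;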
1075 struct safexcel_cipher_req *sreq,
1086 sreq->needs_inv = true;
1110 struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
1119 return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
1125 struct safexcel_cipher_req *sreq = aead_request_ctx(req);
1134 return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
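safexcel_cipher_exit_inv() (1075-1086) flags a throwaway request with sreq->needs_inv = true and blocks until the engine has flushed the cached context record; 1110-1119 and 1125-1134 are its skcipher and aead callers. A sketch of the skcipher caller side, assuming safexcel_inv_result wraps a completion and that a safexcel_inv_complete callback fires it (both names assumed, not shown in the matches):

	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
	struct safexcel_inv_result result = {};

	memset(req, 0, sizeof(*req));
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      safexcel_inv_complete, &result);
	skcipher_request_set_tfm(req, __crypto_skcipher_cast(tfm));
	init_completion(&result.completion);

	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);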
1138 struct safexcel_cipher_req *sreq,
1145 sreq->needs_inv = false;
1146 sreq->direction = dir;
1150 sreq->needs_inv = true;
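Finally, the queueing function whose parameter list starts at 1138 is where both per-request fields originate: needs_inv starts cleared and direction records the caller's intent, and the invalidation flag is only raised when a context record already exists, the engine caches transform records, and the context was marked stale. Reassembled from 1145-1150, with the ctx->base fields assumed from the driver:

	sreq->needs_inv = false;
	sreq->direction = dir;

	if (ctx->base.ctxr) {
		if (priv->flags & EIP197_TRC_CACHE && ctx->base.needs_inv) {
			sreq->needs_inv = true;
			ctx->base.needs_inv = false;
		}
	}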