Lines Matching defs:sreq
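(These matches are consistent with the Linux inside-secure SafeXcel EIP-97/EIP-197 driver, most likely drivers/crypto/inside-secure/safexcel_cipher.c; there, sreq is a struct safexcel_cipher_req pointer, the per-request state kept in the skcipher/aead request context. The matches cluster into the driver's main paths: context-control setup, result handling, request submission, and transform-record invalidation.)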
507 struct safexcel_cipher_req *sreq,
529 (sreq->direction == SAFEXCEL_ENCRYPT ?
544 if (sreq->direction == SAFEXCEL_ENCRYPT &&
549 else if (sreq->direction == SAFEXCEL_ENCRYPT)
559 if (sreq->direction == SAFEXCEL_ENCRYPT)
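Lines 507-559 appear to fall inside safexcel_context_control() (the call at line 822 names it), where the direction stored in sreq selects between the encrypt and decrypt context-control words. A minimal sketch of that selection pattern; CTRL_CRYPTO_OUT/CTRL_CRYPTO_IN are illustrative stand-ins, not the driver's real CONTEXT_CONTROL_* constants:

#include <linux/types.h>

/* Mirrors the driver's direction enum; the two constants are hypothetical. */
enum safexcel_cipher_direction { SAFEXCEL_ENCRYPT, SAFEXCEL_DECRYPT };

static u32 ctrl_type(enum safexcel_cipher_direction dir)
{
	return dir == SAFEXCEL_ENCRYPT ? CTRL_CRYPTO_OUT : CTRL_CRYPTO_IN;
}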
612 struct safexcel_cipher_req *sreq,
623 if (unlikely(!sreq->rdescs))
626 while (sreq->rdescs--) {
644 if (sreq->nr_src > 0)
645 dma_unmap_sg(priv->dev, src, sreq->nr_src,
648 if (sreq->nr_src > 0)
649 dma_unmap_sg(priv->dev, src, sreq->nr_src,
651 if (sreq->nr_dst > 0)
652 dma_unmap_sg(priv->dev, dst, sreq->nr_dst,
660 (sreq->direction == SAFEXCEL_ENCRYPT)) {
662 sg_pcopy_to_buffer(dst, sreq->nr_dst, areq->iv,
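Lines 612-662 are the completion path (safexcel_handle_req_result() by the call pattern at line 991): it drains sreq->rdescs result descriptors, unmaps the DMA scatterlists, and for CBC encryption copies the last ciphertext block back into areq->iv as the output IV. A condensed sketch of that teardown, assuming in-place means src == dst and using the real dma_unmap_sg()/sg_pcopy_to_buffer() kernel APIs:

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

/* Hedged sketch of the completion-path teardown. */
static void teardown(struct device *dev, struct scatterlist *src,
		     struct scatterlist *dst, int nr_src, int nr_dst,
		     bool encrypt_cbc, u8 *iv, unsigned int ivsize,
		     unsigned int cryptlen)
{
	if (src == dst) {
		if (nr_src > 0)	/* in place: one bidirectional mapping */
			dma_unmap_sg(dev, src, nr_src, DMA_BIDIRECTIONAL);
	} else {
		if (nr_src > 0)
			dma_unmap_sg(dev, src, nr_src, DMA_TO_DEVICE);
		if (nr_dst > 0)
			dma_unmap_sg(dev, dst, nr_dst, DMA_FROM_DEVICE);
	}

	/* CBC encrypt: the last ciphertext block becomes the output IV. */
	if (encrypt_cbc)
		sg_pcopy_to_buffer(dst, nr_dst, iv, ivsize,
				   cryptlen - ivsize);
}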
674 struct safexcel_cipher_req *sreq,
696 sreq->nr_src = sg_nents_for_len(src, totlen_src);
703 if (sreq->direction == SAFEXCEL_DECRYPT)
715 (sreq->direction == SAFEXCEL_DECRYPT)) {
721 sg_pcopy_to_buffer(src, sreq->nr_src, areq->iv,
727 sreq->nr_dst = sg_nents_for_len(dst, totlen_dst);
737 sreq->nr_src = max(sreq->nr_src, sreq->nr_dst);
738 sreq->nr_dst = sreq->nr_src;
740 (sreq->nr_src <= 0))) {
745 if (sreq->nr_src > 0 &&
746 !dma_map_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL))
749 if (unlikely(totlen_src && (sreq->nr_src <= 0))) {
755 if (sreq->nr_src > 0 &&
756 !dma_map_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE))
759 if (unlikely(totlen_dst && (sreq->nr_dst <= 0))) {
766 if (sreq->nr_dst > 0 &&
767 !dma_map_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE)) {
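Lines 674-767 are the setup half of safexcel_send_req() (named at line 1062): sg_nents_for_len() counts how many scatterlist entries cover each total length, a decrypt first saves the would-be next IV from the tail of the ciphertext source (lines 715-721) before an in-place operation can overwrite it, and in-place requests are mapped once as DMA_BIDIRECTIONAL with nr_src/nr_dst forced equal, while out-of-place requests get separate DMA_TO_DEVICE / DMA_FROM_DEVICE mappings. A hedged sketch of the mapping decision; the error codes are illustrative, not necessarily the driver's:

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

static int map_req(struct device *dev, struct scatterlist *src,
		   struct scatterlist *dst, unsigned int totlen_src,
		   unsigned int totlen_dst, int *nr_src, int *nr_dst)
{
	*nr_src = sg_nents_for_len(src, totlen_src);
	*nr_dst = sg_nents_for_len(dst, totlen_dst);

	if (src == dst) {
		/* In place: one mapping must span both reads and writes. */
		*nr_src = max(*nr_src, *nr_dst);
		*nr_dst = *nr_src;
		if (unlikely(totlen_src && *nr_src <= 0))
			return -EINVAL;
		if (*nr_src > 0 &&
		    !dma_map_sg(dev, src, *nr_src, DMA_BIDIRECTIONAL))
			return -EIO;
		return 0;
	}

	if (unlikely(totlen_src && *nr_src <= 0))
		return -EINVAL;
	if (*nr_src > 0 &&
	    !dma_map_sg(dev, src, *nr_src, DMA_TO_DEVICE))
		return -EIO;

	if (unlikely(totlen_dst && *nr_dst <= 0) ||
	    (*nr_dst > 0 &&
	     !dma_map_sg(dev, dst, *nr_dst, DMA_FROM_DEVICE))) {
		if (*nr_src > 0)	/* undo the source mapping */
			dma_unmap_sg(dev, src, *nr_src, DMA_TO_DEVICE);
		return -EINVAL;
	}

	return 0;
}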
795 for_each_sg(src, sg, sreq->nr_src, i) {
822 safexcel_context_control(ctx, base, sreq, first_cdesc);
825 sreq->direction, cryptlen,
832 for_each_sg(dst, sg, sreq->nr_dst, i) {
833 bool last = (i == sreq->nr_dst - 1);
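Lines 795-833 build the ring descriptors: one for_each_sg() walk over the mapped source emits command descriptors (safexcel_context_control() fills the first one at line 822), and a second walk over the destination emits result descriptors, flagging the final entry so the engine knows where the chain ends. A generic sketch of the last-entry pattern; for_each_sg(), sg_dma_address() and sg_dma_len() are the real kernel APIs, while struct eng_ring and add_rdesc() are hypothetical stand-ins for the driver's ring state and descriptor helper:

#include <linux/scatterlist.h>

static void build_rdescs(struct eng_ring *ring, struct scatterlist *dst,
			 int nr_dst)
{
	struct scatterlist *sg;
	int i;

	for_each_sg(dst, sg, nr_dst, i) {
		bool last = (i == nr_dst - 1);	/* mark the chain's end */

		add_rdesc(ring, last, sg_dma_address(sg), sg_dma_len(sg));
	}
}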
902 if (sreq->nr_src > 0)
903 dma_unmap_sg(priv->dev, src, sreq->nr_src,
906 if (sreq->nr_src > 0)
907 dma_unmap_sg(priv->dev, src, sreq->nr_src,
909 if (sreq->nr_dst > 0)
910 dma_unmap_sg(priv->dev, dst, sreq->nr_dst,
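Lines 902-910 are the error-unwind tail of the same send path: on a failed submission the unmap pattern of the completion path runs again (one bidirectional unmap for in-place requests, separate to-device/from-device unmaps otherwise), so no stale DMA mappings survive the error.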
920 struct safexcel_cipher_req *sreq,
929 if (unlikely(!sreq->rdescs))
932 while (sreq->rdescs--) {
982 struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
985 if (sreq->needs_inv) {
986 sreq->needs_inv = false;
987 err = safexcel_handle_inv_result(priv, ring, async, sreq,
991 req->dst, req->cryptlen, sreq,
1005 struct safexcel_cipher_req *sreq = aead_request_ctx(req);
1008 if (sreq->needs_inv) {
1009 sreq->needs_inv = false;
1010 err = safexcel_handle_inv_result(priv, ring, async, sreq,
1016 sreq, should_complete, ret);
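Lines 920-1016 handle completions: safexcel_handle_inv_result() drains sreq->rdescs just like the normal path, and the skcipher/aead handle_result callbacks use sreq->needs_inv as a one-shot flag to route each completion to either the invalidation handler or the regular one. A hedged sketch of that dispatch, with the handler signatures simplified (the real request handler also takes src, dst and cryptlen, as line 991 shows); the two handlers stand in for safexcel_handle_inv_result()/safexcel_handle_req_result():

static int dispatch_result(struct safexcel_crypto_priv *priv, int ring,
			   struct crypto_async_request *async,
			   struct safexcel_cipher_req *sreq,
			   bool *should_complete, int *ret)
{
	if (sreq->needs_inv) {
		sreq->needs_inv = false;	/* one shot: consume the flag */
		return handle_inv_result(priv, ring, async, sreq,
					 should_complete, ret);
	}

	return handle_req_result(priv, ring, async, sreq,
				 should_complete, ret);
}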
1044 struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
1048 BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
1050 if (sreq->needs_inv) {
1062 ret = safexcel_send_req(async, ring, sreq, req->src,
1067 sreq->rdescs = *results;
1077 struct safexcel_cipher_req *sreq = aead_request_ctx(req);
1081 BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
1083 if (sreq->needs_inv)
1086 ret = safexcel_send_req(async, ring, sreq, req->src, req->dst,
1090 sreq->rdescs = *results;
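Lines 1044-1090 are the matching send side: on engines without the EIP-197 transform record cache an invalidation request can never be queued (hence the BUG_ON), otherwise needs_inv selects between sending an invalidation command and the normal safexcel_send_req(), and the number of result descriptors produced is recorded in sreq->rdescs for the completion path to drain. A condensed sketch; send_inv()/send_req() are stand-ins for the driver's helpers:

static int dispatch_send(struct safexcel_crypto_priv *priv, int ring,
			 struct crypto_async_request *async,
			 struct safexcel_cipher_req *sreq,
			 int *commands, int *results)
{
	int ret;

	/* No transform record cache means nothing to invalidate. */
	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);

	if (sreq->needs_inv)
		ret = send_inv(async, ring, commands, results);
	else
		ret = send_req(async, ring, sreq, commands, results);

	sreq->rdescs = *results;	/* completion path drains this many */
	return ret;
}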
1096 struct safexcel_cipher_req *sreq,
1106 sreq->needs_inv = true;
1130 struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
1139 return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
1145 struct safexcel_cipher_req *sreq = aead_request_ctx(req);
1154 return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
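Lines 1096-1154 implement safexcel_cipher_exit_inv(), used when a transform is torn down while its key material may still sit in the engine's record cache: it marks a dummy request with needs_inv, queues it, and blocks until the invalidation result comes back. A minimal sketch of that synchronous round trip, assuming a struct completion embedded in a small result carrier; queue_to_ring() is a hypothetical enqueue helper:

#include <linux/completion.h>

struct inv_result {
	struct completion completion;
	int error;
};

static int exit_inv(struct safexcel_cipher_req *sreq,
		    struct inv_result *result)
{
	init_completion(&result->completion);
	sreq->needs_inv = true;		/* send side routes to the inv path */

	queue_to_ring(sreq);		/* hypothetical enqueue */

	wait_for_completion(&result->completion);
	return result->error;		/* set by the completion callback */
}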
1158 struct safexcel_cipher_req *sreq,
1165 sreq->needs_inv = false;
1166 sreq->direction = dir;
1170 sreq->needs_inv = true;
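Lines 1158-1170 are the normal entry point (likely safexcel_queue_req()): it clears needs_inv, records the requested direction in sreq->direction for the send and completion paths, and re-arms needs_inv only when the context is already live in the engine and its cached copy no longer matches, so the next send transparently flushes the stale record first. A condensed sketch; ctx_needs_inv() and enqueue() are hypothetical stand-ins for the driver's staleness test and ring enqueue:

static int queue_req(struct safexcel_cipher_req *sreq,
		     enum safexcel_cipher_direction dir)
{
	sreq->needs_inv = false;
	sreq->direction = dir;

	if (ctx_needs_inv())
		sreq->needs_inv = true;	/* flush the stale record first */

	return enqueue(sreq);		/* hypothetical ring enqueue */
}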