Lines matching references to req in the CAAM RSA akcipher driver (drivers/crypto/caam/caampkc.c); each match below is prefixed with its line number in that file.
50 struct akcipher_request *req)
52 struct caam_rsa_req_ctx *req_ctx = akcipher_request_ctx(req);
54 dma_unmap_sg(dev, req->dst, edesc->dst_nents, DMA_FROM_DEVICE);
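
Lines 50-54 outline the I/O unmap helper. Below is a minimal sketch of that pattern; the stand-in struct definitions are reduced to the fields visible in this listing (plus assumed members, marked as such) and are reused by the later sketches:

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <crypto/internal/akcipher.h>

/* Illustrative stand-ins; the driver's real definitions are richer. */
struct rsa_edesc {
        int src_nents;                  /* assumed counterpart of dst_nents */
        int dst_nents;                  /* see line 54 */
        bool bklog;                     /* assumed: routed via crypto engine */
        u32 hw_desc[];                  /* assumed: CAAM job descriptor */
};

struct caam_rsa_req_ctx {
        struct scatterlist src[2];      /* backing for scatterwalk_ffwd(), line 277 */
        struct scatterlist *fixup_src;
        unsigned int fixup_src_len;
        struct rsa_edesc *edesc;
        void (*akcipher_op_done)(struct device *dev, u32 *desc,
                                 u32 err, void *context);            /* line 388 */
};

static void rsa_io_unmap(struct device *dev, struct rsa_edesc *edesc,
                         struct akcipher_request *req)
{
        struct caam_rsa_req_ctx *req_ctx = akcipher_request_ctx(req);

        /* Line 54: tear down the destination mapping. */
        dma_unmap_sg(dev, req->dst, edesc->dst_nents, DMA_FROM_DEVICE);
        /* Assumed counterpart: unmap the fixed-up source (lines 277/286). */
        dma_unmap_sg(dev, req_ctx->fixup_src, edesc->src_nents, DMA_TO_DEVICE);
}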
63 struct akcipher_request *req)
65 struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);
75 struct akcipher_request *req)
77 struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);
87 struct akcipher_request *req)
89 struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);
104 struct akcipher_request *req)
106 struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);
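
Lines 63-106 show the same opening move in all four key-specific unmap helpers: recover the transform from the request, then the per-transform key context. A hedged sketch of that preamble; struct caam_rsa_ctx, its key member, and the field list of struct caam_rsa_key are assumptions patterned on the names visible in this listing:

struct caam_rsa_key {
        u8 *n, *e;                      /* raw key material (assumed fields) */
        size_t n_sz, e_sz;              /* n_sz is compared at lines 267/726/850 */
        int priv_form;                  /* assumed selector, see lines 857-861 */
};

struct caam_rsa_ctx {                   /* assumed per-tfm context */
        struct caam_rsa_key key;
        struct device *dev;             /* assumed job-ring device handle */
};

static void rsa_pub_unmap(struct device *dev, struct rsa_edesc *edesc,
                          struct akcipher_request *req)
{
        struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);   /* line 65 */
        struct caam_rsa_ctx *ctx = akcipher_tfm_ctx(tfm);

        /* dma_unmap_single() calls for ctx->key's mapped components go here. */
}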
125 struct akcipher_request *req = context;
126 struct caam_rsa_req_ctx *req_ctx = akcipher_request_ctx(req);
138 rsa_pub_unmap(dev, edesc, req);
139 rsa_io_unmap(dev, edesc, req);
147 akcipher_request_complete(req, ecode);
149 crypto_finalize_akcipher_request(jrp->engine, req, ecode);
155 struct akcipher_request *req = context;
156 struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);
160 struct caam_rsa_req_ctx *req_ctx = akcipher_request_ctx(req);
173 rsa_priv_f1_unmap(dev, edesc, req);
176 rsa_priv_f2_unmap(dev, edesc, req);
179 rsa_priv_f3_unmap(dev, edesc, req);
182 rsa_io_unmap(dev, edesc, req);
190 akcipher_request_complete(req, ecode);
192 crypto_finalize_akcipher_request(jrp->engine, req, ecode);
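
Lines 125-192 trace the two job-ring completion callbacks. A sketch of the public-key one follows; the edesc->bklog flag marking requests that went through the crypto engine is an assumption (only the unmaps and the two completion calls at lines 147/149 are visible), and caam_jr_strstatus() is the CAAM helper that decodes the hardware status word (assumed here to return an errno):

#include <crypto/engine.h>

struct caam_drv_private_jr {            /* assumed; only the engine member is visible */
        struct crypto_engine *engine;                                /* line 149 */
};

static void rsa_pub_done(struct device *dev, u32 *desc, u32 err, void *context)
{
        struct akcipher_request *req = context;                      /* line 125 */
        struct caam_rsa_req_ctx *req_ctx = akcipher_request_ctx(req);
        struct caam_drv_private_jr *jrp = dev_get_drvdata(dev);      /* assumed lookup */
        struct rsa_edesc *edesc = req_ctx->edesc;
        bool has_bklog = edesc->bklog;                               /* assumed flag */
        int ecode = 0;

        if (err)
                ecode = caam_jr_strstatus(dev, err);

        rsa_pub_unmap(dev, edesc, req);                              /* line 138 */
        rsa_io_unmap(dev, edesc, req);                               /* line 139 */
        kfree(edesc);

        /* Requests queued through the crypto engine must be finalized by it. */
        if (!has_bklog)
                akcipher_request_complete(req, ecode);               /* line 147 */
        else
                crypto_finalize_akcipher_request(jrp->engine, req, ecode); /* line 149 */
}

The private-key callback (lines 155-192) has the same shape, but picks rsa_priv_f1/f2/f3_unmap() (lines 173-179) according to the key form before the shared rsa_io_unmap() at line 182.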
249 static struct rsa_edesc *rsa_edesc_alloc(struct akcipher_request *req,
252 struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);
255 struct caam_rsa_req_ctx *req_ctx = akcipher_request_ctx(req);
258 gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
267 if (req->src_len > key->n_sz) {
272 lzeros = caam_rsa_count_leading_zeros(req->src, req->src_len -
277 req_ctx->fixup_src = scatterwalk_ffwd(req_ctx->src, req->src,
279 req_ctx->fixup_src_len = req->src_len - lzeros;
285 diff_size = key->n_sz - req->src_len;
286 req_ctx->fixup_src = req->src;
287 req_ctx->fixup_src_len = req->src_len;
292 dst_nents = sg_nents_for_len(req->dst, req->dst_len);
300 mapped_dst_nents = dma_map_sg(dev, req->dst, dst_nents,
335 sg_to_sec4_sg_last(req->dst, req->dst_len,
368 dma_unmap_sg(dev, req->dst, dst_nents, DMA_FROM_DEVICE);
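
Lines 249-368 belong to rsa_edesc_alloc(). Three details stand out in the matches: the GFP mode is derived from the request's MAY_SLEEP flag (line 258); an input longer than the modulus is accepted only if the excess is leading zeros, which scatterwalk_ffwd() then skips without copying (lines 267-287); and the destination scatterlist is counted and DMA-mapped (lines 292-300). A condensed sketch of those steps, with allocation and the sec4 S/G table construction (line 335) elided; the trailing argument of the zero-counting helper is an assumption:

#include <linux/err.h>
#include <crypto/scatterwalk.h>

static int caam_rsa_count_leading_zeros(struct scatterlist *sgl,
                                        unsigned int nbytes,
                                        gfp_t flags);                /* assumed signature */

static struct rsa_edesc *rsa_edesc_alloc(struct akcipher_request *req,
                                         size_t desclen)
{
        struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);   /* line 252 */
        struct caam_rsa_ctx *ctx = akcipher_tfm_ctx(tfm);
        struct caam_rsa_req_ctx *req_ctx = akcipher_request_ctx(req);
        struct caam_rsa_key *key = &ctx->key;
        struct device *dev = ctx->dev;
        gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
                      GFP_KERNEL : GFP_ATOMIC;                       /* line 258 */
        int lzeros, dst_nents, mapped_dst_nents;

        if (req->src_len > key->n_sz) {                              /* line 267 */
                /* Over-long input is legal only if the excess is zeros. */
                lzeros = caam_rsa_count_leading_zeros(req->src,
                                                      req->src_len - key->n_sz,
                                                      flags);        /* line 272 */
                if (lzeros < 0)
                        return ERR_PTR(lzeros);
                /* Fast-forward past the zeros without copying. */
                req_ctx->fixup_src = scatterwalk_ffwd(req_ctx->src, req->src,
                                                      lzeros);       /* line 277 */
                req_ctx->fixup_src_len = req->src_len - lzeros;
        } else {
                /* Short input: the descriptor zero-pads by n_sz - src_len. */
                req_ctx->fixup_src = req->src;                       /* line 286 */
                req_ctx->fixup_src_len = req->src_len;
        }

        dst_nents = sg_nents_for_len(req->dst, req->dst_len);        /* line 292 */
        mapped_dst_nents = dma_map_sg(dev, req->dst, dst_nents,
                                      DMA_FROM_DEVICE);              /* line 300 */
        if (!mapped_dst_nents)
                return ERR_PTR(-ENOMEM);

        /*
         * Allocation of the edesc (using `flags`) and S/G table setup elided;
         * on later failure the dst mapping is reversed (line 368).
         */
        return ERR_PTR(-ENOSYS); /* placeholder for the elided tail */
}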
376 struct akcipher_request *req = container_of(areq,
379 struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);
380 struct caam_rsa_req_ctx *req_ctx = akcipher_request_ctx(req);
388 ret = caam_jr_enqueue(jrdev, desc, req_ctx->akcipher_op_done, req);
394 rsa_pub_unmap(jrdev, req_ctx->edesc, req);
395 rsa_io_unmap(jrdev, req_ctx->edesc, req);
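
Lines 376-395 are the crypto-engine worker: the engine hands back an opaque request which container_of() converts to the akcipher request, and the already-prepared descriptor is queued on the job ring. A sketch; the function name and the hw_desc field are assumptions, and -EINPROGRESS as caam_jr_enqueue()'s success return is CAAM convention:

static int akcipher_do_one_req(struct crypto_engine *engine, void *areq)
{
        struct akcipher_request *req = container_of(areq,
                                                    struct akcipher_request,
                                                    base);           /* line 376 */
        struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);   /* line 379 */
        struct caam_rsa_req_ctx *req_ctx = akcipher_request_ctx(req);/* line 380 */
        struct caam_rsa_ctx *ctx = akcipher_tfm_ctx(tfm);
        struct device *jrdev = ctx->dev;
        int ret;

        ret = caam_jr_enqueue(jrdev, req_ctx->edesc->hw_desc,
                              req_ctx->akcipher_op_done, req);       /* line 388 */
        if (ret != -EINPROGRESS) {
                /* Synchronous failure: undo the mappings (lines 394-395). */
                rsa_pub_unmap(jrdev, req_ctx->edesc, req);
                rsa_io_unmap(jrdev, req_ctx->edesc, req);
                kfree(req_ctx->edesc);
                return ret;
        }
        return 0;
}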
404 static int set_rsa_pub_pdb(struct akcipher_request *req,
407 struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);
408 struct caam_rsa_req_ctx *req_ctx = akcipher_request_ctx(req);
441 pdb->g_dma = sg_dma_address(req->dst);
450 static int set_rsa_priv_f1_pdb(struct akcipher_request *req,
453 struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);
479 struct caam_rsa_req_ctx *req_ctx = akcipher_request_ctx(req);
489 pdb->f_dma = sg_dma_address(req->dst);
497 static int set_rsa_priv_f2_pdb(struct akcipher_request *req,
500 struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);
544 struct caam_rsa_req_ctx *req_ctx = akcipher_request_ctx(req);
554 pdb->f_dma = sg_dma_address(req->dst);
574 static int set_rsa_priv_f3_pdb(struct akcipher_request *req,
577 struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);
633 struct caam_rsa_req_ctx *req_ctx = akcipher_request_ctx(req);
643 pdb->f_dma = sg_dma_address(req->dst);
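
Lines 404-643 cover the four protocol data block (PDB) builders. Each resolves DMA addresses for the key components, maps the I/O, and points the output field of the PDB at the mapped destination (g_dma at line 441 for the public form, f_dma at lines 489/554/643 for the private forms). A sketch of the public-key case; the rsa_pub_pdb layout is an assumption standing in for the hardware-defined struct, and in the driver the PDB lives inside the edesc rather than on the stack:

struct rsa_pub_pdb {                    /* assumed layout */
        dma_addr_t n_dma;               /* modulus */
        dma_addr_t e_dma;               /* public exponent */
        dma_addr_t f_dma;               /* input */
        dma_addr_t g_dma;               /* output */
};

static int set_rsa_pub_pdb(struct akcipher_request *req,
                           struct rsa_edesc *edesc)
{
        struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);   /* line 407 */
        struct caam_rsa_req_ctx *req_ctx = akcipher_request_ctx(req);/* line 408 */
        struct caam_rsa_ctx *ctx = akcipher_tfm_ctx(tfm);
        struct caam_rsa_key *key = &ctx->key;
        struct device *dev = ctx->dev;
        struct rsa_pub_pdb pdb_stub, *pdb = &pdb_stub; /* in the edesc in reality */

        pdb->n_dma = dma_map_single(dev, key->n, key->n_sz, DMA_TO_DEVICE);
        if (dma_mapping_error(dev, pdb->n_dma))
                return -ENOMEM;
        pdb->e_dma = dma_map_single(dev, key->e, key->e_sz, DMA_TO_DEVICE);

        /* Input comes from the fixed-up source scatterlist. */
        pdb->f_dma = sg_dma_address(req_ctx->fixup_src);
        /*
         * Single-segment output shown (line 441); multi-segment destinations
         * are described through a sec4 S/G table instead (line 335).
         */
        pdb->g_dma = sg_dma_address(req->dst);
        return 0;
}

The f1/f2/f3 private-form builders repeat this with the extra components each key form carries (d for form 1; p, q, d for form 2; p, q, dp, dq and the CRT coefficient for form 3).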
670 struct akcipher_request *req)
673 struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);
676 struct caam_rsa_req_ctx *req_ctx = akcipher_request_ctx(req);
687 if (req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG)
689 req);
691 ret = caam_jr_enqueue(jrdev, desc, cbk, req);
696 rsa_priv_f1_unmap(jrdev, edesc, req);
699 rsa_priv_f2_unmap(jrdev, edesc, req);
702 rsa_priv_f3_unmap(jrdev, edesc, req);
705 rsa_pub_unmap(jrdev, edesc, req);
707 rsa_io_unmap(jrdev, edesc, req);
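
Lines 670-707 are the shared enqueue front end. A request flagged CRYPTO_TFM_REQ_MAY_BACKLOG is transferred to the crypto engine, which can queue it; everything else goes straight to the job ring (lines 687-691). On synchronous failure the error path unmaps according to which operation was prepared (lines 696-707). A sketch, reusing the assumed caam_drv_private_jr lookup from the callback sketch above:

static int akcipher_enqueue_req(struct device *jrdev,
                                void (*cbk)(struct device *jrdev, u32 *desc,
                                            u32 err, void *context),
                                struct akcipher_request *req)
{
        struct caam_drv_private_jr *jrpriv = dev_get_drvdata(jrdev); /* assumed */
        struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);   /* line 673 */
        struct caam_rsa_req_ctx *req_ctx = akcipher_request_ctx(req);/* line 676 */
        struct rsa_edesc *edesc = req_ctx->edesc;
        int ret;

        req_ctx->akcipher_op_done = cbk;

        if (req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG)            /* line 687 */
                ret = crypto_transfer_akcipher_request_to_engine(jrpriv->engine,
                                                                 req);
        else
                ret = caam_jr_enqueue(jrdev, edesc->hw_desc, cbk, req); /* line 691 */

        if (ret != -EINPROGRESS && ret != -EBUSY) {
                /* Pick the unmap matching the prepared op (lines 696-705). */
                rsa_pub_unmap(jrdev, edesc, req); /* or rsa_priv_fN_unmap() */
                rsa_io_unmap(jrdev, edesc, req);                     /* line 707 */
                kfree(edesc);
        }
        return ret;
}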
714 static int caam_rsa_enc(struct akcipher_request *req)
716 struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);
726 if (req->dst_len < key->n_sz) {
727 req->dst_len = key->n_sz;
733 edesc = rsa_edesc_alloc(req, DESC_RSA_PUB_LEN);
738 ret = set_rsa_pub_pdb(req, edesc);
745 return akcipher_enqueue_req(jrdev, rsa_pub_done, req);
748 rsa_io_unmap(jrdev, edesc, req);
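
Lines 714-748 shape the encrypt entry point: reject a destination smaller than the modulus after reporting the required size through req->dst_len (lines 726-727, the usual akcipher convention), then allocate the descriptor, build the PDB, and enqueue. A sketch; -EOVERFLOW is assumed for the undersized-destination errno, and the job-descriptor construction between lines 738 and 745 is elided:

static int caam_rsa_enc(struct akcipher_request *req)
{
        struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);   /* line 716 */
        struct caam_rsa_ctx *ctx = akcipher_tfm_ctx(tfm);
        struct caam_rsa_key *key = &ctx->key;
        struct device *jrdev = ctx->dev;
        struct rsa_edesc *edesc;
        int ret;

        if (req->dst_len < key->n_sz) {                              /* line 726 */
                req->dst_len = key->n_sz; /* tell the caller the needed size */
                return -EOVERFLOW;        /* assumed errno */
        }

        edesc = rsa_edesc_alloc(req, DESC_RSA_PUB_LEN);              /* line 733 */
        if (IS_ERR(edesc))
                return PTR_ERR(edesc);

        ret = set_rsa_pub_pdb(req, edesc);                           /* line 738 */
        if (ret)
                goto init_fail;

        /* Job descriptor construction elided. */
        return akcipher_enqueue_req(jrdev, rsa_pub_done, req);       /* line 745 */

init_fail:
        rsa_io_unmap(jrdev, edesc, req);                             /* line 748 */
        kfree(edesc);
        return ret;
}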
753 static int caam_rsa_dec_priv_f1(struct akcipher_request *req)
755 struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);
762 edesc = rsa_edesc_alloc(req, DESC_RSA_PRIV_F1_LEN);
767 ret = set_rsa_priv_f1_pdb(req, edesc);
774 return akcipher_enqueue_req(jrdev, rsa_priv_f_done, req);
777 rsa_io_unmap(jrdev, edesc, req);
782 static int caam_rsa_dec_priv_f2(struct akcipher_request *req)
784 struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);
791 edesc = rsa_edesc_alloc(req, DESC_RSA_PRIV_F2_LEN);
796 ret = set_rsa_priv_f2_pdb(req, edesc);
803 return akcipher_enqueue_req(jrdev, rsa_priv_f_done, req);
806 rsa_io_unmap(jrdev, edesc, req);
811 static int caam_rsa_dec_priv_f3(struct akcipher_request *req)
813 struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);
820 edesc = rsa_edesc_alloc(req, DESC_RSA_PRIV_F3_LEN);
825 ret = set_rsa_priv_f3_pdb(req, edesc);
832 return akcipher_enqueue_req(jrdev, rsa_priv_f_done, req);
835 rsa_io_unmap(jrdev, edesc, req);
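
Lines 753-835 show three structurally identical private-key decrypt wrappers; only the descriptor-length constant (lines 762/791/820), the PDB builder, and the error-path unmap differ. One sketch stands for all three:

static int caam_rsa_dec_priv_f1(struct akcipher_request *req)
{
        struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);   /* line 755 */
        struct caam_rsa_ctx *ctx = akcipher_tfm_ctx(tfm);
        struct device *jrdev = ctx->dev;
        struct rsa_edesc *edesc;
        int ret;

        edesc = rsa_edesc_alloc(req, DESC_RSA_PRIV_F1_LEN);          /* line 762 */
        if (IS_ERR(edesc))
                return PTR_ERR(edesc);

        ret = set_rsa_priv_f1_pdb(req, edesc);                       /* line 767 */
        if (ret)
                goto init_fail;

        /* Descriptor construction elided; f2/f3 swap in their own calls. */
        return akcipher_enqueue_req(jrdev, rsa_priv_f_done, req);    /* line 774 */

init_fail:
        rsa_io_unmap(jrdev, edesc, req);                             /* line 777 */
        kfree(edesc);
        return ret;
}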
840 static int caam_rsa_dec(struct akcipher_request *req)
842 struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);
850 if (req->dst_len < key->n_sz) {
851 req->dst_len = key->n_sz;
857 ret = caam_rsa_dec_priv_f3(req);
859 ret = caam_rsa_dec_priv_f2(req);
861 ret = caam_rsa_dec_priv_f1(req);
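
Lines 840-861 close the section with the decrypt dispatcher: the same undersized-destination check as encrypt (lines 850-851), then a choice of the richest available private-key form, CRT first. A sketch; priv_form and the FORM1/FORM2/FORM3 values mirror the f1/f2/f3 naming above but are assumptions:

enum caam_priv_key_form { FORM1, FORM2, FORM3 };    /* assumed */

static int caam_rsa_dec(struct akcipher_request *req)
{
        struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);   /* line 842 */
        struct caam_rsa_ctx *ctx = akcipher_tfm_ctx(tfm);
        struct caam_rsa_key *key = &ctx->key;

        if (req->dst_len < key->n_sz) {                              /* line 850 */
                req->dst_len = key->n_sz;                            /* line 851 */
                return -EOVERFLOW;       /* assumed errno, as in encrypt */
        }

        if (key->priv_form == FORM3)     /* CRT: p, q, dp, dq, c */
                return caam_rsa_dec_priv_f3(req);                    /* line 857 */
        if (key->priv_form == FORM2)     /* p, q, d */
                return caam_rsa_dec_priv_f2(req);                    /* line 859 */
        return caam_rsa_dec_priv_f1(req);/* n, d only; line 861 */
}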