Lines matching refs: dev
Identifier cross-reference listing for "dev". The matched code is consistent with the Linux CAAM RSA driver (likely drivers/crypto/caam/caampkc.c); each entry below is prefixed with its line number in the source file. Only matching lines appear, so multi-line statements may be truncated.
49 static void rsa_io_unmap(struct device *dev, struct rsa_edesc *edesc,
54 dma_unmap_sg(dev, req->dst, edesc->dst_nents, DMA_FROM_DEVICE);
55 dma_unmap_sg(dev, req_ctx->fixup_src, edesc->src_nents, DMA_TO_DEVICE);
58 dma_unmap_single(dev, edesc->sec4_sg_dma, edesc->sec4_sg_bytes,
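The matches at lines 54-58 are the body of rsa_io_unmap(), which releases the per-request I/O DMA state: the destination and source scatterlists and, when one was built, the single-mapped hardware S/G table. A plausible reconstruction of the full function; the guard on sec4_sg_bytes is an assumption based on the usual CAAM pattern of mapping the S/G table only when multiple segments exist.

static void rsa_io_unmap(struct device *dev, struct rsa_edesc *edesc,
			 struct akcipher_request *req)
{
	struct caam_rsa_req_ctx *req_ctx = akcipher_request_ctx(req);

	/* Release the scatterlist mappings set up at submit time. */
	dma_unmap_sg(dev, req->dst, edesc->dst_nents, DMA_FROM_DEVICE);
	dma_unmap_sg(dev, req_ctx->fixup_src, edesc->src_nents, DMA_TO_DEVICE);

	/* Assumed guard: only unmap the sec4 S/G table if one was mapped. */
	if (edesc->sec4_sg_bytes)
		dma_unmap_single(dev, edesc->sec4_sg_dma, edesc->sec4_sg_bytes,
				 DMA_TO_DEVICE);
}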
62 static void rsa_pub_unmap(struct device *dev, struct rsa_edesc *edesc,
70 dma_unmap_single(dev, pdb->n_dma, key->n_sz, DMA_TO_DEVICE);
71 dma_unmap_single(dev, pdb->e_dma, key->e_sz, DMA_TO_DEVICE);
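Lines 62-71 belong to rsa_pub_unmap(). The public-key PDB maps exactly two buffers, the modulus n and the public exponent e, so teardown is two dma_unmap_single() calls. A sketch, assuming the usual lookup of the key through the transform context:

static void rsa_pub_unmap(struct device *dev, struct rsa_edesc *edesc,
			  struct akcipher_request *req)
{
	struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);
	struct caam_rsa_ctx *ctx = akcipher_tfm_ctx(tfm);
	struct caam_rsa_key *key = &ctx->key;
	struct rsa_pub_pdb *pdb = &edesc->pdb.pub;

	dma_unmap_single(dev, pdb->n_dma, key->n_sz, DMA_TO_DEVICE);
	dma_unmap_single(dev, pdb->e_dma, key->e_sz, DMA_TO_DEVICE);
}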
74 static void rsa_priv_f1_unmap(struct device *dev, struct rsa_edesc *edesc,
82 dma_unmap_single(dev, pdb->n_dma, key->n_sz, DMA_TO_DEVICE);
83 dma_unmap_single(dev, pdb->d_dma, key->d_sz, DMA_TO_DEVICE);
86 static void rsa_priv_f2_unmap(struct device *dev, struct rsa_edesc *edesc,
96 dma_unmap_single(dev, pdb->d_dma, key->d_sz, DMA_TO_DEVICE);
97 dma_unmap_single(dev, pdb->p_dma, p_sz, DMA_TO_DEVICE);
98 dma_unmap_single(dev, pdb->q_dma, q_sz, DMA_TO_DEVICE);
99 dma_unmap_single(dev, pdb->tmp1_dma, p_sz, DMA_BIDIRECTIONAL);
100 dma_unmap_single(dev, pdb->tmp2_dma, q_sz, DMA_BIDIRECTIONAL);
103 static void rsa_priv_f3_unmap(struct device *dev, struct rsa_edesc *edesc,
113 dma_unmap_single(dev, pdb->p_dma, p_sz, DMA_TO_DEVICE);
114 dma_unmap_single(dev, pdb->q_dma, q_sz, DMA_TO_DEVICE);
115 dma_unmap_single(dev, pdb->dp_dma, p_sz, DMA_TO_DEVICE);
116 dma_unmap_single(dev, pdb->dq_dma, q_sz, DMA_TO_DEVICE);
117 dma_unmap_single(dev, pdb->c_dma, p_sz, DMA_TO_DEVICE);
118 dma_unmap_single(dev, pdb->tmp1_dma, p_sz, DMA_BIDIRECTIONAL);
119 dma_unmap_single(dev, pdb->tmp2_dma, q_sz, DMA_BIDIRECTIONAL);
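The three rsa_priv_fN_unmap() variants (lines 74-119) mirror the three RSA private-key representations the CAAM protocol supports: form 1 uses (n, d); form 2 adds the primes p and q plus two scratch buffers tmp1/tmp2; form 3 is the CRT form (p, q, dp, dq, qinv) with the same scratch buffers. Note the directions: key material is DMA_TO_DEVICE, while tmp1/tmp2 are DMA_BIDIRECTIONAL, presumably because the accelerator writes intermediate values into them. The form-3 teardown, reassembled from the matches (the p_sz/q_sz locals are assumed):

static void rsa_priv_f3_unmap(struct device *dev, struct rsa_edesc *edesc,
			      struct akcipher_request *req)
{
	struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);
	struct caam_rsa_ctx *ctx = akcipher_tfm_ctx(tfm);
	struct caam_rsa_key *key = &ctx->key;
	struct rsa_priv_f3_pdb *pdb = &edesc->pdb.priv_f3;
	size_t p_sz = key->p_sz;
	size_t q_sz = key->q_sz;

	dma_unmap_single(dev, pdb->p_dma, p_sz, DMA_TO_DEVICE);
	dma_unmap_single(dev, pdb->q_dma, q_sz, DMA_TO_DEVICE);
	dma_unmap_single(dev, pdb->dp_dma, p_sz, DMA_TO_DEVICE);
	dma_unmap_single(dev, pdb->dq_dma, q_sz, DMA_TO_DEVICE);
	/* qinv shares p's length in this layout (line 117). */
	dma_unmap_single(dev, pdb->c_dma, p_sz, DMA_TO_DEVICE);
	dma_unmap_single(dev, pdb->tmp1_dma, p_sz, DMA_BIDIRECTIONAL);
	dma_unmap_single(dev, pdb->tmp2_dma, q_sz, DMA_BIDIRECTIONAL);
}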
123 static void rsa_pub_done(struct device *dev, u32 *desc, u32 err, void *context)
127 struct caam_drv_private_jr *jrp = dev_get_drvdata(dev);
133 ecode = caam_jr_strstatus(dev, err);
138 rsa_pub_unmap(dev, edesc, req);
139 rsa_io_unmap(dev, edesc, req);
152 static void rsa_priv_f_done(struct device *dev, u32 *desc, u32 err,
157 struct caam_drv_private_jr *jrp = dev_get_drvdata(dev);
166 ecode = caam_jr_strstatus(dev, err);
173 rsa_priv_f1_unmap(dev, edesc, req);
176 rsa_priv_f2_unmap(dev, edesc, req);
179 rsa_priv_f3_unmap(dev, edesc, req);
182 rsa_io_unmap(dev, edesc, req);
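Lines 123-182 are the two job-ring completion callbacks. Both follow the same shape: translate the hardware status word with caam_jr_strstatus(), unmap the PDB and I/O buffers, free the extended descriptor, and complete the akcipher request; the private-key callback additionally dispatches on the key form to pick the right unmap helper (lines 173-179). A sketch of the public path; the context-carries-request convention, the edesc/bklog fields, and the crypto-engine finalization (suggested by the jrp lookup at line 127) are assumptions:

static void rsa_pub_done(struct device *dev, u32 *desc, u32 err, void *context)
{
	struct akcipher_request *req = context;	/* assumed convention */
	struct caam_rsa_req_ctx *req_ctx = akcipher_request_ctx(req);
	struct caam_drv_private_jr *jrp = dev_get_drvdata(dev);
	struct rsa_edesc *edesc = req_ctx->edesc;	/* assumed field */
	int ecode = 0;
	bool has_bklog;

	/* Translate the CAAM status word into an errno on failure. */
	if (err)
		ecode = caam_jr_strstatus(dev, err);

	/* Save the flag before the descriptor is freed below. */
	has_bklog = edesc->bklog;	/* assumed field */

	/* Teardown order: PDB key mappings first, then request I/O. */
	rsa_pub_unmap(dev, edesc, req);
	rsa_io_unmap(dev, edesc, req);
	kfree(edesc);

	/* Backlogged requests finalize through the crypto engine;
	 * others complete directly. */
	if (has_bklog)
		crypto_finalize_akcipher_request(jrp->engine, req, ecode);
	else
		akcipher_request_complete(req, ecode);
}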
254 struct device *dev = ctx->dev;
294 mapped_src_nents = dma_map_sg(dev, req_ctx->fixup_src, src_nents,
297 dev_err(dev, "unable to map source\n");
300 mapped_dst_nents = dma_map_sg(dev, req->dst, dst_nents,
303 dev_err(dev, "unable to map destination\n");
350 edesc->sec4_sg_dma = dma_map_single(dev, edesc->sec4_sg,
352 if (dma_mapping_error(dev, edesc->sec4_sg_dma)) {
353 dev_err(dev, "unable to map S/G table\n");
368 dma_unmap_sg(dev, req->dst, dst_nents, DMA_FROM_DEVICE);
370 dma_unmap_sg(dev, req_ctx->fixup_src, src_nents, DMA_TO_DEVICE);
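Lines 254-370 fall inside the extended-descriptor setup: map the source and destination scatterlists, build and map the sec4 S/G table, and on any failure unwind whatever was already mapped, in reverse order; lines 368-370 are that unwind. A fragment sketching the idiom with names taken from the matches; the label names are illustrative and the descriptor allocation is elided:

	mapped_src_nents = dma_map_sg(dev, req_ctx->fixup_src, src_nents,
				      DMA_TO_DEVICE);
	if (unlikely(!mapped_src_nents)) {
		dev_err(dev, "unable to map source\n");
		return ERR_PTR(-ENOMEM);
	}

	mapped_dst_nents = dma_map_sg(dev, req->dst, dst_nents,
				      DMA_FROM_DEVICE);
	if (unlikely(!mapped_dst_nents)) {
		dev_err(dev, "unable to map destination\n");
		goto unmap_src;
	}

	/* ... allocate edesc and fill in the sec4 S/G table ... */

	edesc->sec4_sg_dma = dma_map_single(dev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(dev, edesc->sec4_sg_dma)) {
		dev_err(dev, "unable to map S/G table\n");
		goto unmap_dst;
	}

	return edesc;

unmap_dst:
	kfree(edesc);
	dma_unmap_sg(dev, req->dst, dst_nents, DMA_FROM_DEVICE);
unmap_src:
	dma_unmap_sg(dev, req_ctx->fixup_src, src_nents, DMA_TO_DEVICE);
	return ERR_PTR(-ENOMEM);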
382 struct device *jrdev = ctx->dev;
411 struct device *dev = ctx->dev;
415 pdb->n_dma = dma_map_single(dev, key->n, key->n_sz, DMA_TO_DEVICE);
416 if (dma_mapping_error(dev, pdb->n_dma)) {
417 dev_err(dev, "Unable to map RSA modulus memory\n");
421 pdb->e_dma = dma_map_single(dev, key->e, key->e_sz, DMA_TO_DEVICE);
422 if (dma_mapping_error(dev, pdb->e_dma)) {
423 dev_err(dev, "Unable to map RSA public exponent memory\n");
424 dma_unmap_single(dev, pdb->n_dma, key->n_sz, DMA_TO_DEVICE);
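The public-PDB setup (lines 411-424) establishes the pattern every PDB helper here follows: dma_map_single() each key component, test with dma_mapping_error(), and on failure release whatever was already mapped before returning. Line 424 is exactly that rollback of n_dma when mapping e fails. Reassembled from the matches; the -ENOMEM returns are assumed. The form-1 helper at lines 456-469 is identical in shape, with the private exponent d in place of e.

	pdb->n_dma = dma_map_single(dev, key->n, key->n_sz, DMA_TO_DEVICE);
	if (dma_mapping_error(dev, pdb->n_dma)) {
		dev_err(dev, "Unable to map RSA modulus memory\n");
		return -ENOMEM;	/* assumed */
	}

	pdb->e_dma = dma_map_single(dev, key->e, key->e_sz, DMA_TO_DEVICE);
	if (dma_mapping_error(dev, pdb->e_dma)) {
		dev_err(dev, "Unable to map RSA public exponent memory\n");
		/* Roll back the mapping that already succeeded. */
		dma_unmap_single(dev, pdb->n_dma, key->n_sz, DMA_TO_DEVICE);
		return -ENOMEM;	/* assumed */
	}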
456 struct device *dev = ctx->dev;
460 pdb->n_dma = dma_map_single(dev, key->n, key->n_sz, DMA_TO_DEVICE);
461 if (dma_mapping_error(dev, pdb->n_dma)) {
462 dev_err(dev, "Unable to map modulus memory\n");
466 pdb->d_dma = dma_map_single(dev, key->d, key->d_sz, DMA_TO_DEVICE);
467 if (dma_mapping_error(dev, pdb->d_dma)) {
468 dev_err(dev, "Unable to map RSA private exponent memory\n");
469 dma_unmap_single(dev, pdb->n_dma, key->n_sz, DMA_TO_DEVICE);
503 struct device *dev = ctx->dev;
509 pdb->d_dma = dma_map_single(dev, key->d, key->d_sz, DMA_TO_DEVICE);
510 if (dma_mapping_error(dev, pdb->d_dma)) {
511 dev_err(dev, "Unable to map RSA private exponent memory\n");
515 pdb->p_dma = dma_map_single(dev, key->p, p_sz, DMA_TO_DEVICE);
516 if (dma_mapping_error(dev, pdb->p_dma)) {
517 dev_err(dev, "Unable to map RSA prime factor p memory\n");
521 pdb->q_dma = dma_map_single(dev, key->q, q_sz, DMA_TO_DEVICE);
522 if (dma_mapping_error(dev, pdb->q_dma)) {
523 dev_err(dev, "Unable to map RSA prime factor q memory\n");
527 pdb->tmp1_dma = dma_map_single(dev, key->tmp1, p_sz, DMA_BIDIRECTIONAL);
528 if (dma_mapping_error(dev, pdb->tmp1_dma)) {
529 dev_err(dev, "Unable to map RSA tmp1 memory\n");
533 pdb->tmp2_dma = dma_map_single(dev, key->tmp2, q_sz, DMA_BIDIRECTIONAL);
534 if (dma_mapping_error(dev, pdb->tmp2_dma)) {
535 dev_err(dev, "Unable to map RSA tmp2 memory\n");
563 dma_unmap_single(dev, pdb->tmp1_dma, p_sz, DMA_BIDIRECTIONAL);
565 dma_unmap_single(dev, pdb->q_dma, q_sz, DMA_TO_DEVICE);
567 dma_unmap_single(dev, pdb->p_dma, p_sz, DMA_TO_DEVICE);
569 dma_unmap_single(dev, pdb->d_dma, key->d_sz, DMA_TO_DEVICE);
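With five buffers to map, the form-2 helper (lines 503-569) trades inline rollback for a goto unwind ladder; the matches at 563-569 are the ladder itself, releasing mappings in reverse order through fall-through labels. The form-3 helper (lines 580-662) extends the same ladder with dp, dq and qinv. A sketch with illustrative label names, since the real labels do not appear in the matches:

	pdb->p_dma = dma_map_single(dev, key->p, p_sz, DMA_TO_DEVICE);
	if (dma_mapping_error(dev, pdb->p_dma)) {
		dev_err(dev, "Unable to map RSA prime factor p memory\n");
		goto unmap_d;
	}

	pdb->q_dma = dma_map_single(dev, key->q, q_sz, DMA_TO_DEVICE);
	if (dma_mapping_error(dev, pdb->q_dma)) {
		dev_err(dev, "Unable to map RSA prime factor q memory\n");
		goto unmap_p;
	}

	/* ... tmp1 and tmp2 map the same way, jumping one label deeper
	 * on failure (tmp2's failure path enters at unmap_tmp1) ... */

	return 0;

	/* Unwind ladder: each label falls through, releasing every
	 * mapping made before the one that failed. */
unmap_tmp1:
	dma_unmap_single(dev, pdb->tmp1_dma, p_sz, DMA_BIDIRECTIONAL);
unmap_q:
	dma_unmap_single(dev, pdb->q_dma, q_sz, DMA_TO_DEVICE);
unmap_p:
	dma_unmap_single(dev, pdb->p_dma, p_sz, DMA_TO_DEVICE);
unmap_d:
	dma_unmap_single(dev, pdb->d_dma, key->d_sz, DMA_TO_DEVICE);

	return -ENOMEM;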
580 struct device *dev = ctx->dev;
586 pdb->p_dma = dma_map_single(dev, key->p, p_sz, DMA_TO_DEVICE);
587 if (dma_mapping_error(dev, pdb->p_dma)) {
588 dev_err(dev, "Unable to map RSA prime factor p memory\n");
592 pdb->q_dma = dma_map_single(dev, key->q, q_sz, DMA_TO_DEVICE);
593 if (dma_mapping_error(dev, pdb->q_dma)) {
594 dev_err(dev, "Unable to map RSA prime factor q memory\n");
598 pdb->dp_dma = dma_map_single(dev, key->dp, p_sz, DMA_TO_DEVICE);
599 if (dma_mapping_error(dev, pdb->dp_dma)) {
600 dev_err(dev, "Unable to map RSA exponent dp memory\n");
604 pdb->dq_dma = dma_map_single(dev, key->dq, q_sz, DMA_TO_DEVICE);
605 if (dma_mapping_error(dev, pdb->dq_dma)) {
606 dev_err(dev, "Unable to map RSA exponent dq memory\n");
610 pdb->c_dma = dma_map_single(dev, key->qinv, p_sz, DMA_TO_DEVICE);
611 if (dma_mapping_error(dev, pdb->c_dma)) {
612 dev_err(dev, "Unable to map RSA CRT coefficient qinv memory\n");
616 pdb->tmp1_dma = dma_map_single(dev, key->tmp1, p_sz, DMA_BIDIRECTIONAL);
617 if (dma_mapping_error(dev, pdb->tmp1_dma)) {
618 dev_err(dev, "Unable to map RSA tmp1 memory\n");
622 pdb->tmp2_dma = dma_map_single(dev, key->tmp2, q_sz, DMA_BIDIRECTIONAL);
623 if (dma_mapping_error(dev, pdb->tmp2_dma)) {
624 dev_err(dev, "Unable to map RSA tmp2 memory\n");
652 dma_unmap_single(dev, pdb->tmp1_dma, p_sz, DMA_BIDIRECTIONAL);
654 dma_unmap_single(dev, pdb->c_dma, p_sz, DMA_TO_DEVICE);
656 dma_unmap_single(dev, pdb->dq_dma, q_sz, DMA_TO_DEVICE);
658 dma_unmap_single(dev, pdb->dp_dma, p_sz, DMA_TO_DEVICE);
660 dma_unmap_single(dev, pdb->q_dma, q_sz, DMA_TO_DEVICE);
662 dma_unmap_single(dev, pdb->p_dma, p_sz, DMA_TO_DEVICE);
719 struct device *jrdev = ctx->dev;
757 struct device *jrdev = ctx->dev;
786 struct device *jrdev = ctx->dev;
815 struct device *jrdev = ctx->dev;
852 dev_err(ctx->dev, "Output buffer length less than parameter n\n");
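Lines 719-815 are the per-request entry points, each pulling the job-ring device out of the transform context; line 852 sits in the decrypt path's input validation, since RSA output is always modulus-sized. Only the dev_err() line appears in the matches, so the dst_len write-back and the -EOVERFLOW return below are assumptions based on the common akcipher convention of reporting the required size back to the caller:

	if (req->dst_len < key->n_sz) {
		/* Assumed: tell the caller how much space is needed. */
		req->dst_len = key->n_sz;
		dev_err(ctx->dev, "Output buffer length less than parameter n\n");
		return -EOVERFLOW;	/* assumed */
	}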
1114 ctx->dev = caam_jr_alloc();
1116 if (IS_ERR(ctx->dev)) {
1118 return PTR_ERR(ctx->dev);
1121 ctx->padding_dma = dma_map_single(ctx->dev, zero_buffer,
1124 if (dma_mapping_error(ctx->dev, ctx->padding_dma)) {
1125 dev_err(ctx->dev, "unable to map padding\n");
1126 caam_jr_free(ctx->dev);
1139 dma_unmap_single(ctx->dev, ctx->padding_dma, CAAM_RSA_MAX_INPUT_SIZE -
1142 caam_jr_free(ctx->dev);
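Lines 1114-1142 bracket the transform's lifetime: init allocates a job ring via caam_jr_alloc() and maps the shared zero_buffer once as reusable padding for short inputs, freeing the ring again if that mapping fails; exit undoes both in reverse order. A sketch under stated assumptions: the init/exit function names are reconstructions, and PADDING_LEN stands in for the real length expression, which begins CAAM_RSA_MAX_INPUT_SIZE - but is truncated across lines in this listing.

static int caam_rsa_init_tfm(struct crypto_akcipher *tfm)	/* assumed name */
{
	struct caam_rsa_ctx *ctx = akcipher_tfm_ctx(tfm);

	ctx->dev = caam_jr_alloc();
	if (IS_ERR(ctx->dev))
		return PTR_ERR(ctx->dev);

	/* Map the shared zero buffer once per transform. PADDING_LEN is
	 * a stand-in; the listing truncates the real operand after
	 * "CAAM_RSA_MAX_INPUT_SIZE -". */
	ctx->padding_dma = dma_map_single(ctx->dev, zero_buffer,
					  PADDING_LEN, DMA_TO_DEVICE);
	if (dma_mapping_error(ctx->dev, ctx->padding_dma)) {
		dev_err(ctx->dev, "unable to map padding\n");
		caam_jr_free(ctx->dev);
		return -ENOMEM;
	}

	return 0;
}

static void caam_rsa_exit_tfm(struct crypto_akcipher *tfm)	/* assumed name */
{
	struct caam_rsa_ctx *ctx = akcipher_tfm_ctx(tfm);

	/* Reverse of init: unmap the padding, then release the ring. */
	dma_unmap_single(ctx->dev, ctx->padding_dma, PADDING_LEN,
			 DMA_TO_DEVICE);
	caam_jr_free(ctx->dev);
}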