Lines matching references to dev. The matches below appear to come from the Linux CAAM RSA driver (drivers/crypto/caam/caampkc.c); the leading number on each line is its line number in that file.
43 static void rsa_io_unmap(struct device *dev, struct rsa_edesc *edesc,
48 dma_unmap_sg(dev, req->dst, edesc->dst_nents, DMA_FROM_DEVICE);
49 dma_unmap_sg(dev, req_ctx->fixup_src, edesc->src_nents, DMA_TO_DEVICE);
52 dma_unmap_single(dev, edesc->sec4_sg_dma, edesc->sec4_sg_bytes,
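Lines 43-52 are the request-teardown half of the streaming-DMA usage: the destination and (fixed-up) source scatterlists are unmapped with the same directions they were mapped with, and the hardware S/G table mapping is released last. A minimal sketch of that pairing; the io_desc struct, its field names, and the size guard are illustrative assumptions, not the driver's rsa_edesc layout:

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

/* Illustrative per-request bookkeeping; field names are assumptions. */
struct io_desc {
	struct scatterlist *src, *dst;
	int src_nents, dst_nents;
	dma_addr_t sg_table_dma;
	size_t sg_table_bytes;
};

/* Undo every streaming mapping taken for one request, in one place. */
static void io_desc_unmap(struct device *dev, struct io_desc *d)
{
	dma_unmap_sg(dev, d->dst, d->dst_nents, DMA_FROM_DEVICE);
	dma_unmap_sg(dev, d->src, d->src_nents, DMA_TO_DEVICE);
	if (d->sg_table_bytes)	/* assumed: a table is not always needed */
		dma_unmap_single(dev, d->sg_table_dma, d->sg_table_bytes,
				 DMA_TO_DEVICE);
}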
56 static void rsa_pub_unmap(struct device *dev, struct rsa_edesc *edesc,
64 dma_unmap_single(dev, pdb->n_dma, key->n_sz, DMA_TO_DEVICE);
65 dma_unmap_single(dev, pdb->e_dma, key->e_sz, DMA_TO_DEVICE);
68 static void rsa_priv_f1_unmap(struct device *dev, struct rsa_edesc *edesc,
76 dma_unmap_single(dev, pdb->n_dma, key->n_sz, DMA_TO_DEVICE);
77 dma_unmap_single(dev, pdb->d_dma, key->d_sz, DMA_TO_DEVICE);
80 static void rsa_priv_f2_unmap(struct device *dev, struct rsa_edesc *edesc,
90 dma_unmap_single(dev, pdb->d_dma, key->d_sz, DMA_TO_DEVICE);
91 dma_unmap_single(dev, pdb->p_dma, p_sz, DMA_TO_DEVICE);
92 dma_unmap_single(dev, pdb->q_dma, q_sz, DMA_TO_DEVICE);
93 dma_unmap_single(dev, pdb->tmp1_dma, p_sz, DMA_BIDIRECTIONAL);
94 dma_unmap_single(dev, pdb->tmp2_dma, q_sz, DMA_BIDIRECTIONAL);
97 static void rsa_priv_f3_unmap(struct device *dev, struct rsa_edesc *edesc,
107 dma_unmap_single(dev, pdb->p_dma, p_sz, DMA_TO_DEVICE);
108 dma_unmap_single(dev, pdb->q_dma, q_sz, DMA_TO_DEVICE);
109 dma_unmap_single(dev, pdb->dp_dma, p_sz, DMA_TO_DEVICE);
110 dma_unmap_single(dev, pdb->dq_dma, q_sz, DMA_TO_DEVICE);
111 dma_unmap_single(dev, pdb->c_dma, p_sz, DMA_TO_DEVICE);
112 dma_unmap_single(dev, pdb->tmp1_dma, p_sz, DMA_BIDIRECTIONAL);
113 dma_unmap_single(dev, pdb->tmp2_dma, q_sz, DMA_BIDIRECTIONAL);
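Lines 56-113 define one unmap helper per key form, each mirroring its PDB (protocol data block) layout: the public form releases (n, e), private form 1 releases (n, d), form 2 covers d plus the primes p and q and two scratch buffers, and form 3 (the CRT form) covers p, q, dp, dq and qinv plus the same scratch pair. The directions are the interesting part: key material is DMA_TO_DEVICE only, while tmp1/tmp2 are DMA_BIDIRECTIONAL because the accelerator writes intermediates back into them. A hedged sketch of the form-2 shape; the struct and parameter names are assumptions:

/* Illustrative form-2 private-key PDB; field names are assumptions. */
struct priv_f2_pdb {
	dma_addr_t d_dma, p_dma, q_dma;
	dma_addr_t tmp1_dma, tmp2_dma;
};

static void priv_f2_unmap(struct device *dev, struct priv_f2_pdb *pdb,
			  size_t d_sz, size_t p_sz, size_t q_sz)
{
	/* Key halves were device inputs only... */
	dma_unmap_single(dev, pdb->d_dma, d_sz, DMA_TO_DEVICE);
	dma_unmap_single(dev, pdb->p_dma, p_sz, DMA_TO_DEVICE);
	dma_unmap_single(dev, pdb->q_dma, q_sz, DMA_TO_DEVICE);
	/* ...but the scratch buffers are also written by the device. */
	dma_unmap_single(dev, pdb->tmp1_dma, p_sz, DMA_BIDIRECTIONAL);
	dma_unmap_single(dev, pdb->tmp2_dma, q_sz, DMA_BIDIRECTIONAL);
}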
117 static void rsa_pub_done(struct device *dev, u32 *desc, u32 err, void *context)
121 struct caam_drv_private_jr *jrp = dev_get_drvdata(dev);
127 ecode = caam_jr_strstatus(dev, err);
132 rsa_pub_unmap(dev, edesc, req);
133 rsa_io_unmap(dev, edesc, req);
146 static void rsa_priv_f_done(struct device *dev, u32 *desc, u32 err,
151 struct caam_drv_private_jr *jrp = dev_get_drvdata(dev);
160 ecode = caam_jr_strstatus(dev, err);
167 rsa_priv_f1_unmap(dev, edesc, req);
170 rsa_priv_f2_unmap(dev, edesc, req);
173 rsa_priv_f3_unmap(dev, edesc, req);
176 rsa_io_unmap(dev, edesc, req);
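Both completion callbacks share one shape: decode the hardware status word into an errno with caam_jr_strstatus() when err is non-zero, call the unmap helper matching the key form chosen at submit time (lines 167-173 pick between the three private forms), release the I/O mappings, and complete the request. A sketch of that flow; the request-context struct and the *_sketch helpers are assumptions, while caam_jr_strstatus() (from the driver's local error.h) and the akcipher completion API are real:

#include <linux/device.h>
#include <linux/slab.h>
#include <linux/types.h>
#include <crypto/internal/akcipher.h>

int caam_jr_strstatus(struct device *jrdev, u32 status); /* driver's error.h */

struct rsa_edesc_sketch;		/* assumed extended descriptor */
struct rsa_req_ctx_sketch {		/* assumed request context */
	struct rsa_edesc_sketch *edesc;
};

/* Assumed stand-ins for the rsa_pub_unmap()/rsa_io_unmap() seen above. */
void pub_unmap_sketch(struct device *dev, struct rsa_edesc_sketch *e,
		      struct akcipher_request *req);
void io_unmap_sketch(struct device *dev, struct rsa_edesc_sketch *e,
		     struct akcipher_request *req);

static void rsa_pub_done_sketch(struct device *dev, u32 *desc, u32 err,
				void *context)
{
	struct akcipher_request *req = context;
	struct rsa_req_ctx_sketch *req_ctx = akcipher_request_ctx(req);
	struct rsa_edesc_sketch *edesc = req_ctx->edesc;
	int ecode = 0;

	if (err)
		ecode = caam_jr_strstatus(dev, err);	/* status -> errno */

	/* Tear down PDB and I/O mappings before completing the request. */
	pub_unmap_sketch(dev, edesc, req);
	io_unmap_sketch(dev, edesc, req);
	kfree(edesc);

	akcipher_request_complete(req, ecode);
}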
247 struct device *dev = ctx->dev;
287 mapped_src_nents = dma_map_sg(dev, req_ctx->fixup_src, src_nents,
290 dev_err(dev, "unable to map source\n");
293 mapped_dst_nents = dma_map_sg(dev, req->dst, dst_nents,
296 dev_err(dev, "unable to map destination\n");
344 edesc->sec4_sg_dma = dma_map_single(dev, edesc->sec4_sg,
346 if (dma_mapping_error(dev, edesc->sec4_sg_dma)) {
347 dev_err(dev, "unable to map S/G table\n");
362 dma_unmap_sg(dev, req->dst, dst_nents, DMA_FROM_DEVICE);
364 dma_unmap_sg(dev, req_ctx->fixup_src, src_nents, DMA_TO_DEVICE);
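The submit path (lines 287-347) is the mirror image of rsa_io_unmap(): map the source scatterlist, then the destination, then the S/G table, checking each step and unwinding the already-taken mappings in reverse order on failure (the unmaps at lines 362-364 are that cleanup). A generic sketch of the map-then-unwind pattern; the function and parameter names are assumptions:

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

static int map_io_sketch(struct device *dev,
			 struct scatterlist *src, int src_nents,
			 struct scatterlist *dst, int dst_nents,
			 void *sg_table, size_t sg_bytes, dma_addr_t *sg_dma)
{
	int mapped_src, mapped_dst;

	/* dma_map_sg() reports failure as 0 mapped entries... */
	mapped_src = dma_map_sg(dev, src, src_nents, DMA_TO_DEVICE);
	if (unlikely(!mapped_src)) {
		dev_err(dev, "unable to map source\n");
		return -ENOMEM;
	}

	mapped_dst = dma_map_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
	if (unlikely(!mapped_dst)) {
		dev_err(dev, "unable to map destination\n");
		goto unmap_src;
	}

	/* ...while dma_map_single() needs an explicit error check. */
	*sg_dma = dma_map_single(dev, sg_table, sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(dev, *sg_dma)) {
		dev_err(dev, "unable to map S/G table\n");
		goto unmap_dst;
	}

	return 0;

unmap_dst:
	dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
unmap_src:
	dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
	return -ENOMEM;
}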
376 struct device *jrdev = ctx->dev;
402 struct device *dev = ctx->dev;
406 pdb->n_dma = dma_map_single(dev, key->n, key->n_sz, DMA_TO_DEVICE);
407 if (dma_mapping_error(dev, pdb->n_dma)) {
408 dev_err(dev, "Unable to map RSA modulus memory\n");
412 pdb->e_dma = dma_map_single(dev, key->e, key->e_sz, DMA_TO_DEVICE);
413 if (dma_mapping_error(dev, pdb->e_dma)) {
414 dev_err(dev, "Unable to map RSA public exponent memory\n");
415 dma_unmap_single(dev, pdb->n_dma, key->n_sz, DMA_TO_DEVICE);
447 struct device *dev = ctx->dev;
451 pdb->n_dma = dma_map_single(dev, key->n, key->n_sz, DMA_TO_DEVICE);
452 if (dma_mapping_error(dev, pdb->n_dma)) {
453 dev_err(dev, "Unable to map modulus memory\n");
457 pdb->d_dma = dma_map_single(dev, key->d, key->d_sz, DMA_TO_DEVICE);
458 if (dma_mapping_error(dev, pdb->d_dma)) {
459 dev_err(dev, "Unable to map RSA private exponent memory\n");
460 dma_unmap_single(dev, pdb->n_dma, key->n_sz, DMA_TO_DEVICE);
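set_rsa_pub_pdb() (lines 406-415) and set_rsa_priv_f1_pdb() (lines 451-460) each map exactly two key buffers, so the error handling is a single manual rollback: if the second dma_map_single() fails, the first mapping is undone before returning. Sketched below under assumed names:

#include <linux/device.h>
#include <linux/dma-mapping.h>

/* Map (n, e) for a public operation; names are assumptions. */
static int map_pub_key_sketch(struct device *dev,
			      void *n, size_t n_sz, dma_addr_t *n_dma,
			      void *e, size_t e_sz, dma_addr_t *e_dma)
{
	*n_dma = dma_map_single(dev, n, n_sz, DMA_TO_DEVICE);
	if (dma_mapping_error(dev, *n_dma)) {
		dev_err(dev, "Unable to map RSA modulus memory\n");
		return -ENOMEM;
	}

	*e_dma = dma_map_single(dev, e, e_sz, DMA_TO_DEVICE);
	if (dma_mapping_error(dev, *e_dma)) {
		dev_err(dev, "Unable to map RSA public exponent memory\n");
		/* Roll back the only prior mapping by hand. */
		dma_unmap_single(dev, *n_dma, n_sz, DMA_TO_DEVICE);
		return -ENOMEM;
	}

	return 0;
}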
494 struct device *dev = ctx->dev;
500 pdb->d_dma = dma_map_single(dev, key->d, key->d_sz, DMA_TO_DEVICE);
501 if (dma_mapping_error(dev, pdb->d_dma)) {
502 dev_err(dev, "Unable to map RSA private exponent memory\n");
506 pdb->p_dma = dma_map_single(dev, key->p, p_sz, DMA_TO_DEVICE);
507 if (dma_mapping_error(dev, pdb->p_dma)) {
508 dev_err(dev, "Unable to map RSA prime factor p memory\n");
512 pdb->q_dma = dma_map_single(dev, key->q, q_sz, DMA_TO_DEVICE);
513 if (dma_mapping_error(dev, pdb->q_dma)) {
514 dev_err(dev, "Unable to map RSA prime factor q memory\n");
518 pdb->tmp1_dma = dma_map_single(dev, key->tmp1, p_sz, DMA_BIDIRECTIONAL);
519 if (dma_mapping_error(dev, pdb->tmp1_dma)) {
520 dev_err(dev, "Unable to map RSA tmp1 memory\n");
524 pdb->tmp2_dma = dma_map_single(dev, key->tmp2, q_sz, DMA_BIDIRECTIONAL);
525 if (dma_mapping_error(dev, pdb->tmp2_dma)) {
526 dev_err(dev, "Unable to map RSA tmp2 memory\n");
554 dma_unmap_single(dev, pdb->tmp1_dma, p_sz, DMA_BIDIRECTIONAL);
556 dma_unmap_single(dev, pdb->q_dma, q_sz, DMA_TO_DEVICE);
558 dma_unmap_single(dev, pdb->p_dma, p_sz, DMA_TO_DEVICE);
560 dma_unmap_single(dev, pdb->d_dma, key->d_sz, DMA_TO_DEVICE);
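With five buffers, set_rsa_priv_f2_pdb() (lines 500-526) switches to the kernel's goto-unwind idiom: each mapping failure jumps to a label that releases everything mapped so far, strictly in reverse order; the unmaps at lines 554-560 (tmp1, q, p, d) are exactly that ladder. A sketch reusing the illustrative priv_f2_pdb from above:

static int map_priv_f2_sketch(struct device *dev, struct priv_f2_pdb *pdb,
			      void *d, size_t d_sz,
			      void *p, void *tmp1, size_t p_sz,
			      void *q, void *tmp2, size_t q_sz)
{
	pdb->d_dma = dma_map_single(dev, d, d_sz, DMA_TO_DEVICE);
	if (dma_mapping_error(dev, pdb->d_dma))
		return -ENOMEM;

	pdb->p_dma = dma_map_single(dev, p, p_sz, DMA_TO_DEVICE);
	if (dma_mapping_error(dev, pdb->p_dma))
		goto unmap_d;

	pdb->q_dma = dma_map_single(dev, q, q_sz, DMA_TO_DEVICE);
	if (dma_mapping_error(dev, pdb->q_dma))
		goto unmap_p;

	pdb->tmp1_dma = dma_map_single(dev, tmp1, p_sz, DMA_BIDIRECTIONAL);
	if (dma_mapping_error(dev, pdb->tmp1_dma))
		goto unmap_q;

	pdb->tmp2_dma = dma_map_single(dev, tmp2, q_sz, DMA_BIDIRECTIONAL);
	if (dma_mapping_error(dev, pdb->tmp2_dma))
		goto unmap_tmp1;

	return 0;

	/* Unwind strictly in reverse order of mapping. */
unmap_tmp1:
	dma_unmap_single(dev, pdb->tmp1_dma, p_sz, DMA_BIDIRECTIONAL);
unmap_q:
	dma_unmap_single(dev, pdb->q_dma, q_sz, DMA_TO_DEVICE);
unmap_p:
	dma_unmap_single(dev, pdb->p_dma, p_sz, DMA_TO_DEVICE);
unmap_d:
	dma_unmap_single(dev, pdb->d_dma, d_sz, DMA_TO_DEVICE);
	return -ENOMEM;
}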
571 struct device *dev = ctx->dev;
577 pdb->p_dma = dma_map_single(dev, key->p, p_sz, DMA_TO_DEVICE);
578 if (dma_mapping_error(dev, pdb->p_dma)) {
579 dev_err(dev, "Unable to map RSA prime factor p memory\n");
583 pdb->q_dma = dma_map_single(dev, key->q, q_sz, DMA_TO_DEVICE);
584 if (dma_mapping_error(dev, pdb->q_dma)) {
585 dev_err(dev, "Unable to map RSA prime factor q memory\n");
589 pdb->dp_dma = dma_map_single(dev, key->dp, p_sz, DMA_TO_DEVICE);
590 if (dma_mapping_error(dev, pdb->dp_dma)) {
591 dev_err(dev, "Unable to map RSA exponent dp memory\n");
595 pdb->dq_dma = dma_map_single(dev, key->dq, q_sz, DMA_TO_DEVICE);
596 if (dma_mapping_error(dev, pdb->dq_dma)) {
597 dev_err(dev, "Unable to map RSA exponent dq memory\n");
601 pdb->c_dma = dma_map_single(dev, key->qinv, p_sz, DMA_TO_DEVICE);
602 if (dma_mapping_error(dev, pdb->c_dma)) {
603 dev_err(dev, "Unable to map RSA CRT coefficient qinv memory\n");
607 pdb->tmp1_dma = dma_map_single(dev, key->tmp1, p_sz, DMA_BIDIRECTIONAL);
608 if (dma_mapping_error(dev, pdb->tmp1_dma)) {
609 dev_err(dev, "Unable to map RSA tmp1 memory\n");
613 pdb->tmp2_dma = dma_map_single(dev, key->tmp2, q_sz, DMA_BIDIRECTIONAL);
614 if (dma_mapping_error(dev, pdb->tmp2_dma)) {
615 dev_err(dev, "Unable to map RSA tmp2 memory\n");
643 dma_unmap_single(dev, pdb->tmp1_dma, p_sz, DMA_BIDIRECTIONAL);
645 dma_unmap_single(dev, pdb->c_dma, p_sz, DMA_TO_DEVICE);
647 dma_unmap_single(dev, pdb->dq_dma, q_sz, DMA_TO_DEVICE);
649 dma_unmap_single(dev, pdb->dp_dma, p_sz, DMA_TO_DEVICE);
651 dma_unmap_single(dev, pdb->q_dma, q_sz, DMA_TO_DEVICE);
653 dma_unmap_single(dev, pdb->p_dma, p_sz, DMA_TO_DEVICE);
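set_rsa_priv_f3_pdb() (lines 577-615) extends the same ladder to seven mappings for the CRT form: p, q, dp, dq, qinv (c_dma), tmp1 and tmp2, released at lines 643-653 in exact reverse order of mapping. Two details visible in the matches: dp and qinv share p's length while dq shares q's (p_sz at lines 589 and 601, q_sz at line 595), and once again only the two scratch buffers are mapped DMA_BIDIRECTIONAL.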
710 struct device *jrdev = ctx->dev;
748 struct device *jrdev = ctx->dev;
777 struct device *jrdev = ctx->dev;
806 struct device *jrdev = ctx->dev;
843 dev_err(ctx->dev, "Output buffer length less than parameter n\n");
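Lines 710-806 look like the four operation entry points (the public operation plus the three private-key forms); each just takes the per-transform job-ring device from ctx->dev before building and submitting its descriptor. Line 843 is the up-front sanity check: a destination buffer shorter than the modulus n is rejected before anything is mapped.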
1100 ctx->dev = caam_jr_alloc();
1102 if (IS_ERR(ctx->dev)) {
1104 return PTR_ERR(ctx->dev);
1107 ctx->padding_dma = dma_map_single(ctx->dev, zero_buffer,
1110 if (dma_mapping_error(ctx->dev, ctx->padding_dma)) {
1111 dev_err(ctx->dev, "unable to map padding\n");
1112 caam_jr_free(ctx->dev);
1127 dma_unmap_single(ctx->dev, ctx->padding_dma, CAAM_RSA_MAX_INPUT_SIZE -
1130 caam_jr_free(ctx->dev);
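Finally, the transform-lifetime pairing (lines 1100-1130): init allocates a job ring and maps a shared zero padding buffer once per transform, and exit unmaps and frees both; note the rollback at line 1112, where a failed padding map releases the just-allocated job ring. A sketch under assumed names; caam_jr_alloc()/caam_jr_free() are the driver's real job-ring API (its local jr.h), while the context struct and padding length are assumptions:

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/err.h>

struct device *caam_jr_alloc(void);		/* driver's jr.h */
void caam_jr_free(struct device *rdev);		/* driver's jr.h */

struct rsa_ctx_sketch {			/* assumed per-transform context */
	struct device *dev;
	dma_addr_t padding_dma;
};

static int rsa_init_sketch(struct rsa_ctx_sketch *ctx, void *zero_buf,
			   size_t pad_len)
{
	ctx->dev = caam_jr_alloc();	/* grab a job ring for this tfm */
	if (IS_ERR(ctx->dev))
		return PTR_ERR(ctx->dev);

	ctx->padding_dma = dma_map_single(ctx->dev, zero_buf, pad_len,
					  DMA_TO_DEVICE);
	if (dma_mapping_error(ctx->dev, ctx->padding_dma)) {
		dev_err(ctx->dev, "unable to map padding\n");
		caam_jr_free(ctx->dev);	/* roll back the allocation */
		return -ENOMEM;
	}

	return 0;
}

static void rsa_exit_sketch(struct rsa_ctx_sketch *ctx, size_t pad_len)
{
	dma_unmap_single(ctx->dev, ctx->padding_dma, pad_len, DMA_TO_DEVICE);
	caam_jr_free(ctx->dev);
}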