Lines Matching defs:acry_dev
113 struct aspeed_acry_dev *acry_dev;
130 struct aspeed_acry_dev *acry_dev;
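
The two hits at lines 113 and 130 are member declarations rather than executable code: a per-transform context and a per-algorithm wrapper each keep a back-pointer to the shared device so request handlers and registration code can reach the hardware state. A minimal sketch of that shape; only the acry_dev member is confirmed by the listing, everything else is illustrative:

/* Hedged sketch: both the per-tfm and per-alg objects carry a pointer back
 * to the single aspeed_acry_dev instance.  Field names other than acry_dev
 * are assumptions.
 */
struct aspeed_acry_example_ctx {
    struct aspeed_acry_dev *acry_dev;          /* matches the hit at line 113 */
    /* ... cached RSA key material, trigger callback, fallback tfm ... */
};

struct aspeed_acry_example_alg {
    struct aspeed_acry_dev *acry_dev;          /* matches the hit at line 130 */
    struct akcipher_alg     akcipher;          /* registered with the crypto API */
};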
172 static int aspeed_acry_handle_queue(struct aspeed_acry_dev *acry_dev,
176 ACRY_DBG(acry_dev, "SW fallback\n");
180 return crypto_transfer_akcipher_request_to_engine(acry_dev->crypt_engine_rsa, req);
188 struct aspeed_acry_dev *acry_dev = ctx->acry_dev;
190 acry_dev->req = req;
191 acry_dev->flags |= CRYPTO_FLAGS_BUSY;
193 return ctx->trigger(acry_dev);
196 static int aspeed_acry_complete(struct aspeed_acry_dev *acry_dev, int err)
198 struct akcipher_request *req = acry_dev->req;
200 acry_dev->flags &= ~CRYPTO_FLAGS_BUSY;
202 crypto_finalize_akcipher_request(acry_dev->crypt_engine_rsa, req, err);
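
Lines 172-202 trace the crypto_engine round trip: handle_queue() either takes the software fallback ("SW fallback", line 176) or transfers the request to the RSA engine, the engine's do-request callback marks the device busy and invokes the context's trigger, and complete() clears the flag and finalizes the request. A hedged reconstruction of that flow; the do-request name/signature and the elided fallback condition are assumptions:

/* Hedged sketch of the request lifecycle implied by lines 172-202.
 * Needs <crypto/engine.h> and <crypto/internal/akcipher.h>.
 */
static int aspeed_acry_handle_queue(struct aspeed_acry_dev *acry_dev,
                                    struct akcipher_request *req)
{
    /* Keys too large for the engine would be punted to the software
     * fallback here before ever reaching the hardware queue.
     */
    return crypto_transfer_akcipher_request_to_engine(acry_dev->crypt_engine_rsa,
                                                      req);
}

static int aspeed_acry_do_request(struct crypto_engine *engine, void *areq)
{
    struct akcipher_request *req = container_of(areq, struct akcipher_request,
                                                base);
    struct aspeed_acry_ctx *ctx = akcipher_tfm_ctx(crypto_akcipher_reqtfm(req));
    struct aspeed_acry_dev *acry_dev = ctx->acry_dev;

    acry_dev->req = req;                      /* line 190 */
    acry_dev->flags |= CRYPTO_FLAGS_BUSY;     /* line 191 */
    return ctx->trigger(acry_dev);            /* line 193, e.g. the RSA trigger */
}

static int aspeed_acry_complete(struct aspeed_acry_dev *acry_dev, int err)
{
    struct akcipher_request *req = acry_dev->req;

    acry_dev->flags &= ~CRYPTO_FLAGS_BUSY;
    crypto_finalize_akcipher_request(acry_dev->crypt_engine_rsa, req, err);

    return err;
}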
210 static void aspeed_acry_rsa_sg_copy_to_buffer(struct aspeed_acry_dev *acry_dev,
218 ACRY_DBG(acry_dev, "\n");
223 data_idx = acry_dev->data_byte_mapping[i];
229 data_idx = acry_dev->data_byte_mapping[i];
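
Lines 210-229 walk data_byte_mapping[] while filling the DMA buffer, which suggests the request's scatterlist is linearized first and then scattered into the engine's byte layout least-significant byte first, with the tail zero-padded. A hedged sketch of that copy; the staging buffer, the ASPEED_ACRY_SRAM_MAX_LEN bound and the use of scatterwalk_map_and_copy() are assumptions:

/* Hedged sketch of the copy implied by lines 210-229.  The engine is assumed
 * to serialize requests, so a static staging buffer is tolerable here.
 */
static void aspeed_acry_rsa_sg_copy_to_buffer(struct aspeed_acry_dev *acry_dev,
                                              u8 *buf, struct scatterlist *src,
                                              size_t nbytes)
{
    static u8 staging[ASPEED_ACRY_SRAM_MAX_LEN];   /* assumed bound */
    int i = 0, j, data_idx;

    ACRY_DBG(acry_dev, "\n");

    scatterwalk_map_and_copy(staging, src, 0, nbytes, 0);

    /* Reverse order: the engine expects the least significant byte first. */
    for (j = nbytes - 1; j >= 0; j--) {
        data_idx = acry_dev->data_byte_mapping[i++];
        buf[data_idx] = staging[j];
    }

    /* Zero-pad the remainder of the operand. */
    for (; i < ASPEED_ACRY_SRAM_MAX_LEN; i++) {
        data_idx = acry_dev->data_byte_mapping[i];
        buf[data_idx] = 0;
    }
}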
247 static int aspeed_acry_rsa_ctx_copy(struct aspeed_acry_dev *acry_dev, void *buf,
257 ACRY_DBG(acry_dev, "nbytes:%zu, mode:%d\n", nbytes, mode);
288 idx = acry_dev->exp_dw_mapping[j - 1];
290 idx = acry_dev->mod_dw_mapping[j - 1];
299 static int aspeed_acry_rsa_transfer(struct aspeed_acry_dev *acry_dev)
301 struct akcipher_request *req = acry_dev->req;
302 u8 __iomem *sram_buffer = acry_dev->acry_sram;
311 ast_acry_write(acry_dev, ACRY_CMD_DMEM_AHB, ASPEED_ACRY_DMA_CMD);
314 regmap_update_bits(acry_dev->ahbc, AHBC_REGION_PROT,
320 data_idx = acry_dev->data_byte_mapping[j];
330 ACRY_DBG(acry_dev, "result_nbytes:%d, req->dst_len:%d\n",
339 dev_err(acry_dev->dev, "RSA engine error!\n");
342 memzero_explicit(acry_dev->buf_addr, ASPEED_ACRY_BUFF_SIZE);
344 return aspeed_acry_complete(acry_dev, 0);
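
Lines 299-344 are the completion half: data memory is switched back to AHB (CPU) access, the AHBC region protection is lifted, the result is gathered from SRAM through data_byte_mapping[], the DMA buffer is scrubbed, and the request is finalized. A hedged sketch of that read-back; REGION_ACRYM, ASPEED_ACRY_SRAM_MAX_LEN and the leading-zero trim are assumptions, only the listed calls are confirmed:

/* Hedged sketch of the result read-back at lines 299-344. */
static int aspeed_acry_rsa_transfer(struct aspeed_acry_dev *acry_dev)
{
    struct akcipher_request *req = acry_dev->req;
    u8 __iomem *sram_buffer = acry_dev->acry_sram;
    static u8 result[ASPEED_ACRY_SRAM_MAX_LEN];   /* engine serializes requests */
    int result_nbytes = ASPEED_ACRY_SRAM_MAX_LEN;
    int leading_zero = 1;
    int i = 0, j, data_idx;

    /* Hand the data memory back to the CPU and drop the SRAM protection. */
    ast_acry_write(acry_dev, ACRY_CMD_DMEM_AHB, ASPEED_ACRY_DMA_CMD);
    regmap_update_bits(acry_dev->ahbc, AHBC_REGION_PROT, REGION_ACRYM, 0);

    /* Gather the big number back into linear order, trimming leading zeros. */
    for (j = ASPEED_ACRY_SRAM_MAX_LEN - 1; j >= 0; j--) {
        data_idx = acry_dev->data_byte_mapping[j];
        if (readb(sram_buffer + data_idx) == 0 && leading_zero) {
            result_nbytes--;
        } else {
            leading_zero = 0;
            result[i++] = readb(sram_buffer + data_idx);
        }
    }

    ACRY_DBG(acry_dev, "result_nbytes:%d, req->dst_len:%d\n",
             result_nbytes, req->dst_len);

    if (result_nbytes <= req->dst_len) {
        scatterwalk_map_and_copy(result, req->dst, 0, result_nbytes, 1);
        req->dst_len = result_nbytes;
    } else {
        dev_err(acry_dev->dev, "RSA engine error!\n");
    }

    /* Scrub the DMA staging buffer before completing the request. */
    memzero_explicit(acry_dev->buf_addr, ASPEED_ACRY_BUFF_SIZE);

    return aspeed_acry_complete(acry_dev, 0);
}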
347 static int aspeed_acry_rsa_trigger(struct aspeed_acry_dev *acry_dev)
349 struct akcipher_request *req = acry_dev->req;
355 dev_err(acry_dev->dev, "%s: key n is not set\n", __func__);
359 memzero_explicit(acry_dev->buf_addr, ASPEED_ACRY_BUFF_SIZE);
362 aspeed_acry_rsa_sg_copy_to_buffer(acry_dev, acry_dev->buf_addr,
365 nm = aspeed_acry_rsa_ctx_copy(acry_dev, acry_dev->buf_addr, ctx->n,
369 dev_err(acry_dev->dev, "%s: key e is not set\n",
374 ne = aspeed_acry_rsa_ctx_copy(acry_dev, acry_dev->buf_addr,
379 dev_err(acry_dev->dev, "%s: key d is not set\n",
384 ne = aspeed_acry_rsa_ctx_copy(acry_dev, acry_dev->buf_addr,
389 ast_acry_write(acry_dev, acry_dev->buf_dma_addr,
391 ast_acry_write(acry_dev, (ne << 16) + nm,
393 ast_acry_write(acry_dev, ASPEED_ACRY_BUFF_SIZE,
396 acry_dev->resume = aspeed_acry_rsa_transfer;
399 regmap_update_bits(acry_dev->ahbc, AHBC_REGION_PROT,
402 ast_acry_write(acry_dev, ACRY_RSA_ISR, ASPEED_ACRY_INT_MASK);
403 ast_acry_write(acry_dev, ACRY_CMD_DMA_SRAM_MODE_RSA |
407 ast_acry_write(acry_dev, ACRY_CMD_RSA_TRIGGER |
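
Lines 347-407 program one RSA operation: the DMA buffer is cleared and loaded with the message and key material, the DMA source/length and packed exponent/modulus lengths are written, the resume handler is pointed at the transfer routine, SRAM protection is raised, the RSA interrupt is unmasked, and the engine is triggered. A hedged, condensed sketch; register and mode names not present in the listing (ASPEED_ACRY_DMA_SRC_BASE, ASPEED_ACRY_RSA_KEY_LEN, ASPEED_ACRY_DMA_LEN, ASPEED_RSA_MOD_MODE/EXP_MODE, REGION_ACRYM) and the ctx field names are assumptions, and the per-key error checks at lines 369 and 379 are elided:

/* Hedged, condensed sketch of the trigger sequence at lines 347-407. */
static int aspeed_acry_rsa_trigger(struct aspeed_acry_dev *acry_dev)
{
    struct akcipher_request *req = acry_dev->req;
    struct aspeed_acry_ctx *ctx = akcipher_tfm_ctx(crypto_akcipher_reqtfm(req));
    int ne, nm;

    if (!ctx->n || !ctx->n_sz) {
        dev_err(acry_dev->dev, "%s: key n is not set\n", __func__);
        return -EINVAL;
    }

    /* Stage the message and key material in the DMA buffer. */
    memzero_explicit(acry_dev->buf_addr, ASPEED_ACRY_BUFF_SIZE);
    aspeed_acry_rsa_sg_copy_to_buffer(acry_dev, acry_dev->buf_addr,
                                      req->src, req->src_len);
    nm = aspeed_acry_rsa_ctx_copy(acry_dev, acry_dev->buf_addr, ctx->n,
                                  ctx->n_sz, ASPEED_RSA_MOD_MODE);
    ne = aspeed_acry_rsa_ctx_copy(acry_dev, acry_dev->buf_addr,
                                  ctx->enc ? ctx->e : ctx->d,
                                  ctx->enc ? ctx->e_sz : ctx->d_sz,
                                  ASPEED_RSA_EXP_MODE);

    /* Program DMA source, packed key lengths and transfer size. */
    ast_acry_write(acry_dev, acry_dev->buf_dma_addr, ASPEED_ACRY_DMA_SRC_BASE);
    ast_acry_write(acry_dev, (ne << 16) + nm, ASPEED_ACRY_RSA_KEY_LEN);
    ast_acry_write(acry_dev, ASPEED_ACRY_BUFF_SIZE, ASPEED_ACRY_DMA_LEN);

    /* The IRQ/tasklet path resumes in the transfer (read-back) routine. */
    acry_dev->resume = aspeed_acry_rsa_transfer;

    /* Protect the SRAM from CPU access while the engine owns it. */
    regmap_update_bits(acry_dev->ahbc, AHBC_REGION_PROT,
                       REGION_ACRYM, REGION_ACRYM);

    /* Unmask the RSA interrupt, hand the SRAM to the engine, and go. */
    ast_acry_write(acry_dev, ACRY_RSA_ISR, ASPEED_ACRY_INT_MASK);
    ast_acry_write(acry_dev, ACRY_CMD_DMA_SRAM_MODE_RSA |
                   ACRY_CMD_DMA_SRAM_AHB_ENGINE, ASPEED_ACRY_DMA_CMD);
    ast_acry_write(acry_dev, ACRY_CMD_RSA_TRIGGER |
                   ACRY_CMD_DMA_RSA_TRIGGER, ASPEED_ACRY_TRIGGER);

    return 0;
}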
417 struct aspeed_acry_dev *acry_dev = ctx->acry_dev;
422 return aspeed_acry_handle_queue(acry_dev, req);
429 struct aspeed_acry_dev *acry_dev = ctx->acry_dev;
434 return aspeed_acry_handle_queue(acry_dev, req);
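
Lines 417-434 are the encrypt/decrypt entry points: each picks the hardware trigger and which exponent to use, then queues the request. A hedged sketch of the encrypt side; ctx->enc and ctx->trigger are assumed field names (ctx->trigger is implied by line 193):

/* Hedged sketch of the entry points at lines 417-434. */
static int example_rsa_enc(struct akcipher_request *req)
{
    struct aspeed_acry_ctx *ctx = akcipher_tfm_ctx(crypto_akcipher_reqtfm(req));
    struct aspeed_acry_dev *acry_dev = ctx->acry_dev;

    ctx->trigger = aspeed_acry_rsa_trigger;
    ctx->enc = 1;          /* decrypt would clear this and use d instead of e */

    return aspeed_acry_handle_queue(acry_dev, req);
}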
489 struct aspeed_acry_dev *acry_dev = ctx->acry_dev;
498 dev_err(acry_dev->dev, "rsa parse key failed, ret:0x%x\n",
526 dev_err(acry_dev->dev, "rsa set key failed\n");
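
Lines 489-526 belong to setkey: the raw RSA key blob is parsed with the generic helpers and the components are cached for the DMA setup, with the two listed error messages covering the parse and copy failures. A hedged sketch; only the error strings come from the listing, the parse helpers and caching step are assumptions:

/* Hedged sketch of the setkey path around lines 489-526.
 * Needs <crypto/internal/rsa.h> for rsa_parse_pub_key()/rsa_parse_priv_key().
 */
static int example_rsa_setkey(struct crypto_akcipher *tfm, const void *key,
                              unsigned int keylen, int priv)
{
    struct aspeed_acry_ctx *ctx = akcipher_tfm_ctx(tfm);
    struct aspeed_acry_dev *acry_dev = ctx->acry_dev;
    struct rsa_key raw_key = {};
    int ret;

    ret = priv ? rsa_parse_priv_key(&raw_key, key, keylen)
               : rsa_parse_pub_key(&raw_key, key, keylen);
    if (ret) {
        dev_err(acry_dev->dev, "rsa parse key failed, ret:0x%x\n", ret);
        return ret;
    }

    /* ... copy raw_key.n/e/d into ctx buffers for the DMA setup; a failure
     * there is what logs "rsa set key failed" (line 526) ...
     */
    return 0;
}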
579 ctx->acry_dev = acry_alg->acry_dev;
584 dev_err(ctx->acry_dev->dev, "ERROR: Cannot allocate fallback for %s %ld\n",
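
Lines 579-584 sit in the tfm init path: the context resolves its device back-pointer from the registered algorithm wrapper and allocates a software fallback transform of the same name. A hedged sketch; the akcipher member name and the fallback flags are assumptions:

/* Hedged sketch of the tfm init path around lines 579-584. */
static int example_rsa_init_tfm(struct crypto_akcipher *tfm)
{
    struct aspeed_acry_ctx *ctx = akcipher_tfm_ctx(tfm);
    struct akcipher_alg *alg = crypto_akcipher_alg(tfm);
    const char *name = crypto_tfm_alg_name(&tfm->base);
    struct aspeed_acry_alg *acry_alg;

    acry_alg = container_of(alg, struct aspeed_acry_alg, akcipher);
    ctx->acry_dev = acry_alg->acry_dev;                 /* line 579 */

    ctx->fallback_tfm = crypto_alloc_akcipher(name, 0,
                                              CRYPTO_ALG_ASYNC |
                                              CRYPTO_ALG_NEED_FALLBACK);
    if (IS_ERR(ctx->fallback_tfm)) {
        dev_err(ctx->acry_dev->dev,
                "ERROR: Cannot allocate fallback for %s %ld\n",
                name, PTR_ERR(ctx->fallback_tfm));
        return PTR_ERR(ctx->fallback_tfm);
    }

    return 0;
}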
629 static void aspeed_acry_register(struct aspeed_acry_dev *acry_dev)
634 aspeed_acry_akcipher_algs[i].acry_dev = acry_dev;
637 ACRY_DBG(acry_dev, "Failed to register %s\n",
643 static void aspeed_acry_unregister(struct aspeed_acry_dev *acry_dev)
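
Lines 629-643 cover (un)registration: each entry of the algorithm table gets the device back-pointer before its akcipher algorithm is registered, and unregister walks the same table. A hedged sketch; the table name follows line 634, while the akcipher member name and loop bounds are assumptions:

/* Hedged sketch of (un)registration at lines 629-643. */
static void example_register(struct aspeed_acry_dev *acry_dev)
{
    int i, rc;

    for (i = 0; i < ARRAY_SIZE(aspeed_acry_akcipher_algs); i++) {
        aspeed_acry_akcipher_algs[i].acry_dev = acry_dev;
        rc = crypto_register_akcipher(&aspeed_acry_akcipher_algs[i].akcipher);
        if (rc)
            ACRY_DBG(acry_dev, "Failed to register %s\n",
                     aspeed_acry_akcipher_algs[i].akcipher.base.cra_name);
    }
}

static void example_unregister(struct aspeed_acry_dev *acry_dev)
{
    int i;

    for (i = 0; i < ARRAY_SIZE(aspeed_acry_akcipher_algs); i++)
        crypto_unregister_akcipher(&aspeed_acry_akcipher_algs[i].akcipher);
}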
654 struct aspeed_acry_dev *acry_dev = (struct aspeed_acry_dev *)dev;
657 sts = ast_acry_read(acry_dev, ASPEED_ACRY_STATUS);
658 ast_acry_write(acry_dev, sts, ASPEED_ACRY_STATUS);
660 ACRY_DBG(acry_dev, "irq sts:0x%x\n", sts);
664 ast_acry_write(acry_dev, 0, ASPEED_ACRY_TRIGGER);
666 if (acry_dev->flags & CRYPTO_FLAGS_BUSY)
667 tasklet_schedule(&acry_dev->done_task);
669 dev_err(acry_dev->dev, "RSA no active requests.\n");
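
Lines 654-669 are the interrupt handler: acknowledge the status, stop the trigger, and defer completion to the done tasklet only if a request is actually in flight. A hedged sketch; the status-bit test is an assumption, the ack, trigger clear and tasklet scheduling follow the listed lines:

/* Hedged sketch of the interrupt path at lines 654-669. */
static irqreturn_t aspeed_acry_irq(int irq, void *dev)
{
    struct aspeed_acry_dev *acry_dev = (struct aspeed_acry_dev *)dev;
    u32 sts;

    sts = ast_acry_read(acry_dev, ASPEED_ACRY_STATUS);
    ast_acry_write(acry_dev, sts, ASPEED_ACRY_STATUS);      /* ack (write-1-clear) */

    ACRY_DBG(acry_dev, "irq sts:0x%x\n", sts);

    if (sts & ACRY_RSA_ISR) {
        /* Stop the RSA engine before handing off to the tasklet. */
        ast_acry_write(acry_dev, 0, ASPEED_ACRY_TRIGGER);

        if (acry_dev->flags & CRYPTO_FLAGS_BUSY)
            tasklet_schedule(&acry_dev->done_task);
        else
            dev_err(acry_dev->dev, "RSA no active requests.\n");
    }

    return IRQ_HANDLED;
}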
679 static void aspeed_acry_sram_mapping(struct aspeed_acry_dev *acry_dev)
684 acry_dev->exp_dw_mapping[i] = j;
685 acry_dev->mod_dw_mapping[i] = j + 4;
686 acry_dev->data_byte_mapping[(i * 4)] = (j + 8) * 4;
687 acry_dev->data_byte_mapping[(i * 4) + 1] = (j + 8) * 4 + 1;
688 acry_dev->data_byte_mapping[(i * 4) + 2] = (j + 8) * 4 + 2;
689 acry_dev->data_byte_mapping[(i * 4) + 3] = (j + 8) * 4 + 3;
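
Lines 679-689 build the SRAM layout tables at probe time: exponent and modulus dword slots are interleaved and per-byte data offsets derived from them. A hedged sketch of that loop; the loop bound and the stride adjustment at the end are assumptions about the hardware layout, the six assignments are the listed lines:

/* Hedged sketch of the mapping setup at lines 679-689. */
static void aspeed_acry_sram_mapping(struct aspeed_acry_dev *acry_dev)
{
    int i, j = 0;

    for (i = 0; i < (ASPEED_ACRY_SRAM_MAX_LEN / 4); i++) {
        acry_dev->exp_dw_mapping[i] = j;
        acry_dev->mod_dw_mapping[i] = j + 4;
        acry_dev->data_byte_mapping[(i * 4)] = (j + 8) * 4;
        acry_dev->data_byte_mapping[(i * 4) + 1] = (j + 8) * 4 + 1;
        acry_dev->data_byte_mapping[(i * 4) + 2] = (j + 8) * 4 + 2;
        acry_dev->data_byte_mapping[(i * 4) + 3] = (j + 8) * 4 + 3;
        j++;
        j = (j % 4) ? j : j + 8;    /* assumed: hop over the next reserved region */
    }
}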
697 struct aspeed_acry_dev *acry_dev = (struct aspeed_acry_dev *)data;
699 (void)acry_dev->resume(acry_dev);
709 struct aspeed_acry_dev *acry_dev;
713 acry_dev = devm_kzalloc(dev, sizeof(struct aspeed_acry_dev),
715 if (!acry_dev)
718 acry_dev->dev = dev;
720 platform_set_drvdata(pdev, acry_dev);
722 acry_dev->regs = devm_platform_ioremap_resource(pdev, 0);
723 if (IS_ERR(acry_dev->regs))
724 return PTR_ERR(acry_dev->regs);
726 acry_dev->acry_sram = devm_platform_ioremap_resource(pdev, 1);
727 if (IS_ERR(acry_dev->acry_sram))
728 return PTR_ERR(acry_dev->acry_sram);
731 acry_dev->irq = platform_get_irq(pdev, 0);
732 if (acry_dev->irq < 0)
735 rc = devm_request_irq(dev, acry_dev->irq, aspeed_acry_irq, 0,
736 dev_name(dev), acry_dev);
742 acry_dev->clk = devm_clk_get_enabled(dev, NULL);
743 if (IS_ERR(acry_dev->clk)) {
745 return PTR_ERR(acry_dev->clk);
748 acry_dev->ahbc = syscon_regmap_lookup_by_phandle(dev->of_node,
750 if (IS_ERR(acry_dev->ahbc)) {
756 acry_dev->crypt_engine_rsa = crypto_engine_alloc_init(dev, true);
757 if (!acry_dev->crypt_engine_rsa) {
762 rc = crypto_engine_start(acry_dev->crypt_engine_rsa);
766 tasklet_init(&acry_dev->done_task, aspeed_acry_done_task,
767 (unsigned long)acry_dev);
770 ast_acry_write(acry_dev, ACRY_CMD_DMEM_AHB, ASPEED_ACRY_DMA_CMD);
773 aspeed_acry_sram_mapping(acry_dev);
775 acry_dev->buf_addr = dmam_alloc_coherent(dev, ASPEED_ACRY_BUFF_SIZE,
776 &acry_dev->buf_dma_addr,
778 if (!acry_dev->buf_addr) {
783 aspeed_acry_register(acry_dev);
790 crypto_engine_exit(acry_dev->crypt_engine_rsa);
792 clk_disable_unprepare(acry_dev->clk);
799 struct aspeed_acry_dev *acry_dev = platform_get_drvdata(pdev);
801 aspeed_acry_unregister(acry_dev);
802 crypto_engine_exit(acry_dev->crypt_engine_rsa);
803 tasklet_kill(&acry_dev->done_task);
804 clk_disable_unprepare(acry_dev->clk);