Lines Matching defs:ebu_host
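For orientation, here is a sketch of the controller state these matches dereference, reconstructed purely from the field accesses in the lines below; member order, exact types, and the MAX_CS / struct ebu_nand_cs names are assumptions, not the file's literal definitions.

/* Reconstructed from the accesses in this listing; MAX_CS and the
 * per-CS struct name are assumptions. */
struct ebu_nand_cs {
	void __iomem *chipaddr;	/* per-CS data window (lines 148, 158) */
	u32 addr_sel;		/* programmed into EBU_ADDR_SEL(cs) (line 668) */
};

struct ebu_nand_controller {
	struct nand_controller controller;	/* line 593 */
	struct nand_chip chip;			/* lines 671-691 */
	struct device *dev;			/* line 592 */
	void __iomem *ebu;			/* "ebunand" block, line 595 */
	void __iomem *hsnand;			/* "hsnand" block, line 599 */
	struct dma_chan *dma_tx;		/* line 641 */
	struct dma_chan *dma_rx;		/* line 648 */
	struct completion dma_access_complete;	/* lines 275, 284 */
	struct clk *clk;			/* line 634 */
	u32 nd_para0;				/* cached HSNAND_PARA0, line 503 */
	u8 cs_num;				/* active chip select, line 145 */
	struct ebu_nand_cs cs[MAX_CS];
};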

144 struct ebu_nand_controller *ebu_host = nand_get_controller_data(chip);
145 u8 cs_num = ebu_host->cs_num;
148 val = readb(ebu_host->cs[cs_num].chipaddr + HSNAND_CS_OFFS);
155 struct ebu_nand_controller *ebu_host = nand_get_controller_data(chip);
156 u8 cs_num = ebu_host->cs_num;
158 writeb(value, ebu_host->cs[cs_num].chipaddr + offset);
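Lines 144-158 are the byte-wide accessor pair for the active chip-select window. A minimal sketch of how the matched lines plausibly fit together; the function names and the offset parameter are inferred, not shown in the matches:

static u8 ebu_nand_readb(struct nand_chip *chip)
{
	struct ebu_nand_controller *ebu_host = nand_get_controller_data(chip);
	u8 cs_num = ebu_host->cs_num;
	u8 val;

	/* One byte from the data/status offset of the active CS window. */
	val = readb(ebu_host->cs[cs_num].chipaddr + HSNAND_CS_OFFS);
	return val;
}

static void ebu_nand_writeb(struct nand_chip *chip, u32 offset, u8 value)
{
	struct ebu_nand_controller *ebu_host = nand_get_controller_data(chip);
	u8 cs_num = ebu_host->cs_num;

	/* One byte to the given offset in the same window. */
	writeb(value, ebu_host->cs[cs_num].chipaddr + offset);
}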
180 struct ebu_nand_controller *ebu_host = nand_get_controller_data(chip);
182 writel(0, ebu_host->ebu + EBU_CON);
187 struct ebu_nand_controller *ebu_host = nand_get_controller_data(chip);
188 void __iomem *nand_con = ebu_host->ebu + EBU_CON;
189 u32 cs = ebu_host->cs_num;
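Lines 180-189 toggle the EBU's NAND mode: the disable path clears EBU_CON outright, while chip selection reprograms it for the active CS. A sketch; the EBU_CON_* bits in the select path are assumed from the driver's register header, since only the address computation appears in the matches:

static void ebu_nand_disable(struct nand_chip *chip)
{
	struct ebu_nand_controller *ebu_host = nand_get_controller_data(chip);

	/* Drop the EBU out of NAND mode entirely. */
	writel(0, ebu_host->ebu + EBU_CON);
}

static void ebu_select_chip(struct nand_chip *chip)
{
	struct ebu_nand_controller *ebu_host = nand_get_controller_data(chip);
	void __iomem *nand_con = ebu_host->ebu + EBU_CON;
	u32 cs = ebu_host->cs_num;

	/* Enable NAND mode, route in/out to CS 'cs'; bit names assumed. */
	writel(EBU_CON_NANDM_EN | EBU_CON_CSMUX_E_EN | EBU_CON_CS_P_LOW |
	       EBU_CON_SE_P_LOW | EBU_CON_WP_P_LOW | EBU_CON_PRE_P_LOW |
	       EBU_CON_IN_CS_S(cs) | EBU_CON_OUT_CS_S(cs) |
	       EBU_CON_LAT_EN_CS_P, nand_con);
}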
271 struct ebu_nand_controller *ebu_host = cookie;
273 dmaengine_terminate_async(ebu_host->dma_rx);
275 complete(&ebu_host->dma_access_complete);
280 struct ebu_nand_controller *ebu_host = cookie;
282 dmaengine_terminate_async(ebu_host->dma_tx);
284 complete(&ebu_host->dma_access_complete);
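Lines 271-284 are the two dmaengine completion callbacks. They are symmetric: each stops its own channel and signals the shared completion that ebu_dma_start() sleeps on. The callback names below are inferred from the rx/tx split:

static void ebu_dma_rx_callback(void *cookie)
{
	struct ebu_nand_controller *ebu_host = cookie;

	/* Transfer finished: stop the channel, wake the waiter. */
	dmaengine_terminate_async(ebu_host->dma_rx);
	complete(&ebu_host->dma_access_complete);
}

static void ebu_dma_tx_callback(void *cookie)
{
	struct ebu_nand_controller *ebu_host = cookie;

	dmaengine_terminate_async(ebu_host->dma_tx);
	complete(&ebu_host->dma_access_complete);
}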
287 static int ebu_dma_start(struct ebu_nand_controller *ebu_host, u32 dir,
301 chan = ebu_host->dma_rx;
302 dma_completion = &ebu_host->dma_access_complete;
305 chan = ebu_host->dma_tx;
306 dma_completion = &ebu_host->dma_access_complete;
312 dev_err(ebu_host->dev, "Failed to map DMA buffer\n");
324 tx->callback_param = ebu_host;
329 dev_err(ebu_host->dev, "dma_submit_error %d\n", cookie);
340 dev_err(ebu_host->dev, "I/O Error in DMA RX (status %d)\n",
350 dma_unmap_single(ebu_host->dev, buf_dma, len, dir);
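The ebu_dma_start() matches (lines 287-350) trace the standard dmaengine slave sequence: pick a channel by direction, map the buffer, prepare a slave transfer with the callback attached, submit, wait on the completion, then unmap. A condensed sketch under those assumptions; the RX status check behind line 340 and some error reporting are omitted for brevity:

static int ebu_dma_start(struct ebu_nand_controller *ebu_host, u32 dir,
			 const u8 *buf, u32 len)
{
	struct dma_async_tx_descriptor *tx;
	struct completion *dma_completion;
	dma_async_tx_callback callback;
	struct dma_chan *chan;
	dma_cookie_t cookie;
	dma_addr_t buf_dma;
	int ret = 0;

	if (dir == DMA_DEV_TO_MEM) {
		chan = ebu_host->dma_rx;
		dma_completion = &ebu_host->dma_access_complete;
		callback = ebu_dma_rx_callback;
	} else {
		chan = ebu_host->dma_tx;
		dma_completion = &ebu_host->dma_access_complete;
		callback = ebu_dma_tx_callback;
	}

	buf_dma = dma_map_single(ebu_host->dev, (void *)buf, len, dir);
	if (dma_mapping_error(ebu_host->dev, buf_dma)) {
		dev_err(ebu_host->dev, "Failed to map DMA buffer\n");
		return -EIO;
	}

	tx = dmaengine_prep_slave_single(chan, buf_dma, len, dir,
					 DMA_CTRL_ACK | DMA_PREP_INTERRUPT);
	if (!tx) {
		ret = -ENXIO;
		goto err_unmap;
	}

	tx->callback = callback;
	tx->callback_param = ebu_host;

	init_completion(dma_completion);
	cookie = dmaengine_submit(tx);
	if (dma_submit_error(cookie)) {
		dev_err(ebu_host->dev, "dma_submit_error %d\n", cookie);
		ret = -EIO;
		goto err_unmap;
	}
	dma_async_issue_pending(chan);

	/* Sleep until the matching callback above fires. */
	if (!wait_for_completion_timeout(dma_completion,
					 msecs_to_jiffies(1000))) {
		dmaengine_terminate_sync(chan);
		ret = -ETIMEDOUT;
	}

err_unmap:
	dma_unmap_single(ebu_host->dev, buf_dma, len, dir);
	return ret;
}

Note that dir doubles as both the dmaengine transfer direction and the dma_map_single() mapping direction, exactly as line 350 itself does; the numeric values of the two enums happen to agree for these two cases.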
355 static void ebu_nand_trigger(struct ebu_nand_controller *ebu_host,
361 writel(val, ebu_host->hsnand + HSNAND_CTL1);
363 writel(val, ebu_host->hsnand + HSNAND_CTL2);
365 writel(ebu_host->nd_para0, ebu_host->hsnand + HSNAND_PARA0);
368 writel(0xFFFFFFFF, ebu_host->hsnand + HSNAND_CMSG_0);
369 writel(0xFFFFFFFF, ebu_host->hsnand + HSNAND_CMSG_1);
372 ebu_host->hsnand + HSNAND_INT_MSK_CTL);
380 HSNAND_CTL_ECC_OFF_V8TH | HSNAND_CTL_CE_SEL_CS(ebu_host->cs_num) |
382 ebu_host->hsnand + HSNAND_CTL);
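ebu_nand_trigger() (lines 355-382) arms the HSNAND engine for one page operation: page address and command into CTL1/CTL2, the cached geometry word into PARA0, the ECC message registers preset to all-ones, the completion interrupt unmasked, then HSNAND_CTL written with the GO bit. A sketch; the address packing and every register/bit name not visible in the matches (HSNAND_CTL1_ADDR_SHIFT, HSNAND_INT_MSK_CTL_WR_C, HSNAND_CTL_MSG_EN, HSNAND_CTL_GO, HSNAND_CTL_RW_*) are assumptions:

static void ebu_nand_trigger(struct ebu_nand_controller *ebu_host,
			     int page, u32 cmd)
{
	unsigned int val;

	/* Low page-address byte plus the command; packing assumed. */
	val = cmd | (page & 0xFF) << HSNAND_CTL1_ADDR_SHIFT;
	writel(val, ebu_host->hsnand + HSNAND_CTL1);
	/* Upper page-address bytes; packing assumed. */
	writel((page & 0xFFFF00) >> 8, ebu_host->hsnand + HSNAND_CTL2);

	writel(ebu_host->nd_para0, ebu_host->hsnand + HSNAND_PARA0);

	/* Preset the ECC message registers; writes refill them later. */
	writel(0xFFFFFFFF, ebu_host->hsnand + HSNAND_CMSG_0);
	writel(0xFFFFFFFF, ebu_host->hsnand + HSNAND_CMSG_1);

	writel(HSNAND_INT_MSK_CTL_WR_C,
	       ebu_host->hsnand + HSNAND_INT_MSK_CTL);

	/* Start the state machine, with the CS routed to cs_num. */
	writel(HSNAND_CTL_MSG_EN |
	       HSNAND_CTL_ECC_OFF_V8TH | HSNAND_CTL_CE_SEL_CS(ebu_host->cs_num) |
	       HSNAND_CTL_GO |
	       (cmd == NAND_CMD_READ0 ? HSNAND_CTL_RW_READ : HSNAND_CTL_RW_WRITE),
	       ebu_host->hsnand + HSNAND_CTL);
}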
389 struct ebu_nand_controller *ebu_host = nand_get_controller_data(chip);
392 ebu_nand_trigger(ebu_host, page, NAND_CMD_READ0);
394 ret = ebu_dma_start(ebu_host, DMA_DEV_TO_MEM, buf, mtd->writesize);
401 reg_data = readl(ebu_host->hsnand + HSNAND_CTL);
403 writel(reg_data, ebu_host->hsnand + HSNAND_CTL);
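With the trigger and DMA helpers in place, the read path (lines 389-403) is short: issue READ0 for the page, DMA the main area into the caller's buffer, optionally fetch OOB, then clear GO. A sketch assuming that structure:

static int ebu_nand_read_page_hwecc(struct nand_chip *chip, u8 *buf,
				    int oob_required, int page)
{
	struct mtd_info *mtd = nand_to_mtd(chip);
	struct ebu_nand_controller *ebu_host = nand_get_controller_data(chip);
	int ret, reg_data;

	ebu_nand_trigger(ebu_host, page, NAND_CMD_READ0);

	ret = ebu_dma_start(ebu_host, DMA_DEV_TO_MEM, buf, mtd->writesize);
	if (ret)
		return ret;

	if (oob_required)
		chip->ecc.read_oob(chip, page);

	/* Stop the HSNAND state machine by clearing GO. */
	reg_data = readl(ebu_host->hsnand + HSNAND_CTL);
	reg_data &= ~HSNAND_CTL_GO;
	writel(reg_data, ebu_host->hsnand + HSNAND_CTL);

	return 0;
}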
412 struct ebu_nand_controller *ebu_host = nand_get_controller_data(chip);
413 void __iomem *int_sta = ebu_host->hsnand + HSNAND_INT_STA;
417 ebu_nand_trigger(ebu_host, page, NAND_CMD_SEQIN);
419 ret = ebu_dma_start(ebu_host, DMA_MEM_TO_DEV, buf, mtd->writesize);
425 writel(reg, ebu_host->hsnand + HSNAND_CMSG_0);
428 writel(reg, ebu_host->hsnand + HSNAND_CMSG_1);
436 reg_data = readl(ebu_host->hsnand + HSNAND_CTL);
438 writel(reg_data, ebu_host->hsnand + HSNAND_CTL);
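The write path (lines 412-438) mirrors it with two extras: the first eight OOB bytes are loaded into CMSG_0/CMSG_1 (lines 425-428) so the engine can emit them alongside the data, and HSNAND_INT_STA is polled for write completion before GO is cleared. A sketch; the poll polarity, the timeouts, and the HSNAND_INT_STA_WR_C name are assumptions:

static int ebu_nand_write_page_hwecc(struct nand_chip *chip, const u8 *buf,
				     int oob_required, int page)
{
	struct mtd_info *mtd = nand_to_mtd(chip);
	struct ebu_nand_controller *ebu_host = nand_get_controller_data(chip);
	void __iomem *int_sta = ebu_host->hsnand + HSNAND_INT_STA;
	int reg_data, ret;
	u32 reg, val;

	ebu_nand_trigger(ebu_host, page, NAND_CMD_SEQIN);

	ret = ebu_dma_start(ebu_host, DMA_MEM_TO_DEV, buf, mtd->writesize);
	if (ret)
		return ret;

	if (oob_required) {
		/* Hand the first 8 OOB bytes to the ECC message registers. */
		reg = get_unaligned_le32(chip->oob_poi);
		writel(reg, ebu_host->hsnand + HSNAND_CMSG_0);
		reg = get_unaligned_le32(chip->oob_poi + 4);
		writel(reg, ebu_host->hsnand + HSNAND_CMSG_1);
	}

	/* Poll for write completion; bit name and polarity assumed. */
	ret = readl_poll_timeout_atomic(int_sta, val,
					val & HSNAND_INT_STA_WR_C, 10, 1000);
	if (ret)
		return ret;

	reg_data = readl(ebu_host->hsnand + HSNAND_CTL);
	reg_data &= ~HSNAND_CTL_GO;
	writel(reg_data, ebu_host->hsnand + HSNAND_CTL);

	return 0;
}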
448 struct ebu_nand_controller *ebu_host = nand_get_controller_data(chip);
503 ebu_host->nd_para0 = pagesize | pg_per_blk | HSNAND_PARA0_BYP_EN_NP |
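Line 503 is the one place nd_para0 is computed: the attach_chip hook packs the negotiated geometry into the PARA0 image that ebu_nand_trigger() replays on every operation. Only the first three terms of the expression are visible here; the remainder is left indicated:

	/* Cache the PARA0 image; pagesize and pg_per_blk are the encoded
	 * page-size and pages-per-block fields computed earlier in
	 * attach_chip. Flags beyond those visible at line 503 are elided. */
	ebu_host->nd_para0 = pagesize | pg_per_blk | HSNAND_PARA0_BYP_EN_NP |
			     /* ... further HSNAND_PARA0_* mode bits ... */ 0;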
567 static void ebu_dma_cleanup(struct ebu_nand_controller *ebu_host)
569 if (ebu_host->dma_rx)
570 dma_release_channel(ebu_host->dma_rx);
572 if (ebu_host->dma_tx)
573 dma_release_channel(ebu_host->dma_tx);
579 struct ebu_nand_controller *ebu_host;
588 ebu_host = devm_kzalloc(dev, sizeof(*ebu_host), GFP_KERNEL);
589 if (!ebu_host)
592 ebu_host->dev = dev;
593 nand_controller_init(&ebu_host->controller);
595 ebu_host->ebu = devm_platform_ioremap_resource_byname(pdev, "ebunand");
596 if (IS_ERR(ebu_host->ebu))
597 return PTR_ERR(ebu_host->ebu);
599 ebu_host->hsnand = devm_platform_ioremap_resource_byname(pdev, "hsnand");
600 if (IS_ERR(ebu_host->hsnand))
601 return PTR_ERR(ebu_host->hsnand);
619 ebu_host->cs_num = cs;
627 ebu_host->cs[cs].chipaddr = devm_platform_ioremap_resource_byname(pdev,
629 if (IS_ERR(ebu_host->cs[cs].chipaddr)) {
630 ret = PTR_ERR(ebu_host->cs[cs].chipaddr);
634 ebu_host->clk = devm_clk_get_enabled(dev, NULL);
635 if (IS_ERR(ebu_host->clk)) {
636 ret = dev_err_probe(dev, PTR_ERR(ebu_host->clk),
641 ebu_host->dma_tx = dma_request_chan(dev, "tx");
642 if (IS_ERR(ebu_host->dma_tx)) {
643 ret = dev_err_probe(dev, PTR_ERR(ebu_host->dma_tx),
648 ebu_host->dma_rx = dma_request_chan(dev, "rx");
649 if (IS_ERR(ebu_host->dma_rx)) {
650 ret = dev_err_probe(dev, PTR_ERR(ebu_host->dma_rx),
652 ebu_host->dma_rx = NULL;
667 ebu_host->cs[cs].addr_sel = res->start;
668 writel(ebu_host->cs[cs].addr_sel | EBU_ADDR_MASK(5) | EBU_ADDR_SEL_REGEN,
669 ebu_host->ebu + EBU_ADDR_SEL(cs));
671 nand_set_flash_node(&ebu_host->chip, chip_np);
673 mtd = nand_to_mtd(&ebu_host->chip);
675 dev_err(ebu_host->dev, "NAND label property is mandatory\n");
681 ebu_host->dev = dev;
683 platform_set_drvdata(pdev, ebu_host);
684 nand_set_controller_data(&ebu_host->chip, ebu_host);
686 nand = &ebu_host->chip;
687 nand->controller = &ebu_host->controller;
691 ret = nand_scan(&ebu_host->chip, 1);
702 nand_cleanup(&ebu_host->chip);
704 ebu_dma_cleanup(ebu_host);
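Taken together, the probe matches (lines 579-704) give the acquisition order: devm-allocate the controller, map the "ebunand" and "hsnand" blocks, read the chip-select number from DT, map the per-CS window, grab the clock and both DMA channels, program EBU_ADDR_SEL, bind the nand_chip, then nand_scan() and MTD registration, unwinding DMA and the NAND core on failure. A condensed skeleton under those assumptions; the DT walk, the mandatory-label check at line 675, and some error labels are abbreviated. Two details worth noting: the second `ebu_host->dev = dev` at line 681 looks redundant with line 592, and the NULL store at line 652 exists so ebu_dma_cleanup()'s guards skip the ERR_PTR on the error path.

static int ebu_nand_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct ebu_nand_controller *ebu_host;
	struct nand_chip *nand;
	int ret;

	ebu_host = devm_kzalloc(dev, sizeof(*ebu_host), GFP_KERNEL);
	if (!ebu_host)
		return -ENOMEM;

	ebu_host->dev = dev;
	nand_controller_init(&ebu_host->controller);

	ebu_host->ebu = devm_platform_ioremap_resource_byname(pdev, "ebunand");
	if (IS_ERR(ebu_host->ebu))
		return PTR_ERR(ebu_host->ebu);

	ebu_host->hsnand = devm_platform_ioremap_resource_byname(pdev, "hsnand");
	if (IS_ERR(ebu_host->hsnand))
		return PTR_ERR(ebu_host->hsnand);

	/* ... read the CS number from DT into ebu_host->cs_num and map the
	 * per-CS window into ebu_host->cs[cs].chipaddr (lines 619-630) ... */

	ebu_host->clk = devm_clk_get_enabled(dev, NULL);
	if (IS_ERR(ebu_host->clk))
		return dev_err_probe(dev, PTR_ERR(ebu_host->clk),
				     "failed to get and enable clock\n");

	ebu_host->dma_tx = dma_request_chan(dev, "tx");
	if (IS_ERR(ebu_host->dma_tx))
		return dev_err_probe(dev, PTR_ERR(ebu_host->dma_tx),
				     "failed to request DMA tx channel\n");

	ebu_host->dma_rx = dma_request_chan(dev, "rx");
	if (IS_ERR(ebu_host->dma_rx)) {
		ret = dev_err_probe(dev, PTR_ERR(ebu_host->dma_rx),
				    "failed to request DMA rx channel\n");
		ebu_host->dma_rx = NULL;	/* so cleanup skips the ERR_PTR */
		goto err_cleanup_dma;
	}

	/* ... program EBU_ADDR_SEL(cs) from the translated DT address
	 * (lines 667-669) and bind the flash node (line 671) ... */

	platform_set_drvdata(pdev, ebu_host);
	nand_set_controller_data(&ebu_host->chip, ebu_host);

	nand = &ebu_host->chip;
	nand->controller = &ebu_host->controller;

	ret = nand_scan(&ebu_host->chip, 1);
	if (ret)
		goto err_cleanup_dma;

	ret = mtd_device_register(nand_to_mtd(&ebu_host->chip), NULL, 0);
	if (ret)
		goto err_clean_nand;

	return 0;

err_clean_nand:
	nand_cleanup(&ebu_host->chip);
err_cleanup_dma:
	ebu_dma_cleanup(ebu_host);
	return ret;
}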
713 struct ebu_nand_controller *ebu_host = platform_get_drvdata(pdev);
716 ret = mtd_device_unregister(nand_to_mtd(&ebu_host->chip));
718 nand_cleanup(&ebu_host->chip);
719 ebu_nand_disable(&ebu_host->chip);
720 ebu_dma_cleanup(ebu_host);
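The remove path (lines 713-720) unwinds in reverse: unregister the MTD, tear down the NAND core, take the EBU out of NAND mode, release the DMA channels. Completing the matched lines, and assuming the modern void-returning platform remove:

static void ebu_nand_remove(struct platform_device *pdev)
{
	struct ebu_nand_controller *ebu_host = platform_get_drvdata(pdev);
	int ret;

	ret = mtd_device_unregister(nand_to_mtd(&ebu_host->chip));
	WARN_ON(ret);
	nand_cleanup(&ebu_host->chip);
	ebu_nand_disable(&ebu_host->chip);
	ebu_dma_cleanup(ebu_host);
}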