Lines matching refs:dmadev

309 static u32 stm32_mdma_read(struct stm32_mdma_device *dmadev, u32 reg)
311 return readl_relaxed(dmadev->base + reg);
314 static void stm32_mdma_write(struct stm32_mdma_device *dmadev, u32 reg, u32 val)
316 writel_relaxed(val, dmadev->base + reg);
319 static void stm32_mdma_set_bits(struct stm32_mdma_device *dmadev, u32 reg,
322 void __iomem *addr = dmadev->base + reg;
327 static void stm32_mdma_clr_bits(struct stm32_mdma_device *dmadev, u32 reg,
330 void __iomem *addr = dmadev->base + reg;
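
The read and write accessors above are plain relaxed MMIO operations over dmadev->base; the set/clear helpers at 319-330 only show their address computation in this listing. A minimal reconstruction of their bodies, assuming the usual read-modify-write over that precomputed address (a sketch, not copied from the source):

static void stm32_mdma_set_bits(struct stm32_mdma_device *dmadev, u32 reg,
				u32 mask)
{
	void __iomem *addr = dmadev->base + reg;

	/* assumed read-modify-write, matching the relaxed accessors above */
	writel_relaxed(readl_relaxed(addr) | mask, addr);
}

static void stm32_mdma_clr_bits(struct stm32_mdma_device *dmadev, u32 reg,
				u32 mask)
{
	void __iomem *addr = dmadev->base + reg;

	writel_relaxed(readl_relaxed(addr) & ~mask, addr);
}
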
427 struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
435 stm32_mdma_clr_bits(dmadev, reg, STM32_MDMA_CCR_IRQ_MASK);
437 ccr = stm32_mdma_read(dmadev, reg);
439 stm32_mdma_clr_bits(dmadev, reg, STM32_MDMA_CCR_EN);
443 dmadev->base + STM32_MDMA_CISR(id), cisr,
456 struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
466 status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id));
470 stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(chan->id), status);
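
Lines 427-443 are the channel-disable path: the per-channel interrupt enables in CCR are masked, EN is cleared if still set, and CISR is polled until the channel goes idle; lines 456-470 then acknowledge any leftover flags by writing them back to CIFCR. A hedged sketch of the disable step, using readl_relaxed_poll_timeout_atomic() from <linux/iopoll.h>; the CRQA bit name and the timeout values are assumptions:

#include <linux/iopoll.h>

/* Hedged sketch: mask channel IRQs, drop EN, then wait for idle.
 * STM32_MDMA_CISR_CRQA and the 10 us / 1 ms timeouts are assumptions. */
static int stm32_mdma_disable_chan_sketch(struct stm32_mdma_chan *chan)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	u32 reg = STM32_MDMA_CCR(chan->id);
	u32 cisr;

	stm32_mdma_clr_bits(dmadev, reg, STM32_MDMA_CCR_IRQ_MASK);

	if (!(stm32_mdma_read(dmadev, reg) & STM32_MDMA_CCR_EN))
		return 0;

	stm32_mdma_clr_bits(dmadev, reg, STM32_MDMA_CCR_EN);

	/* poll until the channel no longer reports an active request */
	return readl_relaxed_poll_timeout_atomic(
			dmadev->base + STM32_MDMA_CISR(chan->id), cisr,
			!(cisr & STM32_MDMA_CISR_CRQA), 10, 1000);
}
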
476 static void stm32_mdma_set_bus(struct stm32_mdma_device *dmadev, u32 *ctbr,
485 for (i = 0; i < dmadev->nr_ahb_addr_masks; i++) {
486 if (mask == dmadev->ahb_addr_masks[i]) {
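
stm32_mdma_set_bus() (lines 476-486) routes one side of the transfer through the AHB bus when the address matches one of the per-SoC masks read from the device tree. A hedged reconstruction; the way the comparison mask is derived from the address is an assumption:

static void stm32_mdma_set_bus(struct stm32_mdma_device *dmadev, u32 *ctbr,
			       u32 ctbr_mask, u32 src_addr)
{
	u32 mask;
	int i;

	/* route through AHB if the address matches a DT-provided mask;
	 * the "upper byte" extraction below is an assumption */
	*ctbr &= ~ctbr_mask;
	mask = src_addr & 0xFF000000;
	for (i = 0; i < dmadev->nr_ahb_addr_masks; i++) {
		if (mask == dmadev->ahb_addr_masks[i]) {
			*ctbr |= ctbr_mask;
			break;
		}
	}
}
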
499 struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
512 ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id)) & ~STM32_MDMA_CCR_EN;
513 ctcr = stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id));
514 ctbr = stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id));
603 stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
610 stm32_mdma_write(dmadev, STM32_MDMA_CDAR(chan->id), dst_addr);
650 stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
657 stm32_mdma_write(dmadev, STM32_MDMA_CSAR(chan->id), src_addr);
729 struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
748 stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
756 stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
825 struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
864 stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
870 stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
914 struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
940 ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id)) & ~STM32_MDMA_CCR_EN;
941 ctcr = stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id));
942 ctbr = stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id));
943 cbndtr = stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id));
966 stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS, src);
967 stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS, dest);
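
The fragments from line 499 through 967 all follow the same pattern when a transfer is prepared: CCR/CTCR/CTBR are read back with the enable bit cleared, stm32_mdma_set_bus() tags the source and destination sides (SBUS/DBUS), and the addresses land in CSAR/CDAR. A hedged skeleton of one direction; the function name, parameters and return value are illustrative, and the CTCR burst/width programming is elided because it is not visible here:

static int stm32_mdma_setup_xfer_sketch(struct stm32_mdma_chan *chan,
					enum dma_transfer_direction dir,
					u32 *ccr, u32 *ctcr, u32 *ctbr,
					dma_addr_t addr)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);

	/* read the channel registers back with the enable bit cleared */
	*ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id)) & ~STM32_MDMA_CCR_EN;
	*ctcr = stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id));
	*ctbr = stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id));

	if (dir == DMA_MEM_TO_DEV) {
		/* peripheral is the destination: tag DBUS, program CDAR */
		stm32_mdma_set_bus(dmadev, ctbr, STM32_MDMA_CTBR_DBUS, addr);
		stm32_mdma_write(dmadev, STM32_MDMA_CDAR(chan->id), addr);
	} else {
		/* DMA_DEV_TO_MEM mirrors this with SBUS and CSAR */
		stm32_mdma_set_bus(dmadev, ctbr, STM32_MDMA_CTBR_SBUS, addr);
		stm32_mdma_write(dmadev, STM32_MDMA_CSAR(chan->id), addr);
	}

	return 0;
}
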
1090 struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1093 stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id)));
1095 stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id)));
1097 stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id)));
1099 stm32_mdma_read(dmadev, STM32_MDMA_CSAR(chan->id)));
1101 stm32_mdma_read(dmadev, STM32_MDMA_CDAR(chan->id)));
1103 stm32_mdma_read(dmadev, STM32_MDMA_CBRUR(chan->id)));
1105 stm32_mdma_read(dmadev, STM32_MDMA_CLAR(chan->id)));
1107 stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id)));
1109 stm32_mdma_read(dmadev, STM32_MDMA_CMAR(chan->id)));
1111 stm32_mdma_read(dmadev, STM32_MDMA_CMDR(chan->id)));
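
The block at lines 1090-1111 is a pure diagnostic dump: each per-channel register is read back through stm32_mdma_read() and traced. A shortened sketch of the pattern; the format strings are illustrative:

static void stm32_mdma_dump_reg_sketch(struct stm32_mdma_chan *chan)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);

	dev_dbg(mdma2dev(dmadev), "CCR:  0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id)));
	dev_dbg(mdma2dev(dmadev), "CTCR: 0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id)));
	/* CBNDTR, CSAR, CDAR, CBRUR, CLAR, CTBR, CMAR and CMDR follow suit */
}
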
1116 struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1134 stm32_mdma_write(dmadev, STM32_MDMA_CCR(id), chan->desc->ccr);
1135 stm32_mdma_write(dmadev, STM32_MDMA_CTCR(id), hwdesc->ctcr);
1136 stm32_mdma_write(dmadev, STM32_MDMA_CBNDTR(id), hwdesc->cbndtr);
1137 stm32_mdma_write(dmadev, STM32_MDMA_CSAR(id), hwdesc->csar);
1138 stm32_mdma_write(dmadev, STM32_MDMA_CDAR(id), hwdesc->cdar);
1139 stm32_mdma_write(dmadev, STM32_MDMA_CBRUR(id), hwdesc->cbrur);
1140 stm32_mdma_write(dmadev, STM32_MDMA_CLAR(id), hwdesc->clar);
1141 stm32_mdma_write(dmadev, STM32_MDMA_CTBR(id), hwdesc->ctbr);
1142 stm32_mdma_write(dmadev, STM32_MDMA_CMAR(id), hwdesc->cmar);
1143 stm32_mdma_write(dmadev, STM32_MDMA_CMDR(id), hwdesc->cmdr);
1146 status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(id));
1148 stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(id), status);
1153 stm32_mdma_set_bits(dmadev, STM32_MDMA_CCR(id), STM32_MDMA_CCR_EN);
1158 stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_SWRQ);
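
Lines 1116-1158 start a transfer: the first hardware descriptor is copied field by field into the channel registers, stale flags are acknowledged through CIFCR, the channel is enabled, and a software request is issued for channels that are not hardware-triggered. A condensed sketch; the hwdesc struct name is inferred from the fields visible above, and the sw_request parameter stands in for the driver's own trigger test:

static void stm32_mdma_start_transfer_sketch(struct stm32_mdma_chan *chan,
					     struct stm32_mdma_hwdesc *hwdesc,
					     bool sw_request)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	u32 id = chan->id, status;

	/* program the descriptor into the channel registers */
	stm32_mdma_write(dmadev, STM32_MDMA_CCR(id), chan->desc->ccr);
	stm32_mdma_write(dmadev, STM32_MDMA_CTCR(id), hwdesc->ctcr);
	stm32_mdma_write(dmadev, STM32_MDMA_CBNDTR(id), hwdesc->cbndtr);
	stm32_mdma_write(dmadev, STM32_MDMA_CSAR(id), hwdesc->csar);
	stm32_mdma_write(dmadev, STM32_MDMA_CDAR(id), hwdesc->cdar);
	stm32_mdma_write(dmadev, STM32_MDMA_CBRUR(id), hwdesc->cbrur);
	stm32_mdma_write(dmadev, STM32_MDMA_CLAR(id), hwdesc->clar);
	stm32_mdma_write(dmadev, STM32_MDMA_CTBR(id), hwdesc->ctbr);
	stm32_mdma_write(dmadev, STM32_MDMA_CMAR(id), hwdesc->cmar);
	stm32_mdma_write(dmadev, STM32_MDMA_CMDR(id), hwdesc->cmdr);

	/* acknowledge any stale flags before enabling */
	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(id));
	if (status)
		stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(id), status);

	stm32_mdma_set_bits(dmadev, STM32_MDMA_CCR(id), STM32_MDMA_CCR_EN);

	/* channels without a hardware request line are kicked by software */
	if (sw_request)
		stm32_mdma_set_bits(dmadev, STM32_MDMA_CCR(id),
				    STM32_MDMA_CCR_SWRQ);
}
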
1204 struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1210 if (!chan->desc || (stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id)) & STM32_MDMA_CCR_EN))
1218 stm32_mdma_write(dmadev, STM32_MDMA_CCR(chan->id), chan->desc->ccr);
1221 status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id));
1223 stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(chan->id), status);
1229 stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_EN);
1233 stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_SWRQ);
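
Lines 1204-1233 restart a queued channel: nothing happens if no descriptor is pending or the channel is already enabled; otherwise CCR is rewritten from the descriptor, pending flags are acknowledged, EN is set again and a software request is issued where applicable. A hedged sketch:

static void stm32_mdma_restart_sketch(struct stm32_mdma_chan *chan)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	u32 reg = STM32_MDMA_CCR(chan->id);
	u32 status;

	/* nothing queued, or the channel is still running */
	if (!chan->desc || (stm32_mdma_read(dmadev, reg) & STM32_MDMA_CCR_EN))
		return;

	/* reprogram CCR from the descriptor and acknowledge stale flags */
	stm32_mdma_write(dmadev, reg, chan->desc->ccr);
	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id));
	if (status)
		stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(chan->id), status);

	stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_EN);
	/* software request; the driver may gate this on the channel type */
	stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_SWRQ);
}
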
1284 struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1294 cbndtr = stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id));
1350 struct stm32_mdma_device *dmadev = devid;
1355 status = readl_relaxed(dmadev->base + STM32_MDMA_GISR0);
1357 dev_dbg(mdma2dev(dmadev), "spurious it\n");
1362 chan = &dmadev->chan[id];
1364 dev_warn(mdma2dev(dmadev), "MDMA channel not initialized\n");
1370 status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(id));
1373 ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(id));
1388 readl_relaxed(dmadev->base + STM32_MDMA_CESR(id)));
1389 stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CTEIF);
1394 stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CCTCIF);
1400 stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CBRTIF);
1405 stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CBTIF);
1416 stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CLTCIF);
1421 stm32_mdma_set_bits(dmadev, reg, status);
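
The interrupt handler (lines 1350-1421) reads GISR0 to find the interrupting channel, then reads that channel's CISR and CCR and acknowledges each flag (transfer error, channel transfer complete, block repeat, block transfer, buffer/link transfer complete) by writing it back to CIFCR; on an error, CESR is also read for diagnostics. A condensed sketch of the dispatch skeleton; the __ffs()-based channel lookup and the CISR bit macro names are assumptions:

#include <linux/interrupt.h>

static irqreturn_t stm32_mdma_irq_sketch(int irq, void *devid)
{
	struct stm32_mdma_device *dmadev = devid;
	struct stm32_mdma_chan *chan;
	u32 reg, id, status;

	/* GISR0 holds one pending bit per channel */
	status = readl_relaxed(dmadev->base + STM32_MDMA_GISR0);
	if (!status) {
		dev_dbg(mdma2dev(dmadev), "spurious it\n");
		return IRQ_NONE;
	}
	id = __ffs(status);
	chan = &dmadev->chan[id];

	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(id));
	reg = STM32_MDMA_CIFCR(id);

	/* STM32_MDMA_CISR_TEIF/CTCIF below are assumed bit names */
	if (status & STM32_MDMA_CISR_TEIF) {
		dev_err(mdma2dev(dmadev), "Transfer Err: stat=0x%08x\n",
			readl_relaxed(dmadev->base + STM32_MDMA_CESR(id)));
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CTEIF);
	}
	if (status & STM32_MDMA_CISR_CTCIF)
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CCTCIF);
	/* BRTIF, BTIF and LTCIF are acknowledged the same way (1400-1416) */

	return IRQ_HANDLED;
}
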
1435 struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1448 ret = pm_runtime_resume_and_get(dmadev->ddev.dev);
1454 pm_runtime_put(dmadev->ddev.dev);
1462 struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1474 pm_runtime_put(dmadev->ddev.dev);
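
Lines 1435-1476 bracket channel usage with runtime PM: the controller is brought up with pm_runtime_resume_and_get() when a channel's resources are allocated and released with pm_runtime_put() on free or on the error path. A minimal sketch (descriptor-pool handling elided; the dmaengine callbacks really take struct dma_chan, which is skipped here):

#include <linux/pm_runtime.h>

static int stm32_mdma_alloc_chan_resources_sketch(struct stm32_mdma_chan *chan)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	int ret;

	/* keep the controller powered while the channel is in use */
	ret = pm_runtime_resume_and_get(dmadev->ddev.dev);
	if (ret < 0)
		return ret;

	/* descriptor pool allocation elided; on failure the driver drops
	 * the reference again with pm_runtime_put(dmadev->ddev.dev) */
	return 0;
}

static void stm32_mdma_free_chan_resources_sketch(struct stm32_mdma_chan *chan)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);

	/* terminate and free descriptors elided */
	pm_runtime_put(dmadev->ddev.dev);
}
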
1483 struct stm32_mdma_device *dmadev = ofdma->of_dma_data;
1489 dev_err(mdma2dev(dmadev), "Bad number of args\n");
1499 if (config.request >= dmadev->nr_requests) {
1500 dev_err(mdma2dev(dmadev), "Bad request line\n");
1505 dev_err(mdma2dev(dmadev), "Priority level not supported\n");
1509 c = dma_get_any_slave_channel(&dmadev->ddev);
1511 dev_err(mdma2dev(dmadev), "No more channels available\n");
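
stm32_mdma_of_xlate() (lines 1483-1511) translates a DT dma-cells specifier into a channel: it checks the argument count, rejects request lines beyond nr_requests and unsupported priority levels, then grabs any free channel with dma_get_any_slave_channel(). A hedged sketch; the specifier layout (which cell carries what) and the minimum cell count are assumptions:

#include <linux/of_dma.h>

static struct dma_chan *stm32_mdma_of_xlate_sketch(struct of_phandle_args *dma_spec,
						   struct of_dma *ofdma)
{
	struct stm32_mdma_device *dmadev = ofdma->of_dma_data;
	struct dma_chan *c;

	if (dma_spec->args_count < 2) {
		dev_err(mdma2dev(dmadev), "Bad number of args\n");
		return NULL;
	}

	/* assumed: the first cell carries the request line */
	if (dma_spec->args[0] >= dmadev->nr_requests) {
		dev_err(mdma2dev(dmadev), "Bad request line\n");
		return NULL;
	}

	c = dma_get_any_slave_channel(&dmadev->ddev);
	if (!c) {
		dev_err(mdma2dev(dmadev), "No more channels available\n");
		return NULL;
	}

	/* priority check and per-channel config storage elided */
	return c;
}
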
1530 struct stm32_mdma_device *dmadev;
1562 dmadev = devm_kzalloc(&pdev->dev, sizeof(*dmadev) + sizeof(u32) * count,
1564 if (!dmadev)
1567 dmadev->nr_channels = nr_channels;
1568 dmadev->nr_requests = nr_requests;
1570 dmadev->ahb_addr_masks,
1572 dmadev->nr_ahb_addr_masks = count;
1575 dmadev->base = devm_ioremap_resource(&pdev->dev, res);
1576 if (IS_ERR(dmadev->base))
1577 return PTR_ERR(dmadev->base);
1579 dmadev->clk = devm_clk_get(&pdev->dev, NULL);
1580 if (IS_ERR(dmadev->clk))
1581 return dev_err_probe(&pdev->dev, PTR_ERR(dmadev->clk),
1584 ret = clk_prepare_enable(dmadev->clk);
1601 dd = &dmadev->ddev;
1635 for (i = 0; i < dmadev->nr_channels; i++) {
1636 chan = &dmadev->chan[i];
1642 dmadev->irq = platform_get_irq(pdev, 0);
1643 if (dmadev->irq < 0) {
1644 ret = dmadev->irq;
1648 ret = devm_request_irq(&pdev->dev, dmadev->irq, stm32_mdma_irq_handler,
1649 0, dev_name(&pdev->dev), dmadev);
1659 ret = of_dma_controller_register(of_node, stm32_mdma_of_xlate, dmadev);
1666 platform_set_drvdata(pdev, dmadev);
1677 clk_disable_unprepare(dmadev->clk);
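
The probe fragments (lines 1530-1677) cover bring-up: allocation of the device structure with a trailing u32 array for the AHB address masks, ioremap of the register block, clock handling with dev_err_probe(), dma_device and channel setup, the shared IRQ, DT registration, and a clock-unprepare error path. A hedged sketch of the allocation step only; the DT property name and the property helpers are assumptions:

#include <linux/platform_device.h>
#include <linux/property.h>

static struct stm32_mdma_device *stm32_mdma_alloc_sketch(struct platform_device *pdev,
							 u32 nr_channels,
							 u32 nr_requests)
{
	struct stm32_mdma_device *dmadev;
	int count;

	/* the mask table is a flexible array appended to the structure;
	 * "st,ahb-addr-masks" is an assumed property name */
	count = device_property_count_u32(&pdev->dev, "st,ahb-addr-masks");
	if (count < 0)
		count = 0;

	dmadev = devm_kzalloc(&pdev->dev, sizeof(*dmadev) + sizeof(u32) * count,
			      GFP_KERNEL);
	if (!dmadev)
		return NULL;

	dmadev->nr_channels = nr_channels;
	dmadev->nr_requests = nr_requests;

	if (count &&
	    !device_property_read_u32_array(&pdev->dev, "st,ahb-addr-masks",
					    dmadev->ahb_addr_masks, count))
		dmadev->nr_ahb_addr_masks = count;

	return dmadev;
}
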
1685 struct stm32_mdma_device *dmadev = dev_get_drvdata(dev);
1687 clk_disable_unprepare(dmadev->clk);
1694 struct stm32_mdma_device *dmadev = dev_get_drvdata(dev);
1697 ret = clk_prepare_enable(dmadev->clk);
1710 struct stm32_mdma_device *dmadev = dev_get_drvdata(dev);
1718 for (id = 0; id < dmadev->nr_channels; id++) {
1719 ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(id));
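
The final fragments (lines 1685-1719) handle power management: runtime suspend/resume simply gate the controller clock, and the system-suspend hook walks every channel and refuses to suspend while any CCR still has EN set. A hedged sketch of that busy check; the warning text and return code are illustrative:

static int stm32_mdma_pm_suspend_sketch(struct device *dev)
{
	struct stm32_mdma_device *dmadev = dev_get_drvdata(dev);
	u32 ccr, id;
	int ret;

	/* wake the controller so the channel registers can be read */
	ret = pm_runtime_resume_and_get(dev);
	if (ret < 0)
		return ret;

	for (id = 0; id < dmadev->nr_channels; id++) {
		ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(id));
		if (ccr & STM32_MDMA_CCR_EN) {
			dev_warn(dev, "channel %u is busy, refusing suspend\n", id);
			pm_runtime_put_sync(dev);
			return -EBUSY;
		}
	}

	pm_runtime_put_sync(dev);
	return 0;
}
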