Lines Matching defs:tdma
189 struct tegra_dma *tdma;
231 static inline void tdma_write(struct tegra_dma *tdma, u32 reg, u32 val)
233 writel(val, tdma->base_addr + reg);
236 static inline u32 tdma_read(struct tegra_dma *tdma, u32 reg)
238 return readl(tdma->base_addr + reg);
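
The two accessors above (231-238) funnel every register access through the controller's MMIO window at base_addr; all of the tdma_write/tdma_read matches below go through them. As a usage illustration only (tdma_set_bits is hypothetical, not part of the driver):

/* Hypothetical helper: read-modify-write via the listed accessors. */
static void tdma_set_bits(struct tegra_dma *tdma, u32 reg, u32 mask)
{
        tdma_write(tdma, reg, tdma_read(tdma, reg) | mask);
}
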
360 struct tegra_dma *tdma = tdc->tdma;
362 spin_lock(&tdma->global_lock);
364 if (tdc->tdma->global_pause_count == 0) {
365 tdma_write(tdma, TEGRA_APBDMA_GENERAL, 0);
370 tdc->tdma->global_pause_count++;
372 spin_unlock(&tdma->global_lock);
377 struct tegra_dma *tdma = tdc->tdma;
379 spin_lock(&tdma->global_lock);
381 if (WARN_ON(tdc->tdma->global_pause_count == 0))
384 if (--tdc->tdma->global_pause_count == 0)
385 tdma_write(tdma, TEGRA_APBDMA_GENERAL,
389 spin_unlock(&tdma->global_lock);
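
The matches at 360-389 describe a refcounted global pause taken under tdma->global_lock: the first pauser writes 0 to TEGRA_APBDMA_GENERAL to halt every channel, nested pausers only bump global_pause_count, and the last resumer turns the controller back on. The resume side, reassembled from the matched lines (the out label and the TEGRA_APBDMA_GENERAL_ENABLE value are inferred, the latter from the init write at 1436):

static void tegra_dma_global_resume(struct tegra_dma_channel *tdc)
{
        struct tegra_dma *tdma = tdc->tdma;

        spin_lock(&tdma->global_lock);

        /* An unbalanced resume would underflow the refcount: warn and bail. */
        if (WARN_ON(tdc->tdma->global_pause_count == 0))
                goto out;

        /* Only the last resumer re-enables the controller. */
        if (--tdc->tdma->global_pause_count == 0)
                tdma_write(tdma, TEGRA_APBDMA_GENERAL,
                           TEGRA_APBDMA_GENERAL_ENABLE);

out:
        spin_unlock(&tdma->global_lock);
}
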
395 struct tegra_dma *tdma = tdc->tdma;
397 if (tdma->chip_data->support_channel_pause) {
409 struct tegra_dma *tdma = tdc->tdma;
411 if (tdma->chip_data->support_channel_pause)
449 if (tdc->tdma->chip_data->support_separate_wcount_reg)
490 if (tdc->tdma->chip_data->support_separate_wcount_reg)
569 pm_runtime_put(tdc->tdma->dev);
609 pm_runtime_put(tdc->tdma->dev);
726 err = pm_runtime_resume_and_get(tdc->tdma->dev);
771 if (tdc->tdma->chip_data->support_separate_wcount_reg)
787 pm_runtime_put(tdc->tdma->dev);
821 err = pm_runtime_resume_and_get(tdc->tdma->dev);
836 pm_runtime_put(tdc->tdma->dev);
847 if (tdc->tdma->chip_data->support_separate_wcount_reg)
852 if (!tdc->tdma->chip_data->support_separate_wcount_reg)
1040 if (tdc->tdma->chip_data->support_separate_wcount_reg)
1120 len > tdc->tdma->chip_data->max_dma_count) {
1220 len > tdc->tdma->chip_data->max_dma_count) {
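
The scattered matches at 569-852 show the channel-level power pattern: every path that touches channel registers takes a reference with pm_runtime_resume_and_get() (726, 821) and drops it with pm_runtime_put() once the channel is idle (569, 609, 787, 836), while the prep routines at 1120 and 1220 reject any request longer than chip_data->max_dma_count. A minimal sketch of that bracket, with tegra_dma_do_work() as a hypothetical stand-in for the register accesses:

static int tegra_dma_touch_hw(struct tegra_dma_channel *tdc)
{
        int err;

        /* Hold a runtime-PM reference so the DMA clock stays on. */
        err = pm_runtime_resume_and_get(tdc->tdma->dev);
        if (err < 0)
                return err;

        tegra_dma_do_work(tdc);         /* hypothetical register work */

        /* Let the device power down again once the channel is idle. */
        pm_runtime_put(tdc->tdma->dev);

        return 0;
}
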
1360 struct tegra_dma *tdma = ofdma->of_dma_data;
1365 dev_err(tdma->dev, "Invalid slave id: %d\n", dma_spec->args[0]);
1369 chan = dma_get_any_slave_channel(&tdma->dma_dev);
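
The OF translation at 1360-1369 turns a #dma-cells argument into a channel: dma_spec->args[0] carries the APB request-select (slave) id, which is validated before any free channel is handed out. Reassembled from the matches; the TEGRA_APBDMA_CSR_REQ_SEL_MASK bound and the slave_id store are inferred:

static struct dma_chan *tegra_dma_of_xlate(struct of_phandle_args *dma_spec,
                                           struct of_dma *ofdma)
{
        struct tegra_dma *tdma = ofdma->of_dma_data;
        struct tegra_dma_channel *tdc;
        struct dma_chan *chan;

        if (dma_spec->args[0] > TEGRA_APBDMA_CSR_REQ_SEL_MASK) {
                dev_err(tdma->dev, "Invalid slave id: %d\n", dma_spec->args[0]);
                return NULL;
        }

        chan = dma_get_any_slave_channel(&tdma->dma_dev);
        if (!chan)
                return NULL;

        /* Remember which request line this channel should service. */
        tdc = to_tegra_dma_chan(chan);
        tdc->slave_id = dma_spec->args[0];

        return chan;
}
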
1415 static int tegra_dma_init_hw(struct tegra_dma *tdma)
1419 err = reset_control_assert(tdma->rst);
1421 dev_err(tdma->dev, "failed to assert reset: %d\n", err);
1425 err = clk_enable(tdma->dma_clk);
1427 dev_err(tdma->dev, "failed to enable clk: %d\n", err);
1433 reset_control_deassert(tdma->rst);
1436 tdma_write(tdma, TEGRA_APBDMA_GENERAL, TEGRA_APBDMA_GENERAL_ENABLE);
1437 tdma_write(tdma, TEGRA_APBDMA_CONTROL, 0);
1438 tdma_write(tdma, TEGRA_APBDMA_IRQ_MASK_SET, 0xFFFFFFFF);
1440 clk_disable(tdma->dma_clk);
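
tegra_dma_init_hw (1415-1440) is the bring-up sequence: hold the block in reset, ungate its clock, release reset, program the global registers, then gate the clock again (runtime PM re-enables it per transfer, see the matches above). Reassembled from the listed lines; the short delay under reset and the final return are inferred:

static int tegra_dma_init_hw(struct tegra_dma *tdma)
{
        int err;

        err = reset_control_assert(tdma->rst);
        if (err) {
                dev_err(tdma->dev, "failed to assert reset: %d\n", err);
                return err;
        }

        err = clk_enable(tdma->dma_clk);
        if (err) {
                dev_err(tdma->dev, "failed to enable clk: %d\n", err);
                return err;
        }

        /* Give the block a moment in reset before releasing it (inferred). */
        udelay(2);
        reset_control_deassert(tdma->rst);

        /* Enable the controller globally, clear CONTROL and program the
         * IRQ mask-set register for all 32 channels. */
        tdma_write(tdma, TEGRA_APBDMA_GENERAL, TEGRA_APBDMA_GENERAL_ENABLE);
        tdma_write(tdma, TEGRA_APBDMA_CONTROL, 0);
        tdma_write(tdma, TEGRA_APBDMA_IRQ_MASK_SET, 0xFFFFFFFF);

        clk_disable(tdma->dma_clk);

        return 0;
}
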
1448 struct tegra_dma *tdma;
1454 size = struct_size(tdma, channels, cdata->nr_channels);
1456 tdma = devm_kzalloc(&pdev->dev, size, GFP_KERNEL);
1457 if (!tdma)
1460 tdma->dev = &pdev->dev;
1461 tdma->chip_data = cdata;
1462 platform_set_drvdata(pdev, tdma);
1464 tdma->base_addr = devm_platform_ioremap_resource(pdev, 0);
1465 if (IS_ERR(tdma->base_addr))
1466 return PTR_ERR(tdma->base_addr);
1468 tdma->dma_clk = devm_clk_get(&pdev->dev, NULL);
1469 if (IS_ERR(tdma->dma_clk)) {
1471 return PTR_ERR(tdma->dma_clk);
1474 tdma->rst = devm_reset_control_get(&pdev->dev, "dma");
1475 if (IS_ERR(tdma->rst)) {
1477 return PTR_ERR(tdma->rst);
1480 spin_lock_init(&tdma->global_lock);
1482 ret = clk_prepare(tdma->dma_clk);
1486 ret = tegra_dma_init_hw(tdma);
1493 INIT_LIST_HEAD(&tdma->dma_dev.channels);
1495 struct tegra_dma_channel *tdc = &tdma->channels[i];
1498 tdc->chan_addr = tdma->base_addr +
1518 tdc->dma_chan.device = &tdma->dma_dev;
1521 &tdma->dma_dev.channels);
1522 tdc->tdma = tdma;
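
The probe loop at 1493-1522 carves the shared MMIO window into per-channel slices and links each channel into the dmaengine device. A sketch of the per-channel setup assembled from the matches (the base-offset macro and the chip_data->channel_reg_size stride are assumed names):

for (i = 0; i < cdata->nr_channels; i++) {
        struct tegra_dma_channel *tdc = &tdma->channels[i];

        /* Each channel owns a fixed-size slice of the register window
         * (offset and stride names assumed). */
        tdc->chan_addr = tdma->base_addr +
                         TEGRA_APBDMA_CHANNEL_BASE_ADD_OFFSET +
                         (i * cdata->channel_reg_size);

        tdc->dma_chan.device = &tdma->dma_dev;
        dma_cookie_init(&tdc->dma_chan);        /* inferred */
        list_add_tail(&tdc->dma_chan.device_node,
                      &tdma->dma_dev.channels);
        tdc->tdma = tdma;
        tdc->id = i;                            /* inferred */
}
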
1536 dma_cap_set(DMA_SLAVE, tdma->dma_dev.cap_mask);
1537 dma_cap_set(DMA_PRIVATE, tdma->dma_dev.cap_mask);
1538 dma_cap_set(DMA_CYCLIC, tdma->dma_dev.cap_mask);
1540 tdma->global_pause_count = 0;
1541 tdma->dma_dev.dev = &pdev->dev;
1542 tdma->dma_dev.device_alloc_chan_resources =
1544 tdma->dma_dev.device_free_chan_resources =
1546 tdma->dma_dev.device_prep_slave_sg = tegra_dma_prep_slave_sg;
1547 tdma->dma_dev.device_prep_dma_cyclic = tegra_dma_prep_dma_cyclic;
1548 tdma->dma_dev.src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
1552 tdma->dma_dev.dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
1556 tdma->dma_dev.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV);
1557 tdma->dma_dev.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;
1558 tdma->dma_dev.device_config = tegra_dma_slave_config;
1559 tdma->dma_dev.device_terminate_all = tegra_dma_terminate_all;
1560 tdma->dma_dev.device_synchronize = tegra_dma_synchronize;
1561 tdma->dma_dev.device_tx_status = tegra_dma_tx_status;
1562 tdma->dma_dev.device_issue_pending = tegra_dma_issue_pending;
1564 ret = dma_async_device_register(&tdma->dma_dev);
1572 tegra_dma_of_xlate, tdma);
1585 dma_async_device_unregister(&tdma->dma_dev);
1591 clk_unprepare(tdma->dma_clk);
1598 struct tegra_dma *tdma = platform_get_drvdata(pdev);
1601 dma_async_device_unregister(&tdma->dma_dev);
1603 clk_unprepare(tdma->dma_clk);
1610 struct tegra_dma *tdma = dev_get_drvdata(dev);
1612 clk_disable(tdma->dma_clk);
1619 struct tegra_dma *tdma = dev_get_drvdata(dev);
1621 return clk_enable(tdma->dma_clk);
1626 struct tegra_dma *tdma = dev_get_drvdata(dev);
1631 for (i = 0; i < tdma->chip_data->nr_channels; i++) {
1632 struct tegra_dma_channel *tdc = &tdma->channels[i];
1641 dev_err(tdma->dev, "channel %u busy\n", i);
1651 struct tegra_dma *tdma = dev_get_drvdata(dev);
1654 err = tegra_dma_init_hw(tdma);
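
The tail of the listing (1610-1654) shows the power-management split: runtime suspend/resume merely gate dma_clk, system suspend refuses if any channel is still busy, and system resume replays tegra_dma_init_hw() because the register state is lost across sleep. A sketch of how such callbacks are conventionally wired into dev_pm_ops (the callback and ops names here are assumptions):

static const struct dev_pm_ops tegra_dma_dev_pm_ops = {
        SET_RUNTIME_PM_OPS(tegra_dma_runtime_suspend,
                           tegra_dma_runtime_resume, NULL)
        SET_SYSTEM_SLEEP_PM_OPS(tegra_dma_dev_suspend, tegra_dma_dev_resume)
};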