Lines matching refs:atdma

33  * at_dma_ / atdma	: Atmel DMA controller entity related
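
The helpers these hits lean on are small. A minimal sketch of to_at_dma() and the dma_readl()/dma_writel() wrappers, assuming the layout used by at_hdmac_regs.h (treat the exact macro bodies as an assumption):

    /* container_of() walk from the generic dma_device back to the
     * driver-private struct at_dma; dma_common is the embedded member
     * seen throughout the hits below. */
    static inline struct at_dma *to_at_dma(struct dma_device *ddev)
    {
            return container_of(ddev, struct at_dma, dma_common);
    }

    /* MMIO accessors: the register offset is pasted from the name,
     * relative to the ioremap()ed base kept in atdma->regs */
    #define dma_readl(atdma, name) \
            __raw_readl((atdma)->regs + AT_DMA_##name)
    #define dma_writel(atdma, name, val) \
            __raw_writel((val), (atdma)->regs + AT_DMA_##name)
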
106 struct at_dma *atdma = to_at_dma(chan->device);
109 desc = dma_pool_zalloc(atdma->dma_desc_pool, gfp_flags, &phys);
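
Descriptor memory comes from a coherent DMA pool, so one call returns both a CPU pointer and the matching bus address. A hedged sketch of the pattern behind lines 106-109 (everything beyond the two listed lines is an assumption):

    dma_addr_t phys;
    struct at_desc *desc;

    /* zeroed descriptor plus its hardware-visible address */
    desc = dma_pool_zalloc(atdma->dma_desc_pool, gfp_flags, &phys);
    if (desc)
            desc->txd.phys = phys;  /* kept for dma_pool_free() later */
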
211 struct at_dma *atdma = to_at_dma(atchan->chan_common.device);
242 dma_writel(atdma, CHER, atchan->mask);
437 struct at_dma *atdma = to_at_dma(atchan->chan_common.device);
468 dma_pool_free(atdma->memset_pool, desc->memset_vaddr,
587 struct at_dma *atdma = (struct at_dma *)dev_id;
594 imr = dma_readl(atdma, EBCIMR);
595 status = dma_readl(atdma, EBCISR);
601 dev_vdbg(atdma->dma_common.dev,
605 for (i = 0; i < atdma->dma_common.chancnt; i++) {
606 atchan = &atdma->chan[i];
610 dma_writel(atdma, CHDR,
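
Lines 587-610 are the interrupt handler. It masks raw status against the enabled sources, then walks every channel; on an AHB error the channel is disabled outright through CHDR. A sketch of that shape (completion handling and the real handler's re-read loop are omitted; AT_DMA_ERR() is the per-channel error bit):

    static irqreturn_t at_dma_interrupt(int irq, void *dev_id)
    {
            struct at_dma *atdma = dev_id;
            u32 imr, status, pending;
            int i;

            imr = dma_readl(atdma, EBCIMR);
            status = dma_readl(atdma, EBCISR);
            pending = status & imr;         /* only enabled sources count */
            if (!pending)
                    return IRQ_NONE;

            for (i = 0; i < atdma->dma_common.chancnt; i++) {
                    struct at_dma_chan *atchan = &atdma->chan[i];

                    if (pending & AT_DMA_ERR(i))
                            /* kill the channel on an AHB error */
                            dma_writel(atdma, CHDR,
                                       AT_DMA_RES(i) | atchan->mask);
            }
            return IRQ_HANDLED;
    }
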
885 struct at_dma *atdma = to_at_dma(chan->device);
904 vaddr = dma_pool_alloc(atdma->memset_pool, GFP_NOWAIT, &paddr);
934 dma_pool_free(atdma->memset_pool, vaddr, paddr);
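
The memset paths (lines 885-962) appear to work around the controller having no fill engine: a 4-byte scratch buffer from memset_pool holds the pattern and serves as the DMA source (a fixed, non-incrementing source address is an assumption here). A sketch, including the failure path that explains the dma_pool_free() hits above:

    dma_addr_t paddr;
    int *vaddr;

    vaddr = dma_pool_alloc(atdma->memset_pool, GFP_NOWAIT, &paddr);
    if (!vaddr)
            return NULL;
    *vaddr = value;                 /* pattern the transfer reads from */

    /* ... program a transfer reading repeatedly from paddr ... */
    /* if building the descriptor fails, the scratch buffer goes back: */
    dma_pool_free(atdma->memset_pool, vaddr, paddr);
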
945 struct at_dma *atdma = to_at_dma(chan->device);
962 vaddr = dma_pool_alloc(atdma->memset_pool, GFP_NOWAIT, &paddr);
1341 struct at_dma *atdma = to_at_dma(chan->device);
1349 dma_writel(atdma, CHER, AT_DMA_SUSP(chan_id));
1360 struct at_dma *atdma = to_at_dma(chan->device);
1371 dma_writel(atdma, CHDR, AT_DMA_RES(chan_id));
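
Pause and resume (lines 1341-1371) are symmetric single-register writes: the channel's suspend bit goes in through CHER, the matching resume bit through CHDR. A minimal sketch, with the locking and paused-state bookkeeping of the real driver omitted:

    static int atc_pause(struct dma_chan *chan)
    {
            struct at_dma *atdma = to_at_dma(chan->device);

            dma_writel(atdma, CHER, AT_DMA_SUSP(chan->chan_id));
            return 0;
    }

    static int atc_resume(struct dma_chan *chan)
    {
            struct at_dma *atdma = to_at_dma(chan->device);

            dma_writel(atdma, CHDR, AT_DMA_RES(chan->chan_id));
            return 0;
    }
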
1382 struct at_dma *atdma = to_at_dma(chan->device);
1397 dma_writel(atdma, CHDR, AT_DMA_RES(chan_id) | atchan->mask);
1400 while (dma_readl(atdma, CHSR) & atchan->mask)
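
terminate_all (lines 1382-1400) folds resume and disable into one CHDR write, then polls CHSR until the hardware confirms the channel is off. Sketched below; descriptor teardown and locking are left out, and cpu_relax() in the poll loop is an assumption:

    /* resume (in case it was paused) and disable in one shot */
    dma_writel(atdma, CHDR, AT_DMA_RES(chan_id) | atchan->mask);

    /* wait for the channel to actually stop */
    while (dma_readl(atdma, CHSR) & atchan->mask)
            cpu_relax();
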
1498 struct at_dma *atdma = to_at_dma(chan->device);
1525 BUG_ON(!atslave->dma_dev || atslave->dma_dev != atdma->dma_common.dev);
1536 dev_err(atdma->dma_common.dev,
1561 struct at_dma *atdma = to_at_dma(chan->device);
1574 dma_pool_free(atdma->dma_desc_pool, desc, desc->txd.phys);
1728 * @atdma: the Atmel HDMAC device
1730 static void at_dma_off(struct at_dma *atdma)
1732 dma_writel(atdma, EN, 0);
1735 dma_writel(atdma, EBCIDR, -1L);
1738 while (dma_readl(atdma, CHSR) & atdma->all_chan_mask)
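
The fragments at 1730-1738 assemble into the whole at_dma_off() helper, give or take the wait-loop body (cpu_relax() is an assumption):

    static void at_dma_off(struct at_dma *atdma)
    {
            dma_writel(atdma, EN, 0);

            /* disable all interrupt sources */
            dma_writel(atdma, EBCIDR, -1L);

            /* confirm that every channel has stopped */
            while (dma_readl(atdma, CHSR) & atdma->all_chan_mask)
                    cpu_relax();
    }
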
1745 struct at_dma *atdma;
1776 atdma = kzalloc(size, GFP_KERNEL);
1777 if (!atdma)
1781 atdma->dma_common.cap_mask = plat_dat->cap_mask;
1782 atdma->all_chan_mask = (1 << plat_dat->nr_channels) - 1;
1790 atdma->regs = ioremap(io->start, size);
1791 if (!atdma->regs) {
1796 atdma->clk = clk_get(&pdev->dev, "dma_clk");
1797 if (IS_ERR(atdma->clk)) {
1798 err = PTR_ERR(atdma->clk);
1801 err = clk_prepare_enable(atdma->clk);
1806 at_dma_off(atdma);
1808 err = request_irq(irq, at_dma_interrupt, 0, "at_hdmac", atdma);
1812 platform_set_drvdata(pdev, atdma);
1815 atdma->dma_desc_pool = dma_pool_create("at_hdmac_desc_pool",
1818 if (!atdma->dma_desc_pool) {
1825 atdma->memset_pool = dma_pool_create("at_hdmac_memset_pool",
1827 if (!atdma->memset_pool) {
1834 while (dma_readl(atdma, EBCISR))
1838 INIT_LIST_HEAD(&atdma->dma_common.channels);
1840 struct at_dma_chan *atchan = &atdma->chan[i];
1844 atchan->chan_common.device = &atdma->dma_common;
1847 &atdma->dma_common.channels);
1849 atchan->ch_regs = atdma->regs + ch_regs(i);
1858 atc_enable_chan_irq(atdma, i);
1862 atdma->dma_common.device_alloc_chan_resources = atc_alloc_chan_resources;
1863 atdma->dma_common.device_free_chan_resources = atc_free_chan_resources;
1864 atdma->dma_common.device_tx_status = atc_tx_status;
1865 atdma->dma_common.device_issue_pending = atc_issue_pending;
1866 atdma->dma_common.dev = &pdev->dev;
1869 if (dma_has_cap(DMA_INTERLEAVE, atdma->dma_common.cap_mask))
1870 atdma->dma_common.device_prep_interleaved_dma = atc_prep_dma_interleaved;
1872 if (dma_has_cap(DMA_MEMCPY, atdma->dma_common.cap_mask))
1873 atdma->dma_common.device_prep_dma_memcpy = atc_prep_dma_memcpy;
1875 if (dma_has_cap(DMA_MEMSET, atdma->dma_common.cap_mask)) {
1876 atdma->dma_common.device_prep_dma_memset = atc_prep_dma_memset;
1877 atdma->dma_common.device_prep_dma_memset_sg = atc_prep_dma_memset_sg;
1878 atdma->dma_common.fill_align = DMAENGINE_ALIGN_4_BYTES;
1881 if (dma_has_cap(DMA_SLAVE, atdma->dma_common.cap_mask)) {
1882 atdma->dma_common.device_prep_slave_sg = atc_prep_slave_sg;
1884 dma_cap_set(DMA_CYCLIC, atdma->dma_common.cap_mask);
1885 atdma->dma_common.device_prep_dma_cyclic = atc_prep_dma_cyclic;
1886 atdma->dma_common.device_config = atc_config;
1887 atdma->dma_common.device_pause = atc_pause;
1888 atdma->dma_common.device_resume = atc_resume;
1889 atdma->dma_common.device_terminate_all = atc_terminate_all;
1890 atdma->dma_common.src_addr_widths = ATC_DMA_BUSWIDTHS;
1891 atdma->dma_common.dst_addr_widths = ATC_DMA_BUSWIDTHS;
1892 atdma->dma_common.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV);
1893 atdma->dma_common.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;
1896 dma_writel(atdma, EN, AT_DMA_ENABLE);
1899 dma_has_cap(DMA_MEMCPY, atdma->dma_common.cap_mask) ? "cpy " : "",
1900 dma_has_cap(DMA_MEMSET, atdma->dma_common.cap_mask) ? "set " : "",
1901 dma_has_cap(DMA_SLAVE, atdma->dma_common.cap_mask) ? "slave " : "",
1904 err = dma_async_device_register(&atdma->dma_common);
1917 at_dma_xlate, atdma);
1927 dma_async_device_unregister(&atdma->dma_common);
1929 dma_pool_destroy(atdma->memset_pool);
1931 dma_pool_destroy(atdma->dma_desc_pool);
1933 free_irq(platform_get_irq(pdev, 0), atdma);
1935 clk_disable_unprepare(atdma->clk);
1937 clk_put(atdma->clk);
1939 iounmap(atdma->regs);
1940 atdma->regs = NULL;
1944 kfree(atdma);
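
The probe hits (1745 onward) acquire resources in a fixed order: allocation, ioremap, clock, IRQ, descriptor and memset pools, channel setup, capability wiring, registration. The tail at 1927-1944 is the matching error unwind, releasing everything in exact reverse order. A sketch of that goto ladder (the label names are assumptions):

    err_unregister:
            dma_async_device_unregister(&atdma->dma_common);
    err_memset_pool:
            dma_pool_destroy(atdma->memset_pool);
    err_desc_pool:
            dma_pool_destroy(atdma->dma_desc_pool);
    err_irq:
            free_irq(platform_get_irq(pdev, 0), atdma);
    err_clk_enable:
            clk_disable_unprepare(atdma->clk);
    err_clk:
            clk_put(atdma->clk);
    err_ioremap:
            iounmap(atdma->regs);
            atdma->regs = NULL;
    err_alloc:
            kfree(atdma);
            return err;
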
1950 struct at_dma *atdma = platform_get_drvdata(pdev);
1954 at_dma_off(atdma);
1957 dma_async_device_unregister(&atdma->dma_common);
1959 dma_pool_destroy(atdma->memset_pool);
1960 dma_pool_destroy(atdma->dma_desc_pool);
1961 free_irq(platform_get_irq(pdev, 0), atdma);
1963 list_for_each_entry_safe(chan, _chan, &atdma->dma_common.channels,
1968 atc_disable_chan_irq(atdma, chan->chan_id);
1974 clk_disable_unprepare(atdma->clk);
1975 clk_put(atdma->clk);
1977 iounmap(atdma->regs);
1978 atdma->regs = NULL;
1983 kfree(atdma);
1990 struct at_dma *atdma = platform_get_drvdata(pdev);
1993 clk_disable_unprepare(atdma->clk);
1998 struct at_dma *atdma = dev_get_drvdata(dev);
2001 list_for_each_entry_safe(chan, _chan, &atdma->dma_common.channels,
2032 struct at_dma *atdma = dev_get_drvdata(dev);
2036 list_for_each_entry_safe(chan, _chan, &atdma->dma_common.channels,
2044 atdma->save_imr = dma_readl(atdma, EBCIMR);
2047 at_dma_off(atdma);
2048 clk_disable_unprepare(atdma->clk);
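
The noirq suspend path (2032-2048) walks the channel list (per-channel cyclic handling not shown), saves the enabled-interrupt mask for resume, then shuts the controller and its clock down. The essential steps, sketched:

    /* remember which interrupt sources were enabled */
    atdma->save_imr = dma_readl(atdma, EBCIMR);

    /* disable the controller, then gate its clock */
    at_dma_off(atdma);
    clk_disable_unprepare(atdma->clk);
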
2054 struct at_dma *atdma = to_at_dma(atchan->chan_common.device);
2063 dma_writel(atdma, CHER, atchan->mask);
2073 struct at_dma *atdma = dev_get_drvdata(dev);
2077 clk_prepare_enable(atdma->clk);
2078 dma_writel(atdma, EN, AT_DMA_ENABLE);
2081 while (dma_readl(atdma, EBCISR))
2085 dma_writel(atdma, EBCIER, atdma->save_imr);
2086 list_for_each_entry_safe(chan, _chan, &atdma->dma_common.channels,
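
Resume (2073-2086) mirrors suspend: clock and controller come back first, stale status bits are drained, then the mask saved at suspend time is rewritten through EBCIER and the channel list is walked again (the helper around 2054-2063 re-enables channels via CHER). Sketch, with cpu_relax() in the drain loop as an assumption:

    clk_prepare_enable(atdma->clk);
    dma_writel(atdma, EN, AT_DMA_ENABLE);

    /* clear any interrupt status left over from before suspend */
    while (dma_readl(atdma, EBCISR))
            cpu_relax();

    /* restore the interrupt sources that were enabled at suspend */
    dma_writel(atdma, EBCIER, atdma->save_imr);
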