Lines Matching defs:atdma
36 * at_dma_ / atdma : Atmel DMA controller entity related
209 * struct atdma_sg - atdma scatter gather entry
260 * @atdma: pointer to the driver data.
276 struct at_dma *atdma;
353 #define dma_readl(atdma, name) \
354 __raw_readl((atdma)->regs + AT_DMA_##name)
355 #define dma_writel(atdma, name, val) \
356 __raw_writel((val), (atdma)->regs + AT_DMA_##name)
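Editor's note: these two accessor macros wrap __raw_readl()/__raw_writel() over the controller's global register block; the AT_DMA_##name token pasting turns a register name into its offset constant from the same header. A minimal usage sketch, grounded in the enable and poll idioms visible at lines 2060 and 1924:

	/* Enable the whole controller, then wait until no channel is active. */
	dma_writel(atdma, EN, AT_DMA_ENABLE);
	while (dma_readl(atdma, CHSR) & atdma->all_chan_mask)
		cpu_relax();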
384 struct at_dma *atdma = to_at_dma(atchan->vc.chan.device);
389 dma_readl(atdma, EBCIMR),
390 dma_readl(atdma, CHSR));
414 static void atc_setup_irq(struct at_dma *atdma, int chan_id, int on)
422 dma_writel(atdma, EBCIER, ebci);
424 dma_writel(atdma, EBCIDR, ebci);
427 static void atc_enable_chan_irq(struct at_dma *atdma, int chan_id)
429 atc_setup_irq(atdma, chan_id, 1);
432 static void atc_disable_chan_irq(struct at_dma *atdma, int chan_id)
434 atc_setup_irq(atdma, chan_id, 0);
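Editor's note: atc_enable_chan_irq()/atc_disable_chan_irq() are thin wrappers around atc_setup_irq(), which writes the same per-channel bit mask to either the interrupt-enable (EBCIER) or interrupt-disable (EBCIDR) register. A hedged sketch of the plausible full helper, assuming AT_DMA_BTC() and AT_DMA_ERR() per-channel bit macros for buffer-transfer-complete and error events:

	static void atc_setup_irq(struct at_dma *atdma, int chan_id, int on)
	{
		u32 ebci;

		/* interrupt on buffer transfer completion & error (assumed bits) */
		ebci = AT_DMA_BTC(chan_id) | AT_DMA_ERR(chan_id);
		if (on)
			dma_writel(atdma, EBCIER, ebci);
		else
			dma_writel(atdma, EBCIDR, ebci);
	}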
444 struct at_dma *atdma = to_at_dma(atchan->vc.chan.device);
446 return !!(dma_readl(atdma, CHSR) & atchan->mask);
579 dma_writel(atchan->atdma, CHER, atchan->mask);
586 struct at_dma *atdma = to_at_dma(vd->tx.chan->device);
592 dma_pool_free(atdma->lli_pool, desc->sg[i].lli,
598 dma_pool_free(atdma->memset_pool, desc->memset_vaddr,
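Editor's note: lines 586-598 belong to the descriptor destructor. Every scatter-gather entry returns its hardware linked-list item (LLI) to lli_pool, and a memset descriptor additionally frees the staging word it drew from memset_pool. A hedged reconstruction, assuming a desc->sglen count and a desc->memset_buffer flag:

	static void atdma_desc_free(struct virt_dma_desc *vd)
	{
		struct at_dma *atdma = to_at_dma(vd->tx.chan->device);
		struct at_desc *desc = to_atdma_desc(&vd->tx);
		unsigned int i;

		for (i = 0; i < desc->sglen; i++) {
			if (desc->sg[i].lli)
				dma_pool_free(atdma->lli_pool, desc->sg[i].lli,
					      desc->sg[i].lli_phys);
		}

		/* if the transfer was a memset, free the staging buffer too */
		if (desc->memset_buffer)
			dma_pool_free(atdma->memset_pool, desc->memset_vaddr,
				      desc->memset_paddr);

		kfree(desc);
	}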
774 dma_writel(atchan->atdma, CHDR, AT_DMA_RES(i) | atchan->mask);
818 struct at_dma *atdma = dev_id;
825 imr = dma_readl(atdma, EBCIMR);
826 status = dma_readl(atdma, EBCISR);
832 dev_vdbg(atdma->dma_device.dev,
836 for (i = 0; i < atdma->dma_device.chancnt; i++) {
837 atchan = &atdma->chan[i];
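Editor's note: the interrupt handler at line 818 takes the at_dma instance as dev_id, intersects the raw status (EBCISR) with the enabled mask (EBCIMR), and walks every channel that has a pending bit. A condensed sketch of that loop, assuming per-channel AT_DMA_BTC()/AT_DMA_ERR() bits and a hypothetical atc_handle_error() helper on the error leg:

	static irqreturn_t at_dma_interrupt(int irq, void *dev_id)
	{
		struct at_dma *atdma = dev_id;
		struct at_dma_chan *atchan;
		u32 status, pending, imr;
		int i, ret = IRQ_NONE;

		do {
			imr = dma_readl(atdma, EBCIMR);
			status = dma_readl(atdma, EBCISR);
			pending = status & imr;	/* only events we enabled */
			if (!pending)
				break;

			dev_vdbg(atdma->dma_device.dev,
				 "interrupt: status = 0x%08x, 0x%08x, 0x%08x\n",
				 status, imr, pending);

			for (i = 0; i < atdma->dma_device.chancnt; i++) {
				atchan = &atdma->chan[i];
				if (!(pending & (AT_DMA_BTC(i) | AT_DMA_ERR(i))))
					continue;
				if (pending & AT_DMA_ERR(i))
					atc_handle_error(atchan, i); /* hypothetical */
				/* completion handling elided */
				ret = IRQ_HANDLED;
			}
		} while (pending);	/* re-read: new events may have arrived */

		return ret;
	}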
861 struct at_dma *atdma = to_at_dma(chan->device);
928 atdma_sg->lli = dma_pool_alloc(atdma->lli_pool, GFP_NOWAIT,
964 struct at_dma *atdma = to_at_dma(chan->device);
1009 atdma_sg->lli = dma_pool_alloc(atdma->lli_pool, GFP_NOWAIT,
1044 struct at_dma *atdma = to_at_dma(chan->device);
1059 atdma_sg->lli = dma_pool_alloc(atdma->lli_pool, GFP_NOWAIT,
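Editor's note: every prep_* path in this listing (lines 861 through 1427) draws its hardware LLIs from the same DMA pool without sleeping, since prep callbacks may run with a spinlock held. A minimal sketch of the shared allocate-and-link idiom, assuming each atdma_sg pairs a CPU pointer with a bus address and the LLI carries a dscr next-descriptor field:

	struct atdma_sg *atdma_sg = &desc->sg[i];

	atdma_sg->lli = dma_pool_alloc(atdma->lli_pool, GFP_NOWAIT,
				       &atdma_sg->lli_phys);
	if (!atdma_sg->lli)
		goto err_desc_get;	/* unwind whatever was built so far */

	/* chain the previous LLI to this one via its hardware next pointer */
	if (i > 0)
		desc->sg[i - 1].lli->dscr = atdma_sg->lli_phys;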
1088 struct at_dma *atdma = to_at_dma(chan->device);
1109 vaddr = dma_pool_alloc(atdma->memset_pool, GFP_NOWAIT, &paddr);
1147 dma_pool_free(atdma->memset_pool, vaddr, paddr);
1158 struct at_dma *atdma = to_at_dma(chan->device);
1176 vaddr = dma_pool_alloc(atdma->memset_pool, GFP_NOWAIT, &paddr);
1225 dma_pool_free(atdma->memset_pool, vaddr, paddr);
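Editor's note: the controller has no register-programmed fill value, so the memset paths at lines 1088-1225 fake one. Each transfer carves a word out of memset_pool, stores the replicated fill pattern in it, and programs what is effectively a memory-to-memory copy whose source address never increments; the dma_pool_free() calls at lines 1147 and 1225 are the failure-path cleanup. A hedged sketch of the staging step, assuming the fill byte arrives already replicated across the 32-bit word:

	void *vaddr;
	dma_addr_t paddr;

	vaddr = dma_pool_alloc(atdma->memset_pool, GFP_NOWAIT, &paddr);
	if (!vaddr)
		return NULL;
	*(u32 *)vaddr = value;	/* pattern the channel will read repeatedly */

	/*
	 * The LLI is then set up with src = paddr and a fixed (non-
	 * incrementing) source address. On any later setup failure:
	 *	dma_pool_free(atdma->memset_pool, vaddr, paddr);
	 */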
1243 struct at_dma *atdma = to_at_dma(chan->device);
1293 atdma_sg->lli = dma_pool_alloc(atdma->lli_pool,
1342 atdma_sg->lli = dma_pool_alloc(atdma->lli_pool,
1421 struct at_dma *atdma = to_at_dma(chan->device);
1427 atdma_sg->lli = dma_pool_alloc(atdma->lli_pool, GFP_ATOMIC,
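Editor's note: line 1427 is the cyclic (ring-buffer) prep path, the only one allocating with GFP_ATOMIC rather than GFP_NOWAIT; GFP_ATOMIC may additionally dip into emergency reserves, which suits ALSA-style clients that prepare the ring from atomic context. From the consumer side the call chain is the generic dmaengine one; a hedged client sketch with made-up buffer sizes and a hypothetical per-period callback:

	struct dma_async_tx_descriptor *txd;

	/* ring of 8 periods of 4 KiB each, memory-to-device (assumed sizes) */
	txd = dmaengine_prep_dma_cyclic(chan, buf_dma, 8 * SZ_4K, SZ_4K,
					DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT);
	if (!txd)
		return -ENOMEM;
	txd->callback = period_done;	/* hypothetical: fires every period */
	dmaengine_submit(txd);
	dma_async_issue_pending(chan);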
1571 struct at_dma *atdma = to_at_dma(chan->device);
1579 dma_writel(atdma, CHER, AT_DMA_SUSP(chan_id));
1590 struct at_dma *atdma = to_at_dma(chan->device);
1601 dma_writel(atdma, CHDR, AT_DMA_RES(chan_id));
1612 struct at_dma *atdma = to_at_dma(chan->device);
1629 dma_writel(atdma, CHDR, AT_DMA_RES(chan_id) | atchan->mask);
1632 while (dma_readl(atdma, CHSR) & atchan->mask)
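Editor's note: pause, resume, and terminate (lines 1571-1632) all go through the same two registers: CHER with the per-channel AT_DMA_SUSP bit suspends a channel, CHDR with AT_DMA_RES resumes it, and terminate additionally writes the channel's enable mask into CHDR and then polls CHSR until the hardware reports the channel idle. A hedged sketch of the pause side, assuming an ATC_IS_PAUSED status bit:

	static int atc_pause(struct dma_chan *chan)
	{
		struct at_dma_chan *atchan = to_at_dma_chan(chan);
		struct at_dma *atdma = to_at_dma(chan->device);
		int chan_id = atchan->vc.chan.chan_id;
		unsigned long flags;

		spin_lock_irqsave(&atchan->vc.lock, flags);

		dma_writel(atdma, CHER, AT_DMA_SUSP(chan_id));
		set_bit(ATC_IS_PAUSED, &atchan->status);	/* assumed flag */

		spin_unlock_irqrestore(&atchan->vc.lock, flags);

		return 0;
	}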
1718 struct at_dma *atdma = to_at_dma(chan->device);
1738 BUG_ON(!atslave->dma_dev || atslave->dma_dev != atdma->dma_device.dev);
1914 * @atdma: the Atmel HDMAC device
1916 static void at_dma_off(struct at_dma *atdma)
1918 dma_writel(atdma, EN, 0);
1921 dma_writel(atdma, EBCIDR, -1L);
1924 while (dma_readl(atdma, CHSR) & atdma->all_chan_mask)
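Editor's note: lines 1916-1924 are nearly the whole shutdown helper: clear the global enable, mask every interrupt source via EBCIDR, then spin until CHSR shows all channels idle. Reconstructed, with the cpu_relax() the poll loop presumably carries:

	static void at_dma_off(struct at_dma *atdma)
	{
		dma_writel(atdma, EN, 0);

		/* disable all interrupts */
		dma_writel(atdma, EBCIDR, -1L);

		/* confirm that all channels are disabled */
		while (dma_readl(atdma, CHSR) & atdma->all_chan_mask)
			cpu_relax();
	}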
1930 struct at_dma *atdma;
1950 atdma = devm_kzalloc(&pdev->dev,
1951 struct_size(atdma, chan, plat_dat->nr_channels),
1953 if (!atdma)
1956 atdma->regs = devm_platform_ioremap_resource(pdev, 0);
1957 if (IS_ERR(atdma->regs))
1958 return PTR_ERR(atdma->regs);
1965 atdma->dma_device.cap_mask = plat_dat->cap_mask;
1966 atdma->all_chan_mask = (1 << plat_dat->nr_channels) - 1;
1968 atdma->clk = devm_clk_get(&pdev->dev, "dma_clk");
1969 if (IS_ERR(atdma->clk))
1970 return PTR_ERR(atdma->clk);
1972 err = clk_prepare_enable(atdma->clk);
1977 at_dma_off(atdma);
1979 err = request_irq(irq, at_dma_interrupt, 0, "at_hdmac", atdma);
1983 platform_set_drvdata(pdev, atdma);
1986 atdma->lli_pool = dma_pool_create("at_hdmac_lli_pool",
1989 if (!atdma->lli_pool) {
1996 atdma->memset_pool = dma_pool_create("at_hdmac_memset_pool",
1998 if (!atdma->memset_pool) {
2005 while (dma_readl(atdma, EBCISR))
2009 INIT_LIST_HEAD(&atdma->dma_device.channels);
2011 struct at_dma_chan *atchan = &atdma->chan[i];
2016 atchan->ch_regs = atdma->regs + ch_regs(i);
2019 atchan->atdma = atdma;
2021 vchan_init(&atchan->vc, &atdma->dma_device);
2022 atc_enable_chan_irq(atdma, i);
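Editor's note: the probe loop at lines 2009-2022 stamps out one at_dma_chan per hardware channel. Each gets its register window at a fixed stride from the controller base (the ch_regs(i) macro), a backlink to the parent, a virt-dma registration, and its interrupt bits enabled. A hedged reconstruction of the loop, assuming the descriptor destructor sketched earlier is wired in as desc_free:

	INIT_LIST_HEAD(&atdma->dma_device.channels);
	for (i = 0; i < plat_dat->nr_channels; i++) {
		struct at_dma_chan *atchan = &atdma->chan[i];

		atchan->mask = 1 << i;
		atchan->ch_regs = atdma->regs + ch_regs(i);
		atchan->atdma = atdma;
		atchan->vc.desc_free = atdma_desc_free;	/* assumed wiring */
		vchan_init(&atchan->vc, &atdma->dma_device);
		atc_enable_chan_irq(atdma, i);
	}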
2026 atdma->dma_device.device_alloc_chan_resources = atc_alloc_chan_resources;
2027 atdma->dma_device.device_free_chan_resources = atc_free_chan_resources;
2028 atdma->dma_device.device_tx_status = atc_tx_status;
2029 atdma->dma_device.device_issue_pending = atc_issue_pending;
2030 atdma->dma_device.dev = &pdev->dev;
2033 if (dma_has_cap(DMA_INTERLEAVE, atdma->dma_device.cap_mask))
2034 atdma->dma_device.device_prep_interleaved_dma = atc_prep_dma_interleaved;
2036 if (dma_has_cap(DMA_MEMCPY, atdma->dma_device.cap_mask))
2037 atdma->dma_device.device_prep_dma_memcpy = atc_prep_dma_memcpy;
2039 if (dma_has_cap(DMA_MEMSET, atdma->dma_device.cap_mask)) {
2040 atdma->dma_device.device_prep_dma_memset = atc_prep_dma_memset;
2041 atdma->dma_device.device_prep_dma_memset_sg = atc_prep_dma_memset_sg;
2042 atdma->dma_device.fill_align = DMAENGINE_ALIGN_4_BYTES;
2045 if (dma_has_cap(DMA_SLAVE, atdma->dma_device.cap_mask)) {
2046 atdma->dma_device.device_prep_slave_sg = atc_prep_slave_sg;
2048 dma_cap_set(DMA_CYCLIC, atdma->dma_device.cap_mask);
2049 atdma->dma_device.device_prep_dma_cyclic = atc_prep_dma_cyclic;
2050 atdma->dma_device.device_config = atc_config;
2051 atdma->dma_device.device_pause = atc_pause;
2052 atdma->dma_device.device_resume = atc_resume;
2053 atdma->dma_device.device_terminate_all = atc_terminate_all;
2054 atdma->dma_device.src_addr_widths = ATC_DMA_BUSWIDTHS;
2055 atdma->dma_device.dst_addr_widths = ATC_DMA_BUSWIDTHS;
2056 atdma->dma_device.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV);
2057 atdma->dma_device.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;
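Editor's note: the ATC_DMA_BUSWIDTHS mask used at lines 2054-2055 advertises which transfer widths a slave may request through dmaengine_slave_config(). A plausible definition, assuming the standard dmaengine bus-width flags (the exact set is an assumption, not taken from this listing):

	#define ATC_DMA_BUSWIDTHS				\
		(BIT(DMA_SLAVE_BUSWIDTH_UNDEFINED) |		\
		 BIT(DMA_SLAVE_BUSWIDTH_1_BYTE)    |		\
		 BIT(DMA_SLAVE_BUSWIDTH_2_BYTES)   |		\
		 BIT(DMA_SLAVE_BUSWIDTH_4_BYTES))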
2060 dma_writel(atdma, EN, AT_DMA_ENABLE);
2063 dma_has_cap(DMA_MEMCPY, atdma->dma_device.cap_mask) ? "cpy " : "",
2064 dma_has_cap(DMA_MEMSET, atdma->dma_device.cap_mask) ? "set " : "",
2065 dma_has_cap(DMA_SLAVE, atdma->dma_device.cap_mask) ? "slave " : "",
2068 err = dma_async_device_register(&atdma->dma_device);
2081 at_dma_xlate, atdma);
2091 dma_async_device_unregister(&atdma->dma_device);
2093 dma_pool_destroy(atdma->memset_pool);
2095 dma_pool_destroy(atdma->lli_pool);
2097 free_irq(platform_get_irq(pdev, 0), atdma);
2099 clk_disable_unprepare(atdma->clk);
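Editor's note: lines 2068-2099 are the tail of probe: device registration, then an unwind ladder that releases resources in exact reverse order of acquisition (the standard kernel goto pattern). A hedged sketch of how those labels plausibly line up; the label names are assumptions:

	err = dma_async_device_register(&atdma->dma_device);
	if (err)
		goto err_dma_async_device_register;

	/* ... of_dma_controller_register(..., at_dma_xlate, atdma) ... */

	return 0;

err_of_dma_controller_register:
	dma_async_device_unregister(&atdma->dma_device);
err_dma_async_device_register:
	dma_pool_destroy(atdma->memset_pool);
err_memset_pool_create:
	dma_pool_destroy(atdma->lli_pool);
err_lli_pool_create:
	free_irq(platform_get_irq(pdev, 0), atdma);
err_irq:
	clk_disable_unprepare(atdma->clk);
	return err;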
2105 struct at_dma *atdma = platform_get_drvdata(pdev);
2108 at_dma_off(atdma);
2111 dma_async_device_unregister(&atdma->dma_device);
2113 dma_pool_destroy(atdma->memset_pool);
2114 dma_pool_destroy(atdma->lli_pool);
2115 free_irq(platform_get_irq(pdev, 0), atdma);
2117 list_for_each_entry_safe(chan, _chan, &atdma->dma_device.channels,
2120 atc_disable_chan_irq(atdma, chan->chan_id);
2124 clk_disable_unprepare(atdma->clk);
2131 struct at_dma *atdma = platform_get_drvdata(pdev);
2134 clk_disable_unprepare(atdma->clk);
2139 struct at_dma *atdma = dev_get_drvdata(dev);
2142 list_for_each_entry_safe(chan, _chan, &atdma->dma_device.channels,
2173 struct at_dma *atdma = dev_get_drvdata(dev);
2177 list_for_each_entry_safe(chan, _chan, &atdma->dma_device.channels,
2185 atdma->save_imr = dma_readl(atdma, EBCIMR);
2188 at_dma_off(atdma);
2189 clk_disable_unprepare(atdma->clk);
2195 struct at_dma *atdma = to_at_dma(atchan->vc.chan.device);
2204 dma_writel(atdma, CHER, atchan->mask);
2214 struct at_dma *atdma = dev_get_drvdata(dev);
2218 clk_prepare_enable(atdma->clk);
2219 dma_writel(atdma, EN, AT_DMA_ENABLE);
2222 while (dma_readl(atdma, EBCISR))
2226 dma_writel(atdma, EBCIER, atdma->save_imr);
2227 list_for_each_entry_safe(chan, _chan, &atdma->dma_device.channels,
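Editor's note: the resume path (lines 2214-2227) mirrors suspend: re-enable the clock and the controller, drain stale events by reading EBCISR until it is clear, then restore the interrupt mask saved at line 2185 and walk the channels to restore per-channel state. A hedged reconstruction, assuming a saved per-channel CFG and a cyclic-restart helper:

	static int at_dma_resume_noirq(struct device *dev)
	{
		struct at_dma *atdma = dev_get_drvdata(dev);
		struct dma_chan *chan, *_chan;

		/* bring back DMA controller */
		clk_prepare_enable(atdma->clk);
		dma_writel(atdma, EN, AT_DMA_ENABLE);

		/* clear any pending interrupt */
		while (dma_readl(atdma, EBCISR))
			cpu_relax();

		/* restore saved data */
		dma_writel(atdma, EBCIER, atdma->save_imr);
		list_for_each_entry_safe(chan, _chan,
					 &atdma->dma_device.channels,
					 device_node) {
			struct at_dma_chan *atchan = to_at_dma_chan(chan);

			channel_writel(atchan, CFG, atchan->save_cfg); /* assumed */
			if (atc_chan_is_cyclic(chan))
				atc_resume_cyclic(atchan);	/* assumed helper */
		}
		return 0;
	}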