Lines matching references to atchan (struct at_xdmac_chan *) in drivers/dma/at_xdmac.c, the Microchip/Atmel XDMAC dmaengine driver; each entry below is prefixed with its source line number.
319 #define at_xdmac_chan_read(atchan, reg) readl_relaxed((atchan)->ch_regs + (reg))
320 #define at_xdmac_chan_write(atchan, reg, value) writel_relaxed((value), (atchan)->ch_regs + (reg))
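These two accessors wrap readl_relaxed()/writel_relaxed() around the channel's private register window; the relaxed variants skip the memory barrier, which is sufficient for ordered accesses to a single device window. A minimal sketch of the pattern (the structure layout is inferred from the accessors, not shown in the listing):

    #include <linux/io.h>

    struct chan_sketch {
            void __iomem *ch_regs;  /* base of this channel's registers */
    };

    /* Same shape as the at_xdmac_chan_{read,write} macros above. */
    #define chan_read(c, reg)       readl_relaxed((c)->ch_regs + (reg))
    #define chan_write(c, reg, v)   writel_relaxed((v), (c)->ch_regs + (reg))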
342 static inline int at_xdmac_chan_is_cyclic(struct at_xdmac_chan *atchan)
344 return test_bit(AT_XDMAC_CHAN_IS_CYCLIC, &atchan->status);
347 static inline int at_xdmac_chan_is_paused(struct at_xdmac_chan *atchan)
349 return test_bit(AT_XDMAC_CHAN_IS_PAUSED, &atchan->status);
352 static inline int at_xdmac_chan_is_paused_internal(struct at_xdmac_chan *atchan)
354 return test_bit(AT_XDMAC_CHAN_IS_PAUSED_INTERNAL, &atchan->status);
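All three predicates are the standard atomic-bitops pattern over the channel's unsigned long status word; test_bit() makes the checks safe against concurrent set_bit()/clear_bit() from the IRQ and PM paths. A sketch of the shape (the numeric bit values are assumptions; the listing only names the bits):

    #include <linux/bitops.h>

    /* Illustrative bit numbers only. */
    enum at_xdmac_chan_flags {
            AT_XDMAC_CHAN_IS_CYCLIC,
            AT_XDMAC_CHAN_IS_PAUSED,
            AT_XDMAC_CHAN_IS_PAUSED_INTERNAL,
    };

    static inline int chan_is_cyclic(unsigned long *status)
    {
            return test_bit(AT_XDMAC_CHAN_IS_CYCLIC, status);
    }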
373 static void at_xdmac_runtime_suspend_descriptors(struct at_xdmac_chan *atchan)
375 struct at_xdmac *atxdmac = to_at_xdmac(atchan->chan.device);
378 list_for_each_entry_safe(desc, _desc, &atchan->xfers_list, xfer_node) {
387 static int at_xdmac_runtime_resume_descriptors(struct at_xdmac_chan *atchan)
389 struct at_xdmac *atxdmac = to_at_xdmac(atchan->chan.device);
393 list_for_each_entry_safe(desc, _desc, &atchan->xfers_list, xfer_node) {
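Both helpers walk xfers_list with the _safe iterator and drop (suspend) or retake (resume) one runtime-PM reference per in-flight descriptor, which is why the resume variant can fail and returns int. A hedged sketch of the suspend side, assuming only started descriptors hold a reference (the active_xfer gate is an assumption based on common dmaengine practice):

    list_for_each_entry_safe(desc, _desc, &atchan->xfers_list, xfer_node) {
            if (!desc->active_xfer)         /* assumed gating condition */
                    continue;
            pm_runtime_mark_last_busy(atxdmac->dev);
            pm_runtime_put_autosuspend(atxdmac->dev);
    }

The resume counterpart would call pm_runtime_resume_and_get() for the same descriptors and propagate its error code.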
405 static bool at_xdmac_chan_is_enabled(struct at_xdmac_chan *atchan)
407 struct at_xdmac *atxdmac = to_at_xdmac(atchan->chan.device);
414 ret = !!(at_xdmac_chan_read(atchan, AT_XDMAC_GS) & atchan->mask);
425 struct at_xdmac_chan *atchan;
444 atchan = to_at_xdmac_chan(chan);
445 at_xdmac_runtime_suspend_descriptors(atchan);
454 static void at_xdmac_start_xfer(struct at_xdmac_chan *atchan,
457 struct at_xdmac *atxdmac = to_at_xdmac(atchan->chan.device);
465 dev_vdbg(chan2dev(&atchan->chan), "%s: desc 0x%p\n", __func__, first);
473 reg |= AT_XDMAC_CNDA_NDAIF(atchan->memif);
475 at_xdmac_chan_write(atchan, AT_XDMAC_CNDA, reg);
482 if (at_xdmac_chan_is_cyclic(atchan))
495 at_xdmac_chan_write(atchan, AT_XDMAC_CC, first->lld.mbr_cfg);
500 at_xdmac_chan_write(atchan, AT_XDMAC_CNDC, reg);
502 dev_vdbg(chan2dev(&atchan->chan),
504 __func__, at_xdmac_chan_read(atchan, AT_XDMAC_CC),
505 at_xdmac_chan_read(atchan, AT_XDMAC_CNDA),
506 at_xdmac_chan_read(atchan, AT_XDMAC_CNDC),
507 at_xdmac_chan_read(atchan, AT_XDMAC_CSA),
508 at_xdmac_chan_read(atchan, AT_XDMAC_CDA),
509 at_xdmac_chan_read(atchan, AT_XDMAC_CUBC));
511 at_xdmac_chan_write(atchan, AT_XDMAC_CID, 0xffffffff);
523 if (at_xdmac_chan_is_cyclic(atchan))
524 at_xdmac_chan_write(atchan, AT_XDMAC_CIE,
527 at_xdmac_chan_write(atchan, AT_XDMAC_CIE,
529 at_xdmac_write(atxdmac, AT_XDMAC_GIE, atchan->mask);
530 dev_vdbg(chan2dev(&atchan->chan),
531 "%s: enable channel (0x%08x)\n", __func__, atchan->mask);
533 at_xdmac_write(atxdmac, AT_XDMAC_GE, atchan->mask);
535 dev_vdbg(chan2dev(&atchan->chan),
537 __func__, at_xdmac_chan_read(atchan, AT_XDMAC_CC),
538 at_xdmac_chan_read(atchan, AT_XDMAC_CNDA),
539 at_xdmac_chan_read(atchan, AT_XDMAC_CNDC),
540 at_xdmac_chan_read(atchan, AT_XDMAC_CSA),
541 at_xdmac_chan_read(atchan, AT_XDMAC_CDA),
542 at_xdmac_chan_read(atchan, AT_XDMAC_CUBC));
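Read top to bottom, lines 454-542 are the canonical channel-start sequence: program the first descriptor's address into CNDA (tagged with the memory interface via NDAIF), set the channel configuration CC and next-descriptor control word CNDC, acknowledge stale interrupts through CID, unmask the per-channel causes in CIE (a different set for cyclic transfers), route the channel to the core with GIE, and only then flip the global-enable bit GE. A condensed sketch of that ordering (first_phys, ndaif, cndc, and cie_mask are placeholders):

    at_xdmac_chan_write(atchan, AT_XDMAC_CNDA, first_phys | ndaif);
    at_xdmac_chan_write(atchan, AT_XDMAC_CNDC, cndc);
    at_xdmac_chan_write(atchan, AT_XDMAC_CID, 0xffffffff);  /* ack all */
    at_xdmac_chan_write(atchan, AT_XDMAC_CIE, cie_mask);    /* per-chan */
    at_xdmac_write(atxdmac, AT_XDMAC_GIE, atchan->mask);    /* route irq */
    at_xdmac_write(atxdmac, AT_XDMAC_GE, atchan->mask);     /* start */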
548 struct at_xdmac_chan *atchan = to_at_xdmac_chan(tx->chan);
552 spin_lock_irqsave(&atchan->lock, irqflags);
555 list_add_tail(&desc->xfer_node, &atchan->xfers_list);
556 spin_unlock_irqrestore(&atchan->lock, irqflags);
558 dev_vdbg(chan2dev(tx->chan), "%s: atchan 0x%p, add desc 0x%p to xfers_list\n",
559 __func__, atchan, desc);
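tx_submit only queues: it takes the channel lock, assigns the dmaengine cookie, and appends the descriptor to xfers_list; nothing starts until issue_pending. A sketch, assuming the standard dma_cookie_assign() helper from drivers/dma/dmaengine.h and a hypothetical txd_to_at_desc() container-of wrapper:

    static dma_cookie_t sketch_tx_submit(struct dma_async_tx_descriptor *tx)
    {
            struct at_xdmac_chan *atchan = to_at_xdmac_chan(tx->chan);
            struct at_xdmac_desc *desc = txd_to_at_desc(tx); /* hypothetical */
            unsigned long flags;
            dma_cookie_t cookie;

            spin_lock_irqsave(&atchan->lock, flags);
            cookie = dma_cookie_assign(tx);
            list_add_tail(&desc->xfer_node, &atchan->xfers_list);
            spin_unlock_irqrestore(&atchan->lock, flags);
            return cookie;
    }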
592 static struct at_xdmac_desc *at_xdmac_get_desc(struct at_xdmac_chan *atchan)
596 if (list_empty(&atchan->free_descs_list)) {
597 desc = at_xdmac_alloc_desc(&atchan->chan, GFP_NOWAIT);
599 desc = list_first_entry(&atchan->free_descs_list,
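at_xdmac_get_desc() is a take-or-allocate descriptor pool: reuse the head of free_descs_list when possible, otherwise fall back to at_xdmac_alloc_desc() with GFP_NOWAIT, since prep callbacks may run in atomic context. Sketch of the pattern (the list_del() on reuse is an assumption; the listing stops at the list_first_entry() call):

    if (list_empty(&atchan->free_descs_list)) {
            desc = at_xdmac_alloc_desc(&atchan->chan, GFP_NOWAIT);
    } else {
            desc = list_first_entry(&atchan->free_descs_list,
                                    struct at_xdmac_desc, desc_node);
            list_del(&desc->desc_node);     /* assumed: unlink on reuse */
    }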
639 struct at_xdmac_chan *atchan;
654 atchan = to_at_xdmac_chan(chan);
655 atchan->memif = AT91_XDMAC_DT_GET_MEM_IF(dma_spec->args[0]);
656 atchan->perif = AT91_XDMAC_DT_GET_PER_IF(dma_spec->args[0]);
657 atchan->perid = AT91_XDMAC_DT_GET_PERID(dma_spec->args[0]);
659 atchan->memif, atchan->perif, atchan->perid);
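The OF translate hook decodes a single devicetree cell into three routing fields: the AHB interface used for memory, the one used for the peripheral, and the peripheral request ID. The AT91_XDMAC_DT_GET_* macros come from the at91 DT-binding header; a purely illustrative decode (the field offsets below are assumptions, not the binding's actual layout):

    /* Hypothetical layout for illustration only. */
    #define DT_GET_MEM_IF(cfg)  (((cfg) >> 13) & 0x1)
    #define DT_GET_PER_IF(cfg)  (((cfg) >> 14) & 0x1)
    #define DT_GET_PERID(cfg)   (((cfg) >> 24) & 0x7f)

    atchan->memif = DT_GET_MEM_IF(dma_spec->args[0]);
    atchan->perif = DT_GET_PER_IF(dma_spec->args[0]);
    atchan->perid = DT_GET_PERID(dma_spec->args[0]);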
667 struct at_xdmac_chan *atchan = to_at_xdmac_chan(chan);
668 struct at_xdmac *atxdmac = to_at_xdmac(atchan->chan.device);
672 atchan->cfg =
673 AT91_XDMAC_DT_PERID(atchan->perid)
681 atchan->cfg |= AT_XDMAC_CC_DIF(atchan->memif) |
682 AT_XDMAC_CC_SIF(atchan->perif);
684 csize = ffs(atchan->sconfig.src_maxburst) - 1;
689 atchan->cfg |= AT_XDMAC_CC_CSIZE(csize);
690 dwidth = ffs(atchan->sconfig.src_addr_width) - 1;
695 atchan->cfg |= AT_XDMAC_CC_DWIDTH(dwidth);
697 atchan->cfg =
698 AT91_XDMAC_DT_PERID(atchan->perid)
706 atchan->cfg |= AT_XDMAC_CC_DIF(atchan->perif) |
707 AT_XDMAC_CC_SIF(atchan->memif);
709 csize = ffs(atchan->sconfig.dst_maxburst) - 1;
714 atchan->cfg |= AT_XDMAC_CC_CSIZE(csize);
715 dwidth = ffs(atchan->sconfig.dst_addr_width) - 1;
720 atchan->cfg |= AT_XDMAC_CC_DWIDTH(dwidth);
723 dev_dbg(chan2dev(chan), "%s: cfg=0x%08x\n", __func__, atchan->cfg);
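Lines 667-720 assemble the per-direction CC word. CSIZE (burst chunk) and DWIDTH (beat width) are log2-encoded fields, hence the ffs() - 1 idiom, which is only meaningful for power-of-two maxburst and width values; the elided lines around 685-693 presumably range-check the results. Worked example:

    /* ffs() is 1-based: ffs(16) == 5, so maxburst 16 -> CSIZE 4
     * (chunk = 2^4 beats); a 4-byte width -> DWIDTH 2 (2^2 bytes). */
    csize  = ffs(atchan->sconfig.src_maxburst) - 1;
    dwidth = ffs(atchan->sconfig.src_addr_width) - 1;
    atchan->cfg |= AT_XDMAC_CC_CSIZE(csize) | AT_XDMAC_CC_DWIDTH(dwidth);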
749 struct at_xdmac_chan *atchan = to_at_xdmac_chan(chan);
756 memcpy(&atchan->sconfig, sconfig, sizeof(atchan->sconfig));
766 struct at_xdmac_chan *atchan = to_at_xdmac_chan(chan);
788 spin_lock_irqsave(&atchan->lock, irqflags);
807 desc = at_xdmac_get_desc(atchan);
812 &atchan->free_descs_list);
818 desc->lld.mbr_sa = atchan->sconfig.src_addr;
822 desc->lld.mbr_da = atchan->sconfig.dst_addr;
824 dwidth = at_xdmac_get_dwidth(atchan->cfg);
832 desc->lld.mbr_cfg = (atchan->cfg & ~AT_XDMAC_CC_DWIDTH_MASK) |
859 spin_unlock_irqrestore(&atchan->lock, irqflags);
869 struct at_xdmac_chan *atchan = to_at_xdmac_chan(chan);
884 if (test_and_set_bit(AT_XDMAC_CHAN_IS_CYCLIC, &atchan->status)) {
895 spin_lock_irqsave(&atchan->lock, irqflags);
896 desc = at_xdmac_get_desc(atchan);
901 &atchan->free_descs_list);
902 spin_unlock_irqrestore(&atchan->lock, irqflags);
905 spin_unlock_irqrestore(&atchan->lock, irqflags);
911 desc->lld.mbr_sa = atchan->sconfig.src_addr;
915 desc->lld.mbr_da = atchan->sconfig.dst_addr;
917 desc->lld.mbr_cfg = atchan->cfg;
979 struct at_xdmac_chan *atchan,
1036 spin_lock_irqsave(&atchan->lock, flags);
1037 desc = at_xdmac_get_desc(atchan);
1038 spin_unlock_irqrestore(&atchan->lock, flags);
1076 struct at_xdmac_chan *atchan = to_at_xdmac_chan(chan);
1101 first = at_xdmac_interleaved_queue_desc(chan, atchan,
1132 desc = at_xdmac_interleaved_queue_desc(chan, atchan,
1139 &atchan->free_descs_list);
1172 struct at_xdmac_chan *atchan = to_at_xdmac_chan(chan);
1213 spin_lock_irqsave(&atchan->lock, irqflags);
1214 desc = at_xdmac_get_desc(atchan);
1215 spin_unlock_irqrestore(&atchan->lock, irqflags);
1220 &atchan->free_descs_list);
1276 struct at_xdmac_chan *atchan,
1318 spin_lock_irqsave(&atchan->lock, flags);
1319 desc = at_xdmac_get_desc(atchan);
1320 spin_unlock_irqrestore(&atchan->lock, flags);
1356 struct at_xdmac_chan *atchan = to_at_xdmac_chan(chan);
1365 desc = at_xdmac_memset_create_desc(chan, atchan, dest, len, value);
1380 struct at_xdmac_chan *atchan = to_at_xdmac_chan(chan);
1398 desc = at_xdmac_memset_create_desc(chan, atchan,
1404 &atchan->free_descs_list);
1453 &atchan->free_descs_list);
1516 &atchan->free_descs_list);
1541 struct at_xdmac_chan *atchan = to_at_xdmac_chan(chan);
1542 struct at_xdmac *atxdmac = to_at_xdmac(atchan->chan.device);
1560 spin_lock_irqsave(&atchan->lock, flags);
1562 desc = list_first_entry(&atchan->xfers_list, struct at_xdmac_desc, xfer_node);
1590 at_xdmac_write(atxdmac, atxdmac->layout->gswf, atchan->mask);
1591 while (!(at_xdmac_chan_read(atchan, AT_XDMAC_CIS) & AT_XDMAC_CIS_FIS))
1622 check_nda = at_xdmac_chan_read(atchan, AT_XDMAC_CNDA) & 0xfffffffc;
1624 cur_ubc = at_xdmac_chan_read(atchan, AT_XDMAC_CUBC);
1626 initd = !!(at_xdmac_chan_read(atchan, AT_XDMAC_CC) & AT_XDMAC_CC_INITD);
1628 cur_nda = at_xdmac_chan_read(atchan, AT_XDMAC_CNDA) & 0xfffffffc;
1648 at_xdmac_write(atxdmac, atxdmac->layout->gswf, atchan->mask);
1649 while (!(at_xdmac_chan_read(atchan, AT_XDMAC_CIS) & AT_XDMAC_CIS_FIS))
1676 spin_unlock_irqrestore(&atchan->lock, flags);
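The residue path in tx_status (lines 1541-1676) has to snapshot a moving target: it first flushes the channel FIFO via the global software-flush register (gswf) and polls CIS for the FIS bit, then samples CNDA, CUBC, and CNDA again, retrying until the descriptor fetch has completed (CC.INITD set) and both CNDA reads agree, so CUBC is known to belong to the descriptor CNDA points at. Sketch of the retry loop (the memory barriers between reads are elided here):

    do {
            check_nda = at_xdmac_chan_read(atchan, AT_XDMAC_CNDA) & 0xfffffffc;
            cur_ubc   = at_xdmac_chan_read(atchan, AT_XDMAC_CUBC);
            initd = !!(at_xdmac_chan_read(atchan, AT_XDMAC_CC) &
                       AT_XDMAC_CC_INITD);
            cur_nda   = at_xdmac_chan_read(atchan, AT_XDMAC_CNDA) & 0xfffffffc;
    } while (!initd || check_nda != cur_nda);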
1682 static void at_xdmac_advance_work(struct at_xdmac_chan *atchan)
1690 if (at_xdmac_chan_is_enabled(atchan) || list_empty(&atchan->xfers_list))
1693 desc = list_first_entry(&atchan->xfers_list, struct at_xdmac_desc,
1695 dev_vdbg(chan2dev(&atchan->chan), "%s: desc 0x%p\n", __func__, desc);
1697 at_xdmac_start_xfer(atchan, desc);
1700 static void at_xdmac_handle_cyclic(struct at_xdmac_chan *atchan)
1705 spin_lock_irq(&atchan->lock);
1706 dev_dbg(chan2dev(&atchan->chan), "%s: status=0x%08x\n",
1707 __func__, atchan->irq_status);
1708 if (list_empty(&atchan->xfers_list)) {
1709 spin_unlock_irq(&atchan->lock);
1712 desc = list_first_entry(&atchan->xfers_list, struct at_xdmac_desc,
1714 spin_unlock_irq(&atchan->lock);
1720 /* Called with atchan->lock held. */
1721 static void at_xdmac_handle_error(struct at_xdmac_chan *atchan)
1723 struct at_xdmac *atxdmac = to_at_xdmac(atchan->chan.device);
1737 if (atchan->irq_status & AT_XDMAC_CIS_RBEIS)
1738 dev_err(chan2dev(&atchan->chan), "read bus error!!!");
1739 if (atchan->irq_status & AT_XDMAC_CIS_WBEIS)
1740 dev_err(chan2dev(&atchan->chan), "write bus error!!!");
1741 if (atchan->irq_status & AT_XDMAC_CIS_ROIS)
1742 dev_err(chan2dev(&atchan->chan), "request overflow error!!!");
1745 at_xdmac_write(atxdmac, AT_XDMAC_GD, atchan->mask);
1746 while (at_xdmac_read(atxdmac, AT_XDMAC_GS) & atchan->mask)
1749 bad_desc = list_first_entry(&atchan->xfers_list,
1754 dev_dbg(chan2dev(&atchan->chan),
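at_xdmac_handle_error() runs with the channel lock held (per the comment at source line 1720). It logs which of the three causes fired (read bus error, write bus error, request overflow), then disables the channel through the global-disable register and spins until the global status bit drops before unlinking the faulting descriptor. The disable idiom:

    at_xdmac_write(atxdmac, AT_XDMAC_GD, atchan->mask);
    while (at_xdmac_read(atxdmac, AT_XDMAC_GS) & atchan->mask)
            cpu_relax();            /* hardware-bounded busy wait */

    bad_desc = list_first_entry(&atchan->xfers_list,
                                struct at_xdmac_desc, xfer_node);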
1767 struct at_xdmac_chan *atchan = from_tasklet(atchan, t, tasklet);
1768 struct at_xdmac *atxdmac = to_at_xdmac(atchan->chan.device);
1773 if (at_xdmac_chan_is_cyclic(atchan))
1774 return at_xdmac_handle_cyclic(atchan);
1779 spin_lock_irq(&atchan->lock);
1781 dev_dbg(chan2dev(&atchan->chan), "%s: status=0x%08x\n",
1782 __func__, atchan->irq_status);
1784 if (!(atchan->irq_status & AT_XDMAC_CIS_LIS) &&
1785 !(atchan->irq_status & error_mask)) {
1786 spin_unlock_irq(&atchan->lock);
1790 if (atchan->irq_status & error_mask)
1791 at_xdmac_handle_error(atchan);
1793 desc = list_first_entry(&atchan->xfers_list, struct at_xdmac_desc,
1795 dev_vdbg(chan2dev(&atchan->chan), "%s: desc 0x%p\n", __func__, desc);
1797 dev_err(chan2dev(&atchan->chan), "Xfer not active: exiting");
1798 spin_unlock_irq(&atchan->lock);
1806 spin_unlock_irq(&atchan->lock);
1813 spin_lock_irq(&atchan->lock);
1815 list_splice_tail_init(&desc->descs_list, &atchan->free_descs_list);
1816 at_xdmac_advance_work(atchan);
1817 spin_unlock_irq(&atchan->lock);
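The tasklet is the completion bottom half: cyclic channels are diverted to at_xdmac_handle_cyclic(); otherwise it pops the head of xfers_list, handles any error causes, completes the descriptor outside the lock, then retakes the lock to splice the descriptor chain back onto free_descs_list and start the next queued transfer. A sketch of the completion tail, assuming the standard dmaengine helpers:

    dma_cookie_complete(txd);                      /* assumed helper */
    dmaengine_desc_get_callback_invoke(txd, NULL); /* assumed helper */

    spin_lock_irq(&atchan->lock);
    list_splice_tail_init(&desc->descs_list, &atchan->free_descs_list);
    at_xdmac_advance_work(atchan);    /* start next xfer if channel idle */
    spin_unlock_irq(&atchan->lock);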
1830 struct at_xdmac_chan *atchan;
1852 atchan = &atxdmac->chan[i];
1853 chan_imr = at_xdmac_chan_read(atchan, AT_XDMAC_CIM);
1854 chan_status = at_xdmac_chan_read(atchan, AT_XDMAC_CIS);
1855 atchan->irq_status = chan_status & chan_imr;
1859 dev_vdbg(chan2dev(&atchan->chan),
1862 at_xdmac_chan_read(atchan, AT_XDMAC_CC),
1863 at_xdmac_chan_read(atchan, AT_XDMAC_CNDA),
1864 at_xdmac_chan_read(atchan, AT_XDMAC_CNDC),
1865 at_xdmac_chan_read(atchan, AT_XDMAC_CSA),
1866 at_xdmac_chan_read(atchan, AT_XDMAC_CDA),
1867 at_xdmac_chan_read(atchan, AT_XDMAC_CUBC));
1869 if (atchan->irq_status & (AT_XDMAC_CIS_RBEIS | AT_XDMAC_CIS_WBEIS))
1870 at_xdmac_write(atxdmac, AT_XDMAC_GD, atchan->mask);
1872 tasklet_schedule(&atchan->tasklet);
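The hard IRQ handler only snapshots and defers: for every pending channel it masks CIS against CIM so spurious (masked) causes are ignored, latches the result in atchan->irq_status for the tasklet, disables the channel immediately on a bus error, and schedules the bottom half. Condensed from lines 1852-1872:

    chan_imr    = at_xdmac_chan_read(atchan, AT_XDMAC_CIM);
    chan_status = at_xdmac_chan_read(atchan, AT_XDMAC_CIS);
    atchan->irq_status = chan_status & chan_imr;  /* unmasked causes only */

    if (atchan->irq_status & (AT_XDMAC_CIS_RBEIS | AT_XDMAC_CIS_WBEIS))
            at_xdmac_write(atxdmac, AT_XDMAC_GD, atchan->mask);

    tasklet_schedule(&atchan->tasklet);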
1883 struct at_xdmac_chan *atchan = to_at_xdmac_chan(chan);
1886 dev_dbg(chan2dev(&atchan->chan), "%s\n", __func__);
1888 spin_lock_irqsave(&atchan->lock, flags);
1889 at_xdmac_advance_work(atchan);
1890 spin_unlock_irqrestore(&atchan->lock, flags);
1898 struct at_xdmac_chan *atchan = to_at_xdmac_chan(chan);
1904 spin_lock_irqsave(&atchan->lock, flags);
1906 spin_unlock_irqrestore(&atchan->lock, flags);
1912 struct at_xdmac_chan *atchan)
1914 at_xdmac_write(atxdmac, atxdmac->layout->grws, atchan->mask);
1915 while (at_xdmac_chan_read(atchan, AT_XDMAC_CC) &
1920 static void at_xdmac_device_pause_internal(struct at_xdmac_chan *atchan)
1922 struct at_xdmac *atxdmac = to_at_xdmac(atchan->chan.device);
1925 spin_lock_irqsave(&atchan->lock, flags);
1926 set_bit(AT_XDMAC_CHAN_IS_PAUSED_INTERNAL, &atchan->status);
1927 at_xdmac_device_pause_set(atxdmac, atchan);
1928 spin_unlock_irqrestore(&atchan->lock, flags);
1933 struct at_xdmac_chan *atchan = to_at_xdmac_chan(chan);
1934 struct at_xdmac *atxdmac = to_at_xdmac(atchan->chan.device);
1940 if (test_and_set_bit(AT_XDMAC_CHAN_IS_PAUSED, &atchan->status))
1947 spin_lock_irqsave(&atchan->lock, flags);
1949 at_xdmac_device_pause_set(atxdmac, atchan);
1951 at_xdmac_runtime_suspend_descriptors(atchan);
1953 spin_unlock_irqrestore(&atchan->lock, flags);
1961 static void at_xdmac_device_resume_internal(struct at_xdmac_chan *atchan)
1963 struct at_xdmac *atxdmac = to_at_xdmac(atchan->chan.device);
1966 spin_lock_irqsave(&atchan->lock, flags);
1967 at_xdmac_write(atxdmac, atxdmac->layout->grwr, atchan->mask);
1968 clear_bit(AT_XDMAC_CHAN_IS_PAUSED_INTERNAL, &atchan->status);
1969 spin_unlock_irqrestore(&atchan->lock, flags);
1974 struct at_xdmac_chan *atchan = to_at_xdmac_chan(chan);
1975 struct at_xdmac *atxdmac = to_at_xdmac(atchan->chan.device);
1985 spin_lock_irqsave(&atchan->lock, flags);
1986 if (!at_xdmac_chan_is_paused(atchan))
1990 ret = at_xdmac_runtime_resume_descriptors(atchan);
1994 at_xdmac_write(atxdmac, atxdmac->layout->grwr, atchan->mask);
1995 clear_bit(AT_XDMAC_CHAN_IS_PAUSED, &atchan->status);
1998 spin_unlock_irqrestore(&atchan->lock, flags);
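Pause and resume ride on the controller's read/write-suspend registers: writing the channel mask to the layout's grws register suspends further bus requests, after which the pause primitive polls CC until in-flight beats drain; grwr resumes. The _internal variants track their own status bit (AT_XDMAC_CHAN_IS_PAUSED_INTERNAL) so that system PM can pause a cyclic channel without disturbing a user-requested pause. Sketch of the pause primitive (the CC busy-bit names are not visible in this listing and are an assumption):

    at_xdmac_write(atxdmac, atxdmac->layout->grws, atchan->mask);
    while (at_xdmac_chan_read(atchan, AT_XDMAC_CC) &
           (AT_XDMAC_CC_WRIP | AT_XDMAC_CC_RDIP))  /* drain in-flight beats */
            cpu_relax();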
2008 struct at_xdmac_chan *atchan = to_at_xdmac_chan(chan);
2009 struct at_xdmac *atxdmac = to_at_xdmac(atchan->chan.device);
2019 spin_lock_irqsave(&atchan->lock, flags);
2020 at_xdmac_write(atxdmac, AT_XDMAC_GD, atchan->mask);
2021 while (at_xdmac_read(atxdmac, AT_XDMAC_GS) & atchan->mask)
2025 list_for_each_entry_safe(desc, _desc, &atchan->xfers_list, xfer_node) {
2028 &atchan->free_descs_list);
2040 clear_bit(AT_XDMAC_CHAN_IS_PAUSED, &atchan->status);
2041 clear_bit(AT_XDMAC_CHAN_IS_CYCLIC, &atchan->status);
2042 spin_unlock_irqrestore(&atchan->lock, flags);
2052 struct at_xdmac_chan *atchan = to_at_xdmac_chan(chan);
2056 if (at_xdmac_chan_is_enabled(atchan)) {
2062 if (!list_empty(&atchan->free_descs_list)) {
2080 list_add_tail(&desc->desc_node, &atchan->free_descs_list);
2092 struct at_xdmac_chan *atchan = to_at_xdmac_chan(chan);
2096 list_for_each_entry_safe(desc, _desc, &atchan->free_descs_list, desc_node) {
2135 struct at_xdmac_chan *atchan = to_at_xdmac_chan(chan);
2138 if (at_xdmac_chan_is_enabled(atchan) && !at_xdmac_chan_is_cyclic(atchan))
2155 struct at_xdmac_chan *atchan = to_at_xdmac_chan(chan);
2157 atchan->save_cc = at_xdmac_chan_read(atchan, AT_XDMAC_CC);
2158 if (at_xdmac_chan_is_cyclic(atchan)) {
2159 if (!at_xdmac_chan_is_paused(atchan)) {
2162 at_xdmac_device_pause_internal(atchan);
2163 at_xdmac_runtime_suspend_descriptors(atchan);
2165 atchan->save_cim = at_xdmac_chan_read(atchan, AT_XDMAC_CIM);
2166 atchan->save_cnda = at_xdmac_chan_read(atchan, AT_XDMAC_CNDA);
2167 atchan->save_cndc = at_xdmac_chan_read(atchan, AT_XDMAC_CNDC);
2184 struct at_xdmac_chan *atchan;
2199 atchan = &atxdmac->chan[i];
2200 while (at_xdmac_chan_read(atchan, AT_XDMAC_CIS))
2206 atchan = to_at_xdmac_chan(chan);
2208 at_xdmac_chan_write(atchan, AT_XDMAC_CC, atchan->save_cc);
2209 if (at_xdmac_chan_is_cyclic(atchan)) {
2214 if (at_xdmac_chan_is_paused_internal(atchan)) {
2215 ret = at_xdmac_runtime_resume_descriptors(atchan);
2218 at_xdmac_device_resume_internal(atchan);
2226 else if (at_xdmac_chan_is_paused(atchan))
2227 at_xdmac_device_pause_set(atxdmac, atchan);
2229 at_xdmac_chan_write(atchan, AT_XDMAC_CNDA, atchan->save_cnda);
2230 at_xdmac_chan_write(atchan, AT_XDMAC_CNDC, atchan->save_cndc);
2231 at_xdmac_chan_write(atchan, AT_XDMAC_CIE, atchan->save_cim);
2233 if (atxdmac->save_gs & atchan->mask)
2234 at_xdmac_write(atxdmac, AT_XDMAC_GE, atchan->mask);
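System suspend (lines 2155-2167) saves CC for every channel and, for cyclic channels, also pauses internally and saves CIM, CNDA, and CNDC; resume (lines 2184-2234) drains stale CIS state, restores those registers, reapplies a user pause if one was active, and re-enables only channels that were running at suspend time (tracked in save_gs). Restore order for a cyclic channel, condensed from lines 2208-2234:

    at_xdmac_chan_write(atchan, AT_XDMAC_CC,   atchan->save_cc);
    at_xdmac_chan_write(atchan, AT_XDMAC_CNDA, atchan->save_cnda);
    at_xdmac_chan_write(atchan, AT_XDMAC_CNDC, atchan->save_cndc);
    at_xdmac_chan_write(atchan, AT_XDMAC_CIE,  atchan->save_cim);
    if (atxdmac->save_gs & atchan->mask)    /* was running at suspend */
            at_xdmac_write(atxdmac, AT_XDMAC_GE, atchan->mask);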
2378 struct at_xdmac_chan *atchan = &atxdmac->chan[i];
2380 atchan->chan.device = &atxdmac->dma;
2381 list_add_tail(&atchan->chan.device_node,
2384 atchan->ch_regs = at_xdmac_chan_reg_base(atxdmac, i);
2385 atchan->mask = 1 << i;
2387 spin_lock_init(&atchan->lock);
2388 INIT_LIST_HEAD(&atchan->xfers_list);
2389 INIT_LIST_HEAD(&atchan->free_descs_list);
2390 tasklet_setup(&atchan->tasklet, at_xdmac_tasklet);
2393 while (at_xdmac_chan_read(atchan, AT_XDMAC_CIS))
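Probe-time channel setup (lines 2378-2393) is standard dmaengine boilerplate: link the channel into the DMA device's channel list, carve out its register window, derive its one-hot mask from the channel index, initialize lock, lists, and tasklet, and drain any stale interrupt status (CIS is read-to-clear). The loop body, condensed:

    atchan->chan.device = &atxdmac->dma;
    list_add_tail(&atchan->chan.device_node, &atxdmac->dma.channels);
    atchan->ch_regs = at_xdmac_chan_reg_base(atxdmac, i);
    atchan->mask = 1 << i;                  /* one channel, one GS/GE bit */
    spin_lock_init(&atchan->lock);
    INIT_LIST_HEAD(&atchan->xfers_list);
    INIT_LIST_HEAD(&atchan->free_descs_list);
    tasklet_setup(&atchan->tasklet, at_xdmac_tasklet);
    while (at_xdmac_chan_read(atchan, AT_XDMAC_CIS))
            cpu_relax();                    /* read-to-clear drain */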
2450 struct at_xdmac_chan *atchan = &atxdmac->chan[i];
2452 tasklet_kill(&atchan->tasklet);
2453 at_xdmac_free_chan_resources(&atchan->chan);