Lines Matching refs:atchan
34 * atc_ / atchan : Atmel DMA Channel entity related
80 static struct at_desc *atc_first_active(struct at_dma_chan *atchan)
82 return list_first_entry(&atchan->active_list,
86 static struct at_desc *atc_first_queued(struct at_dma_chan *atchan)
88 return list_first_entry(&atchan->queue,
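The two helpers above (source lines 80–88) only peek at the head of the channel's active and queued descriptor lists. A minimal userspace sketch of the same intrusive-list idiom, with simplified stand-ins for the kernel's struct list_head, container_of() and list_first_entry(); the *_sketch type names are hypothetical:

    #include <stddef.h>

    struct list_head { struct list_head *next, *prev; };

    /* simplified stand-in for the kernel's container_of() */
    #define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

    /* head entry of a non-empty list, as list_first_entry() yields */
    #define list_first_entry(head, type, member) \
        container_of((head)->next, type, member)

    struct at_desc_sketch {            /* hypothetical, pared-down at_desc */
        struct list_head desc_node;
    };

    struct at_chan_sketch {            /* hypothetical, pared-down at_dma_chan */
        struct list_head active_list;
        struct list_head queue;
    };

    static struct at_desc_sketch *first_active(struct at_chan_sketch *c)
    {
        return list_first_entry(&c->active_list, struct at_desc_sketch, desc_node);
    }

    static struct at_desc_sketch *first_queued(struct at_chan_sketch *c)
    {
        return list_first_entry(&c->queue, struct at_desc_sketch, desc_node);
    }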
124 * @atchan: channel we want a new descriptor for
126 static struct at_desc *atc_desc_get(struct at_dma_chan *atchan)
133 spin_lock_irqsave(&atchan->lock, flags);
134 list_for_each_entry_safe(desc, _desc, &atchan->free_list, desc_node) {
141 dev_dbg(chan2dev(&atchan->chan_common),
144 spin_unlock_irqrestore(&atchan->lock, flags);
145 dev_vdbg(chan2dev(&atchan->chan_common),
150 ret = atc_alloc_descriptor(&atchan->chan_common, GFP_NOWAIT);
157 * @atchan: channel we work on
160 static void atc_desc_put(struct at_dma_chan *atchan, struct at_desc *desc)
166 spin_lock_irqsave(&atchan->lock, flags);
168 dev_vdbg(chan2dev(&atchan->chan_common),
171 list_splice_init(&desc->tx_list, &atchan->free_list);
172 dev_vdbg(chan2dev(&atchan->chan_common),
174 list_add(&desc->desc_node, &atchan->free_list);
175 spin_unlock_irqrestore(&atchan->lock, flags);
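atc_desc_get() (lines 126–150) and atc_desc_put() (lines 160–175) implement a per-channel descriptor pool: get scans free_list under the channel spinlock and falls back to a fresh GFP_NOWAIT allocation, put splices the descriptor and its tx_list children back under the same lock. A rough userspace sketch of the pattern, with a pthread mutex standing in for atchan->lock and a singly linked free list standing in for the kernel list heads:

    #include <pthread.h>
    #include <stdlib.h>

    struct desc {
        struct desc *next;            /* free-list linkage */
    };

    struct chan_pool {
        pthread_mutex_t lock;         /* plays the role of atchan->lock */
        struct desc *free_list;
    };

    static struct desc *desc_get(struct chan_pool *p)
    {
        struct desc *d;

        pthread_mutex_lock(&p->lock);
        d = p->free_list;             /* reuse a recycled descriptor... */
        if (d)
            p->free_list = d->next;
        pthread_mutex_unlock(&p->lock);

        if (!d)                       /* ...or fall back to the allocator, as
                                       * atc_desc_get() does via
                                       * atc_alloc_descriptor(..., GFP_NOWAIT) */
            d = calloc(1, sizeof(*d));
        return d;
    }

    static void desc_put(struct chan_pool *p, struct desc *d)
    {
        pthread_mutex_lock(&p->lock);
        d->next = p->free_list;       /* back onto the free list */
        p->free_list = d;
        pthread_mutex_unlock(&p->lock);
    }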
204 * @atchan: the channel we want to start
207 * Called with atchan->lock held and bh disabled
209 static void atc_dostart(struct at_dma_chan *atchan, struct at_desc *first)
211 struct at_dma *atdma = to_at_dma(atchan->chan_common.device);
214 if (atc_chan_is_enabled(atchan)) {
215 dev_err(chan2dev(&atchan->chan_common),
217 dev_err(chan2dev(&atchan->chan_common),
219 channel_readl(atchan, SADDR),
220 channel_readl(atchan, DADDR),
221 channel_readl(atchan, CTRLA),
222 channel_readl(atchan, CTRLB),
223 channel_readl(atchan, DSCR));
229 vdbg_dump_regs(atchan);
231 channel_writel(atchan, SADDR, 0);
232 channel_writel(atchan, DADDR, 0);
233 channel_writel(atchan, CTRLA, 0);
234 channel_writel(atchan, CTRLB, 0);
235 channel_writel(atchan, DSCR, first->txd.phys);
236 channel_writel(atchan, SPIP, ATC_SPIP_HOLE(first->src_hole) |
238 channel_writel(atchan, DPIP, ATC_DPIP_HOLE(first->dst_hole) |
242 dma_writel(atdma, CHER, atchan->mask);
244 vdbg_dump_regs(atchan);
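atc_dostart() (lines 209–244) refuses to reprogram a channel that is still enabled (dumping SADDR/DADDR/CTRLA/CTRLB/DSCR when that happens), zeroes the per-channel registers, points DSCR at the physical address of the first hardware descriptor so the controller fetches the LLI chain on its own, and finally sets the channel's bit in the global CHER enable register. An MMIO sketch of that sequence; the register offsets here are illustrative, not the real at_hdmac layout:

    #include <stdint.h>

    /* hypothetical offsets, for illustration only */
    enum { SADDR = 0x00, DADDR = 0x04, CTRLA = 0x08, CTRLB = 0x0c, DSCR = 0x10 };

    static inline void chan_writel(volatile uint8_t *ch_regs,
                                   unsigned off, uint32_t val)
    {
        *(volatile uint32_t *)(ch_regs + off) = val;
    }

    /* mirrors the atc_dostart() order: clear, load DSCR, then enable */
    static void dostart_sketch(volatile uint8_t *ch_regs,
                               volatile uint32_t *cher, uint32_t chan_mask,
                               uint32_t first_desc_phys)
    {
        chan_writel(ch_regs, SADDR, 0);
        chan_writel(ch_regs, DADDR, 0);
        chan_writel(ch_regs, CTRLA, 0);
        chan_writel(ch_regs, CTRLB, 0);
        chan_writel(ch_regs, DSCR, first_desc_phys); /* head of the LLI chain */
        *cher = chan_mask;                           /* enable the channel */
    }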
249 * @atchan: the DMA channel
252 static struct at_desc *atc_get_desc_by_cookie(struct at_dma_chan *atchan,
257 list_for_each_entry_safe(desc, _desc, &atchan->queue, desc_node) {
262 list_for_each_entry_safe(desc, _desc, &atchan->active_list, desc_node) {
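atc_get_desc_by_cookie() (lines 252–262) is a linear scan, first over the not-yet-issued queue and then over active_list, comparing each descriptor's cookie. A simplified sketch with hypothetical singly linked lists:

    #include <stddef.h>

    struct cdesc {
        int cookie;
        struct cdesc *next;
    };

    static struct cdesc *find_by_cookie(struct cdesc *queue,
                                        struct cdesc *active, int cookie)
    {
        struct cdesc *d;

        for (d = queue; d; d = d->next)     /* submitted, not yet issued */
            if (d->cookie == cookie)
                return d;
        for (d = active; d; d = d->next)    /* issued, possibly running */
            if (d->cookie == cookie)
                return d;
        return NULL;                        /* unknown cookie */
    }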
298 struct at_dma_chan *atchan = to_at_dma_chan(chan);
299 struct at_desc *desc_first = atc_first_active(atchan);
310 desc = atc_get_desc_by_cookie(atchan, cookie);
370 dscr = channel_readl(atchan, DSCR);
372 ctrla = channel_readl(atchan, CTRLA);
377 new_dscr = channel_readl(atchan, DSCR);
397 ctrla = channel_readl(atchan, CTRLA);
421 ctrla = channel_readl(atchan, CTRLA);
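The DSCR/CTRLA reads at lines 370–421 belong to the residue calculation: DSCR is sampled before and after CTRLA so the driver can detect the controller advancing to the next LLI mid-read, in which case the CTRLA snapshot is stale and must be retried. A sketch of that double-read idiom (BTSIZE_MASK is a hypothetical stand-in for the driver's CTRLA field masks):

    #include <stdint.h>

    #define BTSIZE_MASK 0xffffu   /* hypothetical CTRLA transfer-count field */

    /* retry until DSCR is stable across the CTRLA read, so the count
     * is known to belong to the descriptor we think is current */
    static uint32_t read_btsize_stable(volatile uint32_t *dscr_reg,
                                       volatile uint32_t *ctrla_reg)
    {
        uint32_t dscr, ctrla, new_dscr;

        do {
            dscr     = *dscr_reg;
            ctrla    = *ctrla_reg;
            new_dscr = *dscr_reg;
        } while (new_dscr != dscr);   /* same LLI before and after */

        return ctrla & BTSIZE_MASK;   /* transfer units left in this LLI */
    }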
430 * @atchan: channel we work on
434 atc_chain_complete(struct at_dma_chan *atchan, struct at_desc *desc)
437 struct at_dma *atdma = to_at_dma(atchan->chan_common.device);
440 dev_vdbg(chan2dev(&atchan->chan_common),
443 spin_lock_irqsave(&atchan->lock, flags);
446 if (!atc_chan_is_cyclic(atchan))
449 spin_unlock_irqrestore(&atchan->lock, flags);
454 if (!atc_chan_is_cyclic(atchan))
459 spin_lock_irqsave(&atchan->lock, flags);
461 list_splice_init(&desc->tx_list, &atchan->free_list);
463 list_add(&desc->desc_node, &atchan->free_list);
464 spin_unlock_irqrestore(&atchan->lock, flags);
476 * @atchan: channel where the transaction ended
478 static void atc_advance_work(struct at_dma_chan *atchan)
483 dev_vdbg(chan2dev(&atchan->chan_common), "advance_work\n");
485 spin_lock_irqsave(&atchan->lock, flags);
486 if (atc_chan_is_enabled(atchan) || list_empty(&atchan->active_list))
487 return spin_unlock_irqrestore(&atchan->lock, flags);
489 desc = atc_first_active(atchan);
492 spin_unlock_irqrestore(&atchan->lock, flags);
493 atc_chain_complete(atchan, desc);
496 spin_lock_irqsave(&atchan->lock, flags);
497 if (!list_empty(&atchan->active_list)) {
498 desc = atc_first_queued(atchan);
499 list_move_tail(&desc->desc_node, &atchan->active_list);
500 atc_dostart(atchan, desc);
502 spin_unlock_irqrestore(&atchan->lock, flags);
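atc_advance_work() (lines 478–502) bails out if the channel is still enabled or has nothing active (the `return spin_unlock_irqrestore(...)` at line 487 unlocks and returns in one statement, which GNU C permits since both functions return void); otherwise it completes the first active descriptor with the lock dropped, then re-takes the lock to promote and start the next transfer. A skeleton of that control flow, with hypothetical stand-in types:

    #include <pthread.h>
    #include <stdbool.h>

    struct work_chan {
        pthread_mutex_t lock;
        bool hw_busy;                 /* stand-in for atc_chan_is_enabled() */
        void *active, *queued;        /* stand-ins for the two descriptor lists */
    };

    static void complete_chain(struct work_chan *c, void *d) { (void)c; (void)d; }
    static void start_hw(struct work_chan *c, void *d)       { (void)c; (void)d; }

    static void advance_work(struct work_chan *c)
    {
        void *desc;

        pthread_mutex_lock(&c->lock);
        if (c->hw_busy || !c->active) {
            pthread_mutex_unlock(&c->lock);   /* unlock-and-return path */
            return;
        }
        desc = c->active;                     /* detach the finished chain */
        c->active = NULL;
        pthread_mutex_unlock(&c->lock);

        complete_chain(c, desc);              /* client callbacks run unlocked */

        pthread_mutex_lock(&c->lock);
        if (c->queued) {                      /* promote the next transfer */
            c->active = c->queued;
            c->queued = NULL;
            start_hw(c, c->active);
        }
        pthread_mutex_unlock(&c->lock);
    }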
508 * @atchan: channel where error occurs
510 static void atc_handle_error(struct at_dma_chan *atchan)
517 spin_lock_irqsave(&atchan->lock, flags);
523 bad_desc = atc_first_active(atchan);
527 if (!list_empty(&atchan->active_list)) {
528 desc = atc_first_queued(atchan);
529 list_move_tail(&desc->desc_node, &atchan->active_list);
530 atc_dostart(atchan, desc);
540 dev_crit(chan2dev(&atchan->chan_common),
542 dev_crit(chan2dev(&atchan->chan_common),
544 atc_dump_lli(atchan, &bad_desc->lli);
546 atc_dump_lli(atchan, &child->lli);
548 spin_unlock_irqrestore(&atchan->lock, flags);
551 atc_chain_complete(atchan, bad_desc);
556 * @atchan: channel used for cyclic operations
558 static void atc_handle_cyclic(struct at_dma_chan *atchan)
560 struct at_desc *first = atc_first_active(atchan);
563 dev_vdbg(chan2dev(&atchan->chan_common),
565 channel_readl(atchan, DSCR));
574 struct at_dma_chan *atchan = from_tasklet(atchan, t, tasklet);
576 if (test_and_clear_bit(ATC_IS_ERROR, &atchan->status))
577 return atc_handle_error(atchan);
579 if (atc_chan_is_cyclic(atchan))
580 return atc_handle_cyclic(atchan);
582 atc_advance_work(atchan);
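atc_tasklet() (lines 574–582) is a three-way dispatch: a latched error flag wins, cyclic channels get their period handling, and everything else falls through to atc_advance_work(). A sketch using C11 atomics to mimic test_and_clear_bit(); bit positions and helper names are hypothetical:

    #include <stdatomic.h>

    enum { IS_ERROR_BIT = 0, IS_CYCLIC_BIT = 1 };   /* hypothetical layout */

    struct tl_chan {
        atomic_ulong status;          /* plays the role of atchan->status */
    };

    static void handle_error(struct tl_chan *c)  { (void)c; }
    static void handle_cyclic(struct tl_chan *c) { (void)c; }
    static void advance(struct tl_chan *c)       { (void)c; }

    static void tasklet_body(struct tl_chan *c)
    {
        /* ~ test_and_clear_bit(ATC_IS_ERROR, &atchan->status) */
        unsigned long prev = atomic_fetch_and(&c->status,
                                              ~(1UL << IS_ERROR_BIT));
        if (prev & (1UL << IS_ERROR_BIT)) {
            handle_error(c);          /* error recovery path wins */
            return;
        }
        if (atomic_load(&c->status) & (1UL << IS_CYCLIC_BIT)) {
            handle_cyclic(c);         /* period callback for cyclic mode */
            return;
        }
        advance(c);                   /* ordinary completion / next transfer */
    }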
588 struct at_dma_chan *atchan;
606 atchan = &atdma->chan[i];
611 AT_DMA_RES(i) | atchan->mask);
613 set_bit(ATC_IS_ERROR, &atchan->status);
615 tasklet_schedule(&atchan->tasklet);
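The interrupt handler (lines 588–615) fans pending status out per channel: on error it disables the channel via CHDR (AT_DMA_RES(i) together with the channel mask, line 611), latches ATC_IS_ERROR in the channel's status word, and in all cases defers the real work to the channel tasklet. A pared-down sketch of that fan-out:

    #include <stdint.h>
    #include <stdbool.h>

    #define NR_CHANNELS 8

    struct irq_chan {
        unsigned long status;         /* hypothetical; bit 0 ~ ATC_IS_ERROR */
        bool tasklet_pending;
    };

    static void schedule_tasklet(struct irq_chan *c) { c->tasklet_pending = true; }

    /* split a pending/error summary into per-channel state, then defer */
    static void dma_irq_sketch(struct irq_chan *chans, uint32_t pending,
                               uint32_t error)
    {
        for (unsigned i = 0; i < NR_CHANNELS; i++) {
            if (!(pending & (1u << i)))
                continue;
            if (error & (1u << i))
                chans[i].status |= 1UL << 0;   /* latch the error flag */
            schedule_tasklet(&chans[i]);       /* heavy lifting off hard-IRQ */
        }
    }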
639 struct at_dma_chan *atchan = to_at_dma_chan(tx->chan);
643 spin_lock_irqsave(&atchan->lock, flags);
646 list_add_tail(&desc->desc_node, &atchan->queue);
647 spin_unlock_irqrestore(&atchan->lock, flags);
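tx_submit (lines 639–647) never touches the hardware: it assigns the cookie and appends the descriptor to atchan->queue under the lock, leaving the actual start to issue_pending or advance_work. A sketch of that submit path; field and function names are hypothetical:

    #include <pthread.h>

    struct sub_desc {
        int cookie;
        struct sub_desc *next;
    };

    struct sub_chan {
        pthread_mutex_t lock;
        int last_cookie;
        struct sub_desc *q_head, *q_tail;
    };

    static int tx_submit_sketch(struct sub_chan *c, struct sub_desc *d)
    {
        pthread_mutex_lock(&c->lock);
        d->cookie = ++c->last_cookie;   /* ~ dma_cookie_assign(tx) */
        d->next = NULL;
        if (c->q_tail)                  /* ~ list_add_tail(..., &atchan->queue) */
            c->q_tail->next = d;
        else
            c->q_head = d;
        c->q_tail = d;
        pthread_mutex_unlock(&c->lock);
        return d->cookie;
    }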
665 struct at_dma_chan *atchan = to_at_dma_chan(chan);
726 desc = atc_desc_get(atchan);
765 struct at_dma_chan *atchan = to_at_dma_chan(chan);
802 desc = atc_desc_get(atchan);
829 atc_desc_put(atchan, first);
838 struct at_dma_chan *atchan = to_at_dma_chan(chan);
855 desc = atc_desc_get(atchan);
944 struct at_dma_chan *atchan = to_at_dma_chan(chan);
1011 atc_desc_put(atchan, first);
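The prep_* routines (descriptor gets at lines 726, 802 and 855; puts at 829 and 1011) share one error-path idiom: descriptors pulled from the pool are chained behind the first one, and if any allocation fails the whole partial chain is handed back through atc_desc_put(atchan, first), which releases the children too. A self-contained sketch of that build-or-release-all loop, using malloc/free in place of the pool:

    #include <stdlib.h>

    struct pdesc { struct pdesc *next; };

    static struct pdesc *pool_get(void)               /* ~ atc_desc_get() */
    {
        return calloc(1, sizeof(struct pdesc));
    }

    static void pool_put_chain(struct pdesc *first)   /* ~ atc_desc_put() */
    {
        while (first) {
            struct pdesc *next = first->next;
            free(first);
            first = next;
        }
    }

    static struct pdesc *prep_chain_sketch(size_t nsegs)
    {
        struct pdesc *first = NULL, *prev = NULL;

        for (size_t i = 0; i < nsegs; i++) {
            struct pdesc *d = pool_get();
            if (!d) {
                pool_put_chain(first);   /* release the whole partial chain */
                return NULL;
            }
            if (prev)
                prev->next = d;          /* link behind the first descriptor */
            else
                first = d;
            prev = d;
        }
        return first;
    }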
1029 struct at_dma_chan *atchan = to_at_dma_chan(chan);
1031 struct dma_slave_config *sconfig = &atchan->dma_sconfig;
1064 | ATC_SIF(atchan->mem_if) | ATC_DIF(atchan->per_if);
1071 desc = atc_desc_get(atchan);
1104 | ATC_SIF(atchan->per_if) | ATC_DIF(atchan->mem_if);
1112 desc = atc_desc_get(atchan);
1158 atc_desc_put(atchan, first);
1192 struct at_dma_chan *atchan = to_at_dma_chan(chan);
1193 struct dma_slave_config *sconfig = &atchan->dma_sconfig;
1211 | ATC_SIF(atchan->mem_if)
1212 | ATC_DIF(atchan->per_if);
1223 | ATC_SIF(atchan->per_if)
1224 | ATC_DIF(atchan->mem_if);
1249 struct at_dma_chan *atchan = to_at_dma_chan(chan);
1251 struct dma_slave_config *sconfig = &atchan->dma_sconfig;
1269 was_cyclic = test_and_set_bit(ATC_IS_CYCLIC, &atchan->status);
1291 desc = atc_desc_get(atchan);
1313 atc_desc_put(atchan, first);
1315 clear_bit(ATC_IS_CYCLIC, &atchan->status);
1322 struct at_dma_chan *atchan = to_at_dma_chan(chan);
1330 memcpy(&atchan->dma_sconfig, sconfig, sizeof(*sconfig));
1332 convert_burst(&atchan->dma_sconfig.src_maxburst);
1333 convert_burst(&atchan->dma_sconfig.dst_maxburst);
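The config path (lines 1322–1333) snapshots the caller's dma_slave_config into atchan->dma_sconfig and converts both maxburst values from transfer counts to the controller's chunk-size encoding. A sketch of the conversion, assuming the fls(x) - 2 mapping used by the driver's convert_burst() helper:

    #include <stdint.h>

    /* 4 -> 1, 8 -> 2, 16 -> 3, ...; 0 and 1 both encode single transfers */
    static uint32_t convert_burst_sketch(uint32_t maxburst)
    {
        if (maxburst <= 1)
            return 0;
        /* fls(): 1-based index of the highest set bit (GCC/Clang builtin) */
        uint32_t fls = 32u - (uint32_t)__builtin_clz(maxburst);
        return fls - 2;
    }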
1340 struct at_dma_chan *atchan = to_at_dma_chan(chan);
1342 int chan_id = atchan->chan_common.chan_id;
1347 spin_lock_irqsave(&atchan->lock, flags);
1350 set_bit(ATC_IS_PAUSED, &atchan->status);
1352 spin_unlock_irqrestore(&atchan->lock, flags);
1359 struct at_dma_chan *atchan = to_at_dma_chan(chan);
1361 int chan_id = atchan->chan_common.chan_id;
1366 if (!atc_chan_is_paused(atchan))
1369 spin_lock_irqsave(&atchan->lock, flags);
1372 clear_bit(ATC_IS_PAUSED, &atchan->status);
1374 spin_unlock_irqrestore(&atchan->lock, flags);
1381 struct at_dma_chan *atchan = to_at_dma_chan(chan);
1383 int chan_id = atchan->chan_common.chan_id;
1394 spin_lock_irqsave(&atchan->lock, flags);
1397 dma_writel(atdma, CHDR, AT_DMA_RES(chan_id) | atchan->mask);
1400 while (dma_readl(atdma, CHSR) & atchan->mask)
1404 list_splice_tail_init(&atchan->queue, &atchan->free_list);
1405 list_splice_tail_init(&atchan->active_list, &atchan->free_list);
1407 spin_unlock_irqrestore(&atchan->lock, flags);
1409 clear_bit(ATC_IS_PAUSED, &atchan->status);
1411 clear_bit(ATC_IS_CYCLIC, &atchan->status);
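Terminate-all (lines 1381–1411) disables the channel through CHDR (OR-ing in AT_DMA_RES(chan_id) so a paused channel resumes enough to actually stop), busy-waits on CHSR until the hardware confirms the channel is off, then splices both queue and active_list back onto free_list and clears the PAUSED and CYCLIC flags. A sketch of the disable-and-poll step:

    #include <stdint.h>

    /* request disable, then poll status until the controller stops,
     * as the driver does with its CHDR write and CHSR loop */
    static void terminate_sketch(volatile uint32_t *chdr,
                                 volatile uint32_t *chsr, uint32_t chan_mask)
    {
        *chdr = chan_mask;          /* channel disable request */
        while (*chsr & chan_mask)   /* wait for the hardware to stop */
            ;                       /* the driver relaxes the CPU here */
    }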
1431 struct at_dma_chan *atchan = to_at_dma_chan(chan);
1446 spin_lock_irqsave(&atchan->lock, flags);
1451 spin_unlock_irqrestore(&atchan->lock, flags);
1473 struct at_dma_chan *atchan = to_at_dma_chan(chan);
1479 spin_lock_irqsave(&atchan->lock, flags);
1480 if (atc_chan_is_enabled(atchan) || list_empty(&atchan->queue))
1481 return spin_unlock_irqrestore(&atchan->lock, flags);
1483 desc = atc_first_queued(atchan);
1484 list_move_tail(&desc->desc_node, &atchan->active_list);
1485 atc_dostart(atchan, desc);
1486 spin_unlock_irqrestore(&atchan->lock, flags);
1497 struct at_dma_chan *atchan = to_at_dma_chan(chan);
1507 if (atc_chan_is_enabled(atchan)) {
1512 if (!list_empty(&atchan->free_list)) {
1540 list_add_tail(&desc->desc_node, &atchan->free_list);
1546 channel_writel(atchan, CFG, cfg);
1560 struct at_dma_chan *atchan = to_at_dma_chan(chan);
1566 BUG_ON(!list_empty(&atchan->active_list));
1567 BUG_ON(!list_empty(&atchan->queue));
1568 BUG_ON(atc_chan_is_enabled(atchan));
1570 list_for_each_entry_safe(desc, _desc, &atchan->free_list, desc_node) {
1576 list_splice_init(&atchan->free_list, &list);
1577 atchan->status = 0;
1605 struct at_dma_chan *atchan;
1660 atchan = to_at_dma_chan(chan);
1661 atchan->per_if = dma_spec->args[0] & 0xff;
1662 atchan->mem_if = (dma_spec->args[0] >> 16) & 0xff;
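The OF xlate hook (lines 1605–1662) decodes the consumer's dma-cells: the peripheral interface ID sits in the low byte of the first cell and the memory interface ID in bits 16–23. A sketch of just that unpacking:

    #include <stdint.h>

    static void decode_dma_spec(uint32_t cell0,
                                uint8_t *per_if, uint8_t *mem_if)
    {
        *per_if = cell0 & 0xff;          /* peripheral interface ID */
        *mem_if = (cell0 >> 16) & 0xff;  /* memory interface ID */
    }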
1840 struct at_dma_chan *atchan = &atdma->chan[i];
1842 atchan->mem_if = AT_DMA_MEM_IF;
1843 atchan->per_if = AT_DMA_PER_IF;
1844 atchan->chan_common.device = &atdma->dma_common;
1845 dma_cookie_init(&atchan->chan_common);
1846 list_add_tail(&atchan->chan_common.device_node,
1849 atchan->ch_regs = atdma->regs + ch_regs(i);
1850 spin_lock_init(&atchan->lock);
1851 atchan->mask = 1 << i;
1853 INIT_LIST_HEAD(&atchan->active_list);
1854 INIT_LIST_HEAD(&atchan->queue);
1855 INIT_LIST_HEAD(&atchan->free_list);
1857 tasklet_setup(&atchan->tasklet, atc_tasklet);
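The probe loop (lines 1840–1857) gives each channel its interface IDs, links chan_common into the dmaengine device list, maps its register window with ch_regs(i), sets mask to bit i of the global CHER/CHDR/CHSR registers, and initializes the lock, the three descriptor lists and the tasklet. A pared-down sketch of that per-channel initialization; structure and parameter names are hypothetical:

    #include <stdint.h>
    #include <stddef.h>
    #include <pthread.h>

    #define NR_CH 8

    struct pc_chan {
        volatile uint8_t *ch_regs;      /* this channel's register window */
        uint32_t mask;                  /* bit i in CHER/CHDR/CHSR */
        pthread_mutex_t lock;
        void *active, *queue, *free_list;   /* stand-ins for the list heads */
    };

    static void init_channels(struct pc_chan *chans, volatile uint8_t *regs,
                              size_t first_off, size_t ch_stride)
    {
        for (unsigned i = 0; i < NR_CH; i++) {
            struct pc_chan *c = &chans[i];

            c->ch_regs = regs + first_off + i * ch_stride;   /* ~ ch_regs(i) */
            c->mask = UINT32_C(1) << i;
            pthread_mutex_init(&c->lock, NULL);
            c->active = c->queue = c->free_list = NULL;
        }
    }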
1965 struct at_dma_chan *atchan = to_at_dma_chan(chan);
1970 tasklet_kill(&atchan->tasklet);
2003 struct at_dma_chan *atchan = to_at_dma_chan(chan);
2005 if (atc_chan_is_enabled(atchan) && !atc_chan_is_cyclic(atchan))
2011 static void atc_suspend_cyclic(struct at_dma_chan *atchan)
2013 struct dma_chan *chan = &atchan->chan_common;
2017 if (!atc_chan_is_paused(atchan)) {
2025 atchan->save_dscr = channel_readl(atchan, DSCR);
2027 vdbg_dump_regs(atchan);
2038 struct at_dma_chan *atchan = to_at_dma_chan(chan);
2040 if (atc_chan_is_cyclic(atchan))
2041 atc_suspend_cyclic(atchan);
2042 atchan->save_cfg = channel_readl(atchan, CFG);
2052 static void atc_resume_cyclic(struct at_dma_chan *atchan)
2054 struct at_dma *atdma = to_at_dma(atchan->chan_common.device);
2058 channel_writel(atchan, SADDR, 0);
2059 channel_writel(atchan, DADDR, 0);
2060 channel_writel(atchan, CTRLA, 0);
2061 channel_writel(atchan, CTRLB, 0);
2062 channel_writel(atchan, DSCR, atchan->save_dscr);
2063 dma_writel(atdma, CHER, atchan->mask);
2068 vdbg_dump_regs(atchan);
2088 struct at_dma_chan *atchan = to_at_dma_chan(chan);
2090 channel_writel(atchan, CFG, atchan->save_cfg);
2091 if (atc_chan_is_cyclic(atchan))
2092 atc_resume_cyclic(atchan);
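The PM hooks (lines 2003–2092) save CFG for every channel and, for cyclic channels, also DSCR after pausing, so that resume can replay the atc_dostart() sequence from the saved descriptor and the cyclic ring carries on. A sketch of the save/restore pair, with a hypothetical word-indexed register window:

    #include <stdint.h>

    struct pm_chan {
        volatile uint32_t *regs;        /* SADDR..CFG window, word-indexed */
        uint32_t save_cfg, save_dscr;
        int cyclic;
    };

    enum { R_SADDR, R_DADDR, R_CTRLA, R_CTRLB, R_DSCR, R_CFG };   /* hypothetical */

    static void pm_suspend(struct pm_chan *c)
    {
        if (c->cyclic)
            c->save_dscr = c->regs[R_DSCR];   /* next LLI to resume from */
        c->save_cfg = c->regs[R_CFG];
    }

    static void pm_resume(struct pm_chan *c, volatile uint32_t *cher,
                          uint32_t mask)
    {
        c->regs[R_CFG] = c->save_cfg;
        if (!c->cyclic)
            return;
        c->regs[R_SADDR] = 0;                 /* same clear-then-load order */
        c->regs[R_DADDR] = 0;                 /* as the dostart sequence */
        c->regs[R_CTRLA] = 0;
        c->regs[R_CTRLB] = 0;
        c->regs[R_DSCR] = c->save_dscr;       /* restart the saved ring */
        *cher = mask;                         /* re-enable the channel */
    }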