Lines matching defs:schan (occurrences of the schan channel pointer in the SiRFSoC DMA driver, keyed by source line number)
158 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(c);
159 return container_of(schan, struct sirfsoc_dma, channels[c->chan_id]);
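The two helpers above (158-159) recover the driver-private structures from a generic dma_chan in two container_of steps: first from the embedded dma_chan to its sirfsoc_dma_chan wrapper, then from the channels[] slot back to the owning sirfsoc_dma. A minimal sketch of the pattern follows; the struct layout is reconstructed from the fields visible in this listing, and the field order, channel count, base member, and the short helper names (standing in for the driver's dma_chan_to_* helpers) are assumptions:

    #include <linux/dmaengine.h>
    #include <linux/list.h>
    #include <linux/spinlock.h>

    #define SIRFSOC_DMA_CHANNELS 16             /* assumed channel count */

    struct sirfsoc_dma_desc {
        struct dma_async_tx_descriptor desc;    /* embedded dmaengine descriptor */
        struct list_head node;                  /* links into the channel lists */
    };

    struct sirfsoc_dma_chan {
        struct dma_chan chan;                   /* embedded dmaengine channel */
        struct list_head free, prepared, queued, active, completed;
        unsigned long happened_cyclic;          /* periods seen by the IRQ */
        unsigned long completed_cyclic;         /* periods reported to the client */
        spinlock_t lock;
        int mode;
    };

    struct sirfsoc_dma {
        struct dma_device dma;
        struct sirfsoc_dma_chan channels[SIRFSOC_DMA_CHANNELS];
        void __iomem *base;                     /* register window ("base" at 257) */
    };

    /* step 1: dma_chan is the first member, so container_of finds the wrapper */
    static struct sirfsoc_dma_chan *to_schan(struct dma_chan *c)
    {
        return container_of(c, struct sirfsoc_dma_chan, chan);
    }

    /* step 2: index back from channels[chan_id] to the parent controller */
    static struct sirfsoc_dma *to_sdma(struct dma_chan *c)
    {
        return container_of(to_schan(c), struct sirfsoc_dma,
                            channels[c->chan_id]);
    }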
236 static void sirfsoc_dma_execute(struct sirfsoc_dma_chan *schan)
238 struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(&schan->chan);
239 int cid = schan->chan.chan_id;
248 sdesc = list_first_entry(&schan->queued, struct sirfsoc_dma_desc,
251 list_move_tail(&sdesc->node, &schan->active);
257 sdma->exec_desc(sdesc, cid, schan->mode, base);
260 schan->happened_cyclic = schan->completed_cyclic = 0;
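sirfsoc_dma_execute (236-260) starts the next transfer: it pops the head of queued, moves it to active, programs the controller through the per-variant exec_desc hook, and zeroes both cyclic counters for the new transfer. Its callers (292, 318, 633) already hold schan->lock. In outline, using the types sketched above and with the hardware programming elided as it is in the listing:

    /* caller holds schan->lock */
    static void execute_next(struct sirfsoc_dma_chan *schan)
    {
        struct sirfsoc_dma_desc *sdesc;

        sdesc = list_first_entry(&schan->queued,
                                 struct sirfsoc_dma_desc, node);
        list_move_tail(&sdesc->node, &schan->active);

        /* program the engine; the driver calls
         * sdma->exec_desc(sdesc, cid, schan->mode, base) here (257) */

        schan->happened_cyclic = schan->completed_cyclic = 0;
    }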
267 struct sirfsoc_dma_chan *schan;
282 schan = &sdma->channels[ch];
283 spin_lock(&schan->lock);
284 sdesc = list_first_entry(&schan->active,
288 list_splice_tail_init(&schan->active,
289 &schan->completed);
291 if (!list_empty(&schan->queued))
292 sirfsoc_dma_execute(schan);
294 schan->happened_cyclic++;
295 spin_unlock(&schan->lock);
304 schan = &sdma->channels[0];
305 spin_lock(&schan->lock);
306 sdesc = list_first_entry(&schan->active,
314 list_splice_tail_init(&schan->active,
315 &schan->completed);
317 if (!list_empty(&schan->queued))
318 sirfsoc_dma_execute(schan);
322 schan->happened_cyclic++;
324 spin_unlock(&schan->lock);
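The interrupt handler (267-324) does only list bookkeeping in hard-irq context, which is why the plain spin_lock() variant suffices. For a finished one-shot transfer the whole active list is spliced onto completed and the next queued descriptor is started immediately; for a cyclic transfer the descriptor stays on active and only happened_cyclic is incremented, deferring the reporting to the tasklet. The second block (304-324) is the same logic for a controller variant that signals everything through channel 0. The per-channel step, under sketch names:

    /* hard-irq context: IRQs are already off, plain spin_lock() suffices */
    static void channel_irq(struct sirfsoc_dma_chan *schan, bool cyclic)
    {
        spin_lock(&schan->lock);
        if (!cyclic) {
            /* retire the finished work, then chain the next transfer */
            list_splice_tail_init(&schan->active, &schan->completed);
            if (!list_empty(&schan->queued))
                execute_next(schan);
        } else {
            /* count the period; the tasklet invokes the callback later */
            schan->happened_cyclic++;
        }
        spin_unlock(&schan->lock);
    }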
341 struct sirfsoc_dma_chan *schan;
350 schan = &sdma->channels[i];
353 spin_lock_irqsave(&schan->lock, flags);
354 if (!list_empty(&schan->completed)) {
355 list_splice_tail_init(&schan->completed, &list);
356 spin_unlock_irqrestore(&schan->lock, flags);
368 spin_lock_irqsave(&schan->lock, flags);
369 list_splice_tail_init(&list, &schan->free);
370 schan->chan.completed_cookie = last_cookie;
371 spin_unlock_irqrestore(&schan->lock, flags);
373 if (list_empty(&schan->active)) {
374 spin_unlock_irqrestore(&schan->lock, flags);
379 sdesc = list_first_entry(&schan->active,
383 happened_cyclic = schan->happened_cyclic;
384 spin_unlock_irqrestore(&schan->lock, flags);
387 while (happened_cyclic != schan->completed_cyclic) {
389 schan->completed_cyclic++;
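The tasklet (341-389) finishes what the IRQ started, dropping the lock before calling back into the client. Completed descriptors are first spliced onto a private list (355), the callbacks run unlocked, and the descriptors then return to free while completed_cookie advances to the last retired cookie (369-370). On a cyclic channel (373-389) the active descriptor never completes; instead the tasklet catches completed_cyclic up to the happened_cyclic count snapshotted under the lock, firing the period callback once per elapsed period. The one-shot half, roughly:

    static void drain_completed(struct sirfsoc_dma_chan *schan)
    {
        LIST_HEAD(list);                /* private, lock-free staging list */
        struct sirfsoc_dma_desc *sdesc;
        dma_cookie_t last_cookie = 0;
        unsigned long flags;

        spin_lock_irqsave(&schan->lock, flags);
        if (list_empty(&schan->completed)) {
            spin_unlock_irqrestore(&schan->lock, flags);
            return;
        }
        list_splice_tail_init(&schan->completed, &list);
        spin_unlock_irqrestore(&schan->lock, flags);

        /* client callbacks run without the channel lock held */
        list_for_each_entry(sdesc, &list, node) {
            if (sdesc->desc.callback)
                sdesc->desc.callback(sdesc->desc.callback_param);
            last_cookie = sdesc->desc.cookie;
        }

        spin_lock_irqsave(&schan->lock, flags);
        list_splice_tail_init(&list, &schan->free);
        schan->chan.completed_cookie = last_cookie;
        spin_unlock_irqrestore(&schan->lock, flags);
    }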
406 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(txd->chan);
413 spin_lock_irqsave(&schan->lock, flags);
416 list_move_tail(&sdesc->node, &schan->queued);
420 spin_unlock_irqrestore(&schan->lock, flags);
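tx_submit (406-420) is the hand-off point between preparation and execution: under the lock the descriptor receives its cookie and migrates from prepared to queued, where issue_pending or the IRQ handler will pick it up. A sketch, using dma_cookie_assign from the private drivers/dma/dmaengine.h helpers:

    static dma_cookie_t tx_submit(struct dma_async_tx_descriptor *txd)
    {
        struct sirfsoc_dma_chan *schan = to_schan(txd->chan);
        struct sirfsoc_dma_desc *sdesc =
            container_of(txd, struct sirfsoc_dma_desc, desc);
        dma_cookie_t cookie;
        unsigned long flags;

        spin_lock_irqsave(&schan->lock, flags);
        cookie = dma_cookie_assign(txd);    /* allocate the tracking cookie */
        list_move_tail(&sdesc->node, &schan->queued);
        spin_unlock_irqrestore(&schan->lock, flags);

        return cookie;
    }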
428 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan);
435 spin_lock_irqsave(&schan->lock, flags);
436 schan->mode = (config->src_maxburst == 4 ? 1 : 0);
437 spin_unlock_irqrestore(&schan->lock, flags);
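The slave-config hook (428-437) collapses the whole dma_slave_config into a single mode flag: a source burst size of 4 selects mode 1, anything else mode 0, stored under the lock for the next execute. Modulo the surrounding validation, roughly:

    static int config_channel(struct dma_chan *chan,
                              struct dma_slave_config *config)
    {
        struct sirfsoc_dma_chan *schan = to_schan(chan);
        unsigned long flags;

        spin_lock_irqsave(&schan->lock, flags);
        schan->mode = (config->src_maxburst == 4) ? 1 : 0;
        spin_unlock_irqrestore(&schan->lock, flags);

        return 0;
    }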
444 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan);
445 struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(&schan->chan);
446 int cid = schan->chan.chan_id;
449 spin_lock_irqsave(&schan->lock, flags);
480 list_splice_tail_init(&schan->active, &schan->free);
481 list_splice_tail_init(&schan->queued, &schan->free);
483 spin_unlock_irqrestore(&schan->lock, flags);
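terminate_all (444-483) stops the channel in hardware (the register writes between 449 and 480 are elided in this listing) and then reclaims every descriptor, in-flight or pending, by splicing active and queued straight back onto free. Nothing reaches completed, so no client callback fires for the cancelled work. In outline:

    static int terminate_all(struct dma_chan *chan)
    {
        struct sirfsoc_dma_chan *schan = to_schan(chan);
        unsigned long flags;

        spin_lock_irqsave(&schan->lock, flags);
        /* ... mask the channel interrupt and halt the engine ... */
        list_splice_tail_init(&schan->active, &schan->free);
        list_splice_tail_init(&schan->queued, &schan->free);
        spin_unlock_irqrestore(&schan->lock, flags);

        return 0;
    }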
490 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan);
491 struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(&schan->chan);
492 int cid = schan->chan.chan_id;
495 spin_lock_irqsave(&schan->lock, flags);
517 spin_unlock_irqrestore(&schan->lock, flags);
524 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan);
525 struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(&schan->chan);
526 int cid = schan->chan.chan_id;
529 spin_lock_irqsave(&schan->lock, flags);
550 spin_unlock_irqrestore(&schan->lock, flags);
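The pause (490-517) and resume (524-550) hooks share one shape: resolve the channel id, then flip the channel's run state in a controller register inside the irqsave-locked section. Only the locking survives in this listing, so the body below is a stub and the register access it alludes to is hypothetical:

    static int pause_channel(struct dma_chan *chan)
    {
        struct sirfsoc_dma_chan *schan = to_schan(chan);
        struct sirfsoc_dma *sdma = to_sdma(chan);
        int cid = schan->chan.chan_id;
        unsigned long flags;

        spin_lock_irqsave(&schan->lock, flags);
        /* the real loop-control register write for channel cid lives
         * here; its offset and bit layout are not in this listing */
        (void)sdma;
        (void)cid;
        spin_unlock_irqrestore(&schan->lock, flags);

        return 0;
    }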
559 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan);
587 spin_lock_irqsave(&schan->lock, flags);
589 list_splice_tail_init(&descs, &schan->free);
590 spin_unlock_irqrestore(&schan->lock, flags);
598 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan);
604 spin_lock_irqsave(&schan->lock, flags);
607 BUG_ON(!list_empty(&schan->prepared));
608 BUG_ON(!list_empty(&schan->queued));
609 BUG_ON(!list_empty(&schan->active));
610 BUG_ON(!list_empty(&schan->completed));
613 list_splice_tail_init(&schan->free, &descs);
615 spin_unlock_irqrestore(&schan->lock, flags);
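Descriptor lifetime is bracketed by the two resource hooks. alloc_chan_resources (559-590) builds a batch of descriptors on a local list and splices them onto free in one locked operation; free_chan_resources (598-615) asserts with BUG_ON that no descriptor is still prepared, queued, active, or completed, then moves the free list out for release. A sketch of the allocation side; the pool size is an assumption:

    #include <linux/slab.h>

    #define SIRFSOC_DMA_DESCRIPTORS 16      /* assumed pool size per channel */

    static int alloc_descs(struct sirfsoc_dma_chan *schan)
    {
        LIST_HEAD(descs);
        struct sirfsoc_dma_desc *sdesc;
        unsigned long flags;
        int i;

        for (i = 0; i < SIRFSOC_DMA_DESCRIPTORS; i++) {
            sdesc = kzalloc(sizeof(*sdesc), GFP_KERNEL);
            if (!sdesc)
                break;
            dma_async_tx_descriptor_init(&sdesc->desc, &schan->chan);
            sdesc->desc.tx_submit = tx_submit;  /* from the sketch above */
            list_add_tail(&sdesc->node, &descs);
        }

        /* one locked splice publishes the whole batch to the free list */
        spin_lock_irqsave(&schan->lock, flags);
        list_splice_tail_init(&descs, &schan->free);
        spin_unlock_irqrestore(&schan->lock, flags);

        return i;   /* number of descriptors actually allocated */
    }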
627 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan);
630 spin_lock_irqsave(&schan->lock, flags);
632 if (list_empty(&schan->active) && !list_empty(&schan->queued))
633 sirfsoc_dma_execute(schan);
635 spin_unlock_irqrestore(&schan->lock, flags);
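issue_pending (627-635) only kicks the engine when it is idle: if active is empty but queued is not, it enters the execute path; if a transfer is already running, the IRQ handler will chain the next descriptor on completion, so nothing needs doing. Essentially:

    static void issue_pending(struct dma_chan *chan)
    {
        struct sirfsoc_dma_chan *schan = to_schan(chan);
        unsigned long flags;

        spin_lock_irqsave(&schan->lock, flags);
        if (list_empty(&schan->active) && !list_empty(&schan->queued))
            execute_next(schan);    /* engine idle: start the queue head */
        spin_unlock_irqrestore(&schan->lock, flags);
    }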
644 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan);
648 int cid = schan->chan.chan_id;
653 spin_lock_irqsave(&schan->lock, flags);
655 if (list_empty(&schan->active)) {
658 spin_unlock_irqrestore(&schan->lock, flags);
661 sdesc = list_first_entry(&schan->active, struct sirfsoc_dma_desc, node);
683 spin_unlock_irqrestore(&schan->lock, flags);
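tx_status (644-683) answers residue queries from the head of active: with the list empty it can only report the cookie state (655-658); otherwise it reads the hardware position for the first active descriptor (661 onward, elided here). A sketch with the residue math stubbed out:

    static enum dma_status tx_status(struct dma_chan *chan, dma_cookie_t cookie,
                                     struct dma_tx_state *txstate)
    {
        struct sirfsoc_dma_chan *schan = to_schan(chan);
        enum dma_status ret;
        unsigned long flags;
        u32 residue = 0;

        spin_lock_irqsave(&schan->lock, flags);
        ret = dma_cookie_status(chan, cookie, txstate);

        if (!list_empty(&schan->active)) {
            struct sirfsoc_dma_desc *sdesc =
                list_first_entry(&schan->active,
                                 struct sirfsoc_dma_desc, node);
            /* the driver derives the residue from sdesc and a hardware
             * transfer counter; that computation is elided here */
            (void)sdesc;
        }
        dma_set_residue(txstate, residue);
        spin_unlock_irqrestore(&schan->lock, flags);

        return ret;
    }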
693 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan);
704 spin_lock_irqsave(&schan->lock, iflags);
705 if (!list_empty(&schan->free)) {
706 sdesc = list_first_entry(&schan->free, struct sirfsoc_dma_desc,
710 spin_unlock_irqrestore(&schan->lock, iflags);
720 spin_lock_irqsave(&schan->lock, iflags);
740 list_add_tail(&sdesc->node, &schan->prepared);
746 spin_unlock_irqrestore(&schan->lock, iflags);
750 spin_unlock_irqrestore(&schan->lock, iflags);
761 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan);
780 spin_lock_irqsave(&schan->lock, iflags);
781 if (!list_empty(&schan->free)) {
782 sdesc = list_first_entry(&schan->free, struct sirfsoc_dma_desc,
786 spin_unlock_irqrestore(&schan->lock, iflags);
792 spin_lock_irqsave(&schan->lock, iflags);
798 list_add_tail(&sdesc->node, &schan->prepared);
799 spin_unlock_irqrestore(&schan->lock, iflags);
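Both prep hooks, interleaved (693-750) and cyclic (761-799), allocate from the same pool with the same three-phase locking: pop a descriptor from free under the lock, fill it in with the lock dropped, then retake the lock to file it on prepared; an empty pool makes the prep fail. The shared skeleton (the list_del after the pop is an assumption, since only the pop is visible in the listing):

    static struct dma_async_tx_descriptor *
    prep_common(struct sirfsoc_dma_chan *schan)
    {
        struct sirfsoc_dma_desc *sdesc = NULL;
        unsigned long iflags;

        spin_lock_irqsave(&schan->lock, iflags);
        if (!list_empty(&schan->free)) {
            sdesc = list_first_entry(&schan->free,
                                     struct sirfsoc_dma_desc, node);
            list_del(&sdesc->node);
        }
        spin_unlock_irqrestore(&schan->lock, iflags);

        if (!sdesc)
            return NULL;        /* descriptor pool exhausted */

        /* ... validate and record addresses/lengths, no lock held ... */

        spin_lock_irqsave(&schan->lock, iflags);
        list_add_tail(&sdesc->node, &schan->prepared);
        spin_unlock_irqrestore(&schan->lock, iflags);

        return &sdesc->desc;
    }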
845 struct sirfsoc_dma_chan *schan;
926 schan = &sdma->channels[i];
928 schan->chan.device = dma;
929 dma_cookie_init(&schan->chan);
931 INIT_LIST_HEAD(&schan->free);
932 INIT_LIST_HEAD(&schan->prepared);
933 INIT_LIST_HEAD(&schan->queued);
934 INIT_LIST_HEAD(&schan->active);
935 INIT_LIST_HEAD(&schan->completed);
937 spin_lock_init(&schan->lock);
938 list_add_tail(&schan->chan.device_node, &dma->channels);
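At probe time (845-938) each channel is wired into the dmaengine core: the embedded dma_chan gets its device pointer and a freshly initialized cookie, all five state lists start empty, the lock is initialized, and the channel joins the device's channel list. Per channel, this amounts to:

    static void init_channel(struct sirfsoc_dma *sdma, int i)
    {
        struct sirfsoc_dma_chan *schan = &sdma->channels[i];

        schan->chan.device = &sdma->dma;
        dma_cookie_init(&schan->chan);      /* cookie/completed_cookie reset */

        INIT_LIST_HEAD(&schan->free);
        INIT_LIST_HEAD(&schan->prepared);
        INIT_LIST_HEAD(&schan->queued);
        INIT_LIST_HEAD(&schan->active);
        INIT_LIST_HEAD(&schan->completed);

        spin_lock_init(&schan->lock);
        list_add_tail(&schan->chan.device_node, &sdma->dma.channels);
    }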
1013 struct sirfsoc_dma_chan *schan;
1042 schan = &sdma->channels[ch];
1043 if (list_empty(&schan->active))
1061 struct sirfsoc_dma_chan *schan;
1085 schan = &sdma->channels[ch];
1086 if (list_empty(&schan->active))
1088 sdesc = list_first_entry(&schan->active,
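Finally, the PM hooks (1013-1088) use the active list as the idleness test: suspend skips register save for channels with nothing in flight (1042-1043), and resume reprograms only busy channels, reloading the hardware from the descriptor at the head of active (1085-1088). The test reduces to:

    /* a channel with an empty active list has no hardware state worth
     * saving or restoring across suspend/resume */
    static bool channel_busy(struct sirfsoc_dma_chan *schan)
    {
        return !list_empty(&schan->active);
    }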