Lines Matching defs:hvc (definitions and uses of the struct mtk_hsdma_vchan pointer hvc)
495 struct mtk_hsdma_vchan *hvc)
500 lockdep_assert_held(&hvc->vc.lock);
502 list_for_each_entry_safe(vd, vd2, &hvc->vc.desc_issued, node) {
528 list_move_tail(&vd->node, &hvc->desc_hw_processing);
534 struct mtk_hsdma_vchan *hvc;
583 hvc = to_hsdma_vchan(cb->vd->tx.chan);
585 spin_lock(&hvc->vc.lock);
593 if (hvc->issue_synchronize &&
594 list_empty(&hvc->desc_hw_processing)) {
595 complete(&hvc->issue_completion);
596 hvc->issue_synchronize = false;
598 spin_unlock(&hvc->vc.lock);
633 hvc = &hsdma->vc[i];
634 spin_lock(&hvc->vc.lock);
635 mtk_hsdma_issue_vchan_pending(hsdma, hvc);
636 spin_unlock(&hvc->vc.lock);
662 struct mtk_hsdma_vchan *hvc = to_hsdma_vchan(c);
665 list_for_each_entry(vd, &hvc->desc_hw_processing, node)
669 list_for_each_entry(vd, &hvc->vc.desc_issued, node)
680 struct mtk_hsdma_vchan *hvc = to_hsdma_vchan(c);
691 spin_lock_irqsave(&hvc->vc.lock, flags);
693 spin_unlock_irqrestore(&hvc->vc.lock, flags);
708 struct mtk_hsdma_vchan *hvc = to_hsdma_vchan(c);
711 spin_lock_irqsave(&hvc->vc.lock, flags);
713 if (vchan_issue_pending(&hvc->vc))
714 mtk_hsdma_issue_vchan_pending(hsdma, hvc);
716 spin_unlock_irqrestore(&hvc->vc.lock, flags);
757 struct mtk_hsdma_vchan *hvc = to_hsdma_vchan(c);
765 spin_lock(&hvc->vc.lock);
766 if (!list_empty(&hvc->desc_hw_processing)) {
767 hvc->issue_synchronize = true;
770 spin_unlock(&hvc->vc.lock);
773 wait_for_completion(&hvc->issue_completion);
778 WARN_ONCE(!list_empty(&hvc->desc_hw_processing),
782 vchan_synchronize(&hvc->vc);
784 WARN_ONCE(!list_empty(&hvc->vc.desc_completed),
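
The matches above follow two recurring patterns around the per-channel object hvc: issuing pending descriptors under hvc->vc.lock and moving them onto desc_hw_processing (lines 495-528, 633-636, 708-716), and draining the channel by arming issue_synchronize and waiting on issue_completion until the hardware-processing list empties (lines 593-596, 757-784). The sketch below reconstructs those two patterns from the matches alone; the code appears to come from the MediaTek HSDMA dmaengine driver, but the struct layout, the *_sketch function names, and the include path for virt-dma.h are assumptions inferred from usage, not a verbatim copy of the driver.

/*
 * Minimal sketch of the two hvc patterns visible in the matches above.
 * Everything not shown in the listing (struct layout, function names,
 * include paths) is an assumption.
 */
#include <linux/bug.h>
#include <linux/completion.h>
#include <linux/dmaengine.h>
#include <linux/list.h>
#include <linux/spinlock.h>

#include "virt-dma.h"	/* struct virt_dma_chan, vchan_issue_pending(), ... */

struct mtk_hsdma_vchan {
	struct virt_dma_chan vc;		/* provides vc.lock, vc.desc_issued, ... */
	struct completion issue_completion;	/* signalled when the drain finishes */
	bool issue_synchronize;			/* set at line 767, cleared at line 596 */
	struct list_head desc_hw_processing;	/* descriptors owned by the hardware */
};

static inline struct mtk_hsdma_vchan *to_hsdma_vchan(struct dma_chan *chan)
{
	return container_of(chan, struct mtk_hsdma_vchan, vc.chan);
}

/* Issue path (lines 495-528, 708-716): move issued descriptors to the
 * hardware-processing list while holding vc.lock. */
static void hsdma_issue_vchan_pending_sketch(struct mtk_hsdma_vchan *hvc)
{
	struct virt_dma_desc *vd, *vd2;

	lockdep_assert_held(&hvc->vc.lock);

	list_for_each_entry_safe(vd, vd2, &hvc->vc.desc_issued, node) {
		/* ... program vd onto the hardware ring here ... */

		/* Track in-flight work so status/terminate can find it later. */
		list_move_tail(&vd->node, &hvc->desc_hw_processing);
	}
}

static void hsdma_issue_pending_sketch(struct dma_chan *c)
{
	struct mtk_hsdma_vchan *hvc = to_hsdma_vchan(c);
	unsigned long flags;

	spin_lock_irqsave(&hvc->vc.lock, flags);

	if (vchan_issue_pending(&hvc->vc))
		hsdma_issue_vchan_pending_sketch(hvc);

	spin_unlock_irqrestore(&hvc->vc.lock, flags);
}

/* Drain path (lines 757-784): arm issue_synchronize, then block until the
 * completion side reports that desc_hw_processing has emptied. */
static void hsdma_drain_vchan_sketch(struct mtk_hsdma_vchan *hvc)
{
	bool sync_needed = false;

	spin_lock(&hvc->vc.lock);
	if (!list_empty(&hvc->desc_hw_processing)) {
		hvc->issue_synchronize = true;
		sync_needed = true;
	}
	spin_unlock(&hvc->vc.lock);

	if (sync_needed)
		wait_for_completion(&hvc->issue_completion);

	WARN_ONCE(!list_empty(&hvc->desc_hw_processing),
		  "descriptors still under hardware processing\n");

	/* Flush any callbacks still queued in the virt-dma core. */
	vchan_synchronize(&hvc->vc);

	WARN_ONCE(!list_empty(&hvc->vc.desc_completed),
		  "descriptors still pending in the callback list\n");
}

/* Completion side (lines 593-596), called with hvc->vc.lock held: wake the
 * drainer once the hardware has consumed everything it was given. */
static void hsdma_signal_drain_sketch(struct mtk_hsdma_vchan *hvc)
{
	if (hvc->issue_synchronize && list_empty(&hvc->desc_hw_processing)) {
		complete(&hvc->issue_completion);
		hvc->issue_synchronize = false;
	}
}

Judging from lines 662-669 and 766-778, the extra desc_hw_processing list appears to exist so that the status and terminate paths can still locate descriptors the hardware is working on after they have left vc.desc_issued.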