Lines Matching defs:hvc

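Every match below touches the per-virtual-channel state of the MediaTek high-speed DMA (mtk-hsdma) driver, which is built on the dmaengine virt-dma helpers. From the fields these lines reference, the channel structure can be assumed to look roughly like the sketch below; this is an inference to make the matches readable, not the driver's authoritative definition. The later sketches reuse these includes and this assumed layout.

#include <linux/completion.h>
#include <linux/list.h>
#include <linux/spinlock.h>
#include "virt-dma.h"			/* dmaengine virt-dma helpers; path depends on location */

struct mtk_hsdma_device;		/* opaque here; owns the shared hardware ring */

/* Assumed layout, inferred only from the fields referenced in the matches. */
struct mtk_hsdma_vchan {
	struct virt_dma_chan vc;		/* vc.lock, vc.desc_issued, vc.desc_completed, ... */
	struct completion issue_completion;	/* signalled when desc_hw_processing drains */
	bool issue_synchronize;			/* a terminate path is waiting on the above */
	struct list_head desc_hw_processing;	/* descriptors currently owned by hardware */
};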
496 struct mtk_hsdma_vchan *hvc)
501 lockdep_assert_held(&hvc->vc.lock);
503 list_for_each_entry_safe(vd, vd2, &hvc->vc.desc_issued, node) {
529 list_move_tail(&vd->node, &hvc->desc_hw_processing);
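Lines 496-529 fall in mtk_hsdma_issue_vchan_pending(). The pattern they show, as a minimal sketch assuming the layout above and omitting the ring programming the real function presumably performs between the two list operations:

/* Sketch of lines 496-529: with vc.lock held, every descriptor the
 * client has issued is handed to hardware and then tracked on
 * desc_hw_processing until its completion interrupt reclaims it. */
static void sketch_issue_vchan_pending(struct mtk_hsdma_device *hsdma,
				       struct mtk_hsdma_vchan *hvc)
{
	struct virt_dma_desc *vd, *vd2;

	lockdep_assert_held(&hvc->vc.lock);	/* caller holds the vchan lock */

	list_for_each_entry_safe(vd, vd2, &hvc->vc.desc_issued, node) {
		/* ... write vd into hsdma's hardware ring (omitted) ... */
		list_move_tail(&vd->node, &hvc->desc_hw_processing);
	}
}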
535 struct mtk_hsdma_vchan *hvc;
584 hvc = to_hsdma_vchan(cb->vd->tx.chan);
586 spin_lock(&hvc->vc.lock);
594 if (hvc->issue_synchronize &&
595 list_empty(&hvc->desc_hw_processing)) {
596 complete(&hvc->issue_completion);
597 hvc->issue_synchronize = false;
599 spin_unlock(&hvc->vc.lock);
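Lines 535-599 are the reclaim (interrupt) side of the synchronize handshake: once a channel has nothing left on desc_hw_processing and a terminate path has flagged issue_synchronize, that path is woken through issue_completion. A sketch of lines 584-599 only, with the per-descriptor bookkeeping omitted and the chan-to-vchan conversion written out in place of the driver's to_hsdma_vchan() helper:

/* Sketch of lines 584-599: after reclaiming one completed descriptor,
 * wake a terminate path waiting for this channel's hardware list to drain. */
static void sketch_wake_synchronize(struct virt_dma_desc *vd)
{
	struct mtk_hsdma_vchan *hvc;

	/* equivalent of the driver's to_hsdma_vchan(cb->vd->tx.chan) */
	hvc = container_of(vd->tx.chan, struct mtk_hsdma_vchan, vc.chan);

	spin_lock(&hvc->vc.lock);

	/* ... complete vd and release its ring slot (omitted) ... */

	if (hvc->issue_synchronize &&
	    list_empty(&hvc->desc_hw_processing)) {
		complete(&hvc->issue_completion);
		hvc->issue_synchronize = false;
	}

	spin_unlock(&hvc->vc.lock);
}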
634 hvc = &hsdma->vc[i];
635 spin_lock(&hvc->vc.lock);
636 mtk_hsdma_issue_vchan_pending(hsdma, hvc);
637 spin_unlock(&hvc->vc.lock);
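Lines 634-637 walk every virtual channel and re-run the issue step, presumably after ring slots have been released in the same reclaim path. A sketch under that assumption; the array name vc matches line 634, but since mtk_hsdma_device is opaque in these sketches the array and its length are passed in explicitly:

/* Sketch of lines 634-637: once ring space is free again, push any
 * still-issued descriptors of every virtual channel. */
static void sketch_refill_all_vchans(struct mtk_hsdma_device *hsdma,
				     struct mtk_hsdma_vchan *vchans,
				     unsigned int nr_vchans)
{
	struct mtk_hsdma_vchan *hvc;
	unsigned int i;

	for (i = 0; i < nr_vchans; i++) {
		hvc = &vchans[i];
		spin_lock(&hvc->vc.lock);
		sketch_issue_vchan_pending(hsdma, hvc);
		spin_unlock(&hvc->vc.lock);
	}
}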
663 struct mtk_hsdma_vchan *hvc = to_hsdma_vchan(c);
666 list_for_each_entry(vd, &hvc->desc_hw_processing, node)
670 list_for_each_entry(vd, &hvc->vc.desc_issued, node)
681 struct mtk_hsdma_vchan *hvc = to_hsdma_vchan(c);
692 spin_lock_irqsave(&hvc->vc.lock, flags);
694 spin_unlock_irqrestore(&hvc->vc.lock, flags);
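Lines 663-694 cover the status path: a lookup that scans descriptors owned by hardware first and then those only issued, plus a caller at 681-694 that wraps it in spin_lock_irqsave() on vc.lock since tx_status may run in any context. A sketch of such a cookie lookup; the cookie comparison is an assumption, as the matches only show which lists are walked:

/* Sketch of lines 663-670: locate the descriptor backing a cookie,
 * checking in-flight work before work that has not reached hardware. */
static struct virt_dma_desc *sketch_find_desc(struct mtk_hsdma_vchan *hvc,
					      dma_cookie_t cookie)
{
	struct virt_dma_desc *vd;

	lockdep_assert_held(&hvc->vc.lock);	/* caller holds vc.lock, IRQs saved */

	list_for_each_entry(vd, &hvc->desc_hw_processing, node)
		if (vd->tx.cookie == cookie)
			return vd;

	list_for_each_entry(vd, &hvc->vc.desc_issued, node)
		if (vd->tx.cookie == cookie)
			return vd;

	return NULL;
}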
709 struct mtk_hsdma_vchan *hvc = to_hsdma_vchan(c);
712 spin_lock_irqsave(&hvc->vc.lock, flags);
714 if (vchan_issue_pending(&hvc->vc))
715 mtk_hsdma_issue_vchan_pending(hsdma, hvc);
717 spin_unlock_irqrestore(&hvc->vc.lock, flags);
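Lines 709-717 are the dmaengine issue_pending callback in its standard virt-dma form: vchan_issue_pending() splices the submitted descriptors onto desc_issued under vc.lock, and only if that returned work is the hardware kicked. In the driver the callback takes only the dma_chan and derives the device from it, so passing hsdma explicitly here is a simplification:

/* Sketch of lines 709-717: move newly submitted descriptors to the
 * issued list, then hand them to hardware, all under vc.lock. */
static void sketch_issue_pending(struct dma_chan *c, struct mtk_hsdma_device *hsdma)
{
	struct mtk_hsdma_vchan *hvc = container_of(c, struct mtk_hsdma_vchan, vc.chan);
	unsigned long flags;

	spin_lock_irqsave(&hvc->vc.lock, flags);

	if (vchan_issue_pending(&hvc->vc))
		sketch_issue_vchan_pending(hsdma, hvc);

	spin_unlock_irqrestore(&hvc->vc.lock, flags);
}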
758 struct mtk_hsdma_vchan *hvc = to_hsdma_vchan(c);
766 spin_lock(&hvc->vc.lock);
767 if (!list_empty(&hvc->desc_hw_processing)) {
768 hvc->issue_synchronize = true;
771 spin_unlock(&hvc->vc.lock);
774 wait_for_completion(&hvc->issue_completion);
779 WARN_ONCE(!list_empty(&hvc->desc_hw_processing),
783 vchan_synchronize(&hvc->vc);
785 WARN_ONCE(!list_empty(&hvc->vc.desc_completed),
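Lines 758-785 are the teardown side of the handshake begun at 594-597: if descriptors are still in flight, the channel sets issue_synchronize, drops the lock, and sleeps on issue_completion until the reclaim path signals it; the WARN_ONCE calls then check that the hardware-processing and completed lists really drained. A sketch of that flow; the sync_needed local is an assumption to keep the wait conditional, the WARN messages are placeholders, and the freeing of not-yet-issued descriptors is omitted:

/* Sketch of lines 758-785: wait for the reclaim path to drain every
 * in-flight descriptor, then let virt-dma finish the channel teardown. */
static void sketch_free_active_desc(struct dma_chan *c)
{
	struct mtk_hsdma_vchan *hvc = container_of(c, struct mtk_hsdma_vchan, vc.chan);
	bool sync_needed = false;

	spin_lock(&hvc->vc.lock);
	if (!list_empty(&hvc->desc_hw_processing)) {
		hvc->issue_synchronize = true;	/* ask the reclaim path to complete() us */
		sync_needed = true;
	}
	spin_unlock(&hvc->vc.lock);

	if (sync_needed)
		wait_for_completion(&hvc->issue_completion);

	WARN_ONCE(!list_empty(&hvc->desc_hw_processing),
		  "descriptors still on desc_hw_processing after sync\n");

	/* virt-dma helper: quiesce the channel's tasklet work. */
	vchan_synchronize(&hvc->vc);

	WARN_ONCE(!list_empty(&hvc->vc.desc_completed),
		  "descriptors still on desc_completed after synchronize\n");
}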