Lines matching defs:s3cchan (definitions and uses of the s3cchan virtual-channel pointer in the s3c24xx dmaengine driver)
258 static bool s3c24xx_dma_phy_valid(struct s3c24xx_dma_chan *s3cchan,
261 struct s3c24xx_dma_engine *s3cdma = s3cchan->host;
263 struct s3c24xx_dma_channel *cdata = &pdata->channels[s3cchan->id];
267 if (!s3cchan->slave)
286 struct s3c24xx_dma_phy *s3c24xx_dma_get_phy(struct s3c24xx_dma_chan *s3cchan)
288 struct s3c24xx_dma_engine *s3cdma = s3cchan->host;
300 if (!s3c24xx_dma_phy_valid(s3cchan, phy))
306 phy->serving = s3cchan;
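The hits at 258-306 cover physical-channel selection: s3c24xx_dma_phy_valid() accepts any phy for memcpy channels (no ->slave), and s3c24xx_dma_get_phy() claims a free phy by setting phy->serving. A simplified sketch of that pattern; phy->lock, s3cdma->phy_chans and pdata->num_phy_channels are assumptions not visible in the fragments, and clock handling is omitted:

static struct s3c24xx_dma_phy *
example_get_phy(struct s3c24xx_dma_chan *s3cchan)
{
	struct s3c24xx_dma_engine *s3cdma = s3cchan->host;
	struct s3c24xx_dma_phy *phy;
	unsigned long flags;
	int i;

	for (i = 0; i < s3cdma->pdata->num_phy_channels; i++) {
		phy = &s3cdma->phy_chans[i];		/* assumed array of phys */

		spin_lock_irqsave(&phy->lock, flags);

		/* skip phys already serving a channel or not wired to this
		 * request source; memcpy channels accept any free phy */
		if (!phy->serving && s3c24xx_dma_phy_valid(s3cchan, phy)) {
			phy->serving = s3cchan;
			spin_unlock_irqrestore(&phy->lock, flags);
			return phy;
		}

		spin_unlock_irqrestore(&phy->lock, flags);
	}

	return NULL;	/* all physical channels busy */
}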
370 static u32 s3c24xx_dma_getbytes_chan(struct s3c24xx_dma_chan *s3cchan)
372 struct s3c24xx_dma_phy *phy = s3cchan->phy;
373 struct s3c24xx_txd *txd = s3cchan->at;
382 struct s3c24xx_dma_chan *s3cchan = to_s3c24xx_dma_chan(chan);
391 spin_lock_irqsave(&s3cchan->vc.lock, flags);
393 if (!s3cchan->slave) {
398 s3cchan->cfg = *config;
401 spin_unlock_irqrestore(&s3cchan->vc.lock, flags);
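Lines 382-401 are the dmaengine device_config callback: a struct dma_slave_config is only accepted for slave channels and is copied into s3cchan->cfg under the virtual-channel lock. A hedged reconstruction from the fragments above, with error handling simplified:

static int example_set_runtime_config(struct dma_chan *chan,
				      struct dma_slave_config *config)
{
	struct s3c24xx_dma_chan *s3cchan = to_s3c24xx_dma_chan(chan);
	unsigned long flags;
	int ret = 0;

	spin_lock_irqsave(&s3cchan->vc.lock, flags);

	/* a runtime config only makes sense for slave (peripheral) channels;
	 * memcpy channels have nothing to configure */
	if (!s3cchan->slave) {
		ret = -EINVAL;
		goto out;
	}

	s3cchan->cfg = *config;

out:
	spin_unlock_irqrestore(&s3cchan->vc.lock, flags);
	return ret;
}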
439 static void s3c24xx_dma_start_next_sg(struct s3c24xx_dma_chan *s3cchan,
442 struct s3c24xx_dma_engine *s3cdma = s3cchan->host;
443 struct s3c24xx_dma_phy *phy = s3cchan->phy;
462 if (s3cchan->slave) {
464 &pdata->channels[s3cchan->id];
494 if (!s3cchan->slave)
503 static void s3c24xx_dma_start_next_txd(struct s3c24xx_dma_chan *s3cchan)
505 struct s3c24xx_dma_phy *phy = s3cchan->phy;
506 struct virt_dma_desc *vd = vchan_next_desc(&s3cchan->vc);
511 s3cchan->at = txd;
519 s3c24xx_dma_start_next_sg(s3cchan, txd);
527 static void s3c24xx_dma_phy_alloc_and_start(struct s3c24xx_dma_chan *s3cchan)
529 struct s3c24xx_dma_engine *s3cdma = s3cchan->host;
532 phy = s3c24xx_dma_get_phy(s3cchan);
535 s3cchan->name);
536 s3cchan->state = S3C24XX_DMA_CHAN_WAITING;
541 phy->id, s3cchan->name);
543 s3cchan->phy = phy;
544 s3cchan->state = S3C24XX_DMA_CHAN_RUNNING;
546 s3c24xx_dma_start_next_txd(s3cchan);
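Lines 503-546 show the descriptor start path: vchan_next_desc() hands back the next issued virt_dma_desc, which becomes the active descriptor s3cchan->at, and phy_alloc_and_start() either grabs a physical channel or parks the virtual channel in the WAITING state. A condensed sketch; the to_s3c24xx_txd() container_of helper and the list_del() dequeue are assumed:

static void example_start_next_txd(struct s3c24xx_dma_chan *s3cchan)
{
	struct virt_dma_desc *vd = vchan_next_desc(&s3cchan->vc);
	struct s3c24xx_txd *txd = to_s3c24xx_txd(&vd->tx);

	list_del(&vd->node);		/* take it off the issued list */
	s3cchan->at = txd;		/* this is now the active descriptor */

	s3c24xx_dma_start_next_sg(s3cchan, txd);
}

static void example_phy_alloc_and_start(struct s3c24xx_dma_chan *s3cchan)
{
	struct s3c24xx_dma_phy *phy = s3c24xx_dma_get_phy(s3cchan);

	if (!phy) {
		/* no free physical channel: wait for one to be released */
		s3cchan->state = S3C24XX_DMA_CHAN_WAITING;
		return;
	}

	s3cchan->phy = phy;
	s3cchan->state = S3C24XX_DMA_CHAN_RUNNING;
	s3c24xx_dma_start_next_txd(s3cchan);
}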
550 struct s3c24xx_dma_chan *s3cchan)
552 struct s3c24xx_dma_engine *s3cdma = s3cchan->host;
555 phy->id, s3cchan->name);
562 phy->serving = s3cchan;
563 s3cchan->phy = phy;
564 s3cchan->state = S3C24XX_DMA_CHAN_RUNNING;
565 s3c24xx_dma_start_next_txd(s3cchan);
572 static void s3c24xx_dma_phy_free(struct s3c24xx_dma_chan *s3cchan)
574 struct s3c24xx_dma_engine *s3cdma = s3cchan->host;
591 s3c24xx_dma_phy_valid(p, s3cchan->phy)) {
598 s3c24xx_dma_terminate_phy(s3cchan->phy);
611 s3c24xx_dma_phy_reassign_start(s3cchan->phy, next);
619 s3c24xx_dma_put_phy(s3cchan->phy);
622 s3cchan->phy = NULL;
623 s3cchan->state = S3C24XX_DMA_CHAN_IDLE;
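Lines 572-623 are the interesting part of the scheduling: when a channel releases its phy, the driver first looks for another virtual channel that is WAITING and for which this phy is valid, and hands the phy over directly (phy_reassign_start) rather than releasing it; only if nobody is waiting does the phy go back to the free pool. Roughly, with the per-channel locking of the target channel and the second (memcpy) channel list omitted for brevity:

static void example_phy_free(struct s3c24xx_dma_chan *s3cchan)
{
	struct s3c24xx_dma_engine *s3cdma = s3cchan->host;
	struct s3c24xx_dma_chan *p, *next = NULL;

	/* find a waiting virtual channel this phy may serve */
	list_for_each_entry(p, &s3cdma->slave.channels, vc.chan.device_node) {
		if (p->state == S3C24XX_DMA_CHAN_WAITING &&
		    s3c24xx_dma_phy_valid(p, s3cchan->phy)) {
			next = p;
			break;
		}
	}

	/* stop the hardware before reusing or releasing the channel */
	s3c24xx_dma_terminate_phy(s3cchan->phy);

	if (next)
		s3c24xx_dma_phy_reassign_start(s3cchan->phy, next);
	else
		s3c24xx_dma_put_phy(s3cchan->phy);

	s3cchan->phy = NULL;
	s3cchan->state = S3C24XX_DMA_CHAN_IDLE;
}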
629 struct s3c24xx_dma_chan *s3cchan = to_s3c24xx_dma_chan(vd->tx.chan);
631 if (!s3cchan->slave)
640 struct s3c24xx_dma_chan *s3cchan = phy->serving;
652 if (unlikely(!s3cchan)) {
661 spin_lock(&s3cchan->vc.lock);
662 txd = s3cchan->at;
669 s3c24xx_dma_start_next_sg(s3cchan, txd);
671 s3cchan->at = NULL;
678 if (vchan_next_desc(&s3cchan->vc))
679 s3c24xx_dma_start_next_txd(s3cchan);
681 s3c24xx_dma_phy_free(s3cchan);
687 s3c24xx_dma_start_next_sg(s3cchan, txd);
690 spin_unlock(&s3cchan->vc.lock);
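Lines 640-690 are the per-phy interrupt handler: phy->serving points back at the owning virtual channel, and under vc.lock the handler either programs the next scatterlist entry or, for a finished descriptor, completes the cookie and then starts the next issued descriptor or releases the phy. A trimmed sketch of that control flow; the txd->at / txd->dsg_list scatterlist bookkeeping is an assumption, and cyclic handling plus the hardware interrupt acknowledge are left out:

static irqreturn_t example_dma_irq(int irq, void *data)
{
	struct s3c24xx_dma_phy *phy = data;
	struct s3c24xx_dma_chan *s3cchan = phy->serving;
	struct s3c24xx_txd *txd;

	if (unlikely(!s3cchan))
		return IRQ_HANDLED;	/* spurious: nobody owns this phy */

	spin_lock(&s3cchan->vc.lock);
	txd = s3cchan->at;

	if (txd && !list_is_last(txd->at, &txd->dsg_list)) {
		/* more scatterlist entries: move on to the next one */
		txd->at = txd->at->next;
		s3c24xx_dma_start_next_sg(s3cchan, txd);
	} else if (txd) {
		/* descriptor finished: complete it ... */
		s3cchan->at = NULL;
		vchan_cookie_complete(&txd->vd);

		/* ... and either start the next one or give up the phy */
		if (vchan_next_desc(&s3cchan->vc))
			s3c24xx_dma_start_next_txd(s3cchan);
		else
			s3c24xx_dma_phy_free(s3cchan);
	}
	spin_unlock(&s3cchan->vc.lock);

	return IRQ_HANDLED;
}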
701 struct s3c24xx_dma_chan *s3cchan = to_s3c24xx_dma_chan(chan);
702 struct s3c24xx_dma_engine *s3cdma = s3cchan->host;
707 spin_lock_irqsave(&s3cchan->vc.lock, flags);
709 if (!s3cchan->phy && !s3cchan->at) {
711 s3cchan->id);
716 s3cchan->state = S3C24XX_DMA_CHAN_IDLE;
719 if (s3cchan->phy)
720 s3c24xx_dma_phy_free(s3cchan);
723 if (s3cchan->at) {
724 vchan_terminate_vdesc(&s3cchan->at->vd);
725 s3cchan->at = NULL;
730 vchan_get_all_descriptors(&s3cchan->vc, &head);
732 spin_unlock_irqrestore(&s3cchan->vc.lock, flags);
734 vchan_dma_desc_free_list(&s3cchan->vc, &head);
739 spin_unlock_irqrestore(&s3cchan->vc.lock, flags);
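Lines 701-739 are device_terminate_all. The ordering matters: the phy is released first, the in-flight descriptor is detached with vchan_terminate_vdesc() (its memory is reclaimed later via vchan_synchronize(), see line 748), and the still-queued descriptors are collected under the lock but freed after it is dropped. Sketch under the same naming assumptions:

static int example_terminate_all(struct dma_chan *chan)
{
	struct s3c24xx_dma_chan *s3cchan = to_s3c24xx_dma_chan(chan);
	unsigned long flags;
	LIST_HEAD(head);

	spin_lock_irqsave(&s3cchan->vc.lock, flags);

	/* nothing running and nothing active: nothing to do */
	if (!s3cchan->phy && !s3cchan->at) {
		spin_unlock_irqrestore(&s3cchan->vc.lock, flags);
		return 0;
	}

	s3cchan->state = S3C24XX_DMA_CHAN_IDLE;

	if (s3cchan->phy)
		s3c24xx_dma_phy_free(s3cchan);

	/* detach the in-flight descriptor; it is freed in synchronize() */
	if (s3cchan->at) {
		vchan_terminate_vdesc(&s3cchan->at->vd);
		s3cchan->at = NULL;
	}

	/* dequeue everything still pending and free it outside the lock */
	vchan_get_all_descriptors(&s3cchan->vc, &head);
	spin_unlock_irqrestore(&s3cchan->vc.lock, flags);

	vchan_dma_desc_free_list(&s3cchan->vc, &head);
	return 0;
}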
746 struct s3c24xx_dma_chan *s3cchan = to_s3c24xx_dma_chan(chan);
748 vchan_synchronize(&s3cchan->vc);
760 struct s3c24xx_dma_chan *s3cchan = to_s3c24xx_dma_chan(chan);
768 spin_lock_irqsave(&s3cchan->vc.lock, flags);
776 spin_unlock_irqrestore(&s3cchan->vc.lock, flags);
780 vd = vchan_find_desc(&s3cchan->vc, cookie);
792 txd = s3cchan->at;
798 bytes += s3c24xx_dma_getbytes_chan(s3cchan);
800 spin_unlock_irqrestore(&s3cchan->vc.lock, flags);
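Lines 760-800 are device_tx_status. Residue reporting distinguishes a descriptor still sitting on the queue (found via vchan_find_desc(), the whole size is outstanding) from the currently active descriptor, where the bytes left in the hardware are read via s3c24xx_dma_getbytes_chan(). A hedged sketch; txd->size and the per-sg accounting of the real code are assumptions not visible in the fragments:

static enum dma_status example_tx_status(struct dma_chan *chan,
					 dma_cookie_t cookie,
					 struct dma_tx_state *txstate)
{
	struct s3c24xx_dma_chan *s3cchan = to_s3c24xx_dma_chan(chan);
	struct virt_dma_desc *vd;
	unsigned long flags;
	enum dma_status ret;
	size_t bytes = 0;

	spin_lock_irqsave(&s3cchan->vc.lock, flags);
	ret = dma_cookie_status(chan, cookie, txstate);

	/* already complete: nothing left to report */
	if (ret == DMA_COMPLETE) {
		spin_unlock_irqrestore(&s3cchan->vc.lock, flags);
		return ret;
	}

	vd = vchan_find_desc(&s3cchan->vc, cookie);
	if (vd) {
		/* still queued: the whole descriptor is outstanding */
		bytes = to_s3c24xx_txd(&vd->tx)->size;
	} else if (s3cchan->at && s3cchan->at->vd.tx.cookie == cookie) {
		/* currently running: ask the hardware what is left */
		bytes = s3c24xx_dma_getbytes_chan(s3cchan);
	}
	spin_unlock_irqrestore(&s3cchan->vc.lock, flags);

	dma_set_residue(txstate, bytes);
	return ret;
}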
819 struct s3c24xx_dma_chan *s3cchan = to_s3c24xx_dma_chan(chan);
820 struct s3c24xx_dma_engine *s3cdma = s3cchan->host;
826 len, s3cchan->name);
874 return vchan_tx_prep(&s3cchan->vc, &txd->vd, flags);
881 struct s3c24xx_dma_chan *s3cchan = to_s3c24xx_dma_chan(chan);
882 struct s3c24xx_dma_engine *s3cdma = s3cchan->host;
884 struct s3c24xx_dma_channel *cdata = &pdata->channels[s3cchan->id];
894 size, period, s3cchan->name);
938 slave_addr = s3cchan->cfg.dst_addr;
939 txd->width = s3cchan->cfg.dst_addr_width;
944 slave_addr = s3cchan->cfg.src_addr;
945 txd->width = s3cchan->cfg.src_addr_width;
971 return vchan_tx_prep(&s3cchan->vc, &txd->vd, flags);
979 struct s3c24xx_dma_chan *s3cchan = to_s3c24xx_dma_chan(chan);
980 struct s3c24xx_dma_engine *s3cdma = s3cchan->host;
982 struct s3c24xx_dma_channel *cdata = &pdata->channels[s3cchan->id];
991 sg_dma_len(sgl), s3cchan->name);
1027 slave_addr = s3cchan->cfg.dst_addr;
1028 txd->width = s3cchan->cfg.dst_addr_width;
1033 slave_addr = s3cchan->cfg.src_addr;
1034 txd->width = s3cchan->cfg.src_addr_width;
1060 return vchan_tx_prep(&s3cchan->vc, &txd->vd, flags);
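In both the cyclic (938-945) and slave-sg (1027-1034) prep paths the fragments show the same pattern: the peripheral-side address and bus width come from the dma_slave_config the client installed earlier through device_config, selected by transfer direction. A hypothetical helper factoring out that selection (not a function in the driver):

static int example_pick_slave_params(struct s3c24xx_dma_chan *s3cchan,
				     enum dma_transfer_direction direction,
				     dma_addr_t *slave_addr,
				     enum dma_slave_buswidth *width)
{
	if (direction == DMA_MEM_TO_DEV) {
		*slave_addr = s3cchan->cfg.dst_addr;
		*width = s3cchan->cfg.dst_addr_width;
	} else if (direction == DMA_DEV_TO_MEM) {
		*slave_addr = s3cchan->cfg.src_addr;
		*width = s3cchan->cfg.src_addr_width;
	} else {
		return -EINVAL;	/* memcpy has no peripheral side */
	}
	return 0;
}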
1069 struct s3c24xx_dma_chan *s3cchan = to_s3c24xx_dma_chan(chan);
1072 spin_lock_irqsave(&s3cchan->vc.lock, flags);
1073 if (vchan_issue_pending(&s3cchan->vc)) {
1074 if (!s3cchan->phy && s3cchan->state != S3C24XX_DMA_CHAN_WAITING)
1075 s3c24xx_dma_phy_alloc_and_start(s3cchan);
1077 spin_unlock_irqrestore(&s3cchan->vc.lock, flags);
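Lines 1069-1077 show device_issue_pending, which ties the pieces together: vchan_issue_pending() moves submitted descriptors onto the issued list, and a physical channel is only requested when the channel neither owns one nor is already queued as WAITING. Reconstructed from the fragments:

static void example_issue_pending(struct dma_chan *chan)
{
	struct s3c24xx_dma_chan *s3cchan = to_s3c24xx_dma_chan(chan);
	unsigned long flags;

	spin_lock_irqsave(&s3cchan->vc.lock, flags);
	if (vchan_issue_pending(&s3cchan->vc)) {
		/* don't race for a phy we already own or already wait for */
		if (!s3cchan->phy && s3cchan->state != S3C24XX_DMA_CHAN_WAITING)
			s3c24xx_dma_phy_alloc_and_start(s3cchan);
	}
	spin_unlock_irqrestore(&s3cchan->vc.lock, flags);
}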
1415 struct s3c24xx_dma_chan *s3cchan;
1420 s3cchan = to_s3c24xx_dma_chan(chan);
1422 return s3cchan->id == (uintptr_t)param;
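Lines 1415-1422 are a dma_filter_fn: it matches a channel by comparing s3cchan->id against the integer passed as the filter parameter. A client would pass that id through dma_request_channel(); a hedged usage sketch, in which the filter name s3c24xx_dma_filter and the chan_id value are assumptions:

/* Hypothetical client-side usage; s3c24xx_dma_filter and chan_id are assumed. */
static struct dma_chan *example_request_s3c_chan(unsigned int chan_id)
{
	dma_cap_mask_t mask;

	dma_cap_zero(mask);
	dma_cap_set(DMA_SLAVE, mask);

	/* the filter compares s3cchan->id against (uintptr_t)param */
	return dma_request_channel(mask, s3c24xx_dma_filter,
				   (void *)(uintptr_t)chan_id);
}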