Lines matching references to dma_desc (a short usage sketch follows the listing):

239 struct tegra_dma_desc *dma_desc;
473 struct tegra_dma_desc *dma_desc = tdc->dma_desc;
478 dma_desc->sg_idx++;
481 if (dma_desc->sg_idx == dma_desc->sg_count)
482 dma_desc->sg_idx = 0;
493 ch_regs = &dma_desc->sg_req[dma_desc->sg_idx].ch_regs;
507 struct tegra_dma_desc *dma_desc = tdc->dma_desc;
511 if (!dma_desc) {
516 dma_desc = vd_to_tegra_dma_desc(vdesc);
518 dma_desc->tdc = tdc;
519 tdc->dma_desc = dma_desc;
524 ch_regs = &dma_desc->sg_req[dma_desc->sg_idx].ch_regs;
543 vchan_cookie_complete(&tdc->dma_desc->vd);
546 tdc->dma_desc = NULL;
593 struct tegra_dma_desc *dma_desc = tdc->dma_desc;
613 if (!dma_desc)
616 sg_req = dma_desc->sg_req;
617 dma_desc->bytes_xfer += sg_req[dma_desc->sg_idx].len;
619 if (dma_desc->cyclic) {
620 vchan_cyclic_callback(&dma_desc->vd);
623 dma_desc->sg_idx++;
624 if (dma_desc->sg_idx == dma_desc->sg_count)
640 if (tdc->dma_desc)
654 if (tdc->dma_desc && tdc->dma_desc->cyclic)
707 if (tdc->dma_desc) {
714 vchan_terminate_vdesc(&tdc->dma_desc->vd);
716 tdc->dma_desc = NULL;
730 struct tegra_dma_desc *dma_desc = tdc->dma_desc;
731 struct tegra_dma_sg_req *sg_req = dma_desc->sg_req;
746 bytes_xfer = dma_desc->bytes_xfer +
747 sg_req[dma_desc->sg_idx].len - (wcount * 4);
749 residual = dma_desc->bytes_req - (bytes_xfer % dma_desc->bytes_req);
759 struct tegra_dma_desc *dma_desc;
772 dma_desc = vd_to_tegra_dma_desc(vd);
773 residual = dma_desc->bytes_req;
775 } else if (tdc->dma_desc && tdc->dma_desc->vd.tx.cookie == cookie) {
862 struct tegra_dma_desc *dma_desc;
898 dma_desc = kzalloc(struct_size(dma_desc, sg_req, 1), GFP_NOWAIT);
899 if (!dma_desc)
902 dma_desc->bytes_req = len;
903 dma_desc->sg_count = 1;
904 sg_req = dma_desc->sg_req;
918 dma_desc->cyclic = false;
919 return vchan_tx_prep(&tdc->vc, &dma_desc->vd, flags);
928 struct tegra_dma_desc *dma_desc;
967 dma_desc = kzalloc(struct_size(dma_desc, sg_req, 1), GFP_NOWAIT);
968 if (!dma_desc)
971 dma_desc->bytes_req = len;
972 dma_desc->sg_count = 1;
973 sg_req = dma_desc->sg_req;
988 dma_desc->cyclic = false;
989 return vchan_tx_prep(&tdc->vc, &dma_desc->vd, flags);
1002 struct tegra_dma_desc *dma_desc;
1060 dma_desc = kzalloc(struct_size(dma_desc, sg_req, sg_len), GFP_NOWAIT);
1061 if (!dma_desc)
1064 dma_desc->sg_count = sg_len;
1065 sg_req = dma_desc->sg_req;
1078 kfree(dma_desc);
1083 dma_desc->bytes_req += len;
1108 dma_desc->cyclic = false;
1109 return vchan_tx_prep(&tdc->vc, &dma_desc->vd, flags);
1121 struct tegra_dma_desc *dma_desc;
1195 dma_desc = kzalloc(struct_size(dma_desc, sg_req, period_count),
1197 if (!dma_desc)
1200 dma_desc->bytes_req = buf_len;
1201 dma_desc->sg_count = period_count;
1202 sg_req = dma_desc->sg_req;
1231 dma_desc->cyclic = true;
1233 return vchan_tx_prep(&tdc->vc, &dma_desc->vd, flags);
1497 if (tdc->dma_desc) {
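
The listing traces one descriptor through allocation (kzalloc() with struct_size() for the trailing sg_req array), per-sg bookkeeping in the interrupt path (sg_idx advance and wrap for cyclic transfers), and the residue calculation. The stand-alone C sketch below models only that bookkeeping outside the kernel: the struct and field names mirror the listing's tegra_dma_desc, while the helper names (desc_alloc, desc_advance_sg, desc_residual) and the sample sizes are hypothetical and illustrate the arithmetic, not the driver's actual API.

    /*
     * Minimal user-space sketch of the dma_desc bookkeeping seen in the
     * listing above. Hypothetical helpers; not driver code.
     */
    #include <stdbool.h>
    #include <stdio.h>
    #include <stdlib.h>

    struct sg_req {
            unsigned int len;               /* bytes covered by this sg entry */
    };

    struct dma_desc {
            unsigned int bytes_req;         /* total bytes requested */
            unsigned int bytes_xfer;        /* bytes completed so far */
            unsigned int sg_idx;            /* sg entry currently in flight */
            unsigned int sg_count;          /* number of sg entries */
            bool cyclic;
            struct sg_req sg_req[];         /* flexible array, as with struct_size() */
    };

    /* Allocate a descriptor with room for sg_count trailing sg_req entries. */
    static struct dma_desc *desc_alloc(unsigned int sg_count)
    {
            struct dma_desc *d;

            d = calloc(1, sizeof(*d) + sg_count * sizeof(d->sg_req[0]));
            if (d)
                    d->sg_count = sg_count;
            return d;
    }

    /* Account the finished sg entry and advance, wrapping at sg_count. */
    static void desc_advance_sg(struct dma_desc *d)
    {
            d->bytes_xfer += d->sg_req[d->sg_idx].len;
            d->sg_idx++;
            if (d->sg_idx == d->sg_count)
                    d->sg_idx = 0;
    }

    /*
     * Residue in the style of the listing: credit the in-flight entry with
     * its length minus the words still pending (wcount is in 32-bit words),
     * then reduce modulo bytes_req.
     */
    static unsigned int desc_residual(const struct dma_desc *d, unsigned int wcount)
    {
            unsigned int xfer = d->bytes_xfer + d->sg_req[d->sg_idx].len - wcount * 4;

            return d->bytes_req - (xfer % d->bytes_req);
    }

    int main(void)
    {
            struct dma_desc *d = desc_alloc(2);

            if (!d)
                    return 1;
            d->cyclic = true;
            d->bytes_req = 8192;
            d->sg_req[0].len = 4096;
            d->sg_req[1].len = 4096;

            /* Halfway through the first period: 512 words (2048 bytes) pending. */
            printf("residual: %u\n", desc_residual(d, 512));        /* prints 6144 */

            desc_advance_sg(d);                                     /* first period done */
            printf("sg_idx: %u\n", d->sg_idx);                      /* prints 1 */

            free(d);
            return 0;
    }

As in the listing, the modulo in the residue expression keeps it usable for cyclic descriptors, whose bytes_xfer keeps growing past bytes_req as the buffer is replayed.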