Lines matching refs: chan — uses of struct xilinx_dpdma_chan and related channel state in the Xilinx ZynqMP DPDMA driver (drivers/dma/xilinx/xilinx_dpdma.c)
193 * @chan: DMA channel
199 struct xilinx_dpdma_chan *chan;
247 container_of(_chan, struct xilinx_dpdma_chan, vchan.chan)
256 * @chan: DPDMA channels
266 struct xilinx_dpdma_chan *chan[XILINX_DPDMA_NUM_CHAN];
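The container_of() at line 247 is the body of the to_xilinx_chan() helper used by the dmaengine callbacks further down (lines 1198, 1229, 1254, ...): struct xilinx_dpdma_chan embeds a struct virt_dma_chan whose .chan member is the generic struct dma_chan handed out to clients, so the driver can walk back from the generic handle to its own per-channel state. A sketch of the full macro (the #define wrapper itself is inferred from the fragment):

#define to_xilinx_chan(_chan) \
	container_of(_chan, struct xilinx_dpdma_chan, vchan.chan)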
300 static void xilinx_dpdma_debugfs_desc_done_irq(struct xilinx_dpdma_chan *chan)
302 if (chan->id == dpdma_debugfs.chan_id)
470 static void xilinx_dpdma_debugfs_desc_done_irq(struct xilinx_dpdma_chan *chan)
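xilinx_dpdma_debugfs_desc_done_irq() is defined twice (lines 300 and 470), which fits the common pattern of a real hook compiled under CONFIG_DEBUG_FS plus an empty fallback stub. A hedged sketch under that assumption; the counter field name is hypothetical:

#ifdef CONFIG_DEBUG_FS
static void xilinx_dpdma_debugfs_desc_done_irq(struct xilinx_dpdma_chan *chan)
{
	/* Count done interrupts only on the channel selected via debugfs. */
	if (chan->id == dpdma_debugfs.chan_id)
		dpdma_debugfs.done_count++;	/* hypothetical counter field */
}
#else
static void xilinx_dpdma_debugfs_desc_done_irq(struct xilinx_dpdma_chan *chan)
{
}
#endif /* CONFIG_DEBUG_FS */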
558 * @chan: DPDMA channel
565 xilinx_dpdma_chan_alloc_sw_desc(struct xilinx_dpdma_chan *chan)
570 sw_desc = dma_pool_zalloc(chan->desc_pool, GFP_ATOMIC, &dma_addr);
581 * @chan: DPDMA channel
587 xilinx_dpdma_chan_free_sw_desc(struct xilinx_dpdma_chan *chan,
590 dma_pool_free(chan->desc_pool, sw_desc, sw_desc->dma_addr);
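The alloc/free pair above leans on the dma_pool API: dma_pool_zalloc() returns both a CPU pointer and the bus address the DPDMA engine fetches descriptors from, and dma_pool_free() needs that same bus address back, which is why it is stashed inside the descriptor. A minimal sketch assembled from lines 565-590, assuming the sw_desc field names shown:

static struct xilinx_dpdma_sw_desc *
xilinx_dpdma_chan_alloc_sw_desc(struct xilinx_dpdma_chan *chan)
{
	struct xilinx_dpdma_sw_desc *sw_desc;
	dma_addr_t dma_addr;

	/* GFP_ATOMIC: descriptors may be built with spinlocks held. */
	sw_desc = dma_pool_zalloc(chan->desc_pool, GFP_ATOMIC, &dma_addr);
	if (!sw_desc)
		return NULL;

	sw_desc->dma_addr = dma_addr;	/* kept for dma_pool_free() */
	return sw_desc;
}

static void
xilinx_dpdma_chan_free_sw_desc(struct xilinx_dpdma_chan *chan,
			       struct xilinx_dpdma_sw_desc *sw_desc)
{
	dma_pool_free(chan->desc_pool, sw_desc, sw_desc->dma_addr);
}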
595 * @chan: DPDMA channel
600 static void xilinx_dpdma_chan_dump_tx_desc(struct xilinx_dpdma_chan *chan,
604 struct device *dev = chan->xdev->dev;
608 dev_dbg(dev, "------- channel ID = %d -------\n", chan->id);
638 * @chan: DPDMA channel
645 xilinx_dpdma_chan_alloc_tx_desc(struct xilinx_dpdma_chan *chan)
654 tx_desc->chan = chan;
678 xilinx_dpdma_chan_free_sw_desc(desc->chan, sw_desc);
687 * @chan: DPDMA channel
696 xilinx_dpdma_chan_prep_interleaved_dma(struct xilinx_dpdma_chan *chan,
706 dev_err(chan->xdev->dev, "buffer should be aligned at %d B\n",
711 tx_desc = xilinx_dpdma_chan_alloc_tx_desc(chan);
715 sw_desc = xilinx_dpdma_chan_alloc_sw_desc(chan);
721 xilinx_dpdma_sw_desc_set_dma_addrs(chan->xdev, sw_desc, sw_desc,
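For context, this prep callback is reached through dmaengine_prep_interleaved_dma(). A hypothetical client (not part of this driver) describing one frame of height lines, width_bytes of payload per line and a stride-sized line pitch might look like the sketch below; all field values are illustrative, and paddr must satisfy the alignment checked at line 706:

#include <linux/dmaengine.h>
#include <linux/slab.h>

static int dpdma_client_prep_sketch(struct dma_chan *dchan, dma_addr_t paddr,
				    unsigned int width_bytes,
				    unsigned int height, unsigned int stride)
{
	struct dma_interleaved_template *xt;
	struct dma_async_tx_descriptor *tx;

	xt = kzalloc(struct_size(xt, sgl, 1), GFP_KERNEL);
	if (!xt)
		return -ENOMEM;

	xt->dir = DMA_MEM_TO_DEV;
	xt->src_start = paddr;
	xt->src_sgl = true;
	xt->dst_sgl = false;
	xt->frame_size = 1;			/* one chunk per line */
	xt->numf = height;			/* number of lines */
	xt->sgl[0].size = width_bytes;		/* payload bytes per line */
	xt->sgl[0].icg = stride - width_bytes;	/* inter-line gap */

	tx = dmaengine_prep_interleaved_dma(dchan, xt, DMA_CTRL_ACK);
	kfree(xt);	/* prep copies what it needs into its own descriptor */
	if (!tx)
		return -ENOMEM;

	dmaengine_submit(tx);
	dma_async_issue_pending(dchan);
	return 0;
}

The driver ORs DMA_CTRL_ACK into the flags itself (line 1214), so passing it here is redundant but harmless.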
747 * @chan: DPDMA channel
751 static void xilinx_dpdma_chan_enable(struct xilinx_dpdma_chan *chan)
755 reg = (XILINX_DPDMA_INTR_CHAN_MASK << chan->id)
757 dpdma_write(chan->xdev->reg, XILINX_DPDMA_IEN, reg);
758 reg = (XILINX_DPDMA_EINTR_CHAN_ERR_MASK << chan->id)
760 dpdma_write(chan->xdev->reg, XILINX_DPDMA_EIEN, reg);
769 dpdma_set(chan->reg, XILINX_DPDMA_CH_CNTL, reg);
774 * @chan: DPDMA channel
778 static void xilinx_dpdma_chan_disable(struct xilinx_dpdma_chan *chan)
782 reg = XILINX_DPDMA_INTR_CHAN_MASK << chan->id;
783 dpdma_write(chan->xdev->reg, XILINX_DPDMA_IEN, reg);
784 reg = XILINX_DPDMA_EINTR_CHAN_ERR_MASK << chan->id;
785 dpdma_write(chan->xdev->reg, XILINX_DPDMA_EIEN, reg);
787 dpdma_clr(chan->reg, XILINX_DPDMA_CH_CNTL, XILINX_DPDMA_CH_CNTL_ENABLE);
792 * @chan: DPDMA channel
796 static void xilinx_dpdma_chan_pause(struct xilinx_dpdma_chan *chan)
798 dpdma_set(chan->reg, XILINX_DPDMA_CH_CNTL, XILINX_DPDMA_CH_CNTL_PAUSE);
803 * @chan: DPDMA channel
807 static void xilinx_dpdma_chan_unpause(struct xilinx_dpdma_chan *chan)
809 dpdma_clr(chan->reg, XILINX_DPDMA_CH_CNTL, XILINX_DPDMA_CH_CNTL_PAUSE);
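The four control helpers above funnel through dpdma_set()/dpdma_clr(), which read as thin read-modify-write wrappers over the MMIO accessors seen elsewhere in the listing. A sketch of the presumed shape:

static inline u32 dpdma_read(void __iomem *base, u32 offset)
{
	return ioread32(base + offset);
}

static inline void dpdma_write(void __iomem *base, u32 offset, u32 val)
{
	iowrite32(val, base + offset);
}

static inline void dpdma_set(void __iomem *base, u32 offset, u32 set)
{
	dpdma_write(base, offset, dpdma_read(base, offset) | set);
}

static inline void dpdma_clr(void __iomem *base, u32 offset, u32 clr)
{
	dpdma_write(base, offset, dpdma_read(base, offset) & ~clr);
}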
812 static u32 xilinx_dpdma_chan_video_group_ready(struct xilinx_dpdma_chan *chan)
814 struct xilinx_dpdma_device *xdev = chan->xdev;
819 if (xdev->chan[i]->video_group && !xdev->chan[i]->running)
822 if (xdev->chan[i]->video_group)
831 * @chan: DPDMA channel
836 static void xilinx_dpdma_chan_queue_transfer(struct xilinx_dpdma_chan *chan)
838 struct xilinx_dpdma_device *xdev = chan->xdev;
845 lockdep_assert_held(&chan->lock);
847 if (chan->desc.pending)
850 if (!chan->running) {
851 xilinx_dpdma_chan_unpause(chan);
852 xilinx_dpdma_chan_enable(chan);
853 chan->first_frame = true;
854 chan->running = true;
857 vdesc = vchan_next_desc(&chan->vchan);
862 chan->desc.pending = desc;
875 dpdma_write(chan->reg, XILINX_DPDMA_CH_DESC_START_ADDR,
878 dpdma_write(chan->reg, XILINX_DPDMA_CH_DESC_START_ADDRE,
882 first_frame = chan->first_frame;
883 chan->first_frame = false;
885 if (chan->video_group) {
886 channels = xilinx_dpdma_chan_video_group_ready(chan);
894 channels = BIT(chan->id);
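Reassembling the queue-transfer fragments (lines 836-894) gives the shape below: at most one descriptor is staged at a time, the channel is brought up lazily on first use, and the next issued descriptor is popped from the virt-dma list. The to_dpdma_tx_desc() container_of helper and the final trigger write are not in the listing and are assumed or elided:

static void xilinx_dpdma_chan_queue_transfer_sketch(struct xilinx_dpdma_chan *chan)
{
	struct virt_dma_desc *vdesc;
	struct xilinx_dpdma_tx_desc *desc;

	lockdep_assert_held(&chan->lock);

	/* At most one descriptor is staged for the next (re)trigger. */
	if (chan->desc.pending)
		return;

	/* Bring the channel up lazily on the first queued transfer. */
	if (!chan->running) {
		xilinx_dpdma_chan_unpause(chan);
		xilinx_dpdma_chan_enable(chan);
		chan->first_frame = true;
		chan->running = true;
	}

	vdesc = vchan_next_desc(&chan->vchan);
	if (!vdesc)
		return;

	desc = to_dpdma_tx_desc(vdesc);		/* assumed container_of helper */
	chan->desc.pending = desc;
	list_del(&desc->vdesc.node);

	/*
	 * Program the head of the hardware descriptor list, split across the
	 * 32-bit CH_DESC_START_ADDR and the CH_DESC_START_ADDRE extension
	 * registers (lines 875-878), then trigger (first frame) or retrigger
	 * the channel; the trigger write and the video-group synchronization
	 * via xilinx_dpdma_chan_video_group_ready() are elided here.
	 */
}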
907 * @chan: DPDMA channel
913 static u32 xilinx_dpdma_chan_ostand(struct xilinx_dpdma_chan *chan)
916 dpdma_read(chan->reg, XILINX_DPDMA_CH_STATUS));
921 * @chan: DPDMA channel
933 static int xilinx_dpdma_chan_notify_no_ostand(struct xilinx_dpdma_chan *chan)
937 cnt = xilinx_dpdma_chan_ostand(chan);
939 dev_dbg(chan->xdev->dev, "%d outstanding transactions\n", cnt);
944 dpdma_write(chan->xdev->reg, XILINX_DPDMA_IDS,
945 XILINX_DPDMA_INTR_NO_OSTAND(chan->id));
946 wake_up(&chan->wait_to_stop);
953 * @chan: DPDMA channel
961 static int xilinx_dpdma_chan_wait_no_ostand(struct xilinx_dpdma_chan *chan)
966 ret = wait_event_interruptible_timeout(chan->wait_to_stop,
967 !xilinx_dpdma_chan_ostand(chan),
970 dpdma_write(chan->xdev->reg, XILINX_DPDMA_IEN,
971 XILINX_DPDMA_INTR_NO_OSTAND(chan->id));
975 dev_err(chan->xdev->dev, "not ready to stop: %d trans\n",
976 xilinx_dpdma_chan_ostand(chan));
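Fragments 961-976 pair with the notifier at 933-946: the IRQ path counts outstanding AXI transactions, and once they reach zero it masks the no-outstanding interrupt (via XILINX_DPDMA_IDS, line 944) and wakes chan->wait_to_stop. An assembled sketch of the sleeping waiter; the 50 ms timeout is an assumption, the constant is not in the listing:

static int xilinx_dpdma_chan_wait_no_ostand(struct xilinx_dpdma_chan *chan)
{
	int ret;

	ret = wait_event_interruptible_timeout(chan->wait_to_stop,
					       !xilinx_dpdma_chan_ostand(chan),
					       msecs_to_jiffies(50));
	if (ret > 0) {
		/* Re-arm the no-outstanding interrupt for the next stop. */
		dpdma_write(chan->xdev->reg, XILINX_DPDMA_IEN,
			    XILINX_DPDMA_INTR_NO_OSTAND(chan->id));
		return 0;
	}

	dev_err(chan->xdev->dev, "not ready to stop: %d trans\n",
		xilinx_dpdma_chan_ostand(chan));

	return ret == 0 ? -ETIMEDOUT : ret;
}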
986 * @chan: DPDMA channel
994 static int xilinx_dpdma_chan_poll_no_ostand(struct xilinx_dpdma_chan *chan)
1000 cnt = xilinx_dpdma_chan_ostand(chan);
1005 dpdma_write(chan->xdev->reg, XILINX_DPDMA_IEN,
1006 XILINX_DPDMA_INTR_NO_OSTAND(chan->id));
1010 dev_err(chan->xdev->dev, "not ready to stop: %d trans\n",
1011 xilinx_dpdma_chan_ostand(chan));
1018 * @chan: DPDMA channel
1025 static int xilinx_dpdma_chan_stop(struct xilinx_dpdma_chan *chan)
1030 ret = xilinx_dpdma_chan_wait_no_ostand(chan);
1034 spin_lock_irqsave(&chan->lock, flags);
1035 xilinx_dpdma_chan_disable(chan);
1036 chan->running = false;
1037 spin_unlock_irqrestore(&chan->lock, flags);
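Assembled from lines 1025-1037, the stop sequence: drain outstanding transactions first (a sleeping wait, so this runs in process context), then disable the channel under the lock:

static int xilinx_dpdma_chan_stop(struct xilinx_dpdma_chan *chan)
{
	unsigned long flags;
	int ret;

	/* Let in-flight AXI transactions finish before pulling the plug. */
	ret = xilinx_dpdma_chan_wait_no_ostand(chan);
	if (ret)
		return ret;

	spin_lock_irqsave(&chan->lock, flags);
	xilinx_dpdma_chan_disable(chan);
	chan->running = false;
	spin_unlock_irqrestore(&chan->lock, flags);

	return 0;
}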
1044 * @chan: DPDMA channel
1046 * Handle completion of the currently active descriptor (@chan->desc.active). As
1051 static void xilinx_dpdma_chan_done_irq(struct xilinx_dpdma_chan *chan)
1056 spin_lock_irqsave(&chan->lock, flags);
1058 xilinx_dpdma_debugfs_desc_done_irq(chan);
1060 active = chan->desc.active;
1064 dev_warn(chan->xdev->dev,
1067 spin_unlock_irqrestore(&chan->lock, flags);
1072 * @chan: DPDMA channel
1078 static void xilinx_dpdma_chan_vsync_irq(struct xilinx_dpdma_chan *chan)
1085 spin_lock_irqsave(&chan->lock, flags);
1087 pending = chan->desc.pending;
1088 if (!chan->running || !pending)
1091 desc_id = dpdma_read(chan->reg, XILINX_DPDMA_CH_DESC_ID)
1104 if (chan->desc.active)
1105 vchan_cookie_complete(&chan->desc.active->vdesc);
1106 chan->desc.active = pending;
1107 chan->desc.pending = NULL;
1109 xilinx_dpdma_chan_queue_transfer(chan);
1112 spin_unlock_irqrestore(&chan->lock, flags);
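An assembled sketch of the vsync bookkeeping (lines 1078-1112): retire the frame the hardware just switched away from, promote the staged descriptor to active, and immediately stage the next one. The XILINX_DPDMA_CH_DESC_ID readback check at line 1091, which verifies the hardware really latched the pending descriptor, is elided:

static void xilinx_dpdma_chan_vsync_sketch(struct xilinx_dpdma_chan *chan)
{
	struct xilinx_dpdma_tx_desc *pending;
	unsigned long flags;

	spin_lock_irqsave(&chan->lock, flags);

	pending = chan->desc.pending;
	if (!chan->running || !pending)
		goto out;

	/* CH_DESC_ID readback check elided (line 1091). */

	/* Retire the previous frame, promote the staged one. */
	if (chan->desc.active)
		vchan_cookie_complete(&chan->desc.active->vdesc);
	chan->desc.active = pending;
	chan->desc.pending = NULL;

	/* Immediately stage the next issued descriptor, if any. */
	xilinx_dpdma_chan_queue_transfer(chan);

out:
	spin_unlock_irqrestore(&chan->lock, flags);
}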
1117 * @chan: DPDMA channel
1124 xilinx_dpdma_chan_err(struct xilinx_dpdma_chan *chan, u32 isr, u32 eisr)
1126 if (!chan)
1129 if (chan->running &&
1130 ((isr & (XILINX_DPDMA_INTR_CHAN_ERR_MASK << chan->id)) ||
1131 (eisr & (XILINX_DPDMA_EINTR_CHAN_ERR_MASK << chan->id))))
1139 * @chan: DPDMA channel
1146 static void xilinx_dpdma_chan_handle_err(struct xilinx_dpdma_chan *chan)
1148 struct xilinx_dpdma_device *xdev = chan->xdev;
1152 spin_lock_irqsave(&chan->lock, flags);
1155 dpdma_read(chan->reg, XILINX_DPDMA_CH_DESC_START_ADDRE),
1156 dpdma_read(chan->reg, XILINX_DPDMA_CH_DESC_START_ADDR));
1158 dpdma_read(chan->reg, XILINX_DPDMA_CH_PYLD_CUR_ADDRE),
1159 dpdma_read(chan->reg, XILINX_DPDMA_CH_PYLD_CUR_ADDR));
1161 xilinx_dpdma_chan_disable(chan);
1162 chan->running = false;
1164 if (!chan->desc.active)
1167 active = chan->desc.active;
1168 chan->desc.active = NULL;
1170 xilinx_dpdma_chan_dump_tx_desc(chan, active);
1176 if (!chan->desc.pending &&
1177 list_empty(&chan->vchan.desc_issued)) {
1180 &chan->vchan.desc_issued);
1186 spin_unlock_irqrestore(&chan->lock, flags);
1198 struct xilinx_dpdma_chan *chan = to_xilinx_chan(dchan);
1210 desc = xilinx_dpdma_chan_prep_interleaved_dma(chan, xt);
1214 vchan_tx_prep(&chan->vchan, &desc->vdesc, flags | DMA_CTRL_ACK);
1229 struct xilinx_dpdma_chan *chan = to_xilinx_chan(dchan);
1232 chan->desc_pool = dma_pool_create(dev_name(chan->xdev->dev),
1233 chan->xdev->dev,
1236 if (!chan->desc_pool) {
1237 dev_err(chan->xdev->dev,
1254 struct xilinx_dpdma_chan *chan = to_xilinx_chan(dchan);
1256 vchan_free_chan_resources(&chan->vchan);
1258 dma_pool_destroy(chan->desc_pool);
1259 chan->desc_pool = NULL;
1264 struct xilinx_dpdma_chan *chan = to_xilinx_chan(dchan);
1267 spin_lock_irqsave(&chan->vchan.lock, flags);
1268 if (vchan_issue_pending(&chan->vchan))
1269 xilinx_dpdma_chan_queue_transfer(chan);
1270 spin_unlock_irqrestore(&chan->vchan.lock, flags);
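Lines 1264-1270 are the stock virt-dma issue_pending idiom: under vchan.lock, splice everything submitted since the last call onto the issued list, and only touch the hardware if that actually added work. Reassembled:

static void xilinx_dpdma_issue_pending_sketch(struct dma_chan *dchan)
{
	struct xilinx_dpdma_chan *chan = to_xilinx_chan(dchan);
	unsigned long flags;

	spin_lock_irqsave(&chan->vchan.lock, flags);
	/* vchan_issue_pending() returns true if new work was issued. */
	if (vchan_issue_pending(&chan->vchan))
		xilinx_dpdma_chan_queue_transfer(chan);
	spin_unlock_irqrestore(&chan->vchan.lock, flags);
}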
1276 struct xilinx_dpdma_chan *chan = to_xilinx_chan(dchan);
1286 spin_lock_irqsave(&chan->lock, flags);
1292 if (chan->id <= ZYNQMP_DPDMA_VIDEO2)
1293 chan->video_group = config->slave_id != 0;
1295 spin_unlock_irqrestore(&chan->lock, flags);
1331 struct xilinx_dpdma_chan *chan = to_xilinx_chan(dchan);
1332 struct xilinx_dpdma_device *xdev = chan->xdev;
1338 if (chan->video_group) {
1340 if (xdev->chan[i]->video_group &&
1341 xdev->chan[i]->running) {
1342 xilinx_dpdma_chan_pause(xdev->chan[i]);
1343 xdev->chan[i]->video_group = false;
1347 xilinx_dpdma_chan_pause(chan);
1351 spin_lock_irqsave(&chan->vchan.lock, flags);
1352 vchan_get_all_descriptors(&chan->vchan, &descriptors);
1353 spin_unlock_irqrestore(&chan->vchan.lock, flags);
1355 vchan_dma_desc_free_list(&chan->vchan, &descriptors);
1375 struct xilinx_dpdma_chan *chan = to_xilinx_chan(dchan);
1378 xilinx_dpdma_chan_stop(chan);
1380 spin_lock_irqsave(&chan->vchan.lock, flags);
1381 if (chan->desc.pending) {
1382 vchan_terminate_vdesc(&chan->desc.pending->vdesc);
1383 chan->desc.pending = NULL;
1385 if (chan->desc.active) {
1386 vchan_terminate_vdesc(&chan->desc.active->vdesc);
1387 chan->desc.active = NULL;
1389 spin_unlock_irqrestore(&chan->vchan.lock, flags);
1391 vchan_synchronize(&chan->vchan);
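The fragments split client teardown in two: terminate (lines 1331-1355) pauses the channel, or the whole video group, and reclaims issued-but-unstarted descriptors, while synchronize (lines 1375-1391) fully stops the channel, detaches the in-flight pending/active descriptors via vchan_terminate_vdesc(), and waits out deferred frees in vchan_synchronize(). A hypothetical consumer therefore does both before reusing its buffers:

#include <linux/dmaengine.h>

static void dpdma_client_teardown_sketch(struct dma_chan *dchan)
{
	/* Pauses the channel and reclaims issued descriptors. */
	dmaengine_terminate_async(dchan);

	/*
	 * Stops the channel, detaches in-flight descriptors, and waits for
	 * deferred frees and callbacks; ends up in vchan_synchronize()
	 * (line 1391).
	 */
	dmaengine_synchronize(dchan);
}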
1440 for (i = 0; i < ARRAY_SIZE(xdev->chan); i++)
1441 if (err || xilinx_dpdma_chan_err(xdev->chan[i], isr, eisr))
1442 tasklet_schedule(&xdev->chan[i]->err_task);
1479 struct xilinx_dpdma_chan *chan = from_tasklet(chan, t, err_task);
1480 struct xilinx_dpdma_device *xdev = chan->xdev;
1484 xilinx_dpdma_chan_poll_no_ostand(chan);
1486 xilinx_dpdma_chan_handle_err(chan);
1489 XILINX_DPDMA_INTR_CHAN_ERR_MASK << chan->id);
1491 XILINX_DPDMA_EINTR_CHAN_ERR_MASK << chan->id);
1493 spin_lock_irqsave(&chan->lock, flags);
1494 xilinx_dpdma_chan_queue_transfer(chan);
1495 spin_unlock_irqrestore(&chan->lock, flags);
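The error tasklet (lines 1479-1495) assembled from its fragments: a tasklet cannot sleep, so it uses the polling variant of the outstanding-transaction drain, then runs the channel error handler, unmasks the per-channel error interrupts, and requeues under the lock. The two unmask writes at 1489/1491 are assumed to target XILINX_DPDMA_IEN and XILINX_DPDMA_EIEN, mirroring the enable path:

static void xilinx_dpdma_chan_err_task(struct tasklet_struct *t)
{
	struct xilinx_dpdma_chan *chan = from_tasklet(chan, t, err_task);
	struct xilinx_dpdma_device *xdev = chan->xdev;
	unsigned long flags;

	/* Busy-wait variant of the stop path: no sleeping in a tasklet. */
	xilinx_dpdma_chan_poll_no_ostand(chan);

	xilinx_dpdma_chan_handle_err(chan);

	/* Re-enable the error interrupts masked by the IRQ handler. */
	dpdma_write(xdev->reg, XILINX_DPDMA_IEN,
		    XILINX_DPDMA_INTR_CHAN_ERR_MASK << chan->id);
	dpdma_write(xdev->reg, XILINX_DPDMA_EIEN,
		    XILINX_DPDMA_EINTR_CHAN_ERR_MASK << chan->id);

	spin_lock_irqsave(&chan->lock, flags);
	xilinx_dpdma_chan_queue_transfer(chan);
	spin_unlock_irqrestore(&chan->lock, flags);
}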
1519 for (i = 0; i < ARRAY_SIZE(xdev->chan); i++) {
1520 struct xilinx_dpdma_chan *chan = xdev->chan[i];
1522 if (chan)
1523 xilinx_dpdma_chan_vsync_irq(chan);
1529 for_each_set_bit(i, &mask, ARRAY_SIZE(xdev->chan))
1530 xilinx_dpdma_chan_done_irq(xdev->chan[i]);
1535 for_each_set_bit(i, &mask, ARRAY_SIZE(xdev->chan))
1536 xilinx_dpdma_chan_notify_no_ostand(xdev->chan[i]);
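The done and no-outstanding causes (lines 1529-1536) are fanned out per channel with for_each_set_bit(). The generalized shape, as a hypothetical helper; the real handler open-codes the two loops:

static void xilinx_dpdma_irq_dispatch_sketch(struct xilinx_dpdma_device *xdev,
					     unsigned long mask,
					     void (*handler)(struct xilinx_dpdma_chan *))
{
	unsigned int i;

	/* One handler call per channel whose bit is set in the cause mask. */
	for_each_set_bit(i, &mask, ARRAY_SIZE(xdev->chan))
		handler(xdev->chan[i]);
}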
1553 struct xilinx_dpdma_chan *chan;
1555 chan = devm_kzalloc(xdev->dev, sizeof(*chan), GFP_KERNEL);
1556 if (!chan)
1559 chan->id = chan_id;
1560 chan->reg = xdev->reg + XILINX_DPDMA_CH_BASE
1561 + XILINX_DPDMA_CH_OFFSET * chan->id;
1562 chan->running = false;
1563 chan->xdev = xdev;
1565 spin_lock_init(&chan->lock);
1566 init_waitqueue_head(&chan->wait_to_stop);
1568 tasklet_setup(&chan->err_task, xilinx_dpdma_chan_err_task);
1570 chan->vchan.desc_free = xilinx_dpdma_chan_free_tx_desc;
1571 vchan_init(&chan->vchan, &xdev->common);
1573 xdev->chan[chan->id] = chan;
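Lines 1553-1573 cover per-channel bring-up end to end; reassembled into one function (the signature and the -ENOMEM return are assumed, everything else is quoted from the fragments):

static int xilinx_dpdma_chan_init_sketch(struct xilinx_dpdma_device *xdev,
					 unsigned int chan_id)
{
	struct xilinx_dpdma_chan *chan;

	chan = devm_kzalloc(xdev->dev, sizeof(*chan), GFP_KERNEL);
	if (!chan)
		return -ENOMEM;

	chan->id = chan_id;
	chan->reg = xdev->reg + XILINX_DPDMA_CH_BASE
		  + XILINX_DPDMA_CH_OFFSET * chan->id;
	chan->running = false;
	chan->xdev = xdev;

	spin_lock_init(&chan->lock);
	init_waitqueue_head(&chan->wait_to_stop);

	tasklet_setup(&chan->err_task, xilinx_dpdma_chan_err_task);

	/* Register with the virt-dma core; it frees descriptors for us. */
	chan->vchan.desc_free = xilinx_dpdma_chan_free_tx_desc;
	vchan_init(&chan->vchan, &xdev->common);

	xdev->chan[chan->id] = chan;
	return 0;
}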
1578 static void xilinx_dpdma_chan_remove(struct xilinx_dpdma_chan *chan)
1580 if (!chan)
1583 tasklet_kill(&chan->err_task);
1584 list_del(&chan->vchan.chan.device_node);
1593 if (chan_id >= ARRAY_SIZE(xdev->chan))
1596 if (!xdev->chan[chan_id])
1599 return dma_get_slave_channel(&xdev->chan[chan_id]->vchan.chan);
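Lines 1593-1599 are the #dma-cells translation: the single cell is the channel index, bounds-checked against the array and checked for liveness before handing out the embedded generic dma_chan. Reassembled (function name and the of_dma_data plumbing are assumed):

static struct dma_chan *
xilinx_dpdma_of_xlate_sketch(struct of_phandle_args *dma_spec,
			     struct of_dma *ofdma)
{
	struct xilinx_dpdma_device *xdev = ofdma->of_dma_data;
	u32 chan_id = dma_spec->args[0];

	if (chan_id >= ARRAY_SIZE(xdev->chan))
		return NULL;

	if (!xdev->chan[chan_id])
		return NULL;

	return dma_get_slave_channel(&xdev->chan[chan_id]->vchan.chan);
}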
1611 for (i = 0; i < ARRAY_SIZE(xdev->chan); i++) {
1688 for (i = 0; i < ARRAY_SIZE(xdev->chan); ++i) {
1729 for (i = 0; i < ARRAY_SIZE(xdev->chan); i++)
1730 xilinx_dpdma_chan_remove(xdev->chan[i]);
1750 for (i = 0; i < ARRAY_SIZE(xdev->chan); i++)
1751 xilinx_dpdma_chan_remove(xdev->chan[i]);