Lines Matching refs:chan

451 void (*start_transfer)(struct xilinx_dma_chan *chan);
452 int (*stop_transfer)(struct xilinx_dma_chan *chan);
488 * @chan: Driver specific DMA channel
507 struct xilinx_dma_chan *chan[XILINX_MCDMA_MAX_CHANS_PER_DEVICE];
524 #define to_xilinx_chan(chan) \
525 container_of(chan, struct xilinx_dma_chan, common)
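
A note on the pattern: every dmaengine callback below receives the generic struct dma_chan that is embedded as the 'common' member of the driver-private channel, and to_xilinx_chan() is the usual container_of() walk back to the enclosing structure. A minimal sketch with a hypothetical helper (example_chan_has_error is illustrative, not part of the driver):

	static inline bool example_chan_has_error(struct dma_chan *dchan)
	{
		struct xilinx_dma_chan *chan = to_xilinx_chan(dchan);

		return chan->err;
	}
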
528 #define xilinx_dma_poll_timeout(chan, reg, val, cond, delay_us, timeout_us) \
529 readl_poll_timeout_atomic(chan->xdev->regs + chan->ctrl_offset + reg, \
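
The macro's continuation (source line 530) is not listed because it does not reference chan. Assuming the remaining parameters are forwarded unchanged to readl_poll_timeout_atomic() from <linux/iopoll.h>, the complete definition would read:

	#define xilinx_dma_poll_timeout(chan, reg, val, cond, delay_us, timeout_us) \
		readl_poll_timeout_atomic(chan->xdev->regs + chan->ctrl_offset + reg, \
					  val, cond, delay_us, timeout_us)
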
533 static inline u32 dma_read(struct xilinx_dma_chan *chan, u32 reg)
535 return ioread32(chan->xdev->regs + reg);
538 static inline void dma_write(struct xilinx_dma_chan *chan, u32 reg, u32 value)
540 iowrite32(value, chan->xdev->regs + reg);
543 static inline void vdma_desc_write(struct xilinx_dma_chan *chan, u32 reg,
546 dma_write(chan, chan->desc_offset + reg, value);
549 static inline u32 dma_ctrl_read(struct xilinx_dma_chan *chan, u32 reg)
551 return dma_read(chan, chan->ctrl_offset + reg);
554 static inline void dma_ctrl_write(struct xilinx_dma_chan *chan, u32 reg,
557 dma_write(chan, chan->ctrl_offset + reg, value);
560 static inline void dma_ctrl_clr(struct xilinx_dma_chan *chan, u32 reg,
563 dma_ctrl_write(chan, reg, dma_ctrl_read(chan, reg) & ~clr);
566 static inline void dma_ctrl_set(struct xilinx_dma_chan *chan, u32 reg,
569 dma_ctrl_write(chan, reg, dma_ctrl_read(chan, reg) | set);
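
Lines 533-569 form a small accessor stack: raw ioread32()/iowrite32() wrappers, control-offset variants on top of them, and finally read-modify-write set/clear helpers. Typical usage, with the register and bit names that appear later in this listing (lines 1309 and 1341):

	dma_ctrl_set(chan, XILINX_DMA_REG_DMACR, XILINX_DMA_DMACR_RUNSTOP); /* run */
	dma_ctrl_clr(chan, XILINX_DMA_REG_DMACR, XILINX_DMA_DMACR_RUNSTOP); /* stop */
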
574 * @chan: Driver specific VDMA channel
583 static inline void vdma_desc_write_64(struct xilinx_dma_chan *chan, u32 reg,
587 writel(value_lsb, chan->xdev->regs + chan->desc_offset + reg);
590 writel(value_msb, chan->xdev->regs + chan->desc_offset + reg + 4);
593 static inline void dma_writeq(struct xilinx_dma_chan *chan, u32 reg, u64 value)
595 lo_hi_writeq(value, chan->xdev->regs + chan->ctrl_offset + reg);
598 static inline void xilinx_write(struct xilinx_dma_chan *chan, u32 reg,
601 if (chan->ext_addr)
602 dma_writeq(chan, reg, addr);
604 dma_ctrl_write(chan, reg, addr);
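
xilinx_write() hides the register width from its callers: when the core was synthesized with addresses wider than 32 bits (chan->ext_addr), it takes the 64-bit path. An open-coded sketch of the same dispatch, using only the helpers defined above:

	if (chan->ext_addr)
		lo_hi_writeq(addr, chan->xdev->regs + chan->ctrl_offset + reg);
	else
		iowrite32(addr, chan->xdev->regs + chan->ctrl_offset + reg);
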
607 static inline void xilinx_axidma_buf(struct xilinx_dma_chan *chan,
612 if (chan->ext_addr) {
621 static inline void xilinx_aximcdma_buf(struct xilinx_dma_chan *chan,
625 if (chan->ext_addr) {
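
Both buffer helpers are truncated in this listing; the shared shape is to split the DMA address across two 32-bit hardware descriptor fields when ext_addr is set. A sketch, under the assumption that the descriptor fields are named buf_addr and buf_addr_msb:

	if (chan->ext_addr) {
		hw->buf_addr = lower_32_bits(dma_addr);
		hw->buf_addr_msb = upper_32_bits(dma_addr);
	} else {
		hw->buf_addr = dma_addr;
	}
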
662 * @chan: Driver specific DMA channel
667 xilinx_vdma_alloc_tx_segment(struct xilinx_dma_chan *chan)
672 segment = dma_pool_zalloc(chan->desc_pool, GFP_ATOMIC, &phys);
683 * @chan: Driver specific DMA channel
688 xilinx_cdma_alloc_tx_segment(struct xilinx_dma_chan *chan)
693 segment = dma_pool_zalloc(chan->desc_pool, GFP_ATOMIC, &phys);
704 * @chan: Driver specific DMA channel
709 xilinx_axidma_alloc_tx_segment(struct xilinx_dma_chan *chan)
714 spin_lock_irqsave(&chan->lock, flags);
715 if (!list_empty(&chan->free_seg_list)) {
716 segment = list_first_entry(&chan->free_seg_list,
721 spin_unlock_irqrestore(&chan->lock, flags);
724 dev_dbg(chan->dev, "Could not find free tx segment\n");
731 * @chan: Driver specific DMA channel
736 xilinx_aximcdma_alloc_tx_segment(struct xilinx_dma_chan *chan)
741 spin_lock_irqsave(&chan->lock, flags);
742 if (!list_empty(&chan->free_seg_list)) {
743 segment = list_first_entry(&chan->free_seg_list,
748 spin_unlock_irqrestore(&chan->lock, flags);
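
Two allocation strategies are visible here: the VDMA/CDMA allocators draw zeroed segments from a dma_pool (GFP_ATOMIC, since they can run in atomic context), while the AXI DMA/MCDMA allocators pop a preallocated segment off chan->free_seg_list under the channel lock. A fuller sketch of the list-pop path, assuming the AXI DMA variant's segment type name:

	struct xilinx_axidma_tx_segment *segment = NULL;
	unsigned long flags;

	spin_lock_irqsave(&chan->lock, flags);
	if (!list_empty(&chan->free_seg_list)) {
		segment = list_first_entry(&chan->free_seg_list,
					   struct xilinx_axidma_tx_segment,
					   node);
		list_del(&segment->node);
	}
	spin_unlock_irqrestore(&chan->lock, flags);
	if (!segment)
		dev_dbg(chan->dev, "Could not find free tx segment\n");
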
777 * @chan: Driver specific DMA channel
780 static void xilinx_dma_free_tx_segment(struct xilinx_dma_chan *chan,
785 list_add_tail(&segment->node, &chan->free_seg_list);
790 * @chan: Driver specific DMA channel
793 static void xilinx_mcdma_free_tx_segment(struct xilinx_dma_chan *chan,
799 list_add_tail(&segment->node, &chan->free_seg_list);
804 * @chan: Driver specific DMA channel
807 static void xilinx_cdma_free_tx_segment(struct xilinx_dma_chan *chan,
810 dma_pool_free(chan->desc_pool, segment, segment->phys);
815 * @chan: Driver specific DMA channel
818 static void xilinx_vdma_free_tx_segment(struct xilinx_dma_chan *chan,
821 dma_pool_free(chan->desc_pool, segment, segment->phys);
826 * @chan: Driver specific DMA channel
831 xilinx_dma_alloc_tx_descriptor(struct xilinx_dma_chan *chan)
846 * @chan: Driver specific DMA channel
850 xilinx_dma_free_tx_descriptor(struct xilinx_dma_chan *chan,
861 if (chan->xdev->dma_config->dmatype == XDMA_TYPE_VDMA) {
864 xilinx_vdma_free_tx_segment(chan, segment);
866 } else if (chan->xdev->dma_config->dmatype == XDMA_TYPE_CDMA) {
870 xilinx_cdma_free_tx_segment(chan, cdma_segment);
872 } else if (chan->xdev->dma_config->dmatype == XDMA_TYPE_AXIDMA) {
876 xilinx_dma_free_tx_segment(chan, axidma_segment);
882 xilinx_mcdma_free_tx_segment(chan, aximcdma_segment);
893 * @chan: Driver specific DMA channel
896 static void xilinx_dma_free_desc_list(struct xilinx_dma_chan *chan,
903 xilinx_dma_free_tx_descriptor(chan, desc);
909 * @chan: Driver specific DMA channel
911 static void xilinx_dma_free_descriptors(struct xilinx_dma_chan *chan)
915 spin_lock_irqsave(&chan->lock, flags);
917 xilinx_dma_free_desc_list(chan, &chan->pending_list);
918 xilinx_dma_free_desc_list(chan, &chan->done_list);
919 xilinx_dma_free_desc_list(chan, &chan->active_list);
921 spin_unlock_irqrestore(&chan->lock, flags);
930 struct xilinx_dma_chan *chan = to_xilinx_chan(dchan);
933 dev_dbg(chan->dev, "Free all channel resources.\n");
935 xilinx_dma_free_descriptors(chan);
937 if (chan->xdev->dma_config->dmatype == XDMA_TYPE_AXIDMA) {
938 spin_lock_irqsave(&chan->lock, flags);
939 INIT_LIST_HEAD(&chan->free_seg_list);
940 spin_unlock_irqrestore(&chan->lock, flags);
943 dma_free_coherent(chan->dev, sizeof(*chan->seg_v) *
944 XILINX_DMA_NUM_DESCS, chan->seg_v,
945 chan->seg_p);
948 dma_free_coherent(chan->dev, sizeof(*chan->cyclic_seg_v),
949 chan->cyclic_seg_v, chan->cyclic_seg_p);
952 if (chan->xdev->dma_config->dmatype == XDMA_TYPE_AXIMCDMA) {
953 spin_lock_irqsave(&chan->lock, flags);
954 INIT_LIST_HEAD(&chan->free_seg_list);
955 spin_unlock_irqrestore(&chan->lock, flags);
958 dma_free_coherent(chan->dev, sizeof(*chan->seg_mv) *
959 XILINX_DMA_NUM_DESCS, chan->seg_mv,
960 chan->seg_p);
963 if (chan->xdev->dma_config->dmatype != XDMA_TYPE_AXIDMA &&
964 chan->xdev->dma_config->dmatype != XDMA_TYPE_AXIMCDMA) {
965 dma_pool_destroy(chan->desc_pool);
966 chan->desc_pool = NULL;
973 * @chan: Driver specific dma channel
978 static u32 xilinx_dma_get_residue(struct xilinx_dma_chan *chan,
991 if (chan->xdev->dma_config->dmatype == XDMA_TYPE_CDMA) {
997 chan->xdev->max_buffer_len;
998 } else if (chan->xdev->dma_config->dmatype ==
1005 chan->xdev->max_buffer_len;
1014 chan->xdev->max_buffer_len;
1023 * @chan: Driver specific dma channel
1027 static void xilinx_dma_chan_handle_cyclic(struct xilinx_dma_chan *chan,
1035 spin_unlock_irqrestore(&chan->lock, *flags);
1037 spin_lock_irqsave(&chan->lock, *flags);
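
xilinx_dma_chan_handle_cyclic() drops the channel lock around the user callback and retakes it afterwards, the standard dmaengine idiom that lets a callback submit new work or terminate the channel without deadlocking on chan->lock. The callback invocation itself is not listed (it does not reference chan); a sketch of the full sequence:

	if (callback) {
		spin_unlock_irqrestore(&chan->lock, *flags);
		callback(callback_param);	/* may re-enter the driver */
		spin_lock_irqsave(&chan->lock, *flags);
	}
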
1043 * @chan: Driver specific DMA channel
1045 static void xilinx_dma_chan_desc_cleanup(struct xilinx_dma_chan *chan)
1050 spin_lock_irqsave(&chan->lock, flags);
1052 list_for_each_entry_safe(desc, next, &chan->done_list, node) {
1056 xilinx_dma_chan_handle_cyclic(chan, desc, &flags);
1064 if (chan->direction == DMA_DEV_TO_MEM)
1075 spin_unlock_irqrestore(&chan->lock, flags);
1077 spin_lock_irqsave(&chan->lock, flags);
1081 xilinx_dma_free_tx_descriptor(chan, desc);
1087 if (chan->terminating)
1091 spin_unlock_irqrestore(&chan->lock, flags);
1100 struct xilinx_dma_chan *chan = from_tasklet(chan, t, tasklet);
1102 xilinx_dma_chan_desc_cleanup(chan);
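
The tasklet is installed at probe time (line 2939, tasklet_setup()), and from_tasklet() recovers the channel from the embedded tasklet_struct. The whole handler, reconstructed from lines 1100-1102:

	static void xilinx_dma_do_tasklet(struct tasklet_struct *t)
	{
		struct xilinx_dma_chan *chan = from_tasklet(chan, t, tasklet);

		xilinx_dma_chan_desc_cleanup(chan);
	}
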
1113 struct xilinx_dma_chan *chan = to_xilinx_chan(dchan);
1117 if (chan->desc_pool)
1124 if (chan->xdev->dma_config->dmatype == XDMA_TYPE_AXIDMA) {
1126 chan->seg_v = dma_alloc_coherent(chan->dev,
1127 sizeof(*chan->seg_v) * XILINX_DMA_NUM_DESCS,
1128 &chan->seg_p, GFP_KERNEL);
1129 if (!chan->seg_v) {
1130 dev_err(chan->dev,
1132 chan->id);
1141 chan->cyclic_seg_v = dma_alloc_coherent(chan->dev,
1142 sizeof(*chan->cyclic_seg_v),
1143 &chan->cyclic_seg_p,
1145 if (!chan->cyclic_seg_v) {
1146 dev_err(chan->dev,
1148 dma_free_coherent(chan->dev, sizeof(*chan->seg_v) *
1149 XILINX_DMA_NUM_DESCS, chan->seg_v,
1150 chan->seg_p);
1153 chan->cyclic_seg_v->phys = chan->cyclic_seg_p;
1156 chan->seg_v[i].hw.next_desc =
1157 lower_32_bits(chan->seg_p + sizeof(*chan->seg_v) *
1159 chan->seg_v[i].hw.next_desc_msb =
1160 upper_32_bits(chan->seg_p + sizeof(*chan->seg_v) *
1162 chan->seg_v[i].phys = chan->seg_p +
1163 sizeof(*chan->seg_v) * i;
1164 list_add_tail(&chan->seg_v[i].node,
1165 &chan->free_seg_list);
1167 } else if (chan->xdev->dma_config->dmatype == XDMA_TYPE_AXIMCDMA) {
1169 chan->seg_mv = dma_alloc_coherent(chan->dev,
1170 sizeof(*chan->seg_mv) *
1172 &chan->seg_p, GFP_KERNEL);
1173 if (!chan->seg_mv) {
1174 dev_err(chan->dev,
1176 chan->id);
1180 chan->seg_mv[i].hw.next_desc =
1181 lower_32_bits(chan->seg_p + sizeof(*chan->seg_mv) *
1183 chan->seg_mv[i].hw.next_desc_msb =
1184 upper_32_bits(chan->seg_p + sizeof(*chan->seg_mv) *
1186 chan->seg_mv[i].phys = chan->seg_p +
1187 sizeof(*chan->seg_mv) * i;
1188 list_add_tail(&chan->seg_mv[i].node,
1189 &chan->free_seg_list);
1191 } else if (chan->xdev->dma_config->dmatype == XDMA_TYPE_CDMA) {
1192 chan->desc_pool = dma_pool_create("xilinx_cdma_desc_pool",
1193 chan->dev,
1198 chan->desc_pool = dma_pool_create("xilinx_vdma_desc_pool",
1199 chan->dev,
1205 if (!chan->desc_pool &&
1206 ((chan->xdev->dma_config->dmatype != XDMA_TYPE_AXIDMA) &&
1207 chan->xdev->dma_config->dmatype != XDMA_TYPE_AXIMCDMA)) {
1208 dev_err(chan->dev,
1210 chan->id);
1216 if (chan->xdev->dma_config->dmatype == XDMA_TYPE_AXIDMA) {
1220 dma_ctrl_set(chan, XILINX_DMA_REG_DMACR,
1224 if ((chan->xdev->dma_config->dmatype == XDMA_TYPE_CDMA) && chan->has_sg)
1225 dma_ctrl_set(chan, XILINX_DMA_REG_DMACR,
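
In the AXI DMA and MCDMA branches above, alloc_chan_resources() carves the whole descriptor ring out of one dma_alloc_coherent() block and chains entry i to entry (i + 1) % XILINX_DMA_NUM_DESCS by precomputing the hardware next-descriptor pointers. A sketch of the AXI DMA loop implied by lines 1156-1165:

	for (i = 0; i < XILINX_DMA_NUM_DESCS; i++) {
		dma_addr_t next = chan->seg_p + sizeof(*chan->seg_v) *
				  ((i + 1) % XILINX_DMA_NUM_DESCS);

		chan->seg_v[i].hw.next_desc = lower_32_bits(next);
		chan->seg_v[i].hw.next_desc_msb = upper_32_bits(next);
		chan->seg_v[i].phys = chan->seg_p + sizeof(*chan->seg_v) * i;
		list_add_tail(&chan->seg_v[i].node, &chan->free_seg_list);
	}
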
1233 * @chan: Driver specific DMA channel
1239 static int xilinx_dma_calc_copysize(struct xilinx_dma_chan *chan,
1245 chan->xdev->max_buffer_len);
1248 chan->xdev->common.copy_align) {
1254 (1 << chan->xdev->common.copy_align));
1271 struct xilinx_dma_chan *chan = to_xilinx_chan(dchan);
1281 spin_lock_irqsave(&chan->lock, flags);
1282 if (!list_empty(&chan->active_list)) {
1283 desc = list_last_entry(&chan->active_list,
1289 if (chan->has_sg && chan->xdev->dma_config->dmatype != XDMA_TYPE_VDMA)
1290 residue = xilinx_dma_get_residue(chan, desc);
1292 spin_unlock_irqrestore(&chan->lock, flags);
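
The part of tx_status before the listed lines presumably follows the stock dmaengine shape: ask the cookie framework first and only compute a residue for an in-flight descriptor. A sketch, assuming the dma_cookie_* helpers from drivers/dma/dmaengine.h:

	enum dma_status ret = dma_cookie_status(dchan, cookie, txstate);

	if (ret == DMA_COMPLETE || !txstate)
		return ret;
	/* ...then take chan->lock, compute the residue as above, and
	 * report it with dma_set_residue(txstate, residue). */
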
1301 * @chan: Driver specific DMA channel
1305 static int xilinx_dma_stop_transfer(struct xilinx_dma_chan *chan)
1309 dma_ctrl_clr(chan, XILINX_DMA_REG_DMACR, XILINX_DMA_DMACR_RUNSTOP);
1312 return xilinx_dma_poll_timeout(chan, XILINX_DMA_REG_DMASR, val,
1319 * @chan: Driver specific DMA channel
1323 static int xilinx_cdma_stop_transfer(struct xilinx_dma_chan *chan)
1327 return xilinx_dma_poll_timeout(chan, XILINX_DMA_REG_DMASR, val,
1334 * @chan: Driver specific DMA channel
1336 static void xilinx_dma_start(struct xilinx_dma_chan *chan)
1341 dma_ctrl_set(chan, XILINX_DMA_REG_DMACR, XILINX_DMA_DMACR_RUNSTOP);
1344 err = xilinx_dma_poll_timeout(chan, XILINX_DMA_REG_DMASR, val,
1349 dev_err(chan->dev, "Cannot start channel %p: %x\n",
1350 chan, dma_ctrl_read(chan, XILINX_DMA_REG_DMASR));
1352 chan->err = true;
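
Start and stop are symmetric RUNSTOP flips followed by polling DMASR until the halted bit changes state, via the xilinx_dma_poll_timeout() macro from line 528. The start path in full, with the bit and loop-count names (XILINX_DMA_DMASR_HALTED, XILINX_DMA_LOOP_COUNT) assumed:

	dma_ctrl_set(chan, XILINX_DMA_REG_DMACR, XILINX_DMA_DMACR_RUNSTOP);

	err = xilinx_dma_poll_timeout(chan, XILINX_DMA_REG_DMASR, val,
				      !(val & XILINX_DMA_DMASR_HALTED), 0,
				      XILINX_DMA_LOOP_COUNT);
	if (err) {
		dev_err(chan->dev, "Cannot start channel %p: %x\n",
			chan, dma_ctrl_read(chan, XILINX_DMA_REG_DMASR));
		chan->err = true;
	}
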
1358 * @chan: Driver specific channel struct pointer
1360 static void xilinx_vdma_start_transfer(struct xilinx_dma_chan *chan)
1362 struct xilinx_vdma_config *config = &chan->config;
1369 if (chan->err)
1372 if (!chan->idle)
1375 if (list_empty(&chan->pending_list))
1378 desc = list_first_entry(&chan->pending_list,
1382 if (chan->has_vflip) {
1383 reg = dma_read(chan, XILINX_VDMA_REG_ENABLE_VERTICAL_FLIP);
1386 dma_write(chan, XILINX_VDMA_REG_ENABLE_VERTICAL_FLIP,
1390 reg = dma_ctrl_read(chan, XILINX_DMA_REG_DMACR);
1403 dma_ctrl_write(chan, XILINX_DMA_REG_DMACR, reg);
1405 j = chan->desc_submitcount;
1406 reg = dma_read(chan, XILINX_DMA_REG_PARK_PTR);
1407 if (chan->direction == DMA_MEM_TO_DEV) {
1414 dma_write(chan, XILINX_DMA_REG_PARK_PTR, reg);
1417 xilinx_dma_start(chan);
1419 if (chan->err)
1423 if (chan->desc_submitcount < chan->num_frms)
1424 i = chan->desc_submitcount;
1427 if (chan->ext_addr)
1428 vdma_desc_write_64(chan,
1433 vdma_desc_write(chan,
1444 vdma_desc_write(chan, XILINX_DMA_REG_HSIZE, last->hw.hsize);
1445 vdma_desc_write(chan, XILINX_DMA_REG_FRMDLY_STRIDE,
1447 vdma_desc_write(chan, XILINX_DMA_REG_VSIZE, last->hw.vsize);
1449 chan->desc_submitcount++;
1450 chan->desc_pendingcount--;
1451 list_move_tail(&desc->node, &chan->active_list);
1452 if (chan->desc_submitcount == chan->num_frms)
1453 chan->desc_submitcount = 0;
1455 chan->idle = false;
1460 * @chan: Driver specific channel struct pointer
1462 static void xilinx_cdma_start_transfer(struct xilinx_dma_chan *chan)
1466 u32 ctrl_reg = dma_read(chan, XILINX_DMA_REG_DMACR);
1468 if (chan->err)
1471 if (!chan->idle)
1474 if (list_empty(&chan->pending_list))
1477 head_desc = list_first_entry(&chan->pending_list,
1479 tail_desc = list_last_entry(&chan->pending_list,
1484 if (chan->desc_pendingcount <= XILINX_DMA_COALESCE_MAX) {
1486 ctrl_reg |= chan->desc_pendingcount <<
1488 dma_ctrl_write(chan, XILINX_DMA_REG_DMACR, ctrl_reg);
1491 if (chan->has_sg) {
1492 dma_ctrl_clr(chan, XILINX_DMA_REG_DMACR,
1495 dma_ctrl_set(chan, XILINX_DMA_REG_DMACR,
1498 xilinx_write(chan, XILINX_DMA_REG_CURDESC,
1502 xilinx_write(chan, XILINX_DMA_REG_TAILDESC,
1515 xilinx_write(chan, XILINX_CDMA_REG_SRCADDR,
1517 xilinx_write(chan, XILINX_CDMA_REG_DSTADDR,
1521 dma_ctrl_write(chan, XILINX_DMA_REG_BTT,
1522 hw->control & chan->xdev->max_buffer_len);
1525 list_splice_tail_init(&chan->pending_list, &chan->active_list);
1526 chan->desc_pendingcount = 0;
1527 chan->idle = false;
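
The CDMA start path has two modes: with scatter-gather enabled it programs CURDESC/TAILDESC and lets the engine walk the chain; otherwise it programs source and destination and then BTT (bytes to transfer), whose write is what actually kicks the hardware. A condensed sketch of the simple-mode branch, assuming hw is the sole segment's hardware descriptor with src_addr/dest_addr fields:

	xilinx_write(chan, XILINX_CDMA_REG_SRCADDR, hw->src_addr);
	xilinx_write(chan, XILINX_CDMA_REG_DSTADDR, hw->dest_addr);
	/* Writing BTT starts the copy. */
	dma_ctrl_write(chan, XILINX_DMA_REG_BTT,
		       hw->control & chan->xdev->max_buffer_len);
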
1532 * @chan: Driver specific channel struct pointer
1534 static void xilinx_dma_start_transfer(struct xilinx_dma_chan *chan)
1540 if (chan->err)
1543 if (list_empty(&chan->pending_list))
1546 if (!chan->idle)
1549 head_desc = list_first_entry(&chan->pending_list,
1551 tail_desc = list_last_entry(&chan->pending_list,
1556 reg = dma_ctrl_read(chan, XILINX_DMA_REG_DMACR);
1558 if (chan->desc_pendingcount <= XILINX_DMA_COALESCE_MAX) {
1560 reg |= chan->desc_pendingcount <<
1562 dma_ctrl_write(chan, XILINX_DMA_REG_DMACR, reg);
1565 if (chan->has_sg)
1566 xilinx_write(chan, XILINX_DMA_REG_CURDESC,
1569 reg |= chan->irq_delay << XILINX_DMA_CR_DELAY_SHIFT;
1570 dma_ctrl_write(chan, XILINX_DMA_REG_DMACR, reg);
1572 xilinx_dma_start(chan);
1574 if (chan->err)
1578 if (chan->has_sg) {
1579 if (chan->cyclic)
1580 xilinx_write(chan, XILINX_DMA_REG_TAILDESC,
1581 chan->cyclic_seg_v->phys);
1583 xilinx_write(chan, XILINX_DMA_REG_TAILDESC,
1594 xilinx_write(chan, XILINX_DMA_REG_SRCDSTADDR,
1598 dma_ctrl_write(chan, XILINX_DMA_REG_BTT,
1599 hw->control & chan->xdev->max_buffer_len);
1602 list_splice_tail_init(&chan->pending_list, &chan->active_list);
1603 chan->desc_pendingcount = 0;
1604 chan->idle = false;
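
Note the ordering on the AXI DMA path: CURDESC is programmed before the engine is started and TAILDESC last, because in SG mode the tail-descriptor write is the trigger; cyclic transfers instead park the tail on the dedicated cyclic_seg_v terminator so the ring never drains. A sketch, with tail_segment assumed to be the last hardware segment of tail_desc:

	if (chan->has_sg) {
		if (chan->cyclic)
			xilinx_write(chan, XILINX_DMA_REG_TAILDESC,
				     chan->cyclic_seg_v->phys);
		else
			xilinx_write(chan, XILINX_DMA_REG_TAILDESC,
				     tail_segment->phys);
	}
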
1609 * @chan: Driver specific channel struct pointer
1611 static void xilinx_mcdma_start_transfer(struct xilinx_dma_chan *chan)
1622 if (chan->err)
1625 if (!chan->idle)
1628 if (list_empty(&chan->pending_list))
1631 head_desc = list_first_entry(&chan->pending_list,
1633 tail_desc = list_last_entry(&chan->pending_list,
1638 reg = dma_ctrl_read(chan, XILINX_MCDMA_CHAN_CR_OFFSET(chan->tdest));
1640 if (chan->desc_pendingcount <= XILINX_MCDMA_COALESCE_MAX) {
1642 reg |= chan->desc_pendingcount <<
1647 dma_ctrl_write(chan, XILINX_MCDMA_CHAN_CR_OFFSET(chan->tdest), reg);
1650 xilinx_write(chan, XILINX_MCDMA_CHAN_CDESC_OFFSET(chan->tdest),
1654 reg = dma_ctrl_read(chan, XILINX_MCDMA_CHEN_OFFSET);
1655 reg |= BIT(chan->tdest);
1656 dma_ctrl_write(chan, XILINX_MCDMA_CHEN_OFFSET, reg);
1659 reg = dma_ctrl_read(chan, XILINX_MCDMA_CHAN_CR_OFFSET(chan->tdest));
1661 dma_ctrl_write(chan, XILINX_MCDMA_CHAN_CR_OFFSET(chan->tdest), reg);
1663 xilinx_dma_start(chan);
1665 if (chan->err)
1669 xilinx_write(chan, XILINX_MCDMA_CHAN_TDESC_OFFSET(chan->tdest),
1672 list_splice_tail_init(&chan->pending_list, &chan->active_list);
1673 chan->desc_pendingcount = 0;
1674 chan->idle = false;
1683 struct xilinx_dma_chan *chan = to_xilinx_chan(dchan);
1686 spin_lock_irqsave(&chan->lock, flags);
1687 chan->start_transfer(chan);
1688 spin_unlock_irqrestore(&chan->lock, flags);
1706 * @chan : xilinx DMA channel
1710 static void xilinx_dma_complete_descriptor(struct xilinx_dma_chan *chan)
1715 if (list_empty(&chan->active_list))
1718 list_for_each_entry_safe(desc, next, &chan->active_list, node) {
1719 if (chan->xdev->dma_config->dmatype == XDMA_TYPE_AXIDMA) {
1724 if (!(seg->hw.status & XILINX_DMA_BD_COMP_MASK) && chan->has_sg)
1727 if (chan->has_sg && chan->xdev->dma_config->dmatype !=
1729 desc->residue = xilinx_dma_get_residue(chan, desc);
1732 desc->err = chan->err;
1737 list_add_tail(&desc->node, &chan->done_list);
1743 * @chan: Driver specific DMA channel
1747 static int xilinx_dma_reset(struct xilinx_dma_chan *chan)
1752 dma_ctrl_set(chan, XILINX_DMA_REG_DMACR, XILINX_DMA_DMACR_RESET);
1755 err = xilinx_dma_poll_timeout(chan, XILINX_DMA_REG_DMACR, tmp,
1760 dev_err(chan->dev, "reset timeout, cr %x, sr %x\n",
1761 dma_ctrl_read(chan, XILINX_DMA_REG_DMACR),
1762 dma_ctrl_read(chan, XILINX_DMA_REG_DMASR));
1766 chan->err = false;
1767 chan->idle = true;
1768 chan->desc_pendingcount = 0;
1769 chan->desc_submitcount = 0;
1776 * @chan: Driver specific DMA channel
1780 static int xilinx_dma_chan_reset(struct xilinx_dma_chan *chan)
1785 err = xilinx_dma_reset(chan);
1790 dma_ctrl_set(chan, XILINX_DMA_REG_DMACR,
1805 struct xilinx_dma_chan *chan = data;
1808 if (chan->direction == DMA_DEV_TO_MEM)
1814 chan_sermask = dma_ctrl_read(chan, ser_offset);
1820 if (chan->direction == DMA_DEV_TO_MEM)
1821 chan_offset = chan->xdev->dma_config->max_channels / 2;
1824 chan = chan->xdev->chan[chan_offset];
1826 status = dma_ctrl_read(chan, XILINX_MCDMA_CHAN_SR_OFFSET(chan->tdest));
1830 dma_ctrl_write(chan, XILINX_MCDMA_CHAN_SR_OFFSET(chan->tdest),
1834 dev_err(chan->dev, "Channel %p has errors %x cdr %x tdr %x\n",
1835 chan,
1836 dma_ctrl_read(chan, XILINX_MCDMA_CH_ERR_OFFSET),
1837 dma_ctrl_read(chan, XILINX_MCDMA_CHAN_CDESC_OFFSET
1838 (chan->tdest)),
1839 dma_ctrl_read(chan, XILINX_MCDMA_CHAN_TDESC_OFFSET
1840 (chan->tdest)));
1841 chan->err = true;
1849 dev_dbg(chan->dev, "Inter-packet latency too long\n");
1853 spin_lock(&chan->lock);
1854 xilinx_dma_complete_descriptor(chan);
1855 chan->idle = true;
1856 chan->start_transfer(chan);
1857 spin_unlock(&chan->lock);
1860 tasklet_hi_schedule(&chan->tasklet);
1873 struct xilinx_dma_chan *chan = data;
1877 status = dma_ctrl_read(chan, XILINX_DMA_REG_DMASR);
1881 dma_ctrl_write(chan, XILINX_DMA_REG_DMASR,
1894 dma_ctrl_write(chan, XILINX_DMA_REG_DMASR,
1897 if (!chan->flush_on_fsync ||
1899 dev_err(chan->dev,
1901 chan, errors,
1902 dma_ctrl_read(chan, XILINX_DMA_REG_CURDESC),
1903 dma_ctrl_read(chan, XILINX_DMA_REG_TAILDESC));
1904 chan->err = true;
1910 spin_lock(&chan->lock);
1911 xilinx_dma_complete_descriptor(chan);
1912 chan->idle = true;
1913 chan->start_transfer(chan);
1914 spin_unlock(&chan->lock);
1917 tasklet_schedule(&chan->tasklet);
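
Both interrupt handlers end with the same split: acknowledge and classify the status bits, then, under the channel spinlock, retire finished descriptors, mark the channel idle, and restart it, deferring user callbacks to the tasklet. A sketch of the common tail, assuming the handler returns IRQ_HANDLED for serviced interrupts:

	spin_lock(&chan->lock);
	xilinx_dma_complete_descriptor(chan);	/* active_list -> done_list */
	chan->idle = true;
	chan->start_transfer(chan);		/* kick anything still pending */
	spin_unlock(&chan->lock);

	tasklet_schedule(&chan->tasklet);	/* callbacks run outside hardirq */
	return IRQ_HANDLED;
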
1923 * @chan: Driver specific dma channel
1926 static void append_desc_queue(struct xilinx_dma_chan *chan,
1935 if (list_empty(&chan->pending_list))
1942 tail_desc = list_last_entry(&chan->pending_list,
1944 if (chan->xdev->dma_config->dmatype == XDMA_TYPE_VDMA) {
1949 } else if (chan->xdev->dma_config->dmatype == XDMA_TYPE_CDMA) {
1954 } else if (chan->xdev->dma_config->dmatype == XDMA_TYPE_AXIDMA) {
1972 list_add_tail(&desc->node, &chan->pending_list);
1973 chan->desc_pendingcount++;
1975 if (chan->has_sg && (chan->xdev->dma_config->dmatype == XDMA_TYPE_VDMA)
1976 && unlikely(chan->desc_pendingcount > chan->num_frms)) {
1977 dev_dbg(chan->dev, "desc pendingcount is too high\n");
1978 chan->desc_pendingcount = chan->num_frms;
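
The per-dmatype branches above all do the same thing for their segment type: patch the old tail segment's hardware next_desc pointer to the new descriptor's first segment, so the hardware chain stays contiguous across descriptors. The AXI DMA case, sketched:

	tail_segment = list_last_entry(&tail_desc->segments,
				       struct xilinx_axidma_tx_segment, node);
	tail_segment->hw.next_desc = (u32)desc->async_tx.phys;
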
1991 struct xilinx_dma_chan *chan = to_xilinx_chan(tx->chan);
1996 if (chan->cyclic) {
1997 xilinx_dma_free_tx_descriptor(chan, desc);
2001 if (chan->err) {
2006 err = xilinx_dma_chan_reset(chan);
2011 spin_lock_irqsave(&chan->lock, flags);
2016 append_desc_queue(chan, desc);
2019 chan->cyclic = true;
2021 chan->terminating = false;
2023 spin_unlock_irqrestore(&chan->lock, flags);
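
The submit path is the canonical dmaengine tx_submit shape: refuse new work on a channel already running cyclic DMA, reset a channel that has flagged an error, then assign the cookie and queue the descriptor under the lock. A sketch of the queueing step, assuming dma_cookie_assign() from drivers/dma/dmaengine.h:

	spin_lock_irqsave(&chan->lock, flags);
	cookie = dma_cookie_assign(tx);
	append_desc_queue(chan, desc);		/* onto chan->pending_list */
	chan->terminating = false;
	spin_unlock_irqrestore(&chan->lock, flags);

	return cookie;
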
2042 struct xilinx_dma_chan *chan = to_xilinx_chan(dchan);
2057 desc = xilinx_dma_alloc_tx_descriptor(chan);
2061 dma_async_tx_descriptor_init(&desc->async_tx, &chan->common);
2066 segment = xilinx_vdma_alloc_tx_segment(chan);
2076 hw->stride |= chan->config.frm_dly <<
2080 if (chan->ext_addr) {
2087 if (chan->ext_addr) {
2106 xilinx_dma_free_tx_descriptor(chan, desc);
2124 struct xilinx_dma_chan *chan = to_xilinx_chan(dchan);
2129 if (!len || len > chan->xdev->max_buffer_len)
2132 desc = xilinx_dma_alloc_tx_descriptor(chan);
2136 dma_async_tx_descriptor_init(&desc->async_tx, &chan->common);
2140 segment = xilinx_cdma_alloc_tx_segment(chan);
2148 if (chan->ext_addr) {
2162 xilinx_dma_free_tx_descriptor(chan, desc);
2182 struct xilinx_dma_chan *chan = to_xilinx_chan(dchan);
2195 desc = xilinx_dma_alloc_tx_descriptor(chan);
2199 dma_async_tx_descriptor_init(&desc->async_tx, &chan->common);
2211 segment = xilinx_axidma_alloc_tx_segment(chan);
2219 copy = xilinx_dma_calc_copysize(chan, sg_dma_len(sg),
2224 xilinx_axidma_buf(chan, hw, sg_dma_address(sg),
2229 if (chan->direction == DMA_MEM_TO_DEV) {
2250 if (chan->direction == DMA_MEM_TO_DEV) {
2258 if (chan->xdev->has_axistream_connected)
2264 xilinx_dma_free_tx_descriptor(chan, desc);
2284 struct xilinx_dma_chan *chan = to_xilinx_chan(dchan);
2304 desc = xilinx_dma_alloc_tx_descriptor(chan);
2308 chan->direction = direction;
2309 dma_async_tx_descriptor_init(&desc->async_tx, &chan->common);
2319 segment = xilinx_axidma_alloc_tx_segment(chan);
2327 copy = xilinx_dma_calc_copysize(chan, period_len,
2330 xilinx_axidma_buf(chan, hw, buf_addr, sg_used,
2353 reg = dma_ctrl_read(chan, XILINX_DMA_REG_DMACR);
2355 dma_ctrl_write(chan, XILINX_DMA_REG_DMACR, reg);
2371 xilinx_dma_free_tx_descriptor(chan, desc);
2392 struct xilinx_dma_chan *chan = to_xilinx_chan(dchan);
2405 desc = xilinx_dma_alloc_tx_descriptor(chan);
2409 dma_async_tx_descriptor_init(&desc->async_tx, &chan->common);
2421 segment = xilinx_aximcdma_alloc_tx_segment(chan);
2430 chan->xdev->max_buffer_len);
2434 xilinx_aximcdma_buf(chan, hw, sg_dma_address(sg),
2438 if (chan->direction == DMA_MEM_TO_DEV && app_w) {
2457 if (chan->direction == DMA_MEM_TO_DEV) {
2468 xilinx_dma_free_tx_descriptor(chan, desc);
2481 struct xilinx_dma_chan *chan = to_xilinx_chan(dchan);
2485 if (!chan->cyclic) {
2486 err = chan->stop_transfer(chan);
2488 dev_err(chan->dev, "Cannot stop channel %p: %x\n",
2489 chan, dma_ctrl_read(chan,
2491 chan->err = true;
2495 xilinx_dma_chan_reset(chan);
2497 chan->terminating = true;
2498 xilinx_dma_free_descriptors(chan);
2499 chan->idle = true;
2501 if (chan->cyclic) {
2502 reg = dma_ctrl_read(chan, XILINX_DMA_REG_DMACR);
2504 dma_ctrl_write(chan, XILINX_DMA_REG_DMACR, reg);
2505 chan->cyclic = false;
2508 if ((chan->xdev->dma_config->dmatype == XDMA_TYPE_CDMA) && chan->has_sg)
2509 dma_ctrl_clr(chan, XILINX_DMA_REG_DMACR,
2517 struct xilinx_dma_chan *chan = to_xilinx_chan(dchan);
2519 tasklet_kill(&chan->tasklet);
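
terminate_all stops (or, on error, resets) the engine, sets chan->terminating so descriptor cleanup knows callbacks may be skipped, and frees every descriptor list; the separate device_synchronize hook (lines 2517-2519) then kills the tasklet so no callback is still running once termination returns. Consumer-side sketch:

	/* dmaengine_terminate_sync() invokes the terminate_all hook above,
	 * then ->device_synchronize(), which here is tasklet_kill(). */
	err = dmaengine_terminate_sync(dchan);
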
2538 struct xilinx_dma_chan *chan = to_xilinx_chan(dchan);
2542 return xilinx_dma_chan_reset(chan);
2544 dmacr = dma_ctrl_read(chan, XILINX_DMA_REG_DMACR);
2546 chan->config.frm_dly = cfg->frm_dly;
2547 chan->config.park = cfg->park;
2550 chan->config.gen_lock = cfg->gen_lock;
2551 chan->config.master = cfg->master;
2554 if (cfg->gen_lock && chan->genlock) {
2560 chan->config.frm_cnt_en = cfg->frm_cnt_en;
2561 chan->config.vflip_en = cfg->vflip_en;
2564 chan->config.park_frm = cfg->park_frm;
2566 chan->config.park_frm = -1;
2568 chan->config.coalesc = cfg->coalesc;
2569 chan->config.delay = cfg->delay;
2574 chan->config.coalesc = cfg->coalesc;
2580 chan->config.delay = cfg->delay;
2587 dma_ctrl_write(chan, XILINX_DMA_REG_DMACR, dmacr);
2599 * @chan: Driver specific DMA channel
2601 static void xilinx_dma_chan_remove(struct xilinx_dma_chan *chan)
2604 dma_ctrl_clr(chan, XILINX_DMA_REG_DMACR,
2607 if (chan->irq > 0)
2608 free_irq(chan->irq, chan);
2610 tasklet_kill(&chan->tasklet);
2612 list_del(&chan->common.device_node);
2806 struct xilinx_dma_chan *chan;
2812 chan = devm_kzalloc(xdev->dev, sizeof(*chan), GFP_KERNEL);
2813 if (!chan)
2816 chan->dev = xdev->dev;
2817 chan->xdev = xdev;
2818 chan->desc_pendingcount = 0x0;
2819 chan->ext_addr = xdev->ext_addr;
2825 chan->idle = true;
2827 spin_lock_init(&chan->lock);
2828 INIT_LIST_HEAD(&chan->pending_list);
2829 INIT_LIST_HEAD(&chan->done_list);
2830 INIT_LIST_HEAD(&chan->active_list);
2831 INIT_LIST_HEAD(&chan->free_seg_list);
2836 of_property_read_u8(node, "xlnx,irq-delay", &chan->irq_delay);
2838 chan->genlock = of_property_read_bool(node, "xlnx,genlock-mode");
2857 chan->direction = DMA_MEM_TO_DEV;
2858 chan->id = xdev->mm2s_chan_id++;
2859 chan->tdest = chan->id;
2861 chan->ctrl_offset = XILINX_DMA_MM2S_CTRL_OFFSET;
2863 chan->desc_offset = XILINX_VDMA_MM2S_DESC_OFFSET;
2864 chan->config.park = 1;
2868 chan->flush_on_fsync = true;
2874 chan->direction = DMA_DEV_TO_MEM;
2875 chan->id = xdev->s2mm_chan_id++;
2876 chan->tdest = chan->id - xdev->dma_config->max_channels / 2;
2877 chan->has_vflip = of_property_read_bool(node,
2879 if (chan->has_vflip) {
2880 chan->config.vflip_en = dma_read(chan,
2886 chan->ctrl_offset = XILINX_MCDMA_S2MM_CTRL_OFFSET;
2888 chan->ctrl_offset = XILINX_DMA_S2MM_CTRL_OFFSET;
2891 chan->desc_offset = XILINX_VDMA_S2MM_DESC_OFFSET;
2892 chan->config.park = 1;
2896 chan->flush_on_fsync = true;
2904 chan->irq = of_irq_get(node, chan->tdest);
2905 if (chan->irq < 0)
2906 return dev_err_probe(xdev->dev, chan->irq, "failed to get irq\n");
2907 err = request_irq(chan->irq, xdev->dma_config->irq_handler,
2908 IRQF_SHARED, "xilinx-dma-controller", chan);
2910 dev_err(xdev->dev, "unable to request IRQ %d\n", chan->irq);
2915 chan->start_transfer = xilinx_dma_start_transfer;
2916 chan->stop_transfer = xilinx_dma_stop_transfer;
2918 chan->start_transfer = xilinx_mcdma_start_transfer;
2919 chan->stop_transfer = xilinx_dma_stop_transfer;
2921 chan->start_transfer = xilinx_cdma_start_transfer;
2922 chan->stop_transfer = xilinx_cdma_stop_transfer;
2924 chan->start_transfer = xilinx_vdma_start_transfer;
2925 chan->stop_transfer = xilinx_dma_stop_transfer;
2931 dma_ctrl_read(chan, XILINX_DMA_REG_DMASR) &
2933 chan->has_sg = true;
2934 dev_dbg(chan->dev, "ch %d: SG %s\n", chan->id,
2935 chan->has_sg ? "enabled" : "disabled");
2939 tasklet_setup(&chan->tasklet, xilinx_dma_do_tasklet);
2945 chan->common.device = &xdev->common;
2947 list_add_tail(&chan->common.device_node, &xdev->common.channels);
2948 xdev->chan[chan->id] = chan;
2951 err = xilinx_dma_chan_reset(chan);
3002 if (chan_id >= xdev->dma_config->max_channels || !xdev->chan[chan_id])
3005 return dma_get_slave_channel(&xdev->chan[chan_id]->common);
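
Lines 3002-3005 are the of_dma translation hook: the first cell of the DT dma specifier indexes xdev->chan[], and the embedded generic channel is handed back via dma_get_slave_channel(). A reconstruction, with the callback name of_dma_xilinx_xlate assumed:

	static struct dma_chan *of_dma_xilinx_xlate(struct of_phandle_args *dma_spec,
						    struct of_dma *ofdma)
	{
		struct xilinx_dma_device *xdev = ofdma->of_dma_data;
		int chan_id = dma_spec->args[0];

		if (chan_id >= xdev->dma_config->max_channels ||
		    !xdev->chan[chan_id])
			return NULL;

		return dma_get_slave_channel(&xdev->chan[chan_id]->common);
	}
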
3202 if (xdev->chan[i])
3203 xdev->chan[i]->num_frms = num_frames;
3234 if (xdev->chan[i])
3235 xilinx_dma_chan_remove(xdev->chan[i]);
3258 if (xdev->chan[i])
3259 xilinx_dma_chan_remove(xdev->chan[i]);