Lines matching refs: chan

447 void (*start_transfer)(struct xilinx_dma_chan *chan);
448 int (*stop_transfer)(struct xilinx_dma_chan *chan);
483 * @chan: Driver specific DMA channel
501 struct xilinx_dma_chan *chan[XILINX_MCDMA_MAX_CHANS_PER_DEVICE];
517 #define to_xilinx_chan(chan) \
518 container_of(chan, struct xilinx_dma_chan, common)
521 #define xilinx_dma_poll_timeout(chan, reg, val, cond, delay_us, timeout_us) \
522 readl_poll_timeout_atomic(chan->xdev->regs + chan->ctrl_offset + reg, \
526 static inline u32 dma_read(struct xilinx_dma_chan *chan, u32 reg)
528 return ioread32(chan->xdev->regs + reg);
531 static inline void dma_write(struct xilinx_dma_chan *chan, u32 reg, u32 value)
533 iowrite32(value, chan->xdev->regs + reg);
536 static inline void vdma_desc_write(struct xilinx_dma_chan *chan, u32 reg,
539 dma_write(chan, chan->desc_offset + reg, value);
542 static inline u32 dma_ctrl_read(struct xilinx_dma_chan *chan, u32 reg)
544 return dma_read(chan, chan->ctrl_offset + reg);
547 static inline void dma_ctrl_write(struct xilinx_dma_chan *chan, u32 reg,
550 dma_write(chan, chan->ctrl_offset + reg, value);
553 static inline void dma_ctrl_clr(struct xilinx_dma_chan *chan, u32 reg,
556 dma_ctrl_write(chan, reg, dma_ctrl_read(chan, reg) & ~clr);
559 static inline void dma_ctrl_set(struct xilinx_dma_chan *chan, u32 reg,
562 dma_ctrl_write(chan, reg, dma_ctrl_read(chan, reg) | set);
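
The accessor helpers above form a small layering: dma_read()/dma_write() touch the raw register window, the _ctrl_ variants add the channel's ctrl_offset, and set/clr are read-modify-write wrappers on top of those. A minimal user-space sketch of the same pattern, assuming a plain memory-backed "register file" in place of ioread32()/iowrite32() and a made-up CTRL_OFFSET:

    #include <stdint.h>
    #include <stdio.h>

    static uint32_t regs[64];                 /* stand-in for the MMIO window */

    static uint32_t dma_read(uint32_t reg)            { return regs[reg / 4]; }
    static void dma_write(uint32_t reg, uint32_t v)   { regs[reg / 4] = v; }

    #define CTRL_OFFSET 0x30                  /* hypothetical per-channel offset */

    static uint32_t dma_ctrl_read(uint32_t reg)          { return dma_read(CTRL_OFFSET + reg); }
    static void dma_ctrl_write(uint32_t reg, uint32_t v) { dma_write(CTRL_OFFSET + reg, v); }

    /* read-modify-write, as in dma_ctrl_set()/dma_ctrl_clr() */
    static void dma_ctrl_set(uint32_t reg, uint32_t set) { dma_ctrl_write(reg, dma_ctrl_read(reg) | set); }
    static void dma_ctrl_clr(uint32_t reg, uint32_t clr) { dma_ctrl_write(reg, dma_ctrl_read(reg) & ~clr); }

    int main(void)
    {
        dma_ctrl_set(0x00, 1u << 0);          /* e.g. a RUNSTOP-style bit */
        dma_ctrl_clr(0x00, 1u << 0);
        printf("DMACR mirror: 0x%08x\n", dma_ctrl_read(0x00));
        return 0;
    }
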
567 * @chan: Driver specific VDMA channel
576 static inline void vdma_desc_write_64(struct xilinx_dma_chan *chan, u32 reg,
580 writel(value_lsb, chan->xdev->regs + chan->desc_offset + reg);
583 writel(value_msb, chan->xdev->regs + chan->desc_offset + reg + 4);
586 static inline void dma_writeq(struct xilinx_dma_chan *chan, u32 reg, u64 value)
588 lo_hi_writeq(value, chan->xdev->regs + chan->ctrl_offset + reg);
591 static inline void xilinx_write(struct xilinx_dma_chan *chan, u32 reg,
594 if (chan->ext_addr)
595 dma_writeq(chan, reg, addr);
597 dma_ctrl_write(chan, reg, addr);
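
xilinx_write() picks between a 64-bit and a 32-bit register write based on chan->ext_addr, and the 64-bit path is two 32-bit stores, lower word first; the same split is visible in vdma_desc_write_64() at reg and reg + 4. A sketch of that split, assuming a little-endian pair of adjacent 32-bit registers modeled as an array:

    #include <stdint.h>
    #include <stdbool.h>
    #include <stdio.h>

    static uint32_t regs[4];

    /* two 32-bit stores, lower word first -- the lo_hi_writeq() shape */
    static void writeq_lo_hi(unsigned int idx, uint64_t v)
    {
        regs[idx]     = (uint32_t)v;
        regs[idx + 1] = (uint32_t)(v >> 32);
    }

    static void xilinx_write_sketch(bool ext_addr, unsigned int idx, uint64_t addr)
    {
        if (ext_addr)
            writeq_lo_hi(idx, addr);     /* 64-bit addressing configured */
        else
            regs[idx] = (uint32_t)addr;  /* single 32-bit address register */
    }

    int main(void)
    {
        xilinx_write_sketch(true, 0, 0x123456789ULL);
        printf("lo=0x%08x hi=0x%08x\n", regs[0], regs[1]);
        return 0;
    }
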
600 static inline void xilinx_axidma_buf(struct xilinx_dma_chan *chan,
605 if (chan->ext_addr) {
614 static inline void xilinx_aximcdma_buf(struct xilinx_dma_chan *chan,
618 if (chan->ext_addr) {
632 * @chan: Driver specific DMA channel
637 xilinx_vdma_alloc_tx_segment(struct xilinx_dma_chan *chan)
642 segment = dma_pool_zalloc(chan->desc_pool, GFP_ATOMIC, &phys);
653 * @chan: Driver specific DMA channel
658 xilinx_cdma_alloc_tx_segment(struct xilinx_dma_chan *chan)
663 segment = dma_pool_zalloc(chan->desc_pool, GFP_ATOMIC, &phys);
674 * @chan: Driver specific DMA channel
679 xilinx_axidma_alloc_tx_segment(struct xilinx_dma_chan *chan)
684 spin_lock_irqsave(&chan->lock, flags);
685 if (!list_empty(&chan->free_seg_list)) {
686 segment = list_first_entry(&chan->free_seg_list,
691 spin_unlock_irqrestore(&chan->lock, flags);
694 dev_dbg(chan->dev, "Could not find free tx segment\n");
701 * @chan: Driver specific DMA channel
706 xilinx_aximcdma_alloc_tx_segment(struct xilinx_dma_chan *chan)
711 spin_lock_irqsave(&chan->lock, flags);
712 if (!list_empty(&chan->free_seg_list)) {
713 segment = list_first_entry(&chan->free_seg_list,
718 spin_unlock_irqrestore(&chan->lock, flags);
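
Unlike the VDMA/CDMA allocators above, which call dma_pool_zalloc(), the AXI DMA and MCDMA allocators take a pre-allocated segment from chan->free_seg_list under chan->lock. A user-space sketch of the same take-first-under-lock pattern, assuming a pthread mutex in place of the spinlock and a singly linked free list:

    #include <pthread.h>
    #include <stddef.h>
    #include <stdio.h>

    struct segment {
        struct segment *next;
        int id;
    };

    static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;
    static struct segment *free_list;

    /* take the first free segment under the lock, as in
     * xilinx_axidma_alloc_tx_segment() */
    static struct segment *alloc_segment(void)
    {
        struct segment *seg = NULL;

        pthread_mutex_lock(&lock);
        if (free_list) {                  /* list_first_entry() + unlink */
            seg = free_list;
            free_list = seg->next;
        }
        pthread_mutex_unlock(&lock);

        if (!seg)
            fprintf(stderr, "Could not find free tx segment\n");
        return seg;
    }

    /* return it to the list (head insert is enough for a sketch) */
    static void free_segment(struct segment *seg)
    {
        pthread_mutex_lock(&lock);
        seg->next = free_list;
        free_list = seg;
        pthread_mutex_unlock(&lock);
    }

    int main(void)
    {
        struct segment s = { NULL, 1 };

        free_segment(&s);
        printf("got segment %d\n", alloc_segment()->id);
        return 0;
    }
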
747 * @chan: Driver specific DMA channel
750 static void xilinx_dma_free_tx_segment(struct xilinx_dma_chan *chan,
755 list_add_tail(&segment->node, &chan->free_seg_list);
760 * @chan: Driver specific DMA channel
763 static void xilinx_mcdma_free_tx_segment(struct xilinx_dma_chan *chan,
769 list_add_tail(&segment->node, &chan->free_seg_list);
774 * @chan: Driver specific DMA channel
777 static void xilinx_cdma_free_tx_segment(struct xilinx_dma_chan *chan,
780 dma_pool_free(chan->desc_pool, segment, segment->phys);
785 * @chan: Driver specific DMA channel
788 static void xilinx_vdma_free_tx_segment(struct xilinx_dma_chan *chan,
791 dma_pool_free(chan->desc_pool, segment, segment->phys);
796 * @chan: Driver specific DMA channel
801 xilinx_dma_alloc_tx_descriptor(struct xilinx_dma_chan *chan)
816 * @chan: Driver specific DMA channel
820 xilinx_dma_free_tx_descriptor(struct xilinx_dma_chan *chan,
831 if (chan->xdev->dma_config->dmatype == XDMA_TYPE_VDMA) {
834 xilinx_vdma_free_tx_segment(chan, segment);
836 } else if (chan->xdev->dma_config->dmatype == XDMA_TYPE_CDMA) {
840 xilinx_cdma_free_tx_segment(chan, cdma_segment);
842 } else if (chan->xdev->dma_config->dmatype == XDMA_TYPE_AXIDMA) {
846 xilinx_dma_free_tx_segment(chan, axidma_segment);
852 xilinx_mcdma_free_tx_segment(chan, aximcdma_segment);
863 * @chan: Driver specific DMA channel
866 static void xilinx_dma_free_desc_list(struct xilinx_dma_chan *chan,
873 xilinx_dma_free_tx_descriptor(chan, desc);
879 * @chan: Driver specific DMA channel
881 static void xilinx_dma_free_descriptors(struct xilinx_dma_chan *chan)
885 spin_lock_irqsave(&chan->lock, flags);
887 xilinx_dma_free_desc_list(chan, &chan->pending_list);
888 xilinx_dma_free_desc_list(chan, &chan->done_list);
889 xilinx_dma_free_desc_list(chan, &chan->active_list);
891 spin_unlock_irqrestore(&chan->lock, flags);
900 struct xilinx_dma_chan *chan = to_xilinx_chan(dchan);
903 dev_dbg(chan->dev, "Free all channel resources.\n");
905 xilinx_dma_free_descriptors(chan);
907 if (chan->xdev->dma_config->dmatype == XDMA_TYPE_AXIDMA) {
908 spin_lock_irqsave(&chan->lock, flags);
909 INIT_LIST_HEAD(&chan->free_seg_list);
910 spin_unlock_irqrestore(&chan->lock, flags);
913 dma_free_coherent(chan->dev, sizeof(*chan->seg_v) *
914 XILINX_DMA_NUM_DESCS, chan->seg_v,
915 chan->seg_p);
918 dma_free_coherent(chan->dev, sizeof(*chan->cyclic_seg_v),
919 chan->cyclic_seg_v, chan->cyclic_seg_p);
922 if (chan->xdev->dma_config->dmatype == XDMA_TYPE_AXIMCDMA) {
923 spin_lock_irqsave(&chan->lock, flags);
924 INIT_LIST_HEAD(&chan->free_seg_list);
925 spin_unlock_irqrestore(&chan->lock, flags);
928 dma_free_coherent(chan->dev, sizeof(*chan->seg_mv) *
929 XILINX_DMA_NUM_DESCS, chan->seg_mv,
930 chan->seg_p);
933 if (chan->xdev->dma_config->dmatype != XDMA_TYPE_AXIDMA &&
934 chan->xdev->dma_config->dmatype != XDMA_TYPE_AXIMCDMA) {
935 dma_pool_destroy(chan->desc_pool);
936 chan->desc_pool = NULL;
943 * @chan: Driver specific dma channel
948 static u32 xilinx_dma_get_residue(struct xilinx_dma_chan *chan,
961 if (chan->xdev->dma_config->dmatype == XDMA_TYPE_CDMA) {
967 chan->xdev->max_buffer_len;
968 } else if (chan->xdev->dma_config->dmatype ==
975 chan->xdev->max_buffer_len;
984 chan->xdev->max_buffer_len;
993 * @chan: Driver specific dma channel
997 static void xilinx_dma_chan_handle_cyclic(struct xilinx_dma_chan *chan,
1007 spin_unlock_irqrestore(&chan->lock, *flags);
1009 spin_lock_irqsave(&chan->lock, *flags);
1015 * @chan: Driver specific DMA channel
1017 static void xilinx_dma_chan_desc_cleanup(struct xilinx_dma_chan *chan)
1022 spin_lock_irqsave(&chan->lock, flags);
1024 list_for_each_entry_safe(desc, next, &chan->done_list, node) {
1028 xilinx_dma_chan_handle_cyclic(chan, desc, &flags);
1036 if (chan->direction == DMA_DEV_TO_MEM)
1047 spin_unlock_irqrestore(&chan->lock, flags);
1049 spin_lock_irqsave(&chan->lock, flags);
1053 xilinx_dma_free_tx_descriptor(chan, desc);
1059 if (chan->terminating)
1063 spin_unlock_irqrestore(&chan->lock, flags);
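
xilinx_dma_chan_handle_cyclic() and xilinx_dma_chan_desc_cleanup() release chan->lock before invoking the client's completion callback and re-acquire it afterwards, since the callback may call back into the driver and must not run under the channel lock. A minimal sketch of that unlock/callback/relock pattern, assuming a pthread mutex:

    #include <pthread.h>

    static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;

    static void run_callbacks(void (*cb)(void *), void *arg)
    {
        pthread_mutex_lock(&lock);

        /* ... pop a completed descriptor from done_list ... */

        pthread_mutex_unlock(&lock);  /* drop the lock: the callback may */
        cb(arg);                      /* resubmit work and re-enter us   */
        pthread_mutex_lock(&lock);

        /* ... return the descriptor to the free pool ... */

        pthread_mutex_unlock(&lock);
    }
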
1072 struct xilinx_dma_chan *chan = from_tasklet(chan, t, tasklet);
1074 xilinx_dma_chan_desc_cleanup(chan);
1085 struct xilinx_dma_chan *chan = to_xilinx_chan(dchan);
1089 if (chan->desc_pool)
1096 if (chan->xdev->dma_config->dmatype == XDMA_TYPE_AXIDMA) {
1098 chan->seg_v = dma_alloc_coherent(chan->dev,
1099 sizeof(*chan->seg_v) * XILINX_DMA_NUM_DESCS,
1100 &chan->seg_p, GFP_KERNEL);
1101 if (!chan->seg_v) {
1102 dev_err(chan->dev,
1104 chan->id);
1113 chan->cyclic_seg_v = dma_alloc_coherent(chan->dev,
1114 sizeof(*chan->cyclic_seg_v),
1115 &chan->cyclic_seg_p,
1117 if (!chan->cyclic_seg_v) {
1118 dev_err(chan->dev,
1120 dma_free_coherent(chan->dev, sizeof(*chan->seg_v) *
1121 XILINX_DMA_NUM_DESCS, chan->seg_v,
1122 chan->seg_p);
1125 chan->cyclic_seg_v->phys = chan->cyclic_seg_p;
1128 chan->seg_v[i].hw.next_desc =
1129 lower_32_bits(chan->seg_p + sizeof(*chan->seg_v) *
1131 chan->seg_v[i].hw.next_desc_msb =
1132 upper_32_bits(chan->seg_p + sizeof(*chan->seg_v) *
1134 chan->seg_v[i].phys = chan->seg_p +
1135 sizeof(*chan->seg_v) * i;
1136 list_add_tail(&chan->seg_v[i].node,
1137 &chan->free_seg_list);
1139 } else if (chan->xdev->dma_config->dmatype == XDMA_TYPE_AXIMCDMA) {
1141 chan->seg_mv = dma_alloc_coherent(chan->dev,
1142 sizeof(*chan->seg_mv) *
1144 &chan->seg_p, GFP_KERNEL);
1145 if (!chan->seg_mv) {
1146 dev_err(chan->dev,
1148 chan->id);
1152 chan->seg_mv[i].hw.next_desc =
1153 lower_32_bits(chan->seg_p + sizeof(*chan->seg_mv) *
1155 chan->seg_mv[i].hw.next_desc_msb =
1156 upper_32_bits(chan->seg_p + sizeof(*chan->seg_mv) *
1158 chan->seg_mv[i].phys = chan->seg_p +
1159 sizeof(*chan->seg_mv) * i;
1160 list_add_tail(&chan->seg_mv[i].node,
1161 &chan->free_seg_list);
1163 } else if (chan->xdev->dma_config->dmatype == XDMA_TYPE_CDMA) {
1164 chan->desc_pool = dma_pool_create("xilinx_cdma_desc_pool",
1165 chan->dev,
1170 chan->desc_pool = dma_pool_create("xilinx_vdma_desc_pool",
1171 chan->dev,
1177 if (!chan->desc_pool &&
1178 ((chan->xdev->dma_config->dmatype != XDMA_TYPE_AXIDMA) &&
1179 chan->xdev->dma_config->dmatype != XDMA_TYPE_AXIMCDMA)) {
1180 dev_err(chan->dev,
1182 chan->id);
1188 if (chan->xdev->dma_config->dmatype == XDMA_TYPE_AXIDMA) {
1192 dma_ctrl_set(chan, XILINX_DMA_REG_DMACR,
1196 if ((chan->xdev->dma_config->dmatype == XDMA_TYPE_CDMA) && chan->has_sg)
1197 dma_ctrl_set(chan, XILINX_DMA_REG_DMACR,
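
For AXI DMA and MCDMA, alloc_chan_resources() carves XILINX_DMA_NUM_DESCS segments out of one dma_alloc_coherent() block and chains them into a hardware ring: entry i's next_desc holds the bus address of entry (i + 1) % N, split into 32-bit halves. A sketch of the ring wiring, with a made-up base address standing in for the DMA handle seg_p:

    #include <stdint.h>
    #include <stdio.h>

    #define NUM_DESCS 8

    struct seg_hw {
        uint32_t next_desc;      /* lower 32 bits of next segment's bus addr */
        uint32_t next_desc_msb;  /* upper 32 bits */
    };

    int main(void)
    {
        struct seg_hw ring[NUM_DESCS];
        uint64_t seg_p = 0x80000000ULL;      /* pretend DMA handle of ring[0] */

        for (int i = 0; i < NUM_DESCS; i++) {
            uint64_t next = seg_p + sizeof(struct seg_hw) *
                            ((i + 1) % NUM_DESCS);   /* wrap to form a ring */
            ring[i].next_desc     = (uint32_t)next;
            ring[i].next_desc_msb = (uint32_t)(next >> 32);
        }
        printf("last entry points back to 0x%08x\n", ring[NUM_DESCS - 1].next_desc);
        return 0;
    }
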
1205 * @chan: Driver specific DMA channel
1211 static int xilinx_dma_calc_copysize(struct xilinx_dma_chan *chan,
1217 chan->xdev->max_buffer_len);
1220 chan->xdev->common.copy_align) {
1226 (1 << chan->xdev->common.copy_align));
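
xilinx_dma_calc_copysize() first clamps the requested length to the engine's max_buffer_len and then, when copy_align is set, rounds the result down to a multiple of 1 << copy_align. A sketch of that clamp-then-align-down arithmetic, under the assumption that only a non-final chunk (one with more data following it) gets rounded down:

    #include <stddef.h>
    #include <stdio.h>

    /* copy_align is log2 of the alignment granule, as in common.copy_align */
    static size_t calc_copysize(size_t len, size_t max_buffer_len,
                                unsigned int copy_align)
    {
        size_t copy = len < max_buffer_len ? len : max_buffer_len;

        if (copy < len && copy_align)               /* more data follows:   */
            copy &= ~(((size_t)1 << copy_align) - 1); /* keep split aligned */
        return copy;
    }

    int main(void)
    {
        /* 1000 bytes against a 256-byte engine limit, 64-byte granule */
        printf("%zu\n", calc_copysize(1000, 256, 6));  /* -> 256 */
        return 0;
    }
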
1243 struct xilinx_dma_chan *chan = to_xilinx_chan(dchan);
1253 spin_lock_irqsave(&chan->lock, flags);
1254 if (!list_empty(&chan->active_list)) {
1255 desc = list_last_entry(&chan->active_list,
1261 if (chan->has_sg && chan->xdev->dma_config->dmatype != XDMA_TYPE_VDMA)
1262 residue = xilinx_dma_get_residue(chan, desc);
1264 spin_unlock_irqrestore(&chan->lock, flags);
1273 * @chan: Driver specific DMA channel
1277 static int xilinx_dma_stop_transfer(struct xilinx_dma_chan *chan)
1281 dma_ctrl_clr(chan, XILINX_DMA_REG_DMACR, XILINX_DMA_DMACR_RUNSTOP);
1284 return xilinx_dma_poll_timeout(chan, XILINX_DMA_REG_DMASR, val,
1291 * @chan: Driver specific DMA channel
1295 static int xilinx_cdma_stop_transfer(struct xilinx_dma_chan *chan)
1299 return xilinx_dma_poll_timeout(chan, XILINX_DMA_REG_DMASR, val,
1306 * @chan: Driver specific DMA channel
1308 static void xilinx_dma_start(struct xilinx_dma_chan *chan)
1313 dma_ctrl_set(chan, XILINX_DMA_REG_DMACR, XILINX_DMA_DMACR_RUNSTOP);
1316 err = xilinx_dma_poll_timeout(chan, XILINX_DMA_REG_DMASR, val,
1321 dev_err(chan->dev, "Cannot start channel %p: %x\n",
1322 chan, dma_ctrl_read(chan, XILINX_DMA_REG_DMASR));
1324 chan->err = true;
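
xilinx_dma_start() and the stop_transfer() callbacks bracket the RUNSTOP bit with a bounded poll of the status register: flip the bit, then spin until the halted flag reflects it or a timeout expires, which is what the xilinx_dma_poll_timeout() macro at line 521 expands to. A user-space sketch of the bounded poll, assuming a hypothetical bit position and a plain volatile word for the register:

    #include <stdint.h>
    #include <errno.h>

    #define DMASR_HALTED (1u << 0)   /* hypothetical bit position */

    /* bounded busy-wait on a status bit, the readl_poll_timeout_atomic()
     * shape behind xilinx_dma_poll_timeout() */
    int poll_until_halted(volatile uint32_t *dmasr, int max_polls)
    {
        while (max_polls--) {
            if (*dmasr & DMASR_HALTED)
                return 0;
            /* a real implementation delays delay_us between reads */
        }
        return -ETIMEDOUT;
    }
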
1330 * @chan: Driver specific channel struct pointer
1332 static void xilinx_vdma_start_transfer(struct xilinx_dma_chan *chan)
1334 struct xilinx_vdma_config *config = &chan->config;
1341 if (chan->err)
1344 if (!chan->idle)
1347 if (list_empty(&chan->pending_list))
1350 desc = list_first_entry(&chan->pending_list,
1354 if (chan->has_vflip) {
1355 reg = dma_read(chan, XILINX_VDMA_REG_ENABLE_VERTICAL_FLIP);
1358 dma_write(chan, XILINX_VDMA_REG_ENABLE_VERTICAL_FLIP,
1362 reg = dma_ctrl_read(chan, XILINX_DMA_REG_DMACR);
1375 dma_ctrl_write(chan, XILINX_DMA_REG_DMACR, reg);
1377 j = chan->desc_submitcount;
1378 reg = dma_read(chan, XILINX_DMA_REG_PARK_PTR);
1379 if (chan->direction == DMA_MEM_TO_DEV) {
1386 dma_write(chan, XILINX_DMA_REG_PARK_PTR, reg);
1389 xilinx_dma_start(chan);
1391 if (chan->err)
1395 if (chan->desc_submitcount < chan->num_frms)
1396 i = chan->desc_submitcount;
1399 if (chan->ext_addr)
1400 vdma_desc_write_64(chan,
1405 vdma_desc_write(chan,
1416 vdma_desc_write(chan, XILINX_DMA_REG_HSIZE, last->hw.hsize);
1417 vdma_desc_write(chan, XILINX_DMA_REG_FRMDLY_STRIDE,
1419 vdma_desc_write(chan, XILINX_DMA_REG_VSIZE, last->hw.vsize);
1421 chan->desc_submitcount++;
1422 chan->desc_pendingcount--;
1424 list_add_tail(&desc->node, &chan->active_list);
1425 if (chan->desc_submitcount == chan->num_frms)
1426 chan->desc_submitcount = 0;
1428 chan->idle = false;
1433 * @chan: Driver specific channel struct pointer
1435 static void xilinx_cdma_start_transfer(struct xilinx_dma_chan *chan)
1439 u32 ctrl_reg = dma_read(chan, XILINX_DMA_REG_DMACR);
1441 if (chan->err)
1444 if (!chan->idle)
1447 if (list_empty(&chan->pending_list))
1450 head_desc = list_first_entry(&chan->pending_list,
1452 tail_desc = list_last_entry(&chan->pending_list,
1457 if (chan->desc_pendingcount <= XILINX_DMA_COALESCE_MAX) {
1459 ctrl_reg |= chan->desc_pendingcount <<
1461 dma_ctrl_write(chan, XILINX_DMA_REG_DMACR, ctrl_reg);
1464 if (chan->has_sg) {
1465 dma_ctrl_clr(chan, XILINX_DMA_REG_DMACR,
1468 dma_ctrl_set(chan, XILINX_DMA_REG_DMACR,
1471 xilinx_write(chan, XILINX_DMA_REG_CURDESC,
1475 xilinx_write(chan, XILINX_DMA_REG_TAILDESC,
1488 xilinx_write(chan, XILINX_CDMA_REG_SRCADDR,
1490 xilinx_write(chan, XILINX_CDMA_REG_DSTADDR,
1494 dma_ctrl_write(chan, XILINX_DMA_REG_BTT,
1495 hw->control & chan->xdev->max_buffer_len);
1498 list_splice_tail_init(&chan->pending_list, &chan->active_list);
1499 chan->desc_pendingcount = 0;
1500 chan->idle = false;
1505 * @chan: Driver specific channel struct pointer
1507 static void xilinx_dma_start_transfer(struct xilinx_dma_chan *chan)
1513 if (chan->err)
1516 if (list_empty(&chan->pending_list))
1519 if (!chan->idle)
1522 head_desc = list_first_entry(&chan->pending_list,
1524 tail_desc = list_last_entry(&chan->pending_list,
1529 reg = dma_ctrl_read(chan, XILINX_DMA_REG_DMACR);
1531 if (chan->desc_pendingcount <= XILINX_DMA_COALESCE_MAX) {
1533 reg |= chan->desc_pendingcount <<
1535 dma_ctrl_write(chan, XILINX_DMA_REG_DMACR, reg);
1538 if (chan->has_sg)
1539 xilinx_write(chan, XILINX_DMA_REG_CURDESC,
1542 xilinx_dma_start(chan);
1544 if (chan->err)
1548 if (chan->has_sg) {
1549 if (chan->cyclic)
1550 xilinx_write(chan, XILINX_DMA_REG_TAILDESC,
1551 chan->cyclic_seg_v->phys);
1553 xilinx_write(chan, XILINX_DMA_REG_TAILDESC,
1564 xilinx_write(chan, XILINX_DMA_REG_SRCDSTADDR,
1568 dma_ctrl_write(chan, XILINX_DMA_REG_BTT,
1569 hw->control & chan->xdev->max_buffer_len);
1572 list_splice_tail_init(&chan->pending_list, &chan->active_list);
1573 chan->desc_pendingcount = 0;
1574 chan->idle = false;
1579 * @chan: Driver specific channel struct pointer
1581 static void xilinx_mcdma_start_transfer(struct xilinx_dma_chan *chan)
1592 if (chan->err)
1595 if (!chan->idle)
1598 if (list_empty(&chan->pending_list))
1601 head_desc = list_first_entry(&chan->pending_list,
1603 tail_desc = list_last_entry(&chan->pending_list,
1608 reg = dma_ctrl_read(chan, XILINX_MCDMA_CHAN_CR_OFFSET(chan->tdest));
1610 if (chan->desc_pendingcount <= XILINX_MCDMA_COALESCE_MAX) {
1612 reg |= chan->desc_pendingcount <<
1617 dma_ctrl_write(chan, XILINX_MCDMA_CHAN_CR_OFFSET(chan->tdest), reg);
1620 xilinx_write(chan, XILINX_MCDMA_CHAN_CDESC_OFFSET(chan->tdest),
1624 reg = dma_ctrl_read(chan, XILINX_MCDMA_CHEN_OFFSET);
1625 reg |= BIT(chan->tdest);
1626 dma_ctrl_write(chan, XILINX_MCDMA_CHEN_OFFSET, reg);
1629 reg = dma_ctrl_read(chan, XILINX_MCDMA_CHAN_CR_OFFSET(chan->tdest));
1631 dma_ctrl_write(chan, XILINX_MCDMA_CHAN_CR_OFFSET(chan->tdest), reg);
1633 xilinx_dma_start(chan);
1635 if (chan->err)
1639 xilinx_write(chan, XILINX_MCDMA_CHAN_TDESC_OFFSET(chan->tdest),
1642 list_splice_tail_init(&chan->pending_list, &chan->active_list);
1643 chan->desc_pendingcount = 0;
1644 chan->idle = false;
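
All four start_transfer() variants (VDMA at 1332, CDMA at 1435, AXI DMA at 1507, MCDMA at 1581) share one shape: bail out if the channel has an error, is busy, or has nothing pending; program the head descriptor; start the engine; then write the tail descriptor, which kicks the fetch; finally splice pending_list onto active_list and mark the channel busy. A condensed sketch of that control flow, with the hardware pokes stubbed out:

    #include <stdbool.h>

    struct chan {
        bool err, idle;
        int pending;        /* stand-in for list_empty(&pending_list) */
        int active;
    };

    static void program_head(struct chan *c) { (void)c; /* CURDESC write */ }
    static void start_engine(struct chan *c) { (void)c; /* RUNSTOP set + poll */ }
    static void program_tail(struct chan *c) { (void)c; /* TAILDESC write */ }

    static void start_transfer(struct chan *c)
    {
        if (c->err || !c->idle || !c->pending)
            return;                      /* error, busy, or nothing to do */

        program_head(c);
        start_engine(c);
        if (c->err)
            return;
        program_tail(c);                 /* the tail write kicks the fetch */

        c->active += c->pending;         /* list_splice_tail_init() analogue */
        c->pending = 0;
        c->idle = false;
    }
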
1653 struct xilinx_dma_chan *chan = to_xilinx_chan(dchan);
1656 spin_lock_irqsave(&chan->lock, flags);
1657 chan->start_transfer(chan);
1658 spin_unlock_irqrestore(&chan->lock, flags);
1663 * @chan : xilinx DMA channel
1667 static void xilinx_dma_complete_descriptor(struct xilinx_dma_chan *chan)
1672 if (list_empty(&chan->active_list))
1675 list_for_each_entry_safe(desc, next, &chan->active_list, node) {
1676 if (chan->has_sg && chan->xdev->dma_config->dmatype !=
1678 desc->residue = xilinx_dma_get_residue(chan, desc);
1681 desc->err = chan->err;
1686 list_add_tail(&desc->node, &chan->done_list);
1692 * @chan: Driver specific DMA channel
1696 static int xilinx_dma_reset(struct xilinx_dma_chan *chan)
1701 dma_ctrl_set(chan, XILINX_DMA_REG_DMACR, XILINX_DMA_DMACR_RESET);
1704 err = xilinx_dma_poll_timeout(chan, XILINX_DMA_REG_DMACR, tmp,
1709 dev_err(chan->dev, "reset timeout, cr %x, sr %x\n",
1710 dma_ctrl_read(chan, XILINX_DMA_REG_DMACR),
1711 dma_ctrl_read(chan, XILINX_DMA_REG_DMASR));
1715 chan->err = false;
1716 chan->idle = true;
1717 chan->desc_pendingcount = 0;
1718 chan->desc_submitcount = 0;
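
xilinx_dma_reset() sets the self-clearing RESET bit in DMACR and polls until the hardware drops it, then reinitializes the channel's software state (err, idle, pending/submit counts). A sketch of waiting on a self-clearing bit, with a hypothetical bit position:

    #include <stdint.h>
    #include <errno.h>

    #define DMACR_RESET (1u << 2)   /* hypothetical bit position */

    /* request reset, then wait for the self-clearing bit to drop */
    int reset_channel(volatile uint32_t *dmacr, int max_polls)
    {
        *dmacr |= DMACR_RESET;

        while (max_polls--)
            if (!(*dmacr & DMACR_RESET))
                return 0;            /* hardware finished the reset */
        return -EBUSY;               /* the "reset timeout, cr %x, sr %x" path */
    }
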
1725 * @chan: Driver specific DMA channel
1729 static int xilinx_dma_chan_reset(struct xilinx_dma_chan *chan)
1734 err = xilinx_dma_reset(chan);
1739 dma_ctrl_set(chan, XILINX_DMA_REG_DMACR,
1754 struct xilinx_dma_chan *chan = data;
1757 if (chan->direction == DMA_DEV_TO_MEM)
1763 chan_sermask = dma_ctrl_read(chan, ser_offset);
1769 if (chan->direction == DMA_DEV_TO_MEM)
1770 chan_offset = chan->xdev->dma_config->max_channels / 2;
1773 chan = chan->xdev->chan[chan_offset];
1775 status = dma_ctrl_read(chan, XILINX_MCDMA_CHAN_SR_OFFSET(chan->tdest));
1779 dma_ctrl_write(chan, XILINX_MCDMA_CHAN_SR_OFFSET(chan->tdest),
1783 dev_err(chan->dev, "Channel %p has errors %x cdr %x tdr %x\n",
1784 chan,
1785 dma_ctrl_read(chan, XILINX_MCDMA_CH_ERR_OFFSET),
1786 dma_ctrl_read(chan, XILINX_MCDMA_CHAN_CDESC_OFFSET
1787 (chan->tdest)),
1788 dma_ctrl_read(chan, XILINX_MCDMA_CHAN_TDESC_OFFSET
1789 (chan->tdest)));
1790 chan->err = true;
1798 dev_dbg(chan->dev, "Inter-packet latency too long\n");
1802 spin_lock(&chan->lock);
1803 xilinx_dma_complete_descriptor(chan);
1804 chan->idle = true;
1805 chan->start_transfer(chan);
1806 spin_unlock(&chan->lock);
1809 tasklet_schedule(&chan->tasklet);
1822 struct xilinx_dma_chan *chan = data;
1826 status = dma_ctrl_read(chan, XILINX_DMA_REG_DMASR);
1830 dma_ctrl_write(chan, XILINX_DMA_REG_DMASR,
1843 dma_ctrl_write(chan, XILINX_DMA_REG_DMASR,
1846 if (!chan->flush_on_fsync ||
1848 dev_err(chan->dev,
1850 chan, errors,
1851 dma_ctrl_read(chan, XILINX_DMA_REG_CURDESC),
1852 dma_ctrl_read(chan, XILINX_DMA_REG_TAILDESC));
1853 chan->err = true;
1862 dev_dbg(chan->dev, "Inter-packet latency too long\n");
1866 spin_lock(&chan->lock);
1867 xilinx_dma_complete_descriptor(chan);
1868 chan->idle = true;
1869 chan->start_transfer(chan);
1870 spin_unlock(&chan->lock);
1873 tasklet_schedule(&chan->tasklet);
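
Both interrupt handlers (mcdma at 1754, dma at 1822) follow the same sequence: read the status register, acknowledge by writing the handled bits back, latch chan->err on error bits, and on completion take chan->lock, retire finished descriptors, mark the channel idle, restart pending work, then schedule the tasklet so callbacks run outside interrupt context. A condensed sketch of that sequence, assuming write-one-to-clear acknowledgement and made-up status bits:

    #include <pthread.h>
    #include <stdint.h>
    #include <stdbool.h>

    #define IRQ_ERROR    (1u << 0)   /* hypothetical status bits */
    #define IRQ_COMPLETE (1u << 1)

    struct chan {
        pthread_mutex_t lock;
        volatile uint32_t *dmasr;
        bool err, idle;
    };

    static void complete_descriptors(struct chan *c) { (void)c; }
    static void start_transfer(struct chan *c)       { (void)c; }
    static void schedule_tasklet(struct chan *c)     { (void)c; }

    static void irq_handler(struct chan *c)
    {
        uint32_t status = *c->dmasr;

        *c->dmasr = status & (IRQ_ERROR | IRQ_COMPLETE); /* ack handled bits */

        if (status & IRQ_ERROR)
            c->err = true;            /* real handler also dumps CURDESC etc. */

        if (status & IRQ_COMPLETE) {
            pthread_mutex_lock(&c->lock);
            complete_descriptors(c);  /* move active_list -> done_list */
            c->idle = true;
            start_transfer(c);        /* kick anything still pending */
            pthread_mutex_unlock(&c->lock);
        }
        schedule_tasklet(c);          /* callbacks run outside irq context */
    }
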
1879 * @chan: Driver specific dma channel
1882 static void append_desc_queue(struct xilinx_dma_chan *chan,
1891 if (list_empty(&chan->pending_list))
1898 tail_desc = list_last_entry(&chan->pending_list,
1900 if (chan->xdev->dma_config->dmatype == XDMA_TYPE_VDMA) {
1905 } else if (chan->xdev->dma_config->dmatype == XDMA_TYPE_CDMA) {
1910 } else if (chan->xdev->dma_config->dmatype == XDMA_TYPE_AXIDMA) {
1928 list_add_tail(&desc->node, &chan->pending_list);
1929 chan->desc_pendingcount++;
1931 if (chan->has_sg && (chan->xdev->dma_config->dmatype == XDMA_TYPE_VDMA)
1932 && unlikely(chan->desc_pendingcount > chan->num_frms)) {
1933 dev_dbg(chan->dev, "desc pendingcount is too high\n");
1934 chan->desc_pendingcount = chan->num_frms;
1947 struct xilinx_dma_chan *chan = to_xilinx_chan(tx->chan);
1952 if (chan->cyclic) {
1953 xilinx_dma_free_tx_descriptor(chan, desc);
1957 if (chan->err) {
1962 err = xilinx_dma_chan_reset(chan);
1967 spin_lock_irqsave(&chan->lock, flags);
1972 append_desc_queue(chan, desc);
1975 chan->cyclic = true;
1977 chan->terminating = false;
1979 spin_unlock_irqrestore(&chan->lock, flags);
1998 struct xilinx_dma_chan *chan = to_xilinx_chan(dchan);
2013 desc = xilinx_dma_alloc_tx_descriptor(chan);
2017 dma_async_tx_descriptor_init(&desc->async_tx, &chan->common);
2022 segment = xilinx_vdma_alloc_tx_segment(chan);
2032 hw->stride |= chan->config.frm_dly <<
2036 if (chan->ext_addr) {
2043 if (chan->ext_addr) {
2062 xilinx_dma_free_tx_descriptor(chan, desc);
2080 struct xilinx_dma_chan *chan = to_xilinx_chan(dchan);
2085 if (!len || len > chan->xdev->max_buffer_len)
2088 desc = xilinx_dma_alloc_tx_descriptor(chan);
2092 dma_async_tx_descriptor_init(&desc->async_tx, &chan->common);
2096 segment = xilinx_cdma_alloc_tx_segment(chan);
2104 if (chan->ext_addr) {
2118 xilinx_dma_free_tx_descriptor(chan, desc);
2138 struct xilinx_dma_chan *chan = to_xilinx_chan(dchan);
2151 desc = xilinx_dma_alloc_tx_descriptor(chan);
2155 dma_async_tx_descriptor_init(&desc->async_tx, &chan->common);
2167 segment = xilinx_axidma_alloc_tx_segment(chan);
2175 copy = xilinx_dma_calc_copysize(chan, sg_dma_len(sg),
2180 xilinx_axidma_buf(chan, hw, sg_dma_address(sg),
2185 if (chan->direction == DMA_MEM_TO_DEV) {
2206 if (chan->direction == DMA_MEM_TO_DEV) {
2217 xilinx_dma_free_tx_descriptor(chan, desc);
2237 struct xilinx_dma_chan *chan = to_xilinx_chan(dchan);
2257 desc = xilinx_dma_alloc_tx_descriptor(chan);
2261 chan->direction = direction;
2262 dma_async_tx_descriptor_init(&desc->async_tx, &chan->common);
2272 segment = xilinx_axidma_alloc_tx_segment(chan);
2280 copy = xilinx_dma_calc_copysize(chan, period_len,
2283 xilinx_axidma_buf(chan, hw, buf_addr, sg_used,
2306 reg = dma_ctrl_read(chan, XILINX_DMA_REG_DMACR);
2308 dma_ctrl_write(chan, XILINX_DMA_REG_DMACR, reg);
2324 xilinx_dma_free_tx_descriptor(chan, desc);
2345 struct xilinx_dma_chan *chan = to_xilinx_chan(dchan);
2358 desc = xilinx_dma_alloc_tx_descriptor(chan);
2362 dma_async_tx_descriptor_init(&desc->async_tx, &chan->common);
2374 segment = xilinx_aximcdma_alloc_tx_segment(chan);
2383 chan->xdev->max_buffer_len);
2387 xilinx_aximcdma_buf(chan, hw, sg_dma_address(sg),
2391 if (chan->direction == DMA_MEM_TO_DEV && app_w) {
2410 if (chan->direction == DMA_MEM_TO_DEV) {
2421 xilinx_dma_free_tx_descriptor(chan, desc);
2434 struct xilinx_dma_chan *chan = to_xilinx_chan(dchan);
2438 if (!chan->cyclic) {
2439 err = chan->stop_transfer(chan);
2441 dev_err(chan->dev, "Cannot stop channel %p: %x\n",
2442 chan, dma_ctrl_read(chan,
2444 chan->err = true;
2448 xilinx_dma_chan_reset(chan);
2450 chan->terminating = true;
2451 xilinx_dma_free_descriptors(chan);
2452 chan->idle = true;
2454 if (chan->cyclic) {
2455 reg = dma_ctrl_read(chan, XILINX_DMA_REG_DMACR);
2457 dma_ctrl_write(chan, XILINX_DMA_REG_DMACR, reg);
2458 chan->cyclic = false;
2461 if ((chan->xdev->dma_config->dmatype == XDMA_TYPE_CDMA) && chan->has_sg)
2462 dma_ctrl_clr(chan, XILINX_DMA_REG_DMACR,
2484 struct xilinx_dma_chan *chan = to_xilinx_chan(dchan);
2488 return xilinx_dma_chan_reset(chan);
2490 dmacr = dma_ctrl_read(chan, XILINX_DMA_REG_DMACR);
2492 chan->config.frm_dly = cfg->frm_dly;
2493 chan->config.park = cfg->park;
2496 chan->config.gen_lock = cfg->gen_lock;
2497 chan->config.master = cfg->master;
2500 if (cfg->gen_lock && chan->genlock) {
2506 chan->config.frm_cnt_en = cfg->frm_cnt_en;
2507 chan->config.vflip_en = cfg->vflip_en;
2510 chan->config.park_frm = cfg->park_frm;
2512 chan->config.park_frm = -1;
2514 chan->config.coalesc = cfg->coalesc;
2515 chan->config.delay = cfg->delay;
2520 chan->config.coalesc = cfg->coalesc;
2526 chan->config.delay = cfg->delay;
2533 dma_ctrl_write(chan, XILINX_DMA_REG_DMACR, dmacr);
2545 * @chan: Driver specific DMA channel
2547 static void xilinx_dma_chan_remove(struct xilinx_dma_chan *chan)
2550 dma_ctrl_clr(chan, XILINX_DMA_REG_DMACR,
2553 if (chan->irq > 0)
2554 free_irq(chan->irq, chan);
2556 tasklet_kill(&chan->tasklet);
2558 list_del(&chan->common.device_node);
2752 struct xilinx_dma_chan *chan;
2758 chan = devm_kzalloc(xdev->dev, sizeof(*chan), GFP_KERNEL);
2759 if (!chan)
2762 chan->dev = xdev->dev;
2763 chan->xdev = xdev;
2764 chan->desc_pendingcount = 0x0;
2765 chan->ext_addr = xdev->ext_addr;
2771 chan->idle = true;
2773 spin_lock_init(&chan->lock);
2774 INIT_LIST_HEAD(&chan->pending_list);
2775 INIT_LIST_HEAD(&chan->done_list);
2776 INIT_LIST_HEAD(&chan->active_list);
2777 INIT_LIST_HEAD(&chan->free_seg_list);
2782 chan->genlock = of_property_read_bool(node, "xlnx,genlock-mode");
2801 chan->direction = DMA_MEM_TO_DEV;
2802 chan->id = xdev->mm2s_chan_id++;
2803 chan->tdest = chan->id;
2805 chan->ctrl_offset = XILINX_DMA_MM2S_CTRL_OFFSET;
2807 chan->desc_offset = XILINX_VDMA_MM2S_DESC_OFFSET;
2808 chan->config.park = 1;
2812 chan->flush_on_fsync = true;
2818 chan->direction = DMA_DEV_TO_MEM;
2819 chan->id = xdev->s2mm_chan_id++;
2820 chan->tdest = chan->id - xdev->dma_config->max_channels / 2;
2821 chan->has_vflip = of_property_read_bool(node,
2823 if (chan->has_vflip) {
2824 chan->config.vflip_en = dma_read(chan,
2830 chan->ctrl_offset = XILINX_MCDMA_S2MM_CTRL_OFFSET;
2832 chan->ctrl_offset = XILINX_DMA_S2MM_CTRL_OFFSET;
2835 chan->desc_offset = XILINX_VDMA_S2MM_DESC_OFFSET;
2836 chan->config.park = 1;
2840 chan->flush_on_fsync = true;
2848 chan->irq = irq_of_parse_and_map(node, chan->tdest);
2849 err = request_irq(chan->irq, xdev->dma_config->irq_handler,
2850 IRQF_SHARED, "xilinx-dma-controller", chan);
2852 dev_err(xdev->dev, "unable to request IRQ %d\n", chan->irq);
2857 chan->start_transfer = xilinx_dma_start_transfer;
2858 chan->stop_transfer = xilinx_dma_stop_transfer;
2860 chan->start_transfer = xilinx_mcdma_start_transfer;
2861 chan->stop_transfer = xilinx_dma_stop_transfer;
2863 chan->start_transfer = xilinx_cdma_start_transfer;
2864 chan->stop_transfer = xilinx_cdma_stop_transfer;
2866 chan->start_transfer = xilinx_vdma_start_transfer;
2867 chan->stop_transfer = xilinx_dma_stop_transfer;
2873 dma_ctrl_read(chan, XILINX_DMA_REG_DMASR) &
2875 chan->has_sg = true;
2876 dev_dbg(chan->dev, "ch %d: SG %s\n", chan->id,
2877 chan->has_sg ? "enabled" : "disabled");
2881 tasklet_setup(&chan->tasklet, xilinx_dma_do_tasklet);
2887 chan->common.device = &xdev->common;
2889 list_add_tail(&chan->common.device_node, &xdev->common.channels);
2890 xdev->chan[chan->id] = chan;
2893 err = xilinx_dma_chan_reset(chan);
2941 if (chan_id >= xdev->dma_config->max_channels || !xdev->chan[chan_id])
2944 return dma_get_slave_channel(&xdev->chan[chan_id]->common);
3130 if (xdev->chan[i])
3131 xdev->chan[i]->num_frms = num_frames;
3162 if (xdev->chan[i])
3163 xilinx_dma_chan_remove(xdev->chan[i]);
3186 if (xdev->chan[i])
3187 xilinx_dma_chan_remove(xdev->chan[i]);