Lines Matching defs:tdc (occurrences of the per-channel context pointer tdc, a struct tegra_adma_chan *, in the Tegra ADMA dmaengine driver)

178 static inline void tdma_ch_write(struct tegra_adma_chan *tdc, u32 reg, u32 val)
180 writel(val, tdc->chan_addr + reg);
183 static inline u32 tdma_ch_read(struct tegra_adma_chan *tdc, u32 reg)
185 return readl(tdc->chan_addr + reg);
199 static inline struct device *tdc2dev(struct tegra_adma_chan *tdc)
201 return tdc->tdma->dev;
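
The three helpers above are shown almost completely by the matches; a minimal reconstruction (braces and bodies restored, using only the fields visible in the listing) reads:

static inline void tdma_ch_write(struct tegra_adma_chan *tdc, u32 reg, u32 val)
{
	/* chan_addr is this channel's slice of the controller MMIO space */
	writel(val, tdc->chan_addr + reg);
}

static inline u32 tdma_ch_read(struct tegra_adma_chan *tdc, u32 reg)
{
	return readl(tdc->chan_addr + reg);
}

static inline struct device *tdc2dev(struct tegra_adma_chan *tdc)
{
	/* channels have no device of their own; log against the controller */
	return tdc->tdma->dev;
}
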
212 struct tegra_adma_chan *tdc = to_tegra_adma_chan(dc);
214 memcpy(&tdc->sconfig, sconfig, sizeof(*sconfig));
245 static int tegra_adma_request_alloc(struct tegra_adma_chan *tdc,
248 struct tegra_adma *tdma = tdc->tdma;
249 unsigned int sreq_index = tdc->sreq_index;
251 if (tdc->sreq_reserved)
252 return tdc->sreq_dir == direction ? 0 : -EINVAL;
276 dma_chan_name(&tdc->vc.chan));
280 tdc->sreq_dir = direction;
281 tdc->sreq_reserved = true;
286 static void tegra_adma_request_free(struct tegra_adma_chan *tdc)
288 struct tegra_adma *tdma = tdc->tdma;
290 if (!tdc->sreq_reserved)
293 switch (tdc->sreq_dir) {
295 clear_bit(tdc->sreq_index, &tdma->tx_requests_reserved);
299 clear_bit(tdc->sreq_index, &tdma->rx_requests_reserved);
304 dma_chan_name(&tdc->vc.chan));
308 tdc->sreq_reserved = false;
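
Lines 245-308 track ownership of the channel's hardware request line (sreq). A sketch of the reservation side, assuming per-direction bitmaps tx_requests_reserved/rx_requests_reserved in struct tegra_adma (visible in the free path above); the test_and_set_bit() is inferred from the clear_bit() calls rather than shown:

static int tegra_adma_request_alloc(struct tegra_adma_chan *tdc,
				    enum dma_transfer_direction direction)
{
	struct tegra_adma *tdma = tdc->tdma;
	unsigned int sreq_index = tdc->sreq_index;

	/* already reserved: only a repeat request for the same direction is OK */
	if (tdc->sreq_reserved)
		return tdc->sreq_dir == direction ? 0 : -EINVAL;

	switch (direction) {
	case DMA_MEM_TO_DEV:
		/* atomic claim of the request line; inferred, not visible above */
		if (test_and_set_bit(sreq_index, &tdma->tx_requests_reserved))
			return -EINVAL;
		break;
	case DMA_DEV_TO_MEM:
		if (test_and_set_bit(sreq_index, &tdma->rx_requests_reserved))
			return -EINVAL;
		break;
	default:
		dev_WARN(tdc2dev(tdc), "channel %s has invalid transfer type\n",
			 dma_chan_name(&tdc->vc.chan));
		return -EINVAL;
	}

	tdc->sreq_dir = direction;
	tdc->sreq_reserved = true;
	return 0;
}

tegra_adma_request_free() (lines 286-308) is the mirror image: it switches on tdc->sreq_dir, clear_bit()s the same index in the matching bitmap, and drops sreq_reserved.
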
311 static u32 tegra_adma_irq_status(struct tegra_adma_chan *tdc)
313 u32 status = tdma_ch_read(tdc, ADMA_CH_INT_STATUS);
318 static u32 tegra_adma_irq_clear(struct tegra_adma_chan *tdc)
320 u32 status = tegra_adma_irq_status(tdc);
323 tdma_ch_write(tdc, ADMA_CH_INT_CLEAR, status);
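
The interrupt helpers read the per-channel status and write the same value back to a clear register, which suggests write-one-to-clear semantics. A sketch of the clear path (any masking applied inside tegra_adma_irq_status() at line 313 is not visible here):

static u32 tegra_adma_irq_clear(struct tegra_adma_chan *tdc)
{
	u32 status = tegra_adma_irq_status(tdc);

	/* acknowledge whatever is pending; assumed write-1-to-clear */
	if (status)
		tdma_ch_write(tdc, ADMA_CH_INT_CLEAR, status);

	return status;
}
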
328 static void tegra_adma_stop(struct tegra_adma_chan *tdc)
333 tdma_ch_write(tdc, ADMA_CH_CMD, 0);
336 tegra_adma_irq_clear(tdc);
338 if (readx_poll_timeout_atomic(readl, tdc->chan_addr + ADMA_CH_STATUS,
341 dev_err(tdc2dev(tdc), "unable to stop DMA channel\n");
345 kfree(tdc->desc);
346 tdc->desc = NULL;
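
tegra_adma_stop() disables the channel, acknowledges interrupts, then polls ADMA_CH_STATUS until the hardware reports the transfer has really stopped. In the sketch below the status bit name ADMA_CH_STATUS_XFER_EN and the 20 us / 10 ms poll parameters are assumptions; only the readx_poll_timeout_atomic() call itself appears above:

static void tegra_adma_stop(struct tegra_adma_chan *tdc)
{
	unsigned int status;

	/* disable the channel, then acknowledge anything still pending */
	tdma_ch_write(tdc, ADMA_CH_CMD, 0);
	tegra_adma_irq_clear(tdc);

	/* wait for the transfer-enable bit to drop; bit name and timeouts assumed */
	if (readx_poll_timeout_atomic(readl, tdc->chan_addr + ADMA_CH_STATUS,
				      status, !(status & ADMA_CH_STATUS_XFER_EN),
				      20, 10000)) {
		dev_err(tdc2dev(tdc), "unable to stop DMA channel\n");
		return;
	}

	/* the in-flight descriptor is owned by the channel and freed here */
	kfree(tdc->desc);
	tdc->desc = NULL;
}
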
349 static void tegra_adma_start(struct tegra_adma_chan *tdc)
351 struct virt_dma_desc *vd = vchan_next_desc(&tdc->vc);
363 dev_warn(tdc2dev(tdc), "unable to start DMA, no descriptor\n");
369 tdc->tx_buf_pos = 0;
370 tdc->tx_buf_count = 0;
371 tdma_ch_write(tdc, ADMA_CH_TC, ch_regs->tc);
372 tdma_ch_write(tdc, ADMA_CH_CTRL, ch_regs->ctrl);
373 tdma_ch_write(tdc, ADMA_CH_LOWER_SRC_ADDR, ch_regs->src_addr);
374 tdma_ch_write(tdc, ADMA_CH_LOWER_TRG_ADDR, ch_regs->trg_addr);
375 tdma_ch_write(tdc, ADMA_CH_FIFO_CTRL, ch_regs->fifo_ctrl);
376 tdma_ch_write(tdc, ADMA_CH_CONFIG, ch_regs->config);
379 tdma_ch_write(tdc, ADMA_CH_CMD, 1);
381 tdc->desc = desc;
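
The start path pops the next descriptor off the virt-dma queue, resets the residue bookkeeping, programs the channel registers in the order shown, and only then writes ADMA_CH_CMD = 1 to kick the transfer. A sketch; the descriptor-conversion helper name and the struct tegra_adma_chan_regs/tegra_adma_desc layouts are assumptions based on the fields visible elsewhere in the matches:

static void tegra_adma_start(struct tegra_adma_chan *tdc)
{
	struct virt_dma_desc *vd = vchan_next_desc(&tdc->vc);
	struct tegra_adma_chan_regs *ch_regs;
	struct tegra_adma_desc *desc;

	if (!vd) {
		dev_warn(tdc2dev(tdc), "unable to start DMA, no descriptor\n");
		return;
	}

	list_del(&vd->node);
	desc = vd_to_tegra_adma_desc(vd);	/* conversion helper; name assumed */
	ch_regs = &desc->ch_regs;

	/* reset residue tracking before the hardware starts counting */
	tdc->tx_buf_pos = 0;
	tdc->tx_buf_count = 0;

	tdma_ch_write(tdc, ADMA_CH_TC, ch_regs->tc);
	tdma_ch_write(tdc, ADMA_CH_CTRL, ch_regs->ctrl);
	tdma_ch_write(tdc, ADMA_CH_LOWER_SRC_ADDR, ch_regs->src_addr);
	tdma_ch_write(tdc, ADMA_CH_LOWER_TRG_ADDR, ch_regs->trg_addr);
	tdma_ch_write(tdc, ADMA_CH_FIFO_CTRL, ch_regs->fifo_ctrl);
	tdma_ch_write(tdc, ADMA_CH_CONFIG, ch_regs->config);

	/* start the transfer */
	tdma_ch_write(tdc, ADMA_CH_CMD, 1);

	tdc->desc = desc;
}
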
384 static unsigned int tegra_adma_get_residue(struct tegra_adma_chan *tdc)
386 struct tegra_adma_desc *desc = tdc->desc;
388 unsigned int pos = tdma_ch_read(tdc, ADMA_CH_XFER_STATUS);
394 if (pos < tdc->tx_buf_pos)
395 tdc->tx_buf_count += pos + (max - tdc->tx_buf_pos);
397 tdc->tx_buf_count += pos - tdc->tx_buf_pos;
399 periods_remaining = tdc->tx_buf_count % desc->num_periods;
400 tdc->tx_buf_pos = pos;
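
The residue logic reads a hardware transfer counter that wraps, so it accumulates the delta since the last snapshot and handles a wrap by adding the distance to the counter maximum. The arithmetic below mirrors lines 394-400; the counter-width macro and the final residue expression are assumptions consistent with the cyclic-buffer fields referenced at lines 636-641:

static unsigned int tegra_adma_get_residue(struct tegra_adma_chan *tdc)
{
	struct tegra_adma_desc *desc = tdc->desc;
	unsigned int max = ADMA_CH_XFER_STATUS_COUNT_MASK + 1;	/* counter modulus; macro name assumed */
	unsigned int pos = tdma_ch_read(tdc, ADMA_CH_XFER_STATUS);
	unsigned int periods_remaining;

	/* accumulate completed periods, accounting for counter wrap-around */
	if (pos < tdc->tx_buf_pos)
		tdc->tx_buf_count += pos + (max - tdc->tx_buf_pos);
	else
		tdc->tx_buf_count += pos - tdc->tx_buf_pos;

	periods_remaining = tdc->tx_buf_count % desc->num_periods;
	tdc->tx_buf_pos = pos;

	/* residue = bytes left until the end of the cyclic buffer (assumed) */
	return desc->buf_len - (periods_remaining * desc->period_len);
}
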
407 struct tegra_adma_chan *tdc = dev_id;
410 spin_lock(&tdc->vc.lock);
412 status = tegra_adma_irq_clear(tdc);
413 if (status == 0 || !tdc->desc) {
414 spin_unlock(&tdc->vc.lock);
418 vchan_cyclic_callback(&tdc->desc->vd);
420 spin_unlock(&tdc->vc.lock);
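
The interrupt handler is short: under the channel's vc.lock it clears the interrupt, bails out if nothing was pending or no descriptor is active, and otherwise reports one completed period through the virt-dma cyclic callback. Sketch:

static irqreturn_t tegra_adma_isr(int irq, void *dev_id)
{
	struct tegra_adma_chan *tdc = dev_id;
	unsigned long status;

	spin_lock(&tdc->vc.lock);

	status = tegra_adma_irq_clear(tdc);
	if (status == 0 || !tdc->desc) {
		spin_unlock(&tdc->vc.lock);
		return IRQ_NONE;
	}

	/* cyclic transfers only: each interrupt marks one completed period */
	vchan_cyclic_callback(&tdc->desc->vd);

	spin_unlock(&tdc->vc.lock);
	return IRQ_HANDLED;
}
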
427 struct tegra_adma_chan *tdc = to_tegra_adma_chan(dc);
430 spin_lock_irqsave(&tdc->vc.lock, flags);
432 if (vchan_issue_pending(&tdc->vc)) {
433 if (!tdc->desc)
434 tegra_adma_start(tdc);
437 spin_unlock_irqrestore(&tdc->vc.lock, flags);
440 static bool tegra_adma_is_paused(struct tegra_adma_chan *tdc)
444 csts = tdma_ch_read(tdc, ADMA_CH_STATUS);
452 struct tegra_adma_chan *tdc = to_tegra_adma_chan(dc);
453 struct tegra_adma_desc *desc = tdc->desc;
457 ch_regs->ctrl = tdma_ch_read(tdc, ADMA_CH_CTRL);
459 tdma_ch_write(tdc, ADMA_CH_CTRL, ch_regs->ctrl);
461 while (dcnt-- && !tegra_adma_is_paused(tdc))
465 dev_err(tdc2dev(tdc), "unable to pause DMA channel\n");
474 struct tegra_adma_chan *tdc = to_tegra_adma_chan(dc);
475 struct tegra_adma_desc *desc = tdc->desc;
478 ch_regs->ctrl = tdma_ch_read(tdc, ADMA_CH_CTRL);
480 tdma_ch_write(tdc, ADMA_CH_CTRL, ch_regs->ctrl);
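
Pause and resume (lines 440-480) both read-modify-write ADMA_CH_CTRL; pause additionally busy-waits until ADMA_CH_STATUS reports the paused state. In the sketch below the pause bit, the paused-status bit, and the poll constant are assumptions:

static bool tegra_adma_is_paused(struct tegra_adma_chan *tdc)
{
	u32 csts;

	csts = tdma_ch_read(tdc, ADMA_CH_STATUS);
	return !!(csts & ADMA_CH_STATUS_XFER_PAUSED);	/* bit name assumed */
}

static int tegra_adma_pause(struct dma_chan *dc)
{
	struct tegra_adma_chan *tdc = to_tegra_adma_chan(dc);
	struct tegra_adma_desc *desc = tdc->desc;
	struct tegra_adma_chan_regs *ch_regs = &desc->ch_regs;
	int dcnt = TEGRA_ADMA_BURST_COMPLETE_TIME;	/* poll budget; constant assumed */

	ch_regs->ctrl = tdma_ch_read(tdc, ADMA_CH_CTRL);
	ch_regs->ctrl |= ADMA_CH_CTRL_XFER_PAUSE;	/* pause bit; name assumed */
	tdma_ch_write(tdc, ADMA_CH_CTRL, ch_regs->ctrl);

	/* give the hardware time to finish the current burst */
	while (dcnt-- && !tegra_adma_is_paused(tdc))
		udelay(TEGRA_ADMA_BURST_COMPLETE_TIME);

	if (dcnt < 0) {
		dev_err(tdc2dev(tdc), "unable to pause DMA channel\n");
		return -EBUSY;
	}

	return 0;
}

tegra_adma_resume() (lines 474-480) mirrors the pause path: it clears the pause bit in the cached ctrl value and writes ADMA_CH_CTRL back.
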
487 struct tegra_adma_chan *tdc = to_tegra_adma_chan(dc);
491 spin_lock_irqsave(&tdc->vc.lock, flags);
493 if (tdc->desc)
494 tegra_adma_stop(tdc);
496 tegra_adma_request_free(tdc);
497 vchan_get_all_descriptors(&tdc->vc, &head);
498 spin_unlock_irqrestore(&tdc->vc.lock, flags);
499 vchan_dma_desc_free_list(&tdc->vc, &head);
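
Terminate stops the hardware only if a descriptor is active, releases the reserved request line, and hands all queued descriptors to virt-dma for freeing outside the lock. Sketch:

static int tegra_adma_terminate_all(struct dma_chan *dc)
{
	struct tegra_adma_chan *tdc = to_tegra_adma_chan(dc);
	unsigned long flags;
	LIST_HEAD(head);

	spin_lock_irqsave(&tdc->vc.lock, flags);

	/* stop the hardware only if something is actually running */
	if (tdc->desc)
		tegra_adma_stop(tdc);

	tegra_adma_request_free(tdc);
	vchan_get_all_descriptors(&tdc->vc, &head);
	spin_unlock_irqrestore(&tdc->vc.lock, flags);

	/* descriptors are freed outside the lock */
	vchan_dma_desc_free_list(&tdc->vc, &head);

	return 0;
}
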
508 struct tegra_adma_chan *tdc = to_tegra_adma_chan(dc);
519 spin_lock_irqsave(&tdc->vc.lock, flags);
521 vd = vchan_find_desc(&tdc->vc, cookie);
525 } else if (tdc->desc && tdc->desc->vd.tx.cookie == cookie) {
526 residual = tegra_adma_get_residue(tdc);
531 spin_unlock_irqrestore(&tdc->vc.lock, flags);
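
tx_status follows the usual dmaengine pattern: let dma_cookie_status() answer the easy cases, then compute a residue depending on whether the cookie is still queued, currently running, or already retired. The queued-case residue (the full transfer count from the saved registers) and the vd_to_tegra_adma_desc() helper are assumptions about the lines not shown:

static enum dma_status tegra_adma_tx_status(struct dma_chan *dc,
					    dma_cookie_t cookie,
					    struct dma_tx_state *txstate)
{
	struct tegra_adma_chan *tdc = to_tegra_adma_chan(dc);
	struct virt_dma_desc *vd;
	enum dma_status ret;
	unsigned long flags;
	unsigned int residual;

	ret = dma_cookie_status(dc, cookie, txstate);
	if (ret == DMA_COMPLETE || !txstate)
		return ret;

	spin_lock_irqsave(&tdc->vc.lock, flags);

	vd = vchan_find_desc(&tdc->vc, cookie);
	if (vd) {
		/* still queued: nothing transferred yet (assumed) */
		residual = vd_to_tegra_adma_desc(vd)->ch_regs.tc;
	} else if (tdc->desc && tdc->desc->vd.tx.cookie == cookie) {
		/* currently running: ask the hardware */
		residual = tegra_adma_get_residue(tdc);
	} else {
		residual = 0;
	}

	spin_unlock_irqrestore(&tdc->vc.lock, flags);

	dma_set_residue(txstate, residual);
	return ret;
}
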
554 static int tegra_adma_set_xfer_params(struct tegra_adma_chan *tdc,
560 const struct tegra_adma_chip_data *cdata = tdc->tdma->cdata;
570 burst_size = tdc->sconfig.dst_maxburst;
572 ch_regs->ctrl = ADMA_CH_REG_FIELD_VAL(tdc->sreq_index,
581 burst_size = tdc->sconfig.src_maxburst;
583 ch_regs->ctrl = ADMA_CH_REG_FIELD_VAL(tdc->sreq_index,
590 dev_err(tdc2dev(tdc), "DMA direction is not supported\n");
613 if (tdc->sreq_index > cdata->sreq_index_offset)
624 return tegra_adma_request_alloc(tdc, direction);
632 struct tegra_adma_chan *tdc = to_tegra_adma_chan(dc);
636 dev_err(tdc2dev(tdc), "invalid buffer/period len\n");
641 dev_err(tdc2dev(tdc), "buf_len not a multiple of period_len\n");
646 dev_err(tdc2dev(tdc), "invalid buffer alignment\n");
658 if (tegra_adma_set_xfer_params(tdc, desc, buf_addr, direction)) {
663 return vchan_tx_prep(&tdc->vc, &desc->vd, flags);
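
prep_dma_cyclic validates the buffer (non-zero lengths, a whole number of periods, alignment), allocates a descriptor, lets tegra_adma_set_xfer_params() fill in the channel registers (burst size and ctrl fields chosen per direction from tdc->sconfig and tdc->sreq_index, per lines 554-624), and hands the descriptor to virt-dma. A sketch; the period-length limit macro, the 4-byte alignment check, and the descriptor field names are assumptions:

static struct dma_async_tx_descriptor *
tegra_adma_prep_dma_cyclic(struct dma_chan *dc, dma_addr_t buf_addr,
			   size_t buf_len, size_t period_len,
			   enum dma_transfer_direction direction,
			   unsigned long flags)
{
	struct tegra_adma_chan *tdc = to_tegra_adma_chan(dc);
	struct tegra_adma_desc *desc;

	if (!buf_len || !period_len || period_len > ADMA_CH_TC_COUNT_MASK) {
		dev_err(tdc2dev(tdc), "invalid buffer/period len\n");
		return NULL;
	}

	if (buf_len % period_len) {
		dev_err(tdc2dev(tdc), "buf_len not a multiple of period_len\n");
		return NULL;
	}

	if (!IS_ALIGNED(buf_addr, 4)) {
		dev_err(tdc2dev(tdc), "invalid buffer alignment\n");
		return NULL;
	}

	desc = kzalloc(sizeof(*desc), GFP_NOWAIT);
	if (!desc)
		return NULL;

	desc->buf_len = buf_len;
	desc->period_len = period_len;
	desc->num_periods = buf_len / period_len;

	if (tegra_adma_set_xfer_params(tdc, desc, buf_addr, direction)) {
		kfree(desc);
		return NULL;
	}

	return vchan_tx_prep(&tdc->vc, &desc->vd, flags);
}
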
668 struct tegra_adma_chan *tdc = to_tegra_adma_chan(dc);
671 ret = request_irq(tdc->irq, tegra_adma_isr, 0, dma_chan_name(dc), tdc);
673 dev_err(tdc2dev(tdc), "failed to get interrupt for %s\n",
678 ret = pm_runtime_resume_and_get(tdc2dev(tdc));
680 free_irq(tdc->irq, tdc);
684 dma_cookie_init(&tdc->vc.chan);
691 struct tegra_adma_chan *tdc = to_tegra_adma_chan(dc);
694 vchan_free_chan_resources(&tdc->vc);
695 tasklet_kill(&tdc->vc.task);
696 free_irq(tdc->irq, tdc);
697 pm_runtime_put(tdc2dev(tdc));
699 tdc->sreq_index = 0;
700 tdc->sreq_dir = DMA_TRANS_NONE;
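
Channel resource management pairs an IRQ request with a runtime-PM get on alloc, and tears both down on free; the terminate call in the free path is an assumption about the lines not shown:

static int tegra_adma_alloc_chan_resources(struct dma_chan *dc)
{
	struct tegra_adma_chan *tdc = to_tegra_adma_chan(dc);
	int ret;

	ret = request_irq(tdc->irq, tegra_adma_isr, 0, dma_chan_name(dc), tdc);
	if (ret) {
		dev_err(tdc2dev(tdc), "failed to get interrupt for %s\n",
			dma_chan_name(dc));
		return ret;
	}

	/* keep the controller powered while the channel is in use */
	ret = pm_runtime_resume_and_get(tdc2dev(tdc));
	if (ret < 0) {
		free_irq(tdc->irq, tdc);
		return ret;
	}

	dma_cookie_init(&tdc->vc.chan);
	return 0;
}

static void tegra_adma_free_chan_resources(struct dma_chan *dc)
{
	struct tegra_adma_chan *tdc = to_tegra_adma_chan(dc);

	tegra_adma_terminate_all(dc);	/* assumed from the gap before line 694 */
	vchan_free_chan_resources(&tdc->vc);
	tasklet_kill(&tdc->vc.task);
	free_irq(tdc->irq, tdc);
	pm_runtime_put(tdc2dev(tdc));

	/* forget the request-line binding so the channel can be re-used */
	tdc->sreq_index = 0;
	tdc->sreq_dir = DMA_TRANS_NONE;
}
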
707 struct tegra_adma_chan *tdc;
725 tdc = to_tegra_adma_chan(chan);
726 tdc->sreq_index = sreq_index;
735 struct tegra_adma_chan *tdc;
743 tdc = &tdma->channels[i];
744 ch_reg = &tdc->ch_regs;
745 ch_reg->cmd = tdma_ch_read(tdc, ADMA_CH_CMD);
749 ch_reg->tc = tdma_ch_read(tdc, ADMA_CH_TC);
750 ch_reg->src_addr = tdma_ch_read(tdc, ADMA_CH_LOWER_SRC_ADDR);
751 ch_reg->trg_addr = tdma_ch_read(tdc, ADMA_CH_LOWER_TRG_ADDR);
752 ch_reg->ctrl = tdma_ch_read(tdc, ADMA_CH_CTRL);
753 ch_reg->fifo_ctrl = tdma_ch_read(tdc, ADMA_CH_FIFO_CTRL);
754 ch_reg->config = tdma_ch_read(tdc, ADMA_CH_CONFIG);
767 struct tegra_adma_chan *tdc;
781 tdc = &tdma->channels[i];
782 ch_reg = &tdc->ch_regs;
786 tdma_ch_write(tdc, ADMA_CH_TC, ch_reg->tc);
787 tdma_ch_write(tdc, ADMA_CH_LOWER_SRC_ADDR, ch_reg->src_addr);
788 tdma_ch_write(tdc, ADMA_CH_LOWER_TRG_ADDR, ch_reg->trg_addr);
789 tdma_ch_write(tdc, ADMA_CH_CTRL, ch_reg->ctrl);
790 tdma_ch_write(tdc, ADMA_CH_FIFO_CTRL, ch_reg->fifo_ctrl);
791 tdma_ch_write(tdc, ADMA_CH_CONFIG, ch_reg->config);
792 tdma_ch_write(tdc, ADMA_CH_CMD, ch_reg->cmd);
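
Runtime suspend snapshots each channel's registers into tdc->ch_regs; resume writes them back, finishing with ADMA_CH_CMD so a previously running channel only restarts after its full context has been restored (that ordering is visible at lines 786-792). A sketch of the save loop; the skip of inactive channels, the nr_channels field, and the omitted global-register/clock handling are assumptions:

static int __maybe_unused tegra_adma_runtime_suspend(struct device *dev)
{
	struct tegra_adma *tdma = dev_get_drvdata(dev);
	struct tegra_adma_chan_regs *ch_reg;
	struct tegra_adma_chan *tdc;
	int i;

	for (i = 0; i < tdma->nr_channels; i++) {
		tdc = &tdma->channels[i];
		ch_reg = &tdc->ch_regs;

		ch_reg->cmd = tdma_ch_read(tdc, ADMA_CH_CMD);
		/* only active channels need their context preserved (assumed) */
		if (!ch_reg->cmd)
			continue;

		ch_reg->tc = tdma_ch_read(tdc, ADMA_CH_TC);
		ch_reg->src_addr = tdma_ch_read(tdc, ADMA_CH_LOWER_SRC_ADDR);
		ch_reg->trg_addr = tdma_ch_read(tdc, ADMA_CH_LOWER_TRG_ADDR);
		ch_reg->ctrl = tdma_ch_read(tdc, ADMA_CH_CTRL);
		ch_reg->fifo_ctrl = tdma_ch_read(tdc, ADMA_CH_FIFO_CTRL);
		ch_reg->config = tdma_ch_read(tdc, ADMA_CH_CONFIG);
	}

	/* global register save and clock handling omitted from this sketch */
	return 0;
}
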
872 struct tegra_adma_chan *tdc = &tdma->channels[i];
874 tdc->chan_addr = tdma->base_addr + cdata->ch_base_offset
877 tdc->irq = of_irq_get(pdev->dev.of_node, i);
878 if (tdc->irq <= 0) {
879 ret = tdc->irq ?: -ENXIO;
883 vchan_init(&tdc->vc, &tdma->dma_dev);
884 tdc->vc.desc_free = tegra_adma_desc_free;
885 tdc->tdma = tdma;
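
Finally, probe wires each channel up: its register window is computed from the controller base plus a per-channel stride from chip data, its interrupt is taken from the device tree by index, and the channel is registered with virt-dma. A sketch of just this loop; the stride field name, the error label, and the surrounding probe code are assumptions:

	for (i = 0; i < tdma->nr_channels; i++) {
		struct tegra_adma_chan *tdc = &tdma->channels[i];

		/* per-channel MMIO window: base + offset + i * stride (stride name assumed) */
		tdc->chan_addr = tdma->base_addr + cdata->ch_base_offset
				 + (cdata->ch_reg_size * i);

		tdc->irq = of_irq_get(pdev->dev.of_node, i);
		if (tdc->irq <= 0) {
			ret = tdc->irq ?: -ENXIO;
			goto irq_dispose;	/* error label assumed */
		}

		vchan_init(&tdc->vc, &tdma->dma_dev);
		tdc->vc.desc_free = tegra_adma_desc_free;
		tdc->tdma = tdma;
	}
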