Lines matching refs: ch (identifier cross-reference for the MOXA ART APB DMA engine driver; each entry is the source line number followed by the line that references ch)

188 struct moxart_chan *ch = to_moxart_dma_chan(chan);
193 dev_dbg(chan2dev(chan), "%s: ch=%p\n", __func__, ch);
195 spin_lock_irqsave(&ch->vc.lock, flags);
197 if (ch->desc) {
198 moxart_dma_desc_free(&ch->desc->vd);
199 ch->desc = NULL;
202 ctrl = readl(ch->base + REG_OFF_CTRL);
204 writel(ctrl, ch->base + REG_OFF_CTRL);
206 vchan_get_all_descriptors(&ch->vc, &head);
207 spin_unlock_irqrestore(&ch->vc.lock, flags);
208 vchan_dma_desc_free_list(&ch->vc, &head);
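
The references above (ch->vc.lock, ch->base, ch->desc) only make sense against the channel structure itself, which this listing never shows. A minimal sketch of that structure and its container_of helper, reconstructed purely from the fields referenced throughout this listing; the field types, their order and the header paths are assumptions, not quotes from the driver:

    #include <linux/dmaengine.h>
    #include "virt-dma.h"                        /* struct virt_dma_chan helpers (drivers/dma local header, assumed path) */

    struct moxart_chan {
            struct virt_dma_chan vc;             /* provides vc.lock, vc.chan and desc_free */
            void __iomem *base;                  /* per-channel register window (REG_OFF_*) */
            struct moxart_desc *desc;            /* descriptor in flight, NULL when idle */
            struct dma_slave_config cfg;         /* copied by the slave-config callback below */
            unsigned int line_reqno;             /* request line taken from dma_spec->args[0] */
            unsigned int ch_num;
            unsigned int sgidx;                  /* scatterlist entry currently being transferred */
            bool allocated;
            bool error;
    };

    static inline struct moxart_chan *to_moxart_dma_chan(struct dma_chan *c)
    {
            return container_of(c, struct moxart_chan, vc.chan);
    }
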
216 struct moxart_chan *ch = to_moxart_dma_chan(chan);
219 ch->cfg = *cfg;
221 ctrl = readl(ch->base + REG_OFF_CTRL);
226 switch (ch->cfg.src_addr_width) {
229 if (ch->cfg.direction != DMA_MEM_TO_DEV)
236 if (ch->cfg.direction != DMA_MEM_TO_DEV)
243 if (ch->cfg.direction != DMA_MEM_TO_DEV)
252 if (ch->cfg.direction == DMA_MEM_TO_DEV) {
255 ctrl |= (ch->line_reqno << 16 &
260 ctrl |= (ch->line_reqno << 24 &
264 writel(ctrl, ch->base + REG_OFF_CTRL);
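
For context, this is roughly how a peripheral driver would hand a dma_slave_config to the callback above through the standard dmaengine API; only the API calls are real, the FIFO address and bus widths are placeholders:

    /* Hypothetical client-side configuration; fifo_phys_addr is a made-up peripheral FIFO address. */
    struct dma_slave_config cfg = {
            .direction      = DMA_MEM_TO_DEV,
            .dst_addr       = fifo_phys_addr,
            .dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES,
            .src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES,
    };
    int ret = dmaengine_slave_config(chan, &cfg);
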
274 struct moxart_chan *ch = to_moxart_dma_chan(chan);
289 dev_addr = ch->cfg.src_addr;
290 dev_width = ch->cfg.src_addr_width;
292 dev_addr = ch->cfg.dst_addr;
293 dev_width = ch->cfg.dst_addr_width;
327 ch->error = 0;
329 return vchan_tx_prep(&ch->vc, &d->vd, tx_flags);
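
The descriptor prepared here is only visible through its fields (vd, dma_dir, dev_addr, es, dma_cycles, sglen, sg[]) used further down. A rough reconstruction of the descriptor layout, with types and the flexible sg[] array assumed from those accesses rather than quoted from the driver:

    struct moxart_sg {
            dma_addr_t addr;
            u32 len;
    };

    struct moxart_desc {
            struct virt_dma_desc vd;             /* wrapped by vchan_tx_prep() above */
            enum dma_transfer_direction dma_dir;
            dma_addr_t dev_addr;                 /* fixed peripheral-side address */
            unsigned int es;                     /* element size, indexes es_bytes[] */
            unsigned int dma_cycles;             /* transfer length in elements */
            unsigned int sglen;
            struct moxart_sg sg[];               /* one entry per scatterlist segment */
    };
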
337 struct moxart_chan *ch;
343 ch = to_moxart_dma_chan(chan);
344 ch->line_reqno = dma_spec->args[0];
351 struct moxart_chan *ch = to_moxart_dma_chan(chan);
354 __func__, ch->ch_num);
355 ch->allocated = 1;
362 struct moxart_chan *ch = to_moxart_dma_chan(chan);
364 vchan_free_chan_resources(&ch->vc);
367 __func__, ch->ch_num);
368 ch->allocated = 0;
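
These two callbacks only mark the channel claimed and released; a consumer reaches them through the standard channel-request helpers. A hedged example (the device pointer and the "tx" channel name are placeholders):

    struct dma_chan *chan;

    chan = dma_request_chan(&pdev->dev, "tx");   /* ends up in the alloc_chan_resources callback */
    if (IS_ERR(chan))
            return PTR_ERR(chan);
    /* ... transfers ... */
    dma_release_channel(chan);                   /* ends up in the free_chan_resources callback */
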
371 static void moxart_dma_set_params(struct moxart_chan *ch, dma_addr_t src_addr,
374 writel(src_addr, ch->base + REG_OFF_ADDRESS_SOURCE);
375 writel(dst_addr, ch->base + REG_OFF_ADDRESS_DEST);
378 static void moxart_set_transfer_params(struct moxart_chan *ch, unsigned int len)
380 struct moxart_desc *d = ch->desc;
389 writel(d->dma_cycles, ch->base + REG_OFF_CYCLES);
391 dev_dbg(chan2dev(&ch->vc.chan), "%s: set %u DMA cycles (len=%u)\n",
395 static void moxart_start_dma(struct moxart_chan *ch)
399 ctrl = readl(ch->base + REG_OFF_CTRL);
401 writel(ctrl, ch->base + REG_OFF_CTRL);
404 static void moxart_dma_start_sg(struct moxart_chan *ch, unsigned int idx)
406 struct moxart_desc *d = ch->desc;
407 struct moxart_sg *sg = ch->desc->sg + idx;
409 if (ch->desc->dma_dir == DMA_MEM_TO_DEV)
410 moxart_dma_set_params(ch, sg->addr, d->dev_addr);
411 else if (ch->desc->dma_dir == DMA_DEV_TO_MEM)
412 moxart_dma_set_params(ch, d->dev_addr, sg->addr);
414 moxart_set_transfer_params(ch, sg->len);
416 moxart_start_dma(ch);
421 struct moxart_chan *ch = to_moxart_dma_chan(chan);
424 vd = vchan_next_desc(&ch->vc);
427 ch->desc = NULL;
433 ch->desc = to_moxart_dma_desc(&vd->tx);
434 ch->sgidx = 0;
436 moxart_dma_start_sg(ch, 0);
441 struct moxart_chan *ch = to_moxart_dma_chan(chan);
444 spin_lock_irqsave(&ch->vc.lock, flags);
445 if (vchan_issue_pending(&ch->vc) && !ch->desc)
447 spin_unlock_irqrestore(&ch->vc.lock, flags);
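
issue_pending only kicks the hardware once a descriptor has been prepared and submitted. A typical client sequence against this driver's slave-sg path would look roughly like the following; scatterlist setup is omitted and the names sgl, sg_len and xfer_done are placeholders:

    struct dma_async_tx_descriptor *tx;
    dma_cookie_t cookie;

    tx = dmaengine_prep_slave_sg(chan, sgl, sg_len,
                                 DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT);
    if (!tx)
            return -ENOMEM;
    tx->callback = xfer_done;                    /* hypothetical completion callback */
    cookie = dmaengine_submit(tx);
    dma_async_issue_pending(chan);               /* reaches the issue_pending callback above */
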
462 static size_t moxart_dma_desc_size_in_flight(struct moxart_chan *ch)
467 size = moxart_dma_desc_size(ch->desc, ch->sgidx);
468 cycles = readl(ch->base + REG_OFF_CYCLES);
469 completed_cycles = (ch->desc->dma_cycles - cycles);
470 size -= completed_cycles << es_bytes[ch->desc->es];
472 dev_dbg(chan2dev(&ch->vc.chan), "%s: size=%zu\n", __func__, size);
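
The in-flight size is derived from the hardware cycle counter: cycles already consumed, shifted left by the element size, are subtracted from the size still covered by the remaining scatterlist entries. A worked example with made-up numbers:

    /* Hypothetical values, only to illustrate the arithmetic above. */
    unsigned int dma_cycles = 4096 >> 2;                 /* 1024 cycles of 32-bit elements */
    unsigned int cycles_left = 256;                      /* pretend REG_OFF_CYCLES reads back 256 */
    unsigned int completed_cycles = dma_cycles - cycles_left;
    size_t size = 4096 - (completed_cycles << 2);        /* 4096 - 3072 = 1024 bytes still to go */
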
481 struct moxart_chan *ch = to_moxart_dma_chan(chan);
492 spin_lock_irqsave(&ch->vc.lock, flags);
493 vd = vchan_find_desc(&ch->vc, cookie);
497 } else if (ch->desc && ch->desc->vd.tx.cookie == cookie) {
498 txstate->residue = moxart_dma_desc_size_in_flight(ch);
500 spin_unlock_irqrestore(&ch->vc.lock, flags);
502 if (ch->error)
525 struct moxart_chan *ch = &mc->slave_chans[0];
530 dev_dbg(chan2dev(&ch->vc.chan), "%s\n", __func__);
532 for (i = 0; i < APB_DMA_MAX_CHANNEL; i++, ch++) {
533 if (!ch->allocated)
536 ctrl = readl(ch->base + REG_OFF_CTRL);
538 dev_dbg(chan2dev(&ch->vc.chan), "%s: ch=%p ch->base=%p ctrl=%x\n",
539 __func__, ch, ch->base, ctrl);
543 if (ch->desc) {
544 spin_lock_irqsave(&ch->vc.lock, flags);
545 if (++ch->sgidx < ch->desc->sglen) {
546 moxart_dma_start_sg(ch, ch->sgidx);
548 vchan_cookie_complete(&ch->desc->vd);
549 moxart_dma_start_desc(&ch->vc.chan);
551 spin_unlock_irqrestore(&ch->vc.lock, flags);
557 ch->error = 1;
560 writel(ctrl, ch->base + REG_OFF_CTRL);
574 struct moxart_chan *ch;
598 ch = &mdc->slave_chans[0];
599 for (i = 0; i < APB_DMA_MAX_CHANNEL; i++, ch++) {
600 ch->ch_num = i;
601 ch->base = dma_base_addr + i * REG_OFF_CHAN_SIZE;
602 ch->allocated = 0;
604 ch->vc.desc_free = moxart_dma_desc_free;
605 vchan_init(&ch->vc, &mdc->dma_slave);
607 dev_dbg(dev, "%s: chs[%d]: ch->ch_num=%u ch->base=%p\n",
608 __func__, i, ch->ch_num, ch->base);
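
The probe fragment above covers only per-channel setup. Registration with the dmaengine core and the OF translator is not visible in this listing; the following sketch shows the steps such a driver typically performs afterwards. The callback names are inferred from the fragments above, and the capability flags, error handling and the node variable are assumptions:

    /* Sketch only: ordering and capabilities are assumed, not quoted from the driver. */
    dma_cap_set(DMA_SLAVE, mdc->dma_slave.cap_mask);
    mdc->dma_slave.device_prep_slave_sg = moxart_prep_slave_sg;
    mdc->dma_slave.device_issue_pending = moxart_dma_issue_pending;
    mdc->dma_slave.device_tx_status     = moxart_tx_status;

    ret = dma_async_device_register(&mdc->dma_slave);
    if (ret)
            return ret;

    /* The dma_spec->args[0] access at line 344 implies an OF translate callback is registered too: */
    ret = of_dma_controller_register(node, moxart_of_xlate, mdc);
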