
Searched refs:vchan (Results 1 - 25 of 67) sorted by relevance


/kernel/linux/linux-5.10/drivers/dma/
owl-dma.c
186 * @vchan: the virtual channel currently being served by this physical channel
191 struct owl_dma_vchan *vchan; member
380 static inline int owl_dma_cfg_lli(struct owl_dma_vchan *vchan, in owl_dma_cfg_lli() argument
387 struct owl_dma *od = to_owl_dma(vchan->vc.chan.device); in owl_dma_cfg_lli()
400 mode |= OWL_DMA_MODE_TS(vchan->drq) in owl_dma_cfg_lli()
413 mode |= OWL_DMA_MODE_TS(vchan->drq) in owl_dma_cfg_lli()
468 struct owl_dma_vchan *vchan) in owl_dma_get_pchan()
478 if (!pchan->vchan) { in owl_dma_get_pchan()
479 pchan->vchan = vchan; in owl_dma_get_pchan()
467 owl_dma_get_pchan(struct owl_dma *od, struct owl_dma_vchan *vchan) owl_dma_get_pchan() argument
534 owl_dma_start_next_txd(struct owl_dma_vchan *vchan) owl_dma_start_next_txd() argument
583 owl_dma_phy_free(struct owl_dma *od, struct owl_dma_vchan *vchan) owl_dma_phy_free() argument
594 struct owl_dma_vchan *vchan; owl_dma_interrupt() local
699 struct owl_dma_vchan *vchan = to_owl_vchan(chan); owl_dma_terminate_all() local
725 struct owl_dma_vchan *vchan = to_owl_vchan(chan); owl_dma_config() local
739 struct owl_dma_vchan *vchan = to_owl_vchan(chan); owl_dma_pause() local
753 struct owl_dma_vchan *vchan = to_owl_vchan(chan); owl_dma_resume() local
770 owl_dma_getbytes_chan(struct owl_dma_vchan *vchan) owl_dma_getbytes_chan() argument
807 struct owl_dma_vchan *vchan = to_owl_vchan(chan); owl_dma_tx_status() local
837 owl_dma_phy_alloc_and_start(struct owl_dma_vchan *vchan) owl_dma_phy_alloc_and_start() argument
854 struct owl_dma_vchan *vchan = to_owl_vchan(chan); owl_dma_issue_pending() local
871 struct owl_dma_vchan *vchan = to_owl_vchan(chan); owl_dma_prep_memcpy() local
922 struct owl_dma_vchan *vchan = to_owl_vchan(chan); owl_dma_prep_slave_sg() local
987 struct owl_dma_vchan *vchan = to_owl_vchan(chan); owl_prep_dma_cyclic() local
1043 struct owl_dma_vchan *vchan = to_owl_vchan(chan); owl_dma_free_chan_resources() local
1051 struct owl_dma_vchan *vchan = NULL; owl_dma_free() local
1065 struct owl_dma_vchan *vchan; owl_dma_of_xlate() local
1189 struct owl_dma_vchan *vchan = &od->vchans[i]; owl_dma_probe() local
[all...]
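
The owl-dma.c hits above show the usual pairing between a physical DMA channel and the virtual channel (vchan) it currently serves: the pchan structure carries a vchan pointer, and owl_dma_get_pchan() claims the first idle pchan for a given vchan. Below is a minimal sketch of that pairing on top of the kernel's virt-dma layer; the demo_* names are hypothetical stand-ins for the driver-specific types, not the owl-dma code itself.

/* Sketch only: demo_* identifiers are invented for illustration. */
#include <linux/dmaengine.h>
#include <linux/spinlock.h>
#include <linux/types.h>
#include "virt-dma.h"	/* drivers/dma/virt-dma.h: struct virt_dma_chan and helpers */

struct demo_vchan;

struct demo_pchan {
	unsigned int		id;
	void __iomem		*base;
	struct demo_vchan	*vchan;	/* vchan currently served; NULL when idle */
};

struct demo_vchan {
	struct virt_dma_chan	vc;	/* embeds struct dma_chan + descriptor lists */
	struct demo_pchan	*pchan;	/* physical channel backing this vchan */
	u8			drq;	/* hardware request line */
};

struct demo_dma {
	struct dma_device	ddev;
	spinlock_t		lock;
	unsigned int		nr_pchans;
	struct demo_pchan	pchans[8];
};

/* Claim the first idle physical channel for @vchan (cf. owl_dma_get_pchan()). */
static struct demo_pchan *demo_get_pchan(struct demo_dma *od,
					 struct demo_vchan *vchan)
{
	unsigned long flags;
	unsigned int i;

	for (i = 0; i < od->nr_pchans; i++) {
		struct demo_pchan *pchan = &od->pchans[i];

		spin_lock_irqsave(&od->lock, flags);
		if (!pchan->vchan) {
			pchan->vchan = vchan;	/* pair the pchan with this vchan */
			vchan->pchan = pchan;
			spin_unlock_irqrestore(&od->lock, flags);
			return pchan;
		}
		spin_unlock_irqrestore(&od->lock, flags);
	}

	return NULL;	/* every physical channel is busy */
}
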
sun4i-dma.c
128 /* vchan currently being serviced */
129 struct sun4i_dma_vchan *vchan; member
212 struct sun4i_dma_vchan *vchan = to_sun4i_dma_vchan(chan); in sun4i_dma_free_chan_resources() local
214 vchan_free_chan_resources(&vchan->vc); in sun4i_dma_free_chan_resources()
218 struct sun4i_dma_vchan *vchan) in find_and_use_pchan()
228 if (vchan->is_dedicated) { in find_and_use_pchan()
239 pchan->vchan = vchan; in find_and_use_pchan()
256 pchan->vchan = NULL; in release_pchan()
311 * Execute pending operations on a vchan
217 find_and_use_pchan(struct sun4i_dma_dev *priv, struct sun4i_dma_vchan *vchan) find_and_use_pchan() argument
319 __execute_vchan_pending(struct sun4i_dma_dev *priv, struct sun4i_dma_vchan *vchan) __execute_vchan_pending() argument
614 struct sun4i_dma_vchan *vchan = to_sun4i_dma_vchan(chan); sun4i_dma_prep_dma_memcpy() local
665 struct sun4i_dma_vchan *vchan = to_sun4i_dma_vchan(chan); sun4i_dma_prep_dma_cyclic() local
776 struct sun4i_dma_vchan *vchan = to_sun4i_dma_vchan(chan); sun4i_dma_prep_slave_sg() local
870 struct sun4i_dma_vchan *vchan = to_sun4i_dma_vchan(chan); sun4i_dma_terminate_all() local
906 struct sun4i_dma_vchan *vchan = to_sun4i_dma_vchan(chan); sun4i_dma_config() local
917 struct sun4i_dma_vchan *vchan; sun4i_dma_of_xlate() local
947 struct sun4i_dma_vchan *vchan = to_sun4i_dma_vchan(chan); sun4i_dma_tx_status() local
995 struct sun4i_dma_vchan *vchan = to_sun4i_dma_vchan(chan); sun4i_dma_issue_pending() local
1014 struct sun4i_dma_vchan *vchan; sun4i_dma_interrupt() local
1204 struct sun4i_dma_vchan *vchan = &priv->vchans[i]; sun4i_dma_probe() local
[all...]
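
sun4i_dma_free_chan_resources() above is essentially a one-liner around vchan_free_chan_resources(). A hedged sketch of that callback, assuming a channel type that embeds struct virt_dma_chan (demo_free_chan_resources is a hypothetical name):

/* Sketch: relies only on the generic virt-dma helper. */
static void demo_free_chan_resources(struct dma_chan *chan)
{
	struct virt_dma_chan *vc = to_virt_chan(chan);	/* from virt-dma.h */

	/* Terminates and frees everything still queued on the virtual channel. */
	vchan_free_chan_resources(vc);
}
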
sun6i-dma.c
170 struct sun6i_vchan *vchan; member
387 static inline void sun6i_dma_dump_lli(struct sun6i_vchan *vchan, in sun6i_dma_dump_lli() argument
392 dev_dbg(chan2dev(&vchan->vc.chan), in sun6i_dma_dump_lli()
427 static int sun6i_dma_start_desc(struct sun6i_vchan *vchan) in sun6i_dma_start_desc() argument
429 struct sun6i_dma_dev *sdev = to_sun6i_dma_dev(vchan->vc.chan.device); in sun6i_dma_start_desc()
430 struct virt_dma_desc *desc = vchan_next_desc(&vchan->vc); in sun6i_dma_start_desc()
431 struct sun6i_pchan *pchan = vchan->phy; in sun6i_dma_start_desc()
448 sun6i_dma_dump_lli(vchan, pchan->desc->v_lli); in sun6i_dma_start_desc()
453 vchan->irq_type = vchan in sun6i_dma_start_desc()
473 struct sun6i_vchan *vchan; sun6i_dma_tasklet() local
538 struct sun6i_vchan *vchan; sun6i_dma_interrupt() local
634 struct sun6i_vchan *vchan = to_sun6i_vchan(chan); sun6i_dma_prep_dma_memcpy() local
688 struct sun6i_vchan *vchan = to_sun6i_vchan(chan); sun6i_dma_prep_slave_sg() local
770 struct sun6i_vchan *vchan = to_sun6i_vchan(chan); sun6i_dma_prep_dma_cyclic() local
832 struct sun6i_vchan *vchan = to_sun6i_vchan(chan); sun6i_dma_config() local
842 struct sun6i_vchan *vchan = to_sun6i_vchan(chan); sun6i_dma_pause() local
862 struct sun6i_vchan *vchan = to_sun6i_vchan(chan); sun6i_dma_resume() local
887 struct sun6i_vchan *vchan = to_sun6i_vchan(chan); sun6i_dma_terminate_all() local
931 struct sun6i_vchan *vchan = to_sun6i_vchan(chan); sun6i_dma_tx_status() local
968 struct sun6i_vchan *vchan = to_sun6i_vchan(chan); sun6i_dma_issue_pending() local
995 struct sun6i_vchan *vchan = to_sun6i_vchan(chan); sun6i_dma_free_chan_resources() local
1009 struct sun6i_vchan *vchan; sun6i_dma_of_xlate() local
1047 struct sun6i_vchan *vchan = &sdev->vchans[i]; sun6i_dma_free() local
1356 struct sun6i_vchan *vchan = &sdc->vchans[i]; sun6i_dma_probe() local
[all...]
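
sun6i_dma_start_desc() illustrates how a driver pops the next issued descriptor off the vchan and hands it to the paired physical channel. The following sketch reuses the hypothetical demo_vchan/demo_pchan types from the owl-dma sketch above; vchan_next_desc() is the real virt-dma helper, and the caller is assumed to hold vchan->vc.lock:

/* Sketch: start the next issued descriptor on the paired physical channel. */
static int demo_start_desc(struct demo_vchan *vchan)
{
	struct virt_dma_desc *vd = vchan_next_desc(&vchan->vc);
	struct demo_pchan *pchan = vchan->pchan;

	if (!pchan)
		return -EAGAIN;		/* no physical channel paired yet */

	if (!vd) {
		pchan->vchan = NULL;	/* nothing issued: release the pchan */
		return -EAGAIN;
	}

	list_del(&vd->node);		/* descriptor is now owned by the hardware */

	/* ... program the hardware descriptor address into pchan->base here ... */

	return 0;
}
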
st_fdma.c
24 return container_of(c, struct st_fdma_chan, vchan.chan); in to_st_fdma_chan()
79 vdesc = vchan_next_desc(&fchan->vchan); in st_fdma_xfer_desc()
85 cmd = FDMA_CMD_START(fchan->vchan.chan.chan_id); in st_fdma_xfer_desc()
94 dev_dbg(fchan->fdev->dev, "start chan:%d\n", fchan->vchan.chan.chan_id); in st_fdma_xfer_desc()
101 int ch_id = fchan->vchan.chan.chan_id; in st_fdma_ch_sta_update()
139 spin_lock(&fchan->vchan.lock); in st_fdma_irq_handler()
157 spin_unlock(&fchan->vchan.lock); in st_fdma_irq_handler()
282 fchan->vchan.chan.chan_id, fchan->cfg.type); in st_fdma_alloc_chan_res()
294 __func__, fchan->vchan.chan.chan_id); in st_fdma_free_chan_res()
299 spin_lock_irqsave(&fchan->vchan in st_fdma_free_chan_res()
[all...]
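
The to_st_fdma_chan() hit (like chan_to_axi_dmac() and to_idma64() further down) shows the standard container_of conversions that fall out of embedding struct virt_dma_chan: both the driver channel and its dma_device can be recovered from a bare struct dma_chan pointer. A minimal sketch with hypothetical demo_* types:

/* Sketch: container_of conversions around an embedded virt_dma_chan. */
struct demo_chan {
	struct virt_dma_chan	vchan;	/* embedded virt-dma channel */
	/* driver-specific per-channel state follows */
};

struct demo_dev {
	struct dma_device	ddev;
	struct demo_chan	chans[4];
};

static inline struct demo_chan *to_demo_chan(struct dma_chan *c)
{
	return container_of(c, struct demo_chan, vchan.chan);
}

static inline struct demo_dev *to_demo_dev(struct demo_chan *dc)
{
	return container_of(dc->vchan.chan.device, struct demo_dev, ddev);
}
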
fsl-edma-common.c
48 u32 ch = fsl_chan->vchan.chan.chan_id; in fsl_edma_enable_request()
65 u32 ch = fsl_chan->vchan.chan.chan_id; in fsl_edma_disable_request()
109 u32 ch = fsl_chan->vchan.chan.chan_id; in fsl_edma_chan_mux()
116 ch_off = fsl_chan->vchan.chan.chan_id % chans_per_mux; in fsl_edma_chan_mux()
166 spin_lock_irqsave(&fsl_chan->vchan.lock, flags); in fsl_edma_terminate_all()
170 vchan_get_all_descriptors(&fsl_chan->vchan, &head); in fsl_edma_terminate_all()
171 spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags); in fsl_edma_terminate_all()
172 vchan_dma_desc_free_list(&fsl_chan->vchan, &head); in fsl_edma_terminate_all()
182 spin_lock_irqsave(&fsl_chan->vchan.lock, flags); in fsl_edma_pause()
188 spin_unlock_irqrestore(&fsl_chan->vchan in fsl_edma_pause()
[all...]
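
The fsl_edma_terminate_all() hits show the terminate_all idiom that repeats across almost every driver in this list: collect the queued descriptors under vchan.lock with vchan_get_all_descriptors(), then free them outside the lock with vchan_dma_desc_free_list(). A hedged sketch follows; demo_terminate_all is a hypothetical name, while the vchan_* calls are the real helpers from drivers/dma/virt-dma.h:

/* Sketch: generic terminate_all built on the virt-dma helpers. */
static int demo_terminate_all(struct dma_chan *chan)
{
	struct virt_dma_chan *vc = to_virt_chan(chan);
	unsigned long flags;
	LIST_HEAD(head);

	spin_lock_irqsave(&vc->lock, flags);
	/* ... stop the hardware and drop any in-flight descriptor here ... */
	vchan_get_all_descriptors(vc, &head);
	spin_unlock_irqrestore(&vc->lock, flags);

	vchan_dma_desc_free_list(vc, &head);	/* free outside the lock */
	return 0;
}
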
idma64.c
107 struct idma64 *idma64 = to_idma64(idma64c->vchan.chan.device); in idma64_stop_transfer()
114 struct idma64 *idma64 = to_idma64(idma64c->vchan.chan.device); in idma64_start_transfer()
118 vdesc = vchan_next_desc(&idma64c->vchan); in idma64_start_transfer()
142 spin_lock(&idma64c->vchan.lock); in idma64_chan_irq()
159 spin_unlock(&idma64c->vchan.lock); in idma64_chan_irq()
326 return vchan_tx_prep(&idma64c->vchan, &desc->vdesc, flags); in idma64_prep_slave_sg()
334 spin_lock_irqsave(&idma64c->vchan.lock, flags); in idma64_issue_pending()
335 if (vchan_issue_pending(&idma64c->vchan) && !idma64c->desc) in idma64_issue_pending()
337 spin_unlock_irqrestore(&idma64c->vchan.lock, flags); in idma64_issue_pending()
378 spin_lock_irqsave(&idma64c->vchan in idma64_tx_status()
[all...]
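
idma64_issue_pending() (and the hsu and jz4780 equivalents below) follows the standard issue_pending shape: under vchan.lock, vchan_issue_pending() moves submitted descriptors onto the issued list, and the driver kicks the hardware only when the channel is idle. Sketch, with demo_start_transfer() standing in for the driver's hardware-start routine:

/* Sketch: demo_start_transfer() is hypothetical and would program the hardware. */
static void demo_start_transfer(struct virt_dma_chan *vc);

static void demo_issue_pending(struct dma_chan *chan)
{
	struct virt_dma_chan *vc = to_virt_chan(chan);
	unsigned long flags;

	spin_lock_irqsave(&vc->lock, flags);
	if (vchan_issue_pending(vc))		/* true if anything moved to the issued list */
		demo_start_transfer(vc);	/* real drivers also check the channel is idle */
	spin_unlock_irqrestore(&vc->lock, flags);
}
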
dma-jz4780.c
129 struct virt_dma_chan vchan; member
166 return container_of(chan, struct jz4780_dma_chan, vchan.chan); in to_jz4780_dma_chan()
178 return container_of(jzchan->vchan.chan.device, struct jz4780_dma_dev, in jz4780_dma_chan_parent()
395 return vchan_tx_prep(&jzchan->vchan, &desc->vdesc, flags); in jz4780_dma_prep_slave_sg()
447 return vchan_tx_prep(&jzchan->vchan, &desc->vdesc, flags); in jz4780_dma_prep_dma_cyclic()
475 return vchan_tx_prep(&jzchan->vchan, &desc->vdesc, flags); in jz4780_dma_prep_dma_memcpy()
486 vdesc = vchan_next_desc(&jzchan->vchan); in jz4780_dma_begin()
558 spin_lock_irqsave(&jzchan->vchan.lock, flags); in jz4780_dma_issue_pending()
560 if (vchan_issue_pending(&jzchan->vchan) && !jzchan->desc) in jz4780_dma_issue_pending()
563 spin_unlock_irqrestore(&jzchan->vchan in jz4780_dma_issue_pending()
[all...]
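
The vchan_tx_prep() hits in jz4780 (and in idma64, hsu, sf-pdma, fsl-qdma) show the descriptor-preparation half of the API: the driver descriptor embeds a struct virt_dma_desc and is handed to the virt-dma core, which fills in the dma_async_tx_descriptor. A sketch with a hypothetical demo_desc/demo_prep_memcpy:

/* Sketch: descriptor preparation through vchan_tx_prep(). */
#include <linux/slab.h>	/* kzalloc */

struct demo_desc {
	struct virt_dma_desc	vdesc;	/* must be embedded for vchan_tx_prep() */
	dma_addr_t		src;
	dma_addr_t		dst;
	size_t			len;
};

static struct dma_async_tx_descriptor *
demo_prep_memcpy(struct dma_chan *chan, dma_addr_t dst, dma_addr_t src,
		 size_t len, unsigned long flags)
{
	struct virt_dma_chan *vc = to_virt_chan(chan);
	struct demo_desc *desc;

	desc = kzalloc(sizeof(*desc), GFP_NOWAIT);	/* prep callbacks must not sleep */
	if (!desc)
		return NULL;

	desc->src = src;
	desc->dst = dst;
	desc->len = len;

	/*
	 * Links desc->vdesc onto the vchan's allocated list; the descriptor is
	 * later released through vc->desc_free(), which the driver installs at
	 * vchan_init() time.
	 */
	return vchan_tx_prep(vc, &desc->vdesc, flags);
}
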
dma-axi-dmac.c
120 struct virt_dma_chan vchan; member
152 return container_of(chan->vchan.chan.device, struct axi_dmac, in chan_to_axi_dmac()
158 return container_of(c, struct axi_dmac_chan, vchan.chan); in to_axi_dmac_chan()
219 vdesc = vchan_next_desc(&chan->vchan); in axi_dmac_start_transfer()
419 spin_lock(&dmac->chan.vchan.lock); in axi_dmac_interrupt_handler()
430 spin_unlock(&dmac->chan.vchan.lock); in axi_dmac_interrupt_handler()
442 spin_lock_irqsave(&chan->vchan.lock, flags); in axi_dmac_terminate_all()
445 vchan_get_all_descriptors(&chan->vchan, &head); in axi_dmac_terminate_all()
447 spin_unlock_irqrestore(&chan->vchan.lock, flags); in axi_dmac_terminate_all()
449 vchan_dma_desc_free_list(&chan->vchan, in axi_dmac_terminate_all()
[all...]
fsl-qdma.c
176 struct virt_dma_chan vchan; member
296 return container_of(chan, struct fsl_qdma_chan, vchan.chan); in to_fsl_qdma_chan()
313 spin_lock_irqsave(&fsl_chan->vchan.lock, flags); in fsl_qdma_free_chan_resources()
314 vchan_get_all_descriptors(&fsl_chan->vchan, &head); in fsl_qdma_free_chan_resources()
315 spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags); in fsl_qdma_free_chan_resources()
317 vchan_dma_desc_free_list(&fsl_chan->vchan, &head); in fsl_qdma_free_chan_resources()
729 spin_lock(&fsl_comp->qchan->vchan.lock); in fsl_qdma_queue_transfer_complete()
732 spin_unlock(&fsl_comp->qchan->vchan.lock); in fsl_qdma_queue_transfer_complete()
985 return vchan_tx_prep(&fsl_chan->vchan, &fsl_comp->vdesc, flags); in fsl_qdma_prep_memcpy()
999 vdesc = vchan_next_desc(&fsl_chan->vchan); in fsl_qdma_enqueue_desc()
[all...]
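
fsl_qdma_queue_transfer_complete() and the other interrupt handlers in this list all report completion the same way: under vchan.lock, vchan_cookie_complete() marks the cookie done and schedules the vchan tasklet that runs the client callbacks. A short sketch (demo_transfer_done is hypothetical):

/* Sketch: completion reporting from IRQ context via the virt-dma helper. */
static void demo_transfer_done(struct virt_dma_chan *vc,
			       struct virt_dma_desc *vd)
{
	spin_lock(&vc->lock);		/* typically called from hard IRQ context */
	vchan_cookie_complete(vd);	/* marks the cookie done, queues the callback tasklet */
	spin_unlock(&vc->lock);
}
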
/kernel/linux/linux-6.6/drivers/dma/
owl-dma.c
187 * @vchan: the virtual channel currently being served by this physical channel
192 struct owl_dma_vchan *vchan; member
381 static inline int owl_dma_cfg_lli(struct owl_dma_vchan *vchan, in owl_dma_cfg_lli() argument
388 struct owl_dma *od = to_owl_dma(vchan->vc.chan.device); in owl_dma_cfg_lli()
401 mode |= OWL_DMA_MODE_TS(vchan->drq) in owl_dma_cfg_lli()
414 mode |= OWL_DMA_MODE_TS(vchan->drq) in owl_dma_cfg_lli()
469 struct owl_dma_vchan *vchan) in owl_dma_get_pchan()
479 if (!pchan->vchan) { in owl_dma_get_pchan()
480 pchan->vchan = vchan; in owl_dma_get_pchan()
468 owl_dma_get_pchan(struct owl_dma *od, struct owl_dma_vchan *vchan) owl_dma_get_pchan() argument
535 owl_dma_start_next_txd(struct owl_dma_vchan *vchan) owl_dma_start_next_txd() argument
584 owl_dma_phy_free(struct owl_dma *od, struct owl_dma_vchan *vchan) owl_dma_phy_free() argument
595 struct owl_dma_vchan *vchan; owl_dma_interrupt() local
700 struct owl_dma_vchan *vchan = to_owl_vchan(chan); owl_dma_terminate_all() local
726 struct owl_dma_vchan *vchan = to_owl_vchan(chan); owl_dma_config() local
740 struct owl_dma_vchan *vchan = to_owl_vchan(chan); owl_dma_pause() local
754 struct owl_dma_vchan *vchan = to_owl_vchan(chan); owl_dma_resume() local
771 owl_dma_getbytes_chan(struct owl_dma_vchan *vchan) owl_dma_getbytes_chan() argument
808 struct owl_dma_vchan *vchan = to_owl_vchan(chan); owl_dma_tx_status() local
838 owl_dma_phy_alloc_and_start(struct owl_dma_vchan *vchan) owl_dma_phy_alloc_and_start() argument
855 struct owl_dma_vchan *vchan = to_owl_vchan(chan); owl_dma_issue_pending() local
872 struct owl_dma_vchan *vchan = to_owl_vchan(chan); owl_dma_prep_memcpy() local
923 struct owl_dma_vchan *vchan = to_owl_vchan(chan); owl_dma_prep_slave_sg() local
988 struct owl_dma_vchan *vchan = to_owl_vchan(chan); owl_prep_dma_cyclic() local
1044 struct owl_dma_vchan *vchan = to_owl_vchan(chan); owl_dma_free_chan_resources() local
1052 struct owl_dma_vchan *vchan = NULL; owl_dma_free() local
1066 struct owl_dma_vchan *vchan; owl_dma_of_xlate() local
1191 struct owl_dma_vchan *vchan = &od->vchans[i]; owl_dma_probe() local
[all...]
sun4i-dma.c
138 /* vchan currently being serviced */
139 struct sun4i_dma_vchan *vchan; member
223 struct sun4i_dma_vchan *vchan = to_sun4i_dma_vchan(chan); in sun4i_dma_free_chan_resources() local
225 vchan_free_chan_resources(&vchan->vc); in sun4i_dma_free_chan_resources()
229 struct sun4i_dma_vchan *vchan) in find_and_use_pchan()
239 if (vchan->is_dedicated) { in find_and_use_pchan()
250 pchan->vchan = vchan; in find_and_use_pchan()
267 pchan->vchan = NULL; in release_pchan()
322 * Execute pending operations on a vchan
228 find_and_use_pchan(struct sun4i_dma_dev *priv, struct sun4i_dma_vchan *vchan) find_and_use_pchan() argument
330 __execute_vchan_pending(struct sun4i_dma_dev *priv, struct sun4i_dma_vchan *vchan) __execute_vchan_pending() argument
625 struct sun4i_dma_vchan *vchan = to_sun4i_dma_vchan(chan); sun4i_dma_prep_dma_memcpy() local
676 struct sun4i_dma_vchan *vchan = to_sun4i_dma_vchan(chan); sun4i_dma_prep_dma_cyclic() local
796 struct sun4i_dma_vchan *vchan = to_sun4i_dma_vchan(chan); sun4i_dma_prep_slave_sg() local
890 struct sun4i_dma_vchan *vchan = to_sun4i_dma_vchan(chan); sun4i_dma_terminate_all() local
926 struct sun4i_dma_vchan *vchan = to_sun4i_dma_vchan(chan); sun4i_dma_config() local
937 struct sun4i_dma_vchan *vchan; sun4i_dma_of_xlate() local
967 struct sun4i_dma_vchan *vchan = to_sun4i_dma_vchan(chan); sun4i_dma_tx_status() local
1015 struct sun4i_dma_vchan *vchan = to_sun4i_dma_vchan(chan); sun4i_dma_issue_pending() local
1034 struct sun4i_dma_vchan *vchan; sun4i_dma_interrupt() local
1223 struct sun4i_dma_vchan *vchan = &priv->vchans[i]; sun4i_dma_probe() local
[all...]
sun6i-dma.c
180 struct sun6i_vchan *vchan; member
395 static inline void sun6i_dma_dump_lli(struct sun6i_vchan *vchan, in sun6i_dma_dump_lli() argument
399 dev_dbg(chan2dev(&vchan->vc.chan), in sun6i_dma_dump_lli()
434 static int sun6i_dma_start_desc(struct sun6i_vchan *vchan) in sun6i_dma_start_desc() argument
436 struct sun6i_dma_dev *sdev = to_sun6i_dma_dev(vchan->vc.chan.device); in sun6i_dma_start_desc()
437 struct virt_dma_desc *desc = vchan_next_desc(&vchan->vc); in sun6i_dma_start_desc()
438 struct sun6i_pchan *pchan = vchan->phy; in sun6i_dma_start_desc()
455 sun6i_dma_dump_lli(vchan, pchan->desc->v_lli, pchan->desc->p_lli); in sun6i_dma_start_desc()
460 vchan->irq_type = vchan in sun6i_dma_start_desc()
480 struct sun6i_vchan *vchan; sun6i_dma_tasklet() local
545 struct sun6i_vchan *vchan; sun6i_dma_interrupt() local
653 struct sun6i_vchan *vchan = to_sun6i_vchan(chan); sun6i_dma_prep_dma_memcpy() local
706 struct sun6i_vchan *vchan = to_sun6i_vchan(chan); sun6i_dma_prep_slave_sg() local
792 struct sun6i_vchan *vchan = to_sun6i_vchan(chan); sun6i_dma_prep_dma_cyclic() local
857 struct sun6i_vchan *vchan = to_sun6i_vchan(chan); sun6i_dma_config() local
867 struct sun6i_vchan *vchan = to_sun6i_vchan(chan); sun6i_dma_pause() local
887 struct sun6i_vchan *vchan = to_sun6i_vchan(chan); sun6i_dma_resume() local
912 struct sun6i_vchan *vchan = to_sun6i_vchan(chan); sun6i_dma_terminate_all() local
956 struct sun6i_vchan *vchan = to_sun6i_vchan(chan); sun6i_dma_tx_status() local
993 struct sun6i_vchan *vchan = to_sun6i_vchan(chan); sun6i_dma_issue_pending() local
1020 struct sun6i_vchan *vchan = to_sun6i_vchan(chan); sun6i_dma_free_chan_resources() local
1034 struct sun6i_vchan *vchan; sun6i_dma_of_xlate() local
1072 struct sun6i_vchan *vchan = &sdev->vchans[i]; sun6i_dma_free() local
1406 struct sun6i_vchan *vchan = &sdc->vchans[i]; sun6i_dma_probe() local
[all...]
fsl-edma-common.c
47 spin_lock(&fsl_chan->vchan.lock); in fsl_edma_tx_chan_handler()
51 spin_unlock(&fsl_chan->vchan.lock); in fsl_edma_tx_chan_handler()
68 spin_unlock(&fsl_chan->vchan.lock); in fsl_edma_tx_chan_handler()
112 u32 ch = fsl_chan->vchan.chan.chan_id; in fsl_edma_enable_request()
146 u32 ch = fsl_chan->vchan.chan.chan_id; in fsl_edma_disable_request()
192 u32 ch = fsl_chan->vchan.chan.chan_id; in fsl_edma_chan_mux()
202 ch_off = fsl_chan->vchan.chan.chan_id % chans_per_mux; in fsl_edma_chan_mux()
245 spin_lock_irqsave(&fsl_chan->vchan.lock, flags); in fsl_edma_terminate_all()
249 vchan_get_all_descriptors(&fsl_chan->vchan, &head); in fsl_edma_terminate_all()
250 spin_unlock_irqrestore(&fsl_chan->vchan in fsl_edma_terminate_all()
[all...]
st_fdma.c
24 return container_of(c, struct st_fdma_chan, vchan.chan); in to_st_fdma_chan()
79 vdesc = vchan_next_desc(&fchan->vchan); in st_fdma_xfer_desc()
85 cmd = FDMA_CMD_START(fchan->vchan.chan.chan_id); in st_fdma_xfer_desc()
94 dev_dbg(fchan->fdev->dev, "start chan:%d\n", fchan->vchan.chan.chan_id); in st_fdma_xfer_desc()
101 int ch_id = fchan->vchan.chan.chan_id; in st_fdma_ch_sta_update()
139 spin_lock(&fchan->vchan.lock); in st_fdma_irq_handler()
157 spin_unlock(&fchan->vchan.lock); in st_fdma_irq_handler()
282 fchan->vchan.chan.chan_id, fchan->cfg.type); in st_fdma_alloc_chan_res()
294 __func__, fchan->vchan.chan.chan_id); in st_fdma_free_chan_res()
299 spin_lock_irqsave(&fchan->vchan in st_fdma_free_chan_res()
[all...]
idma64.c
107 struct idma64 *idma64 = to_idma64(idma64c->vchan.chan.device); in idma64_stop_transfer()
114 struct idma64 *idma64 = to_idma64(idma64c->vchan.chan.device); in idma64_start_transfer()
118 vdesc = vchan_next_desc(&idma64c->vchan); in idma64_start_transfer()
143 stat = this_cpu_ptr(idma64c->vchan.chan.local); in idma64_chan_irq()
145 spin_lock(&idma64c->vchan.lock); in idma64_chan_irq()
163 spin_unlock(&idma64c->vchan.lock); in idma64_chan_irq()
330 return vchan_tx_prep(&idma64c->vchan, &desc->vdesc, flags); in idma64_prep_slave_sg()
338 spin_lock_irqsave(&idma64c->vchan.lock, flags); in idma64_issue_pending()
339 if (vchan_issue_pending(&idma64c->vchan) && !idma64c->desc) in idma64_issue_pending()
341 spin_unlock_irqrestore(&idma64c->vchan in idma64_issue_pending()
[all...]
dma-jz4780.c
130 struct virt_dma_chan vchan; member
167 return container_of(chan, struct jz4780_dma_chan, vchan.chan); in to_jz4780_dma_chan()
179 return container_of(jzchan->vchan.chan.device, struct jz4780_dma_dev, in jz4780_dma_chan_parent()
403 return vchan_tx_prep(&jzchan->vchan, &desc->vdesc, flags); in jz4780_dma_prep_slave_sg()
455 return vchan_tx_prep(&jzchan->vchan, &desc->vdesc, flags); in jz4780_dma_prep_dma_cyclic()
483 return vchan_tx_prep(&jzchan->vchan, &desc->vdesc, flags); in jz4780_dma_prep_dma_memcpy()
494 vdesc = vchan_next_desc(&jzchan->vchan); in jz4780_dma_begin()
566 spin_lock_irqsave(&jzchan->vchan.lock, flags); in jz4780_dma_issue_pending()
568 if (vchan_issue_pending(&jzchan->vchan) && !jzchan->desc) in jz4780_dma_issue_pending()
571 spin_unlock_irqrestore(&jzchan->vchan in jz4780_dma_issue_pending()
[all...]
dma-axi-dmac.c
124 struct virt_dma_chan vchan; member
156 return container_of(chan->vchan.chan.device, struct axi_dmac, in chan_to_axi_dmac()
162 return container_of(c, struct axi_dmac_chan, vchan.chan); in to_axi_dmac_chan()
223 vdesc = vchan_next_desc(&chan->vchan); in axi_dmac_start_transfer()
423 spin_lock(&dmac->chan.vchan.lock); in axi_dmac_interrupt_handler()
434 spin_unlock(&dmac->chan.vchan.lock); in axi_dmac_interrupt_handler()
446 spin_lock_irqsave(&chan->vchan.lock, flags); in axi_dmac_terminate_all()
449 vchan_get_all_descriptors(&chan->vchan, &head); in axi_dmac_terminate_all()
451 spin_unlock_irqrestore(&chan->vchan.lock, flags); in axi_dmac_terminate_all()
453 vchan_dma_desc_free_list(&chan->vchan, in axi_dmac_terminate_all()
[all...]
stm32-dma.c
211 struct virt_dma_chan vchan; member
238 return container_of(chan->vchan.chan.device, struct stm32_dma_device, in stm32_dma_get_dev()
244 return container_of(c, struct stm32_dma_chan, vchan.chan); in to_stm32_dma_chan()
254 return &chan->vchan.chan.dev->device; in chan2dev()
506 spin_lock_irqsave(&chan->vchan.lock, flags); in stm32_dma_terminate_all()
516 vchan_get_all_descriptors(&chan->vchan, &head); in stm32_dma_terminate_all()
517 spin_unlock_irqrestore(&chan->vchan.lock, flags); in stm32_dma_terminate_all()
518 vchan_dma_desc_free_list(&chan->vchan, &head); in stm32_dma_terminate_all()
527 vchan_synchronize(&chan->vchan); in stm32_dma_synchronize()
571 vdesc = vchan_next_desc(&chan->vchan); in stm32_dma_start_transfer()
[all...]
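
stm32_dma_synchronize() shows the device_synchronize hook: after terminate_all, vchan_synchronize() waits out the vchan completion tasklet so no client callback can still be in flight. A one-line sketch (demo_synchronize is hypothetical):

/* Sketch: device_synchronize built on the virt-dma helper. */
static void demo_synchronize(struct dma_chan *chan)
{
	struct virt_dma_chan *vc = to_virt_chan(chan);

	vchan_synchronize(vc);	/* flushes the completion tasklet and pending frees */
}
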
/kernel/linux/linux-6.6/drivers/dma/hsu/
hsu.c
121 vdesc = vchan_next_desc(&hsuc->vchan); in hsu_dma_start_transfer()
168 spin_lock_irqsave(&hsuc->vchan.lock, flags); in hsu_dma_get_status()
170 spin_unlock_irqrestore(&hsuc->vchan.lock, flags); in hsu_dma_get_status()
222 stat = this_cpu_ptr(hsuc->vchan.chan.local); in hsu_dma_do_irq()
224 spin_lock_irqsave(&hsuc->vchan.lock, flags); in hsu_dma_do_irq()
238 spin_unlock_irqrestore(&hsuc->vchan.lock, flags); in hsu_dma_do_irq()
295 return vchan_tx_prep(&hsuc->vchan, &desc->vdesc, flags); in hsu_dma_prep_slave_sg()
303 spin_lock_irqsave(&hsuc->vchan.lock, flags); in hsu_dma_issue_pending()
304 if (vchan_issue_pending(&hsuc->vchan) && !hsuc->desc) in hsu_dma_issue_pending()
306 spin_unlock_irqrestore(&hsuc->vchan in hsu_dma_issue_pending()
[all...]
/kernel/linux/linux-5.10/drivers/dma/sf-pdma/
sf-pdma.c
44 return container_of(dchan, struct sf_pdma_chan, vchan.chan); in to_sf_pdma_chan()
101 desc->async_tx = vchan_tx_prep(&chan->vchan, &desc->vdesc, flags); in sf_pdma_prep_dma_memcpy()
103 spin_lock_irqsave(&chan->vchan.lock, iflags); in sf_pdma_prep_dma_memcpy()
105 spin_unlock_irqrestore(&chan->vchan.lock, iflags); in sf_pdma_prep_dma_memcpy()
144 spin_lock_irqsave(&chan->vchan.lock, flags); in sf_pdma_free_chan_resources()
148 vchan_get_all_descriptors(&chan->vchan, &head); in sf_pdma_free_chan_resources()
150 spin_unlock_irqrestore(&chan->vchan.lock, flags); in sf_pdma_free_chan_resources()
151 vchan_dma_desc_free_list(&chan->vchan, &head); in sf_pdma_free_chan_resources()
164 spin_lock_irqsave(&chan->vchan.lock, flags); in sf_pdma_desc_residue()
166 list_for_each_entry(vd, &chan->vchan in sf_pdma_desc_residue()
241 struct virt_dma_chan *vchan = &chan->vchan; sf_pdma_get_first_pending_desc() local
[all...]
/kernel/linux/linux-6.6/drivers/dma/sf-pdma/
sf-pdma.c
44 return container_of(dchan, struct sf_pdma_chan, vchan.chan); in to_sf_pdma_chan()
100 desc->async_tx = vchan_tx_prep(&chan->vchan, &desc->vdesc, flags); in sf_pdma_prep_dma_memcpy()
102 spin_lock_irqsave(&chan->vchan.lock, iflags); in sf_pdma_prep_dma_memcpy()
104 spin_unlock_irqrestore(&chan->vchan.lock, iflags); in sf_pdma_prep_dma_memcpy()
143 spin_lock_irqsave(&chan->vchan.lock, flags); in sf_pdma_free_chan_resources()
147 vchan_get_all_descriptors(&chan->vchan, &head); in sf_pdma_free_chan_resources()
149 spin_unlock_irqrestore(&chan->vchan.lock, flags); in sf_pdma_free_chan_resources()
150 vchan_dma_desc_free_list(&chan->vchan, &head); in sf_pdma_free_chan_resources()
163 spin_lock_irqsave(&chan->vchan.lock, flags); in sf_pdma_desc_residue()
165 list_for_each_entry(vd, &chan->vchan in sf_pdma_desc_residue()
240 struct virt_dma_chan *vchan = &chan->vchan; sf_pdma_get_first_pending_desc() local
[all...]
/kernel/linux/linux-5.10/drivers/dma/hsu/
hsu.c
113 vdesc = vchan_next_desc(&hsuc->vchan); in hsu_dma_start_transfer()
160 spin_lock_irqsave(&hsuc->vchan.lock, flags); in hsu_dma_get_status()
162 spin_unlock_irqrestore(&hsuc->vchan.lock, flags); in hsu_dma_get_status()
214 spin_lock_irqsave(&hsuc->vchan.lock, flags); in hsu_dma_do_irq()
227 spin_unlock_irqrestore(&hsuc->vchan.lock, flags); in hsu_dma_do_irq()
284 return vchan_tx_prep(&hsuc->vchan, &desc->vdesc, flags); in hsu_dma_prep_slave_sg()
292 spin_lock_irqsave(&hsuc->vchan.lock, flags); in hsu_dma_issue_pending()
293 if (vchan_issue_pending(&hsuc->vchan) && !hsuc->desc) in hsu_dma_issue_pending()
295 spin_unlock_irqrestore(&hsuc->vchan.lock, flags); in hsu_dma_issue_pending()
328 spin_lock_irqsave(&hsuc->vchan in hsu_dma_tx_status()
[all...]
/kernel/linux/linux-6.6/drivers/dma/lgm/
lgm-dma.c
195 struct virt_dma_chan vchan; member
292 return container_of(chan, struct ldma_chan, vchan.chan); in to_ldma_chan()
521 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_cctrl_cfg()
550 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_irq_init()
578 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_set_class()
596 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_on()
615 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_off()
638 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_desc_hw_cfg()
662 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_desc_cfg()
696 struct ldma_dev *d = to_ldma_dev(c->vchan in ldma_chan_reset()
[all...]
/kernel/linux/linux-5.10/drivers/staging/ralink-gdma/
ralink-gdma.c
108 struct virt_dma_chan vchan; member
141 return container_of(chan->vchan.chan.device, struct gdma_dma_dev, in gdma_dma_chan_get_dev()
147 return container_of(c, struct gdma_dmaengine_chan, vchan.chan); in to_gdma_dma_chan()
228 spin_lock_irqsave(&chan->vchan.lock, flags); in gdma_dma_terminate_all()
231 vchan_get_all_descriptors(&chan->vchan, &head); in gdma_dma_terminate_all()
232 spin_unlock_irqrestore(&chan->vchan.lock, flags); in gdma_dma_terminate_all()
234 vchan_dma_desc_free_list(&chan->vchan, &head); in gdma_dma_terminate_all()
417 vdesc = vchan_next_desc(&chan->vchan); in gdma_next_desc()
436 spin_lock_irqsave(&chan->vchan.lock, flags); in gdma_dma_chan_irq()
460 spin_unlock_irqrestore(&chan->vchan in gdma_dma_chan_irq()
[all...]
/kernel/linux/linux-5.10/drivers/staging/mt7621-dma/
hsdma-mt7621.c
144 struct virt_dma_chan vchan; member
168 return container_of(chan->vchan.chan.device, struct mtk_hsdam_engine, in mtk_hsdma_chan_get_dev()
174 return container_of(c, struct mtk_hsdma_chan, vchan.chan); in to_mtk_hsdma_chan()
290 spin_lock_bh(&chan->vchan.lock); in mtk_hsdma_terminate_all()
293 vchan_get_all_descriptors(&chan->vchan, &head); in mtk_hsdma_terminate_all()
294 spin_unlock_bh(&chan->vchan.lock); in mtk_hsdma_terminate_all()
296 vchan_dma_desc_free_list(&chan->vchan, &head); in mtk_hsdma_terminate_all()
389 vdesc = vchan_next_desc(&chan->vchan); in gdma_next_desc()
407 spin_lock_bh(&chan->vchan.lock); in mtk_hsdma_chan_done()
421 spin_unlock_bh(&chan->vchan in mtk_hsdma_chan_done()
[all...]

Completed in 37 milliseconds
