Lines matching defs:mchan, i.e. references to the struct hidma_chan pointer mchan in the Qualcomm HIDMA dmaengine driver (drivers/dma/qcom/hidma.c). The leading number on each match is its line in that file.

114 static void hidma_process_completed(struct hidma_chan *mchan)
116 struct dma_device *ddev = mchan->chan.device;
128 spin_lock_irqsave(&mchan->lock, irqflags);
129 list_splice_tail_init(&mchan->completed, &list);
130 spin_unlock_irqrestore(&mchan->lock, irqflags);
143 spin_lock_irqsave(&mchan->lock, irqflags);
145 mchan->last_success = last_cookie;
152 spin_unlock_irqrestore(&mchan->lock, irqflags);
158 spin_lock_irqsave(&mchan->lock, irqflags);
159 list_move(&mdesc->node, &mchan->free);
160 spin_unlock_irqrestore(&mchan->lock, irqflags);
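
Lines 114-160 above are hidma_process_completed(), the per-channel completion drain. The pattern to note is "splice under the lock, walk outside it": the completed list is detached onto a stack-local list head in one O(1) operation, client callbacks then run with no locks held, and the lock is re-taken only briefly per descriptor to record last_success and recycle the node onto the free list. Below is a minimal sketch of that pattern, not the driver's exact code: the hidma_chan and hidma_desc stand-ins are trimmed to the fields this listing touches (the later sketches reuse them), and the cookie-completion and callback steps are reduced to comments, since they come from the dmaengine core's private drivers/dma/dmaengine.h helpers.

    #include <linux/dmaengine.h>
    #include <linux/list.h>
    #include <linux/slab.h>
    #include <linux/spinlock.h>

    /* Stand-ins for the driver's types, trimmed to the fields used here. */
    struct hidma_desc {
            struct dma_async_tx_descriptor desc;
            struct list_head node;
    };

    struct hidma_chan {
            spinlock_t lock;
            struct dma_chan chan;
            struct hidma_desc *running;
            dma_cookie_t last_success;
            bool paused;
            bool allocated;
            struct list_head free;
            struct list_head prepared;
            struct list_head queued;
            struct list_head active;
            struct list_head completed;
    };

    static void process_completed_sketch(struct hidma_chan *mchan)
    {
            struct hidma_desc *mdesc, *next;
            unsigned long irqflags;
            LIST_HEAD(list);

            /* Detach everything that has completed in one splice. */
            spin_lock_irqsave(&mchan->lock, irqflags);
            list_splice_tail_init(&mchan->completed, &list);
            spin_unlock_irqrestore(&mchan->lock, irqflags);

            list_for_each_entry_safe(mdesc, next, &list, node) {
                    /* Record progress under the lock... */
                    spin_lock_irqsave(&mchan->lock, irqflags);
                    mchan->last_success = mdesc->desc.cookie;
                    spin_unlock_irqrestore(&mchan->lock, irqflags);

                    /* ...invoke the client callback with no locks held... */

                    /* ...then recycle the descriptor onto the free pool. */
                    spin_lock_irqsave(&mchan->lock, irqflags);
                    list_move(&mdesc->node, &mchan->free);
                    spin_unlock_irqrestore(&mchan->lock, irqflags);
            }
    }
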
174 struct hidma_chan *mchan = to_hidma_chan(mdesc->desc.chan);
175 struct dma_device *ddev = mchan->chan.device;
180 spin_lock_irqsave(&mchan->lock, irqflags);
183 list_move_tail(&mdesc->node, &mchan->completed);
187 mchan->running = list_first_entry(&mchan->active,
190 spin_unlock_irqrestore(&mchan->lock, irqflags);
192 hidma_process_completed(mchan);
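
Lines 174-192 are the per-descriptor completion handler: the finished descriptor migrates from active to completed, mchan->running is advanced to the head of active, and the drain above runs. A sketch reusing the stand-ins from the previous block; error/success classification and interrupt bookkeeping are elided, and list_first_entry_or_null() stands in for the driver's guarded list_first_entry():

    static void completion_sketch(struct hidma_desc *mdesc)
    {
            struct hidma_chan *mchan = container_of(mdesc->desc.chan,
                                                    struct hidma_chan, chan);
            unsigned long irqflags;

            spin_lock_irqsave(&mchan->lock, irqflags);

            /* The finished descriptor leaves the active queue... */
            list_move_tail(&mdesc->node, &mchan->completed);

            /* ...and the next active descriptor, if any, becomes current. */
            mchan->running = list_first_entry_or_null(&mchan->active,
                                                      struct hidma_desc, node);
            spin_unlock_irqrestore(&mchan->lock, irqflags);

            process_completed_sketch(mchan);
    }
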
202 struct hidma_chan *mchan;
205 mchan = devm_kzalloc(dmadev->ddev.dev, sizeof(*mchan), GFP_KERNEL);
206 if (!mchan)
210 mchan->dma_sig = dma_sig;
211 mchan->dmadev = dmadev;
212 mchan->chan.device = ddev;
213 dma_cookie_init(&mchan->chan);
215 INIT_LIST_HEAD(&mchan->free);
216 INIT_LIST_HEAD(&mchan->prepared);
217 INIT_LIST_HEAD(&mchan->active);
218 INIT_LIST_HEAD(&mchan->completed);
219 INIT_LIST_HEAD(&mchan->queued);
221 spin_lock_init(&mchan->lock);
222 list_add_tail(&mchan->chan.device_node, &ddev->channels);
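
Lines 202-222 are channel construction: one devm-managed zeroed allocation, cookie counter reset, all five descriptor lists initialized empty, the lock initialized, and the channel linked onto the dma_device's channel list. A sketch with the hidma_dev bookkeeping and dma_sig wiring trimmed; dma_cookie_init() is the core's private drivers/dma/dmaengine.h helper:

    #include <linux/device.h>

    static int chan_init_sketch(struct dma_device *ddev)
    {
            struct hidma_chan *mchan;

            mchan = devm_kzalloc(ddev->dev, sizeof(*mchan), GFP_KERNEL);
            if (!mchan)
                    return -ENOMEM;

            mchan->chan.device = ddev;
            dma_cookie_init(&mchan->chan);

            /* Descriptors start on ->free; the other lists fill over time. */
            INIT_LIST_HEAD(&mchan->free);
            INIT_LIST_HEAD(&mchan->prepared);
            INIT_LIST_HEAD(&mchan->queued);
            INIT_LIST_HEAD(&mchan->active);
            INIT_LIST_HEAD(&mchan->completed);

            spin_lock_init(&mchan->lock);
            list_add_tail(&mchan->chan.device_node, &ddev->channels);
            return 0;
    }
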
237 struct hidma_chan *mchan = to_hidma_chan(dmach);
238 struct hidma_dev *dmadev = mchan->dmadev;
243 spin_lock_irqsave(&mchan->lock, flags);
244 list_for_each_entry_safe(qdesc, next, &mchan->queued, node) {
246 list_move_tail(&qdesc->node, &mchan->active);
249 if (!mchan->running) {
250 struct hidma_desc *desc = list_first_entry(&mchan->active,
253 mchan->running = desc;
255 spin_unlock_irqrestore(&mchan->lock, flags);
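
Lines 237-255 are device_issue_pending: everything on queued is handed to the hardware and moved to active in one critical section, and if nothing was running the head of active is promoted. Sketch; the per-descriptor hardware enqueue and the final doorbell are reduced to comments, and list_first_entry_or_null() again replaces the guarded lookup:

    static void issue_pending_sketch(struct hidma_chan *mchan)
    {
            struct hidma_desc *qdesc, *next;
            unsigned long flags;

            spin_lock_irqsave(&mchan->lock, flags);
            list_for_each_entry_safe(qdesc, next, &mchan->queued, node) {
                    /* low-level hardware enqueue of qdesc elided */
                    list_move_tail(&qdesc->node, &mchan->active);
            }

            /* Promote the head of ->active if the engine was idle. */
            if (!mchan->running)
                    mchan->running = list_first_entry_or_null(&mchan->active,
                                                              struct hidma_desc,
                                                              node);
            spin_unlock_irqrestore(&mchan->lock, flags);

            /* start/doorbell of the low-level engine elided */
    }
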
282 struct hidma_chan *mchan = to_hidma_chan(dmach);
289 is_success = hidma_txn_is_success(cookie, mchan->last_success,
294 if (mchan->paused && (ret == DMA_IN_PROGRESS)) {
298 spin_lock_irqsave(&mchan->lock, flags);
299 if (mchan->running)
300 runcookie = mchan->running->desc.cookie;
307 spin_unlock_irqrestore(&mchan->lock, flags);
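
Lines 282-307 are device_tx_status. Two refinements sit on top of the generic cookie check: last_success lets the driver report an error for a transaction that completed unsuccessfully, and an in-progress result is downgraded to DMA_PAUSED, but only after confirming under the lock (lines 298-307) that the queried cookie is the one currently running. A simplified sketch that keeps just the paused downgrade; dma_cookie_status() is the core helper:

    static enum dma_status tx_status_sketch(struct dma_chan *dmach,
                                            dma_cookie_t cookie,
                                            struct dma_tx_state *txstate)
    {
            struct hidma_chan *mchan = container_of(dmach,
                                                    struct hidma_chan, chan);
            enum dma_status ret;

            ret = dma_cookie_status(dmach, cookie, txstate);
            /* success-vs-error refinement via ->last_success elided */

            /* An in-flight request on a paused channel reports PAUSED. */
            if (mchan->paused && ret == DMA_IN_PROGRESS)
                    ret = DMA_PAUSED;

            return ret;
    }
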
319 struct hidma_chan *mchan = to_hidma_chan(txd->chan);
320 struct hidma_dev *dmadev = mchan->dmadev;
335 spin_lock_irqsave(&mchan->lock, irqflags);
338 list_move_tail(&mdesc->node, &mchan->queued);
343 spin_unlock_irqrestore(&mchan->lock, irqflags);
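
Lines 319-343 are tx_submit: under the lock the descriptor gets its cookie and migrates from prepared to queued; nothing reaches the hardware until device_issue_pending(). Sketch, assuming the core dma_cookie_assign() helper:

    static dma_cookie_t tx_submit_sketch(struct dma_async_tx_descriptor *txd)
    {
            struct hidma_chan *mchan = container_of(txd->chan,
                                                    struct hidma_chan, chan);
            struct hidma_desc *mdesc = container_of(txd,
                                                    struct hidma_desc, desc);
            unsigned long irqflags;
            dma_cookie_t cookie;

            spin_lock_irqsave(&mchan->lock, irqflags);
            cookie = dma_cookie_assign(txd);
            /* Submitted but not issued: sits here until issue_pending. */
            list_move_tail(&mdesc->node, &mchan->queued);
            spin_unlock_irqrestore(&mchan->lock, irqflags);

            return cookie;
    }
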
350 struct hidma_chan *mchan = to_hidma_chan(dmach);
351 struct hidma_dev *dmadev = mchan->dmadev;
358 if (mchan->allocated)
371 rc = hidma_ll_request(dmadev->lldev, mchan->dma_sig,
392 spin_lock_irqsave(&mchan->lock, irqflags);
393 list_splice_tail_init(&descs, &mchan->free);
394 mchan->allocated = true;
395 spin_unlock_irqrestore(&mchan->lock, irqflags);
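
Lines 350-395 are device_alloc_chan_resources, guarded by mchan->allocated so the pool is built only once. The descriptors are allocated and registered with the low-level layer (hidma_ll_request(), line 371) on a private list off-lock, then published onto free with a single splice. Sketch with the low-level registration reduced to a comment:

    static int alloc_chan_resources_sketch(struct hidma_chan *mchan,
                                           unsigned int nr_descs)
    {
            struct hidma_desc *mdesc, *next;
            unsigned long irqflags;
            unsigned int i;
            LIST_HEAD(descs);

            if (mchan->allocated)
                    return 0;       /* pool already built */

            /* Build the pool off-lock on a private list. */
            for (i = 0; i < nr_descs; i++) {
                    mdesc = kzalloc(sizeof(*mdesc), GFP_KERNEL);
                    if (!mdesc)
                            goto fail;
                    /* hidma_ll_request() registration elided */
                    list_add_tail(&mdesc->node, &descs);
            }

            /* Publish the whole pool with one lock acquisition. */
            spin_lock_irqsave(&mchan->lock, irqflags);
            list_splice_tail_init(&descs, &mchan->free);
            mchan->allocated = true;
            spin_unlock_irqrestore(&mchan->lock, irqflags);
            return 0;

    fail:
            list_for_each_entry_safe(mdesc, next, &descs, node) {
                    list_del(&mdesc->node);
                    kfree(mdesc);
            }
            return -ENOMEM;
    }
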
403 struct hidma_chan *mchan = to_hidma_chan(dmach);
405 struct hidma_dev *mdma = mchan->dmadev;
409 spin_lock_irqsave(&mchan->lock, irqflags);
410 if (!list_empty(&mchan->free)) {
411 mdesc = list_first_entry(&mchan->free, struct hidma_desc, node);
414 spin_unlock_irqrestore(&mchan->lock, irqflags);
425 spin_lock_irqsave(&mchan->lock, irqflags);
426 list_add_tail(&mdesc->node, &mchan->prepared);
427 spin_unlock_irqrestore(&mchan->lock, irqflags);
436 struct hidma_chan *mchan = to_hidma_chan(dmach);
438 struct hidma_dev *mdma = mchan->dmadev;
442 spin_lock_irqsave(&mchan->lock, irqflags);
443 if (!list_empty(&mchan->free)) {
444 mdesc = list_first_entry(&mchan->free, struct hidma_desc, node);
447 spin_unlock_irqrestore(&mchan->lock, irqflags);
458 spin_lock_irqsave(&mchan->lock, irqflags);
459 list_add_tail(&mdesc->node, &mchan->prepared);
460 spin_unlock_irqrestore(&mchan->lock, irqflags);
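
Lines 403-427 (memcpy) and 436-460 (memset) are the two prep callbacks, intentionally identical around the list handling: pop a descriptor off free under the lock, fill in the hardware request off-lock, park it on prepared, and return its dma_async_tx_descriptor. Returning NULL when free is empty is the dmaengine convention for "no descriptor available, retry later". Sketch of the shared skeleton:

    static struct dma_async_tx_descriptor *
    prep_desc_sketch(struct hidma_chan *mchan)
    {
            struct hidma_desc *mdesc = NULL;
            unsigned long irqflags;

            /* Borrow a free descriptor, if any. */
            spin_lock_irqsave(&mchan->lock, irqflags);
            if (!list_empty(&mchan->free)) {
                    mdesc = list_first_entry(&mchan->free,
                                             struct hidma_desc, node);
                    list_del(&mdesc->node);
            }
            spin_unlock_irqrestore(&mchan->lock, irqflags);

            if (!mdesc)
                    return NULL;

            /* Fill in src/dst/len (memcpy) or value/dst/len (memset). */

            spin_lock_irqsave(&mchan->lock, irqflags);
            list_add_tail(&mdesc->node, &mchan->prepared);
            spin_unlock_irqrestore(&mchan->lock, irqflags);

            return &mdesc->desc;
    }
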
467 struct hidma_chan *mchan = to_hidma_chan(chan);
468 struct hidma_dev *dmadev = to_hidma_dev(mchan->chan.device);
476 hidma_process_completed(mchan);
478 spin_lock_irqsave(&mchan->lock, irqflags);
479 mchan->last_success = 0;
480 list_splice_init(&mchan->active, &list);
481 list_splice_init(&mchan->prepared, &list);
482 list_splice_init(&mchan->completed, &list);
483 list_splice_init(&mchan->queued, &list);
484 spin_unlock_irqrestore(&mchan->lock, irqflags);
502 list_move(&mdesc->node, &mchan->free);
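
Lines 467-502 are the terminate path. After one last drain of anything already completed, every list that can hold a live descriptor (active, prepared, completed, queued) is captured onto a local list in a single critical section and last_success is reset; once the hardware is stopped (elided below), each captured descriptor goes back to free. The matches at 514-515 just below belong to the device_terminate_all wrapper around this routine. Sketch:

    static void terminate_sketch(struct hidma_chan *mchan)
    {
            struct hidma_desc *mdesc, *next;
            unsigned long irqflags;
            LIST_HEAD(list);

            /* Let already-finished work deliver its callbacks first. */
            process_completed_sketch(mchan);

            /* Capture every remaining descriptor atomically. */
            spin_lock_irqsave(&mchan->lock, irqflags);
            mchan->last_success = 0;
            list_splice_init(&mchan->active, &list);
            list_splice_init(&mchan->prepared, &list);
            list_splice_init(&mchan->completed, &list);
            list_splice_init(&mchan->queued, &list);
            spin_unlock_irqrestore(&mchan->lock, irqflags);

            /* hardware stop/reset of the low-level engine elided */

            /* Return everything to the free pool. */
            list_for_each_entry_safe(mdesc, next, &list, node)
                    list_move(&mdesc->node, &mchan->free);
    }
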
514 struct hidma_chan *mchan = to_hidma_chan(chan);
515 struct hidma_dev *dmadev = to_hidma_dev(mchan->chan.device);
532 struct hidma_chan *mchan = to_hidma_chan(dmach);
533 struct hidma_dev *mdma = mchan->dmadev;
541 spin_lock_irqsave(&mchan->lock, irqflags);
544 list_splice_tail_init(&mchan->free, &descs);
553 mchan->allocated = false;
554 spin_unlock_irqrestore(&mchan->lock, irqflags);
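
Lines 532-554 are device_free_chan_resources, the inverse of the allocation path: with the channel quiesced, free is spliced onto a temporary list and allocated is cleared in one critical section, after which each descriptor is unregistered and freed without the lock held. Sketch:

    static void free_chan_resources_sketch(struct hidma_chan *mchan)
    {
            struct hidma_desc *mdesc, *next;
            unsigned long irqflags;
            LIST_HEAD(descs);

            /* Reclaim the whole pool in one critical section. */
            spin_lock_irqsave(&mchan->lock, irqflags);
            list_splice_tail_init(&mchan->free, &descs);
            mchan->allocated = false;
            spin_unlock_irqrestore(&mchan->lock, irqflags);

            /* Tear the descriptors down off-lock. */
            list_for_each_entry_safe(mdesc, next, &descs, node) {
                    list_del(&mdesc->node);
                    /* hidma_ll_free() unregistration elided */
                    kfree(mdesc);
            }
    }
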
559 struct hidma_chan *mchan;
562 mchan = to_hidma_chan(chan);
563 dmadev = to_hidma_dev(mchan->chan.device);
564 if (!mchan->paused) {
568 mchan->paused = true;
577 struct hidma_chan *mchan;
581 mchan = to_hidma_chan(chan);
582 dmadev = to_hidma_dev(mchan->chan.device);
583 if (mchan->paused) {
587 mchan->paused = false;
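
Lines 559-587 close the section with device_pause and device_resume: each is a guarded toggle of mchan->paused around the low-level disable/enable call, so pausing an already-paused channel (or resuming an unpaused one) is a no-op. Sketch of the pair:

    static int pause_sketch(struct hidma_chan *mchan)
    {
            if (!mchan->paused) {
                    /* low-level engine disable elided */
                    mchan->paused = true;
            }
            return 0;
    }

    static int resume_sketch(struct hidma_chan *mchan)
    {
            if (mchan->paused) {
                    /* low-level engine enable elided; can fail in the driver */
                    mchan->paused = false;
            }
            return 0;
    }
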