Lines matching defs:mchan — occurrences of the per-channel mchan pointer, apparently in the Qualcomm HIDMA DMA engine driver (hidma.c in the Linux kernel); each fragment is prefixed with its line number in the source file.
108 static void hidma_process_completed(struct hidma_chan *mchan)
110 struct dma_device *ddev = mchan->chan.device;
122 spin_lock_irqsave(&mchan->lock, irqflags);
123 list_splice_tail_init(&mchan->completed, &list);
124 spin_unlock_irqrestore(&mchan->lock, irqflags);
137 spin_lock_irqsave(&mchan->lock, irqflags);
139 mchan->last_success = last_cookie;
146 spin_unlock_irqrestore(&mchan->lock, irqflags);
152 spin_lock_irqsave(&mchan->lock, irqflags);
153 list_move(&mdesc->node, &mchan->free);
154 spin_unlock_irqrestore(&mchan->lock, irqflags);
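
Lines 108-154 show the driver's drain routine: hidma_process_completed() detaches the whole completed list in one short critical section, walks the private copy with the lock dropped (this is where client callbacks run and where last_success is recorded, line 139), and re-takes the lock only to recycle each descriptor. A minimal sketch of that pattern follows; my_chan/my_desc are hypothetical stand-ins for hidma_chan/hidma_desc, trimmed to the fields these fragments touch, and the callback and last_success bookkeeping are elided:

#include <linux/list.h>
#include <linux/spinlock.h>
#include <linux/dmaengine.h>

/* Hypothetical placeholders for hidma_chan/hidma_desc. */
struct my_desc {
	struct dma_async_tx_descriptor txd;
	struct list_head node;
};

struct my_chan {
	struct dma_chan chan;
	spinlock_t lock;
	bool paused;
	bool allocated;
	dma_cookie_t last_success;
	struct my_desc *running;
	struct list_head free;
	struct list_head prepared;
	struct list_head queued;
	struct list_head active;
	struct list_head completed;
};

static void my_process_completed(struct my_chan *c)
{
	struct my_desc *d, *next;
	unsigned long flags;
	LIST_HEAD(list);

	/* Detach the whole completed list in one short critical section. */
	spin_lock_irqsave(&c->lock, flags);
	list_splice_tail_init(&c->completed, &list);
	spin_unlock_irqrestore(&c->lock, flags);

	list_for_each_entry_safe(d, next, &list, node) {
		/* ... client completion callback runs here, off-lock ... */

		/* Re-take the lock only to recycle the descriptor. */
		spin_lock_irqsave(&c->lock, flags);
		list_move(&d->node, &c->free);
		spin_unlock_irqrestore(&c->lock, flags);
	}
}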
168 struct hidma_chan *mchan = to_hidma_chan(mdesc->desc.chan);
169 struct dma_device *ddev = mchan->chan.device;
174 spin_lock_irqsave(&mchan->lock, irqflags);
177 list_move_tail(&mdesc->node, &mchan->completed);
181 mchan->running = list_first_entry(&mchan->active,
184 spin_unlock_irqrestore(&mchan->lock, irqflags);
186 hidma_process_completed(mchan);
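
Lines 168-186 look like the per-descriptor completion path: the finished descriptor moves from active to completed, running is advanced to the next active entry, and the drain routine above runs outside the lock. A sketch reusing the placeholders from the previous block (the NULL check for an empty active list is my simplification):

/* Completion path: the hardware has retired descriptor d. */
static void my_desc_done(struct my_chan *c, struct my_desc *d)
{
	unsigned long flags;

	spin_lock_irqsave(&c->lock, flags);
	list_move_tail(&d->node, &c->completed);

	/* Advance "running" to the next in-flight descriptor, if any. */
	c->running = list_empty(&c->active) ? NULL :
		     list_first_entry(&c->active, struct my_desc, node);
	spin_unlock_irqrestore(&c->lock, flags);

	my_process_completed(c);
}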
196 struct hidma_chan *mchan;
199 mchan = devm_kzalloc(dmadev->ddev.dev, sizeof(*mchan), GFP_KERNEL);
200 if (!mchan)
204 mchan->dma_sig = dma_sig;
205 mchan->dmadev = dmadev;
206 mchan->chan.device = ddev;
207 dma_cookie_init(&mchan->chan);
209 INIT_LIST_HEAD(&mchan->free);
210 INIT_LIST_HEAD(&mchan->prepared);
211 INIT_LIST_HEAD(&mchan->active);
212 INIT_LIST_HEAD(&mchan->completed);
213 INIT_LIST_HEAD(&mchan->queued);
215 spin_lock_init(&mchan->lock);
216 list_add_tail(&mchan->chan.device_node, &ddev->channels);
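
Lines 196-216 are channel setup. The five lists implement the descriptor lifecycle the other fragments rotate through: free -> prepared -> queued -> active -> completed -> free, all guarded by the single per-channel spinlock initialized here. The equivalent setup on the placeholder type (dma_cookie_init() is the driver-internal helper from drivers/dma/dmaengine.h):

static void my_chan_setup(struct my_chan *c)
{
	spin_lock_init(&c->lock);
	dma_cookie_init(&c->chan);	/* start the dmaengine cookie counter */

	INIT_LIST_HEAD(&c->free);
	INIT_LIST_HEAD(&c->prepared);
	INIT_LIST_HEAD(&c->queued);
	INIT_LIST_HEAD(&c->active);
	INIT_LIST_HEAD(&c->completed);
	c->running = NULL;
}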
230 struct hidma_chan *mchan = to_hidma_chan(dmach);
231 struct hidma_dev *dmadev = mchan->dmadev;
236 spin_lock_irqsave(&mchan->lock, flags);
237 list_for_each_entry_safe(qdesc, next, &mchan->queued, node) {
239 list_move_tail(&qdesc->node, &mchan->active);
242 if (!mchan->running) {
243 struct hidma_desc *desc = list_first_entry(&mchan->active,
246 mchan->running = desc;
248 spin_unlock_irqrestore(&mchan->lock, flags);
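
Lines 230-248 match the dmaengine issue_pending step: everything sitting on queued is handed to the hardware and moved to active, and running is seeded if nothing was in flight yet. A sketch with the actual hardware hand-off reduced to a comment:

static void my_issue_pending(struct my_chan *c)
{
	struct my_desc *d, *next;
	unsigned long flags;

	spin_lock_irqsave(&c->lock, flags);
	list_for_each_entry_safe(d, next, &c->queued, node) {
		/* ... hand d to the hardware here ... */
		list_move_tail(&d->node, &c->active);
	}

	/* Seed "running" if nothing was in flight before this call. */
	if (!c->running && !list_empty(&c->active))
		c->running = list_first_entry(&c->active,
					      struct my_desc, node);
	spin_unlock_irqrestore(&c->lock, flags);
}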
275 struct hidma_chan *mchan = to_hidma_chan(dmach);
282 is_success = hidma_txn_is_success(cookie, mchan->last_success,
287 if (mchan->paused && (ret == DMA_IN_PROGRESS)) {
291 spin_lock_irqsave(&mchan->lock, flags);
292 if (mchan->running)
293 runcookie = mchan->running->desc.cookie;
300 spin_unlock_irqrestore(&mchan->lock, flags);
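
Lines 275-300 belong to the tx_status query. On DMA_COMPLETE the driver cross-checks the cookie against last_success (line 282) and can downgrade the answer to DMA_ERROR; on a paused channel it reports DMA_PAUSED only for transactions the hardware has not reached yet, by comparing against the running descriptor's cookie. A sketch of the paused branch (dma_cookie_status() comes from the driver-internal drivers/dma/dmaengine.h; the DMA_ERROR downgrade is elided):

static enum dma_status my_tx_status(struct dma_chan *dmach,
				    dma_cookie_t cookie,
				    struct dma_tx_state *txstate)
{
	struct my_chan *c = container_of(dmach, struct my_chan, chan);
	enum dma_status ret = dma_cookie_status(dmach, cookie, txstate);
	unsigned long flags;

	if (c->paused && ret == DMA_IN_PROGRESS) {
		dma_cookie_t runcookie = -EINVAL;

		spin_lock_irqsave(&c->lock, flags);
		if (c->running)
			runcookie = c->running->txd.cookie;
		spin_unlock_irqrestore(&c->lock, flags);

		/* Only transactions the hardware has not reached yet
		 * are genuinely held up by the pause. */
		if (runcookie - cookie <= 0)
			ret = DMA_PAUSED;
	}
	return ret;
}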
312 struct hidma_chan *mchan = to_hidma_chan(txd->chan);
313 struct hidma_dev *dmadev = mchan->dmadev;
328 spin_lock_irqsave(&mchan->lock, irqflags);
331 list_move_tail(&mdesc->node, &mchan->queued);
336 spin_unlock_irqrestore(&mchan->lock, irqflags);
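
Lines 312-336 are the tx_submit hook: under the lock, the descriptor receives its cookie and moves from prepared to queued, where issue_pending will later pick it up. A sketch (the real code also verifies the low-level channel is enabled before accepting the submit; that check is elided, and dma_cookie_assign() is the driver-internal helper):

static dma_cookie_t my_tx_submit(struct dma_async_tx_descriptor *txd)
{
	struct my_chan *c = container_of(txd->chan, struct my_chan, chan);
	struct my_desc *d = container_of(txd, struct my_desc, txd);
	unsigned long flags;
	dma_cookie_t cookie;

	spin_lock_irqsave(&c->lock, flags);
	cookie = dma_cookie_assign(txd);
	list_move_tail(&d->node, &c->queued);	/* prepared -> queued */
	spin_unlock_irqrestore(&c->lock, flags);

	return cookie;
}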
343 struct hidma_chan *mchan = to_hidma_chan(dmach);
344 struct hidma_dev *dmadev = mchan->dmadev;
351 if (mchan->allocated)
364 rc = hidma_ll_request(dmadev->lldev, mchan->dma_sig,
385 spin_lock_irqsave(&mchan->lock, irqflags);
386 list_splice_tail_init(&descs, &mchan->free);
387 mchan->allocated = true;
388 spin_unlock_irqrestore(&mchan->lock, irqflags);
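
Lines 343-388 allocate channel resources: the descriptor pool is built off-lock (each descriptor is also registered with the low-level layer via hidma_ll_request(), line 364), then published to the free list with a single splice while allocated is set. A sketch with a hypothetical pool size and the low-level registration reduced to a comment (kzalloc() needs linux/slab.h):

#define MY_NR_DESCS	16	/* hypothetical pool size */

static int my_alloc_chan_resources(struct dma_chan *dmach)
{
	struct my_chan *c = container_of(dmach, struct my_chan, chan);
	unsigned long flags;
	LIST_HEAD(descs);
	unsigned int i;

	if (c->allocated)
		return 0;	/* already set up; keep the call idempotent */

	/* Build the pool off-lock... */
	for (i = 0; i < MY_NR_DESCS; i++) {
		struct my_desc *d = kzalloc(sizeof(*d), GFP_KERNEL);

		if (!d)
			break;	/* real code would unwind and fail */
		/* ... register d with the low-level layer here ... */
		list_add_tail(&d->node, &descs);
	}

	/* ...then publish it to the free list in a single splice. */
	spin_lock_irqsave(&c->lock, flags);
	list_splice_tail_init(&descs, &c->free);
	c->allocated = true;
	spin_unlock_irqrestore(&c->lock, flags);

	return i;
}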
396 struct hidma_chan *mchan = to_hidma_chan(dmach);
398 struct hidma_dev *mdma = mchan->dmadev;
402 spin_lock_irqsave(&mchan->lock, irqflags);
403 if (!list_empty(&mchan->free)) {
404 mdesc = list_first_entry(&mchan->free, struct hidma_desc, node);
407 spin_unlock_irqrestore(&mchan->lock, irqflags);
418 spin_lock_irqsave(&mchan->lock, irqflags);
419 list_add_tail(&mdesc->node, &mchan->prepared);
420 spin_unlock_irqrestore(&mchan->lock, irqflags);
429 struct hidma_chan *mchan = to_hidma_chan(dmach);
431 struct hidma_dev *mdma = mchan->dmadev;
436 spin_lock_irqsave(&mchan->lock, irqflags);
437 if (!list_empty(&mchan->free)) {
438 mdesc = list_first_entry(&mchan->free, struct hidma_desc, node);
441 spin_unlock_irqrestore(&mchan->lock, irqflags);
462 spin_lock_irqsave(&mchan->lock, irqflags);
463 list_add_tail(&mdesc->node, &mchan->prepared);
464 spin_unlock_irqrestore(&mchan->lock, irqflags);
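
The two prep paths (memcpy at lines 396-420, what looks like a second transfer type at 429-464) share one pattern: pop a recycled descriptor off the free list under the lock, program it off-lock, then park it on prepared until submit. A combined sketch:

static struct dma_async_tx_descriptor *
my_prep(struct dma_chan *dmach, size_t len /* other params elided */)
{
	struct my_chan *c = container_of(dmach, struct my_chan, chan);
	struct my_desc *d = NULL;
	unsigned long flags;

	/* Pop a recycled descriptor off the free list, if one exists. */
	spin_lock_irqsave(&c->lock, flags);
	if (!list_empty(&c->free)) {
		d = list_first_entry(&c->free, struct my_desc, node);
		list_del(&d->node);
	}
	spin_unlock_irqrestore(&c->lock, flags);

	if (!d)
		return NULL;

	/* ... program the hardware descriptor for len bytes here ... */

	/* Park it on "prepared" until the client calls tx_submit. */
	spin_lock_irqsave(&c->lock, flags);
	list_add_tail(&d->node, &c->prepared);
	spin_unlock_irqrestore(&c->lock, flags);

	return &d->txd;
}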
471 struct hidma_chan *mchan = to_hidma_chan(chan);
472 struct hidma_dev *dmadev = to_hidma_dev(mchan->chan.device);
480 hidma_process_completed(mchan);
482 spin_lock_irqsave(&mchan->lock, irqflags);
483 mchan->last_success = 0;
484 list_splice_init(&mchan->active, &list);
485 list_splice_init(&mchan->prepared, &list);
486 list_splice_init(&mchan->completed, &list);
487 list_splice_init(&mchan->queued, &list);
488 spin_unlock_irqrestore(&mchan->lock, irqflags);
506 list_move(&mdesc->node, &mchan->free);
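
Lines 471-506 are the terminate path: finished work is flushed first, then every remaining descriptor is stolen from all four in-flight lists in one critical section, last_success is reset so stale cookies cannot match, and the stolen descriptors are returned to the free pool. A sketch (the real code also quiesces the hardware and reports each stolen transaction as aborted; both are reduced to comments):

static void my_terminate(struct my_chan *c)
{
	struct my_desc *d, *next;
	unsigned long flags;
	LIST_HEAD(list);

	/* Flush anything that already finished. */
	my_process_completed(c);

	/* ... quiesce the hardware channel here ... */

	/* Steal every in-flight descriptor in one critical section and
	 * forget the last known-good cookie. */
	spin_lock_irqsave(&c->lock, flags);
	c->last_success = 0;
	list_splice_init(&c->active, &list);
	list_splice_init(&c->prepared, &list);
	list_splice_init(&c->completed, &list);
	list_splice_init(&c->queued, &list);
	spin_unlock_irqrestore(&c->lock, flags);

	/* Channel is quiet, so the final move back needs no lock. */
	list_for_each_entry_safe(d, next, &list, node) {
		/* ... report the transaction as aborted here ... */
		list_move(&d->node, &c->free);
	}
}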
518 struct hidma_chan *mchan = to_hidma_chan(chan);
519 struct hidma_dev *dmadev = to_hidma_dev(mchan->chan.device);
536 struct hidma_chan *mchan = to_hidma_chan(dmach);
537 struct hidma_dev *mdma = mchan->dmadev;
545 spin_lock_irqsave(&mchan->lock, irqflags);
548 list_splice_tail_init(&mchan->free, &descs);
557 mchan->allocated = false;
558 spin_unlock_irqrestore(&mchan->lock, irqflags);
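
Lines 536-558 free the channel resources, inverting the allocation above: the free pool is detached and allocated cleared in one critical section, then the memory is released outside the lock. A sketch (kfree() needs linux/slab.h):

static void my_free_chan_resources(struct dma_chan *dmach)
{
	struct my_chan *c = container_of(dmach, struct my_chan, chan);
	struct my_desc *d, *next;
	unsigned long flags;
	LIST_HEAD(descs);

	/* Detach the free pool and clear "allocated" atomically. */
	spin_lock_irqsave(&c->lock, flags);
	list_splice_tail_init(&c->free, &descs);
	c->allocated = false;
	spin_unlock_irqrestore(&c->lock, flags);

	/* Release the memory outside the lock. */
	list_for_each_entry_safe(d, next, &descs, node) {
		list_del(&d->node);
		kfree(d);
	}
}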
563 struct hidma_chan *mchan;
566 mchan = to_hidma_chan(chan);
567 dmadev = to_hidma_dev(mchan->chan.device);
568 if (!mchan->paused) {
572 mchan->paused = true;
581 struct hidma_chan *mchan;
585 mchan = to_hidma_chan(chan);
586 dmadev = to_hidma_dev(mchan->chan.device);
587 if (mchan->paused) {
591 mchan->paused = false;
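
Lines 563-591 are the pause/resume pair: each is guarded so it only acts on a state change, flipping the paused flag that tx_status consults above. A sketch with the hardware quiesce/re-enable reduced to comments:

static int my_pause(struct dma_chan *dmach)
{
	struct my_chan *c = container_of(dmach, struct my_chan, chan);

	if (!c->paused) {
		/* ... quiesce the hardware channel here ... */
		c->paused = true;
	}
	return 0;
}

static int my_resume(struct dma_chan *dmach)
{
	struct my_chan *c = container_of(dmach, struct my_chan, chan);

	if (c->paused) {
		/* ... re-enable the hardware channel here ... */
		c->paused = false;
	}
	return 0;
}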