Lines Matching defs:dmatx

272 struct pl011_dmatx_data dmatx;
435 uap->dmatx.chan = chan;
438 dma_chan_name(uap->dmatx.chan));
526 if (uap->dmatx.chan)
527 dma_release_channel(uap->dmatx.chan);
543 struct pl011_dmatx_data *dmatx = &uap->dmatx;
548 if (uap->dmatx.queued)
549 dma_unmap_single(dmatx->chan->device->dev, dmatx->dma,
550 dmatx->len, DMA_TO_DEVICE);
567 uap->dmatx.queued = false;
592 struct pl011_dmatx_data *dmatx = &uap->dmatx;
593 struct dma_chan *chan = dmatx->chan;
607 uap->dmatx.queued = false;
622 memcpy(&dmatx->buf[0], &xmit->buf[xmit->tail], count);
631 memcpy(&dmatx->buf[0], &xmit->buf[xmit->tail], first);
633 memcpy(&dmatx->buf[first], &xmit->buf[0], second);
636 dmatx->len = count;
637 dmatx->dma = dma_map_single(dma_dev->dev, dmatx->buf, count,
639 if (dmatx->dma == DMA_MAPPING_ERROR) {
640 uap->dmatx.queued = false;
645 desc = dmaengine_prep_slave_single(chan, dmatx->dma, dmatx->len, DMA_MEM_TO_DEV,
648 dma_unmap_single(dma_dev->dev, dmatx->dma, dmatx->len, DMA_TO_DEVICE);
649 uap->dmatx.queued = false;
670 uap->dmatx.queued = true;
702 if (uap->dmatx.queued) {
728 if (uap->dmatx.queued) {
753 if (!uap->dmatx.queued) {
808 dmaengine_terminate_async(uap->dmatx.chan);
810 if (uap->dmatx.queued) {
811 dma_unmap_single(uap->dmatx.chan->device->dev, uap->dmatx.dma,
812 uap->dmatx.len, DMA_TO_DEVICE);
813 uap->dmatx.queued = false;
1111 if (!uap->dmatx.chan)
1114 uap->dmatx.buf = kmalloc(PL011_DMA_BUFFER_SIZE, GFP_KERNEL | __GFP_DMA);
1115 if (!uap->dmatx.buf) {
1121 uap->dmatx.len = PL011_DMA_BUFFER_SIZE;
1196 dmaengine_terminate_all(uap->dmatx.chan);
1197 if (uap->dmatx.queued) {
1198 dma_unmap_single(uap->dmatx.chan->device->dev,
1199 uap->dmatx.dma, uap->dmatx.len,
1201 uap->dmatx.queued = false;
1204 kfree(uap->dmatx.buf);
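
The matched lines above cover the whole life of the TX bounce buffer: allocating it at DMA startup (kmalloc of PL011_DMA_BUFFER_SIZE), copying bytes out of the circular buffer, mapping and queueing a mem-to-dev transfer, unmapping in the completion callback, and terminating plus unmapping on flush/shutdown before kfree. Below is a minimal, hedged sketch of that generic dmaengine pattern, not the driver's own code: names such as my_dmatx, my_dmatx_queue and MY_DMA_BUFFER_SIZE are illustrative stand-ins, while the dma_map_single/dmaengine_prep_slave_single/dmaengine_submit calls are the real kernel APIs seen in the listing.

/*
 * Sketch of the TX bounce-buffer lifecycle the matches above come from:
 * map the bounce buffer for DMA, queue a mem-to-dev transfer, and unmap
 * it again in the completion callback.  Structure and function names here
 * (my_dmatx, my_dmatx_queue, my_dmatx_callback, MY_DMA_BUFFER_SIZE) are
 * hypothetical; only the dmaengine/dma-mapping calls mirror the driver.
 */
#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/types.h>

#define MY_DMA_BUFFER_SIZE	1024	/* stand-in for PL011_DMA_BUFFER_SIZE */

struct my_dmatx {
	struct dma_chan	*chan;		/* TX slave channel */
	dma_addr_t	dma;		/* mapped address of buf */
	void		*buf;		/* bounce buffer (kmalloc'd at startup) */
	size_t		len;		/* bytes queued for this transfer */
	bool		queued;		/* a descriptor is in flight */
};

/* Completion callback: drop the mapping so the buffer can be refilled. */
static void my_dmatx_callback(void *data)
{
	struct my_dmatx *dmatx = data;

	if (dmatx->queued)
		dma_unmap_single(dmatx->chan->device->dev, dmatx->dma,
				 dmatx->len, DMA_TO_DEVICE);
	dmatx->queued = false;
}

/* Queue one chunk of already-copied data (count bytes sitting in dmatx->buf). */
static int my_dmatx_queue(struct my_dmatx *dmatx, size_t count)
{
	struct dma_chan *chan = dmatx->chan;
	struct device *dev = chan->device->dev;
	struct dma_async_tx_descriptor *desc;

	dmatx->len = count;
	dmatx->dma = dma_map_single(dev, dmatx->buf, count, DMA_TO_DEVICE);
	if (dmatx->dma == DMA_MAPPING_ERROR)
		return -EBUSY;

	desc = dmaengine_prep_slave_single(chan, dmatx->dma, dmatx->len,
					   DMA_MEM_TO_DEV,
					   DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!desc) {
		/* Failed to get a descriptor: undo the mapping, try PIO later. */
		dma_unmap_single(dev, dmatx->dma, dmatx->len, DMA_TO_DEVICE);
		return -EBUSY;
	}

	desc->callback = my_dmatx_callback;
	desc->callback_param = dmatx;
	dmatx->queued = true;

	dmaengine_submit(desc);
	dma_async_issue_pending(chan);
	return 0;
}

Startup and shutdown follow the same shape as the matched lines around 1111-1204: allocate buf with kmalloc(MY_DMA_BUFFER_SIZE, GFP_KERNEL | __GFP_DMA) once the channel exists, and on teardown call dmaengine_terminate_all() on the channel, unmap if queued is still set, then kfree(buf).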