Lines matching refs:dma_device (Atmel at_hdmac DMA controller driver; a sketch of the dma_device registration pattern these references trace follows the listing)

330  * @dma_device: dmaengine dma_device object members
340 struct dma_device dma_device;
368 static inline struct at_dma *to_at_dma(struct dma_device *ddev)
370 return container_of(ddev, struct at_dma, dma_device);
832 dev_vdbg(atdma->dma_device.dev,
836 for (i = 0; i < atdma->dma_device.chancnt; i++) {
1738 BUG_ON(!atslave->dma_dev || atslave->dma_dev != atdma->dma_device.dev);
1965 atdma->dma_device.cap_mask = plat_dat->cap_mask;
2009 INIT_LIST_HEAD(&atdma->dma_device.channels);
2021 vchan_init(&atchan->vc, &atdma->dma_device);
2026 atdma->dma_device.device_alloc_chan_resources = atc_alloc_chan_resources;
2027 atdma->dma_device.device_free_chan_resources = atc_free_chan_resources;
2028 atdma->dma_device.device_tx_status = atc_tx_status;
2029 atdma->dma_device.device_issue_pending = atc_issue_pending;
2030 atdma->dma_device.dev = &pdev->dev;
2033 if (dma_has_cap(DMA_INTERLEAVE, atdma->dma_device.cap_mask))
2034 atdma->dma_device.device_prep_interleaved_dma = atc_prep_dma_interleaved;
2036 if (dma_has_cap(DMA_MEMCPY, atdma->dma_device.cap_mask))
2037 atdma->dma_device.device_prep_dma_memcpy = atc_prep_dma_memcpy;
2039 if (dma_has_cap(DMA_MEMSET, atdma->dma_device.cap_mask)) {
2040 atdma->dma_device.device_prep_dma_memset = atc_prep_dma_memset;
2041 atdma->dma_device.device_prep_dma_memset_sg = atc_prep_dma_memset_sg;
2042 atdma->dma_device.fill_align = DMAENGINE_ALIGN_4_BYTES;
2045 if (dma_has_cap(DMA_SLAVE, atdma->dma_device.cap_mask)) {
2046 atdma->dma_device.device_prep_slave_sg = atc_prep_slave_sg;
2048 dma_cap_set(DMA_CYCLIC, atdma->dma_device.cap_mask);
2049 atdma->dma_device.device_prep_dma_cyclic = atc_prep_dma_cyclic;
2050 atdma->dma_device.device_config = atc_config;
2051 atdma->dma_device.device_pause = atc_pause;
2052 atdma->dma_device.device_resume = atc_resume;
2053 atdma->dma_device.device_terminate_all = atc_terminate_all;
2054 atdma->dma_device.src_addr_widths = ATC_DMA_BUSWIDTHS;
2055 atdma->dma_device.dst_addr_widths = ATC_DMA_BUSWIDTHS;
2056 atdma->dma_device.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV);
2057 atdma->dma_device.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;
2063 dma_has_cap(DMA_MEMCPY, atdma->dma_device.cap_mask) ? "cpy " : "",
2064 dma_has_cap(DMA_MEMSET, atdma->dma_device.cap_mask) ? "set " : "",
2065 dma_has_cap(DMA_SLAVE, atdma->dma_device.cap_mask) ? "slave " : "",
2068 err = dma_async_device_register(&atdma->dma_device);
2091 dma_async_device_unregister(&atdma->dma_device);
2111 dma_async_device_unregister(&atdma->dma_device);
2117 list_for_each_entry_safe(chan, _chan, &atdma->dma_device.channels,
2142 list_for_each_entry_safe(chan, _chan, &atdma->dma_device.channels,
2177 list_for_each_entry_safe(chan, _chan, &atdma->dma_device.channels,
2227 list_for_each_entry_safe(chan, _chan, &atdma->dma_device.channels,
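Taken together, these references trace the usual dmaengine provider pattern: struct at_dma embeds a struct dma_device (lines 330-370), the probe path fills in the capability mask, callbacks and transfer constraints (lines 2009-2057) before calling dma_async_device_register() (line 2068), and the teardown and suspend/resume paths walk dma_device.channels and call dma_async_device_unregister() (lines 2091-2227). Below is a minimal sketch of that pattern, not the at_hdmac code itself: every "my_*" name is a hypothetical stand-in for the driver's atc_* functions, while the struct dma_device fields and dmaengine calls are the real API.

/*
 * Minimal sketch, assuming a platform-device based controller driver.
 * Only the dmaengine structures and calls are real; the my_* names are
 * placeholders for the driver's atc_* implementations.
 */
#include <linux/dmaengine.h>
#include <linux/platform_device.h>

struct my_dma {
	struct dma_device dma_device;	/* embedded, as in struct at_dma */
	/* ... registers, clocks, per-channel state ... */
};

/* Same idea as to_at_dma() at line 368: map the embedded member back
 * to the wrapper without any extra lookup. */
static inline struct my_dma *to_my_dma(struct dma_device *ddev)
{
	return container_of(ddev, struct my_dma, dma_device);
}

/* Placeholder channel operations; the driver's atc_* equivalents
 * allocate descriptors, report residue and kick the hardware. */
static int my_alloc_chan_resources(struct dma_chan *chan)
{
	return 1;	/* number of descriptors made available; stub */
}

static void my_free_chan_resources(struct dma_chan *chan)
{
}

static enum dma_status my_tx_status(struct dma_chan *chan, dma_cookie_t cookie,
				    struct dma_tx_state *txstate)
{
	return DMA_COMPLETE;	/* stub; real code reports progress/residue */
}

static void my_issue_pending(struct dma_chan *chan)
{
}

static struct dma_async_tx_descriptor *
my_prep_dma_memcpy(struct dma_chan *chan, dma_addr_t dst, dma_addr_t src,
		   size_t len, unsigned long flags)
{
	return NULL;	/* real driver builds and returns a descriptor here */
}

static int my_dma_register(struct platform_device *pdev, struct my_dma *mydma)
{
	struct dma_device *dd = &mydma->dma_device;

	/* Channel list that the per-channel init (vchan_init() in the
	 * listing) hangs channels off before registration. */
	INIT_LIST_HEAD(&dd->channels);

	/* The four mandatory callbacks plus the backing struct device. */
	dd->dev = &pdev->dev;
	dd->device_alloc_chan_resources = my_alloc_chan_resources;
	dd->device_free_chan_resources = my_free_chan_resources;
	dd->device_tx_status = my_tx_status;
	dd->device_issue_pending = my_issue_pending;

	/* Each advertised capability gets a cap bit and a matching prep op.
	 * A DMA_SLAVE/DMA_CYCLIC controller additionally sets
	 * device_prep_slave_sg, device_prep_dma_cyclic, device_config,
	 * device_pause/resume/terminate_all and the src/dst_addr_widths,
	 * directions and residue_granularity fields, as lines 2045-2057 do. */
	dma_cap_set(DMA_MEMCPY, dd->cap_mask);
	dd->device_prep_dma_memcpy = my_prep_dma_memcpy;

	/* Publish the controller to dmaengine clients. */
	return dma_async_device_register(dd);
}

static void my_dma_unregister(struct my_dma *mydma)
{
	/* Mirrors the teardown at lines 2091/2111: unregister the
	 * dma_device before freeing the controller state. */
	dma_async_device_unregister(&mydma->dma_device);
}

Because dma_device is embedded rather than referenced through a pointer, any struct dma_device * handed back by the dmaengine core can be converted to the driver's private state with container_of(), which is exactly what to_at_dma() does at lines 368-370.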