Lines Matching defs:ddev

The hits below are the occurrences of the ddev member (the embedded struct dma_device) in the BCM2835 DMA engine driver, drivers/dma/bcm2835-dma.c; the leading number on each hit is the line number in that file.

42  * @ddev: DMA device
48 struct dma_device ddev;
190 return container_of(d, struct bcm2835_dmadev, ddev);
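
Taken together, the hits at lines 42, 48 and 190 show the usual dmaengine embedding pattern: struct dma_device is a member of the driver's private struct bcm2835_dmadev, and container_of() turns the struct dma_device pointer handed around by the framework back into the driver structure. A minimal sketch of that pattern, reconstructed from the lines above (the helper name and any members beyond ddev and zero_page are assumptions):

    #include <linux/dmaengine.h>        /* struct dma_device, dma_addr_t */

    struct bcm2835_dmadev {
        struct dma_device ddev;         /* line 48: embedded DMA engine device */
        dma_addr_t zero_page;           /* lines 852/937: DMA-mapped ZERO_PAGE(0) */
        /* remaining driver state omitted */
    };

    /* Line 190: recover the driver structure from the embedded dma_device. */
    static inline struct bcm2835_dmadev *to_bcm2835_dma_dev(struct dma_device *d)
    {
        return container_of(d, struct bcm2835_dmadev, ddev);
    }
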
822 c = devm_kzalloc(d->ddev.dev, sizeof(*c), GFP_KERNEL);
827 vchan_init(&c->vc, &d->ddev);
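
Lines 822 and 827 come from per-channel setup: each channel is allocated against ddev.dev with devm_kzalloc(), and vchan_init() registers its virt-dma channel on ddev, which links it into ddev.channels. A sketch of that step, assuming a wrapper struct bcm2835_chan around struct virt_dma_chan (the function name and error handling are reconstructed, not taken from the listing):

    #include "virt-dma.h"               /* struct virt_dma_chan, vchan_init() */

    struct bcm2835_chan {
        struct virt_dma_chan vc;
        /* per-channel register base, IRQ, DREQ, ... omitted */
    };

    static int bcm2835_dma_chan_init(struct bcm2835_dmadev *d)
    {
        struct bcm2835_chan *c;

        c = devm_kzalloc(d->ddev.dev, sizeof(*c), GFP_KERNEL);  /* line 822 */
        if (!c)
            return -ENOMEM;

        /* The in-tree driver also sets c->vc.desc_free before this call. */
        vchan_init(&c->vc, &d->ddev);                           /* line 827 */
        return 0;
    }
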
846 list_for_each_entry_safe(c, next, &od->ddev.channels,
852 dma_unmap_page_attrs(od->ddev.dev, od->zero_page, PAGE_SIZE,
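
Lines 846 and 852 are from teardown: the free path walks ddev.channels (the list vchan_init() populated) and unmaps the zero page that was mapped at probe time (lines 937-940). A sketch of that cleanup; the function name and the direction/attrs arguments are assumptions mirroring the truncated map call at line 937:

    static void bcm2835_dma_free(struct bcm2835_dmadev *od)
    {
        struct bcm2835_chan *c, *next;

        list_for_each_entry_safe(c, next, &od->ddev.channels,
                                 vc.chan.device_node) {
            /* per-channel IRQ/tasklet teardown omitted */
        }

        dma_unmap_page_attrs(od->ddev.dev, od->zero_page, PAGE_SIZE,
                             DMA_TO_DEVICE, DMA_ATTR_SKIP_CPU_SYNC);
    }
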
868 chan = dma_get_any_slave_channel(&d->ddev);
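
Line 868 is from the OF translation callback: when a consumer requests a channel via the device tree, any free slave channel is taken from ddev and handed out. A minimal sketch, assuming the standard of_dma xlate signature; the step that records the request line (DREQ) from spec->args[0] is not visible in the listing and is only indicated by a comment:

    #include <linux/of_dma.h>

    static struct dma_chan *bcm2835_dma_xlate(struct of_phandle_args *spec,
                                              struct of_dma *ofdma)
    {
        struct bcm2835_dmadev *d = ofdma->of_dma_data;
        struct dma_chan *chan;

        chan = dma_get_any_slave_channel(&d->ddev);             /* line 868 */
        if (!chan)
            return NULL;

        /* the real driver stores spec->args[0] (the peripheral DREQ) in the
         * channel before returning it */
        return chan;
    }
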
912 dma_cap_set(DMA_SLAVE, od->ddev.cap_mask);
913 dma_cap_set(DMA_PRIVATE, od->ddev.cap_mask);
914 dma_cap_set(DMA_CYCLIC, od->ddev.cap_mask);
915 dma_cap_set(DMA_MEMCPY, od->ddev.cap_mask);
916 od->ddev.device_alloc_chan_resources = bcm2835_dma_alloc_chan_resources;
917 od->ddev.device_free_chan_resources = bcm2835_dma_free_chan_resources;
918 od->ddev.device_tx_status = bcm2835_dma_tx_status;
919 od->ddev.device_issue_pending = bcm2835_dma_issue_pending;
920 od->ddev.device_prep_dma_cyclic = bcm2835_dma_prep_dma_cyclic;
921 od->ddev.device_prep_slave_sg = bcm2835_dma_prep_slave_sg;
922 od->ddev.device_prep_dma_memcpy = bcm2835_dma_prep_dma_memcpy;
923 od->ddev.device_config = bcm2835_dma_slave_config;
924 od->ddev.device_terminate_all = bcm2835_dma_terminate_all;
925 od->ddev.device_synchronize = bcm2835_dma_synchronize;
926 od->ddev.src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES);
927 od->ddev.dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES);
928 od->ddev.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV) |
930 od->ddev.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;
931 od->ddev.descriptor_reuse = true;
932 od->ddev.dev = &pdev->dev;
933 INIT_LIST_HEAD(&od->ddev.channels);
937 od->zero_page = dma_map_page_attrs(od->ddev.dev, ZERO_PAGE(0), 0,
940 if (dma_mapping_error(od->ddev.dev, od->zero_page)) {
1008 rc = dma_async_device_register(&od->ddev);
1028 dma_async_device_unregister(&od->ddev);
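
Lines 912-940 and 1008 all sit in probe: capabilities and callbacks are filled in on ddev, the channel list is initialized, the zero page is mapped, and only then is the device registered with the dmaengine core; line 1028 is the matching unregister in remove. A condensed sketch of that ordering (the continuation of line 928 and the zero-page mapping flags are assumptions, and most error handling is omitted):

    static int bcm2835_dma_probe(struct platform_device *pdev)
    {
        struct bcm2835_dmadev *od;
        int rc;

        od = devm_kzalloc(&pdev->dev, sizeof(*od), GFP_KERNEL);
        if (!od)
            return -ENOMEM;

        /* lines 912-925: advertise capabilities and wire up the callbacks */
        dma_cap_set(DMA_SLAVE, od->ddev.cap_mask);
        dma_cap_set(DMA_PRIVATE, od->ddev.cap_mask);
        dma_cap_set(DMA_CYCLIC, od->ddev.cap_mask);
        dma_cap_set(DMA_MEMCPY, od->ddev.cap_mask);
        od->ddev.device_alloc_chan_resources = bcm2835_dma_alloc_chan_resources;
        /* ... the remaining device_* callbacks exactly as listed above ... */

        /* lines 926-933: transfer parameters and bookkeeping */
        od->ddev.src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES);
        od->ddev.dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES);
        od->ddev.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV) |
                              BIT(DMA_MEM_TO_MEM);      /* continuation assumed */
        od->ddev.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;
        od->ddev.descriptor_reuse = true;
        od->ddev.dev = &pdev->dev;
        INIT_LIST_HEAD(&od->ddev.channels);

        /* lines 937-940: map ZERO_PAGE(0) once for the whole device */
        od->zero_page = dma_map_page_attrs(od->ddev.dev, ZERO_PAGE(0), 0,
                                           PAGE_SIZE, DMA_TO_DEVICE,
                                           DMA_ATTR_SKIP_CPU_SYNC);
        if (dma_mapping_error(od->ddev.dev, od->zero_page))
            return -ENOMEM;

        /* per-channel init (lines 822/827) and IRQ wiring omitted */

        rc = dma_async_device_register(&od->ddev);              /* line 1008 */
        if (rc)
            return rc;

        /* remove() later calls dma_async_device_unregister(&od->ddev)
         * (line 1028) plus the channel/zero-page cleanup sketched earlier. */
        return 0;
    }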