Lines Matching refs:hwdev (drivers/xen/swiotlb-xen.c)
277 xen_swiotlb_alloc_coherent(struct device *hwdev, size_t size,
303 ret = xen_alloc_coherent_pages(hwdev, size, dma_handle, flags, attrs);
308 if (hwdev && hwdev->coherent_dma_mask)
309 dma_mask = hwdev->coherent_dma_mask;
315 phys = dma_to_phys(hwdev, *dma_handle);
316 dev_addr = xen_phys_to_dma(hwdev, phys);
323 xen_free_coherent_pages(hwdev, size, ret, (dma_addr_t)phys, attrs);
326 *dma_handle = phys_to_dma(hwdev, *dma_handle);
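Taken together, the alloc_coherent hits show hwdev driving both the mask choice and every address translation. Below is a condensed sketch of the logic around lines 303-326, assuming a v5.9-era tree where dma_to_phys()/xen_phys_to_dma() take the device pointer; order, the reachability check, range_straddles_page_boundary() and xen_create_contiguous_region() are recalled from the surrounding context, not from the hits above:

        void *ret;
        phys_addr_t phys;
        dma_addr_t dev_addr;
        int order = get_order(size);
        u64 dma_mask = DMA_BIT_MASK(32);

        ret = xen_alloc_coherent_pages(hwdev, size, dma_handle, flags, attrs);
        if (!ret)
                return ret;

        if (hwdev && hwdev->coherent_dma_mask)
                dma_mask = hwdev->coherent_dma_mask;

        /* *dma_handle is a dma address; recover the pseudo-physical address
         * (virt_to_phys(ret) is not usable on ARM) and the Xen device address. */
        phys = dma_to_phys(hwdev, *dma_handle);
        dev_addr = xen_phys_to_dma(hwdev, phys);

        if (dev_addr + size - 1 <= dma_mask &&
            !range_straddles_page_boundary(phys, size)) {
                /* already reachable under the mask and machine-contiguous */
                *dma_handle = dev_addr;
        } else {
                /* swap for a machine-contiguous region below the mask, or bail out */
                if (xen_create_contiguous_region(phys, order, fls64(dma_mask),
                                                 dma_handle) != 0) {
                        xen_free_coherent_pages(hwdev, size, ret,
                                                (dma_addr_t)phys, attrs);
                        return NULL;
                }
                *dma_handle = phys_to_dma(hwdev, *dma_handle);
        }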
334 xen_swiotlb_free_coherent(struct device *hwdev, size_t size, void *vaddr,
342 if (hwdev && hwdev->coherent_dma_mask)
343 dma_mask = hwdev->coherent_dma_mask;
347 phys = xen_dma_to_phys(hwdev, dev_addr);
362 xen_free_coherent_pages(hwdev, size, vaddr, phys_to_dma(hwdev, phys),
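The free path mirrors this: it reads the same mask, translates dev_addr back to a pseudo-physical address, and hands a phys-derived dma address back to the allocator. A sketch of the logic around lines 342-362; the contiguous-region teardown is recalled from context (the actual code also checks the PageXenRemapped flag) rather than shown in the matches:

        phys_addr_t phys;
        int order = get_order(size);
        u64 dma_mask = DMA_BIT_MASK(32);

        if (hwdev && hwdev->coherent_dma_mask)
                dma_mask = hwdev->coherent_dma_mask;

        /* virt_to_phys(vaddr) is not usable on ARM, so translate from dev_addr */
        phys = xen_dma_to_phys(hwdev, dev_addr);

        /* if the buffer had been exchanged for a contiguous region, undo that */
        if (!WARN_ON(dev_addr + size - 1 > dma_mask ||
                     range_straddles_page_boundary(phys, size)))
                xen_destroy_contiguous_region(phys, order);

        xen_free_coherent_pages(hwdev, size, vaddr, phys_to_dma(hwdev, phys),
                                attrs);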
432 static void xen_swiotlb_unmap_page(struct device *hwdev, dma_addr_t dev_addr,
435 phys_addr_t paddr = xen_dma_to_phys(hwdev, dev_addr);
439 if (!dev_is_dma_coherent(hwdev) && !(attrs & DMA_ATTR_SKIP_CPU_SYNC)) {
440 if (pfn_valid(PFN_DOWN(dma_to_phys(hwdev, dev_addr))))
443 xen_dma_sync_for_cpu(hwdev, dev_addr, size, dir);
447 if (is_xen_swiotlb_buffer(hwdev, dev_addr))
448 swiotlb_tbl_unmap_single(hwdev, paddr, size, size, dir, attrs);
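The unmap_page hits assemble into a short flow: translate the handle, do CPU-side cache maintenance for non-coherent devices, then release the bounce slot if one was used. A sketch, assuming the v5.9-era code where the pfn_valid() branch (lines 440-441) falls through to arch_sync_dma_for_cpu() for pages local to the domain:

static void xen_swiotlb_unmap_page(struct device *hwdev, dma_addr_t dev_addr,
                                   size_t size, enum dma_data_direction dir,
                                   unsigned long attrs)
{
        phys_addr_t paddr = xen_dma_to_phys(hwdev, dev_addr);

        if (!dev_is_dma_coherent(hwdev) && !(attrs & DMA_ATTR_SKIP_CPU_SYNC)) {
                if (pfn_valid(PFN_DOWN(dma_to_phys(hwdev, dev_addr))))
                        /* page local to this domain */
                        arch_sync_dma_for_cpu(paddr, size, dir);
                else
                        /* foreign/grant-mapped page */
                        xen_dma_sync_for_cpu(hwdev, dev_addr, size, dir);
        }

        /* only buffers bounced through the Xen swiotlb need to be unmapped */
        if (is_xen_swiotlb_buffer(hwdev, dev_addr))
                swiotlb_tbl_unmap_single(hwdev, paddr, size, size, dir, attrs);
}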
490 xen_swiotlb_unmap_sg(struct device *hwdev, struct scatterlist *sgl, int nelems,
499 xen_swiotlb_unmap_page(hwdev, sg->dma_address, sg_dma_len(sg),
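Here hwdev is only passed through: the scatter-gather unmap walks the list and delegates each entry to xen_swiotlb_unmap_page(). A sketch of the loop around line 499:

static void
xen_swiotlb_unmap_sg(struct device *hwdev, struct scatterlist *sgl, int nelems,
                     enum dma_data_direction dir, unsigned long attrs)
{
        struct scatterlist *sg;
        int i;

        for_each_sg(sgl, sg, nelems, i)
                xen_swiotlb_unmap_page(hwdev, sg->dma_address, sg_dma_len(sg),
                                       dir, attrs);
}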
561 xen_swiotlb_dma_supported(struct device *hwdev, u64 mask)
563 return xen_virt_to_bus(hwdev, xen_io_tlb_end - 1) <= mask;
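The mask check needs hwdev only because the phys-to-dma translation is per-device: a device supports a mask when the last byte of the bounce buffer is still addressable under it. xen_virt_to_bus() is a thin wrapper, recalled from the same file rather than shown in the hits:

static inline dma_addr_t xen_virt_to_bus(struct device *dev, void *address)
{
        return xen_phys_to_dma(dev, virt_to_phys(address));
}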