Lines matching defs:device (definition search over the mlx4 memory-region code):
67 err = mlx4_mr_alloc(to_mdev(pd->device)->dev, to_mpd(pd)->pdn, 0,
72 err = mlx4_mr_enable(to_mdev(pd->device)->dev, &mr->mmr);
82 (void) mlx4_mr_free(to_mdev(pd->device)->dev, &mr->mmr);
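Lines 67-82 are the DMA-MR allocate/enable/unwind calls; to_mdev() and to_mpd() are container_of() conversions from the core ib_device/ib_pd handles back to the driver-private structs. A minimal sketch of those helpers (member names follow mlx4_ib convention and are illustrative, not verified against this tree):

	static inline struct mlx4_ib_dev *to_mdev(struct ib_device *ibdev)
	{
		return container_of(ibdev, struct mlx4_ib_dev, ib_dev);
	}

	static inline struct mlx4_ib_pd *to_mpd(struct ib_pd *ibpd)
	{
		return container_of(ibpd, struct mlx4_ib_pd, ibpd);
	}

Hence to_mdev(pd->device)->dev yields the low-level mlx4_dev that mlx4_mr_alloc(), mlx4_mr_enable() and mlx4_mr_free() expect.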
372 static struct ib_umem *mlx4_get_umem_mr(struct ib_device *device, u64 start,
403 return ib_umem_get(device, start, length, access_flags);
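Lines 372 and 403 are the head and tail of a thin wrapper over ib_umem_get(), which pins the user buffer for DMA. A hedged sketch of its shape; the real body applies driver-specific access-flag fixups before pinning, elided here:

	static struct ib_umem *mlx4_get_umem_mr(struct ib_device *device, u64 start,
						u64 length, int access_flags)
	{
		/* ... driver-specific access_flags adjustment elided ... */
		return ib_umem_get(device, start, length, access_flags);
	}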
410 struct mlx4_ib_dev *dev = to_mdev(pd->device);
420 mr->umem = mlx4_get_umem_mr(pd->device, start, length, access_flags);
447 (void) mlx4_mr_free(to_mdev(pd->device)->dev, &mr->mmr);
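Lines 410-447 belong to the user-MR registration path. A condensed sketch of the control flow they imply (function signature, error labels, and the placeholder in the middle are assumptions consistent with these lines, not the tree's exact code):

	static struct ib_mr *mlx4_ib_reg_user_mr(struct ib_pd *pd, u64 start,
						 u64 length, u64 virt_addr,
						 int access_flags,
						 struct ib_udata *udata)
	{
		struct mlx4_ib_dev *dev = to_mdev(pd->device);	/* line 410 */
		struct mlx4_ib_mr *mr;
		int err;

		mr = kzalloc(sizeof(*mr), GFP_KERNEL);
		if (!mr)
			return ERR_PTR(-ENOMEM);

		mr->umem = mlx4_get_umem_mr(pd->device, start, length,
					    access_flags);	/* line 420 */
		if (IS_ERR(mr->umem)) {
			err = PTR_ERR(mr->umem);
			goto err_free;
		}

		/* elided: mlx4_mr_alloc(), writing the page list, and
		 * mlx4_mr_enable(); each failure unwinds below */
		err = 0;
		if (err)
			goto err_mr;

		return &mr->ibmr;

	err_mr:
		/* tear down the HW MR first; line 447 spells this as
		 * to_mdev(pd->device)->dev, which equals dev->dev */
		(void) mlx4_mr_free(dev->dev, &mr->mmr);
		ib_umem_release(mr->umem);	/* then unpin the umem */
	err_free:
		kfree(mr);
		return ERR_PTR(err);
	}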
463 struct mlx4_ib_dev *dev = to_mdev(mr->device);
506 mmr->umem = mlx4_get_umem_mr(mr->device, start, length,
549 mlx4_alloc_priv_pages(struct ib_device *device,
568 mr->page_map = dma_map_single(device->dev.parent, mr->pages,
571 if (dma_mapping_error(device->dev.parent, mr->page_map)) {
587 struct ib_device *device = mr->ibmr.device;
589 dma_unmap_single(device->dev.parent, mr->page_map,
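Lines 549-589 pair a dma_map_single() at page-list setup with a matching dma_unmap_single() at teardown. A minimal sketch of that pattern, assuming (per the surrounding driver convention) that mr->pages comes from one zeroed kernel page and mr->page_map_size holds the mapped length:

	/* setup: map the page-list buffer for device reads */
	mr->pages = (__be64 *)get_zeroed_page(GFP_KERNEL);
	if (!mr->pages)
		return -ENOMEM;

	mr->page_map = dma_map_single(device->dev.parent, mr->pages,
				      mr->page_map_size, DMA_TO_DEVICE);
	if (dma_mapping_error(device->dev.parent, mr->page_map)) {
		free_page((unsigned long)mr->pages);
		return -ENOMEM;
	}

	/* teardown: unmap with the same device, handle, size and direction */
	dma_unmap_single(device->dev.parent, mr->page_map,
			 mr->page_map_size, DMA_TO_DEVICE);
	free_page((unsigned long)mr->pages);

Note that both calls go through device->dev.parent, i.e. the DMA-capable parent (PCI) device rather than the ib_device's own struct device.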
603 ret = mlx4_mr_free(to_mdev(ibmr->device)->dev, &mr->mmr);
615 struct mlx4_ib_dev *dev = to_mdev(ibmw->device);
640 mlx4_mw_free(to_mdev(ibmw->device)->dev, &mw->mmw);
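Lines 615-640 are the memory-window (MW) counterpart of the MR calls. A hedged sketch of the dealloc side, assuming to_mmw() is the usual container_of() conversion in the same style as to_mdev() above:

	static int mlx4_ib_dealloc_mw(struct ib_mw *ibmw)
	{
		struct mlx4_ib_mw *mw = to_mmw(ibmw);

		mlx4_mw_free(to_mdev(ibmw->device)->dev, &mw->mmw);
		return 0;
	}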
647 struct mlx4_ib_dev *dev = to_mdev(pd->device);
664 err = mlx4_alloc_priv_pages(pd->device, mr, max_num_sg);
679 mr->ibmr.device = pd->device;
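Line 679 looks redundant but is load-bearing: the page-list free helper reads the device back out of mr->ibmr.device (line 587), and on the alloc_mr error path nothing has populated that field yet, so the unwind must set it before freeing. A sketch of that unwind (label names are reconstructed from context, not quoted from the tree):

	err_free_pl:
		/* mlx4_free_priv_pages() dereferences mr->ibmr.device (line 587),
		 * so fill it in before calling the helper */
		mr->ibmr.device = pd->device;
		mlx4_free_priv_pages(mr);
	err_free_mr:
		(void) mlx4_mr_free(dev->dev, &mr->mmr);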
708 ib_dma_sync_single_for_cpu(ibmr->device, mr->page_map,
713 ib_dma_sync_single_for_device(ibmr->device, mr->page_map,
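Lines 708-713 bracket the page-list update in the map_mr_sg path: sync the mapping toward the CPU, let the core helper write the page array, then sync it back toward the device. A minimal sketch of that bracket; the ib_sg_to_pages()/mlx4_set_page() pair in the middle is an assumption about the elided body:

	ib_dma_sync_single_for_cpu(ibmr->device, mr->page_map,
				   mr->page_map_size, DMA_TO_DEVICE);

	rc = ib_sg_to_pages(ibmr, sg, sg_nents, sg_offset, mlx4_set_page);

	ib_dma_sync_single_for_device(ibmr->device, mr->page_map,
				      mr->page_map_size, DMA_TO_DEVICE);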