Lines Matching defs:qdev
33 struct qxl_device *qdev;
36 qdev = to_qxl(bo->tbo.base.dev);
38 qxl_surface_evict(qdev, bo, false);
40 mutex_lock(&qdev->gem.mutex);
42 mutex_unlock(&qdev->gem.mutex);
106 int qxl_bo_create(struct qxl_device *qdev, unsigned long size,
124 r = drm_gem_object_init(&qdev->ddev, &bo->tbo.base, size);
141 r = ttm_bo_init(&qdev->mman.bdev, &bo->tbo, size, type,
146 dev_err(qdev->ddev.dev,
176 void *qxl_bo_kmap_atomic_page(struct qxl_device *qdev,
185 map = qdev->vram_mapping;
187 map = qdev->surface_mapping;
218 void qxl_bo_kunmap_atomic_page(struct qxl_device *qdev,
323 void qxl_bo_force_delete(struct qxl_device *qdev)
327 if (list_empty(&qdev->gem.objects))
329 dev_err(qdev->ddev.dev, "Userspace still has active objects !\n");
330 list_for_each_entry_safe(bo, n, &qdev->gem.objects, list) {
331 dev_err(qdev->ddev.dev, "%p %p %lu %lu force free\n",
334 mutex_lock(&qdev->gem.mutex);
336 mutex_unlock(&qdev->gem.mutex);
342 int qxl_bo_init(struct qxl_device *qdev)
344 return qxl_ttm_init(qdev);
347 void qxl_bo_fini(struct qxl_device *qdev)
349 qxl_ttm_fini(qdev);
352 int qxl_bo_check_id(struct qxl_device *qdev, struct qxl_bo *bo)
358 ret = qxl_surface_id_alloc(qdev, bo);
362 ret = qxl_hw_surface_alloc(qdev, bo);
369 int qxl_surf_evict(struct qxl_device *qdev)
371 return ttm_bo_evict_mm(&qdev->mman.bdev, TTM_PL_PRIV);
374 int qxl_vram_evict(struct qxl_device *qdev)
376 return ttm_bo_evict_mm(&qdev->mman.bdev, TTM_PL_VRAM);
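The matches above span the whole qdev-facing lifecycle in this file: TTM setup and teardown (qxl_bo_init/qxl_bo_fini), buffer-object creation (qxl_bo_create), atomic page mapping through the device's vram_mapping/surface_mapping io_mappings, surface-id checking (qxl_bo_check_id), forced cleanup of leftover GEM objects, and eviction of the PRIV/VRAM TTM pools. The sketch below shows how a caller inside the driver might combine these helpers. It is a minimal sketch, not code from this file: qxl_qdev_usage_sketch() is a hypothetical name, and the qxl_bo_create() parameters past (qdev, size) are assumed from the kernel era these matches appear to come from; check qxl_object.h in your tree.

#include "qxl_drv.h"
#include "qxl_object.h"

/*
 * Hedged sketch, not part of qxl_object.c: qxl_qdev_usage_sketch() is a
 * hypothetical caller, and the qxl_bo_create() arguments past (qdev, size)
 * are assumptions based on the same kernel era as the lines above.
 */
static int qxl_qdev_usage_sketch(struct qxl_device *qdev)
{
        struct qxl_bo *bo = NULL;
        void *ptr;
        int ret;

        /* One page, kernel-owned, pinned, placed in VRAM (assumed args). */
        ret = qxl_bo_create(qdev, PAGE_SIZE, true /* kernel */,
                            true /* pinned */, QXL_GEM_DOMAIN_VRAM,
                            NULL /* surf */, &bo);
        if (ret)
                return ret;

        /*
         * Map one page; the helper picks qdev->vram_mapping or
         * qdev->surface_mapping depending on where the BO currently sits.
         */
        ptr = qxl_bo_kmap_atomic_page(qdev, bo, 0);
        if (ptr) {
                memset(ptr, 0, PAGE_SIZE);
                qxl_bo_kunmap_atomic_page(qdev, bo, ptr);
        }

        qxl_bo_unref(&bo);

        /* Under memory pressure, evict the surface (TTM_PL_PRIV) and VRAM pools. */
        ret = qxl_surf_evict(qdev);
        if (!ret)
                ret = qxl_vram_evict(qdev);
        return ret;
}

For completeness: qxl_bo_init(qdev)/qxl_bo_fini(qdev) simply wrap qxl_ttm_init()/qxl_ttm_fini() at device load and unload, and qxl_bo_force_delete(qdev) is the teardown-time safety net that walks qdev->gem.objects under qdev->gem.mutex and force-frees any objects userspace left behind.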