Lines matching references to bo (each entry is prefixed with its line number in the source file):
32 static int __qxl_bo_pin(struct qxl_bo *bo);
33 static void __qxl_bo_unpin(struct qxl_bo *bo);
37 struct qxl_bo *bo;
40 bo = to_qxl_bo(tbo);
41 qdev = to_qxl(bo->tbo.base.dev);
43 qxl_surface_evict(qdev, bo, false);
44 WARN_ON_ONCE(bo->map_count > 0);
46 list_del_init(&bo->list);
48 drm_gem_object_release(&bo->tbo.base);
49 kfree(bo);
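
The matches above fall inside the TTM destroy callback. Below is a sketch of how the full callback plausibly reads, reconstructed around the matched lines and assuming the usual qxl/TTM driver headers; the mutex taken around the list removal is an assumption, since any locking lines would not match "bo" and are absent from the listing:

static void qxl_ttm_bo_destroy(struct ttm_buffer_object *tbo)
{
        struct qxl_bo *bo;
        struct qxl_device *qdev;

        bo = to_qxl_bo(tbo);
        qdev = to_qxl(bo->tbo.base.dev);

        /* drop any hardware surface still backing this object */
        qxl_surface_evict(qdev, bo, false);
        /* a live vmap at destroy time would be a refcounting bug */
        WARN_ON_ONCE(bo->map_count > 0);

        mutex_lock(&qdev->gem.mutex);   /* assumed: protects qdev->gem.objects */
        list_del_init(&bo->list);
        mutex_unlock(&qdev->gem.mutex);

        drm_gem_object_release(&bo->tbo.base);
        kfree(bo);
}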
52 bool qxl_ttm_bo_is_qxl_bo(struct ttm_buffer_object *bo)
54 if (bo->destroy == &qxl_ttm_bo_destroy)
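
qxl_ttm_bo_is_qxl_bo() identifies a qxl BO by its destroy callback; given the matched condition, the full helper is presumably just:

bool qxl_ttm_bo_is_qxl_bo(struct ttm_buffer_object *bo)
{
        /* only qxl BOs are torn down through qxl_ttm_bo_destroy() */
        if (bo->destroy == &qxl_ttm_bo_destroy)
                return true;
        return false;
}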
115 struct qxl_bo *bo;
124 bo = kzalloc(sizeof(struct qxl_bo), GFP_KERNEL);
125 if (bo == NULL)
128 r = drm_gem_object_init(&qdev->ddev, &bo->tbo.base, size);
130 kfree(bo);
133 bo->tbo.base.funcs = &qxl_object_funcs;
134 bo->type = domain;
135 bo->surface_id = 0;
136 INIT_LIST_HEAD(&bo->list);
139 bo->surf = *surf;
141 qxl_ttm_placement_from_domain(bo, domain);
143 bo->tbo.priority = priority;
144 r = ttm_bo_init_reserved(&qdev->mman.bdev, &bo->tbo, type,
145 &bo->placement, 0, &ctx, NULL, NULL,
155 ttm_bo_pin(&bo->tbo);
156 ttm_bo_unreserve(&bo->tbo);
157 *bo_ptr = bo;
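
These lines span the object constructor. A sketch of the surrounding function follows; the "kernel", "pinned" and "surf" parameters, the ttm_operation_ctx initializer, and the error paths are assumptions inferred from the ttm_bo_init_reserved() and ttm_bo_pin() calls shown above:

int qxl_bo_create(struct qxl_device *qdev, unsigned long size,
                  bool kernel, bool pinned, u32 domain, u32 priority,
                  struct qxl_surface *surf, struct qxl_bo **bo_ptr)
{
        /* assumed: interruptible for userspace allocations only */
        struct ttm_operation_ctx ctx = { !kernel, false };
        enum ttm_bo_type type = kernel ? ttm_bo_type_kernel
                                       : ttm_bo_type_device;
        struct qxl_bo *bo;
        int r;

        bo = kzalloc(sizeof(struct qxl_bo), GFP_KERNEL);
        if (bo == NULL)
                return -ENOMEM;

        r = drm_gem_object_init(&qdev->ddev, &bo->tbo.base, size);
        if (unlikely(r)) {
                kfree(bo);
                return r;
        }
        bo->tbo.base.funcs = &qxl_object_funcs;
        bo->type = domain;
        bo->surface_id = 0;
        INIT_LIST_HEAD(&bo->list);

        if (surf)
                bo->surf = *surf;

        qxl_ttm_placement_from_domain(bo, domain);

        bo->tbo.priority = priority;
        r = ttm_bo_init_reserved(&qdev->mman.bdev, &bo->tbo, type,
                                 &bo->placement, 0, &ctx, NULL, NULL,
                                 &qxl_ttm_bo_destroy);
        if (unlikely(r != 0))
                return r;       /* assumed: destroy callback frees bo */

        if (pinned)
                ttm_bo_pin(&bo->tbo);
        ttm_bo_unreserve(&bo->tbo);
        *bo_ptr = bo;
        return 0;
}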
161 int qxl_bo_vmap_locked(struct qxl_bo *bo, struct iosys_map *map)
165 dma_resv_assert_held(bo->tbo.base.resv);
167 if (bo->kptr) {
168 bo->map_count++;
172 r = __qxl_bo_pin(bo);
176 r = ttm_bo_vmap(&bo->tbo, &bo->map);
178 __qxl_bo_unpin(bo);
181 bo->map_count = 1;
184 if (bo->map.is_iomem)
185 bo->kptr = (void *)bo->map.vaddr_iomem;
187 bo->kptr = bo->map.vaddr;
190 *map = bo->map;
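
qxl_bo_vmap_locked() reference-counts the kernel mapping: the first caller pins the BO and creates the vmap, later callers only bump map_count. A plausible reconstruction of the whole function around the matched lines, with the goto/label structure assumed:

int qxl_bo_vmap_locked(struct qxl_bo *bo, struct iosys_map *map)
{
        int r;

        dma_resv_assert_held(bo->tbo.base.resv);

        if (bo->kptr) {
                /* already mapped: just take another reference */
                bo->map_count++;
                goto out;
        }

        r = __qxl_bo_pin(bo);   /* keep the BO resident while mapped */
        if (r)
                return r;

        r = ttm_bo_vmap(&bo->tbo, &bo->map);
        if (r) {
                __qxl_bo_unpin(bo);
                return r;
        }
        bo->map_count = 1;

        /* cache a plain pointer regardless of iomem vs. system memory */
        if (bo->map.is_iomem)
                bo->kptr = (void *)bo->map.vaddr_iomem;
        else
                bo->kptr = bo->map.vaddr;

out:
        *map = bo->map;
        return 0;
}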
194 int qxl_bo_vmap(struct qxl_bo *bo, struct iosys_map *map)
198 r = qxl_bo_reserve(bo);
202 r = qxl_bo_vmap_locked(bo, map);
203 qxl_bo_unreserve(bo);
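
The unlocked wrapper simply brackets the locked variant with the reservation lock. A hypothetical caller-side sketch showing the intended pairing with qxl_bo_vunmap() (the zero-fill is only an example access):

        struct iosys_map map;
        int r;

        r = qxl_bo_vmap(bo, &map);      /* reserve + pin + vmap in one call */
        if (r)
                return r;

        /* iosys_map_memset() handles both iomem and system-memory maps */
        iosys_map_memset(&map, 0, 0, bo->tbo.base.size);

        qxl_bo_vunmap(bo);      /* unmapped and unpinned once map_count hits 0 */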
208 struct qxl_bo *bo, int page_offset)
216 if (bo->tbo.resource->mem_type == TTM_PL_VRAM)
218 else if (bo->tbo.resource->mem_type == TTM_PL_PRIV)
223 offset = bo->tbo.resource->start << PAGE_SHIFT;
226 if (bo->kptr) {
227 rptr = bo->kptr + (page_offset * PAGE_SIZE);
231 ret = qxl_bo_vmap_locked(bo, &bo_map);
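
qxl_bo_kmap_atomic_page() maps a single page: for VRAM/PRIV placements it apparently goes through an io_mapping, and otherwise falls back to the full vmap. A sketch built from the matched lines; the io_mapping device fields and the label names are assumptions:

void *qxl_bo_kmap_atomic_page(struct qxl_device *qdev,
                              struct qxl_bo *bo, int page_offset)
{
        unsigned long offset;
        struct iosys_map bo_map;
        struct io_mapping *map;
        void *rptr;
        int ret;

        if (bo->tbo.resource->mem_type == TTM_PL_VRAM)
                map = qdev->vram_mapping;       /* assumed field */
        else if (bo->tbo.resource->mem_type == TTM_PL_PRIV)
                map = qdev->surface_mapping;    /* assumed field */
        else
                goto fallback;

        offset = bo->tbo.resource->start << PAGE_SHIFT;
        return io_mapping_map_atomic_wc(map, offset + page_offset);

fallback:
        if (bo->kptr) {
                /* already vmapped; index into the existing mapping */
                rptr = bo->kptr + (page_offset * PAGE_SIZE);
                return rptr;
        }

        ret = qxl_bo_vmap_locked(bo, &bo_map);
        if (ret)
                return NULL;
        rptr = bo_map.vaddr + page_offset * PAGE_SIZE;
        return rptr;
}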
240 void qxl_bo_vunmap_locked(struct qxl_bo *bo)
242 dma_resv_assert_held(bo->tbo.base.resv);
244 if (bo->kptr == NULL)
246 bo->map_count--;
247 if (bo->map_count > 0)
249 bo->kptr = NULL;
250 ttm_bo_vunmap(&bo->tbo, &bo->map);
251 __qxl_bo_unpin(bo);
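
The teardown mirrors the setup: the mapping only goes away, and the pin is only dropped, once the last vmap user is gone. Reconstructed around the matched lines:

void qxl_bo_vunmap_locked(struct qxl_bo *bo)
{
        dma_resv_assert_held(bo->tbo.base.resv);

        if (bo->kptr == NULL)
                return;
        bo->map_count--;
        if (bo->map_count > 0)
                return;         /* other users still hold the mapping */
        bo->kptr = NULL;
        ttm_bo_vunmap(&bo->tbo, &bo->map);
        __qxl_bo_unpin(bo);
}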
254 int qxl_bo_vunmap(struct qxl_bo *bo)
258 r = qxl_bo_reserve(bo);
262 qxl_bo_vunmap_locked(bo);
263 qxl_bo_unreserve(bo);
268 struct qxl_bo *bo, void *pmap)
270 if ((bo->tbo.resource->mem_type != TTM_PL_VRAM) &&
271 (bo->tbo.resource->mem_type != TTM_PL_PRIV))
277 qxl_bo_vunmap_locked(bo);
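
The atomic-page unmap counterpart presumably branches the same way as the map side: an atomic io unmap for VRAM/PRIV placements, a vunmap for everything else. A sketch under that assumption:

void qxl_bo_kunmap_atomic_page(struct qxl_device *qdev,
                               struct qxl_bo *bo, void *pmap)
{
        if ((bo->tbo.resource->mem_type != TTM_PL_VRAM) &&
            (bo->tbo.resource->mem_type != TTM_PL_PRIV))
                goto fallback;

        io_mapping_unmap_atomic(pmap);
        return;

fallback:
        qxl_bo_vunmap_locked(bo);
}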
280 void qxl_bo_unref(struct qxl_bo **bo)
282 if ((*bo) == NULL)
285 drm_gem_object_put(&(*bo)->tbo.base);
286 *bo = NULL;
289 struct qxl_bo *qxl_bo_ref(struct qxl_bo *bo)
291 drm_gem_object_get(&bo->tbo.base);
292 return bo;
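
Both helpers are thin wrappers around the GEM reference count; a qxl_bo's lifetime ends in qxl_ttm_bo_destroy() when the last reference drops. Presumably in full:

void qxl_bo_unref(struct qxl_bo **bo)
{
        if ((*bo) == NULL)
                return;

        drm_gem_object_put(&(*bo)->tbo.base);
        *bo = NULL;     /* callers must not use the pointer afterwards */
}

struct qxl_bo *qxl_bo_ref(struct qxl_bo *bo)
{
        drm_gem_object_get(&bo->tbo.base);
        return bo;
}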
295 static int __qxl_bo_pin(struct qxl_bo *bo)
298 struct drm_device *ddev = bo->tbo.base.dev;
301 if (bo->tbo.pin_count) {
302 ttm_bo_pin(&bo->tbo);
305 qxl_ttm_placement_from_domain(bo, bo->type);
306 r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
308 ttm_bo_pin(&bo->tbo);
310 dev_err(ddev->dev, "%p pin failed\n", bo);
314 static void __qxl_bo_unpin(struct qxl_bo *bo)
316 ttm_bo_unpin(&bo->tbo);
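
__qxl_bo_pin() only needs to validate a placement on the first pin; an already-pinned BO just gets its pin count bumped. A reconstruction with the ttm_operation_ctx initializer and error handling filled in by assumption:

static int __qxl_bo_pin(struct qxl_bo *bo)
{
        struct ttm_operation_ctx ctx = { false, false };
        struct drm_device *ddev = bo->tbo.base.dev;
        int r;

        if (bo->tbo.pin_count) {
                /* already resident: just take another pin reference */
                ttm_bo_pin(&bo->tbo);
                return 0;
        }
        qxl_ttm_placement_from_domain(bo, bo->type);
        r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
        if (likely(r == 0))
                ttm_bo_pin(&bo->tbo);
        else
                dev_err(ddev->dev, "%p pin failed\n", bo);
        return r;
}

static void __qxl_bo_unpin(struct qxl_bo *bo)
{
        ttm_bo_unpin(&bo->tbo);
}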
324 int qxl_bo_pin(struct qxl_bo *bo)
328 r = qxl_bo_reserve(bo);
332 r = __qxl_bo_pin(bo);
333 qxl_bo_unreserve(bo);
342 int qxl_bo_unpin(struct qxl_bo *bo)
346 r = qxl_bo_reserve(bo);
350 __qxl_bo_unpin(bo);
351 qxl_bo_unreserve(bo);
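
The public pin/unpin entry points follow the same reserve/operate/unreserve shape as the vmap wrappers. Plausibly:

int qxl_bo_pin(struct qxl_bo *bo)
{
        int r;

        r = qxl_bo_reserve(bo);
        if (r)
                return r;

        r = __qxl_bo_pin(bo);
        qxl_bo_unreserve(bo);
        return r;
}

int qxl_bo_unpin(struct qxl_bo *bo)
{
        int r;

        r = qxl_bo_reserve(bo);
        if (r)
                return r;

        __qxl_bo_unpin(bo);
        qxl_bo_unreserve(bo);
        return 0;
}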
357 struct qxl_bo *bo, *n;
362 list_for_each_entry_safe(bo, n, &qdev->gem.objects, list) {
364 &bo->tbo.base, bo, (unsigned long)bo->tbo.base.size,
365 *((unsigned long *)&bo->tbo.base.refcount));
367 list_del_init(&bo->list);
369 /* this should unref the ttm bo */
370 drm_gem_object_put(&bo->tbo.base);
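
This loop is a teardown safety net: any object still on qdev->gem.objects at device destruction is logged and force-freed. A sketch of the surrounding function; the early return, the dev_err() format string, and the mutex around the list removal are assumptions built around the four arguments shown in the listing:

void qxl_bo_force_delete(struct qxl_device *qdev)
{
        struct qxl_bo *bo, *n;

        if (list_empty(&qdev->gem.objects))
                return;

        list_for_each_entry_safe(bo, n, &qdev->gem.objects, list) {
                /* format string assumed to match the four arguments */
                dev_err(qdev->ddev.dev, "%p %p %lu %lu force free\n",
                        &bo->tbo.base, bo, (unsigned long)bo->tbo.base.size,
                        *((unsigned long *)&bo->tbo.base.refcount));
                mutex_lock(&qdev->gem.mutex);   /* assumed */
                list_del_init(&bo->list);
                mutex_unlock(&qdev->gem.mutex);
                /* this should unref the ttm bo */
                drm_gem_object_put(&bo->tbo.base);
        }
}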
384 int qxl_bo_check_id(struct qxl_device *qdev, struct qxl_bo *bo)
388 if (bo->type == QXL_GEM_DOMAIN_SURFACE && bo->surface_id == 0) {
390 ret = qxl_surface_id_alloc(qdev, bo);
394 ret = qxl_hw_surface_alloc(qdev, bo);
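
qxl_bo_check_id() lazily assigns a hardware surface: a surface-domain BO whose surface_id is still 0 has no backing surface yet, so an id and a hardware surface are allocated on first use. A sketch with the error handling assumed:

int qxl_bo_check_id(struct qxl_device *qdev, struct qxl_bo *bo)
{
        int ret;

        if (bo->type == QXL_GEM_DOMAIN_SURFACE && bo->surface_id == 0) {
                /* first use: reserve an id, then create the hw surface */
                ret = qxl_surface_id_alloc(qdev, bo);
                if (ret)
                        return ret;

                ret = qxl_hw_surface_alloc(qdev, bo);
                if (ret)
                        return ret;
        }
        return 0;
}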