Lines matching defs:base
78 drm_dbg(vma->obj->base.dev,
85 drm_dbg(vma->obj->base.dev,
132 vma->size = obj->base.size;
154 obj->base.size >> PAGE_SHIFT));
157 GEM_BUG_ON(vma->size > obj->base.size);
313 struct dma_fence_work base;
325 struct i915_vma_work *vw = container_of(work, typeof(*vw), base);
343 struct i915_vma_work *vw = container_of(work, typeof(*vw), base);
367 dma_fence_work_init(&vw->base, &bind_ops);
368 vw->base.dma.error = -EAGAIN; /* disable the worker by default */
483 &work->base.chain,
523 prev = i915_active_set_exclusive(&vma->active, &work->base.dma);
525 __i915_sw_fence_await_dma_fence(&work->base.chain,
531 work->base.dma.error = 0; /* enable the queue_work() */
571 vma->obj->base.size);
809 drm_dbg(vma->obj->base.dev,
989 struct drm_i915_private *i915 = to_i915(obj->base.dev);
1021 obj->base.size, rot_info->plane[0].width,
1198 struct drm_i915_private *i915 = to_i915(obj->base.dev);
1229 obj->base.size, rem_info->plane[0].width,
1447 dma_fence_work_chain(&work->base, moving);
1561 dma_fence_work_commit_imm(&work->base);
1630 lockdep_assert_not_held(&vma->obj->base.resv->lock.base);
1813 if (!kref_get_unless_zero(&obj->base.refcount))
1932 err = dma_resv_reserve_fences(vma->obj->base.resv, idx);
1963 dma_resv_add_fence(vma->obj->base.resv, curr, usage);
2185 err = dma_resv_reserve_fences(obj->base.resv, 2);
2213 dma_resv_add_fence(obj->base.resv, fence, DMA_RESV_USAGE_READ);
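
Most of these matches are one idiom: a driver-private struct embeds a generic type as its `base` member (see `struct dma_fence_work base;` at 313), and callbacks recover the outer struct from a pointer to that member via container_of() (325, 343). Below is a minimal, self-contained userspace sketch of that pattern; the types are simplified stand-ins I introduce for illustration, not the kernel's actual dma_fence_work or i915_vma_work definitions.

/*
 * Sketch of the embedded-base pattern behind the matches at 313,
 * 325 and 343. Stand-in types only; not the kernel's real API.
 */
#include <stdio.h>
#include <stddef.h>

/* Stand-in for the generic work type (struct dma_fence_work in i915). */
struct fence_work {
	int error;
	void (*fn)(struct fence_work *work);
};

/* Stand-in for struct i915_vma_work: the generic type embedded as ->base. */
struct vma_work {
	struct fence_work base;
	const char *name;
};

/* Simplified container_of(); the kernel's version adds type checking. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

static void vma_work_fn(struct fence_work *work)
{
	/* Recover the enclosing vma_work from its embedded base member,
	 * as "container_of(work, typeof(*vw), base)" does at 325/343. */
	struct vma_work *vw = container_of(work, struct vma_work, base);

	printf("running \"%s\" work, error=%d\n", vw->name, vw->base.error);
}

int main(void)
{
	struct vma_work vw = {
		/* Mirrors "vw->base.dma.error = -EAGAIN" at 368:
		 * the work starts out disabled. */
		.base = { .error = -11 /* -EAGAIN */, .fn = vma_work_fn },
		.name = "bind",
	};

	vw.base.error = 0;	/* "work->base.dma.error = 0" at 531: enable */
	vw.base.fn(&vw.base);
	return 0;
}

Embedding the generic type as the first member and converting with container_of() is the kernel's usual substitute for inheritance: the generic code only ever sees a `struct fence_work *`, while the driver gets its private fields back for free, with no extra allocation or back-pointer.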
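
The matches at 1932/1963 and 2185/2213 show a second recurring idiom on `obj->base.resv`: dma_resv_reserve_fences() preallocates fence slots up front (it can fail with -ENOMEM), so that the later dma_resv_add_fence() call cannot fail once fences are ready to be published. The sketch below models only that reserve-then-add shape with simplified stand-ins; it is not the dma_resv API itself.

/*
 * Userspace model of the reserve-then-add idiom: all fallible work
 * (allocation) happens in the reserve step, the add step cannot fail.
 */
#include <errno.h>
#include <stdio.h>
#include <stdlib.h>

struct resv {
	void **fences;		/* preallocated slots */
	unsigned int max;	/* slots reserved */
	unsigned int num;	/* slots used */
};

/* Fallible step: grow the slot array before any fence is published. */
static int resv_reserve_fences(struct resv *r, unsigned int count)
{
	void **slots = realloc(r->fences, (r->num + count) * sizeof(*slots));

	if (!slots)
		return -ENOMEM;
	r->fences = slots;
	r->max = r->num + count;
	return 0;
}

/* Infallible step: a slot is guaranteed by the earlier reserve. */
static void resv_add_fence(struct resv *r, void *fence)
{
	if (r->num < r->max)
		r->fences[r->num++] = fence;
}

int main(void)
{
	struct resv r = { 0 };
	int dummy_fence;

	if (resv_reserve_fences(&r, 2))	/* checked, as at 1932/2185 */
		return 1;
	resv_add_fence(&r, &dummy_fence);	/* cannot fail, as at 1963/2213 */
	printf("%u/%u fence slots used\n", r.num, r.max);
	free(r.fences);
	return 0;
}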