Lines Matching defs:obj
54 assert_object_held_shared(vma->obj);
78 drm_dbg(vma->obj->base.dev,
85 drm_dbg(vma->obj->base.dev,
114 vma_create(struct drm_i915_gem_object *obj,
131 vma->obj = obj;
132 vma->size = obj->base.size;
154 obj->base.size >> PAGE_SHIFT));
157 GEM_BUG_ON(vma->size > obj->base.size);
181 spin_lock(&obj->vma.lock);
187 i915_gem_object_get_tiling(obj),
188 i915_gem_object_get_stride(obj));
196 i915_gem_object_get_tiling(obj),
197 i915_gem_object_get_stride(obj));
204 p = &obj->vma.tree.rb_node;
225 rb_insert_color(&vma->obj_node, &obj->vma.tree);
234 list_add(&vma->obj_link, &obj->vma.list);
236 list_add_tail(&vma->obj_link, &obj->vma.list);
238 spin_unlock(&obj->vma.lock);
244 spin_unlock(&obj->vma.lock);
253 i915_vma_lookup(struct drm_i915_gem_object *obj,
259 rb = obj->vma.tree.rb_node;
279 * @obj: parent &struct drm_i915_gem_object to be mapped
283 * i915_vma_instance() looks up an existing VMA of the @obj in the @vm with
291 i915_vma_instance(struct drm_i915_gem_object *obj,
300 spin_lock(&obj->vma.lock);
301 vma = i915_vma_lookup(obj, vm, view);
302 spin_unlock(&obj->vma.lock);
306 vma = vma_create(obj, vm, view);
317 struct drm_i915_gem_object *obj;
334 if (i915_gem_object_has_unknown_state(vw->obj))
345 if (vw->obj)
346 i915_gem_object_put(vw->obj);
418 struct drm_i915_gem_object *obj = vma->obj;
421 obj->mm.rsgt, i915_gem_object_is_readonly(obj),
422 i915_gem_object_is_lmem(obj), obj->mm.region,
520 * part of the obj->resv->excl_fence as it only affects
532 work->obj = i915_gem_object_get(vma->obj);
534 ret = i915_gem_object_wait_moving_fence(vma->obj, true);
554 if (WARN_ON_ONCE(vma->obj->flags & I915_BO_ALLOC_GPU_ONLY))
569 if (i915_gem_object_is_lmem(vma->obj)) {
570 ptr = i915_gem_object_lmem_io_map(vma->obj, 0,
571 vma->obj->base.size);
578 i915_gem_object_pin_map(vma->obj, I915_MAP_WC);
593 __i915_gem_object_release_map(vma->obj);
638 struct drm_i915_gem_object *obj;
644 obj = vma->obj;
645 GEM_BUG_ON(!obj);
650 i915_gem_object_unpin_map(obj);
652 i915_gem_object_put(obj);
801 alignment = max(alignment, i915_vm_obj_min_alignment(vma->vm, vma->obj));
809 drm_dbg(vma->obj->base.dev,
818 color = vma->obj->pat_index;
936 rotate_pages(struct drm_i915_gem_object *obj, unsigned int offset,
957 i915_gem_object_get_dma_address(obj, src_idx);
986 struct drm_i915_gem_object *obj)
989 struct drm_i915_private *i915 = to_i915(obj->base.dev);
1008 sg = rotate_pages(obj, rot_info->plane[i].offset,
1021 obj->base.size, rot_info->plane[0].width,
1047 remap_tiled_color_plane_pages(struct drm_i915_gem_object *obj,
1075 addr = i915_gem_object_get_dma_address_len(obj, offset, &length);
1106 remap_contiguous_pages(struct drm_i915_gem_object *obj,
1114 iter = i915_gem_object_get_sg_dma(obj, obj_offset, &offset);
1139 remap_linear_color_plane_pages(struct drm_i915_gem_object *obj,
1151 sg = remap_contiguous_pages(obj, obj_offset, size, st, sg);
1161 struct drm_i915_gem_object *obj,
1172 sg = remap_linear_color_plane_pages(obj,
1180 sg = remap_tiled_color_plane_pages(obj,
1195 struct drm_i915_gem_object *obj)
1198 struct drm_i915_private *i915 = to_i915(obj->base.dev);
1218 sg = remap_color_plane_pages(rem_info, obj, i, st, sg, &gtt_offset);
1229 obj->base.size, rem_info->plane[0].width,
1237 struct drm_i915_gem_object *obj)
1254 sg = remap_contiguous_pages(obj, view->partial.offset, count, st, st->sgl);
1274 * obj->mm.pages. When the obj->mm.pages sg_table is regenerated, so
1276 * be accessed when the obj->mm.pages are pinned.
1278 GEM_BUG_ON(!i915_gem_object_has_pinned_pages(vma->obj));
1285 pages = vma->obj->mm.pages;
1290 intel_rotate_pages(&vma->gtt_view.rotated, vma->obj);
1295 intel_remap_pages(&vma->gtt_view.remapped, vma->obj);
1299 pages = intel_partial_pages(&vma->gtt_view, vma->obj);
1322 err = i915_gem_object_pin_pages(vma->obj);
1330 vma->page_sizes = vma->obj->mm.page_sizes;
1336 __i915_gem_object_unpin_pages(vma->obj);
1368 if (vma->pages != vma->obj->mm.pages) {
1374 i915_gem_object_unpin_pages(vma->obj);
1443 err = i915_gem_object_get_moving_fence(vma->obj, &moving);
1530 vma->obj->pat_index,
1630 lockdep_assert_not_held(&vma->obj->base.resv->lock.base);
1633 err = i915_gem_object_lock(vma->obj, &_ww);
1643 * @obj: i915 GEM object
1649 void i915_ggtt_clear_scanout(struct drm_i915_gem_object *obj)
1653 spin_lock(&obj->vma.lock);
1654 for_each_ggtt_vma(vma, obj) {
1658 spin_unlock(&obj->vma.lock);
1724 struct drm_i915_gem_object *obj = vma->obj;
1728 spin_lock(&obj->vma.lock);
1731 rb_erase(&vma->obj_node, &obj->vma.tree);
1733 spin_unlock(&obj->vma.lock);
1772 * - obj->vma.lock
1808 struct drm_i915_gem_object *obj = vma->obj;
1813 if (!kref_get_unless_zero(&obj->base.refcount))
1817 i915_gem_object_put(obj);
1827 struct drm_i915_gem_object *obj = vma->obj;
1830 if (i915_gem_object_trylock(obj, NULL)) {
1833 i915_gem_object_unlock(obj);
1841 i915_gem_object_put(obj);
1854 __i915_gem_object_release_map(vma->obj);
1869 GEM_BUG_ON(!vma->obj->userfault_count);
1879 if (!--vma->obj->userfault_count)
1880 list_del(&vma->obj->userfault_link);
1906 struct drm_i915_gem_object *obj = vma->obj;
1909 assert_object_held(obj);
1914 err = i915_request_await_object(rq, vma->obj, flags & EXEC_OBJECT_WRITE);
1932 err = dma_resv_reserve_fences(vma->obj->base.resv, idx);
1940 front = i915_gem_object_get_frontbuffer(obj);
1955 obj->write_domain = I915_GEM_DOMAIN_RENDER;
1956 obj->read_domains = 0;
1959 obj->write_domain = 0;
1963 dma_resv_add_fence(vma->obj->base.resv, curr, usage);
1969 obj->read_domains |= I915_GEM_GPU_DOMAINS;
1970 obj->mm.dirty = true;
2026 vma->obj->mm.tlb);
2043 vma_invalidate_tlb(vma->vm, vma->obj->mm.tlb);
2097 &vma->obj->mm.rsgt->table != vma->resource->bi.pages)
2128 assert_object_held_shared(vma->obj);
2162 struct drm_i915_gem_object *obj = vma->obj;
2172 assert_object_held(obj);
2182 if (!obj->mm.rsgt)
2185 err = dma_resv_reserve_fences(obj->base.resv, 2);
2213 dma_resv_add_fence(obj->base.resv, fence, DMA_RESV_USAGE_READ);
2226 i915_gem_object_lock(vma->obj, NULL);
2228 i915_gem_object_unlock(vma->obj);
2235 i915_gem_object_make_unshrinkable(vma->obj);
2241 i915_gem_object_make_shrinkable(vma->obj);
2246 i915_gem_object_make_purgeable(vma->obj);