/kernel/linux/linux-5.10/drivers/gpu/drm/i915/gt/
gen6_ppgtt.c
    15:  static inline void gen6_write_pde(const struct gen6_ppgtt *ppgtt,   [in gen6_write_pde(), argument]
    19:  dma_addr_t addr = pt ? px_dma(pt) : px_dma(ppgtt->base.vm.scratch[1]);   [in gen6_write_pde()]
    23:  ppgtt->pd_addr + pde);   [in gen6_write_pde()]
    82:  struct gen6_ppgtt * const ppgtt = to_gen6_ppgtt(i915_vm_to_ppgtt(vm));   [in gen6_ppgtt_clear_range(), local]
    91:  i915_pt_entry(ppgtt->base.pd, pde++);   [in gen6_ppgtt_clear_range()]
    99:  ppgtt->scan_for_unused_pt = true;   [in gen6_ppgtt_clear_range()]
    121:  struct i915_ppgtt *ppgtt = i915_vm_to_ppgtt(vm);   [in gen6_ppgtt_insert_entries(), local]
    122:  struct i915_page_directory * const pd = ppgtt->pd;   [in gen6_ppgtt_insert_entries()]
    158:  static void gen6_flush_pd(struct gen6_ppgtt *ppgtt, u64 start, u64 end)   [in gen6_flush_pd(), argument]
    160:  struct i915_page_directory * const pd = ppgtt ...   [in gen6_flush_pd()]
    184:  struct gen6_ppgtt *ppgtt = to_gen6_ppgtt(i915_vm_to_ppgtt(vm));   [in gen6_alloc_va_range(), local]
    228:  gen6_ppgtt_init_scratch(struct gen6_ppgtt *ppgtt)   [in gen6_ppgtt_init_scratch(), argument]
    262:  gen6_ppgtt_free_pd(struct gen6_ppgtt *ppgtt)   [in gen6_ppgtt_free_pd(), argument]
    275:  struct gen6_ppgtt *ppgtt = to_gen6_ppgtt(i915_vm_to_ppgtt(vm));   [in gen6_ppgtt_cleanup(), local]
    308:  struct gen6_ppgtt *ppgtt = vma->private;   [in pd_vma_bind(), local]
    319:  struct gen6_ppgtt *ppgtt = vma->private;   [in pd_vma_unbind(), local]
    346:  pd_vma_create(struct gen6_ppgtt *ppgtt, int size)   [in pd_vma_create(), argument]
    379:  struct gen6_ppgtt *ppgtt = to_gen6_ppgtt(base);   [in gen6_ppgtt_pin(), local]
    413:  struct gen6_ppgtt *ppgtt = to_gen6_ppgtt(base);   [in gen6_ppgtt_unpin(), local]
    422:  struct gen6_ppgtt *ppgtt = to_gen6_ppgtt(base);   [in gen6_ppgtt_unpin_all(), local]
    434:  struct gen6_ppgtt *ppgtt;   [in gen6_ppgtt_create(), local]
    [all...]
gen8_ppgtt.c
    53:  static void gen8_ppgtt_notify_vgt(struct i915_ppgtt *ppgtt, bool create)   [in gen8_ppgtt_notify_vgt(), argument]
    55:  struct drm_i915_private *i915 = ppgtt->vm.i915;   [in gen8_ppgtt_notify_vgt()]
    56:  struct intel_uncore *uncore = ppgtt->vm.gt->uncore;   [in gen8_ppgtt_notify_vgt()]
    61:  atomic_inc(px_used(ppgtt->pd)); /* never remove */   [in gen8_ppgtt_notify_vgt()]
    63:  atomic_dec(px_used(ppgtt->pd));   [in gen8_ppgtt_notify_vgt()]
    67:  if (i915_vm_is_4lvl(&ppgtt->vm)) {   [in gen8_ppgtt_notify_vgt()]
    68:  const u64 daddr = px_dma(ppgtt->pd);   [in gen8_ppgtt_notify_vgt()]
    80:  const u64 daddr = i915_page_dir_dma_addr(ppgtt, i);   [in gen8_ppgtt_notify_vgt()]
    155:  struct i915_ppgtt * const ppgtt = i915_vm_to_ppgtt(vm);   [in gen8_pdp_for_page_index(), local]
    158:  return ppgtt ...   [in gen8_pdp_for_page_index()]
    189:  struct i915_ppgtt *ppgtt = i915_vm_to_ppgtt(vm);   [in gen8_ppgtt_cleanup(), local]
    358:  gen8_ppgtt_insert_pte(struct i915_ppgtt *ppgtt, struct i915_page_directory *pdp, struct sgt_dma *iter, u64 idx, enum i915_cache_level cache_level, u32 flags)   [in gen8_ppgtt_insert_pte(), argument]
    533:  struct i915_ppgtt * const ppgtt = i915_vm_to_ppgtt(vm);   [in gen8_ppgtt_insert(), local]
    609:  gen8_preallocate_top_level_pdp(struct i915_ppgtt *ppgtt)   [in gen8_preallocate_top_level_pdp(), argument]
    683:  struct i915_ppgtt *ppgtt;   [in gen8_ppgtt_create(), local]
    [all...]
intel_ppgtt.c
    167:  struct i915_ppgtt *ppgtt;   [in i915_ppgtt_create(), local]
    169:  ppgtt = __ppgtt_create(gt);   [in i915_ppgtt_create()]
    170:  if (IS_ERR(ppgtt))   [in i915_ppgtt_create()]
    171:  return ppgtt;   [in i915_ppgtt_create()]
    173:  trace_i915_ppgtt_create(&ppgtt->vm);   [in i915_ppgtt_create()]
    175:  return ppgtt;   [in i915_ppgtt_create()]
    298:  void ppgtt_init(struct i915_ppgtt *ppgtt, struct intel_gt *gt)   [in ppgtt_init(), argument]
    302:  ppgtt->vm.gt = gt;   [in ppgtt_init()]
    303:  ppgtt->vm.i915 = i915;   [in ppgtt_init()]
    304:  ppgtt ...   [in ppgtt_init()]
    [all...]
intel_lrc_reg.h
    32:  #define ASSIGN_CTX_PDP(ppgtt, reg_state, n) do { \
    34:  const u64 addr__ = i915_page_dir_dma_addr((ppgtt), (n)); \
    39:  #define ASSIGN_CTX_PML4(ppgtt, reg_state) do { \
    41:  const u64 addr__ = px_dma(ppgtt->pd); \
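Both kernel versions keep the same pair of helpers for loading page-directory addresses into a context image, and the listing only captures the address computation. As a hedged illustration of how a macro of this shape typically completes, the sketch below splits the 64-bit DMA address into high and low dwords of the register state; CTX_PDP_UDW()/CTX_PDP_LDW() are hypothetical slot lookups, not the real intel_lrc_reg.h names, and only the addr__ line is taken from the listing.

/*
 * Hedged sketch, not the verbatim kernel macro.  Builds only inside
 * drivers/gpu/drm/i915 against its internal headers; CTX_PDP_UDW()/
 * CTX_PDP_LDW() are placeholder slot lookups.
 */
#define ASSIGN_CTX_PDP_SKETCH(ppgtt, reg_state, n) do {                 \
        u32 *reg__ = (reg_state);                                       \
        const u64 addr__ = i915_page_dir_dma_addr((ppgtt), (n));        \
        reg__[CTX_PDP_UDW(n)] = upper_32_bits(addr__); /* high dword */ \
        reg__[CTX_PDP_LDW(n)] = lower_32_bits(addr__); /* low dword  */ \
} while (0)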
intel_ggtt.c
    625:  struct i915_ppgtt *ppgtt;   [in init_aliasing_ppgtt(), local]
    628:  ppgtt = i915_ppgtt_create(ggtt->vm.gt);   [in init_aliasing_ppgtt()]
    629:  if (IS_ERR(ppgtt))   [in init_aliasing_ppgtt()]
    630:  return PTR_ERR(ppgtt);   [in init_aliasing_ppgtt()]
    632:  if (GEM_WARN_ON(ppgtt->vm.total < ggtt->vm.total)) {   [in init_aliasing_ppgtt()]
    637:  err = i915_vm_alloc_pt_stash(&ppgtt->vm, &stash, ggtt->vm.total);   [in init_aliasing_ppgtt()]
    641:  err = i915_vm_pin_pt_stash(&ppgtt->vm, &stash);   [in init_aliasing_ppgtt()]
    651:  ppgtt->vm.allocate_va_range(&ppgtt->vm, &stash, 0, ggtt->vm.total);   [in init_aliasing_ppgtt()]
    653:  ggtt->alias = ppgtt;   [in init_aliasing_ppgtt()]
    674:  struct i915_ppgtt *ppgtt;   [in fini_aliasing_ppgtt(), local]
    [all...]
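Read together, these fragments give the aliasing-ppgtt bring-up on linux-5.10: create one ppgtt spanning the whole GGTT, reserve page-table pages for it, preallocate the entire VA range, and install it as ggtt->alias. The sketch below condenses that flow; it is not the literal function body (the GEM_WARN_ON size check and the vma-ops rewiring are left out), but every call it makes appears somewhere in this listing.

/* Condensed sketch of the init_aliasing_ppgtt() flow visible above
 * (linux-5.10 i915 internals; builds only inside the driver tree). */
static int init_aliasing_ppgtt_sketch(struct i915_ggtt *ggtt)
{
        struct i915_vm_pt_stash stash = {};
        struct i915_ppgtt *ppgtt;
        int err;

        ppgtt = i915_ppgtt_create(ggtt->vm.gt);  /* one ppgtt backs every GGTT address */
        if (IS_ERR(ppgtt))
                return PTR_ERR(ppgtt);

        /* Reserve enough page-table pages to cover the whole GGTT range ... */
        err = i915_vm_alloc_pt_stash(&ppgtt->vm, &stash, ggtt->vm.total);
        if (err)
                goto err_put;

        err = i915_vm_pin_pt_stash(&ppgtt->vm, &stash);
        if (err)
                goto err_stash;

        /* ... and populate the page directories up front, 0..total. */
        ppgtt->vm.allocate_va_range(&ppgtt->vm, &stash, 0, ggtt->vm.total);

        ggtt->alias = ppgtt;  /* GGTT binds now alias into this ppgtt as well */

        i915_vm_free_pt_stash(&ppgtt->vm, &stash);
        return 0;

err_stash:
        i915_vm_free_pt_stash(&ppgtt->vm, &stash);
err_put:
        i915_vm_put(&ppgtt->vm);
        return err;
}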
intel_gtt.h
    232:  * Each active user context has its own address space (in full-ppgtt).
    476:  i915_page_dir_dma_addr(const struct i915_ppgtt *ppgtt, const unsigned int n)   [in i915_page_dir_dma_addr(), argument]
    478:  struct i915_page_table *pt = ppgtt->pd->entry[n];   [in i915_page_dir_dma_addr()]
    480:  return __px_dma(pt ? px_base(pt) : ppgtt->vm.scratch[ppgtt->vm.top]);   [in i915_page_dir_dma_addr()]
    483:  void ppgtt_init(struct i915_ppgtt *ppgtt, struct intel_gt *gt);
/kernel/linux/linux-6.6/drivers/gpu/drm/i915/gt/
gen6_ppgtt.c
    19:  static void gen6_write_pde(const struct gen6_ppgtt *ppgtt,   [in gen6_write_pde(), argument]
    23:  dma_addr_t addr = pt ? px_dma(pt) : px_dma(ppgtt->base.vm.scratch[1]);   [in gen6_write_pde()]
    27:  ppgtt->pd_addr + pde);   [in gen6_write_pde()]
    77:  struct gen6_ppgtt * const ppgtt = to_gen6_ppgtt(i915_vm_to_ppgtt(vm));   [in gen6_ppgtt_clear_range(), local]
    86:  i915_pt_entry(ppgtt->base.pd, pde++);   [in gen6_ppgtt_clear_range()]
    94:  ppgtt->scan_for_unused_pt = true;   [in gen6_ppgtt_clear_range()]
    115:  struct i915_ppgtt *ppgtt = i915_vm_to_ppgtt(vm);   [in gen6_ppgtt_insert_entries(), local]
    116:  struct i915_page_directory * const pd = ppgtt->pd;   [in gen6_ppgtt_insert_entries()]
    150:  static void gen6_flush_pd(struct gen6_ppgtt *ppgtt, u64 start, u64 end)   [in gen6_flush_pd(), argument]
    152:  struct i915_page_directory * const pd = ppgtt ...   [in gen6_flush_pd()]
    176:  struct gen6_ppgtt *ppgtt = to_gen6_ppgtt(i915_vm_to_ppgtt(vm));   [in gen6_alloc_va_range(), local]
    219:  gen6_ppgtt_init_scratch(struct gen6_ppgtt *ppgtt)   [in gen6_ppgtt_init_scratch(), argument]
    256:  gen6_ppgtt_free_pd(struct gen6_ppgtt *ppgtt)   [in gen6_ppgtt_free_pd(), argument]
    269:  struct gen6_ppgtt *ppgtt = to_gen6_ppgtt(i915_vm_to_ppgtt(vm));   [in gen6_ppgtt_cleanup(), local]
    287:  struct gen6_ppgtt *ppgtt = vma_res->private;   [in pd_vma_bind(), local]
    299:  struct gen6_ppgtt *ppgtt = vma_res->private;   [in pd_vma_unbind(), local]
    326:  struct gen6_ppgtt *ppgtt = to_gen6_ppgtt(base);   [in gen6_ppgtt_pin(), local]
    380:  gen6_alloc_top_pd(struct gen6_ppgtt *ppgtt)   [in gen6_alloc_top_pd(), argument]
    421:  struct gen6_ppgtt *ppgtt = to_gen6_ppgtt(base);   [in gen6_ppgtt_unpin(), local]
    431:  struct gen6_ppgtt *ppgtt;   [in gen6_ppgtt_create(), local]
    [all...]
gen8_ppgtt.c
    87:  static void gen8_ppgtt_notify_vgt(struct i915_ppgtt *ppgtt, bool create)   [in gen8_ppgtt_notify_vgt(), argument]
    89:  struct drm_i915_private *i915 = ppgtt->vm.i915;   [in gen8_ppgtt_notify_vgt()]
    90:  struct intel_uncore *uncore = ppgtt->vm.gt->uncore;   [in gen8_ppgtt_notify_vgt()]
    95:  atomic_inc(px_used(ppgtt->pd)); /* never remove */   [in gen8_ppgtt_notify_vgt()]
    97:  atomic_dec(px_used(ppgtt->pd));   [in gen8_ppgtt_notify_vgt()]
    101:  if (i915_vm_is_4lvl(&ppgtt->vm)) {   [in gen8_ppgtt_notify_vgt()]
    102:  const u64 daddr = px_dma(ppgtt->pd);   [in gen8_ppgtt_notify_vgt()]
    114:  const u64 daddr = i915_page_dir_dma_addr(ppgtt, i);   [in gen8_ppgtt_notify_vgt()]
    189:  struct i915_ppgtt * const ppgtt = i915_vm_to_ppgtt(vm);   [in gen8_pdp_for_page_index(), local]
    192:  return ppgtt ...   [in gen8_pdp_for_page_index()]
    223:  struct i915_ppgtt *ppgtt = i915_vm_to_ppgtt(vm);   [in gen8_ppgtt_cleanup(), local]
    451:  gen8_ppgtt_insert_pte(struct i915_ppgtt *ppgtt, struct i915_page_directory *pdp, struct sgt_dma *iter, u64 idx, unsigned int pat_index, u32 flags)   [in gen8_ppgtt_insert_pte(), argument]
    736:  struct i915_ppgtt * const ppgtt = i915_vm_to_ppgtt(vm);   [in gen8_ppgtt_insert(), local]
    888:  gen8_preallocate_top_level_pdp(struct i915_ppgtt *ppgtt)   [in gen8_preallocate_top_level_pdp(), argument]
    964:  struct i915_ppgtt *ppgtt;   [in gen8_ppgtt_create(), local]
    [all...]
intel_ppgtt.c
    171:  struct i915_ppgtt *ppgtt;   [in i915_ppgtt_create(), local]
    173:  ppgtt = __ppgtt_create(gt, lmem_pt_obj_flags);   [in i915_ppgtt_create()]
    174:  if (IS_ERR(ppgtt))   [in i915_ppgtt_create()]
    175:  return ppgtt;   [in i915_ppgtt_create()]
    177:  trace_i915_ppgtt_create(&ppgtt->vm);   [in i915_ppgtt_create()]
    179:  return ppgtt;   [in i915_ppgtt_create()]
    307:  void ppgtt_init(struct i915_ppgtt *ppgtt, struct intel_gt *gt,   [in ppgtt_init(), argument]
    312:  ppgtt->vm.gt = gt;   [in ppgtt_init()]
    313:  ppgtt->vm.i915 = i915;   [in ppgtt_init()]
    314:  ppgtt ...   [in ppgtt_init()]
    [all...]
intel_lrc_reg.h
    33:  #define ASSIGN_CTX_PDP(ppgtt, reg_state, n) do { \
    35:  const u64 addr__ = i915_page_dir_dma_addr((ppgtt), (n)); \
    40:  #define ASSIGN_CTX_PML4(ppgtt, reg_state) do { \
    42:  const u64 addr__ = px_dma((ppgtt)->pd); \
intel_ggtt.c
    716:  struct i915_ppgtt *ppgtt;   [in init_aliasing_ppgtt(), local]
    719:  ppgtt = i915_ppgtt_create(ggtt->vm.gt, 0);   [in init_aliasing_ppgtt()]
    720:  if (IS_ERR(ppgtt))   [in init_aliasing_ppgtt()]
    721:  return PTR_ERR(ppgtt);   [in init_aliasing_ppgtt()]
    723:  if (GEM_WARN_ON(ppgtt->vm.total < ggtt->vm.total)) {   [in init_aliasing_ppgtt()]
    728:  err = i915_vm_alloc_pt_stash(&ppgtt->vm, &stash, ggtt->vm.total);   [in init_aliasing_ppgtt()]
    732:  i915_gem_object_lock(ppgtt->vm.scratch[0], NULL);   [in init_aliasing_ppgtt()]
    733:  err = i915_vm_map_pt_stash(&ppgtt->vm, &stash);   [in init_aliasing_ppgtt()]
    734:  i915_gem_object_unlock(ppgtt->vm.scratch[0]);   [in init_aliasing_ppgtt()]
    744:  ppgtt ...   [in init_aliasing_ppgtt()]
    767:  struct i915_ppgtt *ppgtt;   [in fini_aliasing_ppgtt(), local]
    [all...]
selftest_tlb.c
    228:  struct i915_ppgtt *ppgtt;   [in mem_tlbinv(), local]
    270:  ppgtt = i915_ppgtt_create(gt, 0);   [in mem_tlbinv()]
    271:  if (IS_ERR(ppgtt)) {   [in mem_tlbinv()]
    272:  err = PTR_ERR(ppgtt);   [in mem_tlbinv()]
    276:  va = i915_vma_instance(A, &ppgtt->vm, NULL);   [in mem_tlbinv()]
    282:  vb = i915_vma_instance(B, &ppgtt->vm, NULL);   [in mem_tlbinv()]
    301:  ce->vm = i915_vm_get(&ppgtt->vm);   [in mem_tlbinv()]
    348:  i915_vm_put(&ppgtt->vm);   [in mem_tlbinv()]
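selftest_tlb.c shows the usual lifetime pattern for a private ppgtt on linux-6.6: create it against a GT, instantiate VMAs inside its address space, hand the context its own reference, and drop the creation reference at the end. A minimal sketch of that pattern, assuming obj_a, obj_b and ce already exist and that any previous ce->vm reference is handled by the caller:

/* Sketch of the ppgtt usage pattern visible in mem_tlbinv() above.
 * i915 internals only; error unwinding reduced to a single label. */
static int ppgtt_usage_sketch(struct intel_gt *gt,
                              struct drm_i915_gem_object *obj_a,
                              struct drm_i915_gem_object *obj_b,
                              struct intel_context *ce)
{
        struct i915_ppgtt *ppgtt;
        struct i915_vma *va, *vb;
        int err = 0;

        ppgtt = i915_ppgtt_create(gt, 0);  /* private address space for the test */
        if (IS_ERR(ppgtt))
                return PTR_ERR(ppgtt);

        va = i915_vma_instance(obj_a, &ppgtt->vm, NULL);
        if (IS_ERR(va)) {
                err = PTR_ERR(va);
                goto out;
        }

        vb = i915_vma_instance(obj_b, &ppgtt->vm, NULL);
        if (IS_ERR(vb)) {
                err = PTR_ERR(vb);
                goto out;
        }

        /* The context now holds its own reference on the address space
         * (the caller is assumed to have dropped any previous ce->vm). */
        ce->vm = i915_vm_get(&ppgtt->vm);

        /* ... bind va/vb, submit work, then retire the context ... */

out:
        i915_vm_put(&ppgtt->vm);  /* drop the creation reference */
        return err;
}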
intel_gtt.h
    568:  i915_page_dir_dma_addr(const struct i915_ppgtt *ppgtt, const unsigned int n)   [in i915_page_dir_dma_addr(), argument]
    570:  struct i915_page_table *pt = ppgtt->pd->entry[n];   [in i915_page_dir_dma_addr()]
    572:  return __px_dma(pt ? px_base(pt) : ppgtt->vm.scratch[ppgtt->vm.top]);   [in i915_page_dir_dma_addr()]
    575:  void ppgtt_init(struct i915_ppgtt *ppgtt, struct intel_gt *gt,
/kernel/linux/linux-5.10/drivers/gpu/drm/i915/selftests/
mock_gtt.c
    67:  struct i915_ppgtt *ppgtt;   [in mock_ppgtt(), local]
    69:  ppgtt = kzalloc(sizeof(*ppgtt), GFP_KERNEL);   [in mock_ppgtt()]
    70:  if (!ppgtt)   [in mock_ppgtt()]
    73:  ppgtt->vm.gt = &i915->gt;   [in mock_ppgtt()]
    74:  ppgtt->vm.i915 = i915;   [in mock_ppgtt()]
    75:  ppgtt->vm.total = round_down(U64_MAX, PAGE_SIZE);   [in mock_ppgtt()]
    76:  ppgtt->vm.file = ERR_PTR(-ENODEV);   [in mock_ppgtt()]
    77:  ppgtt->vm.dma = &i915->drm.pdev->dev;   [in mock_ppgtt()]
    79:  i915_address_space_init(&ppgtt ...   [in mock_ppgtt()]
    [all...]
i915_gem_gtt.c
    147:  struct i915_ppgtt *ppgtt;   [in igt_ppgtt_alloc(), local]
    156:  ppgtt = i915_ppgtt_create(&dev_priv->gt);   [in igt_ppgtt_alloc()]
    157:  if (IS_ERR(ppgtt))   [in igt_ppgtt_alloc()]
    158:  return PTR_ERR(ppgtt);   [in igt_ppgtt_alloc()]
    160:  if (!ppgtt->vm.allocate_va_range)   [in igt_ppgtt_alloc()]
    171:  limit = min(ppgtt->vm.total, limit);   [in igt_ppgtt_alloc()]
    177:  err = i915_vm_alloc_pt_stash(&ppgtt->vm, &stash, size);   [in igt_ppgtt_alloc()]
    181:  err = i915_vm_pin_pt_stash(&ppgtt->vm, &stash);   [in igt_ppgtt_alloc()]
    183:  i915_vm_free_pt_stash(&ppgtt->vm, &stash);   [in igt_ppgtt_alloc()]
    187:  ppgtt ...   [in igt_ppgtt_alloc()]
    1014:  struct i915_ppgtt *ppgtt;   [in exercise_ppgtt(), local]
    [all...]
/kernel/linux/linux-6.6/drivers/gpu/drm/i915/selftests/
mock_gtt.c
    67:  struct i915_ppgtt *ppgtt;   [in mock_ppgtt(), local]
    69:  ppgtt = kzalloc(sizeof(*ppgtt), GFP_KERNEL);   [in mock_ppgtt()]
    70:  if (!ppgtt)   [in mock_ppgtt()]
    73:  ppgtt->vm.gt = to_gt(i915);   [in mock_ppgtt()]
    74:  ppgtt->vm.i915 = i915;   [in mock_ppgtt()]
    75:  ppgtt->vm.total = round_down(U64_MAX, PAGE_SIZE);   [in mock_ppgtt()]
    76:  ppgtt->vm.dma = i915->drm.dev;   [in mock_ppgtt()]
    78:  i915_address_space_init(&ppgtt->vm, VM_CLASS_PPGTT);   [in mock_ppgtt()]
    80:  ppgtt ...   [in mock_ppgtt()]
    [all...]
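The mock selftests never touch real page tables, so mock_ppgtt() simply hand-rolls the structure: allocate it, point vm.gt, vm.i915 and vm.dma at the mock device, give it the largest page-aligned size, and run the common address-space init. The sketch below is assembled from the fragments visible above; the trailing setup (mock vm ops, scratch and top-level page directory) is cut off in the listing and is deliberately left as a comment rather than reconstructed.

/* Sketch of mock_ppgtt() as visible in the linux-6.6 listing; the part
 * behind "[all...]" is not reconstructed.  The name argument mirrors the
 * mock_context() caller and is otherwise unused here. */
struct i915_ppgtt *mock_ppgtt_sketch(struct drm_i915_private *i915,
                                     const char *name)
{
        struct i915_ppgtt *ppgtt;

        ppgtt = kzalloc(sizeof(*ppgtt), GFP_KERNEL);
        if (!ppgtt)
                return NULL;

        ppgtt->vm.gt = to_gt(i915);                        /* mock GT back-pointer */
        ppgtt->vm.i915 = i915;
        ppgtt->vm.total = round_down(U64_MAX, PAGE_SIZE);  /* as large as the type allows */
        ppgtt->vm.dma = i915->drm.dev;                     /* device used for DMA mappings */

        i915_address_space_init(&ppgtt->vm, VM_CLASS_PPGTT);

        /* ... no-op vm ops, scratch page and top-level pd setup elided ... */
        return ppgtt;
}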
i915_gem_gtt.c
    156:  struct i915_ppgtt *ppgtt;   [in igt_ppgtt_alloc(), local]
    166:  ppgtt = i915_ppgtt_create(to_gt(dev_priv), 0);   [in igt_ppgtt_alloc()]
    167:  if (IS_ERR(ppgtt))   [in igt_ppgtt_alloc()]
    168:  return PTR_ERR(ppgtt);   [in igt_ppgtt_alloc()]
    170:  if (!ppgtt->vm.allocate_va_range)   [in igt_ppgtt_alloc()]
    181:  limit = min(ppgtt->vm.total, limit);   [in igt_ppgtt_alloc()]
    185:  err = i915_vm_lock_objects(&ppgtt->vm, &ww);   [in igt_ppgtt_alloc()]
    193:  err = i915_vm_alloc_pt_stash(&ppgtt->vm, &stash, size);   [in igt_ppgtt_alloc()]
    197:  err = i915_vm_map_pt_stash(&ppgtt->vm, &stash);   [in igt_ppgtt_alloc()]
    199:  i915_vm_free_pt_stash(&ppgtt ...   [in igt_ppgtt_alloc()]
    1195:  struct i915_ppgtt *ppgtt;   [in exercise_ppgtt(), local]
    [all...]
/kernel/linux/linux-5.10/drivers/gpu/drm/i915/gem/selftests/
mock_context.c
    43:  struct i915_ppgtt *ppgtt;   [in mock_context(), local]
    47:  ppgtt = mock_ppgtt(i915, name);   [in mock_context()]
    48:  if (!ppgtt)   [in mock_context()]
    52:  __set_ppgtt(ctx, &ppgtt->vm);   [in mock_context()]
    55:  i915_vm_put(&ppgtt->vm);   [in mock_context()]
huge_pages.c
    382:  struct i915_ppgtt *ppgtt = arg;   [in igt_mock_exhaust_device_supported_pages(), local]
    383:  struct drm_i915_private *i915 = ppgtt->vm.i915;   [in igt_mock_exhaust_device_supported_pages()]
    419:  vma = i915_vma_instance(obj, &ppgtt->vm, NULL);   [in igt_mock_exhaust_device_supported_pages()]
    458:  struct i915_ppgtt *ppgtt = arg;   [in igt_mock_memory_region_huge_pages(), local]
    459:  struct drm_i915_private *i915 = ppgtt->vm.i915;   [in igt_mock_memory_region_huge_pages()]
    486:  vma = i915_vma_instance(obj, &ppgtt->vm, NULL);   [in igt_mock_memory_region_huge_pages()]
    535:  struct i915_ppgtt *ppgtt = arg;   [in igt_mock_ppgtt_misaligned_dma(), local]
    536:  struct drm_i915_private *i915 = ppgtt->vm.i915;   [in igt_mock_ppgtt_misaligned_dma()]
    578:  vma = i915_vma_instance(obj, &ppgtt->vm, NULL);   [in igt_mock_ppgtt_misaligned_dma()]
    651:  struct i915_ppgtt *ppgtt)   [in close_object_list()]
    650:  close_object_list(struct list_head *objects, struct i915_ppgtt *ppgtt)   [in close_object_list(), argument]
    665:  struct i915_ppgtt *ppgtt = arg;   [in igt_mock_ppgtt_huge_fill(), local]
    786:  struct i915_ppgtt *ppgtt = arg;   [in igt_mock_ppgtt_64K(), local]
    1585:  struct i915_ppgtt *ppgtt;   [in i915_gem_huge_page_mock_selftests(), local]
    [all...]
/kernel/linux/linux-6.6/drivers/gpu/drm/i915/gem/selftests/
mock_context.c
    37:  struct i915_ppgtt *ppgtt;   [in mock_context(), local]
    41:  ppgtt = mock_ppgtt(i915, name);   [in mock_context()]
    42:  if (!ppgtt)   [in mock_context()]
    45:  ctx->vm = &ppgtt->vm;   [in mock_context()]
i915_gem_migrate.c
    375:  struct i915_ppgtt *ppgtt;   [in igt_async_migrate(), local]
    379:  ppgtt = i915_ppgtt_create(gt, 0);   [in igt_async_migrate()]
    380:  if (IS_ERR(ppgtt))   [in igt_async_migrate()]
    381:  return PTR_ERR(ppgtt);   [in igt_async_migrate()]
    425:  err = __igt_lmem_pages_migrate(gt, &ppgtt->vm, &deps, &spin,   [in igt_async_migrate()]
    436:  i915_vm_put(&ppgtt->vm);   [in igt_async_migrate()]
huge_pages.c
    424:  struct i915_ppgtt *ppgtt = arg;   [in igt_mock_exhaust_device_supported_pages(), local]
    425:  struct drm_i915_private *i915 = ppgtt->vm.i915;   [in igt_mock_exhaust_device_supported_pages()]
    461:  vma = i915_vma_instance(obj, &ppgtt->vm, NULL);   [in igt_mock_exhaust_device_supported_pages()]
    500:  struct i915_ppgtt *ppgtt = arg;   [in igt_mock_memory_region_huge_pages(), local]
    501:  struct drm_i915_private *i915 = ppgtt->vm.i915;   [in igt_mock_memory_region_huge_pages()]
    529:  vma = i915_vma_instance(obj, &ppgtt->vm, NULL);   [in igt_mock_memory_region_huge_pages()]
    578:  struct i915_ppgtt *ppgtt = arg;   [in igt_mock_ppgtt_misaligned_dma(), local]
    579:  struct drm_i915_private *i915 = ppgtt->vm.i915;   [in igt_mock_ppgtt_misaligned_dma()]
    621:  vma = i915_vma_instance(obj, &ppgtt->vm, NULL);   [in igt_mock_ppgtt_misaligned_dma()]
    1973:  struct i915_ppgtt *ppgtt;   [in i915_gem_huge_page_mock_selftests(), local]
    [all...]
/kernel/linux/linux-5.10/drivers/gpu/drm/i915/gvt/
scheduler.c
    418:  struct i915_ppgtt *ppgtt = i915_vm_to_ppgtt(ce->vm);   [in set_context_ppgtt_from_shadow(), local]
    422:  set_dma_address(ppgtt->pd, mm->ppgtt_mm.shadow_pdps[0]);   [in set_context_ppgtt_from_shadow()]
    426:  i915_pd_entry(ppgtt->pd, i);   [in set_context_ppgtt_from_shadow()]
    427:  /* skip now as current i915 ppgtt alloc won't allocate   [in set_context_ppgtt_from_shadow()]
    429:  shadow ppgtt. */   [in set_context_ppgtt_from_shadow()]
    530:  if (!bb->ppgtt) {   [in prepare_shadow_batch_buffer()]
    651:  gvt_vgpu_err("workload shadow ppgtt isn't ready\n");   [in intel_vgpu_shadow_mm_pin()]
    664:  gvt_vgpu_err("LRI shadow ppgtt fail to pin\n");   [in intel_vgpu_shadow_mm_pin()]
    877:  gvt_dbg_mm("4-level context ppgtt not match LRI command\n");   [in check_shadow_context_ppgtt()]
    1248:  struct i915_ppgtt *ppgtt)   [in i915_context_ppgtt_root_restore()]
    1247:  i915_context_ppgtt_root_restore(struct intel_vgpu_submission *s, struct i915_ppgtt *ppgtt)   [in i915_context_ppgtt_root_restore(), argument]
    1308:  i915_context_ppgtt_root_save(struct intel_vgpu_submission *s, struct i915_ppgtt *ppgtt)   [in i915_context_ppgtt_root_save(), argument]
    1340:  struct i915_ppgtt *ppgtt;   [in intel_vgpu_setup_submission(), local]
    [all...]
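set_context_ppgtt_from_shadow() points the i915 context's page-directory tree at the shadow tables GVT-g built for the guest: a 4-level ppgtt gets the shadow PML4 address on its single root, otherwise each PDP slot is redirected individually. The reconstruction below is hedged: the is_4level flag, the loop bound of 4, and the skip-on-missing-pd check are assumptions standing in for the real checks, while set_dma_address(), i915_pd_entry() and i915_vm_to_ppgtt() are taken from the listing.

/* Hedged reconstruction of the shadow page-directory hookup shown above;
 * GVT-g internals, builds only inside drivers/gpu/drm/i915. */
static void set_shadow_ppgtt_sketch(struct intel_context *ce,
                                    const dma_addr_t *shadow_pdps,
                                    bool is_4level)
{
        struct i915_ppgtt *ppgtt = i915_vm_to_ppgtt(ce->vm);
        int i;

        if (is_4level) {
                /* Single root: point it at the shadow PML4. */
                set_dma_address(ppgtt->pd, shadow_pdps[0]);
        } else {
                /* Legacy 3-level layout: redirect all four PDP entries. */
                for (i = 0; i < 4; i++) {
                        struct i915_page_directory *pd =
                                i915_pd_entry(ppgtt->pd, i);

                        if (!pd)  /* i915 may not have allocated this PDP yet */
                                continue;
                        set_dma_address(pd, shadow_pdps[i]);
                }
        }
}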
/kernel/linux/linux-6.6/drivers/gpu/drm/i915/gvt/
scheduler.c
    438:  struct i915_ppgtt *ppgtt = i915_vm_to_ppgtt(ce->vm);   [in set_context_ppgtt_from_shadow(), local]
    442:  set_dma_address(ppgtt->pd, mm->ppgtt_mm.shadow_pdps[0]);   [in set_context_ppgtt_from_shadow()]
    446:  i915_pd_entry(ppgtt->pd, i);   [in set_context_ppgtt_from_shadow()]
    447:  /* skip now as current i915 ppgtt alloc won't allocate   [in set_context_ppgtt_from_shadow()]
    449:  shadow ppgtt. */   [in set_context_ppgtt_from_shadow()]
    551:  if (!bb->ppgtt) {   [in prepare_shadow_batch_buffer()]
    699:  gvt_vgpu_err("workload shadow ppgtt isn't ready\n");   [in intel_vgpu_shadow_mm_pin()]
    712:  gvt_vgpu_err("LRI shadow ppgtt fail to pin\n");   [in intel_vgpu_shadow_mm_pin()]
    925:  gvt_dbg_mm("4-level context ppgtt not match LRI command\n");   [in check_shadow_context_ppgtt()]
    1295:  struct i915_ppgtt *ppgtt)   [in i915_context_ppgtt_root_restore()]
    1294:  i915_context_ppgtt_root_restore(struct intel_vgpu_submission *s, struct i915_ppgtt *ppgtt)   [in i915_context_ppgtt_root_restore(), argument]
    1355:  i915_context_ppgtt_root_save(struct intel_vgpu_submission *s, struct i915_ppgtt *ppgtt)   [in i915_context_ppgtt_root_save(), argument]
    1387:  struct i915_ppgtt *ppgtt;   [in intel_vgpu_setup_submission(), local]
    [all...]
/kernel/linux/linux-5.10/drivers/gpu/drm/i915/gem/
i915_gem_context.c
    177:  * We do not allow downgrading from full-ppgtt [to a shared   [in context_get_vm_rcu()]
    185:  * This ppgtt may have be reallocated between   [in context_get_vm_rcu()]
    188:  * of this ppgtt with that third context (and not   [in context_get_vm_rcu()]
    190:  * ppgtt after passing through the strong memory   [in context_get_vm_rcu()]
    194:  * Once we have acquired the current ppgtt of ctx,   [in context_get_vm_rcu()]
    631:  * the ppgtt).   [in context_close()]
    860:  struct i915_ppgtt *ppgtt;   [in i915_gem_create_context(), local]
    862:  ppgtt = i915_ppgtt_create(&i915->gt);   [in i915_gem_create_context()]
    863:  if (IS_ERR(ppgtt)) {   [in i915_gem_create_context()]
    865:  PTR_ERR(ppgtt));   [in i915_gem_create_context()]
    1013:  struct i915_ppgtt *ppgtt;   [in i915_gem_vm_create_ioctl(), local]
    1249:  struct i915_ppgtt *ppgtt = i915_vm_to_ppgtt(vm);   [in emit_ppgtt_update(), local]
    1266:  struct i915_ppgtt *ppgtt = i915_vm_to_ppgtt(vm);   [in emit_ppgtt_update(), local]
    [all...]
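Within i915_gem_create_context(), a context on full-ppgtt hardware receives a freshly created ppgtt of its own; the context then takes its own vm reference and the creation reference is dropped. A minimal sketch of that branch, assuming HAS_FULL_PPGTT() is the gating check; the helper that actually makes the context adopt the vm is paraphrased as a comment rather than named.

/* Sketch of the full-ppgtt branch visible above (linux-5.10).  Not the
 * literal i915_gem_create_context() body. */
static int give_context_its_own_ppgtt(struct drm_i915_private *i915,
                                      struct i915_gem_context *ctx)
{
        struct i915_ppgtt *ppgtt;

        if (!HAS_FULL_PPGTT(i915))
                return 0;  /* context will run from the shared/global GTT instead */

        ppgtt = i915_ppgtt_create(&i915->gt);
        if (IS_ERR(ppgtt)) {
                drm_dbg(&i915->drm, "PPGTT setup failed (%ld)\n",
                        PTR_ERR(ppgtt));
                return PTR_ERR(ppgtt);
        }

        /* ... ctx takes its own reference on &ppgtt->vm here ... */

        i915_vm_put(&ppgtt->vm);  /* drop the creation reference */
        return 0;
}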