Searched refs:indirect_ctx (Results 1 - 11 of 11) sorted by relevance
/kernel/linux/linux-6.6/drivers/gpu/drm/i915/gvt/
scheduler.c
     414:  if (!wa_ctx->indirect_ctx.obj)    in release_shadow_wa_ctx()
     417:  i915_gem_object_lock(wa_ctx->indirect_ctx.obj, NULL);    in release_shadow_wa_ctx()
     418:  i915_gem_object_unpin_map(wa_ctx->indirect_ctx.obj);    in release_shadow_wa_ctx()
     419:  i915_gem_object_unlock(wa_ctx->indirect_ctx.obj);    in release_shadow_wa_ctx()
     420:  i915_gem_object_put(wa_ctx->indirect_ctx.obj);    in release_shadow_wa_ctx()
     422:  wa_ctx->indirect_ctx.obj = NULL;    in release_shadow_wa_ctx()
     423:  wa_ctx->indirect_ctx.shadow_va = NULL;    in release_shadow_wa_ctx()
     506:  workload->wa_ctx.indirect_ctx.size) {    in intel_gvt_scan_and_shadow_workload()
     603:  (~INDIRECT_CTX_ADDR_MASK)) | wa_ctx->indirect_ctx.shadow_gma;    in update_wa_ctx_2_shadow_ctx()
     610:  (unsigned char *)wa_ctx->indirect_ctx ...    in prepare_shadow_wa_ctx()
    1636:  u32 head, tail, start, ctl, ctx_ctl, per_ctx, indirect_ctx;    intel_vgpu_create_workload() local
    [more hits in this file not shown]
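Read together, the scheduler.c hits at 414-423 form the teardown path for the shadowed indirect-ctx object. A minimal reconstruction is sketched below: the statements are taken from the matched lines, while the parameter type, the early return, and the comments are assumptions added so the fragment reads as a whole.

/* Sketch assembled from the matched lines above; not a verbatim copy. */
static void release_shadow_wa_ctx(struct intel_shadow_wa_ctx *wa_ctx)
{
	/* Nothing to release if the indirect ctx was never shadowed. */
	if (!wa_ctx->indirect_ctx.obj)
		return;

	/* Drop the kernel mapping and the reference under the object lock. */
	i915_gem_object_lock(wa_ctx->indirect_ctx.obj, NULL);
	i915_gem_object_unpin_map(wa_ctx->indirect_ctx.obj);
	i915_gem_object_unlock(wa_ctx->indirect_ctx.obj);
	i915_gem_object_put(wa_ctx->indirect_ctx.obj);

	/* Forget the stale object and mapping pointers. */
	wa_ctx->indirect_ctx.obj = NULL;
	wa_ctx->indirect_ctx.shadow_va = NULL;
}

Note that the 5.10 hits further down (396-403) show the same unpin/put/NULL sequence without the explicit object lock/unlock pair.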
cmd_parser.c
    2884:  if (WARN_ON(!IS_ALIGNED(wa_ctx->indirect_ctx.guest_gma,    in scan_wa_ctx()
    2888:  ring_tail = wa_ctx->indirect_ctx.size + 3 * sizeof(u32);    in scan_wa_ctx()
    2889:  ring_size = round_up(wa_ctx->indirect_ctx.size + CACHELINE_BYTES,    in scan_wa_ctx()
    2891:  gma_head = wa_ctx->indirect_ctx.guest_gma;    in scan_wa_ctx()
    2892:  gma_tail = wa_ctx->indirect_ctx.guest_gma + ring_tail;    in scan_wa_ctx()
    2898:  s.ring_start = wa_ctx->indirect_ctx.guest_gma;    in scan_wa_ctx()
    2902:  s.rb_va = wa_ctx->indirect_ctx.shadow_va;    in scan_wa_ctx()
    2911:  wa_ctx->indirect_ctx.guest_gma, ring_size);    in scan_wa_ctx()
    2996:  int ctx_size = wa_ctx->indirect_ctx.size;    in shadow_indirect_ctx()
    2997:  unsigned long guest_gma = wa_ctx->indirect_ctx ...    in shadow_indirect_ctx()
    [more hits in this file not shown]
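The cmd_parser.c hits show the ring geometry scan_wa_ctx() derives before walking the guest's indirect-ctx buffer: the scan window covers the guest data plus three trailing dwords, rounded up past a cacheline. The round_up() alignment argument is cut off in the hit, so the standalone sketch below takes it as a parameter instead of guessing it; everything other than the quoted expressions (names, the example values) is illustrative.

#include <stdint.h>
#include <stdio.h>

#define CACHELINE_BYTES 64u                       /* i915 uses 64-byte cachelines */
#define ROUND_UP(x, a)  ((((x) + (a) - 1) / (a)) * (a))

/* Scan window over the guest indirect-ctx buffer, as computed in scan_wa_ctx(). */
struct wa_ctx_scan_window {
	uint64_t gma_head;    /* start of the guest buffer                     */
	uint64_t gma_tail;    /* just past the guest data + 3 trailing dwords  */
	uint64_t gma_bottom;  /* end of the rounded-up window (5.10 hit 2856)  */
};

static struct wa_ctx_scan_window
compute_scan_window(uint64_t guest_gma, uint32_t ctx_size, uint32_t align)
{
	uint32_t ring_tail = ctx_size + 3 * sizeof(uint32_t);
	uint32_t ring_size = ROUND_UP(ctx_size + CACHELINE_BYTES, align);

	return (struct wa_ctx_scan_window){
		.gma_head   = guest_gma,
		.gma_tail   = guest_gma + ring_tail,
		.gma_bottom = guest_gma + ring_size,
	};
}

int main(void)
{
	/* Example: a 4 KiB guest indirect ctx at GMA 0x100000, page-sized rounding. */
	struct wa_ctx_scan_window w = compute_scan_window(0x100000, 4096, 4096);

	printf("head=%#llx tail=%#llx bottom=%#llx\n",
	       (unsigned long long)w.gma_head,
	       (unsigned long long)w.gma_tail,
	       (unsigned long long)w.gma_bottom);
	return 0;
}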
scheduler.h
      80:  struct shadow_indirect_ctx indirect_ctx;    member
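scheduler.h line 80 only shows that the workaround context embeds a struct shadow_indirect_ctx. Collecting the fields the other hits dereference (obj, guest_gma, shadow_gma, shadow_va, size) gives roughly the shape below; the types, field order, and comments are assumptions, not the header's declaration.

/* Field set inferred from the hits above; declaration details are assumed. */
struct shadow_indirect_ctx {
	struct drm_i915_gem_object *obj;  /* shadow backing object, released in release_shadow_wa_ctx() */
	unsigned long guest_gma;          /* guest graphics memory address of the original buffer       */
	unsigned long shadow_gma;         /* address patched in by update_wa_ctx_2_shadow_ctx(), hit 603 */
	void *shadow_va;                  /* kernel mapping of the shadow copy                           */
	u32 size;                         /* indirect-ctx size in bytes                                  */
};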
/kernel/linux/linux-5.10/drivers/gpu/drm/i915/gvt/
scheduler.c
     396:  if (!wa_ctx->indirect_ctx.obj)    in release_shadow_wa_ctx()
     399:  i915_gem_object_unpin_map(wa_ctx->indirect_ctx.obj);    in release_shadow_wa_ctx()
     400:  i915_gem_object_put(wa_ctx->indirect_ctx.obj);    in release_shadow_wa_ctx()
     402:  wa_ctx->indirect_ctx.obj = NULL;    in release_shadow_wa_ctx()
     403:  wa_ctx->indirect_ctx.shadow_va = NULL;    in release_shadow_wa_ctx()
     486:  workload->wa_ctx.indirect_ctx.size) {    in intel_gvt_scan_and_shadow_workload()
     572:  (~INDIRECT_CTX_ADDR_MASK)) | wa_ctx->indirect_ctx.shadow_gma;    in update_wa_ctx_2_shadow_ctx()
     579:  (unsigned char *)wa_ctx->indirect_ctx.shadow_va +    in prepare_shadow_wa_ctx()
     580:  wa_ctx->indirect_ctx.size;    in prepare_shadow_wa_ctx()
     582:  if (wa_ctx->indirect_ctx ...    in prepare_shadow_wa_ctx()
    1592:  u32 head, tail, start, ctl, ctx_ctl, per_ctx, indirect_ctx;    intel_vgpu_create_workload() local
    [more hits in this file not shown]
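The 5.10 hits at 579-580 make explicit what the 6.6 hit at 610 truncates: prepare_shadow_wa_ctx() places the per-ctx batch immediately after the indirect-ctx data inside the same shadow mapping. A small sketch of that address computation, reusing the shadow_indirect_ctx field sketch above; the wrapper function and the NULL return for an empty indirect ctx are assumptions.

/*
 * Sketch, not verbatim: where the shadowed per-ctx batch starts, given
 * the indirect-ctx shadow mapping and size (hits 579-580 above).
 */
static unsigned char *per_ctx_shadow_va(const struct shadow_indirect_ctx *ic)
{
	if (!ic->size)          /* assumed guard; hit 582 is cut off here */
		return NULL;

	return (unsigned char *)ic->shadow_va + ic->size;
}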
cmd_parser.c
    2847:  if (WARN_ON(!IS_ALIGNED(wa_ctx->indirect_ctx.guest_gma,    in scan_wa_ctx()
    2851:  ring_tail = wa_ctx->indirect_ctx.size + 3 * sizeof(u32);    in scan_wa_ctx()
    2852:  ring_size = round_up(wa_ctx->indirect_ctx.size + CACHELINE_BYTES,    in scan_wa_ctx()
    2854:  gma_head = wa_ctx->indirect_ctx.guest_gma;    in scan_wa_ctx()
    2855:  gma_tail = wa_ctx->indirect_ctx.guest_gma + ring_tail;    in scan_wa_ctx()
    2856:  gma_bottom = wa_ctx->indirect_ctx.guest_gma + ring_size;    in scan_wa_ctx()
    2862:  s.ring_start = wa_ctx->indirect_ctx.guest_gma;    in scan_wa_ctx()
    2866:  s.rb_va = wa_ctx->indirect_ctx.shadow_va;    in scan_wa_ctx()
    2875:  wa_ctx->indirect_ctx.guest_gma, ring_size);    in scan_wa_ctx()
    2960:  int ctx_size = wa_ctx->indirect_ctx ...    in shadow_indirect_ctx()
    [more hits in this file not shown]
scheduler.h
      75:  struct shadow_indirect_ctx indirect_ctx;    member
/kernel/linux/linux-6.6/drivers/mfd/
intel-m10-bmc-pmci.c
      41:  struct indirect_ctx {    struct
      46:  static int indirect_clear_cmd(struct indirect_ctx *ctx)    in indirect_clear_cmd()
      64:  struct indirect_ctx *ctx = context;    in indirect_reg_read()
      96:  struct indirect_ctx *ctx = context;    in indirect_reg_write()
     385:  struct indirect_ctx *ctx;    in m10bmc_pmci_probe()
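The intel-m10-bmc-pmci.c hits are an unrelated indirect_ctx: a private context handed to regmap reg_read()/reg_write() callbacks that tunnel BMC register accesses through an indirect-access window (the struct body itself is cut off in the hit at line 41). The sketch below only illustrates that callback-plus-context pattern; the field names, register offsets, and command values are invented for the sketch and are not the driver's.

#include <linux/io.h>
#include <linux/regmap.h>

/* Illustrative context; the real struct indirect_ctx layout is not visible. */
struct indirect_ctx_sketch {
	void __iomem *base;              /* indirect-access register window */
};

/* Hypothetical window layout, for the sketch only. */
#define SKETCH_CMD     0x0
#define SKETCH_ADDR    0x4
#define SKETCH_DATA    0x8
#define SKETCH_CMD_RD  0x1
#define SKETCH_CMD_WR  0x2

static int sketch_reg_read(void *context, unsigned int reg, unsigned int *val)
{
	struct indirect_ctx_sketch *ctx = context;   /* mirrors hit at line 64 */

	writel(reg, ctx->base + SKETCH_ADDR);
	writel(SKETCH_CMD_RD, ctx->base + SKETCH_CMD);
	/* A real driver would poll a status/ack bit here before reading. */
	*val = readl(ctx->base + SKETCH_DATA);
	return 0;
}

static int sketch_reg_write(void *context, unsigned int reg, unsigned int val)
{
	struct indirect_ctx_sketch *ctx = context;   /* mirrors hit at line 96 */

	writel(reg, ctx->base + SKETCH_ADDR);
	writel(val, ctx->base + SKETCH_DATA);
	writel(SKETCH_CMD_WR, ctx->base + SKETCH_CMD);
	return 0;
}

/* Wiring the callbacks up through a no-bus regmap. */
static const struct regmap_config sketch_regmap_config = {
	.reg_bits  = 32,
	.val_bits  = 32,
	.reg_read  = sketch_reg_read,
	.reg_write = sketch_reg_write,
};

At probe time such a context is allocated and passed to the regmap core as the callback context, which is why indirect_reg_read() and indirect_reg_write() recover it with "struct indirect_ctx *ctx = context;" in the hits above.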
/kernel/linux/linux-6.6/drivers/gpu/drm/i915/gt/
intel_lrc.c
     885:  if (wa_ctx->indirect_ctx.size) {    in init_wa_bb_regs()
     888:  wa_ctx->indirect_ctx.offset,    in init_wa_bb_regs()
     889:  wa_ctx->indirect_ctx.size);    in init_wa_bb_regs()
    1506:  GEM_BUG_ON(engine->wa_ctx.indirect_ctx.size);    in lrc_update_regs()
    1604:  * Typically we only have one indirect_ctx and per_ctx batch buffer which are
    1779:  &wa_ctx->indirect_ctx, &wa_ctx->per_ctx    in lrc_init_wa_ctx()
intel_engine_types.h
      96:  } indirect_ctx, per_ctx;    member
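intel_engine_types.h line 96 shows indirect_ctx and per_ctx declared as two instances of the same member type. From the fields the intel_lrc.c hits touch (offset, size) and the element type named in the 5.10 hit at 3949 further down, the pair looks roughly like the sketch below; the container name, the vma member, and the exact types are assumptions.

/* Shape inferred from the hits; exact declaration details are assumed. */
struct i915_wa_ctx_bb {
	u32 offset;   /* byte offset of this batch within the shared wa_ctx buffer */
	u32 size;     /* 0 means "not used"; see the size checks in the hits       */
};

struct i915_ctx_workarounds_sketch {
	struct i915_wa_ctx_bb indirect_ctx, per_ctx;  /* the member at line 96  */
	struct i915_vma *vma;                         /* assumed backing buffer */
};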
/kernel/linux/linux-5.10/drivers/gpu/drm/i915/gt/
intel_engine_types.h
      99:  } indirect_ctx, per_ctx;    member
intel_lrc.c
    3485:  GEM_BUG_ON(engine->wa_ctx.indirect_ctx.size);    in __execlists_update_reg_state()
    3734:  * Typically we only have one indirect_ctx and per_ctx batch buffer which are
    3949:  struct i915_wa_ctx_bb *wa_bb[2] = { &wa_ctx->indirect_ctx,    in intel_init_workaround_bb()
    5253:  if (wa_ctx->indirect_ctx.size) {    in init_wa_bb_reg_state()
    5256:  wa_ctx->indirect_ctx.offset,    in init_wa_bb_reg_state()
    5257:  wa_ctx->indirect_ctx.size);    in init_wa_bb_reg_state()
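The hit at 3949 shows both workaround batches being built through one array, struct i915_wa_ctx_bb *wa_bb[2] = { &wa_ctx->indirect_ctx, &wa_ctx->per_ctx }. The standalone sketch below illustrates that construction pattern in isolation: each emitter appends into a shared buffer while its offset and size are recorded for later context-image programming (a size of 0 is then skipped by the checks visible at 5253 and 885). The emitter type and all names are illustrative, not the i915 code.

#include <stddef.h>
#include <stdint.h>

struct wa_bb {                        /* stand-in for struct i915_wa_ctx_bb */
	uint32_t offset;
	uint32_t size;
};

/* An emitter appends dwords after `batch` and returns the new end pointer. */
typedef uint32_t *(*wa_bb_emit_fn)(uint32_t *batch);

/*
 * Build the indirect-ctx and per-ctx batches back to back in one buffer,
 * recording where each one landed.  Loosely modelled on the wa_bb[2]
 * pattern in the hit at 3949; details are illustrative.
 */
static void build_wa_batches(uint32_t *buf,
			     struct wa_bb *indirect_ctx, struct wa_bb *per_ctx,
			     wa_bb_emit_fn emit_indirect, wa_bb_emit_fn emit_per)
{
	struct wa_bb *wa_bb[2] = { indirect_ctx, per_ctx };
	wa_bb_emit_fn emit[2] = { emit_indirect, emit_per };
	uint32_t *ptr = buf;
	size_t i;

	for (i = 0; i < 2; i++) {
		wa_bb[i]->offset = (uint32_t)((ptr - buf) * sizeof(uint32_t));
		ptr = emit[i](ptr);
		wa_bb[i]->size = (uint32_t)((ptr - buf) * sizeof(uint32_t)) -
				 wa_bb[i]->offset;
	}
}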
Completed in 25 milliseconds