Lines matching refs:vgpu

(Identifier cross-reference: each numbered line below is a line from the Intel GVT-g core header, apparently drivers/gpu/drm/i915/gvt/gvt.h, that uses the identifier vgpu. The leading number is that line's position in the header; multi-line declarations appear truncated where their continuation lines do not themselves use the identifier.)

112 #define vgpu_cfg_space(vgpu) ((vgpu)->cfg_space.virtual_cfg_space)
126 #define vgpu_opregion(vgpu) (&(vgpu->opregion))
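These two accessors return per-vGPU emulated state: vgpu_cfg_space() yields the byte array backing the virtual PCI configuration space, and vgpu_opregion() a pointer to the virtual ACPI OpRegion. A minimal usage sketch (sketch_read_cmd_reg is hypothetical; PCI_COMMAND is the standard PCI register offset):

    /* Sketch: read the vGPU's emulated PCI command register. */
    static u16 sketch_read_cmd_reg(struct intel_vgpu *vgpu)
    {
            return *(u16 *)(vgpu_cfg_space(vgpu) + PCI_COMMAND);
    }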
145 int (*init)(struct intel_vgpu *vgpu, intel_engine_mask_t engine_mask);
146 void (*clean)(struct intel_vgpu *vgpu, intel_engine_mask_t engine_mask);
147 void (*reset)(struct intel_vgpu *vgpu, intel_engine_mask_t engine_mask);
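These three callbacks form the per-engine submission lifecycle, each taking a mask of the engines to act on. A hedged sketch of wiring up such an ops table, assuming the enclosing struct is intel_vgpu_submission_ops as in the mainline driver (all sketch_* handlers are hypothetical):

    static int sketch_init(struct intel_vgpu *vgpu,
                           intel_engine_mask_t engine_mask)
    {
            /* set up per-engine submission state here */
            return 0;
    }

    static void sketch_clean(struct intel_vgpu *vgpu,
                             intel_engine_mask_t engine_mask) { }
    static void sketch_reset(struct intel_vgpu *vgpu,
                             intel_engine_mask_t engine_mask) { }

    static const struct intel_vgpu_submission_ops sketch_ops = {
            .init  = sketch_init,
            .clean = sketch_clean,
            .reset = sketch_reset,
    };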
185 * scheduler structure, so the two vGPU schedule fields below are protected
217 static inline void *intel_vgpu_vdev(struct intel_vgpu *vgpu)
219 return vgpu->vdev;
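intel_vgpu_vdev() returns the hypervisor module's private per-vGPU blob (KVMGT hangs its bookkeeping here). A sketch, with struct kvmgt_vdev standing in for whatever type the hypervisor backend actually allocates:

    /* Sketch: recover module-private state stashed in vgpu->vdev;
     * the cast target is illustrative only. */
    struct kvmgt_vdev *vdev = intel_vgpu_vdev(vgpu);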
293 * not yet protected by special locks (vgpu and scheduler lock).
296 /* scheduler scope lock, protects gvt and vgpu schedule-related data */
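Together these comments describe GVT's lock split: a coarse gvt->lock for device-model-wide state, and a narrower scheduler-scope lock for scheduling data. A sketch of the implied discipline, assuming the scheduler lock field is named sched_lock as in mainline:

    mutex_lock(&gvt->sched_lock);
    /* ... touch gvt/vgpu scheduling data only ... */
    mutex_unlock(&gvt->sched_lock);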
391 #define vgpu_aperture_offset(vgpu) ((vgpu)->gm.low_gm_node.start)
392 #define vgpu_hidden_offset(vgpu) ((vgpu)->gm.high_gm_node.start)
393 #define vgpu_aperture_sz(vgpu) ((vgpu)->gm.aperture_sz)
394 #define vgpu_hidden_sz(vgpu) ((vgpu)->gm.hidden_sz)
396 #define vgpu_aperture_pa_base(vgpu) \
397 (gvt_aperture_pa_base(vgpu->gvt) + vgpu_aperture_offset(vgpu))
399 #define vgpu_ggtt_gm_sz(vgpu) ((vgpu)->gm.aperture_sz + (vgpu)->gm.hidden_sz)
401 #define vgpu_aperture_pa_end(vgpu) \
402 (vgpu_aperture_pa_base(vgpu) + vgpu_aperture_sz(vgpu) - 1)
404 #define vgpu_aperture_gmadr_base(vgpu) (vgpu_aperture_offset(vgpu))
405 #define vgpu_aperture_gmadr_end(vgpu) \
406 (vgpu_aperture_gmadr_base(vgpu) + vgpu_aperture_sz(vgpu) - 1)
408 #define vgpu_hidden_gmadr_base(vgpu) (vgpu_hidden_offset(vgpu))
409 #define vgpu_hidden_gmadr_end(vgpu) \
410 (vgpu_hidden_gmadr_base(vgpu) + vgpu_hidden_sz(vgpu) - 1)
412 #define vgpu_fence_base(vgpu) (vgpu->fence.base)
413 #define vgpu_fence_sz(vgpu) (vgpu->fence.size)
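The macros above carve out each vGPU's two graphics-memory slices: an aperture (CPU-mappable, low GGTT) range and a hidden (high GGTT) range, each an offset plus a size, together with its slice of fence registers. Note the *_end macros are inclusive. A sketch built on the derived physical-address macros (sketch_pa_in_vgpu_aperture is hypothetical):

    /* Sketch: does a host physical address fall inside this
     * vGPU's aperture window? Bounds are inclusive. */
    static bool sketch_pa_in_vgpu_aperture(struct intel_vgpu *vgpu, u64 pa)
    {
            return pa >= vgpu_aperture_pa_base(vgpu) &&
                   pa <= vgpu_aperture_pa_end(vgpu);
    }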
427 int intel_vgpu_alloc_resource(struct intel_vgpu *vgpu,
429 void intel_vgpu_reset_resource(struct intel_vgpu *vgpu);
430 void intel_vgpu_free_resource(struct intel_vgpu *vgpu);
431 void intel_vgpu_write_fence(struct intel_vgpu *vgpu,
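These four manage the lifetime of the resources described by the macros above (graphics-memory slices and fences); the search truncates their parameter lists. A pairing sketch, assuming the mainline tail parameters (a creation-params struct for alloc, a fence index and value for write_fence):

    /* Sketch: typical create-path error handling. */
    ret = intel_vgpu_alloc_resource(vgpu, param);
    if (ret)
            return ret;
    /* ... and on teardown ... */
    intel_vgpu_free_resource(vgpu);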
436 #define vgpu_vreg_t(vgpu, reg) \
437 (*(u32 *)(vgpu->mmio.vreg + i915_mmio_reg_offset(reg)))
438 #define vgpu_vreg(vgpu, offset) \
439 (*(u32 *)(vgpu->mmio.vreg + (offset)))
440 #define vgpu_vreg64_t(vgpu, reg) \
441 (*(u64 *)(vgpu->mmio.vreg + i915_mmio_reg_offset(reg)))
442 #define vgpu_vreg64(vgpu, offset) \
443 (*(u64 *)(vgpu->mmio.vreg + (offset)))
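The vreg macros index into the vGPU's in-memory copy of the MMIO register file: the _t variants take a typed i915_reg_t and resolve it through i915_mmio_reg_offset(), while the plain variants take a raw byte offset. A sketch (sketch_set_vreg is hypothetical):

    /* Sketch: write a 32-bit virtual register by raw offset. */
    static void sketch_set_vreg(struct intel_vgpu *vgpu, u32 offset, u32 val)
    {
            vgpu_vreg(vgpu, offset) = val;
    }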
445 #define for_each_active_vgpu(gvt, vgpu, id) \
446 idr_for_each_entry((&(gvt)->vgpu_idr), (vgpu), (id)) \
447 for_each_if(vgpu->active)
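for_each_active_vgpu() walks the gvt IDR and skips any vGPU whose active flag is clear. A usage sketch (process_one_vgpu is a placeholder; taking gvt->lock across the walk follows the locking comments quoted above):

    struct intel_vgpu *vgpu;
    int id;

    mutex_lock(&gvt->lock);
    for_each_active_vgpu(gvt, vgpu, id)
            process_one_vgpu(vgpu);
    mutex_unlock(&gvt->lock);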
449 static inline void intel_vgpu_write_pci_bar(struct intel_vgpu *vgpu,
456 pval = (u32 *)(vgpu_cfg_space(vgpu) + offset);
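intel_vgpu_write_pci_bar() patches a BAR dword in the virtual config space; line 456 shows pval being aimed at the target offset. In the mainline body, a low BAR dword keeps its bottom four flag bits across a guest write and only the address bits are replaced, roughly:

    /* Sketch of the low-dword rule: BAR bits 3:0 encode PCI type
     * flags and must survive a guest rewrite of the address bits. */
    *pval = (val & GENMASK(31, 4)) | (*pval & GENMASK(3, 0));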
473 void intel_gvt_destroy_idle_vgpu(struct intel_vgpu *vgpu);
476 void intel_gvt_destroy_vgpu(struct intel_vgpu *vgpu);
477 void intel_gvt_release_vgpu(struct intel_vgpu *vgpu);
478 void intel_gvt_reset_vgpu_locked(struct intel_vgpu *vgpu, bool dmlr,
480 void intel_gvt_reset_vgpu(struct intel_vgpu *vgpu);
481 void intel_gvt_activate_vgpu(struct intel_vgpu *vgpu);
482 void intel_gvt_deactivate_vgpu(struct intel_vgpu *vgpu);
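These entry points span the vGPU lifecycle: creation (its declaration is not listed because those lines do not use the bare identifier vgpu), activate to make the vGPU schedulable, deactivate to stop it, release to drop runtime resources, and destroy to free it; the _locked reset variant's truncated parameters are, in mainline, a device-model-level-reset flag and an engine mask. A teardown-ordering sketch:

    /* Sketch: tearing down a currently active vGPU. */
    intel_gvt_deactivate_vgpu(vgpu);  /* stop scheduling it */
    intel_gvt_release_vgpu(vgpu);     /* drop runtime resources */
    intel_gvt_destroy_vgpu(vgpu);     /* free the vGPU */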
485 #define vgpu_gmadr_is_aperture(vgpu, gmadr) \
486 ((gmadr >= vgpu_aperture_gmadr_base(vgpu)) && \
487 (gmadr <= vgpu_aperture_gmadr_end(vgpu)))
489 #define vgpu_gmadr_is_hidden(vgpu, gmadr) \
490 ((gmadr >= vgpu_hidden_gmadr_base(vgpu)) && \
491 (gmadr <= vgpu_hidden_gmadr_end(vgpu)))
493 #define vgpu_gmadr_is_valid(vgpu, gmadr) \
494 ((vgpu_gmadr_is_aperture(vgpu, gmadr) || \
495 (vgpu_gmadr_is_hidden(vgpu, gmadr))))
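A guest graphics-memory address (gmadr) is valid only if it lands in one of the vGPU's two slices. The usual validation pattern, sketched (sketch_check_gmadr is hypothetical):

    static int sketch_check_gmadr(struct intel_vgpu *vgpu, u64 gmadr)
    {
            if (!vgpu_gmadr_is_valid(vgpu, gmadr))
                    return -EINVAL;
            return 0;
    }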
509 bool intel_gvt_ggtt_validate_range(struct intel_vgpu *vgpu, u64 addr, u32 size);
510 int intel_gvt_ggtt_gmadr_g2h(struct intel_vgpu *vgpu, u64 g_addr, u64 *h_addr);
511 int intel_gvt_ggtt_gmadr_h2g(struct intel_vgpu *vgpu, u64 h_addr, u64 *g_addr);
512 int intel_gvt_ggtt_index_g2h(struct intel_vgpu *vgpu, unsigned long g_index,
514 int intel_gvt_ggtt_h2g_index(struct intel_vgpu *vgpu, unsigned long h_index,
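The g2h/h2g helpers translate graphics-memory addresses (and GGTT entry indices) between the guest's view and the host's, returning nonzero for an out-of-range input. A sketch:

    /* Sketch: guest-to-host address translation with validation. */
    u64 h_addr;

    if (intel_gvt_ggtt_gmadr_g2h(vgpu, g_addr, &h_addr))
            return -EINVAL;
    /* h_addr is now meaningful in the host GGTT address space */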
517 void intel_vgpu_init_cfg_space(struct intel_vgpu *vgpu,
519 void intel_vgpu_reset_cfg_space(struct intel_vgpu *vgpu);
521 int intel_vgpu_emulate_cfg_read(struct intel_vgpu *vgpu, unsigned int offset,
524 int intel_vgpu_emulate_cfg_write(struct intel_vgpu *vgpu, unsigned int offset,
527 void intel_vgpu_emulate_hotplug(struct intel_vgpu *vgpu, bool connected);
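The emulate_cfg_read/write pair services trapped guest accesses to PCI configuration space; in mainline the truncated tail parameters are a data buffer and a byte count. A dispatch sketch:

    /* Sketch: forward a trapped config-space read to the device model. */
    ret = intel_vgpu_emulate_cfg_read(vgpu, offset, p_data, bytes);
    if (ret)
            return ret;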
529 static inline u64 intel_vgpu_get_bar_gpa(struct intel_vgpu *vgpu, int bar)
532 return (*(u64 *)(vgpu->cfg_space.virtual_cfg_space + bar)) &
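Line 532's expression is cut off by the search; in the mainline body the 64-bit BAR value is masked to strip the low PCI flag bits, leaving the guest-physical base address:

    /* Sketch of the elided tail, per the mainline body. */
    return (*(u64 *)(vgpu->cfg_space.virtual_cfg_space + bar)) &
                    PCI_BASE_ADDRESS_MEM_MASK;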
536 void intel_vgpu_clean_opregion(struct intel_vgpu *vgpu);
537 int intel_vgpu_init_opregion(struct intel_vgpu *vgpu);
538 int intel_vgpu_opregion_base_write_handler(struct intel_vgpu *vgpu, u32 gpa);
540 int intel_vgpu_emulate_opregion_request(struct intel_vgpu *vgpu, u32 swsci);
541 void populate_pvinfo_page(struct intel_vgpu *vgpu);
544 void enter_failsafe_mode(struct intel_vgpu *vgpu, int reason);
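enter_failsafe_mode() is the device model's last-resort escape when emulation cannot safely continue; the reason argument is one of the GVT_FAILSAFE_* codes in mainline. A sketch:

    /* Sketch: bail out on guest behavior the device model cannot
     * emulate (reason code assumed from the mainline enum). */
    enter_failsafe_mode(vgpu, GVT_FAILSAFE_UNSUPPORTED_GUEST);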
557 void (*vgpu_destroy)(struct intel_vgpu *vgpu);
558 void (*vgpu_release)(struct intel_vgpu *vgpu);
565 int (*vgpu_query_plane)(struct intel_vgpu *vgpu, void *);
566 int (*vgpu_get_dmabuf)(struct intel_vgpu *vgpu, unsigned int);
569 void (*emulate_hotplug)(struct intel_vgpu *vgpu, bool connected);
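These five function pointers appear to belong to the intel_gvt_ops table that the core device model exports to the hypervisor module (KVMGT), which calls back through it rather than linking against the core directly. A dispatch sketch, with ops standing for a pointer to that table:

    /* Sketch: hypervisor-side hotplug notification via the ops table. */
    ops->emulate_hotplug(vgpu, true);  /* connector now connected */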
685 void intel_gvt_debugfs_add_vgpu(struct intel_vgpu *vgpu);
686 void intel_gvt_debugfs_remove_vgpu(struct intel_vgpu *vgpu);
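The debugfs hooks bracket a vGPU's lifetime. A pairing sketch:

    /* Sketch: expose per-vGPU debugfs at creation, remove at destruction. */
    intel_gvt_debugfs_add_vgpu(vgpu);
    /* ... vGPU lives ... */
    intel_gvt_debugfs_remove_vgpu(vgpu);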