Lines matching refs:vgpu
111 #define vgpu_cfg_space(vgpu) ((vgpu)->cfg_space.virtual_cfg_space)
125 #define vgpu_opregion(vgpu) (&((vgpu)->opregion))
145 int (*init)(struct intel_vgpu *vgpu, intel_engine_mask_t engine_mask);
146 void (*clean)(struct intel_vgpu *vgpu, intel_engine_mask_t engine_mask);
147 void (*reset)(struct intel_vgpu *vgpu, intel_engine_mask_t engine_mask);
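These three callbacks form a per-vGPU engine ops table. A minimal sketch of how such a table is declared and invoked, assuming the usual pattern; the struct and function names below are illustrative, not the file's own:

	/* Illustrative ops table mirroring the init/clean/reset triple above. */
	struct example_vgpu_engine_ops {
		int (*init)(struct intel_vgpu *vgpu, intel_engine_mask_t engine_mask);
		void (*clean)(struct intel_vgpu *vgpu, intel_engine_mask_t engine_mask);
		void (*reset)(struct intel_vgpu *vgpu, intel_engine_mask_t engine_mask);
	};

	/* Typical call pattern: init once for the engines in the mask,
	 * reset on vGPU-level reset, clean on teardown.
	 * ALL_ENGINES comes from i915. */
	static int example_enable_submission(struct intel_vgpu *vgpu,
					     const struct example_vgpu_engine_ops *ops)
	{
		return ops->init(vgpu, ALL_ENGINES);
	}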
193 * scheduler structure, so the two vGPU scheduling fields below are protected
325 * not yet protected by dedicated locks (the vgpu and scheduler locks).
328 /* scheduler-scope lock, protects gvt and vgpu scheduling-related data */
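A sketch of the locking rule those comments describe, assuming sched_lock is a mutex in the gvt structure and that sched_data/sched_ctl are the two per-vGPU scheduling fields the comment refers to (the field names are assumptions here):

	/* Scheduler-scope data is serialized by gvt->sched_lock, not vgpu_lock. */
	mutex_lock(&gvt->sched_lock);
	/* ... read/update vgpu->sched_data, vgpu->sched_ctl ... */
	mutex_unlock(&gvt->sched_lock);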
429 #define vgpu_aperture_offset(vgpu) ((vgpu)->gm.low_gm_node.start)
430 #define vgpu_hidden_offset(vgpu) ((vgpu)->gm.high_gm_node.start)
431 #define vgpu_aperture_sz(vgpu) ((vgpu)->gm.aperture_sz)
432 #define vgpu_hidden_sz(vgpu) ((vgpu)->gm.hidden_sz)
434 #define vgpu_aperture_pa_base(vgpu) \
435 (gvt_aperture_pa_base((vgpu)->gvt) + vgpu_aperture_offset(vgpu))
437 #define vgpu_ggtt_gm_sz(vgpu) ((vgpu)->gm.aperture_sz + (vgpu)->gm.hidden_sz)
439 #define vgpu_aperture_pa_end(vgpu) \
440 (vgpu_aperture_pa_base(vgpu) + vgpu_aperture_sz(vgpu) - 1)
442 #define vgpu_aperture_gmadr_base(vgpu) (vgpu_aperture_offset(vgpu))
443 #define vgpu_aperture_gmadr_end(vgpu) \
444 (vgpu_aperture_gmadr_base(vgpu) + vgpu_aperture_sz(vgpu) - 1)
446 #define vgpu_hidden_gmadr_base(vgpu) (vgpu_hidden_offset(vgpu))
447 #define vgpu_hidden_gmadr_end(vgpu) \
448 (vgpu_hidden_gmadr_base(vgpu) + vgpu_hidden_sz(vgpu) - 1)
450 #define vgpu_fence_base(vgpu) ((vgpu)->fence.base)
451 #define vgpu_fence_sz(vgpu) ((vgpu)->fence.size)
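A small sketch of how these macros compose. Each vGPU owns a low (aperture, CPU-mappable) and a high (hidden, GPU-only) slice of global graphics memory; the *_gmadr_* macros give the guest-visible range, the *_pa_* macros the host-physical range, and the fence macros the vGPU's fence-register allocation. The helper name is illustrative:

	/* Illustrative: dump one vGPU's graphics-memory and fence layout. */
	static void example_dump_layout(struct intel_vgpu *vgpu)
	{
		pr_info("aperture gmadr [0x%llx..0x%llx] pa [0x%llx..0x%llx]\n",
			(u64)vgpu_aperture_gmadr_base(vgpu),
			(u64)vgpu_aperture_gmadr_end(vgpu),
			(u64)vgpu_aperture_pa_base(vgpu),
			(u64)vgpu_aperture_pa_end(vgpu));
		pr_info("hidden gmadr [0x%llx..0x%llx], total ggtt gm 0x%llx\n",
			(u64)vgpu_hidden_gmadr_base(vgpu),
			(u64)vgpu_hidden_gmadr_end(vgpu),
			(u64)vgpu_ggtt_gm_sz(vgpu));
		pr_info("fences: base %u, count %u\n",
			(u32)vgpu_fence_base(vgpu), (u32)vgpu_fence_sz(vgpu));
	}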
456 int intel_vgpu_alloc_resource(struct intel_vgpu *vgpu,
458 void intel_vgpu_reset_resource(struct intel_vgpu *vgpu);
459 void intel_vgpu_free_resource(struct intel_vgpu *vgpu);
460 void intel_vgpu_write_fence(struct intel_vgpu *vgpu,
465 #define vgpu_vreg_t(vgpu, reg) \
466 (*(u32 *)((vgpu)->mmio.vreg + i915_mmio_reg_offset(reg)))
467 #define vgpu_vreg(vgpu, offset) \
468 (*(u32 *)((vgpu)->mmio.vreg + (offset)))
469 #define vgpu_vreg64_t(vgpu, reg) \
470 (*(u64 *)((vgpu)->mmio.vreg + i915_mmio_reg_offset(reg)))
471 #define vgpu_vreg64(vgpu, offset) \
472 (*(u64 *)((vgpu)->mmio.vreg + (offset)))
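The vreg macros yield lvalues into the vGPU's virtual MMIO image, so they can be read or assigned directly; the *_t variants take a typed i915_reg_t, the plain variants a raw byte offset. A minimal sketch (the register and offset are placeholders):

	/* Sketch: read-modify-write a 32-bit virtual register by i915_reg_t,
	 * and write a 64-bit one by raw offset. Only the vGPU's software
	 * view changes; no real hardware access happens here. */
	static void example_touch_vregs(struct intel_vgpu *vgpu, i915_reg_t reg)
	{
		vgpu_vreg_t(vgpu, reg) |= BIT(0);	/* typed-register form */
		vgpu_vreg64(vgpu, 0x2000) = 0;		/* raw-offset form (placeholder offset) */
	}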
474 #define for_each_active_vgpu(gvt, vgpu, id) \
475 idr_for_each_entry((&(gvt)->vgpu_idr), (vgpu), (id)) \
476 for_each_if(test_bit(INTEL_VGPU_STATUS_ACTIVE, (vgpu)->status))
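Usage sketch for the iterator: it walks the gvt-wide vGPU IDR and skips entries whose ACTIVE status bit is clear. The lock shown is an assumption about what guards the IDR:

	struct intel_vgpu *vgpu;
	int id;

	mutex_lock(&gvt->lock);		/* assumed IDR guard */
	for_each_active_vgpu(gvt, vgpu, id)
		pr_info("vGPU %d active\n", id);
	mutex_unlock(&gvt->lock);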
478 static inline void intel_vgpu_write_pci_bar(struct intel_vgpu *vgpu,
485 pval = (u32 *)(vgpu_cfg_space(vgpu) + offset);
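Most of this helper's body is elided by the match listing. A sketch of what a BAR-write helper of this shape typically does (preserve the read-only low attribute bits of the low dword, store the high dword verbatim); the mask below is an assumption, not the elided code:

	/* Sketch only: 'low' selects the low dword of a 64-bit BAR; its
	 * bits 3:0 encode read-only attributes and must survive a guest
	 * write. */
	u32 *pval = (u32 *)(vgpu_cfg_space(vgpu) + offset);

	if (low)
		*pval = (val & ~0xf) | (*pval & 0xf);	/* keep attribute bits */
	else
		*pval = val;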
502 void intel_gvt_destroy_idle_vgpu(struct intel_vgpu *vgpu);
503 int intel_gvt_create_vgpu(struct intel_vgpu *vgpu,
505 void intel_gvt_destroy_vgpu(struct intel_vgpu *vgpu);
506 void intel_gvt_release_vgpu(struct intel_vgpu *vgpu);
507 void intel_gvt_reset_vgpu_locked(struct intel_vgpu *vgpu, bool dmlr,
509 void intel_gvt_reset_vgpu(struct intel_vgpu *vgpu);
510 void intel_gvt_activate_vgpu(struct intel_vgpu *vgpu);
511 void intel_gvt_deactivate_vgpu(struct intel_vgpu *vgpu);
513 int intel_gvt_set_opregion(struct intel_vgpu *vgpu);
514 int intel_gvt_set_edid(struct intel_vgpu *vgpu, int port_num);
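The prototypes above define the vGPU lifecycle. A sketch of the ordering they imply (arguments elided by the listing stay elided; error handling omitted):

	/* create -> activate -> run -> (reset as needed) -> deactivate ->
	 * release -> destroy */
	intel_gvt_activate_vgpu(vgpu);
	/* ...guest runs; on FLR or device-model-level reset: */
	intel_gvt_reset_vgpu(vgpu);
	/* teardown */
	intel_gvt_deactivate_vgpu(vgpu);
	intel_gvt_release_vgpu(vgpu);
	intel_gvt_destroy_vgpu(vgpu);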
517 #define vgpu_gmadr_is_aperture(vgpu, gmadr) \
518 (((gmadr) >= vgpu_aperture_gmadr_base(vgpu)) && \
519 ((gmadr) <= vgpu_aperture_gmadr_end(vgpu)))
521 #define vgpu_gmadr_is_hidden(vgpu, gmadr) \
522 (((gmadr) >= vgpu_hidden_gmadr_base(vgpu)) && \
523 ((gmadr) <= vgpu_hidden_gmadr_end(vgpu)))
525 #define vgpu_gmadr_is_valid(vgpu, gmadr) \
526 ((vgpu_gmadr_is_aperture(vgpu, gmadr) || \
527 (vgpu_gmadr_is_hidden(vgpu, gmadr))))
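Validity sketch: a guest graphics address is legal only if it falls in the vGPU's own aperture or hidden slice, which is exactly what vgpu_gmadr_is_valid() or's together:

	/* Sketch: classify a guest graphics memory address (gmadr). */
	bool cpu_mappable;

	if (!vgpu_gmadr_is_valid(vgpu, gmadr))
		return -EINVAL;		/* outside both ranges */
	/* true: CPU-mappable aperture; false: GPU-only hidden range */
	cpu_mappable = vgpu_gmadr_is_aperture(vgpu, gmadr);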
541 bool intel_gvt_ggtt_validate_range(struct intel_vgpu *vgpu, u64 addr, u32 size);
542 int intel_gvt_ggtt_gmadr_g2h(struct intel_vgpu *vgpu, u64 g_addr, u64 *h_addr);
543 int intel_gvt_ggtt_gmadr_h2g(struct intel_vgpu *vgpu, u64 h_addr, u64 *g_addr);
544 int intel_gvt_ggtt_index_g2h(struct intel_vgpu *vgpu, unsigned long g_index,
546 int intel_gvt_ggtt_h2g_index(struct intel_vgpu *vgpu, unsigned long h_index,
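Translation sketch: the g2h/h2g pairs convert between guest and host graphics addresses (or GGTT entry indices). The 0-on-success convention and the error value are inferred from the int return type, not stated in the listing:

	/* Sketch: validate, then translate a guest address to host. */
	u64 h_addr;

	if (!intel_gvt_ggtt_validate_range(vgpu, g_addr, size))
		return -EINVAL;
	if (intel_gvt_ggtt_gmadr_g2h(vgpu, g_addr, &h_addr))
		return -EFAULT;		/* assumed error value */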
549 void intel_vgpu_init_cfg_space(struct intel_vgpu *vgpu,
551 void intel_vgpu_reset_cfg_space(struct intel_vgpu *vgpu);
553 int intel_vgpu_emulate_cfg_read(struct intel_vgpu *vgpu, unsigned int offset,
556 int intel_vgpu_emulate_cfg_write(struct intel_vgpu *vgpu, unsigned int offset,
559 void intel_vgpu_emulate_hotplug(struct intel_vgpu *vgpu, bool connected);
561 static inline u64 intel_vgpu_get_bar_gpa(struct intel_vgpu *vgpu, int bar)
564 return (*(u64 *)(vgpu->cfg_space.virtual_cfg_space + bar)) &
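Usage sketch: the helper reads a 64-bit BAR from the virtual config space and masks off the attribute bits, yielding the guest-physical base. PCI_BASE_ADDRESS_2 below is illustrative, not taken from the listing:

	/* Sketch: test whether a guest physical address hits a 64-bit BAR. */
	u64 base = intel_vgpu_get_bar_gpa(vgpu, PCI_BASE_ADDRESS_2);
	bool hit = gpa >= base && gpa < base + vgpu_aperture_sz(vgpu);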
568 void intel_vgpu_clean_opregion(struct intel_vgpu *vgpu);
569 int intel_vgpu_init_opregion(struct intel_vgpu *vgpu);
570 int intel_vgpu_opregion_base_write_handler(struct intel_vgpu *vgpu, u32 gpa);
572 int intel_vgpu_emulate_opregion_request(struct intel_vgpu *vgpu, u32 swsci);
573 void populate_pvinfo_page(struct intel_vgpu *vgpu);
576 void enter_failsafe_mode(struct intel_vgpu *vgpu, int reason);
577 void intel_vgpu_detach_regions(struct intel_vgpu *vgpu);
691 void intel_gvt_debugfs_add_vgpu(struct intel_vgpu *vgpu);
723 * @vgpu: a vGPU
731 static inline int intel_gvt_read_gpa(struct intel_vgpu *vgpu, unsigned long gpa,
734 if (!test_bit(INTEL_VGPU_STATUS_ATTACHED, vgpu->status))
736 return vfio_dma_rw(&vgpu->vfio_device, gpa, buf, len, false);
741 * @vgpu: a vGPU
749 static inline int intel_gvt_write_gpa(struct intel_vgpu *vgpu,
752 if (!test_bit(INTEL_VGPU_STATUS_ATTACHED, vgpu->status))
754 return vfio_dma_rw(&vgpu->vfio_device, gpa, buf, len, true);
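Both helpers bounce through vfio_dma_rw() on the vGPU's vfio_device (read when the last argument is false, write when true) and bail out early when the vGPU is not ATTACHED; the early-return value is elided by the listing. A round-trip sketch:

	/* Sketch: read a guest-physical word, patch it, write it back. */
	u32 scratch;
	int ret;

	ret = intel_gvt_read_gpa(vgpu, gpa, &scratch, sizeof(scratch));
	if (ret)
		return ret;
	scratch |= BIT(0);
	return intel_gvt_write_gpa(vgpu, gpa, &scratch, sizeof(scratch));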
757 void intel_gvt_debugfs_remove_vgpu(struct intel_vgpu *vgpu);
763 int intel_gvt_dma_pin_guest_page(struct intel_vgpu *vgpu, dma_addr_t dma_addr);
764 int intel_gvt_dma_map_guest_page(struct intel_vgpu *vgpu, unsigned long gfn,
766 void intel_gvt_dma_unmap_guest_page(struct intel_vgpu *vgpu,
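The map/unmap pair pins a guest frame and yields a DMA address usable in shadow page tables; the trailing parameters of both prototypes are truncated by the listing, so the order below is an assumption:

	/* Sketch: map a guest frame for DMA, use it, then unmap. Parameter
	 * order past 'gfn' is assumed from context, not from the listing. */
	dma_addr_t dma_addr;
	int ret = intel_gvt_dma_map_guest_page(vgpu, gfn, PAGE_SIZE, &dma_addr);
	if (ret)
		return ret;
	/* ... install dma_addr into a shadow PTE ... */
	intel_gvt_dma_unmap_guest_page(vgpu, dma_addr, PAGE_SIZE);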