Lines matching refs:vcpu_svm — each entry below is one source line that references struct vcpu_svm, prefixed with its line number in the file (the declarations correspond to KVM's SVM header, arch/x86/kvm/svm/svm.h, in the Linux tree). Declarations that continue onto a following line appear truncated, because only the line containing the match is listed.
207 struct vcpu_svm {
312 void recalc_intercepts(struct vcpu_svm *svm);
362 static __always_inline struct vcpu_svm *to_svm(struct kvm_vcpu *vcpu)
364 return container_of(vcpu, struct vcpu_svm, vcpu);
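to_svm() above is the standard Linux container_of() pattern: generic KVM code only carries a struct kvm_vcpu *, and the SVM backend recovers its enclosing per-vCPU state from the embedded member. A minimal user-space sketch of the same cast; the field layout here is a placeholder, not the real svm.h struct:

#include <stddef.h>
#include <stdio.h>

/* Simplified stand-ins: the real struct vcpu_svm has many more fields. */
struct kvm_vcpu {
	int vcpu_id;
};

struct vcpu_svm {
	struct kvm_vcpu vcpu;	/* embedded generic vCPU state */
	unsigned long vmcb_pa;	/* placeholder for SVM-private state */
};

/* container_of() as defined in the kernel: recover the address of the
 * enclosing structure from a pointer to one of its members; it works
 * for a member at any offset, not just the first. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

/* Mirrors to_svm(): convert the generic vCPU pointer that common KVM
 * code passes around back into the SVM backend's private state. */
static struct vcpu_svm *to_svm(struct kvm_vcpu *vcpu)
{
	return container_of(vcpu, struct vcpu_svm, vcpu);
}

int main(void)
{
	struct vcpu_svm svm = { .vcpu = { .vcpu_id = 3 }, .vmcb_pa = 0x1000 };
	struct kvm_vcpu *vcpu = &svm.vcpu;	/* what generic code sees */

	printf("recovered vmcb_pa: %#lx\n", to_svm(vcpu)->vmcb_pa);
	return 0;
}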
401 static inline void set_exception_intercept(struct vcpu_svm *svm, u32 bit)
411 static inline void clr_exception_intercept(struct vcpu_svm *svm, u32 bit)
421 static inline void svm_set_intercept(struct vcpu_svm *svm, int bit)
430 static inline void svm_clr_intercept(struct vcpu_svm *svm, int bit)
439 static inline bool svm_is_intercept(struct vcpu_svm *svm, int bit)
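The set/clr/is trio above treats each intercept as one bit in a flat bitmap inside the VMCB control area, with recalc_intercepts() (line 312) re-merging the result into the active VMCB afterwards. A compilable sketch of the underlying bit helpers; the word count and the HLT bit number are illustrative stand-ins, not the architectural values:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define MAX_INTERCEPT 5	/* number of 32-bit intercept words, illustrative */

/* Hypothetical stand-in: the real vmcb_control_area carries the
 * intercept words plus much more hardware-defined state. */
struct vmcb_control_area {
	uint32_t intercepts[MAX_INTERCEPT];
};

/* Each intercept is one bit in the flat intercepts[] bitmap; these
 * three helpers mirror the set/clr/is pattern declared at 421-439. */
static void vmcb_set_intercept(struct vmcb_control_area *c, int bit)
{
	c->intercepts[bit / 32] |= 1u << (bit % 32);
}

static void vmcb_clr_intercept(struct vmcb_control_area *c, int bit)
{
	c->intercepts[bit / 32] &= ~(1u << (bit % 32));
}

static bool vmcb_test_intercept(const struct vmcb_control_area *c, int bit)
{
	return c->intercepts[bit / 32] & (1u << (bit % 32));
}

int main(void)
{
	struct vmcb_control_area c = { { 0 } };
	enum { INTERCEPT_HLT = 96 + 24 };	/* illustrative bit number */

	vmcb_set_intercept(&c, INTERCEPT_HLT);
	printf("set: %d\n", vmcb_test_intercept(&c, INTERCEPT_HLT));
	vmcb_clr_intercept(&c, INTERCEPT_HLT);
	printf("cleared: %d\n", vmcb_test_intercept(&c, INTERCEPT_HLT));
	return 0;
}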
444 static inline bool nested_vgif_enabled(struct vcpu_svm *svm)
450 static inline struct vmcb *get_vgif_vmcb(struct vcpu_svm *svm)
461 static inline void enable_gif(struct vcpu_svm *svm)
471 static inline void disable_gif(struct vcpu_svm *svm)
481 static inline bool gif_set(struct vcpu_svm *svm)
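The GIF helpers above hide whether the guest's global interrupt flag is tracked by hardware (vGIF, a bit in the int_ctl of the VMCB chosen by get_vgif_vmcb()) or by a software flag when vGIF is unavailable. A self-contained sketch of that split, assuming a simplified vcpu_svm with just the two fields involved; the bit position is from memory and should be treated as illustrative:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define V_GIF_MASK (1u << 9)	/* illustrative int_ctl bit position */

struct vmcb {
	uint32_t int_ctl;
};

struct vcpu_svm {
	struct vmcb *vgif_vmcb;	/* non-NULL when hardware vGIF is in use */
	bool guest_gif;		/* software-tracked GIF otherwise */
};

/* Mirrors enable_gif()/disable_gif()/gif_set(): with vGIF the flag
 * lives in a VMCB bit that hardware updates across STGI/CLGI; without
 * it, KVM emulates those instructions and tracks GIF in software. */
static void enable_gif(struct vcpu_svm *svm)
{
	if (svm->vgif_vmcb)
		svm->vgif_vmcb->int_ctl |= V_GIF_MASK;
	else
		svm->guest_gif = true;
}

static void disable_gif(struct vcpu_svm *svm)
{
	if (svm->vgif_vmcb)
		svm->vgif_vmcb->int_ctl &= ~V_GIF_MASK;
	else
		svm->guest_gif = false;
}

static bool gif_set(struct vcpu_svm *svm)
{
	if (svm->vgif_vmcb)
		return svm->vgif_vmcb->int_ctl & V_GIF_MASK;
	return svm->guest_gif;
}

int main(void)
{
	struct vcpu_svm svm = { .vgif_vmcb = NULL, .guest_gif = false };

	enable_gif(&svm);	/* emulated STGI */
	printf("GIF after STGI: %d\n", gif_set(&svm));
	disable_gif(&svm);	/* emulated CLGI */
	printf("GIF after CLGI: %d\n", gif_set(&svm));
	return 0;
}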
491 static inline bool nested_npt_enabled(struct vcpu_svm *svm)
496 static inline bool nested_vnmi_enabled(struct vcpu_svm *svm)
511 static inline struct vmcb *get_vnmi_vmcb_l1(struct vcpu_svm *svm)
522 static inline bool is_vnmi_enabled(struct vcpu_svm *svm)
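is_vnmi_enabled() follows the same shape for virtual NMIs: get_vnmi_vmcb_l1() picks the VMCB whose int_ctl owns the hardware NMI state, returning NULL when vNMI is unavailable or while L2 is running, and the predicate then just tests the enable bit. A hedged sketch with stand-in fields for the module-level vnmi knob and is_guest_mode(); the enable-bit position is illustrative:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define V_NMI_ENABLE_MASK (1u << 26)	/* illustrative bit position */

struct vmcb {
	uint32_t int_ctl;
};

struct vcpu_svm {
	bool vnmi_supported;	/* stand-in for the module-level vnmi flag */
	bool is_guest_mode;	/* stand-in for is_guest_mode(&svm->vcpu) */
	struct vmcb *vmcb01;	/* L1's VMCB */
};

/* Mirrors get_vnmi_vmcb_l1(): hardware vNMI state is only consulted
 * for L1; while L2 runs the helper bails out with NULL. */
static struct vmcb *get_vnmi_vmcb_l1(struct vcpu_svm *svm)
{
	if (!svm->vnmi_supported || svm->is_guest_mode)
		return NULL;
	return svm->vmcb01;
}

/* Mirrors is_vnmi_enabled(): active iff a VMCB was selected and its
 * enable bit is set in int_ctl. */
static bool is_vnmi_enabled(struct vcpu_svm *svm)
{
	struct vmcb *vmcb = get_vnmi_vmcb_l1(svm);

	return vmcb && (vmcb->int_ctl & V_NMI_ENABLE_MASK);
}

int main(void)
{
	struct vmcb vmcb01 = { .int_ctl = V_NMI_ENABLE_MASK };
	struct vcpu_svm svm = {
		.vnmi_supported = true,
		.is_guest_mode = false,
		.vmcb01 = &vmcb01,
	};

	printf("vNMI enabled for L1: %d\n", is_vnmi_enabled(&svm));
	svm.is_guest_mode = true;	/* L2 active: software NMI handling */
	printf("vNMI enabled while L2 runs: %d\n", is_vnmi_enabled(&svm));
	return 0;
}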
549 void disable_nmi_singlestep(struct vcpu_svm *svm);
553 void svm_set_gif(struct vcpu_svm *svm, bool value);
557 void svm_set_x2apic_msr_interception(struct vcpu_svm *svm, bool disable);
569 struct vcpu_svm *svm = to_svm(vcpu);
574 static inline bool nested_exit_on_smi(struct vcpu_svm *svm)
579 static inline bool nested_exit_on_intr(struct vcpu_svm *svm)
584 static inline bool nested_exit_on_nmi(struct vcpu_svm *svm)
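The nested_exit_on_{smi,intr,nmi}() predicates above all answer one question: while L2 runs, does this event belong to L1? They consult KVM's cached copy of L1's VMCB controls (filled by nested_copy_vmcb_control_to_cache(), line 615) rather than guest memory, so L1 cannot change the answer after the fact. A sketch of that test, with hypothetical bit numbers and a simplified cache struct:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Illustrative intercept bit numbers; the real values are defined by
 * the SVM architecture headers. */
enum { INTERCEPT_INTR = 0, INTERCEPT_NMI = 1, INTERCEPT_SMI = 2 };

/* Hypothetical mirror of the cached vmcb12 control area: a snapshot of
 * L1's controls taken at VMRUN emulation time. */
struct vmcb_ctrl_area_cached {
	uint32_t intercepts[2];
};

static bool vmcb12_is_intercept(const struct vmcb_ctrl_area_cached *ctl,
				int bit)
{
	return ctl->intercepts[bit / 32] & (1u << (bit % 32));
}

/* The nested_exit_on_*() helpers all reduce to this test: an event that
 * arrives while L2 runs is forwarded to L1 iff L1 asked to intercept it. */
static bool nested_exit_on_nmi(const struct vmcb_ctrl_area_cached *ctl)
{
	return vmcb12_is_intercept(ctl, INTERCEPT_NMI);
}

int main(void)
{
	struct vmcb_ctrl_area_cached ctl = { { 1u << INTERCEPT_NMI, 0 } };

	printf("NMI exits to L1: %d\n", nested_exit_on_nmi(&ctl));
	return 0;
}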
592 void svm_free_nested(struct vcpu_svm *svm);
593 int svm_allocate_nested(struct vcpu_svm *svm);
598 int nested_svm_vmexit(struct vcpu_svm *svm);
600 static inline int nested_svm_simple_vmexit(struct vcpu_svm *svm, u32 exit_code)
608 int nested_svm_exit_handled(struct vcpu_svm *svm);
610 int nested_svm_check_exception(struct vcpu_svm *svm, unsigned nr,
612 int nested_svm_exit_special(struct vcpu_svm *svm);
615 void nested_copy_vmcb_control_to_cache(struct vcpu_svm *svm,
617 void nested_copy_vmcb_save_to_cache(struct vcpu_svm *svm,
619 void nested_sync_control_from_vmcb02(struct vcpu_svm *svm);
620 void nested_vmcb02_compute_g_pat(struct vcpu_svm *svm);
621 void svm_switch_vmcb(struct vcpu_svm *svm, struct kvm_vmcb_info *target_vmcb);
646 void avic_init_vmcb(struct vcpu_svm *svm, struct vmcb *vmcb);
649 int avic_init_vcpu(struct vcpu_svm *svm);
681 void pre_sev_run(struct vcpu_svm *svm, int cpu);
686 void sev_init_vmcb(struct vcpu_svm *svm);
687 void sev_vcpu_after_set_cpuid(struct vcpu_svm *svm);
690 int sev_es_string_io(struct vcpu_svm *svm, int size, unsigned int port, int in);
691 void sev_es_vcpu_reset(struct vcpu_svm *svm);
694 void sev_es_unmap_ghcb(struct vcpu_svm *svm);
698 void __svm_sev_es_vcpu_run(struct vcpu_svm *svm, bool spec_ctrl_intercepted);
699 void __svm_vcpu_run(struct vcpu_svm *svm, bool spec_ctrl_intercepted);
702 static __always_inline bool kvm_ghcb_##field##_is_valid(const struct vcpu_svm *svm) \
708 static __always_inline u64 kvm_ghcb_get_##field##_if_valid(struct vcpu_svm *svm, struct ghcb *ghcb) \
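Lines 702 and 708 are the body of a field-name token-pasting macro: for each GHCB field it stamps out an _is_valid() predicate (checking the bitmap of fields the guest marked valid) and a get-if-valid accessor. A self-contained sketch of the same preprocessor pattern, with a hypothetical bitmap layout and only two fields:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Simplified stand-ins: the real GHCB save area and per-vCPU validity
 * bitmap are defined by the SEV-ES GHCB layout in the SVM headers. */
struct ghcb_save {
	uint64_t rax;
	uint64_t rbx;
};

struct vcpu_svm {
	uint64_t ghcb_valid_bitmap;	/* one bit per GHCB field, illustrative */
};

enum { BIT_rax = 0, BIT_rbx = 1 };	/* hypothetical bit indices */

/* Token pasting (##) stamps out one _is_valid/_get_if_valid pair per
 * field, mirroring the accessor pattern behind lines 702 and 708. */
#define DEFINE_GHCB_ACCESSORS(field)					\
static bool kvm_ghcb_##field##_is_valid(const struct vcpu_svm *svm)	\
{									\
	return svm->ghcb_valid_bitmap & (1ull << BIT_##field);		\
}									\
									\
static uint64_t kvm_ghcb_get_##field##_if_valid(struct vcpu_svm *svm,	\
						struct ghcb_save *save)	\
{									\
	return kvm_ghcb_##field##_is_valid(svm) ? save->field : 0;	\
}

DEFINE_GHCB_ACCESSORS(rax)
DEFINE_GHCB_ACCESSORS(rbx)

int main(void)
{
	struct vcpu_svm svm = { .ghcb_valid_bitmap = 1ull << BIT_rax };
	struct ghcb_save save = { .rax = 42, .rbx = 7 };

	printf("rax: %llu (marked valid)\n",
	       (unsigned long long)kvm_ghcb_get_rax_if_valid(&svm, &save));
	printf("rbx: %llu (not marked valid)\n",
	       (unsigned long long)kvm_ghcb_get_rbx_if_valid(&svm, &save));
	return 0;
}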