Searched refs:is_guest_mode (Results 1 - 25 of 33) sorted by relevance

/kernel/linux/linux-6.6/arch/x86/kvm/
hyperv.h
182 bool is_guest_mode) in kvm_hv_get_tlb_flush_fifo()
185 int i = is_guest_mode ? HV_L2_TLB_FLUSH_FIFO : in kvm_hv_get_tlb_flush_fifo()
198 tlb_flush_fifo = kvm_hv_get_tlb_flush_fifo(vcpu, is_guest_mode(vcpu)); in kvm_hv_vcpu_purge_flush_tlb()
181 kvm_hv_get_tlb_flush_fifo(struct kvm_vcpu *vcpu, bool is_guest_mode) kvm_hv_get_tlb_flush_fifo() argument
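
The hyperv.h hits above use is_guest_mode as a plain boolean parameter rather than the vCPU predicate: kvm_hv_get_tlb_flush_fifo() keeps one TLB-flush FIFO per level and indexes it with that flag. Below is a minimal, self-contained sketch of that indexing pattern; only HV_L2_TLB_FLUSH_FIFO is taken from the hit itself, while the L1 counterpart, the array size and the FIFO type are assumptions for illustration.

/*
 * Sketch of the per-level FIFO selection at hyperv.h:182-185 above. The enum
 * and the FIFO type are simplified assumptions; only HV_L2_TLB_FLUSH_FIFO is
 * taken verbatim from the hit.
 */
#include <stdbool.h>
#include <stdio.h>

enum hv_tlb_flush_fifos {
        HV_L1_TLB_FLUSH_FIFO,           /* assumed counterpart for L1 */
        HV_L2_TLB_FLUSH_FIFO,           /* named in the hyperv.h:185 hit */
        HV_NR_TLB_FLUSH_FIFOS,
};

struct tlb_flush_fifo {
        int pending;                    /* placeholder for the real FIFO contents */
};

struct hv_vcpu {
        struct tlb_flush_fifo tlb_flush_fifo[HV_NR_TLB_FLUSH_FIFOS];
};

/* Return the FIFO buffering flush requests for the level the vCPU runs at. */
static struct tlb_flush_fifo *get_tlb_flush_fifo(struct hv_vcpu *hv, bool is_guest_mode)
{
        int i = is_guest_mode ? HV_L2_TLB_FLUSH_FIFO : HV_L1_TLB_FLUSH_FIFO;

        return &hv->tlb_flush_fifo[i];
}

int main(void)
{
        struct hv_vcpu hv = { 0 };

        printf("L1 fifo at %p, L2 fifo at %p\n",
               (void *)get_tlb_flush_fifo(&hv, false),
               (void *)get_tlb_flush_fifo(&hv, true));
        return 0;
}
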
irq.c
87 if (!is_guest_mode(v) && kvm_vcpu_apicv_active(v)) in kvm_cpu_has_injectable_intr()
kvm_cache_regs.h
226 static inline bool is_guest_mode(struct kvm_vcpu *vcpu) in is_guest_mode() function
hyperv.c
1924 tlb_flush_fifo = kvm_hv_get_tlb_flush_fifo(vcpu, is_guest_mode(vcpu)); in kvm_hv_vcpu_flush_tlb()
1987 if (!hc->fast && is_guest_mode(vcpu)) { in kvm_hv_flush_tlb()
2009 is_guest_mode(vcpu)); in kvm_hv_flush_tlb()
2040 flush_ex.flags, is_guest_mode(vcpu)); in kvm_hv_flush_tlb()
2084 if (all_cpus && !is_guest_mode(vcpu)) { in kvm_hv_flush_tlb()
2092 } else if (!is_guest_mode(vcpu)) { in kvm_hv_flush_tlb()
2331 if (hv_result_success(result) && is_guest_mode(vcpu) && in kvm_hv_hypercall_complete()
x86.c
662 if (!reinject && is_guest_mode(vcpu) && in kvm_multiple_exception()
698 if (!is_guest_mode(vcpu)) in kvm_multiple_exception()
787 if (is_guest_mode(vcpu) && fault->async_page_fault) in kvm_inject_page_fault()
2628 if (is_guest_mode(vcpu)) in kvm_vcpu_write_tsc_offset()
2644 if (is_guest_mode(vcpu)) in kvm_vcpu_write_tsc_multiplier()
8299 return is_guest_mode(emul_to_vcpu(ctxt)); in emulator_is_guest_mode()
8368 .is_guest_mode = emulator_is_guest_mode,
8563 if (!is_guest_mode(vcpu) && static_call(kvm_x86_get_cpl)(vcpu) == 0) { in handle_emulation_failure()
8580 if (WARN_ON_ONCE(is_guest_mode(vcpu)) || in reexecute_instruction()
8672 if (WARN_ON_ONCE(is_guest_mode(vcp in retry_instruction()
[all...]
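
For orientation alongside the kvm_cache_regs.h:226 definition hit above, here is a minimal, self-contained sketch of the predicate itself: an inline helper reporting whether the vCPU is currently running a nested (L2) guest. The hflags field and the HF_GUEST_MASK value are reconstructed assumptions standing in for the real KVM layout; only the helper's signature is taken from the hit.

/*
 * Minimal user-space sketch of the is_guest_mode() predicate defined in
 * arch/x86/kvm/kvm_cache_regs.h. The struct layout and the HF_GUEST_MASK
 * value are simplified assumptions; only the flag-check pattern is the point.
 */
#include <stdbool.h>
#include <stdio.h>

#define HF_GUEST_MASK (1UL << 0)        /* assumed: set while the vCPU runs L2 */

struct kvm_vcpu_arch {
        unsigned long hflags;           /* flags word carrying the guest-mode bit */
};

struct kvm_vcpu {
        struct kvm_vcpu_arch arch;
};

/* Same signature as the kvm_cache_regs.h:226 hit above. */
static inline bool is_guest_mode(struct kvm_vcpu *vcpu)
{
        return vcpu->arch.hflags & HF_GUEST_MASK;
}

int main(void)
{
        struct kvm_vcpu vcpu = { .arch = { .hflags = 0 } };

        printf("before nested entry: is_guest_mode = %d\n", is_guest_mode(&vcpu));

        /* A nested VMRUN/VMLAUNCH would set the flag; a nested VM-exit clears it. */
        vcpu.arch.hflags |= HF_GUEST_MASK;
        printf("after nested entry:  is_guest_mode = %d\n", is_guest_mode(&vcpu));

        return 0;
}
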
/kernel/linux/linux-6.6/arch/x86/kvm/svm/
avic.c
131 if (is_guest_mode(&svm->vcpu) && in avic_deactivate_vmcb()
544 if (is_guest_mode(vcpu)) in avic_vcpu_get_apicv_inhibit_reasons()
941 pi.is_guest_mode = true; in avic_pi_update_irte()
952 if (!ret && pi.is_guest_mode) in avic_pi_update_irte()
964 pi.is_guest_mode = false; in avic_pi_update_irte()
svm.h
455 if (is_guest_mode(&svm->vcpu) && !nested_vgif_enabled(svm)) in get_vgif_vmcb()
516 if (is_guest_mode(&svm->vcpu)) in get_vnmi_vmcb_l1()
571 return is_guest_mode(vcpu) && (svm->nested.ctl.int_ctl & V_INTR_MASKING_MASK); in nested_svm_virtualize_tpr()
svm.c
827 msrpm = is_guest_mode(vcpu) ? to_svm(vcpu)->nested.msrpm: in msr_write_intercepted()
1021 if (is_guest_mode(vcpu)) in svm_enable_lbrv()
1039 if (is_guest_mode(vcpu)) in svm_disable_lbrv()
1059 (is_guest_mode(vcpu) && guest_can_use(vcpu, X86_FEATURE_LBRV) && in svm_update_lbrv()
1686 if (is_guest_mode(&svm->vcpu)) { in svm_clear_vintr()
2366 if (is_guest_mode(vcpu)) { in emulate_svm_instr()
2408 if (!is_guest_mode(vcpu)) in gp_interception()
2626 if (!is_guest_mode(vcpu) || in check_selective_cr0_intercepted()
3024 is_guest_mode(vcpu)) in svm_set_msr()
3034 if (is_guest_mode(vcp in svm_set_msr()
[all...]
nested.c
130 if (!is_guest_mode(&svm->vcpu)) in recalc_intercepts()
1231 if (is_guest_mode(vcpu)) { in svm_leave_nested()
1600 if (is_guest_mode(vcpu)) { in svm_get_nested_state()
1615 if (!is_guest_mode(vcpu)) in svm_get_nested_state()
1737 if (is_guest_mode(vcpu)) in svm_set_nested_state()
1782 if (WARN_ON(!is_guest_mode(vcpu))) in svm_get_nested_state_pages()
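
Many of the SVM hits gate behavior on which context the vCPU is in; the svm.c:827 hit is the simplest example, consulting the nested MSR permission bitmap while in L2 and the L1 bitmap otherwise. The following is a reduced sketch of that selection with stand-in types; the guest_mode flag replaces the real is_guest_mode(vcpu) call, and the field layout is illustrative, not KVM's.

/*
 * Reduced stand-in for the msrpm selection in msr_write_intercepted()
 * (svm.c:827 above). Types and fields are illustrative, not KVM's.
 */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

struct vcpu_svm {
        bool guest_mode;                /* stand-in for is_guest_mode(vcpu) */
        uint32_t *msrpm;                /* L1 MSR permission bitmap */
        struct {
                uint32_t *msrpm;        /* L2 (nested) MSR permission bitmap */
        } nested;
};

/* Pick the bitmap that currently governs MSR intercepts for this vCPU. */
static uint32_t *current_msrpm(struct vcpu_svm *svm)
{
        return svm->guest_mode ? svm->nested.msrpm : svm->msrpm;
}

int main(void)
{
        uint32_t l1_map[1] = { 0 }, l2_map[1] = { 0 };
        struct vcpu_svm svm = { .msrpm = l1_map, .nested = { .msrpm = l2_map } };

        printf("L1 map selected: %d\n", current_msrpm(&svm) == l1_map);
        svm.guest_mode = true;
        printf("L2 map selected: %d\n", current_msrpm(&svm) == l2_map);
        return 0;
}
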
/kernel/linux/linux-5.10/arch/x86/kvm/
irq.c
79 if (!is_guest_mode(v) && kvm_vcpu_apicv_active(v)) in kvm_cpu_has_injectable_intr()
kvm_cache_regs.h
172 static inline bool is_guest_mode(struct kvm_vcpu *vcpu) in is_guest_mode() function
/kernel/linux/linux-5.10/include/linux/
amd-iommu.h
22 bool is_guest_mode; member
/kernel/linux/linux-6.6/include/linux/
amd-iommu.h
24 bool is_guest_mode; member
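
The two amd-iommu.h hits are the one place in these results where is_guest_mode is a structure member rather than a helper or parameter: it belongs to the posted-interrupt data that KVM's AVIC code hands to the AMD IOMMU driver, and the avic.c hits show it being set to true for direct delivery and reset to false on the legacy fallback. Below is a reduced sketch of that handshake; apart from is_guest_mode, the field names, the update helper and the fallback condition are assumptions, not taken from the hits.

/*
 * Reduced stand-in for the structure carrying the is_guest_mode member seen
 * in include/linux/amd-iommu.h. Only is_guest_mode comes from the hits above;
 * the other field and the helper are illustrative assumptions.
 */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

struct pi_data {
        bool is_guest_mode;     /* true: remap the IRQ straight into the running guest */
        uint32_t ga_tag;        /* assumed: tag identifying the target vCPU */
};

/*
 * Mirrors the toggle pattern in the avic.c hits: try guest-mode (posted)
 * delivery first, fall back to legacy (host) delivery when that is not possible.
 */
static int update_irte(struct pi_data *pi, bool vcpu_running)
{
        pi->is_guest_mode = true;
        if (vcpu_running) {
                printf("IRTE programmed for direct (guest-mode) delivery\n");
                return 0;
        }

        /* Fall back: deliver through the host and let KVM inject the interrupt. */
        pi->is_guest_mode = false;
        printf("IRTE programmed for legacy (host) delivery\n");
        return 0;
}

int main(void)
{
        struct pi_data pi = { .ga_tag = 1 };

        update_irte(&pi, true);
        update_irte(&pi, false);
        return 0;
}
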
/kernel/linux/linux-6.6/arch/x86/kvm/vmx/
vmx.c
909 if (is_guest_mode(vcpu)) in vmx_update_exception_bitmap()
1759 if (!is_guest_mode(vcpu)) in vmx_update_emulated_instruction()
2214 if (is_guest_mode(vcpu)) in vmx_set_msr()
2219 if (is_guest_mode(vcpu)) { in vmx_set_msr()
2226 if (is_guest_mode(vcpu)) { in vmx_set_msr()
2245 if (is_guest_mode(vcpu) && get_vmcs12(vcpu)->vm_exit_controls & in vmx_set_msr()
2264 if (is_guest_mode(vcpu) && in vmx_set_msr()
2321 if (is_guest_mode(vcpu) && in vmx_set_msr()
3085 WARN_ON_ONCE(is_guest_mode(vcpu)); in enter_rmode()
3194 if (is_guest_mode(vcp in vmx_get_current_vpid()
[all...]
nested.c
3299 if (is_guest_mode(vcpu) && !nested_get_vmcs12_pages(vcpu)) in vmx_get_nested_state_pages()
3311 if (WARN_ON_ONCE(!is_guest_mode(vcpu))) in nested_vmx_write_pml_buffer()
4721 * L2 was its real guest. Must only be called when in L2 (is_guest_mode())
5345 struct vmcs12 *vmcs12 = is_guest_mode(vcpu) ? get_shadow_vmcs12(vcpu) in handle_vmread()
5369 (is_guest_mode(vcpu) && in handle_vmread()
5377 if (!is_guest_mode(vcpu) && is_vmcs12_ext_field(field)) in handle_vmread()
5393 if (WARN_ON_ONCE(is_guest_mode(vcpu))) in handle_vmread()
5451 struct vmcs12 *vmcs12 = is_guest_mode(vcpu) ? get_shadow_vmcs12(vcpu) in handle_vmwrite()
5479 (is_guest_mode(vcpu) && in handle_vmwrite()
5513 if (!is_guest_mode(vcp in handle_vmwrite()
[all...]
sgx.c
503 if (!vmcs12 && is_guest_mode(vcpu)) in vmx_write_encls_bitmap()
vmx.h
725 return enable_unrestricted_guest && (!is_guest_mode(vcpu) || in is_unrestricted_guest()
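
Within the VMX hits, handle_vmread() and handle_vmwrite() (nested.c:5345/5451 above) use the predicate to decide which vmcs12 a guest-issued VMREAD/VMWRITE should act on: the shadow vmcs12 while in L2. The other branch is truncated out of the excerpts but is reconstructed here as the ordinary cached vmcs12. Reduced sketch with stand-in types; the guest_mode flag replaces is_guest_mode(vcpu).

/*
 * Reduced stand-in for the vmcs12 selection in handle_vmread()/handle_vmwrite()
 * (nested.c:5345/5451 above). Types and field names are illustrative only.
 */
#include <stdbool.h>
#include <stdio.h>

struct vmcs12 {
        unsigned long fields[8];                /* placeholder layout */
};

struct nested_vmx {
        struct vmcs12 *cached_vmcs12;           /* VMCS L1 uses to run L2 */
        struct vmcs12 *cached_shadow_vmcs12;    /* VMCS that L2 itself reads/writes */
        bool guest_mode;                        /* stand-in for is_guest_mode(vcpu) */
};

/* Pick the vmcs12 a guest-issued VMREAD/VMWRITE should operate on. */
static struct vmcs12 *vmcs12_for_access(struct nested_vmx *nested)
{
        return nested->guest_mode ? nested->cached_shadow_vmcs12
                                  : nested->cached_vmcs12;
}

int main(void)
{
        struct vmcs12 ordinary = { { 0 } }, shadow = { { 0 } };
        struct nested_vmx nested = {
                .cached_vmcs12 = &ordinary,
                .cached_shadow_vmcs12 = &shadow,
        };

        printf("ordinary selected: %d\n", vmcs12_for_access(&nested) == &ordinary);
        nested.guest_mode = true;
        printf("shadow selected:   %d\n", vmcs12_for_access(&nested) == &shadow);
        return 0;
}
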
/kernel/linux/linux-5.10/arch/x86/kvm/vmx/
vmx.c
897 if (is_guest_mode(vcpu)) in update_exception_bitmap()
1698 if (!is_guest_mode(vcpu)) in vmx_update_emulated_instruction()
1840 if (is_guest_mode(vcpu) && in vmx_write_l1_tsc_offset()
2078 if (is_guest_mode(vcpu)) in vmx_set_msr()
2083 if (is_guest_mode(vcpu)) { in vmx_set_msr()
2090 if (is_guest_mode(vcpu)) { in vmx_set_msr()
2097 if (is_guest_mode(vcpu) && get_vmcs12(vcpu)->vm_exit_controls & in vmx_set_msr()
2190 if (is_guest_mode(vcpu) && in vmx_set_msr()
2999 if (is_guest_mode(vcpu)) in vmx_get_current_vpid()
3131 } else if (!is_guest_mode(vcp in vmx_set_cr0()
[all...]
nested.h
62 * nested_vmx_run()/vmx_vcpu_run()). Check is_guest_mode() as we always in vmx_has_valid_vmcs12()
65 return is_guest_mode(vcpu) || vmx->nested.current_vmptr != -1ull || in vmx_has_valid_vmcs12()
nested.c
433 WARN_ON(!is_guest_mode(vcpu)); in vmx_inject_page_fault_nested()
3255 if (is_guest_mode(vcpu) && !nested_get_vmcs12_pages(vcpu)) in vmx_get_nested_state_pages()
3267 if (WARN_ON_ONCE(!is_guest_mode(vcpu))) in nested_vmx_write_pml_buffer()
4482 * L2 was its real guest. Must only be called when in L2 (is_guest_mode())
5103 struct vmcs12 *vmcs12 = is_guest_mode(vcpu) ? get_shadow_vmcs12(vcpu) in handle_vmread()
5123 (is_guest_mode(vcpu) && in handle_vmread()
5134 if (!is_guest_mode(vcpu) && is_vmcs12_ext_field(field)) in handle_vmread()
5187 struct vmcs12 *vmcs12 = is_guest_mode(vcpu) ? get_shadow_vmcs12(vcpu) in handle_vmwrite()
5215 (is_guest_mode(vcpu) && in handle_vmwrite()
5249 if (!is_guest_mode(vcp in handle_vmwrite()
[all...]
vmx.h
516 return enable_unrestricted_guest && (!is_guest_mode(vcpu) || in is_unrestricted_guest()
/kernel/linux/linux-5.10/arch/x86/kvm/svm/
svm.c
619 msrpm = is_guest_mode(vcpu) ? to_svm(vcpu)->nested.msrpm: in msr_write_intercepted()
1069 if (is_guest_mode(vcpu)) { in svm_write_l1_tsc_offset()
1490 if (is_guest_mode(&svm->vcpu)) { in svm_clear_vintr()
2327 if (!is_guest_mode(&svm->vcpu) || in check_selective_cr0_intercepted()
3092 if (is_guest_mode(vcpu)) { in handle_exit()
3212 if (is_guest_mode(vcpu) && nested_exit_on_nmi(svm)) in svm_nmi_blocked()
3228 if (for_injection && is_guest_mode(vcpu) && nested_exit_on_nmi(svm)) in svm_nmi_allowed()
3262 if (is_guest_mode(vcpu)) { in svm_interrupt_blocked()
3290 if (for_injection && is_guest_mode(vcpu) && nested_exit_on_intr(svm)) in svm_interrupt_allowed()
3650 if (is_guest_mode( in svm_vcpu_run()
[all...]
nested.c
57 WARN_ON(!is_guest_mode(vcpu)); in svm_inject_page_fault_nested()
122 if (!is_guest_mode(&svm->vcpu)) in recalc_intercepts()
223 if (WARN_ON(!is_guest_mode(vcpu))) in svm_get_nested_state_pages()
790 if (is_guest_mode(&svm->vcpu)) { in svm_leave_nested()
1116 if (is_guest_mode(vcpu)) { in svm_get_nested_state()
1131 if (!is_guest_mode(vcpu)) in svm_get_nested_state()
1243 if (is_guest_mode(vcpu)) in svm_set_nested_state()
svm.h
219 if (is_guest_mode(&svm->vcpu)) in get_host_vmcb()
376 return is_guest_mode(vcpu) && (svm->nested.ctl.int_ctl & V_INTR_MASKING_MASK); in nested_svm_virtualize_tpr()
avic.c
851 pi.is_guest_mode = true; in svm_update_pi_irte()
862 if (!ret && pi.is_guest_mode) in svm_update_pi_irte()
874 pi.is_guest_mode = false; in svm_update_pi_irte()
