Home
last modified time | relevance | path

Searched refs:vmxon (Results 1 - 10 of 10) sorted by relevance

/kernel/linux/linux-6.6/tools/testing/selftests/kvm/lib/x86_64/
H A D vmx.c 80 /* Setup of a region of guest memory for the vmxon region. */ in vcpu_alloc_vmx()
81 vmx->vmxon = (void *)vm_vaddr_alloc_page(vm); in vcpu_alloc_vmx()
82 vmx->vmxon_hva = addr_gva2hva(vm, (uintptr_t)vmx->vmxon); in vcpu_alloc_vmx()
83 vmx->vmxon_gpa = addr_gva2gpa(vm, (uintptr_t)vmx->vmxon); in vcpu_alloc_vmx()
153 *(uint32_t *)(vmx->vmxon) = vmcs_revision(); in prepare_for_vmx_operation()
154 if (vmxon(vmx->vmxon_gpa)) in prepare_for_vmx_operation()
/kernel/linux/linux-5.10/tools/testing/selftests/kvm/lib/x86_64/
H A D vmx.c 83 /* Setup of a region of guest memory for the vmxon region. */ in vcpu_alloc_vmx()
84 vmx->vmxon = (void *)vm_vaddr_alloc(vm, getpagesize(), 0x10000, 0, 0); in vcpu_alloc_vmx()
85 vmx->vmxon_hva = addr_gva2hva(vm, (uintptr_t)vmx->vmxon); in vcpu_alloc_vmx()
86 vmx->vmxon_gpa = addr_gva2gpa(vm, (uintptr_t)vmx->vmxon); in vcpu_alloc_vmx()
170 *(uint32_t *)(vmx->vmxon) = vmcs_revision(); in prepare_for_vmx_operation()
171 if (vmxon(vmx->vmxon_gpa)) in prepare_for_vmx_operation()
/kernel/linux/linux-5.10/arch/x86/kvm/vmx/
H A D vmx.h 102 /* Has the level1 guest done vmxon? */
103 bool vmxon; member
194 bool vmxon; member
H A D nested.c 286 if (!vmx->nested.vmxon && !vmx->nested.smm.vmxon) in free_nested()
291 vmx->nested.vmxon = false; in free_nested()
292 vmx->nested.smm.vmxon = false; in free_nested()
1413 if (vmx->nested.vmxon) in vmx_set_vmx_msr()
3307 if (!to_vmx(vcpu)->nested.vmxon) { in nested_vmx_check_permission()
4881 vmx->nested.vmxon = true; in enter_vmx_operation()
4956 if (vmx->nested.vmxon) in handle_vmon()
6125 (vmx->nested.vmxon || vmx->nested.smm.vmxon)) { in vmx_get_nested_state()
[all...]
H A D vmx.c 2230 vmx->nested.vmxon) in vmx_set_msr()
2370 asm_volatile_goto("1: vmxon %[vmxon_pointer]\n\t" in kvm_cpu_vmxon()
3222 if (to_vmx(vcpu)->nested.vmxon && !nested_cr4_valid(vcpu, cr4)) in vmx_is_valid_cr4()
5108 if (to_vmx(vcpu)->nested.vmxon && in handle_set_cr0()
7728 vmx->nested.smm.vmxon = vmx->nested.vmxon; in vmx_pre_enter_smm()
7729 vmx->nested.vmxon = false; in vmx_pre_enter_smm()
7739 if (vmx->nested.smm.vmxon) { in vmx_pre_leave_smm()
7740 vmx->nested.vmxon = true; in vmx_pre_leave_smm()
7741 vmx->nested.smm.vmxon in vmx_pre_leave_smm()
[all...]
/kernel/linux/linux-5.10/tools/testing/selftests/kvm/include/x86_64/
H A D vmx.h 342 static inline int vmxon(uint64_t phys) in vmxon() function
346 __asm__ __volatile__ ("vmxon %[pa]; setna %[ret]" in vmxon()
543 void *vmxon; member
/kernel/linux/linux-6.6/arch/x86/kvm/vmx/
H A D vmx.h 117 /* Has the level1 guest done vmxon? */
118 bool vmxon; member
239 bool vmxon; member
H A D nested.c 301 if (!vmx->nested.vmxon && !vmx->nested.smm.vmxon) in free_nested()
306 vmx->nested.vmxon = false; in free_nested()
307 vmx->nested.smm.vmxon = false; in free_nested()
1376 if (vmx->nested.vmxon) in vmx_set_vmx_msr()
3351 if (!to_vmx(vcpu)->nested.vmxon) { in nested_vmx_check_permission()
5120 vmx->nested.vmxon = true; in enter_vmx_operation()
5188 if (vmx->nested.vmxon) in handle_vmxon()
6430 (vmx->nested.vmxon || vmx->nested.smm.vmxon)) { in vmx_get_nested_state()
[all...]
H A D vmx.c 2376 vmx->nested.vmxon) in vmx_set_msr()
2801 asm goto("1: vmxon %[vmxon_pointer]\n\t" in kvm_cpu_vmxon()
3274 if (to_vmx(vcpu)->nested.vmxon) in vmx_is_valid_cr0()
3430 if (to_vmx(vcpu)->nested.vmxon && !nested_cr4_valid(vcpu, cr4)) in vmx_is_valid_cr4()
8148 vmx->nested.smm.vmxon = vmx->nested.vmxon; in vmx_enter_smm()
8149 vmx->nested.vmxon = false; in vmx_enter_smm()
8159 if (vmx->nested.smm.vmxon) { in vmx_leave_smm()
8160 vmx->nested.vmxon = true; in vmx_leave_smm()
8161 vmx->nested.smm.vmxon in vmx_leave_smm()
[all...]
/kernel/linux/linux-6.6/tools/testing/selftests/kvm/include/x86_64/
H A D vmx.h 297 static inline int vmxon(uint64_t phys) in vmxon() function
301 __asm__ __volatile__ ("vmxon %[pa]; setna %[ret]" in vmxon()
503 void *vmxon; member

Completed in 38 milliseconds