Searched refs:hv_vcpu (Results 1 - 8 of 8) sorted by relevance
/kernel/linux/linux-6.6/arch/x86/kvm/
hyperv.h
      69  struct kvm_vcpu_hv *hv_vcpu = to_hv_vcpu(vcpu);  in to_hv_synic()
      71  return &hv_vcpu->synic;  in to_hv_synic()
      76  struct kvm_vcpu_hv *hv_vcpu = container_of(synic, struct kvm_vcpu_hv, synic);  in hv_synic_to_vcpu()
      78  return hv_vcpu->vcpu;  in hv_synic_to_vcpu()
      88  struct kvm_vcpu_hv *hv_vcpu = to_hv_vcpu(vcpu);  in kvm_hv_get_vpindex()
      90  return hv_vcpu ? hv_vcpu->vp_index : vcpu->vcpu_idx;  in kvm_hv_get_vpindex()
     121  struct kvm_vcpu_hv *hv_vcpu;  in hv_stimer_to_vcpu()
     123  hv_vcpu = container_of(stimer - stimer->index, struct kvm_vcpu_hv,  in hv_stimer_to_vcpu()
     125  return hv_vcpu  in hv_stimer_to_vcpu()
     130  struct kvm_vcpu_hv *hv_vcpu = to_hv_vcpu(vcpu);  in kvm_hv_has_stimer_pending()
     145  struct kvm_vcpu_hv *hv_vcpu = to_hv_vcpu(vcpu);  in kvm_hv_invtsc_suppressed()
     184  struct kvm_vcpu_hv *hv_vcpu = to_hv_vcpu(vcpu);  in kvm_hv_get_tlb_flush_fifo()
     205  struct kvm_vcpu_hv *hv_vcpu = to_hv_vcpu(vcpu);  in guest_hv_cpuid_has_l2_tlb_flush()
     213  struct kvm_vcpu_hv *hv_vcpu = to_hv_vcpu(vcpu);  in kvm_hv_is_tlb_flush_hcall()
    [all...]
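Most of these hyperv.h hits are the two halves of one accessor idiom: to_hv_synic() walks down from the vCPU to an embedded member, and hv_synic_to_vcpu() walks back up with container_of(). The stand-alone sketch below shows that round trip; the struct and function names (vm_cpu, cpu_hv_state, to_synic, synic_to_hv) are invented for illustration and are not KVM's types.

#include <assert.h>
#include <stddef.h>
#include <stdio.h>

/* Hypothetical stand-ins for kvm_vcpu / kvm_vcpu_hv / the SynIC member. */
struct synic_state { int control; };

struct cpu_hv_state {
	struct synic_state synic;   /* embedded member, like hv_vcpu->synic */
};

struct vm_cpu {
	struct cpu_hv_state hv;     /* embedded here for simplicity */
};

/* container_of(): recover the parent struct from a pointer to one of its members. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

/* Walk down: vCPU -> SynIC, like to_hv_synic(). */
static struct synic_state *to_synic(struct vm_cpu *vcpu)
{
	return &vcpu->hv.synic;
}

/* Walk back up: SynIC -> Hyper-V state, like hv_synic_to_vcpu()'s first step. */
static struct cpu_hv_state *synic_to_hv(struct synic_state *synic)
{
	return container_of(synic, struct cpu_hv_state, synic);
}

int main(void)
{
	struct vm_cpu vcpu = { .hv = { .synic = { .control = 1 } } };

	/* The round trip lands back on the same object. */
	assert(synic_to_hv(to_synic(&vcpu)) == &vcpu.hv);
	printf("control = %d\n", synic_to_hv(to_synic(&vcpu))->synic.control);
	return 0;
}

The same offset arithmetic is what lets hv_synic_to_vcpu() and hv_stimer_to_vcpu() recover the owning kvm_vcpu_hv without storing a back pointer in every member.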
hyperv.c
     223  struct kvm_vcpu_hv *hv_vcpu = to_hv_vcpu(vcpu);  in kvm_hv_notify_acked_sint()
     230  for (idx = 0; idx < ARRAY_SIZE(hv_vcpu->stimer); idx++) {  in kvm_hv_notify_acked_sint()
     231  stimer = &hv_vcpu->stimer[idx];  in kvm_hv_notify_acked_sint()
     248  struct kvm_vcpu_hv *hv_vcpu = to_hv_vcpu(vcpu);  in synic_exit()
     250  hv_vcpu->exit.type = KVM_EXIT_HYPERV_SYNIC;  in synic_exit()
     251  hv_vcpu->exit.u.synic.msr = msr;  in synic_exit()
     252  hv_vcpu->exit.u.synic.control = synic->control;  in synic_exit()
     253  hv_vcpu->exit.u.synic.evt_page = synic->evt_page;  in synic_exit()
     254  hv_vcpu->exit.u.synic.msg_page = synic->msg_page;  in synic_exit()
     330  struct kvm_vcpu_hv *hv_vcpu  in kvm_hv_is_syndbg_enabled()
     349  struct kvm_vcpu_hv *hv_vcpu = to_hv_vcpu(vcpu);  in syndbg_exit()
     690  struct kvm_vcpu_hv *hv_vcpu = to_hv_vcpu(vcpu);  in stimer_set_config()
     865  struct kvm_vcpu_hv *hv_vcpu = to_hv_vcpu(vcpu);  in kvm_hv_process_stimers()
     898  struct kvm_vcpu_hv *hv_vcpu = to_hv_vcpu(vcpu);  in kvm_hv_vcpu_uninit()
     913  struct kvm_vcpu_hv *hv_vcpu = to_hv_vcpu(vcpu);  in kvm_hv_assist_page_enabled()
     926  struct kvm_vcpu_hv *hv_vcpu = to_hv_vcpu(vcpu);  in kvm_hv_get_assist_page()
     962  struct kvm_vcpu_hv *hv_vcpu = to_hv_vcpu(vcpu);  in kvm_hv_vcpu_init()
    1249  hv_check_msr_access(struct kvm_vcpu_hv *hv_vcpu, u32 msr)  in hv_check_msr_access()
    1470  struct kvm_vcpu_hv *hv_vcpu = to_hv_vcpu(vcpu);  in kvm_hv_set_msr()
    1640  struct kvm_vcpu_hv *hv_vcpu = to_hv_vcpu(vcpu);  in kvm_hv_get_msr()
    1885  struct kvm_vcpu_hv *hv_vcpu = to_hv_vcpu(vcpu);  in hv_tlb_flush_enqueue()
    1916  struct kvm_vcpu_hv *hv_vcpu = to_hv_vcpu(vcpu);  in kvm_hv_vcpu_flush_tlb()
    1953  struct kvm_vcpu_hv *hv_vcpu = to_hv_vcpu(vcpu);  in kvm_hv_flush_tlb()
    2169  struct kvm_vcpu_hv *hv_vcpu = to_hv_vcpu(vcpu);  in kvm_hv_send_ipi()
    2249  struct kvm_vcpu_hv *hv_vcpu = to_hv_vcpu(vcpu);  in kvm_hv_set_cpuid()
    2294  struct kvm_vcpu_hv *hv_vcpu;  in kvm_hv_set_enforce_cpuid()
    2419  hv_check_hypercall_access(struct kvm_vcpu_hv *hv_vcpu, u16 code)  in hv_check_hypercall_access()
    2471  struct kvm_vcpu_hv *hv_vcpu = to_hv_vcpu(vcpu);  in kvm_hv_hypercall()
    [all...]
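Several of the hyperv.c hits (kvm_hv_vcpu_init(), kvm_hv_vcpu_uninit(), and the many to_hv_vcpu() loads) reflect that in linux-6.6 the per-vCPU Hyper-V context is allocated lazily, so to_hv_vcpu() can legitimately return NULL. Below is a rough lifecycle sketch under that assumption; the names (vm_cpu, cpu_hv_state, hv_init, hv_uninit, get_vp_index) are hypothetical, and KVM's locking and error handling are omitted.

#include <stdio.h>
#include <stdlib.h>

/* Hypothetical per-vCPU Hyper-V context; the real kvm_vcpu_hv is much larger. */
struct cpu_hv_state {
	unsigned int vp_index;
};

struct vm_cpu {
	unsigned int cpu_idx;
	struct cpu_hv_state *hv;    /* NULL until the guest first touches Hyper-V state */
};

/* Like to_hv_vcpu(): may return NULL, callers must cope. */
static struct cpu_hv_state *to_hv(struct vm_cpu *vcpu)
{
	return vcpu->hv;
}

/* Like kvm_hv_vcpu_init(): allocate on first use, no-op afterwards. */
static int hv_init(struct vm_cpu *vcpu)
{
	if (vcpu->hv)
		return 0;
	vcpu->hv = calloc(1, sizeof(*vcpu->hv));
	if (!vcpu->hv)
		return -1;
	vcpu->hv->vp_index = vcpu->cpu_idx;   /* default VP index mirrors the vCPU index */
	return 0;
}

/* Like kvm_hv_vcpu_uninit(): release the context when the vCPU goes away. */
static void hv_uninit(struct vm_cpu *vcpu)
{
	free(vcpu->hv);
	vcpu->hv = NULL;
}

/* Like kvm_hv_get_vpindex(): fall back to the vCPU index when no context exists. */
static unsigned int get_vp_index(struct vm_cpu *vcpu)
{
	struct cpu_hv_state *hv = to_hv(vcpu);

	return hv ? hv->vp_index : vcpu->cpu_idx;
}

int main(void)
{
	struct vm_cpu vcpu = { .cpu_idx = 3, .hv = NULL };

	printf("before init: vp_index = %u\n", get_vp_index(&vcpu));  /* falls back to 3 */
	if (hv_init(&vcpu) == 0)
		printf("after init:  vp_index = %u\n", get_vp_index(&vcpu));
	hv_uninit(&vcpu);
	return 0;
}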
x86.c
   10638  struct kvm_vcpu_hv *hv_vcpu = to_hv_vcpu(vcpu);  in vcpu_enter_guest()
   10641  vcpu->run->hyperv = hv_vcpu->exit;  in vcpu_enter_guest()
/kernel/linux/linux-6.6/arch/x86/kvm/svm/
hyperv.h
      18  struct kvm_vcpu_hv *hv_vcpu = to_hv_vcpu(vcpu);  in nested_svm_hv_update_vm_vp_ids()
      20  if (!hv_vcpu)  in nested_svm_hv_update_vm_vp_ids()
      23  hv_vcpu->nested.pa_page_gpa = hve->partition_assist_page;  in nested_svm_hv_update_vm_vp_ids()
      24  hv_vcpu->nested.vm_id = hve->hv_vm_id;  in nested_svm_hv_update_vm_vp_ids()
      25  hv_vcpu->nested.vp_id = hve->hv_vp_id;  in nested_svm_hv_update_vm_vp_ids()
      32  struct kvm_vcpu_hv *hv_vcpu = to_hv_vcpu(vcpu);  in nested_svm_l2_tlb_flush_enabled()
      34  if (!hv_vcpu)  in nested_svm_l2_tlb_flush_enabled()
      40  return hv_vcpu->vp_assist_page.nested_control.features.directhypercall;  in nested_svm_l2_tlb_flush_enabled()
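Both SVM helpers above open with the same guard: a guest that never enabled Hyper-V enlightenments has no kvm_vcpu_hv, so a NULL context simply means "feature off". A minimal sketch of that shape, with invented names (cpu_hv_state, l2_tlb_flush_enabled) and a made-up feature bit:

#include <stdbool.h>
#include <stdio.h>

/* Hypothetical subset of the assist page's nested-control feature bits. */
struct nested_features {
	unsigned int directhypercall : 1;
};

struct cpu_hv_state {
	struct nested_features features;
};

/* Like nested_svm_l2_tlb_flush_enabled(): no Hyper-V context means no L2 TLB flush. */
static bool l2_tlb_flush_enabled(const struct cpu_hv_state *hv)
{
	if (!hv)
		return false;

	return hv->features.directhypercall;
}

int main(void)
{
	struct cpu_hv_state hv = { .features = { .directhypercall = 1 } };

	printf("with context:    %d\n", l2_tlb_flush_enabled(&hv));
	printf("without context: %d\n", l2_tlb_flush_enabled(NULL));
	return 0;
}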
/kernel/linux/linux-5.10/arch/x86/kvm/
hyperv.c
     168  struct kvm_vcpu_hv *hv_vcpu = vcpu_to_hv_vcpu(vcpu);  in kvm_hv_notify_acked_sint()
     175  for (idx = 0; idx < ARRAY_SIZE(hv_vcpu->stimer); idx++) {  in kvm_hv_notify_acked_sint()
     176  stimer = &hv_vcpu->stimer[idx];  in kvm_hv_notify_acked_sint()
     193  struct kvm_vcpu_hv *hv_vcpu = &vcpu->arch.hyperv;  in synic_exit()
     195  hv_vcpu->exit.type = KVM_EXIT_HYPERV_SYNIC;  in synic_exit()
     196  hv_vcpu->exit.u.synic.msr = msr;  in synic_exit()
     197  hv_vcpu->exit.u.synic.control = synic->control;  in synic_exit()
     198  hv_vcpu->exit.u.synic.evt_page = synic->evt_page;  in synic_exit()
     199  hv_vcpu->exit.u.synic.msg_page = synic->msg_page;  in synic_exit()
     300  struct kvm_vcpu_hv *hv_vcpu  in syndbg_exit()
     812  struct kvm_vcpu_hv *hv_vcpu = vcpu_to_hv_vcpu(vcpu);  in kvm_hv_process_stimers()
     842  struct kvm_vcpu_hv *hv_vcpu = vcpu_to_hv_vcpu(vcpu);  in kvm_hv_vcpu_uninit()
     893  struct kvm_vcpu_hv *hv_vcpu = vcpu_to_hv_vcpu(vcpu);  in kvm_hv_vcpu_init()
     905  struct kvm_vcpu_hv *hv_vcpu = vcpu_to_hv_vcpu(vcpu);  in kvm_hv_vcpu_postcreate()
    1227  struct kvm_vcpu_hv *hv_vcpu = &vcpu->arch.hyperv;  in kvm_hv_set_msr()
    1390  struct kvm_vcpu_hv *hv_vcpu = &vcpu->arch.hyperv;  in kvm_hv_get_msr()
    1505  struct kvm_vcpu_hv *hv_vcpu = &current_vcpu->arch.hyperv;  in kvm_hv_flush_tlb()
    [all...]
hyperv.h
      58  static inline struct kvm_vcpu *hv_vcpu_to_vcpu(struct kvm_vcpu_hv *hv_vcpu)  in hv_vcpu_to_vcpu()
      62  arch = container_of(hv_vcpu, struct kvm_vcpu_arch, hyperv);  in hv_vcpu_to_vcpu()
     108  struct kvm_vcpu_hv *hv_vcpu;  in stimer_to_vcpu()
     110  hv_vcpu = container_of(stimer - stimer->index, struct kvm_vcpu_hv,  in stimer_to_vcpu()
     112  return hv_vcpu_to_vcpu(hv_vcpu);  in stimer_to_vcpu()
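stimer_to_vcpu() in the 5.10 header uses a slightly sneakier form of container_of(): stimer points at one element of hv_vcpu->stimer[], so subtracting stimer->index first rewinds to the start of the array before the offset math. A self-contained sketch of that trick follows; the types and names (cpu_hv_state, synth_timer, timer_to_hv) are made up for illustration.

#include <assert.h>
#include <stddef.h>

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

/* Hypothetical stand-ins for kvm_vcpu_hv and its stimer array. */
struct synth_timer {
	int index;                    /* position of this timer inside the owner's array */
	unsigned long long count;     /* placeholder payload */
};

struct cpu_hv_state {
	struct synth_timer stimer[4];
};

/*
 * Same trick as stimer_to_vcpu(): step back from &array[index] to &array[0],
 * then let container_of() find the struct that embeds the array.
 */
static struct cpu_hv_state *timer_to_hv(struct synth_timer *stimer)
{
	return container_of(stimer - stimer->index, struct cpu_hv_state, stimer);
}

int main(void)
{
	struct cpu_hv_state hv = { 0 };

	for (int i = 0; i < 4; i++)
		hv.stimer[i].index = i;

	/* Any element of the array resolves back to the same owner. */
	assert(timer_to_hv(&hv.stimer[0]) == &hv);
	assert(timer_to_hv(&hv.stimer[3]) == &hv);
	return 0;
}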
/kernel/linux/linux-6.6/arch/x86/kvm/vmx/
hyperv.c
     431  struct kvm_vcpu_hv *hv_vcpu = to_hv_vcpu(vcpu);  in nested_get_evmptr()
     436  if (unlikely(!hv_vcpu->vp_assist_page.enlighten_vmentry))  in nested_get_evmptr()
     439  return hv_vcpu->vp_assist_page.current_nested_vmcs;  in nested_get_evmptr()
     509  struct kvm_vcpu_hv *hv_vcpu = to_hv_vcpu(vcpu);  in evmcs_has_perf_global_ctrl()
     518  if (WARN_ON_ONCE(!hv_vcpu))  in evmcs_has_perf_global_ctrl()
     521  return hv_vcpu->cpuid_cache.nested_ebx & HV_X64_NESTED_EVMCS1_PERF_GLOBAL_CTRL;  in evmcs_has_perf_global_ctrl()
     660  struct kvm_vcpu_hv *hv_vcpu = to_hv_vcpu(vcpu);  in nested_evmcs_l2_tlb_flush_enabled()
     664  if (!hv_vcpu || !evmcs)  in nested_evmcs_l2_tlb_flush_enabled()
     670  return hv_vcpu->vp_assist_page.nested_control.features.directhypercall;  in nested_evmcs_l2_tlb_flush_enabled()
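nested_get_evmptr() above only trusts current_nested_vmcs after checking the enlighten_vmentry flag in the VP assist page, and otherwise reports that no enlightened VMCS is mapped. The sketch below mirrors that guard under invented names; NO_EVMCS_GPA is a made-up sentinel standing in for KVM's invalid-pointer value, and the structs are cut-down stand-ins, not the real layout.

#include <stdint.h>
#include <stdio.h>

/* Hypothetical sentinel for "no enlightened VMCS mapped". */
#define NO_EVMCS_GPA UINT64_MAX

/* Cut-down stand-in for the Hyper-V VP assist page fields used here. */
struct vp_assist_page {
	uint8_t  enlighten_vmentry;     /* guest opted in to enlightened VMCS entry */
	uint64_t current_nested_vmcs;   /* guest-physical address of the eVMCS */
};

struct cpu_hv_state {
	struct vp_assist_page vp_assist_page;
};

/*
 * Same shape as nested_get_evmptr(): the eVMCS pointer is only meaningful
 * when the guest has set the enlighten_vmentry flag in its assist page.
 */
static uint64_t get_evmcs_gpa(const struct cpu_hv_state *hv)
{
	if (!hv || !hv->vp_assist_page.enlighten_vmentry)
		return NO_EVMCS_GPA;

	return hv->vp_assist_page.current_nested_vmcs;
}

int main(void)
{
	struct cpu_hv_state hv = {
		.vp_assist_page = { .enlighten_vmentry = 1, .current_nested_vmcs = 0x12345000 },
	};

	printf("evmcs gpa = 0x%llx\n", (unsigned long long)get_evmcs_gpa(&hv));

	hv.vp_assist_page.enlighten_vmentry = 0;
	printf("evmcs gpa = 0x%llx (disabled)\n", (unsigned long long)get_evmcs_gpa(&hv));
	return 0;
}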
nested.c
     229  struct kvm_vcpu_hv *hv_vcpu = to_hv_vcpu(vcpu);  in nested_release_evmcs()
     239  if (hv_vcpu) {  in nested_release_evmcs()
     240  hv_vcpu->nested.pa_page_gpa = INVALID_GPA;  in nested_release_evmcs()
     241  hv_vcpu->nested.vm_id = 0;  in nested_release_evmcs()
     242  hv_vcpu->nested.vp_id = 0;  in nested_release_evmcs()
    1583  struct kvm_vcpu_hv *hv_vcpu = to_hv_vcpu(&vmx->vcpu);  in copy_enlightened_to_vmcs12()
    1591  hv_vcpu->nested.pa_page_gpa = evmcs->partition_assist_page;  in copy_enlightened_to_vmcs12()
    1592  hv_vcpu->nested.vm_id = evmcs->hv_vm_id;  in copy_enlightened_to_vmcs12()
    1593  hv_vcpu->nested.vp_id = evmcs->hv_vp_id;  in copy_enlightened_to_vmcs12()
Completed in 29 milliseconds