Lines matching refs: arch
298 u64 old_efer = vcpu->arch.efer;
299 vcpu->arch.efer = efer;
329 vcpu->arch.efer = old_efer;
468 struct kvm_queued_exception *ex = &vcpu->arch.exception;
514 vcpu->arch.osvw.length = (osvw_len >= 3) ? (osvw_len) : 3;
515 vcpu->arch.osvw.status = osvw_status & ~(6ULL);
526 vcpu->arch.osvw.status |= 1;
1175 svm->vmcb01.ptr->control.tsc_offset = vcpu->arch.l1_tsc_offset;
1176 svm->vmcb->control.tsc_offset = vcpu->arch.tsc_offset;
1184 __svm_write_tsc_multiplier(vcpu->arch.tsc_scaling_ratio);
1344 save->g_pat = vcpu->arch.pat;
1401 vcpu->arch.microcode_version = 0x01000065;
1457 * Mark vcpu->arch.guest_fpu->fpstate as scratch so it won't
1460 fpstate_set_confidential(&vcpu->arch.guest_fpu);
1547 __svm_write_tsc_multiplier(vcpu->arch.tsc_scaling_ratio);
1876 if (vcpu->arch.efer & EFER_LME) {
1878 vcpu->arch.efer |= EFER_LMA;
1879 if (!vcpu->arch.guest_state_protected)
1884 vcpu->arch.efer &= ~EFER_LMA;
1885 if (!vcpu->arch.guest_state_protected)
1890 vcpu->arch.cr0 = cr0;
1934 unsigned long old_cr4 = vcpu->arch.cr4;
1939 vcpu->arch.cr4 = cr4;
2014 if (svm->vcpu.arch.guest_state_protected)
2030 get_debugreg(vcpu->arch.db[0], 0);
2031 get_debugreg(vcpu->arch.db[1], 1);
2032 get_debugreg(vcpu->arch.db[2], 2);
2033 get_debugreg(vcpu->arch.db[3], 3);
2038 vcpu->arch.dr6 = svm->vmcb->save.dr6;
2039 vcpu->arch.dr7 = svm->vmcb->save.dr7;
2040 vcpu->arch.switch_db_regs &= ~KVM_DEBUGREG_WONT_EXIT;
2048 if (vcpu->arch.guest_state_protected)
2104 kvm_run->debug.arch.dr6 = svm->vmcb->save.dr6;
2105 kvm_run->debug.arch.dr7 = svm->vmcb->save.dr7;
2106 kvm_run->debug.arch.pc =
2108 kvm_run->debug.arch.exception = DB_VECTOR;
2121 kvm_run->debug.arch.pc = svm->vmcb->save.cs.base + svm->vmcb->save.rip;
2122 kvm_run->debug.arch.exception = BP_VECTOR;
2332 struct x86_emulate_ctxt *ctxt = vcpu->arch.emulate_ctxt;
2439 if (svm->vcpu.arch.smi_pending ||
2440 svm->vcpu.arch.nmi_pending ||
2537 vcpu->arch.nmi_injected = false;
2623 unsigned long cr0 = vcpu->arch.cr0;
2695 val = vcpu->arch.cr2;
2773 vcpu->arch.switch_db_regs |= KVM_DEBUGREG_WONT_EXIT;
2985 if (svm_dis && (vcpu->arch.efer & EFER_SVME))
3223 * vcpu->arch.preempted_in_kernel can never be true. Just
3343 svm->current_vmcb->ptr, vcpu->arch.last_vmentry_cpu);
3467 vcpu->run->internal.data[1] = vcpu->arch.last_vmentry_cpu;
3517 vcpu->arch.cr0 = svm->vmcb->save.cr0;
3519 vcpu->arch.cr3 = svm->vmcb->save.cr3;
3540 kvm_run->fail_entry.cpu = vcpu->arch.last_vmentry_cpu;
3627 if (vcpu->arch.interrupt.soft) {
3636 trace_kvm_inj_virq(vcpu->arch.interrupt.nr,
3637 vcpu->arch.interrupt.soft, reinjected);
3640 svm->vmcb->control.event_inj = vcpu->arch.interrupt.nr |
3654 if (!READ_ONCE(vcpu->arch.apic->apicv_active)) {
3924 hpa_t root_tdp = vcpu->arch.mmu->root.hpa;
4042 vcpu->arch.nmi_injected = false;
4059 vcpu->arch.nmi_injected = true;
4136 svm->vmcb->save.rax = vcpu->arch.regs[VCPU_REGS_RAX];
4137 svm->vmcb->save.rsp = vcpu->arch.regs[VCPU_REGS_RSP];
4138 svm->vmcb->save.rip = vcpu->arch.regs[VCPU_REGS_RIP];
4164 svm->vmcb->save.cr2 = vcpu->arch.cr2;
4172 if (unlikely(vcpu->arch.switch_db_regs & KVM_DEBUGREG_WONT_EXIT))
4173 svm_set_dr6(svm, vcpu->arch.dr6);
4197 vcpu->arch.cr2 = svm->vmcb->save.cr2;
4198 vcpu->arch.regs[VCPU_REGS_RAX] = svm->vmcb->save.rax;
4199 vcpu->arch.regs[VCPU_REGS_RSP] = svm->vmcb->save.rsp;
4200 vcpu->arch.regs[VCPU_REGS_RIP] = svm->vmcb->save.rip;
4202 vcpu->arch.regs_dirty = 0;
4234 vcpu->arch.apf.host_apf_flags =
4237 vcpu->arch.regs_avail &= ~SVM_REGS_LAZY_LOAD_SET;
4269 cr3 = vcpu->arch.cr3;
4475 cr0 = vcpu->arch.cr0 & ~SVM_CR0_SELECTIVE_MASK;
4561 vcpu->arch.at_instruction_boundary = true;
4573 vcpu->arch.mcg_cap &= 0x1ff;
4624 svm->vmcb->save.rax = vcpu->arch.regs[VCPU_REGS_RAX];
4625 svm->vmcb->save.rsp = vcpu->arch.regs[VCPU_REGS_RSP];
4626 svm->vmcb->save.rip = vcpu->arch.regs[VCPU_REGS_RIP];
4900 kvm->arch.pause_in_guest = true;