/kernel/linux/linux-6.6/arch/riscv/kernel/
asm-offsets.c:
    125  OFFSET(KVM_ARCH_GUEST_ZERO, kvm_vcpu_arch, guest_context.zero);  in asm_offsets()
    126  OFFSET(KVM_ARCH_GUEST_RA, kvm_vcpu_arch, guest_context.ra);  in asm_offsets()
    127  OFFSET(KVM_ARCH_GUEST_SP, kvm_vcpu_arch, guest_context.sp);  in asm_offsets()
    128  OFFSET(KVM_ARCH_GUEST_GP, kvm_vcpu_arch, guest_context.gp);  in asm_offsets()
    129  OFFSET(KVM_ARCH_GUEST_TP, kvm_vcpu_arch, guest_context.tp);  in asm_offsets()
    130  OFFSET(KVM_ARCH_GUEST_T0, kvm_vcpu_arch, guest_context.t0);  in asm_offsets()
    131  OFFSET(KVM_ARCH_GUEST_T1, kvm_vcpu_arch, guest_context.t1);  in asm_offsets()
    132  OFFSET(KVM_ARCH_GUEST_T2, kvm_vcpu_arch, guest_context.t2);  in asm_offsets()
    133  OFFSET(KVM_ARCH_GUEST_S0, kvm_vcpu_arch, guest_context.s0);  in asm_offsets()
    134  OFFSET(KVM_ARCH_GUEST_S1, kvm_vcpu_arch, guest_contex  in asm_offsets()
    [all...]
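These OFFSET() entries exist so that the hand-written RISC-V world-switch assembly can address members of struct kvm_vcpu_arch by numeric constant: each line reduces to offsetof() evaluated at build time and emitted as an assembler symbol. A minimal, self-contained C sketch of the idea follows; the demo_* names and layouts are stand-ins for illustration, not the kernel's definitions.

    #include <stddef.h>
    #include <stdio.h>

    /* Stand-in structures; field layout is illustrative only. */
    struct demo_guest_context {
        unsigned long zero, ra, sp, gp, tp;
    };

    struct demo_vcpu_arch {
        unsigned long host_stack;
        struct demo_guest_context guest_context;
    };

    /* The kernel's OFFSET(sym, str, mem) likewise boils down to
     * offsetof(struct str, mem); here we just print the values that
     * assembly code would consume as constants. */
    #define DEMO_OFFSET(sym, str, mem) \
        printf("#define " #sym " %zu\n", offsetof(struct str, mem))

    int main(void)
    {
        DEMO_OFFSET(KVM_ARCH_GUEST_RA, demo_vcpu_arch, guest_context.ra);
        DEMO_OFFSET(KVM_ARCH_GUEST_SP, demo_vcpu_arch, guest_context.sp);
        return 0;
    }

The generated constants let assembly such as the __kvm_riscv_switch_to() routine listed further below load and store guest registers without depending on the C structure layout directly.
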
/kernel/linux/linux-5.10/arch/mips/kernel/
asm-offsets.c:
    352  OFFSET(VCPU_FPR0, kvm_vcpu_arch, fpu.fpr[0]);  in output_kvm_defines()
    353  OFFSET(VCPU_FPR1, kvm_vcpu_arch, fpu.fpr[1]);  in output_kvm_defines()
    354  OFFSET(VCPU_FPR2, kvm_vcpu_arch, fpu.fpr[2]);  in output_kvm_defines()
    355  OFFSET(VCPU_FPR3, kvm_vcpu_arch, fpu.fpr[3]);  in output_kvm_defines()
    356  OFFSET(VCPU_FPR4, kvm_vcpu_arch, fpu.fpr[4]);  in output_kvm_defines()
    357  OFFSET(VCPU_FPR5, kvm_vcpu_arch, fpu.fpr[5]);  in output_kvm_defines()
    358  OFFSET(VCPU_FPR6, kvm_vcpu_arch, fpu.fpr[6]);  in output_kvm_defines()
    359  OFFSET(VCPU_FPR7, kvm_vcpu_arch, fpu.fpr[7]);  in output_kvm_defines()
    360  OFFSET(VCPU_FPR8, kvm_vcpu_arch, fpu.fpr[8]);  in output_kvm_defines()
    361  OFFSET(VCPU_FPR9, kvm_vcpu_arch, fp  in output_kvm_defines()
    [all...]

/kernel/linux/linux-6.6/arch/mips/kernel/
asm-offsets.c:
    361  OFFSET(VCPU_FPR0, kvm_vcpu_arch, fpu.fpr[0]);  in output_kvm_defines()
    362  OFFSET(VCPU_FPR1, kvm_vcpu_arch, fpu.fpr[1]);  in output_kvm_defines()
    363  OFFSET(VCPU_FPR2, kvm_vcpu_arch, fpu.fpr[2]);  in output_kvm_defines()
    364  OFFSET(VCPU_FPR3, kvm_vcpu_arch, fpu.fpr[3]);  in output_kvm_defines()
    365  OFFSET(VCPU_FPR4, kvm_vcpu_arch, fpu.fpr[4]);  in output_kvm_defines()
    366  OFFSET(VCPU_FPR5, kvm_vcpu_arch, fpu.fpr[5]);  in output_kvm_defines()
    367  OFFSET(VCPU_FPR6, kvm_vcpu_arch, fpu.fpr[6]);  in output_kvm_defines()
    368  OFFSET(VCPU_FPR7, kvm_vcpu_arch, fpu.fpr[7]);  in output_kvm_defines()
    369  OFFSET(VCPU_FPR8, kvm_vcpu_arch, fpu.fpr[8]);  in output_kvm_defines()
    370  OFFSET(VCPU_FPR9, kvm_vcpu_arch, fp  in output_kvm_defines()
    [all...]

/kernel/linux/linux-5.10/arch/mips/kvm/
entry.c:
    249  UASM_i_SW(&p, SP, offsetof(struct kvm_vcpu_arch, host_stack), K1);  in kvm_mips_build_vcpu_run()
    252  UASM_i_SW(&p, GP, offsetof(struct kvm_vcpu_arch, host_gp), K1);  in kvm_mips_build_vcpu_run()
    263  UASM_i_LW(&p, K0, offsetof(struct kvm_vcpu_arch, guest_ebase), K1);  in kvm_mips_build_vcpu_run()
    305  UASM_i_LW(&p, T0, offsetof(struct kvm_vcpu_arch, pc), K1);  in kvm_mips_build_enter_guest()
    314  UASM_i_SW(&p, K0, offsetof(struct kvm_vcpu_arch, host_pgd), K1);  in kvm_mips_build_enter_guest()
    364  UASM_i_SW(&p, K0, offsetof(struct kvm_vcpu_arch, host_entryhi),  in kvm_mips_build_enter_guest()
    372  UASM_i_LW(&p, T0, offsetof(struct kvm_vcpu_arch, cop0), K1);  in kvm_mips_build_enter_guest()
    378  UASM_i_ADDIU(&p, T1, K1, offsetof(struct kvm_vcpu_arch,  in kvm_mips_build_enter_guest()
    381  UASM_i_ADDIU(&p, T1, K1, offsetof(struct kvm_vcpu_arch,  in kvm_mips_build_enter_guest()
    437  UASM_i_LW(&p, i, offsetof(struct kvm_vcpu_arch, gpr  in kvm_mips_build_enter_guest()
    [all...]
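entry.c generates the MIPS vcpu-run and enter-guest trampolines at runtime with the uasm micro-assembler; every UASM_i_SW()/UASM_i_LW() above passes offsetof(struct kvm_vcpu_arch, member) as the load/store displacement from the base register (K1) that holds the vcpu arch pointer. Below is a rough C analogue of the stores at lines 249 and 252, not the kernel's code; demo_vcpu_arch is a hypothetical stand-in.

    #include <stddef.h>

    /* Stand-in for struct kvm_vcpu_arch, reduced to the fields used here. */
    struct demo_vcpu_arch {
        unsigned long host_stack;
        unsigned long host_gp;
    };

    /* UASM_i_SW(&p, SP, offsetof(struct kvm_vcpu_arch, host_stack), K1)
     * emits a store of register SP to "K1 + offset". Written directly in C
     * against a base pointer, the same operation is: */
    static void demo_save_host_regs(struct demo_vcpu_arch *arch,
                                    unsigned long sp, unsigned long gp)
    {
        *(unsigned long *)((char *)arch +
            offsetof(struct demo_vcpu_arch, host_stack)) = sp;
        *(unsigned long *)((char *)arch +
            offsetof(struct demo_vcpu_arch, host_gp)) = gp;
    }
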
emulate.c:
      46  struct kvm_vcpu_arch *arch = &vcpu->arch;  in kvm_compute_return_epc()
    2190  struct kvm_vcpu_arch *arch = &vcpu->arch;  in kvm_mips_emulate_cache()
    2390  struct kvm_vcpu_arch *arch = &vcpu->arch;  in kvm_mips_emulate_syscall()
    2424  struct kvm_vcpu_arch *arch = &vcpu->arch;  in kvm_mips_emulate_tlbmiss_ld()
    2467  struct kvm_vcpu_arch *arch = &vcpu->arch;  in kvm_mips_emulate_tlbinv_ld()
    2508  struct kvm_vcpu_arch *arch = &vcpu->arch;  in kvm_mips_emulate_tlbmiss_st()
    2549  struct kvm_vcpu_arch *arch = &vcpu->arch;  in kvm_mips_emulate_tlbinv_st()
    2591  struct kvm_vcpu_arch *arch = &vcpu->arch;  in kvm_mips_emulate_tlbmod()
    2628  struct kvm_vcpu_arch *arch = &vcpu->arch;  in kvm_mips_emulate_fpu_exc()
    2656  struct kvm_vcpu_arch *arc  in kvm_mips_emulate_ri_exc()
    [all...]
interrupt.c:
      84  struct kvm_vcpu_arch *arch = &vcpu->arch;  in kvm_mips_irq_deliver_cb()
vz.c:
    1083  struct kvm_vcpu_arch *arch = &vcpu->arch;  in kvm_vz_gpsi_cache()
    1218  struct kvm_vcpu_arch *arch = &vcpu->arch;  in kvm_trap_vz_handle_gpsi()
    1300  struct kvm_vcpu_arch *arch = &vcpu->arch;  in kvm_trap_vz_handle_gsfc()

/kernel/linux/linux-6.6/arch/mips/kvm/
entry.c:
    243  UASM_i_SW(&p, SP, offsetof(struct kvm_vcpu_arch, host_stack), K1);  in kvm_mips_build_vcpu_run()
    246  UASM_i_SW(&p, GP, offsetof(struct kvm_vcpu_arch, host_gp), K1);  in kvm_mips_build_vcpu_run()
    257  UASM_i_LW(&p, K0, offsetof(struct kvm_vcpu_arch, guest_ebase), K1);  in kvm_mips_build_vcpu_run()
    299  UASM_i_LW(&p, T0, offsetof(struct kvm_vcpu_arch, pc), K1);  in kvm_mips_build_enter_guest()
    307  UASM_i_SW(&p, K0, offsetof(struct kvm_vcpu_arch, host_pgd), K1);  in kvm_mips_build_enter_guest()
    357  UASM_i_SW(&p, K0, offsetof(struct kvm_vcpu_arch, host_entryhi),  in kvm_mips_build_enter_guest()
    400  UASM_i_LW(&p, i, offsetof(struct kvm_vcpu_arch, gprs[i]), K1);  in kvm_mips_build_enter_guest()
    405  UASM_i_LW(&p, K0, offsetof(struct kvm_vcpu_arch, hi), K1);  in kvm_mips_build_enter_guest()
    408  UASM_i_LW(&p, K0, offsetof(struct kvm_vcpu_arch, lo), K1);  in kvm_mips_build_enter_guest()
    413  UASM_i_LW(&p, K0, offsetof(struct kvm_vcpu_arch, gpr  in kvm_mips_build_enter_guest()
    [all...]
vz.c:
    1081  struct kvm_vcpu_arch *arch = &vcpu->arch;  in kvm_vz_gpsi_cache()
    1216  struct kvm_vcpu_arch *arch = &vcpu->arch;  in kvm_trap_vz_handle_gpsi()
    1298  struct kvm_vcpu_arch *arch = &vcpu->arch;  in kvm_trap_vz_handle_gsfc()

/kernel/linux/linux-5.10/arch/loongarch/kernel/
asm-offsets.c:
    290  OFFSET(VCPU_FCSR0, kvm_vcpu_arch, fpu.fcsr);  in output_kvm_defines()
    291  OFFSET(VCPU_FCC, kvm_vcpu_arch, fpu.fcc);  in output_kvm_defines()
    299  OFFSET(KVM_ARCH_HSTACK, kvm_vcpu_arch, host_stack);  in output_kvm_defines()
    300  OFFSET(KVM_ARCH_HGP, kvm_vcpu_arch, host_gp);  in output_kvm_defines()
    301  OFFSET(KVM_ARCH_HANDLE_EXIT, kvm_vcpu_arch, handle_exit);  in output_kvm_defines()
    302  OFFSET(KVM_ARCH_HPGD, kvm_vcpu_arch, host_pgd);  in output_kvm_defines()
    303  OFFSET(KVM_ARCH_GEENTRY, kvm_vcpu_arch, guest_eentry);  in output_kvm_defines()
    304  OFFSET(KVM_ARCH_GPC, kvm_vcpu_arch, pc);  in output_kvm_defines()
    305  OFFSET(KVM_ARCH_GGPR, kvm_vcpu_arch, gprs);  in output_kvm_defines()
    306  OFFSET(KVM_ARCH_HESTAT, kvm_vcpu_arch, host_esta  in output_kvm_defines()
    [all...]

/kernel/linux/linux-6.6/arch/mips/include/asm/
kvm_host.h:
    290  struct kvm_vcpu_arch {
    692  static inline bool kvm_mips_guest_can_have_fpu(struct kvm_vcpu_arch *vcpu)  in kvm_mips_guest_can_have_fpu()
    698  static inline bool kvm_mips_guest_has_fpu(struct kvm_vcpu_arch *vcpu)  in kvm_mips_guest_has_fpu()
    704  static inline bool kvm_mips_guest_can_have_msa(struct kvm_vcpu_arch *vcpu)  in kvm_mips_guest_can_have_msa()
    710  static inline bool kvm_mips_guest_has_msa(struct kvm_vcpu_arch *vcpu)  in kvm_mips_guest_has_msa()
    776  void __kvm_save_fpu(struct kvm_vcpu_arch *vcpu);
    777  void __kvm_restore_fpu(struct kvm_vcpu_arch *vcpu);
    778  void __kvm_restore_fcsr(struct kvm_vcpu_arch *vcpu);
    779  void __kvm_save_msa(struct kvm_vcpu_arch *vcpu);
    780  void __kvm_restore_msa(struct kvm_vcpu_arch *vcp
    [all...]
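kvm_host.h is where struct kvm_vcpu_arch itself is defined for MIPS, together with small inline predicates over it and the prototypes of the assembly helpers (__kvm_save_fpu() and friends) that take a struct kvm_vcpu_arch pointer. A hedged sketch of the predicate shape only; the demo field name is an assumption, not the kernel's.

    #include <stdbool.h>

    /* Hypothetical stand-in; the real capability state lives in
     * struct kvm_vcpu_arch under different field names. */
    struct demo_vcpu_arch {
        unsigned int fpu_enabled;
    };

    /* kvm_mips_guest_has_fpu()-style helpers are thin inline predicates
     * over the per-vCPU arch state; the assembly-side consumers such as
     * __kvm_save_fpu() receive the same struct pointer. */
    static inline bool demo_guest_has_fpu(struct demo_vcpu_arch *vcpu)
    {
        return vcpu->fpu_enabled != 0;
    }
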
/kernel/linux/linux-5.10/arch/loongarch/include/asm/
kvm_host.h:
    180  struct kvm_vcpu_arch {
    283  static inline bool _kvm_guest_has_fpu(struct kvm_vcpu_arch *arch)  in _kvm_guest_has_fpu()
    289  static inline bool _kvm_guest_has_lsx(struct kvm_vcpu_arch *arch)  in _kvm_guest_has_lsx()
    323  static inline void update_pc(struct kvm_vcpu_arch *arch)  in update_pc()
    335  static inline bool kvm_is_ifetch_fault(struct kvm_vcpu_arch *arch)  in kvm_is_ifetch_fault()
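The LoongArch header adds two helpers beyond the capability predicates: update_pc(), used after emulating an instruction, and kvm_is_ifetch_fault(). Below is a hedged sketch of what an update_pc()-style helper amounts to, assuming a pc field and the fixed 4-byte LoongArch instruction width; both details are assumptions for the demo, not read from the listing above.

    /* Hypothetical stand-in for the per-vCPU arch state. */
    struct demo_vcpu_arch {
        unsigned long pc;
    };

    /* After emulating a trapped instruction, advance the saved guest PC
     * past it so the guest does not re-execute the same instruction. */
    static inline void demo_update_pc(struct demo_vcpu_arch *arch)
    {
        arch->pc += 4;  /* LoongArch instructions are 4 bytes wide */
    }
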
/kernel/linux/linux-5.10/arch/mips/include/asm/
kvm_host.h:
    342  struct kvm_vcpu_arch {
    774  static inline bool kvm_mips_guest_can_have_fpu(struct kvm_vcpu_arch *vcpu)  in kvm_mips_guest_can_have_fpu()
    780  static inline bool kvm_mips_guest_has_fpu(struct kvm_vcpu_arch *vcpu)  in kvm_mips_guest_has_fpu()
    786  static inline bool kvm_mips_guest_can_have_msa(struct kvm_vcpu_arch *vcpu)  in kvm_mips_guest_can_have_msa()
    792  static inline bool kvm_mips_guest_has_msa(struct kvm_vcpu_arch *vcpu)  in kvm_mips_guest_has_msa()
    865  void __kvm_save_fpu(struct kvm_vcpu_arch *vcpu);
    866  void __kvm_restore_fpu(struct kvm_vcpu_arch *vcpu);
    867  void __kvm_restore_fcsr(struct kvm_vcpu_arch *vcpu);
    868  void __kvm_save_msa(struct kvm_vcpu_arch *vcpu);
    869  void __kvm_restore_msa(struct kvm_vcpu_arch *vcp
    [all...]

/kernel/linux/linux-5.10/arch/x86/kvm/
hyperv.h:
    60  struct kvm_vcpu_arch *arch;  in hv_vcpu_to_vcpu()
    62  arch = container_of(hv_vcpu, struct kvm_vcpu_arch, hyperv);  in hv_vcpu_to_vcpu()
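hyperv.h recovers the enclosing struct kvm_vcpu from a pointer to the Hyper-V state embedded in kvm_vcpu_arch by applying container_of() twice: once from the hyperv member back to kvm_vcpu_arch, and once from the arch member back to kvm_vcpu. A self-contained sketch with stand-in types follows; the demo_* names and fields are hypothetical.

    #include <stddef.h>

    /* Minimal container_of(), equivalent to the kernel macro used above. */
    #define demo_container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

    struct demo_hv_vcpu { int dummy; };

    struct demo_vcpu_arch {
        struct demo_hv_vcpu hyperv;   /* embedded Hyper-V per-vCPU state */
    };

    struct demo_vcpu {
        unsigned int vcpu_id;
        struct demo_vcpu_arch arch;   /* embedded arch-specific state */
    };

    /* Mirror of the hv_vcpu_to_vcpu() pattern: two hops back out to the
     * containing vcpu structure. */
    static struct demo_vcpu *demo_hv_vcpu_to_vcpu(struct demo_hv_vcpu *hv_vcpu)
    {
        struct demo_vcpu_arch *arch =
            demo_container_of(hv_vcpu, struct demo_vcpu_arch, hyperv);
        return demo_container_of(arch, struct demo_vcpu, arch);
    }

The pmu-emul.c hit further below uses the same two-step recovery, starting from the pmu member instead of hyperv.
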
/kernel/linux/linux-6.6/arch/riscv/include/asm/
kvm_host.h:
    167  struct kvm_vcpu_arch {
    338  void __kvm_riscv_switch_to(struct kvm_vcpu_arch *vcpu_arch);

/kernel/linux/linux-5.10/arch/arm64/include/asm/
kvm_host.h:
    284  struct kvm_vcpu_arch {
    561  static inline void kvm_arm_pvtime_vcpu_init(struct kvm_vcpu_arch *vcpu_arch)  in kvm_arm_pvtime_vcpu_init()
    566  static inline bool kvm_arm_is_pvtime_enabled(struct kvm_vcpu_arch *vcpu_arch)  in kvm_arm_is_pvtime_enabled()

/kernel/linux/linux-6.6/arch/riscv/kvm/
tlb.c:
    207  struct kvm_vcpu_arch *varch = &vcpu->arch;  in vcpu_hfence_dequeue()
    232  struct kvm_vcpu_arch *varch = &vcpu->arch;  in vcpu_hfence_enqueue()
vcpu.c:
    340  struct kvm_vcpu_arch *v = &vcpu->arch;  in kvm_riscv_vcpu_sync_interrupts()

/kernel/linux/linux-6.6/arch/arm64/include/asm/
kvm_host.h:
     467  struct kvm_vcpu_arch {
    1031  static inline void kvm_arm_pvtime_vcpu_init(struct kvm_vcpu_arch *vcpu_arch)  in kvm_arm_pvtime_vcpu_init()
    1036  static inline bool kvm_arm_is_pvtime_enabled(struct kvm_vcpu_arch *vcpu_arch)  in kvm_arm_is_pvtime_enabled()
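The arm64 pvtime helpers are inline accessors over stolen-time state kept in struct kvm_vcpu_arch: init records that no stolen-time region has been set, and the enabled check tests for a valid guest-physical base. The sketch below assumes a steal.base field and an all-ones invalid-address sentinel; both are assumptions for the demo, not read from the listing.

    #include <stdbool.h>
    #include <stdint.h>

    #define DEMO_INVALID_GPA (~(uint64_t)0)   /* assumed "no address" sentinel */

    /* Hypothetical stand-in for the relevant part of kvm_vcpu_arch. */
    struct demo_vcpu_arch {
        struct {
            uint64_t base;   /* guest-physical base of the stolen-time region */
        } steal;
    };

    static inline void demo_pvtime_vcpu_init(struct demo_vcpu_arch *vcpu_arch)
    {
        vcpu_arch->steal.base = DEMO_INVALID_GPA;
    }

    static inline bool demo_is_pvtime_enabled(struct demo_vcpu_arch *vcpu_arch)
    {
        return vcpu_arch->steal.base != DEMO_INVALID_GPA;
    }
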
/kernel/linux/linux-5.10/arch/arm64/kvm/
pmu-emul.c:
    52  struct kvm_vcpu_arch *vcpu_arch;  in kvm_pmc_to_vcpu()
    56  vcpu_arch = container_of(pmu, struct kvm_vcpu_arch, pmu);  in kvm_pmc_to_vcpu()

/kernel/linux/linux-5.10/arch/powerpc/include/asm/
kvm_host.h:
    521  struct kvm_vcpu_arch {

/kernel/linux/linux-6.6/arch/powerpc/include/asm/
kvm_host.h:
    512  struct kvm_vcpu_arch {

/kernel/linux/linux-5.10/arch/s390/include/asm/
kvm_host.h:
    720  struct kvm_vcpu_arch {

/kernel/linux/linux-6.6/arch/s390/include/asm/
kvm_host.h:
    740  struct kvm_vcpu_arch {

/kernel/linux/linux-5.10/arch/x86/include/asm/
kvm_host.h:
    524  struct kvm_vcpu_arch {