
Searched refs:arch (Results 1 - 25 of 1655) sorted by relevance


/kernel/linux/linux-6.6/arch/powerpc/kvm/
book3s_hv_p9_entry.c
14 mtspr(SPRN_TAR, vcpu->arch.tar); in load_spr_state()
18 current->thread.vrsave != vcpu->arch.vrsave) in load_spr_state()
19 mtspr(SPRN_VRSAVE, vcpu->arch.vrsave); in load_spr_state()
22 if (vcpu->arch.hfscr & HFSCR_EBB) { in load_spr_state()
23 if (current->thread.ebbhr != vcpu->arch.ebbhr) in load_spr_state()
24 mtspr(SPRN_EBBHR, vcpu->arch.ebbhr); in load_spr_state()
25 if (current->thread.ebbrr != vcpu->arch.ebbrr) in load_spr_state()
26 mtspr(SPRN_EBBRR, vcpu->arch.ebbrr); in load_spr_state()
27 if (current->thread.bescr != vcpu->arch.bescr) in load_spr_state()
28 mtspr(SPRN_BESCR, vcpu->arch in load_spr_state()
[all...]
booke_emulate.c
26 vcpu->arch.regs.nip = vcpu->arch.shared->srr0; in kvmppc_emul_rfi()
27 kvmppc_set_msr(vcpu, vcpu->arch.shared->srr1); in kvmppc_emul_rfi()
32 vcpu->arch.regs.nip = vcpu->arch.dsrr0; in kvmppc_emul_rfdi()
33 kvmppc_set_msr(vcpu, vcpu->arch.dsrr1); in kvmppc_emul_rfdi()
38 vcpu->arch.regs.nip = vcpu->arch.csrr0; in kvmppc_emul_rfci()
39 kvmppc_set_msr(vcpu, vcpu->arch.csrr1); in kvmppc_emul_rfci()
80 kvmppc_set_gpr(vcpu, rt, vcpu->arch in kvmppc_booke_emulate_op()
[all...]
book3s_hv_tm.c
19 u64 msr = vcpu->arch.shregs.msr; in emulate_tx_failure()
21 tfiar = vcpu->arch.regs.nip & ~0x3ull; in emulate_tx_failure()
23 if (MSR_TM_SUSPENDED(vcpu->arch.shregs.msr)) in emulate_tx_failure()
29 vcpu->arch.tfiar = tfiar; in emulate_tx_failure()
31 vcpu->arch.texasr = (vcpu->arch.texasr & 0x3ffffff) | texasr; in emulate_tx_failure()
37 * instruction image is in vcpu->arch.emul_inst. If the guest was in
44 u32 instr = vcpu->arch.emul_inst; in kvmhv_p9_tm_emulation()
45 u64 msr = vcpu->arch.shregs.msr; in kvmhv_p9_tm_emulation()
56 vcpu->arch in kvmhv_p9_tm_emulation()
[all...]
booke.c
94 printk("pc: %08lx msr: %08llx\n", vcpu->arch.regs.nip, in kvmppc_dump_vcpu()
95 vcpu->arch.shared->msr); in kvmppc_dump_vcpu()
96 printk("lr: %08lx ctr: %08lx\n", vcpu->arch.regs.link, in kvmppc_dump_vcpu()
97 vcpu->arch.regs.ctr); in kvmppc_dump_vcpu()
98 printk("srr0: %08llx srr1: %08llx\n", vcpu->arch.shared->srr0, in kvmppc_dump_vcpu()
99 vcpu->arch.shared->srr1); in kvmppc_dump_vcpu()
101 printk("exceptions: %08lx\n", vcpu->arch.pending_exceptions); in kvmppc_dump_vcpu()
119 vcpu->arch.shadow_msr &= ~MSR_SPE; in kvmppc_vcpu_disable_spe()
129 vcpu->arch.shadow_msr |= MSR_SPE; in kvmppc_vcpu_enable_spe()
135 if (vcpu->arch in kvmppc_vcpu_sync_spe()
[all...]
book3s_hv_p9_perf.c
41 lp = vcpu->arch.vpa.pinned_addr; in switch_pmu_to_guest()
87 if (load_pmu || (vcpu->arch.hfscr & HFSCR_PM)) { in switch_pmu_to_guest()
88 mtspr(SPRN_PMC1, vcpu->arch.pmc[0]); in switch_pmu_to_guest()
89 mtspr(SPRN_PMC2, vcpu->arch.pmc[1]); in switch_pmu_to_guest()
90 mtspr(SPRN_PMC3, vcpu->arch.pmc[2]); in switch_pmu_to_guest()
91 mtspr(SPRN_PMC4, vcpu->arch.pmc[3]); in switch_pmu_to_guest()
92 mtspr(SPRN_PMC5, vcpu->arch.pmc[4]); in switch_pmu_to_guest()
93 mtspr(SPRN_PMC6, vcpu->arch.pmc[5]); in switch_pmu_to_guest()
94 mtspr(SPRN_MMCR1, vcpu->arch.mmcr[1]); in switch_pmu_to_guest()
95 mtspr(SPRN_MMCR2, vcpu->arch in switch_pmu_to_guest()
[all...]
timing.c
27 mutex_lock(&vcpu->arch.exit_timing_lock); in kvmppc_init_timing_stats()
29 vcpu->arch.last_exit_type = 0xDEAD; in kvmppc_init_timing_stats()
31 vcpu->arch.timing_count_type[i] = 0; in kvmppc_init_timing_stats()
32 vcpu->arch.timing_max_duration[i] = 0; in kvmppc_init_timing_stats()
33 vcpu->arch.timing_min_duration[i] = 0xFFFFFFFF; in kvmppc_init_timing_stats()
34 vcpu->arch.timing_sum_duration[i] = 0; in kvmppc_init_timing_stats()
35 vcpu->arch.timing_sum_quad_duration[i] = 0; in kvmppc_init_timing_stats()
37 vcpu->arch.timing_last_exit = 0; in kvmppc_init_timing_stats()
38 vcpu->arch.timing_exit.tv64 = 0; in kvmppc_init_timing_stats()
39 vcpu->arch in kvmppc_init_timing_stats()
[all...]
emulate_loadstore.c
85 vcpu->arch.mmio_vsx_copy_nums = 0; in kvmppc_emulate_loadstore()
86 vcpu->arch.mmio_vsx_offset = 0; in kvmppc_emulate_loadstore()
87 vcpu->arch.mmio_copy_type = KVMPPC_VSX_COPY_NONE; in kvmppc_emulate_loadstore()
88 vcpu->arch.mmio_sp64_extend = 0; in kvmppc_emulate_loadstore()
89 vcpu->arch.mmio_sign_extend = 0; in kvmppc_emulate_loadstore()
90 vcpu->arch.mmio_vmx_copy_nums = 0; in kvmppc_emulate_loadstore()
91 vcpu->arch.mmio_vmx_offset = 0; in kvmppc_emulate_loadstore()
92 vcpu->arch.mmio_host_swabbed = 0; in kvmppc_emulate_loadstore()
95 vcpu->arch.regs.msr = vcpu->arch in kvmppc_emulate_loadstore()
[all...]
book3s_hv.c
14 * This file is derived from arch/powerpc/kvm/book3s.c,
243 cpu = READ_ONCE(vcpu->arch.thread_cpu); in kvmppc_fast_vcpu_kick_hv()
280 * Updates to busy_stolen are protected by arch.tbacct_lock;
320 struct kvmppc_vcore *vc = vcpu->arch.vcore; in kvmppc_core_vcpu_load_hv()
325 if (vcpu->arch.busy_preempt != TB_NIL) { in kvmppc_core_vcpu_load_hv()
326 WARN_ON_ONCE(vcpu->arch.state != KVMPPC_VCPU_BUSY_IN_HOST); in kvmppc_core_vcpu_load_hv()
327 vc->stolen_tb += mftb() - vcpu->arch.busy_preempt; in kvmppc_core_vcpu_load_hv()
328 vcpu->arch.busy_preempt = TB_NIL; in kvmppc_core_vcpu_load_hv()
344 spin_lock_irqsave(&vcpu->arch.tbacct_lock, flags); in kvmppc_core_vcpu_load_hv()
345 if (vcpu->arch in kvmppc_core_vcpu_load_hv()
[all...]
book3s_emulate.c
73 if (vcpu->arch.papr_enabled && (level > PRIV_SUPER)) in spr_allowed()
86 memcpy(&vcpu->arch.gpr_tm[0], &vcpu->arch.regs.gpr[0], in kvmppc_copyto_vcpu_tm()
87 sizeof(vcpu->arch.gpr_tm)); in kvmppc_copyto_vcpu_tm()
88 memcpy(&vcpu->arch.fp_tm, &vcpu->arch.fp, in kvmppc_copyto_vcpu_tm()
90 memcpy(&vcpu->arch.vr_tm, &vcpu->arch.vr, in kvmppc_copyto_vcpu_tm()
92 vcpu->arch.ppr_tm = vcpu->arch in kvmppc_copyto_vcpu_tm()
[all...]
e500_emulate.c
8 * This file is derived from arch/powerpc/kvm/44x_emulate.c,
53 ulong param = vcpu->arch.regs.gpr[rb]; in kvmppc_e500_emul_msgclr()
59 clear_bit(prio, &vcpu->arch.pending_exceptions); in kvmppc_e500_emul_msgclr()
65 ulong param = vcpu->arch.regs.gpr[rb]; in kvmppc_e500_emul_msgsnd()
75 int cpir = cvcpu->arch.shared->pir; in kvmppc_e500_emul_msgsnd()
77 set_bit(prio, &cvcpu->arch.pending_exceptions); in kvmppc_e500_emul_msgsnd()
94 vcpu->run->debug.arch.address = vcpu->arch.regs.nip; in kvmppc_e500_emul_ehpriv()
95 vcpu->run->debug.arch.status = 0; in kvmppc_e500_emul_ehpriv()
225 vcpu->arch in kvmppc_core_emulate_mtspr_e500()
[all...]
/kernel/linux/linux-6.6/arch/mips/kvm/
emulate.c
45 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_compute_return_epc() local
65 arch->gprs[insn.r_format.rd] = epc + 8; in kvm_compute_return_epc()
68 nextpc = arch->gprs[insn.r_format.rs]; in kvm_compute_return_epc()
84 if ((long)arch->gprs[insn.i_format.rs] < 0) in kvm_compute_return_epc()
93 if ((long)arch->gprs[insn.i_format.rs] >= 0) in kvm_compute_return_epc()
102 arch->gprs[31] = epc + 8; in kvm_compute_return_epc()
103 if ((long)arch->gprs[insn.i_format.rs] < 0) in kvm_compute_return_epc()
112 arch->gprs[31] = epc + 8; in kvm_compute_return_epc()
113 if ((long)arch in kvm_compute_return_epc()
[all...]
/kernel/linux/linux-5.10/arch/mips/kvm/
emulate.c
46 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_compute_return_epc() local
66 arch->gprs[insn.r_format.rd] = epc + 8; in kvm_compute_return_epc()
69 nextpc = arch->gprs[insn.r_format.rs]; in kvm_compute_return_epc()
85 if ((long)arch->gprs[insn.i_format.rs] < 0) in kvm_compute_return_epc()
94 if ((long)arch->gprs[insn.i_format.rs] >= 0) in kvm_compute_return_epc()
103 arch->gprs[31] = epc + 8; in kvm_compute_return_epc()
104 if ((long)arch->gprs[insn.i_format.rs] < 0) in kvm_compute_return_epc()
113 arch->gprs[31] = epc + 8; in kvm_compute_return_epc()
114 if ((long)arch in kvm_compute_return_epc()
2190 struct kvm_vcpu_arch *arch = &vcpu->arch; kvm_mips_emulate_cache() local
2390 struct kvm_vcpu_arch *arch = &vcpu->arch; kvm_mips_emulate_syscall() local
2424 struct kvm_vcpu_arch *arch = &vcpu->arch; kvm_mips_emulate_tlbmiss_ld() local
2467 struct kvm_vcpu_arch *arch = &vcpu->arch; kvm_mips_emulate_tlbinv_ld() local
2508 struct kvm_vcpu_arch *arch = &vcpu->arch; kvm_mips_emulate_tlbmiss_st() local
2549 struct kvm_vcpu_arch *arch = &vcpu->arch; kvm_mips_emulate_tlbinv_st() local
2591 struct kvm_vcpu_arch *arch = &vcpu->arch; kvm_mips_emulate_tlbmod() local
2628 struct kvm_vcpu_arch *arch = &vcpu->arch; kvm_mips_emulate_fpu_exc() local
2656 struct kvm_vcpu_arch *arch = &vcpu->arch; kvm_mips_emulate_ri_exc() local
2690 struct kvm_vcpu_arch *arch = &vcpu->arch; kvm_mips_emulate_bp_exc() local
2724 struct kvm_vcpu_arch *arch = &vcpu->arch; kvm_mips_emulate_trap_exc() local
2758 struct kvm_vcpu_arch *arch = &vcpu->arch; kvm_mips_emulate_msafpe_exc() local
2792 struct kvm_vcpu_arch *arch = &vcpu->arch; kvm_mips_emulate_fpe_exc() local
2826 struct kvm_vcpu_arch *arch = &vcpu->arch; kvm_mips_emulate_msadis_exc() local
2859 struct kvm_vcpu_arch *arch = &vcpu->arch; kvm_mips_handle_ri() local
3096 struct kvm_vcpu_arch *arch = &vcpu->arch; kvm_mips_emulate_exc() local
[all...]
/kernel/linux/linux-5.10/arch/powerpc/kvm/
booke_emulate.c
26 vcpu->arch.regs.nip = vcpu->arch.shared->srr0; in kvmppc_emul_rfi()
27 kvmppc_set_msr(vcpu, vcpu->arch.shared->srr1); in kvmppc_emul_rfi()
32 vcpu->arch.regs.nip = vcpu->arch.dsrr0; in kvmppc_emul_rfdi()
33 kvmppc_set_msr(vcpu, vcpu->arch.dsrr1); in kvmppc_emul_rfdi()
38 vcpu->arch.regs.nip = vcpu->arch.csrr0; in kvmppc_emul_rfci()
39 kvmppc_set_msr(vcpu, vcpu->arch.csrr1); in kvmppc_emul_rfci()
80 kvmppc_set_gpr(vcpu, rt, vcpu->arch in kvmppc_booke_emulate_op()
[all...]
book3s_hv_tm.c
19 u64 msr = vcpu->arch.shregs.msr; in emulate_tx_failure()
21 tfiar = vcpu->arch.regs.nip & ~0x3ull; in emulate_tx_failure()
23 if (MSR_TM_SUSPENDED(vcpu->arch.shregs.msr)) in emulate_tx_failure()
29 vcpu->arch.tfiar = tfiar; in emulate_tx_failure()
31 vcpu->arch.texasr = (vcpu->arch.texasr & 0x3ffffff) | texasr; in emulate_tx_failure()
37 * instruction image is in vcpu->arch.emul_inst. If the guest was in
44 u32 instr = vcpu->arch.emul_inst; in kvmhv_p9_tm_emulation()
45 u64 msr = vcpu->arch.shregs.msr; in kvmhv_p9_tm_emulation()
63 newmsr = vcpu->arch in kvmhv_p9_tm_emulation()
[all...]
book3s_hv.c
14 * This file is derived from arch/powerpc/kvm/book3s.c,
134 return kvm->arch.nested_enable && kvm_is_radix(kvm); in nesting_enabled()
241 cpu = READ_ONCE(vcpu->arch.thread_cpu); in kvmppc_fast_vcpu_kick_hv()
277 * Updates to busy_stolen are protected by arch.tbacct_lock;
307 struct kvmppc_vcore *vc = vcpu->arch.vcore; in kvmppc_core_vcpu_load_hv()
319 spin_lock_irqsave(&vcpu->arch.tbacct_lock, flags); in kvmppc_core_vcpu_load_hv()
320 if (vcpu->arch.state == KVMPPC_VCPU_BUSY_IN_HOST && in kvmppc_core_vcpu_load_hv()
321 vcpu->arch.busy_preempt != TB_NIL) { in kvmppc_core_vcpu_load_hv()
322 vcpu->arch.busy_stolen += mftb() - vcpu->arch in kvmppc_core_vcpu_load_hv()
[all...]
booke.c
68 printk("pc: %08lx msr: %08llx\n", vcpu->arch.regs.nip, in kvmppc_dump_vcpu()
69 vcpu->arch.shared->msr); in kvmppc_dump_vcpu()
70 printk("lr: %08lx ctr: %08lx\n", vcpu->arch.regs.link, in kvmppc_dump_vcpu()
71 vcpu->arch.regs.ctr); in kvmppc_dump_vcpu()
72 printk("srr0: %08llx srr1: %08llx\n", vcpu->arch.shared->srr0, in kvmppc_dump_vcpu()
73 vcpu->arch.shared->srr1); in kvmppc_dump_vcpu()
75 printk("exceptions: %08lx\n", vcpu->arch.pending_exceptions); in kvmppc_dump_vcpu()
93 vcpu->arch.shadow_msr &= ~MSR_SPE; in kvmppc_vcpu_disable_spe()
103 vcpu->arch.shadow_msr |= MSR_SPE; in kvmppc_vcpu_enable_spe()
109 if (vcpu->arch in kvmppc_vcpu_sync_spe()
[all...]
timing.c
27 mutex_lock(&vcpu->arch.exit_timing_lock); in kvmppc_init_timing_stats()
29 vcpu->arch.last_exit_type = 0xDEAD; in kvmppc_init_timing_stats()
31 vcpu->arch.timing_count_type[i] = 0; in kvmppc_init_timing_stats()
32 vcpu->arch.timing_max_duration[i] = 0; in kvmppc_init_timing_stats()
33 vcpu->arch.timing_min_duration[i] = 0xFFFFFFFF; in kvmppc_init_timing_stats()
34 vcpu->arch.timing_sum_duration[i] = 0; in kvmppc_init_timing_stats()
35 vcpu->arch.timing_sum_quad_duration[i] = 0; in kvmppc_init_timing_stats()
37 vcpu->arch.timing_last_exit = 0; in kvmppc_init_timing_stats()
38 vcpu->arch.timing_exit.tv64 = 0; in kvmppc_init_timing_stats()
39 vcpu->arch in kvmppc_init_timing_stats()
[all...]
emulate_loadstore.c
86 vcpu->arch.mmio_vsx_copy_nums = 0; in kvmppc_emulate_loadstore()
87 vcpu->arch.mmio_vsx_offset = 0; in kvmppc_emulate_loadstore()
88 vcpu->arch.mmio_copy_type = KVMPPC_VSX_COPY_NONE; in kvmppc_emulate_loadstore()
89 vcpu->arch.mmio_sp64_extend = 0; in kvmppc_emulate_loadstore()
90 vcpu->arch.mmio_sign_extend = 0; in kvmppc_emulate_loadstore()
91 vcpu->arch.mmio_vmx_copy_nums = 0; in kvmppc_emulate_loadstore()
92 vcpu->arch.mmio_vmx_offset = 0; in kvmppc_emulate_loadstore()
93 vcpu->arch.mmio_host_swabbed = 0; in kvmppc_emulate_loadstore()
96 vcpu->arch.regs.msr = vcpu->arch in kvmppc_emulate_loadstore()
[all...]
book3s_emulate.c
77 if (vcpu->arch.papr_enabled && (level > PRIV_SUPER)) in spr_allowed()
90 memcpy(&vcpu->arch.gpr_tm[0], &vcpu->arch.regs.gpr[0], in kvmppc_copyto_vcpu_tm()
91 sizeof(vcpu->arch.gpr_tm)); in kvmppc_copyto_vcpu_tm()
92 memcpy(&vcpu->arch.fp_tm, &vcpu->arch.fp, in kvmppc_copyto_vcpu_tm()
94 memcpy(&vcpu->arch.vr_tm, &vcpu->arch.vr, in kvmppc_copyto_vcpu_tm()
96 vcpu->arch.ppr_tm = vcpu->arch in kvmppc_copyto_vcpu_tm()
[all...]
e500_emulate.c
8 * This file is derived from arch/powerpc/kvm/44x_emulate.c,
53 ulong param = vcpu->arch.regs.gpr[rb]; in kvmppc_e500_emul_msgclr()
59 clear_bit(prio, &vcpu->arch.pending_exceptions); in kvmppc_e500_emul_msgclr()
65 ulong param = vcpu->arch.regs.gpr[rb]; in kvmppc_e500_emul_msgsnd()
75 int cpir = cvcpu->arch.shared->pir; in kvmppc_e500_emul_msgsnd()
77 set_bit(prio, &cvcpu->arch.pending_exceptions); in kvmppc_e500_emul_msgsnd()
94 vcpu->run->debug.arch.address = vcpu->arch.regs.nip; in kvmppc_e500_emul_ehpriv()
95 vcpu->run->debug.arch.status = 0; in kvmppc_e500_emul_ehpriv()
225 vcpu->arch in kvmppc_core_emulate_mtspr_e500()
[all...]
/kernel/linux/linux-6.6/tools/perf/util/
perf_regs.c
26 const char *perf_reg_name(int id, const char *arch) in perf_reg_name() argument
30 if (!strcmp(arch, "csky")) in perf_reg_name()
32 else if (!strcmp(arch, "loongarch")) in perf_reg_name()
34 else if (!strcmp(arch, "mips")) in perf_reg_name()
36 else if (!strcmp(arch, "powerpc")) in perf_reg_name()
38 else if (!strcmp(arch, "riscv")) in perf_reg_name()
40 else if (!strcmp(arch, "s390")) in perf_reg_name()
42 else if (!strcmp(arch, "x86")) in perf_reg_name()
44 else if (!strcmp(arch, "arm")) in perf_reg_name()
46 else if (!strcmp(arch, "arm6 in perf_reg_name()
79 perf_arch_reg_ip(const char *arch) perf_arch_reg_ip() argument
104 perf_arch_reg_sp(const char *arch) perf_arch_reg_sp() argument
[all...]
/kernel/linux/linux-5.10/arch/s390/kvm/
guestdbg.c
62 u64 *cr9 = &vcpu->arch.sie_block->gcr[9]; in enable_all_hw_bp()
63 u64 *cr10 = &vcpu->arch.sie_block->gcr[10]; in enable_all_hw_bp()
64 u64 *cr11 = &vcpu->arch.sie_block->gcr[11]; in enable_all_hw_bp()
67 if (vcpu->arch.guestdbg.nr_hw_bp <= 0 || in enable_all_hw_bp()
68 vcpu->arch.guestdbg.hw_bp_info == NULL) in enable_all_hw_bp()
79 for (i = 0; i < vcpu->arch.guestdbg.nr_hw_bp; i++) { in enable_all_hw_bp()
80 start = vcpu->arch.guestdbg.hw_bp_info[i].addr; in enable_all_hw_bp()
81 len = vcpu->arch.guestdbg.hw_bp_info[i].len; in enable_all_hw_bp()
102 u64 *cr9 = &vcpu->arch.sie_block->gcr[9]; in enable_all_hw_wp()
103 u64 *cr10 = &vcpu->arch in enable_all_hw_wp()
[all...]
kvm-s390.c
285 kvm_clock_sync_scb(vcpu->arch.sie_block, *delta); in kvm_clock_sync()
287 kvm->arch.epoch = vcpu->arch.sie_block->epoch; in kvm_clock_sync()
288 kvm->arch.epdx = vcpu->arch.sie_block->epdx; in kvm_clock_sync()
290 if (vcpu->arch.cputm_enabled) in kvm_clock_sync()
291 vcpu->arch.cputm_start += *delta; in kvm_clock_sync()
292 if (vcpu->arch.vsie_block) in kvm_clock_sync()
293 kvm_clock_sync_scb(vcpu->arch.vsie_block, in kvm_clock_sync()
597 struct gmap *gmap = kvm->arch in kvm_arch_sync_dirty_log()
3983 struct kvm_arch_async_pf arch; kvm_arch_setup_async_pf() local
[all...]
/kernel/linux/linux-6.6/arch/s390/kvm/
guestdbg.c
62 u64 *cr9 = &vcpu->arch.sie_block->gcr[9]; in enable_all_hw_bp()
63 u64 *cr10 = &vcpu->arch.sie_block->gcr[10]; in enable_all_hw_bp()
64 u64 *cr11 = &vcpu->arch.sie_block->gcr[11]; in enable_all_hw_bp()
67 if (vcpu->arch.guestdbg.nr_hw_bp <= 0 || in enable_all_hw_bp()
68 vcpu->arch.guestdbg.hw_bp_info == NULL) in enable_all_hw_bp()
79 for (i = 0; i < vcpu->arch.guestdbg.nr_hw_bp; i++) { in enable_all_hw_bp()
80 start = vcpu->arch.guestdbg.hw_bp_info[i].addr; in enable_all_hw_bp()
81 len = vcpu->arch.guestdbg.hw_bp_info[i].len; in enable_all_hw_bp()
102 u64 *cr9 = &vcpu->arch.sie_block->gcr[9]; in enable_all_hw_wp()
103 u64 *cr10 = &vcpu->arch in enable_all_hw_wp()
[all...]
kvm-s390.c
309 kvm_clock_sync_scb(vcpu->arch.sie_block, *delta); in kvm_clock_sync()
311 kvm->arch.epoch = vcpu->arch.sie_block->epoch; in kvm_clock_sync()
312 kvm->arch.epdx = vcpu->arch.sie_block->epdx; in kvm_clock_sync()
314 if (vcpu->arch.cputm_enabled) in kvm_clock_sync()
315 vcpu->arch.cputm_start += *delta; in kvm_clock_sync()
316 if (vcpu->arch.vsie_block) in kvm_clock_sync()
317 kvm_clock_sync_scb(vcpu->arch.vsie_block, in kvm_clock_sync()
675 struct gmap *gmap = kvm->arch in kvm_arch_sync_dirty_log()
4646 struct kvm_arch_async_pf arch; kvm_arch_setup_async_pf() local
[all...]

Completed in 21 milliseconds
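
The hits listed above all follow the same pattern: each KVM port keeps its architecture-specific vCPU state in an arch member embedded in the generic vCPU structure, so shared code reaches it as vcpu->arch.<field> (or kvm->arch.<field> for VM-wide state), which is what the refs:arch query keeps matching. The C sketch below is a minimal, hypothetical illustration of that layout only; the struct and field names are invented for the example and are not taken from any of the files listed.

/* Hypothetical sketch of the "generic struct embeds per-arch state" pattern
 * seen in the hits above; the names are illustrative, not kernel code. */
#include <stdio.h>
#include <stdint.h>

/* Architecture-specific vCPU state (compare struct kvm_vcpu_arch in the kernel). */
struct vcpu_arch {
        uint64_t nip;                     /* next instruction pointer */
        uint64_t msr;                     /* machine state register */
        uint64_t gprs[32];                /* general-purpose registers */
        unsigned long pending_exceptions; /* bitmap of queued exceptions */
};

/* Generic vCPU: common fields plus the embedded per-arch block. */
struct vcpu {
        int cpu_id;
        struct vcpu_arch arch;
};

/* Generic code reaches architecture state through vcpu->arch, the access
 * pattern that dominates the search results. */
static void dump_vcpu(const struct vcpu *vcpu)
{
        printf("pc: %016llx msr: %016llx pending: %08lx\n",
               (unsigned long long)vcpu->arch.nip,
               (unsigned long long)vcpu->arch.msr,
               vcpu->arch.pending_exceptions);
}

int main(void)
{
        struct vcpu v = { .cpu_id = 0 };
        v.arch.nip = 0x100;
        v.arch.msr = 0x8000000000000000ULL;
        dump_vcpu(&v);
        return 0;
}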
