Lines matching refs:arch (references to 'arch'):
85 vcpu->arch.mmio_vsx_copy_nums = 0;
86 vcpu->arch.mmio_vsx_offset = 0;
87 vcpu->arch.mmio_copy_type = KVMPPC_VSX_COPY_NONE;
88 vcpu->arch.mmio_sp64_extend = 0;
89 vcpu->arch.mmio_sign_extend = 0;
90 vcpu->arch.mmio_vmx_copy_nums = 0;
91 vcpu->arch.mmio_vmx_offset = 0;
92 vcpu->arch.mmio_host_swabbed = 0;
95 vcpu->arch.regs.msr = vcpu->arch.shared->msr;
96 if (analyse_instr(&op, &vcpu->arch.regs, inst) == 0) {
124 vcpu->arch.mmio_sp64_extend = 1;
144 vcpu->arch.vaddr_accessed &= ~((unsigned long)size - 1);
145 vcpu->arch.paddr_accessed &= ~((unsigned long)size - 1);
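
The masking at 144-145 rounds the accessed virtual and physical addresses down to the natural alignment of the access. A minimal standalone sketch of that computation (the helper name is hypothetical; size is assumed to be a power of two):

static unsigned long align_down(unsigned long addr, unsigned long size)
{
	/* clear the low bits: e.g. addr 0x1003, size 8 -> 0x1000 */
	return addr & ~(size - 1);
}
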
148 vcpu->arch.mmio_copy_type =
151 vcpu->arch.mmio_copy_type =
154 vcpu->arch.mmio_copy_type =
157 vcpu->arch.mmio_copy_type =
162 vcpu->arch.mmio_vmx_offset =
163 (vcpu->arch.vaddr_accessed & 0xf)/size;
166 vcpu->arch.mmio_vmx_copy_nums = 2;
171 vcpu->arch.mmio_vmx_copy_nums = 1;
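
Lines 162-171 size a VMX (AltiVec) access: the low four address bits divided by the element size give the element index within the 16-byte vector register, and the copy count is 2 for a whole-register access versus 1 for a single element. A hedged sketch of the offset computation (helper name hypothetical):

static unsigned int vmx_element_offset(unsigned long vaddr, unsigned int size)
{
	/* e.g. vaddr 0x100c with 4-byte elements -> element 3 */
	return (vaddr & 0xf) / size;
}
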
191 vcpu->arch.mmio_sp64_extend = 1;
195 vcpu->arch.mmio_copy_type =
198 vcpu->arch.mmio_copy_type =
202 vcpu->arch.mmio_copy_type =
205 vcpu->arch.mmio_copy_type =
212 vcpu->arch.mmio_vsx_copy_nums = 1;
215 vcpu->arch.mmio_vsx_copy_nums =
243 * from vcpu->arch.
245 if (vcpu->kvm->arch.kvm_ops->giveup_ext)
246 vcpu->kvm->arch.kvm_ops->giveup_ext(vcpu,
250 vcpu->arch.mmio_sp64_extend = 1;
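
The comment fragment at 243 and the calls at 245-246 (repeated at 269-270 and 314-315) show the store paths asking the backend, when it provides a giveup_ext hook, to flush the guest's FP/vector register state out of the CPU so the emulation can read it from vcpu->arch. A sketch of that pattern, kernel context assumed and not compilable on its own (the wrapper name and the MSR_FP argument are illustrative, not taken from the listing):

static void flush_guest_fp_state(struct kvm_vcpu *vcpu)
{
	/* optional hook: only some backends need to give up the unit */
	if (vcpu->kvm->arch.kvm_ops->giveup_ext)
		vcpu->kvm->arch.kvm_ops->giveup_ext(vcpu, MSR_FP);
}
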
266 vcpu->arch.vaddr_accessed &= ~((unsigned long)size - 1);
267 vcpu->arch.paddr_accessed &= ~((unsigned long)size - 1);
269 if (vcpu->kvm->arch.kvm_ops->giveup_ext)
270 vcpu->kvm->arch.kvm_ops->giveup_ext(vcpu,
273 vcpu->arch.mmio_copy_type =
276 vcpu->arch.mmio_copy_type =
279 vcpu->arch.mmio_copy_type =
282 vcpu->arch.mmio_copy_type =
287 vcpu->arch.mmio_vmx_offset =
288 (vcpu->arch.vaddr_accessed & 0xf)/size;
291 vcpu->arch.mmio_vmx_copy_nums = 2;
295 vcpu->arch.mmio_vmx_copy_nums = 1;
314 if (vcpu->kvm->arch.kvm_ops->giveup_ext)
315 vcpu->kvm->arch.kvm_ops->giveup_ext(vcpu,
319 vcpu->arch.mmio_sp64_extend = 1;
322 vcpu->arch.mmio_copy_type =
325 vcpu->arch.mmio_copy_type =
332 vcpu->arch.mmio_vsx_copy_nums = 1;
335 vcpu->arch.mmio_vsx_copy_nums =
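
Lines 212/215 and 332/335 pick the VSX copy count: the visible branches set a single copy, while the truncated assignments compute a larger count, presumably one copy per vector element covered by the access. A hedged sketch of that interpretation only (function and parameter names are hypothetical and the formula is an assumption, not taken from the source):

static unsigned int vsx_copy_nums(unsigned int access_size, unsigned int element_size)
{
	/* assumption: one copy per vector element covered by the access */
	return access_size / element_size;
}
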