Lines Matching refs:kvm_ops

333 if (vcpu->kvm->arch.kvm_ops && vcpu->kvm->arch.kvm_ops->store_to_eaddr)
334 r = vcpu->kvm->arch.kvm_ops->store_to_eaddr(vcpu, eaddr, ptr,
376 if (vcpu->kvm->arch.kvm_ops && vcpu->kvm->arch.kvm_ops->load_from_eaddr)
377 rc = vcpu->kvm->arch.kvm_ops->load_from_eaddr(vcpu, eaddr, ptr,
433 struct kvmppc_ops *kvm_ops = NULL;
439 kvm_ops = kvmppc_hv_ops;
441 kvm_ops = kvmppc_pr_ops;
442 if (!kvm_ops)
447 kvm_ops = kvmppc_hv_ops;
451 kvm_ops = kvmppc_pr_ops;
455 if (kvm_ops->owner && !try_module_get(kvm_ops->owner))
458 kvm->arch.kvm_ops = kvm_ops;
493 module_put(kvm->arch.kvm_ops->owner);
830 * kvm_ops are not defined for them.
845 if (kvm->arch.kvm_ops->irq_bypass_add_producer)
846 return kvm->arch.kvm_ops->irq_bypass_add_producer(cons, prod);
858 if (kvm->arch.kvm_ops->irq_bypass_del_producer)
859 kvm->arch.kvm_ops->irq_bypass_del_producer(cons, prod);
1164 if (vcpu->kvm->arch.kvm_ops->giveup_ext)
1165 vcpu->kvm->arch.kvm_ops->giveup_ext(vcpu, MSR_FP);
1180 if (vcpu->kvm->arch.kvm_ops->giveup_ext)
1181 vcpu->kvm->arch.kvm_ops->giveup_ext(vcpu, MSR_VSX);
1197 if (vcpu->kvm->arch.kvm_ops->giveup_ext)
1198 vcpu->kvm->arch.kvm_ops->giveup_ext(vcpu, MSR_VEC);
2170 if (kvm->arch.kvm_ops->set_smt_mode)
2171 r = kvm->arch.kvm_ops->set_smt_mode(kvm, mode, flags);
2178 !kvm->arch.kvm_ops->enable_nested)
2180 r = kvm->arch.kvm_ops->enable_nested(kvm);
2186 if (!is_kvmppc_hv_enabled(kvm) || !kvm->arch.kvm_ops->enable_svm)
2188 r = kvm->arch.kvm_ops->enable_svm(kvm);
2384 r = kvm->arch.kvm_ops->get_smmu_info(kvm, &info);
2400 if (!kvm->arch.kvm_ops->configure_mmu)
2405 r = kvm->arch.kvm_ops->configure_mmu(kvm, &cfg);
2413 if (!kvm->arch.kvm_ops->get_rmmu_info)
2415 r = kvm->arch.kvm_ops->get_rmmu_info(kvm, &info);
2432 if (!kvm->arch.kvm_ops->svm_off)
2435 r = kvm->arch.kvm_ops->svm_off(kvm);
2440 r = kvm->arch.kvm_ops->arch_vm_ioctl(filp, ioctl, arg);
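Taken together, these references show a single pattern: each VM carries a kvm_ops table that is selected once at VM creation (kvmppc_hv_ops vs. kvmppc_pr_ops around lines 433-458, with the backing module pinned via try_module_get at line 455 and released with module_put at line 493), and optional callbacks are NULL-checked before every indirect call (e.g. lines 845-846 and 2170-2171). The C sketch below illustrates that dispatch pattern in isolation; every name in it (sketch_vm, sketch_ops, hv_ops, pr_ops, registered_hv_ops, and so on) is an illustrative placeholder, not the real kernel structure, symbol, or signature.

#include <stdio.h>

struct sketch_vm;

/*
 * Per-backend operations table.  Callbacks may be NULL when a backend
 * does not implement the corresponding feature, so callers check the
 * pointer before dispatching (as in lines 845-846 and 2170-2171 above).
 */
struct sketch_ops {
	const char *name;
	int (*set_smt_mode)(struct sketch_vm *vm, unsigned long mode);
	int (*enable_nested)(struct sketch_vm *vm);	/* optional slot */
};

struct sketch_vm {
	const struct sketch_ops *ops;	/* plays the role of kvm->arch.kvm_ops */
};

static int hv_set_smt_mode(struct sketch_vm *vm, unsigned long mode)
{
	(void)vm;
	printf("hv backend: SMT mode set to %lu\n", mode);
	return 0;
}

static const struct sketch_ops hv_ops = {
	.name = "hv",
	.set_smt_mode = hv_set_smt_mode,
	/* .enable_nested intentionally left NULL */
};

static const struct sketch_ops pr_ops = {
	.name = "pr",
	/* no optional callbacks implemented */
};

/*
 * Registered backends; either pointer may be NULL if the corresponding
 * backend was never registered (the role the kvmppc_hv_ops/kvmppc_pr_ops
 * checks play around lines 433-451).
 */
static const struct sketch_ops *registered_hv_ops = &hv_ops;
static const struct sketch_ops *registered_pr_ops = &pr_ops;

/* Pick a backend once at VM-creation time and remember it in the VM. */
static int sketch_init_vm(struct sketch_vm *vm, int want_hv)
{
	const struct sketch_ops *ops =
		want_hv ? registered_hv_ops : registered_pr_ops;

	if (!ops)
		return -1;	/* requested backend unavailable */
	vm->ops = ops;
	return 0;
}

/* Guarded indirect call: a NULL callback means "feature unsupported". */
static int sketch_set_smt_mode(struct sketch_vm *vm, unsigned long mode)
{
	if (vm->ops->set_smt_mode)
		return vm->ops->set_smt_mode(vm, mode);
	return -1;
}

int main(void)
{
	struct sketch_vm vm;

	if (sketch_init_vm(&vm, 1))
		return 1;
	return sketch_set_smt_mode(&vm, 8) ? 1 : 0;
}

The NULL checks matter because the two backends implement different subsets of the ops table, so callers treat a missing callback as an unsupported feature rather than assuming every slot is populated.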