Lines Matching defs:msr
54 ulong msr);
69 ulong msr = kvmppc_get_msr(vcpu);
70 return (msr & (MSR_IR|MSR_DR)) == MSR_DR;
75 ulong msr = kvmppc_get_msr(vcpu);
79 if ((msr & (MSR_IR|MSR_DR)) != MSR_DR)
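The hits at source lines 69-79 are the split-real-mode test: a guest whose MSR has data relocation (MSR_DR) on but instruction relocation (MSR_IR) off is in "split real mode" and needs fixup. Below is a minimal standalone sketch of that predicate; the helper name and the bit values are illustrative placeholders, not the kernel's actual MSR layout.

    #include <stdbool.h>
    #include <stdio.h>

    /* Illustrative placeholder bit positions, not the architectural MSR layout. */
    #define MSR_IR (1UL << 5)   /* instruction relocation */
    #define MSR_DR (1UL << 4)   /* data relocation */

    /* Mirrors the predicate at line 70: split real = IR off, DR on. */
    static bool is_split_real(unsigned long msr)
    {
        return (msr & (MSR_IR | MSR_DR)) == MSR_DR;
    }

    int main(void)
    {
        printf("%d\n", is_split_real(MSR_DR));          /* 1: split real mode */
        printf("%d\n", is_split_real(MSR_IR | MSR_DR)); /* 0: fully relocated */
        return 0;
    }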
109 unsigned long msr, pc, new_msr, new_pc;
113 msr = kvmppc_get_msr(vcpu);
120 if (MSR_TM_TRANSACTIONAL(msr))
123 new_msr |= msr & MSR_TS_MASK;
127 kvmppc_set_srr1(vcpu, (msr & SRR1_MSR_BITS) | srr1_flags);
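The hits at source lines 109-127 come from interrupt injection: the current guest MSR and PC are saved, any transactional-state bits are carried into the new MSR, and SRR1 is built from the saved MSR bits plus exception-specific flags. The sketch below models only that bit assembly; the mask values are placeholders standing in for the kernel's SRR1_MSR_BITS and MSR_TS_MASK, and the helper name is hypothetical.

    #include <stdio.h>

    /* Placeholder masks, not the real architectural definitions. */
    #define SRR1_MSR_BITS 0xffff0000UL
    #define MSR_TS_MASK   0x00600000UL

    /* Mirrors lines 120-127: keep the transaction-state bits in the new MSR and
     * fold the exception flags into SRR1 alongside the preserved MSR bits. */
    static void build_interrupt_state(unsigned long msr, unsigned long srr1_flags,
                                      unsigned long *new_msr, unsigned long *srr1)
    {
        if (msr & MSR_TS_MASK)          /* stand-in for MSR_TM_TRANSACTIONAL() */
            *new_msr |= msr & MSR_TS_MASK;
        *srr1 = (msr & SRR1_MSR_BITS) | srr1_flags;
    }

    int main(void)
    {
        unsigned long new_msr = 0, srr1 = 0;
        build_interrupt_state(0x10209032UL, 0x40000UL, &new_msr, &srr1);
        printf("srr1=%#lx new_msr=%#lx\n", srr1, new_msr);
        return 0;
    }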
472 static void kvmppc_set_msr_pr(struct kvm_vcpu *vcpu, u64 msr)
478 msr = (msr & ~MSR_HV) | MSR_ME;
481 printk(KERN_INFO "KVM: Set MSR to 0x%llx\n", msr);
489 if (!(msr & MSR_PR) && MSR_TM_TRANSACTIONAL(msr))
495 msr &= to_book3s(vcpu)->msr_mask;
496 kvmppc_set_msr_fast(vcpu, msr);
499 if (msr & MSR_POW) {
505 msr &= ~MSR_POW;
506 kvmppc_set_msr_fast(vcpu, msr);
521 if (!(msr & MSR_PR) && vcpu->arch.magic_page_pa) {
524 if (msr & MSR_DR)
540 !(old_msr & MSR_PR) && !(old_msr & MSR_SF) && (msr & MSR_SF)) {
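Source lines 472-540 are kvmppc_set_msr_pr(), which sanitizes whatever MSR a PR guest asks for: MSR_HV is stripped, MSR_ME is forced on, the result is masked with the book3s msr_mask, and MSR_POW is cleared (the vcpu is put to sleep rather than the guest entering power-save). The sketch below models only that masking; the magic-page handling and the 32-to-64-bit (MSR_SF) transition at lines 521-540 are omitted, and names and bit values are illustrative.

    #include <stdio.h>

    /* Illustrative placeholder bits, not values from asm/reg.h. */
    #define MSR_ME  (1UL << 12)  /* machine check enable */
    #define MSR_HV  (1UL << 30)  /* hypervisor state */
    #define MSR_POW (1UL << 18)  /* power management enable */

    /* Mirrors lines 478-506: a PR guest never runs in hypervisor state, must keep
     * machine checks enabled, and never really stays in power-save. */
    static unsigned long sanitize_guest_msr(unsigned long msr, unsigned long msr_mask)
    {
        msr = (msr & ~MSR_HV) | MSR_ME;  /* never HV, always ME */
        msr &= msr_mask;                 /* drop bits the guest may not own */
        if (msr & MSR_POW)
            msr &= ~MSR_POW;             /* the vcpu sleeps instead */
        return msr;
    }

    int main(void)
    {
        printf("%#lx\n", sanitize_guest_msr(MSR_HV | MSR_POW, ~0UL)); /* -> MSR_ME */
        return 0;
    }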
804 void kvmppc_giveup_ext(struct kvm_vcpu *vcpu, ulong msr)
812 if (msr & MSR_VSX)
813 msr |= MSR_FP | MSR_VEC;
815 msr &= vcpu->arch.guest_owned_ext;
816 if (!msr)
820 printk(KERN_INFO "Giving up ext 0x%lx\n", msr);
823 if (msr & MSR_FP) {
829 if (t->regs->msr & MSR_FP)
835 if (msr & MSR_VEC) {
836 if (current->thread.regs->msr & MSR_VEC)
842 vcpu->arch.guest_owned_ext &= ~(msr | MSR_VSX);
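Source lines 804-842 are kvmppc_giveup_ext(): a VSX giveup implies FP and VEC, the request is intersected with the facilities the guest actually owns, the host thread's register state is saved, and ownership is cleared. A simplified stand-in for that bookkeeping follows; the register save path is elided and the bit values are illustrative.

    #include <stdio.h>

    /* Illustrative placeholder facility bits. */
    #define MSR_FP  (1UL << 13)
    #define MSR_VEC (1UL << 25)
    #define MSR_VSX (1UL << 23)

    /* Mirrors lines 812-842: giving up VSX implies FP and VEC, only facilities the
     * guest currently owns are touched, and ownership is cleared afterwards. */
    static unsigned long giveup_ext(unsigned long msr, unsigned long *guest_owned_ext)
    {
        if (msr & MSR_VSX)
            msr |= MSR_FP | MSR_VEC;

        msr &= *guest_owned_ext;
        if (!msr)
            return 0;                    /* nothing to give up */

        /* ... the real code saves FP/VEC register state here ... */
        *guest_owned_ext &= ~(msr | MSR_VSX);
        return msr;
    }

    int main(void)
    {
        unsigned long owned = MSR_FP | MSR_VEC;
        printf("gave up %#lx, still own %#lx\n", giveup_ext(MSR_VSX, &owned), owned);
        return 0;
    }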
867 ulong msr)
875 if (!(kvmppc_get_msr(vcpu) & msr)) {
880 if (msr == MSR_VSX) {
894 msr = MSR_FP | MSR_VEC | MSR_VSX;
898 msr &= ~vcpu->arch.guest_owned_ext;
899 if (!msr)
903 printk(KERN_INFO "Loading up ext 0x%lx\n", msr);
906 if (msr & MSR_FP) {
915 if (msr & MSR_VEC) {
926 t->regs->msr |= msr;
927 vcpu->arch.guest_owned_ext |= msr;
941 lost_ext = vcpu->arch.guest_owned_ext & ~current->thread.regs->msr;
961 current->thread.regs->msr |= lost_ext;
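Source lines 867-927 (kvmppc_handle_ext) are the mirror image: if the guest's own MSR does not enable the facility, a program check is delivered instead; otherwise a VSX request is widened to FP|VEC|VSX, already-owned bits are skipped, and the new ownership is recorded in both the host thread MSR and guest_owned_ext. Lines 941-961 then recover facilities the host cleared while the task was scheduled out (guest_owned_ext & ~regs->msr). A simplified sketch of both, with hypothetical helper names, illustrative bit values, and the register-load paths elided:

    #include <stdio.h>

    #define MSR_FP  (1UL << 13)   /* illustrative placeholder bits */
    #define MSR_VEC (1UL << 25)
    #define MSR_VSX (1UL << 23)

    /* Mirrors lines 894-927: widen a VSX request, skip already-owned facilities,
     * then record the new ownership in both the thread MSR and the vcpu. */
    static void handle_ext(unsigned long msr, unsigned long *thread_msr,
                           unsigned long *guest_owned_ext)
    {
        if (msr == MSR_VSX)
            msr = MSR_FP | MSR_VEC | MSR_VSX;
        msr &= ~*guest_owned_ext;
        if (!msr)
            return;                      /* everything requested is already owned */
        /* ... the real code loads FP/VEC register state here ... */
        *thread_msr |= msr;
        *guest_owned_ext |= msr;
    }

    /* Mirrors lines 941-961: compute which owned facilities the host dropped
     * across preemption; the real code reloads them and ORs them back into
     * regs->msr. */
    static unsigned long lost_ext(unsigned long thread_msr, unsigned long guest_owned_ext)
    {
        return guest_owned_ext & ~thread_msr;
    }

    int main(void)
    {
        unsigned long thread_msr = 0, owned = 0;
        handle_ext(MSR_VSX, &thread_msr, &owned);
        printf("owned %#lx, lost after preemption %#lx\n",
               owned, lost_ext(0 /* host cleared everything */, owned));
        return 0;
    }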
1065 u64 msr = kvmppc_get_msr(vcpu);
1067 kvmppc_set_msr(vcpu, msr | MSR_SE);
1074 u64 msr = kvmppc_get_msr(vcpu);
1076 kvmppc_set_msr(vcpu, msr & ~MSR_SE);
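Source lines 1065-1076 toggle guest single-stepping by setting or clearing MSR_SE in the guest MSR, so the guest traps after each instruction while debugging is active. A trivial sketch (placeholder bit value, hypothetical helper name):

    #include <stdio.h>

    #define MSR_SE (1UL << 10)   /* illustrative placeholder: single-step trace enable */

    /* Mirrors lines 1065-1076: enable or disable single-step via MSR_SE. */
    static unsigned long set_single_step(unsigned long msr, int enable)
    {
        return enable ? (msr | MSR_SE) : (msr & ~MSR_SE);
    }

    int main(void)
    {
        unsigned long msr = set_single_step(0, 1);
        printf("%#lx\n", msr);                       /* MSR_SE set */
        printf("%#lx\n", set_single_step(msr, 0));   /* MSR_SE cleared */
        return 0;
    }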
1427 printk(KERN_EMERG "exit_nr=0x%x | pc=0x%lx | msr=0x%lx\n",