Lines Matching defs:msr

53 ulong msr);
68 ulong msr = kvmppc_get_msr(vcpu);
69 return (msr & (MSR_IR|MSR_DR)) == MSR_DR;
74 ulong msr = kvmppc_get_msr(vcpu);
78 if ((msr & (MSR_IR|MSR_DR)) != MSR_DR)
108 unsigned long msr, pc, new_msr, new_pc;
112 msr = kvmppc_get_msr(vcpu);
119 if (MSR_TM_TRANSACTIONAL(msr))
122 new_msr |= msr & MSR_TS_MASK;
126 kvmppc_set_srr1(vcpu, (msr & SRR1_MSR_BITS) | srr1_flags);
487 static void kvmppc_set_msr_pr(struct kvm_vcpu *vcpu, u64 msr)
493 msr = (msr & ~MSR_HV) | MSR_ME;
496 printk(KERN_INFO "KVM: Set MSR to 0x%llx\n", msr);
504 if (!(msr & MSR_PR) && MSR_TM_TRANSACTIONAL(msr))
510 msr &= to_book3s(vcpu)->msr_mask;
511 kvmppc_set_msr_fast(vcpu, msr);
514 if (msr & MSR_POW) {
521 msr &= ~MSR_POW;
522 kvmppc_set_msr_fast(vcpu, msr);
537 if (!(msr & MSR_PR) && vcpu->arch.magic_page_pa) {
540 if (msr & MSR_DR)
556 !(old_msr & MSR_PR) && !(old_msr & MSR_SF) && (msr & MSR_SF)) {
820 void kvmppc_giveup_ext(struct kvm_vcpu *vcpu, ulong msr)
828 if (msr & MSR_VSX)
829 msr |= MSR_FP | MSR_VEC;
831 msr &= vcpu->arch.guest_owned_ext;
832 if (!msr)
836 printk(KERN_INFO "Giving up ext 0x%lx\n", msr);
839 if (msr & MSR_FP) {
845 if (t->regs->msr & MSR_FP)
851 if (msr & MSR_VEC) {
852 if (current->thread.regs->msr & MSR_VEC)
858 vcpu->arch.guest_owned_ext &= ~(msr | MSR_VSX);
883 ulong msr)
891 if (!(kvmppc_get_msr(vcpu) & msr)) {
896 if (msr == MSR_VSX) {
910 msr = MSR_FP | MSR_VEC | MSR_VSX;
914 msr &= ~vcpu->arch.guest_owned_ext;
915 if (!msr)
919 printk(KERN_INFO "Loading up ext 0x%lx\n", msr);
922 if (msr & MSR_FP) {
931 if (msr & MSR_VEC) {
942 t->regs->msr |= msr;
943 vcpu->arch.guest_owned_ext |= msr;
957 lost_ext = vcpu->arch.guest_owned_ext & ~current->thread.regs->msr;
977 current->thread.regs->msr |= lost_ext;
1077 u64 msr = kvmppc_get_msr(vcpu);
1079 kvmppc_set_msr(vcpu, msr | MSR_SE);
1086 u64 msr = kvmppc_get_msr(vcpu);
1088 kvmppc_set_msr(vcpu, msr & ~MSR_SE);
1439 printk(KERN_EMERG "exit_nr=0x%x | pc=0x%lx | msr=0x%lx\n",
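
The matches at lines 487-522 show how kvmppc_set_msr_pr sanitizes a guest MSR write before storing it. A minimal sketch of that pattern, reconstructed only from the matched lines above (the elided lines between them are omitted, and the wrapper name is illustrative, not from the source):

	/* Sketch, not the full function: only the matched lines are shown. */
	static void set_guest_msr_sketch(struct kvm_vcpu *vcpu, u64 msr)
	{
		/* line 493: a PR guest never runs with hypervisor state and
		 * always keeps machine-check exceptions enabled */
		msr = (msr & ~MSR_HV) | MSR_ME;

		/* line 510: clear any bits this guest is not allowed to set */
		msr &= to_book3s(vcpu)->msr_mask;

		/* line 511: store the sanitized value into the vcpu */
		kvmppc_set_msr_fast(vcpu, msr);
	}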
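
Similarly, the matches at lines 1077-1088 show the guest single-step toggle: read the guest MSR, set or clear MSR_SE, and write it back. A sketch of that read-modify-write, assuming only the accessors visible above (the enclosing function names are illustrative, not taken from the matches):

	/* Illustrative wrappers; only the bodies come from the matched lines. */
	static void guest_single_step_on(struct kvm_vcpu *vcpu)
	{
		u64 msr = kvmppc_get_msr(vcpu);		/* line 1077 */

		kvmppc_set_msr(vcpu, msr | MSR_SE);	/* line 1079: enable single-step */
	}

	static void guest_single_step_off(struct kvm_vcpu *vcpu)
	{
		u64 msr = kvmppc_get_msr(vcpu);		/* line 1086 */

		kvmppc_set_msr(vcpu, msr & ~MSR_SE);	/* line 1088: disable single-step */
	}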