/kernel/linux/linux-6.6/tools/testing/selftests/powerpc/signal/
  sigreturn_kernel.c
      17    #define MSR_PR (1ul << 14)    (macro definition)
      112   sigreturn_msr_mask = ~MSR_PR;    in test_sigreturn_kernel()
      119   sigreturn_msr_mask = ~MSR_PR;    in test_sigreturn_kernel()

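The selftest's three hits show the whole idea in miniature: MSR_PR is bit 14 of the Machine State Register, set in problem (user) state and clear in supervisor state, and the test masks it out of the MSR it smuggles into a forged signal frame to verify the kernel refuses to sigreturn to supervisor state. A minimal standalone sketch of that bit arithmetic; the MSR_PR define mirrors the selftest's own, while the sample MSR value is made up:

    #include <stdio.h>

    #define MSR_PR (1ul << 14)   /* problem-state bit, as in sigreturn_kernel.c */

    int main(void)
    {
        unsigned long msr = MSR_PR | 0x8002;         /* made-up user-mode MSR */
        unsigned long sigreturn_msr_mask = ~MSR_PR;

        /* What the test forges: an MSR with PR cleared. If sigreturn ever
         * honoured this, userspace would resume in supervisor state. */
        unsigned long forged = msr & sigreturn_msr_mask;

        printf("PR before: %#lx, PR after mask: %#lx\n",
               msr & MSR_PR, forged & MSR_PR);
        return 0;
    }
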
/kernel/linux/linux-5.10/arch/powerpc/kvm/
  book3s_hv_tm_builtin.c
      53    if ((msr & MSR_PR) && (vcpu->arch.vcore->pcr & PCR_ARCH_206))    in kvmhv_p9_tm_emulation_early()
      57    ((msr & MSR_PR) && !(mfspr(SPRN_FSCR) & FSCR_EBB)))    in kvmhv_p9_tm_emulation_early()
      92    if ((msr & MSR_PR) && (vcpu->arch.vcore->pcr & PCR_ARCH_206))    in kvmhv_p9_tm_emulation_early()

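The kvmhv_p9_tm_emulation_early() hits gate on the same pattern: a transactional-memory instruction coming from guest userspace (MSR_PR set) must not be emulated if the virtual core is capped at ISA v2.06, which has no TM, or (line 57) if the EBB facility is disabled in FSCR. A hedged sketch of that gate; the function name and the PCR/FSCR bit values below are stand-ins for illustration, not the kernel's definitions:

    #define MSR_PR        (1ul << 14)
    #define PCR_ARCH_206  0x4          /* stand-in value */
    #define FSCR_EBB      (1ul << 7)   /* stand-in value */

    /* Nonzero when a TM instruction from the guest should be turned into
     * an interrupt rather than emulated (sketch, not kernel code). */
    static int tm_emulation_refused(unsigned long msr, unsigned long pcr,
                                    unsigned long fscr)
    {
        if ((msr & MSR_PR) && (pcr & PCR_ARCH_206))
            return 1;   /* userspace in a guest capped below ISA 2.07: no TM */
        if ((msr & MSR_PR) && !(fscr & FSCR_EBB))
            return 1;   /* userspace without the EBB facility enabled */
        return 0;
    }
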
  book3s_emulate.c
      81    if ((kvmppc_get_msr(vcpu) & MSR_PR) && level > PRIV_PROBLEM)    in spr_allowed()
      149   if (kvmppc_get_msr(vcpu) & MSR_PR)    in kvmppc_emulate_treclaim()
      224   if (guest_msr & MSR_PR)    in kvmppc_emulate_tabort()
      365   if ((kvmppc_get_msr(vcpu) & MSR_PR) ||    in kvmppc_core_emulate_op_pr()
      501   if (!(kvmppc_get_msr(vcpu) & MSR_PR)) {    in kvmppc_core_emulate_op_pr()
      543   WARN_ON(guest_msr & MSR_PR);    in kvmppc_core_emulate_op_pr()
      566   if (guest_msr & MSR_PR) {    in kvmppc_core_emulate_op_pr()
      600   if (guest_msr & MSR_PR) {    in kvmppc_core_emulate_op_pr()
      849   if (kvmppc_get_msr(vcpu) & MSR_PR) {    in kvmppc_core_emulate_mtspr_pr()
      854   if ((kvmppc_get_msr(vcpu) & MSR_PR) || spr    in kvmppc_core_emulate_mtspr_pr()
      [all...]

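The spr_allowed() hit is the canonical privilege check in PR KVM's SPR emulation: each special-purpose register carries a minimum privilege level, and a guest running with MSR_PR set may only touch problem-state registers. A sketch assuming a three-level ordering like the kernel's (PRIV_PROBLEM below PRIV_SUPER below PRIV_HYPER); the function body is illustrative:

    #include <stdbool.h>

    #define MSR_PR (1ul << 14)

    enum priv_level { PRIV_PROBLEM, PRIV_SUPER, PRIV_HYPER };

    /* Sketch of the spr_allowed() idea: refuse privileged SPR access
     * while the guest is in problem state. */
    static bool spr_allowed_sketch(unsigned long guest_msr, enum priv_level level)
    {
        if ((guest_msr & MSR_PR) && level > PRIV_PROBLEM)
            return false;   /* caller injects a privileged-instruction fault */
        return true;
    }
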
  book3s_hv_tm.c
      25    if (msr & MSR_PR) {    in emulate_tx_failure()
      75    if ((msr & MSR_PR) && (vcpu->arch.vcore->pcr & PCR_ARCH_206)) {    in kvmhv_p9_tm_emulation()
      86    if ((msr & MSR_PR) && !(vcpu->arch.fscr & FSCR_EBB)) {    in kvmhv_p9_tm_emulation()
      124   if ((msr & MSR_PR) && (vcpu->arch.vcore->pcr & PCR_ARCH_206)) {    in kvmhv_p9_tm_emulation()

  book3s_64_mmu.c
      212   !(kvmppc_get_msr(vcpu) & MSR_PR)) {    in kvmppc_mmu_book3s_64_xlate()
      257   if ((kvmppc_get_msr(vcpu) & MSR_PR) && slbe->Kp)    in kvmppc_mmu_book3s_64_xlate()
      259   else if (!(kvmppc_get_msr(vcpu) & MSR_PR) && slbe->Ks)    in kvmppc_mmu_book3s_64_xlate()
      302   !(kvmppc_get_msr(vcpu) & MSR_PR))    in kvmppc_mmu_book3s_64_xlate()
      568   return mp_ea && !(kvmppc_get_msr(vcpu) & MSR_PR) &&    in segment_contains_magic_page()
      629   if (kvmppc_get_msr(vcpu) & MSR_PR)    in kvmppc_mmu_book3s_64_esid_to_vsid()
      639   !(kvmppc_get_msr(vcpu) & MSR_PR)) {    in kvmppc_mmu_book3s_64_esid_to_vsid()

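Lines 257 and 259 are the Book3S-64 storage-key selection: each SLB entry carries two key bits, Kp applying in problem state and Ks in supervisor state, and MSR_PR decides which one governs the access. A sketch with a stand-in SLB-entry type in place of the kernel's:

    #include <stdbool.h>

    #define MSR_PR (1ul << 14)

    struct slbe_sketch { bool Ks, Kp; };   /* stand-in for the kernel's type */

    /* Sketch of the key selection in kvmppc_mmu_book3s_64_xlate(): the
     * key bit matching the current privilege state is the one that applies. */
    static bool storage_key_applies(unsigned long msr,
                                    const struct slbe_sketch *slbe)
    {
        if ((msr & MSR_PR) && slbe->Kp)
            return true;    /* user access under the problem-state key */
        if (!(msr & MSR_PR) && slbe->Ks)
            return true;    /* kernel access under the supervisor key */
        return false;
    }
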
  book3s_32_mmu.c
      141   if (kvmppc_get_msr(vcpu) & MSR_PR) {    in kvmppc_mmu_book3s_32_xlate_bat()
      224   if ((sr_kp(sre) && (kvmppc_get_msr(vcpu) & MSR_PR)) ||    in kvmppc_mmu_book3s_32_xlate_pte()
      225   (sr_ks(sre) && !(kvmppc_get_msr(vcpu) & MSR_PR)))    in kvmppc_mmu_book3s_32_xlate_pte()
      303   !(kvmppc_get_msr(vcpu) & MSR_PR)) {    in kvmppc_mmu_book3s_32_xlate()
      385   if (msr & MSR_PR)    in kvmppc_mmu_book3s_32_esid_to_vsid()

  book3s_pr.c
      237   smsr |= MSR_ME | MSR_RI | MSR_IR | MSR_DR | MSR_PR | MSR_EE;    in kvmppc_recalc_shadow_msr()
      250   if (!(guest_msr & MSR_PR))    in kvmppc_recalc_shadow_msr()
      319   if (unlikely((old_msr & MSR_PR) &&    in kvmppc_copy_from_svcpu()
      504   if (!(msr & MSR_PR) && MSR_TM_TRANSACTIONAL(msr))    in kvmppc_set_msr_pr()
      531   if ((kvmppc_get_msr(vcpu) & (MSR_PR|MSR_IR|MSR_DR)) !=    in kvmppc_set_msr_pr()
      532   (old_msr & (MSR_PR|MSR_IR|MSR_DR))) {    in kvmppc_set_msr_pr()
      537   if (!(msr & MSR_PR) && vcpu->arch.magic_page_pa) {    in kvmppc_set_msr_pr()
      556   !(old_msr & MSR_PR) && !(old_msr & MSR_SF) && (msr & MSR_SF)) {    in kvmppc_set_msr_pr()
      994   if (!(kvmppc_get_msr(vcpu) & MSR_PR))    in kvmppc_emulate_fac()
      1052  if ((fac == FSCR_TM_LG) && !(kvmppc_get_msr(vcpu) & MSR_PR))    in kvmppc_handle_fac()
      [all...]

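The first book3s_pr.c hit captures why PR KVM is named as it is: the guest runs as an ordinary host user process, so whatever the guest believes its MSR to be, the MSR the hardware actually uses must keep MSR_PR, address translation, and machine-check handling forced on. A sketch of that recalculation; the bit positions other than MSR_PR follow the usual layout but treat them as illustrative:

    #define MSR_RI (1ul << 1)    /* recoverable interrupt */
    #define MSR_DR (1ul << 4)    /* data relocation */
    #define MSR_IR (1ul << 5)    /* instruction relocation */
    #define MSR_ME (1ul << 12)   /* machine check enable */
    #define MSR_PR (1ul << 14)   /* problem state */
    #define MSR_EE (1ul << 15)   /* external interrupt enable */

    /* Sketch of kvmppc_recalc_shadow_msr(): start from the guest's MSR,
     * then force on the bits the host cannot let the guest drop. */
    static unsigned long recalc_shadow_msr(unsigned long guest_msr)
    {
        unsigned long smsr = guest_msr;

        smsr |= MSR_ME | MSR_RI | MSR_IR | MSR_DR | MSR_PR | MSR_EE;
        return smsr;
    }
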
  book3s_32_mmu_host.c
      84    if (kvmppc_get_msr(vcpu) & MSR_PR)    in find_sid_vsid()
      270   if (kvmppc_get_msr(vcpu) & MSR_PR)    in create_sid_map()

  book3s_64_mmu_host.c
      50    if (kvmppc_get_msr(vcpu) & MSR_PR)    in find_sid_vsid()
      231   if (kvmppc_get_msr(vcpu) & MSR_PR)    in create_sid_map()

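find_sid_vsid() and create_sid_map() in both MMU host files make the same move: the shadow MMU keeps guest-user and guest-kernel translations in disjoint VSID spaces, because one guest effective address may map differently in the two states, and MSR_PR selects the space. A sketch; VSID_PR_SKETCH is a hypothetical tag bit, not the kernel's constant:

    #define MSR_PR         (1ul << 14)
    #define VSID_PR_SKETCH (1ul << 52)   /* hypothetical tag to split the spaces */

    /* Sketch of the find_sid_vsid()/create_sid_map() idiom: fold the
     * guest's privilege state into the VSID so user and kernel mappings
     * never collide in the shadow MMU. */
    static unsigned long effective_gvsid(unsigned long msr, unsigned long gvsid)
    {
        if (msr & MSR_PR)
            gvsid |= VSID_PR_SKETCH;
        return gvsid;
    }
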
/kernel/linux/linux-6.6/arch/powerpc/kvm/
  book3s_hv_tm_builtin.c
      53    if ((msr & MSR_PR) && (vcpu->arch.vcore->pcr & PCR_ARCH_206))    in kvmhv_p9_tm_emulation_early()
      57    ((msr & MSR_PR) && !(mfspr(SPRN_FSCR) & FSCR_EBB)))    in kvmhv_p9_tm_emulation_early()
      92    if ((msr & MSR_PR) && (vcpu->arch.vcore->pcr & PCR_ARCH_206))    in kvmhv_p9_tm_emulation_early()

  book3s_emulate.c
      77    if ((kvmppc_get_msr(vcpu) & MSR_PR) && level > PRIV_PROBLEM)    in spr_allowed()
      145   if (kvmppc_get_msr(vcpu) & MSR_PR)    in kvmppc_emulate_treclaim()
      220   if (guest_msr & MSR_PR)    in kvmppc_emulate_tabort()
      361   if ((kvmppc_get_msr(vcpu) & MSR_PR) ||    in kvmppc_core_emulate_op_pr()
      497   if (!(kvmppc_get_msr(vcpu) & MSR_PR)) {    in kvmppc_core_emulate_op_pr()
      539   WARN_ON(guest_msr & MSR_PR);    in kvmppc_core_emulate_op_pr()
      562   if (guest_msr & MSR_PR) {    in kvmppc_core_emulate_op_pr()
      596   if (guest_msr & MSR_PR) {    in kvmppc_core_emulate_op_pr()
      848   if (kvmppc_get_msr(vcpu) & MSR_PR) {    in kvmppc_core_emulate_mtspr_pr()
      853   if ((kvmppc_get_msr(vcpu) & MSR_PR) || spr    in kvmppc_core_emulate_mtspr_pr()
      [all...]

  book3s_64_mmu.c
      212   !(kvmppc_get_msr(vcpu) & MSR_PR)) {    in kvmppc_mmu_book3s_64_xlate()
      257   if ((kvmppc_get_msr(vcpu) & MSR_PR) && slbe->Kp)    in kvmppc_mmu_book3s_64_xlate()
      259   else if (!(kvmppc_get_msr(vcpu) & MSR_PR) && slbe->Ks)    in kvmppc_mmu_book3s_64_xlate()
      301   !(kvmppc_get_msr(vcpu) & MSR_PR))    in kvmppc_mmu_book3s_64_xlate()
      567   return mp_ea && !(kvmppc_get_msr(vcpu) & MSR_PR) &&    in segment_contains_magic_page()
      628   if (kvmppc_get_msr(vcpu) & MSR_PR)    in kvmppc_mmu_book3s_64_esid_to_vsid()
      638   !(kvmppc_get_msr(vcpu) & MSR_PR)) {    in kvmppc_mmu_book3s_64_esid_to_vsid()

  book3s_hv_tm.c
      25    if (msr & MSR_PR) {    in emulate_tx_failure()
      84    if ((msr & MSR_PR) && (vcpu->arch.vcore->pcr & PCR_ARCH_206)) {    in kvmhv_p9_tm_emulation()
      96    if ((msr & MSR_PR) && !(vcpu->arch.fscr & FSCR_EBB)) {    in kvmhv_p9_tm_emulation()
      135   if ((msr & MSR_PR) && (vcpu->arch.vcore->pcr & PCR_ARCH_206)) {    in kvmhv_p9_tm_emulation()

  book3s_32_mmu.c
      141   if (kvmppc_get_msr(vcpu) & MSR_PR) {    in kvmppc_mmu_book3s_32_xlate_bat()
      224   if ((sr_kp(sre) && (kvmppc_get_msr(vcpu) & MSR_PR)) ||    in kvmppc_mmu_book3s_32_xlate_pte()
      225   (sr_ks(sre) && !(kvmppc_get_msr(vcpu) & MSR_PR)))    in kvmppc_mmu_book3s_32_xlate_pte()
      303   !(kvmppc_get_msr(vcpu) & MSR_PR)) {    in kvmppc_mmu_book3s_32_xlate()
      385   if (msr & MSR_PR)    in kvmppc_mmu_book3s_32_esid_to_vsid()

  book3s_pr.c
      244   smsr |= MSR_ME | MSR_RI | MSR_IR | MSR_DR | MSR_PR | MSR_EE;    in kvmppc_recalc_shadow_msr()
      257   if (!(guest_msr & MSR_PR))    in kvmppc_recalc_shadow_msr()
      326   if (unlikely((old_msr & MSR_PR) &&    in kvmppc_copy_from_svcpu()
      489   if (!(msr & MSR_PR) && MSR_TM_TRANSACTIONAL(msr))    in kvmppc_set_msr_pr()
      515   if ((kvmppc_get_msr(vcpu) & (MSR_PR|MSR_IR|MSR_DR)) !=    in kvmppc_set_msr_pr()
      516   (old_msr & (MSR_PR|MSR_IR|MSR_DR))) {    in kvmppc_set_msr_pr()
      521   if (!(msr & MSR_PR) && vcpu->arch.magic_page_pa) {    in kvmppc_set_msr_pr()
      540   !(old_msr & MSR_PR) && !(old_msr & MSR_SF) && (msr & MSR_SF)) {    in kvmppc_set_msr_pr()
      978   if (!(kvmppc_get_msr(vcpu) & MSR_PR))    in kvmppc_emulate_fac()
      1036  if ((fac == FSCR_TM_LG) && !(kvmppc_get_msr(vcpu) & MSR_PR))    in kvmppc_handle_fac()
      [all...]

/kernel/linux/linux-5.10/arch/powerpc/kernel/
  syscall_64.c
      34    BUG_ON(!(regs->msr & MSR_PR));    in system_call_exception()
      264   BUG_ON(!(regs->msr & MSR_PR));    in interrupt_exit_user_prepare()
      348   BUG_ON(regs->msr & MSR_PR);    in interrupt_exit_kernel_prepare()

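The three syscall_64.c hits assert one invariant from both directions: a system call or user-exit path must have arrived from problem state, and the kernel-exit path must not have. A standalone sketch using assert() in place of BUG_ON(); the struct is a stand-in for pt_regs reduced to the single field the checks read:

    #include <assert.h>

    #define MSR_PR (1ul << 14)

    struct pt_regs_sketch { unsigned long msr; };  /* stand-in for pt_regs */

    static void system_call_entry(const struct pt_regs_sketch *regs)
    {
        assert(regs->msr & MSR_PR);    /* syscalls arrive only from user mode */
    }

    static void interrupt_exit_kernel(const struct pt_regs_sketch *regs)
    {
        assert(!(regs->msr & MSR_PR)); /* returning to kernel context */
    }
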
  head_32.h
      38    andi. r11, r11, MSR_PR
      104   andi. r10, r9, MSR_PR
      134   andi. r11, r9, MSR_PR
      147   andi. r11, r9, MSR_PR

  exceptions-64e.S
      74    andi. r3,r3,MSR_PR
      140   andi. r3,r3,MSR_PR
      282   andi. r10,r11,MSR_PR; /* save stack pointer */ \
      638   andi. r0,r12,MSR_PR;
      657   andi. r0,r12,MSR_PR;
      784   1: andi. r14,r11,MSR_PR; /* check for userspace again */
      856   1: andi. r14,r11,MSR_PR; /* check for userspace again */
      1063  andi. r3,r3,MSR_PR
      1204  andi. r6,r10,MSR_PR

  head_booke.h
      55    andi. r11, r11, MSR_PR; /* check whether user or kernel */\
      109   andi. r11, r9, MSR_PR
      242   andi. r11,r11,MSR_PR; \

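Every assembly hit in these entry files is the same test: andi. masks MSR_PR out of the saved MSR and, as a side effect, sets CR0, so the very next conditional branch splits the user and kernel paths. It works in a single instruction because MSR_PR (0x4000) fits andi.'s 16-bit unsigned immediate. A C rendering of what that one instruction decides; the function name is illustrative:

    #define MSR_PR (1ul << 14)   /* 0x4000: fits andi.'s 16-bit immediate */

    /* C rendering of "andi. rN, rM, MSR_PR" followed by a beq to the
     * kernel path: nonzero means the interrupted context was userspace. */
    static int came_from_user(unsigned long saved_msr)
    {
        return (saved_msr & MSR_PR) != 0;
    }
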
/kernel/linux/linux-6.6/arch/powerpc/kernel/
  head_32.h
      42    andi. r11, r11, MSR_PR
      135   andi. r12,r9,MSR_PR

  syscall.c
      34    BUG_ON(!(regs->msr & MSR_PR));    in system_call_exception()

  head_booke.h
      56    andi. r11, r11, MSR_PR; /* check whether user or kernel */\
      108   andi. r12,r9,MSR_PR
      204   andi. r11,r11,MSR_PR; \

  exceptions-64e.S
      76    andi. r3,r3,MSR_PR
      139   andi. r3,r3,MSR_PR
      261   andi. r10,r11,MSR_PR; /* save stack pointer */ \
      590   andi. r0,r12,MSR_PR;
      607   andi. r0,r12,MSR_PR;
      728   1: andi. r14,r11,MSR_PR; /* check for userspace again */
      799   1: andi. r14,r11,MSR_PR; /* check for userspace again */

/kernel/linux/linux-5.10/arch/powerpc/include/asm/
  reg_booke.h
      43    #define MSR_USER32 (MSR_ | MSR_PR | MSR_EE)
      47    #define MSR_USER (MSR_KERNEL|MSR_PR|MSR_EE)
      50    #define MSR_USER (MSR_KERNEL|MSR_PR|MSR_EE)

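The reg_booke.h hits define the user-mode MSR by composition: take the kernel MSR (or the bare MSR_ core-bits macro that reg_booke.h really does define for 64-bit Book3E) and add problem state plus external-interrupt enable. A sketch of the same composition; MSR_KERNEL_SKETCH stands in for the platform-specific kernel MSR:

    #define MSR_PR             (1ul << 14)
    #define MSR_EE             (1ul << 15)
    #define MSR_KERNEL_SKETCH  (1ul << 12)  /* stand-in: e.g. just MSR_ME */

    /* User mode = kernel mode + problem state + interrupts enabled. */
    #define MSR_USER_SKETCH    (MSR_KERNEL_SKETCH | MSR_PR | MSR_EE)
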
/kernel/linux/linux-6.6/arch/powerpc/include/asm/
  reg_booke.h
      43    #define MSR_USER32 (MSR_ | MSR_PR | MSR_EE)
      47    #define MSR_USER (MSR_KERNEL|MSR_PR|MSR_EE)
      50    #define MSR_USER (MSR_KERNEL|MSR_PR|MSR_EE)
