/kernel/linux/linux-5.10/arch/powerpc/kernel/
signal_64.c:
    150  msr &= ~MSR_VSX;  in setup_sigcontext()
    161  /* set MSR_VSX in the MSR value in the frame to  in setup_sigcontext()
    164  msr |= MSR_VSX;  in setup_sigcontext()
    223  msr |= tsk->thread.ckpt_regs.msr & (MSR_FP | MSR_VEC | MSR_VSX);  in setup_tm_sigcontexts()
    288  if (msr & MSR_VSX)  in setup_tm_sigcontexts()
    293  /* set MSR_VSX in the MSR value in the frame to  in setup_tm_sigcontexts()
    296  msr |= MSR_VSX;  in setup_tm_sigcontexts()
    369  regs->msr &= ~(MSR_FP | MSR_FE0 | MSR_FE1 | MSR_VEC | MSR_VSX);  in restore_sigcontext()
    402  if ((msr & MSR_VSX) != 0) {  in restore_sigcontext()
    487  regs->msr &= ~(MSR_FP | MSR_FE0 | MSR_FE1 | MSR_VEC | MSR_VSX);  in restore_tm_sigcontexts()
    [all...]
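The save/restore pattern in these hits is: the save side first clears MSR_VSX in the MSR copy destined for the signal frame, and sets it again only once the VSX registers have actually been written out, so the bit in the frame is a promise that valid VSX data follows. A minimal compilable sketch of that decision, assuming nothing beyond the bit positions in arch/powerpc/include/asm/reg.h (frame_msr() itself is a made-up helper, not a kernel function):

    #define MSR_FP  (1UL << 13)  /* MSR_FP_LG  = 13 in asm/reg.h */
    #define MSR_VSX (1UL << 23)  /* MSR_VSX_LG = 23 */
    #define MSR_VEC (1UL << 25)  /* MSR_VEC_LG = 25 */

    /* Compute the MSR value to place in a signal frame: start from the
     * live MSR with MSR_VSX cleared, and advertise VSX only if the VSX
     * registers were really dumped into the frame. */
    static unsigned long frame_msr(unsigned long live_msr, int vsx_saved)
    {
            unsigned long msr = live_msr & ~MSR_VSX;

            if (vsx_saved)
                    msr |= MSR_VSX;
            return msr;
    }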
process.c:
    128  newmsr |= MSR_VSX;  in msr_check_and_set()
    146  newmsr &= ~MSR_VSX;  in __msr_check_and_clear()
    162  msr &= ~MSR_VSX;  in __giveup_fpu()
    245  msr &= ~MSR_VSX;  in __giveup_altivec()
    308  * We should never be setting MSR_VSX without also setting  in __giveup_vsx()
    311  WARN_ON((msr & MSR_VSX) && !((msr & MSR_FP) && (msr & MSR_VEC)));  in __giveup_vsx()
    313  /* __giveup_fpu will clear MSR_VSX */  in __giveup_vsx()
    324  msr_check_and_set(MSR_FP|MSR_VEC|MSR_VSX);  in giveup_vsx()
    326  msr_check_and_clear(MSR_FP|MSR_VEC|MSR_VSX);  in giveup_vsx()
    335  cpumsr = msr_check_and_set(MSR_FP|MSR_VEC|MSR_VSX);  in enable_kernel_vsx()
    [all...]
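These process.c hits encode one invariant: the VSX register file overlays the FP and Altivec files, so MSR_VSX must never be set without MSR_FP and MSR_VEC, which is exactly what the WARN_ON in __giveup_vsx() asserts and why giveup_vsx() sets and clears all three bits together. A hedged sketch of the same check, with assert() standing in for the kernel's WARN_ON():

    #include <assert.h>

    #define MSR_FP  (1UL << 13)
    #define MSR_VSX (1UL << 23)
    #define MSR_VEC (1UL << 25)

    /* Mirrors the __giveup_vsx() sanity check: MSR_VSX implies both
     * MSR_FP and MSR_VEC. */
    static void check_vsx_invariant(unsigned long msr)
    {
            assert(!((msr & MSR_VSX) && !((msr & MSR_FP) && (msr & MSR_VEC))));
    }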
signal_32.c:
    282  msr &= ~MSR_VSX;  in save_user_regs()
    286  * buffer, then write that to userspace. Also set MSR_VSX in  in save_user_regs()
    294  msr |= MSR_VSX;  in save_user_regs()
    424  * buffer, then write that to userspace. Also set MSR_VSX in  in save_tm_user_regs()
    431  if (msr & MSR_VSX) {  in save_tm_user_regs()
    440  msr |= MSR_VSX;  in save_tm_user_regs()
    539  regs->msr &= ~MSR_VSX;  in restore_user_regs()
    540  if (msr & MSR_VSX) {  in restore_user_regs()
    651  regs->msr &= ~MSR_VSX;  in restore_tm_user_regs()
    652  if (msr & MSR_VSX) {  in restore_tm_user_regs()
    [all...]
syscall_64.c:
    220  mathflags |= MSR_VEC | MSR_VSX;  in syscall_exit_prepare()
    299  mathflags |= MSR_VEC | MSR_VSX;  in interrupt_exit_user_prepare()
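Both hits sit in the exit-to-user paths that decide which math state to hand back: on VSX-capable CPUs, MSR_VEC and MSR_VSX are restored as a pair, consistent with the overlay invariant in process.c. A paraphrased, self-contained sketch (the has_vsx/has_altivec flags stand in for the kernel's cpu_has_feature(CPU_FTR_VSX) and cpu_has_feature(CPU_FTR_ALTIVEC) tests):

    #define MSR_FP  (1UL << 13)
    #define MSR_VSX (1UL << 23)
    #define MSR_VEC (1UL << 25)

    /* Which facility bits to restore when returning to userspace. */
    static unsigned long math_restore_flags(int has_altivec, int has_vsx)
    {
            unsigned long mathflags = MSR_FP;

            if (has_vsx)
                    mathflags |= MSR_VEC | MSR_VSX; /* VEC and VSX travel together */
            else if (has_altivec)
                    mathflags |= MSR_VEC;
            return mathflags;
    }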
fpu.S:
    99  oris r5,r5,MSR_VSX@h
tm.S:
    142  oris r15,r15, MSR_VSX@h
    396  oris r5,r5, MSR_VSX@h
vector.S:
    136  oris r12,r12,MSR_VSX@h
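The fpu.S, tm.S, and vector.S hits all use the same idiom: oris ORs a 16-bit immediate into bits 16..31 of a register, and the assembler's @h operator yields the upper halfword of a constant, so a single "oris rN,rN,MSR_VSX@h" sets MSR_VSX (0x00800000, bit 23). A small C model of what the instruction computes (the oris() helper here is illustrative, not a kernel function):

    #include <stdint.h>

    #define MSR_VSX 0x00800000UL /* bit 23, per asm/reg.h */

    /* Models PowerPC "oris rD,rA,UI": rD = rA | (UI << 16). */
    static inline uint32_t oris(uint32_t ra, uint16_t ui)
    {
            return ra | ((uint32_t)ui << 16);
    }

    /* MSR_VSX@h is the high halfword of MSR_VSX, i.e. 0x0080. */
    static uint32_t msr_set_vsx(uint32_t msr)
    {
            return oris(msr, (uint16_t)(MSR_VSX >> 16));
    }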
/kernel/linux/linux-6.6/arch/powerpc/kernel/
signal_64.c:
    163  msr &= ~MSR_VSX;  in __unsafe_setup_sigcontext()
    173  /* set MSR_VSX in the MSR value in the frame to  in __unsafe_setup_sigcontext()
    176  msr |= MSR_VSX;  in __unsafe_setup_sigcontext()
    237  msr |= tsk->thread.ckpt_regs.msr & (MSR_FP | MSR_VEC | MSR_VSX);  in setup_tm_sigcontexts()
    302  if (msr & MSR_VSX)  in setup_tm_sigcontexts()
    307  /* set MSR_VSX in the MSR value in the frame to  in setup_tm_sigcontexts()
    310  msr |= MSR_VSX;  in setup_tm_sigcontexts()
    387  regs_set_return_msr(regs, regs->msr & ~(MSR_FP | MSR_FE0 | MSR_FE1 | MSR_VEC | MSR_VSX));  in __unsafe_restore_sigcontext()
    418  if ((msr & MSR_VSX) != 0) {  in __unsafe_restore_sigcontext()
    506  regs_set_return_msr(regs, regs->msr & ~(MSR_FP | MSR_FE0 | MSR_FE1 | MSR_VEC | MSR_VSX));  in restore_tm_sigcontexts()
    [all...]
process.c:
    127  newmsr |= MSR_VSX;  in msr_check_and_set()
    145  newmsr &= ~MSR_VSX;  in __msr_check_and_clear()
    161  msr &= ~MSR_VSX;  in __giveup_fpu()
    244  msr &= ~MSR_VSX;  in __giveup_altivec()
    307  * We should never be setting MSR_VSX without also setting  in __giveup_vsx()
    310  WARN_ON((msr & MSR_VSX) && !((msr & MSR_FP) && (msr & MSR_VEC)));  in __giveup_vsx()
    312  /* __giveup_fpu will clear MSR_VSX */  in __giveup_vsx()
    323  msr_check_and_set(MSR_FP|MSR_VEC|MSR_VSX);  in giveup_vsx()
    325  msr_check_and_clear(MSR_FP|MSR_VEC|MSR_VSX);  in giveup_vsx()
    334  cpumsr = msr_check_and_set(MSR_FP|MSR_VEC|MSR_VSX);  in enable_kernel_vsx()
    [all...]
signal_32.c:
    302  msr &= ~MSR_VSX;  in __unsafe_save_user_regs()
    306  * buffer, then write that to userspace. Also set MSR_VSX in  in __unsafe_save_user_regs()
    312  msr |= MSR_VSX;  in __unsafe_save_user_regs()
    425  * buffer, then write that to userspace. Also set MSR_VSX in  in save_tm_user_regs_unsafe()
    431  if (msr & MSR_VSX)  in save_tm_user_regs_unsafe()
    436  msr |= MSR_VSX;  in save_tm_user_regs_unsafe()
    520  regs_set_return_msr(regs, regs->msr & ~MSR_VSX);  in restore_user_regs()
    521  if (msr & MSR_VSX) {  in restore_user_regs()
    621  regs_set_return_msr(regs, regs->msr & ~MSR_VSX);  in restore_tm_user_regs()
    622  if (msr & MSR_VSX) {  in restore_tm_user_regs()
    [all...]
fpu.S:
    104  oris r5,r5,MSR_VSX@h
tm.S:
    142  oris r15,r15, MSR_VSX@h
    393  oris r5,r5, MSR_VSX@h
interrupt.c:
    211  mathflags |= MSR_VEC | MSR_VSX;  in interrupt_exit_user_prepare_main()
vector.S:
    147  oris r12,r12,MSR_VSX@h
/kernel/linux/linux-5.10/arch/powerpc/lib/
ldstfp.S:
    162  oris r7,r6,MSR_VSX@h
    189  oris r7,r6,MSR_VSX@h
test_emulate_step.c:
    73  regs->msr |= MSR_VSX;  in init_pt_regs()
/kernel/linux/linux-6.6/arch/powerpc/lib/
ldstfp.S:
    162  oris r7,r6,MSR_VSX@h
    189  oris r7,r6,MSR_VSX@h
/kernel/linux/linux-5.10/arch/powerpc/include/asm/
switch_to.h:
    72  msr_check_and_clear(MSR_FP|MSR_VEC|MSR_VSX);  in disable_kernel_vsx()
/kernel/linux/linux-6.6/arch/powerpc/include/asm/
switch_to.h:
    81  msr_check_and_clear(MSR_FP|MSR_VEC|MSR_VSX);  in disable_kernel_vsx()
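Both switch_to.h hits are the body of disable_kernel_vsx(), which clears exactly the three bits enable_kernel_vsx() sets; kernel code that touches VSX registers brackets the work with this pair, normally under preempt_disable() so the register state cannot migrate mid-use. A hedged usage sketch (enable_kernel_vsx(), disable_kernel_vsx(), and preempt_disable()/preempt_enable() are real kernel APIs; do_vsx_work() is a hypothetical payload):

    #include <linux/preempt.h>
    #include <asm/switch_to.h>

    static void vsx_accelerated_op(void)
    {
            preempt_disable();      /* keep the VSX state on this CPU   */
            enable_kernel_vsx();    /* sets MSR_FP|MSR_VEC|MSR_VSX      */
            do_vsx_work();          /* hypothetical VSX-using routine   */
            disable_kernel_vsx();   /* clears the same three bits again */
            preempt_enable();
    }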
/kernel/linux/linux-5.10/arch/powerpc/kvm/
tm.S:
    45  oris r8, r8, (MSR_VEC | MSR_VSX)@h
    239  oris r5, r5, (MSR_VEC | MSR_VSX)@h
emulate_loadstore.c:
    42  if (!(kvmppc_get_msr(vcpu) & MSR_VSX)) {  in kvmppc_check_vsx_disabled()
    315  MSR_VSX);  in kvmppc_emulate_loadstore()
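The hit at line 42 is the guard that keeps KVM honest about the guest's MSR: if the guest itself has MSR_VSX clear, a VSX load/store must not be emulated; the guest gets a VSX-unavailable interrupt instead. A sketch close to the quoted check (kvmppc_get_msr() and kvmppc_core_queue_vsx_unavail() are the real KVM helpers; the wrapper name is ours, since the real function is static in emulate_loadstore.c):

    #include <linux/kvm_host.h>
    #include <asm/kvm_ppc.h>

    /* Returns true when emulation must stop because the guest has VSX
     * disabled; the guest will take a VSX unavailable interrupt. */
    static bool vsx_disabled_for_guest(struct kvm_vcpu *vcpu)
    {
            if (!(kvmppc_get_msr(vcpu) & MSR_VSX)) {
                    kvmppc_core_queue_vsx_unavail(vcpu);
                    return true;
            }
            return false;
    }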
book3s_pr.c:
    172  kvmppc_giveup_ext(vcpu, MSR_FP | MSR_VEC | MSR_VSX);  in kvmppc_core_vcpu_put_pr()
    361  (MSR_FP | MSR_VEC | MSR_VSX);  in kvmppc_handle_lost_math_exts()
    384  kvmppc_giveup_ext(vcpu, MSR_VSX);  in kvmppc_save_tm_pr()
    828  if (msr & MSR_VSX)  in kvmppc_giveup_ext()
    858  vcpu->arch.guest_owned_ext &= ~(msr | MSR_VSX);  in kvmppc_giveup_ext()
    896  if (msr == MSR_VSX) {  in kvmppc_handle_ext()
    910  msr = MSR_FP | MSR_VEC | MSR_VSX;  in kvmppc_handle_ext()
    1390  ext_msr = MSR_VSX;  in kvmppc_handle_exit_pr()
    1868  kvmppc_giveup_ext(vcpu, MSR_FP | MSR_VEC | MSR_VSX);  in kvmppc_vcpu_run_pr()
/kernel/linux/linux-6.6/arch/powerpc/kvm/
emulate_loadstore.c:
    42  if (!(kvmppc_get_msr(vcpu) & MSR_VSX)) {  in kvmppc_check_vsx_disabled()
    316  MSR_VSX);  in kvmppc_emulate_loadstore()
tm.S:
    45  oris r8, r8, (MSR_VEC | MSR_VSX)@h
    239  oris r5, r5, (MSR_VEC | MSR_VSX)@h
book3s_pr.c:
    184  kvmppc_giveup_ext(vcpu, MSR_FP | MSR_VEC | MSR_VSX);  in kvmppc_core_vcpu_put_pr()
    368  (MSR_FP | MSR_VEC | MSR_VSX);  in kvmppc_handle_lost_math_exts()
    391  kvmppc_giveup_ext(vcpu, MSR_VSX);  in kvmppc_save_tm_pr()
    812  if (msr & MSR_VSX)  in kvmppc_giveup_ext()
    842  vcpu->arch.guest_owned_ext &= ~(msr | MSR_VSX);  in kvmppc_giveup_ext()
    880  if (msr == MSR_VSX) {  in kvmppc_handle_ext()
    894  msr = MSR_FP | MSR_VEC | MSR_VSX;  in kvmppc_handle_ext()
    1378  ext_msr = MSR_VSX;  in kvmppc_handle_exit_pr()
    1856  kvmppc_giveup_ext(vcpu, MSR_FP | MSR_VEC | MSR_VSX);  in kvmppc_vcpu_run_pr()
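The book3s_pr.c hits apply the same overlay rule to guests: when a guest faults for VSX (msr == MSR_VSX), kvmppc_handle_ext() widens the request to all three facilities before loading state, and kvmppc_giveup_ext() conversely revokes MSR_VSX from guest_owned_ext whenever FP or VEC is given up. A paraphrased sketch of the widening step (simplified from the hits above; widen_ext_request() is a made-up name, not the kernel function):

    #define MSR_FP  (1UL << 13)
    #define MSR_VSX (1UL << 23)
    #define MSR_VEC (1UL << 25)

    /* A guest VSX-unavailable exit loads FP and Altivec too, because
     * VSX registers overlay both register files. */
    static unsigned long widen_ext_request(unsigned long msr)
    {
            if (msr == MSR_VSX)
                    msr = MSR_FP | MSR_VEC | MSR_VSX;
            return msr;
    }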