/kernel/linux/linux-6.6/arch/powerpc/kernel/
process.c
    242  msr &= ~MSR_VEC;  in __giveup_altivec()
    252  msr_check_and_set(MSR_VEC);  in giveup_altivec()
    254  msr_check_and_clear(MSR_VEC);  in giveup_altivec()
    264  cpumsr = msr_check_and_set(MSR_VEC);  in enable_kernel_altivec()
    266  if (current->thread.regs && (current->thread.regs->msr & MSR_VEC)) {  in enable_kernel_altivec()
    291  if (tsk->thread.regs->msr & MSR_VEC) {  in flush_altivec_to_thread()
    308  * MSR_FP and MSR_VEC  in __giveup_vsx()
    310  WARN_ON((msr & MSR_VSX) && !((msr & MSR_FP) && (msr & MSR_VEC)));  in __giveup_vsx()
    315  if (msr & MSR_VEC)  in __giveup_vsx()
    323  msr_check_and_set(MSR_FP|MSR_VEC|MSR_VS  in giveup_vsx()
    [all...]
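The process.c hits are the lazy save/restore helpers for the vector unit: the kernel briefly sets MSR_VEC in the hardware MSR so it may touch the VRs, flushes them into the thread struct, then clears MSR_VEC in the task's saved MSR so the next user-space vector instruction faults and reloads the state. A simplified sketch of that pattern, using the helper names visible in the hits (not the verbatim kernel code; TM/VSX handling and error paths omitted):

/* Sketch only: flush the current AltiVec state and revoke the facility. */
static void __giveup_altivec_sketch(struct task_struct *tsk)
{
	unsigned long msr = tsk->thread.regs->msr;

	/* save VR0-VR31/VSCR into tsk->thread (save helper elided) */
	msr &= ~MSR_VEC;		/* task no longer owns the vector unit */
	tsk->thread.regs->msr = msr;	/* 6.6 goes through regs_set_return_msr() here */
}

void giveup_altivec_sketch(struct task_struct *tsk)
{
	msr_check_and_set(MSR_VEC);	/* allow this CPU to touch the VRs */
	__giveup_altivec_sketch(tsk);
	msr_check_and_clear(MSR_VEC);	/* drop the facility again */
}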
signal_64.c
    119  * process never used altivec yet (MSR_VEC is zero in pt_regs of  in __unsafe_setup_sigcontext()
    144  /* set MSR_VEC in the MSR value in the frame to indicate that sc->v_reg)  in __unsafe_setup_sigcontext()
    147  msr |= MSR_VEC;  in __unsafe_setup_sigcontext()
    213  * process never used altivec yet (MSR_VEC is zero in pt_regs of  in setup_tm_sigcontexts()
    237  msr |= tsk->thread.ckpt_regs.msr & (MSR_FP | MSR_VEC | MSR_VSX);  in setup_tm_sigcontexts()
    251  if (msr & MSR_VEC)  in setup_tm_sigcontexts()
    260  /* set MSR_VEC in the MSR value in the frame to indicate  in setup_tm_sigcontexts()
    263  msr |= MSR_VEC;  in setup_tm_sigcontexts()
    271  if (msr & MSR_VEC)  in setup_tm_sigcontexts()
    387  regs_set_return_msr(regs, regs->msr & ~(MSR_FP | MSR_FE0 | MSR_FE1 | MSR_VEC | MSR_VS  in __unsafe_restore_sigcontext()
    [all...]
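signal_64.c uses MSR_VEC in the signal frame as a validity flag: the VRs are written out only if the task has actually used AltiVec, and MSR_VEC is then ORed into the MSR word stored in the sigcontext so sigreturn knows v_regs holds live data. A hedged sketch of the save side (field names assumed from context, copy-out elided):

/* Sketch only: decide whether the 64-bit signal frame carries VR state. */
unsigned long msr = regs->msr;

if (current->thread.used_vr) {
	flush_altivec_to_thread(current);	/* VRs -> thread struct */
	/* copy current->thread.vr_state out to the user frame here */
	msr |= MSR_VEC;				/* mark v_regs as valid for sigreturn */
}
/* else: the task never used AltiVec and MSR_VEC stays clear in the frame */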
signal_32.c
    281  /* set MSR_VEC in the saved MSR value to indicate that  in __unsafe_save_user_regs()
    283  msr |= MSR_VEC;  in __unsafe_save_user_regs()
    285  /* else assert((regs->msr & MSR_VEC) == 0) */  in __unsafe_save_user_regs()
    388  if (msr & MSR_VEC)  in save_tm_user_regs_unsafe()
    397  /* set MSR_VEC in the saved MSR value to indicate that  in save_tm_user_regs_unsafe()
    400  msr |= MSR_VEC;  in save_tm_user_regs_unsafe()
    410  if (msr & MSR_VEC)  in save_tm_user_regs_unsafe()
    498  regs_set_return_msr(regs, regs->msr & ~MSR_VEC);  in restore_user_regs()
    499  if (msr & MSR_VEC) {  in restore_user_regs()
    598  regs_set_return_msr(regs, regs->msr & ~MSR_VEC);  in restore_tm_user_regs()
    [all...]
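signal_32.c shows the same convention plus the restore side: on sigreturn, MSR_VEC is first cleared from the return MSR, and the saved VRs are copied back only if the frame's MSR word has MSR_VEC set. Roughly, as in this sketch (the copy helper and frame field are assumptions):

/* Sketch only: restore path for the 32-bit frame. */
regs_set_return_msr(regs, regs->msr & ~MSR_VEC);
if (msr & MSR_VEC) {
	/* the frame holds live VR state: copy sr->mc_vregs back into
	 * current->thread.vr_state here (copy helper elided) */
	current->thread.used_vr = true;
}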
vector.S
    56  oris r5,r5,MSR_VEC@h
    75  oris r9,r9,MSR_VEC@h
    79  oris r12,r12,MSR_VEC@h
    133  andis. r5,r12,MSR_VEC@h
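In the assembly files (vector.S, tm.S, ldstfp.S) the bit is set and tested with oris/andis. because those instructions take a 16-bit immediate and MSR_VEC lives in the upper half of the low 32 bits of the MSR; MSR_VEC@h is the high 16 bits of the constant. A quick user-space check of that arithmetic (0x02000000, i.e. bit 25, is the usual definition and is stated here as an assumption):

#include <stdio.h>

#define MSR_VEC 0x02000000UL	/* assumed value: AltiVec available, MSR bit 25 */

int main(void)
{
	/* "oris rX,rY,MSR_VEC@h" ORs this immediate into bits 16-31 of rY */
	printf("MSR_VEC@h = 0x%04lx\n", MSR_VEC >> 16);	/* prints 0x0200 */
	return 0;
}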
interrupt.c
    211  mathflags |= MSR_VEC | MSR_VSX;  in interrupt_exit_user_prepare_main()
    213  mathflags |= MSR_VEC;  in interrupt_exit_user_prepare_main()
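interrupt.c is the return-to-user path: if the task has pending math state, the kernel restores it and ORs the matching facility bits back into the return MSR. Which bits are chosen depends on CPU features, roughly as in this sketch (feature checks as I understand them, not verbatim):

/* Sketch only: which math facility bits to hand back to user space. */
unsigned long mathflags = MSR_FP;

if (cpu_has_feature(CPU_FTR_VSX))
	mathflags |= MSR_VEC | MSR_VSX;		/* VSX implies AltiVec as well */
else if (cpu_has_feature(CPU_FTR_ALTIVEC))
	mathflags |= MSR_VEC;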
tm.S
    139  oris r15, r15, MSR_VEC@h
    389  oris r5, r5, MSR_VEC@h
/kernel/linux/linux-5.10/arch/powerpc/kernel/
process.c
    243  msr &= ~MSR_VEC;  in __giveup_altivec()
    253  msr_check_and_set(MSR_VEC);  in giveup_altivec()
    255  msr_check_and_clear(MSR_VEC);  in giveup_altivec()
    265  cpumsr = msr_check_and_set(MSR_VEC);  in enable_kernel_altivec()
    267  if (current->thread.regs && (current->thread.regs->msr & MSR_VEC)) {  in enable_kernel_altivec()
    292  if (tsk->thread.regs->msr & MSR_VEC) {  in flush_altivec_to_thread()
    309  * MSR_FP and MSR_VEC  in __giveup_vsx()
    311  WARN_ON((msr & MSR_VSX) && !((msr & MSR_FP) && (msr & MSR_VEC)));  in __giveup_vsx()
    316  if (msr & MSR_VEC)  in __giveup_vsx()
    324  msr_check_and_set(MSR_FP|MSR_VEC|MSR_VS  in giveup_vsx()
    [all...]
signal_64.c
    96  * process never used altivec yet (MSR_VEC is zero in pt_regs of  in setup_sigcontext()
    124  /* set MSR_VEC in the MSR value in the frame to indicate that sc->v_reg)  in setup_sigcontext()
    127  msr |= MSR_VEC;  in setup_sigcontext()
    199  * process never used altivec yet (MSR_VEC is zero in pt_regs of  in setup_tm_sigcontexts()
    223  msr |= tsk->thread.ckpt_regs.msr & (MSR_FP | MSR_VEC | MSR_VSX);  in setup_tm_sigcontexts()
    237  if (msr & MSR_VEC)  in setup_tm_sigcontexts()
    246  /* set MSR_VEC in the MSR value in the frame to indicate  in setup_tm_sigcontexts()
    249  msr |= MSR_VEC;  in setup_tm_sigcontexts()
    257  if (msr & MSR_VEC)  in setup_tm_sigcontexts()
    369  regs->msr &= ~(MSR_FP | MSR_FE0 | MSR_FE1 | MSR_VEC | MSR_VS  in restore_sigcontext()
    [all...]
syscall_64.c
    220  mathflags |= MSR_VEC | MSR_VSX;  in syscall_exit_prepare()
    222  mathflags |= MSR_VEC;  in syscall_exit_prepare()
    299  mathflags |= MSR_VEC | MSR_VSX;  in interrupt_exit_user_prepare()
    301  mathflags |= MSR_VEC;  in interrupt_exit_user_prepare()
vector.S
    51  oris r5,r5,MSR_VEC@h
    70  oris r9,r9,MSR_VEC@h
    77  oris r12,r12,MSR_VEC@h
    128  andis. r5,r12,MSR_VEC@h
signal_32.c
    258  /* set MSR_VEC in the saved MSR value to indicate that  in save_user_regs()
    260  msr |= MSR_VEC;  in save_user_regs()
    262  /* else assert((regs->msr & MSR_VEC) == 0) */  in save_user_regs()
    372  if (msr & MSR_VEC) {  in save_tm_user_regs()
    384  /* set MSR_VEC in the saved MSR value to indicate that  in save_tm_user_regs()
    387  msr |= MSR_VEC;  in save_tm_user_regs()
    400  if (msr & MSR_VEC) {  in save_tm_user_regs()
    514  regs->msr &= ~MSR_VEC;  in restore_user_regs()
    515  if (msr & MSR_VEC) {  in restore_user_regs()
    617  regs->msr &= ~MSR_VEC;  in restore_tm_user_regs()
    [all...]
tm.S
    139  oris r15, r15, MSR_VEC@h
    392  oris r5, r5, MSR_VEC@h
/kernel/linux/linux-5.10/arch/powerpc/include/asm/
switch_to.h
    60  msr_check_and_clear(MSR_VEC);  in disable_kernel_altivec()
    72  msr_check_and_clear(MSR_FP|MSR_VEC|MSR_VSX);  in disable_kernel_vsx()
/kernel/linux/linux-6.6/arch/powerpc/include/asm/
switch_to.h
    60  msr_check_and_clear(MSR_VEC);  in disable_kernel_altivec()
    81  msr_check_and_clear(MSR_FP|MSR_VEC|MSR_VSX);  in disable_kernel_vsx()
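switch_to.h pairs each enable_kernel_* helper with a disable counterpart that simply clears the facility bits again; the two hits above are those bodies. As a minimal sketch (the real inlines also carry debug checks):

/* Sketch only: revoke kernel use of the vector / VSX facilities. */
static inline void disable_kernel_altivec_sketch(void)
{
	msr_check_and_clear(MSR_VEC);
}

static inline void disable_kernel_vsx_sketch(void)
{
	msr_check_and_clear(MSR_FP | MSR_VEC | MSR_VSX);
}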
/kernel/linux/linux-5.10/arch/powerpc/lib/
ldstfp.S
    72  oris r7, r6, MSR_VEC@h
    96  oris r7, r6, MSR_VEC@h
sstep.c
    626  if (regs->msr & MSR_VEC)  in do_vec_load()
    652  if (regs->msr & MSR_VEC)  in do_vec_store()
    892  if (regs->msr & MSR_VEC)  in do_vsx_load()
    923  if (regs->msr & MSR_VEC)  in do_vsx_store()
    3263  if (!(regs->msr & MSR_PR) && !(regs->msr & MSR_VEC))  in emulate_loadstore()
    3273  * Some VSX instructions check the MSR_VEC bit rather than MSR_VSX  in emulate_loadstore()
    3277  msrbit = MSR_VEC;  in emulate_loadstore()
    3334  if (!(regs->msr & MSR_PR) && !(regs->msr & MSR_VEC))  in emulate_loadstore()
    3344  * Some VSX instructions check the MSR_VEC bit rather than MSR_VSX  in emulate_loadstore()
    3348  msrbit = MSR_VEC;  in emulate_loadstore()
    [all...]
/kernel/linux/linux-6.6/arch/powerpc/lib/
ldstfp.S
    72  oris r7, r6, MSR_VEC@h
    96  oris r7, r6, MSR_VEC@h
sstep.c
    700  if (regs->msr & MSR_VEC)  in do_vec_load()
    726  if (regs->msr & MSR_VEC)  in do_vec_store()
    992  if (regs->msr & MSR_VEC) {  in do_vsx_load()
    1038  if (regs->msr & MSR_VEC) {  in do_vsx_store()
    3447  if (!(regs->msr & MSR_PR) && !(regs->msr & MSR_VEC))  in emulate_loadstore()
    3457  * Some VSX instructions check the MSR_VEC bit rather than MSR_VSX  in emulate_loadstore()
    3461  msrbit = MSR_VEC;  in emulate_loadstore()
    3518  if (!(regs->msr & MSR_PR) && !(regs->msr & MSR_VEC))  in emulate_loadstore()
    3528  * Some VSX instructions check the MSR_VEC bit rather than MSR_VSX  in emulate_loadstore()
    3532  msrbit = MSR_VEC;  in emulate_loadstore()
    [all...]
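sstep.c is the in-kernel load/store emulator; before emulating a vector or VSX access it verifies that the faulting context had the facility enabled. The comment in the hits notes the wrinkle that some VSX instructions address the VMX register file and are therefore gated on MSR_VEC instead of MSR_VSX. A loosely hedged sketch of that selection (the predicate name is illustrative only):

/* Sketch only: pick the MSR bit that gates this emulated access. */
unsigned long msrbit = MSR_VSX;

if (insn_targets_vmx_regs)		/* hypothetical predicate */
	msrbit = MSR_VEC;		/* these VSX forms check MSR_VEC instead */

if (!(regs->msr & msrbit))
	return 0;			/* facility off: let the normal fault path run */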
/kernel/linux/linux-5.10/arch/powerpc/kvm/
book3s_pr.c
    172  kvmppc_giveup_ext(vcpu, MSR_FP | MSR_VEC | MSR_VSX);  in kvmppc_core_vcpu_put_pr()
    361  (MSR_FP | MSR_VEC | MSR_VSX);  in kvmppc_handle_lost_math_exts()
    368  else if (ext_diff == MSR_VEC)  in kvmppc_handle_lost_math_exts()
    829  msr |= MSR_FP | MSR_VEC;  in kvmppc_giveup_ext()
    851  if (msr & MSR_VEC) {  in kvmppc_giveup_ext()
    852  if (current->thread.regs->msr & MSR_VEC)  in kvmppc_giveup_ext()
    910  msr = MSR_FP | MSR_VEC | MSR_VSX;  in kvmppc_handle_ext()
    931  if (msr & MSR_VEC) {  in kvmppc_handle_ext()
    969  if (lost_ext & MSR_VEC) {  in kvmppc_handle_lost_ext()
    1386  ext_msr = MSR_VEC;  in kvmppc_handle_exit_pr()
    [all...]
tm.S
    45  oris r8, r8, (MSR_VEC | MSR_VSX)@h
    239  oris r5, r5, (MSR_VEC | MSR_VSX)@h
emulate_loadstore.c
    54  if (!(kvmppc_get_msr(vcpu) & MSR_VEC)) {  in kvmppc_check_altivec_disabled()
    270  MSR_VEC);  in kvmppc_emulate_loadstore()
booke.c
    176  if (!(current->thread.regs->msr & MSR_VEC)) {  in kvmppc_load_guest_altivec()
    181  current->thread.regs->msr |= MSR_VEC;  in kvmppc_load_guest_altivec()
    195  if (current->thread.regs->msr & MSR_VEC)  in kvmppc_save_guest_altivec()
    765  * Since we can't trap on MSR_VEC in GS-mode, we consider the guest  in kvmppc_vcpu_run()
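booke.c cannot trap on MSR_VEC while the guest runs in GS-mode, so the host simply loads the guest's vector state into the unit around vcpu_run and gives it up again afterwards. A rough sketch of the two helpers named in the hits (CONFIG_ALTIVEC guards and load counting omitted; helper bodies assumed):

/* Sketch only: hand the vector unit to the guest before entry ... */
static inline void load_guest_altivec_sketch(void)
{
	if (!(current->thread.regs->msr & MSR_VEC)) {
		enable_kernel_altivec();
		load_vr_state(&current->thread.vr_state);
		current->thread.regs->msr |= MSR_VEC;
	}
}

/* ... and flush it back into the thread struct on the way out. */
static inline void save_guest_altivec_sketch(void)
{
	if (current->thread.regs->msr & MSR_VEC)
		giveup_altivec(current);
}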
/kernel/linux/linux-6.6/arch/powerpc/kvm/
book3s_pr.c
    184  kvmppc_giveup_ext(vcpu, MSR_FP | MSR_VEC | MSR_VSX);  in kvmppc_core_vcpu_put_pr()
    368  (MSR_FP | MSR_VEC | MSR_VSX);  in kvmppc_handle_lost_math_exts()
    375  else if (ext_diff == MSR_VEC)  in kvmppc_handle_lost_math_exts()
    813  msr |= MSR_FP | MSR_VEC;  in kvmppc_giveup_ext()
    835  if (msr & MSR_VEC) {  in kvmppc_giveup_ext()
    836  if (current->thread.regs->msr & MSR_VEC)  in kvmppc_giveup_ext()
    894  msr = MSR_FP | MSR_VEC | MSR_VSX;  in kvmppc_handle_ext()
    915  if (msr & MSR_VEC) {  in kvmppc_handle_ext()
    953  if (lost_ext & MSR_VEC) {  in kvmppc_handle_lost_ext()
    1374  ext_msr = MSR_VEC;  in kvmppc_handle_exit_pr()
    [all...]
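book3s_pr.c tracks which math facilities the guest currently owns on the host CPU. kvmppc_giveup_ext() flushes the live registers back into the host thread struct; because VSX state overlaps the FP and VMX register files, giving up MSR_VSX forces MSR_FP and MSR_VEC to be given up too. A condensed sketch of the MSR_VEC leg (the guest_owned_ext field name is assumed from context):

/* Sketch only: return the vector unit from the guest to the host. */
if (msr & MSR_VSX)
	msr |= MSR_FP | MSR_VEC;		/* VSX spans both register files */

if (msr & MSR_VEC) {
	if (current->thread.regs->msr & MSR_VEC)
		giveup_altivec(current);	/* flush live VRs into the thread struct */
	vcpu->arch.guest_owned_ext &= ~MSR_VEC;
}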
emulate_loadstore.c
    54  if (!(kvmppc_get_msr(vcpu) & MSR_VEC)) {  in kvmppc_check_altivec_disabled()
    271  MSR_VEC);  in kvmppc_emulate_loadstore()
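emulate_loadstore.c refuses to emulate a vector access when the guest's own MSR has MSR_VEC clear; instead it queues an AltiVec-unavailable interrupt for the guest, as the hardware would. A small sketch (the queueing helper name and its signature are assumptions):

/* Sketch only: guest attempted a vector access with MSR_VEC off. */
static int check_altivec_disabled_sketch(struct kvm_vcpu *vcpu)
{
	if (!(kvmppc_get_msr(vcpu) & MSR_VEC)) {
		kvmppc_core_queue_vec_unavail(vcpu);	/* assumed helper */
		return -1;				/* caller aborts the emulation */
	}
	return 0;
}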
tm.S
    45  oris r8, r8, (MSR_VEC | MSR_VSX)@h
    239  oris r5, r5, (MSR_VEC | MSR_VSX)@h