Home
last modified time | relevance | path

Searched refs:MSR_VEC (Results 1 - 25 of 40) sorted by relevance

12

/kernel/linux/linux-6.6/arch/powerpc/kernel/
H A Dprocess.c242 msr &= ~MSR_VEC; in __giveup_altivec()
252 msr_check_and_set(MSR_VEC); in giveup_altivec()
254 msr_check_and_clear(MSR_VEC); in giveup_altivec()
264 cpumsr = msr_check_and_set(MSR_VEC); in enable_kernel_altivec()
266 if (current->thread.regs && (current->thread.regs->msr & MSR_VEC)) { in enable_kernel_altivec()
291 if (tsk->thread.regs->msr & MSR_VEC) { in flush_altivec_to_thread()
308 * MSR_FP and MSR_VEC in __giveup_vsx()
310 WARN_ON((msr & MSR_VSX) && !((msr & MSR_FP) && (msr & MSR_VEC))); in __giveup_vsx()
315 if (msr & MSR_VEC) in __giveup_vsx()
323 msr_check_and_set(MSR_FP|MSR_VEC|MSR_VSX); in giveup_vsx()
[all...]
H A Dsignal_64.c119 * process never used altivec yet (MSR_VEC is zero in pt_regs of in __unsafe_setup_sigcontext()
144 /* set MSR_VEC in the MSR value in the frame to indicate that sc->v_reg) in __unsafe_setup_sigcontext()
147 msr |= MSR_VEC; in __unsafe_setup_sigcontext()
213 * process never used altivec yet (MSR_VEC is zero in pt_regs of in setup_tm_sigcontexts()
237 msr |= tsk->thread.ckpt_regs.msr & (MSR_FP | MSR_VEC | MSR_VSX); in setup_tm_sigcontexts()
251 if (msr & MSR_VEC) in setup_tm_sigcontexts()
260 /* set MSR_VEC in the MSR value in the frame to indicate in setup_tm_sigcontexts()
263 msr |= MSR_VEC; in setup_tm_sigcontexts()
271 if (msr & MSR_VEC) in setup_tm_sigcontexts()
387 regs_set_return_msr(regs, regs->msr & ~(MSR_FP | MSR_FE0 | MSR_FE1 | MSR_VEC | MSR_VSX)); in __unsafe_restore_sigcontext()
[all...]
H A Dsignal_32.c281 /* set MSR_VEC in the saved MSR value to indicate that in __unsafe_save_user_regs()
283 msr |= MSR_VEC; in __unsafe_save_user_regs()
285 /* else assert((regs->msr & MSR_VEC) == 0) */ in __unsafe_save_user_regs()
388 if (msr & MSR_VEC) in save_tm_user_regs_unsafe()
397 /* set MSR_VEC in the saved MSR value to indicate that in save_tm_user_regs_unsafe()
400 msr |= MSR_VEC; in save_tm_user_regs_unsafe()
410 if (msr & MSR_VEC) in save_tm_user_regs_unsafe()
498 regs_set_return_msr(regs, regs->msr & ~MSR_VEC); in restore_user_regs()
499 if (msr & MSR_VEC) { in restore_user_regs()
598 regs_set_return_msr(regs, regs->msr & ~MSR_VEC); in restore_tm_user_regs()
[all...]
H A Dvector.S56 oris r5,r5,MSR_VEC@h
75 oris r9,r9,MSR_VEC@h
79 oris r12,r12,MSR_VEC@h
133 andis. r5,r12,MSR_VEC@h
H A Dinterrupt.c211 mathflags |= MSR_VEC | MSR_VSX; in interrupt_exit_user_prepare_main()
213 mathflags |= MSR_VEC; in interrupt_exit_user_prepare_main()
H A Dtm.S139 oris r15, r15, MSR_VEC@h
389 oris r5, r5, MSR_VEC@h
/kernel/linux/linux-5.10/arch/powerpc/kernel/
H A Dprocess.c243 msr &= ~MSR_VEC; in __giveup_altivec()
253 msr_check_and_set(MSR_VEC); in giveup_altivec()
255 msr_check_and_clear(MSR_VEC); in giveup_altivec()
265 cpumsr = msr_check_and_set(MSR_VEC); in enable_kernel_altivec()
267 if (current->thread.regs && (current->thread.regs->msr & MSR_VEC)) { in enable_kernel_altivec()
292 if (tsk->thread.regs->msr & MSR_VEC) { in flush_altivec_to_thread()
309 * MSR_FP and MSR_VEC in __giveup_vsx()
311 WARN_ON((msr & MSR_VSX) && !((msr & MSR_FP) && (msr & MSR_VEC))); in __giveup_vsx()
316 if (msr & MSR_VEC) in __giveup_vsx()
324 msr_check_and_set(MSR_FP|MSR_VEC|MSR_VSX); in giveup_vsx()
[all...]
H A Dsignal_64.c96 * process never used altivec yet (MSR_VEC is zero in pt_regs of in setup_sigcontext()
124 /* set MSR_VEC in the MSR value in the frame to indicate that sc->v_reg) in setup_sigcontext()
127 msr |= MSR_VEC; in setup_sigcontext()
199 * process never used altivec yet (MSR_VEC is zero in pt_regs of in setup_tm_sigcontexts()
223 msr |= tsk->thread.ckpt_regs.msr & (MSR_FP | MSR_VEC | MSR_VSX); in setup_tm_sigcontexts()
237 if (msr & MSR_VEC) in setup_tm_sigcontexts()
246 /* set MSR_VEC in the MSR value in the frame to indicate in setup_tm_sigcontexts()
249 msr |= MSR_VEC; in setup_tm_sigcontexts()
257 if (msr & MSR_VEC) in setup_tm_sigcontexts()
369 regs->msr &= ~(MSR_FP | MSR_FE0 | MSR_FE1 | MSR_VEC | MSR_VSX); in restore_sigcontext()
[all...]
H A Dsyscall_64.c220 mathflags |= MSR_VEC | MSR_VSX; in syscall_exit_prepare()
222 mathflags |= MSR_VEC; in syscall_exit_prepare()
299 mathflags |= MSR_VEC | MSR_VSX; in interrupt_exit_user_prepare()
301 mathflags |= MSR_VEC; in interrupt_exit_user_prepare()
H A Dvector.S51 oris r5,r5,MSR_VEC@h
70 oris r9,r9,MSR_VEC@h
77 oris r12,r12,MSR_VEC@h
128 andis. r5,r12,MSR_VEC@h
H A Dsignal_32.c258 /* set MSR_VEC in the saved MSR value to indicate that in save_user_regs()
260 msr |= MSR_VEC; in save_user_regs()
262 /* else assert((regs->msr & MSR_VEC) == 0) */ in save_user_regs()
372 if (msr & MSR_VEC) { in save_tm_user_regs()
384 /* set MSR_VEC in the saved MSR value to indicate that in save_tm_user_regs()
387 msr |= MSR_VEC; in save_tm_user_regs()
400 if (msr & MSR_VEC) { in save_tm_user_regs()
514 regs->msr &= ~MSR_VEC; in restore_user_regs()
515 if (msr & MSR_VEC) { in restore_user_regs()
617 regs->msr &= ~MSR_VEC; in restore_tm_user_regs()
[all...]
H A Dtm.S139 oris r15, r15, MSR_VEC@h
392 oris r5, r5, MSR_VEC@h
/kernel/linux/linux-5.10/arch/powerpc/include/asm/
H A Dswitch_to.h60 msr_check_and_clear(MSR_VEC); in disable_kernel_altivec()
72 msr_check_and_clear(MSR_FP|MSR_VEC|MSR_VSX); in disable_kernel_vsx()
/kernel/linux/linux-6.6/arch/powerpc/include/asm/
H A Dswitch_to.h60 msr_check_and_clear(MSR_VEC); in disable_kernel_altivec()
81 msr_check_and_clear(MSR_FP|MSR_VEC|MSR_VSX); in disable_kernel_vsx()
/kernel/linux/linux-5.10/arch/powerpc/lib/
H A Dldstfp.S72 oris r7, r6, MSR_VEC@h
96 oris r7, r6, MSR_VEC@h
H A Dsstep.c626 if (regs->msr & MSR_VEC) in do_vec_load()
652 if (regs->msr & MSR_VEC) in do_vec_store()
892 if (regs->msr & MSR_VEC) in do_vsx_load()
923 if (regs->msr & MSR_VEC) in do_vsx_store()
3263 if (!(regs->msr & MSR_PR) && !(regs->msr & MSR_VEC)) in emulate_loadstore()
3273 * Some VSX instructions check the MSR_VEC bit rather than MSR_VSX in emulate_loadstore()
3277 msrbit = MSR_VEC; in emulate_loadstore()
3334 if (!(regs->msr & MSR_PR) && !(regs->msr & MSR_VEC)) in emulate_loadstore()
3344 * Some VSX instructions check the MSR_VEC bit rather than MSR_VSX in emulate_loadstore()
3348 msrbit = MSR_VEC; in emulate_loadstore()
[all...]
/kernel/linux/linux-6.6/arch/powerpc/lib/
H A Dldstfp.S72 oris r7, r6, MSR_VEC@h
96 oris r7, r6, MSR_VEC@h
H A Dsstep.c700 if (regs->msr & MSR_VEC) in do_vec_load()
726 if (regs->msr & MSR_VEC) in do_vec_store()
992 if (regs->msr & MSR_VEC) { in do_vsx_load()
1038 if (regs->msr & MSR_VEC) { in do_vsx_store()
3447 if (!(regs->msr & MSR_PR) && !(regs->msr & MSR_VEC)) in emulate_loadstore()
3457 * Some VSX instructions check the MSR_VEC bit rather than MSR_VSX in emulate_loadstore()
3461 msrbit = MSR_VEC; in emulate_loadstore()
3518 if (!(regs->msr & MSR_PR) && !(regs->msr & MSR_VEC)) in emulate_loadstore()
3528 * Some VSX instructions check the MSR_VEC bit rather than MSR_VSX in emulate_loadstore()
3532 msrbit = MSR_VEC; in emulate_loadstore()
[all...]
/kernel/linux/linux-5.10/arch/powerpc/kvm/
H A Dbook3s_pr.c172 kvmppc_giveup_ext(vcpu, MSR_FP | MSR_VEC | MSR_VSX); in kvmppc_core_vcpu_put_pr()
361 (MSR_FP | MSR_VEC | MSR_VSX); in kvmppc_handle_lost_math_exts()
368 else if (ext_diff == MSR_VEC) in kvmppc_handle_lost_math_exts()
829 msr |= MSR_FP | MSR_VEC; in kvmppc_giveup_ext()
851 if (msr & MSR_VEC) { in kvmppc_giveup_ext()
852 if (current->thread.regs->msr & MSR_VEC) in kvmppc_giveup_ext()
910 msr = MSR_FP | MSR_VEC | MSR_VSX; in kvmppc_handle_ext()
931 if (msr & MSR_VEC) { in kvmppc_handle_ext()
969 if (lost_ext & MSR_VEC) { in kvmppc_handle_lost_ext()
1386 ext_msr = MSR_VEC; in kvmppc_handle_exit_pr()
[all...]
H A Dtm.S45 oris r8, r8, (MSR_VEC | MSR_VSX)@h
239 oris r5, r5, (MSR_VEC | MSR_VSX)@h
H A Demulate_loadstore.c54 if (!(kvmppc_get_msr(vcpu) & MSR_VEC)) { in kvmppc_check_altivec_disabled()
270 MSR_VEC); in kvmppc_emulate_loadstore()
H A Dbooke.c176 if (!(current->thread.regs->msr & MSR_VEC)) { in kvmppc_load_guest_altivec()
181 current->thread.regs->msr |= MSR_VEC; in kvmppc_load_guest_altivec()
195 if (current->thread.regs->msr & MSR_VEC) in kvmppc_save_guest_altivec()
765 * Since we can't trap on MSR_VEC in GS-mode, we consider the guest in kvmppc_vcpu_run()
/kernel/linux/linux-6.6/arch/powerpc/kvm/
H A Dbook3s_pr.c184 kvmppc_giveup_ext(vcpu, MSR_FP | MSR_VEC | MSR_VSX); in kvmppc_core_vcpu_put_pr()
368 (MSR_FP | MSR_VEC | MSR_VSX); in kvmppc_handle_lost_math_exts()
375 else if (ext_diff == MSR_VEC) in kvmppc_handle_lost_math_exts()
813 msr |= MSR_FP | MSR_VEC; in kvmppc_giveup_ext()
835 if (msr & MSR_VEC) { in kvmppc_giveup_ext()
836 if (current->thread.regs->msr & MSR_VEC) in kvmppc_giveup_ext()
894 msr = MSR_FP | MSR_VEC | MSR_VSX; in kvmppc_handle_ext()
915 if (msr & MSR_VEC) { in kvmppc_handle_ext()
953 if (lost_ext & MSR_VEC) { in kvmppc_handle_lost_ext()
1374 ext_msr = MSR_VEC; in kvmppc_handle_exit_pr()
[all...]
H A Demulate_loadstore.c54 if (!(kvmppc_get_msr(vcpu) & MSR_VEC)) { in kvmppc_check_altivec_disabled()
271 MSR_VEC); in kvmppc_emulate_loadstore()
H A Dtm.S45 oris r8, r8, (MSR_VEC | MSR_VSX)@h
239 oris r5, r5, (MSR_VEC | MSR_VSX)@h

Completed in 23 milliseconds

12