/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Author: Huacai Chen <chenhuacai@loongson.cn>
 * Copyright (C) 2020 Loongson Technology Corporation Limited
 */
#ifndef _ASM_FPU_H
#define _ASM_FPU_H

#include <linux/sched.h>
#include <linux/sched/task_stack.h>
#include <linux/ptrace.h>
#include <linux/thread_info.h>
#include <linux/bitops.h>

#include <asm/cpu.h>
#include <asm/cpu-features.h>
#include <asm/current.h>
#include <asm/inst.h>
#include <asm/loongarchregs.h>
#include <asm/processor.h>
#include <asm/ptrace.h>

struct sigcontext;

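/*
 * kernel_fpu_begin()/kernel_fpu_end() bracket FPU/SIMD use in kernel
 * context and must always be paired. Illustrative sketch of a caller:
 *
 *	kernel_fpu_begin();
 *	... FPU/SIMD computation ...
 *	kernel_fpu_end();
 */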
extern void kernel_fpu_begin(void);
extern void kernel_fpu_end(void);

extern void _init_fpu(unsigned int);
extern void _save_fp(struct loongarch_fpu *);
extern void _restore_fp(struct loongarch_fpu *);

extern void _save_lsx(struct loongarch_fpu *fpu);
extern void _restore_lsx(struct loongarch_fpu *fpu);
extern void _init_lsx_upper(void);
extern void _restore_lsx_upper(struct loongarch_fpu *fpu);

extern void _save_lasx(struct loongarch_fpu *fpu);
extern void _restore_lasx(struct loongarch_fpu *fpu);
extern void _init_lasx_upper(void);
extern void _restore_lasx_upper(struct loongarch_fpu *fpu);

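/*
 * Forward declarations: lose_fpu_inatomic() below calls these helpers
 * before their CONFIG_CPU_HAS_LSX/LASX-dependent definitions appear.
 */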
static inline void enable_lsx(void);
static inline void disable_lsx(void);
static inline void save_lsx(struct task_struct *t);
static inline void restore_lsx(struct task_struct *t);

static inline void enable_lasx(void);
static inline void disable_lasx(void);
static inline void save_lasx(struct task_struct *t);
static inline void restore_lasx(struct task_struct *t);

/*
 * Mask the FCSR Cause bits according to the Enable bits, observing
 * that Unimplemented is always enabled. The shift aligns the Enable
 * field with the Cause field before masking.
 */
static inline unsigned long mask_fcsr_x(unsigned long fcsr)
{
	return fcsr & ((fcsr & FPU_CSR_ALL_E) <<
			(ffs(FPU_CSR_ALL_X) - ffs(FPU_CSR_ALL_E)));
}

static inline int is_fp_enabled(void)
{
	return (csr_read32(LOONGARCH_CSR_EUEN) & CSR_EUEN_FPEN) ?
		1 : 0;
}

static inline int is_lsx_enabled(void)
{
	if (!cpu_has_lsx)
		return 0;

	return (csr_read32(LOONGARCH_CSR_EUEN) & CSR_EUEN_LSXEN) ?
		1 : 0;
}

static inline int is_lasx_enabled(void)
{
	if (!cpu_has_lasx)
		return 0;

	return (csr_read32(LOONGARCH_CSR_EUEN) & CSR_EUEN_LASXEN) ?
		1 : 0;
}

static inline int is_simd_enabled(void)
{
	return is_lsx_enabled() | is_lasx_enabled();
}

#define enable_fpu()						\
do {								\
	set_csr_euen(CSR_EUEN_FPEN);				\
} while (0)

#define disable_fpu()						\
do {								\
	clear_csr_euen(CSR_EUEN_FPEN);				\
} while (0)

#define clear_fpu_owner()	clear_thread_flag(TIF_USEDFPU)

static inline int is_fpu_owner(void)
{
	return test_thread_flag(TIF_USEDFPU);
}

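/*
 * Take FPU ownership for the current task: enable the FPU in CSR.EUEN,
 * set TIF_USEDFPU and record FPEN in the task's saved EUEN value.
 */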
static inline void __own_fpu(void)
{
	enable_fpu();
	set_thread_flag(TIF_USEDFPU);
	KSTK_EUEN(current) |= CSR_EUEN_FPEN;
}

static inline void own_fpu_inatomic(int restore)
{
	if (cpu_has_fpu && !is_fpu_owner()) {
		__own_fpu();
		if (restore)
			_restore_fp(&current->thread.fpu);
	}
}

static inline void own_fpu(int restore)
{
	preempt_disable();
	own_fpu_inatomic(restore);
	preempt_enable();
}

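/*
 * Drop FPU/SIMD ownership for @tsk: optionally save the widest live
 * context (LASX if enabled, else LSX, else plain FP), disable the
 * units, and clear the ownership flags and the saved EUEN bits.
 */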
static inline void lose_fpu_inatomic(int save, struct task_struct *tsk)
{
	if (is_fpu_owner()) {
		if (!is_simd_enabled()) {
			if (save)
				_save_fp(&tsk->thread.fpu);
			disable_fpu();
		} else {
			if (save) {
				if (!is_lasx_enabled())
					save_lsx(tsk);
				else
					save_lasx(tsk);
			}
			disable_fpu();
			disable_lsx();
			disable_lasx();
			clear_tsk_thread_flag(tsk, TIF_USEDSIMD);
		}
		clear_tsk_thread_flag(tsk, TIF_USEDFPU);
	}
	KSTK_EUEN(tsk) &= ~(CSR_EUEN_FPEN | CSR_EUEN_LSXEN | CSR_EUEN_LASXEN);
}

static inline void lose_fpu(int save)
{
	preempt_disable();
	lose_fpu_inatomic(save, current);
	preempt_enable();
}

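/*
 * Claim the FPU for the current task and initialize its FP state,
 * seeding FCSR from the task's saved value.
 */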
static inline void init_fpu(void)
{
	unsigned int fcsr = current->thread.fpu.fcsr;

	__own_fpu();
	_init_fpu(fcsr);
	set_used_math();
}

static inline void save_fp(struct task_struct *tsk)
{
	if (cpu_has_fpu)
		_save_fp(&tsk->thread.fpu);
}

static inline void restore_fp(struct task_struct *tsk)
{
	if (cpu_has_fpu)
		_restore_fp(&tsk->thread.fpu);
}

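/*
 * Save the FPU/SIMD registers of @tsk when it is the current task,
 * picking the widest unit currently enabled in CSR.EUEN (LASX, then
 * LSX, then plain FP). For any other task this is a no-op.
 */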
static inline void save_fpu_regs(struct task_struct *tsk)
{
	unsigned int euen;

	if (tsk == current) {
		preempt_disable();

		euen = csr_read32(LOONGARCH_CSR_EUEN);

#ifdef CONFIG_CPU_HAS_LASX
		if (euen & CSR_EUEN_LASXEN)
			_save_lasx(&current->thread.fpu);
		else
#endif
#ifdef CONFIG_CPU_HAS_LSX
		if (euen & CSR_EUEN_LSXEN)
			_save_lsx(&current->thread.fpu);
		else
#endif
		if (euen & CSR_EUEN_FPEN)
			_save_fp(&current->thread.fpu);

		preempt_enable();
	}
}

static inline int is_simd_owner(void)
{
	return test_thread_flag(TIF_USEDSIMD);
}

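/*
 * LSX helpers: real implementations when CONFIG_CPU_HAS_LSX is set and
 * the CPU reports LSX support, otherwise empty stubs so callers need no
 * #ifdefs of their own. The LASX block further down follows the same
 * pattern.
 */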
#ifdef CONFIG_CPU_HAS_LSX

static inline void enable_lsx(void)
{
	if (cpu_has_lsx)
		csr_xchg32(CSR_EUEN_LSXEN, CSR_EUEN_LSXEN, LOONGARCH_CSR_EUEN);
}

static inline void disable_lsx(void)
{
	if (cpu_has_lsx)
		csr_xchg32(0, CSR_EUEN_LSXEN, LOONGARCH_CSR_EUEN);
}

static inline void save_lsx(struct task_struct *t)
{
	if (cpu_has_lsx)
		_save_lsx(&t->thread.fpu);
}

static inline void restore_lsx(struct task_struct *t)
{
	if (cpu_has_lsx)
		_restore_lsx(&t->thread.fpu);
}

static inline void init_lsx_upper(void)
{
	if (cpu_has_lsx)
		_init_lsx_upper();
}

static inline void restore_lsx_upper(struct task_struct *t)
{
	if (cpu_has_lsx)
		_restore_lsx_upper(&t->thread.fpu);
}

#else
static inline void enable_lsx(void) {}
static inline void disable_lsx(void) {}
static inline void save_lsx(struct task_struct *t) {}
static inline void restore_lsx(struct task_struct *t) {}
static inline void init_lsx_upper(void) {}
static inline void restore_lsx_upper(struct task_struct *t) {}
#endif

#ifdef CONFIG_CPU_HAS_LASX

static inline void enable_lasx(void)
{
	if (cpu_has_lasx)
		csr_xchg32(CSR_EUEN_LASXEN, CSR_EUEN_LASXEN, LOONGARCH_CSR_EUEN);
}


static inline void disable_lasx(void)
{
	if (cpu_has_lasx)
		csr_xchg32(0, CSR_EUEN_LASXEN, LOONGARCH_CSR_EUEN);
}

static inline void save_lasx(struct task_struct *t)
{
	if (cpu_has_lasx)
		_save_lasx(&t->thread.fpu);
}

static inline void restore_lasx(struct task_struct *t)
{
	if (cpu_has_lasx)
		_restore_lasx(&t->thread.fpu);
}

static inline void init_lasx_upper(void)
{
	if (cpu_has_lasx)
		_init_lasx_upper();
}

static inline void restore_lasx_upper(struct task_struct *t)
{
	if (cpu_has_lasx)
		_restore_lasx_upper(&t->thread.fpu);
}

#else
static inline void enable_lasx(void) {}
static inline void disable_lasx(void) {}
static inline void save_lasx(struct task_struct *t) {}
static inline void restore_lasx(struct task_struct *t) {}
static inline void init_lasx_upper(void) {}
static inline void restore_lasx_upper(struct task_struct *t) {}
#endif

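/*
 * Report whether the thread carries a live LSX/LASX register context
 * (TIF_LSX_CTX_LIVE/TIF_LASX_CTX_LIVE); both read as 0 on CPUs without
 * the corresponding extension.
 */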
static inline int thread_lsx_context_live(void)
{
	if (!cpu_has_lsx)
		return 0;

	return test_thread_flag(TIF_LSX_CTX_LIVE);
}

static inline int thread_lasx_context_live(void)
{
	if (!cpu_has_lasx)
		return 0;

	return test_thread_flag(TIF_LASX_CTX_LIVE);
}

#endif /* _ASM_FPU_H */