/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 2020-2022 Loongson Technology Corporation Limited
 */
#ifndef _ASM_STACKFRAME_H
#define _ASM_STACKFRAME_H

#include <linux/threads.h>

#include <asm/addrspace.h>
#include <asm/asm.h>
#include <asm/asmmacro.h>
#include <asm/asm-offsets.h>
#include <asm/loongarch.h>
#include <asm/thread_info.h>

/* Make the addition of cfi info a little easier. */
	.macro cfi_rel_offset reg offset=0 docfi=0
	.if \docfi
	.cfi_rel_offset \reg, \offset
	.endif
	.endm

	.macro cfi_st reg offset=0 docfi=0
	cfi_rel_offset \reg, \offset, \docfi
	LONG_S	\reg, sp, \offset
	.endm

	.macro cfi_restore reg offset=0 docfi=0
	.if \docfi
	.cfi_restore \reg
	.endif
	.endm

	.macro cfi_ld reg offset=0 docfi=0
	LONG_L	\reg, sp, \offset
	cfi_restore \reg \offset \docfi
	.endm
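
/*
 * Illustrative use (mirrors what SAVE_SOME/RESTORE_SOME below actually do):
 * cfi_st stores a register into the pt_regs frame on sp and, when docfi=1,
 * also emits the matching .cfi_rel_offset; cfi_ld is the inverse:
 *
 *	cfi_st	ra, PT_R1, \docfi
 *	...
 *	cfi_ld	ra, PT_R1, \docfi
 */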

/* Jump to the runtime virtual address. */
	.macro JUMP_VIRT_ADDR temp1 temp2
	li.d	\temp1, CACHE_BASE
	pcaddi	\temp2, 0
	or	\temp1, \temp1, \temp2
	jirl	zero, \temp1, 0xc
	.endm
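
/*
 * How JUMP_VIRT_ADDR works: pcaddi puts the PC of the pcaddi itself into
 * \temp2, OR-ing in CACHE_BASE yields the cached direct-mapped (virtual)
 * address of that PC, and the jirl offset of 0xc (three instructions past
 * the pcaddi) lands on the instruction following the jirl, now running at
 * its runtime virtual address. Typically used during early boot, e.g. in
 * head.S.
 */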

	.macro BACKUP_T0T1
	csrwr	t0, EXCEPTION_KS0
	csrwr	t1, EXCEPTION_KS1
	.endm

	.macro RELOAD_T0T1
	csrrd   t0, EXCEPTION_KS0
	csrrd   t1, EXCEPTION_KS1
	.endm
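
/*
 * BACKUP_T0T1/RELOAD_T0T1 stash t0 and t1 in the EXCEPTION_KS0/KS1
 * KScratch CSRs so that the very first instructions of an exception
 * handler may clobber t0/t1 before a pt_regs frame exists. SAVE_TEMP
 * below therefore starts with RELOAD_T0T1, so the values it stores at
 * PT_R12/PT_R13 are the original, pre-exception ones.
 */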

	.macro	SAVE_TEMP docfi=0
	RELOAD_T0T1
	cfi_st	t0, PT_R12, \docfi
	cfi_st	t1, PT_R13, \docfi
	cfi_st	t2, PT_R14, \docfi
	cfi_st	t3, PT_R15, \docfi
	cfi_st	t4, PT_R16, \docfi
	cfi_st	t5, PT_R17, \docfi
	cfi_st	t6, PT_R18, \docfi
	cfi_st	t7, PT_R19, \docfi
	cfi_st	t8, PT_R20, \docfi
	.endm

	.macro	SAVE_STATIC docfi=0
	cfi_st	s0, PT_R23, \docfi
	cfi_st	s1, PT_R24, \docfi
	cfi_st	s2, PT_R25, \docfi
	cfi_st	s3, PT_R26, \docfi
	cfi_st	s4, PT_R27, \docfi
	cfi_st	s5, PT_R28, \docfi
	cfi_st	s6, PT_R29, \docfi
	cfi_st	s7, PT_R30, \docfi
	cfi_st	s8, PT_R31, \docfi
	.endm

/*
 * get_saved_sp returns the SP for the current CPU by looking in the
 * kernelsp array for it. It stores the current sp in t0 and loads the
 * new value in sp.
 */
	.macro	get_saved_sp docfi=0
	la_abs	  t1, kernelsp
#ifdef CONFIG_SMP
	csrrd	  t0, PERCPU_BASE_KS
	LONG_ADD  t1, t1, t0
#endif
	move	  t0, sp
	.if \docfi
	.cfi_register sp, t0
	.endif
	LONG_L	  sp, t1, 0
	.endm
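
/*
 * Note: get_saved_sp is used from SAVE_SOME before u0 has been reloaded
 * with the per-CPU base (u0 may still hold the user's r21 value), so on
 * SMP the per-CPU offset is read from the PERCPU_BASE_KS KScratch CSR
 * rather than from u0.
 */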

	.macro	set_saved_sp stackp temp temp2
	la.pcrel  \temp, kernelsp
#ifdef CONFIG_SMP
	LONG_ADD  \temp, \temp, u0
#endif
	LONG_S	  \stackp, \temp, 0
	.endm
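
/*
 * set_saved_sp is the store-side counterpart of get_saved_sp: it records
 * \stackp as this CPU's kernel stack pointer. Here u0 can be used directly
 * because the caller already runs in kernel context. Typically invoked
 * when a new kernel stack is installed, e.g. on context switch.
 */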

	.macro	SAVE_SOME docfi=0
	csrrd	t1, LOONGARCH_CSR_PRMD
	andi	t1, t1, 0x3	/* extract pplv bit */
	move	t0, sp
	beqz	t1, 8f
	/* Called from user mode, new stack. */
	get_saved_sp docfi=\docfi
8:
	PTR_ADDI sp, sp, -PT_SIZE
	.if \docfi
	.cfi_def_cfa sp, 0
	.endif
	cfi_st	t0, PT_R3, \docfi
	cfi_rel_offset  sp, PT_R3, \docfi
	LONG_S	zero, sp, PT_R0
	csrrd	t0, LOONGARCH_CSR_PRMD
	LONG_S	t0, sp, PT_PRMD
	csrrd	t0, LOONGARCH_CSR_CRMD
	LONG_S	t0, sp, PT_CRMD
	csrrd	t0, LOONGARCH_CSR_EUEN
	LONG_S  t0, sp, PT_EUEN
	csrrd	t0, LOONGARCH_CSR_ECFG
	LONG_S	t0, sp, PT_ECFG
	csrrd	t0, LOONGARCH_CSR_ESTAT
	PTR_S	t0, sp, PT_ESTAT
	cfi_st	ra, PT_R1, \docfi
	cfi_st	a0, PT_R4, \docfi
	cfi_st	a1, PT_R5, \docfi
	cfi_st	a2, PT_R6, \docfi
	cfi_st	a3, PT_R7, \docfi
	cfi_st	a4, PT_R8, \docfi
	cfi_st	a5, PT_R9, \docfi
	cfi_st	a6, PT_R10, \docfi
	cfi_st	a7, PT_R11, \docfi
	csrrd	ra, LOONGARCH_CSR_ERA
	LONG_S	ra, sp, PT_ERA
	.if \docfi
	.cfi_rel_offset ra, PT_ERA
	.endif
	cfi_st	tp, PT_R2, \docfi
	cfi_st	fp, PT_R22, \docfi

	/* Set thread_info if we're coming from user mode */
	csrrd	t0, LOONGARCH_CSR_PRMD
	andi	t0, t0, 0x3	/* extract pplv bit */
	beqz	t0, 9f

	li.d	tp, ~_THREAD_MASK
	and	tp, tp, sp
	cfi_st  u0, PT_R21, \docfi
	csrrd	u0, PERCPU_BASE_KS
9:
#ifdef CONFIG_KGDB
	li.w	t0, CSR_CRMD_WE
	csrxchg	t0, t0, LOONGARCH_CSR_CRMD
#endif
	.endm
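
/*
 * Note on u0 ($r21): the kernel uses u0 as the per-CPU base register.
 * On entry from user mode SAVE_SOME saves the user's r21 value at PT_R21
 * and reloads u0 from PERCPU_BASE_KS; RESTORE_SOME restores the saved
 * value only when returning to user mode, so the per-CPU base is never
 * clobbered while running in the kernel.
 */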

	.macro	SAVE_ALL docfi=0
	SAVE_SOME \docfi
	SAVE_TEMP \docfi
	SAVE_STATIC \docfi
	.endm

	.macro	RESTORE_TEMP docfi=0
	cfi_ld	t0, PT_R12, \docfi
	cfi_ld	t1, PT_R13, \docfi
	cfi_ld	t2, PT_R14, \docfi
	cfi_ld	t3, PT_R15, \docfi
	cfi_ld	t4, PT_R16, \docfi
	cfi_ld	t5, PT_R17, \docfi
	cfi_ld	t6, PT_R18, \docfi
	cfi_ld	t7, PT_R19, \docfi
	cfi_ld	t8, PT_R20, \docfi
	.endm

	.macro	RESTORE_STATIC docfi=0
	cfi_ld	s0, PT_R23, \docfi
	cfi_ld	s1, PT_R24, \docfi
	cfi_ld	s2, PT_R25, \docfi
	cfi_ld	s3, PT_R26, \docfi
	cfi_ld	s4, PT_R27, \docfi
	cfi_ld	s5, PT_R28, \docfi
	cfi_ld	s6, PT_R29, \docfi
	cfi_ld	s7, PT_R30, \docfi
	cfi_ld	s8, PT_R31, \docfi
	.endm

	.macro	RESTORE_SOME docfi=0
	LONG_L	a0, sp, PT_PRMD
	andi    a0, a0, 0x3	/* extract pplv bit */
	beqz    a0, 8f
	cfi_ld  u0, PT_R21, \docfi
8:
	LONG_L	a0, sp, PT_ERA
	csrwr	a0, LOONGARCH_CSR_ERA
	LONG_L	a0, sp, PT_PRMD
	csrwr	a0, LOONGARCH_CSR_PRMD
	cfi_ld	ra, PT_R1, \docfi
	cfi_ld	a0, PT_R4, \docfi
	cfi_ld	a1, PT_R5, \docfi
	cfi_ld	a2, PT_R6, \docfi
	cfi_ld	a3, PT_R7, \docfi
	cfi_ld	a4, PT_R8, \docfi
	cfi_ld	a5, PT_R9, \docfi
	cfi_ld	a6, PT_R10, \docfi
	cfi_ld	a7, PT_R11, \docfi
	cfi_ld	tp, PT_R2, \docfi
	cfi_ld	fp, PT_R22, \docfi
	.endm

	.macro	RESTORE_SP_AND_RET docfi=0
	cfi_ld	sp, PT_R3, \docfi
	ertn
	.endm

	.macro	RESTORE_ALL_AND_RET docfi=0
	RESTORE_STATIC \docfi
	RESTORE_TEMP \docfi
	RESTORE_SOME \docfi
	RESTORE_SP_AND_RET \docfi
	.endm
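
/*
 * Typical usage (illustrative sketch only, not copied from any handler):
 * a low-level exception entry preserves t0/t1 with BACKUP_T0T1, builds the
 * full pt_regs frame, calls a C handler with the frame as argument and
 * unwinds on the way back out:
 *
 *	BACKUP_T0T1
 *	SAVE_ALL
 *	move	a0, sp			# struct pt_regs *
 *	bl	handle_whatever		# hypothetical C handler
 *	RESTORE_ALL_AND_RET
 */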

#endif /* _ASM_STACKFRAME_H */