1 #ifndef __LOONGARCH_KVM_COMPAT_H__
2 #define __LOONGARCH_KVM_COMPAT_H__
3
/*
 * _ULCAST_ widens the CSR field constants below to unsigned long in C
 * code; under the assembler (which has no casts) it expands to nothing,
 * so the same definitions are usable from both C and .S files.
 */
#ifdef __ASSEMBLY__
#define _ULCAST_
#else
#define _ULCAST_ (unsigned long)
#endif
9
/*
 * Basic-mode CSRs. Each field comes as NAME_SHIFT (bit position),
 * optional NAME_WIDTH (bit count) and NAME (in-place mask).
 */
#define KVM_CSR_CRMD 0x0 /* Current mode info */
#define KVM_CRMD_WE_SHIFT 9
#define KVM_CRMD_WE (_ULCAST_(0x1) << KVM_CRMD_WE_SHIFT) /* watchpoint enable */
#define KVM_CRMD_DACM_SHIFT 7
#define KVM_CRMD_DACM_WIDTH 2
#define KVM_CRMD_DACM (_ULCAST_(0x3) << KVM_CRMD_DACM_SHIFT) /* direct-access MAT, stores */
#define KVM_CRMD_DACF_SHIFT 5
#define KVM_CRMD_DACF_WIDTH 2
#define KVM_CRMD_DACF (_ULCAST_(0x3) << KVM_CRMD_DACF_SHIFT) /* direct-access MAT, fetches */
#define KVM_CRMD_PG_SHIFT 4
#define KVM_CRMD_PG (_ULCAST_(0x1) << KVM_CRMD_PG_SHIFT) /* paged translation enable */
#define KVM_CRMD_DA_SHIFT 3
#define KVM_CRMD_DA (_ULCAST_(0x1) << KVM_CRMD_DA_SHIFT) /* direct address mode */
#define KVM_CRMD_IE_SHIFT 2
#define KVM_CRMD_IE (_ULCAST_(0x1) << KVM_CRMD_IE_SHIFT) /* global interrupt enable */
#define KVM_CRMD_PLV_SHIFT 0
#define KVM_CRMD_PLV_WIDTH 2
#define KVM_CRMD_PLV (_ULCAST_(0x3) << KVM_CRMD_PLV_SHIFT) /* current privilege level */

#define KVM_CSR_PRMD 0x1 /* Prev-exception mode info */
#define KVM_PRMD_PIE_SHIFT 2
#define KVM_PRMD_PWE_SHIFT 3
#define KVM_PRMD_PIE (_ULCAST_(0x1) << KVM_PRMD_PIE_SHIFT) /* IE saved at exception entry */
#define KVM_PRMD_PWE (_ULCAST_(0x1) << KVM_PRMD_PWE_SHIFT) /* WE saved at exception entry */
#define KVM_PRMD_PPLV_SHIFT 0
#define KVM_PRMD_PPLV_WIDTH 2
#define KVM_PRMD_PPLV (_ULCAST_(0x3) << KVM_PRMD_PPLV_SHIFT) /* PLV saved at exception entry */

#define KVM_CSR_EUEN 0x2 /* Extended unit enable */
#define KVM_EUEN_LBTEN_SHIFT 3
#define KVM_EUEN_LBTEN (_ULCAST_(0x1) << KVM_EUEN_LBTEN_SHIFT) /* binary translation ext. */
#define KVM_EUEN_LASXEN_SHIFT 2
#define KVM_EUEN_LASXEN (_ULCAST_(0x1) << KVM_EUEN_LASXEN_SHIFT) /* 256-bit SIMD */
#define KVM_EUEN_LSXEN_SHIFT 1
#define KVM_EUEN_LSXEN (_ULCAST_(0x1) << KVM_EUEN_LSXEN_SHIFT) /* 128-bit SIMD */
#define KVM_EUEN_FPEN_SHIFT 0
#define KVM_EUEN_FPEN (_ULCAST_(0x1) << KVM_EUEN_FPEN_SHIFT) /* scalar FPU */

#define KVM_CSR_MISC 0x3 /* Misc config */
#define KVM_CSR_ECFG 0x4 /* Exception config */
#define KVM_ECFG_VS_SHIFT 16
#define KVM_ECFG_VS_WIDTH 3
#define KVM_ECFG_VS (_ULCAST_(0x7) << KVM_ECFG_VS_SHIFT) /* vector spacing */
#define KVM_ECFG_IM_SHIFT 0
#define KVM_ECFG_IM_WIDTH 13
#define KVM_ECFG_IM (_ULCAST_(0x1fff) << KVM_ECFG_IM_SHIFT) /* local interrupt mask */

#define KVM_CSR_ESTAT 0x5 /* Exception status */
#define KVM_ESTAT_ESUBCODE_SHIFT 22
#define KVM_ESTAT_ESUBCODE_WIDTH 9
#define KVM_ESTAT_ESUBCODE (_ULCAST_(0x1ff) << KVM_ESTAT_ESUBCODE_SHIFT) /* exception subcode */
#define KVM_ESTAT_EXC_SHIFT 16
#define KVM_ESTAT_EXC_WIDTH 6
#define KVM_ESTAT_EXC (_ULCAST_(0x3f) << KVM_ESTAT_EXC_SHIFT) /* exception code */
#define KVM_ESTAT_IS_SHIFT 0
#define KVM_ESTAT_IS_WIDTH 15
#define KVM_ESTAT_IS (_ULCAST_(0x7fff) << KVM_ESTAT_IS_SHIFT) /* pending interrupt status */
67
/* Exception state, TLB, timer and guest-status CSR numbers and fields. */
#define KVM_CSR_ERA 0x6 /* ERA */
#define KVM_CSR_BADV 0x7 /* Bad virtual address */
#define KVM_CSR_BADI 0x8 /* Bad instruction */
#define KVM_CSR_EENTRY 0xc /* Exception entry base address */
#define KVM_CSR_TLBIDX 0x10 /* TLB Index, EHINV, PageSize, NP */
#define KVM_CSR_TLBEHI 0x11 /* TLB EntryHi */
#define KVM_CSR_TLBELO0 0x12 /* TLB EntryLo0 */
#define KVM_CSR_TLBELO1 0x13 /* TLB EntryLo1 */
#define KVM_CSR_GTLBC 0x15 /* Guest TLB control */
#define KVM_GTLBC_TGID_SHIFT 16
#define KVM_GTLBC_TGID_WIDTH 8
#define KVM_GTLBC_TGID (_ULCAST_(0xff) << KVM_GTLBC_TGID_SHIFT) /* GID for TLB instructions */
#define KVM_GTLBC_TOTI_SHIFT 13
#define KVM_GTLBC_TOTI (_ULCAST_(0x1) << KVM_GTLBC_TOTI_SHIFT) /* trap on TLB instruction */
#define KVM_GTLBC_USETGID_SHIFT 12
#define KVM_GTLBC_USETGID (_ULCAST_(0x1) << KVM_GTLBC_USETGID_SHIFT) /* use TGID field */
#define KVM_GTLBC_GMTLBSZ_SHIFT 0
#define KVM_GTLBC_GMTLBSZ_WIDTH 6
#define KVM_GTLBC_GMTLBSZ (_ULCAST_(0x3f) << KVM_GTLBC_GMTLBSZ_SHIFT) /* guest MTLB size */

#define KVM_CSR_TRGP 0x16 /* TLBR read guest info */
#define KVM_CSR_ASID 0x18 /* ASID */
#define KVM_CSR_PGDL 0x19 /* Page table base address when VA[47] = 0 */
#define KVM_CSR_PGDH 0x1a /* Page table base address when VA[47] = 1 */
#define KVM_CSR_PGD 0x1b /* Page table base */
#define KVM_CSR_PWCTL0 0x1c /* PWCtl0 */
#define KVM_CSR_PWCTL1 0x1d /* PWCtl1 */
#define KVM_CSR_STLBPGSIZE 0x1e
#define KVM_CSR_RVACFG 0x1f
#define KVM_CSR_CPUID 0x20 /* CPU core number */
#define KVM_CSR_PRCFG1 0x21 /* Config1 */
#define KVM_CSR_PRCFG2 0x22 /* Config2 */
#define KVM_CSR_PRCFG3 0x23 /* Config3 */
/* KScratch registers KS0..KS8 */
#define KVM_CSR_KS0 0x30
#define KVM_CSR_KS1 0x31
#define KVM_CSR_KS2 0x32
#define KVM_CSR_KS3 0x33
#define KVM_CSR_KS4 0x34
#define KVM_CSR_KS5 0x35
#define KVM_CSR_KS6 0x36
#define KVM_CSR_KS7 0x37
#define KVM_CSR_KS8 0x38
#define KVM_CSR_TMID 0x40 /* Timer ID */
#define KVM_CSR_TCFG 0x41 /* Timer config */
#define KVM_TCFG_VAL_SHIFT 2
/*
 * NOTE(review): WIDTH says 48 but the mask 0x3fffffffffff covers only
 * 46 bits; this mismatch also exists in the upstream loongarch.h —
 * confirm against the architecture manual before relying on either.
 */
#define KVM_TCFG_VAL_WIDTH 48
#define KVM_TCFG_VAL (_ULCAST_(0x3fffffffffff) << KVM_TCFG_VAL_SHIFT) /* initial count */
#define KVM_TCFG_PERIOD_SHIFT 1
#define KVM_TCFG_PERIOD (_ULCAST_(0x1) << KVM_TCFG_PERIOD_SHIFT) /* periodic mode */
#define KVM_TCFG_EN (_ULCAST_(0x1)) /* timer enable, bit 0 */

#define KVM_CSR_TVAL 0x42 /* Timer value */
#define KVM_CSR_CNTC 0x43 /* Timer offset */
#define KVM_CSR_TINTCLR 0x44 /* Timer interrupt clear */
#define KVM_CSR_GSTAT 0x50 /* Guest status */
#define KVM_GSTAT_GID_SHIFT 16
#define KVM_GSTAT_GID_WIDTH 8
#define KVM_GSTAT_GID (_ULCAST_(0xff) << KVM_GSTAT_GID_SHIFT) /* current guest ID */
#define KVM_GSTAT_GIDBIT_SHIFT 4
#define KVM_GSTAT_GIDBIT_WIDTH 6
#define KVM_GSTAT_GIDBIT (_ULCAST_(0x3f) << KVM_GSTAT_GIDBIT_SHIFT) /* implemented GID bits */
#define KVM_GSTAT_PVM_SHIFT 1
#define KVM_GSTAT_PVM (_ULCAST_(0x1) << KVM_GSTAT_PVM_SHIFT) /* previous VM mode */
#define KVM_GSTAT_VM_SHIFT 0
#define KVM_GSTAT_VM (_ULCAST_(0x1) << KVM_GSTAT_VM_SHIFT) /* in VM (guest) mode */
133
/*
 * Guest-config CSR. GCI*/GCIP* select cacheop behavior; TO*/TI*/SI*
 * bits trap the corresponding guest operations to the hypervisor.
 *
 * NOTE(review): "_SHITF" (for SHIFT) and KVM_GCFG_MATR_MASK (which by
 * the pattern of its siblings would be KVM_GCFG_MATP_MASK) look like
 * inherited typos; the names are kept as-is because renaming them
 * would break existing users. Also MATP_WIDTH is 4 while the mask
 * covers 2 bits — confirm against the architecture manual.
 */
#define KVM_CSR_GCFG 0x51 /* Guest config */
#define KVM_GCFG_GPERF_SHIFT 24
#define KVM_GCFG_GPERF_WIDTH 3
#define KVM_GCFG_GPERF (_ULCAST_(0x7) << KVM_GCFG_GPERF_SHIFT) /* guest perf-counter access */
#define KVM_GCFG_GCI_SHIFT 20
#define KVM_GCFG_GCI_WIDTH 2
#define KVM_GCFG_GCI (_ULCAST_(0x3) << KVM_GCFG_GCI_SHIFT)
#define KVM_GCFG_GCI_ALL (_ULCAST_(0x0) << KVM_GCFG_GCI_SHIFT)
#define KVM_GCFG_GCI_HIT (_ULCAST_(0x1) << KVM_GCFG_GCI_SHIFT)
#define KVM_GCFG_GCI_SECURE (_ULCAST_(0x2) << KVM_GCFG_GCI_SHIFT)
#define KVM_GCFG_GCIP_SHIFT 16
#define KVM_GCFG_GCIP (_ULCAST_(0xf) << KVM_GCFG_GCIP_SHIFT)
/* GCIP_* are individual capability flags: bits 16, 17, 18 */
#define KVM_GCFG_GCIP_ALL (_ULCAST_(0x1) << KVM_GCFG_GCIP_SHIFT)
#define KVM_GCFG_GCIP_HIT (_ULCAST_(0x1) << (KVM_GCFG_GCIP_SHIFT + 1))
#define KVM_GCFG_GCIP_SECURE (_ULCAST_(0x1) << (KVM_GCFG_GCIP_SHIFT + 2))
#define KVM_GCFG_TORU_SHIFT 15
#define KVM_GCFG_TORU (_ULCAST_(0x1) << KVM_GCFG_TORU_SHIFT)
#define KVM_GCFG_TORUP_SHIFT 14
#define KVM_GCFG_TORUP (_ULCAST_(0x1) << KVM_GCFG_TORUP_SHIFT)
#define KVM_GCFG_TOP_SHIFT 13
#define KVM_GCFG_TOP (_ULCAST_(0x1) << KVM_GCFG_TOP_SHIFT)
#define KVM_GCFG_TOPP_SHIFT 12
#define KVM_GCFG_TOPP (_ULCAST_(0x1) << KVM_GCFG_TOPP_SHIFT)
#define KVM_GCFG_TOE_SHIFT 11
#define KVM_GCFG_TOE (_ULCAST_(0x1) << KVM_GCFG_TOE_SHIFT)
#define KVM_GCFG_TOEP_SHIFT 10
#define KVM_GCFG_TOEP (_ULCAST_(0x1) << KVM_GCFG_TOEP_SHIFT)
#define KVM_GCFG_TIT_SHIFT 9
#define KVM_GCFG_TIT (_ULCAST_(0x1) << KVM_GCFG_TIT_SHIFT)
#define KVM_GCFG_TITP_SHIFT 8
#define KVM_GCFG_TITP (_ULCAST_(0x1) << KVM_GCFG_TITP_SHIFT)
#define KVM_GCFG_SIT_SHIFT 7
#define KVM_GCFG_SIT (_ULCAST_(0x1) << KVM_GCFG_SIT_SHIFT)
#define KVM_GCFG_SITP_SHIFT 6
#define KVM_GCFG_SITP (_ULCAST_(0x1) << KVM_GCFG_SITP_SHIFT)
/* MATC: active-mode memory access type control */
#define KVM_GCFG_MATC_SHITF 4
#define KVM_GCFG_MATC_WIDTH 2
#define KVM_GCFG_MATC_MASK (_ULCAST_(0x3) << KVM_GCFG_MATC_SHITF)
#define KVM_GCFG_MATC_GUEST (_ULCAST_(0x0) << KVM_GCFG_MATC_SHITF)
#define KVM_GCFG_MATC_ROOT (_ULCAST_(0x1) << KVM_GCFG_MATC_SHITF)
#define KVM_GCFG_MATC_NEST (_ULCAST_(0x2) << KVM_GCFG_MATC_SHITF)
/* MATP: permitted memory access type control values */
#define KVM_GCFG_MATP_SHITF 0
#define KVM_GCFG_MATP_WIDTH 4
#define KVM_GCFG_MATR_MASK (_ULCAST_(0x3) << KVM_GCFG_MATP_SHITF)
#define KVM_GCFG_MATP_GUEST (_ULCAST_(0x0) << KVM_GCFG_MATP_SHITF)
#define KVM_GCFG_MATP_ROOT (_ULCAST_(0x1) << KVM_GCFG_MATP_SHITF)
#define KVM_GCFG_MATP_NEST (_ULCAST_(0x2) << KVM_GCFG_MATP_SHITF)
181
/* Guest interrupt/timer, LLBit, refill, machine-error and debug CSRs. */
#define KVM_CSR_GINTC 0x52 /* Guest interrupt control */
#define KVM_CSR_GCNTC 0x53 /* Guest timer offset */
#define KVM_CSR_LLBCTL 0x60 /* LLBit control */
#define KVM_LLBCTL_ROLLB_SHIFT 0
#define KVM_LLBCTL_ROLLB (_ULCAST_(1) << KVM_LLBCTL_ROLLB_SHIFT) /* read-only LLBit value */
#define KVM_LLBCTL_WCLLB_SHIFT 1
#define KVM_LLBCTL_WCLLB (_ULCAST_(1) << KVM_LLBCTL_WCLLB_SHIFT) /* write 1 to clear LLBit */
#define KVM_LLBCTL_KLO_SHIFT 2
#define KVM_LLBCTL_KLO (_ULCAST_(1) << KVM_LLBCTL_KLO_SHIFT) /* keep LLBit over ERTN */

#define KVM_CSR_IMPCTL1 0x80 /* Loongson config1 */
#define KVM_CSR_IMPCTL2 0x81 /* Loongson config2 */
#define KVM_CSR_GNMI 0x82
#define KVM_CSR_TLBRENTRY 0x88 /* TLB refill exception base address */
#define KVM_CSR_TLBRBADV 0x89 /* TLB refill badvaddr */
#define KVM_CSR_TLBRERA 0x8a /* TLB refill ERA */
#define KVM_CSR_TLBRSAVE 0x8b /* KScratch for TLB refill exception */
#define KVM_CSR_TLBRELO0 0x8c /* TLB refill entrylo0 */
#define KVM_CSR_TLBRELO1 0x8d /* TLB refill entrylo1 */
#define KVM_CSR_TLBREHI 0x8e /* TLB refill entryhi */
#define KVM_CSR_TLBRPRMD 0x8f /* TLB refill mode info */
#define KVM_CSR_MERRCTL 0x90 /* ERRCTL */
#define KVM_CSR_MERRINFO1 0x91 /* Error info1 */
#define KVM_CSR_MERRINFO2 0x92 /* Error info2 */
#define KVM_CSR_MERRENTRY 0x93 /* Error exception base address */
#define KVM_CSR_MERRERA 0x94 /* Error exception PC */
#define KVM_CSR_MERRSAVE 0x95 /* KScratch for machine error exception */
#define KVM_CSR_CTAG 0x98 /* TagLo + TagHi */
#define KVM_CSR_DMWIN0 0x180 /* 64 direct map win0: MEM & IF */
#define KVM_CSR_DMWIN1 0x181 /* 64 direct map win1: MEM & IF */
#define KVM_CSR_DMWIN2 0x182 /* 64 direct map win2: MEM */
#define KVM_CSR_DMWIN3 0x183 /* 64 direct map win3: MEM */
#define KVM_CSR_PERFCTRL0 0x200 /* 32 perf event 0 config */
#define KVM_CSR_PERFCNTR0 0x201 /* 64 perf event 0 count value */
#define KVM_CSR_PERFCTRL1 0x202 /* 32 perf event 1 config */
#define KVM_CSR_PERFCNTR1 0x203 /* 64 perf event 1 count value */
#define KVM_CSR_PERFCTRL2 0x204 /* 32 perf event 2 config */
#define KVM_CSR_PERFCNTR2 0x205 /* 64 perf event 2 count value */
#define KVM_CSR_PERFCTRL3 0x206 /* 32 perf event 3 config */
#define KVM_CSR_PERFCNTR3 0x207 /* 64 perf event 3 count value */
#define KVM_CSR_MWPC 0x300 /* data breakpoint config */
#define KVM_CSR_MWPS 0x301 /* data breakpoint status */
#define KVM_CSR_FWPC 0x380 /* instruction breakpoint config */
#define KVM_CSR_FWPS 0x381 /* instruction breakpoint status */
#define KVM_CSR_DEBUG 0x500 /* debug config */
#define KVM_CSR_DERA 0x501 /* debug era */
#define KVM_CSR_DESAVE 0x502 /* debug save */
229
/*
 * IOCSR (I/O CSR space) register offsets and feature bits.
 * BIT_ULL() comes from the kernel headers (<linux/bits.h>) — this
 * header assumes they are already included by its users.
 */
#define KVM_IOCSR_FEATURES 0x8
#define KVM_IOCSRF_TEMP BIT_ULL(0)
#define KVM_IOCSRF_NODECNT BIT_ULL(1)
#define KVM_IOCSRF_MSI BIT_ULL(2)
#define KVM_IOCSRF_EXTIOI BIT_ULL(3)
#define KVM_IOCSRF_CSRIPI BIT_ULL(4)
#define KVM_IOCSRF_FREQCSR BIT_ULL(5)
#define KVM_IOCSRF_FREQSCALE BIT_ULL(6)
#define KVM_IOCSRF_DVFSV1 BIT_ULL(7)
#define KVM_IOCSRF_EXTIOI_DECODE BIT_ULL(9)
#define KVM_IOCSRF_FLATMODE BIT_ULL(10)
#define KVM_IOCSRF_VM BIT_ULL(11)

#define KVM_IOCSR_VENDOR 0x10
#define KVM_IOCSR_CPUNAME 0x20
#define KVM_IOCSR_NODECNT 0x408

#define KVM_IOCSR_MISC_FUNC 0x420
#define KVM_IOCSRF_MISC_FUNC_EXT_IOI_EN BIT_ULL(48)

/* Per-core CSRs, only accessible by local cores */
#define KVM_IOCSR_IPI_STATUS 0x1000
#define KVM_IOCSR_IPI_SEND 0x1040
#define KVM_IOCSR_MBUF_SEND 0x1048
#define KVM_IOCSR_EXTIOI_NODEMAP_BASE 0x14a0
#define KVM_IOCSR_EXTIOI_IPMAP_BASE 0x14c0
#define KVM_IOCSR_EXTIOI_EN_BASE 0x1600
#define KVM_IOCSR_EXTIOI_BOUNCE_BASE 0x1680
#define KVM_IOCSR_EXTIOI_ISR_BASE 0x1800
#define KVM_IOCSR_EXTIOI_ROUTE_BASE 0x1c00
260
261 #ifndef __ASSEMBLY__
262
263 /* CSR */
/*
 * Read the low 32 bits of host CSR @reg via csrrd.
 *
 * @reg must be a compile-time constant ("i" constraint): the CSR number
 * is encoded as an immediate in the instruction. The "memory" clobber
 * keeps the access ordered against surrounding memory operations.
 */
static inline u32 kvm_csr_readl(u32 reg)
{
	u32 val;

	asm volatile (
		"csrrd %[val], %[reg] \n"
		: [val] "=r" (val)
		: [reg] "i" (reg)
		: "memory");
	return val;
}
275
/*
 * Read the full 64-bit value of host CSR @reg via csrrd.
 * @reg must be a compile-time constant (immediate operand).
 */
static inline u64 kvm_csr_readq(u32 reg)
{
	u64 val;

	asm volatile (
		"csrrd %[val], %[reg] \n"
		: [val] "=r" (val)
		: [reg] "i" (reg)
		: "memory");
	return val;
}
287
/*
 * Write the 32-bit @val to host CSR @reg via csrwr.
 *
 * csrwr is an exchange: it also deposits the old CSR value into the
 * source register, hence the "+r" (read-write) constraint on @val;
 * the old value is simply discarded here.
 */
static inline void kvm_csr_writel(u32 val, u32 reg)
{
	asm volatile (
		"csrwr %[val], %[reg] \n"
		: [val] "+r" (val)
		: [reg] "i" (reg)
		: "memory");
}
296
/*
 * Write the 64-bit @val to host CSR @reg via csrwr.
 * "+r" because csrwr swaps the old CSR value back into the register;
 * the old value is discarded.
 */
static inline void kvm_csr_writeq(u64 val, u32 reg)
{
	asm volatile (
		"csrwr %[val], %[reg] \n"
		: [val] "+r" (val)
		: [reg] "i" (reg)
		: "memory");
}
305
/*
 * Exchange bits of host CSR @reg selected by @mask with the
 * corresponding bits of @val (csrxchg); returns the previous
 * 32-bit CSR value (csrxchg writes it back into the val register).
 */
static inline u32 kvm_csr_xchgl(u32 val, u32 mask, u32 reg)
{
	asm volatile (
		"csrxchg %[val], %[mask], %[reg] \n"
		: [val] "+r" (val)
		: [mask] "r" (mask), [reg] "i" (reg)
		: "memory");
	return val;
}
315
/*
 * 64-bit variant of kvm_csr_xchgl: replace the @mask-selected bits of
 * host CSR @reg with those of @val; returns the previous CSR value.
 */
static inline u64 kvm_csr_xchgq(u64 val, u64 mask, u32 reg)
{
	asm volatile (
		"csrxchg %[val], %[mask], %[reg] \n"
		: [val] "+r" (val)
		: [mask] "r" (mask), [reg] "i" (reg)
		: "memory");
	return val;
}
325
326
327 /* IOCSR */
/*
 * Read a 32-bit value from IOCSR offset @reg (iocsrrd.w).
 * Unlike the csrrd helpers, the IOCSR address is taken from a
 * register ("r"), so @reg need not be a compile-time constant.
 */
static inline u32 kvm_iocsr_readl(u32 reg)
{
	u32 val;

	asm volatile (
		"iocsrrd.w %[val], %[reg] \n"
		: [val] "=r" (val)
		: [reg] "r" (reg)
		: "memory");
	return val;
}
339
/* Read a 64-bit value from IOCSR offset @reg (iocsrrd.d). */
static inline u64 kvm_iocsr_readq(u32 reg)
{
	u64 val;

	asm volatile (
		"iocsrrd.d %[val], %[reg] \n"
		: [val] "=r" (val)
		: [reg] "r" (reg)
		: "memory");
	return val;
}
351
/* Write the byte @val to IOCSR offset @reg (iocsrwr.b). */
static inline void kvm_iocsr_writeb(u8 val, u32 reg)
{
	asm volatile (
		"iocsrwr.b %[val], %[reg] \n"
		:
		: [val] "r" (val), [reg] "r" (reg)
		: "memory");
}
360
/* Write the 32-bit @val to IOCSR offset @reg (iocsrwr.w). */
static inline void kvm_iocsr_writel(u32 val, u32 reg)
{
	asm volatile (
		"iocsrwr.w %[val], %[reg] \n"
		:
		: [val] "r" (val), [reg] "r" (reg)
		: "memory");
}
369
/* Write the 64-bit @val to IOCSR offset @reg (iocsrwr.d). */
static inline void kvm_iocsr_writeq(u64 val, u32 reg)
{
	asm volatile (
		"iocsrwr.d %[val], %[reg] \n"
		:
		: [val] "r" (val), [reg] "r" (reg)
		: "memory");
}
378
379
380 /* GCSR */
/*
 * Read guest CSR @reg.
 *
 * The instruction is hand-encoded with .word (opcode 0x5 << 24,
 * CSR number in bits 10+, rj = 0 selects the gcsr read form) —
 * presumably because the assembler may lack the gcsr* mnemonics;
 * parse_r resolves the allocated register for %[val] to its number.
 * "+r" because the destination register is an input to the encoding.
 */
static inline u64 kvm_gcsr_read(u32 reg)
{
	u64 val = 0;

	asm volatile (
		"parse_r __reg, %[val] \n"
		".word 0x5 << 24 | %[reg] << 10 | 0 << 5 | __reg \n"
		: [val] "+r" (val)
		: [reg] "i" (reg)
		: "memory");
	return val;
}
393
/*
 * Write @val to guest CSR @reg.
 * Hand-encoded like kvm_gcsr_read; rj = 1 selects the gcsr write
 * form. "+r": the write form swaps the old value back into the
 * register, which is discarded.
 */
static inline void kvm_gcsr_write(u64 val, u32 reg)
{
	asm volatile (
		"parse_r __reg, %[val] \n"
		".word 0x5 << 24 | %[reg] << 10 | 1 << 5 | __reg \n"
		: [val] "+r" (val)
		: [reg] "i" (reg)
		: "memory");
}
403
/*
 * Exchange the @mask-selected bits of guest CSR @reg with those of
 * @val; returns the previous guest CSR value. Hand-encoded like the
 * helpers above, with rj = the mask register (values >= 2 select the
 * exchange form; parse_r supplies both register numbers).
 */
static inline u64 kvm_gcsr_xchg(u64 val, u64 mask, u32 reg)
{
	asm volatile (
		"parse_r __rd, %[val] \n"
		"parse_r __rj, %[mask] \n"
		".word 0x5 << 24 | %[reg] << 10 | __rj << 5 | __rd \n"
		: [val] "+r" (val)
		: [mask] "r" (mask), [reg] "i" (reg)
		: "memory");
	return val;
}
415
416 #endif /* !__ASSEMBLY__ */
417
/*
 * Host CSR accessors built on kvm_csr_readq()/kvm_csr_writeq().
 * Fixed: the perf-counter and IMPCTL1 helpers referenced
 * LOONGARCH_CSR_* names that this compat header never defines
 * (and which were inconsistent with kvm_write_csr_perfctrl0 just
 * above, which already used KVM_CSR_PERFCTRL0); they now use the
 * KVM_CSR_* constants defined earlier in this header, which carry
 * the same numeric CSR addresses.
 */
#define kvm_read_csr_euen() kvm_csr_readq(KVM_CSR_EUEN)
#define kvm_write_csr_euen(val) kvm_csr_writeq(val, KVM_CSR_EUEN)
#define kvm_read_csr_ecfg() kvm_csr_readq(KVM_CSR_ECFG)
#define kvm_write_csr_ecfg(val) kvm_csr_writeq(val, KVM_CSR_ECFG)
#define kvm_write_csr_perfctrl0(val) kvm_csr_writeq(val, KVM_CSR_PERFCTRL0)
#define kvm_write_csr_perfcntr0(val) kvm_csr_writeq(val, KVM_CSR_PERFCNTR0)
#define kvm_write_csr_perfctrl1(val) kvm_csr_writeq(val, KVM_CSR_PERFCTRL1)
#define kvm_write_csr_perfcntr1(val) kvm_csr_writeq(val, KVM_CSR_PERFCNTR1)
#define kvm_write_csr_perfctrl2(val) kvm_csr_writeq(val, KVM_CSR_PERFCTRL2)
#define kvm_write_csr_perfcntr2(val) kvm_csr_writeq(val, KVM_CSR_PERFCNTR2)
#define kvm_write_csr_perfctrl3(val) kvm_csr_writeq(val, KVM_CSR_PERFCTRL3)
#define kvm_write_csr_perfcntr3(val) kvm_csr_writeq(val, KVM_CSR_PERFCNTR3)
#define kvm_read_csr_impctl1() kvm_csr_readq(KVM_CSR_IMPCTL1)
#define kvm_write_csr_impctl1(val) kvm_csr_writeq(val, KVM_CSR_IMPCTL1)
432
433
/* Guest related CSRS (host-side view, read with csrrd/csrwr) */
#define kvm_read_csr_gtlbc() kvm_csr_readq(KVM_CSR_GTLBC)
#define kvm_write_csr_gtlbc(val) kvm_csr_writeq(val, KVM_CSR_GTLBC)
#define kvm_read_csr_trgp() kvm_csr_readq(KVM_CSR_TRGP)
#define kvm_read_csr_gcfg() kvm_csr_readq(KVM_CSR_GCFG)
#define kvm_write_csr_gcfg(val) kvm_csr_writeq(val, KVM_CSR_GCFG)
#define kvm_read_csr_gstat() kvm_csr_readq(KVM_CSR_GSTAT)
#define kvm_write_csr_gstat(val) kvm_csr_writeq(val, KVM_CSR_GSTAT)
#define kvm_read_csr_gintc() kvm_csr_readq(KVM_CSR_GINTC)
#define kvm_write_csr_gintc(val) kvm_csr_writeq(val, KVM_CSR_GINTC)
#define kvm_read_csr_gcntc() kvm_csr_readq(KVM_CSR_GCNTC)
#define kvm_write_csr_gcntc(val) kvm_csr_writeq(val, KVM_CSR_GCNTC)

/*
 * Guest CSRS read and write (accessed with the hand-encoded gcsr
 * helpers above). NOTE(review): kvm_read_gcsr_tlbhi/tlblo* drop the
 * 'e' of TLBEHI/TLBELO — names kept for compatibility with callers.
 */
#define kvm_read_gcsr_crmd() kvm_gcsr_read(KVM_CSR_CRMD)
#define kvm_write_gcsr_crmd(val) kvm_gcsr_write(val, KVM_CSR_CRMD)
#define kvm_read_gcsr_prmd() kvm_gcsr_read(KVM_CSR_PRMD)
#define kvm_write_gcsr_prmd(val) kvm_gcsr_write(val, KVM_CSR_PRMD)
#define kvm_read_gcsr_euen() kvm_gcsr_read(KVM_CSR_EUEN)
#define kvm_write_gcsr_euen(val) kvm_gcsr_write(val, KVM_CSR_EUEN)
#define kvm_read_gcsr_misc() kvm_gcsr_read(KVM_CSR_MISC)
#define kvm_write_gcsr_misc(val) kvm_gcsr_write(val, KVM_CSR_MISC)
#define kvm_read_gcsr_ecfg() kvm_gcsr_read(KVM_CSR_ECFG)
#define kvm_write_gcsr_ecfg(val) kvm_gcsr_write(val, KVM_CSR_ECFG)
#define kvm_read_gcsr_estat() kvm_gcsr_read(KVM_CSR_ESTAT)
#define kvm_write_gcsr_estat(val) kvm_gcsr_write(val, KVM_CSR_ESTAT)
#define kvm_read_gcsr_era() kvm_gcsr_read(KVM_CSR_ERA)
#define kvm_write_gcsr_era(val) kvm_gcsr_write(val, KVM_CSR_ERA)
#define kvm_read_gcsr_badv() kvm_gcsr_read(KVM_CSR_BADV)
#define kvm_write_gcsr_badv(val) kvm_gcsr_write(val, KVM_CSR_BADV)
#define kvm_read_gcsr_badi() kvm_gcsr_read(KVM_CSR_BADI)
#define kvm_write_gcsr_badi(val) kvm_gcsr_write(val, KVM_CSR_BADI)
#define kvm_read_gcsr_eentry() kvm_gcsr_read(KVM_CSR_EENTRY)
#define kvm_write_gcsr_eentry(val) kvm_gcsr_write(val, KVM_CSR_EENTRY)

#define kvm_read_gcsr_tlbidx() kvm_gcsr_read(KVM_CSR_TLBIDX)
#define kvm_write_gcsr_tlbidx(val) kvm_gcsr_write(val, KVM_CSR_TLBIDX)
#define kvm_read_gcsr_tlbhi() kvm_gcsr_read(KVM_CSR_TLBEHI)
#define kvm_write_gcsr_tlbhi(val) kvm_gcsr_write(val, KVM_CSR_TLBEHI)
#define kvm_read_gcsr_tlblo0() kvm_gcsr_read(KVM_CSR_TLBELO0)
#define kvm_write_gcsr_tlblo0(val) kvm_gcsr_write(val, KVM_CSR_TLBELO0)
#define kvm_read_gcsr_tlblo1() kvm_gcsr_read(KVM_CSR_TLBELO1)
#define kvm_write_gcsr_tlblo1(val) kvm_gcsr_write(val, KVM_CSR_TLBELO1)

#define kvm_read_gcsr_asid() kvm_gcsr_read(KVM_CSR_ASID)
#define kvm_write_gcsr_asid(val) kvm_gcsr_write(val, KVM_CSR_ASID)
#define kvm_read_gcsr_pgdl() kvm_gcsr_read(KVM_CSR_PGDL)
#define kvm_write_gcsr_pgdl(val) kvm_gcsr_write(val, KVM_CSR_PGDL)
#define kvm_read_gcsr_pgdh() kvm_gcsr_read(KVM_CSR_PGDH)
#define kvm_write_gcsr_pgdh(val) kvm_gcsr_write(val, KVM_CSR_PGDH)
#define kvm_write_gcsr_pgd(val) kvm_gcsr_write(val, KVM_CSR_PGD)
#define kvm_read_gcsr_pgd() kvm_gcsr_read(KVM_CSR_PGD)
#define kvm_read_gcsr_pwctl0() kvm_gcsr_read(KVM_CSR_PWCTL0)
#define kvm_write_gcsr_pwctl0(val) kvm_gcsr_write(val, KVM_CSR_PWCTL0)
#define kvm_read_gcsr_pwctl1() kvm_gcsr_read(KVM_CSR_PWCTL1)
#define kvm_write_gcsr_pwctl1(val) kvm_gcsr_write(val, KVM_CSR_PWCTL1)
#define kvm_read_gcsr_stlbpgsize() kvm_gcsr_read(KVM_CSR_STLBPGSIZE)
#define kvm_write_gcsr_stlbpgsize(val) kvm_gcsr_write(val, KVM_CSR_STLBPGSIZE)
#define kvm_read_gcsr_rvacfg() kvm_gcsr_read(KVM_CSR_RVACFG)
#define kvm_write_gcsr_rvacfg(val) kvm_gcsr_write(val, KVM_CSR_RVACFG)

#define kvm_read_gcsr_cpuid() kvm_gcsr_read(KVM_CSR_CPUID)
#define kvm_write_gcsr_cpuid(val) kvm_gcsr_write(val, KVM_CSR_CPUID)
#define kvm_read_gcsr_prcfg1() kvm_gcsr_read(KVM_CSR_PRCFG1)
#define kvm_write_gcsr_prcfg1(val) kvm_gcsr_write(val, KVM_CSR_PRCFG1)
#define kvm_read_gcsr_prcfg2() kvm_gcsr_read(KVM_CSR_PRCFG2)
#define kvm_write_gcsr_prcfg2(val) kvm_gcsr_write(val, KVM_CSR_PRCFG2)
#define kvm_read_gcsr_prcfg3() kvm_gcsr_read(KVM_CSR_PRCFG3)
#define kvm_write_gcsr_prcfg3(val) kvm_gcsr_write(val, KVM_CSR_PRCFG3)

/* Guest KScratch registers (KS8 has no accessor here) */
#define kvm_read_gcsr_kscratch0() kvm_gcsr_read(KVM_CSR_KS0)
#define kvm_write_gcsr_kscratch0(val) kvm_gcsr_write(val, KVM_CSR_KS0)
#define kvm_read_gcsr_kscratch1() kvm_gcsr_read(KVM_CSR_KS1)
#define kvm_write_gcsr_kscratch1(val) kvm_gcsr_write(val, KVM_CSR_KS1)
#define kvm_read_gcsr_kscratch2() kvm_gcsr_read(KVM_CSR_KS2)
#define kvm_write_gcsr_kscratch2(val) kvm_gcsr_write(val, KVM_CSR_KS2)
#define kvm_read_gcsr_kscratch3() kvm_gcsr_read(KVM_CSR_KS3)
#define kvm_write_gcsr_kscratch3(val) kvm_gcsr_write(val, KVM_CSR_KS3)
#define kvm_read_gcsr_kscratch4() kvm_gcsr_read(KVM_CSR_KS4)
#define kvm_write_gcsr_kscratch4(val) kvm_gcsr_write(val, KVM_CSR_KS4)
#define kvm_read_gcsr_kscratch5() kvm_gcsr_read(KVM_CSR_KS5)
#define kvm_write_gcsr_kscratch5(val) kvm_gcsr_write(val, KVM_CSR_KS5)
#define kvm_read_gcsr_kscratch6() kvm_gcsr_read(KVM_CSR_KS6)
#define kvm_write_gcsr_kscratch6(val) kvm_gcsr_write(val, KVM_CSR_KS6)
#define kvm_read_gcsr_kscratch7() kvm_gcsr_read(KVM_CSR_KS7)
#define kvm_write_gcsr_kscratch7(val) kvm_gcsr_write(val, KVM_CSR_KS7)

#define kvm_read_gcsr_timerid() kvm_gcsr_read(KVM_CSR_TMID)
#define kvm_write_gcsr_timerid(val) kvm_gcsr_write(val, KVM_CSR_TMID)
#define kvm_read_gcsr_timercfg() kvm_gcsr_read(KVM_CSR_TCFG)
#define kvm_write_gcsr_timercfg(val) kvm_gcsr_write(val, KVM_CSR_TCFG)
#define kvm_read_gcsr_timertick() kvm_gcsr_read(KVM_CSR_TVAL)
#define kvm_write_gcsr_timertick(val) kvm_gcsr_write(val, KVM_CSR_TVAL)
#define kvm_read_gcsr_timeroffset() kvm_gcsr_read(KVM_CSR_CNTC)
#define kvm_write_gcsr_timeroffset(val) kvm_gcsr_write(val, KVM_CSR_CNTC)

#define kvm_read_gcsr_llbctl() kvm_gcsr_read(KVM_CSR_LLBCTL)
#define kvm_write_gcsr_llbctl(val) kvm_gcsr_write(val, KVM_CSR_LLBCTL)

#define kvm_read_gcsr_tlbrentry() kvm_gcsr_read(KVM_CSR_TLBRENTRY)
#define kvm_write_gcsr_tlbrentry(val) kvm_gcsr_write(val, KVM_CSR_TLBRENTRY)
#define kvm_read_gcsr_tlbrbadv() kvm_gcsr_read(KVM_CSR_TLBRBADV)
#define kvm_write_gcsr_tlbrbadv(val) kvm_gcsr_write(val, KVM_CSR_TLBRBADV)
#define kvm_read_gcsr_tlbrera() kvm_gcsr_read(KVM_CSR_TLBRERA)
#define kvm_write_gcsr_tlbrera(val) kvm_gcsr_write(val, KVM_CSR_TLBRERA)
#define kvm_read_gcsr_tlbrsave() kvm_gcsr_read(KVM_CSR_TLBRSAVE)
#define kvm_write_gcsr_tlbrsave(val) kvm_gcsr_write(val, KVM_CSR_TLBRSAVE)
#define kvm_read_gcsr_tlbrelo0() kvm_gcsr_read(KVM_CSR_TLBRELO0)
#define kvm_write_gcsr_tlbrelo0(val) kvm_gcsr_write(val, KVM_CSR_TLBRELO0)
#define kvm_read_gcsr_tlbrelo1() kvm_gcsr_read(KVM_CSR_TLBRELO1)
#define kvm_write_gcsr_tlbrelo1(val) kvm_gcsr_write(val, KVM_CSR_TLBRELO1)
#define kvm_read_gcsr_tlbrehi() kvm_gcsr_read(KVM_CSR_TLBREHI)
#define kvm_write_gcsr_tlbrehi(val) kvm_gcsr_write(val, KVM_CSR_TLBREHI)
#define kvm_read_gcsr_tlbrprmd() kvm_gcsr_read(KVM_CSR_TLBRPRMD)
#define kvm_write_gcsr_tlbrprmd(val) kvm_gcsr_write(val, KVM_CSR_TLBRPRMD)

#define kvm_read_gcsr_directwin0() kvm_gcsr_read(KVM_CSR_DMWIN0)
#define kvm_write_gcsr_directwin0(val) kvm_gcsr_write(val, KVM_CSR_DMWIN0)
#define kvm_read_gcsr_directwin1() kvm_gcsr_read(KVM_CSR_DMWIN1)
#define kvm_write_gcsr_directwin1(val) kvm_gcsr_write(val, KVM_CSR_DMWIN1)
#define kvm_read_gcsr_directwin2() kvm_gcsr_read(KVM_CSR_DMWIN2)
#define kvm_write_gcsr_directwin2(val) kvm_gcsr_write(val, KVM_CSR_DMWIN2)
#define kvm_read_gcsr_directwin3() kvm_gcsr_read(KVM_CSR_DMWIN3)
#define kvm_write_gcsr_directwin3(val) kvm_gcsr_write(val, KVM_CSR_DMWIN3)
558
559 #ifndef __ASSEMBLY__
560
/* OR @set into CSR.GTLBC; return the value the CSR held beforehand. */
static inline unsigned long
kvm_set_csr_gtlbc(unsigned long set)
{
	unsigned long old;

	old = kvm_read_csr_gtlbc();
	kvm_write_csr_gtlbc(old | set);

	return old;
}
572
/* OR @set into CSR.EUEN; return the value the CSR held beforehand. */
static inline unsigned long
kvm_set_csr_euen(unsigned long set)
{
	unsigned long old;

	old = kvm_read_csr_euen();
	kvm_write_csr_euen(old | set);

	return old;
}
584
/* OR @set into CSR.GINTC; return the value the CSR held beforehand. */
static inline unsigned long
kvm_set_csr_gintc(unsigned long set)
{
	unsigned long old;

	old = kvm_read_csr_gintc();
	kvm_write_csr_gintc(old | set);

	return old;
}
596
/* OR @set into the guest LLBCTL CSR; return its previous value. */
static inline unsigned long
kvm_set_gcsr_llbctl(unsigned long set)
{
	unsigned long old;

	old = kvm_read_gcsr_llbctl();
	kvm_write_gcsr_llbctl(old | set);

	return old;
}
608
609
/* Clear the bits in @clear from CSR.GTLBC; return the previous value. */
static inline unsigned long
kvm_clear_csr_gtlbc(unsigned long clear)
{
	unsigned long old;

	old = kvm_read_csr_gtlbc();
	kvm_write_csr_gtlbc(old & ~clear);

	return old;
}
621
/* Clear the bits in @clear from CSR.EUEN; return the previous value. */
static inline unsigned long
kvm_clear_csr_euen(unsigned long clear)
{
	unsigned long old;

	old = kvm_read_csr_euen();
	kvm_write_csr_euen(old & ~clear);

	return old;
}
633
/* Clear the bits in @clear from CSR.GINTC; return the previous value. */
static inline unsigned long
kvm_clear_csr_gintc(unsigned long clear)
{
	unsigned long old;

	old = kvm_read_csr_gintc();
	kvm_write_csr_gintc(old & ~clear);

	return old;
}
645
/*
 * Replace the bits of CSR.GSTAT selected by @change with the
 * corresponding bits of @val; returns the previous raw value.
 *
 * Fixed: the body called read_csr_gstat()/write_csr_gstat() without
 * the kvm_ prefix — those names are not defined by this compat header,
 * which provides kvm_read_csr_gstat()/kvm_write_csr_gstat() instead
 * (and which every sibling helper here uses).
 */
static inline unsigned long
kvm_change_csr_gstat(unsigned long change, unsigned long val)
{
	unsigned long res, new;

	res = kvm_read_csr_gstat();
	new = res & ~change;
	new |= (val & change);
	kvm_write_csr_gstat(new);

	return res;
}
658
/*
 * Replace the bits of CSR.GCFG selected by @change with the
 * corresponding bits of @val; returns the previous raw value.
 *
 * Fixed: the body called read_csr_gcfg()/write_csr_gcfg() without
 * the kvm_ prefix — this compat header defines the accessors as
 * kvm_read_csr_gcfg()/kvm_write_csr_gcfg(), consistent with the
 * other helpers in this file.
 */
static inline unsigned long
kvm_change_csr_gcfg(unsigned long change, unsigned long val)
{
	unsigned long res, new;

	res = kvm_read_csr_gcfg();
	new = res & ~change;
	new |= (val & change);
	kvm_write_csr_gcfg(new);

	return res;
}
671
672
/*
 * Atomically set/clear bits of the guest ESTAT via gcsrxchg:
 * set writes @val under mask @val; clear writes ~@val under mask
 * @val, i.e. clears exactly the bits present in @val.
 */
#define kvm_set_gcsr_estat(val)	\
	kvm_gcsr_xchg(val, val, KVM_CSR_ESTAT)
#define kvm_clear_gcsr_estat(val)	\
	kvm_gcsr_xchg(~(val), val, KVM_CSR_ESTAT)
677
678 #endif
679
680
#if (_LOONGARCH_SZLONG == 32)
/*
 * Width-agnostic assembler mnemonics for `long`-sized operands on a
 * 32-bit ABI. The *_SLL/SRL/SRA names are the immediate-shift forms
 * (slli/srli/srai); the *V names take the shift amount in a register.
 */
#define KVM_LONG_ADD add.w
#define KVM_LONG_ADDI addi.w
#define KVM_LONG_SUB sub.w
#define KVM_LONG_L ld.w
#define KVM_LONG_S st.w
#define KVM_LONG_SLL slli.w
#define KVM_LONG_SLLV sll.w
#define KVM_LONG_SRL srli.w
#define KVM_LONG_SRLV srl.w
#define KVM_LONG_SRA srai.w
#define KVM_LONG_SRAV sra.w

#define KVM_LONGSIZE 4 /* sizeof(long) */
#define KVM_LONGMASK 3
#define KVM_LONGLOG 2 /* log2(sizeof(long)) */

/*
 * How to add/sub/load/store/shift pointers.
 */

#define KVM_PTR_ADD add.w
#define KVM_PTR_ADDI addi.w
#define KVM_PTR_SUB sub.w
#define KVM_PTR_L ld.w
#define KVM_PTR_S st.w
#define KVM_PTR_LI li.w
#define KVM_PTR_SLL slli.w
#define KVM_PTR_SLLV sll.w
#define KVM_PTR_SRL srli.w
#define KVM_PTR_SRLV srl.w
#define KVM_PTR_SRA srai.w
#define KVM_PTR_SRAV sra.w

#define KVM_PTR_SCALESHIFT 2 /* index -> byte offset shift */

#define KVM_PTRSIZE 4
#define KVM_PTRLOG 2

#endif
721
#if (_LOONGARCH_SZLONG == 64)
/*
 * Width-agnostic assembler mnemonics for `long`-sized operands on a
 * 64-bit ABI. The *_SLL/SRL/SRA names are the immediate-shift forms
 * (slli/srli/srai); the *V names take the shift amount in a register.
 *
 * Fixed: KVM_LONG_SRA was "sra.w" — a 32-bit register-shift — while
 * the pattern here (and the 32-bit section, which pairs SRA srai.w
 * with SRAV sra.w) requires the 64-bit immediate form srai.d.
 */
#define KVM_LONG_ADD add.d
#define KVM_LONG_ADDI addi.d
#define KVM_LONG_SUB sub.d
#define KVM_LONG_L ld.d
#define KVM_LONG_S st.d
#define KVM_LONG_SLL slli.d
#define KVM_LONG_SLLV sll.d
#define KVM_LONG_SRL srli.d
#define KVM_LONG_SRLV srl.d
#define KVM_LONG_SRA srai.d
#define KVM_LONG_SRAV sra.d

#define KVM_LONGSIZE 8 /* sizeof(long) */
#define KVM_LONGMASK 7
#define KVM_LONGLOG 3 /* log2(sizeof(long)) */

/*
 * How to add/sub/load/store/shift pointers.
 */

#define KVM_PTR_ADD add.d
#define KVM_PTR_ADDI addi.d
#define KVM_PTR_SUB sub.d
#define KVM_PTR_L ld.d
#define KVM_PTR_S st.d
#define KVM_PTR_LI li.d
#define KVM_PTR_SLL slli.d
#define KVM_PTR_SLLV sll.d
#define KVM_PTR_SRL srli.d
#define KVM_PTR_SRLV srl.d
#define KVM_PTR_SRA srai.d
#define KVM_PTR_SRAV sra.d

#define KVM_PTR_SCALESHIFT 3 /* index -> byte offset shift */

#define KVM_PTRSIZE 8
#define KVM_PTRLOG 3
#endif
761
762 #endif /* __LOONGARCH_KVM_COMPAT_H__ */
763