/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 2020-2022 Loongson Technology Corporation Limited
 */

#ifndef __LOONGARCH_KVM_CSR_H__
#define __LOONGARCH_KVM_CSR_H__

#include <linux/uaccess.h>
#include <linux/kvm_host.h>
#include <asm/kvm_host.h>
#include <asm/watch.h>
#include "kvmcpu.h"

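/*
 * Access to the hardware guest CSR context (hence the _hw_ naming):
 * gcsr_read()/gcsr_write() touch the live guest CSR state, not the
 * software copy kept in struct loongarch_csrs.
 */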
#define kvm_read_hw_gcsr(id)		gcsr_read(id)
#define kvm_write_hw_gcsr(csr, id, val)	gcsr_write(val, id)

int _kvm_getcsr(struct kvm_vcpu *vcpu, unsigned int id, u64 *v, int force);
int _kvm_setcsr(struct kvm_vcpu *vcpu, unsigned int id, u64 *v, int force);
unsigned long _kvm_emu_read_csr(struct kvm_vcpu *vcpu, int csrid);
void _kvm_emu_write_csr(struct kvm_vcpu *vcpu, int csrid, unsigned long val);
void _kvm_emu_xchg_csr(struct kvm_vcpu *vcpu, int csrid,
		unsigned long csr_mask, unsigned long val);
int _kvm_emu_iocsr(larch_inst inst, struct kvm_run *run, struct kvm_vcpu *vcpu);

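/*
 * Helpers for the software CSR image kept in struct loongarch_csrs:
 * kvm_save_hw_gcsr()/kvm_restore_hw_gcsr() copy a single guest CSR between
 * hardware and the in-memory image, while the *_sw_gcsr() variants only
 * read or modify the in-memory image.
 */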
static inline void kvm_save_hw_gcsr(struct loongarch_csrs *csr, int gid)
{
	csr->csrs[gid] = gcsr_read(gid);
}

static inline void kvm_restore_hw_gcsr(struct loongarch_csrs *csr, int gid)
{
	gcsr_write(csr->csrs[gid], gid);
}

static inline unsigned long kvm_read_sw_gcsr(struct loongarch_csrs *csr, int gid)
{
	return csr->csrs[gid];
}

static inline void kvm_write_sw_gcsr(struct loongarch_csrs *csr, int gid, unsigned long val)
{
	csr->csrs[gid] = val;
}

static inline void kvm_set_sw_gcsr(struct loongarch_csrs *csr, int gid, unsigned long val)
{
	csr->csrs[gid] |= val;
}

static inline void kvm_change_sw_gcsr(struct loongarch_csrs *csr, int gid, unsigned int mask,
		unsigned long val)
{
	unsigned long _mask = mask;

	csr->csrs[gid] &= ~_mask;
	csr->csrs[gid] |= val & _mask;
}

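/*
 * Dispatch helpers for the CSR get/set paths: each macro compares csrid
 * against id and, on a match, performs the access and returns 0 from the
 * enclosing function; otherwise it falls through to the next candidate.
 * Illustrative use (CSR indices assumed from <asm/loongarch.h>):
 *
 *	GET_HW_GCSR(id, LOONGARCH_CSR_CRMD, v);
 *	GET_SW_GCSR(csr, id, LOONGARCH_CSR_IMPCTL1, v);
 */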
#define GET_HW_GCSR(id, csrid, v)				\
	do {							\
		if (csrid == id) {				\
			*v = (long)kvm_read_hw_gcsr(csrid);	\
			return 0;				\
		}						\
	} while (0)

#define GET_SW_GCSR(csr, id, csrid, v)				\
	do {							\
		if (csrid == id) {				\
			*v = kvm_read_sw_gcsr(csr, id);		\
			return 0;				\
		}						\
	} while (0)

#define SET_HW_GCSR(csr, id, csrid, v)				\
	do {							\
		if (csrid == id) {				\
			kvm_write_hw_gcsr(csr, csrid, *v);	\
			return 0;				\
		}						\
	} while (0)

#define SET_SW_GCSR(csr, id, csrid, v)				\
	do {							\
		if (csrid == id) {				\
			kvm_write_sw_gcsr(csr, csrid, *v);	\
			return 0;				\
		}						\
	} while (0)

int _kvm_init_iocsr(struct kvm *kvm);
int _kvm_set_iocsr(struct kvm *kvm, struct kvm_iocsr_entry __user *argp);
int _kvm_get_iocsr(struct kvm *kvm, struct kvm_iocsr_entry __user *argp);

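/*
 * KVM_PMU_PLV_ENABLE collects every privilege-level enable bit of the
 * PERFCTRL CSRs. CASE_WRITE_HW_PMU() below handles a guest write to a
 * perf control CSR: if any PLV enable bit is set it sets CSR_GCFG_GPERF
 * in GCFG, writes the hardware CSR with CSR_PERFCTRL_GMOD added and flags
 * KVM_LARCH_PERF in vcpu->arch.aux_inuse; otherwise only the software
 * copy is updated. Note the macro returns from the enclosing (void)
 * function on a match.
 */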
#define KVM_PMU_PLV_ENABLE	(CSR_PERFCTRL_PLV0 |	\
				CSR_PERFCTRL_PLV1 |	\
				CSR_PERFCTRL_PLV2 |	\
				CSR_PERFCTRL_PLV3)

#define CASE_WRITE_HW_PMU(vcpu, csr, id, csrid, v)				\
	do {									\
		if (csrid == id) {						\
			if (v & KVM_PMU_PLV_ENABLE) {				\
				write_csr_gcfg(read_csr_gcfg() | CSR_GCFG_GPERF); \
				kvm_write_hw_gcsr(csr, csrid, v | CSR_PERFCTRL_GMOD); \
				vcpu->arch.aux_inuse |= KVM_LARCH_PERF;		\
				return;						\
			} else {						\
				kvm_write_sw_gcsr(csr, csrid, v);		\
				return;						\
			}							\
		}								\
	} while (0)

#endif /* __LOONGARCH_KVM_CSR_H__ */