/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 2020 Loongson Technology Ltd.
 */
#ifndef __ASM_PERCPU_H
#define __ASM_PERCPU_H

#include <asm/cmpxchg.h>
#include <asm/loongarch.h>	/* csr_write64(), PERCPU_BASE_KS */

/*
 * The "address" (in fact, offset from $r21) of a per-CPU variable is close to
 * the loading address of main kernel image, but far from where the modules are
 * loaded. Tell the compiler this fact when using explicit relocs.
 */
#if defined(MODULE) && defined(CONFIG_AS_HAS_EXPLICIT_RELOCS)
# if __has_attribute(model)
#  define PER_CPU_ATTRIBUTES	__attribute__((model("extreme")))
# else
#  error compiler support for the model attribute is necessary when a recent assembler is used
# endif
#endif

/* Use r21 for fast access */
register unsigned long __my_cpu_offset __asm__("$r21");

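/*
 * Publish the per-CPU offset both in $r21 (for normal kernel code) and in
 * the PERCPU_BASE_KS scratch CSR, so that low-level entry code can reload it
 * in contexts where $r21 is not trustworthy (e.g. when coming from user space).
 */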
static inline void set_my_cpu_offset(unsigned long off)
{
	__my_cpu_offset = off;
	csr_write64(off, PERCPU_BASE_KS);
}
#define __my_cpu_offset __my_cpu_offset

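/*
 * PERCPU_OP() generates __percpu_add(), __percpu_and() and __percpu_or().
 * The LoongArch AM* atomics return the *old* memory value in %[ret], so the
 * updated value is reconstructed as "ret c_op val" before returning; this is
 * what lets _percpu_add_return() reuse __percpu_add() directly.
 */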
#define PERCPU_OP(op, asm_op, c_op)					\
static __always_inline unsigned long __percpu_##op(void *ptr,		\
			unsigned long val, int size)			\
{									\
	unsigned long ret;						\
									\
	switch (size) {							\
	case 4:								\
		__asm__ __volatile__(					\
		"am"#asm_op".w"	" %[ret], %[val], %[ptr]	\n"	\
		: [ret] "=&r" (ret), [ptr] "+ZB"(*(u32 *)ptr)		\
		: [val] "r" (val));					\
		break;							\
	case 8:								\
		__asm__ __volatile__(					\
		"am"#asm_op".d"	" %[ret], %[val], %[ptr]	\n"	\
		: [ret] "=&r" (ret), [ptr] "+ZB"(*(u64 *)ptr)		\
		: [val] "r" (val));					\
		break;							\
	default:							\
		ret = 0;						\
		BUILD_BUG();						\
	}								\
									\
	return ret c_op val;						\
}

PERCPU_OP(add, add, +)
PERCPU_OP(and, and, &)
PERCPU_OP(or, or, |)
#undef PERCPU_OP

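/*
 * A per-CPU read is a single "ldx" with $r21 as the base register and the
 * variable's offset as the index, so the whole access happens on one CPU
 * without any explicit preemption protection.
 */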
static __always_inline unsigned long __percpu_read(void __percpu *ptr, int size)
{
	unsigned long ret;

	switch (size) {
	case 1:
		__asm__ __volatile__ ("ldx.b %[ret], $r21, %[ptr]	\n"
		: [ret] "=&r"(ret)
		: [ptr] "r"(ptr)
		: "memory");
		break;
	case 2:
		__asm__ __volatile__ ("ldx.h %[ret], $r21, %[ptr]	\n"
		: [ret] "=&r"(ret)
		: [ptr] "r"(ptr)
		: "memory");
		break;
	case 4:
		__asm__ __volatile__ ("ldx.w %[ret], $r21, %[ptr]	\n"
		: [ret] "=&r"(ret)
		: [ptr] "r"(ptr)
		: "memory");
		break;
	case 8:
		__asm__ __volatile__ ("ldx.d %[ret], $r21, %[ptr]	\n"
		: [ret] "=&r"(ret)
		: [ptr] "r"(ptr)
		: "memory");
		break;
	default:
		ret = 0;
		BUILD_BUG();
	}

	return ret;
}

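/*
 * Likewise, a per-CPU write is a single "stx" relative to $r21 and needs no
 * explicit preemption protection of its own.
 */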
static __always_inline void __percpu_write(void __percpu *ptr, unsigned long val, int size)
{
	switch (size) {
	case 1:
		__asm__ __volatile__("stx.b %[val], $r21, %[ptr]	\n"
		:
		: [val] "r" (val), [ptr] "r" (ptr)
		: "memory");
		break;
	case 2:
		__asm__ __volatile__("stx.h %[val], $r21, %[ptr]	\n"
		:
		: [val] "r" (val), [ptr] "r" (ptr)
		: "memory");
		break;
	case 4:
		__asm__ __volatile__("stx.w %[val], $r21, %[ptr]	\n"
		:
		: [val] "r" (val), [ptr] "r" (ptr)
		: "memory");
		break;
	case 8:
		__asm__ __volatile__("stx.d %[val], $r21, %[ptr]	\n"
		:
		: [val] "r" (val), [ptr] "r" (ptr)
		: "memory");
		break;
	default:
		BUILD_BUG();
	}
}

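/*
 * 1- and 2-byte exchanges are emulated via __xchg_small() from
 * <asm/cmpxchg.h>; 4- and 8-byte exchanges use the native amswap.w/amswap.d
 * instructions. Preemption is disabled by the _pcp_protect() caller, so the
 * pointer obtained from raw_cpu_ptr() stays valid for the whole operation.
 */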
static __always_inline unsigned long __percpu_xchg(void *ptr, unsigned long val, int size)
{
	switch (size) {
	case 1:
	case 2:
		return __xchg_small((volatile void *)ptr, val, size);

	case 4:
		return __xchg_asm("amswap.w", (volatile u32 *)ptr, (u32)val);

	case 8:
		return __xchg_asm("amswap.d", (volatile u64 *)ptr, (u64)val);

	default:
		BUILD_BUG();
	}

	return 0;
}

/* this_cpu_cmpxchg */
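/*
 * Unlike the ldx/stx helpers above, cmpxchg_local() operates on an address
 * already resolved by raw_cpu_ptr(), so preemption must stay disabled across
 * the whole sequence: otherwise the task could migrate and update another
 * CPU's copy of the variable.
 */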
#define _protect_cmpxchg_local(pcp, o, n)			\
({								\
	typeof(*raw_cpu_ptr(&(pcp))) __ret;			\
	preempt_disable_notrace();				\
	__ret = cmpxchg_local(raw_cpu_ptr(&(pcp)), o, n);	\
	preempt_enable_notrace();				\
	__ret;							\
})

#define _percpu_read(pcp)						\
({									\
	typeof(pcp) __retval;						\
	__retval = (typeof(pcp))__percpu_read(&(pcp), sizeof(pcp));	\
	__retval;							\
})

#define _percpu_write(pcp, val)						\
do {									\
	__percpu_write(&(pcp), (unsigned long)(val), sizeof(pcp));	\
} while (0)

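/*
 * _pcp_protect() wraps the atomic helpers above: the
 * preempt_disable_notrace()/preempt_enable_notrace() pair keeps the task on
 * one CPU from the raw_cpu_ptr() lookup until the atomic operation completes.
 */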
#define _pcp_protect(operation, pcp, val)			\
({								\
	typeof(pcp) __retval;					\
	preempt_disable_notrace();				\
	__retval = (typeof(pcp))operation(raw_cpu_ptr(&(pcp)),	\
					  (val), sizeof(pcp));	\
	preempt_enable_notrace();				\
	__retval;						\
})

#define _percpu_add(pcp, val) \
_pcp_protect(__percpu_add, pcp, val)

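/* __percpu_add() already returns the new value, so add_return is the same op. */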
#define _percpu_add_return(pcp, val) _percpu_add(pcp, val)

#define _percpu_and(pcp, val) \
_pcp_protect(__percpu_and, pcp, val)

#define _percpu_or(pcp, val) \
_pcp_protect(__percpu_or, pcp, val)

#define _percpu_xchg(pcp, val) ((typeof(pcp)) \
	_pcp_protect(__percpu_xchg, pcp, (unsigned long)(val)))

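/*
 * Only the sizes below are overridden; everything else (e.g. 1- and 2-byte
 * add/and/or) falls back to the generic implementations provided by
 * <asm-generic/percpu.h>.
 */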
#define this_cpu_add_4(pcp, val) _percpu_add(pcp, val)
#define this_cpu_add_8(pcp, val) _percpu_add(pcp, val)

#define this_cpu_add_return_4(pcp, val) _percpu_add_return(pcp, val)
#define this_cpu_add_return_8(pcp, val) _percpu_add_return(pcp, val)

#define this_cpu_and_4(pcp, val) _percpu_and(pcp, val)
#define this_cpu_and_8(pcp, val) _percpu_and(pcp, val)

#define this_cpu_or_4(pcp, val) _percpu_or(pcp, val)
#define this_cpu_or_8(pcp, val) _percpu_or(pcp, val)

#define this_cpu_read_1(pcp) _percpu_read(pcp)
#define this_cpu_read_2(pcp) _percpu_read(pcp)
#define this_cpu_read_4(pcp) _percpu_read(pcp)
#define this_cpu_read_8(pcp) _percpu_read(pcp)

#define this_cpu_write_1(pcp, val) _percpu_write(pcp, val)
#define this_cpu_write_2(pcp, val) _percpu_write(pcp, val)
#define this_cpu_write_4(pcp, val) _percpu_write(pcp, val)
#define this_cpu_write_8(pcp, val) _percpu_write(pcp, val)

#define this_cpu_xchg_1(pcp, val) _percpu_xchg(pcp, val)
#define this_cpu_xchg_2(pcp, val) _percpu_xchg(pcp, val)
#define this_cpu_xchg_4(pcp, val) _percpu_xchg(pcp, val)
#define this_cpu_xchg_8(pcp, val) _percpu_xchg(pcp, val)

#define this_cpu_cmpxchg_1(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
#define this_cpu_cmpxchg_2(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
#define this_cpu_cmpxchg_4(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
#define this_cpu_cmpxchg_8(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)

#include <asm-generic/percpu.h>

#endif /* __ASM_PERCPU_H */