/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Atomic operations that C can't guarantee us. Useful for
 * resource counting etc.
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2020 Loongson Technology Corporation Limited
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/cpu-features.h>
#include <asm/cmpxchg.h>

#if __SIZEOF_LONG__ == 4
#define __LL		"ll.w	"
#define __SC		"sc.w	"
#define __AMADD		"amadd.w	"
#define __AMAND_DB	"amand_db.w	"
#define __AMOR_DB	"amor_db.w	"
#define __AMXOR_DB	"amxor_db.w	"
#elif __SIZEOF_LONG__ == 8
#define __LL		"ll.d	"
#define __SC		"sc.d	"
#define __AMADD		"amadd.d	"
#define __AMAND_DB	"amand_db.d	"
#define __AMOR_DB	"amor_db.d	"
#define __AMXOR_DB	"amxor_db.d	"
#endif
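
/*
 * These mnemonic fragments are pasted into inline asm strings by whatever
 * code consumes them. A minimal, hand-expanded sketch (hypothetical
 * consumer, not from this file), on a 64-bit kernel:
 *
 *	__asm__ __volatile__("1: " __LL "%0, %1\n" : ...);
 *
 * assembles as "1: ll.d %0, %1", while a 32-bit kernel gets "ll.w".
 */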

#define ATOMIC_INIT(i)	  { (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)	  READ_ONCE((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)  WRITE_ONCE((v)->counter, (i))
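
/*
 * Illustrative usage (hypothetical caller, not part of this file):
 *
 *	static atomic_t nr_users = ATOMIC_INIT(0);
 *
 *	atomic_set(&nr_users, 1);
 *	if (atomic_read(&nr_users) > 0)
 *		do_something();
 *
 * These are plain loads/stores via READ_ONCE()/WRITE_ONCE(); they imply
 * no memory barriers.
 */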

/*
 * The LoongArch AM* instructions write the old memory value to rd; using
 * $zero as rd discards it, giving a plain (non-value-returning) atomic op.
 */
#define ATOMIC_OP(op, I, asm_op)					\
static __inline__ void atomic_##op(int i, atomic_t * v)			\
{									\
	__asm__ __volatile__(						\
	"am"#asm_op".w" " $zero, %1, %0	\n"				\
	: "+ZB" (v->counter)						\
	: "r" (I)							\
	: "memory");							\
}
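
/*
 * Hand-expanded for illustration (not literal file contents):
 * ATOMIC_OP(add, i, add) produces
 *
 *	static __inline__ void atomic_add(int i, atomic_t *v)
 *	{
 *		__asm__ __volatile__(
 *		"amadd.w $zero, %1, %0\n"
 *		: "+ZB" (v->counter) : "r" (i) : "memory");
 *	}
 */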

/*
 * The _db forms of the AM* instructions include full barrier semantics:
 * the fully ordered variants below use them (mb = _db), while the
 * _relaxed variants use the bare instructions (mb empty).
 */
#define ATOMIC_OP_RETURN(op, I, asm_op, c_op, mb, suffix)		\
static __inline__ int atomic_##op##_return##suffix(int i, atomic_t * v)	\
{									\
	int result;							\
									\
	__asm__ __volatile__(						\
	"am"#asm_op#mb".w" " %1, %2, %0	\n"				\
	: "+ZB" (v->counter), "=&r" (result)				\
	: "r" (I)							\
	: "memory");							\
									\
	return result c_op I;						\
}

#define ATOMIC_FETCH_OP(op, I, asm_op, mb, suffix)			\
static __inline__ int atomic_fetch_##op##suffix(int i, atomic_t * v)	\
{									\
	int result;							\
									\
	__asm__ __volatile__(						\
	"am"#asm_op#mb".w" " %1, %2, %0	\n"				\
	: "+ZB" (v->counter), "=&r" (result)				\
	: "r" (I)							\
	: "memory");							\
									\
	return result;							\
}

#define ATOMIC_OPS(op, I, asm_op, c_op)					\
	ATOMIC_OP(op, I, asm_op)					\
	ATOMIC_OP_RETURN(op, I, asm_op, c_op, _db,         )		\
	ATOMIC_OP_RETURN(op, I, asm_op, c_op,    , _relaxed)		\
	ATOMIC_FETCH_OP(op, I, asm_op, _db,         )			\
	ATOMIC_FETCH_OP(op, I, asm_op,    , _relaxed)

ATOMIC_OPS(add, i, add, +)
/* There is no amsub instruction: sub is amadd of the negated operand. */
ATOMIC_OPS(sub, -i, add, +)
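
/*
 * Hand-expanded for illustration (not literal file contents):
 * ATOMIC_OPS(sub, -i, add, +) generates, among others,
 *
 *	static __inline__ int atomic_sub_return(int i, atomic_t *v)
 *	{
 *		int result;
 *
 *		__asm__ __volatile__(
 *		"amadd_db.w %1, %2, %0\n"
 *		: "+ZB" (v->counter), "=&r" (result)
 *		: "r" (-i) : "memory");
 *
 *		return result + -i;
 *	}
 */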

#define atomic_add_return		atomic_add_return
#define atomic_add_return_acquire	atomic_add_return
#define atomic_add_return_release	atomic_add_return
#define atomic_add_return_relaxed	atomic_add_return_relaxed
#define atomic_sub_return		atomic_sub_return
#define atomic_sub_return_acquire	atomic_sub_return
#define atomic_sub_return_release	atomic_sub_return
#define atomic_sub_return_relaxed	atomic_sub_return_relaxed
#define atomic_fetch_add		atomic_fetch_add
#define atomic_fetch_add_acquire	atomic_fetch_add
#define atomic_fetch_add_release	atomic_fetch_add
#define atomic_fetch_add_relaxed	atomic_fetch_add_relaxed
#define atomic_fetch_sub		atomic_fetch_sub
#define atomic_fetch_sub_acquire	atomic_fetch_sub
#define atomic_fetch_sub_release	atomic_fetch_sub
#define atomic_fetch_sub_relaxed	atomic_fetch_sub_relaxed

#undef ATOMIC_OPS

#define ATOMIC_OPS(op, I, asm_op)					\
	ATOMIC_OP(op, I, asm_op)					\
	ATOMIC_FETCH_OP(op, I, asm_op, _db,         )			\
	ATOMIC_FETCH_OP(op, I, asm_op,    , _relaxed)

ATOMIC_OPS(and, i, and)
ATOMIC_OPS(or, i, or)
ATOMIC_OPS(xor, i, xor)

#define atomic_fetch_and		atomic_fetch_and
#define atomic_fetch_and_acquire	atomic_fetch_and
#define atomic_fetch_and_release	atomic_fetch_and
#define atomic_fetch_and_relaxed	atomic_fetch_and_relaxed
#define atomic_fetch_or			atomic_fetch_or
#define atomic_fetch_or_acquire		atomic_fetch_or
#define atomic_fetch_or_release		atomic_fetch_or
#define atomic_fetch_or_relaxed		atomic_fetch_or_relaxed
#define atomic_fetch_xor		atomic_fetch_xor
#define atomic_fetch_xor_acquire	atomic_fetch_xor
#define atomic_fetch_xor_release	atomic_fetch_xor
#define atomic_fetch_xor_relaxed	atomic_fetch_xor_relaxed

#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

static __inline__ int atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
	int prev, rc;

	__asm__ __volatile__ (
		"0:	ll.w	%[p],  %[c]\n"
		"	beq	%[p],  %[u], 1f\n"
		"	add.w	%[rc], %[p], %[a]\n"
		"	sc.w	%[rc], %[c]\n"
		"	beqz	%[rc], 0b\n"
		"	b	2f\n"
		"1:\n"
		__WEAK_LLSC_MB
		"2:\n"
		: [p]"=&r" (prev), [rc]"=&r" (rc),
		  [c]"=ZB" (v->counter)
		: [a]"r" (a), [u]"r" (u)
		: "memory");

	return prev;
}
#define atomic_fetch_add_unless atomic_fetch_add_unless
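
/*
 * Illustrative usage (hypothetical caller, not part of this file):
 * take a reference only while the count is non-zero, i.e. the object
 * has not already been freed:
 *
 *	if (atomic_fetch_add_unless(&obj->refcnt, 1, 0) == 0)
 *		return NULL;	// object was already dead
 */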

/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
	int result;
	int temp;

	/* A compile-time-constant @i lets -@i use addi.w's 12-bit immediate. */
	if (__builtin_constant_p(i)) {
		__asm__ __volatile__(
		"1:	ll.w	%1, %2	# atomic_sub_if_positive	\n"
		"	addi.w	%0, %1, %3				\n"
		"	or	%1, %0, $zero				\n"
		"	blt	%0, $zero, 2f				\n"
		"	sc.w	%1, %2					\n"
		"	beq	$zero, %1, 1b				\n"
		"2:							\n"
		__WEAK_LLSC_MB
		: "=&r" (result), "=&r" (temp), "+ZC" (v->counter)
		: "I" (-i));
	} else {
		__asm__ __volatile__(
		"1:	ll.w	%1, %2	# atomic_sub_if_positive	\n"
		"	sub.w	%0, %1, %3				\n"
		"	or	%1, %0, $zero				\n"
		"	blt	%0, $zero, 2f				\n"
		"	sc.w	%1, %2					\n"
		"	beq	$zero, %1, 1b				\n"
		"2:							\n"
		__WEAK_LLSC_MB
		: "=&r" (result), "=&r" (temp), "+ZC" (v->counter)
		: "r" (i));
	}

	return result;
}

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
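
/*
 * Illustrative usage (hypothetical caller, not part of this file):
 * claim a one-shot flag, observing the value that was there before:
 *
 *	old = atomic_cmpxchg(&flag, 0, 1);
 *	if (old == 0)
 *		we_won_the_race();
 */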

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)	atomic_sub_if_positive(1, v)
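
/*
 * Illustrative usage (hypothetical caller): the return value is the old
 * value minus one, so a negative result means @v was already <= 0 and
 * nothing was decremented:
 *
 *	if (atomic_dec_if_positive(&available) < 0)
 *		wait_for_slot();
 */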

#ifdef CONFIG_64BIT

#define ATOMIC64_INIT(i)    { (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v) READ_ONCE((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v, i) WRITE_ONCE((v)->counter, (i))

#define ATOMIC64_OP(op, I, asm_op)					\
static __inline__ void atomic64_##op(long i, atomic64_t * v)		\
{									\
	__asm__ __volatile__(						\
	"am"#asm_op".d " " $zero, %1, %0	\n"			\
	: "+ZB" (v->counter)						\
	: "r" (I)							\
	: "memory");							\
}

#define ATOMIC64_OP_RETURN(op, I, asm_op, c_op, mb, suffix)			\
static __inline__ long atomic64_##op##_return##suffix(long i, atomic64_t * v)	\
{										\
	long result;								\
										\
	__asm__ __volatile__(							\
	"am"#asm_op#mb".d " " %1, %2, %0	\n"				\
	: "+ZB" (v->counter), "=&r" (result)					\
	: "r" (I)								\
	: "memory");								\
										\
	return result c_op I;							\
}

#define ATOMIC64_FETCH_OP(op, I, asm_op, mb, suffix)				\
static __inline__ long atomic64_fetch_##op##suffix(long i, atomic64_t * v)	\
{										\
	long result;								\
										\
	__asm__ __volatile__(							\
	"am"#asm_op#mb".d " " %1, %2, %0	\n"				\
	: "+ZB" (v->counter), "=&r" (result)					\
	: "r" (I)								\
	: "memory");								\
										\
	return result;								\
}

#define ATOMIC64_OPS(op, I, asm_op, c_op)				\
	ATOMIC64_OP(op, I, asm_op)					\
	ATOMIC64_OP_RETURN(op, I, asm_op, c_op, _db,         )		\
	ATOMIC64_OP_RETURN(op, I, asm_op, c_op,    , _relaxed)		\
	ATOMIC64_FETCH_OP(op, I, asm_op, _db,         )			\
	ATOMIC64_FETCH_OP(op, I, asm_op,    , _relaxed)

ATOMIC64_OPS(add, i, add, +)
ATOMIC64_OPS(sub, -i, add, +)

#define atomic64_add_return		atomic64_add_return
#define atomic64_add_return_acquire	atomic64_add_return
#define atomic64_add_return_release	atomic64_add_return
#define atomic64_add_return_relaxed	atomic64_add_return_relaxed
#define atomic64_sub_return		atomic64_sub_return
#define atomic64_sub_return_acquire	atomic64_sub_return
#define atomic64_sub_return_release	atomic64_sub_return
#define atomic64_sub_return_relaxed	atomic64_sub_return_relaxed
#define atomic64_fetch_add		atomic64_fetch_add
#define atomic64_fetch_add_acquire	atomic64_fetch_add
#define atomic64_fetch_add_release	atomic64_fetch_add
#define atomic64_fetch_add_relaxed	atomic64_fetch_add_relaxed
#define atomic64_fetch_sub		atomic64_fetch_sub
#define atomic64_fetch_sub_acquire	atomic64_fetch_sub
#define atomic64_fetch_sub_release	atomic64_fetch_sub
#define atomic64_fetch_sub_relaxed	atomic64_fetch_sub_relaxed

#undef ATOMIC64_OPS

#define ATOMIC64_OPS(op, I, asm_op)					\
	ATOMIC64_OP(op, I, asm_op)					\
	ATOMIC64_FETCH_OP(op, I, asm_op, _db,         )			\
	ATOMIC64_FETCH_OP(op, I, asm_op,    , _relaxed)

ATOMIC64_OPS(and, i, and)
ATOMIC64_OPS(or, i, or)
ATOMIC64_OPS(xor, i, xor)

#define atomic64_fetch_and		atomic64_fetch_and
#define atomic64_fetch_and_acquire	atomic64_fetch_and
#define atomic64_fetch_and_release	atomic64_fetch_and
#define atomic64_fetch_and_relaxed	atomic64_fetch_and_relaxed
#define atomic64_fetch_or		atomic64_fetch_or
#define atomic64_fetch_or_acquire	atomic64_fetch_or
#define atomic64_fetch_or_release	atomic64_fetch_or
#define atomic64_fetch_or_relaxed	atomic64_fetch_or_relaxed
#define atomic64_fetch_xor		atomic64_fetch_xor
#define atomic64_fetch_xor_acquire	atomic64_fetch_xor
#define atomic64_fetch_xor_release	atomic64_fetch_xor
#define atomic64_fetch_xor_relaxed	atomic64_fetch_xor_relaxed

#undef ATOMIC64_OPS
#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP

static __inline__ long atomic64_fetch_add_unless(atomic64_t *v, long a, long u)
{
	long prev, rc;

	__asm__ __volatile__ (
		"0:	ll.d	%[p],  %[c]\n"
		"	beq	%[p],  %[u], 1f\n"
		"	add.d	%[rc], %[p], %[a]\n"
		"	sc.d	%[rc], %[c]\n"
		"	beqz	%[rc], 0b\n"
		"	b	2f\n"
		"1:\n"
		__WEAK_LLSC_MB
		"2:\n"
		: [p]"=&r" (prev), [rc]"=&r" (rc),
		  [c]"=ZB" (v->counter)
		: [a]"r" (a), [u]"r" (u)
		: "memory");

	return prev;
}
#define atomic64_fetch_add_unless atomic64_fetch_add_unless

/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic
 * variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
	long result;
	long temp;

	/* A compile-time-constant @i lets -@i use addi.d's 12-bit immediate. */
	if (__builtin_constant_p(i)) {
		__asm__ __volatile__(
		"1:	ll.d	%1, %2	# atomic64_sub_if_positive	\n"
		"	addi.d	%0, %1, %3				\n"
		"	or	%1, %0, $zero				\n"
		"	blt	%0, $zero, 2f				\n"
		"	sc.d	%1, %2					\n"
		"	beq	%1, $zero, 1b				\n"
		"2:							\n"
		__WEAK_LLSC_MB
		: "=&r" (result), "=&r" (temp), "+ZC" (v->counter)
		: "I" (-i));
	} else {
		__asm__ __volatile__(
		"1:	ll.d	%1, %2	# atomic64_sub_if_positive	\n"
		"	sub.d	%0, %1, %3				\n"
		"	or	%1, %0, $zero				\n"
		"	blt	%0, $zero, 2f				\n"
		"	sc.d	%1, %2					\n"
		"	beq	%1, $zero, 1b				\n"
		"2:							\n"
		__WEAK_LLSC_MB
		: "=&r" (result), "=&r" (temp), "+ZC" (v->counter)
		: "r" (i));
	}

	return result;
}

#define atomic64_cmpxchg(v, o, n) \
	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)	atomic64_sub_if_positive(1, v)

#endif /* CONFIG_64BIT */

#endif /* _ASM_ATOMIC_H */