/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ARCH_MIPS_LOCAL_H
#define _ARCH_MIPS_LOCAL_H

#include <linux/percpu.h>
#include <linux/bitops.h>
#include <linux/atomic.h>
#include <asm/cmpxchg.h>
#include <asm/compiler.h>
#include <asm/war.h>

typedef struct
{
	atomic_long_t a;
} local_t;

#define LOCAL_INIT(i)	{ ATOMIC_LONG_INIT(i) }

#define local_read(l)	atomic_long_read(&(l)->a)
#define local_set(l, i) atomic_long_set(&(l)->a, (i))

#define local_add(i, l) atomic_long_add((i), (&(l)->a))
#define local_sub(i, l) atomic_long_sub((i), (&(l)->a))
#define local_inc(l)	atomic_long_inc(&(l)->a)
#define local_dec(l)	atomic_long_dec(&(l)->a)

/*
 * Same as above, but return the resulting value.
 */
static __inline__ long local_add_return(long i, local_t *l)
{
	unsigned long result;

	if (kernel_uses_llsc && IS_ENABLED(CONFIG_WAR_R10000_LLSC)) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	arch=r4000				\n"
			__SYNC(full, loongson3_war) "			\n"
		"1:"	__LL	"%1, %2		# local_add_return	\n"
		"	addu	%0, %1, %3				\n"
			__SC	"%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	.set	pop					\n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	"MIPS_ISA_ARCH_LEVEL"			\n"
			__SYNC(full, loongson3_war) "			\n"
		"1:"	__LL	"%1, %2		# local_add_return	\n"
		"	addu	%0, %1, %3				\n"
			__SC	"%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	.set	pop					\n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = l->a.counter;
		result += i;
		l->a.counter = result;
		local_irq_restore(flags);
	}

	return result;
}

static __inline__ long local_sub_return(long i, local_t *l)
{
	unsigned long result;

	if (kernel_uses_llsc && IS_ENABLED(CONFIG_WAR_R10000_LLSC)) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	arch=r4000				\n"
			__SYNC(full, loongson3_war) "			\n"
		"1:"	__LL	"%1, %2		# local_sub_return	\n"
		"	subu	%0, %1, %3				\n"
			__SC	"%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	.set	pop					\n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	"MIPS_ISA_ARCH_LEVEL"			\n"
			__SYNC(full, loongson3_war) "			\n"
		"1:"	__LL	"%1, %2		# local_sub_return	\n"
		"	subu	%0, %1, %3				\n"
			__SC	"%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	.set	pop					\n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = l->a.counter;
		result -= i;
		l->a.counter = result;
		local_irq_restore(flags);
	}

	return result;
}
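
/*
 * A minimal usage sketch, not part of this header's API (the per-CPU
 * variable pkt_bytes and function account_bytes are hypothetical):
 * the *_return variants hand back the new value, which suits per-CPU
 * statistics that occasionally need the running total.
 *
 *	static DEFINE_PER_CPU(local_t, pkt_bytes) = LOCAL_INIT(0);
 *
 *	static long account_bytes(long len)
 *	{
 *		long total;
 *
 *		// local_t is only safe against this CPU, so keep
 *		// preemption disabled around the update.
 *		total = local_add_return(len, get_cpu_ptr(&pkt_bytes));
 *		put_cpu_ptr(&pkt_bytes);
 *		return total;
 *	}
 */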

#define local_cmpxchg(l, o, n) \
	((long)cmpxchg_local(&((l)->a.counter), (o), (n)))
#define local_xchg(l, n) (atomic_long_xchg((&(l)->a), (n)))

/**
 * local_add_unless - add unless the number is a given value
 * @l: pointer of type local_t
 * @a: the amount to add to l...
 * @u: ...unless l is equal to u.
 *
 * Atomically adds @a to @l, so long as it was not @u.
 * Returns non-zero if @l was not @u, and zero otherwise.
 */
#define local_add_unless(l, a, u)				\
({								\
	long c, old;						\
	c = local_read(l);					\
	while (c != (u) && (old = local_cmpxchg((l), c, c + (a))) != c) \
		c = old;					\
	c != (u);						\
})
#define local_inc_not_zero(l) local_add_unless((l), 1, 0)
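
/*
 * Illustrative sketch only (obj and its refs field are hypothetical):
 * local_add_unless()/local_inc_not_zero() support a lookup pattern
 * where a reference may only be taken on an object whose count has
 * not already dropped to zero.
 *
 *	if (!local_inc_not_zero(&obj->refs))
 *		return NULL;	// already at zero: object is being freed
 */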

#define local_dec_return(l) local_sub_return(1, (l))
#define local_inc_return(l) local_add_return(1, (l))

/*
 * local_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @l: pointer of type local_t
 *
 * Atomically subtracts @i from @l and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define local_sub_and_test(i, l) (local_sub_return((i), (l)) == 0)

/*
 * local_inc_and_test - increment and test
 * @l: pointer of type local_t
 *
 * Atomically increments @l by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define local_inc_and_test(l) (local_inc_return(l) == 0)

/*
 * local_dec_and_test - decrement by 1 and test
 * @l: pointer of type local_t
 *
 * Atomically decrements @l by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define local_dec_and_test(l) (local_sub_return(1, (l)) == 0)

/*
 * local_add_negative - add and test if negative
 * @l: pointer of type local_t
 * @i: integer value to add
 *
 * Atomically adds @i to @l and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define local_add_negative(i, l) (local_add_return(i, (l)) < 0)
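
/*
 * A short sketch of the test helpers above (budget, refill_budget()
 * and throttle() are hypothetical): the update and the zero/sign
 * check happen in one atomic operation, so no second, racy read of
 * the counter is needed.
 *
 *	if (local_dec_and_test(&budget))	// hit exactly zero?
 *		refill_budget();
 *	if (local_add_negative(-len, &budget))	// dropped below zero?
 *		throttle();
 */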

/* Use these for per-cpu local_t variables: on some archs they are
 * much more efficient than these naive implementations.  Note they take
 * a local_t pointer, just like the atomic variants above.
 */

#define __local_inc(l)		((l)->a.counter++)
#define __local_dec(l)		((l)->a.counter--)
#define __local_add(i, l)	((l)->a.counter += (i))
#define __local_sub(i, l)	((l)->a.counter -= (i))
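
/*
 * Illustrative only (len and pkt_bytes as above are hypothetical):
 * the __local_* forms compile to plain read-modify-write sequences,
 * so they are only safe when nothing on this CPU can race with the
 * update, e.g. with interrupts disabled.
 *
 *	unsigned long flags;
 *
 *	local_irq_save(flags);
 *	__local_add(len, this_cpu_ptr(&pkt_bytes));
 *	local_irq_restore(flags);
 */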

#endif /* _ARCH_MIPS_LOCAL_H */