/* SPDX-License-Identifier: GPL-2.0 */
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2020 Loongson Technology Corporation Limited
 */
#ifndef _ARCH_LOONGARCH_LOCAL_H
#define _ARCH_LOONGARCH_LOCAL_H

#include <linux/percpu.h>
#include <linux/bitops.h>
#include <linux/atomic.h>
#include <asm/cmpxchg.h>
typedef struct {
	atomic_long_t a;
} local_t;

#define LOCAL_INIT(i)	{ ATOMIC_LONG_INIT(i) }
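
/*
 * Minimal usage sketch (illustrative only; "pkt_count" is a hypothetical
 * per-CPU counter, not part of this file). A local_t is meant to live in
 * per-CPU data, one instance per CPU, updated only by its owning CPU:
 *
 *	static DEFINE_PER_CPU(local_t, pkt_count) = LOCAL_INIT(0);
 *
 *	local_inc(this_cpu_ptr(&pkt_count));
 *	pr_debug("packets: %ld\n", local_read(this_cpu_ptr(&pkt_count)));
 */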

#define local_read(l)	atomic_long_read(&(l)->a)
#define local_set(l, i) atomic_long_set(&(l)->a, (i))

#define local_add(i, l) atomic_long_add((i), (&(l)->a))
#define local_sub(i, l) atomic_long_sub((i), (&(l)->a))
#define local_inc(l)	atomic_long_inc(&(l)->a)
#define local_dec(l)	atomic_long_dec(&(l)->a)
/*
 * Same as above, but return the result value
 */
static inline long local_add_return(long i, local_t *l)
{
	unsigned long result;

	/* AMADD atomically adds %2 to %0 and leaves the old value in %1. */
	__asm__ __volatile__(
	"   " __AMADD " %1, %2, %0      \n"
	: "+ZB" (l->a.counter), "=&r" (result)
	: "r" (i)
	: "memory");

	return result + i;
}

static inline long local_sub_return(long i, local_t *l)
{
	unsigned long result;

	/* There is no atomic subtract, so atomically add the negated value. */
	__asm__ __volatile__(
	"   " __AMADD " %1, %2, %0      \n"
	: "+ZB" (l->a.counter), "=&r" (result)
	: "r" (-i)
	: "memory");

	return result - i;
}

#define local_cmpxchg(l, o, n) \
	((long)cmpxchg_local(&((l)->a.counter), (o), (n)))
#define local_xchg(l, n) (atomic_long_xchg((&(l)->a), (n)))
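
/*
 * Contract sketch (hypothetical values, not part of this file):
 * local_cmpxchg() returns the value it observed in @l and stores @n only
 * if that value equalled @o:
 *
 *	old = local_cmpxchg(l, 5, 6);
 *	if (old == 5)
 *		...;		(the swap happened, @l now holds 6)
 */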

/**
 * local_add_unless - add unless the number is a given value
 * @l: pointer of type local_t
 * @a: the amount to add to @l...
 * @u: ...unless @l is equal to @u
 *
 * Atomically adds @a to @l, so long as it was not @u.
 * Returns non-zero if @l was not @u, and zero otherwise.
 */
#define local_add_unless(l, a, u)				\
({								\
	long c, old;						\
	c = local_read(l);					\
	while (c != (u) && (old = local_cmpxchg((l), c, c + (a))) != c) \
		c = old;					\
	c != (u);						\
})
#define local_inc_not_zero(l) local_add_unless((l), 1, 0)
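
/*
 * Typical local_inc_not_zero() pattern (illustrative only; "obj" and its
 * "refs" member are hypothetical): take a reference only while the counter
 * is still live.
 *
 *	if (!local_inc_not_zero(&obj->refs))
 *		return NULL;	(counter already hit zero, do not revive it)
 */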

#define local_dec_return(l) local_sub_return(1, (l))
#define local_inc_return(l) local_add_return(1, (l))

/**
 * local_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @l: pointer of type local_t
 *
 * Atomically subtracts @i from @l and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define local_sub_and_test(i, l) (local_sub_return((i), (l)) == 0)

/**
 * local_inc_and_test - increment and test
 * @l: pointer of type local_t
 *
 * Atomically increments @l by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define local_inc_and_test(l) (local_inc_return(l) == 0)

/**
 * local_dec_and_test - decrement by 1 and test
 * @l: pointer of type local_t
 *
 * Atomically decrements @l by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define local_dec_and_test(l) (local_sub_return(1, (l)) == 0)

/**
 * local_add_negative - add and test if negative
 * @i: integer value to add
 * @l: pointer of type local_t
 *
 * Atomically adds @i to @l and returns true
 * if the result is negative, or false when
 * the result is greater than or equal to zero.
 */
#define local_add_negative(i, l) (local_add_return(i, (l)) < 0)

/* Use these for per-cpu local_t variables: on some archs they are
 * much more efficient than these naive implementations.  Note they take
 * a pointer, just like the operations above.
 */

#define __local_inc(l)		((l)->a.counter++)
#define __local_dec(l)		((l)->a.counter--)
#define __local_add(i, l)	((l)->a.counter += (i))
#define __local_sub(i, l)	((l)->a.counter -= (i))
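
/*
 * Sketch of when the non-atomic __local_* forms are safe (illustrative;
 * "stats" is a hypothetical per-CPU counter): unlike the local_* ops above,
 * they are plain read-modify-write sequences, so nothing else may touch the
 * counter concurrently, e.g. run them with preemption disabled and no
 * interrupt-context updaters:
 *
 *	preempt_disable();
 *	__local_add(n, this_cpu_ptr(&stats));
 *	preempt_enable();
 */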

#endif /* _ARCH_LOONGARCH_LOCAL_H */