1/*
2 * Copyright (c) 2013-2019 Huawei Technologies Co., Ltd. All rights reserved.
3 * Copyright (c) 2020-2022 Huawei Device Co., Ltd. All rights reserved.
4 *
5 * Redistribution and use in source and binary forms, with or without modification,
6 * are permitted provided that the following conditions are met:
7 *
8 * 1. Redistributions of source code must retain the above copyright notice, this list of
9 *    conditions and the following disclaimer.
10 *
11 * 2. Redistributions in binary form must reproduce the above copyright notice, this list
12 *    of conditions and the following disclaimer in the documentation and/or other materials
13 *    provided with the distribution.
14 *
15 * 3. Neither the name of the copyright holder nor the names of its contributors may be used
16 *    to endorse or promote products derived from this software without specific prior written
17 *    permission.
18 *
19 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
20 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
21 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
22 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
23 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
24 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
25 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
26 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
27 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
28 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
29 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 */
31
32#ifndef _LOS_ARCH_ATOMIC_H
33#define _LOS_ARCH_ATOMIC_H
34
35#include "los_compiler.h"
36#include "los_interrupt.h"
37
38#ifdef __cplusplus
39#if __cplusplus
40extern "C" {
41#endif /* __cplusplus */
42#endif /* __cplusplus */
43
/* Atomically read a 32-bit atomic variable and return its current value. */
STATIC INLINE INT32 ArchAtomicRead(const Atomic *v)
{
    INT32 val;
    UINT32 intSave;

    intSave = LOS_IntLock();

    /* lr.w loads the 32-bit value; the trailing fence orders the load
     * against subsequent memory accesses. The interrupt lock around the
     * sequence keeps the read atomic with respect to local interrupts. */
    __asm__ __volatile__("lr.w %0, (%1)\n"
                         "fence rw, rw\n"
                         : "=&r"(val)
                         : "r"(v)
                         : "memory");
    LOS_IntRestore(intSave);

    return val;
}
60
/* Atomically store setVal into the 32-bit atomic variable *v. */
STATIC INLINE VOID ArchAtomicSet(Atomic *v, INT32 setVal)
{
    UINT32 prevVal; /* receives the old value from amoswap.w; intentionally unused */
    UINT32 intSave;

    intSave = LOS_IntLock();

    /* amoswap.w atomically exchanges setVal into *v; the previous value
     * lands in prevVal but this API discards it. */
    __asm__ __volatile__("amoswap.w %0, %2, (%1)\n"
                         : "=r"(prevVal)
                         : "r"(v), "r"(setVal)
                         : "memory");
    LOS_IntRestore(intSave);
}
74
/* Atomically add addVal to *v and return the NEW (post-add) value. */
STATIC INLINE INT32 ArchAtomicAdd(Atomic *v, INT32 addVal)
{
    INT32 val;
    UINT32 intSave;

    intSave = LOS_IntLock();

    /* amoadd.w writes the OLD value to %0, so the following lw re-reads
     * *v to obtain the updated sum. The re-read is only race-free because
     * interrupts are locked around the whole sequence. */
    __asm__ __volatile__("amoadd.w %0, %2, (%1)\n"
                         "lw %0, (%1)\n"
                         "fence rw, rw\n"
                         : "=&r"(val)
                         : "r"(v), "r"(addVal)
                         : "memory");
    LOS_IntRestore(intSave);

    return val;
}
92
/* Atomically subtract subVal from *v and return the NEW (post-sub) value. */
STATIC INLINE INT32 ArchAtomicSub(Atomic *v, INT32 subVal)
{
    INT32 val;
    UINT32 intSave;

    intSave = LOS_IntLock();

    /* Implemented as amoadd.w of the negated operand; lw then re-reads the
     * updated value (safe only under the interrupt lock).
     * NOTE(review): -subVal overflows (UB) when subVal == INT32_MIN —
     * confirm callers never pass that value. */
    __asm__ __volatile__("amoadd.w %0, %2, (%1)\n"
                         "lw %0, (%1)\n"
                         "fence rw, rw\n"
                         : "=&r"(val)
                         : "r"(v), "r"(-subVal)
                         : "memory");
    LOS_IntRestore(intSave);

    return val;
}
110
/* Atomically increment *v by 1; the resulting value is discarded. */
STATIC INLINE VOID ArchAtomicInc(Atomic *v)
{
    (VOID)ArchAtomicAdd(v, 1);
}
115
/* Atomically decrement *v by 1; the resulting value is discarded. */
STATIC INLINE VOID ArchAtomicDec(Atomic *v)
{
    (VOID)ArchAtomicSub(v, 1);
}
120
/* Atomically increment *v by 1 and return the new value. */
STATIC INLINE INT32 ArchAtomicIncRet(Atomic *v)
{
    return ArchAtomicAdd(v, 1);
}
125
/* Atomically decrement *v by 1 and return the new value. */
STATIC INLINE INT32 ArchAtomicDecRet(Atomic *v)
{
    return ArchAtomicSub(v, 1);
}
130
131/**
132 * @ingroup  los_arch_atomic
133 * @brief Atomic exchange for 32-bit variable.
134 *
135 * @par Description:
136 * This API is used to implement the atomic exchange for 32-bit variable
137 * and return the previous value of the atomic variable.
138 * @attention
139 * <ul>The pointer v must not be NULL.</ul>
140 *
141 * @param  v       [IN] The variable pointer.
142 * @param  val     [IN] The exchange value.
143 *
144 * @retval #INT32       The previous value of the atomic variable
145 * @par Dependency:
146 * <ul><li>los_arch_atomic.h: the header file that contains the API declaration.</li></ul>
147 * @see
148 */
149STATIC INLINE INT32 ArchAtomicXchg32bits(volatile INT32 *v, INT32 val)
150{
151    INT32 prevVal = 0;
152    UINT32 intSave;
153
154    intSave = LOS_IntLock();
155
156    __asm__ __volatile__("lw %0, 0(%1)\n"
157                         "amoswap.w %0, %2, (%1)\n"
158                         : "=&r"(prevVal)
159                         : "r"(v), "r"(val)
160                         : "memory");
161    LOS_IntRestore(intSave);
162
163    return prevVal;
164}
165
166/**
167 * @ingroup  los_arch_atomic
168 * @brief Atomic exchange for 32-bit variable with compare.
169 *
170 * @par Description:
 * This API is used to implement the atomic exchange for a 32-bit variable, performed only if the current value of the variable is equal to oldVal.
172 * @attention
173 * <ul>The pointer v must not be NULL.</ul>
174 *
175 * @param  v       [IN] The variable pointer.
176 * @param  val     [IN] The new value.
177 * @param  oldVal  [IN] The old value.
178 *
179 * @retval TRUE  The previous value of the atomic variable is not equal to oldVal.
180 * @retval FALSE The previous value of the atomic variable is equal to oldVal.
181 * @par Dependency:
182 * <ul><li>los_arch_atomic.h: the header file that contains the API declaration.</li></ul>
183 * @see
184 */
/* Compare-and-exchange: if *v equals oldVal, store val into *v.
 * Returns FALSE (0) on a successful exchange, TRUE (non-zero) otherwise. */
STATIC INLINE BOOL ArchAtomicCmpXchg32bits(volatile INT32 *v, INT32 val, INT32 oldVal)
{
    INT32 prevVal = 0;
    UINT32 intSave;

    intSave = LOS_IntLock();
    /* Load the current value; if it differs from oldVal skip the swap.
     * The branch is race-free only because interrupts are locked. When the
     * swap executes, amoswap.w rewrites %0 with the (equal) old value, so
     * prevVal ends up holding *v's prior value on both paths. */
    __asm__ __volatile__("lw %0, 0(%1)\n"
                         "bne %0, %2, 1f\n"
                         "amoswap.w %0, %3, (%1)\n"
                         "1:"
                         : "=&r"(prevVal)
                         : "r"(v), "r"(oldVal), "r"(val)
                         : "memory");
    LOS_IntRestore(intSave);

    return prevVal != oldVal;
}
202
203STATIC INLINE INT64 ArchAtomic64Read(const Atomic64 *v)
204{
205    INT64 val;
206    UINT32 intSave;
207
208    intSave = LOS_IntLock();
209    val = *v;
210    LOS_IntRestore(intSave);
211
212    return val;
213}
214
215STATIC INLINE VOID ArchAtomic64Set(Atomic64 *v, INT64 setVal)
216{
217    UINT32 intSave;
218
219    intSave = LOS_IntLock();
220    *v = setVal;
221    LOS_IntRestore(intSave);
222}
223
224STATIC INLINE INT64 ArchAtomic64Add(Atomic64 *v, INT64 addVal)
225{
226    INT64 val;
227    UINT32 intSave;
228
229    intSave = LOS_IntLock();
230    *v += addVal;
231    val = *v;
232    LOS_IntRestore(intSave);
233
234    return val;
235}
236
237STATIC INLINE INT64 ArchAtomic64Sub(Atomic64 *v, INT64 subVal)
238{
239    INT64 val;
240    UINT32 intSave;
241
242    intSave = LOS_IntLock();
243    *v -= subVal;
244    val = *v;
245    LOS_IntRestore(intSave);
246
247    return val;
248}
249
/* Atomically increment the 64-bit variable *v by 1; result discarded. */
STATIC INLINE VOID ArchAtomic64Inc(Atomic64 *v)
{
    (VOID)ArchAtomic64Add(v, 1);
}
254
/* Atomically increment the 64-bit variable *v by 1 and return the new value. */
STATIC INLINE INT64 ArchAtomic64IncRet(Atomic64 *v)
{
    return ArchAtomic64Add(v, 1);
}
259
/* Atomically decrement the 64-bit variable *v by 1; result discarded. */
STATIC INLINE VOID ArchAtomic64Dec(Atomic64 *v)
{
    (VOID)ArchAtomic64Sub(v, 1);
}
264
/* Atomically decrement the 64-bit variable *v by 1 and return the new value. */
STATIC INLINE INT64 ArchAtomic64DecRet(Atomic64 *v)
{
    return ArchAtomic64Sub(v, 1);
}
269
270STATIC INLINE INT64 ArchAtomicXchg64bits(Atomic64 *v, INT64 val)
271{
272    INT64 prevVal;
273    UINT32 intSave;
274
275    intSave = LOS_IntLock();
276    prevVal = *v;
277    *v = val;
278    LOS_IntRestore(intSave);
279
280    return prevVal;
281}
282
283STATIC INLINE BOOL ArchAtomicCmpXchg64bits(Atomic64 *v, INT64 val, INT64 oldVal)
284{
285    INT64 prevVal;
286    UINT32 intSave;
287
288    intSave = LOS_IntLock();
289    prevVal = *v;
290    if (prevVal == oldVal) {
291        *v = val;
292    }
293    LOS_IntRestore(intSave);
294
295    return prevVal != oldVal;
296}
297
298#ifdef __cplusplus
299#if __cplusplus
300}
301#endif /* __cplusplus */
302#endif /* __cplusplus */
303
304#endif /* _LOS_ARCH_ATOMIC_H */
305