/*
 * Copyright (c) 2013-2019 Huawei Technologies Co., Ltd. All rights reserved.
 * Copyright (c) 2020-2022 Huawei Device Co., Ltd. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice, this list of
 *    conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright notice, this list
 *    of conditions and the following disclaimer in the documentation and/or other materials
 *    provided with the distribution.
 *
 * 3. Neither the name of the copyright holder nor the names of its contributors may be used
 *    to endorse or promote products derived from this software without specific prior written
 *    permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef _LOS_ARCH_ATOMIC_H
#define _LOS_ARCH_ATOMIC_H

#include "los_compiler.h"
#include "los_interrupt.h"

#ifdef __cplusplus
#if __cplusplus
extern "C" {
#endif /* __cplusplus */
#endif /* __cplusplus */

/*
 * Atomic operations for this architecture are emulated by disabling interrupts
 * (LOS_IntLock/LOS_IntRestore) around each read-modify-write sequence instead of
 * using exclusive load/store instructions.
 */

/* Read a 32-bit atomic value. A single aligned 32-bit load is assumed to be
 * atomic on this architecture, so no interrupt lock is taken. */
STATIC INLINE INT32 ArchAtomicRead(const Atomic *v)
{
    return *v;
}

/* Set a 32-bit atomic value. */
STATIC INLINE VOID ArchAtomicSet(Atomic *v, INT32 setVal)
{
    UINT32 intSave;

    intSave = LOS_IntLock();
    *v = setVal;
    LOS_IntRestore(intSave);
}

/* Add addVal to *v and return the new value. */
STATIC INLINE INT32 ArchAtomicAdd(Atomic *v, INT32 addVal)
{
    INT32 val;
    UINT32 intSave;

    intSave = LOS_IntLock();
    *v += addVal;
    val = *v;
    LOS_IntRestore(intSave);

    return val;
}

/* Subtract subVal from *v and return the new value. */
STATIC INLINE INT32 ArchAtomicSub(Atomic *v, INT32 subVal)
{
    INT32 val;
    UINT32 intSave;

    intSave = LOS_IntLock();
    *v -= subVal;
    val = *v;
    LOS_IntRestore(intSave);

    return val;
}

STATIC INLINE VOID ArchAtomicInc(Atomic *v)
{
    (VOID)ArchAtomicAdd(v, 1);
}

STATIC INLINE VOID ArchAtomicDec(Atomic *v)
{
    (VOID)ArchAtomicSub(v, 1);
}

STATIC INLINE INT32 ArchAtomicIncRet(Atomic *v)
{
    return ArchAtomicAdd(v, 1);
}

STATIC INLINE INT32 ArchAtomicDecRet(Atomic *v)
{
    return ArchAtomicSub(v, 1);
}

/* Exchange *v with val and return the previous value. */
STATIC INLINE INT32 ArchAtomicXchg32bits(Atomic *v, INT32 val)
{
    INT32 prevVal;
    UINT32 intSave;

    intSave = LOS_IntLock();
    prevVal = *v;
    *v = val;
    LOS_IntRestore(intSave);

    return prevVal;
}

/* Compare-and-exchange: write val to *v only if *v equals oldVal.
 * Returns FALSE (0) on success and TRUE (non-zero) if the compare failed. */
STATIC INLINE BOOL ArchAtomicCmpXchg32bits(Atomic *v, INT32 val, INT32 oldVal)
{
    INT32 prevVal;
    UINT32 intSave;

    intSave = LOS_IntLock();
    prevVal = *v;
    if (prevVal == oldVal) {
        *v = val;
    }
    LOS_IntRestore(intSave);

    return prevVal != oldVal;
}

/* Read a 64-bit atomic value. A 64-bit access is not a single atomic load on a
 * 32-bit core, so the read is protected by the interrupt lock. */
STATIC INLINE INT64 ArchAtomic64Read(const Atomic64 *v)
{
    INT64 val;
    UINT32 intSave;

    intSave = LOS_IntLock();
    val = *v;
    LOS_IntRestore(intSave);

    return val;
}

/* Set a 64-bit atomic value. */
STATIC INLINE VOID ArchAtomic64Set(Atomic64 *v, INT64 setVal)
{
    UINT32 intSave;

    intSave = LOS_IntLock();
    *v = setVal;
    LOS_IntRestore(intSave);
}

/* Add addVal to *v and return the new 64-bit value. */
STATIC INLINE INT64 ArchAtomic64Add(Atomic64 *v, INT64 addVal)
{
    INT64 val;
    UINT32 intSave;

    intSave = LOS_IntLock();
    *v += addVal;
    val = *v;
    LOS_IntRestore(intSave);

    return val;
}

/* Subtract subVal from *v and return the new 64-bit value. */
STATIC INLINE INT64 ArchAtomic64Sub(Atomic64 *v, INT64 subVal)
{
    INT64 val;
    UINT32 intSave;

    intSave = LOS_IntLock();
    *v -= subVal;
    val = *v;
    LOS_IntRestore(intSave);

    return val;
}

STATIC INLINE VOID ArchAtomic64Inc(Atomic64 *v)
{
    (VOID)ArchAtomic64Add(v, 1);
}

STATIC INLINE INT64 ArchAtomic64IncRet(Atomic64 *v)
{
    return ArchAtomic64Add(v, 1);
}

STATIC INLINE VOID ArchAtomic64Dec(Atomic64 *v)
{
    (VOID)ArchAtomic64Sub(v, 1);
}

STATIC INLINE INT64 ArchAtomic64DecRet(Atomic64 *v)
{
    return ArchAtomic64Sub(v, 1);
}

/* Exchange *v with val and return the previous 64-bit value. */
STATIC INLINE INT64 ArchAtomicXchg64bits(Atomic64 *v, INT64 val)
{
    INT64 prevVal;
    UINT32 intSave;

    intSave = LOS_IntLock();
    prevVal = *v;
    *v = val;
    LOS_IntRestore(intSave);

    return prevVal;
}

/* 64-bit compare-and-exchange: write val to *v only if *v equals oldVal.
 * Returns FALSE (0) on success and TRUE (non-zero) if the compare failed. */
STATIC INLINE BOOL ArchAtomicCmpXchg64bits(Atomic64 *v, INT64 val, INT64 oldVal)
{
    INT64 prevVal;
    UINT32 intSave;

    intSave = LOS_IntLock();
    prevVal = *v;
    if (prevVal == oldVal) {
        *v = val;
    }
    LOS_IntRestore(intSave);

    return prevVal != oldVal;
}

#ifdef __cplusplus
#if __cplusplus
}
#endif /* __cplusplus */
#endif /* __cplusplus */

#endif /* _LOS_ARCH_ATOMIC_H */