Lines matching references to 'name' in arch/arm64/include/asm/atomic_lse.h
36 #define ATOMIC_FETCH_OP(name, mb, op, asm_op, cl...) \
38 __lse_atomic_fetch_##op##name(int i, atomic_t *v) \
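ATOMIC_FETCH_OP pastes the operation and the ordering suffix together to stamp out one fetch-op per combination. A minimal sketch, with simplified types and a made-up function name, of roughly what the relaxed fetch_add instantiation is expected to expand to (AArch64 with LSE atomics only; not the kernel's verbatim expansion):

        typedef struct { int counter; } atomic_t;      /* kernel layout assumed */

        static inline int lse_fetch_add_relaxed_sketch(int i, atomic_t *v)
        {
                int old;

                /* LSE LDADD: atomically add i to v->counter, return the old value. */
                asm volatile("ldadd %w[i], %w[old], %[v]"
                             : [v] "+Q" (v->counter), [old] "=r" (old)
                             : [i] "r" (i));
                return old;
        }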
67 #define ATOMIC_FETCH_OP_SUB(name) \
69 __lse_atomic_fetch_sub##name(int i, atomic_t *v) \
71 return __lse_atomic_fetch_add##name(-i, v); \
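As the matched line shows, there is no separate LSE subtract form here: fetch_sub is derived by negating the operand and reusing fetch_add. The same identity, expressed with C11 atomics purely for illustration (not the kernel code):

        #include <stdatomic.h>

        /* Subtracting i is adding -i; both return the value before the update. */
        static inline int fetch_sub_via_add(atomic_int *v, int i)
        {
                return atomic_fetch_add_explicit(v, -i, memory_order_relaxed);
        }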
81 #define ATOMIC_OP_ADD_SUB_RETURN(name) \
83 __lse_atomic_add_return##name(int i, atomic_t *v) \
85 return __lse_atomic_fetch_add##name(i, v) + i; \
89 __lse_atomic_sub_return##name(int i, atomic_t *v) \
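The *_return variants are likewise derived: fetch_add returns the pre-update value, so re-applying the operand yields the new value. A hedged C11 sketch of the same construction (helper names are mine):

        #include <stdatomic.h>

        /* add_return: old + i is the value now stored in *v. */
        static inline int add_return_sketch(atomic_int *v, int i)
        {
                return atomic_fetch_add_explicit(v, i, memory_order_relaxed) + i;
        }

        /* sub_return follows the same pattern with fetch_sub. */
        static inline int sub_return_sketch(atomic_int *v, int i)
        {
                return atomic_fetch_sub_explicit(v, i, memory_order_relaxed) - i;
        }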
106 #define ATOMIC_FETCH_OP_AND(name, mb, cl...) \
108 __lse_atomic_fetch_and##name(int i, atomic_t *v) \
110 return __lse_atomic_fetch_andnot##name(~i, v); \
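LSE has no plain atomic AND; its bit-clear instruction (LDCLR) implements fetch_andnot, so fetch_and hands it the inverted mask: AND-ing with i is the same as clearing the bits of ~i. A small C11 sketch of that double inversion (the helpers are hypothetical, C11 has no fetch_andnot):

        #include <stdatomic.h>

        /* Model andnot on top of C11 fetch_and... */
        static inline int fetch_andnot_model(atomic_int *v, int mask)
        {
                return atomic_fetch_and_explicit(v, ~mask, memory_order_relaxed);
        }

        /* ...then rebuild fetch_and the way the macro does: the two ~ cancel. */
        static inline int fetch_and_via_andnot(atomic_int *v, int i)
        {
                return fetch_andnot_model(v, ~i);       /* == atomic AND with i */
        }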
143 #define ATOMIC64_FETCH_OP(name, mb, op, asm_op, cl...) \
145 __lse_atomic64_fetch_##op##name(s64 i, atomic64_t *v) \
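The atomic64_t family repeats the same generators with s64 operands and full-width registers, and the derived 64-bit operations below (sub, the *_return forms, and) reuse the 32-bit constructions shown above. A sketch of the relaxed 64-bit fetch_add under the same assumptions as the 32-bit one:

        typedef long long s64;
        typedef struct { s64 counter; } atomic64_t;     /* kernel layout assumed */

        static inline s64 lse_atomic64_fetch_add_relaxed_sketch(s64 i, atomic64_t *v)
        {
                s64 old;

                /* Same LDADD instruction, but on full 64-bit registers (no %w). */
                asm volatile("ldadd %[i], %[old], %[v]"
                             : [v] "+Q" (v->counter), [old] "=r" (old)
                             : [i] "r" (i));
                return old;
        }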
174 #define ATOMIC64_FETCH_OP_SUB(name) \
176 __lse_atomic64_fetch_sub##name(s64 i, atomic64_t *v) \
178 return __lse_atomic64_fetch_add##name(-i, v); \
188 #define ATOMIC64_OP_ADD_SUB_RETURN(name) \
190 __lse_atomic64_add_return##name(s64 i, atomic64_t *v) \
192 return __lse_atomic64_fetch_add##name(i, v) + i; \
196 __lse_atomic64_sub_return##name(s64 i, atomic64_t *v) \
198 return __lse_atomic64_fetch_sub##name(i, v) - i; \
213 #define ATOMIC64_FETCH_OP_AND(name, mb, cl...) \
215 __lse_atomic64_fetch_and##name(s64 i, atomic64_t *v) \
217 return __lse_atomic64_fetch_andnot##name(~i, v); \
248 #define __CMPXCHG_CASE(w, sfx, name, sz, mb, cl...) \
250 __lse__cmpxchg_case_##name##sz(volatile void *ptr, \
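__CMPXCHG_CASE stamps out compare-and-swap routines per size and ordering on top of the LSE CAS instruction. A minimal sketch, with simplified types, constraints, and naming, of roughly what a relaxed 32-bit instantiation would generate (AArch64 + LSE only):

        typedef unsigned int u32;

        static inline u32 lse_cmpxchg_case_32_sketch(volatile void *ptr, u32 old, u32 new)
        {
                /* LSE CAS: if *ptr == old, store new; either way, old is
                 * overwritten with the value that was found in memory. */
                asm volatile("cas %w[old], %w[new], %[v]"
                             : [v] "+Q" (*(u32 *)ptr), [old] "+r" (old)
                             : [new] "r" (new));
                return old;
        }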
284 #define __CMPXCHG128(name, mb, cl...) \
286 __lse__cmpxchg128##name(volatile u128 *ptr, u128 old, u128 new) \
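__CMPXCHG128 covers the 16-byte case, which LSE handles with CASP, a compare-and-swap of two adjacent 64-bit registers; the real expansion pins an even/odd register pair by hand, so only a semantic model is sketched here. It is not atomic and is not the kernel code; u128 is assumed to be the kernel's unsigned __int128 alias:

        typedef unsigned __int128 u128;

        /* Semantic model of a 128-bit compare-and-swap: return the value found
         * at *ptr and store new only if it matched old. NOT atomic; the kernel
         * gets atomicity from CASP operating on a register pair. */
        static inline u128 cmpxchg128_model(volatile u128 *ptr, u128 old, u128 new)
        {
                u128 cur = *ptr;

                if (cur == old)
                        *ptr = new;
                return cur;
        }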