/* SPDX-License-Identifier: GPL-2.0-only */

/*
 * ARCv2 supports 64-bit exclusive load (LLOCKD) / store (SCONDD)
 * - The address HAS to be 64-bit aligned
 */

#ifndef _ASM_ARC_ATOMIC64_ARCV2_H
#define _ASM_ARC_ATOMIC64_ARCV2_H

typedef struct {
	s64 __aligned(8) counter;
} atomic64_t;

#define ATOMIC64_INIT(a) { (a) }

static inline s64 arch_atomic64_read(const atomic64_t *v)
{
	s64 val;

	__asm__ __volatile__(
	"	ldd   %0, [%1]	\n"
	: "=r"(val)
	: "r"(&v->counter));

	return val;
}

static inline void arch_atomic64_set(atomic64_t *v, s64 a)
{
	/*
	 * This could have been a simple assignment in "C" but would need
	 * explicit volatile. Otherwise gcc optimizers could elide the store,
	 * which borked the atomic64 self-test.
	 * In the inline asm version, the memory clobber is needed for the
	 * exact same reason: to tell gcc about the store.
	 *
	 * This however is not needed for sibling atomic64_add() etc since both
	 * load/store are explicitly done in inline asm. As long as the API is
	 * used for each access, gcc has no way to optimize away any load/store.
	 */
	__asm__ __volatile__(
	"	std   %0, [%1]	\n"
	:
	: "r"(a), "r"(&v->counter)
	: "memory");
}
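
/*
 * Usage sketch: callers normally reach these arch_* primitives through the
 * generic wrappers from <linux/atomic.h> rather than calling them directly.
 * bytes_rx, len and total below are illustrative names only:
 *
 *	static atomic64_t bytes_rx = ATOMIC64_INIT(0);
 *
 *	atomic64_add(len, &bytes_rx);
 *	total = atomic64_read(&bytes_rx);
 */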

#define ATOMIC64_OP(op, op1, op2)					\
static inline void arch_atomic64_##op(s64 a, atomic64_t *v)		\
{									\
	s64 val;							\
									\
	__asm__ __volatile__(						\
	"1:				\n"				\
	"	llockd  %0, [%1]	\n"				\
	"	" #op1 " %L0, %L0, %L2	\n"				\
	"	" #op2 " %H0, %H0, %H2	\n"				\
	"	scondd  %0, [%1]	\n"				\
	"	bnz     1b		\n"				\
	: "=&r"(val)							\
	: "r"(&v->counter), "ir"(a)					\
	: "cc", "memory");						\
}									\

#define ATOMIC64_OP_RETURN(op, op1, op2)				\
static inline s64 arch_atomic64_##op##_return_relaxed(s64 a, atomic64_t *v)	\
{									\
	s64 val;							\
									\
	__asm__ __volatile__(						\
	"1:				\n"				\
	"	llockd  %0, [%1]	\n"				\
	"	" #op1 " %L0, %L0, %L2	\n"				\
	"	" #op2 " %H0, %H0, %H2	\n"				\
	"	scondd  %0, [%1]	\n"				\
	"	bnz     1b		\n"				\
	: [val] "=&r"(val)						\
	: "r"(&v->counter), "ir"(a)					\
	: "cc", "memory");						\
									\
	return val;							\
}

#define arch_atomic64_add_return_relaxed	arch_atomic64_add_return_relaxed
#define arch_atomic64_sub_return_relaxed	arch_atomic64_sub_return_relaxed

#define ATOMIC64_FETCH_OP(op, op1, op2)					\
static inline s64 arch_atomic64_fetch_##op##_relaxed(s64 a, atomic64_t *v)	\
{									\
	s64 val, orig;							\
									\
	__asm__ __volatile__(						\
	"1:				\n"				\
	"	llockd   %0, [%2]	\n"				\
	"	" #op1 " %L1, %L0, %L3	\n"				\
	"	" #op2 " %H1, %H0, %H3	\n"				\
	"	scondd   %1, [%2]	\n"				\
	"	bnz     1b		\n"				\
	: "=&r"(orig), "=&r"(val)					\
	: "r"(&v->counter), "ir"(a)					\
	: "cc", "memory");						\
									\
	return orig;							\
}

#define arch_atomic64_fetch_add_relaxed		arch_atomic64_fetch_add_relaxed
#define arch_atomic64_fetch_sub_relaxed		arch_atomic64_fetch_sub_relaxed

#define arch_atomic64_fetch_and_relaxed		arch_atomic64_fetch_and_relaxed
#define arch_atomic64_fetch_andnot_relaxed	arch_atomic64_fetch_andnot_relaxed
#define arch_atomic64_fetch_or_relaxed		arch_atomic64_fetch_or_relaxed
#define arch_atomic64_fetch_xor_relaxed		arch_atomic64_fetch_xor_relaxed

#define ATOMIC64_OPS(op, op1, op2)					\
	ATOMIC64_OP(op, op1, op2)					\
	ATOMIC64_OP_RETURN(op, op1, op2)				\
	ATOMIC64_FETCH_OP(op, op1, op2)

ATOMIC64_OPS(add, add.f, adc)
ATOMIC64_OPS(sub, sub.f, sbc)
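
/*
 * For reference, ATOMIC64_OPS(add, add.f, adc) above expands to roughly:
 *
 *	arch_atomic64_add(a, v);			// no return value
 *	arch_atomic64_add_return_relaxed(a, v);		// returns the new value
 *	arch_atomic64_fetch_add_relaxed(a, v);		// returns the old value
 *
 * all built around the same llockd/scondd retry loop: add.f adds the low
 * words and sets carry, adc then adds the high words plus that carry.
 */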

#undef ATOMIC64_OPS
#define ATOMIC64_OPS(op, op1, op2)					\
	ATOMIC64_OP(op, op1, op2)					\
	ATOMIC64_FETCH_OP(op, op1, op2)

ATOMIC64_OPS(and, and, and)
ATOMIC64_OPS(andnot, bic, bic)
ATOMIC64_OPS(or, or, or)
ATOMIC64_OPS(xor, xor, xor)

#define arch_atomic64_andnot		arch_atomic64_andnot

#undef ATOMIC64_OPS
#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP

static inline s64
arch_atomic64_cmpxchg(atomic64_t *ptr, s64 expected, s64 new)
{
	s64 prev;

	smp_mb();

	__asm__ __volatile__(
	"1:	llockd  %0, [%1]	\n"
	"	brne    %L0, %L2, 2f	\n"
	"	brne    %H0, %H2, 2f	\n"
	"	scondd  %3, [%1]	\n"
	"	bnz     1b		\n"
	"2:				\n"
	: "=&r"(prev)
	: "r"(ptr), "ir"(expected), "r"(new)
	: "cc");	/* memory clobber comes from smp_mb() */

	smp_mb();

	return prev;
}
#define arch_atomic64_cmpxchg arch_atomic64_cmpxchg

static inline s64 arch_atomic64_xchg(atomic64_t *ptr, s64 new)
{
	s64 prev;

	smp_mb();

	__asm__ __volatile__(
	"1:	llockd  %0, [%1]	\n"
	"	scondd  %2, [%1]	\n"
	"	bnz     1b		\n"
	"2:				\n"
	: "=&r"(prev)
	: "r"(ptr), "r"(new)
	: "cc");	/* memory clobber comes from smp_mb() */

	smp_mb();

	return prev;
}
#define arch_atomic64_xchg arch_atomic64_xchg
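
/*
 * cmpxchg retry-loop sketch, using the generic atomic64_cmpxchg() wrapper;
 * seen_max and new are illustrative names:
 *
 *	s64 old = atomic64_read(&seen_max);
 *
 *	while (old < new) {
 *		s64 prev = atomic64_cmpxchg(&seen_max, old, new);
 *
 *		if (prev == old)
 *			break;		// our update won the race
 *		old = prev;		// lost the race, retry with fresh value
 *	}
 */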

static inline s64 arch_atomic64_dec_if_positive(atomic64_t *v)
{
	s64 val;

	smp_mb();

	__asm__ __volatile__(
	"1:	llockd  %0, [%1]	\n"
	"	sub.f   %L0, %L0, 1	# w0 - 1, set C on borrow\n"
	"	sub.c   %H0, %H0, 1	# if C set, w1 - 1\n"
	"	brlt    %H0, 0, 2f	\n"
	"	scondd  %0, [%1]	\n"
	"	bnz     1b		\n"
	"2:				\n"
	: "=&r"(val)
	: "r"(&v->counter)
	: "cc");	/* memory clobber comes from smp_mb() */

	smp_mb();

	return val;
}
#define arch_atomic64_dec_if_positive arch_atomic64_dec_if_positive

static inline s64 arch_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
{
	s64 old, temp;

	smp_mb();

	__asm__ __volatile__(
	"1:	llockd  %0, [%2]	\n"
	"	brne	%L0, %L4, 2f	# continue to add since v != u \n"
	"	breq.d	%H0, %H4, 3f	# return since v == u \n"
	"2:				\n"
	"	add.f   %L1, %L0, %L3	\n"
	"	adc     %H1, %H0, %H3	\n"
	"	scondd  %1, [%2]	\n"
	"	bnz     1b		\n"
	"3:				\n"
	: "=&r"(old), "=&r" (temp)
	: "r"(&v->counter), "r"(a), "r"(u)
	: "cc");	/* memory clobber comes from smp_mb() */

	smp_mb();

	return old;
}
#define arch_atomic64_fetch_add_unless arch_atomic64_fetch_add_unless

#endif
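
/*
 * Conditional-op sketch: atomic64_dec_if_positive() decrements only when the
 * result stays >= 0 and returns the old value minus one either way, so a
 * negative return means nothing was consumed (credits is an illustrative
 * name):
 *
 *	if (atomic64_dec_if_positive(&credits) < 0)
 *		return -EBUSY;		// no credit was available
 */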