Searched refs:__sl_cas (Results 1 - 2 of 2) sorted by relevance
/kernel/linux/linux-5.10/arch/sh/include/asm/ |
H A D | spinlock-cas.h | 13 static inline unsigned __sl_cas(volatile unsigned *p, unsigned old, unsigned new) in __sl_cas() function 30 while (!__sl_cas(&lock->lock, 1, 0)); in arch_spin_lock() 35 __sl_cas(&lock->lock, 0, 1); in arch_spin_unlock() 40 return __sl_cas(&lock->lock, 1, 0); in arch_spin_trylock() 56 while (!old || __sl_cas(&rw->lock, old, old-1) != old); in arch_read_lock() 63 while (__sl_cas(&rw->lock, old, old+1) != old); in arch_read_unlock() 68 while (__sl_cas(&rw->lock, RW_LOCK_BIAS, 0) != RW_LOCK_BIAS); in arch_write_lock() 73 __sl_cas(&rw->lock, 0, RW_LOCK_BIAS); in arch_write_unlock() 80 while (old && __sl_cas(&rw->lock, old, old-1) != old); in arch_read_trylock() 86 return __sl_cas( in arch_write_trylock() [all...] |
/kernel/linux/linux-6.6/arch/sh/include/asm/ |
H A D | spinlock-cas.h | 13 static inline unsigned __sl_cas(volatile unsigned *p, unsigned old, unsigned new) in __sl_cas() function 30 while (!__sl_cas(&lock->lock, 1, 0)); in arch_spin_lock() 35 __sl_cas(&lock->lock, 0, 1); in arch_spin_unlock() 40 return __sl_cas(&lock->lock, 1, 0); in arch_spin_trylock() 56 while (!old || __sl_cas(&rw->lock, old, old-1) != old); in arch_read_lock() 63 while (__sl_cas(&rw->lock, old, old+1) != old); in arch_read_unlock() 68 while (__sl_cas(&rw->lock, RW_LOCK_BIAS, 0) != RW_LOCK_BIAS); in arch_write_lock() 73 __sl_cas(&rw->lock, 0, RW_LOCK_BIAS); in arch_write_unlock() 80 while (old && __sl_cas(&rw->lock, old, old-1) != old); in arch_read_trylock() 86 return __sl_cas( in arch_write_trylock() [all...] |
Completed in 1 millisecond