
Searched refs:arch_spin_value_unlocked (Results 1 - 17 of 17) sorted by relevance

/kernel/linux/linux-5.10/arch/arm/include/asm/
  spinlock.h:114  static inline int arch_spin_value_unlocked(arch_spinlock_t lock)  [defines arch_spin_value_unlocked()]
  spinlock.h:121  return !arch_spin_value_unlocked(READ_ONCE(*lock));  [in arch_spin_is_locked()]
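Taken together, a per-arch pair like the two arm hits above shows the contract these headers share: arch_spin_value_unlocked() inspects an arch_spinlock_t passed by value (a snapshot, with no memory access of its own), and arch_spin_is_locked() is just its negation applied to a READ_ONCE() copy of the live lock. The sketch below models that contract for a ticket-style lock; it compiles standalone, and the field names (tickets.owner, tickets.next) are simplified stand-ins, not the kernel's exact arm layout.

#include <assert.h>
#include <stdint.h>

/* Simplified stand-in for a ticket-style arch_spinlock_t (layout and
 * field names are an approximation, not the kernel's arm definition). */
typedef struct {
	struct {
		uint16_t owner;	/* ticket currently being served */
		uint16_t next;	/* next ticket to be handed out */
	} tickets;
} arch_spinlock_t;

/* Unlocked when no ticket is outstanding: owner has caught up with next. */
static inline int arch_spin_value_unlocked(arch_spinlock_t lock)
{
	return lock.tickets.owner == lock.tickets.next;
}

/* arch_spin_is_locked() is the negation applied to a fresh snapshot of
 * *lock (the kernel wraps that load in READ_ONCE()). */
static inline int arch_spin_is_locked(arch_spinlock_t *lock)
{
	return !arch_spin_value_unlocked(*lock);
}

int main(void)
{
	arch_spinlock_t lock = { .tickets = { .owner = 0, .next = 0 } };

	assert(!arch_spin_is_locked(&lock));
	lock.tickets.next++;		/* pretend a CPU took a ticket */
	assert(arch_spin_is_locked(&lock));
	return 0;
}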
/kernel/linux/linux-5.10/arch/csky/include/asm/
  spinlock.h:71  static inline int arch_spin_value_unlocked(arch_spinlock_t lock)  [defines arch_spin_value_unlocked()]
  spinlock.h:78  return !arch_spin_value_unlocked(READ_ONCE(*lock));  [in arch_spin_is_locked()]
/kernel/linux/linux-5.10/arch/powerpc/include/asm/
  simple_spinlock.h:34  static __always_inline int arch_spin_value_unlocked(arch_spinlock_t lock)  [defines arch_spin_value_unlocked()]
  simple_spinlock.h:42  return !arch_spin_value_unlocked(*lock);  [in arch_spin_is_locked()]
/kernel/linux/linux-6.6/arch/arm/include/asm/
  spinlock.h:114  static inline int arch_spin_value_unlocked(arch_spinlock_t lock)  [defines arch_spin_value_unlocked()]
  spinlock.h:121  return !arch_spin_value_unlocked(READ_ONCE(*lock));  [in arch_spin_is_locked()]
/kernel/linux/linux-6.6/arch/powerpc/include/asm/
  simple_spinlock.h:35  static __always_inline int arch_spin_value_unlocked(arch_spinlock_t lock)  [defines arch_spin_value_unlocked()]
  simple_spinlock.h:42  return !arch_spin_value_unlocked(READ_ONCE(*lock));  [in arch_spin_is_locked()]
  qspinlock.h:163  #define arch_spin_value_unlocked(l) queued_spin_value_unlocked(l)  [defines the arch_spin_value_unlocked() macro]
/kernel/linux/linux-5.10/arch/alpha/include/asm/
  spinlock.h:19  static inline int arch_spin_value_unlocked(arch_spinlock_t lock)  [defines arch_spin_value_unlocked()]
/kernel/linux/linux-6.6/arch/alpha/include/asm/
  spinlock.h:19  static inline int arch_spin_value_unlocked(arch_spinlock_t lock)  [defines arch_spin_value_unlocked()]
/kernel/linux/linux-5.10/include/asm-generic/
  qspinlock.h:116  #define arch_spin_value_unlocked(l) queued_spin_value_unlocked(l)  [defines the arch_spin_value_unlocked() macro]
/kernel/linux/linux-6.6/include/asm-generic/
  spinlock.h:85  static __always_inline int arch_spin_value_unlocked(arch_spinlock_t lock)  [defines arch_spin_value_unlocked()]
  qspinlock.h:145  #define arch_spin_value_unlocked(l) queued_spin_value_unlocked(l)  [defines the arch_spin_value_unlocked() macro]
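Where an architecture uses queued spinlocks, the asm-generic header turns arch_spin_value_unlocked() into the macro shown above, forwarding to queued_spin_value_unlocked(). The model below assumes the usual qspinlock convention that a fully zero lock word means unlocked (no locked byte, no pending bit, no waiter tail); struct qspinlock here is a plain uint32_t stand-in for the kernel's atomic_t-based type, so treat it as a sketch of the idea rather than the header's exact code.

#include <assert.h>
#include <stdint.h>

/* Simplified stand-in for struct qspinlock: the real type wraps an
 * atomic_t and encodes a locked byte, a pending bit and an MCS tail. */
struct qspinlock {
	uint32_t val;
};

/* A queued spinlock is treated as unlocked only when the whole word is
 * zero: no owner, no pending waiter, no tail encoded. */
static inline int queued_spin_value_unlocked(struct qspinlock lock)
{
	return !lock.val;
}

/* The asm-generic header forwards the arch hook to the queued helper. */
#define arch_spin_value_unlocked(l)	queued_spin_value_unlocked(l)

int main(void)
{
	struct qspinlock unlocked = { .val = 0 };
	struct qspinlock locked   = { .val = 1 };	/* locked byte set */

	assert(arch_spin_value_unlocked(unlocked));
	assert(!arch_spin_value_unlocked(locked));
	return 0;
}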
/kernel/linux/linux-6.6/arch/s390/include/asm/
  spinlock.h:48  static inline int arch_spin_value_unlocked(arch_spinlock_t lock)  [defines arch_spin_value_unlocked()]
/kernel/linux/linux-5.10/arch/s390/include/asm/
  spinlock.h:48  static inline int arch_spin_value_unlocked(arch_spinlock_t lock)  [defines arch_spin_value_unlocked()]
/kernel/linux/linux-5.10/lib/
  lockref.c:16  while (likely(arch_spin_value_unlocked(old.lock.rlock.raw_lock))) { \
/kernel/linux/linux-6.6/lib/
  lockref.c:16  while (likely(arch_spin_value_unlocked(old.lock.rlock.raw_lock))) { \
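The two lockref.c hits are the main consumer of the by-value check: lockref packs a spinlock and a reference count into a single 64-bit word, snapshots it, and only attempts a lockless cmpxchg update while arch_spin_value_unlocked() reports the snapshot as unlocked, falling back to taking the spinlock otherwise. The userspace sketch below illustrates that pattern under simplifying assumptions; the word layout (lock in the low half, count in the high half) and the helpers word_lock(), word_count() and lockref_get_fast() are hypothetical stand-ins, not the kernel's CMPXCHG_LOOP() macro.

#include <assert.h>
#include <stdatomic.h>
#include <stdint.h>

/* Stand-in spinlock value: zero means unlocked (a simplification). */
typedef struct { uint32_t slock; } arch_spinlock_t;

static inline int arch_spin_value_unlocked(arch_spinlock_t lock)
{
	return lock.slock == 0;
}

/* Model of lockref: lock and count share one 64-bit word so both can be
 * updated together by a single compare-and-swap. */
struct lockref {
	_Atomic uint64_t lock_count;	/* low 32 bits: lock, high 32 bits: count */
};

static inline arch_spinlock_t word_lock(uint64_t w)
{
	return (arch_spinlock_t){ .slock = (uint32_t)w };
}

static inline uint32_t word_count(uint64_t w)
{
	return (uint32_t)(w >> 32);
}

/* Lockless increment in the spirit of lockref_get(): retry while the
 * snapshot shows the lock free, otherwise report failure so the caller
 * can fall back to actually taking the spinlock. */
static int lockref_get_fast(struct lockref *lr)
{
	int retry = 100;
	uint64_t old = atomic_load_explicit(&lr->lock_count, memory_order_relaxed);

	while (arch_spin_value_unlocked(word_lock(old))) {
		uint64_t new = old + ((uint64_t)1 << 32);	/* count++ */

		if (atomic_compare_exchange_strong_explicit(&lr->lock_count, &old, new,
							    memory_order_relaxed,
							    memory_order_relaxed))
			return 1;	/* updated without touching the lock */
		if (!--retry)
			break;		/* contended: give up on the fast path */
	}
	return 0;			/* caller should fall back to spin_lock() */
}

int main(void)
{
	struct lockref lr = { .lock_count = 0 };

	assert(lockref_get_fast(&lr));
	assert(word_count(atomic_load(&lr.lock_count)) == 1);

	atomic_store(&lr.lock_count, 1);	/* low bits nonzero: lock "held" */
	assert(!lockref_get_fast(&lr));		/* fast path refuses, caller locks */
	return 0;
}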
/kernel/linux/linux-5.10/arch/ia64/include/asm/
  spinlock.h:96  static __always_inline int arch_spin_value_unlocked(arch_spinlock_t lock)  [defines arch_spin_value_unlocked()]
/kernel/linux/linux-6.6/arch/ia64/include/asm/
  spinlock.h:96  static __always_inline int arch_spin_value_unlocked(arch_spinlock_t lock)  [defines arch_spin_value_unlocked()]

Completed in 11 milliseconds