
Searched refs:smp_mb (Results 1 - 25 of 613) sorted by relevance


/kernel/linux/linux-5.10/arch/arc/include/asm/
spinlock.h
40 smp_mb(); in arch_spin_lock()
62 smp_mb(); in arch_spin_trylock()
69 smp_mb(); in arch_spin_unlock()
105 smp_mb(); in arch_read_lock()
129 smp_mb(); in arch_read_trylock()
163 smp_mb(); in arch_write_lock()
188 smp_mb(); in arch_write_trylock()
197 smp_mb(); in arch_read_unlock()
215 smp_mb(); in arch_write_unlock()
227 * Per lkmm, smp_mb() i in arch_spin_lock()
[all...]
atomic.h
48 smp_mb(); \
60 smp_mb(); \
74 smp_mb(); \
87 smp_mb(); \
145 * spin lock/unlock provides the needed smp_mb() before/after \
163 * spin lock/unlock provides the needed smp_mb() before/after \
277 smp_mb(); \
288 : "cc"); /* memory clobber comes from smp_mb() */ \
290 smp_mb(); \
300 smp_mb(); \
[all...]
futex.h
20 smp_mb(); \
42 smp_mb() \
48 smp_mb(); \
69 smp_mb() \
133 smp_mb(); in futex_atomic_cmpxchg_inatomic()
160 smp_mb(); in futex_atomic_cmpxchg_inatomic()
cmpxchg.h
25 smp_mb(); in __cmpxchg()
39 smp_mb(); in __cmpxchg()
54 * spin lock/unlock provide the needed smp_mb() before/after in __cmpxchg()
91 smp_mb(); in __xchg()
99 smp_mb(); in __xchg()
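
The ARC atomic.h and cmpxchg.h hits above show the recurring pattern for making a read-modify-write fully ordered on a core whose LL/SC loop is itself unordered: issue smp_mb() before the operation and again after it (the in-file comments also note that on non-LLSC configurations the protecting spinlock supplies those barriers). Below is a minimal userspace C11 analogue of that bracketing, not the ARC code itself; ordered_cmpxchg() is an illustrative name, not a kernel interface.

#include <stdatomic.h>

/*
 * Sketch: a relaxed compare-and-swap bracketed by full fences, mirroring
 * the smp_mb(); <ll/sc loop>; smp_mb(); shape seen in the hits above.
 */
static inline int ordered_cmpxchg(_Atomic int *p, int old, int new)
{
	atomic_thread_fence(memory_order_seq_cst);	/* plays the role of smp_mb() before */
	atomic_compare_exchange_strong_explicit(p, &old, new,
						memory_order_relaxed,
						memory_order_relaxed);
	atomic_thread_fence(memory_order_seq_cst);	/* plays the role of smp_mb() after */
	return old;	/* previous value, as cmpxchg() returns */
}
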
/kernel/linux/linux-6.6/arch/arc/include/asm/
spinlock.h
40 smp_mb(); in arch_spin_lock()
62 smp_mb(); in arch_spin_trylock()
69 smp_mb(); in arch_spin_unlock()
105 smp_mb(); in arch_read_lock()
129 smp_mb(); in arch_read_trylock()
163 smp_mb(); in arch_write_lock()
188 smp_mb(); in arch_write_trylock()
197 smp_mb(); in arch_read_unlock()
215 smp_mb(); in arch_write_unlock()
227 * Per lkmm, smp_mb() i in arch_spin_lock()
[all...]
futex.h
20 smp_mb(); \
42 smp_mb() \
48 smp_mb(); \
69 smp_mb() \
133 smp_mb(); in futex_atomic_cmpxchg_inatomic()
160 smp_mb(); in futex_atomic_cmpxchg_inatomic()
atomic64-arcv2.h
145 smp_mb(); in arch_atomic64_cmpxchg()
156 : "cc"); /* memory clobber comes from smp_mb() */ in arch_atomic64_cmpxchg()
158 smp_mb(); in arch_atomic64_cmpxchg()
168 smp_mb(); in arch_atomic64_xchg()
177 : "cc"); /* memory clobber comes from smp_mb() */ in arch_atomic64_xchg()
179 smp_mb(); in arch_atomic64_xchg()
189 smp_mb(); in arch_atomic64_dec_if_positive()
201 : "cc"); /* memory clobber comes from smp_mb() */ in arch_atomic64_dec_if_positive()
203 smp_mb(); in arch_atomic64_dec_if_positive()
213 smp_mb(); in arch_atomic64_fetch_add_unless()
[all...]
/kernel/linux/linux-5.10/arch/csky/include/asm/
spinlock.h
34 smp_mb(); in arch_spin_lock()
60 smp_mb(); in arch_spin_trylock()
67 smp_mb(); in arch_spin_unlock()
92 #define smp_mb__after_spinlock() smp_mb()
113 smp_mb(); in arch_spin_lock()
118 smp_mb(); in arch_spin_unlock()
140 smp_mb(); in arch_spin_trylock()
164 smp_mb(); in arch_read_lock()
172 smp_mb(); in arch_read_unlock()
201 smp_mb(); in arch_read_trylock()
[all...]
cmpxchg.h
19 smp_mb(); \
28 smp_mb(); \
47 smp_mb(); \
59 smp_mb(); \
atomic.h
17 smp_mb(); in __atomic_add_unless()
33 smp_mb(); in __atomic_add_unless()
58 smp_mb(); \
68 smp_mb(); \
78 smp_mb(); \
88 smp_mb(); \
/kernel/linux/linux-5.10/arch/arm/include/asm/
spinlock.h
78 smp_mb(); in arch_spin_lock()
100 smp_mb(); in arch_spin_trylock()
109 smp_mb(); in arch_spin_unlock()
155 smp_mb(); in arch_write_lock()
175 smp_mb(); in arch_write_trylock()
184 smp_mb(); in arch_write_unlock()
224 smp_mb(); in arch_read_lock()
231 smp_mb(); in arch_read_unlock()
266 smp_mb(); in arch_read_trylock()
/kernel/linux/linux-6.6/arch/arm/include/asm/
spinlock.h
78 smp_mb(); in arch_spin_lock()
100 smp_mb(); in arch_spin_trylock()
109 smp_mb(); in arch_spin_unlock()
155 smp_mb(); in arch_write_lock()
175 smp_mb(); in arch_write_trylock()
184 smp_mb(); in arch_write_unlock()
224 smp_mb(); in arch_read_lock()
231 smp_mb(); in arch_read_unlock()
266 smp_mb(); in arch_read_trylock()
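
The arm (and arc/csky) spinlock.h hits above all place the barriers the same way: smp_mb() after the lock has been observed taken, and smp_mb() before the store that releases it, because the underlying exclusive-load/store loop does not order the critical section by itself. A userspace C11 sketch of that placement follows; the lock type and function names are illustrative, not the kernel's arch_spinlock_t code.

#include <stdatomic.h>

typedef struct { _Atomic int locked; } sketch_spinlock_t;	/* illustrative */

static inline void sketch_spin_lock(sketch_spinlock_t *l)
{
	/* unordered test-and-set loop, standing in for the ldrex/strex loop */
	while (atomic_exchange_explicit(&l->locked, 1, memory_order_relaxed))
		;
	atomic_thread_fence(memory_order_seq_cst);	/* smp_mb(): nothing leaks above the acquire */
}

static inline void sketch_spin_unlock(sketch_spinlock_t *l)
{
	atomic_thread_fence(memory_order_seq_cst);	/* smp_mb(): critical section completes first */
	atomic_store_explicit(&l->locked, 0, memory_order_relaxed);
}
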
/kernel/linux/linux-5.10/arch/alpha/include/asm/
atomic.h
19 * _release atomics, an smp_mb() is unconditionally inserted into the
72 smp_mb(); \
90 smp_mb(); \
125 smp_mb(); \
143 smp_mb(); \
219 smp_mb(); in atomic_fetch_add_unless()
234 smp_mb(); in atomic_fetch_add_unless()
251 smp_mb(); in atomic64_fetch_add_unless()
266 smp_mb(); in atomic64_fetch_add_unless()
281 smp_mb(); in atomic64_dec_if_positive()
[all...]
cmpxchg.h
49 smp_mb(); \
52 smp_mb(); \
61 smp_mb(); \
64 smp_mb(); \
/kernel/linux/linux-6.6/arch/alpha/include/asm/
atomic.h
19 * _release atomics, an smp_mb() is unconditionally inserted into the
72 smp_mb(); \
90 smp_mb(); \
126 smp_mb(); \
145 smp_mb(); \
206 smp_mb(); in arch_atomic_fetch_add_unless()
221 smp_mb(); in arch_atomic_fetch_add_unless()
229 smp_mb(); in arch_atomic64_fetch_add_unless()
244 smp_mb(); in arch_atomic64_fetch_add_unless()
252 smp_mb(); in arch_atomic64_dec_if_positive()
[all...]
cmpxchg.h
49 smp_mb(); \
52 smp_mb(); \
61 smp_mb(); \
64 smp_mb(); \
/kernel/linux/linux-5.10/kernel/sched/
membarrier.c
41 smp_mb(); /* IPIs should be serializing but paranoid. */ in ipi_mb()
47 * The smp_mb() in membarrier after all the IPIs is supposed to in ipi_sync_core()
54 * after membarrier()'s smp_mb(). in ipi_sync_core()
56 smp_mb(); /* IPIs should be serializing but paranoid. */ in ipi_sync_core()
67 * time we've already sent an IPI, the cost of the extra smp_mb() in ipi_rseq()
70 smp_mb(); in ipi_rseq()
88 smp_mb(); in ipi_sync_rq_state()
98 smp_mb(); in membarrier_exec_mmap()
119 smp_mb(); /* system call entry is not a mb. */ in membarrier_global_expedited()
170 smp_mb(); /* exi in membarrier_global_expedited()
[all...]
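
The membarrier.c hits show smp_mb() both inside the IPI handlers and around the system call body; the in-tree comments explain why ("IPIs should be serializing but paranoid", "system call entry is not a mb"). Below is a compressed kernel-style sketch of that shape only; it is not the real control flow, which targets just the CPUs running the calling process rather than broadcasting.

#include <linux/smp.h>		/* smp_call_function() */
#include <asm/barrier.h>	/* smp_mb() */

/* Each targeted CPU just executes a full barrier. */
static void ipi_mb(void *info)
{
	smp_mb();	/* IPIs should be serializing but paranoid. */
}

/* Illustrative caller: bracket the IPI broadcast with barriers of its own,
 * since neither syscall entry nor exit implies a full barrier. */
static void membarrier_expedited_sketch(void)
{
	smp_mb();				/* system call entry is not a mb */
	smp_call_function(ipi_mb, NULL, 1);	/* wait for every other CPU's barrier */
	smp_mb();				/* pair again before returning to user space */
}
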
/kernel/linux/linux-5.10/tools/testing/selftests/rcutorture/formal/srcu-cbmc/src/
barriers.h
8 #define smp_mb() __sync_synchronize() macro
15 #define smp_mb() __CPROVER_fence("WWfence", "RRfence", "RWfence", "WRfence", \ macro
27 #define sync_smp_mb() smp_mb()
33 #define rs_smp_mb() smp_mb()
/kernel/linux/linux-5.10/tools/include/asm/
barrier.h
46 #ifndef smp_mb
47 # define smp_mb() mb() macro
53 smp_mb(); \
62 smp_mb(); \
/kernel/linux/linux-6.6/tools/include/asm/
barrier.h
44 #ifndef smp_mb
45 # define smp_mb() mb() macro
51 smp_mb(); \
60 smp_mb(); \
/kernel/linux/linux-6.6/kernel/sched/
membarrier.c
28 * a: smp_mb()
30 * c: smp_mb()
67 * a: smp_mb()
69 * c: smp_mb()
87 * a: smp_mb()
91 * c: smp_mb()
107 * a: smp_mb()
109 * d: smp_mb()
112 * c: smp_mb()
121 * a: smp_mb()
[all...]
/kernel/linux/linux-5.10/arch/sh/kernel/
ftrace.c
137 smp_mb(); in arch_ftrace_nmi_enter()
143 smp_mb(); in arch_ftrace_nmi_exit()
174 smp_mb(); in do_ftrace_mod_code()
179 smp_mb(); in do_ftrace_mod_code()
184 smp_mb(); in do_ftrace_mod_code()
/kernel/linux/linux-6.6/arch/sh/kernel/
ftrace.c
137 smp_mb(); in arch_ftrace_nmi_enter()
143 smp_mb(); in arch_ftrace_nmi_exit()
174 smp_mb(); in do_ftrace_mod_code()
179 smp_mb(); in do_ftrace_mod_code()
184 smp_mb(); in do_ftrace_mod_code()
/kernel/linux/linux-5.10/include/asm-generic/
barrier.h
64 #ifndef smp_mb
65 #define smp_mb() __smp_mb() macro
78 #ifndef smp_mb
79 #define smp_mb() barrier() macro
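
Both the tools/ copies and the asm-generic header use the same layering for smp_mb(): every definition is wrapped in #ifndef so an architecture header can override it, the SMP build maps it to the arch's __smp_mb(), and non-SMP builds fall back to a compiler-only barrier(). A condensed sketch of that layering; the #define lines come from the hits, the CONFIG_SMP split around them is assumed from the surrounding header.

#ifdef CONFIG_SMP
# ifndef smp_mb
#  define smp_mb()	__smp_mb()	/* CPU barrier supplied by the architecture */
# endif
#else
# ifndef smp_mb
#  define smp_mb()	barrier()	/* UP: a compiler barrier is enough */
# endif
#endif
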
/kernel/linux/linux-6.6/tools/virtio/ringtest/
main.h
117 #define smp_mb() asm volatile("lock; addl $0,-132(%%rsp)" ::: "memory", "cc") macro
119 #define smp_mb() asm volatile("dmb ish" ::: "memory") macro
125 #define smp_mb() __sync_synchronize() macro
188 smp_mb(); /* Enforce dependency ordering from x */ \
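
The virtio ringtest harness picks a userspace smp_mb() per architecture: a locked add to a stack slot on x86 (a full barrier that is usually cheaper than mfence), "dmb ish" on arm64, and the __sync_synchronize() builtin elsewhere. A sketch of that selection; the three definitions are taken from the hits, the #if guards around them are assumed.

#if defined(__x86_64__) || defined(__i386__)
/* locked RMW to the stack: full barrier, typically cheaper than mfence */
#define smp_mb() asm volatile("lock; addl $0,-132(%%rsp)" ::: "memory", "cc")
#elif defined(__aarch64__)
#define smp_mb() asm volatile("dmb ish" ::: "memory")	/* inner-shareable full barrier */
#else
#define smp_mb() __sync_synchronize()	/* portable GCC/Clang builtin fallback */
#endif
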
