
Searched refs:old (Results 1 - 25 of 2198) sorted by relevance


/kernel/linux/linux-5.10/arch/sh/include/asm/
bitops-cas.h
5 static inline unsigned __bo_cas(volatile unsigned *p, unsigned old, unsigned new) in __bo_cas() argument
9 : "r"(old), "z"(p) in __bo_cas()
16 unsigned mask, old; in set_bit() local
22 do old = *a; in set_bit()
23 while (__bo_cas(a, old, old|mask) != old); in set_bit()
28 unsigned mask, old; in clear_bit() local
34 do old = *a; in clear_bit()
35 while (__bo_cas(a, old, ol in clear_bit()
40 unsigned mask, old; change_bit() local
52 unsigned mask, old; test_and_set_bit() local
66 unsigned mask, old; test_and_clear_bit() local
80 unsigned mask, old; test_and_change_bit() local
[all...]
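
The bitops-cas.h matches above show how SuperH builds set_bit() out of a compare-and-swap retry loop: snapshot the word, compute old|mask, and retry until the CAS observes the same old value. Below is a minimal user-space sketch of that pattern, assuming the GCC/Clang __atomic_compare_exchange_n builtin in place of the kernel's hand-written __bo_cas() assembly; the sketch_* names are illustrative only.

#include <stdio.h>

/* Set bit 'nr' in the bitmap at 'addr' with a CAS retry loop, mirroring the
 * loop shape of set_bit() in bitops-cas.h (an approximation, not the kernel
 * implementation). */
static void sketch_set_bit(unsigned nr, unsigned *addr)
{
    unsigned *word = addr + nr / (8 * sizeof(unsigned));
    unsigned mask = 1u << (nr % (8 * sizeof(unsigned)));
    unsigned old;

    do {
        old = *word;                    /* snapshot the current word */
    } while (!__atomic_compare_exchange_n(word, &old, old | mask, 0,
                                          __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST));
}

int main(void)
{
    unsigned bitmap[2] = { 0, 0 };

    sketch_set_bit(5, bitmap);
    sketch_set_bit(33, bitmap);
    printf("%#x %#x\n", bitmap[0], bitmap[1]);   /* 0x20 0x2 */
    return 0;
}

clear_bit() and change_bit() in the same file follow the identical loop, with the OR replaced by an AND-NOT or an XOR of the mask.
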
spinlock-cas.h
13 static inline unsigned __sl_cas(volatile unsigned *p, unsigned old, unsigned new) in __sl_cas() argument
17 : "r"(old), "z"(p) in __sl_cas()
54 unsigned old; in arch_read_lock() local
55 do old = rw->lock; in arch_read_lock()
56 while (!old || __sl_cas(&rw->lock, old, old-1) != old); in arch_read_lock()
61 unsigned old; in arch_read_unlock() local
62 do old in arch_read_unlock()
78 unsigned old; arch_read_trylock() local
[all...]
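
arch_read_lock() above treats rw->lock as the number of reader slots still available: it spins while the count is zero (a writer holds all slots) and otherwise decrements it with a compare-and-swap. A rough user-space sketch of that scheme; the RW_SLOTS constant and the __atomic builtins are stand-ins for the kernel's bias value and __sl_cas(), not the real definitions.

#include <stdio.h>

#define RW_SLOTS 0x01000000u        /* hypothetical "all reader slots free" */

struct sketch_rwlock { unsigned lock; };

static void sketch_read_lock(struct sketch_rwlock *rw)
{
    unsigned old;

    do {
        old = __atomic_load_n(&rw->lock, __ATOMIC_RELAXED);
        /* old == 0 means a writer owns the lock: keep spinning. */
    } while (!old ||
             !__atomic_compare_exchange_n(&rw->lock, &old, old - 1, 0,
                                          __ATOMIC_ACQUIRE, __ATOMIC_RELAXED));
}

static void sketch_read_unlock(struct sketch_rwlock *rw)
{
    __atomic_fetch_add(&rw->lock, 1, __ATOMIC_RELEASE);   /* return the slot */
}

int main(void)
{
    struct sketch_rwlock rw = { .lock = RW_SLOTS };

    sketch_read_lock(&rw);
    printf("after read_lock:   %#x\n", rw.lock);
    sketch_read_unlock(&rw);
    printf("after read_unlock: %#x\n", rw.lock);
    return 0;
}
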
/kernel/linux/linux-6.6/arch/sh/include/asm/
bitops-cas.h
5 static inline unsigned __bo_cas(volatile unsigned *p, unsigned old, unsigned new) in __bo_cas() argument
9 : "r"(old), "z"(p) in __bo_cas()
16 unsigned mask, old; in set_bit() local
22 do old = *a; in set_bit()
23 while (__bo_cas(a, old, old|mask) != old); in set_bit()
28 unsigned mask, old; in clear_bit() local
34 do old = *a; in clear_bit()
35 while (__bo_cas(a, old, ol in clear_bit()
40 unsigned mask, old; change_bit() local
52 unsigned mask, old; test_and_set_bit() local
66 unsigned mask, old; test_and_clear_bit() local
80 unsigned mask, old; test_and_change_bit() local
[all...]
spinlock-cas.h
13 static inline unsigned __sl_cas(volatile unsigned *p, unsigned old, unsigned new) in __sl_cas() argument
17 : "r"(old), "z"(p) in __sl_cas()
54 unsigned old; in arch_read_lock() local
55 do old = rw->lock; in arch_read_lock()
56 while (!old || __sl_cas(&rw->lock, old, old-1) != old); in arch_read_lock()
61 unsigned old; in arch_read_unlock() local
62 do old in arch_read_unlock()
78 unsigned old; arch_read_trylock() local
[all...]
/kernel/linux/linux-5.10/arch/ia64/include/asm/
bitops.h
42 __u32 bit, old, new; in set_bit() local
50 old = *m; in set_bit()
51 new = old | bit; in set_bit()
52 } while (cmpxchg_acq(m, old, new) != old); in set_bit()
83 __u32 mask, old, new; in clear_bit() local
91 old = *m; in clear_bit()
92 new = old & mask; in clear_bit()
93 } while (cmpxchg_acq(m, old, new) != old); in clear_bit()
107 __u32 mask, old, new; clear_bit_unlock() local
164 __u32 bit, old, new; change_bit() local
203 __u32 bit, old, new; test_and_set_bit() local
257 __u32 mask, old, new; test_and_clear_bit() local
302 __u32 bit, old, new; test_and_change_bit() local
326 __u32 old, bit = (1 << (nr & 31)); __test_and_change_bit() local
[all...]
acenv.h
22 unsigned int old, new, val; in ia64_acpi_acquire_global_lock() local
24 old = *lock; in ia64_acpi_acquire_global_lock()
25 new = (((old & ~0x3) + 2) + ((old >> 1) & 0x1)); in ia64_acpi_acquire_global_lock()
26 val = ia64_cmpxchg4_acq(lock, new, old); in ia64_acpi_acquire_global_lock()
27 } while (unlikely (val != old)); in ia64_acpi_acquire_global_lock()
34 unsigned int old, new, val; in ia64_acpi_release_global_lock() local
36 old = *lock; in ia64_acpi_release_global_lock()
37 new = old & ~0x3; in ia64_acpi_release_global_lock()
38 val = ia64_cmpxchg4_acq(lock, new, old); in ia64_acpi_release_global_lock()
[all...]
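
The acenv.h matches implement the ACPI global-lock handshake: bit 1 of the lock word means "owned" and bit 0 means "pending", and new = ((old & ~3) + 2) + ((old >> 1) & 1) sets owned while recording a pending waiter if the lock was already held. The sketch below reproduces that arithmetic with the portable __atomic builtins instead of ia64_cmpxchg4_acq(); the boolean return convention is a simplification and an assumption.

#include <stdio.h>

/* Returns 1 if we now own the lock, 0 if we were only marked pending. */
static int sketch_acquire_global_lock(unsigned int *lock)
{
    unsigned int old, new;

    do {
        old = __atomic_load_n(lock, __ATOMIC_RELAXED);
        /* Set the owned bit; if it was already set, set pending instead. */
        new = ((old & ~0x3u) + 2) + ((old >> 1) & 0x1);
    } while (!__atomic_compare_exchange_n(lock, &old, new, 0,
                                          __ATOMIC_ACQUIRE, __ATOMIC_RELAXED));

    return (new & 0x3) == 2;        /* owned set, pending clear */
}

static void sketch_release_global_lock(unsigned int *lock)
{
    unsigned int old, new;

    do {
        old = __atomic_load_n(lock, __ATOMIC_RELAXED);
        new = old & ~0x3u;          /* drop both owned and pending */
    } while (!__atomic_compare_exchange_n(lock, &old, new, 0,
                                          __ATOMIC_RELEASE, __ATOMIC_RELAXED));
}

int main(void)
{
    unsigned int lock = 0;

    printf("first acquire:  %d (lock=%#x)\n",
           sketch_acquire_global_lock(&lock), lock);
    printf("second acquire: %d (lock=%#x)\n",
           sketch_acquire_global_lock(&lock), lock);
    sketch_release_global_lock(&lock);
    printf("after release:  lock=%#x\n", lock);
    return 0;
}
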
/kernel/linux/linux-6.6/arch/ia64/include/asm/
bitops.h
42 __u32 bit, old, new; in set_bit() local
50 old = *m; in set_bit()
51 new = old | bit; in set_bit()
52 } while (cmpxchg_acq(m, old, new) != old); in set_bit()
83 __u32 mask, old, new; in clear_bit() local
91 old = *m; in clear_bit()
92 new = old & mask; in clear_bit()
93 } while (cmpxchg_acq(m, old, new) != old); in clear_bit()
107 __u32 mask, old, new; clear_bit_unlock() local
164 __u32 bit, old, new; change_bit() local
203 __u32 bit, old, new; test_and_set_bit() local
257 __u32 mask, old, new; test_and_clear_bit() local
302 __u32 bit, old, new; test_and_change_bit() local
326 __u32 old, bit = (1 << (nr & 31)); arch___test_and_change_bit() local
[all...]
acenv.h
22 unsigned int old, new, val; in ia64_acpi_acquire_global_lock() local
24 old = *lock; in ia64_acpi_acquire_global_lock()
25 new = (((old & ~0x3) + 2) + ((old >> 1) & 0x1)); in ia64_acpi_acquire_global_lock()
26 val = ia64_cmpxchg4_acq(lock, new, old); in ia64_acpi_acquire_global_lock()
27 } while (unlikely (val != old)); in ia64_acpi_acquire_global_lock()
34 unsigned int old, new, val; in ia64_acpi_release_global_lock() local
36 old = *lock; in ia64_acpi_release_global_lock()
37 new = old & ~0x3; in ia64_acpi_release_global_lock()
38 val = ia64_cmpxchg4_acq(lock, new, old); in ia64_acpi_release_global_lock()
[all...]
/kernel/linux/linux-6.6/arch/s390/include/asm/
atomic_ops.h
50 op_type old; \
53 op_string " %[old],%[val],%[ptr]\n" \
55 : [old] "=d" (old), [ptr] "+QS" (*ptr) \
57 return old; \
101 int old, new; \
104 "0: lr %[new],%[old]\n" \
106 " cs %[old],%[new],%[ptr]\n" \
108 : [old] "=d" (old), [ne
157 __atomic_cmpxchg(int *ptr, int old, int new) __atomic_cmpxchg() argument
167 __atomic_cmpxchg_bool(int *ptr, int old, int new) __atomic_cmpxchg_bool() argument
179 __atomic64_cmpxchg(long *ptr, long old, long new) __atomic64_cmpxchg() argument
189 __atomic64_cmpxchg_bool(long *ptr, long old, long new) __atomic64_cmpxchg_bool() argument
[all...]
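
The atomic_ops.h matches come from macros that expand each atomic fetch-op either to one interlocked instruction or, on machines without that facility, to a compare-and-swap ("cs") loop that recomputes old OP val until the swap succeeds. A portable sketch of the CAS-loop flavour for fetch-add, with the __atomic builtin standing in for the cs instruction:

#include <stdio.h>

static int sketch_atomic_fetch_add(int val, int *ptr)
{
    int old = __atomic_load_n(ptr, __ATOMIC_RELAXED);

    /* On failure the builtin refreshes 'old' with the current value,
     * so the loop simply retries with the new snapshot. */
    while (!__atomic_compare_exchange_n(ptr, &old, old + val, 0,
                                        __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST))
        ;
    return old;                     /* fetch-style ops return the old value */
}

int main(void)
{
    int counter = 40;

    printf("old=%d new=%d\n", sketch_atomic_fetch_add(2, &counter), counter);
    return 0;
}
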
cmpxchg.h
20 unsigned long old; in __arch_xchg() local
34 : "=&d" (old), "+Q" (*(int *) address) in __arch_xchg()
37 return old >> shift; in __arch_xchg()
48 : "=&d" (old), "+Q" (*(int *) address) in __arch_xchg()
51 return old >> shift; in __arch_xchg()
57 : "=&d" (old), "+Q" (*(int *) address) in __arch_xchg()
60 return old; in __arch_xchg()
66 : "=&d" (old), "+QS" (*(long *) address) in __arch_xchg()
69 return old; in __arch_xchg()
88 unsigned long old, in __cmpxchg()
87 __cmpxchg(unsigned long address, unsigned long old, unsigned long new, int size) __cmpxchg() argument
195 arch_cmpxchg128(volatile u128 *ptr, u128 old, u128 new) arch_cmpxchg128() argument
[all...]
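
__arch_xchg() above handles byte and halfword exchanges by operating on the aligned 32-bit word that contains them, which is why the previous value comes back as old >> shift. The sketch below shows the idea for one byte on a little-endian host; the real s390 code is big-endian and written in inline assembly, so the shift computation and the __atomic builtins here are illustrative assumptions.

#include <stdint.h>
#include <stdio.h>

static uint8_t sketch_xchg_u8(uint8_t *address, uint8_t value)
{
    /* Find the aligned word containing the byte and the byte's position. */
    uintptr_t aligned = (uintptr_t)address & ~(uintptr_t)3;
    unsigned shift = (unsigned)((uintptr_t)address - aligned) * 8;  /* LE */
    uint32_t *word = (uint32_t *)aligned;
    uint32_t mask = 0xffu << shift;
    uint32_t old, new;

    do {
        old = __atomic_load_n(word, __ATOMIC_RELAXED);
        new = (old & ~mask) | ((uint32_t)value << shift);
    } while (!__atomic_compare_exchange_n(word, &old, new, 0,
                                          __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST));

    return (uint8_t)(old >> shift);         /* previous value of that byte */
}

int main(void)
{
    _Alignas(4) uint8_t buf[4] = { 0x11, 0x22, 0x33, 0x44 };
    uint8_t prev = sketch_xchg_u8(&buf[2], 0xaa);

    printf("prev=%#x now=%#x\n", (unsigned)prev, (unsigned)buf[2]);
    return 0;
}
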
/kernel/linux/linux-5.10/arch/powerpc/include/asm/
cmpxchg.h
42 u32 __cmpxchg_##type##sfx(volatile void *p, u32 old, u32 new) \
49 old <<= bitoff; \
67 : "r" (p), "r" (old), "r" (new), "r" (prev_mask) \
202 * Compare and exchange - if *p == old, set it to new,
203 * and return the old value of *p.
216 __cmpxchg_u32(volatile unsigned int *p, unsigned long old, unsigned long new) in __cmpxchg_u32() argument
231 : "r" (p), "r" (old), "r" (new) in __cmpxchg_u32()
238 __cmpxchg_u32_local(volatile unsigned int *p, unsigned long old, in __cmpxchg_u32_local() argument
252 : "r" (p), "r" (old), "r" (new) in __cmpxchg_u32_local()
259 __cmpxchg_u32_relaxed(u32 *p, unsigned long old, unsigne argument
286 __cmpxchg_u32_acquire(u32 *p, unsigned long old, unsigned long new) __cmpxchg_u32_acquire() argument
308 __cmpxchg_u64(volatile unsigned long *p, unsigned long old, unsigned long new) __cmpxchg_u64() argument
330 __cmpxchg_u64_local(volatile unsigned long *p, unsigned long old, unsigned long new) __cmpxchg_u64_local() argument
351 __cmpxchg_u64_relaxed(u64 *p, unsigned long old, unsigned long new) __cmpxchg_u64_relaxed() argument
370 __cmpxchg_u64_acquire(u64 *p, unsigned long old, unsigned long new) __cmpxchg_u64_acquire() argument
392 __cmpxchg(volatile void *ptr, unsigned long old, unsigned long new, unsigned int size) __cmpxchg() argument
412 __cmpxchg_local(void *ptr, unsigned long old, unsigned long new, unsigned int size) __cmpxchg_local() argument
432 __cmpxchg_relaxed(void *ptr, unsigned long old, unsigned long new, unsigned int size) __cmpxchg_relaxed() argument
452 __cmpxchg_acquire(void *ptr, unsigned long old, unsigned long new, unsigned int size) __cmpxchg_acquire() argument
[all...]
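
powerpc's __cmpxchg() and its _local/_relaxed/_acquire variants dispatch on the operand size to width-specific helpers written with lwarx/stwcx. (load-reserved / store-conditional). A compressed sketch of that size dispatch, with the __atomic builtin standing in for the assembly helpers; only the 32- and 64-bit cases are shown.

#include <stdint.h>
#include <stdio.h>

static unsigned long sketch_cmpxchg(volatile void *ptr, unsigned long old,
                                    unsigned long new, unsigned int size)
{
    switch (size) {
    case 4: {
        uint32_t expected = (uint32_t)old;
        __atomic_compare_exchange_n((uint32_t *)ptr, &expected, (uint32_t)new,
                                    0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
        return expected;            /* cmpxchg() returns the previous value */
    }
    case 8: {
        uint64_t expected = (uint64_t)old;
        __atomic_compare_exchange_n((uint64_t *)ptr, &expected, (uint64_t)new,
                                    0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
        return (unsigned long)expected;
    }
    }
    return old;                     /* unsupported size; the kernel BUG()s */
}

int main(void)
{
    uint32_t v = 5;

    /* Succeeds: *ptr == old, so the new value is installed. */
    printf("prev=%u now=%u\n",
           (unsigned)sketch_cmpxchg(&v, 5, 9, sizeof(v)), (unsigned)v);
    /* Fails: *ptr != old, so the value is left untouched. */
    printf("prev=%u now=%u\n",
           (unsigned)sketch_cmpxchg(&v, 5, 1, sizeof(v)), (unsigned)v);
    return 0;
}
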
/kernel/linux/linux-6.6/arch/powerpc/include/asm/
cmpxchg.h
42 u32 __cmpxchg_##type##sfx(volatile void *p, u32 old, u32 new) \
49 old <<= bitoff; \
67 : "r" (p), "r" (old), "r" (new), "r" (prev_mask) \
269 * Compare and exchange - if *p == old, set it to new,
270 * and return the old value of *p.
283 __cmpxchg_u8(volatile unsigned char *p, unsigned long old, unsigned long new) in __cmpxchg_u8() argument
298 : "r" (p), "r" (old), "r" (new) in __cmpxchg_u8()
305 __cmpxchg_u8_local(volatile unsigned char *p, unsigned long old, in __cmpxchg_u8_local() argument
318 : "r" (p), "r" (old), "r" (new) in __cmpxchg_u8_local()
325 __cmpxchg_u8_relaxed(u8 *p, unsigned long old, unsigne argument
344 __cmpxchg_u8_acquire(u8 *p, unsigned long old, unsigned long new) __cmpxchg_u8_acquire() argument
364 __cmpxchg_u16(volatile unsigned short *p, unsigned long old, unsigned long new) __cmpxchg_u16() argument
385 __cmpxchg_u16_local(volatile unsigned short *p, unsigned long old, unsigned long new) __cmpxchg_u16_local() argument
405 __cmpxchg_u16_relaxed(u16 *p, unsigned long old, unsigned long new) __cmpxchg_u16_relaxed() argument
424 __cmpxchg_u16_acquire(u16 *p, unsigned long old, unsigned long new) __cmpxchg_u16_acquire() argument
445 __cmpxchg_u32(volatile unsigned int *p, unsigned long old, unsigned long new) __cmpxchg_u32() argument
467 __cmpxchg_u32_local(volatile unsigned int *p, unsigned long old, unsigned long new) __cmpxchg_u32_local() argument
488 __cmpxchg_u32_relaxed(u32 *p, unsigned long old, unsigned long new) __cmpxchg_u32_relaxed() argument
515 __cmpxchg_u32_acquire(u32 *p, unsigned long old, unsigned long new) __cmpxchg_u32_acquire() argument
537 __cmpxchg_u64(volatile unsigned long *p, unsigned long old, unsigned long new) __cmpxchg_u64() argument
559 __cmpxchg_u64_local(volatile unsigned long *p, unsigned long old, unsigned long new) __cmpxchg_u64_local() argument
580 __cmpxchg_u64_relaxed(u64 *p, unsigned long old, unsigned long new) __cmpxchg_u64_relaxed() argument
599 __cmpxchg_u64_acquire(u64 *p, unsigned long old, unsigned long new) __cmpxchg_u64_acquire() argument
621 __cmpxchg(volatile void *ptr, unsigned long old, unsigned long new, unsigned int size) __cmpxchg() argument
641 __cmpxchg_local(void *ptr, unsigned long old, unsigned long new, unsigned int size) __cmpxchg_local() argument
661 __cmpxchg_relaxed(void *ptr, unsigned long old, unsigned long new, unsigned int size) __cmpxchg_relaxed() argument
681 __cmpxchg_acquire(void *ptr, unsigned long old, unsigned long new, unsigned int size) __cmpxchg_acquire() argument
[all...]
/kernel/linux/linux-5.10/kernel/
tracepoint.c
142 static inline void release_probes(struct tracepoint_func *old) in release_probes() argument
144 if (old) { in release_probes()
145 struct tp_probes *tp_probes = container_of(old, in release_probes()
183 struct tracepoint_func *old, *new; in func_add() local
192 old = *funcs; in func_add()
193 if (old) { in func_add()
195 for (nr_probes = 0; old[nr_probes].func; nr_probes++) { in func_add()
197 if (pos < 0 && old[nr_probes].prio < prio) in func_add()
199 if (old[nr_probes].func == tp_func->func && in func_add()
200 old[nr_probe in func_add()
252 struct tracepoint_func *old, *new; func_remove() local
348 struct tracepoint_func *old, *tp_funcs; tracepoint_add_func() local
420 struct tracepoint_func *old, *tp_funcs; tracepoint_remove_func() local
[all...]
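
func_add() and func_remove() above never edit the live probe array: they allocate a new array with the probe added or removed (kept ordered by priority), publish it, and free the old one only after an RCU grace period via release_probes(). A single-threaded sketch of the add path; the struct layout, the sketch_* names, and the immediate free() in place of call_rcu() are simplifications.

#include <stdlib.h>
#include <stdio.h>

struct probe { void (*func)(void); int prio; };

/* Build a new NULL-terminated array containing 'old' plus 'tp_func', ordered
 * by descending priority.  On allocation failure the old array is returned
 * unchanged; *nr_probes is updated on success. */
static struct probe *sketch_func_add(struct probe *old, int *nr_probes,
                                     struct probe tp_func)
{
    int nr = *nr_probes;
    struct probe *new = malloc((nr + 2) * sizeof(*new));  /* +1 probe, +1 end */
    int i, pos = nr;

    if (!new)
        return old;

    for (i = 0; i < nr; i++)                /* find the insertion point */
        if (old[i].prio < tp_func.prio) { pos = i; break; }

    for (i = 0; i < pos; i++)
        new[i] = old[i];
    new[pos] = tp_func;
    for (i = pos; i < nr; i++)
        new[i + 1] = old[i];
    new[nr + 1].func = NULL;                /* terminator, as in the kernel */

    free(old);                              /* kernel: deferred via call_rcu() */
    *nr_probes = nr + 1;
    return new;
}

static void probe_a(void) { puts("A"); }
static void probe_b(void) { puts("B"); }

int main(void)
{
    struct probe *funcs = NULL;
    int nr = 0;

    funcs = sketch_func_add(funcs, &nr, (struct probe){ probe_a, 10 });
    funcs = sketch_func_add(funcs, &nr, (struct probe){ probe_b, 20 });
    for (int i = 0; funcs[i].func; i++)
        funcs[i].func();                    /* higher priority runs first: B, A */
    free(funcs);
    return 0;
}
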
/kernel/linux/linux-5.10/lib/
errseq.c
60 errseq_t cur, old; in errseq_set() local
71 old = READ_ONCE(*eseq); in errseq_set()
75 return old; in errseq_set()
81 new = (old & ~(MAX_ERRNO|ERRSEQ_SEEN)) | -err; in errseq_set()
84 if (old & ERRSEQ_SEEN) in errseq_set()
88 if (new == old) { in errseq_set()
94 cur = cmpxchg(eseq, old, new); in errseq_set()
100 if (likely(cur == old || cur == new)) in errseq_set()
104 old = cur; in errseq_set()
115 * If the error has been "seen", new callers will not see an old erro
124 errseq_t old = READ_ONCE(*eseq); errseq_sample() local
177 errseq_t old, new; errseq_check_and_advance() local
[all...]
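
errseq.c packs a negative errno, a "seen" flag, and a change counter into a single errseq_t, so errseq_set() bumps the counter only when the previous error has already been observed, and a sampler can tell a genuinely new error from one it has already reported. The sketch below is single-threaded and, for brevity, marks the value seen at sample time; the kernel installs values with cmpxchg() and marks them seen in errseq_check_and_advance(), so treat the exact flag handling here as an assumption.

#include <stdio.h>

typedef unsigned int errseq_t;

#define MAX_ERRNO      4095u          /* errno lives in the low 12 bits   */
#define ERRSEQ_SEEN    (1u << 12)     /* someone has sampled this error   */
#define ERRSEQ_CTR_INC (1u << 13)     /* counter occupies the upper bits  */

static void sketch_errseq_set(errseq_t *eseq, int err)
{
    errseq_t old = *eseq;
    errseq_t new = (old & ~(MAX_ERRNO | ERRSEQ_SEEN)) | (unsigned)-err;

    if (old & ERRSEQ_SEEN)
        new += ERRSEQ_CTR_INC;        /* old error was seen: new event */
    *eseq = new;
}

static errseq_t sketch_errseq_sample(errseq_t *eseq)
{
    *eseq |= ERRSEQ_SEEN;             /* simplification: mark seen on sample */
    return *eseq;
}

/* Report the stored errno only if anything changed since 'since'. */
static int sketch_errseq_check(const errseq_t *eseq, errseq_t since)
{
    errseq_t cur = *eseq;

    return cur == since ? 0 : -(int)(cur & MAX_ERRNO);
}

int main(void)
{
    errseq_t eseq = 0;

    sketch_errseq_set(&eseq, -5);                  /* record an error (-EIO) */
    errseq_t since = sketch_errseq_sample(&eseq);  /* "I have seen this one" */
    printf("%d\n", sketch_errseq_check(&eseq, since));   /* 0: nothing new   */
    sketch_errseq_set(&eseq, -28);                 /* new error (-ENOSPC)    */
    printf("%d\n", sketch_errseq_check(&eseq, since));   /* -28: new error   */
    return 0;
}
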
/kernel/linux/linux-6.6/lib/
errseq.c
61 errseq_t cur, old; in errseq_set() local
72 old = READ_ONCE(*eseq); in errseq_set()
76 return old; in errseq_set()
82 new = (old & ~(MAX_ERRNO|ERRSEQ_SEEN)) | -err; in errseq_set()
85 if (old & ERRSEQ_SEEN) in errseq_set()
89 if (new == old) { in errseq_set()
95 cur = cmpxchg(eseq, old, new); in errseq_set()
101 if (likely(cur == old || cur == new)) in errseq_set()
105 old = cur; in errseq_set()
116 * If the error has been "seen", new callers will not see an old erro
125 errseq_t old = READ_ONCE(*eseq); errseq_sample() local
178 errseq_t old, new; errseq_check_and_advance() local
[all...]
/kernel/linux/linux-5.10/include/linux/
refcount.h
32 * int old = atomic_fetch_add_relaxed(r);
33 * // old is INT_MAX, refcount now INT_MIN (0x8000_0000)
34 * if (old < 0)
152 int old = refcount_read(r); in __refcount_add_not_zero() local
155 if (!old) in __refcount_add_not_zero()
157 } while (!atomic_try_cmpxchg_relaxed(&r->refs, &old, old + i)); in __refcount_add_not_zero()
160 *oldp = old; in __refcount_add_not_zero()
162 if (unlikely(old < 0 || old in __refcount_add_not_zero()
193 int old = atomic_fetch_add_relaxed(i, &r->refs); __refcount_add() local
272 int old = atomic_fetch_sub_release(i, &r->refs); __refcount_sub_and_test() local
338 int old = atomic_fetch_sub_release(1, &r->refs); __refcount_dec() local
[all...]
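
__refcount_add_not_zero() above is an atomic_try_cmpxchg() loop that refuses to move a counter off zero, so a reference to an already-dead object is never handed out, and it treats negative counter values as the saturated range reached on overflow. A user-space sketch with a plain int and the __atomic builtins in place of atomic_t:

#include <stdbool.h>
#include <stdio.h>

static bool sketch_refcount_add_not_zero(int i, int *refs)
{
    int old = __atomic_load_n(refs, __ATOMIC_RELAXED);

    do {
        if (!old)
            return false;           /* object already freed: do not revive */
    } while (!__atomic_compare_exchange_n(refs, &old, old + i, 0,
                                          __ATOMIC_RELAXED, __ATOMIC_RELAXED));

    if (old < 0 || old + i < 0)
        fprintf(stderr, "refcount saturated or overflowed\n");

    return true;
}

int main(void)
{
    int live = 1, dead = 0;

    printf("live: %d (refs=%d)\n", sketch_refcount_add_not_zero(1, &live), live);
    printf("dead: %d (refs=%d)\n", sketch_refcount_add_not_zero(1, &dead), dead);
    return 0;
}
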
/kernel/linux/linux-6.6/include/linux/
refcount.h
32 * int old = atomic_fetch_add_relaxed(r);
33 * // old is INT_MAX, refcount now INT_MIN (0x8000_0000)
34 * if (old < 0)
152 int old = refcount_read(r); in __refcount_add_not_zero() local
155 if (!old) in __refcount_add_not_zero()
157 } while (!atomic_try_cmpxchg_relaxed(&r->refs, &old, old + i)); in __refcount_add_not_zero()
160 *oldp = old; in __refcount_add_not_zero()
162 if (unlikely(old < 0 || old in __refcount_add_not_zero()
193 int old = atomic_fetch_add_relaxed(i, &r->refs); __refcount_add() local
272 int old = atomic_fetch_sub_release(i, &r->refs); __refcount_sub_and_test() local
338 int old = atomic_fetch_sub_release(1, &r->refs); __refcount_dec() local
[all...]
/kernel/linux/linux-6.6/security/safesetid/
lsm.c
144 * Check whether a caller with old credentials @old is allowed to switch to
147 static bool id_permitted_for_cred(const struct cred *old, kid_t new_id, enum setid_type new_type) in id_permitted_for_cred() argument
151 /* If our old creds already had this ID in it, it's fine. */ in id_permitted_for_cred()
153 if (uid_eq(new_id.uid, old->uid) || uid_eq(new_id.uid, old->euid) || in id_permitted_for_cred()
154 uid_eq(new_id.uid, old->suid)) in id_permitted_for_cred()
157 if (gid_eq(new_id.gid, old->gid) || gid_eq(new_id.gid, old->egid) || in id_permitted_for_cred()
158 gid_eq(new_id.gid, old in id_permitted_for_cred()
190 safesetid_task_fix_setuid(struct cred *new, const struct cred *old, int flags) safesetid_task_fix_setuid() argument
214 safesetid_task_fix_setgid(struct cred *new, const struct cred *old, int flags) safesetid_task_fix_setgid() argument
238 safesetid_task_fix_setgroups(struct cred *new, const struct cred *old) safesetid_task_fix_setgroups() argument
[all...]
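
id_permitted_for_cred() first allows any transition whose target ID already appears in the caller's old credentials as the real, effective, or saved ID, and only then consults the configured allow-list policy. A sketch of just that first check, using a small stand-in for the kernel's struct cred and the POSIX uid_t type:

#include <stdbool.h>
#include <stdio.h>
#include <sys/types.h>

struct sketch_cred { uid_t uid, euid, suid; };

/* Mirror of the "old creds already had this ID" shortcut for UIDs. */
static bool uid_already_held(const struct sketch_cred *old, uid_t new_uid)
{
    return new_uid == old->uid || new_uid == old->euid || new_uid == old->suid;
}

int main(void)
{
    struct sketch_cred old = { .uid = 1000, .euid = 0, .suid = 1000 };

    printf("%d\n", uid_already_held(&old, 0));    /* 1: already the euid     */
    printf("%d\n", uid_already_held(&old, 33));   /* 0: must pass the policy */
    return 0;
}
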
/kernel/linux/linux-6.6/kernel/
tracepoint.c
142 static inline void release_probes(struct tracepoint_func *old) in release_probes() argument
144 if (old) { in release_probes()
145 struct tp_probes *tp_probes = container_of(old, in release_probes()
183 struct tracepoint_func *old, *new; in func_add() local
184 int iter_probes; /* Iterate over old probe array. */ in func_add()
192 old = *funcs; in func_add()
193 if (old) { in func_add()
195 for (iter_probes = 0; old[iter_probes].func; iter_probes++) { in func_add()
196 if (old[iter_probes].func == tp_stub_func) in func_add()
198 if (old[iter_probe in func_add()
236 struct tracepoint_func *old, *new; func_remove() local
327 struct tracepoint_func *old, *tp_funcs; tracepoint_add_func() local
399 struct tracepoint_func *old, *tp_funcs; tracepoint_remove_func() local
[all...]
/kernel/linux/linux-5.10/arch/s390/include/asm/
atomic_ops.h
16 op_type old; \
19 op_string " %[old],%[val],%[ptr]\n" \
21 : [old] "=d" (old), [ptr] "+Q" (*ptr) \
23 return old; \
67 int old, new; \
70 "0: lr %[new],%[old]\n" \
72 " cs %[old],%[new],%[ptr]\n" \
74 : [old] "=d" (old), [ne
123 __atomic_cmpxchg(int *ptr, int old, int new) __atomic_cmpxchg() argument
128 __atomic_cmpxchg_bool(int *ptr, int old, int new) __atomic_cmpxchg_bool() argument
133 __atomic64_cmpxchg(long *ptr, long old, long new) __atomic64_cmpxchg() argument
138 __atomic64_cmpxchg_bool(long *ptr, long old, long new) __atomic64_cmpxchg_bool() argument
[all...]
/kernel/linux/linux-5.10/arch/arm/kernel/
ftrace.c
77 static int ftrace_modify_code(unsigned long pc, unsigned long old, in ftrace_modify_code() argument
83 old = __opcode_to_mem_thumb32(old); in ftrace_modify_code()
85 old = __opcode_to_mem_arm(old); in ftrace_modify_code()
92 if (replaced != old) in ftrace_modify_code()
126 unsigned long new, old; in ftrace_make_call() local
135 old = ftrace_nop_replace(rec); in ftrace_make_call()
145 return ftrace_modify_code(rec->ip, old, new, true); in ftrace_make_call()
153 unsigned long new, old; in ftrace_modify_call() local
170 unsigned long old; ftrace_make_nop() local
208 unsigned long old; prepare_ftrace_return() local
234 unsigned long old = enable ? nop : branch; __ftrace_modify_caller() local
[all...]
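
ftrace_modify_code() on ARM patches an instruction slot only after reading back what is currently there and comparing it against the opcode it expects to replace (the original call or the nop), refusing the patch on a mismatch instead of silently overwriting it. A sketch of that verify-then-write pattern, with an ordinary array standing in for kernel text and memcpy() in place of the kernel's text-patching helpers:

#include <stdint.h>
#include <string.h>
#include <stdio.h>
#include <errno.h>

static int sketch_modify_code(uint32_t *pc, uint32_t old, uint32_t new,
                              int validate)
{
    uint32_t replaced;

    if (validate) {
        memcpy(&replaced, pc, sizeof(replaced));  /* read the current insn */
        if (replaced != old)
            return -EINVAL;                       /* unexpected contents   */
    }
    memcpy(pc, &new, sizeof(new));                /* install the new insn  */
    return 0;
}

int main(void)
{
    uint32_t text[1] = { 0xe1a00000 };            /* ARM "mov r0, r0" nop  */

    /* Succeeds: the slot holds the nop we expect, so swap in a branch-and-link. */
    printf("%d\n", sketch_modify_code(text, 0xe1a00000, 0xeb000000, 1));
    /* Fails: the slot no longer holds what we expect. */
    printf("%d\n", sketch_modify_code(text, 0xe1a00000, 0xeb000001, 1));
    return 0;
}
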
/kernel/linux/linux-5.10/arch/sparc/kernel/
ftrace.c
26 static int ftrace_modify_code(unsigned long ip, u32 old, u32 new) in ftrace_modify_code() argument
32 "1: cas [%[ip]], %[old], %[new]\n" in ftrace_modify_code()
47 : [new] "0" (new), [old] "r" (old), [ip] "r" (ip) in ftrace_modify_code()
50 if (replaced != old && replaced != new) in ftrace_modify_code()
59 u32 old, new; in ftrace_make_nop() local
61 old = ftrace_call_replace(ip, addr); in ftrace_make_nop()
63 return ftrace_modify_code(ip, old, new); in ftrace_make_nop()
69 u32 old, new; in ftrace_make_call() local
71 old in ftrace_make_call()
79 u32 old, new; ftrace_update_ftrace_func() local
100 u32 old, new; ftrace_enable_ftrace_graph_caller() local
110 u32 old, new; ftrace_disable_ftrace_graph_caller() local
[all...]
/kernel/linux/linux-6.6/arch/sparc/kernel/
ftrace.c
26 static int ftrace_modify_code(unsigned long ip, u32 old, u32 new) in ftrace_modify_code() argument
32 "1: cas [%[ip]], %[old], %[new]\n" in ftrace_modify_code()
47 : [new] "0" (new), [old] "r" (old), [ip] "r" (ip) in ftrace_modify_code()
50 if (replaced != old && replaced != new) in ftrace_modify_code()
59 u32 old, new; in ftrace_make_nop() local
61 old = ftrace_call_replace(ip, addr); in ftrace_make_nop()
63 return ftrace_modify_code(ip, old, new); in ftrace_make_nop()
69 u32 old, new; in ftrace_make_call() local
71 old in ftrace_make_call()
79 u32 old, new; ftrace_update_ftrace_func() local
95 u32 old, new; ftrace_enable_ftrace_graph_caller() local
105 u32 old, new; ftrace_disable_ftrace_graph_caller() local
[all...]
/kernel/linux/linux-5.10/security/safesetid/
lsm.c
153 * Check whether a caller with old credentials @old is allowed to switch to
156 static bool id_permitted_for_cred(const struct cred *old, kid_t new_id, enum setid_type new_type) in id_permitted_for_cred() argument
160 /* If our old creds already had this ID in it, it's fine. */ in id_permitted_for_cred()
162 if (uid_eq(new_id.uid, old->uid) || uid_eq(new_id.uid, old->euid) || in id_permitted_for_cred()
163 uid_eq(new_id.uid, old->suid)) in id_permitted_for_cred()
166 if (gid_eq(new_id.gid, old->gid) || gid_eq(new_id.gid, old->egid) || in id_permitted_for_cred()
167 gid_eq(new_id.gid, old in id_permitted_for_cred()
199 safesetid_task_fix_setuid(struct cred *new, const struct cred *old, int flags) safesetid_task_fix_setuid() argument
223 safesetid_task_fix_setgid(struct cred *new, const struct cred *old, int flags) safesetid_task_fix_setgid() argument
[all...]
/kernel/linux/linux-5.10/arch/s390/lib/
spinlock.c
84 static inline int arch_cmpxchg_niai8(int *lock, int old, int new) in arch_cmpxchg_niai8() argument
86 int expected = old; in arch_cmpxchg_niai8()
91 : "=d" (old), "=Q" (*lock) in arch_cmpxchg_niai8()
92 : "0" (old), "d" (new), "Q" (*lock) in arch_cmpxchg_niai8()
94 return expected == old; in arch_cmpxchg_niai8()
120 int lockval, ix, node_id, tail_id, old, new, owner, count; in arch_spin_lock_queued() local
131 old = READ_ONCE(lp->lock); in arch_spin_lock_queued()
132 if ((old & _Q_LOCK_CPU_MASK) == 0 && in arch_spin_lock_queued()
133 (old & _Q_LOCK_STEAL_MASK) != _Q_LOCK_STEAL_MASK) { in arch_spin_lock_queued()
141 new = (old in arch_spin_lock_queued()
213 int lockval, old, new, owner, count; arch_spin_lock_classic() local
292 int old; arch_write_lock_wait() local
[all...]
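
arch_spin_lock_classic() keeps the owner's lock value (derived from its CPU number) in the lock word, so taking the lock is a compare-and-swap from zero to that value, retried while another owner is present. The sketch below keeps only that core; the queued-waiter path, backoff, and the niai-hinted cmpxchg are omitted, and the "CPU number plus one" encoding noted in the comment is an assumption.

#include <stdio.h>

static void sketch_spin_lock(int *lock, int lockval)
{
    int old = 0;

    /* CAS 0 -> lockval; on failure 'old' holds the current owner, so
     * reset it and try again until the lock is observed free. */
    while (!__atomic_compare_exchange_n(lock, &old, lockval, 0,
                                        __ATOMIC_ACQUIRE, __ATOMIC_RELAXED))
        old = 0;
}

static void sketch_spin_unlock(int *lock)
{
    __atomic_store_n(lock, 0, __ATOMIC_RELEASE);
}

int main(void)
{
    int lock = 0;
    int lockval = 1;        /* e.g. CPU number + 1, so 0 can mean "free" */

    sketch_spin_lock(&lock, lockval);
    printf("owner: %d\n", lock);
    sketch_spin_unlock(&lock);
    printf("owner: %d\n", lock);
    return 0;
}
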

