/kernel/linux/linux-5.10/arch/s390/include/asm/preempt.h
   15:  static inline int preempt_count(void)
   17:      return READ_ONCE(S390_lowcore.preempt_count) & ~PREEMPT_NEED_RESCHED;
   25:      old = READ_ONCE(S390_lowcore.preempt_count);   (in preempt_count_set())
   28:      } while (__atomic_cmpxchg(&S390_lowcore.preempt_count,   (in preempt_count_set())
   34:      __atomic_and(~PREEMPT_NEED_RESCHED, &S390_lowcore.preempt_count);   (in set_preempt_need_resched())
   39:      __atomic_or(PREEMPT_NEED_RESCHED, &S390_lowcore.preempt_count);   (in clear_preempt_need_resched())
   44:      return !(READ_ONCE(S390_lowcore.preempt_count) & PREEMPT_NEED_RESCHED);   (in test_preempt_need_resched())
   55:      __atomic_add_const(val, &S390_lowcore.preempt_count);   (in __preempt_count_add())
   59:      __atomic_add(val, &S390_lowcore.preempt_count);   (in __preempt_count_add())
   69:      return __atomic_add(-1, &S390_lowcore.preempt_count)   (in __preempt_count_dec_and_test())
   82:  static inline int preempt_count(void)
   [more matches elided]
/kernel/linux/linux-6.6/arch/s390/include/asm/preempt.h
   15:  static inline int preempt_count(void)
   17:      return READ_ONCE(S390_lowcore.preempt_count) & ~PREEMPT_NEED_RESCHED;
   25:      old = READ_ONCE(S390_lowcore.preempt_count);   (in preempt_count_set())
   28:      } while (__atomic_cmpxchg(&S390_lowcore.preempt_count,   (in preempt_count_set())
   34:      __atomic_and(~PREEMPT_NEED_RESCHED, &S390_lowcore.preempt_count);   (in set_preempt_need_resched())
   39:      __atomic_or(PREEMPT_NEED_RESCHED, &S390_lowcore.preempt_count);   (in clear_preempt_need_resched())
   44:      return !(READ_ONCE(S390_lowcore.preempt_count) & PREEMPT_NEED_RESCHED);   (in test_preempt_need_resched())
   55:      __atomic_add_const(val, &S390_lowcore.preempt_count);   (in __preempt_count_add())
   59:      __atomic_add(val, &S390_lowcore.preempt_count);   (in __preempt_count_add())
   69:      return __atomic_add(-1, &S390_lowcore.preempt_count)   (in __preempt_count_dec_and_test())
   82:  static inline int preempt_count(void)
   [more matches elided]
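Note the inverted polarity visible in the s390 matches above: set_preempt_need_resched() ANDs the PREEMPT_NEED_RESCHED bit *off* and clear_preempt_need_resched() ORs it *on*, so the whole word reads as zero exactly when the nesting count is zero and a reschedule is pending. A minimal user-space model of that encoding (the bit value and the plain global are illustrative assumptions, not the kernel's lowcore):

    #include <stdio.h>
    #include <stdbool.h>

    /* Illustrative stand-in; the kernel derives this differently. */
    #define PREEMPT_NEED_RESCHED 0x80000000u

    static unsigned int pc = PREEMPT_NEED_RESCHED;  /* bit set == no resched needed */

    static unsigned int preempt_count(void)
    {
        /* Mask the flag off so callers see only the nesting count. */
        return pc & ~PREEMPT_NEED_RESCHED;
    }

    static void set_preempt_need_resched(void)
    {
        pc &= ~PREEMPT_NEED_RESCHED;    /* inverted: clearing the bit requests resched */
    }

    static bool dec_and_test(void)
    {
        /* With inverted polarity, the whole word hits 0 exactly when the
         * nesting count reaches 0 *and* a reschedule is pending. */
        return --pc == 0;
    }

    int main(void)
    {
        pc += 1;                        /* models preempt_disable() */
        set_preempt_need_resched();
        printf("count=%u, resched fires=%d\n", preempt_count(), dec_and_test());
        return 0;
    }

The payoff of the inversion is the single-instruction exit test: one decrement-and-compare against zero covers both "still nested" and "no resched wanted".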
/kernel/linux/linux-6.6/arch/x86/include/asm/preempt.h
   25:  static __always_inline int preempt_count(void)
   27:      return raw_cpu_read_4(pcpu_hot.preempt_count) & ~PREEMPT_NEED_RESCHED;
   35:      old = raw_cpu_read_4(pcpu_hot.preempt_count);   (in preempt_count_set())
   38:      } while (raw_cpu_cmpxchg_4(pcpu_hot.preempt_count, old, new) != old);   (in preempt_count_set())
   47:      per_cpu(pcpu_hot.preempt_count, (cpu)) = PREEMPT_DISABLED; \
   61:      raw_cpu_and_4(pcpu_hot.preempt_count, ~PREEMPT_NEED_RESCHED);   (in set_preempt_need_resched())
   66:      raw_cpu_or_4(pcpu_hot.preempt_count, PREEMPT_NEED_RESCHED);   (in clear_preempt_need_resched())
   71:      return !(raw_cpu_read_4(pcpu_hot.preempt_count) & PREEMPT_NEED_RESCHED);   (in test_preempt_need_resched())
   75:   * The various preempt_count add/sub methods
   80:      raw_cpu_add_4(pcpu_hot.preempt_count, val);   (in __preempt_count_add())
   [more matches elided]
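x86 keeps the count in per-CPU data (pcpu_hot) rather than in thread_info, and preempt_count_set() must replace the count without losing a concurrently-set resched bit, hence the old/new cmpxchg loop at lines 35 and 38. A rough sketch of that loop using C11 atomics as a stand-in for the kernel's raw_cpu_cmpxchg_4 (an assumption; the real primitive is a per-CPU, not a globally atomic, operation):

    #include <stdatomic.h>
    #include <stdio.h>

    #define PREEMPT_NEED_RESCHED 0x80000000u

    static _Atomic unsigned int pc;

    /* Replace the nesting count while preserving the current resched bit,
     * mirroring the old/new cmpxchg loop in preempt_count_set() above. */
    static void preempt_count_set(unsigned int new_count)
    {
        unsigned int old = atomic_load(&pc);
        unsigned int new;

        do {
            new = (old & PREEMPT_NEED_RESCHED) | (new_count & ~PREEMPT_NEED_RESCHED);
        } while (!atomic_compare_exchange_weak(&pc, &old, new));
    }

    int main(void)
    {
        atomic_store(&pc, PREEMPT_NEED_RESCHED | 3);
        preempt_count_set(1);
        printf("0x%08x\n", atomic_load(&pc));   /* 0x80000001: flag bit survived */
        return 0;
    }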
/kernel/linux/linux-6.6/include/linux/preempt.h
    7:   * preempt_count (used for kernel preemption, interrupt count, etc.)
   68:   * Initial preempt_count value; reflects the preempt_count schedule invariant
   71:   *    preempt_count() == 2*PREEMPT_DISABLE_OFFSET
   78:  /* preempt_count() and related functions, depends on PREEMPT_NEED_RESCHED */
   92:      unsigned long pc = preempt_count();   (in interrupt_context_level())
  103:   * These macro definitions avoid redundant invocations of preempt_count()
  105:   * preempt_count() is commonly implemented with READ_ONCE().
  108:  #define nmi_count()     (preempt_count() & NMI_MASK)
  109:  #define hardirq_count() (preempt_count() & HARDIRQ_MASK)
   [more matches elided]
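The nmi_count()/hardirq_count() macros matched above work because the core header carves the single preempt_count word into bit fields, one per context type. A small decoder over the usual field layout in recent kernels (the mask values below are the common configuration and are stated here as an assumption, since widths can differ):

    #include <stdio.h>

    /* Usual layout:
     *   bits  0-7   PREEMPT  explicit preempt_disable() nesting
     *   bits  8-15  SOFTIRQ  softirq nesting / serving-softirq marker
     *   bits 16-19  HARDIRQ  hardirq nesting
     *   bits 20-23  NMI      NMI nesting
     */
    #define PREEMPT_MASK 0x000000ffu
    #define SOFTIRQ_MASK 0x0000ff00u
    #define HARDIRQ_MASK 0x000f0000u
    #define NMI_MASK     0x00f00000u

    static void decode(unsigned int pc)
    {
        printf("pc=0x%06x: preempt=%u softirq=%u hardirq=%u nmi=%u\n",
               pc,
               pc & PREEMPT_MASK,
               (pc & SOFTIRQ_MASK) >> 8,
               (pc & HARDIRQ_MASK) >> 16,
               (pc & NMI_MASK) >> 20);
    }

    int main(void)
    {
        decode(0x010001);   /* one hardirq level on top of one preempt_disable() */
        return 0;
    }

Because every field lives in one word, a single masked read answers "what context am I in?", which is exactly what the in_nmi()/in_task()-style predicates in these headers are built from.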
/kernel/linux/linux-5.10/include/asm-generic/preempt.h
    9:  static __always_inline int preempt_count(void)
   11:      return READ_ONCE(current_thread_info()->preempt_count);
   16:      return &current_thread_info()->preempt_count;   (in preempt_count_ptr())
   28:      task_thread_info(p)->preempt_count = FORK_PREEMPT_COUNT; \
   32:      task_thread_info(p)->preempt_count = PREEMPT_DISABLED; \
   49:   * The various preempt_count add/sub methods
   77:      return unlikely(preempt_count() == preempt_offset &&   (in should_resched())
/kernel/linux/linux-6.6/include/asm-generic/preempt.h
    9:  static __always_inline int preempt_count(void)
   11:      return READ_ONCE(current_thread_info()->preempt_count);
   16:      return &current_thread_info()->preempt_count;   (in preempt_count_ptr())
   28:      task_thread_info(p)->preempt_count = FORK_PREEMPT_COUNT; \
   32:      task_thread_info(p)->preempt_count = PREEMPT_DISABLED; \
   49:   * The various preempt_count add/sub methods
   77:      return unlikely(preempt_count() == preempt_offset &&   (in should_resched())
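Unlike the s390/x86 variants, the asm-generic version keeps the count in thread_info and does not fold the resched flag into the word, so the decrement must be paired with a separate need-resched test. A rough user-space rendering of that pattern (the thread-info struct and the boolean flag are stand-ins for the real thread_info and TIF_NEED_RESCHED bit):

    #include <stdbool.h>
    #include <stdio.h>

    struct thread_info_model {
        int preempt_count;   /* 0 => preemptible */
        bool need_resched;   /* stand-in for the TIF_NEED_RESCHED bit */
    };

    static struct thread_info_model ti = { .preempt_count = 1, .need_resched = true };

    static int *preempt_count_ptr(void)
    {
        return &ti.preempt_count;
    }

    /* Mirrors the generic pattern: reschedule only when the count just
     * dropped to zero *and* the flag is set; two separate reads, unlike
     * the folded single-word schemes above. */
    static bool __preempt_count_dec_and_test(void)
    {
        return !--*preempt_count_ptr() && ti.need_resched;
    }

    int main(void)
    {
        printf("%d\n", __preempt_count_dec_and_test());   /* 1: count hit 0, flag set */
        return 0;
    }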
/kernel/linux/linux-5.10/arch/arm64/include/asm/preempt.h
   10:  static inline int preempt_count(void)
   22:      task_thread_info(p)->preempt_count = FORK_PREEMPT_COUNT; \
   26:      task_thread_info(p)->preempt_count = PREEMPT_DISABLED; \
   61:      u64 pc = READ_ONCE(ti->preempt_count);   (in __preempt_count_dec_and_test())
   69:   * preempt_count in case the need_resched flag was cleared by an   (in __preempt_count_dec_and_test())
   73:      return !pc || !READ_ONCE(ti->preempt_count);   (in __preempt_count_dec_and_test())
   78:      u64 pc = READ_ONCE(current_thread_info()->preempt_count);   (in should_resched())
/kernel/linux/linux-5.10/include/linux/preempt.h
    7:   * preempt_count (used for kernel preemption, interrupt count, etc.)
   67:   * Initial preempt_count value; reflects the preempt_count schedule invariant
   70:   *    preempt_count() == 2*PREEMPT_DISABLE_OFFSET
   77:  /* preempt_count() and related functions, depends on PREEMPT_NEED_RESCHED */
   80:  #define hardirq_count() (preempt_count() & HARDIRQ_MASK)
   81:  #define softirq_count() (preempt_count() & SOFTIRQ_MASK)
   82:  #define irq_count()     (preempt_count() & (HARDIRQ_MASK | SOFTIRQ_MASK \
  102:  #define in_nmi()        (preempt_count() & NMI_MASK)
  103:  #define in_task()       (!(preempt_count() & (NMI_MASK | HARDIRQ_MASK | SOFTIRQ_OFFSET)))
   [more matches elided]
/kernel/linux/linux-6.6/arch/arm64/include/asm/preempt.h
   11:  static inline int preempt_count(void)
   23:      task_thread_info(p)->preempt_count = FORK_PREEMPT_COUNT; \
   27:      task_thread_info(p)->preempt_count = PREEMPT_DISABLED; \
   62:      u64 pc = READ_ONCE(ti->preempt_count);   (in __preempt_count_dec_and_test())
   70:   * preempt_count in case the need_resched flag was cleared by an   (in __preempt_count_dec_and_test())
   74:      return !pc || !READ_ONCE(ti->preempt_count);   (in __preempt_count_dec_and_test())
   79:      u64 pc = READ_ONCE(current_thread_info()->preempt_count);   (in should_resched())
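The arm64 matches read preempt_count as a u64: the low 32 bits hold the nesting count and the upper 32 bits mirror need_resched with inverted polarity, so the whole doubleword is zero exactly when the task is both preemptible and due for a reschedule. A little-endian model of that union trick (endianness and field names here are assumptions of the sketch; the kernel selects field order by CONFIG_CPU_BIG_ENDIAN):

    #include <stdint.h>
    #include <stdio.h>
    #include <stdbool.h>

    union preempt_model {
        uint64_t preempt_count;
        struct {
            uint32_t count;          /* nesting count */
            uint32_t need_resched;   /* inverted: 1 == no resched needed */
        } preempt;
    };

    static union preempt_model ti = { .preempt = { .count = 1, .need_resched = 1 } };

    static void set_preempt_need_resched(void)
    {
        ti.preempt.need_resched = 0;     /* inverted polarity */
    }

    static bool dec_and_test(void)
    {
        uint64_t pc = ti.preempt_count;
        ti.preempt.count = (uint32_t)--pc;   /* only the count word is written back */
        /* Re-read the full 64 bits in case an interrupt changed the flag
         * between the plain read and write (as the comment at line 69/70
         * of the matched source explains). */
        return !pc || !ti.preempt_count;
    }

    int main(void)
    {
        set_preempt_need_resched();
        printf("%d\n", dec_and_test());   /* 1: count hit 0 with resched pending */
        return 0;
    }

Writing back only the 32-bit count word is what lets an interrupt update the flag word concurrently without atomics on the fast path.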
/kernel/linux/linux-5.10/tools/testing/radix-tree/linux/preempt.h
    5:  extern int preempt_count;
    7:  #define preempt_disable()  uatomic_inc(&preempt_count)
    8:  #define preempt_enable()   uatomic_dec(&preempt_count)
/kernel/linux/linux-6.6/tools/testing/radix-tree/linux/preempt.h
    5:  extern int preempt_count;
    7:  #define preempt_disable()  uatomic_inc(&preempt_count)
    8:  #define preempt_enable()   uatomic_dec(&preempt_count)
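In the user-space radix-tree test harness, preempt_count is just a shared counter bumped by liburcu's uatomic helpers, which is what lets the tests assert that every preempt_disable() was paired. A hedged sketch of that style of check (the test function is hypothetical; only the stub macros come from the harness, and liburcu's headers are required):

    #include <assert.h>
    #include <urcu/uatomic.h>

    int preempt_count;   /* the harness keeps this in one shared place */

    #define preempt_disable() uatomic_inc(&preempt_count)
    #define preempt_enable()  uatomic_dec(&preempt_count)

    static void some_locked_operation(void)
    {
        preempt_disable();
        /* ... exercise the data structure under test ... */
        preempt_enable();
    }

    int main(void)
    {
        some_locked_operation();
        /* A non-zero value here means a preempt_disable() leaked. */
        assert(uatomic_read(&preempt_count) == 0);
        return 0;
    }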
/kernel/linux/linux-5.10/tools/testing/radix-tree/main.c
  244, 248, 252, 256, 260, 264, 269, 273, 281:
        nr_allocated, preempt_count);   (in single_thread_tests())
  324:  nr_allocated, preempt_count);   (in main())
   [more matches elided]
/kernel/linux/linux-6.6/tools/testing/radix-tree/main.c
  244, 248, 252, 256, 260, 264, 269, 273, 281:
        nr_allocated, preempt_count);   (in single_thread_tests())
  324:  nr_allocated, preempt_count);   (in main())
   [more matches elided]
/kernel/linux/linux-5.10/kernel/trace/trace_irqsoff.c
  122:  if (!irqs_disabled_flags(*flags) && !preempt_count())   (in func_prolog_dec())
  150:  trace_function(tr, ip, parent_ip, flags, preempt_count());   (in irqsoff_tracer_call())
  198:  pc = preempt_count();   (in irqsoff_graph_entry())
  217:  pc = preempt_count();   (in irqsoff_graph_return())
  334:  pc = preempt_count();   (in check_critical_timing())
  442:  int pc = preempt_count();   (in start_critical_timings())
  452:  int pc = preempt_count();   (in stop_critical_timings())
  617:  unsigned int pc = preempt_count();   (in tracer_hardirqs_on())
  626:  unsigned int pc = preempt_count();   (in tracer_hardirqs_off())
  669:  int pc = preempt_count();   (in tracer_preempt_on())
   [more matches elided]
/kernel/linux/linux-5.10/arch/sh/kernel/irq.c
   92:   * Copy the softirq bits in preempt_count so that the   (in handle_one_irq())
   95:  irqctx->tinfo.preempt_count =   (in handle_one_irq())
   96:      (irqctx->tinfo.preempt_count & ~SOFTIRQ_MASK) |   (in handle_one_irq())
   97:      (curctx->tinfo.preempt_count & SOFTIRQ_MASK);   (in handle_one_irq())
  129:  irqctx->tinfo.preempt_count = HARDIRQ_OFFSET;   (in irq_ctx_init())
  137:  irqctx->tinfo.preempt_count = 0;   (in irq_ctx_init())
/kernel/linux/linux-6.6/arch/sh/kernel/irq.c
   93:   * Copy the softirq bits in preempt_count so that the   (in handle_one_irq())
   96:  irqctx->tinfo.preempt_count =   (in handle_one_irq())
   97:      (irqctx->tinfo.preempt_count & ~SOFTIRQ_MASK) |   (in handle_one_irq())
   98:      (curctx->tinfo.preempt_count & SOFTIRQ_MASK);   (in handle_one_irq())
  130:  irqctx->tinfo.preempt_count = HARDIRQ_OFFSET;   (in irq_ctx_init())
  138:  irqctx->tinfo.preempt_count = 0;   (in irq_ctx_init())
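When sh switches onto its separate hardirq stack, handle_one_irq() copies only the SOFTIRQ bits from the interrupted context's preempt_count into the irq stack's thread_info, so softirq state stays visible while hardirq accounting starts fresh. The merge itself is plain mask arithmetic; a self-contained sketch (the SOFTIRQ_MASK value is the usual 8-bits-at-offset-8 layout, assumed here):

    #include <stdio.h>

    #define SOFTIRQ_MASK 0x0000ff00u

    /* Keep everything in the irq context's count except the softirq
     * field, which is inherited from the interrupted context. */
    static unsigned int merge_softirq_bits(unsigned int irqctx_pc,
                                           unsigned int curctx_pc)
    {
        return (irqctx_pc & ~SOFTIRQ_MASK) | (curctx_pc & SOFTIRQ_MASK);
    }

    int main(void)
    {
        unsigned int irqctx_pc = 0x00010000;   /* HARDIRQ_OFFSET: one hardirq level */
        unsigned int curctx_pc = 0x00000100;   /* interrupted while in softirq */
        printf("0x%08x\n", merge_softirq_bits(irqctx_pc, curctx_pc));  /* 0x00010100 */
        return 0;
    }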
/kernel/linux/linux-5.10/tools/testing/selftests/rcutorture/formal/srcu-cbmc/src/preempt.h
   33:  static inline int preempt_count(void)
   40:      return !preempt_count();   (in preemptible())
/kernel/linux/linux-5.10/arch/c6x/include/asm/thread_info.h
   42:  int preempt_count;   /* 0 = preemptable, <0 = BUG */
   49:   * preempt_count needs to be 1 initially, until the scheduler is functional.
   56:  .preempt_count = INIT_PREEMPT_COUNT, \
/kernel/linux/linux-6.6/arch/m68k/include/asm/thread_info.h
   29:  int preempt_count;   /* 0 => preemptable, <0 => BUG */
   38:  .preempt_count = INIT_PREEMPT_COUNT, \
/kernel/linux/linux-6.6/arch/nios2/include/asm/thread_info.h
   40:  int preempt_count;   /* 0 => preemptable, <0 => BUG */
   47:   * preempt_count needs to be 1 initially, until the scheduler is functional.
   54:  .preempt_count = INIT_PREEMPT_COUNT, \
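The thread_info definitions here and further below (c6x, m68k, nios2, arc, nds32) all initialize the field the same way, and their comments point at the same invariant: a task begins life with one implicit preempt_disable() that only the scheduler releases, and a freshly forked task carries an extra level that finish_task_switch() unwinds. A sketch of the relationship, with constant definitions paraphrased from memory of the core header for a PREEMPT_COUNT=y build (treat the exact values as assumptions):

    #include <stdio.h>

    #define PREEMPT_OFFSET          1          /* one preempt_disable() level */
    #define PREEMPT_DISABLE_OFFSET  PREEMPT_OFFSET
    #define PREEMPT_ENABLED         0          /* generic (asm-generic) value */

    /* Boot/init tasks: non-preemptible until the scheduler is functional. */
    #define INIT_PREEMPT_COUNT      PREEMPT_OFFSET

    /* Forked tasks: matches the schedule invariant quoted in the
     * include/linux/preempt.h hits above,
     *     preempt_count() == 2*PREEMPT_DISABLE_OFFSET
     * during a context switch. */
    #define FORK_PREEMPT_COUNT      (2*PREEMPT_DISABLE_OFFSET + PREEMPT_ENABLED)

    int main(void)
    {
        printf("init=%d fork=%d\n", INIT_PREEMPT_COUNT, FORK_PREEMPT_COUNT);
        return 0;
    }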
/kernel/linux/linux-5.10/lib/smp_processor_id.c
   16:  if (likely(preempt_count()))   (in check_preemption_disabled())
   45:      what1, what2, preempt_count() - 1, current->comm, current->pid);   (in check_preemption_disabled())
/kernel/linux/linux-6.6/lib/smp_processor_id.c
   16:  if (likely(preempt_count()))   (in check_preemption_disabled())
   46:      what1, what2, preempt_count() - 1, current->comm, current->pid);   (in check_preemption_disabled())
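The `likely(preempt_count())` fast path at line 16 works because any non-zero preempt_count (explicit preempt_disable(), hardirq, softirq, or NMI nesting) pins the task to its CPU, making a raw smp_processor_id() safe without further checks. A model of that decision, with a hypothetical function name standing in for check_preemption_disabled():

    #include <stdbool.h>
    #include <stdio.h>

    /* Shape of the check: non-zero count or disabled interrupts both
     * prevent migration; otherwise the kernel would emit the
     * "BUG: using smp_processor_id() in preemptible" report. */
    static bool cpu_use_is_safe(unsigned int preempt_count, bool irqs_disabled)
    {
        if (preempt_count)     /* the likely() branch in the real code */
            return true;
        if (irqs_disabled)     /* interrupts off also prevents migration */
            return true;
        return false;
    }

    int main(void)
    {
        printf("%d %d\n", cpu_use_is_safe(1, false), cpu_use_is_safe(0, false));
        return 0;
    }

The `preempt_count() - 1` in the report line exists because the function itself holds a disable level while printing, so the message shows the caller's count.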
/kernel/linux/linux-5.10/arch/arc/include/asm/thread_info.h
   41:  int preempt_count;   /* 0 => preemptable, <0 => BUG */
   51:   * preempt_count needs to be 1 initially, until the scheduler is functional.
   58:  .preempt_count = INIT_PREEMPT_COUNT, \
/kernel/linux/linux-5.10/arch/m68k/include/asm/thread_info.h
   31:  int preempt_count;   /* 0 => preemptable, <0 => BUG */
   41:  .preempt_count = INIT_PREEMPT_COUNT, \
/kernel/linux/linux-5.10/arch/nds32/include/asm/thread_info.h
   27:  __s32 preempt_count;   /* 0 => preemptable, <0 => bug */
   32:  .preempt_count = INIT_PREEMPT_COUNT, \