
Searched refs: PREEMPT_NEED_RESCHED (results 1 - 6 of 6, sorted by relevance)

/kernel/linux/linux-5.10/arch/x86/include/asm/preempt.h
  12  #define PREEMPT_NEED_RESCHED 0x80000000   [macro]
  15  * We use the PREEMPT_NEED_RESCHED bit as an inverted NEED_RESCHED such
  18  #define PREEMPT_ENABLED (0 + PREEMPT_NEED_RESCHED)
  21  * We mask the PREEMPT_NEED_RESCHED bit so as not to confuse all current users
  26  return raw_cpu_read_4(__preempt_count) & ~PREEMPT_NEED_RESCHED;   [in preempt_count()]
  35  new = (old & PREEMPT_NEED_RESCHED) |   [in preempt_count_set()]
  36      (pc & ~PREEMPT_NEED_RESCHED);   [in preempt_count_set()]
  60  raw_cpu_and_4(__preempt_count, ~PREEMPT_NEED_RESCHED);   [in set_preempt_need_resched()]
  65  raw_cpu_or_4(__preempt_count, PREEMPT_NEED_RESCHED);   [in clear_preempt_need_resched()]
  70  return !(raw_cpu_read_4(__preempt_count) & PREEMPT_NEED_RESCHED);   [in test_preempt_need_resched()]
  [all...]
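
The comment fragments in this x86 hit spell out the trick behind the symbol: bit 31 of the per-CPU __preempt_count word is an inverted copy of NEED_RESCHED, so set_preempt_need_resched() clears the bit, clear_preempt_need_resched() sets it, and preempt_count() masks it off before callers see the value. Below is a minimal user-space model of that inverted-bit logic; preempt_word stands in for the per-CPU __preempt_count, and nothing here is per-CPU or atomic, unlike the kernel code.

/*
 * Simplified model of the x86/s390 scheme shown above: bit 31 of a 32-bit
 * word is an *inverted* NEED_RESCHED flag, so the flag counts as "set"
 * when the bit is 0.  Single-threaded sketch, not kernel code.
 */
#include <assert.h>
#include <stdint.h>
#include <stdio.h>

#define PREEMPT_NEED_RESCHED 0x80000000u
#define PREEMPT_ENABLED      (0u + PREEMPT_NEED_RESCHED)

static uint32_t preempt_word = PREEMPT_ENABLED;   /* stand-in for __preempt_count */

/* Callers only ever see the nesting count; the flag bit is masked off. */
static uint32_t preempt_count(void)
{
	return preempt_word & ~PREEMPT_NEED_RESCHED;
}

/* Inverted logic: "set" clears the bit, "clear" sets it. */
static void set_preempt_need_resched(void)   { preempt_word &= ~PREEMPT_NEED_RESCHED; }
static void clear_preempt_need_resched(void) { preempt_word |=  PREEMPT_NEED_RESCHED; }
static int  test_preempt_need_resched(void)  { return !(preempt_word & PREEMPT_NEED_RESCHED); }

int main(void)
{
	assert(preempt_count() == 0 && !test_preempt_need_resched());

	set_preempt_need_resched();              /* reschedule requested */
	assert(test_preempt_need_resched());

	/* With the count at 0 and the inverted bit cleared, the raw word
	 * itself is 0 -- the property the folded check relies on. */
	printf("raw word: 0x%08x\n", (unsigned)preempt_word);

	clear_preempt_need_resched();
	assert(!test_preempt_need_resched());
	return 0;
}
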
/kernel/linux/linux-5.10/arch/s390/include/asm/preempt.h
  12  #define PREEMPT_NEED_RESCHED 0x80000000   [macro]
  13  #define PREEMPT_ENABLED (0 + PREEMPT_NEED_RESCHED)
  17  return READ_ONCE(S390_lowcore.preempt_count) & ~PREEMPT_NEED_RESCHED;   [in preempt_count()]
  26  new = (old & PREEMPT_NEED_RESCHED) |   [in preempt_count_set()]
  27      (pc & ~PREEMPT_NEED_RESCHED);   [in preempt_count_set()]
  34  __atomic_and(~PREEMPT_NEED_RESCHED, &S390_lowcore.preempt_count);   [in set_preempt_need_resched()]
  39  __atomic_or(PREEMPT_NEED_RESCHED, &S390_lowcore.preempt_count);   [in clear_preempt_need_resched()]
  44  return !(READ_ONCE(S390_lowcore.preempt_count) & PREEMPT_NEED_RESCHED);   [in test_preempt_need_resched()]
/kernel/linux/linux-6.6/arch/s390/include/asm/preempt.h
  12  #define PREEMPT_NEED_RESCHED 0x80000000   [macro]
  13  #define PREEMPT_ENABLED (0 + PREEMPT_NEED_RESCHED)
  17  return READ_ONCE(S390_lowcore.preempt_count) & ~PREEMPT_NEED_RESCHED;   [in preempt_count()]
  26  new = (old & PREEMPT_NEED_RESCHED) |   [in preempt_count_set()]
  27      (pc & ~PREEMPT_NEED_RESCHED);   [in preempt_count_set()]
  34  __atomic_and(~PREEMPT_NEED_RESCHED, &S390_lowcore.preempt_count);   [in set_preempt_need_resched()]
  39  __atomic_or(PREEMPT_NEED_RESCHED, &S390_lowcore.preempt_count);   [in clear_preempt_need_resched()]
  44  return !(READ_ONCE(S390_lowcore.preempt_count) & PREEMPT_NEED_RESCHED);   [in test_preempt_need_resched()]
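
The s390 hits implement the same inverted bit but flip it in S390_lowcore.preempt_count with the architecture's __atomic_and()/__atomic_or() helpers. A rough user-space analogue using C11 <stdatomic.h> follows; the variable and helper names are stand-ins for the lowcore field and kernel accessors, not the kernel's own API.

#include <stdatomic.h>
#include <stdint.h>
#include <stdio.h>

#define PREEMPT_NEED_RESCHED 0x80000000u

/* stand-in for S390_lowcore.preempt_count */
static _Atomic uint32_t preempt_word = PREEMPT_NEED_RESCHED;

static void set_preempt_need_resched(void)
{
	atomic_fetch_and(&preempt_word, ~PREEMPT_NEED_RESCHED);   /* clear the inverted bit */
}

static void clear_preempt_need_resched(void)
{
	atomic_fetch_or(&preempt_word, PREEMPT_NEED_RESCHED);     /* set the inverted bit */
}

static int test_preempt_need_resched(void)
{
	return !(atomic_load(&preempt_word) & PREEMPT_NEED_RESCHED);
}

int main(void)
{
	set_preempt_need_resched();
	printf("pending=%d raw=0x%08x\n", test_preempt_need_resched(),
	       (unsigned)atomic_load(&preempt_word));
	clear_preempt_need_resched();
	printf("pending=%d raw=0x%08x\n", test_preempt_need_resched(),
	       (unsigned)atomic_load(&preempt_word));
	return 0;
}
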
/kernel/linux/linux-6.6/arch/x86/include/asm/preempt.h
  13  #define PREEMPT_NEED_RESCHED 0x80000000   [macro]
  16  * We use the PREEMPT_NEED_RESCHED bit as an inverted NEED_RESCHED such
  19  #define PREEMPT_ENABLED (0 + PREEMPT_NEED_RESCHED)
  22  * We mask the PREEMPT_NEED_RESCHED bit so as not to confuse all current users
  27  return raw_cpu_read_4(pcpu_hot.preempt_count) & ~PREEMPT_NEED_RESCHED;   [in preempt_count()]
  36  new = (old & PREEMPT_NEED_RESCHED) |   [in preempt_count_set()]
  37      (pc & ~PREEMPT_NEED_RESCHED);   [in preempt_count_set()]
  61  raw_cpu_and_4(pcpu_hot.preempt_count, ~PREEMPT_NEED_RESCHED);   [in set_preempt_need_resched()]
  66  raw_cpu_or_4(pcpu_hot.preempt_count, PREEMPT_NEED_RESCHED);   [in clear_preempt_need_resched()]
  71  return !(raw_cpu_read_4(pcpu_hot.preempt_count) & PREEMPT_NEED_RESCHED);   [in test_preempt_need_resched()]
  [all...]
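
The reason both x86 headers store the flag inverted, as their comments hint: with NEED_RESCHED folded in as a cleared bit 31, "preemption enabled (count == 0) and a reschedule pending" collapses into a single comparison of the raw per-CPU word against zero, which is what lets the x86 preempt_enable() fast path boil down to a decrement and one conditional branch. The sketch below only models that folded test; the function name is illustrative, not taken from the kernel.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define PREEMPT_NEED_RESCHED 0x80000000u

/*
 * Model of the folded check: the word holds the nesting count in the low
 * bits and the inverted NEED_RESCHED flag in bit 31, so "may we preempt
 * right now?" is a single compare against zero.
 */
static bool should_resched_model(uint32_t raw_word)
{
	return raw_word == 0;
}

int main(void)
{
	uint32_t enabled_no_resched = PREEMPT_NEED_RESCHED; /* count 0, no resched (bit set)   */
	uint32_t enabled_resched    = 0;                    /* count 0, resched (bit cleared)  */
	uint32_t disabled_resched   = 1;                    /* count 1, resched (bit cleared)  */

	printf("%d %d %d\n",
	       should_resched_model(enabled_no_resched),   /* 0 */
	       should_resched_model(enabled_resched),      /* 1 */
	       should_resched_model(disabled_resched));    /* 0 */
	return 0;
}
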
/kernel/linux/linux-5.10/arch/arm64/include/asm/preempt.h
   7  #define PREEMPT_NEED_RESCHED BIT(32)   [macro]
   8  #define PREEMPT_ENABLED (PREEMPT_NEED_RESCHED)
  17  /* Preserve existing value of PREEMPT_NEED_RESCHED */   [in preempt_count_set()]
/kernel/linux/linux-6.6/arch/arm64/include/asm/preempt.h
   8  #define PREEMPT_NEED_RESCHED BIT(32)   [macro]
   9  #define PREEMPT_ENABLED (PREEMPT_NEED_RESCHED)
  18  /* Preserve existing value of PREEMPT_NEED_RESCHED */   [in preempt_count_set()]
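
The arm64 hits use a different layout: PREEMPT_NEED_RESCHED is BIT(32), so the (still inverted) flag sits in the upper half of a 64-bit per-thread word while the 32-bit nesting count occupies the lower half. A plain 32-bit write of the count therefore never disturbs the flag, which is what the "Preserve existing value of PREEMPT_NEED_RESCHED" comment refers to, and the whole-word-is-zero test still works. Below is a hedged little-endian model of that layout, with illustrative field names rather than the kernel's.

/*
 * Model of the arm64-style split: 64-bit word, count in the low 32 bits,
 * inverted need_resched flag at bit 32.  Little-endian field order assumed;
 * the kernel handles big-endian separately.  Not kernel code.
 */
#include <assert.h>
#include <stdint.h>

union preempt_model {
	uint64_t whole;                 /* 0 => count 0 and a reschedule pending */
	struct {
		uint32_t count;         /* preemption nesting count              */
		uint32_t need_resched;  /* inverted: 0 means "resched needed"    */
	} parts;
};

int main(void)
{
	union preempt_model p = { .whole = 1ULL << 32 };  /* enabled, no resched */

	p.parts.need_resched = 0;      /* set_preempt_need_resched() analogue */
	p.parts.count = 0;             /* 32-bit write, flag half untouched   */
	assert(p.whole == 0);          /* single 64-bit test: preempt now     */

	p.parts.need_resched = 1;      /* clear_preempt_need_resched() analogue */
	assert(p.whole != 0);
	return 0;
}
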
