/kernel/linux/linux-5.10/kernel/trace/trace_preemptirq.c
     31  if (!in_nmi())  in trace_hardirqs_on_prepare()
     43  if (!in_nmi())  in trace_hardirqs_on()
     66  if (!in_nmi())  in trace_hardirqs_off_finish()
     81  if (!in_nmi())  in trace_hardirqs_off()
     91  if (!in_nmi())  in trace_hardirqs_on_caller()
    110  if (!in_nmi())  in trace_hardirqs_off_caller()
    122  if (!in_nmi())  in trace_preempt_on()
    129  if (!in_nmi())  in trace_preempt_off()

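All eight hits above are the irq/preempt tracing hooks guarding their tracepoint with in_nmi(): when a hook is reached from an NMI, the tracepoint is skipped because this tracing path is not NMI-safe. A minimal sketch of that guard, with emit_trace_event() standing in for the real trace_*_rcuidle() call (illustrative names, not kernel source):

#include <linux/hardirq.h>	/* in_nmi() */

/* Placeholder for a tracepoint call such as trace_irq_enable_rcuidle(). */
static void emit_trace_event(unsigned long ip, unsigned long parent_ip)
{
}

static void example_hardirqs_on_hook(unsigned long ip, unsigned long parent_ip)
{
	if (!in_nmi())			/* NMI callers skip the tracepoint */
		emit_trace_event(ip, parent_ip);
}
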
/kernel/linux/linux-5.10/kernel/trace/trace_clock.c
    126  if (unlikely(in_nmi()))  in trace_clock_global()

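trace_clock_global() keeps its timestamps monotonic across CPUs by serializing on a lock; the hit at line 126 is the bail-out that skips that lock from NMI context and returns the locally computed time instead, since spinning on a lock the NMI may have interrupted could lock up the machine. A sketch of the shape only (the real function also disables IRQs around the locked section):

#include <linux/hardirq.h>
#include <linux/spinlock.h>
#include <linux/types.h>

static arch_spinlock_t example_clock_lock = __ARCH_SPIN_LOCK_UNLOCKED;

static u64 example_global_clock(u64 now)
{
	if (unlikely(in_nmi()))
		return now;		/* never spin on the lock from NMI */

	arch_spin_lock(&example_clock_lock);
	/* ... clamp 'now' against the last globally published value ... */
	arch_spin_unlock(&example_clock_lock);
	return now;
}
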
/kernel/linux/linux-5.10/arch/m68k/mac/macints.c
    258  static volatile int in_nmi;  [variable]
    262  if (in_nmi)  in mac_nmi_handler()
    264  in_nmi = 1;  in mac_nmi_handler()
    269  in_nmi = 0;  in mac_nmi_handler()

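In this file in_nmi is not the generic helper but a plain flag used as a reentrancy guard: mac_nmi_handler() ignores an NMI that fires while a previous one is still being handled. A sketch of that shape with illustrative names:

#include <linux/interrupt.h>

static volatile int example_in_nmi;

static irqreturn_t example_nmi_handler(int irq, void *dev_id)
{
	if (example_in_nmi)
		return IRQ_HANDLED;	/* already inside the handler */
	example_in_nmi = 1;

	/* ... report registers, poke the debugger, etc. ... */

	example_in_nmi = 0;
	return IRQ_HANDLED;
}
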
/kernel/linux/linux-6.6/arch/m68k/mac/macints.c
    144  static volatile int in_nmi;  in mac_nmi_handler() [local]
    146  if (in_nmi)  in mac_nmi_handler()
    148  in_nmi = 1;  in mac_nmi_handler()
    153  in_nmi = 0;  in mac_nmi_handler()

/kernel/linux/linux-6.6/arch/m68k/virt/ints.c
     92  static int in_nmi;  in virt_nmi_handler() [local]
     94  if (READ_ONCE(in_nmi))  in virt_nmi_handler()
     96  WRITE_ONCE(in_nmi, 1);  in virt_nmi_handler()
    101  WRITE_ONCE(in_nmi, 0);  in virt_nmi_handler()

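The virt variant is the same guard, but the flag is accessed through READ_ONCE()/WRITE_ONCE(), so the compiler must emit exactly one load and one store of it and cannot tear or reorder them. Sketch:

#include <linux/compiler.h>
#include <linux/interrupt.h>

static int example_in_nmi;

static irqreturn_t example_virt_nmi_handler(int irq, void *dev_id)
{
	if (READ_ONCE(example_in_nmi))
		return IRQ_HANDLED;
	WRITE_ONCE(example_in_nmi, 1);

	/* ... handle the NMI ... */

	WRITE_ONCE(example_in_nmi, 0);
	return IRQ_HANDLED;
}
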
/kernel/linux/linux-5.10/include/linux/hardirq.h
    107  * such that in_nmi() becomes true. To that effect all NMI C entry points must
    119  BUG_ON(in_nmi() == NMI_MASK);
    135  BUG_ON(!in_nmi());

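These hits (and the matching ones in the 6.6 copy below) are the __nmi_enter()/__nmi_exit() machinery itself: entry adds NMI_OFFSET (plus HARDIRQ_OFFSET) to preempt_count, which is exactly what makes in_nmi() return true everywhere else in this list, and the BUG_ON()s catch an overflowing NMI nesting count on entry and an unbalanced exit. A simplified sketch of the relationship, not the real macros:

#include <linux/preempt.h>
#include <linux/types.h>

/* in_nmi() is just a test of the NMI bits that __nmi_enter() added. */
static inline bool example_in_nmi(void)
{
	return preempt_count() & NMI_MASK;
}

/*
 * entry: BUG_ON(in_nmi() == NMI_MASK), then add NMI_OFFSET (+ HARDIRQ_OFFSET)
 * exit:  BUG_ON(!in_nmi()), then subtract it again
 */
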
/kernel/linux/linux-6.6/include/linux/hardirq.h
    100  * such that in_nmi() becomes true. To that effect all NMI C entry points must
    111  BUG_ON(in_nmi() == NMI_MASK);
    127  BUG_ON(!in_nmi());

/kernel/linux/linux-6.6/include/linux/srcu.h
    270  WARN_ON_ONCE(in_nmi());  in __acquires()
    327  WARN_ON_ONCE(in_nmi());  in __releases()

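The two srcu.h hits are in SRCU read-side entry/exit helpers (the cross-referencer reports their sparse __acquires()/__releases() annotations as the context): they warn once if ever reached from NMI because that reader flavor is not NMI-safe; readers that genuinely must run in NMI context use the separate srcu_read_lock_nmisafe()/srcu_read_unlock_nmisafe() API. A sketch of the assertion style only, with illustrative names:

#include <linux/bug.h>
#include <linux/hardirq.h>

struct example_srcu { int dummy; };

static inline int example_read_lock(struct example_srcu *sp)
{
	WARN_ON_ONCE(in_nmi());		/* this reader flavor is not NMI-safe */
	/* ... account the reader, return the active index ... */
	return 0;
}

static inline void example_read_unlock(struct example_srcu *sp, int idx)
{
	WARN_ON_ONCE(in_nmi());
	/* ... retire the reader accounted under 'idx' ... */
}
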
/kernel/linux/linux-6.6/arch/powerpc/kernel/irq_64.c
    217  WARN_ON_ONCE(in_nmi());  in arch_local_irq_restore()
    446  get_paca()->in_nmi = 1;  in replay_system_reset()
    448  get_paca()->in_nmi = 0;  in replay_system_reset()

/kernel/linux/linux-6.6/kernel/bpf/queue_stack_maps.c
    101  if (in_nmi()) {  in __queue_map_get()
    136  if (in_nmi()) {  in __stack_map_get()
    206  if (in_nmi()) {  in queue_stack_map_push_elem()

/kernel/linux/linux-6.6/kernel/bpf/percpu_freelist.c
     83  if (in_nmi())  in __pcpu_freelist_push()
    186  if (in_nmi())  in __pcpu_freelist_pop()

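The BPF queue/stack maps and the per-CPU freelist (this entry and the previous one, with the same pattern in the 5.10 copies further down) may be reached from programs attached in NMI context, where blindly spinning on the map lock could deadlock against the interrupted lock holder. They therefore only trylock from NMI and fail the operation with -EBUSY. A sketch of that acquisition pattern:

#include <linux/errno.h>
#include <linux/hardirq.h>
#include <linux/spinlock.h>

static int example_map_update(raw_spinlock_t *lock)
{
	unsigned long flags;

	if (in_nmi()) {
		if (!raw_spin_trylock_irqsave(lock, flags))
			return -EBUSY;	/* interrupted code may hold the lock */
	} else {
		raw_spin_lock_irqsave(lock, flags);
	}

	/* ... touch the map under the lock ... */

	raw_spin_unlock_irqrestore(lock, flags);
	return 0;
}
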
/kernel/linux/linux-5.10/arch/arm64/include/asm/simd.h
     40  !in_irq() && !irqs_disabled() && !in_nmi() &&  in may_use_simd()

/kernel/linux/linux-5.10/arch/arm64/include/asm/sdei.h
     59  if (in_nmi())  in on_sdei_stack()

/kernel/linux/linux-6.6/arch/arm64/include/asm/simd.h
     40  !in_hardirq() && !irqs_disabled() && !in_nmi() &&  in may_use_simd()

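may_use_simd() refuses kernel-mode NEON from hard-IRQ and NMI context, where kernel_neon_begin() cannot safely save and later restore the interrupted FPSIMD state; for the line shown, the only change between the 5.10 and 6.6 trees is the in_irq() to in_hardirq() rename. The sdei.h hit simply gates the SDEI stack check on being in NMI context. A sketch of the context part of the check only (the real helper also tests FPSIMD support and a per-CPU busy flag):

#include <linux/hardirq.h>
#include <linux/irqflags.h>
#include <linux/types.h>

static inline bool example_may_use_simd(void)
{
	return !in_hardirq() && !irqs_disabled() && !in_nmi();
}
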
/kernel/linux/linux-5.10/kernel/bpf/queue_stack_maps.c
    114  if (in_nmi()) {  in __queue_map_get()
    149  if (in_nmi()) {  in __stack_map_get()
    219  if (in_nmi()) {  in queue_stack_map_push_elem()

/kernel/linux/linux-5.10/kernel/bpf/percpu_freelist.c
     85  if (in_nmi())  in __pcpu_freelist_push()
    194  if (in_nmi())  in __pcpu_freelist_pop()

/kernel/linux/linux-6.6/kernel/context_tracking.c
    242  if (!in_nmi())  in ct_nmi_exit()
    276  if (!in_nmi())  in ct_nmi_enter()
    290  } else if (!in_nmi()) {  in ct_nmi_enter()

/kernel/linux/linux-6.6/kernel/printk/printk_safe.c
     41  if (this_cpu_read(printk_context) || in_nmi())  in vprintk()

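vprintk() still stores messages into the ring buffer from NMI, but it must not call into console drivers whose locks may be held by the interrupted context, so console output is deferred (via irq_work) when in_nmi() or the recursion-guard context is set. Handing follow-up work to irq_work is the standard way to get non-trivial processing out of NMI context; a self-contained sketch with illustrative names:

#include <linux/hardirq.h>
#include <linux/irq_work.h>

static void example_flush(struct irq_work *work)
{
	/* runs later in IRQ context, where consoles/locks may be used */
}

static struct irq_work example_flush_work;

static void example_init(void)
{
	init_irq_work(&example_flush_work, example_flush);
}

static void example_log(void)
{
	/* ... append the record to a lock-free buffer ... */

	if (in_nmi())
		irq_work_queue(&example_flush_work);	/* NMI-safe deferral */
	else
		example_flush(&example_flush_work);
}
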
/kernel/linux/linux-5.10/arch/x86/hyperv/hv_spinlock.c
     31  if (in_nmi())  in hv_qlock_wait()

/kernel/linux/linux-6.6/arch/x86/hyperv/hv_spinlock.c
     30  if (in_nmi())  in hv_qlock_wait()

/kernel/linux/linux-6.6/arch/x86/hyperv/ivm.c
     78  WARN_ON(in_nmi());  in hv_ghcb_hypercall()
    194  WARN_ON(in_nmi());  in hv_ghcb_msr_write()
    226  WARN_ON(in_nmi());  in hv_ghcb_msr_read()

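Two different styles in the Hyper-V code: hv_qlock_wait() (both trees) just returns when called from NMI, because the paravirt spinlock wait path would otherwise put the vCPU to sleep, which an NMI handler must not do; the ivm.c GHCB helpers instead assert with WARN_ON(in_nmi()) because the per-CPU GHCB page they use is protected only by disabling interrupts, which does not keep NMIs out. A sketch of both, with illustrative names:

#include <linux/bug.h>
#include <linux/hardirq.h>
#include <linux/irqflags.h>
#include <linux/types.h>

static void example_qlock_wait(u8 *byte, u8 val)
{
	if (in_nmi())
		return;			/* never idle the CPU from NMI */
	/* ... wait (e.g. idle the vCPU) until *byte changes ... */
}

static void example_use_percpu_page(void)
{
	unsigned long flags;

	WARN_ON(in_nmi());		/* IRQ-off alone cannot exclude NMIs */
	local_irq_save(flags);
	/* ... use the per-CPU shared page ... */
	local_irq_restore(flags);
}
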
/kernel/linux/linux-5.10/arch/x86/platform/uv/uv_nmi.c
    493  * If first CPU in on this hub, set hub_nmi "in_nmi" and "owner" values and
    494  * return true. If first CPU in on the system, set global "in_nmi" flag.
    498  int first = atomic_add_unless(&hub_nmi->in_nmi, 1, 1);  in uv_set_in_nmi()
    521  nmi = atomic_read(&hub_nmi->in_nmi);  in uv_check_nmi()
    548  /* Re-check hub in_nmi flag */  in uv_check_nmi()
    549  nmi = atomic_read(&hub_nmi->in_nmi);  in uv_check_nmi()
    583  atomic_set(&hub_nmi->in_nmi, 0);  in uv_clear_nmi()
    980  /* Clear per_cpu "in_nmi" flag */  in uv_handle_nmi()

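Here in_nmi is an atomic_t inside the hub's NMI state, and atomic_add_unless(&hub_nmi->in_nmi, 1, 1) elects exactly one "first CPU in": the increment only happens while the counter is not already 1, so a single CPU gets a non-zero return and becomes the owner, and uv_clear_nmi() later resets the flag with atomic_set(). A sketch of that first-responder election as a pattern, not the UV code:

#include <linux/atomic.h>
#include <linux/types.h>

static atomic_t example_in_nmi = ATOMIC_INIT(0);

static bool example_claim_nmi(void)
{
	/* only the CPU that takes the counter from 0 to 1 returns true */
	return atomic_add_unless(&example_in_nmi, 1, 1);
}

static void example_release_nmi(void)
{
	atomic_set(&example_in_nmi, 0);
}
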
/kernel/linux/linux-6.6/arch/x86/platform/uv/uv_nmi.c
    493  * If first CPU in on this hub, set hub_nmi "in_nmi" and "owner" values and
    494  * return true. If first CPU in on the system, set global "in_nmi" flag.
    498  int first = atomic_add_unless(&hub_nmi->in_nmi, 1, 1);  in uv_set_in_nmi()
    521  nmi = atomic_read(&hub_nmi->in_nmi);  in uv_check_nmi()
    548  /* Re-check hub in_nmi flag */  in uv_check_nmi()
    549  nmi = atomic_read(&hub_nmi->in_nmi);  in uv_check_nmi()
    583  atomic_set(&hub_nmi->in_nmi, 0);  in uv_clear_nmi()
    977  /* Clear per_cpu "in_nmi" flag */  in uv_handle_nmi()

/kernel/linux/linux-6.6/kernel/trace/trace_clock.c
    126  if (unlikely(in_nmi()))  in trace_clock_global()

/kernel/linux/linux-6.6/kernel/trace/trace_preemptirq.c
     29  #define trace(point) if (!in_nmi()) trace_##point##_rcuidle

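The 6.6 file folds the per-callsite checks that the 5.10 copy (first entry above) repeats into a single helper macro: trace(irq_enable)(...) pastes the point name into the _rcuidle tracepoint wrapper and prefixes the call with the NMI check, since those wrappers are not NMI-safe. A sketch of the same token-pasting trick with illustrative names:

#include <linux/hardirq.h>

#define example_trace(point)	if (!in_nmi()) example_trace_##point

/* Stands in for a generated tracepoint wrapper such as
 * trace_irq_enable_rcuidle(). */
static void example_trace_irq_enable(unsigned long ip, unsigned long parent_ip)
{
}

static void example_hook(unsigned long ip, unsigned long parent_ip)
{
	/* expands to: if (!in_nmi()) example_trace_irq_enable(ip, parent_ip); */
	example_trace(irq_enable)(ip, parent_ip);
}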