
Searched refs:loaded_mm (Results 1 - 12 of 12) sorted by relevance

/kernel/linux/linux-5.10/arch/x86/mm/
tlb.c
    289   struct mm_struct *loaded_mm = this_cpu_read(cpu_tlbstate.loaded_mm);   in leave_mm() (local)
    299   if (loaded_mm == &init_mm)   in leave_mm()
    415   cr4_update_pce_mm(this_cpu_read(cpu_tlbstate.loaded_mm));   in cr4_update_pce()
    425   struct mm_struct *real_prev = this_cpu_read(cpu_tlbstate.loaded_mm);   in switch_mm_irqs_off()
    437   * cpu_tlbstate.loaded_mm) matches next.   in switch_mm_irqs_off()
    554   this_cpu_write(cpu_tlbstate.loaded_mm, LOADED_MM_SWITCHING);   in switch_mm_irqs_off()
    571   /* Make sure we write CR3 before loaded_mm. */   in switch_mm_irqs_off()
    574   this_cpu_write(cpu_tlbstate.loaded_mm, next);   in switch_mm_irqs_off()
    598   if (this_cpu_read(cpu_tlbstate.loaded_mm)   in enter_lazy_tlb()
    668   struct mm_struct *loaded_mm = this_cpu_read(cpu_tlbstate.loaded_mm);   in flush_tlb_func_common() (local)
   1162   struct mm_struct *loaded_mm = this_cpu_read(cpu_tlbstate.loaded_mm);   in nmi_uaccess_okay() (local)
    [all...]
init.c
   1050   .loaded_mm = &init_mm,
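
Read together, the switch_mm_irqs_off() hits above (tlb.c lines 554, 571 and 574) show the ordering the code enforces: loaded_mm is first parked on the LOADED_MM_SWITCHING marker, CR3 is written, and only then does loaded_mm point at the new mm. A minimal user-space sketch of that ordering; every name in it is a stand-in, not a kernel API:

/*
 * Simplified, user-space model of the ordering visible in the tlb.c hits
 * above: loaded_mm is parked on a sentinel while CR3 is rewritten, and
 * only then pointed at the new mm.  struct mm, write_cr3_stub, etc. are
 * illustrative stand-ins.
 */
#include <stdio.h>

struct mm { const char *name; unsigned long pgd; };

static struct mm init_mm = { "init_mm", 0x1000 };
#define LOADED_MM_SWITCHING ((struct mm *)1)   /* transient marker, per tlbflush.h */

static struct mm *loaded_mm = &init_mm;        /* models cpu_tlbstate.loaded_mm */

static void write_cr3_stub(unsigned long pgd)  /* stands in for the real CR3 write */
{
    printf("CR3 <- %#lx\n", pgd);
}

static void switch_mm_model(struct mm *next)
{
    if (loaded_mm == next)              /* nothing to switch */
        return;

    loaded_mm = LOADED_MM_SWITCHING;    /* mark the window where CR3 and      */
                                        /* loaded_mm are allowed to disagree  */
    write_cr3_stub(next->pgd);          /* the hardware switch                */
    loaded_mm = next;                   /* CR3 first, then loaded_mm, so an   */
                                        /* observer never sees a stale pair   */
}

int main(void)
{
    struct mm task_mm = { "task_mm", 0x2000 };
    switch_mm_model(&task_mm);
    printf("loaded_mm = %s\n", loaded_mm->name);
    return 0;
}
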
/kernel/linux/linux-6.6/arch/x86/mm/
tlb.c
    304   struct mm_struct *loaded_mm = this_cpu_read(cpu_tlbstate.loaded_mm);   in leave_mm() (local)
    314   if (loaded_mm == &init_mm)   in leave_mm()
    488   cr4_update_pce_mm(this_cpu_read(cpu_tlbstate.loaded_mm));   in cr4_update_pce()
    498   struct mm_struct *real_prev = this_cpu_read(cpu_tlbstate.loaded_mm);   in switch_mm_irqs_off()
    511   * cpu_tlbstate.loaded_mm) matches next.   in switch_mm_irqs_off()
    635   this_cpu_write(cpu_tlbstate.loaded_mm, LOADED_MM_SWITCHING);   in switch_mm_irqs_off()
    653   /* Make sure we write CR3 before loaded_mm. */   in switch_mm_irqs_off()
    656   this_cpu_write(cpu_tlbstate.loaded_mm, next);   in switch_mm_irqs_off()
    680   if (this_cpu_read(cpu_tlbstate.loaded_mm)   in enter_lazy_tlb()
    755   struct mm_struct *loaded_mm = this_cpu_read(cpu_tlbstate.loaded_mm);   in flush_tlb_func() (local)
   1286   struct mm_struct *loaded_mm = this_cpu_read(cpu_tlbstate.loaded_mm);   in nmi_uaccess_okay() (local)
    [all...]
init.c
   1061   .loaded_mm = &init_mm,
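
The 6.6 hits are the same pattern at shifted line numbers; the readers here include flush_tlb_func() and nmi_uaccess_okay(), which consult loaded_mm to decide whether a flush or a user access from NMI context applies to this CPU. A rough user-space model of the nmi_uaccess_okay()-style check, with illustrative names only:

/*
 * Rough model of the nmi_uaccess_okay()-style check seen above: a user
 * access from NMI context is only trusted when the mm the CPU actually
 * has loaded matches the task's mm.  The variables here model
 * cpu_tlbstate.loaded_mm and current->mm; none of this is kernel API.
 */
#include <stdbool.h>
#include <stdio.h>

struct mm { const char *name; };

static struct mm task_mm  = { "task_mm" };
static struct mm other_mm = { "other_mm" };

static struct mm *loaded_mm  = &task_mm;   /* models cpu_tlbstate.loaded_mm */
static struct mm *current_mm = &task_mm;   /* models current->mm            */

static bool nmi_uaccess_okay_model(void)
{
    /* During a temporary mm switch the two diverge, so an NMI landing
     * in that window must not touch user memory. */
    return loaded_mm == current_mm;
}

int main(void)
{
    printf("match:    %d\n", nmi_uaccess_okay_model());   /* 1 */
    loaded_mm = &other_mm;                                /* simulate a switch */
    printf("mismatch: %d\n", nmi_uaccess_okay_model());   /* 0 */
    return 0;
}
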
/kernel/linux/linux-5.10/arch/x86/include/asm/
tlbflush.h
     69   * cpu_tlbstate.loaded_mm should match CR3 whenever interrupts
     74   * During switch_mm_irqs_off(), loaded_mm will be set to
     76   * when CR3 and loaded_mm would otherwise be inconsistent. This
     79   struct mm_struct *loaded_mm;   (member)
     96   * mm_cpumask(loaded_mm) and is_lazy == false;
     98   * - Not using a real mm. loaded_mm == &init_mm. Our CPU's bit
    101   * - Lazily using a real mm. loaded_mm != &init_mm, our bit
    102   *   is set in mm_cpumask(loaded_mm), but is_lazy == true.
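
The tlbflush.h comments quoted above enumerate three per-CPU states built from loaded_mm and is_lazy. A tiny model that classifies them from just those two fields; the enum and helper are illustrative, not from the kernel:

/*
 * Classifies the three per-CPU states described in the tlbflush.h
 * excerpts above, using only the two fields named in the hits.
 * The enum names are invented for this sketch.
 */
#include <stdbool.h>
#include <stdio.h>

struct mm { const char *name; };
static struct mm init_mm = { "init_mm" };

enum cpu_tlb_state_kind {
    USING_REAL_MM,          /* loaded_mm != &init_mm, is_lazy == false */
    NOT_USING_REAL_MM,      /* loaded_mm == &init_mm                   */
    LAZILY_USING_REAL_MM,   /* loaded_mm != &init_mm, is_lazy == true  */
};

static enum cpu_tlb_state_kind classify(struct mm *loaded_mm, bool is_lazy)
{
    if (loaded_mm == &init_mm)
        return NOT_USING_REAL_MM;
    return is_lazy ? LAZILY_USING_REAL_MM : USING_REAL_MM;
}

int main(void)
{
    struct mm task_mm = { "task_mm" };
    printf("%d %d %d\n",
           classify(&task_mm, false),   /* USING_REAL_MM        */
           classify(&init_mm, false),   /* NOT_USING_REAL_MM    */
           classify(&task_mm, true));   /* LAZILY_USING_REAL_MM */
    return 0;
}
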
/kernel/linux/linux-6.6/arch/x86/include/asm/
tlbflush.h
     74   * cpu_tlbstate.loaded_mm should match CR3 whenever interrupts
     79   * During switch_mm_irqs_off(), loaded_mm will be set to
     81   * when CR3 and loaded_mm would otherwise be inconsistent. This
     84   struct mm_struct *loaded_mm;   (member)
    159   * mm_cpumask(loaded_mm) and is_lazy == false;
    161   * - Not using a real mm. loaded_mm == &init_mm. Our CPU's bit
    164   * - Lazily using a real mm. loaded_mm != &init_mm, our bit
    165   *   is set in mm_cpumask(loaded_mm), but is_lazy == true.
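
Both header versions also note that loaded_mm is parked on LOADED_MM_SWITCHING while CR3 is being rewritten, precisely so that consistency checks can refuse to trust the pair during that window. A small extension of the nmi_uaccess_okay_model() sketch above that fails closed on the marker (sentinel value taken from the header, everything else simplified):

/*
 * While loaded_mm holds the LOADED_MM_SWITCHING marker, no conclusion
 * about CR3 vs. loaded_mm is safe, so the check fails closed.  This is a
 * user-space sketch; only the marker idea comes from the hits above.
 */
#include <stdbool.h>
#include <stdio.h>

struct mm { int unused; };

#define LOADED_MM_SWITCHING ((struct mm *)1)   /* transient marker */

static bool uaccess_okay_model(struct mm *loaded_mm, struct mm *current_mm)
{
    if (loaded_mm == LOADED_MM_SWITCHING)   /* mid-switch: CR3 and loaded_mm */
        return false;                       /* may disagree, so fail closed  */
    return loaded_mm == current_mm;
}

int main(void)
{
    static struct mm task_mm;
    printf("%d\n", uaccess_okay_model(&task_mm, &task_mm));            /* 1 */
    printf("%d\n", uaccess_okay_model(LOADED_MM_SWITCHING, &task_mm)); /* 0 */
    return 0;
}
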
/kernel/linux/linux-5.10/arch/x86/kernel/
ldt.c
    140   if (this_cpu_read(cpu_tlbstate.loaded_mm) != mm)   in flush_ldt()
alternative.c
   1137   temp_state.mm = this_cpu_read(cpu_tlbstate.loaded_mm);   in use_temporary_mm()
/kernel/linux/linux-6.6/arch/x86/kernel/
ldt.c
    140   if (this_cpu_read(cpu_tlbstate.loaded_mm) != mm)   in flush_ldt()
alternative.c
   1739   temp_state.mm = this_cpu_read(cpu_tlbstate.loaded_mm);   in use_temporary_mm()
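
The kernel/ hits use loaded_mm in two ways: flush_ldt() skips CPUs whose loaded_mm is not the mm being updated, and use_temporary_mm() stashes the current loaded_mm so it can be restored after text poking. A compact user-space model of both patterns; the helper and type names are made up:

/*
 * Models the two kernel/ uses above: a callback that only acts on CPUs
 * whose loaded_mm matches the target mm (flush_ldt pattern), and a
 * save/restore pair around a temporary mm (use_temporary_mm pattern).
 * temp_mm_state and the helpers are stand-ins, not kernel APIs.
 */
#include <stdio.h>

struct mm { const char *name; };

static struct mm poking_mm = { "poking_mm" };
static struct mm task_mm   = { "task_mm" };

static struct mm *loaded_mm = &task_mm;   /* models cpu_tlbstate.loaded_mm */

/* flush_ldt() pattern: ignore the request unless this CPU runs 'mm'. */
static void flush_ldt_model(struct mm *mm)
{
    if (loaded_mm != mm)
        return;
    printf("reloading LDT for %s\n", mm->name);
}

/* use_temporary_mm() pattern: remember what was loaded, switch, restore. */
struct temp_mm_state { struct mm *mm; };

static struct temp_mm_state use_temporary_mm_model(struct mm *temp)
{
    struct temp_mm_state prev = { .mm = loaded_mm };
    loaded_mm = temp;
    return prev;
}

static void unuse_temporary_mm_model(struct temp_mm_state prev)
{
    loaded_mm = prev.mm;
}

int main(void)
{
    flush_ldt_model(&task_mm);            /* acts: loaded_mm matches        */
    struct temp_mm_state s = use_temporary_mm_model(&poking_mm);
    flush_ldt_model(&task_mm);            /* skipped: temporary mm loaded   */
    unuse_temporary_mm_model(s);
    return 0;
}
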
/kernel/linux/linux-5.10/arch/x86/xen/
mmu_pv.c
    900   if (this_cpu_read(cpu_tlbstate.loaded_mm) == mm)   in drop_mm_ref_this_cpu()
/kernel/linux/linux-6.6/arch/x86/xen/
mmu_pv.c
    915   if (this_cpu_read(cpu_tlbstate.loaded_mm) == mm)   in drop_mm_ref_this_cpu()
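
The Xen PV hits check whether this CPU still has the dying mm loaded before dropping its reference; if so, it has to switch away first. A minimal model of that decision, with stand-in names:

/*
 * Models the drop_mm_ref_this_cpu() check seen above: if this CPU's
 * loaded_mm is the mm being torn down, switch back to init_mm before the
 * pagetables go away.  leave_mm_model and friends are illustrative only;
 * the real function also deals with any stale CR3 reference on this CPU.
 */
#include <stdio.h>

struct mm { const char *name; };

static struct mm init_mm = { "init_mm" };
static struct mm *loaded_mm;              /* models cpu_tlbstate.loaded_mm */

static void leave_mm_model(void)
{
    loaded_mm = &init_mm;                 /* stop using the dying mm */
}

static void drop_mm_ref_model(struct mm *mm)
{
    if (loaded_mm == mm)                  /* the mmu_pv.c:900 / 915 check */
        leave_mm_model();
}

int main(void)
{
    struct mm dying = { "dying_mm" };
    loaded_mm = &dying;
    drop_mm_ref_model(&dying);
    printf("loaded_mm = %s\n", loaded_mm->name);   /* init_mm */
    return 0;
}
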

Completed in 20 milliseconds