Lines matching refs: loaded_mm (uses of the per-CPU cpu_tlbstate.loaded_mm in arch/x86/mm/tlb.c; sketches of the main usage patterns follow the listing)
289 struct mm_struct *loaded_mm = this_cpu_read(cpu_tlbstate.loaded_mm);
299 if (loaded_mm == &init_mm)
415 cr4_update_pce_mm(this_cpu_read(cpu_tlbstate.loaded_mm));
425 struct mm_struct *real_prev = this_cpu_read(cpu_tlbstate.loaded_mm);
437 * cpu_tlbstate.loaded_mm) matches next.
554 this_cpu_write(cpu_tlbstate.loaded_mm, LOADED_MM_SWITCHING);
571 /* Make sure we write CR3 before loaded_mm. */
574 this_cpu_write(cpu_tlbstate.loaded_mm, next);
598 if (this_cpu_read(cpu_tlbstate.loaded_mm) == &init_mm)
620 struct mm_struct *mm = this_cpu_read(cpu_tlbstate.loaded_mm);
668 struct mm_struct *loaded_mm = this_cpu_read(cpu_tlbstate.loaded_mm);
670 u64 mm_tlb_gen = atomic64_read(&loaded_mm->context.tlb_gen);
676 if (unlikely(loaded_mm == &init_mm))
680 loaded_mm->context.ctx_id);
786 if (f->mm && f->mm != this_cpu_read(cpu_tlbstate.loaded_mm))
908 if (mm == this_cpu_read(cpu_tlbstate.loaded_mm)) {
973 unsigned long cr3 = build_cr3(this_cpu_read(cpu_tlbstate.loaded_mm)->pgd,
1162 struct mm_struct *loaded_mm = this_cpu_read(cpu_tlbstate.loaded_mm);
1165 VM_WARN_ON_ONCE(!loaded_mm);
1175 * sets loaded_mm to LOADED_MM_SWITCHING before writing to CR3.
1177 if (loaded_mm != current_mm)
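
Taken together, lines 554, 571 and 574 show the ordering contract around a context switch: loaded_mm is first set to the LOADED_MM_SWITCHING sentinel, CR3 is written, and only then is loaded_mm set to the new mm. Lines 1162-1177 show a reader relying on that ordering: any value other than the current mm, including the sentinel, means CR3 must not be trusted. Below is a minimal userspace model of that pattern, not kernel code: struct mm_struct, write_cr3, switch_mm_model and uaccess_okay_model are stubs with invented names, and the per-CPU slot is modeled as a plain global.

#include <stdbool.h>
#include <stdio.h>

struct mm_struct { unsigned long pgd; };

/* Sentinel: loaded_mm and CR3 are temporarily out of sync. */
#define LOADED_MM_SWITCHING ((struct mm_struct *)1UL)

static struct mm_struct init_mm = { .pgd = 0 };

/* Model of the per-CPU cpu_tlbstate.loaded_mm slot (a plain global here). */
static struct mm_struct *loaded_mm = &init_mm;

/* Stub: the real helper programs the CR3 register. */
static void write_cr3(unsigned long pgd) { (void)pgd; }

/* Modeled on the store ordering at lines 554/571/574 above. */
static void switch_mm_model(struct mm_struct *next)
{
	/* 1. Announce that loaded_mm cannot be trusted while CR3 changes. */
	loaded_mm = LOADED_MM_SWITCHING;

	/* 2. Switch page tables. */
	write_cr3(next->pgd);

	/* 3. "Make sure we write CR3 before loaded_mm" (a barrier in the
	 *    kernel), then publish the new mm. */
	loaded_mm = next;
}

/* Modeled on the check at lines 1162-1177: anything other than the current
 * mm, including the sentinel, means CR3 must not be trusted. */
static bool uaccess_okay_model(struct mm_struct *current_mm)
{
	return loaded_mm == current_mm;
}

int main(void)
{
	struct mm_struct task_mm = { .pgd = 0x1000 };

	switch_mm_model(&task_mm);
	printf("uaccess ok after switch: %d\n", uaccess_okay_model(&task_mm));
	return 0;
}

The point of the sentinel is that a reader who samples loaded_mm during the window between the two stores sees a value that can never compare equal to a real mm, so it conservatively treats CR3 as stale.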
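
Most of the remaining matches are read-side checks: lines 299, 598 and 676 compare loaded_mm against &init_mm to detect a CPU that is currently running on the kernel's init_mm, and lines 786 and 908 compare a flush target against loaded_mm to decide whether the request concerns the currently loaded address space. A hedged model of those two early-out checks, with invented names (flush_info, should_flush_model) and the per-CPU slot again modeled as a global:

#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

struct mm_struct { unsigned long pgd; };

static struct mm_struct init_mm;
/* Model of the per-CPU cpu_tlbstate.loaded_mm slot. */
static struct mm_struct *loaded_mm = &init_mm;

/* Invented stand-in for the kernel's flush descriptor. */
struct flush_info {
	struct mm_struct *mm;	/* NULL means "not tied to one mm" */
};

/* Invented name; mirrors the early-out checks quoted above. */
static bool should_flush_model(const struct flush_info *f)
{
	/* This CPU is running on init_mm, so it is not using f->mm's
	 * user-space translations right now; the flush can be skipped here. */
	if (loaded_mm == &init_mm)
		return false;

	/* Mirrors "if (f->mm && f->mm != ...loaded_mm)": a flush aimed at a
	 * different address space than the loaded one is not acted on. */
	if (f->mm && f->mm != loaded_mm)
		return false;

	return true;
}

int main(void)
{
	struct mm_struct a, b;
	struct flush_info fa = { .mm = &a };

	loaded_mm = &b;
	printf("flush a while b loaded: %d\n", should_flush_model(&fa));

	loaded_mm = &a;
	printf("flush a while a loaded: %d\n", should_flush_model(&fa));
	return 0;
}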