Lines matching refs: mm_tlb_gen
748 * - mm_tlb_gen: the latest generation.
760 u64 mm_tlb_gen;
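
All of these hits sit in flush_tlb_func() in arch/x86/mm/tlb.c: mm_tlb_gen is a local snapshot of the mm-wide flush generation (mm->context.tlb_gen), local_tlb_gen is the generation this CPU has already caught up to, and f->new_tlb_gen is the generation a particular flush request asks the CPU to reach. Below is a minimal userspace sketch of that counter scheme, assuming C11 atomics; every name in it (mm_demo, cpu_demo, request_flush) is hypothetical, not the kernel's.

    /* Userspace sketch of the three generations; demo names only. */
    #include <inttypes.h>
    #include <stdatomic.h>
    #include <stdint.h>
    #include <stdio.h>

    struct mm_demo {
        _Atomic uint64_t tlb_gen;   /* plays the role of mm->context.tlb_gen */
    };

    struct cpu_demo {
        uint64_t local_tlb_gen;     /* generation this CPU has caught up to */
    };

    /* A flusher bumps the mm-wide generation and asks CPUs to reach it;
     * the return value plays the role of f->new_tlb_gen. */
    static uint64_t request_flush(struct mm_demo *mm)
    {
        return atomic_fetch_add(&mm->tlb_gen, 1) + 1;
    }

    int main(void)
    {
        struct mm_demo mm = { .tlb_gen = 1 };
        struct cpu_demo cpu = { .local_tlb_gen = 1 };

        uint64_t new_tlb_gen = request_flush(&mm);       /* now 2 */
        uint64_t mm_tlb_gen = atomic_load(&mm.tlb_gen);  /* also 2 */

        if (cpu.local_tlb_gen < mm_tlb_gen)
            cpu.local_tlb_gen = mm_tlb_gen;              /* "flush", catch up */

        printf("new=%" PRIu64 " mm=%" PRIu64 " local=%" PRIu64 "\n",
               new_tlb_gen, mm_tlb_gen, cpu.local_tlb_gen);
        return 0;
    }
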
798 * While the core might be still behind mm_tlb_gen, checking
799 * mm_tlb_gen unnecessarily would have negative caching effects
806 * Defer mm_tlb_gen reading as long as possible to avoid cache
809 mm_tlb_gen = atomic64_read(&loaded_mm->context.tlb_gen);
811 if (unlikely(local_tlb_gen == mm_tlb_gen)) {
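
The order of the checks quoted above is the point of lines 798-811: the flush request's target generation is compared against the CPU-local generation first, so a redundant flush can be dropped without ever touching the mm-wide counter, and only then is mm_tlb_gen read, because that atomic64_read() pulls in a cacheline that every concurrent flusher is bouncing. A compilable sketch of the ordering, with hypothetical demo names standing in for the kernel types:

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdint.h>

    struct flush_req_demo {
        uint64_t new_tlb_gen;   /* generation the flusher wants us to reach */
    };

    bool flush_needed(const struct flush_req_demo *f, uint64_t local_tlb_gen,
                      _Atomic uint64_t *mm_gen)
    {
        /* Already caught up to what this request asks for: drop it
         * without touching the shared, contended cacheline at all. */
        if (f->new_tlb_gen <= local_tlb_gen)
            return false;

        /* Deferred as long as possible; every flusher writes this word. */
        uint64_t mm_tlb_gen = atomic_load(mm_gen);

        /* A concurrent flush may already have brought us fully up to
         * date; if so there is nothing left to do here either. */
        return local_tlb_gen != mm_tlb_gen;
    }
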
821 WARN_ON_ONCE(local_tlb_gen > mm_tlb_gen);
822 WARN_ON_ONCE(f->new_tlb_gen > mm_tlb_gen);
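
Both warnings encode ordering invariants of the scheme: a CPU can never have caught up past the latest published generation, and a request can never name a generation newer than the mm_tlb_gen read at line 809, since the counter only grows and the flusher assigned f->new_tlb_gen before that read. Restated as plain assertions (a hypothetical helper, not the kernel's WARN_ON_ONCE):

    #include <assert.h>
    #include <stdint.h>

    void check_gen_invariants(uint64_t local_tlb_gen, uint64_t new_tlb_gen,
                              uint64_t mm_tlb_gen)
    {
        assert(local_tlb_gen <= mm_tlb_gen);  /* no CPU runs ahead of the mm */
        assert(new_tlb_gen <= mm_tlb_gen);    /* requests never postdate the
                                                 snapshot read afterwards */
    }
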
848 * local_tlb_gen == 1, mm_tlb_gen == 3, and end != TLB_FLUSH_ALL.
853 * 2. f->new_tlb_gen == mm_tlb_gen. This is purely an optimization.
858 * local_tlb_gen all the way to mm_tlb_gen and we can probably
863 f->new_tlb_gen == mm_tlb_gen) {
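
Lines 848-863 gate the ranged-flush fast path. Being exactly one generation behind (f->new_tlb_gen == local_tlb_gen + 1) is what makes flushing only f->start..f->end safe: in the quoted local_tlb_gen == 1, mm_tlb_gen == 3 scenario, generation 2 may have invalidated pages outside this request's range, so a partial flush would leave stale entries behind. The extra f->new_tlb_gen == mm_tlb_gen test is, as the comment says, purely an optimization: when more generations are pending anyway, a full flush advances local_tlb_gen all the way to mm_tlb_gen and likely spares a later flush IPI. A compilable restatement of the predicate (demo names, not the kernel API):

    #include <stdbool.h>
    #include <stdint.h>

    #define TLB_FLUSH_ALL   (~0UL)   /* all-ones sentinel for "whole TLB" */

    struct flush_range_demo {
        unsigned long start, end;    /* range the flusher asked for */
        uint64_t new_tlb_gen;
    };

    /* True if flushing just f->start..f->end is both safe and worthwhile. */
    bool use_partial_flush(const struct flush_range_demo *f,
                           uint64_t local_tlb_gen, uint64_t mm_tlb_gen)
    {
        return f->end != TLB_FLUSH_ALL &&
               f->new_tlb_gen == local_tlb_gen + 1 &&  /* exactly one behind */
               f->new_tlb_gen == mm_tlb_gen;           /* and it is the latest */
    }
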
890 /* Both paths above update our state to mm_tlb_gen. */
891 this_cpu_write(cpu_tlbstate.ctxs[loaded_mm_asid].tlb_gen, mm_tlb_gen);
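
Whichever branch ran, the CPU has now observed every flush up to the mm_tlb_gen snapshot taken at line 809, so line 891 publishes that snapshot into the per-CPU state rather than doing a fresh read. If the mm generation advanced in the meantime, this CPU must not claim to have caught up to it: the flusher that bumped it will send (or has sent) another request, and the f->new_tlb_gen <= local_tlb_gen check at the top will then correctly let that request through.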