Lines matching references to 'end' in arch/arm64/include/asm/tlbflush.h
188 * flush_tlb_range(vma, start, end)
189 * Invalidate the virtual-address range '[start, end)' on all
195 * flush_tlb_kernel_range(start, end)
196 * Same as flush_tlb_range(..., start, end), but applies to
219 * __flush_tlb_range(vma, start, end, stride, last_level)
220 * Invalidate the virtual-address range '[start, end)' on all
320 * completion at the end in arch_tlbbatch_flush(). Since we've already issued
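Line 320 belongs to the comment describing batched (deferred) TLB invalidation: the individual TLBI instructions are issued as pages are unmapped, and only the completion barrier is deferred to arch_tlbbatch_flush(). A sketch of what that completion step amounts to on arm64, reconstructed from the comment fragment; treat it as illustrative rather than the exact source:

static inline void arch_tlbbatch_flush(struct arch_tlbflush_unmap_batch *batch)
{
	/* The TLBI instructions were already issued when the pages were
	 * unmapped; all that remains is to wait for them to complete. */
	dsb(ish);
}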
404 unsigned long start, unsigned long end,
411 end = round_up(end, stride);
412 pages = (end - start) >> PAGE_SHIFT;
421 (end - start) >= (MAX_TLBI_OPS * stride)) ||
436 mmu_notifier_arch_invalidate_secondary_tlbs(vma->vm_mm, start, end);
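Lines 404-436 are fragments of __flush_tlb_range(): the range is aligned to the stride, converted to a page count, and if it would take more than MAX_TLBI_OPS individual invalidations the whole address space is flushed instead; secondary (SMMU/IOMMU) TLBs are notified at the end. A condensed sketch of that flow, with the per-stride TLBI loop elided; the signature and the flush_tlb_mm() fallback follow the mainline arm64 code but are simplified here, and the real size check is more involved:

static inline void __flush_tlb_range(struct vm_area_struct *vma,
				     unsigned long start, unsigned long end,
				     unsigned long stride, bool last_level,
				     int tlb_level)
{
	unsigned long pages;

	start = round_down(start, stride);
	end = round_up(end, stride);
	pages = (end - start) >> PAGE_SHIFT;

	/* Too many individual invalidations: flush the whole address space. */
	if ((end - start) >= (MAX_TLBI_OPS * stride)) {
		flush_tlb_mm(vma->vm_mm);
		return;
	}

	/* ... issue per-stride TLBI operations for 'pages' pages, honouring
	 *     last_level/tlb_level, with the required DSB barriers ... */

	mmu_notifier_arch_invalidate_secondary_tlbs(vma->vm_mm, start, end);
}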
440 unsigned long start, unsigned long end)
447 __flush_tlb_range(vma, start, end, PAGE_SIZE, false, 0);
450 static inline void flush_tlb_kernel_range(unsigned long start, unsigned long end)
454 if ((end - start) > (MAX_TLBI_OPS * PAGE_SIZE)) {
460 end = __TLBI_VADDR(end, 0);
463 for (addr = start; addr < end; addr += 1 << (PAGE_SHIFT - 12))
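Lines 450-463 are the corresponding kernel-range path: beyond MAX_TLBI_OPS pages the whole TLB is flushed, otherwise start and end are converted into TLBI operands with __TLBI_VADDR() and the range is walked one page at a time (operands encode the address in 4 KiB units, hence the 1 << (PAGE_SHIFT - 12) step). A sketch of how those fragments fit together, based on the arm64 implementation; the barrier and TLBI details may differ between kernel versions:

static inline void flush_tlb_kernel_range(unsigned long start, unsigned long end)
{
	unsigned long addr;

	/* Too large to invalidate page by page: flush the entire TLB. */
	if ((end - start) > (MAX_TLBI_OPS * PAGE_SIZE)) {
		flush_tlb_all();
		return;
	}

	start = __TLBI_VADDR(start, 0);
	end = __TLBI_VADDR(end, 0);

	dsb(ishst);
	/* One TLBI per page; operands are encoded in 4 KiB units. */
	for (addr = start; addr < end; addr += 1 << (PAGE_SHIFT - 12))
		__tlbi(vaale1is, addr);
	dsb(ish);
	isb();
}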