Lines matching refs:shadow_start — cross-reference hits for the shadow_start identifier in the kernel's KASAN shadow-memory code (apparently mm/kasan/shadow.c); the number leading each hit is its line in the source file.
118 void *shadow_start, *shadow_end;
127 shadow_start = kasan_mem_to_shadow(address);
130 __memset(shadow_start, value, shadow_end - shadow_start);
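
The hits at 118–130 show KASAN's basic poisoning pattern: translate the start and end of a memory range into shadow addresses, then fill the shadow bytes in between with a poison value. A minimal sketch of that pattern (not the verbatim kernel function), assuming the in-tree KASAN context — kasan_mem_to_shadow() from <linux/kasan.h>, __memset() as KASAN's non-instrumented memset — and a range already aligned to the KASAN granule:

	#include <linux/kasan.h>

	/* Sketch: mark the shadow for [addr, addr + size) with `value`
	 * (0 = fully accessible, nonzero = poisoned). */
	static void poison_shadow_sketch(const void *addr, size_t size, u8 value)
	{
		void *shadow_start = kasan_mem_to_shadow(addr);
		void *shadow_end = kasan_mem_to_shadow(addr + size);

		/* __memset() avoids recursing into KASAN's instrumented memset(). */
		__memset(shadow_start, value, shadow_end - shadow_start);
	}

Since each shadow byte covers one granule (8 bytes of real memory by default), a single memset over the shadow poisons the whole range cheaply.
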
550 unsigned long shadow_start;
552 shadow_start = (unsigned long)kasan_mem_to_shadow(addr);
556 if (WARN_ON(!PAGE_ALIGNED(shadow_start)))
559 ret = __vmalloc_node_range(shadow_size, 1, shadow_start,
560 shadow_start + shadow_size,
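
Lines 550–560 are the allocation side: when a region (e.g. a freshly loaded module) needs shadow that does not exist yet, backing pages are vmalloc'ed at exactly the address kasan_mem_to_shadow() maps the region to. A hedged sketch of that call pattern — the function name, the error handling, and the KASAN_SHADOW_SCALE_SHIFT sizing are assumptions; only the PAGE_ALIGNED check and the __vmalloc_node_range() call shape come from the listing:

	#include <linux/kasan.h>
	#include <linux/vmalloc.h>

	static int alloc_shadow_sketch(void *addr, size_t size)
	{
		unsigned long shadow_start = (unsigned long)kasan_mem_to_shadow(addr);
		/* One shadow byte per 2^KASAN_SHADOW_SCALE_SHIFT bytes of memory,
		 * rounded up to whole pages so the window can be mapped. */
		unsigned long shadow_size = PAGE_ALIGN(
			(size + (1UL << KASAN_SHADOW_SCALE_SHIFT) - 1)
				>> KASAN_SHADOW_SCALE_SHIFT);
		void *p;

		/* Fixed shadow placement only works if it is page-aligned. */
		if (WARN_ON(!PAGE_ALIGNED(shadow_start)))
			return -EINVAL;

		p = __vmalloc_node_range(shadow_size, 1, shadow_start,
					 shadow_start + shadow_size,
					 GFP_KERNEL, PAGE_KERNEL, VM_NO_GUARD,
					 NUMA_NO_NODE, __builtin_return_address(0));
		return p ? 0 : -ENOMEM;
	}

The start/end pair passed to __vmalloc_node_range() pins the allocation to the computed shadow window rather than letting vmalloc pick an address.
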
621 unsigned long nr_shadow_pages, start_kaddr, shadow_start;
626 shadow_start = (unsigned long)kasan_mem_to_shadow((void *)start_kaddr);
628 shadow_end = shadow_start + shadow_size;
643 if (shadow_mapped(shadow_start))
646 ret = __vmalloc_node_range(shadow_size, PAGE_SIZE, shadow_start,
662 * shadow_start was either mapped during boot by kasan_init()
673 vm = find_vm_area((void *)shadow_start);
675 vfree((void *)shadow_start);
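
The block at 621–675 is the memory-hotplug path: when a memory block goes online its shadow must be mapped, and when it goes offline the shadow can be freed again — but only if it was vmalloc'ed rather than mapped at boot by kasan_init(), as the comment at 662 notes. A sketch of that notifier logic, assuming a memory_notify-style callback; shadow_mapped() stands in for the kernel's static page-table-walking helper of the same name seen at line 643:

	#include <linux/kasan.h>
	#include <linux/memory.h>
	#include <linux/vmalloc.h>

	static int hotplug_shadow_sketch(unsigned long action,
					 unsigned long start_pfn,
					 unsigned long nr_pages)
	{
		unsigned long start_kaddr = (unsigned long)pfn_to_kaddr(start_pfn);
		unsigned long shadow_start =
			(unsigned long)kasan_mem_to_shadow((void *)start_kaddr);
		unsigned long shadow_size =
			(nr_pages >> KASAN_SHADOW_SCALE_SHIFT) << PAGE_SHIFT;
		unsigned long shadow_end = shadow_start + shadow_size;

		switch (action) {
		case MEM_GOING_ONLINE:
			/* Already mapped at boot (e.g. re-onlining a block that
			 * was offlined earlier): nothing to do. */
			if (shadow_mapped(shadow_start))
				return NOTIFY_OK;
			if (!__vmalloc_node_range(shadow_size, PAGE_SIZE,
						  shadow_start, shadow_end,
						  GFP_KERNEL, PAGE_KERNEL,
						  VM_NO_GUARD,
						  pfn_to_nid(start_pfn),
						  __builtin_return_address(0)))
				return NOTIFY_BAD;
			return NOTIFY_OK;
		case MEM_OFFLINE:
			/* Only shadow vmalloc'ed at online time has a vm_struct;
			 * boot-time shadow from kasan_init() must stay mapped. */
			if (find_vm_area((void *)shadow_start))
				vfree((void *)shadow_start);
			return NOTIFY_OK;
		}
		return NOTIFY_OK;
	}

The find_vm_area() check at 673 is what distinguishes the two cases: vfree() is only legal for shadow that vmalloc itself handed out.
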
722 unsigned long shadow_start, shadow_end;
728 shadow_start = (unsigned long)kasan_mem_to_shadow((void *)addr);
729 shadow_start = ALIGN_DOWN(shadow_start, PAGE_SIZE);
733 ret = apply_to_page_range(&init_mm, shadow_start,
734 shadow_end - shadow_start,
739 flush_cache_vmap(shadow_start, shadow_end);
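
Lines 722–739 populate shadow for vmalloc space on demand: round the shadow window out to page boundaries, walk it with apply_to_page_range(), and install a backing page behind every still-empty PTE. A sketch of both halves; populate_pte_sketch() is a stand-in for the kernel's PTE callback, and it omits the real code's re-check of the PTE under init_mm.page_table_lock for brevity:

	#include <linux/kasan.h>
	#include <linux/mm.h>
	#include <asm/cacheflush.h>

	static int populate_pte_sketch(pte_t *ptep, unsigned long addr, void *unused)
	{
		unsigned long page;

		if (!pte_none(*ptep))	/* shadow page already present */
			return 0;

		page = __get_free_page(GFP_KERNEL | __GFP_ZERO);
		if (!page)
			return -ENOMEM;

		set_pte_at(&init_mm, addr, ptep,
			   pfn_pte(PFN_DOWN(__pa(page)), PAGE_KERNEL));
		return 0;
	}

	static int populate_shadow_sketch(unsigned long addr, unsigned long size)
	{
		unsigned long shadow_start, shadow_end;
		int ret;

		/* The shadow of a granule-aligned range need not be page-aligned,
		 * so round outward to whole pages before mapping. */
		shadow_start = ALIGN_DOWN((unsigned long)
					  kasan_mem_to_shadow((void *)addr),
					  PAGE_SIZE);
		shadow_end = ALIGN((unsigned long)
				   kasan_mem_to_shadow((void *)(addr + size)),
				   PAGE_SIZE);

		ret = apply_to_page_range(&init_mm, shadow_start,
					  shadow_end - shadow_start,
					  populate_pte_sketch, NULL);
		if (ret)
			return ret;

		/* Make the new shadow mappings visible before they are used. */
		flush_cache_vmap(shadow_start, shadow_end);
		return 0;
	}
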
897 void *shadow_start, *shadow_end;
918 shadow_start = kasan_mem_to_shadow((void *)region_start);
921 if (shadow_end > shadow_start) {
922 size = shadow_end - shadow_start;
924 (unsigned long)shadow_start,
927 flush_tlb_kernel_range((unsigned long)shadow_start,
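
Finally, 897–927 is the reverse operation: when a vmalloc region is released, the shadow pages that exclusively covered it are torn down with apply_to_existing_page_range() and the stale translations are flushed. A sketch of that teardown, assuming region_start/region_end have already been trimmed (as the real code does) so no neighbouring allocation still needs the shadow in between; depopulate_pte_sketch() stands in for the kernel's PTE callback and omits its page_table_lock handling:

	#include <linux/kasan.h>
	#include <linux/mm.h>
	#include <asm/tlbflush.h>

	static int depopulate_pte_sketch(pte_t *ptep, unsigned long addr,
					 void *unused)
	{
		if (!pte_none(*ptep)) {
			unsigned long page =
				(unsigned long)__va(pte_pfn(*ptep) << PAGE_SHIFT);

			pte_clear(&init_mm, addr, ptep);
			free_page(page);
		}
		return 0;
	}

	static void release_shadow_sketch(unsigned long region_start,
					  unsigned long region_end)
	{
		void *shadow_start = kasan_mem_to_shadow((void *)region_start);
		void *shadow_end = kasan_mem_to_shadow((void *)region_end);

		/* Trimming can leave an empty window; only act if there is one. */
		if (shadow_end > shadow_start) {
			apply_to_existing_page_range(&init_mm,
						     (unsigned long)shadow_start,
						     shadow_end - shadow_start,
						     depopulate_pte_sketch, NULL);
			/* No CPU may keep a cached translation to a freed page. */
			flush_tlb_kernel_range((unsigned long)shadow_start,
					       (unsigned long)shadow_end);
		}
	}

apply_to_existing_page_range() is used instead of apply_to_page_range() so the walk visits only mappings that already exist instead of allocating page tables for holes.
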