Lines matching refs: shadow_start

126 void *shadow_start, *shadow_end;
147 shadow_start = kasan_mem_to_shadow(addr);
150 __memset(shadow_start, value, shadow_end - shadow_start);
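The first cluster (126-150) appears to be the poisoning path, which writes a poison value over the shadow bytes backing a memory range. A minimal userspace sketch of the address-to-shadow arithmetic it relies on, assuming the generic-KASAN scale shift of 3 (one shadow byte per 8 bytes of memory) and using the x86-64 shadow offset purely as an illustrative value, since the real offset is arch- and config-specific:

#include <stdint.h>
#include <stdio.h>

/* Illustrative constants: SCALE_SHIFT is 3 for generic KASAN; the
 * offset shown is the x86-64 value, used here only as an example. */
#define KASAN_SHADOW_SCALE_SHIFT 3
#define KASAN_SHADOW_OFFSET 0xdffffc0000000000UL

/* Userspace model of kasan_mem_to_shadow(): scale the address down,
 * then relocate it into the dedicated shadow region. */
static uintptr_t mem_to_shadow(uintptr_t addr)
{
	return (addr >> KASAN_SHADOW_SCALE_SHIFT) + KASAN_SHADOW_OFFSET;
}

int main(void)
{
	uintptr_t addr = 0xffff888000000000UL; /* hypothetical kernel address */

	/* Poisoning [addr, addr + size) memsets the shadow bytes in
	 * [shadow(addr), shadow(addr + size)), as at 150 above. */
	printf("shadow(%#lx) = %#lx\n", (unsigned long)addr,
	       (unsigned long)mem_to_shadow(addr));
	return 0;
}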
236 unsigned long nr_shadow_pages, start_kaddr, shadow_start;
241 shadow_start = (unsigned long)kasan_mem_to_shadow((void *)start_kaddr);
243 shadow_end = shadow_start + shadow_size;
258 if (shadow_mapped(shadow_start))
261 ret = __vmalloc_node_range(shadow_size, PAGE_SIZE, shadow_start,
277 * shadow_start was either mapped during boot by kasan_init()
288 vm = find_vm_area((void *)shadow_start);
290 vfree((void *)shadow_start);
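The cluster at 236-290 looks like a memory-hotplug notifier: when a block of memory comes online, a matching chunk of shadow is mapped with __vmalloc_node_range(), and it is torn down with vfree() on offline, but only if find_vm_area() confirms the shadow was vmalloc-backed rather than mapped at boot (per the comment at 277). A sketch of the sizing arithmetic, assuming PAGE_SHIFT = 12 (4 KiB pages) and the generic scale shift:

#include <stdio.h>

#define KASAN_SHADOW_SCALE_SHIFT 3
#define PAGE_SHIFT 12

int main(void)
{
	unsigned long nr_pages = 32768;	/* hypothetical 128 MiB memory block */

	/* One shadow byte covers 2^SCALE_SHIFT bytes, so a hotplugged
	 * region needs 1/8th of its page count in shadow pages. */
	unsigned long nr_shadow_pages = nr_pages >> KASAN_SHADOW_SCALE_SHIFT;
	unsigned long shadow_size = nr_shadow_pages << PAGE_SHIFT;

	printf("%lu pages need %lu shadow pages (%lu bytes of shadow)\n",
	       nr_pages, nr_shadow_pages, shadow_size);
	return 0;
}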
343 unsigned long shadow_start, shadow_end;
352 shadow_start = (unsigned long)kasan_mem_to_shadow((void *)addr);
363 __memset((void *)shadow_start, KASAN_VMALLOC_INVALID, shadow_end - shadow_start);
367 shadow_start = PAGE_ALIGN_DOWN(shadow_start);
370 ret = apply_to_page_range(&init_mm, shadow_start,
371 shadow_end - shadow_start,
376 flush_cache_vmap(shadow_start, shadow_end);
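Lines 343-376 match the vmalloc shadow-population path: the shadow of a new vmalloc area is marked KASAN_VMALLOC_INVALID, and where backing pages are needed they are installed via apply_to_page_range(). The range is first widened to page boundaries (367) because a vmalloc region's shadow can start or end mid-page. A minimal sketch of that alignment, with hypothetical shadow addresses:

#include <stdio.h>

#define PAGE_SIZE 4096UL
/* Same rounding the kernel's PAGE_ALIGN()/PAGE_ALIGN_DOWN() macros do. */
#define PAGE_ALIGN(x)      (((x) + PAGE_SIZE - 1) & ~(PAGE_SIZE - 1))
#define PAGE_ALIGN_DOWN(x) ((x) & ~(PAGE_SIZE - 1))

int main(void)
{
	/* Hypothetical shadow range for a small vmalloc allocation. */
	unsigned long shadow_start = 0xffffc90000001234UL;
	unsigned long shadow_end   = 0xffffc90000001634UL;

	/* Widen outward so whole pages can be populated. */
	printf("populate pages in [%#lx, %#lx)\n",
	       PAGE_ALIGN_DOWN(shadow_start), PAGE_ALIGN(shadow_end));
	return 0;
}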
513 void *shadow_start, *shadow_end;
535 shadow_start = kasan_mem_to_shadow((void *)region_start);
538 if (shadow_end > shadow_start) {
539 size = shadow_end - shadow_start;
541 __memset(shadow_start, KASAN_SHADOW_INIT, shadow_end - shadow_start);
545 (unsigned long)shadow_start,
548 flush_tlb_kernel_range((unsigned long)shadow_start,
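Lines 513-548 match the release path, which tears shadow pages back down after vmalloc areas are freed (or, on UML, just memsets them back to KASAN_SHADOW_INIT, 541). A shadow page can only go away if every byte of memory it describes is free, which is why the freed region is shrunk inward to shadow-page-coverage boundaries before the shadow_end > shadow_start guard at 538. A sketch of that guard, assuming 4 KiB pages and the generic 8-byte granule, so one shadow page covers 32 KiB of real memory:

#include <stdio.h>

#define PAGE_SHIFT 12
#define KASAN_GRANULE_SIZE 8UL
/* One shadow page describes this many bytes of real memory. */
#define KASAN_MEMORY_PER_SHADOW_PAGE (KASAN_GRANULE_SIZE << PAGE_SHIFT)

#define ALIGN_UP(x, a)   (((x) + (a) - 1) & ~((a) - 1))
#define ALIGN_DOWN(x, a) ((x) & ~((a) - 1))

int main(void)
{
	/* Hypothetical freed vmalloc range. */
	unsigned long free_start = 0xffffc90000005000UL;
	unsigned long free_end   = 0xffffc90000020000UL;

	/* Shrink inward: only fully covered shadow pages may be released. */
	unsigned long region_start = ALIGN_UP(free_start, KASAN_MEMORY_PER_SHADOW_PAGE);
	unsigned long region_end   = ALIGN_DOWN(free_end, KASAN_MEMORY_PER_SHADOW_PAGE);

	if (region_end > region_start)
		printf("release shadow for [%#lx, %#lx)\n", region_start, region_end);
	else
		printf("range too small, nothing to release\n");
	return 0;
}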
605 unsigned long shadow_start;
607 shadow_start = (unsigned long)kasan_mem_to_shadow(addr);
612 if (WARN_ON(!PAGE_ALIGNED(shadow_start)))
616 __memset((void *)shadow_start, KASAN_SHADOW_INIT, shadow_size);
620 ret = __vmalloc_node_range(shadow_size, 1, shadow_start,
621 shadow_start + shadow_size,
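The final cluster (605-621) matches the module-shadow allocator, which vmallocs shadow for a freshly mapped module region (on UML the pre-mapped shadow is simply memset instead, 616). A sketch of the likely size calculation, assuming the generic 8-byte granule and rounding the result up to whole pages, as a page-granular allocator requires; the input size is hypothetical:

#include <stdio.h>

#define PAGE_SIZE 4096UL
#define KASAN_SHADOW_SCALE_SHIFT 3
#define KASAN_GRANULE_SIZE (1UL << KASAN_SHADOW_SCALE_SHIFT)
#define round_up(x, a) ((((x) + (a) - 1) / (a)) * (a))

int main(void)
{
	size_t size = 123456;	/* hypothetical module mapping size */

	/* One shadow byte per granule, rounded up to whole pages so the
	 * range can be handed to the page-granular vmalloc allocator. */
	size_t scaled = (size + KASAN_GRANULE_SIZE - 1) >> KASAN_SHADOW_SCALE_SHIFT;
	size_t shadow_size = round_up(scaled, PAGE_SIZE);

	printf("%zu-byte module -> %zu bytes of shadow\n", size, shadow_size);
	return 0;
}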