Lines matching refs:page (KMSAN shadow/origin page metadata helpers)

22 #define shadow_page_for(page) ((page)->kmsan_shadow)
24 #define origin_page_for(page) ((page)->kmsan_origin)
26 static void *shadow_ptr_for(struct page *page)
28 return page_address(shadow_page_for(page));
31 static void *origin_ptr_for(struct page *page)
33 return page_address(origin_page_for(page));
36 static bool page_has_metadata(struct page *page)
38 return shadow_page_for(page) && origin_page_for(page);
41 static void set_no_shadow_origin_page(struct page *page)
43 shadow_page_for(page) = NULL;
44 origin_page_for(page) = NULL;
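
The accessors at lines 22-44 only make sense if struct page carries two extra pointers, kmsan_shadow and kmsan_origin, linking each data page to the pages holding its shadow (initialization state) and origin (where an uninitialized value came from) metadata. Below is a minimal userspace model of that layout; everything prefixed mock_ is invented for illustration, not kernel code.

#include <stdbool.h>
#include <stdio.h>

#define MOCK_PAGE_SIZE 4096u

/* Simplified stand-in for struct page: only the KMSAN metadata links. */
struct mock_page {
    unsigned char data[MOCK_PAGE_SIZE]; /* page contents                     */
    struct mock_page *kmsan_shadow;     /* one shadow byte per data byte     */
    struct mock_page *kmsan_origin;     /* 4-byte origin ids for those bytes */
};

static bool mock_page_has_metadata(const struct mock_page *page)
{
    return page->kmsan_shadow && page->kmsan_origin;
}

int main(void)
{
    static struct mock_page page, shadow, origin;

    /* A page starts without metadata ... */
    printf("%d\n", mock_page_has_metadata(&page)); /* prints 0 */

    /* ... until shadow and origin pages are attached to it. */
    page.kmsan_shadow = &shadow;
    page.kmsan_origin = &origin;
    printf("%d\n", mock_page_has_metadata(&page)); /* prints 1 */
    return 0;
}
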
73 static struct page *virt_to_page_or_null(void *vaddr)
88 * Even if we redirect this memory access to the dummy page, it will
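
The comment fragment at line 88 mentions a dummy page: the idea is that when an access has no usable metadata, the instrumentation can be redirected to a spare page so it still has somewhere valid to read shadow from or write shadow to. A hedged model of that fallback follows; the mock_ names and the exact load/store split are illustrative assumptions, not the kernel's definitions.

#include <stdbool.h>

#define MOCK_PAGE_SIZE 4096u

/* Spare metadata pages used when no real shadow/origin exists. */
static unsigned char mock_dummy_load_page[MOCK_PAGE_SIZE];  /* stays zero   */
static unsigned char mock_dummy_store_page[MOCK_PAGE_SIZE]; /* scratch sink */

/*
 * Sketch: with no real metadata, loads are pointed at an all-zero page
 * (so the value looks initialized) and stores at a scratch page (so the
 * shadow update is harmlessly discarded).
 */
static void *mock_shadow_for_access(void *real_shadow, bool is_store)
{
    if (real_shadow)
        return real_shadow;
    return is_store ? mock_dummy_store_page : mock_dummy_load_page;
}
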
127 struct page *page;
143 page = virt_to_page_or_null(address);
144 if (!page)
146 if (!page_has_metadata(page))
150 return (is_origin ? origin_ptr_for(page) : shadow_ptr_for(page)) + off;
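
Lines 127-150 show the core of the lookup: resolve the address to its struct page, give up if the page carries no metadata, and reuse the address's offset within its page as the offset into the shadow or origin page. A self-contained sketch of that arithmetic over a single mock page (the mock_ names and the single-page bounds check are illustrative):

#include <stdint.h>
#include <stdio.h>

#define MOCK_PAGE_SIZE 4096u

/* One data page with its shadow and origin pages, all the same size. */
static unsigned char mock_data[MOCK_PAGE_SIZE];
static unsigned char mock_shadow[MOCK_PAGE_SIZE];
static unsigned char mock_origin[MOCK_PAGE_SIZE];

/*
 * Sketch: if the address falls inside a page we track, its metadata
 * lives at the same offset inside that page's shadow (is_origin == 0)
 * or origin (is_origin == 1) page; otherwise there is no metadata.
 */
static void *mock_get_metadata(void *address, int is_origin)
{
    uintptr_t addr = (uintptr_t)address;
    uintptr_t base = (uintptr_t)mock_data;

    if (addr < base || addr >= base + MOCK_PAGE_SIZE)
        return NULL;                    /* no backing page -> no metadata */
    return (is_origin ? mock_origin : mock_shadow) + (addr - base);
}

int main(void)
{
    /* The shadow byte of mock_data[123] sits at mock_shadow[123]. */
    unsigned char *s = mock_get_metadata(&mock_data[123], 0);

    printf("%td\n", s - mock_shadow); /* prints 123 */
    return 0;
}
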
153 void kmsan_copy_page_meta(struct page *dst, struct page *src)
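
kmsan_copy_page_meta() (line 153) exists because duplicating a page's contents without duplicating its shadow and origin would let the copy claim a different initialization state than the source. A hedged model of the required behaviour follows; the fallback of treating the destination as fully initialized when either side lacks metadata is an assumption of this sketch.

#include <string.h>

#define MOCK_PAGE_SIZE 4096u

struct mock_page {
    unsigned char data[MOCK_PAGE_SIZE];
    unsigned char *shadow;  /* NULL when the page has no metadata */
    unsigned char *origin;
};

/* Sketch: metadata must travel together with the page contents. */
static void mock_copy_page_meta(struct mock_page *dst,
                                const struct mock_page *src)
{
    if (!dst->shadow || !dst->origin)
        return;                       /* nowhere to copy metadata to */
    if (!src->shadow || !src->origin) {
        /* Unknown source state: treat the destination as initialized. */
        memset(dst->shadow, 0, MOCK_PAGE_SIZE);
        return;
    }
    memcpy(dst->shadow, src->shadow, MOCK_PAGE_SIZE);
    memcpy(dst->origin, src->origin, MOCK_PAGE_SIZE);
}
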
172 void kmsan_alloc_page(struct page *page, unsigned int order, gfp_t flags)
175 struct page *shadow, *origin;
179 if (!page)
182 shadow = shadow_page_for(page);
183 origin = origin_page_for(page);
200 * Addresses are page-aligned, pages are contiguous, so it's ok
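
Lines 172-200 belong to kmsan_alloc_page(): metadata for a freshly allocated (possibly high-order) block is filled according to the allocation flags, and the comment at line 200 notes that the pages are contiguous and page-aligned, so one pass over the whole block suffices. A hedged sketch of that policy, with MOCK_GFP_ZERO standing in for __GFP_ZERO and a plain integer standing in for the saved allocation stack id:

#include <stdint.h>
#include <string.h>

#define MOCK_PAGE_SIZE 4096u
#define MOCK_GFP_ZERO  0x1u   /* stand-in for __GFP_ZERO */

/*
 * Sketch: a zeroed allocation starts fully initialized (shadow all
 * zero); anything else starts fully poisoned (shadow all 0xff), with
 * every 4-byte origin slot recording the same allocation stack id.
 */
static void mock_alloc_page_meta(unsigned char *shadow, uint32_t *origin,
                                 unsigned int order, unsigned int gfp_flags,
                                 uint32_t alloc_stack_id)
{
    size_t bytes = (size_t)MOCK_PAGE_SIZE << order;

    if (gfp_flags & MOCK_GFP_ZERO) {
        memset(shadow, 0x00, bytes);
        memset(origin, 0x00, bytes);
        return;
    }
    memset(shadow, 0xff, bytes);
    for (size_t i = 0; i < bytes / sizeof(*origin); i++)
        origin[i] = alloc_stack_id;
}
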
207 void kmsan_free_page(struct page *page, unsigned int order)
212 kmsan_internal_poison_memory(page_address(page),
213 page_size(page),
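
kmsan_free_page() (lines 207-213) poisons the entire range again on free, covering page_size(page) bytes for the whole high-order block, so that later reads of freed memory are flagged as uses of uninitialized values. The corresponding step in the same mock model:

#include <string.h>

#define MOCK_PAGE_SIZE 4096u

/* Sketch: on free, every byte of the block becomes uninitialized again. */
static void mock_free_page_meta(unsigned char *shadow, unsigned int order)
{
    memset(shadow, 0xff, (size_t)MOCK_PAGE_SIZE << order);
}
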
220 pgprot_t prot, struct page **pages,
224 struct page **s_pages, **o_pages;
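
Lines 220-224 are part of the vmap path: when an array of pages is mapped into a virtual range, the s_pages/o_pages arrays collect their shadow and origin pages so those can be mapped at the matching metadata ranges as well. A hedged sketch of the collection step only; the actual remapping is kernel-specific and the mock_ names are invented.

#include <stdlib.h>

struct mock_page {
    struct mock_page *kmsan_shadow;
    struct mock_page *kmsan_origin;
};

/*
 * Sketch: build parallel arrays of the shadow and origin pages backing
 * `pages`, so they can be mapped alongside the data pages. Returns 0 on
 * success, -1 if allocation fails; the caller frees both arrays.
 */
static int mock_collect_meta_pages(struct mock_page **pages, size_t npages,
                                   struct mock_page ***s_pages,
                                   struct mock_page ***o_pages)
{
    *s_pages = calloc(npages, sizeof(**s_pages));
    *o_pages = calloc(npages, sizeof(**o_pages));
    if (!*s_pages || !*o_pages) {
        free(*s_pages);
        free(*o_pages);
        return -1;
    }
    for (size_t i = 0; i < npages; i++) {
        (*s_pages)[i] = pages[i]->kmsan_shadow;
        (*o_pages)[i] = pages[i]->kmsan_origin;
    }
    return 0;
}
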
279 struct page *shadow_p, *origin_p;
281 struct page *page;
289 page = virt_to_page_or_null((char *)start + addr);
292 shadow_page_for(page) = shadow_p;
295 origin_page_for(page) = origin_p;
299 void kmsan_setup_meta(struct page *page, struct page *shadow,
300 struct page *origin, int order)
305 shadow_page_for(&page[i]) = &shadow[i];
306 origin_page_for(&page[i]) = &origin[i];
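
Lines 279-306 cover the boot-time wiring: given a block of 1 << order data pages and equally sized shadow and origin blocks, page i of the data block is linked to page i of each metadata block, exactly as the assignments at lines 292, 295, 305 and 306 show. A hedged sketch of that loop over mock pages:

#include <stdio.h>

struct mock_page {
    struct mock_page *kmsan_shadow;
    struct mock_page *kmsan_origin;
};

/* Sketch: wire page i of the block to page i of its metadata blocks. */
static void mock_setup_meta(struct mock_page *page, struct mock_page *shadow,
                            struct mock_page *origin, int order)
{
    for (int i = 0; i < (1 << order); i++) {
        page[i].kmsan_shadow = &shadow[i];
        page[i].kmsan_origin = &origin[i];
    }
}

int main(void)
{
    /* An order-2 block spans 4 pages, so 4 shadow and 4 origin pages. */
    static struct mock_page page[4], shadow[4], origin[4];

    mock_setup_meta(page, shadow, origin, 2);
    printf("%d\n", page[3].kmsan_shadow == &shadow[3]); /* prints 1 */
    return 0;
}
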