Lines matching references to 'page' (each match is prefixed with its line number in the matched source file; judging from nds32-specific helpers such as kremap0() and cpu_dcache_wbinval_page(), the file is the Linux kernel's arch/nds32/mm/cacheflush.c).
28 void flush_icache_page(struct vm_area_struct *vma, struct page *page)
33 kaddr = (unsigned long)kmap_atomic(page);
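The matches at 28 and 33 are the signature and body of flush_icache_page(). A sketch of the whole function, reconstructed around the two matched lines (the irq handling and the cpu_cache_wbinval_page() call are recalled from the surrounding file, not verbatim):

    void flush_icache_page(struct vm_area_struct *vma, struct page *page)
    {
            unsigned long kaddr, flags;

            local_irq_save(flags);
            /* Give the page a kernel virtual address for the cache ops. */
            kaddr = (unsigned long)kmap_atomic(page);
            /* Write back the D-cache and invalidate the I-cache for the
             * page; the flag requests I-cache work for executable vmas. */
            cpu_cache_wbinval_page(kaddr, vma->vm_flags & VM_EXEC);
            kunmap_atomic((void *)kaddr);
            local_irq_restore(flags);
    }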
39 void flush_icache_user_page(struct vm_area_struct *vma, struct page *page,
43 kaddr = (unsigned long)kmap_atomic(page) + (addr & ~PAGE_MASK);
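flush_icache_user_page() (39, 43) is the narrower variant used after writing a few bytes into a user page (breakpoint insertion, for instance): line 43 adds the sub-page offset (addr & ~PAGE_MASK) so only the touched range is flushed. Sketch, with the flush_icache_range() call inferred from the file:

    void flush_icache_user_page(struct vm_area_struct *vma, struct page *page,
                                unsigned long addr, int len)
    {
            unsigned long kaddr;

            /* Point kaddr at the modified bytes, not the page base. */
            kaddr = (unsigned long)kmap_atomic(page) + (addr & ~PAGE_MASK);
            flush_icache_range(kaddr, kaddr + len);
            kunmap_atomic((void *)kaddr);
    }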
51 struct page *page;
65 page = pfn_to_page(pfn);
67 if ((test_and_clear_bit(PG_dcache_dirty, &page->flags)) ||
71 kaddr = (unsigned long)kmap_atomic(page);
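The matches from 51 through 71 fall inside update_mmu_cache(). This is where a flush deferred by flush_dcache_page() (line 243 below) actually happens: once the page is wired into a user address space, PG_dcache_dirty is tested and cleared, and the page is written back if it was dirty or is becoming executable. Sketch of that tail; the pfn validation and TLB refill portion is elided:

    void update_mmu_cache(struct vm_area_struct *vma, unsigned long addr,
                          pte_t *pte)
    {
            unsigned long pfn = pte_pfn(*pte);
            struct page *page;

            /* ... pfn validation and TLB update elided ... */

            page = pfn_to_page(pfn);
            /* Flush now if flush_dcache_page() deferred it, or if a stale
             * I-cache could be observed through an executable mapping. */
            if ((test_and_clear_bit(PG_dcache_dirty, &page->flags)) ||
                (vma->vm_flags & VM_EXEC)) {
                    unsigned long kaddr, flags;

                    local_irq_save(flags);
                    kaddr = (unsigned long)kmap_atomic(page);
                    cpu_cache_wbinval_page(kaddr, vma->vm_flags & VM_EXEC);
                    kunmap_atomic((void *)kaddr);
                    local_irq_restore(flags);
            }
    }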
80 static inline unsigned long aliasing(unsigned long addr, unsigned long page)
82 return ((addr & PAGE_MASK) ^ page) & (SHMLBA - 1);
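Lines 80 and 82 together are the complete aliasing() helper, reassembled here with a comment on what the expression tests (SHMLBA is the aliasing span of the virtually indexed D-cache):

    /* Nonzero iff addr and page fall under different cache colors: the
     * index bits above the page offset but below SHMLBA disagree, so the
     * same physical page could sit in two cache locations at once. */
    static inline unsigned long aliasing(unsigned long addr, unsigned long page)
    {
            return ((addr & PAGE_MASK) ^ page) & (SHMLBA - 1);
    }

For example, with 4 KiB pages and a 16 KiB SHMLBA, aliasing(0x1000, 0x3000) is (0x1000 ^ 0x3000) & 0x3fff = 0x2000: nonzero, so the callers below must flush or remap before touching the same data through the second address.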
178 struct page *to)
187 void clear_user_page(void *addr, unsigned long vaddr, struct page *page)
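178 is the tail of the copy_user_page() signature and 187 is its sibling clear_user_page(); these are the plain copy/clear entry points. A sketch of clear_user_page(); the cache calls bracketing clear_page() are recalled from the file and may not be verbatim:

    void clear_user_page(void *addr, unsigned long vaddr, struct page *page)
    {
            /* Drop stale lines cached under the user alias... */
            cpu_dcache_wbinval_page((unsigned long)vaddr);
            cpu_icache_inval_page((unsigned long)vaddr);
            clear_page(addr);
            /* ...then push the fresh zeros out of the kernel alias. */
            cpu_dcache_wbinval_page((unsigned long)addr);
            cpu_icache_inval_page((unsigned long)addr);
    }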
196 void copy_user_highpage(struct page *to, struct page *from,
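copy_user_highpage() (196) is the aliasing-aware copy: instead of copying through the pages' linear kernel addresses, it maps both source and destination at kernel addresses that share the user address's cache color, so the copied data lands in the cache lines the user mapping will hit. Condensed sketch; kremap0()/kremap1()/kunmap01() are this file's own color-matched mapping helpers, and a write-back of the source's kernel alias is elided:

    void copy_user_highpage(struct page *to, struct page *from,
                            unsigned long vaddr, struct vm_area_struct *vma)
    {
            unsigned long vto, vfrom, flags;

            local_irq_save(flags);
            /* Map both pages under vaddr's cache color. */
            vto = kremap0(vaddr, page_to_phys(to));
            vfrom = kremap1(vaddr, page_to_phys(from));
            copy_page((void *)vto, (void *)vfrom);
            kunmap01(vfrom);
            kunmap01(vto);
            local_irq_restore(flags);
    }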
218 void clear_user_highpage(struct page *page, unsigned long vaddr)
222 kto = ((unsigned long)page_address(page) & PAGE_MASK);
229 vto = kremap0(vaddr, page_to_phys(page));
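clear_user_highpage() (218, 222, 229) follows the same pattern for zeroing: take the page's linear kernel address (222), invalidate it if it aliases the user address, then clear through a color-matched kremap0() mapping (229). Sketch reconstructed around those lines:

    void clear_user_highpage(struct page *page, unsigned long vaddr)
    {
            unsigned long vto, kto, flags;

            kto = ((unsigned long)page_address(page) & PAGE_MASK);

            local_irq_save(flags);
            /* Stale lines under the kernel color must go first. */
            if (aliasing(kto, vaddr) && kto != 0) {
                    cpu_dcache_inval_page(kto);
                    cpu_icache_inval_page(kto);
            }
            /* Zero the page through a mapping that shares vaddr's color. */
            vto = kremap0(vaddr, page_to_phys(page));
            clear_page((void *)vto);
            kunmap01(vto);
            local_irq_restore(flags);
    }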
237 void flush_dcache_page(struct page *page)
241 mapping = page_mapping_file(page);
243 set_bit(PG_dcache_dirty, &page->flags);
247 kaddr = (unsigned long)page_address(page);
253 vaddr = page->index << PAGE_SHIFT;
255 kto = kremap0(vaddr, page_to_phys(page));
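Lines 237 through 255 are flush_dcache_page(), the core of the lazy-flush scheme. If the page backs a file mapping that nothing currently has mapped, it only sets PG_dcache_dirty (243) and lets update_mmu_cache() (51 through 71 above) do the work later; otherwise it flushes the kernel alias immediately and, when the file offset (253) implies a differently colored user address, flushes that color too via kremap0() (255). Sketch:

    void flush_dcache_page(struct page *page)
    {
            struct address_space *mapping;

            mapping = page_mapping_file(page);
            if (mapping && !mapping_mapped(mapping)) {
                    /* No user mappings yet: defer to update_mmu_cache(). */
                    set_bit(PG_dcache_dirty, &page->flags);
            } else {
                    unsigned long kaddr, vaddr, kto, flags;

                    kaddr = (unsigned long)page_address(page);
                    local_irq_save(flags);
                    cpu_dcache_wbinval_page(kaddr);
                    if (mapping) {
                            /* The address user space sees this page at. */
                            vaddr = page->index << PAGE_SHIFT;
                            if (aliasing(vaddr, kaddr)) {
                                    kto = kremap0(vaddr, page_to_phys(page));
                                    cpu_dcache_wbinval_page(kto);
                                    kunmap01(kto);
                            }
                    }
                    local_irq_restore(flags);
            }
    }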
265 void copy_to_user_page(struct vm_area_struct *vma, struct page *page,
271 vto = kremap0(vaddr, page_to_phys(page));
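copy_to_user_page() (265, 271) is what ptrace and friends use to write into another process's page: the write goes through a kremap0() mapping so it is cached under the user address's color. Condensed sketch; the full function also write-back-invalidates the written range when the vma is executable:

    void copy_to_user_page(struct vm_area_struct *vma, struct page *page,
                           unsigned long vaddr, void *dst, void *src, int len)
    {
            unsigned long vto, flags;

            local_irq_save(flags);
            vto = kremap0(vaddr, page_to_phys(page));
            /* Redirect dst into the color-matched mapping. */
            dst = (void *)(vto | (vaddr & (PAGE_SIZE - 1)));
            memcpy(dst, src, len);
            /* I-cache maintenance for VM_EXEC vmas elided. */
            kunmap01(vto);
            local_irq_restore(flags);
    }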
286 void copy_from_user_page(struct vm_area_struct *vma, struct page *page,
292 vto = kremap0(vaddr, page_to_phys(page));
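copy_from_user_page() (286, 292) is the mirror image: the same kremap0() mapping, but src rather than dst is redirected, and no I-cache work is needed since the page is only read. Sketch:

    void copy_from_user_page(struct vm_area_struct *vma, struct page *page,
                             unsigned long vaddr, void *dst, void *src, int len)
    {
            unsigned long vto, flags;

            local_irq_save(flags);
            vto = kremap0(vaddr, page_to_phys(page));
            /* Read through the color-matched alias. */
            src = (void *)(vto | (vaddr & (PAGE_SIZE - 1)));
            memcpy(dst, src, len);
            kunmap01(vto);
            local_irq_restore(flags);
    }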
300 struct page *page, unsigned long vaddr)
303 if (!PageAnon(page))
312 kaddr = (unsigned long)page_address(page);
314 ktmp = kremap0(vaddr, page_to_phys(page));
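Lines 300 through 314 are flush_anon_page(), the anonymous-memory case (anonymous pages have no address_space, so flush_dcache_page()'s deferral path cannot apply). It returns early for non-anonymous pages (303) and otherwise flushes the aliasing color through kremap0() (314). Sketch reconstructed around the matched lines:

    void flush_anon_page(struct vm_area_struct *vma,
                         struct page *page, unsigned long vaddr)
    {
            unsigned long kaddr, ktmp, flags;

            if (!PageAnon(page))
                    return;

            local_irq_save(flags);
            if (vma->vm_flags & VM_EXEC)
                    cpu_icache_inval_page(vaddr & PAGE_MASK);
            kaddr = (unsigned long)page_address(page);
            if (aliasing(vaddr, kaddr)) {
                    /* Flush the page under the user address's color. */
                    ktmp = kremap0(vaddr, page_to_phys(page));
                    cpu_dcache_wbinval_page(ktmp);
                    kunmap01(ktmp);
            }
            local_irq_restore(flags);
    }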
321 void flush_kernel_dcache_page(struct page *page)
325 cpu_dcache_wbinval_page((unsigned long)page_address(page));
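Finally, flush_kernel_dcache_page() (321, 325) covers the kernel dirtying a page through its own linear mapping: a single write-back-and-invalidate of that one alias, with interrupts off. Sketch around line 325:

    void flush_kernel_dcache_page(struct page *page)
    {
            unsigned long flags;

            local_irq_save(flags);
            cpu_dcache_wbinval_page((unsigned long)page_address(page));
            local_irq_restore(flags);
    }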