Lines matching refs: addr — occurrences of the identifier addr in the kernel's generic ioremap implementation (ioremap_page_range() and its page-table helpers, ioremap_prot(), iounmap()); each hit is prefixed with its source line number.

64 static int ioremap_pte_range(pmd_t *pmd, unsigned long addr,
72 pte = pte_alloc_kernel_track(pmd, addr, mask);
77 set_pte_at(&init_mm, addr, pte, pfn_pte(pfn, prot));
79 } while (pte++, addr += PAGE_SIZE, addr != end);
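The pte-level hits show the leaf loop: the physical address is converted to a starting page-frame number once, then one PTE is written per page until addr reaches end, with pfn and addr advancing in lockstep. Below is a minimal user-space model of just that arithmetic; the 4 KiB PAGE_SIZE is an assumption (it is architecture-dependent), and the kernel-internal set_pte_at() call is replaced by a printout.

#include <stdint.h>
#include <stdio.h>

#define PAGE_SHIFT 12                      /* assumes 4 KiB pages, as on x86-64 */
#define PAGE_SIZE  (1UL << PAGE_SHIFT)

/* Model of the ioremap_pte_range() loop: one mapping per page. */
static void model_pte_range(unsigned long addr, unsigned long end, uint64_t phys_addr)
{
	uint64_t pfn = phys_addr >> PAGE_SHIFT;   /* starting page-frame number */

	do {
		/* stand-in for set_pte_at(&init_mm, addr, pte, pfn_pte(pfn, prot)) */
		printf("map va 0x%lx -> pfn 0x%llx\n", addr, (unsigned long long)pfn);
		pfn++;
	} while (addr += PAGE_SIZE, addr != end);
}

int main(void)
{
	/* example: map four pages of a made-up device at pa 0xfe000000 */
	model_pte_range(0xffffc90000000000UL, 0xffffc90000004000UL, 0xfe000000);
	return 0;
}
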
84 static int ioremap_try_huge_pmd(pmd_t *pmd, unsigned long addr,
91 if ((end - addr) != PMD_SIZE)
94 if (!IS_ALIGNED(addr, PMD_SIZE))
100 if (pmd_present(*pmd) && !pmd_free_pte_page(pmd, addr))
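ioremap_try_huge_pmd() declines unless the chunk handed to it is exactly one PMD's worth of address space and the virtual address sits on a PMD boundary; in the source the physical address is subject to the same alignment test, and a configuration gate precedes all of them. The same shape repeats at the PUD and P4D levels with PUD_SIZE/P4D_SIZE (lines 137/140 and 183/186 below). A stand-alone sketch of the eligibility predicate, assuming the 2 MiB PMD_SIZE of x86-64; the pmd_present()/pmd_free_pte_page() step, which clears a stale lower-level table, has no user-space equivalent and is omitted:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define PMD_SHIFT 21                        /* assumes 2 MiB PMD mappings (x86-64) */
#define PMD_SIZE  (1UL << PMD_SHIFT)
#define IS_ALIGNED(x, a) (((x) & ((a) - 1)) == 0)

/* A huge entry is only attempted for an exactly PMD_SIZE, PMD-aligned chunk. */
static bool huge_pmd_ok(unsigned long addr, unsigned long end, uint64_t phys_addr)
{
	if ((end - addr) != PMD_SIZE)
		return false;
	if (!IS_ALIGNED(addr, PMD_SIZE))
		return false;
	if (!IS_ALIGNED(phys_addr, PMD_SIZE))
		return false;
	return true;
}

int main(void)
{
	/* aligned 2 MiB chunk: eligible */
	printf("%d\n", huge_pmd_ok(0xffffc90000200000UL, 0xffffc90000400000UL, 0x40000000));
	/* misaligned physical address: falls back to PTEs */
	printf("%d\n", huge_pmd_ok(0xffffc90000200000UL, 0xffffc90000400000UL, 0x40001000));
	return 0;
}
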
106 static inline int ioremap_pmd_range(pud_t *pud, unsigned long addr,
113 pmd = pmd_alloc_track(&init_mm, pud, addr, mask);
117 next = pmd_addr_end(addr, end);
119 if (ioremap_try_huge_pmd(pmd, addr, next, phys_addr, prot)) {
124 if (ioremap_pte_range(pmd, addr, next, phys_addr, prot, mask))
126 } while (pmd++, phys_addr += (next - addr), addr = next, addr != end);
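ioremap_pmd_range() shows the walk pattern used at every intermediate level, and it repeats for the PUD and P4D functions below and for the PGD loop in ioremap_page_range(): allocate the next-level table, clamp the sub-range with a *_addr_end() helper, try a huge mapping for that chunk, otherwise descend a level, then advance phys_addr and addr together. A runnable model of the pmd_addr_end()-style clamping and of the do/while advance, with 2 MiB PMD coverage assumed and the sample addresses chosen arbitrarily:

#include <stdint.h>
#include <stdio.h>

#define PMD_SHIFT 21                        /* assumes 2 MiB per PMD entry (x86-64) */
#define PMD_SIZE  (1UL << PMD_SHIFT)
#define PMD_MASK  (~(PMD_SIZE - 1))

/*
 * Mirror of the pmd_addr_end() idiom: next PMD boundary, clamped to end.
 * The -1 on both sides makes a boundary that wrapped to zero compare as
 * "past end", so the clamp still works at the top of the address space.
 */
static unsigned long pmd_addr_end(unsigned long addr, unsigned long end)
{
	unsigned long boundary = (addr + PMD_SIZE) & PMD_MASK;

	return (boundary - 1 < end - 1) ? boundary : end;
}

int main(void)
{
	/* Range deliberately not PMD-aligned, so the first and last chunks are partial. */
	unsigned long addr = 0xffffc900001f0000UL;
	unsigned long end  = 0xffffc90000610000UL;
	uint64_t phys_addr = 0xfe1f0000;
	unsigned long next;

	do {
		next = pmd_addr_end(addr, end);
		printf("chunk va [0x%lx, 0x%lx) <- pa 0x%llx (%s)\n",
		       addr, next, (unsigned long long)phys_addr,
		       (next - addr) == PMD_SIZE ? "huge candidate" : "pte level");
	} while (phys_addr += (next - addr), addr = next, addr != end);

	return 0;
}
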
130 static int ioremap_try_huge_pud(pud_t *pud, unsigned long addr,
137 if ((end - addr) != PUD_SIZE)
140 if (!IS_ALIGNED(addr, PUD_SIZE))
146 if (pud_present(*pud) && !pud_free_pmd_page(pud, addr))
152 static inline int ioremap_pud_range(p4d_t *p4d, unsigned long addr,
159 pud = pud_alloc_track(&init_mm, p4d, addr, mask);
163 next = pud_addr_end(addr, end);
165 if (ioremap_try_huge_pud(pud, addr, next, phys_addr, prot)) {
170 if (ioremap_pmd_range(pud, addr, next, phys_addr, prot, mask))
172 } while (pud++, phys_addr += (next - addr), addr = next, addr != end);
176 static int ioremap_try_huge_p4d(p4d_t *p4d, unsigned long addr,
183 if ((end - addr) != P4D_SIZE)
186 if (!IS_ALIGNED(addr, P4D_SIZE))
192 if (p4d_present(*p4d) && !p4d_free_pud_page(p4d, addr))
198 static inline int ioremap_p4d_range(pgd_t *pgd, unsigned long addr,
205 p4d = p4d_alloc_track(&init_mm, pgd, addr, mask);
209 next = p4d_addr_end(addr, end);
211 if (ioremap_try_huge_p4d(p4d, addr, next, phys_addr, prot)) {
216 if (ioremap_pud_range(p4d, addr, next, phys_addr, prot, mask))
218 } while (p4d++, phys_addr += (next - addr), addr = next, addr != end);
222 int ioremap_page_range(unsigned long addr,
232 BUG_ON(addr >= end);
234 start = addr;
235 pgd = pgd_offset_k(addr);
237 next = pgd_addr_end(addr, end);
238 err = ioremap_p4d_range(pgd, addr, next, phys_addr, prot,
242 } while (pgd++, phys_addr += (next - addr), addr = next, addr != end);
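ioremap_page_range() is the top of the chain: it splits the request at PGD granularity and hands each piece down through the p4d/pud/pmd/pte functions above, breaking out of the loop on the first error. A quick way to see why the walk is nested is to count how many entries of each level one request touches; the shifts below are illustrative assumptions for x86-64 with 4-level paging (where the p4d level is folded into the pgd), not portable constants:

#include <stdio.h>

/* Assumed level shifts: x86-64, 4-level paging, p4d folded into pgd. */
static const struct { const char *name; unsigned shift; } levels[] = {
	{ "pgd", 39 }, { "pud", 30 }, { "pmd", 21 }, { "pte", 12 },
};

/* How many table entries at each level a mapping of [addr, end) touches. */
int main(void)
{
	unsigned long addr = 0xffffc90000000000UL;
	unsigned long end  = addr + (64UL << 20);        /* map 64 MiB */

	for (unsigned i = 0; i < sizeof(levels) / sizeof(levels[0]); i++) {
		unsigned long first = addr >> levels[i].shift;
		unsigned long last  = (end - 1) >> levels[i].shift;

		printf("%s entries touched: %lu\n", levels[i].name, last - first + 1);
	}
	return 0;
}
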
253 void __iomem *ioremap_prot(phys_addr_t addr, size_t size, unsigned long prot)
260 last_addr = addr + size - 1;
261 if (!size || last_addr < addr)
265 offset = addr & (~PAGE_MASK);
266 addr -= offset;
273 vaddr = (unsigned long)area->addr;
275 if (ioremap_page_range(vaddr, vaddr + size, addr, __pgprot(prot))) {
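ioremap_prot() accepts a physical address and size with arbitrary sub-page alignment: it rejects zero-size and wrap-around requests, splits off the sub-page offset, maps a page-aligned region carved out of the vmalloc area, and adds the offset back onto the returned cookie. The address arithmetic in isolation, with PAGE_MASK/PAGE_ALIGN defined here to mirror the kernel macros and 4 KiB pages assumed:

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

#define PAGE_SHIFT 12                       /* assumes 4 KiB pages */
#define PAGE_SIZE  (1UL << PAGE_SHIFT)
#define PAGE_MASK  (~(PAGE_SIZE - 1))
#define PAGE_ALIGN(x) (((x) + PAGE_SIZE - 1) & PAGE_MASK)

int main(void)
{
	uint64_t addr = 0xfe001234;             /* made-up, unaligned MMIO physical address */
	size_t size = 0x20;                     /* caller only wants 32 bytes */
	uint64_t last_addr, offset;

	/* Disallow wrap-around or zero size, as ioremap_prot() does. */
	last_addr = addr + size - 1;
	if (!size || last_addr < addr)
		return 1;

	/* Page-align the mapping; the offset is re-applied to the returned pointer. */
	offset = addr & ~PAGE_MASK;
	addr -= offset;
	size = PAGE_ALIGN(size + offset);

	printf("map pa 0x%llx, %zu bytes, caller pointer = mapping base + 0x%llx\n",
	       (unsigned long long)addr, size, (unsigned long long)offset);
	return 0;
}
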
284 void iounmap(volatile void __iomem *addr)
286 vunmap((void *)((unsigned long)addr & PAGE_MASK));
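iounmap() masks off the sub-page offset that ioremap_prot() added and returns the page-aligned area to vmalloc via vunmap(). For completeness, a hedged sketch of how a driver typically pairs the two calls; it is not standalone-runnable, and the EXAMPLE_* base, length, and register offset are made-up placeholders, not a real device:

#include <linux/io.h>
#include <linux/types.h>
#include <linux/errno.h>

#define EXAMPLE_MMIO_BASE  0xfe000000UL   /* hypothetical device register block */
#define EXAMPLE_MMIO_LEN   0x1000
#define EXAMPLE_REG_STATUS 0x04           /* hypothetical register offset */

static int example_probe_mmio(void)
{
	void __iomem *regs;
	u32 status;

	regs = ioremap(EXAMPLE_MMIO_BASE, EXAMPLE_MMIO_LEN);
	if (!regs)
		return -ENOMEM;

	status = readl(regs + EXAMPLE_REG_STATUS);   /* MMIO accessor, not a plain load */
	(void)status;

	iounmap(regs);
	return 0;
}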