Lines Matching defs:pmd

28 bool __init __early_make_pgtable(unsigned long address, pmdval_t pmd);
74 #define set_pmd(pmdp, pmd) native_set_pmd(pmdp, pmd)
98 #define pmd_clear(pmd) native_pmd_clear(pmd)
144 static inline bool pmd_dirty(pmd_t pmd)
146 return pmd_flags(pmd) & _PAGE_DIRTY_BITS;
149 static inline bool pmd_shstk(pmd_t pmd)
152 (pmd_flags(pmd) & (_PAGE_RW | _PAGE_DIRTY | _PAGE_PSE)) ==
157 static inline int pmd_young(pmd_t pmd)
159 return pmd_flags(pmd) & _PAGE_ACCESSED;
182 static inline int pmd_write(pmd_t pmd)
188 return (pmd_flags(pmd) & _PAGE_RW) || pmd_shstk(pmd);
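The four predicates above (pmd_dirty, pmd_shstk, pmd_young, pmd_write) are plain tests of flag bits in the PMD value; pmd_dirty() tests _PAGE_DIRTY_BITS, which covers the hardware Dirty bit plus the software saved-dirty bit used by the shadow stack code further down. The interesting case is pmd_write(): a shadow-stack mapping is encoded as Dirty=1, Write=0 on a huge (PSE) entry, so writability cannot be just a test of _PAGE_RW, and pmd_shstk() is accepted as well. The listing shows only the matching lines, so pmd_shstk() appears cut off; the in-tree version also gates on the SHSTK CPU feature before checking the bits. A minimal user-space sketch of the bit logic, with a toy pmd_t and only the architectural flag bits (the real definitions live in pgtable_types.h):

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

typedef uint64_t pmdval_t;
typedef struct { pmdval_t pmd; } pmd_t;	/* toy stand-in for the kernel type */

#define _PAGE_RW	(1ULL << 1)	/* hardware Write bit */
#define _PAGE_ACCESSED	(1ULL << 5)
#define _PAGE_DIRTY	(1ULL << 6)
#define _PAGE_PSE	(1ULL << 7)	/* huge-page bit */

pmdval_t pmd_flags(pmd_t pmd) { return pmd.pmd; }	/* flags only; the PFN part is ignored here */

/* Shadow-stack encoding: Dirty set, Write clear, on a huge entry. */
bool pmd_shstk(pmd_t pmd)
{
	return (pmd_flags(pmd) & (_PAGE_RW | _PAGE_DIRTY | _PAGE_PSE)) ==
	       (_PAGE_DIRTY | _PAGE_PSE);
}

/* Writable either normally (RW set) or as a shadow-stack mapping. */
bool pmd_write(pmd_t pmd)
{
	return (pmd_flags(pmd) & _PAGE_RW) || pmd_shstk(pmd);
}

bool pmd_young(pmd_t pmd) { return pmd_flags(pmd) & _PAGE_ACCESSED; }

int main(void)
{
	pmd_t shstk = { _PAGE_DIRTY | _PAGE_PSE };	/* Write=0, Dirty=1 */
	printf("shstk entry: write=%d shstk=%d young=%d\n",
	       pmd_write(shstk), pmd_shstk(shstk), pmd_young(shstk));
	return 0;
}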
230 static inline unsigned long pmd_pfn(pmd_t pmd)
232 phys_addr_t pfn = pmd_val(pmd);
234 return (pfn & pmd_pfn_mask(pmd)) >> PAGE_SHIFT;
271 static inline int pmd_trans_huge(pmd_t pmd)
273 return (pmd_val(pmd) & (_PAGE_PSE|_PAGE_DEVMAP)) == _PAGE_PSE;
290 static inline int pmd_devmap(pmd_t pmd)
292 return !!(pmd_val(pmd) & _PAGE_DEVMAP);
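pmd_pfn() masks the flag bits out of the raw value and shifts by PAGE_SHIFT to get a page frame number; the full function additionally undoes the PROT_NONE/NUMA-hinting encoding via protnone_mask(), which the listing does not show. pmd_trans_huge() deliberately requires PSE with DEVMAP clear, so device-DAX huge mappings are not treated as transparent huge pages even though they are just as huge. A rough model, with an illustrative PFN mask and a placeholder DEVMAP bit position rather than the real pgtable_types.h values:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

typedef uint64_t pmdval_t;
typedef struct { pmdval_t pmd; } pmd_t;

#define PAGE_SHIFT	12
#define _PAGE_PSE	(1ULL << 7)
#define _PAGE_DEVMAP	(1ULL << 57)			/* placeholder position */
#define PMD_PFN_MASK	0x000ffffffffff000ULL		/* physical-address bits only, illustrative */

pmdval_t pmd_val(pmd_t pmd) { return pmd.pmd; }

unsigned long pmd_pfn(pmd_t pmd)
{
	/* drop the flag bits, keep the physical address, convert to a PFN */
	return (pmd_val(pmd) & PMD_PFN_MASK) >> PAGE_SHIFT;
}

/* Transparent huge page: PSE set, DEVMAP clear. */
bool pmd_trans_huge(pmd_t pmd) { return (pmd_val(pmd) & (_PAGE_PSE | _PAGE_DEVMAP)) == _PAGE_PSE; }
bool pmd_devmap(pmd_t pmd)     { return !!(pmd_val(pmd) & _PAGE_DEVMAP); }

int main(void)
{
	pmd_t thp = { 0x200000ULL | _PAGE_PSE };	/* 2 MiB page at PFN 0x200 */
	printf("pfn=%#lx trans_huge=%d devmap=%d\n",
	       pmd_pfn(thp), pmd_trans_huge(thp), pmd_devmap(thp));
	return 0;
}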
493 static inline pmd_t pmd_set_flags(pmd_t pmd, pmdval_t set)
495 pmdval_t v = native_pmd_val(pmd);
500 static inline pmd_t pmd_clear_flags(pmd_t pmd, pmdval_t clear)
502 pmdval_t v = native_pmd_val(pmd);
508 static inline pmd_t pmd_mksaveddirty(pmd_t pmd)
510 pmdval_t v = native_pmd_val(pmd);
517 static inline pmd_t pmd_clear_saveddirty(pmd_t pmd)
519 pmdval_t v = native_pmd_val(pmd);
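pmd_set_flags() and pmd_clear_flags() are the primitives the whole pmd_mk*()/pmd_clear_*() family is built on: unwrap the value with native_pmd_val(), OR bits in or AND them out, and wrap the result back into a pmd_t. pmd_mksaveddirty() and pmd_clear_saveddirty() exist for shadow stack: when a dirty entry loses its Write bit, the dirty state is parked in a software saved-dirty bit so the entry does not take on the Dirty=1/Write=0 shadow-stack encoding by accident (without CONFIG_X86_USER_SHADOW_STACK they are effectively no-ops). A sketch of that behaviour under those assumptions, with a placeholder saved-dirty bit position:

#include <stdint.h>
#include <stdio.h>

typedef uint64_t pmdval_t;
typedef struct { pmdval_t pmd; } pmd_t;

#define _PAGE_RW		(1ULL << 1)
#define _PAGE_DIRTY		(1ULL << 6)
#define _PAGE_SAVED_DIRTY	(1ULL << 58)	/* software bit, placeholder position */

pmdval_t native_pmd_val(pmd_t pmd) { return pmd.pmd; }
pmd_t __pmd(pmdval_t v) { pmd_t p = { v }; return p; }

pmd_t pmd_set_flags(pmd_t pmd, pmdval_t set)
{
	pmdval_t v = native_pmd_val(pmd);

	return __pmd(v | set);
}

pmd_t pmd_clear_flags(pmd_t pmd, pmdval_t clear)
{
	pmdval_t v = native_pmd_val(pmd);

	return __pmd(v & ~clear);
}

/* Dirty but not writable: move Dirty into the software saved-dirty bit. */
pmd_t pmd_mksaveddirty(pmd_t pmd)
{
	if ((native_pmd_val(pmd) & (_PAGE_RW | _PAGE_DIRTY)) == _PAGE_DIRTY) {
		pmd = pmd_clear_flags(pmd, _PAGE_DIRTY);
		pmd = pmd_set_flags(pmd, _PAGE_SAVED_DIRTY);
	}
	return pmd;
}

int main(void)
{
	pmd_t p = { _PAGE_DIRTY };			/* dirty, not writable */
	printf("after mksaveddirty: %#llx\n",
	       (unsigned long long)native_pmd_val(pmd_mksaveddirty(p)));
	return 0;
}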
525 static inline pmd_t pmd_wrprotect(pmd_t pmd)
527 pmd = pmd_clear_flags(pmd, _PAGE_RW);
534 return pmd_mksaveddirty(pmd);
538 static inline int pmd_uffd_wp(pmd_t pmd)
540 return pmd_flags(pmd) & _PAGE_UFFD_WP;
543 static inline pmd_t pmd_mkuffd_wp(pmd_t pmd)
545 return pmd_wrprotect(pmd_set_flags(pmd, _PAGE_UFFD_WP));
548 static inline pmd_t pmd_clear_uffd_wp(pmd_t pmd)
550 return pmd_clear_flags(pmd, _PAGE_UFFD_WP);
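pmd_wrprotect() clears RW and then goes through pmd_mksaveddirty(), because dropping Write from a dirty entry would otherwise produce exactly the Dirty=1/Write=0 pattern that means shadow stack. pmd_mkuffd_wp() builds on it: a userfaultfd write-protected huge PMD both carries the software UFFD_WP marker and is genuinely write-protected, so the next write faults and can be reported to userspace; pmd_clear_uffd_wp() (line 548) only drops the marker and does not re-add RW. A compact sketch with the same toy helpers as above and placeholder software bit positions:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

typedef uint64_t pmdval_t;
typedef struct { pmdval_t pmd; } pmd_t;

#define _PAGE_RW		(1ULL << 1)
#define _PAGE_DIRTY		(1ULL << 6)
#define _PAGE_UFFD_WP		(1ULL << 10)	/* placeholder position */
#define _PAGE_SAVED_DIRTY	(1ULL << 58)	/* placeholder position */

pmd_t __pmd(pmdval_t v) { pmd_t p = { v }; return p; }
pmdval_t pmd_flags(pmd_t pmd) { return pmd.pmd; }
pmd_t pmd_set_flags(pmd_t p, pmdval_t set)     { return __pmd(p.pmd | set); }
pmd_t pmd_clear_flags(pmd_t p, pmdval_t clear) { return __pmd(p.pmd & ~clear); }

/* See the previous sketch: park Dirty in the software bit once RW is gone. */
pmd_t pmd_mksaveddirty(pmd_t p)
{
	if ((p.pmd & (_PAGE_RW | _PAGE_DIRTY)) == _PAGE_DIRTY)
		p = pmd_set_flags(pmd_clear_flags(p, _PAGE_DIRTY), _PAGE_SAVED_DIRTY);
	return p;
}

pmd_t pmd_wrprotect(pmd_t pmd)
{
	pmd = pmd_clear_flags(pmd, _PAGE_RW);
	return pmd_mksaveddirty(pmd);
}

/* Mark with the uffd-wp bit and write-protect in one step. */
pmd_t pmd_mkuffd_wp(pmd_t pmd)
{
	return pmd_wrprotect(pmd_set_flags(pmd, _PAGE_UFFD_WP));
}

bool pmd_uffd_wp(pmd_t pmd)
{
	return pmd_flags(pmd) & _PAGE_UFFD_WP;
}

int main(void)
{
	pmd_t p = pmd_mkuffd_wp(__pmd(_PAGE_RW | _PAGE_DIRTY));
	printf("uffd_wp=%d rw=%d\n", pmd_uffd_wp(p), !!(pmd_flags(p) & _PAGE_RW));
	return 0;
}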
554 static inline pmd_t pmd_mkold(pmd_t pmd)
556 return pmd_clear_flags(pmd, _PAGE_ACCESSED);
559 static inline pmd_t pmd_mkclean(pmd_t pmd)
561 return pmd_clear_flags(pmd, _PAGE_DIRTY_BITS);
564 static inline pmd_t pmd_mkdirty(pmd_t pmd)
566 pmd = pmd_set_flags(pmd, _PAGE_DIRTY | _PAGE_SOFT_DIRTY);
568 return pmd_mksaveddirty(pmd);
571 static inline pmd_t pmd_mkwrite_shstk(pmd_t pmd)
573 pmd = pmd_clear_flags(pmd, _PAGE_RW);
575 return pmd_set_flags(pmd, _PAGE_DIRTY);
578 static inline pmd_t pmd_mkdevmap(pmd_t pmd)
580 return pmd_set_flags(pmd, _PAGE_DEVMAP);
583 static inline pmd_t pmd_mkhuge(pmd_t pmd)
585 return pmd_set_flags(pmd, _PAGE_PSE);
588 static inline pmd_t pmd_mkyoung(pmd_t pmd)
590 return pmd_set_flags(pmd, _PAGE_ACCESSED);
593 static inline pmd_t pmd_mkwrite_novma(pmd_t pmd)
595 return pmd_set_flags(pmd, _PAGE_RW);
598 pmd_t pmd_mkwrite(pmd_t pmd, struct vm_area_struct *vma);
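The remaining pmd_mk*() helpers are one-line applications of the two primitives above; note that pmd_mkdirty() sets soft-dirty together with Dirty and then passes through pmd_mksaveddirty(), and that making a PMD writable comes in two flavours, pmd_mkwrite_novma() for ordinary memory and pmd_mkwrite_shstk() for shadow stack. pmd_mkwrite() itself is only declared here and defined out of line, where it has to choose between the two based on the VMA. A hedged sketch of how that dispatch plausibly looks, assuming a VM_SHADOW_STACK vma flag as in the shadow stack series (the helper bodies mirror lines 571-595 above):

#include <stdint.h>
#include <stdio.h>

typedef uint64_t pmdval_t;
typedef struct { pmdval_t pmd; } pmd_t;
struct vm_area_struct { unsigned long vm_flags; };	/* minimal stand-in */

#define VM_SHADOW_STACK		(1UL << 0)	/* placeholder value */
#define _PAGE_RW		(1ULL << 1)
#define _PAGE_DIRTY		(1ULL << 6)
#define _PAGE_SAVED_DIRTY	(1ULL << 58)	/* placeholder position */

pmd_t __pmd(pmdval_t v) { pmd_t p = { v }; return p; }
pmd_t pmd_set_flags(pmd_t p, pmdval_t s)   { return __pmd(p.pmd | s); }
pmd_t pmd_clear_flags(pmd_t p, pmdval_t c) { return __pmd(p.pmd & ~c); }

/* Shadow-stack "writable": Dirty set, RW clear (lines 571-575 above). */
pmd_t pmd_mkwrite_shstk(pmd_t pmd)
{
	pmd = pmd_clear_flags(pmd, _PAGE_RW);
	return pmd_set_flags(pmd, _PAGE_DIRTY);
}

/* Ordinary "writable": just set RW (line 593 above). */
pmd_t pmd_mkwrite_novma(pmd_t pmd)
{
	return pmd_set_flags(pmd, _PAGE_RW);
}

/* Once RW is back, saved-dirty can fold back into the real Dirty bit. */
pmd_t pmd_clear_saveddirty(pmd_t p)
{
	if ((p.pmd & (_PAGE_RW | _PAGE_SAVED_DIRTY)) == (_PAGE_RW | _PAGE_SAVED_DIRTY))
		p = pmd_set_flags(pmd_clear_flags(p, _PAGE_SAVED_DIRTY), _PAGE_DIRTY);
	return p;
}

/* Plausible shape of the out-of-line pmd_mkwrite(): pick the encoding per VMA. */
pmd_t pmd_mkwrite(pmd_t pmd, struct vm_area_struct *vma)
{
	if (vma->vm_flags & VM_SHADOW_STACK)
		return pmd_mkwrite_shstk(pmd);

	return pmd_clear_saveddirty(pmd_mkwrite_novma(pmd));
}

int main(void)
{
	struct vm_area_struct shstk_vma = { VM_SHADOW_STACK };
	struct vm_area_struct plain_vma = { 0 };
	printf("shstk=%#llx plain=%#llx\n",
	       (unsigned long long)pmd_mkwrite(__pmd(0), &shstk_vma).pmd,
	       (unsigned long long)pmd_mkwrite(__pmd(0), &plain_vma).pmd);
	return 0;
}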
690 static inline int pmd_soft_dirty(pmd_t pmd)
692 return pmd_flags(pmd) & _PAGE_SOFT_DIRTY;
705 static inline pmd_t pmd_mksoft_dirty(pmd_t pmd)
707 return pmd_set_flags(pmd, _PAGE_SOFT_DIRTY);
720 static inline pmd_t pmd_clear_soft_dirty(pmd_t pmd)
722 return pmd_clear_flags(pmd, _PAGE_SOFT_DIRTY);
787 static inline pmd_t pmd_mkinvalid(pmd_t pmd)
789 return pfn_pmd(pmd_pfn(pmd),
790 __pgprot(pmd_flags(pmd) & ~(_PAGE_PRESENT|_PAGE_PROTNONE)));
828 static inline pmd_t pmd_modify(pmd_t pmd, pgprot_t newprot)
830 pmdval_t val = pmd_val(pmd), oldval = val;
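The soft-dirty helpers (lines 690-722) are single-bit bookkeeping for the write-tracking interface behind /proc/<pid>/pagemap: the bit is set when the mapping is dirtied and cleared when userspace starts a new tracking round. pmd_mkinvalid() is more subtle: it rebuilds the entry from its own PFN with _PAGE_PRESENT and _PAGE_PROTNONE stripped, so the hardware will no longer walk through it while the PFN and the PSE bit survive; pmdp_invalidate() uses this while a huge PMD is being split. pmd_modify() likewise keeps the PFN and swaps the protection bits for a new pgprot. A toy version of the mkinvalid idea, with simplified flag positions (PROTNONE reuses the Global bit in the real headers):

#include <stdint.h>
#include <stdio.h>

typedef uint64_t pmdval_t;
typedef struct { pmdval_t pmd; } pmd_t;
typedef struct { pmdval_t pgprot; } pgprot_t;

#define PAGE_SHIFT	12
#define _PAGE_PRESENT	(1ULL << 0)
#define _PAGE_RW	(1ULL << 1)
#define _PAGE_PSE	(1ULL << 7)
#define _PAGE_PROTNONE	(1ULL << 8)
#define PMD_PFN_MASK	0x000ffffffffff000ULL	/* illustrative */

pgprot_t __pgprot(pmdval_t v) { pgprot_t p = { v }; return p; }
pmdval_t pmd_val(pmd_t pmd)   { return pmd.pmd; }
pmdval_t pmd_flags(pmd_t pmd) { return pmd.pmd & ~PMD_PFN_MASK; }
unsigned long pmd_pfn(pmd_t pmd) { return (pmd_val(pmd) & PMD_PFN_MASK) >> PAGE_SHIFT; }

/* Rebuild a PMD from a PFN plus protection bits. */
pmd_t pfn_pmd(unsigned long pfn, pgprot_t prot)
{
	pmd_t p = { ((pmdval_t)pfn << PAGE_SHIFT) | prot.pgprot };
	return p;
}

/* Keep the PFN (and PSE), drop PRESENT/PROTNONE: unusable by hardware,
 * but still recognizable as a huge mapping while it is being split. */
pmd_t pmd_mkinvalid(pmd_t pmd)
{
	return pfn_pmd(pmd_pfn(pmd),
		       __pgprot(pmd_flags(pmd) & ~(_PAGE_PRESENT | _PAGE_PROTNONE)));
}

int main(void)
{
	pmd_t huge = { 0x200000ULL | _PAGE_PRESENT | _PAGE_RW | _PAGE_PSE };
	pmd_t inv  = pmd_mkinvalid(huge);
	printf("before=%#llx after=%#llx\n",
	       (unsigned long long)pmd_val(huge), (unsigned long long)pmd_val(inv));
	return 0;
}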
991 static inline int pmd_present(pmd_t pmd)
999 return pmd_flags(pmd) & (_PAGE_PRESENT | _PAGE_PROTNONE | _PAGE_PSE);
1013 static inline int pmd_protnone(pmd_t pmd)
1015 return (pmd_flags(pmd) & (_PAGE_PROTNONE | _PAGE_PRESENT))
1020 static inline int pmd_none(pmd_t pmd)
1024 unsigned long val = native_pmd_val(pmd);
1028 static inline unsigned long pmd_page_vaddr(pmd_t pmd)
1030 return (unsigned long)__va(pmd_val(pmd) & pmd_pfn_mask(pmd));
1037 #define pmd_page(pmd) pfn_to_page(pmd_pfn(pmd))
1055 static inline int pmd_bad(pmd_t pmd)
1057 return (pmd_flags(pmd) & ~(_PAGE_USER | _PAGE_ACCESSED)) !=
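These are the state predicates, and the masks are worth spelling out. pmd_present() accepts PROTNONE (a PROT_NONE or NUMA-hinting mapping still owns its page) and PSE (a huge PMD that pmd_mkinvalid() stripped of its Present bit during a split must still count as present). pmd_none() compares against zero after masking off stray Accessed/Dirty bits that a hardware erratum can leave behind (_PAGE_KNL_ERRATUM_MASK), and pmd_bad() checks a table-pointing entry against the expected kernel flags (_KERNPG_TABLE) while ignoring USER and ACCESSED; both appear cut off here because only the matching lines are listed. A small sketch of the present/none/protnone distinction, erratum handling omitted:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

typedef uint64_t pmdval_t;
typedef struct { pmdval_t pmd; } pmd_t;

#define _PAGE_PRESENT	(1ULL << 0)
#define _PAGE_PSE	(1ULL << 7)
#define _PAGE_PROTNONE	(1ULL << 8)	/* Global bit reused on non-present entries */

pmdval_t pmd_flags(pmd_t pmd) { return pmd.pmd; }

/* "Present" for the VM: really present, PROT_NONE, or a huge entry
 * temporarily invalidated during a THP split (PSE survives pmd_mkinvalid()). */
bool pmd_present(pmd_t pmd)
{
	return pmd_flags(pmd) & (_PAGE_PRESENT | _PAGE_PROTNONE | _PAGE_PSE);
}

/* PROT_NONE/NUMA hinting: PROTNONE set while PRESENT is clear. */
bool pmd_protnone(pmd_t pmd)
{
	return (pmd_flags(pmd) & (_PAGE_PROTNONE | _PAGE_PRESENT)) == _PAGE_PROTNONE;
}

/* Empty entry: nothing set (the real version also forgives the stray
 * Accessed/Dirty bits covered by _PAGE_KNL_ERRATUM_MASK). */
bool pmd_none(pmd_t pmd)
{
	return pmd_flags(pmd) == 0;
}

int main(void)
{
	pmd_t split = { 0x200000ULL | _PAGE_PSE };	/* invalidated huge PMD */
	printf("split: present=%d protnone=%d none=%d\n",
	       pmd_present(split), pmd_protnone(split), pmd_none(split));
	return 0;
}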
1245 pmd_t *pmdp, pmd_t pmd)
1247 page_table_check_pmd_set(mm, pmdp, pmd);
1248 set_pmd(pmdp, pmd);
1352 pmd_t pmd = native_pmdp_get_and_clear(pmdp);
1354 page_table_check_pmd_clear(mm, pmd);
1356 return pmd;
1390 unsigned long address, pmd_t *pmdp, pmd_t pmd)
1392 page_table_check_pmd_set(vma->vm_mm, pmdp, pmd);
1394 return xchg(pmdp, pmd);
1397 WRITE_ONCE(*pmdp, pmd);
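The snippets from line 1245 onward are the mutation paths: every store or clear of a PMD goes through a page_table_check_pmd_set()/page_table_check_pmd_clear() debugging hook before the actual update, and pmdp_establish() installs a new entry while returning the old one, using an atomic xchg() when other CPUs may be walking the table and a plain WRITE_ONCE() plus a saved copy of the old entry when they cannot. A user-space sketch of the establish pattern, with __atomic_exchange_n() standing in for the kernel xchg() and the check hook reduced to a no-op (the real function also takes the vma and address):

#include <stdint.h>
#include <stdio.h>

typedef uint64_t pmdval_t;
typedef struct { pmdval_t pmd; } pmd_t;

#define CONFIG_SMP 1	/* flip to 0 to take the uniprocessor path */

static void page_table_check_pmd_set(pmd_t *pmdp, pmd_t pmd)
{
	(void)pmdp; (void)pmd;	/* debugging hook in the kernel; no-op here */
}

/* Install pmd at *pmdp and hand back whatever was there before. */
pmd_t pmdp_establish(pmd_t *pmdp, pmd_t pmd)
{
	page_table_check_pmd_set(pmdp, pmd);

	if (CONFIG_SMP) {
		/* atomic swap: another CPU may be walking this table */
		pmd_t old;
		old.pmd = __atomic_exchange_n(&pmdp->pmd, pmd.pmd, __ATOMIC_SEQ_CST);
		return old;
	} else {
		/* no other CPU to race with: a plain ordered store is enough */
		pmd_t old = *pmdp;
		__atomic_store_n(&pmdp->pmd, pmd.pmd, __ATOMIC_RELAXED);	/* ~WRITE_ONCE() */
		return old;
	}
}

int main(void)
{
	pmd_t slot = { 0x1000 };
	pmd_t newe = { 0x2000 };
	pmd_t old  = pmdp_establish(&slot, newe);
	printf("old=%#llx now=%#llx\n",
	       (unsigned long long)old.pmd, (unsigned long long)slot.pmd);
	return 0;
}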
1523 unsigned long addr, pmd_t *pmd)
1562 static inline pmd_t pmd_swp_mksoft_dirty(pmd_t pmd)
1564 return pmd_set_flags(pmd, _PAGE_SWP_SOFT_DIRTY);
1567 static inline int pmd_swp_soft_dirty(pmd_t pmd)
1569 return pmd_flags(pmd) & _PAGE_SWP_SOFT_DIRTY;
1572 static inline pmd_t pmd_swp_clear_soft_dirty(pmd_t pmd)
1574 return pmd_clear_flags(pmd, _PAGE_SWP_SOFT_DIRTY);
1595 static inline pmd_t pmd_swp_mkuffd_wp(pmd_t pmd)
1597 return pmd_set_flags(pmd, _PAGE_SWP_UFFD_WP);
1600 static inline int pmd_swp_uffd_wp(pmd_t pmd)
1602 return pmd_flags(pmd) & _PAGE_SWP_UFFD_WP;
1605 static inline pmd_t pmd_swp_clear_uffd_wp(pmd_t pmd)
1607 return pmd_clear_flags(pmd, _PAGE_SWP_UFFD_WP);
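Once a huge PMD has been replaced by a swap or migration entry, the hardware flag bits no longer mean anything, so soft-dirty and uffd-wp state has to ride in bits the swap-entry encoding leaves free; the _PAGE_SWP_* helpers (lines 1562-1607) are the same set/clear/test pattern as their live-mapping counterparts, just on different bits, so the state survives swap-out and can be restored on swap-in. A toy round trip, where to_swp()/from_swp() are purely hypothetical stand-ins for the real swap-entry conversion and the bit positions are placeholders:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

typedef uint64_t pmdval_t;
typedef struct { pmdval_t pmd; } pmd_t;

#define _PAGE_SOFT_DIRTY	(1ULL << 11)	/* placeholder, live-mapping bit */
#define _PAGE_SWP_SOFT_DIRTY	(1ULL << 2)	/* placeholder, swap-entry bit */

pmd_t __pmd(pmdval_t v) { pmd_t p = { v }; return p; }
pmd_t pmd_swp_mksoft_dirty(pmd_t p) { return __pmd(p.pmd | _PAGE_SWP_SOFT_DIRTY); }
bool  pmd_swp_soft_dirty(pmd_t p)   { return p.pmd & _PAGE_SWP_SOFT_DIRTY; }
pmd_t pmd_mksoft_dirty(pmd_t p)     { return __pmd(p.pmd | _PAGE_SOFT_DIRTY); }
bool  pmd_soft_dirty(pmd_t p)       { return p.pmd & _PAGE_SOFT_DIRTY; }

/* Hypothetical conversions standing in for the real swap-entry encoding. */
pmd_t to_swp(pmd_t p)   { (void)p; return __pmd(0); }		/* swap entry starts with flags clear */
pmd_t from_swp(pmd_t p) { (void)p; return __pmd(0x200000); }	/* back to a live mapping */

int main(void)
{
	pmd_t live = pmd_mksoft_dirty(__pmd(0x200000));
	pmd_t swp  = to_swp(live);
	if (pmd_soft_dirty(live))		/* carry the state across swap-out */
		swp = pmd_swp_mksoft_dirty(swp);
	pmd_t back = from_swp(swp);
	if (pmd_swp_soft_dirty(swp))		/* and restore it on swap-in */
		back = pmd_mksoft_dirty(back);
	printf("restored soft-dirty: %d\n", pmd_soft_dirty(back));
	return 0;
}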
1663 static inline bool pmd_access_permitted(pmd_t pmd, bool write)
1665 return __pte_access_permitted(pmd_val(pmd), write);
1692 void arch_check_zapped_pmd(struct vm_area_struct *vma, pmd_t pmd);
1708 static inline bool pmd_user_accessible_page(pmd_t pmd)
1710 return pmd_leaf(pmd) && (pmd_val(pmd) & _PAGE_PRESENT) && (pmd_val(pmd) & _PAGE_USER);
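pmd_access_permitted() reuses the PTE-level check on the raw value, which works because the permission bits sit in the same positions at every paging level: roughly, the entry must be present and user-accessible, plus writable when write access is requested. pmd_user_accessible_page() is what the page-table-check machinery uses to decide whether an entry maps user-visible memory: a leaf (huge) mapping that is present and marked USER. arch_check_zapped_pmd() (line 1692) is a related debug hook run when a PMD is torn down. A final sketch of the two tests, with a simplified pmd_leaf():

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

typedef uint64_t pmdval_t;
typedef struct { pmdval_t pmd; } pmd_t;

#define _PAGE_PRESENT	(1ULL << 0)
#define _PAGE_RW	(1ULL << 1)
#define _PAGE_USER	(1ULL << 2)
#define _PAGE_PSE	(1ULL << 7)

pmdval_t pmd_val(pmd_t pmd) { return pmd.pmd; }
bool pmd_leaf(pmd_t pmd)    { return pmd_val(pmd) & _PAGE_PSE; }	/* simplified leaf test */

/* GUP-style permission check: present + user, plus RW when writing. */
bool __pte_access_permitted(pmdval_t val, bool write)
{
	pmdval_t need = _PAGE_PRESENT | _PAGE_USER | (write ? _PAGE_RW : 0);

	return (val & need) == need;
}

bool pmd_access_permitted(pmd_t pmd, bool write)
{
	return __pte_access_permitted(pmd_val(pmd), write);
}

/* Used by page_table_check: does this entry map user-accessible memory? */
bool pmd_user_accessible_page(pmd_t pmd)
{
	return pmd_leaf(pmd) && (pmd_val(pmd) & _PAGE_PRESENT) && (pmd_val(pmd) & _PAGE_USER);
}

int main(void)
{
	pmd_t ro_user = { _PAGE_PRESENT | _PAGE_USER | _PAGE_PSE };
	printf("read=%d write=%d user_page=%d\n",
	       pmd_access_permitted(ro_user, false),
	       pmd_access_permitted(ro_user, true),
	       pmd_user_accessible_page(ro_user));
	return 0;
}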