/kernel/linux/linux-5.10/arch/riscv/mm/kasan_init.c
     22  for (i = 0; i < PTRS_PER_PTE; ++i)   in kasan_early_init()
     59  ((n_pages + PTRS_PER_PTE) & -PTRS_PER_PTE) / PTRS_PER_PTE;   in populate()
     64  memblock_alloc(n_ptes * PTRS_PER_PTE * sizeof(pte_t), PAGE_SIZE);   in populate()
     74  for (i = 0, offset = 0; i < n_ptes; i++, offset += PTRS_PER_PTE)   in populate()
    116  for (i = 0; i < PTRS_PER_PTE; i++)   in kasan_init()

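The populate() hits above size the KASAN shadow mapping in whole PTE tables: the expression ((n_pages + PTRS_PER_PTE) & -PTRS_PER_PTE) / PTRS_PER_PTE rounds the page count up to PTE-table granularity, and the result drives a single memblock_alloc() of n_ptes * PTRS_PER_PTE * sizeof(pte_t) bytes. A minimal userspace sketch of that arithmetic; the PTRS_PER_PTE value and the page count below are illustrative, not the real riscv configuration:

    #include <assert.h>
    #include <stdio.h>

    /* Illustrative value only; the real constant comes from the arch headers. */
    #define PTRS_PER_PTE 512UL

    int main(void)
    {
        unsigned long n_pages = 1000;   /* hypothetical number of shadow pages */

        /* Same rounding as the riscv populate() snippet: round n_pages up to a
         * multiple of PTRS_PER_PTE, then count how many full PTE tables that is.
         * Note that (n + K) & -K lands on the next multiple even when n is
         * already aligned, so an aligned n_pages still gets one extra table. */
        unsigned long n_ptes =
            ((n_pages + PTRS_PER_PTE) & -PTRS_PER_PTE) / PTRS_PER_PTE;

        printf("%lu pages -> %lu PTE tables (%lu entries)\n",
               n_pages, n_ptes, n_ptes * PTRS_PER_PTE);
        assert(n_ptes * PTRS_PER_PTE >= n_pages);
        return 0;
    }
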
/kernel/linux/linux-5.10/arch/xtensa/mm/kasan_init.c
     25  for (i = 0; i < PTRS_PER_PTE; ++i)   in kasan_early_init()
     40  unsigned long n_pmds = n_pages / PTRS_PER_PTE;   in populate()
     55  for (k = 0; k < PTRS_PER_PTE; ++k, ++j) {   in populate()
     68  for (i = 0; i < n_pmds ; ++i, pte += PTRS_PER_PTE)   in populate()
     93  for (i = 0; i < PTRS_PER_PTE; ++i)   in kasan_init()

/kernel/linux/linux-5.10/arch/xtensa/mm/mmu.c
     28  n_pages = ALIGN(n_pages, PTRS_PER_PTE);   in init_pmd()
     41  for (i = 0; i < n_pages; i += PTRS_PER_PTE, ++pmd) {   in init_pmd()

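init_pmd() in the mmu.c hits rounds the page count up with ALIGN(n_pages, PTRS_PER_PTE) and then advances one PMD entry per PTRS_PER_PTE pages. A small sketch of that stepping, using a hypothetical ALIGN_UP macro equivalent to the kernel's power-of-two ALIGN() and a made-up page count:

    #include <stdio.h>

    #define PTRS_PER_PTE 1024UL
    /* Same round-up the kernel's ALIGN() performs for power-of-two alignments. */
    #define ALIGN_UP(x, a) (((x) + (a) - 1) & ~((a) - 1))

    int main(void)
    {
        unsigned long n_pages = 3000;          /* hypothetical mapping size in pages */
        unsigned long i, n_pmds = 0;

        n_pages = ALIGN_UP(n_pages, PTRS_PER_PTE);

        /* One PMD entry (i.e. one PTE table) covers PTRS_PER_PTE pages. */
        for (i = 0; i < n_pages; i += PTRS_PER_PTE)
            n_pmds++;

        printf("%lu pages need %lu PTE tables\n", n_pages, n_pmds);
        return 0;
    }
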
/kernel/linux/linux-6.6/arch/xtensa/mm/kasan_init.c
     24  for (i = 0; i < PTRS_PER_PTE; ++i)   in kasan_early_init()
     38  unsigned long n_pmds = n_pages / PTRS_PER_PTE;   in populate()
     53  for (k = 0; k < PTRS_PER_PTE; ++k, ++j) {   in populate()
     66  for (i = 0; i < n_pmds ; ++i, pte += PTRS_PER_PTE)   in populate()
     91  for (i = 0; i < PTRS_PER_PTE; ++i)   in kasan_init()

/kernel/linux/linux-6.6/arch/xtensa/mm/mmu.c
     30  n_pages = ALIGN(n_pages, PTRS_PER_PTE);   in init_pmd()
     43  for (i = 0; i < n_pages; i += PTRS_PER_PTE, ++pmd) {   in init_pmd()

/kernel/linux/linux-5.10/arch/s390/mm/pgalloc.c
    174  memset64(table, _PAGE_INVALID, PTRS_PER_PTE);   in page_table_alloc_pgste()
    175  memset64(table + PTRS_PER_PTE, 0, PTRS_PER_PTE);   in page_table_alloc_pgste()
    209  table += PTRS_PER_PTE;   in page_table_alloc()
    233  memset64((u64 *)table, _PAGE_INVALID, PTRS_PER_PTE);   in page_table_alloc()
    234  memset64((u64 *)table + PTRS_PER_PTE, 0, PTRS_PER_PTE);   in page_table_alloc()
    238  memset64((u64 *)table, _PAGE_INVALID, 2 * PTRS_PER_PTE);   in page_table_alloc()
    254  bit = (__pa(table) & ~PAGE_MASK)/(PTRS_PER_PTE*sizeof(pte_t));   in page_table_free()
    290  bit = (__pa(table) & ~PAGE_MASK) / (PTRS_PER_PTE*sizeof(pte_t));   in page_table_free_rcu()
         ... [all...]

/kernel/linux/linux-5.10/arch/s390/mm/pageattr.c
     71  mask = ~(PTRS_PER_PTE * sizeof(pte_t) - 1);   in pgt_set()
    126  for (i = 0; i < PTRS_PER_PTE; i++) {   in split_pmd_page()
    133  update_page_count(PG_DIRECT_MAP_4K, PTRS_PER_PTE);   in split_pmd_page()
    346  nr = PTRS_PER_PTE - (nr & (PTRS_PER_PTE - 1));   in __kernel_map_pages()

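On s390 a PTE table is PTRS_PER_PTE entries of 8 bytes, i.e. half of a 4 KiB page, so pgalloc.c hands out two tables per page and page_table_free() recovers which half it was given from the table's offset inside its page. A sketch of that index computation; the 2 KiB-half figure and the sample address are illustrative stand-ins, not values taken from a running system:

    #include <stdio.h>
    #include <stdint.h>

    #define PAGE_SIZE     4096UL
    #define PAGE_MASK     (~(PAGE_SIZE - 1))
    #define PTRS_PER_PTE  256UL              /* 256 * 8-byte entries = 2 KiB */

    typedef uint64_t pte_t;                  /* stand-in for the kernel type */

    int main(void)
    {
        /* Hypothetical physical address of a PTE table inside some page. */
        unsigned long table = 0x12345800UL;

        /* Same arithmetic as the page_table_free() hits: the offset within the
         * page divided by the table size gives the 2 KiB half (0 or 1) to release. */
        unsigned long bit = (table & ~PAGE_MASK) / (PTRS_PER_PTE * sizeof(pte_t));

        printf("table at %#lx occupies half %lu of its page\n", table, bit);
        return 0;
    }
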
/kernel/linux/linux-5.10/arch/hexagon/include/asm/pgtable.h   (PTRS_PER_PTE macro definitions)
     77  #define PTRS_PER_PTE 1024
     81  #define PTRS_PER_PTE 256
     85  #define PTRS_PER_PTE 64
     89  #define PTRS_PER_PTE 16
     93  #define PTRS_PER_PTE 4

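The hexagon header picks a different PTRS_PER_PTE for each configured page size. Assuming the five configs correspond to 4 KiB, 16 KiB, 64 KiB, 256 KiB and 1 MiB pages, every pairing keeps PTRS_PER_PTE * PAGE_SIZE at the same 4 MiB, presumably the span covered by one first-level entry on this port. A quick check of that assumed invariant (the pairings below are an assumption, not quoted from the header):

    #include <assert.h>
    #include <stdio.h>

    int main(void)
    {
        /* Assumed pairing of CONFIG_PAGE_SIZE_* with the PTRS_PER_PTE values
         * listed above; 4 MiB is the assumed span of one first-level entry. */
        static const struct { unsigned long page_size, ptrs_per_pte; } cfg[] = {
            {   4UL << 10, 1024 },
            {  16UL << 10,  256 },
            {  64UL << 10,   64 },
            { 256UL << 10,   16 },
            {   1UL << 20,    4 },
        };

        for (unsigned int i = 0; i < sizeof(cfg) / sizeof(cfg[0]); i++) {
            assert(cfg[i].page_size * cfg[i].ptrs_per_pte == 4UL << 20);
            printf("page %7lu B -> PTRS_PER_PTE %4lu (covers 4 MiB)\n",
                   cfg[i].page_size, cfg[i].ptrs_per_pte);
        }
        return 0;
    }
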
/kernel/linux/linux-6.6/arch/hexagon/include/asm/pgtable.h   (PTRS_PER_PTE macro definitions)
     80  #define PTRS_PER_PTE 1024
     84  #define PTRS_PER_PTE 256
     88  #define PTRS_PER_PTE 64
     92  #define PTRS_PER_PTE 16
     96  #define PTRS_PER_PTE 4

/kernel/linux/linux-6.6/arch/s390/mm/pgalloc.c
    150  memset64(table, _PAGE_INVALID, PTRS_PER_PTE);   in page_table_alloc_pgste()
    151  memset64(table + PTRS_PER_PTE, 0, PTRS_PER_PTE);   in page_table_alloc_pgste()
    271  table += PTRS_PER_PTE;   in page_table_alloc()
    296  memset64((u64 *)table, _PAGE_INVALID, PTRS_PER_PTE);   in page_table_alloc()
    297  memset64((u64 *)table + PTRS_PER_PTE, 0, PTRS_PER_PTE);   in page_table_alloc()
    301  memset64((u64 *)table, _PAGE_INVALID, 2 * PTRS_PER_PTE);   in page_table_alloc()
    340  bit = ((unsigned long) table & ~PAGE_MASK)/(PTRS_PER_PTE*sizeof(pte_t));   in page_table_free()
    393  bit = ((unsigned long) table & ~PAGE_MASK) / (PTRS_PER_PTE*sizeof(pte_t));   in page_table_free_rcu()
         ... [all...]

/kernel/linux/linux-6.6/arch/arc/mm/highmem.c
     68  BUILD_BUG_ON(LAST_PKMAP > PTRS_PER_PTE);   in kmap_init()
     69  BUILD_BUG_ON(FIX_KMAP_SLOTS > PTRS_PER_PTE);   in kmap_init()

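The highmem.c hits are compile-time guards: both the persistent kmap window (LAST_PKMAP) and the fixmap kmap slots must each fit inside a single PTE table, because kmap_init() sets up only one page table for each of those regions. A userspace equivalent of those BUILD_BUG_ON() checks using _Static_assert, with all constants made up for illustration:

    #include <stdio.h>

    /* Illustrative values only, standing in for the kernel constants. */
    #define PTRS_PER_PTE   512
    #define LAST_PKMAP     256     /* hypothetical size of the pkmap window */
    #define FIX_KMAP_SLOTS 64      /* hypothetical number of fixmap slots   */

    /* Compile-time equivalents of the BUILD_BUG_ON() checks in kmap_init(). */
    _Static_assert(LAST_PKMAP <= PTRS_PER_PTE,
                   "pkmap window must fit in one PTE table");
    _Static_assert(FIX_KMAP_SLOTS <= PTRS_PER_PTE,
                   "fixmap slots must fit in one PTE table");

    int main(void)
    {
        printf("both kmap regions fit in a %d-entry PTE table\n", PTRS_PER_PTE);
        return 0;
    }
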
/kernel/linux/linux-5.10/arch/m68k/include/asm/pgtable_mm.h   (PTRS_PER_PTE macro definitions)
     60  #define PTRS_PER_PTE 16
     65  #define PTRS_PER_PTE 512
     70  #define PTRS_PER_PTE 64

/kernel/linux/linux-5.10/arch/arm/include/asm/pgtable-2level.h   (PTRS_PER_PTE macro definition and related constants)
     70  #define PTRS_PER_PTE 512
     74  #define PTE_HWTABLE_PTRS (PTRS_PER_PTE)
     76  #define PTE_HWTABLE_SIZE (PTRS_PER_PTE * sizeof(u32))

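On 32-bit ARM with 2-level tables the hardware PTE table has 256 entries, but Linux defines PTRS_PER_PTE as 512 so that one 4 KiB page holds two shadow "Linux" tables (carrying the extra software status bits) followed by the two hardware tables; PTE_HWTABLE_PTRS and PTE_HWTABLE_SIZE locate the hardware half. A sketch of that page layout in terms of byte offsets only, with no real page tables involved:

    #include <stdio.h>
    #include <stdint.h>

    typedef uint32_t pte_t;                      /* 32-bit software PTE (stand-in) */

    #define PTRS_PER_PTE      512                /* Linux view: 512 entries        */
    #define PTE_HWTABLE_PTRS  (PTRS_PER_PTE)
    #define PTE_HWTABLE_OFF   (PTE_HWTABLE_PTRS * sizeof(pte_t))
    #define PTE_HWTABLE_SIZE  (PTRS_PER_PTE * sizeof(uint32_t))

    int main(void)
    {
        /* One 4 KiB page: Linux tables in the first half, the two 256-entry
         * hardware tables in the second half, starting at PTE_HWTABLE_OFF. */
        printf("Linux tables:    bytes 0 .. %zu\n", PTE_HWTABLE_OFF - 1);
        printf("hardware tables: bytes %zu .. %zu\n",
               PTE_HWTABLE_OFF, PTE_HWTABLE_OFF + PTE_HWTABLE_SIZE - 1);
        return 0;
    }
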
/kernel/linux/linux-6.6/arch/m68k/include/asm/pgtable_mm.h   (PTRS_PER_PTE macro definitions)
     58  #define PTRS_PER_PTE 16
     63  #define PTRS_PER_PTE 512
     68  #define PTRS_PER_PTE 64

/kernel/linux/linux-6.6/arch/arm/include/asm/pgtable-2level.h   (PTRS_PER_PTE macro definition and related constants)
     70  #define PTRS_PER_PTE 512
     74  #define PTE_HWTABLE_PTRS (PTRS_PER_PTE)
     76  #define PTE_HWTABLE_SIZE (PTRS_PER_PTE * sizeof(u32))

/kernel/linux/linux-5.10/arch/powerpc/mm/book3s64/hash_64k.c
     89  rpte = __real_pte(__pte(old_pte), ptep, PTRS_PER_PTE);   in __hash_page_4K()
    216  new_pte |= pte_set_hidx(ptep, rpte, subpg_index, slot, PTRS_PER_PTE);   in __hash_page_4K()
    264  rpte = __real_pte(__pte(old_pte), ptep, PTRS_PER_PTE);   in __hash_page_64K()
    329  new_pte |= pte_set_hidx(ptep, rpte, 0, slot, PTRS_PER_PTE);   in __hash_page_64K()

/kernel/linux/linux-5.10/arch/powerpc/mm/book3s64/subpage_prot.c
    120  i = (addr >> PAGE_SHIFT) & (PTRS_PER_PTE - 1);   in subpage_prot_clear()
    121  nw = PTRS_PER_PTE - i;   in subpage_prot_clear()
    267  i = (addr >> PAGE_SHIFT) & (PTRS_PER_PTE - 1);   in SYSCALL_DEFINE3()
    268  nw = PTRS_PER_PTE - i;   in SYSCALL_DEFINE3()

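The subpage_prot.c hits walk one PTE table at a time: the page index within the current table is (addr >> PAGE_SHIFT) & (PTRS_PER_PTE - 1), and nw = PTRS_PER_PTE - i is how many entries remain before the next table boundary, i.e. the most the per-table loop can process in one pass. A worked sketch of that arithmetic; PAGE_SHIFT, PTRS_PER_PTE and the address are illustrative values, not the book3s64 configuration:

    #include <stdio.h>

    #define PAGE_SHIFT    12                 /* illustrative: 4 KiB pages  */
    #define PTRS_PER_PTE  512UL              /* illustrative table size    */

    int main(void)
    {
        unsigned long addr = 0x12345000UL;   /* hypothetical user address */

        /* Index of addr's page within its PTE table ... */
        unsigned long i  = (addr >> PAGE_SHIFT) & (PTRS_PER_PTE - 1);
        /* ... and how many entries are left until the table boundary. */
        unsigned long nw = PTRS_PER_PTE - i;

        printf("addr %#lx: entry %lu of its table, %lu entries to the boundary\n",
               addr, i, nw);
        return 0;
    }
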
/kernel/linux/linux-5.10/arch/m68k/mm/sun3mmu.c
     71  next_pgtable += PTRS_PER_PTE * sizeof (pte_t);   in paging_init()
     77  for (i=0; i<PTRS_PER_PTE; ++i, ++pg_table) {   in paging_init()

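paging_init() on Sun3 carves consecutive PTE tables out of one preallocated region, bumping a cursor by PTRS_PER_PTE * sizeof(pte_t) for each table and then filling all PTRS_PER_PTE entries. A simplified userspace sketch of that carving; the buffer, the entry type and the zero fill are stand-ins for the real bootmem region and pfn/protection values:

    #include <stdio.h>
    #include <stdint.h>

    typedef uint32_t pte_t;                   /* stand-in for the kernel type  */
    #define PTRS_PER_PTE 16                   /* Sun3 value from pgtable_mm.h  */
    #define N_TABLES     4                    /* hypothetical number of tables */

    static unsigned char pool[N_TABLES * PTRS_PER_PTE * sizeof(pte_t)];

    int main(void)
    {
        unsigned char *next_pgtable = pool;

        for (int t = 0; t < N_TABLES; t++) {
            pte_t *pg_table = (pte_t *)next_pgtable;
            /* Advance the cursor past this table, as paging_init() does. */
            next_pgtable += PTRS_PER_PTE * sizeof(pte_t);

            /* Fill every entry; the kernel writes real pfn/protection bits here. */
            for (int i = 0; i < PTRS_PER_PTE; i++)
                pg_table[i] = 0;

            printf("table %d at offset %td\n", t, (unsigned char *)pg_table - pool);
        }
        return 0;
    }
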
/kernel/linux/linux-5.10/arch/arc/include/asm/pgalloc.h
     87  return get_order(PTRS_PER_PTE * sizeof(pte_t));   in __get_order_pte()
    109  memzero((void *)pte_pg, PTRS_PER_PTE * sizeof(pte_t));   in pte_alloc_one()

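__get_order_pte() converts the byte size of one PTE table, PTRS_PER_PTE * sizeof(pte_t), into an allocation order (a power-of-two number of pages). A sketch of the same get_order()-style computation; the constants and the helper name get_order_sketch() are illustrative, not the kernel implementation:

    #include <stdio.h>
    #include <stdint.h>

    #define PAGE_SHIFT   12
    #define PAGE_SIZE    (1UL << PAGE_SHIFT)
    #define PTRS_PER_PTE 1024UL               /* illustrative table size   */
    typedef uint32_t pte_t;                   /* stand-in: 4-byte entries  */

    /* Smallest order such that (PAGE_SIZE << order) >= size, like get_order(). */
    static unsigned int get_order_sketch(unsigned long size)
    {
        unsigned int order = 0;

        while ((PAGE_SIZE << order) < size)
            order++;
        return order;
    }

    int main(void)
    {
        unsigned long table_bytes = PTRS_PER_PTE * sizeof(pte_t);

        printf("PTE table is %lu bytes -> order %u allocation\n",
               table_bytes, get_order_sketch(table_bytes));
        return 0;
    }
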
/kernel/linux/linux-5.10/arch/arc/mm/highmem.c
    112  BUILD_BUG_ON(KM_TYPE_NR > PTRS_PER_PTE);   in kmap_init()
    115  BUILD_BUG_ON(LAST_PKMAP > PTRS_PER_PTE);   in kmap_init()

/kernel/linux/linux-6.6/arch/powerpc/mm/kasan/init_book3s_64.c
     67  for (i = 0; i < PTRS_PER_PTE; i++)   in kasan_init()
     84  for (i = 0; i < PTRS_PER_PTE; i++)   in kasan_init()

/kernel/linux/linux-5.10/arch/loongarch/include/asm/pgtable-64.h   (PTRS_PER_PTE macro definition and uses)
     55  #define PTRS_PER_PTE ((PAGE_SIZE << PTE_ORDER) >> 3)
     78  min(PTRS_PER_PGD * PTRS_PER_PUD * PTRS_PER_PMD * PTRS_PER_PTE * PAGE_SIZE, (1UL << cpu_vabits)) - PMD_SIZE - VMEMMAP_SIZE)
     82  min(PTRS_PER_PGD * PTRS_PER_PUD * PTRS_PER_PMD * PTRS_PER_PTE * PAGE_SIZE, (1UL << cpu_vabits) / 2) - PMD_SIZE - VMEMMAP_SIZE)
    101  extern pte_t invalid_pte_table[PTRS_PER_PTE];

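The loongarch header derives PTRS_PER_PTE as (PAGE_SIZE << PTE_ORDER) >> 3, i.e. the number of 8-byte entries in a PTE table spanning 2^PTE_ORDER pages. A worked example of that formula under the assumption of 16 KiB pages and PTE_ORDER 0 (both assumptions, not read from a kernel config):

    #include <stdio.h>

    #define PAGE_SHIFT 14                    /* assume 16 KiB pages        */
    #define PAGE_SIZE  (1UL << PAGE_SHIFT)
    #define PTE_ORDER  0                     /* one page per PTE table     */

    /* Same formula as the header: bytes in the table divided by 8-byte entries. */
    #define PTRS_PER_PTE ((PAGE_SIZE << PTE_ORDER) >> 3)

    int main(void)
    {
        printf("16 KiB pages, PTE_ORDER %d -> PTRS_PER_PTE = %lu entries\n",
               PTE_ORDER, PTRS_PER_PTE);
        printf("one PTE table maps %lu MiB\n", (PTRS_PER_PTE * PAGE_SIZE) >> 20);
        return 0;
    }
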
/kernel/linux/linux-6.6/arch/powerpc/mm/book3s64/hash_64k.c
     91  rpte = __real_pte(__pte(old_pte), ptep, PTRS_PER_PTE);   in __hash_page_4K()
    218  new_pte |= pte_set_hidx(ptep, rpte, subpg_index, slot, PTRS_PER_PTE);   in __hash_page_4K()
    269  rpte = __real_pte(__pte(old_pte), ptep, PTRS_PER_PTE);   in __hash_page_64K()
    334  new_pte |= pte_set_hidx(ptep, rpte, 0, slot, PTRS_PER_PTE);   in __hash_page_64K()

/kernel/linux/linux-6.6/arch/powerpc/mm/book3s64/subpage_prot.c
    122  i = (addr >> PAGE_SHIFT) & (PTRS_PER_PTE - 1);   in subpage_prot_clear()
    123  nw = PTRS_PER_PTE - i;   in subpage_prot_clear()
    261  i = (addr >> PAGE_SHIFT) & (PTRS_PER_PTE - 1);   in SYSCALL_DEFINE3()
    262  nw = PTRS_PER_PTE - i;   in SYSCALL_DEFINE3()

/kernel/linux/linux-6.6/arch/riscv/kvm/tlb.c
     26  if (PTRS_PER_PTE < (gpsz >> order)) {   in kvm_riscv_local_hfence_gvma_vmid_gpa()
     54  if (PTRS_PER_PTE < (gpsz >> order)) {   in kvm_riscv_local_hfence_gvma_gpa()
     85  if (PTRS_PER_PTE < (gvsz >> order)) {   in kvm_riscv_local_hfence_vvma_asid_gva()
    125  if (PTRS_PER_PTE < (gvsz >> order)) {   in kvm_riscv_local_hfence_vvma_gva()

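Each of the tlb.c hits applies the same heuristic: if the guest range covers more than PTRS_PER_PTE pages of the given order, invalidating it page by page would cost more than wiping the whole address space, so the code falls back to the corresponding *_all() flush. A sketch of that decision with the flush operations reduced to counters; the function name flush_range(), the PTRS_PER_PTE value and the sample ranges are all hypothetical:

    #include <stdio.h>

    #define PTRS_PER_PTE 512UL               /* illustrative */

    /* Stand-ins for the per-page and full-flush operations. */
    static unsigned long page_flushes, full_flushes;

    static void flush_range(unsigned long base, unsigned long size, unsigned int order)
    {
        unsigned long pos;

        /* Same cutoff as the kvm_riscv_local_hfence_*() hits: give up on
         * per-page invalidation once the range exceeds PTRS_PER_PTE pages. */
        if (PTRS_PER_PTE < (size >> order)) {
            full_flushes++;
            return;
        }

        for (pos = base; pos < base + size; pos += 1UL << order)
            page_flushes++;
    }

    int main(void)
    {
        flush_range(0x80000000UL, 64UL << 12, 12);   /* small range: per page   */
        flush_range(0x80000000UL, 4UL << 20, 12);    /* large range: full flush */
        printf("page flushes: %lu, full flushes: %lu\n", page_flushes, full_flushes);
        return 0;
    }
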