/kernel/linux/linux-5.10/drivers/gpu/drm/nouveau/nvkm/subdev/mmu/ |
vmmnv44.c | 28 dma_addr_t *list, u32 ptei, u32 ptes) in nv44_vmm_pgt_fill() 38 while (ptes--) { in nv44_vmm_pgt_fill() 74 u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr) in nv44_vmm_pgt_pte() 79 const u32 pten = min(ptes, 4 - (ptei & 3)); in nv44_vmm_pgt_pte() 84 ptes -= pten; in nv44_vmm_pgt_pte() 87 while (ptes >= 4) { in nv44_vmm_pgt_pte() 94 ptes -= 4; in nv44_vmm_pgt_pte() 97 if (ptes) { in nv44_vmm_pgt_pte() 98 for (i = 0; i < ptes; i++, addr += 0x1000) in nv44_vmm_pgt_pte() 100 nv44_vmm_pgt_fill(vmm, pt, tmp, ptei, ptes); in nv44_vmm_pgt_pte() 27 nv44_vmm_pgt_fill(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, dma_addr_t *list, u32 ptei, u32 ptes) nv44_vmm_pgt_fill() argument 73 nv44_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr) nv44_vmm_pgt_pte() argument 105 nv44_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map) nv44_vmm_pgt_sgl() argument 112 nv44_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map) nv44_vmm_pgt_dma() argument 147 nv44_vmm_pgt_unmap(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) nv44_vmm_pgt_unmap() argument [all...] |
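A note on the nv44 entries above: NV44 packs four PTEs per naturally aligned group, so nv44_vmm_pgt_pte() splits every run into an unaligned head (the min(ptes, 4 - (ptei & 3)) clamp), whole groups of four (the while (ptes >= 4) loop), and a leftover tail pushed through nv44_vmm_pgt_fill(). A minimal userspace sketch of that split, assuming illustrative stand-ins (write_head/write_group/write_tail) for the driver's actual write paths:

/* Standalone model of the head/middle/tail split in nv44_vmm_pgt_pte().
 * The NV44 stores four PTEs per group, so writes are batched on
 * 4-entry boundaries; the stand-in functions just report each batch. */
#include <stdint.h>
#include <stdio.h>

static void write_head(uint32_t ptei, uint32_t n)  { printf("head  %u..%u\n", ptei, ptei + n - 1); }
static void write_group(uint32_t ptei)             { printf("group %u..%u\n", ptei, ptei + 3); }
static void write_tail(uint32_t ptei, uint32_t n)  { printf("tail  %u..%u\n", ptei, ptei + n - 1); }

static void pgt_pte_model(uint32_t ptei, uint32_t ptes)
{
	/* Head: partial group up to the next 4-entry boundary. */
	if (ptei & 3) {
		uint32_t pten = ptes < 4 - (ptei & 3) ? ptes : 4 - (ptei & 3);
		write_head(ptei, pten);
		ptei += pten;
		ptes -= pten;
	}
	/* Middle: whole, naturally aligned groups of four. */
	while (ptes >= 4) {
		write_group(ptei);
		ptei += 4;
		ptes -= 4;
	}
	/* Tail: leftovers, which the driver routes through nv44_vmm_pgt_fill(). */
	if (ptes)
		write_tail(ptei, ptes);
}

int main(void)
{
	pgt_pte_model(2, 11); /* head 2..3, groups 4..7 and 8..11, tail 12..12 */
	return 0;
}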
vmmgp100.c | 35 struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) in gp100_vmm_pfn_unmap() 41 while (ptes--) { in gp100_vmm_pfn_unmap() 56 struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) in gp100_vmm_pfn_clear() 60 while (ptes--) { in gp100_vmm_pfn_clear() 76 u32 ptei, u32 ptes, struct nvkm_vmm_map *map) in gp100_vmm_pgt_pfn() 82 for (; ptes; ptes--, map->pfn++) { in gp100_vmm_pgt_pfn() 113 u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr) in gp100_vmm_pgt_pte() 117 map->type += ptes * map->ctag; in gp100_vmm_pgt_pte() 119 while (ptes--) { in gp100_vmm_pgt_pte() 34 gp100_vmm_pfn_unmap(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) gp100_vmm_pfn_unmap() argument 55 gp100_vmm_pfn_clear(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) gp100_vmm_pfn_clear() argument 75 gp100_vmm_pgt_pfn(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map) gp100_vmm_pgt_pfn() argument 112 gp100_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr) gp100_vmm_pgt_pte() argument 126 gp100_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map) gp100_vmm_pgt_sgl() argument 133 gp100_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map) gp100_vmm_pgt_dma() argument 152 gp100_vmm_pgt_mem(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map) gp100_vmm_pgt_mem() argument 159 gp100_vmm_pgt_sparse(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) gp100_vmm_pgt_sparse() argument 179 gp100_vmm_lpt_invalid(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) gp100_vmm_lpt_invalid() argument 195 gp100_vmm_pd0_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr) gp100_vmm_pd0_pte() argument 209 gp100_vmm_pd0_mem(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map) gp100_vmm_pd0_mem() argument 265 gp100_vmm_pd0_pfn_unmap(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) gp100_vmm_pd0_pfn_unmap() argument 287 gp100_vmm_pd0_pfn_clear(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) gp100_vmm_pd0_pfn_clear() argument 309 gp100_vmm_pd0_pfn(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map) gp100_vmm_pd0_pfn() argument [all...] |
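The gp100_vmm_pgt_pte() fragments above show the common write-loop shape in these files: build the first 64-bit PTE from the shifted address plus type bits, charge the whole run's compression tags up front (map->type += ptes * map->ctag), then stride through the table. A sketch under those assumptions, with a plain array standing in for the VMM_WO064() page-table write and struct map_model as a cut-down nvkm_vmm_map:

/* Minimal model of the gp100 PTE write loop. */
#include <stdint.h>
#include <stdio.h>

struct map_model {
	uint64_t type; /* type bits, including the running compression tag */
	uint64_t ctag; /* tag increment per PTE */
	uint64_t next; /* per-page address stride, already in PTE format */
};

static void pgt_pte_model(uint64_t *pte, uint32_t ptei, uint32_t ptes,
			  struct map_model *map, uint64_t addr)
{
	uint64_t data = (addr >> 4) | map->type;

	/* Account for every tag this run consumes, as the driver does. */
	map->type += ptes * map->ctag;

	while (ptes--) {
		pte[ptei++] = data; /* stands in for VMM_WO064() */
		data += map->next;
	}
}

int main(void)
{
	uint64_t pte[4] = {0};
	struct map_model map = { .type = 0x1, .ctag = 0, .next = 0x1000 >> 4 };

	pgt_pte_model(pte, 0, 4, &map, 0x100000);
	printf("pte[0]=%#llx pte[3]=%#llx\n",
	       (unsigned long long)pte[0], (unsigned long long)pte[3]);
	return 0;
}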
vmmnv04.c | 29 u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr) in nv04_vmm_pgt_pte() 32 while (ptes--) { in nv04_vmm_pgt_pte() 40 u32 ptei, u32 ptes, struct nvkm_vmm_map *map) in nv04_vmm_pgt_sgl() 42 VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv04_vmm_pgt_pte); in nv04_vmm_pgt_sgl() 47 u32 ptei, u32 ptes, struct nvkm_vmm_map *map) in nv04_vmm_pgt_dma() 51 while (ptes--) in nv04_vmm_pgt_dma() 55 VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv04_vmm_pgt_pte); in nv04_vmm_pgt_dma() 61 struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) in nv04_vmm_pgt_unmap() 63 VMM_FO032(pt, vmm, 8 + (ptei * 4), 0, ptes); in nv04_vmm_pgt_unmap() 28 nv04_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr) nv04_vmm_pgt_pte() argument 39 nv04_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map) nv04_vmm_pgt_sgl() argument 46 nv04_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map) nv04_vmm_pgt_dma() argument 60 nv04_vmm_pgt_unmap(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) nv04_vmm_pgt_unmap() argument
|
vmmnv41.c | 28 u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr) in nv41_vmm_pgt_pte() 31 while (ptes--) { in nv41_vmm_pgt_pte() 39 u32 ptei, u32 ptes, struct nvkm_vmm_map *map) in nv41_vmm_pgt_sgl() 41 VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv41_vmm_pgt_pte); in nv41_vmm_pgt_sgl() 46 u32 ptei, u32 ptes, struct nvkm_vmm_map *map) in nv41_vmm_pgt_dma() 50 while (ptes--) { in nv41_vmm_pgt_dma() 56 VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv41_vmm_pgt_pte); in nv41_vmm_pgt_dma() 62 struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) in nv41_vmm_pgt_unmap() 64 VMM_FO032(pt, vmm, ptei * 4, 0, ptes); in nv41_vmm_pgt_unmap() 27 nv41_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr) nv41_vmm_pgt_pte() argument 38 nv41_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map) nv41_vmm_pgt_sgl() argument 45 nv41_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map) nv41_vmm_pgt_dma() argument 61 nv41_vmm_pgt_unmap(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) nv41_vmm_pgt_unmap() argument
|
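The nv04/nv41 entries above pair a per-entry 32-bit write loop for mapping with a bulk fill for unmapping: VMM_FO032() writes one value across a run of words, and nv04's table starts 8 bytes into the object, hence the 8 + (ptei * 4) offset in nv04_vmm_pgt_unmap(). A rough model of both halves, assuming a plain in-memory array in place of the driver's accessors:

/* Model of the nv04/nv41 map/unmap pattern. */
#include <stdint.h>
#include <string.h>
#include <stdio.h>

static void pgt_pte_model(uint32_t *pt, uint32_t ptei, uint32_t ptes,
			  uint32_t data, uint32_t next)
{
	/* Map: one 32-bit PTE per page, address advancing each entry. */
	while (ptes--) {
		pt[ptei++] = data;
		data += next;
	}
}

static void pgt_unmap_model(uint32_t *pt, uint32_t ptei, uint32_t ptes)
{
	/* Unmap: VMM_FO032(pt, vmm, ptei * 4, 0, ptes) fills ptes words
	 * with 0; memset plays that role here. */
	memset(&pt[ptei], 0, ptes * sizeof(uint32_t));
}

int main(void)
{
	uint32_t pt[8] = {0};

	pgt_pte_model(pt, 2, 4, 0x00001003u, 0x1000u); /* hypothetical PTE bits */
	pgt_unmap_model(pt, 2, 2);
	printf("pt[3]=%#x pt[4]=%#x\n", pt[3], pt[4]); /* 0, 0x3003 */
	return 0;
}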
vmm.c | 198 const struct nvkm_vmm_desc *desc, u32 ptei, u32 ptes) in nvkm_vmm_unref_sptes() 209 for (lpti = ptei >> sptb; ptes; spti = 0, lpti++) { in nvkm_vmm_unref_sptes() 210 const u32 pten = min(sptn - spti, ptes); in nvkm_vmm_unref_sptes() 212 ptes -= pten; in nvkm_vmm_unref_sptes() 222 for (ptes = 1, ptei++; ptei < lpti; ptes++, ptei++) { in nvkm_vmm_unref_sptes() 236 for (ptes = 1, ptei++; ptei < lpti; ptes++, ptei++) { in nvkm_vmm_unref_sptes() 243 TRA(it, "LPTE %05x: U -> S %d PTEs", pteb, ptes); in nvkm_vmm_unref_sptes() 244 pair->func->sparse(vmm, pgt->pt[0], pteb, ptes); in nvkm_vmm_unref_sptes() 197 nvkm_vmm_unref_sptes(struct nvkm_vmm_iter *it, struct nvkm_vmm_pt *pgt, const struct nvkm_vmm_desc *desc, u32 ptei, u32 ptes) nvkm_vmm_unref_sptes() argument 258 nvkm_vmm_unref_ptes(struct nvkm_vmm_iter *it, bool pfn, u32 ptei, u32 ptes) nvkm_vmm_unref_ptes() argument 296 nvkm_vmm_ref_sptes(struct nvkm_vmm_iter *it, struct nvkm_vmm_pt *pgt, const struct nvkm_vmm_desc *desc, u32 ptei, u32 ptes) nvkm_vmm_ref_sptes() argument 364 nvkm_vmm_ref_ptes(struct nvkm_vmm_iter *it, bool pfn, u32 ptei, u32 ptes) nvkm_vmm_ref_ptes() argument 381 nvkm_vmm_sparse_ptes(const struct nvkm_vmm_desc *desc, struct nvkm_vmm_pt *pgt, u32 ptei, u32 ptes) nvkm_vmm_sparse_ptes() argument 394 nvkm_vmm_sparse_unref_ptes(struct nvkm_vmm_iter *it, bool pfn, u32 ptei, u32 ptes) nvkm_vmm_sparse_unref_ptes() argument 406 nvkm_vmm_sparse_ref_ptes(struct nvkm_vmm_iter *it, bool pfn, u32 ptei, u32 ptes) nvkm_vmm_sparse_ref_ptes() argument 423 u32 pteb, ptei, ptes; nvkm_vmm_ref_hwpt() local 536 const u32 ptes = min_t(u64, it.cnt, pten - ptei); nvkm_vmm_iter() local 1805 nvkm_vmm_boot_ptes(struct nvkm_vmm_iter *it, bool pfn, u32 ptei, u32 ptes) nvkm_vmm_boot_ptes() argument [all...] |
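One vmm.c detail worth spelling out is the iterator's clamp, const u32 ptes = min_t(u64, it.cnt, pten - ptei): each pass covers at most the remaining entries of the current page table, so a range that crosses table boundaries becomes a series of per-table runs that start at entry 0 after the first table. A small standalone illustration (pten and the counts are example values, not hardware constants):

/* Model of the per-table clamping in nvkm_vmm_iter(). */
#include <stdint.h>
#include <stdio.h>

int main(void)
{
	const uint32_t pten = 512; /* entries per page table (example) */
	uint64_t cnt = 1300;       /* total PTEs left to process */
	uint32_t ptei = 200;       /* starting index in the first table */

	while (cnt) {
		/* min_t(u64, it.cnt, pten - ptei) */
		uint32_t ptes = (uint32_t)(cnt < pten - ptei ? cnt : pten - ptei);

		printf("process %u PTEs at index %u\n", ptes, ptei);
		cnt -= ptes;
		ptei = 0; /* later tables are entered from their first entry */
	}
	return 0; /* prints runs of 312, 512 and 476 PTEs */
}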
vmmnv50.c | 33 u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr) in nv50_vmm_pgt_pte() 39 map->type += ptes * map->ctag; in nv50_vmm_pgt_pte() 41 while (ptes) { in nv50_vmm_pgt_pte() 44 if (ptes >= pten && IS_ALIGNED(ptei, pten)) in nv50_vmm_pgt_pte() 50 ptes -= pten; in nv50_vmm_pgt_pte() 59 u32 ptei, u32 ptes, struct nvkm_vmm_map *map) in nv50_vmm_pgt_sgl() 61 VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv50_vmm_pgt_pte); in nv50_vmm_pgt_sgl() 66 u32 ptei, u32 ptes, struct nvkm_vmm_map *map) in nv50_vmm_pgt_dma() 69 VMM_SPAM(vmm, "DMAA %08x %08x PTE(s)", ptei, ptes); in nv50_vmm_pgt_dma() 71 while (ptes--) { in nv50_vmm_pgt_dma() 32 nv50_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr) nv50_vmm_pgt_pte() argument 58 nv50_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map) nv50_vmm_pgt_sgl() argument 65 nv50_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map) nv50_vmm_pgt_dma() argument 84 nv50_vmm_pgt_mem(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map) nv50_vmm_pgt_mem() argument 91 nv50_vmm_pgt_unmap(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) nv50_vmm_pgt_unmap() argument [all...] |
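nv50_vmm_pgt_pte() layers block-size selection on top of the basic loop: the if (ptes >= pten && IS_ALIGNED(ptei, pten)) test walks block sizes downward and picks the largest power of two, up to 128 PTEs, that both fits the remaining count and is naturally aligned at ptei; the chosen log2 is then encoded into the PTEs of that block. A sketch of just that selection walk:

/* Model of the nv50 largest-aligned-block walk. */
#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint32_t ptei = 24, ptes = 300;

	while (ptes) {
		int log2blk;
		uint32_t pten = 1;

		for (log2blk = 7; log2blk >= 0; log2blk--) {
			pten = 1u << log2blk;
			/* ptes >= pten && IS_ALIGNED(ptei, pten) */
			if (ptes >= pten && (ptei & (pten - 1)) == 0)
				break;
		}
		printf("block of %3u PTEs at %u (log2blk=%d)\n",
		       pten, ptei, log2blk);
		ptei += pten;
		ptes -= pten;
	}
	return 0; /* blocks: 8@24, 32@32, 64@64, 128@128, 64@256, 4@320 */
}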
vmmgf100.c | 33 u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr) in gf100_vmm_pgt_pte() 39 while (ptes--) { in gf100_vmm_pgt_pte() 48 map->type += ptes * map->ctag; in gf100_vmm_pgt_pte() 50 while (ptes--) { in gf100_vmm_pgt_pte() 59 u32 ptei, u32 ptes, struct nvkm_vmm_map *map) in gf100_vmm_pgt_sgl() 61 VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, gf100_vmm_pgt_pte); in gf100_vmm_pgt_sgl() 66 u32 ptei, u32 ptes, struct nvkm_vmm_map *map) in gf100_vmm_pgt_dma() 69 VMM_SPAM(vmm, "DMAA %08x %08x PTE(s)", ptei, ptes); in gf100_vmm_pgt_dma() 71 while (ptes--) { in gf100_vmm_pgt_dma() 80 VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, gf100_vmm_pgt_pte); in gf100_vmm_pgt_dma() 32 gf100_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr) gf100_vmm_pgt_pte() argument 58 gf100_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map) gf100_vmm_pgt_sgl() argument 65 gf100_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map) gf100_vmm_pgt_dma() argument 84 gf100_vmm_pgt_mem(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map) gf100_vmm_pgt_mem() argument 91 gf100_vmm_pgt_unmap(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) gf100_vmm_pgt_unmap() argument [all...] |
vmmgk104.c | 26 struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) in gk104_vmm_lpt_invalid() 29 VMM_FO064(pt, vmm, ptei * 8, BIT_ULL(1) /* PRIV. */, ptes); in gk104_vmm_lpt_invalid() 25 gk104_vmm_lpt_invalid(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) gk104_vmm_lpt_invalid() argument
|
/kernel/linux/linux-6.6/drivers/gpu/drm/nouveau/nvkm/subdev/mmu/ |
vmmnv44.c | 28 dma_addr_t *list, u32 ptei, u32 ptes) in nv44_vmm_pgt_fill() 38 while (ptes--) { in nv44_vmm_pgt_fill() 74 u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr) in nv44_vmm_pgt_pte() 79 const u32 pten = min(ptes, 4 - (ptei & 3)); in nv44_vmm_pgt_pte() 84 ptes -= pten; in nv44_vmm_pgt_pte() 87 while (ptes >= 4) { in nv44_vmm_pgt_pte() 94 ptes -= 4; in nv44_vmm_pgt_pte() 97 if (ptes) { in nv44_vmm_pgt_pte() 98 for (i = 0; i < ptes; i++, addr += 0x1000) in nv44_vmm_pgt_pte() 100 nv44_vmm_pgt_fill(vmm, pt, tmp, ptei, ptes); in nv44_vmm_pgt_pte() 27 nv44_vmm_pgt_fill(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, dma_addr_t *list, u32 ptei, u32 ptes) nv44_vmm_pgt_fill() argument 73 nv44_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr) nv44_vmm_pgt_pte() argument 105 nv44_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map) nv44_vmm_pgt_sgl() argument 112 nv44_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map) nv44_vmm_pgt_dma() argument 147 nv44_vmm_pgt_unmap(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) nv44_vmm_pgt_unmap() argument [all...] |
vmmgp100.c | 35 struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) in gp100_vmm_pfn_unmap() 41 while (ptes--) { in gp100_vmm_pfn_unmap() 56 struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) in gp100_vmm_pfn_clear() 60 while (ptes--) { in gp100_vmm_pfn_clear() 76 u32 ptei, u32 ptes, struct nvkm_vmm_map *map) in gp100_vmm_pgt_pfn() 82 for (; ptes; ptes--, map->pfn++) { in gp100_vmm_pgt_pfn() 116 u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr) in gp100_vmm_pgt_pte() 120 map->type += ptes * map->ctag; in gp100_vmm_pgt_pte() 122 while (ptes--) { in gp100_vmm_pgt_pte() 34 gp100_vmm_pfn_unmap(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) gp100_vmm_pfn_unmap() argument 55 gp100_vmm_pfn_clear(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) gp100_vmm_pfn_clear() argument 75 gp100_vmm_pgt_pfn(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map) gp100_vmm_pgt_pfn() argument 115 gp100_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr) gp100_vmm_pgt_pte() argument 129 gp100_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map) gp100_vmm_pgt_sgl() argument 136 gp100_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map) gp100_vmm_pgt_dma() argument 155 gp100_vmm_pgt_mem(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map) gp100_vmm_pgt_mem() argument 162 gp100_vmm_pgt_sparse(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) gp100_vmm_pgt_sparse() argument 182 gp100_vmm_lpt_invalid(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) gp100_vmm_lpt_invalid() argument 198 gp100_vmm_pd0_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr) gp100_vmm_pd0_pte() argument 212 gp100_vmm_pd0_mem(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map) gp100_vmm_pd0_mem() argument 268 gp100_vmm_pd0_pfn_unmap(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) gp100_vmm_pd0_pfn_unmap() argument 290 gp100_vmm_pd0_pfn_clear(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) gp100_vmm_pd0_pfn_clear() argument 312 gp100_vmm_pd0_pfn(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map) gp100_vmm_pd0_pfn() argument [all...] |
vmmnv04.c | 29 u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr) in nv04_vmm_pgt_pte() 32 while (ptes--) { in nv04_vmm_pgt_pte() 40 u32 ptei, u32 ptes, struct nvkm_vmm_map *map) in nv04_vmm_pgt_sgl() 42 VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv04_vmm_pgt_pte); in nv04_vmm_pgt_sgl() 47 u32 ptei, u32 ptes, struct nvkm_vmm_map *map) in nv04_vmm_pgt_dma() 51 while (ptes--) in nv04_vmm_pgt_dma() 55 VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv04_vmm_pgt_pte); in nv04_vmm_pgt_dma() 61 struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) in nv04_vmm_pgt_unmap() 63 VMM_FO032(pt, vmm, 8 + (ptei * 4), 0, ptes); in nv04_vmm_pgt_unmap() 28 nv04_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr) nv04_vmm_pgt_pte() argument 39 nv04_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map) nv04_vmm_pgt_sgl() argument 46 nv04_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map) nv04_vmm_pgt_dma() argument 60 nv04_vmm_pgt_unmap(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) nv04_vmm_pgt_unmap() argument
|
vmmnv41.c | 28 u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr) in nv41_vmm_pgt_pte() 31 while (ptes--) { in nv41_vmm_pgt_pte() 39 u32 ptei, u32 ptes, struct nvkm_vmm_map *map) in nv41_vmm_pgt_sgl() 41 VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv41_vmm_pgt_pte); in nv41_vmm_pgt_sgl() 46 u32 ptei, u32 ptes, struct nvkm_vmm_map *map) in nv41_vmm_pgt_dma() 50 while (ptes--) { in nv41_vmm_pgt_dma() 56 VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv41_vmm_pgt_pte); in nv41_vmm_pgt_dma() 62 struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) in nv41_vmm_pgt_unmap() 64 VMM_FO032(pt, vmm, ptei * 4, 0, ptes); in nv41_vmm_pgt_unmap() 27 nv41_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr) nv41_vmm_pgt_pte() argument 38 nv41_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map) nv41_vmm_pgt_sgl() argument 45 nv41_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map) nv41_vmm_pgt_dma() argument 61 nv41_vmm_pgt_unmap(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) nv41_vmm_pgt_unmap() argument
|
vmmnv50.c | 33 u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr) in nv50_vmm_pgt_pte() 39 map->type += ptes * map->ctag; in nv50_vmm_pgt_pte() 41 while (ptes) { in nv50_vmm_pgt_pte() 44 if (ptes >= pten && IS_ALIGNED(ptei, pten)) in nv50_vmm_pgt_pte() 50 ptes -= pten; in nv50_vmm_pgt_pte() 59 u32 ptei, u32 ptes, struct nvkm_vmm_map *map) in nv50_vmm_pgt_sgl() 61 VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv50_vmm_pgt_pte); in nv50_vmm_pgt_sgl() 66 u32 ptei, u32 ptes, struct nvkm_vmm_map *map) in nv50_vmm_pgt_dma() 69 VMM_SPAM(vmm, "DMAA %08x %08x PTE(s)", ptei, ptes); in nv50_vmm_pgt_dma() 71 while (ptes--) { in nv50_vmm_pgt_dma() 32 nv50_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr) nv50_vmm_pgt_pte() argument 58 nv50_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map) nv50_vmm_pgt_sgl() argument 65 nv50_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map) nv50_vmm_pgt_dma() argument 84 nv50_vmm_pgt_mem(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map) nv50_vmm_pgt_mem() argument 91 nv50_vmm_pgt_unmap(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) nv50_vmm_pgt_unmap() argument [all...] |
vmmgf100.c | 33 u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr) in gf100_vmm_pgt_pte() 39 while (ptes--) { in gf100_vmm_pgt_pte() 48 map->type += ptes * map->ctag; in gf100_vmm_pgt_pte() 50 while (ptes--) { in gf100_vmm_pgt_pte() 59 u32 ptei, u32 ptes, struct nvkm_vmm_map *map) in gf100_vmm_pgt_sgl() 61 VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, gf100_vmm_pgt_pte); in gf100_vmm_pgt_sgl() 66 u32 ptei, u32 ptes, struct nvkm_vmm_map *map) in gf100_vmm_pgt_dma() 69 VMM_SPAM(vmm, "DMAA %08x %08x PTE(s)", ptei, ptes); in gf100_vmm_pgt_dma() 71 while (ptes--) { in gf100_vmm_pgt_dma() 80 VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, gf100_vmm_pgt_pte); in gf100_vmm_pgt_dma() 32 gf100_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr) gf100_vmm_pgt_pte() argument 58 gf100_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map) gf100_vmm_pgt_sgl() argument 65 gf100_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map) gf100_vmm_pgt_dma() argument 84 gf100_vmm_pgt_mem(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map) gf100_vmm_pgt_mem() argument 91 gf100_vmm_pgt_unmap(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) gf100_vmm_pgt_unmap() argument [all...] |
vmm.c | 198 const struct nvkm_vmm_desc *desc, u32 ptei, u32 ptes) in nvkm_vmm_unref_sptes() 209 for (lpti = ptei >> sptb; ptes; spti = 0, lpti++) { in nvkm_vmm_unref_sptes() 210 const u32 pten = min(sptn - spti, ptes); in nvkm_vmm_unref_sptes() 212 ptes -= pten; in nvkm_vmm_unref_sptes() 222 for (ptes = 1, ptei++; ptei < lpti; ptes++, ptei++) { in nvkm_vmm_unref_sptes() 236 for (ptes = 1, ptei++; ptei < lpti; ptes++, ptei++) { in nvkm_vmm_unref_sptes() 243 TRA(it, "LPTE %05x: U -> S %d PTEs", pteb, ptes); in nvkm_vmm_unref_sptes() 244 pair->func->sparse(vmm, pgt->pt[0], pteb, ptes); in nvkm_vmm_unref_sptes() 197 nvkm_vmm_unref_sptes(struct nvkm_vmm_iter *it, struct nvkm_vmm_pt *pgt, const struct nvkm_vmm_desc *desc, u32 ptei, u32 ptes) nvkm_vmm_unref_sptes() argument 258 nvkm_vmm_unref_ptes(struct nvkm_vmm_iter *it, bool pfn, u32 ptei, u32 ptes) nvkm_vmm_unref_ptes() argument 296 nvkm_vmm_ref_sptes(struct nvkm_vmm_iter *it, struct nvkm_vmm_pt *pgt, const struct nvkm_vmm_desc *desc, u32 ptei, u32 ptes) nvkm_vmm_ref_sptes() argument 364 nvkm_vmm_ref_ptes(struct nvkm_vmm_iter *it, bool pfn, u32 ptei, u32 ptes) nvkm_vmm_ref_ptes() argument 381 nvkm_vmm_sparse_ptes(const struct nvkm_vmm_desc *desc, struct nvkm_vmm_pt *pgt, u32 ptei, u32 ptes) nvkm_vmm_sparse_ptes() argument 394 nvkm_vmm_sparse_unref_ptes(struct nvkm_vmm_iter *it, bool pfn, u32 ptei, u32 ptes) nvkm_vmm_sparse_unref_ptes() argument 406 nvkm_vmm_sparse_ref_ptes(struct nvkm_vmm_iter *it, bool pfn, u32 ptei, u32 ptes) nvkm_vmm_sparse_ref_ptes() argument 423 u32 pteb, ptei, ptes; nvkm_vmm_ref_hwpt() local 536 const u32 ptes = min_t(u64, it.cnt, pten - ptei); nvkm_vmm_iter() local 1908 nvkm_vmm_boot_ptes(struct nvkm_vmm_iter *it, bool pfn, u32 ptei, u32 ptes) nvkm_vmm_boot_ptes() argument [all...] |
vmmgk104.c | 26 struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) in gk104_vmm_lpt_invalid() 29 VMM_FO064(pt, vmm, ptei * 8, BIT_ULL(1) /* PRIV. */, ptes); in gk104_vmm_lpt_invalid() 25 gk104_vmm_lpt_invalid(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) gk104_vmm_lpt_invalid() argument
|
/kernel/linux/linux-5.10/arch/x86/xen/ |
grant-table.c | 27 pte_t **ptes; member 45 set_pte_at(&init_mm, addr, gnttab_shared_vm_area.ptes[i], in arch_gnttab_map_shared() 67 set_pte_at(&init_mm, addr, gnttab_status_vm_area.ptes[i], in arch_gnttab_map_status() 77 pte_t **ptes; in arch_gnttab_unmap() local 82 ptes = gnttab_status_vm_area.ptes; in arch_gnttab_unmap() 84 ptes = gnttab_shared_vm_area.ptes; in arch_gnttab_unmap() 89 set_pte_at(&init_mm, addr, ptes[i], __pte(0)); in arch_gnttab_unmap() 98 area->ptes[area->idx++] = pte; in gnttab_apply() [all...] |
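The grant-table lines above lean on the apply_to_page_range() idiom: the kernel helper walks every page of a virtual range and hands each PTE pointer to a callback, and gnttab_apply() simply stashes those pointers in area->ptes for the later set_pte_at() calls. A userspace model of the collect-via-callback pattern, where walk_range() is a stand-in for the kernel helper and int * stands in for pte_t *:

/* Collect one "PTE" pointer per page via a walker callback. */
#include <stddef.h>
#include <stdio.h>

struct vm_area_model {
	int **ptes;  /* collected per-page pointers */
	size_t idx;
};

static int collect(int *pte, void *data)
{
	struct vm_area_model *area = data;

	area->ptes[area->idx++] = pte;
	return 0; /* a nonzero return would abort the walk */
}

static int walk_range(int *table, size_t pages,
		      int (*fn)(int *, void *), void *data)
{
	for (size_t i = 0; i < pages; i++) {
		int ret = fn(&table[i], data);

		if (ret)
			return ret;
	}
	return 0;
}

int main(void)
{
	int table[4] = {0};
	int *slots[4];
	struct vm_area_model area = { .ptes = slots, .idx = 0 };

	walk_range(table, 4, collect, &area);
	printf("collected %zu pte pointers\n", area.idx);
	return 0;
}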
/kernel/linux/linux-6.6/arch/x86/xen/ |
grant-table.c | 27 pte_t **ptes; member 45 set_pte_at(&init_mm, addr, gnttab_shared_vm_area.ptes[i], in arch_gnttab_map_shared() 67 set_pte_at(&init_mm, addr, gnttab_status_vm_area.ptes[i], in arch_gnttab_map_status() 77 pte_t **ptes; in arch_gnttab_unmap() local 82 ptes = gnttab_status_vm_area.ptes; in arch_gnttab_unmap() 84 ptes = gnttab_shared_vm_area.ptes; in arch_gnttab_unmap() 89 set_pte_at(&init_mm, addr, ptes[i], __pte(0)); in arch_gnttab_unmap() 98 area->ptes[area->idx++] = pte; in gnttab_apply() [all...] |
/kernel/linux/linux-5.10/block/partitions/ |
efi.c | 269 * Description: Returns ptes on success, NULL on error. 332 * @ptes: PTEs ptr, filled on return. 338 gpt_header **gpt, gpt_entry **ptes) in is_gpt_valid() 343 if (!ptes) in is_gpt_valid() 432 if (!(*ptes = alloc_read_gpt_entries(state, *gpt))) in is_gpt_valid() 436 crc = efi_crc32((const unsigned char *) (*ptes), pt_size); in is_gpt_valid() 447 kfree(*ptes); in is_gpt_valid() 448 *ptes = NULL; in is_gpt_valid() 571 * @ptes: PTEs ptr, filled on return. 584 gpt_entry **ptes) in find_valid_gpt() 337 is_gpt_valid(struct parsed_partitions *state, u64 lba, gpt_header **gpt, gpt_entry **ptes) is_gpt_valid() argument 583 find_valid_gpt(struct parsed_partitions *state, gpt_header **gpt, gpt_entry **ptes) find_valid_gpt() argument 706 gpt_entry *ptes = NULL; efi_partition() local [all...] |
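In efi.c, is_gpt_valid() never hands back a partition-entry array that failed validation: the CRC32 of the entries read from disk must equal the value recorded in the GPT header, and on mismatch *ptes is kfree'd and NULLed. A compact model of that contract; checksum() here is a toy stand-in for efi_crc32(), and the memcpy stands in for alloc_read_gpt_entries():

/* Model of the entry-array validation in is_gpt_valid(). */
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

static uint32_t checksum(const unsigned char *buf, size_t len)
{
	uint32_t sum = 0;

	while (len--)
		sum = sum * 31 + *buf++; /* toy; the kernel uses CRC32 */
	return sum;
}

struct gpt_header_model {
	uint32_t num_entries;
	uint32_t sizeof_entry;
	uint32_t entries_crc32; /* checksum recorded in the header */
};

static int is_gpt_valid_model(const struct gpt_header_model *gpt,
			      const unsigned char *disk, unsigned char **ptes)
{
	size_t pt_size = (size_t)gpt->num_entries * gpt->sizeof_entry;

	if (!ptes || !(*ptes = malloc(pt_size)))
		return 0;
	memcpy(*ptes, disk, pt_size); /* models alloc_read_gpt_entries() */

	if (checksum(*ptes, pt_size) != gpt->entries_crc32) {
		free(*ptes); /* never return a table that failed its check */
		*ptes = NULL;
		return 0;
	}
	return 1;
}

int main(void)
{
	unsigned char disk[64] = { 1, 2, 3 };
	struct gpt_header_model gpt = { 4, 16, 0 };
	unsigned char *ptes = NULL;

	gpt.entries_crc32 = checksum(disk, sizeof(disk));
	printf("valid: %d\n", is_gpt_valid_model(&gpt, disk, &ptes)); /* 1 */
	free(ptes);
	return 0;
}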
/kernel/linux/linux-6.6/block/partitions/ |
efi.c | 267 * Description: Returns ptes on success, NULL on error. 330 * @ptes: PTEs ptr, filled on return. 336 gpt_header **gpt, gpt_entry **ptes) in is_gpt_valid() 341 if (!ptes) in is_gpt_valid() 430 if (!(*ptes = alloc_read_gpt_entries(state, *gpt))) in is_gpt_valid() 434 crc = efi_crc32((const unsigned char *) (*ptes), pt_size); in is_gpt_valid() 445 kfree(*ptes); in is_gpt_valid() 446 *ptes = NULL; in is_gpt_valid() 569 * @ptes: PTEs ptr, filled on return. 582 gpt_entry **ptes) in find_valid_gpt() 335 is_gpt_valid(struct parsed_partitions *state, u64 lba, gpt_header **gpt, gpt_entry **ptes) is_gpt_valid() argument 581 find_valid_gpt(struct parsed_partitions *state, gpt_header **gpt, gpt_entry **ptes) find_valid_gpt() argument 716 gpt_entry *ptes = NULL; efi_partition() local [all...] |
/kernel/linux/linux-5.10/arch/alpha/kernel/ |
pci_iommu.c | 89 arena->ptes = memblock_alloc_node(sizeof(*arena), align, nid); in iommu_arena_new_node() 90 if (!NODE_DATA(nid) || !arena->ptes) { in iommu_arena_new_node() 91 printk("%s: couldn't allocate arena ptes from node %d\n" in iommu_arena_new_node() 94 arena->ptes = memblock_alloc(mem_size, align); in iommu_arena_new_node() 95 if (!arena->ptes) in iommu_arena_new_node() 106 arena->ptes = memblock_alloc(mem_size, align); in iommu_arena_new_node() 107 if (!arena->ptes) in iommu_arena_new_node() 138 unsigned long *ptes; in iommu_arena_find_pages() 147 /* Search forward for the first mask-aligned sequence of N free ptes */ in iommu_arena_find_pages() 148 ptes = arena->ptes; in iommu_arena_find_pages() 137 unsigned long *ptes; iommu_arena_find_pages() local 191 unsigned long *ptes; iommu_arena_alloc() local 569 unsigned long *ptes; sg_fill() local 855 unsigned long *ptes; iommu_reserve() local 885 unsigned long *ptes; iommu_release() local 906 unsigned long *ptes; iommu_bind() local [all...] |
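The Alpha IOMMU code above allocates translation space by scanning arena->ptes for a run of n free (zero) entries whose start index satisfies an alignment mask, restarting the run just past any in-use entry. A simplified model of iommu_arena_find_pages(); the real function also wraps around from the arena's next-entry hint, which is omitted here:

/* Find the first mask-aligned run of n free PTE slots. */
#include <stddef.h>
#include <stdio.h>

static long find_pages(const unsigned long *ptes, size_t nent,
		       size_t n, unsigned long mask)
{
	size_t p = 0, i = 0;

	while (i < n && p + i < nent) {
		if (ptes[p + i]) {
			/* Used slot: restart at the next aligned index past it. */
			p = (p + i + 1 + mask) & ~mask;
			i = 0;
		} else {
			i++;
		}
	}
	return i == n ? (long)p : -1;
}

int main(void)
{
	unsigned long ptes[16] = {0};

	ptes[2] = 1; /* one slot already in use */
	/* Four free slots at a 4-aligned index: 0..3 collides, 4..7 fits. */
	printf("found at %ld\n", find_pages(ptes, 16, 4, 3));
	return 0;
}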
/kernel/linux/linux-6.6/arch/alpha/kernel/ |
pci_iommu.c | 79 arena->ptes = memblock_alloc(mem_size, align); in iommu_arena_new_node() 80 if (!arena->ptes) in iommu_arena_new_node() 109 unsigned long *ptes; in iommu_arena_find_pages() 118 /* Search forward for the first mask-aligned sequence of N free ptes */ in iommu_arena_find_pages() 119 ptes = arena->ptes; in iommu_arena_find_pages() 131 if (ptes[p+i]) { in iommu_arena_find_pages() 165 unsigned long *ptes; in iommu_arena_alloc() 170 /* Search for N empty ptes */ in iommu_arena_alloc() 171 ptes = arena->ptes; in iommu_arena_alloc() 108 unsigned long *ptes; iommu_arena_find_pages() local 164 unsigned long *ptes; iommu_arena_alloc() local 542 unsigned long *ptes; sg_fill() local 832 unsigned long *ptes; iommu_reserve() local 862 unsigned long *ptes; iommu_release() local 883 unsigned long *ptes; iommu_bind() local [all...] |
/kernel/linux/linux-5.10/drivers/staging/gasket/ |
gasket_page_table.c | 302 static bool gasket_is_pte_range_free(struct gasket_page_table_entry *ptes, in gasket_is_pte_range_free() argument 308 if (ptes[i].status != PTE_FREE) in gasket_is_pte_range_free() 466 struct gasket_page_table_entry *ptes, in gasket_perform_mapping() 484 ptes[i].page = NULL; in gasket_perform_mapping() 485 ptes[i].offset = offset; in gasket_perform_mapping() 486 ptes[i].dma_addr = pg_tbl->coherent_pages[0].paddr + in gasket_perform_mapping() 500 ptes[i].page = page; in gasket_perform_mapping() 501 ptes[i].offset = offset; in gasket_perform_mapping() 504 ptes[i].dma_addr = in gasket_perform_mapping() 509 ptes[ in gasket_perform_mapping() 465 gasket_perform_mapping(struct gasket_page_table *pg_tbl, struct gasket_page_table_entry *ptes, u64 __iomem *slots, ulong host_addr, uint num_pages, int is_simple_mapping) gasket_perform_mapping() argument 590 gasket_perform_unmapping(struct gasket_page_table *pg_tbl, struct gasket_page_table_entry *ptes, u64 __iomem *slots, uint num_pages, int is_simple_mapping) gasket_perform_unmapping() argument [all...] |
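For the gasket driver, gasket_is_pte_range_free() is the guard in front of gasket_perform_mapping(): a request is accepted only if every slot in the target range is still PTE_FREE, so a single linear scan prevents double-mapping. A minimal model with hypothetical types:

/* Accept a mapping only over an all-free PTE range. */
#include <stdbool.h>
#include <stdio.h>

enum pte_status { PTE_FREE, PTE_INUSE };

struct pte_model {
	enum pte_status status;
};

static bool range_free(const struct pte_model *ptes, unsigned int n)
{
	for (unsigned int i = 0; i < n; i++)
		if (ptes[i].status != PTE_FREE)
			return false;
	return true;
}

int main(void)
{
	struct pte_model tbl[8] = { { PTE_FREE } };

	tbl[5].status = PTE_INUSE;
	printf("0..3 free: %d\n", range_free(&tbl[0], 4)); /* 1 */
	printf("4..7 free: %d\n", range_free(&tbl[4], 4)); /* 0 */
	return 0;
}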
/kernel/linux/linux-5.10/arch/powerpc/include/asm/ |
plpar_wrappers.h | 169 * ptes must be 8*sizeof(unsigned long) 172 unsigned long *ptes) in plpar_pte_read_4() 180 memcpy(ptes, retbuf, 8*sizeof(unsigned long)); in plpar_pte_read_4() 187 * ptes must be 8*sizeof(unsigned long) 190 unsigned long *ptes) in plpar_pte_read_4_raw() 198 memcpy(ptes, retbuf, 8*sizeof(unsigned long)); in plpar_pte_read_4_raw() 396 unsigned long *ptes) in plpar_pte_read_4() 171 plpar_pte_read_4(unsigned long flags, unsigned long ptex, unsigned long *ptes) plpar_pte_read_4() argument 189 plpar_pte_read_4_raw(unsigned long flags, unsigned long ptex, unsigned long *ptes) plpar_pte_read_4_raw() argument 395 plpar_pte_read_4(unsigned long flags, unsigned long ptex, unsigned long *ptes) plpar_pte_read_4() argument
|
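The repeated "ptes must be 8*sizeof(unsigned long)" comment in plpar_wrappers.h is a caller contract: an H_READ hypercall with the 4-entry flag returns four PTEs as eight machine words (high/low pairs) in the hypercall return buffer, and plpar_pte_read_4() memcpy's exactly that much into the caller's buffer. A sketch of the contract where fake_hcall() stands in for plpar_hcall9() and the PTE values are dummies:

/* Model of the plpar_pte_read_4() buffer contract. */
#include <stdio.h>
#include <string.h>

#define HCALL9_BUFSIZE 9 /* the hypercall returns up to nine words */

static long fake_hcall(unsigned long retbuf[HCALL9_BUFSIZE], unsigned long ptex)
{
	for (int i = 0; i < 8; i++)
		retbuf[i] = ptex * 4 + i / 2; /* dummy high/low pairs */
	retbuf[8] = 0;
	return 0;
}

static long pte_read_4_model(unsigned long ptex, unsigned long *ptes)
{
	unsigned long retbuf[HCALL9_BUFSIZE];
	long rc = fake_hcall(retbuf, ptex);

	/* The header's contract: ptes must hold 8*sizeof(unsigned long). */
	memcpy(ptes, retbuf, 8 * sizeof(unsigned long));
	return rc;
}

int main(void)
{
	unsigned long ptes[8]; /* one group of 4 PTEs = 8 words */

	pte_read_4_model(10, ptes);
	for (int i = 0; i < 8; i += 2)
		printf("pte %d: hi=%lu lo=%lu\n", i / 2, ptes[i], ptes[i + 1]);
	return 0;
}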
/kernel/linux/linux-6.6/arch/powerpc/include/asm/ |
plpar_wrappers.h | 172 * ptes must be 8*sizeof(unsigned long) 175 unsigned long *ptes) in plpar_pte_read_4() 183 memcpy(ptes, retbuf, 8*sizeof(unsigned long)); in plpar_pte_read_4() 190 * ptes must be 8*sizeof(unsigned long) 193 unsigned long *ptes) in plpar_pte_read_4_raw() 201 memcpy(ptes, retbuf, 8*sizeof(unsigned long)); in plpar_pte_read_4_raw() 399 unsigned long *ptes) in plpar_pte_read_4() 174 plpar_pte_read_4(unsigned long flags, unsigned long ptex, unsigned long *ptes) plpar_pte_read_4() argument 192 plpar_pte_read_4_raw(unsigned long flags, unsigned long ptex, unsigned long *ptes) plpar_pte_read_4_raw() argument 398 plpar_pte_read_4(unsigned long flags, unsigned long ptex, unsigned long *ptes) plpar_pte_read_4() argument
|