Searched refs:mmu_mode (Results 1 - 16 of 16) sorted by relevance

/device/soc/rockchip/common/kernel/drivers/gpu/arm/bifrost/mmu/
mali_kbase_mmu.c
    1169  kbdev->mmu_mode->entry_invalidate(&page[i]);  in kbase_mmu_alloc_pgd()
    1211  target_pgd = kbdev->mmu_mode->pte_to_phy_addr(page[vpfn]);  in mmu_get_next_pgd()
    1222  kbdev->mmu_mode->entry_set_pte(page, vpfn, target_pgd);  in mmu_get_next_pgd()
    1280  struct kbase_mmu_mode const *mmu_mode;  in mmu_insert_pages_failure_recovery() (local)
    1288  mmu_mode = kbdev->mmu_mode;  in mmu_insert_pages_failure_recovery()
    1312  if (mmu_mode->ate_is_valid(page[idx], level))  in mmu_insert_pages_failure_recovery()
    1315  pgd = mmu_mode->pte_to_phy_addr(page[idx]);  in mmu_insert_pages_failure_recovery()
    1333  num_of_valid_entries = mmu_mode->get_num_valid_entries(page);  in mmu_insert_pages_failure_recovery()
    1352  mmu_mode  in mmu_insert_pages_failure_recovery()
    1553  struct kbase_mmu_mode const *mmu_mode;  in kbase_mmu_insert_pages_no_flush() (local)
    2007  struct kbase_mmu_mode const *mmu_mode;  in kbase_mmu_teardown_pages() (local)
    2299  struct kbase_mmu_mode const *mmu_mode;  in mmu_teardown_level() (local)
    2411  struct kbase_mmu_mode const *mmu_mode;  in kbasep_mmu_dump_level() (local)
    [all...]
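
The hits above come from the page-table walk and PGD allocation path. Below is a minimal sketch of that pattern, assuming the kbase headers; alloc_and_map_pgd() is a hypothetical stand-in for the allocation done in kbase_mmu_alloc_pgd(), and 512 entries per PGD page is an assumption, not something these hits show.

    static phys_addr_t sketch_get_next_pgd(struct kbase_device *kbdev,
                                           u64 *page, unsigned int vpfn)
    {
        phys_addr_t target_pgd;
        u64 *new_pgd_page;
        unsigned int i;

        /* Decode the PTE of the current level into the physical address
         * of the next level's page directory. */
        target_pgd = kbdev->mmu_mode->pte_to_phy_addr(page[vpfn]);

        if (!target_pgd) {
            /* Allocate and map a fresh PGD page (hypothetical helper) ... */
            target_pgd = alloc_and_map_pgd(kbdev, &new_pgd_page);

            /* ... mark every entry of the new page invalid, as the hit in
             * kbase_mmu_alloc_pgd() does ... */
            for (i = 0; i < 512; i++)
                kbdev->mmu_mode->entry_invalidate(&new_pgd_page[i]);

            /* ... and link it into the current level. */
            kbdev->mmu_mode->entry_set_pte(&page[vpfn], target_pgd);
        }

        return target_pgd;
    }

Note that the copies disagree on the entry_set_pte() signature: the kernel-tree bifrost copy passes (page, vpfn, target_pgd) at line 1222, while the vendor and midgard copies pass (&page[vpfn], target_pgd).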
/device/soc/rockchip/common/vendor/drivers/gpu/arm/bifrost/mmu/
mali_kbase_mmu.c
    936   kbdev->mmu_mode->entry_invalidate(&page[i]);  in kbase_mmu_alloc_pgd()
    978   target_pgd = kbdev->mmu_mode->pte_to_phy_addr(page[vpfn]);  in mmu_get_next_pgd()
    987   kbdev->mmu_mode->entry_set_pte(&page[vpfn], target_pgd);  in mmu_get_next_pgd()
    1035  struct kbase_mmu_mode const *mmu_mode;  in mmu_insert_pages_failure_recovery() (local)
    1043  mmu_mode = kbdev->mmu_mode;  in mmu_insert_pages_failure_recovery()
    1064  if (mmu_mode->ate_is_valid(page[idx], level)) {  in mmu_insert_pages_failure_recovery()
    1068  pgd = mmu_mode->pte_to_phy_addr(page[idx]);  in mmu_insert_pages_failure_recovery()
    1087  mmu_mode->entry_invalidate(&page[idx + i]);  in mmu_insert_pages_failure_recovery()
    1223  tmp_pgd = kbdev->mmu_mode  in cleanup_empty_pte()
    1256  struct kbase_mmu_mode const *mmu_mode;  in kbase_mmu_insert_pages_no_flush() (local)
    1573  struct kbase_mmu_mode const *mmu_mode;  in kbase_mmu_teardown_pages() (local)
    1803  struct kbase_mmu_mode const *mmu_mode;  in mmu_teardown_level() (local)
    1917  struct kbase_mmu_mode const *mmu_mode;  in kbasep_mmu_dump_level() (local)
    [all...]
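
The mmu_insert_pages_failure_recovery() hits show the rollback path: the code walks down the levels with pte_to_phy_addr(), stops once ate_is_valid() reports an address-translation entry, and wipes the partially written entries with entry_invalidate() (the kernel-tree bifrost copy also consults get_num_valid_entries()). A sketch of that control flow follows, assuming the kbase headers; pgd_index_for_level() and remap_next_level() are hypothetical stand-ins for the index arithmetic and kmap handling the real function does inline, and the level count is an assumption.

    static void sketch_insert_failure_recovery(struct kbase_device *kbdev,
                                               u64 *page, u64 vpfn, size_t count)
    {
        struct kbase_mmu_mode const *mmu_mode = kbdev->mmu_mode;
        unsigned int idx = 0;
        int level;
        size_t i;

        for (level = 0; level < 3; level++) {    /* level count assumed */
            idx = pgd_index_for_level(vpfn, level);    /* hypothetical */
            if (mmu_mode->ate_is_valid(page[idx], level))
                break;    /* reached the partially inserted ATE */
            /* Descend to the next level's PGD page (hypothetical remap). */
            page = remap_next_level(kbdev,
                                    mmu_mode->pte_to_phy_addr(page[idx]));
        }

        /* Wipe the entries that were written before the failure. */
        for (i = 0; i < count; i++)
            mmu_mode->entry_invalidate(&page[idx + i]);
    }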
/device/soc/rockchip/common/vendor/drivers/gpu/arm/midgard/
mali_kbase_mmu.c
    388   kctx->kbdev->mmu_mode->entry_invalidate(&page[i]);  in kbase_mmu_alloc_pgd()
    435   target_pgd = kctx->kbdev->mmu_mode->pte_to_phy_addr(page[vpfn]);  in mmu_get_next_pgd()
    444   kctx->kbdev->mmu_mode->entry_set_pte(&page[vpfn], target_pgd);  in mmu_get_next_pgd()
    501   target_pgd = kctx->kbdev->mmu_mode->pte_to_phy_addr(page[vpfn]);  in mmu_insert_pages_recover_get_next_pgd()
    530   struct kbase_mmu_mode const *mmu_mode;  in mmu_insert_pages_failure_recovery() (local)
    540   mmu_mode = kctx->kbdev->mmu_mode;  in mmu_insert_pages_failure_recovery()
    562   mmu_mode->entry_invalidate(&pgd_page[index + i]);  in mmu_insert_pages_failure_recovery()
    657   kctx->kbdev->mmu_mode->entry_set_ate(&pgd_page[ofs], phys, flags);  in kbase_mmu_insert_single_page()
    763   kctx->kbdev->mmu_mode  in kbase_mmu_insert_pages_no_flush()
    995   struct kbase_mmu_mode const *mmu_mode;  in kbase_mmu_teardown_pages() (local)
    1077  struct kbase_mmu_mode const *mmu_mode;  in kbase_mmu_update_pages() (local)
    1180  struct kbase_mmu_mode const *mmu_mode;  in mmu_teardown_level() (local)
    1279  struct kbase_mmu_mode const *mmu_mode;  in kbasep_mmu_dump_level() (local)
    [all...]
mali_kbase_device.c
    257   kbdev->mmu_mode = kbase_mmu_mode_get_aarch64();  in kbase_device_init()
    259   kbdev->mmu_mode = kbase_mmu_mode_get_lpae();  in kbase_device_init()
mali_kbase_defs.h
    976   struct kbase_mmu_mode const *mmu_mode;  (member)
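
Across all four copies, mmu_mode is a pointer to a const ops table held in struct kbase_device (the mali_kbase_defs.h hits) and filled in at device init with either the AArch64 or the LPAE page-table format. A sketch of that ops-table shape follows, using only the callback names visible in these results; the prototypes are simplified assumptions, not copied from the driver.

    struct kbase_mmu_mode {
        void (*entry_invalidate)(u64 *entry);
        void (*entry_set_ate)(u64 *entry, phys_addr_t phy, unsigned long flags);
        void (*entry_set_pte)(u64 *entry, phys_addr_t phy);
        phys_addr_t (*pte_to_phy_addr)(u64 entry);
        int (*ate_is_valid)(u64 ate, int level);
        unsigned int (*get_num_valid_entries)(u64 *pgd);
        unsigned long flags;    /* e.g. KBASE_MMU_MODE_HAS_NON_CACHEABLE */
    };

    /* Held by the device object and selected once at init time: */
    struct kbase_device {
        /* ... other members elided ... */
        struct kbase_mmu_mode const *mmu_mode;
    };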
/device/soc/rockchip/common/kernel/drivers/gpu/arm/midgard/
mali_kbase_mmu.c
    424   kctx->kbdev->mmu_mode->entry_invalidate(&page[i]);  in kbase_mmu_alloc_pgd()
    471   target_pgd = kctx->kbdev->mmu_mode->pte_to_phy_addr(page[vpfn]);  in mmu_get_next_pgd()
    481   kctx->kbdev->mmu_mode->entry_set_pte(&page[vpfn], target_pgd);  in mmu_get_next_pgd()
    538   target_pgd = kctx->kbdev->mmu_mode->pte_to_phy_addr(page[vpfn]);  in mmu_insert_pages_recover_get_next_pgd()
    568   struct kbase_mmu_mode const *mmu_mode;  in mmu_insert_pages_failure_recovery() (local)
    578   mmu_mode = kctx->kbdev->mmu_mode;  in mmu_insert_pages_failure_recovery()
    599   mmu_mode->entry_invalidate(&pgd_page[index + i]);  in mmu_insert_pages_failure_recovery()
    697   kctx->kbdev->mmu_mode->entry_set_ate(&pgd_page[ofs],  in kbase_mmu_insert_single_page()
    809   kctx->kbdev->mmu_mode  in kbase_mmu_insert_pages_no_flush()
    1048  struct kbase_mmu_mode const *mmu_mode;  in kbase_mmu_teardown_pages() (local)
    1130  struct kbase_mmu_mode const *mmu_mode;  in kbase_mmu_update_pages() (local)
    1231  struct kbase_mmu_mode const *mmu_mode;  in mmu_teardown_level() (local)
    1331  struct kbase_mmu_mode const *mmu_mode;  in kbasep_mmu_dump_level() (local)
    [all...]
mali_kbase_device.c
    249   kbdev->mmu_mode = kbase_mmu_mode_get_aarch64();  in kbase_device_init()
    251   kbdev->mmu_mode = kbase_mmu_mode_get_lpae();  in kbase_device_init()
mali_kbase_defs.h
    986   struct kbase_mmu_mode const *mmu_mode;  (member)
/device/soc/rockchip/common/vendor/drivers/gpu/arm/bifrost/device/
mali_kbase_device.c
    210   kbdev->mmu_mode = kbase_mmu_mode_get_aarch64();  in kbase_device_misc_init()
    212   kbdev->mmu_mode = kbase_mmu_mode_get_lpae();  in kbase_device_misc_init()
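
The device-init hits all assign one of two ops tables. A sketch of that selection follows, assuming the kbase headers; the condition shown is hypothetical, since the hits only show the two assignments (and the kernel-tree bifrost copy further below has only the AArch64 branch).

    static void sketch_select_mmu_mode(struct kbase_device *kbdev)
    {
        /* Hypothetical capability check; the real condition is not part
         * of these search results. */
        if (gpu_has_aarch64_page_table_format(kbdev))
            kbdev->mmu_mode = kbase_mmu_mode_get_aarch64();
        else
            kbdev->mmu_mode = kbase_mmu_mode_get_lpae();
    }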
/device/soc/rockchip/common/vendor/drivers/gpu/arm/bifrost/backend/gpu/
mali_kbase_instr_backend.c
    360   if (kbdev->mmu_mode->flags & KBASE_MMU_MODE_HAS_NON_CACHEABLE) {  in kbase_instr_hwcnt_sample_done()
/device/soc/rockchip/common/kernel/drivers/gpu/arm/bifrost/device/
mali_kbase_device.c
    310   kbdev->mmu_mode = kbase_mmu_mode_get_aarch64();  in kbase_device_misc_init()
/device/soc/rockchip/common/vendor/drivers/gpu/arm/bifrost/
mali_kbase_defs.h
    678   * @mmu_mode: Pointer to the object containing methods for programming  (doc comment)
    916   struct kbase_mmu_mode const *mmu_mode;  (member)
mali_kbase_hwcnt_backend_jm.c
    414   if (kctx->kbdev->mmu_mode->flags & KBASE_MMU_MODE_HAS_NON_CACHEABLE) {  in kbasep_hwcnt_backend_jm_dump_alloc()
mali_kbase_mem.c
    1847  if (kctx->kbdev->mmu_mode->flags & KBASE_MMU_MODE_HAS_NON_CACHEABLE) {  in kbase_update_region_flags()
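
The instrumentation, hwcnt and memory-flags hits use the ops table's flags word as a capability query rather than calling through it. A sketch of that check, assuming the kbase headers; the helper name is hypothetical.

    static bool sketch_can_use_non_cacheable(struct kbase_context *kctx)
    {
        /* Page-table formats advertise optional capabilities through the
         * flags word of the selected mmu_mode ops table. */
        return (kctx->kbdev->mmu_mode->flags &
                KBASE_MMU_MODE_HAS_NON_CACHEABLE) != 0;
    }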
/device/soc/rockchip/common/kernel/drivers/gpu/arm/bifrost/
mali_kbase_defs.h
    715   * @mmu_mode: Pointer to the object containing methods for programming  (doc comment)
    1001  struct kbase_mmu_mode const *mmu_mode;  (member)
mali_kbase_mem.c
    2164  if (kctx->kbdev->mmu_mode->flags &  in kbase_update_region_flags()

Completed in 36 milliseconds