Searched refs:kbase_dma_addr (Results 1 - 23 of 23) sorted by relevance

/device/soc/rockchip/common/vendor/drivers/gpu/arm/midgard/
mali_kbase_10969_workaround.c
    93  kbase_sync_single_for_cpu(katom->kctx->kbdev, kbase_dma_addr(p) + offset, copy_size, DMA_BIDIRECTIONAL); in kbasep_10969_workaround_clamp_coordinates()
   103  kbase_sync_single_for_cpu(katom->kctx->kbdev, kbase_dma_addr(p), JOB_HEADER_SIZE - copy_size, in kbasep_10969_workaround_clamp_coordinates()
   171  kbase_sync_single_for_device(katom->kctx->kbdev, kbase_dma_addr(p) + offset, copy_size, DMA_TO_DEVICE); in kbasep_10969_workaround_clamp_coordinates()
   177  kbase_sync_single_for_device(katom->kctx->kbdev, kbase_dma_addr(p), JOB_HEADER_SIZE - copy_size, in kbasep_10969_workaround_clamp_coordinates()
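The four workaround hits bracket a CPU-side patch of a fragment job header: sync-for-cpu before the CPU edits the header, sync-for-device afterwards. Because the header can straddle a page boundary, the tail of the first page (copy_size bytes from offset) and the head of the next page (the remaining JOB_HEADER_SIZE - copy_size bytes) are synced as two separate windows. A minimal sketch of that pattern, assuming a page pair p/p_next and the kbase helpers and JOB_HEADER_SIZE constant from the hit lines; the wrapper itself is hypothetical:

#include <linux/kernel.h>	/* min_t() */
#include <linux/dma-mapping.h>

/* Hypothetical wrapper over the two-window sync seen in
 * kbasep_10969_workaround_clamp_coordinates(); `p` holds the first part
 * of the job header, `p_next` the remainder when it spills over. */
static void sync_job_header(struct kbase_device *kbdev,
			    struct page *p, struct page *p_next,
			    size_t offset, bool for_device)
{
	size_t copy_size = min_t(size_t, JOB_HEADER_SIZE, PAGE_SIZE - offset);

	if (!for_device) {
		/* Make the header visible to the CPU before patching it. */
		kbase_sync_single_for_cpu(kbdev, kbase_dma_addr(p) + offset,
					  copy_size, DMA_BIDIRECTIONAL);
		if (copy_size < JOB_HEADER_SIZE)
			kbase_sync_single_for_cpu(kbdev, kbase_dma_addr(p_next),
						  JOB_HEADER_SIZE - copy_size,
						  DMA_BIDIRECTIONAL);
	} else {
		/* Write the patched header back out for the GPU. */
		kbase_sync_single_for_device(kbdev, kbase_dma_addr(p) + offset,
					     copy_size, DMA_TO_DEVICE);
		if (copy_size < JOB_HEADER_SIZE)
			kbase_sync_single_for_device(kbdev, kbase_dma_addr(p_next),
						     JOB_HEADER_SIZE - copy_size,
						     DMA_TO_DEVICE);
	}
}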
mali_kbase_mmu.c
   391  kbase_mmu_sync_pgd(kctx->kbdev, kbase_dma_addr(p), PAGE_SIZE); in kbase_mmu_alloc_pgd()
   446  kbase_mmu_sync_pgd(kctx->kbdev, kbase_dma_addr(p), PAGE_SIZE); in mmu_get_next_pgd()
   568  kbase_mmu_sync_pgd(kctx->kbdev, kbase_dma_addr(p), PAGE_SIZE); in mmu_insert_pages_failure_recovery()
   663  kbase_mmu_sync_pgd(kctx->kbdev, kbase_dma_addr(p) + (index * sizeof(u64)), count * sizeof(u64)); in kbase_mmu_insert_single_page()
   770  kbase_mmu_sync_pgd(kctx->kbdev, kbase_dma_addr(p) + (index * sizeof(u64)), count * sizeof(u64)); in kbase_mmu_insert_pages_no_flush()
  1043  kbase_mmu_sync_pgd(kctx->kbdev, kbase_dma_addr(p) + (index * sizeof(u64)), count * sizeof(u64)); in kbase_mmu_teardown_pages()
  1140  kbase_mmu_sync_pgd(kctx->kbdev, kbase_dma_addr(p) + (index * sizeof(u64)), count * sizeof(u64)); in kbase_mmu_update_pages()
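All of the MMU hits funnel through kbase_mmu_sync_pgd(): whole-page syncs (PAGE_SIZE) when a page directory is freshly allocated, and narrow syncs covering only the dirtied PTE range (index * sizeof(u64) offset, count * sizeof(u64) length) on insert, teardown, and update. A hedged sketch of what that helper plausibly reduces to; the one-line body is an assumption consistent with the dma_sync_single_for_device() calls elsewhere in these results, not a quote of the driver:

#include <linux/dma-mapping.h>

/* Assumed shape of kbase_mmu_sync_pgd(): the GPU page tables are
 * streaming-DMA mapped, so freshly written PTEs must be flushed from the
 * CPU caches before the GPU's MMU walks them. */
static void kbase_mmu_sync_pgd(struct kbase_device *kbdev,
			       dma_addr_t handle, size_t size)
{
	dma_sync_single_for_device(kbdev->dev, handle, size, DMA_TO_DEVICE);
}

/* Typical narrow call after updating `count` entries starting at `index`:
 *
 *	kbase_mmu_sync_pgd(kbdev,
 *			   kbase_dma_addr(p) + (index * sizeof(u64)),
 *			   count * sizeof(u64));
 */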
mali_kbase_mem_pool.c
   128  dma_sync_single_for_device(dev, kbase_dma_addr(p), PAGE_SIZE, DMA_BIDIRECTIONAL); in kbase_mem_pool_sync_page()
   186  dma_addr_t dma_addr = kbase_dma_addr(p); in kbase_mem_pool_free_page()
mali_kbase_mem.h
   858  static inline dma_addr_t kbase_dma_addr(struct page *p) in kbase_dma_addr() function
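The mali_kbase_mem.h hit is the definition every other result calls into. In the kbase driver the DMA address of a page is stashed in page->private when the page is mapped, so kbase_dma_addr() is just a typed accessor; on 32-bit kernels where unsigned long cannot hold a 64-bit dma_addr_t, the value is stored in units of PAGE_SIZE and shifted back on read. A sketch along those lines (the setter is the assumed counterpart; exact details may differ between the midgard and bifrost copies of the header):

#include <linux/mm.h>		/* struct page, page_private(), SetPagePrivate() */
#include <linux/types.h>	/* dma_addr_t */

static inline dma_addr_t kbase_dma_addr(struct page *p)
{
	/* When dma_addr_t is wider than page->private, the stored value is
	 * the address in pages, so shift it back up. */
	if (sizeof(dma_addr_t) > sizeof(p->private))
		return ((dma_addr_t)page_private(p)) << PAGE_SHIFT;

	return (dma_addr_t)page_private(p);
}

/* Assumed counterpart used when the page is first DMA-mapped: */
static inline void kbase_set_dma_addr(struct page *p, dma_addr_t dma_addr)
{
	SetPagePrivate(p);
	if (sizeof(dma_addr_t) > sizeof(p->private))
		set_page_private(p, dma_addr >> PAGE_SHIFT);
	else
		set_page_private(p, dma_addr);
}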
mali_kbase_mem.c
  1058  dma_addr = kbase_dma_addr(cpu_page) + offset; in kbase_sync_single()
  1079  dma_sync_single_for_cpu(kctx->kbdev->dev, kbase_dma_addr(gpu_page) + offset, size, DMA_BIDIRECTIONAL); in kbase_sync_single()
  1087  dma_sync_single_for_device(kctx->kbdev->dev, kbase_dma_addr(gpu_page) + offset, size, DMA_BIDIRECTIONAL); in kbase_sync_single()
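The mali_kbase_mem.c hits show the partial-page case: kbase_sync_single() syncs only the [offset, offset + size) window of a page around a CPU-side copy, using DMA_BIDIRECTIONAL in both directions since the same mapping serves reads and writes. A self-contained sketch of that round trip; the function name and kmap usage here are illustrative, not the driver's exact code:

#include <linux/dma-mapping.h>
#include <linux/highmem.h>
#include <linux/string.h>

/* Patch `size` bytes at `offset` inside a DMA-mapped page, bracketing the
 * CPU access with ownership transfers the way kbase_sync_single() does. */
static void sync_and_patch_window(struct device *dev, struct page *page,
				  size_t offset, const void *src, size_t size)
{
	dma_addr_t dma_addr = kbase_dma_addr(page) + offset;
	void *va;

	/* Hand the window to the CPU: invalidate stale cache lines. */
	dma_sync_single_for_cpu(dev, dma_addr, size, DMA_BIDIRECTIONAL);

	va = kmap_local_page(page);	/* kmap_atomic() on older kernels */
	memcpy((char *)va + offset, src, size);
	kunmap_local(va);

	/* Hand it back to the device: write dirty lines out. */
	dma_sync_single_for_device(dev, dma_addr, size, DMA_BIDIRECTIONAL);
}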
/device/soc/rockchip/common/kernel/drivers/gpu/arm/midgard/
mali_kbase_10969_workaround.c
    93  kbase_dma_addr(p) + offset, in kbasep_10969_workaround_clamp_coordinates()
   105  kbase_dma_addr(p), in kbasep_10969_workaround_clamp_coordinates()
   187  kbase_dma_addr(p) + offset, in kbasep_10969_workaround_clamp_coordinates()
   196  kbase_dma_addr(p), in kbasep_10969_workaround_clamp_coordinates()
mali_kbase_mmu.c
   426  kbase_mmu_sync_pgd(kctx->kbdev, kbase_dma_addr(p), PAGE_SIZE); in kbase_mmu_alloc_pgd()
   483  kbase_mmu_sync_pgd(kctx->kbdev, kbase_dma_addr(p), PAGE_SIZE); in mmu_get_next_pgd()
   604  kbase_mmu_sync_pgd(kctx->kbdev, kbase_dma_addr(p), PAGE_SIZE); in mmu_insert_pages_failure_recovery()
   705  kbase_dma_addr(p) + (index * sizeof(u64)), in kbase_mmu_insert_single_page()
   818  kbase_dma_addr(p) + (index * sizeof(u64)), in kbase_mmu_insert_pages_no_flush()
  1095  kbase_dma_addr(p) + (index * sizeof(u64)), in kbase_mmu_teardown_pages()
  1191  kbase_dma_addr(p) + (index * sizeof(u64)), in kbase_mmu_update_pages()
mali_kbase_mem_pool.c
   136  dma_sync_single_for_device(dev, kbase_dma_addr(p), in kbase_mem_pool_sync_page()
   198  dma_addr_t dma_addr = kbase_dma_addr(p); in kbase_mem_pool_free_page()
mali_kbase_mem.h
   867  static inline dma_addr_t kbase_dma_addr(struct page *p) in kbase_dma_addr() function
mali_kbase_mem.c
  1087  dma_addr = kbase_dma_addr(cpu_page) + offset; in kbase_sync_single()
  1109  kbase_dma_addr(gpu_page) + offset, in kbase_sync_single()
  1119  kbase_dma_addr(gpu_page) + offset, in kbase_sync_single()
/device/soc/rockchip/common/kernel/drivers/gpu/arm/bifrost/
mali_kbase_hwcnt_backend_csf_if_fw.c
   450  kbase_dma_addr(pg), in kbasep_hwcnt_backend_csf_if_fw_ring_buf_sync()
   455  kbase_dma_addr(pg), in kbasep_hwcnt_backend_csf_if_fw_ring_buf_sync()
   470  kbase_dma_addr(pg), PAGE_SIZE, in kbasep_hwcnt_backend_csf_if_fw_ring_buf_sync()
   474  kbase_dma_addr(pg), in kbasep_hwcnt_backend_csf_if_fw_ring_buf_sync()
mali_kbase_mem_pool.c
   129  dma_sync_single_for_device(dev, kbase_dma_addr(p), in kbase_mem_pool_sync_page()
   192  dma_addr_t dma_addr = kbase_dma_addr(p); in kbase_mem_pool_free_page()
mali_kbase_mem.h
  1500  static inline dma_addr_t kbase_dma_addr(struct page *p) in kbase_dma_addr() function
mali_kbase_mem.c
  1808  dma_addr = kbase_dma_addr(cpu_page) + offset; in kbase_sync_single()
  1830  kbase_dma_addr(gpu_page) + offset, in kbase_sync_single()
  1840  kbase_dma_addr(gpu_page) + offset, in kbase_sync_single()
/device/soc/rockchip/common/kernel/drivers/gpu/arm/bifrost/mmu/
mali_kbase_mmu.c
  1171  kbase_mmu_sync_pgd(kbdev, kbase_dma_addr(p), PAGE_SIZE); in kbase_mmu_alloc_pgd()
  1224  kbase_mmu_sync_pgd(kbdev, kbase_dma_addr(p), PAGE_SIZE); in mmu_get_next_pgd()
  1357  kbase_dma_addr(phys_to_page(pgd)) + 8 * idx, in mmu_insert_pages_failure_recovery()
  1483  kbase_dma_addr(p) + (index * sizeof(u64)), in kbase_mmu_insert_single_page()
  1680  kbase_dma_addr(p) + (vindex * sizeof(u64)), in kbase_mmu_insert_pages_no_flush()
  1980  kbase_dma_addr(phys_to_page( in kbase_mmu_update_and_free_parent_pgds()
  2132  kbdev, kbase_dma_addr(phys_to_page(pgd)) + 8 * index, in kbase_mmu_teardown_pages()
  2261  kbase_dma_addr(p) + (index * sizeof(u64)), in kbase_mmu_update_pages_no_flush()
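Unlike the older midgard hits, the bifrost MMU code tracks page directories as physical addresses, so the results above first recover the struct page with phys_to_page() before reading the stashed DMA address; the literal 8 is simply sizeof(u64), the width of one PTE. A hedged one-call sketch of the teardown-path sync, reusing the kbase_mmu_sync_pgd() shape sketched earlier; the wrapper name is hypothetical:

#include <linux/mm.h>	/* phys_to_page() is arch-provided via this chain */

/* Flush `pcount` invalidated PTEs, starting at `index`, out of a PGD that
 * is only known by physical address.  Names mirror the hit lines. */
static void sync_torn_down_ptes(struct kbase_device *kbdev, phys_addr_t pgd,
				unsigned int index, unsigned int pcount)
{
	kbase_mmu_sync_pgd(kbdev,
			   kbase_dma_addr(phys_to_page(pgd)) + 8 * index,
			   8 * pcount);
}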
/device/soc/rockchip/common/vendor/drivers/gpu/arm/bifrost/mmu/
mali_kbase_mmu.c
   939  kbase_mmu_sync_pgd(kbdev, kbase_dma_addr(p), PAGE_SIZE); in kbase_mmu_alloc_pgd()
   989  kbase_mmu_sync_pgd(kbdev, kbase_dma_addr(p), PAGE_SIZE); in mmu_get_next_pgd()
  1090  kbase_mmu_sync_pgd(kbdev, kbase_dma_addr(phys_to_page(pgd)) + 0x8 * idx, 0x8 * pcount); in mmu_insert_pages_failure_recovery()
  1198  kbase_mmu_sync_pgd(kbdev, kbase_dma_addr(p) + (index * sizeof(u64)), count * sizeof(u64)); in kbase_mmu_insert_single_page()
  1363  kbase_mmu_sync_pgd(kbdev, kbase_dma_addr(p) + (vindex * sizeof(u64)), count * sizeof(u64)); in kbase_mmu_insert_pages_no_flush()
  1667  kbase_mmu_sync_pgd(kbdev, kbase_dma_addr(phys_to_page(pgd)) + 0x8 * index, 0x8 * pcount); in kbase_mmu_teardown_pages()
  1773  kbase_mmu_sync_pgd(kbdev, kbase_dma_addr(p) + (index * sizeof(u64)), count * sizeof(u64)); in kbase_mmu_update_pages_no_flush()
/device/soc/rockchip/common/vendor/drivers/gpu/arm/bifrost/
mali_kbase_mem_pool.c
   124  dma_sync_single_for_device(dev, kbase_dma_addr(p), (PAGE_SIZE << pool->order), DMA_BIDIRECTIONAL); in kbase_mem_pool_sync_page()
   189  dma_addr_t dma_addr = kbase_dma_addr(p); in kbase_mem_pool_free_page()
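Note the size argument in this pool hit: (PAGE_SIZE << pool->order) rather than a bare PAGE_SIZE, because bifrost memory pools can hold higher-order (large-page) allocations and the whole compound allocation is synced in one call. A minimal sketch, assuming the pool carries its order and that kbase_dma_addr() covers the full compound page:

#include <linux/dma-mapping.h>

/* Sync an entire pooled allocation of 2^order pages for the device, as
 * kbase_mem_pool_sync_page() does in the hit above. */
static void pool_sync_page(struct device *dev, struct page *p,
			   unsigned int order)
{
	dma_sync_single_for_device(dev, kbase_dma_addr(p),
				   PAGE_SIZE << order, DMA_BIDIRECTIONAL);
}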
mali_kbase_mem.h
  1376  static inline dma_addr_t kbase_dma_addr(struct page *p) in kbase_dma_addr() function
mali_kbase_mem.c
  1503  dma_addr = kbase_dma_addr(cpu_page) + offset; in kbase_sync_single()
  1524  dma_sync_single_for_cpu(kctx->kbdev->dev, kbase_dma_addr(gpu_page) + offset, size, DMA_BIDIRECTIONAL); in kbase_sync_single()
  1532  dma_sync_single_for_device(kctx->kbdev->dev, kbase_dma_addr(gpu_page) + offset, size, DMA_BIDIRECTIONAL); in kbase_sync_single()
/device/soc/rockchip/common/kernel/drivers/gpu/arm/bifrost/csf/
mali_kbase_csf_firmware.c
   420  kbase_sync_single_for_device(kbdev, kbase_dma_addr(page), in load_fw_image_section()
  1053  kbase_dma_addr(target_page) + offset_in_page, in access_firmware_memory()
  1061  kbase_dma_addr(target_page) + offset_in_page, in access_firmware_memory()
mali_kbase_csf.c
  2971  kbase_sync_single_for_device(kbdev, kbase_dma_addr(page), sizeof(u32), in kbase_csf_setup_dummy_user_reg_page()
mali_kbase_csf_scheduler.c
  5143  kbase_dma_addr(pg), in kbase_csf_scheduler_group_copy_suspend_buf()
/device/soc/rockchip/common/kernel/drivers/gpu/arm/bifrost/backend/gpu/
mali_kbase_model_dummy.c
   611  kbase_dma_addr(pg), PAGE_SIZE, in gpu_model_sync_dummy_prfcnt()
