/device/soc/rockchip/common/kernel/drivers/gpu/arm/midgard/

mali_kbase_mem_linux.c
    2272: phys_addr_t gpu_pa = map->gpu_pages[0];   [local]
    2274: kbase_sync_single(kctx, cpu_pa, gpu_pa, offset, sz,
    2280: gpu_pa = map->gpu_pages[i];
    2281: kbase_sync_single(kctx, cpu_pa, gpu_pa, 0, PAGE_SIZE,
    2288: gpu_pa = map->gpu_pages[page_count - 1];
    2290: kbase_sync_single(kctx, cpu_pa, gpu_pa, 0, sz,
    2334: phys_addr_t gpu_pa = map->gpu_pages[0];   [local]
    2336: kbase_sync_single(kctx, cpu_pa, gpu_pa, offset, sz,
    2342: gpu_pa = map->gpu_pages[i];
    2343: kbase_sync_single(kctx, cpu_pa, gpu_pa, [all...]

mali_kbase_mem.c
    1073: kbase_sync_single(struct kbase_context *kctx, phys_addr_t cpu_pa, phys_addr_t gpu_pa, off_t offset, size_t size, enum kbase_sync_type sync_fn)   [kbase_sync_single() argument]
    1074: phys_addr_t cpu_pa, phys_addr_t gpu_pa,   [in kbase_sync_single()]
    1081: if (likely(cpu_pa == gpu_pa)) {   [in kbase_sync_single()]
    1099: if (WARN(!gpu_pa, "No GPU PA found for infinite cache op"))   [in kbase_sync_single()]
    1102: gpu_page = pfn_to_page(PFN_DOWN(gpu_pa));   [in kbase_sync_single()]
    1133: phys_addr_t *gpu_pa;   [in kbase_do_syncset(), local]
    1169: gpu_pa = kbase_get_gpu_phy_pages(reg);   [in kbase_do_syncset()]
    1182: kbase_sync_single(kctx, cpu_pa[page_off], gpu_pa[page_off],   [in kbase_do_syncset()]
    1193: gpu_pa[page_off + i], 0, PAGE_SIZE, sync_fn);   [in kbase_do_syncset()]
    1201: gpu_pa[page_off + page_count - 1], 0, sz,   [in kbase_do_syncset()]

mali_kbase_mem.h
    753: phys_addr_t gpu_pa, off_t offset, size_t size,
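
Taken together, the midgard hits above show a fixed three-step pattern in kbase_do_syncset(): the first page is synced from a byte offset (offset, sz), the interior pages are synced whole (0, PAGE_SIZE), and the last page is synced partially from its start (0, sz). The sketch below reconstructs that pattern around the kbase_sync_single() prototype visible at mali_kbase_mem.c:1073; the wrapper sync_page_range(), its parameters, and the byte-count arithmetic are assumptions added for illustration, not the driver's actual code.

/*
 * Minimal sketch of the three-step per-page sync pattern suggested by the
 * kbase_do_syncset() hits above (midgard, phys_addr_t flavour). Only the
 * kbase_sync_single() prototype is taken from the listing; the wrapper, its
 * parameters and the size arithmetic are assumptions.
 */
#include <linux/kernel.h>   /* min_t() */
#include <linux/mm.h>       /* PAGE_SIZE, PAGE_MASK */
#include <linux/types.h>    /* phys_addr_t, off_t, size_t */

struct kbase_context;

/* Redeclared for self-containment; both constants appear at call sites in
 * the listing. */
enum kbase_sync_type { KBASE_SYNC_TO_CPU, KBASE_SYNC_TO_DEVICE };

void kbase_sync_single(struct kbase_context *kctx, phys_addr_t cpu_pa,
                       phys_addr_t gpu_pa, off_t offset, size_t size,
                       enum kbase_sync_type sync_fn);

/*
 * Hypothetical wrapper: sync 'size' bytes that start 'offset' bytes into the
 * page at cpu_pa[page_off]/gpu_pa[page_off] (offset < PAGE_SIZE assumed).
 */
static void sync_page_range(struct kbase_context *kctx, phys_addr_t *cpu_pa,
                            phys_addr_t *gpu_pa, size_t page_off,
                            size_t page_count, off_t offset, size_t size,
                            enum kbase_sync_type sync_fn)
{
        size_t sz, i;

        /* First page: may start mid-page, so pass the byte offset. */
        sz = min_t(size_t, PAGE_SIZE - offset, size);
        kbase_sync_single(kctx, cpu_pa[page_off], gpu_pa[page_off],
                          offset, sz, sync_fn);

        /* Interior pages: always whole pages. */
        for (i = 1; page_count > 2 && i < page_count - 1; i++)
                kbase_sync_single(kctx, cpu_pa[page_off + i],
                                  gpu_pa[page_off + i], 0, PAGE_SIZE, sync_fn);

        /* Last page: only the bytes that spill into it. */
        if (page_count > 1) {
                sz = ((offset + size - 1) & ~PAGE_MASK) + 1;
                kbase_sync_single(kctx,
                                  cpu_pa[page_off + page_count - 1],
                                  gpu_pa[page_off + page_count - 1],
                                  0, sz, sync_fn);
        }
}
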
/device/soc/rockchip/common/vendor/drivers/gpu/arm/midgard/

mali_kbase_mem_linux.c
    2276: phys_addr_t gpu_pa = map->gpu_pages[0];   [local]
    2278: kbase_sync_single(kctx, cpu_pa, gpu_pa, offset, sz, KBASE_SYNC_TO_CPU);
    2283: gpu_pa = map->gpu_pages[i];
    2284: kbase_sync_single(kctx, cpu_pa, gpu_pa, 0, PAGE_SIZE, KBASE_SYNC_TO_CPU);
    2290: gpu_pa = map->gpu_pages[page_count - 1];
    2292: kbase_sync_single(kctx, cpu_pa, gpu_pa, 0, sz, KBASE_SYNC_TO_CPU);
    2335: phys_addr_t gpu_pa = map->gpu_pages[0];   [local]
    2337: kbase_sync_single(kctx, cpu_pa, gpu_pa, offset, sz, KBASE_SYNC_TO_DEVICE);
    2342: gpu_pa = map->gpu_pages[i];
    2343: kbase_sync_single(kctx, cpu_pa, gpu_pa, [all...]

mali_kbase_mem.c
    1045: void kbase_sync_single(struct kbase_context *kctx, phys_addr_t cpu_pa, phys_addr_t gpu_pa, off_t offset, size_t size,   [in kbase_sync_single(), argument]
    1052: if (likely(cpu_pa == gpu_pa)) {   [in kbase_sync_single()]
    1069: if (WARN(!gpu_pa, "No GPU PA found for infinite cache op")) {   [in kbase_sync_single()]
    1073: gpu_page = pfn_to_page(PFN_DOWN(gpu_pa));   [in kbase_sync_single()]
    1100: phys_addr_t *gpu_pa;   [in kbase_do_syncset(), local]
    1135: gpu_pa = kbase_get_gpu_phy_pages(reg);   [in kbase_do_syncset()]
    1147: kbase_sync_single(kctx, cpu_pa[page_off], gpu_pa[page_off], offset, sz, sync_fn);   [in kbase_do_syncset()]
    1157: kbase_sync_single(kctx, cpu_pa[page_off + i], gpu_pa[page_off + i], 0, PAGE_SIZE, sync_fn);   [in kbase_do_syncset()]
    1164: kbase_sync_single(kctx, cpu_pa[page_off + page_count - 1], gpu_pa[page_off + page_count - 1], 0, sz, sync_fn);   [in kbase_do_syncset()]

mali_kbase_mem.h
    744: void kbase_sync_single(struct kbase_context *kctx, phys_addr_t cpu_pa, phys_addr_t gpu_pa, off_t offset, size_t size,
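
The vendor-midgard hits in mali_kbase_mem_linux.c show the same pattern driven from a mapping object, with the sync direction fixed at the call site: KBASE_SYNC_TO_CPU on the sync-for-CPU path (lines 2276-2292) and KBASE_SYNC_TO_DEVICE on the sync-for-device path (lines 2335 onward). The sketch below illustrates only that direction choice over whole pages; struct vmap_sync_sketch, sync_vmap_whole_pages(), and the cpu_pages[] array are assumptions (only map->gpu_pages[] is visible in the hits).

/*
 * Minimal sketch of the direction-selected vmap sync loop suggested by the
 * mali_kbase_mem_linux.c hits above. struct vmap_sync_sketch and
 * sync_vmap_whole_pages() are hypothetical; only map->gpu_pages[] and the
 * kbase_sync_single(..., KBASE_SYNC_TO_CPU / KBASE_SYNC_TO_DEVICE) call
 * shapes appear in the listing.
 */
#include <linux/mm.h>      /* PAGE_SIZE */
#include <linux/types.h>   /* phys_addr_t, off_t, size_t, bool */

struct kbase_context;
enum kbase_sync_type { KBASE_SYNC_TO_CPU, KBASE_SYNC_TO_DEVICE };

void kbase_sync_single(struct kbase_context *kctx, phys_addr_t cpu_pa,
                       phys_addr_t gpu_pa, off_t offset, size_t size,
                       enum kbase_sync_type sync_fn);

/* Hypothetical stand-in for the mapping object used at these call sites;
 * cpu_pages[] is assumed by symmetry with gpu_pages[]. */
struct vmap_sync_sketch {
        phys_addr_t *cpu_pages;
        phys_addr_t *gpu_pages;
};

/* Sync every whole page of the mapping toward the CPU (before the CPU reads
 * it) or toward the device (after the CPU has written it). */
static void sync_vmap_whole_pages(struct kbase_context *kctx,
                                  struct vmap_sync_sketch *map,
                                  size_t page_count, bool to_cpu)
{
        enum kbase_sync_type dir = to_cpu ? KBASE_SYNC_TO_CPU
                                          : KBASE_SYNC_TO_DEVICE;
        size_t i;

        for (i = 0; i < page_count; i++)
                kbase_sync_single(kctx, map->cpu_pages[i], map->gpu_pages[i],
                                  0, PAGE_SIZE, dir);
}
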
/device/soc/rockchip/common/vendor/drivers/gpu/arm/bifrost/

mali_kbase_mem.c
    1493: phys_addr_t gpu_pa = as_phys_addr_t(t_gpu_pa);   [in kbase_sync_single(), local]
    1497: if (likely(cpu_pa == gpu_pa)) {   [in kbase_sync_single()]
    1514: if (WARN(!gpu_pa, "No GPU PA found for infinite cache op")) {   [in kbase_sync_single()]
    1518: gpu_page = pfn_to_page(PFN_DOWN(gpu_pa));   [in kbase_sync_single()]
    1545: struct tagged_addr *gpu_pa;   [in kbase_do_syncset(), local]
    1591: gpu_pa = kbase_get_gpu_phy_pages(reg);   [in kbase_do_syncset()]
    1603: kbase_sync_single(kctx, cpu_pa[page_off], gpu_pa[page_off], offset, sz, sync_fn);   [in kbase_do_syncset()]
    1613: kbase_sync_single(kctx, cpu_pa[page_off + i], gpu_pa[page_off + i], 0, PAGE_SIZE, sync_fn);   [in kbase_do_syncset()]
    1620: kbase_sync_single(kctx, cpu_pa[page_off + page_count - 1], gpu_pa[page_off + page_count - 1], 0, sz, sync_fn);   [in kbase_do_syncset()]

mali_kbase_mem_linux.c
    2960: struct tagged_addr gpu_pa = map->gpu_pages[0];   [local]
    2962: kbase_sync_single(kctx, cpu_pa, gpu_pa, offset, sz, dest);
    2967: gpu_pa = map->gpu_pages[i];
    2968: kbase_sync_single(kctx, cpu_pa, gpu_pa, 0, PAGE_SIZE, dest);
    2974: gpu_pa = map->gpu_pages[page_count - 1];
    2976: kbase_sync_single(kctx, cpu_pa, gpu_pa, 0, sz, dest);

mali_kbase_mem.h
    1165: void kbase_sync_single(struct kbase_context *kctx, struct tagged_addr cpu_pa, struct tagged_addr gpu_pa, off_t offset,
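
In the bifrost directories, kbase_sync_single() takes struct tagged_addr instead of phys_addr_t (mali_kbase_mem.h:1165), and the mali_kbase_mem.c hits show it unwrapping the tagged addresses with as_phys_addr_t() before deciding whether the CPU and GPU sides alias the same page. The fragment below is a hedged reconstruction of just that prologue from the visible lines (~1493-1518); the t_cpu_pa handling, the stand-in tagged_addr / as_phys_addr_t() definitions, and everything past the pfn_to_page() call are assumptions, with the real body omitted.

/*
 * Hedged reconstruction of the entry of the bifrost kbase_sync_single(),
 * pieced together from the hits above. The stand-in tagged_addr /
 * as_phys_addr_t() definitions are assumptions (the real ones live in the
 * bifrost kbase headers), and the function body after the prologue is
 * omitted.
 */
#include <linux/bug.h>        /* WARN() */
#include <linux/compiler.h>   /* likely() */
#include <linux/mm.h>         /* pfn_to_page(), PAGE_MASK, struct page */
#include <linux/pfn.h>        /* PFN_DOWN() */
#include <linux/types.h>      /* phys_addr_t, off_t, size_t */

struct kbase_context;
enum kbase_sync_type { KBASE_SYNC_TO_CPU, KBASE_SYNC_TO_DEVICE };

/* Stand-in: assumes the tag occupies the sub-page bits of the stored PA. */
struct tagged_addr { phys_addr_t tagged_addr; };

static inline phys_addr_t as_phys_addr_t(struct tagged_addr t)
{
        return t.tagged_addr & PAGE_MASK;
}

void kbase_sync_single(struct kbase_context *kctx,
                       struct tagged_addr t_cpu_pa, struct tagged_addr t_gpu_pa,
                       off_t offset, size_t size, enum kbase_sync_type sync_fn)
{
        phys_addr_t cpu_pa = as_phys_addr_t(t_cpu_pa); /* assumed by symmetry */
        phys_addr_t gpu_pa = as_phys_addr_t(t_gpu_pa); /* hit ~1493 */
        struct page *gpu_page;

        if (likely(cpu_pa == gpu_pa)) {                /* hit ~1497 */
                /* Same physical page for CPU and GPU: one cache-maintenance
                 * pass is enough (that path is omitted here). */
                return;
        }

        /* Aliased ("infinite cache") case: the GPU-side page must exist. */
        if (WARN(!gpu_pa, "No GPU PA found for infinite cache op"))
                return;                                /* hit ~1514 */

        gpu_page = pfn_to_page(PFN_DOWN(gpu_pa));      /* hit ~1518 */

        /* Copy/sync between the CPU and GPU pages omitted in this sketch. */
        (void)gpu_page;
        (void)kctx;
        (void)offset;
        (void)size;
        (void)sync_fn;
}
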
/device/soc/rockchip/common/kernel/drivers/gpu/arm/bifrost/

mali_kbase_mem.c
    1798: phys_addr_t gpu_pa = as_phys_addr_t(t_gpu_pa);   [in kbase_sync_single(), local]
    1802: if (likely(cpu_pa == gpu_pa)) {   [in kbase_sync_single()]
    1820: if (WARN(!gpu_pa, "No GPU PA found for infinite cache op"))   [in kbase_sync_single()]
    1823: gpu_page = pfn_to_page(PFN_DOWN(gpu_pa));   [in kbase_sync_single()]
    1854: struct tagged_addr *gpu_pa;   [in kbase_do_syncset(), local]
    1901: gpu_pa = kbase_get_gpu_phy_pages(reg);   [in kbase_do_syncset()]
    1914: kbase_sync_single(kctx, cpu_pa[page_off], gpu_pa[page_off],   [in kbase_do_syncset()]
    1925: gpu_pa[page_off + i], 0, PAGE_SIZE, sync_fn);   [in kbase_do_syncset()]
    1934: gpu_pa[page_off + page_count - 1], 0, sz,   [in kbase_do_syncset()]

mali_kbase_mem_linux.c
    2936: struct tagged_addr gpu_pa = map->gpu_pages[0];   [local]
    2938: kbase_sync_single(kctx, cpu_pa, gpu_pa, offset, sz, dest);
    2943: gpu_pa = map->gpu_pages[i];
    2944: kbase_sync_single(kctx, cpu_pa, gpu_pa, 0, PAGE_SIZE, dest);
    2950: gpu_pa = map->gpu_pages[page_count - 1];
    2952: kbase_sync_single(kctx, cpu_pa, gpu_pa, 0, sz, dest);

mali_kbase_mem.h
    1282: struct tagged_addr gpu_pa, off_t offset, size_t size,