Searched refs:gpu_pa (Results 1 - 12 of 12) sorted by relevance

/device/soc/rockchip/common/kernel/drivers/gpu/arm/midgard/
mali_kbase_mem_linux.c
2272 phys_addr_t gpu_pa = map->gpu_pages[0]; local
2274 kbase_sync_single(kctx, cpu_pa, gpu_pa, offset, sz,
2280 gpu_pa = map->gpu_pages[i];
2281 kbase_sync_single(kctx, cpu_pa, gpu_pa, 0, PAGE_SIZE,
2288 gpu_pa = map->gpu_pages[page_count - 1];
2290 kbase_sync_single(kctx, cpu_pa, gpu_pa, 0, sz,
2334 phys_addr_t gpu_pa = map->gpu_pages[0]; local
2336 kbase_sync_single(kctx, cpu_pa, gpu_pa, offset, sz,
2342 gpu_pa = map->gpu_pages[i];
2343 kbase_sync_single(kctx, cpu_pa, gpu_pa,
[all...]
mali_kbase_mem.c
1074 phys_addr_t cpu_pa, phys_addr_t gpu_pa, in kbase_sync_single()
1081 if (likely(cpu_pa == gpu_pa)) { in kbase_sync_single()
1099 if (WARN(!gpu_pa, "No GPU PA found for infinite cache op")) in kbase_sync_single()
1102 gpu_page = pfn_to_page(PFN_DOWN(gpu_pa)); in kbase_sync_single()
1133 phys_addr_t *gpu_pa; in kbase_do_syncset() local
1169 gpu_pa = kbase_get_gpu_phy_pages(reg); in kbase_do_syncset()
1182 kbase_sync_single(kctx, cpu_pa[page_off], gpu_pa[page_off], in kbase_do_syncset()
1193 gpu_pa[page_off + i], 0, PAGE_SIZE, sync_fn); in kbase_do_syncset()
1201 gpu_pa[page_off + page_count - 1], 0, sz, in kbase_do_syncset()
1073 kbase_sync_single(struct kbase_context *kctx, phys_addr_t cpu_pa, phys_addr_t gpu_pa, off_t offset, size_t size, enum kbase_sync_type sync_fn) kbase_sync_single() argument
mali_kbase_mem.h
753 phys_addr_t gpu_pa, off_t offset, size_t size,
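
For orientation, below is a minimal stand-alone sketch (plain C, not the driver source) of the per-page split that the kbase_do_syncset() hits above imply: the first page is synced from offset, whole middle pages in full, and a trailing partial page from 0. kbase_sync_single() is stubbed, do_syncset_sketch() is a hypothetical name, and the size arithmetic is an assumption based only on the visible call sites.

/*
 * Stand-alone sketch of the first/middle/last page split seen in the
 * kbase_do_syncset() hits above.  kbase_sync_single() is a stub and
 * do_syncset_sketch() is a hypothetical name; the offset/size
 * arithmetic is inferred only from the visible call sites.
 */
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

#define PAGE_SIZE 4096u

typedef uint64_t phys_addr_t;

/* Stub for the driver call kbase_sync_single(kctx, cpu_pa, gpu_pa, ...). */
static void kbase_sync_single(phys_addr_t cpu_pa, phys_addr_t gpu_pa,
                              size_t offset, size_t size)
{
    printf("sync cpu_pa=%#llx gpu_pa=%#llx offset=%zu size=%zu\n",
           (unsigned long long)cpu_pa, (unsigned long long)gpu_pa,
           offset, size);
}

static void do_syncset_sketch(const phys_addr_t *cpu_pa,
                              const phys_addr_t *gpu_pa,
                              size_t page_off, size_t page_count,
                              size_t offset, size_t size)
{
    size_t sz;

    if (!page_count)
        return;

    /* First page: from offset to the end of the page (or of the range). */
    sz = PAGE_SIZE - offset;
    if (sz > size)
        sz = size;
    kbase_sync_single(cpu_pa[page_off], gpu_pa[page_off], offset, sz);

    /* Whole middle pages, one kbase_sync_single() per PAGE_SIZE chunk. */
    for (size_t i = 1; i + 1 < page_count; i++)
        kbase_sync_single(cpu_pa[page_off + i], gpu_pa[page_off + i],
                          0, PAGE_SIZE);

    /* Trailing partial page: from 0 up to where the range ends. */
    if (page_count > 1) {
        sz = ((offset + size - 1) & (PAGE_SIZE - 1)) + 1;
        kbase_sync_single(cpu_pa[page_off + page_count - 1],
                          gpu_pa[page_off + page_count - 1], 0, sz);
    }
}
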
/device/soc/rockchip/common/vendor/drivers/gpu/arm/midgard/
mali_kbase_mem_linux.c
2276 phys_addr_t gpu_pa = map->gpu_pages[0]; local
2278 kbase_sync_single(kctx, cpu_pa, gpu_pa, offset, sz, KBASE_SYNC_TO_CPU);
2283 gpu_pa = map->gpu_pages[i];
2284 kbase_sync_single(kctx, cpu_pa, gpu_pa, 0, PAGE_SIZE, KBASE_SYNC_TO_CPU);
2290 gpu_pa = map->gpu_pages[page_count - 1];
2292 kbase_sync_single(kctx, cpu_pa, gpu_pa, 0, sz, KBASE_SYNC_TO_CPU);
2335 phys_addr_t gpu_pa = map->gpu_pages[0]; local
2337 kbase_sync_single(kctx, cpu_pa, gpu_pa, offset, sz, KBASE_SYNC_TO_DEVICE);
2342 gpu_pa = map->gpu_pages[i];
2343 kbase_sync_single(kctx, cpu_pa, gpu_pa,
[all...]
mali_kbase_mem.c
1045 void kbase_sync_single(struct kbase_context *kctx, phys_addr_t cpu_pa, phys_addr_t gpu_pa, off_t offset, size_t size, in kbase_sync_single() argument
1052 if (likely(cpu_pa == gpu_pa)) { in kbase_sync_single()
1069 if (WARN(!gpu_pa, "No GPU PA found for infinite cache op")) { in kbase_sync_single()
1073 gpu_page = pfn_to_page(PFN_DOWN(gpu_pa)); in kbase_sync_single()
1100 phys_addr_t *gpu_pa; in kbase_do_syncset() local
1135 gpu_pa = kbase_get_gpu_phy_pages(reg); in kbase_do_syncset()
1147 kbase_sync_single(kctx, cpu_pa[page_off], gpu_pa[page_off], offset, sz, sync_fn); in kbase_do_syncset()
1157 kbase_sync_single(kctx, cpu_pa[page_off + i], gpu_pa[page_off + i], 0, PAGE_SIZE, sync_fn); in kbase_do_syncset()
1164 kbase_sync_single(kctx, cpu_pa[page_off + page_count - 1], gpu_pa[page_off + page_count - 1], 0, sz, sync_fn); in kbase_do_syncset()
mali_kbase_mem.h
744 void kbase_sync_single(struct kbase_context *kctx, phys_addr_t cpu_pa, phys_addr_t gpu_pa, off_t offset, size_t size,
/device/soc/rockchip/common/vendor/drivers/gpu/arm/bifrost/
mali_kbase_mem.c
1493 phys_addr_t gpu_pa = as_phys_addr_t(t_gpu_pa); in kbase_sync_single() local
1497 if (likely(cpu_pa == gpu_pa)) { in kbase_sync_single()
1514 if (WARN(!gpu_pa, "No GPU PA found for infinite cache op")) { in kbase_sync_single()
1518 gpu_page = pfn_to_page(PFN_DOWN(gpu_pa)); in kbase_sync_single()
1545 struct tagged_addr *gpu_pa; in kbase_do_syncset() local
1591 gpu_pa = kbase_get_gpu_phy_pages(reg); in kbase_do_syncset()
1603 kbase_sync_single(kctx, cpu_pa[page_off], gpu_pa[page_off], offset, sz, sync_fn); in kbase_do_syncset()
1613 kbase_sync_single(kctx, cpu_pa[page_off + i], gpu_pa[page_off + i], 0, PAGE_SIZE, sync_fn); in kbase_do_syncset()
1620 kbase_sync_single(kctx, cpu_pa[page_off + page_count - 1], gpu_pa[page_off + page_count - 1], 0, sz, sync_fn); in kbase_do_syncset()
mali_kbase_mem_linux.c
2960 struct tagged_addr gpu_pa = map->gpu_pages[0]; local
2962 kbase_sync_single(kctx, cpu_pa, gpu_pa, offset, sz, dest);
2967 gpu_pa = map->gpu_pages[i];
2968 kbase_sync_single(kctx, cpu_pa, gpu_pa, 0, PAGE_SIZE, dest);
2974 gpu_pa = map->gpu_pages[page_count - 1];
2976 kbase_sync_single(kctx, cpu_pa, gpu_pa, 0, sz, dest);
mali_kbase_mem.h
1165 void kbase_sync_single(struct kbase_context *kctx, struct tagged_addr cpu_pa, struct tagged_addr gpu_pa, off_t offset,
/device/soc/rockchip/common/kernel/drivers/gpu/arm/bifrost/
mali_kbase_mem.c
1798 phys_addr_t gpu_pa = as_phys_addr_t(t_gpu_pa); in kbase_sync_single() local
1802 if (likely(cpu_pa == gpu_pa)) { in kbase_sync_single()
1820 if (WARN(!gpu_pa, "No GPU PA found for infinite cache op")) in kbase_sync_single()
1823 gpu_page = pfn_to_page(PFN_DOWN(gpu_pa)); in kbase_sync_single()
1854 struct tagged_addr *gpu_pa; in kbase_do_syncset() local
1901 gpu_pa = kbase_get_gpu_phy_pages(reg); in kbase_do_syncset()
1914 kbase_sync_single(kctx, cpu_pa[page_off], gpu_pa[page_off], in kbase_do_syncset()
1925 gpu_pa[page_off + i], 0, PAGE_SIZE, sync_fn); in kbase_do_syncset()
1934 gpu_pa[page_off + page_count - 1], 0, sz, in kbase_do_syncset()
mali_kbase_mem_linux.c
2936 struct tagged_addr gpu_pa = map->gpu_pages[0]; local
2938 kbase_sync_single(kctx, cpu_pa, gpu_pa, offset, sz, dest);
2943 gpu_pa = map->gpu_pages[i];
2944 kbase_sync_single(kctx, cpu_pa, gpu_pa, 0, PAGE_SIZE, dest);
2950 gpu_pa = map->gpu_pages[page_count - 1];
2952 kbase_sync_single(kctx, cpu_pa, gpu_pa, 0, sz, dest);
mali_kbase_mem.h
1282 struct tagged_addr gpu_pa, off_t offset, size_t size,
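
The bifrost directories change only the address type: kbase_sync_single() takes struct tagged_addr and unpacks it with as_phys_addr_t() before the same aliased/distinct-page handling (lines 1493/1497 and 1798/1802 above). A rough stand-alone sketch follows; the tag encoding is hypothetical, since the results only show that as_phys_addr_t() yields a phys_addr_t.

/*
 * Sketch of the bifrost-side difference: addresses arrive as
 * struct tagged_addr and are unpacked with as_phys_addr_t() before the
 * aliased/distinct handling.  The tag encoding below is hypothetical.
 */
#include <stdint.h>
#include <stdio.h>

typedef uint64_t phys_addr_t;

struct tagged_addr { uint64_t tagged_addr; };

/* Hypothetical unpack: drop illustrative low-order tag bits. */
static phys_addr_t as_phys_addr_t(struct tagged_addr t)
{
    return t.tagged_addr & ~0xfffull;
}

static void sync_single_sketch(struct tagged_addr t_cpu_pa,
                               struct tagged_addr t_gpu_pa)
{
    phys_addr_t cpu_pa = as_phys_addr_t(t_cpu_pa);
    phys_addr_t gpu_pa = as_phys_addr_t(t_gpu_pa);

    if (cpu_pa == gpu_pa) {
        /* Likely path in the hits: CPU and GPU alias the same page. */
        printf("sync aliased page at %#llx\n", (unsigned long long)cpu_pa);
        return;
    }

    if (!gpu_pa) {
        /* Mirrors the WARN("No GPU PA found for infinite cache op") guard. */
        fprintf(stderr, "no GPU PA for infinite cache op\n");
        return;
    }

    /*
     * Distinct CPU/GPU pages: the driver resolves the GPU page via
     * pfn_to_page(PFN_DOWN(gpu_pa)) and syncs both sides.
     */
    printf("sync distinct pages cpu=%#llx gpu=%#llx\n",
           (unsigned long long)cpu_pa, (unsigned long long)gpu_pa);
}
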
