
Searched refs:cpu_alloc (Results 1 - 25 of 26) sorted by relevance


/device/soc/rockchip/common/vendor/drivers/gpu/arm/midgard/
mali_kbase_mem.h
301 struct kbase_mem_phy_alloc *cpu_alloc; /* the one alloc object we mmap to the CPU when mapping this region */ member
315 KBASE_DEBUG_ASSERT(reg->cpu_alloc); in kbase_get_cpu_phy_pages()
317 KBASE_DEBUG_ASSERT(reg->cpu_alloc->nents == reg->gpu_alloc->nents); in kbase_get_cpu_phy_pages()
319 return reg->cpu_alloc->pages; in kbase_get_cpu_phy_pages()
325 KBASE_DEBUG_ASSERT(reg->cpu_alloc); in kbase_get_gpu_phy_pages()
327 KBASE_DEBUG_ASSERT(reg->cpu_alloc->nents == reg->gpu_alloc->nents); in kbase_get_gpu_phy_pages()
336 if (!reg->cpu_alloc) { in kbase_reg_current_backed_size()
340 KBASE_DEBUG_ASSERT(reg->cpu_alloc); in kbase_reg_current_backed_size()
342 KBASE_DEBUG_ASSERT(reg->cpu_alloc->nents == reg->gpu_alloc->nents); in kbase_reg_current_backed_size()
344 return reg->cpu_alloc… in kbase_reg_current_backed_size()
[all...]
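
Read together, the mali_kbase_mem.h hits encode an invariant: a region's cpu_alloc and gpu_alloc must be backed by the same number of pages before either page array is handed out, and a region with no cpu_alloc bound counts as having zero backed pages. Below is a minimal sketch of that pattern in plain C; the struct layouts are simplified stand-ins for the real kbase types, and the final return of nents is an inference from the truncated `return reg->cpu_alloc…` snippet above.

    #include <assert.h>
    #include <stddef.h>

    /* Simplified stand-ins for kbase_mem_phy_alloc / kbase_va_region. */
    struct phy_alloc {
        size_t nents;  /* number of physically backed pages */
        void **pages;  /* the page array handed out to callers */
    };

    struct va_region {
        struct phy_alloc *cpu_alloc;
        struct phy_alloc *gpu_alloc;
    };

    static void **get_cpu_phy_pages(struct va_region *reg)
    {
        /* Mirrors the KBASE_DEBUG_ASSERT pairs shown above. */
        assert(reg->cpu_alloc);
        assert(reg->cpu_alloc->nents == reg->gpu_alloc->nents);
        return reg->cpu_alloc->pages;
    }

    static size_t reg_current_backed_size(struct va_region *reg)
    {
        if (!reg->cpu_alloc)  /* no alloc bound yet */
            return 0;
        assert(reg->cpu_alloc->nents == reg->gpu_alloc->nents);
        return reg->cpu_alloc->nents;  /* assumed continuation of the cut line */
    }
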
mali_kbase_mem_linux.c
238 kbase_mem_phy_alloc_put(reg->cpu_alloc); in kbase_mem_alloc()
274 if (reg->cpu_alloc->type != KBASE_MEM_TYPE_ALIAS) { in kbase_mem_query()
280 aliased = reg->cpu_alloc->imported.alias.aliased; in kbase_mem_query()
281 for (i = 0; i < reg->cpu_alloc->imported.alias.nents; i++) { in kbase_mem_query()
620 if (atomic_read(&reg->cpu_alloc->gpu_mappings) > 1) { in kbase_mem_flags_change()
626 if (reg->cpu_alloc->type != KBASE_MEM_TYPE_NATIVE) { in kbase_mem_flags_change()
748 reg->cpu_alloc = kbase_mem_phy_alloc_get(reg->gpu_alloc); in kbase_mem_from_ump()
877 reg->cpu_alloc = kbase_mem_phy_alloc_get(reg->gpu_alloc); in kbase_mem_from_umm()
997 reg->cpu_alloc = kbase_mem_phy_alloc_get(reg->gpu_alloc); in kbase_mem_from_user_buffer()
1098 kbase_mem_phy_alloc_put(reg->cpu_alloc); in kbase_mem_from_user_buffer()
[all...]
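
A pattern repeats across the import paths above (kbase_mem_from_ump, kbase_mem_from_umm, kbase_mem_from_user_buffer): the CPU side is bound by taking an extra reference on the GPU alloc rather than by creating a second allocation, and the error paths drop that reference with a put. A hedged sketch of this refcounting idiom, with toy helpers standing in for kbase_mem_phy_alloc_get/put (the real alloc is kref-based):

    #include <stdlib.h>

    struct phy_alloc { int refcount; };  /* stand-in for the kref'd struct */

    static struct phy_alloc *phy_alloc_get(struct phy_alloc *a)
    {
        a->refcount++;
        return a;
    }

    static void phy_alloc_put(struct phy_alloc *a)
    {
        if (--a->refcount == 0)
            free(a);
    }

    struct va_region {
        struct phy_alloc *cpu_alloc;
        struct phy_alloc *gpu_alloc;
    };

    /* Import path: the CPU side aliases the GPU alloc via a reference. */
    static void bind_imported(struct va_region *reg, struct phy_alloc *gpu)
    {
        reg->gpu_alloc = gpu;
        reg->cpu_alloc = phy_alloc_get(reg->gpu_alloc);
    }

    /* Error/teardown path mirrors the kbase_mem_phy_alloc_put() hits. */
    static void unbind(struct va_region *reg)
    {
        phy_alloc_put(reg->cpu_alloc);
        phy_alloc_put(reg->gpu_alloc);
    }
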
mali_kbase_mem.c
779 new_reg->cpu_alloc = NULL; /* no alloc bound yet */ in kbase_alloc_free_region()
821 reg->cpu_alloc->reg = NULL; in kbase_free_alloced_region()
822 if (reg->cpu_alloc != reg->gpu_alloc) { in kbase_free_alloced_region()
834 KBASE_DEBUG_ASSERT(reg->cpu_alloc->type == KBASE_MEM_TYPE_NATIVE); in kbase_free_alloced_region()
845 kbase_mem_phy_alloc_put(reg->cpu_alloc); in kbase_free_alloced_region()
1218 reg->cpu_alloc->reg = NULL; in kbase_mem_free_region()
1219 if (reg->cpu_alloc != reg->gpu_alloc) { in kbase_mem_free_region()
1230 KBASE_DEBUG_ASSERT(reg->cpu_alloc->type == KBASE_MEM_TYPE_NATIVE); in kbase_mem_free_region()
1537 if ((size_t)vsize > ((size_t)-1 / sizeof(*reg->cpu_alloc->pages))) { in kbase_alloc_phy_pages()
1543 if (kbase_alloc_phy_pages_helper(reg->cpu_alloc, siz… in kbase_alloc_phy_pages()
[all...]
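
The kbase_alloc_phy_pages() hits also show a standard overflow guard: before computing vsize * sizeof(*pages) for the page array, the driver rejects any vsize that would make the multiplication wrap, using the fact that (size_t)-1 is SIZE_MAX. A self-contained sketch of the same check, with a stand-in element type:

    #include <stddef.h>
    #include <stdio.h>

    struct page_entry { unsigned long long pfn; };  /* stand-in element type */

    static int page_array_bytes(size_t vsize, size_t *out)
    {
        /* SIZE_MAX / element size is the largest count whose byte size
         * still fits in a size_t; anything bigger would overflow. */
        if (vsize > ((size_t)-1 / sizeof(struct page_entry)))
            return -1;
        *out = vsize * sizeof(struct page_entry);
        return 0;
    }

    int main(void)
    {
        size_t bytes;
        if (page_array_bytes((size_t)1 << 20, &bytes) == 0)
            printf("page array needs %zu bytes\n", bytes);
        return 0;
    }
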
mali_kbase_mem_linux.h
124 struct kbase_mem_phy_alloc *cpu_alloc; member
mali_kbase_context.c
202 kbase_mem_phy_alloc_put(reg->cpu_alloc); in kbase_reg_pending_dtor()
mali_kbase_mmu.c
273 if (region->gpu_alloc != region->cpu_alloc) { in page_fault_worker()
274 if (kbase_alloc_phy_pages_helper(region->cpu_alloc, new_pages) == 0) { in page_fault_worker()
305 if (region->gpu_alloc != region->cpu_alloc) { in page_fault_worker()
306 kbase_free_phy_pages_helper(region->cpu_alloc, new_pages); in page_fault_worker()
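
The page_fault_worker() hits sketch the growth protocol for regions whose CPU and GPU allocs are distinct objects: a fault commits new pages to the GPU alloc and then to the CPU alloc, and a later failure frees the CPU-side pages again so the nents counts stay equal. An illustrative stand-in version (the real helpers draw from per-context memory pools):

    #include <stdbool.h>
    #include <stddef.h>

    struct phy_alloc { size_t nents; };
    struct va_region {
        struct phy_alloc *cpu_alloc;
        struct phy_alloc *gpu_alloc;
    };

    /* Toy stand-ins for kbase_{alloc,free}_phy_pages_helper(). */
    static int alloc_pages_helper(struct phy_alloc *a, size_t n)
    {
        a->nents += n;
        return 0;
    }

    static void free_pages_helper(struct phy_alloc *a, size_t n)
    {
        a->nents -= n;
    }

    static bool grow_on_fault(struct va_region *region, size_t new_pages)
    {
        if (alloc_pages_helper(region->gpu_alloc, new_pages) != 0)
            return false;

        /* Distinct allocs: the CPU side must be backed as well. */
        if (region->gpu_alloc != region->cpu_alloc) {
            if (alloc_pages_helper(region->cpu_alloc, new_pages) != 0) {
                free_pages_helper(region->gpu_alloc, new_pages);
                return false;
            }
        }
        /* On a later failure (e.g. MMU insert), both allocs are shrunk
         * back by new_pages, as in the rollback hits above. */
        return true;
    }
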
/device/soc/rockchip/common/kernel/drivers/gpu/arm/midgard/
mali_kbase_mem.h
300 struct kbase_mem_phy_alloc *cpu_alloc; /* the one alloc object we mmap to the CPU when mapping this region */ member
314 KBASE_DEBUG_ASSERT(reg->cpu_alloc); in kbase_get_cpu_phy_pages()
316 KBASE_DEBUG_ASSERT(reg->cpu_alloc->nents == reg->gpu_alloc->nents); in kbase_get_cpu_phy_pages()
318 return reg->cpu_alloc->pages; in kbase_get_cpu_phy_pages()
324 KBASE_DEBUG_ASSERT(reg->cpu_alloc); in kbase_get_gpu_phy_pages()
326 KBASE_DEBUG_ASSERT(reg->cpu_alloc->nents == reg->gpu_alloc->nents); in kbase_get_gpu_phy_pages()
335 if (!reg->cpu_alloc) in kbase_reg_current_backed_size()
338 KBASE_DEBUG_ASSERT(reg->cpu_alloc); in kbase_reg_current_backed_size()
340 KBASE_DEBUG_ASSERT(reg->cpu_alloc->nents == reg->gpu_alloc->nents); in kbase_reg_current_backed_size()
342 return reg->cpu_alloc… in kbase_reg_current_backed_size()
[all...]
mali_kbase_mem_linux.c
246 kbase_mem_phy_alloc_put(reg->cpu_alloc); in kbase_mem_alloc()
281 if (reg->cpu_alloc->type != KBASE_MEM_TYPE_ALIAS) { in kbase_mem_query()
287 aliased = reg->cpu_alloc->imported.alias.aliased; in kbase_mem_query()
288 for (i = 0; i < reg->cpu_alloc->imported.alias.nents; i++) in kbase_mem_query()
627 if (atomic_read(&reg->cpu_alloc->gpu_mappings) > 1) in kbase_mem_flags_change()
632 if (reg->cpu_alloc->type != KBASE_MEM_TYPE_NATIVE) { in kbase_mem_flags_change()
743 reg->cpu_alloc = kbase_mem_phy_alloc_get(reg->gpu_alloc); in kbase_mem_from_ump()
866 reg->cpu_alloc = kbase_mem_phy_alloc_get(reg->gpu_alloc); in kbase_mem_from_umm()
988 reg->cpu_alloc = kbase_mem_phy_alloc_get(reg->gpu_alloc); in kbase_mem_from_user_buffer()
1092 kbase_mem_phy_alloc_put(reg->cpu_alloc); in kbase_mem_from_user_buffer()
[all...]
mali_kbase_mem.c
807 new_reg->cpu_alloc = NULL; /* no alloc bound yet */ in kbase_alloc_free_region()
849 reg->cpu_alloc->reg = NULL; in kbase_free_alloced_region()
850 if (reg->cpu_alloc != reg->gpu_alloc) in kbase_free_alloced_region()
861 KBASE_DEBUG_ASSERT(reg->cpu_alloc->type == … in kbase_free_alloced_region()
874 kbase_mem_phy_alloc_put(reg->cpu_alloc); in kbase_free_alloced_region()
1257 reg->cpu_alloc->reg = NULL; in kbase_mem_free_region()
1258 if (reg->cpu_alloc != reg->gpu_alloc) in kbase_mem_free_region()
1268 KBASE_DEBUG_ASSERT(reg->cpu_alloc->type == … in kbase_mem_free_region()
1585 if ((size_t) vsize > ((size_t) -1 / sizeof(*reg->cpu_alloc->pages))) in kbase_alloc_phy_pages()
1590 if (kbase_alloc_phy_pages_helper(reg->cpu_alloc, siz… in kbase_alloc_phy_pages()
[all...]
mali_kbase_mem_linux.h
126 struct kbase_mem_phy_alloc *cpu_alloc; member
mali_kbase_context.c
199 kbase_mem_phy_alloc_put(reg->cpu_alloc); in kbase_reg_pending_dtor()
/device/soc/rockchip/common/kernel/drivers/gpu/arm/bifrost/
mali_kbase_mem_linux.c
183 if (vsize != size || reg->cpu_alloc->permanent_map != NULL || in kbase_phy_alloc_mapping_init()
184 reg->cpu_alloc->type != KBASE_MEM_TYPE_NATIVE) in kbase_phy_alloc_mapping_init()
207 reg->cpu_alloc->permanent_map = kern_mapping; in kbase_phy_alloc_mapping_init()
225 /* Mappings are only done on cpu_alloc, so don't need to worry about … in kbase_phy_alloc_mapping_term()
258 kern_mapping = reg->cpu_alloc->permanent_map; in kbase_phy_alloc_mapping_get()
265 WARN_ON(reg->cpu_alloc != kern_mapping->cpu_alloc); in kbase_phy_alloc_mapping_get()
267 (void)kbase_mem_phy_alloc_get(kern_mapping->cpu_alloc); in kbase_phy_alloc_mapping_get()
283 WARN_ON(kctx != kern_mapping->cpu_alloc->imported.native.kctx); in kbase_phy_alloc_mapping_put()
284 WARN_ON(kern_mapping != kern_mapping->cpu_alloc… in kbase_phy_alloc_mapping_put()
3039 struct kbase_mem_phy_alloc *cpu_alloc; local
[all...]
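
The bifrost mem_linux.c hits add a third role for cpu_alloc: permanent kernel mappings are tracked on it exclusively (the comment at line 225 says mappings are only done on cpu_alloc). kbase_phy_alloc_mapping_init() refuses partially backed regions, regions that already carry a permanent_map, and non-native memory before recording the mapping; the get path then pins the alloc with an extra reference. A minimal sketch of the guard-and-record step, using simplified stand-in types:

    #include <stddef.h>

    enum mem_type { MEM_TYPE_NATIVE, MEM_TYPE_IMPORTED };  /* stand-in enum */

    struct kern_mapping { void *addr; };

    struct phy_alloc {
        enum mem_type type;
        struct kern_mapping *permanent_map;
    };

    struct va_region { struct phy_alloc *cpu_alloc; };

    static int phy_alloc_mapping_init(struct va_region *reg, size_t vsize,
                                      size_t size, struct kern_mapping *km)
    {
        /* Same three guards as kbase_phy_alloc_mapping_init() above:
         * full backing, no existing permanent map, native memory only. */
        if (vsize != size ||
            reg->cpu_alloc->permanent_map != NULL ||
            reg->cpu_alloc->type != MEM_TYPE_NATIVE)
            return -1;

        reg->cpu_alloc->permanent_map = km;  /* recorded on cpu_alloc only */
        return 0;
    }
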
mali_kbase_mem.h
300 * @cpu_alloc: The physical memory we mmap to the CPU when mapping this region.
477 struct kbase_mem_phy_alloc *cpu_alloc; member
603 KBASE_DEBUG_ASSERT(reg->cpu_alloc); in kbase_get_cpu_phy_pages()
605 KBASE_DEBUG_ASSERT(reg->cpu_alloc->nents == reg->gpu_alloc->nents); in kbase_get_cpu_phy_pages()
607 return reg->cpu_alloc->pages; in kbase_get_cpu_phy_pages()
614 KBASE_DEBUG_ASSERT(reg->cpu_alloc); in kbase_get_gpu_phy_pages()
616 KBASE_DEBUG_ASSERT(reg->cpu_alloc->nents == reg->gpu_alloc->nents); in kbase_get_gpu_phy_pages()
625 if (!reg->cpu_alloc) in kbase_reg_current_backed_size()
628 KBASE_DEBUG_ASSERT(reg->cpu_alloc); in kbase_reg_current_backed_size()
630 KBASE_DEBUG_ASSERT(reg->cpu_alloc… in kbase_reg_current_backed_size()
[all...]
mali_kbase_mem.c
1364 new_reg->cpu_alloc = NULL; /* no alloc bound yet */ in kbase_alloc_free_region()
1463 reg->cpu_alloc->reg = NULL; in kbase_free_alloced_region()
1464 if (reg->cpu_alloc != reg->gpu_alloc) in kbase_free_alloced_region()
1475 KBASE_DEBUG_ASSERT(reg->cpu_alloc->type == … in kbase_free_alloced_region()
1490 kbase_mem_phy_alloc_put(reg->cpu_alloc); in kbase_free_alloced_region()
1997 reg->cpu_alloc->reg = NULL; in kbase_mem_free_region()
1998 if (reg->cpu_alloc != reg->gpu_alloc) in kbase_mem_free_region()
2008 KBASE_DEBUG_ASSERT(reg->cpu_alloc->type == … in kbase_mem_free_region()
2953 if ((size_t) vsize > ((size_t) -1 / sizeof(*reg->cpu_alloc->pages))) in kbase_alloc_phy_pages()
2958 if (kbase_alloc_phy_pages_helper(reg->cpu_alloc, siz… in kbase_alloc_phy_pages()
[all...]
mali_kbase_mem_linux.h
222 struct kbase_mem_phy_alloc *cpu_alloc; member
/device/soc/rockchip/common/vendor/drivers/gpu/arm/bifrost/
mali_kbase_mem_linux.c
191 if (vsize != size || reg->cpu_alloc->permanent_map != NULL || in kbase_phy_alloc_mapping_init()
192 reg->cpu_alloc->type != KBASE_MEM_TYPE_NATIVE) { in kbase_phy_alloc_mapping_init()
219 reg->cpu_alloc->permanent_map = kern_mapping; in kbase_phy_alloc_mapping_init()
237 /* Mappings are only done on cpu_alloc, so don't need to worry about … in kbase_phy_alloc_mapping_term()
269 kern_mapping = reg->cpu_alloc->permanent_map; in kbase_phy_alloc_mapping_get()
277 WARN_ON(reg->cpu_alloc != kern_mapping->cpu_alloc); in kbase_phy_alloc_mapping_get()
279 (void)kbase_mem_phy_alloc_get(kern_mapping->cpu_alloc); in kbase_phy_alloc_mapping_get()
296 WARN_ON(kctx != kern_mapping->cpu_alloc->imported.native.kctx); in kbase_phy_alloc_mapping_put()
297 WARN_ON(kern_mapping != kern_mapping->cpu_alloc… in kbase_phy_alloc_mapping_put()
3072 struct kbase_mem_phy_alloc *cpu_alloc; local
[all...]
mali_kbase_mem.h
265 * @cpu_alloc: The physical memory we mmap to the CPU when mapping this region.
417 struct kbase_mem_phy_alloc *cpu_alloc; member
524 KBASE_DEBUG_ASSERT(reg->cpu_alloc); in kbase_get_cpu_phy_pages()
526 KBASE_DEBUG_ASSERT(reg->cpu_alloc->nents == reg->gpu_alloc->nents); in kbase_get_cpu_phy_pages()
528 return reg->cpu_alloc->pages; in kbase_get_cpu_phy_pages()
534 KBASE_DEBUG_ASSERT(reg->cpu_alloc); in kbase_get_gpu_phy_pages()
536 KBASE_DEBUG_ASSERT(reg->cpu_alloc->nents == reg->gpu_alloc->nents); in kbase_get_gpu_phy_pages()
545 if (!reg->cpu_alloc) { in kbase_reg_current_backed_size()
549 KBASE_DEBUG_ASSERT(reg->cpu_alloc); in kbase_reg_current_backed_size()
551 KBASE_DEBUG_ASSERT(reg->cpu_alloc… in kbase_reg_current_backed_size()
[all...]
mali_kbase_mem.c
1094 new_reg->cpu_alloc = NULL; /* no alloc bound yet */ in kbase_alloc_free_region()
1189 reg->cpu_alloc->reg = NULL; in kbase_free_alloced_region()
1190 if (reg->cpu_alloc != reg->gpu_alloc) { in kbase_free_alloced_region()
1202 KBASE_DEBUG_ASSERT(reg->cpu_alloc->type == KBASE_MEM_TYPE_NATIVE); in kbase_free_alloced_region()
1215 kbase_mem_phy_alloc_put(reg->cpu_alloc); in kbase_free_alloced_region()
1680 reg->cpu_alloc->reg = NULL; in kbase_mem_free_region()
1681 if (reg->cpu_alloc != reg->gpu_alloc) { in kbase_mem_free_region()
1692 KBASE_DEBUG_ASSERT(reg->cpu_alloc->type == KBASE_MEM_TYPE_NATIVE); in kbase_mem_free_region()
2540 if ((size_t)vsize > ((size_t)-1 / sizeof(*reg->cpu_alloc->pages))) { in kbase_alloc_phy_pages()
2546 if (kbase_alloc_phy_pages_helper(reg->cpu_alloc, siz… in kbase_alloc_phy_pages()
[all...]
mali_kbase_mem_linux.h
223 struct kbase_mem_phy_alloc *cpu_alloc; member
/device/soc/rockchip/common/kernel/drivers/gpu/arm/bifrost/csf/
mali_kbase_csf_kcpu.h
176 * @cpu_alloc: Reference to physical pages of suspend buffer allocation.
183 struct kbase_mem_phy_alloc *cpu_alloc; member
mali_kbase_csf_kcpu.c
655 sus_buf->cpu_alloc = kbase_mem_phy_alloc_get(reg->cpu_alloc); in kbase_csf_queue_group_suspend_prepare()
656 kbase_mem_phy_alloc_kernel_mapped(reg->cpu_alloc); in kbase_csf_queue_group_suspend_prepare()
1784 if (!sus_buf->cpu_alloc) { in kcpu_queue_process()
1791 sus_buf->cpu_alloc); in kcpu_queue_process()
1793 sus_buf->cpu_alloc); in kcpu_queue_process()
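
The CSF hits show a lifetime trick built on the same refcounting: kbase_csf_queue_group_suspend_prepare() takes its own reference on the region's cpu_alloc so the suspend buffer's pages outlive the region, and kcpu_queue_process() treats a NULL cpu_alloc as having nothing to copy. A toy sketch of the pin-then-consume flow; names mirror the hits but the types are stand-ins:

    #include <stddef.h>

    struct phy_alloc { int refcount; };

    static struct phy_alloc *phy_alloc_get(struct phy_alloc *a)
    {
        a->refcount++;
        return a;
    }

    struct va_region   { struct phy_alloc *cpu_alloc; };
    struct suspend_buf { struct phy_alloc *cpu_alloc; };

    /* Prepare: pin the backing pages for the suspend buffer's lifetime. */
    static void suspend_prepare(struct suspend_buf *sus, struct va_region *reg)
    {
        sus->cpu_alloc = phy_alloc_get(reg->cpu_alloc);
    }

    /* Process: a missing cpu_alloc means there is no backing to copy from. */
    static int suspend_process(struct suspend_buf *sus)
    {
        if (!sus->cpu_alloc)
            return -1;
        /* ... copy the group suspend data out of the pinned pages ... */
        return 0;
    }
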
/device/soc/rockchip/common/kernel/drivers/gpu/arm/bifrost/mmu/
mali_kbase_mmu.c
502 if (region->gpu_alloc != region->cpu_alloc) in page_fault_try_alloc()
550 if (region->gpu_alloc == region->cpu_alloc) in page_fault_try_alloc()
565 } else if (region->gpu_alloc != region->cpu_alloc) { in page_fault_try_alloc()
568 region->cpu_alloc, pool, in page_fault_try_alloc()
607 if (region->gpu_alloc != region->cpu_alloc) { in page_fault_try_alloc()
611 region->cpu_alloc, in page_fault_try_alloc()
963 if (region->gpu_alloc != region->cpu_alloc) in kbase_mmu_page_fault_worker()
964 kbase_free_phy_pages_helper(region->cpu_alloc, in kbase_mmu_page_fault_worker()
/device/soc/rockchip/common/vendor/drivers/gpu/arm/bifrost/mmu/
mali_kbase_mmu.c
359 if (region->gpu_alloc != region->cpu_alloc) { in page_fault_try_alloc()
410 if (region->gpu_alloc == region->cpu_alloc) { in page_fault_try_alloc()
423 } else if (region->gpu_alloc != region->cpu_alloc) { in page_fault_try_alloc()
425 region->cpu_alloc, pool, pages_to_alloc_4k_per_alloc, &prealloc_sas[1]); in page_fault_try_alloc()
464 if (region->gpu_alloc != region->cpu_alloc) { in page_fault_try_alloc()
466 kbase_free_phy_pages_helper_locked(region->cpu_alloc, pool, cpu_pages[pool_level], in page_fault_try_alloc()
764 if (region->gpu_alloc != region->cpu_alloc) { in kbase_mmu_page_fault_worker()
765 kbase_free_phy_pages_helper(region->cpu_alloc, new_pages); in kbase_mmu_page_fault_worker()
/device/soc/rockchip/common/vendor/drivers/gpu/arm/bifrost/context/
mali_kbase_context.c
317 kbase_mem_phy_alloc_put(kctx->pending_regions[cookie]->cpu_alloc); in kbase_context_sticky_resource_term()
/device/soc/rockchip/common/kernel/drivers/gpu/arm/bifrost/context/
mali_kbase_context.c
332 kctx->pending_regions[cookie]->cpu_alloc); in kbase_context_sticky_resource_term()

Completed in 42 milliseconds
