/device/soc/rockchip/common/kernel/drivers/gpu/arm/mali400/mali/linux/

mali_memory_ump.c
    22   mali_mem_allocation *alloc;   in mali_mem_ump_map() local
    34   alloc = mem_backend->mali_allocation;   in mali_mem_ump_map()
    35   MALI_DEBUG_ASSERT_POINTER(alloc);   in mali_mem_ump_map()
    37   session = alloc->session;   in mali_mem_ump_map()
    63   err = mali_mem_mali_map_prepare(alloc);   in mali_mem_ump_map()
    73   u32 virt = alloc->mali_vma_node.vm_node.start + offset;   in mali_mem_ump_map()
    83   if (alloc->flags & _MALI_MAP_EXTERNAL_MAP_GUARD_PAGE) {   in mali_mem_ump_map()
    84   u32 virt = alloc->mali_vma_node.vm_node.start + offset;   in mali_mem_ump_map()
    98   static void mali_mem_ump_unmap(mali_mem_allocation *alloc)   in mali_mem_ump_unmap() argument
   101   MALI_DEBUG_ASSERT_POINTER(alloc);   in mali_mem_ump_unmap()
   110   mali_mem_bind_ump_buf(mali_mem_allocation *alloc, mali_mem_backend *mem_backend, u32 secure_id, u32 flags)   mali_mem_bind_ump_buf() argument
   143   mali_mem_allocation *alloc;   mali_mem_unbind_ump_buf() local
   [all...]

mali_memory_external.c
    20   mali_mem_allocation *alloc;   in mali_mem_unbind_ext_buf() local
    23   alloc = mem_backend->mali_allocation;   in mali_mem_unbind_ext_buf()
    24   MALI_DEBUG_ASSERT_POINTER(alloc);   in mali_mem_unbind_ext_buf()
    27   session = alloc->session;   in mali_mem_unbind_ext_buf()
    30   mali_mem_mali_map_free(session, alloc->psize, alloc->mali_vma_node.vm_node.start,   in mali_mem_unbind_ext_buf()
    31   alloc->flags);   in mali_mem_unbind_ext_buf()
    35   _mali_osk_errcode_t mali_mem_bind_ext_buf(mali_mem_allocation *alloc,   in mali_mem_bind_ext_buf() argument
    44   MALI_DEBUG_ASSERT_POINTER(alloc);   in mali_mem_bind_ext_buf()
    45   size = alloc   in mali_mem_bind_ext_buf()
   [all...]

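Taken together, the mali_mem_unbind_ext_buf() hits above outline one flow: fetch the allocation from the memory backend, read its owning session, then release the Mali-side virtual mapping. Below is a minimal sketch of that flow assembled only from the lines listed here; it assumes the mali400 driver's internal headers, and everything not quoted above (locking, backend bookkeeping, the session type) is an assumption.

    /* Sketch assembled from the mali_memory_external.c hits above; not the
     * verbatim driver function. Locking and error handling are omitted. */
    static void unbind_ext_buf_sketch(mali_mem_backend *mem_backend)
    {
        mali_mem_allocation *alloc;
        struct mali_session_data *session;          /* assumed type of alloc->session */

        alloc = mem_backend->mali_allocation;       /* hit at line 23 */
        MALI_DEBUG_ASSERT_POINTER(alloc);           /* hit at line 24 */

        session = alloc->session;                   /* hit at line 27 */

        /* Release the Mali GPU VA range that was mapped at bind time. */
        mali_mem_mali_map_free(session, alloc->psize,
                               alloc->mali_vma_node.vm_node.start,
                               alloc->flags);       /* hits at lines 30-31 */
    }
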
mali_memory_dma_buf.c
    37   mali_mem_allocation *alloc;   in mali_dma_buf_map() local
    48   alloc = mem_backend->mali_allocation;   in mali_dma_buf_map()
    49   MALI_DEBUG_ASSERT_POINTER(alloc);   in mali_dma_buf_map()
    54   session = alloc->session;   in mali_dma_buf_map()
    58   virt = alloc->mali_vma_node.vm_node.start;   in mali_dma_buf_map()
    59   flags = alloc->flags;   in mali_dma_buf_map()
    79   err = mali_mem_mali_map_prepare(alloc);   in mali_dma_buf_map()
   123   static void mali_dma_buf_unmap(mali_mem_allocation *alloc, struct mali_dma_buf_attachment *mem)   in mali_dma_buf_unmap() argument
   125   MALI_DEBUG_ASSERT_POINTER(alloc);   in mali_dma_buf_unmap()
   129   MALI_DEBUG_ASSERT_POINTER(alloc   in mali_dma_buf_unmap()
   274   mali_mem_bind_dma_buf(mali_mem_allocation *alloc, mali_mem_backend *mem_backend, int fd, u32 flags)   mali_mem_bind_dma_buf() argument
   [all...]

mali_memory_cow.c
   292   mali_mem_allocation *alloc = NULL;   in mali_memory_cow_modify_range() local
   305   alloc = backend->mali_allocation;   in mali_memory_cow_modify_range()
   306   MALI_DEBUG_ASSERT_POINTER(alloc);   in mali_memory_cow_modify_range()
   308   session = alloc->session;   in mali_memory_cow_modify_range()
   375   MALI_DEBUG_ASSERT(MALI_MEM_COW == alloc->type);   in mali_memory_cow_modify_range()
   378   if (NULL != alloc->cpu_mapping.vma) {   in mali_memory_cow_modify_range()
   379   MALI_DEBUG_ASSERT(0 != alloc->backend_handle);   in mali_memory_cow_modify_range()
   380   MALI_DEBUG_ASSERT(NULL != alloc->cpu_mapping.vma);   in mali_memory_cow_modify_range()
   381   MALI_DEBUG_ASSERT(alloc->cpu_mapping.vma->vm_end - alloc   in mali_memory_cow_modify_range()
   595   mali_mem_allocation *alloc;   mali_mem_cow_release() local
   [all...]

mali_memory_secure.c
    93   void mali_mem_secure_mali_unmap(mali_mem_allocation *alloc)   in mali_mem_secure_mali_unmap() argument
    96   MALI_DEBUG_ASSERT_POINTER(alloc);   in mali_mem_secure_mali_unmap()
    97   session = alloc->session;   in mali_mem_secure_mali_unmap()
   101   mali_mem_mali_map_free(session, alloc->psize, alloc->mali_vma_node.vm_node.start,   in mali_mem_secure_mali_unmap()
   102   alloc->flags);   in mali_mem_secure_mali_unmap()
   149   mali_mem_allocation *alloc = mem_bkend->mali_allocation;   in mali_mem_secure_release() local
   158   mali_mem_secure_mali_unmap(alloc);   in mali_mem_secure_release()

mali_memory_util.c
   133   u32 mali_allocation_unref(struct mali_mem_allocation **alloc)   in mali_allocation_unref() argument
   136   mali_mem_allocation *mali_alloc = *alloc;   in mali_allocation_unref()
   137   *alloc = NULL;   in mali_allocation_unref()
   144   void mali_allocation_ref(struct mali_mem_allocation *alloc)   in mali_allocation_ref() argument
   146   _mali_osk_atomic_inc(&alloc->mem_alloc_refcount);   in mali_allocation_ref()

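The mali_memory_util.c hits only show the entry points of the allocation reference counting, so the following is a hedged sketch of how that ref/unref pair fits together. The lines cited in the comments come from the listing; the decrement helper and the zero-refcount cleanup call are assumptions added for illustration, not taken from the driver.

    /* Hedged sketch of the mem_alloc_refcount pair; only the lines cited in
     * the comments come from the listing above. */
    void mali_allocation_ref(struct mali_mem_allocation *alloc)
    {
        _mali_osk_atomic_inc(&alloc->mem_alloc_refcount);           /* line 146 */
    }

    u32 mali_allocation_unref(struct mali_mem_allocation **alloc)
    {
        u32 refcount;
        mali_mem_allocation *mali_alloc = *alloc;                   /* line 136 */

        *alloc = NULL;                                              /* line 137: caller's pointer is cleared */

        /* Assumed OSK helper: drop one reference, destroy on the last one. */
        refcount = _mali_osk_atomic_dec_return(&mali_alloc->mem_alloc_refcount);
        if (refcount == 0) {
            release_allocation_assumed(mali_alloc);                 /* hypothetical cleanup hook */
        }
        return refcount;
    }
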
/device/soc/rockchip/common/vendor/drivers/gpu/arm/mali400/mali/linux/

mali_memory_ump.c
    23   mali_mem_allocation *alloc;   in mali_mem_ump_map() local
    35   alloc = mem_backend->mali_allocation;   in mali_mem_ump_map()
    36   MALI_DEBUG_ASSERT_POINTER(alloc);   in mali_mem_ump_map()
    38   session = alloc->session;   in mali_mem_ump_map()
    64   err = mali_mem_mali_map_prepare(alloc);   in mali_mem_ump_map()
    74   u32 virt = alloc->mali_vma_node.vm_node.start + offset;   in mali_mem_ump_map()
    84   if (alloc->flags & MALI_MAP_EXTERNAL_MAP_GUARD_PAGE) {   in mali_mem_ump_map()
    85   u32 virt = alloc->mali_vma_node.vm_node.start + offset;   in mali_mem_ump_map()
    99   static void mali_mem_ump_unmap(mali_mem_allocation *alloc)   in mali_mem_ump_unmap() argument
   102   MALI_DEBUG_ASSERT_POINTER(alloc);   in mali_mem_ump_unmap()
   110   mali_mem_bind_ump_buf(mali_mem_allocation *alloc, mali_mem_backend *mem_backend, u32 secure_id, u32 flags)   mali_mem_bind_ump_buf() argument
   145   mali_mem_allocation *alloc;   mali_mem_unbind_ump_buf() local
   [all...]

mali_memory_external.c
    21   mali_mem_allocation *alloc;   in mali_mem_unbind_ext_buf() local
    24   alloc = mem_backend->mali_allocation;   in mali_mem_unbind_ext_buf()
    25   MALI_DEBUG_ASSERT_POINTER(alloc);   in mali_mem_unbind_ext_buf()
    28   session = alloc->session;   in mali_mem_unbind_ext_buf()
    31   mali_mem_mali_map_free(session, alloc->psize, alloc->mali_vma_node.vm_node.start, alloc->flags);   in mali_mem_unbind_ext_buf()
    35   mali_osk_errcode_t mali_mem_bind_ext_buf(mali_mem_allocation *alloc, mali_mem_backend *mem_backend, u32 phys_addr,   in mali_mem_bind_ext_buf() argument
    42   MALI_DEBUG_ASSERT_POINTER(alloc);   in mali_mem_bind_ext_buf()
    43   size = alloc   in mali_mem_bind_ext_buf()
   [all...]

mali_memory_dma_buf.c
    38   mali_mem_allocation *alloc;   in mali_dma_buf_map() local
    49   alloc = mem_backend->mali_allocation;   in mali_dma_buf_map()
    50   MALI_DEBUG_ASSERT_POINTER(alloc);   in mali_dma_buf_map()
    55   session = alloc->session;   in mali_dma_buf_map()
    59   virt = alloc->mali_vma_node.vm_node.start;   in mali_dma_buf_map()
    60   flags = alloc->flags;   in mali_dma_buf_map()
    80   err = mali_mem_mali_map_prepare(alloc);   in mali_dma_buf_map()
   125   static void mali_dma_buf_unmap(mali_mem_allocation *alloc, struct mali_dma_buf_attachment *mem)   in mali_dma_buf_unmap() argument
   127   MALI_DEBUG_ASSERT_POINTER(alloc);   in mali_dma_buf_unmap()
   131   MALI_DEBUG_ASSERT_POINTER(alloc   in mali_dma_buf_unmap()
   276   mali_mem_bind_dma_buf(mali_mem_allocation *alloc, mali_mem_backend *mem_backend, int fd, u32 flags)   mali_mem_bind_dma_buf() argument
   [all...]

mali_memory_cow.c
   276   mali_mem_allocation *alloc = NULL;   in mali_memory_cow_modify_range() local
   293   alloc = backend->mali_allocation;   in mali_memory_cow_modify_range()
   294   MALI_DEBUG_ASSERT_POINTER(alloc);   in mali_memory_cow_modify_range()
   296   session = alloc->session;   in mali_memory_cow_modify_range()
   362   MALI_DEBUG_ASSERT(MALI_MEM_COW == alloc->type);   in mali_memory_cow_modify_range()
   365   if (alloc->cpu_mapping.vma != NULL) {   in mali_memory_cow_modify_range()
   366   MALI_DEBUG_ASSERT(alloc->backend_handle != 0);   in mali_memory_cow_modify_range()
   367   MALI_DEBUG_ASSERT(alloc->cpu_mapping.vma != NULL);   in mali_memory_cow_modify_range()
   368   MALI_DEBUG_ASSERT(alloc->cpu_mapping.vma->vm_end - alloc   in mali_memory_cow_modify_range()
   586   mali_mem_allocation *alloc;   mali_mem_cow_release() local
   [all...]

mali_memory_secure.c
    97   void mali_mem_secure_mali_unmap(mali_mem_allocation *alloc)   in mali_mem_secure_mali_unmap() argument
   100   MALI_DEBUG_ASSERT_POINTER(alloc);   in mali_mem_secure_mali_unmap()
   101   session = alloc->session;   in mali_mem_secure_mali_unmap()
   105   mali_mem_mali_map_free(session, alloc->psize, alloc->mali_vma_node.vm_node.start, alloc->flags);   in mali_mem_secure_mali_unmap()
   151   mali_mem_allocation *alloc = mem_bkend->mali_allocation;   in mali_mem_secure_release() local
   160   mali_mem_secure_mali_unmap(alloc);   in mali_mem_secure_release()

mali_memory_util.c
   133   u32 mali_allocation_unref(struct mali_mem_allocation **alloc)   in mali_allocation_unref() argument
   136   mali_mem_allocation *mali_alloc = *alloc;   in mali_allocation_unref()
   137   *alloc = NULL;   in mali_allocation_unref()
   144   void mali_allocation_ref(struct mali_mem_allocation *alloc)   in mali_allocation_ref() argument
   146   mali_osk_atomic_inc(&alloc->mem_alloc_refcount);   in mali_allocation_ref()

/device/soc/rockchip/common/vendor/drivers/gpu/arm/midgard/

mali_kbase_mem.h
    67   struct kbase_mem_phy_alloc *alloc;   member
    85   * but with alloc instead of a gpu va (handle) */
    87   struct kbase_mem_phy_alloc *alloc; /* NULL for special, non-NULL for native */   member
   107   struct kref kref; /* number of users of this alloc */
   175   static inline void kbase_mem_phy_alloc_gpu_mapped(struct kbase_mem_phy_alloc *alloc)   in kbase_mem_phy_alloc_gpu_mapped() argument
   177   KBASE_DEBUG_ASSERT(alloc);   in kbase_mem_phy_alloc_gpu_mapped()
   179   if (alloc->type == KBASE_MEM_TYPE_NATIVE) {   in kbase_mem_phy_alloc_gpu_mapped()
   180   atomic_inc(&alloc->gpu_mappings);   in kbase_mem_phy_alloc_gpu_mapped()
   184   static inline void kbase_mem_phy_alloc_gpu_unmapped(struct kbase_mem_phy_alloc *alloc)   in kbase_mem_phy_alloc_gpu_unmapped() argument
   186   KBASE_DEBUG_ASSERT(alloc);   in kbase_mem_phy_alloc_gpu_unmapped()
   202   kbase_mem_phy_alloc_get(struct kbase_mem_phy_alloc *alloc)   kbase_mem_phy_alloc_get() argument
   208   kbase_mem_phy_alloc_put(struct kbase_mem_phy_alloc *alloc)   kbase_mem_phy_alloc_put() argument
   351   struct kbase_mem_phy_alloc *alloc;   kbase_alloc_create() local
   [all...]

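These mali_kbase_mem.h hits land on the small inline helpers that maintain two separate counters on a struct kbase_mem_phy_alloc: a kref for its lifetime ("number of users of this alloc") and a gpu_mappings count tracked only for native allocations. A sketch of that pattern follows; the mapped-side body is taken from the hits above, while the unmapped/get/put bodies are assumed mirrors, with kbase_mem_kref_free (named in the mali_kbase_mem.c hits below) used as the assumed kref release callback.

    /* Sketch of the two counters kept on struct kbase_mem_phy_alloc.
     * Only kbase_mem_phy_alloc_gpu_mapped() is reproduced from the listing;
     * the other three bodies are assumed for illustration. */
    static inline void kbase_mem_phy_alloc_gpu_mapped(struct kbase_mem_phy_alloc *alloc)
    {
        KBASE_DEBUG_ASSERT(alloc);
        /* Only native allocations track how many GPU mappings they have. */
        if (alloc->type == KBASE_MEM_TYPE_NATIVE) {
            atomic_inc(&alloc->gpu_mappings);
        }
    }

    static inline void kbase_mem_phy_alloc_gpu_unmapped(struct kbase_mem_phy_alloc *alloc)
    {
        KBASE_DEBUG_ASSERT(alloc);
        if (alloc->type == KBASE_MEM_TYPE_NATIVE) {
            atomic_dec(&alloc->gpu_mappings);        /* assumed mirror of the atomic_inc above */
        }
    }

    /* Lifetime is handled separately through the kref member. */
    static inline struct kbase_mem_phy_alloc *kbase_mem_phy_alloc_get(struct kbase_mem_phy_alloc *alloc)
    {
        kref_get(&alloc->kref);                      /* assumed */
        return alloc;
    }

    static inline struct kbase_mem_phy_alloc *kbase_mem_phy_alloc_put(struct kbase_mem_phy_alloc *alloc)
    {
        kref_put(&alloc->kref, kbase_mem_kref_free); /* release callback assumed from the .c hits */
        return NULL;                                 /* assumed convention so callers can clear their pointer */
    }
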
mali_kbase_mem.c
   779   new_reg->cpu_alloc = NULL; /* no alloc bound yet */   in kbase_alloc_free_region()
   780   new_reg->gpu_alloc = NULL; /* no alloc bound yet */   in kbase_alloc_free_region()
   800   * alloc object will be released.
   801   * It is a bug if no alloc object exists for non-free regions.
   878   struct kbase_mem_phy_alloc *alloc;   in kbase_gpu_mmap() local
   880   alloc = reg->gpu_alloc;   in kbase_gpu_mmap()
   881   stride = alloc->imported.alias.stride;   in kbase_gpu_mmap()
   882   KBASE_DEBUG_ASSERT(alloc->imported.alias.aliased);   in kbase_gpu_mmap()
   883   for (i = 0; i < alloc->imported.alias.nents; i++) {   in kbase_gpu_mmap()
   884   if (alloc   in kbase_gpu_mmap()
  1371   kbase_alloc_phy_pages_helper(struct kbase_mem_phy_alloc *alloc, size_t nr_pages_requested)   kbase_alloc_phy_pages_helper() argument
  1408   kbase_free_phy_pages_helper(struct kbase_mem_phy_alloc *alloc, size_t nr_pages_to_free)   kbase_free_phy_pages_helper() argument
  1450   struct kbase_mem_phy_alloc *alloc;   kbase_mem_kref_free() local
  2107   struct kbase_mem_phy_alloc *alloc;   kbase_jd_user_buf_map() local
  2194   kbase_jd_user_buf_unmap(struct kbase_context *kctx, struct kbase_mem_phy_alloc *alloc, bool writeable)   kbase_jd_user_buf_unmap() argument
  2231   struct kbase_mem_phy_alloc *alloc;   kbase_jd_umm_map() local
  2301   kbase_jd_umm_unmap(struct kbase_context *kctx, struct kbase_mem_phy_alloc *alloc)   kbase_jd_umm_unmap() argument
  2409   kbase_unmap_external_resource(struct kbase_context *kctx, struct kbase_va_region *reg, struct kbase_mem_phy_alloc *alloc)   kbase_unmap_external_resource() argument
   [all...]

/device/soc/rockchip/common/kernel/drivers/gpu/arm/midgard/

mali_kbase_mem.h
    69   struct kbase_mem_phy_alloc *alloc;   member
    87   * but with alloc instead of a gpu va (handle) */
    89   struct kbase_mem_phy_alloc *alloc; /* NULL for special, non-NULL for native */   member
   109   struct kref kref; /* number of users of this alloc */
   177   static inline void kbase_mem_phy_alloc_gpu_mapped(struct kbase_mem_phy_alloc *alloc)   in kbase_mem_phy_alloc_gpu_mapped() argument
   179   KBASE_DEBUG_ASSERT(alloc);   in kbase_mem_phy_alloc_gpu_mapped()
   181   if (alloc->type == KBASE_MEM_TYPE_NATIVE)   in kbase_mem_phy_alloc_gpu_mapped()
   182   atomic_inc(&alloc->gpu_mappings);   in kbase_mem_phy_alloc_gpu_mapped()
   185   static inline void kbase_mem_phy_alloc_gpu_unmapped(struct kbase_mem_phy_alloc *alloc)   in kbase_mem_phy_alloc_gpu_unmapped() argument
   187   KBASE_DEBUG_ASSERT(alloc);   in kbase_mem_phy_alloc_gpu_unmapped()
   202   kbase_mem_phy_alloc_get(struct kbase_mem_phy_alloc *alloc)   kbase_mem_phy_alloc_get() argument
   208   kbase_mem_phy_alloc_put(struct kbase_mem_phy_alloc *alloc)   kbase_mem_phy_alloc_put() argument
   349   struct kbase_mem_phy_alloc *alloc;   kbase_alloc_create() local
   [all...]

mali_kbase_mem.c
   807   new_reg->cpu_alloc = NULL; /* no alloc bound yet */   in kbase_alloc_free_region()
   808   new_reg->gpu_alloc = NULL; /* no alloc bound yet */   in kbase_alloc_free_region()
   828   * alloc object will be released.
   829   * It is a bug if no alloc object exists for non-free regions.
   906   struct kbase_mem_phy_alloc *alloc;   in kbase_gpu_mmap() local
   908   alloc = reg->gpu_alloc;   in kbase_gpu_mmap()
   909   stride = alloc->imported.alias.stride;   in kbase_gpu_mmap()
   910   KBASE_DEBUG_ASSERT(alloc->imported.alias.aliased);   in kbase_gpu_mmap()
   911   for (i = 0; i < alloc->imported.alias.nents; i++) {   in kbase_gpu_mmap()
   912   if (alloc   in kbase_gpu_mmap()
  1410   kbase_alloc_phy_pages_helper( struct kbase_mem_phy_alloc *alloc, size_t nr_pages_requested)   kbase_alloc_phy_pages_helper() argument
  1451   kbase_free_phy_pages_helper( struct kbase_mem_phy_alloc *alloc, size_t nr_pages_to_free)   kbase_free_phy_pages_helper() argument
  1502   struct kbase_mem_phy_alloc *alloc;   kbase_mem_kref_free() local
  2145   struct kbase_mem_phy_alloc *alloc;   kbase_jd_user_buf_map() local
  2250   kbase_jd_user_buf_unmap(struct kbase_context *kctx, struct kbase_mem_phy_alloc *alloc, bool writeable)   kbase_jd_user_buf_unmap() argument
  2290   struct kbase_mem_phy_alloc *alloc;   kbase_jd_umm_map() local
  2369   kbase_jd_umm_unmap(struct kbase_context *kctx, struct kbase_mem_phy_alloc *alloc)   kbase_jd_umm_unmap() argument
  2480   kbase_unmap_external_resource(struct kbase_context *kctx, struct kbase_va_region *reg, struct kbase_mem_phy_alloc *alloc)   kbase_unmap_external_resource() argument
   [all...]

/device/soc/rockchip/common/kernel/drivers/gpu/arm/bifrost/

mali_kbase_mem.h
    68   struct kbase_mem_phy_alloc *alloc;   member
    84   * but with alloc instead of a gpu va (handle)
    87   struct kbase_mem_phy_alloc *alloc; /* NULL for special, non-NULL for native */   member
   106   * @kref: number of users of this alloc
   122   * @permanent_map: Kernel side mapping of the alloc, shall never be
   207   static inline void kbase_mem_phy_alloc_gpu_mapped(struct kbase_mem_phy_alloc *alloc)   in kbase_mem_phy_alloc_gpu_mapped() argument
   209   KBASE_DEBUG_ASSERT(alloc);   in kbase_mem_phy_alloc_gpu_mapped()
   211   if (alloc->type == KBASE_MEM_TYPE_NATIVE)   in kbase_mem_phy_alloc_gpu_mapped()
   212   atomic_inc(&alloc->gpu_mappings);   in kbase_mem_phy_alloc_gpu_mapped()
   215   static inline void kbase_mem_phy_alloc_gpu_unmapped(struct kbase_mem_phy_alloc *alloc)   in kbase_mem_phy_alloc_gpu_unmapped() argument
   233   kbase_mem_phy_alloc_kernel_mapped(struct kbase_mem_phy_alloc *alloc)   kbase_mem_phy_alloc_kernel_mapped() argument
   245   kbase_mem_phy_alloc_kernel_unmapped(struct kbase_mem_phy_alloc *alloc)   kbase_mem_phy_alloc_kernel_unmapped() argument
   275   kbase_mem_phy_alloc_put(struct kbase_mem_phy_alloc *alloc)   kbase_mem_phy_alloc_put() argument
   641   struct kbase_mem_phy_alloc *alloc;   kbase_alloc_create() local
   [all...]

mali_kbase_mem.c
   457   "Could not alloc a replacement free region for 0x%.16llx..0x%.16llx",   in kbase_remove_va_region()
  1364   new_reg->cpu_alloc = NULL; /* no alloc bound yet */   in kbase_alloc_free_region()
  1365   new_reg->gpu_alloc = NULL; /* no alloc bound yet */   in kbase_alloc_free_region()
  1416   * alloc object will be released.
  1417   * It is a bug if no alloc object exists for non-free regions.
  1512   struct kbase_mem_phy_alloc *alloc;   in kbase_gpu_mmap() local
  1532   alloc = reg->gpu_alloc;   in kbase_gpu_mmap()
  1533   group_id = alloc->group_id;   in kbase_gpu_mmap()
  1536   u64 const stride = alloc->imported.alias.stride;   in kbase_gpu_mmap()
  1538   KBASE_DEBUG_ASSERT(alloc   in kbase_gpu_mmap()
  1636   struct kbase_mem_phy_alloc *alloc = reg->gpu_alloc;   kbase_gpu_munmap() local
  2202   kbase_alloc_phy_pages_helper(struct kbase_mem_phy_alloc *alloc, size_t nr_pages_requested)   kbase_alloc_phy_pages_helper() argument
  2394   kbase_alloc_phy_pages_helper_locked( struct kbase_mem_phy_alloc *alloc, struct kbase_mem_pool *pool, size_t nr_pages_requested, struct kbase_sub_alloc **prealloc_sa)   kbase_alloc_phy_pages_helper_locked() argument
  2620   kbase_free_phy_pages_helper( struct kbase_mem_phy_alloc *alloc, size_t nr_pages_to_free)   kbase_free_phy_pages_helper() argument
  2742   kbase_free_phy_pages_helper_locked(struct kbase_mem_phy_alloc *alloc, struct kbase_mem_pool *pool, struct tagged_addr *pages, size_t nr_pages_to_free)   kbase_free_phy_pages_helper_locked() argument
  2851   struct kbase_mem_phy_alloc *alloc;   kbase_mem_kref_free() local
  4540   kbase_jd_user_buf_unpin_pages(struct kbase_mem_phy_alloc *alloc)   kbase_jd_user_buf_unpin_pages() argument
  4557   struct kbase_mem_phy_alloc *alloc = reg->gpu_alloc;   kbase_jd_user_buf_pin_pages() local
  4632   struct kbase_mem_phy_alloc *alloc;   kbase_jd_user_buf_map() local
  4711   kbase_jd_user_buf_unmap(struct kbase_context *kctx, struct kbase_mem_phy_alloc *alloc, bool writeable)   kbase_jd_user_buf_unmap() argument
  4822   kbase_unmap_external_resource(struct kbase_context *kctx, struct kbase_va_region *reg, struct kbase_mem_phy_alloc *alloc)   kbase_unmap_external_resource() argument
   [all...]

mali_kbase_trace_gpu_mem.c
   163   struct kbase_mem_phy_alloc *alloc)   in kbase_remove_dma_buf_usage()
   171   kctx, alloc->imported.umm.dma_buf, &kbdev->dma_buf_root);   in kbase_remove_dma_buf_usage()
   174   kctx, alloc->imported.umm.dma_buf, &kctx->kprcs->dma_buf_root);   in kbase_remove_dma_buf_usage()
   180   kbdev->total_gpu_pages -= alloc->nents;   in kbase_remove_dma_buf_usage()
   183   kctx->kprcs->total_gpu_pages -= alloc->nents;   in kbase_remove_dma_buf_usage()
   193   struct kbase_mem_phy_alloc *alloc)   in kbase_add_dma_buf_usage()
   202   kctx, alloc->imported.umm.dma_buf, &kbdev->dma_buf_root);   in kbase_add_dma_buf_usage()
   205   kctx, alloc->imported.umm.dma_buf, &kctx->kprcs->dma_buf_root);   in kbase_add_dma_buf_usage()
   211   kbdev->total_gpu_pages += alloc->nents;   in kbase_add_dma_buf_usage()
   214   kctx->kprcs->total_gpu_pages += alloc   in kbase_add_dma_buf_usage()
   162   kbase_remove_dma_buf_usage(struct kbase_context *kctx, struct kbase_mem_phy_alloc *alloc)   kbase_remove_dma_buf_usage() argument
   192   kbase_add_dma_buf_usage(struct kbase_context *kctx, struct kbase_mem_phy_alloc *alloc)   kbase_add_dma_buf_usage() argument
   [all...]

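The mali_kbase_trace_gpu_mem.c hits show the import/release accounting being symmetric: the allocation's page count (alloc->nents) is added to, or subtracted from, both the device-wide and per-process totals after the dma_buf is recorded in, or removed from, the matching tree of tracked mappings. A condensed sketch of the add path follows; the conditionals on the "unique" results, the kctx->kbdev back-pointer, and the locking are assumptions, since the listing shows only the capture calls and the += lines.

    /* Condensed sketch of the add path based on the hits above; the helpers'
     * internals and the lock protecting the totals are not shown here. */
    void add_dma_buf_usage_sketch(struct kbase_context *kctx,
                                  struct kbase_mem_phy_alloc *alloc)
    {
        struct kbase_device *kbdev = kctx->kbdev;    /* assumed back-pointer */
        bool unique_dev_dmabuf;
        bool unique_prcs_dmabuf;

        /* Record this dma_buf against the device and the owning process. */
        unique_dev_dmabuf = kbase_capture_dma_buf_mapping(
            kctx, alloc->imported.umm.dma_buf, &kbdev->dma_buf_root);
        unique_prcs_dmabuf = kbase_capture_dma_buf_mapping(
            kctx, alloc->imported.umm.dma_buf, &kctx->kprcs->dma_buf_root);

        /* Assumed: pages are counted only for a newly tracked dma_buf. */
        if (unique_dev_dmabuf) {
            kbdev->total_gpu_pages += alloc->nents;
        }
        if (unique_prcs_dmabuf) {
            kctx->kprcs->total_gpu_pages += alloc->nents;
        }
    }
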
mali_kbase_mem_linux.c
   217   struct kbase_mem_phy_alloc *alloc)   in kbase_phy_alloc_mapping_term()
   219   WARN_ON(!alloc->permanent_map);   in kbase_phy_alloc_mapping_term()
   220   kbase_vunmap_phy_pages(kctx, alloc->permanent_map);   in kbase_phy_alloc_mapping_term()
   221   kfree(alloc->permanent_map);   in kbase_phy_alloc_mapping_term()
   223   alloc->permanent_map = NULL;   in kbase_phy_alloc_mapping_term()
   229   WARN_ON(alloc->nents > atomic_read(&kctx->permanent_mapped_pages));   in kbase_phy_alloc_mapping_term()
   230   atomic_sub(alloc->nents, &kctx->permanent_mapped_pages);   in kbase_phy_alloc_mapping_term()
   678   struct kbase_mem_phy_alloc *alloc;   in kbase_mem_evictable_reclaim_scan_objects() local
   686   list_for_each_entry_safe(alloc, tmp, &kctx->evict_list, evict_node) {   in kbase_mem_evictable_reclaim_scan_objects()
   689   err = kbase_mem_shrink_gpu_mapping(kctx, alloc   in kbase_mem_evictable_reclaim_scan_objects()
   216   kbase_phy_alloc_mapping_term(struct kbase_context *kctx, struct kbase_mem_phy_alloc *alloc)   kbase_phy_alloc_mapping_term() argument
   756   kbase_mem_evictable_mark_reclaim(struct kbase_mem_phy_alloc *alloc)   kbase_mem_evictable_mark_reclaim() argument
   779   kbase_mem_evictable_unmark_reclaim(struct kbase_mem_phy_alloc *alloc)   kbase_mem_evictable_unmark_reclaim() argument
  1130   kbase_mem_umm_unmap_attachment(struct kbase_context *kctx, struct kbase_mem_phy_alloc *alloc)   kbase_mem_umm_unmap_attachment() argument
  1167   struct kbase_mem_phy_alloc *alloc = reg->gpu_alloc;   kbase_mem_umm_map_attachment() local
  1225   struct kbase_mem_phy_alloc *alloc;   kbase_mem_umm_map() local
  1301   kbase_mem_umm_unmap(struct kbase_context *kctx, struct kbase_va_region *reg, struct kbase_mem_phy_alloc *alloc)   kbase_mem_umm_unmap() argument
  1826   struct kbase_mem_phy_alloc *alloc;   global() local
   [all...]

/device/soc/rockchip/common/vendor/drivers/gpu/arm/bifrost/

mali_kbase_mem.h
    67   struct kbase_mem_phy_alloc *alloc;   member
    83   * but with alloc instead of a gpu va (handle) */
    85   struct kbase_mem_phy_alloc *alloc; /* NULL for special, non-NULL for native */   member
   106   * @kref: number of users of this alloc
   116   * @permanent_map: Kernel side mapping of the alloc, shall never be
   198   static inline void kbase_mem_phy_alloc_gpu_mapped(struct kbase_mem_phy_alloc *alloc)   in kbase_mem_phy_alloc_gpu_mapped() argument
   200   KBASE_DEBUG_ASSERT(alloc);   in kbase_mem_phy_alloc_gpu_mapped()
   202   if (alloc->type == KBASE_MEM_TYPE_NATIVE) {   in kbase_mem_phy_alloc_gpu_mapped()
   203   atomic_inc(&alloc->gpu_mappings);   in kbase_mem_phy_alloc_gpu_mapped()
   207   static inline void kbase_mem_phy_alloc_gpu_unmapped(struct kbase_mem_phy_alloc *alloc)   in kbase_mem_phy_alloc_gpu_unmapped() argument
   237   kbase_mem_phy_alloc_get(struct kbase_mem_phy_alloc *alloc)   kbase_mem_phy_alloc_get() argument
   243   kbase_mem_phy_alloc_put(struct kbase_mem_phy_alloc *alloc)   kbase_mem_phy_alloc_put() argument
   561   struct kbase_mem_phy_alloc *alloc;   kbase_alloc_create() local
   [all...]

mali_kbase_mem.c
  1094   new_reg->cpu_alloc = NULL; /* no alloc bound yet */   in kbase_alloc_free_region()
  1095   new_reg->gpu_alloc = NULL; /* no alloc bound yet */   in kbase_alloc_free_region()
  1141   * alloc object will be released.
  1142   * It is a bug if no alloc object exists for non-free regions.
  1235   struct kbase_mem_phy_alloc *alloc;   in kbase_gpu_mmap() local
  1257   alloc = reg->gpu_alloc;   in kbase_gpu_mmap()
  1258   group_id = alloc->group_id;   in kbase_gpu_mmap()
  1261   u64 const stride = alloc->imported.alias.stride;   in kbase_gpu_mmap()
  1263   KBASE_DEBUG_ASSERT(alloc->imported.alias.aliased);   in kbase_gpu_mmap()
  1264   for (i = 0; i < alloc   in kbase_gpu_mmap()
  1882   kbase_alloc_phy_pages_helper(struct kbase_mem_phy_alloc *alloc, size_t nr_pages_requested)   kbase_alloc_phy_pages_helper() argument
  2052   kbase_alloc_phy_pages_helper_locked(struct kbase_mem_phy_alloc *alloc, struct kbase_mem_pool *pool, size_t nr_pages_requested, struct kbase_sub_alloc **prealloc_sa)   kbase_alloc_phy_pages_helper_locked() argument
  2252   kbase_free_phy_pages_helper(struct kbase_mem_phy_alloc *alloc, size_t nr_pages_to_free)   kbase_free_phy_pages_helper() argument
  2352   kbase_free_phy_pages_helper_locked(struct kbase_mem_phy_alloc *alloc, struct kbase_mem_pool *pool, struct tagged_addr *pages, size_t nr_pages_to_free)   kbase_free_phy_pages_helper_locked() argument
  2444   struct kbase_mem_phy_alloc *alloc;   kbase_mem_kref_free() local
  4070   kbase_jd_user_buf_unpin_pages(struct kbase_mem_phy_alloc *alloc)   kbase_jd_user_buf_unpin_pages() argument
  4087   struct kbase_mem_phy_alloc *alloc = reg->gpu_alloc;   kbase_jd_user_buf_pin_pages() local
  4149   struct kbase_mem_phy_alloc *alloc;   kbase_jd_user_buf_map() local
  4220   kbase_jd_user_buf_unmap(struct kbase_context *kctx, struct kbase_mem_phy_alloc *alloc, bool writeable)   kbase_jd_user_buf_unmap() argument
  4329   kbase_unmap_external_resource(struct kbase_context *kctx, struct kbase_va_region *reg, struct kbase_mem_phy_alloc *alloc)   kbase_unmap_external_resource() argument
   [all...]

mali_kbase_trace_gpu_mem.c
   160   void kbase_remove_dma_buf_usage(struct kbase_context *kctx, struct kbase_mem_phy_alloc *alloc)   in kbase_remove_dma_buf_usage() argument
   167   dev_mapping_removed = kbase_delete_dma_buf_mapping(kctx, alloc->imported.umm.dma_buf, &kbdev->dma_buf_root);   in kbase_remove_dma_buf_usage()
   169   prcs_mapping_removed = kbase_delete_dma_buf_mapping(kctx, alloc->imported.umm.dma_buf, &kctx->kprcs->dma_buf_root);   in kbase_remove_dma_buf_usage()
   175   kbdev->total_gpu_pages -= alloc->nents;   in kbase_remove_dma_buf_usage()
   179   kctx->kprcs->total_gpu_pages -= alloc->nents;   in kbase_remove_dma_buf_usage()
   190   void kbase_add_dma_buf_usage(struct kbase_context *kctx, struct kbase_mem_phy_alloc *alloc)   in kbase_add_dma_buf_usage() argument
   198   unique_dev_dmabuf = kbase_capture_dma_buf_mapping(kctx, alloc->imported.umm.dma_buf, &kbdev->dma_buf_root);   in kbase_add_dma_buf_usage()
   200   unique_prcs_dmabuf = kbase_capture_dma_buf_mapping(kctx, alloc->imported.umm.dma_buf, &kctx->kprcs->dma_buf_root);   in kbase_add_dma_buf_usage()
   206   kbdev->total_gpu_pages += alloc->nents;   in kbase_add_dma_buf_usage()
   210   kctx->kprcs->total_gpu_pages += alloc   in kbase_add_dma_buf_usage()
   [all...]

mali_kbase_mem_linux.c
   229   struct kbase_mem_phy_alloc *alloc)   in kbase_phy_alloc_mapping_term()
   231   WARN_ON(!alloc->permanent_map);   in kbase_phy_alloc_mapping_term()
   232   kbase_vunmap_phy_pages(kctx, alloc->permanent_map);   in kbase_phy_alloc_mapping_term()
   233   kfree(alloc->permanent_map);   in kbase_phy_alloc_mapping_term()
   235   alloc->permanent_map = NULL;   in kbase_phy_alloc_mapping_term()
   241   WARN_ON(alloc->nents > atomic_read(&kctx->permanent_mapped_pages));   in kbase_phy_alloc_mapping_term()
   242   atomic_sub(alloc->nents, &kctx->permanent_mapped_pages);   in kbase_phy_alloc_mapping_term()
   687   struct kbase_mem_phy_alloc *alloc;   in kbase_mem_evictable_reclaim_count_objects() local
   694   list_for_each_entry(alloc, &kctx->evict_list, evict_node) pages +=   in kbase_mem_evictable_reclaim_count_objects()
   695   alloc   in kbase_mem_evictable_reclaim_count_objects()
   228   kbase_phy_alloc_mapping_term(struct kbase_context *kctx, struct kbase_mem_phy_alloc *alloc)   kbase_phy_alloc_mapping_term() argument
   723   struct kbase_mem_phy_alloc *alloc;   kbase_mem_evictable_reclaim_scan_objects() local
   816   kbase_mem_evictable_mark_reclaim(struct kbase_mem_phy_alloc *alloc)   kbase_mem_evictable_mark_reclaim() argument
   835   kbase_mem_evictable_unmark_reclaim(struct kbase_mem_phy_alloc *alloc)   kbase_mem_evictable_unmark_reclaim() argument
  1181   kbase_mem_umm_unmap_attachment(struct kbase_context *kctx, struct kbase_mem_phy_alloc *alloc)   kbase_mem_umm_unmap_attachment() argument
  1218   struct kbase_mem_phy_alloc *alloc = reg->gpu_alloc;   kbase_mem_umm_map_attachment() local
  1277   struct kbase_mem_phy_alloc *alloc;   kbase_mem_umm_map() local
  1348   kbase_mem_umm_unmap(struct kbase_context *kctx, struct kbase_va_region *reg, struct kbase_mem_phy_alloc *alloc)   kbase_mem_umm_unmap() argument
  1883   struct kbase_mem_phy_alloc *alloc;   global() local
   [all...]

/device/soc/hisilicon/hi3861v100/sdk_liteos/boot/flashboot/lzmaram/

lzmaram.c
   131   i_sz_alloc alloc = { 0 };   in hi_lzma_decompress() local
   139   alloc.alloc = lzma_alloc;   in hi_lzma_decompress()
   140   alloc.free = lzma_free;   in hi_lzma_decompress()
   146   ret = LzmaDecode2(lzma_head, 5, &lzma_stat, &alloc, &in_stream, /* size 5 */   in hi_lzma_decompress()

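The lzmaram.c hits show the boot-time decompressor filling an allocator struct with custom alloc/free callbacks before handing it to the decoder. The sketch below illustrates that callback-struct pattern in a self-contained form; the struct layout, the bump allocator, and every name in it are hypothetical stand-ins, not the Hi3861 SDK's i_sz_alloc, lzma_alloc, lzma_free, or LzmaDecode2.

    /* Illustrative allocator-callback pattern; all names are hypothetical. */
    #include <stddef.h>

    typedef struct boot_sz_alloc {
        void *(*alloc)(size_t size);
        void (*free)(void *address);
    } boot_sz_alloc;

    static unsigned char g_scratch[16 * 1024];      /* assumed scratch area for decoder state */
    static size_t g_scratch_used;

    static void *scratch_alloc(size_t size)
    {
        if (g_scratch_used + size > sizeof(g_scratch)) {
            return NULL;                             /* a decoder would treat NULL as allocation failure */
        }
        void *ret = &g_scratch[g_scratch_used];
        g_scratch_used += size;
        return ret;
    }

    static void scratch_free(void *address)
    {
        (void)address;                               /* bump allocator: reclaimed all at once after decode */
    }

    /* Usage mirroring the listed lines: fill the struct, then pass it to the
     * decoder alongside the compressed stream, as the snippet passes &alloc
     * to LzmaDecode2(). */
    static const boot_sz_alloc g_boot_alloc = { scratch_alloc, scratch_free };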