Searched refs:kbase_mem_phy_alloc (Results 1 - 25 of 39) sorted by relevance

/device/soc/rockchip/common/vendor/drivers/gpu/arm/midgard/
mali_kbase_mem.h
67 struct kbase_mem_phy_alloc *alloc;
87 struct kbase_mem_phy_alloc *alloc; /* NULL for special, non-NULL for native */
102 * Changing of nents or *pages should only happen if the kbase_mem_phy_alloc is not
106 struct kbase_mem_phy_alloc { struct
175 static inline void kbase_mem_phy_alloc_gpu_mapped(struct kbase_mem_phy_alloc *alloc) in kbase_mem_phy_alloc_gpu_mapped()
184 static inline void kbase_mem_phy_alloc_gpu_unmapped(struct kbase_mem_phy_alloc *alloc) in kbase_mem_phy_alloc_gpu_unmapped()
202 static inline struct kbase_mem_phy_alloc *kbase_mem_phy_alloc_get(struct kbase_mem_phy_alloc *alloc) in kbase_mem_phy_alloc_get()
208 static inline struct kbase_mem_phy_alloc *kbase_mem_phy_alloc_put(struct kbase_mem_phy_alloc *allo
[all...]
mali_kbase_mem_linux.h
104 int kbase_mem_evictable_make(struct kbase_mem_phy_alloc *gpu_alloc);
120 bool kbase_mem_evictable_unmake(struct kbase_mem_phy_alloc *alloc);
124 struct kbase_mem_phy_alloc *cpu_alloc;
125 struct kbase_mem_phy_alloc *gpu_alloc;
mali_kbase_mem.c
878 struct kbase_mem_phy_alloc *alloc; in kbase_gpu_mmap()
936 static void kbase_jd_user_buf_unmap(struct kbase_context *kctx, struct kbase_mem_phy_alloc *alloc, bool writeable);
1371 int kbase_alloc_phy_pages_helper(struct kbase_mem_phy_alloc *alloc, size_t nr_pages_requested) in kbase_alloc_phy_pages_helper()
1408 int kbase_free_phy_pages_helper(struct kbase_mem_phy_alloc *alloc, size_t nr_pages_to_free) in kbase_free_phy_pages_helper()
1450 struct kbase_mem_phy_alloc *alloc; in kbase_mem_kref_free()
1452 alloc = container_of(kref, struct kbase_mem_phy_alloc, kref); in kbase_mem_kref_free()
2107 struct kbase_mem_phy_alloc *alloc; in kbase_jd_user_buf_map()
2194 static void kbase_jd_user_buf_unmap(struct kbase_context *kctx, struct kbase_mem_phy_alloc *alloc, bool writeable) in kbase_jd_user_buf_unmap()
2231 struct kbase_mem_phy_alloc *alloc; in kbase_jd_umm_map()
2301 static void kbase_jd_umm_unmap(struct kbase_context *kctx, struct kbase_mem_phy_alloc *allo
[all...]
mali_kbase_softjobs.c
483 struct kbase_mem_phy_alloc *gpu_alloc;
521 struct kbase_mem_phy_alloc *gpu_alloc = buffers[i].gpu_alloc; in kbase_debug_copy_finish()
647 struct kbase_mem_phy_alloc *alloc = reg->gpu_alloc; in kbase_debug_copy_prepare()
739 static void *dma_buf_kmap_page(struct kbase_mem_phy_alloc *gpu_alloc, unsigned long page_num, struct page **page) in dma_buf_kmap_page()
779 struct kbase_mem_phy_alloc *gpu_alloc = buf_data->gpu_alloc; in kbase_mem_copy_from_extres()
/device/soc/rockchip/common/kernel/drivers/gpu/arm/midgard/
mali_kbase_mem.h
69 struct kbase_mem_phy_alloc *alloc;
89 struct kbase_mem_phy_alloc *alloc; /* NULL for special, non-NULL for native */
104 * Changing of nents or *pages should only happen if the kbase_mem_phy_alloc is not
108 struct kbase_mem_phy_alloc { struct
177 static inline void kbase_mem_phy_alloc_gpu_mapped(struct kbase_mem_phy_alloc *alloc) in kbase_mem_phy_alloc_gpu_mapped()
185 static inline void kbase_mem_phy_alloc_gpu_unmapped(struct kbase_mem_phy_alloc *alloc) in kbase_mem_phy_alloc_gpu_unmapped()
202 static inline struct kbase_mem_phy_alloc *kbase_mem_phy_alloc_get(struct kbase_mem_phy_alloc *alloc) in kbase_mem_phy_alloc_get()
208 static inline struct kbase_mem_phy_alloc *kbase_mem_phy_alloc_put(struct kbase_mem_phy_alloc *allo
[all...]
mali_kbase_mem_linux.h
106 int kbase_mem_evictable_make(struct kbase_mem_phy_alloc *gpu_alloc);
122 bool kbase_mem_evictable_unmake(struct kbase_mem_phy_alloc *alloc);
126 struct kbase_mem_phy_alloc *cpu_alloc;
127 struct kbase_mem_phy_alloc *gpu_alloc;
mali_kbase_mem.c
906 struct kbase_mem_phy_alloc *alloc; in kbase_gpu_mmap()
966 struct kbase_mem_phy_alloc *alloc, bool writeable);
1411 struct kbase_mem_phy_alloc *alloc, in kbase_alloc_phy_pages_helper()
1452 struct kbase_mem_phy_alloc *alloc, in kbase_free_phy_pages_helper()
1502 struct kbase_mem_phy_alloc *alloc; in kbase_mem_kref_free()
1504 alloc = container_of(kref, struct kbase_mem_phy_alloc, kref); in kbase_mem_kref_free()
2145 struct kbase_mem_phy_alloc *alloc; in kbase_jd_user_buf_map()
2251 struct kbase_mem_phy_alloc *alloc, bool writeable) in kbase_jd_user_buf_unmap()
2290 struct kbase_mem_phy_alloc *alloc; in kbase_jd_umm_map()
2370 struct kbase_mem_phy_alloc *allo in kbase_jd_umm_unmap()
[all...]
mali_kbase_mem_linux.c
341 struct kbase_mem_phy_alloc *alloc; in kbase_mem_evictable_reclaim_count_objects()
379 struct kbase_mem_phy_alloc *alloc; in kbase_mem_evictable_reclaim_scan_objects()
380 struct kbase_mem_phy_alloc *tmp; in kbase_mem_evictable_reclaim_scan_objects()
470 static void kbase_mem_evictable_mark_reclaim(struct kbase_mem_phy_alloc *alloc) in kbase_mem_evictable_mark_reclaim()
490 void kbase_mem_evictable_unmark_reclaim(struct kbase_mem_phy_alloc *alloc) in kbase_mem_evictable_unmark_reclaim()
509 int kbase_mem_evictable_make(struct kbase_mem_phy_alloc *gpu_alloc) in kbase_mem_evictable_make()
534 bool kbase_mem_evictable_unmake(struct kbase_mem_phy_alloc *gpu_alloc) in kbase_mem_evictable_unmake()
1194 struct kbase_mem_phy_alloc *alloc;
mali_kbase_softjobs.c
499 struct kbase_mem_phy_alloc *gpu_alloc;
536 struct kbase_mem_phy_alloc *gpu_alloc = buffers[i].gpu_alloc; in kbase_debug_copy_finish()
666 struct kbase_mem_phy_alloc *alloc = reg->gpu_alloc; in kbase_debug_copy_prepare()
763 static void *dma_buf_kmap_page(struct kbase_mem_phy_alloc *gpu_alloc, in dma_buf_kmap_page()
801 struct kbase_mem_phy_alloc *gpu_alloc = buf_data->gpu_alloc; in kbase_mem_copy_from_extres()
/device/soc/rockchip/common/kernel/drivers/gpu/arm/bifrost/
mali_kbase_mem.h
68 struct kbase_mem_phy_alloc *alloc;
87 struct kbase_mem_phy_alloc *alloc; /* NULL for special, non-NULL for native */
96 /* struct kbase_mem_phy_alloc - Physical pages tracking object.
101 * Changing of nents or *pages should only happen if the kbase_mem_phy_alloc
133 struct kbase_mem_phy_alloc { struct
207 static inline void kbase_mem_phy_alloc_gpu_mapped(struct kbase_mem_phy_alloc *alloc) in kbase_mem_phy_alloc_gpu_mapped()
215 static inline void kbase_mem_phy_alloc_gpu_unmapped(struct kbase_mem_phy_alloc *alloc) in kbase_mem_phy_alloc_gpu_unmapped()
233 kbase_mem_phy_alloc_kernel_mapped(struct kbase_mem_phy_alloc *alloc) in kbase_mem_phy_alloc_kernel_mapped()
245 kbase_mem_phy_alloc_kernel_unmapped(struct kbase_mem_phy_alloc *alloc) in kbase_mem_phy_alloc_kernel_unmapped()
269 static inline struct kbase_mem_phy_alloc *kbase_mem_phy_alloc_ge
[all...]
mali_kbase_mem_linux.h
202 int kbase_mem_evictable_make(struct kbase_mem_phy_alloc *gpu_alloc);
218 bool kbase_mem_evictable_unmake(struct kbase_mem_phy_alloc *alloc);
222 struct kbase_mem_phy_alloc *cpu_alloc;
223 struct kbase_mem_phy_alloc *gpu_alloc;
358 struct kbase_mem_phy_alloc *alloc);
mali_kbase_trace_gpu_mem.h
86 struct kbase_mem_phy_alloc *alloc);
98 struct kbase_mem_phy_alloc *alloc);
mali_kbase_trace_gpu_mem.c
163 struct kbase_mem_phy_alloc *alloc) in kbase_remove_dma_buf_usage()
193 struct kbase_mem_phy_alloc *alloc) in kbase_add_dma_buf_usage()
mali_kbase_mem_linux.c
217 struct kbase_mem_phy_alloc *alloc) in kbase_phy_alloc_mapping_term()
678 struct kbase_mem_phy_alloc *alloc; in kbase_mem_evictable_reclaim_scan_objects()
679 struct kbase_mem_phy_alloc *tmp; in kbase_mem_evictable_reclaim_scan_objects()
756 void kbase_mem_evictable_mark_reclaim(struct kbase_mem_phy_alloc *alloc) in kbase_mem_evictable_mark_reclaim()
779 void kbase_mem_evictable_unmark_reclaim(struct kbase_mem_phy_alloc *alloc) in kbase_mem_evictable_unmark_reclaim()
801 int kbase_mem_evictable_make(struct kbase_mem_phy_alloc *gpu_alloc) in kbase_mem_evictable_make()
827 bool kbase_mem_evictable_unmake(struct kbase_mem_phy_alloc *gpu_alloc) in kbase_mem_evictable_unmake()
1131 struct kbase_mem_phy_alloc *alloc) in kbase_mem_umm_unmap_attachment()
1167 struct kbase_mem_phy_alloc *alloc = reg->gpu_alloc; in kbase_mem_umm_map_attachment()
1225 struct kbase_mem_phy_alloc *allo in kbase_mem_umm_map()
[all...]
mali_kbase_mem.c
1512 struct kbase_mem_phy_alloc *alloc; in kbase_gpu_mmap()
1620 struct kbase_mem_phy_alloc *alloc, bool writeable);
1636 struct kbase_mem_phy_alloc *alloc = reg->gpu_alloc; in kbase_gpu_munmap()
2202 int kbase_alloc_phy_pages_helper(struct kbase_mem_phy_alloc *alloc, in kbase_alloc_phy_pages_helper()
2395 struct kbase_mem_phy_alloc *alloc, struct kbase_mem_pool *pool, in kbase_alloc_phy_pages_helper_locked()
2621 struct kbase_mem_phy_alloc *alloc, in kbase_free_phy_pages_helper()
2742 void kbase_free_phy_pages_helper_locked(struct kbase_mem_phy_alloc *alloc, in kbase_free_phy_pages_helper_locked()
2846 static void kbase_jd_user_buf_unpin_pages(struct kbase_mem_phy_alloc *alloc);
2851 struct kbase_mem_phy_alloc *alloc; in kbase_mem_kref_free()
2853 alloc = container_of(kref, struct kbase_mem_phy_alloc, kre in kbase_mem_kref_free()
[all...]
/device/soc/rockchip/common/vendor/drivers/gpu/arm/bifrost/
mali_kbase_mem_linux.h
203 int kbase_mem_evictable_make(struct kbase_mem_phy_alloc *gpu_alloc);
219 bool kbase_mem_evictable_unmake(struct kbase_mem_phy_alloc *alloc);
223 struct kbase_mem_phy_alloc *cpu_alloc;
224 struct kbase_mem_phy_alloc *gpu_alloc;
360 struct kbase_mem_phy_alloc *alloc);
mali_kbase_mem.h
67 struct kbase_mem_phy_alloc *alloc;
85 struct kbase_mem_phy_alloc *alloc; /* NULL for special, non-NULL for native */
96 /* struct kbase_mem_phy_alloc - Physical pages tracking object.
101 * Changing of nents or *pages should only happen if the kbase_mem_phy_alloc
127 struct kbase_mem_phy_alloc { struct
198 static inline void kbase_mem_phy_alloc_gpu_mapped(struct kbase_mem_phy_alloc *alloc) in kbase_mem_phy_alloc_gpu_mapped()
207 static inline void kbase_mem_phy_alloc_gpu_unmapped(struct kbase_mem_phy_alloc *alloc) in kbase_mem_phy_alloc_gpu_unmapped()
237 static inline struct kbase_mem_phy_alloc *kbase_mem_phy_alloc_get(struct kbase_mem_phy_alloc *alloc) in kbase_mem_phy_alloc_get()
243 static inline struct kbase_mem_phy_alloc *kbase_mem_phy_alloc_pu
[all...]
mali_kbase_trace_gpu_mem.h
86 void kbase_remove_dma_buf_usage(struct kbase_context *kctx, struct kbase_mem_phy_alloc *alloc);
97 void kbase_add_dma_buf_usage(struct kbase_context *kctx, struct kbase_mem_phy_alloc *alloc);
mali_kbase_trace_gpu_mem.c
160 void kbase_remove_dma_buf_usage(struct kbase_context *kctx, struct kbase_mem_phy_alloc *alloc) in kbase_remove_dma_buf_usage()
190 void kbase_add_dma_buf_usage(struct kbase_context *kctx, struct kbase_mem_phy_alloc *alloc) in kbase_add_dma_buf_usage()
mali_kbase_mem_linux.c
229 struct kbase_mem_phy_alloc *alloc) in kbase_phy_alloc_mapping_term()
687 struct kbase_mem_phy_alloc *alloc; in kbase_mem_evictable_reclaim_count_objects()
723 struct kbase_mem_phy_alloc *alloc; in kbase_mem_evictable_reclaim_scan_objects()
724 struct kbase_mem_phy_alloc *tmp; in kbase_mem_evictable_reclaim_scan_objects()
816 void kbase_mem_evictable_mark_reclaim(struct kbase_mem_phy_alloc *alloc) in kbase_mem_evictable_mark_reclaim()
835 kbase_mem_evictable_unmark_reclaim(struct kbase_mem_phy_alloc *alloc) in kbase_mem_evictable_unmark_reclaim()
853 int kbase_mem_evictable_make(struct kbase_mem_phy_alloc *gpu_alloc) in kbase_mem_evictable_make()
877 bool kbase_mem_evictable_unmake(struct kbase_mem_phy_alloc *gpu_alloc) in kbase_mem_evictable_unmake()
1182 struct kbase_mem_phy_alloc *alloc) in kbase_mem_umm_unmap_attachment()
1218 struct kbase_mem_phy_alloc *allo in kbase_mem_umm_map_attachment()
[all...]
mali_kbase_mem.c
1235 struct kbase_mem_phy_alloc *alloc; in kbase_gpu_mmap()
1335 static void kbase_jd_user_buf_unmap(struct kbase_context *kctx, struct kbase_mem_phy_alloc *alloc, bool writeable);
1882 int kbase_alloc_phy_pages_helper(struct kbase_mem_phy_alloc *alloc, size_t nr_pages_requested) in kbase_alloc_phy_pages_helper()
2052 struct tagged_addr *kbase_alloc_phy_pages_helper_locked(struct kbase_mem_phy_alloc *alloc, struct kbase_mem_pool *pool, in kbase_alloc_phy_pages_helper_locked()
2252 int kbase_free_phy_pages_helper(struct kbase_mem_phy_alloc *alloc, size_t nr_pages_to_free) in kbase_free_phy_pages_helper()
2352 void kbase_free_phy_pages_helper_locked(struct kbase_mem_phy_alloc *alloc, struct kbase_mem_pool *pool, in kbase_free_phy_pages_helper_locked()
2439 static void kbase_jd_user_buf_unpin_pages(struct kbase_mem_phy_alloc *alloc);
2444 struct kbase_mem_phy_alloc *alloc; in kbase_mem_kref_free()
2446 alloc = container_of(kref, struct kbase_mem_phy_alloc, kref); in kbase_mem_kref_free()
4070 static void kbase_jd_user_buf_unpin_pages(struct kbase_mem_phy_alloc *allo
[all...]
mali_kbase_defs.h
210 struct kbase_mem_phy_alloc *gpu_alloc;
1722 struct kbase_mem_phy_alloc *alloc;
/device/soc/rockchip/common/kernel/drivers/gpu/arm/bifrost/csf/
mali_kbase_csf_kcpu.h
183 struct kbase_mem_phy_alloc *cpu_alloc;
/device/soc/rockchip/common/vendor/drivers/gpu/arm/bifrost/jm/
mali_kbase_jm_defs.h
350 struct kbase_mem_phy_alloc *alloc;
/device/soc/rockchip/common/kernel/drivers/gpu/arm/bifrost/jm/
mali_kbase_jm_defs.h
353 struct kbase_mem_phy_alloc *alloc;

Completed in 50 milliseconds
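
Read together, the hits above show how kbase_mem_phy_alloc is managed: the object embeds a struct kref, kbase_mem_phy_alloc_get()/kbase_mem_phy_alloc_put() take and drop references, and kbase_mem_kref_free() recovers the containing allocation with container_of() once the last reference is dropped. Below is a minimal sketch of that kref pattern for orientation only; the phy_alloc_sketch names are hypothetical, and the struct layout and release path are simplified assumptions rather than the driver's actual definitions (the real object also tracks pages, nents, mapping counts, and its owning context).

#include <linux/kref.h>
#include <linux/slab.h>

/* Hypothetical stand-in for struct kbase_mem_phy_alloc, reduced to the
 * reference count; everything else the real driver tracks is omitted. */
struct phy_alloc_sketch {
	struct kref kref;   /* number of users of this physical allocation */
};

/* Allocate the tracking object with its refcount initialised to 1. */
static struct phy_alloc_sketch *phy_alloc_sketch_create(void)
{
	struct phy_alloc_sketch *alloc = kzalloc(sizeof(*alloc), GFP_KERNEL);

	if (alloc)
		kref_init(&alloc->kref);
	return alloc;
}

/* Release callback run by kref_put() when the count reaches zero,
 * mirroring the container_of() step visible in kbase_mem_kref_free(). */
static void phy_alloc_sketch_free(struct kref *kref)
{
	struct phy_alloc_sketch *alloc =
		container_of(kref, struct phy_alloc_sketch, kref);

	kfree(alloc);
}

/* Take a reference, as kbase_mem_phy_alloc_get() does. */
static inline struct phy_alloc_sketch *phy_alloc_sketch_get(struct phy_alloc_sketch *alloc)
{
	kref_get(&alloc->kref);
	return alloc;
}

/* Drop a reference; the object is freed once the last user has put it. */
static inline void phy_alloc_sketch_put(struct phy_alloc_sketch *alloc)
{
	kref_put(&alloc->kref, phy_alloc_sketch_free);
}

This also appears to be why several of the structures above hold separate cpu_alloc and gpu_alloc pointers: each side keeps its own reference, so the backing pages persist until both mappings have dropped theirs.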
