
Searched refs:gpu_alloc (Results 1 - 25 of 41) sorted by relevance


/device/soc/rockchip/common/kernel/drivers/gpu/arm/midgard/
mali_kbase_mem_linux.c
247 kbase_mem_phy_alloc_put(reg->gpu_alloc); in kbase_mem_alloc()
509 int kbase_mem_evictable_make(struct kbase_mem_phy_alloc *gpu_alloc) in kbase_mem_evictable_make() argument
511 struct kbase_context *kctx = gpu_alloc->imported.kctx; in kbase_mem_evictable_make()
516 WARN_ON(!list_empty(&gpu_alloc->evict_node)); in kbase_mem_evictable_make()
518 kbase_mem_shrink_cpu_mapping(kctx, gpu_alloc->reg, in kbase_mem_evictable_make()
519 0, gpu_alloc->nents); in kbase_mem_evictable_make()
526 list_add(&gpu_alloc->evict_node, &kctx->evict_list); in kbase_mem_evictable_make()
528 kbase_mem_evictable_mark_reclaim(gpu_alloc); in kbase_mem_evictable_make()
530 gpu_alloc->reg->flags |= KBASE_REG_DONT_NEED; in kbase_mem_evictable_make()
534 bool kbase_mem_evictable_unmake(struct kbase_mem_phy_alloc *gpu_alloc) in kbase_mem_evictable_unmake() argument
[all...]
mali_kbase_mem.c
808 new_reg->gpu_alloc = NULL; /* no alloc bound yet */ in kbase_alloc_free_region()
842 if (!list_empty(&reg->gpu_alloc->evict_node)) { in kbase_free_alloced_region()
850 if (reg->cpu_alloc != reg->gpu_alloc) in kbase_free_alloced_region()
851 reg->gpu_alloc->reg = NULL; in kbase_free_alloced_region()
863 kbase_mem_evictable_unmake(reg->gpu_alloc); in kbase_free_alloced_region()
875 kbase_mem_phy_alloc_put(reg->gpu_alloc); in kbase_free_alloced_region()
904 if (reg->gpu_alloc->type == KBASE_MEM_TYPE_ALIAS) { in kbase_gpu_mmap()
908 alloc = reg->gpu_alloc; in kbase_gpu_mmap()
940 kbase_mem_phy_alloc_gpu_mapped(reg->gpu_alloc); in kbase_gpu_mmap()
946 if (reg->gpu_alloc … in kbase_gpu_mmap()
[all...]
mali_kbase_mem.h
301 struct kbase_mem_phy_alloc *gpu_alloc; /* the one alloc object we mmap to the GPU when mapping this region */ member
315 KBASE_DEBUG_ASSERT(reg->gpu_alloc); in kbase_get_cpu_phy_pages()
316 KBASE_DEBUG_ASSERT(reg->cpu_alloc->nents == reg->gpu_alloc->nents); in kbase_get_cpu_phy_pages()
325 KBASE_DEBUG_ASSERT(reg->gpu_alloc); in kbase_get_gpu_phy_pages()
326 KBASE_DEBUG_ASSERT(reg->cpu_alloc->nents == reg->gpu_alloc->nents); in kbase_get_gpu_phy_pages()
328 return reg->gpu_alloc->pages; in kbase_get_gpu_phy_pages()
339 KBASE_DEBUG_ASSERT(reg->gpu_alloc); in kbase_reg_current_backed_size()
340 KBASE_DEBUG_ASSERT(reg->cpu_alloc->nents == reg->gpu_alloc->nents); in kbase_reg_current_backed_size()
400 KBASE_DEBUG_ASSERT(!reg->gpu_alloc); in kbase_reg_prepare_native()
413 reg->gpu_alloc … in kbase_reg_prepare_native()
[all...]
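
The mali_kbase_mem.h hits above make the central invariant explicit: every region carries two allocation pointers, cpu_alloc and gpu_alloc, and the page accessors assert that both exist and are backed by the same number of pages before returning a page array. The repeated `if (reg->cpu_alloc != reg->gpu_alloc)` checks elsewhere in these results show the two can be distinct objects, which is what makes the assertion meaningful. A minimal userspace sketch of that accessor pattern, using simplified stand-in structs rather than the driver's real kbase types:

    #include <assert.h>
    #include <stddef.h>

    /* Stand-ins for kbase_mem_phy_alloc / kbase_va_region; field
     * names follow the snippets above. */
    struct phy_alloc {
        size_t nents;   /* number of pages currently backing the alloc */
        void  **pages;  /* page array handed to the mapper */
    };

    struct va_region {
        struct phy_alloc *cpu_alloc; /* alloc mapped for the CPU */
        struct phy_alloc *gpu_alloc; /* alloc mmapped to the GPU */
    };

    /* Mirrors kbase_get_gpu_phy_pages(): check both allocs exist and
     * agree on nents, then hand out the GPU-side page array. */
    static void **get_gpu_phy_pages(const struct va_region *reg)
    {
        assert(reg->cpu_alloc && reg->gpu_alloc);
        assert(reg->cpu_alloc->nents == reg->gpu_alloc->nents);
        return reg->gpu_alloc->pages;
    }
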
mali_kbase_softjobs.c
499 struct kbase_mem_phy_alloc *gpu_alloc; member
536 struct kbase_mem_phy_alloc *gpu_alloc = buffers[i].gpu_alloc; in kbase_debug_copy_finish() local
547 if (gpu_alloc) { in kbase_debug_copy_finish()
548 switch (gpu_alloc->type) { in kbase_debug_copy_finish()
558 kbase_mem_phy_alloc_put(gpu_alloc); in kbase_debug_copy_finish()
651 if (NULL == reg || NULL == reg->gpu_alloc || in kbase_debug_copy_prepare()
657 buffers[i].gpu_alloc = kbase_mem_phy_alloc_get(reg->gpu_alloc); in kbase_debug_copy_prepare()
663 switch (reg->gpu_alloc … in kbase_debug_copy_prepare()
763 dma_buf_kmap_page(struct kbase_mem_phy_alloc *gpu_alloc, unsigned long page_num, struct page **page) in dma_buf_kmap_page() argument
801 struct kbase_mem_phy_alloc *gpu_alloc = buf_data->gpu_alloc; in kbase_mem_copy_from_extres() local
[all...]
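
In the mali_kbase_softjobs.c hits, kbase_debug_copy_prepare() pins each buffer's allocation with kbase_mem_phy_alloc_get(reg->gpu_alloc) and kbase_debug_copy_finish() releases it with kbase_mem_phy_alloc_put(), so the backing pages cannot disappear while the debug-copy job runs. A compressed sketch of that get/put idiom over a plain integer refcount (the driver's own counting machinery is not visible in these results):

    #include <stdlib.h>

    /* Hypothetical refcounted allocation object. */
    struct phy_alloc {
        int refcount;   /* starts at 1 when the alloc is created */
    };

    /* Take an extra reference; returns its argument so callers can
     * write buffers[i].gpu_alloc = phy_alloc_get(reg->gpu_alloc). */
    static struct phy_alloc *phy_alloc_get(struct phy_alloc *a)
    {
        if (a)
            a->refcount++;
        return a;
    }

    /* Drop a reference; the last put frees the object. */
    static void phy_alloc_put(struct phy_alloc *a)
    {
        if (a && --a->refcount == 0)
            free(a);
    }

The `if (gpu_alloc)` guard in kbase_debug_copy_finish() mirrors the NULL-tolerant put above: finish can run for buffers whose prepare step never completed.
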
mali_kbase_mem_linux.h
94 * @gpu_alloc: The physical allocation to make evictable
106 int kbase_mem_evictable_make(struct kbase_mem_phy_alloc *gpu_alloc);
127 struct kbase_mem_phy_alloc *gpu_alloc; member
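
Read together with the mali_kbase_mem_linux.c hits at the top of this page, the header describes a small lifecycle: kbase_mem_evictable_make() shrinks the CPU mapping to zero pages, adds the allocation to the context's evict_list, marks it for reclaim, and sets KBASE_REG_DONT_NEED on the region; kbase_mem_evictable_unmake() undoes this. A schematic userspace sketch of those steps, with simplified stand-in types instead of the real kctx and list machinery:

    #include <stdbool.h>
    #include <stddef.h>

    #define REG_DONT_NEED 0x1u  /* stand-in for KBASE_REG_DONT_NEED */

    struct region { unsigned int flags; };

    struct phy_alloc {
        struct region *reg;   /* back-pointer, as in gpu_alloc->reg */
        size_t nents;         /* pages currently backing the alloc */
        bool on_evict_list;   /* models list_add(&evict_node, &kctx->evict_list) */
    };

    /* Stub for kbase_mem_shrink_cpu_mapping(): unmap the CPU side
     * down to new_pages pages (no-op in this sketch). */
    static void shrink_cpu_mapping(struct phy_alloc *a, size_t new_pages)
    {
        (void)a;
        (void)new_pages;
    }

    /* Mirrors the kbase_mem_evictable_make() steps visible above. */
    static int evictable_make(struct phy_alloc *a)
    {
        shrink_cpu_mapping(a, 0);        /* drop CPU mappings first */
        a->on_evict_list = true;         /* shrinker can now find it */
        a->reg->flags |= REG_DONT_NEED;  /* region contents are disposable */
        return 0;
    }

    /* Reverse of the above. Returning whether backing pages remain is
     * an assumption about kbase_mem_evictable_unmake()'s bool result,
     * which these hits do not show. */
    static bool evictable_unmake(struct phy_alloc *a)
    {
        a->on_evict_list = false;
        a->reg->flags &= ~REG_DONT_NEED;
        return a->nents > 0;
    }

One visible difference between the two driver generations: midgard reads the owning context from gpu_alloc->imported.kctx, while bifrost reads it from gpu_alloc->imported.native.kctx, as the hits on both mali_kbase_mem_linux.c files show.
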
/device/soc/rockchip/common/kernel/drivers/gpu/arm/bifrost/
mali_kbase_mem_linux.c
226 * this being reduced a second time if a separate gpu_alloc is … in kbase_phy_alloc_mapping_term()
266 WARN_ON(reg->gpu_alloc != kern_mapping->gpu_alloc); in kbase_phy_alloc_mapping_get()
268 (void)kbase_mem_phy_alloc_get(kern_mapping->gpu_alloc); in kbase_phy_alloc_mapping_get()
287 kbase_mem_phy_alloc_put(kern_mapping->gpu_alloc); in kbase_phy_alloc_mapping_put()
396 if (unlikely(reg->cpu_alloc != reg->gpu_alloc)) in kbase_mem_alloc()
509 kbase_mem_phy_alloc_put(reg->gpu_alloc); in kbase_mem_alloc()
801 int kbase_mem_evictable_make(struct kbase_mem_phy_alloc *gpu_alloc) in kbase_mem_evictable_make() argument
803 struct kbase_context *kctx = gpu_alloc->imported.native.kctx; in kbase_mem_evictable_make()
807 kbase_mem_shrink_cpu_mapping(kctx, gpu_alloc … in kbase_mem_evictable_make()
827 kbase_mem_evictable_unmake(struct kbase_mem_phy_alloc *gpu_alloc) in kbase_mem_evictable_unmake() argument
3040 struct kbase_mem_phy_alloc *gpu_alloc; local
[all...]
mali_kbase_mem.c
1365 new_reg->gpu_alloc = NULL; /* no alloc bound yet */ in kbase_alloc_free_region()
1454 if (!list_empty(&reg->gpu_alloc->evict_node)) { in kbase_free_alloced_region()
1464 if (reg->cpu_alloc != reg->gpu_alloc) in kbase_free_alloced_region()
1465 reg->gpu_alloc->reg = NULL; in kbase_free_alloced_region()
1477 kbase_mem_evictable_unmake(reg->gpu_alloc); in kbase_free_alloced_region()
1491 kbase_mem_phy_alloc_put(reg->gpu_alloc); in kbase_free_alloced_region()
1532 alloc = reg->gpu_alloc; in kbase_gpu_mmap()
1535 if (reg->gpu_alloc->type == KBASE_MEM_TYPE_ALIAS) { in kbase_gpu_mmap()
1582 !WARN_ON(reg->nr_pages < reg->gpu_alloc->nents) && in kbase_gpu_mmap()
1583 reg->gpu_alloc … in kbase_gpu_mmap()
[all...]
mali_kbase_softjobs.c
517 struct kbase_mem_phy_alloc *gpu_alloc = buffers[i].gpu_alloc; in kbase_debug_copy_finish() local
531 if (gpu_alloc) { in kbase_debug_copy_finish()
532 switch (gpu_alloc->type) { in kbase_debug_copy_finish()
542 kbase_mem_phy_alloc_put(gpu_alloc); in kbase_debug_copy_finish()
661 reg->gpu_alloc == NULL) { in kbase_debug_copy_prepare()
666 buffers[i].gpu_alloc = kbase_mem_phy_alloc_get(reg->gpu_alloc); in kbase_debug_copy_prepare()
672 switch (reg->gpu_alloc->type) { in kbase_debug_copy_prepare()
675 struct kbase_mem_phy_alloc *alloc = reg->gpu_alloc; in kbase_debug_copy_prepare()
734 dma_buf_kmap_page(struct kbase_mem_phy_alloc *gpu_alloc, unsigned long page_num, struct page **page) in dma_buf_kmap_page() argument
772 struct kbase_mem_phy_alloc *gpu_alloc = buf_data->gpu_alloc; in kbase_mem_copy_from_extres() local
[all...]
mali_kbase_mem.h
301 * @gpu_alloc: The physical memory we mmap to the GPU when mapping this region.
322 * greater than gpu_alloc->nents)
432 * In addition, this flag indicates the gpu_alloc member might no longer valid
478 struct kbase_mem_phy_alloc *gpu_alloc; member
505 * gpu_alloc->nents)
604 KBASE_DEBUG_ASSERT(reg->gpu_alloc); in kbase_get_cpu_phy_pages()
605 KBASE_DEBUG_ASSERT(reg->cpu_alloc->nents == reg->gpu_alloc->nents); in kbase_get_cpu_phy_pages()
615 KBASE_DEBUG_ASSERT(reg->gpu_alloc); in kbase_get_gpu_phy_pages()
616 KBASE_DEBUG_ASSERT(reg->cpu_alloc->nents == reg->gpu_alloc->nents); in kbase_get_gpu_phy_pages()
618 return reg->gpu_alloc … in kbase_get_gpu_phy_pages()
[all...]
mali_kbase_mem_linux.h
190 * @gpu_alloc: The physical allocation to make evictable
202 int kbase_mem_evictable_make(struct kbase_mem_phy_alloc *gpu_alloc);
223 struct kbase_mem_phy_alloc *gpu_alloc; member
mali_kbase_gwt.c
41 reg->gpu_alloc->nents, in kbase_gpu_gwt_setup_page_permission()
43 reg->gpu_alloc->group_id); in kbase_gpu_gwt_setup_page_permission()
/device/soc/rockchip/common/vendor/drivers/gpu/arm/bifrost/
mali_kbase_mem_linux.c
238 * this being reduced a second time if a separate gpu_alloc is … in kbase_phy_alloc_mapping_term()
278 WARN_ON(reg->gpu_alloc != kern_mapping->gpu_alloc); in kbase_phy_alloc_mapping_get()
280 (void)kbase_mem_phy_alloc_get(kern_mapping->gpu_alloc); in kbase_phy_alloc_mapping_get()
300 kbase_mem_phy_alloc_put(kern_mapping->gpu_alloc); in kbase_phy_alloc_mapping_put()
411 if (unlikely(reg->cpu_alloc != reg->gpu_alloc)) { in kbase_mem_alloc()
541 kbase_mem_phy_alloc_put(reg->gpu_alloc); in kbase_mem_alloc()
853 int kbase_mem_evictable_make(struct kbase_mem_phy_alloc *gpu_alloc) in kbase_mem_evictable_make() argument
855 struct kbase_context *kctx = gpu_alloc->imported.native.kctx; in kbase_mem_evictable_make()
859 kbase_mem_shrink_cpu_mapping(kctx, gpu_alloc … in kbase_mem_evictable_make()
877 kbase_mem_evictable_unmake(struct kbase_mem_phy_alloc *gpu_alloc) in kbase_mem_evictable_unmake() argument
3073 struct kbase_mem_phy_alloc *gpu_alloc; local
[all...]
mali_kbase_mem.c
1095 new_reg->gpu_alloc = NULL; /* no alloc bound yet */ in kbase_alloc_free_region()
1180 if (!list_empty(&reg->gpu_alloc->evict_node)) { in kbase_free_alloced_region()
1190 if (reg->cpu_alloc != reg->gpu_alloc) { in kbase_free_alloced_region()
1191 reg->gpu_alloc->reg = NULL; in kbase_free_alloced_region()
1203 kbase_mem_evictable_unmake(reg->gpu_alloc); in kbase_free_alloced_region()
1216 kbase_mem_phy_alloc_put(reg->gpu_alloc); in kbase_free_alloced_region()
1257 alloc = reg->gpu_alloc; in kbase_gpu_mmap()
1260 if (reg->gpu_alloc->type == KBASE_MEM_TYPE_ALIAS) { in kbase_gpu_mmap()
1293 if (reg->flags & KBASE_REG_IMPORT_PAD && !WARN_ON(reg->nr_pages < reg->gpu_alloc->nents) && in kbase_gpu_mmap()
1294 reg->gpu_alloc … in kbase_gpu_mmap()
[all...]
mali_kbase_softjobs.c
497 struct kbase_mem_phy_alloc *gpu_alloc = buffers[i].gpu_alloc; in kbase_debug_copy_finish() local
514 if (gpu_alloc) { in kbase_debug_copy_finish()
515 switch (gpu_alloc->type) { in kbase_debug_copy_finish()
524 kbase_mem_phy_alloc_put(gpu_alloc); in kbase_debug_copy_finish()
635 if (kbase_is_region_invalid_or_free(reg) || reg->gpu_alloc == NULL) { in kbase_debug_copy_prepare()
640 buffers[i].gpu_alloc = kbase_mem_phy_alloc_get(reg->gpu_alloc); in kbase_debug_copy_prepare()
647 switch (reg->gpu_alloc->type) { in kbase_debug_copy_prepare()
649 struct kbase_mem_phy_alloc *alloc = reg->gpu_alloc; in kbase_debug_copy_prepare()
703 dma_buf_kmap_page(struct kbase_mem_phy_alloc *gpu_alloc, unsigned long page_num, struct page **page) in dma_buf_kmap_page() argument
743 struct kbase_mem_phy_alloc *gpu_alloc = buf_data->gpu_alloc; in kbase_mem_copy_from_extres() local
[all...]
mali_kbase_mem.h
266 * @gpu_alloc: The physical memory we mmap to the GPU when mapping this region.
373 * In addition, this flag indicates the gpu_alloc member might no longer valid
418 struct kbase_mem_phy_alloc *gpu_alloc; member
444 * gpu_alloc->nents)
525 KBASE_DEBUG_ASSERT(reg->gpu_alloc); in kbase_get_cpu_phy_pages()
526 KBASE_DEBUG_ASSERT(reg->cpu_alloc->nents == reg->gpu_alloc->nents); in kbase_get_cpu_phy_pages()
535 KBASE_DEBUG_ASSERT(reg->gpu_alloc); in kbase_get_gpu_phy_pages()
536 KBASE_DEBUG_ASSERT(reg->cpu_alloc->nents == reg->gpu_alloc->nents); in kbase_get_gpu_phy_pages()
538 return reg->gpu_alloc->pages; in kbase_get_gpu_phy_pages()
550 KBASE_DEBUG_ASSERT(reg->gpu_alloc); in kbase_reg_current_backed_size()
[all...]
mali_kbase_mem_linux.h
191 * @gpu_alloc: The physical allocation to make evictable
203 int kbase_mem_evictable_make(struct kbase_mem_phy_alloc *gpu_alloc);
224 struct kbase_mem_phy_alloc *gpu_alloc; member
mali_kbase_gwt.c
37 err = kbase_mmu_update_pages(kctx, reg->start_pfn, kbase_get_gpu_phy_pages(reg), reg->gpu_alloc->nents, in kbase_gpu_gwt_setup_page_permission()
38 reg->flags & flag, reg->gpu_alloc->group_id); in kbase_gpu_gwt_setup_page_permission()
/device/soc/rockchip/common/vendor/drivers/gpu/arm/midgard/
mali_kbase_mem_linux.c
239 kbase_mem_phy_alloc_put(reg->gpu_alloc); in kbase_mem_alloc()
499 int kbase_mem_evictable_make(struct kbase_mem_phy_alloc *gpu_alloc) in kbase_mem_evictable_make() argument
501 struct kbase_context *kctx = gpu_alloc->imported.kctx; in kbase_mem_evictable_make()
506 WARN_ON(!list_empty(&gpu_alloc->evict_node)); in kbase_mem_evictable_make()
508 kbase_mem_shrink_cpu_mapping(kctx, gpu_alloc->reg, 0, gpu_alloc->nents); in kbase_mem_evictable_make()
515 list_add(&gpu_alloc->evict_node, &kctx->evict_list); in kbase_mem_evictable_make()
517 kbase_mem_evictable_mark_reclaim(gpu_alloc); in kbase_mem_evictable_make()
519 gpu_alloc->reg->flags |= KBASE_REG_DONT_NEED; in kbase_mem_evictable_make()
523 bool kbase_mem_evictable_unmake(struct kbase_mem_phy_alloc *gpu_alloc) in kbase_mem_evictable_unmake() argument
[all...]
mali_kbase_mem.c
780 new_reg->gpu_alloc = NULL; /* no alloc bound yet */ in kbase_alloc_free_region()
814 if (!list_empty(&reg->gpu_alloc->evict_node)) { in kbase_free_alloced_region()
822 if (reg->cpu_alloc != reg->gpu_alloc) { in kbase_free_alloced_region()
823 reg->gpu_alloc->reg = NULL; in kbase_free_alloced_region()
835 kbase_mem_evictable_unmake(reg->gpu_alloc); in kbase_free_alloced_region()
846 kbase_mem_phy_alloc_put(reg->gpu_alloc); in kbase_free_alloced_region()
876 if (reg->gpu_alloc->type == KBASE_MEM_TYPE_ALIAS) { in kbase_gpu_mmap()
880 alloc = reg->gpu_alloc; in kbase_gpu_mmap()
909 kbase_mem_phy_alloc_gpu_mapped(reg->gpu_alloc); in kbase_gpu_mmap()
915 if (reg->gpu_alloc … in kbase_gpu_mmap()
[all...]
mali_kbase_mem.h
302 struct kbase_mem_phy_alloc *gpu_alloc; /* the one alloc object we mmap to the GPU when mapping this region */ member
316 KBASE_DEBUG_ASSERT(reg->gpu_alloc); in kbase_get_cpu_phy_pages()
317 KBASE_DEBUG_ASSERT(reg->cpu_alloc->nents == reg->gpu_alloc->nents); in kbase_get_cpu_phy_pages()
326 KBASE_DEBUG_ASSERT(reg->gpu_alloc); in kbase_get_gpu_phy_pages()
327 KBASE_DEBUG_ASSERT(reg->cpu_alloc->nents == reg->gpu_alloc->nents); in kbase_get_gpu_phy_pages()
329 return reg->gpu_alloc->pages; in kbase_get_gpu_phy_pages()
341 KBASE_DEBUG_ASSERT(reg->gpu_alloc); in kbase_reg_current_backed_size()
342 KBASE_DEBUG_ASSERT(reg->cpu_alloc->nents == reg->gpu_alloc->nents); in kbase_reg_current_backed_size()
403 KBASE_DEBUG_ASSERT(!reg->gpu_alloc); in kbase_reg_prepare_native()
415 reg->gpu_alloc … in kbase_reg_prepare_native()
[all...]
mali_kbase_softjobs.c
483 struct kbase_mem_phy_alloc *gpu_alloc; member
521 struct kbase_mem_phy_alloc *gpu_alloc = buffers[i].gpu_alloc; in kbase_debug_copy_finish() local
534 if (gpu_alloc) { in kbase_debug_copy_finish()
535 switch (gpu_alloc->type) { in kbase_debug_copy_finish()
544 kbase_mem_phy_alloc_put(gpu_alloc); in kbase_debug_copy_finish()
633 if (reg == NULL || reg->gpu_alloc == NULL || (reg->flags & KBASE_REG_FREE)) { in kbase_debug_copy_prepare()
638 buffers[i].gpu_alloc = kbase_mem_phy_alloc_get(reg->gpu_alloc); in kbase_debug_copy_prepare()
645 switch (reg->gpu_alloc … in kbase_debug_copy_prepare()
739 dma_buf_kmap_page(struct kbase_mem_phy_alloc *gpu_alloc, unsigned long page_num, struct page **page) in dma_buf_kmap_page() argument
779 struct kbase_mem_phy_alloc *gpu_alloc = buf_data->gpu_alloc; in kbase_mem_copy_from_extres() local
[all...]
mali_kbase_mem_linux.h
92 * @gpu_alloc: The physical allocation to make evictable
104 int kbase_mem_evictable_make(struct kbase_mem_phy_alloc *gpu_alloc);
125 struct kbase_mem_phy_alloc *gpu_alloc; member
mali_kbase_debug_mem_view.c
169 if (reg->gpu_alloc == NULL) { in debug_mem_zone_open()
180 mapping->alloc = kbase_mem_phy_alloc_get(reg->gpu_alloc); in debug_mem_zone_open()
/device/soc/rockchip/common/kernel/drivers/gpu/arm/bifrost/mmu/
mali_kbase_mmu.c
408 1, region->flags, region->gpu_alloc->group_id); in kbase_gpu_mmu_handle_write_fault()
483 if (WARN_ON(region->gpu_alloc->group_id >= in page_fault_try_alloc()
492 root_pool = &kctx->mem_pools.large[region->gpu_alloc->group_id]; in page_fault_try_alloc()
496 root_pool = &kctx->mem_pools.small[region->gpu_alloc->group_id]; in page_fault_try_alloc()
502 if (region->gpu_alloc != region->cpu_alloc) in page_fault_try_alloc()
550 if (region->gpu_alloc == region->cpu_alloc) in page_fault_try_alloc()
559 region->gpu_alloc, pool, in page_fault_try_alloc()
565 } else if (region->gpu_alloc != region->cpu_alloc) { in page_fault_try_alloc()
607 if (region->gpu_alloc != region->cpu_alloc) { in page_fault_try_alloc()
618 region->gpu_alloc, in page_fault_try_alloc()
[all...]
/device/soc/rockchip/common/vendor/drivers/gpu/arm/bifrost/mmu/
mali_kbase_mmu.c
270 region->flags, region->gpu_alloc->group_id); in kbase_gpu_mmu_handle_write_fault()
341 if (WARN_ON(region->gpu_alloc->group_id >= MEMORY_GROUP_MANAGER_NR_GROUPS)) { in page_fault_try_alloc()
349 root_pool = &kctx->mem_pools.large[region->gpu_alloc->group_id]; in page_fault_try_alloc()
353 root_pool = &kctx->mem_pools.small[region->gpu_alloc->group_id]; in page_fault_try_alloc()
359 if (region->gpu_alloc != region->cpu_alloc) { in page_fault_try_alloc()
410 if (region->gpu_alloc == region->cpu_alloc) { in page_fault_try_alloc()
418 gpu_pages[pool_level] = kbase_alloc_phy_pages_helper_locked(region->gpu_alloc, pool, in page_fault_try_alloc()
423 } else if (region->gpu_alloc != region->cpu_alloc) { in page_fault_try_alloc()
464 if (region->gpu_alloc != region->cpu_alloc) { in page_fault_try_alloc()
472 kbase_free_phy_pages_helper_locked(region->gpu_alloc, poo… in page_fault_try_alloc()
[all...]
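
The two mali_kbase_mmu.c entries show the page-fault grow path honouring the same cpu_alloc/gpu_alloc split: the memory pool is chosen by region->gpu_alloc->group_id, pages are committed to the GPU allocation, and whenever the region's two allocations are distinct objects the CPU allocation must be grown in step, with the GPU side rolled back on failure. A schematic sketch of that control flow, with hypothetical pool helpers standing in for kbase_alloc_phy_pages_helper_locked() and its free counterpart:

    #include <stdbool.h>
    #include <stddef.h>

    struct phy_alloc { size_t nents; int group_id; };

    struct region {
        struct phy_alloc *cpu_alloc;
        struct phy_alloc *gpu_alloc;
    };

    /* Hypothetical pool allocator: grow 'a' by 'n' pages. */
    static bool pool_alloc_pages(struct phy_alloc *a, size_t n)
    {
        a->nents += n;
        return true;
    }

    /* Hypothetical rollback helper. */
    static void pool_free_pages(struct phy_alloc *a, size_t n)
    {
        a->nents -= n;
    }

    /* Models page_fault_try_alloc(): back the faulting range on the
     * GPU alloc and, when the CPU alloc is a separate object, grow it
     * in lock-step or undo everything. */
    static bool fault_try_alloc(struct region *r, size_t n)
    {
        if (!pool_alloc_pages(r->gpu_alloc, n))
            return false;

        if (r->gpu_alloc != r->cpu_alloc &&
            !pool_alloc_pages(r->cpu_alloc, n)) {
            pool_free_pages(r->gpu_alloc, n); /* keep both sides in sync */
            return false;
        }
        return true;
    }

The real function also walks a hierarchy of pools (kctx->mem_pools.small/large indexed by group_id) and retries at the next pool level on shortage; none of that is reproduced here.
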
