
Searched refs:nents (Results 1 - 25 of 68) sorted by relevance


/device/soc/hisilicon/common/platform/wifi/hi3881v100/driver/oal/
oal_scatterlist.h
31 unsigned int nents; /* number of mapped entries */ member
68 static void sg_kfree(struct scatterlist *sg, unsigned int nents) in sg_kfree() argument
70 hi_unref_param(nents); in sg_kfree()
119 static struct scatterlist *sg_kmalloc(unsigned int nents, gfp_t gfp_mask) in sg_kmalloc() argument
122 return kmalloc(nents * sizeof(struct scatterlist), gfp_mask); in sg_kmalloc()
144 int __sg_alloc_table(struct sg_table *table, unsigned int nents, unsigned int max_ents, gfp_t gfp_mask, in __sg_alloc_table() argument
154 left = nents; in __sg_alloc_table()
178 table->nents = ++table->orig_nents; in __sg_alloc_table()
185 table->nents = table->orig_nents += sg_size; in __sg_alloc_table()
210 int sg_alloc_table(struct sg_table *table, unsigned int nents, gfp_t gfp_mask) in sg_alloc_table() argument
[all...]
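
The hits above are this Wi-Fi driver's local re-implementation of the kernel scatterlist allocator, where nents counts the entries in an sg_table. A minimal usage sketch under the standard Linux scatterlist API (the entry count 4 and the function name are illustrative, not from the driver):

    #include <linux/scatterlist.h>
    #include <linux/slab.h>

    /* Illustrative: allocate a 4-entry table. After sg_alloc_table()
     * succeeds, table->nents == table->orig_nents == 4; DMA mapping may
     * later lower nents if adjacent entries get coalesced. */
    static int example_alloc_sgt(struct sg_table *table)
    {
        int ret = sg_alloc_table(table, 4, GFP_KERNEL);

        if (ret)
            return ret;
        /* ... fill entries with sg_set_page()/sg_set_buf() ... */
        sg_free_table(table);
        return 0;
    }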
/device/soc/rockchip/common/kernel/drivers/gpu/arm/bifrost/
mali_kbase_mem_linux.c
229 WARN_ON(alloc->nents > atomic_read(&kctx->permanent_mapped_pages)); in kbase_phy_alloc_mapping_term()
230 atomic_sub(alloc->nents, &kctx->permanent_mapped_pages); in kbase_phy_alloc_mapping_term()
550 for (i = 0; i < reg->cpu_alloc->imported.alias.nents; i++) in kbase_mem_query()
690 0, alloc->nents); in kbase_mem_evictable_reclaim_scan_objects()
706 alloc->evicted = alloc->nents; in kbase_mem_evictable_reclaim_scan_objects()
762 kbase_process_page_usage_dec(kctx, alloc->nents); in kbase_mem_evictable_mark_reclaim()
763 new_page_count = atomic_sub_return(alloc->nents, in kbase_mem_evictable_mark_reclaim()
765 atomic_sub(alloc->nents, &kctx->kbdev->memdev.used_pages); in kbase_mem_evictable_mark_reclaim()
771 kbase_trace_gpu_mem_usage_dec(kbdev, kctx, alloc->nents); in kbase_mem_evictable_mark_reclaim()
785 new_page_count = atomic_add_return(alloc->nents, in kbase_mem_evictable_unmark_reclaim()
1725 kbase_mem_alias(struct kbase_context *kctx, u64 *flags, u64 stride, u64 nents, struct base_mem_aliasing_info *ai, u64 *num_pages) in kbase_mem_alias() argument
2418 size_t nents; local
[all...]
mali_kbase_trace_gpu_mem.c
180 kbdev->total_gpu_pages -= alloc->nents; in kbase_remove_dma_buf_usage()
183 kctx->kprcs->total_gpu_pages -= alloc->nents; in kbase_remove_dma_buf_usage()
211 kbdev->total_gpu_pages += alloc->nents; in kbase_add_dma_buf_usage()
214 kctx->kprcs->total_gpu_pages += alloc->nents; in kbase_add_dma_buf_usage()
mali_kbase_mem.c
1539 for (i = 0; i < alloc->imported.alias.nents; i++) { in kbase_gpu_mmap()
1582 !WARN_ON(reg->nr_pages < reg->gpu_alloc->nents) && in kbase_gpu_mmap()
1592 * Assume reg->gpu_alloc->nents is the number of actual pages in kbase_gpu_mmap()
1596 kctx, reg->start_pfn + reg->gpu_alloc->nents, in kbase_gpu_mmap()
1598 reg->nr_pages - reg->gpu_alloc->nents, in kbase_gpu_mmap()
1644 for (i = 0; i < alloc->imported.alias.nents; i++) { in kbase_gpu_munmap()
2219 if (nr_pages_requested > alloc->reg->nr_pages - alloc->nents) in kbase_alloc_phy_pages_helper()
2239 tp = alloc->pages + alloc->nents; in kbase_alloc_phy_pages_helper()
2363 alloc->nents += nr_pages_requested; in kbase_alloc_phy_pages_helper()
2375 alloc->nents in kbase_alloc_phy_pages_helper()
[all...]
mali_kbase_mem.h
101 * Changing of nents or *pages should only happen if the kbase_mem_phy_alloc
114 * @nents: 0..N
115 * @pages: N elements, only 0..nents are valid
137 size_t nents; member
159 size_t nents; member
322 * greater than gpu_alloc->nents)
505 * gpu_alloc->nents)
605 KBASE_DEBUG_ASSERT(reg->cpu_alloc->nents == reg->gpu_alloc->nents); in kbase_get_cpu_phy_pages()
616 KBASE_DEBUG_ASSERT(reg->cpu_alloc->nents == reg->gpu_alloc->nents); in kbase_get_gpu_phy_pages()
[all...]
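
In the kbase driver nents means something different: the number of physically backed pages of an allocation, per the header comment above (@pages has N elements, only 0..nents are valid). A reduced sketch of that invariant and of the append-then-advance pattern visible in kbase_alloc_phy_pages_helper() (struct and function names here are illustrative, not the driver's):

    #include <linux/string.h>
    #include <linux/types.h>

    /* Abridged stand-in for kbase_mem_phy_alloc: pages[] is sized for the
     * whole region, but only the first `nents` entries are backed. */
    struct example_phy_alloc {
        size_t nents;       /* pages currently backed: 0..N */
        phys_addr_t *pages; /* N elements; only 0..nents valid */
    };

    /* Growing the backing: new pages land at pages + nents, then nents
     * advances, mirroring `tp = alloc->pages + alloc->nents` above. */
    static void example_commit(struct example_phy_alloc *a,
                               const phys_addr_t *newp, size_t n)
    {
        memcpy(a->pages + a->nents, newp, n * sizeof(*newp));
        a->nents += n;
    }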
/device/soc/rockchip/common/vendor/drivers/gpu/arm/bifrost/
mali_kbase_mem_linux.c
241 WARN_ON(alloc->nents > atomic_read(&kctx->permanent_mapped_pages)); in kbase_phy_alloc_mapping_term()
242 atomic_sub(alloc->nents, &kctx->permanent_mapped_pages); in kbase_phy_alloc_mapping_term()
584 for (i = 0; i < reg->cpu_alloc->imported.alias.nents; i++) { in kbase_mem_query()
695 alloc->nents; in kbase_mem_evictable_reclaim_count_objects()
734 err = kbase_mem_shrink_gpu_mapping(kctx, alloc->reg, 0, alloc->nents); in kbase_mem_evictable_reclaim_scan_objects()
750 alloc->evicted = alloc->nents; in kbase_mem_evictable_reclaim_scan_objects()
822 kbase_process_page_usage_dec(kctx, alloc->nents); in kbase_mem_evictable_mark_reclaim()
823 new_page_count = atomic_sub_return(alloc->nents, &kctx->used_pages); in kbase_mem_evictable_mark_reclaim()
824 atomic_sub(alloc->nents, &kctx->kbdev->memdev.used_pages); in kbase_mem_evictable_mark_reclaim()
827 kbase_trace_gpu_mem_usage_dec(kbdev, kctx, alloc->nents); in kbase_mem_evictable_mark_reclaim()
1780 kbase_mem_alias(struct kbase_context *kctx, u64 *flags, u64 stride, u64 nents, struct base_mem_aliasing_info *ai, u64 *num_pages) in kbase_mem_alias() argument
2449 size_t nents; local
[all...]
mali_kbase_trace_gpu_mem.c
175 kbdev->total_gpu_pages -= alloc->nents; in kbase_remove_dma_buf_usage()
179 kctx->kprcs->total_gpu_pages -= alloc->nents; in kbase_remove_dma_buf_usage()
206 kbdev->total_gpu_pages += alloc->nents; in kbase_add_dma_buf_usage()
210 kctx->kprcs->total_gpu_pages += alloc->nents; in kbase_add_dma_buf_usage()
mali_kbase_mem.c
1264 for (i = 0; i < alloc->imported.alias.nents; i++) { in kbase_gpu_mmap()
1293 if (reg->flags & KBASE_REG_IMPORT_PAD && !WARN_ON(reg->nr_pages < reg->gpu_alloc->nents) && in kbase_gpu_mmap()
1303 * Assume reg->gpu_alloc->nents is the number of actual pages in kbase_gpu_mmap()
1306 err = kbase_mmu_insert_single_page(kctx, reg->start_pfn + reg->gpu_alloc->nents, kctx->aliasing_sink_page, in kbase_gpu_mmap()
1307 reg->nr_pages - reg->gpu_alloc->nents, in kbase_gpu_mmap()
1366 for (i = 0; i < reg->gpu_alloc->imported.alias.nents; i++) { in kbase_gpu_munmap()
1897 if (nr_pages_requested > alloc->reg->nr_pages - alloc->nents) { in kbase_alloc_phy_pages_helper()
1916 tp = alloc->pages + alloc->nents; in kbase_alloc_phy_pages_helper()
2023 alloc->nents += nr_pages_requested; in kbase_alloc_phy_pages_helper()
2035 alloc->nents in kbase_alloc_phy_pages_helper()
[all...]
mali_kbase_mem.h
101 * Changing of nents or *pages should only happen if the kbase_mem_phy_alloc
108 * @nents: 0..N
109 * @pages: N elements, only 0..nents are valid
130 size_t nents; member
152 size_t nents; member
444 * gpu_alloc->nents)
526 KBASE_DEBUG_ASSERT(reg->cpu_alloc->nents == reg->gpu_alloc->nents); in kbase_get_cpu_phy_pages()
536 KBASE_DEBUG_ASSERT(reg->cpu_alloc->nents == reg->gpu_alloc->nents); in kbase_get_gpu_phy_pages()
[all...]
/device/soc/rockchip/common/vendor/drivers/gpu/arm/midgard/
mali_kbase_mem.h
102 * Changing of nents or *pages should only happen if the kbase_mem_phy_alloc is not
109 size_t nents; /* 0..N */ member
110 phys_addr_t *pages; /* N elements, only 0..nents are valid */
145 size_t nents; member
317 KBASE_DEBUG_ASSERT(reg->cpu_alloc->nents == reg->gpu_alloc->nents); in kbase_get_cpu_phy_pages()
327 KBASE_DEBUG_ASSERT(reg->cpu_alloc->nents == reg->gpu_alloc->nents); in kbase_get_gpu_phy_pages()
342 KBASE_DEBUG_ASSERT(reg->cpu_alloc->nents == reg->gpu_alloc->nents); in kbase_reg_current_backed_size()
[all...]
mali_kbase_mem_linux.c
281 for (i = 0; i < reg->cpu_alloc->imported.alias.nents; i++) { in kbase_mem_query()
347 list_for_each_entry(alloc, &kctx->evict_list, evict_node) pages += alloc->nents; in kbase_mem_evictable_reclaim_count_objects()
386 err = kbase_mem_shrink_gpu_mapping(kctx, alloc->reg, 0, alloc->nents); in kbase_mem_evictable_reclaim_scan_objects()
402 alloc->evicted = alloc->nents; in kbase_mem_evictable_reclaim_scan_objects()
472 kbase_process_page_usage_dec(kctx, alloc->nents); in kbase_mem_evictable_mark_reclaim()
473 new_page_count = kbase_atomic_sub_pages(alloc->nents, &kctx->used_pages); in kbase_mem_evictable_mark_reclaim()
474 kbase_atomic_sub_pages(alloc->nents, &kctx->kbdev->memdev.used_pages); in kbase_mem_evictable_mark_reclaim()
488 new_page_count = kbase_atomic_add_pages(alloc->nents, &kctx->used_pages); in kbase_mem_evictable_unmark_reclaim()
489 kbase_atomic_add_pages(alloc->nents, &kctx->kbdev->memdev.used_pages); in kbase_mem_evictable_unmark_reclaim()
494 kbase_process_page_usage_inc(kctx, alloc->nents); in kbase_mem_evictable_unmark_reclaim()
1107 kbase_mem_alias(struct kbase_context *kctx, u64 *flags, u64 stride, u64 nents, struct base_mem_aliasing_info *ai, u64 *num_pages) in kbase_mem_alias() argument
[all...]
mali_kbase_mem.c
883 for (i = 0; i < alloc->imported.alias.nents; i++) { in kbase_gpu_mmap()
951 for (i = 0; i < reg->gpu_alloc->imported.alias.nents; i++) { in kbase_gpu_munmap()
1374 size_t old_page_count = alloc->nents; in kbase_alloc_phy_pages_helper()
1396 alloc->nents += nr_pages_requested; in kbase_alloc_phy_pages_helper()
1418 KBASE_DEBUG_ASSERT(alloc->nents >= nr_pages_to_free); in kbase_free_phy_pages_helper()
1425 start_free = alloc->pages + alloc->nents - nr_pages_to_free; in kbase_free_phy_pages_helper()
1431 alloc->nents -= nr_pages_to_free; in kbase_free_phy_pages_helper()
1462 kbase_free_phy_pages_helper(alloc, alloc->nents); in kbase_mem_kref_free()
1472 for (i = 0; i < alloc->imported.alias.nents; i++) { in kbase_mem_kref_free()
1807 data->active_value += reg->gpu_alloc->nents; in kbase_jit_debugfs_phys_get()
[all...]
mali_kbase_mem_linux.h
41 u64 kbase_mem_alias(struct kbase_context *kctx, u64 *flags, u64 stride, u64 nents, struct base_mem_aliasing_info *ai,
/device/soc/rockchip/common/kernel/drivers/gpu/arm/midgard/
mali_kbase_mem.h
104 * Changing of nents or *pages should only happen if the kbase_mem_phy_alloc is not
111 size_t nents; /* 0..N */ member
112 phys_addr_t *pages; /* N elements, only 0..nents are valid */
147 size_t nents; member
316 KBASE_DEBUG_ASSERT(reg->cpu_alloc->nents == reg->gpu_alloc->nents); in kbase_get_cpu_phy_pages()
326 KBASE_DEBUG_ASSERT(reg->cpu_alloc->nents == reg->gpu_alloc->nents); in kbase_get_gpu_phy_pages()
340 KBASE_DEBUG_ASSERT(reg->cpu_alloc->nents == reg->gpu_alloc->nents); in kbase_reg_current_backed_size()
[all...]
mali_kbase_mem_linux.c
288 for (i = 0; i < reg->cpu_alloc->imported.alias.nents; i++) in kbase_mem_query()
349 pages += alloc->nents; in kbase_mem_evictable_reclaim_count_objects()
390 0, alloc->nents); in kbase_mem_evictable_reclaim_scan_objects()
406 alloc->evicted = alloc->nents; in kbase_mem_evictable_reclaim_scan_objects()
475 kbase_process_page_usage_dec(kctx, alloc->nents); in kbase_mem_evictable_mark_reclaim()
476 new_page_count = kbase_atomic_sub_pages(alloc->nents, in kbase_mem_evictable_mark_reclaim()
478 kbase_atomic_sub_pages(alloc->nents, &kctx->kbdev->memdev.used_pages); in kbase_mem_evictable_mark_reclaim()
495 new_page_count = kbase_atomic_add_pages(alloc->nents, in kbase_mem_evictable_unmark_reclaim()
497 kbase_atomic_add_pages(alloc->nents, &kctx->kbdev->memdev.used_pages); in kbase_mem_evictable_unmark_reclaim()
502 kbase_process_page_usage_inc(kctx, alloc->nents); in kbase_mem_evictable_unmark_reclaim()
1103 kbase_mem_alias(struct kbase_context *kctx, u64 *flags, u64 stride, u64 nents, struct base_mem_aliasing_info *ai, u64 *num_pages) in kbase_mem_alias() argument
[all...]
mali_kbase_mem.c
911 for (i = 0; i < alloc->imported.alias.nents; i++) { in kbase_gpu_mmap()
980 for (i = 0; i < reg->gpu_alloc->imported.alias.nents; i++) in kbase_gpu_munmap()
1415 size_t old_page_count = alloc->nents; in kbase_alloc_phy_pages_helper()
1439 alloc->nents += nr_pages_requested; in kbase_alloc_phy_pages_helper()
1463 KBASE_DEBUG_ASSERT(alloc->nents >= nr_pages_to_free); in kbase_free_phy_pages_helper()
1469 start_free = alloc->pages + alloc->nents - nr_pages_to_free; in kbase_free_phy_pages_helper()
1479 alloc->nents -= nr_pages_to_free; in kbase_free_phy_pages_helper()
1514 kbase_free_phy_pages_helper(alloc, alloc->nents); in kbase_mem_kref_free()
1524 for (i = 0; i < alloc->imported.alias.nents; i++) in kbase_mem_kref_free()
1836 data->active_value += reg->gpu_alloc->nents; in kbase_jit_debugfs_phys_get()
[all...]
/device/soc/rockchip/common/sdk_linux/include/linux/
dma-mapping.h
122 int dma_map_sg_attrs(struct device *dev, struct scatterlist *sg, int nents, enum dma_data_direction dir,
124 void dma_unmap_sg_attrs(struct device *dev, struct scatterlist *sg, int nents, enum dma_data_direction dir,
160 static inline int dma_map_sg_attrs(struct device *dev, struct scatterlist *sg, int nents, enum dma_data_direction dir, in dma_map_sg_attrs() argument
165 static inline void dma_unmap_sg_attrs(struct device *dev, struct scatterlist *sg, int nents, in dma_unmap_sg_attrs() argument
315 int nents; in dma_map_sgtable() local
317 nents = dma_map_sg_attrs(dev, sgt->sgl, sgt->orig_nents, dir, attrs); in dma_map_sgtable()
318 if (nents <= 0) { in dma_map_sgtable()
321 sgt->nents = nents; in dma_map_sgtable()
dma-map-ops.h
46 int (*map_sg)(struct device *dev, struct scatterlist *sg, int nents,
48 void (*unmap_sg)(struct device *dev, struct scatterlist *sg, int nents,
61 int nents, enum dma_data_direction dir);
63 int nents, enum dma_data_direction dir);
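
These declarations are where the two meanings of nents meet: dma_map_sg_attrs() is handed orig_nents scatterlist entries and returns the number of DMA segments actually produced, which dma_map_sgtable() (lines 315-321 above) stores into sgt->nents. A hedged sketch of the same pattern in open code (function name illustrative):

    #include <linux/dma-mapping.h>

    /* Illustrative: map a pre-filled sg_table for device DMA. An IOMMU
     * may merge entries, so the returned count (stored in sgt->nents)
     * can be smaller than sgt->orig_nents. */
    static int example_map(struct device *dev, struct sg_table *sgt)
    {
        int nents = dma_map_sg(dev, sgt->sgl, sgt->orig_nents, DMA_TO_DEVICE);

        if (nents <= 0)
            return -EINVAL;
        sgt->nents = nents;
        return 0;
    }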
/device/soc/rockchip/common/sdk_linux/drivers/iommu/
dma-iommu.c
828 static int finalise_sg_ext(struct device *dev, struct scatterlist *sg, int nents, dma_addr_t dma_addr) in finalise_sg_ext() argument
835 for_each_sg(sg, s, nents, i) in finalise_sg_ext()
882 static void invalidate_sg_ext(struct scatterlist *sg, int nents) in invalidate_sg_ext() argument
887 for_each_sg(sg, s, nents, i) in invalidate_sg_ext()
907 static int iommu_dma_map_sg(struct device *dev, struct scatterlist *sg, int nents, enum dma_data_direction dir, in iommu_dma_map_sg() argument
925 iommu_dma_sync_sg_for_device(dev, sg, nents, dir); in iommu_dma_map_sg()
934 for_each_sg(sg, s, nents, i) in iommu_dma_map_sg()
977 if (iommu_map_sg_atomic(domain, iova, sg, nents, prot) < iova_len) { in iommu_dma_map_sg()
981 return finalise_sg_ext(dev, sg, nents, iova); in iommu_dma_map_sg()
986 invalidate_sg_ext(sg, nents); in iommu_dma_map_sg()
990 iommu_dma_unmap_sg(struct device *dev, struct scatterlist *sg, int nents, enum dma_data_direction dir, unsigned long attrs) in iommu_dma_unmap_sg() argument
[all...]
/device/soc/rockchip/common/vendor/drivers/gpu/arm/mali400/mali/linux/
mali_memory_secure.c
78 for_each_sg(secure_mem->sgt->sgl, sg, secure_mem->sgt->nents, i) in mali_mem_secure_mali_map()
119 for_each_sg(secure_mem->sgt->sgl, sg, secure_mem->sgt->nents, i) in mali_mem_secure_cpu_map()
/device/soc/rockchip/common/kernel/drivers/gpu/arm/mali400/mali/linux/
mali_memory_secure.c
76 for_each_sg(secure_mem->sgt->sgl, sg, secure_mem->sgt->nents, i) { in mali_mem_secure_mali_map()
118 for_each_sg(secure_mem->sgt->sgl, sg, secure_mem->sgt->nents, i) { in mali_mem_secure_cpu_map()
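
Both mali400 copies walk the list with for_each_sg() bounded by sgt->nents, i.e. the count of mapped segments, not orig_nents. Roughly (the loop body is a placeholder for the driver's page programming):

    #include <linux/scatterlist.h>

    /* Iterate the mapped segments: sgt->nents is the right bound here,
     * since mapping may have merged the originally allocated entries. */
    static void example_walk(struct sg_table *sgt)
    {
        struct scatterlist *sg;
        int i;

        for_each_sg(sgt->sgl, sg, sgt->nents, i) {
            dma_addr_t addr = sg_dma_address(sg);
            unsigned int len = sg_dma_len(sg);

            (void)addr; /* ... program hardware with addr/len ... */
            (void)len;
        }
    }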
/device/soc/rockchip/common/kernel/drivers/gpu/arm/mali400/ump/linux/
ump_ukk_ref_wrappers.c
162 blocks = (ump_dd_physical_block *)_mali_osk_malloc(sizeof(ump_dd_physical_block) * sgt->nents); in ump_dmabuf_import_wrapper()
168 for_each_sg(sgt->sgl, sgl, sgt->nents, i) { in ump_dmabuf_import_wrapper()
/device/soc/rockchip/common/sdk_linux/drivers/gpu/drm/rockchip/
rockchip_drm_gem.c
286 for_each_sg(sgt->sgl, s, sgt->nents, i) sg_dma_address(s) = sg_phys(s); in rockchip_gem_alloc_dma()
948 dma_sync_sg_for_cpu(drm->dev, rk_obj->sgt->sgl, rk_obj->sgt->nents, dir); in rockchip_gem_prime_begin_cpu_access()
961 dma_sync_sg_for_device(drm->dev, rk_obj->sgt->sgl, rk_obj->sgt->nents, dir); in rockchip_gem_prime_end_cpu_access()
965 static int rockchip_gem_prime_sgl_sync_range(struct device *dev, struct scatterlist *sgl, unsigned int nents, in rockchip_gem_prime_sgl_sync_range() argument
974 for_each_sg(sgl, sg, nents, i) in rockchip_gem_prime_sgl_sync_range()
1015 rockchip_gem_prime_sgl_sync_range(drm->dev, rk_obj->sgt->sgl, rk_obj->sgt->nents, offset, len, dir, true); in rockchip_gem_prime_begin_cpu_access_partial()
1030 rockchip_gem_prime_sgl_sync_range(drm->dev, rk_obj->sgt->sgl, rk_obj->sgt->nents, offset, len, dir, false); in rockchip_gem_prime_end_cpu_access_partial()
/device/soc/rockchip/common/vendor/drivers/media/platform/rockchip/cif/
common.c
115 ret = dma_map_sg(hw->dev, sg->sgl, sg->nents, DMA_BIDIRECTIONAL); in rkcif_alloc_page_dummy_buf()
139 dma_unmap_sg(dev->hw_dev->dev, sg->sgl, sg->nents, DMA_BIDIRECTIONAL); in rkcif_free_page_dummy_buf()
/device/soc/rockchip/rk3588/kernel/drivers/media/platform/rockchip/cif/
common.c
113 ret = dma_map_sg(hw->dev, sg->sgl, sg->nents, DMA_BIDIRECTIONAL); in rkcif_alloc_page_dummy_buf()
137 dma_unmap_sg(dev->hw_dev->dev, sg->sgl, sg->nents, DMA_BIDIRECTIONAL); in rkcif_free_page_dummy_buf()
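
The rkcif hits show the matched pair: dma_unmap_sg() is called with the same entry count that was passed to dma_map_sg(), regardless of how many segments mapping returned. A minimal sketch of that pairing (function name illustrative):

    #include <linux/dma-mapping.h>

    /* Illustrative round trip: unmap with the count given to map,
     * not with map's return value. */
    static void example_roundtrip(struct device *dev, struct sg_table *sgt)
    {
        int mapped = dma_map_sg(dev, sgt->sgl, sgt->nents, DMA_BIDIRECTIONAL);

        if (mapped <= 0)
            return;
        /* ... perform DMA ... */
        dma_unmap_sg(dev, sgt->sgl, sgt->nents, DMA_BIDIRECTIONAL);
    }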

Completed in 32 milliseconds
