/kernel/linux/linux-6.6/drivers/gpu/drm/i915/

i915_scatterlist.c
     84  const u32 max_segment = round_down(UINT_MAX, page_alignment);  in i915_rsgt_from_mm_node() local
     85  const u32 segment_pages = max_segment >> PAGE_SHIFT;  in i915_rsgt_from_mm_node()
     91  GEM_BUG_ON(!max_segment);  in i915_rsgt_from_mm_node()
    121  if (offset != prev_end || sg->length >= max_segment) {  in i915_rsgt_from_mm_node()
    133  len = min_t(u64, block_size, max_segment - sg->length);  in i915_rsgt_from_mm_node()
    169  const u32 max_segment = round_down(UINT_MAX, page_alignment);  in i915_rsgt_from_buddy_resource() local
    179  GEM_BUG_ON(!max_segment);  in i915_rsgt_from_buddy_resource()
    211  if (offset != prev_end || sg->length >= max_segment) {  in i915_rsgt_from_buddy_resource()
    223  len = min_t(u64, block_size, max_segment - sg->length);  in i915_rsgt_from_buddy_resource()
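Note: both i915_rsgt_* helpers above cap every scatterlist entry at a page-aligned maximum (round_down(UINT_MAX, page_alignment)) and carve larger contiguous blocks into chunks no bigger than that cap. A minimal sketch of that splitting idea follows; the split_block()/add_chunk() names are illustrative only, not part of the i915 sources.

#include <linux/kernel.h>   /* round_down(), min_t() */
#include <linux/mm.h>       /* PAGE_SIZE */

/* Hedged sketch: split one contiguous block into max_segment-sized chunks. */
static void split_block(u64 block_start, u64 block_size, u32 page_alignment,
			void (*add_chunk)(u64 start, u64 len))
{
	/* Largest length that still fits in sg->length and stays page-aligned. */
	const u32 max_segment = round_down(UINT_MAX, page_alignment);
	u64 offset = block_start;

	while (block_size) {
		u64 len = min_t(u64, block_size, max_segment);

		add_chunk(offset, len);
		offset += len;
		block_size -= len;
	}
}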
/kernel/linux/linux-5.10/drivers/gpu/drm/i915/gem/

i915_gem_internal.c
     46  unsigned int max_segment;  in i915_gem_object_get_pages_internal() local
     48  max_segment = swiotlb_max_segment();  in i915_gem_object_get_pages_internal()
     49  if (max_segment) {  in i915_gem_object_get_pages_internal()
     50  max_segment = max_t(unsigned int, max_segment,  in i915_gem_object_get_pages_internal()
     52  max_order = min(max_order, ilog2(max_segment));  in i915_gem_object_get_pages_internal()
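Note: the linux-5.10 hits above show the SWIOTLB-aware clamp for internal objects: when bounce buffering is active, the allocation order is capped so no single chunk exceeds what swiotlb can map. A hedged sketch of just that clamp (the helper name is illustrative):

#include <linux/swiotlb.h>  /* swiotlb_max_segment(), v5.10-era API */
#include <linux/log2.h>     /* ilog2() */
#include <linux/mm.h>       /* PAGE_SIZE, PAGE_SHIFT */

static unsigned int clamp_order_to_swiotlb(unsigned int max_order)
{
	unsigned int max_segment = swiotlb_max_segment();

	/* 0 means no bounce buffer is in use, so no clamp is needed. */
	if (max_segment) {
		max_segment = max_t(unsigned int, max_segment,
				    PAGE_SIZE) >> PAGE_SHIFT;
		max_order = min(max_order, ilog2(max_segment));
	}
	return max_order;
}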
i915_gem_shmem.c
     40  unsigned int max_segment = i915_sg_segment_size();  in shmem_get_pages() local
    136  sg->length >= max_segment ||  in shmem_get_pages()
    167  if (max_segment > PAGE_SIZE) {  in shmem_get_pages()
    172  max_segment = PAGE_SIZE;  in shmem_get_pages()
i915_gem_userptr.c
    403  unsigned int max_segment = i915_sg_segment_size();  in __i915_gem_userptr_alloc_pages() local
    415  num_pages << PAGE_SHIFT, max_segment,  in __i915_gem_userptr_alloc_pages()
    426  if (max_segment > PAGE_SIZE) {  in __i915_gem_userptr_alloc_pages()
    427  max_segment = PAGE_SIZE;  in __i915_gem_userptr_alloc_pages()
/kernel/linux/linux-6.6/drivers/gpu/drm/i915/gem/

i915_gem_internal.c
     40  unsigned int max_segment;  in i915_gem_object_get_pages_internal() local
     47  max_segment = i915_sg_segment_size(i915->drm.dev) >> PAGE_SHIFT;  in i915_gem_object_get_pages_internal()
     48  max_order = min(max_order, get_order(max_segment));  in i915_gem_object_get_pages_internal()
i915_gem_shmem.c
     66  unsigned int max_segment)  in shmem_sg_alloc_table()
    157  sg->length >= max_segment ||  in shmem_sg_alloc_table()
    210  unsigned int max_segment = i915_sg_segment_size(i915->drm.dev);  in shmem_get_pages() local
    230  max_segment);  in shmem_get_pages()
    241  if (max_segment > PAGE_SIZE) {  in shmem_get_pages()
    247  max_segment = PAGE_SIZE;  in shmem_get_pages()
     63  shmem_sg_alloc_table(struct drm_i915_private *i915, struct sg_table *st, size_t size, struct intel_memory_region *mr, struct address_space *mapping, unsigned int max_segment)  shmem_sg_alloc_table() argument
i915_gem_userptr.c
    131  unsigned int max_segment = i915_sg_segment_size(obj->base.dev->dev);  in i915_gem_userptr_get_pages() local
    156  max_segment, GFP_KERNEL);  in i915_gem_userptr_get_pages()
    164  if (max_segment > PAGE_SIZE) {  in i915_gem_userptr_get_pages()
    165  max_segment = PAGE_SIZE;  in i915_gem_userptr_get_pages()
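Note: the shmem and userptr hits (both 5.10 and 6.6) share one pattern: build the scatterlist with the preferred max_segment, and if DMA mapping later fails, rebuild it with PAGE_SIZE segments as the most conservative fallback. A hedged sketch of that pattern, assuming sg_alloc_table_from_pages_segment() as the allocation helper; try_map() stands in for the real dma_map_sgtable()-based mapping step and the i915-specific error handling:

#include <linux/scatterlist.h>

static int build_sgt_with_fallback(struct sg_table *st, struct page **pages,
				   unsigned int n_pages, unsigned long size,
				   unsigned int max_segment,
				   int (*try_map)(struct sg_table *))
{
	int ret;

retry:
	ret = sg_alloc_table_from_pages_segment(st, pages, n_pages, 0,
						size, max_segment, GFP_KERNEL);
	if (ret)
		return ret;

	ret = try_map(st);
	if (ret && max_segment > PAGE_SIZE) {
		/* Mapping failed; retry with single-page segments. */
		sg_free_table(st);
		max_segment = PAGE_SIZE;
		goto retry;
	}
	if (ret)
		sg_free_table(st);
	return ret;
}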
i915_gem_ttm.c
    195  const unsigned int max_segment = i915_sg_segment_size(i915->drm.dev);  in i915_ttm_tt_shmem_populate() local
    223  max_segment);  in i915_ttm_tt_shmem_populate()
i915_gem_object.h
    866  unsigned int max_segment);
/kernel/linux/linux-5.10/lib/

scatterlist.c
    407  * @max_segment: Maximum size of a scatterlist element in bytes
    416  * entry up to the maximum size specified in @max_segment. A user may
    430  unsigned long size, unsigned int max_segment,  in __sg_alloc_table_from_pages()
    439  * The algorithm below requires max_segment to be aligned to PAGE_SIZE  in __sg_alloc_table_from_pages()
    442  max_segment = ALIGN_DOWN(max_segment, PAGE_SIZE);  in __sg_alloc_table_from_pages()
    443  if (WARN_ON(max_segment < PAGE_SIZE))  in __sg_alloc_table_from_pages()
    460  if (prv->length + PAGE_SIZE > max_segment)  in __sg_alloc_table_from_pages()
    476  if (seg_len >= max_segment ||  in __sg_alloc_table_from_pages()
    492  if (seg_len >= max_segment ||  in __sg_alloc_table_from_pages()
    428  __sg_alloc_table_from_pages(struct sg_table *sgt, struct page **pages, unsigned int n_pages, unsigned int offset, unsigned long size, unsigned int max_segment, struct scatterlist *prv, unsigned int left_pages, gfp_t gfp_mask)  __sg_alloc_table_from_pages() argument
    [all...]
/kernel/linux/linux-6.6/lib/

scatterlist.c
    433  * @max_segment: Maximum size of a scatterlist element in bytes
    441  * size specified in @max_segment. A user may provide an offset at a start
    456  unsigned long size, unsigned int max_segment,  in sg_alloc_append_table_from_pages()
    465  * The algorithm below requires max_segment to be aligned to PAGE_SIZE  in sg_alloc_append_table_from_pages()
    468  max_segment = ALIGN_DOWN(max_segment, PAGE_SIZE);  in sg_alloc_append_table_from_pages()
    469  if (WARN_ON(max_segment < PAGE_SIZE))  in sg_alloc_append_table_from_pages()
    487  if (sgt_append->prv->length + PAGE_SIZE > max_segment)  in sg_alloc_append_table_from_pages()
    504  if (seg_len >= max_segment ||  in sg_alloc_append_table_from_pages()
    520  if (seg_len >= max_segment ||  in sg_alloc_append_table_from_pages()
    454  sg_alloc_append_table_from_pages(struct sg_append_table *sgt_append, struct page **pages, unsigned int n_pages, unsigned int offset, unsigned long size, unsigned int max_segment, unsigned int left_pages, gfp_t gfp_mask)  sg_alloc_append_table_from_pages() argument
    578  sg_alloc_table_from_pages_segment(struct sg_table *sgt, struct page **pages, unsigned int n_pages, unsigned int offset, unsigned long size, unsigned int max_segment, gfp_t gfp_mask)  sg_alloc_table_from_pages_segment() argument
    [all...]
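Note: as the kerneldoc fragments above state, max_segment is rounded down to a PAGE_SIZE multiple inside the helper and must end up at least PAGE_SIZE, otherwise the call WARNs and returns an error. A minimal usage sketch for sg_alloc_table_from_pages_segment(); the caller name and the page array are assumptions for illustration:

#include <linux/scatterlist.h>

static int map_pages_into_sgt(struct sg_table *sgt, struct page **pages,
			      unsigned int n_pages, unsigned int max_segment)
{
	/*
	 * max_segment is rounded down to PAGE_SIZE internally, so pass a
	 * value of at least one page (UINT_MAX effectively means "no limit").
	 */
	return sg_alloc_table_from_pages_segment(sgt, pages, n_pages, 0,
						 (unsigned long)n_pages << PAGE_SHIFT,
						 max_segment, GFP_KERNEL);
}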
/kernel/linux/linux-5.10/kernel/dma/

swiotlb.c
     93  static unsigned int max_segment;  variable
    140  return unlikely(no_iotlb_memory) ? 0 : max_segment;  in swiotlb_max_segment()
    147  max_segment = 1;  in swiotlb_set_max_segment()
    149  max_segment = rounddown(val, PAGE_SIZE);  in swiotlb_set_max_segment()
    334  max_segment = 0;  in swiotlb_cleanup()
/kernel/linux/linux-5.10/drivers/gpu/drm/

drm_prime.c
    816  size_t max_segment = 0;  in drm_prime_pages_to_sg() local
    823  max_segment = dma_max_mapping_size(dev->dev);  in drm_prime_pages_to_sg()
    824  if (max_segment == 0 || max_segment > SCATTERLIST_MAX_SEGMENT)  in drm_prime_pages_to_sg()
    825  max_segment = SCATTERLIST_MAX_SEGMENT;  in drm_prime_pages_to_sg()
    828  max_segment,  in drm_prime_pages_to_sg()
/kernel/linux/linux-6.6/drivers/gpu/drm/

drm_prime.c
    811  size_t max_segment = 0;  in drm_prime_pages_to_sg() local
    819  max_segment = dma_max_mapping_size(dev->dev);  in drm_prime_pages_to_sg()
    820  if (max_segment == 0)  in drm_prime_pages_to_sg()
    821  max_segment = UINT_MAX;  in drm_prime_pages_to_sg()
    824  max_segment, GFP_KERNEL);  in drm_prime_pages_to_sg()
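Note: the two drm_prime.c hits show how the segment cap is derived for dma-buf export: ask the DMA layer via dma_max_mapping_size() and treat 0 (unknown) as "no limit" (SCATTERLIST_MAX_SEGMENT in 5.10, UINT_MAX in 6.6). A hedged sketch of the 6.6-style derivation; the helper name is illustrative:

#include <linux/kernel.h>       /* UINT_MAX */
#include <linux/dma-mapping.h>  /* dma_max_mapping_size() */

static unsigned int prime_max_segment(struct device *dev)
{
	size_t max_segment = 0;

	if (dev)
		max_segment = dma_max_mapping_size(dev);
	/* 0 means the limit is unknown; also keep the value within sg->length range. */
	if (max_segment == 0 || max_segment > UINT_MAX)
		max_segment = UINT_MAX;
	return max_segment;
}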
/kernel/linux/linux-6.6/include/linux/

scatterlist.h
    444  unsigned int max_segment,
    449  unsigned int max_segment, gfp_t gfp_mask);
/kernel/linux/linux-6.6/drivers/gpu/drm/i915/selftests/

intel_memory_region.c
    464  unsigned int max_segment;  in igt_mock_max_segment() local
    481  max_segment = round_down(UINT_MAX, ps);  in igt_mock_max_segment()
    501  if (size < max_segment) {  in igt_mock_max_segment()
    503  __func__, max_segment, size);  in igt_mock_max_segment()
    511  if (sg->length > max_segment) {  in igt_mock_max_segment()
    513  __func__, sg->length, max_segment);  in igt_mock_max_segment()
/kernel/linux/linux-5.10/include/linux/

scatterlist.h
    296  unsigned long size, unsigned int max_segment,