Home
last modified time | relevance | path

Searched refs:sg_dma_len (Results 1 - 25 of 702) sorted by relevance

12345678910>>...29

/kernel/linux/linux-5.10/drivers/parisc/
H A Diommu-helpers.h47 BUG_ON(pdirp && (dma_len != sg_dma_len(dma_sg))); in iommu_fill_pdir()
51 dma_len = sg_dma_len(startsg); in iommu_fill_pdir()
52 sg_dma_len(startsg) = 0; in iommu_fill_pdir()
71 sg_dma_len(dma_sg) += startsg->length; in iommu_fill_pdir()
125 sg_dma_len(startsg) = 0; in iommu_coalesce_chunks()
142 sg_dma_len(startsg) = 0; in iommu_coalesce_chunks()
171 sg_dma_len(contig_sg) = dma_len; in iommu_coalesce_chunks()
/kernel/linux/linux-6.6/drivers/parisc/
H A Diommu-helpers.h47 BUG_ON(pdirp && (dma_len != sg_dma_len(dma_sg))); in iommu_fill_pdir()
51 dma_len = sg_dma_len(startsg); in iommu_fill_pdir()
52 sg_dma_len(startsg) = 0; in iommu_fill_pdir()
71 sg_dma_len(dma_sg) += startsg->length; in iommu_fill_pdir()
125 sg_dma_len(startsg) = 0; in iommu_coalesce_chunks()
142 sg_dma_len(startsg) = 0; in iommu_coalesce_chunks()
171 sg_dma_len(contig_sg) = dma_len; in iommu_coalesce_chunks()
/kernel/linux/linux-5.10/drivers/media/pci/tw68/
H A Dtw68-risc.c57 while (offset && offset >= sg_dma_len(sg)) { in tw68_risc_field()
58 offset -= sg_dma_len(sg); in tw68_risc_field()
61 if (bpl <= sg_dma_len(sg) - offset) { in tw68_risc_field()
76 done = (sg_dma_len(sg) - offset); in tw68_risc_field()
84 while (todo > sg_dma_len(sg)) { in tw68_risc_field()
87 sg_dma_len(sg)); in tw68_risc_field()
89 todo -= sg_dma_len(sg); in tw68_risc_field()
91 done += sg_dma_len(sg); in tw68_risc_field()
/kernel/linux/linux-6.6/drivers/media/pci/tw68/
H A Dtw68-risc.c57 while (offset && offset >= sg_dma_len(sg)) { in tw68_risc_field()
58 offset -= sg_dma_len(sg); in tw68_risc_field()
61 if (bpl <= sg_dma_len(sg) - offset) { in tw68_risc_field()
76 done = (sg_dma_len(sg) - offset); in tw68_risc_field()
84 while (todo > sg_dma_len(sg)) { in tw68_risc_field()
87 sg_dma_len(sg)); in tw68_risc_field()
89 todo -= sg_dma_len(sg); in tw68_risc_field()
91 done += sg_dma_len(sg); in tw68_risc_field()
/kernel/linux/linux-6.6/drivers/infiniband/core/
H A Dumem_dmabuf.c40 if (start < cur + sg_dma_len(sg) && cur < end) in ib_umem_dmabuf_map_pages()
42 if (cur <= start && start < cur + sg_dma_len(sg)) { in ib_umem_dmabuf_map_pages()
48 sg_dma_len(sg) -= offset; in ib_umem_dmabuf_map_pages()
51 if (cur < end && end <= cur + sg_dma_len(sg)) { in ib_umem_dmabuf_map_pages()
52 unsigned long trim = cur + sg_dma_len(sg) - end; in ib_umem_dmabuf_map_pages()
56 sg_dma_len(sg) -= trim; in ib_umem_dmabuf_map_pages()
59 cur += sg_dma_len(sg); in ib_umem_dmabuf_map_pages()
94 sg_dma_len(umem_dmabuf->first_sg) += in ib_umem_dmabuf_unmap_pages()
100 sg_dma_len(umem_dmabuf->last_sg) += in ib_umem_dmabuf_unmap_pages()
/kernel/linux/linux-6.6/drivers/gpu/drm/i915/
H A Di915_scatterlist.h34 if (dma && s.sgp && sg_dma_len(s.sgp) == 0) { in __sgt_iter()
40 s.max += sg_dma_len(s.sgp); in __sgt_iter()
57 return sg_dma_len(sg) >> PAGE_SHIFT; in __sg_dma_page_count()
121 while (sg && sg_dma_len(sg)) { in i915_sg_dma_sizes()
123 GEM_BUG_ON(!IS_ALIGNED(sg_dma_len(sg), PAGE_SIZE)); in i915_sg_dma_sizes()
124 page_sizes |= sg_dma_len(sg); in i915_sg_dma_sizes()
H A Di915_scatterlist.c31 sg_dma_len(new_sg) = sg_dma_len(sg); in i915_sg_trim()
128 sg_dma_len(sg) = 0; in i915_rsgt_from_mm_node()
135 sg_dma_len(sg) += len; in i915_rsgt_from_mm_node()
218 sg_dma_len(sg) = 0; in i915_rsgt_from_buddy_resource()
225 sg_dma_len(sg) += len; in i915_rsgt_from_buddy_resource()
/kernel/linux/linux-5.10/net/rds/
H A Dib_frmr.c134 ret = ib_map_mr_sg_zbva(frmr->mr, ibmr->sg, ibmr->sg_dma_len, in rds_ib_post_reg_frmr()
136 if (unlikely(ret != ibmr->sg_dma_len)) in rds_ib_post_reg_frmr()
205 ibmr->sg_dma_len = 0; in rds_ib_map_frmr()
207 WARN_ON(ibmr->sg_dma_len); in rds_ib_map_frmr()
208 ibmr->sg_dma_len = ib_dma_map_sg(dev, ibmr->sg, ibmr->sg_len, in rds_ib_map_frmr()
210 if (unlikely(!ibmr->sg_dma_len)) { in rds_ib_map_frmr()
220 for (i = 0; i < ibmr->sg_dma_len; ++i) { in rds_ib_map_frmr()
221 unsigned int dma_len = sg_dma_len(&ibmr->sg[i]); in rds_ib_map_frmr()
233 if (i < ibmr->sg_dma_len - 1) in rds_ib_map_frmr()
262 ibmr->sg_dma_len in rds_ib_map_frmr()
[all...]
H A Dib.h327 unsigned int sg_dma_len, in rds_ib_dma_sync_sg_for_cpu()
333 for_each_sg(sglist, sg, sg_dma_len, i) { in rds_ib_dma_sync_sg_for_cpu()
335 sg_dma_len(sg), direction); in rds_ib_dma_sync_sg_for_cpu()
342 unsigned int sg_dma_len, in rds_ib_dma_sync_sg_for_device()
348 for_each_sg(sglist, sg, sg_dma_len, i) { in rds_ib_dma_sync_sg_for_device()
350 sg_dma_len(sg), direction); in rds_ib_dma_sync_sg_for_device()
325 rds_ib_dma_sync_sg_for_cpu(struct ib_device *dev, struct scatterlist *sglist, unsigned int sg_dma_len, int direction) rds_ib_dma_sync_sg_for_cpu() argument
340 rds_ib_dma_sync_sg_for_device(struct ib_device *dev, struct scatterlist *sglist, unsigned int sg_dma_len, int direction) rds_ib_dma_sync_sg_for_device() argument
/kernel/linux/linux-6.6/net/rds/
H A Dib_frmr.c134 ret = ib_map_mr_sg_zbva(frmr->mr, ibmr->sg, ibmr->sg_dma_len, in rds_ib_post_reg_frmr()
136 if (unlikely(ret != ibmr->sg_dma_len)) in rds_ib_post_reg_frmr()
205 ibmr->sg_dma_len = 0; in rds_ib_map_frmr()
207 WARN_ON(ibmr->sg_dma_len); in rds_ib_map_frmr()
208 ibmr->sg_dma_len = ib_dma_map_sg(dev, ibmr->sg, ibmr->sg_len, in rds_ib_map_frmr()
210 if (unlikely(!ibmr->sg_dma_len)) { in rds_ib_map_frmr()
220 for (i = 0; i < ibmr->sg_dma_len; ++i) { in rds_ib_map_frmr()
221 unsigned int dma_len = sg_dma_len(&ibmr->sg[i]); in rds_ib_map_frmr()
233 if (i < ibmr->sg_dma_len - 1) in rds_ib_map_frmr()
262 ibmr->sg_dma_len in rds_ib_map_frmr()
[all...]
H A Dib.h327 unsigned int sg_dma_len, in rds_ib_dma_sync_sg_for_cpu()
333 for_each_sg(sglist, sg, sg_dma_len, i) { in rds_ib_dma_sync_sg_for_cpu()
335 sg_dma_len(sg), direction); in rds_ib_dma_sync_sg_for_cpu()
342 unsigned int sg_dma_len, in rds_ib_dma_sync_sg_for_device()
348 for_each_sg(sglist, sg, sg_dma_len, i) { in rds_ib_dma_sync_sg_for_device()
350 sg_dma_len(sg), direction); in rds_ib_dma_sync_sg_for_device()
325 rds_ib_dma_sync_sg_for_cpu(struct ib_device *dev, struct scatterlist *sglist, unsigned int sg_dma_len, int direction) rds_ib_dma_sync_sg_for_cpu() argument
340 rds_ib_dma_sync_sg_for_device(struct ib_device *dev, struct scatterlist *sglist, unsigned int sg_dma_len, int direction) rds_ib_dma_sync_sg_for_device() argument
/kernel/linux/linux-5.10/lib/
H A Dsg_split.c36 sglen = mapped ? sg_dma_len(sg) : sg->length; in sg_calculate_split()
95 sg_dma_len(out_sg) = 0; in sg_split_phys()
114 sg_dma_len(out_sg) = sg_dma_len(in_sg); in sg_split_mapped()
117 sg_dma_len(out_sg) -= split->skip_sg0; in sg_split_mapped()
121 sg_dma_len(--out_sg) = split->length_last_sg; in sg_split_mapped()
/kernel/linux/linux-6.6/lib/
H A Dsg_split.c36 sglen = mapped ? sg_dma_len(sg) : sg->length; in sg_calculate_split()
95 sg_dma_len(out_sg) = 0; in sg_split_phys()
114 sg_dma_len(out_sg) = sg_dma_len(in_sg); in sg_split_mapped()
117 sg_dma_len(out_sg) -= split->skip_sg0; in sg_split_mapped()
121 sg_dma_len(--out_sg) = split->length_last_sg; in sg_split_mapped()
/kernel/linux/linux-5.10/arch/arm/mm/
H A Ddma-mapping-nommu.c127 sg_dma_len(sg) = sg->length; in arm_nommu_dma_map_sg()
128 __dma_page_cpu_to_dev(sg_dma_address(sg), sg_dma_len(sg), dir); in arm_nommu_dma_map_sg()
142 __dma_page_dev_to_cpu(sg_dma_address(sg), sg_dma_len(sg), dir); in arm_nommu_dma_unmap_sg()
164 __dma_page_cpu_to_dev(sg_dma_address(sg), sg_dma_len(sg), dir); in arm_nommu_dma_sync_sg_for_device()
174 __dma_page_dev_to_cpu(sg_dma_address(sg), sg_dma_len(sg), dir); in arm_nommu_dma_sync_sg_for_cpu()
/kernel/linux/linux-5.10/drivers/media/pci/bt8xx/
H A Dbttv-risc.c73 while (offset && offset >= sg_dma_len(sg)) { in bttv_risc_packed()
74 offset -= sg_dma_len(sg); in bttv_risc_packed()
77 if (bpl <= sg_dma_len(sg)-offset) { in bttv_risc_packed()
87 (sg_dma_len(sg)-offset)); in bttv_risc_packed()
89 todo -= (sg_dma_len(sg)-offset); in bttv_risc_packed()
92 while (todo > sg_dma_len(sg)) { in bttv_risc_packed()
94 sg_dma_len(sg)); in bttv_risc_packed()
96 todo -= sg_dma_len(sg); in bttv_risc_packed()
176 while (yoffset && yoffset >= sg_dma_len(ysg)) { in bttv_risc_planar()
177 yoffset -= sg_dma_len(ysg); in bttv_risc_planar()
[all...]
/kernel/linux/linux-6.6/drivers/media/pci/bt8xx/
H A Dbttv-risc.c73 while (offset && offset >= sg_dma_len(sg)) { in bttv_risc_packed()
74 offset -= sg_dma_len(sg); in bttv_risc_packed()
77 if (bpl <= sg_dma_len(sg)-offset) { in bttv_risc_packed()
87 (sg_dma_len(sg)-offset)); in bttv_risc_packed()
89 todo -= (sg_dma_len(sg)-offset); in bttv_risc_packed()
92 while (todo > sg_dma_len(sg)) { in bttv_risc_packed()
94 sg_dma_len(sg)); in bttv_risc_packed()
96 todo -= sg_dma_len(sg); in bttv_risc_packed()
176 while (yoffset && yoffset >= sg_dma_len(ysg)) { in bttv_risc_planar()
177 yoffset -= sg_dma_len(ysg); in bttv_risc_planar()
[all...]
/kernel/linux/linux-6.6/drivers/gpu/drm/i915/gt/
H A Dintel_migrate.c331 return (struct sgt_dma){ sg, addr, addr + sg_dma_len(sg) }; in sg_sgt()
460 if (!it->sg || sg_dma_len(it->sg) == 0) in emit_pte()
464 it->max = it->dma + sg_dma_len(it->sg); in emit_pte()
632 while (sg && sg_dma_len(sg)) { in scatter_list_length()
633 len += sg_dma_len(sg); in scatter_list_length()
662 GEM_BUG_ON(!it->sg || !sg_dma_len(it->sg)); in get_ccs_sg_sgt()
673 it->max = it->dma + sg_dma_len(it->sg); in get_ccs_sg_sgt()
896 WARN_ON(it_src.sg && sg_dma_len(it_src.sg)); in intel_context_migrate_copy()
898 WARN_ON(it_dst.sg && sg_dma_len(it_dst.sg)); in intel_context_migrate_copy()
902 if (WARN_ON(!it_src.sg || !sg_dma_len(it_src.sg))) in intel_context_migrate_copy()
[all...]
/kernel/linux/linux-5.10/drivers/gpu/drm/i915/
H A Di915_scatterlist.c25 sg_dma_len(new_sg) = sg_dma_len(sg); in i915_sg_trim()
/kernel/linux/linux-5.10/drivers/scsi/qla2xxx/
H A Dqla_dsd.h15 put_unaligned_le32(sg_dma_len(sg), &(*dsd)->length); in append_dsd32()
28 put_unaligned_le32(sg_dma_len(sg), &(*dsd)->length); in append_dsd64()
/kernel/linux/linux-6.6/drivers/scsi/qla2xxx/
H A Dqla_dsd.h15 put_unaligned_le32(sg_dma_len(sg), &(*dsd)->length); in append_dsd32()
28 put_unaligned_le32(sg_dma_len(sg), &(*dsd)->length); in append_dsd64()
/kernel/linux/linux-5.10/drivers/media/pci/cx25821/
H A Dcx25821-core.c1016 while (offset && offset >= sg_dma_len(sg)) { in cx25821_risc_field()
1017 offset -= sg_dma_len(sg); in cx25821_risc_field()
1020 if (bpl <= sg_dma_len(sg) - offset) { in cx25821_risc_field()
1031 (sg_dma_len(sg) - offset)); in cx25821_risc_field()
1034 todo -= (sg_dma_len(sg) - offset); in cx25821_risc_field()
1037 while (todo > sg_dma_len(sg)) { in cx25821_risc_field()
1039 sg_dma_len(sg)); in cx25821_risc_field()
1042 todo -= sg_dma_len(sg); in cx25821_risc_field()
1121 while (offset && offset >= sg_dma_len(sg)) { in cx25821_risc_field_audio()
1122 offset -= sg_dma_len(sg); in cx25821_risc_field_audio()
[all...]
/kernel/linux/linux-6.6/drivers/media/pci/cx25821/
H A Dcx25821-core.c1009 while (offset && offset >= sg_dma_len(sg)) { in cx25821_risc_field()
1010 offset -= sg_dma_len(sg); in cx25821_risc_field()
1013 if (bpl <= sg_dma_len(sg) - offset) { in cx25821_risc_field()
1024 (sg_dma_len(sg) - offset)); in cx25821_risc_field()
1027 todo -= (sg_dma_len(sg) - offset); in cx25821_risc_field()
1030 while (todo > sg_dma_len(sg)) { in cx25821_risc_field()
1032 sg_dma_len(sg)); in cx25821_risc_field()
1035 todo -= sg_dma_len(sg); in cx25821_risc_field()
1114 while (offset && offset >= sg_dma_len(sg)) { in cx25821_risc_field_audio()
1115 offset -= sg_dma_len(sg); in cx25821_risc_field_audio()
[all...]
/kernel/linux/linux-5.10/drivers/crypto/allwinner/sun8i-ss/
H A Dsun8i-ss-cipher.c39 if ((sg_dma_len(sg) % 16) != 0) in sun8i_ss_need_fallback()
49 if ((sg_dma_len(sg) % 16) != 0) in sun8i_ss_need_fallback()
120 if (sg_dma_len(sg) == 0) { in sun8i_ss_setup_ivs()
137 todo = min(len, sg_dma_len(sg)); in sun8i_ss_setup_ivs()
234 if (sg_dma_len(sg) == 0) in sun8i_ss_cipher()
237 todo = min(len, sg_dma_len(sg)); in sun8i_ss_cipher()
256 if (sg_dma_len(sg) == 0) in sun8i_ss_cipher()
259 todo = min(len, sg_dma_len(sg)); in sun8i_ss_cipher()
/kernel/linux/linux-6.6/drivers/crypto/gemini/
H A Dsl3516-ce-cipher.c59 if ((sg_dma_len(sg) % 16) != 0) { in sl3516_ce_need_fallback()
75 if ((sg_dma_len(sg) % 16) != 0) { in sl3516_ce_need_fallback()
183 if (sg_dma_len(sg) == 0) in sl3516_ce_cipher()
186 todo = min(len, sg_dma_len(sg)); in sl3516_ce_cipher()
205 if (sg_dma_len(sg) == 0) in sl3516_ce_cipher()
208 todo = min(len, sg_dma_len(sg)); in sl3516_ce_cipher()
/kernel/linux/linux-5.10/drivers/media/pci/ivtv/
H A Divtv-udma.c68 dma->SGarray[i].size = cpu_to_le32(sg_dma_len(sg)); in ivtv_udma_fill_sg_array()
71 buffer_offset += sg_dma_len(sg); in ivtv_udma_fill_sg_array()
73 split -= sg_dma_len(sg); in ivtv_udma_fill_sg_array()

Completed in 14 milliseconds

12345678910>>...29