
Searched refs: src_nents (Results 1 - 25 of 49), sorted by relevance


/kernel/linux/linux-5.10/drivers/crypto/caam/
caamhash.c
527 * @src_nents: number of segments in input scatterlist
535 int src_nents; member
548 if (edesc->src_nents) in ahash_unmap()
549 dma_unmap_sg(dev, req->src, edesc->src_nents, DMA_TO_DEVICE); in ahash_unmap()
824 int src_nents, mapped_nents, sec4_sg_bytes, sec4_sg_src_index; in ahash_update_ctx() local
846 src_nents = sg_nents_for_len(req->src, src_len); in ahash_update_ctx()
847 if (src_nents < 0) { in ahash_update_ctx()
849 return src_nents; in ahash_update_ctx()
852 if (src_nents) { in ahash_update_ctx()
853 mapped_nents = dma_map_sg(jrdev, req->src, src_nents, in ahash_update_ctx()
1007 int src_nents, mapped_nents; ahash_finup_ctx() local
1080 int src_nents, mapped_nents; ahash_digest() local
1199 int sec4_sg_bytes, src_nents, mapped_nents; ahash_update_no_ctx() local
1317 int sec4_sg_bytes, sec4_sg_src_index, src_nents, mapped_nents; ahash_finup_no_ctx() local
1397 int src_nents, mapped_nents; ahash_update_first() local
[all...]
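
The hits above trace the recurring input-mapping pattern in these drivers: size the scatterlist with sg_nents_for_len(), propagate its negative error code, and only then hand the list to dma_map_sg(). A minimal sketch of that flow follows (the function and variable names are illustrative, not taken from the driver):

    #include <linux/dma-mapping.h>
    #include <linux/errno.h>
    #include <linux/scatterlist.h>

    static int example_map_src(struct device *dev, struct scatterlist *src,
                               unsigned int src_len)
    {
            int src_nents, mapped_nents;

            /* count the sg entries covering src_len bytes; <0 if the list is too short */
            src_nents = sg_nents_for_len(src, src_len);
            if (src_nents < 0)
                    return src_nents;

            if (src_nents) {
                    /* dma_map_sg() returns the mapped entry count, 0 on failure */
                    mapped_nents = dma_map_sg(dev, src, src_nents, DMA_TO_DEVICE);
                    if (!mapped_nents)
                            return -ENOMEM;
            }
            return 0;
    }

Note that ahash_unmap() above (lines 548-549) passes the original src_nents back to dma_unmap_sg(), not the possibly smaller count dma_map_sg() returned; that is the DMA API's documented contract.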
caamalg_qi2.c
146 struct scatterlist *dst, int src_nents, in caam_unmap()
152 if (src_nents) in caam_unmap()
153 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE); in caam_unmap()
157 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL); in caam_unmap()
359 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0; in aead_edesc_alloc() local
380 src_nents = sg_nents_for_len(req->src, src_len); in aead_edesc_alloc()
381 if (unlikely(src_nents < 0)) { in aead_edesc_alloc()
385 return ERR_PTR(src_nents); in aead_edesc_alloc()
396 if (src_nents) { in aead_edesc_alloc()
397 mapped_src_nents = dma_map_sg(dev, req->src, src_nents, in aead_edesc_alloc()
145 caam_unmap(struct device *dev, struct scatterlist *src, struct scatterlist *dst, int src_nents, int dst_nents, dma_addr_t iv_dma, int ivsize, enum dma_data_direction iv_dir, dma_addr_t qm_sg_dma, int qm_sg_bytes) caam_unmap() argument
1117 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0; skcipher_edesc_alloc() local
3530 int src_nents, mapped_nents, qm_sg_bytes, qm_sg_src_index; ahash_update_ctx() local
3718 int src_nents, mapped_nents; ahash_finup_ctx() local
3811 int src_nents, mapped_nents; ahash_digest() local
3982 int qm_sg_bytes, src_nents, mapped_nents; ahash_update_no_ctx() local
4100 int qm_sg_bytes, src_nents, mapped_nents; ahash_finup_no_ctx() local
4199 int src_nents, mapped_nents; ahash_update_first() local
[all...]
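
The caam_unmap() hits just above (lines 152-157) capture a direction rule every driver in this list follows: an out-of-place request maps source and destination with opposite DMA directions, while an in-place request (dst == src) maps one list DMA_BIDIRECTIONAL and must unmap it the same way. A simplified sketch of that branch (the real caam_unmap() additionally releases the IV and S/G-table mappings):

    static void example_unmap(struct device *dev, struct scatterlist *src,
                              struct scatterlist *dst, int src_nents, int dst_nents)
    {
            if (dst != src) {
                    if (src_nents)
                            dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
                    if (dst_nents)
                            dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
            } else {
                    dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
            }
    }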
caamalg_qi2.h
101 * @src_nents: number of segments in input scatterlist
111 int src_nents; member
123 * @src_nents: number of segments in input scatterlist
131 int src_nents; member
142 * @src_nents: number of segments in input scatterlist
148 int src_nents; member
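
All three header hits declare src_nents as a member of an extended-descriptor struct: the count is captured at mapping time precisely so the completion path can pass it back to dma_unmap_sg(). A stripped-down illustration (a hypothetical struct, not the driver's actual layout):

    struct example_edesc {
            int src_nents;        /* nents handed to dma_map_sg(); reused at unmap time */
            int dst_nents;
            dma_addr_t qm_sg_dma; /* DMA address of the hardware S/G table, if built */
            /* ... hardware descriptor memory follows ... */
    };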
caamalg_qi.c
787 * @src_nents: number of segments in input scatterlist
798 int src_nents; member
811 * @src_nents: number of segments in input scatterlist
820 int src_nents; member
868 struct scatterlist *dst, int src_nents, in caam_unmap()
874 if (src_nents) in caam_unmap()
875 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE); in caam_unmap()
879 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL); in caam_unmap()
895 caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents, in aead_unmap()
907 caam_unmap(dev, req->src, req->dst, edesc->src_nents, edes in skcipher_unmap()
867 caam_unmap(struct device *dev, struct scatterlist *src, struct scatterlist *dst, int src_nents, int dst_nents, dma_addr_t iv_dma, int ivsize, enum dma_data_direction iv_dir, dma_addr_t qm_sg_dma, int qm_sg_bytes) caam_unmap() argument
946 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0; aead_edesc_alloc() local
1252 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0; skcipher_edesc_alloc() local
[all...]
caamalg.c
878 * @src_nents: number of segments in input s/w scatterlist
889 int src_nents; member
902 * @src_nents: number of segments in input s/w scatterlist
915 int src_nents; member
928 struct scatterlist *dst, int src_nents, in caam_unmap()
934 if (src_nents) in caam_unmap()
935 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE); in caam_unmap()
939 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL); in caam_unmap()
954 edesc->src_nents, edesc->dst_nents, 0, 0, in aead_unmap()
965 edesc->src_nents, edes in skcipher_unmap()
927 caam_unmap(struct device *dev, struct scatterlist *src, struct scatterlist *dst, int src_nents, int dst_nents, dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma, int sec4_sg_bytes) caam_unmap() argument
1299 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0; aead_edesc_alloc() local
1603 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0; skcipher_edesc_alloc() local
[all...]
/kernel/linux/linux-6.6/drivers/crypto/caam/
caamhash.c
536 * @src_nents: number of segments in input scatterlist
544 int src_nents; member
557 if (edesc->src_nents) in ahash_unmap()
558 dma_unmap_sg(dev, req->src, edesc->src_nents, DMA_TO_DEVICE); in ahash_unmap()
831 int src_nents, mapped_nents, sec4_sg_bytes, sec4_sg_src_index; in ahash_update_ctx() local
853 src_nents = sg_nents_for_len(req->src, src_len); in ahash_update_ctx()
854 if (src_nents < 0) { in ahash_update_ctx()
856 return src_nents; in ahash_update_ctx()
859 if (src_nents) { in ahash_update_ctx()
860 mapped_nents = dma_map_sg(jrdev, req->src, src_nents, in ahash_update_ctx()
1014 int src_nents, mapped_nents; ahash_finup_ctx() local
1087 int src_nents, mapped_nents; ahash_digest() local
1206 int sec4_sg_bytes, src_nents, mapped_nents; ahash_update_no_ctx() local
1324 int sec4_sg_bytes, sec4_sg_src_index, src_nents, mapped_nents; ahash_finup_no_ctx() local
1404 int src_nents, mapped_nents; ahash_update_first() local
[all...]
caamalg_qi2.c
151 struct scatterlist *dst, int src_nents, in caam_unmap()
157 if (src_nents) in caam_unmap()
158 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE); in caam_unmap()
162 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL); in caam_unmap()
364 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0; in aead_edesc_alloc() local
385 src_nents = sg_nents_for_len(req->src, src_len); in aead_edesc_alloc()
386 if (unlikely(src_nents < 0)) { in aead_edesc_alloc()
390 return ERR_PTR(src_nents); in aead_edesc_alloc()
401 if (src_nents) { in aead_edesc_alloc()
402 mapped_src_nents = dma_map_sg(dev, req->src, src_nents, in aead_edesc_alloc()
150 caam_unmap(struct device *dev, struct scatterlist *src, struct scatterlist *dst, int src_nents, int dst_nents, dma_addr_t iv_dma, int ivsize, enum dma_data_direction iv_dir, dma_addr_t qm_sg_dma, int qm_sg_bytes) caam_unmap() argument
1122 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0; skcipher_edesc_alloc() local
3543 int src_nents, mapped_nents, qm_sg_bytes, qm_sg_src_index; ahash_update_ctx() local
3731 int src_nents, mapped_nents; ahash_finup_ctx() local
3824 int src_nents, mapped_nents; ahash_digest() local
3995 int qm_sg_bytes, src_nents, mapped_nents; ahash_update_no_ctx() local
4113 int qm_sg_bytes, src_nents, mapped_nents; ahash_finup_no_ctx() local
4212 int src_nents, mapped_nents; ahash_update_first() local
[all...]
caamalg_qi2.h
99 * @src_nents: number of segments in input scatterlist
109 int src_nents; member
121 * @src_nents: number of segments in input scatterlist
129 int src_nents; member
140 * @src_nents: number of segments in input scatterlist
146 int src_nents; member
caamalg_qi.c
792 * @src_nents: number of segments in input scatterlist
803 int src_nents; member
816 * @src_nents: number of segments in input scatterlist
825 int src_nents; member
873 struct scatterlist *dst, int src_nents, in caam_unmap()
879 if (src_nents) in caam_unmap()
880 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE); in caam_unmap()
884 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL); in caam_unmap()
900 caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents, in aead_unmap()
912 caam_unmap(dev, req->src, req->dst, edesc->src_nents, edes in skcipher_unmap()
872 caam_unmap(struct device *dev, struct scatterlist *src, struct scatterlist *dst, int src_nents, int dst_nents, dma_addr_t iv_dma, int ivsize, enum dma_data_direction iv_dir, dma_addr_t qm_sg_dma, int qm_sg_bytes) caam_unmap() argument
951 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0; aead_edesc_alloc() local
1262 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0; skcipher_edesc_alloc() local
[all...]
caamalg.c
887 * @src_nents: number of segments in input s/w scatterlist
898 int src_nents; member
911 * @src_nents: number of segments in input s/w scatterlist
924 int src_nents; member
937 struct scatterlist *dst, int src_nents, in caam_unmap()
943 if (src_nents) in caam_unmap()
944 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE); in caam_unmap()
948 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL); in caam_unmap()
963 edesc->src_nents, edesc->dst_nents, 0, 0, in aead_unmap()
974 edesc->src_nents, edes in skcipher_unmap()
936 caam_unmap(struct device *dev, struct scatterlist *src, struct scatterlist *dst, int src_nents, int dst_nents, dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma, int sec4_sg_bytes) caam_unmap() argument
1315 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0; aead_edesc_alloc() local
1621 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0; skcipher_edesc_alloc() local
[all...]
/kernel/linux/linux-6.6/drivers/crypto/qce/
aead.c
49 dma_unmap_sg(qce->dev, rctx->src_sg, rctx->src_nents, dir_src); in qce_aead_done()
239 rctx->src_nents = sg_nents_for_len(req->src, totallen) + 1; in qce_aead_ccm_prepare_buf_assoclen()
241 rctx->src_nents = sg_nents_for_len(req->src, totallen) + 2; in qce_aead_ccm_prepare_buf_assoclen()
244 ret = sg_alloc_table(&rctx->src_tbl, rctx->src_nents, gfp); in qce_aead_ccm_prepare_buf_assoclen()
279 rctx->src_nents = sg_nents_for_len(rctx->src_sg, totallen); in qce_aead_ccm_prepare_buf_assoclen()
289 rctx->dst_nents = rctx->src_nents + 1; in qce_aead_ccm_prepare_buf_assoclen()
291 rctx->dst_nents = rctx->src_nents; in qce_aead_ccm_prepare_buf_assoclen()
316 rctx->src_nents = sg_nents_for_len(req->src, totallen); in qce_aead_prepare_buf()
317 if (rctx->src_nents < 0) { in qce_aead_prepare_buf()
323 rctx->src_nents in qce_aead_prepare_buf()
418 int dst_nents, src_nents, ret; qce_aead_async_req_handle() local
[all...]
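
The qce AEAD hits show a second idiom: when the driver must prepend its own metadata (the CCM B0/associated-data block here), it counts the request's segments, adds slots for the extra entries (the "+ 1"/"+ 2" at lines 239 and 241), and builds a fresh table with sg_alloc_table(). A sketch of that shape, with illustrative names:

    static int example_build_table(struct sg_table *tbl, struct scatterlist *req_src,
                                   unsigned int totallen, gfp_t gfp)
    {
            int nents, ret;

            nents = sg_nents_for_len(req_src, totallen);
            if (nents < 0)
                    return nents;

            /* reserve one extra slot for the driver's own header buffer */
            ret = sg_alloc_table(tbl, nents + 1, gfp);
            if (ret)
                    return ret;
            /* ... sg_set_buf() the header, then copy the request entries in ... */
            return 0;
    }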
skcipher.c
50 dma_unmap_sg(qce->dev, rctx->src_sg, rctx->src_nents, dir_src); in qce_skcipher_done()
75 int dst_nents, src_nents, ret; in qce_skcipher_async_req_handle() local
85 rctx->src_nents = sg_nents_for_len(req->src, req->cryptlen); in qce_skcipher_async_req_handle()
89 rctx->dst_nents = rctx->src_nents; in qce_skcipher_async_req_handle()
90 if (rctx->src_nents < 0) { in qce_skcipher_async_req_handle()
92 return rctx->src_nents; in qce_skcipher_async_req_handle()
133 src_nents = dma_map_sg(qce->dev, req->src, rctx->src_nents, dir_src); in qce_skcipher_async_req_handle()
134 if (!src_nents) { in qce_skcipher_async_req_handle()
141 src_nents in qce_skcipher_async_req_handle()
[all...]
sha.c
53 dma_unmap_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE); in qce_ahash_done()
93 rctx->src_nents = sg_nents_for_len(req->src, req->nbytes); in qce_ahash_async_req_handle()
94 if (rctx->src_nents < 0) { in qce_ahash_async_req_handle()
96 return rctx->src_nents; in qce_ahash_async_req_handle()
99 ret = dma_map_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE); in qce_ahash_async_req_handle()
111 ret = qce_dma_prep_sgs(&qce->dma, req->src, rctx->src_nents, in qce_ahash_async_req_handle()
129 dma_unmap_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE); in qce_ahash_async_req_handle()
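
The sha.c hits outline the standard error unwind: the list mapped at line 99 is unmapped at line 129 with the same rctx->src_nents if descriptor preparation fails later. Generic shape of that unwind (qce_dma_prep_sgs() is the driver's internal helper and is treated as a black box; the stand-in name and error code are illustrative):

    ret = dma_map_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE);
    if (!ret)
            return -EIO;

    ret = prepare_descriptors(qce);   /* stand-in for qce_dma_prep_sgs() */
    if (ret)
            goto error_unmap;
    return 0;

    error_unmap:
            dma_unmap_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE);
            return ret;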
/kernel/linux/linux-5.10/drivers/crypto/qce/
skcipher.c
49 dma_unmap_sg(qce->dev, rctx->src_sg, rctx->src_nents, dir_src); in qce_skcipher_done()
74 int dst_nents, src_nents, ret; in qce_skcipher_async_req_handle() local
84 rctx->src_nents = sg_nents_for_len(req->src, req->cryptlen); in qce_skcipher_async_req_handle()
88 rctx->dst_nents = rctx->src_nents; in qce_skcipher_async_req_handle()
89 if (rctx->src_nents < 0) { in qce_skcipher_async_req_handle()
91 return rctx->src_nents; in qce_skcipher_async_req_handle()
132 src_nents = dma_map_sg(qce->dev, req->src, rctx->src_nents, dir_src); in qce_skcipher_async_req_handle()
133 if (src_nents < 0) { in qce_skcipher_async_req_handle()
134 ret = src_nents; in qce_skcipher_async_req_handle()
[all...]
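
Comparing this 5.10 excerpt (line 133: "if (src_nents < 0)") with the 6.6 version above (line 134: "if (!src_nents)") exposes a real API subtlety: dma_map_sg() returns the number of mapped entries and 0 on failure; it never returns a negative value, so the "< 0" test can never fire. The later kernel's check is the correct one:

    src_nents = dma_map_sg(qce->dev, req->src, rctx->src_nents, dir_src);
    if (!src_nents)
            return -EIO;    /* the error code here is illustrative */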
sha.c
47 dma_unmap_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE); in qce_ahash_done()
87 rctx->src_nents = sg_nents_for_len(req->src, req->nbytes); in qce_ahash_async_req_handle()
88 if (rctx->src_nents < 0) { in qce_ahash_async_req_handle()
90 return rctx->src_nents; in qce_ahash_async_req_handle()
93 ret = dma_map_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE); in qce_ahash_async_req_handle()
103 ret = qce_dma_prep_sgs(&qce->dma, req->src, rctx->src_nents, in qce_ahash_async_req_handle()
121 dma_unmap_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE); in qce_ahash_async_req_handle()
cipher.h
25 * @src_nents: source entries
38 int src_nents; member
/kernel/linux/linux-5.10/drivers/crypto/virtio/
virtio_crypto_algs.c
336 int src_nents, dst_nents; in __virtio_crypto_skcipher_do_req() local
346 src_nents = sg_nents_for_len(req->src, req->cryptlen); in __virtio_crypto_skcipher_do_req()
347 if (src_nents < 0) { in __virtio_crypto_skcipher_do_req()
349 return src_nents; in __virtio_crypto_skcipher_do_req()
354 pr_debug("virtio_crypto: Number of sgs (src_nents: %d, dst_nents: %d)\n", in __virtio_crypto_skcipher_do_req()
355 src_nents, dst_nents); in __virtio_crypto_skcipher_do_req()
358 sg_total = src_nents + dst_nents + 3; in __virtio_crypto_skcipher_do_req()
438 for (sg = req->src; src_nents; sg = sg_next(sg), src_nents--) in __virtio_crypto_skcipher_do_req()
/kernel/linux/linux-6.6/drivers/crypto/virtio/
virtio_crypto_skcipher_algs.c
333 int src_nents, dst_nents; in __virtio_crypto_skcipher_do_req() local
343 src_nents = sg_nents_for_len(req->src, req->cryptlen); in __virtio_crypto_skcipher_do_req()
344 if (src_nents < 0) { in __virtio_crypto_skcipher_do_req()
346 return src_nents; in __virtio_crypto_skcipher_do_req()
351 pr_debug("virtio_crypto: Number of sgs (src_nents: %d, dst_nents: %d)\n", in __virtio_crypto_skcipher_do_req()
352 src_nents, dst_nents); in __virtio_crypto_skcipher_do_req()
355 sg_total = src_nents + dst_nents + 3; in __virtio_crypto_skcipher_do_req()
435 for (sg = req->src; src_nents; sg = sg_next(sg), src_nents--) in __virtio_crypto_skcipher_do_req()
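
The virtio hits use src_nents directly as a loop bound while walking the chain with sg_next(), one virtqueue entry per segment. The walking idiom, extracted (the per-entry handler is hypothetical):

    struct scatterlist *sg;
    int n = src_nents;

    /* visit exactly n entries; sg_next() transparently follows chained sg tables */
    for (sg = req->src; n; sg = sg_next(sg), n--)
            handle_entry(sg);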
/kernel/linux/linux-5.10/drivers/crypto/marvell/cesa/
cipher.c
66 dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, in mv_cesa_skcipher_dma_cleanup()
69 dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, in mv_cesa_skcipher_dma_cleanup()
94 len = sg_pcopy_to_buffer(req->src, creq->src_nents, in mv_cesa_skcipher_std_step()
317 ret = dma_map_sg(cesa_dev->dev, req->src, creq->src_nents, in mv_cesa_skcipher_dma_req_init()
329 ret = dma_map_sg(cesa_dev->dev, req->src, creq->src_nents, in mv_cesa_skcipher_dma_req_init()
390 dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, in mv_cesa_skcipher_dma_req_init()
423 creq->src_nents = sg_nents_for_len(req->src, req->cryptlen); in mv_cesa_skcipher_req_init()
424 if (creq->src_nents < 0) { in mv_cesa_skcipher_req_init()
426 return creq->src_nents; in mv_cesa_skcipher_req_init()
hash.c
104 dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, DMA_TO_DEVICE); in mv_cesa_ahash_dma_cleanup()
192 sreq->offset += sg_pcopy_to_buffer(req->src, creq->src_nents, in mv_cesa_ahash_std_step()
403 sg_pcopy_to_buffer(ahashreq->src, creq->src_nents, in mv_cesa_ahash_req_cleanup()
457 sg_pcopy_to_buffer(req->src, creq->src_nents, in mv_cesa_ahash_cache_req()
622 if (creq->src_nents) { in mv_cesa_ahash_dma_req_init()
623 ret = dma_map_sg(cesa_dev->dev, req->src, creq->src_nents, in mv_cesa_ahash_dma_req_init()
729 dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, DMA_TO_DEVICE); in mv_cesa_ahash_dma_req_init()
741 creq->src_nents = sg_nents_for_len(req->src, req->nbytes); in mv_cesa_ahash_req_init()
742 if (creq->src_nents < 0) { in mv_cesa_ahash_req_init()
744 return creq->src_nents; in mv_cesa_ahash_req_init()
[all...]
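
The cesa "std" (non-DMA) paths at cipher.c line 94 and hash.c line 192 avoid mapping altogether and instead flatten data into the engine's SRAM with the offset-aware copy helper: sg_pcopy_to_buffer() skips the bytes already consumed, copies up to the buffer length, and returns the amount actually copied. Shape of the call, with illustrative buffer names:

    size_t len;

    /* copy the next chunk of the request into the engine's SRAM window */
    len = sg_pcopy_to_buffer(req->src, creq->src_nents,
                             sram_buf, sram_len, sreq->offset);
    sreq->offset += len;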
/kernel/linux/linux-6.6/drivers/crypto/marvell/cesa/
cipher.c
66 dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, in mv_cesa_skcipher_dma_cleanup()
69 dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, in mv_cesa_skcipher_dma_cleanup()
97 len = mv_cesa_sg_copy_to_sram(engine, req->src, creq->src_nents, in mv_cesa_skcipher_std_step()
327 ret = dma_map_sg(cesa_dev->dev, req->src, creq->src_nents, in mv_cesa_skcipher_dma_req_init()
339 ret = dma_map_sg(cesa_dev->dev, req->src, creq->src_nents, in mv_cesa_skcipher_dma_req_init()
400 dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, in mv_cesa_skcipher_dma_req_init()
433 creq->src_nents = sg_nents_for_len(req->src, req->cryptlen); in mv_cesa_skcipher_req_init()
434 if (creq->src_nents < 0) { in mv_cesa_skcipher_req_init()
436 return creq->src_nents; in mv_cesa_skcipher_req_init()
hash.c
105 dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, DMA_TO_DEVICE); in mv_cesa_ahash_dma_cleanup()
204 engine, req->src, creq->src_nents, in mv_cesa_ahash_std_step()
427 sg_pcopy_to_buffer(ahashreq->src, creq->src_nents, in mv_cesa_ahash_req_cleanup()
481 sg_pcopy_to_buffer(req->src, creq->src_nents, in mv_cesa_ahash_cache_req()
646 if (creq->src_nents) { in mv_cesa_ahash_dma_req_init()
647 ret = dma_map_sg(cesa_dev->dev, req->src, creq->src_nents, in mv_cesa_ahash_dma_req_init()
753 dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, DMA_TO_DEVICE); in mv_cesa_ahash_dma_req_init()
765 creq->src_nents = sg_nents_for_len(req->src, req->nbytes); in mv_cesa_ahash_req_init()
766 if (creq->src_nents < 0) { in mv_cesa_ahash_req_init()
768 return creq->src_nents; in mv_cesa_ahash_req_init()
[all...]
/kernel/linux/linux-5.10/drivers/crypto/
talitos.c
966 unsigned int src_nents = edesc->src_nents ? : 1; in talitos_sg_unmap() local
976 if (src_nents == 1 || !is_sec1) in talitos_sg_unmap()
977 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE); in talitos_sg_unmap()
981 } else if (src_nents == 1 || !is_sec1) { in talitos_sg_unmap()
982 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL); in talitos_sg_unmap()
1219 sg_count = edesc->src_nents ?: 1; in ipsec_esp()
1331 int src_nents, dst_nents, alloc_len, dma_len, src_len, dst_len; in talitos_edesc_alloc() local
1346 src_nents = sg_nents_for_len(src, src_len); in talitos_edesc_alloc()
1347 if (src_nents < in talitos_edesc_alloc()
[all...]
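
The talitos hit at line 966 leans on GCC's two-operand conditional: "edesc->src_nents ? : 1" evaluates to src_nents when it is non-zero and to 1 otherwise, normalizing the driver's "zero means a single flat segment" convention before unmapping. Equivalent portable spelling:

    unsigned int nents = edesc->src_nents ? : 1;                       /* GNU extension */
    unsigned int nents_portable = edesc->src_nents ? edesc->src_nents : 1;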
/kernel/linux/linux-6.6/drivers/crypto/
talitos.c
967 unsigned int src_nents = edesc->src_nents ? : 1; in talitos_sg_unmap() local
977 if (src_nents == 1 || !is_sec1) in talitos_sg_unmap()
978 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE); in talitos_sg_unmap()
982 } else if (src_nents == 1 || !is_sec1) { in talitos_sg_unmap()
983 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL); in talitos_sg_unmap()
1220 sg_count = edesc->src_nents ?: 1; in ipsec_esp()
1332 int src_nents, dst_nents, alloc_len, dma_len, src_len, dst_len; in talitos_edesc_alloc() local
1347 src_nents = sg_nents_for_len(src, src_len); in talitos_edesc_alloc()
1348 if (src_nents < in talitos_edesc_alloc()
[all...]
/kernel/linux/linux-6.6/drivers/crypto/aspeed/
aspeed-hace-crypto.c
141 dma_unmap_sg(dev, req->src, rctx->src_nents, DMA_BIDIRECTIONAL); in aspeed_sk_transfer_sg()
143 dma_unmap_sg(dev, req->src, rctx->src_nents, DMA_TO_DEVICE); in aspeed_sk_transfer_sg()
192 nbytes = sg_copy_to_buffer(in_sg, rctx->src_nents, in aspeed_sk_start()
197 "nb_in_sg", rctx->src_nents, in aspeed_sk_start()
243 rctx->src_nents, DMA_BIDIRECTIONAL); in aspeed_sk_start_sg()
252 rctx->src_nents, DMA_TO_DEVICE); in aspeed_sk_start_sg()
344 dma_unmap_sg(hace_dev->dev, req->src, rctx->src_nents, in aspeed_sk_start_sg()
350 dma_unmap_sg(hace_dev->dev, req->src, rctx->src_nents, in aspeed_sk_start_sg()
357 dma_unmap_sg(hace_dev->dev, req->src, rctx->src_nents, DMA_TO_DEVICE); in aspeed_sk_start_sg()
381 rctx->src_nents in aspeed_hace_skcipher_trigger()
[all...]

Completed in 35 milliseconds
