Lines matching defs:areq in the Allwinner sun8i-ce skcipher driver (drivers/crypto/allwinner/sun8i-ce/sun8i-ce-cipher.c); the number at the start of each entry is that line's position in the source file.
24 static int sun8i_ce_cipher_need_fallback(struct skcipher_request *areq)
26 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
29 if (sg_nents(areq->src) > MAX_SG || sg_nents(areq->dst) > MAX_SG)
32 if (areq->cryptlen < crypto_skcipher_ivsize(tfm))
35 if (areq->cryptlen == 0 || areq->cryptlen % 16)
38 sg = areq->src;
44 sg = areq->dst;
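
The matches above (file lines 24-44) belong to the fallback decision helper: the hardware path is refused when either scatterlist has more entries than the task descriptor can hold, when cryptlen is shorter than the IV, or when it is zero or not a multiple of the 16-byte block size; the remaining lines walk both scatterlists to reject chunks the DMA engine cannot consume. A minimal sketch of that kind of predicate, with MAX_SG and the per-entry 4-byte size/alignment rule treated as assumptions, could look like:

#include <crypto/internal/skcipher.h>
#include <linux/scatterlist.h>

#define MAX_SG 8        /* assumed descriptor capacity */

/* Sketch only: true when the request should go to the software
 * fallback instead of the crypto engine hardware. */
static bool cipher_needs_fallback(struct skcipher_request *areq)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
        struct scatterlist *sg;

        if (sg_nents(areq->src) > MAX_SG || sg_nents(areq->dst) > MAX_SG)
                return true;

        if (areq->cryptlen < crypto_skcipher_ivsize(tfm))
                return true;

        if (areq->cryptlen == 0 || areq->cryptlen % 16)
                return true;

        /* every chunk must be 4-byte sized and aligned for the DMA
         * engine (assumed constraint for this sketch) */
        for (sg = areq->src; sg; sg = sg_next(sg))
                if (sg->length % 4 || sg->offset % 4)
                        return true;
        for (sg = areq->dst; sg; sg = sg_next(sg))
                if (sg->length % 4 || sg->offset % 4)
                        return true;

        return false;
}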
53 static int sun8i_ce_cipher_fallback(struct skcipher_request *areq)
55 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
57 struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
68 skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags,
69 areq->base.complete, areq->base.data);
70 skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst,
71 areq->cryptlen, areq->iv);
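
File lines 53-71 show the fallback path itself: the request is re-targeted at a software skcipher and resubmitted through the regular crypto API, preserving the caller's flags and completion callback. A sketch of that pattern follows; struct my_cipher_tfm_ctx, struct my_cipher_req_ctx and their fallback_tfm/fallback_req members are assumptions standing in for whatever the driver keeps in its contexts:

#include <crypto/internal/skcipher.h>

/* Assumed context layouts, sketch only: a software tfm allocated at
 * init time (typically with CRYPTO_ALG_NEED_FALLBACK) plus a
 * per-request sub-request for it. */
struct my_cipher_tfm_ctx {
        struct crypto_skcipher *fallback_tfm;
};

struct my_cipher_req_ctx {
        struct skcipher_request fallback_req;   /* kept last in the real pattern */
};

static int cipher_do_fallback(struct skcipher_request *areq, bool encrypt)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
        struct my_cipher_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct my_cipher_req_ctx *rctx = skcipher_request_ctx(areq);

        skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
        skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags,
                                      areq->base.complete, areq->base.data);
        skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst,
                                   areq->cryptlen, areq->iv);

        return encrypt ? crypto_skcipher_encrypt(&rctx->fallback_req)
                       : crypto_skcipher_decrypt(&rctx->fallback_req);
}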
81 struct skcipher_request *areq = container_of(async_req, struct skcipher_request, base);
82 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
85 struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
101 crypto_tfm_alg_name(areq->base.tfm),
102 areq->cryptlen,
103 rctx->op_dir, areq->iv, crypto_skcipher_ivsize(tfm),
123 cet->t_dlen = cpu_to_le32(areq->cryptlen);
125 cet->t_dlen = cpu_to_le32(areq->cryptlen / 4);
153 if (areq->iv && crypto_skcipher_ivsize(tfm) > 0) {
166 offset = areq->cryptlen - ivsize;
167 scatterwalk_map_and_copy(rctx->backup_iv, areq->src,
170 memcpy(rctx->bounce_iv, areq->iv, ivsize);
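
File lines 153-170 prepare the IV before the hardware runs: the caller's IV is copied into a DMA-safe bounce buffer, and on decryption the last ciphertext block of areq->src is saved first, because that block must be reported back as the chaining IV once the operation completes. A sketch of the backup step, with bounce_iv/backup_iv assumed to be kmalloc'ed buffers of at least ivsize bytes:

#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>

/* Sketch: stash IV material before the engine touches the data. */
static void cipher_save_iv(struct skcipher_request *areq,
                           u8 *bounce_iv, u8 *backup_iv, bool decrypt)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
        unsigned int ivsize = crypto_skcipher_ivsize(tfm);

        if (!areq->iv || !ivsize)
                return;

        /* on decryption, the last ciphertext block is the next IV */
        if (decrypt)
                scatterwalk_map_and_copy(backup_iv, areq->src,
                                         areq->cryptlen - ivsize, ivsize, 0);

        /* DMA-safe copy of the IV the hardware will actually consume */
        memcpy(bounce_iv, areq->iv, ivsize);
}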
181 if (areq->src == areq->dst) {
182 nr_sgs = dma_map_sg(ce->dev, areq->src, sg_nents(areq->src),
191 nr_sgs = dma_map_sg(ce->dev, areq->src, sg_nents(areq->src),
198 nr_sgd = dma_map_sg(ce->dev, areq->dst, sg_nents(areq->dst),
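
File lines 181-198 DMA-map the scatterlists: an in-place request (areq->src == areq->dst) gets a single bidirectional mapping, otherwise the source is mapped to-device and the destination from-device. The unmap calls later in the listing (file lines 243-248 and 309-314) have to mirror these directions exactly. A sketch of that decision, assuming dev is the crypto engine's device:

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>
#include <crypto/internal/skcipher.h>

/* Sketch: map src/dst for DMA, one bidirectional mapping when the
 * request is in-place. Returns 0 or a negative errno. */
static int cipher_map_sgs(struct device *dev, struct skcipher_request *areq,
                          int *nr_sgs, int *nr_sgd)
{
        if (areq->src == areq->dst) {
                *nr_sgs = dma_map_sg(dev, areq->src, sg_nents(areq->src),
                                     DMA_BIDIRECTIONAL);
                *nr_sgd = *nr_sgs;
                return *nr_sgs > 0 ? 0 : -EINVAL;
        }

        *nr_sgs = dma_map_sg(dev, areq->src, sg_nents(areq->src),
                             DMA_TO_DEVICE);
        if (*nr_sgs <= 0)
                return -EINVAL;

        *nr_sgd = dma_map_sg(dev, areq->dst, sg_nents(areq->dst),
                             DMA_FROM_DEVICE);
        if (*nr_sgd <= 0) {
                dma_unmap_sg(dev, areq->src, sg_nents(areq->src),
                             DMA_TO_DEVICE);
                return -EINVAL;
        }

        return 0;
}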
207 len = areq->cryptlen;
208 for_each_sg(areq->src, sg, nr_sgs, i) {
213 areq->cryptlen, i, cet->t_src[i].len, sg->offset, todo);
222 len = areq->cryptlen;
223 for_each_sg(areq->dst, sg, nr_sgd, i) {
228 areq->cryptlen, i, cet->t_dst[i].len, sg->offset, todo);
237 chan->timeout = areq->cryptlen;
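
File lines 207-237 walk the mapped scatterlists, fill the task descriptor's t_src[]/t_dst[] slots, and size the per-request timeout from cryptlen; each slot is clamped to the bytes still owed so the summed lengths match areq->cryptlen exactly. A sketch of the source-side loop follows; the my_ce_task/my_sginfo layout and the length-in-32-bit-words convention are assumptions for illustration (the listing itself only shows the word convention for t_dlen at file line 125):

#include <linux/scatterlist.h>
#include <linux/dma-mapping.h>
#include <linux/minmax.h>

/* Stand-in descriptor layout, sketch only. */
struct my_sginfo {
        __le32 addr;
        __le32 len;
};

struct my_ce_task {
        struct my_sginfo t_src[8];
        struct my_sginfo t_dst[8];
};

/* Sketch: describe at most cryptlen bytes of the mapped source list. */
static void cipher_fill_src(struct my_ce_task *cet, struct scatterlist *src,
                            int nr_sgs, unsigned int cryptlen)
{
        unsigned int len = cryptlen;
        struct scatterlist *sg;
        int i;

        for_each_sg(src, sg, nr_sgs, i) {
                unsigned int todo = min(len, sg_dma_len(sg));

                cet->t_src[i].addr = cpu_to_le32(sg_dma_address(sg));
                cet->t_src[i].len = cpu_to_le32(todo / 4);  /* assumed: words */
                len -= todo;
                if (!len)
                        break;
        }
}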
243 if (areq->src == areq->dst) {
244 dma_unmap_sg(ce->dev, areq->src, nr_sgs, DMA_BIDIRECTIONAL);
247 dma_unmap_sg(ce->dev, areq->src, nr_sgs, DMA_TO_DEVICE);
248 dma_unmap_sg(ce->dev, areq->dst, nr_sgd, DMA_FROM_DEVICE);
252 if (areq->iv && ivsize > 0) {
255 offset = areq->cryptlen - ivsize;
257 memcpy(areq->iv, rctx->backup_iv, ivsize);
260 scatterwalk_map_and_copy(areq->iv, areq->dst, offset,
273 static int sun8i_ce_cipher_run(struct crypto_engine *engine, void *areq)
275 struct skcipher_request *breq = container_of(areq, struct skcipher_request, base);
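
File lines 273-275 are the crypto_engine callback: the opaque pointer handed over by the engine is converted back to a struct skcipher_request via container_of on its base member, the prepared task is executed, and the request is finalized so the engine can move on. A sketch of that shape, where run_hw_task() is a purely hypothetical stand-in for the driver's submit-and-wait step:

#include <crypto/engine.h>
#include <crypto/internal/skcipher.h>

/* Hypothetical helper: program the descriptor, start the flow and wait
 * for the completion interrupt; returns 0 or a negative errno. */
static int run_hw_task(struct skcipher_request *breq)
{
        return 0;
}

/* Sketch of a crypto_engine do_one_request handler. */
static int cipher_engine_run(struct crypto_engine *engine, void *async_req)
{
        struct skcipher_request *breq =
                container_of(async_req, struct skcipher_request, base);
        int err;

        err = run_hw_task(breq);
        crypto_finalize_skcipher_request(engine, breq, err);

        return 0;
}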
292 struct skcipher_request *areq = container_of(async_req, struct skcipher_request, base);
293 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
296 struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
309 if (areq->src == areq->dst) {
310 dma_unmap_sg(ce->dev, areq->src, nr_sgs, DMA_BIDIRECTIONAL);
313 dma_unmap_sg(ce->dev, areq->src, nr_sgs, DMA_TO_DEVICE);
314 dma_unmap_sg(ce->dev, areq->dst, nr_sgd, DMA_FROM_DEVICE);
317 if (areq->iv && ivsize > 0) {
320 offset = areq->cryptlen - ivsize;
322 memcpy(areq->iv, rctx->backup_iv, ivsize);
325 scatterwalk_map_and_copy(areq->iv, areq->dst, offset,
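
File lines 292-325 run after completion: the scatterlists are unmapped with the same directions chosen at map time, and the chaining IV is published back to the caller, from the saved backup on decryption or from the last ciphertext block of areq->dst on encryption. A sketch of the restore step, mirroring cipher_save_iv() above:

#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>

/* Sketch: write the next-chaining IV back into areq->iv.
 * backup_iv is the block saved before decryption started. */
static void cipher_restore_iv(struct skcipher_request *areq,
                              const u8 *backup_iv, bool decrypt)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
        unsigned int ivsize = crypto_skcipher_ivsize(tfm);

        if (!areq->iv || !ivsize)
                return;

        if (decrypt)
                memcpy(areq->iv, backup_iv, ivsize);
        else
                scatterwalk_map_and_copy(areq->iv, areq->dst,
                                         areq->cryptlen - ivsize, ivsize, 0);
}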
336 int sun8i_ce_skdecrypt(struct skcipher_request *areq)
338 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
340 struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
345 if (sun8i_ce_cipher_need_fallback(areq))
346 return sun8i_ce_cipher_fallback(areq);
352 return crypto_transfer_skcipher_request_to_engine(engine, areq);
355 int sun8i_ce_skencrypt(struct skcipher_request *areq)
357 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
359 struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
364 if (sun8i_ce_cipher_need_fallback(areq))
365 return sun8i_ce_cipher_fallback(areq);
371 return crypto_transfer_skcipher_request_to_engine(engine, areq);
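
File lines 336-371 are the two skcipher entry points: each records the operation direction in the request context, diverts unsupported requests to the software fallback, and otherwise queues the request on a crypto_engine, which gives the caller the usual asynchronous -EINPROGRESS/-EBUSY semantics. A condensed sketch of the decrypt side, reusing the helpers sketched above; ctx->engine and rctx->decrypt are assumed fields (the real driver picks a flow/engine per request):

#include <crypto/engine.h>
#include <crypto/internal/skcipher.h>

/* Sketch of an skcipher .decrypt entry point feeding a crypto_engine.
 * cipher_needs_fallback()/cipher_do_fallback() are the sketches above. */
static int my_ce_skdecrypt(struct skcipher_request *areq)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
        struct my_cipher_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct my_cipher_req_ctx *rctx = skcipher_request_ctx(areq);

        rctx->decrypt = true;   /* the real code stores an op_dir flag */

        if (cipher_needs_fallback(areq))
                return cipher_do_fallback(areq, false);

        return crypto_transfer_skcipher_request_to_engine(ctx->engine, areq);
}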