Lines Matching refs:areq

24 static int sun8i_ce_cipher_need_fallback(struct skcipher_request *areq)
26 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
34 if (sg_nents_for_len(areq->src, areq->cryptlen) > MAX_SG ||
35 sg_nents_for_len(areq->dst, areq->cryptlen) > MAX_SG) {
40 if (areq->cryptlen < crypto_skcipher_ivsize(tfm)) {
45 if (areq->cryptlen == 0) {
50 if (areq->cryptlen % 16) {
55 len = areq->cryptlen;
56 sg = areq->src;
71 len = areq->cryptlen;
72 sg = areq->dst;
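
The matches at file lines 24-72 all belong to the fallback decision (this listing appears to come from the Allwinner sun8i-ce skcipher glue): the hardware path is refused when the request has too many scatterlist entries, a cryptlen that is zero, shorter than the IV, or not a multiple of the 16-byte block, or when a segment cannot be fed to the DMA engine. A minimal sketch of that shape; MAX_SG and the 16-byte checks come from the matches themselves, while the per-segment alignment test is an assumption about what the loops over areq->src and areq->dst at file lines 55-72 verify:

static int sun8i_ce_cipher_need_fallback(struct skcipher_request *areq)
{
    struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
    struct scatterlist *sg;
    unsigned int len;

    if (sg_nents_for_len(areq->src, areq->cryptlen) > MAX_SG ||
        sg_nents_for_len(areq->dst, areq->cryptlen) > MAX_SG)
        return true;
    if (areq->cryptlen < crypto_skcipher_ivsize(tfm))
        return true;
    if (areq->cryptlen == 0 || areq->cryptlen % 16)
        return true;

    /* Assumed check: every payload-carrying segment must be 4-byte
     * aligned and block-sized, otherwise use the software fallback. */
    len = areq->cryptlen;
    for (sg = areq->src; sg && len; sg = sg_next(sg)) {
        if (!IS_ALIGNED(sg->offset, 4) || sg->length % 16)
            return true;
        len -= min(len, sg->length);
    }
    len = areq->cryptlen;
    for (sg = areq->dst; sg && len; sg = sg_next(sg)) {
        if (!IS_ALIGNED(sg->offset, 4) || sg->length % 16)
            return true;
        len -= min(len, sg->length);
    }
    return false;
}
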
89 static int sun8i_ce_cipher_fallback(struct skcipher_request *areq)
91 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
93 struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
109 skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags,
110 areq->base.complete, areq->base.data);
111 skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst,
112 areq->cryptlen, areq->iv);
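
The fallback itself (file lines 89-112) replays the caller's parameters on a software skcipher. A sketch, assuming the tfm context carries a pre-allocated fallback_tfm and that rctx->op_dir records the direction:

static int sun8i_ce_cipher_fallback(struct skcipher_request *areq)
{
    struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
    struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
    struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);

    /* Point the per-request fallback at the software tfm and copy the
     * caller's completion, buffers and IV over unchanged. */
    skcipher_request_set_tfm(&rctx->fallback_req, op->fallback_tfm);
    skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags,
                                  areq->base.complete, areq->base.data);
    skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst,
                               areq->cryptlen, areq->iv);

    if (rctx->op_dir & CE_DECRYPTION)
        return crypto_skcipher_decrypt(&rctx->fallback_req);
    return crypto_skcipher_encrypt(&rctx->fallback_req);
}
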
122 struct skcipher_request *areq = container_of(async_req, struct skcipher_request, base);
123 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
126 struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
138 int ns = sg_nents_for_len(areq->src, areq->cryptlen);
139 int nd = sg_nents_for_len(areq->dst, areq->cryptlen);
144 crypto_tfm_alg_name(areq->base.tfm),
145 areq->cryptlen,
146 rctx->op_dir, areq->iv, crypto_skcipher_ivsize(tfm),
166 cet->t_dlen = cpu_to_le32(areq->cryptlen);
168 cet->t_dlen = cpu_to_le32(areq->cryptlen / 4);
196 if (areq->iv && crypto_skcipher_ivsize(tfm) > 0) {
199 offset = areq->cryptlen - ivsize;
200 scatterwalk_map_and_copy(chan->backup_iv, areq->src,
203 memcpy(chan->bounce_iv, areq->iv, ivsize);
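
File lines 196-203 handle IV chaining before the hardware runs: a decrypt request must stash its last ciphertext block, since that block becomes the next request's IV and may be overwritten once src and dst alias. A sketch of that step; chan->backup_iv and chan->bounce_iv are named in the matches, but their exact types and the surrounding locals are assumptions:

    /* Inside the prepare step; tfm, rctx and chan come from the
     * surrounding function. */
    if (areq->iv && crypto_skcipher_ivsize(tfm) > 0) {
        unsigned int ivsize = crypto_skcipher_ivsize(tfm);

        if (rctx->op_dir & CE_DECRYPTION) {
            unsigned int offset = areq->cryptlen - ivsize;

            /* Save the last ciphertext block before DMA can clobber it. */
            scatterwalk_map_and_copy(chan->backup_iv, areq->src,
                                     offset, ivsize, 0);
        }
        /* The IV itself is bounced into a DMA-safe buffer. */
        memcpy(chan->bounce_iv, areq->iv, ivsize);
    }
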
214 if (areq->src == areq->dst) {
215 nr_sgs = dma_map_sg(ce->dev, areq->src, ns, DMA_BIDIRECTIONAL);
223 nr_sgs = dma_map_sg(ce->dev, areq->src, ns, DMA_TO_DEVICE);
229 nr_sgd = dma_map_sg(ce->dev, areq->dst, nd, DMA_FROM_DEVICE);
237 len = areq->cryptlen;
238 for_each_sg(areq->src, sg, nr_sgs, i) {
243 areq->cryptlen, i, cet->t_src[i].len, sg->offset, todo);
252 len = areq->cryptlen;
253 for_each_sg(areq->dst, sg, nr_sgd, i) {
258 areq->cryptlen, i, cet->t_dst[i].len, sg->offset, todo);
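
File lines 214-258 are the DMA setup: the source and destination scatterlists are mapped (bidirectionally when they are the same list), then each mapped segment is written into the task descriptor's t_src[]/t_dst[] arrays while len counts down from cryptlen. A condensed, assumption-laden sketch of the source side; the /4 mirrors the word-sized length at file line 168 and is variant-dependent:

    /* Inside the prepare step; ce, cet, ns, nd, nr_sgs, nr_sgd, sg,
     * i, len and todo are locals of the surrounding function. */
    if (areq->src == areq->dst) {
        nr_sgs = dma_map_sg(ce->dev, areq->src, ns, DMA_BIDIRECTIONAL);
        nr_sgd = nr_sgs;
    } else {
        nr_sgs = dma_map_sg(ce->dev, areq->src, ns, DMA_TO_DEVICE);
        nr_sgd = dma_map_sg(ce->dev, areq->dst, nd, DMA_FROM_DEVICE);
    }
    /* A mapping result of 0, or more than MAX_SG entries, is an error
     * and must unwind the mappings made above. */

    len = areq->cryptlen;
    for_each_sg(areq->src, sg, nr_sgs, i) {
        cet->t_src[i].addr = cpu_to_le32(sg_dma_address(sg));
        todo = min(len, sg_dma_len(sg));
        cet->t_src[i].len = cpu_to_le32(todo / 4);
        len -= todo;
    }
    /* The destination loop at file lines 252-258 fills cet->t_dst[]
     * the same way from the nr_sgd mapped entries. */
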
267 chan->timeout = areq->cryptlen;
273 if (areq->src == areq->dst) {
274 dma_unmap_sg(ce->dev, areq->src, ns, DMA_BIDIRECTIONAL);
277 dma_unmap_sg(ce->dev, areq->src, ns, DMA_TO_DEVICE);
278 dma_unmap_sg(ce->dev, areq->dst, nd, DMA_FROM_DEVICE);
282 if (areq->iv && ivsize > 0) {
285 offset = areq->cryptlen - ivsize;
287 memcpy(areq->iv, chan->backup_iv, ivsize);
290 scatterwalk_map_and_copy(areq->iv, areq->dst, offset,
302 static void sun8i_ce_cipher_run(struct crypto_engine *engine, void *areq)
304 struct skcipher_request *breq = container_of(areq, struct skcipher_request, base);
321 struct skcipher_request *areq = container_of(async_req, struct skcipher_request, base);
322 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
325 struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
338 if (areq->src == areq->dst) {
339 dma_unmap_sg(ce->dev, areq->src, nr_sgs, DMA_BIDIRECTIONAL);
342 dma_unmap_sg(ce->dev, areq->src, nr_sgs, DMA_TO_DEVICE);
343 dma_unmap_sg(ce->dev, areq->dst, nr_sgd, DMA_FROM_DEVICE);
346 if (areq->iv && ivsize > 0) {
349 offset = areq->cryptlen - ivsize;
351 memcpy(areq->iv, chan->backup_iv, ivsize);
354 scatterwalk_map_and_copy(areq->iv, areq->dst, offset,
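
On completion (file lines 321-354) the mappings are released with the same directions used during preparation, and areq->iv is refreshed so a chained request sees the right IV: for decryption it is the ciphertext block saved earlier, for encryption it is read back from the tail of the freshly written destination. A sketch of that write-back, reusing the assumed locals from above:

    if (areq->iv && ivsize > 0) {
        if (rctx->op_dir & CE_DECRYPTION) {
            /* Next IV is the last ciphertext block saved in prepare. */
            memcpy(areq->iv, chan->backup_iv, ivsize);
        } else {
            /* For encryption, copy it out of the end of the output. */
            unsigned int offset = areq->cryptlen - ivsize;

            scatterwalk_map_and_copy(areq->iv, areq->dst, offset,
                                     ivsize, 0);
        }
    }
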
363 int sun8i_ce_cipher_do_one(struct crypto_engine *engine, void *areq)
365 int err = sun8i_ce_cipher_prepare(engine, areq);
370 sun8i_ce_cipher_run(engine, areq);
371 sun8i_ce_cipher_unprepare(engine, areq);
375 int sun8i_ce_skdecrypt(struct skcipher_request *areq)
377 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
379 struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
384 if (sun8i_ce_cipher_need_fallback(areq))
385 return sun8i_ce_cipher_fallback(areq);
391 return crypto_transfer_skcipher_request_to_engine(engine, areq);
394 int sun8i_ce_skencrypt(struct skcipher_request *areq)
396 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
398 struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
403 if (sun8i_ce_cipher_need_fallback(areq))
404 return sun8i_ce_cipher_fallback(areq);
410 return crypto_transfer_skcipher_request_to_engine(engine, areq);
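
The two entry points (file lines 375-410) differ only in the direction they record before either falling back or queueing the request on the crypto engine. A sketch of the decrypt side; the engine/flow selection is an assumption about how the driver picks a hardware queue, and the encrypt side is symmetric apart from op_dir:

int sun8i_ce_skdecrypt(struct skcipher_request *areq)
{
    struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
    struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
    struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
    struct crypto_engine *engine;

    memset(rctx, 0, sizeof(struct sun8i_cipher_req_ctx));
    rctx->op_dir = CE_DECRYPTION;

    if (sun8i_ce_cipher_need_fallback(areq))
        return sun8i_ce_cipher_fallback(areq);

    /* Assumed flow selection: pick one of the device's hardware
     * channels and queue the request on its crypto engine. */
    engine = op->ce->chanlist[rctx->flow].engine;

    return crypto_transfer_skcipher_request_to_engine(engine, areq);
}
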