Lines matching refs: areq

23 static bool sun8i_ss_need_fallback(struct skcipher_request *areq)
25 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
28 struct scatterlist *in_sg = areq->src;
29 struct scatterlist *out_sg = areq->dst;
33 if (areq->cryptlen == 0 || areq->cryptlen % 16) {
38 if (sg_nents_for_len(areq->src, areq->cryptlen) > 8 ||
39 sg_nents_for_len(areq->dst, areq->cryptlen) > 8) {
44 len = areq->cryptlen;
45 sg = areq->src;
59 len = areq->cryptlen;
60 sg = areq->dst;
76 in_sg = areq->src;
77 out_sg = areq->dst;
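
The fragments above appear to come from the skcipher half of the Allwinner sun8i-ss crypto driver in the Linux kernel, and this first group is the hardware/software fallback decision. Two of its tests are fully visible in the matched lines: the request length must be a non-zero multiple of 16 bytes, and neither scatterlist may need more than 8 entries to cover cryptlen bytes. A minimal sketch of just those visible checks; the per-chunk walks over areq->src and areq->dst (file lines 44-77) are elided because their exact tests are not part of this listing:

#include <crypto/skcipher.h>
#include <linux/scatterlist.h>

/* Hedged sketch: reproduces only the conditions visible in the listing. */
static bool sketch_need_fallback(struct skcipher_request *areq)
{
        /* Zero-length or non-16-byte-multiple payloads go to software. */
        if (areq->cryptlen == 0 || areq->cryptlen % 16)
                return true;

        /* More than 8 scatterlist entries on either side is more than one
         * hardware task can describe (the "> 8" limit on file lines 38-39). */
        if (sg_nents_for_len(areq->src, areq->cryptlen) > 8 ||
            sg_nents_for_len(areq->dst, areq->cryptlen) > 8)
                return true;

        /* Per-chunk and src/dst layout checks (file lines 44-77) elided. */
        return false;
}
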
89 static int sun8i_ss_cipher_fallback(struct skcipher_request *areq)
91 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
93 struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
109 skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags,
110 areq->base.complete, areq->base.data);
111 skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst,
112 areq->cryptlen, areq->iv);
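
When the fallback is taken, the driver replays the caller's parameters on a software skcipher; lines 109-112 show the two request-setup calls. A hedged sketch of the full delegation pattern, assuming a tfm context type and fallback transform field (sun8i_cipher_tfm_ctx / fallback_tfm) plus a direction flag (op_dir, SS_DECRYPTION) that this listing does not confirm:

/* Hedged sketch of the software-fallback delegation around lines 89-112. */
static int sketch_cipher_fallback(struct skcipher_request *areq)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
        struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);     /* type name assumed */
        struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);

        /* Run the same operation on the software transform, preserving the
         * caller's flags, completion callback and buffers. */
        skcipher_request_set_tfm(&rctx->fallback_req, op->fallback_tfm); /* field assumed */
        skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags,
                                      areq->base.complete, areq->base.data);
        skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst,
                                   areq->cryptlen, areq->iv);

        return (rctx->op_dir & SS_DECRYPTION) ?                          /* flag assumed */
                crypto_skcipher_decrypt(&rctx->fallback_req) :
                crypto_skcipher_encrypt(&rctx->fallback_req);
}

Embedding fallback_req inside the request context is the usual way to avoid an allocation on this path; the fallback's request size then has to be accounted for when the driver sets its skcipher reqsize.
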
120 static int sun8i_ss_setup_ivs(struct skcipher_request *areq)
122 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
125 struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
126 struct scatterlist *sg = areq->src;
128 unsigned int len = areq->cryptlen;
137 offset = areq->cryptlen - ivsize;
138 scatterwalk_map_and_copy(sf->biv, areq->src, offset,
149 memcpy(sf->iv[0], areq->iv, ivsize);
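
sun8i_ss_setup_ivs prepares the chained IVs before the hardware runs: the caller-supplied IV is copied into the first per-flow IV slot (sf->iv[0]), and lines 137-138 save the last ciphertext block of the source into a backup buffer (sf->biv). That backup matters for in-place CBC decryption, where the block would otherwise be overwritten before it can be returned as the next IV. A hedged sketch of those two steps; the direction flag and the per-chunk IV handling for multi-SG requests are assumed or elided:

#include <crypto/scatterwalk.h>

/* Hedged sketch of the IV preparation visible at lines 137-149; the
 * sun8i_ss_flow type name and SS_DECRYPTION are assumptions. */
static void sketch_setup_ivs(struct skcipher_request *areq,
                             struct sun8i_cipher_req_ctx *rctx,
                             struct sun8i_ss_flow *sf)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
        unsigned int ivsize = crypto_skcipher_ivsize(tfm);

        if (rctx->op_dir & SS_DECRYPTION) {
                /* Save the final ciphertext block before an in-place
                 * decryption overwrites it: it becomes the chained IV
                 * copied back into areq->iv once the operation is done. */
                unsigned int offset = areq->cryptlen - ivsize;

                scatterwalk_map_and_copy(sf->biv, areq->src, offset, ivsize, 0);
        }

        /* The caller-provided IV seeds the first hardware task. */
        memcpy(sf->iv[0], areq->iv, ivsize);
}
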
183 static int sun8i_ss_cipher(struct skcipher_request *areq)
185 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
188 struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
197 int nsgs = sg_nents_for_len(areq->src, areq->cryptlen);
198 int nsgd = sg_nents_for_len(areq->dst, areq->cryptlen);
204 crypto_tfm_alg_name(areq->base.tfm),
205 areq->cryptlen,
206 rctx->op_dir, areq->iv, crypto_skcipher_ivsize(tfm),
225 if (areq->iv && crypto_skcipher_ivsize(tfm) > 0) {
226 err = sun8i_ss_setup_ivs(areq);
230 if (areq->src == areq->dst) {
231 nr_sgs = dma_map_sg(ss->dev, areq->src, nsgs, DMA_BIDIRECTIONAL);
239 nr_sgs = dma_map_sg(ss->dev, areq->src, nsgs, DMA_TO_DEVICE);
245 nr_sgd = dma_map_sg(ss->dev, areq->dst, nsgd, DMA_FROM_DEVICE);
253 len = areq->cryptlen;
255 sg = areq->src;
263 areq->cryptlen, i, rctx->t_src[i].len, sg->offset, todo);
275 len = areq->cryptlen;
277 sg = areq->dst;
285 areq->cryptlen, i, rctx->t_dst[i].len, sg->offset, todo);
297 err = sun8i_ss_run_task(ss, rctx, crypto_tfm_alg_name(areq->base.tfm));
300 if (areq->src == areq->dst) {
301 dma_unmap_sg(ss->dev, areq->src, nsgs, DMA_BIDIRECTIONAL);
303 dma_unmap_sg(ss->dev, areq->src, nsgs, DMA_TO_DEVICE);
304 dma_unmap_sg(ss->dev, areq->dst, nsgd, DMA_FROM_DEVICE);
308 if (areq->iv && ivsize > 0) {
314 offset = areq->cryptlen - ivsize;
316 memcpy(areq->iv, sf->biv, ivsize);
319 scatterwalk_map_and_copy(areq->iv, areq->dst, offset,
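
sun8i_ss_cipher is the hardware path itself, and the matched lines spell out its DMA discipline: an in-place request (areq->src == areq->dst) gets one bidirectional mapping, otherwise source and destination are mapped separately; both lists are then walked to fill the per-task descriptors (rctx->t_src[]/rctx->t_dst[], file lines 253-285), the task is run, the mappings are released exactly as they were created, and the chained IV is written back, either from the saved sf->biv (decryption) or from the last block of areq->dst (encryption). A condensed, hedged sketch of that sequence; descriptor filling, error unwinding and several type/flag names are assumed or elided:

#include <linux/dma-mapping.h>
#include <crypto/scatterwalk.h>

/* Hedged sketch of the hardware path (file lines 183-319): mapping
 * discipline plus the final IV write-back only. */
static int sketch_cipher(struct sun8i_ss_dev *ss, struct skcipher_request *areq,
                         struct sun8i_cipher_req_ctx *rctx,
                         struct sun8i_ss_flow *sf)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
        unsigned int ivsize = crypto_skcipher_ivsize(tfm);
        int nsgs = sg_nents_for_len(areq->src, areq->cryptlen);
        int nsgd = sg_nents_for_len(areq->dst, areq->cryptlen);
        int nr_sgs, nr_sgd, err;

        /* In-place requests share one scatterlist, so map it both ways;
         * otherwise map source and destination independently.
         * (Checks on the returned counts are elided here.) */
        if (areq->src == areq->dst) {
                nr_sgs = dma_map_sg(ss->dev, areq->src, nsgs, DMA_BIDIRECTIONAL);
                nr_sgd = nr_sgs;
        } else {
                nr_sgs = dma_map_sg(ss->dev, areq->src, nsgs, DMA_TO_DEVICE);
                nr_sgd = dma_map_sg(ss->dev, areq->dst, nsgd, DMA_FROM_DEVICE);
        }

        /* ... walk areq->src/areq->dst and fill rctx->t_src[]/rctx->t_dst[]
         * with the mapped addresses and lengths, bounded by nr_sgs/nr_sgd
         * (file lines 253-285) ... */

        err = sun8i_ss_run_task(ss, rctx, crypto_tfm_alg_name(areq->base.tfm));

        /* Unmap exactly as mapped. */
        if (areq->src == areq->dst) {
                dma_unmap_sg(ss->dev, areq->src, nsgs, DMA_BIDIRECTIONAL);
        } else {
                dma_unmap_sg(ss->dev, areq->src, nsgs, DMA_TO_DEVICE);
                dma_unmap_sg(ss->dev, areq->dst, nsgd, DMA_FROM_DEVICE);
        }

        /* Hand the chained IV back: on decryption use the ciphertext block
         * saved in sun8i_ss_setup_ivs(), on encryption read the last block
         * of the now-unmapped destination. */
        if (areq->iv && ivsize > 0) {
                unsigned int offset = areq->cryptlen - ivsize;

                if (rctx->op_dir & SS_DECRYPTION)                /* flag assumed */
                        memcpy(areq->iv, sf->biv, ivsize);
                else
                        scatterwalk_map_and_copy(areq->iv, areq->dst, offset,
                                                 ivsize, 0);
        }

        return err;
}
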
332 int sun8i_ss_handle_cipher_request(struct crypto_engine *engine, void *areq)
335 struct skcipher_request *breq = container_of(areq, struct skcipher_request, base);
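
sun8i_ss_handle_cipher_request is the crypto_engine worker callback: it gets the queued request back as a void pointer to its base, recovers the skcipher_request with container_of, runs the hardware path and then finalizes the request. A hedged sketch; the finalize call is the standard crypto_engine pairing rather than something visible in this listing:

#include <crypto/engine.h>

/* Hedged sketch of the engine callback at lines 332-335. */
int sketch_handle_cipher_request(struct crypto_engine *engine, void *areq)
{
        struct skcipher_request *breq =
                container_of(areq, struct skcipher_request, base);
        int err;

        err = sun8i_ss_cipher(breq);            /* hardware path, sketched above */
        crypto_finalize_skcipher_request(engine, breq, err);

        return 0;
}
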
345 int sun8i_ss_skdecrypt(struct skcipher_request *areq)
347 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
349 struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
356 if (sun8i_ss_need_fallback(areq))
357 return sun8i_ss_cipher_fallback(areq);
363 return crypto_transfer_skcipher_request_to_engine(engine, areq);
366 int sun8i_ss_skencrypt(struct skcipher_request *areq)
368 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
370 struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
377 if (sun8i_ss_need_fallback(areq))
378 return sun8i_ss_cipher_fallback(areq);
384 return crypto_transfer_skcipher_request_to_engine(engine, areq);
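
sun8i_ss_skencrypt and sun8i_ss_skdecrypt are the algorithm entry points and differ only in the direction they record: each consults sun8i_ss_need_fallback() first and delegates to the software path when the request does not fit the hardware, otherwise it queues the request on a crypto_engine. A hedged sketch of the encrypt side; the direction flag and the flow/engine lookup are assumptions based on driver convention, not on this listing:

/* Hedged sketch of the encrypt entry point (lines 366-384); the decrypt
 * side is identical apart from the recorded direction. */
int sketch_skencrypt(struct skcipher_request *areq)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
        struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);     /* type assumed */
        struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
        struct crypto_engine *engine;

        rctx->op_dir = SS_ENCRYPTION;                    /* flag name assumed */

        /* Requests the hardware cannot express go to the software skcipher. */
        if (sun8i_ss_need_fallback(areq))
                return sun8i_ss_cipher_fallback(areq);

        /* Otherwise queue the request on one of the device's flows; the
         * engine callback sketched above will pick it up asynchronously. */
        rctx->flow = 0;                                  /* flow selection elided */
        engine = op->ss->flows[rctx->flow].engine;       /* lookup assumed */
        return crypto_transfer_skcipher_request_to_engine(engine, areq);
}
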