Lines matching references to req:

173 static int __maybe_unused ecb_encrypt(struct skcipher_request *req)
175 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
181 err = skcipher_walk_virt(&walk, req, false);
193 static int __maybe_unused ecb_decrypt(struct skcipher_request *req)
195 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
201 err = skcipher_walk_virt(&walk, req, false);
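The two ECB entry points above (lines 173 and 193) follow the standard skcipher walk pattern: map the request with skcipher_walk_virt(), process whole blocks inside a NEON-enabled section, and hand any remainder back to skcipher_walk_done(). As a hedged sketch only, the surrounding function plausibly reads as below; the crypto_aes_ctx layout and the rounds formula come from <crypto/aes.h>, while the aes_ecb_encrypt() assembler helper name is an assumption not visible in the matches:

static int __maybe_unused ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key_length / 4;	/* 10/12/14 rounds */
	struct skcipher_walk walk;
	unsigned int blocks;

	/* map the request's scatterlists for in-kernel (virtual) access */
	err = skcipher_walk_virt(&walk, req, false);

	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		/* assumed asm helper: encrypts 'blocks' full blocks */
		aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				ctx->key_enc, rounds, blocks);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err;
}

ecb_decrypt() at line 193 would differ only in using the decryption round keys (ctx->key_dec) and the matching decrypt helper.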
213 static int cbc_encrypt_walk(struct skcipher_request *req,
216 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
231 static int __maybe_unused cbc_encrypt(struct skcipher_request *req)
236 err = skcipher_walk_virt(&walk, req, false);
239 return cbc_encrypt_walk(req, &walk);
242 static int cbc_decrypt_walk(struct skcipher_request *req,
245 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
260 static int __maybe_unused cbc_decrypt(struct skcipher_request *req)
265 err = skcipher_walk_virt(&walk, req, false);
268 return cbc_decrypt_walk(req, &walk);
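CBC is split into a *_walk() helper plus a thin entry point: the entry point at line 231 initializes the walk (line 236) and delegates to cbc_encrypt_walk() (line 239), so other modes can reuse the same block loop on a walk they set up themselves. A hedged sketch of the encrypt pair, with the aes_cbc_encrypt() helper name assumed:

static int cbc_encrypt_walk(struct skcipher_request *req,
			    struct skcipher_walk *walk)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err = 0, rounds = 6 + ctx->key_length / 4;
	unsigned int blocks;

	while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		/* assumed asm helper: CBC-chains 'blocks' blocks via walk->iv */
		aes_cbc_encrypt(walk->dst.virt.addr, walk->src.virt.addr,
				ctx->key_enc, rounds, blocks, walk->iv);
		kernel_neon_end();
		err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE);
	}
	return err;
}

static int __maybe_unused cbc_encrypt(struct skcipher_request *req)
{
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);
	if (err)
		return err;
	return cbc_encrypt_walk(req, &walk);
}

The decrypt pair at lines 242-268 mirrors this. The split pays off in cts_cbc_encrypt() (line 271) and essiv_cbc_encrypt() (line 401), which both end by calling into the shared walk loop.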
271 static int cts_cbc_encrypt(struct skcipher_request *req)
273 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
276 int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;
277 struct scatterlist *src = req->src, *dst = req->dst;
283 skcipher_request_set_callback(&subreq, skcipher_request_flags(req),
286 if (req->cryptlen <= AES_BLOCK_SIZE) {
287 if (req->cryptlen < AES_BLOCK_SIZE)
293 skcipher_request_set_crypt(&subreq, req->src, req->dst,
295 req->iv);
302 if (req->cryptlen == AES_BLOCK_SIZE)
305 dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);
306 if (req->dst != req->src)
307 dst = scatterwalk_ffwd(sg_dst, req->dst,
313 req->cryptlen - cbc_blocks * AES_BLOCK_SIZE,
314 req->iv);
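cts_cbc_encrypt() implements CBC with ciphertext stealing: everything except the last two (possibly partial) blocks runs through the plain CBC loop via an on-stack subrequest, the scatterlists are then fast-forwarded with scatterwalk_ffwd() (lines 305-307), and a dedicated helper finishes the final two blocks. A hedged reconstruction of the flow; skcipher_request_set_tfm(), the -EINVAL short-input check, and the aes_cbc_cts_encrypt() helper name are assumptions filled in around the matched lines:

static int cts_cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key_length / 4;
	int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;
	struct scatterlist *src = req->src, *dst = req->dst;
	struct scatterlist sg_src[2], sg_dst[2];
	struct skcipher_request subreq;
	struct skcipher_walk walk;

	skcipher_request_set_tfm(&subreq, tfm);
	skcipher_request_set_callback(&subreq, skcipher_request_flags(req),
				      NULL, NULL);

	if (req->cryptlen <= AES_BLOCK_SIZE) {
		if (req->cryptlen < AES_BLOCK_SIZE)
			return -EINVAL;	/* CTS needs at least one block */
		cbc_blocks = 1;
	}

	if (cbc_blocks > 0) {
		/* bulk: everything except the final two blocks */
		skcipher_request_set_crypt(&subreq, req->src, req->dst,
					   cbc_blocks * AES_BLOCK_SIZE,
					   req->iv);

		err = skcipher_walk_virt(&walk, &subreq, false) ?:
		      cbc_encrypt_walk(&subreq, &walk);
		if (err)
			return err;

		if (req->cryptlen == AES_BLOCK_SIZE)
			return 0;	/* exactly one block: plain CBC */

		/* fast-forward the scatterlists past the bulk portion */
		dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);
		if (req->dst != req->src)
			dst = scatterwalk_ffwd(sg_dst, req->dst,
					       subreq.cryptlen);
	}

	/* tail: the last two blocks, reordered/truncated by the CTS helper */
	skcipher_request_set_crypt(&subreq, src, dst,
				   req->cryptlen - cbc_blocks * AES_BLOCK_SIZE,
				   req->iv);

	err = skcipher_walk_virt(&walk, &subreq, false);
	if (err)
		return err;

	kernel_neon_begin();
	/* assumed asm helper implementing the CTS final-block handling */
	aes_cbc_cts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
			    ctx->key_enc, rounds, walk.nbytes, walk.iv);
	kernel_neon_end();

	return skcipher_walk_done(&walk, 0);
}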
328 static int cts_cbc_decrypt(struct skcipher_request *req)
330 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
333 int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;
334 struct scatterlist *src = req->src, *dst = req->dst;
340 skcipher_request_set_callback(&subreq, skcipher_request_flags(req),
343 if (req->cryptlen <= AES_BLOCK_SIZE) {
344 if (req->cryptlen < AES_BLOCK_SIZE)
350 skcipher_request_set_crypt(&subreq, req->src, req->dst,
352 req->iv);
359 if (req->cryptlen == AES_BLOCK_SIZE)
362 dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);
363 if (req->dst != req->src)
364 dst = scatterwalk_ffwd(sg_dst, req->dst,
370 req->cryptlen - cbc_blocks * AES_BLOCK_SIZE,
371 req->iv);
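cts_cbc_decrypt() (lines 328-371) is structurally identical: the same cbc_blocks computation, subrequest setup, and scatterlist fast-forward, with the bulk handled by cbc_decrypt_walk() and the tail by a decrypt-side CTS helper, plausibly:

	kernel_neon_begin();
	aes_cbc_cts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
			    ctx->key_dec, rounds, walk.nbytes, walk.iv);
	kernel_neon_end();

Both the helper name and the use of ctx->key_dec here are assumptions by analogy with the encrypt path.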
401 static int __maybe_unused essiv_cbc_encrypt(struct skcipher_request *req)
403 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
409 err = skcipher_walk_virt(&walk, req, false);
416 req->iv, ctx->key2.key_enc);
420 return err ?: cbc_encrypt_walk(req, &walk);
423 static int __maybe_unused essiv_cbc_decrypt(struct skcipher_request *req)
425 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
431 err = skcipher_walk_virt(&walk, req, false);
438 req->iv, ctx->key2.key_enc);
442 return err ?: cbc_decrypt_walk(req, &walk);
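The ESSIV variants derive the per-sector IV by encrypting req->iv under a second, hash-derived key; that is what the ctx->key2.key_enc argument at lines 416 and 438 is for. Only the first chunk needs the combined helper, after which control falls through to the shared CBC walk loop (the err ?: cbc_encrypt_walk() at line 420). A hedged sketch; the two-key context struct name and the aes_essiv_cbc_encrypt() helper are assumptions:

static int __maybe_unused essiv_cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	/* assumed two-key context: key1 for CBC, key2 for the ESSIV IV */
	struct crypto_aes_essiv_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key1.key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, false);

	blocks = walk.nbytes / AES_BLOCK_SIZE;
	if (blocks) {
		kernel_neon_begin();
		/* assumed helper: encrypts the IV with key2, then CBC with key1 */
		aes_essiv_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				      ctx->key1.key_enc, rounds, blocks,
				      req->iv, ctx->key2.key_enc);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err ?: cbc_encrypt_walk(req, &walk);
}

essiv_cbc_decrypt() at line 423 mirrors this, ending in cbc_decrypt_walk() (line 442); note that key2 stays key_enc in both directions, since the IV is always encrypted.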
445 static int ctr_encrypt(struct skcipher_request *req)
447 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
453 err = skcipher_walk_virt(&walk, req, false);
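ctr_encrypt() walks the request the same way: full blocks go to the NEON helper, and a trailing partial block is handled by generating one extra keystream block and XORing it over the tail with crypto_xor_cpy() from <crypto/algapi.h>. A hedged sketch; the aes_ctr_encrypt() helper and its blocks = -1 tail convention are assumptions:

static int ctr_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	int blocks;

	err = skcipher_walk_virt(&walk, req, false);

	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		/* assumed asm helper: encrypts counter blocks into dst */
		aes_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				ctx->key_enc, rounds, blocks, walk.iv);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	if (walk.nbytes) {
		u8 __aligned(8) tail[AES_BLOCK_SIZE];
		unsigned int nbytes = walk.nbytes;
		u8 *tdst = walk.dst.virt.addr;
		u8 *tsrc = walk.src.virt.addr;

		/* partial tail: one keystream block, then XOR over the tail */
		kernel_neon_begin();
		aes_ctr_encrypt(tail, NULL, ctx->key_enc, rounds, -1, walk.iv);
		kernel_neon_end();
		crypto_xor_cpy(tdst, tsrc, tail, nbytes);
		err = skcipher_walk_done(&walk, 0);
	}
	return err;
}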
499 static int __maybe_unused ctr_encrypt_sync(struct skcipher_request *req)
502 return crypto_ctr_encrypt_walk(req, ctr_encrypt_one);
504 return ctr_encrypt(req);
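The _sync variant exists because the NEON register file cannot be used in every context. When crypto_simd_usable() (from <crypto/internal/simd.h>) reports that SIMD is off limits, the request is serviced by the generic crypto_ctr_encrypt_walk() from <crypto/ctr.h> with a scalar single-block callback; ctr_encrypt_one is presumably that callback, defined nearby in the same file. A sketch consistent with the matched lines:

static int __maybe_unused ctr_encrypt_sync(struct skcipher_request *req)
{
	/* SIMD may be unusable here (e.g. hard-IRQ context): take the
	 * scalar fallback rather than touching the NEON registers */
	if (!crypto_simd_usable())
		return crypto_ctr_encrypt_walk(req, ctr_encrypt_one);

	return ctr_encrypt(req);
}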
507 static int __maybe_unused xts_encrypt(struct skcipher_request *req)
509 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
512 int tail = req->cryptlen % AES_BLOCK_SIZE;
518 if (req->cryptlen < AES_BLOCK_SIZE)
521 err = skcipher_walk_virt(&walk, req, false);
524 int xts_blocks = DIV_ROUND_UP(req->cryptlen,
531 skcipher_request_flags(req),
533 skcipher_request_set_crypt(&subreq, req->src, req->dst,
535 req->iv);
536 req = &subreq;
537 err = skcipher_walk_virt(&walk, req, false);
559 dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
560 if (req->dst != req->src)
561 dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);
563 skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
564 req->iv);
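xts_encrypt() rejects sub-block inputs (line 518), then deals with ciphertext stealing in two phases. If the length is not a block multiple and the walk cannot cover the whole request in one chunk (so the stolen block might straddle a chunk boundary), the walk is aborted and reissued as a subrequest stopping two blocks short (lines 524-537); the last full block plus the tail is then processed separately after fast-forwarding the scatterlists (lines 559-564). A hedged reconstruction; skcipher_walk_abort(), the two-key context struct, and the aes_xts_encrypt() helper signature are assumptions:

static int __maybe_unused xts_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	/* assumed two-key XTS context: key1 for data, key2 for the tweak */
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key1.key_length / 4;
	int tail = req->cryptlen % AES_BLOCK_SIZE;
	struct scatterlist sg_src[2], sg_dst[2];
	struct skcipher_request subreq;
	struct scatterlist *src, *dst;
	struct skcipher_walk walk;

	if (req->cryptlen < AES_BLOCK_SIZE)
		return -EINVAL;

	err = skcipher_walk_virt(&walk, req, false);

	if (unlikely(tail > 0 && walk.nbytes < walk.total)) {
		/* can't steal across walk chunks: redo the bulk as a
		 * subrequest that stops two blocks short of the end */
		int xts_blocks = DIV_ROUND_UP(req->cryptlen,
					      AES_BLOCK_SIZE) - 2;

		skcipher_walk_abort(&walk);

		skcipher_request_set_tfm(&subreq, tfm);
		skcipher_request_set_callback(&subreq,
					      skcipher_request_flags(req),
					      NULL, NULL);
		skcipher_request_set_crypt(&subreq, req->src, req->dst,
					   xts_blocks * AES_BLOCK_SIZE,
					   req->iv);
		req = &subreq;
		err = skcipher_walk_virt(&walk, req, false);
	} else {
		tail = 0;
	}

	for (first = 1; walk.nbytes >= AES_BLOCK_SIZE; first = 0) {
		int nbytes = walk.nbytes;

		if (walk.nbytes < walk.total)
			nbytes &= ~(AES_BLOCK_SIZE - 1);

		kernel_neon_begin();
		/* assumed asm helper; 'first' asks it to encrypt the tweak */
		aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				ctx->key1.key_enc, rounds, nbytes,
				ctx->key2.key_enc, walk.iv, first);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
	}

	if (err || likely(!tail))
		return err;

	/* ciphertext stealing: last full block plus the partial tail */
	dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
	if (req->dst != req->src)
		dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);

	skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
				   req->iv);

	err = skcipher_walk_virt(&walk, req, false);
	if (err)
		return err;

	kernel_neon_begin();
	aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
			ctx->key1.key_enc, rounds, walk.nbytes,
			ctx->key2.key_enc, walk.iv, first);
	kernel_neon_end();

	return skcipher_walk_done(&walk, 0);
}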
579 static int __maybe_unused xts_decrypt(struct skcipher_request *req)
581 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
584 int tail = req->cryptlen % AES_BLOCK_SIZE;
590 if (req->cryptlen < AES_BLOCK_SIZE)
593 err = skcipher_walk_virt(&walk, req, false);
596 int xts_blocks = DIV_ROUND_UP(req->cryptlen,
603 skcipher_request_flags(req),
605 skcipher_request_set_crypt(&subreq, req->src, req->dst,
607 req->iv);
608 req = &subreq;
609 err = skcipher_walk_virt(&walk, req, false);
631 dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
632 if (req->dst != req->src)
633 dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);
635 skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
636 req->iv);
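xts_decrypt() (lines 579-636) repeats the same two-phase structure match for match. By analogy with the other modes, it would differ only in passing the decryption round keys (ctx->key1.key_dec) to a matching aes_xts_decrypt() helper, while keeping ctx->key2.key_enc for the tweak, since the XTS tweak is always computed by encryption. Both the helper name and the key fields are assumptions.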