Lines matching references to: req
167 static int ecb_encrypt(struct skcipher_request *req)
169 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
175 err = skcipher_walk_virt(&walk, req, false);
187 static int ecb_decrypt(struct skcipher_request *req)
189 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
195 err = skcipher_walk_virt(&walk, req, false);
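The ECB handlers above follow the standard skcipher_walk pattern: start a virtual-address walk over the request, process whole blocks per chunk, and hand the leftover back via skcipher_walk_done(). A minimal sketch of that loop follows; the asm helper aes_ecb_encrypt(), its signature, the NEON bracketing and the context layout are assumptions modelled on the arm64 driver, not shown in this listing.

#include <asm/neon.h>
#include <crypto/aes.h>
#include <crypto/internal/skcipher.h>
#include <linux/linkage.h>

/* assumed asm helper, modelled on the arm64 driver */
asmlinkage void aes_ecb_encrypt(u8 out[], u8 const in[], u32 const rk[],
				int rounds, int blocks);

static int ecb_encrypt_sketch(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, false);

	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				ctx->key_enc, rounds, blocks);
		kernel_neon_end();
		/* hand back whatever was not a whole block in this chunk */
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err;
}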
207 static int cbc_encrypt_walk(struct skcipher_request *req,
210 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
226 static int cbc_encrypt(struct skcipher_request *req)
231 err = skcipher_walk_virt(&walk, req, false);
234 return cbc_encrypt_walk(req, &walk);
237 static int cbc_decrypt_walk(struct skcipher_request *req,
240 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
256 static int cbc_decrypt(struct skcipher_request *req)
261 err = skcipher_walk_virt(&walk, req, false);
264 return cbc_decrypt_walk(req, &walk);
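The CBC entry points above only start the walk and then delegate to cbc_encrypt_walk()/cbc_decrypt_walk(); that split lets cts_cbc_encrypt()/cts_cbc_decrypt() drive the same walk handlers on an on-stack subrequest. A sketch of the encrypt side, using the same includes as the ECB sketch; the asm helper name and signature are again assumptions modelled on the arm64 driver.

/* assumed asm helper: CBC-encrypt 'blocks' blocks, updating iv in place */
asmlinkage void aes_cbc_encrypt(u8 out[], u8 const in[], u32 const rk[],
				int rounds, int blocks, u8 iv[]);

static int cbc_encrypt_walk_sketch(struct skcipher_request *req,
				   struct skcipher_walk *walk)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err = 0, rounds = 6 + ctx->key_length / 4;
	unsigned int blocks;

	while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		aes_cbc_encrypt(walk->dst.virt.addr, walk->src.virt.addr,
				ctx->key_enc, rounds, blocks, walk->iv);
		kernel_neon_end();
		err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE);
	}
	return err;
}

static int cbc_encrypt_sketch(struct skcipher_request *req)
{
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);
	if (err)
		return err;
	return cbc_encrypt_walk_sketch(req, &walk);
}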
267 static int cts_cbc_encrypt(struct skcipher_request *req)
269 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
271 int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;
272 struct scatterlist *src = req->src, *dst = req->dst;
279 skcipher_request_set_callback(&subreq, skcipher_request_flags(req),
282 if (req->cryptlen <= AES_BLOCK_SIZE) {
283 if (req->cryptlen < AES_BLOCK_SIZE)
289 skcipher_request_set_crypt(&subreq, req->src, req->dst,
291 req->iv);
298 if (req->cryptlen == AES_BLOCK_SIZE)
301 dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);
302 if (req->dst != req->src)
303 dst = scatterwalk_ffwd(sg_dst, req->dst,
309 req->cryptlen - cbc_blocks * AES_BLOCK_SIZE,
310 req->iv);
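cts_cbc_encrypt() above does ciphertext stealing in two passes: bulk CBC over all but the last two blocks through an on-stack subrequest, then scatterwalk_ffwd() to skip past the bytes already processed before handling the final full-plus-partial block. A sketch of that flow, reusing cbc_encrypt_walk_sketch() from the CBC sketch above; aes_cbc_cts_encrypt() is an assumed asm helper for the stolen final block (the decrypt path at lines 325-368 mirrors this with the decryption key schedule).

#include <crypto/scatterwalk.h>

/* assumed asm helper: encrypt the final full block plus short tail (CTS) */
asmlinkage void aes_cbc_cts_encrypt(u8 out[], u8 const in[], u32 const rk[],
				    int rounds, int bytes, u8 const iv[]);

static int cts_cbc_encrypt_sketch(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int rounds = 6 + ctx->key_length / 4;
	int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;
	struct scatterlist *src = req->src, *dst = req->dst;
	struct scatterlist sg_src[2], sg_dst[2];
	struct skcipher_request subreq;
	struct skcipher_walk walk;
	int err;

	skcipher_request_set_tfm(&subreq, tfm);
	skcipher_request_set_callback(&subreq, skcipher_request_flags(req),
				      NULL, NULL);

	if (req->cryptlen <= AES_BLOCK_SIZE) {
		if (req->cryptlen < AES_BLOCK_SIZE)
			return -EINVAL;		/* CTS needs at least one block */
		cbc_blocks = 1;
	}

	if (cbc_blocks > 0) {
		/* pass 1: plain CBC over everything but the last two blocks */
		skcipher_request_set_crypt(&subreq, req->src, req->dst,
					   cbc_blocks * AES_BLOCK_SIZE,
					   req->iv);

		err = skcipher_walk_virt(&walk, &subreq, false) ?:
		      cbc_encrypt_walk_sketch(&subreq, &walk);
		if (err)
			return err;

		if (req->cryptlen == AES_BLOCK_SIZE)
			return 0;		/* exactly one block: no stealing */

		/* skip past what the subrequest already covered */
		dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);
		if (req->dst != req->src)
			dst = scatterwalk_ffwd(sg_dst, req->dst,
					       subreq.cryptlen);
	}

	/* pass 2: final full block plus tail, with ciphertext stealing */
	skcipher_request_set_crypt(&subreq, src, dst,
				   req->cryptlen - cbc_blocks * AES_BLOCK_SIZE,
				   req->iv);

	err = skcipher_walk_virt(&walk, &subreq, false);
	if (err)
		return err;

	kernel_neon_begin();
	aes_cbc_cts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
			    ctx->key_enc, rounds, walk.nbytes, walk.iv);
	kernel_neon_end();

	return skcipher_walk_done(&walk, 0);
}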
325 static int cts_cbc_decrypt(struct skcipher_request *req)
327 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
329 int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;
330 struct scatterlist *src = req->src, *dst = req->dst;
337 skcipher_request_set_callback(&subreq, skcipher_request_flags(req),
340 if (req->cryptlen <= AES_BLOCK_SIZE) {
341 if (req->cryptlen < AES_BLOCK_SIZE)
347 skcipher_request_set_crypt(&subreq, req->src, req->dst,
349 req->iv);
356 if (req->cryptlen == AES_BLOCK_SIZE)
359 dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);
360 if (req->dst != req->src)
361 dst = scatterwalk_ffwd(sg_dst, req->dst,
367 req->cryptlen - cbc_blocks * AES_BLOCK_SIZE,
368 req->iv);
383 static int ctr_encrypt(struct skcipher_request *req)
385 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
390 err = skcipher_walk_virt(&walk, req, false);
436 static int ctr_encrypt_sync(struct skcipher_request *req)
439 return crypto_ctr_encrypt_walk(req, ctr_encrypt_one);
441 return ctr_encrypt(req);
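ctr_encrypt_sync() above exists because the SIMD unit is not always usable (for example when the request arrives in hard IRQ context). A sketch of that gate: fall back to the generic counter walk crypto_ctr_encrypt_walk() with a one-block software cipher, otherwise take the bulk ctr_encrypt() path from the listing. The library aes_encrypt() from <crypto/aes.h> is assumed as the non-SIMD one-block fallback.

#include <crypto/ctr.h>
#include <crypto/internal/simd.h>

static void ctr_encrypt_one_sketch(struct crypto_skcipher *tfm,
				   const u8 *src, u8 *dst)
{
	const struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);

	aes_encrypt(ctx, dst, src);	/* table-based, no SIMD registers */
}

static int ctr_encrypt_sync_sketch(struct skcipher_request *req)
{
	if (!crypto_simd_usable())
		return crypto_ctr_encrypt_walk(req, ctr_encrypt_one_sketch);

	return ctr_encrypt(req);	/* SIMD bulk path from the listing */
}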
444 static int xts_encrypt(struct skcipher_request *req)
446 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
449 int tail = req->cryptlen % AES_BLOCK_SIZE;
455 if (req->cryptlen < AES_BLOCK_SIZE)
458 err = skcipher_walk_virt(&walk, req, false);
461 int xts_blocks = DIV_ROUND_UP(req->cryptlen,
468 skcipher_request_flags(req),
470 skcipher_request_set_crypt(&subreq, req->src, req->dst,
472 req->iv);
473 req = &subreq;
474 err = skcipher_walk_virt(&walk, req, false);
496 dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
497 if (req->dst != req->src)
498 dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);
500 skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
501 req->iv);
503 err = skcipher_walk_virt(&walk, req, false);
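xts_encrypt() above reshapes the request when cryptlen is not block-aligned: if the misaligned data is spread over several walk chunks, it aborts the first walk, reissues a subrequest over a whole number of blocks minus one, and afterwards fast-forwards the scatterlists to process AES_BLOCK_SIZE + tail for ciphertext stealing. A condensed sketch of that control flow; xts_do_blocks() is a hypothetical stand-in for the driver's per-chunk asm loop (not a real function from this listing), and the tweak-key context is omitted.

/* hypothetical stand-in for the driver's NEON bulk loop over one walk */
int xts_do_blocks(struct crypto_skcipher *tfm, struct skcipher_walk *walk,
		  bool enc);

static int xts_encrypt_sketch(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	int tail = req->cryptlen % AES_BLOCK_SIZE;
	struct scatterlist sg_src[2], sg_dst[2];
	struct skcipher_request subreq;
	struct scatterlist *src, *dst;
	struct skcipher_walk walk;
	int err;

	if (req->cryptlen < AES_BLOCK_SIZE)
		return -EINVAL;		/* XTS needs at least one full block */

	err = skcipher_walk_virt(&walk, req, false);

	if (unlikely(tail > 0 && walk.nbytes < walk.total)) {
		/* misaligned length spread over chunks: split off the tail */
		int xts_blocks = DIV_ROUND_UP(req->cryptlen,
					      AES_BLOCK_SIZE) - 2;

		skcipher_walk_abort(&walk);

		skcipher_request_set_tfm(&subreq, tfm);
		skcipher_request_set_callback(&subreq,
					      skcipher_request_flags(req),
					      NULL, NULL);
		skcipher_request_set_crypt(&subreq, req->src, req->dst,
					   xts_blocks * AES_BLOCK_SIZE,
					   req->iv);
		req = &subreq;
		err = skcipher_walk_virt(&walk, req, false);
	} else {
		/* single contiguous chunk: the bulk loop can steal in place */
		tail = 0;
	}

	err = err ?: xts_do_blocks(tfm, &walk, true);
	if (err || likely(!tail))
		return err;

	/* ciphertext stealing: handle the last full block plus the tail */
	dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
	if (req->dst != req->src)
		dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);

	skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
				   req->iv);

	err = skcipher_walk_virt(&walk, req, false);
	return err ?: xts_do_blocks(tfm, &walk, true);
}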
516 static int xts_decrypt(struct skcipher_request *req)
518 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
521 int tail = req->cryptlen % AES_BLOCK_SIZE;
527 if (req->cryptlen < AES_BLOCK_SIZE)
530 err = skcipher_walk_virt(&walk, req, false);
533 int xts_blocks = DIV_ROUND_UP(req->cryptlen,
540 skcipher_request_flags(req),
542 skcipher_request_set_crypt(&subreq, req->src, req->dst,
544 req->iv);
545 req = &subreq;
546 err = skcipher_walk_virt(&walk, req, false);
568 dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
569 if (req->dst != req->src)
570 dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);
572 skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
573 req->iv);
575 err = skcipher_walk_virt(&walk, req, false);
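For orientation, a sketch of the caller side that produces the skcipher_request these handlers receive; "xts(aes)", the in-place buffer and the synchronous wait are just example choices, not taken from this listing. It shows where cryptlen, the src/dst scatterlists and the iv that the code above consumes come from.

#include <crypto/skcipher.h>
#include <linux/err.h>
#include <linux/scatterlist.h>

static int encrypt_buf_sketch(void *buf, unsigned int len,
			      const u8 *key, unsigned int keylen, u8 *iv)
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	tfm = crypto_alloc_skcipher("xts(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_skcipher_setkey(tfm, key, keylen);
	if (err)
		goto out_tfm;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_tfm;
	}

	/* in-place operation: src == dst, which the handlers above test for */
	sg_init_one(&sg, buf, len);
	skcipher_request_set_crypt(req, &sg, &sg, len, iv);
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP |
				      CRYPTO_TFM_REQ_MAY_BACKLOG,
				      crypto_req_done, &wait);

	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

	skcipher_request_free(req);
out_tfm:
	crypto_free_skcipher(tfm);
	return err;
}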