Lines Matching refs:req

76 	struct skcipher_request req;
160 * req->complete) and reschedule itself if there are more work to
165 struct crypto_async_request *req, *backlog;
173 req = crypto_dequeue_request(&cpu_queue->queue);
176 if (!req)
181 req->complete(req, 0);
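
A minimal sketch of the worker shape visible at lines 160-181: each work item dequeues exactly one request and "completes" it, which actually performs the deferred operation because the completion callback was swapped at enqueue time. The example_* names are hypothetical stand-ins; the per-CPU plumbing, locking and backlog notification of the real worker are left out, and system_wq is used here only for simplicity.

#include <linux/workqueue.h>
#include <crypto/algapi.h>

struct example_cpu_queue {		/* hypothetical stand-in for the per-CPU queue */
	struct crypto_queue queue;
	struct work_struct work;
};

static void example_queue_worker(struct work_struct *work)
{
	struct example_cpu_queue *cpu_queue =
		container_of(work, struct example_cpu_queue, work);
	struct crypto_async_request *req;

	/* take exactly one request; locking and backlog handling elided */
	req = crypto_dequeue_request(&cpu_queue->queue);
	if (!req)
		return;

	/*
	 * The callback was replaced at enqueue time, so this runs the
	 * deferred encrypt/decrypt/hash step, not the caller's handler.
	 */
	req->complete(req, 0);

	/* reschedule ourselves if more requests are waiting */
	if (cpu_queue->queue.qlen)
		queue_work(system_wq, &cpu_queue->work);
}
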
240 static void cryptd_skcipher_complete(struct skcipher_request *req, int err)
242 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
244 struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
248 rctx->complete(&req->base, err);
258 struct skcipher_request *req = skcipher_request_cast(base);
259 struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
260 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
262 struct skcipher_request *subreq = &rctx->req;
271 skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
272 req->iv);
277 req->base.complete = rctx->complete;
280 cryptd_skcipher_complete(req, err);
286 struct skcipher_request *req = skcipher_request_cast(base);
287 struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
288 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
290 struct skcipher_request *subreq = &rctx->req;
299 skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
300 req->iv);
305 req->base.complete = rctx->complete;
308 cryptd_skcipher_complete(req, err);
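
Lines 240-308 are the two deferred skcipher workers plus their shared completion helper: each rebuilds the operation as a subrequest aimed at the underlying synchronous cipher, reusing the original request's src/dst/cryptlen/iv, runs it, restores the caller's completion callback and then signals it. A hedged sketch of the encrypt side follows; the example_* types are hypothetical, tfm setup/registration and the reference counting done by the real completion helper are omitted, and the decrypt worker differs only in calling crypto_skcipher_decrypt().

#include <crypto/internal/skcipher.h>

/* hypothetical per-tfm and per-request contexts, mirroring lines 76 and 244 */
struct example_skcipher_ctx {
	struct crypto_skcipher *child;	/* underlying synchronous cipher */
};

struct example_skcipher_request_ctx {
	crypto_completion_t complete;	/* caller's original callback */
	struct skcipher_request req;	/* subrequest aimed at the child */
};

static void example_skcipher_encrypt(struct crypto_async_request *base, int err)
{
	struct skcipher_request *req = skcipher_request_cast(base);
	struct example_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct example_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_request *subreq = &rctx->req;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	/* point the same buffers and IV at the synchronous child cipher */
	skcipher_request_set_tfm(subreq, ctx->child);
	skcipher_request_set_callback(subreq, CRYPTO_TFM_REQ_MAY_SLEEP,
				      NULL, NULL);
	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
				   req->iv);

	err = crypto_skcipher_encrypt(subreq);
	skcipher_request_zero(subreq);

	/* put the caller's completion back before reporting the result */
	req->base.complete = rctx->complete;

out:
	rctx->complete(&req->base, err);	/* what the completion helper boils down to */
}
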
311 static int cryptd_skcipher_enqueue(struct skcipher_request *req,
314 struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
315 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
319 rctx->complete = req->base.complete;
320 req->base.complete = compl;
322 return cryptd_enqueue_request(queue, &req->base);
325 static int cryptd_skcipher_encrypt_enqueue(struct skcipher_request *req)
327 return cryptd_skcipher_enqueue(req, cryptd_skcipher_encrypt);
330 static int cryptd_skcipher_decrypt_enqueue(struct skcipher_request *req)
332 return cryptd_skcipher_enqueue(req, cryptd_skcipher_decrypt);
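
The enqueue side (lines 311-332) is where the callback swap happens: the caller's completion is parked in the request context, the matching worker routine is installed in its place, and the request is pushed onto the queue. A sketch reusing the hypothetical names and includes from the sketches above; the real helper additionally picks a per-CPU queue, refcounts the tfm and avoids kicking the worker when the queue is full.

static struct example_cpu_queue example_queue;	/* single queue, for the sketch only */

static int example_enqueue_request(struct crypto_async_request *base)
{
	int err;

	err = crypto_enqueue_request(&example_queue.queue, base);
	queue_work(system_wq, &example_queue.work);
	return err;
}

static int example_skcipher_enqueue(struct skcipher_request *req,
				    crypto_completion_t compl)
{
	struct example_skcipher_request_ctx *rctx = skcipher_request_ctx(req);

	rctx->complete = req->base.complete;	/* remember the caller's callback */
	req->base.complete = compl;		/* the worker will run instead of it */

	return example_enqueue_request(&req->base);
}

static int example_skcipher_encrypt_enqueue(struct skcipher_request *req)
{
	return example_skcipher_enqueue(req, example_skcipher_encrypt);
}

The decrypt wrapper is identical apart from passing the decrypt worker.
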
464 static int cryptd_hash_enqueue(struct ahash_request *req,
467 struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
468 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
472 rctx->complete = req->base.complete;
473 req->base.complete = compl;
475 return cryptd_enqueue_request(queue, &req->base);
478 static void cryptd_hash_complete(struct ahash_request *req, int err)
480 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
482 struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
486 rctx->complete(&req->base, err);
497 struct ahash_request *req = ahash_request_cast(req_async);
498 struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
508 req->base.complete = rctx->complete;
511 cryptd_hash_complete(req, err);
514 static int cryptd_hash_init_enqueue(struct ahash_request *req)
516 return cryptd_hash_enqueue(req, cryptd_hash_init);
521 struct ahash_request *req = ahash_request_cast(req_async);
524 rctx = ahash_request_ctx(req);
529 err = shash_ahash_update(req, &rctx->desc);
531 req->base.complete = rctx->complete;
534 cryptd_hash_complete(req, err);
537 static int cryptd_hash_update_enqueue(struct ahash_request *req)
539 return cryptd_hash_enqueue(req, cryptd_hash_update);
544 struct ahash_request *req = ahash_request_cast(req_async);
545 struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
550 err = crypto_shash_final(&rctx->desc, req->result);
552 req->base.complete = rctx->complete;
555 cryptd_hash_complete(req, err);
558 static int cryptd_hash_final_enqueue(struct ahash_request *req)
560 return cryptd_hash_enqueue(req, cryptd_hash_final);
565 struct ahash_request *req = ahash_request_cast(req_async);
566 struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
571 err = shash_ahash_finup(req, &rctx->desc);
573 req->base.complete = rctx->complete;
576 cryptd_hash_complete(req, err);
579 static int cryptd_hash_finup_enqueue(struct ahash_request *req)
581 return cryptd_hash_enqueue(req, cryptd_hash_finup);
588 struct ahash_request *req = ahash_request_cast(req_async);
589 struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
597 err = shash_ahash_digest(req, desc);
599 req->base.complete = rctx->complete;
602 cryptd_hash_complete(req, err);
605 static int cryptd_hash_digest_enqueue(struct ahash_request *req)
607 return cryptd_hash_enqueue(req, cryptd_hash_digest);
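
The hash half (lines 464-607) repeats the same enqueue/complete pattern and then services each deferred ahash step through a synchronous shash descriptor kept in the request context. A sketch of the update step; init, final, finup and digest follow the same shape using crypto_shash_init(), crypto_shash_final(), shash_ahash_finup() and shash_ahash_digest(). The example_* names are hypothetical again.

#include <crypto/internal/hash.h>

struct example_hash_request_ctx {
	crypto_completion_t complete;	/* caller's original callback */
	struct shash_desc desc;		/* must be last: trailing shash state */
};

static void example_hash_update(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct example_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	/* feed req->src through the synchronous shash held in the ctx */
	err = shash_ahash_update(req, &rctx->desc);

	/* restore the caller's completion, then report the result */
	req->base.complete = rctx->complete;
out:
	rctx->complete(&req->base, err);
}
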
610 static int cryptd_hash_export(struct ahash_request *req, void *out)
612 struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
617 static int cryptd_hash_import(struct ahash_request *req, const void *in)
619 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
621 struct shash_desc *desc = cryptd_shash_desc(req);
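
export and import (lines 610-621) need no deferral: they pass the shash state through synchronously, with import re-binding the descriptor to the underlying hash. A sketch in the same hypothetical terms and with the same includes as above, keeping the child handle in a per-tfm context like the skcipher case:

struct example_hash_ctx {
	struct crypto_shash *child;	/* underlying synchronous hash */
};

static int example_hash_export(struct ahash_request *req, void *out)
{
	struct example_hash_request_ctx *rctx = ahash_request_ctx(req);

	return crypto_shash_export(&rctx->desc, out);
}

static int example_hash_import(struct ahash_request *req, const void *in)
{
	struct example_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct example_hash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));

	rctx->desc.tfm = ctx->child;	/* re-bind the state to the child hash */

	return crypto_shash_import(&rctx->desc, in);
}
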
714 static void cryptd_aead_crypt(struct aead_request *req,
717 int (*crypt)(struct aead_request *req))
725 rctx = aead_request_ctx(req);
728 tfm = crypto_aead_reqtfm(req);
732 aead_request_set_tfm(req, child);
733 err = crypt( req );
740 compl(&req->base, err);
751 struct aead_request *req;
753 req = container_of(areq, struct aead_request, base);
754 cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->encrypt);
761 struct aead_request *req;
763 req = container_of(areq, struct aead_request, base);
764 cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->decrypt);
767 static int cryptd_aead_enqueue(struct aead_request *req,
770 struct cryptd_aead_request_ctx *rctx = aead_request_ctx(req);
771 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
774 rctx->complete = req->base.complete;
775 req->base.complete = compl;
776 return cryptd_enqueue_request(queue, &req->base);
779 static int cryptd_aead_encrypt_enqueue(struct aead_request *req)
781 return cryptd_aead_enqueue(req, cryptd_aead_encrypt );
784 static int cryptd_aead_decrypt_enqueue(struct aead_request *req)
786 return cryptd_aead_enqueue(req, cryptd_aead_decrypt );
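
The AEAD path (lines 714-786) enqueues exactly like the skcipher and hash paths, and its worker is even simpler: it re-targets the original request at the child AEAD, calls the child's encrypt or decrypt directly, and fires the saved completion. A sketch of that helper; the example_* names are hypothetical, and the tfm refcounting and bottom-half protection of the real code are omitted.

#include <crypto/internal/aead.h>

struct example_aead_ctx {
	struct crypto_aead *child;	/* underlying AEAD implementation */
};

struct example_aead_request_ctx {
	crypto_completion_t complete;	/* caller's original callback */
};

static void example_aead_crypt(struct aead_request *req,
			       struct crypto_aead *child, int err,
			       int (*crypt)(struct aead_request *req))
{
	struct example_aead_request_ctx *rctx = aead_request_ctx(req);
	crypto_completion_t compl = rctx->complete;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	/* hand the very same request to the underlying AEAD */
	aead_request_set_tfm(req, child);
	err = crypt(req);

out:
	compl(&req->base, err);
}

static void example_aead_encrypt(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = container_of(areq, struct aead_request, base);
	struct example_aead_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct crypto_aead *child = ctx->child;

	example_aead_crypt(req, child, err, crypto_aead_alg(child)->encrypt);
}
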
990 struct shash_desc *cryptd_shash_desc(struct ahash_request *req)
992 struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
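
Finally, the exported accessor at lines 990-992 simply exposes where that descriptor lives, so a caller holding a deferred ahash request can reach the underlying shash state. In the hypothetical terms used above:

struct shash_desc *example_shash_desc(struct ahash_request *req)
{
	struct example_hash_request_ctx *rctx = ahash_request_ctx(req);

	return &rctx->desc;
}
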