// SPDX-License-Identifier: GPL-2.0
/*
 * ESSIV skcipher and aead template for block encryption
 *
 * This template encapsulates the ESSIV IV generation algorithm used by
 * dm-crypt and fscrypt: it transforms the initial vector of the skcipher
 * used for block encryption by encrypting it with the hash of the
 * skcipher key as the encryption key. Usually, the input IV is a 64-bit
 * sector number in LE representation zero-padded to the size of the IV,
 * but this is not assumed by this driver.
 *
 * The typical use of this template is to instantiate the skcipher
 * 'essiv(cbc(aes),sha256)', which is the only instantiation used by
 * fscrypt, and the most relevant one for dm-crypt. However, dm-crypt
 * also permits ESSIV to be used in combination with the authenc template,
 * e.g., 'essiv(authenc(hmac(sha256),cbc(aes)),sha256)', in which case
 * we need to instantiate an aead that accepts the same special key format
 * as the authenc template, and deals with the way the encrypted IV is
 * embedded into the AAD area of the aead request. This means the AEAD
 * flavor produced by this template is tightly coupled to the way dm-crypt
 * happens to use it.
 *
 * Copyright (c) 2019 Linaro, Ltd. <ard.biesheuvel@linaro.org>
 *
 * Heavily based on:
 * adiantum length-preserving encryption mode
 *
 * Copyright 2018 Google LLC
 */
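
/*
 * Conceptually (illustration only): for a volume key K and a sector
 * number s, ESSIV computes
 *
 *	salt = Hash(K)
 *	IV   = E_salt(s zero-padded to the cipher block size)
 *
 * so the per-sector IV is unpredictable without knowledge of the key.
 *
 * A minimal usage sketch of the skcipher flavor (error handling omitted;
 * 'key' and 'keylen' are placeholders supplied by the caller):
 *
 *	struct crypto_skcipher *tfm;
 *
 *	tfm = crypto_alloc_skcipher("essiv(cbc(aes),sha256)", 0, 0);
 *	crypto_skcipher_setkey(tfm, key, keylen);
 *	...set req->iv to the LE sector number and en/decrypt as usual...
 */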

#include <crypto/authenc.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/module.h>

#include "internal.h"

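/*
 * Per-instance context: the spawn of the wrapped skcipher or aead, plus
 * the algorithm names needed to allocate the single-block ESSIV cipher
 * and the hash when a tfm is instantiated.
 */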
struct essiv_instance_ctx {
	union {
		struct crypto_skcipher_spawn	skcipher_spawn;
		struct crypto_aead_spawn	aead_spawn;
	} u;
	char	essiv_cipher_name[CRYPTO_MAX_ALG_NAME];
	char	shash_driver_name[CRYPTO_MAX_ALG_NAME];
};

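/*
 * Per-tfm context: the wrapped skcipher or aead, the single-block cipher
 * used to encrypt the IV, the (unkeyed) hash used to derive its key and,
 * for the aead flavor, the offset of the IV copy within the request ctx.
 */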
struct essiv_tfm_ctx {
	union {
		struct crypto_skcipher	*skcipher;
		struct crypto_aead	*aead;
	} u;
	struct crypto_cipher		*essiv_cipher;
	struct crypto_shash		*hash;
	int				ivoffset;
};

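/*
 * Per-request context for the aead flavor: sg[] carries the rebuilt AAD
 * (the original assoc data or a bounce buffer, followed by the encrypted
 * IV, chained to the remainder of the source), and aead_req must be the
 * last member so the inner request ctx and the IV copy can follow it.
 */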
struct essiv_aead_request_ctx {
	struct scatterlist		sg[4];
	u8				*assoc;
	struct aead_request		aead_req;
};

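/*
 * Key the wrapped skcipher with the user key, then key the ESSIV cipher
 * with the digest of that key.
 */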
static int essiv_skcipher_setkey(struct crypto_skcipher *tfm,
				 const u8 *key, unsigned int keylen)
{
	struct essiv_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	u8 salt[HASH_MAX_DIGESTSIZE];
	int err;

	crypto_skcipher_clear_flags(tctx->u.skcipher, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(tctx->u.skcipher,
				  crypto_skcipher_get_flags(tfm) &
				  CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(tctx->u.skcipher, key, keylen);
	if (err)
		return err;

	err = crypto_shash_tfm_digest(tctx->hash, key, keylen, salt);
	if (err)
		return err;

	crypto_cipher_clear_flags(tctx->essiv_cipher, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(tctx->essiv_cipher,
				crypto_skcipher_get_flags(tfm) &
				CRYPTO_TFM_REQ_MASK);
	return crypto_cipher_setkey(tctx->essiv_cipher, salt,
				    crypto_shash_digestsize(tctx->hash));
}

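/*
 * Same as above for the aead flavor, except that the key arrives in the
 * authenc() format and the ESSIV cipher key is the digest of the
 * encryption subkey followed by the authentication subkey.
 */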
static int essiv_aead_setkey(struct crypto_aead *tfm, const u8 *key,
			     unsigned int keylen)
{
	struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);
	SHASH_DESC_ON_STACK(desc, tctx->hash);
	struct crypto_authenc_keys keys;
	u8 salt[HASH_MAX_DIGESTSIZE];
	int err;

	crypto_aead_clear_flags(tctx->u.aead, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(tctx->u.aead, crypto_aead_get_flags(tfm) &
					    CRYPTO_TFM_REQ_MASK);
	err = crypto_aead_setkey(tctx->u.aead, key, keylen);
	if (err)
		return err;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		return -EINVAL;

	desc->tfm = tctx->hash;
	err = crypto_shash_init(desc) ?:
	      crypto_shash_update(desc, keys.enckey, keys.enckeylen) ?:
	      crypto_shash_finup(desc, keys.authkey, keys.authkeylen, salt);
	if (err)
		return err;

	crypto_cipher_clear_flags(tctx->essiv_cipher, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(tctx->essiv_cipher, crypto_aead_get_flags(tfm) &
						    CRYPTO_TFM_REQ_MASK);
	return crypto_cipher_setkey(tctx->essiv_cipher, salt,
				    crypto_shash_digestsize(tctx->hash));
}

static int essiv_aead_setauthsize(struct crypto_aead *tfm,
				  unsigned int authsize)
{
	struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);

	return crypto_aead_setauthsize(tctx->u.aead, authsize);
}

static void essiv_skcipher_done(struct crypto_async_request *areq, int err)
{
	struct skcipher_request *req = areq->data;

	skcipher_request_complete(req, err);
}

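/*
 * Encrypt the caller's IV in place with the ESSIV cipher, then forward
 * the request to the wrapped skcipher using that encrypted IV.
 */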
static int essiv_skcipher_crypt(struct skcipher_request *req, bool enc)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct essiv_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct skcipher_request *subreq = skcipher_request_ctx(req);

	crypto_cipher_encrypt_one(tctx->essiv_cipher, req->iv, req->iv);

	skcipher_request_set_tfm(subreq, tctx->u.skcipher);
	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
				   req->iv);
	skcipher_request_set_callback(subreq, skcipher_request_flags(req),
				      essiv_skcipher_done, req);

	return enc ? crypto_skcipher_encrypt(subreq) :
		     crypto_skcipher_decrypt(subreq);
}

static int essiv_skcipher_encrypt(struct skcipher_request *req)
{
	return essiv_skcipher_crypt(req, true);
}

static int essiv_skcipher_decrypt(struct skcipher_request *req)
{
	return essiv_skcipher_crypt(req, false);
}

static void essiv_aead_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;
	struct essiv_aead_request_ctx *rctx = aead_request_ctx(req);

	if (err == -EINPROGRESS)
		goto out;

	kfree(rctx->assoc);

out:
	aead_request_complete(req, err);
}

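/*
 * AAD layout expected from the caller (this is how dm-crypt lays it out):
 *
 *	| assoc data (assoclen - ivsize bytes) | IV (ivsize bytes) |
 *
 * req->iv holds the plain IV on entry and is converted (encrypted) in
 * place; the converted IV must end up in the last ivsize bytes of the
 * AAD that the inner aead actually processes.
 */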
static int essiv_aead_crypt(struct aead_request *req, bool enc)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	const struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);
	struct essiv_aead_request_ctx *rctx = aead_request_ctx(req);
	struct aead_request *subreq = &rctx->aead_req;
	struct scatterlist *src = req->src;
	int err;

	crypto_cipher_encrypt_one(tctx->essiv_cipher, req->iv, req->iv);

	/*
	 * dm-crypt embeds the sector number and the IV in the AAD region, so
	 * we have to copy the converted IV into the right scatterlist before
	 * we pass it on.
	 */
	rctx->assoc = NULL;
	if (req->src == req->dst || !enc) {
		scatterwalk_map_and_copy(req->iv, req->dst,
					 req->assoclen - crypto_aead_ivsize(tfm),
					 crypto_aead_ivsize(tfm), 1);
	} else {
		u8 *iv = (u8 *)aead_request_ctx(req) + tctx->ivoffset;
		int ivsize = crypto_aead_ivsize(tfm);
		int ssize = req->assoclen - ivsize;
		struct scatterlist *sg;
		int nents;

		if (ssize < 0)
			return -EINVAL;

		nents = sg_nents_for_len(req->src, ssize);
		if (nents < 0)
			return -EINVAL;

		memcpy(iv, req->iv, ivsize);
		sg_init_table(rctx->sg, 4);

		if (unlikely(nents > 1)) {
			/*
			 * This is a case that rarely occurs in practice, but
			 * for correctness, we have to deal with it nonetheless.
			 */
			rctx->assoc = kmalloc(ssize, GFP_ATOMIC);
			if (!rctx->assoc)
				return -ENOMEM;

			scatterwalk_map_and_copy(rctx->assoc, req->src, 0,
						 ssize, 0);
			sg_set_buf(rctx->sg, rctx->assoc, ssize);
		} else {
			sg_set_page(rctx->sg, sg_page(req->src), ssize,
				    req->src->offset);
		}

		sg_set_buf(rctx->sg + 1, iv, ivsize);
		sg = scatterwalk_ffwd(rctx->sg + 2, req->src, req->assoclen);
		if (sg != rctx->sg + 2)
			sg_chain(rctx->sg, 3, sg);

		src = rctx->sg;
	}

	aead_request_set_tfm(subreq, tctx->u.aead);
	aead_request_set_ad(subreq, req->assoclen);
	aead_request_set_callback(subreq, aead_request_flags(req),
				  essiv_aead_done, req);
	aead_request_set_crypt(subreq, src, req->dst, req->cryptlen, req->iv);

	err = enc ? crypto_aead_encrypt(subreq) :
		    crypto_aead_decrypt(subreq);

	if (rctx->assoc && err != -EINPROGRESS && err != -EBUSY)
		kfree(rctx->assoc);
	return err;
}

static int essiv_aead_encrypt(struct aead_request *req)
{
	return essiv_aead_crypt(req, true);
}

static int essiv_aead_decrypt(struct aead_request *req)
{
	return essiv_aead_crypt(req, false);
}

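/*
 * Common part of tfm initialization: allocate the single-block ESSIV
 * cipher and the hash by the names recorded when the instance was created.
 */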
static int essiv_init_tfm(struct essiv_instance_ctx *ictx,
			  struct essiv_tfm_ctx *tctx)
{
	struct crypto_cipher *essiv_cipher;
	struct crypto_shash *hash;
	int err;

	essiv_cipher = crypto_alloc_cipher(ictx->essiv_cipher_name, 0, 0);
	if (IS_ERR(essiv_cipher))
		return PTR_ERR(essiv_cipher);

	hash = crypto_alloc_shash(ictx->shash_driver_name, 0, 0);
	if (IS_ERR(hash)) {
		err = PTR_ERR(hash);
		goto err_free_essiv_cipher;
	}

	tctx->essiv_cipher = essiv_cipher;
	tctx->hash = hash;

	return 0;

err_free_essiv_cipher:
	crypto_free_cipher(essiv_cipher);
	return err;
}

static int essiv_skcipher_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct essiv_instance_ctx *ictx = skcipher_instance_ctx(inst);
	struct essiv_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *skcipher;
	int err;

	skcipher = crypto_spawn_skcipher(&ictx->u.skcipher_spawn);
	if (IS_ERR(skcipher))
		return PTR_ERR(skcipher);

	crypto_skcipher_set_reqsize(tfm, sizeof(struct skcipher_request) +
					 crypto_skcipher_reqsize(skcipher));

	err = essiv_init_tfm(ictx, tctx);
	if (err) {
		crypto_free_skcipher(skcipher);
		return err;
	}

	tctx->u.skcipher = skcipher;
	return 0;
}

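/*
 * The aead request context is laid out as
 *
 *	| essiv_aead_request_ctx | inner aead request + its ctx | IV copy |
 *
 * with ivoffset recording where the IV copy starts.
 */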
static int essiv_aead_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct essiv_instance_ctx *ictx = aead_instance_ctx(inst);
	struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);
	struct crypto_aead *aead;
	unsigned int subreq_size;
	int err;

	BUILD_BUG_ON(offsetofend(struct essiv_aead_request_ctx, aead_req) !=
		     sizeof(struct essiv_aead_request_ctx));

	aead = crypto_spawn_aead(&ictx->u.aead_spawn);
	if (IS_ERR(aead))
		return PTR_ERR(aead);

	subreq_size = sizeof_field(struct essiv_aead_request_ctx, aead_req) +
		      crypto_aead_reqsize(aead);

	tctx->ivoffset = offsetof(struct essiv_aead_request_ctx, aead_req) +
			 subreq_size;
	crypto_aead_set_reqsize(tfm, tctx->ivoffset + crypto_aead_ivsize(aead));

	err = essiv_init_tfm(ictx, tctx);
	if (err) {
		crypto_free_aead(aead);
		return err;
	}

	tctx->u.aead = aead;
	return 0;
}

static void essiv_skcipher_exit_tfm(struct crypto_skcipher *tfm)
{
	struct essiv_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(tctx->u.skcipher);
	crypto_free_cipher(tctx->essiv_cipher);
	crypto_free_shash(tctx->hash);
}

static void essiv_aead_exit_tfm(struct crypto_aead *tfm)
{
	struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);

	crypto_free_aead(tctx->u.aead);
	crypto_free_cipher(tctx->essiv_cipher);
	crypto_free_shash(tctx->hash);
}

static void essiv_skcipher_free_instance(struct skcipher_instance *inst)
{
	struct essiv_instance_ctx *ictx = skcipher_instance_ctx(inst);

	crypto_drop_skcipher(&ictx->u.skcipher_spawn);
	kfree(inst);
}

static void essiv_aead_free_instance(struct aead_instance *inst)
{
	struct essiv_instance_ctx *ictx = aead_instance_ctx(inst);

	crypto_drop_aead(&ictx->u.aead_spawn);
	kfree(inst);
}

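/*
 * Extract the name of the innermost block cipher from the wrapped
 * algorithm's cra_name, e.g. "aes" from "cbc(aes)" or from
 * "authenc(hmac(sha256),cbc(aes))".
 */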
static bool parse_cipher_name(char *essiv_cipher_name, const char *cra_name)
{
	const char *p, *q;
	int len;

	/* find the last opening parens */
	p = strrchr(cra_name, '(');
	if (!p++)
		return false;

	/* find the first closing parens in the tail of the string */
	q = strchr(p, ')');
	if (!q)
		return false;

	len = q - p;
	if (len >= CRYPTO_MAX_ALG_NAME)
		return false;

	memcpy(essiv_cipher_name, p, len);
	essiv_cipher_name[len] = '\0';
	return true;
}

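/*
 * An essiv() instantiation is only accepted if the hash digest fits the
 * ESSIV cipher's key size range, the cipher's block size matches the IV
 * size of the wrapped algorithm, and the hash does not require a key.
 */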
static bool essiv_supported_algorithms(const char *essiv_cipher_name,
				       struct shash_alg *hash_alg,
				       int ivsize)
{
	struct crypto_alg *alg;
	bool ret = false;

	alg = crypto_alg_mod_lookup(essiv_cipher_name,
				    CRYPTO_ALG_TYPE_CIPHER,
				    CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(alg))
		return false;

	if (hash_alg->digestsize < alg->cra_cipher.cia_min_keysize ||
	    hash_alg->digestsize > alg->cra_cipher.cia_max_keysize)
		goto out;

	if (ivsize != alg->cra_blocksize)
		goto out;

	if (crypto_shash_alg_needs_key(hash_alg))
		goto out;

	ret = true;

out:
	crypto_mod_put(alg);
	return ret;
}

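/*
 * Instantiate either the skcipher or the aead flavor of the template,
 * depending on the algorithm type requested by the caller. In practice
 * "essiv(cbc(aes),sha256)" is requested as an skcipher and
 * "essiv(authenc(...),cbc(aes)),sha256)" style names as an aead.
 */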
static int essiv_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	const char *inner_cipher_name;
	const char *shash_name;
	struct skcipher_instance *skcipher_inst = NULL;
	struct aead_instance *aead_inst = NULL;
	struct crypto_instance *inst;
	struct crypto_alg *base, *block_base;
	struct essiv_instance_ctx *ictx;
	struct skcipher_alg *skcipher_alg = NULL;
	struct aead_alg *aead_alg = NULL;
	struct crypto_alg *_hash_alg;
	struct shash_alg *hash_alg;
	int ivsize;
	u32 type;
	u32 mask;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	inner_cipher_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(inner_cipher_name))
		return PTR_ERR(inner_cipher_name);

	shash_name = crypto_attr_alg_name(tb[2]);
	if (IS_ERR(shash_name))
		return PTR_ERR(shash_name);

	type = algt->type & algt->mask;
	mask = crypto_algt_inherited_mask(algt);

	switch (type) {
	case CRYPTO_ALG_TYPE_SKCIPHER:
		skcipher_inst = kzalloc(sizeof(*skcipher_inst) +
					sizeof(*ictx), GFP_KERNEL);
		if (!skcipher_inst)
			return -ENOMEM;
		inst = skcipher_crypto_instance(skcipher_inst);
		base = &skcipher_inst->alg.base;
		ictx = crypto_instance_ctx(inst);

		/* Symmetric cipher, e.g., "cbc(aes)" */
		err = crypto_grab_skcipher(&ictx->u.skcipher_spawn, inst,
					   inner_cipher_name, 0, mask);
		if (err)
			goto out_free_inst;
		skcipher_alg = crypto_spawn_skcipher_alg(&ictx->u.skcipher_spawn);
		block_base = &skcipher_alg->base;
		ivsize = crypto_skcipher_alg_ivsize(skcipher_alg);
		break;

	case CRYPTO_ALG_TYPE_AEAD:
		aead_inst = kzalloc(sizeof(*aead_inst) +
				    sizeof(*ictx), GFP_KERNEL);
		if (!aead_inst)
			return -ENOMEM;
		inst = aead_crypto_instance(aead_inst);
		base = &aead_inst->alg.base;
		ictx = crypto_instance_ctx(inst);

		/* AEAD cipher, e.g., "authenc(hmac(sha256),cbc(aes))" */
		err = crypto_grab_aead(&ictx->u.aead_spawn, inst,
				       inner_cipher_name, 0, mask);
		if (err)
			goto out_free_inst;
		aead_alg = crypto_spawn_aead_alg(&ictx->u.aead_spawn);
		block_base = &aead_alg->base;
		if (!strstarts(block_base->cra_name, "authenc(")) {
			pr_warn("Only authenc() type AEADs are supported by ESSIV\n");
			err = -EINVAL;
			goto out_drop_skcipher;
		}
		ivsize = aead_alg->ivsize;
		break;

	default:
		return -EINVAL;
	}

	if (!parse_cipher_name(ictx->essiv_cipher_name, block_base->cra_name)) {
		pr_warn("Failed to parse ESSIV cipher name from skcipher cra_name\n");
		err = -EINVAL;
		goto out_drop_skcipher;
	}

	/* Synchronous hash, e.g., "sha256" */
	_hash_alg = crypto_alg_mod_lookup(shash_name,
					  CRYPTO_ALG_TYPE_SHASH,
					  CRYPTO_ALG_TYPE_MASK | mask);
	if (IS_ERR(_hash_alg)) {
		err = PTR_ERR(_hash_alg);
		goto out_drop_skcipher;
	}
	hash_alg = __crypto_shash_alg(_hash_alg);

	/* Check the set of algorithms */
	if (!essiv_supported_algorithms(ictx->essiv_cipher_name, hash_alg,
					ivsize)) {
		pr_warn("Unsupported essiv instantiation: essiv(%s,%s)\n",
			block_base->cra_name, hash_alg->base.cra_name);
		err = -EINVAL;
		goto out_free_hash;
	}

	/* record the driver name so we can instantiate this exact algo later */
	strlcpy(ictx->shash_driver_name, hash_alg->base.cra_driver_name,
		CRYPTO_MAX_ALG_NAME);

	/* Instance fields */

	err = -ENAMETOOLONG;
	if (snprintf(base->cra_name, CRYPTO_MAX_ALG_NAME,
		     "essiv(%s,%s)", block_base->cra_name,
		     hash_alg->base.cra_name) >= CRYPTO_MAX_ALG_NAME)
		goto out_free_hash;
	if (snprintf(base->cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "essiv(%s,%s)", block_base->cra_driver_name,
		     hash_alg->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		goto out_free_hash;

	/*
	 * hash_alg wasn't gotten via crypto_grab*(), so we need to inherit its
	 * flags manually.
	 */
	base->cra_flags        |= (hash_alg->base.cra_flags &
				   CRYPTO_ALG_INHERITED_FLAGS);
	base->cra_blocksize	= block_base->cra_blocksize;
	base->cra_ctxsize	= sizeof(struct essiv_tfm_ctx);
	base->cra_alignmask	= block_base->cra_alignmask;
	base->cra_priority	= block_base->cra_priority;

	if (type == CRYPTO_ALG_TYPE_SKCIPHER) {
		skcipher_inst->alg.setkey	= essiv_skcipher_setkey;
		skcipher_inst->alg.encrypt	= essiv_skcipher_encrypt;
		skcipher_inst->alg.decrypt	= essiv_skcipher_decrypt;
		skcipher_inst->alg.init		= essiv_skcipher_init_tfm;
		skcipher_inst->alg.exit		= essiv_skcipher_exit_tfm;

		skcipher_inst->alg.min_keysize	= crypto_skcipher_alg_min_keysize(skcipher_alg);
		skcipher_inst->alg.max_keysize	= crypto_skcipher_alg_max_keysize(skcipher_alg);
		skcipher_inst->alg.ivsize	= ivsize;
		skcipher_inst->alg.chunksize	= crypto_skcipher_alg_chunksize(skcipher_alg);
		skcipher_inst->alg.walksize	= crypto_skcipher_alg_walksize(skcipher_alg);

		skcipher_inst->free		= essiv_skcipher_free_instance;

		err = skcipher_register_instance(tmpl, skcipher_inst);
	} else {
		aead_inst->alg.setkey		= essiv_aead_setkey;
		aead_inst->alg.setauthsize	= essiv_aead_setauthsize;
		aead_inst->alg.encrypt		= essiv_aead_encrypt;
		aead_inst->alg.decrypt		= essiv_aead_decrypt;
		aead_inst->alg.init		= essiv_aead_init_tfm;
		aead_inst->alg.exit		= essiv_aead_exit_tfm;

		aead_inst->alg.ivsize		= ivsize;
		aead_inst->alg.maxauthsize	= crypto_aead_alg_maxauthsize(aead_alg);
		aead_inst->alg.chunksize	= crypto_aead_alg_chunksize(aead_alg);

		aead_inst->free			= essiv_aead_free_instance;

		err = aead_register_instance(tmpl, aead_inst);
	}

	if (err)
		goto out_free_hash;

	crypto_mod_put(_hash_alg);
	return 0;

out_free_hash:
	crypto_mod_put(_hash_alg);
out_drop_skcipher:
	if (type == CRYPTO_ALG_TYPE_SKCIPHER)
		crypto_drop_skcipher(&ictx->u.skcipher_spawn);
	else
		crypto_drop_aead(&ictx->u.aead_spawn);
out_free_inst:
	kfree(skcipher_inst);
	kfree(aead_inst);
	return err;
}

/* essiv(cipher_name, shash_name) */
static struct crypto_template essiv_tmpl = {
	.name	= "essiv",
	.create	= essiv_create,
	.module	= THIS_MODULE,
};

static int __init essiv_module_init(void)
{
	return crypto_register_template(&essiv_tmpl);
}

static void __exit essiv_module_exit(void)
{
	crypto_unregister_template(&essiv_tmpl);
}

subsys_initcall(essiv_module_init);
module_exit(essiv_module_exit);

MODULE_DESCRIPTION("ESSIV skcipher/aead wrapper for block encryption");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("essiv");