// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/errno.h>
#include <linux/fips.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/rtnetlink.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/workqueue.h>

#include "internal.h"

static LIST_HEAD(crypto_template_list);

static inline void crypto_check_module_sig(struct module *mod)
{
	if (fips_enabled && mod && !module_sig_ok(mod))
		panic("Module %s signature verification failed in FIPS mode\n",
		      module_name(mod));
}

static int crypto_check_alg(struct crypto_alg *alg)
{
	crypto_check_module_sig(alg->cra_module);

	if (!alg->cra_name[0] || !alg->cra_driver_name[0])
		return -EINVAL;

	if (alg->cra_alignmask & (alg->cra_alignmask + 1))
		return -EINVAL;

	/* General maximums for all algs. */
	if (alg->cra_alignmask > MAX_ALGAPI_ALIGNMASK)
		return -EINVAL;

	if (alg->cra_blocksize > MAX_ALGAPI_BLOCKSIZE)
		return -EINVAL;

	/* Lower maximums for specific alg types. */
	if (!alg->cra_type && (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
			       CRYPTO_ALG_TYPE_CIPHER) {
		if (alg->cra_alignmask > MAX_CIPHER_ALIGNMASK)
			return -EINVAL;

		if (alg->cra_blocksize > MAX_CIPHER_BLOCKSIZE)
			return -EINVAL;
	}

	if (alg->cra_priority < 0)
		return -EINVAL;

	refcount_set(&alg->cra_refcnt, 1);

	return 0;
}

static void crypto_free_instance(struct crypto_instance *inst)
{
	inst->alg.cra_type->free(inst);
}

static void crypto_destroy_instance_workfn(struct work_struct *w)
{
	struct crypto_instance *inst = container_of(w, struct crypto_instance,
						    free_work);
	struct crypto_template *tmpl = inst->tmpl;

	crypto_free_instance(inst);
	crypto_tmpl_put(tmpl);
}

static void crypto_destroy_instance(struct crypto_alg *alg)
{
	struct crypto_instance *inst = container_of(alg,
						    struct crypto_instance,
						    alg);

	INIT_WORK(&inst->free_work, crypto_destroy_instance_workfn);
	schedule_work(&inst->free_work);
}

/*
 * This function adds a spawn to the list secondary_spawns which
 * will be used at the end of crypto_remove_spawns to unregister
 * instances, unless the spawn happens to be one that is depended
 * on by the new algorithm (nalg in crypto_remove_spawns).
 *
 * This function is also responsible for resurrecting any algorithms
 * in the dependency chain of nalg by unsetting n->dead.
 */
static struct list_head *crypto_more_spawns(struct crypto_alg *alg,
					    struct list_head *stack,
					    struct list_head *top,
					    struct list_head *secondary_spawns)
{
	struct crypto_spawn *spawn, *n;

	spawn = list_first_entry_or_null(stack, struct crypto_spawn, list);
	if (!spawn)
		return NULL;

	n = list_prev_entry(spawn, list);
	list_move(&spawn->list, secondary_spawns);

	if (list_is_last(&n->list, stack))
		return top;

	n = list_next_entry(n, list);
	if (!spawn->dead)
		n->dead = false;

	return &n->inst->alg.cra_users;
}

static void crypto_remove_instance(struct crypto_instance *inst,
				   struct list_head *list)
{
	struct crypto_template *tmpl = inst->tmpl;

	if (crypto_is_dead(&inst->alg))
		return;

	inst->alg.cra_flags |= CRYPTO_ALG_DEAD;

	if (!tmpl || !crypto_tmpl_get(tmpl))
		return;

	list_move(&inst->alg.cra_list, list);
	hlist_del(&inst->list);
	inst->alg.cra_destroy = crypto_destroy_instance;

	BUG_ON(!list_empty(&inst->alg.cra_users));
}

/*
 * Given an algorithm alg, remove all algorithms that depend on it
 * through spawns.  If nalg is not null, then exempt any algorithms
 * that nalg depends on.  This is useful when nalg itself
 * depends on alg.
 */
void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list,
			  struct crypto_alg *nalg)
{
	u32 new_type = (nalg ?: alg)->cra_flags;
	struct crypto_spawn *spawn, *n;
	LIST_HEAD(secondary_spawns);
	struct list_head *spawns;
	LIST_HEAD(stack);
	LIST_HEAD(top);

	spawns = &alg->cra_users;
	list_for_each_entry_safe(spawn, n, spawns, list) {
		if ((spawn->alg->cra_flags ^ new_type) & spawn->mask)
			continue;

		list_move(&spawn->list, &top);
	}

	/*
	 * Perform a depth-first walk starting from alg through
	 * the cra_users tree.  The list stack records the path
	 * from alg to the current spawn.
	 */
	spawns = &top;
	do {
		while (!list_empty(spawns)) {
			struct crypto_instance *inst;

			spawn = list_first_entry(spawns, struct crypto_spawn,
						 list);
			inst = spawn->inst;

			list_move(&spawn->list, &stack);
			spawn->dead = !spawn->registered || &inst->alg != nalg;

			if (!spawn->registered)
				break;

			BUG_ON(&inst->alg == alg);

			if (&inst->alg == nalg)
				break;

			spawns = &inst->alg.cra_users;

			/*
			 * Even if spawn->registered is true, the
			 * instance itself may still be unregistered,
			 * since an instance's spawns are set up prior
			 * to the instance being registered.  An
			 * unregistered instance will have NULL
			 * ->cra_users.next, since ->cra_users isn't
			 * properly initialized until registration.
			 * But an unregistered instance cannot have any
			 * users, so treat it the same as ->cra_users
			 * being empty, hence the following test.
			 */
			if (spawns->next == NULL)
				break;
		}
	} while ((spawns = crypto_more_spawns(alg, &stack, &top,
					      &secondary_spawns)));

	/*
	 * Remove all instances that are marked as dead.  Also
	 * complete the resurrection of the others by moving them
	 * back to the cra_users list.
	 */
	list_for_each_entry_safe(spawn, n, &secondary_spawns, list) {
		if (!spawn->dead)
			list_move(&spawn->list, &spawn->alg->cra_users);
		else if (spawn->registered)
			crypto_remove_instance(spawn->inst, list);
	}
}
EXPORT_SYMBOL_GPL(crypto_remove_spawns);

static struct crypto_larval *__crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_alg *q;
	struct crypto_larval *larval;
	int ret = -EAGAIN;

	if (crypto_is_dead(alg))
		goto err;

	INIT_LIST_HEAD(&alg->cra_users);

	/* No cheating! */
	alg->cra_flags &= ~CRYPTO_ALG_TESTED;

	ret = -EEXIST;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			goto err;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			if (!strcmp(alg->cra_driver_name, q->cra_driver_name))
				goto err;
			continue;
		}

		if (!strcmp(q->cra_driver_name, alg->cra_name) ||
		    !strcmp(q->cra_driver_name, alg->cra_driver_name) ||
		    !strcmp(q->cra_name, alg->cra_driver_name))
			goto err;
	}

	larval = crypto_larval_alloc(alg->cra_name,
				     alg->cra_flags | CRYPTO_ALG_TESTED, 0);
	if (IS_ERR(larval))
		goto out;

	ret = -ENOENT;
	larval->adult = crypto_mod_get(alg);
	if (!larval->adult)
		goto free_larval;

	refcount_set(&larval->alg.cra_refcnt, 1);
	memcpy(larval->alg.cra_driver_name, alg->cra_driver_name,
	       CRYPTO_MAX_ALG_NAME);
	larval->alg.cra_priority = alg->cra_priority;

	list_add(&alg->cra_list, &crypto_alg_list);
	list_add(&larval->alg.cra_list, &crypto_alg_list);

	crypto_stats_init(alg);

out:
	return larval;

free_larval:
	kfree(larval);
err:
	larval = ERR_PTR(ret);
	goto out;
}

void crypto_alg_tested(const char *name, int err)
{
	struct crypto_larval *test;
	struct crypto_alg *alg;
	struct crypto_alg *q;
	LIST_HEAD(list);
	bool best;

	down_write(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (crypto_is_moribund(q) || !crypto_is_larval(q))
			continue;

		test = (struct crypto_larval *)q;

		if (!strcmp(q->cra_driver_name, name))
			goto found;
	}

	pr_err("alg: Unexpected test result for %s: %d\n", name, err);
	goto unlock;

found:
	q->cra_flags |= CRYPTO_ALG_DEAD;
	alg = test->adult;
	if (err || list_empty(&alg->cra_list))
		goto complete;

	alg->cra_flags |= CRYPTO_ALG_TESTED;

	/* Only satisfy larval waiters if we are the best. */
	best = true;
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (crypto_is_moribund(q) || !crypto_is_larval(q))
			continue;

		if (strcmp(alg->cra_name, q->cra_name))
			continue;

		if (q->cra_priority > alg->cra_priority) {
			best = false;
			break;
		}
	}

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			continue;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			struct crypto_larval *larval = (void *)q;

			/*
			 * Check to see if either our generic name or
			 * specific name can satisfy the name requested
			 * by the larval entry q.
			 */
			if (strcmp(alg->cra_name, q->cra_name) &&
			    strcmp(alg->cra_driver_name, q->cra_name))
				continue;

			if (larval->adult)
				continue;
			if ((q->cra_flags ^ alg->cra_flags) & larval->mask)
				continue;

			if (best && crypto_mod_get(alg))
				larval->adult = alg;
			else
				larval->adult = ERR_PTR(-EAGAIN);

			continue;
		}

		if (strcmp(alg->cra_name, q->cra_name))
			continue;

		if (strcmp(alg->cra_driver_name, q->cra_driver_name) &&
		    q->cra_priority > alg->cra_priority)
			continue;

		crypto_remove_spawns(q, &list, alg);
	}

complete:
	complete_all(&test->completion);

unlock:
	up_write(&crypto_alg_sem);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_alg_tested);

void crypto_remove_final(struct list_head *list)
{
	struct crypto_alg *alg;
	struct crypto_alg *n;

	list_for_each_entry_safe(alg, n, list, cra_list) {
		list_del_init(&alg->cra_list);
		crypto_alg_put(alg);
	}
}
EXPORT_SYMBOL_GPL(crypto_remove_final);

static void crypto_wait_for_test(struct crypto_larval *larval)
{
	int err;

	err = crypto_probing_notify(CRYPTO_MSG_ALG_REGISTER, larval->adult);
	if (err != NOTIFY_STOP) {
		if (WARN_ON(err != NOTIFY_DONE))
			goto out;
		crypto_alg_tested(larval->alg.cra_driver_name, 0);
	}

	err = wait_for_completion_killable(&larval->completion);
	WARN_ON(err);
	if (!err)
		crypto_notify(CRYPTO_MSG_ALG_LOADED, larval);

out:
	crypto_larval_kill(&larval->alg);
}

int crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_larval *larval;
	int err;

	alg->cra_flags &= ~CRYPTO_ALG_DEAD;
	err = crypto_check_alg(alg);
	if (err)
		return err;

	down_write(&crypto_alg_sem);
	larval = __crypto_register_alg(alg);
	up_write(&crypto_alg_sem);

	if (IS_ERR(larval))
		return PTR_ERR(larval);

	crypto_wait_for_test(larval);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_register_alg);

static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list)
{
	if (unlikely(list_empty(&alg->cra_list)))
		return -ENOENT;

	alg->cra_flags |= CRYPTO_ALG_DEAD;

	list_del_init(&alg->cra_list);
	crypto_remove_spawns(alg, list, NULL);

	return 0;
}

void crypto_unregister_alg(struct crypto_alg *alg)
{
	int ret;
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);
	ret = crypto_remove_alg(alg, &list);
	up_write(&crypto_alg_sem);

	if (WARN(ret, "Algorithm %s is not registered", alg->cra_driver_name))
		return;

	if (WARN_ON(refcount_read(&alg->cra_refcnt) != 1))
		return;

	if (alg->cra_destroy)
		alg->cra_destroy(alg);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_unregister_alg);

int crypto_register_algs(struct crypto_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_alg(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_alg(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_algs);

void crypto_unregister_algs(struct crypto_alg *algs, int count)
{
	int i;

	for (i = 0; i < count; i++)
		crypto_unregister_alg(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_algs);
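
/*
 * Illustrative sketch (not part of this file): a driver would
 * typically register a static array of algorithms from its module
 * init hook and unregister it on exit.  "my_algs" and the hooks below
 * are hypothetical names; real drivers usually fill in a type-specific
 * structure (e.g. skcipher_alg) and use the matching helpers instead:
 *
 *	static struct crypto_alg my_algs[2];
 *
 *	static int __init my_driver_init(void)
 *	{
 *		return crypto_register_algs(my_algs, ARRAY_SIZE(my_algs));
 *	}
 *
 *	static void __exit my_driver_exit(void)
 *	{
 *		crypto_unregister_algs(my_algs, ARRAY_SIZE(my_algs));
 *	}
 */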

int crypto_register_template(struct crypto_template *tmpl)
{
	struct crypto_template *q;
	int err = -EEXIST;

	down_write(&crypto_alg_sem);

	crypto_check_module_sig(tmpl->module);

	list_for_each_entry(q, &crypto_template_list, list) {
		if (q == tmpl)
			goto out;
	}

	list_add(&tmpl->list, &crypto_template_list);
	err = 0;
out:
	up_write(&crypto_alg_sem);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_template);

int crypto_register_templates(struct crypto_template *tmpls, int count)
{
	int i, err;

	for (i = 0; i < count; i++) {
		err = crypto_register_template(&tmpls[i]);
		if (err)
			goto out;
	}
	return 0;

out:
	for (--i; i >= 0; --i)
		crypto_unregister_template(&tmpls[i]);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_templates);

void crypto_unregister_template(struct crypto_template *tmpl)
{
	struct crypto_instance *inst;
	struct hlist_node *n;
	struct hlist_head *list;
	LIST_HEAD(users);

	down_write(&crypto_alg_sem);

	BUG_ON(list_empty(&tmpl->list));
	list_del_init(&tmpl->list);

	list = &tmpl->instances;
	hlist_for_each_entry(inst, list, list) {
		int err = crypto_remove_alg(&inst->alg, &users);

		BUG_ON(err);
	}

	up_write(&crypto_alg_sem);

	hlist_for_each_entry_safe(inst, n, list, list) {
		BUG_ON(refcount_read(&inst->alg.cra_refcnt) != 1);
		crypto_free_instance(inst);
	}
	crypto_remove_final(&users);
}
EXPORT_SYMBOL_GPL(crypto_unregister_template);

void crypto_unregister_templates(struct crypto_template *tmpls, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_template(&tmpls[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_templates);

static struct crypto_template *__crypto_lookup_template(const char *name)
{
	struct crypto_template *q, *tmpl = NULL;

	down_read(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_template_list, list) {
		if (strcmp(q->name, name))
			continue;
		if (unlikely(!crypto_tmpl_get(q)))
			continue;

		tmpl = q;
		break;
	}
	up_read(&crypto_alg_sem);

	return tmpl;
}

struct crypto_template *crypto_lookup_template(const char *name)
{
	return try_then_request_module(__crypto_lookup_template(name),
				       "crypto-%s", name);
}
EXPORT_SYMBOL_GPL(crypto_lookup_template);
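
/*
 * For example, crypto_lookup_template("hmac") first searches
 * crypto_template_list and, if no "hmac" template has been registered
 * yet, requests the "crypto-hmac" module alias and retries the lookup.
 * Callers drop the reference with crypto_tmpl_put() when done.
 */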

int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst)
{
	struct crypto_larval *larval;
	struct crypto_spawn *spawn;
	int err;

	err = crypto_check_alg(&inst->alg);
	if (err)
		return err;

	inst->alg.cra_module = tmpl->module;
	inst->alg.cra_flags |= CRYPTO_ALG_INSTANCE;

	down_write(&crypto_alg_sem);

	larval = ERR_PTR(-EAGAIN);
	for (spawn = inst->spawns; spawn;) {
		struct crypto_spawn *next;

		if (spawn->dead)
			goto unlock;

		next = spawn->next;
		spawn->inst = inst;
		spawn->registered = true;

		crypto_mod_put(spawn->alg);

		spawn = next;
	}

	larval = __crypto_register_alg(&inst->alg);
	if (IS_ERR(larval))
		goto unlock;

	hlist_add_head(&inst->list, &tmpl->instances);
	inst->tmpl = tmpl;

unlock:
	up_write(&crypto_alg_sem);

	err = PTR_ERR(larval);
	if (IS_ERR(larval))
		goto err;

	crypto_wait_for_test(larval);
	err = 0;

err:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_instance);

void crypto_unregister_instance(struct crypto_instance *inst)
{
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);

	crypto_remove_spawns(&inst->alg, &list, NULL);
	crypto_remove_instance(inst, &list);

	up_write(&crypto_alg_sem);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_unregister_instance);

int crypto_grab_spawn(struct crypto_spawn *spawn, struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	int err = -EAGAIN;

	if (WARN_ON_ONCE(inst == NULL))
		return -EINVAL;

	/* Allow the result of crypto_attr_alg_name() to be passed directly */
	if (IS_ERR(name))
		return PTR_ERR(name);

	alg = crypto_find_alg(name, spawn->frontend, type, mask);
	if (IS_ERR(alg))
		return PTR_ERR(alg);

	down_write(&crypto_alg_sem);
	if (!crypto_is_moribund(alg)) {
		list_add(&spawn->list, &alg->cra_users);
		spawn->alg = alg;
		spawn->mask = mask;
		spawn->next = inst->spawns;
		inst->spawns = spawn;
		inst->alg.cra_flags |=
			(alg->cra_flags & CRYPTO_ALG_INHERITED_FLAGS);
		err = 0;
	}
	up_write(&crypto_alg_sem);
	if (err)
		crypto_mod_put(alg);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_grab_spawn);

void crypto_drop_spawn(struct crypto_spawn *spawn)
{
	if (!spawn->alg) /* not yet initialized? */
		return;

	down_write(&crypto_alg_sem);
	if (!spawn->dead)
		list_del(&spawn->list);
	up_write(&crypto_alg_sem);

	if (!spawn->registered)
		crypto_mod_put(spawn->alg);
}
EXPORT_SYMBOL_GPL(crypto_drop_spawn);

static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg = ERR_PTR(-EAGAIN);
	struct crypto_alg *target;
	bool shoot = false;

	down_read(&crypto_alg_sem);
	if (!spawn->dead) {
		alg = spawn->alg;
		if (!crypto_mod_get(alg)) {
			target = crypto_alg_get(alg);
			shoot = true;
			alg = ERR_PTR(-EAGAIN);
		}
	}
	up_read(&crypto_alg_sem);

	if (shoot) {
		crypto_shoot_alg(target);
		crypto_alg_put(target);
	}

	return alg;
}

struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_tfm *tfm;

	alg = crypto_spawn_alg(spawn);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	tfm = ERR_PTR(-EINVAL);
	if (unlikely((alg->cra_flags ^ type) & mask))
		goto out_put_alg;

	tfm = __crypto_alloc_tfm(alg, type, mask);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return tfm;

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm);

void *crypto_spawn_tfm2(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg;
	struct crypto_tfm *tfm;

	alg = crypto_spawn_alg(spawn);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	tfm = crypto_create_tfm(alg, spawn->frontend);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return tfm;

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm2);

int crypto_register_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_register(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_register_notifier);

int crypto_unregister_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_unregister(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_unregister_notifier);

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb)
{
	struct rtattr *rta = tb[0];
	struct crypto_attr_type *algt;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*algt))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_TYPE)
		return ERR_PTR(-EINVAL);

	algt = RTA_DATA(rta);

	return algt;
}
EXPORT_SYMBOL_GPL(crypto_get_attr_type);

/**
 * crypto_check_attr_type() - check algorithm type and compute inherited mask
 * @tb: the template parameters
 * @type: the algorithm type the template would be instantiated as
 * @mask_ret: (output) the mask that should be passed to crypto_grab_*()
 *	      to restrict the flags of any inner algorithms
 *
 * Validate that the algorithm type the user requested is compatible with the
 * one the template would actually be instantiated as.  E.g., if the user is
 * doing crypto_alloc_shash("cbc(aes)", ...), this would return an error because
 * the "cbc" template creates an "skcipher" algorithm, not an "shash" algorithm.
 *
 * Also compute the mask to use to restrict the flags of any inner algorithms.
 *
 * Return: 0 on success; -errno on failure
 */
int crypto_check_attr_type(struct rtattr **tb, u32 type, u32 *mask_ret)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ type) & algt->mask)
		return -EINVAL;

	*mask_ret = crypto_algt_inherited_mask(algt);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_check_attr_type);

const char *crypto_attr_alg_name(struct rtattr *rta)
{
	struct crypto_attr_alg *alga;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*alga))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_ALG)
		return ERR_PTR(-EINVAL);

	alga = RTA_DATA(rta);
	alga->name[CRYPTO_MAX_ALG_NAME - 1] = 0;

	return alga->name;
}
EXPORT_SYMBOL_GPL(crypto_attr_alg_name);

int crypto_attr_u32(struct rtattr *rta, u32 *num)
{
	struct crypto_attr_u32 *nu32;

	if (!rta)
		return -ENOENT;
	if (RTA_PAYLOAD(rta) < sizeof(*nu32))
		return -EINVAL;
	if (rta->rta_type != CRYPTOA_U32)
		return -EINVAL;

	nu32 = RTA_DATA(rta);
	*num = nu32->num;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_attr_u32);

int crypto_inst_setname(struct crypto_instance *inst, const char *name,
			struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name,
		     alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s(%s)",
		     name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_inst_setname);
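
/*
 * Usage sketch (hypothetical, for illustration only): a template's
 * ->create() callback typically ties the helpers above together
 * roughly as below; allocation, error unwinding and type-specific
 * setup are elided, and real templates normally go through the
 * type-specific wrappers (e.g. crypto_grab_skcipher()):
 *
 *	static int my_tmpl_create(struct crypto_template *tmpl,
 *				  struct rtattr **tb)
 *	{
 *		struct crypto_instance *inst;
 *		struct crypto_spawn *spawn;
 *		u32 mask;
 *		int err;
 *
 *		err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_CIPHER,
 *					     &mask);
 *		...allocate inst and locate its embedded spawn...
 *		err = crypto_grab_spawn(spawn, inst,
 *					crypto_attr_alg_name(tb[1]), 0, mask);
 *		err = crypto_inst_setname(inst, tmpl->name, spawn->alg);
 *		...fill in the remaining inst->alg fields...
 *		return crypto_register_instance(tmpl, inst);
 *	}
 */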

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen)
{
	INIT_LIST_HEAD(&queue->list);
	queue->backlog = &queue->list;
	queue->qlen = 0;
	queue->max_qlen = max_qlen;
}
EXPORT_SYMBOL_GPL(crypto_init_queue);

int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request)
{
	int err = -EINPROGRESS;

	if (unlikely(queue->qlen >= queue->max_qlen)) {
		if (!(request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG)) {
			err = -ENOSPC;
			goto out;
		}
		err = -EBUSY;
		if (queue->backlog == &queue->list)
			queue->backlog = &request->list;
	}

	queue->qlen++;
	list_add_tail(&request->list, &queue->list);

out:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_enqueue_request);

void crypto_enqueue_request_head(struct crypto_queue *queue,
				 struct crypto_async_request *request)
{
	queue->qlen++;
	list_add(&request->list, &queue->list);
}
EXPORT_SYMBOL_GPL(crypto_enqueue_request_head);

struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue)
{
	struct list_head *request;

	if (unlikely(!queue->qlen))
		return NULL;

	queue->qlen--;

	if (queue->backlog != &queue->list)
		queue->backlog = queue->backlog->next;

	request = queue->list.next;
	list_del(request);

	return list_entry(request, struct crypto_async_request, list);
}
EXPORT_SYMBOL_GPL(crypto_dequeue_request);
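
/*
 * Usage sketch (hypothetical driver, for illustration only): requests
 * are pushed from the driver's entry points and popped in a worker.
 * -EINPROGRESS from crypto_enqueue_request() means the request was
 * queued normally, -EBUSY means it was accepted on the backlog because
 * the queue was full, and -ENOSPC means it was rejected outright:
 *
 *	spin_lock_bh(&dev->lock);
 *	err = crypto_enqueue_request(&dev->queue, &req->base);
 *	spin_unlock_bh(&dev->lock);
 *	...
 *	spin_lock_bh(&dev->lock);
 *	async_req = crypto_dequeue_request(&dev->queue);
 *	spin_unlock_bh(&dev->lock);
 *	if (async_req)
 *		...process, then complete the request...
 */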

static inline void crypto_inc_byte(u8 *a, unsigned int size)
{
	u8 *b = (a + size);
	u8 c;

	for (; size; size--) {
		c = *--b + 1;
		*b = c;
		if (c)
			break;
	}
}

void crypto_inc(u8 *a, unsigned int size)
{
	__be32 *b = (__be32 *)(a + size);
	u32 c;

	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
	    IS_ALIGNED((unsigned long)b, __alignof__(*b)))
		for (; size >= 4; size -= 4) {
			c = be32_to_cpu(*--b) + 1;
			*b = cpu_to_be32(c);
			if (likely(c))
				return;
		}

	crypto_inc_byte(a, size);
}
EXPORT_SYMBOL_GPL(crypto_inc);
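
/*
 * crypto_inc() treats the buffer as a single big-endian integer, as
 * used for CTR-mode counter blocks.  For example, with size == 4:
 *
 *	00 00 00 ff  ->  00 00 01 00
 *	ff ff ff ff  ->  00 00 00 00  (wraps around)
 */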

void __crypto_xor(u8 *dst, const u8 *src1, const u8 *src2, unsigned int len)
{
	int relalign = 0;

	if (!IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)) {
		int size = sizeof(unsigned long);
		int d = (((unsigned long)dst ^ (unsigned long)src1) |
			 ((unsigned long)dst ^ (unsigned long)src2)) &
			(size - 1);

		relalign = d ? 1 << __ffs(d) : size;

		/*
		 * If we care about alignment, process as many bytes as
		 * needed to advance dst and src to values whose alignments
		 * equal their relative alignment. This will allow us to
		 * process the remainder of the input using optimal strides.
		 */
		while (((unsigned long)dst & (relalign - 1)) && len > 0) {
			*dst++ = *src1++ ^ *src2++;
			len--;
		}
	}

	while (IS_ENABLED(CONFIG_64BIT) && len >= 8 && !(relalign & 7)) {
		*(u64 *)dst = *(u64 *)src1 ^ *(u64 *)src2;
		dst += 8;
		src1 += 8;
		src2 += 8;
		len -= 8;
	}

	while (len >= 4 && !(relalign & 3)) {
		*(u32 *)dst = *(u32 *)src1 ^ *(u32 *)src2;
		dst += 4;
		src1 += 4;
		src2 += 4;
		len -= 4;
	}

	while (len >= 2 && !(relalign & 1)) {
		*(u16 *)dst = *(u16 *)src1 ^ *(u16 *)src2;
		dst += 2;
		src1 += 2;
		src2 += 2;
		len -= 2;
	}

	while (len--)
		*dst++ = *src1++ ^ *src2++;
}
EXPORT_SYMBOL_GPL(__crypto_xor);
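
/*
 * Callers normally use the crypto_xor()/crypto_xor_cpy() wrappers in
 * <crypto/algapi.h>, which inline the common aligned, constant-size
 * case and fall back to __crypto_xor() otherwise, e.g. to fold a
 * keystream block into a 16-byte buffer:
 *
 *	crypto_xor(dst, keystream, 16);
 */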

unsigned int crypto_alg_extsize(struct crypto_alg *alg)
{
	return alg->cra_ctxsize +
	       (alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1));
}
EXPORT_SYMBOL_GPL(crypto_alg_extsize);
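
/*
 * For example, with cra_ctxsize == 96, cra_alignmask == 15 and an
 * (assumed) 8-byte crypto_tfm_ctx_alignment(), crypto_alg_extsize()
 * reserves 96 + (15 & ~7) = 104 bytes, leaving enough slack to align
 * the context up to a 16-byte boundary at runtime.
 */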

int crypto_type_has_alg(const char *name, const struct crypto_type *frontend,
			u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_find_alg(name, frontend, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_type_has_alg);

#ifdef CONFIG_CRYPTO_STATS
void crypto_stats_init(struct crypto_alg *alg)
{
	memset(&alg->stats, 0, sizeof(alg->stats));
}
EXPORT_SYMBOL_GPL(crypto_stats_init);

void crypto_stats_get(struct crypto_alg *alg)
{
	crypto_alg_get(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_get);

void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.aead.err_cnt);
	} else {
		atomic64_inc(&alg->stats.aead.encrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.aead.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_aead_encrypt);

void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.aead.err_cnt);
	} else {
		atomic64_inc(&alg->stats.aead.decrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.aead.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_aead_decrypt);

void crypto_stats_akcipher_encrypt(unsigned int src_len, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.akcipher.encrypt_cnt);
		atomic64_add(src_len, &alg->stats.akcipher.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_encrypt);

void crypto_stats_akcipher_decrypt(unsigned int src_len, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.akcipher.decrypt_cnt);
		atomic64_add(src_len, &alg->stats.akcipher.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_decrypt);

void crypto_stats_akcipher_sign(int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	else
		atomic64_inc(&alg->stats.akcipher.sign_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_sign);

void crypto_stats_akcipher_verify(int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	else
		atomic64_inc(&alg->stats.akcipher.verify_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_verify);

void crypto_stats_compress(unsigned int slen, int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.compress.err_cnt);
	} else {
		atomic64_inc(&alg->stats.compress.compress_cnt);
		atomic64_add(slen, &alg->stats.compress.compress_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_compress);

void crypto_stats_decompress(unsigned int slen, int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.compress.err_cnt);
	} else {
		atomic64_inc(&alg->stats.compress.decompress_cnt);
		atomic64_add(slen, &alg->stats.compress.decompress_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_decompress);

void crypto_stats_ahash_update(unsigned int nbytes, int ret,
			       struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.hash.err_cnt);
	else
		atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_ahash_update);

void crypto_stats_ahash_final(unsigned int nbytes, int ret,
			      struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.hash.err_cnt);
	} else {
		atomic64_inc(&alg->stats.hash.hash_cnt);
		atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_ahash_final);

void crypto_stats_kpp_set_secret(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.setsecret_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_set_secret);

void crypto_stats_kpp_generate_public_key(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.generate_public_key_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_generate_public_key);

void crypto_stats_kpp_compute_shared_secret(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.compute_shared_secret_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_compute_shared_secret);

void crypto_stats_rng_seed(struct crypto_alg *alg, int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.rng.err_cnt);
	else
		atomic64_inc(&alg->stats.rng.seed_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_rng_seed);

void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.rng.err_cnt);
	} else {
		atomic64_inc(&alg->stats.rng.generate_cnt);
		atomic64_add(dlen, &alg->stats.rng.generate_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_rng_generate);

void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.cipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.cipher.encrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.cipher.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_skcipher_encrypt);

void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.cipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.cipher.decrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.cipher.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_skcipher_decrypt);
#endif

static int __init crypto_algapi_init(void)
{
	crypto_init_proc();
	return 0;
}

static void __exit crypto_algapi_exit(void)
{
	crypto_exit_proc();
}

module_init(crypto_algapi_init);
module_exit(crypto_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Cryptographic algorithms API");
MODULE_SOFTDEP("pre: cryptomgr");