xref: /linux-6.15/crypto/algapi.c (revision 674f368a)
// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <[email protected]>
 */

#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/errno.h>
#include <linux/fips.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/rtnetlink.h>
#include <linux/slab.h>
#include <linux/string.h>

#include "internal.h"

static LIST_HEAD(crypto_template_list);

static inline void crypto_check_module_sig(struct module *mod)
{
	if (fips_enabled && mod && !module_sig_ok(mod))
		panic("Module %s signature verification failed in FIPS mode\n",
		      module_name(mod));
}

static int crypto_check_alg(struct crypto_alg *alg)
{
	crypto_check_module_sig(alg->cra_module);

	if (!alg->cra_name[0] || !alg->cra_driver_name[0])
		return -EINVAL;

	if (alg->cra_alignmask & (alg->cra_alignmask + 1))
		return -EINVAL;

	/* General maximums for all algs. */
	if (alg->cra_alignmask > MAX_ALGAPI_ALIGNMASK)
		return -EINVAL;

	if (alg->cra_blocksize > MAX_ALGAPI_BLOCKSIZE)
		return -EINVAL;

	/* Lower maximums for specific alg types. */
	if (!alg->cra_type && (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
			       CRYPTO_ALG_TYPE_CIPHER) {
		if (alg->cra_alignmask > MAX_CIPHER_ALIGNMASK)
			return -EINVAL;

		if (alg->cra_blocksize > MAX_CIPHER_BLOCKSIZE)
			return -EINVAL;
	}

	if (alg->cra_priority < 0)
		return -EINVAL;

	refcount_set(&alg->cra_refcnt, 1);

	return 0;
}
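
/*
 * Worked example (annotation, not part of the original source): the
 * "alignmask & (alignmask + 1)" test accepts only masks of the form
 * 2^n - 1.  For alignmask = 7 (0b111), 7 & 8 == 0, so it is accepted;
 * for alignmask = 6 (0b110), 6 & 7 == 6, so it is rejected, since the
 * set bits are not a contiguous run from bit 0.
 */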

static void crypto_free_instance(struct crypto_instance *inst)
{
	if (!inst->alg.cra_type->free) {
		inst->tmpl->free(inst);
		return;
	}

	inst->alg.cra_type->free(inst);
}

static void crypto_destroy_instance(struct crypto_alg *alg)
{
	struct crypto_instance *inst = (void *)alg;
	struct crypto_template *tmpl = inst->tmpl;

	crypto_free_instance(inst);
	crypto_tmpl_put(tmpl);
}

/*
 * This function adds a spawn to the list secondary_spawns which
 * will be used at the end of crypto_remove_spawns to unregister
 * instances, unless the spawn happens to be one that is depended
 * on by the new algorithm (nalg in crypto_remove_spawns).
 *
 * This function is also responsible for resurrecting any algorithms
 * in the dependency chain of nalg by unsetting n->dead.
 */
static struct list_head *crypto_more_spawns(struct crypto_alg *alg,
					    struct list_head *stack,
					    struct list_head *top,
					    struct list_head *secondary_spawns)
{
	struct crypto_spawn *spawn, *n;

	spawn = list_first_entry_or_null(stack, struct crypto_spawn, list);
	if (!spawn)
		return NULL;

	n = list_prev_entry(spawn, list);
	list_move(&spawn->list, secondary_spawns);

	if (list_is_last(&n->list, stack))
		return top;

	n = list_next_entry(n, list);
	if (!spawn->dead)
		n->dead = false;

	return &n->inst->alg.cra_users;
}

static void crypto_remove_instance(struct crypto_instance *inst,
				   struct list_head *list)
{
	struct crypto_template *tmpl = inst->tmpl;

	if (crypto_is_dead(&inst->alg))
		return;

	inst->alg.cra_flags |= CRYPTO_ALG_DEAD;

	if (!tmpl || !crypto_tmpl_get(tmpl))
		return;

	list_move(&inst->alg.cra_list, list);
	hlist_del(&inst->list);
	inst->alg.cra_destroy = crypto_destroy_instance;

	BUG_ON(!list_empty(&inst->alg.cra_users));
}

/*
 * Given an algorithm alg, remove all algorithms that depend on it
 * through spawns.  If nalg is not null, then exempt any algorithms
 * that are depended on by nalg.  This is useful when nalg itself
 * depends on alg.
 */
void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list,
			  struct crypto_alg *nalg)
{
	u32 new_type = (nalg ?: alg)->cra_flags;
	struct crypto_spawn *spawn, *n;
	LIST_HEAD(secondary_spawns);
	struct list_head *spawns;
	LIST_HEAD(stack);
	LIST_HEAD(top);

	spawns = &alg->cra_users;
	list_for_each_entry_safe(spawn, n, spawns, list) {
		if ((spawn->alg->cra_flags ^ new_type) & spawn->mask)
			continue;

		list_move(&spawn->list, &top);
	}

	/*
	 * Perform a depth-first walk starting from alg through
	 * the cra_users tree.  The list stack records the path
	 * from alg to the current spawn.
	 */
	spawns = &top;
	do {
		while (!list_empty(spawns)) {
			struct crypto_instance *inst;

			spawn = list_first_entry(spawns, struct crypto_spawn,
						 list);
			inst = spawn->inst;

			list_move(&spawn->list, &stack);
			spawn->dead = !spawn->registered || &inst->alg != nalg;

			if (!spawn->registered)
				break;

			BUG_ON(&inst->alg == alg);

			if (&inst->alg == nalg)
				break;

			spawns = &inst->alg.cra_users;

			/*
			 * Even though spawn->registered is true here, the
			 * instance itself may still be unregistered, e.g.
			 * because its registration failed.  An instance's
			 * spawns are set up prior to the instance being
			 * registered, and an unregistered instance will have
			 * NULL ->cra_users.next, since ->cra_users isn't
			 * properly initialized until registration.  But an
			 * unregistered instance cannot have any users, so
			 * treat it the same as ->cra_users being empty.
			 */
			if (spawns->next == NULL)
				break;
		}
	} while ((spawns = crypto_more_spawns(alg, &stack, &top,
					      &secondary_spawns)));

	/*
	 * Remove all instances that are marked as dead.  Also
	 * complete the resurrection of the others by moving them
	 * back to the cra_users list.
	 */
	list_for_each_entry_safe(spawn, n, &secondary_spawns, list) {
		if (!spawn->dead)
			list_move(&spawn->list, &spawn->alg->cra_users);
		else if (spawn->registered)
			crypto_remove_instance(spawn->inst, list);
	}
}
EXPORT_SYMBOL_GPL(crypto_remove_spawns);
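
/*
 * Illustrative scenario (annotation with hypothetical names): when
 * "aes-generic" is unregistered, instances built on it such as
 * "cbc(aes-generic)" and "ctr(aes-generic)" sit on its cra_users list
 * and are torn down here, together with any deeper users built on top
 * of those instances.  If the removal was triggered by a replacement
 * algorithm nalg, everything nalg itself depends on is resurrected
 * rather than removed.
 */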

static struct crypto_larval *__crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_alg *q;
	struct crypto_larval *larval;
	int ret = -EAGAIN;

	if (crypto_is_dead(alg))
		goto err;

	INIT_LIST_HEAD(&alg->cra_users);

	/* No cheating! */
	alg->cra_flags &= ~CRYPTO_ALG_TESTED;

	ret = -EEXIST;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			goto err;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			if (!strcmp(alg->cra_driver_name, q->cra_driver_name))
				goto err;
			continue;
		}

		if (!strcmp(q->cra_driver_name, alg->cra_name) ||
		    !strcmp(q->cra_name, alg->cra_driver_name))
			goto err;
	}

	larval = crypto_larval_alloc(alg->cra_name,
				     alg->cra_flags | CRYPTO_ALG_TESTED, 0);
	if (IS_ERR(larval))
		goto out;

	ret = -ENOENT;
	larval->adult = crypto_mod_get(alg);
	if (!larval->adult)
		goto free_larval;

	refcount_set(&larval->alg.cra_refcnt, 1);
	memcpy(larval->alg.cra_driver_name, alg->cra_driver_name,
	       CRYPTO_MAX_ALG_NAME);
	larval->alg.cra_priority = alg->cra_priority;

	list_add(&alg->cra_list, &crypto_alg_list);
	list_add(&larval->alg.cra_list, &crypto_alg_list);

	crypto_stats_init(alg);

out:
	return larval;

free_larval:
	kfree(larval);
err:
	larval = ERR_PTR(ret);
	goto out;
}

void crypto_alg_tested(const char *name, int err)
{
	struct crypto_larval *test;
	struct crypto_alg *alg;
	struct crypto_alg *q;
	LIST_HEAD(list);
	bool best;

	down_write(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (crypto_is_moribund(q) || !crypto_is_larval(q))
			continue;

		test = (struct crypto_larval *)q;

		if (!strcmp(q->cra_driver_name, name))
			goto found;
	}

	pr_err("alg: Unexpected test result for %s: %d\n", name, err);
	goto unlock;

found:
	q->cra_flags |= CRYPTO_ALG_DEAD;
	alg = test->adult;
	if (err || list_empty(&alg->cra_list))
		goto complete;

	alg->cra_flags |= CRYPTO_ALG_TESTED;

	/* Only satisfy larval waiters if we are the best. */
	best = true;
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (crypto_is_moribund(q) || !crypto_is_larval(q))
			continue;

		if (strcmp(alg->cra_name, q->cra_name))
			continue;

		if (q->cra_priority > alg->cra_priority) {
			best = false;
			break;
		}
	}

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			continue;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			struct crypto_larval *larval = (void *)q;

			/*
			 * Check to see if either our generic name or
			 * specific name can satisfy the name requested
			 * by the larval entry q.
			 */
			if (strcmp(alg->cra_name, q->cra_name) &&
			    strcmp(alg->cra_driver_name, q->cra_name))
				continue;

			if (larval->adult)
				continue;
			if ((q->cra_flags ^ alg->cra_flags) & larval->mask)
				continue;

			if (best && crypto_mod_get(alg))
				larval->adult = alg;
			else
				larval->adult = ERR_PTR(-EAGAIN);

			continue;
		}

		if (strcmp(alg->cra_name, q->cra_name))
			continue;

		if (strcmp(alg->cra_driver_name, q->cra_driver_name) &&
		    q->cra_priority > alg->cra_priority)
			continue;

		crypto_remove_spawns(q, &list, alg);
	}

complete:
	complete_all(&test->completion);

unlock:
	up_write(&crypto_alg_sem);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_alg_tested);

void crypto_remove_final(struct list_head *list)
{
	struct crypto_alg *alg;
	struct crypto_alg *n;

	list_for_each_entry_safe(alg, n, list, cra_list) {
		list_del_init(&alg->cra_list);
		crypto_alg_put(alg);
	}
}
EXPORT_SYMBOL_GPL(crypto_remove_final);

static void crypto_wait_for_test(struct crypto_larval *larval)
{
	int err;

	err = crypto_probing_notify(CRYPTO_MSG_ALG_REGISTER, larval->adult);
	if (err != NOTIFY_STOP) {
		if (WARN_ON(err != NOTIFY_DONE))
			goto out;
		crypto_alg_tested(larval->alg.cra_driver_name, 0);
	}

	err = wait_for_completion_killable(&larval->completion);
	WARN_ON(err);
	if (!err)
		crypto_probing_notify(CRYPTO_MSG_ALG_LOADED, larval);

out:
	crypto_larval_kill(&larval->alg);
}

int crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_larval *larval;
	int err;

	alg->cra_flags &= ~CRYPTO_ALG_DEAD;
	err = crypto_check_alg(alg);
	if (err)
		return err;

	down_write(&crypto_alg_sem);
	larval = __crypto_register_alg(alg);
	up_write(&crypto_alg_sem);

	if (IS_ERR(larval))
		return PTR_ERR(larval);

	crypto_wait_for_test(larval);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_register_alg);
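
/*
 * Usage sketch (not part of the original file; names and field values
 * are illustrative assumptions): a module registering one algorithm at
 * init and unregistering it at exit.  Note that crypto_register_alg()
 * does not return until self-testing of the new algorithm completes.
 * module_init()/module_exit() wiring is omitted for brevity.
 */
#if 0
static struct crypto_alg demo_alg = {
	.cra_name		= "demo",
	.cra_driver_name	= "demo-generic",
	.cra_priority		= 100,
	.cra_blocksize		= 1,
	.cra_module		= THIS_MODULE,
};

static int __init demo_mod_init(void)
{
	return crypto_register_alg(&demo_alg);
}

static void __exit demo_mod_exit(void)
{
	crypto_unregister_alg(&demo_alg);
}
#endif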

static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list)
{
	if (unlikely(list_empty(&alg->cra_list)))
		return -ENOENT;

	alg->cra_flags |= CRYPTO_ALG_DEAD;

	list_del_init(&alg->cra_list);
	crypto_remove_spawns(alg, list, NULL);

	return 0;
}

void crypto_unregister_alg(struct crypto_alg *alg)
{
	int ret;
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);
	ret = crypto_remove_alg(alg, &list);
	up_write(&crypto_alg_sem);

	if (WARN(ret, "Algorithm %s is not registered", alg->cra_driver_name))
		return;

	BUG_ON(refcount_read(&alg->cra_refcnt) != 1);
	if (alg->cra_destroy)
		alg->cra_destroy(alg);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_unregister_alg);

int crypto_register_algs(struct crypto_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_alg(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_alg(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_algs);
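
/*
 * Sketch (annotation): drivers that expose several algorithms usually
 * register them as an array, e.g.
 *
 *	err = crypto_register_algs(demo_algs, ARRAY_SIZE(demo_algs));
 *
 * On failure the entries that did register have already been unwound
 * above, so the caller only needs to propagate the error.
 */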

void crypto_unregister_algs(struct crypto_alg *algs, int count)
{
	int i;

	for (i = 0; i < count; i++)
		crypto_unregister_alg(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_algs);

int crypto_register_template(struct crypto_template *tmpl)
{
	struct crypto_template *q;
	int err = -EEXIST;

	down_write(&crypto_alg_sem);

	crypto_check_module_sig(tmpl->module);

	list_for_each_entry(q, &crypto_template_list, list) {
		if (q == tmpl)
			goto out;
	}

	list_add(&tmpl->list, &crypto_template_list);
	err = 0;
out:
	up_write(&crypto_alg_sem);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_template);
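
/*
 * Sketch of a template definition (hypothetical; a real ->create would
 * parse the rtattr parameters, allocate an instance wrapping the
 * underlying algorithm and call crypto_register_instance()):
 */
#if 0
static int demo_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	return -ENOSYS;	/* placeholder for real instance construction */
}

static struct crypto_template demo_tmpl = {
	.name	= "demo",
	.create	= demo_create,
	.module	= THIS_MODULE,
};

/* Registered from module init via crypto_register_template(&demo_tmpl). */
#endif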

int crypto_register_templates(struct crypto_template *tmpls, int count)
{
	int i, err;

	for (i = 0; i < count; i++) {
		err = crypto_register_template(&tmpls[i]);
		if (err)
			goto out;
	}
	return 0;

out:
	for (--i; i >= 0; --i)
		crypto_unregister_template(&tmpls[i]);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_templates);

void crypto_unregister_template(struct crypto_template *tmpl)
{
	struct crypto_instance *inst;
	struct hlist_node *n;
	struct hlist_head *list;
	LIST_HEAD(users);

	down_write(&crypto_alg_sem);

	BUG_ON(list_empty(&tmpl->list));
	list_del_init(&tmpl->list);

	list = &tmpl->instances;
	hlist_for_each_entry(inst, list, list) {
		int err = crypto_remove_alg(&inst->alg, &users);

		BUG_ON(err);
	}

	up_write(&crypto_alg_sem);

	hlist_for_each_entry_safe(inst, n, list, list) {
		BUG_ON(refcount_read(&inst->alg.cra_refcnt) != 1);
		crypto_free_instance(inst);
	}
	crypto_remove_final(&users);
}
EXPORT_SYMBOL_GPL(crypto_unregister_template);

void crypto_unregister_templates(struct crypto_template *tmpls, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_template(&tmpls[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_templates);

static struct crypto_template *__crypto_lookup_template(const char *name)
{
	struct crypto_template *q, *tmpl = NULL;

	down_read(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_template_list, list) {
		if (strcmp(q->name, name))
			continue;
		if (unlikely(!crypto_tmpl_get(q)))
			continue;

		tmpl = q;
		break;
	}
	up_read(&crypto_alg_sem);

	return tmpl;
}

struct crypto_template *crypto_lookup_template(const char *name)
{
	return try_then_request_module(__crypto_lookup_template(name),
				       "crypto-%s", name);
}
EXPORT_SYMBOL_GPL(crypto_lookup_template);
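
/*
 * Note (annotation): the "crypto-%s" format means a lookup for, say,
 * template "xts" may trigger a modprobe of the "crypto-xts" alias,
 * which template modules typically provide via MODULE_ALIAS_CRYPTO("xts").
 */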

int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst)
{
	struct crypto_larval *larval;
	struct crypto_spawn *spawn;
	int err;

	err = crypto_check_alg(&inst->alg);
	if (err)
		return err;

	inst->alg.cra_module = tmpl->module;
	inst->alg.cra_flags |= CRYPTO_ALG_INSTANCE;

	down_write(&crypto_alg_sem);

	larval = ERR_PTR(-EAGAIN);
	for (spawn = inst->spawns; spawn;) {
		struct crypto_spawn *next;

		if (spawn->dead)
			goto unlock;

		next = spawn->next;
		spawn->inst = inst;
		spawn->registered = true;

		if (spawn->dropref)
			crypto_mod_put(spawn->alg);

		spawn = next;
	}

	larval = __crypto_register_alg(&inst->alg);
	if (IS_ERR(larval))
		goto unlock;

	hlist_add_head(&inst->list, &tmpl->instances);
	inst->tmpl = tmpl;

unlock:
	up_write(&crypto_alg_sem);

	err = PTR_ERR(larval);
	if (IS_ERR(larval))
		goto err;

	crypto_wait_for_test(larval);
	err = 0;

err:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_instance);

void crypto_unregister_instance(struct crypto_instance *inst)
{
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);

	crypto_remove_spawns(&inst->alg, &list, NULL);
	crypto_remove_instance(inst, &list);

	up_write(&crypto_alg_sem);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_unregister_instance);

int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
		      struct crypto_instance *inst, u32 mask)
{
	int err = -EAGAIN;

	if (WARN_ON_ONCE(inst == NULL))
		return -EINVAL;

	spawn->next = inst->spawns;
	inst->spawns = spawn;

	spawn->mask = mask;

	down_write(&crypto_alg_sem);
	if (!crypto_is_moribund(alg)) {
		list_add(&spawn->list, &alg->cra_users);
		spawn->alg = alg;
		err = 0;
	}
	up_write(&crypto_alg_sem);

	return err;
}
EXPORT_SYMBOL_GPL(crypto_init_spawn);

int crypto_init_spawn2(struct crypto_spawn *spawn, struct crypto_alg *alg,
		       struct crypto_instance *inst,
		       const struct crypto_type *frontend)
{
	int err = -EINVAL;

	if ((alg->cra_flags ^ frontend->type) & frontend->maskset)
		goto out;

	spawn->frontend = frontend;
	err = crypto_init_spawn(spawn, alg, inst, frontend->maskset);

out:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_init_spawn2);

int crypto_grab_spawn(struct crypto_spawn *spawn, const char *name,
		      u32 type, u32 mask)
{
	struct crypto_alg *alg;
	int err;

	alg = crypto_find_alg(name, spawn->frontend, type, mask);
	if (IS_ERR(alg))
		return PTR_ERR(alg);

	spawn->dropref = true;
	err = crypto_init_spawn(spawn, alg, spawn->inst, mask);
	if (err)
		crypto_mod_put(alg);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_grab_spawn);
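
/*
 * Sketch (hypothetical use inside a template's ->create; assumes the
 * caller has already set spawn->inst and, if needed, spawn->frontend):
 * grab the underlying single-block cipher by name.
 */
#if 0
static int demo_grab_cipher(struct crypto_spawn *spawn,
			    const char *cipher_name)
{
	return crypto_grab_spawn(spawn, cipher_name,
				 CRYPTO_ALG_TYPE_CIPHER,
				 CRYPTO_ALG_TYPE_MASK);
}
#endif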

void crypto_drop_spawn(struct crypto_spawn *spawn)
{
	down_write(&crypto_alg_sem);
	if (!spawn->dead)
		list_del(&spawn->list);
	up_write(&crypto_alg_sem);

	if (spawn->dropref && !spawn->registered)
		crypto_mod_put(spawn->alg);
}
EXPORT_SYMBOL_GPL(crypto_drop_spawn);

static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg;

	down_read(&crypto_alg_sem);
	alg = spawn->alg;
	if (!spawn->dead && !crypto_mod_get(alg)) {
		alg->cra_flags |= CRYPTO_ALG_DYING;
		alg = NULL;
	}
	up_read(&crypto_alg_sem);

	return alg ?: ERR_PTR(-EAGAIN);
}

struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_tfm *tfm;

	alg = crypto_spawn_alg(spawn);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	tfm = ERR_PTR(-EINVAL);
	if (unlikely((alg->cra_flags ^ type) & mask))
		goto out_put_alg;

	tfm = __crypto_alloc_tfm(alg, type, mask);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return tfm;

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm);

void *crypto_spawn_tfm2(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg;
	struct crypto_tfm *tfm;

	alg = crypto_spawn_alg(spawn);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	tfm = crypto_create_tfm(alg, spawn->frontend);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return tfm;

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm2);

int crypto_register_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_register(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_register_notifier);

int crypto_unregister_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_unregister(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_unregister_notifier);

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb)
{
	struct rtattr *rta = tb[0];
	struct crypto_attr_type *algt;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*algt))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_TYPE)
		return ERR_PTR(-EINVAL);

	algt = RTA_DATA(rta);

	return algt;
}
EXPORT_SYMBOL_GPL(crypto_get_attr_type);

int crypto_check_attr_type(struct rtattr **tb, u32 type)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ type) & algt->mask)
		return -EINVAL;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_check_attr_type);

const char *crypto_attr_alg_name(struct rtattr *rta)
{
	struct crypto_attr_alg *alga;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*alga))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_ALG)
		return ERR_PTR(-EINVAL);

	alga = RTA_DATA(rta);
	alga->name[CRYPTO_MAX_ALG_NAME - 1] = 0;

	return alga->name;
}
EXPORT_SYMBOL_GPL(crypto_attr_alg_name);

struct crypto_alg *crypto_attr_alg2(struct rtattr *rta,
				    const struct crypto_type *frontend,
				    u32 type, u32 mask)
{
	const char *name;

	name = crypto_attr_alg_name(rta);
	if (IS_ERR(name))
		return ERR_CAST(name);

	return crypto_find_alg(name, frontend, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_attr_alg2);

int crypto_attr_u32(struct rtattr *rta, u32 *num)
{
	struct crypto_attr_u32 *nu32;

	if (!rta)
		return -ENOENT;
	if (RTA_PAYLOAD(rta) < sizeof(*nu32))
		return -EINVAL;
	if (rta->rta_type != CRYPTOA_U32)
		return -EINVAL;

	nu32 = RTA_DATA(rta);
	*num = nu32->num;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_attr_u32);

int crypto_inst_setname(struct crypto_instance *inst, const char *name,
			struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name,
		     alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s(%s)",
		     name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_inst_setname);
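
/*
 * Worked example (annotation): with name "cbc" and an alg whose
 * cra_name is "aes" and cra_driver_name is "aes-generic", this sets
 * cra_name to "cbc(aes)" and cra_driver_name to "cbc(aes-generic)",
 * returning -ENAMETOOLONG if either string would exceed
 * CRYPTO_MAX_ALG_NAME.
 */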

void *crypto_alloc_instance(const char *name, struct crypto_alg *alg,
			    unsigned int head)
{
	struct crypto_instance *inst;
	char *p;
	int err;

	p = kzalloc(head + sizeof(*inst) + sizeof(struct crypto_spawn),
		    GFP_KERNEL);
	if (!p)
		return ERR_PTR(-ENOMEM);

	inst = (void *)(p + head);

	err = crypto_inst_setname(inst, name, alg);
	if (err)
		goto err_free_inst;

	return p;

err_free_inst:
	kfree(p);
	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_instance);

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen)
{
	INIT_LIST_HEAD(&queue->list);
	queue->backlog = &queue->list;
	queue->qlen = 0;
	queue->max_qlen = max_qlen;
}
EXPORT_SYMBOL_GPL(crypto_init_queue);

int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request)
{
	int err = -EINPROGRESS;

	if (unlikely(queue->qlen >= queue->max_qlen)) {
		if (!(request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG)) {
			err = -ENOSPC;
			goto out;
		}
		err = -EBUSY;
		if (queue->backlog == &queue->list)
			queue->backlog = &request->list;
	}

	queue->qlen++;
	list_add_tail(&request->list, &queue->list);

out:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_enqueue_request);

struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue)
{
	struct list_head *request;

	if (unlikely(!queue->qlen))
		return NULL;

	queue->qlen--;

	if (queue->backlog != &queue->list)
		queue->backlog = queue->backlog->next;

	request = queue->list.next;
	list_del(request);

	return list_entry(request, struct crypto_async_request, list);
}
EXPORT_SYMBOL_GPL(crypto_dequeue_request);
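
/*
 * Usage sketch (hypothetical driver-side pattern; the demo_* names are
 * assumptions, and the queue is assumed to have been set up with
 * crypto_init_queue(&demo_queue, 32)): requests are enqueued from the
 * caller's context and pulled off one at a time by an engine thread.
 * A backlogged request is signalled with -EINPROGRESS before the next
 * request is processed.
 */
#if 0
static struct crypto_queue demo_queue;
static DEFINE_SPINLOCK(demo_lock);

static int demo_enqueue(struct crypto_async_request *req)
{
	int err;

	spin_lock_bh(&demo_lock);
	err = crypto_enqueue_request(&demo_queue, req);
	spin_unlock_bh(&demo_lock);

	return err;	/* -EINPROGRESS, -EBUSY (backlogged) or -ENOSPC */
}

static void demo_do_one(void)
{
	struct crypto_async_request *req, *backlog;

	spin_lock_bh(&demo_lock);
	backlog = crypto_get_backlog(&demo_queue);
	req = crypto_dequeue_request(&demo_queue);
	spin_unlock_bh(&demo_lock);

	if (backlog)
		backlog->complete(backlog, -EINPROGRESS);
	if (req)
		req->complete(req, 0);	/* after processing the request */
}
#endif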

static inline void crypto_inc_byte(u8 *a, unsigned int size)
{
	u8 *b = (a + size);
	u8 c;

	for (; size; size--) {
		c = *--b + 1;
		*b = c;
		if (c)
			break;
	}
}

void crypto_inc(u8 *a, unsigned int size)
{
	__be32 *b = (__be32 *)(a + size);
	u32 c;

	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
	    IS_ALIGNED((unsigned long)b, __alignof__(*b)))
		for (; size >= 4; size -= 4) {
			c = be32_to_cpu(*--b) + 1;
			*b = cpu_to_be32(c);
			if (likely(c))
				return;
		}

	crypto_inc_byte(a, size);
}
EXPORT_SYMBOL_GPL(crypto_inc);
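
/*
 * Worked example (annotation): crypto_inc() treats the buffer as one
 * big-endian counter, so incrementing the 4-byte block
 * { 0x00, 0x00, 0xff, 0xff } yields { 0x00, 0x01, 0x00, 0x00 }; the
 * loop stops at the first 32-bit word that does not wrap to zero.
 */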

void __crypto_xor(u8 *dst, const u8 *src1, const u8 *src2, unsigned int len)
{
	int relalign = 0;

	if (!IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)) {
		int size = sizeof(unsigned long);
		int d = (((unsigned long)dst ^ (unsigned long)src1) |
			 ((unsigned long)dst ^ (unsigned long)src2)) &
			(size - 1);

		relalign = d ? 1 << __ffs(d) : size;

		/*
		 * If we care about alignment, process as many bytes as
		 * needed to advance dst and src to values whose alignments
		 * equal their relative alignment. This will allow us to
		 * process the remainder of the input using optimal strides.
		 */
		while (((unsigned long)dst & (relalign - 1)) && len > 0) {
			*dst++ = *src1++ ^ *src2++;
			len--;
		}
	}

	while (IS_ENABLED(CONFIG_64BIT) && len >= 8 && !(relalign & 7)) {
		*(u64 *)dst = *(u64 *)src1 ^ *(u64 *)src2;
		dst += 8;
		src1 += 8;
		src2 += 8;
		len -= 8;
	}

	while (len >= 4 && !(relalign & 3)) {
		*(u32 *)dst = *(u32 *)src1 ^ *(u32 *)src2;
		dst += 4;
		src1 += 4;
		src2 += 4;
		len -= 4;
	}

	while (len >= 2 && !(relalign & 1)) {
		*(u16 *)dst = *(u16 *)src1 ^ *(u16 *)src2;
		dst += 2;
		src1 += 2;
		src2 += 2;
		len -= 2;
	}

	while (len--)
		*dst++ = *src1++ ^ *src2++;
}
EXPORT_SYMBOL_GPL(__crypto_xor);
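
/*
 * Worked example (annotation): on a 64-bit machine without efficient
 * unaligned access, with dst ending in ...5, src1 in ...7 and src2 in
 * ...3, d = ((5 ^ 7) | (5 ^ 3)) & 7 = 6, so relalign = 1 << __ffs(6) = 2.
 * One head byte brings dst to a 2-byte boundary, after which the bulk
 * of the data is XORed in u16 strides.
 */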

unsigned int crypto_alg_extsize(struct crypto_alg *alg)
{
	return alg->cra_ctxsize +
	       (alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1));
}
EXPORT_SYMBOL_GPL(crypto_alg_extsize);
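
/*
 * Worked example (annotation): with cra_ctxsize = 64, cra_alignmask = 15
 * and an 8-byte crypto_tfm_ctx_alignment(), this returns
 * 64 + (15 & ~7) = 72, reserving enough slack to realign the context
 * to the algorithm's 16-byte requirement.
 */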

int crypto_type_has_alg(const char *name, const struct crypto_type *frontend,
			u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_find_alg(name, frontend, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_type_has_alg);

#ifdef CONFIG_CRYPTO_STATS
void crypto_stats_init(struct crypto_alg *alg)
{
	memset(&alg->stats, 0, sizeof(alg->stats));
}
EXPORT_SYMBOL_GPL(crypto_stats_init);

void crypto_stats_get(struct crypto_alg *alg)
{
	crypto_alg_get(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_get);

void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.aead.err_cnt);
	} else {
		atomic64_inc(&alg->stats.aead.encrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.aead.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_aead_encrypt);

void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.aead.err_cnt);
	} else {
		atomic64_inc(&alg->stats.aead.decrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.aead.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_aead_decrypt);

void crypto_stats_akcipher_encrypt(unsigned int src_len, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.akcipher.encrypt_cnt);
		atomic64_add(src_len, &alg->stats.akcipher.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_encrypt);

void crypto_stats_akcipher_decrypt(unsigned int src_len, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.akcipher.decrypt_cnt);
		atomic64_add(src_len, &alg->stats.akcipher.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_decrypt);

void crypto_stats_akcipher_sign(int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	else
		atomic64_inc(&alg->stats.akcipher.sign_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_sign);

void crypto_stats_akcipher_verify(int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	else
		atomic64_inc(&alg->stats.akcipher.verify_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_verify);

void crypto_stats_compress(unsigned int slen, int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.compress.err_cnt);
	} else {
		atomic64_inc(&alg->stats.compress.compress_cnt);
		atomic64_add(slen, &alg->stats.compress.compress_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_compress);

void crypto_stats_decompress(unsigned int slen, int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.compress.err_cnt);
	} else {
		atomic64_inc(&alg->stats.compress.decompress_cnt);
		atomic64_add(slen, &alg->stats.compress.decompress_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_decompress);

void crypto_stats_ahash_update(unsigned int nbytes, int ret,
			       struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.hash.err_cnt);
	else
		atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_ahash_update);

void crypto_stats_ahash_final(unsigned int nbytes, int ret,
			      struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.hash.err_cnt);
	} else {
		atomic64_inc(&alg->stats.hash.hash_cnt);
		atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_ahash_final);

void crypto_stats_kpp_set_secret(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.setsecret_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_set_secret);

void crypto_stats_kpp_generate_public_key(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.generate_public_key_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_generate_public_key);

void crypto_stats_kpp_compute_shared_secret(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.compute_shared_secret_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_compute_shared_secret);

void crypto_stats_rng_seed(struct crypto_alg *alg, int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.rng.err_cnt);
	else
		atomic64_inc(&alg->stats.rng.seed_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_rng_seed);

void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.rng.err_cnt);
	} else {
		atomic64_inc(&alg->stats.rng.generate_cnt);
		atomic64_add(dlen, &alg->stats.rng.generate_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_rng_generate);

void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.cipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.cipher.encrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.cipher.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_skcipher_encrypt);

void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.cipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.cipher.decrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.cipher.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_skcipher_decrypt);
#endif

static int __init crypto_algapi_init(void)
{
	crypto_init_proc();
	return 0;
}

static void __exit crypto_algapi_exit(void)
{
	crypto_exit_proc();
}

module_init(crypto_algapi_init);
module_exit(crypto_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Cryptographic algorithms API");