1 // SPDX-License-Identifier: GPL-2.0-or-later
2 /*
3 * Cryptographic API for algorithms (i.e., low-level API).
4 *
5 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
6 */
7
8 #include <crypto/algapi.h>
9 #include <crypto/internal/simd.h>
10 #include <linux/err.h>
11 #include <linux/errno.h>
12 #include <linux/fips.h>
13 #include <linux/init.h>
14 #include <linux/kernel.h>
15 #include <linux/list.h>
16 #include <linux/module.h>
17 #include <linux/rtnetlink.h>
18 #include <linux/slab.h>
19 #include <linux/string.h>
20
21 #include "internal.h"
22
23 static LIST_HEAD(crypto_template_list);
24
25 #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
26 DEFINE_PER_CPU(bool, crypto_simd_disabled_for_test);
27 EXPORT_PER_CPU_SYMBOL_GPL(crypto_simd_disabled_for_test);
28 #endif
29
30 static inline void crypto_check_module_sig(struct module *mod)
31 {
32 if (fips_enabled && mod && !module_sig_ok(mod))
33 panic("Module %s signature verification failed in FIPS mode\n",
34 module_name(mod));
35 }
36
37 static int crypto_check_alg(struct crypto_alg *alg)
38 {
39 crypto_check_module_sig(alg->cra_module);
40
41 if (!alg->cra_name[0] || !alg->cra_driver_name[0])
42 return -EINVAL;
43
44 if (alg->cra_alignmask & (alg->cra_alignmask + 1))
45 return -EINVAL;
46
47 /* General maximums for all algs. */
48 if (alg->cra_alignmask > MAX_ALGAPI_ALIGNMASK)
49 return -EINVAL;
50
51 if (alg->cra_blocksize > MAX_ALGAPI_BLOCKSIZE)
52 return -EINVAL;
53
54 /* Lower maximums for specific alg types. */
55 if (!alg->cra_type && (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
56 CRYPTO_ALG_TYPE_CIPHER) {
57 if (alg->cra_alignmask > MAX_CIPHER_ALIGNMASK)
58 return -EINVAL;
59
60 if (alg->cra_blocksize > MAX_CIPHER_BLOCKSIZE)
61 return -EINVAL;
62 }
63
64 if (alg->cra_priority < 0)
65 return -EINVAL;
66
67 refcount_set(&alg->cra_refcnt, 1);
68
69 return 0;
70 }
71
72 static void crypto_free_instance(struct crypto_instance *inst)
73 {
74 inst->alg.cra_type->free(inst);
75 }
76
77 static void crypto_destroy_instance(struct crypto_alg *alg)
78 {
79 struct crypto_instance *inst = (void *)alg;
80 struct crypto_template *tmpl = inst->tmpl;
81
82 crypto_free_instance(inst);
83 crypto_tmpl_put(tmpl);
84 }
85
86 /*
87 * This function adds a spawn to the list secondary_spawns which
88 * will be used at the end of crypto_remove_spawns to unregister
89 * instances, unless the spawn happens to be one that the new
90 * algorithm (nalg in crypto_remove_spawns) depends on.
91 *
92 * This function is also responsible for resurrecting any algorithms
93 * in the dependency chain of nalg by unsetting n->dead.
94 */
95 static struct list_head *crypto_more_spawns(struct crypto_alg *alg,
96 struct list_head *stack,
97 struct list_head *top,
98 struct list_head *secondary_spawns)
99 {
100 struct crypto_spawn *spawn, *n;
101
102 spawn = list_first_entry_or_null(stack, struct crypto_spawn, list);
103 if (!spawn)
104 return NULL;
105
106 n = list_prev_entry(spawn, list);
107 list_move(&spawn->list, secondary_spawns);
108
109 if (list_is_last(&n->list, stack))
110 return top;
111
112 n = list_next_entry(n, list);
113 if (!spawn->dead)
114 n->dead = false;
115
116 return &n->inst->alg.cra_users;
117 }
118
119 static void crypto_remove_instance(struct crypto_instance *inst,
120 struct list_head *list)
121 {
122 struct crypto_template *tmpl = inst->tmpl;
123
124 if (crypto_is_dead(&inst->alg))
125 return;
126
127 inst->alg.cra_flags |= CRYPTO_ALG_DEAD;
128
129 if (!tmpl || !crypto_tmpl_get(tmpl))
130 return;
131
132 list_move(&inst->alg.cra_list, list);
133 hlist_del(&inst->list);
134 inst->alg.cra_destroy = crypto_destroy_instance;
135
136 BUG_ON(!list_empty(&inst->alg.cra_users));
137 }
138
139 /*
140 * Given an algorithm alg, remove all algorithms that depend on it
141 * through spawns. If nalg is not null, then exempt any algorithm
142 * that nalg depends on. This is useful when nalg itself
143 * depends on alg.
144 */
145 void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list,
146 struct crypto_alg *nalg)
147 {
148 u32 new_type = (nalg ?: alg)->cra_flags;
149 struct crypto_spawn *spawn, *n;
150 LIST_HEAD(secondary_spawns);
151 struct list_head *spawns;
152 LIST_HEAD(stack);
153 LIST_HEAD(top);
154
155 spawns = &alg->cra_users;
156 list_for_each_entry_safe(spawn, n, spawns, list) {
157 if ((spawn->alg->cra_flags ^ new_type) & spawn->mask)
158 continue;
159
160 list_move(&spawn->list, &top);
161 }
162
163 /*
164 * Perform a depth-first walk starting from alg through
165 * the cra_users tree. The list stack records the path
166 * from alg to the current spawn.
167 */
168 spawns = &top;
169 do {
170 while (!list_empty(spawns)) {
171 struct crypto_instance *inst;
172
173 spawn = list_first_entry(spawns, struct crypto_spawn,
174 list);
175 inst = spawn->inst;
176
177 list_move(&spawn->list, &stack);
178 spawn->dead = !spawn->registered || &inst->alg != nalg;
179
180 if (!spawn->registered)
181 break;
182
183 BUG_ON(&inst->alg == alg);
184
185 if (&inst->alg == nalg)
186 break;
187
188 spawns = &inst->alg.cra_users;
189
190 /*
191 * Even if spawn->registered is true, the
192 * instance itself may still be unregistered.
193 * This is because it may have failed during
194 * registration. Therefore we still need to
195 * make the following test.
196 *
197 * We may encounter an unregistered instance here, since
198 * an instance's spawns are set up prior to the instance
199 * being registered. An unregistered instance will have
200 * NULL ->cra_users.next, since ->cra_users isn't
201 * properly initialized until registration. But an
202 * unregistered instance cannot have any users, so treat
203 * it the same as ->cra_users being empty.
204 */
205 if (spawns->next == NULL)
206 break;
207 }
208 } while ((spawns = crypto_more_spawns(alg, &stack, &top,
209 &secondary_spawns)));
210
211 /*
212 * Remove all instances that are marked as dead. Also
213 * complete the resurrection of the others by moving them
214 * back to the cra_users list.
215 */
216 list_for_each_entry_safe(spawn, n, &secondary_spawns, list) {
217 if (!spawn->dead)
218 list_move(&spawn->list, &spawn->alg->cra_users);
219 else if (spawn->registered)
220 crypto_remove_instance(spawn->inst, list);
221 }
222 }
223 EXPORT_SYMBOL_GPL(crypto_remove_spawns);
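/*
 * Worked example (hypothetical names, for illustration only): when an
 * "aes-generic" implementation is removed, the walk above marks the
 * instances built on it, such as "cbc(aes)" backed by "cbc(aes-generic)"
 * and anything stacked on top of those, as dead and collects them on @list
 * for unregistration.  If @nalg is a replacement implementation that itself
 * uses "aes-generic", the instances @nalg depends on are exempted and
 * resurrected instead of being removed.
 */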
224
225 static void crypto_alg_finish_registration(struct crypto_alg *alg,
226 bool fulfill_requests,
227 struct list_head *algs_to_put)
228 {
229 struct crypto_alg *q;
230
231 list_for_each_entry(q, &crypto_alg_list, cra_list) {
232 if (q == alg)
233 continue;
234
235 if (crypto_is_moribund(q))
236 continue;
237
238 if (crypto_is_larval(q)) {
239 struct crypto_larval *larval = (void *)q;
240
241 /*
242 * Check to see if either our generic name or
243 * specific name can satisfy the name requested
244 * by the larval entry q.
245 */
246 if (strcmp(alg->cra_name, q->cra_name) &&
247 strcmp(alg->cra_driver_name, q->cra_name))
248 continue;
249
250 if (larval->adult)
251 continue;
252 if ((q->cra_flags ^ alg->cra_flags) & larval->mask)
253 continue;
254
255 if (fulfill_requests && crypto_mod_get(alg))
256 larval->adult = alg;
257 else
258 larval->adult = ERR_PTR(-EAGAIN);
259
260 continue;
261 }
262
263 if (strcmp(alg->cra_name, q->cra_name))
264 continue;
265
266 if (strcmp(alg->cra_driver_name, q->cra_driver_name) &&
267 q->cra_priority > alg->cra_priority)
268 continue;
269
270 crypto_remove_spawns(q, algs_to_put, alg);
271 }
272
273 crypto_notify(CRYPTO_MSG_ALG_LOADED, alg);
274 }
275
276 static struct crypto_larval *crypto_alloc_test_larval(struct crypto_alg *alg)
277 {
278 struct crypto_larval *larval;
279
280 if (!IS_ENABLED(CONFIG_CRYPTO_MANAGER) ||
281 IS_ENABLED(CONFIG_CRYPTO_MANAGER_DISABLE_TESTS) ||
282 (alg->cra_flags & CRYPTO_ALG_INTERNAL))
283 return NULL; /* No self-test needed */
284
285 larval = crypto_larval_alloc(alg->cra_name,
286 alg->cra_flags | CRYPTO_ALG_TESTED, 0);
287 if (IS_ERR(larval))
288 return larval;
289
290 larval->adult = crypto_mod_get(alg);
291 if (!larval->adult) {
292 kfree(larval);
293 return ERR_PTR(-ENOENT);
294 }
295
296 refcount_set(&larval->alg.cra_refcnt, 1);
297 memcpy(larval->alg.cra_driver_name, alg->cra_driver_name,
298 CRYPTO_MAX_ALG_NAME);
299 larval->alg.cra_priority = alg->cra_priority;
300
301 return larval;
302 }
303
304 static struct crypto_larval *
305 __crypto_register_alg(struct crypto_alg *alg, struct list_head *algs_to_put)
306 {
307 struct crypto_alg *q;
308 struct crypto_larval *larval;
309 int ret = -EAGAIN;
310
311 if (crypto_is_dead(alg))
312 goto err;
313
314 INIT_LIST_HEAD(&alg->cra_users);
315
316 ret = -EEXIST;
317
318 list_for_each_entry(q, &crypto_alg_list, cra_list) {
319 if (q == alg)
320 goto err;
321
322 if (crypto_is_moribund(q))
323 continue;
324
325 if (crypto_is_larval(q)) {
326 if (!strcmp(alg->cra_driver_name, q->cra_driver_name))
327 goto err;
328 continue;
329 }
330
331 if (!strcmp(q->cra_driver_name, alg->cra_name) ||
332 !strcmp(q->cra_name, alg->cra_driver_name))
333 goto err;
334 }
335
336 larval = crypto_alloc_test_larval(alg);
337 if (IS_ERR(larval))
338 goto out;
339
340 list_add(&alg->cra_list, &crypto_alg_list);
341
342 crypto_stats_init(alg);
343
344 if (larval) {
345 /* No cheating! */
346 alg->cra_flags &= ~CRYPTO_ALG_TESTED;
347
348 list_add(&larval->alg.cra_list, &crypto_alg_list);
349 } else {
350 alg->cra_flags |= CRYPTO_ALG_TESTED;
351 crypto_alg_finish_registration(alg, true, algs_to_put);
352 }
353
354 out:
355 return larval;
356
357 err:
358 larval = ERR_PTR(ret);
359 goto out;
360 }
361
362 void crypto_alg_tested(const char *name, int err)
363 {
364 struct crypto_larval *test;
365 struct crypto_alg *alg;
366 struct crypto_alg *q;
367 LIST_HEAD(list);
368 bool best;
369
370 down_write(&crypto_alg_sem);
371 list_for_each_entry(q, &crypto_alg_list, cra_list) {
372 if (crypto_is_moribund(q) || !crypto_is_larval(q))
373 continue;
374
375 test = (struct crypto_larval *)q;
376
377 if (!strcmp(q->cra_driver_name, name))
378 goto found;
379 }
380
381 pr_err("alg: Unexpected test result for %s: %d\n", name, err);
382 goto unlock;
383
384 found:
385 q->cra_flags |= CRYPTO_ALG_DEAD;
386 alg = test->adult;
387
388 if (list_empty(&alg->cra_list))
389 goto complete;
390
391 if (err == -ECANCELED)
392 alg->cra_flags |= CRYPTO_ALG_FIPS_INTERNAL;
393 else if (err)
394 goto complete;
395 else
396 alg->cra_flags &= ~CRYPTO_ALG_FIPS_INTERNAL;
397
398 alg->cra_flags |= CRYPTO_ALG_TESTED;
399
400 /*
401 * If a higher-priority implementation of the same algorithm is
402 * currently being tested, then don't fulfill request larvals.
403 */
404 best = true;
405 list_for_each_entry(q, &crypto_alg_list, cra_list) {
406 if (crypto_is_moribund(q) || !crypto_is_larval(q))
407 continue;
408
409 if (strcmp(alg->cra_name, q->cra_name))
410 continue;
411
412 if (q->cra_priority > alg->cra_priority) {
413 best = false;
414 break;
415 }
416 }
417
418 crypto_alg_finish_registration(alg, best, &list);
419
420 complete:
421 complete_all(&test->completion);
422
423 unlock:
424 up_write(&crypto_alg_sem);
425
426 crypto_remove_final(&list);
427 }
428 EXPORT_SYMBOL_GPL(crypto_alg_tested);
429
430 void crypto_remove_final(struct list_head *list)
431 {
432 struct crypto_alg *alg;
433 struct crypto_alg *n;
434
435 list_for_each_entry_safe(alg, n, list, cra_list) {
436 list_del_init(&alg->cra_list);
437 crypto_alg_put(alg);
438 }
439 }
440 EXPORT_SYMBOL_GPL(crypto_remove_final);
441
442 int crypto_register_alg(struct crypto_alg *alg)
443 {
444 struct crypto_larval *larval;
445 LIST_HEAD(algs_to_put);
446 bool test_started = false;
447 int err;
448
449 alg->cra_flags &= ~CRYPTO_ALG_DEAD;
450 err = crypto_check_alg(alg);
451 if (err)
452 return err;
453
454 down_write(&crypto_alg_sem);
455 larval = __crypto_register_alg(alg, &algs_to_put);
456 if (!IS_ERR_OR_NULL(larval)) {
457 test_started = crypto_boot_test_finished();
458 larval->test_started = test_started;
459 }
460 up_write(&crypto_alg_sem);
461
462 if (IS_ERR(larval))
463 return PTR_ERR(larval);
464 if (test_started)
465 crypto_wait_for_test(larval);
466 crypto_remove_final(&algs_to_put);
467 return 0;
468 }
469 EXPORT_SYMBOL_GPL(crypto_register_alg);
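/*
 * Minimal registration sketch (hypothetical driver; most fields elided).
 * Most algorithm types register through their type-specific wrappers
 * (crypto_register_skcipher(), crypto_register_shash(), ...), which end up
 * calling crypto_register_alg() internally:
 *
 *	static struct crypto_alg example_alg = {
 *		.cra_name		= "example",
 *		.cra_driver_name	= "example-generic",
 *		.cra_priority		= 100,
 *		.cra_blocksize		= 1,
 *		.cra_module		= THIS_MODULE,
 *	};
 *
 *	static int __init example_init(void)
 *	{
 *		return crypto_register_alg(&example_alg);
 *	}
 *
 *	static void __exit example_exit(void)
 *	{
 *		crypto_unregister_alg(&example_alg);
 *	}
 */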
470
471 static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list)
472 {
473 if (unlikely(list_empty(&alg->cra_list)))
474 return -ENOENT;
475
476 alg->cra_flags |= CRYPTO_ALG_DEAD;
477
478 list_del_init(&alg->cra_list);
479 crypto_remove_spawns(alg, list, NULL);
480
481 return 0;
482 }
483
484 void crypto_unregister_alg(struct crypto_alg *alg)
485 {
486 int ret;
487 LIST_HEAD(list);
488
489 down_write(&crypto_alg_sem);
490 ret = crypto_remove_alg(alg, &list);
491 up_write(&crypto_alg_sem);
492
493 if (WARN(ret, "Algorithm %s is not registered", alg->cra_driver_name))
494 return;
495
496 BUG_ON(refcount_read(&alg->cra_refcnt) != 1);
497 if (alg->cra_destroy)
498 alg->cra_destroy(alg);
499
500 crypto_remove_final(&list);
501 }
502 EXPORT_SYMBOL_GPL(crypto_unregister_alg);
503
504 int crypto_register_algs(struct crypto_alg *algs, int count)
505 {
506 int i, ret;
507
508 for (i = 0; i < count; i++) {
509 ret = crypto_register_alg(&algs[i]);
510 if (ret)
511 goto err;
512 }
513
514 return 0;
515
516 err:
517 for (--i; i >= 0; --i)
518 crypto_unregister_alg(&algs[i]);
519
520 return ret;
521 }
522 EXPORT_SYMBOL_GPL(crypto_register_algs);
523
524 void crypto_unregister_algs(struct crypto_alg *algs, int count)
525 {
526 int i;
527
528 for (i = 0; i < count; i++)
529 crypto_unregister_alg(&algs[i]);
530 }
531 EXPORT_SYMBOL_GPL(crypto_unregister_algs);
532
533 int crypto_register_template(struct crypto_template *tmpl)
534 {
535 struct crypto_template *q;
536 int err = -EEXIST;
537
538 down_write(&crypto_alg_sem);
539
540 crypto_check_module_sig(tmpl->module);
541
542 list_for_each_entry(q, &crypto_template_list, list) {
543 if (q == tmpl)
544 goto out;
545 }
546
547 list_add(&tmpl->list, &crypto_template_list);
548 err = 0;
549 out:
550 up_write(&crypto_alg_sem);
551 return err;
552 }
553 EXPORT_SYMBOL_GPL(crypto_register_template);
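/*
 * Registration sketch for a template (hypothetical names, for illustration):
 *
 *	static struct crypto_template example_tmpl = {
 *		.name	= "examplewrap",
 *		.create	= example_create,
 *		.module	= THIS_MODULE,
 *	};
 *
 * The module's init function then calls crypto_register_template(&example_tmpl).
 * ->create() is invoked when an instance such as "examplewrap(aes)" is
 * requested; it typically parses @tb, grabs the inner algorithm and calls
 * crypto_register_instance().
 */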
554
555 int crypto_register_templates(struct crypto_template *tmpls, int count)
556 {
557 int i, err;
558
559 for (i = 0; i < count; i++) {
560 err = crypto_register_template(&tmpls[i]);
561 if (err)
562 goto out;
563 }
564 return 0;
565
566 out:
567 for (--i; i >= 0; --i)
568 crypto_unregister_template(&tmpls[i]);
569 return err;
570 }
571 EXPORT_SYMBOL_GPL(crypto_register_templates);
572
573 void crypto_unregister_template(struct crypto_template *tmpl)
574 {
575 struct crypto_instance *inst;
576 struct hlist_node *n;
577 struct hlist_head *list;
578 LIST_HEAD(users);
579
580 down_write(&crypto_alg_sem);
581
582 BUG_ON(list_empty(&tmpl->list));
583 list_del_init(&tmpl->list);
584
585 list = &tmpl->instances;
586 hlist_for_each_entry(inst, list, list) {
587 int err = crypto_remove_alg(&inst->alg, &users);
588
589 BUG_ON(err);
590 }
591
592 up_write(&crypto_alg_sem);
593
594 hlist_for_each_entry_safe(inst, n, list, list) {
595 BUG_ON(refcount_read(&inst->alg.cra_refcnt) != 1);
596 crypto_free_instance(inst);
597 }
598 crypto_remove_final(&users);
599 }
600 EXPORT_SYMBOL_GPL(crypto_unregister_template);
601
602 void crypto_unregister_templates(struct crypto_template *tmpls, int count)
603 {
604 int i;
605
606 for (i = count - 1; i >= 0; --i)
607 crypto_unregister_template(&tmpls[i]);
608 }
609 EXPORT_SYMBOL_GPL(crypto_unregister_templates);
610
611 static struct crypto_template *__crypto_lookup_template(const char *name)
612 {
613 struct crypto_template *q, *tmpl = NULL;
614
615 down_read(&crypto_alg_sem);
616 list_for_each_entry(q, &crypto_template_list, list) {
617 if (strcmp(q->name, name))
618 continue;
619 if (unlikely(!crypto_tmpl_get(q)))
620 continue;
621
622 tmpl = q;
623 break;
624 }
625 up_read(&crypto_alg_sem);
626
627 return tmpl;
628 }
629
630 struct crypto_template *crypto_lookup_template(const char *name)
631 {
632 return try_then_request_module(__crypto_lookup_template(name),
633 "crypto-%s", name);
634 }
635 EXPORT_SYMBOL_GPL(crypto_lookup_template);
636
637 int crypto_register_instance(struct crypto_template *tmpl,
638 struct crypto_instance *inst)
639 {
640 struct crypto_larval *larval;
641 struct crypto_spawn *spawn;
642 u32 fips_internal = 0;
643 LIST_HEAD(algs_to_put);
644 int err;
645
646 err = crypto_check_alg(&inst->alg);
647 if (err)
648 return err;
649
650 inst->alg.cra_module = tmpl->module;
651 inst->alg.cra_flags |= CRYPTO_ALG_INSTANCE;
652
653 down_write(&crypto_alg_sem);
654
655 larval = ERR_PTR(-EAGAIN);
656 for (spawn = inst->spawns; spawn;) {
657 struct crypto_spawn *next;
658
659 if (spawn->dead)
660 goto unlock;
661
662 next = spawn->next;
663 spawn->inst = inst;
664 spawn->registered = true;
665
666 fips_internal |= spawn->alg->cra_flags;
667
668 crypto_mod_put(spawn->alg);
669
670 spawn = next;
671 }
672
673 inst->alg.cra_flags |= (fips_internal & CRYPTO_ALG_FIPS_INTERNAL);
674
675 larval = __crypto_register_alg(&inst->alg, &algs_to_put);
676 if (IS_ERR(larval))
677 goto unlock;
678 else if (larval)
679 larval->test_started = true;
680
681 hlist_add_head(&inst->list, &tmpl->instances);
682 inst->tmpl = tmpl;
683
684 unlock:
685 up_write(&crypto_alg_sem);
686
687 if (IS_ERR(larval))
688 return PTR_ERR(larval);
689 if (larval)
690 crypto_wait_for_test(larval);
691 crypto_remove_final(&algs_to_put);
692 return 0;
693 }
694 EXPORT_SYMBOL_GPL(crypto_register_instance);
695
696 void crypto_unregister_instance(struct crypto_instance *inst)
697 {
698 LIST_HEAD(list);
699
700 down_write(&crypto_alg_sem);
701
702 crypto_remove_spawns(&inst->alg, &list, NULL);
703 crypto_remove_instance(inst, &list);
704
705 up_write(&crypto_alg_sem);
706
707 crypto_remove_final(&list);
708 }
709 EXPORT_SYMBOL_GPL(crypto_unregister_instance);
710
711 int crypto_grab_spawn(struct crypto_spawn *spawn, struct crypto_instance *inst,
712 const char *name, u32 type, u32 mask)
713 {
714 struct crypto_alg *alg;
715 int err = -EAGAIN;
716
717 if (WARN_ON_ONCE(inst == NULL))
718 return -EINVAL;
719
720 /* Allow the result of crypto_attr_alg_name() to be passed directly */
721 if (IS_ERR(name))
722 return PTR_ERR(name);
723
724 alg = crypto_find_alg(name, spawn->frontend,
725 type | CRYPTO_ALG_FIPS_INTERNAL, mask);
726 if (IS_ERR(alg))
727 return PTR_ERR(alg);
728
729 down_write(&crypto_alg_sem);
730 if (!crypto_is_moribund(alg)) {
731 list_add(&spawn->list, &alg->cra_users);
732 spawn->alg = alg;
733 spawn->mask = mask;
734 spawn->next = inst->spawns;
735 inst->spawns = spawn;
736 inst->alg.cra_flags |=
737 (alg->cra_flags & CRYPTO_ALG_INHERITED_FLAGS);
738 err = 0;
739 }
740 up_write(&crypto_alg_sem);
741 if (err)
742 crypto_mod_put(alg);
743 return err;
744 }
745 EXPORT_SYMBOL_GPL(crypto_grab_spawn);
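/*
 * crypto_grab_spawn() is normally reached through a type-specific wrapper
 * that fills in spawn->frontend first.  A sketch of such a wrapper (modelled
 * on the skcipher one, shown here only for illustration):
 *
 *	int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn,
 *				 struct crypto_instance *inst,
 *				 const char *name, u32 type, u32 mask)
 *	{
 *		spawn->base.frontend = &crypto_skcipher_type;
 *		return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
 *	}
 */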
746
747 void crypto_drop_spawn(struct crypto_spawn *spawn)
748 {
749 if (!spawn->alg) /* not yet initialized? */
750 return;
751
752 down_write(&crypto_alg_sem);
753 if (!spawn->dead)
754 list_del(&spawn->list);
755 up_write(&crypto_alg_sem);
756
757 if (!spawn->registered)
758 crypto_mod_put(spawn->alg);
759 }
760 EXPORT_SYMBOL_GPL(crypto_drop_spawn);
761
762 static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn)
763 {
764 struct crypto_alg *alg = ERR_PTR(-EAGAIN);
765 struct crypto_alg *target;
766 bool shoot = false;
767
768 down_read(&crypto_alg_sem);
769 if (!spawn->dead) {
770 alg = spawn->alg;
771 if (!crypto_mod_get(alg)) {
772 target = crypto_alg_get(alg);
773 shoot = true;
774 alg = ERR_PTR(-EAGAIN);
775 }
776 }
777 up_read(&crypto_alg_sem);
778
779 if (shoot) {
780 crypto_shoot_alg(target);
781 crypto_alg_put(target);
782 }
783
784 return alg;
785 }
786
787 struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
788 u32 mask)
789 {
790 struct crypto_alg *alg;
791 struct crypto_tfm *tfm;
792
793 alg = crypto_spawn_alg(spawn);
794 if (IS_ERR(alg))
795 return ERR_CAST(alg);
796
797 tfm = ERR_PTR(-EINVAL);
798 if (unlikely((alg->cra_flags ^ type) & mask))
799 goto out_put_alg;
800
801 tfm = __crypto_alloc_tfm(alg, type, mask);
802 if (IS_ERR(tfm))
803 goto out_put_alg;
804
805 return tfm;
806
807 out_put_alg:
808 crypto_mod_put(alg);
809 return tfm;
810 }
811 EXPORT_SYMBOL_GPL(crypto_spawn_tfm);
812
813 void *crypto_spawn_tfm2(struct crypto_spawn *spawn)
814 {
815 struct crypto_alg *alg;
816 struct crypto_tfm *tfm;
817
818 alg = crypto_spawn_alg(spawn);
819 if (IS_ERR(alg))
820 return ERR_CAST(alg);
821
822 tfm = crypto_create_tfm(alg, spawn->frontend);
823 if (IS_ERR(tfm))
824 goto out_put_alg;
825
826 return tfm;
827
828 out_put_alg:
829 crypto_mod_put(alg);
830 return tfm;
831 }
832 EXPORT_SYMBOL_GPL(crypto_spawn_tfm2);
833
834 int crypto_register_notifier(struct notifier_block *nb)
835 {
836 return blocking_notifier_chain_register(&crypto_chain, nb);
837 }
838 EXPORT_SYMBOL_GPL(crypto_register_notifier);
839
840 int crypto_unregister_notifier(struct notifier_block *nb)
841 {
842 return blocking_notifier_chain_unregister(&crypto_chain, nb);
843 }
844 EXPORT_SYMBOL_GPL(crypto_unregister_notifier);
845
846 struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb)
847 {
848 struct rtattr *rta = tb[0];
849 struct crypto_attr_type *algt;
850
851 if (!rta)
852 return ERR_PTR(-ENOENT);
853 if (RTA_PAYLOAD(rta) < sizeof(*algt))
854 return ERR_PTR(-EINVAL);
855 if (rta->rta_type != CRYPTOA_TYPE)
856 return ERR_PTR(-EINVAL);
857
858 algt = RTA_DATA(rta);
859
860 return algt;
861 }
862 EXPORT_SYMBOL_GPL(crypto_get_attr_type);
863
864 /**
865 * crypto_check_attr_type() - check algorithm type and compute inherited mask
866 * @tb: the template parameters
867 * @type: the algorithm type the template would be instantiated as
868 * @mask_ret: (output) the mask that should be passed to crypto_grab_*()
869 * to restrict the flags of any inner algorithms
870 *
871 * Validate that the algorithm type the user requested is compatible with the
872 * one the template would actually be instantiated as. E.g., if the user is
873 * doing crypto_alloc_shash("cbc(aes)", ...), this would return an error because
874 * the "cbc" template creates an "skcipher" algorithm, not an "shash" algorithm.
875 *
876 * Also compute the mask to use to restrict the flags of any inner algorithms.
877 *
878 * Return: 0 on success; -errno on failure
879 */
880 int crypto_check_attr_type(struct rtattr **tb, u32 type, u32 *mask_ret)
881 {
882 struct crypto_attr_type *algt;
883
884 algt = crypto_get_attr_type(tb);
885 if (IS_ERR(algt))
886 return PTR_ERR(algt);
887
888 if ((algt->type ^ type) & algt->mask)
889 return -EINVAL;
890
891 *mask_ret = crypto_algt_inherited_mask(algt);
892 return 0;
893 }
894 EXPORT_SYMBOL_GPL(crypto_check_attr_type);
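/*
 * Typical use from a template's ->create() callback (sketch, assuming the
 * template instantiates an skcipher):
 *
 *	u32 mask;
 *	int err;
 *
 *	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER, &mask);
 *	if (err)
 *		return err;
 *
 * The returned mask is then passed to crypto_grab_skcipher() (or another
 * crypto_grab_*() helper) when looking up the inner algorithm.
 */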
895
896 const char *crypto_attr_alg_name(struct rtattr *rta)
897 {
898 struct crypto_attr_alg *alga;
899
900 if (!rta)
901 return ERR_PTR(-ENOENT);
902 if (RTA_PAYLOAD(rta) < sizeof(*alga))
903 return ERR_PTR(-EINVAL);
904 if (rta->rta_type != CRYPTOA_ALG)
905 return ERR_PTR(-EINVAL);
906
907 alga = RTA_DATA(rta);
908 alga->name[CRYPTO_MAX_ALG_NAME - 1] = 0;
909
910 return alga->name;
911 }
912 EXPORT_SYMBOL_GPL(crypto_attr_alg_name);
913
914 int crypto_inst_setname(struct crypto_instance *inst, const char *name,
915 struct crypto_alg *alg)
916 {
917 if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name,
918 alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
919 return -ENAMETOOLONG;
920
921 if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s(%s)",
922 name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
923 return -ENAMETOOLONG;
924
925 return 0;
926 }
927 EXPORT_SYMBOL_GPL(crypto_inst_setname);
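/*
 * For example, crypto_inst_setname(inst, "hmac", alg) with an underlying
 * algorithm whose cra_name is "sha256" and cra_driver_name is
 * "sha256-generic" sets the instance names to "hmac(sha256)" and
 * "hmac(sha256-generic)", returning -ENAMETOOLONG if either result would
 * not fit in CRYPTO_MAX_ALG_NAME.
 */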
928
929 void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen)
930 {
931 INIT_LIST_HEAD(&queue->list);
932 queue->backlog = &queue->list;
933 queue->qlen = 0;
934 queue->max_qlen = max_qlen;
935 }
936 EXPORT_SYMBOL_GPL(crypto_init_queue);
937
938 int crypto_enqueue_request(struct crypto_queue *queue,
939 struct crypto_async_request *request)
940 {
941 int err = -EINPROGRESS;
942
943 if (unlikely(queue->qlen >= queue->max_qlen)) {
944 if (!(request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG)) {
945 err = -ENOSPC;
946 goto out;
947 }
948 err = -EBUSY;
949 if (queue->backlog == &queue->list)
950 queue->backlog = &request->list;
951 }
952
953 queue->qlen++;
954 list_add_tail(&request->list, &queue->list);
955
956 out:
957 return err;
958 }
959 EXPORT_SYMBOL_GPL(crypto_enqueue_request);
960
961 void crypto_enqueue_request_head(struct crypto_queue *queue,
962 struct crypto_async_request *request)
963 {
964 queue->qlen++;
965 list_add(&request->list, &queue->list);
966 }
967 EXPORT_SYMBOL_GPL(crypto_enqueue_request_head);
968
969 struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue)
970 {
971 struct list_head *request;
972
973 if (unlikely(!queue->qlen))
974 return NULL;
975
976 queue->qlen--;
977
978 if (queue->backlog != &queue->list)
979 queue->backlog = queue->backlog->next;
980
981 request = queue->list.next;
982 list_del(request);
983
984 return list_entry(request, struct crypto_async_request, list);
985 }
986 EXPORT_SYMBOL_GPL(crypto_dequeue_request);
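/*
 * Sketch of the usual consumer pattern (hypothetical driver fields, locking
 * simplified): the driver pulls the next request off the queue and, if a
 * previously backlogged request has now moved into the active region,
 * notifies its originator that it is in progress:
 *
 *	struct crypto_async_request *req, *backlog;
 *
 *	spin_lock_bh(&dd->lock);
 *	backlog = crypto_get_backlog(&dd->queue);
 *	req = crypto_dequeue_request(&dd->queue);
 *	spin_unlock_bh(&dd->lock);
 *
 *	if (backlog)
 *		backlog->complete(backlog, -EINPROGRESS);
 *	if (req)
 *		(process the request, then invoke req->complete() when done)
 */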
987
988 static inline void crypto_inc_byte(u8 *a, unsigned int size)
989 {
990 u8 *b = (a + size);
991 u8 c;
992
993 for (; size; size--) {
994 c = *--b + 1;
995 *b = c;
996 if (c)
997 break;
998 }
999 }
1000
1001 void crypto_inc(u8 *a, unsigned int size)
1002 {
1003 __be32 *b = (__be32 *)(a + size);
1004 u32 c;
1005
1006 if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
1007 IS_ALIGNED((unsigned long)b, __alignof__(*b)))
1008 for (; size >= 4; size -= 4) {
1009 c = be32_to_cpu(*--b) + 1;
1010 *b = cpu_to_be32(c);
1011 if (likely(c))
1012 return;
1013 }
1014
1015 crypto_inc_byte(a, size);
1016 }
1017 EXPORT_SYMBOL_GPL(crypto_inc);
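/*
 * Example: with a 4-byte big-endian counter a[] = { 0x00, 0x00, 0x00, 0xff },
 * crypto_inc(a, 4) yields { 0x00, 0x00, 0x01, 0x00 }; an all-0xff counter
 * wraps around to all zeroes.  This is the block counter update used by
 * e.g. CTR mode.
 */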
1018
1019 unsigned int crypto_alg_extsize(struct crypto_alg *alg)
1020 {
1021 return alg->cra_ctxsize +
1022 (alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1));
1023 }
1024 EXPORT_SYMBOL_GPL(crypto_alg_extsize);
1025
1026 int crypto_type_has_alg(const char *name, const struct crypto_type *frontend,
1027 u32 type, u32 mask)
1028 {
1029 int ret = 0;
1030 struct crypto_alg *alg = crypto_find_alg(name, frontend, type, mask);
1031
1032 if (!IS_ERR(alg)) {
1033 crypto_mod_put(alg);
1034 ret = 1;
1035 }
1036
1037 return ret;
1038 }
1039 EXPORT_SYMBOL_GPL(crypto_type_has_alg);
1040
1041 #ifdef CONFIG_CRYPTO_STATS
1042 void crypto_stats_init(struct crypto_alg *alg)
1043 {
1044 memset(&alg->stats, 0, sizeof(alg->stats));
1045 }
1046 EXPORT_SYMBOL_GPL(crypto_stats_init);
1047
1048 void crypto_stats_get(struct crypto_alg *alg)
1049 {
1050 crypto_alg_get(alg);
1051 }
1052 EXPORT_SYMBOL_GPL(crypto_stats_get);
1053
1054 void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg,
1055 int ret)
1056 {
1057 if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1058 atomic64_inc(&alg->stats.aead.err_cnt);
1059 } else {
1060 atomic64_inc(&alg->stats.aead.encrypt_cnt);
1061 atomic64_add(cryptlen, &alg->stats.aead.encrypt_tlen);
1062 }
1063 crypto_alg_put(alg);
1064 }
1065 EXPORT_SYMBOL_GPL(crypto_stats_aead_encrypt);
1066
1067 void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg,
1068 int ret)
1069 {
1070 if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1071 atomic64_inc(&alg->stats.aead.err_cnt);
1072 } else {
1073 atomic64_inc(&alg->stats.aead.decrypt_cnt);
1074 atomic64_add(cryptlen, &alg->stats.aead.decrypt_tlen);
1075 }
1076 crypto_alg_put(alg);
1077 }
1078 EXPORT_SYMBOL_GPL(crypto_stats_aead_decrypt);
1079
1080 void crypto_stats_akcipher_encrypt(unsigned int src_len, int ret,
1081 struct crypto_alg *alg)
1082 {
1083 if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1084 atomic64_inc(&alg->stats.akcipher.err_cnt);
1085 } else {
1086 atomic64_inc(&alg->stats.akcipher.encrypt_cnt);
1087 atomic64_add(src_len, &alg->stats.akcipher.encrypt_tlen);
1088 }
1089 crypto_alg_put(alg);
1090 }
1091 EXPORT_SYMBOL_GPL(crypto_stats_akcipher_encrypt);
1092
1093 void crypto_stats_akcipher_decrypt(unsigned int src_len, int ret,
1094 struct crypto_alg *alg)
1095 {
1096 if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1097 atomic64_inc(&alg->stats.akcipher.err_cnt);
1098 } else {
1099 atomic64_inc(&alg->stats.akcipher.decrypt_cnt);
1100 atomic64_add(src_len, &alg->stats.akcipher.decrypt_tlen);
1101 }
1102 crypto_alg_put(alg);
1103 }
1104 EXPORT_SYMBOL_GPL(crypto_stats_akcipher_decrypt);
1105
1106 void crypto_stats_akcipher_sign(int ret, struct crypto_alg *alg)
1107 {
1108 if (ret && ret != -EINPROGRESS && ret != -EBUSY)
1109 atomic64_inc(&alg->stats.akcipher.err_cnt);
1110 else
1111 atomic64_inc(&alg->stats.akcipher.sign_cnt);
1112 crypto_alg_put(alg);
1113 }
1114 EXPORT_SYMBOL_GPL(crypto_stats_akcipher_sign);
1115
1116 void crypto_stats_akcipher_verify(int ret, struct crypto_alg *alg)
1117 {
1118 if (ret && ret != -EINPROGRESS && ret != -EBUSY)
1119 atomic64_inc(&alg->stats.akcipher.err_cnt);
1120 else
1121 atomic64_inc(&alg->stats.akcipher.verify_cnt);
1122 crypto_alg_put(alg);
1123 }
1124 EXPORT_SYMBOL_GPL(crypto_stats_akcipher_verify);
1125
1126 void crypto_stats_compress(unsigned int slen, int ret, struct crypto_alg *alg)
1127 {
1128 if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1129 atomic64_inc(&alg->stats.compress.err_cnt);
1130 } else {
1131 atomic64_inc(&alg->stats.compress.compress_cnt);
1132 atomic64_add(slen, &alg->stats.compress.compress_tlen);
1133 }
1134 crypto_alg_put(alg);
1135 }
1136 EXPORT_SYMBOL_GPL(crypto_stats_compress);
1137
1138 void crypto_stats_decompress(unsigned int slen, int ret, struct crypto_alg *alg)
1139 {
1140 if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1141 atomic64_inc(&alg->stats.compress.err_cnt);
1142 } else {
1143 atomic64_inc(&alg->stats.compress.decompress_cnt);
1144 atomic64_add(slen, &alg->stats.compress.decompress_tlen);
1145 }
1146 crypto_alg_put(alg);
1147 }
1148 EXPORT_SYMBOL_GPL(crypto_stats_decompress);
1149
1150 void crypto_stats_ahash_update(unsigned int nbytes, int ret,
1151 struct crypto_alg *alg)
1152 {
1153 if (ret && ret != -EINPROGRESS && ret != -EBUSY)
1154 atomic64_inc(&alg->stats.hash.err_cnt);
1155 else
1156 atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
1157 crypto_alg_put(alg);
1158 }
1159 EXPORT_SYMBOL_GPL(crypto_stats_ahash_update);
1160
1161 void crypto_stats_ahash_final(unsigned int nbytes, int ret,
1162 struct crypto_alg *alg)
1163 {
1164 if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1165 atomic64_inc(&alg->stats.hash.err_cnt);
1166 } else {
1167 atomic64_inc(&alg->stats.hash.hash_cnt);
1168 atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
1169 }
1170 crypto_alg_put(alg);
1171 }
1172 EXPORT_SYMBOL_GPL(crypto_stats_ahash_final);
1173
1174 void crypto_stats_kpp_set_secret(struct crypto_alg *alg, int ret)
1175 {
1176 if (ret)
1177 atomic64_inc(&alg->stats.kpp.err_cnt);
1178 else
1179 atomic64_inc(&alg->stats.kpp.setsecret_cnt);
1180 crypto_alg_put(alg);
1181 }
1182 EXPORT_SYMBOL_GPL(crypto_stats_kpp_set_secret);
1183
1184 void crypto_stats_kpp_generate_public_key(struct crypto_alg *alg, int ret)
1185 {
1186 if (ret)
1187 atomic64_inc(&alg->stats.kpp.err_cnt);
1188 else
1189 atomic64_inc(&alg->stats.kpp.generate_public_key_cnt);
1190 crypto_alg_put(alg);
1191 }
1192 EXPORT_SYMBOL_GPL(crypto_stats_kpp_generate_public_key);
1193
1194 void crypto_stats_kpp_compute_shared_secret(struct crypto_alg *alg, int ret)
1195 {
1196 if (ret)
1197 atomic64_inc(&alg->stats.kpp.err_cnt);
1198 else
1199 atomic64_inc(&alg->stats.kpp.compute_shared_secret_cnt);
1200 crypto_alg_put(alg);
1201 }
1202 EXPORT_SYMBOL_GPL(crypto_stats_kpp_compute_shared_secret);
1203
1204 void crypto_stats_rng_seed(struct crypto_alg *alg, int ret)
1205 {
1206 if (ret && ret != -EINPROGRESS && ret != -EBUSY)
1207 atomic64_inc(&alg->stats.rng.err_cnt);
1208 else
1209 atomic64_inc(&alg->stats.rng.seed_cnt);
1210 crypto_alg_put(alg);
1211 }
1212 EXPORT_SYMBOL_GPL(crypto_stats_rng_seed);
1213
1214 void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen,
1215 int ret)
1216 {
1217 if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1218 atomic64_inc(&alg->stats.rng.err_cnt);
1219 } else {
1220 atomic64_inc(&alg->stats.rng.generate_cnt);
1221 atomic64_add(dlen, &alg->stats.rng.generate_tlen);
1222 }
1223 crypto_alg_put(alg);
1224 }
1225 EXPORT_SYMBOL_GPL(crypto_stats_rng_generate);
1226
1227 void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret,
1228 struct crypto_alg *alg)
1229 {
1230 if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1231 atomic64_inc(&alg->stats.cipher.err_cnt);
1232 } else {
1233 atomic64_inc(&alg->stats.cipher.encrypt_cnt);
1234 atomic64_add(cryptlen, &alg->stats.cipher.encrypt_tlen);
1235 }
1236 crypto_alg_put(alg);
1237 }
1238 EXPORT_SYMBOL_GPL(crypto_stats_skcipher_encrypt);
1239
1240 void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret,
1241 struct crypto_alg *alg)
1242 {
1243 if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1244 atomic64_inc(&alg->stats.cipher.err_cnt);
1245 } else {
1246 atomic64_inc(&alg->stats.cipher.decrypt_cnt);
1247 atomic64_add(cryptlen, &alg->stats.cipher.decrypt_tlen);
1248 }
1249 crypto_alg_put(alg);
1250 }
1251 EXPORT_SYMBOL_GPL(crypto_stats_skcipher_decrypt);
1252 #endif
1253
1254 static void __init crypto_start_tests(void)
1255 {
1256 if (IS_ENABLED(CONFIG_CRYPTO_MANAGER_DISABLE_TESTS))
1257 return;
1258
1259 for (;;) {
1260 struct crypto_larval *larval = NULL;
1261 struct crypto_alg *q;
1262
1263 down_write(&crypto_alg_sem);
1264
1265 list_for_each_entry(q, &crypto_alg_list, cra_list) {
1266 struct crypto_larval *l;
1267
1268 if (!crypto_is_larval(q))
1269 continue;
1270
1271 l = (void *)q;
1272
1273 if (!crypto_is_test_larval(l))
1274 continue;
1275
1276 if (l->test_started)
1277 continue;
1278
1279 l->test_started = true;
1280 larval = l;
1281 break;
1282 }
1283
1284 up_write(&crypto_alg_sem);
1285
1286 if (!larval)
1287 break;
1288
1289 crypto_wait_for_test(larval);
1290 }
1291
1292 set_crypto_boot_test_finished();
1293 }
1294
1295 static int __init crypto_algapi_init(void)
1296 {
1297 crypto_init_proc();
1298 crypto_start_tests();
1299 return 0;
1300 }
1301
1302 static void __exit crypto_algapi_exit(void)
1303 {
1304 crypto_exit_proc();
1305 }
1306
1307 /*
1308 * We run this at late_initcall so that all the built-in algorithms
1309 * have had a chance to register themselves first.
1310 */
1311 late_initcall(crypto_algapi_init);
1312 module_exit(crypto_algapi_exit);
1313
1314 MODULE_LICENSE("GPL");
1315 MODULE_DESCRIPTION("Cryptographic algorithms API");
1316 MODULE_SOFTDEP("pre: cryptomgr");
1317