// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/cryptouser.h>
#include <net/netlink.h>
#include <linux/compiler.h>

#include "internal.h"

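/*
 * Largest alignmask an shash algorithm may declare; it bounds the size of
 * the on-stack bounce buffers used by the unaligned fallbacks below.
 */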
#define MAX_SHASH_ALIGNMASK 63

static const struct crypto_type crypto_shash_type;

int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
		    unsigned int keylen)
{
	return -ENOSYS;
}
EXPORT_SYMBOL_GPL(shash_no_setkey);

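/*
 * If the key is not aligned to the algorithm's alignmask, bounce it through
 * a freshly allocated buffer whose start is rounded up to the required
 * alignment. The copy is wiped with kfree_sensitive() so that key material
 * does not linger in freed memory.
 */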
static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
				  unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned long absize;
	u8 *buffer, *alignbuffer;
	int err;

	absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
	buffer = kmalloc(absize, GFP_ATOMIC);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	err = shash->setkey(tfm, alignbuffer, keylen);
	kfree_sensitive(buffer);
	return err;
}

static void shash_set_needkey(struct crypto_shash *tfm, struct shash_alg *alg)
{
	if (crypto_shash_alg_needs_key(alg))
		crypto_shash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
}

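/*
 * On setkey failure the tfm is marked NEED_KEY again, so that subsequent
 * hash operations are rejected until a valid key has been installed.
 */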
int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	int err;

	if ((unsigned long)key & alignmask)
		err = shash_setkey_unaligned(tfm, key, keylen);
	else
		err = shash->setkey(tfm, key, keylen);

	if (unlikely(err)) {
		shash_set_needkey(tfm, shash);
		return err;
	}

	crypto_shash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);

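/*
 * Process just enough leading bytes through an aligned on-stack bounce
 * buffer to realign @data, then feed the now-aligned remainder to the
 * algorithm directly. ubuf is deliberately oversized and aligned with
 * PTR_ALIGN() because __aligned() cannot be relied on for large values.
 */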
static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned int unaligned_len = alignmask + 1 -
				     ((unsigned long)data & alignmask);
	/*
	 * We cannot count on __aligned() working for large values:
	 * https://patchwork.kernel.org/patch/9507697/
	 */
	u8 ubuf[MAX_SHASH_ALIGNMASK * 2];
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	if (WARN_ON(buf + unaligned_len > ubuf + sizeof(ubuf)))
		return -EINVAL;

	if (unaligned_len > len)
		unaligned_len = len;

	memcpy(buf, data, unaligned_len);
	err = shash->update(desc, buf, unaligned_len);
	memset(buf, 0, unaligned_len);

	return err ?:
	       shash->update(desc, data + unaligned_len, len - unaligned_len);
}

int crypto_shash_update(struct shash_desc *desc, const u8 *data,
			unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)data & alignmask)
		return shash_update_unaligned(desc, data, len);

	return shash->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);

static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned int ds = crypto_shash_digestsize(tfm);
	/*
	 * We cannot count on __aligned() working for large values:
	 * https://patchwork.kernel.org/patch/9507697/
	 */
	u8 ubuf[MAX_SHASH_ALIGNMASK + HASH_MAX_DIGESTSIZE];
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	if (WARN_ON(buf + ds > ubuf + sizeof(ubuf)))
		return -EINVAL;

	err = shash->final(desc, buf);
	if (err)
		goto out;

	memcpy(out, buf, ds);

out:
	memset(buf, 0, ds);
	return err;
}

int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)out & alignmask)
		return shash_final_unaligned(desc, out);

	return shash->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);

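/*
 * Minimal usage sketch for the init/update/final interface (illustrative
 * only; "sha256", the digest size and the error handling are assumptions,
 * not part of this file):
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("sha256", 0, 0);
 *	SHASH_DESC_ON_STACK(desc, tfm);
 *	u8 out[32];	// SHA-256 digest size
 *
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	desc->tfm = tfm;
 *	if (crypto_shash_init(desc) ?:
 *	    crypto_shash_update(desc, data, len) ?:
 *	    crypto_shash_final(desc, out))
 *		pr_err("hashing failed\n");
 *	shash_desc_zero(desc);
 *	crypto_free_shash(tfm);
 */
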
static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
				 unsigned int len, u8 *out)
{
	return crypto_shash_update(desc, data, len) ?:
	       crypto_shash_final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
		       unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_finup_unaligned(desc, data, len, out);

	return shash->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);

static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len, u8 *out)
{
	return crypto_shash_init(desc) ?:
	       crypto_shash_finup(desc, data, len, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (crypto_shash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_digest_unaligned(desc, data, len, out);

	return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);

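/*
 * One-shot convenience wrapper: builds a descriptor on the stack, digests
 * @data in a single call and wipes the descriptor before returning.
 */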
int crypto_shash_tfm_digest(struct crypto_shash *tfm, const u8 *data,
			    unsigned int len, u8 *out)
{
	SHASH_DESC_ON_STACK(desc, tfm);
	int err;

	desc->tfm = tfm;

	err = crypto_shash_digest(desc, data, len, out);

	shash_desc_zero(desc);

	return err;
}
EXPORT_SYMBOL_GPL(crypto_shash_tfm_digest);

static int shash_default_export(struct shash_desc *desc, void *out)
{
	memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_default_import(struct shash_desc *desc, const void *in)
{
	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

	return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return crypto_shash_init(desc);
}

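/*
 * Walk the ahash request's scatterlist and feed each mapped chunk to the
 * underlying shash. Any error from crypto_shash_update() is handed to
 * crypto_hash_walk_done(), which unwinds the walk and returns it.
 */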
int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);

static int shash_async_update(struct ahash_request *req)
{
	return shash_ahash_update(req, ahash_request_ctx(req));
}

static int shash_async_final(struct ahash_request *req)
{
	return crypto_shash_final(ahash_request_ctx(req), req->result);
}

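/*
 * Like shash_ahash_update(), but the last chunk of the walk goes through
 * crypto_shash_finup() so that update and final can be fused when the
 * algorithm provides an optimized finup.
 */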
int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	nbytes = crypto_hash_walk_first(req, &walk);
	if (!nbytes)
		return crypto_shash_final(desc, req->result);

	do {
		nbytes = crypto_hash_walk_last(&walk) ?
			 crypto_shash_finup(desc, walk.data, nbytes,
					    req->result) :
			 crypto_shash_update(desc, walk.data, nbytes);
		nbytes = crypto_hash_walk_done(&walk, nbytes);
	} while (nbytes > 0);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);

static int shash_async_finup(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return shash_ahash_finup(req, desc);
}

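/*
 * Fast path: if the whole input lives within a single page of the first
 * scatterlist entry, map it and digest it in one call. Otherwise fall back
 * to an init followed by a walk-based finup.
 */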
int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
	unsigned int nbytes = req->nbytes;
	struct scatterlist *sg;
	unsigned int offset;
	int err;

	if (nbytes &&
	    (sg = req->src, offset = sg->offset,
	     nbytes <= min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
		void *data;

		data = kmap_local_page(sg_page(sg));
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		kunmap_local(data);
	} else
		err = crypto_shash_init(desc) ?:
		      shash_ahash_finup(req, desc);

	return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);

static int shash_async_digest(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return shash_ahash_digest(req, desc);
}

static int shash_async_export(struct ahash_request *req, void *out)
{
	return crypto_shash_export(ahash_request_ctx(req), out);
}

static int shash_async_import(struct ahash_request *req, const void *in)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return crypto_shash_import(desc, in);
}

static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}

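/*
 * Present a synchronous shash as an asynchronous ahash: allocate an
 * internal shash tfm, store it in the ahash context and route all ahash
 * entry points through the shash_async_* adapters above.
 */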
int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	*ctx = shash;
	tfm->exit = crypto_exit_shash_ops_async;

	crt->init = shash_async_init;
	crt->update = shash_async_update;
	crt->final = shash_async_final;
	crt->finup = shash_async_finup;
	crt->digest = shash_async_digest;
	if (crypto_shash_alg_has_setkey(alg))
		crt->setkey = shash_async_setkey;

	crypto_ahash_set_flags(crt, crypto_shash_get_flags(shash) &
				    CRYPTO_TFM_NEED_KEY);

	crt->export = shash_async_export;
	crt->import = shash_async_import;

	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}

static void crypto_shash_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);
	struct shash_alg *alg = crypto_shash_alg(hash);

	alg->exit_tfm(hash);
}

static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);
	struct shash_alg *alg = crypto_shash_alg(hash);
	int err;

	hash->descsize = alg->descsize;

	shash_set_needkey(hash, alg);

	if (alg->exit_tfm)
		tfm->exit = crypto_shash_exit_tfm;

	if (!alg->init_tfm)
		return 0;

	err = alg->init_tfm(hash);
	if (err)
		return err;

	/* ->init_tfm() may have increased the descsize. */
	if (WARN_ON_ONCE(hash->descsize > HASH_MAX_DESCSIZE)) {
		if (alg->exit_tfm)
			alg->exit_tfm(hash);
		return -EINVAL;
	}

	return 0;
}

static void crypto_shash_free_instance(struct crypto_instance *inst)
{
	struct shash_instance *shash = shash_instance(inst);

	shash->free(shash);
}

#ifdef CONFIG_NET
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;
	struct shash_alg *salg = __crypto_shash_alg(alg);

	memset(&rhash, 0, sizeof(rhash));

	strscpy(rhash.type, "shash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = salg->digestsize;

	return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
}
#else
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	return -ENOSYS;
}
#endif

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type         : shash\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
}

static const struct crypto_type crypto_shash_type = {
	.extsize = crypto_alg_extsize,
	.init_tfm = crypto_shash_init_tfm,
	.free = crypto_shash_free_instance,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
	.report = crypto_shash_report,
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};

int crypto_grab_shash(struct crypto_shash_spawn *spawn,
		      struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask)
{
	spawn->base.frontend = &crypto_shash_type;
	return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_shash);

struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);

int crypto_has_shash(const char *alg_name, u32 type, u32 mask)
{
	return crypto_type_has_alg(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_has_shash);

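/*
 * Validate an algorithm's declared sizes and fill in defaults for the
 * optional ops: a generic finup/digest built from update and final, and
 * export/import that simply copy the descriptor context.
 */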
static int shash_prepare_alg(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;

	if (alg->digestsize > HASH_MAX_DIGESTSIZE ||
	    alg->descsize > HASH_MAX_DESCSIZE ||
	    alg->statesize > HASH_MAX_STATESIZE)
		return -EINVAL;

	if (base->cra_alignmask > MAX_SHASH_ALIGNMASK)
		return -EINVAL;

	if ((alg->export && !alg->import) || (alg->import && !alg->export))
		return -EINVAL;

	base->cra_type = &crypto_shash_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

	if (!alg->finup)
		alg->finup = shash_finup_unaligned;
	if (!alg->digest)
		alg->digest = shash_digest_unaligned;
	if (!alg->export) {
		alg->export = shash_default_export;
		alg->import = shash_default_import;
		alg->statesize = alg->descsize;
	}
	if (!alg->setkey)
		alg->setkey = shash_no_setkey;

	return 0;
}

int crypto_register_shash(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = shash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);

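/*
 * Registration sketch for crypto_register_shash() (illustrative; "foo",
 * its sizes and callbacks are hypothetical placeholders, not a real
 * algorithm):
 *
 *	static struct shash_alg foo_alg = {
 *		.digestsize	= 16,
 *		.descsize	= sizeof(struct foo_desc_ctx),
 *		.init		= foo_init,
 *		.update		= foo_update,
 *		.final		= foo_final,
 *		.base		= {
 *			.cra_name	 = "foo",
 *			.cra_driver_name = "foo-generic",
 *			.cra_blocksize	 = 64,
 *			.cra_module	 = THIS_MODULE,
 *		},
 *	};
 *
 *	err = crypto_register_shash(&foo_alg);
 */
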
void crypto_unregister_shash(struct shash_alg *alg)
{
	crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);

int crypto_register_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_shash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_shashes);

void crypto_unregister_shashes(struct shash_alg *algs, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shashes);

int shash_register_instance(struct crypto_template *tmpl,
			    struct shash_instance *inst)
{
	int err;

	if (WARN_ON(!inst->free))
		return -EINVAL;

	err = shash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);

void shash_free_singlespawn_instance(struct shash_instance *inst)
{
	crypto_drop_spawn(shash_instance_ctx(inst));
	kfree(inst);
}
EXPORT_SYMBOL_GPL(shash_free_singlespawn_instance);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");