// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <[email protected]>
 * Copyright (c) 2002 David S. Miller ([email protected])
 * Copyright (c) 2005 Herbert Xu <[email protected]>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <[email protected]>
 * and Nettle, by Niels Möller.
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/jump_label.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched/signal.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/completion.h>
#include "internal.h"

LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

#if IS_BUILTIN(CONFIG_CRYPTO_ALGAPI) && \
    !IS_ENABLED(CONFIG_CRYPTO_MANAGER_DISABLE_TESTS)
DEFINE_STATIC_KEY_FALSE(__crypto_boot_test_finished);
#endif

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg,
					     u32 type, u32 mask);
static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
					    u32 mask);
struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
	return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

void crypto_mod_put(struct crypto_alg *alg)
{
	struct module *module = alg->cra_module;

	crypto_alg_put(alg);
	module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);

static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
					      u32 mask)
{
	struct crypto_alg *q, *alg = NULL;
	int best = -2;	/* below any valid priority; larvals use -1 */

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		int exact, fuzzy;

		if (crypto_is_moribund(q))
			continue;

		if ((q->cra_flags ^ type) & mask)
			continue;

		exact = !strcmp(q->cra_driver_name, name);
		fuzzy = !strcmp(q->cra_name, name);
		if (!exact && !(fuzzy && q->cra_priority > best))
			continue;

		if (unlikely(!crypto_mod_get(q)))
			continue;

		best = q->cra_priority;
		if (alg)
			crypto_mod_put(alg);
		alg = q;

		if (exact)
			break;
	}

	return alg;
}
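
/*
 * Example (illustrative, not part of the original file): a request for
 * "sha256" (a cra_name) fuzzy-matches every SHA-256 implementation and
 * returns the highest-priority one, while a request for a driver name
 * such as "sha256-generic" matches exactly one implementation and ends
 * the scan immediately.
 */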

static void crypto_larval_destroy(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	BUG_ON(!crypto_is_larval(alg));
	if (!IS_ERR_OR_NULL(larval->adult))
		crypto_mod_put(larval->adult);
	kfree(larval);
}

struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
{
	struct crypto_larval *larval;

	larval = kzalloc(sizeof(*larval), GFP_KERNEL);
	if (!larval)
		return ERR_PTR(-ENOMEM);

	/* Keep only the type bits that the caller's mask will compare. */
	type &= ~CRYPTO_ALG_TYPE_MASK | (mask ?: CRYPTO_ALG_TYPE_MASK);

	larval->mask = mask;
	larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
	larval->alg.cra_priority = -1;
	larval->alg.cra_destroy = crypto_larval_destroy;

	strscpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
	init_completion(&larval->completion);

	return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);
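
/*
 * A "larval" is a temporary placeholder algorithm: it holds the slot for
 * @name while a lookup or instance construction is in flight, so that
 * concurrent requests for the same algorithm block on its completion
 * instead of racing to load it.  Once the real ("adult") algorithm is
 * registered, waiters are woken and handed it via larval->adult.
 */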

static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_larval *larval;

	larval = crypto_larval_alloc(name, type, mask);
	if (IS_ERR(larval))
		return ERR_CAST(larval);

	/* One reference for the list, one for the caller. */
	refcount_set(&larval->alg.cra_refcnt, 2);

	down_write(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	if (!alg) {
		alg = &larval->alg;
		list_add(&alg->cra_list, &crypto_alg_list);
	}
	up_write(&crypto_alg_sem);

	if (alg != &larval->alg) {
		/* Somebody else beat us to it; discard ours. */
		kfree(larval);
		if (crypto_is_larval(alg))
			alg = crypto_larval_wait(alg, type, mask);
	}

	return alg;
}
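
/*
 * Note the optimistic-insert pattern above: the larval is allocated
 * outside the lock and the lookup is redone under crypto_alg_sem; if
 * another thread inserted a matching entry in the meantime, our larval
 * is discarded and we wait on theirs instead.
 */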

static void crypto_larval_kill(struct crypto_larval *larval)
{
	bool unlinked;

	down_write(&crypto_alg_sem);
	unlinked = list_empty(&larval->alg.cra_list);
	if (!unlinked)
		list_del_init(&larval->alg.cra_list);
	up_write(&crypto_alg_sem);

	if (unlinked)
		return;

	complete_all(&larval->completion);
	crypto_alg_put(&larval->alg);
}

void crypto_schedule_test(struct crypto_larval *larval)
{
	int err;

	err = crypto_probing_notify(CRYPTO_MSG_ALG_REGISTER, larval->adult);
	WARN_ON_ONCE(err != NOTIFY_STOP);
}
EXPORT_SYMBOL_GPL(crypto_schedule_test);

static void crypto_start_test(struct crypto_larval *larval)
{
	if (!crypto_is_test_larval(larval))
		return;

	if (larval->test_started)
		return;

	down_write(&crypto_alg_sem);
	if (larval->test_started) {
		up_write(&crypto_alg_sem);
		return;
	}

	larval->test_started = true;
	up_write(&crypto_alg_sem);

	crypto_schedule_test(larval);
}

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg,
					     u32 type, u32 mask)
{
	struct crypto_larval *larval;
	long time_left;

again:
	larval = container_of(alg, struct crypto_larval, alg);

	if (!crypto_boot_test_finished())
		crypto_start_test(larval);

	time_left = wait_for_completion_killable_timeout(
		&larval->completion, 60 * HZ);

	alg = larval->adult;
	if (time_left < 0)
		alg = ERR_PTR(-EINTR);
	else if (!time_left) {
		if (crypto_is_test_larval(larval))
			crypto_larval_kill(larval);
		alg = ERR_PTR(-ETIMEDOUT);
	} else if (!alg) {
		/* The larval completed without an adult; look it up afresh. */
		alg = &larval->alg;
		alg = crypto_alg_lookup(alg->cra_name, type, mask) ?:
		      ERR_PTR(-EAGAIN);
	} else if (IS_ERR(alg))
		;
	else if (crypto_is_test_larval(larval) &&
		 !(alg->cra_flags & CRYPTO_ALG_TESTED))
		alg = ERR_PTR(-EAGAIN);
	else if (alg->cra_flags & CRYPTO_ALG_FIPS_INTERNAL)
		alg = ERR_PTR(-EAGAIN);
	else if (!crypto_mod_get(alg))
		alg = ERR_PTR(-EAGAIN);
	crypto_mod_put(&larval->alg);

	if (!IS_ERR(alg) && crypto_is_larval(alg))
		goto again;

	return alg;
}

static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
					    u32 mask)
{
	const u32 fips = CRYPTO_ALG_FIPS_INTERNAL;
	struct crypto_alg *alg;
	u32 test = 0;

	if (!((type | mask) & CRYPTO_ALG_TESTED))
		test |= CRYPTO_ALG_TESTED;

	down_read(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, (type | test) & ~fips,
				  (mask | test) & ~fips);
	if (alg) {
		if (((type | mask) ^ fips) & fips)
			mask |= fips;
		mask &= fips;

		if (!crypto_is_larval(alg) &&
		    ((type ^ alg->cra_flags) & mask)) {
			/* Algorithm is disallowed in FIPS mode. */
			crypto_mod_put(alg);
			alg = ERR_PTR(-ENOENT);
		}
	} else if (test) {
		alg = __crypto_alg_lookup(name, type, mask);
		if (alg && !crypto_is_larval(alg)) {
			/* Test failed */
			crypto_mod_put(alg);
			alg = ERR_PTR(-ELIBBAD);
		}
	}
	up_read(&crypto_alg_sem);

	return alg;
}
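
/*
 * Example (illustrative): a caller passing type = 0, mask = 0 neither sets
 * nor masks CRYPTO_ALG_TESTED, so the first lookup above implicitly demands
 * an algorithm that passed its self-tests.  If the only instance present
 * failed its self-test, the fallback lookup finds it and reports -ELIBBAD
 * rather than -ENOENT.
 */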

static struct crypto_alg *crypto_larval_lookup(const char *name, u32 type,
					       u32 mask)
{
	struct crypto_alg *alg;

	if (!name)
		return ERR_PTR(-ENOENT);

	type &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
	mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);

	alg = crypto_alg_lookup(name, type, mask);
	if (!alg && !(mask & CRYPTO_NOLOAD)) {
		request_module("crypto-%s", name);

		if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
		      CRYPTO_ALG_NEED_FALLBACK))
			request_module("crypto-%s-all", name);

		alg = crypto_alg_lookup(name, type, mask);
	}

	if (!IS_ERR_OR_NULL(alg) && crypto_is_larval(alg))
		alg = crypto_larval_wait(alg, type, mask);
	else if (alg)
		;
	else if (!(mask & CRYPTO_ALG_TESTED))
		alg = crypto_larval_add(name, type, mask);
	else
		alg = ERR_PTR(-ENOENT);

	return alg;
}
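
/*
 * Example (illustrative): modules advertise the algorithms they provide
 * with MODULE_ALIAS_CRYPTO(), e.g. MODULE_ALIAS_CRYPTO("sha256") expands
 * to the "crypto-sha256" alias that the request_module() call above
 * resolves.
 */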

int crypto_probing_notify(unsigned long val, void *v)
{
	int ok;

	ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	if (ok == NOTIFY_DONE) {
		/* Nobody was listening; load the crypto manager and retry. */
		request_module("cryptomgr");
		ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	}

	return ok;
}
EXPORT_SYMBOL_GPL(crypto_probing_notify);

struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_alg *larval;
	int ok;

	/*
	 * If the internal flag is set for a cipher, require a caller to
	 * invoke the cipher with the internal flag to use that cipher.
	 * Also, if a caller wants to allocate a cipher that may or may
	 * not be an internal cipher, use type | CRYPTO_ALG_INTERNAL and
	 * !(mask & CRYPTO_ALG_INTERNAL).
	 */
	if (!((type | mask) & CRYPTO_ALG_INTERNAL))
		mask |= CRYPTO_ALG_INTERNAL;

	larval = crypto_larval_lookup(name, type, mask);
	if (IS_ERR(larval) || !crypto_is_larval(larval))
		return larval;

	ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);

	if (ok == NOTIFY_STOP)
		alg = crypto_larval_wait(larval, type, mask);
	else {
		crypto_mod_put(larval);
		alg = ERR_PTR(-ENOENT);
	}
	crypto_larval_kill(container_of(larval, struct crypto_larval, alg));
	return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);
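
/*
 * Example (illustrative): internal algorithms such as the "__xts(aes)"
 * helpers wrapped by SIMD drivers set CRYPTO_ALG_INTERNAL; a plain
 * crypto_alg_mod_lookup("xts(aes)", 0, 0) therefore cannot resolve to
 * them, per the masking above.
 */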

static void crypto_exit_ops(struct crypto_tfm *tfm)
{
	const struct crypto_type *type = tfm->__crt_alg->cra_type;

	if (type && tfm->exit)
		tfm->exit(tfm);
}

static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = alg->cra_type;
	unsigned int len;

	len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
	if (type_obj)
		return len + type_obj->ctxsize(alg, type, mask);

	switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
	default:
		BUG();

	case CRYPTO_ALG_TYPE_CIPHER:
		len += crypto_cipher_ctxsize(alg);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		len += crypto_compress_ctxsize(alg);
		break;
	}

	return len;
}

void crypto_shoot_alg(struct crypto_alg *alg)
{
	down_write(&crypto_alg_sem);
	alg->cra_flags |= CRYPTO_ALG_DYING;
	up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);

struct crypto_tfm *__crypto_alloc_tfmgfp(struct crypto_alg *alg, u32 type,
					 u32 mask, gfp_t gfp)
{
	struct crypto_tfm *tfm;
	unsigned int tfm_size;
	int err = -ENOMEM;

	tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
	tfm = kzalloc(tfm_size, gfp);
	if (tfm == NULL)
		goto out_err;

	tfm->__crt_alg = alg;
	refcount_set(&tfm->refcnt, 1);

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(tfm);
out_err:
	tfm = ERR_PTR(err);
out:
	return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfmgfp);

struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
				      u32 mask)
{
	return __crypto_alloc_tfmgfp(alg, type, mask, GFP_KERNEL);
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

/*
 * crypto_alloc_base - Locate algorithm and allocate transform
 * @alg_name: Name of algorithm
 * @type: Type of algorithm
 * @mask: Mask for type comparison
 *
 * This function should not be used by new algorithm types.
 * Please use crypto_alloc_tfm instead.
 *
 * crypto_alloc_base() will first attempt to locate an already loaded
 * algorithm.  If that fails and the kernel supports dynamically loadable
 * modules, it will then attempt to load a module of the same name or
 * alias.  If that fails it will send a query to any loaded crypto manager
 * to construct an algorithm on the fly.  A refcount is grabbed on the
 * algorithm which is then associated with the new transform.
 *
 * The returned transform is of a non-determinate type.  Most people
 * should use one of the more specific allocation functions such as
 * crypto_alloc_skcipher().
 *
 * In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
	struct crypto_tfm *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_alg_mod_lookup(alg_name, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = __crypto_alloc_tfm(alg, type, mask);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);
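
/*
 * Example usage (an illustrative sketch for legacy callers; error paths
 * abbreviated):
 *
 *	struct crypto_tfm *tfm;
 *
 *	tfm = crypto_alloc_base("aes", CRYPTO_ALG_TYPE_CIPHER,
 *				CRYPTO_ALG_TYPE_MASK);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	...
 *	crypto_free_tfm(tfm);
 */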

static void *crypto_alloc_tfmmem(struct crypto_alg *alg,
				 const struct crypto_type *frontend, int node,
				 gfp_t gfp)
{
	struct crypto_tfm *tfm;
	unsigned int tfmsize;
	unsigned int total;
	char *mem;

	tfmsize = frontend->tfmsize;
	total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);

	mem = kzalloc_node(total, gfp, node);
	if (mem == NULL)
		return ERR_PTR(-ENOMEM);

	/* The generic tfm sits behind the frontend-specific wrapper. */
	tfm = (struct crypto_tfm *)(mem + tfmsize);
	tfm->__crt_alg = alg;
	tfm->node = node;
	refcount_set(&tfm->refcnt, 1);

	return mem;
}

void *crypto_create_tfm_node(struct crypto_alg *alg,
			     const struct crypto_type *frontend,
			     int node)
{
	struct crypto_tfm *tfm;
	char *mem;
	int err;

	mem = crypto_alloc_tfmmem(alg, frontend, node, GFP_KERNEL);
	if (IS_ERR(mem))
		goto out;

	tfm = (struct crypto_tfm *)(mem + frontend->tfmsize);

	err = frontend->init_tfm(tfm);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(mem);
	mem = ERR_PTR(err);
out:
	return mem;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm_node);

void *crypto_clone_tfm(const struct crypto_type *frontend,
		       struct crypto_tfm *otfm)
{
	struct crypto_alg *alg = otfm->__crt_alg;
	struct crypto_tfm *tfm;
	char *mem;

	mem = ERR_PTR(-ESTALE);
	if (unlikely(!crypto_mod_get(alg)))
		goto out;

	mem = crypto_alloc_tfmmem(alg, frontend, otfm->node, GFP_ATOMIC);
	if (IS_ERR(mem)) {
		crypto_mod_put(alg);
		goto out;
	}

	tfm = (struct crypto_tfm *)(mem + frontend->tfmsize);
	tfm->crt_flags = otfm->crt_flags;
	tfm->exit = otfm->exit;

out:
	return mem;
}
EXPORT_SYMBOL_GPL(crypto_clone_tfm);

struct crypto_alg *crypto_find_alg(const char *alg_name,
				   const struct crypto_type *frontend,
				   u32 type, u32 mask)
{
	if (frontend) {
		type &= frontend->maskclear;
		mask &= frontend->maskclear;
		type |= frontend->type;
		mask |= frontend->maskset;
	}

	return crypto_alg_mod_lookup(alg_name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_find_alg);

/*
 * crypto_alloc_tfm_node - Locate algorithm and allocate transform
 * @alg_name: Name of algorithm
 * @frontend: Frontend algorithm type
 * @type: Type of algorithm
 * @mask: Mask for type comparison
 * @node: NUMA node in which users desire to put requests, if node is
 *	  NUMA_NO_NODE, it means users have no special requirement.
 *
 * crypto_alloc_tfm() will first attempt to locate an already loaded
 * algorithm.  If that fails and the kernel supports dynamically loadable
 * modules, it will then attempt to load a module of the same name or
 * alias.  If that fails it will send a query to any loaded crypto manager
 * to construct an algorithm on the fly.  A refcount is grabbed on the
 * algorithm which is then associated with the new transform.
 *
 * The returned transform is of a non-determinate type.  Most people
 * should use one of the more specific allocation functions such as
 * crypto_alloc_skcipher().
 *
 * In case of error the return value is an error pointer.
 */
void *crypto_alloc_tfm_node(const char *alg_name,
			    const struct crypto_type *frontend, u32 type,
			    u32 mask, int node)
{
	void *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_find_alg(alg_name, frontend, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = crypto_create_tfm_node(alg, frontend, node);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm_node);
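
/*
 * Example (illustrative): the type-safe wrappers are thin shims over this
 * helper, e.g. crypto_alloc_skcipher("cbc(aes)", 0, 0) passes the skcipher
 * frontend and NUMA_NO_NODE down to crypto_alloc_tfm_node().
 */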

/*
 * crypto_destroy_tfm - Free crypto transform
 * @mem: Start of tfm slab
 * @tfm: Transform to free
 *
 * This function frees up the transform and any associated resources,
 * then drops the refcount on the associated algorithm.
 */
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
	struct crypto_alg *alg;

	if (IS_ERR_OR_NULL(mem))
		return;

	if (!refcount_dec_and_test(&tfm->refcnt))
		return;
	alg = tfm->__crt_alg;

	if (!tfm->exit && alg->cra_exit)
		alg->cra_exit(tfm);
	crypto_exit_ops(tfm);
	crypto_mod_put(alg);
	/* kfree_sensitive() zeroes the memory first, as it may hold keys. */
	kfree_sensitive(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);
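
/*
 * Example (illustrative): the typed free helpers reduce to this function;
 * crypto_free_tfm(tfm) is crypto_destroy_tfm(tfm, tfm), with @mem pointing
 * at the start of the allocation that embeds @tfm.
 */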

int crypto_has_alg(const char *name, u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);
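
/*
 * Example usage (illustrative): probe for an implementation before
 * committing to a code path:
 *
 *	if (crypto_has_alg("gcm(aes)", 0, 0))
 *		pr_info("gcm(aes) is available\n");
 */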

void crypto_req_done(void *data, int err)
{
	struct crypto_wait *wait = data;

	/*
	 * -EINPROGRESS is an intermediate notification (e.g. a backlogged
	 * request entering the queue), not final completion; keep waiting.
	 */
	if (err == -EINPROGRESS)
		return;

	wait->err = err;
	complete(&wait->completion);
}
EXPORT_SYMBOL_GPL(crypto_req_done);
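
/*
 * Example usage (an illustrative sketch of the standard synchronous-wait
 * pattern; "req" is an already prepared asynchronous request):
 *
 *	DECLARE_CRYPTO_WAIT(wait);
 *
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
 *				      CRYPTO_TFM_REQ_MAY_SLEEP,
 *				      crypto_req_done, &wait);
 *	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 */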

MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");