// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched/signal.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/completion.h>
#include "internal.h"

LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg);

struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
        return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

void crypto_mod_put(struct crypto_alg *alg)
{
        struct module *module = alg->cra_module;

        crypto_alg_put(alg);
        module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);

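/*
 * A "larval" is a temporary placeholder algorithm.  It occupies a slot
 * in crypto_alg_list while the real ("adult") algorithm is still being
 * loaded or tested, so that concurrent lookups for the same name block
 * on its completion instead of racing to instantiate it again.
 */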
static inline int crypto_is_test_larval(struct crypto_larval *larval)
{
        return larval->alg.cra_driver_name[0];
}

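/*
 * Scan the algorithm list for @name under crypto_alg_sem.  An exact
 * match on ->cra_driver_name wins immediately; otherwise the highest
 * priority ->cra_name match is returned.  A reference is taken on the
 * winner via crypto_mod_get().
 */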
static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
                                              u32 mask)
{
        struct crypto_alg *q, *alg = NULL;
        int best = -2;

        list_for_each_entry(q, &crypto_alg_list, cra_list) {
                int exact, fuzzy;

                if (crypto_is_moribund(q))
                        continue;

                if ((q->cra_flags ^ type) & mask)
                        continue;

                if (crypto_is_larval(q) &&
                    !crypto_is_test_larval((struct crypto_larval *)q) &&
                    ((struct crypto_larval *)q)->mask != mask)
                        continue;

                exact = !strcmp(q->cra_driver_name, name);
                fuzzy = !strcmp(q->cra_name, name);
                if (!exact && !(fuzzy && q->cra_priority > best))
                        continue;

                if (unlikely(!crypto_mod_get(q)))
                        continue;

                best = q->cra_priority;
                if (alg)
                        crypto_mod_put(alg);
                alg = q;

                if (exact)
                        break;
        }

        return alg;
}

static void crypto_larval_destroy(struct crypto_alg *alg)
{
        struct crypto_larval *larval = (void *)alg;

        BUG_ON(!crypto_is_larval(alg));
        if (!IS_ERR_OR_NULL(larval->adult))
                crypto_mod_put(larval->adult);
        kfree(larval);
}

struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
{
        struct crypto_larval *larval;

        larval = kzalloc(sizeof(*larval), GFP_KERNEL);
        if (!larval)
                return ERR_PTR(-ENOMEM);

        larval->mask = mask;
        larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
        larval->alg.cra_priority = -1;
        larval->alg.cra_destroy = crypto_larval_destroy;

        strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
        init_completion(&larval->completion);

        return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);

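/*
 * Register a larval for @name, or return whatever registration won the
 * race: if another thread added a matching algorithm (or larval) first,
 * the surplus larval is freed and the existing entry is used instead.
 */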
static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
                                            u32 mask)
{
        struct crypto_alg *alg;
        struct crypto_larval *larval;

        larval = crypto_larval_alloc(name, type, mask);
        if (IS_ERR(larval))
                return ERR_CAST(larval);

        refcount_set(&larval->alg.cra_refcnt, 2);

        down_write(&crypto_alg_sem);
        alg = __crypto_alg_lookup(name, type, mask);
        if (!alg) {
                alg = &larval->alg;
                list_add(&alg->cra_list, &crypto_alg_list);
        }
        up_write(&crypto_alg_sem);

        if (alg != &larval->alg) {
                kfree(larval);
                if (crypto_is_larval(alg))
                        alg = crypto_larval_wait(alg);
        }

        return alg;
}

void crypto_larval_kill(struct crypto_alg *alg)
{
        struct crypto_larval *larval = (void *)alg;

        down_write(&crypto_alg_sem);
        list_del(&alg->cra_list);
        up_write(&crypto_alg_sem);
        complete_all(&larval->completion);
        crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_larval_kill);

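/*
 * Wait (killably, for up to 60 seconds) for a larval to mature into a
 * real algorithm, then drop the larval reference and return the adult
 * with a new reference, or an ERR_PTR on timeout/interruption/failure.
 */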
static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
        struct crypto_larval *larval = (void *)alg;
        long timeout;

        timeout = wait_for_completion_killable_timeout(
                &larval->completion, 60 * HZ);

        alg = larval->adult;
        if (timeout < 0)
                alg = ERR_PTR(-EINTR);
        else if (!timeout)
                alg = ERR_PTR(-ETIMEDOUT);
        else if (!alg)
                alg = ERR_PTR(-ENOENT);
        else if (IS_ERR(alg))
                ;
        else if (crypto_is_test_larval(larval) &&
                 !(alg->cra_flags & CRYPTO_ALG_TESTED))
                alg = ERR_PTR(-EAGAIN);
        else if (!crypto_mod_get(alg))
                alg = ERR_PTR(-EAGAIN);
        crypto_mod_put(&larval->alg);

        return alg;
}

static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
                                            u32 mask)
{
        struct crypto_alg *alg;
        u32 test = 0;

        if (!((type | mask) & CRYPTO_ALG_TESTED))
                test |= CRYPTO_ALG_TESTED;

        down_read(&crypto_alg_sem);
        alg = __crypto_alg_lookup(name, type | test, mask | test);
        if (!alg && test) {
                alg = __crypto_alg_lookup(name, type, mask);
                if (alg && !crypto_is_larval(alg)) {
                        /* Test failed */
                        crypto_mod_put(alg);
                        alg = ERR_PTR(-ELIBBAD);
                }
        }
        up_read(&crypto_alg_sem);

        return alg;
}

static struct crypto_alg *crypto_larval_lookup(const char *name, u32 type,
                                               u32 mask)
{
        struct crypto_alg *alg;

        if (!name)
                return ERR_PTR(-ENOENT);

        type &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
        mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);

        alg = crypto_alg_lookup(name, type, mask);
        if (!alg && !(mask & CRYPTO_NOLOAD)) {
                request_module("crypto-%s", name);

                if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
                      CRYPTO_ALG_NEED_FALLBACK))
                        request_module("crypto-%s-all", name);

                alg = crypto_alg_lookup(name, type, mask);
        }

        if (!IS_ERR_OR_NULL(alg) && crypto_is_larval(alg))
                alg = crypto_larval_wait(alg);
        else if (!alg)
                alg = crypto_larval_add(name, type, mask);

        return alg;
}

int crypto_probing_notify(unsigned long val, void *v)
{
        int ok;

        ok = blocking_notifier_call_chain(&crypto_chain, val, v);
        if (ok == NOTIFY_DONE) {
                request_module("cryptomgr");
                ok = blocking_notifier_call_chain(&crypto_chain, val, v);
        }

        return ok;
}
EXPORT_SYMBOL_GPL(crypto_probing_notify);

struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
        struct crypto_alg *alg;
        struct crypto_alg *larval;
        int ok;

        /*
         * If the internal flag is set for a cipher, require a caller
         * to invoke the cipher with the internal flag to use that cipher.
         * Also, if a caller wants to allocate a cipher that may or may
         * not be an internal cipher, use type | CRYPTO_ALG_INTERNAL and
         * !(mask & CRYPTO_ALG_INTERNAL).
         */
        if (!((type | mask) & CRYPTO_ALG_INTERNAL))
                mask |= CRYPTO_ALG_INTERNAL;

        larval = crypto_larval_lookup(name, type, mask);
        if (IS_ERR(larval) || !crypto_is_larval(larval))
                return larval;

        ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);

        if (ok == NOTIFY_STOP)
                alg = crypto_larval_wait(larval);
        else {
                crypto_mod_put(larval);
                alg = ERR_PTR(-ENOENT);
        }
        crypto_larval_kill(larval);
        return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);
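
/*
 * A minimal usage sketch of the CRYPTO_ALG_INTERNAL convention described
 * above (illustrative only; most callers go through a typed front end
 * rather than calling this lookup directly):
 *
 *	// Normal caller: internal-only implementations are masked out.
 *	alg = crypto_alg_mod_lookup("xts(aes)", 0, 0);
 *
 *	// Caller willing to accept an internal implementation as well:
 *	alg = crypto_alg_mod_lookup("xts(aes)", CRYPTO_ALG_INTERNAL, 0);
 */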

static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
        const struct crypto_type *type_obj = tfm->__crt_alg->cra_type;

        if (type_obj)
                return type_obj->init(tfm, type, mask);
        return 0;
}

static void crypto_exit_ops(struct crypto_tfm *tfm)
{
        const struct crypto_type *type = tfm->__crt_alg->cra_type;

        if (type && tfm->exit)
                tfm->exit(tfm);
}

static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
        const struct crypto_type *type_obj = alg->cra_type;
        unsigned int len;

        len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
        if (type_obj)
                return len + type_obj->ctxsize(alg, type, mask);

        switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
        default:
                BUG();

        case CRYPTO_ALG_TYPE_CIPHER:
                len += crypto_cipher_ctxsize(alg);
                break;

        case CRYPTO_ALG_TYPE_COMPRESS:
                len += crypto_compress_ctxsize(alg);
                break;
        }

        return len;
}

void crypto_shoot_alg(struct crypto_alg *alg)
{
        down_write(&crypto_alg_sem);
        alg->cra_flags |= CRYPTO_ALG_DYING;
        up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);

struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
                                      u32 mask)
{
        struct crypto_tfm *tfm = NULL;
        unsigned int tfm_size;
        int err = -ENOMEM;

        tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
        tfm = kzalloc(tfm_size, GFP_KERNEL);
        if (tfm == NULL)
                goto out_err;

        tfm->__crt_alg = alg;

        err = crypto_init_ops(tfm, type, mask);
        if (err)
                goto out_free_tfm;

        if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
                goto cra_init_failed;

        goto out;

cra_init_failed:
        crypto_exit_ops(tfm);
out_free_tfm:
        if (err == -EAGAIN)
                crypto_shoot_alg(alg);
        kfree(tfm);
out_err:
        tfm = ERR_PTR(err);
out:
        return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

/**
 * crypto_alloc_base - Locate algorithm and allocate transform
 * @alg_name: Name of algorithm
 * @type: Type of algorithm
 * @mask: Mask for type comparison
 *
 * This function should not be used by new algorithm types.
 * Please use crypto_alloc_tfm instead.
 *
 * crypto_alloc_base() will first attempt to locate an already loaded
 * algorithm. If that fails and the kernel supports dynamically loadable
 * modules, it will then attempt to load a module of the same name or
 * alias. If that fails it will send a query to any loaded crypto manager
 * to construct an algorithm on the fly. A refcount is grabbed on the
 * algorithm which is then associated with the new transform.
 *
 * The returned transform is of a non-determinate type. Most people
 * should use one of the more specific allocation functions such as
 * crypto_alloc_skcipher().
 *
 * In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
        struct crypto_tfm *tfm;
        int err;

        for (;;) {
                struct crypto_alg *alg;

                alg = crypto_alg_mod_lookup(alg_name, type, mask);
                if (IS_ERR(alg)) {
                        err = PTR_ERR(alg);
                        goto err;
                }

                tfm = __crypto_alloc_tfm(alg, type, mask);
                if (!IS_ERR(tfm))
                        return tfm;

                crypto_mod_put(alg);
                err = PTR_ERR(tfm);

err:
                if (err != -EAGAIN)
                        break;
                if (fatal_signal_pending(current)) {
                        err = -EINTR;
                        break;
                }
        }

        return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);
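
/*
 * A minimal legacy-style usage sketch for crypto_alloc_base() (new code
 * should prefer a typed allocator such as crypto_alloc_skcipher()):
 *
 *	struct crypto_tfm *tfm;
 *
 *	tfm = crypto_alloc_base("aes", CRYPTO_ALG_TYPE_CIPHER,
 *				CRYPTO_ALG_TYPE_MASK);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	...
 *	crypto_free_tfm(tfm);
 */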

void *crypto_create_tfm_node(struct crypto_alg *alg,
                             const struct crypto_type *frontend,
                             int node)
{
        char *mem;
        struct crypto_tfm *tfm = NULL;
        unsigned int tfmsize;
        unsigned int total;
        int err = -ENOMEM;

        tfmsize = frontend->tfmsize;
        total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);

        mem = kzalloc_node(total, GFP_KERNEL, node);
        if (mem == NULL)
                goto out_err;

        tfm = (struct crypto_tfm *)(mem + tfmsize);
        tfm->__crt_alg = alg;
        tfm->node = node;

        err = frontend->init_tfm(tfm);
        if (err)
                goto out_free_tfm;

        if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
                goto cra_init_failed;

        goto out;

cra_init_failed:
        crypto_exit_ops(tfm);
out_free_tfm:
        if (err == -EAGAIN)
                crypto_shoot_alg(alg);
        kfree(mem);
out_err:
        mem = ERR_PTR(err);
out:
        return mem;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm_node);

struct crypto_alg *crypto_find_alg(const char *alg_name,
                                   const struct crypto_type *frontend,
                                   u32 type, u32 mask)
{
        if (frontend) {
                type &= frontend->maskclear;
                mask &= frontend->maskclear;
                type |= frontend->type;
                mask |= frontend->maskset;
        }

        return crypto_alg_mod_lookup(alg_name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_find_alg);

/**
 * crypto_alloc_tfm_node - Locate algorithm and allocate transform
 * @alg_name: Name of algorithm
 * @frontend: Frontend algorithm type
 * @type: Type of algorithm
 * @mask: Mask for type comparison
 * @node: NUMA node in which users desire to put requests, if node is
 *        NUMA_NO_NODE, it means users have no special requirement.
 *
 * crypto_alloc_tfm() will first attempt to locate an already loaded
 * algorithm. If that fails and the kernel supports dynamically loadable
 * modules, it will then attempt to load a module of the same name or
 * alias. If that fails it will send a query to any loaded crypto manager
 * to construct an algorithm on the fly. A refcount is grabbed on the
 * algorithm which is then associated with the new transform.
 *
 * The returned transform is of a non-determinate type. Most people
 * should use one of the more specific allocation functions such as
 * crypto_alloc_skcipher().
 *
 * In case of error the return value is an error pointer.
 */
void *crypto_alloc_tfm_node(const char *alg_name,
                            const struct crypto_type *frontend, u32 type,
                            u32 mask, int node)
{
        void *tfm;
        int err;

        for (;;) {
                struct crypto_alg *alg;

                alg = crypto_find_alg(alg_name, frontend, type, mask);
                if (IS_ERR(alg)) {
                        err = PTR_ERR(alg);
                        goto err;
                }

                tfm = crypto_create_tfm_node(alg, frontend, node);
                if (!IS_ERR(tfm))
                        return tfm;

                crypto_mod_put(alg);
                err = PTR_ERR(tfm);

err:
                if (err != -EAGAIN)
                        break;
                if (fatal_signal_pending(current)) {
                        err = -EINTR;
                        break;
                }
        }

        return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm_node);
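
/*
 * Typed front ends are thin wrappers around this helper; a hedged
 * NUMA-aware sketch using one such wrapper (crypto_alloc_acomp_node(),
 * assuming the acomp API is available in this tree):
 *
 *	struct crypto_acomp *acomp;
 *
 *	acomp = crypto_alloc_acomp_node("lz4", 0, 0, NUMA_NO_NODE);
 *	if (IS_ERR(acomp))
 *		return PTR_ERR(acomp);
 */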

/**
 * crypto_destroy_tfm - Free crypto transform
 * @mem: Start of tfm slab
 * @tfm: Transform to free
 *
 * This function frees up the transform and any associated resources,
 * then drops the refcount on the associated algorithm.
 */
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
        struct crypto_alg *alg;

        if (unlikely(!mem))
                return;

        alg = tfm->__crt_alg;

        if (!tfm->exit && alg->cra_exit)
                alg->cra_exit(tfm);
        crypto_exit_ops(tfm);
        crypto_mod_put(alg);
        kfree_sensitive(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);
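
/*
 * Callers normally reach this through a typed wrapper; a sketch of the
 * usual allocate/free pairing with the shash front end:
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("sha256", 0, 0);
 *
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	...
 *	crypto_free_shash(tfm);	// ends up in crypto_destroy_tfm()
 */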

int crypto_has_alg(const char *name, u32 type, u32 mask)
{
        int ret = 0;
        struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

        if (!IS_ERR(alg)) {
                crypto_mod_put(alg);
                ret = 1;
        }

        return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);
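
/*
 * Availability-probe sketch (note that, like an allocation, this may
 * trigger module auto-loading):
 *
 *	if (!crypto_has_alg("gcm(aes)", 0, 0))
 *		return -ENOENT;
 */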

void crypto_req_done(struct crypto_async_request *req, int err)
{
        struct crypto_wait *wait = req->data;

        if (err == -EINPROGRESS)
                return;

        wait->err = err;
        complete(&wait->completion);
}
EXPORT_SYMBOL_GPL(crypto_req_done);
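
/*
 * This callback pairs with DECLARE_CRYPTO_WAIT()/crypto_wait_req() from
 * <linux/crypto.h>; a typical synchronous-wait sketch:
 *
 *	DECLARE_CRYPTO_WAIT(wait);
 *
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				      crypto_req_done, &wait);
 *	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 */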

MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");
MODULE_SOFTDEP("pre: cryptomgr");