// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/cryptouser.h>
#include <net/netlink.h>
#include <linux/compiler.h>

#include "internal.h"

static const struct crypto_type crypto_shash_type;

static int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
			   unsigned int keylen)
{
	return -ENOSYS;
}

/*
 * Check whether an shash algorithm has a setkey function.
 *
 * For CFI compatibility, this must not be an inline function. This is because
 * when CFI is enabled, modules won't get the same address for shash_no_setkey
 * (if it were exported, which inlining would require) as the core kernel will.
 */
bool crypto_shash_alg_has_setkey(struct shash_alg *alg)
{
	return alg->setkey != shash_no_setkey;
}
EXPORT_SYMBOL_GPL(crypto_shash_alg_has_setkey);

static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
				  unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned long absize;
	u8 *buffer, *alignbuffer;
	int err;

	absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
	buffer = kmalloc(absize, GFP_ATOMIC);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	err = shash->setkey(tfm, alignbuffer, keylen);
	/* Zero the temporary key copy before freeing it. */
	kzfree(buffer);
	return err;
}

static void shash_set_needkey(struct crypto_shash *tfm, struct shash_alg *alg)
{
	if (crypto_shash_alg_has_setkey(alg) &&
	    !(alg->base.cra_flags & CRYPTO_ALG_OPTIONAL_KEY))
		crypto_shash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
}

int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	int err;

	if ((unsigned long)key & alignmask)
		err = shash_setkey_unaligned(tfm, key, keylen);
	else
		err = shash->setkey(tfm, key, keylen);

	if (unlikely(err)) {
		shash_set_needkey(tfm, shash);
		return err;
	}

	crypto_shash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);
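
/*
 * Illustrative sketch (not part of the original file): typical keyed use of
 * crypto_shash_setkey() from a caller's point of view. The function name,
 * the "hmac(sha256)" algorithm choice and the error handling style are
 * assumptions made for this example only.
 */
static int __maybe_unused example_keyed_shash(const u8 *key, unsigned int keylen,
					      const u8 *data, unsigned int len,
					      u8 *out)
{
	struct crypto_shash *tfm;
	int err;

	tfm = crypto_alloc_shash("hmac(sha256)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/*
	 * A keyed tfm starts out with CRYPTO_TFM_NEED_KEY set; a successful
	 * setkey clears it, and crypto_shash_digest() rejects the tfm with
	 * -ENOKEY until then.
	 */
	err = crypto_shash_setkey(tfm, key, keylen);
	if (err)
		goto out_free;

	{
		/* Stack descriptor large enough for any registered shash. */
		SHASH_DESC_ON_STACK(desc, tfm);

		desc->tfm = tfm;
		err = crypto_shash_init(desc) ?:
		      crypto_shash_update(desc, data, len) ?:
		      crypto_shash_final(desc, out);
		shash_desc_zero(desc);
	}

out_free:
	crypto_free_shash(tfm);
	return err;
}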

static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned int unaligned_len = alignmask + 1 -
				     ((unsigned long)data & alignmask);
	/*
	 * We cannot count on __aligned() working for large values:
	 * https://patchwork.kernel.org/patch/9507697/
	 */
	u8 ubuf[MAX_ALGAPI_ALIGNMASK * 2];
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	if (WARN_ON(buf + unaligned_len > ubuf + sizeof(ubuf)))
		return -EINVAL;

	if (unaligned_len > len)
		unaligned_len = len;

	memcpy(buf, data, unaligned_len);
	err = shash->update(desc, buf, unaligned_len);
	memset(buf, 0, unaligned_len);

	return err ?:
	       shash->update(desc, data + unaligned_len, len - unaligned_len);
}

int crypto_shash_update(struct shash_desc *desc, const u8 *data,
			unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)data & alignmask)
		return shash_update_unaligned(desc, data, len);

	return shash->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);

static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned int ds = crypto_shash_digestsize(tfm);
	/*
	 * We cannot count on __aligned() working for large values:
	 * https://patchwork.kernel.org/patch/9507697/
	 */
	u8 ubuf[MAX_ALGAPI_ALIGNMASK + HASH_MAX_DIGESTSIZE];
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	if (WARN_ON(buf + ds > ubuf + sizeof(ubuf)))
		return -EINVAL;

	err = shash->final(desc, buf);
	if (err)
		goto out;

	memcpy(out, buf, ds);

out:
	/* Wipe the digest copy from the stack buffer. */
	memset(buf, 0, ds);
	return err;
}

int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)out & alignmask)
		return shash_final_unaligned(desc, out);

	return shash->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);

static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
				 unsigned int len, u8 *out)
{
	return crypto_shash_update(desc, data, len) ?:
	       crypto_shash_final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
		       unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_finup_unaligned(desc, data, len, out);

	return shash->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);

static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len, u8 *out)
{
	return crypto_shash_init(desc) ?:
	       crypto_shash_finup(desc, data, len, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (crypto_shash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_digest_unaligned(desc, data, len, out);

	return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);
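
/*
 * Illustrative sketch (not part of the original file): one-shot hashing with
 * crypto_shash_digest(). The function name and the "sha256" algorithm choice
 * are assumptions made for this example only.
 */
static int __maybe_unused example_oneshot_shash(const u8 *data, unsigned int len,
						u8 *out)
{
	struct crypto_shash *tfm;
	int err;

	tfm = crypto_alloc_shash("sha256", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	{
		/* Descriptor sized for any registered shash, on the stack. */
		SHASH_DESC_ON_STACK(desc, tfm);

		desc->tfm = tfm;
		err = crypto_shash_digest(desc, data, len, out);
		shash_desc_zero(desc);
	}

	crypto_free_shash(tfm);
	return err;
}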

static int shash_default_export(struct shash_desc *desc, void *out)
{
	memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_default_import(struct shash_desc *desc, const void *in)
{
	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

	return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return crypto_shash_init(desc);
}

int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);

static int shash_async_update(struct ahash_request *req)
{
	return shash_ahash_update(req, ahash_request_ctx(req));
}

static int shash_async_final(struct ahash_request *req)
{
	return crypto_shash_final(ahash_request_ctx(req), req->result);
}

int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	nbytes = crypto_hash_walk_first(req, &walk);
	if (!nbytes)
		return crypto_shash_final(desc, req->result);

	do {
		nbytes = crypto_hash_walk_last(&walk) ?
			 crypto_shash_finup(desc, walk.data, nbytes,
					    req->result) :
			 crypto_shash_update(desc, walk.data, nbytes);
		nbytes = crypto_hash_walk_done(&walk, nbytes);
	} while (nbytes > 0);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);

static int shash_async_finup(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return shash_ahash_finup(req, desc);
}

int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
	unsigned int nbytes = req->nbytes;
	struct scatterlist *sg;
	unsigned int offset;
	int err;

	if (nbytes &&
	    (sg = req->src, offset = sg->offset,
	     nbytes <= min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
		void *data;

		/* Fast path: the data sits in a single page, hash it directly. */
		data = kmap_atomic(sg_page(sg));
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		kunmap_atomic(data);
	} else
		err = crypto_shash_init(desc) ?:
		      shash_ahash_finup(req, desc);

	return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);

static int shash_async_digest(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return shash_ahash_digest(req, desc);
}

static int shash_async_export(struct ahash_request *req, void *out)
{
	return crypto_shash_export(ahash_request_ctx(req), out);
}

static int shash_async_import(struct ahash_request *req, const void *in)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return crypto_shash_import(desc, in);
}

static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}

int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	*ctx = shash;
	tfm->exit = crypto_exit_shash_ops_async;

	crt->init = shash_async_init;
	crt->update = shash_async_update;
	crt->final = shash_async_final;
	crt->finup = shash_async_finup;
	crt->digest = shash_async_digest;
	if (crypto_shash_alg_has_setkey(alg))
		crt->setkey = shash_async_setkey;

	crypto_ahash_set_flags(crt, crypto_shash_get_flags(shash) &
				    CRYPTO_TFM_NEED_KEY);

	crt->export = shash_async_export;
	crt->import = shash_async_import;

	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}

static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);
	struct shash_alg *alg = crypto_shash_alg(hash);

	hash->descsize = alg->descsize;

	shash_set_needkey(hash, alg);

	return 0;
}

#ifdef CONFIG_NET
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;
	struct shash_alg *salg = __crypto_shash_alg(alg);

	memset(&rhash, 0, sizeof(rhash));

	strscpy(rhash.type, "shash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = salg->digestsize;

	return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
}
#else
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	return -ENOSYS;
}
#endif

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type         : shash\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
}

static const struct crypto_type crypto_shash_type = {
	.extsize = crypto_alg_extsize,
	.init_tfm = crypto_shash_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
	.report = crypto_shash_report,
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};

struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);

static int shash_prepare_alg(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;

	if (alg->digestsize > HASH_MAX_DIGESTSIZE ||
	    alg->descsize > HASH_MAX_DESCSIZE ||
	    alg->statesize > HASH_MAX_STATESIZE)
		return -EINVAL;

	if ((alg->export && !alg->import) || (alg->import && !alg->export))
		return -EINVAL;

	base->cra_type = &crypto_shash_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

	/* Fill in default implementations for the optional hooks. */
	if (!alg->finup)
		alg->finup = shash_finup_unaligned;
	if (!alg->digest)
		alg->digest = shash_digest_unaligned;
	if (!alg->export) {
		alg->export = shash_default_export;
		alg->import = shash_default_import;
		alg->statesize = alg->descsize;
	}
	if (!alg->setkey)
		alg->setkey = shash_no_setkey;

	return 0;
}

int crypto_register_shash(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = shash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);
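
/*
 * Illustrative sketch (not part of the original file): roughly how a driver
 * would register a synchronous hash with crypto_register_shash(). The
 * algorithm itself, the example_* names and the trivial state layout are
 * hypothetical and exist only to show which fields shash_prepare_alg()
 * expects to be filled in.
 */
struct example_md_desc_ctx {
	u32 sum;
};

static int example_md_init(struct shash_desc *desc)
{
	struct example_md_desc_ctx *ctx = shash_desc_ctx(desc);

	ctx->sum = 0;
	return 0;
}

static int example_md_update(struct shash_desc *desc, const u8 *data,
			     unsigned int len)
{
	struct example_md_desc_ctx *ctx = shash_desc_ctx(desc);

	while (len--)
		ctx->sum = ctx->sum * 31 + *data++;
	return 0;
}

static int example_md_final(struct shash_desc *desc, u8 *out)
{
	struct example_md_desc_ctx *ctx = shash_desc_ctx(desc);

	/* A real algorithm would fix the output endianness here. */
	memcpy(out, &ctx->sum, sizeof(ctx->sum));
	return 0;
}

static struct shash_alg example_md_alg __maybe_unused = {
	.digestsize	= sizeof(u32),
	.init		= example_md_init,
	.update		= example_md_update,
	.final		= example_md_final,
	.descsize	= sizeof(struct example_md_desc_ctx),
	.base		= {
		.cra_name	 = "example-md",
		.cra_driver_name = "example-md-generic",
		.cra_blocksize	 = 1,
		.cra_module	 = THIS_MODULE,
	},
};

/*
 * A driver would then call crypto_register_shash(&example_md_alg) from its
 * module_init() and crypto_unregister_shash(&example_md_alg) from its
 * module_exit().
 */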

int crypto_unregister_shash(struct shash_alg *alg)
{
	return crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);

int crypto_register_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_shash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	/* Unwind the registrations that already succeeded. */
	for (--i; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_shashes);

int crypto_unregister_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = count - 1; i >= 0; --i) {
		ret = crypto_unregister_shash(&algs[i]);
		if (ret)
			pr_err("Failed to unregister %s %s: %d\n",
			       algs[i].base.cra_driver_name,
			       algs[i].base.cra_name, ret);
	}

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_unregister_shashes);

int shash_register_instance(struct crypto_template *tmpl,
			    struct shash_instance *inst)
{
	int err;

	err = shash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);

void shash_free_instance(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(shash_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_free_instance);

int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn,
			    struct shash_alg *alg,
			    struct crypto_instance *inst)
{
	return crypto_init_spawn2(&spawn->base, &alg->base, inst,
				  &crypto_shash_type);
}
EXPORT_SYMBOL_GPL(crypto_init_shash_spawn);

struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask);
	return IS_ERR(alg) ? ERR_CAST(alg) :
	       container_of(alg, struct shash_alg, base);
}
EXPORT_SYMBOL_GPL(shash_attr_alg);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");