/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */

#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/cryptouser.h>
#include <net/netlink.h>

static const struct crypto_type crypto_shash_type;

static int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
			   unsigned int keylen)
{
	return -ENOSYS;
}

/*
 * Check whether an shash algorithm has a setkey function.
 *
 * For CFI compatibility, this must not be an inline function.  This is because
 * when CFI is enabled, modules won't get the same address for shash_no_setkey
 * (if it were exported, which inlining would require) as the core kernel will.
 */
bool crypto_shash_alg_has_setkey(struct shash_alg *alg)
{
	return alg->setkey != shash_no_setkey;
}
EXPORT_SYMBOL_GPL(crypto_shash_alg_has_setkey);

static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
				  unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned long absize;
	u8 *buffer, *alignbuffer;
	int err;

	absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
	buffer = kmalloc(absize, GFP_ATOMIC);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	err = shash->setkey(tfm, alignbuffer, keylen);
	kzfree(buffer);
	return err;
}

int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)key & alignmask)
		return shash_setkey_unaligned(tfm, key, keylen);

	return shash->setkey(tfm, key, keylen);
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);

static inline unsigned int shash_align_buffer_size(unsigned len,
						   unsigned long mask)
{
	typedef u8 __attribute__ ((aligned)) u8_aligned;
	return len + (mask & ~(__alignof__(u8_aligned) - 1));
}

static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned int unaligned_len = alignmask + 1 -
				     ((unsigned long)data & alignmask);
	u8 ubuf[shash_align_buffer_size(unaligned_len, alignmask)]
		__attribute__ ((aligned));
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	if (unaligned_len > len)
		unaligned_len = len;

	memcpy(buf, data, unaligned_len);
	err = shash->update(desc, buf, unaligned_len);
	memset(buf, 0, unaligned_len);

	return err ?:
	       shash->update(desc, data + unaligned_len, len - unaligned_len);
}

int crypto_shash_update(struct shash_desc *desc, const u8 *data,
			unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)data & alignmask)
		return shash_update_unaligned(desc, data, len);

	return shash->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);

static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned int ds = crypto_shash_digestsize(tfm);
	u8 ubuf[shash_align_buffer_size(ds, alignmask)]
		__attribute__ ((aligned));
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	err = shash->final(desc, buf);
	if (err)
		goto out;

	memcpy(out, buf, ds);

out:
	memset(buf, 0, ds);
	return err;
}

int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)out & alignmask)
		return shash_final_unaligned(desc, out);

	return shash->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);

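/*
 * Example (illustrative sketch, not part of this file's logic): a caller-side
 * view of the incremental init/update/final flow handled above.  This assumes
 * a kernel that provides SHASH_DESC_ON_STACK; the function name
 * "example_sha256" and the choice of "sha256" are hypothetical.
 *
 *	static int example_sha256(const u8 *data, unsigned int len, u8 *out)
 *	{
 *		struct crypto_shash *tfm;
 *		int err;
 *
 *		tfm = crypto_alloc_shash("sha256", 0, 0);
 *		if (IS_ERR(tfm))
 *			return PTR_ERR(tfm);
 *
 *		{
 *			SHASH_DESC_ON_STACK(desc, tfm);
 *
 *			desc->tfm = tfm;
 *			desc->flags = 0;
 *			err = crypto_shash_init(desc) ?:
 *			      crypto_shash_update(desc, data, len) ?:
 *			      crypto_shash_final(desc, out);
 *		}
 *
 *		crypto_free_shash(tfm);
 *		return err;
 *	}
 */
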
static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
				 unsigned int len, u8 *out)
{
	return crypto_shash_update(desc, data, len) ?:
	       crypto_shash_final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
		       unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_finup_unaligned(desc, data, len, out);

	return shash->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);

static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len, u8 *out)
{
	return crypto_shash_init(desc) ?:
	       crypto_shash_finup(desc, data, len, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_digest_unaligned(desc, data, len, out);

	return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);

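/*
 * Example (illustrative sketch): crypto_shash_digest() collapses the
 * init/update/final sequence into a single call when the whole message is
 * contiguous in memory.  For a keyed hash such as "hmac(sha256)" the key is
 * installed on the tfm first via crypto_shash_setkey().  The variable names
 * below are hypothetical.
 *
 *	SHASH_DESC_ON_STACK(desc, hmac_tfm);
 *
 *	desc->tfm = hmac_tfm;
 *	desc->flags = 0;
 *	err = crypto_shash_setkey(hmac_tfm, key, keylen) ?:
 *	      crypto_shash_digest(desc, msg, msglen, out);
 */
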
static int shash_default_export(struct shash_desc *desc, void *out)
{
	memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_default_import(struct shash_desc *desc, const void *in)
{
	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

	return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return crypto_shash_init(desc);
}

int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);

static int shash_async_update(struct ahash_request *req)
{
	return shash_ahash_update(req, ahash_request_ctx(req));
}

static int shash_async_final(struct ahash_request *req)
{
	return crypto_shash_final(ahash_request_ctx(req), req->result);
}

int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	nbytes = crypto_hash_walk_first(req, &walk);
	if (!nbytes)
		return crypto_shash_final(desc, req->result);

	do {
		nbytes = crypto_hash_walk_last(&walk) ?
			 crypto_shash_finup(desc, walk.data, nbytes,
					    req->result) :
			 crypto_shash_update(desc, walk.data, nbytes);
		nbytes = crypto_hash_walk_done(&walk, nbytes);
	} while (nbytes > 0);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);

static int shash_async_finup(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return shash_ahash_finup(req, desc);
}

int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
	unsigned int nbytes = req->nbytes;
	struct scatterlist *sg;
	unsigned int offset;
	int err;

	if (nbytes &&
	    (sg = req->src, offset = sg->offset,
	     nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
		void *data;

		data = kmap_atomic(sg_page(sg));
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		kunmap_atomic(data);
		crypto_yield(desc->flags);
	} else
		err = crypto_shash_init(desc) ?:
		      shash_ahash_finup(req, desc);

	return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);

static int shash_async_digest(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return shash_ahash_digest(req, desc);
}

static int shash_async_export(struct ahash_request *req, void *out)
{
	return crypto_shash_export(ahash_request_ctx(req), out);
}

static int shash_async_import(struct ahash_request *req, const void *in)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return crypto_shash_import(desc, in);
}

static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}

int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	*ctx = shash;
	tfm->exit = crypto_exit_shash_ops_async;

	crt->init = shash_async_init;
	crt->update = shash_async_update;
	crt->final = shash_async_final;
	crt->finup = shash_async_finup;
	crt->digest = shash_async_digest;
	crt->setkey = shash_async_setkey;

	crt->has_setkey = alg->setkey != shash_no_setkey;

	if (alg->export)
		crt->export = shash_async_export;
	if (alg->import)
		crt->import = shash_async_import;

	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}

static int shash_compat_setkey(struct crypto_hash *tfm, const u8 *key,
			       unsigned int keylen)
{
	struct shash_desc **descp = crypto_hash_ctx(tfm);
	struct shash_desc *desc = *descp;

	return crypto_shash_setkey(desc->tfm, key, keylen);
}

static int shash_compat_init(struct hash_desc *hdesc)
{
	struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
	struct shash_desc *desc = *descp;

	desc->flags = hdesc->flags;

	return crypto_shash_init(desc);
}

static int shash_compat_update(struct hash_desc *hdesc, struct scatterlist *sg,
			       unsigned int len)
{
	struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
	struct shash_desc *desc = *descp;
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first_compat(hdesc, &walk, sg, len);
	     nbytes > 0; nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}

static int shash_compat_final(struct hash_desc *hdesc, u8 *out)
{
	struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);

	return crypto_shash_final(*descp, out);
}

static int shash_compat_digest(struct hash_desc *hdesc, struct scatterlist *sg,
			       unsigned int nbytes, u8 *out)
{
	unsigned int offset = sg->offset;
	int err;

	if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
		struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
		struct shash_desc *desc = *descp;
		void *data;

		desc->flags = hdesc->flags;

		data = kmap_atomic(sg_page(sg));
		err = crypto_shash_digest(desc, data + offset, nbytes, out);
		kunmap_atomic(data);
		crypto_yield(desc->flags);
		goto out;
	}

	err = shash_compat_init(hdesc);
	if (err)
		goto out;

	err = shash_compat_update(hdesc, sg, nbytes);
	if (err)
		goto out;

	err = shash_compat_final(hdesc, out);

out:
	return err;
}

static void crypto_exit_shash_ops_compat(struct crypto_tfm *tfm)
{
	struct shash_desc **descp = crypto_tfm_ctx(tfm);
	struct shash_desc *desc = *descp;

	crypto_free_shash(desc->tfm);
	kzfree(desc);
}

static int crypto_init_shash_ops_compat(struct crypto_tfm *tfm)
{
	struct hash_tfm *crt = &tfm->crt_hash;
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct shash_desc **descp = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;
	struct shash_desc *desc;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(shash),
		       GFP_KERNEL);
	if (!desc) {
		crypto_free_shash(shash);
		return -ENOMEM;
	}

	*descp = desc;
	desc->tfm = shash;
	tfm->exit = crypto_exit_shash_ops_compat;

	crt->init = shash_compat_init;
	crt->update = shash_compat_update;
	crt->final = shash_compat_final;
	crt->digest = shash_compat_digest;
	crt->setkey = shash_compat_setkey;

	crt->digestsize = alg->digestsize;

	return 0;
}

static int crypto_init_shash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	switch (mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_HASH_MASK:
		return crypto_init_shash_ops_compat(tfm);
	}

	return -EINVAL;
}

static unsigned int crypto_shash_ctxsize(struct crypto_alg *alg, u32 type,
					 u32 mask)
{
	switch (mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_HASH_MASK:
		return sizeof(struct shash_desc *);
	}

	return 0;
}

static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);

	hash->descsize = crypto_shash_alg(hash)->descsize;
	return 0;
}

#ifdef CONFIG_NET
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;
	struct shash_alg *salg = __crypto_shash_alg(alg);

	strncpy(rhash.type, "shash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = salg->digestsize;

	if (nla_put(skb, CRYPTOCFGA_REPORT_HASH,
		    sizeof(struct crypto_report_hash), &rhash))
		goto nla_put_failure;
	return 0;

nla_put_failure:
	return -EMSGSIZE;
}
#else
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	return -ENOSYS;
}
#endif

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__attribute__ ((unused));
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type         : shash\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
}

static const struct crypto_type crypto_shash_type = {
	.ctxsize = crypto_shash_ctxsize,
	.extsize = crypto_alg_extsize,
	.init = crypto_init_shash_ops,
	.init_tfm = crypto_shash_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
	.report = crypto_shash_report,
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};

struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);

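/*
 * Example (illustrative sketch): when a descriptor cannot live on the stack,
 * it can be sized at runtime from crypto_shash_descsize(), mirroring what the
 * async wrapper above does when it sets crt->reqsize.  All variable names are
 * hypothetical.
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("sha256", 0, 0);
 *	struct shash_desc *desc;
 *
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(tfm),
 *		       GFP_KERNEL);
 *	if (!desc) {
 *		crypto_free_shash(tfm);
 *		return -ENOMEM;
 *	}
 *	desc->tfm = tfm;
 *	desc->flags = 0;
 */
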
static int shash_prepare_alg(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;

	if (alg->digestsize > PAGE_SIZE / 8 ||
	    alg->descsize > PAGE_SIZE / 8 ||
	    alg->statesize > PAGE_SIZE / 8)
		return -EINVAL;

	base->cra_type = &crypto_shash_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

	if (!alg->finup)
		alg->finup = shash_finup_unaligned;
	if (!alg->digest)
		alg->digest = shash_digest_unaligned;
	if (!alg->export) {
		alg->export = shash_default_export;
		alg->import = shash_default_import;
		alg->statesize = alg->descsize;
	}
	if (!alg->setkey)
		alg->setkey = shash_no_setkey;

	return 0;
}

int crypto_register_shash(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = shash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);

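/*
 * Example (illustrative sketch): a minimal shash_alg as a driver might
 * register it.  The "example" names and ops are hypothetical; note that
 * shash_prepare_alg() above rejects a digestsize, descsize or statesize
 * larger than PAGE_SIZE / 8 and fills in defaults for finup, digest,
 * export/import and setkey.
 *
 *	static struct shash_alg example_alg = {
 *		.digestsize	= 32,
 *		.descsize	= sizeof(struct example_desc_ctx),
 *		.init		= example_init,
 *		.update		= example_update,
 *		.final		= example_final,
 *		.base		= {
 *			.cra_name	 = "example",
 *			.cra_driver_name = "example-generic",
 *			.cra_blocksize	 = 64,
 *			.cra_module	 = THIS_MODULE,
 *		},
 *	};
 *
 *	err = crypto_register_shash(&example_alg);
 */
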
int crypto_unregister_shash(struct shash_alg *alg)
{
	return crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);

int crypto_register_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_shash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_shashes);

int crypto_unregister_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = count - 1; i >= 0; --i) {
		ret = crypto_unregister_shash(&algs[i]);
		if (ret)
			pr_err("Failed to unregister %s %s: %d\n",
			       algs[i].base.cra_driver_name,
			       algs[i].base.cra_name, ret);
	}

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_unregister_shashes);

int shash_register_instance(struct crypto_template *tmpl,
			    struct shash_instance *inst)
{
	int err;

	err = shash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);

void shash_free_instance(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(shash_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_free_instance);

int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn,
			    struct shash_alg *alg,
			    struct crypto_instance *inst)
{
	return crypto_init_spawn2(&spawn->base, &alg->base, inst,
				  &crypto_shash_type);
}
EXPORT_SYMBOL_GPL(crypto_init_shash_spawn);

struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask);
	return IS_ERR(alg) ? ERR_CAST(alg) :
	       container_of(alg, struct shash_alg, base);
}
EXPORT_SYMBOL_GPL(shash_attr_alg);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");