GNU Linux-libre 4.9.326-gnu1
[releases.git] / crypto / shash.c
1 /*
2  * Synchronous Cryptographic Hash operations.
3  *
4  * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
5  *
6  * This program is free software; you can redistribute it and/or modify it
7  * under the terms of the GNU General Public License as published by the Free
8  * Software Foundation; either version 2 of the License, or (at your option)
9  * any later version.
10  *
11  */
12
13 #include <crypto/scatterwalk.h>
14 #include <crypto/internal/hash.h>
15 #include <linux/err.h>
16 #include <linux/kernel.h>
17 #include <linux/module.h>
18 #include <linux/slab.h>
19 #include <linux/seq_file.h>
20 #include <linux/cryptouser.h>
21 #include <net/netlink.h>
22
23 #include "internal.h"
24
25 static const struct crypto_type crypto_shash_type;
26
/*
 * Default ->setkey() for keyless algorithms: always fails.
 *
 * NOTE: crypto_shash_alg_has_setkey() compares ->setkey against this
 * function's address to detect keyless algorithms, so this must stay a
 * single out-of-line function with a unique address.
 */
static int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
			   unsigned int keylen)
{
	return -ENOSYS;
}
32
/*
 * Check whether an shash algorithm has a setkey function.
 *
 * For CFI compatibility, this must not be an inline function.  This is because
 * when CFI is enabled, modules won't get the same address for shash_no_setkey
 * (if it were exported, which inlining would require) as the core kernel will.
 *
 * Returns true iff @alg implements its own ->setkey() (i.e. it was not
 * defaulted to shash_no_setkey by shash_prepare_alg()).
 */
bool crypto_shash_alg_has_setkey(struct shash_alg *alg)
{
	return alg->setkey != shash_no_setkey;
}
EXPORT_SYMBOL_GPL(crypto_shash_alg_has_setkey);
45
/*
 * Set the key when the caller's buffer does not satisfy the algorithm's
 * alignment mask: copy the key into a freshly allocated, suitably aligned
 * bounce buffer before handing it to ->setkey().  The buffer is zeroed
 * and freed (kzfree) afterwards so no key material lingers on the heap.
 */
static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
				  unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned long absize;
	u8 *buffer, *alignbuffer;
	int err;

	/*
	 * Worst-case slack needed beyond the alignment kmalloc() already
	 * guarantees (crypto_tfm_ctx_alignment()).
	 */
	absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
	buffer = kmalloc(absize, GFP_ATOMIC);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	err = shash->setkey(tfm, alignbuffer, keylen);
	kzfree(buffer);
	return err;
}
66
67 static void shash_set_needkey(struct crypto_shash *tfm, struct shash_alg *alg)
68 {
69         if (crypto_shash_alg_has_setkey(alg) &&
70             !(alg->base.cra_flags & CRYPTO_ALG_OPTIONAL_KEY))
71                 crypto_shash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
72 }
73
74 int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
75                         unsigned int keylen)
76 {
77         struct shash_alg *shash = crypto_shash_alg(tfm);
78         unsigned long alignmask = crypto_shash_alignmask(tfm);
79         int err;
80
81         if ((unsigned long)key & alignmask)
82                 err = shash_setkey_unaligned(tfm, key, keylen);
83         else
84                 err = shash->setkey(tfm, key, keylen);
85
86         if (unlikely(err)) {
87                 shash_set_needkey(tfm, shash);
88                 return err;
89         }
90
91         crypto_shash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
92         return 0;
93 }
94 EXPORT_SYMBOL_GPL(crypto_shash_setkey);
95
/*
 * Size needed for an on-stack buffer so that a pointer within it can be
 * rounded up to an (mask + 1)-byte boundary.  The callers declare the
 * buffer __attribute__((aligned)), so only the slack beyond that natural
 * alignment has to be added to @len.
 */
static inline unsigned int shash_align_buffer_size(unsigned len,
						   unsigned long mask)
{
	typedef u8 __attribute__ ((aligned)) u8_aligned;
	return len + (mask & ~(__alignof__(u8_aligned) - 1));
}
102
/*
 * Hash @len bytes starting at a misaligned @data pointer.  Only the
 * leading bytes up to the next alignment boundary are copied through an
 * aligned on-stack bounce buffer; the rest of @data is then aligned and
 * is passed to ->update() directly.
 */
static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	/* Bytes needed to reach the next (alignmask + 1) boundary. */
	unsigned int unaligned_len = alignmask + 1 -
				     ((unsigned long)data & alignmask);
	/* VLA sized so an aligned pointer of unaligned_len bytes fits. */
	u8 ubuf[shash_align_buffer_size(unaligned_len, alignmask)]
		__attribute__ ((aligned));
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	if (unaligned_len > len)
		unaligned_len = len;

	memcpy(buf, data, unaligned_len);
	err = shash->update(desc, buf, unaligned_len);
	/* Wipe the bounce buffer: it may have held sensitive input. */
	memset(buf, 0, unaligned_len);

	return err ?:
	       shash->update(desc, data + unaligned_len, len - unaligned_len);
}
126
127 int crypto_shash_update(struct shash_desc *desc, const u8 *data,
128                         unsigned int len)
129 {
130         struct crypto_shash *tfm = desc->tfm;
131         struct shash_alg *shash = crypto_shash_alg(tfm);
132         unsigned long alignmask = crypto_shash_alignmask(tfm);
133
134         if ((unsigned long)data & alignmask)
135                 return shash_update_unaligned(desc, data, len);
136
137         return shash->update(desc, data, len);
138 }
139 EXPORT_SYMBOL_GPL(crypto_shash_update);
140
/*
 * Produce the final digest into an aligned on-stack bounce buffer and
 * copy it to the misaligned @out pointer.  The bounce buffer is wiped
 * afterwards (even on error) so no digest bytes linger on the stack.
 */
static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned int ds = crypto_shash_digestsize(tfm);
	/* VLA sized so an aligned pointer of ds bytes fits inside. */
	u8 ubuf[shash_align_buffer_size(ds, alignmask)]
		__attribute__ ((aligned));
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	err = shash->final(desc, buf);
	if (err)
		goto out;

	memcpy(out, buf, ds);

out:
	memset(buf, 0, ds);
	return err;
}
162
163 int crypto_shash_final(struct shash_desc *desc, u8 *out)
164 {
165         struct crypto_shash *tfm = desc->tfm;
166         struct shash_alg *shash = crypto_shash_alg(tfm);
167         unsigned long alignmask = crypto_shash_alignmask(tfm);
168
169         if ((unsigned long)out & alignmask)
170                 return shash_final_unaligned(desc, out);
171
172         return shash->final(desc, out);
173 }
174 EXPORT_SYMBOL_GPL(crypto_shash_final);
175
176 static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
177                                  unsigned int len, u8 *out)
178 {
179         return crypto_shash_update(desc, data, len) ?:
180                crypto_shash_final(desc, out);
181 }
182
183 int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
184                        unsigned int len, u8 *out)
185 {
186         struct crypto_shash *tfm = desc->tfm;
187         struct shash_alg *shash = crypto_shash_alg(tfm);
188         unsigned long alignmask = crypto_shash_alignmask(tfm);
189
190         if (((unsigned long)data | (unsigned long)out) & alignmask)
191                 return shash_finup_unaligned(desc, data, len, out);
192
193         return shash->finup(desc, data, len, out);
194 }
195 EXPORT_SYMBOL_GPL(crypto_shash_finup);
196
197 static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
198                                   unsigned int len, u8 *out)
199 {
200         return crypto_shash_init(desc) ?:
201                crypto_shash_finup(desc, data, len, out);
202 }
203
204 int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
205                         unsigned int len, u8 *out)
206 {
207         struct crypto_shash *tfm = desc->tfm;
208         struct shash_alg *shash = crypto_shash_alg(tfm);
209         unsigned long alignmask = crypto_shash_alignmask(tfm);
210
211         if (crypto_shash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
212                 return -ENOKEY;
213
214         if (((unsigned long)data | (unsigned long)out) & alignmask)
215                 return shash_digest_unaligned(desc, data, len, out);
216
217         return shash->digest(desc, data, len, out);
218 }
219 EXPORT_SYMBOL_GPL(crypto_shash_digest);
220
/*
 * Default ->export(): the partial state is simply the raw descriptor
 * context (statesize was set to descsize in shash_prepare_alg()).
 */
static int shash_default_export(struct shash_desc *desc, void *out)
{
	memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
	return 0;
}
226
/*
 * Default ->import(): restore a partial state previously produced by
 * shash_default_export() by copying it back into the descriptor context.
 */
static int shash_default_import(struct shash_desc *desc, const void *in)
{
	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
	return 0;
}
232
233 static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
234                               unsigned int keylen)
235 {
236         struct crypto_shash **ctx = crypto_ahash_ctx(tfm);
237
238         return crypto_shash_setkey(*ctx, key, keylen);
239 }
240
241 static int shash_async_init(struct ahash_request *req)
242 {
243         struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
244         struct shash_desc *desc = ahash_request_ctx(req);
245
246         desc->tfm = *ctx;
247         desc->flags = req->base.flags;
248
249         return crypto_shash_init(desc);
250 }
251
/*
 * Feed all scatterlist data from @req into the shash @desc, mapping it
 * chunk by chunk via the hash walk helpers.  Returns 0 on success or
 * the first negative error encountered.
 */
int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	/*
	 * crypto_hash_walk_done() consumes the update's return value: a
	 * negative nbytes aborts the walk and becomes the final result.
	 */
	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);
263 EXPORT_SYMBOL_GPL(shash_ahash_update);
264
/* ahash ->update() wrapper around the shash walk helper. */
static int shash_async_update(struct ahash_request *req)
{
	struct shash_desc *desc = ahash_request_ctx(req);

	return shash_ahash_update(req, desc);
}
269
270 static int shash_async_final(struct ahash_request *req)
271 {
272         return crypto_shash_final(ahash_request_ctx(req), req->result);
273 }
274
/*
 * Walk the request's scatterlist, hashing each chunk and folding the
 * final chunk into a finup so the digest lands in req->result.
 */
int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	nbytes = crypto_hash_walk_first(req, &walk);
	if (!nbytes)
		/* Zero-length request: just finalize. */
		return crypto_shash_final(desc, req->result);

	do {
		/* Last chunk gets finup (update + final); others update. */
		nbytes = crypto_hash_walk_last(&walk) ?
			 crypto_shash_finup(desc, walk.data, nbytes,
					    req->result) :
			 crypto_shash_update(desc, walk.data, nbytes);
		/* Negative nbytes aborts the walk and is the result. */
		nbytes = crypto_hash_walk_done(&walk, nbytes);
	} while (nbytes > 0);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);
294 EXPORT_SYMBOL_GPL(shash_ahash_finup);
295
296 static int shash_async_finup(struct ahash_request *req)
297 {
298         struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
299         struct shash_desc *desc = ahash_request_ctx(req);
300
301         desc->tfm = *ctx;
302         desc->flags = req->base.flags;
303
304         return shash_ahash_finup(req, desc);
305 }
306
/*
 * Digest the request's scatterlist data with the shash @desc.
 *
 * Fast path: when the whole input fits inside a single page of the
 * first scatterlist entry, map it atomically and call ->digest() once.
 * Otherwise fall back to init + walk-based finup.
 */
int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
	unsigned int nbytes = req->nbytes;
	struct scatterlist *sg;
	unsigned int offset;
	int err;

	/*
	 * Comma expressions: sg/offset are only read when nbytes != 0,
	 * and the fast path requires nbytes to fit strictly within both
	 * the first sg entry and the remainder of its page.
	 */
	if (nbytes &&
	    (sg = req->src, offset = sg->offset,
	     nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
		void *data;

		data = kmap_atomic(sg_page(sg));
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		kunmap_atomic(data);
		crypto_yield(desc->flags);
	} else
		err = crypto_shash_init(desc) ?:
		      shash_ahash_finup(req, desc);

	return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);
330 EXPORT_SYMBOL_GPL(shash_ahash_digest);
331
332 static int shash_async_digest(struct ahash_request *req)
333 {
334         struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
335         struct shash_desc *desc = ahash_request_ctx(req);
336
337         desc->tfm = *ctx;
338         desc->flags = req->base.flags;
339
340         return shash_ahash_digest(req, desc);
341 }
342
/* ahash ->export() wrapper: export the shash descriptor's state. */
static int shash_async_export(struct ahash_request *req, void *out)
{
	struct shash_desc *desc = ahash_request_ctx(req);

	return crypto_shash_export(desc, out);
}
347
348 static int shash_async_import(struct ahash_request *req, const void *in)
349 {
350         struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
351         struct shash_desc *desc = ahash_request_ctx(req);
352
353         desc->tfm = *ctx;
354         desc->flags = req->base.flags;
355
356         return crypto_shash_import(desc, in);
357 }
358
/*
 * Teardown for the ahash-over-shash wrapper: release the shash
 * transform allocated by crypto_init_shash_ops_async().
 */
static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}
365
/*
 * Expose a synchronous shash algorithm through the asynchronous ahash
 * interface: allocate a shash transform, stash it in the ahash context,
 * and wire up the ahash ops to the shash_async_* wrappers.
 *
 * Returns 0 on success, -EAGAIN if the algorithm module cannot be
 * pinned, or the error from crypto_create_tfm().
 */
int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	/* Pin the underlying algorithm while the wrapper tfm exists. */
	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	*ctx = shash;
	tfm->exit = crypto_exit_shash_ops_async;

	crt->init = shash_async_init;
	crt->update = shash_async_update;
	crt->final = shash_async_final;
	crt->finup = shash_async_finup;
	crt->digest = shash_async_digest;
	/* Only provide setkey for algorithms that actually take a key. */
	if (crypto_shash_alg_has_setkey(alg))
		crt->setkey = shash_async_setkey;

	/* Propagate the NEED_KEY state from the inner shash transform. */
	crypto_ahash_set_flags(crt, crypto_shash_get_flags(shash) &
				    CRYPTO_TFM_NEED_KEY);

	if (alg->export)
		crt->export = shash_async_export;
	if (alg->import)
		crt->import = shash_async_import;

	/* Request context holds the shash descriptor plus its state. */
	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}
406
/*
 * Per-tfm initialization: cache the descriptor size and arm the
 * NEED_KEY flag for keyed algorithms.
 */
static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);
	struct shash_alg *alg = crypto_shash_alg(hash);

	hash->descsize = alg->descsize;

	shash_set_needkey(hash, alg);

	return 0;
}
418
419 #ifdef CONFIG_NET
420 static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
421 {
422         struct crypto_report_hash rhash;
423         struct shash_alg *salg = __crypto_shash_alg(alg);
424
425         strncpy(rhash.type, "shash", sizeof(rhash.type));
426
427         rhash.blocksize = alg->cra_blocksize;
428         rhash.digestsize = salg->digestsize;
429
430         if (nla_put(skb, CRYPTOCFGA_REPORT_HASH,
431                     sizeof(struct crypto_report_hash), &rhash))
432                 goto nla_put_failure;
433         return 0;
434
435 nla_put_failure:
436         return -EMSGSIZE;
437 }
438 #else
439 static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
440 {
441         return -ENOSYS;
442 }
443 #endif
444
/*
 * /proc/crypto ->show() callback.  The extra forward declaration with
 * __attribute__((unused)) silences the compiler when CONFIG_PROC_FS is
 * disabled and the function is never referenced.
 */
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__attribute__ ((unused));
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type         : shash\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
}
455
/* Frontend type descriptor tying shash algorithms into the crypto API. */
static const struct crypto_type crypto_shash_type = {
	.extsize = crypto_alg_extsize,
	.init_tfm = crypto_shash_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
	.report = crypto_shash_report,
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};
468
/*
 * Allocate a synchronous hash transform by algorithm name.  Returns an
 * ERR_PTR on failure; free with crypto_free_shash().
 */
struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);
475
476 static int shash_prepare_alg(struct shash_alg *alg)
477 {
478         struct crypto_alg *base = &alg->base;
479
480         if (alg->digestsize > PAGE_SIZE / 8 ||
481             alg->descsize > PAGE_SIZE / 8 ||
482             alg->statesize > PAGE_SIZE / 8)
483                 return -EINVAL;
484
485         base->cra_type = &crypto_shash_type;
486         base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
487         base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;
488
489         if (!alg->finup)
490                 alg->finup = shash_finup_unaligned;
491         if (!alg->digest)
492                 alg->digest = shash_digest_unaligned;
493         if (!alg->export) {
494                 alg->export = shash_default_export;
495                 alg->import = shash_default_import;
496                 alg->statesize = alg->descsize;
497         }
498         if (!alg->setkey)
499                 alg->setkey = shash_no_setkey;
500
501         return 0;
502 }
503
504 int crypto_register_shash(struct shash_alg *alg)
505 {
506         struct crypto_alg *base = &alg->base;
507         int err;
508
509         err = shash_prepare_alg(alg);
510         if (err)
511                 return err;
512
513         return crypto_register_alg(base);
514 }
515 EXPORT_SYMBOL_GPL(crypto_register_shash);
516
/* Unregister a single shash algorithm from the crypto core. */
int crypto_unregister_shash(struct shash_alg *alg)
{
	return crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);
522
523 int crypto_register_shashes(struct shash_alg *algs, int count)
524 {
525         int i, ret;
526
527         for (i = 0; i < count; i++) {
528                 ret = crypto_register_shash(&algs[i]);
529                 if (ret)
530                         goto err;
531         }
532
533         return 0;
534
535 err:
536         for (--i; i >= 0; --i)
537                 crypto_unregister_shash(&algs[i]);
538
539         return ret;
540 }
541 EXPORT_SYMBOL_GPL(crypto_register_shashes);
542
543 int crypto_unregister_shashes(struct shash_alg *algs, int count)
544 {
545         int i, ret;
546
547         for (i = count - 1; i >= 0; --i) {
548                 ret = crypto_unregister_shash(&algs[i]);
549                 if (ret)
550                         pr_err("Failed to unregister %s %s: %d\n",
551                                algs[i].base.cra_driver_name,
552                                algs[i].base.cra_name, ret);
553         }
554
555         return 0;
556 }
557 EXPORT_SYMBOL_GPL(crypto_unregister_shashes);
558
559 int shash_register_instance(struct crypto_template *tmpl,
560                             struct shash_instance *inst)
561 {
562         int err;
563
564         err = shash_prepare_alg(&inst->alg);
565         if (err)
566                 return err;
567
568         return crypto_register_instance(tmpl, shash_crypto_instance(inst));
569 }
570 EXPORT_SYMBOL_GPL(shash_register_instance);
571
/*
 * Free a template-generated instance.  The spawn must be dropped
 * before the instance memory (which holds it) is freed.
 */
void shash_free_instance(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(shash_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_free_instance);
578
/*
 * Initialize a spawn (reference from a template instance) to the shash
 * algorithm @alg on behalf of @inst.
 */
int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn,
			    struct shash_alg *alg,
			    struct crypto_instance *inst)
{
	return crypto_init_spawn2(&spawn->base, &alg->base, inst,
				  &crypto_shash_type);
}
EXPORT_SYMBOL_GPL(crypto_init_shash_spawn);
587
588 struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
589 {
590         struct crypto_alg *alg;
591
592         alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask);
593         return IS_ERR(alg) ? ERR_CAST(alg) :
594                container_of(alg, struct shash_alg, base);
595 }
596 EXPORT_SYMBOL_GPL(shash_attr_alg);
597
598 MODULE_LICENSE("GPL");
599 MODULE_DESCRIPTION("Synchronous cryptographic hash type");