GNU Linux-libre 4.9.304-gnu1
arch/arm/crypto/aesbs-glue.c
/*
 * linux/arch/arm/crypto/aesbs-glue.c - glue code for NEON bit sliced AES
 *
 * Copyright (C) 2013 Linaro Ltd <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <asm/neon.h>
#include <crypto/aes.h>
#include <crypto/ablk_helper.h>
#include <crypto/algapi.h>
#include <linux/module.h>
#include <crypto/xts.h>

#include "aes_glue.h"

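/*
 * A BS_KEY carries the standard AES key schedule plus a cached bit sliced
 * version of it; 'converted' records whether the bit sliced form in bs[]
 * has been generated yet.
 */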
#define BIT_SLICED_KEY_MAXSIZE  (128 * (AES_MAXNR - 1) + 2 * AES_BLOCK_SIZE)

struct BS_KEY {
        struct AES_KEY  rk;
        int             converted;
        u8 __aligned(8) bs[BIT_SLICED_KEY_MAXSIZE];
} __aligned(8);

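/*
 * Core transforms implemented in the accompanying NEON assembly
 * (aesbs-core.S); they consume the BS_KEY layout defined above.
 */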
asmlinkage void bsaes_enc_key_convert(u8 out[], struct AES_KEY const *in);
asmlinkage void bsaes_dec_key_convert(u8 out[], struct AES_KEY const *in);

asmlinkage void bsaes_cbc_encrypt(u8 const in[], u8 out[], u32 bytes,
                                  struct BS_KEY *key, u8 iv[]);

asmlinkage void bsaes_ctr32_encrypt_blocks(u8 const in[], u8 out[], u32 blocks,
                                           struct BS_KEY *key, u8 const iv[]);

asmlinkage void bsaes_xts_encrypt(u8 const in[], u8 out[], u32 bytes,
                                  struct BS_KEY *key, u8 tweak[]);

asmlinkage void bsaes_xts_decrypt(u8 const in[], u8 out[], u32 bytes,
                                  struct BS_KEY *key, u8 tweak[]);

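/*
 * Per-transform context for each mode: CBC needs the scalar encryption
 * schedule plus a bit sliced decryption key, CTR only ever encrypts, and
 * XTS keeps a separate scalar schedule for the tweak.
 */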
struct aesbs_cbc_ctx {
        struct AES_KEY  enc;
        struct BS_KEY   dec;
};

struct aesbs_ctr_ctx {
        struct BS_KEY   enc;
};

struct aesbs_xts_ctx {
        struct BS_KEY   enc;
        struct BS_KEY   dec;
        struct AES_KEY  twkey;
};

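/*
 * The setkey routines only expand the scalar key schedules here; clearing
 * 'converted' defers generation of the bit sliced form until first use.
 */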
static int aesbs_cbc_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                             unsigned int key_len)
{
        struct aesbs_cbc_ctx *ctx = crypto_tfm_ctx(tfm);
        int bits = key_len * 8;

        if (private_AES_set_encrypt_key(in_key, bits, &ctx->enc)) {
                tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                return -EINVAL;
        }
        ctx->dec.rk = ctx->enc;
        private_AES_set_decrypt_key(in_key, bits, &ctx->dec.rk);
        ctx->dec.converted = 0;
        return 0;
}

static int aesbs_ctr_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                             unsigned int key_len)
{
        struct aesbs_ctr_ctx *ctx = crypto_tfm_ctx(tfm);
        int bits = key_len * 8;

        if (private_AES_set_encrypt_key(in_key, bits, &ctx->enc.rk)) {
                tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                return -EINVAL;
        }
        ctx->enc.converted = 0;
        return 0;
}

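/*
 * XTS takes a double length key: the first half keys the data transform,
 * the second half keys the tweak, hence bits = key_len * 4 below.
 */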
static int aesbs_xts_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                             unsigned int key_len)
{
        struct aesbs_xts_ctx *ctx = crypto_tfm_ctx(tfm);
        int bits = key_len * 4;
        int err;

        err = xts_check_key(tfm, in_key, key_len);
        if (err)
                return err;

        if (private_AES_set_encrypt_key(in_key, bits, &ctx->enc.rk)) {
                tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                return -EINVAL;
        }
        ctx->dec.rk = ctx->enc.rk;
        private_AES_set_decrypt_key(in_key, bits, &ctx->dec.rk);
        private_AES_set_encrypt_key(in_key + key_len / 2, bits, &ctx->twkey);
        ctx->enc.converted = ctx->dec.converted = 0;
        return 0;
}

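/*
 * CBC encryption is inherently sequential, so it is done with the scalar
 * AES_encrypt() core; only decryption below uses the bit sliced NEON code.
 */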
static int aesbs_cbc_encrypt(struct blkcipher_desc *desc,
                             struct scatterlist *dst,
                             struct scatterlist *src, unsigned int nbytes)
{
        struct aesbs_cbc_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);

        while (walk.nbytes) {
                u32 blocks = walk.nbytes / AES_BLOCK_SIZE;
                u8 *src = walk.src.virt.addr;

                if (walk.dst.virt.addr == walk.src.virt.addr) {
                        u8 *iv = walk.iv;

                        do {
                                crypto_xor(src, iv, AES_BLOCK_SIZE);
                                AES_encrypt(src, src, &ctx->enc);
                                iv = src;
                                src += AES_BLOCK_SIZE;
                        } while (--blocks);
                        memcpy(walk.iv, iv, AES_BLOCK_SIZE);
                } else {
                        u8 *dst = walk.dst.virt.addr;

                        do {
                                crypto_xor(walk.iv, src, AES_BLOCK_SIZE);
                                AES_encrypt(walk.iv, dst, &ctx->enc);
                                memcpy(walk.iv, dst, AES_BLOCK_SIZE);
                                src += AES_BLOCK_SIZE;
                                dst += AES_BLOCK_SIZE;
                        } while (--blocks);
                }
                err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
        }
        return err;
}

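/*
 * CBC decryption: bulk data is handled eight blocks at a time with NEON,
 * any remainder falls back to the scalar AES_decrypt() core.
 */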
static int aesbs_cbc_decrypt(struct blkcipher_desc *desc,
                             struct scatterlist *dst,
                             struct scatterlist *src, unsigned int nbytes)
{
        struct aesbs_cbc_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt_block(desc, &walk, 8 * AES_BLOCK_SIZE);

        while ((walk.nbytes / AES_BLOCK_SIZE) >= 8) {
                kernel_neon_begin();
                bsaes_cbc_encrypt(walk.src.virt.addr, walk.dst.virt.addr,
                                  walk.nbytes, &ctx->dec, walk.iv);
                kernel_neon_end();
                err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
        }
        while (walk.nbytes) {
                u32 blocks = walk.nbytes / AES_BLOCK_SIZE;
                u8 *dst = walk.dst.virt.addr;
                u8 *src = walk.src.virt.addr;
                u8 bk[2][AES_BLOCK_SIZE];
                u8 *iv = walk.iv;

                do {
                        if (walk.dst.virt.addr == walk.src.virt.addr)
                                memcpy(bk[blocks & 1], src, AES_BLOCK_SIZE);

                        AES_decrypt(src, dst, &ctx->dec.rk);
                        crypto_xor(dst, iv, AES_BLOCK_SIZE);

                        if (walk.dst.virt.addr == walk.src.virt.addr)
                                iv = bk[blocks & 1];
                        else
                                iv = src;

                        dst += AES_BLOCK_SIZE;
                        src += AES_BLOCK_SIZE;
                } while (--blocks);
                err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
        }
        return err;
}

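/* Add 'addend' to a 128-bit big-endian counter, propagating the carry. */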
static void inc_be128_ctr(__be32 ctr[], u32 addend)
{
        int i;

        for (i = 3; i >= 0; i--, addend = 1) {
                u32 n = be32_to_cpu(ctr[i]) + addend;

                ctr[i] = cpu_to_be32(n);
                if (n >= addend)
                        break;
        }
}

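/*
 * CTR mode: the NEON routine only increments the low 32 bits of the counter,
 * so each call is capped to avoid overflowing it; a partial final block is
 * handled with the scalar core.
 */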
static int aesbs_ctr_encrypt(struct blkcipher_desc *desc,
                             struct scatterlist *dst, struct scatterlist *src,
                             unsigned int nbytes)
{
        struct aesbs_ctr_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        u32 blocks;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt_block(desc, &walk, 8 * AES_BLOCK_SIZE);

        while ((blocks = walk.nbytes / AES_BLOCK_SIZE)) {
                u32 tail = walk.nbytes % AES_BLOCK_SIZE;
                __be32 *ctr = (__be32 *)walk.iv;
                u32 headroom = UINT_MAX - be32_to_cpu(ctr[3]);

                /* avoid 32 bit counter overflow in the NEON code */
                if (unlikely(headroom < blocks)) {
                        blocks = headroom + 1;
                        tail = walk.nbytes - blocks * AES_BLOCK_SIZE;
                }
                kernel_neon_begin();
                bsaes_ctr32_encrypt_blocks(walk.src.virt.addr,
                                           walk.dst.virt.addr, blocks,
                                           &ctx->enc, walk.iv);
                kernel_neon_end();
                inc_be128_ctr(ctr, blocks);

                nbytes -= blocks * AES_BLOCK_SIZE;
                if (nbytes && nbytes == tail && nbytes <= AES_BLOCK_SIZE)
                        break;

                err = blkcipher_walk_done(desc, &walk, tail);
        }
        if (walk.nbytes) {
                u8 *tdst = walk.dst.virt.addr + blocks * AES_BLOCK_SIZE;
                u8 *tsrc = walk.src.virt.addr + blocks * AES_BLOCK_SIZE;
                u8 ks[AES_BLOCK_SIZE];

                AES_encrypt(walk.iv, ks, &ctx->enc.rk);
                if (tdst != tsrc)
                        memcpy(tdst, tsrc, nbytes);
                crypto_xor(tdst, ks, nbytes);
                err = blkcipher_walk_done(desc, &walk, 0);
        }
        return err;
}

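/*
 * XTS: the initial tweak is computed with the scalar core using the tweak
 * key; the NEON routines then process the data and carry the tweak forward
 * between calls via walk.iv.
 */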
static int aesbs_xts_encrypt(struct blkcipher_desc *desc,
                             struct scatterlist *dst,
                             struct scatterlist *src, unsigned int nbytes)
{
        struct aesbs_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt_block(desc, &walk, 8 * AES_BLOCK_SIZE);
        if (err)
                return err;

        /* generate the initial tweak */
        AES_encrypt(walk.iv, walk.iv, &ctx->twkey);

        while (walk.nbytes) {
                kernel_neon_begin();
                bsaes_xts_encrypt(walk.src.virt.addr, walk.dst.virt.addr,
                                  walk.nbytes, &ctx->enc, walk.iv);
                kernel_neon_end();
                err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
        }
        return err;
}

static int aesbs_xts_decrypt(struct blkcipher_desc *desc,
                             struct scatterlist *dst,
                             struct scatterlist *src, unsigned int nbytes)
{
        struct aesbs_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt_block(desc, &walk, 8 * AES_BLOCK_SIZE);
        if (err)
                return err;

        /* generate the initial tweak */
        AES_encrypt(walk.iv, walk.iv, &ctx->twkey);

        while (walk.nbytes) {
                kernel_neon_begin();
                bsaes_xts_decrypt(walk.src.virt.addr, walk.dst.virt.addr,
                                  walk.nbytes, &ctx->dec, walk.iv);
                kernel_neon_end();
                err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
        }
        return err;
}

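/*
 * The internal ("__") blkcipher implementations may only be called with the
 * NEON unit available; the ablk_helper based async versions registered below
 * wrap them so they can be used from any context.
 */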
static struct crypto_alg aesbs_algs[] = { {
        .cra_name               = "__cbc-aes-neonbs",
        .cra_driver_name        = "__driver-cbc-aes-neonbs",
        .cra_priority           = 0,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER |
                                  CRYPTO_ALG_INTERNAL,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct aesbs_cbc_ctx),
        .cra_alignmask          = 7,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_blkcipher = {
                .min_keysize    = AES_MIN_KEY_SIZE,
                .max_keysize    = AES_MAX_KEY_SIZE,
                .ivsize         = AES_BLOCK_SIZE,
                .setkey         = aesbs_cbc_set_key,
                .encrypt        = aesbs_cbc_encrypt,
                .decrypt        = aesbs_cbc_decrypt,
        },
}, {
        .cra_name               = "__ctr-aes-neonbs",
        .cra_driver_name        = "__driver-ctr-aes-neonbs",
        .cra_priority           = 0,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER |
                                  CRYPTO_ALG_INTERNAL,
        .cra_blocksize          = 1,
        .cra_ctxsize            = sizeof(struct aesbs_ctr_ctx),
        .cra_alignmask          = 7,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_blkcipher = {
                .min_keysize    = AES_MIN_KEY_SIZE,
                .max_keysize    = AES_MAX_KEY_SIZE,
                .ivsize         = AES_BLOCK_SIZE,
                .setkey         = aesbs_ctr_set_key,
                .encrypt        = aesbs_ctr_encrypt,
                .decrypt        = aesbs_ctr_encrypt,
        },
}, {
        .cra_name               = "__xts-aes-neonbs",
        .cra_driver_name        = "__driver-xts-aes-neonbs",
        .cra_priority           = 0,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER |
                                  CRYPTO_ALG_INTERNAL,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct aesbs_xts_ctx),
        .cra_alignmask          = 7,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_blkcipher = {
                .min_keysize    = 2 * AES_MIN_KEY_SIZE,
                .max_keysize    = 2 * AES_MAX_KEY_SIZE,
                .ivsize         = AES_BLOCK_SIZE,
                .setkey         = aesbs_xts_set_key,
                .encrypt        = aesbs_xts_encrypt,
                .decrypt        = aesbs_xts_decrypt,
        },
}, {
        .cra_name               = "cbc(aes)",
        .cra_driver_name        = "cbc-aes-neonbs",
        .cra_priority           = 250,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct async_helper_ctx),
        .cra_alignmask          = 7,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_init               = ablk_init,
        .cra_exit               = ablk_exit,
        .cra_ablkcipher = {
                .min_keysize    = AES_MIN_KEY_SIZE,
                .max_keysize    = AES_MAX_KEY_SIZE,
                .ivsize         = AES_BLOCK_SIZE,
                .setkey         = ablk_set_key,
                .encrypt        = __ablk_encrypt,
                .decrypt        = ablk_decrypt,
        }
}, {
        .cra_name               = "ctr(aes)",
        .cra_driver_name        = "ctr-aes-neonbs",
        .cra_priority           = 250,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize          = 1,
        .cra_ctxsize            = sizeof(struct async_helper_ctx),
        .cra_alignmask          = 7,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_init               = ablk_init,
        .cra_exit               = ablk_exit,
        .cra_ablkcipher = {
                .min_keysize    = AES_MIN_KEY_SIZE,
                .max_keysize    = AES_MAX_KEY_SIZE,
                .ivsize         = AES_BLOCK_SIZE,
                .setkey         = ablk_set_key,
                .encrypt        = ablk_encrypt,
                .decrypt        = ablk_decrypt,
        }
}, {
        .cra_name               = "xts(aes)",
        .cra_driver_name        = "xts-aes-neonbs",
        .cra_priority           = 250,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct async_helper_ctx),
        .cra_alignmask          = 7,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_init               = ablk_init,
        .cra_exit               = ablk_exit,
        .cra_ablkcipher = {
                .min_keysize    = 2 * AES_MIN_KEY_SIZE,
                .max_keysize    = 2 * AES_MAX_KEY_SIZE,
                .ivsize         = AES_BLOCK_SIZE,
                .setkey         = ablk_set_key,
                .encrypt        = ablk_encrypt,
                .decrypt        = ablk_decrypt,
        }
} };

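/* Register the algorithms only when the CPU actually has NEON. */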
static int __init aesbs_mod_init(void)
{
        if (!cpu_has_neon())
                return -ENODEV;

        return crypto_register_algs(aesbs_algs, ARRAY_SIZE(aesbs_algs));
}

static void __exit aesbs_mod_exit(void)
{
        crypto_unregister_algs(aesbs_algs, ARRAY_SIZE(aesbs_algs));
}

module_init(aesbs_mod_init);
module_exit(aesbs_mod_exit);

MODULE_DESCRIPTION("Bit sliced AES in CBC/CTR/XTS modes using NEON");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL");