// SPDX-License-Identifier: GPL-2.0-only
/*
 * Bit sliced AES using NEON instructions
 *
 * Copyright (C) 2016 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */
#include <asm/neon.h>
#include <asm/simd.h>
#include <crypto/aes.h>
#include <crypto/ctr.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <crypto/xts.h>
#include <linux/module.h>
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");

MODULE_ALIAS_CRYPTO("ecb(aes)");
MODULE_ALIAS_CRYPTO("cbc(aes)");
MODULE_ALIAS_CRYPTO("ctr(aes)");
MODULE_ALIAS_CRYPTO("xts(aes)");
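/*
 * Entry points into the assembly code. The aesbs_* routines are the
 * bit-sliced NEON implementation (aes-neonbs-core.S), which processes
 * eight blocks in parallel.
 */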
asmlinkage void aesbs_convert_key(u8 out[], u32 const rk[], int rounds);

asmlinkage void aesbs_ecb_encrypt(u8 out[], u8 const in[], u8 const rk[],
                                  int rounds, int blocks);
asmlinkage void aesbs_ecb_decrypt(u8 out[], u8 const in[], u8 const rk[],
                                  int rounds, int blocks);

asmlinkage void aesbs_cbc_decrypt(u8 out[], u8 const in[], u8 const rk[],
                                  int rounds, int blocks, u8 iv[]);

asmlinkage void aesbs_ctr_encrypt(u8 out[], u8 const in[], u8 const rk[],
                                  int rounds, int blocks, u8 iv[]);

asmlinkage void aesbs_xts_encrypt(u8 out[], u8 const in[], u8 const rk[],
                                  int rounds, int blocks, u8 iv[]);
asmlinkage void aesbs_xts_decrypt(u8 out[], u8 const in[], u8 const rk[],
                                  int rounds, int blocks, u8 iv[]);
/* borrowed from aes-neon-blk.ko */
asmlinkage void neon_aes_ecb_encrypt(u8 out[], u8 const in[], u32 const rk[],
                                     int rounds, int blocks);
asmlinkage void neon_aes_cbc_encrypt(u8 out[], u8 const in[], u32 const rk[],
                                     int rounds, int blocks, u8 iv[]);
asmlinkage void neon_aes_ctr_encrypt(u8 out[], u8 const in[], u32 const rk[],
                                     int rounds, int bytes, u8 ctr[]);
asmlinkage void neon_aes_xts_encrypt(u8 out[], u8 const in[],
                                     u32 const rk1[], int rounds, int bytes,
                                     u32 const rk2[], u8 iv[], int first);
asmlinkage void neon_aes_xts_decrypt(u8 out[], u8 const in[],
                                     u32 const rk1[], int rounds, int bytes,
                                     u32 const rk2[], u8 iv[], int first);
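/*
 * The bit-sliced round keys use a layout of their own, produced by
 * aesbs_convert_key(); rk[] below is sized for the worst case (AES-256),
 * at 8 * AES_BLOCK_SIZE bytes of key material per round.
 */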
struct aesbs_ctx {
        u8      rk[13 * (8 * AES_BLOCK_SIZE) + 32];
        int     rounds;
} __aligned(AES_BLOCK_SIZE);

struct aesbs_cbc_ctr_ctx {
        struct aesbs_ctx        key;
        u32                     enc[AES_MAX_KEYLENGTH_U32];
};

struct aesbs_xts_ctx {
        struct aesbs_ctx        key;
        u32                     twkey[AES_MAX_KEYLENGTH_U32];
        struct crypto_aes_ctx   cts;
};
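/*
 * aesbs_setkey() expands the user key using the generic AES library code,
 * then converts the encryption round keys into the bit-sliced layout.
 * NEON may only be used between kernel_neon_begin()/kernel_neon_end().
 */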
static int aesbs_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
                        unsigned int key_len)
{
        struct aesbs_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct crypto_aes_ctx rk;
        int err;

        err = aes_expandkey(&rk, in_key, key_len);
        if (err)
                return err;

        ctx->rounds = 6 + key_len / 4;

        kernel_neon_begin();
        aesbs_convert_key(ctx->rk, rk.key_enc, ctx->rounds);
        kernel_neon_end();

        return 0;
}
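/*
 * Shared ECB helper: walk the request and, except on the final chunk,
 * round the block count down to a multiple of the walk stride (eight
 * blocks), so the bit-sliced code always operates on full bundles.
 */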
static int __ecb_crypt(struct skcipher_request *req,
                       void (*fn)(u8 out[], u8 const in[], u8 const rk[],
                                  int rounds, int blocks))
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct aesbs_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while (walk.nbytes >= AES_BLOCK_SIZE) {
                unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;

                if (walk.nbytes < walk.total)
                        blocks = round_down(blocks,
                                            walk.stride / AES_BLOCK_SIZE);

                kernel_neon_begin();
                fn(walk.dst.virt.addr, walk.src.virt.addr, ctx->rk,
                   ctx->rounds, blocks);
                kernel_neon_end();
                err = skcipher_walk_done(&walk,
                                         walk.nbytes - blocks * AES_BLOCK_SIZE);
        }

        return err;
}
static int ecb_encrypt(struct skcipher_request *req)
{
        return __ecb_crypt(req, aesbs_ecb_encrypt);
}

static int ecb_decrypt(struct skcipher_request *req)
{
        return __ecb_crypt(req, aesbs_ecb_decrypt);
}
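/*
 * CBC and CTR keep two key schedules: the regular round keys in ctx->enc
 * for the plain NEON fallback routines, and the bit-sliced version in
 * ctx->key.rk for the eight-way code.
 */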
static int aesbs_cbc_ctr_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
                                unsigned int key_len)
{
        struct aesbs_cbc_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct crypto_aes_ctx rk;
        int err;

        err = aes_expandkey(&rk, in_key, key_len);
        if (err)
                return err;

        ctx->key.rounds = 6 + key_len / 4;

        memcpy(ctx->enc, rk.key_enc, sizeof(ctx->enc));

        kernel_neon_begin();
        aesbs_convert_key(ctx->key.rk, rk.key_enc, ctx->key.rounds);
        kernel_neon_end();
        memzero_explicit(&rk, sizeof(rk));

        return 0;
}
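/*
 * CBC encryption is inherently sequential, since each block depends on
 * the previous ciphertext block, so the eight-way bit-sliced code cannot
 * help here; encryption is delegated to the plain NEON implementation.
 */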
static int cbc_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct aesbs_cbc_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while (walk.nbytes >= AES_BLOCK_SIZE) {
                unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;

                /* fall back to the non-bitsliced NEON implementation */
                kernel_neon_begin();
                neon_aes_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                     ctx->enc, ctx->key.rounds, blocks,
                                     walk.iv);
                kernel_neon_end();
                err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
        }
        return err;
}
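/*
 * CBC decryption, by contrast, is fully parallelizable: all ciphertext
 * blocks are available up front, so the bulk of the data can go through
 * the bit-sliced eight-way code.
 */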
static int cbc_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct aesbs_cbc_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while (walk.nbytes >= AES_BLOCK_SIZE) {
                unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;

                if (walk.nbytes < walk.total)
                        blocks = round_down(blocks,
                                            walk.stride / AES_BLOCK_SIZE);

                kernel_neon_begin();
                aesbs_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                  ctx->key.rk, ctx->key.rounds, blocks,
                                  walk.iv);
                kernel_neon_end();
                err = skcipher_walk_done(&walk,
                                         walk.nbytes - blocks * AES_BLOCK_SIZE);
        }

        return err;
}
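/*
 * CTR: run as many full eight-block bundles as possible through the
 * bit-sliced code, then hand the tail of the request (including any
 * final partial block) to the plain NEON routine, which takes a byte
 * count rather than a block count.
 */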
static int ctr_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct aesbs_cbc_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while (walk.nbytes > 0) {
                int blocks = (walk.nbytes / AES_BLOCK_SIZE) & ~7;
                int nbytes = walk.nbytes % (8 * AES_BLOCK_SIZE);
                const u8 *src = walk.src.virt.addr;
                u8 *dst = walk.dst.virt.addr;

                kernel_neon_begin();
                if (blocks >= 8) {
                        aesbs_ctr_encrypt(dst, src, ctx->key.rk, ctx->key.rounds,
                                          blocks, walk.iv);
                        dst += blocks * AES_BLOCK_SIZE;
                        src += blocks * AES_BLOCK_SIZE;
                }
                if (nbytes && walk.nbytes == walk.total) {
                        neon_aes_ctr_encrypt(dst, src, ctx->enc, ctx->key.rounds,
                                             nbytes, walk.iv);
                        nbytes = 0;
                }
                kernel_neon_end();
                err = skcipher_walk_done(&walk, nbytes);
        }
        return err;
}
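/*
 * An XTS key is two AES keys: the first half is the data key, the second
 * half the tweak key. The tweak key schedule goes into ctx->twkey, and
 * the data key is expanded both into the generic schedule used for the
 * ciphertext stealing tail (ctx->cts) and into the bit-sliced form.
 */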
static int aesbs_xts_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
                            unsigned int key_len)
{
        struct aesbs_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct crypto_aes_ctx rk;
        int err;

        err = xts_verify_key(tfm, in_key, key_len);
        if (err)
                return err;

        key_len /= 2;
        err = aes_expandkey(&ctx->cts, in_key, key_len);
        if (err)
                return err;

        err = aes_expandkey(&rk, in_key + key_len, key_len);
        if (err)
                return err;

        memcpy(ctx->twkey, rk.key_enc, sizeof(ctx->twkey));

        return aesbs_setkey(tfm, in_key, key_len);
}
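/*
 * XTS strategy: encrypt the IV into the initial tweak using the plain
 * NEON ECB routine, push full eight-block bundles through the bit-sliced
 * code, and hand the final blocks, including the ciphertext stealing
 * tail when the length is not block aligned, to the plain NEON XTS
 * helpers, which chain from one call to the next via 'first' and walk.iv.
 */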
static int __xts_crypt(struct skcipher_request *req, bool encrypt,
                       void (*fn)(u8 out[], u8 const in[], u8 const rk[],
                                  int rounds, int blocks, u8 iv[]))
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct aesbs_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
        int tail = req->cryptlen % (8 * AES_BLOCK_SIZE);
        struct scatterlist sg_src[2], sg_dst[2];
        struct skcipher_request subreq;
        struct scatterlist *src, *dst;
        struct skcipher_walk walk;
        int nbytes, err;
        int first = 1;
        u8 *out, *in;

        if (req->cryptlen < AES_BLOCK_SIZE)
                return -EINVAL;

        /* ensure that the cts tail is covered by a single step */
        if (unlikely(tail > 0 && tail < AES_BLOCK_SIZE)) {
                int xts_blocks = DIV_ROUND_UP(req->cryptlen,
                                              AES_BLOCK_SIZE) - 2;

                skcipher_request_set_tfm(&subreq, tfm);
                skcipher_request_set_callback(&subreq,
                                              skcipher_request_flags(req),
                                              NULL, NULL);
                skcipher_request_set_crypt(&subreq, req->src, req->dst,
                                           xts_blocks * AES_BLOCK_SIZE,
                                           req->iv);
                req = &subreq;
        } else {
                tail = 0;
        }

        err = skcipher_walk_virt(&walk, req, false);
        if (err)
                return err;

        while (walk.nbytes >= AES_BLOCK_SIZE) {
                int blocks = (walk.nbytes / AES_BLOCK_SIZE) & ~7;
                out = walk.dst.virt.addr;
                in = walk.src.virt.addr;
                nbytes = walk.nbytes;

                kernel_neon_begin();
                if (blocks >= 8) {
                        if (first == 1)
                                neon_aes_ecb_encrypt(walk.iv, walk.iv,
                                                     ctx->twkey,
                                                     ctx->key.rounds, 1);
                        first = 2;

                        fn(out, in, ctx->key.rk, ctx->key.rounds, blocks,
                           walk.iv);

                        out += blocks * AES_BLOCK_SIZE;
                        in += blocks * AES_BLOCK_SIZE;
                        nbytes -= blocks * AES_BLOCK_SIZE;
                }
                if (walk.nbytes == walk.total && nbytes > 0) {
                        if (encrypt)
                                neon_aes_xts_encrypt(out, in, ctx->cts.key_enc,
                                                     ctx->key.rounds, nbytes,
                                                     ctx->twkey, walk.iv, first);
                        else
                                neon_aes_xts_decrypt(out, in, ctx->cts.key_dec,
                                                     ctx->key.rounds, nbytes,
                                                     ctx->twkey, walk.iv, first);
                        nbytes = first = 0;
                }
                kernel_neon_end();
                err = skcipher_walk_done(&walk, nbytes);
        }

        if (err || likely(!tail))
                return err;

        /* handle ciphertext stealing */
        dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
        if (req->dst != req->src)
                dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);

        skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
                                   req->iv);

        err = skcipher_walk_virt(&walk, req, false);
        if (err)
                return err;

        out = walk.dst.virt.addr;
        in = walk.src.virt.addr;
        nbytes = walk.nbytes;

        kernel_neon_begin();
        if (encrypt)
                neon_aes_xts_encrypt(out, in, ctx->cts.key_enc, ctx->key.rounds,
                                     nbytes, ctx->twkey, walk.iv, first);
        else
                neon_aes_xts_decrypt(out, in, ctx->cts.key_dec, ctx->key.rounds,
                                     nbytes, ctx->twkey, walk.iv, first);
        kernel_neon_end();

        return skcipher_walk_done(&walk, 0);
}
static int xts_encrypt(struct skcipher_request *req)
{
        return __xts_crypt(req, true, aesbs_xts_encrypt);
}

static int xts_decrypt(struct skcipher_request *req)
{
        return __xts_crypt(req, false, aesbs_xts_decrypt);
}
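/*
 * Priority 250 should place these above the generic and scalar AES
 * implementations, while still losing out to an AES-instruction based
 * driver where one is available and registers at a higher priority.
 */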
static struct skcipher_alg aes_algs[] = { {
        .base.cra_name          = "ecb(aes)",
        .base.cra_driver_name   = "ecb-aes-neonbs",
        .base.cra_priority      = 250,
        .base.cra_blocksize     = AES_BLOCK_SIZE,
        .base.cra_ctxsize       = sizeof(struct aesbs_ctx),
        .base.cra_module        = THIS_MODULE,

        .min_keysize            = AES_MIN_KEY_SIZE,
        .max_keysize            = AES_MAX_KEY_SIZE,
        .walksize               = 8 * AES_BLOCK_SIZE,
        .setkey                 = aesbs_setkey,
        .encrypt                = ecb_encrypt,
        .decrypt                = ecb_decrypt,
}, {
        .base.cra_name          = "cbc(aes)",
        .base.cra_driver_name   = "cbc-aes-neonbs",
        .base.cra_priority      = 250,
        .base.cra_blocksize     = AES_BLOCK_SIZE,
        .base.cra_ctxsize       = sizeof(struct aesbs_cbc_ctr_ctx),
        .base.cra_module        = THIS_MODULE,

        .min_keysize            = AES_MIN_KEY_SIZE,
        .max_keysize            = AES_MAX_KEY_SIZE,
        .walksize               = 8 * AES_BLOCK_SIZE,
        .ivsize                 = AES_BLOCK_SIZE,
        .setkey                 = aesbs_cbc_ctr_setkey,
        .encrypt                = cbc_encrypt,
        .decrypt                = cbc_decrypt,
}, {
        .base.cra_name          = "ctr(aes)",
        .base.cra_driver_name   = "ctr-aes-neonbs",
        .base.cra_priority      = 250,
        .base.cra_blocksize     = 1,
        .base.cra_ctxsize       = sizeof(struct aesbs_cbc_ctr_ctx),
        .base.cra_module        = THIS_MODULE,

        .min_keysize            = AES_MIN_KEY_SIZE,
        .max_keysize            = AES_MAX_KEY_SIZE,
        .chunksize              = AES_BLOCK_SIZE,
        .walksize               = 8 * AES_BLOCK_SIZE,
        .ivsize                 = AES_BLOCK_SIZE,
        .setkey                 = aesbs_cbc_ctr_setkey,
        .encrypt                = ctr_encrypt,
        .decrypt                = ctr_encrypt,  /* CTR is its own inverse */
}, {
        .base.cra_name          = "xts(aes)",
        .base.cra_driver_name   = "xts-aes-neonbs",
        .base.cra_priority      = 250,
        .base.cra_blocksize     = AES_BLOCK_SIZE,
        .base.cra_ctxsize       = sizeof(struct aesbs_xts_ctx),
        .base.cra_module        = THIS_MODULE,

        .min_keysize            = 2 * AES_MIN_KEY_SIZE,
        .max_keysize            = 2 * AES_MAX_KEY_SIZE,
        .walksize               = 8 * AES_BLOCK_SIZE,
        .ivsize                 = AES_BLOCK_SIZE,
        .setkey                 = aesbs_xts_setkey,
        .encrypt                = xts_encrypt,
        .decrypt                = xts_decrypt,
} };
static void aes_exit(void)
{
        crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
}

static int __init aes_init(void)
{
        if (!cpu_have_named_feature(ASIMD))
                return -ENODEV;

        return crypto_register_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
}

module_init(aes_init);
module_exit(aes_exit);