// SPDX-License-Identifier: GPL-2.0-only
/*
 * linux/arch/arm64/crypto/aes-glue.c - wrapper code for ARMv8 AES
 *
 * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <asm/neon.h>
#include <asm/hwcap.h>
#include <asm/simd.h>
#include <crypto/aes.h>
#include <crypto/ctr.h>
#include <crypto/sha.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/module.h>
#include <linux/cpufeature.h>
#include <crypto/xts.h>

#include "aes-ce-setkey.h"

#ifdef USE_V8_CRYPTO_EXTENSIONS
#define MODE			"ce"
#define PRIO			300
#define aes_expandkey		ce_aes_expandkey
#define aes_ecb_encrypt		ce_aes_ecb_encrypt
#define aes_ecb_decrypt		ce_aes_ecb_decrypt
#define aes_cbc_encrypt		ce_aes_cbc_encrypt
#define aes_cbc_decrypt		ce_aes_cbc_decrypt
#define aes_cbc_cts_encrypt	ce_aes_cbc_cts_encrypt
#define aes_cbc_cts_decrypt	ce_aes_cbc_cts_decrypt
#define aes_essiv_cbc_encrypt	ce_aes_essiv_cbc_encrypt
#define aes_essiv_cbc_decrypt	ce_aes_essiv_cbc_decrypt
#define aes_ctr_encrypt		ce_aes_ctr_encrypt
#define aes_xts_encrypt		ce_aes_xts_encrypt
#define aes_xts_decrypt		ce_aes_xts_decrypt
#define aes_mac_update		ce_aes_mac_update
MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS using ARMv8 Crypto Extensions");
#else
#define MODE			"neon"
#define PRIO			200
#define aes_ecb_encrypt		neon_aes_ecb_encrypt
#define aes_ecb_decrypt		neon_aes_ecb_decrypt
#define aes_cbc_encrypt		neon_aes_cbc_encrypt
#define aes_cbc_decrypt		neon_aes_cbc_decrypt
#define aes_cbc_cts_encrypt	neon_aes_cbc_cts_encrypt
#define aes_cbc_cts_decrypt	neon_aes_cbc_cts_decrypt
#define aes_essiv_cbc_encrypt	neon_aes_essiv_cbc_encrypt
#define aes_essiv_cbc_decrypt	neon_aes_essiv_cbc_decrypt
#define aes_ctr_encrypt		neon_aes_ctr_encrypt
#define aes_xts_encrypt		neon_aes_xts_encrypt
#define aes_xts_decrypt		neon_aes_xts_decrypt
#define aes_mac_update		neon_aes_mac_update
MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS using ARMv8 NEON");
#endif
#if defined(USE_V8_CRYPTO_EXTENSIONS) || !IS_ENABLED(CONFIG_CRYPTO_AES_ARM64_BS)
MODULE_ALIAS_CRYPTO("ecb(aes)");
MODULE_ALIAS_CRYPTO("cbc(aes)");
MODULE_ALIAS_CRYPTO("ctr(aes)");
MODULE_ALIAS_CRYPTO("xts(aes)");
#endif
MODULE_ALIAS_CRYPTO("cts(cbc(aes))");
MODULE_ALIAS_CRYPTO("essiv(cbc(aes),sha256)");
MODULE_ALIAS_CRYPTO("cmac(aes)");
MODULE_ALIAS_CRYPTO("xcbc(aes)");
MODULE_ALIAS_CRYPTO("cbcmac(aes)");

MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");

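/*
 * Usage sketch (illustrative only, not part of this driver; error handling
 * elided): consumers reach these implementations through the generic
 * crypto API, e.g.
 *
 *	struct crypto_skcipher *tfm = crypto_alloc_skcipher("xts(aes)", 0, 0);
 *
 *	crypto_skcipher_setkey(tfm, key, 2 * AES_KEYSIZE_128);
 *	skcipher_request_set_crypt(req, src, dst, len, iv);
 *	crypto_skcipher_encrypt(req);
 *	crypto_free_skcipher(tfm);
 */
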
/* defined in aes-modes.S */
asmlinkage void aes_ecb_encrypt(u8 out[], u8 const in[], u32 const rk[],
				int rounds, int blocks);
asmlinkage void aes_ecb_decrypt(u8 out[], u8 const in[], u32 const rk[],
				int rounds, int blocks);

asmlinkage void aes_cbc_encrypt(u8 out[], u8 const in[], u32 const rk[],
				int rounds, int blocks, u8 iv[]);
asmlinkage void aes_cbc_decrypt(u8 out[], u8 const in[], u32 const rk[],
				int rounds, int blocks, u8 iv[]);

asmlinkage void aes_cbc_cts_encrypt(u8 out[], u8 const in[], u32 const rk[],
				    int rounds, int bytes, u8 const iv[]);
asmlinkage void aes_cbc_cts_decrypt(u8 out[], u8 const in[], u32 const rk[],
				    int rounds, int bytes, u8 const iv[]);

asmlinkage void aes_ctr_encrypt(u8 out[], u8 const in[], u32 const rk[],
				int rounds, int blocks, u8 ctr[]);

asmlinkage void aes_xts_encrypt(u8 out[], u8 const in[], u32 const rk1[],
				int rounds, int bytes, u32 const rk2[], u8 iv[],
				int first);
asmlinkage void aes_xts_decrypt(u8 out[], u8 const in[], u32 const rk1[],
				int rounds, int bytes, u32 const rk2[], u8 iv[],
				int first);

asmlinkage void aes_essiv_cbc_encrypt(u8 out[], u8 const in[], u32 const rk1[],
				      int rounds, int blocks, u8 iv[],
				      u32 const rk2[]);
asmlinkage void aes_essiv_cbc_decrypt(u8 out[], u8 const in[], u32 const rk1[],
				      int rounds, int blocks, u8 iv[],
				      u32 const rk2[]);

asmlinkage void aes_mac_update(u8 const in[], u32 const rk[], int rounds,
			       int blocks, u8 dg[], int enc_before,
			       int enc_after);

struct crypto_aes_xts_ctx {
	struct crypto_aes_ctx key1;
	struct crypto_aes_ctx __aligned(8) key2;
};

struct crypto_aes_essiv_cbc_ctx {
	struct crypto_aes_ctx key1;
	struct crypto_aes_ctx __aligned(8) key2;
	struct crypto_shash *hash;
};

struct mac_tfm_ctx {
	struct crypto_aes_ctx key;
	u8 __aligned(8) consts[];
};

struct mac_desc_ctx {
	unsigned int len;
	u8 dg[AES_BLOCK_SIZE];
};

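/*
 * The "rounds = 6 + key_length / 4" expressions below rely on the fact that
 * AES key lengths of 16, 24 and 32 bytes require 10, 12 and 14 rounds
 * respectively, so the round count can be derived from the key length.
 */
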
static int skcipher_aes_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
			       unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int ret;

	ret = aes_expandkey(ctx, in_key, key_len);
	if (ret)
		crypto_skcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);

	return ret;
}

static int __maybe_unused xts_set_key(struct crypto_skcipher *tfm,
				      const u8 *in_key, unsigned int key_len)
{
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int ret;

	ret = xts_verify_key(tfm, in_key, key_len);
	if (ret)
		return ret;

	ret = aes_expandkey(&ctx->key1, in_key, key_len / 2);
	if (!ret)
		ret = aes_expandkey(&ctx->key2, &in_key[key_len / 2],
				    key_len / 2);
	if (!ret)
		return 0;

	crypto_skcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
	return -EINVAL;
}

static int __maybe_unused essiv_cbc_set_key(struct crypto_skcipher *tfm,
					    const u8 *in_key,
					    unsigned int key_len)
{
	struct crypto_aes_essiv_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	SHASH_DESC_ON_STACK(desc, ctx->hash);
	u8 digest[SHA256_DIGEST_SIZE];
	int ret;

	ret = aes_expandkey(&ctx->key1, in_key, key_len);
	if (ret)
		goto out;

	desc->tfm = ctx->hash;
	crypto_shash_digest(desc, in_key, key_len, digest);

	ret = aes_expandkey(&ctx->key2, digest, sizeof(digest));
	if (ret)
		goto out;

	return 0;
out:
	crypto_skcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
	return -EINVAL;
}

static int __maybe_unused ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, false);

	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				ctx->key_enc, rounds, blocks);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err;
}

static int __maybe_unused ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, false);

	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				ctx->key_dec, rounds, blocks);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err;
}

static int cbc_encrypt_walk(struct skcipher_request *req,
			    struct skcipher_walk *walk)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err = 0, rounds = 6 + ctx->key_length / 4;
	unsigned int blocks;

	while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		aes_cbc_encrypt(walk->dst.virt.addr, walk->src.virt.addr,
				ctx->key_enc, rounds, blocks, walk->iv);
		kernel_neon_end();
		err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE);
	}
	return err;
}

static int __maybe_unused cbc_encrypt(struct skcipher_request *req)
{
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);
	if (err)
		return err;
	return cbc_encrypt_walk(req, &walk);
}

static int cbc_decrypt_walk(struct skcipher_request *req,
			    struct skcipher_walk *walk)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err = 0, rounds = 6 + ctx->key_length / 4;
	unsigned int blocks;

	while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		aes_cbc_decrypt(walk->dst.virt.addr, walk->src.virt.addr,
				ctx->key_dec, rounds, blocks, walk->iv);
		kernel_neon_end();
		err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE);
	}
	return err;
}

static int __maybe_unused cbc_decrypt(struct skcipher_request *req)
{
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);
	if (err)
		return err;
	return cbc_decrypt_walk(req, &walk);
}

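/*
 * cts(cbc(aes)): all but the last two blocks are handled with plain CBC;
 * the remaining (possibly partial) blocks are then passed to the ciphertext
 * stealing helpers in a single call. This follows the CBC-CS3 convention
 * implemented by the generic cts template.
 */
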
static int cts_cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key_length / 4;
	int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;
	struct scatterlist *src = req->src, *dst = req->dst;
	struct scatterlist sg_src[2], sg_dst[2];
	struct skcipher_request subreq;
	struct skcipher_walk walk;

	skcipher_request_set_tfm(&subreq, tfm);
	skcipher_request_set_callback(&subreq, skcipher_request_flags(req),
				      NULL, NULL);

	if (req->cryptlen <= AES_BLOCK_SIZE) {
		if (req->cryptlen < AES_BLOCK_SIZE)
			return -EINVAL;
		cbc_blocks = 1;
	}

	if (cbc_blocks > 0) {
		skcipher_request_set_crypt(&subreq, req->src, req->dst,
					   cbc_blocks * AES_BLOCK_SIZE,
					   req->iv);

		err = skcipher_walk_virt(&walk, &subreq, false) ?:
		      cbc_encrypt_walk(&subreq, &walk);
		if (err)
			return err;

		if (req->cryptlen == AES_BLOCK_SIZE)
			return 0;

		dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);
		if (req->dst != req->src)
			dst = scatterwalk_ffwd(sg_dst, req->dst,
					       subreq.cryptlen);
	}

	/* handle ciphertext stealing */
	skcipher_request_set_crypt(&subreq, src, dst,
				   req->cryptlen - cbc_blocks * AES_BLOCK_SIZE,
				   req->iv);

	err = skcipher_walk_virt(&walk, &subreq, false);
	if (err)
		return err;

	kernel_neon_begin();
	aes_cbc_cts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
			    ctx->key_enc, rounds, walk.nbytes, walk.iv);
	kernel_neon_end();

	return skcipher_walk_done(&walk, 0);
}

static int cts_cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key_length / 4;
	int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;
	struct scatterlist *src = req->src, *dst = req->dst;
	struct scatterlist sg_src[2], sg_dst[2];
	struct skcipher_request subreq;
	struct skcipher_walk walk;

	skcipher_request_set_tfm(&subreq, tfm);
	skcipher_request_set_callback(&subreq, skcipher_request_flags(req),
				      NULL, NULL);

	if (req->cryptlen <= AES_BLOCK_SIZE) {
		if (req->cryptlen < AES_BLOCK_SIZE)
			return -EINVAL;
		cbc_blocks = 1;
	}

	if (cbc_blocks > 0) {
		skcipher_request_set_crypt(&subreq, req->src, req->dst,
					   cbc_blocks * AES_BLOCK_SIZE,
					   req->iv);

		err = skcipher_walk_virt(&walk, &subreq, false) ?:
		      cbc_decrypt_walk(&subreq, &walk);
		if (err)
			return err;

		if (req->cryptlen == AES_BLOCK_SIZE)
			return 0;

		dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);
		if (req->dst != req->src)
			dst = scatterwalk_ffwd(sg_dst, req->dst,
					       subreq.cryptlen);
	}

	/* handle ciphertext stealing */
	skcipher_request_set_crypt(&subreq, src, dst,
				   req->cryptlen - cbc_blocks * AES_BLOCK_SIZE,
				   req->iv);

	err = skcipher_walk_virt(&walk, &subreq, false);
	if (err)
		return err;

	kernel_neon_begin();
	aes_cbc_cts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
			    ctx->key_dec, rounds, walk.nbytes, walk.iv);
	kernel_neon_end();

	return skcipher_walk_done(&walk, 0);
}

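/*
 * ESSIV: the IV passed in by the caller is encrypted with a second key
 * derived as SHA-256 of the first (see essiv_cbc_set_key()), which makes
 * per-sector IVs unpredictable and defeats watermarking attacks on
 * CBC-encrypted block devices.
 */
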
static int __maybe_unused essiv_cbc_init_tfm(struct crypto_skcipher *tfm)
{
	struct crypto_aes_essiv_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);

	ctx->hash = crypto_alloc_shash("sha256", 0, 0);

	return PTR_ERR_OR_ZERO(ctx->hash);
}

static void __maybe_unused essiv_cbc_exit_tfm(struct crypto_skcipher *tfm)
{
	struct crypto_aes_essiv_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_shash(ctx->hash);
}

static int __maybe_unused essiv_cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_essiv_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key1.key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, false);

	blocks = walk.nbytes / AES_BLOCK_SIZE;
	if (blocks) {
		kernel_neon_begin();
		aes_essiv_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				      ctx->key1.key_enc, rounds, blocks,
				      req->iv, ctx->key2.key_enc);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err ?: cbc_encrypt_walk(req, &walk);
}

static int __maybe_unused essiv_cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_essiv_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key1.key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, false);

	blocks = walk.nbytes / AES_BLOCK_SIZE;
	if (blocks) {
		kernel_neon_begin();
		aes_essiv_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				      ctx->key1.key_dec, rounds, blocks,
				      req->iv, ctx->key2.key_enc);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err ?: cbc_decrypt_walk(req, &walk);
}

static int ctr_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	int blocks;

	err = skcipher_walk_virt(&walk, req, false);

	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		aes_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				ctx->key_enc, rounds, blocks, walk.iv);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	if (walk.nbytes) {
		u8 __aligned(8) tail[AES_BLOCK_SIZE];
		unsigned int nbytes = walk.nbytes;
		u8 *tdst = walk.dst.virt.addr;
		u8 *tsrc = walk.src.virt.addr;

		/*
		 * Tell aes_ctr_encrypt() to process a tail block.
		 */
		blocks = -1;

		kernel_neon_begin();
		aes_ctr_encrypt(tail, NULL, ctx->key_enc, rounds,
				blocks, walk.iv);
		kernel_neon_end();
		crypto_xor_cpy(tdst, tsrc, tail, nbytes);
		err = skcipher_walk_done(&walk, 0);
	}
	return err;
}

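/*
 * Synchronous fallback for CTR: when the NEON unit may not be used (as
 * reported by crypto_simd_usable()), ctr_encrypt_sync() falls back to the
 * scalar AES library code one block at a time via crypto_ctr_encrypt_walk().
 */
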
static void ctr_encrypt_one(struct crypto_skcipher *tfm, const u8 *src, u8 *dst)
{
	const struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	unsigned long flags;

	/*
	 * Temporarily disable interrupts to avoid races where
	 * cachelines are evicted when the CPU is interrupted
	 * to do something else.
	 */
	local_irq_save(flags);
	aes_encrypt(ctx, dst, src);
	local_irq_restore(flags);
}

static int __maybe_unused ctr_encrypt_sync(struct skcipher_request *req)
{
	if (!crypto_simd_usable())
		return crypto_ctr_encrypt_walk(req, ctr_encrypt_one);

	return ctr_encrypt(req);
}

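/*
 * XTS with ciphertext stealing: when the request is not a whole number of
 * blocks, the bulk of the input is processed first, and the last full block
 * is then combined with the trailing partial block in a final pass over a
 * sub-request of AES_BLOCK_SIZE + tail bytes.
 */
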
static int __maybe_unused xts_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key1.key_length / 4;
	int tail = req->cryptlen % AES_BLOCK_SIZE;
	struct scatterlist sg_src[2], sg_dst[2];
	struct skcipher_request subreq;
	struct scatterlist *src, *dst;
	struct skcipher_walk walk;

	if (req->cryptlen < AES_BLOCK_SIZE)
		return -EINVAL;

	err = skcipher_walk_virt(&walk, req, false);

	if (unlikely(tail > 0 && walk.nbytes < walk.total)) {
		int xts_blocks = DIV_ROUND_UP(req->cryptlen,
					      AES_BLOCK_SIZE) - 2;

		skcipher_walk_abort(&walk);

		skcipher_request_set_tfm(&subreq, tfm);
		skcipher_request_set_callback(&subreq,
					      skcipher_request_flags(req),
					      NULL, NULL);
		skcipher_request_set_crypt(&subreq, req->src, req->dst,
					   xts_blocks * AES_BLOCK_SIZE,
					   req->iv);
		req = &subreq;
		err = skcipher_walk_virt(&walk, req, false);
	} else {
		tail = 0;
	}

	for (first = 1; walk.nbytes >= AES_BLOCK_SIZE; first = 0) {
		int nbytes = walk.nbytes;

		if (walk.nbytes < walk.total)
			nbytes &= ~(AES_BLOCK_SIZE - 1);

		kernel_neon_begin();
		aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				ctx->key1.key_enc, rounds, nbytes,
				ctx->key2.key_enc, walk.iv, first);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
	}

	if (err || likely(!tail))
		return err;

	dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
	if (req->dst != req->src)
		dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);

	skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
				   req->iv);

	err = skcipher_walk_virt(&walk, &subreq, false);
	if (err)
		return err;

	kernel_neon_begin();
	aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
			ctx->key1.key_enc, rounds, walk.nbytes,
			ctx->key2.key_enc, walk.iv, first);
	kernel_neon_end();

	return skcipher_walk_done(&walk, 0);
}

static int __maybe_unused xts_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key1.key_length / 4;
	int tail = req->cryptlen % AES_BLOCK_SIZE;
	struct scatterlist sg_src[2], sg_dst[2];
	struct skcipher_request subreq;
	struct scatterlist *src, *dst;
	struct skcipher_walk walk;

	if (req->cryptlen < AES_BLOCK_SIZE)
		return -EINVAL;

	err = skcipher_walk_virt(&walk, req, false);

	if (unlikely(tail > 0 && walk.nbytes < walk.total)) {
		int xts_blocks = DIV_ROUND_UP(req->cryptlen,
					      AES_BLOCK_SIZE) - 2;

		skcipher_walk_abort(&walk);

		skcipher_request_set_tfm(&subreq, tfm);
		skcipher_request_set_callback(&subreq,
					      skcipher_request_flags(req),
					      NULL, NULL);
		skcipher_request_set_crypt(&subreq, req->src, req->dst,
					   xts_blocks * AES_BLOCK_SIZE,
					   req->iv);
		req = &subreq;
		err = skcipher_walk_virt(&walk, req, false);
	} else {
		tail = 0;
	}

	for (first = 1; walk.nbytes >= AES_BLOCK_SIZE; first = 0) {
		int nbytes = walk.nbytes;

		if (walk.nbytes < walk.total)
			nbytes &= ~(AES_BLOCK_SIZE - 1);

		kernel_neon_begin();
		aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				ctx->key1.key_dec, rounds, nbytes,
				ctx->key2.key_enc, walk.iv, first);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
	}

	if (err || likely(!tail))
		return err;

	dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
	if (req->dst != req->src)
		dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);

	skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
				   req->iv);

	err = skcipher_walk_virt(&walk, &subreq, false);
	if (err)
		return err;

	kernel_neon_begin();
	aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
			ctx->key1.key_dec, rounds, walk.nbytes,
			ctx->key2.key_enc, walk.iv, first);
	kernel_neon_end();

	return skcipher_walk_done(&walk, 0);
}

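/*
 * Entries whose cra_name carries a "__" prefix and the CRYPTO_ALG_INTERNAL
 * flag are not usable directly: aes_init() wraps each of them in a SIMD
 * helper (simd_skcipher_create_compat()) that defers to an asynchronous
 * queue whenever the NEON unit is not available.
 */
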
static struct skcipher_alg aes_algs[] = { {
#if defined(USE_V8_CRYPTO_EXTENSIONS) || !IS_ENABLED(CONFIG_CRYPTO_AES_ARM64_BS)
	.base = {
		.cra_name		= "__ecb(aes)",
		.cra_driver_name	= "__ecb-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= ecb_encrypt,
	.decrypt	= ecb_decrypt,
}, {
	.base = {
		.cra_name		= "__cbc(aes)",
		.cra_driver_name	= "__cbc-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= cbc_encrypt,
	.decrypt	= cbc_decrypt,
}, {
	.base = {
		.cra_name		= "__ctr(aes)",
		.cra_driver_name	= "__ctr-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.chunksize	= AES_BLOCK_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= ctr_encrypt,
	.decrypt	= ctr_encrypt,
}, {
	.base = {
		.cra_name		= "ctr(aes)",
		.cra_driver_name	= "ctr-aes-" MODE,
		.cra_priority		= PRIO - 1,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.chunksize	= AES_BLOCK_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= ctr_encrypt_sync,
	.decrypt	= ctr_encrypt_sync,
}, {
	.base = {
		.cra_name		= "__xts(aes)",
		.cra_driver_name	= "__xts-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct crypto_aes_xts_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= 2 * AES_MIN_KEY_SIZE,
	.max_keysize	= 2 * AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.walksize	= 2 * AES_BLOCK_SIZE,
	.setkey		= xts_set_key,
	.encrypt	= xts_encrypt,
	.decrypt	= xts_decrypt,
}, {
#endif
	.base = {
		.cra_name		= "__cts(cbc(aes))",
		.cra_driver_name	= "__cts-cbc-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.walksize	= 2 * AES_BLOCK_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= cts_cbc_encrypt,
	.decrypt	= cts_cbc_decrypt,
}, {
	.base = {
		.cra_name		= "__essiv(cbc(aes),sha256)",
		.cra_driver_name	= "__essiv-cbc-aes-sha256-" MODE,
		.cra_priority		= PRIO + 1,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct crypto_aes_essiv_cbc_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.setkey		= essiv_cbc_set_key,
	.encrypt	= essiv_cbc_encrypt,
	.decrypt	= essiv_cbc_decrypt,
	.init		= essiv_cbc_init_tfm,
	.exit		= essiv_cbc_exit_tfm,
} };

static int cbcmac_setkey(struct crypto_shash *tfm, const u8 *in_key,
			 unsigned int key_len)
{
	struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	int err;

	err = aes_expandkey(&ctx->key, in_key, key_len);
	if (err)
		crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);

	return err;
}

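/*
 * Doubling in GF(2^128), used to derive the two CMAC subkeys from the
 * encrypted zero vector (NIST SP 800-38B): shift left by one bit and, if
 * the top bit was set, reduce by the field polynomial, which XORs 0x87
 * into the low byte.
 */
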
static void cmac_gf128_mul_by_x(be128 *y, const be128 *x)
{
	u64 a = be64_to_cpu(x->a);
	u64 b = be64_to_cpu(x->b);

	y->a = cpu_to_be64((a << 1) | (b >> 63));
	y->b = cpu_to_be64((b << 1) ^ ((a >> 63) ? 0x87 : 0));
}

static int cmac_setkey(struct crypto_shash *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	be128 *consts = (be128 *)ctx->consts;
	int rounds = 6 + key_len / 4;
	int err;

	err = cbcmac_setkey(tfm, in_key, key_len);
	if (err)
		return err;

	/* encrypt the zero vector */
	kernel_neon_begin();
	aes_ecb_encrypt(ctx->consts, (u8[AES_BLOCK_SIZE]){}, ctx->key.key_enc,
			rounds, 1);
	kernel_neon_end();

	cmac_gf128_mul_by_x(consts, consts);
	cmac_gf128_mul_by_x(consts + 1, consts);

	return 0;
}

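/*
 * XCBC (RFC 3566) derives three subkeys by encrypting the constants
 * 0x01..01, 0x02..02 and 0x03..03 under the user key: K1 replaces the
 * CBC-MAC key, while K2/K3 are XORed into the final (complete/padded)
 * block. K2/K3 are stored in ctx->consts by the two-block ECB call below.
 */
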
static int xcbc_setkey(struct crypto_shash *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	static u8 const ks[3][AES_BLOCK_SIZE] = {
		{ [0 ... AES_BLOCK_SIZE - 1] = 0x1 },
		{ [0 ... AES_BLOCK_SIZE - 1] = 0x2 },
		{ [0 ... AES_BLOCK_SIZE - 1] = 0x3 },
	};

	struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	int rounds = 6 + key_len / 4;
	u8 key[AES_BLOCK_SIZE];
	int err;

	err = cbcmac_setkey(tfm, in_key, key_len);
	if (err)
		return err;

	kernel_neon_begin();
	aes_ecb_encrypt(key, ks[0], ctx->key.key_enc, rounds, 1);
	aes_ecb_encrypt(ctx->consts, ks[1], ctx->key.key_enc, rounds, 2);
	kernel_neon_end();

	return cbcmac_setkey(tfm, key, sizeof(key));
}

static int mac_init(struct shash_desc *desc)
{
	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);

	memset(ctx->dg, 0, AES_BLOCK_SIZE);
	ctx->len = 0;

	return 0;
}

static void mac_do_update(struct crypto_aes_ctx *ctx, u8 const in[], int blocks,
			  u8 dg[], int enc_before, int enc_after)
{
	int rounds = 6 + ctx->key_length / 4;

	if (crypto_simd_usable()) {
		kernel_neon_begin();
		aes_mac_update(in, ctx->key_enc, rounds, blocks, dg, enc_before,
			       enc_after);
		kernel_neon_end();
	} else {
		if (enc_before)
			aes_encrypt(ctx, dg, dg);

		while (blocks--) {
			crypto_xor(dg, in, AES_BLOCK_SIZE);
			in += AES_BLOCK_SIZE;

			if (blocks || enc_after)
				aes_encrypt(ctx, dg, dg);
		}
	}
}

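/*
 * Note that mac_update() deliberately leaves a final complete block pending
 * in the buffer (ctx->len == AES_BLOCK_SIZE) instead of processing it
 * eagerly, so that cmac_final() can still choose the right subkey and
 * decide whether padding is needed.
 */
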
static int mac_update(struct shash_desc *desc, const u8 *p, unsigned int len)
{
	struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);

	while (len > 0) {
		unsigned int l;

		if ((ctx->len % AES_BLOCK_SIZE) == 0 &&
		    (ctx->len + len) > AES_BLOCK_SIZE) {

			int blocks = len / AES_BLOCK_SIZE;

			len %= AES_BLOCK_SIZE;

			mac_do_update(&tctx->key, p, blocks, ctx->dg,
				      (ctx->len != 0), (len != 0));

			p += blocks * AES_BLOCK_SIZE;

			if (!len) {
				ctx->len = AES_BLOCK_SIZE;
				break;
			}
			ctx->len = 0;
		}

		l = min(len, AES_BLOCK_SIZE - ctx->len);

		if (l <= AES_BLOCK_SIZE) {
			crypto_xor(ctx->dg + ctx->len, p, l);
			ctx->len += l;
			len -= l;
			p += l;
		}
	}

	return 0;
}

static int cbcmac_final(struct shash_desc *desc, u8 *out)
{
	struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);

	mac_do_update(&tctx->key, NULL, 0, ctx->dg, (ctx->len != 0), 0);

	memcpy(out, ctx->dg, AES_BLOCK_SIZE);

	return 0;
}

static int cmac_final(struct shash_desc *desc, u8 *out)
{
	struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);
	u8 *consts = tctx->consts;

	if (ctx->len != AES_BLOCK_SIZE) {
		ctx->dg[ctx->len] ^= 0x80;
		consts += AES_BLOCK_SIZE;
	}

	mac_do_update(&tctx->key, consts, 1, ctx->dg, 0, 1);

	memcpy(out, ctx->dg, AES_BLOCK_SIZE);

	return 0;
}

static struct shash_alg mac_algs[] = { {
	.base.cra_name		= "cmac(aes)",
	.base.cra_driver_name	= "cmac-aes-" MODE,
	.base.cra_priority	= PRIO,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct mac_tfm_ctx) +
				  2 * AES_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,

	.digestsize		= AES_BLOCK_SIZE,
	.init			= mac_init,
	.update			= mac_update,
	.final			= cmac_final,
	.setkey			= cmac_setkey,
	.descsize		= sizeof(struct mac_desc_ctx),
}, {
	.base.cra_name		= "xcbc(aes)",
	.base.cra_driver_name	= "xcbc-aes-" MODE,
	.base.cra_priority	= PRIO,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct mac_tfm_ctx) +
				  2 * AES_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,

	.digestsize		= AES_BLOCK_SIZE,
	.init			= mac_init,
	.update			= mac_update,
	.final			= cmac_final,
	.setkey			= xcbc_setkey,
	.descsize		= sizeof(struct mac_desc_ctx),
}, {
	.base.cra_name		= "cbcmac(aes)",
	.base.cra_driver_name	= "cbcmac-aes-" MODE,
	.base.cra_priority	= PRIO,
	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct mac_tfm_ctx),
	.base.cra_module	= THIS_MODULE,

	.digestsize		= AES_BLOCK_SIZE,
	.init			= mac_init,
	.update			= mac_update,
	.final			= cbcmac_final,
	.setkey			= cbcmac_setkey,
	.descsize		= sizeof(struct mac_desc_ctx),
} };

static struct simd_skcipher_alg *aes_simd_algs[ARRAY_SIZE(aes_algs)];

static void aes_exit(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(aes_simd_algs); i++)
		if (aes_simd_algs[i])
			simd_skcipher_free(aes_simd_algs[i]);

	crypto_unregister_shashes(mac_algs, ARRAY_SIZE(mac_algs));
	crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
}

static int __init aes_init(void)
{
	struct simd_skcipher_alg *simd;
	const char *basename;
	const char *algname;
	const char *drvname;
	int err;
	int i;

	err = crypto_register_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
	if (err)
		return err;

	err = crypto_register_shashes(mac_algs, ARRAY_SIZE(mac_algs));
	if (err)
		goto unregister_ciphers;

	for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
		if (!(aes_algs[i].base.cra_flags & CRYPTO_ALG_INTERNAL))
			continue;

		algname = aes_algs[i].base.cra_name + 2;
		drvname = aes_algs[i].base.cra_driver_name + 2;
		basename = aes_algs[i].base.cra_driver_name;
		simd = simd_skcipher_create_compat(algname, drvname, basename);
		err = PTR_ERR(simd);
		if (IS_ERR(simd))
			goto unregister_simds;

		aes_simd_algs[i] = simd;
	}

	return 0;

unregister_simds:
	aes_exit();
	return err;
unregister_ciphers:
	crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
	return err;
}

#ifdef USE_V8_CRYPTO_EXTENSIONS
module_cpu_feature_match(AES, aes_init);
#else
module_init(aes_init);
EXPORT_SYMBOL(neon_aes_ecb_encrypt);
EXPORT_SYMBOL(neon_aes_cbc_encrypt);
EXPORT_SYMBOL(neon_aes_xts_encrypt);
EXPORT_SYMBOL(neon_aes_xts_decrypt);
#endif
module_exit(aes_exit);