// SPDX-License-Identifier: GPL-2.0-only
/*
 * aes-ccm-glue.c - AES-CCM transform for ARMv8 with Crypto Extensions
 *
 * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <asm/neon.h>
#include <asm/simd.h>
#include <asm/unaligned.h>
#include <crypto/aes.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <linux/module.h>

#include "aes-ce-setkey.h"

static int num_rounds(struct crypto_aes_ctx *ctx)
{
	/*
	 * # of rounds specified by AES:
	 * 128 bit key		10 rounds
	 * 192 bit key		12 rounds
	 * 256 bit key		14 rounds
	 * => n byte key	=> 6 + (n/4) rounds
	 */
	return 6 + ctx->key_length / 4;
}

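/*
 * The block-level work is done by asm routines built on the ARMv8 Crypto
 * Extensions (in the accompanying aes-ce-ccm-core.S); the glue code below
 * only marshals data into and out of them and provides a scalar fallback.
 */
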
asmlinkage void ce_aes_ccm_auth_data(u8 mac[], u8 const in[], u32 abytes,
				     u32 *macp, u32 const rk[], u32 rounds);

asmlinkage void ce_aes_ccm_encrypt(u8 out[], u8 const in[], u32 cbytes,
				   u32 const rk[], u32 rounds, u8 mac[],
				   u8 ctr[]);

asmlinkage void ce_aes_ccm_decrypt(u8 out[], u8 const in[], u32 cbytes,
				   u32 const rk[], u32 rounds, u8 mac[],
				   u8 ctr[]);

asmlinkage void ce_aes_ccm_final(u8 mac[], u8 const ctr[], u32 const rk[],
				 u32 rounds);

static int ccm_setkey(struct crypto_aead *tfm, const u8 *in_key,
		      unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(tfm);
	int ret;

	ret = ce_aes_expandkey(ctx, in_key, key_len);
	if (!ret)
		return 0;

	tfm->base.crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
	return -EINVAL;
}

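/*
 * RFC 3610 only permits even tag lengths from 4 to 16 bytes; the check
 * below rejects odd and too-short sizes, while the AEAD API enforces the
 * upper bound via .maxauthsize.
 */
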
static int ccm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	if ((authsize & 1) || authsize < 4)
		return -EINVAL;
	return 0;
}

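/*
 * For illustration: 802.11 CCMP uses a 13 byte nonce with L == 2, so the
 * caller stores L - 1 == 1 in iv[0] and the message length occupies the
 * last two bytes of the counter block constructed below.
 */
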
static int ccm_init_mac(struct aead_request *req, u8 maciv[], u32 msglen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	__be32 *n = (__be32 *)&maciv[AES_BLOCK_SIZE - 8];
	u32 l = req->iv[0] + 1;

	/* verify that CCM dimension 'L' is set correctly in the IV */
	if (l < 2 || l > 8)
		return -EINVAL;

	/* verify that msglen can in fact be represented in L bytes */
	if (l < 4 && msglen >> (8 * l))
		return -EOVERFLOW;

	/*
	 * Even if the CCM spec allows L values of up to 8, the Linux cryptoapi
	 * uses a u32 type to represent msglen so the top 4 bytes are always 0.
	 */
	n[0] = 0;
	n[1] = cpu_to_be32(msglen);

	memcpy(maciv, req->iv, AES_BLOCK_SIZE - l);

	/*
	 * Meaning of byte 0 according to CCM spec (RFC 3610/NIST 800-38C)
	 * - bits 0..2	: max # of bytes required to represent msglen, minus 1
	 *                (already set by caller)
	 * - bits 3..5	: size of auth tag (1 => 4 bytes, 2 => 6 bytes, etc)
	 * - bit 6	: indicates presence of authenticate-only data
	 */
	maciv[0] |= (crypto_aead_authsize(aead) - 2) << 2;
	if (req->assoclen)
		maciv[0] |= 0x40;

	memset(&req->iv[AES_BLOCK_SIZE - l], 0, l);
	return 0;
}

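/*
 * Fold @abytes bytes of AAD into the CBC-MAC. The NEON path defers to the
 * asm routine; the scalar fallback tracks the fill level of the current
 * MAC block in *macp so the AAD may arrive in arbitrarily sized chunks.
 */
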
static void ccm_update_mac(struct crypto_aes_ctx *key, u8 mac[], u8 const in[],
			   u32 abytes, u32 *macp)
{
	if (crypto_simd_usable()) {
		kernel_neon_begin();
		ce_aes_ccm_auth_data(mac, in, abytes, macp, key->key_enc,
				     num_rounds(key));
		kernel_neon_end();
	} else {
		if (*macp > 0 && *macp < AES_BLOCK_SIZE) {
			int added = min(abytes, AES_BLOCK_SIZE - *macp);

			crypto_xor(&mac[*macp], in, added);

			*macp += added;
			in += added;
			abytes -= added;
		}

		while (abytes >= AES_BLOCK_SIZE) {
			aes_encrypt(key, mac, mac);
			crypto_xor(mac, in, AES_BLOCK_SIZE);

			in += AES_BLOCK_SIZE;
			abytes -= AES_BLOCK_SIZE;
		}

		if (abytes > 0) {
			aes_encrypt(key, mac, mac);
			crypto_xor(mac, in, abytes);
			*macp = abytes;
		}
	}
}

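/*
 * CCM prepends the AAD with an encoding of its length: two big-endian
 * bytes for lengths below 0xff00, otherwise the marker 0xfffe followed by
 * the length as a 32-bit big-endian value (RFC 3610, section 2.2).
 */
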
static void ccm_calculate_auth_mac(struct aead_request *req, u8 mac[])
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	struct __packed { __be16 l; __be32 h; u16 len; } ltag;
	struct scatter_walk walk;
	u32 len = req->assoclen;
	u32 macp = 0;

	/* prepend the AAD with a length tag */
	if (len < 0xff00) {
		ltag.l = cpu_to_be16(len);
		ltag.len = 2;
	} else {
		ltag.l = cpu_to_be16(0xfffe);
		put_unaligned_be32(len, &ltag.h);
		ltag.len = 6;
	}

	ccm_update_mac(ctx, mac, (u8 *)&ltag, ltag.len, &macp);
	scatterwalk_start(&walk, req->src);

	do {
		u32 n = scatterwalk_clamp(&walk, len);
		u8 *p;

		if (!n) {
			scatterwalk_start(&walk, sg_next(walk.sg));
			n = scatterwalk_clamp(&walk, len);
		}
		p = scatterwalk_map(&walk);
		ccm_update_mac(ctx, mac, p, n, &macp);
		len -= n;

		scatterwalk_unmap(p);
		scatterwalk_advance(&walk, n);
		scatterwalk_done(&walk, 0, len);
	} while (len);
}

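/*
 * Scalar fallback used when crypto_simd_usable() rules out touching the
 * NEON registers: CTR-encrypts the payload and updates the CBC-MAC one
 * block at a time via the AES library. @iv0 holds the original counter
 * block; its encryption (S_0) is xored into the final CBC-MAC value to
 * form the tag, as CCM specifies.
 */
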
static int ccm_crypt_fallback(struct skcipher_walk *walk, u8 mac[], u8 iv0[],
			      struct crypto_aes_ctx *ctx, bool enc)
{
	u8 buf[AES_BLOCK_SIZE];
	int err = 0;

	while (walk->nbytes) {
		int blocks = walk->nbytes / AES_BLOCK_SIZE;
		u32 tail = walk->nbytes % AES_BLOCK_SIZE;
		u8 *dst = walk->dst.virt.addr;
		u8 *src = walk->src.virt.addr;
		u32 nbytes = walk->nbytes;

		if (nbytes == walk->total && tail > 0) {
			/* last chunk: process the partial tail block too */
			blocks++;
			tail = 0;
		}

		do {
			u32 bsize = AES_BLOCK_SIZE;

			if (nbytes < AES_BLOCK_SIZE)
				bsize = nbytes;

			crypto_inc(walk->iv, AES_BLOCK_SIZE);
			aes_encrypt(ctx, buf, walk->iv);
			aes_encrypt(ctx, mac, mac);
			if (enc)
				crypto_xor(mac, src, bsize);
			crypto_xor_cpy(dst, src, buf, bsize);
			if (!enc)
				crypto_xor(mac, dst, bsize);
			dst += bsize;
			src += bsize;
			nbytes -= bsize;
		} while (--blocks);

		err = skcipher_walk_done(walk, tail);
	}

	if (!err) {
		aes_encrypt(ctx, buf, iv0);
		aes_encrypt(ctx, mac, mac);
		crypto_xor(mac, buf, AES_BLOCK_SIZE);
	}
	return err;
}

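/*
 * AEAD encrypt entry point: build the B_0 block, MAC the AAD (if any),
 * CTR-encrypt the plaintext while extending the MAC over it, then append
 * the truncated tag to the destination scatterlist.
 */
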
static int ccm_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	struct skcipher_walk walk;
	u8 __aligned(8) mac[AES_BLOCK_SIZE];
	u8 buf[AES_BLOCK_SIZE];
	u32 len = req->cryptlen;
	int err;

	err = ccm_init_mac(req, mac, len);
	if (err)
		return err;

	if (req->assoclen)
		ccm_calculate_auth_mac(req, mac);

	/* preserve the original iv for the final round */
	memcpy(buf, req->iv, AES_BLOCK_SIZE);

	err = skcipher_walk_aead_encrypt(&walk, req, false);

	if (crypto_simd_usable()) {
		while (walk.nbytes) {
			u32 tail = walk.nbytes % AES_BLOCK_SIZE;

			if (walk.nbytes == walk.total)
				tail = 0;

			kernel_neon_begin();
			ce_aes_ccm_encrypt(walk.dst.virt.addr,
					   walk.src.virt.addr,
					   walk.nbytes - tail, ctx->key_enc,
					   num_rounds(ctx), mac, walk.iv);
			kernel_neon_end();

			err = skcipher_walk_done(&walk, tail);
		}
		if (!err) {
			kernel_neon_begin();
			ce_aes_ccm_final(mac, buf, ctx->key_enc,
					 num_rounds(ctx));
			kernel_neon_end();
		}
	} else {
		err = ccm_crypt_fallback(&walk, mac, buf, ctx, true);
	}
	if (err)
		return err;

	/* copy authtag to end of dst */
	scatterwalk_map_and_copy(mac, req->dst, req->assoclen + req->cryptlen,
				 crypto_aead_authsize(aead), 1);

	return 0;
}

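/*
 * AEAD decrypt mirrors ccm_encrypt(), except that req->cryptlen includes
 * the tag: the expected tag is read from the end of @src and compared
 * with crypto_memneq() so the comparison does not leak timing information.
 */
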
static int ccm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int authsize = crypto_aead_authsize(aead);
	struct skcipher_walk walk;
	u8 __aligned(8) mac[AES_BLOCK_SIZE];
	u8 buf[AES_BLOCK_SIZE];
	u32 len = req->cryptlen - authsize;
	int err;

	err = ccm_init_mac(req, mac, len);
	if (err)
		return err;

	if (req->assoclen)
		ccm_calculate_auth_mac(req, mac);

	/* preserve the original iv for the final round */
	memcpy(buf, req->iv, AES_BLOCK_SIZE);

	err = skcipher_walk_aead_decrypt(&walk, req, false);

	if (crypto_simd_usable()) {
		while (walk.nbytes) {
			u32 tail = walk.nbytes % AES_BLOCK_SIZE;

			if (walk.nbytes == walk.total)
				tail = 0;

			kernel_neon_begin();
			ce_aes_ccm_decrypt(walk.dst.virt.addr,
					   walk.src.virt.addr,
					   walk.nbytes - tail, ctx->key_enc,
					   num_rounds(ctx), mac, walk.iv);
			kernel_neon_end();

			err = skcipher_walk_done(&walk, tail);
		}
		if (!err) {
			kernel_neon_begin();
			ce_aes_ccm_final(mac, buf, ctx->key_enc,
					 num_rounds(ctx));
			kernel_neon_end();
		}
	} else {
		err = ccm_crypt_fallback(&walk, mac, buf, ctx, false);
	}

	if (err)
		return err;

	/* compare calculated auth tag with the stored one */
	scatterwalk_map_and_copy(buf, req->src,
				 req->assoclen + req->cryptlen - authsize,
				 authsize, 0);

	if (crypto_memneq(mac, buf, authsize))
		return -EBADMSG;
	return 0;
}

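/*
 * CCM behaves like a stream cipher, hence cra_blocksize == 1; .chunksize
 * advertises the AES block granularity of the underlying CTR stream.
 */
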
static struct aead_alg ccm_aes_alg = {
	.base = {
		.cra_name		= "ccm(aes)",
		.cra_driver_name	= "ccm-aes-ce",
		.cra_priority		= 300,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.ivsize		= AES_BLOCK_SIZE,
	.chunksize	= AES_BLOCK_SIZE,
	.maxauthsize	= AES_BLOCK_SIZE,
	.setkey		= ccm_setkey,
	.setauthsize	= ccm_setauthsize,
	.encrypt	= ccm_encrypt,
	.decrypt	= ccm_decrypt,
};

static int __init aes_mod_init(void)
{
	if (!cpu_have_named_feature(AES))
		return -ENODEV;
	return crypto_register_aead(&ccm_aes_alg);
}

static void __exit aes_mod_exit(void)
{
	crypto_unregister_aead(&ccm_aes_alg);
}

module_init(aes_mod_init);
module_exit(aes_mod_exit);

MODULE_DESCRIPTION("Synchronous AES in CCM mode using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("ccm(aes)");
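
/*
 * Usage sketch (illustrative, error handling omitted): kernel users reach
 * this driver through the generic AEAD API, e.g.
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("ccm(aes)", 0, 0);
 *
 *	crypto_aead_setkey(tfm, key, AES_KEYSIZE_128);
 *	crypto_aead_setauthsize(tfm, 8);
 *	...
 *	crypto_free_aead(tfm);
 */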