// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Support for Intel AES-NI instructions. This file contains glue
 * code, the real AES implementation is in aesni-intel_asm.S.
 *
 * Copyright (C) 2008, Intel Corp.
 *    Author: Huang Ying <ying.huang@intel.com>
 *
 * Added RFC4106 AES-GCM support for 128-bit keys under the AEAD
 * interface for 64-bit kernels.
 *    Authors: Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 */
#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/module.h>
#include <linux/err.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/ctr.h>
#include <crypto/b128ops.h>
#include <crypto/gcm.h>
#include <crypto/xts.h>
#include <asm/cpu_device_id.h>
#include <asm/simd.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <linux/workqueue.h>
#include <linux/spinlock.h>
#ifdef CONFIG_X86_64
#include <asm/crypto/glue_helper.h>
#endif
#define AESNI_ALIGN	16
#define AESNI_ALIGN_ATTR __attribute__ ((__aligned__(AESNI_ALIGN)))
#define AES_BLOCK_MASK	(~(AES_BLOCK_SIZE - 1))
#define RFC4106_HASH_SUBKEY_SIZE 16
#define AESNI_ALIGN_EXTRA ((AESNI_ALIGN - 1) & ~(CRYPTO_MINALIGN - 1))
#define CRYPTO_AES_CTX_SIZE (sizeof(struct crypto_aes_ctx) + AESNI_ALIGN_EXTRA)
#define XTS_AES_CTX_SIZE (sizeof(struct aesni_xts_ctx) + AESNI_ALIGN_EXTRA)
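/*
 * The *_CTX_SIZE macros above deliberately over-allocate by
 * AESNI_ALIGN_EXTRA so that aes_ctx() below can realign the context
 * pointer at runtime.  For example, on a config where CRYPTO_MINALIGN
 * is 8, AESNI_ALIGN_EXTRA works out to 8 spare bytes: enough to bump
 * an 8-byte-aligned pointer up to the next 16-byte boundary.
 */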
/* This data is stored at the end of the crypto_tfm struct.
 * It's a type of per "session" data storage location.
 * This needs to be 16 byte aligned.
 */
struct aesni_rfc4106_gcm_ctx {
	u8 hash_subkey[16] AESNI_ALIGN_ATTR;
	struct crypto_aes_ctx aes_key_expanded AESNI_ALIGN_ATTR;
	u8 nonce[4];
};

struct generic_gcmaes_ctx {
	u8 hash_subkey[16] AESNI_ALIGN_ATTR;
	struct crypto_aes_ctx aes_key_expanded AESNI_ALIGN_ATTR;
};

struct aesni_xts_ctx {
	u8 raw_tweak_ctx[sizeof(struct crypto_aes_ctx)] AESNI_ALIGN_ATTR;
	u8 raw_crypt_ctx[sizeof(struct crypto_aes_ctx)] AESNI_ALIGN_ATTR;
};
#define GCM_BLOCK_LEN 16
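/*
 * Per-request GCM state handed to the assembly routines below.  The
 * field layout is shared with the asm implementations, so it must not
 * be reordered or repacked.
 */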
struct gcm_context_data {
	/* init, update and finalize context data */
	u8 aad_hash[GCM_BLOCK_LEN];
	u64 aad_length;
	u64 in_length;
	u8 partial_block_enc_key[GCM_BLOCK_LEN];
	u8 orig_IV[GCM_BLOCK_LEN];
	u8 current_counter[GCM_BLOCK_LEN];
	u64 partial_block_len;
	u64 unused;
	u8 hash_keys[GCM_BLOCK_LEN * 16];
};
asmlinkage int aesni_set_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
			     unsigned int key_len);
asmlinkage void aesni_enc(const void *ctx, u8 *out, const u8 *in);
asmlinkage void aesni_dec(const void *ctx, u8 *out, const u8 *in);
asmlinkage void aesni_ecb_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len);
asmlinkage void aesni_ecb_dec(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len);
asmlinkage void aesni_cbc_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_cbc_dec(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);

#define AVX_GEN2_OPTSIZE 640
#define AVX_GEN4_OPTSIZE 4096
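/*
 * Below these request sizes the heavier AVX2/AVX GCM code paths do not
 * pay off: gcmaes_crypt_by_sg() drops back to the next lighter
 * implementation for short inputs (see the gcm_tfm fixups there).
 */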
asmlinkage void aesni_xts_encrypt(const struct crypto_aes_ctx *ctx, u8 *out,
				  const u8 *in, unsigned int len, u8 *iv);

asmlinkage void aesni_xts_decrypt(const struct crypto_aes_ctx *ctx, u8 *out,
				  const u8 *in, unsigned int len, u8 *iv);

#ifdef CONFIG_X86_64

static void (*aesni_ctr_enc_tfm)(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_ctr_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);
/* asmlinkage void aesni_gcm_enc()
 * void *ctx, AES Key schedule. Starts on a 16 byte boundary.
 * struct gcm_context_data. May be uninitialized.
 * u8 *out, Ciphertext output. Encrypt in-place is allowed.
 * const u8 *in, Plaintext input
 * unsigned long plaintext_len, Length of data in bytes for encryption.
 * u8 *iv, Pre-counter block j0: 12 byte IV concatenated with 0x00000001.
 *         16-byte aligned pointer.
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 * const u8 *aad, Additional Authentication Data (AAD)
 * unsigned long aad_len, Length of AAD in bytes.
 * u8 *auth_tag, Authenticated Tag output.
 * unsigned long auth_tag_len), Authenticated Tag Length in bytes.
 *         Valid values are 16 (most likely), 12 or 8.
 */
asmlinkage void aesni_gcm_enc(void *ctx,
			struct gcm_context_data *gdata, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);
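/*
 * Example call sequence (a sketch only: 'ctx' is an expanded key
 * schedule aligned as described above, 'iv' already holds the 16-byte
 * J0 block, and gdata/dst/src/tag are caller-provided buffers).  Like
 * all of the asm routines in this file, it may only run inside a
 * kernel_fpu_begin()/kernel_fpu_end() section:
 *
 *	kernel_fpu_begin();
 *	aesni_gcm_enc(ctx, &gdata, dst, src, len, iv,
 *		      hash_subkey, aad, aad_len, tag, 16);
 *	kernel_fpu_end();
 */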
/* asmlinkage void aesni_gcm_dec()
 * void *ctx, AES Key schedule. Starts on a 16 byte boundary.
 * struct gcm_context_data. May be uninitialized.
 * u8 *out, Plaintext output. Decrypt in-place is allowed.
 * const u8 *in, Ciphertext input
 * unsigned long ciphertext_len, Length of data in bytes for decryption.
 * u8 *iv, Pre-counter block j0: 12 byte IV concatenated with 0x00000001.
 *         16-byte aligned pointer.
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 * const u8 *aad, Additional Authentication Data (AAD)
 * unsigned long aad_len, Length of AAD in bytes. With RFC4106 this is going
 *         to be 8 or 12 bytes
 * u8 *auth_tag, Authenticated Tag output.
 * unsigned long auth_tag_len) Authenticated Tag Length in bytes.
 *         Valid values are 16 (most likely), 12 or 8.
 */
asmlinkage void aesni_gcm_dec(void *ctx,
			struct gcm_context_data *gdata, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);
/* Scatter / Gather routines, with args similar to above */
asmlinkage void aesni_gcm_init(void *ctx,
			       struct gcm_context_data *gdata,
			       u8 *iv,
			       u8 *hash_subkey, const u8 *aad,
			       unsigned long aad_len);
asmlinkage void aesni_gcm_enc_update(void *ctx,
				     struct gcm_context_data *gdata, u8 *out,
				     const u8 *in, unsigned long plaintext_len);
asmlinkage void aesni_gcm_dec_update(void *ctx,
				     struct gcm_context_data *gdata, u8 *out,
				     const u8 *in,
				     unsigned long ciphertext_len);
asmlinkage void aesni_gcm_finalize(void *ctx,
				   struct gcm_context_data *gdata,
				   u8 *auth_tag, unsigned long auth_tag_len);

static const struct aesni_gcm_tfm_s {
	void (*init)(void *ctx, struct gcm_context_data *gdata, u8 *iv,
		     u8 *hash_subkey, const u8 *aad, unsigned long aad_len);
	void (*enc_update)(void *ctx, struct gcm_context_data *gdata, u8 *out,
			   const u8 *in, unsigned long plaintext_len);
	void (*dec_update)(void *ctx, struct gcm_context_data *gdata, u8 *out,
			   const u8 *in, unsigned long ciphertext_len);
	void (*finalize)(void *ctx, struct gcm_context_data *gdata,
			 u8 *auth_tag, unsigned long auth_tag_len);
} *aesni_gcm_tfm __ro_after_init;
static const struct aesni_gcm_tfm_s aesni_gcm_tfm_sse = {
	.init = &aesni_gcm_init,
	.enc_update = &aesni_gcm_enc_update,
	.dec_update = &aesni_gcm_dec_update,
	.finalize = &aesni_gcm_finalize,
};
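/*
 * aesni_gcm_tfm is pointed at one of the implementation tables above or
 * below by aesni_init(), based on the CPU features detected at load
 * time (SSE baseline, AVX, or AVX2).
 */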
#ifdef CONFIG_AS_AVX
asmlinkage void aes_ctr_enc_128_avx_by8(const u8 *in, u8 *iv,
		void *keys, u8 *out, unsigned int num_bytes);
asmlinkage void aes_ctr_enc_192_avx_by8(const u8 *in, u8 *iv,
		void *keys, u8 *out, unsigned int num_bytes);
asmlinkage void aes_ctr_enc_256_avx_by8(const u8 *in, u8 *iv,
		void *keys, u8 *out, unsigned int num_bytes);
/*
 * asmlinkage void aesni_gcm_init_avx_gen2()
 * gcm_data *my_ctx_data, context data
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 */
asmlinkage void aesni_gcm_init_avx_gen2(void *my_ctx_data,
					struct gcm_context_data *gdata,
					u8 *iv,
					u8 *hash_subkey,
					const u8 *aad,
					unsigned long aad_len);
asmlinkage void aesni_gcm_enc_update_avx_gen2(void *ctx,
					struct gcm_context_data *gdata, u8 *out,
					const u8 *in, unsigned long plaintext_len);
asmlinkage void aesni_gcm_dec_update_avx_gen2(void *ctx,
					struct gcm_context_data *gdata, u8 *out,
					const u8 *in,
					unsigned long ciphertext_len);
asmlinkage void aesni_gcm_finalize_avx_gen2(void *ctx,
					struct gcm_context_data *gdata,
					u8 *auth_tag, unsigned long auth_tag_len);

asmlinkage void aesni_gcm_enc_avx_gen2(void *ctx,
				struct gcm_context_data *gdata, u8 *out,
				const u8 *in, unsigned long plaintext_len, u8 *iv,
				const u8 *aad, unsigned long aad_len,
				u8 *auth_tag, unsigned long auth_tag_len);

asmlinkage void aesni_gcm_dec_avx_gen2(void *ctx,
				struct gcm_context_data *gdata, u8 *out,
				const u8 *in, unsigned long ciphertext_len, u8 *iv,
				const u8 *aad, unsigned long aad_len,
				u8 *auth_tag, unsigned long auth_tag_len);
static const struct aesni_gcm_tfm_s aesni_gcm_tfm_avx_gen2 = {
	.init = &aesni_gcm_init_avx_gen2,
	.enc_update = &aesni_gcm_enc_update_avx_gen2,
	.dec_update = &aesni_gcm_dec_update_avx_gen2,
	.finalize = &aesni_gcm_finalize_avx_gen2,
};
#endif

#ifdef CONFIG_AS_AVX2
/*
 * asmlinkage void aesni_gcm_init_avx_gen4()
 * gcm_data *my_ctx_data, context data
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 */
asmlinkage void aesni_gcm_init_avx_gen4(void *my_ctx_data,
					struct gcm_context_data *gdata,
					u8 *iv,
					u8 *hash_subkey,
					const u8 *aad,
					unsigned long aad_len);
asmlinkage void aesni_gcm_enc_update_avx_gen4(void *ctx,
					struct gcm_context_data *gdata, u8 *out,
					const u8 *in, unsigned long plaintext_len);
asmlinkage void aesni_gcm_dec_update_avx_gen4(void *ctx,
					struct gcm_context_data *gdata, u8 *out,
					const u8 *in,
					unsigned long ciphertext_len);
asmlinkage void aesni_gcm_finalize_avx_gen4(void *ctx,
					struct gcm_context_data *gdata,
					u8 *auth_tag, unsigned long auth_tag_len);
asmlinkage void aesni_gcm_enc_avx_gen4(void *ctx,
				struct gcm_context_data *gdata, u8 *out,
				const u8 *in, unsigned long plaintext_len, u8 *iv,
				const u8 *aad, unsigned long aad_len,
				u8 *auth_tag, unsigned long auth_tag_len);

asmlinkage void aesni_gcm_dec_avx_gen4(void *ctx,
				struct gcm_context_data *gdata, u8 *out,
				const u8 *in, unsigned long ciphertext_len, u8 *iv,
				const u8 *aad, unsigned long aad_len,
				u8 *auth_tag, unsigned long auth_tag_len);

static const struct aesni_gcm_tfm_s aesni_gcm_tfm_avx_gen4 = {
	.init = &aesni_gcm_init_avx_gen4,
	.enc_update = &aesni_gcm_enc_update_avx_gen4,
	.dec_update = &aesni_gcm_dec_update_avx_gen4,
	.finalize = &aesni_gcm_finalize_avx_gen4,
};
#endif
static inline struct
aesni_rfc4106_gcm_ctx *aesni_rfc4106_gcm_ctx_get(struct crypto_aead *tfm)
{
	unsigned long align = AESNI_ALIGN;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;
	return PTR_ALIGN(crypto_aead_ctx(tfm), align);
}

static inline struct
generic_gcmaes_ctx *generic_gcmaes_ctx_get(struct crypto_aead *tfm)
{
	unsigned long align = AESNI_ALIGN;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;
	return PTR_ALIGN(crypto_aead_ctx(tfm), align);
}
#endif
static inline struct crypto_aes_ctx *aes_ctx(void *raw_ctx)
{
	unsigned long addr = (unsigned long)raw_ctx;
	unsigned long align = AESNI_ALIGN;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;
	return (struct crypto_aes_ctx *)ALIGN(addr, align);
}
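/*
 * Worked example for the helpers above: on a config where the tfm
 * context is only 8-byte aligned, a context at e.g. 0x...08 gets bumped
 * by ALIGN()/PTR_ALIGN() to 0x...10.  The AESNI_ALIGN_EXTRA slack
 * reserved in the *_CTX_SIZE macros guarantees the realigned context
 * still fits inside the allocation.
 */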
static int aes_set_key_common(struct crypto_tfm *tfm, void *raw_ctx,
			      const u8 *in_key, unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = aes_ctx(raw_ctx);
	u32 *flags = &tfm->crt_flags;
	int err;

	if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_192 &&
	    key_len != AES_KEYSIZE_256) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	if (!crypto_simd_usable())
		err = aes_expandkey(ctx, in_key, key_len);
	else {
		kernel_fpu_begin();
		err = aesni_set_key(ctx, in_key, key_len);
		kernel_fpu_end();
	}

	return err;
}
static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	return aes_set_key_common(tfm, crypto_tfm_ctx(tfm), in_key, key_len);
}
static void aesni_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	if (!crypto_simd_usable()) {
		aes_encrypt(ctx, dst, src);
	} else {
		kernel_fpu_begin();
		aesni_enc(ctx, dst, src);
		kernel_fpu_end();
	}
}

static void aesni_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	if (!crypto_simd_usable()) {
		aes_decrypt(ctx, dst, src);
	} else {
		kernel_fpu_begin();
		aesni_dec(ctx, dst, src);
		kernel_fpu_end();
	}
}
static int aesni_skcipher_setkey(struct crypto_skcipher *tfm, const u8 *key,
				 unsigned int len)
{
	return aes_set_key_common(crypto_skcipher_tfm(tfm),
				  crypto_skcipher_ctx(tfm), key, len);
}
static int ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}
static int ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_ecb_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}
static int cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}
static int cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_cbc_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

#ifdef CONFIG_X86_64
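/*
 * CTR tail handling: encrypt the counter block once more, XOR only the
 * remaining sub-block-length nbytes of keystream into the output, and
 * then advance the counter for the next request segment.
 */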
static void ctr_crypt_final(struct crypto_aes_ctx *ctx,
			    struct skcipher_walk *walk)
{
	u8 *ctrblk = walk->iv;
	u8 keystream[AES_BLOCK_SIZE];
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	aesni_enc(ctx, keystream, ctrblk);
	crypto_xor_cpy(dst, keystream, src, nbytes);

	crypto_inc(ctrblk, AES_BLOCK_SIZE);
}
#ifdef CONFIG_AS_AVX
static void aesni_ctr_enc_avx_tfm(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv)
{
	/*
	 * Based on the key length, override with the by8 version of CTR
	 * mode encryption/decryption for improved performance.
	 * aes_set_key_common() ensures that the key length is one of
	 * {128,192,256} bits.
	 */
	if (ctx->key_length == AES_KEYSIZE_128)
		aes_ctr_enc_128_avx_by8(in, iv, (void *)ctx, out, len);
	else if (ctx->key_length == AES_KEYSIZE_192)
		aes_ctr_enc_192_avx_by8(in, iv, (void *)ctx, out, len);
	else
		aes_ctr_enc_256_avx_by8(in, iv, (void *)ctx, out, len);
}
#endif
static int ctr_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
		aesni_ctr_enc_tfm(ctx, walk.dst.virt.addr, walk.src.virt.addr,
				  nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	if (walk.nbytes) {
		ctr_crypt_final(ctx, &walk);
		err = skcipher_walk_done(&walk, 0);
	}
	kernel_fpu_end();

	return err;
}
static int xts_aesni_setkey(struct crypto_skcipher *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err;

	err = xts_verify_key(tfm, key, keylen);
	if (err)
		return err;

	keylen /= 2;

	/* first half of xts-key is for crypt */
	err = aes_set_key_common(crypto_skcipher_tfm(tfm), ctx->raw_crypt_ctx,
				 key, keylen);
	if (err)
		return err;

	/* second half of xts-key is for tweak */
	return aes_set_key_common(crypto_skcipher_tfm(tfm), ctx->raw_tweak_ctx,
				  key + keylen, keylen);
}
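/*
 * Note on the split above: per the XTS construction, the supplied key
 * is twice the AES key size; the first half becomes the data key and
 * the second half the tweak key.  xts_verify_key() has already vetted
 * the overall length (and, in FIPS mode, rejects identical halves).
 */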
static void aesni_xts_enc(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv, aesni_enc);
}

static void aesni_xts_dec(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv, aesni_dec);
}

static void aesni_xts_enc32(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
{
	aesni_xts_encrypt(ctx, dst, src, 32 * AES_BLOCK_SIZE, (u8 *)iv);
}

static void aesni_xts_dec32(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
{
	aesni_xts_decrypt(ctx, dst, src, 32 * AES_BLOCK_SIZE, (u8 *)iv);
}
static const struct common_glue_ctx aesni_enc_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = 1,

	.funcs = { {
		.num_blocks = 32,
		.fn_u = { .xts = aesni_xts_enc32 }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = aesni_xts_enc }
	} }
};

static const struct common_glue_ctx aesni_dec_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = 1,

	.funcs = { {
		.num_blocks = 32,
		.fn_u = { .xts = aesni_xts_dec32 }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = aesni_xts_dec }
	} }
};
static int xts_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	return glue_xts_req_128bit(&aesni_enc_xts, req, aesni_enc,
				   aes_ctx(ctx->raw_tweak_ctx),
				   aes_ctx(ctx->raw_crypt_ctx),
				   false);
}
static int xts_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	return glue_xts_req_128bit(&aesni_dec_xts, req, aesni_enc,
				   aes_ctx(ctx->raw_tweak_ctx),
				   aes_ctx(ctx->raw_crypt_ctx),
				   true);
}
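/*
 * In both paths above the tweak function is aesni_enc(): XTS always
 * *encrypts* the IV with the tweak key, on the decrypt path as well.
 */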
static int
rfc4106_set_hash_subkey(u8 *hash_subkey, const u8 *key, unsigned int key_len)
{
	struct crypto_aes_ctx ctx;
	int ret;

	ret = aes_expandkey(&ctx, key, key_len);
	if (ret)
		return ret;

	/*
	 * Cipher an all-zero block to derive the GHASH subkey
	 * H = E_K(0^128), as specified for GCM.
	 */
	memset(hash_subkey, 0, RFC4106_HASH_SUBKEY_SIZE);

	aes_encrypt(&ctx, hash_subkey, hash_subkey);

	memzero_explicit(&ctx, sizeof(ctx));
	return 0;
}
static int common_rfc4106_set_key(struct crypto_aead *aead, const u8 *key,
				  unsigned int key_len)
{
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(aead);

	if (key_len < 4) {
		crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}
	/* Account for the 4 byte nonce at the end. */
	key_len -= 4;

	memcpy(ctx->nonce, key + key_len, sizeof(ctx->nonce));

	return aes_set_key_common(crypto_aead_tfm(aead),
				  &ctx->aes_key_expanded, key, key_len) ?:
	       rfc4106_set_hash_subkey(ctx->hash_subkey, key, key_len);
}
/* This is the Integrity Check Value (aka the authentication tag) length
 * and can be 8, 12 or 16 bytes long. */
static int common_rfc4106_set_authsize(struct crypto_aead *aead,
				       unsigned int authsize)
{
	switch (authsize) {
	case 8:
	case 12:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}
static int generic_gcmaes_set_authsize(struct crypto_aead *tfm,
				       unsigned int authsize)
{
	switch (authsize) {
	case 4:
	case 8:
	case 12:
	case 13:
	case 14:
	case 15:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}
static int gcmaes_crypt_by_sg(bool enc, struct aead_request *req,
			      unsigned int assoclen, u8 *hash_subkey,
			      u8 *iv, void *aes_ctx)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	unsigned long auth_tag_len = crypto_aead_authsize(tfm);
	const struct aesni_gcm_tfm_s *gcm_tfm = aesni_gcm_tfm;
	u8 databuf[sizeof(struct gcm_context_data) + (AESNI_ALIGN - 8)] __aligned(8);
	struct gcm_context_data *data = PTR_ALIGN((void *)databuf, AESNI_ALIGN);
	struct scatter_walk dst_sg_walk = {};
	unsigned long left = req->cryptlen;
	unsigned long len, srclen, dstlen;
	struct scatter_walk assoc_sg_walk;
	struct scatter_walk src_sg_walk;
	struct scatterlist src_start[2];
	struct scatterlist dst_start[2];
	struct scatterlist *src_sg;
	struct scatterlist *dst_sg;
	u8 *src, *dst, *assoc;
	u8 *assocmem = NULL;
	u8 authTag[16];

	if (!enc)
		left -= auth_tag_len;

#ifdef CONFIG_AS_AVX2
	if (left < AVX_GEN4_OPTSIZE && gcm_tfm == &aesni_gcm_tfm_avx_gen4)
		gcm_tfm = &aesni_gcm_tfm_avx_gen2;
#endif
#ifdef CONFIG_AS_AVX
	if (left < AVX_GEN2_OPTSIZE && gcm_tfm == &aesni_gcm_tfm_avx_gen2)
		gcm_tfm = &aesni_gcm_tfm_sse;
#endif

	/* Linearize assoc, if not already linear */
	if (req->src->length >= assoclen && req->src->length &&
	    (!PageHighMem(sg_page(req->src)) ||
	     req->src->offset + req->src->length <= PAGE_SIZE)) {
		scatterwalk_start(&assoc_sg_walk, req->src);
		assoc = scatterwalk_map(&assoc_sg_walk);
	} else {
		/* assoc can be any length, so must be on heap */
		assocmem = kmalloc(assoclen, GFP_ATOMIC);
		if (unlikely(!assocmem))
			return -ENOMEM;
		assoc = assocmem;

		scatterwalk_map_and_copy(assoc, req->src, 0, assoclen, 0);
	}

	if (left) {
		src_sg = scatterwalk_ffwd(src_start, req->src, req->assoclen);
		scatterwalk_start(&src_sg_walk, src_sg);
		if (req->src != req->dst) {
			dst_sg = scatterwalk_ffwd(dst_start, req->dst,
						  req->assoclen);
			scatterwalk_start(&dst_sg_walk, dst_sg);
		}
	}

	kernel_fpu_begin();
	gcm_tfm->init(aes_ctx, data, iv, hash_subkey, assoc, assoclen);
	if (req->src != req->dst) {
		while (left) {
			src = scatterwalk_map(&src_sg_walk);
			dst = scatterwalk_map(&dst_sg_walk);
			srclen = scatterwalk_clamp(&src_sg_walk, left);
			dstlen = scatterwalk_clamp(&dst_sg_walk, left);
			len = min(srclen, dstlen);
			if (len) {
				if (enc)
					gcm_tfm->enc_update(aes_ctx, data,
							    dst, src, len);
				else
					gcm_tfm->dec_update(aes_ctx, data,
							    dst, src, len);
			}
			left -= len;

			scatterwalk_unmap(src);
			scatterwalk_unmap(dst);
			scatterwalk_advance(&src_sg_walk, len);
			scatterwalk_advance(&dst_sg_walk, len);
			scatterwalk_done(&src_sg_walk, 0, left);
			scatterwalk_done(&dst_sg_walk, 1, left);
		}
	} else {
		while (left) {
			dst = src = scatterwalk_map(&src_sg_walk);
			len = scatterwalk_clamp(&src_sg_walk, left);
			if (len) {
				if (enc)
					gcm_tfm->enc_update(aes_ctx, data,
							    src, src, len);
				else
					gcm_tfm->dec_update(aes_ctx, data,
							    src, src, len);
			}
			left -= len;
			scatterwalk_unmap(src);
			scatterwalk_advance(&src_sg_walk, len);
			scatterwalk_done(&src_sg_walk, 1, left);
		}
	}
	gcm_tfm->finalize(aes_ctx, data, authTag, auth_tag_len);
	kernel_fpu_end();

	if (!assocmem)
		scatterwalk_unmap(assoc);
	else
		kfree(assocmem);

	if (!enc) {
		u8 authTagMsg[16];

		/* Copy out original authTag */
		scatterwalk_map_and_copy(authTagMsg, req->src,
					 req->assoclen + req->cryptlen -
					 auth_tag_len,
					 auth_tag_len, 0);

		/* Compare generated tag with passed in tag. */
		return crypto_memneq(authTagMsg, authTag, auth_tag_len) ?
			-EBADMSG : 0;
	}

	/* Copy in the authTag */
	scatterwalk_map_and_copy(authTag, req->dst,
				 req->assoclen + req->cryptlen,
				 auth_tag_len, 1);

	return 0;
}
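/*
 * On the decrypt path above, the computed tag is compared with
 * crypto_memneq() rather than memcmp() so the comparison runs in
 * constant time and leaks no tag bytes through timing.
 */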
static int gcmaes_encrypt(struct aead_request *req, unsigned int assoclen,
			  u8 *hash_subkey, u8 *iv, void *aes_ctx)
{
	return gcmaes_crypt_by_sg(true, req, assoclen, hash_subkey, iv,
				  aes_ctx);
}

static int gcmaes_decrypt(struct aead_request *req, unsigned int assoclen,
			  u8 *hash_subkey, u8 *iv, void *aes_ctx)
{
	return gcmaes_crypt_by_sg(false, req, assoclen, hash_subkey, iv,
				  aes_ctx);
}
static int helper_rfc4106_encrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	u8 ivbuf[16 + (AESNI_ALIGN - 8)] __aligned(8);
	u8 *iv = PTR_ALIGN(&ivbuf[0], AESNI_ALIGN);
	unsigned int i;
	__be32 counter = cpu_to_be32(1);
	/*
	 * Assuming we are supporting rfc4106 64-bit extended sequence
	 * numbers, the AAD length must be 16 or 20 bytes.
	 */
	if (unlikely(req->assoclen != 16 && req->assoclen != 20))
		return -EINVAL;
	for (i = 0; i < 4; i++)
		*(iv+i) = ctx->nonce[i];
	for (i = 0; i < 8; i++)
		*(iv+4+i) = req->iv[i];
	*((__be32 *)(iv+12)) = counter;
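	/*
	 * The resulting 16-byte block is the GCM J0 value: the 4-byte
	 * salt from the key, the 8-byte explicit IV from the request,
	 * and a big-endian 32-bit counter initialised to 1.
	 */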
	return gcmaes_encrypt(req, req->assoclen - 8, ctx->hash_subkey, iv,
			      aes_ctx);
}
static int helper_rfc4106_decrypt(struct aead_request *req)
{
	__be32 counter = cpu_to_be32(1);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	u8 ivbuf[16 + (AESNI_ALIGN - 8)] __aligned(8);
	u8 *iv = PTR_ALIGN(&ivbuf[0], AESNI_ALIGN);
	unsigned int i;

	/*
	 * Assuming we are supporting rfc4106 64-bit extended sequence
	 * numbers, the AAD length must be 16 or 20 bytes.
	 */
	if (unlikely(req->assoclen != 16 && req->assoclen != 20))
		return -EINVAL;

	for (i = 0; i < 4; i++)
		*(iv+i) = ctx->nonce[i];
	for (i = 0; i < 8; i++)
		*(iv+4+i) = req->iv[i];
	*((__be32 *)(iv+12)) = counter;

	return gcmaes_decrypt(req, req->assoclen - 8, ctx->hash_subkey, iv,
			      aes_ctx);
}
#endif
static struct crypto_alg aesni_cipher_alg = {
	.cra_name = "aes",
	.cra_driver_name = "aes-aesni",
	.cra_priority = 300,
	.cra_flags = CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = CRYPTO_AES_CTX_SIZE,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.cipher = {
			.cia_min_keysize = AES_MIN_KEY_SIZE,
			.cia_max_keysize = AES_MAX_KEY_SIZE,
			.cia_setkey = aes_set_key,
			.cia_encrypt = aesni_encrypt,
			.cia_decrypt = aesni_decrypt
		}
	}
};
static struct skcipher_alg aesni_skciphers[] = {
	{
		.base = {
			.cra_name = "__ecb(aes)",
			.cra_driver_name = "__ecb-aes-aesni",
			.cra_priority = 400,
			.cra_flags = CRYPTO_ALG_INTERNAL,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = CRYPTO_AES_CTX_SIZE,
			.cra_module = THIS_MODULE,
		},
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.setkey = aesni_skcipher_setkey,
		.encrypt = ecb_encrypt,
		.decrypt = ecb_decrypt,
	}, {
		.base = {
			.cra_name = "__cbc(aes)",
			.cra_driver_name = "__cbc-aes-aesni",
			.cra_priority = 400,
			.cra_flags = CRYPTO_ALG_INTERNAL,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = CRYPTO_AES_CTX_SIZE,
			.cra_module = THIS_MODULE,
		},
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.setkey = aesni_skcipher_setkey,
		.encrypt = cbc_encrypt,
		.decrypt = cbc_decrypt,
#ifdef CONFIG_X86_64
	}, {
		.base = {
			.cra_name = "__ctr(aes)",
			.cra_driver_name = "__ctr-aes-aesni",
			.cra_priority = 400,
			.cra_flags = CRYPTO_ALG_INTERNAL,
			.cra_blocksize = 1,
			.cra_ctxsize = CRYPTO_AES_CTX_SIZE,
			.cra_module = THIS_MODULE,
		},
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.chunksize = AES_BLOCK_SIZE,
		.setkey = aesni_skcipher_setkey,
		.encrypt = ctr_crypt,
		.decrypt = ctr_crypt,
	}, {
		.base = {
			.cra_name = "__xts(aes)",
			.cra_driver_name = "__xts-aes-aesni",
			.cra_priority = 400,
			.cra_flags = CRYPTO_ALG_INTERNAL,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = XTS_AES_CTX_SIZE,
			.cra_module = THIS_MODULE,
		},
		.min_keysize = 2 * AES_MIN_KEY_SIZE,
		.max_keysize = 2 * AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.setkey = xts_aesni_setkey,
		.encrypt = xts_encrypt,
		.decrypt = xts_decrypt,
#endif
	}
};
static
struct simd_skcipher_alg *aesni_simd_skciphers[ARRAY_SIZE(aesni_skciphers)];
#ifdef CONFIG_X86_64
static int generic_gcmaes_set_key(struct crypto_aead *aead, const u8 *key,
				  unsigned int key_len)
{
	struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(aead);

	return aes_set_key_common(crypto_aead_tfm(aead),
				  &ctx->aes_key_expanded, key, key_len) ?:
	       rfc4106_set_hash_subkey(ctx->hash_subkey, key, key_len);
}
static int generic_gcmaes_encrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	u8 ivbuf[16 + (AESNI_ALIGN - 8)] __aligned(8);
	u8 *iv = PTR_ALIGN(&ivbuf[0], AESNI_ALIGN);
	__be32 counter = cpu_to_be32(1);

	memcpy(iv, req->iv, 12);
	*((__be32 *)(iv+12)) = counter;

	return gcmaes_encrypt(req, req->assoclen, ctx->hash_subkey, iv,
			      aes_ctx);
}
static int generic_gcmaes_decrypt(struct aead_request *req)
{
	__be32 counter = cpu_to_be32(1);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	u8 ivbuf[16 + (AESNI_ALIGN - 8)] __aligned(8);
	u8 *iv = PTR_ALIGN(&ivbuf[0], AESNI_ALIGN);

	memcpy(iv, req->iv, 12);
	*((__be32 *)(iv+12)) = counter;

	return gcmaes_decrypt(req, req->assoclen, ctx->hash_subkey, iv,
			      aes_ctx);
}
static struct aead_alg aesni_aeads[] = { {
	.setkey = common_rfc4106_set_key,
	.setauthsize = common_rfc4106_set_authsize,
	.encrypt = helper_rfc4106_encrypt,
	.decrypt = helper_rfc4106_decrypt,
	.ivsize = GCM_RFC4106_IV_SIZE,
	.maxauthsize = 16,
	.base = {
		.cra_name = "__rfc4106(gcm(aes))",
		.cra_driver_name = "__rfc4106-gcm-aesni",
		.cra_priority = 400,
		.cra_flags = CRYPTO_ALG_INTERNAL,
		.cra_blocksize = 1,
		.cra_ctxsize = sizeof(struct aesni_rfc4106_gcm_ctx),
		.cra_alignmask = AESNI_ALIGN - 1,
		.cra_module = THIS_MODULE,
	},
}, {
	.setkey = generic_gcmaes_set_key,
	.setauthsize = generic_gcmaes_set_authsize,
	.encrypt = generic_gcmaes_encrypt,
	.decrypt = generic_gcmaes_decrypt,
	.ivsize = GCM_AES_IV_SIZE,
	.maxauthsize = 16,
	.base = {
		.cra_name = "__gcm(aes)",
		.cra_driver_name = "__generic-gcm-aesni",
		.cra_priority = 400,
		.cra_flags = CRYPTO_ALG_INTERNAL,
		.cra_blocksize = 1,
		.cra_ctxsize = sizeof(struct generic_gcmaes_ctx),
		.cra_alignmask = AESNI_ALIGN - 1,
		.cra_module = THIS_MODULE,
	},
} };
#else
static struct aead_alg aesni_aeads[0];
#endif
static struct simd_aead_alg *aesni_simd_aeads[ARRAY_SIZE(aesni_aeads)];
static const struct x86_cpu_id aesni_cpu_id[] = {
	X86_FEATURE_MATCH(X86_FEATURE_AES),
	{}
};
MODULE_DEVICE_TABLE(x86cpu, aesni_cpu_id);
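/*
 * The device table above lets userspace (e.g. udev, via the x86cpu
 * modalias) auto-load this module on CPUs that advertise the AES
 * feature flag.
 */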
static int __init aesni_init(void)
{
	int err;

	if (!x86_match_cpu(aesni_cpu_id))
		return -ENODEV;
#ifdef CONFIG_X86_64
#ifdef CONFIG_AS_AVX2
	if (boot_cpu_has(X86_FEATURE_AVX2)) {
		pr_info("AVX2 version of gcm_enc/dec engaged.\n");
		aesni_gcm_tfm = &aesni_gcm_tfm_avx_gen4;
	} else
#endif
#ifdef CONFIG_AS_AVX
	if (boot_cpu_has(X86_FEATURE_AVX)) {
		pr_info("AVX version of gcm_enc/dec engaged.\n");
		aesni_gcm_tfm = &aesni_gcm_tfm_avx_gen2;
	} else
#endif
	{
		pr_info("SSE version of gcm_enc/dec engaged.\n");
		aesni_gcm_tfm = &aesni_gcm_tfm_sse;
	}
	aesni_ctr_enc_tfm = aesni_ctr_enc;
#ifdef CONFIG_AS_AVX
	if (boot_cpu_has(X86_FEATURE_AVX)) {
		/* optimize performance of ctr mode encryption transform */
		aesni_ctr_enc_tfm = aesni_ctr_enc_avx_tfm;
		pr_info("AES CTR mode by8 optimization enabled\n");
	}
#endif
#endif
	err = crypto_register_alg(&aesni_cipher_alg);
	if (err)
		return err;

	err = simd_register_skciphers_compat(aesni_skciphers,
					     ARRAY_SIZE(aesni_skciphers),
					     aesni_simd_skciphers);
	if (err)
		goto unregister_cipher;

	err = simd_register_aeads_compat(aesni_aeads, ARRAY_SIZE(aesni_aeads),
					 aesni_simd_aeads);
	if (err)
		goto unregister_skciphers;

	return 0;

unregister_skciphers:
	simd_unregister_skciphers(aesni_skciphers, ARRAY_SIZE(aesni_skciphers),
				  aesni_simd_skciphers);
unregister_cipher:
	crypto_unregister_alg(&aesni_cipher_alg);
	return err;
}
static void __exit aesni_exit(void)
{
	simd_unregister_aeads(aesni_aeads, ARRAY_SIZE(aesni_aeads),
			      aesni_simd_aeads);
	simd_unregister_skciphers(aesni_skciphers, ARRAY_SIZE(aesni_skciphers),
				  aesni_simd_skciphers);
	crypto_unregister_alg(&aesni_cipher_alg);
}
late_initcall(aesni_init);
module_exit(aesni_exit);

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, Intel AES-NI instructions optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS_CRYPTO("aes");