2 * Support for Intel AES-NI instructions. This file contains glue
3 * code; the real AES implementation is in aesni-intel_asm.S.
5 * Copyright (C) 2008, Intel Corp.
6 * Author: Huang Ying <ying.huang@intel.com>
8 * Added RFC4106 AES-GCM support for 128-bit keys under the AEAD
9 * interface for 64-bit kernels.
10 * Authors: Adrian Hoban <adrian.hoban@intel.com>
11 * Gabriele Paoloni <gabriele.paoloni@intel.com>
12 * Tadeusz Struk (tadeusz.struk@intel.com)
13 * Aidan O'Mahony (aidan.o.mahony@intel.com)
14 * Copyright (c) 2010, Intel Corporation.
16 * This program is free software; you can redistribute it and/or modify
17 * it under the terms of the GNU General Public License as published by
18 * the Free Software Foundation; either version 2 of the License, or
19 * (at your option) any later version.
22 #include <linux/hardirq.h>
23 #include <linux/types.h>
24 #include <linux/module.h>
25 #include <linux/err.h>
26 #include <crypto/algapi.h>
27 #include <crypto/aes.h>
28 #include <crypto/cryptd.h>
29 #include <crypto/ctr.h>
30 #include <crypto/b128ops.h>
31 #include <crypto/gcm.h>
32 #include <crypto/xts.h>
33 #include <asm/cpu_device_id.h>
34 #include <asm/fpu/api.h>
35 #include <asm/crypto/aes.h>
36 #include <crypto/scatterwalk.h>
37 #include <crypto/internal/aead.h>
38 #include <crypto/internal/simd.h>
39 #include <crypto/internal/skcipher.h>
40 #include <linux/workqueue.h>
41 #include <linux/spinlock.h>
43 #include <asm/crypto/glue_helper.h>
47 #define AESNI_ALIGN 16
48 #define AESNI_ALIGN_ATTR __attribute__ ((__aligned__(AESNI_ALIGN)))
49 #define AES_BLOCK_MASK (~(AES_BLOCK_SIZE - 1))
50 #define RFC4106_HASH_SUBKEY_SIZE 16
51 #define AESNI_ALIGN_EXTRA ((AESNI_ALIGN - 1) & ~(CRYPTO_MINALIGN - 1))
52 #define CRYPTO_AES_CTX_SIZE (sizeof(struct crypto_aes_ctx) + AESNI_ALIGN_EXTRA)
53 #define XTS_AES_CTX_SIZE (sizeof(struct aesni_xts_ctx) + AESNI_ALIGN_EXTRA)
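/*
 * Note: the crypto API only guarantees CRYPTO_MINALIGN for a tfm context, so
 * the context sizes above reserve AESNI_ALIGN_EXTRA spare bytes and the
 * aes_ctx()/PTR_ALIGN() helpers below realign the pointer up to AESNI_ALIGN
 * (16 bytes) at run time.
 */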
55 /* This data is stored at the end of the crypto_tfm struct.
56 * It is a per-"session" data storage location and
57 * needs to be 16-byte aligned. */
59 struct aesni_rfc4106_gcm_ctx {
60 u8 hash_subkey[16] AESNI_ALIGN_ATTR;
61 struct crypto_aes_ctx aes_key_expanded AESNI_ALIGN_ATTR;
62 u8 nonce[4];
63 };
65 struct generic_gcmaes_ctx {
66 u8 hash_subkey[16] AESNI_ALIGN_ATTR;
67 struct crypto_aes_ctx aes_key_expanded AESNI_ALIGN_ATTR;
70 struct aesni_xts_ctx {
71 u8 raw_tweak_ctx[sizeof(struct crypto_aes_ctx)] AESNI_ALIGN_ATTR;
72 u8 raw_crypt_ctx[sizeof(struct crypto_aes_ctx)] AESNI_ALIGN_ATTR;
75 #define GCM_BLOCK_LEN 16
77 struct gcm_context_data {
78 /* init, update and finalize context data */
79 u8 aad_hash[GCM_BLOCK_LEN];
82 u8 partial_block_enc_key[GCM_BLOCK_LEN];
83 u8 orig_IV[GCM_BLOCK_LEN];
84 u8 current_counter[GCM_BLOCK_LEN];
85 u64 partial_block_len;
87 u8 hash_keys[GCM_BLOCK_LEN * 8];
90 asmlinkage int aesni_set_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
91 unsigned int key_len);
92 asmlinkage void aesni_enc(struct crypto_aes_ctx *ctx, u8 *out,
94 asmlinkage void aesni_dec(struct crypto_aes_ctx *ctx, u8 *out,
96 asmlinkage void aesni_ecb_enc(struct crypto_aes_ctx *ctx, u8 *out,
97 const u8 *in, unsigned int len);
98 asmlinkage void aesni_ecb_dec(struct crypto_aes_ctx *ctx, u8 *out,
99 const u8 *in, unsigned int len);
100 asmlinkage void aesni_cbc_enc(struct crypto_aes_ctx *ctx, u8 *out,
101 const u8 *in, unsigned int len, u8 *iv);
102 asmlinkage void aesni_cbc_dec(struct crypto_aes_ctx *ctx, u8 *out,
103 const u8 *in, unsigned int len, u8 *iv);
105 int crypto_fpu_init(void);
106 void crypto_fpu_exit(void);
108 #define AVX_GEN2_OPTSIZE 640
109 #define AVX_GEN4_OPTSIZE 4096
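/*
 * These thresholds (in bytes) are used below to pick a GCM implementation:
 * requests shorter than AVX_GEN2_OPTSIZE stay on the SSE routines, requests
 * between the two sizes use the AVX (gen2) routines, and larger requests use
 * the AVX2 (gen4) routines when the CPU supports them.
 */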
113 static void (*aesni_ctr_enc_tfm)(struct crypto_aes_ctx *ctx, u8 *out,
114 const u8 *in, unsigned int len, u8 *iv);
115 asmlinkage void aesni_ctr_enc(struct crypto_aes_ctx *ctx, u8 *out,
116 const u8 *in, unsigned int len, u8 *iv);
118 asmlinkage void aesni_xts_crypt8(struct crypto_aes_ctx *ctx, u8 *out,
119 const u8 *in, bool enc, u8 *iv);
121 /* asmlinkage void aesni_gcm_enc()
122 * void *ctx, AES Key schedule. Starts on a 16 byte boundary.
123 * struct gcm_context_data. May be uninitialized.
124 * u8 *out, Ciphertext output. Encrypt in-place is allowed.
125 * const u8 *in, Plaintext input
126 * unsigned long plaintext_len, Length of data in bytes for encryption.
127 * u8 *iv, Pre-counter block j0: 12 byte IV concatenated with 0x00000001.
128 * 16-byte aligned pointer.
129 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
130 * const u8 *aad, Additional Authentication Data (AAD)
131 * unsigned long aad_len, Length of AAD in bytes.
132 * u8 *auth_tag, Authenticated Tag output.
133 * unsigned long auth_tag_len), Authenticated Tag Length in bytes.
134 * Valid values are 16 (most likely), 12 or 8. */
136 asmlinkage void aesni_gcm_enc(void *ctx,
137 struct gcm_context_data *gdata, u8 *out,
138 const u8 *in, unsigned long plaintext_len, u8 *iv,
139 u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
140 u8 *auth_tag, unsigned long auth_tag_len);
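/*
 * Illustrative sketch (not part of this driver): the j0 pre-counter block
 * described above is a 12-byte IV followed by a big-endian 32-bit value of 1,
 * built the same way the callers further down do it:
 *
 *	u8 j0[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
 *
 *	memcpy(j0, iv12, 12);			// iv12: placeholder 12-byte IV
 *	*(__be32 *)(j0 + 12) = cpu_to_be32(1);	// initial counter value
 */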
142 /* asmlinkage void aesni_gcm_dec()
143 * void *ctx, AES Key schedule. Starts on a 16 byte boundary.
144 * struct gcm_context_data. May be uninitialized.
145 * u8 *out, Plaintext output. Decrypt in-place is allowed.
146 * const u8 *in, Ciphertext input
147 * unsigned long ciphertext_len, Length of data in bytes for decryption.
148 * u8 *iv, Pre-counter block j0: 12 byte IV concatenated with 0x00000001.
149 * 16-byte aligned pointer.
150 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
151 * const u8 *aad, Additional Authentication Data (AAD)
152 * unsigned long aad_len, Length of AAD in bytes. With RFC4106 this is going
153 * to be 8 or 12 bytes
154 * u8 *auth_tag, Authenticated Tag output.
155 * unsigned long auth_tag_len) Authenticated Tag Length in bytes.
156 * Valid values are 16 (most likely), 12 or 8. */
158 asmlinkage void aesni_gcm_dec(void *ctx,
159 struct gcm_context_data *gdata, u8 *out,
160 const u8 *in, unsigned long ciphertext_len, u8 *iv,
161 u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
162 u8 *auth_tag, unsigned long auth_tag_len);
164 /* Scatter / Gather routines, with args similar to above */
165 asmlinkage void aesni_gcm_init(void *ctx,
166 struct gcm_context_data *gdata,
168 u8 *hash_subkey, const u8 *aad,
169 unsigned long aad_len);
170 asmlinkage void aesni_gcm_enc_update(void *ctx,
171 struct gcm_context_data *gdata, u8 *out,
172 const u8 *in, unsigned long plaintext_len);
173 asmlinkage void aesni_gcm_dec_update(void *ctx,
174 struct gcm_context_data *gdata, u8 *out,
176 unsigned long ciphertext_len);
177 asmlinkage void aesni_gcm_finalize(void *ctx,
178 struct gcm_context_data *gdata,
179 u8 *auth_tag, unsigned long auth_tag_len);
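/*
 * Rough calling sequence for the scatter/gather routines above (a sketch
 * mirroring gcmaes_crypt_by_sg() below; per-chunk walking is omitted):
 *
 *	struct gcm_context_data data AESNI_ALIGN_ATTR;
 *
 *	kernel_fpu_begin();
 *	aesni_gcm_init(aes_ctx, &data, iv, hash_subkey, assoc, assoclen);
 *	aesni_gcm_enc_update(aes_ctx, &data, dst, src, len); // repeat per chunk
 *	aesni_gcm_finalize(aes_ctx, &data, auth_tag, auth_tag_len);
 *	kernel_fpu_end();
 */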
182 asmlinkage void aes_ctr_enc_128_avx_by8(const u8 *in, u8 *iv,
183 void *keys, u8 *out, unsigned int num_bytes);
184 asmlinkage void aes_ctr_enc_192_avx_by8(const u8 *in, u8 *iv,
185 void *keys, u8 *out, unsigned int num_bytes);
186 asmlinkage void aes_ctr_enc_256_avx_by8(const u8 *in, u8 *iv,
187 void *keys, u8 *out, unsigned int num_bytes);
189 /* asmlinkage void aesni_gcm_precomp_avx_gen2()
190 * gcm_data *my_ctx_data, context data
191 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary. */
193 asmlinkage void aesni_gcm_precomp_avx_gen2(void *my_ctx_data, u8 *hash_subkey);
195 asmlinkage void aesni_gcm_enc_avx_gen2(void *ctx, u8 *out,
196 const u8 *in, unsigned long plaintext_len, u8 *iv,
197 const u8 *aad, unsigned long aad_len,
198 u8 *auth_tag, unsigned long auth_tag_len);
200 asmlinkage void aesni_gcm_dec_avx_gen2(void *ctx, u8 *out,
201 const u8 *in, unsigned long ciphertext_len, u8 *iv,
202 const u8 *aad, unsigned long aad_len,
203 u8 *auth_tag, unsigned long auth_tag_len);
205 static void aesni_gcm_enc_avx(void *ctx,
206 struct gcm_context_data *data, u8 *out,
207 const u8 *in, unsigned long plaintext_len, u8 *iv,
208 u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
209 u8 *auth_tag, unsigned long auth_tag_len)
211 struct crypto_aes_ctx *aes_ctx = (struct crypto_aes_ctx *)ctx;
212 if ((plaintext_len < AVX_GEN2_OPTSIZE) || (aes_ctx->key_length != AES_KEYSIZE_128)) {
213 aesni_gcm_enc(ctx, data, out, in,
214 plaintext_len, iv, hash_subkey, aad,
215 aad_len, auth_tag, auth_tag_len);
217 aesni_gcm_precomp_avx_gen2(ctx, hash_subkey);
218 aesni_gcm_enc_avx_gen2(ctx, out, in, plaintext_len, iv, aad,
219 aad_len, auth_tag, auth_tag_len);
223 static void aesni_gcm_dec_avx(void *ctx,
224 struct gcm_context_data *data, u8 *out,
225 const u8 *in, unsigned long ciphertext_len, u8 *iv,
226 u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
227 u8 *auth_tag, unsigned long auth_tag_len)
229 struct crypto_aes_ctx *aes_ctx = (struct crypto_aes_ctx *)ctx;
230 if ((ciphertext_len < AVX_GEN2_OPTSIZE) || (aes_ctx->key_length != AES_KEYSIZE_128)) {
231 aesni_gcm_dec(ctx, data, out, in,
232 ciphertext_len, iv, hash_subkey, aad,
233 aad_len, auth_tag, auth_tag_len);
235 aesni_gcm_precomp_avx_gen2(ctx, hash_subkey);
236 aesni_gcm_dec_avx_gen2(ctx, out, in, ciphertext_len, iv, aad,
237 aad_len, auth_tag, auth_tag_len);
242 #ifdef CONFIG_AS_AVX2
244 /* asmlinkage void aesni_gcm_precomp_avx_gen4()
245 * gcm_data *my_ctx_data, context data
246 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary. */
248 asmlinkage void aesni_gcm_precomp_avx_gen4(void *my_ctx_data, u8 *hash_subkey);
250 asmlinkage void aesni_gcm_enc_avx_gen4(void *ctx, u8 *out,
251 const u8 *in, unsigned long plaintext_len, u8 *iv,
252 const u8 *aad, unsigned long aad_len,
253 u8 *auth_tag, unsigned long auth_tag_len);
255 asmlinkage void aesni_gcm_dec_avx_gen4(void *ctx, u8 *out,
256 const u8 *in, unsigned long ciphertext_len, u8 *iv,
257 const u8 *aad, unsigned long aad_len,
258 u8 *auth_tag, unsigned long auth_tag_len);
260 static void aesni_gcm_enc_avx2(void *ctx,
261 struct gcm_context_data *data, u8 *out,
262 const u8 *in, unsigned long plaintext_len, u8 *iv,
263 u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
264 u8 *auth_tag, unsigned long auth_tag_len)
266 struct crypto_aes_ctx *aes_ctx = (struct crypto_aes_ctx *)ctx;
267 if ((plaintext_len < AVX_GEN2_OPTSIZE) || (aes_ctx->key_length != AES_KEYSIZE_128)) {
268 aesni_gcm_enc(ctx, data, out, in,
269 plaintext_len, iv, hash_subkey, aad,
270 aad_len, auth_tag, auth_tag_len);
271 } else if (plaintext_len < AVX_GEN4_OPTSIZE) {
272 aesni_gcm_precomp_avx_gen2(ctx, hash_subkey);
273 aesni_gcm_enc_avx_gen2(ctx, out, in, plaintext_len, iv, aad,
274 aad_len, auth_tag, auth_tag_len);
276 aesni_gcm_precomp_avx_gen4(ctx, hash_subkey);
277 aesni_gcm_enc_avx_gen4(ctx, out, in, plaintext_len, iv, aad,
278 aad_len, auth_tag, auth_tag_len);
282 static void aesni_gcm_dec_avx2(void *ctx,
283 struct gcm_context_data *data, u8 *out,
284 const u8 *in, unsigned long ciphertext_len, u8 *iv,
285 u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
286 u8 *auth_tag, unsigned long auth_tag_len)
288 struct crypto_aes_ctx *aes_ctx = (struct crypto_aes_ctx *)ctx;
289 if ((ciphertext_len < AVX_GEN2_OPTSIZE) || (aes_ctx->key_length != AES_KEYSIZE_128)) {
290 aesni_gcm_dec(ctx, data, out, in,
291 ciphertext_len, iv, hash_subkey,
292 aad, aad_len, auth_tag, auth_tag_len);
293 } else if (ciphertext_len < AVX_GEN4_OPTSIZE) {
294 aesni_gcm_precomp_avx_gen2(ctx, hash_subkey);
295 aesni_gcm_dec_avx_gen2(ctx, out, in, ciphertext_len, iv, aad,
296 aad_len, auth_tag, auth_tag_len);
298 aesni_gcm_precomp_avx_gen4(ctx, hash_subkey);
299 aesni_gcm_dec_avx_gen4(ctx, out, in, ciphertext_len, iv, aad,
300 aad_len, auth_tag, auth_tag_len);
305 static void (*aesni_gcm_enc_tfm)(void *ctx,
306 struct gcm_context_data *data, u8 *out,
307 const u8 *in, unsigned long plaintext_len,
308 u8 *iv, u8 *hash_subkey, const u8 *aad,
309 unsigned long aad_len, u8 *auth_tag,
310 unsigned long auth_tag_len);
312 static void (*aesni_gcm_dec_tfm)(void *ctx,
313 struct gcm_context_data *data, u8 *out,
314 const u8 *in, unsigned long ciphertext_len,
315 u8 *iv, u8 *hash_subkey, const u8 *aad,
316 unsigned long aad_len, u8 *auth_tag,
317 unsigned long auth_tag_len);
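/*
 * aesni_gcm_enc_tfm/aesni_gcm_dec_tfm are set once in aesni_init() based on
 * CPU features (SSE, AVX or AVX2 builds of the GCM code). The *_avx and
 * *_avx2 wrappers above additionally fall back to the plain SSE routines for
 * short requests and for non-128-bit keys, which the AVX code does not handle.
 */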
320 aesni_rfc4106_gcm_ctx *aesni_rfc4106_gcm_ctx_get(struct crypto_aead *tfm)
322 unsigned long align = AESNI_ALIGN;
324 if (align <= crypto_tfm_ctx_alignment())
326 return PTR_ALIGN(crypto_aead_ctx(tfm), align);
330 generic_gcmaes_ctx *generic_gcmaes_ctx_get(struct crypto_aead *tfm)
332 unsigned long align = AESNI_ALIGN;
334 if (align <= crypto_tfm_ctx_alignment())
336 return PTR_ALIGN(crypto_aead_ctx(tfm), align);
340 static inline struct crypto_aes_ctx *aes_ctx(void *raw_ctx)
342 unsigned long addr = (unsigned long)raw_ctx;
343 unsigned long align = AESNI_ALIGN;
345 if (align <= crypto_tfm_ctx_alignment())
347 return (struct crypto_aes_ctx *)ALIGN(addr, align);
350 static int aes_set_key_common(struct crypto_tfm *tfm, void *raw_ctx,
351 const u8 *in_key, unsigned int key_len)
353 struct crypto_aes_ctx *ctx = aes_ctx(raw_ctx);
354 u32 *flags = &tfm->crt_flags;
357 if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_192 &&
358 key_len != AES_KEYSIZE_256) {
359 *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
363 if (!irq_fpu_usable())
364 err = crypto_aes_expand_key(ctx, in_key, key_len);
367 err = aesni_set_key(ctx, in_key, key_len);
374 static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
375 unsigned int key_len)
377 return aes_set_key_common(tfm, crypto_tfm_ctx(tfm), in_key, key_len);
380 static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
382 struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));
384 if (!irq_fpu_usable())
385 crypto_aes_encrypt_x86(ctx, dst, src);
388 aesni_enc(ctx, dst, src);
393 static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
395 struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));
397 if (!irq_fpu_usable())
398 crypto_aes_decrypt_x86(ctx, dst, src);
401 aesni_dec(ctx, dst, src);
406 static void __aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
408 struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));
410 aesni_enc(ctx, dst, src);
413 static void __aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
415 struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));
417 aesni_dec(ctx, dst, src);
420 static int aesni_skcipher_setkey(struct crypto_skcipher *tfm, const u8 *key,
423 return aes_set_key_common(crypto_skcipher_tfm(tfm),
424 crypto_skcipher_ctx(tfm), key, len);
427 static int ecb_encrypt(struct skcipher_request *req)
429 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
430 struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
431 struct skcipher_walk walk;
435 err = skcipher_walk_virt(&walk, req, true);
438 while ((nbytes = walk.nbytes)) {
439 aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
440 nbytes & AES_BLOCK_MASK);
441 nbytes &= AES_BLOCK_SIZE - 1;
442 err = skcipher_walk_done(&walk, nbytes);
449 static int ecb_decrypt(struct skcipher_request *req)
451 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
452 struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
453 struct skcipher_walk walk;
457 err = skcipher_walk_virt(&walk, req, true);
460 while ((nbytes = walk.nbytes)) {
461 aesni_ecb_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
462 nbytes & AES_BLOCK_MASK);
463 nbytes &= AES_BLOCK_SIZE - 1;
464 err = skcipher_walk_done(&walk, nbytes);
471 static int cbc_encrypt(struct skcipher_request *req)
473 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
474 struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
475 struct skcipher_walk walk;
479 err = skcipher_walk_virt(&walk, req, true);
482 while ((nbytes = walk.nbytes)) {
483 aesni_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
484 nbytes & AES_BLOCK_MASK, walk.iv);
485 nbytes &= AES_BLOCK_SIZE - 1;
486 err = skcipher_walk_done(&walk, nbytes);
493 static int cbc_decrypt(struct skcipher_request *req)
495 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
496 struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
497 struct skcipher_walk walk;
501 err = skcipher_walk_virt(&walk, req, true);
504 while ((nbytes = walk.nbytes)) {
505 aesni_cbc_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
506 nbytes & AES_BLOCK_MASK, walk.iv);
507 nbytes &= AES_BLOCK_SIZE - 1;
508 err = skcipher_walk_done(&walk, nbytes);
516 static void ctr_crypt_final(struct crypto_aes_ctx *ctx,
517 struct skcipher_walk *walk)
519 u8 *ctrblk = walk->iv;
520 u8 keystream[AES_BLOCK_SIZE];
521 u8 *src = walk->src.virt.addr;
522 u8 *dst = walk->dst.virt.addr;
523 unsigned int nbytes = walk->nbytes;
525 aesni_enc(ctx, keystream, ctrblk);
526 crypto_xor_cpy(dst, keystream, src, nbytes);
528 crypto_inc(ctrblk, AES_BLOCK_SIZE);
532 static void aesni_ctr_enc_avx_tfm(struct crypto_aes_ctx *ctx, u8 *out,
533 const u8 *in, unsigned int len, u8 *iv)
536 /* based on key length, override with the by8 version
537 * of ctr mode encryption/decryption for improved performance.
538 * aes_set_key_common() ensures that the key length is one of 128, 192 or 256 bits. */
541 if (ctx->key_length == AES_KEYSIZE_128)
542 aes_ctr_enc_128_avx_by8(in, iv, (void *)ctx, out, len);
543 else if (ctx->key_length == AES_KEYSIZE_192)
544 aes_ctr_enc_192_avx_by8(in, iv, (void *)ctx, out, len);
546 aes_ctr_enc_256_avx_by8(in, iv, (void *)ctx, out, len);
550 static int ctr_crypt(struct skcipher_request *req)
552 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
553 struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
554 struct skcipher_walk walk;
558 err = skcipher_walk_virt(&walk, req, true);
561 while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
562 aesni_ctr_enc_tfm(ctx, walk.dst.virt.addr, walk.src.virt.addr,
563 nbytes & AES_BLOCK_MASK, walk.iv);
564 nbytes &= AES_BLOCK_SIZE - 1;
565 err = skcipher_walk_done(&walk, nbytes);
568 ctr_crypt_final(ctx, &walk);
569 err = skcipher_walk_done(&walk, 0);
576 static int xts_aesni_setkey(struct crypto_skcipher *tfm, const u8 *key,
579 struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
582 err = xts_verify_key(tfm, key, keylen);
588 /* first half of xts-key is for crypt */
589 err = aes_set_key_common(crypto_skcipher_tfm(tfm), ctx->raw_crypt_ctx,
594 /* second half of xts-key is for tweak */
595 return aes_set_key_common(crypto_skcipher_tfm(tfm), ctx->raw_tweak_ctx,
596 key + keylen, keylen);
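/*
 * Concrete example: xts(aes) with AES-256 takes a 64-byte key; the first
 * 32 bytes program the data-encryption (crypt) context and the last 32 bytes
 * program the tweak context.
 */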
600 static void aesni_xts_tweak(void *ctx, u8 *out, const u8 *in)
602 aesni_enc(ctx, out, in);
605 static void aesni_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
607 glue_xts_crypt_128bit_one(ctx, dst, src, iv, GLUE_FUNC_CAST(aesni_enc));
610 static void aesni_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
612 glue_xts_crypt_128bit_one(ctx, dst, src, iv, GLUE_FUNC_CAST(aesni_dec));
615 static void aesni_xts_enc8(void *ctx, u128 *dst, const u128 *src, le128 *iv)
617 aesni_xts_crypt8(ctx, (u8 *)dst, (const u8 *)src, true, (u8 *)iv);
620 static void aesni_xts_dec8(void *ctx, u128 *dst, const u128 *src, le128 *iv)
622 aesni_xts_crypt8(ctx, (u8 *)dst, (const u8 *)src, false, (u8 *)iv);
625 static const struct common_glue_ctx aesni_enc_xts = {
627 .fpu_blocks_limit = 1,
631 .fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_enc8) }
634 .fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_enc) }
638 static const struct common_glue_ctx aesni_dec_xts = {
640 .fpu_blocks_limit = 1,
644 .fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_dec8) }
647 .fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_dec) }
651 static int xts_encrypt(struct skcipher_request *req)
653 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
654 struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
656 return glue_xts_req_128bit(&aesni_enc_xts, req,
657 XTS_TWEAK_CAST(aesni_xts_tweak),
658 aes_ctx(ctx->raw_tweak_ctx),
659 aes_ctx(ctx->raw_crypt_ctx));
662 static int xts_decrypt(struct skcipher_request *req)
664 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
665 struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
667 return glue_xts_req_128bit(&aesni_dec_xts, req,
668 XTS_TWEAK_CAST(aesni_xts_tweak),
669 aes_ctx(ctx->raw_tweak_ctx),
670 aes_ctx(ctx->raw_crypt_ctx));
673 static int rfc4106_init(struct crypto_aead *aead)
675 struct cryptd_aead *cryptd_tfm;
676 struct cryptd_aead **ctx = crypto_aead_ctx(aead);
678 cryptd_tfm = cryptd_alloc_aead("__driver-gcm-aes-aesni",
680 CRYPTO_ALG_INTERNAL);
681 if (IS_ERR(cryptd_tfm))
682 return PTR_ERR(cryptd_tfm);
685 crypto_aead_set_reqsize(aead, crypto_aead_reqsize(&cryptd_tfm->base));
689 static void rfc4106_exit(struct crypto_aead *aead)
691 struct cryptd_aead **ctx = crypto_aead_ctx(aead);
693 cryptd_free_aead(*ctx);
697 rfc4106_set_hash_subkey(u8 *hash_subkey, const u8 *key, unsigned int key_len)
699 struct crypto_cipher *tfm;
702 tfm = crypto_alloc_cipher("aes", 0, 0);
706 ret = crypto_cipher_setkey(tfm, key, key_len);
708 goto out_free_cipher;
710 /* Clear the data in the hash sub key container to zero.*/
711 /* We want to cipher all zeros to create the hash sub key. */
712 memset(hash_subkey, 0, RFC4106_HASH_SUBKEY_SIZE);
714 crypto_cipher_encrypt_one(tfm, hash_subkey, hash_subkey);
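/* The result is the GHASH hash key H = E_K(0^128) from the GCM specification. */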
717 crypto_free_cipher(tfm);
721 static int common_rfc4106_set_key(struct crypto_aead *aead, const u8 *key,
722 unsigned int key_len)
724 struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(aead);
727 crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
730 /* Account for the 4-byte nonce at the end of the key. */
733 memcpy(ctx->nonce, key + key_len, sizeof(ctx->nonce));
735 return aes_set_key_common(crypto_aead_tfm(aead),
736 &ctx->aes_key_expanded, key, key_len) ?:
737 rfc4106_set_hash_subkey(ctx->hash_subkey, key, key_len);
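/*
 * RFC 4106 key layout (for reference): the key material passed in is the raw
 * AES key followed by a 4-byte salt, e.g. a 20-byte key is a 16-byte AES-128
 * key plus the salt that becomes ctx->nonce above.
 */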
740 static int gcmaes_wrapper_set_key(struct crypto_aead *parent, const u8 *key,
741 unsigned int key_len)
743 struct cryptd_aead **ctx = crypto_aead_ctx(parent);
744 struct cryptd_aead *cryptd_tfm = *ctx;
746 return crypto_aead_setkey(&cryptd_tfm->base, key, key_len);
749 static int common_rfc4106_set_authsize(struct crypto_aead *aead,
750 unsigned int authsize)
764 /* This is the Integrity Check Value (aka the authentication tag) length and can
765 * be 8, 12 or 16 bytes long. */
766 static int gcmaes_wrapper_set_authsize(struct crypto_aead *parent,
767 unsigned int authsize)
769 struct cryptd_aead **ctx = crypto_aead_ctx(parent);
770 struct cryptd_aead *cryptd_tfm = *ctx;
772 return crypto_aead_setauthsize(&cryptd_tfm->base, authsize);
775 static int generic_gcmaes_set_authsize(struct crypto_aead *tfm,
776 unsigned int authsize)
794 static int gcmaes_crypt_by_sg(bool enc, struct aead_request *req,
795 unsigned int assoclen, u8 *hash_subkey,
796 u8 *iv, void *aes_ctx)
798 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
799 unsigned long auth_tag_len = crypto_aead_authsize(tfm);
800 struct gcm_context_data data AESNI_ALIGN_ATTR;
801 struct scatter_walk dst_sg_walk = {};
802 unsigned long left = req->cryptlen;
803 unsigned long len, srclen, dstlen;
804 struct scatter_walk assoc_sg_walk;
805 struct scatter_walk src_sg_walk;
806 struct scatterlist src_start[2];
807 struct scatterlist dst_start[2];
808 struct scatterlist *src_sg;
809 struct scatterlist *dst_sg;
810 u8 *src, *dst, *assoc;
815 left -= auth_tag_len;
817 /* Linearize assoc, if not already linear */
818 if (req->src->length >= assoclen && req->src->length &&
819 (!PageHighMem(sg_page(req->src)) ||
820 req->src->offset + req->src->length <= PAGE_SIZE)) {
821 scatterwalk_start(&assoc_sg_walk, req->src);
822 assoc = scatterwalk_map(&assoc_sg_walk);
824 /* assoc can be any length, so must be on heap */
825 assocmem = kmalloc(assoclen, GFP_ATOMIC);
826 if (unlikely(!assocmem))
830 scatterwalk_map_and_copy(assoc, req->src, 0, assoclen, 0);
834 src_sg = scatterwalk_ffwd(src_start, req->src, req->assoclen);
835 scatterwalk_start(&src_sg_walk, src_sg);
836 if (req->src != req->dst) {
837 dst_sg = scatterwalk_ffwd(dst_start, req->dst,
839 scatterwalk_start(&dst_sg_walk, dst_sg);
844 aesni_gcm_init(aes_ctx, &data, iv,
845 hash_subkey, assoc, assoclen);
846 if (req->src != req->dst) {
848 src = scatterwalk_map(&src_sg_walk);
849 dst = scatterwalk_map(&dst_sg_walk);
850 srclen = scatterwalk_clamp(&src_sg_walk, left);
851 dstlen = scatterwalk_clamp(&dst_sg_walk, left);
852 len = min(srclen, dstlen);
855 aesni_gcm_enc_update(aes_ctx, &data,
858 aesni_gcm_dec_update(aes_ctx, &data,
863 scatterwalk_unmap(src);
864 scatterwalk_unmap(dst);
865 scatterwalk_advance(&src_sg_walk, len);
866 scatterwalk_advance(&dst_sg_walk, len);
867 scatterwalk_done(&src_sg_walk, 0, left);
868 scatterwalk_done(&dst_sg_walk, 1, left);
872 dst = src = scatterwalk_map(&src_sg_walk);
873 len = scatterwalk_clamp(&src_sg_walk, left);
876 aesni_gcm_enc_update(aes_ctx, &data,
879 aesni_gcm_dec_update(aes_ctx, &data,
883 scatterwalk_unmap(src);
884 scatterwalk_advance(&src_sg_walk, len);
885 scatterwalk_done(&src_sg_walk, 1, left);
888 aesni_gcm_finalize(aes_ctx, &data, authTag, auth_tag_len);
892 scatterwalk_unmap(assoc);
899 /* Copy out original authTag */
900 scatterwalk_map_and_copy(authTagMsg, req->src,
901 req->assoclen + req->cryptlen -
905 /* Compare generated tag with passed in tag. */
906 return crypto_memneq(authTagMsg, authTag, auth_tag_len) ?
910 /* Copy in the authTag */
911 scatterwalk_map_and_copy(authTag, req->dst,
912 req->assoclen + req->cryptlen,
918 static int gcmaes_encrypt(struct aead_request *req, unsigned int assoclen,
919 u8 *hash_subkey, u8 *iv, void *aes_ctx)
921 u8 one_entry_in_sg = 0;
922 u8 *src, *dst, *assoc;
923 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
924 unsigned long auth_tag_len = crypto_aead_authsize(tfm);
925 struct scatter_walk src_sg_walk;
926 struct scatter_walk dst_sg_walk = {};
927 struct gcm_context_data data AESNI_ALIGN_ATTR;
929 if (((struct crypto_aes_ctx *)aes_ctx)->key_length != AES_KEYSIZE_128 ||
930 aesni_gcm_enc_tfm == aesni_gcm_enc ||
931 req->cryptlen < AVX_GEN2_OPTSIZE) {
932 return gcmaes_crypt_by_sg(true, req, assoclen, hash_subkey, iv,
935 if (sg_is_last(req->src) &&
936 (!PageHighMem(sg_page(req->src)) ||
937 req->src->offset + req->src->length <= PAGE_SIZE) &&
938 sg_is_last(req->dst) &&
939 (!PageHighMem(sg_page(req->dst)) ||
940 req->dst->offset + req->dst->length <= PAGE_SIZE)) {
942 scatterwalk_start(&src_sg_walk, req->src);
943 assoc = scatterwalk_map(&src_sg_walk);
944 src = assoc + req->assoclen;
946 if (unlikely(req->src != req->dst)) {
947 scatterwalk_start(&dst_sg_walk, req->dst);
948 dst = scatterwalk_map(&dst_sg_walk) + req->assoclen;
951 /* Allocate memory for src, dst, assoc */
952 assoc = kmalloc(req->cryptlen + auth_tag_len + req->assoclen,
954 if (unlikely(!assoc))
956 scatterwalk_map_and_copy(assoc, req->src, 0,
957 req->assoclen + req->cryptlen, 0);
958 src = assoc + req->assoclen;
963 aesni_gcm_enc_tfm(aes_ctx, &data, dst, src, req->cryptlen, iv,
964 hash_subkey, assoc, assoclen,
965 dst + req->cryptlen, auth_tag_len);
968 /* The authTag (aka the Integrity Check Value) needs to be written
969 * back to the packet. */
970 if (one_entry_in_sg) {
971 if (unlikely(req->src != req->dst)) {
972 scatterwalk_unmap(dst - req->assoclen);
973 scatterwalk_advance(&dst_sg_walk, req->dst->length);
974 scatterwalk_done(&dst_sg_walk, 1, 0);
976 scatterwalk_unmap(assoc);
977 scatterwalk_advance(&src_sg_walk, req->src->length);
978 scatterwalk_done(&src_sg_walk, req->src == req->dst, 0);
980 scatterwalk_map_and_copy(dst, req->dst, req->assoclen,
981 req->cryptlen + auth_tag_len, 1);
987 static int gcmaes_decrypt(struct aead_request *req, unsigned int assoclen,
988 u8 *hash_subkey, u8 *iv, void *aes_ctx)
990 u8 one_entry_in_sg = 0;
991 u8 *src, *dst, *assoc;
992 unsigned long tempCipherLen = 0;
993 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
994 unsigned long auth_tag_len = crypto_aead_authsize(tfm);
996 struct scatter_walk src_sg_walk;
997 struct scatter_walk dst_sg_walk = {};
998 struct gcm_context_data data AESNI_ALIGN_ATTR;
1001 if (((struct crypto_aes_ctx *)aes_ctx)->key_length != AES_KEYSIZE_128 ||
1002 aesni_gcm_enc_tfm == aesni_gcm_enc ||
1003 req->cryptlen < AVX_GEN2_OPTSIZE) {
1004 return gcmaes_crypt_by_sg(false, req, assoclen, hash_subkey, iv,
1007 tempCipherLen = (unsigned long)(req->cryptlen - auth_tag_len);
1009 if (sg_is_last(req->src) &&
1010 (!PageHighMem(sg_page(req->src)) ||
1011 req->src->offset + req->src->length <= PAGE_SIZE) &&
1012 sg_is_last(req->dst) && req->dst->length &&
1013 (!PageHighMem(sg_page(req->dst)) ||
1014 req->dst->offset + req->dst->length <= PAGE_SIZE)) {
1015 one_entry_in_sg = 1;
1016 scatterwalk_start(&src_sg_walk, req->src);
1017 assoc = scatterwalk_map(&src_sg_walk);
1018 src = assoc + req->assoclen;
1020 if (unlikely(req->src != req->dst)) {
1021 scatterwalk_start(&dst_sg_walk, req->dst);
1022 dst = scatterwalk_map(&dst_sg_walk) + req->assoclen;
1025 /* Allocate memory for src, dst, assoc */
1026 assoc = kmalloc(req->cryptlen + req->assoclen, GFP_ATOMIC);
1029 scatterwalk_map_and_copy(assoc, req->src, 0,
1030 req->assoclen + req->cryptlen, 0);
1031 src = assoc + req->assoclen;
1037 aesni_gcm_dec_tfm(aes_ctx, &data, dst, src, tempCipherLen, iv,
1038 hash_subkey, assoc, assoclen,
1039 authTag, auth_tag_len);
1042 /* Compare generated tag with passed in tag. */
1043 retval = crypto_memneq(src + tempCipherLen, authTag, auth_tag_len) ?
1046 if (one_entry_in_sg) {
1047 if (unlikely(req->src != req->dst)) {
1048 scatterwalk_unmap(dst - req->assoclen);
1049 scatterwalk_advance(&dst_sg_walk, req->dst->length);
1050 scatterwalk_done(&dst_sg_walk, 1, 0);
1052 scatterwalk_unmap(assoc);
1053 scatterwalk_advance(&src_sg_walk, req->src->length);
1054 scatterwalk_done(&src_sg_walk, req->src == req->dst, 0);
1056 scatterwalk_map_and_copy(dst, req->dst, req->assoclen,
1064 static int helper_rfc4106_encrypt(struct aead_request *req)
1066 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1067 struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
1068 void *aes_ctx = &(ctx->aes_key_expanded);
1069 u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
1071 __be32 counter = cpu_to_be32(1);
1073 /* Assuming we are supporting rfc4106 64-bit extended */
1074 /* sequence numbers, we need to have the AAD length equal */
1075 /* to 16 or 20 bytes. */
1076 if (unlikely(req->assoclen != 16 && req->assoclen != 20))
1079 /* Build the IV: 4-byte nonce from the key, 8-byte IV from the request, then a be32 counter of 1 */
1080 for (i = 0; i < 4; i++)
1081 *(iv+i) = ctx->nonce[i];
1082 for (i = 0; i < 8; i++)
1083 *(iv+4+i) = req->iv[i];
1084 *((__be32 *)(iv+12)) = counter;
1086 return gcmaes_encrypt(req, req->assoclen - 8, ctx->hash_subkey, iv,
1090 static int helper_rfc4106_decrypt(struct aead_request *req)
1092 __be32 counter = cpu_to_be32(1);
1093 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1094 struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
1095 void *aes_ctx = &(ctx->aes_key_expanded);
1096 u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
1099 if (unlikely(req->assoclen != 16 && req->assoclen != 20))
1102 /* Assuming we are supporting rfc4106 64-bit extended */
1103 /* sequence numbers, we need to have the AAD length */
1104 /* equal to 16 or 20 bytes. */
1106 /* Build the IV: 4-byte nonce from the key, 8-byte IV from the request, then a be32 counter of 1 */
1107 for (i = 0; i < 4; i++)
1108 *(iv+i) = ctx->nonce[i];
1109 for (i = 0; i < 8; i++)
1110 *(iv+4+i) = req->iv[i];
1111 *((__be32 *)(iv+12)) = counter;
1113 return gcmaes_decrypt(req, req->assoclen - 8, ctx->hash_subkey, iv,
1117 static int gcmaes_wrapper_encrypt(struct aead_request *req)
1119 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1120 struct cryptd_aead **ctx = crypto_aead_ctx(tfm);
1121 struct cryptd_aead *cryptd_tfm = *ctx;
1123 tfm = &cryptd_tfm->base;
1124 if (irq_fpu_usable() && (!in_atomic() ||
1125 !cryptd_aead_queued(cryptd_tfm)))
1126 tfm = cryptd_aead_child(cryptd_tfm);
1128 aead_request_set_tfm(req, tfm);
1130 return crypto_aead_encrypt(req);
1133 static int gcmaes_wrapper_decrypt(struct aead_request *req)
1135 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1136 struct cryptd_aead **ctx = crypto_aead_ctx(tfm);
1137 struct cryptd_aead *cryptd_tfm = *ctx;
1139 tfm = &cryptd_tfm->base;
1140 if (irq_fpu_usable() && (!in_atomic() ||
1141 !cryptd_aead_queued(cryptd_tfm)))
1142 tfm = cryptd_aead_child(cryptd_tfm);
1144 aead_request_set_tfm(req, tfm);
1146 return crypto_aead_decrypt(req);
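/*
 * Both wrappers above use the same rule: if the FPU is usable and we are
 * either not in atomic context or cryptd has nothing queued, call the inner
 * (child) algorithm synchronously; otherwise leave the request on the cryptd
 * async path.
 */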
1150 static struct crypto_alg aesni_algs[] = { {
1152 .cra_driver_name = "aes-aesni",
1153 .cra_priority = 300,
1154 .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
1155 .cra_blocksize = AES_BLOCK_SIZE,
1156 .cra_ctxsize = CRYPTO_AES_CTX_SIZE,
1157 .cra_module = THIS_MODULE,
1160 .cia_min_keysize = AES_MIN_KEY_SIZE,
1161 .cia_max_keysize = AES_MAX_KEY_SIZE,
1162 .cia_setkey = aes_set_key,
1163 .cia_encrypt = aes_encrypt,
1164 .cia_decrypt = aes_decrypt
1168 .cra_name = "__aes",
1169 .cra_driver_name = "__aes-aesni",
1170 .cra_priority = 300,
1171 .cra_flags = CRYPTO_ALG_TYPE_CIPHER | CRYPTO_ALG_INTERNAL,
1172 .cra_blocksize = AES_BLOCK_SIZE,
1173 .cra_ctxsize = CRYPTO_AES_CTX_SIZE,
1174 .cra_module = THIS_MODULE,
1177 .cia_min_keysize = AES_MIN_KEY_SIZE,
1178 .cia_max_keysize = AES_MAX_KEY_SIZE,
1179 .cia_setkey = aes_set_key,
1180 .cia_encrypt = __aes_encrypt,
1181 .cia_decrypt = __aes_decrypt
1186 static struct skcipher_alg aesni_skciphers[] = {
1189 .cra_name = "__ecb(aes)",
1190 .cra_driver_name = "__ecb-aes-aesni",
1191 .cra_priority = 400,
1192 .cra_flags = CRYPTO_ALG_INTERNAL,
1193 .cra_blocksize = AES_BLOCK_SIZE,
1194 .cra_ctxsize = CRYPTO_AES_CTX_SIZE,
1195 .cra_module = THIS_MODULE,
1197 .min_keysize = AES_MIN_KEY_SIZE,
1198 .max_keysize = AES_MAX_KEY_SIZE,
1199 .setkey = aesni_skcipher_setkey,
1200 .encrypt = ecb_encrypt,
1201 .decrypt = ecb_decrypt,
1204 .cra_name = "__cbc(aes)",
1205 .cra_driver_name = "__cbc-aes-aesni",
1206 .cra_priority = 400,
1207 .cra_flags = CRYPTO_ALG_INTERNAL,
1208 .cra_blocksize = AES_BLOCK_SIZE,
1209 .cra_ctxsize = CRYPTO_AES_CTX_SIZE,
1210 .cra_module = THIS_MODULE,
1212 .min_keysize = AES_MIN_KEY_SIZE,
1213 .max_keysize = AES_MAX_KEY_SIZE,
1214 .ivsize = AES_BLOCK_SIZE,
1215 .setkey = aesni_skcipher_setkey,
1216 .encrypt = cbc_encrypt,
1217 .decrypt = cbc_decrypt,
1218 #ifdef CONFIG_X86_64
1221 .cra_name = "__ctr(aes)",
1222 .cra_driver_name = "__ctr-aes-aesni",
1223 .cra_priority = 400,
1224 .cra_flags = CRYPTO_ALG_INTERNAL,
1226 .cra_ctxsize = CRYPTO_AES_CTX_SIZE,
1227 .cra_module = THIS_MODULE,
1229 .min_keysize = AES_MIN_KEY_SIZE,
1230 .max_keysize = AES_MAX_KEY_SIZE,
1231 .ivsize = AES_BLOCK_SIZE,
1232 .chunksize = AES_BLOCK_SIZE,
1233 .setkey = aesni_skcipher_setkey,
1234 .encrypt = ctr_crypt,
1235 .decrypt = ctr_crypt,
1238 .cra_name = "__xts(aes)",
1239 .cra_driver_name = "__xts-aes-aesni",
1240 .cra_priority = 401,
1241 .cra_flags = CRYPTO_ALG_INTERNAL,
1242 .cra_blocksize = AES_BLOCK_SIZE,
1243 .cra_ctxsize = XTS_AES_CTX_SIZE,
1244 .cra_module = THIS_MODULE,
1246 .min_keysize = 2 * AES_MIN_KEY_SIZE,
1247 .max_keysize = 2 * AES_MAX_KEY_SIZE,
1248 .ivsize = AES_BLOCK_SIZE,
1249 .setkey = xts_aesni_setkey,
1250 .encrypt = xts_encrypt,
1251 .decrypt = xts_decrypt,
1257 struct simd_skcipher_alg *aesni_simd_skciphers[ARRAY_SIZE(aesni_skciphers)];
1260 const char *algname;
1261 const char *drvname;
1262 const char *basename;
1263 struct simd_skcipher_alg *simd;
1264 } aesni_simd_skciphers2[] = {
1265 #if (defined(MODULE) && IS_ENABLED(CONFIG_CRYPTO_PCBC)) || \
1266 IS_BUILTIN(CONFIG_CRYPTO_PCBC)
1268 .algname = "pcbc(aes)",
1269 .drvname = "pcbc-aes-aesni",
1270 .basename = "fpu(pcbc(__aes-aesni))",
1275 #ifdef CONFIG_X86_64
1276 static int generic_gcmaes_set_key(struct crypto_aead *aead, const u8 *key,
1277 unsigned int key_len)
1279 struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(aead);
1281 return aes_set_key_common(crypto_aead_tfm(aead),
1282 &ctx->aes_key_expanded, key, key_len) ?:
1283 rfc4106_set_hash_subkey(ctx->hash_subkey, key, key_len);
1286 static int generic_gcmaes_encrypt(struct aead_request *req)
1288 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1289 struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(tfm);
1290 void *aes_ctx = &(ctx->aes_key_expanded);
1291 u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
1292 __be32 counter = cpu_to_be32(1);
1294 memcpy(iv, req->iv, 12);
1295 *((__be32 *)(iv+12)) = counter;
1297 return gcmaes_encrypt(req, req->assoclen, ctx->hash_subkey, iv,
1301 static int generic_gcmaes_decrypt(struct aead_request *req)
1303 __be32 counter = cpu_to_be32(1);
1304 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1305 struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(tfm);
1306 void *aes_ctx = &(ctx->aes_key_expanded);
1307 u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
1309 memcpy(iv, req->iv, 12);
1310 *((__be32 *)(iv+12)) = counter;
1312 return gcmaes_decrypt(req, req->assoclen, ctx->hash_subkey, iv,
1316 static int generic_gcmaes_init(struct crypto_aead *aead)
1318 struct cryptd_aead *cryptd_tfm;
1319 struct cryptd_aead **ctx = crypto_aead_ctx(aead);
1321 cryptd_tfm = cryptd_alloc_aead("__driver-generic-gcm-aes-aesni",
1322 CRYPTO_ALG_INTERNAL,
1323 CRYPTO_ALG_INTERNAL);
1324 if (IS_ERR(cryptd_tfm))
1325 return PTR_ERR(cryptd_tfm);
1328 crypto_aead_set_reqsize(aead, crypto_aead_reqsize(&cryptd_tfm->base));
1333 static void generic_gcmaes_exit(struct crypto_aead *aead)
1335 struct cryptd_aead **ctx = crypto_aead_ctx(aead);
1337 cryptd_free_aead(*ctx);
1340 static struct aead_alg aesni_aead_algs[] = { {
1341 .setkey = common_rfc4106_set_key,
1342 .setauthsize = common_rfc4106_set_authsize,
1343 .encrypt = helper_rfc4106_encrypt,
1344 .decrypt = helper_rfc4106_decrypt,
1345 .ivsize = GCM_RFC4106_IV_SIZE,
1348 .cra_name = "__gcm-aes-aesni",
1349 .cra_driver_name = "__driver-gcm-aes-aesni",
1350 .cra_flags = CRYPTO_ALG_INTERNAL,
1352 .cra_ctxsize = sizeof(struct aesni_rfc4106_gcm_ctx),
1353 .cra_alignmask = AESNI_ALIGN - 1,
1354 .cra_module = THIS_MODULE,
1357 .init = rfc4106_init,
1358 .exit = rfc4106_exit,
1359 .setkey = gcmaes_wrapper_set_key,
1360 .setauthsize = gcmaes_wrapper_set_authsize,
1361 .encrypt = gcmaes_wrapper_encrypt,
1362 .decrypt = gcmaes_wrapper_decrypt,
1363 .ivsize = GCM_RFC4106_IV_SIZE,
1366 .cra_name = "rfc4106(gcm(aes))",
1367 .cra_driver_name = "rfc4106-gcm-aesni",
1368 .cra_priority = 400,
1369 .cra_flags = CRYPTO_ALG_ASYNC,
1371 .cra_ctxsize = sizeof(struct cryptd_aead *),
1372 .cra_module = THIS_MODULE,
1375 .setkey = generic_gcmaes_set_key,
1376 .setauthsize = generic_gcmaes_set_authsize,
1377 .encrypt = generic_gcmaes_encrypt,
1378 .decrypt = generic_gcmaes_decrypt,
1379 .ivsize = GCM_AES_IV_SIZE,
1382 .cra_name = "__generic-gcm-aes-aesni",
1383 .cra_driver_name = "__driver-generic-gcm-aes-aesni",
1385 .cra_flags = CRYPTO_ALG_INTERNAL,
1387 .cra_ctxsize = sizeof(struct generic_gcmaes_ctx),
1388 .cra_alignmask = AESNI_ALIGN - 1,
1389 .cra_module = THIS_MODULE,
1392 .init = generic_gcmaes_init,
1393 .exit = generic_gcmaes_exit,
1394 .setkey = gcmaes_wrapper_set_key,
1395 .setauthsize = gcmaes_wrapper_set_authsize,
1396 .encrypt = gcmaes_wrapper_encrypt,
1397 .decrypt = gcmaes_wrapper_decrypt,
1398 .ivsize = GCM_AES_IV_SIZE,
1401 .cra_name = "gcm(aes)",
1402 .cra_driver_name = "generic-gcm-aesni",
1403 .cra_priority = 400,
1404 .cra_flags = CRYPTO_ALG_ASYNC,
1406 .cra_ctxsize = sizeof(struct cryptd_aead *),
1407 .cra_module = THIS_MODULE,
1411 static struct aead_alg aesni_aead_algs[0];
1415 static const struct x86_cpu_id aesni_cpu_id[] = {
1416 X86_FEATURE_MATCH(X86_FEATURE_AES),
1419 MODULE_DEVICE_TABLE(x86cpu, aesni_cpu_id);
1421 static void aesni_free_simds(void)
1425 for (i = 0; i < ARRAY_SIZE(aesni_simd_skciphers) &&
1426 aesni_simd_skciphers[i]; i++)
1427 simd_skcipher_free(aesni_simd_skciphers[i]);
1429 for (i = 0; i < ARRAY_SIZE(aesni_simd_skciphers2); i++)
1430 if (aesni_simd_skciphers2[i].simd)
1431 simd_skcipher_free(aesni_simd_skciphers2[i].simd);
1434 static int __init aesni_init(void)
1436 struct simd_skcipher_alg *simd;
1437 const char *basename;
1438 const char *algname;
1439 const char *drvname;
1443 if (!x86_match_cpu(aesni_cpu_id))
1445 #ifdef CONFIG_X86_64
1446 #ifdef CONFIG_AS_AVX2
1447 if (boot_cpu_has(X86_FEATURE_AVX2)) {
1448 pr_info("AVX2 version of gcm_enc/dec engaged.\n");
1449 aesni_gcm_enc_tfm = aesni_gcm_enc_avx2;
1450 aesni_gcm_dec_tfm = aesni_gcm_dec_avx2;
1453 #ifdef CONFIG_AS_AVX
1454 if (boot_cpu_has(X86_FEATURE_AVX)) {
1455 pr_info("AVX version of gcm_enc/dec engaged.\n");
1456 aesni_gcm_enc_tfm = aesni_gcm_enc_avx;
1457 aesni_gcm_dec_tfm = aesni_gcm_dec_avx;
1461 pr_info("SSE version of gcm_enc/dec engaged.\n");
1462 aesni_gcm_enc_tfm = aesni_gcm_enc;
1463 aesni_gcm_dec_tfm = aesni_gcm_dec;
1465 aesni_ctr_enc_tfm = aesni_ctr_enc;
1466 #ifdef CONFIG_AS_AVX
1467 if (boot_cpu_has(X86_FEATURE_AVX)) {
1468 /* optimize performance of ctr mode encryption transform */
1469 aesni_ctr_enc_tfm = aesni_ctr_enc_avx_tfm;
1470 pr_info("AES CTR mode by8 optimization enabled\n");
1475 err = crypto_fpu_init();
1479 err = crypto_register_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
1483 err = crypto_register_skciphers(aesni_skciphers,
1484 ARRAY_SIZE(aesni_skciphers));
1486 goto unregister_algs;
1488 err = crypto_register_aeads(aesni_aead_algs,
1489 ARRAY_SIZE(aesni_aead_algs));
1491 goto unregister_skciphers;
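/*
 * Wrap each internal "__"-prefixed skcipher in a simd helper; the "+ 2"
 * below skips that "__" prefix to form the public algorithm and driver names.
 */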
1493 for (i = 0; i < ARRAY_SIZE(aesni_skciphers); i++) {
1494 algname = aesni_skciphers[i].base.cra_name + 2;
1495 drvname = aesni_skciphers[i].base.cra_driver_name + 2;
1496 basename = aesni_skciphers[i].base.cra_driver_name;
1497 simd = simd_skcipher_create_compat(algname, drvname, basename);
1498 err = PTR_ERR(simd);
1500 goto unregister_simds;
1502 aesni_simd_skciphers[i] = simd;
1505 for (i = 0; i < ARRAY_SIZE(aesni_simd_skciphers2); i++) {
1506 algname = aesni_simd_skciphers2[i].algname;
1507 drvname = aesni_simd_skciphers2[i].drvname;
1508 basename = aesni_simd_skciphers2[i].basename;
1509 simd = simd_skcipher_create_compat(algname, drvname, basename);
1510 err = PTR_ERR(simd);
1514 aesni_simd_skciphers2[i].simd = simd;
1521 crypto_unregister_aeads(aesni_aead_algs, ARRAY_SIZE(aesni_aead_algs));
1522 unregister_skciphers:
1523 crypto_unregister_skciphers(aesni_skciphers,
1524 ARRAY_SIZE(aesni_skciphers));
1526 crypto_unregister_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
1532 static void __exit aesni_exit(void)
1535 crypto_unregister_aeads(aesni_aead_algs, ARRAY_SIZE(aesni_aead_algs));
1536 crypto_unregister_skciphers(aesni_skciphers,
1537 ARRAY_SIZE(aesni_skciphers));
1538 crypto_unregister_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
1543 late_initcall(aesni_init);
1544 module_exit(aesni_exit);
1546 MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, optimized with Intel AES-NI instructions");
1547 MODULE_LICENSE("GPL");
1548 MODULE_ALIAS_CRYPTO("aes");