/*
 * Support for Intel AES-NI instructions. This file contains glue
 * code; the real AES implementation is in intel-aes_asm.S.
 *
 * Copyright (C) 2008, Intel Corp.
 *    Author: Huang Ying <ying.huang@intel.com>
 *
 * Added RFC4106 AES-GCM support for 128-bit keys under the AEAD
 * interface for 64-bit kernels.
 *    Authors: Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk <tadeusz.struk@intel.com>
 *             Aidan O'Mahony <aidan.o.mahony@intel.com>
 *    Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */
#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/module.h>
#include <linux/err.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/cryptd.h>
#include <crypto/ctr.h>
#include <crypto/b128ops.h>
#include <crypto/gcm.h>
#include <crypto/xts.h>
#include <asm/cpu_device_id.h>
#include <asm/fpu/api.h>
#include <asm/crypto/aes.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <linux/workqueue.h>
#include <linux/spinlock.h>
#ifdef CONFIG_X86_64
#include <asm/crypto/glue_helper.h>
#endif
#define AESNI_ALIGN	16
#define AESNI_ALIGN_ATTR __attribute__ ((__aligned__(AESNI_ALIGN)))
#define AES_BLOCK_MASK	(~(AES_BLOCK_SIZE - 1))
#define RFC4106_HASH_SUBKEY_SIZE 16
#define AESNI_ALIGN_EXTRA ((AESNI_ALIGN - 1) & ~(CRYPTO_MINALIGN - 1))
#define CRYPTO_AES_CTX_SIZE (sizeof(struct crypto_aes_ctx) + AESNI_ALIGN_EXTRA)
#define XTS_AES_CTX_SIZE (sizeof(struct aesni_xts_ctx) + AESNI_ALIGN_EXTRA)
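
/*
 * Worked example of the sizing arithmetic above (illustrative only;
 * CRYPTO_MINALIGN is configuration dependent -- 8 is assumed here):
 *
 *	AESNI_ALIGN_EXTRA = (16 - 1) & ~(8 - 1) = 15 & ~7 = 8
 *
 * so CRYPTO_AES_CTX_SIZE reserves sizeof(struct crypto_aes_ctx) plus 8
 * spare bytes, enough for aes_ctx() below to bump an 8-byte-aligned
 * context pointer up to the next 16-byte boundary.
 */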
/* This data is stored at the end of the crypto_tfm struct.
 * It's a kind of per-"session" data storage location.
 * This needs to be 16-byte aligned.
 */
struct aesni_rfc4106_gcm_ctx {
        u8 hash_subkey[16] AESNI_ALIGN_ATTR;
        struct crypto_aes_ctx aes_key_expanded AESNI_ALIGN_ATTR;
        u8 nonce[4];
};

struct generic_gcmaes_ctx {
        u8 hash_subkey[16] AESNI_ALIGN_ATTR;
        struct crypto_aes_ctx aes_key_expanded AESNI_ALIGN_ATTR;
};

struct aesni_xts_ctx {
        u8 raw_tweak_ctx[sizeof(struct crypto_aes_ctx)] AESNI_ALIGN_ATTR;
        u8 raw_crypt_ctx[sizeof(struct crypto_aes_ctx)] AESNI_ALIGN_ATTR;
};
asmlinkage int aesni_set_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
                             unsigned int key_len);
asmlinkage void aesni_enc(struct crypto_aes_ctx *ctx, u8 *out,
                          const u8 *in);
asmlinkage void aesni_dec(struct crypto_aes_ctx *ctx, u8 *out,
                          const u8 *in);
asmlinkage void aesni_ecb_enc(struct crypto_aes_ctx *ctx, u8 *out,
                              const u8 *in, unsigned int len);
asmlinkage void aesni_ecb_dec(struct crypto_aes_ctx *ctx, u8 *out,
                              const u8 *in, unsigned int len);
asmlinkage void aesni_cbc_enc(struct crypto_aes_ctx *ctx, u8 *out,
                              const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_cbc_dec(struct crypto_aes_ctx *ctx, u8 *out,
                              const u8 *in, unsigned int len, u8 *iv);

int crypto_fpu_init(void);
void crypto_fpu_exit(void);
#define AVX_GEN2_OPTSIZE 640
#define AVX_GEN4_OPTSIZE 4096

#ifdef CONFIG_X86_64

static void (*aesni_ctr_enc_tfm)(struct crypto_aes_ctx *ctx, u8 *out,
                              const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_ctr_enc(struct crypto_aes_ctx *ctx, u8 *out,
                              const u8 *in, unsigned int len, u8 *iv);

asmlinkage void aesni_xts_crypt8(struct crypto_aes_ctx *ctx, u8 *out,
                                 const u8 *in, bool enc, u8 *iv);
/* asmlinkage void aesni_gcm_enc()
 * void *ctx, AES Key schedule. Starts on a 16 byte boundary.
 * u8 *out, Ciphertext output. Encrypt in-place is allowed.
 * const u8 *in, Plaintext input
 * unsigned long plaintext_len, Length of data in bytes for encryption.
 * u8 *iv, Pre-counter block j0: 12 byte IV concatenated with 0x00000001.
 *         16-byte aligned pointer.
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 * const u8 *aad, Additional Authentication Data (AAD)
 * unsigned long aad_len, Length of AAD in bytes.
 * u8 *auth_tag, Authenticated Tag output.
 * unsigned long auth_tag_len), Authenticated Tag Length in bytes.
 *         Valid values are 16 (most likely), 12 or 8.
 */
asmlinkage void aesni_gcm_enc(void *ctx, u8 *out,
                        const u8 *in, unsigned long plaintext_len, u8 *iv,
                        u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
                        u8 *auth_tag, unsigned long auth_tag_len);
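
/*
 * Illustrative sketch (not part of this driver): how a caller builds the
 * pre-counter block j0 and invokes aesni_gcm_enc(). The buffer names
 * (nonce, dst, src, aad, hash_subkey and their lengths) are hypothetical;
 * the real call sites are gcmaes_encrypt()/gcmaes_decrypt() below.
 *
 *	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
 *	u8 tag[16];
 *
 *	memcpy(iv, nonce, 12);                  // 12-byte IV
 *	*(__be32 *)(iv + 12) = cpu_to_be32(1);  // j0 = IV || 0x00000001
 *	kernel_fpu_begin();                     // SSE/AVX state in use
 *	aesni_gcm_enc(aes_ctx, dst, src, src_len, iv,
 *		      hash_subkey, aad, aad_len, tag, sizeof(tag));
 *	kernel_fpu_end();
 */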
/* asmlinkage void aesni_gcm_dec()
 * void *ctx, AES Key schedule. Starts on a 16 byte boundary.
 * u8 *out, Plaintext output. Decrypt in-place is allowed.
 * const u8 *in, Ciphertext input
 * unsigned long ciphertext_len, Length of data in bytes for decryption.
 * u8 *iv, Pre-counter block j0: 12 byte IV concatenated with 0x00000001.
 *         16-byte aligned pointer.
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 * const u8 *aad, Additional Authentication Data (AAD)
 * unsigned long aad_len, Length of AAD in bytes. With RFC4106 this is going
 *         to be 8 or 12 bytes
 * u8 *auth_tag, Authenticated Tag output.
 * unsigned long auth_tag_len) Authenticated Tag Length in bytes.
 *         Valid values are 16 (most likely), 12 or 8.
 */
asmlinkage void aesni_gcm_dec(void *ctx, u8 *out,
                        const u8 *in, unsigned long ciphertext_len, u8 *iv,
                        u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
                        u8 *auth_tag, unsigned long auth_tag_len);
#ifdef CONFIG_AS_AVX
asmlinkage void aes_ctr_enc_128_avx_by8(const u8 *in, u8 *iv,
                void *keys, u8 *out, unsigned int num_bytes);
asmlinkage void aes_ctr_enc_192_avx_by8(const u8 *in, u8 *iv,
                void *keys, u8 *out, unsigned int num_bytes);
asmlinkage void aes_ctr_enc_256_avx_by8(const u8 *in, u8 *iv,
                void *keys, u8 *out, unsigned int num_bytes);
/*
 * asmlinkage void aesni_gcm_precomp_avx_gen2()
 * gcm_data *my_ctx_data, context data
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 */
asmlinkage void aesni_gcm_precomp_avx_gen2(void *my_ctx_data, u8 *hash_subkey);

asmlinkage void aesni_gcm_enc_avx_gen2(void *ctx, u8 *out,
                        const u8 *in, unsigned long plaintext_len, u8 *iv,
                        const u8 *aad, unsigned long aad_len,
                        u8 *auth_tag, unsigned long auth_tag_len);

asmlinkage void aesni_gcm_dec_avx_gen2(void *ctx, u8 *out,
                        const u8 *in, unsigned long ciphertext_len, u8 *iv,
                        const u8 *aad, unsigned long aad_len,
                        u8 *auth_tag, unsigned long auth_tag_len);
static void aesni_gcm_enc_avx(void *ctx, u8 *out,
                        const u8 *in, unsigned long plaintext_len, u8 *iv,
                        u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
                        u8 *auth_tag, unsigned long auth_tag_len)
{
        struct crypto_aes_ctx *aes_ctx = (struct crypto_aes_ctx *)ctx;

        /* The gen2 path only pays off above AVX_GEN2_OPTSIZE and for
         * 128-bit keys; otherwise fall back to the SSE implementation. */
        if ((plaintext_len < AVX_GEN2_OPTSIZE) ||
            (aes_ctx->key_length != AES_KEYSIZE_128)) {
                aesni_gcm_enc(ctx, out, in, plaintext_len, iv, hash_subkey, aad,
                                aad_len, auth_tag, auth_tag_len);
        } else {
                aesni_gcm_precomp_avx_gen2(ctx, hash_subkey);
                aesni_gcm_enc_avx_gen2(ctx, out, in, plaintext_len, iv, aad,
                                        aad_len, auth_tag, auth_tag_len);
        }
}

static void aesni_gcm_dec_avx(void *ctx, u8 *out,
                        const u8 *in, unsigned long ciphertext_len, u8 *iv,
                        u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
                        u8 *auth_tag, unsigned long auth_tag_len)
{
        struct crypto_aes_ctx *aes_ctx = (struct crypto_aes_ctx *)ctx;

        if ((ciphertext_len < AVX_GEN2_OPTSIZE) ||
            (aes_ctx->key_length != AES_KEYSIZE_128)) {
                aesni_gcm_dec(ctx, out, in, ciphertext_len, iv, hash_subkey, aad,
                                aad_len, auth_tag, auth_tag_len);
        } else {
                aesni_gcm_precomp_avx_gen2(ctx, hash_subkey);
                aesni_gcm_dec_avx_gen2(ctx, out, in, ciphertext_len, iv, aad,
                                aad_len, auth_tag, auth_tag_len);
        }
}
#endif
#ifdef CONFIG_AS_AVX2
/*
 * asmlinkage void aesni_gcm_precomp_avx_gen4()
 * gcm_data *my_ctx_data, context data
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 */
asmlinkage void aesni_gcm_precomp_avx_gen4(void *my_ctx_data, u8 *hash_subkey);

asmlinkage void aesni_gcm_enc_avx_gen4(void *ctx, u8 *out,
                        const u8 *in, unsigned long plaintext_len, u8 *iv,
                        const u8 *aad, unsigned long aad_len,
                        u8 *auth_tag, unsigned long auth_tag_len);

asmlinkage void aesni_gcm_dec_avx_gen4(void *ctx, u8 *out,
                        const u8 *in, unsigned long ciphertext_len, u8 *iv,
                        const u8 *aad, unsigned long aad_len,
                        u8 *auth_tag, unsigned long auth_tag_len);
static void aesni_gcm_enc_avx2(void *ctx, u8 *out,
                        const u8 *in, unsigned long plaintext_len, u8 *iv,
                        u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
                        u8 *auth_tag, unsigned long auth_tag_len)
{
        struct crypto_aes_ctx *aes_ctx = (struct crypto_aes_ctx *)ctx;

        /* Three-way dispatch: SSE below AVX_GEN2_OPTSIZE or for non-128-bit
         * keys, gen2 up to AVX_GEN4_OPTSIZE, gen4 above that. */
        if ((plaintext_len < AVX_GEN2_OPTSIZE) ||
            (aes_ctx->key_length != AES_KEYSIZE_128)) {
                aesni_gcm_enc(ctx, out, in, plaintext_len, iv, hash_subkey, aad,
                                aad_len, auth_tag, auth_tag_len);
        } else if (plaintext_len < AVX_GEN4_OPTSIZE) {
                aesni_gcm_precomp_avx_gen2(ctx, hash_subkey);
                aesni_gcm_enc_avx_gen2(ctx, out, in, plaintext_len, iv, aad,
                                        aad_len, auth_tag, auth_tag_len);
        } else {
                aesni_gcm_precomp_avx_gen4(ctx, hash_subkey);
                aesni_gcm_enc_avx_gen4(ctx, out, in, plaintext_len, iv, aad,
                                        aad_len, auth_tag, auth_tag_len);
        }
}

static void aesni_gcm_dec_avx2(void *ctx, u8 *out,
                        const u8 *in, unsigned long ciphertext_len, u8 *iv,
                        u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
                        u8 *auth_tag, unsigned long auth_tag_len)
{
        struct crypto_aes_ctx *aes_ctx = (struct crypto_aes_ctx *)ctx;

        if ((ciphertext_len < AVX_GEN2_OPTSIZE) ||
            (aes_ctx->key_length != AES_KEYSIZE_128)) {
                aesni_gcm_dec(ctx, out, in, ciphertext_len, iv, hash_subkey,
                              aad, aad_len, auth_tag, auth_tag_len);
        } else if (ciphertext_len < AVX_GEN4_OPTSIZE) {
                aesni_gcm_precomp_avx_gen2(ctx, hash_subkey);
                aesni_gcm_dec_avx_gen2(ctx, out, in, ciphertext_len, iv, aad,
                                aad_len, auth_tag, auth_tag_len);
        } else {
                aesni_gcm_precomp_avx_gen4(ctx, hash_subkey);
                aesni_gcm_dec_avx_gen4(ctx, out, in, ciphertext_len, iv, aad,
                                aad_len, auth_tag, auth_tag_len);
        }
}
#endif
static void (*aesni_gcm_enc_tfm)(void *ctx, u8 *out,
                        const u8 *in, unsigned long plaintext_len, u8 *iv,
                        u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
                        u8 *auth_tag, unsigned long auth_tag_len);

static void (*aesni_gcm_dec_tfm)(void *ctx, u8 *out,
                        const u8 *in, unsigned long ciphertext_len, u8 *iv,
                        u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
                        u8 *auth_tag, unsigned long auth_tag_len);
static inline struct
aesni_rfc4106_gcm_ctx *aesni_rfc4106_gcm_ctx_get(struct crypto_aead *tfm)
{
        unsigned long align = AESNI_ALIGN;

        if (align <= crypto_tfm_ctx_alignment())
                align = 1;
        return PTR_ALIGN(crypto_aead_ctx(tfm), align);
}

static inline struct
generic_gcmaes_ctx *generic_gcmaes_ctx_get(struct crypto_aead *tfm)
{
        unsigned long align = AESNI_ALIGN;

        if (align <= crypto_tfm_ctx_alignment())
                align = 1;
        return PTR_ALIGN(crypto_aead_ctx(tfm), align);
}

static inline struct crypto_aes_ctx *aes_ctx(void *raw_ctx)
{
        unsigned long addr = (unsigned long)raw_ctx;
        unsigned long align = AESNI_ALIGN;

        if (align <= crypto_tfm_ctx_alignment())
                align = 1;
        return (struct crypto_aes_ctx *)ALIGN(addr, align);
}
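
/*
 * Illustrative sketch with assumed numbers (not part of the driver): with
 * a tfm context alignment of 8, aes_ctx() rounds a raw context pointer up
 * to the next 16-byte boundary inside the over-sized allocation, e.g.
 *
 *	raw_ctx = 0x...1008  ->  ALIGN(0x...1008, 16) = 0x...1010
 *
 * The AESNI_ALIGN_EXTRA slack reserved by CRYPTO_AES_CTX_SIZE guarantees
 * the rounded pointer still leaves room for a whole struct crypto_aes_ctx.
 */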
static int aes_set_key_common(struct crypto_tfm *tfm, void *raw_ctx,
                              const u8 *in_key, unsigned int key_len)
{
        struct crypto_aes_ctx *ctx = aes_ctx(raw_ctx);
        u32 *flags = &tfm->crt_flags;
        int err;

        if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_192 &&
            key_len != AES_KEYSIZE_256) {
                *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                return -EINVAL;
        }

        if (!irq_fpu_usable())
                err = crypto_aes_expand_key(ctx, in_key, key_len);
        else {
                kernel_fpu_begin();
                err = aesni_set_key(ctx, in_key, key_len);
                kernel_fpu_end();
        }

        return err;
}

static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                       unsigned int key_len)
{
        return aes_set_key_common(tfm, crypto_tfm_ctx(tfm), in_key, key_len);
}
static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

        if (!irq_fpu_usable())
                crypto_aes_encrypt_x86(ctx, dst, src);
        else {
                kernel_fpu_begin();
                aesni_enc(ctx, dst, src);
                kernel_fpu_end();
        }
}

static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

        if (!irq_fpu_usable())
                crypto_aes_decrypt_x86(ctx, dst, src);
        else {
                kernel_fpu_begin();
                aesni_dec(ctx, dst, src);
                kernel_fpu_end();
        }
}

static void __aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

        aesni_enc(ctx, dst, src);
}

static void __aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

        aesni_dec(ctx, dst, src);
}
static int aesni_skcipher_setkey(struct crypto_skcipher *tfm, const u8 *key,
                                 unsigned int len)
{
        return aes_set_key_common(crypto_skcipher_tfm(tfm),
                                  crypto_skcipher_ctx(tfm), key, len);
}

static int ecb_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, true);

        kernel_fpu_begin();
        while ((nbytes = walk.nbytes)) {
                aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                              nbytes & AES_BLOCK_MASK);
                nbytes &= AES_BLOCK_SIZE - 1;
                err = skcipher_walk_done(&walk, nbytes);
        }
        kernel_fpu_end();

        return err;
}
static int ecb_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, true);

        kernel_fpu_begin();
        while ((nbytes = walk.nbytes)) {
                aesni_ecb_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                              nbytes & AES_BLOCK_MASK);
                nbytes &= AES_BLOCK_SIZE - 1;
                err = skcipher_walk_done(&walk, nbytes);
        }
        kernel_fpu_end();

        return err;
}
static int cbc_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, true);

        kernel_fpu_begin();
        while ((nbytes = walk.nbytes)) {
                aesni_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                              nbytes & AES_BLOCK_MASK, walk.iv);
                nbytes &= AES_BLOCK_SIZE - 1;
                err = skcipher_walk_done(&walk, nbytes);
        }
        kernel_fpu_end();

        return err;
}

static int cbc_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, true);

        kernel_fpu_begin();
        while ((nbytes = walk.nbytes)) {
                aesni_cbc_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                              nbytes & AES_BLOCK_MASK, walk.iv);
                nbytes &= AES_BLOCK_SIZE - 1;
                err = skcipher_walk_done(&walk, nbytes);
        }
        kernel_fpu_end();

        return err;
}
static void ctr_crypt_final(struct crypto_aes_ctx *ctx,
                            struct skcipher_walk *walk)
{
        u8 *ctrblk = walk->iv;
        u8 keystream[AES_BLOCK_SIZE];
        u8 *src = walk->src.virt.addr;
        u8 *dst = walk->dst.virt.addr;
        unsigned int nbytes = walk->nbytes;

        /* Encrypt the counter block, then XOR only the remaining bytes. */
        aesni_enc(ctx, keystream, ctrblk);
        crypto_xor_cpy(dst, keystream, src, nbytes);

        crypto_inc(ctrblk, AES_BLOCK_SIZE);
}
#ifdef CONFIG_AS_AVX
static void aesni_ctr_enc_avx_tfm(struct crypto_aes_ctx *ctx, u8 *out,
                              const u8 *in, unsigned int len, u8 *iv)
{
        /*
         * based on key length, override with the by8 version
         * of ctr mode encryption/decryption for improved performance
         * aes_set_key_common() ensures that key length is one of
         * {128,192,256}
         */
        if (ctx->key_length == AES_KEYSIZE_128)
                aes_ctr_enc_128_avx_by8(in, iv, (void *)ctx, out, len);
        else if (ctx->key_length == AES_KEYSIZE_192)
                aes_ctr_enc_192_avx_by8(in, iv, (void *)ctx, out, len);
        else
                aes_ctr_enc_256_avx_by8(in, iv, (void *)ctx, out, len);
}
#endif
static int ctr_crypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, true);

        kernel_fpu_begin();
        while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
                aesni_ctr_enc_tfm(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                                  nbytes & AES_BLOCK_MASK, walk.iv);
                nbytes &= AES_BLOCK_SIZE - 1;
                err = skcipher_walk_done(&walk, nbytes);
        }
        if (walk.nbytes) {
                ctr_crypt_final(ctx, &walk);
                err = skcipher_walk_done(&walk, 0);
        }
        kernel_fpu_end();

        return err;
}
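
/*
 * Worked example for the loop above (numbers are illustrative): for a
 * 36-byte request, the while loop processes 36 & AES_BLOCK_MASK == 32
 * bytes (two full blocks) through aesni_ctr_enc_tfm(), leaving
 * walk.nbytes == 4; ctr_crypt_final() then encrypts one more counter
 * block and XORs just those 4 keystream bytes into the output.
 */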
static int xts_aesni_setkey(struct crypto_skcipher *tfm, const u8 *key,
                            unsigned int keylen)
{
        struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
        int err;

        err = xts_verify_key(tfm, key, keylen);
        if (err)
                return err;

        keylen /= 2;

        /* first half of xts-key is for crypt */
        err = aes_set_key_common(crypto_skcipher_tfm(tfm), ctx->raw_crypt_ctx,
                                 key, keylen);
        if (err)
                return err;

        /* second half of xts-key is for tweak */
        return aes_set_key_common(crypto_skcipher_tfm(tfm), ctx->raw_tweak_ctx,
                                  key + keylen, keylen);
}
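
/*
 * Illustrative example with assumed sizes (not part of the driver): for
 * xts(aes) with a 64-byte key, xts_verify_key() accepts the key, then
 * keylen /= 2 leaves two 32-byte halves -- key[0..31] is expanded into
 * raw_crypt_ctx (the AES-256 data key) and key[32..63] into raw_tweak_ctx
 * (the AES-256 tweak key).
 */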
static void aesni_xts_tweak(void *ctx, u8 *out, const u8 *in)
{
        aesni_enc(ctx, out, in);
}

static void aesni_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
        glue_xts_crypt_128bit_one(ctx, dst, src, iv, GLUE_FUNC_CAST(aesni_enc));
}

static void aesni_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
        glue_xts_crypt_128bit_one(ctx, dst, src, iv, GLUE_FUNC_CAST(aesni_dec));
}

static void aesni_xts_enc8(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
        aesni_xts_crypt8(ctx, (u8 *)dst, (const u8 *)src, true, (u8 *)iv);
}

static void aesni_xts_dec8(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
        aesni_xts_crypt8(ctx, (u8 *)dst, (const u8 *)src, false, (u8 *)iv);
}
static const struct common_glue_ctx aesni_enc_xts = {
        .num_funcs = 2,
        .fpu_blocks_limit = 1,

        .funcs = { {
                .num_blocks = 8,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_enc8) }
        }, {
                .num_blocks = 1,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_enc) }
        } }
};

static const struct common_glue_ctx aesni_dec_xts = {
        .num_funcs = 2,
        .fpu_blocks_limit = 1,

        .funcs = { {
                .num_blocks = 8,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_dec8) }
        }, {
                .num_blocks = 1,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_dec) }
        } }
};
static int xts_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

        return glue_xts_req_128bit(&aesni_enc_xts, req,
                                   XTS_TWEAK_CAST(aesni_xts_tweak),
                                   aes_ctx(ctx->raw_tweak_ctx),
                                   aes_ctx(ctx->raw_crypt_ctx));
}

static int xts_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

        return glue_xts_req_128bit(&aesni_dec_xts, req,
                                   XTS_TWEAK_CAST(aesni_xts_tweak),
                                   aes_ctx(ctx->raw_tweak_ctx),
                                   aes_ctx(ctx->raw_crypt_ctx));
}
static int rfc4106_init(struct crypto_aead *aead)
{
        struct cryptd_aead *cryptd_tfm;
        struct cryptd_aead **ctx = crypto_aead_ctx(aead);

        cryptd_tfm = cryptd_alloc_aead("__driver-gcm-aes-aesni",
                                       CRYPTO_ALG_INTERNAL,
                                       CRYPTO_ALG_INTERNAL);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);

        *ctx = cryptd_tfm;
        crypto_aead_set_reqsize(aead, crypto_aead_reqsize(&cryptd_tfm->base));
        return 0;
}

static void rfc4106_exit(struct crypto_aead *aead)
{
        struct cryptd_aead **ctx = crypto_aead_ctx(aead);

        cryptd_free_aead(*ctx);
}
static int
rfc4106_set_hash_subkey(u8 *hash_subkey, const u8 *key, unsigned int key_len)
{
        struct crypto_cipher *tfm;
        int ret;

        tfm = crypto_alloc_cipher("aes", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        ret = crypto_cipher_setkey(tfm, key, key_len);
        if (ret)
                goto out_free_cipher;

        /* Clear the data in the hash sub key container to zero. */
        /* We want to cipher all zeros to create the hash sub key. */
        memset(hash_subkey, 0, RFC4106_HASH_SUBKEY_SIZE);

        crypto_cipher_encrypt_one(tfm, hash_subkey, hash_subkey);

out_free_cipher:
        crypto_free_cipher(tfm);
        return ret;
}
static int common_rfc4106_set_key(struct crypto_aead *aead, const u8 *key,
                                  unsigned int key_len)
{
        struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(aead);

        if (key_len < 4) {
                crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
                return -EINVAL;
        }
        /* Account for the 4-byte nonce at the end of the key. */
        key_len -= 4;

        memcpy(ctx->nonce, key + key_len, sizeof(ctx->nonce));

        return aes_set_key_common(crypto_aead_tfm(aead),
                                  &ctx->aes_key_expanded, key, key_len) ?:
               rfc4106_set_hash_subkey(ctx->hash_subkey, key, key_len);
}

static int gcmaes_wrapper_set_key(struct crypto_aead *parent, const u8 *key,
                                  unsigned int key_len)
{
        struct cryptd_aead **ctx = crypto_aead_ctx(parent);
        struct cryptd_aead *cryptd_tfm = *ctx;

        return crypto_aead_setkey(&cryptd_tfm->base, key, key_len);
}
static int common_rfc4106_set_authsize(struct crypto_aead *aead,
                                       unsigned int authsize)
{
        switch (authsize) {
        case 8:
        case 12:
        case 16:
                break;
        default:
                return -EINVAL;
        }

        return 0;
}

/* This is the Integrity Check Value (aka the authentication tag) length;
 * it can be 8, 12 or 16 bytes long. */
static int gcmaes_wrapper_set_authsize(struct crypto_aead *parent,
                                       unsigned int authsize)
{
        struct cryptd_aead **ctx = crypto_aead_ctx(parent);
        struct cryptd_aead *cryptd_tfm = *ctx;

        return crypto_aead_setauthsize(&cryptd_tfm->base, authsize);
}
static int generic_gcmaes_set_authsize(struct crypto_aead *tfm,
                                       unsigned int authsize)
{
        switch (authsize) {
        case 4:
        case 8:
        case 12:
        case 13:
        case 14:
        case 15:
        case 16:
                break;
        default:
                return -EINVAL;
        }

        return 0;
}
static int gcmaes_encrypt(struct aead_request *req, unsigned int assoclen,
                          u8 *hash_subkey, u8 *iv, void *aes_ctx)
{
        u8 one_entry_in_sg = 0;
        u8 *src, *dst, *assoc;
        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
        unsigned long auth_tag_len = crypto_aead_authsize(tfm);
        struct scatter_walk src_sg_walk;
        struct scatter_walk dst_sg_walk = {};

        /* Fast path: one mappable scatterlist entry on each side. */
        if (sg_is_last(req->src) &&
            (!PageHighMem(sg_page(req->src)) ||
            req->src->offset + req->src->length <= PAGE_SIZE) &&
            sg_is_last(req->dst) &&
            (!PageHighMem(sg_page(req->dst)) ||
            req->dst->offset + req->dst->length <= PAGE_SIZE)) {
                one_entry_in_sg = 1;
                scatterwalk_start(&src_sg_walk, req->src);
                assoc = scatterwalk_map(&src_sg_walk);
                src = assoc + req->assoclen;
                dst = src;
                if (unlikely(req->src != req->dst)) {
                        scatterwalk_start(&dst_sg_walk, req->dst);
                        dst = scatterwalk_map(&dst_sg_walk) + req->assoclen;
                }
        } else {
                /* Allocate memory for src, dst, assoc */
                assoc = kmalloc(req->cryptlen + auth_tag_len + req->assoclen,
                        GFP_ATOMIC);
                if (unlikely(!assoc))
                        return -ENOMEM;
                scatterwalk_map_and_copy(assoc, req->src, 0,
                                         req->assoclen + req->cryptlen, 0);
                src = assoc + req->assoclen;
                dst = src;
        }

        kernel_fpu_begin();
        aesni_gcm_enc_tfm(aes_ctx, dst, src, req->cryptlen, iv,
                          hash_subkey, assoc, assoclen,
                          dst + req->cryptlen, auth_tag_len);
        kernel_fpu_end();

        /* The authTag (aka the Integrity Check Value) needs to be written
         * back to the packet. */
        if (one_entry_in_sg) {
                if (unlikely(req->src != req->dst)) {
                        scatterwalk_unmap(dst - req->assoclen);
                        scatterwalk_advance(&dst_sg_walk, req->dst->length);
                        scatterwalk_done(&dst_sg_walk, 1, 0);
                }
                scatterwalk_unmap(assoc);
                scatterwalk_advance(&src_sg_walk, req->src->length);
                scatterwalk_done(&src_sg_walk, req->src == req->dst, 0);
        } else {
                scatterwalk_map_and_copy(dst, req->dst, req->assoclen,
                                         req->cryptlen + auth_tag_len, 1);
                kfree(assoc);
        }
        return 0;
}
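
/*
 * Illustrative layout of the linear bounce buffer used on the slow path
 * above (not a separate structure in the driver):
 *
 *	assoc                 src/dst               tag
 *	|<-- req->assoclen -->|<-- req->cryptlen -->|<-- auth_tag_len -->|
 *
 * The whole scatterlist is copied in, encrypted in place, and the
 * ciphertext plus tag is copied back out via scatterwalk_map_and_copy().
 */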
static int gcmaes_decrypt(struct aead_request *req, unsigned int assoclen,
                          u8 *hash_subkey, u8 *iv, void *aes_ctx)
{
        u8 one_entry_in_sg = 0;
        u8 *src, *dst, *assoc;
        unsigned long tempCipherLen = 0;
        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
        unsigned long auth_tag_len = crypto_aead_authsize(tfm);
        u8 authTag[16];
        struct scatter_walk src_sg_walk;
        struct scatter_walk dst_sg_walk = {};
        int retval = 0;

        tempCipherLen = (unsigned long)(req->cryptlen - auth_tag_len);

        if (sg_is_last(req->src) &&
            (!PageHighMem(sg_page(req->src)) ||
            req->src->offset + req->src->length <= PAGE_SIZE) &&
            sg_is_last(req->dst) && req->dst->length &&
            (!PageHighMem(sg_page(req->dst)) ||
            req->dst->offset + req->dst->length <= PAGE_SIZE)) {
                one_entry_in_sg = 1;
                scatterwalk_start(&src_sg_walk, req->src);
                assoc = scatterwalk_map(&src_sg_walk);
                src = assoc + req->assoclen;
                dst = src;
                if (unlikely(req->src != req->dst)) {
                        scatterwalk_start(&dst_sg_walk, req->dst);
                        dst = scatterwalk_map(&dst_sg_walk) + req->assoclen;
                }
        } else {
                /* Allocate memory for src, dst, assoc */
                assoc = kmalloc(req->cryptlen + req->assoclen, GFP_ATOMIC);
                if (!assoc)
                        return -ENOMEM;
                scatterwalk_map_and_copy(assoc, req->src, 0,
                                         req->assoclen + req->cryptlen, 0);
                src = assoc + req->assoclen;
                dst = src;
        }

        kernel_fpu_begin();
        aesni_gcm_dec_tfm(aes_ctx, dst, src, tempCipherLen, iv,
                          hash_subkey, assoc, assoclen,
                          authTag, auth_tag_len);
        kernel_fpu_end();

        /* Compare generated tag with passed in tag. */
        retval = crypto_memneq(src + tempCipherLen, authTag, auth_tag_len) ?
                -EBADMSG : 0;

        if (one_entry_in_sg) {
                if (unlikely(req->src != req->dst)) {
                        scatterwalk_unmap(dst - req->assoclen);
                        scatterwalk_advance(&dst_sg_walk, req->dst->length);
                        scatterwalk_done(&dst_sg_walk, 1, 0);
                }
                scatterwalk_unmap(assoc);
                scatterwalk_advance(&src_sg_walk, req->src->length);
                scatterwalk_done(&src_sg_walk, req->src == req->dst, 0);
        } else {
                scatterwalk_map_and_copy(dst, req->dst, req->assoclen,
                                         tempCipherLen, 1);
                kfree(assoc);
        }
        return retval;
}
static int helper_rfc4106_encrypt(struct aead_request *req)
{
        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
        struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
        void *aes_ctx = &(ctx->aes_key_expanded);
        u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
        unsigned int i;
        __be32 counter = cpu_to_be32(1);

        /* Assuming we support RFC4106 64-bit extended sequence numbers,
         * the AAD length must be 16 or 20 bytes. */
        if (unlikely(req->assoclen != 16 && req->assoclen != 20))
                return -EINVAL;

        /* IV below built */
        for (i = 0; i < 4; i++)
                *(iv+i) = ctx->nonce[i];
        for (i = 0; i < 8; i++)
                *(iv+4+i) = req->iv[i];
        *((__be32 *)(iv+12)) = counter;

        return gcmaes_encrypt(req, req->assoclen - 8, ctx->hash_subkey, iv,
                              aes_ctx);
}
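
/*
 * Illustrative layout of the 16-byte pre-counter block built above
 * (values follow RFC 4106; offsets are those used in this file):
 *
 *	iv[0..3]   = salt (last 4 bytes of the setkey material, ctx->nonce)
 *	iv[4..11]  = explicit per-packet IV from req->iv
 *	iv[12..15] = 0x00000001 (big endian), the initial counter value
 */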
static int helper_rfc4106_decrypt(struct aead_request *req)
{
        __be32 counter = cpu_to_be32(1);
        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
        struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
        void *aes_ctx = &(ctx->aes_key_expanded);
        u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
        unsigned int i;

        /* Assuming we support RFC4106 64-bit extended sequence numbers,
         * the AAD length must be 16 or 20 bytes. */
        if (unlikely(req->assoclen != 16 && req->assoclen != 20))
                return -EINVAL;

        /* IV below built */
        for (i = 0; i < 4; i++)
                *(iv+i) = ctx->nonce[i];
        for (i = 0; i < 8; i++)
                *(iv+4+i) = req->iv[i];
        *((__be32 *)(iv+12)) = counter;

        return gcmaes_decrypt(req, req->assoclen - 8, ctx->hash_subkey, iv,
                              aes_ctx);
}
static int gcmaes_wrapper_encrypt(struct aead_request *req)
{
        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
        struct cryptd_aead **ctx = crypto_aead_ctx(tfm);
        struct cryptd_aead *cryptd_tfm = *ctx;

        /* Run the inner SIMD algorithm directly when the FPU is usable and
         * no requests are already queued on cryptd (to preserve ordering);
         * otherwise defer the request to the cryptd workqueue. */
        tfm = &cryptd_tfm->base;
        if (irq_fpu_usable() && (!in_atomic() ||
                                 !cryptd_aead_queued(cryptd_tfm)))
                tfm = cryptd_aead_child(cryptd_tfm);

        aead_request_set_tfm(req, tfm);

        return crypto_aead_encrypt(req);
}

static int gcmaes_wrapper_decrypt(struct aead_request *req)
{
        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
        struct cryptd_aead **ctx = crypto_aead_ctx(tfm);
        struct cryptd_aead *cryptd_tfm = *ctx;

        tfm = &cryptd_tfm->base;
        if (irq_fpu_usable() && (!in_atomic() ||
                                 !cryptd_aead_queued(cryptd_tfm)))
                tfm = cryptd_aead_child(cryptd_tfm);

        aead_request_set_tfm(req, tfm);

        return crypto_aead_decrypt(req);
}
#endif
static struct crypto_alg aesni_algs[] = { {
        .cra_name               = "aes",
        .cra_driver_name        = "aes-aesni",
        .cra_priority           = 300,
        .cra_flags              = CRYPTO_ALG_TYPE_CIPHER,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = CRYPTO_AES_CTX_SIZE,
        .cra_module             = THIS_MODULE,
        .cra_u  = {
                .cipher = {
                        .cia_min_keysize        = AES_MIN_KEY_SIZE,
                        .cia_max_keysize        = AES_MAX_KEY_SIZE,
                        .cia_setkey             = aes_set_key,
                        .cia_encrypt            = aes_encrypt,
                        .cia_decrypt            = aes_decrypt
                }
        }
}, {
        .cra_name               = "__aes",
        .cra_driver_name        = "__aes-aesni",
        .cra_priority           = 300,
        .cra_flags              = CRYPTO_ALG_TYPE_CIPHER | CRYPTO_ALG_INTERNAL,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = CRYPTO_AES_CTX_SIZE,
        .cra_module             = THIS_MODULE,
        .cra_u  = {
                .cipher = {
                        .cia_min_keysize        = AES_MIN_KEY_SIZE,
                        .cia_max_keysize        = AES_MAX_KEY_SIZE,
                        .cia_setkey             = aes_set_key,
                        .cia_encrypt            = __aes_encrypt,
                        .cia_decrypt            = __aes_decrypt
                }
        }
} };
static struct skcipher_alg aesni_skciphers[] = {
        {
                .base = {
                        .cra_name               = "__ecb(aes)",
                        .cra_driver_name        = "__ecb-aes-aesni",
                        .cra_priority           = 400,
                        .cra_flags              = CRYPTO_ALG_INTERNAL,
                        .cra_blocksize          = AES_BLOCK_SIZE,
                        .cra_ctxsize            = CRYPTO_AES_CTX_SIZE,
                        .cra_module             = THIS_MODULE,
                },
                .min_keysize    = AES_MIN_KEY_SIZE,
                .max_keysize    = AES_MAX_KEY_SIZE,
                .setkey         = aesni_skcipher_setkey,
                .encrypt        = ecb_encrypt,
                .decrypt        = ecb_decrypt,
        }, {
                .base = {
                        .cra_name               = "__cbc(aes)",
                        .cra_driver_name        = "__cbc-aes-aesni",
                        .cra_priority           = 400,
                        .cra_flags              = CRYPTO_ALG_INTERNAL,
                        .cra_blocksize          = AES_BLOCK_SIZE,
                        .cra_ctxsize            = CRYPTO_AES_CTX_SIZE,
                        .cra_module             = THIS_MODULE,
                },
                .min_keysize    = AES_MIN_KEY_SIZE,
                .max_keysize    = AES_MAX_KEY_SIZE,
                .ivsize         = AES_BLOCK_SIZE,
                .setkey         = aesni_skcipher_setkey,
                .encrypt        = cbc_encrypt,
                .decrypt        = cbc_decrypt,
#ifdef CONFIG_X86_64
        }, {
                .base = {
                        .cra_name               = "__ctr(aes)",
                        .cra_driver_name        = "__ctr-aes-aesni",
                        .cra_priority           = 400,
                        .cra_flags              = CRYPTO_ALG_INTERNAL,
                        .cra_blocksize          = 1,
                        .cra_ctxsize            = CRYPTO_AES_CTX_SIZE,
                        .cra_module             = THIS_MODULE,
                },
                .min_keysize    = AES_MIN_KEY_SIZE,
                .max_keysize    = AES_MAX_KEY_SIZE,
                .ivsize         = AES_BLOCK_SIZE,
                .chunksize      = AES_BLOCK_SIZE,
                .setkey         = aesni_skcipher_setkey,
                .encrypt        = ctr_crypt,
                .decrypt        = ctr_crypt,
        }, {
                .base = {
                        .cra_name               = "__xts(aes)",
                        .cra_driver_name        = "__xts-aes-aesni",
                        .cra_priority           = 401,
                        .cra_flags              = CRYPTO_ALG_INTERNAL,
                        .cra_blocksize          = AES_BLOCK_SIZE,
                        .cra_ctxsize            = XTS_AES_CTX_SIZE,
                        .cra_module             = THIS_MODULE,
                },
                .min_keysize    = 2 * AES_MIN_KEY_SIZE,
                .max_keysize    = 2 * AES_MAX_KEY_SIZE,
                .ivsize         = AES_BLOCK_SIZE,
                .setkey         = xts_aesni_setkey,
                .encrypt        = xts_encrypt,
                .decrypt        = xts_decrypt,
#endif
        }
};
static
struct simd_skcipher_alg *aesni_simd_skciphers[ARRAY_SIZE(aesni_skciphers)];

static struct {
        const char *algname;
        const char *drvname;
        const char *basename;
        struct simd_skcipher_alg *simd;
} aesni_simd_skciphers2[] = {
#if (defined(MODULE) && IS_ENABLED(CONFIG_CRYPTO_PCBC)) || \
    IS_BUILTIN(CONFIG_CRYPTO_PCBC)
        {
                .algname        = "pcbc(aes)",
                .drvname        = "pcbc-aes-aesni",
                .basename       = "fpu(pcbc(__aes-aesni))",
        },
#endif
};
#ifdef CONFIG_X86_64
static int generic_gcmaes_set_key(struct crypto_aead *aead, const u8 *key,
                                  unsigned int key_len)
{
        struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(aead);

        return aes_set_key_common(crypto_aead_tfm(aead),
                                  &ctx->aes_key_expanded, key, key_len) ?:
               rfc4106_set_hash_subkey(ctx->hash_subkey, key, key_len);
}
static int generic_gcmaes_encrypt(struct aead_request *req)
{
        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
        struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(tfm);
        void *aes_ctx = &(ctx->aes_key_expanded);
        u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
        __be32 counter = cpu_to_be32(1);

        memcpy(iv, req->iv, 12);
        *((__be32 *)(iv+12)) = counter;

        return gcmaes_encrypt(req, req->assoclen, ctx->hash_subkey, iv,
                              aes_ctx);
}

static int generic_gcmaes_decrypt(struct aead_request *req)
{
        __be32 counter = cpu_to_be32(1);
        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
        struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(tfm);
        void *aes_ctx = &(ctx->aes_key_expanded);
        u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));

        memcpy(iv, req->iv, 12);
        *((__be32 *)(iv+12)) = counter;

        return gcmaes_decrypt(req, req->assoclen, ctx->hash_subkey, iv,
                              aes_ctx);
}
static int generic_gcmaes_init(struct crypto_aead *aead)
{
        struct cryptd_aead *cryptd_tfm;
        struct cryptd_aead **ctx = crypto_aead_ctx(aead);

        cryptd_tfm = cryptd_alloc_aead("__driver-generic-gcm-aes-aesni",
                                       CRYPTO_ALG_INTERNAL,
                                       CRYPTO_ALG_INTERNAL);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);

        *ctx = cryptd_tfm;
        crypto_aead_set_reqsize(aead, crypto_aead_reqsize(&cryptd_tfm->base));

        return 0;
}

static void generic_gcmaes_exit(struct crypto_aead *aead)
{
        struct cryptd_aead **ctx = crypto_aead_ctx(aead);

        cryptd_free_aead(*ctx);
}
static struct aead_alg aesni_aead_algs[] = { {
        .setkey                 = common_rfc4106_set_key,
        .setauthsize            = common_rfc4106_set_authsize,
        .encrypt                = helper_rfc4106_encrypt,
        .decrypt                = helper_rfc4106_decrypt,
        .ivsize                 = GCM_RFC4106_IV_SIZE,
        .maxauthsize            = 16,
        .base = {
                .cra_name               = "__gcm-aes-aesni",
                .cra_driver_name        = "__driver-gcm-aes-aesni",
                .cra_flags              = CRYPTO_ALG_INTERNAL,
                .cra_blocksize          = 1,
                .cra_ctxsize            = sizeof(struct aesni_rfc4106_gcm_ctx),
                .cra_alignmask          = AESNI_ALIGN - 1,
                .cra_module             = THIS_MODULE,
        },
}, {
        .init                   = rfc4106_init,
        .exit                   = rfc4106_exit,
        .setkey                 = gcmaes_wrapper_set_key,
        .setauthsize            = gcmaes_wrapper_set_authsize,
        .encrypt                = gcmaes_wrapper_encrypt,
        .decrypt                = gcmaes_wrapper_decrypt,
        .ivsize                 = GCM_RFC4106_IV_SIZE,
        .maxauthsize            = 16,
        .base = {
                .cra_name               = "rfc4106(gcm(aes))",
                .cra_driver_name        = "rfc4106-gcm-aesni",
                .cra_priority           = 400,
                .cra_flags              = CRYPTO_ALG_ASYNC,
                .cra_blocksize          = 1,
                .cra_ctxsize            = sizeof(struct cryptd_aead *),
                .cra_module             = THIS_MODULE,
        },
}, {
        .setkey                 = generic_gcmaes_set_key,
        .setauthsize            = generic_gcmaes_set_authsize,
        .encrypt                = generic_gcmaes_encrypt,
        .decrypt                = generic_gcmaes_decrypt,
        .ivsize                 = GCM_AES_IV_SIZE,
        .maxauthsize            = 16,
        .base = {
                .cra_name               = "__generic-gcm-aes-aesni",
                .cra_driver_name        = "__driver-generic-gcm-aes-aesni",
                .cra_priority           = 0,
                .cra_flags              = CRYPTO_ALG_INTERNAL,
                .cra_blocksize          = 1,
                .cra_ctxsize            = sizeof(struct generic_gcmaes_ctx),
                .cra_alignmask          = AESNI_ALIGN - 1,
                .cra_module             = THIS_MODULE,
        },
}, {
        .init                   = generic_gcmaes_init,
        .exit                   = generic_gcmaes_exit,
        .setkey                 = gcmaes_wrapper_set_key,
        .setauthsize            = gcmaes_wrapper_set_authsize,
        .encrypt                = gcmaes_wrapper_encrypt,
        .decrypt                = gcmaes_wrapper_decrypt,
        .ivsize                 = GCM_AES_IV_SIZE,
        .maxauthsize            = 16,
        .base = {
                .cra_name               = "gcm(aes)",
                .cra_driver_name        = "generic-gcm-aesni",
                .cra_priority           = 400,
                .cra_flags              = CRYPTO_ALG_ASYNC,
                .cra_blocksize          = 1,
                .cra_ctxsize            = sizeof(struct cryptd_aead *),
                .cra_module             = THIS_MODULE,
        },
} };
#else
static struct aead_alg aesni_aead_algs[0];
#endif
static const struct x86_cpu_id aesni_cpu_id[] = {
        X86_FEATURE_MATCH(X86_FEATURE_AES),
        {}
};
MODULE_DEVICE_TABLE(x86cpu, aesni_cpu_id);
static void aesni_free_simds(void)
{
        int i;

        for (i = 0; i < ARRAY_SIZE(aesni_simd_skciphers) &&
                    aesni_simd_skciphers[i]; i++)
                simd_skcipher_free(aesni_simd_skciphers[i]);

        for (i = 0; i < ARRAY_SIZE(aesni_simd_skciphers2); i++)
                if (aesni_simd_skciphers2[i].simd)
                        simd_skcipher_free(aesni_simd_skciphers2[i].simd);
}
static int __init aesni_init(void)
{
        struct simd_skcipher_alg *simd;
        const char *basename;
        const char *algname;
        const char *drvname;
        int err;
        int i;

        if (!x86_match_cpu(aesni_cpu_id))
                return -ENODEV;
#ifdef CONFIG_X86_64
#ifdef CONFIG_AS_AVX2
        if (boot_cpu_has(X86_FEATURE_AVX2)) {
                pr_info("AVX2 version of gcm_enc/dec engaged.\n");
                aesni_gcm_enc_tfm = aesni_gcm_enc_avx2;
                aesni_gcm_dec_tfm = aesni_gcm_dec_avx2;
        } else
#endif
#ifdef CONFIG_AS_AVX
        if (boot_cpu_has(X86_FEATURE_AVX)) {
                pr_info("AVX version of gcm_enc/dec engaged.\n");
                aesni_gcm_enc_tfm = aesni_gcm_enc_avx;
                aesni_gcm_dec_tfm = aesni_gcm_dec_avx;
        } else
#endif
        {
                pr_info("SSE version of gcm_enc/dec engaged.\n");
                aesni_gcm_enc_tfm = aesni_gcm_enc;
                aesni_gcm_dec_tfm = aesni_gcm_dec;
        }
        aesni_ctr_enc_tfm = aesni_ctr_enc;
#ifdef CONFIG_AS_AVX
        if (boot_cpu_has(X86_FEATURE_AVX)) {
                /* optimize performance of ctr mode encryption transform */
                aesni_ctr_enc_tfm = aesni_ctr_enc_avx_tfm;
                pr_info("AES CTR mode by8 optimization enabled\n");
        }
#endif
#endif

        err = crypto_fpu_init();
        if (err)
                return err;

        err = crypto_register_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
        if (err)
                goto fpu_exit;

        err = crypto_register_skciphers(aesni_skciphers,
                                        ARRAY_SIZE(aesni_skciphers));
        if (err)
                goto unregister_algs;

        err = crypto_register_aeads(aesni_aead_algs,
                                    ARRAY_SIZE(aesni_aead_algs));
        if (err)
                goto unregister_skciphers;

        for (i = 0; i < ARRAY_SIZE(aesni_skciphers); i++) {
                algname = aesni_skciphers[i].base.cra_name + 2;
                drvname = aesni_skciphers[i].base.cra_driver_name + 2;
                basename = aesni_skciphers[i].base.cra_driver_name;
                simd = simd_skcipher_create_compat(algname, drvname, basename);
                err = PTR_ERR(simd);
                if (IS_ERR(simd))
                        goto unregister_simds;

                aesni_simd_skciphers[i] = simd;
        }

        for (i = 0; i < ARRAY_SIZE(aesni_simd_skciphers2); i++) {
                algname = aesni_simd_skciphers2[i].algname;
                drvname = aesni_simd_skciphers2[i].drvname;
                basename = aesni_simd_skciphers2[i].basename;
                simd = simd_skcipher_create_compat(algname, drvname, basename);
                err = PTR_ERR(simd);
                if (IS_ERR(simd))
                        continue;

                aesni_simd_skciphers2[i].simd = simd;
        }

        return 0;

unregister_simds:
        aesni_free_simds();
        crypto_unregister_aeads(aesni_aead_algs, ARRAY_SIZE(aesni_aead_algs));
unregister_skciphers:
        crypto_unregister_skciphers(aesni_skciphers,
                                    ARRAY_SIZE(aesni_skciphers));
unregister_algs:
        crypto_unregister_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
fpu_exit:
        crypto_fpu_exit();
        return err;
}
static void __exit aesni_exit(void)
{
        aesni_free_simds();
        crypto_unregister_aeads(aesni_aead_algs, ARRAY_SIZE(aesni_aead_algs));
        crypto_unregister_skciphers(aesni_skciphers,
                                    ARRAY_SIZE(aesni_skciphers));
        crypto_unregister_algs(aesni_algs, ARRAY_SIZE(aesni_algs));

        crypto_fpu_exit();
}

late_initcall(aesni_init);
module_exit(aesni_exit);
MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, Intel AES-NI instructions optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS_CRYPTO("aes");