/* Glue code for AES encryption optimized for sparc64 crypto opcodes.
 *
 * This is based largely upon arch/x86/crypto/aesni-intel_glue.c
 *
 * Copyright (C) 2008, Intel Corp.
 *    Author: Huang Ying <ying.huang@intel.com>
 *
 * Added RFC4106 AES-GCM support for 128-bit keys under the AEAD
 * interface for 64-bit kernels.
 *    Authors: Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 */

#define pr_fmt(fmt)	KBUILD_MODNAME ": " fmt

#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/types.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>

#include <asm/fpumacro.h>
#include <asm/pstate.h>
#include <asm/elf.h>

#include "opcodes.h"
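
/* Each key size has its own set of assembler routines; an aes_ops
 * table bundles them so the mode-level code can dispatch through a
 * single pointer selected once at setkey time.
 */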
struct aes_ops {
	void (*encrypt)(const u64 *key, const u32 *input, u32 *output);
	void (*decrypt)(const u64 *key, const u32 *input, u32 *output);
	void (*load_encrypt_keys)(const u64 *key);
	void (*load_decrypt_keys)(const u64 *key);
	void (*ecb_encrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len);
	void (*ecb_decrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len);
	void (*cbc_encrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len, u64 *iv);
	void (*cbc_decrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len, u64 *iv);
	void (*ctr_crypt)(const u64 *key, const u64 *input, u64 *output,
			  unsigned int len, u64 *iv);
};
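
/* Per-tfm context: the pre-expanded key schedule (AES_MAX_KEYLENGTH
 * bytes is enough for the fifteen 16-byte round keys of AES-256) and
 * the ops table matching the key size chosen at setkey time.
 */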
struct crypto_sparc64_aes_ctx {
	struct aes_ops *ops;
	u64 key[AES_MAX_KEYLENGTH / sizeof(u64)];
	u32 key_length;
	u32 expanded_key_length;
};
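
/* The routines below live in sparc64 assembler (aes_asm.S in this
 * directory) and use the AES opcodes first introduced with the SPARC
 * T4; they all operate on the pre-expanded key schedule.
 */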
extern void aes_sparc64_encrypt_128(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_encrypt_192(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_encrypt_256(const u64 *key, const u32 *input,
				    u32 *output);

extern void aes_sparc64_decrypt_128(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_decrypt_192(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_decrypt_256(const u64 *key, const u32 *input,
				    u32 *output);

extern void aes_sparc64_load_encrypt_keys_128(const u64 *key);
extern void aes_sparc64_load_encrypt_keys_192(const u64 *key);
extern void aes_sparc64_load_encrypt_keys_256(const u64 *key);

extern void aes_sparc64_load_decrypt_keys_128(const u64 *key);
extern void aes_sparc64_load_decrypt_keys_192(const u64 *key);
extern void aes_sparc64_load_decrypt_keys_256(const u64 *key);

extern void aes_sparc64_ecb_encrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_encrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_encrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);

extern void aes_sparc64_ecb_decrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_decrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_decrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);

extern void aes_sparc64_cbc_encrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_encrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_encrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_decrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_decrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_decrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_ctr_crypt_128(const u64 *key, const u64 *input,
				      u64 *output, unsigned int len,
				      u64 *iv);

extern void aes_sparc64_ctr_crypt_192(const u64 *key, const u64 *input,
				      u64 *output, unsigned int len,
				      u64 *iv);

extern void aes_sparc64_ctr_crypt_256(const u64 *key, const u64 *input,
				      u64 *output, unsigned int len,
				      u64 *iv);
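
/* One dispatch table per key size, pointing at the matching assembler
 * entry points declared above.
 */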
static struct aes_ops aes128_ops = {
	.encrypt		= aes_sparc64_encrypt_128,
	.decrypt		= aes_sparc64_decrypt_128,
	.load_encrypt_keys	= aes_sparc64_load_encrypt_keys_128,
	.load_decrypt_keys	= aes_sparc64_load_decrypt_keys_128,
	.ecb_encrypt		= aes_sparc64_ecb_encrypt_128,
	.ecb_decrypt		= aes_sparc64_ecb_decrypt_128,
	.cbc_encrypt		= aes_sparc64_cbc_encrypt_128,
	.cbc_decrypt		= aes_sparc64_cbc_decrypt_128,
	.ctr_crypt		= aes_sparc64_ctr_crypt_128,
};

static struct aes_ops aes192_ops = {
	.encrypt		= aes_sparc64_encrypt_192,
	.decrypt		= aes_sparc64_decrypt_192,
	.load_encrypt_keys	= aes_sparc64_load_encrypt_keys_192,
	.load_decrypt_keys	= aes_sparc64_load_decrypt_keys_192,
	.ecb_encrypt		= aes_sparc64_ecb_encrypt_192,
	.ecb_decrypt		= aes_sparc64_ecb_decrypt_192,
	.cbc_encrypt		= aes_sparc64_cbc_encrypt_192,
	.cbc_decrypt		= aes_sparc64_cbc_decrypt_192,
	.ctr_crypt		= aes_sparc64_ctr_crypt_192,
};

static struct aes_ops aes256_ops = {
	.encrypt		= aes_sparc64_encrypt_256,
	.decrypt		= aes_sparc64_decrypt_256,
	.load_encrypt_keys	= aes_sparc64_load_encrypt_keys_256,
	.load_decrypt_keys	= aes_sparc64_load_decrypt_keys_256,
	.ecb_encrypt		= aes_sparc64_ecb_encrypt_256,
	.ecb_decrypt		= aes_sparc64_ecb_decrypt_256,
	.cbc_encrypt		= aes_sparc64_cbc_encrypt_256,
	.cbc_decrypt		= aes_sparc64_cbc_decrypt_256,
	.ctr_crypt		= aes_sparc64_ctr_crypt_256,
};

extern void aes_sparc64_key_expand(const u32 *in_key, u64 *output_key,
				   unsigned int key_len);
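
/* The expanded key lengths are byte counts for the full round-key
 * schedule: 0xb0 = 176 = 11 * 16 (AES-128), 0xd0 = 208 = 13 * 16
 * (AES-192) and 0xf0 = 240 = 15 * 16 (AES-256).
 */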
static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;

	switch (key_len) {
	case AES_KEYSIZE_128:
		ctx->expanded_key_length = 0xb0;
		ctx->ops = &aes128_ops;
		break;

	case AES_KEYSIZE_192:
		ctx->expanded_key_length = 0xd0;
		ctx->ops = &aes192_ops;
		break;

	case AES_KEYSIZE_256:
		ctx->expanded_key_length = 0xf0;
		ctx->ops = &aes256_ops;
		break;

	default:
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	aes_sparc64_key_expand((const u32 *)in_key, &ctx->key[0], key_len);
	ctx->key_length = key_len;

	return 0;
}

static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	ctx->ops->encrypt(&ctx->key[0], (const u32 *) src, (u32 *) dst);
}

static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	ctx->ops->decrypt(&ctx->key[0], (const u32 *) src, (u32 *) dst);
}
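
/* Round a byte count down to a whole number of AES blocks. */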
#define AES_BLOCK_MASK	(~(AES_BLOCK_SIZE-1))
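
/* The blkcipher walkers below share one pattern: load the key schedule
 * into the FPU once, process each walk chunk a whole number of blocks
 * at a time, and hand any tail back to the walker.  MAY_SLEEP is
 * cleared because the loaded FPU state must not be lost across a
 * sleep; fprs_write(0) gives the FPU back once the walk is finished.
 */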
static int ecb_encrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	ctx->ops->load_encrypt_keys(&ctx->key[0]);
	while ((nbytes = walk.nbytes)) {
		unsigned int block_len = nbytes & AES_BLOCK_MASK;

		if (likely(block_len)) {
			ctx->ops->ecb_encrypt(&ctx->key[0],
					      (const u64 *)walk.src.virt.addr,
					      (u64 *) walk.dst.virt.addr,
					      block_len);
		}
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	fprs_write(0);
	return err;
}
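
/* Decryption consumes the key schedule in reverse, so the assembler
 * routines take a pointer to the *end* of the expanded key (key_end)
 * rather than to its start.
 */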
static int ecb_decrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	u64 *key_end;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	ctx->ops->load_decrypt_keys(&ctx->key[0]);
	key_end = &ctx->key[ctx->expanded_key_length / sizeof(u64)];
	while ((nbytes = walk.nbytes)) {
		unsigned int block_len = nbytes & AES_BLOCK_MASK;

		if (likely(block_len)) {
			ctx->ops->ecb_decrypt(key_end,
					      (const u64 *) walk.src.virt.addr,
					      (u64 *) walk.dst.virt.addr, block_len);
		}
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	fprs_write(0);
	return err;
}
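
/* CBC chaining happens inside the assembler routines: walk.iv is
 * passed down and updated in place, so the chain carries across walk
 * chunks.
 */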
static int cbc_encrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	ctx->ops->load_encrypt_keys(&ctx->key[0]);
	while ((nbytes = walk.nbytes)) {
		unsigned int block_len = nbytes & AES_BLOCK_MASK;

		if (likely(block_len)) {
			ctx->ops->cbc_encrypt(&ctx->key[0],
					      (const u64 *)walk.src.virt.addr,
					      (u64 *) walk.dst.virt.addr,
					      block_len, (u64 *) walk.iv);
		}
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	fprs_write(0);
	return err;
}

static int cbc_decrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	u64 *key_end;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	ctx->ops->load_decrypt_keys(&ctx->key[0]);
	key_end = &ctx->key[ctx->expanded_key_length / sizeof(u64)];
	while ((nbytes = walk.nbytes)) {
		unsigned int block_len = nbytes & AES_BLOCK_MASK;

		if (likely(block_len)) {
			ctx->ops->cbc_decrypt(key_end,
					      (const u64 *) walk.src.virt.addr,
					      (u64 *) walk.dst.virt.addr,
					      block_len, (u64 *) walk.iv);
		}
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	fprs_write(0);
	return err;
}
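
/* Handle a final partial block for CTR mode: encrypt the counter
 * block on its own, XOR the resulting keystream into the remaining
 * bytes and bump the counter.
 */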
static void ctr_crypt_final(struct crypto_sparc64_aes_ctx *ctx,
			    struct blkcipher_walk *walk)
{
	u8 *ctrblk = walk->iv;
	u64 keystream[AES_BLOCK_SIZE / sizeof(u64)];
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	ctx->ops->ecb_encrypt(&ctx->key[0], (const u64 *)ctrblk,
			      keystream, AES_BLOCK_SIZE);
	crypto_xor((u8 *) keystream, src, nbytes);
	memcpy(dst, keystream, nbytes);
	crypto_inc(ctrblk, AES_BLOCK_SIZE);
}
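
/* blkcipher_walk_virt_block() guarantees every chunk is at least one
 * block long until the last one, so only the final walk step can leave
 * the partial block that ctr_crypt_final() handles.
 */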
static int ctr_crypt(struct blkcipher_desc *desc,
		     struct scatterlist *dst, struct scatterlist *src,
		     unsigned int nbytes)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	ctx->ops->load_encrypt_keys(&ctx->key[0]);
	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
		unsigned int block_len = nbytes & AES_BLOCK_MASK;

		if (likely(block_len)) {
			ctx->ops->ctr_crypt(&ctx->key[0],
					    (const u64 *)walk.src.virt.addr,
					    (u64 *) walk.dst.virt.addr,
					    block_len, (u64 *) walk.iv);
		}
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	if (walk.nbytes) {
		ctr_crypt_final(ctx, &walk);
		err = blkcipher_walk_done(desc, &walk, 0);
	}
	fprs_write(0);
	return err;
}

static struct crypto_alg algs[] = { {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-sparc64",
	.cra_priority		= SPARC_CR_OPCODE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_sparc64_aes_ctx),
	.cra_alignmask		= 3,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.cipher = {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= aes_encrypt,
			.cia_decrypt		= aes_decrypt
		}
	}
}, {
	.cra_name		= "ecb(aes)",
	.cra_driver_name	= "ecb-aes-sparc64",
	.cra_priority		= SPARC_CR_OPCODE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_sparc64_aes_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= aes_set_key,
			.encrypt	= ecb_encrypt,
			.decrypt	= ecb_decrypt,
		},
	},
}, {
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "cbc-aes-sparc64",
	.cra_priority		= SPARC_CR_OPCODE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_sparc64_aes_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= aes_set_key,
			.encrypt	= cbc_encrypt,
			.decrypt	= cbc_decrypt,
		},
	},
}, {
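	/* CTR is a stream mode, hence cra_blocksize = 1; partial final
	 * blocks are handled by ctr_crypt_final() above.
	 */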
	.cra_name		= "ctr(aes)",
	.cra_driver_name	= "ctr-aes-sparc64",
	.cra_priority		= SPARC_CR_OPCODE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct crypto_sparc64_aes_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= aes_set_key,
			.encrypt	= ctr_crypt,
			.decrypt	= ctr_crypt,
		},
	},
} };
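
/* Availability test: the crypto opcodes are advertised through the
 * HWCAP_SPARC_CRYPTO ELF hwcap, and the per-cpu CFR register (read via
 * %asr26) reports which individual opcodes are implemented.
 */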
static bool __init sparc64_has_aes_opcode(void)
{
	unsigned long cfr;

	if (!(sparc64_elf_hwcap & HWCAP_SPARC_CRYPTO))
		return false;

	__asm__ __volatile__("rd %%asr26, %0" : "=r" (cfr));
	if (!(cfr & CFR_AES))
		return false;

	return true;
}

static int __init aes_sparc64_mod_init(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(algs); i++)
		INIT_LIST_HEAD(&algs[i].cra_list);

	if (sparc64_has_aes_opcode()) {
		pr_info("Using sparc64 aes opcodes optimized AES implementation\n");
		return crypto_register_algs(algs, ARRAY_SIZE(algs));
	}
	pr_info("sparc64 aes opcodes not available.\n");
	return -ENODEV;
}

static void __exit aes_sparc64_mod_fini(void)
{
	crypto_unregister_algs(algs, ARRAY_SIZE(algs));
}

module_init(aes_sparc64_mod_init);
module_exit(aes_sparc64_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, sparc64 aes opcode accelerated");

MODULE_ALIAS_CRYPTO("aes");
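
/* crop_devid.c provides a dummy OF device table, shared by the sparc64
 * crypto opcode modules, so that userspace autoloads this module on
 * machines whose device tree advertises the capability.
 */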
#include "crop_devid.c"