GNU Linux-libre 4.19.281-gnu1
arch/x86/crypto/camellia_aesni_avx_glue.c
/*
 * Glue Code for x86_64/AVX/AES-NI assembler optimized version of Camellia
 *
 * Copyright © 2012-2013 Jussi Kivilinna <jussi.kivilinna@iki.fi>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 */

#include <asm/crypto/camellia.h>
#include <asm/crypto/glue_helper.h>
#include <crypto/algapi.h>
#include <crypto/internal/simd.h>
#include <crypto/xts.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/types.h>

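/*
 * Number of blocks handled per call by the 16-way assembler routines; the
 * dispatch tables below also pass it as fpu_blocks_limit, so the glue helper
 * only borrows the FPU when at least this many blocks are pending.
 */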
#define CAMELLIA_AESNI_PARALLEL_BLOCKS 16

/* 16-way parallel cipher functions (avx/aes-ni) */
asmlinkage void camellia_ecb_enc_16way(struct camellia_ctx *ctx, u8 *dst,
                                       const u8 *src);
EXPORT_SYMBOL_GPL(camellia_ecb_enc_16way);

asmlinkage void camellia_ecb_dec_16way(struct camellia_ctx *ctx, u8 *dst,
                                       const u8 *src);
EXPORT_SYMBOL_GPL(camellia_ecb_dec_16way);

asmlinkage void camellia_cbc_dec_16way(struct camellia_ctx *ctx, u8 *dst,
                                       const u8 *src);
EXPORT_SYMBOL_GPL(camellia_cbc_dec_16way);

asmlinkage void camellia_ctr_16way(struct camellia_ctx *ctx, u8 *dst,
                                   const u8 *src, le128 *iv);
EXPORT_SYMBOL_GPL(camellia_ctr_16way);

asmlinkage void camellia_xts_enc_16way(struct camellia_ctx *ctx, u8 *dst,
                                       const u8 *src, le128 *iv);
EXPORT_SYMBOL_GPL(camellia_xts_enc_16way);

asmlinkage void camellia_xts_dec_16way(struct camellia_ctx *ctx, u8 *dst,
                                       const u8 *src, le128 *iv);
EXPORT_SYMBOL_GPL(camellia_xts_dec_16way);

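/*
 * Single-block XTS helpers: the glue layer falls back to these for any tail
 * smaller than the 16-way batch, reusing the generic one-block Camellia
 * routines. They are exported so that other Camellia glue modules can reuse
 * them.
 */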
void camellia_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
        glue_xts_crypt_128bit_one(ctx, dst, src, iv,
                                  GLUE_FUNC_CAST(camellia_enc_blk));
}
EXPORT_SYMBOL_GPL(camellia_xts_enc);

void camellia_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
        glue_xts_crypt_128bit_one(ctx, dst, src, iv,
                                  GLUE_FUNC_CAST(camellia_dec_blk));
}
EXPORT_SYMBOL_GPL(camellia_xts_dec);

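/*
 * Dispatch tables for the glue helper. Implementations are listed from the
 * widest to the narrowest: the helper picks the largest num_blocks that still
 * fits the remaining data, so bulk data goes through the 16-way assembler
 * code and the tail through the 2-way/1-way C routines.
 */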
static const struct common_glue_ctx camellia_enc = {
        .num_funcs = 3,
        .fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
                .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_enc_16way) }
        }, {
                .num_blocks = 2,
                .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_enc_blk_2way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_enc_blk) }
        } }
};

static const struct common_glue_ctx camellia_ctr = {
        .num_funcs = 3,
        .fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
                .fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_ctr_16way) }
        }, {
                .num_blocks = 2,
                .fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_crypt_ctr_2way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_crypt_ctr) }
        } }
};

static const struct common_glue_ctx camellia_enc_xts = {
        .num_funcs = 2,
        .fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_enc_16way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_enc) }
        } }
};

static const struct common_glue_ctx camellia_dec = {
        .num_funcs = 3,
        .fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
                .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_dec_16way) }
        }, {
                .num_blocks = 2,
                .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_dec_blk_2way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_dec_blk) }
        } }
};

static const struct common_glue_ctx camellia_dec_cbc = {
        .num_funcs = 3,
        .fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
                .fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_cbc_dec_16way) }
        }, {
                .num_blocks = 2,
                .fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_decrypt_cbc_2way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_dec_blk) }
        } }
};

static const struct common_glue_ctx camellia_dec_xts = {
        .num_funcs = 2,
        .fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_dec_16way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_dec) }
        } }
};

static int camellia_setkey(struct crypto_skcipher *tfm, const u8 *key,
                           unsigned int keylen)
{
        return __camellia_setkey(crypto_skcipher_ctx(tfm), key, keylen,
                                 &tfm->base.crt_flags);
}

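/*
 * Thin skcipher ->encrypt()/->decrypt() entry points that hand each request
 * to the glue helpers together with the matching dispatch table above.
 */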
static int ecb_encrypt(struct skcipher_request *req)
{
        return glue_ecb_req_128bit(&camellia_enc, req);
}

static int ecb_decrypt(struct skcipher_request *req)
{
        return glue_ecb_req_128bit(&camellia_dec, req);
}

static int cbc_encrypt(struct skcipher_request *req)
{
        return glue_cbc_encrypt_req_128bit(GLUE_FUNC_CAST(camellia_enc_blk),
                                           req);
}

static int cbc_decrypt(struct skcipher_request *req)
{
        return glue_cbc_decrypt_req_128bit(&camellia_dec_cbc, req);
}

static int ctr_crypt(struct skcipher_request *req)
{
        return glue_ctr_req_128bit(&camellia_ctr, req);
}

int xts_camellia_setkey(struct crypto_skcipher *tfm, const u8 *key,
                        unsigned int keylen)
{
        struct camellia_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
        u32 *flags = &tfm->base.crt_flags;
        int err;

        err = xts_verify_key(tfm, key, keylen);
        if (err)
                return err;

        /* first half of xts-key is for crypt */
        err = __camellia_setkey(&ctx->crypt_ctx, key, keylen / 2, flags);
        if (err)
                return err;

        /* second half of xts-key is for tweak */
        return __camellia_setkey(&ctx->tweak_ctx, key + keylen / 2, keylen / 2,
                                 flags);
}
EXPORT_SYMBOL_GPL(xts_camellia_setkey);

static int xts_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct camellia_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

        return glue_xts_req_128bit(&camellia_enc_xts, req,
                                   XTS_TWEAK_CAST(camellia_enc_blk),
                                   &ctx->tweak_ctx, &ctx->crypt_ctx);
}

static int xts_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct camellia_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

        return glue_xts_req_128bit(&camellia_dec_xts, req,
                                   XTS_TWEAK_CAST(camellia_enc_blk),
                                   &ctx->tweak_ctx, &ctx->crypt_ctx);
}

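/*
 * The "__" prefix and CRYPTO_ALG_INTERNAL mark these algorithms as usable
 * only while the FPU is available; users reach them through the simd
 * skcipher wrappers registered in the module init below, which handle the
 * FPU context (falling back to cryptd when SIMD cannot be used directly).
 */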
static struct skcipher_alg camellia_algs[] = {
        {
                .base.cra_name          = "__ecb(camellia)",
                .base.cra_driver_name   = "__ecb-camellia-aesni",
                .base.cra_priority      = 400,
                .base.cra_flags         = CRYPTO_ALG_INTERNAL,
                .base.cra_blocksize     = CAMELLIA_BLOCK_SIZE,
                .base.cra_ctxsize       = sizeof(struct camellia_ctx),
                .base.cra_module        = THIS_MODULE,
                .min_keysize            = CAMELLIA_MIN_KEY_SIZE,
                .max_keysize            = CAMELLIA_MAX_KEY_SIZE,
                .setkey                 = camellia_setkey,
                .encrypt                = ecb_encrypt,
                .decrypt                = ecb_decrypt,
        }, {
                .base.cra_name          = "__cbc(camellia)",
                .base.cra_driver_name   = "__cbc-camellia-aesni",
                .base.cra_priority      = 400,
                .base.cra_flags         = CRYPTO_ALG_INTERNAL,
                .base.cra_blocksize     = CAMELLIA_BLOCK_SIZE,
                .base.cra_ctxsize       = sizeof(struct camellia_ctx),
                .base.cra_module        = THIS_MODULE,
                .min_keysize            = CAMELLIA_MIN_KEY_SIZE,
                .max_keysize            = CAMELLIA_MAX_KEY_SIZE,
                .ivsize                 = CAMELLIA_BLOCK_SIZE,
                .setkey                 = camellia_setkey,
                .encrypt                = cbc_encrypt,
                .decrypt                = cbc_decrypt,
        }, {
                .base.cra_name          = "__ctr(camellia)",
                .base.cra_driver_name   = "__ctr-camellia-aesni",
                .base.cra_priority      = 400,
                .base.cra_flags         = CRYPTO_ALG_INTERNAL,
                .base.cra_blocksize     = 1,
                .base.cra_ctxsize       = sizeof(struct camellia_ctx),
                .base.cra_module        = THIS_MODULE,
                .min_keysize            = CAMELLIA_MIN_KEY_SIZE,
                .max_keysize            = CAMELLIA_MAX_KEY_SIZE,
                .ivsize                 = CAMELLIA_BLOCK_SIZE,
                .chunksize              = CAMELLIA_BLOCK_SIZE,
                .setkey                 = camellia_setkey,
                .encrypt                = ctr_crypt,
                .decrypt                = ctr_crypt,
        }, {
                .base.cra_name          = "__xts(camellia)",
                .base.cra_driver_name   = "__xts-camellia-aesni",
                .base.cra_priority      = 400,
                .base.cra_flags         = CRYPTO_ALG_INTERNAL,
                .base.cra_blocksize     = CAMELLIA_BLOCK_SIZE,
                .base.cra_ctxsize       = sizeof(struct camellia_xts_ctx),
                .base.cra_module        = THIS_MODULE,
                .min_keysize            = 2 * CAMELLIA_MIN_KEY_SIZE,
                .max_keysize            = 2 * CAMELLIA_MAX_KEY_SIZE,
                .ivsize                 = CAMELLIA_BLOCK_SIZE,
                .setkey                 = xts_camellia_setkey,
                .encrypt                = xts_encrypt,
                .decrypt                = xts_decrypt,
        },
};

static struct simd_skcipher_alg *camellia_simd_algs[ARRAY_SIZE(camellia_algs)];

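/*
 * Module init: require AVX, AES-NI and OSXSAVE, plus XSAVE-managed SSE/YMM
 * state, before registering the algorithms through the simd wrapper helpers;
 * otherwise refuse to load so another Camellia implementation is used.
 */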
static int __init camellia_aesni_init(void)
{
        const char *feature_name;

        if (!boot_cpu_has(X86_FEATURE_AVX) ||
            !boot_cpu_has(X86_FEATURE_AES) ||
            !boot_cpu_has(X86_FEATURE_OSXSAVE)) {
                pr_info("AVX or AES-NI instructions are not detected.\n");
                return -ENODEV;
        }

        if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
                                &feature_name)) {
                pr_info("CPU feature '%s' is not supported.\n", feature_name);
                return -ENODEV;
        }

        return simd_register_skciphers_compat(camellia_algs,
                                              ARRAY_SIZE(camellia_algs),
                                              camellia_simd_algs);
}

static void __exit camellia_aesni_fini(void)
{
        simd_unregister_skciphers(camellia_algs, ARRAY_SIZE(camellia_algs),
                                  camellia_simd_algs);
}

module_init(camellia_aesni_init);
module_exit(camellia_aesni_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Camellia Cipher Algorithm, AES-NI/AVX optimized");
MODULE_ALIAS_CRYPTO("camellia");
MODULE_ALIAS_CRYPTO("camellia-asm");