/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Glue Code for the AVX/AES-NI/GFNI assembler implementation of the ARIA Cipher
 *
 * Copyright (c) 2022 Taehee Yoo <ap420073@gmail.com>
 */

#include <crypto/algapi.h>
#include <crypto/internal/simd.h>
#include <crypto/aria.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/types.h>

#include "ecb_cbc_helpers.h"
#include "aria-avx.h"

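/*
 * Assembler routines that process 16 ARIA blocks per call; the GFNI
 * variants are selected at module init when the CPU supports GFNI.
 */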
asmlinkage void aria_aesni_avx_encrypt_16way(const void *ctx, u8 *dst,
                                             const u8 *src);
asmlinkage void aria_aesni_avx_decrypt_16way(const void *ctx, u8 *dst,
                                             const u8 *src);
asmlinkage void aria_aesni_avx_ctr_crypt_16way(const void *ctx, u8 *dst,
                                               const u8 *src,
                                               u8 *keystream, u8 *iv);
asmlinkage void aria_aesni_avx_gfni_encrypt_16way(const void *ctx, u8 *dst,
                                                  const u8 *src);
asmlinkage void aria_aesni_avx_gfni_decrypt_16way(const void *ctx, u8 *dst,
                                                  const u8 *src);
asmlinkage void aria_aesni_avx_gfni_ctr_crypt_16way(const void *ctx, u8 *dst,
                                                    const u8 *src,
                                                    u8 *keystream, u8 *iv);

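/* Dispatch table, filled in at module init with the GFNI or plain AVX routines. */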
static struct aria_avx_ops aria_ops;

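/*
 * ECB: process 16 blocks at a time with the SIMD routine, then fall back to
 * the generic single-block aria_encrypt()/aria_decrypt() for the remainder.
 */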
static int ecb_do_encrypt(struct skcipher_request *req, const u32 *rkey)
{
        ECB_WALK_START(req, ARIA_BLOCK_SIZE, ARIA_AESNI_PARALLEL_BLOCKS);
        ECB_BLOCK(ARIA_AESNI_PARALLEL_BLOCKS, aria_ops.aria_encrypt_16way);
        ECB_BLOCK(1, aria_encrypt);
        ECB_WALK_END();
}

static int ecb_do_decrypt(struct skcipher_request *req, const u32 *rkey)
{
        ECB_WALK_START(req, ARIA_BLOCK_SIZE, ARIA_AESNI_PARALLEL_BLOCKS);
        ECB_BLOCK(ARIA_AESNI_PARALLEL_BLOCKS, aria_ops.aria_decrypt_16way);
        ECB_BLOCK(1, aria_decrypt);
        ECB_WALK_END();
}

static int aria_avx_ecb_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct aria_ctx *ctx = crypto_skcipher_ctx(tfm);

        return ecb_do_encrypt(req, ctx->enc_key[0]);
}

static int aria_avx_ecb_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct aria_ctx *ctx = crypto_skcipher_ctx(tfm);

        return ecb_do_decrypt(req, ctx->dec_key[0]);
}

static int aria_avx_set_key(struct crypto_skcipher *tfm, const u8 *key,
                            unsigned int keylen)
{
        return aria_set_key(&tfm->base, key, keylen);
}

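/*
 * CTR: handle 16 blocks per SIMD call under kernel_fpu_begin()/end(), then
 * whole single blocks with the generic cipher, and finally XOR a truncated
 * keystream block over any partial tail.
 */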
static int aria_avx_ctr_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct aria_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while ((nbytes = walk.nbytes) > 0) {
                const u8 *src = walk.src.virt.addr;
                u8 *dst = walk.dst.virt.addr;

                while (nbytes >= ARIA_AESNI_PARALLEL_BLOCK_SIZE) {
                        u8 keystream[ARIA_AESNI_PARALLEL_BLOCK_SIZE];

                        kernel_fpu_begin();
                        aria_ops.aria_ctr_crypt_16way(ctx, dst, src, keystream,
                                                      walk.iv);
                        kernel_fpu_end();
                        dst += ARIA_AESNI_PARALLEL_BLOCK_SIZE;
                        src += ARIA_AESNI_PARALLEL_BLOCK_SIZE;
                        nbytes -= ARIA_AESNI_PARALLEL_BLOCK_SIZE;
                }

                while (nbytes >= ARIA_BLOCK_SIZE) {
                        u8 keystream[ARIA_BLOCK_SIZE];

                        memcpy(keystream, walk.iv, ARIA_BLOCK_SIZE);
                        crypto_inc(walk.iv, ARIA_BLOCK_SIZE);

                        aria_encrypt(ctx, keystream, keystream);

                        crypto_xor_cpy(dst, src, keystream, ARIA_BLOCK_SIZE);
                        dst += ARIA_BLOCK_SIZE;
                        src += ARIA_BLOCK_SIZE;
                        nbytes -= ARIA_BLOCK_SIZE;
                }

                if (walk.nbytes == walk.total && nbytes > 0) {
                        u8 keystream[ARIA_BLOCK_SIZE];

                        memcpy(keystream, walk.iv, ARIA_BLOCK_SIZE);
                        crypto_inc(walk.iv, ARIA_BLOCK_SIZE);

                        aria_encrypt(ctx, keystream, keystream);

                        crypto_xor_cpy(dst, src, keystream, nbytes);
                        dst += nbytes;
                        src += nbytes;
                        nbytes = 0;
                }
                err = skcipher_walk_done(&walk, nbytes);
        }

        return err;
}

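/*
 * Internal (__) algorithms; the SIMD helper registers the non-internal
 * "ecb(aria)" and "ctr(aria)" wrappers around them in aria_avx_init().
 */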
static struct skcipher_alg aria_algs[] = {
        {
                .base.cra_name          = "__ecb(aria)",
                .base.cra_driver_name   = "__ecb-aria-avx",
                .base.cra_priority      = 400,
                .base.cra_flags         = CRYPTO_ALG_INTERNAL,
                .base.cra_blocksize     = ARIA_BLOCK_SIZE,
                .base.cra_ctxsize       = sizeof(struct aria_ctx),
                .base.cra_module        = THIS_MODULE,
                .min_keysize            = ARIA_MIN_KEY_SIZE,
                .max_keysize            = ARIA_MAX_KEY_SIZE,
                .setkey                 = aria_avx_set_key,
                .encrypt                = aria_avx_ecb_encrypt,
                .decrypt                = aria_avx_ecb_decrypt,
        }, {
                .base.cra_name          = "__ctr(aria)",
                .base.cra_driver_name   = "__ctr-aria-avx",
                .base.cra_priority      = 400,
                .base.cra_flags         = CRYPTO_ALG_INTERNAL,
                .base.cra_blocksize     = 1,
                .base.cra_ctxsize       = sizeof(struct aria_ctx),
                .base.cra_module        = THIS_MODULE,
                .min_keysize            = ARIA_MIN_KEY_SIZE,
                .max_keysize            = ARIA_MAX_KEY_SIZE,
                .ivsize                 = ARIA_BLOCK_SIZE,
                .chunksize              = ARIA_BLOCK_SIZE,
                .walksize               = 16 * ARIA_BLOCK_SIZE,
                .setkey                 = aria_avx_set_key,
                .encrypt                = aria_avx_ctr_encrypt,
                .decrypt                = aria_avx_ctr_encrypt,
        }
};

static struct simd_skcipher_alg *aria_simd_algs[ARRAY_SIZE(aria_algs)];

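/*
 * Require AVX, AES-NI and OSXSAVE with XMM/YMM state enabled; pick the GFNI
 * implementations when the CPU has GFNI, otherwise the plain AES-NI/AVX ones.
 */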
static int __init aria_avx_init(void)
{
        const char *feature_name;

        if (!boot_cpu_has(X86_FEATURE_AVX) ||
            !boot_cpu_has(X86_FEATURE_AES) ||
            !boot_cpu_has(X86_FEATURE_OSXSAVE)) {
                pr_info("AVX or AES-NI instructions are not detected.\n");
                return -ENODEV;
        }

        if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
                               &feature_name)) {
                pr_info("CPU feature '%s' is not supported.\n", feature_name);
                return -ENODEV;
        }

        if (boot_cpu_has(X86_FEATURE_GFNI)) {
                aria_ops.aria_encrypt_16way = aria_aesni_avx_gfni_encrypt_16way;
                aria_ops.aria_decrypt_16way = aria_aesni_avx_gfni_decrypt_16way;
                aria_ops.aria_ctr_crypt_16way = aria_aesni_avx_gfni_ctr_crypt_16way;
        } else {
                aria_ops.aria_encrypt_16way = aria_aesni_avx_encrypt_16way;
                aria_ops.aria_decrypt_16way = aria_aesni_avx_decrypt_16way;
                aria_ops.aria_ctr_crypt_16way = aria_aesni_avx_ctr_crypt_16way;
        }

        return simd_register_skciphers_compat(aria_algs,
                                              ARRAY_SIZE(aria_algs),
                                              aria_simd_algs);
}

static void __exit aria_avx_exit(void)
{
        simd_unregister_skciphers(aria_algs, ARRAY_SIZE(aria_algs),
                                  aria_simd_algs);
}

module_init(aria_avx_init);
module_exit(aria_avx_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Taehee Yoo <ap420073@gmail.com>");
MODULE_DESCRIPTION("ARIA Cipher Algorithm, AVX/AES-NI/GFNI optimized");
MODULE_ALIAS_CRYPTO("aria");
MODULE_ALIAS_CRYPTO("aria-aesni-avx");