arch/arm/crypto/crc32-ce-glue.c (GNU Linux-libre 5.4.274-gnu1, releases.git)
// SPDX-License-Identifier: GPL-2.0-only
/*
 * Accelerated CRC32(C) using ARM CRC, NEON and Crypto Extensions instructions
 *
 * Copyright (C) 2016 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <linux/cpufeature.h>
#include <linux/crc32.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/string.h>

#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>

#include <asm/hwcap.h>
#include <asm/neon.h>
#include <asm/simd.h>
#include <asm/unaligned.h>

#define PMULL_MIN_LEN           64L     /* minimum size of buffer
                                         * for crc32_pmull_le_16 */
#define SCALE_F                 16L     /* size of NEON register */

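/*
 * Core routines implemented in assembly (crc32-ce-core.S): the _pmull_le
 * variants use 64x64-bit polynomial multiplication (VMULL.P64), the
 * _armv8_le variants use the ARMv8 CRC32 instructions.
 */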
asmlinkage u32 crc32_pmull_le(const u8 buf[], u32 len, u32 init_crc);
asmlinkage u32 crc32_armv8_le(u32 init_crc, const u8 buf[], u32 len);

asmlinkage u32 crc32c_pmull_le(const u8 buf[], u32 len, u32 init_crc);
asmlinkage u32 crc32c_armv8_le(u32 init_crc, const u8 buf[], u32 len);

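/*
 * Scalar fallbacks used by the PMULL update paths for unaligned heads and
 * short tails; selected at module init depending on whether the ARMv8 CRC32
 * instructions are available (see crc32_pmull_mod_init()).
 */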
static u32 (*fallback_crc32)(u32 init_crc, const u8 buf[], u32 len);
static u32 (*fallback_crc32c)(u32 init_crc, const u8 buf[], u32 len);

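/*
 * Default seeds for an unkeyed tfm: 0 for crc32, ~0 for crc32c, whose digest
 * is inverted again in crc32c_final() to match the __crc32c_le() convention.
 */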
static int crc32_cra_init(struct crypto_tfm *tfm)
{
        u32 *key = crypto_tfm_ctx(tfm);

        *key = 0;
        return 0;
}

static int crc32c_cra_init(struct crypto_tfm *tfm)
{
        u32 *key = crypto_tfm_ctx(tfm);

        *key = ~0;
        return 0;
}

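/*
 * The optional key is the initial CRC value, passed as a little-endian u32
 * (both algorithms set CRYPTO_ALG_OPTIONAL_KEY).
 */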
static int crc32_setkey(struct crypto_shash *hash, const u8 *key,
                        unsigned int keylen)
{
        u32 *mctx = crypto_shash_ctx(hash);

        if (keylen != sizeof(u32)) {
                crypto_shash_set_flags(hash, CRYPTO_TFM_RES_BAD_KEY_LEN);
                return -EINVAL;
        }
        *mctx = le32_to_cpup((__le32 *)key);
        return 0;
}

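/*
 * crc32_init()/crc32_final() are shared by both code paths; the scalar
 * crc32_update()/crc32c_update() call the ARMv8 CRC32 instruction based
 * routines directly and remain the .update hooks only when HWCAP2_PMULL is
 * not set (see crc32_pmull_mod_init()).
 */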
static int crc32_init(struct shash_desc *desc)
{
        u32 *mctx = crypto_shash_ctx(desc->tfm);
        u32 *crc = shash_desc_ctx(desc);

        *crc = *mctx;
        return 0;
}

static int crc32_update(struct shash_desc *desc, const u8 *data,
                        unsigned int length)
{
        u32 *crc = shash_desc_ctx(desc);

        *crc = crc32_armv8_le(*crc, data, length);
        return 0;
}

static int crc32c_update(struct shash_desc *desc, const u8 *data,
                         unsigned int length)
{
        u32 *crc = shash_desc_ctx(desc);

        *crc = crc32c_armv8_le(*crc, data, length);
        return 0;
}

static int crc32_final(struct shash_desc *desc, u8 *out)
{
        u32 *crc = shash_desc_ctx(desc);

        put_unaligned_le32(*crc, out);
        return 0;
}

static int crc32c_final(struct shash_desc *desc, u8 *out)
{
        u32 *crc = shash_desc_ctx(desc);

        put_unaligned_le32(~*crc, out);
        return 0;
}

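/*
 * PMULL code path: process an unaligned head and any short tail with the
 * scalar fallback, and hand 16-byte multiples of at least PMULL_MIN_LEN
 * bytes to the NEON/PMULL routine inside a kernel_neon_begin()/end()
 * section.  When SIMD is not usable (e.g. in hard IRQ context), everything
 * goes through the fallback.
 */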
static int crc32_pmull_update(struct shash_desc *desc, const u8 *data,
                              unsigned int length)
{
        u32 *crc = shash_desc_ctx(desc);
        unsigned int l;

        if (crypto_simd_usable()) {
                if ((u32)data % SCALE_F) {
                        l = min_t(u32, length, SCALE_F - ((u32)data % SCALE_F));

                        *crc = fallback_crc32(*crc, data, l);

                        data += l;
                        length -= l;
                }

                if (length >= PMULL_MIN_LEN) {
                        l = round_down(length, SCALE_F);

                        kernel_neon_begin();
                        *crc = crc32_pmull_le(data, l, *crc);
                        kernel_neon_end();

                        data += l;
                        length -= l;
                }
        }

        if (length > 0)
                *crc = fallback_crc32(*crc, data, length);

        return 0;
}

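/* Same structure as crc32_pmull_update(), using the CRC32C routines. */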
static int crc32c_pmull_update(struct shash_desc *desc, const u8 *data,
                               unsigned int length)
{
        u32 *crc = shash_desc_ctx(desc);
        unsigned int l;

        if (crypto_simd_usable()) {
                if ((u32)data % SCALE_F) {
                        l = min_t(u32, length, SCALE_F - ((u32)data % SCALE_F));

                        *crc = fallback_crc32c(*crc, data, l);

                        data += l;
                        length -= l;
                }

                if (length >= PMULL_MIN_LEN) {
                        l = round_down(length, SCALE_F);

                        kernel_neon_begin();
                        *crc = crc32c_pmull_le(data, l, *crc);
                        kernel_neon_end();

                        data += l;
                        length -= l;
                }
        }

        if (length > 0)
                *crc = fallback_crc32c(*crc, data, length);

        return 0;
}

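/*
 * Register both "crc32" and "crc32c" at priority 200 so they are preferred
 * over the generic table-driven implementations.
 */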
static struct shash_alg crc32_pmull_algs[] = { {
        .setkey                 = crc32_setkey,
        .init                   = crc32_init,
        .update                 = crc32_update,
        .final                  = crc32_final,
        .descsize               = sizeof(u32),
        .digestsize             = sizeof(u32),

        .base.cra_ctxsize       = sizeof(u32),
        .base.cra_init          = crc32_cra_init,
        .base.cra_name          = "crc32",
        .base.cra_driver_name   = "crc32-arm-ce",
        .base.cra_priority      = 200,
        .base.cra_flags         = CRYPTO_ALG_OPTIONAL_KEY,
        .base.cra_blocksize     = 1,
        .base.cra_module        = THIS_MODULE,
}, {
        .setkey                 = crc32_setkey,
        .init                   = crc32_init,
        .update                 = crc32c_update,
        .final                  = crc32c_final,
        .descsize               = sizeof(u32),
        .digestsize             = sizeof(u32),

        .base.cra_ctxsize       = sizeof(u32),
        .base.cra_init          = crc32c_cra_init,
        .base.cra_name          = "crc32c",
        .base.cra_driver_name   = "crc32c-arm-ce",
        .base.cra_priority      = 200,
        .base.cra_flags         = CRYPTO_ALG_OPTIONAL_KEY,
        .base.cra_blocksize     = 1,
        .base.cra_module        = THIS_MODULE,
} };

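/*
 * PMULL enables the accelerated .update hooks; their scalar fallback is the
 * ARMv8 CRC32 instructions when available, or the generic crc32_le() /
 * __crc32c_le() otherwise.  Without either PMULL or CRC32 there is nothing
 * to offer, so bail out.
 */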
static int __init crc32_pmull_mod_init(void)
{
        if (elf_hwcap2 & HWCAP2_PMULL) {
                crc32_pmull_algs[0].update = crc32_pmull_update;
                crc32_pmull_algs[1].update = crc32c_pmull_update;

                if (elf_hwcap2 & HWCAP2_CRC32) {
                        fallback_crc32 = crc32_armv8_le;
                        fallback_crc32c = crc32c_armv8_le;
                } else {
                        fallback_crc32 = crc32_le;
                        fallback_crc32c = __crc32c_le;
                }
        } else if (!(elf_hwcap2 & HWCAP2_CRC32)) {
                return -ENODEV;
        }

        return crypto_register_shashes(crc32_pmull_algs,
                                       ARRAY_SIZE(crc32_pmull_algs));
}

static void __exit crc32_pmull_mod_exit(void)
{
        crypto_unregister_shashes(crc32_pmull_algs,
                                  ARRAY_SIZE(crc32_pmull_algs));
}

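/*
 * Allow the module to be autoloaded on systems that advertise either the
 * CRC32 or the PMULL CPU feature.
 */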
static const struct cpu_feature __maybe_unused crc32_cpu_feature[] = {
        { cpu_feature(CRC32) }, { cpu_feature(PMULL) }, { }
};
MODULE_DEVICE_TABLE(cpu, crc32_cpu_feature);

module_init(crc32_pmull_mod_init);
module_exit(crc32_pmull_mod_exit);

MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("crc32");
MODULE_ALIAS_CRYPTO("crc32c");