arch/arm64/crypto/sha256-glue.c (Linux 6.7-rc7)

// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Linux/arm64 port of the OpenSSL SHA256 implementation for AArch64
 *
 * Copyright (c) 2016 Linaro Ltd. <ard.biesheuvel@linaro.org>
 */

#include <asm/hwcap.h>
#include <asm/neon.h>
#include <asm/simd.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <crypto/sha2.h>
#include <crypto/sha256_base.h>
#include <linux/module.h>
#include <linux/string.h>
#include <linux/types.h>

MODULE_DESCRIPTION("SHA-224/SHA-256 secure hash for arm64");
MODULE_AUTHOR("Andy Polyakov <appro@openssl.org>");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("sha224");
MODULE_ALIAS_CRYPTO("sha256");

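/*
 * The core block function is implemented in assembly, generated from the
 * OpenSSL perlasm source. It is exported so that other SHA-256 drivers
 * (e.g. the ARMv8 Crypto Extensions glue) can use it as a scalar fallback
 * when SIMD is not usable.
 */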
asmlinkage void sha256_block_data_order(u32 *digest, const void *data,
					unsigned int num_blks);
EXPORT_SYMBOL(sha256_block_data_order);

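/*
 * Adapt the raw assembly entry point to the sha256_block_fn prototype
 * expected by the sha256_base helpers.
 */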
static void sha256_arm64_transform(struct sha256_state *sst, u8 const *src,
				   int blocks)
{
	sha256_block_data_order(sst->state, src, blocks);
}

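/*
 * NEON-accelerated block function. Unlike the scalar routine above, it may
 * only be called between kernel_neon_begin() and kernel_neon_end().
 */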
asmlinkage void sha256_block_neon(u32 *digest, const void *data,
				  unsigned int num_blks);

static void sha256_neon_transform(struct sha256_state *sst, u8 const *src,
				  int blocks)
{
	sha256_block_neon(sst->state, src, blocks);
}

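/*
 * shash entry points for the scalar implementation: the sha256_base
 * helpers take care of buffering and padding, while the assembly routine
 * does the actual block processing.
 */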
static int crypto_sha256_arm64_update(struct shash_desc *desc, const u8 *data,
				      unsigned int len)
{
	return sha256_base_do_update(desc, data, len, sha256_arm64_transform);
}

static int crypto_sha256_arm64_finup(struct shash_desc *desc, const u8 *data,
				     unsigned int len, u8 *out)
{
	if (len)
		sha256_base_do_update(desc, data, len, sha256_arm64_transform);
	sha256_base_do_finalize(desc, sha256_arm64_transform);

	return sha256_base_finish(desc, out);
}

static int crypto_sha256_arm64_final(struct shash_desc *desc, u8 *out)
{
	return crypto_sha256_arm64_finup(desc, NULL, 0, out);
}

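/*
 * The scalar algorithms are always registered. Priority 125 ranks them
 * above the generic C implementation (priority 100) but below the NEON
 * variants registered further down.
 */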
static struct shash_alg algs[] = { {
	.digestsize		= SHA256_DIGEST_SIZE,
	.init			= sha256_base_init,
	.update			= crypto_sha256_arm64_update,
	.final			= crypto_sha256_arm64_final,
	.finup			= crypto_sha256_arm64_finup,
	.descsize		= sizeof(struct sha256_state),
	.base.cra_name		= "sha256",
	.base.cra_driver_name	= "sha256-arm64",
	.base.cra_priority	= 125,
	.base.cra_blocksize	= SHA256_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
}, {
	.digestsize		= SHA224_DIGEST_SIZE,
	.init			= sha224_base_init,
	.update			= crypto_sha256_arm64_update,
	.final			= crypto_sha256_arm64_final,
	.finup			= crypto_sha256_arm64_finup,
	.descsize		= sizeof(struct sha256_state),
	.base.cra_name		= "sha224",
	.base.cra_driver_name	= "sha224-arm64",
	.base.cra_priority	= 125,
	.base.cra_blocksize	= SHA224_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
} };

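/*
 * NEON update path. When SIMD is not usable (e.g. in hard IRQ context),
 * fall back to the scalar block function, which is safe to use anywhere.
 */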
static int sha256_update_neon(struct shash_desc *desc, const u8 *data,
			      unsigned int len)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);

	if (!crypto_simd_usable())
		return sha256_base_do_update(desc, data, len,
				sha256_arm64_transform);

	while (len > 0) {
		unsigned int chunk = len;

		/*
		 * Don't hog the CPU for the entire time it takes to process all
		 * input when running on a preemptible kernel, but process the
		 * data block by block instead.
		 */
		if (IS_ENABLED(CONFIG_PREEMPTION) &&
		    chunk + sctx->count % SHA256_BLOCK_SIZE > SHA256_BLOCK_SIZE)
			chunk = SHA256_BLOCK_SIZE -
				sctx->count % SHA256_BLOCK_SIZE;

		kernel_neon_begin();
		sha256_base_do_update(desc, data, chunk, sha256_neon_transform);
		kernel_neon_end();
		data += chunk;
		len -= chunk;
	}
	return 0;
}

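/*
 * Finalization mirrors the update path: use the scalar fallback when SIMD
 * is unusable, otherwise do the final blocks under kernel_neon_begin().
 */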
static int sha256_finup_neon(struct shash_desc *desc, const u8 *data,
			     unsigned int len, u8 *out)
{
	if (!crypto_simd_usable()) {
		if (len)
			sha256_base_do_update(desc, data, len,
				sha256_arm64_transform);
		sha256_base_do_finalize(desc, sha256_arm64_transform);
	} else {
		if (len)
			sha256_update_neon(desc, data, len);
		kernel_neon_begin();
		sha256_base_do_finalize(desc, sha256_neon_transform);
		kernel_neon_end();
	}
	return sha256_base_finish(desc, out);
}

static int sha256_final_neon(struct shash_desc *desc, u8 *out)
{
	return sha256_finup_neon(desc, NULL, 0, out);
}

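/*
 * NEON variants, at priority 150 so that they are preferred over the
 * scalar versions (priority 125) whenever they are registered.
 */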
static struct shash_alg neon_algs[] = { {
	.digestsize		= SHA256_DIGEST_SIZE,
	.init			= sha256_base_init,
	.update			= sha256_update_neon,
	.final			= sha256_final_neon,
	.finup			= sha256_finup_neon,
	.descsize		= sizeof(struct sha256_state),
	.base.cra_name		= "sha256",
	.base.cra_driver_name	= "sha256-arm64-neon",
	.base.cra_priority	= 150,
	.base.cra_blocksize	= SHA256_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
}, {
	.digestsize		= SHA224_DIGEST_SIZE,
	.init			= sha224_base_init,
	.update			= sha256_update_neon,
	.final			= sha256_final_neon,
	.finup			= sha256_finup_neon,
	.descsize		= sizeof(struct sha256_state),
	.base.cra_name		= "sha224",
	.base.cra_driver_name	= "sha224-arm64-neon",
	.base.cra_priority	= 150,
	.base.cra_blocksize	= SHA224_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
} };

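/*
 * Register the scalar algorithms unconditionally; the NEON variants are
 * only registered when the CPU advertises Advanced SIMD support. If the
 * second registration fails, roll back the first.
 */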
static int __init sha256_mod_init(void)
{
	int ret = crypto_register_shashes(algs, ARRAY_SIZE(algs));
	if (ret)
		return ret;

	if (cpu_have_named_feature(ASIMD)) {
		ret = crypto_register_shashes(neon_algs, ARRAY_SIZE(neon_algs));
		if (ret)
			crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
	}
	return ret;
}

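/* Tear down symmetrically: the NEON variants exist only with ASIMD. */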
static void __exit sha256_mod_fini(void)
{
	if (cpu_have_named_feature(ASIMD))
		crypto_unregister_shashes(neon_algs, ARRAY_SIZE(neon_algs));
	crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
}

module_init(sha256_mod_init);
module_exit(sha256_mod_fini);