2024-05-10 748e4f3d702def1a4bff191e0cf93b6a05340f01
kernel/arch/sparc/crypto/sha256_glue.c
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: GPL-2.0-only
 /* Glue code for SHA256 hashing optimized for sparc64 crypto opcodes.
  *
  * This is based largely upon crypto/sha256_generic.c
@@ -14,7 +15,6 @@
 #include <linux/init.h>
 #include <linux/module.h>
 #include <linux/mm.h>
-#include <linux/cryptohash.h>
 #include <linux/types.h>
 #include <crypto/sha.h>
 
@@ -156,7 +156,7 @@
 	return 0;
 }
 
-static struct shash_alg sha256 = {
+static struct shash_alg sha256_alg = {
 	.digestsize = SHA256_DIGEST_SIZE,
 	.init = sha256_sparc64_init,
 	.update = sha256_sparc64_update,
@@ -174,7 +174,7 @@
 	}
 };
 
-static struct shash_alg sha224 = {
+static struct shash_alg sha224_alg = {
 	.digestsize = SHA224_DIGEST_SIZE,
 	.init = sha224_sparc64_init,
 	.update = sha256_sparc64_update,
@@ -206,13 +206,13 @@
 static int __init sha256_sparc64_mod_init(void)
 {
 	if (sparc64_has_sha256_opcode()) {
-		int ret = crypto_register_shash(&sha224);
+		int ret = crypto_register_shash(&sha224_alg);
 		if (ret < 0)
 			return ret;
 
-		ret = crypto_register_shash(&sha256);
+		ret = crypto_register_shash(&sha256_alg);
 		if (ret < 0) {
-			crypto_unregister_shash(&sha224);
+			crypto_unregister_shash(&sha224_alg);
 			return ret;
 		}
 
@@ -225,8 +225,8 @@
 
 static void __exit sha256_sparc64_mod_fini(void)
 {
-	crypto_unregister_shash(&sha224);
-	crypto_unregister_shash(&sha256);
+	crypto_unregister_shash(&sha224_alg);
+	crypto_unregister_shash(&sha256_alg);
 }
 
 module_init(sha256_sparc64_mod_init);
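
A minimal consumer sketch, for context only and not part of the patch above: the rename to sha256_alg/sha224_alg changes only the C symbol names, while in-kernel users look an algorithm up by its cra_name. The "sha256" lookup string below is an assumption, since the .base/cra_name fields fall outside the shown hunks, and demo_sha256_digest is a hypothetical helper.

#include <crypto/hash.h>
#include <linux/err.h>
#include <linux/types.h>

/* Hypothetical example: one-shot SHA-256 digest through the shash API. */
static int demo_sha256_digest(const u8 *data, unsigned int len, u8 *out)
{
	struct crypto_shash *tfm;
	int ret;

	/* Lookup by cra_name; the crypto core picks the highest-priority
	 * registered implementation, e.g. the sparc64 one when available. */
	tfm = crypto_alloc_shash("sha256", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	{
		SHASH_DESC_ON_STACK(desc, tfm);

		desc->tfm = tfm;
		ret = crypto_shash_digest(desc, data, len, out);
		shash_desc_zero(desc);
	}

	crypto_free_shash(tfm);
	return ret;
}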