forked from ~ljy/RK356X_SDK_RELEASE

hc
2024-05-11 297b60346df8beafee954a0fd7c2d64f33f3b9bc
kernel/arch/x86/crypto/serpent_avx_glue.c
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: GPL-2.0-or-later
 /*
  * Glue Code for AVX assembler versions of Serpent Cipher
  *
@@ -5,22 +6,6 @@
  * <Johannes.Goetzfried@informatik.stud.uni-erlangen.de>
  *
  * Copyright © 2011-2013 Jussi Kivilinna <jussi.kivilinna@iki.fi>
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation; either version 2 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program; if not, write to the Free Software
- * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
- * USA
- *
  */

 #include <linux/module.h>
@@ -35,33 +20,35 @@
 #include <asm/crypto/serpent-avx.h>

 /* 8-way parallel cipher functions */
-asmlinkage void serpent_ecb_enc_8way_avx(struct serpent_ctx *ctx, u8 *dst,
+asmlinkage void serpent_ecb_enc_8way_avx(const void *ctx, u8 *dst,
					 const u8 *src);
 EXPORT_SYMBOL_GPL(serpent_ecb_enc_8way_avx);

-asmlinkage void serpent_ecb_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst,
+asmlinkage void serpent_ecb_dec_8way_avx(const void *ctx, u8 *dst,
					 const u8 *src);
 EXPORT_SYMBOL_GPL(serpent_ecb_dec_8way_avx);

-asmlinkage void serpent_cbc_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst,
+asmlinkage void serpent_cbc_dec_8way_avx(const void *ctx, u8 *dst,
					 const u8 *src);
 EXPORT_SYMBOL_GPL(serpent_cbc_dec_8way_avx);

-asmlinkage void serpent_ctr_8way_avx(struct serpent_ctx *ctx, u8 *dst,
-				     const u8 *src, le128 *iv);
+asmlinkage void serpent_ctr_8way_avx(const void *ctx, u8 *dst, const u8 *src,
+				     le128 *iv);
 EXPORT_SYMBOL_GPL(serpent_ctr_8way_avx);

-asmlinkage void serpent_xts_enc_8way_avx(struct serpent_ctx *ctx, u8 *dst,
+asmlinkage void serpent_xts_enc_8way_avx(const void *ctx, u8 *dst,
					 const u8 *src, le128 *iv);
 EXPORT_SYMBOL_GPL(serpent_xts_enc_8way_avx);

-asmlinkage void serpent_xts_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst,
+asmlinkage void serpent_xts_dec_8way_avx(const void *ctx, u8 *dst,
					 const u8 *src, le128 *iv);
 EXPORT_SYMBOL_GPL(serpent_xts_dec_8way_avx);

-void __serpent_crypt_ctr(void *ctx, u128 *dst, const u128 *src, le128 *iv)
+void __serpent_crypt_ctr(const void *ctx, u8 *d, const u8 *s, le128 *iv)
 {
	be128 ctrblk;
+	u128 *dst = (u128 *)d;
+	const u128 *src = (const u128 *)s;

	le128_to_be128(&ctrblk, iv);
	le128_inc(iv);
@@ -71,17 +58,15 @@
 }
 EXPORT_SYMBOL_GPL(__serpent_crypt_ctr);

-void serpent_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
+void serpent_xts_enc(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
 {
-	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
-				  GLUE_FUNC_CAST(__serpent_encrypt));
+	glue_xts_crypt_128bit_one(ctx, dst, src, iv, __serpent_encrypt);
 }
 EXPORT_SYMBOL_GPL(serpent_xts_enc);

-void serpent_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
+void serpent_xts_dec(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
 {
-	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
-				  GLUE_FUNC_CAST(__serpent_decrypt));
+	glue_xts_crypt_128bit_one(ctx, dst, src, iv, __serpent_decrypt);
 }
 EXPORT_SYMBOL_GPL(serpent_xts_dec);

@@ -117,10 +102,10 @@

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
-		.fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_enc_8way_avx) }
+		.fn_u = { .ecb = serpent_ecb_enc_8way_avx }
	}, {
		.num_blocks = 1,
-		.fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_encrypt) }
+		.fn_u = { .ecb = __serpent_encrypt }
	} }
 };

@@ -130,10 +115,10 @@

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
-		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(serpent_ctr_8way_avx) }
+		.fn_u = { .ctr = serpent_ctr_8way_avx }
	}, {
		.num_blocks = 1,
-		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(__serpent_crypt_ctr) }
+		.fn_u = { .ctr = __serpent_crypt_ctr }
	} }
 };

@@ -143,10 +128,10 @@

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
-		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_enc_8way_avx) }
+		.fn_u = { .xts = serpent_xts_enc_8way_avx }
	}, {
		.num_blocks = 1,
-		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_enc) }
+		.fn_u = { .xts = serpent_xts_enc }
	} }
 };

@@ -156,10 +141,10 @@

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
-		.fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_dec_8way_avx) }
+		.fn_u = { .ecb = serpent_ecb_dec_8way_avx }
	}, {
		.num_blocks = 1,
-		.fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_decrypt) }
+		.fn_u = { .ecb = __serpent_decrypt }
	} }
 };

@@ -169,10 +154,10 @@

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
-		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(serpent_cbc_dec_8way_avx) }
+		.fn_u = { .cbc = serpent_cbc_dec_8way_avx }
	}, {
		.num_blocks = 1,
-		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(__serpent_decrypt) }
+		.fn_u = { .cbc = __serpent_decrypt }
	} }
 };

@@ -182,10 +167,10 @@

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
-		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_dec_8way_avx) }
+		.fn_u = { .xts = serpent_xts_dec_8way_avx }
	}, {
		.num_blocks = 1,
-		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_dec) }
+		.fn_u = { .xts = serpent_xts_dec }
	} }
 };

@@ -201,8 +186,7 @@

 static int cbc_encrypt(struct skcipher_request *req)
 {
-	return glue_cbc_encrypt_req_128bit(GLUE_FUNC_CAST(__serpent_encrypt),
-					   req);
+	return glue_cbc_encrypt_req_128bit(__serpent_encrypt, req);
 }

 static int cbc_decrypt(struct skcipher_request *req)
@@ -221,8 +205,8 @@
	struct serpent_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	return glue_xts_req_128bit(&serpent_enc_xts, req,
-				   XTS_TWEAK_CAST(__serpent_encrypt),
-				   &ctx->tweak_ctx, &ctx->crypt_ctx);
+				   __serpent_encrypt, &ctx->tweak_ctx,
+				   &ctx->crypt_ctx, false);
 }

 static int xts_decrypt(struct skcipher_request *req)
@@ -231,8 +215,8 @@
	struct serpent_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	return glue_xts_req_128bit(&serpent_dec_xts, req,
-				   XTS_TWEAK_CAST(__serpent_encrypt),
-				   &ctx->tweak_ctx, &ctx->crypt_ctx);
+				   __serpent_encrypt, &ctx->tweak_ctx,
+				   &ctx->crypt_ctx, true);
 }

 static struct skcipher_alg serpent_algs[] = {
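
Every hunk above applies the same pattern: the typed context argument (struct serpent_ctx * or void *) becomes const void *, the block pointers become plain u8 *, and the typed pointer is recovered with a cast inside the function body, as __serpent_crypt_ctr now does. Once all helpers share one prototype, the .fn_u table initializers and the glue_*_req_128bit() calls no longer need the GLUE_FUNC_CAST/GLUE_CBC_FUNC_CAST/GLUE_CTR_FUNC_CAST/GLUE_XTS_FUNC_CAST/XTS_TWEAK_CAST wrappers, and glue_xts_req_128bit() gains an explicit bool decrypt as its new final argument. A minimal standalone sketch of the idea, using hypothetical toy_* names rather than anything from this file:

```c
#include <stdint.h>

typedef uint8_t u8;

/* Hypothetical cipher context, standing in for struct serpent_ctx. */
struct toy_ctx {
	u8 round_key[16];
};

/*
 * One shared prototype for every table entry. Taking const void *ctx,
 * instead of a per-cipher context type, is what lets each function
 * pointer be stored in the table without a cast.
 */
typedef void (*ecb_fn)(const void *ctx, u8 *dst, const u8 *src);

static void toy_encrypt(const void *c, u8 *dst, const u8 *src)
{
	/* Recover the typed context inside the function, mirroring
	 * the u128 casts added to __serpent_crypt_ctr above. */
	const struct toy_ctx *ctx = c;
	int i;

	for (i = 0; i < 16; i++)	/* one 16-byte block */
		dst[i] = src[i] ^ ctx->round_key[i];
}

/* No GLUE_FUNC_CAST-style wrapper needed: the types already match. */
static const ecb_fn toy_table[] = { toy_encrypt };
```

Under the old scheme the casts happened at the table, so every indirect call went through a function pointer whose type did not match the callee. Keeping the prototypes uniform makes those indirect calls type-correct, which, as far as I can tell, is what the upstream prototype-regularization series was after (mismatched indirect-call types trip Control Flow Integrity checking).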