forked from ~ljy/RK356X_SDK_RELEASE

hc
2024-05-10 9999e48639b3cecb08ffb37358bcba3b48161b29
kernel/arch/x86/crypto/cast6_avx_glue.c
@@ -1,26 +1,11 @@
+// SPDX-License-Identifier: GPL-2.0-or-later
 /*
- * Glue Code for the AVX assembler implemention of the Cast6 Cipher
+ * Glue Code for the AVX assembler implementation of the Cast6 Cipher
  *
  * Copyright (C) 2012 Johannes Goetzfried
  *     <Johannes.Goetzfried@informatik.stud.uni-erlangen.de>
  *
  * Copyright © 2013 Jussi Kivilinna <jussi.kivilinna@iki.fi>
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation; either version 2 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program; if not, write to the Free Software
- * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307
- * USA
- *
  */
 
 #include <linux/module.h>
@@ -35,20 +20,17 @@
 
 #define CAST6_PARALLEL_BLOCKS 8
 
-asmlinkage void cast6_ecb_enc_8way(struct cast6_ctx *ctx, u8 *dst,
-				   const u8 *src);
-asmlinkage void cast6_ecb_dec_8way(struct cast6_ctx *ctx, u8 *dst,
-				   const u8 *src);
+asmlinkage void cast6_ecb_enc_8way(const void *ctx, u8 *dst, const u8 *src);
+asmlinkage void cast6_ecb_dec_8way(const void *ctx, u8 *dst, const u8 *src);
 
-asmlinkage void cast6_cbc_dec_8way(struct cast6_ctx *ctx, u8 *dst,
-				   const u8 *src);
-asmlinkage void cast6_ctr_8way(struct cast6_ctx *ctx, u8 *dst, const u8 *src,
+asmlinkage void cast6_cbc_dec_8way(const void *ctx, u8 *dst, const u8 *src);
+asmlinkage void cast6_ctr_8way(const void *ctx, u8 *dst, const u8 *src,
 			       le128 *iv);
 
-asmlinkage void cast6_xts_enc_8way(struct cast6_ctx *ctx, u8 *dst,
-				   const u8 *src, le128 *iv);
-asmlinkage void cast6_xts_dec_8way(struct cast6_ctx *ctx, u8 *dst,
-				   const u8 *src, le128 *iv);
+asmlinkage void cast6_xts_enc_8way(const void *ctx, u8 *dst, const u8 *src,
+				   le128 *iv);
+asmlinkage void cast6_xts_dec_8way(const void *ctx, u8 *dst, const u8 *src,
+				   le128 *iv);
 
 static int cast6_setkey_skcipher(struct crypto_skcipher *tfm,
 				 const u8 *key, unsigned int keylen)
@@ -56,21 +38,21 @@
 	return cast6_setkey(&tfm->base, key, keylen);
 }
 
-static void cast6_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
+static void cast6_xts_enc(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
 {
-	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
-				  GLUE_FUNC_CAST(__cast6_encrypt));
+	glue_xts_crypt_128bit_one(ctx, dst, src, iv, __cast6_encrypt);
 }
 
-static void cast6_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
+static void cast6_xts_dec(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
 {
-	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
-				  GLUE_FUNC_CAST(__cast6_decrypt));
+	glue_xts_crypt_128bit_one(ctx, dst, src, iv, __cast6_decrypt);
 }
 
-static void cast6_crypt_ctr(void *ctx, u128 *dst, const u128 *src, le128 *iv)
+static void cast6_crypt_ctr(const void *ctx, u8 *d, const u8 *s, le128 *iv)
 {
 	be128 ctrblk;
+	u128 *dst = (u128 *)d;
+	const u128 *src = (const u128 *)s;
 
 	le128_to_be128(&ctrblk, iv);
 	le128_inc(iv);
@@ -85,10 +67,10 @@
 
 	.funcs = { {
 		.num_blocks = CAST6_PARALLEL_BLOCKS,
-		.fn_u = { .ecb = GLUE_FUNC_CAST(cast6_ecb_enc_8way) }
+		.fn_u = { .ecb = cast6_ecb_enc_8way }
 	}, {
 		.num_blocks = 1,
-		.fn_u = { .ecb = GLUE_FUNC_CAST(__cast6_encrypt) }
+		.fn_u = { .ecb = __cast6_encrypt }
 	} }
 };
 
@@ -98,10 +80,10 @@
 
 	.funcs = { {
 		.num_blocks = CAST6_PARALLEL_BLOCKS,
-		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(cast6_ctr_8way) }
+		.fn_u = { .ctr = cast6_ctr_8way }
 	}, {
 		.num_blocks = 1,
-		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(cast6_crypt_ctr) }
+		.fn_u = { .ctr = cast6_crypt_ctr }
 	} }
 };
 
@@ -111,10 +93,10 @@
 
 	.funcs = { {
 		.num_blocks = CAST6_PARALLEL_BLOCKS,
-		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(cast6_xts_enc_8way) }
+		.fn_u = { .xts = cast6_xts_enc_8way }
 	}, {
 		.num_blocks = 1,
-		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(cast6_xts_enc) }
+		.fn_u = { .xts = cast6_xts_enc }
 	} }
 };
 
@@ -124,10 +106,10 @@
 
 	.funcs = { {
 		.num_blocks = CAST6_PARALLEL_BLOCKS,
-		.fn_u = { .ecb = GLUE_FUNC_CAST(cast6_ecb_dec_8way) }
+		.fn_u = { .ecb = cast6_ecb_dec_8way }
 	}, {
 		.num_blocks = 1,
-		.fn_u = { .ecb = GLUE_FUNC_CAST(__cast6_decrypt) }
+		.fn_u = { .ecb = __cast6_decrypt }
 	} }
 };
 
@@ -137,10 +119,10 @@
 
 	.funcs = { {
 		.num_blocks = CAST6_PARALLEL_BLOCKS,
-		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(cast6_cbc_dec_8way) }
+		.fn_u = { .cbc = cast6_cbc_dec_8way }
 	}, {
 		.num_blocks = 1,
-		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(__cast6_decrypt) }
+		.fn_u = { .cbc = __cast6_decrypt }
 	} }
 };
 
@@ -150,10 +132,10 @@
 
 	.funcs = { {
 		.num_blocks = CAST6_PARALLEL_BLOCKS,
-		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(cast6_xts_dec_8way) }
+		.fn_u = { .xts = cast6_xts_dec_8way }
 	}, {
 		.num_blocks = 1,
-		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(cast6_xts_dec) }
+		.fn_u = { .xts = cast6_xts_dec }
 	} }
 };
 
@@ -169,8 +151,7 @@
 
 static int cbc_encrypt(struct skcipher_request *req)
 {
-	return glue_cbc_encrypt_req_128bit(GLUE_FUNC_CAST(__cast6_encrypt),
-					   req);
+	return glue_cbc_encrypt_req_128bit(__cast6_encrypt, req);
 }
 
 static int cbc_decrypt(struct skcipher_request *req)
@@ -192,7 +173,6 @@
 				unsigned int keylen)
 {
 	struct cast6_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
-	u32 *flags = &tfm->base.crt_flags;
 	int err;
 
 	err = xts_verify_key(tfm, key, keylen);
@@ -200,13 +180,12 @@
 		return err;
 
 	/* first half of xts-key is for crypt */
-	err = __cast6_setkey(&ctx->crypt_ctx, key, keylen / 2, flags);
+	err = __cast6_setkey(&ctx->crypt_ctx, key, keylen / 2);
 	if (err)
 		return err;
 
 	/* second half of xts-key is for tweak */
-	return __cast6_setkey(&ctx->tweak_ctx, key + keylen / 2, keylen / 2,
-			      flags);
+	return __cast6_setkey(&ctx->tweak_ctx, key + keylen / 2, keylen / 2);
 }
 
 static int xts_encrypt(struct skcipher_request *req)
@@ -214,9 +193,8 @@
 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
 	struct cast6_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
 
-	return glue_xts_req_128bit(&cast6_enc_xts, req,
-				   XTS_TWEAK_CAST(__cast6_encrypt),
-				   &ctx->tweak_ctx, &ctx->crypt_ctx);
+	return glue_xts_req_128bit(&cast6_enc_xts, req, __cast6_encrypt,
+				   &ctx->tweak_ctx, &ctx->crypt_ctx, false);
 }
 
 static int xts_decrypt(struct skcipher_request *req)
@@ -224,9 +202,8 @@
 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
 	struct cast6_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
 
-	return glue_xts_req_128bit(&cast6_dec_xts, req,
-				   XTS_TWEAK_CAST(__cast6_encrypt),
-				   &ctx->tweak_ctx, &ctx->crypt_ctx);
+	return glue_xts_req_128bit(&cast6_dec_xts, req, __cast6_encrypt,
+				   &ctx->tweak_ctx, &ctx->crypt_ctx, true);
 }
 
 static struct skcipher_alg cast6_algs[] = {