2023-12-11 d2ccde1c8e90d38cee87a1b0309ad2827f3fd30d
kernel/arch/x86/crypto/serpent_avx2_glue.c
@@ -1,13 +1,8 @@
+// SPDX-License-Identifier: GPL-2.0-or-later
 /*
  * Glue Code for x86_64/AVX2 assembler optimized version of Serpent
  *
  * Copyright © 2012-2013 Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation; either version 2 of the License, or
- * (at your option) any later version.
- *
  */
 
 #include <linux/module.h>
@@ -24,18 +19,16 @@
 #define SERPENT_AVX2_PARALLEL_BLOCKS 16
 
 /* 16-way AVX2 parallel cipher functions */
-asmlinkage void serpent_ecb_enc_16way(struct serpent_ctx *ctx, u8 *dst,
-				      const u8 *src);
-asmlinkage void serpent_ecb_dec_16way(struct serpent_ctx *ctx, u8 *dst,
-				      const u8 *src);
-asmlinkage void serpent_cbc_dec_16way(void *ctx, u128 *dst, const u128 *src);
+asmlinkage void serpent_ecb_enc_16way(const void *ctx, u8 *dst, const u8 *src);
+asmlinkage void serpent_ecb_dec_16way(const void *ctx, u8 *dst, const u8 *src);
+asmlinkage void serpent_cbc_dec_16way(const void *ctx, u8 *dst, const u8 *src);
 
-asmlinkage void serpent_ctr_16way(void *ctx, u128 *dst, const u128 *src,
+asmlinkage void serpent_ctr_16way(const void *ctx, u8 *dst, const u8 *src,
 				  le128 *iv);
-asmlinkage void serpent_xts_enc_16way(struct serpent_ctx *ctx, u8 *dst,
-				      const u8 *src, le128 *iv);
-asmlinkage void serpent_xts_dec_16way(struct serpent_ctx *ctx, u8 *dst,
-				      const u8 *src, le128 *iv);
+asmlinkage void serpent_xts_enc_16way(const void *ctx, u8 *dst, const u8 *src,
+				      le128 *iv);
+asmlinkage void serpent_xts_dec_16way(const void *ctx, u8 *dst, const u8 *src,
+				      le128 *iv);
 
 static int serpent_setkey_skcipher(struct crypto_skcipher *tfm,
 				   const u8 *key, unsigned int keylen)
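[Editor's note: the prototypes above are rewritten to match the glue_helper function-pointer types exactly, which is what lets the fn_u casts in the tables below be dropped. As a reference, a sketch of those typedefs, assuming the definitions in arch/x86/include/asm/crypto/glue_helper.h as of this change:

	/* one common prototype per mode, taking an opaque const context */
	typedef void (*common_glue_func_t)(const void *ctx, u8 *dst,
					   const u8 *src);
	typedef void (*common_glue_cbc_func_t)(const void *ctx, u8 *dst,
					       const u8 *src);
	typedef void (*common_glue_ctr_func_t)(const void *ctx, u8 *dst,
					       const u8 *src, le128 *iv);
	typedef void (*common_glue_xts_func_t)(const void *ctx, u8 *dst,
					       const u8 *src, le128 *iv);
]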
@@ -49,13 +42,13 @@
 
 	.funcs = { {
 		.num_blocks = 16,
-		.fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_enc_16way) }
+		.fn_u = { .ecb = serpent_ecb_enc_16way }
 	}, {
 		.num_blocks = 8,
-		.fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_enc_8way_avx) }
+		.fn_u = { .ecb = serpent_ecb_enc_8way_avx }
 	}, {
 		.num_blocks = 1,
-		.fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_encrypt) }
+		.fn_u = { .ecb = __serpent_encrypt }
 	} }
 };
 
@@ -65,13 +58,13 @@
 
 	.funcs = { {
 		.num_blocks = 16,
-		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(serpent_ctr_16way) }
+		.fn_u = { .ctr = serpent_ctr_16way }
 	}, {
 		.num_blocks = 8,
-		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(serpent_ctr_8way_avx) }
+		.fn_u = { .ctr = serpent_ctr_8way_avx }
 	}, {
 		.num_blocks = 1,
-		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(__serpent_crypt_ctr) }
+		.fn_u = { .ctr = __serpent_crypt_ctr }
 	} }
 };
 
@@ -81,13 +74,13 @@
 
 	.funcs = { {
 		.num_blocks = 16,
-		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_enc_16way) }
+		.fn_u = { .xts = serpent_xts_enc_16way }
 	}, {
 		.num_blocks = 8,
-		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_enc_8way_avx) }
+		.fn_u = { .xts = serpent_xts_enc_8way_avx }
 	}, {
 		.num_blocks = 1,
-		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_enc) }
+		.fn_u = { .xts = serpent_xts_enc }
 	} }
 };
 
@@ -97,13 +90,13 @@
 
 	.funcs = { {
 		.num_blocks = 16,
-		.fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_dec_16way) }
+		.fn_u = { .ecb = serpent_ecb_dec_16way }
 	}, {
 		.num_blocks = 8,
-		.fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_dec_8way_avx) }
+		.fn_u = { .ecb = serpent_ecb_dec_8way_avx }
 	}, {
 		.num_blocks = 1,
-		.fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_decrypt) }
+		.fn_u = { .ecb = __serpent_decrypt }
 	} }
 };
 
@@ -113,13 +106,13 @@
 
 	.funcs = { {
 		.num_blocks = 16,
-		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(serpent_cbc_dec_16way) }
+		.fn_u = { .cbc = serpent_cbc_dec_16way }
 	}, {
 		.num_blocks = 8,
-		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(serpent_cbc_dec_8way_avx) }
+		.fn_u = { .cbc = serpent_cbc_dec_8way_avx }
 	}, {
 		.num_blocks = 1,
-		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(__serpent_decrypt) }
+		.fn_u = { .cbc = __serpent_decrypt }
 	} }
 };
 
@@ -129,13 +122,13 @@
 
 	.funcs = { {
 		.num_blocks = 16,
-		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_dec_16way) }
+		.fn_u = { .xts = serpent_xts_dec_16way }
 	}, {
 		.num_blocks = 8,
-		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_dec_8way_avx) }
+		.fn_u = { .xts = serpent_xts_dec_8way_avx }
 	}, {
 		.num_blocks = 1,
-		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_dec) }
+		.fn_u = { .xts = serpent_xts_dec }
 	} }
 };
 
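[Editor's note: all six common_glue_ctx tables above now assign the cipher functions directly instead of laundering them through the GLUE_*_FUNC_CAST macros. Those macros were plain function-pointer casts, roughly of this form (an assumption based on the pre-change glue_helper.h; exact definitions may differ):

	/* removed by this change: casts that silenced prototype mismatches */
	#define GLUE_FUNC_CAST(fn)	((common_glue_func_t)(fn))
	#define GLUE_CBC_FUNC_CAST(fn)	((common_glue_cbc_func_t)(fn))
	#define GLUE_CTR_FUNC_CAST(fn)	((common_glue_ctr_func_t)(fn))
	#define GLUE_XTS_FUNC_CAST(fn)	((common_glue_xts_func_t)(fn))

With the casts gone, the compiler type-checks every .fn_u assignment, and indirect calls go through pointers whose types match the callee exactly, which is what Control Flow Integrity (CFI) schemes require.]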
@@ -151,8 +144,7 @@
 
 static int cbc_encrypt(struct skcipher_request *req)
 {
-	return glue_cbc_encrypt_req_128bit(GLUE_FUNC_CAST(__serpent_encrypt),
-					   req);
+	return glue_cbc_encrypt_req_128bit(__serpent_encrypt, req);
 }
 
 static int cbc_decrypt(struct skcipher_request *req)
@@ -171,8 +163,8 @@
 	struct serpent_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
 
 	return glue_xts_req_128bit(&serpent_enc_xts, req,
-				   XTS_TWEAK_CAST(__serpent_encrypt),
-				   &ctx->tweak_ctx, &ctx->crypt_ctx);
+				   __serpent_encrypt, &ctx->tweak_ctx,
+				   &ctx->crypt_ctx, false);
 }
 
 static int xts_decrypt(struct skcipher_request *req)
@@ -181,8 +173,8 @@
 	struct serpent_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
 
 	return glue_xts_req_128bit(&serpent_dec_xts, req,
-				   XTS_TWEAK_CAST(__serpent_encrypt),
-				   &ctx->tweak_ctx, &ctx->crypt_ctx);
+				   __serpent_encrypt, &ctx->tweak_ctx,
+				   &ctx->crypt_ctx, true);
 }
 
 static struct skcipher_alg serpent_algs[] = {
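[Editor's note: besides dropping XTS_TWEAK_CAST in favor of a direct, type-checked reference to __serpent_encrypt, the two XTS hunks above pass a new trailing boolean to glue_xts_req_128bit: false on the encrypt path, true on the decrypt path. Assuming the post-change declaration in glue_helper.h, the helper looks roughly like:

	/* decrypt selects the direction-specific handling inside the helper */
	extern int glue_xts_req_128bit(const struct common_glue_ctx *gctx,
				       struct skcipher_request *req,
				       common_glue_func_t tweak_fn,
				       void *tweak_ctx, void *crypt_ctx,
				       bool decrypt);

The flag matters because encryption and decryption diverge in the shared helper's handling of the final partial block (XTS ciphertext stealing); this is an inference from the call sites, not confirmed by the diff itself.]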