2023-12-11 d2ccde1c8e90d38cee87a1b0309ad2827f3fd30d
kernel/arch/x86/crypto/camellia_aesni_avx2_glue.c
@@ -1,13 +1,8 @@
+// SPDX-License-Identifier: GPL-2.0-or-later
 /*
  * Glue Code for x86_64/AVX2/AES-NI assembler optimized version of Camellia
  *
  * Copyright © 2013 Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation; either version 2 of the License, or
- * (at your option) any later version.
- *
  */
 
 #include <asm/crypto/camellia.h>
@@ -24,20 +19,17 @@
 #define CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS 32
 
 /* 32-way AVX2/AES-NI parallel cipher functions */
-asmlinkage void camellia_ecb_enc_32way(struct camellia_ctx *ctx, u8 *dst,
-				       const u8 *src);
-asmlinkage void camellia_ecb_dec_32way(struct camellia_ctx *ctx, u8 *dst,
-				       const u8 *src);
+asmlinkage void camellia_ecb_enc_32way(const void *ctx, u8 *dst, const u8 *src);
+asmlinkage void camellia_ecb_dec_32way(const void *ctx, u8 *dst, const u8 *src);
 
-asmlinkage void camellia_cbc_dec_32way(struct camellia_ctx *ctx, u8 *dst,
-				       const u8 *src);
-asmlinkage void camellia_ctr_32way(struct camellia_ctx *ctx, u8 *dst,
-				   const u8 *src, le128 *iv);
+asmlinkage void camellia_cbc_dec_32way(const void *ctx, u8 *dst, const u8 *src);
+asmlinkage void camellia_ctr_32way(const void *ctx, u8 *dst, const u8 *src,
+				   le128 *iv);
 
-asmlinkage void camellia_xts_enc_32way(struct camellia_ctx *ctx, u8 *dst,
-				       const u8 *src, le128 *iv);
-asmlinkage void camellia_xts_dec_32way(struct camellia_ctx *ctx, u8 *dst,
-				       const u8 *src, le128 *iv);
+asmlinkage void camellia_xts_enc_32way(const void *ctx, u8 *dst, const u8 *src,
+				       le128 *iv);
+asmlinkage void camellia_xts_dec_32way(const void *ctx, u8 *dst, const u8 *src,
+				       le128 *iv);
 
 static const struct common_glue_ctx camellia_enc = {
 	.num_funcs = 4,
@@ -45,16 +37,16 @@
 
 	.funcs = { {
 		.num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
-		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_enc_32way) }
+		.fn_u = { .ecb = camellia_ecb_enc_32way }
 	}, {
 		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
-		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_enc_16way) }
+		.fn_u = { .ecb = camellia_ecb_enc_16way }
 	}, {
 		.num_blocks = 2,
-		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_enc_blk_2way) }
+		.fn_u = { .ecb = camellia_enc_blk_2way }
 	}, {
 		.num_blocks = 1,
-		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_enc_blk) }
+		.fn_u = { .ecb = camellia_enc_blk }
 	} }
 };
 
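
The GLUE_FUNC_CAST()/GLUE_CTR_FUNC_CAST()/GLUE_XTS_FUNC_CAST() wrappers in the hunks above existed only because the asm stubs took struct camellia_ctx * while the common_glue_ctx function-pointer slots use a generic context pointer. With the stubs re-declared to take const void *ctx, the prototypes match the table's pointer types and the entries can be assigned directly, which also keeps every indirect call on a matching prototype (the property that schemes such as Control Flow Integrity check). Below is a minimal standalone sketch of that idea in plain userspace C, with illustrative names rather than the kernel's actual glue_helper definitions:

/*
 * Minimal standalone sketch (plain userspace C, illustrative names only,
 * not the kernel's glue_helper types): why matching prototypes let the
 * casts be dropped.
 */
#include <stdio.h>

struct camellia_ctx { int dummy; };

/* Old style: the cipher routine takes its own context type. */
static void enc_old(struct camellia_ctx *ctx, unsigned char *dst,
		    const unsigned char *src)
{
	(void)ctx;
	dst[0] = src[0] ^ 0x55;
}

/* Generic slot type, comparable to the common_glue_ctx .ecb pointer. */
typedef void (*glue_func_t)(const void *ctx, unsigned char *dst,
			    const unsigned char *src);

/* New style: the routine itself uses the generic prototype and converts
 * the context pointer internally. */
static void enc_new(const void *ctx, unsigned char *dst,
		    const unsigned char *src)
{
	const struct camellia_ctx *c = ctx;

	(void)c;
	dst[0] = src[0] ^ 0x55;
}

int main(void)
{
	struct camellia_ctx ctx = { 0 };
	unsigned char in = 0xaa, out = 0;

	/* Old pattern: a forced cast, i.e. what GLUE_FUNC_CAST() did.
	 * Calling through the mismatched prototype would be undefined
	 * behaviour, so it is never invoked here. */
	glue_func_t legacy = (glue_func_t)enc_old;
	(void)legacy;

	/* New pattern: direct, type-checked assignment, no cast needed. */
	glue_func_t fn = enc_new;

	fn(&ctx, &out, &in);
	printf("0x%02x\n", out);
	return 0;
}
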
@@ -64,16 +56,16 @@
 
 	.funcs = { {
 		.num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
-		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_ctr_32way) }
+		.fn_u = { .ctr = camellia_ctr_32way }
	}, {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
-		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_ctr_16way) }
+		.fn_u = { .ctr = camellia_ctr_16way }
	}, {
		.num_blocks = 2,
-		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_crypt_ctr_2way) }
+		.fn_u = { .ctr = camellia_crypt_ctr_2way }
	}, {
		.num_blocks = 1,
-		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_crypt_ctr) }
+		.fn_u = { .ctr = camellia_crypt_ctr }
	} }
 };
 
@@ -83,13 +75,13 @@
 
 	.funcs = { {
 		.num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
-		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_enc_32way) }
+		.fn_u = { .xts = camellia_xts_enc_32way }
	}, {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
-		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_enc_16way) }
+		.fn_u = { .xts = camellia_xts_enc_16way }
	}, {
		.num_blocks = 1,
-		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_enc) }
+		.fn_u = { .xts = camellia_xts_enc }
	} }
 };
 
@@ -99,16 +91,16 @@
 
 	.funcs = { {
 		.num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
-		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_dec_32way) }
+		.fn_u = { .ecb = camellia_ecb_dec_32way }
	}, {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
-		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_dec_16way) }
+		.fn_u = { .ecb = camellia_ecb_dec_16way }
	}, {
		.num_blocks = 2,
-		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_dec_blk_2way) }
+		.fn_u = { .ecb = camellia_dec_blk_2way }
	}, {
		.num_blocks = 1,
-		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_dec_blk) }
+		.fn_u = { .ecb = camellia_dec_blk }
	} }
 };
 
@@ -118,16 +110,16 @@
 
 	.funcs = { {
 		.num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
-		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_cbc_dec_32way) }
+		.fn_u = { .cbc = camellia_cbc_dec_32way }
	}, {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
-		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_cbc_dec_16way) }
+		.fn_u = { .cbc = camellia_cbc_dec_16way }
	}, {
		.num_blocks = 2,
-		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_decrypt_cbc_2way) }
+		.fn_u = { .cbc = camellia_decrypt_cbc_2way }
	}, {
		.num_blocks = 1,
-		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_dec_blk) }
+		.fn_u = { .cbc = camellia_dec_blk }
	} }
 };
 
@@ -137,21 +129,20 @@
 
 	.funcs = { {
 		.num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
-		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_dec_32way) }
+		.fn_u = { .xts = camellia_xts_dec_32way }
	}, {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
-		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_dec_16way) }
+		.fn_u = { .xts = camellia_xts_dec_16way }
	}, {
		.num_blocks = 1,
-		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_dec) }
+		.fn_u = { .xts = camellia_xts_dec }
	} }
 };
 
 static int camellia_setkey(struct crypto_skcipher *tfm, const u8 *key,
 			   unsigned int keylen)
 {
-	return __camellia_setkey(crypto_skcipher_ctx(tfm), key, keylen,
-				 &tfm->base.crt_flags);
+	return __camellia_setkey(crypto_skcipher_ctx(tfm), key, keylen);
 }
 
 static int ecb_encrypt(struct skcipher_request *req)
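
The camellia_setkey() change above follows a crypto-API-wide cleanup: key-length errors are now reported through the return value alone, so __camellia_setkey() no longer takes the &tfm->base.crt_flags pointer it previously used to flag a bad key length. A toy userspace sketch of the resulting calling convention (illustrative names, not the kernel helper):

/* Toy sketch, illustrative names only: setkey-style validation reported
 * purely through the return value, with no flags out-parameter. */
#include <errno.h>
#include <stdio.h>
#include <string.h>

struct toy_ctx {
	unsigned char key[32];
	unsigned int keylen;
};

static int toy_setkey(struct toy_ctx *ctx, const unsigned char *key,
		      unsigned int keylen)
{
	/* Camellia accepts 128-, 192- and 256-bit keys. */
	if (keylen != 16 && keylen != 24 && keylen != 32)
		return -EINVAL;	/* previously: also set a bad-key-len flag */

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;
	return 0;
}

int main(void)
{
	struct toy_ctx ctx;
	static const unsigned char key[16];

	printf("16-byte key -> %d\n", toy_setkey(&ctx, key, sizeof(key)));
	printf("10-byte key -> %d\n", toy_setkey(&ctx, key, 10));
	return 0;
}
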
@@ -166,8 +157,7 @@
 
 static int cbc_encrypt(struct skcipher_request *req)
 {
-	return glue_cbc_encrypt_req_128bit(GLUE_FUNC_CAST(camellia_enc_blk),
-					   req);
+	return glue_cbc_encrypt_req_128bit(camellia_enc_blk, req);
 }
 
 static int cbc_decrypt(struct skcipher_request *req)
@@ -185,9 +175,8 @@
 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
 	struct camellia_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
 
-	return glue_xts_req_128bit(&camellia_enc_xts, req,
-				   XTS_TWEAK_CAST(camellia_enc_blk),
-				   &ctx->tweak_ctx, &ctx->crypt_ctx);
+	return glue_xts_req_128bit(&camellia_enc_xts, req, camellia_enc_blk,
+				   &ctx->tweak_ctx, &ctx->crypt_ctx, false);
 }
 
 static int xts_decrypt(struct skcipher_request *req)
@@ -195,9 +184,8 @@
 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
 	struct camellia_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
 
-	return glue_xts_req_128bit(&camellia_dec_xts, req,
-				   XTS_TWEAK_CAST(camellia_enc_blk),
-				   &ctx->tweak_ctx, &ctx->crypt_ctx);
+	return glue_xts_req_128bit(&camellia_dec_xts, req, camellia_enc_blk,
+				   &ctx->tweak_ctx, &ctx->crypt_ctx, true);
 }
 
 static struct skcipher_alg camellia_algs[] = {
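
In the two XTS hunks, camellia_enc_blk is still passed as the tweak function for both directions, since XTS always encrypts the tweak; dropping XTS_TWEAK_CAST() is the same prototype-matching cleanup as for the other casts. The data direction is instead selected by the new trailing bool argument of glue_xts_req_128bit(), false in xts_encrypt() and true in xts_decrypt(). A trivial sketch of that calling shape (illustrative names only):

/* Trivial sketch, illustrative names only: one request helper taking a
 * trailing "decrypt" flag, so the two wrappers differ only in that
 * final argument. */
#include <stdbool.h>
#include <stdio.h>

static int toy_xts_req(const char *table, bool decrypt)
{
	printf("%s: %s\n", table, decrypt ? "decrypt" : "encrypt");
	return 0;
}

static int toy_xts_encrypt(void)
{
	return toy_xts_req("camellia_enc_xts", false);
}

static int toy_xts_decrypt(void)
{
	return toy_xts_req("camellia_dec_xts", true);
}

int main(void)
{
	toy_xts_encrypt();
	toy_xts_decrypt();
	return 0;
}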