```diff
@@ -1,13 +1,8 @@
+// SPDX-License-Identifier: GPL-2.0-or-later
 /*
  * Glue Code for x86_64/AVX2 assembler optimized version of Serpent
  *
  * Copyright © 2012-2013 Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation; either version 2 of the License, or
- * (at your option) any later version.
- *
  */
 
 #include <linux/module.h>
@@ -24,18 +19,16 @@
 #define SERPENT_AVX2_PARALLEL_BLOCKS 16
 
 /* 16-way AVX2 parallel cipher functions */
-asmlinkage void serpent_ecb_enc_16way(struct serpent_ctx *ctx, u8 *dst,
-				      const u8 *src);
-asmlinkage void serpent_ecb_dec_16way(struct serpent_ctx *ctx, u8 *dst,
-				      const u8 *src);
-asmlinkage void serpent_cbc_dec_16way(void *ctx, u128 *dst, const u128 *src);
+asmlinkage void serpent_ecb_enc_16way(const void *ctx, u8 *dst, const u8 *src);
+asmlinkage void serpent_ecb_dec_16way(const void *ctx, u8 *dst, const u8 *src);
+asmlinkage void serpent_cbc_dec_16way(const void *ctx, u8 *dst, const u8 *src);
 
-asmlinkage void serpent_ctr_16way(void *ctx, u128 *dst, const u128 *src,
+asmlinkage void serpent_ctr_16way(const void *ctx, u8 *dst, const u8 *src,
 				  le128 *iv);
-asmlinkage void serpent_xts_enc_16way(struct serpent_ctx *ctx, u8 *dst,
-				      const u8 *src, le128 *iv);
-asmlinkage void serpent_xts_dec_16way(struct serpent_ctx *ctx, u8 *dst,
-				      const u8 *src, le128 *iv);
+asmlinkage void serpent_xts_enc_16way(const void *ctx, u8 *dst, const u8 *src,
+				      le128 *iv);
+asmlinkage void serpent_xts_dec_16way(const void *ctx, u8 *dst, const u8 *src,
+				      le128 *iv);
 
 static int serpent_setkey_skcipher(struct crypto_skcipher *tfm,
 				   const u8 *key, unsigned int keylen)
@@ -49,13 +42,13 @@
 
 	.funcs = { {
 		.num_blocks = 16,
-		.fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_enc_16way) }
+		.fn_u = { .ecb = serpent_ecb_enc_16way }
 	}, {
 		.num_blocks = 8,
-		.fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_enc_8way_avx) }
+		.fn_u = { .ecb = serpent_ecb_enc_8way_avx }
 	}, {
 		.num_blocks = 1,
-		.fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_encrypt) }
+		.fn_u = { .ecb = __serpent_encrypt }
 	} }
 };
 
@@ -65,13 +58,13 @@
 
 	.funcs = { {
 		.num_blocks = 16,
-		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(serpent_ctr_16way) }
+		.fn_u = { .ctr = serpent_ctr_16way }
 	}, {
 		.num_blocks = 8,
-		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(serpent_ctr_8way_avx) }
+		.fn_u = { .ctr = serpent_ctr_8way_avx }
 	}, {
 		.num_blocks = 1,
-		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(__serpent_crypt_ctr) }
+		.fn_u = { .ctr = __serpent_crypt_ctr }
 	} }
 };
 
@@ -81,13 +74,13 @@
 
 	.funcs = { {
 		.num_blocks = 16,
-		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_enc_16way) }
+		.fn_u = { .xts = serpent_xts_enc_16way }
 	}, {
 		.num_blocks = 8,
-		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_enc_8way_avx) }
+		.fn_u = { .xts = serpent_xts_enc_8way_avx }
 	}, {
 		.num_blocks = 1,
-		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_enc) }
+		.fn_u = { .xts = serpent_xts_enc }
 	} }
 };
 
@@ -97,13 +90,13 @@
 
 	.funcs = { {
 		.num_blocks = 16,
-		.fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_dec_16way) }
+		.fn_u = { .ecb = serpent_ecb_dec_16way }
 	}, {
 		.num_blocks = 8,
-		.fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_dec_8way_avx) }
+		.fn_u = { .ecb = serpent_ecb_dec_8way_avx }
 	}, {
 		.num_blocks = 1,
-		.fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_decrypt) }
+		.fn_u = { .ecb = __serpent_decrypt }
 	} }
 };
 
@@ -113,13 +106,13 @@
 
 	.funcs = { {
 		.num_blocks = 16,
-		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(serpent_cbc_dec_16way) }
+		.fn_u = { .cbc = serpent_cbc_dec_16way }
 	}, {
 		.num_blocks = 8,
-		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(serpent_cbc_dec_8way_avx) }
+		.fn_u = { .cbc = serpent_cbc_dec_8way_avx }
 	}, {
 		.num_blocks = 1,
-		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(__serpent_decrypt) }
+		.fn_u = { .cbc = __serpent_decrypt }
 	} }
 };
 
@@ -129,13 +122,13 @@
 
 	.funcs = { {
 		.num_blocks = 16,
-		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_dec_16way) }
+		.fn_u = { .xts = serpent_xts_dec_16way }
 	}, {
 		.num_blocks = 8,
-		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_dec_8way_avx) }
+		.fn_u = { .xts = serpent_xts_dec_8way_avx }
 	}, {
 		.num_blocks = 1,
-		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_dec) }
+		.fn_u = { .xts = serpent_xts_dec }
 	} }
 };
 
@@ -151,8 +144,7 @@
 
 static int cbc_encrypt(struct skcipher_request *req)
 {
-	return glue_cbc_encrypt_req_128bit(GLUE_FUNC_CAST(__serpent_encrypt),
-					   req);
+	return glue_cbc_encrypt_req_128bit(__serpent_encrypt, req);
 }
 
 static int cbc_decrypt(struct skcipher_request *req)
@@ -171,8 +163,8 @@
 	struct serpent_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
 
 	return glue_xts_req_128bit(&serpent_enc_xts, req,
-				   XTS_TWEAK_CAST(__serpent_encrypt),
-				   &ctx->tweak_ctx, &ctx->crypt_ctx);
+				   __serpent_encrypt, &ctx->tweak_ctx,
+				   &ctx->crypt_ctx, false);
 }
 
 static int xts_decrypt(struct skcipher_request *req)
@@ -181,8 +173,8 @@
 	struct serpent_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
 
 	return glue_xts_req_128bit(&serpent_dec_xts, req,
-				   XTS_TWEAK_CAST(__serpent_encrypt),
-				   &ctx->tweak_ctx, &ctx->crypt_ctx);
+				   __serpent_encrypt, &ctx->tweak_ctx,
+				   &ctx->crypt_ctx, true);
 }
 
 static struct skcipher_alg serpent_algs[] = {
```