commit 748e4f3d702def1a4bff191e0cf93b6a05340f01 (2024-05-10)
--- a/kernel/arch/sparc/crypto/camellia_glue.c
+++ b/kernel/arch/sparc/crypto/camellia_glue.c
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: GPL-2.0-only
 /* Glue code for CAMELLIA encryption optimized for sparc64 crypto opcodes.
  *
  * Copyright (C) 2012 David S. Miller <davem@davemloft.net>
@@ -11,6 +12,7 @@
 #include <linux/mm.h>
 #include <linux/types.h>
 #include <crypto/algapi.h>
+#include <crypto/internal/skcipher.h>
 
 #include <asm/fpumacro.h>
 #include <asm/pstate.h>
@@ -37,18 +39,21 @@
 {
 	struct camellia_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);
 	const u32 *in_key = (const u32 *) _in_key;
-	u32 *flags = &tfm->crt_flags;
 
-	if (key_len != 16 && key_len != 24 && key_len != 32) {
-		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
+	if (key_len != 16 && key_len != 24 && key_len != 32)
 		return -EINVAL;
-	}
 
 	ctx->key_len = key_len;
 
 	camellia_sparc64_key_expand(in_key, &ctx->encrypt_key[0],
 				    key_len, &ctx->decrypt_key[0]);
 	return 0;
+}
+
+static int camellia_set_key_skcipher(struct crypto_skcipher *tfm,
+				     const u8 *in_key, unsigned int key_len)
+{
+	return camellia_set_key(crypto_skcipher_tfm(tfm), in_key, key_len);
 }
 
 extern void camellia_sparc64_crypt(const u64 *key, const u32 *input,
@@ -80,61 +85,46 @@
 extern ecb_crypt_op camellia_sparc64_ecb_crypt_3_grand_rounds;
 extern ecb_crypt_op camellia_sparc64_ecb_crypt_4_grand_rounds;
 
-#define CAMELLIA_BLOCK_MASK	(~(CAMELLIA_BLOCK_SIZE - 1))
-
-static int __ecb_crypt(struct blkcipher_desc *desc,
-		       struct scatterlist *dst, struct scatterlist *src,
-		       unsigned int nbytes, bool encrypt)
+static int __ecb_crypt(struct skcipher_request *req, bool encrypt)
 {
-	struct camellia_sparc64_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
-	struct blkcipher_walk walk;
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	const struct camellia_sparc64_ctx *ctx = crypto_skcipher_ctx(tfm);
+	struct skcipher_walk walk;
 	ecb_crypt_op *op;
 	const u64 *key;
+	unsigned int nbytes;
 	int err;
 
 	op = camellia_sparc64_ecb_crypt_3_grand_rounds;
 	if (ctx->key_len != 16)
 		op = camellia_sparc64_ecb_crypt_4_grand_rounds;
 
-	blkcipher_walk_init(&walk, dst, src, nbytes);
-	err = blkcipher_walk_virt(desc, &walk);
-	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
+	err = skcipher_walk_virt(&walk, req, true);
+	if (err)
+		return err;
 
 	if (encrypt)
 		key = &ctx->encrypt_key[0];
 	else
 		key = &ctx->decrypt_key[0];
 	camellia_sparc64_load_keys(key, ctx->key_len);
-	while ((nbytes = walk.nbytes)) {
-		unsigned int block_len = nbytes & CAMELLIA_BLOCK_MASK;
-
-		if (likely(block_len)) {
-			const u64 *src64;
-			u64 *dst64;
-
-			src64 = (const u64 *)walk.src.virt.addr;
-			dst64 = (u64 *) walk.dst.virt.addr;
-			op(src64, dst64, block_len, key);
-		}
-		nbytes &= CAMELLIA_BLOCK_SIZE - 1;
-		err = blkcipher_walk_done(desc, &walk, nbytes);
+	while ((nbytes = walk.nbytes) != 0) {
+		op(walk.src.virt.addr, walk.dst.virt.addr,
+		   round_down(nbytes, CAMELLIA_BLOCK_SIZE), key);
+		err = skcipher_walk_done(&walk, nbytes % CAMELLIA_BLOCK_SIZE);
 	}
 	fprs_write(0);
 	return err;
 }
 
-static int ecb_encrypt(struct blkcipher_desc *desc,
-		       struct scatterlist *dst, struct scatterlist *src,
-		       unsigned int nbytes)
+static int ecb_encrypt(struct skcipher_request *req)
 {
-	return __ecb_crypt(desc, dst, src, nbytes, true);
+	return __ecb_crypt(req, true);
 }
 
-static int ecb_decrypt(struct blkcipher_desc *desc,
-		       struct scatterlist *dst, struct scatterlist *src,
-		       unsigned int nbytes)
+static int ecb_decrypt(struct skcipher_request *req)
 {
-	return __ecb_crypt(desc, dst, src, nbytes, false);
+	return __ecb_crypt(req, false);
 }
 
 typedef void cbc_crypt_op(const u64 *input, u64 *output, unsigned int len,
@@ -145,85 +135,65 @@
 extern cbc_crypt_op camellia_sparc64_cbc_decrypt_3_grand_rounds;
 extern cbc_crypt_op camellia_sparc64_cbc_decrypt_4_grand_rounds;
 
-static int cbc_encrypt(struct blkcipher_desc *desc,
-		       struct scatterlist *dst, struct scatterlist *src,
-		       unsigned int nbytes)
+static int cbc_encrypt(struct skcipher_request *req)
 {
-	struct camellia_sparc64_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
-	struct blkcipher_walk walk;
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	const struct camellia_sparc64_ctx *ctx = crypto_skcipher_ctx(tfm);
+	struct skcipher_walk walk;
 	cbc_crypt_op *op;
 	const u64 *key;
+	unsigned int nbytes;
 	int err;
 
 	op = camellia_sparc64_cbc_encrypt_3_grand_rounds;
 	if (ctx->key_len != 16)
 		op = camellia_sparc64_cbc_encrypt_4_grand_rounds;
 
-	blkcipher_walk_init(&walk, dst, src, nbytes);
-	err = blkcipher_walk_virt(desc, &walk);
-	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
+	err = skcipher_walk_virt(&walk, req, true);
+	if (err)
+		return err;
 
 	key = &ctx->encrypt_key[0];
 	camellia_sparc64_load_keys(key, ctx->key_len);
-	while ((nbytes = walk.nbytes)) {
-		unsigned int block_len = nbytes & CAMELLIA_BLOCK_MASK;
-
-		if (likely(block_len)) {
-			const u64 *src64;
-			u64 *dst64;
-
-			src64 = (const u64 *)walk.src.virt.addr;
-			dst64 = (u64 *) walk.dst.virt.addr;
-			op(src64, dst64, block_len, key,
-			   (u64 *) walk.iv);
-		}
-		nbytes &= CAMELLIA_BLOCK_SIZE - 1;
-		err = blkcipher_walk_done(desc, &walk, nbytes);
+	while ((nbytes = walk.nbytes) != 0) {
+		op(walk.src.virt.addr, walk.dst.virt.addr,
+		   round_down(nbytes, CAMELLIA_BLOCK_SIZE), key, walk.iv);
+		err = skcipher_walk_done(&walk, nbytes % CAMELLIA_BLOCK_SIZE);
 	}
 	fprs_write(0);
 	return err;
 }
 
-static int cbc_decrypt(struct blkcipher_desc *desc,
-		       struct scatterlist *dst, struct scatterlist *src,
-		       unsigned int nbytes)
+static int cbc_decrypt(struct skcipher_request *req)
 {
-	struct camellia_sparc64_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
-	struct blkcipher_walk walk;
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	const struct camellia_sparc64_ctx *ctx = crypto_skcipher_ctx(tfm);
+	struct skcipher_walk walk;
 	cbc_crypt_op *op;
 	const u64 *key;
+	unsigned int nbytes;
 	int err;
 
 	op = camellia_sparc64_cbc_decrypt_3_grand_rounds;
 	if (ctx->key_len != 16)
 		op = camellia_sparc64_cbc_decrypt_4_grand_rounds;
 
-	blkcipher_walk_init(&walk, dst, src, nbytes);
-	err = blkcipher_walk_virt(desc, &walk);
-	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
+	err = skcipher_walk_virt(&walk, req, true);
+	if (err)
+		return err;
 
 	key = &ctx->decrypt_key[0];
 	camellia_sparc64_load_keys(key, ctx->key_len);
-	while ((nbytes = walk.nbytes)) {
-		unsigned int block_len = nbytes & CAMELLIA_BLOCK_MASK;
-
-		if (likely(block_len)) {
-			const u64 *src64;
-			u64 *dst64;
-
-			src64 = (const u64 *)walk.src.virt.addr;
-			dst64 = (u64 *) walk.dst.virt.addr;
-			op(src64, dst64, block_len, key,
-			   (u64 *) walk.iv);
-		}
-		nbytes &= CAMELLIA_BLOCK_SIZE - 1;
-		err = blkcipher_walk_done(desc, &walk, nbytes);
+	while ((nbytes = walk.nbytes) != 0) {
+		op(walk.src.virt.addr, walk.dst.virt.addr,
+		   round_down(nbytes, CAMELLIA_BLOCK_SIZE), key, walk.iv);
+		err = skcipher_walk_done(&walk, nbytes % CAMELLIA_BLOCK_SIZE);
 	}
 	fprs_write(0);
 	return err;
 }
 
-static struct crypto_alg algs[] = { {
+static struct crypto_alg cipher_alg = {
 	.cra_name		= "camellia",
 	.cra_driver_name	= "camellia-sparc64",
 	.cra_priority		= SPARC_CR_OPCODE_PRIORITY,
@@ -241,46 +211,37 @@
 		.cia_decrypt	= camellia_decrypt
 	}
 	}
-}, {
-	.cra_name		= "ecb(camellia)",
-	.cra_driver_name	= "ecb-camellia-sparc64",
-	.cra_priority		= SPARC_CR_OPCODE_PRIORITY,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
-	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
-	.cra_ctxsize		= sizeof(struct camellia_sparc64_ctx),
-	.cra_alignmask		= 7,
-	.cra_type		= &crypto_blkcipher_type,
-	.cra_module		= THIS_MODULE,
-	.cra_u = {
-		.blkcipher = {
-			.min_keysize	= CAMELLIA_MIN_KEY_SIZE,
-			.max_keysize	= CAMELLIA_MAX_KEY_SIZE,
-			.setkey		= camellia_set_key,
-			.encrypt	= ecb_encrypt,
-			.decrypt	= ecb_decrypt,
-		},
-	},
-}, {
-	.cra_name		= "cbc(camellia)",
-	.cra_driver_name	= "cbc-camellia-sparc64",
-	.cra_priority		= SPARC_CR_OPCODE_PRIORITY,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
-	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
-	.cra_ctxsize		= sizeof(struct camellia_sparc64_ctx),
-	.cra_alignmask		= 7,
-	.cra_type		= &crypto_blkcipher_type,
-	.cra_module		= THIS_MODULE,
-	.cra_u = {
-		.blkcipher = {
-			.min_keysize	= CAMELLIA_MIN_KEY_SIZE,
-			.max_keysize	= CAMELLIA_MAX_KEY_SIZE,
-			.ivsize		= CAMELLIA_BLOCK_SIZE,
-			.setkey		= camellia_set_key,
-			.encrypt	= cbc_encrypt,
-			.decrypt	= cbc_decrypt,
-		},
-	},
-}
+};
+
+static struct skcipher_alg skcipher_algs[] = {
+	{
+		.base.cra_name		= "ecb(camellia)",
+		.base.cra_driver_name	= "ecb-camellia-sparc64",
+		.base.cra_priority	= SPARC_CR_OPCODE_PRIORITY,
+		.base.cra_blocksize	= CAMELLIA_BLOCK_SIZE,
+		.base.cra_ctxsize	= sizeof(struct camellia_sparc64_ctx),
+		.base.cra_alignmask	= 7,
+		.base.cra_module	= THIS_MODULE,
+		.min_keysize		= CAMELLIA_MIN_KEY_SIZE,
+		.max_keysize		= CAMELLIA_MAX_KEY_SIZE,
+		.setkey			= camellia_set_key_skcipher,
+		.encrypt		= ecb_encrypt,
+		.decrypt		= ecb_decrypt,
+	}, {
+		.base.cra_name		= "cbc(camellia)",
+		.base.cra_driver_name	= "cbc-camellia-sparc64",
+		.base.cra_priority	= SPARC_CR_OPCODE_PRIORITY,
+		.base.cra_blocksize	= CAMELLIA_BLOCK_SIZE,
+		.base.cra_ctxsize	= sizeof(struct camellia_sparc64_ctx),
+		.base.cra_alignmask	= 7,
+		.base.cra_module	= THIS_MODULE,
+		.min_keysize		= CAMELLIA_MIN_KEY_SIZE,
+		.max_keysize		= CAMELLIA_MAX_KEY_SIZE,
+		.ivsize			= CAMELLIA_BLOCK_SIZE,
+		.setkey			= camellia_set_key_skcipher,
+		.encrypt		= cbc_encrypt,
+		.decrypt		= cbc_decrypt,
+	}
 };
 
 static bool __init sparc64_has_camellia_opcode(void)
@@ -299,22 +260,27 @@
 
 static int __init camellia_sparc64_mod_init(void)
 {
-	int i;
+	int err;
 
-	for (i = 0; i < ARRAY_SIZE(algs); i++)
-		INIT_LIST_HEAD(&algs[i].cra_list);
-
-	if (sparc64_has_camellia_opcode()) {
-		pr_info("Using sparc64 camellia opcodes optimized CAMELLIA implementation\n");
-		return crypto_register_algs(algs, ARRAY_SIZE(algs));
+	if (!sparc64_has_camellia_opcode()) {
+		pr_info("sparc64 camellia opcodes not available.\n");
+		return -ENODEV;
 	}
-	pr_info("sparc64 camellia opcodes not available.\n");
-	return -ENODEV;
+	pr_info("Using sparc64 camellia opcodes optimized CAMELLIA implementation\n");
+	err = crypto_register_alg(&cipher_alg);
+	if (err)
+		return err;
+	err = crypto_register_skciphers(skcipher_algs,
+					ARRAY_SIZE(skcipher_algs));
+	if (err)
+		crypto_unregister_alg(&cipher_alg);
+	return err;
 }
 
 static void __exit camellia_sparc64_mod_fini(void)
 {
-	crypto_unregister_algs(algs, ARRAY_SIZE(algs));
+	crypto_unregister_alg(&cipher_alg);
+	crypto_unregister_skciphers(skcipher_algs, ARRAY_SIZE(skcipher_algs));
 }
 
 module_init(camellia_sparc64_mod_init);
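
For context, the sketch below shows how a kernel-side caller could exercise the "cbc(camellia)" skcipher that this patch converts to, using the standard synchronous skcipher request API. It is a minimal illustration under stated assumptions, not part of the patch: the function name test_camellia_cbc and the all-zero key, IV, and plaintext are hypothetical, while crypto_alloc_skcipher(), skcipher_request_*(), and crypto_wait_req() are the kernel's existing crypto interfaces.

#include <crypto/skcipher.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

/* Hypothetical caller: encrypt one 16-byte block with cbc(camellia). */
static int test_camellia_cbc(void)
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req = NULL;
	DECLARE_CRYPTO_WAIT(wait);
	struct scatterlist sg;
	u8 key[16] = { 0 };	/* camellia_set_key() accepts 16, 24, or 32 bytes */
	u8 iv[16] = { 0 };	/* ivsize == CAMELLIA_BLOCK_SIZE (16) */
	u8 *buf;
	int err;

	tfm = crypto_alloc_skcipher("cbc(camellia)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_skcipher_setkey(tfm, key, sizeof(key));
	if (err)
		goto out_free_tfm;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	buf = kzalloc(16, GFP_KERNEL);	/* one CAMELLIA block of plaintext */
	if (!req || !buf) {
		err = -ENOMEM;
		goto out_free;
	}

	/* Encrypt in place: src and dst scatterlists both point at buf. */
	sg_init_one(&sg, buf, 16);
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, &sg, &sg, 16, iv);
	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

out_free:
	kfree(buf);
out_free_tfm:
	skcipher_request_free(req);
	crypto_free_skcipher(tfm);
	return err;
}

Because the sparc64 implementations registered here are synchronous, crypto_skcipher_encrypt() completes inline and the wait is a no-op in practice, but the crypto_wait_req() idiom keeps the caller correct for any backend that might service "cbc(camellia)".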