2023-12-11 d2ccde1c8e90d38cee87a1b0309ad2827f3fd30d
kernel/arch/s390/crypto/paes_s390.c
@@ -5,7 +5,7 @@
  * s390 implementation of the AES Cipher Algorithm with protected keys.
  *
  * s390 Version:
- *   Copyright IBM Corp. 2017
+ *   Copyright IBM Corp. 2017,2020
  *   Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>
  *              Harald Freudenberger <freude@de.ibm.com>
  */
@@ -20,36 +20,115 @@
 #include <linux/module.h>
 #include <linux/cpufeature.h>
 #include <linux/init.h>
+#include <linux/mutex.h>
 #include <linux/spinlock.h>
+#include <crypto/internal/skcipher.h>
 #include <crypto/xts.h>
 #include <asm/cpacf.h>
 #include <asm/pkey.h>
 
+/*
+ * Key blobs smaller/bigger than these defines are rejected
+ * by the common code even before the individual setkey function
+ * is called. As paes can handle different kinds of key blobs
+ * and padding is also possible, the limits need to be generous.
+ */
+#define PAES_MIN_KEYSIZE 16
+#define PAES_MAX_KEYSIZE 320
+
 static u8 *ctrblk;
-static DEFINE_SPINLOCK(ctrblk_lock);
+static DEFINE_MUTEX(ctrblk_lock);
 
 static cpacf_mask_t km_functions, kmc_functions, kmctr_functions;
 
+struct key_blob {
+        /*
+         * Small keys will be stored in the keybuf. Larger keys are
+         * stored in extra allocated memory. In both cases, key
+         * points to the memory where the key is stored.
+         * The code distinguishes by checking keylen against
+         * sizeof(keybuf). See the two following helper functions.
+         */
+        u8 *key;
+        u8 keybuf[128];
+        unsigned int keylen;
+};
+
+static inline int _key_to_kb(struct key_blob *kb,
+                             const u8 *key,
+                             unsigned int keylen)
+{
+        struct clearkey_header {
+                u8  type;
+                u8  res0[3];
+                u8  version;
+                u8  res1[3];
+                u32 keytype;
+                u32 len;
+        } __packed * h;
+
+        switch (keylen) {
+        case 16:
+        case 24:
+        case 32:
+                /* clear key value, prepare pkey clear key token in keybuf */
+                memset(kb->keybuf, 0, sizeof(kb->keybuf));
+                h = (struct clearkey_header *) kb->keybuf;
+                h->version = 0x02; /* TOKVER_CLEAR_KEY */
+                h->keytype = (keylen - 8) >> 3;
+                h->len = keylen;
+                memcpy(kb->keybuf + sizeof(*h), key, keylen);
+                kb->keylen = sizeof(*h) + keylen;
+                kb->key = kb->keybuf;
+                break;
+        default:
+                /* other key material, let pkey handle this */
+                if (keylen <= sizeof(kb->keybuf))
+                        kb->key = kb->keybuf;
+                else {
+                        kb->key = kmalloc(keylen, GFP_KERNEL);
+                        if (!kb->key)
+                                return -ENOMEM;
+                }
+                memcpy(kb->key, key, keylen);
+                kb->keylen = keylen;
+                break;
+        }
+
+        return 0;
+}
+
+static inline void _free_kb_keybuf(struct key_blob *kb)
+{
+        if (kb->key && kb->key != kb->keybuf
+            && kb->keylen > sizeof(kb->keybuf)) {
+                kfree(kb->key);
+                kb->key = NULL;
+        }
+}
+
 struct s390_paes_ctx {
-        struct pkey_seckey sk;
+        struct key_blob kb;
         struct pkey_protkey pk;
+        spinlock_t pk_lock;
         unsigned long fc;
 };
 
 struct s390_pxts_ctx {
-        struct pkey_seckey sk[2];
+        struct key_blob kb[2];
         struct pkey_protkey pk[2];
+        spinlock_t pk_lock;
         unsigned long fc;
 };
 
-static inline int __paes_convert_key(struct pkey_seckey *sk,
+static inline int __paes_keyblob2pkey(struct key_blob *kb,
                                      struct pkey_protkey *pk)
 {
         int i, ret;
 
         /* try three times in case of failure */
         for (i = 0; i < 3; i++) {
-                ret = pkey_skey2pkey(sk, pk);
+                ret = pkey_keyblob2pkey(kb->key, kb->keylen, pk);
                 if (ret == 0)
                         break;
         }
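
For illustration, this is what the new _key_to_kb() helper above builds when it is handed a clear key. The following hypothetical caller is not part of the patch; the header layout and values follow directly from the hunk above.

/* Hypothetical caller of _key_to_kb(), shown only to illustrate the
 * clear-key token built above; not part of the patch. */
static int example_clear_key_to_token(void)
{
        static const u8 clear_key[32];  /* raw AES-256 key material */
        struct key_blob kb;
        int rc;

        rc = _key_to_kb(&kb, clear_key, sizeof(clear_key));
        if (rc)
                return rc;

        /*
         * kb.keybuf now holds a pkey clear-key token:
         *   version = 0x02 (TOKVER_CLEAR_KEY)
         *   keytype = (32 - 8) >> 3 = 3, i.e. an AES-256 key
         *   len     = 32
         * The 16-byte packed header plus 32 key bytes give
         * kb.keylen = 48, so the token fits in kb.keybuf and
         * kb.key == kb.keybuf.
         */
        _free_kb_keybuf(&kb);   /* no-op here: nothing was kmalloc'ed */
        return 0;
}
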
@@ -57,11 +136,42 @@
         return ret;
 }
 
-static int __paes_set_key(struct s390_paes_ctx *ctx)
+static inline int __paes_convert_key(struct s390_paes_ctx *ctx)
+{
+        struct pkey_protkey pkey;
+
+        if (__paes_keyblob2pkey(&ctx->kb, &pkey))
+                return -EINVAL;
+
+        spin_lock_bh(&ctx->pk_lock);
+        memcpy(&ctx->pk, &pkey, sizeof(pkey));
+        spin_unlock_bh(&ctx->pk_lock);
+
+        return 0;
+}
+
+static int ecb_paes_init(struct crypto_skcipher *tfm)
+{
+        struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);
+
+        ctx->kb.key = NULL;
+        spin_lock_init(&ctx->pk_lock);
+
+        return 0;
+}
+
+static void ecb_paes_exit(struct crypto_skcipher *tfm)
+{
+        struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);
+
+        _free_kb_keybuf(&ctx->kb);
+}
+
+static inline int __ecb_paes_set_key(struct s390_paes_ctx *ctx)
 {
         unsigned long fc;
 
-        if (__paes_convert_key(&ctx->sk, &ctx->pk))
+        if (__paes_convert_key(ctx))
                 return -EINVAL;
 
         /* Pick the correct function code based on the protected key type */
@@ -75,92 +185,106 @@
         return ctx->fc ? 0 : -EINVAL;
 }
 
-static int ecb_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
+static int ecb_paes_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
                             unsigned int key_len)
 {
-        struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);
+        int rc;
+        struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);
 
-        if (key_len != SECKEYBLOBSIZE)
-                return -EINVAL;
+        _free_kb_keybuf(&ctx->kb);
+        rc = _key_to_kb(&ctx->kb, in_key, key_len);
+        if (rc)
+                return rc;
 
-        memcpy(ctx->sk.seckey, in_key, SECKEYBLOBSIZE);
-        if (__paes_set_key(ctx)) {
-                tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
-                return -EINVAL;
-        }
-        return 0;
+        return __ecb_paes_set_key(ctx);
 }
 
-static int ecb_paes_crypt(struct blkcipher_desc *desc,
-                          unsigned long modifier,
-                          struct blkcipher_walk *walk)
+static int ecb_paes_crypt(struct skcipher_request *req, unsigned long modifier)
 {
-        struct s390_paes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
+        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+        struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);
+        struct skcipher_walk walk;
         unsigned int nbytes, n, k;
         int ret;
+        struct {
+                u8 key[MAXPROTKEYSIZE];
+        } param;
 
-        ret = blkcipher_walk_virt(desc, walk);
-        while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
+        ret = skcipher_walk_virt(&walk, req, false);
+        if (ret)
+                return ret;
+
+        spin_lock_bh(&ctx->pk_lock);
+        memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
+        spin_unlock_bh(&ctx->pk_lock);
+
+        while ((nbytes = walk.nbytes) != 0) {
                 /* only use complete blocks */
                 n = nbytes & ~(AES_BLOCK_SIZE - 1);
-                k = cpacf_km(ctx->fc | modifier, ctx->pk.protkey,
-                             walk->dst.virt.addr, walk->src.virt.addr, n);
+                k = cpacf_km(ctx->fc | modifier, &param,
+                             walk.dst.virt.addr, walk.src.virt.addr, n);
                 if (k)
-                        ret = blkcipher_walk_done(desc, walk, nbytes - k);
+                        ret = skcipher_walk_done(&walk, nbytes - k);
                 if (k < n) {
-                        if (__paes_set_key(ctx) != 0)
-                                return blkcipher_walk_done(desc, walk, -EIO);
+                        if (__paes_convert_key(ctx))
+                                return skcipher_walk_done(&walk, -EIO);
+                        spin_lock_bh(&ctx->pk_lock);
+                        memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
+                        spin_unlock_bh(&ctx->pk_lock);
                 }
         }
         return ret;
 }
 
-static int ecb_paes_encrypt(struct blkcipher_desc *desc,
-                            struct scatterlist *dst, struct scatterlist *src,
-                            unsigned int nbytes)
+static int ecb_paes_encrypt(struct skcipher_request *req)
 {
-        struct blkcipher_walk walk;
-
-        blkcipher_walk_init(&walk, dst, src, nbytes);
-        return ecb_paes_crypt(desc, CPACF_ENCRYPT, &walk);
+        return ecb_paes_crypt(req, 0);
 }
 
-static int ecb_paes_decrypt(struct blkcipher_desc *desc,
-                            struct scatterlist *dst, struct scatterlist *src,
-                            unsigned int nbytes)
+static int ecb_paes_decrypt(struct skcipher_request *req)
 {
-        struct blkcipher_walk walk;
-
-        blkcipher_walk_init(&walk, dst, src, nbytes);
-        return ecb_paes_crypt(desc, CPACF_DECRYPT, &walk);
+        return ecb_paes_crypt(req, CPACF_DECRYPT);
 }
 
-static struct crypto_alg ecb_paes_alg = {
-        .cra_name               = "ecb(paes)",
-        .cra_driver_name        = "ecb-paes-s390",
-        .cra_priority           = 401,  /* combo: aes + ecb + 1 */
-        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER,
-        .cra_blocksize          = AES_BLOCK_SIZE,
-        .cra_ctxsize            = sizeof(struct s390_paes_ctx),
-        .cra_type               = &crypto_blkcipher_type,
-        .cra_module             = THIS_MODULE,
-        .cra_list               = LIST_HEAD_INIT(ecb_paes_alg.cra_list),
-        .cra_u                  = {
-                .blkcipher = {
-                        .min_keysize    = SECKEYBLOBSIZE,
-                        .max_keysize    = SECKEYBLOBSIZE,
-                        .setkey         = ecb_paes_set_key,
-                        .encrypt        = ecb_paes_encrypt,
-                        .decrypt        = ecb_paes_decrypt,
-                }
-        }
+static struct skcipher_alg ecb_paes_alg = {
+        .base.cra_name          = "ecb(paes)",
+        .base.cra_driver_name   = "ecb-paes-s390",
+        .base.cra_priority      = 401,  /* combo: aes + ecb + 1 */
+        .base.cra_blocksize     = AES_BLOCK_SIZE,
+        .base.cra_ctxsize       = sizeof(struct s390_paes_ctx),
+        .base.cra_module        = THIS_MODULE,
+        .base.cra_list          = LIST_HEAD_INIT(ecb_paes_alg.base.cra_list),
+        .init                   = ecb_paes_init,
+        .exit                   = ecb_paes_exit,
+        .min_keysize            = PAES_MIN_KEYSIZE,
+        .max_keysize            = PAES_MAX_KEYSIZE,
+        .setkey                 = ecb_paes_set_key,
+        .encrypt                = ecb_paes_encrypt,
+        .decrypt                = ecb_paes_decrypt,
 };
 
-static int __cbc_paes_set_key(struct s390_paes_ctx *ctx)
+static int cbc_paes_init(struct crypto_skcipher *tfm)
+{
+        struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);
+
+        ctx->kb.key = NULL;
+        spin_lock_init(&ctx->pk_lock);
+
+        return 0;
+}
+
+static void cbc_paes_exit(struct crypto_skcipher *tfm)
+{
+        struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);
+
+        _free_kb_keybuf(&ctx->kb);
+}
+
+static inline int __cbc_paes_set_key(struct s390_paes_ctx *ctx)
 {
         unsigned long fc;
 
-        if (__paes_convert_key(&ctx->sk, &ctx->pk))
+        if (__paes_convert_key(ctx))
                 return -EINVAL;
 
         /* Pick the correct function code based on the protected key type */
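
The converted ECB code above is reached through the regular skcipher API, and with the generous PAES_MIN_KEYSIZE/PAES_MAX_KEYSIZE limits any blob in that range now reaches ecb_paes_set_key(). A minimal in-kernel usage sketch, assuming a synchronous caller and abbreviated error handling (this helper is hypothetical, not part of the patch):

#include <crypto/skcipher.h>

static int example_use_ecb_paes(const u8 *key, unsigned int keylen,
                                struct scatterlist *src,
                                struct scatterlist *dst,
                                unsigned int nbytes)
{
        struct crypto_skcipher *tfm;
        struct skcipher_request *req;
        int rc;

        tfm = crypto_alloc_skcipher("ecb(paes)", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        /* clear key or key blob; ecb_paes_set_key() wraps clear keys
         * into a pkey clear-key token via _key_to_kb() */
        rc = crypto_skcipher_setkey(tfm, key, keylen);
        if (rc)
                goto out_free_tfm;

        req = skcipher_request_alloc(tfm, GFP_KERNEL);
        if (!req) {
                rc = -ENOMEM;
                goto out_free_tfm;
        }
        skcipher_request_set_callback(req, 0, NULL, NULL);
        skcipher_request_set_crypt(req, src, dst, nbytes, NULL);

        rc = crypto_skcipher_encrypt(req);    /* synchronous for this driver */

        skcipher_request_free(req);
out_free_tfm:
        crypto_free_skcipher(tfm);
        return rc;
}
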
@@ -174,23 +298,25 @@
         return ctx->fc ? 0 : -EINVAL;
 }
 
-static int cbc_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
+static int cbc_paes_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
                             unsigned int key_len)
 {
-        struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);
+        int rc;
+        struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);
 
-        memcpy(ctx->sk.seckey, in_key, SECKEYBLOBSIZE);
-        if (__cbc_paes_set_key(ctx)) {
-                tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
-                return -EINVAL;
-        }
-        return 0;
+        _free_kb_keybuf(&ctx->kb);
+        rc = _key_to_kb(&ctx->kb, in_key, key_len);
+        if (rc)
+                return rc;
+
+        return __cbc_paes_set_key(ctx);
 }
 
-static int cbc_paes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
-                          struct blkcipher_walk *walk)
+static int cbc_paes_crypt(struct skcipher_request *req, unsigned long modifier)
 {
-        struct s390_paes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
+        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+        struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);
+        struct skcipher_walk walk;
         unsigned int nbytes, n, k;
         int ret;
         struct {
@@ -198,74 +324,103 @@
                 u8 key[MAXPROTKEYSIZE];
         } param;
 
-        ret = blkcipher_walk_virt(desc, walk);
-        memcpy(param.iv, walk->iv, AES_BLOCK_SIZE);
+        ret = skcipher_walk_virt(&walk, req, false);
+        if (ret)
+                return ret;
+
+        memcpy(param.iv, walk.iv, AES_BLOCK_SIZE);
+        spin_lock_bh(&ctx->pk_lock);
         memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
-        while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
+        spin_unlock_bh(&ctx->pk_lock);
+
+        while ((nbytes = walk.nbytes) != 0) {
                 /* only use complete blocks */
                 n = nbytes & ~(AES_BLOCK_SIZE - 1);
                 k = cpacf_kmc(ctx->fc | modifier, &param,
-                              walk->dst.virt.addr, walk->src.virt.addr, n);
-                if (k)
-                        ret = blkcipher_walk_done(desc, walk, nbytes - k);
+                              walk.dst.virt.addr, walk.src.virt.addr, n);
+                if (k) {
+                        memcpy(walk.iv, param.iv, AES_BLOCK_SIZE);
+                        ret = skcipher_walk_done(&walk, nbytes - k);
+                }
                 if (k < n) {
-                        if (__cbc_paes_set_key(ctx) != 0)
-                                return blkcipher_walk_done(desc, walk, -EIO);
+                        if (__paes_convert_key(ctx))
+                                return skcipher_walk_done(&walk, -EIO);
+                        spin_lock_bh(&ctx->pk_lock);
                         memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
+                        spin_unlock_bh(&ctx->pk_lock);
                 }
         }
-        memcpy(walk->iv, param.iv, AES_BLOCK_SIZE);
         return ret;
 }
 
-static int cbc_paes_encrypt(struct blkcipher_desc *desc,
-                            struct scatterlist *dst, struct scatterlist *src,
-                            unsigned int nbytes)
+static int cbc_paes_encrypt(struct skcipher_request *req)
 {
-        struct blkcipher_walk walk;
-
-        blkcipher_walk_init(&walk, dst, src, nbytes);
-        return cbc_paes_crypt(desc, 0, &walk);
+        return cbc_paes_crypt(req, 0);
 }
 
-static int cbc_paes_decrypt(struct blkcipher_desc *desc,
-                            struct scatterlist *dst, struct scatterlist *src,
-                            unsigned int nbytes)
+static int cbc_paes_decrypt(struct skcipher_request *req)
 {
-        struct blkcipher_walk walk;
-
-        blkcipher_walk_init(&walk, dst, src, nbytes);
-        return cbc_paes_crypt(desc, CPACF_DECRYPT, &walk);
+        return cbc_paes_crypt(req, CPACF_DECRYPT);
 }
 
-static struct crypto_alg cbc_paes_alg = {
-        .cra_name               = "cbc(paes)",
-        .cra_driver_name        = "cbc-paes-s390",
-        .cra_priority           = 402,  /* ecb-paes-s390 + 1 */
-        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER,
-        .cra_blocksize          = AES_BLOCK_SIZE,
-        .cra_ctxsize            = sizeof(struct s390_paes_ctx),
-        .cra_type               = &crypto_blkcipher_type,
-        .cra_module             = THIS_MODULE,
-        .cra_list               = LIST_HEAD_INIT(cbc_paes_alg.cra_list),
-        .cra_u                  = {
-                .blkcipher = {
-                        .min_keysize    = SECKEYBLOBSIZE,
-                        .max_keysize    = SECKEYBLOBSIZE,
-                        .ivsize         = AES_BLOCK_SIZE,
-                        .setkey         = cbc_paes_set_key,
-                        .encrypt        = cbc_paes_encrypt,
-                        .decrypt        = cbc_paes_decrypt,
-                }
-        }
+static struct skcipher_alg cbc_paes_alg = {
+        .base.cra_name          = "cbc(paes)",
+        .base.cra_driver_name   = "cbc-paes-s390",
+        .base.cra_priority      = 402,  /* ecb-paes-s390 + 1 */
+        .base.cra_blocksize     = AES_BLOCK_SIZE,
+        .base.cra_ctxsize       = sizeof(struct s390_paes_ctx),
+        .base.cra_module        = THIS_MODULE,
+        .base.cra_list          = LIST_HEAD_INIT(cbc_paes_alg.base.cra_list),
+        .init                   = cbc_paes_init,
+        .exit                   = cbc_paes_exit,
+        .min_keysize            = PAES_MIN_KEYSIZE,
+        .max_keysize            = PAES_MAX_KEYSIZE,
+        .ivsize                 = AES_BLOCK_SIZE,
+        .setkey                 = cbc_paes_set_key,
+        .encrypt                = cbc_paes_encrypt,
+        .decrypt                = cbc_paes_decrypt,
 };
 
-static int __xts_paes_set_key(struct s390_pxts_ctx *ctx)
+static int xts_paes_init(struct crypto_skcipher *tfm)
+{
+        struct s390_pxts_ctx *ctx = crypto_skcipher_ctx(tfm);
+
+        ctx->kb[0].key = NULL;
+        ctx->kb[1].key = NULL;
+        spin_lock_init(&ctx->pk_lock);
+
+        return 0;
+}
+
+static void xts_paes_exit(struct crypto_skcipher *tfm)
+{
+        struct s390_pxts_ctx *ctx = crypto_skcipher_ctx(tfm);
+
+        _free_kb_keybuf(&ctx->kb[0]);
+        _free_kb_keybuf(&ctx->kb[1]);
+}
+
+static inline int __xts_paes_convert_key(struct s390_pxts_ctx *ctx)
+{
+        struct pkey_protkey pkey0, pkey1;
+
+        if (__paes_keyblob2pkey(&ctx->kb[0], &pkey0) ||
+            __paes_keyblob2pkey(&ctx->kb[1], &pkey1))
+                return -EINVAL;
+
+        spin_lock_bh(&ctx->pk_lock);
+        memcpy(&ctx->pk[0], &pkey0, sizeof(pkey0));
+        memcpy(&ctx->pk[1], &pkey1, sizeof(pkey1));
+        spin_unlock_bh(&ctx->pk_lock);
+
+        return 0;
+}
+
+static inline int __xts_paes_set_key(struct s390_pxts_ctx *ctx)
 {
         unsigned long fc;
 
-        if (__paes_convert_key(&ctx->sk[0], &ctx->pk[0]) ||
-            __paes_convert_key(&ctx->sk[1], &ctx->pk[1]))
+        if (__xts_paes_convert_key(ctx))
                 return -EINVAL;
 
         if (ctx->pk[0].type != ctx->pk[1].type)
282437 return ctx->fc ? 0 : -EINVAL;
283438 }
284439
285
-static int xts_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
286
- unsigned int key_len)
440
+static int xts_paes_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
441
+ unsigned int xts_key_len)
287442 {
288
- struct s390_pxts_ctx *ctx = crypto_tfm_ctx(tfm);
443
+ int rc;
444
+ struct s390_pxts_ctx *ctx = crypto_skcipher_ctx(tfm);
289445 u8 ckey[2 * AES_MAX_KEY_SIZE];
290
- unsigned int ckey_len;
446
+ unsigned int ckey_len, key_len;
291447
292
- memcpy(ctx->sk[0].seckey, in_key, SECKEYBLOBSIZE);
293
- memcpy(ctx->sk[1].seckey, in_key + SECKEYBLOBSIZE, SECKEYBLOBSIZE);
294
- if (__xts_paes_set_key(ctx)) {
295
- tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
448
+ if (xts_key_len % 2)
296449 return -EINVAL;
297
- }
450
+
451
+ key_len = xts_key_len / 2;
452
+
453
+ _free_kb_keybuf(&ctx->kb[0]);
454
+ _free_kb_keybuf(&ctx->kb[1]);
455
+ rc = _key_to_kb(&ctx->kb[0], in_key, key_len);
456
+ if (rc)
457
+ return rc;
458
+ rc = _key_to_kb(&ctx->kb[1], in_key + key_len, key_len);
459
+ if (rc)
460
+ return rc;
461
+
462
+ rc = __xts_paes_set_key(ctx);
463
+ if (rc)
464
+ return rc;
298465
299466 /*
300467 * xts_check_key verifies the key length is not odd and makes
@@ -305,13 +472,14 @@
                 AES_KEYSIZE_128 : AES_KEYSIZE_256;
         memcpy(ckey, ctx->pk[0].protkey, ckey_len);
         memcpy(ckey + ckey_len, ctx->pk[1].protkey, ckey_len);
-        return xts_check_key(tfm, ckey, 2*ckey_len);
+        return xts_verify_key(tfm, ckey, 2*ckey_len);
 }
 
-static int xts_paes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
-                          struct blkcipher_walk *walk)
+static int xts_paes_crypt(struct skcipher_request *req, unsigned long modifier)
 {
-        struct s390_pxts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
+        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+        struct s390_pxts_ctx *ctx = crypto_skcipher_ctx(tfm);
+        struct skcipher_walk walk;
         unsigned int keylen, offset, nbytes, n, k;
         int ret;
         struct {
@@ -326,81 +494,92 @@
                 u8 init[16];
         } xts_param;
 
-        ret = blkcipher_walk_virt(desc, walk);
+        ret = skcipher_walk_virt(&walk, req, false);
+        if (ret)
+                return ret;
+
         keylen = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ? 48 : 64;
         offset = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ? 16 : 0;
-retry:
-        memset(&pcc_param, 0, sizeof(pcc_param));
-        memcpy(pcc_param.tweak, walk->iv, sizeof(pcc_param.tweak));
-        memcpy(pcc_param.key + offset, ctx->pk[1].protkey, keylen);
-        cpacf_pcc(ctx->fc, pcc_param.key + offset);
 
+        memset(&pcc_param, 0, sizeof(pcc_param));
+        memcpy(pcc_param.tweak, walk.iv, sizeof(pcc_param.tweak));
+        spin_lock_bh(&ctx->pk_lock);
+        memcpy(pcc_param.key + offset, ctx->pk[1].protkey, keylen);
         memcpy(xts_param.key + offset, ctx->pk[0].protkey, keylen);
+        spin_unlock_bh(&ctx->pk_lock);
+        cpacf_pcc(ctx->fc, pcc_param.key + offset);
         memcpy(xts_param.init, pcc_param.xts, 16);
 
-        while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
+        while ((nbytes = walk.nbytes) != 0) {
                 /* only use complete blocks */
                 n = nbytes & ~(AES_BLOCK_SIZE - 1);
                 k = cpacf_km(ctx->fc | modifier, xts_param.key + offset,
-                             walk->dst.virt.addr, walk->src.virt.addr, n);
+                             walk.dst.virt.addr, walk.src.virt.addr, n);
                 if (k)
-                        ret = blkcipher_walk_done(desc, walk, nbytes - k);
+                        ret = skcipher_walk_done(&walk, nbytes - k);
                 if (k < n) {
-                        if (__xts_paes_set_key(ctx) != 0)
-                                return blkcipher_walk_done(desc, walk, -EIO);
-                        goto retry;
+                        if (__xts_paes_convert_key(ctx))
+                                return skcipher_walk_done(&walk, -EIO);
+                        spin_lock_bh(&ctx->pk_lock);
+                        memcpy(xts_param.key + offset,
+                               ctx->pk[0].protkey, keylen);
+                        spin_unlock_bh(&ctx->pk_lock);
                 }
         }
+
         return ret;
 }
 
-static int xts_paes_encrypt(struct blkcipher_desc *desc,
-                            struct scatterlist *dst, struct scatterlist *src,
-                            unsigned int nbytes)
+static int xts_paes_encrypt(struct skcipher_request *req)
 {
-        struct blkcipher_walk walk;
-
-        blkcipher_walk_init(&walk, dst, src, nbytes);
-        return xts_paes_crypt(desc, 0, &walk);
+        return xts_paes_crypt(req, 0);
 }
 
-static int xts_paes_decrypt(struct blkcipher_desc *desc,
-                            struct scatterlist *dst, struct scatterlist *src,
-                            unsigned int nbytes)
+static int xts_paes_decrypt(struct skcipher_request *req)
 {
-        struct blkcipher_walk walk;
-
-        blkcipher_walk_init(&walk, dst, src, nbytes);
-        return xts_paes_crypt(desc, CPACF_DECRYPT, &walk);
+        return xts_paes_crypt(req, CPACF_DECRYPT);
 }
 
-static struct crypto_alg xts_paes_alg = {
-        .cra_name               = "xts(paes)",
-        .cra_driver_name        = "xts-paes-s390",
-        .cra_priority           = 402,  /* ecb-paes-s390 + 1 */
-        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER,
-        .cra_blocksize          = AES_BLOCK_SIZE,
-        .cra_ctxsize            = sizeof(struct s390_pxts_ctx),
-        .cra_type               = &crypto_blkcipher_type,
-        .cra_module             = THIS_MODULE,
-        .cra_list               = LIST_HEAD_INIT(xts_paes_alg.cra_list),
-        .cra_u                  = {
-                .blkcipher = {
-                        .min_keysize    = 2 * SECKEYBLOBSIZE,
-                        .max_keysize    = 2 * SECKEYBLOBSIZE,
-                        .ivsize         = AES_BLOCK_SIZE,
-                        .setkey         = xts_paes_set_key,
-                        .encrypt        = xts_paes_encrypt,
-                        .decrypt        = xts_paes_decrypt,
-                }
-        }
+static struct skcipher_alg xts_paes_alg = {
+        .base.cra_name          = "xts(paes)",
+        .base.cra_driver_name   = "xts-paes-s390",
+        .base.cra_priority      = 402,  /* ecb-paes-s390 + 1 */
+        .base.cra_blocksize     = AES_BLOCK_SIZE,
+        .base.cra_ctxsize       = sizeof(struct s390_pxts_ctx),
+        .base.cra_module        = THIS_MODULE,
+        .base.cra_list          = LIST_HEAD_INIT(xts_paes_alg.base.cra_list),
+        .init                   = xts_paes_init,
+        .exit                   = xts_paes_exit,
+        .min_keysize            = 2 * PAES_MIN_KEYSIZE,
+        .max_keysize            = 2 * PAES_MAX_KEYSIZE,
+        .ivsize                 = AES_BLOCK_SIZE,
+        .setkey                 = xts_paes_set_key,
+        .encrypt                = xts_paes_encrypt,
+        .decrypt                = xts_paes_decrypt,
 };
 
-static int __ctr_paes_set_key(struct s390_paes_ctx *ctx)
+static int ctr_paes_init(struct crypto_skcipher *tfm)
+{
+        struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);
+
+        ctx->kb.key = NULL;
+        spin_lock_init(&ctx->pk_lock);
+
+        return 0;
+}
+
+static void ctr_paes_exit(struct crypto_skcipher *tfm)
+{
+        struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);
+
+        _free_kb_keybuf(&ctx->kb);
+}
+
+static inline int __ctr_paes_set_key(struct s390_paes_ctx *ctx)
 {
         unsigned long fc;
 
-        if (__paes_convert_key(&ctx->sk, &ctx->pk))
+        if (__paes_convert_key(ctx))
                 return -EINVAL;
 
         /* Pick the correct function code based on the protected key type */
@@ -415,17 +594,18 @@
         return ctx->fc ? 0 : -EINVAL;
 }
 
-static int ctr_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
+static int ctr_paes_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
                             unsigned int key_len)
 {
-        struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);
+        int rc;
+        struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);
 
-        memcpy(ctx->sk.seckey, in_key, key_len);
-        if (__ctr_paes_set_key(ctx)) {
-                tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
-                return -EINVAL;
-        }
-        return 0;
+        _free_kb_keybuf(&ctx->kb);
+        rc = _key_to_kb(&ctx->kb, in_key, key_len);
+        if (rc)
+                return rc;
+
+        return __ctr_paes_set_key(ctx);
 }
 
 static unsigned int __ctrblk_init(u8 *ctrptr, u8 *iv, unsigned int nbytes)
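
The body of __ctrblk_init() is unchanged by this patch and is elided between the hunks. For context, a functionally equivalent sketch of what such a helper does (an illustration only, not the file's actual code): it fills the shared ctrblk page with consecutive counter values so that a single cpacf_kmctr() call can process many blocks, leaves the caller's iv untouched, and returns the number of bytes prepared.

/* Illustrative sketch only; the real __ctrblk_init() body is elided
 * from this diff. */
static unsigned int example_ctrblk_init(u8 *ctrptr, u8 *iv,
                                        unsigned int nbytes)
{
        unsigned int i, n;

        /* only complete blocks, at most one page worth of counters */
        n = (nbytes > PAGE_SIZE) ? PAGE_SIZE :
                                   nbytes & ~(AES_BLOCK_SIZE - 1);
        memcpy(ctrptr, iv, AES_BLOCK_SIZE);
        for (i = AES_BLOCK_SIZE; i < n; i += AES_BLOCK_SIZE) {
                /* each block is the previous counter plus one */
                memcpy(ctrptr + i, ctrptr + i - AES_BLOCK_SIZE,
                       AES_BLOCK_SIZE);
                crypto_inc(ctrptr + i, AES_BLOCK_SIZE);
        }
        return n;
}
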
@@ -443,119 +623,111 @@
         return n;
 }
 
-static int ctr_paes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
-                          struct blkcipher_walk *walk)
+static int ctr_paes_crypt(struct skcipher_request *req)
 {
-        struct s390_paes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
+        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+        struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);
         u8 buf[AES_BLOCK_SIZE], *ctrptr;
+        struct skcipher_walk walk;
         unsigned int nbytes, n, k;
         int ret, locked;
+        struct {
+                u8 key[MAXPROTKEYSIZE];
+        } param;
 
-        locked = spin_trylock(&ctrblk_lock);
+        ret = skcipher_walk_virt(&walk, req, false);
+        if (ret)
+                return ret;
 
-        ret = blkcipher_walk_virt_block(desc, walk, AES_BLOCK_SIZE);
-        while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
+        spin_lock_bh(&ctx->pk_lock);
+        memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
+        spin_unlock_bh(&ctx->pk_lock);
+
+        locked = mutex_trylock(&ctrblk_lock);
+
+        while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
                 n = AES_BLOCK_SIZE;
                 if (nbytes >= 2*AES_BLOCK_SIZE && locked)
-                        n = __ctrblk_init(ctrblk, walk->iv, nbytes);
-                ctrptr = (n > AES_BLOCK_SIZE) ? ctrblk : walk->iv;
-                k = cpacf_kmctr(ctx->fc | modifier, ctx->pk.protkey,
-                                walk->dst.virt.addr, walk->src.virt.addr,
-                                n, ctrptr);
+                        n = __ctrblk_init(ctrblk, walk.iv, nbytes);
+                ctrptr = (n > AES_BLOCK_SIZE) ? ctrblk : walk.iv;
+                k = cpacf_kmctr(ctx->fc, &param, walk.dst.virt.addr,
+                                walk.src.virt.addr, n, ctrptr);
                 if (k) {
                         if (ctrptr == ctrblk)
-                                memcpy(walk->iv, ctrptr + k - AES_BLOCK_SIZE,
+                                memcpy(walk.iv, ctrptr + k - AES_BLOCK_SIZE,
                                        AES_BLOCK_SIZE);
-                        crypto_inc(walk->iv, AES_BLOCK_SIZE);
-                        ret = blkcipher_walk_done(desc, walk, nbytes - n);
+                        crypto_inc(walk.iv, AES_BLOCK_SIZE);
+                        ret = skcipher_walk_done(&walk, nbytes - k);
                 }
                 if (k < n) {
-                        if (__ctr_paes_set_key(ctx) != 0) {
+                        if (__paes_convert_key(ctx)) {
                                 if (locked)
-                                        spin_unlock(&ctrblk_lock);
-                                return blkcipher_walk_done(desc, walk, -EIO);
+                                        mutex_unlock(&ctrblk_lock);
+                                return skcipher_walk_done(&walk, -EIO);
                         }
+                        spin_lock_bh(&ctx->pk_lock);
+                        memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
+                        spin_unlock_bh(&ctx->pk_lock);
                 }
         }
         if (locked)
-                spin_unlock(&ctrblk_lock);
+                mutex_unlock(&ctrblk_lock);
         /*
          * final block may be < AES_BLOCK_SIZE, copy only nbytes
          */
         if (nbytes) {
                 while (1) {
-                        if (cpacf_kmctr(ctx->fc | modifier,
-                                        ctx->pk.protkey, buf,
-                                        walk->src.virt.addr, AES_BLOCK_SIZE,
-                                        walk->iv) == AES_BLOCK_SIZE)
+                        if (cpacf_kmctr(ctx->fc, &param, buf,
                                        walk.src.virt.addr, AES_BLOCK_SIZE,
+                                        walk.iv) == AES_BLOCK_SIZE)
                                 break;
-                        if (__ctr_paes_set_key(ctx) != 0)
-                                return blkcipher_walk_done(desc, walk, -EIO);
+                        if (__paes_convert_key(ctx))
+                                return skcipher_walk_done(&walk, -EIO);
+                        spin_lock_bh(&ctx->pk_lock);
+                        memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
+                        spin_unlock_bh(&ctx->pk_lock);
                 }
-                memcpy(walk->dst.virt.addr, buf, nbytes);
-                crypto_inc(walk->iv, AES_BLOCK_SIZE);
-                ret = blkcipher_walk_done(desc, walk, 0);
+                memcpy(walk.dst.virt.addr, buf, nbytes);
+                crypto_inc(walk.iv, AES_BLOCK_SIZE);
+                ret = skcipher_walk_done(&walk, nbytes);
         }
 
         return ret;
 }
 
-static int ctr_paes_encrypt(struct blkcipher_desc *desc,
-                            struct scatterlist *dst, struct scatterlist *src,
-                            unsigned int nbytes)
-{
-        struct blkcipher_walk walk;
-
-        blkcipher_walk_init(&walk, dst, src, nbytes);
-        return ctr_paes_crypt(desc, 0, &walk);
-}
-
-static int ctr_paes_decrypt(struct blkcipher_desc *desc,
-                            struct scatterlist *dst, struct scatterlist *src,
-                            unsigned int nbytes)
-{
-        struct blkcipher_walk walk;
-
-        blkcipher_walk_init(&walk, dst, src, nbytes);
-        return ctr_paes_crypt(desc, CPACF_DECRYPT, &walk);
-}
-
-static struct crypto_alg ctr_paes_alg = {
-        .cra_name               = "ctr(paes)",
-        .cra_driver_name        = "ctr-paes-s390",
-        .cra_priority           = 402,  /* ecb-paes-s390 + 1 */
-        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER,
-        .cra_blocksize          = 1,
-        .cra_ctxsize            = sizeof(struct s390_paes_ctx),
-        .cra_type               = &crypto_blkcipher_type,
-        .cra_module             = THIS_MODULE,
-        .cra_list               = LIST_HEAD_INIT(ctr_paes_alg.cra_list),
-        .cra_u                  = {
-                .blkcipher = {
-                        .min_keysize    = SECKEYBLOBSIZE,
-                        .max_keysize    = SECKEYBLOBSIZE,
-                        .ivsize         = AES_BLOCK_SIZE,
-                        .setkey         = ctr_paes_set_key,
-                        .encrypt        = ctr_paes_encrypt,
-                        .decrypt        = ctr_paes_decrypt,
-                }
-        }
+static struct skcipher_alg ctr_paes_alg = {
+        .base.cra_name          = "ctr(paes)",
+        .base.cra_driver_name   = "ctr-paes-s390",
+        .base.cra_priority      = 402,  /* ecb-paes-s390 + 1 */
+        .base.cra_blocksize     = 1,
+        .base.cra_ctxsize       = sizeof(struct s390_paes_ctx),
+        .base.cra_module        = THIS_MODULE,
+        .base.cra_list          = LIST_HEAD_INIT(ctr_paes_alg.base.cra_list),
+        .init                   = ctr_paes_init,
+        .exit                   = ctr_paes_exit,
+        .min_keysize            = PAES_MIN_KEYSIZE,
+        .max_keysize            = PAES_MAX_KEYSIZE,
+        .ivsize                 = AES_BLOCK_SIZE,
+        .setkey                 = ctr_paes_set_key,
+        .encrypt                = ctr_paes_crypt,
+        .decrypt                = ctr_paes_crypt,
+        .chunksize              = AES_BLOCK_SIZE,
 };
 
-static inline void __crypto_unregister_alg(struct crypto_alg *alg)
+static inline void __crypto_unregister_skcipher(struct skcipher_alg *alg)
 {
-        if (!list_empty(&alg->cra_list))
-                crypto_unregister_alg(alg);
+        if (!list_empty(&alg->base.cra_list))
+                crypto_unregister_skcipher(alg);
 }
 
 static void paes_s390_fini(void)
 {
+        __crypto_unregister_skcipher(&ctr_paes_alg);
+        __crypto_unregister_skcipher(&xts_paes_alg);
+        __crypto_unregister_skcipher(&cbc_paes_alg);
+        __crypto_unregister_skcipher(&ecb_paes_alg);
         if (ctrblk)
                 free_page((unsigned long) ctrblk);
-        __crypto_unregister_alg(&ctr_paes_alg);
-        __crypto_unregister_alg(&xts_paes_alg);
-        __crypto_unregister_alg(&cbc_paes_alg);
-        __crypto_unregister_alg(&ecb_paes_alg);
 }
 
 static int __init paes_s390_init(void)
@@ -570,7 +742,7 @@
         if (cpacf_test_func(&km_functions, CPACF_KM_PAES_128) ||
             cpacf_test_func(&km_functions, CPACF_KM_PAES_192) ||
             cpacf_test_func(&km_functions, CPACF_KM_PAES_256)) {
-                ret = crypto_register_alg(&ecb_paes_alg);
+                ret = crypto_register_skcipher(&ecb_paes_alg);
                 if (ret)
                         goto out_err;
         }
@@ -578,14 +750,14 @@
         if (cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_128) ||
             cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_192) ||
             cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_256)) {
-                ret = crypto_register_alg(&cbc_paes_alg);
+                ret = crypto_register_skcipher(&cbc_paes_alg);
                 if (ret)
                         goto out_err;
         }
 
         if (cpacf_test_func(&km_functions, CPACF_KM_PXTS_128) ||
             cpacf_test_func(&km_functions, CPACF_KM_PXTS_256)) {
-                ret = crypto_register_alg(&xts_paes_alg);
+                ret = crypto_register_skcipher(&xts_paes_alg);
                 if (ret)
                         goto out_err;
         }
@@ -593,14 +765,14 @@
         if (cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_128) ||
             cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_192) ||
             cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_256)) {
-                ret = crypto_register_alg(&ctr_paes_alg);
-                if (ret)
-                        goto out_err;
                 ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
                 if (!ctrblk) {
                         ret = -ENOMEM;
                         goto out_err;
                 }
+                ret = crypto_register_skcipher(&ctr_paes_alg);
+                if (ret)
+                        goto out_err;
         }
 
         return 0;
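
Because paes_s390_init() registers each mode only when the matching CPACF function is present, other kernel code can probe for a mode before relying on it. A minimal sketch, assuming a kernel that provides crypto_has_skcipher() (this helper is hypothetical, not part of the patch):

#include <crypto/skcipher.h>

/* Hypothetical probe helper, not part of the patch. */
static bool ctr_paes_available(void)
{
        /* nonzero if some "ctr(paes)" implementation is registered */
        return crypto_has_skcipher("ctr(paes)", 0, 0);
}
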