...
 #include <crypto/algapi.h>
 #include <crypto/ghash.h>
 #include <crypto/internal/aead.h>
+#include <crypto/internal/cipher.h>
 #include <crypto/internal/skcipher.h>
 #include <crypto/scatterwalk.h>
 #include <linux/err.h>
...
 	int key_len;
 	unsigned long fc;
 	union {
-		struct crypto_skcipher *blk;
+		struct crypto_skcipher *skcipher;
 		struct crypto_cipher *cip;
 	} fallback;
 };
...
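
For orientation, the per-transform context this hunk touches looks like this after the patch (reconstructed from the hunk; the key[] member is outside the visible context and is assumed):

struct s390_aes_ctx {
	u8 key[AES_MAX_KEY_SIZE];	/* assumed: raw key material for CPACF */
	int key_len;
	unsigned long fc;		/* CPACF function code, 0 = no HW support */
	union {				/* software fallback transform */
		struct crypto_skcipher *skcipher;	/* was: *blk */
		struct crypto_cipher *cip;
	} fallback;
};

The rename from blk to skcipher tracks the API move: the fallback is no longer reached through the blkcipher layer.
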
 				unsigned int key_len)
 {
 	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
-	int ret;
 
 	sctx->fallback.cip->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
 	sctx->fallback.cip->base.crt_flags |= (tfm->crt_flags &
 					       CRYPTO_TFM_REQ_MASK);
 
-	ret = crypto_cipher_setkey(sctx->fallback.cip, in_key, key_len);
-	if (ret) {
-		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
-		tfm->crt_flags |= (sctx->fallback.cip->base.crt_flags &
-				   CRYPTO_TFM_RES_MASK);
-	}
-	return ret;
+	return crypto_cipher_setkey(sctx->fallback.cip, in_key, key_len);
 }
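
The deleted CRYPTO_TFM_RES_MASK shuffling reflects an API-wide cleanup: the crypto layer stopped reporting setkey failures through tfm result flags, leaving the return value as the only error channel. A caller-side sketch of the resulting convention (hypothetical caller, not part of the patch):

	/* Error reporting is by return code alone now. */
	err = crypto_cipher_setkey(sctx->fallback.cip, in_key, key_len);
	if (err)
		return err;	/* typically -EINVAL for a bad key length */
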
 
 static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
...
 	return 0;
 }
 
-static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
+static void crypto_aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
 	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
 
...
 	cpacf_km(sctx->fc, &sctx->key, out, in, AES_BLOCK_SIZE);
 }
 
-static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
+static void crypto_aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
 	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
 
...
 	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
 
 	sctx->fallback.cip = crypto_alloc_cipher(name, 0,
-			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
+			CRYPTO_ALG_NEED_FALLBACK);
 
 	if (IS_ERR(sctx->fallback.cip)) {
 		pr_err("Allocating AES fallback algorithm %s failed\n",
...
 			.cia_min_keysize = AES_MIN_KEY_SIZE,
 			.cia_max_keysize = AES_MAX_KEY_SIZE,
 			.cia_setkey = aes_set_key,
-			.cia_encrypt = aes_encrypt,
-			.cia_decrypt = aes_decrypt,
+			.cia_encrypt = crypto_aes_encrypt,
+			.cia_decrypt = crypto_aes_decrypt,
 		}
 	}
 };
 
-static int setkey_fallback_blk(struct crypto_tfm *tfm, const u8 *key,
-			       unsigned int len)
+static int setkey_fallback_skcipher(struct crypto_skcipher *tfm, const u8 *key,
+				    unsigned int len)
 {
-	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
-	unsigned int ret;
+	struct s390_aes_ctx *sctx = crypto_skcipher_ctx(tfm);
 
-	crypto_skcipher_clear_flags(sctx->fallback.blk, CRYPTO_TFM_REQ_MASK);
-	crypto_skcipher_set_flags(sctx->fallback.blk, tfm->crt_flags &
-				  CRYPTO_TFM_REQ_MASK);
-
-	ret = crypto_skcipher_setkey(sctx->fallback.blk, key, len);
-
-	tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
-	tfm->crt_flags |= crypto_skcipher_get_flags(sctx->fallback.blk) &
-			  CRYPTO_TFM_RES_MASK;
-
-	return ret;
+	crypto_skcipher_clear_flags(sctx->fallback.skcipher,
+				    CRYPTO_TFM_REQ_MASK);
+	crypto_skcipher_set_flags(sctx->fallback.skcipher,
+				  crypto_skcipher_get_flags(tfm) &
+				  CRYPTO_TFM_REQ_MASK);
+	return crypto_skcipher_setkey(sctx->fallback.skcipher, key, len);
 }
 
-static int fallback_blk_dec(struct blkcipher_desc *desc,
-			    struct scatterlist *dst, struct scatterlist *src,
-			    unsigned int nbytes)
+static int fallback_skcipher_crypt(struct s390_aes_ctx *sctx,
+				   struct skcipher_request *req,
+				   unsigned long modifier)
 {
-	unsigned int ret;
-	struct crypto_blkcipher *tfm = desc->tfm;
-	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(tfm);
-	SKCIPHER_REQUEST_ON_STACK(req, sctx->fallback.blk);
+	struct skcipher_request *subreq = skcipher_request_ctx(req);
 
-	skcipher_request_set_tfm(req, sctx->fallback.blk);
-	skcipher_request_set_callback(req, desc->flags, NULL, NULL);
-	skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);
-
-	ret = crypto_skcipher_decrypt(req);
-
-	skcipher_request_zero(req);
-	return ret;
+	*subreq = *req;
+	skcipher_request_set_tfm(subreq, sctx->fallback.skcipher);
+	return (modifier & CPACF_DECRYPT) ?
+		crypto_skcipher_decrypt(subreq) :
+		crypto_skcipher_encrypt(subreq);
 }
 
-static int fallback_blk_enc(struct blkcipher_desc *desc,
-			    struct scatterlist *dst, struct scatterlist *src,
-			    unsigned int nbytes)
-{
-	unsigned int ret;
-	struct crypto_blkcipher *tfm = desc->tfm;
-	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(tfm);
-	SKCIPHER_REQUEST_ON_STACK(req, sctx->fallback.blk);
-
-	skcipher_request_set_tfm(req, sctx->fallback.blk);
-	skcipher_request_set_callback(req, desc->flags, NULL, NULL);
-	skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);
-
-	ret = crypto_skcipher_encrypt(req);
-	return ret;
-}
-
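
fallback_skcipher_crypt() merges the old enc/dec pair and swaps the on-stack SKCIPHER_REQUEST_ON_STACK for a subrequest living in the request context. The struct assignment *subreq = *req copies the base request (callback, flags, src/dst scatterlists, cryptlen, IV pointer) before the tfm is retargeted at the fallback. A minimal sketch of the same pattern for a hypothetical driver (my_ctx and my_crypt are illustrative names, not from this patch):

static int my_crypt(struct skcipher_request *req, bool enc)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct my_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_request *subreq = skcipher_request_ctx(req);

	*subreq = *req;		/* inherit sg lists, cryptlen, iv, flags */
	skcipher_request_set_tfm(subreq, ctx->fallback);
	return enc ? crypto_skcipher_encrypt(subreq) :
		     crypto_skcipher_decrypt(subreq);
}

This only works because the init function (further down) reserves room for the subrequest via crypto_skcipher_set_reqsize().
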
-static int ecb_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
+static int ecb_aes_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
 			   unsigned int key_len)
 {
-	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+	struct s390_aes_ctx *sctx = crypto_skcipher_ctx(tfm);
 	unsigned long fc;
 
 	/* Pick the correct function code based on the key length */
...
 	/* Check if the function code is available */
 	sctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;
 	if (!sctx->fc)
-		return setkey_fallback_blk(tfm, in_key, key_len);
+		return setkey_fallback_skcipher(tfm, in_key, key_len);
 
 	sctx->key_len = key_len;
 	memcpy(sctx->key, in_key, key_len);
 	return 0;
 }
 
-static int ecb_aes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
-			 struct blkcipher_walk *walk)
+static int ecb_aes_crypt(struct skcipher_request *req, unsigned long modifier)
 {
-	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	struct s390_aes_ctx *sctx = crypto_skcipher_ctx(tfm);
+	struct skcipher_walk walk;
 	unsigned int nbytes, n;
 	int ret;
 
-	ret = blkcipher_walk_virt(desc, walk);
-	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
+	if (unlikely(!sctx->fc))
+		return fallback_skcipher_crypt(sctx, req, modifier);
+
+	ret = skcipher_walk_virt(&walk, req, false);
+	while ((nbytes = walk.nbytes) != 0) {
 		/* only use complete blocks */
 		n = nbytes & ~(AES_BLOCK_SIZE - 1);
 		cpacf_km(sctx->fc | modifier, sctx->key,
-			 walk->dst.virt.addr, walk->src.virt.addr, n);
-		ret = blkcipher_walk_done(desc, walk, nbytes - n);
+			 walk.dst.virt.addr, walk.src.virt.addr, n);
+		ret = skcipher_walk_done(&walk, nbytes - n);
 	}
-
 	return ret;
 }
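
The walk now lives on this function's stack and is initialized directly from the request; skcipher_walk_virt()'s third argument says whether the caller is atomic (false here, so the walk may sleep). The loop keeps its own block masking, and the `!= 0` condition lets a trailing partial block fall through to skcipher_walk_done(), which fails the request rather than silently dropping bytes. The generic shape most converted drivers follow (illustrative sketch, not the patch itself):

static int sketch_crypt(struct skcipher_request *req, unsigned long modifier)
{
	struct skcipher_walk walk;
	unsigned int nbytes, n;
	int ret;

	ret = skcipher_walk_virt(&walk, req, false);
	while ((nbytes = walk.nbytes) != 0) {
		n = nbytes & ~(AES_BLOCK_SIZE - 1);	/* whole blocks only */
		/* ... process n bytes, walk.src.virt.addr -> walk.dst.virt.addr ... */
		ret = skcipher_walk_done(&walk, nbytes - n);
	}
	return ret;
}
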
 
-static int ecb_aes_encrypt(struct blkcipher_desc *desc,
-			   struct scatterlist *dst, struct scatterlist *src,
-			   unsigned int nbytes)
+static int ecb_aes_encrypt(struct skcipher_request *req)
 {
-	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
-	struct blkcipher_walk walk;
-
-	if (unlikely(!sctx->fc))
-		return fallback_blk_enc(desc, dst, src, nbytes);
-
-	blkcipher_walk_init(&walk, dst, src, nbytes);
-	return ecb_aes_crypt(desc, 0, &walk);
+	return ecb_aes_crypt(req, 0);
 }
 
-static int ecb_aes_decrypt(struct blkcipher_desc *desc,
-			   struct scatterlist *dst, struct scatterlist *src,
-			   unsigned int nbytes)
+static int ecb_aes_decrypt(struct skcipher_request *req)
 {
-	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
-	struct blkcipher_walk walk;
-
-	if (unlikely(!sctx->fc))
-		return fallback_blk_dec(desc, dst, src, nbytes);
-
-	blkcipher_walk_init(&walk, dst, src, nbytes);
-	return ecb_aes_crypt(desc, CPACF_DECRYPT, &walk);
+	return ecb_aes_crypt(req, CPACF_DECRYPT);
 }
 
-static int fallback_init_blk(struct crypto_tfm *tfm)
+static int fallback_init_skcipher(struct crypto_skcipher *tfm)
 {
-	const char *name = tfm->__crt_alg->cra_name;
-	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+	const char *name = crypto_tfm_alg_name(&tfm->base);
+	struct s390_aes_ctx *sctx = crypto_skcipher_ctx(tfm);
 
-	sctx->fallback.blk = crypto_alloc_skcipher(name, 0,
-						   CRYPTO_ALG_ASYNC |
-						   CRYPTO_ALG_NEED_FALLBACK);
+	sctx->fallback.skcipher = crypto_alloc_skcipher(name, 0,
+				CRYPTO_ALG_NEED_FALLBACK | CRYPTO_ALG_ASYNC);
 
-	if (IS_ERR(sctx->fallback.blk)) {
+	if (IS_ERR(sctx->fallback.skcipher)) {
 		pr_err("Allocating AES fallback algorithm %s failed\n",
 		       name);
-		return PTR_ERR(sctx->fallback.blk);
+		return PTR_ERR(sctx->fallback.skcipher);
 	}
 
+	crypto_skcipher_set_reqsize(tfm, sizeof(struct skcipher_request) +
+				    crypto_skcipher_reqsize(sctx->fallback.skcipher));
 	return 0;
 }
 
-static void fallback_exit_blk(struct crypto_tfm *tfm)
+static void fallback_exit_skcipher(struct crypto_skcipher *tfm)
 {
-	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+	struct s390_aes_ctx *sctx = crypto_skcipher_ctx(tfm);
 
-	crypto_free_skcipher(sctx->fallback.blk);
+	crypto_free_skcipher(sctx->fallback.skcipher);
 }
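
crypto_skcipher_set_reqsize() is the other half of the subrequest trick: every skcipher_request allocated against this tfm now carries trailing context space big enough for a complete fallback request. Roughly (illustrative layout):

/*
 *  skcipher_request_alloc(tfm, gfp) returns:
 *  +---------------------------------------------+
 *  | struct skcipher_request      (driver's tfm) |
 *  +---------------------------------------------+  <- skcipher_request_ctx()
 *  | struct skcipher_request        (subrequest) |
 *  |   + reqsize of the fallback tfm             |
 *  +---------------------------------------------+
 */

Note the allocation mask: type 0 with CRYPTO_ALG_ASYNC in the mask demands a synchronous fallback, which is required because this driver completes requests synchronously.
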
 
-static struct crypto_alg ecb_aes_alg = {
-	.cra_name		= "ecb(aes)",
-	.cra_driver_name	= "ecb-aes-s390",
-	.cra_priority		= 401,	/* combo: aes + ecb + 1 */
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
-				  CRYPTO_ALG_NEED_FALLBACK,
-	.cra_blocksize		= AES_BLOCK_SIZE,
-	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
-	.cra_type		= &crypto_blkcipher_type,
-	.cra_module		= THIS_MODULE,
-	.cra_init		= fallback_init_blk,
-	.cra_exit		= fallback_exit_blk,
-	.cra_u			= {
-		.blkcipher = {
-			.min_keysize	= AES_MIN_KEY_SIZE,
-			.max_keysize	= AES_MAX_KEY_SIZE,
-			.setkey		= ecb_aes_set_key,
-			.encrypt	= ecb_aes_encrypt,
-			.decrypt	= ecb_aes_decrypt,
-		}
-	}
+static struct skcipher_alg ecb_aes_alg = {
+	.base.cra_name		= "ecb(aes)",
+	.base.cra_driver_name	= "ecb-aes-s390",
+	.base.cra_priority	= 401,	/* combo: aes + ecb + 1 */
+	.base.cra_flags		= CRYPTO_ALG_NEED_FALLBACK,
+	.base.cra_blocksize	= AES_BLOCK_SIZE,
+	.base.cra_ctxsize	= sizeof(struct s390_aes_ctx),
+	.base.cra_module	= THIS_MODULE,
+	.init			= fallback_init_skcipher,
+	.exit			= fallback_exit_skcipher,
+	.min_keysize		= AES_MIN_KEY_SIZE,
+	.max_keysize		= AES_MAX_KEY_SIZE,
+	.setkey			= ecb_aes_set_key,
+	.encrypt		= ecb_aes_encrypt,
+	.decrypt		= ecb_aes_decrypt,
 };
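
With priority 401 this implementation outranks the generic ecb template stacked on aes-generic, so callers simply ask for "ecb(aes)" and get the CPACF-backed version when the hardware supports it. A minimal caller sketch (assumes key, sg_src, sg_dst and len are prepared elsewhere; error handling trimmed):

	struct crypto_skcipher *tfm = crypto_alloc_skcipher("ecb(aes)", 0, 0);
	struct skcipher_request *req = skcipher_request_alloc(tfm, GFP_KERNEL);
	DECLARE_CRYPTO_WAIT(wait);

	crypto_skcipher_setkey(tfm, key, AES_KEYSIZE_128);
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, sg_src, sg_dst, len, NULL); /* ECB: no IV */
	crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
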
 
-static int cbc_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
+static int cbc_aes_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
 			   unsigned int key_len)
 {
-	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+	struct s390_aes_ctx *sctx = crypto_skcipher_ctx(tfm);
 	unsigned long fc;
 
 	/* Pick the correct function code based on the key length */
...
 	/* Check if the function code is available */
 	sctx->fc = (fc && cpacf_test_func(&kmc_functions, fc)) ? fc : 0;
 	if (!sctx->fc)
-		return setkey_fallback_blk(tfm, in_key, key_len);
+		return setkey_fallback_skcipher(tfm, in_key, key_len);
 
 	sctx->key_len = key_len;
 	memcpy(sctx->key, in_key, key_len);
 	return 0;
 }
 
-static int cbc_aes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
-			 struct blkcipher_walk *walk)
+static int cbc_aes_crypt(struct skcipher_request *req, unsigned long modifier)
 {
-	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	struct s390_aes_ctx *sctx = crypto_skcipher_ctx(tfm);
+	struct skcipher_walk walk;
 	unsigned int nbytes, n;
 	int ret;
 	struct {
...
 		u8 key[AES_MAX_KEY_SIZE];
 	} param;
 
-	ret = blkcipher_walk_virt(desc, walk);
-	memcpy(param.iv, walk->iv, AES_BLOCK_SIZE);
+	if (unlikely(!sctx->fc))
+		return fallback_skcipher_crypt(sctx, req, modifier);
+
+	ret = skcipher_walk_virt(&walk, req, false);
+	if (ret)
+		return ret;
+	memcpy(param.iv, walk.iv, AES_BLOCK_SIZE);
 	memcpy(param.key, sctx->key, sctx->key_len);
-	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
+	while ((nbytes = walk.nbytes) != 0) {
 		/* only use complete blocks */
 		n = nbytes & ~(AES_BLOCK_SIZE - 1);
 		cpacf_kmc(sctx->fc | modifier, &param,
-			  walk->dst.virt.addr, walk->src.virt.addr, n);
-		ret = blkcipher_walk_done(desc, walk, nbytes - n);
+			  walk.dst.virt.addr, walk.src.virt.addr, n);
+		memcpy(walk.iv, param.iv, AES_BLOCK_SIZE);
+		ret = skcipher_walk_done(&walk, nbytes - n);
 	}
-	memcpy(walk->iv, param.iv, AES_BLOCK_SIZE);
+	memzero_explicit(&param, sizeof(param));
 	return ret;
 }
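
The CPACF KMC instruction chains through the parameter block: the machine updates the chaining value in param.iv after every call. Copying it back to walk.iv inside the loop (instead of once at the end, as before) keeps the request's IV state correct even when the walk makes partial progress, and the new memzero_explicit() scrubs the key copy from the stack. The parameter block, with the elided first member filled in from the memcpy() calls (evidently iv[AES_BLOCK_SIZE]):

	struct {
		u8 iv[AES_BLOCK_SIZE];		/* chaining value, updated by KMC */
		u8 key[AES_MAX_KEY_SIZE];	/* cipher key */
	} param;
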
 
-static int cbc_aes_encrypt(struct blkcipher_desc *desc,
-			   struct scatterlist *dst, struct scatterlist *src,
-			   unsigned int nbytes)
+static int cbc_aes_encrypt(struct skcipher_request *req)
 {
-	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
-	struct blkcipher_walk walk;
-
-	if (unlikely(!sctx->fc))
-		return fallback_blk_enc(desc, dst, src, nbytes);
-
-	blkcipher_walk_init(&walk, dst, src, nbytes);
-	return cbc_aes_crypt(desc, 0, &walk);
+	return cbc_aes_crypt(req, 0);
 }
 
-static int cbc_aes_decrypt(struct blkcipher_desc *desc,
-			   struct scatterlist *dst, struct scatterlist *src,
-			   unsigned int nbytes)
+static int cbc_aes_decrypt(struct skcipher_request *req)
 {
-	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
-	struct blkcipher_walk walk;
-
-	if (unlikely(!sctx->fc))
-		return fallback_blk_dec(desc, dst, src, nbytes);
-
-	blkcipher_walk_init(&walk, dst, src, nbytes);
-	return cbc_aes_crypt(desc, CPACF_DECRYPT, &walk);
+	return cbc_aes_crypt(req, CPACF_DECRYPT);
 }
 
-static struct crypto_alg cbc_aes_alg = {
-	.cra_name		= "cbc(aes)",
-	.cra_driver_name	= "cbc-aes-s390",
-	.cra_priority		= 402,	/* ecb-aes-s390 + 1 */
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
-				  CRYPTO_ALG_NEED_FALLBACK,
-	.cra_blocksize		= AES_BLOCK_SIZE,
-	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
-	.cra_type		= &crypto_blkcipher_type,
-	.cra_module		= THIS_MODULE,
-	.cra_init		= fallback_init_blk,
-	.cra_exit		= fallback_exit_blk,
-	.cra_u			= {
-		.blkcipher = {
-			.min_keysize	= AES_MIN_KEY_SIZE,
-			.max_keysize	= AES_MAX_KEY_SIZE,
-			.ivsize		= AES_BLOCK_SIZE,
-			.setkey		= cbc_aes_set_key,
-			.encrypt	= cbc_aes_encrypt,
-			.decrypt	= cbc_aes_decrypt,
-		}
-	}
+static struct skcipher_alg cbc_aes_alg = {
+	.base.cra_name		= "cbc(aes)",
+	.base.cra_driver_name	= "cbc-aes-s390",
+	.base.cra_priority	= 402,	/* ecb-aes-s390 + 1 */
+	.base.cra_flags		= CRYPTO_ALG_NEED_FALLBACK,
+	.base.cra_blocksize	= AES_BLOCK_SIZE,
+	.base.cra_ctxsize	= sizeof(struct s390_aes_ctx),
+	.base.cra_module	= THIS_MODULE,
+	.init			= fallback_init_skcipher,
+	.exit			= fallback_exit_skcipher,
+	.min_keysize		= AES_MIN_KEY_SIZE,
+	.max_keysize		= AES_MAX_KEY_SIZE,
+	.ivsize			= AES_BLOCK_SIZE,
+	.setkey			= cbc_aes_set_key,
+	.encrypt		= cbc_aes_encrypt,
+	.decrypt		= cbc_aes_decrypt,
 };
 
-static int xts_fallback_setkey(struct crypto_tfm *tfm, const u8 *key,
-			       unsigned int len)
+static int xts_fallback_setkey(struct crypto_skcipher *tfm, const u8 *key,
+			       unsigned int len)
 {
-	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
-	unsigned int ret;
+	struct s390_xts_ctx *xts_ctx = crypto_skcipher_ctx(tfm);
 
 	crypto_skcipher_clear_flags(xts_ctx->fallback, CRYPTO_TFM_REQ_MASK);
-	crypto_skcipher_set_flags(xts_ctx->fallback, tfm->crt_flags &
-				  CRYPTO_TFM_REQ_MASK);
-
-	ret = crypto_skcipher_setkey(xts_ctx->fallback, key, len);
-
-	tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
-	tfm->crt_flags |= crypto_skcipher_get_flags(xts_ctx->fallback) &
-			  CRYPTO_TFM_RES_MASK;
-
-	return ret;
+	crypto_skcipher_set_flags(xts_ctx->fallback,
+				  crypto_skcipher_get_flags(tfm) &
+				  CRYPTO_TFM_REQ_MASK);
+	return crypto_skcipher_setkey(xts_ctx->fallback, key, len);
 }
 
-static int xts_fallback_decrypt(struct blkcipher_desc *desc,
-				struct scatterlist *dst, struct scatterlist *src,
-				unsigned int nbytes)
-{
-	struct crypto_blkcipher *tfm = desc->tfm;
-	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(tfm);
-	SKCIPHER_REQUEST_ON_STACK(req, xts_ctx->fallback);
-	unsigned int ret;
-
-	skcipher_request_set_tfm(req, xts_ctx->fallback);
-	skcipher_request_set_callback(req, desc->flags, NULL, NULL);
-	skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);
-
-	ret = crypto_skcipher_decrypt(req);
-
-	skcipher_request_zero(req);
-	return ret;
-}
-
-static int xts_fallback_encrypt(struct blkcipher_desc *desc,
-				struct scatterlist *dst, struct scatterlist *src,
-				unsigned int nbytes)
-{
-	struct crypto_blkcipher *tfm = desc->tfm;
-	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(tfm);
-	SKCIPHER_REQUEST_ON_STACK(req, xts_ctx->fallback);
-	unsigned int ret;
-
-	skcipher_request_set_tfm(req, xts_ctx->fallback);
-	skcipher_request_set_callback(req, desc->flags, NULL, NULL);
-	skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);
-
-	ret = crypto_skcipher_encrypt(req);
-
-	skcipher_request_zero(req);
-	return ret;
-}
-
-static int xts_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
+static int xts_aes_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
 			   unsigned int key_len)
 {
-	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
+	struct s390_xts_ctx *xts_ctx = crypto_skcipher_ctx(tfm);
 	unsigned long fc;
 	int err;
 
-	err = xts_check_key(tfm, in_key, key_len);
+	err = xts_fallback_setkey(tfm, in_key, key_len);
 	if (err)
 		return err;
 
 	/* In fips mode only 128 bit or 256 bit keys are valid */
-	if (fips_enabled && key_len != 32 && key_len != 64) {
-		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
+	if (fips_enabled && key_len != 32 && key_len != 64)
 		return -EINVAL;
-	}
 
 	/* Pick the correct function code based on the key length */
 	fc = (key_len == 32) ? CPACF_KM_XTS_128 :
...
 	/* Check if the function code is available */
 	xts_ctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;
 	if (!xts_ctx->fc)
-		return xts_fallback_setkey(tfm, in_key, key_len);
+		return 0;
 
 	/* Split the XTS key into the two subkeys */
 	key_len = key_len / 2;
...
 	return 0;
 }
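
Two behavioural notes. First, xts_fallback_setkey() now runs first and unconditionally: because requests that are not a block-size multiple are always delegated (see xts_aes_crypt() below), the fallback tfm must hold the key even when CPACF is available; its setkey also performs the standard XTS key sanity check that the removed xts_check_key() call used to do. Second, the elided tail presumably does the usual split (sketch; field names taken from the code below):

	/* A 64-byte xts(aes) key is two AES-256 keys back to back:
	 *   in_key[ 0..31] -> xts_ctx->key      (data-unit encryption)
	 *   in_key[32..63] -> xts_ctx->pcc_key  (tweak encryption)
	 */
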
 
-static int xts_aes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
-			 struct blkcipher_walk *walk)
+static int xts_aes_crypt(struct skcipher_request *req, unsigned long modifier)
 {
-	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	struct s390_xts_ctx *xts_ctx = crypto_skcipher_ctx(tfm);
+	struct skcipher_walk walk;
 	unsigned int offset, nbytes, n;
 	int ret;
 	struct {
...
 		u8 init[16];
 	} xts_param;
 
-	ret = blkcipher_walk_virt(desc, walk);
+	if (req->cryptlen < AES_BLOCK_SIZE)
+		return -EINVAL;
+
+	if (unlikely(!xts_ctx->fc || (req->cryptlen % AES_BLOCK_SIZE) != 0)) {
+		struct skcipher_request *subreq = skcipher_request_ctx(req);
+
+		*subreq = *req;
+		skcipher_request_set_tfm(subreq, xts_ctx->fallback);
+		return (modifier & CPACF_DECRYPT) ?
+			crypto_skcipher_decrypt(subreq) :
+			crypto_skcipher_encrypt(subreq);
+	}
+
+	ret = skcipher_walk_virt(&walk, req, false);
+	if (ret)
+		return ret;
 	offset = xts_ctx->key_len & 0x10;
 	memset(pcc_param.block, 0, sizeof(pcc_param.block));
 	memset(pcc_param.bit, 0, sizeof(pcc_param.bit));
 	memset(pcc_param.xts, 0, sizeof(pcc_param.xts));
-	memcpy(pcc_param.tweak, walk->iv, sizeof(pcc_param.tweak));
+	memcpy(pcc_param.tweak, walk.iv, sizeof(pcc_param.tweak));
 	memcpy(pcc_param.key + offset, xts_ctx->pcc_key, xts_ctx->key_len);
 	cpacf_pcc(xts_ctx->fc, pcc_param.key + offset);
 
 	memcpy(xts_param.key + offset, xts_ctx->key, xts_ctx->key_len);
 	memcpy(xts_param.init, pcc_param.xts, 16);
 
-	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
+	while ((nbytes = walk.nbytes) != 0) {
 		/* only use complete blocks */
 		n = nbytes & ~(AES_BLOCK_SIZE - 1);
 		cpacf_km(xts_ctx->fc | modifier, xts_param.key + offset,
-			 walk->dst.virt.addr, walk->src.virt.addr, n);
-		ret = blkcipher_walk_done(desc, walk, nbytes - n);
+			 walk.dst.virt.addr, walk.src.virt.addr, n);
+		ret = skcipher_walk_done(&walk, nbytes - n);
 	}
+	memzero_explicit(&pcc_param, sizeof(pcc_param));
+	memzero_explicit(&xts_param, sizeof(xts_param));
 	return ret;
 }
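
The two new entry checks encode XTS's domain: inputs shorter than one block are invalid outright, and lengths that are not a block multiple require ciphertext stealing, which the CPACF KM path does not implement, so those requests go to the software fallback even on capable hardware. Worked examples (illustrative):

	/* cryptlen = 15 -> -EINVAL (less than one AES block)
	 * cryptlen = 32 -> CPACF KM path (whole blocks)
	 * cryptlen = 33 -> delegated to the xts(aes) fallback (needs stealing)
	 */
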
 
-static int xts_aes_encrypt(struct blkcipher_desc *desc,
-			   struct scatterlist *dst, struct scatterlist *src,
-			   unsigned int nbytes)
+static int xts_aes_encrypt(struct skcipher_request *req)
 {
-	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
-	struct blkcipher_walk walk;
-
-	if (!nbytes)
-		return -EINVAL;
-
-	if (unlikely(!xts_ctx->fc))
-		return xts_fallback_encrypt(desc, dst, src, nbytes);
-
-	blkcipher_walk_init(&walk, dst, src, nbytes);
-	return xts_aes_crypt(desc, 0, &walk);
+	return xts_aes_crypt(req, 0);
 }
 
-static int xts_aes_decrypt(struct blkcipher_desc *desc,
-			   struct scatterlist *dst, struct scatterlist *src,
-			   unsigned int nbytes)
+static int xts_aes_decrypt(struct skcipher_request *req)
 {
-	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
-	struct blkcipher_walk walk;
-
-	if (!nbytes)
-		return -EINVAL;
-
-	if (unlikely(!xts_ctx->fc))
-		return xts_fallback_decrypt(desc, dst, src, nbytes);
-
-	blkcipher_walk_init(&walk, dst, src, nbytes);
-	return xts_aes_crypt(desc, CPACF_DECRYPT, &walk);
+	return xts_aes_crypt(req, CPACF_DECRYPT);
 }
 
-static int xts_fallback_init(struct crypto_tfm *tfm)
+static int xts_fallback_init(struct crypto_skcipher *tfm)
 {
-	const char *name = tfm->__crt_alg->cra_name;
-	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
+	const char *name = crypto_tfm_alg_name(&tfm->base);
+	struct s390_xts_ctx *xts_ctx = crypto_skcipher_ctx(tfm);
 
 	xts_ctx->fallback = crypto_alloc_skcipher(name, 0,
-						  CRYPTO_ALG_ASYNC |
-						  CRYPTO_ALG_NEED_FALLBACK);
+						  CRYPTO_ALG_NEED_FALLBACK | CRYPTO_ALG_ASYNC);
 
 	if (IS_ERR(xts_ctx->fallback)) {
 		pr_err("Allocating XTS fallback algorithm %s failed\n",
 		       name);
 		return PTR_ERR(xts_ctx->fallback);
 	}
+	crypto_skcipher_set_reqsize(tfm, sizeof(struct skcipher_request) +
+				    crypto_skcipher_reqsize(xts_ctx->fallback));
 	return 0;
 }
 
-static void xts_fallback_exit(struct crypto_tfm *tfm)
+static void xts_fallback_exit(struct crypto_skcipher *tfm)
 {
-	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
+	struct s390_xts_ctx *xts_ctx = crypto_skcipher_ctx(tfm);
 
 	crypto_free_skcipher(xts_ctx->fallback);
 }
 
-static struct crypto_alg xts_aes_alg = {
-	.cra_name		= "xts(aes)",
-	.cra_driver_name	= "xts-aes-s390",
-	.cra_priority		= 402,	/* ecb-aes-s390 + 1 */
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
-				  CRYPTO_ALG_NEED_FALLBACK,
-	.cra_blocksize		= AES_BLOCK_SIZE,
-	.cra_ctxsize		= sizeof(struct s390_xts_ctx),
-	.cra_type		= &crypto_blkcipher_type,
-	.cra_module		= THIS_MODULE,
-	.cra_init		= xts_fallback_init,
-	.cra_exit		= xts_fallback_exit,
-	.cra_u			= {
-		.blkcipher = {
-			.min_keysize	= 2 * AES_MIN_KEY_SIZE,
-			.max_keysize	= 2 * AES_MAX_KEY_SIZE,
-			.ivsize		= AES_BLOCK_SIZE,
-			.setkey		= xts_aes_set_key,
-			.encrypt	= xts_aes_encrypt,
-			.decrypt	= xts_aes_decrypt,
-		}
-	}
+static struct skcipher_alg xts_aes_alg = {
+	.base.cra_name		= "xts(aes)",
+	.base.cra_driver_name	= "xts-aes-s390",
+	.base.cra_priority	= 402,	/* ecb-aes-s390 + 1 */
+	.base.cra_flags		= CRYPTO_ALG_NEED_FALLBACK,
+	.base.cra_blocksize	= AES_BLOCK_SIZE,
+	.base.cra_ctxsize	= sizeof(struct s390_xts_ctx),
+	.base.cra_module	= THIS_MODULE,
+	.init			= xts_fallback_init,
+	.exit			= xts_fallback_exit,
+	.min_keysize		= 2 * AES_MIN_KEY_SIZE,
+	.max_keysize		= 2 * AES_MAX_KEY_SIZE,
+	.ivsize			= AES_BLOCK_SIZE,
+	.setkey			= xts_aes_set_key,
+	.encrypt		= xts_aes_encrypt,
+	.decrypt		= xts_aes_decrypt,
 };
 
-static int ctr_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
+static int ctr_aes_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
 			   unsigned int key_len)
 {
-	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+	struct s390_aes_ctx *sctx = crypto_skcipher_ctx(tfm);
 	unsigned long fc;
 
 	/* Pick the correct function code based on the key length */
...
 	/* Check if the function code is available */
 	sctx->fc = (fc && cpacf_test_func(&kmctr_functions, fc)) ? fc : 0;
 	if (!sctx->fc)
-		return setkey_fallback_blk(tfm, in_key, key_len);
+		return setkey_fallback_skcipher(tfm, in_key, key_len);
 
 	sctx->key_len = key_len;
 	memcpy(sctx->key, in_key, key_len);
...
 	return n;
 }
 
-static int ctr_aes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
-			 struct blkcipher_walk *walk)
+static int ctr_aes_crypt(struct skcipher_request *req)
 {
-	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	struct s390_aes_ctx *sctx = crypto_skcipher_ctx(tfm);
 	u8 buf[AES_BLOCK_SIZE], *ctrptr;
+	struct skcipher_walk walk;
 	unsigned int n, nbytes;
 	int ret, locked;
 
+	if (unlikely(!sctx->fc))
+		return fallback_skcipher_crypt(sctx, req, 0);
+
 	locked = mutex_trylock(&ctrblk_lock);
 
-	ret = blkcipher_walk_virt_block(desc, walk, AES_BLOCK_SIZE);
-	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
+	ret = skcipher_walk_virt(&walk, req, false);
+	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
 		n = AES_BLOCK_SIZE;
+
 		if (nbytes >= 2*AES_BLOCK_SIZE && locked)
-			n = __ctrblk_init(ctrblk, walk->iv, nbytes);
-		ctrptr = (n > AES_BLOCK_SIZE) ? ctrblk : walk->iv;
-		cpacf_kmctr(sctx->fc | modifier, sctx->key,
-			    walk->dst.virt.addr, walk->src.virt.addr,
-			    n, ctrptr);
+			n = __ctrblk_init(ctrblk, walk.iv, nbytes);
+		ctrptr = (n > AES_BLOCK_SIZE) ? ctrblk : walk.iv;
+		cpacf_kmctr(sctx->fc, sctx->key, walk.dst.virt.addr,
+			    walk.src.virt.addr, n, ctrptr);
 		if (ctrptr == ctrblk)
-			memcpy(walk->iv, ctrptr + n - AES_BLOCK_SIZE,
+			memcpy(walk.iv, ctrptr + n - AES_BLOCK_SIZE,
 			       AES_BLOCK_SIZE);
-		crypto_inc(walk->iv, AES_BLOCK_SIZE);
-		ret = blkcipher_walk_done(desc, walk, nbytes - n);
+		crypto_inc(walk.iv, AES_BLOCK_SIZE);
+		ret = skcipher_walk_done(&walk, nbytes - n);
 	}
 	if (locked)
 		mutex_unlock(&ctrblk_lock);
...
 	 * final block may be < AES_BLOCK_SIZE, copy only nbytes
 	 */
 	if (nbytes) {
-		cpacf_kmctr(sctx->fc | modifier, sctx->key,
-			    buf, walk->src.virt.addr,
-			    AES_BLOCK_SIZE, walk->iv);
-		memcpy(walk->dst.virt.addr, buf, nbytes);
-		crypto_inc(walk->iv, AES_BLOCK_SIZE);
-		ret = blkcipher_walk_done(desc, walk, 0);
+		cpacf_kmctr(sctx->fc, sctx->key, buf, walk.src.virt.addr,
+			    AES_BLOCK_SIZE, walk.iv);
+		memcpy(walk.dst.virt.addr, buf, nbytes);
+		crypto_inc(walk.iv, AES_BLOCK_SIZE);
+		ret = skcipher_walk_done(&walk, 0);
 	}
 
 	return ret;
 }
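
ctrblk is a shared page of precomputed counter values: when more than one block is available and the mutex is uncontended, __ctrblk_init() expands walk.iv into a run of successive counters so one cpacf_kmctr() call covers up to a page; mutex_trylock() means a contending context simply degrades to one block per call instead of sleeping. The `return n;` context line above is that helper's tail; reconstructed roughly for reference (paraphrased from the upstream file, not part of this diff):

static unsigned int __ctrblk_init(u8 *ctrptr, u8 *iv, unsigned int nbytes)
{
	unsigned int i, n;

	/* only use complete blocks, max. PAGE_SIZE */
	memcpy(ctrptr, iv, AES_BLOCK_SIZE);
	n = (nbytes > PAGE_SIZE) ? PAGE_SIZE : nbytes & ~(AES_BLOCK_SIZE - 1);
	for (i = (n / AES_BLOCK_SIZE) - 1; i > 0; i--) {
		memcpy(ctrptr + AES_BLOCK_SIZE, ctrptr, AES_BLOCK_SIZE);
		crypto_inc(ctrptr + AES_BLOCK_SIZE, AES_BLOCK_SIZE);
		ctrptr += AES_BLOCK_SIZE;
	}
	return n;
}

Dropping the modifier argument is deliberate: CTR encryption and decryption are the same keystream XOR, so one function serves both directions.
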
 
-static int ctr_aes_encrypt(struct blkcipher_desc *desc,
-			   struct scatterlist *dst, struct scatterlist *src,
-			   unsigned int nbytes)
-{
-	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
-	struct blkcipher_walk walk;
-
-	if (unlikely(!sctx->fc))
-		return fallback_blk_enc(desc, dst, src, nbytes);
-
-	blkcipher_walk_init(&walk, dst, src, nbytes);
-	return ctr_aes_crypt(desc, 0, &walk);
-}
-
-static int ctr_aes_decrypt(struct blkcipher_desc *desc,
-			   struct scatterlist *dst, struct scatterlist *src,
-			   unsigned int nbytes)
-{
-	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
-	struct blkcipher_walk walk;
-
-	if (unlikely(!sctx->fc))
-		return fallback_blk_dec(desc, dst, src, nbytes);
-
-	blkcipher_walk_init(&walk, dst, src, nbytes);
-	return ctr_aes_crypt(desc, CPACF_DECRYPT, &walk);
-}
-
-static struct crypto_alg ctr_aes_alg = {
-	.cra_name		= "ctr(aes)",
-	.cra_driver_name	= "ctr-aes-s390",
-	.cra_priority		= 402,	/* ecb-aes-s390 + 1 */
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
-				  CRYPTO_ALG_NEED_FALLBACK,
-	.cra_blocksize		= 1,
-	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
-	.cra_type		= &crypto_blkcipher_type,
-	.cra_module		= THIS_MODULE,
-	.cra_init		= fallback_init_blk,
-	.cra_exit		= fallback_exit_blk,
-	.cra_u			= {
-		.blkcipher = {
-			.min_keysize	= AES_MIN_KEY_SIZE,
-			.max_keysize	= AES_MAX_KEY_SIZE,
-			.ivsize		= AES_BLOCK_SIZE,
-			.setkey		= ctr_aes_set_key,
-			.encrypt	= ctr_aes_encrypt,
-			.decrypt	= ctr_aes_decrypt,
-		}
-	}
+static struct skcipher_alg ctr_aes_alg = {
+	.base.cra_name		= "ctr(aes)",
+	.base.cra_driver_name	= "ctr-aes-s390",
+	.base.cra_priority	= 402,	/* ecb-aes-s390 + 1 */
+	.base.cra_flags		= CRYPTO_ALG_NEED_FALLBACK,
+	.base.cra_blocksize	= 1,
+	.base.cra_ctxsize	= sizeof(struct s390_aes_ctx),
+	.base.cra_module	= THIS_MODULE,
+	.init			= fallback_init_skcipher,
+	.exit			= fallback_exit_skcipher,
+	.min_keysize		= AES_MIN_KEY_SIZE,
+	.max_keysize		= AES_MAX_KEY_SIZE,
+	.ivsize			= AES_BLOCK_SIZE,
+	.setkey			= ctr_aes_set_key,
+	.encrypt		= ctr_aes_crypt,
+	.decrypt		= ctr_aes_crypt,
+	.chunksize		= AES_BLOCK_SIZE,
 };
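
Two skcipher-only details here: .encrypt and .decrypt both point at ctr_aes_crypt (CTR is its own inverse), and the new .chunksize declares the keystream granularity; with cra_blocksize 1 any request length is legal, while templates and the walk code use chunksize to know where keystream block boundaries fall. Tracing a 20-byte request through the code above (illustrative):

	/* pass 1: walk.nbytes = 20 -> n = 16 bytes via cpacf_kmctr()
	 * pass 2: walk.nbytes =  4 -> encrypt one counter block into buf,
	 *         copy 4 bytes out (the "final block" path)
	 */
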
 
 static int gcm_aes_setkey(struct crypto_aead *tfm, const u8 *key,
...
 				  unsigned int nbytes)
 {
 	gw->walk_bytes_remain -= nbytes;
-	scatterwalk_unmap(&gw->walk);
+	scatterwalk_unmap(gw->walk_ptr);
 	scatterwalk_advance(&gw->walk, nbytes);
 	scatterwalk_done(&gw->walk, 0, gw->walk_bytes_remain);
 	gw->walk_ptr = NULL;
...
 		goto out;
 	}
 
-	scatterwalk_unmap(&gw->walk);
+	scatterwalk_unmap(gw->walk_ptr);
 	gw->walk_ptr = NULL;
 
 	gw->ptr = gw->buf;
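
The two gcm_sg_walk changes are a correctness fix riding along with the conversion: scatterwalk_unmap() expects the kernel mapping previously returned by scatterwalk_map(), which this code keeps in gw->walk_ptr, not the address of the walk descriptor itself. The intended pairing, sketched:

	gw->walk_ptr = scatterwalk_map(&gw->walk);	/* map current sg page */
	/* ... consume gw->walk_ptr ... */
	scatterwalk_unmap(gw->walk_ptr);		/* not &gw->walk */
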
...
 	},
 };
 
-static struct crypto_alg *aes_s390_algs_ptr[5];
-static int aes_s390_algs_num;
+static struct crypto_alg *aes_s390_alg;
+static struct skcipher_alg *aes_s390_skcipher_algs[4];
+static int aes_s390_skciphers_num;
 static struct aead_alg *aes_s390_aead_alg;
 
-static int aes_s390_register_alg(struct crypto_alg *alg)
+static int aes_s390_register_skcipher(struct skcipher_alg *alg)
 {
 	int ret;
 
-	ret = crypto_register_alg(alg);
+	ret = crypto_register_skcipher(alg);
 	if (!ret)
-		aes_s390_algs_ptr[aes_s390_algs_num++] = alg;
+		aes_s390_skcipher_algs[aes_s390_skciphers_num++] = alg;
 	return ret;
 }
 
 static void aes_s390_fini(void)
 {
-	while (aes_s390_algs_num--)
-		crypto_unregister_alg(aes_s390_algs_ptr[aes_s390_algs_num]);
+	if (aes_s390_alg)
+		crypto_unregister_alg(aes_s390_alg);
+	while (aes_s390_skciphers_num--)
+		crypto_unregister_skcipher(aes_s390_skcipher_algs[aes_s390_skciphers_num]);
 	if (ctrblk)
 		free_page((unsigned long) ctrblk);
 
...
 	if (cpacf_test_func(&km_functions, CPACF_KM_AES_128) ||
 	    cpacf_test_func(&km_functions, CPACF_KM_AES_192) ||
 	    cpacf_test_func(&km_functions, CPACF_KM_AES_256)) {
-		ret = aes_s390_register_alg(&aes_alg);
+		ret = crypto_register_alg(&aes_alg);
 		if (ret)
 			goto out_err;
-		ret = aes_s390_register_alg(&ecb_aes_alg);
+		aes_s390_alg = &aes_alg;
+		ret = aes_s390_register_skcipher(&ecb_aes_alg);
 		if (ret)
 			goto out_err;
 	}
...
 	if (cpacf_test_func(&kmc_functions, CPACF_KMC_AES_128) ||
 	    cpacf_test_func(&kmc_functions, CPACF_KMC_AES_192) ||
 	    cpacf_test_func(&kmc_functions, CPACF_KMC_AES_256)) {
-		ret = aes_s390_register_alg(&cbc_aes_alg);
+		ret = aes_s390_register_skcipher(&cbc_aes_alg);
 		if (ret)
 			goto out_err;
 	}
 
 	if (cpacf_test_func(&km_functions, CPACF_KM_XTS_128) ||
 	    cpacf_test_func(&km_functions, CPACF_KM_XTS_256)) {
-		ret = aes_s390_register_alg(&xts_aes_alg);
+		ret = aes_s390_register_skcipher(&xts_aes_alg);
 		if (ret)
 			goto out_err;
 	}
...
 			ret = -ENOMEM;
 			goto out_err;
 		}
-		ret = aes_s390_register_alg(&ctr_aes_alg);
+		ret = aes_s390_register_skcipher(&ctr_aes_alg);
 		if (ret)
 			goto out_err;
 	}
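
The single-block "aes" cipher keeps its classic crypto_alg registration and a dedicated aes_s390_alg pointer; only the four modes move to the skcipher registry (hence the [4] array). Because each mode is registered conditionally on its CPACF facility bits, the driver keeps per-algorithm bookkeeping; a driver registering everything unconditionally could use the bulk helpers instead (for contrast, illustrative):

	static struct skcipher_alg algs[] = { /* ecb, cbc, xts, ctr */ };

	ret = crypto_register_skciphers(algs, ARRAY_SIZE(algs));
	/* ... on module exit ... */
	crypto_unregister_skciphers(algs, ARRAY_SIZE(algs));
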
...
 
 MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
 MODULE_LICENSE("GPL");
+MODULE_IMPORT_NS(CRYPTO_INTERNAL);
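
The new <crypto/internal/cipher.h> include at the top and this MODULE_IMPORT_NS() belong together: the single-block crypto_cipher API is considered crypto-internal and its symbols live in the CRYPTO_INTERNAL namespace, so a module that still calls crypto_alloc_cipher()/crypto_cipher_setkey() for its fallback (as this driver does) must import the namespace or fail to load with an unresolved-symbol error. On the exporting side this looks roughly like (sketch, recalled from crypto/cipher.c):

	EXPORT_SYMBOL_NS_GPL(crypto_cipher_setkey, CRYPTO_INTERNAL);
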
---|