.. | .. |
---|
5 | 5 | * s390 implementation of the AES Cipher Algorithm with protected keys. |
---|
6 | 6 | * |
---|
7 | 7 | * s390 Version: |
---|
8 | | - * Copyright IBM Corp. 2017 |
---|
| 8 | + * Copyright IBM Corp. 2017,2020 |
---|
9 | 9 | * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com> |
---|
10 | 10 | * Harald Freudenberger <freude@de.ibm.com> |
---|
11 | 11 | */ |
---|
.. | .. |
---|
20 | 20 | #include <linux/module.h> |
---|
21 | 21 | #include <linux/cpufeature.h> |
---|
22 | 22 | #include <linux/init.h> |
---|
| 23 | +#include <linux/mutex.h> |
---|
23 | 24 | #include <linux/spinlock.h> |
---|
| 25 | +#include <crypto/internal/skcipher.h> |
---|
24 | 26 | #include <crypto/xts.h> |
---|
25 | 27 | #include <asm/cpacf.h> |
---|
26 | 28 | #include <asm/pkey.h> |
---|
27 | 29 | |
---|
| 30 | +/* |
---|
| 31 | + * Key blobs smaller/bigger than these defines are rejected |
---|
| 32 | + * by the common code even before the individual setkey function |
---|
| 33 | + * is called. As paes can handle different kinds of key blobs |
---|
| 34 | + * and padding is also possible, the limits need to be generous. |
---|
| 35 | + */ |
---|
| 36 | +#define PAES_MIN_KEYSIZE 16 |
---|
| 37 | +#define PAES_MAX_KEYSIZE 320 |
---|
| 38 | + |
---|
28 | 39 | static u8 *ctrblk; |
---|
29 | | -static DEFINE_SPINLOCK(ctrblk_lock); |
---|
| 40 | +static DEFINE_MUTEX(ctrblk_lock); |
---|
30 | 41 | |
---|
31 | 42 | static cpacf_mask_t km_functions, kmc_functions, kmctr_functions; |
---|
32 | 43 | |
---|
| 44 | +struct key_blob { |
---|
| 45 | + /* |
---|
| 46 | + * Small keys will be stored in the keybuf. Larger keys are |
---|
| 47 | + * stored in extra allocated memory. In both cases does |
---|
| 48 | + * key point to the memory where the key is stored. |
---|
| 49 | + * The code distinguishes by checking keylen against |
---|
| 50 | + * sizeof(keybuf). See the two following helper functions. |
---|
| 51 | + */ |
---|
| 52 | + u8 *key; |
---|
| 53 | + u8 keybuf[128]; |
---|
| 54 | + unsigned int keylen; |
---|
| 55 | +}; |
---|
| 56 | + |
---|
| 57 | +static inline int _key_to_kb(struct key_blob *kb, |
---|
| 58 | + const u8 *key, |
---|
| 59 | + unsigned int keylen) |
---|
| 60 | +{ |
---|
| 61 | + struct clearkey_header { |
---|
| 62 | + u8 type; |
---|
| 63 | + u8 res0[3]; |
---|
| 64 | + u8 version; |
---|
| 65 | + u8 res1[3]; |
---|
| 66 | + u32 keytype; |
---|
| 67 | + u32 len; |
---|
| 68 | + } __packed * h; |
---|
| 69 | + |
---|
| 70 | + switch (keylen) { |
---|
| 71 | + case 16: |
---|
| 72 | + case 24: |
---|
| 73 | + case 32: |
---|
| 74 | + /* clear key value, prepare pkey clear key token in keybuf */ |
---|
| 75 | + memset(kb->keybuf, 0, sizeof(kb->keybuf)); |
---|
| 76 | + h = (struct clearkey_header *) kb->keybuf; |
---|
| 77 | + h->version = 0x02; /* TOKVER_CLEAR_KEY */ |
---|
| 78 | + h->keytype = (keylen - 8) >> 3; |
---|
| 79 | + h->len = keylen; |
---|
| 80 | + memcpy(kb->keybuf + sizeof(*h), key, keylen); |
---|
| 81 | + kb->keylen = sizeof(*h) + keylen; |
---|
| 82 | + kb->key = kb->keybuf; |
---|
| 83 | + break; |
---|
| 84 | + default: |
---|
| 85 | + /* other key material, let pkey handle this */ |
---|
| 86 | + if (keylen <= sizeof(kb->keybuf)) |
---|
| 87 | + kb->key = kb->keybuf; |
---|
| 88 | + else { |
---|
| 89 | + kb->key = kmalloc(keylen, GFP_KERNEL); |
---|
| 90 | + if (!kb->key) |
---|
| 91 | + return -ENOMEM; |
---|
| 92 | + } |
---|
| 93 | + memcpy(kb->key, key, keylen); |
---|
| 94 | + kb->keylen = keylen; |
---|
| 95 | + break; |
---|
| 96 | + } |
---|
| 97 | + |
---|
| 98 | + return 0; |
---|
| 99 | +} |
---|
| 100 | + |
---|
| 101 | +static inline void _free_kb_keybuf(struct key_blob *kb) |
---|
| 102 | +{ |
---|
| 103 | + if (kb->key && kb->key != kb->keybuf |
---|
| 104 | + && kb->keylen > sizeof(kb->keybuf)) { |
---|
| 105 | + kfree(kb->key); |
---|
| 106 | + kb->key = NULL; |
---|
| 107 | + } |
---|
| 108 | +} |
---|
| 109 | + |
---|
33 | 110 | struct s390_paes_ctx { |
---|
34 | | - struct pkey_seckey sk; |
---|
| 111 | + struct key_blob kb; |
---|
35 | 112 | struct pkey_protkey pk; |
---|
| 113 | + spinlock_t pk_lock; |
---|
36 | 114 | unsigned long fc; |
---|
37 | 115 | }; |
---|
38 | 116 | |
---|
39 | 117 | struct s390_pxts_ctx { |
---|
40 | | - struct pkey_seckey sk[2]; |
---|
| 118 | + struct key_blob kb[2]; |
---|
41 | 119 | struct pkey_protkey pk[2]; |
---|
| 120 | + spinlock_t pk_lock; |
---|
42 | 121 | unsigned long fc; |
---|
43 | 122 | }; |
---|
44 | 123 | |
---|
45 | | -static inline int __paes_convert_key(struct pkey_seckey *sk, |
---|
| 124 | +static inline int __paes_keyblob2pkey(struct key_blob *kb, |
---|
46 | 125 | struct pkey_protkey *pk) |
---|
47 | 126 | { |
---|
48 | 127 | int i, ret; |
---|
49 | 128 | |
---|
50 | 129 | /* try three times in case of failure */ |
---|
51 | 130 | for (i = 0; i < 3; i++) { |
---|
52 | | - ret = pkey_skey2pkey(sk, pk); |
---|
| 131 | + ret = pkey_keyblob2pkey(kb->key, kb->keylen, pk); |
---|
53 | 132 | if (ret == 0) |
---|
54 | 133 | break; |
---|
55 | 134 | } |
---|
.. | .. |
---|
57 | 136 | return ret; |
---|
58 | 137 | } |
---|
59 | 138 | |
---|
60 | | -static int __paes_set_key(struct s390_paes_ctx *ctx) |
---|
| 139 | +static inline int __paes_convert_key(struct s390_paes_ctx *ctx) |
---|
| 140 | +{ |
---|
| 141 | + struct pkey_protkey pkey; |
---|
| 142 | + |
---|
| 143 | + if (__paes_keyblob2pkey(&ctx->kb, &pkey)) |
---|
| 144 | + return -EINVAL; |
---|
| 145 | + |
---|
| 146 | + spin_lock_bh(&ctx->pk_lock); |
---|
| 147 | + memcpy(&ctx->pk, &pkey, sizeof(pkey)); |
---|
| 148 | + spin_unlock_bh(&ctx->pk_lock); |
---|
| 149 | + |
---|
| 150 | + return 0; |
---|
| 151 | +} |
---|
| 152 | + |
---|
| 153 | +static int ecb_paes_init(struct crypto_skcipher *tfm) |
---|
| 154 | +{ |
---|
| 155 | + struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm); |
---|
| 156 | + |
---|
| 157 | + ctx->kb.key = NULL; |
---|
| 158 | + spin_lock_init(&ctx->pk_lock); |
---|
| 159 | + |
---|
| 160 | + return 0; |
---|
| 161 | +} |
---|
| 162 | + |
---|
| 163 | +static void ecb_paes_exit(struct crypto_skcipher *tfm) |
---|
| 164 | +{ |
---|
| 165 | + struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm); |
---|
| 166 | + |
---|
| 167 | + _free_kb_keybuf(&ctx->kb); |
---|
| 168 | +} |
---|
| 169 | + |
---|
| 170 | +static inline int __ecb_paes_set_key(struct s390_paes_ctx *ctx) |
---|
61 | 171 | { |
---|
62 | 172 | unsigned long fc; |
---|
63 | 173 | |
---|
64 | | - if (__paes_convert_key(&ctx->sk, &ctx->pk)) |
---|
| 174 | + if (__paes_convert_key(ctx)) |
---|
65 | 175 | return -EINVAL; |
---|
66 | 176 | |
---|
67 | 177 | /* Pick the correct function code based on the protected key type */ |
---|
.. | .. |
---|
75 | 185 | return ctx->fc ? 0 : -EINVAL; |
---|
76 | 186 | } |
---|
77 | 187 | |
---|
78 | | -static int ecb_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key, |
---|
| 188 | +static int ecb_paes_set_key(struct crypto_skcipher *tfm, const u8 *in_key, |
---|
79 | 189 | unsigned int key_len) |
---|
80 | 190 | { |
---|
81 | | - struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm); |
---|
| 191 | + int rc; |
---|
| 192 | + struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm); |
---|
82 | 193 | |
---|
83 | | - if (key_len != SECKEYBLOBSIZE) |
---|
84 | | - return -EINVAL; |
---|
| 194 | + _free_kb_keybuf(&ctx->kb); |
---|
| 195 | + rc = _key_to_kb(&ctx->kb, in_key, key_len); |
---|
| 196 | + if (rc) |
---|
| 197 | + return rc; |
---|
85 | 198 | |
---|
86 | | - memcpy(ctx->sk.seckey, in_key, SECKEYBLOBSIZE); |
---|
87 | | - if (__paes_set_key(ctx)) { |
---|
88 | | - tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN; |
---|
89 | | - return -EINVAL; |
---|
90 | | - } |
---|
91 | | - return 0; |
---|
| 199 | + return __ecb_paes_set_key(ctx); |
---|
92 | 200 | } |
---|
93 | 201 | |
---|
94 | | -static int ecb_paes_crypt(struct blkcipher_desc *desc, |
---|
95 | | - unsigned long modifier, |
---|
96 | | - struct blkcipher_walk *walk) |
---|
| 202 | +static int ecb_paes_crypt(struct skcipher_request *req, unsigned long modifier) |
---|
97 | 203 | { |
---|
98 | | - struct s390_paes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm); |
---|
| 204 | + struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); |
---|
| 205 | + struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm); |
---|
| 206 | + struct skcipher_walk walk; |
---|
99 | 207 | unsigned int nbytes, n, k; |
---|
100 | 208 | int ret; |
---|
| 209 | + struct { |
---|
| 210 | + u8 key[MAXPROTKEYSIZE]; |
---|
| 211 | + } param; |
---|
101 | 212 | |
---|
102 | | - ret = blkcipher_walk_virt(desc, walk); |
---|
103 | | - while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) { |
---|
| 213 | + ret = skcipher_walk_virt(&walk, req, false); |
---|
| 214 | + if (ret) |
---|
| 215 | + return ret; |
---|
| 216 | + |
---|
| 217 | + spin_lock_bh(&ctx->pk_lock); |
---|
| 218 | + memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE); |
---|
| 219 | + spin_unlock_bh(&ctx->pk_lock); |
---|
| 220 | + |
---|
| 221 | + while ((nbytes = walk.nbytes) != 0) { |
---|
104 | 222 | /* only use complete blocks */ |
---|
105 | 223 | n = nbytes & ~(AES_BLOCK_SIZE - 1); |
---|
106 | | - k = cpacf_km(ctx->fc | modifier, ctx->pk.protkey, |
---|
107 | | - walk->dst.virt.addr, walk->src.virt.addr, n); |
---|
| 224 | + k = cpacf_km(ctx->fc | modifier, ¶m, |
---|
| 225 | + walk.dst.virt.addr, walk.src.virt.addr, n); |
---|
108 | 226 | if (k) |
---|
109 | | - ret = blkcipher_walk_done(desc, walk, nbytes - k); |
---|
| 227 | + ret = skcipher_walk_done(&walk, nbytes - k); |
---|
110 | 228 | if (k < n) { |
---|
111 | | - if (__paes_set_key(ctx) != 0) |
---|
112 | | - return blkcipher_walk_done(desc, walk, -EIO); |
---|
| 229 | + if (__paes_convert_key(ctx)) |
---|
| 230 | + return skcipher_walk_done(&walk, -EIO); |
---|
| 231 | + spin_lock_bh(&ctx->pk_lock); |
---|
| 232 | + memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE); |
---|
| 233 | + spin_unlock_bh(&ctx->pk_lock); |
---|
113 | 234 | } |
---|
114 | 235 | } |
---|
115 | 236 | return ret; |
---|
116 | 237 | } |
---|
117 | 238 | |
---|
118 | | -static int ecb_paes_encrypt(struct blkcipher_desc *desc, |
---|
119 | | - struct scatterlist *dst, struct scatterlist *src, |
---|
120 | | - unsigned int nbytes) |
---|
| 239 | +static int ecb_paes_encrypt(struct skcipher_request *req) |
---|
121 | 240 | { |
---|
122 | | - struct blkcipher_walk walk; |
---|
123 | | - |
---|
124 | | - blkcipher_walk_init(&walk, dst, src, nbytes); |
---|
125 | | - return ecb_paes_crypt(desc, CPACF_ENCRYPT, &walk); |
---|
| 241 | + return ecb_paes_crypt(req, 0); |
---|
126 | 242 | } |
---|
127 | 243 | |
---|
128 | | -static int ecb_paes_decrypt(struct blkcipher_desc *desc, |
---|
129 | | - struct scatterlist *dst, struct scatterlist *src, |
---|
130 | | - unsigned int nbytes) |
---|
| 244 | +static int ecb_paes_decrypt(struct skcipher_request *req) |
---|
131 | 245 | { |
---|
132 | | - struct blkcipher_walk walk; |
---|
133 | | - |
---|
134 | | - blkcipher_walk_init(&walk, dst, src, nbytes); |
---|
135 | | - return ecb_paes_crypt(desc, CPACF_DECRYPT, &walk); |
---|
| 246 | + return ecb_paes_crypt(req, CPACF_DECRYPT); |
---|
136 | 247 | } |
---|
137 | 248 | |
---|
138 | | -static struct crypto_alg ecb_paes_alg = { |
---|
139 | | - .cra_name = "ecb(paes)", |
---|
140 | | - .cra_driver_name = "ecb-paes-s390", |
---|
141 | | - .cra_priority = 401, /* combo: aes + ecb + 1 */ |
---|
142 | | - .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER, |
---|
143 | | - .cra_blocksize = AES_BLOCK_SIZE, |
---|
144 | | - .cra_ctxsize = sizeof(struct s390_paes_ctx), |
---|
145 | | - .cra_type = &crypto_blkcipher_type, |
---|
146 | | - .cra_module = THIS_MODULE, |
---|
147 | | - .cra_list = LIST_HEAD_INIT(ecb_paes_alg.cra_list), |
---|
148 | | - .cra_u = { |
---|
149 | | - .blkcipher = { |
---|
150 | | - .min_keysize = SECKEYBLOBSIZE, |
---|
151 | | - .max_keysize = SECKEYBLOBSIZE, |
---|
152 | | - .setkey = ecb_paes_set_key, |
---|
153 | | - .encrypt = ecb_paes_encrypt, |
---|
154 | | - .decrypt = ecb_paes_decrypt, |
---|
155 | | - } |
---|
156 | | - } |
---|
| 249 | +static struct skcipher_alg ecb_paes_alg = { |
---|
| 250 | + .base.cra_name = "ecb(paes)", |
---|
| 251 | + .base.cra_driver_name = "ecb-paes-s390", |
---|
| 252 | + .base.cra_priority = 401, /* combo: aes + ecb + 1 */ |
---|
| 253 | + .base.cra_blocksize = AES_BLOCK_SIZE, |
---|
| 254 | + .base.cra_ctxsize = sizeof(struct s390_paes_ctx), |
---|
| 255 | + .base.cra_module = THIS_MODULE, |
---|
| 256 | + .base.cra_list = LIST_HEAD_INIT(ecb_paes_alg.base.cra_list), |
---|
| 257 | + .init = ecb_paes_init, |
---|
| 258 | + .exit = ecb_paes_exit, |
---|
| 259 | + .min_keysize = PAES_MIN_KEYSIZE, |
---|
| 260 | + .max_keysize = PAES_MAX_KEYSIZE, |
---|
| 261 | + .setkey = ecb_paes_set_key, |
---|
| 262 | + .encrypt = ecb_paes_encrypt, |
---|
| 263 | + .decrypt = ecb_paes_decrypt, |
---|
157 | 264 | }; |
---|
158 | 265 | |
---|
159 | | -static int __cbc_paes_set_key(struct s390_paes_ctx *ctx) |
---|
| 266 | +static int cbc_paes_init(struct crypto_skcipher *tfm) |
---|
| 267 | +{ |
---|
| 268 | + struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm); |
---|
| 269 | + |
---|
| 270 | + ctx->kb.key = NULL; |
---|
| 271 | + spin_lock_init(&ctx->pk_lock); |
---|
| 272 | + |
---|
| 273 | + return 0; |
---|
| 274 | +} |
---|
| 275 | + |
---|
| 276 | +static void cbc_paes_exit(struct crypto_skcipher *tfm) |
---|
| 277 | +{ |
---|
| 278 | + struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm); |
---|
| 279 | + |
---|
| 280 | + _free_kb_keybuf(&ctx->kb); |
---|
| 281 | +} |
---|
| 282 | + |
---|
| 283 | +static inline int __cbc_paes_set_key(struct s390_paes_ctx *ctx) |
---|
160 | 284 | { |
---|
161 | 285 | unsigned long fc; |
---|
162 | 286 | |
---|
163 | | - if (__paes_convert_key(&ctx->sk, &ctx->pk)) |
---|
| 287 | + if (__paes_convert_key(ctx)) |
---|
164 | 288 | return -EINVAL; |
---|
165 | 289 | |
---|
166 | 290 | /* Pick the correct function code based on the protected key type */ |
---|
.. | .. |
---|
174 | 298 | return ctx->fc ? 0 : -EINVAL; |
---|
175 | 299 | } |
---|
176 | 300 | |
---|
177 | | -static int cbc_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key, |
---|
| 301 | +static int cbc_paes_set_key(struct crypto_skcipher *tfm, const u8 *in_key, |
---|
178 | 302 | unsigned int key_len) |
---|
179 | 303 | { |
---|
180 | | - struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm); |
---|
| 304 | + int rc; |
---|
| 305 | + struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm); |
---|
181 | 306 | |
---|
182 | | - memcpy(ctx->sk.seckey, in_key, SECKEYBLOBSIZE); |
---|
183 | | - if (__cbc_paes_set_key(ctx)) { |
---|
184 | | - tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN; |
---|
185 | | - return -EINVAL; |
---|
186 | | - } |
---|
187 | | - return 0; |
---|
| 307 | + _free_kb_keybuf(&ctx->kb); |
---|
| 308 | + rc = _key_to_kb(&ctx->kb, in_key, key_len); |
---|
| 309 | + if (rc) |
---|
| 310 | + return rc; |
---|
| 311 | + |
---|
| 312 | + return __cbc_paes_set_key(ctx); |
---|
188 | 313 | } |
---|
189 | 314 | |
---|
190 | | -static int cbc_paes_crypt(struct blkcipher_desc *desc, unsigned long modifier, |
---|
191 | | - struct blkcipher_walk *walk) |
---|
| 315 | +static int cbc_paes_crypt(struct skcipher_request *req, unsigned long modifier) |
---|
192 | 316 | { |
---|
193 | | - struct s390_paes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm); |
---|
| 317 | + struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); |
---|
| 318 | + struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm); |
---|
| 319 | + struct skcipher_walk walk; |
---|
194 | 320 | unsigned int nbytes, n, k; |
---|
195 | 321 | int ret; |
---|
196 | 322 | struct { |
---|
.. | .. |
---|
198 | 324 | u8 key[MAXPROTKEYSIZE]; |
---|
199 | 325 | } param; |
---|
200 | 326 | |
---|
201 | | - ret = blkcipher_walk_virt(desc, walk); |
---|
202 | | - memcpy(param.iv, walk->iv, AES_BLOCK_SIZE); |
---|
| 327 | + ret = skcipher_walk_virt(&walk, req, false); |
---|
| 328 | + if (ret) |
---|
| 329 | + return ret; |
---|
| 330 | + |
---|
| 331 | + memcpy(param.iv, walk.iv, AES_BLOCK_SIZE); |
---|
| 332 | + spin_lock_bh(&ctx->pk_lock); |
---|
203 | 333 | memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE); |
---|
204 | | - while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) { |
---|
| 334 | + spin_unlock_bh(&ctx->pk_lock); |
---|
| 335 | + |
---|
| 336 | + while ((nbytes = walk.nbytes) != 0) { |
---|
205 | 337 | /* only use complete blocks */ |
---|
206 | 338 | n = nbytes & ~(AES_BLOCK_SIZE - 1); |
---|
207 | 339 | k = cpacf_kmc(ctx->fc | modifier, ¶m, |
---|
208 | | - walk->dst.virt.addr, walk->src.virt.addr, n); |
---|
209 | | - if (k) |
---|
210 | | - ret = blkcipher_walk_done(desc, walk, nbytes - k); |
---|
| 340 | + walk.dst.virt.addr, walk.src.virt.addr, n); |
---|
| 341 | + if (k) { |
---|
| 342 | + memcpy(walk.iv, param.iv, AES_BLOCK_SIZE); |
---|
| 343 | + ret = skcipher_walk_done(&walk, nbytes - k); |
---|
| 344 | + } |
---|
211 | 345 | if (k < n) { |
---|
212 | | - if (__cbc_paes_set_key(ctx) != 0) |
---|
213 | | - return blkcipher_walk_done(desc, walk, -EIO); |
---|
| 346 | + if (__paes_convert_key(ctx)) |
---|
| 347 | + return skcipher_walk_done(&walk, -EIO); |
---|
| 348 | + spin_lock_bh(&ctx->pk_lock); |
---|
214 | 349 | memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE); |
---|
| 350 | + spin_unlock_bh(&ctx->pk_lock); |
---|
215 | 351 | } |
---|
216 | 352 | } |
---|
217 | | - memcpy(walk->iv, param.iv, AES_BLOCK_SIZE); |
---|
218 | 353 | return ret; |
---|
219 | 354 | } |
---|
220 | 355 | |
---|
221 | | -static int cbc_paes_encrypt(struct blkcipher_desc *desc, |
---|
222 | | - struct scatterlist *dst, struct scatterlist *src, |
---|
223 | | - unsigned int nbytes) |
---|
| 356 | +static int cbc_paes_encrypt(struct skcipher_request *req) |
---|
224 | 357 | { |
---|
225 | | - struct blkcipher_walk walk; |
---|
226 | | - |
---|
227 | | - blkcipher_walk_init(&walk, dst, src, nbytes); |
---|
228 | | - return cbc_paes_crypt(desc, 0, &walk); |
---|
| 358 | + return cbc_paes_crypt(req, 0); |
---|
229 | 359 | } |
---|
230 | 360 | |
---|
231 | | -static int cbc_paes_decrypt(struct blkcipher_desc *desc, |
---|
232 | | - struct scatterlist *dst, struct scatterlist *src, |
---|
233 | | - unsigned int nbytes) |
---|
| 361 | +static int cbc_paes_decrypt(struct skcipher_request *req) |
---|
234 | 362 | { |
---|
235 | | - struct blkcipher_walk walk; |
---|
236 | | - |
---|
237 | | - blkcipher_walk_init(&walk, dst, src, nbytes); |
---|
238 | | - return cbc_paes_crypt(desc, CPACF_DECRYPT, &walk); |
---|
| 363 | + return cbc_paes_crypt(req, CPACF_DECRYPT); |
---|
239 | 364 | } |
---|
240 | 365 | |
---|
241 | | -static struct crypto_alg cbc_paes_alg = { |
---|
242 | | - .cra_name = "cbc(paes)", |
---|
243 | | - .cra_driver_name = "cbc-paes-s390", |
---|
244 | | - .cra_priority = 402, /* ecb-paes-s390 + 1 */ |
---|
245 | | - .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER, |
---|
246 | | - .cra_blocksize = AES_BLOCK_SIZE, |
---|
247 | | - .cra_ctxsize = sizeof(struct s390_paes_ctx), |
---|
248 | | - .cra_type = &crypto_blkcipher_type, |
---|
249 | | - .cra_module = THIS_MODULE, |
---|
250 | | - .cra_list = LIST_HEAD_INIT(cbc_paes_alg.cra_list), |
---|
251 | | - .cra_u = { |
---|
252 | | - .blkcipher = { |
---|
253 | | - .min_keysize = SECKEYBLOBSIZE, |
---|
254 | | - .max_keysize = SECKEYBLOBSIZE, |
---|
255 | | - .ivsize = AES_BLOCK_SIZE, |
---|
256 | | - .setkey = cbc_paes_set_key, |
---|
257 | | - .encrypt = cbc_paes_encrypt, |
---|
258 | | - .decrypt = cbc_paes_decrypt, |
---|
259 | | - } |
---|
260 | | - } |
---|
| 366 | +static struct skcipher_alg cbc_paes_alg = { |
---|
| 367 | + .base.cra_name = "cbc(paes)", |
---|
| 368 | + .base.cra_driver_name = "cbc-paes-s390", |
---|
| 369 | + .base.cra_priority = 402, /* ecb-paes-s390 + 1 */ |
---|
| 370 | + .base.cra_blocksize = AES_BLOCK_SIZE, |
---|
| 371 | + .base.cra_ctxsize = sizeof(struct s390_paes_ctx), |
---|
| 372 | + .base.cra_module = THIS_MODULE, |
---|
| 373 | + .base.cra_list = LIST_HEAD_INIT(cbc_paes_alg.base.cra_list), |
---|
| 374 | + .init = cbc_paes_init, |
---|
| 375 | + .exit = cbc_paes_exit, |
---|
| 376 | + .min_keysize = PAES_MIN_KEYSIZE, |
---|
| 377 | + .max_keysize = PAES_MAX_KEYSIZE, |
---|
| 378 | + .ivsize = AES_BLOCK_SIZE, |
---|
| 379 | + .setkey = cbc_paes_set_key, |
---|
| 380 | + .encrypt = cbc_paes_encrypt, |
---|
| 381 | + .decrypt = cbc_paes_decrypt, |
---|
261 | 382 | }; |
---|
262 | 383 | |
---|
263 | | -static int __xts_paes_set_key(struct s390_pxts_ctx *ctx) |
---|
| 384 | +static int xts_paes_init(struct crypto_skcipher *tfm) |
---|
| 385 | +{ |
---|
| 386 | + struct s390_pxts_ctx *ctx = crypto_skcipher_ctx(tfm); |
---|
| 387 | + |
---|
| 388 | + ctx->kb[0].key = NULL; |
---|
| 389 | + ctx->kb[1].key = NULL; |
---|
| 390 | + spin_lock_init(&ctx->pk_lock); |
---|
| 391 | + |
---|
| 392 | + return 0; |
---|
| 393 | +} |
---|
| 394 | + |
---|
| 395 | +static void xts_paes_exit(struct crypto_skcipher *tfm) |
---|
| 396 | +{ |
---|
| 397 | + struct s390_pxts_ctx *ctx = crypto_skcipher_ctx(tfm); |
---|
| 398 | + |
---|
| 399 | + _free_kb_keybuf(&ctx->kb[0]); |
---|
| 400 | + _free_kb_keybuf(&ctx->kb[1]); |
---|
| 401 | +} |
---|
| 402 | + |
---|
| 403 | +static inline int __xts_paes_convert_key(struct s390_pxts_ctx *ctx) |
---|
| 404 | +{ |
---|
| 405 | + struct pkey_protkey pkey0, pkey1; |
---|
| 406 | + |
---|
| 407 | + if (__paes_keyblob2pkey(&ctx->kb[0], &pkey0) || |
---|
| 408 | + __paes_keyblob2pkey(&ctx->kb[1], &pkey1)) |
---|
| 409 | + return -EINVAL; |
---|
| 410 | + |
---|
| 411 | + spin_lock_bh(&ctx->pk_lock); |
---|
| 412 | + memcpy(&ctx->pk[0], &pkey0, sizeof(pkey0)); |
---|
| 413 | + memcpy(&ctx->pk[1], &pkey1, sizeof(pkey1)); |
---|
| 414 | + spin_unlock_bh(&ctx->pk_lock); |
---|
| 415 | + |
---|
| 416 | + return 0; |
---|
| 417 | +} |
---|
| 418 | + |
---|
| 419 | +static inline int __xts_paes_set_key(struct s390_pxts_ctx *ctx) |
---|
264 | 420 | { |
---|
265 | 421 | unsigned long fc; |
---|
266 | 422 | |
---|
267 | | - if (__paes_convert_key(&ctx->sk[0], &ctx->pk[0]) || |
---|
268 | | - __paes_convert_key(&ctx->sk[1], &ctx->pk[1])) |
---|
| 423 | + if (__xts_paes_convert_key(ctx)) |
---|
269 | 424 | return -EINVAL; |
---|
270 | 425 | |
---|
271 | 426 | if (ctx->pk[0].type != ctx->pk[1].type) |
---|
.. | .. |
---|
282 | 437 | return ctx->fc ? 0 : -EINVAL; |
---|
283 | 438 | } |
---|
284 | 439 | |
---|
285 | | -static int xts_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key, |
---|
286 | | - unsigned int key_len) |
---|
| 440 | +static int xts_paes_set_key(struct crypto_skcipher *tfm, const u8 *in_key, |
---|
| 441 | + unsigned int xts_key_len) |
---|
287 | 442 | { |
---|
288 | | - struct s390_pxts_ctx *ctx = crypto_tfm_ctx(tfm); |
---|
| 443 | + int rc; |
---|
| 444 | + struct s390_pxts_ctx *ctx = crypto_skcipher_ctx(tfm); |
---|
289 | 445 | u8 ckey[2 * AES_MAX_KEY_SIZE]; |
---|
290 | | - unsigned int ckey_len; |
---|
| 446 | + unsigned int ckey_len, key_len; |
---|
291 | 447 | |
---|
292 | | - memcpy(ctx->sk[0].seckey, in_key, SECKEYBLOBSIZE); |
---|
293 | | - memcpy(ctx->sk[1].seckey, in_key + SECKEYBLOBSIZE, SECKEYBLOBSIZE); |
---|
294 | | - if (__xts_paes_set_key(ctx)) { |
---|
295 | | - tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN; |
---|
| 448 | + if (xts_key_len % 2) |
---|
296 | 449 | return -EINVAL; |
---|
297 | | - } |
---|
| 450 | + |
---|
| 451 | + key_len = xts_key_len / 2; |
---|
| 452 | + |
---|
| 453 | + _free_kb_keybuf(&ctx->kb[0]); |
---|
| 454 | + _free_kb_keybuf(&ctx->kb[1]); |
---|
| 455 | + rc = _key_to_kb(&ctx->kb[0], in_key, key_len); |
---|
| 456 | + if (rc) |
---|
| 457 | + return rc; |
---|
| 458 | + rc = _key_to_kb(&ctx->kb[1], in_key + key_len, key_len); |
---|
| 459 | + if (rc) |
---|
| 460 | + return rc; |
---|
| 461 | + |
---|
| 462 | + rc = __xts_paes_set_key(ctx); |
---|
| 463 | + if (rc) |
---|
| 464 | + return rc; |
---|
298 | 465 | |
---|
299 | 466 | /* |
---|
300 | 467 | * xts_check_key verifies the key length is not odd and makes |
---|
.. | .. |
---|
305 | 472 | AES_KEYSIZE_128 : AES_KEYSIZE_256; |
---|
306 | 473 | memcpy(ckey, ctx->pk[0].protkey, ckey_len); |
---|
307 | 474 | memcpy(ckey + ckey_len, ctx->pk[1].protkey, ckey_len); |
---|
308 | | - return xts_check_key(tfm, ckey, 2*ckey_len); |
---|
| 475 | + return xts_verify_key(tfm, ckey, 2*ckey_len); |
---|
309 | 476 | } |
---|
310 | 477 | |
---|
311 | | -static int xts_paes_crypt(struct blkcipher_desc *desc, unsigned long modifier, |
---|
312 | | - struct blkcipher_walk *walk) |
---|
| 478 | +static int xts_paes_crypt(struct skcipher_request *req, unsigned long modifier) |
---|
313 | 479 | { |
---|
314 | | - struct s390_pxts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm); |
---|
| 480 | + struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); |
---|
| 481 | + struct s390_pxts_ctx *ctx = crypto_skcipher_ctx(tfm); |
---|
| 482 | + struct skcipher_walk walk; |
---|
315 | 483 | unsigned int keylen, offset, nbytes, n, k; |
---|
316 | 484 | int ret; |
---|
317 | 485 | struct { |
---|
.. | .. |
---|
326 | 494 | u8 init[16]; |
---|
327 | 495 | } xts_param; |
---|
328 | 496 | |
---|
329 | | - ret = blkcipher_walk_virt(desc, walk); |
---|
| 497 | + ret = skcipher_walk_virt(&walk, req, false); |
---|
| 498 | + if (ret) |
---|
| 499 | + return ret; |
---|
| 500 | + |
---|
330 | 501 | keylen = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ? 48 : 64; |
---|
331 | 502 | offset = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ? 16 : 0; |
---|
332 | | -retry: |
---|
333 | | - memset(&pcc_param, 0, sizeof(pcc_param)); |
---|
334 | | - memcpy(pcc_param.tweak, walk->iv, sizeof(pcc_param.tweak)); |
---|
335 | | - memcpy(pcc_param.key + offset, ctx->pk[1].protkey, keylen); |
---|
336 | | - cpacf_pcc(ctx->fc, pcc_param.key + offset); |
---|
337 | 503 | |
---|
| 504 | + memset(&pcc_param, 0, sizeof(pcc_param)); |
---|
| 505 | + memcpy(pcc_param.tweak, walk.iv, sizeof(pcc_param.tweak)); |
---|
| 506 | + spin_lock_bh(&ctx->pk_lock); |
---|
| 507 | + memcpy(pcc_param.key + offset, ctx->pk[1].protkey, keylen); |
---|
338 | 508 | memcpy(xts_param.key + offset, ctx->pk[0].protkey, keylen); |
---|
| 509 | + spin_unlock_bh(&ctx->pk_lock); |
---|
| 510 | + cpacf_pcc(ctx->fc, pcc_param.key + offset); |
---|
339 | 511 | memcpy(xts_param.init, pcc_param.xts, 16); |
---|
340 | 512 | |
---|
341 | | - while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) { |
---|
| 513 | + while ((nbytes = walk.nbytes) != 0) { |
---|
342 | 514 | /* only use complete blocks */ |
---|
343 | 515 | n = nbytes & ~(AES_BLOCK_SIZE - 1); |
---|
344 | 516 | k = cpacf_km(ctx->fc | modifier, xts_param.key + offset, |
---|
345 | | - walk->dst.virt.addr, walk->src.virt.addr, n); |
---|
| 517 | + walk.dst.virt.addr, walk.src.virt.addr, n); |
---|
346 | 518 | if (k) |
---|
347 | | - ret = blkcipher_walk_done(desc, walk, nbytes - k); |
---|
| 519 | + ret = skcipher_walk_done(&walk, nbytes - k); |
---|
348 | 520 | if (k < n) { |
---|
349 | | - if (__xts_paes_set_key(ctx) != 0) |
---|
350 | | - return blkcipher_walk_done(desc, walk, -EIO); |
---|
351 | | - goto retry; |
---|
| 521 | + if (__xts_paes_convert_key(ctx)) |
---|
| 522 | + return skcipher_walk_done(&walk, -EIO); |
---|
| 523 | + spin_lock_bh(&ctx->pk_lock); |
---|
| 524 | + memcpy(xts_param.key + offset, |
---|
| 525 | + ctx->pk[0].protkey, keylen); |
---|
| 526 | + spin_unlock_bh(&ctx->pk_lock); |
---|
352 | 527 | } |
---|
353 | 528 | } |
---|
| 529 | + |
---|
354 | 530 | return ret; |
---|
355 | 531 | } |
---|
356 | 532 | |
---|
357 | | -static int xts_paes_encrypt(struct blkcipher_desc *desc, |
---|
358 | | - struct scatterlist *dst, struct scatterlist *src, |
---|
359 | | - unsigned int nbytes) |
---|
| 533 | +static int xts_paes_encrypt(struct skcipher_request *req) |
---|
360 | 534 | { |
---|
361 | | - struct blkcipher_walk walk; |
---|
362 | | - |
---|
363 | | - blkcipher_walk_init(&walk, dst, src, nbytes); |
---|
364 | | - return xts_paes_crypt(desc, 0, &walk); |
---|
| 535 | + return xts_paes_crypt(req, 0); |
---|
365 | 536 | } |
---|
366 | 537 | |
---|
367 | | -static int xts_paes_decrypt(struct blkcipher_desc *desc, |
---|
368 | | - struct scatterlist *dst, struct scatterlist *src, |
---|
369 | | - unsigned int nbytes) |
---|
| 538 | +static int xts_paes_decrypt(struct skcipher_request *req) |
---|
370 | 539 | { |
---|
371 | | - struct blkcipher_walk walk; |
---|
372 | | - |
---|
373 | | - blkcipher_walk_init(&walk, dst, src, nbytes); |
---|
374 | | - return xts_paes_crypt(desc, CPACF_DECRYPT, &walk); |
---|
| 540 | + return xts_paes_crypt(req, CPACF_DECRYPT); |
---|
375 | 541 | } |
---|
376 | 542 | |
---|
377 | | -static struct crypto_alg xts_paes_alg = { |
---|
378 | | - .cra_name = "xts(paes)", |
---|
379 | | - .cra_driver_name = "xts-paes-s390", |
---|
380 | | - .cra_priority = 402, /* ecb-paes-s390 + 1 */ |
---|
381 | | - .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER, |
---|
382 | | - .cra_blocksize = AES_BLOCK_SIZE, |
---|
383 | | - .cra_ctxsize = sizeof(struct s390_pxts_ctx), |
---|
384 | | - .cra_type = &crypto_blkcipher_type, |
---|
385 | | - .cra_module = THIS_MODULE, |
---|
386 | | - .cra_list = LIST_HEAD_INIT(xts_paes_alg.cra_list), |
---|
387 | | - .cra_u = { |
---|
388 | | - .blkcipher = { |
---|
389 | | - .min_keysize = 2 * SECKEYBLOBSIZE, |
---|
390 | | - .max_keysize = 2 * SECKEYBLOBSIZE, |
---|
391 | | - .ivsize = AES_BLOCK_SIZE, |
---|
392 | | - .setkey = xts_paes_set_key, |
---|
393 | | - .encrypt = xts_paes_encrypt, |
---|
394 | | - .decrypt = xts_paes_decrypt, |
---|
395 | | - } |
---|
396 | | - } |
---|
| 543 | +static struct skcipher_alg xts_paes_alg = { |
---|
| 544 | + .base.cra_name = "xts(paes)", |
---|
| 545 | + .base.cra_driver_name = "xts-paes-s390", |
---|
| 546 | + .base.cra_priority = 402, /* ecb-paes-s390 + 1 */ |
---|
| 547 | + .base.cra_blocksize = AES_BLOCK_SIZE, |
---|
| 548 | + .base.cra_ctxsize = sizeof(struct s390_pxts_ctx), |
---|
| 549 | + .base.cra_module = THIS_MODULE, |
---|
| 550 | + .base.cra_list = LIST_HEAD_INIT(xts_paes_alg.base.cra_list), |
---|
| 551 | + .init = xts_paes_init, |
---|
| 552 | + .exit = xts_paes_exit, |
---|
| 553 | + .min_keysize = 2 * PAES_MIN_KEYSIZE, |
---|
| 554 | + .max_keysize = 2 * PAES_MAX_KEYSIZE, |
---|
| 555 | + .ivsize = AES_BLOCK_SIZE, |
---|
| 556 | + .setkey = xts_paes_set_key, |
---|
| 557 | + .encrypt = xts_paes_encrypt, |
---|
| 558 | + .decrypt = xts_paes_decrypt, |
---|
397 | 559 | }; |
---|
398 | 560 | |
---|
399 | | -static int __ctr_paes_set_key(struct s390_paes_ctx *ctx) |
---|
| 561 | +static int ctr_paes_init(struct crypto_skcipher *tfm) |
---|
| 562 | +{ |
---|
| 563 | + struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm); |
---|
| 564 | + |
---|
| 565 | + ctx->kb.key = NULL; |
---|
| 566 | + spin_lock_init(&ctx->pk_lock); |
---|
| 567 | + |
---|
| 568 | + return 0; |
---|
| 569 | +} |
---|
| 570 | + |
---|
| 571 | +static void ctr_paes_exit(struct crypto_skcipher *tfm) |
---|
| 572 | +{ |
---|
| 573 | + struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm); |
---|
| 574 | + |
---|
| 575 | + _free_kb_keybuf(&ctx->kb); |
---|
| 576 | +} |
---|
| 577 | + |
---|
| 578 | +static inline int __ctr_paes_set_key(struct s390_paes_ctx *ctx) |
---|
400 | 579 | { |
---|
401 | 580 | unsigned long fc; |
---|
402 | 581 | |
---|
403 | | - if (__paes_convert_key(&ctx->sk, &ctx->pk)) |
---|
| 582 | + if (__paes_convert_key(ctx)) |
---|
404 | 583 | return -EINVAL; |
---|
405 | 584 | |
---|
406 | 585 | /* Pick the correct function code based on the protected key type */ |
---|
.. | .. |
---|
415 | 594 | return ctx->fc ? 0 : -EINVAL; |
---|
416 | 595 | } |
---|
417 | 596 | |
---|
418 | | -static int ctr_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key, |
---|
| 597 | +static int ctr_paes_set_key(struct crypto_skcipher *tfm, const u8 *in_key, |
---|
419 | 598 | unsigned int key_len) |
---|
420 | 599 | { |
---|
421 | | - struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm); |
---|
| 600 | + int rc; |
---|
| 601 | + struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm); |
---|
422 | 602 | |
---|
423 | | - memcpy(ctx->sk.seckey, in_key, key_len); |
---|
424 | | - if (__ctr_paes_set_key(ctx)) { |
---|
425 | | - tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN; |
---|
426 | | - return -EINVAL; |
---|
427 | | - } |
---|
428 | | - return 0; |
---|
| 603 | + _free_kb_keybuf(&ctx->kb); |
---|
| 604 | + rc = _key_to_kb(&ctx->kb, in_key, key_len); |
---|
| 605 | + if (rc) |
---|
| 606 | + return rc; |
---|
| 607 | + |
---|
| 608 | + return __ctr_paes_set_key(ctx); |
---|
429 | 609 | } |
---|
430 | 610 | |
---|
431 | 611 | static unsigned int __ctrblk_init(u8 *ctrptr, u8 *iv, unsigned int nbytes) |
---|
.. | .. |
---|
443 | 623 | return n; |
---|
444 | 624 | } |
---|
445 | 625 | |
---|
/*
 * CTR mode encrypt/decrypt (symmetric in CTR, so one function serves both).
 * Runs the KMCTR instruction on a local snapshot of the protected key and
 * re-converts the key whenever the instruction completes short (k < n),
 * which indicates the protected key became invalid (e.g. after suspend).
 */
static int ctr_paes_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);
	u8 buf[AES_BLOCK_SIZE], *ctrptr;
	struct skcipher_walk walk;
	unsigned int nbytes, n, k;
	int ret, locked;
	/* KMCTR parameter block: holds just the protected key */
	struct {
		u8 key[MAXPROTKEYSIZE];
	} param;

	ret = skcipher_walk_virt(&walk, req, false);
	if (ret)
		return ret;

	/* take a consistent snapshot of the protected key */
	spin_lock_bh(&ctx->pk_lock);
	memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
	spin_unlock_bh(&ctx->pk_lock);

	/*
	 * Try to grab the shared counter block page for multi-block
	 * processing; if unavailable, fall back to one block at a time
	 * using walk.iv directly.
	 */
	locked = mutex_trylock(&ctrblk_lock);

	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
		n = AES_BLOCK_SIZE;
		if (nbytes >= 2*AES_BLOCK_SIZE && locked)
			n = __ctrblk_init(ctrblk, walk.iv, nbytes);
		ctrptr = (n > AES_BLOCK_SIZE) ? ctrblk : walk.iv;
		k = cpacf_kmctr(ctx->fc, &param, walk.dst.virt.addr,
				walk.src.virt.addr, n, ctrptr);
		if (k) {
			/* advance the iv past the blocks actually done */
			if (ctrptr == ctrblk)
				memcpy(walk.iv, ctrptr + k - AES_BLOCK_SIZE,
				       AES_BLOCK_SIZE);
			crypto_inc(walk.iv, AES_BLOCK_SIZE);
			ret = skcipher_walk_done(&walk, nbytes - k);
		}
		if (k < n) {
			/* short completion: re-convert the key and refresh
			 * the local parameter block before retrying */
			if (__paes_convert_key(ctx)) {
				if (locked)
					mutex_unlock(&ctrblk_lock);
				return skcipher_walk_done(&walk, -EIO);
			}
			spin_lock_bh(&ctx->pk_lock);
			memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
			spin_unlock_bh(&ctx->pk_lock);
		}
	}
	if (locked)
		mutex_unlock(&ctrblk_lock);
	/*
	 * final block may be < AES_BLOCK_SIZE, copy only nbytes
	 */
	if (nbytes) {
		while (1) {
			if (cpacf_kmctr(ctx->fc, &param, buf,
					walk.src.virt.addr, AES_BLOCK_SIZE,
					walk.iv) == AES_BLOCK_SIZE)
				break;
			/* same short-completion recovery as above */
			if (__paes_convert_key(ctx))
				return skcipher_walk_done(&walk, -EIO);
			spin_lock_bh(&ctx->pk_lock);
			memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
			spin_unlock_bh(&ctx->pk_lock);
		}
		memcpy(walk.dst.virt.addr, buf, nbytes);
		crypto_inc(walk.iv, AES_BLOCK_SIZE);
		ret = skcipher_walk_done(&walk, nbytes);
	}

	return ret;
}
---|
502 | 697 | |
---|
503 | | -static int ctr_paes_encrypt(struct blkcipher_desc *desc, |
---|
504 | | - struct scatterlist *dst, struct scatterlist *src, |
---|
505 | | - unsigned int nbytes) |
---|
506 | | -{ |
---|
507 | | - struct blkcipher_walk walk; |
---|
508 | | - |
---|
509 | | - blkcipher_walk_init(&walk, dst, src, nbytes); |
---|
510 | | - return ctr_paes_crypt(desc, 0, &walk); |
---|
511 | | -} |
---|
512 | | - |
---|
513 | | -static int ctr_paes_decrypt(struct blkcipher_desc *desc, |
---|
514 | | - struct scatterlist *dst, struct scatterlist *src, |
---|
515 | | - unsigned int nbytes) |
---|
516 | | -{ |
---|
517 | | - struct blkcipher_walk walk; |
---|
518 | | - |
---|
519 | | - blkcipher_walk_init(&walk, dst, src, nbytes); |
---|
520 | | - return ctr_paes_crypt(desc, CPACF_DECRYPT, &walk); |
---|
521 | | -} |
---|
522 | | - |
---|
/*
 * skcipher algorithm descriptor for ctr(paes).
 * blocksize is 1 (CTR is a stream mode); chunksize advertises the
 * underlying AES block granularity to the walk code.
 */
static struct skcipher_alg ctr_paes_alg = {
	.base.cra_name		=	"ctr(paes)",
	.base.cra_driver_name	=	"ctr-paes-s390",
	.base.cra_priority	=	402,	/* ecb-paes-s390 + 1 */
	.base.cra_blocksize	=	1,
	.base.cra_ctxsize	=	sizeof(struct s390_paes_ctx),
	.base.cra_module	=	THIS_MODULE,
	.base.cra_list		=	LIST_HEAD_INIT(ctr_paes_alg.base.cra_list),
	.init			=	ctr_paes_init,
	.exit			=	ctr_paes_exit,
	.min_keysize		=	PAES_MIN_KEYSIZE,
	.max_keysize		=	PAES_MAX_KEYSIZE,
	.ivsize			=	AES_BLOCK_SIZE,
	.setkey			=	ctr_paes_set_key,
	.encrypt		=	ctr_paes_crypt,
	.decrypt		=	ctr_paes_crypt,
	.chunksize		=	AES_BLOCK_SIZE,
};
---|
544 | 716 | |
---|
545 | | -static inline void __crypto_unregister_alg(struct crypto_alg *alg) |
---|
| 717 | +static inline void __crypto_unregister_skcipher(struct skcipher_alg *alg) |
---|
546 | 718 | { |
---|
547 | | - if (!list_empty(&alg->cra_list)) |
---|
548 | | - crypto_unregister_alg(alg); |
---|
| 719 | + if (!list_empty(&alg->base.cra_list)) |
---|
| 720 | + crypto_unregister_skcipher(alg); |
---|
549 | 721 | } |
---|
550 | 722 | |
---|
/* module exit: unregister every registered cipher, then free resources */
static void paes_s390_fini(void)
{
	__crypto_unregister_skcipher(&ctr_paes_alg);
	__crypto_unregister_skcipher(&xts_paes_alg);
	__crypto_unregister_skcipher(&cbc_paes_alg);
	__crypto_unregister_skcipher(&ecb_paes_alg);
	/* free the shared counter page only after the ctr alg is gone */
	if (ctrblk)
		free_page((unsigned long) ctrblk);
}
---|
560 | 732 | |
---|
561 | 733 | static int __init paes_s390_init(void) |
---|
.. | .. |
---|
570 | 742 | if (cpacf_test_func(&km_functions, CPACF_KM_PAES_128) || |
---|
571 | 743 | cpacf_test_func(&km_functions, CPACF_KM_PAES_192) || |
---|
572 | 744 | cpacf_test_func(&km_functions, CPACF_KM_PAES_256)) { |
---|
573 | | - ret = crypto_register_alg(&ecb_paes_alg); |
---|
| 745 | + ret = crypto_register_skcipher(&ecb_paes_alg); |
---|
574 | 746 | if (ret) |
---|
575 | 747 | goto out_err; |
---|
576 | 748 | } |
---|
.. | .. |
---|
578 | 750 | if (cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_128) || |
---|
579 | 751 | cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_192) || |
---|
580 | 752 | cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_256)) { |
---|
581 | | - ret = crypto_register_alg(&cbc_paes_alg); |
---|
| 753 | + ret = crypto_register_skcipher(&cbc_paes_alg); |
---|
582 | 754 | if (ret) |
---|
583 | 755 | goto out_err; |
---|
584 | 756 | } |
---|
585 | 757 | |
---|
586 | 758 | if (cpacf_test_func(&km_functions, CPACF_KM_PXTS_128) || |
---|
587 | 759 | cpacf_test_func(&km_functions, CPACF_KM_PXTS_256)) { |
---|
588 | | - ret = crypto_register_alg(&xts_paes_alg); |
---|
| 760 | + ret = crypto_register_skcipher(&xts_paes_alg); |
---|
589 | 761 | if (ret) |
---|
590 | 762 | goto out_err; |
---|
591 | 763 | } |
---|
.. | .. |
---|
593 | 765 | if (cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_128) || |
---|
594 | 766 | cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_192) || |
---|
595 | 767 | cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_256)) { |
---|
596 | | - ret = crypto_register_alg(&ctr_paes_alg); |
---|
597 | | - if (ret) |
---|
598 | | - goto out_err; |
---|
599 | 768 | ctrblk = (u8 *) __get_free_page(GFP_KERNEL); |
---|
600 | 769 | if (!ctrblk) { |
---|
601 | 770 | ret = -ENOMEM; |
---|
602 | 771 | goto out_err; |
---|
603 | 772 | } |
---|
| 773 | + ret = crypto_register_skcipher(&ctr_paes_alg); |
---|
| 774 | + if (ret) |
---|
| 775 | + goto out_err; |
---|
604 | 776 | } |
---|
605 | 777 | |
---|
606 | 778 | return 0; |
---|