@@ -1,34 +1,18 @@
+// SPDX-License-Identifier: GPL-2.0-only
 /**
  * AES XTS routines supporting VMX In-core instructions on Power 8
  *
  * Copyright (C) 2015 International Business Machines Inc.
  *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundations; version 2 only.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY of FITNESS FOR A PARTICUPAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program; if not, write to the Free Software
- * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
- *
  * Author: Leonidas S. Barbosa <leosilva@linux.vnet.ibm.com>
  */
 
-#include <linux/types.h>
-#include <linux/err.h>
-#include <linux/crypto.h>
-#include <linux/delay.h>
-#include <linux/hardirq.h>
+#include <asm/simd.h>
 #include <asm/switch_to.h>
 #include <crypto/aes.h>
-#include <crypto/scatterwalk.h>
+#include <crypto/internal/simd.h>
+#include <crypto/internal/skcipher.h>
 #include <crypto/xts.h>
-#include <crypto/skcipher.h>
 
 #include "aesp8-ppc.h"
 
@@ -39,46 +23,40 @@
 	struct aes_key tweak_key;
 };
 
-static int p8_aes_xts_init(struct crypto_tfm *tfm)
+static int p8_aes_xts_init(struct crypto_skcipher *tfm)
 {
-	const char *alg = crypto_tfm_alg_name(tfm);
+	struct p8_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
 	struct crypto_skcipher *fallback;
-	struct p8_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm);
 
-	fallback = crypto_alloc_skcipher(alg, 0,
-			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
+	fallback = crypto_alloc_skcipher("xts(aes)", 0,
+					 CRYPTO_ALG_NEED_FALLBACK |
+					 CRYPTO_ALG_ASYNC);
 	if (IS_ERR(fallback)) {
-		printk(KERN_ERR
-			"Failed to allocate transformation for '%s': %ld\n",
-			alg, PTR_ERR(fallback));
+		pr_err("Failed to allocate xts(aes) fallback: %ld\n",
+		       PTR_ERR(fallback));
 		return PTR_ERR(fallback);
 	}
 
-	crypto_skcipher_set_flags(
-		fallback,
-		crypto_skcipher_get_flags((struct crypto_skcipher *)tfm));
+	crypto_skcipher_set_reqsize(tfm, sizeof(struct skcipher_request) +
+				    crypto_skcipher_reqsize(fallback));
 	ctx->fallback = fallback;
-
 	return 0;
 }
 
-static void p8_aes_xts_exit(struct crypto_tfm *tfm)
+static void p8_aes_xts_exit(struct crypto_skcipher *tfm)
 {
-	struct p8_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm);
+	struct p8_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
 
-	if (ctx->fallback) {
-		crypto_free_skcipher(ctx->fallback);
-		ctx->fallback = NULL;
-	}
+	crypto_free_skcipher(ctx->fallback);
 }
 
-static int p8_aes_xts_setkey(struct crypto_tfm *tfm, const u8 *key,
+static int p8_aes_xts_setkey(struct crypto_skcipher *tfm, const u8 *key,
 			     unsigned int keylen)
 {
+	struct p8_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
 	int ret;
-	struct p8_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm);
 
-	ret = xts_check_key(tfm, key, keylen);
+	ret = xts_verify_key(tfm, key, keylen);
 	if (ret)
 		return ret;
 
@@ -86,105 +64,99 @@
 	pagefault_disable();
 	enable_kernel_vsx();
 	ret = aes_p8_set_encrypt_key(key + keylen/2, (keylen/2) * 8, &ctx->tweak_key);
-	ret += aes_p8_set_encrypt_key(key, (keylen/2) * 8, &ctx->enc_key);
-	ret += aes_p8_set_decrypt_key(key, (keylen/2) * 8, &ctx->dec_key);
+	ret |= aes_p8_set_encrypt_key(key, (keylen/2) * 8, &ctx->enc_key);
+	ret |= aes_p8_set_decrypt_key(key, (keylen/2) * 8, &ctx->dec_key);
 	disable_kernel_vsx();
 	pagefault_enable();
 	preempt_enable();
 
-	ret += crypto_skcipher_setkey(ctx->fallback, key, keylen);
-	return ret;
+	ret |= crypto_skcipher_setkey(ctx->fallback, key, keylen);
+
+	return ret ? -EINVAL : 0;
 }
 
-static int p8_aes_xts_crypt(struct blkcipher_desc *desc,
-			    struct scatterlist *dst,
-			    struct scatterlist *src,
-			    unsigned int nbytes, int enc)
+static int p8_aes_xts_crypt(struct skcipher_request *req, int enc)
 {
-	int ret;
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	const struct p8_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
+	struct skcipher_walk walk;
+	unsigned int nbytes;
 	u8 tweak[AES_BLOCK_SIZE];
-	u8 *iv;
-	struct blkcipher_walk walk;
-	struct p8_aes_xts_ctx *ctx =
-		crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));
+	int ret;
 
-	if (in_interrupt()) {
-		SKCIPHER_REQUEST_ON_STACK(req, ctx->fallback);
-		skcipher_request_set_tfm(req, ctx->fallback);
-		skcipher_request_set_callback(req, desc->flags, NULL, NULL);
-		skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);
-		ret = enc? crypto_skcipher_encrypt(req) : crypto_skcipher_decrypt(req);
-		skcipher_request_zero(req);
-	} else {
-		blkcipher_walk_init(&walk, dst, src, nbytes);
+	if (req->cryptlen < AES_BLOCK_SIZE)
+		return -EINVAL;
 
-		ret = blkcipher_walk_virt(desc, &walk);
+	if (!crypto_simd_usable() || (req->cryptlen % XTS_BLOCK_SIZE) != 0) {
+		struct skcipher_request *subreq = skcipher_request_ctx(req);
 
+		*subreq = *req;
+		skcipher_request_set_tfm(subreq, ctx->fallback);
+		return enc ? crypto_skcipher_encrypt(subreq) :
+			     crypto_skcipher_decrypt(subreq);
+	}
+
+	ret = skcipher_walk_virt(&walk, req, false);
+	if (ret)
+		return ret;
+
+	preempt_disable();
+	pagefault_disable();
+	enable_kernel_vsx();
+
+	aes_p8_encrypt(walk.iv, tweak, &ctx->tweak_key);
+
+	disable_kernel_vsx();
+	pagefault_enable();
+	preempt_enable();
+
+	while ((nbytes = walk.nbytes) != 0) {
 		preempt_disable();
 		pagefault_disable();
 		enable_kernel_vsx();
-
-		iv = walk.iv;
-		memset(tweak, 0, AES_BLOCK_SIZE);
-		aes_p8_encrypt(iv, tweak, &ctx->tweak_key);
-
+		if (enc)
+			aes_p8_xts_encrypt(walk.src.virt.addr,
+					   walk.dst.virt.addr,
+					   round_down(nbytes, AES_BLOCK_SIZE),
+					   &ctx->enc_key, NULL, tweak);
+		else
+			aes_p8_xts_decrypt(walk.src.virt.addr,
+					   walk.dst.virt.addr,
+					   round_down(nbytes, AES_BLOCK_SIZE),
+					   &ctx->dec_key, NULL, tweak);
 		disable_kernel_vsx();
 		pagefault_enable();
 		preempt_enable();
 
-		while ((nbytes = walk.nbytes)) {
-			preempt_disable();
-			pagefault_disable();
-			enable_kernel_vsx();
-			if (enc)
-				aes_p8_xts_encrypt(walk.src.virt.addr, walk.dst.virt.addr,
-						nbytes & AES_BLOCK_MASK, &ctx->enc_key, NULL, tweak);
-			else
-				aes_p8_xts_decrypt(walk.src.virt.addr, walk.dst.virt.addr,
-						nbytes & AES_BLOCK_MASK, &ctx->dec_key, NULL, tweak);
-			disable_kernel_vsx();
-			pagefault_enable();
-			preempt_enable();
-
-			nbytes &= AES_BLOCK_SIZE - 1;
-			ret = blkcipher_walk_done(desc, &walk, nbytes);
-		}
+		ret = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
 	}
 	return ret;
 }
 
-static int p8_aes_xts_encrypt(struct blkcipher_desc *desc,
-			      struct scatterlist *dst,
-			      struct scatterlist *src, unsigned int nbytes)
+static int p8_aes_xts_encrypt(struct skcipher_request *req)
 {
-	return p8_aes_xts_crypt(desc, dst, src, nbytes, 1);
+	return p8_aes_xts_crypt(req, 1);
 }
 
-static int p8_aes_xts_decrypt(struct blkcipher_desc *desc,
-			      struct scatterlist *dst,
-			      struct scatterlist *src, unsigned int nbytes)
+static int p8_aes_xts_decrypt(struct skcipher_request *req)
 {
-	return p8_aes_xts_crypt(desc, dst, src, nbytes, 0);
+	return p8_aes_xts_crypt(req, 0);
 }
 
-struct crypto_alg p8_aes_xts_alg = {
-	.cra_name = "xts(aes)",
-	.cra_driver_name = "p8_aes_xts",
-	.cra_module = THIS_MODULE,
-	.cra_priority = 2000,
-	.cra_type = &crypto_blkcipher_type,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER | CRYPTO_ALG_NEED_FALLBACK,
-	.cra_alignmask = 0,
-	.cra_blocksize = AES_BLOCK_SIZE,
-	.cra_ctxsize = sizeof(struct p8_aes_xts_ctx),
-	.cra_init = p8_aes_xts_init,
-	.cra_exit = p8_aes_xts_exit,
-	.cra_blkcipher = {
-		.ivsize = AES_BLOCK_SIZE,
-		.min_keysize = 2 * AES_MIN_KEY_SIZE,
-		.max_keysize = 2 * AES_MAX_KEY_SIZE,
-		.setkey = p8_aes_xts_setkey,
-		.encrypt = p8_aes_xts_encrypt,
-		.decrypt = p8_aes_xts_decrypt,
-	}
+struct skcipher_alg p8_aes_xts_alg = {
+	.base.cra_name = "xts(aes)",
+	.base.cra_driver_name = "p8_aes_xts",
+	.base.cra_module = THIS_MODULE,
+	.base.cra_priority = 2000,
+	.base.cra_flags = CRYPTO_ALG_NEED_FALLBACK,
+	.base.cra_blocksize = AES_BLOCK_SIZE,
+	.base.cra_ctxsize = sizeof(struct p8_aes_xts_ctx),
+	.setkey = p8_aes_xts_setkey,
+	.encrypt = p8_aes_xts_encrypt,
+	.decrypt = p8_aes_xts_decrypt,
+	.init = p8_aes_xts_init,
+	.exit = p8_aes_xts_exit,
	.min_keysize = 2 * AES_MIN_KEY_SIZE,
+	.max_keysize = 2 * AES_MAX_KEY_SIZE,
+	.ivsize = AES_BLOCK_SIZE,
 };
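
For background, the skcipher request API that this driver now implements is driven by callers roughly as in the sketch below. It is a minimal, hypothetical example and not part of the patch: the p8_aes_xts_demo() function, the 512-byte buffer, and the random key are invented for illustration, and only generic kernel crypto API calls are used. On a Power 8 machine with this driver loaded, an "xts(aes)" allocation can resolve to the p8_aes_xts implementation registered above.

#include <crypto/aes.h>
#include <crypto/skcipher.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/random.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

/* Hypothetical helper, not part of the driver: encrypt one 512-byte
 * "sector" in place through whatever xts(aes) provider the crypto API
 * selects. */
static int p8_aes_xts_demo(void)
{
	u8 key[2 * AES_KEYSIZE_256];	/* XTS takes two AES keys back to back */
	u8 iv[AES_BLOCK_SIZE] = { 0 };	/* tweak, typically the sector number */
	struct crypto_skcipher *tfm;
	struct skcipher_request *req = NULL;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	u8 *buf = NULL;
	int err;

	tfm = crypto_alloc_skcipher("xts(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	get_random_bytes(key, sizeof(key));
	err = crypto_skcipher_setkey(tfm, key, sizeof(key));
	if (err)
		goto out;

	buf = kzalloc(512, GFP_KERNEL);
	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!buf || !req) {
		err = -ENOMEM;
		goto out;
	}

	/* In-place encryption; the provider may complete asynchronously,
	 * so park on crypto_wait_req() until the request is done. */
	sg_init_one(&sg, buf, 512);
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, &sg, &sg, 512, iv);
	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

out:
	skcipher_request_free(req);
	kfree(buf);
	crypto_free_skcipher(tfm);
	return err;
}

Decryption is symmetric: crypto_skcipher_decrypt() with the same key and the same tweak recovers the plaintext.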