forked from ~ljy/RK356X_SDK_RELEASE

hc
2024-01-31 f9004dbfff8a3fbbd7e2a88c8a4327c7f2f8e5b2
kernel/drivers/crypto/vmx/aes_ctr.c
@@ -1,33 +1,17 @@
+// SPDX-License-Identifier: GPL-2.0-only
 /**
  * AES CTR routines supporting VMX instructions on the Power 8
  *
  * Copyright (C) 2015 International Business Machines Inc.
  *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation; version 2 only.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program; if not, write to the Free Software
- * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
- *
  * Author: Marcelo Henrique Cerri <mhcerri@br.ibm.com>
  */
 
-#include <linux/types.h>
-#include <linux/err.h>
-#include <linux/crypto.h>
-#include <linux/delay.h>
-#include <linux/hardirq.h>
+#include <asm/simd.h>
 #include <asm/switch_to.h>
 #include <crypto/aes.h>
-#include <crypto/scatterwalk.h>
-#include <crypto/skcipher.h>
+#include <crypto/internal/simd.h>
+#include <crypto/internal/skcipher.h>
 
 #include "aesp8-ppc.h"
 
@@ -36,44 +20,38 @@
 	struct aes_key enc_key;
 };
 
-static int p8_aes_ctr_init(struct crypto_tfm *tfm)
+static int p8_aes_ctr_init(struct crypto_skcipher *tfm)
 {
-	const char *alg = crypto_tfm_alg_name(tfm);
+	struct p8_aes_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
 	struct crypto_skcipher *fallback;
-	struct p8_aes_ctr_ctx *ctx = crypto_tfm_ctx(tfm);
 
-	fallback = crypto_alloc_skcipher(alg, 0,
-			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
+	fallback = crypto_alloc_skcipher("ctr(aes)", 0,
+					 CRYPTO_ALG_NEED_FALLBACK |
+					 CRYPTO_ALG_ASYNC);
 	if (IS_ERR(fallback)) {
-		printk(KERN_ERR
-		       "Failed to allocate transformation for '%s': %ld\n",
-		       alg, PTR_ERR(fallback));
+		pr_err("Failed to allocate ctr(aes) fallback: %ld\n",
+		       PTR_ERR(fallback));
 		return PTR_ERR(fallback);
 	}
 
-	crypto_skcipher_set_flags(
-		fallback,
-		crypto_skcipher_get_flags((struct crypto_skcipher *)tfm));
+	crypto_skcipher_set_reqsize(tfm, sizeof(struct skcipher_request) +
+				    crypto_skcipher_reqsize(fallback));
 	ctx->fallback = fallback;
-
 	return 0;
 }
 
-static void p8_aes_ctr_exit(struct crypto_tfm *tfm)
+static void p8_aes_ctr_exit(struct crypto_skcipher *tfm)
 {
-	struct p8_aes_ctr_ctx *ctx = crypto_tfm_ctx(tfm);
+	struct p8_aes_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
 
-	if (ctx->fallback) {
-		crypto_free_skcipher(ctx->fallback);
-		ctx->fallback = NULL;
-	}
+	crypto_free_skcipher(ctx->fallback);
 }
 
-static int p8_aes_ctr_setkey(struct crypto_tfm *tfm, const u8 *key,
+static int p8_aes_ctr_setkey(struct crypto_skcipher *tfm, const u8 *key,
 			     unsigned int keylen)
 {
+	struct p8_aes_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
 	int ret;
-	struct p8_aes_ctr_ctx *ctx = crypto_tfm_ctx(tfm);
 
 	preempt_disable();
 	pagefault_disable();
@@ -83,12 +61,13 @@
 	pagefault_enable();
 	preempt_enable();
 
-	ret += crypto_skcipher_setkey(ctx->fallback, key, keylen);
-	return ret;
+	ret |= crypto_skcipher_setkey(ctx->fallback, key, keylen);
+
+	return ret ? -EINVAL : 0;
 }
 
-static void p8_aes_ctr_final(struct p8_aes_ctr_ctx *ctx,
-			     struct blkcipher_walk *walk)
+static void p8_aes_ctr_final(const struct p8_aes_ctr_ctx *ctx,
+			     struct skcipher_walk *walk)
 {
 	u8 *ctrblk = walk->iv;
 	u8 keystream[AES_BLOCK_SIZE];
@@ -108,77 +87,63 @@
 	crypto_inc(ctrblk, AES_BLOCK_SIZE);
 }
 
-static int p8_aes_ctr_crypt(struct blkcipher_desc *desc,
-			    struct scatterlist *dst,
-			    struct scatterlist *src, unsigned int nbytes)
+static int p8_aes_ctr_crypt(struct skcipher_request *req)
 {
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	const struct p8_aes_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
+	struct skcipher_walk walk;
+	unsigned int nbytes;
 	int ret;
-	u64 inc;
-	struct blkcipher_walk walk;
-	struct p8_aes_ctr_ctx *ctx =
-		crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));
 
-	if (in_interrupt()) {
-		SKCIPHER_REQUEST_ON_STACK(req, ctx->fallback);
-		skcipher_request_set_tfm(req, ctx->fallback);
-		skcipher_request_set_callback(req, desc->flags, NULL, NULL);
-		skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);
-		ret = crypto_skcipher_encrypt(req);
-		skcipher_request_zero(req);
-	} else {
-		blkcipher_walk_init(&walk, dst, src, nbytes);
-		ret = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);
-		while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
-			preempt_disable();
-			pagefault_disable();
-			enable_kernel_vsx();
-			aes_p8_ctr32_encrypt_blocks(walk.src.virt.addr,
-						    walk.dst.virt.addr,
-						    (nbytes &
-						     AES_BLOCK_MASK) /
-						    AES_BLOCK_SIZE,
-						    &ctx->enc_key,
-						    walk.iv);
-			disable_kernel_vsx();
-			pagefault_enable();
-			preempt_enable();
+	if (!crypto_simd_usable()) {
+		struct skcipher_request *subreq = skcipher_request_ctx(req);
 
-			/* We need to update IV mostly for last bytes/round */
-			inc = (nbytes & AES_BLOCK_MASK) / AES_BLOCK_SIZE;
-			if (inc > 0)
-				while (inc--)
-					crypto_inc(walk.iv, AES_BLOCK_SIZE);
-
-			nbytes &= AES_BLOCK_SIZE - 1;
-			ret = blkcipher_walk_done(desc, &walk, nbytes);
-		}
-		if (walk.nbytes) {
-			p8_aes_ctr_final(ctx, &walk);
-			ret = blkcipher_walk_done(desc, &walk, 0);
-		}
+		*subreq = *req;
+		skcipher_request_set_tfm(subreq, ctx->fallback);
+		return crypto_skcipher_encrypt(subreq);
 	}
 
+	ret = skcipher_walk_virt(&walk, req, false);
+	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
+		preempt_disable();
+		pagefault_disable();
+		enable_kernel_vsx();
+		aes_p8_ctr32_encrypt_blocks(walk.src.virt.addr,
+					    walk.dst.virt.addr,
+					    nbytes / AES_BLOCK_SIZE,
+					    &ctx->enc_key, walk.iv);
+		disable_kernel_vsx();
+		pagefault_enable();
+		preempt_enable();
+
+		do {
+			crypto_inc(walk.iv, AES_BLOCK_SIZE);
+		} while ((nbytes -= AES_BLOCK_SIZE) >= AES_BLOCK_SIZE);
+
+		ret = skcipher_walk_done(&walk, nbytes);
+	}
+	if (nbytes) {
+		p8_aes_ctr_final(ctx, &walk);
+		ret = skcipher_walk_done(&walk, 0);
+	}
 	return ret;
 }
 
-struct crypto_alg p8_aes_ctr_alg = {
-	.cra_name = "ctr(aes)",
-	.cra_driver_name = "p8_aes_ctr",
-	.cra_module = THIS_MODULE,
-	.cra_priority = 2000,
-	.cra_type = &crypto_blkcipher_type,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER | CRYPTO_ALG_NEED_FALLBACK,
-	.cra_alignmask = 0,
-	.cra_blocksize = 1,
-	.cra_ctxsize = sizeof(struct p8_aes_ctr_ctx),
-	.cra_init = p8_aes_ctr_init,
-	.cra_exit = p8_aes_ctr_exit,
-	.cra_blkcipher = {
-		       .ivsize = AES_BLOCK_SIZE,
-		       .min_keysize = AES_MIN_KEY_SIZE,
-		       .max_keysize = AES_MAX_KEY_SIZE,
-		       .setkey = p8_aes_ctr_setkey,
-		       .encrypt = p8_aes_ctr_crypt,
-		       .decrypt = p8_aes_ctr_crypt,
-	},
+struct skcipher_alg p8_aes_ctr_alg = {
+	.base.cra_name = "ctr(aes)",
+	.base.cra_driver_name = "p8_aes_ctr",
+	.base.cra_module = THIS_MODULE,
+	.base.cra_priority = 2000,
+	.base.cra_flags = CRYPTO_ALG_NEED_FALLBACK,
+	.base.cra_blocksize = 1,
+	.base.cra_ctxsize = sizeof(struct p8_aes_ctr_ctx),
+	.setkey = p8_aes_ctr_setkey,
+	.encrypt = p8_aes_ctr_crypt,
+	.decrypt = p8_aes_ctr_crypt,
+	.init = p8_aes_ctr_init,
+	.exit = p8_aes_ctr_exit,
+	.min_keysize = AES_MIN_KEY_SIZE,
+	.max_keysize = AES_MAX_KEY_SIZE,
+	.ivsize = AES_BLOCK_SIZE,
+	.chunksize = AES_BLOCK_SIZE,
 };
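
Note on how the converted algorithm is consumed: after this change, p8_aes_ctr_alg is a struct skcipher_alg, so the module entry points must register it with crypto_register_skcipher() instead of crypto_register_alg(). The sketch below is a minimal illustration of that registration path, not the driver's actual vmx.c (the real module also registers the CBC/XTS/GHASH siblings and gates on the PowerPC vector-crypto CPU feature); the demo function names are hypothetical.

	// Minimal registration sketch, assuming only this one algorithm.
	// The real vmx module registers several algorithms and checks
	// PPC_FEATURE2_VEC_CRYPTO before doing so.
	#include <linux/module.h>
	#include <crypto/internal/skcipher.h>

	extern struct skcipher_alg p8_aes_ctr_alg;	/* defined in aes_ctr.c */

	static int __init p8_ctr_demo_init(void)
	{
		/* skcipher_alg replaces the old crypto_alg/blkcipher pairing */
		return crypto_register_skcipher(&p8_aes_ctr_alg);
	}

	static void __exit p8_ctr_demo_exit(void)
	{
		crypto_unregister_skcipher(&p8_aes_ctr_alg);
	}

	module_init(p8_ctr_demo_init);
	module_exit(p8_ctr_demo_exit);
	MODULE_LICENSE("GPL");

The .chunksize field added in the diff matters here: with .base.cra_blocksize set to 1 (CTR is a stream cipher), chunksize tells the skcipher walk code that keystream is still produced in AES_BLOCK_SIZE units, which is what lets the partial-block tail be handled by p8_aes_ctr_final().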