2023-12-11 d2ccde1c8e90d38cee87a1b0309ad2827f3fd30d
kernel/include/crypto/internal/skcipher.h
@@ -1,19 +1,15 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
 /*
  * Symmetric key ciphers.
  *
  * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
- *
- * This program is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License as published by the Free
- * Software Foundation; either version 2 of the License, or (at your option)
- * any later version.
- *
  */
 
 #ifndef _CRYPTO_INTERNAL_SKCIPHER_H
 #define _CRYPTO_INTERNAL_SKCIPHER_H
 
 #include <crypto/algapi.h>
+#include <crypto/internal/cipher.h>
 #include <crypto/skcipher.h>
 #include <linux/list.h>
 #include <linux/types.h>
@@ -70,8 +66,6 @@
 	unsigned int alignmask;
 };
 
-extern const struct crypto_type crypto_givcipher_type;
-
 static inline struct crypto_instance *skcipher_crypto_instance(
 	struct skcipher_instance *inst)
 {
@@ -95,14 +89,9 @@
 	req->base.complete(&req->base, err);
 }
 
-static inline void crypto_set_skcipher_spawn(
-	struct crypto_skcipher_spawn *spawn, struct crypto_instance *inst)
-{
-	crypto_set_spawn(&spawn->base, inst);
-}
-
-int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn, const char *name,
-			 u32 type, u32 mask);
+int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn,
+			 struct crypto_instance *inst,
+			 const char *name, u32 type, u32 mask);
 
 static inline void crypto_drop_skcipher(struct crypto_skcipher_spawn *spawn)
 {
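
Note: crypto_grab_skcipher() now takes the instance directly, replacing the removed crypto_set_skcipher_spawn() step. A hedged sketch of a call site under the new signature follows; the helper name and surrounding template code are illustrative, not part of this diff.

/*
 * Hedged sketch, not part of this diff: "example_grab" is a hypothetical
 * helper; inst, spawn and tb are assumed to come from a template's
 * ->create() callback, and crypto_attr_alg_name(tb[1]) is the usual way
 * to pull the underlying algorithm name out of the template parameters.
 */
static int example_grab(struct skcipher_instance *inst,
			struct crypto_skcipher_spawn *spawn,
			struct rtattr **tb, u32 mask)
{
	/* The instance is passed at grab time, replacing the removed
	 * crypto_set_skcipher_spawn() step. */
	return crypto_grab_skcipher(spawn, skcipher_crypto_instance(inst),
				    crypto_attr_alg_name(tb[1]), 0, mask);
}

On failure or teardown the spawn is still released with crypto_drop_skcipher(), which is unchanged in the context above.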
@@ -147,23 +136,15 @@
 void skcipher_walk_atomise(struct skcipher_walk *walk);
 int skcipher_walk_async(struct skcipher_walk *walk,
 			struct skcipher_request *req);
-int skcipher_walk_aead(struct skcipher_walk *walk, struct aead_request *req,
-		       bool atomic);
 int skcipher_walk_aead_encrypt(struct skcipher_walk *walk,
 			       struct aead_request *req, bool atomic);
 int skcipher_walk_aead_decrypt(struct skcipher_walk *walk,
 			       struct aead_request *req, bool atomic);
 void skcipher_walk_complete(struct skcipher_walk *walk, int err);
 
-static inline void ablkcipher_request_complete(struct ablkcipher_request *req,
-					       int err)
+static inline void skcipher_walk_abort(struct skcipher_walk *walk)
 {
-	req->base.complete(&req->base, err);
-}
-
-static inline u32 ablkcipher_request_flags(struct ablkcipher_request *req)
-{
-	return req->base.flags;
+	skcipher_walk_done(walk, -ECANCELED);
 }
 
 static inline void *crypto_skcipher_ctx(struct crypto_skcipher *tfm)
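
Note: the new skcipher_walk_abort() gives walk users a way to bail out of a partially processed walk; it simply finishes the walk with -ECANCELED. A hedged sketch of how an encrypt path might use it follows; example_encrypt() and hw_process_block() are made-up names.

/*
 * Hedged sketch, not part of this diff: hw_process_block() stands in for
 * whatever per-chunk processing can fail and is not a real kernel API;
 * the walk calls themselves (skcipher_walk_virt/_done) are existing
 * internal API.
 */
static int hw_process_block(const void *src, void *dst, unsigned int len);

static int example_encrypt(struct skcipher_request *req)
{
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes) {
		if (hw_process_block(walk.src.virt.addr, walk.dst.virt.addr,
				     walk.nbytes) < 0) {
			/* Unrecoverable mid-walk failure: unwind the walk.
			 * Equivalent to skcipher_walk_done(&walk, -ECANCELED). */
			skcipher_walk_abort(&walk);
			return -ECANCELED;
		}
		/* All bytes of this step were consumed. */
		err = skcipher_walk_done(&walk, 0);
	}

	return err;
}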
@@ -184,28 +165,60 @@
 static inline unsigned int crypto_skcipher_alg_min_keysize(
 	struct skcipher_alg *alg)
 {
-	if ((alg->base.cra_flags & CRYPTO_ALG_TYPE_MASK) ==
-	    CRYPTO_ALG_TYPE_BLKCIPHER)
-		return alg->base.cra_blkcipher.min_keysize;
-
-	if (alg->base.cra_ablkcipher.encrypt)
-		return alg->base.cra_ablkcipher.min_keysize;
-
 	return alg->min_keysize;
 }
 
 static inline unsigned int crypto_skcipher_alg_max_keysize(
 	struct skcipher_alg *alg)
 {
-	if ((alg->base.cra_flags & CRYPTO_ALG_TYPE_MASK) ==
-	    CRYPTO_ALG_TYPE_BLKCIPHER)
-		return alg->base.cra_blkcipher.max_keysize;
-
-	if (alg->base.cra_ablkcipher.encrypt)
-		return alg->base.cra_ablkcipher.max_keysize;
-
 	return alg->max_keysize;
 }
 
+static inline unsigned int crypto_skcipher_alg_walksize(
+	struct skcipher_alg *alg)
+{
+	return alg->walksize;
+}
+
+/**
+ * crypto_skcipher_walksize() - obtain walk size
+ * @tfm: cipher handle
+ *
+ * In some cases, algorithms can only perform optimally when operating on
+ * multiple blocks in parallel. This is reflected by the walksize, which
+ * must be a multiple of the chunksize (or equal if the concern does not
+ * apply)
+ *
+ * Return: walk size in bytes
+ */
+static inline unsigned int crypto_skcipher_walksize(
+	struct crypto_skcipher *tfm)
+{
+	return crypto_skcipher_alg_walksize(crypto_skcipher_alg(tfm));
+}
+
+/* Helpers for simple block cipher modes of operation */
+struct skcipher_ctx_simple {
+	struct crypto_cipher *cipher;	/* underlying block cipher */
+};
+static inline struct crypto_cipher *
+skcipher_cipher_simple(struct crypto_skcipher *tfm)
+{
+	struct skcipher_ctx_simple *ctx = crypto_skcipher_ctx(tfm);
+
+	return ctx->cipher;
+}
+
+struct skcipher_instance *skcipher_alloc_instance_simple(
+	struct crypto_template *tmpl, struct rtattr **tb);
+
+static inline struct crypto_alg *skcipher_ialg_simple(
+	struct skcipher_instance *inst)
+{
+	struct crypto_cipher_spawn *spawn = skcipher_instance_ctx(inst);
+
+	return crypto_spawn_cipher_alg(spawn);
+}
+
 #endif /* _CRYPTO_INTERNAL_SKCIPHER_H */
 
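
Note: the new "simple" helpers let single block cipher modes build their skcipher instance from an underlying crypto_cipher. A hedged sketch of a template ->create() built on skcipher_alloc_instance_simple() follows, modeled on how an ECB-style mode could use it; the encrypt/decrypt callbacks are assumed to exist elsewhere and are not shown.

/*
 * Hedged sketch, not part of this diff: an ECB-like template built on the
 * new simple helpers. example_ecb_encrypt()/example_ecb_decrypt() are
 * assumed callbacks (e.g. looping over blocks with the crypto_cipher
 * returned by skcipher_cipher_simple()).
 */
static int example_ecb_encrypt(struct skcipher_request *req);
static int example_ecb_decrypt(struct skcipher_request *req);

static int example_ecb_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_instance *inst;
	int err;

	/* Allocates the instance, grabs the underlying cipher and fills in
	 * block size, key size limits and a pass-through setkey. */
	inst = skcipher_alloc_instance_simple(tmpl, tb);
	if (IS_ERR(inst))
		return PTR_ERR(inst);

	inst->alg.ivsize = 0;	/* an ECB-like mode takes no IV */
	inst->alg.encrypt = example_ecb_encrypt;
	inst->alg.decrypt = example_ecb_decrypt;

	err = skcipher_register_instance(tmpl, inst);
	if (err)
		inst->free(inst);

	return err;
}

Inside the callbacks, skcipher_cipher_simple(tfm) returns the crypto_cipher that the helper stored in the skcipher_ctx_simple context.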