2024-12-19 9370bb92b2d16684ee45cf24e879c93c509162da
kernel/crypto/cipher.c
@@ -1,19 +1,15 @@
+// SPDX-License-Identifier: GPL-2.0-or-later
 /*
  * Cryptographic API.
  *
- * Cipher operations.
+ * Single-block cipher operations.
  *
  * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
  * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
- *
- * This program is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License as published by the Free
- * Software Foundation; either version 2 of the License, or (at your option)
- * any later version.
- *
  */

 #include <crypto/algapi.h>
+#include <crypto/internal/cipher.h>
 #include <linux/kernel.h>
 #include <linux/crypto.h>
 #include <linux/errno.h>
@@ -21,11 +17,11 @@
 #include <linux/string.h>
 #include "internal.h"

-static int setkey_unaligned(struct crypto_tfm *tfm, const u8 *key,
+static int setkey_unaligned(struct crypto_cipher *tfm, const u8 *key,
                             unsigned int keylen)
 {
-        struct cipher_alg *cia = &tfm->__crt_alg->cra_cipher;
-        unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
+        struct cipher_alg *cia = crypto_cipher_alg(tfm);
+        unsigned long alignmask = crypto_cipher_alignmask(tfm);
         int ret;
         u8 *buffer, *alignbuffer;
         unsigned long absize;
@@ -37,83 +33,60 @@

         alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
         memcpy(alignbuffer, key, keylen);
-        ret = cia->cia_setkey(tfm, alignbuffer, keylen);
+        ret = cia->cia_setkey(crypto_cipher_tfm(tfm), alignbuffer, keylen);
         memset(alignbuffer, 0, keylen);
         kfree(buffer);
         return ret;

 }

-static int setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen)
+int crypto_cipher_setkey(struct crypto_cipher *tfm,
+                         const u8 *key, unsigned int keylen)
 {
-        struct cipher_alg *cia = &tfm->__crt_alg->cra_cipher;
-        unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
+        struct cipher_alg *cia = crypto_cipher_alg(tfm);
+        unsigned long alignmask = crypto_cipher_alignmask(tfm);

-        tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
-        if (keylen < cia->cia_min_keysize || keylen > cia->cia_max_keysize) {
-                tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
+        if (keylen < cia->cia_min_keysize || keylen > cia->cia_max_keysize)
                 return -EINVAL;
-        }

         if ((unsigned long)key & alignmask)
                 return setkey_unaligned(tfm, key, keylen);

-        return cia->cia_setkey(tfm, key, keylen);
+        return cia->cia_setkey(crypto_cipher_tfm(tfm), key, keylen);
 }
+EXPORT_SYMBOL_NS_GPL(crypto_cipher_setkey, CRYPTO_INTERNAL);

-static void cipher_crypt_unaligned(void (*fn)(struct crypto_tfm *, u8 *,
-                                              const u8 *),
-                                   struct crypto_tfm *tfm,
-                                   u8 *dst, const u8 *src)
+static inline void cipher_crypt_one(struct crypto_cipher *tfm,
+                                    u8 *dst, const u8 *src, bool enc)
 {
-        unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
-        unsigned int size = crypto_tfm_alg_blocksize(tfm);
-        u8 buffer[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
-        u8 *tmp = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
-
-        memcpy(tmp, src, size);
-        fn(tfm, tmp, tmp);
-        memcpy(dst, tmp, size);
-}
-
-static void cipher_encrypt_unaligned(struct crypto_tfm *tfm,
-                                     u8 *dst, const u8 *src)
-{
-        unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
-        struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;
+        unsigned long alignmask = crypto_cipher_alignmask(tfm);
+        struct cipher_alg *cia = crypto_cipher_alg(tfm);
+        void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
+                enc ? cia->cia_encrypt : cia->cia_decrypt;

         if (unlikely(((unsigned long)dst | (unsigned long)src) & alignmask)) {
-                cipher_crypt_unaligned(cipher->cia_encrypt, tfm, dst, src);
-                return;
+                unsigned int bs = crypto_cipher_blocksize(tfm);
+                u8 buffer[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
+                u8 *tmp = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
+
+                memcpy(tmp, src, bs);
+                fn(crypto_cipher_tfm(tfm), tmp, tmp);
+                memcpy(dst, tmp, bs);
+        } else {
+                fn(crypto_cipher_tfm(tfm), dst, src);
         }
-
-        cipher->cia_encrypt(tfm, dst, src);
 }

-static void cipher_decrypt_unaligned(struct crypto_tfm *tfm,
-                                     u8 *dst, const u8 *src)
+void crypto_cipher_encrypt_one(struct crypto_cipher *tfm,
+                               u8 *dst, const u8 *src)
 {
-        unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
-        struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;
-
-        if (unlikely(((unsigned long)dst | (unsigned long)src) & alignmask)) {
-                cipher_crypt_unaligned(cipher->cia_decrypt, tfm, dst, src);
-                return;
-        }
-
-        cipher->cia_decrypt(tfm, dst, src);
+        cipher_crypt_one(tfm, dst, src, true);
 }
+EXPORT_SYMBOL_NS_GPL(crypto_cipher_encrypt_one, CRYPTO_INTERNAL);

-int crypto_init_cipher_ops(struct crypto_tfm *tfm)
+void crypto_cipher_decrypt_one(struct crypto_cipher *tfm,
+                               u8 *dst, const u8 *src)
 {
-        struct cipher_tfm *ops = &tfm->crt_cipher;
-        struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;
-
-        ops->cit_setkey = setkey;
-        ops->cit_encrypt_one = crypto_tfm_alg_alignmask(tfm) ?
-                                cipher_encrypt_unaligned : cipher->cia_encrypt;
-        ops->cit_decrypt_one = crypto_tfm_alg_alignmask(tfm) ?
-                                cipher_decrypt_unaligned : cipher->cia_decrypt;
-
-        return 0;
+        cipher_crypt_one(tfm, dst, src, false);
 }
+EXPORT_SYMBOL_NS_GPL(crypto_cipher_decrypt_one, CRYPTO_INTERNAL);
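
Usage note (not part of the patch): the three exported helpers above make up the single-block cipher interface that other parts of the crypto subsystem build on. Below is a minimal caller sketch, assuming the post-change layout in which crypto_cipher_setkey()/crypto_cipher_encrypt_one() are declared in <crypto/internal/cipher.h> and exported into the CRYPTO_INTERNAL symbol namespace; the function name demo_encrypt_block and the choice of "aes" are purely illustrative.

/*
 * Hypothetical caller sketch, not part of the patch: key a single-block
 * cipher and encrypt one block. A module using these symbols must import
 * the CRYPTO_INTERNAL namespace explicitly.
 */
#include <crypto/internal/cipher.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/module.h>

MODULE_IMPORT_NS(CRYPTO_INTERNAL);

static int demo_encrypt_block(const u8 *key, unsigned int keylen,
                              const u8 *in, u8 *out)
{
        struct crypto_cipher *tfm;
        int err;

        /* "aes" is only an example algorithm name. */
        tfm = crypto_alloc_cipher("aes", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        err = crypto_cipher_setkey(tfm, key, keylen);
        if (!err)
                /* dst/src may be unaligned; cipher_crypt_one() bounces them. */
                crypto_cipher_encrypt_one(tfm, out, in);

        crypto_free_cipher(tfm);
        return err;
}

The namespaced exports reflect that this interface is intended for use inside crypto templates and drivers; general kernel users are expected to go through the higher-level skcipher API instead.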