2023-12-11 d2ccde1c8e90d38cee87a1b0309ad2827f3fd30d
kernel/arch/arm64/crypto/aes-ce.S
@@ -1,21 +1,28 @@
+/* SPDX-License-Identifier: GPL-2.0-only */
 /*
  * linux/arch/arm64/crypto/aes-ce.S - AES cipher for ARMv8 with
  * Crypto Extensions
  *
  * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License version 2 as
- * published by the Free Software Foundation.
  */
 
 #include <linux/linkage.h>
 #include <asm/assembler.h>
 
-#define AES_ENTRY(func)		ENTRY(ce_ ## func)
-#define AES_ENDPROC(func)	ENDPROC(ce_ ## func)
+#define AES_FUNC_START(func)	SYM_FUNC_START(ce_ ## func)
+#define AES_FUNC_END(func)	SYM_FUNC_END(ce_ ## func)
 
 	.arch		armv8-a+crypto
+
+	xtsmask		.req	v16
+	cbciv		.req	v16
+	vctr		.req	v16
+
+	.macro		xts_reload_mask, tmp
+	.endm
+
+	.macro		xts_cts_skip_tw, reg, lbl
+	.endm
 
 	/* preload all round keys */
 	.macro		load_round_keys, rounds, rk
@@ -47,7 +54,7 @@
 	load_round_keys	\rounds, \temp
 	.endm
 
-	.macro		do_enc_Nx, de, mc, k, i0, i1, i2, i3
+	.macro		do_enc_Nx, de, mc, k, i0, i1, i2, i3, i4
 	aes\de		\i0\().16b, \k\().16b
 	aes\mc		\i0\().16b, \i0\().16b
 	.ifnb		\i1
@@ -58,27 +65,34 @@
 	aes\mc		\i2\().16b, \i2\().16b
 	aes\de		\i3\().16b, \k\().16b
 	aes\mc		\i3\().16b, \i3\().16b
+	.ifnb		\i4
+	aes\de		\i4\().16b, \k\().16b
+	aes\mc		\i4\().16b, \i4\().16b
+	.endif
 	.endif
 	.endif
 	.endm
 
-	/* up to 4 interleaved encryption rounds with the same round key */
-	.macro		round_Nx, enc, k, i0, i1, i2, i3
+	/* up to 5 interleaved encryption rounds with the same round key */
+	.macro		round_Nx, enc, k, i0, i1, i2, i3, i4
 	.ifc		\enc, e
-	do_enc_Nx	e, mc, \k, \i0, \i1, \i2, \i3
+	do_enc_Nx	e, mc, \k, \i0, \i1, \i2, \i3, \i4
 	.else
-	do_enc_Nx	d, imc, \k, \i0, \i1, \i2, \i3
+	do_enc_Nx	d, imc, \k, \i0, \i1, \i2, \i3, \i4
 	.endif
 	.endm
 
-	/* up to 4 interleaved final rounds */
-	.macro		fin_round_Nx, de, k, k2, i0, i1, i2, i3
+	/* up to 5 interleaved final rounds */
+	.macro		fin_round_Nx, de, k, k2, i0, i1, i2, i3, i4
 	aes\de		\i0\().16b, \k\().16b
 	.ifnb		\i1
 	aes\de		\i1\().16b, \k\().16b
 	.ifnb		\i3
 	aes\de		\i2\().16b, \k\().16b
 	aes\de		\i3\().16b, \k\().16b
+	.ifnb		\i4
+	aes\de		\i4\().16b, \k\().16b
+	.endif
 	.endif
 	.endif
 	eor		\i0\().16b, \i0\().16b, \k2\().16b
@@ -87,47 +101,52 @@
 	.ifnb		\i3
 	eor		\i2\().16b, \i2\().16b, \k2\().16b
 	eor		\i3\().16b, \i3\().16b, \k2\().16b
+	.ifnb		\i4
+	eor		\i4\().16b, \i4\().16b, \k2\().16b
+	.endif
 	.endif
 	.endif
 	.endm
 
-	/* up to 4 interleaved blocks */
-	.macro		do_block_Nx, enc, rounds, i0, i1, i2, i3
+	/* up to 5 interleaved blocks */
+	.macro		do_block_Nx, enc, rounds, i0, i1, i2, i3, i4
 	cmp		\rounds, #12
 	blo		2222f		/* 128 bits */
 	beq		1111f		/* 192 bits */
-	round_Nx	\enc, v17, \i0, \i1, \i2, \i3
-	round_Nx	\enc, v18, \i0, \i1, \i2, \i3
-1111:	round_Nx	\enc, v19, \i0, \i1, \i2, \i3
-	round_Nx	\enc, v20, \i0, \i1, \i2, \i3
+	round_Nx	\enc, v17, \i0, \i1, \i2, \i3, \i4
+	round_Nx	\enc, v18, \i0, \i1, \i2, \i3, \i4
+1111:	round_Nx	\enc, v19, \i0, \i1, \i2, \i3, \i4
+	round_Nx	\enc, v20, \i0, \i1, \i2, \i3, \i4
 2222:	.irp		key, v21, v22, v23, v24, v25, v26, v27, v28, v29
-	round_Nx	\enc, \key, \i0, \i1, \i2, \i3
+	round_Nx	\enc, \key, \i0, \i1, \i2, \i3, \i4
 	.endr
-	fin_round_Nx	\enc, v30, v31, \i0, \i1, \i2, \i3
+	fin_round_Nx	\enc, v30, v31, \i0, \i1, \i2, \i3, \i4
 	.endm
 
 	.macro		encrypt_block, in, rounds, t0, t1, t2
 	do_block_Nx	e, \rounds, \in
 	.endm
 
-	.macro		encrypt_block2x, i0, i1, rounds, t0, t1, t2
-	do_block_Nx	e, \rounds, \i0, \i1
-	.endm
-
 	.macro		encrypt_block4x, i0, i1, i2, i3, rounds, t0, t1, t2
 	do_block_Nx	e, \rounds, \i0, \i1, \i2, \i3
+	.endm
+
+	.macro		encrypt_block5x, i0, i1, i2, i3, i4, rounds, t0, t1, t2
+	do_block_Nx	e, \rounds, \i0, \i1, \i2, \i3, \i4
 	.endm
 
 	.macro		decrypt_block, in, rounds, t0, t1, t2
 	do_block_Nx	d, \rounds, \in
 	.endm
 
-	.macro		decrypt_block2x, i0, i1, rounds, t0, t1, t2
-	do_block_Nx	d, \rounds, \i0, \i1
-	.endm
-
 	.macro		decrypt_block4x, i0, i1, i2, i3, rounds, t0, t1, t2
 	do_block_Nx	d, \rounds, \i0, \i1, \i2, \i3
 	.endm
 
+	.macro		decrypt_block5x, i0, i1, i2, i3, i4, rounds, t0, t1, t2
+	do_block_Nx	d, \rounds, \i0, \i1, \i2, \i3, \i4
+	.endm
+
+#define MAX_STRIDE	5
+
 #include "aes-modes.S"
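
Note: with the extra \i4 argument, round_Nx now drives five AES state registers through the same round key per invocation. As a rough sketch derived from the macros above (register numbers chosen purely for illustration; the real callers live in aes-modes.S, which is outside this diff), one interleaved encryption round expands to:

	aese	v0.16b, v17.16b		// AddRoundKey + SubBytes + ShiftRows, block 0
	aesmc	v0.16b, v0.16b		// MixColumns, block 0
	aese	v1.16b, v17.16b
	aesmc	v1.16b, v1.16b
	aese	v2.16b, v17.16b
	aesmc	v2.16b, v2.16b
	aese	v3.16b, v17.16b
	aesmc	v3.16b, v3.16b
	aese	v4.16b, v17.16b
	aesmc	v4.16b, v4.16b

fin_round_Nx omits the aesmc step and instead XORs in the last round key (aese with v30, then eor with v31) for each of the five state registers. MAX_STRIDE is presumably what the shared aes-modes.S code checks to decide whether to call the 4x or the new 5x block macros.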