.. | .. |
---|
5 | 5 | * Antoine Tenart <antoine.tenart@free-electrons.com> |
---|
6 | 6 | */ |
---|
7 | 7 | |
---|
| 8 | +#include <crypto/aes.h> |
---|
8 | 9 | #include <crypto/hmac.h> |
---|
9 | 10 | #include <crypto/md5.h> |
---|
10 | 11 | #include <crypto/sha.h> |
---|
| 12 | +#include <crypto/sha3.h> |
---|
| 13 | +#include <crypto/skcipher.h> |
---|
| 14 | +#include <crypto/sm3.h> |
---|
| 15 | +#include <crypto/internal/cipher.h> |
---|
11 | 16 | #include <linux/device.h> |
---|
12 | 17 | #include <linux/dma-mapping.h> |
---|
13 | 18 | #include <linux/dmapool.h> |
---|
.. | .. |
---|
16 | 21 | |
---|
17 | 22 | struct safexcel_ahash_ctx { |
---|
18 | 23 | struct safexcel_context base; |
---|
19 | | - struct safexcel_crypto_priv *priv; |
---|
20 | 24 | |
---|
21 | 25 | u32 alg; |
---|
| 26 | + u8 key_sz; |
---|
| 27 | + bool cbcmac; |
---|
| 28 | + bool do_fallback; |
---|
| 29 | + bool fb_init_done; |
---|
| 30 | + bool fb_do_setkey; |
---|
22 | 31 | |
---|
23 | | - u32 ipad[SHA512_DIGEST_SIZE / sizeof(u32)]; |
---|
24 | | - u32 opad[SHA512_DIGEST_SIZE / sizeof(u32)]; |
---|
| 32 | + struct crypto_cipher *kaes; |
---|
| 33 | + struct crypto_ahash *fback; |
---|
| 34 | + struct crypto_shash *shpre; |
---|
| 35 | + struct shash_desc *shdesc; |
---|
25 | 36 | }; |
---|
26 | 37 | |
---|
27 | 38 | struct safexcel_ahash_req { |
---|
.. | .. |
---|
29 | 40 | bool finish; |
---|
30 | 41 | bool hmac; |
---|
31 | 42 | bool needs_inv; |
---|
| 43 | + bool hmac_zlen; |
---|
| 44 | + bool len_is_le; |
---|
| 45 | + bool not_first; |
---|
| 46 | + bool xcbcmac; |
---|
32 | 47 | |
---|
33 | 48 | int nents; |
---|
34 | 49 | dma_addr_t result_dma; |
---|
35 | 50 | |
---|
36 | 51 | u32 digest; |
---|
37 | 52 | |
---|
38 | | - u8 state_sz; /* expected sate size, only set once */ |
---|
39 | | - u32 state[SHA512_DIGEST_SIZE / sizeof(u32)] __aligned(sizeof(u32)); |
---|
| 53 | + u8 state_sz; /* expected state size, only set once */ |
---|
| 54 | + u8 block_sz; /* block size, only set once */ |
---|
| 55 | + u8 digest_sz; /* output digest size, only set once */ |
---|
| 56 | + __le32 state[SHA3_512_BLOCK_SIZE / |
---|
| 57 | + sizeof(__le32)] __aligned(sizeof(__le32)); |
---|
40 | 58 | |
---|
41 | | - u64 len[2]; |
---|
42 | | - u64 processed[2]; |
---|
| 59 | + u64 len; |
---|
| 60 | + u64 processed; |
---|
43 | 61 | |
---|
44 | | - u8 cache[SHA512_BLOCK_SIZE] __aligned(sizeof(u32)); |
---|
| 62 | + u8 cache[HASH_CACHE_SIZE] __aligned(sizeof(u32)); |
---|
45 | 63 | dma_addr_t cache_dma; |
---|
46 | 64 | unsigned int cache_sz; |
---|
47 | 65 | |
---|
48 | | - u8 cache_next[SHA512_BLOCK_SIZE] __aligned(sizeof(u32)); |
---|
| 66 | + u8 cache_next[HASH_CACHE_SIZE] __aligned(sizeof(u32)); |
---|
49 | 67 | }; |
---|
50 | 68 | |
---|
51 | 69 | static inline u64 safexcel_queued_len(struct safexcel_ahash_req *req) |
---|
52 | 70 | { |
---|
53 | | - u64 len, processed; |
---|
54 | | - |
---|
55 | | - len = (0xffffffff * req->len[1]) + req->len[0]; |
---|
56 | | - processed = (0xffffffff * req->processed[1]) + req->processed[0]; |
---|
57 | | - |
---|
58 | | - return len - processed; |
---|
| 71 | + return req->len - req->processed; |
---|
59 | 72 | } |
---|
60 | 73 | |
---|
61 | 74 | static void safexcel_hash_token(struct safexcel_command_desc *cdesc, |
---|
62 | | - u32 input_length, u32 result_length) |
---|
| 75 | + u32 input_length, u32 result_length, |
---|
| 76 | + bool cbcmac) |
---|
63 | 77 | { |
---|
64 | 78 | struct safexcel_token *token = |
---|
65 | 79 | (struct safexcel_token *)cdesc->control_data.token; |
---|
66 | 80 | |
---|
67 | 81 | token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION; |
---|
68 | 82 | token[0].packet_length = input_length; |
---|
69 | | - token[0].stat = EIP197_TOKEN_STAT_LAST_HASH; |
---|
70 | 83 | token[0].instructions = EIP197_TOKEN_INS_TYPE_HASH; |
---|
71 | 84 | |
---|
72 | | - token[1].opcode = EIP197_TOKEN_OPCODE_INSERT; |
---|
73 | | - token[1].packet_length = result_length; |
---|
74 | | - token[1].stat = EIP197_TOKEN_STAT_LAST_HASH | |
---|
| 85 | + input_length &= 15; |
---|
| 86 | + if (unlikely(cbcmac && input_length)) { |
---|
| 87 | + token[0].stat = 0; |
---|
| 88 | + token[1].opcode = EIP197_TOKEN_OPCODE_INSERT; |
---|
| 89 | + token[1].packet_length = 16 - input_length; |
---|
| 90 | + token[1].stat = EIP197_TOKEN_STAT_LAST_HASH; |
---|
| 91 | + token[1].instructions = EIP197_TOKEN_INS_TYPE_HASH; |
---|
| 92 | + } else { |
---|
| 93 | + token[0].stat = EIP197_TOKEN_STAT_LAST_HASH; |
---|
| 94 | + eip197_noop_token(&token[1]); |
---|
| 95 | + } |
---|
| 96 | + |
---|
| 97 | + token[2].opcode = EIP197_TOKEN_OPCODE_INSERT; |
---|
| 98 | + token[2].stat = EIP197_TOKEN_STAT_LAST_HASH | |
---|
75 | 99 | EIP197_TOKEN_STAT_LAST_PACKET; |
---|
76 | | - token[1].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT | |
---|
| 100 | + token[2].packet_length = result_length; |
---|
| 101 | + token[2].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT | |
---|
77 | 102 | EIP197_TOKEN_INS_INSERT_HASH_DIGEST; |
---|
| 103 | + |
---|
| 104 | + eip197_noop_token(&token[3]); |
---|
78 | 105 | } |
---|
79 | 106 | |
---|
80 | 107 | static void safexcel_context_control(struct safexcel_ahash_ctx *ctx, |
---|
81 | 108 | struct safexcel_ahash_req *req, |
---|
82 | | - struct safexcel_command_desc *cdesc, |
---|
83 | | - unsigned int digestsize) |
---|
| 109 | + struct safexcel_command_desc *cdesc) |
---|
84 | 110 | { |
---|
85 | | - struct safexcel_crypto_priv *priv = ctx->priv; |
---|
86 | | - int i; |
---|
| 111 | + struct safexcel_crypto_priv *priv = ctx->base.priv; |
---|
| 112 | + u64 count = 0; |
---|
87 | 113 | |
---|
88 | | - cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_HASH_OUT; |
---|
89 | | - cdesc->control_data.control0 |= ctx->alg; |
---|
90 | | - cdesc->control_data.control0 |= req->digest; |
---|
| 114 | + cdesc->control_data.control0 = ctx->alg; |
---|
| 115 | + cdesc->control_data.control1 = 0; |
---|
91 | 116 | |
---|
92 | | - if (req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) { |
---|
93 | | - if (req->processed[0] || req->processed[1]) { |
---|
94 | | - if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5) |
---|
95 | | - cdesc->control_data.control0 |= CONTEXT_CONTROL_SIZE(5); |
---|
96 | | - else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA1) |
---|
97 | | - cdesc->control_data.control0 |= CONTEXT_CONTROL_SIZE(6); |
---|
98 | | - else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA224 || |
---|
99 | | - ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA256) |
---|
100 | | - cdesc->control_data.control0 |= CONTEXT_CONTROL_SIZE(9); |
---|
101 | | - else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA384 || |
---|
102 | | - ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA512) |
---|
103 | | - cdesc->control_data.control0 |= CONTEXT_CONTROL_SIZE(17); |
---|
| 117 | + /* |
---|
| 118 | + * Copy the input digest if needed, and setup the context |
---|
| 119 | + * fields. Do this now as we need it to setup the first command |
---|
| 120 | + * descriptor. |
---|
| 121 | + */ |
---|
| 122 | + if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM)) { |
---|
| 123 | + if (req->xcbcmac) |
---|
| 124 | + memcpy(ctx->base.ctxr->data, &ctx->base.ipad, ctx->key_sz); |
---|
| 125 | + else |
---|
| 126 | + memcpy(ctx->base.ctxr->data, req->state, req->state_sz); |
---|
104 | 127 | |
---|
105 | | - cdesc->control_data.control1 |= CONTEXT_CONTROL_DIGEST_CNT; |
---|
106 | | - } else { |
---|
107 | | - cdesc->control_data.control0 |= CONTEXT_CONTROL_RESTART_HASH; |
---|
108 | | - } |
---|
| 128 | + if (!req->finish && req->xcbcmac) |
---|
| 129 | + cdesc->control_data.control0 |= |
---|
| 130 | + CONTEXT_CONTROL_DIGEST_XCM | |
---|
| 131 | + CONTEXT_CONTROL_TYPE_HASH_OUT | |
---|
| 132 | + CONTEXT_CONTROL_NO_FINISH_HASH | |
---|
| 133 | + CONTEXT_CONTROL_SIZE(req->state_sz / |
---|
| 134 | + sizeof(u32)); |
---|
| 135 | + else |
---|
| 136 | + cdesc->control_data.control0 |= |
---|
| 137 | + CONTEXT_CONTROL_DIGEST_XCM | |
---|
| 138 | + CONTEXT_CONTROL_TYPE_HASH_OUT | |
---|
| 139 | + CONTEXT_CONTROL_SIZE(req->state_sz / |
---|
| 140 | + sizeof(u32)); |
---|
| 141 | + return; |
---|
| 142 | + } else if (!req->processed) { |
---|
| 143 | + /* First - and possibly only - block of basic hash only */ |
---|
| 144 | + if (req->finish) |
---|
| 145 | + cdesc->control_data.control0 |= req->digest | |
---|
| 146 | + CONTEXT_CONTROL_TYPE_HASH_OUT | |
---|
| 147 | + CONTEXT_CONTROL_RESTART_HASH | |
---|
| 148 | + /* ensure its not 0! */ |
---|
| 149 | + CONTEXT_CONTROL_SIZE(1); |
---|
| 150 | + else |
---|
| 151 | + cdesc->control_data.control0 |= req->digest | |
---|
| 152 | + CONTEXT_CONTROL_TYPE_HASH_OUT | |
---|
| 153 | + CONTEXT_CONTROL_RESTART_HASH | |
---|
| 154 | + CONTEXT_CONTROL_NO_FINISH_HASH | |
---|
| 155 | + /* ensure its not 0! */ |
---|
| 156 | + CONTEXT_CONTROL_SIZE(1); |
---|
| 157 | + return; |
---|
| 158 | + } |
---|
109 | 159 | |
---|
110 | | - if (!req->finish) |
---|
111 | | - cdesc->control_data.control0 |= CONTEXT_CONTROL_NO_FINISH_HASH; |
---|
| 160 | + /* Hash continuation or HMAC, setup (inner) digest from state */ |
---|
| 161 | + memcpy(ctx->base.ctxr->data, req->state, req->state_sz); |
---|
112 | 162 | |
---|
113 | | - /* |
---|
114 | | - * Copy the input digest if needed, and setup the context |
---|
115 | | - * fields. Do this now as we need it to setup the first command |
---|
116 | | - * descriptor. |
---|
117 | | - */ |
---|
118 | | - if (req->processed[0] || req->processed[1]) { |
---|
119 | | - for (i = 0; i < digestsize / sizeof(u32); i++) |
---|
120 | | - ctx->base.ctxr->data[i] = cpu_to_le32(req->state[i]); |
---|
| 163 | + if (req->finish) { |
---|
| 164 | + /* Compute digest count for hash/HMAC finish operations */ |
---|
| 165 | + if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) || |
---|
| 166 | + req->hmac_zlen || (req->processed != req->block_sz)) { |
---|
| 167 | + count = req->processed / EIP197_COUNTER_BLOCK_SIZE; |
---|
121 | 168 | |
---|
122 | | - if (req->finish) { |
---|
123 | | - u64 count = req->processed[0] / EIP197_COUNTER_BLOCK_SIZE; |
---|
124 | | - count += ((0xffffffff / EIP197_COUNTER_BLOCK_SIZE) * |
---|
125 | | - req->processed[1]); |
---|
126 | | - |
---|
127 | | - /* This is a haredware limitation, as the |
---|
128 | | - * counter must fit into an u32. This represents |
---|
129 | | - * a farily big amount of input data, so we |
---|
130 | | - * shouldn't see this. |
---|
131 | | - */ |
---|
132 | | - if (unlikely(count & 0xffff0000)) { |
---|
133 | | - dev_warn(priv->dev, |
---|
134 | | - "Input data is too big\n"); |
---|
135 | | - return; |
---|
136 | | - } |
---|
137 | | - |
---|
138 | | - ctx->base.ctxr->data[i] = cpu_to_le32(count); |
---|
| 169 | + /* This is a hardware limitation, as the |
---|
| 170 | + * counter must fit into an u32. This represents |
---|
| 171 | + * a fairly big amount of input data, so we |
---|
| 172 | + * shouldn't see this. |
---|
| 173 | + */ |
---|
| 174 | + if (unlikely(count & 0xffffffff00000000ULL)) { |
---|
| 175 | + dev_warn(priv->dev, |
---|
| 176 | + "Input data is too big\n"); |
---|
| 177 | + return; |
---|
139 | 178 | } |
---|
140 | 179 | } |
---|
141 | | - } else if (req->digest == CONTEXT_CONTROL_DIGEST_HMAC) { |
---|
142 | | - cdesc->control_data.control0 |= CONTEXT_CONTROL_SIZE(2 * req->state_sz / sizeof(u32)); |
---|
143 | 180 | |
---|
144 | | - memcpy(ctx->base.ctxr->data, ctx->ipad, req->state_sz); |
---|
145 | | - memcpy(ctx->base.ctxr->data + req->state_sz / sizeof(u32), |
---|
146 | | - ctx->opad, req->state_sz); |
---|
| 181 | + if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) || |
---|
| 182 | + /* Special case: zero length HMAC */ |
---|
| 183 | + req->hmac_zlen || |
---|
| 184 | + /* PE HW < 4.4 cannot do HMAC continue, fake using hash */ |
---|
| 185 | + (req->processed != req->block_sz)) { |
---|
| 186 | + /* Basic hash continue operation, need digest + cnt */ |
---|
| 187 | + cdesc->control_data.control0 |= |
---|
| 188 | + CONTEXT_CONTROL_SIZE((req->state_sz >> 2) + 1) | |
---|
| 189 | + CONTEXT_CONTROL_TYPE_HASH_OUT | |
---|
| 190 | + CONTEXT_CONTROL_DIGEST_PRECOMPUTED; |
---|
| 191 | + /* For zero-len HMAC, don't finalize, already padded! */ |
---|
| 192 | + if (req->hmac_zlen) |
---|
| 193 | + cdesc->control_data.control0 |= |
---|
| 194 | + CONTEXT_CONTROL_NO_FINISH_HASH; |
---|
| 195 | + cdesc->control_data.control1 |= |
---|
| 196 | + CONTEXT_CONTROL_DIGEST_CNT; |
---|
| 197 | + ctx->base.ctxr->data[req->state_sz >> 2] = |
---|
| 198 | + cpu_to_le32(count); |
---|
| 199 | + req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED; |
---|
| 200 | + |
---|
| 201 | + /* Clear zero-length HMAC flag for next operation! */ |
---|
| 202 | + req->hmac_zlen = false; |
---|
| 203 | + } else { /* HMAC */ |
---|
| 204 | + /* Need outer digest for HMAC finalization */ |
---|
| 205 | + memcpy(ctx->base.ctxr->data + (req->state_sz >> 2), |
---|
| 206 | + &ctx->base.opad, req->state_sz); |
---|
| 207 | + |
---|
| 208 | + /* Single pass HMAC - no digest count */ |
---|
| 209 | + cdesc->control_data.control0 |= |
---|
| 210 | + CONTEXT_CONTROL_SIZE(req->state_sz >> 1) | |
---|
| 211 | + CONTEXT_CONTROL_TYPE_HASH_OUT | |
---|
| 212 | + CONTEXT_CONTROL_DIGEST_HMAC; |
---|
| 213 | + } |
---|
| 214 | + } else { /* Hash continuation, do not finish yet */ |
---|
| 215 | + cdesc->control_data.control0 |= |
---|
| 216 | + CONTEXT_CONTROL_SIZE(req->state_sz >> 2) | |
---|
| 217 | + CONTEXT_CONTROL_DIGEST_PRECOMPUTED | |
---|
| 218 | + CONTEXT_CONTROL_TYPE_HASH_OUT | |
---|
| 219 | + CONTEXT_CONTROL_NO_FINISH_HASH; |
---|
147 | 220 | } |
---|
148 | 221 | } |
---|
149 | 222 | |
---|
150 | | -static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring, |
---|
| 223 | +static int safexcel_ahash_enqueue(struct ahash_request *areq); |
---|
| 224 | + |
---|
| 225 | +static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, |
---|
| 226 | + int ring, |
---|
151 | 227 | struct crypto_async_request *async, |
---|
152 | 228 | bool *should_complete, int *ret) |
---|
153 | 229 | { |
---|
.. | .. |
---|
155 | 231 | struct ahash_request *areq = ahash_request_cast(async); |
---|
156 | 232 | struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq); |
---|
157 | 233 | struct safexcel_ahash_req *sreq = ahash_request_ctx(areq); |
---|
| 234 | + struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash); |
---|
158 | 235 | u64 cache_len; |
---|
159 | 236 | |
---|
160 | 237 | *ret = 0; |
---|
.. | .. |
---|
176 | 253 | } |
---|
177 | 254 | |
---|
178 | 255 | if (sreq->result_dma) { |
---|
179 | | - dma_unmap_single(priv->dev, sreq->result_dma, sreq->state_sz, |
---|
| 256 | + dma_unmap_single(priv->dev, sreq->result_dma, sreq->digest_sz, |
---|
180 | 257 | DMA_FROM_DEVICE); |
---|
181 | 258 | sreq->result_dma = 0; |
---|
182 | 259 | } |
---|
.. | .. |
---|
185 | 262 | dma_unmap_single(priv->dev, sreq->cache_dma, sreq->cache_sz, |
---|
186 | 263 | DMA_TO_DEVICE); |
---|
187 | 264 | sreq->cache_dma = 0; |
---|
| 265 | + sreq->cache_sz = 0; |
---|
188 | 266 | } |
---|
189 | 267 | |
---|
190 | | - if (sreq->finish) |
---|
191 | | - memcpy(areq->result, sreq->state, |
---|
192 | | - crypto_ahash_digestsize(ahash)); |
---|
| 268 | + if (sreq->finish) { |
---|
| 269 | + if (sreq->hmac && |
---|
| 270 | + (sreq->digest != CONTEXT_CONTROL_DIGEST_HMAC)) { |
---|
| 271 | + /* Faking HMAC using hash - need to do outer hash */ |
---|
| 272 | + memcpy(sreq->cache, sreq->state, |
---|
| 273 | + crypto_ahash_digestsize(ahash)); |
---|
| 274 | + |
---|
| 275 | + memcpy(sreq->state, &ctx->base.opad, sreq->digest_sz); |
---|
| 276 | + |
---|
| 277 | + sreq->len = sreq->block_sz + |
---|
| 278 | + crypto_ahash_digestsize(ahash); |
---|
| 279 | + sreq->processed = sreq->block_sz; |
---|
| 280 | + sreq->hmac = 0; |
---|
| 281 | + |
---|
| 282 | + if (priv->flags & EIP197_TRC_CACHE) |
---|
| 283 | + ctx->base.needs_inv = true; |
---|
| 284 | + areq->nbytes = 0; |
---|
| 285 | + safexcel_ahash_enqueue(areq); |
---|
| 286 | + |
---|
| 287 | + *should_complete = false; /* Not done yet */ |
---|
| 288 | + return 1; |
---|
| 289 | + } |
---|
| 290 | + |
---|
| 291 | + if (unlikely(sreq->digest == CONTEXT_CONTROL_DIGEST_XCM && |
---|
| 292 | + ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_CRC32)) { |
---|
| 293 | + /* Undo final XOR with 0xffffffff ...*/ |
---|
| 294 | + *(__le32 *)areq->result = ~sreq->state[0]; |
---|
| 295 | + } else { |
---|
| 296 | + memcpy(areq->result, sreq->state, |
---|
| 297 | + crypto_ahash_digestsize(ahash)); |
---|
| 298 | + } |
---|
| 299 | + } |
---|
193 | 300 | |
---|
194 | 301 | cache_len = safexcel_queued_len(sreq); |
---|
195 | 302 | if (cache_len) |
---|
.. | .. |
---|
204 | 311 | int *commands, int *results) |
---|
205 | 312 | { |
---|
206 | 313 | struct ahash_request *areq = ahash_request_cast(async); |
---|
207 | | - struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq); |
---|
208 | 314 | struct safexcel_ahash_req *req = ahash_request_ctx(areq); |
---|
209 | 315 | struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq)); |
---|
210 | | - struct safexcel_crypto_priv *priv = ctx->priv; |
---|
| 316 | + struct safexcel_crypto_priv *priv = ctx->base.priv; |
---|
211 | 317 | struct safexcel_command_desc *cdesc, *first_cdesc = NULL; |
---|
212 | 318 | struct safexcel_result_desc *rdesc; |
---|
213 | 319 | struct scatterlist *sg; |
---|
214 | | - int i, extra, n_cdesc = 0, ret = 0; |
---|
215 | | - u64 queued, len, cache_len; |
---|
| 320 | + struct safexcel_token *dmmy; |
---|
| 321 | + int i, extra = 0, n_cdesc = 0, ret = 0, cache_len, skip = 0; |
---|
| 322 | + u64 queued, len; |
---|
216 | 323 | |
---|
217 | | - queued = len = safexcel_queued_len(req); |
---|
218 | | - if (queued <= crypto_ahash_blocksize(ahash)) |
---|
| 324 | + queued = safexcel_queued_len(req); |
---|
| 325 | + if (queued <= HASH_CACHE_SIZE) |
---|
219 | 326 | cache_len = queued; |
---|
220 | 327 | else |
---|
221 | 328 | cache_len = queued - areq->nbytes; |
---|
222 | 329 | |
---|
223 | | - if (!req->last_req) { |
---|
| 330 | + if (!req->finish && !req->last_req) { |
---|
224 | 331 | /* If this is not the last request and the queued data does not |
---|
225 | | - * fit into full blocks, cache it for the next send() call. |
---|
| 332 | + * fit into full cache blocks, cache it for the next send call. |
---|
226 | 333 | */ |
---|
227 | | - extra = queued & (crypto_ahash_blocksize(ahash) - 1); |
---|
| 334 | + extra = queued & (HASH_CACHE_SIZE - 1); |
---|
| 335 | + |
---|
| 336 | + /* If this is not the last request and the queued data |
---|
| 337 | + * is a multiple of a block, cache the last one for now. |
---|
| 338 | + */ |
---|
228 | 339 | if (!extra) |
---|
229 | | - /* If this is not the last request and the queued data |
---|
230 | | - * is a multiple of a block, cache the last one for now. |
---|
231 | | - */ |
---|
232 | | - extra = crypto_ahash_blocksize(ahash); |
---|
| 340 | + extra = HASH_CACHE_SIZE; |
---|
233 | 341 | |
---|
234 | | - if (extra) { |
---|
235 | | - sg_pcopy_to_buffer(areq->src, sg_nents(areq->src), |
---|
236 | | - req->cache_next, extra, |
---|
237 | | - areq->nbytes - extra); |
---|
| 342 | + sg_pcopy_to_buffer(areq->src, sg_nents(areq->src), |
---|
| 343 | + req->cache_next, extra, |
---|
| 344 | + areq->nbytes - extra); |
---|
238 | 345 | |
---|
239 | | - queued -= extra; |
---|
240 | | - len -= extra; |
---|
| 346 | + queued -= extra; |
---|
241 | 347 | |
---|
242 | | - if (!queued) { |
---|
243 | | - *commands = 0; |
---|
244 | | - *results = 0; |
---|
245 | | - return 0; |
---|
246 | | - } |
---|
| 348 | + if (!queued) { |
---|
| 349 | + *commands = 0; |
---|
| 350 | + *results = 0; |
---|
| 351 | + return 0; |
---|
247 | 352 | } |
---|
| 353 | + |
---|
| 354 | + extra = 0; |
---|
248 | 355 | } |
---|
249 | 356 | |
---|
| 357 | + if (unlikely(req->xcbcmac && req->processed > AES_BLOCK_SIZE)) { |
---|
| 358 | + if (unlikely(cache_len < AES_BLOCK_SIZE)) { |
---|
| 359 | + /* |
---|
| 360 | + * Cache contains less than 1 full block, complete. |
---|
| 361 | + */ |
---|
| 362 | + extra = AES_BLOCK_SIZE - cache_len; |
---|
| 363 | + if (queued > cache_len) { |
---|
| 364 | + /* More data follows: borrow bytes */ |
---|
| 365 | + u64 tmp = queued - cache_len; |
---|
| 366 | + |
---|
| 367 | + skip = min_t(u64, tmp, extra); |
---|
| 368 | + sg_pcopy_to_buffer(areq->src, |
---|
| 369 | + sg_nents(areq->src), |
---|
| 370 | + req->cache + cache_len, |
---|
| 371 | + skip, 0); |
---|
| 372 | + } |
---|
| 373 | + extra -= skip; |
---|
| 374 | + memset(req->cache + cache_len + skip, 0, extra); |
---|
| 375 | + if (!ctx->cbcmac && extra) { |
---|
| 376 | + // 10- padding for XCBCMAC & CMAC |
---|
| 377 | + req->cache[cache_len + skip] = 0x80; |
---|
| 378 | + // HW will use K2 iso K3 - compensate! |
---|
| 379 | + for (i = 0; i < AES_BLOCK_SIZE / 4; i++) { |
---|
| 380 | + u32 *cache = (void *)req->cache; |
---|
| 381 | + u32 *ipad = ctx->base.ipad.word; |
---|
| 382 | + u32 x; |
---|
| 383 | + |
---|
| 384 | + x = ipad[i] ^ ipad[i + 4]; |
---|
| 385 | + cache[i] ^= swab32(x); |
---|
| 386 | + } |
---|
| 387 | + } |
---|
| 388 | + cache_len = AES_BLOCK_SIZE; |
---|
| 389 | + queued = queued + extra; |
---|
| 390 | + } |
---|
| 391 | + |
---|
| 392 | + /* XCBC continue: XOR previous result into 1st word */ |
---|
| 393 | + crypto_xor(req->cache, (const u8 *)req->state, AES_BLOCK_SIZE); |
---|
| 394 | + } |
---|
| 395 | + |
---|
| 396 | + len = queued; |
---|
250 | 397 | /* Add a command descriptor for the cached data, if any */ |
---|
251 | 398 | if (cache_len) { |
---|
252 | 399 | req->cache_dma = dma_map_single(priv->dev, req->cache, |
---|
.. | .. |
---|
257 | 404 | req->cache_sz = cache_len; |
---|
258 | 405 | first_cdesc = safexcel_add_cdesc(priv, ring, 1, |
---|
259 | 406 | (cache_len == len), |
---|
260 | | - req->cache_dma, cache_len, len, |
---|
261 | | - ctx->base.ctxr_dma); |
---|
| 407 | + req->cache_dma, cache_len, |
---|
| 408 | + len, ctx->base.ctxr_dma, |
---|
| 409 | + &dmmy); |
---|
262 | 410 | if (IS_ERR(first_cdesc)) { |
---|
263 | 411 | ret = PTR_ERR(first_cdesc); |
---|
264 | 412 | goto unmap_cache; |
---|
.. | .. |
---|
272 | 420 | |
---|
273 | 421 | /* Now handle the current ahash request buffer(s) */ |
---|
274 | 422 | req->nents = dma_map_sg(priv->dev, areq->src, |
---|
275 | | - sg_nents_for_len(areq->src, areq->nbytes), |
---|
| 423 | + sg_nents_for_len(areq->src, |
---|
| 424 | + areq->nbytes), |
---|
276 | 425 | DMA_TO_DEVICE); |
---|
277 | 426 | if (!req->nents) { |
---|
278 | 427 | ret = -ENOMEM; |
---|
.. | .. |
---|
282 | 431 | for_each_sg(areq->src, sg, req->nents, i) { |
---|
283 | 432 | int sglen = sg_dma_len(sg); |
---|
284 | 433 | |
---|
| 434 | + if (unlikely(sglen <= skip)) { |
---|
| 435 | + skip -= sglen; |
---|
| 436 | + continue; |
---|
| 437 | + } |
---|
| 438 | + |
---|
285 | 439 | /* Do not overflow the request */ |
---|
286 | | - if (queued < sglen) |
---|
| 440 | + if ((queued + skip) <= sglen) |
---|
287 | 441 | sglen = queued; |
---|
| 442 | + else |
---|
| 443 | + sglen -= skip; |
---|
288 | 444 | |
---|
289 | 445 | cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc, |
---|
290 | | - !(queued - sglen), sg_dma_address(sg), |
---|
291 | | - sglen, len, ctx->base.ctxr_dma); |
---|
| 446 | + !(queued - sglen), |
---|
| 447 | + sg_dma_address(sg) + skip, sglen, |
---|
| 448 | + len, ctx->base.ctxr_dma, &dmmy); |
---|
292 | 449 | if (IS_ERR(cdesc)) { |
---|
293 | 450 | ret = PTR_ERR(cdesc); |
---|
294 | 451 | goto unmap_sg; |
---|
295 | 452 | } |
---|
296 | | - n_cdesc++; |
---|
297 | 453 | |
---|
298 | | - if (n_cdesc == 1) |
---|
| 454 | + if (!n_cdesc) |
---|
299 | 455 | first_cdesc = cdesc; |
---|
| 456 | + n_cdesc++; |
---|
300 | 457 | |
---|
301 | 458 | queued -= sglen; |
---|
302 | 459 | if (!queued) |
---|
303 | 460 | break; |
---|
| 461 | + skip = 0; |
---|
304 | 462 | } |
---|
305 | 463 | |
---|
306 | 464 | send_command: |
---|
307 | 465 | /* Setup the context options */ |
---|
308 | | - safexcel_context_control(ctx, req, first_cdesc, req->state_sz); |
---|
| 466 | + safexcel_context_control(ctx, req, first_cdesc); |
---|
309 | 467 | |
---|
310 | 468 | /* Add the token */ |
---|
311 | | - safexcel_hash_token(first_cdesc, len, req->state_sz); |
---|
| 469 | + safexcel_hash_token(first_cdesc, len, req->digest_sz, ctx->cbcmac); |
---|
312 | 470 | |
---|
313 | | - req->result_dma = dma_map_single(priv->dev, req->state, req->state_sz, |
---|
| 471 | + req->result_dma = dma_map_single(priv->dev, req->state, req->digest_sz, |
---|
314 | 472 | DMA_FROM_DEVICE); |
---|
315 | 473 | if (dma_mapping_error(priv->dev, req->result_dma)) { |
---|
316 | 474 | ret = -EINVAL; |
---|
.. | .. |
---|
319 | 477 | |
---|
320 | 478 | /* Add a result descriptor */ |
---|
321 | 479 | rdesc = safexcel_add_rdesc(priv, ring, 1, 1, req->result_dma, |
---|
322 | | - req->state_sz); |
---|
| 480 | + req->digest_sz); |
---|
323 | 481 | if (IS_ERR(rdesc)) { |
---|
324 | 482 | ret = PTR_ERR(rdesc); |
---|
325 | 483 | goto unmap_result; |
---|
.. | .. |
---|
327 | 485 | |
---|
328 | 486 | safexcel_rdr_req_set(priv, ring, rdesc, &areq->base); |
---|
329 | 487 | |
---|
330 | | - req->processed[0] += len; |
---|
331 | | - if (req->processed[0] < len) |
---|
332 | | - req->processed[1]++; |
---|
| 488 | + req->processed += len - extra; |
---|
333 | 489 | |
---|
334 | 490 | *commands = n_cdesc; |
---|
335 | 491 | *results = 1; |
---|
336 | 492 | return 0; |
---|
337 | 493 | |
---|
338 | 494 | unmap_result: |
---|
339 | | - dma_unmap_single(priv->dev, req->result_dma, req->state_sz, |
---|
| 495 | + dma_unmap_single(priv->dev, req->result_dma, req->digest_sz, |
---|
340 | 496 | DMA_FROM_DEVICE); |
---|
341 | 497 | unmap_sg: |
---|
342 | | - dma_unmap_sg(priv->dev, areq->src, req->nents, DMA_TO_DEVICE); |
---|
| 498 | + if (req->nents) { |
---|
| 499 | + dma_unmap_sg(priv->dev, areq->src, req->nents, DMA_TO_DEVICE); |
---|
| 500 | + req->nents = 0; |
---|
| 501 | + } |
---|
343 | 502 | cdesc_rollback: |
---|
344 | 503 | for (i = 0; i < n_cdesc; i++) |
---|
345 | 504 | safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr); |
---|
.. | .. |
---|
347 | 506 | if (req->cache_dma) { |
---|
348 | 507 | dma_unmap_single(priv->dev, req->cache_dma, req->cache_sz, |
---|
349 | 508 | DMA_TO_DEVICE); |
---|
| 509 | + req->cache_dma = 0; |
---|
350 | 510 | req->cache_sz = 0; |
---|
351 | 511 | } |
---|
352 | 512 | |
---|
353 | 513 | return ret; |
---|
354 | | -} |
---|
355 | | - |
---|
356 | | -static inline bool safexcel_ahash_needs_inv_get(struct ahash_request *areq) |
---|
357 | | -{ |
---|
358 | | - struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq)); |
---|
359 | | - struct safexcel_ahash_req *req = ahash_request_ctx(areq); |
---|
360 | | - unsigned int state_w_sz = req->state_sz / sizeof(u32); |
---|
361 | | - u64 processed; |
---|
362 | | - int i; |
---|
363 | | - |
---|
364 | | - processed = req->processed[0] / EIP197_COUNTER_BLOCK_SIZE; |
---|
365 | | - processed += (0xffffffff / EIP197_COUNTER_BLOCK_SIZE) * req->processed[1]; |
---|
366 | | - |
---|
367 | | - for (i = 0; i < state_w_sz; i++) |
---|
368 | | - if (ctx->base.ctxr->data[i] != cpu_to_le32(req->state[i])) |
---|
369 | | - return true; |
---|
370 | | - |
---|
371 | | - if (ctx->base.ctxr->data[state_w_sz] != cpu_to_le32(processed)) |
---|
372 | | - return true; |
---|
373 | | - |
---|
374 | | - return false; |
---|
375 | 514 | } |
---|
376 | 515 | |
---|
377 | 516 | static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv, |
---|
.. | .. |
---|
453 | 592 | struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq)); |
---|
454 | 593 | int ret; |
---|
455 | 594 | |
---|
456 | | - ret = safexcel_invalidate_cache(async, ctx->priv, |
---|
| 595 | + ret = safexcel_invalidate_cache(async, ctx->base.priv, |
---|
457 | 596 | ctx->base.ctxr_dma, ring); |
---|
458 | 597 | if (unlikely(ret)) |
---|
459 | 598 | return ret; |
---|
.. | .. |
---|
482 | 621 | static int safexcel_ahash_exit_inv(struct crypto_tfm *tfm) |
---|
483 | 622 | { |
---|
484 | 623 | struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm); |
---|
485 | | - struct safexcel_crypto_priv *priv = ctx->priv; |
---|
| 624 | + struct safexcel_crypto_priv *priv = ctx->base.priv; |
---|
486 | 625 | EIP197_REQUEST_ON_STACK(req, ahash, EIP197_AHASH_REQ_SIZE); |
---|
487 | 626 | struct safexcel_ahash_req *rctx = ahash_request_ctx(req); |
---|
488 | 627 | struct safexcel_inv_result result = {}; |
---|
.. | .. |
---|
524 | 663 | static int safexcel_ahash_cache(struct ahash_request *areq) |
---|
525 | 664 | { |
---|
526 | 665 | struct safexcel_ahash_req *req = ahash_request_ctx(areq); |
---|
527 | | - struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq); |
---|
528 | | - u64 queued, cache_len; |
---|
| 666 | + u64 cache_len; |
---|
529 | 667 | |
---|
530 | | - /* queued: everything accepted by the driver which will be handled by |
---|
531 | | - * the next send() calls. |
---|
532 | | - * tot sz handled by update() - tot sz handled by send() |
---|
533 | | - */ |
---|
534 | | - queued = safexcel_queued_len(req); |
---|
535 | 668 | /* cache_len: everything accepted by the driver but not sent yet, |
---|
536 | 669 | * tot sz handled by update() - last req sz - tot sz handled by send() |
---|
537 | 670 | */ |
---|
538 | | - cache_len = queued - areq->nbytes; |
---|
| 671 | + cache_len = safexcel_queued_len(req); |
---|
539 | 672 | |
---|
540 | 673 | /* |
---|
541 | 674 | * In case there isn't enough bytes to proceed (less than a |
---|
542 | 675 | * block size), cache the data until we have enough. |
---|
543 | 676 | */ |
---|
544 | | - if (cache_len + areq->nbytes <= crypto_ahash_blocksize(ahash)) { |
---|
| 677 | + if (cache_len + areq->nbytes <= HASH_CACHE_SIZE) { |
---|
545 | 678 | sg_pcopy_to_buffer(areq->src, sg_nents(areq->src), |
---|
546 | 679 | req->cache + cache_len, |
---|
547 | 680 | areq->nbytes, 0); |
---|
548 | | - return areq->nbytes; |
---|
| 681 | + return 0; |
---|
549 | 682 | } |
---|
550 | 683 | |
---|
551 | 684 | /* We couldn't cache all the data */ |
---|
.. | .. |
---|
556 | 689 | { |
---|
557 | 690 | struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq)); |
---|
558 | 691 | struct safexcel_ahash_req *req = ahash_request_ctx(areq); |
---|
559 | | - struct safexcel_crypto_priv *priv = ctx->priv; |
---|
| 692 | + struct safexcel_crypto_priv *priv = ctx->base.priv; |
---|
560 | 693 | int ret, ring; |
---|
561 | 694 | |
---|
562 | 695 | req->needs_inv = false; |
---|
563 | 696 | |
---|
564 | 697 | if (ctx->base.ctxr) { |
---|
565 | 698 | if (priv->flags & EIP197_TRC_CACHE && !ctx->base.needs_inv && |
---|
566 | | - (req->processed[0] || req->processed[1]) && |
---|
567 | | - req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) |
---|
568 | | - /* We're still setting needs_inv here, even though it is |
---|
| 699 | + /* invalidate for *any* non-XCBC continuation */ |
---|
| 700 | + ((req->not_first && !req->xcbcmac) || |
---|
| 701 | + /* invalidate if (i)digest changed */ |
---|
| 702 | + memcmp(ctx->base.ctxr->data, req->state, req->state_sz) || |
---|
| 703 | + /* invalidate for HMAC finish with odigest changed */ |
---|
| 704 | + (req->finish && req->hmac && |
---|
| 705 | + memcmp(ctx->base.ctxr->data + (req->state_sz>>2), |
---|
| 706 | + &ctx->base.opad, req->state_sz)))) |
---|
| 707 | + /* |
---|
| 708 | + * We're still setting needs_inv here, even though it is |
---|
569 | 709 | * cleared right away, because the needs_inv flag can be |
---|
570 | 710 | * set in other functions and we want to keep the same |
---|
571 | 711 | * logic. |
---|
572 | 712 | */ |
---|
573 | | - ctx->base.needs_inv = safexcel_ahash_needs_inv_get(areq); |
---|
| 713 | + ctx->base.needs_inv = true; |
---|
574 | 714 | |
---|
575 | 715 | if (ctx->base.needs_inv) { |
---|
576 | 716 | ctx->base.needs_inv = false; |
---|
.. | .. |
---|
584 | 724 | if (!ctx->base.ctxr) |
---|
585 | 725 | return -ENOMEM; |
---|
586 | 726 | } |
---|
| 727 | + req->not_first = true; |
---|
587 | 728 | |
---|
588 | 729 | ring = ctx->base.ring; |
---|
589 | 730 | |
---|
.. | .. |
---|
600 | 741 | static int safexcel_ahash_update(struct ahash_request *areq) |
---|
601 | 742 | { |
---|
602 | 743 | struct safexcel_ahash_req *req = ahash_request_ctx(areq); |
---|
603 | | - struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq); |
---|
| 744 | + int ret; |
---|
604 | 745 | |
---|
605 | 746 | /* If the request is 0 length, do nothing */ |
---|
606 | 747 | if (!areq->nbytes) |
---|
607 | 748 | return 0; |
---|
608 | 749 | |
---|
609 | | - req->len[0] += areq->nbytes; |
---|
610 | | - if (req->len[0] < areq->nbytes) |
---|
611 | | - req->len[1]++; |
---|
| 750 | + /* Add request to the cache if it fits */ |
---|
| 751 | + ret = safexcel_ahash_cache(areq); |
---|
612 | 752 | |
---|
613 | | - safexcel_ahash_cache(areq); |
---|
| 753 | + /* Update total request length */ |
---|
| 754 | + req->len += areq->nbytes; |
---|
614 | 755 | |
---|
615 | | - /* |
---|
616 | | - * We're not doing partial updates when performing an hmac request. |
---|
617 | | - * Everything will be handled by the final() call. |
---|
| 756 | + /* If not all data could fit into the cache, go process the excess. |
---|
| 757 | + * Also go process immediately for an HMAC IV precompute, which |
---|
| 758 | + * will never be finished at all, but needs to be processed anyway. |
---|
618 | 759 | */ |
---|
619 | | - if (req->digest == CONTEXT_CONTROL_DIGEST_HMAC) |
---|
620 | | - return 0; |
---|
621 | | - |
---|
622 | | - if (req->hmac) |
---|
623 | | - return safexcel_ahash_enqueue(areq); |
---|
624 | | - |
---|
625 | | - if (!req->last_req && |
---|
626 | | - safexcel_queued_len(req) > crypto_ahash_blocksize(ahash)) |
---|
| 760 | + if ((ret && !req->finish) || req->last_req) |
---|
627 | 761 | return safexcel_ahash_enqueue(areq); |
---|
628 | 762 | |
---|
629 | 763 | return 0; |
---|
.. | .. |
---|
634 | 768 | struct safexcel_ahash_req *req = ahash_request_ctx(areq); |
---|
635 | 769 | struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq)); |
---|
636 | 770 | |
---|
637 | | - req->last_req = true; |
---|
638 | 771 | req->finish = true; |
---|
639 | 772 | |
---|
640 | | - /* If we have an overall 0 length request */ |
---|
641 | | - if (!req->len[0] && !req->len[1] && !areq->nbytes) { |
---|
| 773 | + if (unlikely(!req->len && !areq->nbytes)) { |
---|
| 774 | + /* |
---|
| 775 | + * If we have an overall 0 length *hash* request: |
---|
| 776 | + * The HW cannot do 0 length hash, so we provide the correct |
---|
| 777 | + * result directly here. |
---|
| 778 | + */ |
---|
642 | 779 | if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5) |
---|
643 | 780 | memcpy(areq->result, md5_zero_message_hash, |
---|
644 | 781 | MD5_DIGEST_SIZE); |
---|
.. | .. |
---|
657 | 794 | else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA512) |
---|
658 | 795 | memcpy(areq->result, sha512_zero_message_hash, |
---|
659 | 796 | SHA512_DIGEST_SIZE); |
---|
| 797 | + else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SM3) { |
---|
| 798 | + memcpy(areq->result, |
---|
| 799 | + EIP197_SM3_ZEROM_HASH, SM3_DIGEST_SIZE); |
---|
| 800 | + } |
---|
660 | 801 | |
---|
661 | 802 | return 0; |
---|
| 803 | + } else if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM && |
---|
| 804 | + ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5 && |
---|
| 805 | + req->len == sizeof(u32) && !areq->nbytes)) { |
---|
| 806 | + /* Zero length CRC32 */ |
---|
| 807 | + memcpy(areq->result, &ctx->base.ipad, sizeof(u32)); |
---|
| 808 | + return 0; |
---|
| 809 | + } else if (unlikely(ctx->cbcmac && req->len == AES_BLOCK_SIZE && |
---|
| 810 | + !areq->nbytes)) { |
---|
| 811 | + /* Zero length CBC MAC */ |
---|
| 812 | + memset(areq->result, 0, AES_BLOCK_SIZE); |
---|
| 813 | + return 0; |
---|
| 814 | + } else if (unlikely(req->xcbcmac && req->len == AES_BLOCK_SIZE && |
---|
| 815 | + !areq->nbytes)) { |
---|
| 816 | + /* Zero length (X)CBC/CMAC */ |
---|
| 817 | + int i; |
---|
| 818 | + |
---|
| 819 | + for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) { |
---|
| 820 | + u32 *result = (void *)areq->result; |
---|
| 821 | + |
---|
| 822 | + /* K3 */ |
---|
| 823 | + result[i] = swab32(ctx->base.ipad.word[i + 4]); |
---|
| 824 | + } |
---|
| 825 | + areq->result[0] ^= 0x80; // 10- padding |
---|
| 826 | + crypto_cipher_encrypt_one(ctx->kaes, areq->result, areq->result); |
---|
| 827 | + return 0; |
---|
| 828 | + } else if (unlikely(req->hmac && |
---|
| 829 | + (req->len == req->block_sz) && |
---|
| 830 | + !areq->nbytes)) { |
---|
| 831 | + /* |
---|
| 832 | + * If we have an overall 0 length *HMAC* request: |
---|
| 833 | + * For HMAC, we need to finalize the inner digest |
---|
| 834 | + * and then perform the outer hash. |
---|
| 835 | + */ |
---|
| 836 | + |
---|
| 837 | + /* generate pad block in the cache */ |
---|
| 838 | + /* start with a hash block of all zeroes */ |
---|
| 839 | + memset(req->cache, 0, req->block_sz); |
---|
| 840 | + /* set the first byte to 0x80 to 'append a 1 bit' */ |
---|
| 841 | + req->cache[0] = 0x80; |
---|
| 842 | + /* add the length in bits in the last 2 bytes */ |
---|
| 843 | + if (req->len_is_le) { |
---|
| 844 | + /* Little endian length word (e.g. MD5) */ |
---|
| 845 | + req->cache[req->block_sz-8] = (req->block_sz << 3) & |
---|
| 846 | + 255; |
---|
| 847 | + req->cache[req->block_sz-7] = (req->block_sz >> 5); |
---|
| 848 | + } else { |
---|
| 849 | + /* Big endian length word (e.g. any SHA) */ |
---|
| 850 | + req->cache[req->block_sz-2] = (req->block_sz >> 5); |
---|
| 851 | + req->cache[req->block_sz-1] = (req->block_sz << 3) & |
---|
| 852 | + 255; |
---|
| 853 | + } |
---|
| 854 | + |
---|
| 855 | + req->len += req->block_sz; /* plus 1 hash block */ |
---|
| 856 | + |
---|
| 857 | + /* Set special zero-length HMAC flag */ |
---|
| 858 | + req->hmac_zlen = true; |
---|
| 859 | + |
---|
| 860 | + /* Finalize HMAC */ |
---|
| 861 | + req->digest = CONTEXT_CONTROL_DIGEST_HMAC; |
---|
| 862 | + } else if (req->hmac) { |
---|
| 863 | + /* Finalize HMAC */ |
---|
| 864 | + req->digest = CONTEXT_CONTROL_DIGEST_HMAC; |
---|
662 | 865 | } |
---|
663 | 866 | |
---|
664 | 867 | return safexcel_ahash_enqueue(areq); |
---|
.. | .. |
---|
668 | 871 | { |
---|
669 | 872 | struct safexcel_ahash_req *req = ahash_request_ctx(areq); |
---|
670 | 873 | |
---|
671 | | - req->last_req = true; |
---|
672 | 874 | req->finish = true; |
---|
673 | 875 | |
---|
674 | 876 | safexcel_ahash_update(areq); |
---|
.. | .. |
---|
677 | 879 | |
---|
678 | 880 | static int safexcel_ahash_export(struct ahash_request *areq, void *out) |
---|
679 | 881 | { |
---|
680 | | - struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq); |
---|
681 | 882 | struct safexcel_ahash_req *req = ahash_request_ctx(areq); |
---|
682 | 883 | struct safexcel_ahash_export_state *export = out; |
---|
683 | 884 | |
---|
684 | | - export->len[0] = req->len[0]; |
---|
685 | | - export->len[1] = req->len[1]; |
---|
686 | | - export->processed[0] = req->processed[0]; |
---|
687 | | - export->processed[1] = req->processed[1]; |
---|
| 885 | + export->len = req->len; |
---|
| 886 | + export->processed = req->processed; |
---|
688 | 887 | |
---|
689 | 888 | export->digest = req->digest; |
---|
690 | 889 | |
---|
691 | 890 | memcpy(export->state, req->state, req->state_sz); |
---|
692 | | - memcpy(export->cache, req->cache, crypto_ahash_blocksize(ahash)); |
---|
| 891 | + memcpy(export->cache, req->cache, HASH_CACHE_SIZE); |
---|
693 | 892 | |
---|
694 | 893 | return 0; |
---|
695 | 894 | } |
---|
696 | 895 | |
---|
697 | 896 | static int safexcel_ahash_import(struct ahash_request *areq, const void *in) |
---|
698 | 897 | { |
---|
699 | | - struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq); |
---|
700 | 898 | struct safexcel_ahash_req *req = ahash_request_ctx(areq); |
---|
701 | 899 | const struct safexcel_ahash_export_state *export = in; |
---|
702 | 900 | int ret; |
---|
.. | .. |
---|
705 | 903 | if (ret) |
---|
706 | 904 | return ret; |
---|
707 | 905 | |
---|
708 | | - req->len[0] = export->len[0]; |
---|
709 | | - req->len[1] = export->len[1]; |
---|
710 | | - req->processed[0] = export->processed[0]; |
---|
711 | | - req->processed[1] = export->processed[1]; |
---|
| 906 | + req->len = export->len; |
---|
| 907 | + req->processed = export->processed; |
---|
712 | 908 | |
---|
713 | 909 | req->digest = export->digest; |
---|
714 | 910 | |
---|
715 | | - memcpy(req->cache, export->cache, crypto_ahash_blocksize(ahash)); |
---|
| 911 | + memcpy(req->cache, export->cache, HASH_CACHE_SIZE); |
---|
716 | 912 | memcpy(req->state, export->state, req->state_sz); |
---|
717 | 913 | |
---|
718 | 914 | return 0; |
---|
.. | .. |
---|
725 | 921 | container_of(__crypto_ahash_alg(tfm->__crt_alg), |
---|
726 | 922 | struct safexcel_alg_template, alg.ahash); |
---|
727 | 923 | |
---|
728 | | - ctx->priv = tmpl->priv; |
---|
| 924 | + ctx->base.priv = tmpl->priv; |
---|
729 | 925 | ctx->base.send = safexcel_ahash_send; |
---|
730 | 926 | ctx->base.handle_result = safexcel_handle_result; |
---|
| 927 | + ctx->fb_do_setkey = false; |
---|
731 | 928 | |
---|
732 | 929 | crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm), |
---|
733 | 930 | sizeof(struct safexcel_ahash_req)); |
---|
.. | .. |
---|
741 | 938 | |
---|
742 | 939 | memset(req, 0, sizeof(*req)); |
---|
743 | 940 | |
---|
744 | | - req->state[0] = SHA1_H0; |
---|
745 | | - req->state[1] = SHA1_H1; |
---|
746 | | - req->state[2] = SHA1_H2; |
---|
747 | | - req->state[3] = SHA1_H3; |
---|
748 | | - req->state[4] = SHA1_H4; |
---|
749 | | - |
---|
750 | 941 | ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1; |
---|
751 | 942 | req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED; |
---|
752 | 943 | req->state_sz = SHA1_DIGEST_SIZE; |
---|
| 944 | + req->digest_sz = SHA1_DIGEST_SIZE; |
---|
| 945 | + req->block_sz = SHA1_BLOCK_SIZE; |
---|
753 | 946 | |
---|
754 | 947 | return 0; |
---|
755 | 948 | } |
---|
.. | .. |
---|
767 | 960 | static void safexcel_ahash_cra_exit(struct crypto_tfm *tfm) |
---|
768 | 961 | { |
---|
769 | 962 | struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm); |
---|
770 | | - struct safexcel_crypto_priv *priv = ctx->priv; |
---|
| 963 | + struct safexcel_crypto_priv *priv = ctx->base.priv; |
---|
771 | 964 | int ret; |
---|
772 | 965 | |
---|
773 | 966 | /* context not allocated, skip invalidation */ |
---|
.. | .. |
---|
786 | 979 | |
---|
787 | 980 | struct safexcel_alg_template safexcel_alg_sha1 = { |
---|
788 | 981 | .type = SAFEXCEL_ALG_TYPE_AHASH, |
---|
789 | | - .engines = EIP97IES | EIP197B | EIP197D, |
---|
| 982 | + .algo_mask = SAFEXCEL_ALG_SHA1, |
---|
790 | 983 | .alg.ahash = { |
---|
791 | 984 | .init = safexcel_sha1_init, |
---|
792 | 985 | .update = safexcel_ahash_update, |
---|
.. | .. |
---|
801 | 994 | .base = { |
---|
802 | 995 | .cra_name = "sha1", |
---|
803 | 996 | .cra_driver_name = "safexcel-sha1", |
---|
804 | | - .cra_priority = 300, |
---|
| 997 | + .cra_priority = SAFEXCEL_CRA_PRIORITY, |
---|
805 | 998 | .cra_flags = CRYPTO_ALG_ASYNC | |
---|
| 999 | + CRYPTO_ALG_ALLOCATES_MEMORY | |
---|
806 | 1000 | CRYPTO_ALG_KERN_DRIVER_ONLY, |
---|
807 | 1001 | .cra_blocksize = SHA1_BLOCK_SIZE, |
---|
808 | 1002 | .cra_ctxsize = sizeof(struct safexcel_ahash_ctx), |
---|
.. | .. |
---|
816 | 1010 | |
---|
817 | 1011 | static int safexcel_hmac_sha1_init(struct ahash_request *areq) |
---|
818 | 1012 | { |
---|
| 1013 | + struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq)); |
---|
819 | 1014 | struct safexcel_ahash_req *req = ahash_request_ctx(areq); |
---|
820 | 1015 | |
---|
821 | | - safexcel_sha1_init(areq); |
---|
822 | | - req->digest = CONTEXT_CONTROL_DIGEST_HMAC; |
---|
| 1016 | + memset(req, 0, sizeof(*req)); |
---|
| 1017 | + |
---|
| 1018 | + /* Start from ipad precompute */ |
---|
| 1019 | + memcpy(req->state, &ctx->base.ipad, SHA1_DIGEST_SIZE); |
---|
| 1020 | + /* Already processed the key^ipad part now! */ |
---|
| 1021 | + req->len = SHA1_BLOCK_SIZE; |
---|
| 1022 | + req->processed = SHA1_BLOCK_SIZE; |
---|
| 1023 | + |
---|
| 1024 | + ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1; |
---|
| 1025 | + req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED; |
---|
| 1026 | + req->state_sz = SHA1_DIGEST_SIZE; |
---|
| 1027 | + req->digest_sz = SHA1_DIGEST_SIZE; |
---|
| 1028 | + req->block_sz = SHA1_BLOCK_SIZE; |
---|
| 1029 | + req->hmac = true; |
---|
| 1030 | + |
---|
823 | 1031 | return 0; |
---|
824 | 1032 | } |
---|
825 | 1033 | |
---|
.. | .. |
---|
878 | 1086 | } |
---|
879 | 1087 | |
---|
880 | 1088 | /* Avoid leaking */ |
---|
881 | | - memzero_explicit(keydup, keylen); |
---|
882 | | - kfree(keydup); |
---|
| 1089 | + kfree_sensitive(keydup); |
---|
883 | 1090 | |
---|
884 | 1091 | if (ret) |
---|
885 | 1092 | return ret; |
---|
.. | .. |
---|
931 | 1138 | return crypto_ahash_export(areq, state); |
---|
932 | 1139 | } |
---|
933 | 1140 | |
---|
934 | | -int safexcel_hmac_setkey(const char *alg, const u8 *key, unsigned int keylen, |
---|
935 | | - void *istate, void *ostate) |
---|
| 1141 | +static int __safexcel_hmac_setkey(const char *alg, const u8 *key, |
---|
| 1142 | + unsigned int keylen, |
---|
| 1143 | + void *istate, void *ostate) |
---|
936 | 1144 | { |
---|
937 | 1145 | struct ahash_request *areq; |
---|
938 | 1146 | struct crypto_ahash *tfm; |
---|
.. | .. |
---|
981 | 1189 | return ret; |
---|
982 | 1190 | } |
---|
983 | 1191 | |
---|
| 1192 | +int safexcel_hmac_setkey(struct safexcel_context *base, const u8 *key, |
---|
| 1193 | + unsigned int keylen, const char *alg, |
---|
| 1194 | + unsigned int state_sz) |
---|
| 1195 | +{ |
---|
| 1196 | + struct safexcel_crypto_priv *priv = base->priv; |
---|
| 1197 | + struct safexcel_ahash_export_state istate, ostate; |
---|
| 1198 | + int ret; |
---|
| 1199 | + |
---|
| 1200 | + ret = __safexcel_hmac_setkey(alg, key, keylen, &istate, &ostate); |
---|
| 1201 | + if (ret) |
---|
| 1202 | + return ret; |
---|
| 1203 | + |
---|
| 1204 | + if (priv->flags & EIP197_TRC_CACHE && base->ctxr && |
---|
| 1205 | + (memcmp(&base->ipad, istate.state, state_sz) || |
---|
| 1206 | + memcmp(&base->opad, ostate.state, state_sz))) |
---|
| 1207 | + base->needs_inv = true; |
---|
| 1208 | + |
---|
| 1209 | + memcpy(&base->ipad, &istate.state, state_sz); |
---|
| 1210 | + memcpy(&base->opad, &ostate.state, state_sz); |
---|
| 1211 | + |
---|
| 1212 | + return 0; |
---|
| 1213 | +} |
---|
| 1214 | + |
---|
984 | 1215 | static int safexcel_hmac_alg_setkey(struct crypto_ahash *tfm, const u8 *key, |
---|
985 | 1216 | unsigned int keylen, const char *alg, |
---|
986 | 1217 | unsigned int state_sz) |
---|
987 | 1218 | { |
---|
988 | | - struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm)); |
---|
989 | | - struct safexcel_crypto_priv *priv = ctx->priv; |
---|
990 | | - struct safexcel_ahash_export_state istate, ostate; |
---|
991 | | - int ret, i; |
---|
| 1219 | + struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm); |
---|
992 | 1220 | |
---|
993 | | - ret = safexcel_hmac_setkey(alg, key, keylen, &istate, &ostate); |
---|
994 | | - if (ret) |
---|
995 | | - return ret; |
---|
996 | | - |
---|
997 | | - if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr) { |
---|
998 | | - for (i = 0; i < state_sz / sizeof(u32); i++) { |
---|
999 | | - if (ctx->ipad[i] != le32_to_cpu(istate.state[i]) || |
---|
1000 | | - ctx->opad[i] != le32_to_cpu(ostate.state[i])) { |
---|
1001 | | - ctx->base.needs_inv = true; |
---|
1002 | | - break; |
---|
1003 | | - } |
---|
1004 | | - } |
---|
1005 | | - } |
---|
1006 | | - |
---|
1007 | | - memcpy(ctx->ipad, &istate.state, state_sz); |
---|
1008 | | - memcpy(ctx->opad, &ostate.state, state_sz); |
---|
1009 | | - |
---|
1010 | | - return 0; |
---|
| 1221 | + return safexcel_hmac_setkey(&ctx->base, key, keylen, alg, state_sz); |
---|
1011 | 1222 | } |
---|
1012 | 1223 | |
---|
1013 | 1224 | static int safexcel_hmac_sha1_setkey(struct crypto_ahash *tfm, const u8 *key, |
---|
.. | .. |
---|
1019 | 1230 | |
---|
1020 | 1231 | struct safexcel_alg_template safexcel_alg_hmac_sha1 = { |
---|
1021 | 1232 | .type = SAFEXCEL_ALG_TYPE_AHASH, |
---|
1022 | | - .engines = EIP97IES | EIP197B | EIP197D, |
---|
| 1233 | + .algo_mask = SAFEXCEL_ALG_SHA1, |
---|
1023 | 1234 | .alg.ahash = { |
---|
1024 | 1235 | .init = safexcel_hmac_sha1_init, |
---|
1025 | 1236 | .update = safexcel_ahash_update, |
---|
.. | .. |
---|
1035 | 1246 | .base = { |
---|
1036 | 1247 | .cra_name = "hmac(sha1)", |
---|
1037 | 1248 | .cra_driver_name = "safexcel-hmac-sha1", |
---|
1038 | | - .cra_priority = 300, |
---|
| 1249 | + .cra_priority = SAFEXCEL_CRA_PRIORITY, |
---|
1039 | 1250 | .cra_flags = CRYPTO_ALG_ASYNC | |
---|
| 1251 | + CRYPTO_ALG_ALLOCATES_MEMORY | |
---|
1040 | 1252 | CRYPTO_ALG_KERN_DRIVER_ONLY, |
---|
1041 | 1253 | .cra_blocksize = SHA1_BLOCK_SIZE, |
---|
1042 | 1254 | .cra_ctxsize = sizeof(struct safexcel_ahash_ctx), |
---|
.. | .. |
---|
1055 | 1267 | |
---|
1056 | 1268 | memset(req, 0, sizeof(*req)); |
---|
1057 | 1269 | |
---|
1058 | | - req->state[0] = SHA256_H0; |
---|
1059 | | - req->state[1] = SHA256_H1; |
---|
1060 | | - req->state[2] = SHA256_H2; |
---|
1061 | | - req->state[3] = SHA256_H3; |
---|
1062 | | - req->state[4] = SHA256_H4; |
---|
1063 | | - req->state[5] = SHA256_H5; |
---|
1064 | | - req->state[6] = SHA256_H6; |
---|
1065 | | - req->state[7] = SHA256_H7; |
---|
1066 | | - |
---|
1067 | 1270 | ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256; |
---|
1068 | 1271 | req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED; |
---|
1069 | 1272 | req->state_sz = SHA256_DIGEST_SIZE; |
---|
| 1273 | + req->digest_sz = SHA256_DIGEST_SIZE; |
---|
| 1274 | + req->block_sz = SHA256_BLOCK_SIZE; |
---|
1070 | 1275 | |
---|
1071 | 1276 | return 0; |
---|
1072 | 1277 | } |
---|
.. | .. |
---|
1083 | 1288 | |
---|
1084 | 1289 | struct safexcel_alg_template safexcel_alg_sha256 = { |
---|
1085 | 1290 | .type = SAFEXCEL_ALG_TYPE_AHASH, |
---|
1086 | | - .engines = EIP97IES | EIP197B | EIP197D, |
---|
| 1291 | + .algo_mask = SAFEXCEL_ALG_SHA2_256, |
---|
1087 | 1292 | .alg.ahash = { |
---|
1088 | 1293 | .init = safexcel_sha256_init, |
---|
1089 | 1294 | .update = safexcel_ahash_update, |
---|
.. | .. |
---|
1098 | 1303 | .base = { |
---|
1099 | 1304 | .cra_name = "sha256", |
---|
1100 | 1305 | .cra_driver_name = "safexcel-sha256", |
---|
1101 | | - .cra_priority = 300, |
---|
| 1306 | + .cra_priority = SAFEXCEL_CRA_PRIORITY, |
---|
1102 | 1307 | .cra_flags = CRYPTO_ALG_ASYNC | |
---|
| 1308 | + CRYPTO_ALG_ALLOCATES_MEMORY | |
---|
1103 | 1309 | CRYPTO_ALG_KERN_DRIVER_ONLY, |
---|
1104 | 1310 | .cra_blocksize = SHA256_BLOCK_SIZE, |
---|
1105 | 1311 | .cra_ctxsize = sizeof(struct safexcel_ahash_ctx), |
---|
.. | .. |
---|
1118 | 1324 | |
---|
1119 | 1325 | memset(req, 0, sizeof(*req)); |
---|
1120 | 1326 | |
---|
1121 | | - req->state[0] = SHA224_H0; |
---|
1122 | | - req->state[1] = SHA224_H1; |
---|
1123 | | - req->state[2] = SHA224_H2; |
---|
1124 | | - req->state[3] = SHA224_H3; |
---|
1125 | | - req->state[4] = SHA224_H4; |
---|
1126 | | - req->state[5] = SHA224_H5; |
---|
1127 | | - req->state[6] = SHA224_H6; |
---|
1128 | | - req->state[7] = SHA224_H7; |
---|
1129 | | - |
---|
1130 | 1327 | ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224; |
---|
1131 | 1328 | req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED; |
---|
1132 | 1329 | req->state_sz = SHA256_DIGEST_SIZE; |
---|
| 1330 | + req->digest_sz = SHA256_DIGEST_SIZE; |
---|
| 1331 | + req->block_sz = SHA256_BLOCK_SIZE; |
---|
1133 | 1332 | |
---|
1134 | 1333 | return 0; |
---|
1135 | 1334 | } |
---|
.. | .. |
---|
1146 | 1345 | |
---|
1147 | 1346 | struct safexcel_alg_template safexcel_alg_sha224 = { |
---|
1148 | 1347 | .type = SAFEXCEL_ALG_TYPE_AHASH, |
---|
1149 | | - .engines = EIP97IES | EIP197B | EIP197D, |
---|
| 1348 | + .algo_mask = SAFEXCEL_ALG_SHA2_256, |
---|
1150 | 1349 | .alg.ahash = { |
---|
1151 | 1350 | .init = safexcel_sha224_init, |
---|
1152 | 1351 | .update = safexcel_ahash_update, |
---|
.. | .. |
---|
1161 | 1360 | .base = { |
---|
1162 | 1361 | .cra_name = "sha224", |
---|
1163 | 1362 | .cra_driver_name = "safexcel-sha224", |
---|
1164 | | - .cra_priority = 300, |
---|
| 1363 | + .cra_priority = SAFEXCEL_CRA_PRIORITY, |
---|
1165 | 1364 | .cra_flags = CRYPTO_ALG_ASYNC | |
---|
| 1365 | + CRYPTO_ALG_ALLOCATES_MEMORY | |
---|
1166 | 1366 | CRYPTO_ALG_KERN_DRIVER_ONLY, |
---|
1167 | 1367 | .cra_blocksize = SHA224_BLOCK_SIZE, |
---|
1168 | 1368 | .cra_ctxsize = sizeof(struct safexcel_ahash_ctx), |
---|
.. | .. |
---|
1183 | 1383 | |
---|
1184 | 1384 | static int safexcel_hmac_sha224_init(struct ahash_request *areq) |
---|
1185 | 1385 | { |
---|
| 1386 | + struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq)); |
---|
1186 | 1387 | struct safexcel_ahash_req *req = ahash_request_ctx(areq); |
---|
1187 | 1388 | |
---|
1188 | | - safexcel_sha224_init(areq); |
---|
1189 | | - req->digest = CONTEXT_CONTROL_DIGEST_HMAC; |
---|
| 1389 | + memset(req, 0, sizeof(*req)); |
---|
| 1390 | + |
---|
| 1391 | + /* Start from ipad precompute */ |
---|
| 1392 | + memcpy(req->state, &ctx->base.ipad, SHA256_DIGEST_SIZE); |
---|
| 1393 | + /* Already processed the key^ipad part now! */ |
---|
| 1394 | + req->len = SHA256_BLOCK_SIZE; |
---|
| 1395 | + req->processed = SHA256_BLOCK_SIZE; |
---|
| 1396 | + |
---|
| 1397 | + ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224; |
---|
| 1398 | + req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED; |
---|
| 1399 | + req->state_sz = SHA256_DIGEST_SIZE; |
---|
| 1400 | + req->digest_sz = SHA256_DIGEST_SIZE; |
---|
| 1401 | + req->block_sz = SHA256_BLOCK_SIZE; |
---|
| 1402 | + req->hmac = true; |
---|
| 1403 | + |
---|
1190 | 1404 | return 0; |
---|
1191 | 1405 | } |
---|
1192 | 1406 | |
---|
.. | .. |
---|
1202 | 1416 | |
---|
1203 | 1417 | struct safexcel_alg_template safexcel_alg_hmac_sha224 = { |
---|
1204 | 1418 | .type = SAFEXCEL_ALG_TYPE_AHASH, |
---|
1205 | | - .engines = EIP97IES | EIP197B | EIP197D, |
---|
| 1419 | + .algo_mask = SAFEXCEL_ALG_SHA2_256, |
---|
1206 | 1420 | .alg.ahash = { |
---|
1207 | 1421 | .init = safexcel_hmac_sha224_init, |
---|
1208 | 1422 | .update = safexcel_ahash_update, |
---|
.. | .. |
---|
1218 | 1432 | .base = { |
---|
1219 | 1433 | .cra_name = "hmac(sha224)", |
---|
1220 | 1434 | .cra_driver_name = "safexcel-hmac-sha224", |
---|
1221 | | - .cra_priority = 300, |
---|
| 1435 | + .cra_priority = SAFEXCEL_CRA_PRIORITY, |
---|
1222 | 1436 | .cra_flags = CRYPTO_ALG_ASYNC | |
---|
| 1437 | + CRYPTO_ALG_ALLOCATES_MEMORY | |
---|
1223 | 1438 | CRYPTO_ALG_KERN_DRIVER_ONLY, |
---|
1224 | 1439 | .cra_blocksize = SHA224_BLOCK_SIZE, |
---|
1225 | 1440 | .cra_ctxsize = sizeof(struct safexcel_ahash_ctx), |
---|
.. | .. |
---|
1240 | 1455 | |
---|
1241 | 1456 | static int safexcel_hmac_sha256_init(struct ahash_request *areq) |
---|
1242 | 1457 | { |
---|
| 1458 | + struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq)); |
---|
1243 | 1459 | struct safexcel_ahash_req *req = ahash_request_ctx(areq); |
---|
1244 | 1460 | |
---|
1245 | | - safexcel_sha256_init(areq); |
---|
1246 | | - req->digest = CONTEXT_CONTROL_DIGEST_HMAC; |
---|
| 1461 | + memset(req, 0, sizeof(*req)); |
---|
| 1462 | + |
---|
| 1463 | + /* Start from ipad precompute */ |
---|
| 1464 | + memcpy(req->state, &ctx->base.ipad, SHA256_DIGEST_SIZE); |
---|
| 1465 | + /* Already processed the key^ipad part now! */ |
---|
| 1466 | + req->len = SHA256_BLOCK_SIZE; |
---|
| 1467 | + req->processed = SHA256_BLOCK_SIZE; |
---|
| 1468 | + |
---|
| 1469 | + ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256; |
---|
| 1470 | + req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED; |
---|
| 1471 | + req->state_sz = SHA256_DIGEST_SIZE; |
---|
| 1472 | + req->digest_sz = SHA256_DIGEST_SIZE; |
---|
| 1473 | + req->block_sz = SHA256_BLOCK_SIZE; |
---|
| 1474 | + req->hmac = true; |
---|
| 1475 | + |
---|
1247 | 1476 | return 0; |
---|
1248 | 1477 | } |
---|
1249 | 1478 | |
---|
.. | .. |
---|
1259 | 1488 | |
---|
1260 | 1489 | struct safexcel_alg_template safexcel_alg_hmac_sha256 = { |
---|
1261 | 1490 | .type = SAFEXCEL_ALG_TYPE_AHASH, |
---|
1262 | | - .engines = EIP97IES | EIP197B | EIP197D, |
---|
| 1491 | + .algo_mask = SAFEXCEL_ALG_SHA2_256, |
---|
1263 | 1492 | .alg.ahash = { |
---|
1264 | 1493 | .init = safexcel_hmac_sha256_init, |
---|
1265 | 1494 | .update = safexcel_ahash_update, |
---|
.. | .. |
---|
1275 | 1504 | .base = { |
---|
1276 | 1505 | .cra_name = "hmac(sha256)", |
---|
1277 | 1506 | .cra_driver_name = "safexcel-hmac-sha256", |
---|
1278 | | - .cra_priority = 300, |
---|
| 1507 | + .cra_priority = SAFEXCEL_CRA_PRIORITY, |
---|
1279 | 1508 | .cra_flags = CRYPTO_ALG_ASYNC | |
---|
| 1509 | + CRYPTO_ALG_ALLOCATES_MEMORY | |
---|
1280 | 1510 | CRYPTO_ALG_KERN_DRIVER_ONLY, |
---|
1281 | 1511 | .cra_blocksize = SHA256_BLOCK_SIZE, |
---|
1282 | 1512 | .cra_ctxsize = sizeof(struct safexcel_ahash_ctx), |
---|
.. | .. |
---|
1295 | 1525 | |
---|
1296 | 1526 | memset(req, 0, sizeof(*req)); |
---|
1297 | 1527 | |
---|
1298 | | - req->state[0] = lower_32_bits(SHA512_H0); |
---|
1299 | | - req->state[1] = upper_32_bits(SHA512_H0); |
---|
1300 | | - req->state[2] = lower_32_bits(SHA512_H1); |
---|
1301 | | - req->state[3] = upper_32_bits(SHA512_H1); |
---|
1302 | | - req->state[4] = lower_32_bits(SHA512_H2); |
---|
1303 | | - req->state[5] = upper_32_bits(SHA512_H2); |
---|
1304 | | - req->state[6] = lower_32_bits(SHA512_H3); |
---|
1305 | | - req->state[7] = upper_32_bits(SHA512_H3); |
---|
1306 | | - req->state[8] = lower_32_bits(SHA512_H4); |
---|
1307 | | - req->state[9] = upper_32_bits(SHA512_H4); |
---|
1308 | | - req->state[10] = lower_32_bits(SHA512_H5); |
---|
1309 | | - req->state[11] = upper_32_bits(SHA512_H5); |
---|
1310 | | - req->state[12] = lower_32_bits(SHA512_H6); |
---|
1311 | | - req->state[13] = upper_32_bits(SHA512_H6); |
---|
1312 | | - req->state[14] = lower_32_bits(SHA512_H7); |
---|
1313 | | - req->state[15] = upper_32_bits(SHA512_H7); |
---|
1314 | | - |
---|
1315 | 1528 | ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512; |
---|
1316 | 1529 | req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED; |
---|
1317 | 1530 | req->state_sz = SHA512_DIGEST_SIZE; |
---|
| 1531 | + req->digest_sz = SHA512_DIGEST_SIZE; |
---|
| 1532 | + req->block_sz = SHA512_BLOCK_SIZE; |
---|
1318 | 1533 | |
---|
1319 | 1534 | return 0; |
---|
1320 | 1535 | } |
---|
.. | .. |
---|
1331 | 1546 | |
---|
1332 | 1547 | struct safexcel_alg_template safexcel_alg_sha512 = { |
---|
1333 | 1548 | .type = SAFEXCEL_ALG_TYPE_AHASH, |
---|
1334 | | - .engines = EIP97IES | EIP197B | EIP197D, |
---|
| 1549 | + .algo_mask = SAFEXCEL_ALG_SHA2_512, |
---|
1335 | 1550 | .alg.ahash = { |
---|
1336 | 1551 | .init = safexcel_sha512_init, |
---|
1337 | 1552 | .update = safexcel_ahash_update, |
---|
.. | .. |
---|
1346 | 1561 | .base = { |
---|
1347 | 1562 | .cra_name = "sha512", |
---|
1348 | 1563 | .cra_driver_name = "safexcel-sha512", |
---|
1349 | | - .cra_priority = 300, |
---|
| 1564 | + .cra_priority = SAFEXCEL_CRA_PRIORITY, |
---|
1350 | 1565 | .cra_flags = CRYPTO_ALG_ASYNC | |
---|
| 1566 | + CRYPTO_ALG_ALLOCATES_MEMORY | |
---|
1351 | 1567 | CRYPTO_ALG_KERN_DRIVER_ONLY, |
---|
1352 | 1568 | .cra_blocksize = SHA512_BLOCK_SIZE, |
---|
1353 | 1569 | .cra_ctxsize = sizeof(struct safexcel_ahash_ctx), |
---|
.. | .. |
---|
1366 | 1582 | |
---|
1367 | 1583 | memset(req, 0, sizeof(*req)); |
---|
1368 | 1584 | |
---|
1369 | | - req->state[0] = lower_32_bits(SHA384_H0); |
---|
1370 | | - req->state[1] = upper_32_bits(SHA384_H0); |
---|
1371 | | - req->state[2] = lower_32_bits(SHA384_H1); |
---|
1372 | | - req->state[3] = upper_32_bits(SHA384_H1); |
---|
1373 | | - req->state[4] = lower_32_bits(SHA384_H2); |
---|
1374 | | - req->state[5] = upper_32_bits(SHA384_H2); |
---|
1375 | | - req->state[6] = lower_32_bits(SHA384_H3); |
---|
1376 | | - req->state[7] = upper_32_bits(SHA384_H3); |
---|
1377 | | - req->state[8] = lower_32_bits(SHA384_H4); |
---|
1378 | | - req->state[9] = upper_32_bits(SHA384_H4); |
---|
1379 | | - req->state[10] = lower_32_bits(SHA384_H5); |
---|
1380 | | - req->state[11] = upper_32_bits(SHA384_H5); |
---|
1381 | | - req->state[12] = lower_32_bits(SHA384_H6); |
---|
1382 | | - req->state[13] = upper_32_bits(SHA384_H6); |
---|
1383 | | - req->state[14] = lower_32_bits(SHA384_H7); |
---|
1384 | | - req->state[15] = upper_32_bits(SHA384_H7); |
---|
1385 | | - |
---|
1386 | 1585 | ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384; |
---|
1387 | 1586 | req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED; |
---|
1388 | 1587 | req->state_sz = SHA512_DIGEST_SIZE; |
---|
| 1588 | + req->digest_sz = SHA512_DIGEST_SIZE; |
---|
| 1589 | + req->block_sz = SHA512_BLOCK_SIZE; |
---|
1389 | 1590 | |
---|
1390 | 1591 | return 0; |
---|
1391 | 1592 | } |
---|
.. | .. |
---|
1402 | 1603 | |
---|
1403 | 1604 | struct safexcel_alg_template safexcel_alg_sha384 = { |
---|
1404 | 1605 | .type = SAFEXCEL_ALG_TYPE_AHASH, |
---|
1405 | | - .engines = EIP97IES | EIP197B | EIP197D, |
---|
| 1606 | + .algo_mask = SAFEXCEL_ALG_SHA2_512, |
---|
1406 | 1607 | .alg.ahash = { |
---|
1407 | 1608 | .init = safexcel_sha384_init, |
---|
1408 | 1609 | .update = safexcel_ahash_update, |
---|
.. | .. |
---|
1417 | 1618 | .base = { |
---|
1418 | 1619 | .cra_name = "sha384", |
---|
1419 | 1620 | .cra_driver_name = "safexcel-sha384", |
---|
1420 | | - .cra_priority = 300, |
---|
| 1621 | + .cra_priority = SAFEXCEL_CRA_PRIORITY, |
---|
1421 | 1622 | .cra_flags = CRYPTO_ALG_ASYNC | |
---|
| 1623 | + CRYPTO_ALG_ALLOCATES_MEMORY | |
---|
1422 | 1624 | CRYPTO_ALG_KERN_DRIVER_ONLY, |
---|
1423 | 1625 | .cra_blocksize = SHA384_BLOCK_SIZE, |
---|
1424 | 1626 | .cra_ctxsize = sizeof(struct safexcel_ahash_ctx), |
---|
.. | .. |
---|
1439 | 1641 | |
---|
1440 | 1642 | static int safexcel_hmac_sha512_init(struct ahash_request *areq) |
---|
1441 | 1643 | { |
---|
| 1644 | + struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq)); |
---|
1442 | 1645 | struct safexcel_ahash_req *req = ahash_request_ctx(areq); |
---|
1443 | 1646 | |
---|
1444 | | - safexcel_sha512_init(areq); |
---|
1445 | | - req->digest = CONTEXT_CONTROL_DIGEST_HMAC; |
---|
| 1647 | + memset(req, 0, sizeof(*req)); |
---|
| 1648 | + |
---|
| 1649 | + /* Start from ipad precompute */ |
---|
| 1650 | + memcpy(req->state, &ctx->base.ipad, SHA512_DIGEST_SIZE); |
---|
| 1651 | + /* Already processed the key^ipad part now! */ |
---|
| 1652 | + req->len = SHA512_BLOCK_SIZE; |
---|
| 1653 | + req->processed = SHA512_BLOCK_SIZE; |
---|
| 1654 | + |
---|
| 1655 | + ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512; |
---|
| 1656 | + req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED; |
---|
| 1657 | + req->state_sz = SHA512_DIGEST_SIZE; |
---|
| 1658 | + req->digest_sz = SHA512_DIGEST_SIZE; |
---|
| 1659 | + req->block_sz = SHA512_BLOCK_SIZE; |
---|
| 1660 | + req->hmac = true; |
---|
| 1661 | + |
---|
1446 | 1662 | return 0; |
---|
1447 | 1663 | } |
---|
1448 | 1664 | |
---|
.. | .. |
---|
1458 | 1674 | |
---|
1459 | 1675 | struct safexcel_alg_template safexcel_alg_hmac_sha512 = { |
---|
1460 | 1676 | .type = SAFEXCEL_ALG_TYPE_AHASH, |
---|
1461 | | - .engines = EIP97IES | EIP197B | EIP197D, |
---|
| 1677 | + .algo_mask = SAFEXCEL_ALG_SHA2_512, |
---|
1462 | 1678 | .alg.ahash = { |
---|
1463 | 1679 | .init = safexcel_hmac_sha512_init, |
---|
1464 | 1680 | .update = safexcel_ahash_update, |
---|
.. | .. |
---|
1474 | 1690 | .base = { |
---|
1475 | 1691 | .cra_name = "hmac(sha512)", |
---|
1476 | 1692 | .cra_driver_name = "safexcel-hmac-sha512", |
---|
1477 | | - .cra_priority = 300, |
---|
| 1693 | + .cra_priority = SAFEXCEL_CRA_PRIORITY, |
---|
1478 | 1694 | .cra_flags = CRYPTO_ALG_ASYNC | |
---|
| 1695 | + CRYPTO_ALG_ALLOCATES_MEMORY | |
---|
1479 | 1696 | CRYPTO_ALG_KERN_DRIVER_ONLY, |
---|
1480 | 1697 | .cra_blocksize = SHA512_BLOCK_SIZE, |
---|
1481 | 1698 | .cra_ctxsize = sizeof(struct safexcel_ahash_ctx), |
---|
.. | .. |
---|
1496 | 1713 | |
---|
1497 | 1714 | static int safexcel_hmac_sha384_init(struct ahash_request *areq) |
---|
1498 | 1715 | { |
---|
| 1716 | + struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq)); |
---|
1499 | 1717 | struct safexcel_ahash_req *req = ahash_request_ctx(areq); |
---|
1500 | 1718 | |
---|
1501 | | - safexcel_sha384_init(areq); |
---|
1502 | | - req->digest = CONTEXT_CONTROL_DIGEST_HMAC; |
---|
| 1719 | + memset(req, 0, sizeof(*req)); |
---|
| 1720 | + |
---|
| 1721 | + /* Start from ipad precompute */ |
---|
| 1722 | + memcpy(req->state, &ctx->base.ipad, SHA512_DIGEST_SIZE); |
---|
| 1723 | + /* Already processed the key^ipad part now! */ |
---|
| 1724 | + req->len = SHA512_BLOCK_SIZE; |
---|
| 1725 | + req->processed = SHA512_BLOCK_SIZE; |
---|
| 1726 | + |
---|
| 1727 | + ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384; |
---|
| 1728 | + req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED; |
---|
| 1729 | + req->state_sz = SHA512_DIGEST_SIZE; |
---|
| 1730 | + req->digest_sz = SHA512_DIGEST_SIZE; |
---|
| 1731 | + req->block_sz = SHA512_BLOCK_SIZE; |
---|
| 1732 | + req->hmac = true; |
---|
| 1733 | + |
---|
1503 | 1734 | return 0; |
---|
1504 | 1735 | } |
---|
1505 | 1736 | |
---|
.. | .. |
---|
1515 | 1746 | |
---|
1516 | 1747 | struct safexcel_alg_template safexcel_alg_hmac_sha384 = { |
---|
1517 | 1748 | .type = SAFEXCEL_ALG_TYPE_AHASH, |
---|
1518 | | - .engines = EIP97IES | EIP197B | EIP197D, |
---|
| 1749 | + .algo_mask = SAFEXCEL_ALG_SHA2_512, |
---|
1519 | 1750 | .alg.ahash = { |
---|
1520 | 1751 | .init = safexcel_hmac_sha384_init, |
---|
1521 | 1752 | .update = safexcel_ahash_update, |
---|
.. | .. |
---|
1531 | 1762 | .base = { |
---|
1532 | 1763 | .cra_name = "hmac(sha384)", |
---|
1533 | 1764 | .cra_driver_name = "safexcel-hmac-sha384", |
---|
1534 | | - .cra_priority = 300, |
---|
| 1765 | + .cra_priority = SAFEXCEL_CRA_PRIORITY, |
---|
1535 | 1766 | .cra_flags = CRYPTO_ALG_ASYNC | |
---|
| 1767 | + CRYPTO_ALG_ALLOCATES_MEMORY | |
---|
1536 | 1768 | CRYPTO_ALG_KERN_DRIVER_ONLY, |
---|
1537 | 1769 | .cra_blocksize = SHA384_BLOCK_SIZE, |
---|
1538 | 1770 | .cra_ctxsize = sizeof(struct safexcel_ahash_ctx), |
---|
.. | .. |
---|
1551 | 1783 | |
---|
1552 | 1784 | memset(req, 0, sizeof(*req)); |
---|
1553 | 1785 | |
---|
1554 | | - req->state[0] = MD5_H0; |
---|
1555 | | - req->state[1] = MD5_H1; |
---|
1556 | | - req->state[2] = MD5_H2; |
---|
1557 | | - req->state[3] = MD5_H3; |
---|
1558 | | - |
---|
1559 | 1786 | ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5; |
---|
1560 | 1787 | req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED; |
---|
1561 | 1788 | req->state_sz = MD5_DIGEST_SIZE; |
---|
| 1789 | + req->digest_sz = MD5_DIGEST_SIZE; |
---|
| 1790 | + req->block_sz = MD5_HMAC_BLOCK_SIZE; |
---|
1562 | 1791 | |
---|
1563 | 1792 | return 0; |
---|
1564 | 1793 | } |
---|
.. | .. |
---|
1575 | 1804 | |
---|
1576 | 1805 | struct safexcel_alg_template safexcel_alg_md5 = { |
---|
1577 | 1806 | .type = SAFEXCEL_ALG_TYPE_AHASH, |
---|
1578 | | - .engines = EIP97IES | EIP197B | EIP197D, |
---|
| 1807 | + .algo_mask = SAFEXCEL_ALG_MD5, |
---|
1579 | 1808 | .alg.ahash = { |
---|
1580 | 1809 | .init = safexcel_md5_init, |
---|
1581 | 1810 | .update = safexcel_ahash_update, |
---|
.. | .. |
---|
1590 | 1819 | .base = { |
---|
1591 | 1820 | .cra_name = "md5", |
---|
1592 | 1821 | .cra_driver_name = "safexcel-md5", |
---|
1593 | | - .cra_priority = 300, |
---|
| 1822 | + .cra_priority = SAFEXCEL_CRA_PRIORITY, |
---|
1594 | 1823 | .cra_flags = CRYPTO_ALG_ASYNC | |
---|
| 1824 | + CRYPTO_ALG_ALLOCATES_MEMORY | |
---|
1595 | 1825 | CRYPTO_ALG_KERN_DRIVER_ONLY, |
---|
1596 | 1826 | .cra_blocksize = MD5_HMAC_BLOCK_SIZE, |
---|
1597 | 1827 | .cra_ctxsize = sizeof(struct safexcel_ahash_ctx), |
---|
.. | .. |
---|
1605 | 1835 | |
---|
1606 | 1836 | static int safexcel_hmac_md5_init(struct ahash_request *areq) |
---|
1607 | 1837 | { |
---|
| 1838 | + struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq)); |
---|
1608 | 1839 | struct safexcel_ahash_req *req = ahash_request_ctx(areq); |
---|
1609 | 1840 | |
---|
1610 | | - safexcel_md5_init(areq); |
---|
1611 | | - req->digest = CONTEXT_CONTROL_DIGEST_HMAC; |
---|
| 1841 | + memset(req, 0, sizeof(*req)); |
---|
| 1842 | + |
---|
| 1843 | + /* Start from ipad precompute */ |
---|
| 1844 | + memcpy(req->state, &ctx->base.ipad, MD5_DIGEST_SIZE); |
---|
| 1845 | + /* Already processed the key^ipad part now! */ |
---|
| 1846 | + req->len = MD5_HMAC_BLOCK_SIZE; |
---|
| 1847 | + req->processed = MD5_HMAC_BLOCK_SIZE; |
---|
| 1848 | + |
---|
| 1849 | + ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5; |
---|
| 1850 | + req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED; |
---|
| 1851 | + req->state_sz = MD5_DIGEST_SIZE; |
---|
| 1852 | + req->digest_sz = MD5_DIGEST_SIZE; |
---|
| 1853 | + req->block_sz = MD5_HMAC_BLOCK_SIZE; |
---|
| 1854 | + req->len_is_le = true; /* MD5 is little endian! ... */ |
---|
| 1855 | + req->hmac = true; |
---|
| 1856 | + |
---|
1612 | 1857 | return 0; |
---|
1613 | 1858 | } |
---|
1614 | 1859 | |
---|
.. | .. |
---|
1631 | 1876 | |
---|
1632 | 1877 | struct safexcel_alg_template safexcel_alg_hmac_md5 = { |
---|
1633 | 1878 | .type = SAFEXCEL_ALG_TYPE_AHASH, |
---|
1634 | | - .engines = EIP97IES | EIP197B | EIP197D, |
---|
| 1879 | + .algo_mask = SAFEXCEL_ALG_MD5, |
---|
1635 | 1880 | .alg.ahash = { |
---|
1636 | 1881 | .init = safexcel_hmac_md5_init, |
---|
1637 | 1882 | .update = safexcel_ahash_update, |
---|
.. | .. |
---|
1647 | 1892 | .base = { |
---|
1648 | 1893 | .cra_name = "hmac(md5)", |
---|
1649 | 1894 | .cra_driver_name = "safexcel-hmac-md5", |
---|
1650 | | - .cra_priority = 300, |
---|
| 1895 | + .cra_priority = SAFEXCEL_CRA_PRIORITY, |
---|
1651 | 1896 | .cra_flags = CRYPTO_ALG_ASYNC | |
---|
| 1897 | + CRYPTO_ALG_ALLOCATES_MEMORY | |
---|
1652 | 1898 | CRYPTO_ALG_KERN_DRIVER_ONLY, |
---|
1653 | 1899 | .cra_blocksize = MD5_HMAC_BLOCK_SIZE, |
---|
1654 | 1900 | .cra_ctxsize = sizeof(struct safexcel_ahash_ctx), |
---|
.. | .. |
---|
1659 | 1905 | }, |
---|
1660 | 1906 | }, |
---|
1661 | 1907 | }; |
---|
| 1908 | + |
---|
| 1909 | +static int safexcel_crc32_cra_init(struct crypto_tfm *tfm) |
---|
| 1910 | +{ |
---|
| 1911 | + struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm); |
---|
| 1912 | + int ret = safexcel_ahash_cra_init(tfm); |
---|
| 1913 | + |
---|
| 1914 | + /* Default 'key' is all zeroes */ |
---|
| 1915 | + memset(&ctx->base.ipad, 0, sizeof(u32)); |
---|
| 1916 | + return ret; |
---|
| 1917 | +} |
---|
| 1918 | + |
---|
| 1919 | +static int safexcel_crc32_init(struct ahash_request *areq) |
---|
| 1920 | +{ |
---|
| 1921 | + struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq)); |
---|
| 1922 | + struct safexcel_ahash_req *req = ahash_request_ctx(areq); |
---|
| 1923 | + |
---|
| 1924 | + memset(req, 0, sizeof(*req)); |
---|
| 1925 | + |
---|
| 1926 | + /* Start from loaded key */ |
---|
| 1927 | + req->state[0] = cpu_to_le32(~ctx->base.ipad.word[0]); |
---|
| 1928 | + /* Set processed to non-zero to enable invalidation detection */ |
---|
| 1929 | + req->len = sizeof(u32); |
---|
| 1930 | + req->processed = sizeof(u32); |
---|
| 1931 | + |
---|
| 1932 | + ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_CRC32; |
---|
| 1933 | + req->digest = CONTEXT_CONTROL_DIGEST_XCM; |
---|
| 1934 | + req->state_sz = sizeof(u32); |
---|
| 1935 | + req->digest_sz = sizeof(u32); |
---|
| 1936 | + req->block_sz = sizeof(u32); |
---|
| 1937 | + |
---|
| 1938 | + return 0; |
---|
| 1939 | +} |
---|
| 1940 | + |
---|
| 1941 | +static int safexcel_crc32_setkey(struct crypto_ahash *tfm, const u8 *key, |
---|
| 1942 | + unsigned int keylen) |
---|
| 1943 | +{ |
---|
| 1944 | + struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm)); |
---|
| 1945 | + |
---|
| 1946 | + if (keylen != sizeof(u32)) |
---|
| 1947 | + return -EINVAL; |
---|
| 1948 | + |
---|
| 1949 | + memcpy(&ctx->base.ipad, key, sizeof(u32)); |
---|
| 1950 | + return 0; |
---|
| 1951 | +} |
---|
| 1952 | + |
---|
static int safexcel_crc32_digest(struct ahash_request *areq)
{
	int ret = safexcel_crc32_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}
---|
| 1957 | + |
---|
/*
 * CRC32 offloaded as a keyed ahash; the optional 32 bit key is the seed
 * (see safexcel_crc32_setkey/safexcel_crc32_init above).
 */
struct safexcel_alg_template safexcel_alg_crc32 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = 0,
	.alg.ahash = {
		.init = safexcel_crc32_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_crc32_digest,
		.setkey = safexcel_crc32_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = sizeof(u32),
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "crc32",
				.cra_driver_name = "safexcel-crc32",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_OPTIONAL_KEY |
					     CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = 1,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_crc32_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
---|
| 1990 | + |
---|
| 1991 | +static int safexcel_cbcmac_init(struct ahash_request *areq) |
---|
| 1992 | +{ |
---|
| 1993 | + struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq)); |
---|
| 1994 | + struct safexcel_ahash_req *req = ahash_request_ctx(areq); |
---|
| 1995 | + |
---|
| 1996 | + memset(req, 0, sizeof(*req)); |
---|
| 1997 | + |
---|
| 1998 | + /* Start from loaded keys */ |
---|
| 1999 | + memcpy(req->state, &ctx->base.ipad, ctx->key_sz); |
---|
| 2000 | + /* Set processed to non-zero to enable invalidation detection */ |
---|
| 2001 | + req->len = AES_BLOCK_SIZE; |
---|
| 2002 | + req->processed = AES_BLOCK_SIZE; |
---|
| 2003 | + |
---|
| 2004 | + req->digest = CONTEXT_CONTROL_DIGEST_XCM; |
---|
| 2005 | + req->state_sz = ctx->key_sz; |
---|
| 2006 | + req->digest_sz = AES_BLOCK_SIZE; |
---|
| 2007 | + req->block_sz = AES_BLOCK_SIZE; |
---|
| 2008 | + req->xcbcmac = true; |
---|
| 2009 | + |
---|
| 2010 | + return 0; |
---|
| 2011 | +} |
---|
| 2012 | + |
---|
| 2013 | +static int safexcel_cbcmac_setkey(struct crypto_ahash *tfm, const u8 *key, |
---|
| 2014 | + unsigned int len) |
---|
| 2015 | +{ |
---|
| 2016 | + struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm)); |
---|
| 2017 | + struct crypto_aes_ctx aes; |
---|
| 2018 | + int ret, i; |
---|
| 2019 | + |
---|
| 2020 | + ret = aes_expandkey(&aes, key, len); |
---|
| 2021 | + if (ret) |
---|
| 2022 | + return ret; |
---|
| 2023 | + |
---|
| 2024 | + memset(&ctx->base.ipad, 0, 2 * AES_BLOCK_SIZE); |
---|
| 2025 | + for (i = 0; i < len / sizeof(u32); i++) |
---|
| 2026 | + ctx->base.ipad.be[i + 8] = cpu_to_be32(aes.key_enc[i]); |
---|
| 2027 | + |
---|
| 2028 | + if (len == AES_KEYSIZE_192) { |
---|
| 2029 | + ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192; |
---|
| 2030 | + ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE; |
---|
| 2031 | + } else if (len == AES_KEYSIZE_256) { |
---|
| 2032 | + ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256; |
---|
| 2033 | + ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE; |
---|
| 2034 | + } else { |
---|
| 2035 | + ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128; |
---|
| 2036 | + ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE; |
---|
| 2037 | + } |
---|
| 2038 | + ctx->cbcmac = true; |
---|
| 2039 | + |
---|
| 2040 | + memzero_explicit(&aes, sizeof(aes)); |
---|
| 2041 | + return 0; |
---|
| 2042 | +} |
---|
| 2043 | + |
---|
static int safexcel_cbcmac_digest(struct ahash_request *areq)
{
	int ret = safexcel_cbcmac_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}
---|
| 2048 | + |
---|
/* Plain AES-CBCMAC (no subkey derivation, cf. xcbc/cmac below) */
struct safexcel_alg_template safexcel_alg_cbcmac = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = 0,
	.alg.ahash = {
		.init = safexcel_cbcmac_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_cbcmac_digest,
		.setkey = safexcel_cbcmac_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = AES_BLOCK_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "cbcmac(aes)",
				.cra_driver_name = "safexcel-cbcmac-aes",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = 1,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
---|
| 2080 | + |
---|
| 2081 | +static int safexcel_xcbcmac_setkey(struct crypto_ahash *tfm, const u8 *key, |
---|
| 2082 | + unsigned int len) |
---|
| 2083 | +{ |
---|
| 2084 | + struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm)); |
---|
| 2085 | + struct crypto_aes_ctx aes; |
---|
| 2086 | + u32 key_tmp[3 * AES_BLOCK_SIZE / sizeof(u32)]; |
---|
| 2087 | + int ret, i; |
---|
| 2088 | + |
---|
| 2089 | + ret = aes_expandkey(&aes, key, len); |
---|
| 2090 | + if (ret) |
---|
| 2091 | + return ret; |
---|
| 2092 | + |
---|
| 2093 | + /* precompute the XCBC key material */ |
---|
| 2094 | + crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK); |
---|
| 2095 | + crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) & |
---|
| 2096 | + CRYPTO_TFM_REQ_MASK); |
---|
| 2097 | + ret = crypto_cipher_setkey(ctx->kaes, key, len); |
---|
| 2098 | + if (ret) |
---|
| 2099 | + return ret; |
---|
| 2100 | + |
---|
| 2101 | + crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp + 2 * AES_BLOCK_SIZE, |
---|
| 2102 | + "\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1"); |
---|
| 2103 | + crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp, |
---|
| 2104 | + "\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2"); |
---|
| 2105 | + crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp + AES_BLOCK_SIZE, |
---|
| 2106 | + "\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3"); |
---|
| 2107 | + for (i = 0; i < 3 * AES_BLOCK_SIZE / sizeof(u32); i++) |
---|
| 2108 | + ctx->base.ipad.word[i] = swab32(key_tmp[i]); |
---|
| 2109 | + |
---|
| 2110 | + crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK); |
---|
| 2111 | + crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) & |
---|
| 2112 | + CRYPTO_TFM_REQ_MASK); |
---|
| 2113 | + ret = crypto_cipher_setkey(ctx->kaes, |
---|
| 2114 | + (u8 *)key_tmp + 2 * AES_BLOCK_SIZE, |
---|
| 2115 | + AES_MIN_KEY_SIZE); |
---|
| 2116 | + if (ret) |
---|
| 2117 | + return ret; |
---|
| 2118 | + |
---|
| 2119 | + ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128; |
---|
| 2120 | + ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE; |
---|
| 2121 | + ctx->cbcmac = false; |
---|
| 2122 | + |
---|
| 2123 | + memzero_explicit(&aes, sizeof(aes)); |
---|
| 2124 | + return 0; |
---|
| 2125 | +} |
---|
| 2126 | + |
---|
| 2127 | +static int safexcel_xcbcmac_cra_init(struct crypto_tfm *tfm) |
---|
| 2128 | +{ |
---|
| 2129 | + struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm); |
---|
| 2130 | + |
---|
| 2131 | + safexcel_ahash_cra_init(tfm); |
---|
| 2132 | + ctx->kaes = crypto_alloc_cipher("aes", 0, 0); |
---|
| 2133 | + return PTR_ERR_OR_ZERO(ctx->kaes); |
---|
| 2134 | +} |
---|
| 2135 | + |
---|
| 2136 | +static void safexcel_xcbcmac_cra_exit(struct crypto_tfm *tfm) |
---|
| 2137 | +{ |
---|
| 2138 | + struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm); |
---|
| 2139 | + |
---|
| 2140 | + crypto_free_cipher(ctx->kaes); |
---|
| 2141 | + safexcel_ahash_cra_exit(tfm); |
---|
| 2142 | +} |
---|
| 2143 | + |
---|
/* XCBC(AES): CBCMAC with RFC 3566 derived subkeys (see setkey above) */
struct safexcel_alg_template safexcel_alg_xcbcmac = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = 0,
	.alg.ahash = {
		.init = safexcel_cbcmac_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_cbcmac_digest,
		.setkey = safexcel_xcbcmac_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = AES_BLOCK_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "xcbc(aes)",
				.cra_driver_name = "safexcel-xcbc-aes",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_xcbcmac_cra_init,
				.cra_exit = safexcel_xcbcmac_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
---|
| 2175 | + |
---|
| 2176 | +static int safexcel_cmac_setkey(struct crypto_ahash *tfm, const u8 *key, |
---|
| 2177 | + unsigned int len) |
---|
| 2178 | +{ |
---|
| 2179 | + struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm)); |
---|
| 2180 | + struct crypto_aes_ctx aes; |
---|
| 2181 | + __be64 consts[4]; |
---|
| 2182 | + u64 _const[2]; |
---|
| 2183 | + u8 msb_mask, gfmask; |
---|
| 2184 | + int ret, i; |
---|
| 2185 | + |
---|
| 2186 | + ret = aes_expandkey(&aes, key, len); |
---|
| 2187 | + if (ret) |
---|
| 2188 | + return ret; |
---|
| 2189 | + |
---|
| 2190 | + for (i = 0; i < len / sizeof(u32); i++) |
---|
| 2191 | + ctx->base.ipad.word[i + 8] = swab32(aes.key_enc[i]); |
---|
| 2192 | + |
---|
| 2193 | + /* precompute the CMAC key material */ |
---|
| 2194 | + crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK); |
---|
| 2195 | + crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) & |
---|
| 2196 | + CRYPTO_TFM_REQ_MASK); |
---|
| 2197 | + ret = crypto_cipher_setkey(ctx->kaes, key, len); |
---|
| 2198 | + if (ret) |
---|
| 2199 | + return ret; |
---|
| 2200 | + |
---|
| 2201 | + /* code below borrowed from crypto/cmac.c */ |
---|
| 2202 | + /* encrypt the zero block */ |
---|
| 2203 | + memset(consts, 0, AES_BLOCK_SIZE); |
---|
| 2204 | + crypto_cipher_encrypt_one(ctx->kaes, (u8 *)consts, (u8 *)consts); |
---|
| 2205 | + |
---|
| 2206 | + gfmask = 0x87; |
---|
| 2207 | + _const[0] = be64_to_cpu(consts[1]); |
---|
| 2208 | + _const[1] = be64_to_cpu(consts[0]); |
---|
| 2209 | + |
---|
| 2210 | + /* gf(2^128) multiply zero-ciphertext with u and u^2 */ |
---|
| 2211 | + for (i = 0; i < 4; i += 2) { |
---|
| 2212 | + msb_mask = ((s64)_const[1] >> 63) & gfmask; |
---|
| 2213 | + _const[1] = (_const[1] << 1) | (_const[0] >> 63); |
---|
| 2214 | + _const[0] = (_const[0] << 1) ^ msb_mask; |
---|
| 2215 | + |
---|
| 2216 | + consts[i + 0] = cpu_to_be64(_const[1]); |
---|
| 2217 | + consts[i + 1] = cpu_to_be64(_const[0]); |
---|
| 2218 | + } |
---|
| 2219 | + /* end of code borrowed from crypto/cmac.c */ |
---|
| 2220 | + |
---|
| 2221 | + for (i = 0; i < 2 * AES_BLOCK_SIZE / sizeof(u32); i++) |
---|
| 2222 | + ctx->base.ipad.be[i] = cpu_to_be32(((u32 *)consts)[i]); |
---|
| 2223 | + |
---|
| 2224 | + if (len == AES_KEYSIZE_192) { |
---|
| 2225 | + ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192; |
---|
| 2226 | + ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE; |
---|
| 2227 | + } else if (len == AES_KEYSIZE_256) { |
---|
| 2228 | + ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256; |
---|
| 2229 | + ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE; |
---|
| 2230 | + } else { |
---|
| 2231 | + ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128; |
---|
| 2232 | + ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE; |
---|
| 2233 | + } |
---|
| 2234 | + ctx->cbcmac = false; |
---|
| 2235 | + |
---|
| 2236 | + memzero_explicit(&aes, sizeof(aes)); |
---|
| 2237 | + return 0; |
---|
| 2238 | +} |
---|
| 2239 | + |
---|
/* CMAC(AES): CBCMAC with NIST SP 800-38B final-block tweaks */
struct safexcel_alg_template safexcel_alg_cmac = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = 0,
	.alg.ahash = {
		.init = safexcel_cbcmac_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_cbcmac_digest,
		.setkey = safexcel_cmac_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = AES_BLOCK_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "cmac(aes)",
				.cra_driver_name = "safexcel-cmac-aes",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_xcbcmac_cra_init,
				.cra_exit = safexcel_xcbcmac_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
---|
| 2271 | + |
---|
| 2272 | +static int safexcel_sm3_init(struct ahash_request *areq) |
---|
| 2273 | +{ |
---|
| 2274 | + struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq)); |
---|
| 2275 | + struct safexcel_ahash_req *req = ahash_request_ctx(areq); |
---|
| 2276 | + |
---|
| 2277 | + memset(req, 0, sizeof(*req)); |
---|
| 2278 | + |
---|
| 2279 | + ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3; |
---|
| 2280 | + req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED; |
---|
| 2281 | + req->state_sz = SM3_DIGEST_SIZE; |
---|
| 2282 | + req->digest_sz = SM3_DIGEST_SIZE; |
---|
| 2283 | + req->block_sz = SM3_BLOCK_SIZE; |
---|
| 2284 | + |
---|
| 2285 | + return 0; |
---|
| 2286 | +} |
---|
| 2287 | + |
---|
static int safexcel_sm3_digest(struct ahash_request *areq)
{
	/* digest = init + finup in one go */
	return safexcel_sm3_init(areq) ?: safexcel_ahash_finup(areq);
}
---|
| 2297 | + |
---|
/* SM3 hash, only registered when the engine advertises SM3 support */
struct safexcel_alg_template safexcel_alg_sm3 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SM3,
	.alg.ahash = {
		.init = safexcel_sm3_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_sm3_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SM3_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sm3",
				.cra_driver_name = "safexcel-sm3",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SM3_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
---|
| 2328 | + |
---|
| 2329 | +static int safexcel_hmac_sm3_setkey(struct crypto_ahash *tfm, const u8 *key, |
---|
| 2330 | + unsigned int keylen) |
---|
| 2331 | +{ |
---|
| 2332 | + return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sm3", |
---|
| 2333 | + SM3_DIGEST_SIZE); |
---|
| 2334 | +} |
---|
| 2335 | + |
---|
| 2336 | +static int safexcel_hmac_sm3_init(struct ahash_request *areq) |
---|
| 2337 | +{ |
---|
| 2338 | + struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq)); |
---|
| 2339 | + struct safexcel_ahash_req *req = ahash_request_ctx(areq); |
---|
| 2340 | + |
---|
| 2341 | + memset(req, 0, sizeof(*req)); |
---|
| 2342 | + |
---|
| 2343 | + /* Start from ipad precompute */ |
---|
| 2344 | + memcpy(req->state, &ctx->base.ipad, SM3_DIGEST_SIZE); |
---|
| 2345 | + /* Already processed the key^ipad part now! */ |
---|
| 2346 | + req->len = SM3_BLOCK_SIZE; |
---|
| 2347 | + req->processed = SM3_BLOCK_SIZE; |
---|
| 2348 | + |
---|
| 2349 | + ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3; |
---|
| 2350 | + req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED; |
---|
| 2351 | + req->state_sz = SM3_DIGEST_SIZE; |
---|
| 2352 | + req->digest_sz = SM3_DIGEST_SIZE; |
---|
| 2353 | + req->block_sz = SM3_BLOCK_SIZE; |
---|
| 2354 | + req->hmac = true; |
---|
| 2355 | + |
---|
| 2356 | + return 0; |
---|
| 2357 | +} |
---|
| 2358 | + |
---|
static int safexcel_hmac_sm3_digest(struct ahash_request *areq)
{
	/* digest = init + finup in one go */
	return safexcel_hmac_sm3_init(areq) ?: safexcel_ahash_finup(areq);
}
---|
| 2368 | + |
---|
/* HMAC-SM3, keyed via the precomputed ipad/opad scheme (setkey above) */
struct safexcel_alg_template safexcel_alg_hmac_sm3 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SM3,
	.alg.ahash = {
		.init = safexcel_hmac_sm3_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sm3_digest,
		.setkey = safexcel_hmac_sm3_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SM3_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sm3)",
				.cra_driver_name = "safexcel-hmac-sm3",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SM3_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
---|
| 2400 | + |
---|
| 2401 | +static int safexcel_sha3_224_init(struct ahash_request *areq) |
---|
| 2402 | +{ |
---|
| 2403 | + struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq); |
---|
| 2404 | + struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm); |
---|
| 2405 | + struct safexcel_ahash_req *req = ahash_request_ctx(areq); |
---|
| 2406 | + |
---|
| 2407 | + memset(req, 0, sizeof(*req)); |
---|
| 2408 | + |
---|
| 2409 | + ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224; |
---|
| 2410 | + req->digest = CONTEXT_CONTROL_DIGEST_INITIAL; |
---|
| 2411 | + req->state_sz = SHA3_224_DIGEST_SIZE; |
---|
| 2412 | + req->digest_sz = SHA3_224_DIGEST_SIZE; |
---|
| 2413 | + req->block_sz = SHA3_224_BLOCK_SIZE; |
---|
| 2414 | + ctx->do_fallback = false; |
---|
| 2415 | + ctx->fb_init_done = false; |
---|
| 2416 | + return 0; |
---|
| 2417 | +} |
---|
| 2418 | + |
---|
| 2419 | +static int safexcel_sha3_fbcheck(struct ahash_request *req) |
---|
| 2420 | +{ |
---|
| 2421 | + struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); |
---|
| 2422 | + struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm); |
---|
| 2423 | + struct ahash_request *subreq = ahash_request_ctx(req); |
---|
| 2424 | + int ret = 0; |
---|
| 2425 | + |
---|
| 2426 | + if (ctx->do_fallback) { |
---|
| 2427 | + ahash_request_set_tfm(subreq, ctx->fback); |
---|
| 2428 | + ahash_request_set_callback(subreq, req->base.flags, |
---|
| 2429 | + req->base.complete, req->base.data); |
---|
| 2430 | + ahash_request_set_crypt(subreq, req->src, req->result, |
---|
| 2431 | + req->nbytes); |
---|
| 2432 | + if (!ctx->fb_init_done) { |
---|
| 2433 | + if (ctx->fb_do_setkey) { |
---|
| 2434 | + /* Set fallback cipher HMAC key */ |
---|
| 2435 | + u8 key[SHA3_224_BLOCK_SIZE]; |
---|
| 2436 | + |
---|
| 2437 | + memcpy(key, &ctx->base.ipad, |
---|
| 2438 | + crypto_ahash_blocksize(ctx->fback) / 2); |
---|
| 2439 | + memcpy(key + |
---|
| 2440 | + crypto_ahash_blocksize(ctx->fback) / 2, |
---|
| 2441 | + &ctx->base.opad, |
---|
| 2442 | + crypto_ahash_blocksize(ctx->fback) / 2); |
---|
| 2443 | + ret = crypto_ahash_setkey(ctx->fback, key, |
---|
| 2444 | + crypto_ahash_blocksize(ctx->fback)); |
---|
| 2445 | + memzero_explicit(key, |
---|
| 2446 | + crypto_ahash_blocksize(ctx->fback)); |
---|
| 2447 | + ctx->fb_do_setkey = false; |
---|
| 2448 | + } |
---|
| 2449 | + ret = ret ?: crypto_ahash_init(subreq); |
---|
| 2450 | + ctx->fb_init_done = true; |
---|
| 2451 | + } |
---|
| 2452 | + } |
---|
| 2453 | + return ret; |
---|
| 2454 | +} |
---|
| 2455 | + |
---|
| 2456 | +static int safexcel_sha3_update(struct ahash_request *req) |
---|
| 2457 | +{ |
---|
| 2458 | + struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); |
---|
| 2459 | + struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm); |
---|
| 2460 | + struct ahash_request *subreq = ahash_request_ctx(req); |
---|
| 2461 | + |
---|
| 2462 | + ctx->do_fallback = true; |
---|
| 2463 | + return safexcel_sha3_fbcheck(req) ?: crypto_ahash_update(subreq); |
---|
| 2464 | +} |
---|
| 2465 | + |
---|
| 2466 | +static int safexcel_sha3_final(struct ahash_request *req) |
---|
| 2467 | +{ |
---|
| 2468 | + struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); |
---|
| 2469 | + struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm); |
---|
| 2470 | + struct ahash_request *subreq = ahash_request_ctx(req); |
---|
| 2471 | + |
---|
| 2472 | + ctx->do_fallback = true; |
---|
| 2473 | + return safexcel_sha3_fbcheck(req) ?: crypto_ahash_final(subreq); |
---|
| 2474 | +} |
---|
| 2475 | + |
---|
| 2476 | +static int safexcel_sha3_finup(struct ahash_request *req) |
---|
| 2477 | +{ |
---|
| 2478 | + struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); |
---|
| 2479 | + struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm); |
---|
| 2480 | + struct ahash_request *subreq = ahash_request_ctx(req); |
---|
| 2481 | + |
---|
| 2482 | + ctx->do_fallback |= !req->nbytes; |
---|
| 2483 | + if (ctx->do_fallback) |
---|
| 2484 | + /* Update or ex/import happened or len 0, cannot use the HW */ |
---|
| 2485 | + return safexcel_sha3_fbcheck(req) ?: |
---|
| 2486 | + crypto_ahash_finup(subreq); |
---|
| 2487 | + else |
---|
| 2488 | + return safexcel_ahash_finup(req); |
---|
| 2489 | +} |
---|
| 2490 | + |
---|
| 2491 | +static int safexcel_sha3_digest_fallback(struct ahash_request *req) |
---|
| 2492 | +{ |
---|
| 2493 | + struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); |
---|
| 2494 | + struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm); |
---|
| 2495 | + struct ahash_request *subreq = ahash_request_ctx(req); |
---|
| 2496 | + |
---|
| 2497 | + ctx->do_fallback = true; |
---|
| 2498 | + ctx->fb_init_done = false; |
---|
| 2499 | + return safexcel_sha3_fbcheck(req) ?: crypto_ahash_finup(subreq); |
---|
| 2500 | +} |
---|
| 2501 | + |
---|
| 2502 | +static int safexcel_sha3_224_digest(struct ahash_request *req) |
---|
| 2503 | +{ |
---|
| 2504 | + if (req->nbytes) |
---|
| 2505 | + return safexcel_sha3_224_init(req) ?: safexcel_ahash_finup(req); |
---|
| 2506 | + |
---|
| 2507 | + /* HW cannot do zero length hash, use fallback instead */ |
---|
| 2508 | + return safexcel_sha3_digest_fallback(req); |
---|
| 2509 | +} |
---|
| 2510 | + |
---|
| 2511 | +static int safexcel_sha3_export(struct ahash_request *req, void *out) |
---|
| 2512 | +{ |
---|
| 2513 | + struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); |
---|
| 2514 | + struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm); |
---|
| 2515 | + struct ahash_request *subreq = ahash_request_ctx(req); |
---|
| 2516 | + |
---|
| 2517 | + ctx->do_fallback = true; |
---|
| 2518 | + return safexcel_sha3_fbcheck(req) ?: crypto_ahash_export(subreq, out); |
---|
| 2519 | +} |
---|
| 2520 | + |
---|
| 2521 | +static int safexcel_sha3_import(struct ahash_request *req, const void *in) |
---|
| 2522 | +{ |
---|
| 2523 | + struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); |
---|
| 2524 | + struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm); |
---|
| 2525 | + struct ahash_request *subreq = ahash_request_ctx(req); |
---|
| 2526 | + |
---|
| 2527 | + ctx->do_fallback = true; |
---|
| 2528 | + return safexcel_sha3_fbcheck(req) ?: crypto_ahash_import(subreq, in); |
---|
| 2529 | + // return safexcel_ahash_import(req, in); |
---|
| 2530 | +} |
---|
| 2531 | + |
---|
/*
 * tfm init for the plain SHA3 algorithms: set up the base context and
 * allocate the async SW fallback used for updates, ex/import and
 * zero-length hashes.
 */
static int safexcel_sha3_cra_init(struct crypto_tfm *tfm)
{
	struct crypto_ahash *ahash = __crypto_ahash_cast(tfm);
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_ahash_cra_init(tfm);

	/* Allocate fallback implementation */
	ctx->fback = crypto_alloc_ahash(crypto_tfm_alg_name(tfm), 0,
					CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->fback))
		return PTR_ERR(ctx->fback);

	/*
	 * Update statesize from fallback algorithm!
	 * NOTE(review): this patches the *shared* alg descriptor, not just
	 * this tfm, so ex/import state always uses the fallback's format.
	 */
	crypto_hash_alg_common(ahash)->statesize =
		crypto_ahash_statesize(ctx->fback);
	/* reqsize must fit both our own request state and the fallback's */
	crypto_ahash_set_reqsize(ahash, max(sizeof(struct safexcel_ahash_req),
					    sizeof(struct ahash_request) +
					    crypto_ahash_reqsize(ctx->fback)));
	return 0;
}
---|
| 2554 | + |
---|
| 2555 | +static void safexcel_sha3_cra_exit(struct crypto_tfm *tfm) |
---|
| 2556 | +{ |
---|
| 2557 | + struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm); |
---|
| 2558 | + |
---|
| 2559 | + crypto_free_ahash(ctx->fback); |
---|
| 2560 | + safexcel_ahash_cra_exit(tfm); |
---|
| 2561 | +} |
---|
| 2562 | + |
---|
/* Registration template for SHA3-224 (HW with SW fallback) */
struct safexcel_alg_template safexcel_alg_sha3_224 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,	/* only register when HW has SHA3 */
	.alg.ahash = {
		.init = safexcel_sha3_224_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_sha3_224_digest,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_224_DIGEST_SIZE,
			/* overwritten with fallback statesize at cra_init */
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha3-224",
				.cra_driver_name = "safexcel-sha3-224",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_sha3_cra_init,
				.cra_exit = safexcel_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
---|
| 2593 | + |
---|
/* Prepare a fresh SHA3-256 request for the HW path */
static int safexcel_sha3_256_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
	req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
	req->state_sz = SHA3_256_DIGEST_SIZE;
	req->digest_sz = SHA3_256_DIGEST_SIZE;
	req->block_sz = SHA3_256_BLOCK_SIZE;
	/* start on the HW path; fallback request not yet initialized */
	ctx->do_fallback = false;
	ctx->fb_init_done = false;
	return 0;
}

/* One-shot SHA3-256 digest: HW when there is data, fallback otherwise */
static int safexcel_sha3_256_digest(struct ahash_request *req)
{
	if (req->nbytes)
		return safexcel_sha3_256_init(req) ?: safexcel_ahash_finup(req);

	/* HW cannot do zero length hash, use fallback instead */
	return safexcel_sha3_digest_fallback(req);
}

/* Registration template for SHA3-256 (HW with SW fallback) */
struct safexcel_alg_template safexcel_alg_sha3_256 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_sha3_256_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_sha3_256_digest,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_256_DIGEST_SIZE,
			/* overwritten with fallback statesize at cra_init */
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha3-256",
				.cra_driver_name = "safexcel-sha3-256",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_sha3_cra_init,
				.cra_exit = safexcel_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
---|
| 2651 | + |
---|
/* Prepare a fresh SHA3-384 request for the HW path */
static int safexcel_sha3_384_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
	req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
	req->state_sz = SHA3_384_DIGEST_SIZE;
	req->digest_sz = SHA3_384_DIGEST_SIZE;
	req->block_sz = SHA3_384_BLOCK_SIZE;
	/* start on the HW path; fallback request not yet initialized */
	ctx->do_fallback = false;
	ctx->fb_init_done = false;
	return 0;
}

/* One-shot SHA3-384 digest: HW when there is data, fallback otherwise */
static int safexcel_sha3_384_digest(struct ahash_request *req)
{
	if (req->nbytes)
		return safexcel_sha3_384_init(req) ?: safexcel_ahash_finup(req);

	/* HW cannot do zero length hash, use fallback instead */
	return safexcel_sha3_digest_fallback(req);
}

/* Registration template for SHA3-384 (HW with SW fallback) */
struct safexcel_alg_template safexcel_alg_sha3_384 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_sha3_384_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_sha3_384_digest,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_384_DIGEST_SIZE,
			/* overwritten with fallback statesize at cra_init */
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha3-384",
				.cra_driver_name = "safexcel-sha3-384",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_sha3_cra_init,
				.cra_exit = safexcel_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
---|
| 2709 | + |
---|
/* Prepare a fresh SHA3-512 request for the HW path */
static int safexcel_sha3_512_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
	req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
	req->state_sz = SHA3_512_DIGEST_SIZE;
	req->digest_sz = SHA3_512_DIGEST_SIZE;
	req->block_sz = SHA3_512_BLOCK_SIZE;
	/* start on the HW path; fallback request not yet initialized */
	ctx->do_fallback = false;
	ctx->fb_init_done = false;
	return 0;
}

/* One-shot SHA3-512 digest: HW when there is data, fallback otherwise */
static int safexcel_sha3_512_digest(struct ahash_request *req)
{
	if (req->nbytes)
		return safexcel_sha3_512_init(req) ?: safexcel_ahash_finup(req);

	/* HW cannot do zero length hash, use fallback instead */
	return safexcel_sha3_digest_fallback(req);
}

/* Registration template for SHA3-512 (HW with SW fallback) */
struct safexcel_alg_template safexcel_alg_sha3_512 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_sha3_512_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_sha3_512_digest,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_512_DIGEST_SIZE,
			/* overwritten with fallback statesize at cra_init */
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha3-512",
				.cra_driver_name = "safexcel-sha3-512",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_sha3_cra_init,
				.cra_exit = safexcel_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
---|
| 2767 | + |
---|
| 2768 | +static int safexcel_hmac_sha3_cra_init(struct crypto_tfm *tfm, const char *alg) |
---|
| 2769 | +{ |
---|
| 2770 | + struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm); |
---|
| 2771 | + int ret; |
---|
| 2772 | + |
---|
| 2773 | + ret = safexcel_sha3_cra_init(tfm); |
---|
| 2774 | + if (ret) |
---|
| 2775 | + return ret; |
---|
| 2776 | + |
---|
| 2777 | + /* Allocate precalc basic digest implementation */ |
---|
| 2778 | + ctx->shpre = crypto_alloc_shash(alg, 0, CRYPTO_ALG_NEED_FALLBACK); |
---|
| 2779 | + if (IS_ERR(ctx->shpre)) |
---|
| 2780 | + return PTR_ERR(ctx->shpre); |
---|
| 2781 | + |
---|
| 2782 | + ctx->shdesc = kmalloc(sizeof(*ctx->shdesc) + |
---|
| 2783 | + crypto_shash_descsize(ctx->shpre), GFP_KERNEL); |
---|
| 2784 | + if (!ctx->shdesc) { |
---|
| 2785 | + crypto_free_shash(ctx->shpre); |
---|
| 2786 | + return -ENOMEM; |
---|
| 2787 | + } |
---|
| 2788 | + ctx->shdesc->tfm = ctx->shpre; |
---|
| 2789 | + return 0; |
---|
| 2790 | +} |
---|
| 2791 | + |
---|
| 2792 | +static void safexcel_hmac_sha3_cra_exit(struct crypto_tfm *tfm) |
---|
| 2793 | +{ |
---|
| 2794 | + struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm); |
---|
| 2795 | + |
---|
| 2796 | + crypto_free_ahash(ctx->fback); |
---|
| 2797 | + crypto_free_shash(ctx->shpre); |
---|
| 2798 | + kfree(ctx->shdesc); |
---|
| 2799 | + safexcel_ahash_cra_exit(tfm); |
---|
| 2800 | +} |
---|
| 2801 | + |
---|
/*
 * HMAC-SHA3 setkey.
 *
 * The HMAC engine infrastructure expects the key split in two halves:
 * the first blocksize/2 bytes in ctx->base.ipad and the remainder in
 * ctx->base.opad (note: these are NOT pre-XORed i/o pads here, just key
 * storage split to fit the existing driver infrastructure). Keys longer
 * than the block size are first condensed with the basic digest (shpre).
 * Any error from crypto_shash_digest is carried through in ret and
 * returned at the end.
 */
static int safexcel_hmac_sha3_setkey(struct crypto_ahash *tfm, const u8 *key,
				     unsigned int keylen)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	int ret = 0;

	if (keylen > crypto_ahash_blocksize(tfm)) {
		/*
		 * If the key is larger than the blocksize, then hash it
		 * first using our fallback cipher
		 */
		ret = crypto_shash_digest(ctx->shdesc, key, keylen,
					  ctx->base.ipad.byte);
		keylen = crypto_shash_digestsize(ctx->shpre);

		/*
		 * If the digest is larger than half the blocksize, we need to
		 * move the rest to opad due to the way our HMAC infra works.
		 */
		if (keylen > crypto_ahash_blocksize(tfm) / 2)
			/* Buffers overlap, need to use memmove iso memcpy! */
			memmove(&ctx->base.opad,
				ctx->base.ipad.byte +
					crypto_ahash_blocksize(tfm) / 2,
				keylen - crypto_ahash_blocksize(tfm) / 2);
	} else {
		/*
		 * Copy the key to our ipad & opad buffers
		 * Note that ipad and opad each contain one half of the key,
		 * to match the existing HMAC driver infrastructure.
		 */
		if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
			memcpy(&ctx->base.ipad, key, keylen);
		} else {
			memcpy(&ctx->base.ipad, key,
			       crypto_ahash_blocksize(tfm) / 2);
			memcpy(&ctx->base.opad,
			       key + crypto_ahash_blocksize(tfm) / 2,
			       keylen - crypto_ahash_blocksize(tfm) / 2);
		}
	}

	/* Pad key with zeroes */
	if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
		/* key fits entirely in ipad: zero its tail and all of opad */
		memset(ctx->base.ipad.byte + keylen, 0,
		       crypto_ahash_blocksize(tfm) / 2 - keylen);
		memset(&ctx->base.opad, 0, crypto_ahash_blocksize(tfm) / 2);
	} else {
		/* ipad is full: zero only the unused tail of opad */
		memset(ctx->base.opad.byte + keylen -
		       crypto_ahash_blocksize(tfm) / 2, 0,
		       crypto_ahash_blocksize(tfm) - keylen);
	}

	/* If doing fallback, still need to set the new key! */
	ctx->fb_do_setkey = true;
	return ret;
}
---|
| 2859 | + |
---|
/*
 * Prepare a fresh HMAC-SHA3-224 request: preload the state with (half of)
 * the key, pretending one full block has already been processed, per the
 * driver's HMAC infrastructure.
 */
static int safexcel_hmac_sha3_224_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Copy (half of) the key */
	memcpy(req->state, &ctx->base.ipad, SHA3_224_BLOCK_SIZE / 2);
	/* Start of HMAC should have len == processed == blocksize */
	req->len = SHA3_224_BLOCK_SIZE;
	req->processed = SHA3_224_BLOCK_SIZE;
	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
	req->state_sz = SHA3_224_BLOCK_SIZE / 2;
	req->digest_sz = SHA3_224_DIGEST_SIZE;
	req->block_sz = SHA3_224_BLOCK_SIZE;
	req->hmac = true;
	ctx->do_fallback = false;
	ctx->fb_init_done = false;
	return 0;
}

/* One-shot HMAC-SHA3-224: HW when there is data, fallback otherwise */
static int safexcel_hmac_sha3_224_digest(struct ahash_request *req)
{
	if (req->nbytes)
		return safexcel_hmac_sha3_224_init(req) ?:
		       safexcel_ahash_finup(req);

	/* HW cannot do zero length HMAC, use fallback instead */
	return safexcel_sha3_digest_fallback(req);
}

/* Bind the common HMAC-SHA3 tfm init to the sha3-224 precalc digest */
static int safexcel_hmac_sha3_224_cra_init(struct crypto_tfm *tfm)
{
	return safexcel_hmac_sha3_cra_init(tfm, "sha3-224");
}

/* Registration template for HMAC-SHA3-224 */
struct safexcel_alg_template safexcel_alg_hmac_sha3_224 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_hmac_sha3_224_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_hmac_sha3_224_digest,
		.setkey = safexcel_hmac_sha3_setkey,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_224_DIGEST_SIZE,
			/* overwritten with fallback statesize at cra_init */
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha3-224)",
				.cra_driver_name = "safexcel-hmac-sha3-224",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_hmac_sha3_224_cra_init,
				.cra_exit = safexcel_hmac_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
---|
| 2930 | + |
---|
/*
 * Prepare a fresh HMAC-SHA3-256 request: preload the state with (half of)
 * the key, pretending one full block has already been processed.
 */
static int safexcel_hmac_sha3_256_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Copy (half of) the key */
	memcpy(req->state, &ctx->base.ipad, SHA3_256_BLOCK_SIZE / 2);
	/* Start of HMAC should have len == processed == blocksize */
	req->len = SHA3_256_BLOCK_SIZE;
	req->processed = SHA3_256_BLOCK_SIZE;
	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
	req->state_sz = SHA3_256_BLOCK_SIZE / 2;
	req->digest_sz = SHA3_256_DIGEST_SIZE;
	req->block_sz = SHA3_256_BLOCK_SIZE;
	req->hmac = true;
	ctx->do_fallback = false;
	ctx->fb_init_done = false;
	return 0;
}

/* One-shot HMAC-SHA3-256: HW when there is data, fallback otherwise */
static int safexcel_hmac_sha3_256_digest(struct ahash_request *req)
{
	if (req->nbytes)
		return safexcel_hmac_sha3_256_init(req) ?:
		       safexcel_ahash_finup(req);

	/* HW cannot do zero length HMAC, use fallback instead */
	return safexcel_sha3_digest_fallback(req);
}

/* Bind the common HMAC-SHA3 tfm init to the sha3-256 precalc digest */
static int safexcel_hmac_sha3_256_cra_init(struct crypto_tfm *tfm)
{
	return safexcel_hmac_sha3_cra_init(tfm, "sha3-256");
}

/* Registration template for HMAC-SHA3-256 */
struct safexcel_alg_template safexcel_alg_hmac_sha3_256 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_hmac_sha3_256_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_hmac_sha3_256_digest,
		.setkey = safexcel_hmac_sha3_setkey,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_256_DIGEST_SIZE,
			/* overwritten with fallback statesize at cra_init */
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha3-256)",
				.cra_driver_name = "safexcel-hmac-sha3-256",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_hmac_sha3_256_cra_init,
				.cra_exit = safexcel_hmac_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
---|
| 3001 | + |
---|
/*
 * Prepare a fresh HMAC-SHA3-384 request: preload the state with (half of)
 * the key, pretending one full block has already been processed.
 */
static int safexcel_hmac_sha3_384_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Copy (half of) the key */
	memcpy(req->state, &ctx->base.ipad, SHA3_384_BLOCK_SIZE / 2);
	/* Start of HMAC should have len == processed == blocksize */
	req->len = SHA3_384_BLOCK_SIZE;
	req->processed = SHA3_384_BLOCK_SIZE;
	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
	req->state_sz = SHA3_384_BLOCK_SIZE / 2;
	req->digest_sz = SHA3_384_DIGEST_SIZE;
	req->block_sz = SHA3_384_BLOCK_SIZE;
	req->hmac = true;
	ctx->do_fallback = false;
	ctx->fb_init_done = false;
	return 0;
}

/* One-shot HMAC-SHA3-384: HW when there is data, fallback otherwise */
static int safexcel_hmac_sha3_384_digest(struct ahash_request *req)
{
	if (req->nbytes)
		return safexcel_hmac_sha3_384_init(req) ?:
		       safexcel_ahash_finup(req);

	/* HW cannot do zero length HMAC, use fallback instead */
	return safexcel_sha3_digest_fallback(req);
}

/* Bind the common HMAC-SHA3 tfm init to the sha3-384 precalc digest */
static int safexcel_hmac_sha3_384_cra_init(struct crypto_tfm *tfm)
{
	return safexcel_hmac_sha3_cra_init(tfm, "sha3-384");
}

/* Registration template for HMAC-SHA3-384 */
struct safexcel_alg_template safexcel_alg_hmac_sha3_384 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_hmac_sha3_384_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_hmac_sha3_384_digest,
		.setkey = safexcel_hmac_sha3_setkey,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_384_DIGEST_SIZE,
			/* overwritten with fallback statesize at cra_init */
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha3-384)",
				.cra_driver_name = "safexcel-hmac-sha3-384",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_hmac_sha3_384_cra_init,
				.cra_exit = safexcel_hmac_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
---|
| 3072 | + |
---|
/*
 * Prepare a fresh HMAC-SHA3-512 request: preload the state with (half of)
 * the key, pretending one full block has already been processed.
 */
static int safexcel_hmac_sha3_512_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Copy (half of) the key */
	memcpy(req->state, &ctx->base.ipad, SHA3_512_BLOCK_SIZE / 2);
	/* Start of HMAC should have len == processed == blocksize */
	req->len = SHA3_512_BLOCK_SIZE;
	req->processed = SHA3_512_BLOCK_SIZE;
	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
	req->state_sz = SHA3_512_BLOCK_SIZE / 2;
	req->digest_sz = SHA3_512_DIGEST_SIZE;
	req->block_sz = SHA3_512_BLOCK_SIZE;
	req->hmac = true;
	ctx->do_fallback = false;
	ctx->fb_init_done = false;
	return 0;
}

/* One-shot HMAC-SHA3-512: HW when there is data, fallback otherwise */
static int safexcel_hmac_sha3_512_digest(struct ahash_request *req)
{
	if (req->nbytes)
		return safexcel_hmac_sha3_512_init(req) ?:
		       safexcel_ahash_finup(req);

	/* HW cannot do zero length HMAC, use fallback instead */
	return safexcel_sha3_digest_fallback(req);
}

/* Bind the common HMAC-SHA3 tfm init to the sha3-512 precalc digest */
static int safexcel_hmac_sha3_512_cra_init(struct crypto_tfm *tfm)
{
	return safexcel_hmac_sha3_cra_init(tfm, "sha3-512");
}

/* Registration template for HMAC-SHA3-512 */
struct safexcel_alg_template safexcel_alg_hmac_sha3_512 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_hmac_sha3_512_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_hmac_sha3_512_digest,
		.setkey = safexcel_hmac_sha3_setkey,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_512_DIGEST_SIZE,
			/* overwritten with fallback statesize at cra_init */
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha3-512)",
				.cra_driver_name = "safexcel-hmac-sha3-512",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_hmac_sha3_512_cra_init,
				.cra_exit = safexcel_hmac_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
---|