2024-02-20 102a0743326a03cd1a1202ceda21e175b7d3575c
kernel/drivers/crypto/rockchip/rk3288_crypto_skcipher.c
@@ -9,23 +9,77 @@
  * Some ideas are from marvell-cesa.c and s5p-sss.c driver.
  */
 #include <linux/device.h>
+#include <crypto/scatterwalk.h>
 #include "rk3288_crypto.h"
 
 #define RK_CRYPTO_DEC BIT(0)
 
-static void rk_crypto_complete(struct crypto_async_request *base, int err)
+static int rk_cipher_need_fallback(struct skcipher_request *req)
 {
-        if (base->complete)
-                base->complete(base, err);
+        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+        unsigned int bs = crypto_skcipher_blocksize(tfm);
+        struct scatterlist *sgs, *sgd;
+        unsigned int stodo, dtodo, len;
+
+        if (!req->cryptlen)
+                return true;
+
+        len = req->cryptlen;
+        sgs = req->src;
+        sgd = req->dst;
+        while (sgs && sgd) {
+                if (!IS_ALIGNED(sgs->offset, sizeof(u32))) {
+                        return true;
+                }
+                if (!IS_ALIGNED(sgd->offset, sizeof(u32))) {
+                        return true;
+                }
+                stodo = min(len, sgs->length);
+                if (stodo % bs) {
+                        return true;
+                }
+                dtodo = min(len, sgd->length);
+                if (dtodo % bs) {
+                        return true;
+                }
+                if (stodo != dtodo) {
+                        return true;
+                }
+                len -= stodo;
+                sgs = sg_next(sgs);
+                sgd = sg_next(sgd);
+        }
+        return false;
+}
+
+static int rk_cipher_fallback(struct skcipher_request *areq)
+{
+        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
+        struct rk_cipher_ctx *op = crypto_skcipher_ctx(tfm);
+        struct rk_cipher_rctx *rctx = skcipher_request_ctx(areq);
+        int err;
+
+        skcipher_request_set_tfm(&rctx->fallback_req, op->fallback_tfm);
+        skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags,
+                                      areq->base.complete, areq->base.data);
+        skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst,
+                                   areq->cryptlen, areq->iv);
+        if (rctx->mode & RK_CRYPTO_DEC)
+                err = crypto_skcipher_decrypt(&rctx->fallback_req);
+        else
+                err = crypto_skcipher_encrypt(&rctx->fallback_req);
+        return err;
 }
 
 static int rk_handle_req(struct rk_crypto_info *dev,
                          struct skcipher_request *req)
 {
-        if (!IS_ALIGNED(req->cryptlen, dev->align_size))
-                return -EINVAL;
-        else
-                return dev->enqueue(dev, &req->base);
+        struct crypto_engine *engine = dev->engine;
+
+        if (rk_cipher_need_fallback(req))
+                return rk_cipher_fallback(req);
+
+        return crypto_transfer_skcipher_request_to_engine(engine, req);
 }
 
 static int rk_aes_setkey(struct crypto_skcipher *cipher,
@@ -38,8 +92,9 @@
             keylen != AES_KEYSIZE_256)
                 return -EINVAL;
         ctx->keylen = keylen;
-        memcpy_toio(ctx->dev->reg + RK_CRYPTO_AES_KEY_0, key, keylen);
-        return 0;
+        memcpy(ctx->key, key, keylen);
+
+        return crypto_skcipher_setkey(ctx->fallback_tfm, key, keylen);
 }
 
 static int rk_des_setkey(struct crypto_skcipher *cipher,
@@ -53,8 +108,9 @@
                 return err;
 
         ctx->keylen = keylen;
-        memcpy_toio(ctx->dev->reg + RK_CRYPTO_TDES_KEY1_0, key, keylen);
-        return 0;
+        memcpy(ctx->key, key, keylen);
+
+        return crypto_skcipher_setkey(ctx->fallback_tfm, key, keylen);
 }
 
 static int rk_tdes_setkey(struct crypto_skcipher *cipher,
@@ -68,17 +124,19 @@
                 return err;
 
         ctx->keylen = keylen;
-        memcpy_toio(ctx->dev->reg + RK_CRYPTO_TDES_KEY1_0, key, keylen);
-        return 0;
+        memcpy(ctx->key, key, keylen);
+
+        return crypto_skcipher_setkey(ctx->fallback_tfm, key, keylen);
 }
 
 static int rk_aes_ecb_encrypt(struct skcipher_request *req)
 {
         struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
         struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
+        struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);
         struct rk_crypto_info *dev = ctx->dev;
 
-        ctx->mode = RK_CRYPTO_AES_ECB_MODE;
+        rctx->mode = RK_CRYPTO_AES_ECB_MODE;
         return rk_handle_req(dev, req);
 }
 
@@ -86,9 +144,10 @@
 {
         struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
         struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
+        struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);
         struct rk_crypto_info *dev = ctx->dev;
 
-        ctx->mode = RK_CRYPTO_AES_ECB_MODE | RK_CRYPTO_DEC;
+        rctx->mode = RK_CRYPTO_AES_ECB_MODE | RK_CRYPTO_DEC;
         return rk_handle_req(dev, req);
 }
 
@@ -96,9 +155,10 @@
 {
         struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
         struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
+        struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);
         struct rk_crypto_info *dev = ctx->dev;
 
-        ctx->mode = RK_CRYPTO_AES_CBC_MODE;
+        rctx->mode = RK_CRYPTO_AES_CBC_MODE;
         return rk_handle_req(dev, req);
 }
 
@@ -106,9 +166,10 @@
 {
         struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
         struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
+        struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);
         struct rk_crypto_info *dev = ctx->dev;
 
-        ctx->mode = RK_CRYPTO_AES_CBC_MODE | RK_CRYPTO_DEC;
+        rctx->mode = RK_CRYPTO_AES_CBC_MODE | RK_CRYPTO_DEC;
         return rk_handle_req(dev, req);
 }
 
@@ -116,9 +177,10 @@
 {
         struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
         struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
+        struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);
         struct rk_crypto_info *dev = ctx->dev;
 
-        ctx->mode = 0;
+        rctx->mode = 0;
         return rk_handle_req(dev, req);
 }
 
@@ -126,9 +188,10 @@
 {
         struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
         struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
+        struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);
         struct rk_crypto_info *dev = ctx->dev;
 
-        ctx->mode = RK_CRYPTO_DEC;
+        rctx->mode = RK_CRYPTO_DEC;
         return rk_handle_req(dev, req);
 }
 
@@ -136,9 +199,10 @@
 {
         struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
         struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
+        struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);
         struct rk_crypto_info *dev = ctx->dev;
 
-        ctx->mode = RK_CRYPTO_TDES_CHAINMODE_CBC;
+        rctx->mode = RK_CRYPTO_TDES_CHAINMODE_CBC;
         return rk_handle_req(dev, req);
 }
 
@@ -146,9 +210,10 @@
 {
         struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
         struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
+        struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);
         struct rk_crypto_info *dev = ctx->dev;
 
-        ctx->mode = RK_CRYPTO_TDES_CHAINMODE_CBC | RK_CRYPTO_DEC;
+        rctx->mode = RK_CRYPTO_TDES_CHAINMODE_CBC | RK_CRYPTO_DEC;
         return rk_handle_req(dev, req);
 }
 
@@ -156,9 +221,10 @@
 {
         struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
         struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
+        struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);
         struct rk_crypto_info *dev = ctx->dev;
 
-        ctx->mode = RK_CRYPTO_TDES_SELECT;
+        rctx->mode = RK_CRYPTO_TDES_SELECT;
         return rk_handle_req(dev, req);
 }
 
@@ -166,9 +232,10 @@
 {
         struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
         struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
+        struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);
         struct rk_crypto_info *dev = ctx->dev;
 
-        ctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_DEC;
+        rctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_DEC;
         return rk_handle_req(dev, req);
 }
 
@@ -176,9 +243,10 @@
 {
         struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
         struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
+        struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);
         struct rk_crypto_info *dev = ctx->dev;
 
-        ctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_TDES_CHAINMODE_CBC;
+        rctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_TDES_CHAINMODE_CBC;
         return rk_handle_req(dev, req);
 }
 
@@ -186,43 +254,42 @@
 {
         struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
         struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
+        struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);
         struct rk_crypto_info *dev = ctx->dev;
 
-        ctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_TDES_CHAINMODE_CBC |
+        rctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_TDES_CHAINMODE_CBC |
                     RK_CRYPTO_DEC;
         return rk_handle_req(dev, req);
 }
 
-static void rk_ablk_hw_init(struct rk_crypto_info *dev)
+static void rk_ablk_hw_init(struct rk_crypto_info *dev, struct skcipher_request *req)
 {
-        struct skcipher_request *req =
-                skcipher_request_cast(dev->async_req);
         struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
         struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
+        struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);
         struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
-        u32 ivsize, block, conf_reg = 0;
+        u32 block, conf_reg = 0;
 
         block = crypto_tfm_alg_blocksize(tfm);
-        ivsize = crypto_skcipher_ivsize(cipher);
 
         if (block == DES_BLOCK_SIZE) {
-                ctx->mode |= RK_CRYPTO_TDES_FIFO_MODE |
+                rctx->mode |= RK_CRYPTO_TDES_FIFO_MODE |
                              RK_CRYPTO_TDES_BYTESWAP_KEY |
                              RK_CRYPTO_TDES_BYTESWAP_IV;
-                CRYPTO_WRITE(dev, RK_CRYPTO_TDES_CTRL, ctx->mode);
-                memcpy_toio(dev->reg + RK_CRYPTO_TDES_IV_0, req->iv, ivsize);
+                CRYPTO_WRITE(dev, RK_CRYPTO_TDES_CTRL, rctx->mode);
+                memcpy_toio(ctx->dev->reg + RK_CRYPTO_TDES_KEY1_0, ctx->key, ctx->keylen);
                 conf_reg = RK_CRYPTO_DESSEL;
         } else {
-                ctx->mode |= RK_CRYPTO_AES_FIFO_MODE |
+                rctx->mode |= RK_CRYPTO_AES_FIFO_MODE |
                              RK_CRYPTO_AES_KEY_CHANGE |
                              RK_CRYPTO_AES_BYTESWAP_KEY |
                              RK_CRYPTO_AES_BYTESWAP_IV;
                 if (ctx->keylen == AES_KEYSIZE_192)
-                        ctx->mode |= RK_CRYPTO_AES_192BIT_key;
+                        rctx->mode |= RK_CRYPTO_AES_192BIT_key;
                 else if (ctx->keylen == AES_KEYSIZE_256)
-                        ctx->mode |= RK_CRYPTO_AES_256BIT_key;
-                CRYPTO_WRITE(dev, RK_CRYPTO_AES_CTRL, ctx->mode);
-                memcpy_toio(dev->reg + RK_CRYPTO_AES_IV_0, req->iv, ivsize);
+                        rctx->mode |= RK_CRYPTO_AES_256BIT_key;
+                CRYPTO_WRITE(dev, RK_CRYPTO_AES_CTRL, rctx->mode);
+                memcpy_toio(ctx->dev->reg + RK_CRYPTO_AES_KEY_0, ctx->key, ctx->keylen);
         }
         conf_reg |= RK_CRYPTO_BYTESWAP_BTFIFO |
                     RK_CRYPTO_BYTESWAP_BRFIFO;
@@ -231,146 +298,138 @@
                      RK_CRYPTO_BCDMA_ERR_ENA | RK_CRYPTO_BCDMA_DONE_ENA);
 }
 
-static void crypto_dma_start(struct rk_crypto_info *dev)
+static void crypto_dma_start(struct rk_crypto_info *dev,
+                             struct scatterlist *sgs,
+                             struct scatterlist *sgd, unsigned int todo)
 {
-        CRYPTO_WRITE(dev, RK_CRYPTO_BRDMAS, dev->addr_in);
-        CRYPTO_WRITE(dev, RK_CRYPTO_BRDMAL, dev->count / 4);
-        CRYPTO_WRITE(dev, RK_CRYPTO_BTDMAS, dev->addr_out);
+        CRYPTO_WRITE(dev, RK_CRYPTO_BRDMAS, sg_dma_address(sgs));
+        CRYPTO_WRITE(dev, RK_CRYPTO_BRDMAL, todo);
+        CRYPTO_WRITE(dev, RK_CRYPTO_BTDMAS, sg_dma_address(sgd));
         CRYPTO_WRITE(dev, RK_CRYPTO_CTRL, RK_CRYPTO_BLOCK_START |
                      _SBF(RK_CRYPTO_BLOCK_START, 16));
 }
 
-static int rk_set_data_start(struct rk_crypto_info *dev)
+static int rk_cipher_run(struct crypto_engine *engine, void *async_req)
 {
-        int err;
-        struct skcipher_request *req =
-                skcipher_request_cast(dev->async_req);
-        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+        struct skcipher_request *areq = container_of(async_req, struct skcipher_request, base);
+        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
         struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
-        u32 ivsize = crypto_skcipher_ivsize(tfm);
-        u8 *src_last_blk = page_address(sg_page(dev->sg_src)) +
-                dev->sg_src->offset + dev->sg_src->length - ivsize;
-
-        /* Store the iv that need to be updated in chain mode.
-         * And update the IV buffer to contain the next IV for decryption mode.
-         */
-        if (ctx->mode & RK_CRYPTO_DEC) {
-                memcpy(ctx->iv, src_last_blk, ivsize);
-                sg_pcopy_to_buffer(dev->first, dev->src_nents, req->iv,
-                                   ivsize, dev->total - ivsize);
-        }
-
-        err = dev->load_data(dev, dev->sg_src, dev->sg_dst);
-        if (!err)
-                crypto_dma_start(dev);
-        return err;
-}
-
-static int rk_ablk_start(struct rk_crypto_info *dev)
-{
-        struct skcipher_request *req =
-                skcipher_request_cast(dev->async_req);
-        unsigned long flags;
+        struct rk_cipher_rctx *rctx = skcipher_request_ctx(areq);
+        struct scatterlist *sgs, *sgd;
         int err = 0;
+        int ivsize = crypto_skcipher_ivsize(tfm);
+        int offset;
+        u8 iv[AES_BLOCK_SIZE];
+        u8 biv[AES_BLOCK_SIZE];
+        u8 *ivtouse = areq->iv;
+        unsigned int len = areq->cryptlen;
+        unsigned int todo;
 
-        dev->left_bytes = req->cryptlen;
-        dev->total = req->cryptlen;
-        dev->sg_src = req->src;
-        dev->first = req->src;
-        dev->src_nents = sg_nents(req->src);
-        dev->sg_dst = req->dst;
-        dev->dst_nents = sg_nents(req->dst);
-        dev->aligned = 1;
-
-        spin_lock_irqsave(&dev->lock, flags);
-        rk_ablk_hw_init(dev);
-        err = rk_set_data_start(dev);
-        spin_unlock_irqrestore(&dev->lock, flags);
-        return err;
-}
-
-static void rk_iv_copyback(struct rk_crypto_info *dev)
-{
-        struct skcipher_request *req =
-                skcipher_request_cast(dev->async_req);
-        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
-        struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
-        u32 ivsize = crypto_skcipher_ivsize(tfm);
-
-        /* Update the IV buffer to contain the next IV for encryption mode. */
-        if (!(ctx->mode & RK_CRYPTO_DEC)) {
-                if (dev->aligned) {
-                        memcpy(req->iv, sg_virt(dev->sg_dst) +
-                               dev->sg_dst->length - ivsize, ivsize);
-                } else {
-                        memcpy(req->iv, dev->addr_vir +
-                               dev->count - ivsize, ivsize);
+        ivsize = crypto_skcipher_ivsize(tfm);
+        if (areq->iv && crypto_skcipher_ivsize(tfm) > 0) {
+                if (rctx->mode & RK_CRYPTO_DEC) {
+                        offset = areq->cryptlen - ivsize;
+                        scatterwalk_map_and_copy(rctx->backup_iv, areq->src,
+                                                 offset, ivsize, 0);
                 }
         }
-}
 
-static void rk_update_iv(struct rk_crypto_info *dev)
-{
-        struct skcipher_request *req =
-                skcipher_request_cast(dev->async_req);
-        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
-        struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
-        u32 ivsize = crypto_skcipher_ivsize(tfm);
-        u8 *new_iv = NULL;
+        sgs = areq->src;
+        sgd = areq->dst;
 
-        if (ctx->mode & RK_CRYPTO_DEC) {
-                new_iv = ctx->iv;
-        } else {
-                new_iv = page_address(sg_page(dev->sg_dst)) +
-                         dev->sg_dst->offset + dev->sg_dst->length - ivsize;
-        }
-
-        if (ivsize == DES_BLOCK_SIZE)
-                memcpy_toio(dev->reg + RK_CRYPTO_TDES_IV_0, new_iv, ivsize);
-        else if (ivsize == AES_BLOCK_SIZE)
-                memcpy_toio(dev->reg + RK_CRYPTO_AES_IV_0, new_iv, ivsize);
-}
-
-/* return:
- *      true    some err was occurred
- *      fault   no err, continue
- */
-static int rk_ablk_rx(struct rk_crypto_info *dev)
-{
-        int err = 0;
-        struct skcipher_request *req =
-                skcipher_request_cast(dev->async_req);
-
-        dev->unload_data(dev);
-        if (!dev->aligned) {
-                if (!sg_pcopy_from_buffer(req->dst, dev->dst_nents,
-                                          dev->addr_vir, dev->count,
-                                          dev->total - dev->left_bytes -
-                                          dev->count)) {
-                        err = -EINVAL;
-                        goto out_rx;
+        while (sgs && sgd && len) {
+                if (!sgs->length) {
+                        sgs = sg_next(sgs);
+                        sgd = sg_next(sgd);
+                        continue;
                 }
-        }
-        if (dev->left_bytes) {
-                rk_update_iv(dev);
-                if (dev->aligned) {
-                        if (sg_is_last(dev->sg_src)) {
-                                dev_err(dev->dev, "[%s:%d] Lack of data\n",
-                                        __func__, __LINE__);
-                                err = -ENOMEM;
-                                goto out_rx;
+                if (rctx->mode & RK_CRYPTO_DEC) {
+                        /* we backup last block of source to be used as IV at next step */
+                        offset = sgs->length - ivsize;
+                        scatterwalk_map_and_copy(biv, sgs, offset, ivsize, 0);
+                }
+                if (sgs == sgd) {
+                        err = dma_map_sg(ctx->dev->dev, sgs, 1, DMA_BIDIRECTIONAL);
+                        if (err <= 0) {
+                                err = -EINVAL;
+                                goto theend_iv;
                         }
-                        dev->sg_src = sg_next(dev->sg_src);
-                        dev->sg_dst = sg_next(dev->sg_dst);
+                } else {
+                        err = dma_map_sg(ctx->dev->dev, sgs, 1, DMA_TO_DEVICE);
+                        if (err <= 0) {
+                                err = -EINVAL;
+                                goto theend_iv;
+                        }
+                        err = dma_map_sg(ctx->dev->dev, sgd, 1, DMA_FROM_DEVICE);
+                        if (err <= 0) {
+                                err = -EINVAL;
+                                goto theend_sgs;
+                        }
                 }
-                err = rk_set_data_start(dev);
-        } else {
-                rk_iv_copyback(dev);
-                /* here show the calculation is over without any err */
-                dev->complete(dev->async_req, 0);
-                tasklet_schedule(&dev->queue_task);
+                err = 0;
+                rk_ablk_hw_init(ctx->dev, areq);
+                if (ivsize) {
+                        if (ivsize == DES_BLOCK_SIZE)
+                                memcpy_toio(ctx->dev->reg + RK_CRYPTO_TDES_IV_0, ivtouse, ivsize);
+                        else
+                                memcpy_toio(ctx->dev->reg + RK_CRYPTO_AES_IV_0, ivtouse, ivsize);
+                }
+                reinit_completion(&ctx->dev->complete);
+                ctx->dev->status = 0;
+
+                todo = min(sg_dma_len(sgs), len);
+                len -= todo;
+                crypto_dma_start(ctx->dev, sgs, sgd, todo / 4);
+                wait_for_completion_interruptible_timeout(&ctx->dev->complete,
+                                                          msecs_to_jiffies(2000));
+                if (!ctx->dev->status) {
+                        dev_err(ctx->dev->dev, "DMA timeout\n");
+                        err = -EFAULT;
+                        goto theend;
+                }
+                if (sgs == sgd) {
+                        dma_unmap_sg(ctx->dev->dev, sgs, 1, DMA_BIDIRECTIONAL);
+                } else {
+                        dma_unmap_sg(ctx->dev->dev, sgs, 1, DMA_TO_DEVICE);
+                        dma_unmap_sg(ctx->dev->dev, sgd, 1, DMA_FROM_DEVICE);
+                }
+                if (rctx->mode & RK_CRYPTO_DEC) {
+                        memcpy(iv, biv, ivsize);
+                        ivtouse = iv;
+                } else {
+                        offset = sgd->length - ivsize;
+                        scatterwalk_map_and_copy(iv, sgd, offset, ivsize, 0);
+                        ivtouse = iv;
+                }
+                sgs = sg_next(sgs);
+                sgd = sg_next(sgd);
         }
-out_rx:
+
+        if (areq->iv && ivsize > 0) {
+                offset = areq->cryptlen - ivsize;
+                if (rctx->mode & RK_CRYPTO_DEC) {
+                        memcpy(areq->iv, rctx->backup_iv, ivsize);
+                        memzero_explicit(rctx->backup_iv, ivsize);
+                } else {
+                        scatterwalk_map_and_copy(areq->iv, areq->dst, offset,
+                                                 ivsize, 0);
+                }
+        }
+
+theend:
+        local_bh_disable();
+        crypto_finalize_skcipher_request(engine, areq, err);
+        local_bh_enable();
+        return 0;
+
+theend_sgs:
+        if (sgs == sgd) {
+                dma_unmap_sg(ctx->dev->dev, sgs, 1, DMA_BIDIRECTIONAL);
+        } else {
+                dma_unmap_sg(ctx->dev->dev, sgs, 1, DMA_TO_DEVICE);
+                dma_unmap_sg(ctx->dev->dev, sgd, 1, DMA_FROM_DEVICE);
+        }
+theend_iv:
         return err;
 }
 
@@ -378,26 +437,34 @@
 {
         struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
         struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
+        const char *name = crypto_tfm_alg_name(&tfm->base);
         struct rk_crypto_tmp *algt;
 
         algt = container_of(alg, struct rk_crypto_tmp, alg.skcipher);
 
         ctx->dev = algt->dev;
-        ctx->dev->align_size = crypto_tfm_alg_alignmask(crypto_skcipher_tfm(tfm)) + 1;
-        ctx->dev->start = rk_ablk_start;
-        ctx->dev->update = rk_ablk_rx;
-        ctx->dev->complete = rk_crypto_complete;
-        ctx->dev->addr_vir = (char *)__get_free_page(GFP_KERNEL);
 
-        return ctx->dev->addr_vir ? ctx->dev->enable_clk(ctx->dev) : -ENOMEM;
+        ctx->fallback_tfm = crypto_alloc_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK);
+        if (IS_ERR(ctx->fallback_tfm)) {
+                dev_err(ctx->dev->dev, "ERROR: Cannot allocate fallback for %s %ld\n",
+                        name, PTR_ERR(ctx->fallback_tfm));
+                return PTR_ERR(ctx->fallback_tfm);
+        }
+
+        tfm->reqsize = sizeof(struct rk_cipher_rctx) +
+                crypto_skcipher_reqsize(ctx->fallback_tfm);
+
+        ctx->enginectx.op.do_one_request = rk_cipher_run;
+
+        return 0;
 }
 
 static void rk_ablk_exit_tfm(struct crypto_skcipher *tfm)
 {
         struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
 
-        free_page((unsigned long)ctx->dev->addr_vir);
-        ctx->dev->disable_clk(ctx->dev);
+        memzero_explicit(ctx->key, ctx->keylen);
+        crypto_free_skcipher(ctx->fallback_tfm);
 }
 
 struct rk_crypto_tmp rk_ecb_aes_alg = {
@@ -406,7 +473,7 @@
                 .base.cra_name = "ecb(aes)",
                 .base.cra_driver_name = "ecb-aes-rk",
                 .base.cra_priority = 300,
-                .base.cra_flags = CRYPTO_ALG_ASYNC,
+                .base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
                 .base.cra_blocksize = AES_BLOCK_SIZE,
                 .base.cra_ctxsize = sizeof(struct rk_cipher_ctx),
                 .base.cra_alignmask = 0x0f,
@@ -428,7 +495,7 @@
                 .base.cra_name = "cbc(aes)",
                 .base.cra_driver_name = "cbc-aes-rk",
                 .base.cra_priority = 300,
-                .base.cra_flags = CRYPTO_ALG_ASYNC,
+                .base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
                 .base.cra_blocksize = AES_BLOCK_SIZE,
                 .base.cra_ctxsize = sizeof(struct rk_cipher_ctx),
                 .base.cra_alignmask = 0x0f,
@@ -451,7 +518,7 @@
                 .base.cra_name = "ecb(des)",
                 .base.cra_driver_name = "ecb-des-rk",
                 .base.cra_priority = 300,
-                .base.cra_flags = CRYPTO_ALG_ASYNC,
+                .base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
                 .base.cra_blocksize = DES_BLOCK_SIZE,
                 .base.cra_ctxsize = sizeof(struct rk_cipher_ctx),
                 .base.cra_alignmask = 0x07,
@@ -473,7 +540,7 @@
                 .base.cra_name = "cbc(des)",
                 .base.cra_driver_name = "cbc-des-rk",
                 .base.cra_priority = 300,
-                .base.cra_flags = CRYPTO_ALG_ASYNC,
+                .base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
                 .base.cra_blocksize = DES_BLOCK_SIZE,
                 .base.cra_ctxsize = sizeof(struct rk_cipher_ctx),
                 .base.cra_alignmask = 0x07,
@@ -496,7 +563,7 @@
                 .base.cra_name = "ecb(des3_ede)",
                 .base.cra_driver_name = "ecb-des3-ede-rk",
                 .base.cra_priority = 300,
-                .base.cra_flags = CRYPTO_ALG_ASYNC,
+                .base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
                 .base.cra_blocksize = DES_BLOCK_SIZE,
                 .base.cra_ctxsize = sizeof(struct rk_cipher_ctx),
                 .base.cra_alignmask = 0x07,
@@ -518,7 +585,7 @@
                 .base.cra_name = "cbc(des3_ede)",
                 .base.cra_driver_name = "cbc-des3-ede-rk",
                 .base.cra_priority = 300,
-                .base.cra_flags = CRYPTO_ALG_ASYNC,
+                .base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
                 .base.cra_blocksize = DES_BLOCK_SIZE,
                 .base.cra_ctxsize = sizeof(struct rk_cipher_ctx),
                 .base.cra_alignmask = 0x07,