| .. | .. |
|---|
| 1 | 1 | // SPDX-License-Identifier: GPL-2.0 |
|---|
| 2 | | -/* Copyright (C) 2012-2018 ARM Limited or its affiliates. */ |
|---|
| 2 | +/* Copyright (C) 2012-2019 ARM Limited (or its affiliates). */ |
|---|
| 3 | 3 | |
|---|
| 4 | 4 | #include <linux/kernel.h> |
|---|
| 5 | 5 | #include <linux/module.h> |
|---|
| 6 | 6 | #include <crypto/algapi.h> |
|---|
| 7 | 7 | #include <crypto/internal/skcipher.h> |
|---|
| 8 | | -#include <crypto/des.h> |
|---|
| 8 | +#include <crypto/internal/des.h> |
|---|
| 9 | 9 | #include <crypto/xts.h> |
|---|
| 10 | +#include <crypto/sm4.h> |
|---|
| 10 | 11 | #include <crypto/scatterwalk.h> |
|---|
| 11 | 12 | |
|---|
| 12 | 13 | #include "cc_driver.h" |
|---|
| .. | .. |
|---|
| 15 | 16 | #include "cc_cipher.h" |
|---|
| 16 | 17 | #include "cc_request_mgr.h" |
|---|
| 17 | 18 | |
|---|
| 18 | | -#define MAX_ABLKCIPHER_SEQ_LEN 6 |
|---|
| 19 | +#define MAX_SKCIPHER_SEQ_LEN 6 |
|---|
| 19 | 20 | |
|---|
| 20 | 21 | #define template_skcipher template_u.skcipher |
|---|
| 21 | | - |
|---|
| 22 | | -struct cc_cipher_handle { |
|---|
| 23 | | - struct list_head alg_list; |
|---|
| 24 | | -}; |
|---|
| 25 | 22 | |
|---|
| 26 | 23 | struct cc_user_key_info { |
|---|
| 27 | 24 | u8 *key; |
|---|
| .. | .. |
|---|
| 33 | 30 | enum cc_hw_crypto_key key2_slot; |
|---|
| 34 | 31 | }; |
|---|
| 35 | 32 | |
|---|
| 33 | +struct cc_cpp_key_info { |
|---|
| 34 | + u8 slot; |
|---|
| 35 | + enum cc_cpp_alg alg; |
|---|
| 36 | +}; |
|---|
| 37 | + |
|---|
| 38 | +enum cc_key_type { |
|---|
| 39 | + CC_UNPROTECTED_KEY, /* User key */ |
|---|
| 40 | + CC_HW_PROTECTED_KEY, /* HW (FDE) key */ |
|---|
| 41 | + CC_POLICY_PROTECTED_KEY, /* CPP key */ |
|---|
| 42 | + CC_INVALID_PROTECTED_KEY /* Invalid key */ |
|---|
| 43 | +}; |
|---|
| 44 | + |
|---|
| 36 | 45 | struct cc_cipher_ctx { |
|---|
| 37 | 46 | struct cc_drvdata *drvdata; |
|---|
| 38 | 47 | int keylen; |
|---|
| 39 | | - int key_round_number; |
|---|
| 40 | 48 | int cipher_mode; |
|---|
| 41 | 49 | int flow_mode; |
|---|
| 42 | 50 | unsigned int flags; |
|---|
| 43 | | - bool hw_key; |
|---|
| 51 | + enum cc_key_type key_type; |
|---|
| 44 | 52 | struct cc_user_key_info user; |
|---|
| 45 | | - struct cc_hw_key_info hw; |
|---|
| 53 | + union { |
|---|
| 54 | + struct cc_hw_key_info hw; |
|---|
| 55 | + struct cc_cpp_key_info cpp; |
|---|
| 56 | + }; |
|---|
| 46 | 57 | struct crypto_shash *shash_tfm; |
|---|
| 58 | + struct crypto_skcipher *fallback_tfm; |
|---|
| 59 | + bool fallback_on; |
|---|
| 47 | 60 | }; |
|---|
| 48 | 61 | |
|---|
| 49 | 62 | static void cc_cipher_complete(struct device *dev, void *cc_req, int err); |
|---|
| 50 | 63 | |
|---|
| 51 | | -static inline bool cc_is_hw_key(struct crypto_tfm *tfm) |
|---|
| 64 | +static inline enum cc_key_type cc_key_type(struct crypto_tfm *tfm) |
|---|
| 52 | 65 | { |
|---|
| 53 | 66 | struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); |
|---|
| 54 | 67 | |
|---|
| 55 | | - return ctx_p->hw_key; |
|---|
| 68 | + return ctx_p->key_type; |
|---|
| 56 | 69 | } |
|---|
| 57 | 70 | |
|---|
| 58 | 71 | static int validate_keys_sizes(struct cc_cipher_ctx *ctx_p, u32 size) |
|---|
| .. | .. |
|---|
| 62 | 75 | switch (size) { |
|---|
| 63 | 76 | case CC_AES_128_BIT_KEY_SIZE: |
|---|
| 64 | 77 | case CC_AES_192_BIT_KEY_SIZE: |
|---|
| 65 | | - if (ctx_p->cipher_mode != DRV_CIPHER_XTS && |
|---|
| 66 | | - ctx_p->cipher_mode != DRV_CIPHER_ESSIV && |
|---|
| 67 | | - ctx_p->cipher_mode != DRV_CIPHER_BITLOCKER) |
|---|
| 78 | + if (ctx_p->cipher_mode != DRV_CIPHER_XTS) |
|---|
| 68 | 79 | return 0; |
|---|
| 69 | 80 | break; |
|---|
| 70 | 81 | case CC_AES_256_BIT_KEY_SIZE: |
|---|
| .. | .. |
|---|
| 72 | 83 | case (CC_AES_192_BIT_KEY_SIZE * 2): |
|---|
| 73 | 84 | case (CC_AES_256_BIT_KEY_SIZE * 2): |
|---|
| 74 | 85 | if (ctx_p->cipher_mode == DRV_CIPHER_XTS || |
|---|
| 75 | | - ctx_p->cipher_mode == DRV_CIPHER_ESSIV || |
|---|
| 76 | | - ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER) |
|---|
| 86 | + ctx_p->cipher_mode == DRV_CIPHER_ESSIV) |
|---|
| 77 | 87 | return 0; |
|---|
| 78 | 88 | break; |
|---|
| 79 | 89 | default: |
|---|
| .. | .. |
|---|
| 84 | 94 | if (size == DES3_EDE_KEY_SIZE || size == DES_KEY_SIZE) |
|---|
| 85 | 95 | return 0; |
|---|
| 86 | 96 | break; |
|---|
| 97 | + case S_DIN_to_SM4: |
|---|
| 98 | + if (size == SM4_KEY_SIZE) |
|---|
| 99 | + return 0; |
|---|
| 87 | 100 | default: |
|---|
| 88 | 101 | break; |
|---|
| 89 | 102 | } |
|---|
| .. | .. |
|---|
| 97 | 110 | case S_DIN_to_AES: |
|---|
| 98 | 111 | switch (ctx_p->cipher_mode) { |
|---|
| 99 | 112 | case DRV_CIPHER_XTS: |
|---|
| 100 | | - if (size >= AES_BLOCK_SIZE && |
|---|
| 101 | | - IS_ALIGNED(size, AES_BLOCK_SIZE)) |
|---|
| 102 | | - return 0; |
|---|
| 103 | | - break; |
|---|
| 104 | 113 | case DRV_CIPHER_CBC_CTS: |
|---|
| 105 | 114 | if (size >= AES_BLOCK_SIZE) |
|---|
| 106 | 115 | return 0; |
|---|
| .. | .. |
|---|
| 111 | 120 | case DRV_CIPHER_ECB: |
|---|
| 112 | 121 | case DRV_CIPHER_CBC: |
|---|
| 113 | 122 | case DRV_CIPHER_ESSIV: |
|---|
| 114 | | - case DRV_CIPHER_BITLOCKER: |
|---|
| 115 | 123 | if (IS_ALIGNED(size, AES_BLOCK_SIZE)) |
|---|
| 116 | 124 | return 0; |
|---|
| 117 | 125 | break; |
|---|
| .. | .. |
|---|
| 123 | 131 | if (IS_ALIGNED(size, DES_BLOCK_SIZE)) |
|---|
| 124 | 132 | return 0; |
|---|
| 125 | 133 | break; |
|---|
| 134 | + case S_DIN_to_SM4: |
|---|
| 135 | + switch (ctx_p->cipher_mode) { |
|---|
| 136 | + case DRV_CIPHER_CTR: |
|---|
| 137 | + return 0; |
|---|
| 138 | + case DRV_CIPHER_ECB: |
|---|
| 139 | + case DRV_CIPHER_CBC: |
|---|
| 140 | + if (IS_ALIGNED(size, SM4_BLOCK_SIZE)) |
|---|
| 141 | + return 0; |
|---|
| 142 | + default: |
|---|
| 143 | + break; |
|---|
| 144 | + } |
|---|
| 126 | 145 | default: |
|---|
| 127 | 146 | break; |
|---|
| 128 | 147 | } |
|---|
| .. | .. |
|---|
| 137 | 156 | skcipher_alg.base); |
|---|
| 138 | 157 | struct device *dev = drvdata_to_dev(cc_alg->drvdata); |
|---|
| 139 | 158 | unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize; |
|---|
| 159 | + unsigned int fallback_req_size = 0; |
|---|
| 140 | 160 | |
|---|
| 141 | 161 | dev_dbg(dev, "Initializing context @%p for %s\n", ctx_p, |
|---|
| 142 | 162 | crypto_tfm_alg_name(tfm)); |
|---|
| 143 | | - |
|---|
| 144 | | - crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm), |
|---|
| 145 | | - sizeof(struct cipher_req_ctx)); |
|---|
| 146 | 163 | |
|---|
| 147 | 164 | ctx_p->cipher_mode = cc_alg->cipher_mode; |
|---|
| 148 | 165 | ctx_p->flow_mode = cc_alg->flow_mode; |
|---|
| 149 | 166 | ctx_p->drvdata = cc_alg->drvdata; |
|---|
| 150 | 167 | |
|---|
| 151 | 168 | if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) { |
|---|
| 169 | + const char *name = crypto_tfm_alg_name(tfm); |
|---|
| 170 | + |
|---|
| 152 | 171 | /* Alloc hash tfm for essiv */ |
|---|
| 153 | | - ctx_p->shash_tfm = crypto_alloc_shash("sha256-generic", 0, 0); |
|---|
| 172 | + ctx_p->shash_tfm = crypto_alloc_shash("sha256", 0, 0); |
|---|
| 154 | 173 | if (IS_ERR(ctx_p->shash_tfm)) { |
|---|
| 155 | 174 | dev_err(dev, "Error allocating hash tfm for ESSIV.\n"); |
|---|
| 156 | 175 | return PTR_ERR(ctx_p->shash_tfm); |
|---|
| 157 | 176 | } |
|---|
| 177 | + max_key_buf_size <<= 1; |
|---|
| 178 | + |
|---|
| 179 | + /* Alloc fallback tfm for essiv when key size != 256 bit */ |
|---|
| 180 | + ctx_p->fallback_tfm = |
|---|
| 181 | + crypto_alloc_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK | CRYPTO_ALG_ASYNC); |
|---|
| 182 | + |
|---|
| 183 | + if (IS_ERR(ctx_p->fallback_tfm)) { |
|---|
| 184 | + /* Note we're still allowing registration with no fallback since it's |
|---|
| 185 | + * better to have most modes supported than none at all. |
|---|
| 186 | + */ |
|---|
| 187 | + dev_warn(dev, "Error allocating fallback algo %s. Some modes may be available.\n", |
|---|
| 188 | + name); |
|---|
| 189 | + ctx_p->fallback_tfm = NULL; |
|---|
| 190 | + } else { |
|---|
| 191 | + fallback_req_size = crypto_skcipher_reqsize(ctx_p->fallback_tfm); |
|---|
| 192 | + } |
|---|
| 158 | 193 | } |
|---|
| 159 | 194 | |
|---|
| 195 | + crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm), |
|---|
| 196 | + sizeof(struct cipher_req_ctx) + fallback_req_size); |
|---|
| 197 | + |
|---|
| 160 | 198 | /* Allocate key buffer, cache line aligned */ |
|---|
| 161 | | - ctx_p->user.key = kmalloc(max_key_buf_size, GFP_KERNEL); |
|---|
| 199 | + ctx_p->user.key = kzalloc(max_key_buf_size, GFP_KERNEL); |
|---|
| 162 | 200 | if (!ctx_p->user.key) |
|---|
| 163 | | - goto free_shash; |
|---|
| 201 | + goto free_fallback; |
|---|
| 164 | 202 | |
|---|
| 165 | 203 | dev_dbg(dev, "Allocated key buffer in context. key=@%p\n", |
|---|
| 166 | 204 | ctx_p->user.key); |
|---|
| 167 | 205 | |
|---|
| 168 | 206 | /* Map key buffer */ |
|---|
| 169 | | - ctx_p->user.key_dma_addr = dma_map_single(dev, (void *)ctx_p->user.key, |
|---|
| 207 | + ctx_p->user.key_dma_addr = dma_map_single(dev, ctx_p->user.key, |
|---|
| 170 | 208 | max_key_buf_size, |
|---|
| 171 | 209 | DMA_TO_DEVICE); |
|---|
| 172 | 210 | if (dma_mapping_error(dev, ctx_p->user.key_dma_addr)) { |
|---|
| .. | .. |
|---|
| 181 | 219 | |
|---|
| 182 | 220 | free_key: |
|---|
| 183 | 221 | kfree(ctx_p->user.key); |
|---|
| 184 | | -free_shash: |
|---|
| 222 | +free_fallback: |
|---|
| 223 | + crypto_free_skcipher(ctx_p->fallback_tfm); |
|---|
| 185 | 224 | crypto_free_shash(ctx_p->shash_tfm); |
|---|
| 186 | 225 | |
|---|
| 187 | 226 | return -ENOMEM; |
|---|
| .. | .. |
|---|
| 204 | 243 | /* Free hash tfm for essiv */ |
|---|
| 205 | 244 | crypto_free_shash(ctx_p->shash_tfm); |
|---|
| 206 | 245 | ctx_p->shash_tfm = NULL; |
|---|
| 246 | + crypto_free_skcipher(ctx_p->fallback_tfm); |
|---|
| 247 | + ctx_p->fallback_tfm = NULL; |
|---|
| 207 | 248 | } |
|---|
| 208 | 249 | |
|---|
| 209 | 250 | /* Unmap key buffer */ |
|---|
| .. | .. |
|---|
| 213 | 254 | &ctx_p->user.key_dma_addr); |
|---|
| 214 | 255 | |
|---|
| 215 | 256 | /* Free key buffer in context */ |
|---|
| 216 | | - kzfree(ctx_p->user.key); |
|---|
| 217 | 257 | dev_dbg(dev, "Free key buffer in context. key=@%p\n", ctx_p->user.key); |
|---|
| 258 | + kfree_sensitive(ctx_p->user.key); |
|---|
| 218 | 259 | } |
|---|
| 219 | 260 | |
|---|
| 220 | 261 | struct tdes_keys { |
|---|
| .. | .. |
|---|
| 223 | 264 | u8 key3[DES_KEY_SIZE]; |
|---|
| 224 | 265 | }; |
|---|
| 225 | 266 | |
|---|
| 226 | | -static enum cc_hw_crypto_key cc_slot_to_hw_key(int slot_num) |
|---|
| 267 | +static enum cc_hw_crypto_key cc_slot_to_hw_key(u8 slot_num) |
|---|
| 227 | 268 | { |
|---|
| 228 | 269 | switch (slot_num) { |
|---|
| 229 | 270 | case 0: |
|---|
| .. | .. |
|---|
| 238 | 279 | return END_OF_KEYS; |
|---|
| 239 | 280 | } |
|---|
| 240 | 281 | |
|---|
| 282 | +static u8 cc_slot_to_cpp_key(u8 slot_num) |
|---|
| 283 | +{ |
|---|
| 284 | + return (slot_num - CC_FIRST_CPP_KEY_SLOT); |
|---|
| 285 | +} |
|---|
| 286 | + |
|---|
| 287 | +static inline enum cc_key_type cc_slot_to_key_type(u8 slot_num) |
|---|
| 288 | +{ |
|---|
| 289 | + if (slot_num >= CC_FIRST_HW_KEY_SLOT && slot_num <= CC_LAST_HW_KEY_SLOT) |
|---|
| 290 | + return CC_HW_PROTECTED_KEY; |
|---|
| 291 | + else if (slot_num >= CC_FIRST_CPP_KEY_SLOT && |
|---|
| 292 | + slot_num <= CC_LAST_CPP_KEY_SLOT) |
|---|
| 293 | + return CC_POLICY_PROTECTED_KEY; |
|---|
| 294 | + else |
|---|
| 295 | + return CC_INVALID_PROTECTED_KEY; |
|---|
| 296 | +} |
|---|
| 297 | + |
|---|
| 241 | 298 | static int cc_cipher_sethkey(struct crypto_skcipher *sktfm, const u8 *key, |
|---|
| 242 | 299 | unsigned int keylen) |
|---|
| 243 | 300 | { |
|---|
| .. | .. |
|---|
| 248 | 305 | |
|---|
| 249 | 306 | dev_dbg(dev, "Setting HW key in context @%p for %s. keylen=%u\n", |
|---|
| 250 | 307 | ctx_p, crypto_tfm_alg_name(tfm), keylen); |
|---|
| 251 | | - dump_byte_array("key", (u8 *)key, keylen); |
|---|
| 308 | + dump_byte_array("key", key, keylen); |
|---|
| 252 | 309 | |
|---|
| 253 | 310 | /* STAT_PHASE_0: Init and sanity checks */ |
|---|
| 254 | 311 | |
|---|
| 255 | | - /* This check the size of the hardware key token */ |
|---|
| 312 | + /* This checks the size of the protected key token */ |
|---|
| 256 | 313 | if (keylen != sizeof(hki)) { |
|---|
| 257 | | - dev_err(dev, "Unsupported HW key size %d.\n", keylen); |
|---|
| 258 | | - crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN); |
|---|
| 259 | | - return -EINVAL; |
|---|
| 260 | | - } |
|---|
| 261 | | - |
|---|
| 262 | | - if (ctx_p->flow_mode != S_DIN_to_AES) { |
|---|
| 263 | | - dev_err(dev, "HW key not supported for non-AES flows\n"); |
|---|
| 314 | + dev_err(dev, "Unsupported protected key size %d.\n", keylen); |
|---|
| 264 | 315 | return -EINVAL; |
|---|
| 265 | 316 | } |
|---|
| 266 | 317 | |
|---|
| .. | .. |
|---|
| 272 | 323 | keylen = hki.keylen; |
|---|
| 273 | 324 | |
|---|
| 274 | 325 | if (validate_keys_sizes(ctx_p, keylen)) { |
|---|
| 275 | | - dev_err(dev, "Unsupported key size %d.\n", keylen); |
|---|
| 276 | | - crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN); |
|---|
| 326 | + dev_dbg(dev, "Unsupported key size %d.\n", keylen); |
|---|
| 277 | 327 | return -EINVAL; |
|---|
| 278 | | - } |
|---|
| 279 | | - |
|---|
| 280 | | - ctx_p->hw.key1_slot = cc_slot_to_hw_key(hki.hw_key1); |
|---|
| 281 | | - if (ctx_p->hw.key1_slot == END_OF_KEYS) { |
|---|
| 282 | | - dev_err(dev, "Unsupported hw key1 number (%d)\n", hki.hw_key1); |
|---|
| 283 | | - return -EINVAL; |
|---|
| 284 | | - } |
|---|
| 285 | | - |
|---|
| 286 | | - if (ctx_p->cipher_mode == DRV_CIPHER_XTS || |
|---|
| 287 | | - ctx_p->cipher_mode == DRV_CIPHER_ESSIV || |
|---|
| 288 | | - ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER) { |
|---|
| 289 | | - if (hki.hw_key1 == hki.hw_key2) { |
|---|
| 290 | | - dev_err(dev, "Illegal hw key numbers (%d,%d)\n", |
|---|
| 291 | | - hki.hw_key1, hki.hw_key2); |
|---|
| 292 | | - return -EINVAL; |
|---|
| 293 | | - } |
|---|
| 294 | | - ctx_p->hw.key2_slot = cc_slot_to_hw_key(hki.hw_key2); |
|---|
| 295 | | - if (ctx_p->hw.key2_slot == END_OF_KEYS) { |
|---|
| 296 | | - dev_err(dev, "Unsupported hw key2 number (%d)\n", |
|---|
| 297 | | - hki.hw_key2); |
|---|
| 298 | | - return -EINVAL; |
|---|
| 299 | | - } |
|---|
| 300 | 328 | } |
|---|
| 301 | 329 | |
|---|
| 302 | 330 | ctx_p->keylen = keylen; |
|---|
| 303 | | - ctx_p->hw_key = true; |
|---|
| 304 | | - dev_dbg(dev, "cc_is_hw_key ret 0"); |
|---|
| 331 | + ctx_p->fallback_on = false; |
|---|
| 332 | + |
|---|
| 333 | + switch (cc_slot_to_key_type(hki.hw_key1)) { |
|---|
| 334 | + case CC_HW_PROTECTED_KEY: |
|---|
| 335 | + if (ctx_p->flow_mode == S_DIN_to_SM4) { |
|---|
| 336 | + dev_err(dev, "Only AES HW protected keys are supported\n"); |
|---|
| 337 | + return -EINVAL; |
|---|
| 338 | + } |
|---|
| 339 | + |
|---|
| 340 | + ctx_p->hw.key1_slot = cc_slot_to_hw_key(hki.hw_key1); |
|---|
| 341 | + if (ctx_p->hw.key1_slot == END_OF_KEYS) { |
|---|
| 342 | + dev_err(dev, "Unsupported hw key1 number (%d)\n", |
|---|
| 343 | + hki.hw_key1); |
|---|
| 344 | + return -EINVAL; |
|---|
| 345 | + } |
|---|
| 346 | + |
|---|
| 347 | + if (ctx_p->cipher_mode == DRV_CIPHER_XTS || |
|---|
| 348 | + ctx_p->cipher_mode == DRV_CIPHER_ESSIV) { |
|---|
| 349 | + if (hki.hw_key1 == hki.hw_key2) { |
|---|
| 350 | + dev_err(dev, "Illegal hw key numbers (%d,%d)\n", |
|---|
| 351 | + hki.hw_key1, hki.hw_key2); |
|---|
| 352 | + return -EINVAL; |
|---|
| 353 | + } |
|---|
| 354 | + |
|---|
| 355 | + ctx_p->hw.key2_slot = cc_slot_to_hw_key(hki.hw_key2); |
|---|
| 356 | + if (ctx_p->hw.key2_slot == END_OF_KEYS) { |
|---|
| 357 | + dev_err(dev, "Unsupported hw key2 number (%d)\n", |
|---|
| 358 | + hki.hw_key2); |
|---|
| 359 | + return -EINVAL; |
|---|
| 360 | + } |
|---|
| 361 | + } |
|---|
| 362 | + |
|---|
| 363 | + ctx_p->key_type = CC_HW_PROTECTED_KEY; |
|---|
| 364 | + dev_dbg(dev, "HW protected key %d/%d set\n.", |
|---|
| 365 | + ctx_p->hw.key1_slot, ctx_p->hw.key2_slot); |
|---|
| 366 | + break; |
|---|
| 367 | + |
|---|
| 368 | + case CC_POLICY_PROTECTED_KEY: |
|---|
| 369 | + if (ctx_p->drvdata->hw_rev < CC_HW_REV_713) { |
|---|
| 370 | + dev_err(dev, "CPP keys not supported in this hardware revision.\n"); |
|---|
| 371 | + return -EINVAL; |
|---|
| 372 | + } |
|---|
| 373 | + |
|---|
| 374 | + if (ctx_p->cipher_mode != DRV_CIPHER_CBC && |
|---|
| 375 | + ctx_p->cipher_mode != DRV_CIPHER_CTR) { |
|---|
| 376 | + dev_err(dev, "CPP keys only supported in CBC or CTR modes.\n"); |
|---|
| 377 | + return -EINVAL; |
|---|
| 378 | + } |
|---|
| 379 | + |
|---|
| 380 | + ctx_p->cpp.slot = cc_slot_to_cpp_key(hki.hw_key1); |
|---|
| 381 | + if (ctx_p->flow_mode == S_DIN_to_AES) |
|---|
| 382 | + ctx_p->cpp.alg = CC_CPP_AES; |
|---|
| 383 | + else /* Must be SM4 due to sethkey registration */ |
|---|
| 384 | + ctx_p->cpp.alg = CC_CPP_SM4; |
|---|
| 385 | + ctx_p->key_type = CC_POLICY_PROTECTED_KEY; |
|---|
| 386 | + dev_dbg(dev, "policy protected key alg: %d slot: %d.\n", |
|---|
| 387 | + ctx_p->cpp.alg, ctx_p->cpp.slot); |
|---|
| 388 | + break; |
|---|
| 389 | + |
|---|
| 390 | + default: |
|---|
| 391 | + dev_err(dev, "Unsupported protected key (%d)\n", hki.hw_key1); |
|---|
| 392 | + return -EINVAL; |
|---|
| 393 | + } |
|---|
| 305 | 394 | |
|---|
| 306 | 395 | return 0; |
|---|
| 307 | 396 | } |
|---|
| .. | .. |
|---|
| 319 | 408 | |
|---|
| 320 | 409 | dev_dbg(dev, "Setting key in context @%p for %s. keylen=%u\n", |
|---|
| 321 | 410 | ctx_p, crypto_tfm_alg_name(tfm), keylen); |
|---|
| 322 | | - dump_byte_array("key", (u8 *)key, keylen); |
|---|
| 411 | + dump_byte_array("key", key, keylen); |
|---|
| 323 | 412 | |
|---|
| 324 | 413 | /* STAT_PHASE_0: Init and sanity checks */ |
|---|
| 325 | 414 | |
|---|
| 326 | 415 | if (validate_keys_sizes(ctx_p, keylen)) { |
|---|
| 327 | | - dev_err(dev, "Unsupported key size %d.\n", keylen); |
|---|
| 328 | | - crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN); |
|---|
| 416 | + dev_dbg(dev, "Invalid key size %d.\n", keylen); |
|---|
| 329 | 417 | return -EINVAL; |
|---|
| 330 | 418 | } |
|---|
| 331 | 419 | |
|---|
| 332 | | - ctx_p->hw_key = false; |
|---|
| 420 | + if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) { |
|---|
| 421 | + |
|---|
| 422 | + /* We only support 256 bit ESSIV-CBC-AES keys */ |
|---|
| 423 | + if (keylen != AES_KEYSIZE_256) { |
|---|
| 424 | + unsigned int flags = crypto_tfm_get_flags(tfm) & CRYPTO_TFM_REQ_MASK; |
|---|
| 425 | + |
|---|
| 426 | + if (likely(ctx_p->fallback_tfm)) { |
|---|
| 427 | + ctx_p->fallback_on = true; |
|---|
| 428 | + crypto_skcipher_clear_flags(ctx_p->fallback_tfm, |
|---|
| 429 | + CRYPTO_TFM_REQ_MASK); |
|---|
| 430 | + crypto_skcipher_clear_flags(ctx_p->fallback_tfm, flags); |
|---|
| 431 | + return crypto_skcipher_setkey(ctx_p->fallback_tfm, key, keylen); |
|---|
| 432 | + } |
|---|
| 433 | + |
|---|
| 434 | + dev_dbg(dev, "Unsupported key size %d and no fallback.\n", keylen); |
|---|
| 435 | + return -EINVAL; |
|---|
| 436 | + } |
|---|
| 437 | + |
|---|
| 438 | + /* Internal ESSIV key buffer is double sized */ |
|---|
| 439 | + max_key_buf_size <<= 1; |
|---|
| 440 | + } |
|---|
| 441 | + |
|---|
| 442 | + ctx_p->fallback_on = false; |
|---|
| 443 | + ctx_p->key_type = CC_UNPROTECTED_KEY; |
|---|
| 333 | 444 | |
|---|
| 334 | 445 | /* |
|---|
| 335 | 446 | * Verify DES weak keys |
|---|
| .. | .. |
|---|
| 337 | 448 | * HW does the expansion on its own. |
|---|
| 338 | 449 | */ |
|---|
| 339 | 450 | if (ctx_p->flow_mode == S_DIN_to_DES) { |
|---|
| 340 | | - u32 tmp[DES3_EDE_EXPKEY_WORDS]; |
|---|
| 341 | | - if (keylen == DES3_EDE_KEY_SIZE && |
|---|
| 342 | | - __des3_ede_setkey(tmp, &tfm->crt_flags, key, |
|---|
| 343 | | - DES3_EDE_KEY_SIZE)) { |
|---|
| 344 | | - dev_dbg(dev, "weak 3DES key"); |
|---|
| 345 | | - return -EINVAL; |
|---|
| 346 | | - } else if (!des_ekey(tmp, key) && |
|---|
| 347 | | - (crypto_tfm_get_flags(tfm) & CRYPTO_TFM_REQ_WEAK_KEY)) { |
|---|
| 348 | | - tfm->crt_flags |= CRYPTO_TFM_RES_WEAK_KEY; |
|---|
| 451 | + if ((keylen == DES3_EDE_KEY_SIZE && |
|---|
| 452 | + verify_skcipher_des3_key(sktfm, key)) || |
|---|
| 453 | + verify_skcipher_des_key(sktfm, key)) { |
|---|
| 349 | 454 | dev_dbg(dev, "weak DES key"); |
|---|
| 350 | 455 | return -EINVAL; |
|---|
| 351 | 456 | } |
|---|
| .. | .. |
|---|
| 362 | 467 | max_key_buf_size, DMA_TO_DEVICE); |
|---|
| 363 | 468 | |
|---|
| 364 | 469 | memcpy(ctx_p->user.key, key, keylen); |
|---|
| 365 | | - if (keylen == 24) |
|---|
| 366 | | - memset(ctx_p->user.key + 24, 0, CC_AES_KEY_SIZE_MAX - 24); |
|---|
| 367 | 470 | |
|---|
| 368 | 471 | if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) { |
|---|
| 369 | 472 | /* sha256 for key2 - use sw implementation */ |
|---|
| 370 | | - int key_len = keylen >> 1; |
|---|
| 371 | 473 | int err; |
|---|
| 372 | 474 | |
|---|
| 373 | | - SHASH_DESC_ON_STACK(desc, ctx_p->shash_tfm); |
|---|
| 374 | | - |
|---|
| 375 | | - desc->tfm = ctx_p->shash_tfm; |
|---|
| 376 | | - |
|---|
| 377 | | - err = crypto_shash_digest(desc, ctx_p->user.key, key_len, |
|---|
| 378 | | - ctx_p->user.key + key_len); |
|---|
| 475 | + err = crypto_shash_tfm_digest(ctx_p->shash_tfm, |
|---|
| 476 | + ctx_p->user.key, keylen, |
|---|
| 477 | + ctx_p->user.key + keylen); |
|---|
| 379 | 478 | if (err) { |
|---|
| 380 | 479 | dev_err(dev, "Failed to hash ESSIV key.\n"); |
|---|
| 381 | 480 | return err; |
|---|
| 382 | 481 | } |
|---|
| 482 | + |
|---|
| 483 | + keylen <<= 1; |
|---|
| 383 | 484 | } |
|---|
| 384 | 485 | dma_sync_single_for_device(dev, ctx_p->user.key_dma_addr, |
|---|
| 385 | 486 | max_key_buf_size, DMA_TO_DEVICE); |
|---|
| .. | .. |
|---|
| 389 | 490 | return 0; |
|---|
| 390 | 491 | } |
|---|
| 391 | 492 | |
|---|
| 392 | | -static void cc_setup_cipher_desc(struct crypto_tfm *tfm, |
|---|
| 493 | +static int cc_out_setup_mode(struct cc_cipher_ctx *ctx_p) |
|---|
| 494 | +{ |
|---|
| 495 | + switch (ctx_p->flow_mode) { |
|---|
| 496 | + case S_DIN_to_AES: |
|---|
| 497 | + return S_AES_to_DOUT; |
|---|
| 498 | + case S_DIN_to_DES: |
|---|
| 499 | + return S_DES_to_DOUT; |
|---|
| 500 | + case S_DIN_to_SM4: |
|---|
| 501 | + return S_SM4_to_DOUT; |
|---|
| 502 | + default: |
|---|
| 503 | + return ctx_p->flow_mode; |
|---|
| 504 | + } |
|---|
| 505 | +} |
|---|
| 506 | + |
|---|
| 507 | +static void cc_setup_readiv_desc(struct crypto_tfm *tfm, |
|---|
| 508 | + struct cipher_req_ctx *req_ctx, |
|---|
| 509 | + unsigned int ivsize, struct cc_hw_desc desc[], |
|---|
| 510 | + unsigned int *seq_size) |
|---|
| 511 | +{ |
|---|
| 512 | + struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); |
|---|
| 513 | + struct device *dev = drvdata_to_dev(ctx_p->drvdata); |
|---|
| 514 | + int cipher_mode = ctx_p->cipher_mode; |
|---|
| 515 | + int flow_mode = cc_out_setup_mode(ctx_p); |
|---|
| 516 | + int direction = req_ctx->gen_ctx.op_type; |
|---|
| 517 | + dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr; |
|---|
| 518 | + |
|---|
| 519 | + if (ctx_p->key_type == CC_POLICY_PROTECTED_KEY) |
|---|
| 520 | + return; |
|---|
| 521 | + |
|---|
| 522 | + switch (cipher_mode) { |
|---|
| 523 | + case DRV_CIPHER_ECB: |
|---|
| 524 | + break; |
|---|
| 525 | + case DRV_CIPHER_CBC: |
|---|
| 526 | + case DRV_CIPHER_CBC_CTS: |
|---|
| 527 | + case DRV_CIPHER_CTR: |
|---|
| 528 | + case DRV_CIPHER_OFB: |
|---|
| 529 | + /* Read next IV */ |
|---|
| 530 | + hw_desc_init(&desc[*seq_size]); |
|---|
| 531 | + set_dout_dlli(&desc[*seq_size], iv_dma_addr, ivsize, NS_BIT, 1); |
|---|
| 532 | + set_cipher_config0(&desc[*seq_size], direction); |
|---|
| 533 | + set_flow_mode(&desc[*seq_size], flow_mode); |
|---|
| 534 | + set_cipher_mode(&desc[*seq_size], cipher_mode); |
|---|
| 535 | + if (cipher_mode == DRV_CIPHER_CTR || |
|---|
| 536 | + cipher_mode == DRV_CIPHER_OFB) { |
|---|
| 537 | + set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE1); |
|---|
| 538 | + } else { |
|---|
| 539 | + set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE0); |
|---|
| 540 | + } |
|---|
| 541 | + set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]); |
|---|
| 542 | + (*seq_size)++; |
|---|
| 543 | + break; |
|---|
| 544 | + case DRV_CIPHER_XTS: |
|---|
| 545 | + case DRV_CIPHER_ESSIV: |
|---|
| 546 | + /* IV */ |
|---|
| 547 | + hw_desc_init(&desc[*seq_size]); |
|---|
| 548 | + set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE1); |
|---|
| 549 | + set_cipher_mode(&desc[*seq_size], cipher_mode); |
|---|
| 550 | + set_cipher_config0(&desc[*seq_size], direction); |
|---|
| 551 | + set_flow_mode(&desc[*seq_size], flow_mode); |
|---|
| 552 | + set_dout_dlli(&desc[*seq_size], iv_dma_addr, CC_AES_BLOCK_SIZE, |
|---|
| 553 | + NS_BIT, 1); |
|---|
| 554 | + set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]); |
|---|
| 555 | + (*seq_size)++; |
|---|
| 556 | + break; |
|---|
| 557 | + default: |
|---|
| 558 | + dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode); |
|---|
| 559 | + } |
|---|
| 560 | +} |
|---|
| 561 | + |
|---|
| 562 | + |
|---|
| 563 | +static void cc_setup_state_desc(struct crypto_tfm *tfm, |
|---|
| 393 | 564 | struct cipher_req_ctx *req_ctx, |
|---|
| 394 | 565 | unsigned int ivsize, unsigned int nbytes, |
|---|
| 395 | 566 | struct cc_hw_desc desc[], |
|---|
| .. | .. |
|---|
| 400 | 571 | int cipher_mode = ctx_p->cipher_mode; |
|---|
| 401 | 572 | int flow_mode = ctx_p->flow_mode; |
|---|
| 402 | 573 | int direction = req_ctx->gen_ctx.op_type; |
|---|
| 403 | | - dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr; |
|---|
| 404 | | - unsigned int key_len = ctx_p->keylen; |
|---|
| 405 | 574 | dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr; |
|---|
| 406 | | - unsigned int du_size = nbytes; |
|---|
| 407 | | - |
|---|
| 408 | | - struct cc_crypto_alg *cc_alg = |
|---|
| 409 | | - container_of(tfm->__crt_alg, struct cc_crypto_alg, |
|---|
| 410 | | - skcipher_alg.base); |
|---|
| 411 | | - |
|---|
| 412 | | - if (cc_alg->data_unit) |
|---|
| 413 | | - du_size = cc_alg->data_unit; |
|---|
| 414 | 575 | |
|---|
| 415 | 576 | switch (cipher_mode) { |
|---|
| 577 | + case DRV_CIPHER_ECB: |
|---|
| 578 | + break; |
|---|
| 416 | 579 | case DRV_CIPHER_CBC: |
|---|
| 417 | 580 | case DRV_CIPHER_CBC_CTS: |
|---|
| 418 | 581 | case DRV_CIPHER_CTR: |
|---|
| 419 | 582 | case DRV_CIPHER_OFB: |
|---|
| 420 | | - /* Load cipher state */ |
|---|
| 583 | + /* Load IV */ |
|---|
| 421 | 584 | hw_desc_init(&desc[*seq_size]); |
|---|
| 422 | 585 | set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr, ivsize, |
|---|
| 423 | 586 | NS_BIT); |
|---|
| .. | .. |
|---|
| 431 | 594 | set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE0); |
|---|
| 432 | 595 | } |
|---|
| 433 | 596 | (*seq_size)++; |
|---|
| 434 | | - /*FALLTHROUGH*/ |
|---|
| 435 | | - case DRV_CIPHER_ECB: |
|---|
| 436 | | - /* Load key */ |
|---|
| 437 | | - hw_desc_init(&desc[*seq_size]); |
|---|
| 438 | | - set_cipher_mode(&desc[*seq_size], cipher_mode); |
|---|
| 439 | | - set_cipher_config0(&desc[*seq_size], direction); |
|---|
| 440 | | - if (flow_mode == S_DIN_to_AES) { |
|---|
| 441 | | - if (cc_is_hw_key(tfm)) { |
|---|
| 442 | | - set_hw_crypto_key(&desc[*seq_size], |
|---|
| 443 | | - ctx_p->hw.key1_slot); |
|---|
| 444 | | - } else { |
|---|
| 445 | | - set_din_type(&desc[*seq_size], DMA_DLLI, |
|---|
| 446 | | - key_dma_addr, ((key_len == 24) ? |
|---|
| 447 | | - AES_MAX_KEY_SIZE : |
|---|
| 448 | | - key_len), NS_BIT); |
|---|
| 449 | | - } |
|---|
| 450 | | - set_key_size_aes(&desc[*seq_size], key_len); |
|---|
| 451 | | - } else { |
|---|
| 452 | | - /*des*/ |
|---|
| 453 | | - set_din_type(&desc[*seq_size], DMA_DLLI, key_dma_addr, |
|---|
| 454 | | - key_len, NS_BIT); |
|---|
| 455 | | - set_key_size_des(&desc[*seq_size], key_len); |
|---|
| 456 | | - } |
|---|
| 457 | | - set_flow_mode(&desc[*seq_size], flow_mode); |
|---|
| 458 | | - set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0); |
|---|
| 459 | | - (*seq_size)++; |
|---|
| 460 | 597 | break; |
|---|
| 461 | 598 | case DRV_CIPHER_XTS: |
|---|
| 462 | 599 | case DRV_CIPHER_ESSIV: |
|---|
| 463 | | - case DRV_CIPHER_BITLOCKER: |
|---|
| 464 | | - /* Load AES key */ |
|---|
| 465 | | - hw_desc_init(&desc[*seq_size]); |
|---|
| 466 | | - set_cipher_mode(&desc[*seq_size], cipher_mode); |
|---|
| 467 | | - set_cipher_config0(&desc[*seq_size], direction); |
|---|
| 468 | | - if (cc_is_hw_key(tfm)) { |
|---|
| 469 | | - set_hw_crypto_key(&desc[*seq_size], |
|---|
| 470 | | - ctx_p->hw.key1_slot); |
|---|
| 471 | | - } else { |
|---|
| 472 | | - set_din_type(&desc[*seq_size], DMA_DLLI, key_dma_addr, |
|---|
| 473 | | - (key_len / 2), NS_BIT); |
|---|
| 474 | | - } |
|---|
| 475 | | - set_key_size_aes(&desc[*seq_size], (key_len / 2)); |
|---|
| 476 | | - set_flow_mode(&desc[*seq_size], flow_mode); |
|---|
| 477 | | - set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0); |
|---|
| 478 | | - (*seq_size)++; |
|---|
| 600 | + break; |
|---|
| 601 | + default: |
|---|
| 602 | + dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode); |
|---|
| 603 | + } |
|---|
| 604 | +} |
|---|
| 605 | + |
|---|
| 606 | + |
|---|
| 607 | +static void cc_setup_xex_state_desc(struct crypto_tfm *tfm, |
|---|
| 608 | + struct cipher_req_ctx *req_ctx, |
|---|
| 609 | + unsigned int ivsize, unsigned int nbytes, |
|---|
| 610 | + struct cc_hw_desc desc[], |
|---|
| 611 | + unsigned int *seq_size) |
|---|
| 612 | +{ |
|---|
| 613 | + struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); |
|---|
| 614 | + struct device *dev = drvdata_to_dev(ctx_p->drvdata); |
|---|
| 615 | + int cipher_mode = ctx_p->cipher_mode; |
|---|
| 616 | + int flow_mode = ctx_p->flow_mode; |
|---|
| 617 | + int direction = req_ctx->gen_ctx.op_type; |
|---|
| 618 | + dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr; |
|---|
| 619 | + unsigned int key_len = (ctx_p->keylen / 2); |
|---|
| 620 | + dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr; |
|---|
| 621 | + unsigned int key_offset = key_len; |
|---|
| 622 | + |
|---|
| 623 | + switch (cipher_mode) { |
|---|
| 624 | + case DRV_CIPHER_ECB: |
|---|
| 625 | + break; |
|---|
| 626 | + case DRV_CIPHER_CBC: |
|---|
| 627 | + case DRV_CIPHER_CBC_CTS: |
|---|
| 628 | + case DRV_CIPHER_CTR: |
|---|
| 629 | + case DRV_CIPHER_OFB: |
|---|
| 630 | + break; |
|---|
| 631 | + case DRV_CIPHER_XTS: |
|---|
| 632 | + case DRV_CIPHER_ESSIV: |
|---|
| 633 | + |
|---|
| 634 | + if (cipher_mode == DRV_CIPHER_ESSIV) |
|---|
| 635 | + key_len = SHA256_DIGEST_SIZE; |
|---|
| 479 | 636 | |
|---|
| 480 | 637 | /* load XEX key */ |
|---|
| 481 | 638 | hw_desc_init(&desc[*seq_size]); |
|---|
| 482 | 639 | set_cipher_mode(&desc[*seq_size], cipher_mode); |
|---|
| 483 | 640 | set_cipher_config0(&desc[*seq_size], direction); |
|---|
| 484 | | - if (cc_is_hw_key(tfm)) { |
|---|
| 641 | + if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) { |
|---|
| 485 | 642 | set_hw_crypto_key(&desc[*seq_size], |
|---|
| 486 | 643 | ctx_p->hw.key2_slot); |
|---|
| 487 | 644 | } else { |
|---|
| 488 | 645 | set_din_type(&desc[*seq_size], DMA_DLLI, |
|---|
| 489 | | - (key_dma_addr + (key_len / 2)), |
|---|
| 490 | | - (key_len / 2), NS_BIT); |
|---|
| 646 | + (key_dma_addr + key_offset), |
|---|
| 647 | + key_len, NS_BIT); |
|---|
| 491 | 648 | } |
|---|
| 492 | | - set_xex_data_unit_size(&desc[*seq_size], du_size); |
|---|
| 649 | + set_xex_data_unit_size(&desc[*seq_size], nbytes); |
|---|
| 493 | 650 | set_flow_mode(&desc[*seq_size], S_DIN_to_AES2); |
|---|
| 494 | | - set_key_size_aes(&desc[*seq_size], (key_len / 2)); |
|---|
| 651 | + set_key_size_aes(&desc[*seq_size], key_len); |
|---|
| 495 | 652 | set_setup_mode(&desc[*seq_size], SETUP_LOAD_XEX_KEY); |
|---|
| 496 | 653 | (*seq_size)++; |
|---|
| 497 | 654 | |
|---|
| 498 | | - /* Set state */ |
|---|
| 655 | + /* Load IV */ |
|---|
| 499 | 656 | hw_desc_init(&desc[*seq_size]); |
|---|
| 500 | 657 | set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1); |
|---|
| 501 | 658 | set_cipher_mode(&desc[*seq_size], cipher_mode); |
|---|
| 502 | 659 | set_cipher_config0(&desc[*seq_size], direction); |
|---|
| 503 | | - set_key_size_aes(&desc[*seq_size], (key_len / 2)); |
|---|
| 660 | + set_key_size_aes(&desc[*seq_size], key_len); |
|---|
| 504 | 661 | set_flow_mode(&desc[*seq_size], flow_mode); |
|---|
| 505 | 662 | set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr, |
|---|
| 506 | 663 | CC_AES_BLOCK_SIZE, NS_BIT); |
|---|
| .. | .. |
|---|
| 511 | 668 | } |
|---|
| 512 | 669 | } |
|---|
| 513 | 670 | |
|---|
| 514 | | -static void cc_setup_cipher_data(struct crypto_tfm *tfm, |
|---|
| 515 | | - struct cipher_req_ctx *req_ctx, |
|---|
| 516 | | - struct scatterlist *dst, |
|---|
| 517 | | - struct scatterlist *src, unsigned int nbytes, |
|---|
| 518 | | - void *areq, struct cc_hw_desc desc[], |
|---|
| 519 | | - unsigned int *seq_size) |
|---|
| 671 | +static int cc_out_flow_mode(struct cc_cipher_ctx *ctx_p) |
|---|
| 672 | +{ |
|---|
| 673 | + switch (ctx_p->flow_mode) { |
|---|
| 674 | + case S_DIN_to_AES: |
|---|
| 675 | + return DIN_AES_DOUT; |
|---|
| 676 | + case S_DIN_to_DES: |
|---|
| 677 | + return DIN_DES_DOUT; |
|---|
| 678 | + case S_DIN_to_SM4: |
|---|
| 679 | + return DIN_SM4_DOUT; |
|---|
| 680 | + default: |
|---|
| 681 | + return ctx_p->flow_mode; |
|---|
| 682 | + } |
|---|
| 683 | +} |
|---|
| 684 | + |
|---|
| 685 | +static void cc_setup_key_desc(struct crypto_tfm *tfm, |
|---|
| 686 | + struct cipher_req_ctx *req_ctx, |
|---|
| 687 | + unsigned int nbytes, struct cc_hw_desc desc[], |
|---|
| 688 | + unsigned int *seq_size) |
|---|
| 520 | 689 | { |
|---|
| 521 | 690 | struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); |
|---|
| 522 | 691 | struct device *dev = drvdata_to_dev(ctx_p->drvdata); |
|---|
| 523 | | - unsigned int flow_mode = ctx_p->flow_mode; |
|---|
| 692 | + int cipher_mode = ctx_p->cipher_mode; |
|---|
| 693 | + int flow_mode = ctx_p->flow_mode; |
|---|
| 694 | + int direction = req_ctx->gen_ctx.op_type; |
|---|
| 695 | + dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr; |
|---|
| 696 | + unsigned int key_len = ctx_p->keylen; |
|---|
| 697 | + unsigned int din_size; |
|---|
| 524 | 698 | |
|---|
| 525 | | - switch (ctx_p->flow_mode) { |
|---|
| 526 | | - case S_DIN_to_AES: |
|---|
| 527 | | - flow_mode = DIN_AES_DOUT; |
|---|
| 699 | + switch (cipher_mode) { |
|---|
| 700 | + case DRV_CIPHER_CBC: |
|---|
| 701 | + case DRV_CIPHER_CBC_CTS: |
|---|
| 702 | + case DRV_CIPHER_CTR: |
|---|
| 703 | + case DRV_CIPHER_OFB: |
|---|
| 704 | + case DRV_CIPHER_ECB: |
|---|
| 705 | + /* Load key */ |
|---|
| 706 | + hw_desc_init(&desc[*seq_size]); |
|---|
| 707 | + set_cipher_mode(&desc[*seq_size], cipher_mode); |
|---|
| 708 | + set_cipher_config0(&desc[*seq_size], direction); |
|---|
| 709 | + |
|---|
| 710 | + if (cc_key_type(tfm) == CC_POLICY_PROTECTED_KEY) { |
|---|
| 711 | + /* We use the AES key size coding for all CPP algs */ |
|---|
| 712 | + set_key_size_aes(&desc[*seq_size], key_len); |
|---|
| 713 | + set_cpp_crypto_key(&desc[*seq_size], ctx_p->cpp.slot); |
|---|
| 714 | + flow_mode = cc_out_flow_mode(ctx_p); |
|---|
| 715 | + } else { |
|---|
| 716 | + if (flow_mode == S_DIN_to_AES) { |
|---|
| 717 | + if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) { |
|---|
| 718 | + set_hw_crypto_key(&desc[*seq_size], |
|---|
| 719 | + ctx_p->hw.key1_slot); |
|---|
| 720 | + } else { |
|---|
| 721 | + /* CC_UNPROTECTED_KEY |
|---|
| 722 | + * Invalid keys are filtered out in |
|---|
| 723 | + * sethkey() |
|---|
| 724 | + */ |
|---|
| 725 | + din_size = (key_len == 24) ? |
|---|
| 726 | + AES_MAX_KEY_SIZE : key_len; |
|---|
| 727 | + |
|---|
| 728 | + set_din_type(&desc[*seq_size], DMA_DLLI, |
|---|
| 729 | + key_dma_addr, din_size, |
|---|
| 730 | + NS_BIT); |
|---|
| 731 | + } |
|---|
| 732 | + set_key_size_aes(&desc[*seq_size], key_len); |
|---|
| 733 | + } else { |
|---|
| 734 | + /*des*/ |
|---|
| 735 | + set_din_type(&desc[*seq_size], DMA_DLLI, |
|---|
| 736 | + key_dma_addr, key_len, NS_BIT); |
|---|
| 737 | + set_key_size_des(&desc[*seq_size], key_len); |
|---|
| 738 | + } |
|---|
| 739 | + set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0); |
|---|
| 740 | + } |
|---|
| 741 | + set_flow_mode(&desc[*seq_size], flow_mode); |
|---|
| 742 | + (*seq_size)++; |
|---|
| 528 | 743 | break; |
|---|
| 529 | | - case S_DIN_to_DES: |
|---|
| 530 | | - flow_mode = DIN_DES_DOUT; |
|---|
| 744 | + case DRV_CIPHER_XTS: |
|---|
| 745 | + case DRV_CIPHER_ESSIV: |
|---|
| 746 | + /* Load AES key */ |
|---|
| 747 | + hw_desc_init(&desc[*seq_size]); |
|---|
| 748 | + set_cipher_mode(&desc[*seq_size], cipher_mode); |
|---|
| 749 | + set_cipher_config0(&desc[*seq_size], direction); |
|---|
| 750 | + if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) { |
|---|
| 751 | + set_hw_crypto_key(&desc[*seq_size], |
|---|
| 752 | + ctx_p->hw.key1_slot); |
|---|
| 753 | + } else { |
|---|
| 754 | + set_din_type(&desc[*seq_size], DMA_DLLI, key_dma_addr, |
|---|
| 755 | + (key_len / 2), NS_BIT); |
|---|
| 756 | + } |
|---|
| 757 | + set_key_size_aes(&desc[*seq_size], (key_len / 2)); |
|---|
| 758 | + set_flow_mode(&desc[*seq_size], flow_mode); |
|---|
| 759 | + set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0); |
|---|
| 760 | + (*seq_size)++; |
|---|
| 531 | 761 | break; |
|---|
| 532 | 762 | default: |
|---|
| 533 | | - dev_err(dev, "invalid flow mode, flow_mode = %d\n", flow_mode); |
|---|
| 534 | | - return; |
|---|
| 763 | + dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode); |
|---|
| 535 | 764 | } |
|---|
| 765 | +} |
|---|
| 766 | + |
|---|
| 767 | +static void cc_setup_mlli_desc(struct crypto_tfm *tfm, |
|---|
| 768 | + struct cipher_req_ctx *req_ctx, |
|---|
| 769 | + struct scatterlist *dst, struct scatterlist *src, |
|---|
| 770 | + unsigned int nbytes, void *areq, |
|---|
| 771 | + struct cc_hw_desc desc[], unsigned int *seq_size) |
|---|
| 772 | +{ |
|---|
| 773 | + struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); |
|---|
| 774 | + struct device *dev = drvdata_to_dev(ctx_p->drvdata); |
|---|
| 775 | + |
|---|
| 776 | + if (req_ctx->dma_buf_type == CC_DMA_BUF_MLLI) { |
|---|
| 777 | + /* bypass */ |
|---|
| 778 | + dev_dbg(dev, " bypass params addr %pad length 0x%X addr 0x%08X\n", |
|---|
| 779 | + &req_ctx->mlli_params.mlli_dma_addr, |
|---|
| 780 | + req_ctx->mlli_params.mlli_len, |
|---|
| 781 | + ctx_p->drvdata->mlli_sram_addr); |
|---|
| 782 | + hw_desc_init(&desc[*seq_size]); |
|---|
| 783 | + set_din_type(&desc[*seq_size], DMA_DLLI, |
|---|
| 784 | + req_ctx->mlli_params.mlli_dma_addr, |
|---|
| 785 | + req_ctx->mlli_params.mlli_len, NS_BIT); |
|---|
| 786 | + set_dout_sram(&desc[*seq_size], |
|---|
| 787 | + ctx_p->drvdata->mlli_sram_addr, |
|---|
| 788 | + req_ctx->mlli_params.mlli_len); |
|---|
| 789 | + set_flow_mode(&desc[*seq_size], BYPASS); |
|---|
| 790 | + (*seq_size)++; |
|---|
| 791 | + } |
|---|
| 792 | +} |
|---|
| 793 | + |
|---|
| 794 | +static void cc_setup_flow_desc(struct crypto_tfm *tfm, |
|---|
| 795 | + struct cipher_req_ctx *req_ctx, |
|---|
| 796 | + struct scatterlist *dst, struct scatterlist *src, |
|---|
| 797 | + unsigned int nbytes, struct cc_hw_desc desc[], |
|---|
| 798 | + unsigned int *seq_size) |
|---|
| 799 | +{ |
|---|
| 800 | + struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); |
|---|
| 801 | + struct device *dev = drvdata_to_dev(ctx_p->drvdata); |
|---|
| 802 | + unsigned int flow_mode = cc_out_flow_mode(ctx_p); |
|---|
| 803 | + bool last_desc = (ctx_p->key_type == CC_POLICY_PROTECTED_KEY || |
|---|
| 804 | + ctx_p->cipher_mode == DRV_CIPHER_ECB); |
|---|
| 805 | + |
|---|
| 536 | 806 | /* Process */ |
|---|
| 537 | 807 | if (req_ctx->dma_buf_type == CC_DMA_BUF_DLLI) { |
|---|
| 538 | 808 | dev_dbg(dev, " data params addr %pad length 0x%X\n", |
|---|
| .. | .. |
|---|
| 543 | 813 | set_din_type(&desc[*seq_size], DMA_DLLI, sg_dma_address(src), |
|---|
| 544 | 814 | nbytes, NS_BIT); |
|---|
| 545 | 815 | set_dout_dlli(&desc[*seq_size], sg_dma_address(dst), |
|---|
| 546 | | - nbytes, NS_BIT, (!areq ? 0 : 1)); |
|---|
| 547 | | - if (areq) |
|---|
| 816 | + nbytes, NS_BIT, (!last_desc ? 0 : 1)); |
|---|
| 817 | + if (last_desc) |
|---|
| 548 | 818 | set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]); |
|---|
| 549 | 819 | |
|---|
| 550 | 820 | set_flow_mode(&desc[*seq_size], flow_mode); |
|---|
| 551 | 821 | (*seq_size)++; |
|---|
| 552 | 822 | } else { |
|---|
| 553 | | - /* bypass */ |
|---|
| 554 | | - dev_dbg(dev, " bypass params addr %pad length 0x%X addr 0x%08X\n", |
|---|
| 555 | | - &req_ctx->mlli_params.mlli_dma_addr, |
|---|
| 556 | | - req_ctx->mlli_params.mlli_len, |
|---|
| 557 | | - (unsigned int)ctx_p->drvdata->mlli_sram_addr); |
|---|
| 558 | | - hw_desc_init(&desc[*seq_size]); |
|---|
| 559 | | - set_din_type(&desc[*seq_size], DMA_DLLI, |
|---|
| 560 | | - req_ctx->mlli_params.mlli_dma_addr, |
|---|
| 561 | | - req_ctx->mlli_params.mlli_len, NS_BIT); |
|---|
| 562 | | - set_dout_sram(&desc[*seq_size], |
|---|
| 563 | | - ctx_p->drvdata->mlli_sram_addr, |
|---|
| 564 | | - req_ctx->mlli_params.mlli_len); |
|---|
| 565 | | - set_flow_mode(&desc[*seq_size], BYPASS); |
|---|
| 566 | | - (*seq_size)++; |
|---|
| 567 | | - |
|---|
| 568 | 823 | hw_desc_init(&desc[*seq_size]); |
|---|
| 569 | 824 | set_din_type(&desc[*seq_size], DMA_MLLI, |
|---|
| 570 | 825 | ctx_p->drvdata->mlli_sram_addr, |
|---|
| 571 | 826 | req_ctx->in_mlli_nents, NS_BIT); |
|---|
| 572 | 827 | if (req_ctx->out_nents == 0) { |
|---|
| 573 | 828 | dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n", |
|---|
| 574 | | - (unsigned int)ctx_p->drvdata->mlli_sram_addr, |
|---|
| 575 | | - (unsigned int)ctx_p->drvdata->mlli_sram_addr); |
|---|
| 829 | + ctx_p->drvdata->mlli_sram_addr, |
|---|
| 830 | + ctx_p->drvdata->mlli_sram_addr); |
|---|
| 576 | 831 | set_dout_mlli(&desc[*seq_size], |
|---|
| 577 | 832 | ctx_p->drvdata->mlli_sram_addr, |
|---|
| 578 | 833 | req_ctx->in_mlli_nents, NS_BIT, |
|---|
| 579 | | - (!areq ? 0 : 1)); |
|---|
| 834 | + (!last_desc ? 0 : 1)); |
|---|
| 580 | 835 | } else { |
|---|
| 581 | 836 | dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n", |
|---|
| 582 | | - (unsigned int)ctx_p->drvdata->mlli_sram_addr, |
|---|
| 583 | | - (unsigned int)ctx_p->drvdata->mlli_sram_addr + |
|---|
| 837 | + ctx_p->drvdata->mlli_sram_addr, |
|---|
| 838 | + ctx_p->drvdata->mlli_sram_addr + |
|---|
| 584 | 839 | (u32)LLI_ENTRY_BYTE_SIZE * req_ctx->in_nents); |
|---|
| 585 | 840 | set_dout_mlli(&desc[*seq_size], |
|---|
| 586 | 841 | (ctx_p->drvdata->mlli_sram_addr + |
|---|
| 587 | 842 | (LLI_ENTRY_BYTE_SIZE * |
|---|
| 588 | 843 | req_ctx->in_mlli_nents)), |
|---|
| 589 | 844 | req_ctx->out_mlli_nents, NS_BIT, |
|---|
| 590 | | - (!areq ? 0 : 1)); |
|---|
| 845 | + (!last_desc ? 0 : 1)); |
|---|
| 591 | 846 | } |
|---|
| 592 | | - if (areq) |
|---|
| 847 | + if (last_desc) |
|---|
| 593 | 848 | set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]); |
|---|
| 594 | 849 | |
|---|
| 595 | 850 | set_flow_mode(&desc[*seq_size], flow_mode); |
|---|
| 596 | 851 | (*seq_size)++; |
|---|
| 597 | | - } |
|---|
| 598 | | -} |
|---|
| 599 | | - |
|---|
| 600 | | -/* |
|---|
| 601 | | - * Update a CTR-AES 128 bit counter |
|---|
| 602 | | - */ |
|---|
| 603 | | -static void cc_update_ctr(u8 *ctr, unsigned int increment) |
|---|
| 604 | | -{ |
|---|
| 605 | | - if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) || |
|---|
| 606 | | - IS_ALIGNED((unsigned long)ctr, 8)) { |
|---|
| 607 | | - |
|---|
| 608 | | - __be64 *high_be = (__be64 *)ctr; |
|---|
| 609 | | - __be64 *low_be = high_be + 1; |
|---|
| 610 | | - u64 orig_low = __be64_to_cpu(*low_be); |
|---|
| 611 | | - u64 new_low = orig_low + (u64)increment; |
|---|
| 612 | | - |
|---|
| 613 | | - *low_be = __cpu_to_be64(new_low); |
|---|
| 614 | | - |
|---|
| 615 | | - if (new_low < orig_low) |
|---|
| 616 | | - *high_be = __cpu_to_be64(__be64_to_cpu(*high_be) + 1); |
|---|
| 617 | | - } else { |
|---|
| 618 | | - u8 *pos = (ctr + AES_BLOCK_SIZE); |
|---|
| 619 | | - u8 val; |
|---|
| 620 | | - unsigned int size; |
|---|
| 621 | | - |
|---|
| 622 | | - for (; increment; increment--) |
|---|
| 623 | | - for (size = AES_BLOCK_SIZE; size; size--) { |
|---|
| 624 | | - val = *--pos + 1; |
|---|
| 625 | | - *pos = val; |
|---|
| 626 | | - if (val) |
|---|
| 627 | | - break; |
|---|
| 628 | | - } |
|---|
| 629 | 852 | } |
|---|
| 630 | 853 | } |
|---|
| 631 | 854 | |
|---|
| .. | .. |
|---|
| 636 | 859 | struct scatterlist *src = req->src; |
|---|
| 637 | 860 | struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req); |
|---|
| 638 | 861 | struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req); |
|---|
| 639 | | - struct crypto_tfm *tfm = crypto_skcipher_tfm(sk_tfm); |
|---|
| 640 | | - struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); |
|---|
| 641 | 862 | unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm); |
|---|
| 642 | | - unsigned int len; |
|---|
| 643 | 863 | |
|---|
| 644 | | - cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst); |
|---|
| 645 | | - |
|---|
| 646 | | - switch (ctx_p->cipher_mode) { |
|---|
| 647 | | - case DRV_CIPHER_CBC: |
|---|
| 648 | | - /* |
|---|
| 649 | | - * The crypto API expects us to set the req->iv to the last |
|---|
| 650 | | - * ciphertext block. For encrypt, simply copy from the result. |
|---|
| 651 | | - * For decrypt, we must copy from a saved buffer since this |
|---|
| 652 | | - * could be an in-place decryption operation and the src is |
|---|
| 653 | | - * lost by this point. |
|---|
| 654 | | - */ |
|---|
| 655 | | - if (req_ctx->gen_ctx.op_type == DRV_CRYPTO_DIRECTION_DECRYPT) { |
|---|
| 656 | | - memcpy(req->iv, req_ctx->backup_info, ivsize); |
|---|
| 657 | | - kzfree(req_ctx->backup_info); |
|---|
| 658 | | - } else if (!err) { |
|---|
| 659 | | - len = req->cryptlen - ivsize; |
|---|
| 660 | | - scatterwalk_map_and_copy(req->iv, req->dst, len, |
|---|
| 661 | | - ivsize, 0); |
|---|
| 662 | | - } |
|---|
| 663 | | - break; |
|---|
| 664 | | - |
|---|
| 665 | | - case DRV_CIPHER_CTR: |
|---|
| 666 | | - /* Compute the counter of the last block */ |
|---|
| 667 | | - len = ALIGN(req->cryptlen, AES_BLOCK_SIZE) / AES_BLOCK_SIZE; |
|---|
| 668 | | - cc_update_ctr((u8 *)req->iv, len); |
|---|
| 669 | | - break; |
|---|
| 670 | | - |
|---|
| 671 | | - default: |
|---|
| 672 | | - break; |
|---|
| 864 | + if (err != -EINPROGRESS) { |
|---|
| 865 | + /* Not a BACKLOG notification */ |
|---|
| 866 | + cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst); |
|---|
| 867 | + memcpy(req->iv, req_ctx->iv, ivsize); |
|---|
| 868 | + kfree_sensitive(req_ctx->iv); |
|---|
| 673 | 869 | } |
|---|
| 674 | | - |
|---|
| 675 | | - kzfree(req_ctx->iv); |
|---|
| 676 | 870 | |
|---|
| 677 | 871 | skcipher_request_complete(req, err); |
|---|
| 678 | 872 | } |
|---|
| .. | .. |
|---|
| 690 | 884 | void *iv = req->iv; |
|---|
| 691 | 885 | struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); |
|---|
| 692 | 886 | struct device *dev = drvdata_to_dev(ctx_p->drvdata); |
|---|
| 693 | | - struct cc_hw_desc desc[MAX_ABLKCIPHER_SEQ_LEN]; |
|---|
| 887 | + struct cc_hw_desc desc[MAX_SKCIPHER_SEQ_LEN]; |
|---|
| 694 | 888 | struct cc_crypto_req cc_req = {}; |
|---|
| 695 | 889 | int rc; |
|---|
| 696 | 890 | unsigned int seq_len = 0; |
|---|
| .. | .. |
|---|
| 702 | 896 | |
|---|
| 703 | 897 | /* STAT_PHASE_0: Init and sanity checks */ |
|---|
| 704 | 898 | |
|---|
| 705 | | - /* TODO: check data length according to mode */ |
|---|
| 706 | 899 | if (validate_data_size(ctx_p, nbytes)) { |
|---|
| 707 | | - dev_err(dev, "Unsupported data size %d.\n", nbytes); |
|---|
| 708 | | - crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_BLOCK_LEN); |
|---|
| 900 | + dev_dbg(dev, "Unsupported data size %d.\n", nbytes); |
|---|
| 709 | 901 | rc = -EINVAL; |
|---|
| 710 | 902 | goto exit_process; |
|---|
| 711 | 903 | } |
|---|
| .. | .. |
|---|
| 713 | 905 | /* No data to process is valid */ |
|---|
| 714 | 906 | rc = 0; |
|---|
| 715 | 907 | goto exit_process; |
|---|
| 908 | + } |
|---|
| 909 | + |
|---|
| 910 | + if (ctx_p->fallback_on) { |
|---|
| 911 | + struct skcipher_request *subreq = skcipher_request_ctx(req); |
|---|
| 912 | + |
|---|
| 913 | + *subreq = *req; |
|---|
| 914 | + skcipher_request_set_tfm(subreq, ctx_p->fallback_tfm); |
|---|
| 915 | + if (direction == DRV_CRYPTO_DIRECTION_ENCRYPT) |
|---|
| 916 | + return crypto_skcipher_encrypt(subreq); |
|---|
| 917 | + else |
|---|
| 918 | + return crypto_skcipher_decrypt(subreq); |
|---|
| 716 | 919 | } |
|---|
| 717 | 920 | |
|---|
| 718 | 921 | /* The IV we are handed may be allocated from the stack so |
|---|
| .. | .. |
|---|
| 725 | 928 | } |
|---|
| 726 | 929 | |
|---|
| 727 | 930 | /* Setup request structure */ |
|---|
| 728 | | - cc_req.user_cb = (void *)cc_cipher_complete; |
|---|
| 729 | | - cc_req.user_arg = (void *)req; |
|---|
| 931 | + cc_req.user_cb = cc_cipher_complete; |
|---|
| 932 | + cc_req.user_arg = req; |
|---|
| 933 | + |
|---|
| 934 | + /* Setup CPP operation details */ |
|---|
| 935 | + if (ctx_p->key_type == CC_POLICY_PROTECTED_KEY) { |
|---|
| 936 | + cc_req.cpp.is_cpp = true; |
|---|
| 937 | + cc_req.cpp.alg = ctx_p->cpp.alg; |
|---|
| 938 | + cc_req.cpp.slot = ctx_p->cpp.slot; |
|---|
| 939 | + } |
|---|
| 730 | 940 | |
|---|
| 731 | 941 | /* Setup request context */ |
|---|
| 732 | 942 | req_ctx->gen_ctx.op_type = direction; |
|---|
| .. | .. |
|---|
| 742 | 952 | |
|---|
| 743 | 953 | /* STAT_PHASE_2: Create sequence */ |
|---|
| 744 | 954 | |
|---|
| 745 | | - /* Setup processing */ |
|---|
| 746 | | - cc_setup_cipher_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len); |
|---|
| 955 | + /* Setup state (IV) */ |
|---|
| 956 | + cc_setup_state_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len); |
|---|
| 957 | + /* Setup MLLI line, if needed */ |
|---|
| 958 | + cc_setup_mlli_desc(tfm, req_ctx, dst, src, nbytes, req, desc, &seq_len); |
|---|
| 959 | + /* Setup key */ |
|---|
| 960 | + cc_setup_key_desc(tfm, req_ctx, nbytes, desc, &seq_len); |
|---|
| 961 | + /* Setup state (IV and XEX key) */ |
|---|
| 962 | + cc_setup_xex_state_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len); |
|---|
| 747 | 963 | /* Data processing */ |
|---|
| 748 | | - cc_setup_cipher_data(tfm, req_ctx, dst, src, nbytes, req, desc, |
|---|
| 749 | | - &seq_len); |
|---|
| 964 | + cc_setup_flow_desc(tfm, req_ctx, dst, src, nbytes, desc, &seq_len); |
|---|
| 965 | + /* Read next IV */ |
|---|
| 966 | + cc_setup_readiv_desc(tfm, req_ctx, ivsize, desc, &seq_len); |
|---|
| 750 | 967 | |
|---|
| 751 | 968 | /* STAT_PHASE_3: Lock HW and push sequence */ |
|---|
| 752 | 969 | |
|---|
| .. | .. |
|---|
| 761 | 978 | |
|---|
| 762 | 979 | exit_process: |
|---|
| 763 | 980 | if (rc != -EINPROGRESS && rc != -EBUSY) { |
|---|
| 764 | | - kzfree(req_ctx->backup_info); |
|---|
| 765 | | - kzfree(req_ctx->iv); |
|---|
| 981 | + kfree_sensitive(req_ctx->iv); |
|---|
| 766 | 982 | } |
|---|
| 767 | 983 | |
|---|
| 768 | 984 | return rc; |
|---|
| .. | .. |
|---|
| 779 | 995 | |
|---|
| 780 | 996 | static int cc_cipher_decrypt(struct skcipher_request *req) |
|---|
| 781 | 997 | { |
|---|
| 782 | | - struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req); |
|---|
| 783 | | - struct crypto_tfm *tfm = crypto_skcipher_tfm(sk_tfm); |
|---|
| 784 | | - struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); |
|---|
| 785 | 998 | struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req); |
|---|
| 786 | | - unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm); |
|---|
| 787 | | - gfp_t flags = cc_gfp_flags(&req->base); |
|---|
| 788 | | - unsigned int len; |
|---|
| 789 | 999 | |
|---|
| 790 | 1000 | memset(req_ctx, 0, sizeof(*req_ctx)); |
|---|
| 791 | | - |
|---|
| 792 | | - if ((ctx_p->cipher_mode == DRV_CIPHER_CBC) && |
|---|
| 793 | | - (req->cryptlen >= ivsize)) { |
|---|
| 794 | | - |
|---|
| 795 | | - /* Allocate and save the last IV sized bytes of the source, |
|---|
| 796 | | - * which will be lost in case of in-place decryption. |
|---|
| 797 | | - */ |
|---|
| 798 | | - req_ctx->backup_info = kzalloc(ivsize, flags); |
|---|
| 799 | | - if (!req_ctx->backup_info) |
|---|
| 800 | | - return -ENOMEM; |
|---|
| 801 | | - |
|---|
| 802 | | - len = req->cryptlen - ivsize; |
|---|
| 803 | | - scatterwalk_map_and_copy(req_ctx->backup_info, req->src, len, |
|---|
| 804 | | - ivsize, 0); |
|---|
| 805 | | - } |
|---|
| 806 | 1001 | |
|---|
| 807 | 1002 | return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_DECRYPT); |
|---|
| 808 | 1003 | } |
|---|
| .. | .. |
|---|
| 812 | 1007 | { |
|---|
| 813 | 1008 | .name = "xts(paes)", |
|---|
| 814 | 1009 | .driver_name = "xts-paes-ccree", |
|---|
| 815 | | - .blocksize = AES_BLOCK_SIZE, |
|---|
| 1010 | + .blocksize = 1, |
|---|
| 816 | 1011 | .template_skcipher = { |
|---|
| 817 | 1012 | .setkey = cc_cipher_sethkey, |
|---|
| 818 | 1013 | .encrypt = cc_cipher_encrypt, |
|---|
| .. | .. |
|---|
| 824 | 1019 | .cipher_mode = DRV_CIPHER_XTS, |
|---|
| 825 | 1020 | .flow_mode = S_DIN_to_AES, |
|---|
| 826 | 1021 | .min_hw_rev = CC_HW_REV_630, |
|---|
| 1022 | + .std_body = CC_STD_NIST, |
|---|
| 1023 | + .sec_func = true, |
|---|
| 827 | 1024 | }, |
|---|
| 828 | 1025 | { |
|---|
| 829 | | - .name = "xts512(paes)", |
|---|
| 830 | | - .driver_name = "xts-paes-du512-ccree", |
|---|
| 831 | | - .blocksize = AES_BLOCK_SIZE, |
|---|
| 832 | | - .template_skcipher = { |
|---|
| 833 | | - .setkey = cc_cipher_sethkey, |
|---|
| 834 | | - .encrypt = cc_cipher_encrypt, |
|---|
| 835 | | - .decrypt = cc_cipher_decrypt, |
|---|
| 836 | | - .min_keysize = CC_HW_KEY_SIZE, |
|---|
| 837 | | - .max_keysize = CC_HW_KEY_SIZE, |
|---|
| 838 | | - .ivsize = AES_BLOCK_SIZE, |
|---|
| 839 | | - }, |
|---|
| 840 | | - .cipher_mode = DRV_CIPHER_XTS, |
|---|
| 841 | | - .flow_mode = S_DIN_to_AES, |
|---|
| 842 | | - .data_unit = 512, |
|---|
| 843 | | - .min_hw_rev = CC_HW_REV_712, |
|---|
| 844 | | - }, |
|---|
| 845 | | - { |
|---|
| 846 | | - .name = "xts4096(paes)", |
|---|
| 847 | | - .driver_name = "xts-paes-du4096-ccree", |
|---|
| 848 | | - .blocksize = AES_BLOCK_SIZE, |
|---|
| 849 | | - .template_skcipher = { |
|---|
| 850 | | - .setkey = cc_cipher_sethkey, |
|---|
| 851 | | - .encrypt = cc_cipher_encrypt, |
|---|
| 852 | | - .decrypt = cc_cipher_decrypt, |
|---|
| 853 | | - .min_keysize = CC_HW_KEY_SIZE, |
|---|
| 854 | | - .max_keysize = CC_HW_KEY_SIZE, |
|---|
| 855 | | - .ivsize = AES_BLOCK_SIZE, |
|---|
| 856 | | - }, |
|---|
| 857 | | - .cipher_mode = DRV_CIPHER_XTS, |
|---|
| 858 | | - .flow_mode = S_DIN_to_AES, |
|---|
| 859 | | - .data_unit = 4096, |
|---|
| 860 | | - .min_hw_rev = CC_HW_REV_712, |
|---|
| 861 | | - }, |
|---|
| 862 | | - { |
|---|
| 863 | | - .name = "essiv(paes)", |
|---|
| 1026 | + .name = "essiv(cbc(paes),sha256)", |
|---|
| 864 | 1027 | .driver_name = "essiv-paes-ccree", |
|---|
| 865 | 1028 | .blocksize = AES_BLOCK_SIZE, |
|---|
| 866 | 1029 | .template_skcipher = { |
|---|
| .. | .. |
|---|
| 874 | 1037 | .cipher_mode = DRV_CIPHER_ESSIV, |
|---|
| 875 | 1038 | .flow_mode = S_DIN_to_AES, |
|---|
| 876 | 1039 | .min_hw_rev = CC_HW_REV_712, |
|---|
| 877 | | - }, |
|---|
| 878 | | - { |
|---|
| 879 | | - .name = "essiv512(paes)", |
|---|
| 880 | | - .driver_name = "essiv-paes-du512-ccree", |
|---|
| 881 | | - .blocksize = AES_BLOCK_SIZE, |
|---|
| 882 | | - .template_skcipher = { |
|---|
| 883 | | - .setkey = cc_cipher_sethkey, |
|---|
| 884 | | - .encrypt = cc_cipher_encrypt, |
|---|
| 885 | | - .decrypt = cc_cipher_decrypt, |
|---|
| 886 | | - .min_keysize = CC_HW_KEY_SIZE, |
|---|
| 887 | | - .max_keysize = CC_HW_KEY_SIZE, |
|---|
| 888 | | - .ivsize = AES_BLOCK_SIZE, |
|---|
| 889 | | - }, |
|---|
| 890 | | - .cipher_mode = DRV_CIPHER_ESSIV, |
|---|
| 891 | | - .flow_mode = S_DIN_to_AES, |
|---|
| 892 | | - .data_unit = 512, |
|---|
| 893 | | - .min_hw_rev = CC_HW_REV_712, |
|---|
| 894 | | - }, |
|---|
| 895 | | - { |
|---|
| 896 | | - .name = "essiv4096(paes)", |
|---|
| 897 | | - .driver_name = "essiv-paes-du4096-ccree", |
|---|
| 898 | | - .blocksize = AES_BLOCK_SIZE, |
|---|
| 899 | | - .template_skcipher = { |
|---|
| 900 | | - .setkey = cc_cipher_sethkey, |
|---|
| 901 | | - .encrypt = cc_cipher_encrypt, |
|---|
| 902 | | - .decrypt = cc_cipher_decrypt, |
|---|
| 903 | | - .min_keysize = CC_HW_KEY_SIZE, |
|---|
| 904 | | - .max_keysize = CC_HW_KEY_SIZE, |
|---|
| 905 | | - .ivsize = AES_BLOCK_SIZE, |
|---|
| 906 | | - }, |
|---|
| 907 | | - .cipher_mode = DRV_CIPHER_ESSIV, |
|---|
| 908 | | - .flow_mode = S_DIN_to_AES, |
|---|
| 909 | | - .data_unit = 4096, |
|---|
| 910 | | - .min_hw_rev = CC_HW_REV_712, |
|---|
| 911 | | - }, |
|---|
| 912 | | - { |
|---|
| 913 | | - .name = "bitlocker(paes)", |
|---|
| 914 | | - .driver_name = "bitlocker-paes-ccree", |
|---|
| 915 | | - .blocksize = AES_BLOCK_SIZE, |
|---|
| 916 | | - .template_skcipher = { |
|---|
| 917 | | - .setkey = cc_cipher_sethkey, |
|---|
| 918 | | - .encrypt = cc_cipher_encrypt, |
|---|
| 919 | | - .decrypt = cc_cipher_decrypt, |
|---|
| 920 | | - .min_keysize = CC_HW_KEY_SIZE, |
|---|
| 921 | | - .max_keysize = CC_HW_KEY_SIZE, |
|---|
| 922 | | - .ivsize = AES_BLOCK_SIZE, |
|---|
| 923 | | - }, |
|---|
| 924 | | - .cipher_mode = DRV_CIPHER_BITLOCKER, |
|---|
| 925 | | - .flow_mode = S_DIN_to_AES, |
|---|
| 926 | | - .min_hw_rev = CC_HW_REV_712, |
|---|
| 927 | | - }, |
|---|
| 928 | | - { |
|---|
| 929 | | - .name = "bitlocker512(paes)", |
|---|
| 930 | | - .driver_name = "bitlocker-paes-du512-ccree", |
|---|
| 931 | | - .blocksize = AES_BLOCK_SIZE, |
|---|
| 932 | | - .template_skcipher = { |
|---|
| 933 | | - .setkey = cc_cipher_sethkey, |
|---|
| 934 | | - .encrypt = cc_cipher_encrypt, |
|---|
| 935 | | - .decrypt = cc_cipher_decrypt, |
|---|
| 936 | | - .min_keysize = CC_HW_KEY_SIZE, |
|---|
| 937 | | - .max_keysize = CC_HW_KEY_SIZE, |
|---|
| 938 | | - .ivsize = AES_BLOCK_SIZE, |
|---|
| 939 | | - }, |
|---|
| 940 | | - .cipher_mode = DRV_CIPHER_BITLOCKER, |
|---|
| 941 | | - .flow_mode = S_DIN_to_AES, |
|---|
| 942 | | - .data_unit = 512, |
|---|
| 943 | | - .min_hw_rev = CC_HW_REV_712, |
|---|
| 944 | | - }, |
|---|
| 945 | | - { |
|---|
| 946 | | - .name = "bitlocker4096(paes)", |
|---|
| 947 | | - .driver_name = "bitlocker-paes-du4096-ccree", |
|---|
| 948 | | - .blocksize = AES_BLOCK_SIZE, |
|---|
| 949 | | - .template_skcipher = { |
|---|
| 950 | | - .setkey = cc_cipher_sethkey, |
|---|
| 951 | | - .encrypt = cc_cipher_encrypt, |
|---|
| 952 | | - .decrypt = cc_cipher_decrypt, |
|---|
| 953 | | - .min_keysize = CC_HW_KEY_SIZE, |
|---|
| 954 | | - .max_keysize = CC_HW_KEY_SIZE, |
|---|
| 955 | | - .ivsize = AES_BLOCK_SIZE, |
|---|
| 956 | | - }, |
|---|
| 957 | | - .cipher_mode = DRV_CIPHER_BITLOCKER, |
|---|
| 958 | | - .flow_mode = S_DIN_to_AES, |
|---|
| 959 | | - .data_unit = 4096, |
|---|
| 960 | | - .min_hw_rev = CC_HW_REV_712, |
|---|
| 1040 | + .std_body = CC_STD_NIST, |
|---|
| 1041 | + .sec_func = true, |
|---|
| 961 | 1042 | }, |
|---|
| 962 | 1043 | { |
|---|
| 963 | 1044 | .name = "ecb(paes)", |
|---|
| .. | .. |
|---|
| 974 | 1055 | .cipher_mode = DRV_CIPHER_ECB, |
|---|
| 975 | 1056 | .flow_mode = S_DIN_to_AES, |
|---|
| 976 | 1057 | .min_hw_rev = CC_HW_REV_712, |
|---|
| 1058 | + .std_body = CC_STD_NIST, |
|---|
| 1059 | + .sec_func = true, |
|---|
| 977 | 1060 | }, |
|---|
| 978 | 1061 | { |
|---|
| 979 | 1062 | .name = "cbc(paes)", |
|---|
| .. | .. |
|---|
| 990 | 1073 | .cipher_mode = DRV_CIPHER_CBC, |
|---|
| 991 | 1074 | .flow_mode = S_DIN_to_AES, |
|---|
| 992 | 1075 | .min_hw_rev = CC_HW_REV_712, |
|---|
| 1076 | + .std_body = CC_STD_NIST, |
|---|
| 1077 | + .sec_func = true, |
|---|
| 993 | 1078 | }, |
|---|
| 994 | 1079 | { |
|---|
| 995 | 1080 | .name = "ofb(paes)", |
|---|
| .. | .. |
|---|
| 1006 | 1091 | .cipher_mode = DRV_CIPHER_OFB, |
|---|
| 1007 | 1092 | .flow_mode = S_DIN_to_AES, |
|---|
| 1008 | 1093 | .min_hw_rev = CC_HW_REV_712, |
|---|
| 1094 | + .std_body = CC_STD_NIST, |
|---|
| 1095 | + .sec_func = true, |
|---|
| 1009 | 1096 | }, |
|---|
| 1010 | 1097 | { |
|---|
| 1011 | 1098 | .name = "cts(cbc(paes))", |
|---|
| .. | .. |
|---|
| 1022 | 1109 | .cipher_mode = DRV_CIPHER_CBC_CTS, |
|---|
| 1023 | 1110 | .flow_mode = S_DIN_to_AES, |
|---|
| 1024 | 1111 | .min_hw_rev = CC_HW_REV_712, |
|---|
| 1112 | + .std_body = CC_STD_NIST, |
|---|
| 1113 | + .sec_func = true, |
|---|
| 1025 | 1114 | }, |
|---|
| 1026 | 1115 | { |
|---|
| 1027 | 1116 | .name = "ctr(paes)", |
|---|
| .. | .. |
|---|
| 1038 | 1127 | .cipher_mode = DRV_CIPHER_CTR, |
|---|
| 1039 | 1128 | .flow_mode = S_DIN_to_AES, |
|---|
| 1040 | 1129 | .min_hw_rev = CC_HW_REV_712, |
|---|
| 1130 | + .std_body = CC_STD_NIST, |
|---|
| 1131 | + .sec_func = true, |
|---|
| 1041 | 1132 | }, |
|---|
| 1042 | 1133 | { |
|---|
| 1134 | + /* See https://www.mail-archive.com/linux-crypto@vger.kernel.org/msg40576.html |
|---|
| 1135 | + * for the reason why this differs from the generic |
|---|
| 1136 | + * implementation. |
|---|
| 1137 | + */ |
|---|
| 1043 | 1138 | .name = "xts(aes)", |
|---|
| 1044 | 1139 | .driver_name = "xts-aes-ccree", |
|---|
| 1045 | | - .blocksize = AES_BLOCK_SIZE, |
|---|
| 1140 | + .blocksize = 1, |
|---|
| 1046 | 1141 | .template_skcipher = { |
|---|
| 1047 | 1142 | .setkey = cc_cipher_setkey, |
|---|
| 1048 | 1143 | .encrypt = cc_cipher_encrypt, |
|---|
| .. | .. |
|---|
| 1054 | 1149 | .cipher_mode = DRV_CIPHER_XTS, |
|---|
| 1055 | 1150 | .flow_mode = S_DIN_to_AES, |
|---|
| 1056 | 1151 | .min_hw_rev = CC_HW_REV_630, |
|---|
| 1152 | + .std_body = CC_STD_NIST, |
|---|
| 1057 | 1153 | }, |
|---|
| 1058 | 1154 | { |
|---|
| 1059 | | - .name = "xts512(aes)", |
|---|
| 1060 | | - .driver_name = "xts-aes-du512-ccree", |
|---|
| 1061 | | - .blocksize = AES_BLOCK_SIZE, |
|---|
| 1062 | | - .template_skcipher = { |
|---|
| 1063 | | - .setkey = cc_cipher_setkey, |
|---|
| 1064 | | - .encrypt = cc_cipher_encrypt, |
|---|
| 1065 | | - .decrypt = cc_cipher_decrypt, |
|---|
| 1066 | | - .min_keysize = AES_MIN_KEY_SIZE * 2, |
|---|
| 1067 | | - .max_keysize = AES_MAX_KEY_SIZE * 2, |
|---|
| 1068 | | - .ivsize = AES_BLOCK_SIZE, |
|---|
| 1069 | | - }, |
|---|
| 1070 | | - .cipher_mode = DRV_CIPHER_XTS, |
|---|
| 1071 | | - .flow_mode = S_DIN_to_AES, |
|---|
| 1072 | | - .data_unit = 512, |
|---|
| 1073 | | - .min_hw_rev = CC_HW_REV_712, |
|---|
| 1074 | | - }, |
|---|
| 1075 | | - { |
|---|
| 1076 | | - .name = "xts4096(aes)", |
|---|
| 1077 | | - .driver_name = "xts-aes-du4096-ccree", |
|---|
| 1078 | | - .blocksize = AES_BLOCK_SIZE, |
|---|
| 1079 | | - .template_skcipher = { |
|---|
| 1080 | | - .setkey = cc_cipher_setkey, |
|---|
| 1081 | | - .encrypt = cc_cipher_encrypt, |
|---|
| 1082 | | - .decrypt = cc_cipher_decrypt, |
|---|
| 1083 | | - .min_keysize = AES_MIN_KEY_SIZE * 2, |
|---|
| 1084 | | - .max_keysize = AES_MAX_KEY_SIZE * 2, |
|---|
| 1085 | | - .ivsize = AES_BLOCK_SIZE, |
|---|
| 1086 | | - }, |
|---|
| 1087 | | - .cipher_mode = DRV_CIPHER_XTS, |
|---|
| 1088 | | - .flow_mode = S_DIN_to_AES, |
|---|
| 1089 | | - .data_unit = 4096, |
|---|
| 1090 | | - .min_hw_rev = CC_HW_REV_712, |
|---|
| 1091 | | - }, |
|---|
| 1092 | | - { |
|---|
| 1093 | | - .name = "essiv(aes)", |
|---|
| 1155 | + .name = "essiv(cbc(aes),sha256)", |
|---|
| 1094 | 1156 | .driver_name = "essiv-aes-ccree", |
|---|
| 1095 | 1157 | .blocksize = AES_BLOCK_SIZE, |
|---|
| 1096 | 1158 | .template_skcipher = { |
|---|
| 1097 | 1159 | .setkey = cc_cipher_setkey, |
|---|
| 1098 | 1160 | .encrypt = cc_cipher_encrypt, |
|---|
| 1099 | 1161 | .decrypt = cc_cipher_decrypt, |
|---|
| 1100 | | - .min_keysize = AES_MIN_KEY_SIZE * 2, |
|---|
| 1101 | | - .max_keysize = AES_MAX_KEY_SIZE * 2, |
|---|
| 1162 | + .min_keysize = AES_MIN_KEY_SIZE, |
|---|
| 1163 | + .max_keysize = AES_MAX_KEY_SIZE, |
|---|
| 1102 | 1164 | .ivsize = AES_BLOCK_SIZE, |
|---|
| 1103 | 1165 | }, |
|---|
| 1104 | 1166 | .cipher_mode = DRV_CIPHER_ESSIV, |
|---|
| 1105 | 1167 | .flow_mode = S_DIN_to_AES, |
|---|
| 1106 | 1168 | .min_hw_rev = CC_HW_REV_712, |
|---|
| 1107 | | - }, |
|---|
| 1108 | | - { |
|---|
| 1109 | | - .name = "essiv512(aes)", |
|---|
| 1110 | | - .driver_name = "essiv-aes-du512-ccree", |
|---|
| 1111 | | - .blocksize = AES_BLOCK_SIZE, |
|---|
| 1112 | | - .template_skcipher = { |
|---|
| 1113 | | - .setkey = cc_cipher_setkey, |
|---|
| 1114 | | - .encrypt = cc_cipher_encrypt, |
|---|
| 1115 | | - .decrypt = cc_cipher_decrypt, |
|---|
| 1116 | | - .min_keysize = AES_MIN_KEY_SIZE * 2, |
|---|
| 1117 | | - .max_keysize = AES_MAX_KEY_SIZE * 2, |
|---|
| 1118 | | - .ivsize = AES_BLOCK_SIZE, |
|---|
| 1119 | | - }, |
|---|
| 1120 | | - .cipher_mode = DRV_CIPHER_ESSIV, |
|---|
| 1121 | | - .flow_mode = S_DIN_to_AES, |
|---|
| 1122 | | - .data_unit = 512, |
|---|
| 1123 | | - .min_hw_rev = CC_HW_REV_712, |
|---|
| 1124 | | - }, |
|---|
| 1125 | | - { |
|---|
| 1126 | | - .name = "essiv4096(aes)", |
|---|
| 1127 | | - .driver_name = "essiv-aes-du4096-ccree", |
|---|
| 1128 | | - .blocksize = AES_BLOCK_SIZE, |
|---|
| 1129 | | - .template_skcipher = { |
|---|
| 1130 | | - .setkey = cc_cipher_setkey, |
|---|
| 1131 | | - .encrypt = cc_cipher_encrypt, |
|---|
| 1132 | | - .decrypt = cc_cipher_decrypt, |
|---|
| 1133 | | - .min_keysize = AES_MIN_KEY_SIZE * 2, |
|---|
| 1134 | | - .max_keysize = AES_MAX_KEY_SIZE * 2, |
|---|
| 1135 | | - .ivsize = AES_BLOCK_SIZE, |
|---|
| 1136 | | - }, |
|---|
| 1137 | | - .cipher_mode = DRV_CIPHER_ESSIV, |
|---|
| 1138 | | - .flow_mode = S_DIN_to_AES, |
|---|
| 1139 | | - .data_unit = 4096, |
|---|
| 1140 | | - .min_hw_rev = CC_HW_REV_712, |
|---|
| 1141 | | - }, |
|---|
| 1142 | | - { |
|---|
| 1143 | | - .name = "bitlocker(aes)", |
|---|
| 1144 | | - .driver_name = "bitlocker-aes-ccree", |
|---|
| 1145 | | - .blocksize = AES_BLOCK_SIZE, |
|---|
| 1146 | | - .template_skcipher = { |
|---|
| 1147 | | - .setkey = cc_cipher_setkey, |
|---|
| 1148 | | - .encrypt = cc_cipher_encrypt, |
|---|
| 1149 | | - .decrypt = cc_cipher_decrypt, |
|---|
| 1150 | | - .min_keysize = AES_MIN_KEY_SIZE * 2, |
|---|
| 1151 | | - .max_keysize = AES_MAX_KEY_SIZE * 2, |
|---|
| 1152 | | - .ivsize = AES_BLOCK_SIZE, |
|---|
| 1153 | | - }, |
|---|
| 1154 | | - .cipher_mode = DRV_CIPHER_BITLOCKER, |
|---|
| 1155 | | - .flow_mode = S_DIN_to_AES, |
|---|
| 1156 | | - .min_hw_rev = CC_HW_REV_712, |
|---|
| 1157 | | - }, |
|---|
| 1158 | | - { |
|---|
| 1159 | | - .name = "bitlocker512(aes)", |
|---|
| 1160 | | - .driver_name = "bitlocker-aes-du512-ccree", |
|---|
| 1161 | | - .blocksize = AES_BLOCK_SIZE, |
|---|
| 1162 | | - .template_skcipher = { |
|---|
| 1163 | | - .setkey = cc_cipher_setkey, |
|---|
| 1164 | | - .encrypt = cc_cipher_encrypt, |
|---|
| 1165 | | - .decrypt = cc_cipher_decrypt, |
|---|
| 1166 | | - .min_keysize = AES_MIN_KEY_SIZE * 2, |
|---|
| 1167 | | - .max_keysize = AES_MAX_KEY_SIZE * 2, |
|---|
| 1168 | | - .ivsize = AES_BLOCK_SIZE, |
|---|
| 1169 | | - }, |
|---|
| 1170 | | - .cipher_mode = DRV_CIPHER_BITLOCKER, |
|---|
| 1171 | | - .flow_mode = S_DIN_to_AES, |
|---|
| 1172 | | - .data_unit = 512, |
|---|
| 1173 | | - .min_hw_rev = CC_HW_REV_712, |
|---|
| 1174 | | - }, |
|---|
| 1175 | | - { |
|---|
| 1176 | | - .name = "bitlocker4096(aes)", |
|---|
| 1177 | | - .driver_name = "bitlocker-aes-du4096-ccree", |
|---|
| 1178 | | - .blocksize = AES_BLOCK_SIZE, |
|---|
| 1179 | | - .template_skcipher = { |
|---|
| 1180 | | - .setkey = cc_cipher_setkey, |
|---|
| 1181 | | - .encrypt = cc_cipher_encrypt, |
|---|
| 1182 | | - .decrypt = cc_cipher_decrypt, |
|---|
| 1183 | | - .min_keysize = AES_MIN_KEY_SIZE * 2, |
|---|
| 1184 | | - .max_keysize = AES_MAX_KEY_SIZE * 2, |
|---|
| 1185 | | - .ivsize = AES_BLOCK_SIZE, |
|---|
| 1186 | | - }, |
|---|
| 1187 | | - .cipher_mode = DRV_CIPHER_BITLOCKER, |
|---|
| 1188 | | - .flow_mode = S_DIN_to_AES, |
|---|
| 1189 | | - .data_unit = 4096, |
|---|
| 1190 | | - .min_hw_rev = CC_HW_REV_712, |
|---|
| 1169 | + .std_body = CC_STD_NIST, |
|---|
| 1191 | 1170 | }, |
|---|
| 1192 | 1171 | { |
|---|
| 1193 | 1172 | .name = "ecb(aes)", |
|---|
| .. | .. |
|---|
| 1204 | 1183 | .cipher_mode = DRV_CIPHER_ECB, |
|---|
| 1205 | 1184 | .flow_mode = S_DIN_to_AES, |
|---|
| 1206 | 1185 | .min_hw_rev = CC_HW_REV_630, |
|---|
| 1186 | + .std_body = CC_STD_NIST, |
|---|
| 1207 | 1187 | }, |
|---|
| 1208 | 1188 | { |
|---|
| 1209 | 1189 | .name = "cbc(aes)", |
|---|
| .. | .. |
|---|
| 1220 | 1200 | .cipher_mode = DRV_CIPHER_CBC, |
|---|
| 1221 | 1201 | .flow_mode = S_DIN_to_AES, |
|---|
| 1222 | 1202 | .min_hw_rev = CC_HW_REV_630, |
|---|
| 1203 | + .std_body = CC_STD_NIST, |
|---|
| 1223 | 1204 | }, |
|---|
| 1224 | 1205 | { |
|---|
| 1225 | 1206 | .name = "ofb(aes)", |
|---|
| 1226 | 1207 | .driver_name = "ofb-aes-ccree", |
|---|
| 1227 | | - .blocksize = AES_BLOCK_SIZE, |
|---|
| 1208 | + .blocksize = 1, |
|---|
| 1228 | 1209 | .template_skcipher = { |
|---|
| 1229 | 1210 | .setkey = cc_cipher_setkey, |
|---|
| 1230 | 1211 | .encrypt = cc_cipher_encrypt, |
|---|
| .. | .. |
|---|
| 1236 | 1217 | .cipher_mode = DRV_CIPHER_OFB, |
|---|
| 1237 | 1218 | .flow_mode = S_DIN_to_AES, |
|---|
| 1238 | 1219 | .min_hw_rev = CC_HW_REV_630, |
|---|
| 1220 | + .std_body = CC_STD_NIST, |
|---|
| 1239 | 1221 | }, |
|---|
| 1240 | 1222 | { |
|---|
| 1241 | 1223 | .name = "cts(cbc(aes))", |
|---|
| .. | .. |
|---|
| 1252 | 1234 | .cipher_mode = DRV_CIPHER_CBC_CTS, |
|---|
| 1253 | 1235 | .flow_mode = S_DIN_to_AES, |
|---|
| 1254 | 1236 | .min_hw_rev = CC_HW_REV_630, |
|---|
| 1237 | + .std_body = CC_STD_NIST, |
|---|
| 1255 | 1238 | }, |
|---|
| 1256 | 1239 | { |
|---|
| 1257 | 1240 | .name = "ctr(aes)", |
|---|
| .. | .. |
|---|
| 1268 | 1251 | .cipher_mode = DRV_CIPHER_CTR, |
|---|
| 1269 | 1252 | .flow_mode = S_DIN_to_AES, |
|---|
| 1270 | 1253 | .min_hw_rev = CC_HW_REV_630, |
|---|
| 1254 | + .std_body = CC_STD_NIST, |
|---|
| 1271 | 1255 | }, |
|---|
| 1272 | 1256 | { |
|---|
| 1273 | 1257 | .name = "cbc(des3_ede)", |
|---|
| .. | .. |
|---|
| 1284 | 1268 | .cipher_mode = DRV_CIPHER_CBC, |
|---|
| 1285 | 1269 | .flow_mode = S_DIN_to_DES, |
|---|
| 1286 | 1270 | .min_hw_rev = CC_HW_REV_630, |
|---|
| 1271 | + .std_body = CC_STD_NIST, |
|---|
| 1287 | 1272 | }, |
|---|
| 1288 | 1273 | { |
|---|
| 1289 | 1274 | .name = "ecb(des3_ede)", |
|---|
| .. | .. |
|---|
| 1300 | 1285 | .cipher_mode = DRV_CIPHER_ECB, |
|---|
| 1301 | 1286 | .flow_mode = S_DIN_to_DES, |
|---|
| 1302 | 1287 | .min_hw_rev = CC_HW_REV_630, |
|---|
| 1288 | + .std_body = CC_STD_NIST, |
|---|
| 1303 | 1289 | }, |
|---|
| 1304 | 1290 | { |
|---|
| 1305 | 1291 | .name = "cbc(des)", |
|---|
| .. | .. |
|---|
| 1316 | 1302 | .cipher_mode = DRV_CIPHER_CBC, |
|---|
| 1317 | 1303 | .flow_mode = S_DIN_to_DES, |
|---|
| 1318 | 1304 | .min_hw_rev = CC_HW_REV_630, |
|---|
| 1305 | + .std_body = CC_STD_NIST, |
|---|
| 1319 | 1306 | }, |
|---|
| 1320 | 1307 | { |
|---|
| 1321 | 1308 | .name = "ecb(des)", |
|---|
| .. | .. |
|---|
| 1332 | 1319 | .cipher_mode = DRV_CIPHER_ECB, |
|---|
| 1333 | 1320 | .flow_mode = S_DIN_to_DES, |
|---|
| 1334 | 1321 | .min_hw_rev = CC_HW_REV_630, |
|---|
| 1322 | + .std_body = CC_STD_NIST, |
|---|
| 1323 | + }, |
|---|
| 1324 | + { |
|---|
| 1325 | + .name = "cbc(sm4)", |
|---|
| 1326 | + .driver_name = "cbc-sm4-ccree", |
|---|
| 1327 | + .blocksize = SM4_BLOCK_SIZE, |
|---|
| 1328 | + .template_skcipher = { |
|---|
| 1329 | + .setkey = cc_cipher_setkey, |
|---|
| 1330 | + .encrypt = cc_cipher_encrypt, |
|---|
| 1331 | + .decrypt = cc_cipher_decrypt, |
|---|
| 1332 | + .min_keysize = SM4_KEY_SIZE, |
|---|
| 1333 | + .max_keysize = SM4_KEY_SIZE, |
|---|
| 1334 | + .ivsize = SM4_BLOCK_SIZE, |
|---|
| 1335 | + }, |
|---|
| 1336 | + .cipher_mode = DRV_CIPHER_CBC, |
|---|
| 1337 | + .flow_mode = S_DIN_to_SM4, |
|---|
| 1338 | + .min_hw_rev = CC_HW_REV_713, |
|---|
| 1339 | + .std_body = CC_STD_OSCCA, |
|---|
| 1340 | + }, |
|---|
| 1341 | + { |
|---|
| 1342 | + .name = "ecb(sm4)", |
|---|
| 1343 | + .driver_name = "ecb-sm4-ccree", |
|---|
| 1344 | + .blocksize = SM4_BLOCK_SIZE, |
|---|
| 1345 | + .template_skcipher = { |
|---|
| 1346 | + .setkey = cc_cipher_setkey, |
|---|
| 1347 | + .encrypt = cc_cipher_encrypt, |
|---|
| 1348 | + .decrypt = cc_cipher_decrypt, |
|---|
| 1349 | + .min_keysize = SM4_KEY_SIZE, |
|---|
| 1350 | + .max_keysize = SM4_KEY_SIZE, |
|---|
| 1351 | + .ivsize = 0, |
|---|
| 1352 | + }, |
|---|
| 1353 | + .cipher_mode = DRV_CIPHER_ECB, |
|---|
| 1354 | + .flow_mode = S_DIN_to_SM4, |
|---|
| 1355 | + .min_hw_rev = CC_HW_REV_713, |
|---|
| 1356 | + .std_body = CC_STD_OSCCA, |
|---|
| 1357 | + }, |
|---|
| 1358 | + { |
|---|
| 1359 | + .name = "ctr(sm4)", |
|---|
| 1360 | + .driver_name = "ctr-sm4-ccree", |
|---|
| 1361 | + .blocksize = 1, |
|---|
| 1362 | + .template_skcipher = { |
|---|
| 1363 | + .setkey = cc_cipher_setkey, |
|---|
| 1364 | + .encrypt = cc_cipher_encrypt, |
|---|
| 1365 | + .decrypt = cc_cipher_decrypt, |
|---|
| 1366 | + .min_keysize = SM4_KEY_SIZE, |
|---|
| 1367 | + .max_keysize = SM4_KEY_SIZE, |
|---|
| 1368 | + .ivsize = SM4_BLOCK_SIZE, |
|---|
| 1369 | + }, |
|---|
| 1370 | + .cipher_mode = DRV_CIPHER_CTR, |
|---|
| 1371 | + .flow_mode = S_DIN_to_SM4, |
|---|
| 1372 | + .min_hw_rev = CC_HW_REV_713, |
|---|
| 1373 | + .std_body = CC_STD_OSCCA, |
|---|
| 1374 | + }, |
|---|
| 1375 | + { |
|---|
| 1376 | + .name = "cbc(psm4)", |
|---|
| 1377 | + .driver_name = "cbc-psm4-ccree", |
|---|
| 1378 | + .blocksize = SM4_BLOCK_SIZE, |
|---|
| 1379 | + .template_skcipher = { |
|---|
| 1380 | + .setkey = cc_cipher_sethkey, |
|---|
| 1381 | + .encrypt = cc_cipher_encrypt, |
|---|
| 1382 | + .decrypt = cc_cipher_decrypt, |
|---|
| 1383 | + .min_keysize = CC_HW_KEY_SIZE, |
|---|
| 1384 | + .max_keysize = CC_HW_KEY_SIZE, |
|---|
| 1385 | + .ivsize = SM4_BLOCK_SIZE, |
|---|
| 1386 | + }, |
|---|
| 1387 | + .cipher_mode = DRV_CIPHER_CBC, |
|---|
| 1388 | + .flow_mode = S_DIN_to_SM4, |
|---|
| 1389 | + .min_hw_rev = CC_HW_REV_713, |
|---|
| 1390 | + .std_body = CC_STD_OSCCA, |
|---|
| 1391 | + .sec_func = true, |
|---|
| 1392 | + }, |
|---|
| 1393 | + { |
|---|
| 1394 | + .name = "ctr(psm4)", |
|---|
| 1395 | + .driver_name = "ctr-psm4-ccree", |
|---|
| 1396 | + .blocksize = SM4_BLOCK_SIZE, |
|---|
| 1397 | + .template_skcipher = { |
|---|
| 1398 | + .setkey = cc_cipher_sethkey, |
|---|
| 1399 | + .encrypt = cc_cipher_encrypt, |
|---|
| 1400 | + .decrypt = cc_cipher_decrypt, |
|---|
| 1401 | + .min_keysize = CC_HW_KEY_SIZE, |
|---|
| 1402 | + .max_keysize = CC_HW_KEY_SIZE, |
|---|
| 1403 | + .ivsize = SM4_BLOCK_SIZE, |
|---|
| 1404 | + }, |
|---|
| 1405 | + .cipher_mode = DRV_CIPHER_CTR, |
|---|
| 1406 | + .flow_mode = S_DIN_to_SM4, |
|---|
| 1407 | + .min_hw_rev = CC_HW_REV_713, |
|---|
| 1408 | + .std_body = CC_STD_OSCCA, |
|---|
| 1409 | + .sec_func = true, |
|---|
| 1335 | 1410 | }, |
|---|
| 1336 | 1411 | }; |
|---|
| 1337 | 1412 | |
|---|
| .. | .. |
|---|
| 1341 | 1416 | struct cc_crypto_alg *t_alg; |
|---|
| 1342 | 1417 | struct skcipher_alg *alg; |
|---|
| 1343 | 1418 | |
|---|
| 1344 | | - t_alg = kzalloc(sizeof(*t_alg), GFP_KERNEL); |
|---|
| 1419 | + t_alg = devm_kzalloc(dev, sizeof(*t_alg), GFP_KERNEL); |
|---|
| 1345 | 1420 | if (!t_alg) |
|---|
| 1346 | 1421 | return ERR_PTR(-ENOMEM); |
|---|
| 1347 | 1422 | |
|---|
| .. | .. |
|---|
| 1364 | 1439 | |
|---|
| 1365 | 1440 | t_alg->cipher_mode = tmpl->cipher_mode; |
|---|
| 1366 | 1441 | t_alg->flow_mode = tmpl->flow_mode; |
|---|
| 1367 | | - t_alg->data_unit = tmpl->data_unit; |
|---|
| 1368 | 1442 | |
|---|
| 1369 | 1443 | return t_alg; |
|---|
| 1370 | 1444 | } |
|---|
| .. | .. |
|---|
| 1372 | 1446 | int cc_cipher_free(struct cc_drvdata *drvdata) |
|---|
| 1373 | 1447 | { |
|---|
| 1374 | 1448 | struct cc_crypto_alg *t_alg, *n; |
|---|
| 1375 | | - struct cc_cipher_handle *cipher_handle = drvdata->cipher_handle; |
|---|
| 1376 | 1449 | |
|---|
| 1377 | | - if (cipher_handle) { |
|---|
| 1378 | | - /* Remove registered algs */ |
|---|
| 1379 | | - list_for_each_entry_safe(t_alg, n, &cipher_handle->alg_list, |
|---|
| 1380 | | - entry) { |
|---|
| 1381 | | - crypto_unregister_skcipher(&t_alg->skcipher_alg); |
|---|
| 1382 | | - list_del(&t_alg->entry); |
|---|
| 1383 | | - kfree(t_alg); |
|---|
| 1384 | | - } |
|---|
| 1385 | | - kfree(cipher_handle); |
|---|
| 1386 | | - drvdata->cipher_handle = NULL; |
|---|
| 1450 | + /* Remove registered algs */ |
|---|
| 1451 | + list_for_each_entry_safe(t_alg, n, &drvdata->alg_list, entry) { |
|---|
| 1452 | + crypto_unregister_skcipher(&t_alg->skcipher_alg); |
|---|
| 1453 | + list_del(&t_alg->entry); |
|---|
| 1387 | 1454 | } |
|---|
| 1388 | 1455 | return 0; |
|---|
| 1389 | 1456 | } |
|---|
| 1390 | 1457 | |
|---|
| 1391 | 1458 | int cc_cipher_alloc(struct cc_drvdata *drvdata) |
|---|
| 1392 | 1459 | { |
|---|
| 1393 | | - struct cc_cipher_handle *cipher_handle; |
|---|
| 1394 | 1460 | struct cc_crypto_alg *t_alg; |
|---|
| 1395 | 1461 | struct device *dev = drvdata_to_dev(drvdata); |
|---|
| 1396 | 1462 | int rc = -ENOMEM; |
|---|
| 1397 | 1463 | int alg; |
|---|
| 1398 | 1464 | |
|---|
| 1399 | | - cipher_handle = kmalloc(sizeof(*cipher_handle), GFP_KERNEL); |
|---|
| 1400 | | - if (!cipher_handle) |
|---|
| 1401 | | - return -ENOMEM; |
|---|
| 1402 | | - |
|---|
| 1403 | | - INIT_LIST_HEAD(&cipher_handle->alg_list); |
|---|
| 1404 | | - drvdata->cipher_handle = cipher_handle; |
|---|
| 1465 | + INIT_LIST_HEAD(&drvdata->alg_list); |
|---|
| 1405 | 1466 | |
|---|
| 1406 | 1467 | /* Linux crypto */ |
|---|
| 1407 | 1468 | dev_dbg(dev, "Number of algorithms = %zu\n", |
|---|
| 1408 | 1469 | ARRAY_SIZE(skcipher_algs)); |
|---|
| 1409 | 1470 | for (alg = 0; alg < ARRAY_SIZE(skcipher_algs); alg++) { |
|---|
| 1410 | | - if (skcipher_algs[alg].min_hw_rev > drvdata->hw_rev) |
|---|
| 1471 | + if ((skcipher_algs[alg].min_hw_rev > drvdata->hw_rev) || |
|---|
| 1472 | + !(drvdata->std_bodies & skcipher_algs[alg].std_body) || |
|---|
| 1473 | + (drvdata->sec_disabled && skcipher_algs[alg].sec_func)) |
|---|
| 1411 | 1474 | continue; |
|---|
| 1412 | 1475 | |
|---|
| 1413 | 1476 | dev_dbg(dev, "creating %s\n", skcipher_algs[alg].driver_name); |
|---|
| .. | .. |
|---|
| 1428 | 1491 | if (rc) { |
|---|
| 1429 | 1492 | dev_err(dev, "%s alg registration failed\n", |
|---|
| 1430 | 1493 | t_alg->skcipher_alg.base.cra_driver_name); |
|---|
| 1431 | | - kfree(t_alg); |
|---|
| 1432 | 1494 | goto fail0; |
|---|
| 1433 | | - } else { |
|---|
| 1434 | | - list_add_tail(&t_alg->entry, |
|---|
| 1435 | | - &cipher_handle->alg_list); |
|---|
| 1436 | | - dev_dbg(dev, "Registered %s\n", |
|---|
| 1437 | | - t_alg->skcipher_alg.base.cra_driver_name); |
|---|
| 1438 | 1495 | } |
|---|
| 1496 | + |
|---|
| 1497 | + list_add_tail(&t_alg->entry, &drvdata->alg_list); |
|---|
| 1498 | + dev_dbg(dev, "Registered %s\n", |
|---|
| 1499 | + t_alg->skcipher_alg.base.cra_driver_name); |
|---|
| 1439 | 1500 | } |
|---|
| 1440 | 1501 | return 0; |
|---|
| 1441 | 1502 | |
|---|