@@ -1,11 +1,8 @@
+// SPDX-License-Identifier: GPL-2.0-only
 /*
  * aes-ccm-glue.c - AES-CCM transform for ARMv8 with Crypto Extensions
  *
  * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License version 2 as
- * published by the Free Software Foundation.
  */

 #include <asm/neon.h>
@@ -14,6 +11,7 @@
 #include <crypto/aes.h>
 #include <crypto/scatterwalk.h>
 #include <crypto/internal/aead.h>
+#include <crypto/internal/simd.h>
 #include <crypto/internal/skcipher.h>
 #include <linux/module.h>

@@ -45,20 +43,12 @@
 asmlinkage void ce_aes_ccm_final(u8 mac[], u8 const ctr[], u32 const rk[],
                                 u32 rounds);

-asmlinkage void __aes_arm64_encrypt(u32 *rk, u8 *out, const u8 *in, int rounds);
-
 static int ccm_setkey(struct crypto_aead *tfm, const u8 *in_key,
                      unsigned int key_len)
 {
        struct crypto_aes_ctx *ctx = crypto_aead_ctx(tfm);
-       int ret;

-       ret = ce_aes_expandkey(ctx, in_key, key_len);
-       if (!ret)
-               return 0;
-
-       tfm->base.crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
-       return -EINVAL;
+       return ce_aes_expandkey(ctx, in_key, key_len);
 }

 static int ccm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
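The simplified ccm_setkey() relies on ce_aes_expandkey() returning -EINVAL for an unsupported key length; newer kernels dropped the CRYPTO_TFM_RES_BAD_KEY_LEN tfm flag entirely, so the return value alone carries the error. A minimal usage sketch from a caller's point of view (the helper name try_ccm_key is illustrative, not from this file):

```c
#include <crypto/aead.h>
#include <linux/err.h>
#include <linux/types.h>

static int try_ccm_key(const u8 *key, unsigned int keylen)
{
        struct crypto_aead *tfm;
        int err;

        tfm = crypto_alloc_aead("ccm(aes)", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        /* ce_aes_expandkey() accepts only 16-, 24- or 32-byte keys */
        err = crypto_aead_setkey(tfm, key, keylen); /* -EINVAL on bad length */

        crypto_free_aead(tfm);
        return err;
}
```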
@@ -109,7 +99,7 @@
 static void ccm_update_mac(struct crypto_aes_ctx *key, u8 mac[], u8 const in[],
                           u32 abytes, u32 *macp)
 {
-       if (may_use_simd()) {
+       if (crypto_simd_usable()) {
                kernel_neon_begin();
                ce_aes_ccm_auth_data(mac, in, abytes, macp, key->key_enc,
                                     num_rounds(key));
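crypto_simd_usable(), provided by the newly included <crypto/internal/simd.h>, replaces the bare may_use_simd() check: besides testing whether NEON may be used in the current context, it lets the crypto self-tests force the scalar fallback path. A condensed sketch of the gating pattern used throughout this file, assuming a hypothetical one-block helper cbcmac_block (the NEON call site is elided to a comment):

```c
#include <asm/neon.h>
#include <crypto/aes.h>
#include <crypto/algapi.h>              /* crypto_xor() */
#include <crypto/internal/simd.h>
#include <linux/types.h>

static void cbcmac_block(struct crypto_aes_ctx *key, u8 mac[], const u8 in[])
{
        if (crypto_simd_usable()) {
                kernel_neon_begin();
                /* Crypto Extensions path, e.g. ce_aes_ccm_auth_data() */
                kernel_neon_end();
        } else {
                aes_encrypt(key, mac, mac);             /* scalar AES library */
                crypto_xor(mac, in, AES_BLOCK_SIZE);    /* CBC-MAC chaining */
        }
}
```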
@@ -126,8 +116,7 @@
        }

        while (abytes >= AES_BLOCK_SIZE) {
-               __aes_arm64_encrypt(key->key_enc, mac, mac,
-                                   num_rounds(key));
+               aes_encrypt(key, mac, mac);
                crypto_xor(mac, in, AES_BLOCK_SIZE);

                in += AES_BLOCK_SIZE;
@@ -135,8 +124,7 @@
        }

        if (abytes > 0) {
-               __aes_arm64_encrypt(key->key_enc, mac, mac,
-                                   num_rounds(key));
+               aes_encrypt(key, mac, mac);
                crypto_xor(mac, in, abytes);
                *macp = abytes;
        }
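Both MAC loops now call aes_encrypt() from the shared AES library declared in <crypto/aes.h>, rather than the file-private __aes_arm64_encrypt() assembler routine; the library function takes the expanded crypto_aes_ctx directly, so the explicit num_rounds() argument disappears and no NEON state is touched. A self-contained sketch of that API (the all-zero key and block are placeholders, not test vectors):

```c
#include <crypto/aes.h>
#include <linux/string.h>
#include <linux/types.h>

static int demo_one_block(void)
{
        struct crypto_aes_ctx ctx;
        u8 key[AES_KEYSIZE_128] = { 0 };
        u8 block[AES_BLOCK_SIZE] = { 0 };
        int err;

        err = aes_expandkey(&ctx, key, sizeof(key));
        if (err)
                return err;                     /* -EINVAL on bad key length */

        aes_encrypt(&ctx, block, block);        /* one block, in place */
        memzero_explicit(&ctx, sizeof(ctx));    /* scrub the round keys */
        return 0;
}
```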
@@ -208,10 +196,8 @@
                        bsize = nbytes;

                crypto_inc(walk->iv, AES_BLOCK_SIZE);
-               __aes_arm64_encrypt(ctx->key_enc, buf, walk->iv,
-                                   num_rounds(ctx));
-               __aes_arm64_encrypt(ctx->key_enc, mac, mac,
-                                   num_rounds(ctx));
+               aes_encrypt(ctx, buf, walk->iv);
+               aes_encrypt(ctx, mac, mac);
                if (enc)
                        crypto_xor(mac, src, bsize);
                crypto_xor_cpy(dst, src, buf, bsize);
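The fallback path drives the CTR half of CCM with the same library cipher: increment the counter block, encrypt it to produce keystream, and XOR that into the data, while the MAC is chained separately. A condensed sketch of just the counter step (ctr_step and its parameters are illustrative stand-ins for the skcipher-walk state):

```c
#include <crypto/aes.h>
#include <crypto/algapi.h>      /* crypto_inc(), crypto_xor_cpy() */
#include <linux/kernel.h>       /* min_t() */
#include <linux/types.h>

static void ctr_step(struct crypto_aes_ctx *ctx, u8 *walk_iv,
                     u8 *dst, const u8 *src, unsigned int nbytes)
{
        u8 buf[AES_BLOCK_SIZE];
        unsigned int bsize = min_t(unsigned int, nbytes, AES_BLOCK_SIZE);

        crypto_inc(walk_iv, AES_BLOCK_SIZE);    /* next counter value */
        aes_encrypt(ctx, buf, walk_iv);         /* keystream block */
        crypto_xor_cpy(dst, src, buf, bsize);   /* dst = src ^ keystream */
}
```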
@@ -226,8 +212,8 @@
        }

        if (!err) {
-               __aes_arm64_encrypt(ctx->key_enc, buf, iv0, num_rounds(ctx));
-               __aes_arm64_encrypt(ctx->key_enc, mac, mac, num_rounds(ctx));
+               aes_encrypt(ctx, buf, iv0);
+               aes_encrypt(ctx, mac, mac);
                crypto_xor(mac, buf, AES_BLOCK_SIZE);
        }
        return err;
@@ -253,9 +239,9 @@
        /* preserve the original iv for the final round */
        memcpy(buf, req->iv, AES_BLOCK_SIZE);

-       err = skcipher_walk_aead_encrypt(&walk, req, true);
+       err = skcipher_walk_aead_encrypt(&walk, req, false);

-       if (may_use_simd()) {
+       if (crypto_simd_usable()) {
                while (walk.nbytes) {
                        u32 tail = walk.nbytes % AES_BLOCK_SIZE;

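Both the encrypt and decrypt paths now start their walk with atomic=false. Presumably this is safe because kernel_neon_begin()/kernel_neon_end() bracket only the individual cipher calls, so the walk itself runs with preemption enabled and is allowed to sleep while mapping pages. The overall shape, condensed (walk_all is an illustrative helper; the real per-chunk processing is elided):

```c
#include <crypto/internal/skcipher.h>

static int walk_all(struct aead_request *req)
{
        struct skcipher_walk walk;
        int err;

        /* atomic=false: the walk may sleep between chunks */
        err = skcipher_walk_aead_encrypt(&walk, req, false);

        while (walk.nbytes) {
                /* process walk.src.virt.addr -> walk.dst.virt.addr here */
                err = skcipher_walk_done(&walk, 0);     /* nothing left over */
        }
        return err;
}
```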
@@ -311,9 +297,9 @@
        /* preserve the original iv for the final round */
        memcpy(buf, req->iv, AES_BLOCK_SIZE);

-       err = skcipher_walk_aead_decrypt(&walk, req, true);
+       err = skcipher_walk_aead_decrypt(&walk, req, false);

-       if (may_use_simd()) {
+       if (crypto_simd_usable()) {
                while (walk.nbytes) {
                        u32 tail = walk.nbytes % AES_BLOCK_SIZE;

@@ -372,7 +358,7 @@

 static int __init aes_mod_init(void)
 {
-       if (!(elf_hwcap & HWCAP_AES))
+       if (!cpu_have_named_feature(AES))
                return -ENODEV;
        return crypto_register_aead(&ccm_aes_alg);
 }
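Finally, registration is gated on cpu_have_named_feature(AES), which consults the kernel's sanitised view of the CPU ID registers, rather than on the userspace-facing elf_hwcap bits. A sketch of the probe-and-register pattern (my_alg and the module names are hypothetical; the aead_alg fields are omitted):

```c
#include <asm/cpufeature.h>
#include <crypto/internal/aead.h>
#include <linux/module.h>

static struct aead_alg my_alg = {
        /* .setkey, .encrypt, .decrypt, .base, ... omitted */
};

static int __init my_mod_init(void)
{
        if (!cpu_have_named_feature(AES))       /* no v8 Crypto Extensions */
                return -ENODEV;
        return crypto_register_aead(&my_alg);
}

static void __exit my_mod_exit(void)
{
        crypto_unregister_aead(&my_alg);
}

module_init(my_mod_init);
module_exit(my_mod_exit);
MODULE_LICENSE("GPL");
```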