@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: GPL-2.0-or-later
 /*
  * Glue Code for SSE2 assembler versions of Serpent Cipher
  *
@@ -11,22 +12,6 @@
  * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
  * CTR part based on code (crypto/ctr.c) by:
  *   (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation; either version 2 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program; if not, write to the Free Software
- * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
- * USA
- *
 */
 
 #include <linux/module.h>
@@ -46,9 +31,11 @@
         return __serpent_setkey(crypto_skcipher_ctx(tfm), key, keylen);
 }
 
-static void serpent_decrypt_cbc_xway(void *ctx, u128 *dst, const u128 *src)
+static void serpent_decrypt_cbc_xway(const void *ctx, u8 *d, const u8 *s)
 {
         u128 ivs[SERPENT_PARALLEL_BLOCKS - 1];
+        u128 *dst = (u128 *)d;
+        const u128 *src = (const u128 *)s;
         unsigned int j;
 
         for (j = 0; j < SERPENT_PARALLEL_BLOCKS - 1; j++)
@@ -60,9 +47,11 @@
                 u128_xor(dst + (j + 1), dst + (j + 1), ivs + j);
 }
 
-static void serpent_crypt_ctr(void *ctx, u128 *dst, const u128 *src, le128 *iv)
+static void serpent_crypt_ctr(const void *ctx, u8 *d, const u8 *s, le128 *iv)
 {
         be128 ctrblk;
+        u128 *dst = (u128 *)d;
+        const u128 *src = (const u128 *)s;
 
         le128_to_be128(&ctrblk, iv);
         le128_inc(iv);
@@ -71,10 +60,12 @@
         u128_xor(dst, src, (u128 *)&ctrblk);
 }
 
-static void serpent_crypt_ctr_xway(void *ctx, u128 *dst, const u128 *src,
+static void serpent_crypt_ctr_xway(const void *ctx, u8 *d, const u8 *s,
                                    le128 *iv)
 {
         be128 ctrblks[SERPENT_PARALLEL_BLOCKS];
+        u128 *dst = (u128 *)d;
+        const u128 *src = (const u128 *)s;
         unsigned int i;
 
         for (i = 0; i < SERPENT_PARALLEL_BLOCKS; i++) {
@@ -94,10 +85,10 @@
 
         .funcs = { {
                 .num_blocks = SERPENT_PARALLEL_BLOCKS,
-                .fn_u = { .ecb = GLUE_FUNC_CAST(serpent_enc_blk_xway) }
+                .fn_u = { .ecb = serpent_enc_blk_xway }
         }, {
                 .num_blocks = 1,
-                .fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_encrypt) }
+                .fn_u = { .ecb = __serpent_encrypt }
         } }
 };
 
@@ -107,10 +98,10 @@
 
         .funcs = { {
                 .num_blocks = SERPENT_PARALLEL_BLOCKS,
-                .fn_u = { .ctr = GLUE_CTR_FUNC_CAST(serpent_crypt_ctr_xway) }
+                .fn_u = { .ctr = serpent_crypt_ctr_xway }
         }, {
                 .num_blocks = 1,
-                .fn_u = { .ctr = GLUE_CTR_FUNC_CAST(serpent_crypt_ctr) }
+                .fn_u = { .ctr = serpent_crypt_ctr }
         } }
 };
 
@@ -120,10 +111,10 @@
 
         .funcs = { {
                 .num_blocks = SERPENT_PARALLEL_BLOCKS,
-                .fn_u = { .ecb = GLUE_FUNC_CAST(serpent_dec_blk_xway) }
+                .fn_u = { .ecb = serpent_dec_blk_xway }
         }, {
                 .num_blocks = 1,
-                .fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_decrypt) }
+                .fn_u = { .ecb = __serpent_decrypt }
         } }
 };
 
@@ -133,10 +124,10 @@
 
         .funcs = { {
                 .num_blocks = SERPENT_PARALLEL_BLOCKS,
-                .fn_u = { .cbc = GLUE_CBC_FUNC_CAST(serpent_decrypt_cbc_xway) }
+                .fn_u = { .cbc = serpent_decrypt_cbc_xway }
         }, {
                 .num_blocks = 1,
-                .fn_u = { .cbc = GLUE_CBC_FUNC_CAST(__serpent_decrypt) }
+                .fn_u = { .cbc = __serpent_decrypt }
         } }
 };
 
@@ -152,7 +143,7 @@
 
 static int cbc_encrypt(struct skcipher_request *req)
 {
-        return glue_cbc_encrypt_req_128bit(GLUE_FUNC_CAST(__serpent_encrypt),
+        return glue_cbc_encrypt_req_128bit(__serpent_encrypt,
                                            req);
 }
 
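The prototype changes above give every glue callback one shared signature (`const void *ctx, u8 *dst, const u8 *src`), so the `.fn_u` initializers can store the function pointers directly instead of forcing them through the `GLUE_FUNC_CAST()` wrappers; calling a function through a mismatched pointer type is undefined behaviour in C, so each helper now takes byte pointers and casts to its natural block type internally. The standalone sketch below illustrates that cast-free callback-table pattern under simplified assumptions: the `u8`/`u128` stand-ins, `ecb_fn_t`, and the `demo_*` names are invented for the example and are not the kernel's actual glue_helper definitions.

```c
#include <stdint.h>
#include <stdio.h>

typedef uint8_t u8;
typedef struct { uint64_t a, b; } u128;  /* stand-in for one 16-byte block */

/* Common signature shared by every callback, mirroring the diff:
 * (const void *ctx, u8 *dst, const u8 *src). */
typedef void (*ecb_fn_t)(const void *ctx, u8 *dst, const u8 *src);

/* A handler casts the byte pointers to its natural block type internally,
 * as serpent_decrypt_cbc_xway() does after the change. */
static void demo_one_block(const void *ctx, u8 *d, const u8 *s)
{
        u128 *dst = (u128 *)d;
        const u128 *src = (const u128 *)s;

        (void)ctx;
        dst->a = ~src->a;  /* placeholder "cipher" work */
        dst->b = ~src->b;
}

/* Analogue of one .funcs[] entry from the diff's dispatch tables. */
struct demo_glue_func {
        unsigned int num_blocks;
        ecb_fn_t ecb;  /* assigned below without any function-pointer cast */
};

static const struct demo_glue_func demo_funcs[] = {
        { .num_blocks = 1, .ecb = demo_one_block },
};

int main(void)
{
        u128 in = { 0, 0 }, out;

        /* Dispatch through the table, passing byte pointers. */
        demo_funcs[0].ecb(NULL, (u8 *)&out, (const u8 *)&in);
        printf("out.a = 0x%016llx\n", (unsigned long long)out.a);
        return 0;
}
```

Because the pointer stored in `.ecb` already has the exact type the caller invokes it with, the table works without casts; the same idea is what lets the diff drop `GLUE_FUNC_CAST`, `GLUE_CBC_FUNC_CAST`, and `GLUE_CTR_FUNC_CAST` from the Serpent glue code.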