| .. | .. | 
|---|
 | 1 | +// SPDX-License-Identifier: GPL-2.0-only  | 
|---|
| 1 | 2 |  /* Glue code for CAMELLIA encryption optimized for sparc64 crypto opcodes. | 
|---|
| 2 | 3 |   * | 
|---|
| 3 | 4 |   * Copyright (C) 2012 David S. Miller <davem@davemloft.net> | 
|---|
| .. | .. | 
|---|
| 11 | 12 |  #include <linux/mm.h> | 
|---|
| 12 | 13 |  #include <linux/types.h> | 
|---|
| 13 | 14 |  #include <crypto/algapi.h> | 
|---|
 | 15 | +#include <crypto/internal/skcipher.h>  | 
|---|
| 14 | 16 |   | 
|---|
| 15 | 17 |  #include <asm/fpumacro.h> | 
|---|
| 16 | 18 |  #include <asm/pstate.h> | 
|---|
| .. | .. | 
|---|
| 37 | 39 |  { | 
|---|
| 38 | 40 |  	struct camellia_sparc64_ctx *ctx = crypto_tfm_ctx(tfm); | 
|---|
| 39 | 41 |  	const u32 *in_key = (const u32 *) _in_key; | 
|---|
| 40 |  | -	u32 *flags = &tfm->crt_flags;  | 
|---|
| 41 | 42 |   | 
|---|
| 42 |  | -	if (key_len != 16 && key_len != 24 && key_len != 32) {  | 
|---|
| 43 |  | -		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;  | 
|---|
 | 43 | +	if (key_len != 16 && key_len != 24 && key_len != 32)  | 
|---|
| 44 | 44 |  		return -EINVAL; | 
|---|
| 45 |  | -	}  | 
|---|
| 46 | 45 |   | 
|---|
| 47 | 46 |  	ctx->key_len = key_len; | 
|---|
| 48 | 47 |   | 
|---|
| 49 | 48 |  	camellia_sparc64_key_expand(in_key, &ctx->encrypt_key[0], | 
|---|
| 50 | 49 |  				    key_len, &ctx->decrypt_key[0]); | 
|---|
| 51 | 50 |  	return 0; | 
|---|
 | 51 | +}  | 
|---|
 | 52 | +  | 
|---|
 | 53 | +static int camellia_set_key_skcipher(struct crypto_skcipher *tfm,  | 
|---|
 | 54 | +				     const u8 *in_key, unsigned int key_len)  | 
|---|
 | 55 | +{  | 
|---|
 | 56 | +	return camellia_set_key(crypto_skcipher_tfm(tfm), in_key, key_len);  | 
|---|
| 52 | 57 |  } | 
|---|
| 53 | 58 |   | 
|---|
| 54 | 59 |  extern void camellia_sparc64_crypt(const u64 *key, const u32 *input, | 
|---|
| .. | .. | 
|---|
| 80 | 85 |  extern ecb_crypt_op camellia_sparc64_ecb_crypt_3_grand_rounds; | 
|---|
| 81 | 86 |  extern ecb_crypt_op camellia_sparc64_ecb_crypt_4_grand_rounds; | 
|---|
| 82 | 87 |   | 
|---|
| 83 |  | -#define CAMELLIA_BLOCK_MASK	(~(CAMELLIA_BLOCK_SIZE - 1))  | 
|---|
| 84 |  | -  | 
|---|
| 85 |  | -static int __ecb_crypt(struct blkcipher_desc *desc,  | 
|---|
| 86 |  | -		       struct scatterlist *dst, struct scatterlist *src,  | 
|---|
| 87 |  | -		       unsigned int nbytes, bool encrypt)  | 
|---|
/* Common ECB worker for both directions.
 *
 * The expanded key is loaded into FPU registers once before the walk and
 * reused for every chunk; fprs_write(0) at the end discards the dirtied
 * FPU state.  The walk is requested atomic (third argument true) —
 * presumably so the loaded FPU key state cannot be lost by sleeping
 * between chunks; TODO confirm against skcipher_walk_virt() semantics.
 */
static int __ecb_crypt(struct skcipher_request *req, bool encrypt)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct camellia_sparc64_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	ecb_crypt_op *op;
	const u64 *key;
	unsigned int nbytes;
	int err;

	/* 16-byte (128-bit) keys use the 3-grand-rounds routine; the
	 * longer key sizes use the 4-grand-rounds variant.
	 */
	op = camellia_sparc64_ecb_crypt_3_grand_rounds;
	if (ctx->key_len != 16)
		op = camellia_sparc64_ecb_crypt_4_grand_rounds;

	err = skcipher_walk_virt(&walk, req, true);
	if (err)
		return err;

	/* ECB uses a separate key schedule per direction. */
	if (encrypt)
		key = &ctx->encrypt_key[0];
	else
		key = &ctx->decrypt_key[0];
	camellia_sparc64_load_keys(key, ctx->key_len);
	while ((nbytes = walk.nbytes) != 0) {
		/* Process whole blocks only; any partial tail is handed
		 * back to skcipher_walk_done() to carry into the next
		 * iteration.
		 */
		op(walk.src.virt.addr, walk.dst.virt.addr,
		   round_down(nbytes, CAMELLIA_BLOCK_SIZE), key);
		err = skcipher_walk_done(&walk, nbytes % CAMELLIA_BLOCK_SIZE);
	}
	fprs_write(0);	/* release the FPU register state we used */
	return err;
}
|---|
| 125 | 119 |   | 
|---|
| 126 |  | -static int ecb_encrypt(struct blkcipher_desc *desc,  | 
|---|
| 127 |  | -		       struct scatterlist *dst, struct scatterlist *src,  | 
|---|
| 128 |  | -		       unsigned int nbytes)  | 
|---|
/* skcipher encrypt entry point for ecb(camellia). */
static int ecb_encrypt(struct skcipher_request *req)
{
	return __ecb_crypt(req, true);
}
|---|
| 132 | 124 |   | 
|---|
| 133 |  | -static int ecb_decrypt(struct blkcipher_desc *desc,  | 
|---|
| 134 |  | -		       struct scatterlist *dst, struct scatterlist *src,  | 
|---|
| 135 |  | -		       unsigned int nbytes)  | 
|---|
/* skcipher decrypt entry point for ecb(camellia). */
static int ecb_decrypt(struct skcipher_request *req)
{
	return __ecb_crypt(req, false);
}
|---|
| 139 | 129 |   | 
|---|
| 140 | 130 |  typedef void cbc_crypt_op(const u64 *input, u64 *output, unsigned int len, | 
|---|
| .. | .. | 
|---|
| 145 | 135 |  extern cbc_crypt_op camellia_sparc64_cbc_decrypt_3_grand_rounds; | 
|---|
| 146 | 136 |  extern cbc_crypt_op camellia_sparc64_cbc_decrypt_4_grand_rounds; | 
|---|
| 147 | 137 |   | 
|---|
| 148 |  | -static int cbc_encrypt(struct blkcipher_desc *desc,  | 
|---|
| 149 |  | -		       struct scatterlist *dst, struct scatterlist *src,  | 
|---|
| 150 |  | -		       unsigned int nbytes)  | 
|---|
/* CBC encryption.  Chaining (IV handling) is done inside the assembly
 * routine, which receives walk.iv directly.  As in __ecb_crypt(), the
 * key is loaded into FPU registers once up front, the walk is atomic,
 * and fprs_write(0) discards the FPU state afterwards.
 */
static int cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct camellia_sparc64_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	cbc_crypt_op *op;
	const u64 *key;
	unsigned int nbytes;
	int err;

	/* 16-byte keys: 3 grand rounds; longer keys: 4 grand rounds. */
	op = camellia_sparc64_cbc_encrypt_3_grand_rounds;
	if (ctx->key_len != 16)
		op = camellia_sparc64_cbc_encrypt_4_grand_rounds;

	err = skcipher_walk_virt(&walk, req, true);
	if (err)
		return err;

	/* CBC encryption always uses the encryption key schedule. */
	key = &ctx->encrypt_key[0];
	camellia_sparc64_load_keys(key, ctx->key_len);
	while ((nbytes = walk.nbytes) != 0) {
		/* Whole blocks only; the partial tail is carried forward
		 * by skcipher_walk_done().
		 */
		op(walk.src.virt.addr, walk.dst.virt.addr,
		   round_down(nbytes, CAMELLIA_BLOCK_SIZE), key, walk.iv);
		err = skcipher_walk_done(&walk, nbytes % CAMELLIA_BLOCK_SIZE);
	}
	fprs_write(0);	/* release the FPU register state we used */
	return err;
}
|---|
| 186 | 166 |   | 
|---|
| 187 |  | -static int cbc_decrypt(struct blkcipher_desc *desc,  | 
|---|
| 188 |  | -		       struct scatterlist *dst, struct scatterlist *src,  | 
|---|
| 189 |  | -		       unsigned int nbytes)  | 
|---|
/* CBC decryption.  Mirror of cbc_encrypt() except that it selects the
 * decrypt assembly routines and the decryption key schedule.
 */
static int cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct camellia_sparc64_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	cbc_crypt_op *op;
	const u64 *key;
	unsigned int nbytes;
	int err;

	/* 16-byte keys: 3 grand rounds; longer keys: 4 grand rounds. */
	op = camellia_sparc64_cbc_decrypt_3_grand_rounds;
	if (ctx->key_len != 16)
		op = camellia_sparc64_cbc_decrypt_4_grand_rounds;

	err = skcipher_walk_virt(&walk, req, true);
	if (err)
		return err;

	/* CBC decryption uses the decryption key schedule. */
	key = &ctx->decrypt_key[0];
	camellia_sparc64_load_keys(key, ctx->key_len);
	while ((nbytes = walk.nbytes) != 0) {
		/* Whole blocks only; the partial tail is carried forward
		 * by skcipher_walk_done().
		 */
		op(walk.src.virt.addr, walk.dst.virt.addr,
		   round_down(nbytes, CAMELLIA_BLOCK_SIZE), key, walk.iv);
		err = skcipher_walk_done(&walk, nbytes % CAMELLIA_BLOCK_SIZE);
	}
	fprs_write(0);	/* release the FPU register state we used */
	return err;
}
|---|
| 225 | 195 |   | 
|---|
| 226 |  | -static struct crypto_alg algs[] = { {  | 
|---|
 | 196 | +static struct crypto_alg cipher_alg = {  | 
|---|
| 227 | 197 |  	.cra_name		= "camellia", | 
|---|
| 228 | 198 |  	.cra_driver_name	= "camellia-sparc64", | 
|---|
| 229 | 199 |  	.cra_priority		= SPARC_CR_OPCODE_PRIORITY, | 
|---|
| .. | .. | 
|---|
| 241 | 211 |  			.cia_decrypt		= camellia_decrypt | 
|---|
| 242 | 212 |  		} | 
|---|
| 243 | 213 |  	} | 
|---|
| 244 |  | -}, {  | 
|---|
| 245 |  | -	.cra_name		= "ecb(camellia)",  | 
|---|
| 246 |  | -	.cra_driver_name	= "ecb-camellia-sparc64",  | 
|---|
| 247 |  | -	.cra_priority		= SPARC_CR_OPCODE_PRIORITY,  | 
|---|
| 248 |  | -	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,  | 
|---|
| 249 |  | -	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,  | 
|---|
| 250 |  | -	.cra_ctxsize		= sizeof(struct camellia_sparc64_ctx),  | 
|---|
| 251 |  | -	.cra_alignmask		= 7,  | 
|---|
| 252 |  | -	.cra_type		= &crypto_blkcipher_type,  | 
|---|
| 253 |  | -	.cra_module		= THIS_MODULE,  | 
|---|
| 254 |  | -	.cra_u = {  | 
|---|
| 255 |  | -		.blkcipher = {  | 
|---|
| 256 |  | -			.min_keysize	= CAMELLIA_MIN_KEY_SIZE,  | 
|---|
| 257 |  | -			.max_keysize	= CAMELLIA_MAX_KEY_SIZE,  | 
|---|
| 258 |  | -			.setkey		= camellia_set_key,  | 
|---|
| 259 |  | -			.encrypt	= ecb_encrypt,  | 
|---|
| 260 |  | -			.decrypt	= ecb_decrypt,  | 
|---|
| 261 |  | -		},  | 
|---|
| 262 |  | -	},  | 
|---|
| 263 |  | -}, {  | 
|---|
| 264 |  | -	.cra_name		= "cbc(camellia)",  | 
|---|
| 265 |  | -	.cra_driver_name	= "cbc-camellia-sparc64",  | 
|---|
| 266 |  | -	.cra_priority		= SPARC_CR_OPCODE_PRIORITY,  | 
|---|
| 267 |  | -	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,  | 
|---|
| 268 |  | -	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,  | 
|---|
| 269 |  | -	.cra_ctxsize		= sizeof(struct camellia_sparc64_ctx),  | 
|---|
| 270 |  | -	.cra_alignmask		= 7,  | 
|---|
| 271 |  | -	.cra_type		= &crypto_blkcipher_type,  | 
|---|
| 272 |  | -	.cra_module		= THIS_MODULE,  | 
|---|
| 273 |  | -	.cra_u = {  | 
|---|
| 274 |  | -		.blkcipher = {  | 
|---|
| 275 |  | -			.min_keysize	= CAMELLIA_MIN_KEY_SIZE,  | 
|---|
| 276 |  | -			.max_keysize	= CAMELLIA_MAX_KEY_SIZE,  | 
|---|
| 277 |  | -			.ivsize		= CAMELLIA_BLOCK_SIZE,  | 
|---|
| 278 |  | -			.setkey		= camellia_set_key,  | 
|---|
| 279 |  | -			.encrypt	= cbc_encrypt,  | 
|---|
| 280 |  | -			.decrypt	= cbc_decrypt,  | 
|---|
| 281 |  | -		},  | 
|---|
| 282 |  | -	},  | 
|---|
| 283 |  | -}  | 
|---|
 | 214 | +};  | 
|---|
 | 215 | +  | 
|---|
/* skcipher algorithm descriptors for the ECB and CBC modes.  The plain
 * single-block "camellia" cipher is registered separately via cipher_alg.
 */
static struct skcipher_alg skcipher_algs[] = {
	{
		.base.cra_name		= "ecb(camellia)",
		.base.cra_driver_name	= "ecb-camellia-sparc64",
		.base.cra_priority	= SPARC_CR_OPCODE_PRIORITY,
		.base.cra_blocksize	= CAMELLIA_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct camellia_sparc64_ctx),
		/* alignmask 7: data is handed to the asm routines as u64
		 * words, so 8-byte alignment is required — TODO confirm
		 * against the assembly implementation.
		 */
		.base.cra_alignmask	= 7,
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= CAMELLIA_MIN_KEY_SIZE,
		.max_keysize		= CAMELLIA_MAX_KEY_SIZE,
		.setkey			= camellia_set_key_skcipher,
		.encrypt		= ecb_encrypt,
		.decrypt		= ecb_decrypt,
	}, {
		.base.cra_name		= "cbc(camellia)",
		.base.cra_driver_name	= "cbc-camellia-sparc64",
		.base.cra_priority	= SPARC_CR_OPCODE_PRIORITY,
		.base.cra_blocksize	= CAMELLIA_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct camellia_sparc64_ctx),
		.base.cra_alignmask	= 7,
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= CAMELLIA_MIN_KEY_SIZE,
		.max_keysize		= CAMELLIA_MAX_KEY_SIZE,
		/* CBC additionally carries a one-block IV. */
		.ivsize			= CAMELLIA_BLOCK_SIZE,
		.setkey			= camellia_set_key_skcipher,
		.encrypt		= cbc_encrypt,
		.decrypt		= cbc_decrypt,
	}
};
|---|
| 285 | 246 |   | 
|---|
| 286 | 247 |  static bool __init sparc64_has_camellia_opcode(void) | 
|---|
| .. | .. | 
|---|
| 299 | 260 |   | 
|---|
| 300 | 261 |  static int __init camellia_sparc64_mod_init(void) | 
|---|
| 301 | 262 |  { | 
|---|
| 302 |  | -	int i;  | 
|---|
 | 263 | +	int err;  | 
|---|
| 303 | 264 |   | 
|---|
| 304 |  | -	for (i = 0; i < ARRAY_SIZE(algs); i++)  | 
|---|
| 305 |  | -		INIT_LIST_HEAD(&algs[i].cra_list);  | 
|---|
| 306 |  | -  | 
|---|
| 307 |  | -	if (sparc64_has_camellia_opcode()) {  | 
|---|
| 308 |  | -		pr_info("Using sparc64 camellia opcodes optimized CAMELLIA implementation\n");  | 
|---|
| 309 |  | -		return crypto_register_algs(algs, ARRAY_SIZE(algs));  | 
|---|
 | 265 | +	if (!sparc64_has_camellia_opcode()) {  | 
|---|
 | 266 | +		pr_info("sparc64 camellia opcodes not available.\n");  | 
|---|
 | 267 | +		return -ENODEV;  | 
|---|
| 310 | 268 |  	} | 
|---|
| 311 |  | -	pr_info("sparc64 camellia opcodes not available.\n");  | 
|---|
| 312 |  | -	return -ENODEV;  | 
|---|
 | 269 | +	pr_info("Using sparc64 camellia opcodes optimized CAMELLIA implementation\n");  | 
|---|
 | 270 | +	err = crypto_register_alg(&cipher_alg);  | 
|---|
 | 271 | +	if (err)  | 
|---|
 | 272 | +		return err;  | 
|---|
 | 273 | +	err = crypto_register_skciphers(skcipher_algs,  | 
|---|
 | 274 | +					ARRAY_SIZE(skcipher_algs));  | 
|---|
 | 275 | +	if (err)  | 
|---|
 | 276 | +		crypto_unregister_alg(&cipher_alg);  | 
|---|
 | 277 | +	return err;  | 
|---|
| 313 | 278 |  } | 
|---|
| 314 | 279 |   | 
|---|
/* Module exit: unregister everything that mod_init() registered. */
static void __exit camellia_sparc64_mod_fini(void)
{
	crypto_unregister_alg(&cipher_alg);
	crypto_unregister_skciphers(skcipher_algs, ARRAY_SIZE(skcipher_algs));
}
|---|
| 319 | 285 |   | 
|---|
| 320 | 286 |  module_init(camellia_sparc64_mod_init); | 
|---|