From f9004dbfff8a3fbbd7e2a88c8a4327c7f2f8e5b2 Mon Sep 17 00:00:00 2001
From: hc <hc@nodka.com>
Date: Wed, 31 Jan 2024 01:04:47 +0000
Subject: [PATCH] add driver 5G

---
 kernel/drivers/crypto/caam/caamalg_desc.c | 514 ++++++++++++++++++++++++++++++++------------------
 1 files changed, 295 insertions(+), 219 deletions(-)

diff --git a/kernel/drivers/crypto/caam/caamalg_desc.c b/kernel/drivers/crypto/caam/caamalg_desc.c
index ceb0339..7571e1a 100644
--- a/kernel/drivers/crypto/caam/caamalg_desc.c
+++ b/kernel/drivers/crypto/caam/caamalg_desc.c
@@ -1,7 +1,8 @@
+// SPDX-License-Identifier: GPL-2.0+
 /*
- * Shared descriptors for aead, ablkcipher algorithms
+ * Shared descriptors for aead, skcipher algorithms
  *
- * Copyright 2016 NXP
+ * Copyright 2016-2019 NXP
  */
 
 #include "compat.h"
@@ -32,12 +33,11 @@
 	}
 
 	jump_cmd = append_jump(desc, JUMP_TEST_ALL | JUMP_COND_SHRD);
-	append_operation(desc, type | OP_ALG_AS_INITFINAL |
-			 OP_ALG_DECRYPT);
+	append_operation(desc, type | OP_ALG_AS_INIT | OP_ALG_DECRYPT);
 	uncond_jump_cmd = append_jump(desc, JUMP_TEST_ALL);
 	set_jump_tgt_here(desc, jump_cmd);
-	append_operation(desc, type | OP_ALG_AS_INITFINAL |
-			 OP_ALG_DECRYPT | OP_ALG_AAI_DK);
+	append_operation(desc, type | OP_ALG_AS_INIT | OP_ALG_DECRYPT |
+			 OP_ALG_AAI_DK);
 	set_jump_tgt_here(desc, uncond_jump_cmd);
 }
@@ -114,11 +114,9 @@
 	append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
 			 LDST_SRCDST_BYTE_CONTEXT);
 
-#ifdef DEBUG
-	print_hex_dump(KERN_ERR,
-		       "aead null enc shdesc@" __stringify(__LINE__)": ",
-		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
-#endif
+	print_hex_dump_debug("aead null enc shdesc@" __stringify(__LINE__)": ",
+			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
+			     1);
 }
 EXPORT_SYMBOL(cnstr_shdsc_aead_null_encap);
@@ -203,11 +201,9 @@
 	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
 			     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);
 
-#ifdef DEBUG
-	print_hex_dump(KERN_ERR,
-		       "aead null dec shdesc@" __stringify(__LINE__)": ",
-		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
-#endif
+	print_hex_dump_debug("aead null dec shdesc@" __stringify(__LINE__)": ",
+			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
+			     1);
 }
 EXPORT_SYMBOL(cnstr_shdsc_aead_null_decap);
@@ -357,10 +353,9 @@
 	append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
 			 LDST_SRCDST_BYTE_CONTEXT);
 
-#ifdef DEBUG
-	print_hex_dump(KERN_ERR, "aead enc shdesc@" __stringify(__LINE__)": ",
-		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
-#endif
+	print_hex_dump_debug("aead enc shdesc@" __stringify(__LINE__)": ",
+			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
+			     1);
 }
 EXPORT_SYMBOL(cnstr_shdsc_aead_encap);
@@ -378,6 +373,7 @@
  *         with OP_ALG_AAI_HMAC_PRECOMP.
  * @ivsize: initialization vector size
  * @icvsize: integrity check value (ICV) size (truncated or full)
+ * @geniv: whether to generate Encrypted Chain IV
  * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
  * @nonce: pointer to rfc3686 nonce
  * @ctx1_iv_off: IV offset in CONTEXT1 register
@@ -474,10 +470,9 @@
 	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
 			     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);
 
-#ifdef DEBUG
-	print_hex_dump(KERN_ERR, "aead dec shdesc@" __stringify(__LINE__)": ",
-		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
-#endif
+	print_hex_dump_debug("aead dec shdesc@" __stringify(__LINE__)": ",
+			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
+			     1);
 }
 EXPORT_SYMBOL(cnstr_shdsc_aead_decap);
@@ -621,11 +616,9 @@
 	append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
 			 LDST_SRCDST_BYTE_CONTEXT);
 
-#ifdef DEBUG
-	print_hex_dump(KERN_ERR,
-		       "aead givenc shdesc@" __stringify(__LINE__)": ",
-		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
-#endif
+	print_hex_dump_debug("aead givenc shdesc@" __stringify(__LINE__)": ",
+			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
+			     1);
 }
 EXPORT_SYMBOL(cnstr_shdsc_aead_givencap);
@@ -750,10 +743,9 @@
 	append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
 			 LDST_SRCDST_BYTE_CONTEXT);
 
-#ifdef DEBUG
-	print_hex_dump(KERN_ERR, "gcm enc shdesc@" __stringify(__LINE__)": ",
-		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
-#endif
+	print_hex_dump_debug("gcm enc shdesc@" __stringify(__LINE__)": ",
+			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
+			     1);
 }
 EXPORT_SYMBOL(cnstr_shdsc_gcm_encap);
@@ -846,10 +838,9 @@
 	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
 			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);
 
-#ifdef DEBUG
-	print_hex_dump(KERN_ERR, "gcm dec shdesc@" __stringify(__LINE__)": ",
-		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
-#endif
+	print_hex_dump_debug("gcm dec shdesc@" __stringify(__LINE__)": ",
+			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
+			     1);
 }
 EXPORT_SYMBOL(cnstr_shdsc_gcm_decap);
@@ -862,13 +853,16 @@
  * @ivsize: initialization vector size
  * @icvsize: integrity check value (ICV) size (truncated or full)
  * @is_qi: true when called from caam/qi
+ *
+ * Input sequence: AAD | PTXT
+ * Output sequence: AAD | CTXT | ICV
+ * AAD length (assoclen), which includes the IV length, is available in Math3.
  */
 void cnstr_shdsc_rfc4106_encap(u32 * const desc, struct alginfo *cdata,
 			       unsigned int ivsize, unsigned int icvsize,
 			       const bool is_qi)
 {
-	u32 *key_jump_cmd;
-
+	u32 *key_jump_cmd, *zero_cryptlen_jump_cmd, *skip_instructions;
 	init_sh_desc(desc, HDR_SHARE_SERIAL);
 
 	/* Skip key loading if it is loaded due to sharing */
@@ -911,24 +905,26 @@
 	append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, ivsize);
 	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
 
-	/* Read assoc data */
+	/* Skip AAD */
+	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
+
+	/* Read cryptlen and set this value into VARSEQOUTLEN */
+	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG3, CAAM_CMD_SZ);
+
+	/* If cryptlen is ZERO jump to AAD command */
+	zero_cryptlen_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
+					    JUMP_COND_MATH_Z);
+
+	/* Read AAD data */
 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
 			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
 
+	/* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
+	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA);
+
 	/* Skip IV */
 	append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_SKIP);
-
-	/* Will read cryptlen bytes */
-	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
-
-	/* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
-	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);
-
-	/* Skip assoc data */
-	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
-
-	/* cryptlen = seqoutlen - assoclen */
-	append_math_sub(desc, VARSEQOUTLEN, VARSEQINLEN, REG0, CAAM_CMD_SZ);
+	append_math_add(desc, VARSEQINLEN, VARSEQOUTLEN, REG0, CAAM_CMD_SZ);
 
 	/* Write encrypted data */
 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
@@ -937,15 +933,25 @@
 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
 			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
 
+	/* Jump instructions to avoid double reading of AAD */
+	skip_instructions = append_jump(desc, JUMP_TEST_ALL);
+
+	/* There is no input data, cryptlen = 0 */
+	set_jump_tgt_here(desc, zero_cryptlen_jump_cmd);
+
+	/* Read AAD */
+	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
+			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST1);
+
+	set_jump_tgt_here(desc, skip_instructions);
+
 	/* Write ICV */
 	append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
 			 LDST_SRCDST_BYTE_CONTEXT);
 
-#ifdef DEBUG
-	print_hex_dump(KERN_ERR,
-		       "rfc4106 enc shdesc@" __stringify(__LINE__)": ",
-		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
-#endif
+	print_hex_dump_debug("rfc4106 enc shdesc@" __stringify(__LINE__)": ",
+			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
+			     1);
 }
 EXPORT_SYMBOL(cnstr_shdsc_rfc4106_encap);
@@ -1038,11 +1044,9 @@
 	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
 			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);
 
-#ifdef DEBUG
-	print_hex_dump(KERN_ERR,
-		       "rfc4106 dec shdesc@" __stringify(__LINE__)": ",
-		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
-#endif
+	print_hex_dump_debug("rfc4106 dec shdesc@" __stringify(__LINE__)": ",
+			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
+			     1);
 }
 EXPORT_SYMBOL(cnstr_shdsc_rfc4106_decap);
@@ -1123,11 +1127,9 @@
 	append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
 			 LDST_SRCDST_BYTE_CONTEXT);
 
-#ifdef DEBUG
-	print_hex_dump(KERN_ERR,
-		       "rfc4543 enc shdesc@" __stringify(__LINE__)": ",
-		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
-#endif
+	print_hex_dump_debug("rfc4543 enc shdesc@" __stringify(__LINE__)": ",
+			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
+			     1);
 }
 EXPORT_SYMBOL(cnstr_shdsc_rfc4543_encap);
@@ -1213,19 +1215,147 @@
 	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
 			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);
 
-#ifdef DEBUG
-	print_hex_dump(KERN_ERR,
-		       "rfc4543 dec shdesc@" __stringify(__LINE__)": ",
-		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
-#endif
+	print_hex_dump_debug("rfc4543 dec shdesc@" __stringify(__LINE__)": ",
+			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
+			     1);
 }
 EXPORT_SYMBOL(cnstr_shdsc_rfc4543_decap);
 
-/*
- * For ablkcipher encrypt and decrypt, read from req->src and
- * write to req->dst
+/**
+ * cnstr_shdsc_chachapoly - Chacha20 + Poly1305 generic AEAD (rfc7539) and
+ *                          IPsec ESP (rfc7634, a.k.a. rfc7539esp) shared
+ *                          descriptor (non-protocol).
+ * @desc: pointer to buffer used for descriptor construction
+ * @cdata: pointer to block cipher transform definitions
+ *         Valid algorithm values - OP_ALG_ALGSEL_CHACHA20 ANDed with
+ *         OP_ALG_AAI_AEAD.
+ * @adata: pointer to authentication transform definitions
+ *         Valid algorithm values - OP_ALG_ALGSEL_POLY1305 ANDed with
+ *         OP_ALG_AAI_AEAD.
+ * @ivsize: initialization vector size
+ * @icvsize: integrity check value (ICV) size (truncated or full)
+ * @encap: true if encapsulation, false if decapsulation
+ * @is_qi: true when called from caam/qi
  */
-static inline void ablkcipher_append_src_dst(u32 *desc)
+void cnstr_shdsc_chachapoly(u32 * const desc, struct alginfo *cdata,
+			    struct alginfo *adata, unsigned int ivsize,
+			    unsigned int icvsize, const bool encap,
+			    const bool is_qi)
+{
+	u32 *key_jump_cmd, *wait_cmd;
+	u32 nfifo;
+	const bool is_ipsec = (ivsize != CHACHAPOLY_IV_SIZE);
+
+	/* Note: Context registers are saved. */
+	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
+
+	/* skip key loading if they are loaded due to sharing */
+	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
+				   JUMP_COND_SHRD);
+
+	append_key_as_imm(desc, cdata->key_virt, cdata->keylen, cdata->keylen,
+			  CLASS_1 | KEY_DEST_CLASS_REG);
+
+	/* For IPsec load the salt from keymat in the context register */
+	if (is_ipsec)
+		append_load_as_imm(desc, cdata->key_virt + cdata->keylen, 4,
+				   LDST_CLASS_1_CCB | LDST_SRCDST_BYTE_CONTEXT |
+				   4 << LDST_OFFSET_SHIFT);
+
+	set_jump_tgt_here(desc, key_jump_cmd);
+
+	/* Class 2 and 1 operations: Poly & ChaCha */
+	if (encap) {
+		append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
+				 OP_ALG_ENCRYPT);
+		append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
+				 OP_ALG_ENCRYPT);
+	} else {
+		append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
+				 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
+		append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
+				 OP_ALG_DECRYPT);
+	}
+
+	if (is_qi) {
+		u32 *wait_load_cmd;
+		u32 ctx1_iv_off = is_ipsec ? 8 : 4;
+
+		/* REG3 = assoclen */
+		append_seq_load(desc, 4, LDST_CLASS_DECO |
+				LDST_SRCDST_WORD_DECO_MATH3 |
+				4 << LDST_OFFSET_SHIFT);
+
+		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
+					    JUMP_COND_CALM | JUMP_COND_NCP |
+					    JUMP_COND_NOP | JUMP_COND_NIP |
+					    JUMP_COND_NIFP);
+		set_jump_tgt_here(desc, wait_load_cmd);
+
+		append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
+				LDST_SRCDST_BYTE_CONTEXT |
+				ctx1_iv_off << LDST_OFFSET_SHIFT);
+	}
+
+	/*
+	 * MAGIC with NFIFO
+	 * Read associated data from the input and send them to class1 and
+	 * class2 alignment blocks. From class1 send data to output fifo and
+	 * then write it to memory since we don't need to encrypt AD.
+	 */
+	nfifo = NFIFOENTRY_DEST_BOTH | NFIFOENTRY_FC1 | NFIFOENTRY_FC2 |
+		NFIFOENTRY_DTYPE_POLY | NFIFOENTRY_BND;
+	append_load_imm_u32(desc, nfifo, LDST_CLASS_IND_CCB |
+			    LDST_SRCDST_WORD_INFO_FIFO_SM | LDLEN_MATH3);
+
+	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
+	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
+	append_seq_fifo_load(desc, 0, FIFOLD_TYPE_NOINFOFIFO |
+			     FIFOLD_CLASS_CLASS1 | LDST_VLF);
+	append_move_len(desc, MOVE_AUX_LS | MOVE_SRC_AUX_ABLK |
+			MOVE_DEST_OUTFIFO | MOVELEN_MRSEL_MATH3);
+	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | LDST_VLF);
+
+	/* IPsec - copy IV at the output */
+	if (is_ipsec)
+		append_seq_fifo_store(desc, ivsize, FIFOST_TYPE_METADATA |
+				      0x2 << 25);
+
+	wait_cmd = append_jump(desc, JUMP_JSL | JUMP_TYPE_LOCAL |
+			       JUMP_COND_NOP | JUMP_TEST_ALL);
+	set_jump_tgt_here(desc, wait_cmd);
+
+	if (encap) {
+		/* Read and write cryptlen bytes */
+		append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
+		append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0,
+				CAAM_CMD_SZ);
+		aead_append_src_dst(desc, FIFOLD_TYPE_MSG1OUT2);
+
+		/* Write ICV */
+		append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
+				 LDST_SRCDST_BYTE_CONTEXT);
+	} else {
+		/* Read and write cryptlen bytes */
+		append_math_add(desc, VARSEQINLEN, SEQOUTLEN, REG0,
+				CAAM_CMD_SZ);
+		append_math_add(desc, VARSEQOUTLEN, SEQOUTLEN, REG0,
+				CAAM_CMD_SZ);
+		aead_append_src_dst(desc, FIFOLD_TYPE_MSG);
+
+		/* Load ICV for verification */
+		append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
+				     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);
+	}
+
+	print_hex_dump_debug("chachapoly shdesc@" __stringify(__LINE__)": ",
+			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
+			     1);
+}
+EXPORT_SYMBOL(cnstr_shdsc_chachapoly);
+
+/* For skcipher encrypt and decrypt, read from req->src and write to req->dst */
+static inline void skcipher_append_src_dst(u32 *desc)
 {
 	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
 	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
@@ -1235,20 +1365,24 @@
 }
 
 /**
- * cnstr_shdsc_ablkcipher_encap - ablkcipher encapsulation shared descriptor
+ * cnstr_shdsc_skcipher_encap - skcipher encapsulation shared descriptor
  * @desc: pointer to buffer used for descriptor construction
  * @cdata: pointer to block cipher transform definitions
  *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
- *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
+ *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128
+ *                                - OP_ALG_ALGSEL_CHACHA20
  * @ivsize: initialization vector size
  * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
  * @ctx1_iv_off: IV offset in CONTEXT1 register
  */
-void cnstr_shdsc_ablkcipher_encap(u32 * const desc, struct alginfo *cdata,
-				  unsigned int ivsize, const bool is_rfc3686,
-				  const u32 ctx1_iv_off)
+void cnstr_shdsc_skcipher_encap(u32 * const desc, struct alginfo *cdata,
+				unsigned int ivsize, const bool is_rfc3686,
+				const u32 ctx1_iv_off)
 {
 	u32 *key_jump_cmd;
+	u32 options = cdata->algtype | OP_ALG_AS_INIT | OP_ALG_ENCRYPT;
+	bool is_chacha20 = ((cdata->algtype & OP_ALG_ALGSEL_MASK) ==
+			    OP_ALG_ALGSEL_CHACHA20);
 
 	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
 	/* Skip if already shared */
@@ -1273,9 +1407,11 @@
 
 	set_jump_tgt_here(desc, key_jump_cmd);
 
-	/* Load iv */
-	append_seq_load(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
-			LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));
+	/* Load IV, if there is one */
+	if (ivsize)
+		append_seq_load(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
+				LDST_CLASS_1_CCB | (ctx1_iv_off <<
+				LDST_OFFSET_SHIFT));
 
 	/* Load counter into CONTEXT1 reg */
 	if (is_rfc3686)
@@ -1285,35 +1421,43 @@
 				 LDST_OFFSET_SHIFT));
 
 	/* Load operation */
-	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
-			 OP_ALG_ENCRYPT);
+	if (is_chacha20)
+		options |= OP_ALG_AS_FINALIZE;
+	append_operation(desc, options);
 
 	/* Perform operation */
-	ablkcipher_append_src_dst(desc);
+	skcipher_append_src_dst(desc);
 
-#ifdef DEBUG
-	print_hex_dump(KERN_ERR,
-		       "ablkcipher enc shdesc@" __stringify(__LINE__)": ",
-		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
-#endif
+	/* Store IV */
+	if (!is_chacha20 && ivsize)
+		append_seq_store(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
+				 LDST_CLASS_1_CCB | (ctx1_iv_off <<
+				 LDST_OFFSET_SHIFT));
+
+	print_hex_dump_debug("skcipher enc shdesc@" __stringify(__LINE__)": ",
+			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
+			     1);
 }
-EXPORT_SYMBOL(cnstr_shdsc_ablkcipher_encap);
+EXPORT_SYMBOL(cnstr_shdsc_skcipher_encap);
 
 /**
- * cnstr_shdsc_ablkcipher_decap - ablkcipher decapsulation shared descriptor
+ * cnstr_shdsc_skcipher_decap - skcipher decapsulation shared descriptor
  * @desc: pointer to buffer used for descriptor construction
  * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
- *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
+ *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128
+ *                                - OP_ALG_ALGSEL_CHACHA20
  * @ivsize: initialization vector size
  * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
  * @ctx1_iv_off: IV offset in CONTEXT1 register
  */
-void cnstr_shdsc_ablkcipher_decap(u32 * const desc, struct alginfo *cdata,
-				  unsigned int ivsize, const bool is_rfc3686,
-				  const u32 ctx1_iv_off)
+void cnstr_shdsc_skcipher_decap(u32 * const desc, struct alginfo *cdata,
+				unsigned int ivsize, const bool is_rfc3686,
+				const u32 ctx1_iv_off)
 {
 	u32 *key_jump_cmd;
+	bool is_chacha20 = ((cdata->algtype & OP_ALG_ALGSEL_MASK) ==
+			    OP_ALG_ALGSEL_CHACHA20);
 
 	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
 	/* Skip if already shared */
@@ -1338,9 +1482,11 @@
 
 	set_jump_tgt_here(desc, key_jump_cmd);
 
-	/* load IV */
-	append_seq_load(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
-			LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));
+	/* Load IV, if there is one */
+	if (ivsize)
+		append_seq_load(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
+				LDST_CLASS_1_CCB | (ctx1_iv_off <<
+				LDST_OFFSET_SHIFT));
 
 	/* Load counter into CONTEXT1 reg */
 	if (is_rfc3686)
@@ -1351,111 +1497,33 @@
 	/* Choose operation */
 	if (ctx1_iv_off)
-		append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
+		append_operation(desc, cdata->algtype | OP_ALG_AS_INIT |
 				 OP_ALG_DECRYPT);
 	else
 		append_dec_op1(desc, cdata->algtype);
 
 	/* Perform operation */
-	ablkcipher_append_src_dst(desc);
+	skcipher_append_src_dst(desc);
 
-#ifdef DEBUG
-	print_hex_dump(KERN_ERR,
-		       "ablkcipher dec shdesc@" __stringify(__LINE__)": ",
-		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
-#endif
+	/* Store IV */
+	if (!is_chacha20 && ivsize)
+		append_seq_store(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
+				 LDST_CLASS_1_CCB | (ctx1_iv_off <<
+				 LDST_OFFSET_SHIFT));
+
+	print_hex_dump_debug("skcipher dec shdesc@" __stringify(__LINE__)": ",
+			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
+			     1);
 }
-EXPORT_SYMBOL(cnstr_shdsc_ablkcipher_decap);
+EXPORT_SYMBOL(cnstr_shdsc_skcipher_decap);
 
 /**
- * cnstr_shdsc_ablkcipher_givencap - ablkcipher encapsulation shared descriptor
- *                                   with HW-generated initialization vector.
- * @desc: pointer to buffer used for descriptor construction
- * @cdata: pointer to block cipher transform definitions
- *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
- *         with OP_ALG_AAI_CBC.
- * @ivsize: initialization vector size
- * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
- * @ctx1_iv_off: IV offset in CONTEXT1 register
- */
-void cnstr_shdsc_ablkcipher_givencap(u32 * const desc, struct alginfo *cdata,
-				     unsigned int ivsize, const bool is_rfc3686,
-				     const u32 ctx1_iv_off)
-{
-	u32 *key_jump_cmd, geniv;
-
-	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
-	/* Skip if already shared */
-	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
-				   JUMP_COND_SHRD);
-
-	/* Load class1 key only */
-	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
-			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
-
-	/* Load Nonce into CONTEXT1 reg */
-	if (is_rfc3686) {
-		const u8 *nonce = cdata->key_virt + cdata->keylen;
-
-		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
-				   LDST_CLASS_IND_CCB |
-				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
-		append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
-			    MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
-			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
-	}
-	set_jump_tgt_here(desc, key_jump_cmd);
-
-	/* Generate IV */
-	geniv = NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DEST_DECO |
-		NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_LC1 | NFIFOENTRY_PTYPE_RND |
-		(ivsize << NFIFOENTRY_DLEN_SHIFT);
-	append_load_imm_u32(desc, geniv, LDST_CLASS_IND_CCB |
-			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
-	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
-	append_move(desc, MOVE_WAITCOMP | MOVE_SRC_INFIFO |
-		    MOVE_DEST_CLASS1CTX | (ivsize << MOVE_LEN_SHIFT) |
-		    (ctx1_iv_off << MOVE_OFFSET_SHIFT));
-	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);
-
-	/* Copy generated IV to memory */
-	append_seq_store(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
-			 LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));
-
-	/* Load Counter into CONTEXT1 reg */
-	if (is_rfc3686)
-		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
-				     LDST_SRCDST_BYTE_CONTEXT |
-				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
-				      LDST_OFFSET_SHIFT));
-
-	if (ctx1_iv_off)
-		append_jump(desc, JUMP_JSL | JUMP_TEST_ALL | JUMP_COND_NCP |
-			    (1 << JUMP_OFFSET_SHIFT));
-
-	/* Load operation */
-	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
-			 OP_ALG_ENCRYPT);
-
-	/* Perform operation */
-	ablkcipher_append_src_dst(desc);
-
-#ifdef DEBUG
-	print_hex_dump(KERN_ERR,
-		       "ablkcipher givenc shdesc@" __stringify(__LINE__) ": ",
-		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
-#endif
-}
-EXPORT_SYMBOL(cnstr_shdsc_ablkcipher_givencap);
-
-/**
- * cnstr_shdsc_xts_ablkcipher_encap - xts ablkcipher encapsulation shared
- *                                    descriptor
+ * cnstr_shdsc_xts_skcipher_encap - xts skcipher encapsulation shared descriptor
  * @desc: pointer to buffer used for descriptor construction
  * @cdata: pointer to block cipher transform definitions
  *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_XTS.
  */
-void cnstr_shdsc_xts_ablkcipher_encap(u32 * const desc, struct alginfo *cdata)
+void cnstr_shdsc_xts_skcipher_encap(u32 * const desc, struct alginfo *cdata)
 {
 	/*
 	 * Set sector size to a big value, practically disabling
 	 *
@@ -1483,37 +1551,41 @@
 	set_jump_tgt_here(desc, key_jump_cmd);
 
 	/*
-	 * create sequence for loading the sector index
-	 * Upper 8B of IV - will be used as sector index
-	 * Lower 8B of IV - will be discarded
+	 * create sequence for loading the sector index / 16B tweak value
+	 * Lower 8B of IV - sector index / tweak lower half
+	 * Upper 8B of IV - upper half of 16B tweak
 	 */
 	append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
 			(0x20 << LDST_OFFSET_SHIFT));
-	append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);
+	append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
+			(0x30 << LDST_OFFSET_SHIFT));
 
 	/* Load operation */
 	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
 			 OP_ALG_ENCRYPT);
 
 	/* Perform operation */
-	ablkcipher_append_src_dst(desc);
+	skcipher_append_src_dst(desc);
 
-#ifdef DEBUG
-	print_hex_dump(KERN_ERR,
-		       "xts ablkcipher enc shdesc@" __stringify(__LINE__) ": ",
-		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
-#endif
+	/* Store lower 8B and upper 8B of IV */
+	append_seq_store(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
+			 (0x20 << LDST_OFFSET_SHIFT));
+	append_seq_store(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
+			 (0x30 << LDST_OFFSET_SHIFT));
+
+	print_hex_dump_debug("xts skcipher enc shdesc@" __stringify(__LINE__)
+			     ": ", DUMP_PREFIX_ADDRESS, 16, 4,
+			     desc, desc_bytes(desc), 1);
 }
-EXPORT_SYMBOL(cnstr_shdsc_xts_ablkcipher_encap);
+EXPORT_SYMBOL(cnstr_shdsc_xts_skcipher_encap);
 
 /**
- * cnstr_shdsc_xts_ablkcipher_decap - xts ablkcipher decapsulation shared
- *                                    descriptor
+ * cnstr_shdsc_xts_skcipher_decap - xts skcipher decapsulation shared descriptor
  * @desc: pointer to buffer used for descriptor construction
  * @cdata: pointer to block cipher transform definitions
  *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_XTS.
  */
-void cnstr_shdsc_xts_ablkcipher_decap(u32 * const desc, struct alginfo *cdata)
+void cnstr_shdsc_xts_skcipher_decap(u32 * const desc, struct alginfo *cdata)
 {
 	/*
 	 * Set sector size to a big value, practically disabling
 	 *
@@ -1541,27 +1613,31 @@
 	set_jump_tgt_here(desc, key_jump_cmd);
 
 	/*
-	 * create sequence for loading the sector index
-	 * Upper 8B of IV - will be used as sector index
-	 * Lower 8B of IV - will be discarded
+	 * create sequence for loading the sector index / 16B tweak value
+	 * Lower 8B of IV - sector index / tweak lower half
+	 * Upper 8B of IV - upper half of 16B tweak
 	 */
 	append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
 			(0x20 << LDST_OFFSET_SHIFT));
-	append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);
-
+	append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
+			(0x30 << LDST_OFFSET_SHIFT));
 	/* Load operation */
 	append_dec_op1(desc, cdata->algtype);
 
 	/* Perform operation */
-	ablkcipher_append_src_dst(desc);
+	skcipher_append_src_dst(desc);
 
-#ifdef DEBUG
-	print_hex_dump(KERN_ERR,
-		       "xts ablkcipher dec shdesc@" __stringify(__LINE__) ": ",
-		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
-#endif
+	/* Store lower 8B and upper 8B of IV */
+	append_seq_store(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
+			 (0x20 << LDST_OFFSET_SHIFT));
+	append_seq_store(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
+			 (0x30 << LDST_OFFSET_SHIFT));
+
+	print_hex_dump_debug("xts skcipher dec shdesc@" __stringify(__LINE__)
+			     ": ", DUMP_PREFIX_ADDRESS, 16, 4, desc,
+			     desc_bytes(desc), 1);
 }
-EXPORT_SYMBOL(cnstr_shdsc_xts_ablkcipher_decap);
+EXPORT_SYMBOL(cnstr_shdsc_xts_skcipher_decap);
 
 MODULE_LICENSE("GPL");
 MODULE_DESCRIPTION("FSL CAAM descriptor support");
-- 
Gitblit v1.6.2
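
For readers unfamiliar with the renamed API, the sketch below (not part of the patch) shows how a caller in the style of caamalg.c might drive cnstr_shdsc_skcipher_encap() for cbc(aes). The constructor signature and the struct alginfo fields are taken from the diff above; the wrapper function name, the key-inlining choice and the buffer handling are illustrative assumptions only.

/*
 * Illustrative sketch, not part of this patch: build a cbc(aes) encrypt
 * shared descriptor with the renamed helper. Assumes the alginfo layout
 * used throughout caamalg_desc.c and a caller-provided descriptor buffer.
 */
#include "compat.h"
#include "desc_constr.h"
#include "caamalg_desc.h"

static void example_cbc_aes_enc_shdesc(u32 *sh_desc, const u8 *key,
				       unsigned int keylen)
{
	struct alginfo cdata = {
		.algtype    = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
		.key_virt   = key,
		.keylen     = keylen,
		.key_inline = true,	/* assume the key fits inline */
	};

	/* cbc(aes): 16-byte IV at offset 0 of CONTEXT1, no rfc3686 wrapping */
	cnstr_shdsc_skcipher_encap(sh_desc, &cdata, AES_BLOCK_SIZE,
				   false, 0);
}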