From: Pablo de Lara
Date: Sun, 2 Jul 2017 05:41:14 +0000 (+0100)
Subject: cryptodev: move IV parameters to session
X-Git-Tag: spdx-start~2686
X-Git-Url: http://git.droids-corp.org/?a=commitdiff_plain;h=0fbd75a99f;p=dpdk.git

cryptodev: move IV parameters to session

Since IV parameters (offset and length) should not change
for operations in the same session, these parameters are
moved to the crypto transform structure, so they will be
stored in the sessions.

Signed-off-by: Pablo de Lara
Acked-by: Declan Doherty
Acked-by: Akhil Goyal
Acked-by: Fiona Trahe
---

diff --git a/app/test-crypto-perf/cperf_ops.c b/app/test-crypto-perf/cperf_ops.c
index 018ce0e607..f2154459f0 100644
--- a/app/test-crypto-perf/cperf_ops.c
+++ b/app/test-crypto-perf/cperf_ops.c
@@ -106,9 +106,6 @@ cperf_set_ops_cipher(struct rte_crypto_op **ops,
 		sym_op->m_dst = bufs_out[i];
 
 		/* cipher parameters */
-		sym_op->cipher.iv.offset = iv_offset;
-		sym_op->cipher.iv.length = test_vector->iv.length;
-
 		if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
 				options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
 				options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
@@ -215,9 +212,6 @@ cperf_set_ops_cipher_auth(struct rte_crypto_op **ops,
 		sym_op->m_dst = bufs_out[i];
 
 		/* cipher parameters */
-		sym_op->cipher.iv.offset = iv_offset;
-		sym_op->cipher.iv.length = test_vector->iv.length;
-
 		if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
 				options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
 				options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
@@ -302,9 +296,6 @@ cperf_set_ops_aead(struct rte_crypto_op **ops,
 		sym_op->m_dst = bufs_out[i];
 
 		/* cipher parameters */
-		sym_op->cipher.iv.offset = iv_offset;
-		sym_op->cipher.iv.length = test_vector->iv.length;
-
 		sym_op->cipher.data.length = options->test_buffer_size;
 		sym_op->cipher.data.offset =
 				RTE_ALIGN_CEIL(options->auth_aad_sz, 16);
@@ -365,7 +356,8 @@ cperf_set_ops_aead(struct rte_crypto_op **ops,
 static struct rte_cryptodev_sym_session *
 cperf_create_session(uint8_t dev_id,
 	const struct cperf_options *options,
-	const struct cperf_test_vector *test_vector)
+	const struct cperf_test_vector *test_vector,
+	uint16_t iv_offset)
 {
 	struct rte_crypto_sym_xform cipher_xform;
 	struct rte_crypto_sym_xform auth_xform;
@@ -379,6 +371,7 @@ cperf_create_session(uint8_t dev_id,
 		cipher_xform.next = NULL;
 		cipher_xform.cipher.algo = options->cipher_algo;
 		cipher_xform.cipher.op = options->cipher_op;
+		cipher_xform.cipher.iv.offset = iv_offset;
 
 		/* cipher different than null */
 		if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
@@ -386,9 +379,12 @@ cperf_create_session(uint8_t dev_id,
 				test_vector->cipher_key.data;
 			cipher_xform.cipher.key.length =
 				test_vector->cipher_key.length;
+			cipher_xform.cipher.iv.length = test_vector->iv.length;
+
 		} else {
 			cipher_xform.cipher.key.data = NULL;
 			cipher_xform.cipher.key.length = 0;
+			cipher_xform.cipher.iv.length = 0;
 		}
 		/* create crypto session */
 		sess = rte_cryptodev_sym_session_create(dev_id, &cipher_xform);
@@ -432,6 +428,7 @@ cperf_create_session(uint8_t dev_id,
 		cipher_xform.next = NULL;
 		cipher_xform.cipher.algo = options->cipher_algo;
 		cipher_xform.cipher.op = options->cipher_op;
+		cipher_xform.cipher.iv.offset = iv_offset;
 
 		/* cipher different than null */
 		if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
@@ -439,9 +436,11 @@ cperf_create_session(uint8_t dev_id,
 				test_vector->cipher_key.data;
 			cipher_xform.cipher.key.length =
 				test_vector->cipher_key.length;
+			cipher_xform.cipher.iv.length = test_vector->iv.length;
 		} else {
 			cipher_xform.cipher.key.data =
NULL; cipher_xform.cipher.key.length = 0; + cipher_xform.cipher.iv.length = 0; } /* diff --git a/app/test-crypto-perf/cperf_ops.h b/app/test-crypto-perf/cperf_ops.h index f7b431c721..bb83cd5b26 100644 --- a/app/test-crypto-perf/cperf_ops.h +++ b/app/test-crypto-perf/cperf_ops.h @@ -42,7 +42,8 @@ typedef struct rte_cryptodev_sym_session *(*cperf_sessions_create_t)( uint8_t dev_id, const struct cperf_options *options, - const struct cperf_test_vector *test_vector); + const struct cperf_test_vector *test_vector, + uint16_t iv_offset); typedef int (*cperf_populate_ops_t)(struct rte_crypto_op **ops, struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out, diff --git a/app/test-crypto-perf/cperf_test_latency.c b/app/test-crypto-perf/cperf_test_latency.c index c33129ba11..bc22a89bca 100644 --- a/app/test-crypto-perf/cperf_test_latency.c +++ b/app/test-crypto-perf/cperf_test_latency.c @@ -211,7 +211,12 @@ cperf_latency_test_constructor(uint8_t dev_id, uint16_t qp_id, ctx->options = options; ctx->test_vector = test_vector; - ctx->sess = op_fns->sess_create(dev_id, options, test_vector); + /* IV goes at the end of the crypto operation */ + uint16_t iv_offset = sizeof(struct rte_crypto_op) + + sizeof(struct rte_crypto_sym_op) + + sizeof(struct cperf_op_result *); + + ctx->sess = op_fns->sess_create(dev_id, options, test_vector, iv_offset); if (ctx->sess == NULL) goto err; diff --git a/app/test-crypto-perf/cperf_test_throughput.c b/app/test-crypto-perf/cperf_test_throughput.c index 5a90eb0cb2..d043f60bc2 100644 --- a/app/test-crypto-perf/cperf_test_throughput.c +++ b/app/test-crypto-perf/cperf_test_throughput.c @@ -195,7 +195,11 @@ cperf_throughput_test_constructor(uint8_t dev_id, uint16_t qp_id, ctx->options = options; ctx->test_vector = test_vector; - ctx->sess = op_fns->sess_create(dev_id, options, test_vector); + /* IV goes at the end of the cryptop operation */ + uint16_t iv_offset = sizeof(struct rte_crypto_op) + + sizeof(struct rte_crypto_sym_op); + + ctx->sess = op_fns->sess_create(dev_id, options, test_vector, iv_offset); if (ctx->sess == NULL) goto err; diff --git a/app/test-crypto-perf/cperf_test_vectors.c b/app/test-crypto-perf/cperf_test_vectors.c index 36b3f6f128..e29d7da5ed 100644 --- a/app/test-crypto-perf/cperf_test_vectors.c +++ b/app/test-crypto-perf/cperf_test_vectors.c @@ -1,3 +1,35 @@ +/*- + * BSD LICENSE + * + * Copyright(c) 2016-2017 Intel Corporation. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in + * the documentation and/or other materials provided with the + * distribution. + * * Neither the name of Intel Corporation nor the names of its + * contributors may be used to endorse or promote products derived + * from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT + * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + #include #include @@ -423,7 +455,16 @@ cperf_test_vector_get_dummy(struct cperf_options *options) memcpy(t_vec->iv.data, iv, options->cipher_iv_sz); } t_vec->ciphertext.length = options->max_buffer_size; + /* Set IV parameters */ + t_vec->iv.data = rte_malloc(NULL, options->cipher_iv_sz, + 16); + if (options->cipher_iv_sz && t_vec->iv.data == NULL) { + rte_free(t_vec); + return NULL; + } + memcpy(t_vec->iv.data, iv, options->cipher_iv_sz); t_vec->iv.length = options->cipher_iv_sz; + t_vec->data.cipher_offset = 0; t_vec->data.cipher_length = options->max_buffer_size; } diff --git a/app/test-crypto-perf/cperf_test_verify.c b/app/test-crypto-perf/cperf_test_verify.c index be684a63ec..e6f20c6ffb 100644 --- a/app/test-crypto-perf/cperf_test_verify.c +++ b/app/test-crypto-perf/cperf_test_verify.c @@ -199,7 +199,11 @@ cperf_verify_test_constructor(uint8_t dev_id, uint16_t qp_id, ctx->options = options; ctx->test_vector = test_vector; - ctx->sess = op_fns->sess_create(dev_id, options, test_vector); + /* IV goes at the end of the cryptop operation */ + uint16_t iv_offset = sizeof(struct rte_crypto_op) + + sizeof(struct rte_crypto_sym_op); + + ctx->sess = op_fns->sess_create(dev_id, options, test_vector, iv_offset); if (ctx->sess == NULL) goto err; diff --git a/doc/guides/prog_guide/cryptodev_lib.rst b/doc/guides/prog_guide/cryptodev_lib.rst index 48c58a96da..4e352f46eb 100644 --- a/doc/guides/prog_guide/cryptodev_lib.rst +++ b/doc/guides/prog_guide/cryptodev_lib.rst @@ -535,11 +535,6 @@ chain. uint32_t offset; uint32_t length; } data; /**< Data offsets and length for ciphering */ - - struct { - uint16_t offset; - uint16_t length; - } iv; /**< Initialisation vector parameters */ } cipher; struct { diff --git a/doc/guides/rel_notes/release_17_08.rst b/doc/guides/rel_notes/release_17_08.rst index 3ee6db34c1..1b4cd260ec 100644 --- a/doc/guides/rel_notes/release_17_08.rst +++ b/doc/guides/rel_notes/release_17_08.rst @@ -92,6 +92,7 @@ New Features * Removed field ``rte_crypto_sym_op_sess_type``. * Replaced pointer and physical address of IV with offset from the start of the crypto operation. + * Moved length and offset of cipher IV to ``rte_crypto_cipher_xform``. * **Reorganized the crypto operation structure.** @@ -191,6 +192,10 @@ ABI Changes Some fields have been modified in the ``rte_crypto_op`` and ``rte_crypto_sym_op`` structures, as described in the `New Features`_ section. +* **Reorganized the ``rte_crypto_sym_cipher_xform`` structure.** + + * Added cipher IV length and offset parameters. 
+ Shared Library Versions ----------------------- diff --git a/drivers/crypto/aesni_gcm/aesni_gcm_pmd.c b/drivers/crypto/aesni_gcm/aesni_gcm_pmd.c index 61be723ee7..f7758291f9 100644 --- a/drivers/crypto/aesni_gcm/aesni_gcm_pmd.c +++ b/drivers/crypto/aesni_gcm/aesni_gcm_pmd.c @@ -104,6 +104,17 @@ aesni_gcm_set_session_parameters(struct aesni_gcm_session *sess, return -EINVAL; } + /* Set IV parameters */ + sess->iv.offset = cipher_xform->cipher.iv.offset; + sess->iv.length = cipher_xform->cipher.iv.length; + + /* IV check */ + if (sess->iv.length != 16 && sess->iv.length != 12 && + sess->iv.length != 0) { + GCM_LOG_ERR("Wrong IV length"); + return -EINVAL; + } + /* Select Crypto operation */ if (cipher_xform->cipher.op == RTE_CRYPTO_CIPHER_OP_ENCRYPT && auth_xform->auth.op == RTE_CRYPTO_AUTH_OP_GENERATE) @@ -221,20 +232,13 @@ process_gcm_crypto_op(struct rte_crypto_op *op, src = rte_pktmbuf_mtod_offset(m_src, uint8_t *, offset); - /* sanity checks */ - if (sym_op->cipher.iv.length != 16 && sym_op->cipher.iv.length != 12 && - sym_op->cipher.iv.length != 0) { - GCM_LOG_ERR("iv"); - return -1; - } - iv_ptr = rte_crypto_op_ctod_offset(op, uint8_t *, - sym_op->cipher.iv.offset); + session->iv.offset); /* * GCM working in 12B IV mode => 16B pre-counter block we need * to set BE LSB to 1, driver expects that 16B is allocated */ - if (sym_op->cipher.iv.length == 12) { + if (session->iv.length == 12) { uint32_t *iv_padd = (uint32_t *)&(iv_ptr[12]); *iv_padd = rte_bswap32(1); } diff --git a/drivers/crypto/aesni_gcm/aesni_gcm_pmd_private.h b/drivers/crypto/aesni_gcm/aesni_gcm_pmd_private.h index 0496b44700..2ed96f8a33 100644 --- a/drivers/crypto/aesni_gcm/aesni_gcm_pmd_private.h +++ b/drivers/crypto/aesni_gcm/aesni_gcm_pmd_private.h @@ -90,6 +90,11 @@ enum aesni_gcm_key { /** AESNI GCM private session structure */ struct aesni_gcm_session { + struct { + uint16_t length; + uint16_t offset; + } iv; + /**< IV parameters */ enum aesni_gcm_operation op; /**< GCM operation type */ enum aesni_gcm_key key; diff --git a/drivers/crypto/aesni_mb/rte_aesni_mb_pmd.c b/drivers/crypto/aesni_mb/rte_aesni_mb_pmd.c index ece7d00cdc..8a96fec98d 100644 --- a/drivers/crypto/aesni_mb/rte_aesni_mb_pmd.c +++ b/drivers/crypto/aesni_mb/rte_aesni_mb_pmd.c @@ -246,6 +246,10 @@ aesni_mb_set_session_cipher_parameters(const struct aesni_mb_op_fns *mb_ops, return -1; } + /* Set IV parameters */ + sess->iv.offset = xform->cipher.iv.offset; + sess->iv.length = xform->cipher.iv.length; + /* Expanded cipher keys */ (*aes_keyexp_fn)(xform->cipher.key.data, sess->cipher.expanded_aes_keys.encode, @@ -300,6 +304,9 @@ aesni_mb_set_session_parameters(const struct aesni_mb_op_fns *mb_ops, return -1; } + /* Default IV length = 0 */ + sess->iv.length = 0; + if (aesni_mb_set_session_auth_parameters(mb_ops, sess, auth_xform)) { MB_LOG_ERR("Invalid/unsupported authentication parameters"); return -1; @@ -472,8 +479,8 @@ set_mb_job_params(JOB_AES_HMAC *job, struct aesni_mb_qp *qp, /* Set IV parameters */ job->iv = rte_crypto_op_ctod_offset(op, uint8_t *, - op->sym->cipher.iv.offset); - job->iv_len_in_bytes = op->sym->cipher.iv.length; + session->iv.offset); + job->iv_len_in_bytes = session->iv.length; /* Data Parameter */ job->src = rte_pktmbuf_mtod(m_src, uint8_t *); diff --git a/drivers/crypto/aesni_mb/rte_aesni_mb_pmd_private.h b/drivers/crypto/aesni_mb/rte_aesni_mb_pmd_private.h index 0d82699c3c..5c50d375f9 100644 --- a/drivers/crypto/aesni_mb/rte_aesni_mb_pmd_private.h +++ b/drivers/crypto/aesni_mb/rte_aesni_mb_pmd_private.h @@ -167,6 +167,11 @@ 
struct aesni_mb_qp { /** AES-NI multi-buffer private session structure */ struct aesni_mb_session { JOB_CHAIN_ORDER chain_order; + struct { + uint16_t length; + uint16_t offset; + } iv; + /**< IV parameters */ /** Cipher Parameters */ struct { diff --git a/drivers/crypto/armv8/rte_armv8_pmd.c b/drivers/crypto/armv8/rte_armv8_pmd.c index 693eccd138..dac4fc3c15 100644 --- a/drivers/crypto/armv8/rte_armv8_pmd.c +++ b/drivers/crypto/armv8/rte_armv8_pmd.c @@ -432,7 +432,7 @@ armv8_crypto_set_session_chained_parameters(struct armv8_crypto_session *sess, case RTE_CRYPTO_CIPHER_AES_CBC: sess->cipher.algo = calg; /* IV len is always 16 bytes (block size) for AES CBC */ - sess->cipher.iv_len = 16; + sess->cipher.iv.length = 16; break; default: return -EINVAL; @@ -523,6 +523,9 @@ armv8_crypto_set_session_parameters(struct armv8_crypto_session *sess, return -EINVAL; } + /* Set IV offset */ + sess->cipher.iv.offset = cipher_xform->cipher.iv.offset; + if (is_chained_op) { ret = armv8_crypto_set_session_chained_parameters(sess, cipher_xform, auth_xform); @@ -649,13 +652,8 @@ process_armv8_chained_op op->sym->auth.digest.length); } - if (unlikely(op->sym->cipher.iv.length != sess->cipher.iv_len)) { - op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS; - return; - } - arg.cipher.iv = rte_crypto_op_ctod_offset(op, uint8_t *, - op->sym->cipher.iv.offset); + sess->cipher.iv.offset); arg.cipher.key = sess->cipher.key.data; /* Acquire combined mode function */ crypto_func = sess->crypto_func; diff --git a/drivers/crypto/armv8/rte_armv8_pmd_private.h b/drivers/crypto/armv8/rte_armv8_pmd_private.h index b75107f20c..75bde9ff9a 100644 --- a/drivers/crypto/armv8/rte_armv8_pmd_private.h +++ b/drivers/crypto/armv8/rte_armv8_pmd_private.h @@ -159,8 +159,11 @@ struct armv8_crypto_session { /**< cipher operation direction */ enum rte_crypto_cipher_algorithm algo; /**< cipher algorithm */ - int iv_len; - /**< IV length */ + struct { + uint16_t length; + uint16_t offset; + } iv; + /**< IV parameters */ struct { uint8_t data[256]; diff --git a/drivers/crypto/dpaa2_sec/dpaa2_sec_dpseci.c b/drivers/crypto/dpaa2_sec/dpaa2_sec_dpseci.c index 1605701b3c..3930794053 100644 --- a/drivers/crypto/dpaa2_sec/dpaa2_sec_dpseci.c +++ b/drivers/crypto/dpaa2_sec/dpaa2_sec_dpseci.c @@ -88,7 +88,7 @@ build_authenc_fd(dpaa2_sec_session *sess, uint8_t *old_icv; uint32_t mem_len = (7 * sizeof(struct qbman_fle)) + icv_len; uint8_t *iv_ptr = rte_crypto_op_ctod_offset(op, uint8_t *, - op->sym->cipher.iv.offset); + sess->iv.offset); PMD_INIT_FUNC_TRACE(); @@ -138,7 +138,7 @@ build_authenc_fd(dpaa2_sec_session *sess, sym_op->auth.digest.length, sym_op->cipher.data.offset, sym_op->cipher.data.length, - sym_op->cipher.iv.length, + sess->iv.length, sym_op->m_src->data_off); /* Configure Output FLE with Scatter/Gather Entry */ @@ -163,7 +163,7 @@ build_authenc_fd(dpaa2_sec_session *sess, DPAA2_VADDR_TO_IOVA(sym_op->auth.digest.data)); sge->length = sym_op->auth.digest.length; DPAA2_SET_FD_LEN(fd, (sym_op->auth.data.length + - sym_op->cipher.iv.length)); + sess->iv.length)); } DPAA2_SET_FLE_FIN(sge); @@ -175,13 +175,13 @@ build_authenc_fd(dpaa2_sec_session *sess, DPAA2_SET_FLE_SG_EXT(fle); DPAA2_SET_FLE_FIN(fle); fle->length = (sess->dir == DIR_ENC) ? 
- (sym_op->auth.data.length + sym_op->cipher.iv.length) : - (sym_op->auth.data.length + sym_op->cipher.iv.length + + (sym_op->auth.data.length + sess->iv.length) : + (sym_op->auth.data.length + sess->iv.length + sym_op->auth.digest.length); /* Configure Input SGE for Encap/Decap */ DPAA2_SET_FLE_ADDR(sge, DPAA2_VADDR_TO_IOVA(iv_ptr)); - sge->length = sym_op->cipher.iv.length; + sge->length = sess->iv.length; sge++; DPAA2_SET_FLE_ADDR(sge, DPAA2_MBUF_VADDR_TO_IOVA(sym_op->m_src)); @@ -198,7 +198,7 @@ build_authenc_fd(dpaa2_sec_session *sess, sge->length = sym_op->auth.digest.length; DPAA2_SET_FD_LEN(fd, (sym_op->auth.data.length + sym_op->auth.digest.length + - sym_op->cipher.iv.length)); + sess->iv.length)); } DPAA2_SET_FLE_FIN(sge); if (auth_only_len) { @@ -310,7 +310,7 @@ build_cipher_fd(dpaa2_sec_session *sess, struct rte_crypto_op *op, struct sec_flow_context *flc; struct ctxt_priv *priv = sess->ctxt; uint8_t *iv_ptr = rte_crypto_op_ctod_offset(op, uint8_t *, - op->sym->cipher.iv.offset); + sess->iv.offset); PMD_INIT_FUNC_TRACE(); @@ -347,21 +347,21 @@ build_cipher_fd(dpaa2_sec_session *sess, struct rte_crypto_op *op, flc = &priv->flc_desc[0].flc; DPAA2_SET_FD_ADDR(fd, DPAA2_VADDR_TO_IOVA(fle)); DPAA2_SET_FD_LEN(fd, sym_op->cipher.data.length + - sym_op->cipher.iv.length); + sess->iv.length); DPAA2_SET_FD_COMPOUND_FMT(fd); DPAA2_SET_FD_FLC(fd, DPAA2_VADDR_TO_IOVA(flc)); PMD_TX_LOG(DEBUG, "cipher_off: 0x%x/length %d,ivlen=%d data_off: 0x%x", sym_op->cipher.data.offset, sym_op->cipher.data.length, - sym_op->cipher.iv.length, + sess->iv.length, sym_op->m_src->data_off); DPAA2_SET_FLE_ADDR(fle, DPAA2_MBUF_VADDR_TO_IOVA(sym_op->m_src)); DPAA2_SET_FLE_OFFSET(fle, sym_op->cipher.data.offset + sym_op->m_src->data_off); - fle->length = sym_op->cipher.data.length + sym_op->cipher.iv.length; + fle->length = sym_op->cipher.data.length + sess->iv.length; PMD_TX_LOG(DEBUG, "1 - flc = %p, fle = %p FLEaddr = %x-%x, length %d", flc, fle, fle->addr_hi, fle->addr_lo, fle->length); @@ -369,12 +369,12 @@ build_cipher_fd(dpaa2_sec_session *sess, struct rte_crypto_op *op, fle++; DPAA2_SET_FLE_ADDR(fle, DPAA2_VADDR_TO_IOVA(sge)); - fle->length = sym_op->cipher.data.length + sym_op->cipher.iv.length; + fle->length = sym_op->cipher.data.length + sess->iv.length; DPAA2_SET_FLE_SG_EXT(fle); DPAA2_SET_FLE_ADDR(sge, DPAA2_VADDR_TO_IOVA(iv_ptr)); - sge->length = sym_op->cipher.iv.length; + sge->length = sess->iv.length; sge++; DPAA2_SET_FLE_ADDR(sge, DPAA2_MBUF_VADDR_TO_IOVA(sym_op->m_src)); @@ -798,6 +798,10 @@ dpaa2_sec_cipher_init(struct rte_cryptodev *dev, cipherdata.key_enc_flags = 0; cipherdata.key_type = RTA_DATA_IMM; + /* Set IV parameters */ + session->iv.offset = xform->cipher.iv.offset; + session->iv.length = xform->cipher.iv.length; + switch (xform->cipher.algo) { case RTE_CRYPTO_CIPHER_AES_CBC: cipherdata.algtype = OP_ALG_ALGSEL_AES; @@ -1016,6 +1020,11 @@ dpaa2_sec_aead_init(struct rte_cryptodev *dev, (cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT) ? 
DPAA2_SEC_HASH_CIPHER : DPAA2_SEC_CIPHER_HASH; } + + /* Set IV parameters */ + session->iv.offset = cipher_xform->iv.offset; + session->iv.length = cipher_xform->iv.length; + /* For SEC AEAD only one descriptor is required */ priv = (struct ctxt_priv *)rte_zmalloc(NULL, sizeof(struct ctxt_priv) + sizeof(struct sec_flc_desc), @@ -1216,6 +1225,10 @@ dpaa2_sec_session_configure(struct rte_cryptodev *dev, RTE_LOG(ERR, PMD, "invalid session struct"); return NULL; } + + /* Default IV length = 0 */ + session->iv.length = 0; + /* Cipher Only */ if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER && xform->next == NULL) { session->ctxt_type = DPAA2_SEC_CIPHER; diff --git a/drivers/crypto/dpaa2_sec/dpaa2_sec_priv.h b/drivers/crypto/dpaa2_sec/dpaa2_sec_priv.h index f5c6169438..d152161baa 100644 --- a/drivers/crypto/dpaa2_sec/dpaa2_sec_priv.h +++ b/drivers/crypto/dpaa2_sec/dpaa2_sec_priv.h @@ -187,6 +187,10 @@ typedef struct dpaa2_sec_session_entry { uint8_t *data; /**< pointer to key data */ size_t length; /**< key length in bytes */ } auth_key; + struct { + uint16_t length; /**< IV length in bytes */ + uint16_t offset; /**< IV offset in bytes */ + } iv; uint8_t status; union { struct dpaa2_sec_cipher_ctxt cipher_ctxt; @@ -275,8 +279,8 @@ static const struct rte_cryptodev_capabilities dpaa2_sec_capabilities[] = { .min = 32, .max = 32, .increment = 0 - }, - .aad_size = { 0 } + }, + .aad_size = { 0 } }, } }, } }, diff --git a/drivers/crypto/kasumi/rte_kasumi_pmd.c b/drivers/crypto/kasumi/rte_kasumi_pmd.c index 290f67063f..810699fe09 100644 --- a/drivers/crypto/kasumi/rte_kasumi_pmd.c +++ b/drivers/crypto/kasumi/rte_kasumi_pmd.c @@ -116,6 +116,13 @@ kasumi_set_session_parameters(struct kasumi_session *sess, /* Only KASUMI F8 supported */ if (cipher_xform->cipher.algo != RTE_CRYPTO_CIPHER_KASUMI_F8) return -EINVAL; + + sess->iv_offset = cipher_xform->cipher.iv.offset; + if (cipher_xform->cipher.iv.length != KASUMI_IV_LENGTH) { + KASUMI_LOG_ERR("Wrong IV length"); + return -EINVAL; + } + /* Initialize key */ sso_kasumi_init_f8_key_sched(cipher_xform->cipher.key.data, &sess->pKeySched_cipher); @@ -179,13 +186,6 @@ process_kasumi_cipher_op(struct rte_crypto_op **ops, uint32_t num_bytes[num_ops]; for (i = 0; i < num_ops; i++) { - /* Sanity checks. */ - if (ops[i]->sym->cipher.iv.length != KASUMI_IV_LENGTH) { - ops[i]->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS; - KASUMI_LOG_ERR("iv"); - break; - } - src[i] = rte_pktmbuf_mtod(ops[i]->sym->m_src, uint8_t *) + (ops[i]->sym->cipher.data.offset >> 3); dst[i] = ops[i]->sym->m_dst ? @@ -194,7 +194,7 @@ process_kasumi_cipher_op(struct rte_crypto_op **ops, rte_pktmbuf_mtod(ops[i]->sym->m_src, uint8_t *) + (ops[i]->sym->cipher.data.offset >> 3); iv_ptr = rte_crypto_op_ctod_offset(ops[i], uint8_t *, - ops[i]->sym->cipher.iv.offset); + session->iv_offset); iv[i] = *((uint64_t *)(iv_ptr)); num_bytes[i] = ops[i]->sym->cipher.data.length >> 3; @@ -218,13 +218,6 @@ process_kasumi_cipher_op_bit(struct rte_crypto_op *op, uint64_t iv; uint32_t length_in_bits, offset_in_bits; - /* Sanity checks. 
*/ - if (unlikely(op->sym->cipher.iv.length != KASUMI_IV_LENGTH)) { - op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS; - KASUMI_LOG_ERR("iv"); - return 0; - } - offset_in_bits = op->sym->cipher.data.offset; src = rte_pktmbuf_mtod(op->sym->m_src, uint8_t *); if (op->sym->m_dst == NULL) { @@ -234,7 +227,7 @@ process_kasumi_cipher_op_bit(struct rte_crypto_op *op, } dst = rte_pktmbuf_mtod(op->sym->m_dst, uint8_t *); iv_ptr = rte_crypto_op_ctod_offset(op, uint8_t *, - op->sym->cipher.iv.offset); + session->iv_offset); iv = *((uint64_t *)(iv_ptr)); length_in_bits = op->sym->cipher.data.length; diff --git a/drivers/crypto/kasumi/rte_kasumi_pmd_private.h b/drivers/crypto/kasumi/rte_kasumi_pmd_private.h index fb586caaf3..6a0d47a846 100644 --- a/drivers/crypto/kasumi/rte_kasumi_pmd_private.h +++ b/drivers/crypto/kasumi/rte_kasumi_pmd_private.h @@ -92,6 +92,7 @@ struct kasumi_session { sso_kasumi_key_sched_t pKeySched_hash; enum kasumi_operation op; enum rte_crypto_auth_operation auth_op; + uint16_t iv_offset; } __rte_cache_aligned; diff --git a/drivers/crypto/null/null_crypto_pmd_ops.c b/drivers/crypto/null/null_crypto_pmd_ops.c index 12c946c989..5f74f0c989 100644 --- a/drivers/crypto/null/null_crypto_pmd_ops.c +++ b/drivers/crypto/null/null_crypto_pmd_ops.c @@ -72,11 +72,7 @@ static const struct rte_cryptodev_capabilities null_crypto_pmd_capabilities[] = .max = 0, .increment = 0 }, - .iv_size = { - .min = 0, - .max = 0, - .increment = 0 - } + .iv_size = { 0 } }, }, }, } }, diff --git a/drivers/crypto/openssl/rte_openssl_pmd.c b/drivers/crypto/openssl/rte_openssl_pmd.c index d3f16c9a6c..59d6915b8e 100644 --- a/drivers/crypto/openssl/rte_openssl_pmd.c +++ b/drivers/crypto/openssl/rte_openssl_pmd.c @@ -264,6 +264,10 @@ openssl_set_session_cipher_parameters(struct openssl_session *sess, /* Select cipher key */ sess->cipher.key.length = xform->cipher.key.length; + /* Set IV parameters */ + sess->iv.offset = xform->cipher.iv.offset; + sess->iv.length = xform->cipher.iv.length; + /* Select cipher algo */ switch (xform->cipher.algo) { case RTE_CRYPTO_CIPHER_3DES_CBC: @@ -397,6 +401,9 @@ openssl_set_session_parameters(struct openssl_session *sess, return -EINVAL; } + /* Default IV length = 0 */ + sess->iv.length = 0; + /* cipher_xform must be check before auth_xform */ if (cipher_xform) { if (openssl_set_session_cipher_parameters( @@ -924,8 +931,8 @@ process_openssl_combined_op } iv = rte_crypto_op_ctod_offset(op, uint8_t *, - op->sym->cipher.iv.offset); - ivlen = op->sym->cipher.iv.length; + sess->iv.offset); + ivlen = sess->iv.length; aad = op->sym->auth.aad.data; aadlen = op->sym->auth.aad.length; @@ -989,7 +996,7 @@ process_openssl_cipher_op op->sym->cipher.data.offset); iv = rte_crypto_op_ctod_offset(op, uint8_t *, - op->sym->cipher.iv.offset); + sess->iv.offset); if (sess->cipher.mode == OPENSSL_CIPHER_LIB) if (sess->cipher.direction == RTE_CRYPTO_CIPHER_OP_ENCRYPT) @@ -1031,7 +1038,7 @@ process_openssl_docsis_bpi_op(struct rte_crypto_op *op, op->sym->cipher.data.offset); iv = rte_crypto_op_ctod_offset(op, uint8_t *, - op->sym->cipher.iv.offset); + sess->iv.offset); block_size = DES_BLOCK_SIZE; @@ -1090,7 +1097,7 @@ process_openssl_docsis_bpi_op(struct rte_crypto_op *op, last_block_len, sess->cipher.bpi_ctx); /* Prepare parameters for CBC mode op */ iv = rte_crypto_op_ctod_offset(op, uint8_t *, - op->sym->cipher.iv.offset); + sess->iv.offset); dst += last_block_len - srclen; srclen -= last_block_len; } diff --git a/drivers/crypto/openssl/rte_openssl_pmd_private.h 
b/drivers/crypto/openssl/rte_openssl_pmd_private.h index 4d820c51bb..3a64853e1e 100644 --- a/drivers/crypto/openssl/rte_openssl_pmd_private.h +++ b/drivers/crypto/openssl/rte_openssl_pmd_private.h @@ -108,6 +108,11 @@ struct openssl_session { enum openssl_chain_order chain_order; /**< chain order mode */ + struct { + uint16_t length; + uint16_t offset; + } iv; + /**< IV parameters */ /** Cipher Parameters */ struct { enum rte_crypto_cipher_operation direction; diff --git a/drivers/crypto/qat/qat_adf/qat_algs.h b/drivers/crypto/qat/qat_adf/qat_algs.h index 5c63406bc7..e8fa3d34be 100644 --- a/drivers/crypto/qat/qat_adf/qat_algs.h +++ b/drivers/crypto/qat/qat_adf/qat_algs.h @@ -127,6 +127,10 @@ struct qat_session { struct icp_qat_fw_la_bulk_req fw_req; uint8_t aad_len; struct qat_crypto_instance *inst; + struct { + uint16_t offset; + uint16_t length; + } iv; rte_spinlock_t lock; /* protects this struct */ }; diff --git a/drivers/crypto/qat/qat_crypto.c b/drivers/crypto/qat/qat_crypto.c index b7f891b5c4..f0c37bc562 100644 --- a/drivers/crypto/qat/qat_crypto.c +++ b/drivers/crypto/qat/qat_crypto.c @@ -298,6 +298,9 @@ qat_crypto_sym_configure_session_cipher(struct rte_cryptodev *dev, /* Get cipher xform from crypto xform chain */ cipher_xform = qat_get_cipher_xform(xform); + session->iv.offset = cipher_xform->iv.offset; + session->iv.length = cipher_xform->iv.length; + switch (cipher_xform->algo) { case RTE_CRYPTO_CIPHER_AES_CBC: if (qat_alg_validate_aes_key(cipher_xform->key.length, @@ -643,7 +646,7 @@ qat_bpicipher_preprocess(struct qat_session *ctx, else /* runt block, i.e. less than one full block */ iv = rte_crypto_op_ctod_offset(op, uint8_t *, - sym_op->cipher.iv.offset); + ctx->iv.offset); #ifdef RTE_LIBRTE_PMD_QAT_DEBUG_TX rte_hexdump(stdout, "BPI: src before pre-process:", last_block, @@ -699,7 +702,7 @@ qat_bpicipher_postprocess(struct qat_session *ctx, else /* runt block, i.e. 
less than one full block */ iv = rte_crypto_op_ctod_offset(op, uint8_t *, - sym_op->cipher.iv.offset); + ctx->iv.offset); #ifdef RTE_LIBRTE_PMD_QAT_DEBUG_RX rte_hexdump(stdout, "BPI: src before post-process:", last_block, @@ -975,27 +978,20 @@ qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg, } iv_ptr = rte_crypto_op_ctod_offset(op, uint8_t *, - op->sym->cipher.iv.offset); + ctx->iv.offset); /* copy IV into request if it fits */ - /* - * If IV length is zero do not copy anything but still - * use request descriptor embedded IV - * - */ - if (op->sym->cipher.iv.length) { - if (op->sym->cipher.iv.length <= - sizeof(cipher_param->u.cipher_IV_array)) { - rte_memcpy(cipher_param->u.cipher_IV_array, - iv_ptr, - op->sym->cipher.iv.length); - } else { - ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET( - qat_req->comn_hdr.serv_specif_flags, - ICP_QAT_FW_CIPH_IV_64BIT_PTR); - cipher_param->u.s.cipher_IV_ptr = - rte_crypto_op_ctophys_offset(op, - op->sym->cipher.iv.offset); - } + if (ctx->iv.length <= + sizeof(cipher_param->u.cipher_IV_array)) { + rte_memcpy(cipher_param->u.cipher_IV_array, + iv_ptr, + ctx->iv.length); + } else { + ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET( + qat_req->comn_hdr.serv_specif_flags, + ICP_QAT_FW_CIPH_IV_64BIT_PTR); + cipher_param->u.s.cipher_IV_ptr = + rte_crypto_op_ctophys_offset(op, + ctx->iv.offset); } min_ofs = cipher_ofs; } @@ -1151,7 +1147,7 @@ qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg, if (ctx->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_128 || ctx->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_64) { - if (op->sym->cipher.iv.length == 12) { + if (ctx->iv.length == 12) { /* * For GCM a 12 byte IV is allowed, * but we need to inform the f/w @@ -1187,7 +1183,7 @@ qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg, rte_pktmbuf_data_len(op->sym->m_src)); if (do_cipher) rte_hexdump(stdout, "iv:", iv_ptr, - op->sym->cipher.iv.length); + ctx->iv.length); if (do_auth) { rte_hexdump(stdout, "digest:", op->sym->auth.digest.data, diff --git a/drivers/crypto/snow3g/rte_snow3g_pmd.c b/drivers/crypto/snow3g/rte_snow3g_pmd.c index 68121b0e0d..b21691f409 100644 --- a/drivers/crypto/snow3g/rte_snow3g_pmd.c +++ b/drivers/crypto/snow3g/rte_snow3g_pmd.c @@ -116,6 +116,13 @@ snow3g_set_session_parameters(struct snow3g_session *sess, /* Only SNOW 3G UEA2 supported */ if (cipher_xform->cipher.algo != RTE_CRYPTO_CIPHER_SNOW3G_UEA2) return -EINVAL; + + if (cipher_xform->cipher.iv.length != SNOW3G_IV_LENGTH) { + SNOW3G_LOG_ERR("Wrong IV length"); + return -EINVAL; + } + sess->iv_offset = cipher_xform->cipher.iv.offset; + /* Initialize key */ sso_snow3g_init_key_sched(cipher_xform->cipher.key.data, &sess->pKeySched_cipher); @@ -178,13 +185,6 @@ process_snow3g_cipher_op(struct rte_crypto_op **ops, uint32_t num_bytes[SNOW3G_MAX_BURST]; for (i = 0; i < num_ops; i++) { - /* Sanity checks. */ - if (unlikely(ops[i]->sym->cipher.iv.length != SNOW3G_IV_LENGTH)) { - ops[i]->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS; - SNOW3G_LOG_ERR("iv"); - break; - } - src[i] = rte_pktmbuf_mtod(ops[i]->sym->m_src, uint8_t *) + (ops[i]->sym->cipher.data.offset >> 3); dst[i] = ops[i]->sym->m_dst ? 
@@ -193,7 +193,7 @@ process_snow3g_cipher_op(struct rte_crypto_op **ops, rte_pktmbuf_mtod(ops[i]->sym->m_src, uint8_t *) + (ops[i]->sym->cipher.data.offset >> 3); iv[i] = rte_crypto_op_ctod_offset(ops[i], uint8_t *, - ops[i]->sym->cipher.iv.offset); + session->iv_offset); num_bytes[i] = ops[i]->sym->cipher.data.length >> 3; processed_ops++; @@ -214,13 +214,6 @@ process_snow3g_cipher_op_bit(struct rte_crypto_op *op, uint8_t *iv; uint32_t length_in_bits, offset_in_bits; - /* Sanity checks. */ - if (unlikely(op->sym->cipher.iv.length != SNOW3G_IV_LENGTH)) { - op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS; - SNOW3G_LOG_ERR("iv"); - return 0; - } - offset_in_bits = op->sym->cipher.data.offset; src = rte_pktmbuf_mtod(op->sym->m_src, uint8_t *); if (op->sym->m_dst == NULL) { @@ -230,7 +223,7 @@ process_snow3g_cipher_op_bit(struct rte_crypto_op *op, } dst = rte_pktmbuf_mtod(op->sym->m_dst, uint8_t *); iv = rte_crypto_op_ctod_offset(op, uint8_t *, - op->sym->cipher.iv.offset); + session->iv_offset); length_in_bits = op->sym->cipher.data.length; sso_snow3g_f8_1_buffer_bit(&session->pKeySched_cipher, iv, diff --git a/drivers/crypto/snow3g/rte_snow3g_pmd_private.h b/drivers/crypto/snow3g/rte_snow3g_pmd_private.h index 03973b972b..e8943a7746 100644 --- a/drivers/crypto/snow3g/rte_snow3g_pmd_private.h +++ b/drivers/crypto/snow3g/rte_snow3g_pmd_private.h @@ -91,6 +91,7 @@ struct snow3g_session { enum rte_crypto_auth_operation auth_op; sso_snow3g_key_schedule_t pKeySched_cipher; sso_snow3g_key_schedule_t pKeySched_hash; + uint16_t iv_offset; } __rte_cache_aligned; diff --git a/drivers/crypto/zuc/rte_zuc_pmd.c b/drivers/crypto/zuc/rte_zuc_pmd.c index 3923e3c667..9f9298d065 100644 --- a/drivers/crypto/zuc/rte_zuc_pmd.c +++ b/drivers/crypto/zuc/rte_zuc_pmd.c @@ -115,6 +115,13 @@ zuc_set_session_parameters(struct zuc_session *sess, /* Only ZUC EEA3 supported */ if (cipher_xform->cipher.algo != RTE_CRYPTO_CIPHER_ZUC_EEA3) return -EINVAL; + + if (cipher_xform->cipher.iv.length != ZUC_IV_KEY_LENGTH) { + ZUC_LOG_ERR("Wrong IV length"); + return -EINVAL; + } + sess->iv_offset = cipher_xform->cipher.iv.offset; + /* Copy the key */ memcpy(sess->pKey_cipher, cipher_xform->cipher.key.data, ZUC_IV_KEY_LENGTH); @@ -178,13 +185,6 @@ process_zuc_cipher_op(struct rte_crypto_op **ops, uint8_t *cipher_keys[ZUC_MAX_BURST]; for (i = 0; i < num_ops; i++) { - /* Sanity checks. 
*/ - if (unlikely(ops[i]->sym->cipher.iv.length != ZUC_IV_KEY_LENGTH)) { - ops[i]->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS; - ZUC_LOG_ERR("iv"); - break; - } - if (((ops[i]->sym->cipher.data.length % BYTE_LEN) != 0) || ((ops[i]->sym->cipher.data.offset % BYTE_LEN) != 0)) { @@ -214,7 +214,7 @@ process_zuc_cipher_op(struct rte_crypto_op **ops, rte_pktmbuf_mtod(ops[i]->sym->m_src, uint8_t *) + (ops[i]->sym->cipher.data.offset >> 3); iv[i] = rte_crypto_op_ctod_offset(ops[i], uint8_t *, - ops[i]->sym->cipher.iv.offset); + session->iv_offset); num_bytes[i] = ops[i]->sym->cipher.data.length >> 3; cipher_keys[i] = session->pKey_cipher; diff --git a/drivers/crypto/zuc/rte_zuc_pmd_ops.c b/drivers/crypto/zuc/rte_zuc_pmd_ops.c index e793459c75..c24b9bd34a 100644 --- a/drivers/crypto/zuc/rte_zuc_pmd_ops.c +++ b/drivers/crypto/zuc/rte_zuc_pmd_ops.c @@ -80,7 +80,7 @@ static const struct rte_cryptodev_capabilities zuc_pmd_capabilities[] = { .min = 16, .max = 16, .increment = 0 - } + }, }, } }, } }, diff --git a/drivers/crypto/zuc/rte_zuc_pmd_private.h b/drivers/crypto/zuc/rte_zuc_pmd_private.h index 030f120b9a..cee1b5df4e 100644 --- a/drivers/crypto/zuc/rte_zuc_pmd_private.h +++ b/drivers/crypto/zuc/rte_zuc_pmd_private.h @@ -92,6 +92,7 @@ struct zuc_session { enum rte_crypto_auth_operation auth_op; uint8_t pKey_cipher[ZUC_IV_KEY_LENGTH]; uint8_t pKey_hash[ZUC_IV_KEY_LENGTH]; + uint16_t iv_offset; } __rte_cache_aligned; diff --git a/examples/ipsec-secgw/esp.c b/examples/ipsec-secgw/esp.c index 387ce4f8c1..ead4071efa 100644 --- a/examples/ipsec-secgw/esp.c +++ b/examples/ipsec-secgw/esp.c @@ -50,9 +50,6 @@ #include "esp.h" #include "ipip.h" -#define IV_OFFSET (sizeof(struct rte_crypto_op) + \ - sizeof(struct rte_crypto_sym_op)) - int esp_inbound(struct rte_mbuf *m, struct ipsec_sa *sa, struct rte_crypto_op *cop) @@ -104,8 +101,6 @@ esp_inbound(struct rte_mbuf *m, struct ipsec_sa *sa, case RTE_CRYPTO_CIPHER_AES_CBC: /* Copy IV at the end of crypto operation */ rte_memcpy(iv_ptr, iv, sa->iv_len); - sym_cop->cipher.iv.offset = IV_OFFSET; - sym_cop->cipher.iv.length = sa->iv_len; break; case RTE_CRYPTO_CIPHER_AES_CTR: case RTE_CRYPTO_CIPHER_AES_GCM: @@ -113,8 +108,6 @@ esp_inbound(struct rte_mbuf *m, struct ipsec_sa *sa, icb->salt = sa->salt; memcpy(&icb->iv, iv, 8); icb->cnt = rte_cpu_to_be_32(1); - sym_cop->cipher.iv.offset = IV_OFFSET; - sym_cop->cipher.iv.length = 16; break; default: RTE_LOG(ERR, IPSEC_ESP, "unsupported cipher algorithm %u\n", @@ -348,8 +341,6 @@ esp_outbound(struct rte_mbuf *m, struct ipsec_sa *sa, icb->salt = sa->salt; icb->iv = sa->seq; icb->cnt = rte_cpu_to_be_32(1); - sym_cop->cipher.iv.offset = IV_OFFSET; - sym_cop->cipher.iv.length = 16; uint8_t *aad; diff --git a/examples/ipsec-secgw/ipsec.h b/examples/ipsec-secgw/ipsec.h index d48b299591..97d67d0251 100644 --- a/examples/ipsec-secgw/ipsec.h +++ b/examples/ipsec-secgw/ipsec.h @@ -48,6 +48,9 @@ #define MAX_DIGEST_SIZE 32 /* Bytes -- 256 bits */ +#define IV_OFFSET (sizeof(struct rte_crypto_op) + \ + sizeof(struct rte_crypto_sym_op)) + #define uint32_t_to_char(ip, a, b, c, d) do {\ *a = (uint8_t)(ip >> 24 & 0xff);\ *b = (uint8_t)(ip >> 16 & 0xff);\ diff --git a/examples/ipsec-secgw/sa.c b/examples/ipsec-secgw/sa.c index 39624c4936..85e4d4e6a3 100644 --- a/examples/ipsec-secgw/sa.c +++ b/examples/ipsec-secgw/sa.c @@ -589,6 +589,7 @@ sa_add_rules(struct sa_ctx *sa_ctx, const struct ipsec_sa entries[], { struct ipsec_sa *sa; uint32_t i, idx; + uint16_t iv_length; for (i = 0; i < nb_entries; i++) { idx = SPI2IDX(entries[i].spi); @@ 
-607,6 +608,21 @@ sa_add_rules(struct sa_ctx *sa_ctx, const struct ipsec_sa entries[], sa->dst.ip.ip4 = rte_cpu_to_be_32(sa->dst.ip.ip4); } + switch (sa->cipher_algo) { + case RTE_CRYPTO_CIPHER_NULL: + case RTE_CRYPTO_CIPHER_AES_CBC: + iv_length = sa->iv_len; + break; + case RTE_CRYPTO_CIPHER_AES_CTR: + case RTE_CRYPTO_CIPHER_AES_GCM: + iv_length = 16; + break; + default: + RTE_LOG(ERR, IPSEC_ESP, "unsupported cipher algorithm %u\n", + sa->cipher_algo); + return -EINVAL; + } + if (inbound) { sa_ctx->xf[idx].b.type = RTE_CRYPTO_SYM_XFORM_CIPHER; sa_ctx->xf[idx].b.cipher.algo = sa->cipher_algo; @@ -615,6 +631,8 @@ sa_add_rules(struct sa_ctx *sa_ctx, const struct ipsec_sa entries[], sa->cipher_key_len; sa_ctx->xf[idx].b.cipher.op = RTE_CRYPTO_CIPHER_OP_DECRYPT; + sa_ctx->xf[idx].b.cipher.iv.offset = IV_OFFSET; + sa_ctx->xf[idx].b.cipher.iv.length = iv_length; sa_ctx->xf[idx].b.next = NULL; sa_ctx->xf[idx].a.type = RTE_CRYPTO_SYM_XFORM_AUTH; @@ -637,6 +655,8 @@ sa_add_rules(struct sa_ctx *sa_ctx, const struct ipsec_sa entries[], sa->cipher_key_len; sa_ctx->xf[idx].a.cipher.op = RTE_CRYPTO_CIPHER_OP_ENCRYPT; + sa_ctx->xf[idx].a.cipher.iv.offset = IV_OFFSET; + sa_ctx->xf[idx].a.cipher.iv.length = iv_length; sa_ctx->xf[idx].a.next = NULL; sa_ctx->xf[idx].b.type = RTE_CRYPTO_SYM_XFORM_AUTH; diff --git a/examples/l2fwd-crypto/main.c b/examples/l2fwd-crypto/main.c index c810b48c1a..d28dcf232c 100644 --- a/examples/l2fwd-crypto/main.c +++ b/examples/l2fwd-crypto/main.c @@ -139,6 +139,11 @@ struct l2fwd_key { phys_addr_t phys_addr; }; +struct l2fwd_iv { + uint8_t *data; + uint16_t length; +}; + /** l2fwd crypto application command line options */ struct l2fwd_crypto_options { unsigned portmask; @@ -155,8 +160,8 @@ struct l2fwd_crypto_options { unsigned ckey_param; int ckey_random_size; - struct l2fwd_key iv; - unsigned iv_param; + struct l2fwd_iv iv; + unsigned int iv_param; int iv_random_size; struct rte_crypto_sym_xform auth_xform; @@ -183,7 +188,7 @@ struct l2fwd_crypto_params { unsigned digest_length; unsigned block_size; - struct l2fwd_key iv; + struct l2fwd_iv iv; struct l2fwd_key aad; struct rte_cryptodev_sym_session *session; @@ -489,9 +494,6 @@ l2fwd_simple_crypto_enqueue(struct rte_mbuf *m, /* Copy IV at the end of the crypto operation */ rte_memcpy(iv_ptr, cparams->iv.data, cparams->iv.length); - op->sym->cipher.iv.offset = IV_OFFSET; - op->sym->cipher.iv.length = cparams->iv.length; - /* For wireless algorithms, offset/length must be in bits */ if (cparams->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 || cparams->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 || @@ -703,6 +705,9 @@ l2fwd_main_loop(struct l2fwd_crypto_options *options) port_cparams[i].iv.length); port_cparams[i].cipher_algo = options->cipher_xform.cipher.algo; + /* Set IV parameters */ + options->cipher_xform.cipher.iv.offset = IV_OFFSET; + options->cipher_xform.cipher.iv.length = options->iv.length; } port_cparams[i].session = initialize_crypto_session(options, @@ -1547,6 +1552,46 @@ check_supported_size(uint16_t length, uint16_t min, uint16_t max, return -1; } + +static int +check_iv_param(const struct rte_crypto_param_range *iv_range_size, + unsigned int iv_param, int iv_random_size, + uint16_t *iv_length) +{ + /* + * Check if length of provided IV is supported + * by the algorithm chosen. 
+ */ + if (iv_param) { + if (check_supported_size(*iv_length, + iv_range_size->min, + iv_range_size->max, + iv_range_size->increment) + != 0) { + printf("Unsupported IV length\n"); + return -1; + } + /* + * Check if length of IV to be randomly generated + * is supported by the algorithm chosen. + */ + } else if (iv_random_size != -1) { + if (check_supported_size(iv_random_size, + iv_range_size->min, + iv_range_size->max, + iv_range_size->increment) + != 0) { + printf("Unsupported IV length\n"); + return -1; + } + *iv_length = iv_random_size; + /* No size provided, use minimum size. */ + } else + *iv_length = iv_range_size->min; + + return 0; +} + static int initialize_cryptodevs(struct l2fwd_crypto_options *options, unsigned nb_ports, uint8_t *enabled_cdevs) @@ -1614,36 +1659,9 @@ initialize_cryptodevs(struct l2fwd_crypto_options *options, unsigned nb_ports, } options->block_size = cap->sym.cipher.block_size; - /* - * Check if length of provided IV is supported - * by the algorithm chosen. - */ - if (options->iv_param) { - if (check_supported_size(options->iv.length, - cap->sym.cipher.iv_size.min, - cap->sym.cipher.iv_size.max, - cap->sym.cipher.iv_size.increment) - != 0) { - printf("Unsupported IV length\n"); - return -1; - } - /* - * Check if length of IV to be randomly generated - * is supported by the algorithm chosen. - */ - } else if (options->iv_random_size != -1) { - if (check_supported_size(options->iv_random_size, - cap->sym.cipher.iv_size.min, - cap->sym.cipher.iv_size.max, - cap->sym.cipher.iv_size.increment) - != 0) { - printf("Unsupported IV length\n"); - return -1; - } - options->iv.length = options->iv_random_size; - /* No size provided, use minimum size. */ - } else - options->iv.length = cap->sym.cipher.iv_size.min; + + check_iv_param(&cap->sym.cipher.iv_size, options->iv_param, + options->iv_random_size, &options->iv.length); /* * Check if length of provided cipher key is supported diff --git a/lib/librte_cryptodev/rte_crypto_sym.h b/lib/librte_cryptodev/rte_crypto_sym.h index db594c82f5..a12fd9fff3 100644 --- a/lib/librte_cryptodev/rte_crypto_sym.h +++ b/lib/librte_cryptodev/rte_crypto_sym.h @@ -190,6 +190,55 @@ struct rte_crypto_cipher_xform { * - Each key can be either 128 bits (16 bytes) or 256 bits (32 bytes). * - Both keys must have the same size. **/ + struct { + uint16_t offset; + /**< Starting point for Initialisation Vector or Counter, + * specified as number of bytes from start of crypto + * operation (rte_crypto_op). + * + * - For block ciphers in CBC or F8 mode, or for KASUMI + * in F8 mode, or for SNOW 3G in UEA2 mode, this is the + * Initialisation Vector (IV) value. + * + * - For block ciphers in CTR mode, this is the counter. + * + * - For GCM mode, this is either the IV (if the length + * is 96 bits) or J0 (for other sizes), where J0 is as + * defined by NIST SP800-38D. Regardless of the IV + * length, a full 16 bytes needs to be allocated. + * + * - For CCM mode, the first byte is reserved, and the + * nonce should be written starting at &iv[1] (to allow + * space for the implementation to write in the flags + * in the first byte). Note that a full 16 bytes should + * be allocated, even though the length field will + * have a value less than this. + * + * - For AES-XTS, this is the 128bit tweak, i, from + * IEEE Std 1619-2007. + * + * For optimum performance, the data pointed to SHOULD + * be 8-byte aligned. + */ + uint16_t length; + /**< Length of valid IV data. 
+ * + * - For block ciphers in CBC or F8 mode, or for KASUMI + * in F8 mode, or for SNOW 3G in UEA2 mode, this is the + * length of the IV (which must be the same as the + * block length of the cipher). + * + * - For block ciphers in CTR mode, this is the length + * of the counter (which must be the same as the block + * length of the cipher). + * + * - For GCM mode, this is either 12 (for 96-bit IVs) + * or 16, in which case data points to J0. + * + * - For CCM mode, this is the length of the nonce, + * which can be in the range 7 to 13 inclusive. + */ + } iv; /**< Initialisation vector parameters */ }; /** Symmetric Authentication / Hash Algorithms */ @@ -463,55 +512,6 @@ struct rte_crypto_sym_op { */ } data; /**< Data offsets and length for ciphering */ - struct { - uint16_t offset; - /**< Starting point for Initialisation Vector or Counter, - * specified as number of bytes from start of crypto - * operation. - * - * - For block ciphers in CBC or F8 mode, or for KASUMI - * in F8 mode, or for SNOW 3G in UEA2 mode, this is the - * Initialisation Vector (IV) value. - * - * - For block ciphers in CTR mode, this is the counter. - * - * - For GCM mode, this is either the IV (if the length - * is 96 bits) or J0 (for other sizes), where J0 is as - * defined by NIST SP800-38D. Regardless of the IV - * length, a full 16 bytes needs to be allocated. - * - * - For CCM mode, the first byte is reserved, and the - * nonce should be written starting at &iv[1] (to allow - * space for the implementation to write in the flags - * in the first byte). Note that a full 16 bytes should - * be allocated, even though the length field will - * have a value less than this. - * - * - For AES-XTS, this is the 128bit tweak, i, from - * IEEE Std 1619-2007. - * - * For optimum performance, the data pointed to SHOULD - * be 8-byte aligned. - */ - uint16_t length; - /**< Length of valid IV data. - * - * - For block ciphers in CBC or F8 mode, or for KASUMI - * in F8 mode, or for SNOW 3G in UEA2 mode, this is the - * length of the IV (which must be the same as the - * block length of the cipher). - * - * - For block ciphers in CTR mode, this is the length - * of the counter (which must be the same as the block - * length of the cipher). - * - * - For GCM mode, this is either 12 (for 96-bit IVs) - * or 16, in which case data points to J0. - * - * - For CCM mode, this is the length of the nonce, - * which can be in the range 7 to 13 inclusive. 
- */ - } iv; /**< Initialisation vector parameters */ } cipher; struct { diff --git a/test/test/test_cryptodev.c b/test/test/test_cryptodev.c index a509beacd8..5bd55c7d2d 100644 --- a/test/test/test_cryptodev.c +++ b/test/test/test_cryptodev.c @@ -1271,6 +1271,8 @@ test_AES_CBC_HMAC_SHA1_encrypt_digest(void) ut_params->cipher_xform.cipher.op = RTE_CRYPTO_CIPHER_OP_ENCRYPT; ut_params->cipher_xform.cipher.key.data = aes_cbc_key; ut_params->cipher_xform.cipher.key.length = CIPHER_KEY_LENGTH_AES_CBC; + ut_params->cipher_xform.cipher.iv.offset = IV_OFFSET; + ut_params->cipher_xform.cipher.iv.length = CIPHER_IV_LENGTH_AES_CBC; /* Setup HMAC Parameters */ ut_params->auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH; @@ -1311,13 +1313,11 @@ test_AES_CBC_HMAC_SHA1_encrypt_digest(void) sym_op->auth.data.offset = 0; sym_op->auth.data.length = QUOTE_512_BYTES; - /* Set crypto operation cipher parameters */ - sym_op->cipher.iv.offset = IV_OFFSET; - sym_op->cipher.iv.length = CIPHER_IV_LENGTH_AES_CBC; - + /* Copy IV at the end of the crypto operation */ rte_memcpy(rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET), aes_cbc_iv, CIPHER_IV_LENGTH_AES_CBC); + /* Set crypto operation cipher parameters */ sym_op->cipher.data.offset = 0; sym_op->cipher.data.length = QUOTE_512_BYTES; @@ -1405,6 +1405,8 @@ test_AES_CBC_HMAC_SHA512_decrypt_create_session_params( ut_params->cipher_xform.cipher.op = RTE_CRYPTO_CIPHER_OP_DECRYPT; ut_params->cipher_xform.cipher.key.data = cipher_key; ut_params->cipher_xform.cipher.key.length = CIPHER_KEY_LENGTH_AES_CBC; + ut_params->cipher_xform.cipher.iv.offset = IV_OFFSET; + ut_params->cipher_xform.cipher.iv.length = CIPHER_IV_LENGTH_AES_CBC; /* Setup HMAC Parameters */ ut_params->auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH; @@ -1463,9 +1465,7 @@ test_AES_CBC_HMAC_SHA512_decrypt_perform(struct rte_cryptodev_sym_session *sess, sym_op->auth.data.offset = 0; sym_op->auth.data.length = QUOTE_512_BYTES; - sym_op->cipher.iv.offset = IV_OFFSET; - sym_op->cipher.iv.length = CIPHER_IV_LENGTH_AES_CBC; - + /* Copy IV at the end of the crypto operation */ rte_memcpy(rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET), iv, CIPHER_IV_LENGTH_AES_CBC); @@ -1807,7 +1807,8 @@ static int create_wireless_algo_cipher_session(uint8_t dev_id, enum rte_crypto_cipher_operation op, enum rte_crypto_cipher_algorithm algo, - const uint8_t *key, const uint8_t key_len) + const uint8_t *key, const uint8_t key_len, + uint8_t iv_len) { uint8_t cipher_key[key_len]; @@ -1823,6 +1824,8 @@ create_wireless_algo_cipher_session(uint8_t dev_id, ut_params->cipher_xform.cipher.op = op; ut_params->cipher_xform.cipher.key.data = cipher_key; ut_params->cipher_xform.cipher.key.length = key_len; + ut_params->cipher_xform.cipher.iv.offset = IV_OFFSET; + ut_params->cipher_xform.cipher.iv.length = iv_len; TEST_HEXDUMP(stdout, "key:", key, key_len); @@ -1857,9 +1860,6 @@ create_wireless_algo_cipher_operation(const uint8_t *iv, uint8_t iv_len, sym_op->m_src = ut_params->ibuf; /* iv */ - sym_op->cipher.iv.offset = IV_OFFSET; - sym_op->cipher.iv.length = iv_len; - rte_memcpy(rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET), iv, iv_len); sym_op->cipher.data.length = cipher_len; @@ -1891,9 +1891,6 @@ create_wireless_algo_cipher_operation_oop(const uint8_t *iv, uint8_t iv_len, sym_op->m_dst = ut_params->obuf; /* iv */ - sym_op->cipher.iv.offset = IV_OFFSET; - sym_op->cipher.iv.length = iv_len; - rte_memcpy(rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET), iv, iv_len); sym_op->cipher.data.length 
= cipher_len; @@ -1908,7 +1905,8 @@ create_wireless_algo_cipher_auth_session(uint8_t dev_id, enum rte_crypto_auth_algorithm auth_algo, enum rte_crypto_cipher_algorithm cipher_algo, const uint8_t *key, const uint8_t key_len, - const uint8_t aad_len, const uint8_t auth_len) + const uint8_t aad_len, const uint8_t auth_len, + uint8_t iv_len) { uint8_t cipher_auth_key[key_len]; @@ -1937,6 +1935,8 @@ create_wireless_algo_cipher_auth_session(uint8_t dev_id, ut_params->cipher_xform.cipher.op = cipher_op; ut_params->cipher_xform.cipher.key.data = cipher_auth_key; ut_params->cipher_xform.cipher.key.length = key_len; + ut_params->cipher_xform.cipher.iv.offset = IV_OFFSET; + ut_params->cipher_xform.cipher.iv.length = iv_len; TEST_HEXDUMP(stdout, "key:", key, key_len); @@ -1963,6 +1963,7 @@ create_wireless_cipher_auth_session(uint8_t dev_id, const uint8_t *key = tdata->key.data; const uint8_t aad_len = tdata->aad.len; const uint8_t auth_len = tdata->digest.len; + uint8_t iv_len = tdata->iv.len; memcpy(cipher_auth_key, key, key_len); @@ -1986,6 +1987,9 @@ create_wireless_cipher_auth_session(uint8_t dev_id, ut_params->cipher_xform.cipher.op = cipher_op; ut_params->cipher_xform.cipher.key.data = cipher_auth_key; ut_params->cipher_xform.cipher.key.length = key_len; + ut_params->cipher_xform.cipher.iv.offset = IV_OFFSET; + ut_params->cipher_xform.cipher.iv.length = iv_len; + TEST_HEXDUMP(stdout, "key:", key, key_len); @@ -2014,7 +2018,8 @@ create_wireless_algo_auth_cipher_session(uint8_t dev_id, enum rte_crypto_auth_algorithm auth_algo, enum rte_crypto_cipher_algorithm cipher_algo, const uint8_t *key, const uint8_t key_len, - const uint8_t aad_len, const uint8_t auth_len) + const uint8_t aad_len, const uint8_t auth_len, + uint8_t iv_len) { uint8_t auth_cipher_key[key_len]; @@ -2039,6 +2044,8 @@ create_wireless_algo_auth_cipher_session(uint8_t dev_id, ut_params->cipher_xform.cipher.op = cipher_op; ut_params->cipher_xform.cipher.key.data = auth_cipher_key; ut_params->cipher_xform.cipher.key.length = key_len; + ut_params->cipher_xform.cipher.iv.offset = IV_OFFSET; + ut_params->cipher_xform.cipher.iv.length = iv_len; TEST_HEXDUMP(stdout, "key:", key, key_len); @@ -2212,9 +2219,6 @@ create_wireless_cipher_hash_operation(const struct wireless_test_data *tdata, TEST_HEXDUMP(stdout, "aad:", sym_op->auth.aad.data, aad_len); /* iv */ - sym_op->cipher.iv.offset = IV_OFFSET; - sym_op->cipher.iv.length = iv_len; - rte_memcpy(rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET), iv, iv_len); sym_op->cipher.data.length = cipher_len; @@ -2307,9 +2311,6 @@ create_wireless_algo_cipher_hash_operation(const uint8_t *auth_tag, TEST_HEXDUMP(stdout, "aad:", sym_op->auth.aad.data, aad_len); /* iv */ - sym_op->cipher.iv.offset = IV_OFFSET; - sym_op->cipher.iv.length = iv_len; - rte_memcpy(rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET), iv, iv_len); sym_op->cipher.data.length = cipher_len; @@ -2390,9 +2391,6 @@ create_wireless_algo_auth_cipher_operation(const unsigned auth_tag_len, sym_op->auth.aad.data, aad_len); /* iv */ - sym_op->cipher.iv.offset = IV_OFFSET; - sym_op->cipher.iv.length = iv_len; - rte_memcpy(rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET), iv, iv_len); sym_op->cipher.data.length = cipher_len; @@ -2802,7 +2800,8 @@ test_kasumi_encryption(const struct kasumi_test_data *tdata) retval = create_wireless_algo_cipher_session(ts_params->valid_devs[0], RTE_CRYPTO_CIPHER_OP_ENCRYPT, RTE_CRYPTO_CIPHER_KASUMI_F8, - tdata->key.data, tdata->key.len); + tdata->key.data, tdata->key.len, + 
tdata->iv.len); if (retval < 0) return retval; @@ -2877,7 +2876,8 @@ test_kasumi_encryption_sgl(const struct kasumi_test_data *tdata) retval = create_wireless_algo_cipher_session(ts_params->valid_devs[0], RTE_CRYPTO_CIPHER_OP_ENCRYPT, RTE_CRYPTO_CIPHER_KASUMI_F8, - tdata->key.data, tdata->key.len); + tdata->key.data, tdata->key.len, + tdata->iv.len); if (retval < 0) return retval; @@ -2941,7 +2941,8 @@ test_kasumi_encryption_oop(const struct kasumi_test_data *tdata) retval = create_wireless_algo_cipher_session(ts_params->valid_devs[0], RTE_CRYPTO_CIPHER_OP_ENCRYPT, RTE_CRYPTO_CIPHER_KASUMI_F8, - tdata->key.data, tdata->key.len); + tdata->key.data, tdata->key.len, + tdata->iv.len); if (retval < 0) return retval; @@ -3018,7 +3019,8 @@ test_kasumi_encryption_oop_sgl(const struct kasumi_test_data *tdata) retval = create_wireless_algo_cipher_session(ts_params->valid_devs[0], RTE_CRYPTO_CIPHER_OP_ENCRYPT, RTE_CRYPTO_CIPHER_KASUMI_F8, - tdata->key.data, tdata->key.len); + tdata->key.data, tdata->key.len, + tdata->iv.len); if (retval < 0) return retval; @@ -3081,7 +3083,8 @@ test_kasumi_decryption_oop(const struct kasumi_test_data *tdata) retval = create_wireless_algo_cipher_session(ts_params->valid_devs[0], RTE_CRYPTO_CIPHER_OP_DECRYPT, RTE_CRYPTO_CIPHER_KASUMI_F8, - tdata->key.data, tdata->key.len); + tdata->key.data, tdata->key.len, + tdata->iv.len); if (retval < 0) return retval; @@ -3147,7 +3150,8 @@ test_kasumi_decryption(const struct kasumi_test_data *tdata) retval = create_wireless_algo_cipher_session(ts_params->valid_devs[0], RTE_CRYPTO_CIPHER_OP_DECRYPT, RTE_CRYPTO_CIPHER_KASUMI_F8, - tdata->key.data, tdata->key.len); + tdata->key.data, tdata->key.len, + tdata->iv.len); if (retval < 0) return retval; @@ -3211,7 +3215,8 @@ test_snow3g_encryption(const struct snow3g_test_data *tdata) retval = create_wireless_algo_cipher_session(ts_params->valid_devs[0], RTE_CRYPTO_CIPHER_OP_ENCRYPT, RTE_CRYPTO_CIPHER_SNOW3G_UEA2, - tdata->key.data, tdata->key.len); + tdata->key.data, tdata->key.len, + tdata->iv.len); if (retval < 0) return retval; @@ -3275,7 +3280,8 @@ test_snow3g_encryption_oop(const struct snow3g_test_data *tdata) retval = create_wireless_algo_cipher_session(ts_params->valid_devs[0], RTE_CRYPTO_CIPHER_OP_ENCRYPT, RTE_CRYPTO_CIPHER_SNOW3G_UEA2, - tdata->key.data, tdata->key.len); + tdata->key.data, tdata->key.len, + tdata->iv.len); if (retval < 0) return retval; @@ -3356,7 +3362,8 @@ test_snow3g_encryption_oop_sgl(const struct snow3g_test_data *tdata) retval = create_wireless_algo_cipher_session(ts_params->valid_devs[0], RTE_CRYPTO_CIPHER_OP_ENCRYPT, RTE_CRYPTO_CIPHER_SNOW3G_UEA2, - tdata->key.data, tdata->key.len); + tdata->key.data, tdata->key.len, + tdata->iv.len); if (retval < 0) return retval; @@ -3445,7 +3452,8 @@ test_snow3g_encryption_offset_oop(const struct snow3g_test_data *tdata) retval = create_wireless_algo_cipher_session(ts_params->valid_devs[0], RTE_CRYPTO_CIPHER_OP_ENCRYPT, RTE_CRYPTO_CIPHER_SNOW3G_UEA2, - tdata->key.data, tdata->key.len); + tdata->key.data, tdata->key.len, + tdata->iv.len); if (retval < 0) return retval; @@ -3535,7 +3543,8 @@ static int test_snow3g_decryption(const struct snow3g_test_data *tdata) retval = create_wireless_algo_cipher_session(ts_params->valid_devs[0], RTE_CRYPTO_CIPHER_OP_DECRYPT, RTE_CRYPTO_CIPHER_SNOW3G_UEA2, - tdata->key.data, tdata->key.len); + tdata->key.data, tdata->key.len, + tdata->iv.len); if (retval < 0) return retval; @@ -3596,7 +3605,8 @@ static int test_snow3g_decryption_oop(const struct snow3g_test_data *tdata) retval = 
create_wireless_algo_cipher_session(ts_params->valid_devs[0], RTE_CRYPTO_CIPHER_OP_DECRYPT, RTE_CRYPTO_CIPHER_SNOW3G_UEA2, - tdata->key.data, tdata->key.len); + tdata->key.data, tdata->key.len, + tdata->iv.len); if (retval < 0) return retval; @@ -3759,7 +3769,8 @@ test_snow3g_cipher_auth(const struct snow3g_test_data *tdata) RTE_CRYPTO_AUTH_SNOW3G_UIA2, RTE_CRYPTO_CIPHER_SNOW3G_UEA2, tdata->key.data, tdata->key.len, - tdata->aad.len, tdata->digest.len); + tdata->aad.len, tdata->digest.len, + tdata->iv.len); if (retval < 0) return retval; ut_params->ibuf = rte_pktmbuf_alloc(ts_params->mbuf_pool); @@ -3841,7 +3852,8 @@ test_snow3g_auth_cipher(const struct snow3g_test_data *tdata) RTE_CRYPTO_AUTH_SNOW3G_UIA2, RTE_CRYPTO_CIPHER_SNOW3G_UEA2, tdata->key.data, tdata->key.len, - tdata->aad.len, tdata->digest.len); + tdata->aad.len, tdata->digest.len, + tdata->iv.len); if (retval < 0) return retval; @@ -3927,7 +3939,8 @@ test_kasumi_auth_cipher(const struct kasumi_test_data *tdata) RTE_CRYPTO_AUTH_KASUMI_F9, RTE_CRYPTO_CIPHER_KASUMI_F8, tdata->key.data, tdata->key.len, - tdata->aad.len, tdata->digest.len); + tdata->aad.len, tdata->digest.len, + tdata->iv.len); if (retval < 0) return retval; ut_params->ibuf = rte_pktmbuf_alloc(ts_params->mbuf_pool); @@ -4009,7 +4022,8 @@ test_kasumi_cipher_auth(const struct kasumi_test_data *tdata) RTE_CRYPTO_AUTH_KASUMI_F9, RTE_CRYPTO_CIPHER_KASUMI_F8, tdata->key.data, tdata->key.len, - tdata->aad.len, tdata->digest.len); + tdata->aad.len, tdata->digest.len, + tdata->iv.len); if (retval < 0) return retval; @@ -4098,7 +4112,8 @@ test_zuc_encryption(const struct wireless_test_data *tdata) retval = create_wireless_algo_cipher_session(ts_params->valid_devs[0], RTE_CRYPTO_CIPHER_OP_ENCRYPT, RTE_CRYPTO_CIPHER_ZUC_EEA3, - tdata->key.data, tdata->key.len); + tdata->key.data, tdata->key.len, + tdata->iv.len); if (retval < 0) return retval; @@ -4193,7 +4208,8 @@ test_zuc_encryption_sgl(const struct wireless_test_data *tdata) retval = create_wireless_algo_cipher_session(ts_params->valid_devs[0], RTE_CRYPTO_CIPHER_OP_ENCRYPT, RTE_CRYPTO_CIPHER_ZUC_EEA3, - tdata->key.data, tdata->key.len); + tdata->key.data, tdata->key.len, + tdata->iv.len); if (retval < 0) return retval; @@ -4725,6 +4741,7 @@ static int create_gcm_session(uint8_t dev_id, enum rte_crypto_cipher_operation op, const uint8_t *key, const uint8_t key_len, const uint8_t aad_len, const uint8_t auth_len, + uint8_t iv_len, enum rte_crypto_auth_operation auth_op) { uint8_t cipher_key[key_len]; @@ -4742,6 +4759,8 @@ create_gcm_session(uint8_t dev_id, enum rte_crypto_cipher_operation op, ut_params->cipher_xform.cipher.op = op; ut_params->cipher_xform.cipher.key.data = cipher_key; ut_params->cipher_xform.cipher.key.length = key_len; + ut_params->cipher_xform.cipher.iv.offset = IV_OFFSET; + ut_params->cipher_xform.cipher.iv.length = iv_len; TEST_HEXDUMP(stdout, "key:", key, key_len); @@ -4778,6 +4797,7 @@ create_gcm_xforms(struct rte_crypto_op *op, enum rte_crypto_cipher_operation cipher_op, uint8_t *key, const uint8_t key_len, const uint8_t aad_len, const uint8_t auth_len, + uint8_t iv_len, enum rte_crypto_auth_operation auth_op) { TEST_ASSERT_NOT_NULL(rte_crypto_op_sym_xforms_alloc(op, 2), @@ -4791,6 +4811,8 @@ create_gcm_xforms(struct rte_crypto_op *op, sym_op->xform->cipher.op = cipher_op; sym_op->xform->cipher.key.data = key; sym_op->xform->cipher.key.length = key_len; + sym_op->xform->cipher.iv.offset = IV_OFFSET; + sym_op->xform->cipher.iv.length = iv_len; TEST_HEXDUMP(stdout, "key:", key, key_len); @@ -4842,12 
+4864,10 @@ create_gcm_operation(enum rte_crypto_cipher_operation op, /* Append IV at the end of the crypto operation*/ uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET); - sym_op->cipher.iv.offset = IV_OFFSET; - sym_op->cipher.iv.length = tdata->iv.len; rte_memcpy(iv_ptr, tdata->iv.data, tdata->iv.len); TEST_HEXDUMP(stdout, "iv:", iv_ptr, - sym_op->cipher.iv.length); + tdata->iv.len); /* Append plaintext/ciphertext */ if (op == RTE_CRYPTO_CIPHER_OP_ENCRYPT) { @@ -4951,6 +4971,7 @@ test_mb_AES_GCM_authenticated_encryption(const struct gcm_test_data *tdata) RTE_CRYPTO_CIPHER_OP_ENCRYPT, tdata->key.data, tdata->key.len, tdata->aad.len, tdata->auth_tag.len, + tdata->iv.len, RTE_CRYPTO_AUTH_OP_GENERATE); if (retval < 0) return retval; @@ -5128,6 +5149,7 @@ test_mb_AES_GCM_authenticated_decryption(const struct gcm_test_data *tdata) RTE_CRYPTO_CIPHER_OP_DECRYPT, tdata->key.data, tdata->key.len, tdata->aad.len, tdata->auth_tag.len, + tdata->iv.len, RTE_CRYPTO_AUTH_OP_VERIFY); if (retval < 0) return retval; @@ -5294,6 +5316,7 @@ test_AES_GCM_authenticated_encryption_oop(const struct gcm_test_data *tdata) RTE_CRYPTO_CIPHER_OP_ENCRYPT, tdata->key.data, tdata->key.len, tdata->aad.len, tdata->auth_tag.len, + tdata->iv.len, RTE_CRYPTO_AUTH_OP_GENERATE); if (retval < 0) return retval; @@ -5370,6 +5393,7 @@ test_AES_GCM_authenticated_decryption_oop(const struct gcm_test_data *tdata) RTE_CRYPTO_CIPHER_OP_DECRYPT, tdata->key.data, tdata->key.len, tdata->aad.len, tdata->auth_tag.len, + tdata->iv.len, RTE_CRYPTO_AUTH_OP_VERIFY); if (retval < 0) return retval; @@ -5453,6 +5477,7 @@ test_AES_GCM_authenticated_encryption_sessionless( RTE_CRYPTO_CIPHER_OP_ENCRYPT, key, tdata->key.len, tdata->aad.len, tdata->auth_tag.len, + tdata->iv.len, RTE_CRYPTO_AUTH_OP_GENERATE); if (retval < 0) return retval; @@ -5533,6 +5558,7 @@ test_AES_GCM_authenticated_decryption_sessionless( RTE_CRYPTO_CIPHER_OP_DECRYPT, key, tdata->key.len, tdata->aad.len, tdata->auth_tag.len, + tdata->iv.len, RTE_CRYPTO_AUTH_OP_VERIFY); if (retval < 0) return retval; @@ -6417,9 +6443,6 @@ create_gmac_operation(enum rte_crypto_auth_operation op, uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET); - sym_op->cipher.iv.offset = IV_OFFSET; - sym_op->cipher.iv.length = tdata->iv.len; - rte_memcpy(iv_ptr, tdata->iv.data, tdata->iv.len); TEST_HEXDUMP(stdout, "iv:", iv_ptr, tdata->iv.len); @@ -6451,6 +6474,8 @@ static int create_gmac_session(uint8_t dev_id, ut_params->cipher_xform.cipher.op = op; ut_params->cipher_xform.cipher.key.data = cipher_key; ut_params->cipher_xform.cipher.key.length = tdata->key.len; + ut_params->cipher_xform.cipher.iv.offset = IV_OFFSET; + ut_params->cipher_xform.cipher.iv.length = tdata->iv.len; ut_params->auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH; ut_params->auth_xform.next = NULL; @@ -6849,6 +6874,8 @@ create_auth_cipher_session(struct crypto_unittest_params *ut_params, ut_params->cipher_xform.cipher.op = cipher_op; ut_params->cipher_xform.cipher.key.data = cipher_key; ut_params->cipher_xform.cipher.key.length = reference->cipher_key.len; + ut_params->cipher_xform.cipher.iv.offset = IV_OFFSET; + ut_params->cipher_xform.cipher.iv.length = reference->iv.len; /* Create Crypto session*/ ut_params->sess = rte_cryptodev_sym_session_create(dev_id, @@ -6960,9 +6987,6 @@ create_auth_GMAC_operation(struct crypto_testsuite_params *ts_params, sym_op->auth.digest.data, sym_op->auth.digest.length); - sym_op->cipher.iv.offset = IV_OFFSET; - sym_op->cipher.iv.length = 
reference->iv.len; - rte_memcpy(rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET), reference->iv.data, reference->iv.len); @@ -7017,9 +7041,6 @@ create_cipher_auth_operation(struct crypto_testsuite_params *ts_params, sym_op->auth.digest.data, sym_op->auth.digest.length); - sym_op->cipher.iv.offset = IV_OFFSET; - sym_op->cipher.iv.length = reference->iv.len; - rte_memcpy(rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET), reference->iv.data, reference->iv.len); @@ -7267,8 +7288,6 @@ create_gcm_operation_SGL(enum rte_crypto_cipher_operation op, uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET); - sym_op->cipher.iv.offset = IV_OFFSET; - sym_op->cipher.iv.length = iv_len; rte_memcpy(iv_ptr, tdata->iv.data, iv_len); @@ -7349,6 +7368,7 @@ test_AES_GCM_authenticated_encryption_SGL(const struct gcm_test_data *tdata, RTE_CRYPTO_CIPHER_OP_ENCRYPT, tdata->key.data, tdata->key.len, tdata->aad.len, tdata->auth_tag.len, + tdata->iv.len, RTE_CRYPTO_AUTH_OP_GENERATE); if (retval < 0) return retval; diff --git a/test/test/test_cryptodev_blockcipher.c b/test/test/test_cryptodev_blockcipher.c index aed6c847ec..eb9de66f71 100644 --- a/test/test/test_cryptodev_blockcipher.c +++ b/test/test/test_cryptodev_blockcipher.c @@ -288,11 +288,11 @@ test_blockcipher_one_case(const struct blockcipher_test_case *t, RTE_CRYPTO_CIPHER_OP_DECRYPT; cipher_xform->cipher.key.data = cipher_key; cipher_xform->cipher.key.length = tdata->cipher_key.len; + cipher_xform->cipher.iv.offset = IV_OFFSET; + cipher_xform->cipher.iv.length = tdata->iv.len; sym_op->cipher.data.offset = 0; sym_op->cipher.data.length = tdata->ciphertext.len; - sym_op->cipher.iv.offset = IV_OFFSET; - sym_op->cipher.iv.length = tdata->iv.len; rte_memcpy(rte_crypto_op_ctod_offset(op, uint8_t *, IV_OFFSET), tdata->iv.data, tdata->iv.len); diff --git a/test/test/test_cryptodev_perf.c b/test/test/test_cryptodev_perf.c index cbf21c74cd..8bd2e78dcc 100644 --- a/test/test/test_cryptodev_perf.c +++ b/test/test/test_cryptodev_perf.c @@ -43,6 +43,8 @@ #include "test_cryptodev.h" #include "test_cryptodev_gcm_test_vectors.h" +#define AES_CIPHER_IV_LENGTH 16 +#define TRIPLE_DES_CIPHER_IV_LENGTH 8 #define PERF_NUM_OPS_INFLIGHT (128) #define DEFAULT_NUM_REQS_TO_SUBMIT (10000000) @@ -67,9 +69,6 @@ enum chain_mode { struct symmetric_op { - const uint8_t *iv_data; - uint32_t iv_len; - const uint8_t *aad_data; uint32_t aad_len; @@ -96,6 +95,8 @@ struct symmetric_session_attrs { const uint8_t *key_auth_data; uint32_t key_auth_len; + const uint8_t *iv_data; + uint16_t iv_len; uint32_t digest_len; }; @@ -1933,7 +1934,8 @@ test_perf_crypto_qp_vary_burst_size(uint16_t dev_num) ut_params->cipher_xform.cipher.op = RTE_CRYPTO_CIPHER_OP_DECRYPT; ut_params->cipher_xform.cipher.key.data = aes_cbc_128_key; ut_params->cipher_xform.cipher.key.length = CIPHER_IV_LENGTH_AES_CBC; - + ut_params->cipher_xform.cipher.iv.offset = IV_OFFSET; + ut_params->cipher_xform.cipher.iv.length = CIPHER_IV_LENGTH_AES_CBC; /* Setup HMAC Parameters */ ut_params->auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH; @@ -1981,9 +1983,6 @@ test_perf_crypto_qp_vary_burst_size(uint16_t dev_num) op->sym->auth.data.offset = 0; op->sym->auth.data.length = data_params[0].length; - op->sym->cipher.iv.offset = IV_OFFSET; - op->sym->cipher.iv.length = CIPHER_IV_LENGTH_AES_CBC; - rte_memcpy(rte_crypto_op_ctod_offset(op, uint8_t *, IV_OFFSET), aes_cbc_128_iv, CIPHER_IV_LENGTH_AES_CBC); @@ -2646,6 +2645,8 @@ test_perf_create_aes_sha_session(uint8_t dev_id, enum chain_mode chain, 
cipher_xform.cipher.key.data = aes_key; cipher_xform.cipher.key.length = cipher_key_len; + cipher_xform.cipher.iv.offset = IV_OFFSET; + cipher_xform.cipher.iv.length = AES_CIPHER_IV_LENGTH; if (chain != CIPHER_ONLY) { /* Setup HMAC Parameters */ auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH; @@ -2694,6 +2695,9 @@ test_perf_create_snow3g_session(uint8_t dev_id, enum chain_mode chain, cipher_xform.cipher.key.data = snow3g_cipher_key; cipher_xform.cipher.key.length = cipher_key_len; + cipher_xform.cipher.iv.offset = IV_OFFSET; + cipher_xform.cipher.iv.length = SNOW3G_CIPHER_IV_LENGTH; + /* Setup HMAC Parameters */ auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH; @@ -2741,17 +2745,20 @@ test_perf_create_openssl_session(uint8_t dev_id, enum chain_mode chain, /* Setup Cipher Parameters */ cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER; cipher_xform.cipher.algo = cipher_algo; + cipher_xform.cipher.iv.offset = IV_OFFSET; cipher_xform.cipher.op = RTE_CRYPTO_CIPHER_OP_ENCRYPT; switch (cipher_algo) { case RTE_CRYPTO_CIPHER_3DES_CBC: case RTE_CRYPTO_CIPHER_3DES_CTR: cipher_xform.cipher.key.data = triple_des_key; + cipher_xform.cipher.iv.length = TRIPLE_DES_CIPHER_IV_LENGTH; break; case RTE_CRYPTO_CIPHER_AES_CBC: case RTE_CRYPTO_CIPHER_AES_CTR: case RTE_CRYPTO_CIPHER_AES_GCM: cipher_xform.cipher.key.data = aes_key; + cipher_xform.cipher.iv.length = AES_CIPHER_IV_LENGTH; break; default: return NULL; @@ -2816,6 +2823,8 @@ test_perf_create_armv8_session(uint8_t dev_id, enum chain_mode chain, } cipher_xform.cipher.key.length = cipher_key_len; + cipher_xform.cipher.iv.offset = IV_OFFSET; + cipher_xform.cipher.iv.length = AES_CIPHER_IV_LENGTH; /* Setup Auth Parameters */ auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH; @@ -2844,9 +2853,7 @@ test_perf_create_armv8_session(uint8_t dev_id, enum chain_mode chain, } } -#define AES_CIPHER_IV_LENGTH 16 #define AES_GCM_AAD_LENGTH 16 -#define TRIPLE_DES_CIPHER_IV_LENGTH 8 static struct rte_mbuf * test_perf_create_pktmbuf(struct rte_mempool *mpool, unsigned buf_sz) @@ -2893,12 +2900,11 @@ test_perf_set_crypto_op_aes(struct rte_crypto_op *op, struct rte_mbuf *m, } - /* Cipher Parameters */ - op->sym->cipher.iv.offset = IV_OFFSET; - op->sym->cipher.iv.length = AES_CIPHER_IV_LENGTH; + /* Copy the IV at the end of the crypto operation */ rte_memcpy(rte_crypto_op_ctod_offset(op, uint8_t *, IV_OFFSET), aes_iv, AES_CIPHER_IV_LENGTH); + /* Cipher Parameters */ op->sym->cipher.data.offset = 0; op->sym->cipher.data.length = data_len; @@ -2926,9 +2932,7 @@ test_perf_set_crypto_op_aes_gcm(struct rte_crypto_op *op, struct rte_mbuf *m, op->sym->auth.aad.data = aes_gcm_aad; op->sym->auth.aad.length = AES_GCM_AAD_LENGTH; - /* Cipher Parameters */ - op->sym->cipher.iv.offset = IV_OFFSET; - op->sym->cipher.iv.length = AES_CIPHER_IV_LENGTH; + /* Copy IV at the end of the crypto operation */ rte_memcpy(rte_crypto_op_ctod_offset(op, uint8_t *, IV_OFFSET), aes_iv, AES_CIPHER_IV_LENGTH); @@ -2970,10 +2974,6 @@ test_perf_set_crypto_op_snow3g(struct rte_crypto_op *op, struct rte_mbuf *m, IV_OFFSET); op->sym->auth.aad.length = SNOW3G_CIPHER_IV_LENGTH; - /* Cipher Parameters */ - op->sym->cipher.iv.offset = IV_OFFSET; - op->sym->cipher.iv.length = SNOW3G_CIPHER_IV_LENGTH; - /* Data lengths/offsets Parameters */ op->sym->auth.data.offset = 0; op->sym->auth.data.length = data_len << 3; @@ -2997,9 +2997,7 @@ test_perf_set_crypto_op_snow3g_cipher(struct rte_crypto_op *op, return NULL; } - /* Cipher Parameters */ - op->sym->cipher.iv.offset = IV_OFFSET; - op->sym->cipher.iv.length = SNOW3G_CIPHER_IV_LENGTH; 
+ /* Copy IV at the end of the crypto operation */ rte_memcpy(rte_crypto_op_ctod_offset(op, uint8_t *, IV_OFFSET), snow3g_iv, SNOW3G_CIPHER_IV_LENGTH); @@ -3068,9 +3066,7 @@ test_perf_set_crypto_op_3des(struct rte_crypto_op *op, struct rte_mbuf *m, rte_pktmbuf_mtophys_offset(m, data_len); op->sym->auth.digest.length = digest_len; - /* Cipher Parameters */ - op->sym->cipher.iv.offset = IV_OFFSET; - op->sym->cipher.iv.length = TRIPLE_DES_CIPHER_IV_LENGTH; + /* Copy IV at the end of the crypto operation */ rte_memcpy(rte_crypto_op_ctod_offset(op, uint8_t *, IV_OFFSET), triple_des_iv, TRIPLE_DES_CIPHER_IV_LENGTH); @@ -4136,6 +4132,8 @@ test_perf_create_session(uint8_t dev_id, struct perf_test_params *pparams) cipher_xform.cipher.op = pparams->session_attrs->cipher; cipher_xform.cipher.key.data = cipher_key; cipher_xform.cipher.key.length = pparams->session_attrs->key_cipher_len; + cipher_xform.cipher.iv.length = pparams->session_attrs->iv_len; + cipher_xform.cipher.iv.offset = IV_OFFSET; auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH; auth_xform.next = NULL; @@ -4190,14 +4188,11 @@ perf_gcm_set_crypto_op(struct rte_crypto_op *op, struct rte_mbuf *m, rte_memcpy(op->sym->auth.aad.data, params->symmetric_op->aad_data, params->symmetric_op->aad_len); - op->sym->cipher.iv.offset = IV_OFFSET; - rte_memcpy(iv_ptr, params->symmetric_op->iv_data, - params->symmetric_op->iv_len); - if (params->symmetric_op->iv_len == 12) + rte_memcpy(iv_ptr, params->session_attrs->iv_data, + params->session_attrs->iv_len); + if (params->session_attrs->iv_len == 12) iv_ptr[15] = 1; - op->sym->cipher.iv.length = params->symmetric_op->iv_len; - op->sym->auth.data.offset = params->symmetric_op->aad_len; op->sym->auth.data.length = params->symmetric_op->p_len; @@ -4434,11 +4429,11 @@ test_perf_AES_GCM(int continual_buf_len, int continual_size) session_attrs[i].key_auth_len = 0; session_attrs[i].digest_len = gcm_test->auth_tag.len; + session_attrs[i].iv_len = gcm_test->iv.len; + session_attrs[i].iv_data = gcm_test->iv.data; ops_set[i].aad_data = gcm_test->aad.data; ops_set[i].aad_len = gcm_test->aad.len; - ops_set[i].iv_data = gcm_test->iv.data; - ops_set[i].iv_len = gcm_test->iv.len; ops_set[i].p_data = gcm_test->plaintext.data; ops_set[i].p_len = buf_lengths[i]; ops_set[i].c_data = gcm_test->ciphertext.data;
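
Editor's note: for readers tracking the API change above, here is a minimal sketch (not part of the patch) of the new usage pattern: the IV offset and length are supplied once in the cipher transform when the session is created, and each operation only copies the IV bytes at that offset inside the crypto operation. IV_OFFSET below mirrors the test applications' convention of placing the IV right after struct rte_crypto_sym_op; the AES-CBC choice, the helper names and key handling are illustrative assumptions, not code from the patch.

    #include <string.h>
    #include <rte_crypto.h>
    #include <rte_cryptodev.h>
    #include <rte_memcpy.h>

    /* Illustrative only: IV stored right after the symmetric op, as in the tests */
    #define IV_OFFSET (sizeof(struct rte_crypto_op) + sizeof(struct rte_crypto_sym_op))
    #define AES_CBC_IV_LENGTH 16

    static struct rte_cryptodev_sym_session *
    create_aes_cbc_session(uint8_t dev_id, uint8_t *key, uint8_t key_len)
    {
        struct rte_crypto_sym_xform cipher_xform;

        memset(&cipher_xform, 0, sizeof(cipher_xform));
        cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
        cipher_xform.next = NULL;
        cipher_xform.cipher.algo = RTE_CRYPTO_CIPHER_AES_CBC;
        cipher_xform.cipher.op = RTE_CRYPTO_CIPHER_OP_ENCRYPT;
        cipher_xform.cipher.key.data = key;
        cipher_xform.cipher.key.length = key_len;
        /* IV offset and length are now session parameters, set in the xform */
        cipher_xform.cipher.iv.offset = IV_OFFSET;
        cipher_xform.cipher.iv.length = AES_CBC_IV_LENGTH;

        return rte_cryptodev_sym_session_create(dev_id, &cipher_xform);
    }

    static void
    set_op_iv(struct rte_crypto_op *op, const uint8_t *iv)
    {
        /* Per operation, only the IV bytes are copied at the agreed offset;
         * sym_op->cipher.iv.offset/length are no longer filled in here. */
        uint8_t *iv_ptr = rte_crypto_op_ctod_offset(op, uint8_t *, IV_OFFSET);

        rte_memcpy(iv_ptr, iv, AES_CBC_IV_LENGTH);
    }

This is why the per-op assignments of sym_op->cipher.iv.offset/length are removed throughout the tests and perf apps above, while the corresponding cipher_xform.cipher.iv fields are added wherever a session is created.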