From 5082f991f6def0ef06e9ff1cf9f39ef3b78657c5 Mon Sep 17 00:00:00 2001 From: Pablo de Lara Date: Sun, 2 Jul 2017 06:41:13 +0100 Subject: [PATCH] cryptodev: pass IV as offset Since IV now is copied after the crypto operation, in its private size, IV can be passed only with offset and length. Signed-off-by: Pablo de Lara Acked-by: Declan Doherty Acked-by: Akhil Goyal Acked-by: Fiona Trahe --- app/test-crypto-perf/cperf_ops.c | 49 +++++---- doc/guides/prog_guide/cryptodev_lib.rst | 3 +- doc/guides/rel_notes/release_17_08.rst | 2 + drivers/crypto/aesni_gcm/aesni_gcm_pmd.c | 80 ++++++++------- drivers/crypto/aesni_mb/rte_aesni_mb_pmd.c | 3 +- drivers/crypto/armv8/rte_armv8_pmd.c | 3 +- drivers/crypto/dpaa2_sec/dpaa2_sec_dpseci.c | 8 +- drivers/crypto/kasumi/rte_kasumi_pmd.c | 26 +++-- drivers/crypto/openssl/rte_openssl_pmd.c | 12 ++- drivers/crypto/qat/qat_crypto.c | 30 ++++-- drivers/crypto/snow3g/rte_snow3g_pmd.c | 14 +-- drivers/crypto/zuc/rte_zuc_pmd.c | 7 +- examples/ipsec-secgw/esp.c | 14 +-- examples/l2fwd-crypto/main.c | 5 +- lib/librte_cryptodev/rte_crypto_sym.h | 7 +- test/test/test_cryptodev.c | 107 ++++++++------------ test/test/test_cryptodev_blockcipher.c | 8 +- test/test/test_cryptodev_perf.c | 60 ++++------- 18 files changed, 211 insertions(+), 227 deletions(-) diff --git a/app/test-crypto-perf/cperf_ops.c b/app/test-crypto-perf/cperf_ops.c index 0f45a3c8fd..018ce0e607 100644 --- a/app/test-crypto-perf/cperf_ops.c +++ b/app/test-crypto-perf/cperf_ops.c @@ -106,10 +106,7 @@ cperf_set_ops_cipher(struct rte_crypto_op **ops, sym_op->m_dst = bufs_out[i]; /* cipher parameters */ - sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(ops[i], - uint8_t *, iv_offset); - sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(ops[i], - iv_offset); + sym_op->cipher.iv.offset = iv_offset; sym_op->cipher.iv.length = test_vector->iv.length; if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 || @@ -123,11 +120,13 @@ cperf_set_ops_cipher(struct rte_crypto_op **ops, } if (options->test == CPERF_TEST_TYPE_VERIFY) { - for (i = 0; i < nb_ops; i++) - memcpy(ops[i]->sym->cipher.iv.data, - test_vector->iv.data, - test_vector->iv.length); - } + for (i = 0; i < nb_ops; i++) { + uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i], + uint8_t *, iv_offset); + + memcpy(iv_ptr, test_vector->iv.data, + test_vector->iv.length); + } } return 0; } @@ -216,10 +215,7 @@ cperf_set_ops_cipher_auth(struct rte_crypto_op **ops, sym_op->m_dst = bufs_out[i]; /* cipher parameters */ - sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(ops[i], - uint8_t *, iv_offset); - sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(ops[i], - iv_offset); + sym_op->cipher.iv.offset = iv_offset; sym_op->cipher.iv.length = test_vector->iv.length; if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 || @@ -275,10 +271,13 @@ cperf_set_ops_cipher_auth(struct rte_crypto_op **ops, } if (options->test == CPERF_TEST_TYPE_VERIFY) { - for (i = 0; i < nb_ops; i++) - memcpy(ops[i]->sym->cipher.iv.data, - test_vector->iv.data, - test_vector->iv.length); + for (i = 0; i < nb_ops; i++) { + uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i], + uint8_t *, iv_offset); + + memcpy(iv_ptr, test_vector->iv.data, + test_vector->iv.length); + } } return 0; @@ -303,10 +302,7 @@ cperf_set_ops_aead(struct rte_crypto_op **ops, sym_op->m_dst = bufs_out[i]; /* cipher parameters */ - sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(ops[i], - uint8_t *, iv_offset); - sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(ops[i], - iv_offset); 
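
The application-side pattern that replaces removed iv.data and iv.phys_addr
assignments like the one just above looks roughly like the sketch below.
IV_OFFSET is an assumption here, mirroring the constant this patch's test
code uses: the first byte of the op's private data area, directly after the
symmetric op.

#include <string.h>
#include <rte_crypto.h>

#define IV_OFFSET	(sizeof(struct rte_crypto_op) + \
			sizeof(struct rte_crypto_sym_op))

static void
set_cipher_iv(struct rte_crypto_op *op, const uint8_t *iv, uint16_t iv_len)
{
	/* The descriptor now carries only an offset and a length... */
	op->sym->cipher.iv.offset = IV_OFFSET;
	op->sym->cipher.iv.length = iv_len;

	/* ...and the IV bytes themselves live in the op's private data,
	 * reached through the existing rte_crypto_op_ctod_offset() macro. */
	memcpy(rte_crypto_op_ctod_offset(op, uint8_t *, IV_OFFSET),
			iv, iv_len);
}

Because the offset is relative to the op itself, a PMD can recover both the
virtual and the physical address of the IV on demand, which is what makes
the per-op phys_addr field removable.
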
+ sym_op->cipher.iv.offset = iv_offset; sym_op->cipher.iv.length = test_vector->iv.length; sym_op->cipher.data.length = options->test_buffer_size; @@ -354,10 +350,13 @@ cperf_set_ops_aead(struct rte_crypto_op **ops, } if (options->test == CPERF_TEST_TYPE_VERIFY) { - for (i = 0; i < nb_ops; i++) - memcpy(ops[i]->sym->cipher.iv.data, - test_vector->iv.data, - test_vector->iv.length); + for (i = 0; i < nb_ops; i++) { + uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i], + uint8_t *, iv_offset); + + memcpy(iv_ptr, test_vector->iv.data, + test_vector->iv.length); + } } return 0; diff --git a/doc/guides/prog_guide/cryptodev_lib.rst b/doc/guides/prog_guide/cryptodev_lib.rst index c9a29f862c..48c58a96da 100644 --- a/doc/guides/prog_guide/cryptodev_lib.rst +++ b/doc/guides/prog_guide/cryptodev_lib.rst @@ -537,8 +537,7 @@ chain. } data; /**< Data offsets and length for ciphering */ struct { - uint8_t *data; - phys_addr_t phys_addr; + uint16_t offset; uint16_t length; } iv; /**< Initialisation vector parameters */ } cipher; diff --git a/doc/guides/rel_notes/release_17_08.rst b/doc/guides/rel_notes/release_17_08.rst index e03333ceba..3ee6db34c1 100644 --- a/doc/guides/rel_notes/release_17_08.rst +++ b/doc/guides/rel_notes/release_17_08.rst @@ -90,6 +90,8 @@ New Features The crypto operation (``rte_crypto_sym_op``) has been reorganized as follows: * Removed field ``rte_crypto_sym_op_sess_type``. + * Replaced pointer and physical address of IV with offset from the start + of the crypto operation. * **Reorganized the crypto operation structure.** diff --git a/drivers/crypto/aesni_gcm/aesni_gcm_pmd.c b/drivers/crypto/aesni_gcm/aesni_gcm_pmd.c index 92a5027c7c..61be723ee7 100644 --- a/drivers/crypto/aesni_gcm/aesni_gcm_pmd.c +++ b/drivers/crypto/aesni_gcm/aesni_gcm_pmd.c @@ -180,12 +180,14 @@ aesni_gcm_get_session(struct aesni_gcm_qp *qp, struct rte_crypto_op *op) * */ static int -process_gcm_crypto_op(struct rte_crypto_sym_op *op, +process_gcm_crypto_op(struct rte_crypto_op *op, struct aesni_gcm_session *session) { uint8_t *src, *dst; - struct rte_mbuf *m_src = op->m_src; - uint32_t offset = op->cipher.data.offset; + uint8_t *iv_ptr; + struct rte_crypto_sym_op *sym_op = op->sym; + struct rte_mbuf *m_src = sym_op->m_src; + uint32_t offset = sym_op->cipher.data.offset; uint32_t part_len, total_len, data_len; RTE_ASSERT(m_src != NULL); @@ -198,46 +200,48 @@ process_gcm_crypto_op(struct rte_crypto_sym_op *op, } data_len = m_src->data_len - offset; - part_len = (data_len < op->cipher.data.length) ? data_len : - op->cipher.data.length; + part_len = (data_len < sym_op->cipher.data.length) ? data_len : + sym_op->cipher.data.length; /* Destination buffer is required when segmented source buffer */ - RTE_ASSERT((part_len == op->cipher.data.length) || - ((part_len != op->cipher.data.length) && - (op->m_dst != NULL))); + RTE_ASSERT((part_len == sym_op->cipher.data.length) || + ((part_len != sym_op->cipher.data.length) && + (sym_op->m_dst != NULL))); /* Segmented destination buffer is not supported */ - RTE_ASSERT((op->m_dst == NULL) || - ((op->m_dst != NULL) && - rte_pktmbuf_is_contiguous(op->m_dst))); + RTE_ASSERT((sym_op->m_dst == NULL) || + ((sym_op->m_dst != NULL) && + rte_pktmbuf_is_contiguous(sym_op->m_dst))); - dst = op->m_dst ? - rte_pktmbuf_mtod_offset(op->m_dst, uint8_t *, - op->cipher.data.offset) : - rte_pktmbuf_mtod_offset(op->m_src, uint8_t *, - op->cipher.data.offset); + dst = sym_op->m_dst ? 
+ rte_pktmbuf_mtod_offset(sym_op->m_dst, uint8_t *, + sym_op->cipher.data.offset) : + rte_pktmbuf_mtod_offset(sym_op->m_src, uint8_t *, + sym_op->cipher.data.offset); src = rte_pktmbuf_mtod_offset(m_src, uint8_t *, offset); /* sanity checks */ - if (op->cipher.iv.length != 16 && op->cipher.iv.length != 12 && - op->cipher.iv.length != 0) { + if (sym_op->cipher.iv.length != 16 && sym_op->cipher.iv.length != 12 && + sym_op->cipher.iv.length != 0) { GCM_LOG_ERR("iv"); return -1; } + iv_ptr = rte_crypto_op_ctod_offset(op, uint8_t *, + sym_op->cipher.iv.offset); /* * GCM working in 12B IV mode => 16B pre-counter block we need * to set BE LSB to 1, driver expects that 16B is allocated */ - if (op->cipher.iv.length == 12) { - uint32_t *iv_padd = (uint32_t *)&op->cipher.iv.data[12]; + if (sym_op->cipher.iv.length == 12) { + uint32_t *iv_padd = (uint32_t *)&(iv_ptr[12]); *iv_padd = rte_bswap32(1); } - if (op->auth.digest.length != 16 && - op->auth.digest.length != 12 && - op->auth.digest.length != 8) { + if (sym_op->auth.digest.length != 16 && + sym_op->auth.digest.length != 12 && + sym_op->auth.digest.length != 8) { GCM_LOG_ERR("digest"); return -1; } @@ -245,13 +249,13 @@ process_gcm_crypto_op(struct rte_crypto_sym_op *op, if (session->op == AESNI_GCM_OP_AUTHENTICATED_ENCRYPTION) { aesni_gcm_enc[session->key].init(&session->gdata, - op->cipher.iv.data, - op->auth.aad.data, - (uint64_t)op->auth.aad.length); + iv_ptr, + sym_op->auth.aad.data, + (uint64_t)sym_op->auth.aad.length); aesni_gcm_enc[session->key].update(&session->gdata, dst, src, (uint64_t)part_len); - total_len = op->cipher.data.length - part_len; + total_len = sym_op->cipher.data.length - part_len; while (total_len) { dst += part_len; @@ -270,12 +274,12 @@ process_gcm_crypto_op(struct rte_crypto_sym_op *op, } aesni_gcm_enc[session->key].finalize(&session->gdata, - op->auth.digest.data, - (uint64_t)op->auth.digest.length); + sym_op->auth.digest.data, + (uint64_t)sym_op->auth.digest.length); } else { /* session->op == AESNI_GCM_OP_AUTHENTICATED_DECRYPTION */ - uint8_t *auth_tag = (uint8_t *)rte_pktmbuf_append(op->m_dst ? - op->m_dst : op->m_src, - op->auth.digest.length); + uint8_t *auth_tag = (uint8_t *)rte_pktmbuf_append(sym_op->m_dst ? 
+ sym_op->m_dst : sym_op->m_src, + sym_op->auth.digest.length); if (!auth_tag) { GCM_LOG_ERR("auth_tag"); @@ -283,13 +287,13 @@ process_gcm_crypto_op(struct rte_crypto_sym_op *op, } aesni_gcm_dec[session->key].init(&session->gdata, - op->cipher.iv.data, - op->auth.aad.data, - (uint64_t)op->auth.aad.length); + iv_ptr, + sym_op->auth.aad.data, + (uint64_t)sym_op->auth.aad.length); aesni_gcm_dec[session->key].update(&session->gdata, dst, src, (uint64_t)part_len); - total_len = op->cipher.data.length - part_len; + total_len = sym_op->cipher.data.length - part_len; while (total_len) { dst += part_len; @@ -309,7 +313,7 @@ process_gcm_crypto_op(struct rte_crypto_sym_op *op, aesni_gcm_dec[session->key].finalize(&session->gdata, auth_tag, - (uint64_t)op->auth.digest.length); + (uint64_t)sym_op->auth.digest.length); } return 0; @@ -401,7 +405,7 @@ aesni_gcm_pmd_dequeue_burst(void *queue_pair, break; } - retval = process_gcm_crypto_op(ops[i]->sym, sess); + retval = process_gcm_crypto_op(ops[i], sess); if (retval < 0) { ops[i]->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS; qp->qp_stats.dequeue_err_count++; diff --git a/drivers/crypto/aesni_mb/rte_aesni_mb_pmd.c b/drivers/crypto/aesni_mb/rte_aesni_mb_pmd.c index fa8112ae30..ece7d00cdc 100644 --- a/drivers/crypto/aesni_mb/rte_aesni_mb_pmd.c +++ b/drivers/crypto/aesni_mb/rte_aesni_mb_pmd.c @@ -471,7 +471,8 @@ set_mb_job_params(JOB_AES_HMAC *job, struct aesni_mb_qp *qp, get_truncated_digest_byte_length(job->hash_alg); /* Set IV parameters */ - job->iv = op->sym->cipher.iv.data; + job->iv = rte_crypto_op_ctod_offset(op, uint8_t *, + op->sym->cipher.iv.offset); job->iv_len_in_bytes = op->sym->cipher.iv.length; /* Data Parameter */ diff --git a/drivers/crypto/armv8/rte_armv8_pmd.c b/drivers/crypto/armv8/rte_armv8_pmd.c index 4a79b61d30..693eccd138 100644 --- a/drivers/crypto/armv8/rte_armv8_pmd.c +++ b/drivers/crypto/armv8/rte_armv8_pmd.c @@ -654,7 +654,8 @@ process_armv8_chained_op return; } - arg.cipher.iv = op->sym->cipher.iv.data; + arg.cipher.iv = rte_crypto_op_ctod_offset(op, uint8_t *, + op->sym->cipher.iv.offset); arg.cipher.key = sess->cipher.key.data; /* Acquire combined mode function */ crypto_func = sess->crypto_func; diff --git a/drivers/crypto/dpaa2_sec/dpaa2_sec_dpseci.c b/drivers/crypto/dpaa2_sec/dpaa2_sec_dpseci.c index e1543951c6..1605701b3c 100644 --- a/drivers/crypto/dpaa2_sec/dpaa2_sec_dpseci.c +++ b/drivers/crypto/dpaa2_sec/dpaa2_sec_dpseci.c @@ -87,6 +87,8 @@ build_authenc_fd(dpaa2_sec_session *sess, int icv_len = sym_op->auth.digest.length; uint8_t *old_icv; uint32_t mem_len = (7 * sizeof(struct qbman_fle)) + icv_len; + uint8_t *iv_ptr = rte_crypto_op_ctod_offset(op, uint8_t *, + op->sym->cipher.iv.offset); PMD_INIT_FUNC_TRACE(); @@ -178,7 +180,7 @@ build_authenc_fd(dpaa2_sec_session *sess, sym_op->auth.digest.length); /* Configure Input SGE for Encap/Decap */ - DPAA2_SET_FLE_ADDR(sge, DPAA2_VADDR_TO_IOVA(sym_op->cipher.iv.data)); + DPAA2_SET_FLE_ADDR(sge, DPAA2_VADDR_TO_IOVA(iv_ptr)); sge->length = sym_op->cipher.iv.length; sge++; @@ -307,6 +309,8 @@ build_cipher_fd(dpaa2_sec_session *sess, struct rte_crypto_op *op, uint32_t mem_len = (5 * sizeof(struct qbman_fle)); struct sec_flow_context *flc; struct ctxt_priv *priv = sess->ctxt; + uint8_t *iv_ptr = rte_crypto_op_ctod_offset(op, uint8_t *, + op->sym->cipher.iv.offset); PMD_INIT_FUNC_TRACE(); @@ -369,7 +373,7 @@ build_cipher_fd(dpaa2_sec_session *sess, struct rte_crypto_op *op, DPAA2_SET_FLE_SG_EXT(fle); - DPAA2_SET_FLE_ADDR(sge, DPAA2_VADDR_TO_IOVA(sym_op->cipher.iv.data)); + 
DPAA2_SET_FLE_ADDR(sge, DPAA2_VADDR_TO_IOVA(iv_ptr)); sge->length = sym_op->cipher.iv.length; sge++; diff --git a/drivers/crypto/kasumi/rte_kasumi_pmd.c b/drivers/crypto/kasumi/rte_kasumi_pmd.c index 7edf81907c..290f67063f 100644 --- a/drivers/crypto/kasumi/rte_kasumi_pmd.c +++ b/drivers/crypto/kasumi/rte_kasumi_pmd.c @@ -174,7 +174,8 @@ process_kasumi_cipher_op(struct rte_crypto_op **ops, unsigned i; uint8_t processed_ops = 0; uint8_t *src[num_ops], *dst[num_ops]; - uint64_t IV[num_ops]; + uint8_t *iv_ptr; + uint64_t iv[num_ops]; uint32_t num_bytes[num_ops]; for (i = 0; i < num_ops; i++) { @@ -192,14 +193,16 @@ process_kasumi_cipher_op(struct rte_crypto_op **ops, (ops[i]->sym->cipher.data.offset >> 3) : rte_pktmbuf_mtod(ops[i]->sym->m_src, uint8_t *) + (ops[i]->sym->cipher.data.offset >> 3); - IV[i] = *((uint64_t *)(ops[i]->sym->cipher.iv.data)); + iv_ptr = rte_crypto_op_ctod_offset(ops[i], uint8_t *, + ops[i]->sym->cipher.iv.offset); + iv[i] = *((uint64_t *)(iv_ptr)); num_bytes[i] = ops[i]->sym->cipher.data.length >> 3; processed_ops++; } if (processed_ops != 0) - sso_kasumi_f8_n_buffer(&session->pKeySched_cipher, IV, + sso_kasumi_f8_n_buffer(&session->pKeySched_cipher, iv, src, dst, num_bytes, processed_ops); return processed_ops; @@ -211,7 +214,8 @@ process_kasumi_cipher_op_bit(struct rte_crypto_op *op, struct kasumi_session *session) { uint8_t *src, *dst; - uint64_t IV; + uint8_t *iv_ptr; + uint64_t iv; uint32_t length_in_bits, offset_in_bits; /* Sanity checks. */ @@ -229,10 +233,12 @@ process_kasumi_cipher_op_bit(struct rte_crypto_op *op, return 0; } dst = rte_pktmbuf_mtod(op->sym->m_dst, uint8_t *); - IV = *((uint64_t *)(op->sym->cipher.iv.data)); + iv_ptr = rte_crypto_op_ctod_offset(op, uint8_t *, + op->sym->cipher.iv.offset); + iv = *((uint64_t *)(iv_ptr)); length_in_bits = op->sym->cipher.data.length; - sso_kasumi_f8_1_buffer_bit(&session->pKeySched_cipher, IV, + sso_kasumi_f8_1_buffer_bit(&session->pKeySched_cipher, iv, src, dst, length_in_bits, offset_in_bits); return 1; @@ -250,7 +256,7 @@ process_kasumi_hash_op(struct rte_crypto_op **ops, uint32_t length_in_bits; uint32_t num_bytes; uint32_t shift_bits; - uint64_t IV; + uint64_t iv; uint8_t direction; for (i = 0; i < num_ops; i++) { @@ -278,7 +284,7 @@ process_kasumi_hash_op(struct rte_crypto_op **ops, src = rte_pktmbuf_mtod(ops[i]->sym->m_src, uint8_t *) + (ops[i]->sym->auth.data.offset >> 3); /* IV from AAD */ - IV = *((uint64_t *)(ops[i]->sym->auth.aad.data)); + iv = *((uint64_t *)(ops[i]->sym->auth.aad.data)); /* Direction from next bit after end of message */ num_bytes = (length_in_bits >> 3) + 1; shift_bits = (BYTE_LEN - 1 - length_in_bits) % BYTE_LEN; @@ -289,7 +295,7 @@ process_kasumi_hash_op(struct rte_crypto_op **ops, ops[i]->sym->auth.digest.length); sso_kasumi_f9_1_buffer_user(&session->pKeySched_hash, - IV, src, + iv, src, length_in_bits, dst, direction); /* Verify digest. 
*/ if (memcmp(dst, ops[i]->sym->auth.digest.data, @@ -303,7 +309,7 @@ process_kasumi_hash_op(struct rte_crypto_op **ops, dst = ops[i]->sym->auth.digest.data; sso_kasumi_f9_1_buffer_user(&session->pKeySched_hash, - IV, src, + iv, src, length_in_bits, dst, direction); } processed_ops++; diff --git a/drivers/crypto/openssl/rte_openssl_pmd.c b/drivers/crypto/openssl/rte_openssl_pmd.c index bfc237122e..d3f16c9a6c 100644 --- a/drivers/crypto/openssl/rte_openssl_pmd.c +++ b/drivers/crypto/openssl/rte_openssl_pmd.c @@ -923,7 +923,8 @@ process_openssl_combined_op return; } - iv = op->sym->cipher.iv.data; + iv = rte_crypto_op_ctod_offset(op, uint8_t *, + op->sym->cipher.iv.offset); ivlen = op->sym->cipher.iv.length; aad = op->sym->auth.aad.data; aadlen = op->sym->auth.aad.length; @@ -987,7 +988,8 @@ process_openssl_cipher_op dst = rte_pktmbuf_mtod_offset(mbuf_dst, uint8_t *, op->sym->cipher.data.offset); - iv = op->sym->cipher.iv.data; + iv = rte_crypto_op_ctod_offset(op, uint8_t *, + op->sym->cipher.iv.offset); if (sess->cipher.mode == OPENSSL_CIPHER_LIB) if (sess->cipher.direction == RTE_CRYPTO_CIPHER_OP_ENCRYPT) @@ -1028,7 +1030,8 @@ process_openssl_docsis_bpi_op(struct rte_crypto_op *op, dst = rte_pktmbuf_mtod_offset(mbuf_dst, uint8_t *, op->sym->cipher.data.offset); - iv = op->sym->cipher.iv.data; + iv = rte_crypto_op_ctod_offset(op, uint8_t *, + op->sym->cipher.iv.offset); block_size = DES_BLOCK_SIZE; @@ -1086,7 +1089,8 @@ process_openssl_docsis_bpi_op(struct rte_crypto_op *op, dst, iv, last_block_len, sess->cipher.bpi_ctx); /* Prepare parameters for CBC mode op */ - iv = op->sym->cipher.iv.data; + iv = rte_crypto_op_ctod_offset(op, uint8_t *, + op->sym->cipher.iv.offset); dst += last_block_len - srclen; srclen -= last_block_len; } diff --git a/drivers/crypto/qat/qat_crypto.c b/drivers/crypto/qat/qat_crypto.c index e05c31630d..b7f891b5c4 100644 --- a/drivers/crypto/qat/qat_crypto.c +++ b/drivers/crypto/qat/qat_crypto.c @@ -642,7 +642,8 @@ qat_bpicipher_preprocess(struct qat_session *ctx, iv = last_block - block_len; else /* runt block, i.e. less than one full block */ - iv = sym_op->cipher.iv.data; + iv = rte_crypto_op_ctod_offset(op, uint8_t *, + sym_op->cipher.iv.offset); #ifdef RTE_LIBRTE_PMD_QAT_DEBUG_TX rte_hexdump(stdout, "BPI: src before pre-process:", last_block, @@ -697,7 +698,8 @@ qat_bpicipher_postprocess(struct qat_session *ctx, iv = dst - block_len; else /* runt block, i.e. 
less than one full block */ - iv = sym_op->cipher.iv.data; + iv = rte_crypto_op_ctod_offset(op, uint8_t *, + sym_op->cipher.iv.offset); #ifdef RTE_LIBRTE_PMD_QAT_DEBUG_RX rte_hexdump(stdout, "BPI: src before post-process:", last_block, @@ -898,6 +900,7 @@ qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg, uint32_t min_ofs = 0; uint64_t src_buf_start = 0, dst_buf_start = 0; uint8_t do_sgl = 0; + uint8_t *iv_ptr; #ifdef RTE_LIBRTE_PMD_QAT_DEBUG_TX @@ -971,6 +974,8 @@ qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg, cipher_ofs = op->sym->cipher.data.offset; } + iv_ptr = rte_crypto_op_ctod_offset(op, uint8_t *, + op->sym->cipher.iv.offset); /* copy IV into request if it fits */ /* * If IV length is zero do not copy anything but still @@ -981,14 +986,15 @@ qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg, if (op->sym->cipher.iv.length <= sizeof(cipher_param->u.cipher_IV_array)) { rte_memcpy(cipher_param->u.cipher_IV_array, - op->sym->cipher.iv.data, + iv_ptr, op->sym->cipher.iv.length); } else { ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET( qat_req->comn_hdr.serv_specif_flags, ICP_QAT_FW_CIPH_IV_64BIT_PTR); cipher_param->u.s.cipher_IV_ptr = - op->sym->cipher.iv.phys_addr; + rte_crypto_op_ctophys_offset(op, + op->sym->cipher.iv.offset); } } min_ofs = cipher_ofs; @@ -1179,12 +1185,16 @@ qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg, rte_hexdump(stdout, "src_data:", rte_pktmbuf_mtod(op->sym->m_src, uint8_t*), rte_pktmbuf_data_len(op->sym->m_src)); - rte_hexdump(stdout, "iv:", op->sym->cipher.iv.data, - op->sym->cipher.iv.length); - rte_hexdump(stdout, "digest:", op->sym->auth.digest.data, - op->sym->auth.digest.length); - rte_hexdump(stdout, "aad:", op->sym->auth.aad.data, - op->sym->auth.aad.length); + if (do_cipher) + rte_hexdump(stdout, "iv:", iv_ptr, + op->sym->cipher.iv.length); + + if (do_auth) { + rte_hexdump(stdout, "digest:", op->sym->auth.digest.data, + op->sym->auth.digest.length); + rte_hexdump(stdout, "aad:", op->sym->auth.aad.data, + op->sym->auth.aad.length); + } #endif return 0; } diff --git a/drivers/crypto/snow3g/rte_snow3g_pmd.c b/drivers/crypto/snow3g/rte_snow3g_pmd.c index a8bcd9d41e..68121b0e0d 100644 --- a/drivers/crypto/snow3g/rte_snow3g_pmd.c +++ b/drivers/crypto/snow3g/rte_snow3g_pmd.c @@ -174,7 +174,7 @@ process_snow3g_cipher_op(struct rte_crypto_op **ops, unsigned i; uint8_t processed_ops = 0; uint8_t *src[SNOW3G_MAX_BURST], *dst[SNOW3G_MAX_BURST]; - uint8_t *IV[SNOW3G_MAX_BURST]; + uint8_t *iv[SNOW3G_MAX_BURST]; uint32_t num_bytes[SNOW3G_MAX_BURST]; for (i = 0; i < num_ops; i++) { @@ -192,13 +192,14 @@ process_snow3g_cipher_op(struct rte_crypto_op **ops, (ops[i]->sym->cipher.data.offset >> 3) : rte_pktmbuf_mtod(ops[i]->sym->m_src, uint8_t *) + (ops[i]->sym->cipher.data.offset >> 3); - IV[i] = ops[i]->sym->cipher.iv.data; + iv[i] = rte_crypto_op_ctod_offset(ops[i], uint8_t *, + ops[i]->sym->cipher.iv.offset); num_bytes[i] = ops[i]->sym->cipher.data.length >> 3; processed_ops++; } - sso_snow3g_f8_n_buffer(&session->pKeySched_cipher, IV, src, dst, + sso_snow3g_f8_n_buffer(&session->pKeySched_cipher, iv, src, dst, num_bytes, processed_ops); return processed_ops; @@ -210,7 +211,7 @@ process_snow3g_cipher_op_bit(struct rte_crypto_op *op, struct snow3g_session *session) { uint8_t *src, *dst; - uint8_t *IV; + uint8_t *iv; uint32_t length_in_bits, offset_in_bits; /* Sanity checks. 
*/ @@ -228,10 +229,11 @@ process_snow3g_cipher_op_bit(struct rte_crypto_op *op, return 0; } dst = rte_pktmbuf_mtod(op->sym->m_dst, uint8_t *); - IV = op->sym->cipher.iv.data; + iv = rte_crypto_op_ctod_offset(op, uint8_t *, + op->sym->cipher.iv.offset); length_in_bits = op->sym->cipher.data.length; - sso_snow3g_f8_1_buffer_bit(&session->pKeySched_cipher, IV, + sso_snow3g_f8_1_buffer_bit(&session->pKeySched_cipher, iv, src, dst, length_in_bits, offset_in_bits); return 1; diff --git a/drivers/crypto/zuc/rte_zuc_pmd.c b/drivers/crypto/zuc/rte_zuc_pmd.c index 25a959f7d9..3923e3c667 100644 --- a/drivers/crypto/zuc/rte_zuc_pmd.c +++ b/drivers/crypto/zuc/rte_zuc_pmd.c @@ -173,7 +173,7 @@ process_zuc_cipher_op(struct rte_crypto_op **ops, unsigned i; uint8_t processed_ops = 0; uint8_t *src[ZUC_MAX_BURST], *dst[ZUC_MAX_BURST]; - uint8_t *IV[ZUC_MAX_BURST]; + uint8_t *iv[ZUC_MAX_BURST]; uint32_t num_bytes[ZUC_MAX_BURST]; uint8_t *cipher_keys[ZUC_MAX_BURST]; @@ -213,7 +213,8 @@ process_zuc_cipher_op(struct rte_crypto_op **ops, (ops[i]->sym->cipher.data.offset >> 3) : rte_pktmbuf_mtod(ops[i]->sym->m_src, uint8_t *) + (ops[i]->sym->cipher.data.offset >> 3); - IV[i] = ops[i]->sym->cipher.iv.data; + iv[i] = rte_crypto_op_ctod_offset(ops[i], uint8_t *, + ops[i]->sym->cipher.iv.offset); num_bytes[i] = ops[i]->sym->cipher.data.length >> 3; cipher_keys[i] = session->pKey_cipher; @@ -221,7 +222,7 @@ process_zuc_cipher_op(struct rte_crypto_op **ops, processed_ops++; } - sso_zuc_eea3_n_buffer(cipher_keys, IV, src, dst, + sso_zuc_eea3_n_buffer(cipher_keys, iv, src, dst, num_bytes, processed_ops); return processed_ops; diff --git a/examples/ipsec-secgw/esp.c b/examples/ipsec-secgw/esp.c index d084456382..387ce4f8c1 100644 --- a/examples/ipsec-secgw/esp.c +++ b/examples/ipsec-secgw/esp.c @@ -104,9 +104,7 @@ esp_inbound(struct rte_mbuf *m, struct ipsec_sa *sa, case RTE_CRYPTO_CIPHER_AES_CBC: /* Copy IV at the end of crypto operation */ rte_memcpy(iv_ptr, iv, sa->iv_len); - sym_cop->cipher.iv.data = iv_ptr; - sym_cop->cipher.iv.phys_addr = - rte_crypto_op_ctophys_offset(cop, IV_OFFSET); + sym_cop->cipher.iv.offset = IV_OFFSET; sym_cop->cipher.iv.length = sa->iv_len; break; case RTE_CRYPTO_CIPHER_AES_CTR: @@ -115,9 +113,7 @@ esp_inbound(struct rte_mbuf *m, struct ipsec_sa *sa, icb->salt = sa->salt; memcpy(&icb->iv, iv, 8); icb->cnt = rte_cpu_to_be_32(1); - sym_cop->cipher.iv.data = iv_ptr; - sym_cop->cipher.iv.phys_addr = - rte_crypto_op_ctophys_offset(cop, IV_OFFSET); + sym_cop->cipher.iv.offset = IV_OFFSET; sym_cop->cipher.iv.length = 16; break; default: @@ -348,15 +344,11 @@ esp_outbound(struct rte_mbuf *m, struct ipsec_sa *sa, padding[pad_len - 2] = pad_len - 2; padding[pad_len - 1] = nlp; - uint8_t *iv_ptr = rte_crypto_op_ctod_offset(cop, - uint8_t *, IV_OFFSET); struct cnt_blk *icb = get_cnt_blk(m); icb->salt = sa->salt; icb->iv = sa->seq; icb->cnt = rte_cpu_to_be_32(1); - sym_cop->cipher.iv.data = iv_ptr; - sym_cop->cipher.iv.phys_addr = - rte_crypto_op_ctophys_offset(cop, IV_OFFSET); + sym_cop->cipher.iv.offset = IV_OFFSET; sym_cop->cipher.iv.length = 16; uint8_t *aad; diff --git a/examples/l2fwd-crypto/main.c b/examples/l2fwd-crypto/main.c index 6941ee47e1..c810b48c1a 100644 --- a/examples/l2fwd-crypto/main.c +++ b/examples/l2fwd-crypto/main.c @@ -489,9 +489,7 @@ l2fwd_simple_crypto_enqueue(struct rte_mbuf *m, /* Copy IV at the end of the crypto operation */ rte_memcpy(iv_ptr, cparams->iv.data, cparams->iv.length); - op->sym->cipher.iv.data = iv_ptr; - op->sym->cipher.iv.phys_addr = - 
rte_crypto_op_ctophys_offset(op, IV_OFFSET); + op->sym->cipher.iv.offset = IV_OFFSET; op->sym->cipher.iv.length = cparams->iv.length; /* For wireless algorithms, offset/length must be in bits */ @@ -700,7 +698,6 @@ l2fwd_main_loop(struct l2fwd_crypto_options *options) if (port_cparams[i].do_cipher) { port_cparams[i].iv.data = options->iv.data; port_cparams[i].iv.length = options->iv.length; - port_cparams[i].iv.phys_addr = options->iv.phys_addr; if (!options->iv_param) generate_random_key(port_cparams[i].iv.data, port_cparams[i].iv.length); diff --git a/lib/librte_cryptodev/rte_crypto_sym.h b/lib/librte_cryptodev/rte_crypto_sym.h index 1e7f8629f9..db594c82f5 100644 --- a/lib/librte_cryptodev/rte_crypto_sym.h +++ b/lib/librte_cryptodev/rte_crypto_sym.h @@ -464,8 +464,10 @@ struct rte_crypto_sym_op { } data; /**< Data offsets and length for ciphering */ struct { - uint8_t *data; - /**< Initialisation Vector or Counter. + uint16_t offset; + /**< Starting point for Initialisation Vector or Counter, + * specified as number of bytes from start of crypto + * operation. * * - For block ciphers in CBC or F8 mode, or for KASUMI * in F8 mode, or for SNOW 3G in UEA2 mode, this is the @@ -491,7 +493,6 @@ struct rte_crypto_sym_op { * For optimum performance, the data pointed to SHOULD * be 8-byte aligned. */ - phys_addr_t phys_addr; uint16_t length; /**< Length of valid IV data. * diff --git a/test/test/test_cryptodev.c b/test/test/test_cryptodev.c index 4603679544..a509beacd8 100644 --- a/test/test/test_cryptodev.c +++ b/test/test/test_cryptodev.c @@ -1312,13 +1312,11 @@ test_AES_CBC_HMAC_SHA1_encrypt_digest(void) sym_op->auth.data.length = QUOTE_512_BYTES; /* Set crypto operation cipher parameters */ - sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(ut_params->op, - uint8_t *, IV_OFFSET); - sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(ut_params->op, - IV_OFFSET); + sym_op->cipher.iv.offset = IV_OFFSET; sym_op->cipher.iv.length = CIPHER_IV_LENGTH_AES_CBC; - rte_memcpy(sym_op->cipher.iv.data, aes_cbc_iv, CIPHER_IV_LENGTH_AES_CBC); + rte_memcpy(rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET), + aes_cbc_iv, CIPHER_IV_LENGTH_AES_CBC); sym_op->cipher.data.offset = 0; sym_op->cipher.data.length = QUOTE_512_BYTES; @@ -1465,13 +1463,11 @@ test_AES_CBC_HMAC_SHA512_decrypt_perform(struct rte_cryptodev_sym_session *sess, sym_op->auth.data.offset = 0; sym_op->auth.data.length = QUOTE_512_BYTES; - sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(ut_params->op, - uint8_t *, IV_OFFSET); - sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(ut_params->op, - IV_OFFSET); + sym_op->cipher.iv.offset = IV_OFFSET; sym_op->cipher.iv.length = CIPHER_IV_LENGTH_AES_CBC; - rte_memcpy(sym_op->cipher.iv.data, iv, CIPHER_IV_LENGTH_AES_CBC); + rte_memcpy(rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET), + iv, CIPHER_IV_LENGTH_AES_CBC); sym_op->cipher.data.offset = 0; sym_op->cipher.data.length = QUOTE_512_BYTES; @@ -1861,13 +1857,11 @@ create_wireless_algo_cipher_operation(const uint8_t *iv, uint8_t iv_len, sym_op->m_src = ut_params->ibuf; /* iv */ - sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(ut_params->op, - uint8_t *, IV_OFFSET); - sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(ut_params->op, - IV_OFFSET); + sym_op->cipher.iv.offset = IV_OFFSET; sym_op->cipher.iv.length = iv_len; - rte_memcpy(sym_op->cipher.iv.data, iv, iv_len); + rte_memcpy(rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET), + iv, iv_len); sym_op->cipher.data.length = 
cipher_len; sym_op->cipher.data.offset = cipher_offset; return 0; @@ -1897,13 +1891,11 @@ create_wireless_algo_cipher_operation_oop(const uint8_t *iv, uint8_t iv_len, sym_op->m_dst = ut_params->obuf; /* iv */ - sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(ut_params->op, - uint8_t *, IV_OFFSET); - sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(ut_params->op, - IV_OFFSET); + sym_op->cipher.iv.offset = IV_OFFSET; sym_op->cipher.iv.length = iv_len; - rte_memcpy(sym_op->cipher.iv.data, iv, iv_len); + rte_memcpy(rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET), + iv, iv_len); sym_op->cipher.data.length = cipher_len; sym_op->cipher.data.offset = cipher_offset; return 0; @@ -2220,13 +2212,11 @@ create_wireless_cipher_hash_operation(const struct wireless_test_data *tdata, TEST_HEXDUMP(stdout, "aad:", sym_op->auth.aad.data, aad_len); /* iv */ - sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(ut_params->op, - uint8_t *, IV_OFFSET); - sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(ut_params->op, - IV_OFFSET); + sym_op->cipher.iv.offset = IV_OFFSET; sym_op->cipher.iv.length = iv_len; - rte_memcpy(sym_op->cipher.iv.data, iv, iv_len); + rte_memcpy(rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET), + iv, iv_len); sym_op->cipher.data.length = cipher_len; sym_op->cipher.data.offset = cipher_offset + auth_offset; sym_op->auth.data.length = auth_len; @@ -2317,13 +2307,11 @@ create_wireless_algo_cipher_hash_operation(const uint8_t *auth_tag, TEST_HEXDUMP(stdout, "aad:", sym_op->auth.aad.data, aad_len); /* iv */ - sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(ut_params->op, - uint8_t *, IV_OFFSET); - sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(ut_params->op, - IV_OFFSET); + sym_op->cipher.iv.offset = IV_OFFSET; sym_op->cipher.iv.length = iv_len; - rte_memcpy(sym_op->cipher.iv.data, iv, iv_len); + rte_memcpy(rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET), + iv, iv_len); sym_op->cipher.data.length = cipher_len; sym_op->cipher.data.offset = cipher_offset + auth_offset; sym_op->auth.data.length = auth_len; @@ -2402,14 +2390,11 @@ create_wireless_algo_auth_cipher_operation(const unsigned auth_tag_len, sym_op->auth.aad.data, aad_len); /* iv */ - sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(ut_params->op, - uint8_t *, IV_OFFSET); - sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(ut_params->op, - IV_OFFSET); + sym_op->cipher.iv.offset = IV_OFFSET; sym_op->cipher.iv.length = iv_len; - rte_memcpy(sym_op->cipher.iv.data, iv, iv_len); - + rte_memcpy(rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET), + iv, iv_len); sym_op->cipher.data.length = cipher_len; sym_op->cipher.data.offset = auth_offset + cipher_offset; @@ -4855,14 +4840,13 @@ create_gcm_operation(enum rte_crypto_cipher_operation op, sym_op->auth.aad.length); /* Append IV at the end of the crypto operation*/ - sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(ut_params->op, - uint8_t *, IV_OFFSET); - sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(ut_params->op, - IV_OFFSET); + uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ut_params->op, + uint8_t *, IV_OFFSET); + sym_op->cipher.iv.offset = IV_OFFSET; sym_op->cipher.iv.length = tdata->iv.len; - rte_memcpy(sym_op->cipher.iv.data, tdata->iv.data, tdata->iv.len); - TEST_HEXDUMP(stdout, "iv:", sym_op->cipher.iv.data, + rte_memcpy(iv_ptr, tdata->iv.data, tdata->iv.len); + TEST_HEXDUMP(stdout, "iv:", iv_ptr, sym_op->cipher.iv.length); /* Append plaintext/ciphertext */ @@ -6430,15 
+6414,15 @@ create_gmac_operation(enum rte_crypto_auth_operation op, sym_op->auth.digest.length); } - sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(ut_params->op, - uint8_t *, IV_OFFSET); - sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(ut_params->op, - IV_OFFSET); + uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ut_params->op, + uint8_t *, IV_OFFSET); + + sym_op->cipher.iv.offset = IV_OFFSET; sym_op->cipher.iv.length = tdata->iv.len; - rte_memcpy(sym_op->cipher.iv.data, tdata->iv.data, tdata->iv.len); + rte_memcpy(iv_ptr, tdata->iv.data, tdata->iv.len); - TEST_HEXDUMP(stdout, "iv:", sym_op->cipher.iv.data, tdata->iv.len); + TEST_HEXDUMP(stdout, "iv:", iv_ptr, tdata->iv.len); sym_op->cipher.data.length = 0; sym_op->cipher.data.offset = 0; @@ -6976,13 +6960,11 @@ create_auth_GMAC_operation(struct crypto_testsuite_params *ts_params, sym_op->auth.digest.data, sym_op->auth.digest.length); - sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(ut_params->op, - uint8_t *, IV_OFFSET); - sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(ut_params->op, - IV_OFFSET); + sym_op->cipher.iv.offset = IV_OFFSET; sym_op->cipher.iv.length = reference->iv.len; - rte_memcpy(sym_op->cipher.iv.data, reference->iv.data, reference->iv.len); + rte_memcpy(rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET), + reference->iv.data, reference->iv.len); sym_op->cipher.data.length = 0; sym_op->cipher.data.offset = 0; @@ -7035,13 +7017,11 @@ create_cipher_auth_operation(struct crypto_testsuite_params *ts_params, sym_op->auth.digest.data, sym_op->auth.digest.length); - sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(ut_params->op, - uint8_t *, IV_OFFSET); - sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(ut_params->op, - IV_OFFSET); + sym_op->cipher.iv.offset = IV_OFFSET; sym_op->cipher.iv.length = reference->iv.len; - rte_memcpy(sym_op->cipher.iv.data, reference->iv.data, reference->iv.len); + rte_memcpy(rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET), + reference->iv.data, reference->iv.len); sym_op->cipher.data.length = reference->ciphertext.len; sym_op->cipher.data.offset = 0; @@ -7285,13 +7265,12 @@ create_gcm_operation_SGL(enum rte_crypto_cipher_operation op, sym_op->auth.digest.length); } - sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(ut_params->op, - uint8_t *, IV_OFFSET); - sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(ut_params->op, - IV_OFFSET); + uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ut_params->op, + uint8_t *, IV_OFFSET); + sym_op->cipher.iv.offset = IV_OFFSET; sym_op->cipher.iv.length = iv_len; - rte_memcpy(sym_op->cipher.iv.data, tdata->iv.data, iv_len); + rte_memcpy(iv_ptr, tdata->iv.data, iv_len); sym_op->auth.aad.data = (uint8_t *)rte_pktmbuf_prepend( ut_params->ibuf, aad_len); @@ -7304,7 +7283,7 @@ create_gcm_operation_SGL(enum rte_crypto_cipher_operation op, memset(sym_op->auth.aad.data, 0, aad_len); rte_memcpy(sym_op->auth.aad.data, tdata->aad.data, aad_len); - TEST_HEXDUMP(stdout, "iv:", sym_op->cipher.iv.data, iv_len); + TEST_HEXDUMP(stdout, "iv:", iv_ptr, iv_len); TEST_HEXDUMP(stdout, "aad:", sym_op->auth.aad.data, aad_len); diff --git a/test/test/test_cryptodev_blockcipher.c b/test/test/test_cryptodev_blockcipher.c index aa3ecc8639..aed6c847ec 100644 --- a/test/test/test_cryptodev_blockcipher.c +++ b/test/test/test_cryptodev_blockcipher.c @@ -291,12 +291,10 @@ test_blockcipher_one_case(const struct blockcipher_test_case *t, sym_op->cipher.data.offset = 0; sym_op->cipher.data.length = tdata->ciphertext.len; - 
sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(op, - uint8_t *, IV_OFFSET); - sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(op, - IV_OFFSET); + sym_op->cipher.iv.offset = IV_OFFSET; sym_op->cipher.iv.length = tdata->iv.len; - rte_memcpy(sym_op->cipher.iv.data, tdata->iv.data, + rte_memcpy(rte_crypto_op_ctod_offset(op, uint8_t *, IV_OFFSET), + tdata->iv.data, tdata->iv.len); } diff --git a/test/test/test_cryptodev_perf.c b/test/test/test_cryptodev_perf.c index a959a6cd87..cbf21c74cd 100644 --- a/test/test/test_cryptodev_perf.c +++ b/test/test/test_cryptodev_perf.c @@ -1981,15 +1981,11 @@ test_perf_crypto_qp_vary_burst_size(uint16_t dev_num) op->sym->auth.data.offset = 0; op->sym->auth.data.length = data_params[0].length; - - op->sym->cipher.iv.data = rte_crypto_op_ctod_offset(op, - uint8_t *, IV_OFFSET); - op->sym->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(op, - IV_OFFSET); + op->sym->cipher.iv.offset = IV_OFFSET; op->sym->cipher.iv.length = CIPHER_IV_LENGTH_AES_CBC; - rte_memcpy(op->sym->cipher.iv.data, aes_cbc_128_iv, - CIPHER_IV_LENGTH_AES_CBC); + rte_memcpy(rte_crypto_op_ctod_offset(op, uint8_t *, IV_OFFSET), + aes_cbc_128_iv, CIPHER_IV_LENGTH_AES_CBC); op->sym->cipher.data.offset = 0; op->sym->cipher.data.length = data_params[0].length; @@ -2898,13 +2894,10 @@ test_perf_set_crypto_op_aes(struct rte_crypto_op *op, struct rte_mbuf *m, /* Cipher Parameters */ - op->sym->cipher.iv.data = rte_crypto_op_ctod_offset(op, - uint8_t *, IV_OFFSET); - op->sym->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(op, - IV_OFFSET); + op->sym->cipher.iv.offset = IV_OFFSET; op->sym->cipher.iv.length = AES_CIPHER_IV_LENGTH; - - rte_memcpy(op->sym->cipher.iv.data, aes_iv, AES_CIPHER_IV_LENGTH); + rte_memcpy(rte_crypto_op_ctod_offset(op, uint8_t *, IV_OFFSET), + aes_iv, AES_CIPHER_IV_LENGTH); op->sym->cipher.data.offset = 0; op->sym->cipher.data.length = data_len; @@ -2934,12 +2927,10 @@ test_perf_set_crypto_op_aes_gcm(struct rte_crypto_op *op, struct rte_mbuf *m, op->sym->auth.aad.length = AES_GCM_AAD_LENGTH; /* Cipher Parameters */ - op->sym->cipher.iv.data = rte_crypto_op_ctod_offset(op, - uint8_t *, IV_OFFSET); - op->sym->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(op, - IV_OFFSET); + op->sym->cipher.iv.offset = IV_OFFSET; op->sym->cipher.iv.length = AES_CIPHER_IV_LENGTH; - rte_memcpy(op->sym->cipher.iv.data, aes_iv, AES_CIPHER_IV_LENGTH); + rte_memcpy(rte_crypto_op_ctod_offset(op, uint8_t *, IV_OFFSET), + aes_iv, AES_CIPHER_IV_LENGTH); /* Data lengths/offsets Parameters */ op->sym->auth.data.offset = 0; @@ -2980,9 +2971,7 @@ test_perf_set_crypto_op_snow3g(struct rte_crypto_op *op, struct rte_mbuf *m, op->sym->auth.aad.length = SNOW3G_CIPHER_IV_LENGTH; /* Cipher Parameters */ - op->sym->cipher.iv.data = iv_ptr; - op->sym->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(op, - IV_OFFSET); + op->sym->cipher.iv.offset = IV_OFFSET; op->sym->cipher.iv.length = SNOW3G_CIPHER_IV_LENGTH; /* Data lengths/offsets Parameters */ @@ -3009,12 +2998,10 @@ test_perf_set_crypto_op_snow3g_cipher(struct rte_crypto_op *op, } /* Cipher Parameters */ - op->sym->cipher.iv.data = rte_crypto_op_ctod_offset(op, - uint8_t *, IV_OFFSET); - op->sym->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(op, - IV_OFFSET); + op->sym->cipher.iv.offset = IV_OFFSET; op->sym->cipher.iv.length = SNOW3G_CIPHER_IV_LENGTH; - rte_memcpy(op->sym->cipher.iv.data, snow3g_iv, SNOW3G_CIPHER_IV_LENGTH); + rte_memcpy(rte_crypto_op_ctod_offset(op, uint8_t *, IV_OFFSET), + snow3g_iv, SNOW3G_CIPHER_IV_LENGTH); 
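
Since the IV now consumes private data behind each operation, op pools have
to reserve room for it at creation time. A sketch, with the pool dimensions
and the 16-byte worst-case IV being arbitrary assumptions:

#include <rte_crypto.h>

#define MAX_IV_LENGTH	16	/* assumed worst case for these tests */

static struct rte_mempool *
create_op_pool(int socket_id)
{
	/* priv_size bytes follow struct rte_crypto_sym_op in every op;
	 * the IV written at cipher.iv.offset must fit in that area. */
	return rte_crypto_op_pool_create("crypto_op_pool",
			RTE_CRYPTO_OP_TYPE_SYMMETRIC,
			8192, 128, MAX_IV_LENGTH, socket_id);
}
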
op->sym->cipher.data.offset = 0; op->sym->cipher.data.length = data_len << 3; @@ -3082,13 +3069,10 @@ test_perf_set_crypto_op_3des(struct rte_crypto_op *op, struct rte_mbuf *m, op->sym->auth.digest.length = digest_len; /* Cipher Parameters */ - op->sym->cipher.iv.data = rte_crypto_op_ctod_offset(op, - uint8_t *, IV_OFFSET); - op->sym->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(op, - IV_OFFSET); + op->sym->cipher.iv.offset = IV_OFFSET; op->sym->cipher.iv.length = TRIPLE_DES_CIPHER_IV_LENGTH; - rte_memcpy(op->sym->cipher.iv.data, triple_des_iv, - TRIPLE_DES_CIPHER_IV_LENGTH); + rte_memcpy(rte_crypto_op_ctod_offset(op, uint8_t *, IV_OFFSET), + triple_des_iv, TRIPLE_DES_CIPHER_IV_LENGTH); /* Data lengths/offsets Parameters */ op->sym->auth.data.offset = 0; @@ -4183,6 +4167,9 @@ perf_gcm_set_crypto_op(struct rte_crypto_op *op, struct rte_mbuf *m, struct crypto_params *m_hlp, struct perf_test_params *params) { + uint8_t *iv_ptr = rte_crypto_op_ctod_offset(op, + uint8_t *, IV_OFFSET); + if (rte_crypto_op_attach_sym_session(op, sess) != 0) { rte_crypto_op_free(op); return NULL; @@ -4203,14 +4190,11 @@ perf_gcm_set_crypto_op(struct rte_crypto_op *op, struct rte_mbuf *m, rte_memcpy(op->sym->auth.aad.data, params->symmetric_op->aad_data, params->symmetric_op->aad_len); - op->sym->cipher.iv.data = rte_crypto_op_ctod_offset(op, - uint8_t *, IV_OFFSET); - op->sym->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(op, - IV_OFFSET); - rte_memcpy(op->sym->cipher.iv.data, params->symmetric_op->iv_data, + op->sym->cipher.iv.offset = IV_OFFSET; + rte_memcpy(iv_ptr, params->symmetric_op->iv_data, params->symmetric_op->iv_len); if (params->symmetric_op->iv_len == 12) - op->sym->cipher.iv.data[15] = 1; + iv_ptr[15] = 1; op->sym->cipher.iv.length = params->symmetric_op->iv_len; -- 2.20.1
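
Taken together, the PMD-side conversion in this patch is mechanical:
wherever a driver used to read cipher.iv.data or cipher.iv.phys_addr, it
now derives the address from the op plus the stored offset. A minimal
sketch of both derivations (the helper name is hypothetical):

#include <rte_crypto.h>
#include <rte_memory.h>

static void
pmd_read_iv(struct rte_crypto_op *op, uint8_t **iv_va, phys_addr_t *iv_pa)
{
	/* Virtual address, as in the aesni_mb/openssl/snow3g changes: */
	*iv_va = rte_crypto_op_ctod_offset(op, uint8_t *,
			op->sym->cipher.iv.offset);

	/* Physical address for hardware DMA, as in the QAT change: */
	*iv_pa = rte_crypto_op_ctophys_offset(op,
			op->sym->cipher.iv.offset);
}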