sym_op->m_dst = bufs_out[i];
/* cipher parameters */
- sym_op->cipher.iv.offset = iv_offset;
- sym_op->cipher.iv.length = test_vector->iv.length;
-
if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
sym_op->m_dst = bufs_out[i];
/* cipher parameters */
- sym_op->cipher.iv.offset = iv_offset;
- sym_op->cipher.iv.length = test_vector->iv.length;
-
if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
sym_op->m_dst = bufs_out[i];
/* cipher parameters */
- sym_op->cipher.iv.offset = iv_offset;
- sym_op->cipher.iv.length = test_vector->iv.length;
-
sym_op->cipher.data.length = options->test_buffer_size;
sym_op->cipher.data.offset =
RTE_ALIGN_CEIL(options->auth_aad_sz, 16);
static struct rte_cryptodev_sym_session *
cperf_create_session(uint8_t dev_id,
const struct cperf_options *options,
- const struct cperf_test_vector *test_vector)
+ const struct cperf_test_vector *test_vector,
+ uint16_t iv_offset)
{
struct rte_crypto_sym_xform cipher_xform;
struct rte_crypto_sym_xform auth_xform;
cipher_xform.next = NULL;
cipher_xform.cipher.algo = options->cipher_algo;
cipher_xform.cipher.op = options->cipher_op;
+ cipher_xform.cipher.iv.offset = iv_offset;
/* cipher different than null */
if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
cipher_xform.cipher.key.data =
test_vector->cipher_key.data;
cipher_xform.cipher.key.length =
test_vector->cipher_key.length;
+ cipher_xform.cipher.iv.length = test_vector->iv.length;
} else {
cipher_xform.cipher.key.data = NULL;
cipher_xform.cipher.key.length = 0;
+ cipher_xform.cipher.iv.length = 0;
}
/* create crypto session */
sess = rte_cryptodev_sym_session_create(dev_id, &cipher_xform);
cipher_xform.next = NULL;
cipher_xform.cipher.algo = options->cipher_algo;
cipher_xform.cipher.op = options->cipher_op;
+ cipher_xform.cipher.iv.offset = iv_offset;
/* cipher different than null */
if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
cipher_xform.cipher.key.data =
test_vector->cipher_key.data;
cipher_xform.cipher.key.length =
test_vector->cipher_key.length;
+ cipher_xform.cipher.iv.length = test_vector->iv.length;
} else {
cipher_xform.cipher.key.data = NULL;
cipher_xform.cipher.key.length = 0;
+ cipher_xform.cipher.iv.length = 0;
}
typedef struct rte_cryptodev_sym_session *(*cperf_sessions_create_t)(
uint8_t dev_id, const struct cperf_options *options,
- const struct cperf_test_vector *test_vector);
+ const struct cperf_test_vector *test_vector,
+ uint16_t iv_offset);
typedef int (*cperf_populate_ops_t)(struct rte_crypto_op **ops,
struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
ctx->options = options;
ctx->test_vector = test_vector;
- ctx->sess = op_fns->sess_create(dev_id, options, test_vector);
+ /* IV goes at the end of the crypto operation */
+ uint16_t iv_offset = sizeof(struct rte_crypto_op) +
+ sizeof(struct rte_crypto_sym_op) +
+ sizeof(struct cperf_op_result *);
+
+ ctx->sess = op_fns->sess_create(dev_id, options, test_vector, iv_offset);
if (ctx->sess == NULL)
goto err;
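
For context, a minimal sketch (not part of the patch; op and test_vector assumed from the surrounding code) of the layout this iv_offset implies: the IV bytes live in the op's private trailing area, behind the symmetric op and the per-op result pointer, and are reached with rte_crypto_op_ctod_offset():

    /* Sketch only: the op mempool must reserve this trailing space per op. */
    uint16_t iv_offset = sizeof(struct rte_crypto_op) +
                    sizeof(struct rte_crypto_sym_op) +
                    sizeof(struct cperf_op_result *);
    uint8_t *iv_ptr = rte_crypto_op_ctod_offset(op, uint8_t *, iv_offset);
    rte_memcpy(iv_ptr, test_vector->iv.data, test_vector->iv.length);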
ctx->options = options;
ctx->test_vector = test_vector;
- ctx->sess = op_fns->sess_create(dev_id, options, test_vector);
+ /* IV goes at the end of the crypto operation */
+ uint16_t iv_offset = sizeof(struct rte_crypto_op) +
+ sizeof(struct rte_crypto_sym_op);
+
+ ctx->sess = op_fns->sess_create(dev_id, options, test_vector, iv_offset);
if (ctx->sess == NULL)
goto err;
+/*-
+ * BSD LICENSE
+ *
+ * Copyright(c) 2016-2017 Intel Corporation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in
+ * the documentation and/or other materials provided with the
+ * distribution.
+ * * Neither the name of Intel Corporation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
#include <rte_crypto.h>
#include <rte_malloc.h>
memcpy(t_vec->iv.data, iv, options->cipher_iv_sz);
}
t_vec->ciphertext.length = options->max_buffer_size;
+ /* Set IV parameters */
+ t_vec->iv.data = rte_malloc(NULL, options->cipher_iv_sz,
+ 16);
+ if (options->cipher_iv_sz && t_vec->iv.data == NULL) {
+ rte_free(t_vec);
+ return NULL;
+ }
+ memcpy(t_vec->iv.data, iv, options->cipher_iv_sz);
t_vec->iv.length = options->cipher_iv_sz;
+
t_vec->data.cipher_offset = 0;
t_vec->data.cipher_length = options->max_buffer_size;
}
ctx->options = options;
ctx->test_vector = test_vector;
- ctx->sess = op_fns->sess_create(dev_id, options, test_vector);
+ /* IV goes at the end of the crypto operation */
+ uint16_t iv_offset = sizeof(struct rte_crypto_op) +
+ sizeof(struct rte_crypto_sym_op);
+
+ ctx->sess = op_fns->sess_create(dev_id, options, test_vector, iv_offset);
if (ctx->sess == NULL)
goto err;
uint32_t offset;
uint32_t length;
} data; /**< Data offsets and length for ciphering */
-
- struct {
- uint16_t offset;
- uint16_t length;
- } iv; /**< Initialisation vector parameters */
} cipher;
struct {
* Removed field ``rte_crypto_sym_op_sess_type``.
* Replaced pointer and physical address of IV with offset from the start
of the crypto operation.
+ * Moved length and offset of cipher IV to ``rte_crypto_cipher_xform``.
* **Reorganized the crypto operation structure.**
Some fields have been modified in the ``rte_crypto_op`` and ``rte_crypto_sym_op``
structures, as described in the `New Features`_ section.
+* **Reorganized the ``rte_crypto_cipher_xform`` structure.**
+
+ * Added cipher IV length and offset parameters.
+
Shared Library Versions
-----------------------
return -EINVAL;
}
+ /* Set IV parameters */
+ sess->iv.offset = cipher_xform->cipher.iv.offset;
+ sess->iv.length = cipher_xform->cipher.iv.length;
+
+ /* IV check */
+ if (sess->iv.length != 16 && sess->iv.length != 12 &&
+ sess->iv.length != 0) {
+ GCM_LOG_ERR("Wrong IV length");
+ return -EINVAL;
+ }
+
/* Select Crypto operation */
if (cipher_xform->cipher.op == RTE_CRYPTO_CIPHER_OP_ENCRYPT &&
auth_xform->auth.op == RTE_CRYPTO_AUTH_OP_GENERATE)
src = rte_pktmbuf_mtod_offset(m_src, uint8_t *, offset);
- /* sanity checks */
- if (sym_op->cipher.iv.length != 16 && sym_op->cipher.iv.length != 12 &&
- sym_op->cipher.iv.length != 0) {
- GCM_LOG_ERR("iv");
- return -1;
- }
-
iv_ptr = rte_crypto_op_ctod_offset(op, uint8_t *,
- sym_op->cipher.iv.offset);
+ session->iv.offset);
/*
* GCM working in 12B IV mode => 16B pre-counter block we need
* to set BE LSB to 1, driver expects that 16B is allocated
*/
- if (sym_op->cipher.iv.length == 12) {
+ if (session->iv.length == 12) {
uint32_t *iv_padd = (uint32_t *)&(iv_ptr[12]);
*iv_padd = rte_bswap32(1);
}
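
As a worked illustration of that 12-byte branch (iv here is an assumed 96-bit input buffer): the full 16-byte allocation holds J0, i.e. the IV followed by a big-endian 32-bit counter initialised to 1, which is the pre-counter block the driver expects.

    /* Illustration: build J0 from a 96-bit GCM IV in a 16-byte buffer. */
    uint8_t j0[16];
    memcpy(j0, iv, 12);                      /* bytes 0..11: the IV itself */
    uint32_t *counter = (uint32_t *)&j0[12];
    *counter = rte_bswap32(1);               /* bytes 12..15: BE counter = 1 */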
/** AESNI GCM private session structure */
struct aesni_gcm_session {
+ struct {
+ uint16_t length;
+ uint16_t offset;
+ } iv;
+ /**< IV parameters */
enum aesni_gcm_operation op;
/**< GCM operation type */
enum aesni_gcm_key key;
return -1;
}
+ /* Set IV parameters */
+ sess->iv.offset = xform->cipher.iv.offset;
+ sess->iv.length = xform->cipher.iv.length;
+
/* Expanded cipher keys */
(*aes_keyexp_fn)(xform->cipher.key.data,
sess->cipher.expanded_aes_keys.encode,
return -1;
}
+ /* Default IV length = 0 */
+ sess->iv.length = 0;
+
if (aesni_mb_set_session_auth_parameters(mb_ops, sess, auth_xform)) {
MB_LOG_ERR("Invalid/unsupported authentication parameters");
return -1;
/* Set IV parameters */
job->iv = rte_crypto_op_ctod_offset(op, uint8_t *,
- op->sym->cipher.iv.offset);
- job->iv_len_in_bytes = op->sym->cipher.iv.length;
+ session->iv.offset);
+ job->iv_len_in_bytes = session->iv.length;
/* Data Parameter */
job->src = rte_pktmbuf_mtod(m_src, uint8_t *);
/** AES-NI multi-buffer private session structure */
struct aesni_mb_session {
JOB_CHAIN_ORDER chain_order;
+ struct {
+ uint16_t length;
+ uint16_t offset;
+ } iv;
+ /**< IV parameters */
/** Cipher Parameters */
struct {
case RTE_CRYPTO_CIPHER_AES_CBC:
sess->cipher.algo = calg;
/* IV len is always 16 bytes (block size) for AES CBC */
- sess->cipher.iv_len = 16;
+ sess->cipher.iv.length = 16;
break;
default:
return -EINVAL;
return -EINVAL;
}
+ /* Set IV offset */
+ sess->cipher.iv.offset = cipher_xform->cipher.iv.offset;
+
if (is_chained_op) {
ret = armv8_crypto_set_session_chained_parameters(sess,
cipher_xform, auth_xform);
op->sym->auth.digest.length);
}
- if (unlikely(op->sym->cipher.iv.length != sess->cipher.iv_len)) {
- op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
- return;
- }
-
arg.cipher.iv = rte_crypto_op_ctod_offset(op, uint8_t *,
- op->sym->cipher.iv.offset);
+ sess->cipher.iv.offset);
arg.cipher.key = sess->cipher.key.data;
/* Acquire combined mode function */
crypto_func = sess->crypto_func;
/**< cipher operation direction */
enum rte_crypto_cipher_algorithm algo;
/**< cipher algorithm */
- int iv_len;
- /**< IV length */
+ struct {
+ uint16_t length;
+ uint16_t offset;
+ } iv;
+ /**< IV parameters */
struct {
uint8_t data[256];
uint8_t *old_icv;
uint32_t mem_len = (7 * sizeof(struct qbman_fle)) + icv_len;
uint8_t *iv_ptr = rte_crypto_op_ctod_offset(op, uint8_t *,
- op->sym->cipher.iv.offset);
+ sess->iv.offset);
PMD_INIT_FUNC_TRACE();
sym_op->auth.digest.length,
sym_op->cipher.data.offset,
sym_op->cipher.data.length,
- sym_op->cipher.iv.length,
+ sess->iv.length,
sym_op->m_src->data_off);
/* Configure Output FLE with Scatter/Gather Entry */
DPAA2_VADDR_TO_IOVA(sym_op->auth.digest.data));
sge->length = sym_op->auth.digest.length;
DPAA2_SET_FD_LEN(fd, (sym_op->auth.data.length +
- sym_op->cipher.iv.length));
+ sess->iv.length));
}
DPAA2_SET_FLE_FIN(sge);
DPAA2_SET_FLE_SG_EXT(fle);
DPAA2_SET_FLE_FIN(fle);
fle->length = (sess->dir == DIR_ENC) ?
- (sym_op->auth.data.length + sym_op->cipher.iv.length) :
- (sym_op->auth.data.length + sym_op->cipher.iv.length +
+ (sym_op->auth.data.length + sess->iv.length) :
+ (sym_op->auth.data.length + sess->iv.length +
sym_op->auth.digest.length);
/* Configure Input SGE for Encap/Decap */
DPAA2_SET_FLE_ADDR(sge, DPAA2_VADDR_TO_IOVA(iv_ptr));
- sge->length = sym_op->cipher.iv.length;
+ sge->length = sess->iv.length;
sge++;
DPAA2_SET_FLE_ADDR(sge, DPAA2_MBUF_VADDR_TO_IOVA(sym_op->m_src));
sge->length = sym_op->auth.digest.length;
DPAA2_SET_FD_LEN(fd, (sym_op->auth.data.length +
sym_op->auth.digest.length +
- sym_op->cipher.iv.length));
+ sess->iv.length));
}
DPAA2_SET_FLE_FIN(sge);
if (auth_only_len) {
struct sec_flow_context *flc;
struct ctxt_priv *priv = sess->ctxt;
uint8_t *iv_ptr = rte_crypto_op_ctod_offset(op, uint8_t *,
- op->sym->cipher.iv.offset);
+ sess->iv.offset);
PMD_INIT_FUNC_TRACE();
flc = &priv->flc_desc[0].flc;
DPAA2_SET_FD_ADDR(fd, DPAA2_VADDR_TO_IOVA(fle));
DPAA2_SET_FD_LEN(fd, sym_op->cipher.data.length +
- sym_op->cipher.iv.length);
+ sess->iv.length);
DPAA2_SET_FD_COMPOUND_FMT(fd);
DPAA2_SET_FD_FLC(fd, DPAA2_VADDR_TO_IOVA(flc));
PMD_TX_LOG(DEBUG, "cipher_off: 0x%x/length %d,ivlen=%d data_off: 0x%x",
sym_op->cipher.data.offset,
sym_op->cipher.data.length,
- sym_op->cipher.iv.length,
+ sess->iv.length,
sym_op->m_src->data_off);
DPAA2_SET_FLE_ADDR(fle, DPAA2_MBUF_VADDR_TO_IOVA(sym_op->m_src));
DPAA2_SET_FLE_OFFSET(fle, sym_op->cipher.data.offset +
sym_op->m_src->data_off);
- fle->length = sym_op->cipher.data.length + sym_op->cipher.iv.length;
+ fle->length = sym_op->cipher.data.length + sess->iv.length;
PMD_TX_LOG(DEBUG, "1 - flc = %p, fle = %p FLEaddr = %x-%x, length %d",
flc, fle, fle->addr_hi, fle->addr_lo, fle->length);
fle++;
DPAA2_SET_FLE_ADDR(fle, DPAA2_VADDR_TO_IOVA(sge));
- fle->length = sym_op->cipher.data.length + sym_op->cipher.iv.length;
+ fle->length = sym_op->cipher.data.length + sess->iv.length;
DPAA2_SET_FLE_SG_EXT(fle);
DPAA2_SET_FLE_ADDR(sge, DPAA2_VADDR_TO_IOVA(iv_ptr));
- sge->length = sym_op->cipher.iv.length;
+ sge->length = sess->iv.length;
sge++;
DPAA2_SET_FLE_ADDR(sge, DPAA2_MBUF_VADDR_TO_IOVA(sym_op->m_src));
cipherdata.key_enc_flags = 0;
cipherdata.key_type = RTA_DATA_IMM;
+ /* Set IV parameters */
+ session->iv.offset = xform->cipher.iv.offset;
+ session->iv.length = xform->cipher.iv.length;
+
switch (xform->cipher.algo) {
case RTE_CRYPTO_CIPHER_AES_CBC:
cipherdata.algtype = OP_ALG_ALGSEL_AES;
(cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT) ?
DPAA2_SEC_HASH_CIPHER : DPAA2_SEC_CIPHER_HASH;
}
+
+ /* Set IV parameters */
+ session->iv.offset = cipher_xform->iv.offset;
+ session->iv.length = cipher_xform->iv.length;
+
/* For SEC AEAD only one descriptor is required */
priv = (struct ctxt_priv *)rte_zmalloc(NULL,
sizeof(struct ctxt_priv) + sizeof(struct sec_flc_desc),
RTE_LOG(ERR, PMD, "invalid session struct");
return NULL;
}
+
+ /* Default IV length = 0 */
+ session->iv.length = 0;
+
/* Cipher Only */
if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER && xform->next == NULL) {
session->ctxt_type = DPAA2_SEC_CIPHER;
uint8_t *data; /**< pointer to key data */
size_t length; /**< key length in bytes */
} auth_key;
+ struct {
+ uint16_t length; /**< IV length in bytes */
+ uint16_t offset; /**< IV offset in bytes */
+ } iv;
uint8_t status;
union {
struct dpaa2_sec_cipher_ctxt cipher_ctxt;
.min = 32,
.max = 32,
.increment = 0
- },
- .aad_size = { 0 }
+ },
+ .aad_size = { 0 }
}, }
}, }
},
/* Only KASUMI F8 supported */
if (cipher_xform->cipher.algo != RTE_CRYPTO_CIPHER_KASUMI_F8)
return -EINVAL;
+
+ sess->iv_offset = cipher_xform->cipher.iv.offset;
+ if (cipher_xform->cipher.iv.length != KASUMI_IV_LENGTH) {
+ KASUMI_LOG_ERR("Wrong IV length");
+ return -EINVAL;
+ }
+
/* Initialize key */
sso_kasumi_init_f8_key_sched(cipher_xform->cipher.key.data,
&sess->pKeySched_cipher);
uint32_t num_bytes[num_ops];
for (i = 0; i < num_ops; i++) {
- /* Sanity checks. */
- if (ops[i]->sym->cipher.iv.length != KASUMI_IV_LENGTH) {
- ops[i]->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
- KASUMI_LOG_ERR("iv");
- break;
- }
-
src[i] = rte_pktmbuf_mtod(ops[i]->sym->m_src, uint8_t *) +
(ops[i]->sym->cipher.data.offset >> 3);
dst[i] = ops[i]->sym->m_dst ?
rte_pktmbuf_mtod(ops[i]->sym->m_src, uint8_t *) +
(ops[i]->sym->cipher.data.offset >> 3);
iv_ptr = rte_crypto_op_ctod_offset(ops[i], uint8_t *,
- ops[i]->sym->cipher.iv.offset);
+ session->iv_offset);
iv[i] = *((uint64_t *)(iv_ptr));
num_bytes[i] = ops[i]->sym->cipher.data.length >> 3;
uint64_t iv;
uint32_t length_in_bits, offset_in_bits;
- /* Sanity checks. */
- if (unlikely(op->sym->cipher.iv.length != KASUMI_IV_LENGTH)) {
- op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
- KASUMI_LOG_ERR("iv");
- return 0;
- }
-
offset_in_bits = op->sym->cipher.data.offset;
src = rte_pktmbuf_mtod(op->sym->m_src, uint8_t *);
if (op->sym->m_dst == NULL) {
}
dst = rte_pktmbuf_mtod(op->sym->m_dst, uint8_t *);
iv_ptr = rte_crypto_op_ctod_offset(op, uint8_t *,
- op->sym->cipher.iv.offset);
+ session->iv_offset);
iv = *((uint64_t *)(iv_ptr));
length_in_bits = op->sym->cipher.data.length;
sso_kasumi_key_sched_t pKeySched_hash;
enum kasumi_operation op;
enum rte_crypto_auth_operation auth_op;
+ uint16_t iv_offset;
} __rte_cache_aligned;
.max = 0,
.increment = 0
},
- .iv_size = {
- .min = 0,
- .max = 0,
- .increment = 0
- }
+ .iv_size = { 0 }
}, },
}, }
},
/* Select cipher key */
sess->cipher.key.length = xform->cipher.key.length;
+ /* Set IV parameters */
+ sess->iv.offset = xform->cipher.iv.offset;
+ sess->iv.length = xform->cipher.iv.length;
+
/* Select cipher algo */
switch (xform->cipher.algo) {
case RTE_CRYPTO_CIPHER_3DES_CBC:
return -EINVAL;
}
+ /* Default IV length = 0 */
+ sess->iv.length = 0;
+
/* cipher_xform must be check before auth_xform */
if (cipher_xform) {
if (openssl_set_session_cipher_parameters(
}
iv = rte_crypto_op_ctod_offset(op, uint8_t *,
- op->sym->cipher.iv.offset);
- ivlen = op->sym->cipher.iv.length;
+ sess->iv.offset);
+ ivlen = sess->iv.length;
aad = op->sym->auth.aad.data;
aadlen = op->sym->auth.aad.length;
op->sym->cipher.data.offset);
iv = rte_crypto_op_ctod_offset(op, uint8_t *,
- op->sym->cipher.iv.offset);
+ sess->iv.offset);
if (sess->cipher.mode == OPENSSL_CIPHER_LIB)
if (sess->cipher.direction == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
op->sym->cipher.data.offset);
iv = rte_crypto_op_ctod_offset(op, uint8_t *,
- op->sym->cipher.iv.offset);
+ sess->iv.offset);
block_size = DES_BLOCK_SIZE;
last_block_len, sess->cipher.bpi_ctx);
/* Prepare parameters for CBC mode op */
iv = rte_crypto_op_ctod_offset(op, uint8_t *,
- op->sym->cipher.iv.offset);
+ sess->iv.offset);
dst += last_block_len - srclen;
srclen -= last_block_len;
}
enum openssl_chain_order chain_order;
/**< chain order mode */
+ struct {
+ uint16_t length;
+ uint16_t offset;
+ } iv;
+ /**< IV parameters */
/** Cipher Parameters */
struct {
enum rte_crypto_cipher_operation direction;
struct icp_qat_fw_la_bulk_req fw_req;
uint8_t aad_len;
struct qat_crypto_instance *inst;
+ struct {
+ uint16_t offset;
+ uint16_t length;
+ } iv;
rte_spinlock_t lock; /* protects this struct */
};
/* Get cipher xform from crypto xform chain */
cipher_xform = qat_get_cipher_xform(xform);
+ session->iv.offset = cipher_xform->iv.offset;
+ session->iv.length = cipher_xform->iv.length;
+
switch (cipher_xform->algo) {
case RTE_CRYPTO_CIPHER_AES_CBC:
if (qat_alg_validate_aes_key(cipher_xform->key.length,
else
/* runt block, i.e. less than one full block */
iv = rte_crypto_op_ctod_offset(op, uint8_t *,
- sym_op->cipher.iv.offset);
+ ctx->iv.offset);
#ifdef RTE_LIBRTE_PMD_QAT_DEBUG_TX
rte_hexdump(stdout, "BPI: src before pre-process:", last_block,
else
/* runt block, i.e. less than one full block */
iv = rte_crypto_op_ctod_offset(op, uint8_t *,
- sym_op->cipher.iv.offset);
+ ctx->iv.offset);
#ifdef RTE_LIBRTE_PMD_QAT_DEBUG_RX
rte_hexdump(stdout, "BPI: src before post-process:", last_block,
}
iv_ptr = rte_crypto_op_ctod_offset(op, uint8_t *,
- op->sym->cipher.iv.offset);
+ ctx->iv.offset);
/* copy IV into request if it fits */
- /*
- * If IV length is zero do not copy anything but still
- * use request descriptor embedded IV
- *
- */
- if (op->sym->cipher.iv.length) {
- if (op->sym->cipher.iv.length <=
- sizeof(cipher_param->u.cipher_IV_array)) {
- rte_memcpy(cipher_param->u.cipher_IV_array,
- iv_ptr,
- op->sym->cipher.iv.length);
- } else {
- ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(
- qat_req->comn_hdr.serv_specif_flags,
- ICP_QAT_FW_CIPH_IV_64BIT_PTR);
- cipher_param->u.s.cipher_IV_ptr =
- rte_crypto_op_ctophys_offset(op,
- op->sym->cipher.iv.offset);
- }
+ if (ctx->iv.length <=
+ sizeof(cipher_param->u.cipher_IV_array)) {
+ rte_memcpy(cipher_param->u.cipher_IV_array,
+ iv_ptr,
+ ctx->iv.length);
+ } else {
+ ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(
+ qat_req->comn_hdr.serv_specif_flags,
+ ICP_QAT_FW_CIPH_IV_64BIT_PTR);
+ cipher_param->u.s.cipher_IV_ptr =
+ rte_crypto_op_ctophys_offset(op,
+ ctx->iv.offset);
}
min_ofs = cipher_ofs;
}
if (ctx->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_128 ||
ctx->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_64) {
- if (op->sym->cipher.iv.length == 12) {
+ if (ctx->iv.length == 12) {
/*
* For GCM a 12 byte IV is allowed,
* but we need to inform the f/w
rte_pktmbuf_data_len(op->sym->m_src));
if (do_cipher)
rte_hexdump(stdout, "iv:", iv_ptr,
- op->sym->cipher.iv.length);
+ ctx->iv.length);
if (do_auth) {
rte_hexdump(stdout, "digest:", op->sym->auth.digest.data,
/* Only SNOW 3G UEA2 supported */
if (cipher_xform->cipher.algo != RTE_CRYPTO_CIPHER_SNOW3G_UEA2)
return -EINVAL;
+
+ if (cipher_xform->cipher.iv.length != SNOW3G_IV_LENGTH) {
+ SNOW3G_LOG_ERR("Wrong IV length");
+ return -EINVAL;
+ }
+ sess->iv_offset = cipher_xform->cipher.iv.offset;
+
/* Initialize key */
sso_snow3g_init_key_sched(cipher_xform->cipher.key.data,
&sess->pKeySched_cipher);
uint32_t num_bytes[SNOW3G_MAX_BURST];
for (i = 0; i < num_ops; i++) {
- /* Sanity checks. */
- if (unlikely(ops[i]->sym->cipher.iv.length != SNOW3G_IV_LENGTH)) {
- ops[i]->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
- SNOW3G_LOG_ERR("iv");
- break;
- }
-
src[i] = rte_pktmbuf_mtod(ops[i]->sym->m_src, uint8_t *) +
(ops[i]->sym->cipher.data.offset >> 3);
dst[i] = ops[i]->sym->m_dst ?
rte_pktmbuf_mtod(ops[i]->sym->m_src, uint8_t *) +
(ops[i]->sym->cipher.data.offset >> 3);
iv[i] = rte_crypto_op_ctod_offset(ops[i], uint8_t *,
- ops[i]->sym->cipher.iv.offset);
+ session->iv_offset);
num_bytes[i] = ops[i]->sym->cipher.data.length >> 3;
processed_ops++;
uint8_t *iv;
uint32_t length_in_bits, offset_in_bits;
- /* Sanity checks. */
- if (unlikely(op->sym->cipher.iv.length != SNOW3G_IV_LENGTH)) {
- op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
- SNOW3G_LOG_ERR("iv");
- return 0;
- }
-
offset_in_bits = op->sym->cipher.data.offset;
src = rte_pktmbuf_mtod(op->sym->m_src, uint8_t *);
if (op->sym->m_dst == NULL) {
}
dst = rte_pktmbuf_mtod(op->sym->m_dst, uint8_t *);
iv = rte_crypto_op_ctod_offset(op, uint8_t *,
- op->sym->cipher.iv.offset);
+ session->iv_offset);
length_in_bits = op->sym->cipher.data.length;
sso_snow3g_f8_1_buffer_bit(&session->pKeySched_cipher, iv,
enum rte_crypto_auth_operation auth_op;
sso_snow3g_key_schedule_t pKeySched_cipher;
sso_snow3g_key_schedule_t pKeySched_hash;
+ uint16_t iv_offset;
} __rte_cache_aligned;
/* Only ZUC EEA3 supported */
if (cipher_xform->cipher.algo != RTE_CRYPTO_CIPHER_ZUC_EEA3)
return -EINVAL;
+
+ if (cipher_xform->cipher.iv.length != ZUC_IV_KEY_LENGTH) {
+ ZUC_LOG_ERR("Wrong IV length");
+ return -EINVAL;
+ }
+ sess->iv_offset = cipher_xform->cipher.iv.offset;
+
/* Copy the key */
memcpy(sess->pKey_cipher, cipher_xform->cipher.key.data,
ZUC_IV_KEY_LENGTH);
uint8_t *cipher_keys[ZUC_MAX_BURST];
for (i = 0; i < num_ops; i++) {
- /* Sanity checks. */
- if (unlikely(ops[i]->sym->cipher.iv.length != ZUC_IV_KEY_LENGTH)) {
- ops[i]->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
- ZUC_LOG_ERR("iv");
- break;
- }
-
if (((ops[i]->sym->cipher.data.length % BYTE_LEN) != 0)
|| ((ops[i]->sym->cipher.data.offset
% BYTE_LEN) != 0)) {
rte_pktmbuf_mtod(ops[i]->sym->m_src, uint8_t *) +
(ops[i]->sym->cipher.data.offset >> 3);
iv[i] = rte_crypto_op_ctod_offset(ops[i], uint8_t *,
- ops[i]->sym->cipher.iv.offset);
+ session->iv_offset);
num_bytes[i] = ops[i]->sym->cipher.data.length >> 3;
cipher_keys[i] = session->pKey_cipher;
.min = 16,
.max = 16,
.increment = 0
- }
+ },
}, }
}, }
},
enum rte_crypto_auth_operation auth_op;
uint8_t pKey_cipher[ZUC_IV_KEY_LENGTH];
uint8_t pKey_hash[ZUC_IV_KEY_LENGTH];
+ uint16_t iv_offset;
} __rte_cache_aligned;
#include "esp.h"
#include "ipip.h"
-#define IV_OFFSET (sizeof(struct rte_crypto_op) + \
- sizeof(struct rte_crypto_sym_op))
-
int
esp_inbound(struct rte_mbuf *m, struct ipsec_sa *sa,
struct rte_crypto_op *cop)
case RTE_CRYPTO_CIPHER_AES_CBC:
/* Copy IV at the end of crypto operation */
rte_memcpy(iv_ptr, iv, sa->iv_len);
- sym_cop->cipher.iv.offset = IV_OFFSET;
- sym_cop->cipher.iv.length = sa->iv_len;
break;
case RTE_CRYPTO_CIPHER_AES_CTR:
case RTE_CRYPTO_CIPHER_AES_GCM:
icb->salt = sa->salt;
memcpy(&icb->iv, iv, 8);
icb->cnt = rte_cpu_to_be_32(1);
- sym_cop->cipher.iv.offset = IV_OFFSET;
- sym_cop->cipher.iv.length = 16;
break;
default:
RTE_LOG(ERR, IPSEC_ESP, "unsupported cipher algorithm %u\n",
icb->salt = sa->salt;
icb->iv = sa->seq;
icb->cnt = rte_cpu_to_be_32(1);
- sym_cop->cipher.iv.offset = IV_OFFSET;
- sym_cop->cipher.iv.length = 16;
uint8_t *aad;
#define MAX_DIGEST_SIZE 32 /* Bytes -- 256 bits */
+#define IV_OFFSET (sizeof(struct rte_crypto_op) + \
+ sizeof(struct rte_crypto_sym_op))
+
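A hedged sketch of how esp.c now uses this fixed offset per packet (cop, iv and sa as in the hunks above); note that the length is no longer written into the op, since the session's cipher xform carries it:

    /* Sketch: copy this packet's IV into the op's trailing area. */
    uint8_t *iv_ptr = rte_crypto_op_ctod_offset(cop, uint8_t *, IV_OFFSET);
    rte_memcpy(iv_ptr, iv, sa->iv_len);
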
#define uint32_t_to_char(ip, a, b, c, d) do {\
*a = (uint8_t)(ip >> 24 & 0xff);\
*b = (uint8_t)(ip >> 16 & 0xff);\
{
struct ipsec_sa *sa;
uint32_t i, idx;
+ uint16_t iv_length;
for (i = 0; i < nb_entries; i++) {
idx = SPI2IDX(entries[i].spi);
sa->dst.ip.ip4 = rte_cpu_to_be_32(sa->dst.ip.ip4);
}
+ switch (sa->cipher_algo) {
+ case RTE_CRYPTO_CIPHER_NULL:
+ case RTE_CRYPTO_CIPHER_AES_CBC:
+ iv_length = sa->iv_len;
+ break;
+ case RTE_CRYPTO_CIPHER_AES_CTR:
+ case RTE_CRYPTO_CIPHER_AES_GCM:
+ iv_length = 16;
+ break;
+ default:
+ RTE_LOG(ERR, IPSEC_ESP, "unsupported cipher algorithm %u\n",
+ sa->cipher_algo);
+ return -EINVAL;
+ }
+
if (inbound) {
sa_ctx->xf[idx].b.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
sa_ctx->xf[idx].b.cipher.algo = sa->cipher_algo;
sa->cipher_key_len;
sa_ctx->xf[idx].b.cipher.op =
RTE_CRYPTO_CIPHER_OP_DECRYPT;
+ sa_ctx->xf[idx].b.cipher.iv.offset = IV_OFFSET;
+ sa_ctx->xf[idx].b.cipher.iv.length = iv_length;
sa_ctx->xf[idx].b.next = NULL;
sa_ctx->xf[idx].a.type = RTE_CRYPTO_SYM_XFORM_AUTH;
sa->cipher_key_len;
sa_ctx->xf[idx].a.cipher.op =
RTE_CRYPTO_CIPHER_OP_ENCRYPT;
+ sa_ctx->xf[idx].a.cipher.iv.offset = IV_OFFSET;
+ sa_ctx->xf[idx].a.cipher.iv.length = iv_length;
sa_ctx->xf[idx].a.next = NULL;
sa_ctx->xf[idx].b.type = RTE_CRYPTO_SYM_XFORM_AUTH;
phys_addr_t phys_addr;
};
+struct l2fwd_iv {
+ uint8_t *data;
+ uint16_t length;
+};
+
/** l2fwd crypto application command line options */
struct l2fwd_crypto_options {
unsigned portmask;
unsigned ckey_param;
int ckey_random_size;
- struct l2fwd_key iv;
- unsigned iv_param;
+ struct l2fwd_iv iv;
+ unsigned int iv_param;
int iv_random_size;
struct rte_crypto_sym_xform auth_xform;
unsigned digest_length;
unsigned block_size;
- struct l2fwd_key iv;
+ struct l2fwd_iv iv;
struct l2fwd_key aad;
struct rte_cryptodev_sym_session *session;
/* Copy IV at the end of the crypto operation */
rte_memcpy(iv_ptr, cparams->iv.data, cparams->iv.length);
- op->sym->cipher.iv.offset = IV_OFFSET;
- op->sym->cipher.iv.length = cparams->iv.length;
-
/* For wireless algorithms, offset/length must be in bits */
if (cparams->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
cparams->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
port_cparams[i].iv.length);
port_cparams[i].cipher_algo = options->cipher_xform.cipher.algo;
+ /* Set IV parameters */
+ options->cipher_xform.cipher.iv.offset = IV_OFFSET;
+ options->cipher_xform.cipher.iv.length = options->iv.length;
}
port_cparams[i].session = initialize_crypto_session(options,
return -1;
}
+
+static int
+check_iv_param(const struct rte_crypto_param_range *iv_range_size,
+ unsigned int iv_param, int iv_random_size,
+ uint16_t *iv_length)
+{
+ /*
+ * Check if length of provided IV is supported
+ * by the algorithm chosen.
+ */
+ if (iv_param) {
+ if (check_supported_size(*iv_length,
+ iv_range_size->min,
+ iv_range_size->max,
+ iv_range_size->increment)
+ != 0) {
+ printf("Unsupported IV length\n");
+ return -1;
+ }
+ /*
+ * Check if length of IV to be randomly generated
+ * is supported by the algorithm chosen.
+ */
+ } else if (iv_random_size != -1) {
+ if (check_supported_size(iv_random_size,
+ iv_range_size->min,
+ iv_range_size->max,
+ iv_range_size->increment)
+ != 0) {
+ printf("Unsupported IV length\n");
+ return -1;
+ }
+ *iv_length = iv_random_size;
+ /* No size provided, use minimum size. */
+ } else
+ *iv_length = iv_range_size->min;
+
+ return 0;
+}
+
static int
initialize_cryptodevs(struct l2fwd_crypto_options *options, unsigned nb_ports,
uint8_t *enabled_cdevs)
}
options->block_size = cap->sym.cipher.block_size;
- /*
- * Check if length of provided IV is supported
- * by the algorithm chosen.
- */
- if (options->iv_param) {
- if (check_supported_size(options->iv.length,
- cap->sym.cipher.iv_size.min,
- cap->sym.cipher.iv_size.max,
- cap->sym.cipher.iv_size.increment)
- != 0) {
- printf("Unsupported IV length\n");
- return -1;
- }
- /*
- * Check if length of IV to be randomly generated
- * is supported by the algorithm chosen.
- */
- } else if (options->iv_random_size != -1) {
- if (check_supported_size(options->iv_random_size,
- cap->sym.cipher.iv_size.min,
- cap->sym.cipher.iv_size.max,
- cap->sym.cipher.iv_size.increment)
- != 0) {
- printf("Unsupported IV length\n");
- return -1;
- }
- options->iv.length = options->iv_random_size;
- /* No size provided, use minimum size. */
- } else
- options->iv.length = cap->sym.cipher.iv_size.min;
+
+ if (check_iv_param(&cap->sym.cipher.iv_size, options->iv_param,
+ options->iv_random_size, &options->iv.length) != 0)
+ return -1;
/*
* Check if length of provided cipher key is supported
* - Each key can be either 128 bits (16 bytes) or 256 bits (32 bytes).
* - Both keys must have the same size.
**/
+ struct {
+ uint16_t offset;
+ /**< Starting point for Initialisation Vector or Counter,
+ * specified as number of bytes from start of crypto
+ * operation (rte_crypto_op).
+ *
+ * - For block ciphers in CBC or F8 mode, or for KASUMI
+ * in F8 mode, or for SNOW 3G in UEA2 mode, this is the
+ * Initialisation Vector (IV) value.
+ *
+ * - For block ciphers in CTR mode, this is the counter.
+ *
+ * - For GCM mode, this is either the IV (if the length
+ * is 96 bits) or J0 (for other sizes), where J0 is as
+ * defined by NIST SP800-38D. Regardless of the IV
+ * length, a full 16 bytes needs to be allocated.
+ *
+ * - For CCM mode, the first byte is reserved, and the
+ * nonce should be written starting at &iv[1] (to allow
+ * space for the implementation to write in the flags
+ * in the first byte). Note that a full 16 bytes should
+ * be allocated, even though the length field will
+ * have a value less than this.
+ *
+ * - For AES-XTS, this is the 128bit tweak, i, from
+ * IEEE Std 1619-2007.
+ *
+ * For optimum performance, the data pointed to SHOULD
+ * be 8-byte aligned.
+ */
+ uint16_t length;
+ /**< Length of valid IV data.
+ *
+ * - For block ciphers in CBC or F8 mode, or for KASUMI
+ * in F8 mode, or for SNOW 3G in UEA2 mode, this is the
+ * length of the IV (which must be the same as the
+ * block length of the cipher).
+ *
+ * - For block ciphers in CTR mode, this is the length
+ * of the counter (which must be the same as the block
+ * length of the cipher).
+ *
+ * - For GCM mode, this is either 12 (for 96-bit IVs)
+ * or 16, in which case data points to J0.
+ *
+ * - For CCM mode, this is the length of the nonce,
+ * which can be in the range 7 to 13 inclusive.
+ */
+ } iv; /**< Initialisation vector parameters */
};
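
Putting the new fields together, a minimal sketch of the resulting usage (IV_OFFSET, op, iv and the omitted key fields are application-side assumptions): the IV length and offset are fixed once in the xform at session creation, and each operation only supplies the IV bytes at that offset.

    /* Session time: declare where the IV will sit and how long it is. */
    struct rte_crypto_sym_xform cipher_xform = {
            .type = RTE_CRYPTO_SYM_XFORM_CIPHER,
            .cipher = {
                    .algo = RTE_CRYPTO_CIPHER_AES_CBC,
                    .op = RTE_CRYPTO_CIPHER_OP_ENCRYPT,
                    .iv = { .offset = IV_OFFSET, .length = 16 },
            },
    };

    /* Operation time: copy the IV bytes to that offset within each op. */
    uint8_t *iv_ptr = rte_crypto_op_ctod_offset(op, uint8_t *, IV_OFFSET);
    rte_memcpy(iv_ptr, iv, 16);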
/** Symmetric Authentication / Hash Algorithms */
*/
} data; /**< Data offsets and length for ciphering */
- struct {
- uint16_t offset;
- /**< Starting point for Initialisation Vector or Counter,
- * specified as number of bytes from start of crypto
- * operation.
- *
- * - For block ciphers in CBC or F8 mode, or for KASUMI
- * in F8 mode, or for SNOW 3G in UEA2 mode, this is the
- * Initialisation Vector (IV) value.
- *
- * - For block ciphers in CTR mode, this is the counter.
- *
- * - For GCM mode, this is either the IV (if the length
- * is 96 bits) or J0 (for other sizes), where J0 is as
- * defined by NIST SP800-38D. Regardless of the IV
- * length, a full 16 bytes needs to be allocated.
- *
- * - For CCM mode, the first byte is reserved, and the
- * nonce should be written starting at &iv[1] (to allow
- * space for the implementation to write in the flags
- * in the first byte). Note that a full 16 bytes should
- * be allocated, even though the length field will
- * have a value less than this.
- *
- * - For AES-XTS, this is the 128bit tweak, i, from
- * IEEE Std 1619-2007.
- *
- * For optimum performance, the data pointed to SHOULD
- * be 8-byte aligned.
- */
- uint16_t length;
- /**< Length of valid IV data.
- *
- * - For block ciphers in CBC or F8 mode, or for KASUMI
- * in F8 mode, or for SNOW 3G in UEA2 mode, this is the
- * length of the IV (which must be the same as the
- * block length of the cipher).
- *
- * - For block ciphers in CTR mode, this is the length
- * of the counter (which must be the same as the block
- * length of the cipher).
- *
- * - For GCM mode, this is either 12 (for 96-bit IVs)
- * or 16, in which case data points to J0.
- *
- * - For CCM mode, this is the length of the nonce,
- * which can be in the range 7 to 13 inclusive.
- */
- } iv; /**< Initialisation vector parameters */
} cipher;
struct {
ut_params->cipher_xform.cipher.op = RTE_CRYPTO_CIPHER_OP_ENCRYPT;
ut_params->cipher_xform.cipher.key.data = aes_cbc_key;
ut_params->cipher_xform.cipher.key.length = CIPHER_KEY_LENGTH_AES_CBC;
+ ut_params->cipher_xform.cipher.iv.offset = IV_OFFSET;
+ ut_params->cipher_xform.cipher.iv.length = CIPHER_IV_LENGTH_AES_CBC;
/* Setup HMAC Parameters */
ut_params->auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
sym_op->auth.data.offset = 0;
sym_op->auth.data.length = QUOTE_512_BYTES;
- /* Set crypto operation cipher parameters */
- sym_op->cipher.iv.offset = IV_OFFSET;
- sym_op->cipher.iv.length = CIPHER_IV_LENGTH_AES_CBC;
-
+ /* Copy IV at the end of the crypto operation */
rte_memcpy(rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET),
aes_cbc_iv, CIPHER_IV_LENGTH_AES_CBC);
+ /* Set crypto operation cipher parameters */
sym_op->cipher.data.offset = 0;
sym_op->cipher.data.length = QUOTE_512_BYTES;
ut_params->cipher_xform.cipher.op = RTE_CRYPTO_CIPHER_OP_DECRYPT;
ut_params->cipher_xform.cipher.key.data = cipher_key;
ut_params->cipher_xform.cipher.key.length = CIPHER_KEY_LENGTH_AES_CBC;
+ ut_params->cipher_xform.cipher.iv.offset = IV_OFFSET;
+ ut_params->cipher_xform.cipher.iv.length = CIPHER_IV_LENGTH_AES_CBC;
/* Setup HMAC Parameters */
ut_params->auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
sym_op->auth.data.offset = 0;
sym_op->auth.data.length = QUOTE_512_BYTES;
- sym_op->cipher.iv.offset = IV_OFFSET;
- sym_op->cipher.iv.length = CIPHER_IV_LENGTH_AES_CBC;
-
+ /* Copy IV at the end of the crypto operation */
rte_memcpy(rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET),
iv, CIPHER_IV_LENGTH_AES_CBC);
create_wireless_algo_cipher_session(uint8_t dev_id,
enum rte_crypto_cipher_operation op,
enum rte_crypto_cipher_algorithm algo,
- const uint8_t *key, const uint8_t key_len)
+ const uint8_t *key, const uint8_t key_len,
+ uint8_t iv_len)
{
uint8_t cipher_key[key_len];
ut_params->cipher_xform.cipher.op = op;
ut_params->cipher_xform.cipher.key.data = cipher_key;
ut_params->cipher_xform.cipher.key.length = key_len;
+ ut_params->cipher_xform.cipher.iv.offset = IV_OFFSET;
+ ut_params->cipher_xform.cipher.iv.length = iv_len;
TEST_HEXDUMP(stdout, "key:", key, key_len);
sym_op->m_src = ut_params->ibuf;
/* iv */
- sym_op->cipher.iv.offset = IV_OFFSET;
- sym_op->cipher.iv.length = iv_len;
-
rte_memcpy(rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET),
iv, iv_len);
sym_op->cipher.data.length = cipher_len;
sym_op->m_dst = ut_params->obuf;
/* iv */
- sym_op->cipher.iv.offset = IV_OFFSET;
- sym_op->cipher.iv.length = iv_len;
-
rte_memcpy(rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET),
iv, iv_len);
sym_op->cipher.data.length = cipher_len;
enum rte_crypto_auth_algorithm auth_algo,
enum rte_crypto_cipher_algorithm cipher_algo,
const uint8_t *key, const uint8_t key_len,
- const uint8_t aad_len, const uint8_t auth_len)
+ const uint8_t aad_len, const uint8_t auth_len,
+ uint8_t iv_len)
{
uint8_t cipher_auth_key[key_len];
ut_params->cipher_xform.cipher.op = cipher_op;
ut_params->cipher_xform.cipher.key.data = cipher_auth_key;
ut_params->cipher_xform.cipher.key.length = key_len;
+ ut_params->cipher_xform.cipher.iv.offset = IV_OFFSET;
+ ut_params->cipher_xform.cipher.iv.length = iv_len;
TEST_HEXDUMP(stdout, "key:", key, key_len);
const uint8_t *key = tdata->key.data;
const uint8_t aad_len = tdata->aad.len;
const uint8_t auth_len = tdata->digest.len;
+ uint8_t iv_len = tdata->iv.len;
memcpy(cipher_auth_key, key, key_len);
ut_params->cipher_xform.cipher.op = cipher_op;
ut_params->cipher_xform.cipher.key.data = cipher_auth_key;
ut_params->cipher_xform.cipher.key.length = key_len;
+ ut_params->cipher_xform.cipher.iv.offset = IV_OFFSET;
+ ut_params->cipher_xform.cipher.iv.length = iv_len;
+
TEST_HEXDUMP(stdout, "key:", key, key_len);
enum rte_crypto_auth_algorithm auth_algo,
enum rte_crypto_cipher_algorithm cipher_algo,
const uint8_t *key, const uint8_t key_len,
- const uint8_t aad_len, const uint8_t auth_len)
+ const uint8_t aad_len, const uint8_t auth_len,
+ uint8_t iv_len)
{
uint8_t auth_cipher_key[key_len];
ut_params->cipher_xform.cipher.op = cipher_op;
ut_params->cipher_xform.cipher.key.data = auth_cipher_key;
ut_params->cipher_xform.cipher.key.length = key_len;
+ ut_params->cipher_xform.cipher.iv.offset = IV_OFFSET;
+ ut_params->cipher_xform.cipher.iv.length = iv_len;
TEST_HEXDUMP(stdout, "key:", key, key_len);
TEST_HEXDUMP(stdout, "aad:", sym_op->auth.aad.data, aad_len);
/* iv */
- sym_op->cipher.iv.offset = IV_OFFSET;
- sym_op->cipher.iv.length = iv_len;
-
rte_memcpy(rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET),
iv, iv_len);
sym_op->cipher.data.length = cipher_len;
TEST_HEXDUMP(stdout, "aad:", sym_op->auth.aad.data, aad_len);
/* iv */
- sym_op->cipher.iv.offset = IV_OFFSET;
- sym_op->cipher.iv.length = iv_len;
-
rte_memcpy(rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET),
iv, iv_len);
sym_op->cipher.data.length = cipher_len;
sym_op->auth.aad.data, aad_len);
/* iv */
- sym_op->cipher.iv.offset = IV_OFFSET;
- sym_op->cipher.iv.length = iv_len;
-
rte_memcpy(rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET),
iv, iv_len);
sym_op->cipher.data.length = cipher_len;
retval = create_wireless_algo_cipher_session(ts_params->valid_devs[0],
RTE_CRYPTO_CIPHER_OP_ENCRYPT,
RTE_CRYPTO_CIPHER_KASUMI_F8,
- tdata->key.data, tdata->key.len);
+ tdata->key.data, tdata->key.len,
+ tdata->iv.len);
if (retval < 0)
return retval;
retval = create_wireless_algo_cipher_session(ts_params->valid_devs[0],
RTE_CRYPTO_CIPHER_OP_ENCRYPT,
RTE_CRYPTO_CIPHER_KASUMI_F8,
- tdata->key.data, tdata->key.len);
+ tdata->key.data, tdata->key.len,
+ tdata->iv.len);
if (retval < 0)
return retval;
retval = create_wireless_algo_cipher_session(ts_params->valid_devs[0],
RTE_CRYPTO_CIPHER_OP_ENCRYPT,
RTE_CRYPTO_CIPHER_KASUMI_F8,
- tdata->key.data, tdata->key.len);
+ tdata->key.data, tdata->key.len,
+ tdata->iv.len);
if (retval < 0)
return retval;
retval = create_wireless_algo_cipher_session(ts_params->valid_devs[0],
RTE_CRYPTO_CIPHER_OP_ENCRYPT,
RTE_CRYPTO_CIPHER_KASUMI_F8,
- tdata->key.data, tdata->key.len);
+ tdata->key.data, tdata->key.len,
+ tdata->iv.len);
if (retval < 0)
return retval;
retval = create_wireless_algo_cipher_session(ts_params->valid_devs[0],
RTE_CRYPTO_CIPHER_OP_DECRYPT,
RTE_CRYPTO_CIPHER_KASUMI_F8,
- tdata->key.data, tdata->key.len);
+ tdata->key.data, tdata->key.len,
+ tdata->iv.len);
if (retval < 0)
return retval;
retval = create_wireless_algo_cipher_session(ts_params->valid_devs[0],
RTE_CRYPTO_CIPHER_OP_DECRYPT,
RTE_CRYPTO_CIPHER_KASUMI_F8,
- tdata->key.data, tdata->key.len);
+ tdata->key.data, tdata->key.len,
+ tdata->iv.len);
if (retval < 0)
return retval;
retval = create_wireless_algo_cipher_session(ts_params->valid_devs[0],
RTE_CRYPTO_CIPHER_OP_ENCRYPT,
RTE_CRYPTO_CIPHER_SNOW3G_UEA2,
- tdata->key.data, tdata->key.len);
+ tdata->key.data, tdata->key.len,
+ tdata->iv.len);
if (retval < 0)
return retval;
retval = create_wireless_algo_cipher_session(ts_params->valid_devs[0],
RTE_CRYPTO_CIPHER_OP_ENCRYPT,
RTE_CRYPTO_CIPHER_SNOW3G_UEA2,
- tdata->key.data, tdata->key.len);
+ tdata->key.data, tdata->key.len,
+ tdata->iv.len);
if (retval < 0)
return retval;
retval = create_wireless_algo_cipher_session(ts_params->valid_devs[0],
RTE_CRYPTO_CIPHER_OP_ENCRYPT,
RTE_CRYPTO_CIPHER_SNOW3G_UEA2,
- tdata->key.data, tdata->key.len);
+ tdata->key.data, tdata->key.len,
+ tdata->iv.len);
if (retval < 0)
return retval;
retval = create_wireless_algo_cipher_session(ts_params->valid_devs[0],
RTE_CRYPTO_CIPHER_OP_ENCRYPT,
RTE_CRYPTO_CIPHER_SNOW3G_UEA2,
- tdata->key.data, tdata->key.len);
+ tdata->key.data, tdata->key.len,
+ tdata->iv.len);
if (retval < 0)
return retval;
retval = create_wireless_algo_cipher_session(ts_params->valid_devs[0],
RTE_CRYPTO_CIPHER_OP_DECRYPT,
RTE_CRYPTO_CIPHER_SNOW3G_UEA2,
- tdata->key.data, tdata->key.len);
+ tdata->key.data, tdata->key.len,
+ tdata->iv.len);
if (retval < 0)
return retval;
retval = create_wireless_algo_cipher_session(ts_params->valid_devs[0],
RTE_CRYPTO_CIPHER_OP_DECRYPT,
RTE_CRYPTO_CIPHER_SNOW3G_UEA2,
- tdata->key.data, tdata->key.len);
+ tdata->key.data, tdata->key.len,
+ tdata->iv.len);
if (retval < 0)
return retval;
RTE_CRYPTO_AUTH_SNOW3G_UIA2,
RTE_CRYPTO_CIPHER_SNOW3G_UEA2,
tdata->key.data, tdata->key.len,
- tdata->aad.len, tdata->digest.len);
+ tdata->aad.len, tdata->digest.len,
+ tdata->iv.len);
if (retval < 0)
return retval;
ut_params->ibuf = rte_pktmbuf_alloc(ts_params->mbuf_pool);
RTE_CRYPTO_AUTH_SNOW3G_UIA2,
RTE_CRYPTO_CIPHER_SNOW3G_UEA2,
tdata->key.data, tdata->key.len,
- tdata->aad.len, tdata->digest.len);
+ tdata->aad.len, tdata->digest.len,
+ tdata->iv.len);
if (retval < 0)
return retval;
RTE_CRYPTO_AUTH_KASUMI_F9,
RTE_CRYPTO_CIPHER_KASUMI_F8,
tdata->key.data, tdata->key.len,
- tdata->aad.len, tdata->digest.len);
+ tdata->aad.len, tdata->digest.len,
+ tdata->iv.len);
if (retval < 0)
return retval;
ut_params->ibuf = rte_pktmbuf_alloc(ts_params->mbuf_pool);
RTE_CRYPTO_AUTH_KASUMI_F9,
RTE_CRYPTO_CIPHER_KASUMI_F8,
tdata->key.data, tdata->key.len,
- tdata->aad.len, tdata->digest.len);
+ tdata->aad.len, tdata->digest.len,
+ tdata->iv.len);
if (retval < 0)
return retval;
retval = create_wireless_algo_cipher_session(ts_params->valid_devs[0],
RTE_CRYPTO_CIPHER_OP_ENCRYPT,
RTE_CRYPTO_CIPHER_ZUC_EEA3,
- tdata->key.data, tdata->key.len);
+ tdata->key.data, tdata->key.len,
+ tdata->iv.len);
if (retval < 0)
return retval;
retval = create_wireless_algo_cipher_session(ts_params->valid_devs[0],
RTE_CRYPTO_CIPHER_OP_ENCRYPT,
RTE_CRYPTO_CIPHER_ZUC_EEA3,
- tdata->key.data, tdata->key.len);
+ tdata->key.data, tdata->key.len,
+ tdata->iv.len);
if (retval < 0)
return retval;
create_gcm_session(uint8_t dev_id, enum rte_crypto_cipher_operation op,
const uint8_t *key, const uint8_t key_len,
const uint8_t aad_len, const uint8_t auth_len,
+ uint8_t iv_len,
enum rte_crypto_auth_operation auth_op)
{
uint8_t cipher_key[key_len];
ut_params->cipher_xform.cipher.op = op;
ut_params->cipher_xform.cipher.key.data = cipher_key;
ut_params->cipher_xform.cipher.key.length = key_len;
+ ut_params->cipher_xform.cipher.iv.offset = IV_OFFSET;
+ ut_params->cipher_xform.cipher.iv.length = iv_len;
TEST_HEXDUMP(stdout, "key:", key, key_len);
enum rte_crypto_cipher_operation cipher_op,
uint8_t *key, const uint8_t key_len,
const uint8_t aad_len, const uint8_t auth_len,
+ uint8_t iv_len,
enum rte_crypto_auth_operation auth_op)
{
TEST_ASSERT_NOT_NULL(rte_crypto_op_sym_xforms_alloc(op, 2),
sym_op->xform->cipher.op = cipher_op;
sym_op->xform->cipher.key.data = key;
sym_op->xform->cipher.key.length = key_len;
+ sym_op->xform->cipher.iv.offset = IV_OFFSET;
+ sym_op->xform->cipher.iv.length = iv_len;
TEST_HEXDUMP(stdout, "key:", key, key_len);
/* Append IV at the end of the crypto operation*/
uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ut_params->op,
uint8_t *, IV_OFFSET);
- sym_op->cipher.iv.offset = IV_OFFSET;
- sym_op->cipher.iv.length = tdata->iv.len;
rte_memcpy(iv_ptr, tdata->iv.data, tdata->iv.len);
TEST_HEXDUMP(stdout, "iv:", iv_ptr,
- sym_op->cipher.iv.length);
+ tdata->iv.len);
/* Append plaintext/ciphertext */
if (op == RTE_CRYPTO_CIPHER_OP_ENCRYPT) {
RTE_CRYPTO_CIPHER_OP_ENCRYPT,
tdata->key.data, tdata->key.len,
tdata->aad.len, tdata->auth_tag.len,
+ tdata->iv.len,
RTE_CRYPTO_AUTH_OP_GENERATE);
if (retval < 0)
return retval;
RTE_CRYPTO_CIPHER_OP_DECRYPT,
tdata->key.data, tdata->key.len,
tdata->aad.len, tdata->auth_tag.len,
+ tdata->iv.len,
RTE_CRYPTO_AUTH_OP_VERIFY);
if (retval < 0)
return retval;
RTE_CRYPTO_CIPHER_OP_ENCRYPT,
tdata->key.data, tdata->key.len,
tdata->aad.len, tdata->auth_tag.len,
+ tdata->iv.len,
RTE_CRYPTO_AUTH_OP_GENERATE);
if (retval < 0)
return retval;
RTE_CRYPTO_CIPHER_OP_DECRYPT,
tdata->key.data, tdata->key.len,
tdata->aad.len, tdata->auth_tag.len,
+ tdata->iv.len,
RTE_CRYPTO_AUTH_OP_VERIFY);
if (retval < 0)
return retval;
RTE_CRYPTO_CIPHER_OP_ENCRYPT,
key, tdata->key.len,
tdata->aad.len, tdata->auth_tag.len,
+ tdata->iv.len,
RTE_CRYPTO_AUTH_OP_GENERATE);
if (retval < 0)
return retval;
RTE_CRYPTO_CIPHER_OP_DECRYPT,
key, tdata->key.len,
tdata->aad.len, tdata->auth_tag.len,
+ tdata->iv.len,
RTE_CRYPTO_AUTH_OP_VERIFY);
if (retval < 0)
return retval;
uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ut_params->op,
uint8_t *, IV_OFFSET);
- sym_op->cipher.iv.offset = IV_OFFSET;
- sym_op->cipher.iv.length = tdata->iv.len;
-
rte_memcpy(iv_ptr, tdata->iv.data, tdata->iv.len);
TEST_HEXDUMP(stdout, "iv:", iv_ptr, tdata->iv.len);
ut_params->cipher_xform.cipher.op = op;
ut_params->cipher_xform.cipher.key.data = cipher_key;
ut_params->cipher_xform.cipher.key.length = tdata->key.len;
+ ut_params->cipher_xform.cipher.iv.offset = IV_OFFSET;
+ ut_params->cipher_xform.cipher.iv.length = tdata->iv.len;
ut_params->auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
ut_params->auth_xform.next = NULL;
ut_params->cipher_xform.cipher.op = cipher_op;
ut_params->cipher_xform.cipher.key.data = cipher_key;
ut_params->cipher_xform.cipher.key.length = reference->cipher_key.len;
+ ut_params->cipher_xform.cipher.iv.offset = IV_OFFSET;
+ ut_params->cipher_xform.cipher.iv.length = reference->iv.len;
/* Create Crypto session*/
ut_params->sess = rte_cryptodev_sym_session_create(dev_id,
sym_op->auth.digest.data,
sym_op->auth.digest.length);
- sym_op->cipher.iv.offset = IV_OFFSET;
- sym_op->cipher.iv.length = reference->iv.len;
-
rte_memcpy(rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET),
reference->iv.data, reference->iv.len);
sym_op->auth.digest.data,
sym_op->auth.digest.length);
- sym_op->cipher.iv.offset = IV_OFFSET;
- sym_op->cipher.iv.length = reference->iv.len;
-
rte_memcpy(rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET),
reference->iv.data, reference->iv.len);
uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ut_params->op,
uint8_t *, IV_OFFSET);
- sym_op->cipher.iv.offset = IV_OFFSET;
- sym_op->cipher.iv.length = iv_len;
rte_memcpy(iv_ptr, tdata->iv.data, iv_len);
RTE_CRYPTO_CIPHER_OP_ENCRYPT,
tdata->key.data, tdata->key.len,
tdata->aad.len, tdata->auth_tag.len,
+ tdata->iv.len,
RTE_CRYPTO_AUTH_OP_GENERATE);
if (retval < 0)
return retval;
RTE_CRYPTO_CIPHER_OP_DECRYPT;
cipher_xform->cipher.key.data = cipher_key;
cipher_xform->cipher.key.length = tdata->cipher_key.len;
+ cipher_xform->cipher.iv.offset = IV_OFFSET;
+ cipher_xform->cipher.iv.length = tdata->iv.len;
sym_op->cipher.data.offset = 0;
sym_op->cipher.data.length = tdata->ciphertext.len;
- sym_op->cipher.iv.offset = IV_OFFSET;
- sym_op->cipher.iv.length = tdata->iv.len;
rte_memcpy(rte_crypto_op_ctod_offset(op, uint8_t *, IV_OFFSET),
tdata->iv.data,
tdata->iv.len);
#include "test_cryptodev.h"
#include "test_cryptodev_gcm_test_vectors.h"
+#define AES_CIPHER_IV_LENGTH 16
+#define TRIPLE_DES_CIPHER_IV_LENGTH 8
#define PERF_NUM_OPS_INFLIGHT (128)
#define DEFAULT_NUM_REQS_TO_SUBMIT (10000000)
struct symmetric_op {
- const uint8_t *iv_data;
- uint32_t iv_len;
-
const uint8_t *aad_data;
uint32_t aad_len;
const uint8_t *key_auth_data;
uint32_t key_auth_len;
+ const uint8_t *iv_data;
+ uint16_t iv_len;
uint32_t digest_len;
};
ut_params->cipher_xform.cipher.op = RTE_CRYPTO_CIPHER_OP_DECRYPT;
ut_params->cipher_xform.cipher.key.data = aes_cbc_128_key;
ut_params->cipher_xform.cipher.key.length = CIPHER_IV_LENGTH_AES_CBC;
-
+ ut_params->cipher_xform.cipher.iv.offset = IV_OFFSET;
+ ut_params->cipher_xform.cipher.iv.length = CIPHER_IV_LENGTH_AES_CBC;
/* Setup HMAC Parameters */
ut_params->auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
op->sym->auth.data.offset = 0;
op->sym->auth.data.length = data_params[0].length;
- op->sym->cipher.iv.offset = IV_OFFSET;
- op->sym->cipher.iv.length = CIPHER_IV_LENGTH_AES_CBC;
-
rte_memcpy(rte_crypto_op_ctod_offset(op, uint8_t *, IV_OFFSET),
aes_cbc_128_iv, CIPHER_IV_LENGTH_AES_CBC);
cipher_xform.cipher.key.data = aes_key;
cipher_xform.cipher.key.length = cipher_key_len;
+ cipher_xform.cipher.iv.offset = IV_OFFSET;
+ cipher_xform.cipher.iv.length = AES_CIPHER_IV_LENGTH;
if (chain != CIPHER_ONLY) {
/* Setup HMAC Parameters */
auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
cipher_xform.cipher.key.data = snow3g_cipher_key;
cipher_xform.cipher.key.length = cipher_key_len;
+ cipher_xform.cipher.iv.offset = IV_OFFSET;
+ cipher_xform.cipher.iv.length = SNOW3G_CIPHER_IV_LENGTH;
+
/* Setup HMAC Parameters */
auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
/* Setup Cipher Parameters */
cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
cipher_xform.cipher.algo = cipher_algo;
+ cipher_xform.cipher.iv.offset = IV_OFFSET;
cipher_xform.cipher.op = RTE_CRYPTO_CIPHER_OP_ENCRYPT;
switch (cipher_algo) {
case RTE_CRYPTO_CIPHER_3DES_CBC:
case RTE_CRYPTO_CIPHER_3DES_CTR:
cipher_xform.cipher.key.data = triple_des_key;
+ cipher_xform.cipher.iv.length = TRIPLE_DES_CIPHER_IV_LENGTH;
break;
case RTE_CRYPTO_CIPHER_AES_CBC:
case RTE_CRYPTO_CIPHER_AES_CTR:
case RTE_CRYPTO_CIPHER_AES_GCM:
cipher_xform.cipher.key.data = aes_key;
+ cipher_xform.cipher.iv.length = AES_CIPHER_IV_LENGTH;
break;
default:
return NULL;
}
cipher_xform.cipher.key.length = cipher_key_len;
+ cipher_xform.cipher.iv.offset = IV_OFFSET;
+ cipher_xform.cipher.iv.length = AES_CIPHER_IV_LENGTH;
/* Setup Auth Parameters */
auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
}
}
-#define AES_CIPHER_IV_LENGTH 16
#define AES_GCM_AAD_LENGTH 16
-#define TRIPLE_DES_CIPHER_IV_LENGTH 8
static struct rte_mbuf *
test_perf_create_pktmbuf(struct rte_mempool *mpool, unsigned buf_sz)
}
- /* Cipher Parameters */
- op->sym->cipher.iv.offset = IV_OFFSET;
- op->sym->cipher.iv.length = AES_CIPHER_IV_LENGTH;
+ /* Copy the IV at the end of the crypto operation */
rte_memcpy(rte_crypto_op_ctod_offset(op, uint8_t *, IV_OFFSET),
aes_iv, AES_CIPHER_IV_LENGTH);
+ /* Cipher Parameters */
op->sym->cipher.data.offset = 0;
op->sym->cipher.data.length = data_len;
op->sym->auth.aad.data = aes_gcm_aad;
op->sym->auth.aad.length = AES_GCM_AAD_LENGTH;
- /* Cipher Parameters */
- op->sym->cipher.iv.offset = IV_OFFSET;
- op->sym->cipher.iv.length = AES_CIPHER_IV_LENGTH;
+ /* Copy IV at the end of the crypto operation */
rte_memcpy(rte_crypto_op_ctod_offset(op, uint8_t *, IV_OFFSET),
aes_iv, AES_CIPHER_IV_LENGTH);
IV_OFFSET);
op->sym->auth.aad.length = SNOW3G_CIPHER_IV_LENGTH;
- /* Cipher Parameters */
- op->sym->cipher.iv.offset = IV_OFFSET;
- op->sym->cipher.iv.length = SNOW3G_CIPHER_IV_LENGTH;
-
/* Data lengths/offsets Parameters */
op->sym->auth.data.offset = 0;
op->sym->auth.data.length = data_len << 3;
return NULL;
}
- /* Cipher Parameters */
- op->sym->cipher.iv.offset = IV_OFFSET;
- op->sym->cipher.iv.length = SNOW3G_CIPHER_IV_LENGTH;
+ /* Copy IV at the end of the crypto operation */
rte_memcpy(rte_crypto_op_ctod_offset(op, uint8_t *, IV_OFFSET),
snow3g_iv, SNOW3G_CIPHER_IV_LENGTH);
rte_pktmbuf_mtophys_offset(m, data_len);
op->sym->auth.digest.length = digest_len;
- /* Cipher Parameters */
- op->sym->cipher.iv.offset = IV_OFFSET;
- op->sym->cipher.iv.length = TRIPLE_DES_CIPHER_IV_LENGTH;
+ /* Copy IV at the end of the crypto operation */
rte_memcpy(rte_crypto_op_ctod_offset(op, uint8_t *, IV_OFFSET),
triple_des_iv, TRIPLE_DES_CIPHER_IV_LENGTH);
cipher_xform.cipher.op = pparams->session_attrs->cipher;
cipher_xform.cipher.key.data = cipher_key;
cipher_xform.cipher.key.length = pparams->session_attrs->key_cipher_len;
+ cipher_xform.cipher.iv.length = pparams->session_attrs->iv_len;
+ cipher_xform.cipher.iv.offset = IV_OFFSET;
auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
auth_xform.next = NULL;
rte_memcpy(op->sym->auth.aad.data, params->symmetric_op->aad_data,
params->symmetric_op->aad_len);
- op->sym->cipher.iv.offset = IV_OFFSET;
- rte_memcpy(iv_ptr, params->symmetric_op->iv_data,
- params->symmetric_op->iv_len);
- if (params->symmetric_op->iv_len == 12)
+ rte_memcpy(iv_ptr, params->session_attrs->iv_data,
+ params->session_attrs->iv_len);
+ if (params->session_attrs->iv_len == 12)
iv_ptr[15] = 1;
- op->sym->cipher.iv.length = params->symmetric_op->iv_len;
-
op->sym->auth.data.offset =
params->symmetric_op->aad_len;
op->sym->auth.data.length = params->symmetric_op->p_len;
session_attrs[i].key_auth_len = 0;
session_attrs[i].digest_len =
gcm_test->auth_tag.len;
+ session_attrs[i].iv_len = gcm_test->iv.len;
+ session_attrs[i].iv_data = gcm_test->iv.data;
ops_set[i].aad_data = gcm_test->aad.data;
ops_set[i].aad_len = gcm_test->aad.len;
- ops_set[i].iv_data = gcm_test->iv.data;
- ops_set[i].iv_len = gcm_test->iv.len;
ops_set[i].p_data = gcm_test->plaintext.data;
ops_set[i].p_len = buf_lengths[i];
ops_set[i].c_data = gcm_test->ciphertext.data;