sym_op->m_dst = bufs_out[i];
/* cipher parameters */
- sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(ops[i],
- uint8_t *, iv_offset);
- sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(ops[i],
- iv_offset);
+ sym_op->cipher.iv.offset = iv_offset;
sym_op->cipher.iv.length = test_vector->iv.length;
if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
}
if (options->test == CPERF_TEST_TYPE_VERIFY) {
- for (i = 0; i < nb_ops; i++)
- memcpy(ops[i]->sym->cipher.iv.data,
- test_vector->iv.data,
- test_vector->iv.length);
- }
+ for (i = 0; i < nb_ops; i++) {
+ uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
+ uint8_t *, iv_offset);
+
+ memcpy(iv_ptr, test_vector->iv.data,
+ test_vector->iv.length);
+ }
+ }
return 0;
}
sym_op->m_dst = bufs_out[i];
/* cipher parameters */
- sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(ops[i],
- uint8_t *, iv_offset);
- sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(ops[i],
- iv_offset);
+ sym_op->cipher.iv.offset = iv_offset;
sym_op->cipher.iv.length = test_vector->iv.length;
if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
}
if (options->test == CPERF_TEST_TYPE_VERIFY) {
- for (i = 0; i < nb_ops; i++)
- memcpy(ops[i]->sym->cipher.iv.data,
- test_vector->iv.data,
- test_vector->iv.length);
+ for (i = 0; i < nb_ops; i++) {
+ uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
+ uint8_t *, iv_offset);
+
+ memcpy(iv_ptr, test_vector->iv.data,
+ test_vector->iv.length);
+ }
}
return 0;
sym_op->m_dst = bufs_out[i];
/* cipher parameters */
- sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(ops[i],
- uint8_t *, iv_offset);
- sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(ops[i],
- iv_offset);
+ sym_op->cipher.iv.offset = iv_offset;
sym_op->cipher.iv.length = test_vector->iv.length;
sym_op->cipher.data.length = options->test_buffer_size;
}
if (options->test == CPERF_TEST_TYPE_VERIFY) {
- for (i = 0; i < nb_ops; i++)
- memcpy(ops[i]->sym->cipher.iv.data,
- test_vector->iv.data,
- test_vector->iv.length);
+ for (i = 0; i < nb_ops; i++) {
+ uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
+ uint8_t *, iv_offset);
+
+ memcpy(iv_ptr, test_vector->iv.data,
+ test_vector->iv.length);
+ }
}
return 0;
} data; /**< Data offsets and length for ciphering */
struct {
- uint8_t *data;
- phys_addr_t phys_addr;
+ uint16_t offset;
uint16_t length;
} iv; /**< Initialisation vector parameters */
} cipher;
The crypto operation (``rte_crypto_sym_op``) has been reorganized as follows:
* Removed field ``rte_crypto_sym_op_sess_type``.
+ * Replaced the IV pointer and physical address with an offset from the start
+ of the crypto operation.
* **Reorganized the crypto operation structure.**
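For PMDs, the practical effect of this change is the pattern repeated throughout the driver hunks below: instead of dereferencing ``sym->cipher.iv.data``, the IV address (and, where the hardware needs it, the physical address) is derived from the crypto operation itself. A minimal sketch of that pattern, assuming ``op`` points to the ``rte_crypto_op`` being processed:

    /* Virtual address of the IV, computed from the op-relative offset */
    uint8_t *iv_ptr = rte_crypto_op_ctod_offset(op, uint8_t *,
            op->sym->cipher.iv.offset);

    /* Physical address, for hardware that consumes an IOVA directly */
    phys_addr_t iv_phys = rte_crypto_op_ctophys_offset(op,
            op->sym->cipher.iv.offset);

    uint16_t iv_len = op->sym->cipher.iv.length;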
*
*/
static int
-process_gcm_crypto_op(struct rte_crypto_sym_op *op,
+process_gcm_crypto_op(struct rte_crypto_op *op,
struct aesni_gcm_session *session)
{
uint8_t *src, *dst;
- struct rte_mbuf *m_src = op->m_src;
- uint32_t offset = op->cipher.data.offset;
+ uint8_t *iv_ptr;
+ struct rte_crypto_sym_op *sym_op = op->sym;
+ struct rte_mbuf *m_src = sym_op->m_src;
+ uint32_t offset = sym_op->cipher.data.offset;
uint32_t part_len, total_len, data_len;
RTE_ASSERT(m_src != NULL);
}
data_len = m_src->data_len - offset;
- part_len = (data_len < op->cipher.data.length) ? data_len :
- op->cipher.data.length;
+ part_len = (data_len < sym_op->cipher.data.length) ? data_len :
+ sym_op->cipher.data.length;
/* Destination buffer is required when segmented source buffer */
- RTE_ASSERT((part_len == op->cipher.data.length) ||
- ((part_len != op->cipher.data.length) &&
- (op->m_dst != NULL)));
+ RTE_ASSERT((part_len == sym_op->cipher.data.length) ||
+ ((part_len != sym_op->cipher.data.length) &&
+ (sym_op->m_dst != NULL)));
/* Segmented destination buffer is not supported */
- RTE_ASSERT((op->m_dst == NULL) ||
- ((op->m_dst != NULL) &&
- rte_pktmbuf_is_contiguous(op->m_dst)));
+ RTE_ASSERT((sym_op->m_dst == NULL) ||
+ ((sym_op->m_dst != NULL) &&
+ rte_pktmbuf_is_contiguous(sym_op->m_dst)));
- dst = op->m_dst ?
- rte_pktmbuf_mtod_offset(op->m_dst, uint8_t *,
- op->cipher.data.offset) :
- rte_pktmbuf_mtod_offset(op->m_src, uint8_t *,
- op->cipher.data.offset);
+ dst = sym_op->m_dst ?
+ rte_pktmbuf_mtod_offset(sym_op->m_dst, uint8_t *,
+ sym_op->cipher.data.offset) :
+ rte_pktmbuf_mtod_offset(sym_op->m_src, uint8_t *,
+ sym_op->cipher.data.offset);
src = rte_pktmbuf_mtod_offset(m_src, uint8_t *, offset);
/* sanity checks */
- if (op->cipher.iv.length != 16 && op->cipher.iv.length != 12 &&
- op->cipher.iv.length != 0) {
+ if (sym_op->cipher.iv.length != 16 && sym_op->cipher.iv.length != 12 &&
+ sym_op->cipher.iv.length != 0) {
GCM_LOG_ERR("iv");
return -1;
}
+ iv_ptr = rte_crypto_op_ctod_offset(op, uint8_t *,
+ sym_op->cipher.iv.offset);
/*
* GCM working in 12B IV mode => 16B pre-counter block we need
* to set BE LSB to 1, driver expects that 16B is allocated
*/
- if (op->cipher.iv.length == 12) {
- uint32_t *iv_padd = (uint32_t *)&op->cipher.iv.data[12];
+ if (sym_op->cipher.iv.length == 12) {
+ uint32_t *iv_padd = (uint32_t *)&(iv_ptr[12]);
*iv_padd = rte_bswap32(1);
}
- if (op->auth.digest.length != 16 &&
- op->auth.digest.length != 12 &&
- op->auth.digest.length != 8) {
+ if (sym_op->auth.digest.length != 16 &&
+ sym_op->auth.digest.length != 12 &&
+ sym_op->auth.digest.length != 8) {
GCM_LOG_ERR("digest");
return -1;
}
if (session->op == AESNI_GCM_OP_AUTHENTICATED_ENCRYPTION) {
aesni_gcm_enc[session->key].init(&session->gdata,
- op->cipher.iv.data,
- op->auth.aad.data,
- (uint64_t)op->auth.aad.length);
+ iv_ptr,
+ sym_op->auth.aad.data,
+ (uint64_t)sym_op->auth.aad.length);
aesni_gcm_enc[session->key].update(&session->gdata, dst, src,
(uint64_t)part_len);
- total_len = op->cipher.data.length - part_len;
+ total_len = sym_op->cipher.data.length - part_len;
while (total_len) {
dst += part_len;
}
aesni_gcm_enc[session->key].finalize(&session->gdata,
- op->auth.digest.data,
- (uint64_t)op->auth.digest.length);
+ sym_op->auth.digest.data,
+ (uint64_t)sym_op->auth.digest.length);
} else { /* session->op == AESNI_GCM_OP_AUTHENTICATED_DECRYPTION */
- uint8_t *auth_tag = (uint8_t *)rte_pktmbuf_append(op->m_dst ?
- op->m_dst : op->m_src,
- op->auth.digest.length);
+ uint8_t *auth_tag = (uint8_t *)rte_pktmbuf_append(sym_op->m_dst ?
+ sym_op->m_dst : sym_op->m_src,
+ sym_op->auth.digest.length);
if (!auth_tag) {
GCM_LOG_ERR("auth_tag");
}
aesni_gcm_dec[session->key].init(&session->gdata,
- op->cipher.iv.data,
- op->auth.aad.data,
- (uint64_t)op->auth.aad.length);
+ iv_ptr,
+ sym_op->auth.aad.data,
+ (uint64_t)sym_op->auth.aad.length);
aesni_gcm_dec[session->key].update(&session->gdata, dst, src,
(uint64_t)part_len);
- total_len = op->cipher.data.length - part_len;
+ total_len = sym_op->cipher.data.length - part_len;
while (total_len) {
dst += part_len;
aesni_gcm_dec[session->key].finalize(&session->gdata,
auth_tag,
- (uint64_t)op->auth.digest.length);
+ (uint64_t)sym_op->auth.digest.length);
}
return 0;
break;
}
- retval = process_gcm_crypto_op(ops[i]->sym, sess);
+ retval = process_gcm_crypto_op(ops[i], sess);
if (retval < 0) {
ops[i]->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
qp->qp_stats.dequeue_err_count++;
get_truncated_digest_byte_length(job->hash_alg);
/* Set IV parameters */
- job->iv = op->sym->cipher.iv.data;
+ job->iv = rte_crypto_op_ctod_offset(op, uint8_t *,
+ op->sym->cipher.iv.offset);
job->iv_len_in_bytes = op->sym->cipher.iv.length;
/* Data Parameter */
return;
}
- arg.cipher.iv = op->sym->cipher.iv.data;
+ arg.cipher.iv = rte_crypto_op_ctod_offset(op, uint8_t *,
+ op->sym->cipher.iv.offset);
arg.cipher.key = sess->cipher.key.data;
/* Acquire combined mode function */
crypto_func = sess->crypto_func;
int icv_len = sym_op->auth.digest.length;
uint8_t *old_icv;
uint32_t mem_len = (7 * sizeof(struct qbman_fle)) + icv_len;
+ uint8_t *iv_ptr = rte_crypto_op_ctod_offset(op, uint8_t *,
+ op->sym->cipher.iv.offset);
PMD_INIT_FUNC_TRACE();
sym_op->auth.digest.length);
/* Configure Input SGE for Encap/Decap */
- DPAA2_SET_FLE_ADDR(sge, DPAA2_VADDR_TO_IOVA(sym_op->cipher.iv.data));
+ DPAA2_SET_FLE_ADDR(sge, DPAA2_VADDR_TO_IOVA(iv_ptr));
sge->length = sym_op->cipher.iv.length;
sge++;
uint32_t mem_len = (5 * sizeof(struct qbman_fle));
struct sec_flow_context *flc;
struct ctxt_priv *priv = sess->ctxt;
+ uint8_t *iv_ptr = rte_crypto_op_ctod_offset(op, uint8_t *,
+ op->sym->cipher.iv.offset);
PMD_INIT_FUNC_TRACE();
DPAA2_SET_FLE_SG_EXT(fle);
- DPAA2_SET_FLE_ADDR(sge, DPAA2_VADDR_TO_IOVA(sym_op->cipher.iv.data));
+ DPAA2_SET_FLE_ADDR(sge, DPAA2_VADDR_TO_IOVA(iv_ptr));
sge->length = sym_op->cipher.iv.length;
sge++;
unsigned i;
uint8_t processed_ops = 0;
uint8_t *src[num_ops], *dst[num_ops];
- uint64_t IV[num_ops];
+ uint8_t *iv_ptr;
+ uint64_t iv[num_ops];
uint32_t num_bytes[num_ops];
for (i = 0; i < num_ops; i++) {
(ops[i]->sym->cipher.data.offset >> 3) :
rte_pktmbuf_mtod(ops[i]->sym->m_src, uint8_t *) +
(ops[i]->sym->cipher.data.offset >> 3);
- IV[i] = *((uint64_t *)(ops[i]->sym->cipher.iv.data));
+ iv_ptr = rte_crypto_op_ctod_offset(ops[i], uint8_t *,
+ ops[i]->sym->cipher.iv.offset);
+ iv[i] = *((uint64_t *)(iv_ptr));
num_bytes[i] = ops[i]->sym->cipher.data.length >> 3;
processed_ops++;
}
if (processed_ops != 0)
- sso_kasumi_f8_n_buffer(&session->pKeySched_cipher, IV,
+ sso_kasumi_f8_n_buffer(&session->pKeySched_cipher, iv,
src, dst, num_bytes, processed_ops);
return processed_ops;
struct kasumi_session *session)
{
uint8_t *src, *dst;
- uint64_t IV;
+ uint8_t *iv_ptr;
+ uint64_t iv;
uint32_t length_in_bits, offset_in_bits;
/* Sanity checks. */
return 0;
}
dst = rte_pktmbuf_mtod(op->sym->m_dst, uint8_t *);
- IV = *((uint64_t *)(op->sym->cipher.iv.data));
+ iv_ptr = rte_crypto_op_ctod_offset(op, uint8_t *,
+ op->sym->cipher.iv.offset);
+ iv = *((uint64_t *)(iv_ptr));
length_in_bits = op->sym->cipher.data.length;
- sso_kasumi_f8_1_buffer_bit(&session->pKeySched_cipher, IV,
+ sso_kasumi_f8_1_buffer_bit(&session->pKeySched_cipher, iv,
src, dst, length_in_bits, offset_in_bits);
return 1;
uint32_t length_in_bits;
uint32_t num_bytes;
uint32_t shift_bits;
- uint64_t IV;
+ uint64_t iv;
uint8_t direction;
for (i = 0; i < num_ops; i++) {
src = rte_pktmbuf_mtod(ops[i]->sym->m_src, uint8_t *) +
(ops[i]->sym->auth.data.offset >> 3);
/* IV from AAD */
- IV = *((uint64_t *)(ops[i]->sym->auth.aad.data));
+ iv = *((uint64_t *)(ops[i]->sym->auth.aad.data));
/* Direction from next bit after end of message */
num_bytes = (length_in_bits >> 3) + 1;
shift_bits = (BYTE_LEN - 1 - length_in_bits) % BYTE_LEN;
ops[i]->sym->auth.digest.length);
sso_kasumi_f9_1_buffer_user(&session->pKeySched_hash,
- IV, src,
+ iv, src,
length_in_bits, dst, direction);
/* Verify digest. */
if (memcmp(dst, ops[i]->sym->auth.digest.data,
dst = ops[i]->sym->auth.digest.data;
sso_kasumi_f9_1_buffer_user(&session->pKeySched_hash,
- IV, src,
+ iv, src,
length_in_bits, dst, direction);
}
processed_ops++;
return;
}
- iv = op->sym->cipher.iv.data;
+ iv = rte_crypto_op_ctod_offset(op, uint8_t *,
+ op->sym->cipher.iv.offset);
ivlen = op->sym->cipher.iv.length;
aad = op->sym->auth.aad.data;
aadlen = op->sym->auth.aad.length;
dst = rte_pktmbuf_mtod_offset(mbuf_dst, uint8_t *,
op->sym->cipher.data.offset);
- iv = op->sym->cipher.iv.data;
+ iv = rte_crypto_op_ctod_offset(op, uint8_t *,
+ op->sym->cipher.iv.offset);
if (sess->cipher.mode == OPENSSL_CIPHER_LIB)
if (sess->cipher.direction == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
dst = rte_pktmbuf_mtod_offset(mbuf_dst, uint8_t *,
op->sym->cipher.data.offset);
- iv = op->sym->cipher.iv.data;
+ iv = rte_crypto_op_ctod_offset(op, uint8_t *,
+ op->sym->cipher.iv.offset);
block_size = DES_BLOCK_SIZE;
dst, iv,
last_block_len, sess->cipher.bpi_ctx);
/* Prepare parameters for CBC mode op */
- iv = op->sym->cipher.iv.data;
+ iv = rte_crypto_op_ctod_offset(op, uint8_t *,
+ op->sym->cipher.iv.offset);
dst += last_block_len - srclen;
srclen -= last_block_len;
}
iv = last_block - block_len;
else
/* runt block, i.e. less than one full block */
- iv = sym_op->cipher.iv.data;
+ iv = rte_crypto_op_ctod_offset(op, uint8_t *,
+ sym_op->cipher.iv.offset);
#ifdef RTE_LIBRTE_PMD_QAT_DEBUG_TX
rte_hexdump(stdout, "BPI: src before pre-process:", last_block,
iv = dst - block_len;
else
/* runt block, i.e. less than one full block */
- iv = sym_op->cipher.iv.data;
+ iv = rte_crypto_op_ctod_offset(op, uint8_t *,
+ sym_op->cipher.iv.offset);
#ifdef RTE_LIBRTE_PMD_QAT_DEBUG_RX
rte_hexdump(stdout, "BPI: src before post-process:", last_block,
uint32_t min_ofs = 0;
uint64_t src_buf_start = 0, dst_buf_start = 0;
uint8_t do_sgl = 0;
+ uint8_t *iv_ptr;
#ifdef RTE_LIBRTE_PMD_QAT_DEBUG_TX
cipher_ofs = op->sym->cipher.data.offset;
}
+ iv_ptr = rte_crypto_op_ctod_offset(op, uint8_t *,
+ op->sym->cipher.iv.offset);
/* copy IV into request if it fits */
/*
* If IV length is zero do not copy anything but still
if (op->sym->cipher.iv.length <=
sizeof(cipher_param->u.cipher_IV_array)) {
rte_memcpy(cipher_param->u.cipher_IV_array,
- op->sym->cipher.iv.data,
+ iv_ptr,
op->sym->cipher.iv.length);
} else {
ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(
qat_req->comn_hdr.serv_specif_flags,
ICP_QAT_FW_CIPH_IV_64BIT_PTR);
cipher_param->u.s.cipher_IV_ptr =
- op->sym->cipher.iv.phys_addr;
+ rte_crypto_op_ctophys_offset(op,
+ op->sym->cipher.iv.offset);
}
}
min_ofs = cipher_ofs;
rte_hexdump(stdout, "src_data:",
rte_pktmbuf_mtod(op->sym->m_src, uint8_t*),
rte_pktmbuf_data_len(op->sym->m_src));
- rte_hexdump(stdout, "iv:", op->sym->cipher.iv.data,
- op->sym->cipher.iv.length);
- rte_hexdump(stdout, "digest:", op->sym->auth.digest.data,
- op->sym->auth.digest.length);
- rte_hexdump(stdout, "aad:", op->sym->auth.aad.data,
- op->sym->auth.aad.length);
+ if (do_cipher)
+ rte_hexdump(stdout, "iv:", iv_ptr,
+ op->sym->cipher.iv.length);
+
+ if (do_auth) {
+ rte_hexdump(stdout, "digest:", op->sym->auth.digest.data,
+ op->sym->auth.digest.length);
+ rte_hexdump(stdout, "aad:", op->sym->auth.aad.data,
+ op->sym->auth.aad.length);
+ }
#endif
return 0;
}
unsigned i;
uint8_t processed_ops = 0;
uint8_t *src[SNOW3G_MAX_BURST], *dst[SNOW3G_MAX_BURST];
- uint8_t *IV[SNOW3G_MAX_BURST];
+ uint8_t *iv[SNOW3G_MAX_BURST];
uint32_t num_bytes[SNOW3G_MAX_BURST];
for (i = 0; i < num_ops; i++) {
(ops[i]->sym->cipher.data.offset >> 3) :
rte_pktmbuf_mtod(ops[i]->sym->m_src, uint8_t *) +
(ops[i]->sym->cipher.data.offset >> 3);
- IV[i] = ops[i]->sym->cipher.iv.data;
+ iv[i] = rte_crypto_op_ctod_offset(ops[i], uint8_t *,
+ ops[i]->sym->cipher.iv.offset);
num_bytes[i] = ops[i]->sym->cipher.data.length >> 3;
processed_ops++;
}
- sso_snow3g_f8_n_buffer(&session->pKeySched_cipher, IV, src, dst,
+ sso_snow3g_f8_n_buffer(&session->pKeySched_cipher, iv, src, dst,
num_bytes, processed_ops);
return processed_ops;
struct snow3g_session *session)
{
uint8_t *src, *dst;
- uint8_t *IV;
+ uint8_t *iv;
uint32_t length_in_bits, offset_in_bits;
/* Sanity checks. */
return 0;
}
dst = rte_pktmbuf_mtod(op->sym->m_dst, uint8_t *);
- IV = op->sym->cipher.iv.data;
+ iv = rte_crypto_op_ctod_offset(op, uint8_t *,
+ op->sym->cipher.iv.offset);
length_in_bits = op->sym->cipher.data.length;
- sso_snow3g_f8_1_buffer_bit(&session->pKeySched_cipher, IV,
+ sso_snow3g_f8_1_buffer_bit(&session->pKeySched_cipher, iv,
src, dst, length_in_bits, offset_in_bits);
return 1;
unsigned i;
uint8_t processed_ops = 0;
uint8_t *src[ZUC_MAX_BURST], *dst[ZUC_MAX_BURST];
- uint8_t *IV[ZUC_MAX_BURST];
+ uint8_t *iv[ZUC_MAX_BURST];
uint32_t num_bytes[ZUC_MAX_BURST];
uint8_t *cipher_keys[ZUC_MAX_BURST];
(ops[i]->sym->cipher.data.offset >> 3) :
rte_pktmbuf_mtod(ops[i]->sym->m_src, uint8_t *) +
(ops[i]->sym->cipher.data.offset >> 3);
- IV[i] = ops[i]->sym->cipher.iv.data;
+ iv[i] = rte_crypto_op_ctod_offset(ops[i], uint8_t *,
+ ops[i]->sym->cipher.iv.offset);
num_bytes[i] = ops[i]->sym->cipher.data.length >> 3;
cipher_keys[i] = session->pKey_cipher;
processed_ops++;
}
- sso_zuc_eea3_n_buffer(cipher_keys, IV, src, dst,
+ sso_zuc_eea3_n_buffer(cipher_keys, iv, src, dst,
num_bytes, processed_ops);
return processed_ops;
case RTE_CRYPTO_CIPHER_AES_CBC:
/* Copy IV at the end of crypto operation */
rte_memcpy(iv_ptr, iv, sa->iv_len);
- sym_cop->cipher.iv.data = iv_ptr;
- sym_cop->cipher.iv.phys_addr =
- rte_crypto_op_ctophys_offset(cop, IV_OFFSET);
+ sym_cop->cipher.iv.offset = IV_OFFSET;
sym_cop->cipher.iv.length = sa->iv_len;
break;
case RTE_CRYPTO_CIPHER_AES_CTR:
icb->salt = sa->salt;
memcpy(&icb->iv, iv, 8);
icb->cnt = rte_cpu_to_be_32(1);
- sym_cop->cipher.iv.data = iv_ptr;
- sym_cop->cipher.iv.phys_addr =
- rte_crypto_op_ctophys_offset(cop, IV_OFFSET);
+ sym_cop->cipher.iv.offset = IV_OFFSET;
sym_cop->cipher.iv.length = 16;
break;
default:
padding[pad_len - 2] = pad_len - 2;
padding[pad_len - 1] = nlp;
- uint8_t *iv_ptr = rte_crypto_op_ctod_offset(cop,
- uint8_t *, IV_OFFSET);
struct cnt_blk *icb = get_cnt_blk(m);
icb->salt = sa->salt;
icb->iv = sa->seq;
icb->cnt = rte_cpu_to_be_32(1);
- sym_cop->cipher.iv.data = iv_ptr;
- sym_cop->cipher.iv.phys_addr =
- rte_crypto_op_ctophys_offset(cop, IV_OFFSET);
+ sym_cop->cipher.iv.offset = IV_OFFSET;
sym_cop->cipher.iv.length = 16;
uint8_t *aad;
/* Copy IV at the end of the crypto operation */
rte_memcpy(iv_ptr, cparams->iv.data, cparams->iv.length);
- op->sym->cipher.iv.data = iv_ptr;
- op->sym->cipher.iv.phys_addr =
- rte_crypto_op_ctophys_offset(op, IV_OFFSET);
+ op->sym->cipher.iv.offset = IV_OFFSET;
op->sym->cipher.iv.length = cparams->iv.length;
/* For wireless algorithms, offset/length must be in bits */
if (port_cparams[i].do_cipher) {
port_cparams[i].iv.data = options->iv.data;
port_cparams[i].iv.length = options->iv.length;
- port_cparams[i].iv.phys_addr = options->iv.phys_addr;
if (!options->iv_param)
generate_random_key(port_cparams[i].iv.data,
port_cparams[i].iv.length);
} data; /**< Data offsets and length for ciphering */
struct {
- uint8_t *data;
- /**< Initialisation Vector or Counter.
+ uint16_t offset;
+ /**< Starting point for Initialisation Vector or Counter,
+ * specified as number of bytes from start of crypto
+ * operation.
*
* - For block ciphers in CBC or F8 mode, or for KASUMI
* in F8 mode, or for SNOW 3G in UEA2 mode, this is the
* For optimum performance, the data pointed to SHOULD
* be 8-byte aligned.
*/
- phys_addr_t phys_addr;
uint16_t length;
/**< Length of valid IV data.
*
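On the application side, the IV now lives in the private area that trails the crypto operation, and only its offset and length are recorded in the symmetric op; the unit-test hunks below all follow this pattern. A minimal sketch, where ``IV_OFFSET``, ``iv`` and ``iv_len`` stand in for the application's own layout (the tests place the IV immediately after the symmetric operation):

    #define IV_OFFSET	(sizeof(struct rte_crypto_op) + \
    		sizeof(struct rte_crypto_sym_op))

    uint8_t *iv_ptr = rte_crypto_op_ctod_offset(op, uint8_t *, IV_OFFSET);

    rte_memcpy(iv_ptr, iv, iv_len);         /* copy the IV bytes after the op */
    op->sym->cipher.iv.offset = IV_OFFSET;  /* record where the IV starts */
    op->sym->cipher.iv.length = iv_len;     /* and how many bytes are valid */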
sym_op->auth.data.length = QUOTE_512_BYTES;
/* Set crypto operation cipher parameters */
- sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(ut_params->op,
- uint8_t *, IV_OFFSET);
- sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(ut_params->op,
- IV_OFFSET);
+ sym_op->cipher.iv.offset = IV_OFFSET;
sym_op->cipher.iv.length = CIPHER_IV_LENGTH_AES_CBC;
- rte_memcpy(sym_op->cipher.iv.data, aes_cbc_iv, CIPHER_IV_LENGTH_AES_CBC);
+ rte_memcpy(rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET),
+ aes_cbc_iv, CIPHER_IV_LENGTH_AES_CBC);
sym_op->cipher.data.offset = 0;
sym_op->cipher.data.length = QUOTE_512_BYTES;
sym_op->auth.data.offset = 0;
sym_op->auth.data.length = QUOTE_512_BYTES;
- sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(ut_params->op,
- uint8_t *, IV_OFFSET);
- sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(ut_params->op,
- IV_OFFSET);
+ sym_op->cipher.iv.offset = IV_OFFSET;
sym_op->cipher.iv.length = CIPHER_IV_LENGTH_AES_CBC;
- rte_memcpy(sym_op->cipher.iv.data, iv, CIPHER_IV_LENGTH_AES_CBC);
+ rte_memcpy(rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET),
+ iv, CIPHER_IV_LENGTH_AES_CBC);
sym_op->cipher.data.offset = 0;
sym_op->cipher.data.length = QUOTE_512_BYTES;
sym_op->m_src = ut_params->ibuf;
/* iv */
- sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(ut_params->op,
- uint8_t *, IV_OFFSET);
- sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(ut_params->op,
- IV_OFFSET);
+ sym_op->cipher.iv.offset = IV_OFFSET;
sym_op->cipher.iv.length = iv_len;
- rte_memcpy(sym_op->cipher.iv.data, iv, iv_len);
+ rte_memcpy(rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET),
+ iv, iv_len);
sym_op->cipher.data.length = cipher_len;
sym_op->cipher.data.offset = cipher_offset;
return 0;
sym_op->m_dst = ut_params->obuf;
/* iv */
- sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(ut_params->op,
- uint8_t *, IV_OFFSET);
- sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(ut_params->op,
- IV_OFFSET);
+ sym_op->cipher.iv.offset = IV_OFFSET;
sym_op->cipher.iv.length = iv_len;
- rte_memcpy(sym_op->cipher.iv.data, iv, iv_len);
+ rte_memcpy(rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET),
+ iv, iv_len);
sym_op->cipher.data.length = cipher_len;
sym_op->cipher.data.offset = cipher_offset;
return 0;
TEST_HEXDUMP(stdout, "aad:", sym_op->auth.aad.data, aad_len);
/* iv */
- sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(ut_params->op,
- uint8_t *, IV_OFFSET);
- sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(ut_params->op,
- IV_OFFSET);
+ sym_op->cipher.iv.offset = IV_OFFSET;
sym_op->cipher.iv.length = iv_len;
- rte_memcpy(sym_op->cipher.iv.data, iv, iv_len);
+ rte_memcpy(rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET),
+ iv, iv_len);
sym_op->cipher.data.length = cipher_len;
sym_op->cipher.data.offset = cipher_offset + auth_offset;
sym_op->auth.data.length = auth_len;
TEST_HEXDUMP(stdout, "aad:", sym_op->auth.aad.data, aad_len);
/* iv */
- sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(ut_params->op,
- uint8_t *, IV_OFFSET);
- sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(ut_params->op,
- IV_OFFSET);
+ sym_op->cipher.iv.offset = IV_OFFSET;
sym_op->cipher.iv.length = iv_len;
- rte_memcpy(sym_op->cipher.iv.data, iv, iv_len);
+ rte_memcpy(rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET),
+ iv, iv_len);
sym_op->cipher.data.length = cipher_len;
sym_op->cipher.data.offset = cipher_offset + auth_offset;
sym_op->auth.data.length = auth_len;
sym_op->auth.aad.data, aad_len);
/* iv */
- sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(ut_params->op,
- uint8_t *, IV_OFFSET);
- sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(ut_params->op,
- IV_OFFSET);
+ sym_op->cipher.iv.offset = IV_OFFSET;
sym_op->cipher.iv.length = iv_len;
- rte_memcpy(sym_op->cipher.iv.data, iv, iv_len);
-
+ rte_memcpy(rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET),
+ iv, iv_len);
sym_op->cipher.data.length = cipher_len;
sym_op->cipher.data.offset = auth_offset + cipher_offset;
sym_op->auth.aad.length);
/* Append IV at the end of the crypto operation*/
- sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(ut_params->op,
- uint8_t *, IV_OFFSET);
- sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(ut_params->op,
- IV_OFFSET);
+ uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ut_params->op,
+ uint8_t *, IV_OFFSET);
+ sym_op->cipher.iv.offset = IV_OFFSET;
sym_op->cipher.iv.length = tdata->iv.len;
- rte_memcpy(sym_op->cipher.iv.data, tdata->iv.data, tdata->iv.len);
- TEST_HEXDUMP(stdout, "iv:", sym_op->cipher.iv.data,
+ rte_memcpy(iv_ptr, tdata->iv.data, tdata->iv.len);
+ TEST_HEXDUMP(stdout, "iv:", iv_ptr,
sym_op->cipher.iv.length);
/* Append plaintext/ciphertext */
sym_op->auth.digest.length);
}
- sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(ut_params->op,
- uint8_t *, IV_OFFSET);
- sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(ut_params->op,
- IV_OFFSET);
+ uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ut_params->op,
+ uint8_t *, IV_OFFSET);
+
+ sym_op->cipher.iv.offset = IV_OFFSET;
sym_op->cipher.iv.length = tdata->iv.len;
- rte_memcpy(sym_op->cipher.iv.data, tdata->iv.data, tdata->iv.len);
+ rte_memcpy(iv_ptr, tdata->iv.data, tdata->iv.len);
- TEST_HEXDUMP(stdout, "iv:", sym_op->cipher.iv.data, tdata->iv.len);
+ TEST_HEXDUMP(stdout, "iv:", iv_ptr, tdata->iv.len);
sym_op->cipher.data.length = 0;
sym_op->cipher.data.offset = 0;
sym_op->auth.digest.data,
sym_op->auth.digest.length);
- sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(ut_params->op,
- uint8_t *, IV_OFFSET);
- sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(ut_params->op,
- IV_OFFSET);
+ sym_op->cipher.iv.offset = IV_OFFSET;
sym_op->cipher.iv.length = reference->iv.len;
- rte_memcpy(sym_op->cipher.iv.data, reference->iv.data, reference->iv.len);
+ rte_memcpy(rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET),
+ reference->iv.data, reference->iv.len);
sym_op->cipher.data.length = 0;
sym_op->cipher.data.offset = 0;
sym_op->auth.digest.data,
sym_op->auth.digest.length);
- sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(ut_params->op,
- uint8_t *, IV_OFFSET);
- sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(ut_params->op,
- IV_OFFSET);
+ sym_op->cipher.iv.offset = IV_OFFSET;
sym_op->cipher.iv.length = reference->iv.len;
- rte_memcpy(sym_op->cipher.iv.data, reference->iv.data, reference->iv.len);
+ rte_memcpy(rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET),
+ reference->iv.data, reference->iv.len);
sym_op->cipher.data.length = reference->ciphertext.len;
sym_op->cipher.data.offset = 0;
sym_op->auth.digest.length);
}
- sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(ut_params->op,
- uint8_t *, IV_OFFSET);
- sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(ut_params->op,
- IV_OFFSET);
+ uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ut_params->op,
+ uint8_t *, IV_OFFSET);
+ sym_op->cipher.iv.offset = IV_OFFSET;
sym_op->cipher.iv.length = iv_len;
- rte_memcpy(sym_op->cipher.iv.data, tdata->iv.data, iv_len);
+ rte_memcpy(iv_ptr, tdata->iv.data, iv_len);
sym_op->auth.aad.data = (uint8_t *)rte_pktmbuf_prepend(
ut_params->ibuf, aad_len);
memset(sym_op->auth.aad.data, 0, aad_len);
rte_memcpy(sym_op->auth.aad.data, tdata->aad.data, aad_len);
- TEST_HEXDUMP(stdout, "iv:", sym_op->cipher.iv.data, iv_len);
+ TEST_HEXDUMP(stdout, "iv:", iv_ptr, iv_len);
TEST_HEXDUMP(stdout, "aad:",
sym_op->auth.aad.data, aad_len);
sym_op->cipher.data.offset = 0;
sym_op->cipher.data.length = tdata->ciphertext.len;
- sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(op,
- uint8_t *, IV_OFFSET);
- sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(op,
- IV_OFFSET);
+ sym_op->cipher.iv.offset = IV_OFFSET;
sym_op->cipher.iv.length = tdata->iv.len;
- rte_memcpy(sym_op->cipher.iv.data, tdata->iv.data,
+ rte_memcpy(rte_crypto_op_ctod_offset(op, uint8_t *, IV_OFFSET),
+ tdata->iv.data,
tdata->iv.len);
}
op->sym->auth.data.offset = 0;
op->sym->auth.data.length = data_params[0].length;
-
- op->sym->cipher.iv.data = rte_crypto_op_ctod_offset(op,
- uint8_t *, IV_OFFSET);
- op->sym->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(op,
- IV_OFFSET);
+ op->sym->cipher.iv.offset = IV_OFFSET;
op->sym->cipher.iv.length = CIPHER_IV_LENGTH_AES_CBC;
- rte_memcpy(op->sym->cipher.iv.data, aes_cbc_128_iv,
- CIPHER_IV_LENGTH_AES_CBC);
+ rte_memcpy(rte_crypto_op_ctod_offset(op, uint8_t *, IV_OFFSET),
+ aes_cbc_128_iv, CIPHER_IV_LENGTH_AES_CBC);
op->sym->cipher.data.offset = 0;
op->sym->cipher.data.length = data_params[0].length;
/* Cipher Parameters */
- op->sym->cipher.iv.data = rte_crypto_op_ctod_offset(op,
- uint8_t *, IV_OFFSET);
- op->sym->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(op,
- IV_OFFSET);
+ op->sym->cipher.iv.offset = IV_OFFSET;
op->sym->cipher.iv.length = AES_CIPHER_IV_LENGTH;
-
- rte_memcpy(op->sym->cipher.iv.data, aes_iv, AES_CIPHER_IV_LENGTH);
+ rte_memcpy(rte_crypto_op_ctod_offset(op, uint8_t *, IV_OFFSET),
+ aes_iv, AES_CIPHER_IV_LENGTH);
op->sym->cipher.data.offset = 0;
op->sym->cipher.data.length = data_len;
op->sym->auth.aad.length = AES_GCM_AAD_LENGTH;
/* Cipher Parameters */
- op->sym->cipher.iv.data = rte_crypto_op_ctod_offset(op,
- uint8_t *, IV_OFFSET);
- op->sym->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(op,
- IV_OFFSET);
+ op->sym->cipher.iv.offset = IV_OFFSET;
op->sym->cipher.iv.length = AES_CIPHER_IV_LENGTH;
- rte_memcpy(op->sym->cipher.iv.data, aes_iv, AES_CIPHER_IV_LENGTH);
+ rte_memcpy(rte_crypto_op_ctod_offset(op, uint8_t *, IV_OFFSET),
+ aes_iv, AES_CIPHER_IV_LENGTH);
/* Data lengths/offsets Parameters */
op->sym->auth.data.offset = 0;
op->sym->auth.aad.length = SNOW3G_CIPHER_IV_LENGTH;
/* Cipher Parameters */
- op->sym->cipher.iv.data = iv_ptr;
- op->sym->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(op,
- IV_OFFSET);
+ op->sym->cipher.iv.offset = IV_OFFSET;
op->sym->cipher.iv.length = SNOW3G_CIPHER_IV_LENGTH;
/* Data lengths/offsets Parameters */
}
/* Cipher Parameters */
- op->sym->cipher.iv.data = rte_crypto_op_ctod_offset(op,
- uint8_t *, IV_OFFSET);
- op->sym->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(op,
- IV_OFFSET);
+ op->sym->cipher.iv.offset = IV_OFFSET;
op->sym->cipher.iv.length = SNOW3G_CIPHER_IV_LENGTH;
- rte_memcpy(op->sym->cipher.iv.data, snow3g_iv, SNOW3G_CIPHER_IV_LENGTH);
+ rte_memcpy(rte_crypto_op_ctod_offset(op, uint8_t *, IV_OFFSET),
+ snow3g_iv, SNOW3G_CIPHER_IV_LENGTH);
op->sym->cipher.data.offset = 0;
op->sym->cipher.data.length = data_len << 3;
op->sym->auth.digest.length = digest_len;
/* Cipher Parameters */
- op->sym->cipher.iv.data = rte_crypto_op_ctod_offset(op,
- uint8_t *, IV_OFFSET);
- op->sym->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(op,
- IV_OFFSET);
+ op->sym->cipher.iv.offset = IV_OFFSET;
op->sym->cipher.iv.length = TRIPLE_DES_CIPHER_IV_LENGTH;
- rte_memcpy(op->sym->cipher.iv.data, triple_des_iv,
- TRIPLE_DES_CIPHER_IV_LENGTH);
+ rte_memcpy(rte_crypto_op_ctod_offset(op, uint8_t *, IV_OFFSET),
+ triple_des_iv, TRIPLE_DES_CIPHER_IV_LENGTH);
/* Data lengths/offsets Parameters */
op->sym->auth.data.offset = 0;
struct crypto_params *m_hlp,
struct perf_test_params *params)
{
+ uint8_t *iv_ptr = rte_crypto_op_ctod_offset(op,
+ uint8_t *, IV_OFFSET);
+
if (rte_crypto_op_attach_sym_session(op, sess) != 0) {
rte_crypto_op_free(op);
return NULL;
rte_memcpy(op->sym->auth.aad.data, params->symmetric_op->aad_data,
params->symmetric_op->aad_len);
- op->sym->cipher.iv.data = rte_crypto_op_ctod_offset(op,
- uint8_t *, IV_OFFSET);
- op->sym->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(op,
- IV_OFFSET);
- rte_memcpy(op->sym->cipher.iv.data, params->symmetric_op->iv_data,
+ op->sym->cipher.iv.offset = IV_OFFSET;
+ rte_memcpy(iv_ptr, params->symmetric_op->iv_data,
params->symmetric_op->iv_len);
if (params->symmetric_op->iv_len == 12)
- op->sym->cipher.iv.data[15] = 1;
+ iv_ptr[15] = 1;
op->sym->cipher.iv.length = params->symmetric_op->iv_len;