/*-
* BSD LICENSE
*
- * Copyright(c) 2015-2016 Intel Corporation. All rights reserved.
+ * Copyright(c) 2015-2017 Intel Corporation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
struct crypto_params {
uint8_t *aad;
- uint8_t *iv;
uint8_t *digest;
};
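[Note: the per-packet iv pointer can be dropped from crypto_params because the IV now lives at a fixed offset inside the crypto op itself rather than in the mbuf, so every call site can recompute the address. A minimal sketch of the accessors this patch relies on; IV_OFFSET is a constant the patch assumes, sketched after the mempool hunk below:

    uint8_t *iv_va = rte_crypto_op_ctod_offset(op, uint8_t *, IV_OFFSET);
    phys_addr_t iv_pa = rte_crypto_op_ctophys_offset(op, IV_OFFSET);

Both macros come from rte_crypto.h and offset from the start of the op structure.]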
RTE_CRYPTO_OP_TYPE_SYMMETRIC,
NUM_MBUFS, MBUF_CACHE_SIZE,
DEFAULT_NUM_XFORMS *
- sizeof(struct rte_crypto_sym_xform),
+ sizeof(struct rte_crypto_sym_xform) +
+ MAXIMUM_IV_LENGTH,
rte_socket_id());
if (ts_params->op_mpool == NULL) {
RTE_LOG(ERR, USER1, "Can't create CRYPTO_OP_POOL\n");
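[Note: the pool's per-op private area must now hold the IV as well, hence the extra MAXIMUM_IV_LENGTH bytes. For the rte_crypto_op_ctod_offset() calls to land inside that area, IV_OFFSET presumably counts from the start of the op, past the sym op and the xforms. A plausible definition, hypothetical but consistent with the private-data size requested above:

    /* assumed layout: op header, sym op, xforms, then the IV */
    #define IV_OFFSET (sizeof(struct rte_crypto_op) + \
            sizeof(struct rte_crypto_sym_op) + \
            DEFAULT_NUM_XFORMS * sizeof(struct rte_crypto_sym_xform))
]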
data_params[0].length);
op->sym->auth.digest.length = DIGEST_BYTE_LENGTH_SHA256;
- op->sym->auth.data.offset = CIPHER_IV_LENGTH_AES_CBC;
+ op->sym->auth.data.offset = 0;
op->sym->auth.data.length = data_params[0].length;
- op->sym->cipher.iv.data = (uint8_t *)rte_pktmbuf_prepend(m,
- CIPHER_IV_LENGTH_AES_CBC);
- op->sym->cipher.iv.phys_addr = rte_pktmbuf_mtophys(m);
+ op->sym->cipher.iv.data = rte_crypto_op_ctod_offset(op,
+ uint8_t *, IV_OFFSET);
+ op->sym->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(op,
+ IV_OFFSET);
op->sym->cipher.iv.length = CIPHER_IV_LENGTH_AES_CBC;
rte_memcpy(op->sym->cipher.iv.data, aes_cbc_128_iv,
CIPHER_IV_LENGTH_AES_CBC);
- op->sym->cipher.data.offset = CIPHER_IV_LENGTH_AES_CBC;
+ op->sym->cipher.data.offset = 0;
op->sym->cipher.data.length = data_params[0].length;
op->sym->m_src = m;
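[Note: the offset changes in this hunk follow directly from no longer prepending the IV to the packet; the cipher and auth regions now start at the head of the mbuf data. Layout before and after, as implied by the hunk:

    before: mbuf = [ IV (CIPHER_IV_LENGTH_AES_CBC) | plaintext | digest ]
            cipher/auth data.offset = CIPHER_IV_LENGTH_AES_CBC
    after:  mbuf = [ plaintext | digest ]
            cipher/auth data.offset = 0, IV at IV_OFFSET inside the op
]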
op->sym->auth.data.length = 0;
} else {
op->sym->auth.digest.data = rte_pktmbuf_mtod_offset(m,
- uint8_t *, AES_CIPHER_IV_LENGTH + data_len);
+ uint8_t *, data_len);
op->sym->auth.digest.phys_addr = rte_pktmbuf_mtophys_offset(m,
- AES_CIPHER_IV_LENGTH + data_len);
+ data_len);
op->sym->auth.digest.length = digest_len;
- op->sym->auth.data.offset = AES_CIPHER_IV_LENGTH;
+ op->sym->auth.data.offset = 0;
op->sym->auth.data.length = data_len;
}
/* Cipher Parameters */
- op->sym->cipher.iv.data = rte_pktmbuf_mtod(m, uint8_t *);
- op->sym->cipher.iv.phys_addr = rte_pktmbuf_mtophys(m);
+ op->sym->cipher.iv.data = rte_crypto_op_ctod_offset(op,
+ uint8_t *, IV_OFFSET);
+ op->sym->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(op,
+ IV_OFFSET);
op->sym->cipher.iv.length = AES_CIPHER_IV_LENGTH;
rte_memcpy(op->sym->cipher.iv.data, aes_iv, AES_CIPHER_IV_LENGTH);
- op->sym->cipher.data.offset = AES_CIPHER_IV_LENGTH;
+ op->sym->cipher.data.offset = 0;
op->sym->cipher.data.length = data_len;
op->sym->m_src = m;
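[Note: the same four-step IV setup (ctod, ctophys, length, memcpy) recurs for every algorithm below. As a sketch only, not part of this patch, it could be factored into a helper; IV_OFFSET as assumed above:

    #include <rte_crypto.h>
    #include <rte_memcpy.h>

    /* hypothetical helper: place the IV in the op's private area and
     * point the cipher xform's IV fields at it */
    static void
    set_cipher_iv(struct rte_crypto_op *op, const uint8_t *iv, uint16_t len)
    {
            op->sym->cipher.iv.data = rte_crypto_op_ctod_offset(op,
                            uint8_t *, IV_OFFSET);
            op->sym->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(op,
                            IV_OFFSET);
            op->sym->cipher.iv.length = len;
            rte_memcpy(op->sym->cipher.iv.data, iv, len);
    }
]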
op->sym->auth.aad.length = AES_GCM_AAD_LENGTH;
/* Cipher Parameters */
- op->sym->cipher.iv.data = aes_iv;
+ op->sym->cipher.iv.data = rte_crypto_op_ctod_offset(op,
+ uint8_t *, IV_OFFSET);
+ op->sym->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(op,
+ IV_OFFSET);
op->sym->cipher.iv.length = AES_CIPHER_IV_LENGTH;
+ rte_memcpy(op->sym->cipher.iv.data, aes_iv, AES_CIPHER_IV_LENGTH);
/* Data lengths/offsets Parameters */
op->sym->auth.data.offset = 0;
struct rte_cryptodev_sym_session *sess, unsigned data_len,
unsigned digest_len)
{
+ uint8_t *iv_ptr = rte_crypto_op_ctod_offset(op,
+ uint8_t *, IV_OFFSET);
+
if (rte_crypto_op_attach_sym_session(op, sess) != 0) {
rte_crypto_op_free(op);
return NULL;
}
+ rte_memcpy(iv_ptr, snow3g_iv, SNOW3G_CIPHER_IV_LENGTH);
+
/* Authentication Parameters */
op->sym->auth.digest.data = (uint8_t *)m->buf_addr +
(m->data_off + data_len);
op->sym->auth.digest.phys_addr =
rte_pktmbuf_mtophys_offset(m, data_len);
op->sym->auth.digest.length = digest_len;
- op->sym->auth.aad.data = snow3g_iv;
+ op->sym->auth.aad.data = iv_ptr;
+ op->sym->auth.aad.phys_addr = rte_crypto_op_ctophys_offset(op,
+ IV_OFFSET);
op->sym->auth.aad.length = SNOW3G_CIPHER_IV_LENGTH;
/* Cipher Parameters */
- op->sym->cipher.iv.data = snow3g_iv;
+ op->sym->cipher.iv.data = iv_ptr;
+ op->sym->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(op,
+ IV_OFFSET);
op->sym->cipher.iv.length = SNOW3G_CIPHER_IV_LENGTH;
/* Data lengths/offsets Parameters */
}
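[Note: iv_ptr does double duty here. In this generation of the API the SNOW 3G UIA2 authentication IV is carried through the aad field, so the single rte_memcpy() above feeds both transforms:

    uint8_t *iv_ptr = rte_crypto_op_ctod_offset(op, uint8_t *, IV_OFFSET);
    rte_memcpy(iv_ptr, snow3g_iv, SNOW3G_CIPHER_IV_LENGTH);
    op->sym->cipher.iv.data = iv_ptr;   /* UEA2 cipher IV */
    op->sym->auth.aad.data  = iv_ptr;   /* UIA2 IV, carried as AAD */
]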
/* Cipher Parameters */
- op->sym->cipher.iv.data = rte_pktmbuf_mtod(m, uint8_t *);
+ op->sym->cipher.iv.data = rte_crypto_op_ctod_offset(op,
+ uint8_t *, IV_OFFSET);
+ op->sym->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(op,
+ IV_OFFSET);
op->sym->cipher.iv.length = SNOW3G_CIPHER_IV_LENGTH;
rte_memcpy(op->sym->cipher.iv.data, snow3g_iv, SNOW3G_CIPHER_IV_LENGTH);
- op->sym->cipher.iv.phys_addr = rte_pktmbuf_mtophys(m);
- op->sym->cipher.data.offset = SNOW3G_CIPHER_IV_LENGTH;
+ op->sym->cipher.data.offset = 0;
op->sym->cipher.data.length = data_len << 3;
op->sym->m_src = m;
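[Note on the shift above: SNOW 3G expresses data offsets and lengths in bits, so byte counts are converted with << 3; an offset of 0 is unaffected. For example:

    /* a 64-byte payload is described to the PMD as 512 bits */
    op->sym->cipher.data.offset = 0;            /* bits */
    op->sym->cipher.data.length = 64 << 3;      /* 512 bits */
]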
rte_pktmbuf_mtophys_offset(m, data_len +
SNOW3G_CIPHER_IV_LENGTH);
op->sym->auth.digest.length = digest_len;
- op->sym->auth.aad.data = rte_pktmbuf_mtod(m, uint8_t *);
+ op->sym->auth.aad.data = rte_crypto_op_ctod_offset(op,
+ uint8_t *, IV_OFFSET);
+ op->sym->auth.aad.phys_addr = rte_crypto_op_ctophys_offset(op,
+ IV_OFFSET);
op->sym->auth.aad.length = SNOW3G_CIPHER_IV_LENGTH;
rte_memcpy(op->sym->auth.aad.data, snow3g_iv,
SNOW3G_CIPHER_IV_LENGTH);
- op->sym->auth.aad.phys_addr = rte_pktmbuf_mtophys(m);
/* Data lengths/offsets Parameters */
- op->sym->auth.data.offset = SNOW3G_CIPHER_IV_LENGTH;
+ op->sym->auth.data.offset = 0;
op->sym->auth.data.length = data_len << 3;
op->sym->m_src = m;
op->sym->auth.digest.length = digest_len;
/* Cipher Parameters */
- op->sym->cipher.iv.data = triple_des_iv;
+ op->sym->cipher.iv.data = rte_crypto_op_ctod_offset(op,
+ uint8_t *, IV_OFFSET);
+ op->sym->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(op,
+ IV_OFFSET);
op->sym->cipher.iv.length = TRIPLE_DES_CIPHER_IV_LENGTH;
+ rte_memcpy(op->sym->cipher.iv.data, triple_des_iv,
+ TRIPLE_DES_CIPHER_IV_LENGTH);
/* Data lengths/offsets Parameters */
op->sym->auth.data.offset = 0;
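[Note: 3DES reuses the same op-private slot with a shorter IV, which is safe because the pool reserves the worst-case MAXIMUM_IV_LENGTH per op. The constants involved are presumably along these lines (hypothetical values, consistent with the algorithms used in these tests):

    #define TRIPLE_DES_CIPHER_IV_LENGTH 8   /* one DES block */
    #define MAXIMUM_IV_LENGTH           16  /* AES / SNOW 3G IVs */
]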
return -1;
}
- /* Make room for Digest and IV in mbuf */
+ /* Make room for Digest in mbuf */
if (pparams->chain != CIPHER_ONLY)
rte_pktmbuf_append(mbufs[i], digest_length);
- rte_pktmbuf_prepend(mbufs[i], AES_CIPHER_IV_LENGTH);
}
/* Generate a burst of crypto operations */
for (i = 0; i < (pparams->burst_size * NUM_MBUF_SETS); i++) {
/*
- * Buffer size + iv/aad len is allocated, for perf tests they
- * are equal + digest len.
+ * Buffer size is allocated; for perf tests it
+ * equals the data length plus the digest length.
*/
mbufs[i] = test_perf_create_pktmbuf(
ts_params->mbuf_mp,
- pparams->buf_size + SNOW3G_CIPHER_IV_LENGTH +
+ pparams->buf_size +
digest_length);
if (mbufs[i] == NULL) {
return NULL;
}
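[Note: with the IV gone from the packet buffer, only the digest still extends the mbuf. A worked example of the size change, assuming a 64-byte payload and the 4-byte SNOW 3G UIA2 digest (illustrative numbers only):

    /* before: 64 (data) + 16 (IV) + 4 (digest) = 84 bytes in the mbuf
     * after:  64 (data) + 4 (digest) = 68 bytes; the 16 IV bytes now
     * come out of the op mempool's private area instead */
]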
- uint16_t iv_pad_len = ALIGN_POW2_ROUNDUP(params->symmetric_op->iv_len,
- 16);
-
op->sym->auth.digest.data = m_hlp->digest;
op->sym->auth.digest.phys_addr = rte_pktmbuf_mtophys_offset(
m,
params->symmetric_op->aad_len +
- iv_pad_len +
params->symmetric_op->p_len);
op->sym->auth.digest.length = params->symmetric_op->t_len;
op->sym->auth.aad.data = m_hlp->aad;
op->sym->auth.aad.length = params->symmetric_op->aad_len;
- op->sym->auth.aad.phys_addr = rte_pktmbuf_mtophys_offset(
- m,
- iv_pad_len);
+ op->sym->auth.aad.phys_addr = rte_pktmbuf_mtophys(m);
rte_memcpy(op->sym->auth.aad.data, params->symmetric_op->aad_data,
params->symmetric_op->aad_len);
- op->sym->cipher.iv.data = m_hlp->iv;
+ op->sym->cipher.iv.data = rte_crypto_op_ctod_offset(op,
+ uint8_t *, IV_OFFSET);
+ op->sym->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(op,
+ IV_OFFSET);
rte_memcpy(op->sym->cipher.iv.data, params->symmetric_op->iv_data,
params->symmetric_op->iv_len);
if (params->symmetric_op->iv_len == 12)
op->sym->cipher.iv.length = params->symmetric_op->iv_len;
op->sym->auth.data.offset =
- iv_pad_len + params->symmetric_op->aad_len;
+ params->symmetric_op->aad_len;
op->sym->auth.data.length = params->symmetric_op->p_len;
op->sym->cipher.data.offset =
- iv_pad_len + params->symmetric_op->aad_len;
+ params->symmetric_op->aad_len;
op->sym->cipher.data.length = params->symmetric_op->p_len;
op->sym->m_src = m;
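[Note: after this change the GCM test mbuf carries only AAD, plaintext and tag, which is why both data offsets equal aad_len and the digest sits at aad_len + p_len. The layout implied by this hunk:

    mbuf: [ AAD (aad_len) | plaintext (p_len) | tag (t_len) ]
          cipher/auth data.offset = aad_len
          digest at offset aad_len + p_len
    op:   IV at IV_OFFSET, copied from params->symmetric_op->iv_data
]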
unsigned buf_sz, struct crypto_params *m_hlp)
{
struct rte_mbuf *m = rte_pktmbuf_alloc(mpool);
- uint16_t iv_pad_len =
- ALIGN_POW2_ROUNDUP(params->symmetric_op->iv_len, 16);
uint16_t aad_len = params->symmetric_op->aad_len;
uint16_t digest_size = params->symmetric_op->t_len;
char *p;
}
m_hlp->aad = (uint8_t *)p;
- p = rte_pktmbuf_append(m, iv_pad_len);
- if (p == NULL) {
- rte_pktmbuf_free(m);
- return NULL;
- }
- m_hlp->iv = (uint8_t *)p;
-
p = rte_pktmbuf_append(m, buf_sz);
if (p == NULL) {
rte_pktmbuf_free(m);
for (m = 0; m < burst_dequeued; m++) {
if (test_ops) {
- uint16_t iv_pad_len = ALIGN_POW2_ROUNDUP
- (pparams->symmetric_op->iv_len, 16);
uint8_t *pkt = rte_pktmbuf_mtod(
proc_ops[m]->sym->m_src,
uint8_t *);
TEST_ASSERT_BUFFERS_ARE_EQUAL(
pparams->symmetric_op->c_data,
- pkt + iv_pad_len +
+ pkt +
pparams->symmetric_op->aad_len,
pparams->symmetric_op->c_len,
"GCM Ciphertext data not as expected");
TEST_ASSERT_BUFFERS_ARE_EQUAL(
pparams->symmetric_op->t_data,
- pkt + iv_pad_len +
+ pkt +
pparams->symmetric_op->aad_len +
pparams->symmetric_op->c_len,
pparams->symmetric_op->t_len,