sym_op->auth.digest.length = options->auth_digest_sz;
sym_op->auth.aad.phys_addr = test_vector->aad.phys_addr;
sym_op->auth.aad.data = test_vector->aad.data;
- sym_op->auth.aad.length = options->auth_aad_sz;
}
sym_op->auth.digest.length = options->auth_digest_sz;
sym_op->auth.aad.phys_addr = test_vector->aad.phys_addr;
sym_op->auth.aad.data = test_vector->aad.data;
- sym_op->auth.aad.length = options->auth_aad_sz;
}
if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
sym_op->auth.aad.data = rte_pktmbuf_mtod(bufs_in[i], uint8_t *);
sym_op->auth.aad.phys_addr = rte_pktmbuf_mtophys(bufs_in[i]);
- sym_op->auth.aad.length = options->auth_aad_sz;
/* authentication parameters */
if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
struct {
uint8_t *data;
phys_addr_t phys_addr;
- uint16_t length;
} aad; /**< Additional authentication parameters */
} auth;
}
* Replaced pointer and physical address of IV with offset from the start
of the crypto operation.
* Moved length and offset of cipher IV to ``rte_crypto_cipher_xform``.
+ * Removed Additional Authentication Data (AAD) length from the crypto
+   operation structure; the AAD length is now provided once at session
+   creation through the authentication transform.
* **Reorganized the crypto operation structure.**
* **Reorganized the ``rte_crypto_sym_auth_xform`` structure.**
* Added authentication IV length and offset parameters.
+ * Changed field size of AAD length in the authentication transform from
+   uint32_t to uint16_t.
Shared Library Versions
return -EINVAL;
}
+ sess->aad_length = auth_xform->auth.add_auth_data_length;
+
return 0;
}
aesni_gcm_enc[session->key].init(&session->gdata,
iv_ptr,
sym_op->auth.aad.data,
- (uint64_t)sym_op->auth.aad.length);
+ (uint64_t)session->aad_length);
aesni_gcm_enc[session->key].update(&session->gdata, dst, src,
(uint64_t)part_len);
aesni_gcm_dec[session->key].init(&session->gdata,
iv_ptr,
sym_op->auth.aad.data,
- (uint64_t)sym_op->auth.aad.length);
+ (uint64_t)session->aad_length);
aesni_gcm_dec[session->key].update(&session->gdata, dst, src,
(uint64_t)part_len);
/*-
* BSD LICENSE
*
- * Copyright(c) 2016 Intel Corporation. All rights reserved.
+ * Copyright(c) 2016-2017 Intel Corporation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
uint16_t offset;
} iv;
/**< IV parameters */
+ uint16_t aad_length;
+ /**< AAD length */
enum aesni_gcm_operation op;
/**< GCM operation type */
enum aesni_gcm_key key;
return -EINVAL;
}
+ sess->auth.aad_length = xform->auth.add_auth_data_length;
+
return 0;
}
sess->iv.offset);
ivlen = sess->iv.length;
aad = op->sym->auth.aad.data;
- aadlen = op->sym->auth.aad.length;
+ aadlen = sess->auth.aad_length;
tag = op->sym->auth.digest.data;
if (tag == NULL)
/*-
* BSD LICENSE
*
- * Copyright(c) 2016 Intel Corporation. All rights reserved.
+ * Copyright(c) 2016-2017 Intel Corporation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
/**< pointer to EVP context structure */
} hmac;
};
+
+ uint16_t aad_length;
+ /**< AAD length */
} auth;
} __rte_cache_aligned;
ICP_QAT_HW_GALOIS_128_STATE1_SZ +
ICP_QAT_HW_GALOIS_H_SZ);
*aad_len = rte_bswap32(add_auth_data_length);
+ cdesc->aad_len = add_auth_data_length;
break;
case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
cipher_param->cipher_length = 0;
cipher_param->cipher_offset = 0;
auth_param->u1.aad_adr = 0;
- auth_param->auth_len = op->sym->auth.aad.length;
+ auth_param->auth_len = ctx->aad_len;
auth_param->auth_off = op->sym->auth.data.offset;
auth_param->u2.aad_sz = 0;
}
rte_hexdump(stdout, "digest:", op->sym->auth.digest.data,
op->sym->auth.digest.length);
rte_hexdump(stdout, "aad:", op->sym->auth.aad.data,
- op->sym->auth.aad.length);
+ ctx->aad_len);
}
#endif
return 0;
sym_cop->auth.aad.data = aad;
sym_cop->auth.aad.phys_addr = rte_pktmbuf_mtophys_offset(m,
aad - rte_pktmbuf_mtod(m, uint8_t *));
- sym_cop->auth.aad.length = 8;
break;
default:
RTE_LOG(ERR, IPSEC_ESP, "unsupported auth algorithm %u\n",
sym_cop->auth.aad.data = aad;
sym_cop->auth.aad.phys_addr = rte_pktmbuf_mtophys_offset(m,
aad - rte_pktmbuf_mtod(m, uint8_t *));
- sym_cop->auth.aad.length = 8;
break;
default:
RTE_LOG(ERR, IPSEC_ESP, "unsupported auth algorithm %u\n",
if (cparams->aad.length) {
op->sym->auth.aad.data = cparams->aad.data;
op->sym->auth.aad.phys_addr = cparams->aad.phys_addr;
- op->sym->auth.aad.length = cparams->aad.length;
} else {
op->sym->auth.aad.data = NULL;
op->sym->auth.aad.phys_addr = 0;
- op->sym->auth.aad.length = 0;
}
}
options->auth_xform.auth.digest_length;
if (options->auth_xform.auth.add_auth_data_length) {
port_cparams[i].aad.data = options->aad.data;
- port_cparams[i].aad.length =
- options->auth_xform.auth.add_auth_data_length;
port_cparams[i].aad.phys_addr = options->aad.phys_addr;
if (!options->aad_param)
generate_random_key(port_cparams[i].aad.data,
* the result shall be truncated.
*/
- uint32_t add_auth_data_length;
+ uint16_t add_auth_data_length;
/**< The length of the additional authenticated data (AAD) in bytes.
* The maximum permitted value is 65535 (2^16 - 1) bytes, unless
* otherwise specified below.
* operation, this field is used to pass plaintext.
*/
phys_addr_t phys_addr; /**< physical address */
- uint16_t length;
- /**< Length of additional authenticated data (AAD)
- * in bytes
- */
} aad;
/**< Additional authentication parameters */
} auth;
static int
create_gcm_session(uint8_t dev_id, enum rte_crypto_cipher_operation op,
const uint8_t *key, const uint8_t key_len,
- const uint8_t aad_len, const uint8_t auth_len,
+ const uint16_t aad_len, const uint8_t auth_len,
uint8_t iv_len,
enum rte_crypto_auth_operation auth_op)
{
TEST_ASSERT_NOT_NULL(sym_op->auth.aad.data,
"no room to append aad");
- sym_op->auth.aad.length = tdata->aad.len;
sym_op->auth.aad.phys_addr =
rte_pktmbuf_mtophys(ut_params->ibuf);
memcpy(sym_op->auth.aad.data, tdata->aad.data, tdata->aad.len);
TEST_HEXDUMP(stdout, "aad:", sym_op->auth.aad.data,
- sym_op->auth.aad.length);
+ tdata->aad.len);
/* Append IV at the end of the crypto operation*/
uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ut_params->op,
TEST_ASSERT_NOT_NULL(sym_op->auth.aad.data,
"no room to append aad");
- sym_op->auth.aad.length = tdata->aad.len;
sym_op->auth.aad.phys_addr =
rte_pktmbuf_mtophys(ut_params->ibuf);
memcpy(sym_op->auth.aad.data, tdata->aad.data, tdata->aad.len);
ut_params->auth_xform.auth.algo = RTE_CRYPTO_AUTH_AES_GMAC;
ut_params->auth_xform.auth.op = auth_op;
ut_params->auth_xform.auth.digest_length = tdata->gmac_tag.len;
- ut_params->auth_xform.auth.add_auth_data_length = 0;
+ ut_params->auth_xform.auth.add_auth_data_length = tdata->aad.len;
ut_params->auth_xform.auth.key.length = 0;
ut_params->auth_xform.auth.key.data = NULL;
TEST_HEXDUMP(stdout, "AAD:", sym_op->auth.aad.data, reference->aad.len);
sym_op->auth.aad.phys_addr = rte_pktmbuf_mtophys(ut_params->ibuf);
- sym_op->auth.aad.length = reference->aad.len;
/* digest */
sym_op->auth.digest.data = (uint8_t *)rte_pktmbuf_append(
"no room to prepend aad");
sym_op->auth.aad.phys_addr = rte_pktmbuf_mtophys(
ut_params->ibuf);
- sym_op->auth.aad.length = aad_len;
memset(sym_op->auth.aad.data, 0, aad_len);
rte_memcpy(sym_op->auth.aad.data, tdata->aad.data, aad_len);
#define AES_CIPHER_IV_LENGTH 16
#define TRIPLE_DES_CIPHER_IV_LENGTH 8
+#define AES_GCM_AAD_LENGTH 16
#define PERF_NUM_OPS_INFLIGHT (128)
#define DEFAULT_NUM_REQS_TO_SUBMIT (10000000)
struct symmetric_op {
const uint8_t *aad_data;
- uint32_t aad_len;
const uint8_t *p_data;
uint32_t p_len;
const uint8_t *iv_data;
uint16_t iv_len;
+ uint16_t aad_len;
uint32_t digest_len;
};
break;
case RTE_CRYPTO_AUTH_AES_GCM:
auth_xform.auth.key.data = NULL;
+ auth_xform.auth.add_auth_data_length = AES_GCM_AAD_LENGTH;
break;
default:
return NULL;
}
}
-#define AES_GCM_AAD_LENGTH 16
-
static struct rte_mbuf *
test_perf_create_pktmbuf(struct rte_mempool *mpool, unsigned buf_sz)
{
op->sym->auth.digest.phys_addr = 0;
op->sym->auth.digest.length = 0;
op->sym->auth.aad.data = NULL;
- op->sym->auth.aad.length = 0;
op->sym->auth.data.offset = 0;
op->sym->auth.data.length = 0;
} else {
rte_pktmbuf_mtophys_offset(m, data_len);
op->sym->auth.digest.length = digest_len;
op->sym->auth.aad.data = aes_gcm_aad;
- op->sym->auth.aad.length = AES_GCM_AAD_LENGTH;
/* Copy IV at the end of the crypto operation */
rte_memcpy(rte_crypto_op_ctod_offset(op, uint8_t *, IV_OFFSET),
rte_memcpy(rte_crypto_op_ctod_offset(op, uint8_t *, IV_OFFSET),
snow3g_iv, SNOW3G_CIPHER_IV_LENGTH);
+ /* Cipher Parameters */
op->sym->cipher.data.offset = 0;
op->sym->cipher.data.length = data_len << 3;
+ rte_memcpy(rte_crypto_op_ctod_offset(op, uint8_t *, IV_OFFSET),
+ snow3g_iv,
+ SNOW3G_CIPHER_IV_LENGTH);
+
op->sym->m_src = m;
return op;
auth_xform.auth.op = pparams->session_attrs->auth;
auth_xform.auth.algo = pparams->session_attrs->auth_algorithm;
+ auth_xform.auth.add_auth_data_length = pparams->session_attrs->aad_len;
auth_xform.auth.digest_length = pparams->session_attrs->digest_len;
auth_xform.auth.key.length = pparams->session_attrs->key_auth_len;
op->sym->auth.digest.data = m_hlp->digest;
op->sym->auth.digest.phys_addr = rte_pktmbuf_mtophys_offset(
m,
- params->symmetric_op->aad_len +
+ params->session_attrs->aad_len +
params->symmetric_op->p_len);
op->sym->auth.digest.length = params->symmetric_op->t_len;
op->sym->auth.aad.data = m_hlp->aad;
- op->sym->auth.aad.length = params->symmetric_op->aad_len;
op->sym->auth.aad.phys_addr = rte_pktmbuf_mtophys(m);
rte_memcpy(op->sym->auth.aad.data, params->symmetric_op->aad_data,
- params->symmetric_op->aad_len);
+ params->session_attrs->aad_len);
rte_memcpy(iv_ptr, params->session_attrs->iv_data,
params->session_attrs->iv_len);
iv_ptr[15] = 1;
op->sym->auth.data.offset =
- params->symmetric_op->aad_len;
+ params->session_attrs->aad_len;
op->sym->auth.data.length = params->symmetric_op->p_len;
op->sym->cipher.data.offset =
- params->symmetric_op->aad_len;
+ params->session_attrs->aad_len;
op->sym->cipher.data.length = params->symmetric_op->p_len;
op->sym->m_src = m;
unsigned buf_sz, struct crypto_params *m_hlp)
{
struct rte_mbuf *m = rte_pktmbuf_alloc(mpool);
- uint16_t aad_len = params->symmetric_op->aad_len;
+ uint16_t aad_len = params->session_attrs->aad_len;
uint16_t digest_size = params->symmetric_op->t_len;
char *p;
TEST_ASSERT_BUFFERS_ARE_EQUAL(
pparams->symmetric_op->c_data,
pkt +
- pparams->symmetric_op->aad_len,
+ pparams->session_attrs->aad_len,
pparams->symmetric_op->c_len,
"GCM Ciphertext data not as expected");
TEST_ASSERT_BUFFERS_ARE_EQUAL(
pparams->symmetric_op->t_data,
pkt +
- pparams->symmetric_op->aad_len +
+ pparams->session_attrs->aad_len +
pparams->symmetric_op->c_len,
pparams->symmetric_op->t_len,
"GCM MAC data not as expected");
RTE_CRYPTO_AUTH_OP_GENERATE;
session_attrs[i].key_auth_data = NULL;
session_attrs[i].key_auth_len = 0;
+ session_attrs[i].aad_len = gcm_test->aad.len;
session_attrs[i].digest_len =
gcm_test->auth_tag.len;
session_attrs[i].iv_len = gcm_test->iv.len;
session_attrs[i].iv_data = gcm_test->iv.data;
ops_set[i].aad_data = gcm_test->aad.data;
- ops_set[i].aad_len = gcm_test->aad.len;
ops_set[i].p_data = gcm_test->plaintext.data;
ops_set[i].p_len = buf_lengths[i];
ops_set[i].c_data = gcm_test->ciphertext.data;