{
const struct rte_crypto_sym_xform *auth_xform;
const struct rte_crypto_sym_xform *cipher_xform;
+ uint16_t digest_length;
+	uint16_t key_length;
+ uint8_t *key;
- if (xform->next == NULL || xform->next->next != NULL) {
- GCM_LOG_ERR("Two and only two chained xform required");
- return -EINVAL;
- }
-
- if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
- xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH) {
- auth_xform = xform->next;
- cipher_xform = xform;
- } else if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH &&
- xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER) {
+ /* AES-GMAC */
+	if (xform->next == NULL) {
+		if (xform->type != RTE_CRYPTO_SYM_XFORM_AUTH) {
+			GCM_LOG_ERR("Auth xform required as the single xform");
+			return -EINVAL;
+		}
auth_xform = xform;
- cipher_xform = xform->next;
+ if (auth_xform->auth.algo != RTE_CRYPTO_AUTH_AES_GMAC) {
+ GCM_LOG_ERR("Only AES GMAC is supported as an "
+ "authentication only algorithm");
+ return -EINVAL;
+ }
+ /* Set IV parameters */
+ sess->iv.offset = auth_xform->auth.iv.offset;
+ sess->iv.length = auth_xform->auth.iv.length;
+
+ /* Select Crypto operation */
+ if (auth_xform->auth.op == RTE_CRYPTO_AUTH_OP_GENERATE)
+ sess->op = AESNI_GMAC_OP_GENERATE;
+ else
+ sess->op = AESNI_GMAC_OP_VERIFY;
+
+ key_length = auth_xform->auth.key.length;
+ key = auth_xform->auth.key.data;
+ /* AES-GCM */
} else {
- GCM_LOG_ERR("Cipher and auth xform required");
- return -EINVAL;
- }
+ if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
+ xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH) {
+ auth_xform = xform->next;
+ cipher_xform = xform;
+ } else if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH &&
+ xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER) {
+ auth_xform = xform;
+ cipher_xform = xform->next;
+ } else {
+ GCM_LOG_ERR("Cipher and auth xform required "
+ "when using AES GCM");
+ return -EINVAL;
+ }
- if (!(cipher_xform->cipher.algo == RTE_CRYPTO_CIPHER_AES_GCM &&
- (auth_xform->auth.algo == RTE_CRYPTO_AUTH_AES_GCM ||
- auth_xform->auth.algo == RTE_CRYPTO_AUTH_AES_GMAC))) {
- GCM_LOG_ERR("We only support AES GCM and AES GMAC");
- return -EINVAL;
+ if (!(cipher_xform->cipher.algo == RTE_CRYPTO_CIPHER_AES_GCM &&
+ (auth_xform->auth.algo == RTE_CRYPTO_AUTH_AES_GCM))) {
+ GCM_LOG_ERR("The only combined operation "
+ "supported is AES GCM");
+ return -EINVAL;
+ }
+
+ /* Set IV parameters */
+ sess->iv.offset = cipher_xform->cipher.iv.offset;
+ sess->iv.length = cipher_xform->cipher.iv.length;
+
+ /* Select Crypto operation */
+ if (cipher_xform->cipher.op == RTE_CRYPTO_CIPHER_OP_ENCRYPT &&
+ auth_xform->auth.op == RTE_CRYPTO_AUTH_OP_GENERATE)
+ sess->op = AESNI_GCM_OP_AUTHENTICATED_ENCRYPTION;
+ else if (cipher_xform->cipher.op == RTE_CRYPTO_CIPHER_OP_DECRYPT &&
+ auth_xform->auth.op == RTE_CRYPTO_AUTH_OP_VERIFY)
+ sess->op = AESNI_GCM_OP_AUTHENTICATED_DECRYPTION;
+ else {
+ GCM_LOG_ERR("Cipher/Auth operations: Encrypt/Generate or"
+ " Decrypt/Verify are valid only");
+ return -EINVAL;
+ }
+
+		key_length = cipher_xform->cipher.key.length;
+		key = cipher_xform->cipher.key.data;
+
+ sess->aad_length = auth_xform->auth.add_auth_data_length;
}
- /* Select Crypto operation */
- if (cipher_xform->cipher.op == RTE_CRYPTO_CIPHER_OP_ENCRYPT &&
- auth_xform->auth.op == RTE_CRYPTO_AUTH_OP_GENERATE)
- sess->op = AESNI_GCM_OP_AUTHENTICATED_ENCRYPTION;
- else if (cipher_xform->cipher.op == RTE_CRYPTO_CIPHER_OP_DECRYPT &&
- auth_xform->auth.op == RTE_CRYPTO_AUTH_OP_VERIFY)
- sess->op = AESNI_GCM_OP_AUTHENTICATED_DECRYPTION;
- else {
- GCM_LOG_ERR("Cipher/Auth operations: Encrypt/Generate or"
- " Decrypt/Verify are valid only");
+ /* IV check */
+ if (sess->iv.length != 16 && sess->iv.length != 12 &&
+ sess->iv.length != 0) {
+ GCM_LOG_ERR("Wrong IV length");
return -EINVAL;
}
+ digest_length = auth_xform->auth.digest_length;
+
/* Check key length and calculate GCM pre-compute. */
- switch (cipher_xform->cipher.key.length) {
+ switch (key_length) {
case 16:
- aesni_gcm128_pre(cipher_xform->cipher.key.data, &sess->gdata);
+ aesni_gcm128_pre(key, &sess->gdata);
sess->key = AESNI_GCM_KEY_128;
break;
case 32:
- aesni_gcm256_pre(cipher_xform->cipher.key.data, &sess->gdata);
+ aesni_gcm256_pre(key, &sess->gdata);
sess->key = AESNI_GCM_KEY_256;
break;
default:
- GCM_LOG_ERR("Unsupported cipher key length");
+ GCM_LOG_ERR("Unsupported cipher/auth key length");
return -EINVAL;
}
+ /* Digest check */
+ if (digest_length != 16 &&
+ digest_length != 12 &&
+ digest_length != 8) {
+ GCM_LOG_ERR("digest");
+ return -EINVAL;
+ }
+ sess->digest_length = digest_length;
+
return 0;
}
uint8_t *iv_ptr;
struct rte_crypto_sym_op *sym_op = op->sym;
struct rte_mbuf *m_src = sym_op->m_src;
- uint32_t offset = sym_op->cipher.data.offset;
+ uint32_t offset, data_offset, data_length;
uint32_t part_len, total_len, data_len;
+ if (session->op == AESNI_GCM_OP_AUTHENTICATED_ENCRYPTION ||
+ session->op == AESNI_GCM_OP_AUTHENTICATED_DECRYPTION) {
+ offset = sym_op->cipher.data.offset;
+ data_offset = offset;
+ data_length = sym_op->cipher.data.length;
+ } else {
+ offset = sym_op->auth.data.offset;
+ data_offset = offset;
+ data_length = sym_op->auth.data.length;
+ }
+
RTE_ASSERT(m_src != NULL);
while (offset >= m_src->data_len) {
}
data_len = m_src->data_len - offset;
- part_len = (data_len < sym_op->cipher.data.length) ? data_len :
- sym_op->cipher.data.length;
+ part_len = (data_len < data_length) ? data_len :
+ data_length;
/* Destination buffer is required when segmented source buffer */
- RTE_ASSERT((part_len == sym_op->cipher.data.length) ||
- ((part_len != sym_op->cipher.data.length) &&
+ RTE_ASSERT((part_len == data_length) ||
+ ((part_len != data_length) &&
(sym_op->m_dst != NULL)));
/* Segmented destination buffer is not supported */
RTE_ASSERT((sym_op->m_dst == NULL) ||
dst = sym_op->m_dst ?
rte_pktmbuf_mtod_offset(sym_op->m_dst, uint8_t *,
- sym_op->cipher.data.offset) :
+ data_offset) :
rte_pktmbuf_mtod_offset(sym_op->m_src, uint8_t *,
- sym_op->cipher.data.offset);
+ data_offset);
src = rte_pktmbuf_mtod_offset(m_src, uint8_t *, offset);
- /* sanity checks */
- if (sym_op->cipher.iv.length != 16 && sym_op->cipher.iv.length != 12 &&
- sym_op->cipher.iv.length != 0) {
- GCM_LOG_ERR("iv");
- return -1;
- }
-
iv_ptr = rte_crypto_op_ctod_offset(op, uint8_t *,
- sym_op->cipher.iv.offset);
+ session->iv.offset);
/*
* GCM working in 12B IV mode => 16B pre-counter block we need
* to set BE LSB to 1, driver expects that 16B is allocated
*/
- if (sym_op->cipher.iv.length == 12) {
+ if (session->iv.length == 12) {
uint32_t *iv_padd = (uint32_t *)&(iv_ptr[12]);
*iv_padd = rte_bswap32(1);
}
- if (sym_op->auth.digest.length != 16 &&
- sym_op->auth.digest.length != 12 &&
- sym_op->auth.digest.length != 8) {
- GCM_LOG_ERR("digest");
- return -1;
- }
-
if (session->op == AESNI_GCM_OP_AUTHENTICATED_ENCRYPTION) {
aesni_gcm_enc[session->key].init(&session->gdata,
iv_ptr,
sym_op->auth.aad.data,
- (uint64_t)sym_op->auth.aad.length);
+ (uint64_t)session->aad_length);
aesni_gcm_enc[session->key].update(&session->gdata, dst, src,
(uint64_t)part_len);
- total_len = sym_op->cipher.data.length - part_len;
+ total_len = data_length - part_len;
while (total_len) {
dst += part_len;
aesni_gcm_enc[session->key].finalize(&session->gdata,
sym_op->auth.digest.data,
- (uint64_t)sym_op->auth.digest.length);
- } else { /* session->op == AESNI_GCM_OP_AUTHENTICATED_DECRYPTION */
+ (uint64_t)session->digest_length);
+ } else if (session->op == AESNI_GCM_OP_AUTHENTICATED_DECRYPTION) {
uint8_t *auth_tag = (uint8_t *)rte_pktmbuf_append(sym_op->m_dst ?
sym_op->m_dst : sym_op->m_src,
- sym_op->auth.digest.length);
+ session->digest_length);
if (!auth_tag) {
GCM_LOG_ERR("auth_tag");
aesni_gcm_dec[session->key].init(&session->gdata,
iv_ptr,
sym_op->auth.aad.data,
- (uint64_t)sym_op->auth.aad.length);
+ (uint64_t)session->aad_length);
aesni_gcm_dec[session->key].update(&session->gdata, dst, src,
(uint64_t)part_len);
- total_len = sym_op->cipher.data.length - part_len;
+ total_len = data_length - part_len;
while (total_len) {
dst += part_len;
aesni_gcm_dec[session->key].finalize(&session->gdata,
auth_tag,
- (uint64_t)sym_op->auth.digest.length);
+ (uint64_t)session->digest_length);
+ } else if (session->op == AESNI_GMAC_OP_GENERATE) {
+ aesni_gcm_enc[session->key].init(&session->gdata,
+ iv_ptr,
+ src,
+ (uint64_t)data_length);
+ aesni_gcm_enc[session->key].finalize(&session->gdata,
+ sym_op->auth.digest.data,
+ (uint64_t)session->digest_length);
+ } else { /* AESNI_GMAC_OP_VERIFY */
+ uint8_t *auth_tag = (uint8_t *)rte_pktmbuf_append(sym_op->m_dst ?
+ sym_op->m_dst : sym_op->m_src,
+ session->digest_length);
+
+ if (!auth_tag) {
+ GCM_LOG_ERR("auth_tag");
+ return -1;
+ }
+
+ aesni_gcm_dec[session->key].init(&session->gdata,
+ iv_ptr,
+ src,
+ (uint64_t)data_length);
+
+ aesni_gcm_dec[session->key].finalize(&session->gdata,
+ auth_tag,
+ (uint64_t)session->digest_length);
}
return 0;
op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
/* Verify digest if required */
- if (session->op == AESNI_GCM_OP_AUTHENTICATED_DECRYPTION) {
+ if (session->op == AESNI_GCM_OP_AUTHENTICATED_DECRYPTION ||
+ session->op == AESNI_GMAC_OP_VERIFY) {
uint8_t *tag = rte_pktmbuf_mtod_offset(m, uint8_t *,
- m->data_len - op->sym->auth.digest.length);
+ m->data_len - session->digest_length);
#ifdef RTE_LIBRTE_PMD_AESNI_GCM_DEBUG
rte_hexdump(stdout, "auth tag (orig):",
- op->sym->auth.digest.data, op->sym->auth.digest.length);
+ op->sym->auth.digest.data, session->digest_length);
rte_hexdump(stdout, "auth tag (calc):",
- tag, op->sym->auth.digest.length);
+ tag, session->digest_length);
#endif
if (memcmp(tag, op->sym->auth.digest.data,
- op->sym->auth.digest.length) != 0)
+ session->digest_length) != 0)
op->status = RTE_CRYPTO_OP_STATUS_AUTH_FAILED;
/* trim area used for digest from mbuf */
- rte_pktmbuf_trim(m, op->sym->auth.digest.length);
+ rte_pktmbuf_trim(m, session->digest_length);
}
}