static void
calculate_auth_precomputes(hash_one_block_t one_block_hash,
uint8_t *ipad, uint8_t *opad,
- uint8_t *hkey, uint16_t hkey_len,
+ const uint8_t *hkey, uint16_t hkey_len,
uint16_t blocksize)
{
unsigned i, length;
if (xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
return AESNI_MB_OP_HASH_CIPHER;
}
-
+#if IMB_VERSION_NUM > IMB_VERSION(0, 52, 0)
+ if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
+ if (xform->aead.op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
+ /*
+ * CCM requires hashing first and ciphering later
+ * when encrypting
+ */
+ if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_CCM)
+ return AESNI_MB_OP_AEAD_HASH_CIPHER;
+ else
+ return AESNI_MB_OP_AEAD_CIPHER_HASH;
+ } else {
+ if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_CCM)
+ return AESNI_MB_OP_AEAD_CIPHER_HASH;
+ else
+ return AESNI_MB_OP_AEAD_HASH_CIPHER;
+ }
+ }
+#else
if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_CCM ||
xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM) {
return AESNI_MB_OP_AEAD_HASH_CIPHER;
}
}
+#endif
return AESNI_MB_OP_NOT_SUPPORTED;
}