#include "cnxk_security.h"
+static void
+ipsec_hmac_opad_ipad_gen(struct rte_crypto_sym_xform *auth_xform,
+ uint8_t *hmac_opad_ipad)
+{
+ const uint8_t *key = auth_xform->auth.key.data;
+ uint32_t length = auth_xform->auth.key.length;
+ uint8_t opad[128] = {[0 ... 127] = 0x5c};
+ uint8_t ipad[128] = {[0 ... 127] = 0x36};
+ uint32_t i;
+
+ /* XOR the key into the HMAC OPAD (0x5c) and IPAD (0x36) blocks */
+ for (i = 0; i < 128 && i < length; i++) {
+ opad[i] = opad[i] ^ key[i];
+ ipad[i] = ipad[i] ^ key[i];
+ }
+
+ /* Precompute the hash of HMAC OPAD and IPAD to avoid
+ * per-packet computation
+ */
+ switch (auth_xform->auth.algo) {
+ case RTE_CRYPTO_AUTH_SHA1_HMAC:
+ roc_hash_sha1_gen(opad, (uint32_t *)&hmac_opad_ipad[0]);
+ roc_hash_sha1_gen(ipad, (uint32_t *)&hmac_opad_ipad[24]);
+ break;
+ default:
+ break;
+ }
+}
+
static int
ot_ipsec_sa_common_param_fill(union roc_ot_ipsec_sa_word2 *w2,
uint8_t *cipher_key, uint8_t *salt_key,
+ uint8_t *hmac_opad_ipad,
struct rte_security_ipsec_xform *ipsec_xfrm,
struct rte_crypto_sym_xform *crypto_xfrm)
{
+ struct rte_crypto_sym_xform *auth_xfrm, *cipher_xfrm;
const uint8_t *key;
uint32_t *tmp_salt;
uint64_t *tmp_key;
switch (ipsec_xfrm->direction) {
case RTE_SECURITY_IPSEC_SA_DIR_INGRESS:
w2->s.dir = ROC_IE_OT_SA_DIR_INBOUND;
+ auth_xfrm = crypto_xfrm;
+ cipher_xfrm = crypto_xfrm->next;
break;
case RTE_SECURITY_IPSEC_SA_DIR_EGRESS:
w2->s.dir = ROC_IE_OT_SA_DIR_OUTBOUND;
+ cipher_xfrm = crypto_xfrm;
+ auth_xfrm = crypto_xfrm->next;
break;
default:
return -EINVAL;
}
} else {
- return -ENOTSUP;
+ switch (cipher_xfrm->cipher.algo) {
+ case RTE_CRYPTO_CIPHER_AES_CBC:
+ w2->s.enc_type = ROC_IE_OT_SA_ENC_AES_CBC;
+ break;
+ default:
+ return -ENOTSUP;
+ }
+
+ switch (auth_xfrm->auth.algo) {
+ case RTE_CRYPTO_AUTH_SHA1_HMAC:
+ w2->s.auth_type = ROC_IE_OT_SA_AUTH_SHA1;
+ break;
+ default:
+ return -ENOTSUP;
+ }
+
+ key = cipher_xfrm->cipher.key.data;
+ length = cipher_xfrm->cipher.key.length;
+
+ ipsec_hmac_opad_ipad_gen(auth_xfrm, hmac_opad_ipad);
+
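+ /* Byte swap the precomputed OPAD/IPAD digest words from big endian to CPU order */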
+ tmp_key = (uint64_t *)hmac_opad_ipad;
+ for (i = 0;
+ i < (int)(ROC_CTX_MAX_OPAD_IPAD_LEN / sizeof(uint64_t));
+ i++)
+ tmp_key[i] = rte_be_to_cpu_64(tmp_key[i]);
}
/* Set encapsulation type */
w2.u64 = 0;
rc = ot_ipsec_sa_common_param_fill(&w2, sa->cipher_key, sa->w8.s.salt,
- ipsec_xfrm, crypto_xfrm);
+ sa->hmac_opad_ipad, ipsec_xfrm,
+ crypto_xfrm);
if (rc)
return rc;
w2.u64 = 0;
rc = ot_ipsec_sa_common_param_fill(&w2, sa->cipher_key, sa->iv.s.salt,
- ipsec_xfrm, crypto_xfrm);
+ sa->hmac_opad_ipad, ipsec_xfrm,
+ crypto_xfrm);
if (rc)
return rc;
#include "roc_api.h"
+static int
+ipsec_xform_cipher_verify(struct rte_crypto_sym_xform *xform)
+{
+ if (xform->cipher.algo == RTE_CRYPTO_CIPHER_AES_CBC) {
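+ /* Allow the standard AES key sizes: 128, 192 and 256 bits */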
+ switch (xform->cipher.key.length) {
+ case 16:
+ case 24:
+ case 32:
+ break;
+ default:
+ return -ENOTSUP;
+ }
+ return 0;
+ }
+
+ return -ENOTSUP;
+}
+
+static int
+ipsec_xform_auth_verify(struct rte_crypto_sym_xform *xform)
+{
+ uint16_t keylen = xform->auth.key.length;
+
+ if (xform->auth.algo == RTE_CRYPTO_AUTH_SHA1_HMAC) {
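+ /* Accept keys between the SHA1 digest size (20) and block size (64) */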
+ if (keylen >= 20 && keylen <= 64)
+ return 0;
+ }
+
+ return -ENOTSUP;
+}
+
static int
ipsec_xform_aead_verify(struct rte_security_ipsec_xform *ipsec_xfrm,
struct rte_crypto_sym_xform *crypto_xfrm)
cn10k_ipsec_xform_verify(struct rte_security_ipsec_xform *ipsec_xfrm,
struct rte_crypto_sym_xform *crypto_xfrm)
{
+ struct rte_crypto_sym_xform *auth_xform, *cipher_xform;
+ int ret;
+
if ((ipsec_xfrm->direction != RTE_SECURITY_IPSEC_SA_DIR_INGRESS) &&
(ipsec_xfrm->direction != RTE_SECURITY_IPSEC_SA_DIR_EGRESS))
return -EINVAL;
if (crypto_xfrm->type == RTE_CRYPTO_SYM_XFORM_AEAD)
return ipsec_xform_aead_verify(ipsec_xfrm, crypto_xfrm);
- return -ENOTSUP;
+ if (crypto_xfrm->next == NULL)
+ return -EINVAL;
+
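+ /* For non-AEAD, expect a chained auth+cipher (ingress) or cipher+auth (egress) xform */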
+ if (ipsec_xfrm->direction == RTE_SECURITY_IPSEC_SA_DIR_INGRESS) {
+ /* Ingress */
+ if (crypto_xfrm->type != RTE_CRYPTO_SYM_XFORM_AUTH ||
+ crypto_xfrm->next->type != RTE_CRYPTO_SYM_XFORM_CIPHER)
+ return -EINVAL;
+ auth_xform = crypto_xfrm;
+ cipher_xform = crypto_xfrm->next;
+ } else {
+ /* Egress */
+ if (crypto_xfrm->type != RTE_CRYPTO_SYM_XFORM_CIPHER ||
+ crypto_xfrm->next->type != RTE_CRYPTO_SYM_XFORM_AUTH)
+ return -EINVAL;
+ cipher_xform = crypto_xfrm;
+ auth_xform = crypto_xfrm->next;
+ }
+
+ ret = ipsec_xform_cipher_verify(cipher_xform);
+ if (ret)
+ return ret;
+
+ ret = ipsec_xform_auth_verify(auth_xform);
+ if (ret)
+ return ret;
+
+ return 0;
}
static uint64_t
}, }
}, }
},
+ { /* AES CBC */
+ .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+ {.sym = {
+ .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+ {.cipher = {
+ .algo = RTE_CRYPTO_CIPHER_AES_CBC,
+ .block_size = 16,
+ .key_size = {
+ .min = 16,
+ .max = 32,
+ .increment = 8
+ },
+ .iv_size = {
+ .min = 16,
+ .max = 16,
+ .increment = 0
+ }
+ }, }
+ }, }
+ },
+};
+
+static const struct rte_cryptodev_capabilities sec_caps_sha1_sha2[] = {
+ { /* SHA1 HMAC */
+ .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+ {.sym = {
+ .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+ {.auth = {
+ .algo = RTE_CRYPTO_AUTH_SHA1_HMAC,
+ .block_size = 64,
+ .key_size = {
+ .min = 20,
+ .max = 64,
+ .increment = 1
+ },
+ .digest_size = {
+ .min = 12,
+ .max = 12,
+ .increment = 0
+ },
+ }, }
+ }, }
+ },
};
static const struct rte_security_capability sec_caps_templ[] = {
int cur_pos = 0;
SEC_CAPS_ADD(cnxk_caps, &cur_pos, hw_caps, aes);
+ SEC_CAPS_ADD(cnxk_caps, &cur_pos, hw_caps, sha1_sha2);
sec_caps_add(cnxk_caps, &cur_pos, caps_end, RTE_DIM(caps_end));
}