/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(C) 2021 Marvell.
 */
4 #ifndef __CNXK_IPSEC_H__
5 #define __CNXK_IPSEC_H__
#include <errno.h>

#include <rte_security.h>
#include <rte_security_driver.h>
12 extern struct rte_security_ops cnxk_sec_ops;
14 struct cnxk_cpt_inst_tmpl {
21 ipsec_xform_cipher_verify(struct rte_crypto_sym_xform *crypto_xform)
23 if (crypto_xform->cipher.algo == RTE_CRYPTO_CIPHER_NULL)
26 if (crypto_xform->cipher.algo == RTE_CRYPTO_CIPHER_AES_CBC ||
27 crypto_xform->cipher.algo == RTE_CRYPTO_CIPHER_AES_CTR) {
28 switch (crypto_xform->cipher.key.length) {
43 ipsec_xform_auth_verify(struct rte_crypto_sym_xform *crypto_xform)
45 uint16_t keylen = crypto_xform->auth.key.length;
47 if (crypto_xform->auth.algo == RTE_CRYPTO_AUTH_NULL)
50 if (crypto_xform->auth.algo == RTE_CRYPTO_AUTH_SHA1_HMAC) {
51 if (keylen >= 20 && keylen <= 64)
53 } else if (crypto_xform->auth.algo == RTE_CRYPTO_AUTH_SHA256_HMAC) {
54 if (keylen >= 32 && keylen <= 64)
56 } else if (crypto_xform->auth.algo == RTE_CRYPTO_AUTH_SHA384_HMAC) {
59 } else if (crypto_xform->auth.algo == RTE_CRYPTO_AUTH_SHA512_HMAC) {
62 } else if (crypto_xform->auth.algo == RTE_CRYPTO_AUTH_AES_GMAC) {
63 if (keylen >= 16 && keylen <= 32)
67 if (crypto_xform->auth.algo == RTE_CRYPTO_AUTH_AES_XCBC_MAC &&
68 keylen == ROC_CPT_AES_XCBC_KEY_LENGTH)
75 ipsec_xform_aead_verify(struct rte_security_ipsec_xform *ipsec_xform,
76 struct rte_crypto_sym_xform *crypto_xform)
78 if (ipsec_xform->direction == RTE_SECURITY_IPSEC_SA_DIR_EGRESS &&
79 crypto_xform->aead.op != RTE_CRYPTO_AEAD_OP_ENCRYPT)
82 if (ipsec_xform->direction == RTE_SECURITY_IPSEC_SA_DIR_INGRESS &&
83 crypto_xform->aead.op != RTE_CRYPTO_AEAD_OP_DECRYPT)
86 if (crypto_xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM) {
87 switch (crypto_xform->aead.key.length) {
102 cnxk_ipsec_xform_verify(struct rte_security_ipsec_xform *ipsec_xform,
103 struct rte_crypto_sym_xform *crypto_xform)
105 struct rte_crypto_sym_xform *auth_xform, *cipher_xform;
108 if ((ipsec_xform->direction != RTE_SECURITY_IPSEC_SA_DIR_INGRESS) &&
109 (ipsec_xform->direction != RTE_SECURITY_IPSEC_SA_DIR_EGRESS))
112 if ((ipsec_xform->proto != RTE_SECURITY_IPSEC_SA_PROTO_ESP) &&
113 (ipsec_xform->proto != RTE_SECURITY_IPSEC_SA_PROTO_AH))
116 if ((ipsec_xform->mode != RTE_SECURITY_IPSEC_SA_MODE_TRANSPORT) &&
117 (ipsec_xform->mode != RTE_SECURITY_IPSEC_SA_MODE_TUNNEL))
120 if ((ipsec_xform->mode == RTE_SECURITY_IPSEC_SA_MODE_TUNNEL) &&
121 (ipsec_xform->tunnel.type != RTE_SECURITY_IPSEC_TUNNEL_IPV4) &&
122 (ipsec_xform->tunnel.type != RTE_SECURITY_IPSEC_TUNNEL_IPV6))
125 if (crypto_xform->type == RTE_CRYPTO_SYM_XFORM_AEAD)
126 return ipsec_xform_aead_verify(ipsec_xform, crypto_xform);
128 if (ipsec_xform->proto == RTE_SECURITY_IPSEC_SA_PROTO_AH) {
129 if (ipsec_xform->direction == RTE_SECURITY_IPSEC_SA_DIR_INGRESS) {
131 auth_xform = crypto_xform;
132 cipher_xform = crypto_xform->next;
134 if (crypto_xform->type != RTE_CRYPTO_SYM_XFORM_AUTH)
137 if ((cipher_xform != NULL) && ((cipher_xform->type !=
138 RTE_CRYPTO_SYM_XFORM_CIPHER) ||
139 (cipher_xform->cipher.algo !=
140 RTE_CRYPTO_CIPHER_NULL)))
144 if (crypto_xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER) {
145 cipher_xform = crypto_xform;
146 auth_xform = crypto_xform->next;
148 if (auth_xform == NULL ||
149 cipher_xform->cipher.algo !=
150 RTE_CRYPTO_CIPHER_NULL)
152 } else if (crypto_xform->type ==
153 RTE_CRYPTO_SYM_XFORM_AUTH)
154 auth_xform = crypto_xform;
159 if (crypto_xform->next == NULL)
162 if (ipsec_xform->direction ==
163 RTE_SECURITY_IPSEC_SA_DIR_INGRESS) {
165 if (crypto_xform->type != RTE_CRYPTO_SYM_XFORM_AUTH ||
166 crypto_xform->next->type !=
167 RTE_CRYPTO_SYM_XFORM_CIPHER)
169 auth_xform = crypto_xform;
170 cipher_xform = crypto_xform->next;
173 if (crypto_xform->type != RTE_CRYPTO_SYM_XFORM_CIPHER ||
174 crypto_xform->next->type !=
175 RTE_CRYPTO_SYM_XFORM_AUTH)
177 cipher_xform = crypto_xform;
178 auth_xform = crypto_xform->next;
181 ret = ipsec_xform_cipher_verify(cipher_xform);
186 return ipsec_xform_auth_verify(auth_xform);
188 #endif /* __CNXK_IPSEC_H__ */