1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2018 Intel Corporation
11 #include "ipsec_sqn.h"
13 /* some helper structures */
15 struct rte_crypto_auth_xform *auth; /* non-NULL for CIPHER+AUTH chains */
16 struct rte_crypto_cipher_xform *cipher; /* paired with 'auth' above */
17 struct rte_crypto_aead_xform *aead; /* non-NULL for single-xform AEAD SAs */
/*
 * Maps the user-supplied rte_crypto_sym_xform chain onto the internal
 * role-based pointers: either a single AEAD xform, or an AUTH/CIPHER pair
 * whose required order depends on the SA direction encoded in 'type'.
 * NOTE(review): this listing is elided (original line numbers jump) -
 * the assignment of 'xfn' (presumably xf->next) and the error returns
 * are not visible; confirm against the full source.
 */
21 * helper routine, fills internal crypto_xform structure.
24 fill_crypto_xform(struct crypto_xform *xform, uint64_t type,
25 const struct rte_ipsec_sa_prm *prm)
27 struct rte_crypto_sym_xform *xf, *xfn;
29 memset(xform, 0, sizeof(*xform)); /* start with all role pointers NULL */
31 xf = prm->crypto_xform; /* head of the caller's xform list */
37 /* for AEAD just one xform required */
38 if (xf->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
41 xform->aead = &xf->aead;
43 * CIPHER+AUTH xforms are expected in strict order,
44 * depending on SA direction:
45 * inbound: AUTH+CIPHER
46 * outbound: CIPHER+AUTH
/* inbound SA: first xform must be AUTH, second CIPHER */
48 } else if ((type & RTE_IPSEC_SATP_DIR_MASK) == RTE_IPSEC_SATP_DIR_IB) {
50 /* wrong order or no cipher */
51 if (xfn == NULL || xf->type != RTE_CRYPTO_SYM_XFORM_AUTH ||
52 xfn->type != RTE_CRYPTO_SYM_XFORM_CIPHER)
55 xform->auth = &xf->auth;
56 xform->cipher = &xfn->cipher;
/* outbound SA: first xform must be CIPHER, second AUTH */
60 /* wrong order or no auth */
61 if (xfn == NULL || xf->type != RTE_CRYPTO_SYM_XFORM_CIPHER ||
62 xfn->type != RTE_CRYPTO_SYM_XFORM_AUTH)
65 xform->cipher = &xf->cipher;
66 xform->auth = &xfn->auth;
/*
 * Public accessor for the SA's SATP type bits (the flags assembled by
 * fill_sa_type()).
 * NOTE(review): body elided in this listing - presumably just returns
 * sa->type; confirm against the full source.
 */
72 uint64_t __rte_experimental
73 rte_ipsec_sa_type(const struct rte_ipsec_sa *sa)
/*
 * Computes the memory footprint of an SA object: sizeof(struct
 * rte_ipsec_sa) plus, for inbound SAs with a non-zero replay window,
 * space for the replay-sequence bucket array.
 * NOTE(review): elided listing - *nb_bucket is presumably set to the
 * bucket count 'n', and exceeding WINDOW_BUCKET_MAX returns an error;
 * confirm against the full source.
 */
79 ipsec_sa_size(uint32_t wsz, uint64_t type, uint32_t *nb_bucket)
/* a replay window is only meaningful for inbound SAs */
84 if (wsz != 0 && (type & RTE_IPSEC_SATP_DIR_MASK) ==
85 RTE_IPSEC_SATP_DIR_IB)
86 n = replay_num_bucket(wsz);
/* reject windows larger than the supported maximum */
88 if (n > WINDOW_BUCKET_MAX)
94 sz += sizeof(struct rte_ipsec_sa);
/*
 * Wipes the whole SA object. sa->size must hold the full object size
 * (recorded at init time), so the trailing replay-window area is
 * cleared along with the fixed-size struct.
 */
98 void __rte_experimental
99 rte_ipsec_sa_fini(struct rte_ipsec_sa *sa)
101 memset(sa, 0, sa->size);
/*
 * Derives the SATP type bit-mask for an SA from the security/ipsec xform
 * parameters: protocol (AH/ESP), direction (IB/OB), mode (tunnel v4/v6
 * or transport) and inner/next packet type (IPv4/IPv6).
 * NOTE(review): elided listing - the error returns for unsupported
 * combinations and the final '*type = tp' store are not visible here.
 */
105 fill_sa_type(const struct rte_ipsec_sa_prm *prm, uint64_t *type)
/* protocol: AH or ESP */
111 if (prm->ipsec_xform.proto == RTE_SECURITY_IPSEC_SA_PROTO_AH)
112 tp |= RTE_IPSEC_SATP_PROTO_AH;
113 else if (prm->ipsec_xform.proto == RTE_SECURITY_IPSEC_SA_PROTO_ESP)
114 tp |= RTE_IPSEC_SATP_PROTO_ESP;
/* direction: egress -> outbound, ingress -> inbound */
118 if (prm->ipsec_xform.direction == RTE_SECURITY_IPSEC_SA_DIR_EGRESS)
119 tp |= RTE_IPSEC_SATP_DIR_OB;
120 else if (prm->ipsec_xform.direction ==
121 RTE_SECURITY_IPSEC_SA_DIR_INGRESS)
122 tp |= RTE_IPSEC_SATP_DIR_IB;
/* mode: tunnel (with outer IPv4 or IPv6 header) ... */
126 if (prm->ipsec_xform.mode == RTE_SECURITY_IPSEC_SA_MODE_TUNNEL) {
127 if (prm->ipsec_xform.tunnel.type ==
128 RTE_SECURITY_IPSEC_TUNNEL_IPV4)
129 tp |= RTE_IPSEC_SATP_MODE_TUNLV4;
130 else if (prm->ipsec_xform.tunnel.type ==
131 RTE_SECURITY_IPSEC_TUNNEL_IPV6)
132 tp |= RTE_IPSEC_SATP_MODE_TUNLV6;
/* inner packet type, taken from the tunnel next-proto */
136 if (prm->tun.next_proto == IPPROTO_IPIP)
137 tp |= RTE_IPSEC_SATP_IPV4;
138 else if (prm->tun.next_proto == IPPROTO_IPV6)
139 tp |= RTE_IPSEC_SATP_IPV6;
/* ... or transport mode, packet type from the transport proto */
142 } else if (prm->ipsec_xform.mode ==
143 RTE_SECURITY_IPSEC_SA_MODE_TRANSPORT) {
144 tp |= RTE_IPSEC_SATP_MODE_TRANS;
145 if (prm->trs.proto == IPPROTO_IPIP)
146 tp |= RTE_IPSEC_SATP_IPV4;
147 else if (prm->trs.proto == IPPROTO_IPV6)
148 tp |= RTE_IPSEC_SATP_IPV6;
/*
 * Initializes the crypto-op offset/length template (sa->ctp) for
 * inbound ESP processing.
 */
159 esp_inb_init(struct rte_ipsec_sa *sa)
161 /* these params may differ with new algorithms support */
162 sa->ctp.auth.offset = 0; /* authenticate from the start of the ESP packet */
163 sa->ctp.auth.length = sa->icv_len - sa->sqh_len;
/* ciphered data starts right after the ESP header and IV */
164 sa->ctp.cipher.offset = sizeof(struct esp_hdr) + sa->iv_len;
/* NOTE(review): cipher.length folds icv_len and the offset into the
 * template - presumably the per-packet length is derived by subtracting
 * this from the packet length later; confirm against the datapath code. */
165 sa->ctp.cipher.length = sa->icv_len + sa->ctp.cipher.offset;
/*
 * Inbound tunnel-mode ESP init: records the expected inner next-proto.
 * NOTE(review): remainder elided - presumably delegates to
 * esp_inb_init(); confirm against the full source.
 */
169 esp_inb_tun_init(struct rte_ipsec_sa *sa, const struct rte_ipsec_sa_prm *prm)
171 sa->proto = prm->tun.next_proto;
/*
 * Initializes the crypto-op offset/length template (sa->ctp) for
 * outbound ESP processing.
 * hlen - length of the headers preceding the ESP header (tunnel header
 * template length for tunnel mode, 0 for transport mode - see callers).
 */
176 esp_outb_init(struct rte_ipsec_sa *sa, uint32_t hlen)
180 /* these params may differ with new algorithms support */
181 sa->ctp.auth.offset = hlen;
182 sa->ctp.auth.length = sizeof(struct esp_hdr) + sa->iv_len + sa->sqh_len;
/* AEAD case (aad_len != 0): header/IV are covered via AAD, so the
 * cipher region length template is zero */
183 if (sa->aad_len != 0) {
184 sa->ctp.cipher.offset = hlen + sizeof(struct esp_hdr) +
186 sa->ctp.cipher.length = 0;
/* NOTE(review): this branch uses sa->hdr_len rather than the 'hlen'
 * parameter - equal for the visible callers (tunnel passes hdr_len,
 * transport passes 0 with hdr_len presumably unset), but the asymmetry
 * with line 184 looks suspicious; confirm it is intentional. */
188 sa->ctp.cipher.offset = sa->hdr_len + sizeof(struct esp_hdr);
189 sa->ctp.cipher.length = sa->iv_len;
/*
 * Outbound tunnel-mode ESP init: copies the prepended tunnel header
 * template into the SA, then delegates to esp_outb_init() with the
 * template length as the pre-ESP header length.
 */
194 esp_outb_tun_init(struct rte_ipsec_sa *sa, const struct rte_ipsec_sa_prm *prm)
196 sa->proto = prm->tun.next_proto;
197 sa->hdr_len = prm->tun.hdr_len;
198 sa->hdr_l3_off = prm->tun.hdr_l3_off;
/* rte_ipsec_sa_init() has already checked hdr_len <= sizeof(sa->hdr) */
199 memcpy(sa->hdr, prm->tun.hdr, sa->hdr_len);
201 esp_outb_init(sa, sa->hdr_len);
/*
 * Fills the SA's algorithm-dependent parameters (ICV/IV/pad sizes,
 * SPI, salt, userdata) from the parsed crypto xforms, then dispatches
 * to the per-(direction x mode) init routine.
 * NOTE(review): elided listing - error returns (e.g. for unsupported
 * AEAD algos) and the final 'return 0' are not visible here.
 */
205 esp_sa_init(struct rte_ipsec_sa *sa, const struct rte_ipsec_sa_prm *prm,
206 const struct crypto_xform *cxf)
208 static const uint64_t msk = RTE_IPSEC_SATP_DIR_MASK |
209 RTE_IPSEC_SATP_MODE_MASK;
/* AEAD path: only AES-GCM is supported */
211 if (cxf->aead != NULL) {
213 if (cxf->aead->algo != RTE_CRYPTO_AEAD_AES_GCM)
215 sa->icv_len = cxf->aead->digest_length;
216 sa->iv_ofs = cxf->aead->iv.offset;
217 sa->iv_len = sizeof(uint64_t); /* 8-byte GCM IV */
218 sa->pad_align = IPSEC_PAD_AES_GCM;
/* CIPHER+AUTH path */
220 sa->icv_len = cxf->auth->digest_length;
221 sa->iv_ofs = cxf->cipher->iv.offset;
/* ESN SAs reserve 4 extra bytes (high sqn word) in the auth region -
 * presumably IS_ESN() checks sa->sqn_mask; see ipsec_sqn.h */
222 sa->sqh_len = IS_ESN(sa) ? sizeof(uint32_t) : 0;
223 if (cxf->cipher->algo == RTE_CRYPTO_CIPHER_NULL) {
224 sa->pad_align = IPSEC_PAD_NULL;
226 } else if (cxf->cipher->algo == RTE_CRYPTO_CIPHER_AES_CBC) {
227 sa->pad_align = IPSEC_PAD_AES_CBC;
228 sa->iv_len = IPSEC_MAX_IV_SIZE;
233 sa->udata = prm->userdata;
234 sa->spi = rte_cpu_to_be_32(prm->ipsec_xform.spi); /* stored big-endian */
235 sa->salt = prm->ipsec_xform.salt;
/* dispatch on direction + mode; tunnel v4/v6 cases share one routine */
237 switch (sa->type & msk) {
238 case (RTE_IPSEC_SATP_DIR_IB | RTE_IPSEC_SATP_MODE_TUNLV4):
239 case (RTE_IPSEC_SATP_DIR_IB | RTE_IPSEC_SATP_MODE_TUNLV6):
240 esp_inb_tun_init(sa, prm);
242 case (RTE_IPSEC_SATP_DIR_IB | RTE_IPSEC_SATP_MODE_TRANS):
245 case (RTE_IPSEC_SATP_DIR_OB | RTE_IPSEC_SATP_MODE_TUNLV4):
246 case (RTE_IPSEC_SATP_DIR_OB | RTE_IPSEC_SATP_MODE_TUNLV6):
247 esp_outb_tun_init(sa, prm);
249 case (RTE_IPSEC_SATP_DIR_OB | RTE_IPSEC_SATP_MODE_TRANS):
250 esp_outb_init(sa, 0); /* transport: no prepended tunnel header */
/*
 * Public helper: returns the number of bytes needed to hold an SA built
 * from 'prm' (fixed struct plus replay-window area), or a negative
 * errno for invalid parameters.
 * NOTE(review): elided listing - the NULL-prm check and the early
 * return on fill_sa_type() failure are not visible here.
 */
257 int __rte_experimental
258 rte_ipsec_sa_size(const struct rte_ipsec_sa_prm *prm)
267 /* determine SA type */
268 rc = fill_sa_type(prm, &type);
272 /* determine required size */
273 return ipsec_sa_size(prm->replay_win_sz, type, &nb);
/*
 * Public SA constructor: validates parameters, checks the caller's
 * buffer is large enough, fills crypto/ESP fields, and lays out the
 * inbound replay-window bookkeeping right after the fixed-size struct.
 * NOTE(review): elided listing - several early-return branches and the
 * success return value are not visible here.
 */
276 int __rte_experimental
277 rte_ipsec_sa_init(struct rte_ipsec_sa *sa, const struct rte_ipsec_sa_prm *prm,
283 struct crypto_xform cxf;
285 if (sa == NULL || prm == NULL)
288 /* determine SA type */
289 rc = fill_sa_type(prm, &type);
293 /* determine required size */
294 sz = ipsec_sa_size(prm->replay_win_sz, type, &nb);
297 else if (size < (uint32_t)sz)
300 /* only esp is supported right now */
301 if (prm->ipsec_xform.proto != RTE_SECURITY_IPSEC_SA_PROTO_ESP)
/* tunnel header template must fit into the fixed sa->hdr buffer */
304 if (prm->ipsec_xform.mode == RTE_SECURITY_IPSEC_SA_MODE_TUNNEL &&
305 prm->tun.hdr_len > sizeof(sa->hdr))
308 rc = fill_crypto_xform(&cxf, type, prm);
318 /* check for ESN flag */
/* 32-bit sequence-number space unless ESN is enabled */
319 sa->sqn_mask = (prm->ipsec_xform.options.esn == 0) ?
320 UINT32_MAX : UINT64_MAX;
322 rc = esp_sa_init(sa, prm, &cxf);
/* on failure, wipe the partially-initialized object */
324 rte_ipsec_sa_fini(sa);
326 /* fill replay window related fields */
328 sa->replay.win_sz = prm->replay_win_sz;
329 sa->replay.nb_bucket = nb;
/* assumes nb is a power of two so mask-based indexing works -
 * TODO confirm against replay_num_bucket() in ipsec_sqn.h */
330 sa->replay.bucket_index_mask = sa->replay.nb_bucket - 1;
/* replay sqn area lives immediately after the SA struct */
331 sa->sqn.inb = (struct replay_sqn *)(sa + 1);
/*
 * Selects the packet-processing function pointers for the given
 * session/SA combination. Starts by zeroing the output struct so that
 * unsupported combinations leave NULL handlers.
 * NOTE(review): the remainder of this function runs past the end of
 * this listing.
 */
338 ipsec_sa_pkt_func_select(const struct rte_ipsec_session *ss,
339 const struct rte_ipsec_sa *sa, struct rte_ipsec_sa_pkt_func *pf)
346 pf[0] = (struct rte_ipsec_sa_pkt_func) { 0 };