drivers/crypto/cnxk/cn9k_ipsec.c
/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(C) 2021 Marvell.
 */

#include <cryptodev_pmd.h>
#include <rte_ip.h>
#include <rte_security.h>
#include <rte_security_driver.h>

#include "cnxk_cryptodev.h"
#include "cnxk_cryptodev_ops.h"
#include "cnxk_ipsec.h"
#include "cnxk_security.h"
#include "cn9k_ipsec.h"

#include "roc_api.h"

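/*
 * Flush an SA context to the CPT hardware: build a WRITE_IPSEC instruction,
 * submit it on the queue pair's LMT line and poll the result memory until
 * the microcode reports completion or the request times out.
 */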
static inline int
cn9k_cpt_enq_sa_write(struct cn9k_ipsec_sa *sa, struct cnxk_cpt_qp *qp,
		      uint8_t opcode, size_t ctx_len)
{
	struct roc_cpt *roc_cpt = qp->lf.roc_cpt;
	uint64_t lmtline = qp->lmtline.lmt_base;
	uint64_t io_addr = qp->lmtline.io_addr;
	uint64_t lmt_status, time_out;
	struct cpt_cn9k_res_s *res;
	struct cpt_inst_s inst;
	uint64_t *mdata;
	int ret = 0;

	if (unlikely(rte_mempool_get(qp->meta_info.pool, (void **)&mdata) < 0))
		return -ENOMEM;

	res = (struct cpt_cn9k_res_s *)RTE_PTR_ALIGN(mdata, 16);
	res->compcode = CPT_COMP_NOT_DONE;

	inst.w4.s.opcode_major = opcode;
	inst.w4.s.opcode_minor = ctx_len >> 3;
	inst.w4.s.param1 = 0;
	inst.w4.s.param2 = 0;
	inst.w4.s.dlen = ctx_len;
	inst.dptr = rte_mempool_virt2iova(sa);
	inst.rptr = 0;
	inst.w7.s.cptr = rte_mempool_virt2iova(sa);
	inst.w7.s.egrp = roc_cpt->eng_grp[CPT_ENG_TYPE_IE];

	inst.w0.u64 = 0;
	inst.w2.u64 = 0;
	inst.w3.u64 = 0;
	inst.res_addr = rte_mempool_virt2iova(res);

	rte_io_wmb();

	do {
		/* Copy CPT command to LMTLINE */
		roc_lmt_mov64((void *)lmtline, &inst);
		lmt_status = roc_lmt_submit_ldeor(io_addr);
	} while (lmt_status == 0);

	time_out = rte_get_timer_cycles() +
		   DEFAULT_COMMAND_TIMEOUT * rte_get_timer_hz();

	while (res->compcode == CPT_COMP_NOT_DONE) {
		if (rte_get_timer_cycles() > time_out) {
			rte_mempool_put(qp->meta_info.pool, mdata);
			plt_err("Request timed out");
			return -ETIMEDOUT;
		}
		rte_io_rmb();
	}

	if (unlikely(res->compcode != CPT_COMP_GOOD)) {
		ret = res->compcode;
		switch (ret) {
		case CPT_COMP_INSTERR:
			plt_err("Request failed with instruction error");
			break;
		case CPT_COMP_FAULT:
			plt_err("Request failed with DMA fault");
			break;
		case CPT_COMP_HWERR:
			plt_err("Request failed with hardware error");
			break;
		default:
			plt_err("Request failed with unknown hardware "
				"completion code : 0x%x",
				ret);
		}
		ret = -EINVAL;
		goto mempool_put;
	}

	if (unlikely(res->uc_compcode != ROC_IE_ON_UCC_SUCCESS)) {
		ret = res->uc_compcode;
		switch (ret) {
		case ROC_IE_ON_AUTH_UNSUPPORTED:
			plt_err("Invalid auth type");
			break;
		case ROC_IE_ON_ENCRYPT_UNSUPPORTED:
			plt_err("Invalid encrypt type");
			break;
		default:
			plt_err("Request failed with unknown microcode "
				"completion code : 0x%x",
				ret);
		}
		ret = -ENOTSUP;
	}

mempool_put:
	rte_mempool_put(qp->meta_info.pool, mdata);
	return ret;
}

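/*
 * Translate the IPsec and crypto transforms into the SA control word used by
 * the ON IPsec microcode: direction, mode, protocol, cipher/auth types, AES
 * key length, ESN and UDP encapsulation flags, and the (big-endian) SPI.
 */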
static inline int
ipsec_sa_ctl_set(struct rte_security_ipsec_xform *ipsec,
		 struct rte_crypto_sym_xform *crypto_xform,
		 struct roc_ie_on_sa_ctl *ctl)
{
	struct rte_crypto_sym_xform *cipher_xform, *auth_xform;
	int aes_key_len = 0;

	if (crypto_xform->type == RTE_CRYPTO_SYM_XFORM_AUTH) {
		auth_xform = crypto_xform;
		cipher_xform = crypto_xform->next;
	} else {
		cipher_xform = crypto_xform;
		auth_xform = crypto_xform->next;
	}

	if (ipsec->direction == RTE_SECURITY_IPSEC_SA_DIR_EGRESS)
		ctl->direction = ROC_IE_SA_DIR_OUTBOUND;
	else
		ctl->direction = ROC_IE_SA_DIR_INBOUND;

	if (ipsec->mode == RTE_SECURITY_IPSEC_SA_MODE_TUNNEL) {
		if (ipsec->tunnel.type == RTE_SECURITY_IPSEC_TUNNEL_IPV4)
			ctl->outer_ip_ver = ROC_IE_SA_IP_VERSION_4;
		else if (ipsec->tunnel.type == RTE_SECURITY_IPSEC_TUNNEL_IPV6)
			ctl->outer_ip_ver = ROC_IE_SA_IP_VERSION_6;
		else
			return -EINVAL;
	}

	if (ipsec->mode == RTE_SECURITY_IPSEC_SA_MODE_TRANSPORT) {
		ctl->ipsec_mode = ROC_IE_SA_MODE_TRANSPORT;
		ctl->outer_ip_ver = ROC_IE_SA_IP_VERSION_4;
	} else if (ipsec->mode == RTE_SECURITY_IPSEC_SA_MODE_TUNNEL)
		ctl->ipsec_mode = ROC_IE_SA_MODE_TUNNEL;
	else
		return -EINVAL;

	if (ipsec->proto == RTE_SECURITY_IPSEC_SA_PROTO_AH)
		ctl->ipsec_proto = ROC_IE_SA_PROTOCOL_AH;
	else if (ipsec->proto == RTE_SECURITY_IPSEC_SA_PROTO_ESP)
		ctl->ipsec_proto = ROC_IE_SA_PROTOCOL_ESP;
	else
		return -EINVAL;

	if (crypto_xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
		switch (crypto_xform->aead.algo) {
		case RTE_CRYPTO_AEAD_AES_GCM:
			ctl->enc_type = ROC_IE_ON_SA_ENC_AES_GCM;
			aes_key_len = crypto_xform->aead.key.length;
			break;
		default:
			plt_err("Unsupported AEAD algorithm");
			return -ENOTSUP;
		}
	} else {
		if (cipher_xform != NULL) {
			switch (cipher_xform->cipher.algo) {
			case RTE_CRYPTO_CIPHER_NULL:
				ctl->enc_type = ROC_IE_ON_SA_ENC_NULL;
				break;
			case RTE_CRYPTO_CIPHER_AES_CBC:
				ctl->enc_type = ROC_IE_ON_SA_ENC_AES_CBC;
				aes_key_len = cipher_xform->cipher.key.length;
				break;
			case RTE_CRYPTO_CIPHER_AES_CTR:
				ctl->enc_type = ROC_IE_ON_SA_ENC_AES_CTR;
				aes_key_len = cipher_xform->cipher.key.length;
				break;
			default:
				plt_err("Unsupported cipher algorithm");
				return -ENOTSUP;
			}
		}

		switch (auth_xform->auth.algo) {
		case RTE_CRYPTO_AUTH_NULL:
			ctl->auth_type = ROC_IE_ON_SA_AUTH_NULL;
			break;
		case RTE_CRYPTO_AUTH_MD5_HMAC:
			ctl->auth_type = ROC_IE_ON_SA_AUTH_MD5;
			break;
		case RTE_CRYPTO_AUTH_SHA1_HMAC:
			ctl->auth_type = ROC_IE_ON_SA_AUTH_SHA1;
			break;
		case RTE_CRYPTO_AUTH_SHA224_HMAC:
			ctl->auth_type = ROC_IE_ON_SA_AUTH_SHA2_224;
			break;
		case RTE_CRYPTO_AUTH_SHA256_HMAC:
			ctl->auth_type = ROC_IE_ON_SA_AUTH_SHA2_256;
			break;
		case RTE_CRYPTO_AUTH_SHA384_HMAC:
			ctl->auth_type = ROC_IE_ON_SA_AUTH_SHA2_384;
			break;
		case RTE_CRYPTO_AUTH_SHA512_HMAC:
			ctl->auth_type = ROC_IE_ON_SA_AUTH_SHA2_512;
			break;
		case RTE_CRYPTO_AUTH_AES_GMAC:
			ctl->auth_type = ROC_IE_ON_SA_AUTH_AES_GMAC;
			aes_key_len = auth_xform->auth.key.length;
			break;
		case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
			ctl->auth_type = ROC_IE_ON_SA_AUTH_AES_XCBC_128;
			break;
		default:
			plt_err("Unsupported auth algorithm");
			return -ENOTSUP;
		}
	}

	/* Set AES key length */
	if (ctl->enc_type == ROC_IE_ON_SA_ENC_AES_CBC ||
	    ctl->enc_type == ROC_IE_ON_SA_ENC_AES_CCM ||
	    ctl->enc_type == ROC_IE_ON_SA_ENC_AES_CTR ||
	    ctl->enc_type == ROC_IE_ON_SA_ENC_AES_GCM ||
	    ctl->auth_type == ROC_IE_ON_SA_AUTH_AES_GMAC) {
		switch (aes_key_len) {
		case 16:
			ctl->aes_key_len = ROC_IE_SA_AES_KEY_LEN_128;
			break;
		case 24:
			ctl->aes_key_len = ROC_IE_SA_AES_KEY_LEN_192;
			break;
		case 32:
			ctl->aes_key_len = ROC_IE_SA_AES_KEY_LEN_256;
			break;
		default:
			plt_err("Invalid AES key length");
			return -EINVAL;
		}
	}

	if (ipsec->options.esn)
		ctl->esn_en = 1;

	if (ipsec->options.udp_encap == 1)
		ctl->encap_type = ROC_IE_ON_SA_ENCAP_UDP;

	ctl->copy_df = ipsec->options.copy_df;

	ctl->spi = rte_cpu_to_be_32(ipsec->spi);

	rte_io_wmb();

	ctl->valid = 1;

	return 0;
}

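/*
 * Fill the SA fields shared by inbound and outbound directions: control
 * word, initial ESN state, the salt/nonce for AES-GCM and AES-GMAC, and the
 * cipher key.
 */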
static inline int
fill_ipsec_common_sa(struct rte_security_ipsec_xform *ipsec,
		     struct rte_crypto_sym_xform *crypto_xform,
		     struct roc_ie_on_common_sa *common_sa)
{
	struct rte_crypto_sym_xform *cipher_xform, *auth_xform;
	const uint8_t *cipher_key;
	int cipher_key_len = 0;
	int ret;

	ret = ipsec_sa_ctl_set(ipsec, crypto_xform, &common_sa->ctl);
	if (ret)
		return ret;

	if (ipsec->esn.value) {
		common_sa->esn_low = ipsec->esn.low;
		common_sa->esn_hi = ipsec->esn.hi;
	}

	if (crypto_xform->type == RTE_CRYPTO_SYM_XFORM_AUTH) {
		auth_xform = crypto_xform;
		cipher_xform = crypto_xform->next;
	} else {
		cipher_xform = crypto_xform;
		auth_xform = crypto_xform->next;
	}

	if (crypto_xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
		if (crypto_xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
			memcpy(common_sa->iv.gcm.nonce, &ipsec->salt, 4);
		cipher_key = crypto_xform->aead.key.data;
		cipher_key_len = crypto_xform->aead.key.length;
	} else {
		if (cipher_xform) {
			cipher_key = cipher_xform->cipher.key.data;
			cipher_key_len = cipher_xform->cipher.key.length;
		}

		if (auth_xform->auth.algo == RTE_CRYPTO_AUTH_AES_GMAC) {
			memcpy(common_sa->iv.gcm.nonce, &ipsec->salt, 4);
			cipher_key = auth_xform->auth.key.data;
			cipher_key_len = auth_xform->auth.key.length;
		}
	}

	if (cipher_key_len != 0)
		memcpy(common_sa->cipher_key, cipher_key, cipher_key_len);

	return 0;
}

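/*
 * Create an outbound (egress) SA: fill the common SA fields, build the outer
 * IP/UDP header template, copy the HMAC key, prepare the per-SA CPT
 * instruction template and flush the SA context to hardware.
 */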
static int
cn9k_ipsec_outb_sa_create(struct cnxk_cpt_qp *qp,
			  struct rte_security_ipsec_xform *ipsec,
			  struct rte_crypto_sym_xform *crypto_xform,
			  struct rte_security_session *sec_sess)
{
	struct roc_ie_on_ip_template *template = NULL;
	struct roc_cpt *roc_cpt = qp->lf.roc_cpt;
	struct rte_crypto_sym_xform *auth_xform;
	union roc_on_ipsec_outb_param1 param1;
	struct cnxk_cpt_inst_tmpl *inst_tmpl;
	struct roc_ie_on_outb_sa *out_sa;
	struct cn9k_sec_session *sess;
	struct roc_ie_on_sa_ctl *ctl;
	struct cn9k_ipsec_sa *sa;
	struct rte_ipv6_hdr *ip6;
	struct rte_ipv4_hdr *ip4;
	const uint8_t *auth_key;
	union cpt_inst_w4 w4;
	union cpt_inst_w7 w7;
	int auth_key_len = 0;
	size_t ctx_len;
	int ret;

	sess = get_sec_session_private_data(sec_sess);
	sa = &sess->sa;
	out_sa = &sa->out_sa;
	ctl = &out_sa->common_sa.ctl;

	memset(sa, 0, sizeof(struct cn9k_ipsec_sa));

	/* Initialize lookaside IPsec private data */
	sa->dir = RTE_SECURITY_IPSEC_SA_DIR_EGRESS;
	/* Start ip id from 1 */
	sa->ip_id = 1;
	sa->seq_lo = 1;
	sa->seq_hi = 0;

	if (ipsec->esn.value)
		sa->esn = ipsec->esn.value;

	if (crypto_xform->type == RTE_CRYPTO_SYM_XFORM_AUTH)
		auth_xform = crypto_xform;
	else
		auth_xform = crypto_xform->next;

	ret = fill_ipsec_common_sa(ipsec, crypto_xform, &out_sa->common_sa);
	if (ret)
		return ret;

	ret = cnxk_ipsec_outb_rlens_get(&sa->rlens, ipsec, crypto_xform);
	if (ret)
		return ret;

	if (ctl->enc_type == ROC_IE_ON_SA_ENC_AES_GCM ||
	    ctl->auth_type == ROC_IE_ON_SA_AUTH_NULL ||
	    ctl->auth_type == ROC_IE_ON_SA_AUTH_AES_GMAC) {
		template = &out_sa->aes_gcm.template;
		ctx_len = offsetof(struct roc_ie_on_outb_sa, aes_gcm.template);
	} else {
		switch (ctl->auth_type) {
		case ROC_IE_ON_SA_AUTH_SHA1:
			template = &out_sa->sha1.template;
			ctx_len = offsetof(struct roc_ie_on_outb_sa,
					   sha1.template);
			break;
		case ROC_IE_ON_SA_AUTH_SHA2_256:
		case ROC_IE_ON_SA_AUTH_SHA2_384:
		case ROC_IE_ON_SA_AUTH_SHA2_512:
			template = &out_sa->sha2.template;
			ctx_len = offsetof(struct roc_ie_on_outb_sa,
					   sha2.template);
			break;
		case ROC_IE_ON_SA_AUTH_AES_XCBC_128:
			template = &out_sa->aes_xcbc.template;
			ctx_len = offsetof(struct roc_ie_on_outb_sa,
					   aes_xcbc.template);
			break;
		default:
			plt_err("Unsupported auth algorithm");
			return -EINVAL;
		}
	}

	ip4 = (struct rte_ipv4_hdr *)&template->ip4.ipv4_hdr;
	if (ipsec->options.udp_encap) {
		ip4->next_proto_id = IPPROTO_UDP;
		template->ip4.udp_src = rte_be_to_cpu_16(4500);
		template->ip4.udp_dst = rte_be_to_cpu_16(4500);
	} else {
		if (ipsec->proto == RTE_SECURITY_IPSEC_SA_PROTO_AH)
			ip4->next_proto_id = IPPROTO_AH;
		else
			ip4->next_proto_id = IPPROTO_ESP;
	}

	if (ipsec->mode == RTE_SECURITY_IPSEC_SA_MODE_TUNNEL) {
		if (ipsec->tunnel.type == RTE_SECURITY_IPSEC_TUNNEL_IPV4) {
			uint16_t frag_off = 0;
			ctx_len += sizeof(template->ip4);

			ip4->version_ihl = RTE_IPV4_VHL_DEF;
			ip4->time_to_live = ipsec->tunnel.ipv4.ttl;
			ip4->type_of_service |= (ipsec->tunnel.ipv4.dscp << 2);
			if (ipsec->tunnel.ipv4.df)
				frag_off |= RTE_IPV4_HDR_DF_FLAG;
			ip4->fragment_offset = rte_cpu_to_be_16(frag_off);

			memcpy(&ip4->src_addr, &ipsec->tunnel.ipv4.src_ip,
			       sizeof(struct in_addr));
			memcpy(&ip4->dst_addr, &ipsec->tunnel.ipv4.dst_ip,
			       sizeof(struct in_addr));
		} else if (ipsec->tunnel.type ==
			   RTE_SECURITY_IPSEC_TUNNEL_IPV6) {
			ctx_len += sizeof(template->ip6);

			ip6 = (struct rte_ipv6_hdr *)&template->ip6.ipv6_hdr;
			if (ipsec->options.udp_encap) {
				ip6->proto = IPPROTO_UDP;
				template->ip6.udp_src = rte_be_to_cpu_16(4500);
				template->ip6.udp_dst = rte_be_to_cpu_16(4500);
			} else {
				ip6->proto = (ipsec->proto ==
					      RTE_SECURITY_IPSEC_SA_PROTO_ESP) ?
						     IPPROTO_ESP :
						     IPPROTO_AH;
			}
			ip6->vtc_flow =
				rte_cpu_to_be_32(0x60000000 |
						 ((ipsec->tunnel.ipv6.dscp
						   << RTE_IPV6_HDR_TC_SHIFT) &
						  RTE_IPV6_HDR_TC_MASK) |
						 ((ipsec->tunnel.ipv6.flabel
						   << RTE_IPV6_HDR_FL_SHIFT) &
						  RTE_IPV6_HDR_FL_MASK));
			ip6->hop_limits = ipsec->tunnel.ipv6.hlimit;
			memcpy(&ip6->src_addr, &ipsec->tunnel.ipv6.src_addr,
			       sizeof(struct in6_addr));
			memcpy(&ip6->dst_addr, &ipsec->tunnel.ipv6.dst_addr,
			       sizeof(struct in6_addr));
		}
	} else
		ctx_len += sizeof(template->ip4);

	/* SA context length is passed to the microcode in 8-byte words */
	ctx_len = RTE_ALIGN_CEIL(ctx_len, 8);

	if (crypto_xform->type != RTE_CRYPTO_SYM_XFORM_AEAD) {
		auth_key = auth_xform->auth.key.data;
		auth_key_len = auth_xform->auth.key.length;

		switch (auth_xform->auth.algo) {
		case RTE_CRYPTO_AUTH_AES_GMAC:
		case RTE_CRYPTO_AUTH_NULL:
			break;
		case RTE_CRYPTO_AUTH_SHA1_HMAC:
			memcpy(out_sa->sha1.hmac_key, auth_key, auth_key_len);
			break;
		case RTE_CRYPTO_AUTH_SHA256_HMAC:
		case RTE_CRYPTO_AUTH_SHA384_HMAC:
		case RTE_CRYPTO_AUTH_SHA512_HMAC:
			memcpy(out_sa->sha2.hmac_key, auth_key, auth_key_len);
			break;
		case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
			memcpy(out_sa->aes_xcbc.key, auth_key, auth_key_len);
			break;
		default:
			plt_err("Unsupported auth algorithm %u",
				auth_xform->auth.algo);
			return -ENOTSUP;
		}
	}

	inst_tmpl = &sa->inst;

	w4.u64 = 0;
	w4.s.opcode_major = ROC_IE_ON_MAJOR_OP_PROCESS_OUTBOUND_IPSEC;
	w4.s.opcode_minor = ctx_len >> 3;

	param1.u16 = 0;
	param1.s.ikev2 = 1;

	sa->custom_hdr_len = sizeof(struct roc_ie_on_outb_hdr) -
			     ROC_IE_ON_MAX_IV_LEN;

#ifdef LA_IPSEC_DEBUG
	/* Use IV from application in debug mode */
	if (ipsec->options.iv_gen_disable == 1) {
		param1.s.per_pkt_iv = ROC_IE_ON_IV_SRC_FROM_DPTR;
		sa->custom_hdr_len = sizeof(struct roc_ie_on_outb_hdr);

		if (crypto_xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
			sa->cipher_iv_off = crypto_xform->aead.iv.offset;
			sa->cipher_iv_len = crypto_xform->aead.iv.length;
		} else if (crypto_xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER) {
			sa->cipher_iv_off = crypto_xform->cipher.iv.offset;
			sa->cipher_iv_len = crypto_xform->cipher.iv.length;
		} else {
			sa->cipher_iv_off = crypto_xform->auth.iv.offset;
			sa->cipher_iv_len = crypto_xform->auth.iv.length;
		}
	}
#else
	if (ipsec->options.iv_gen_disable != 0) {
		plt_err("Application provided IV is not supported");
		return -ENOTSUP;
	}
#endif

	w4.s.param1 = param1.u16;

	inst_tmpl->w4 = w4.u64;

	w7.u64 = 0;
	w7.s.egrp = roc_cpt->eng_grp[CPT_ENG_TYPE_IE];
	w7.s.cptr = rte_mempool_virt2iova(out_sa);
	inst_tmpl->w7 = w7.u64;

	return cn9k_cpt_enq_sa_write(
		sa, qp, ROC_IE_ON_MAJOR_OP_WRITE_IPSEC_OUTBOUND, ctx_len);
}

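/*
 * Create an inbound (ingress) SA: fill the common SA fields, copy the HMAC
 * key, set up anti-replay state and the per-SA CPT instruction template,
 * then flush the SA context to hardware.
 */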
static int
cn9k_ipsec_inb_sa_create(struct cnxk_cpt_qp *qp,
			 struct rte_security_ipsec_xform *ipsec,
			 struct rte_crypto_sym_xform *crypto_xform,
			 struct rte_security_session *sec_sess)
{
	struct rte_crypto_sym_xform *auth_xform = crypto_xform;
	struct roc_cpt *roc_cpt = qp->lf.roc_cpt;
	union roc_on_ipsec_inb_param2 param2;
	struct cnxk_cpt_inst_tmpl *inst_tmpl;
	struct roc_ie_on_inb_sa *in_sa;
	struct cn9k_sec_session *sess;
	struct cn9k_ipsec_sa *sa;
	const uint8_t *auth_key;
	union cpt_inst_w4 w4;
	union cpt_inst_w7 w7;
	int auth_key_len = 0;
	size_t ctx_len = 0;
	int ret;

	sess = get_sec_session_private_data(sec_sess);
	sa = &sess->sa;
	in_sa = &sa->in_sa;

	memset(sa, 0, sizeof(struct cn9k_ipsec_sa));

	sa->dir = RTE_SECURITY_IPSEC_SA_DIR_INGRESS;
	sa->replay_win_sz = ipsec->replay_win_sz;

	ret = fill_ipsec_common_sa(ipsec, crypto_xform, &in_sa->common_sa);
	if (ret)
		return ret;

	if (crypto_xform->type == RTE_CRYPTO_SYM_XFORM_AEAD ||
	    auth_xform->auth.algo == RTE_CRYPTO_AUTH_NULL ||
	    auth_xform->auth.algo == RTE_CRYPTO_AUTH_AES_GMAC) {
		ctx_len = offsetof(struct roc_ie_on_inb_sa,
				   sha1_or_gcm.hmac_key[0]);
	} else {
		auth_key = auth_xform->auth.key.data;
		auth_key_len = auth_xform->auth.key.length;

		switch (auth_xform->auth.algo) {
		case RTE_CRYPTO_AUTH_NULL:
			break;
		case RTE_CRYPTO_AUTH_SHA1_HMAC:
			memcpy(in_sa->sha1_or_gcm.hmac_key, auth_key,
			       auth_key_len);
			ctx_len = offsetof(struct roc_ie_on_inb_sa,
					   sha1_or_gcm.selector);
			break;
		case RTE_CRYPTO_AUTH_SHA256_HMAC:
		case RTE_CRYPTO_AUTH_SHA384_HMAC:
		case RTE_CRYPTO_AUTH_SHA512_HMAC:
			memcpy(in_sa->sha2.hmac_key, auth_key, auth_key_len);
			ctx_len = offsetof(struct roc_ie_on_inb_sa,
					   sha2.selector);
			break;
		case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
			memcpy(in_sa->aes_xcbc.key, auth_key, auth_key_len);
			ctx_len = offsetof(struct roc_ie_on_inb_sa,
					   aes_xcbc.selector);
			break;
		default:
			plt_err("Unsupported auth algorithm %u",
				auth_xform->auth.algo);
			return -ENOTSUP;
		}
	}

	inst_tmpl = &sa->inst;

	w4.u64 = 0;
	w4.s.opcode_major = ROC_IE_ON_MAJOR_OP_PROCESS_INBOUND_IPSEC;
	w4.s.opcode_minor = ctx_len >> 3;

	param2.u16 = 0;
	param2.s.ikev2 = 1;
	w4.s.param2 = param2.u16;

	inst_tmpl->w4 = w4.u64;

	w7.u64 = 0;
	w7.s.egrp = roc_cpt->eng_grp[CPT_ENG_TYPE_IE];
	w7.s.cptr = rte_mempool_virt2iova(in_sa);
	inst_tmpl->w7 = w7.u64;

	if (sa->replay_win_sz) {
		if (sa->replay_win_sz > CNXK_ON_AR_WIN_SIZE_MAX) {
			plt_err("Replay window size:%u is not supported",
				sa->replay_win_sz);
			return -ENOTSUP;
		}

		/* Set window bottom to 1, base and top to size of window */
		sa->ar.winb = 1;
		sa->ar.wint = sa->replay_win_sz;
		sa->ar.base = sa->replay_win_sz;

		in_sa->common_sa.esn_low = sa->seq_lo;
		in_sa->common_sa.esn_hi = sa->seq_hi;
	}

	return cn9k_cpt_enq_sa_write(
		sa, qp, ROC_IE_ON_MAJOR_OP_WRITE_IPSEC_INBOUND, ctx_len);
}

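/*
 * Reject combinations that the cn9k IPsec microcode does not support: SA
 * lifetime limits and selected transport mode cipher/auth pairings.
 */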
static inline int
cn9k_ipsec_xform_verify(struct rte_security_ipsec_xform *ipsec,
			struct rte_crypto_sym_xform *crypto)
{
	if (ipsec->life.bytes_hard_limit != 0 ||
	    ipsec->life.bytes_soft_limit != 0 ||
	    ipsec->life.packets_hard_limit != 0 ||
	    ipsec->life.packets_soft_limit != 0)
		return -ENOTSUP;

	if (ipsec->mode == RTE_SECURITY_IPSEC_SA_MODE_TRANSPORT &&
	    ipsec->proto != RTE_SECURITY_IPSEC_SA_PROTO_AH) {
		enum rte_crypto_sym_xform_type type = crypto->type;

		if (type == RTE_CRYPTO_SYM_XFORM_AEAD) {
			if ((crypto->aead.algo == RTE_CRYPTO_AEAD_AES_GCM) &&
			    (crypto->aead.key.length == 32)) {
				plt_err("Transport mode AES-256-GCM is not supported");
				return -ENOTSUP;
			}
		} else {
			struct rte_crypto_cipher_xform *cipher;
			struct rte_crypto_auth_xform *auth;

			if (crypto->type == RTE_CRYPTO_SYM_XFORM_CIPHER) {
				cipher = &crypto->cipher;
				auth = &crypto->next->auth;
			} else {
				cipher = &crypto->next->cipher;
				auth = &crypto->auth;
			}

			if ((cipher->algo == RTE_CRYPTO_CIPHER_AES_CBC) &&
			    (auth->algo == RTE_CRYPTO_AUTH_SHA256_HMAC)) {
				plt_err("Transport mode AES-CBC SHA2 HMAC 256 is not supported");
				return -ENOTSUP;
			}

			if ((cipher->algo == RTE_CRYPTO_CIPHER_AES_CBC) &&
			    (auth->algo == RTE_CRYPTO_AUTH_SHA384_HMAC)) {
				plt_err("Transport mode AES-CBC SHA2 HMAC 384 is not supported");
				return -ENOTSUP;
			}

			if ((cipher->algo == RTE_CRYPTO_CIPHER_AES_CBC) &&
			    (auth->algo == RTE_CRYPTO_AUTH_SHA512_HMAC)) {
				plt_err("Transport mode AES-CBC SHA2 HMAC 512 is not supported");
				return -ENOTSUP;
			}

			if ((cipher->algo == RTE_CRYPTO_CIPHER_AES_CBC) &&
			    (auth->algo == RTE_CRYPTO_AUTH_AES_XCBC_MAC)) {
				plt_err("Transport mode AES-CBC AES-XCBC is not supported");
				return -ENOTSUP;
			}
		}
	}

	return 0;
}

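/*
 * Validate the transforms and create an inbound or outbound SA depending on
 * the SA direction. Queue pair 0 must already be set up, as the SA write is
 * issued through it.
 */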
static int
cn9k_ipsec_session_create(void *dev,
			  struct rte_security_ipsec_xform *ipsec_xform,
			  struct rte_crypto_sym_xform *crypto_xform,
			  struct rte_security_session *sess)
{
	struct rte_cryptodev *crypto_dev = dev;
	struct cnxk_cpt_qp *qp;
	int ret;

	qp = crypto_dev->data->queue_pairs[0];
	if (qp == NULL) {
		plt_err("CPT queue pairs need to be setup for creating security"
			" session");
		return -EPERM;
	}

	ret = cnxk_ipsec_xform_verify(ipsec_xform, crypto_xform);
	if (ret)
		return ret;

	ret = cn9k_ipsec_xform_verify(ipsec_xform, crypto_xform);
	if (ret)
		return ret;

	if (ipsec_xform->direction == RTE_SECURITY_IPSEC_SA_DIR_INGRESS)
		return cn9k_ipsec_inb_sa_create(qp, ipsec_xform, crypto_xform,
						sess);
	else
		return cn9k_ipsec_outb_sa_create(qp, ipsec_xform, crypto_xform,
						 sess);
}

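/* rte_security session create handler (lookaside protocol IPsec only). */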
static int
cn9k_sec_session_create(void *device, struct rte_security_session_conf *conf,
			struct rte_security_session *sess,
			struct rte_mempool *mempool)
{
	struct cn9k_sec_session *priv;
	int ret;

	if (conf->action_type != RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL)
		return -EINVAL;

	if (rte_mempool_get(mempool, (void **)&priv)) {
		plt_err("Could not allocate security session private data");
		return -ENOMEM;
	}

	memset(priv, 0, sizeof(*priv));

	set_sec_session_private_data(sess, priv);

	if (conf->protocol != RTE_SECURITY_PROTOCOL_IPSEC) {
		ret = -ENOTSUP;
		goto mempool_put;
	}

	ret = cn9k_ipsec_session_create(device, &conf->ipsec,
					conf->crypto_xform, sess);
	if (ret)
		goto mempool_put;

	return 0;

mempool_put:
	rte_mempool_put(mempool, priv);
	set_sec_session_private_data(sess, NULL);
	return ret;
}

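/*
 * Invalidate the SA control word so hardware stops treating the context as
 * valid, then return the session private data to its mempool.
 */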
static int
cn9k_sec_session_destroy(void *device __rte_unused,
			 struct rte_security_session *sess)
{
	struct roc_ie_on_outb_sa *out_sa;
	struct cn9k_sec_session *priv;
	struct rte_mempool *sess_mp;
	struct roc_ie_on_sa_ctl *ctl;
	struct cn9k_ipsec_sa *sa;

	priv = get_sec_session_private_data(sess);
	if (priv == NULL)
		return 0;

	sa = &priv->sa;
	out_sa = &sa->out_sa;

	ctl = &out_sa->common_sa.ctl;
	ctl->valid = 0;

	rte_io_wmb();

	sess_mp = rte_mempool_from_obj(priv);

	memset(priv, 0, sizeof(*priv));

	set_sec_session_private_data(sess, NULL);
	rte_mempool_put(sess_mp, priv);

	return 0;
}

static unsigned int
cn9k_sec_session_get_size(void *device __rte_unused)
{
	return sizeof(struct cn9k_sec_session);
}

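/* Session update is supported only for outbound SAs; the SA is rebuilt in place. */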
static int
cn9k_sec_session_update(void *device, struct rte_security_session *sec_sess,
			struct rte_security_session_conf *conf)
{
	struct rte_cryptodev *crypto_dev = device;
	struct cnxk_cpt_qp *qp;
	int ret;

	qp = crypto_dev->data->queue_pairs[0];
	if (qp == NULL) {
		plt_err("CPT queue pairs need to be setup for updating security"
			" session");
		return -EPERM;
	}

	if (conf->ipsec.direction == RTE_SECURITY_IPSEC_SA_DIR_INGRESS)
		return -ENOTSUP;

	ret = cnxk_ipsec_xform_verify(&conf->ipsec, conf->crypto_xform);
	if (ret)
		return ret;

	ret = cn9k_ipsec_xform_verify(&conf->ipsec, conf->crypto_xform);
	if (ret)
		return ret;

	return cn9k_ipsec_outb_sa_create(qp, &conf->ipsec, conf->crypto_xform,
					 sec_sess);
}

/* Update platform specific security ops */
void
cn9k_sec_ops_override(void)
{
	/* Update platform specific ops */
	cnxk_sec_ops.session_create = cn9k_sec_session_create;
	cnxk_sec_ops.session_destroy = cn9k_sec_session_destroy;
	cnxk_sec_ops.session_get_size = cn9k_sec_session_get_size;
	cnxk_sec_ops.session_update = cn9k_sec_session_update;
}