1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(C) 2021 Marvell.
5 #include <cryptodev_pmd.h>
6 #include <rte_security.h>
10 #include "cnxk_cryptodev.h"
11 #include "cnxk_cryptodev_capabilities.h"
12 #include "cnxk_security_ar.h"
/*
 * Append caps_<name> to the crypto capability table when any of the
 * SE/IE/AE hardware engine groups advertises support for <name>.
 *
 * Wrapped in do { } while (0) so the macro expands to exactly one
 * statement and is safe inside un-braced if/else bodies (the visible
 * form was a bare if, which is a classic dangling-else hazard).
 */
#define CPT_CAPS_ADD(cnxk_caps, cur_pos, hw_caps, name)                        \
	do {                                                                   \
		if ((hw_caps[CPT_ENG_TYPE_SE].name) ||                         \
		    (hw_caps[CPT_ENG_TYPE_IE].name) ||                         \
		    (hw_caps[CPT_ENG_TYPE_AE].name))                           \
			cpt_caps_add(cnxk_caps, cur_pos, caps_##name,          \
				     RTE_DIM(caps_##name));                    \
	} while (0)
/*
 * Append sec_caps_<name> to the security crypto capability table when
 * any of the SE/IE/AE hardware engine groups advertises support for
 * <name>.
 *
 * Wrapped in do { } while (0) for single-statement expansion, matching
 * standard C macro hygiene.
 */
#define SEC_CAPS_ADD(cnxk_caps, cur_pos, hw_caps, name)                        \
	do {                                                                   \
		if ((hw_caps[CPT_ENG_TYPE_SE].name) ||                         \
		    (hw_caps[CPT_ENG_TYPE_IE].name) ||                         \
		    (hw_caps[CPT_ENG_TYPE_AE].name))                           \
			sec_caps_add(cnxk_caps, cur_pos, sec_caps_##name,      \
				     RTE_DIM(sec_caps_##name));                \
	} while (0)
/* Asymmetric ("mul" engine) capabilities: RSA (sign/verify/encrypt/
 * decrypt), modular exponentiation, ECDSA (sign/verify) and EC point
 * multiply. Added to the table only when hardware advertises the mul
 * feature (see CPT_CAPS_ADD usage).
 * NOTE(review): this listing is elided -- per-entry size/range fields
 * and closing braces are not visible here.
 */
32 static const struct rte_cryptodev_capabilities caps_mul[] = {
34 .op = RTE_CRYPTO_OP_TYPE_ASYMMETRIC,
37 .xform_type = RTE_CRYPTO_ASYM_XFORM_RSA,
38 .op_types = ((1 << RTE_CRYPTO_ASYM_OP_SIGN) |
39 (1 << RTE_CRYPTO_ASYM_OP_VERIFY) |
40 (1 << RTE_CRYPTO_ASYM_OP_ENCRYPT) |
41 (1 << RTE_CRYPTO_ASYM_OP_DECRYPT)),
51 .op = RTE_CRYPTO_OP_TYPE_ASYMMETRIC,
54 .xform_type = RTE_CRYPTO_ASYM_XFORM_MODEX,
65 .op = RTE_CRYPTO_OP_TYPE_ASYMMETRIC,
68 .xform_type = RTE_CRYPTO_ASYM_XFORM_ECDSA,
69 .op_types = ((1 << RTE_CRYPTO_ASYM_OP_SIGN) |
70 (1 << RTE_CRYPTO_ASYM_OP_VERIFY)),
76 .op = RTE_CRYPTO_OP_TYPE_ASYMMETRIC,
79 .xform_type = RTE_CRYPTO_ASYM_XFORM_ECPM,
/* Symmetric auth capabilities for the SHA1/SHA2 feature group: plain and
 * HMAC variants of SHA1/224/256/384/512, plus MD5 and MD5-HMAC.
 * NOTE(review): listing is elided -- key/digest size ranges of each
 * entry are not visible here.
 */
87 static const struct rte_cryptodev_capabilities caps_sha1_sha2[] = {
89 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
91 .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
93 .algo = RTE_CRYPTO_AUTH_SHA1,
109 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
111 .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
113 .algo = RTE_CRYPTO_AUTH_SHA1_HMAC,
129 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
131 .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
133 .algo = RTE_CRYPTO_AUTH_SHA224,
149 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
151 .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
153 .algo = RTE_CRYPTO_AUTH_SHA224_HMAC,
169 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
171 .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
173 .algo = RTE_CRYPTO_AUTH_SHA256,
189 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
191 .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
193 .algo = RTE_CRYPTO_AUTH_SHA256_HMAC,
209 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
211 .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
213 .algo = RTE_CRYPTO_AUTH_SHA384,
229 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
231 .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
233 .algo = RTE_CRYPTO_AUTH_SHA384_HMAC,
249 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
251 .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
253 .algo = RTE_CRYPTO_AUTH_SHA512,
269 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
271 .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
273 .algo = RTE_CRYPTO_AUTH_SHA512_HMAC,
289 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
291 .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
293 .algo = RTE_CRYPTO_AUTH_MD5,
309 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
311 .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
313 .algo = RTE_CRYPTO_AUTH_MD5_HMAC,
/* AEAD capability for the chacha20 feature group: ChaCha20-Poly1305.
 * NOTE(review): listing is elided -- key/digest/aad/iv size ranges are
 * not visible here.
 */
330 static const struct rte_cryptodev_capabilities caps_chacha20[] = {
331 { /* Chacha20-Poly1305 */
332 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
334 .xform_type = RTE_CRYPTO_SYM_XFORM_AEAD,
336 .algo = RTE_CRYPTO_AEAD_CHACHA20_POLY1305,
/* Wireless-algorithm capabilities: SNOW 3G UEA2/UIA2 and ZUC EEA3/EIA3
 * ciphers and authenticators. ZUC entries are later widened for cn10k
 * (see cn10k_crypto_caps_update()).
 * NOTE(review): listing is elided -- size ranges are not visible here.
 */
363 static const struct rte_cryptodev_capabilities caps_zuc_snow3g[] = {
364 { /* SNOW 3G (UEA2) */
365 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
367 .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
369 .algo = RTE_CRYPTO_CIPHER_SNOW3G_UEA2,
/* ZUC (EEA3) cipher */
385 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
387 .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
389 .algo = RTE_CRYPTO_CIPHER_ZUC_EEA3,
404 { /* SNOW 3G (UIA2) */
405 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
407 .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
409 .algo = RTE_CRYPTO_AUTH_SNOW3G_UIA2,
/* ZUC (EIA3) auth */
430 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
432 .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
434 .algo = RTE_CRYPTO_AUTH_ZUC_EIA3,
/* AES capabilities: GMAC auth, CBC/CTR/XTS ciphers, GCM AEAD and CMAC
 * auth.
 * NOTE(review): listing is elided -- size ranges are not visible here.
 */
456 static const struct rte_cryptodev_capabilities caps_aes[] = {
457 { /* AES GMAC (AUTH) */
458 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
460 .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
462 .algo = RTE_CRYPTO_AUTH_AES_GMAC,
483 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
485 .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
487 .algo = RTE_CRYPTO_CIPHER_AES_CBC,
503 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
505 .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
507 .algo = RTE_CRYPTO_CIPHER_AES_CTR,
523 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
525 .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
527 .algo = RTE_CRYPTO_CIPHER_AES_XTS,
543 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
545 .xform_type = RTE_CRYPTO_SYM_XFORM_AEAD,
547 .algo = RTE_CRYPTO_AEAD_AES_GCM,
573 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
575 .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
577 .algo = RTE_CRYPTO_AUTH_AES_CMAC,
/* KASUMI capabilities: F8 cipher and F9 authentication.
 * NOTE(review): listing is elided -- size ranges are not visible here.
 */
594 static const struct rte_cryptodev_capabilities caps_kasumi[] = {
596 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
598 .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
600 .algo = RTE_CRYPTO_CIPHER_KASUMI_F8,
616 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
618 .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
620 .algo = RTE_CRYPTO_AUTH_KASUMI_F9,
/* DES-family cipher capabilities: 3DES-CBC, 3DES-ECB and DES-CBC.
 * NOTE(review): listing is elided -- size ranges are not visible here.
 */
637 static const struct rte_cryptodev_capabilities caps_des[] = {
639 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
641 .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
643 .algo = RTE_CRYPTO_CIPHER_3DES_CBC,
659 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
661 .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
663 .algo = RTE_CRYPTO_CIPHER_3DES_ECB,
679 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
681 .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
683 .algo = RTE_CRYPTO_CIPHER_DES_CBC,
/* NULL algorithm capabilities (pass-through auth and cipher). Always
 * added regardless of hardware feature flags (see crypto_caps_populate),
 * and the NULL-auth entry is reused by cn10k_sec_crypto_caps_update().
 */
700 static const struct rte_cryptodev_capabilities caps_null[] = {
702 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
704 .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
706 .algo = RTE_CRYPTO_AUTH_NULL,
721 { /* NULL (CIPHER) */
722 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
724 .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
726 .algo = RTE_CRYPTO_CIPHER_NULL,
/* Terminator entry (op == RTE_CRYPTO_OP_TYPE_UNDEFINED); appended last
 * so table walkers know where the capability list ends.
 */
743 static const struct rte_cryptodev_capabilities caps_end[] = {
744 RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
/* AES capabilities advertised for lookaside IPsec (security sessions):
 * GCM AEAD, CTR/CBC ciphers, XCBC-MAC and GMAC auth. May differ from the
 * plain-crypto caps_aes limits.
 * NOTE(review): listing is elided -- size ranges are not visible here.
 */
747 static const struct rte_cryptodev_capabilities sec_caps_aes[] = {
749 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
751 .xform_type = RTE_CRYPTO_SYM_XFORM_AEAD,
753 .algo = RTE_CRYPTO_AEAD_AES_GCM,
779 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
781 .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
783 .algo = RTE_CRYPTO_CIPHER_AES_CTR,
799 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
801 .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
803 .algo = RTE_CRYPTO_CIPHER_AES_CBC,
819 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
821 .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
823 .algo = RTE_CRYPTO_AUTH_AES_XCBC_MAC,
838 { /* AES GMAC (AUTH) */
839 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
841 .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
843 .algo = RTE_CRYPTO_AUTH_AES_GMAC,
/* DES capability advertised for lookaside IPsec: 3DES-CBC only.
 * NOTE(review): listing is elided -- size ranges are not visible here.
 */
865 static const struct rte_cryptodev_capabilities sec_caps_des[] = {
867 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
869 .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
871 .algo = RTE_CRYPTO_CIPHER_3DES_CBC,
/* HMAC auth capabilities advertised for lookaside IPsec: SHA1/256/384/
 * 512 HMAC. The SHA256-HMAC key range is widened on cn9k by
 * cn9k_sec_crypto_caps_update().
 * NOTE(review): listing is elided -- size ranges are not visible here.
 */
888 static const struct rte_cryptodev_capabilities sec_caps_sha1_sha2[] = {
890 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
892 .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
894 .algo = RTE_CRYPTO_AUTH_SHA1_HMAC,
910 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
912 .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
914 .algo = RTE_CRYPTO_AUTH_SHA256_HMAC,
930 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
932 .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
934 .algo = RTE_CRYPTO_AUTH_SHA384_HMAC,
950 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
952 .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
954 .algo = RTE_CRYPTO_AUTH_SHA512_HMAC,
/* NULL cipher capability for lookaside IPsec; appended unconditionally
 * in sec_crypto_caps_populate().
 */
971 static const struct rte_cryptodev_capabilities sec_caps_null[] = {
972 { /* NULL (CIPHER) */
973 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
975 .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
977 .algo = RTE_CRYPTO_CIPHER_NULL,
/* Template of rte_security capabilities: lookaside-protocol IPsec for
 * every (ESP|AH) x (tunnel|transport) x (ingress|egress) combination,
 * terminated by an ACTION_TYPE_NONE entry. crypto_capabilities is left
 * NULL here; cnxk_cpt_caps_populate() copies this template per-VF and
 * points each entry at the VF's sec_crypto_caps table, then applies
 * model-specific option tweaks.
 */
994 static const struct rte_security_capability sec_caps_templ[] = {
995 { /* IPsec Lookaside Protocol ESP Tunnel Ingress */
996 .action = RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
997 .protocol = RTE_SECURITY_PROTOCOL_IPSEC,
999 .proto = RTE_SECURITY_IPSEC_SA_PROTO_ESP,
1000 .mode = RTE_SECURITY_IPSEC_SA_MODE_TUNNEL,
1001 .direction = RTE_SECURITY_IPSEC_SA_DIR_INGRESS,
1004 .crypto_capabilities = NULL,
1006 { /* IPsec Lookaside Protocol ESP Tunnel Egress */
1007 .action = RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
1008 .protocol = RTE_SECURITY_PROTOCOL_IPSEC,
1010 .proto = RTE_SECURITY_IPSEC_SA_PROTO_ESP,
1011 .mode = RTE_SECURITY_IPSEC_SA_MODE_TUNNEL,
1012 .direction = RTE_SECURITY_IPSEC_SA_DIR_EGRESS,
1015 .crypto_capabilities = NULL,
1017 { /* IPsec Lookaside Protocol ESP Transport Ingress */
1018 .action = RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
1019 .protocol = RTE_SECURITY_PROTOCOL_IPSEC,
1021 .proto = RTE_SECURITY_IPSEC_SA_PROTO_ESP,
1022 .mode = RTE_SECURITY_IPSEC_SA_MODE_TRANSPORT,
1023 .direction = RTE_SECURITY_IPSEC_SA_DIR_INGRESS,
1026 .crypto_capabilities = NULL,
1028 { /* IPsec Lookaside Protocol ESP Transport Egress */
1029 .action = RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
1030 .protocol = RTE_SECURITY_PROTOCOL_IPSEC,
1032 .proto = RTE_SECURITY_IPSEC_SA_PROTO_ESP,
1033 .mode = RTE_SECURITY_IPSEC_SA_MODE_TRANSPORT,
1034 .direction = RTE_SECURITY_IPSEC_SA_DIR_EGRESS,
1037 .crypto_capabilities = NULL,
1039 { /* IPsec Lookaside Protocol AH Tunnel Ingress */
1040 .action = RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
1041 .protocol = RTE_SECURITY_PROTOCOL_IPSEC,
1043 .proto = RTE_SECURITY_IPSEC_SA_PROTO_AH,
1044 .mode = RTE_SECURITY_IPSEC_SA_MODE_TUNNEL,
1045 .direction = RTE_SECURITY_IPSEC_SA_DIR_INGRESS,
1048 .crypto_capabilities = NULL,
1050 { /* IPsec Lookaside Protocol AH Tunnel Egress */
1051 .action = RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
1052 .protocol = RTE_SECURITY_PROTOCOL_IPSEC,
1054 .proto = RTE_SECURITY_IPSEC_SA_PROTO_AH,
1055 .mode = RTE_SECURITY_IPSEC_SA_MODE_TUNNEL,
1056 .direction = RTE_SECURITY_IPSEC_SA_DIR_EGRESS,
1059 .crypto_capabilities = NULL,
1061 { /* IPsec Lookaside Protocol AH Transport Ingress */
1062 .action = RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
1063 .protocol = RTE_SECURITY_PROTOCOL_IPSEC,
1065 .proto = RTE_SECURITY_IPSEC_SA_PROTO_AH,
1066 .mode = RTE_SECURITY_IPSEC_SA_MODE_TRANSPORT,
1067 .direction = RTE_SECURITY_IPSEC_SA_DIR_INGRESS,
1070 .crypto_capabilities = NULL,
1072 { /* IPsec Lookaside Protocol AH Transport Egress */
1073 .action = RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
1074 .protocol = RTE_SECURITY_PROTOCOL_IPSEC,
1076 .proto = RTE_SECURITY_IPSEC_SA_PROTO_AH,
1077 .mode = RTE_SECURITY_IPSEC_SA_MODE_TRANSPORT,
1078 .direction = RTE_SECURITY_IPSEC_SA_DIR_EGRESS,
1081 .crypto_capabilities = NULL,
/* Terminator entry */
1084 .action = RTE_SECURITY_ACTION_TYPE_NONE
1089 cpt_caps_add(struct rte_cryptodev_capabilities cnxk_caps[], int *cur_pos,
1090 const struct rte_cryptodev_capabilities *caps, int nb_caps)
1092 if (*cur_pos + nb_caps > CNXK_CPT_MAX_CAPS)
1095 memcpy(&cnxk_caps[*cur_pos], caps, nb_caps * sizeof(caps[0]));
1096 *cur_pos += nb_caps;
/* cn10k-only fixup: widen the ZUC EEA3/EIA3 entries in the populated
 * capability table, since cn10k hardware supports 256-bit ZUC (key up to
 * 32 bytes in steps of 16, IV up to 25 bytes, digest up to 16 bytes in
 * steps of 4). Walks until the RTE_CRYPTO_OP_TYPE_UNDEFINED terminator
 * appended via caps_end.
 * NOTE(review): listing is elided -- function braces and the declaration
 * of index `i` are not visible here.
 */
1100 cn10k_crypto_caps_update(struct rte_cryptodev_capabilities cnxk_caps[])
1103 struct rte_cryptodev_capabilities *caps;
1106 while ((caps = &cnxk_caps[i++])->op != RTE_CRYPTO_OP_TYPE_UNDEFINED) {
/* ZUC EEA3 cipher: raise key/IV limits */
1107 if ((caps->op == RTE_CRYPTO_OP_TYPE_SYMMETRIC) &&
1108 (caps->sym.xform_type == RTE_CRYPTO_SYM_XFORM_CIPHER) &&
1109 (caps->sym.cipher.algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)) {
1111 caps->sym.cipher.key_size.max = 32;
1112 caps->sym.cipher.key_size.increment = 16;
1113 caps->sym.cipher.iv_size.max = 25;
1114 caps->sym.cipher.iv_size.increment = 1;
/* ZUC EIA3 auth: raise key/digest/IV limits */
1117 if ((caps->op == RTE_CRYPTO_OP_TYPE_SYMMETRIC) &&
1118 (caps->sym.xform_type == RTE_CRYPTO_SYM_XFORM_AUTH) &&
1119 (caps->sym.auth.algo == RTE_CRYPTO_AUTH_ZUC_EIA3)) {
1121 caps->sym.auth.key_size.max = 32;
1122 caps->sym.auth.key_size.increment = 16;
1123 caps->sym.auth.digest_size.max = 16;
1124 caps->sym.auth.digest_size.increment = 4;
1125 caps->sym.auth.iv_size.max = 25;
1126 caps->sym.auth.iv_size.increment = 1;
/* Build the plain-crypto capability table from the hardware engine
 * capability flags: add each feature group only if some engine supports
 * it, always add NULL algos and the list terminator, then apply cn10k
 * ZUC widening when running on cn10k.
 */
1132 crypto_caps_populate(struct rte_cryptodev_capabilities cnxk_caps[],
1133 union cpt_eng_caps *hw_caps)
1137 CPT_CAPS_ADD(cnxk_caps, &cur_pos, hw_caps, mul);
1138 CPT_CAPS_ADD(cnxk_caps, &cur_pos, hw_caps, sha1_sha2);
1139 CPT_CAPS_ADD(cnxk_caps, &cur_pos, hw_caps, chacha20);
1140 CPT_CAPS_ADD(cnxk_caps, &cur_pos, hw_caps, zuc_snow3g);
1141 CPT_CAPS_ADD(cnxk_caps, &cur_pos, hw_caps, aes);
1142 CPT_CAPS_ADD(cnxk_caps, &cur_pos, hw_caps, kasumi);
1143 CPT_CAPS_ADD(cnxk_caps, &cur_pos, hw_caps, des);
/* NULL algos and the end-of-list entry are unconditional */
1145 cpt_caps_add(cnxk_caps, &cur_pos, caps_null, RTE_DIM(caps_null));
1146 cpt_caps_add(cnxk_caps, &cur_pos, caps_end, RTE_DIM(caps_end));
1148 if (roc_model_is_cn10k())
1149 cn10k_crypto_caps_update(cnxk_caps);
/* Return the VF's crypto capability table (filled by
 * cnxk_cpt_caps_populate() at device setup).
 */
1152 const struct rte_cryptodev_capabilities *
1153 cnxk_crypto_capabilities_get(struct cnxk_cpt_vf *vf)
1155 return vf->crypto_caps;
/* Append nb_caps entries from caps[] into the security crypto capability
 * table at *cur_pos and advance the cursor. Overflow of
 * CNXK_SEC_CRYPTO_MAX_CAPS is a table-sizing bug -> rte_panic.
 */
1159 sec_caps_add(struct rte_cryptodev_capabilities cnxk_caps[], int *cur_pos,
1160 const struct rte_cryptodev_capabilities *caps, int nb_caps)
1162 if (*cur_pos + nb_caps > CNXK_SEC_CRYPTO_MAX_CAPS) {
1163 rte_panic("Could not add sec crypto caps");
1167 memcpy(&cnxk_caps[*cur_pos], caps, nb_caps * sizeof(caps[0]));
1168 *cur_pos += nb_caps;
1172 cn10k_sec_crypto_caps_update(struct rte_cryptodev_capabilities cnxk_caps[],
1175 const struct rte_cryptodev_capabilities *cap;
1178 if ((CNXK_SEC_CRYPTO_MAX_CAPS - *cur_pos) < 1) {
1179 rte_panic("Could not add sec crypto caps");
1184 for (i = 0; i < RTE_DIM(caps_null); i++) {
1185 cap = &caps_null[i];
1186 if (cap->sym.xform_type == RTE_CRYPTO_SYM_XFORM_AUTH &&
1187 cap->sym.auth.algo == RTE_CRYPTO_AUTH_NULL) {
1188 cnxk_caps[*cur_pos] = caps_null[i];
/* cn9k-only security capability fixup: widen the SHA256-HMAC key range
 * (32..64 bytes, increment 1) in the populated security crypto table.
 * Walks until the RTE_CRYPTO_OP_TYPE_UNDEFINED terminator.
 * NOTE(review): listing is elided -- function braces and the declaration
 * of index `i` are not visible here.
 */
1195 cn9k_sec_crypto_caps_update(struct rte_cryptodev_capabilities cnxk_caps[])
1198 struct rte_cryptodev_capabilities *caps;
1201 while ((caps = &cnxk_caps[i++])->op != RTE_CRYPTO_OP_TYPE_UNDEFINED) {
1202 if ((caps->op == RTE_CRYPTO_OP_TYPE_SYMMETRIC) &&
1203 (caps->sym.xform_type == RTE_CRYPTO_SYM_XFORM_AUTH) &&
1204 (caps->sym.auth.algo == RTE_CRYPTO_AUTH_SHA256_HMAC)) {
1205 caps->sym.auth.key_size.min = 32;
1206 caps->sym.auth.key_size.max = 64;
1207 caps->sym.auth.key_size.increment = 1;
/* Build the security (lookaside IPsec) crypto capability table: add the
 * hardware-gated AES/DES/SHA groups, apply the model-specific fixups
 * (cn10k NULL auth, cn9k SHA256-HMAC key range), then append the NULL
 * cipher entry and the list terminator.
 */
1215 sec_crypto_caps_populate(struct rte_cryptodev_capabilities cnxk_caps[],
1216 union cpt_eng_caps *hw_caps)
1220 SEC_CAPS_ADD(cnxk_caps, &cur_pos, hw_caps, aes);
1221 SEC_CAPS_ADD(cnxk_caps, &cur_pos, hw_caps, des);
1222 SEC_CAPS_ADD(cnxk_caps, &cur_pos, hw_caps, sha1_sha2);
1224 if (roc_model_is_cn10k())
1225 cn10k_sec_crypto_caps_update(cnxk_caps, &cur_pos);
/* else: cn9k fixup -- NOTE(review): the `else` keyword line is elided */
1227 cn9k_sec_crypto_caps_update(cnxk_caps);
1229 sec_caps_add(cnxk_caps, &cur_pos, sec_caps_null,
1230 RTE_DIM(sec_caps_null));
1231 sec_caps_add(cnxk_caps, &cur_pos, caps_end, RTE_DIM(caps_end));
/* Options common to all cnxk models: UDP encapsulation and DF/DSCP copy
 * are supported on every IPsec security capability entry.
 */
1235 cnxk_sec_caps_update(struct rte_security_capability *sec_cap)
1237 sec_cap->ipsec.options.udp_encap = 1;
1238 sec_cap->ipsec.options.copy_df = 1;
1239 sec_cap->ipsec.options.copy_dscp = 1;
/* cn10k-specific IPsec option flags for one security capability entry:
 * - egress: IV generation can be disabled only in LA_IPSEC_DEBUG builds;
 * - ingress (the non-egress path): UDP port verification, and for tunnel
 *   mode, tunnel header src/dst address verification;
 * - both directions: TTL decrement, IP/L4 checksum offload, stats, ESN,
 *   and the cn10k anti-replay window maximum.
 * NOTE(review): listing is elided -- the `} else {` / `#endif` lines are
 * not visible here.
 */
1243 cn10k_sec_caps_update(struct rte_security_capability *sec_cap)
1245 if (sec_cap->ipsec.direction == RTE_SECURITY_IPSEC_SA_DIR_EGRESS) {
1246 #ifdef LA_IPSEC_DEBUG
1247 sec_cap->ipsec.options.iv_gen_disable = 1;
1250 sec_cap->ipsec.options.udp_ports_verify = 1;
1251 if (sec_cap->ipsec.mode == RTE_SECURITY_IPSEC_SA_MODE_TUNNEL)
1252 sec_cap->ipsec.options.tunnel_hdr_verify =
1253 RTE_SECURITY_IPSEC_TUNNEL_VERIFY_SRC_DST_ADDR;
1255 sec_cap->ipsec.options.dec_ttl = 1;
1256 sec_cap->ipsec.options.ip_csum_enable = 1;
1257 sec_cap->ipsec.options.l4_csum_enable = 1;
1258 sec_cap->ipsec.options.stats = 1;
1259 sec_cap->ipsec.options.esn = 1;
1260 sec_cap->ipsec.replay_win_sz_max = ROC_AR_WIN_SIZE_MAX;
/* cn9k-specific IPsec option flags: IV-generation disable on egress is a
 * debug-only feature; the anti-replay window maximum uses the cn9k
 * (on-chip) limit.
 * NOTE(review): listing is elided -- the `#endif` / closing braces are
 * not visible here.
 */
1264 cn9k_sec_caps_update(struct rte_security_capability *sec_cap)
1266 if (sec_cap->ipsec.direction == RTE_SECURITY_IPSEC_SA_DIR_EGRESS) {
1267 #ifdef LA_IPSEC_DEBUG
1268 sec_cap->ipsec.options.iv_gen_disable = 1;
1271 sec_cap->ipsec.replay_win_sz_max = CNXK_ON_AR_WIN_SIZE_MAX;
/* Populate all per-VF capability tables at probe time:
 * 1. the plain-crypto and security-crypto tables from hardware flags;
 * 2. copy the sec_caps_templ IPsec template (static assert guarantees it
 *    fits) and, for every non-terminator entry, point
 *    crypto_capabilities at this VF's sec_crypto_caps, apply common
 *    option flags, then the model-specific (cn10k/cn9k) option flags.
 */
1275 cnxk_cpt_caps_populate(struct cnxk_cpt_vf *vf)
1279 crypto_caps_populate(vf->crypto_caps, vf->cpt.hw_caps);
1280 sec_crypto_caps_populate(vf->sec_crypto_caps, vf->cpt.hw_caps);
1282 PLT_STATIC_ASSERT(RTE_DIM(sec_caps_templ) <= RTE_DIM(vf->sec_caps));
1283 memcpy(vf->sec_caps, sec_caps_templ, sizeof(sec_caps_templ));
/* RTE_DIM - 1: skip the ACTION_TYPE_NONE terminator entry */
1285 for (i = 0; i < RTE_DIM(sec_caps_templ) - 1; i++) {
1286 vf->sec_caps[i].crypto_capabilities = vf->sec_crypto_caps;
1288 cnxk_sec_caps_update(&vf->sec_caps[i]);
1290 if (roc_model_is_cn10k())
1291 cn10k_sec_caps_update(&vf->sec_caps[i]);
1293 if (roc_model_is_cn9k())
1294 cn9k_sec_caps_update(&vf->sec_caps[i]);
/* rte_security capabilities_get() callback: return the per-VF security
 * capability array built by cnxk_cpt_caps_populate(). `device` is the
 * rte_cryptodev backing the security context.
 */
1299 const struct rte_security_capability *
1300 cnxk_crypto_sec_capabilities_get(void *device)
1302 struct rte_cryptodev *dev = device;
1303 struct cnxk_cpt_vf *vf;
1305 vf = dev->data->dev_private;
1306 return vf->sec_caps;