/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(C) 2021 Marvell.
 */

#include <cryptodev_pmd.h>
#include <rte_security.h>

#include "cnxk_cryptodev.h"
#include "cnxk_cryptodev_capabilities.h"
#define CPT_CAPS_ADD(cnxk_caps, cur_pos, hw_caps, name)                       \
        do {                                                                  \
                if ((hw_caps[CPT_ENG_TYPE_SE].name) ||                        \
                    (hw_caps[CPT_ENG_TYPE_IE].name) ||                        \
                    (hw_caps[CPT_ENG_TYPE_AE].name))                          \
                        cpt_caps_add(cnxk_caps, cur_pos, caps_##name,         \
                                     RTE_DIM(caps_##name));                   \
        } while (0)

#define SEC_CAPS_ADD(cnxk_caps, cur_pos, hw_caps, name)                       \
        do {                                                                  \
                if ((hw_caps[CPT_ENG_TYPE_SE].name) ||                        \
                    (hw_caps[CPT_ENG_TYPE_IE].name) ||                        \
                    (hw_caps[CPT_ENG_TYPE_AE].name))                          \
                        sec_caps_add(cnxk_caps, cur_pos, sec_caps_##name,     \
                                     RTE_DIM(sec_caps_##name));               \
        } while (0)
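
/*
 * Per-feature capability tables. Only the tables whose feature bit is set in
 * the hardware engine capabilities are copied into the per-device capability
 * array (see crypto_caps_populate()).
 */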
static const struct rte_cryptodev_capabilities caps_mul[] = {
        .op = RTE_CRYPTO_OP_TYPE_ASYMMETRIC,
        .xform_type = RTE_CRYPTO_ASYM_XFORM_RSA,
        .op_types = ((1 << RTE_CRYPTO_ASYM_OP_SIGN) |
                (1 << RTE_CRYPTO_ASYM_OP_VERIFY) |
                (1 << RTE_CRYPTO_ASYM_OP_ENCRYPT) |
                (1 << RTE_CRYPTO_ASYM_OP_DECRYPT)),
        .op = RTE_CRYPTO_OP_TYPE_ASYMMETRIC,
        .xform_type = RTE_CRYPTO_ASYM_XFORM_MODEX,
        .op = RTE_CRYPTO_OP_TYPE_ASYMMETRIC,
        .xform_type = RTE_CRYPTO_ASYM_XFORM_ECDSA,
        .op_types = ((1 << RTE_CRYPTO_ASYM_OP_SIGN) |
                (1 << RTE_CRYPTO_ASYM_OP_VERIFY)),
        .op = RTE_CRYPTO_OP_TYPE_ASYMMETRIC,
        .xform_type = RTE_CRYPTO_ASYM_XFORM_ECPM,

static const struct rte_cryptodev_capabilities caps_sha1_sha2[] = {
        .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
        .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
        .algo = RTE_CRYPTO_AUTH_SHA1,
        .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
        .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
        .algo = RTE_CRYPTO_AUTH_SHA1_HMAC,
        .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
        .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
        .algo = RTE_CRYPTO_AUTH_SHA224,
        .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
        .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
        .algo = RTE_CRYPTO_AUTH_SHA224_HMAC,
        .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
        .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
        .algo = RTE_CRYPTO_AUTH_SHA256,
        .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
        .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
        .algo = RTE_CRYPTO_AUTH_SHA256_HMAC,
        .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
        .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
        .algo = RTE_CRYPTO_AUTH_SHA384,
        .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
        .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
        .algo = RTE_CRYPTO_AUTH_SHA384_HMAC,
        .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
        .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
        .algo = RTE_CRYPTO_AUTH_SHA512,
        .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
        .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
        .algo = RTE_CRYPTO_AUTH_SHA512_HMAC,
        .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
        .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
        .algo = RTE_CRYPTO_AUTH_MD5,
        .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
        .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
        .algo = RTE_CRYPTO_AUTH_MD5_HMAC,

static const struct rte_cryptodev_capabilities caps_chacha20[] = {
        { /* Chacha20-Poly1305 */
        .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
        .xform_type = RTE_CRYPTO_SYM_XFORM_AEAD,
        .algo = RTE_CRYPTO_AEAD_CHACHA20_POLY1305,

static const struct rte_cryptodev_capabilities caps_zuc_snow3g[] = {
        { /* SNOW 3G (UEA2) */
        .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
        .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
        .algo = RTE_CRYPTO_CIPHER_SNOW3G_UEA2,
        .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
        .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
        .algo = RTE_CRYPTO_CIPHER_ZUC_EEA3,
        { /* SNOW 3G (UIA2) */
        .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
        .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
        .algo = RTE_CRYPTO_AUTH_SNOW3G_UIA2,
        .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
        .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
        .algo = RTE_CRYPTO_AUTH_ZUC_EIA3,

static const struct rte_cryptodev_capabilities caps_aes[] = {
        { /* AES GMAC (AUTH) */
        .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
        .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
        .algo = RTE_CRYPTO_AUTH_AES_GMAC,
        .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
        .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
        .algo = RTE_CRYPTO_CIPHER_AES_CBC,
        .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
        .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
        .algo = RTE_CRYPTO_CIPHER_AES_CTR,
        .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
        .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
        .algo = RTE_CRYPTO_CIPHER_AES_XTS,
        .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
        .xform_type = RTE_CRYPTO_SYM_XFORM_AEAD,
        .algo = RTE_CRYPTO_AEAD_AES_GCM,
        .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
        .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
        .algo = RTE_CRYPTO_AUTH_AES_CMAC,

static const struct rte_cryptodev_capabilities caps_kasumi[] = {
        .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
        .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
        .algo = RTE_CRYPTO_CIPHER_KASUMI_F8,
        .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
        .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
        .algo = RTE_CRYPTO_AUTH_KASUMI_F9,

static const struct rte_cryptodev_capabilities caps_des[] = {
        .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
        .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
        .algo = RTE_CRYPTO_CIPHER_3DES_CBC,
        .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
        .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
        .algo = RTE_CRYPTO_CIPHER_3DES_ECB,
        .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
        .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
        .algo = RTE_CRYPTO_CIPHER_DES_CBC,

static const struct rte_cryptodev_capabilities caps_null[] = {
        .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
        .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
        .algo = RTE_CRYPTO_AUTH_NULL,
        { /* NULL (CIPHER) */
        .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
        .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
        .algo = RTE_CRYPTO_CIPHER_NULL,

static const struct rte_cryptodev_capabilities caps_end[] = {
        RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
};
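
/*
 * Crypto capability tables advertised for rte_security (lookaside IPsec)
 * sessions.
 */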
static const struct rte_cryptodev_capabilities sec_caps_aes[] = {
        .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
        .xform_type = RTE_CRYPTO_SYM_XFORM_AEAD,
        .algo = RTE_CRYPTO_AEAD_AES_GCM,
        .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
        .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
        .algo = RTE_CRYPTO_CIPHER_AES_CTR,
        .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
        .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
        .algo = RTE_CRYPTO_CIPHER_AES_CBC,
        .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
        .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
        .algo = RTE_CRYPTO_AUTH_AES_XCBC_MAC,

static const struct rte_cryptodev_capabilities sec_caps_sha1_sha2[] = {
        .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
        .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
        .algo = RTE_CRYPTO_AUTH_SHA1_HMAC,
        .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
        .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
        .algo = RTE_CRYPTO_AUTH_SHA256_HMAC,
        .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
        .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
        .algo = RTE_CRYPTO_AUTH_SHA384_HMAC,
        .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
        .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
        .algo = RTE_CRYPTO_AUTH_SHA512_HMAC,

static const struct rte_cryptodev_capabilities sec_caps_null[] = {
        { /* NULL (CIPHER) */
        .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
        .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
        .algo = RTE_CRYPTO_CIPHER_NULL,
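
/*
 * rte_security capability template. The crypto_capabilities pointers are
 * filled in per device in cnxk_cpt_caps_populate().
 */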
static const struct rte_security_capability sec_caps_templ[] = {
        { /* IPsec Lookaside Protocol ESP Tunnel Ingress */
        .action = RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
        .protocol = RTE_SECURITY_PROTOCOL_IPSEC,
        .proto = RTE_SECURITY_IPSEC_SA_PROTO_ESP,
        .mode = RTE_SECURITY_IPSEC_SA_MODE_TUNNEL,
        .direction = RTE_SECURITY_IPSEC_SA_DIR_INGRESS,
        .crypto_capabilities = NULL,
        { /* IPsec Lookaside Protocol ESP Tunnel Egress */
        .action = RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
        .protocol = RTE_SECURITY_PROTOCOL_IPSEC,
        .proto = RTE_SECURITY_IPSEC_SA_PROTO_ESP,
        .mode = RTE_SECURITY_IPSEC_SA_MODE_TUNNEL,
        .direction = RTE_SECURITY_IPSEC_SA_DIR_EGRESS,
        .crypto_capabilities = NULL,
        { /* IPsec Lookaside Protocol ESP Transport Ingress */
        .action = RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
        .protocol = RTE_SECURITY_PROTOCOL_IPSEC,
        .proto = RTE_SECURITY_IPSEC_SA_PROTO_ESP,
        .mode = RTE_SECURITY_IPSEC_SA_MODE_TRANSPORT,
        .direction = RTE_SECURITY_IPSEC_SA_DIR_INGRESS,
        .crypto_capabilities = NULL,
        { /* IPsec Lookaside Protocol ESP Transport Egress */
        .action = RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
        .protocol = RTE_SECURITY_PROTOCOL_IPSEC,
        .proto = RTE_SECURITY_IPSEC_SA_PROTO_ESP,
        .mode = RTE_SECURITY_IPSEC_SA_MODE_TRANSPORT,
        .direction = RTE_SECURITY_IPSEC_SA_DIR_EGRESS,
        .crypto_capabilities = NULL,
        .action = RTE_SECURITY_ACTION_TYPE_NONE
        }
};
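
/*
 * Append nb_caps entries to the consolidated crypto capability array. Entries
 * that would exceed CNXK_CPT_MAX_CAPS are silently skipped.
 */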
cpt_caps_add(struct rte_cryptodev_capabilities cnxk_caps[], int *cur_pos,
             const struct rte_cryptodev_capabilities *caps, int nb_caps)
{
        if (*cur_pos + nb_caps > CNXK_CPT_MAX_CAPS)
                return;

        memcpy(&cnxk_caps[*cur_pos], caps, nb_caps * sizeof(caps[0]));
        *cur_pos += nb_caps;
}
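
/*
 * On CN10K, advertise the wider ZUC EEA3/EIA3 limits (256-bit keys, larger IV
 * and digest sizes) supported by the hardware.
 */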
cn10k_crypto_caps_update(struct rte_cryptodev_capabilities cnxk_caps[])
        struct rte_cryptodev_capabilities *caps;

        while ((caps = &cnxk_caps[i++])->op != RTE_CRYPTO_OP_TYPE_UNDEFINED) {
                if ((caps->op == RTE_CRYPTO_OP_TYPE_SYMMETRIC) &&
                    (caps->sym.xform_type == RTE_CRYPTO_SYM_XFORM_CIPHER) &&
                    (caps->sym.cipher.algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)) {
                        caps->sym.cipher.key_size.max = 32;
                        caps->sym.cipher.key_size.increment = 16;
                        caps->sym.cipher.iv_size.max = 25;
                        caps->sym.cipher.iv_size.increment = 1;
                }

                if ((caps->op == RTE_CRYPTO_OP_TYPE_SYMMETRIC) &&
                    (caps->sym.xform_type == RTE_CRYPTO_SYM_XFORM_AUTH) &&
                    (caps->sym.auth.algo == RTE_CRYPTO_AUTH_ZUC_EIA3)) {
                        caps->sym.auth.key_size.max = 32;
                        caps->sym.auth.key_size.increment = 16;
                        caps->sym.auth.digest_size.max = 16;
                        caps->sym.auth.digest_size.increment = 4;
                        caps->sym.auth.iv_size.max = 25;
                        caps->sym.auth.iv_size.increment = 1;
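
/*
 * Build the device capability array from the hardware engine capabilities.
 * NULL cipher/auth and the end-of-list marker are always appended; CN10K
 * specific adjustments are applied last.
 */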
crypto_caps_populate(struct rte_cryptodev_capabilities cnxk_caps[],
                     union cpt_eng_caps *hw_caps)
        CPT_CAPS_ADD(cnxk_caps, &cur_pos, hw_caps, mul);
        CPT_CAPS_ADD(cnxk_caps, &cur_pos, hw_caps, sha1_sha2);
        CPT_CAPS_ADD(cnxk_caps, &cur_pos, hw_caps, chacha20);
        CPT_CAPS_ADD(cnxk_caps, &cur_pos, hw_caps, zuc_snow3g);
        CPT_CAPS_ADD(cnxk_caps, &cur_pos, hw_caps, aes);
        CPT_CAPS_ADD(cnxk_caps, &cur_pos, hw_caps, kasumi);
        CPT_CAPS_ADD(cnxk_caps, &cur_pos, hw_caps, des);

        cpt_caps_add(cnxk_caps, &cur_pos, caps_null, RTE_DIM(caps_null));
        cpt_caps_add(cnxk_caps, &cur_pos, caps_end, RTE_DIM(caps_end));

        if (roc_model_is_cn10k())
                cn10k_crypto_caps_update(cnxk_caps);
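
/* Return the consolidated crypto capabilities of the given VF. */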
const struct rte_cryptodev_capabilities *
cnxk_crypto_capabilities_get(struct cnxk_cpt_vf *vf)
{
        return vf->crypto_caps;
}
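
/*
 * Append nb_caps entries to the security-session crypto capability array.
 * Overflowing CNXK_SEC_CRYPTO_MAX_CAPS is treated as a fatal error.
 */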
sec_caps_add(struct rte_cryptodev_capabilities cnxk_caps[], int *cur_pos,
             const struct rte_cryptodev_capabilities *caps, int nb_caps)
{
        if (*cur_pos + nb_caps > CNXK_SEC_CRYPTO_MAX_CAPS) {
                rte_panic("Could not add sec crypto caps");
        }

        memcpy(&cnxk_caps[*cur_pos], caps, nb_caps * sizeof(caps[0]));
        *cur_pos += nb_caps;
}
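
/* On CN10K, additionally advertise NULL authentication for security sessions. */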
cn10k_sec_crypto_caps_update(struct rte_cryptodev_capabilities cnxk_caps[],
        const struct rte_cryptodev_capabilities *cap;

        if ((CNXK_SEC_CRYPTO_MAX_CAPS - *cur_pos) < 1) {
                rte_panic("Could not add sec crypto caps");
        }

        for (i = 0; i < RTE_DIM(caps_null); i++) {
                cap = &caps_null[i];
                if (cap->sym.xform_type == RTE_CRYPTO_SYM_XFORM_AUTH &&
                    cap->sym.auth.algo == RTE_CRYPTO_AUTH_NULL) {
                        cnxk_caps[*cur_pos] = caps_null[i];
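
/* CN9K security sessions restrict the SHA256-HMAC key size to 32..64 bytes. */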
cn9k_sec_crypto_caps_update(struct rte_cryptodev_capabilities cnxk_caps[])
        struct rte_cryptodev_capabilities *caps;

        while ((caps = &cnxk_caps[i++])->op != RTE_CRYPTO_OP_TYPE_UNDEFINED) {
                if ((caps->op == RTE_CRYPTO_OP_TYPE_SYMMETRIC) &&
                    (caps->sym.xform_type == RTE_CRYPTO_SYM_XFORM_AUTH) &&
                    (caps->sym.auth.algo == RTE_CRYPTO_AUTH_SHA256_HMAC)) {
                        caps->sym.auth.key_size.min = 32;
                        caps->sym.auth.key_size.max = 64;
                        caps->sym.auth.key_size.increment = 1;
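
/* Build the crypto capability array advertised for security sessions. */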
sec_crypto_caps_populate(struct rte_cryptodev_capabilities cnxk_caps[],
                         union cpt_eng_caps *hw_caps)
        SEC_CAPS_ADD(cnxk_caps, &cur_pos, hw_caps, aes);
        SEC_CAPS_ADD(cnxk_caps, &cur_pos, hw_caps, sha1_sha2);

        if (roc_model_is_cn10k())
                cn10k_sec_crypto_caps_update(cnxk_caps, &cur_pos);
        else
                cn9k_sec_crypto_caps_update(cnxk_caps);

        sec_caps_add(cnxk_caps, &cur_pos, sec_caps_null,
                     RTE_DIM(sec_caps_null));
        sec_caps_add(cnxk_caps, &cur_pos, caps_end, RTE_DIM(caps_end));
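
/* IPsec options common to CN9K and CN10K. */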
cnxk_sec_caps_update(struct rte_security_capability *sec_cap)
{
        sec_cap->ipsec.options.udp_encap = 1;
        sec_cap->ipsec.options.copy_df = 1;
        sec_cap->ipsec.options.copy_dscp = 1;
}
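
/*
 * Additional IPsec options on CN10K: UDP port and tunnel header verification
 * on ingress, IP/L4 checksum offload and per-SA statistics. Disabling IV
 * generation on egress is only exposed in debug builds (LA_IPSEC_DEBUG).
 */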
cn10k_sec_caps_update(struct rte_security_capability *sec_cap)
{
        if (sec_cap->ipsec.direction == RTE_SECURITY_IPSEC_SA_DIR_EGRESS) {
#ifdef LA_IPSEC_DEBUG
                sec_cap->ipsec.options.iv_gen_disable = 1;
#endif
        } else {
                sec_cap->ipsec.options.udp_ports_verify = 1;
                if (sec_cap->ipsec.mode == RTE_SECURITY_IPSEC_SA_MODE_TUNNEL)
                        sec_cap->ipsec.options.tunnel_hdr_verify =
                                RTE_SECURITY_IPSEC_TUNNEL_VERIFY_SRC_DST_ADDR;
        }
        sec_cap->ipsec.options.ip_csum_enable = 1;
        sec_cap->ipsec.options.l4_csum_enable = 1;
        sec_cap->ipsec.options.stats = 1;
}
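
/*
 * On CN9K, disabling IV generation on egress is only exposed in debug builds
 * (LA_IPSEC_DEBUG).
 */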
cn9k_sec_caps_update(struct rte_security_capability *sec_cap)
{
        if (sec_cap->ipsec.direction == RTE_SECURITY_IPSEC_SA_DIR_EGRESS) {
#ifdef LA_IPSEC_DEBUG
                sec_cap->ipsec.options.iv_gen_disable = 1;
#endif
        }
}
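
/*
 * Populate all capability arrays of the VF: crypto capabilities, security
 * crypto capabilities and the rte_security capabilities derived from
 * sec_caps_templ.
 */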
cnxk_cpt_caps_populate(struct cnxk_cpt_vf *vf)
        crypto_caps_populate(vf->crypto_caps, vf->cpt.hw_caps);
        sec_crypto_caps_populate(vf->sec_crypto_caps, vf->cpt.hw_caps);

        PLT_STATIC_ASSERT(RTE_DIM(sec_caps_templ) <= RTE_DIM(vf->sec_caps));
        memcpy(vf->sec_caps, sec_caps_templ, sizeof(sec_caps_templ));

        for (i = 0; i < RTE_DIM(sec_caps_templ) - 1; i++) {
                vf->sec_caps[i].crypto_capabilities = vf->sec_crypto_caps;

                cnxk_sec_caps_update(&vf->sec_caps[i]);

                if (roc_model_is_cn10k())
                        cn10k_sec_caps_update(&vf->sec_caps[i]);

                if (roc_model_is_cn9k())
                        cn9k_sec_caps_update(&vf->sec_caps[i]);
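
/*
 * Intended as the rte_security capabilities_get handler; returns the per-VF
 * security capability array built in cnxk_cpt_caps_populate().
 */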
const struct rte_security_capability *
cnxk_crypto_sec_capabilities_get(void *device)
{
        struct rte_cryptodev *dev = device;
        struct cnxk_cpt_vf *vf;

        vf = dev->data->dev_private;
        return vf->sec_caps;
}