/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(C) 2021 Marvell.
 */
5 #include <rte_cryptodev.h>
9 #include "cnxk_cryptodev.h"
10 #include "cnxk_cryptodev_capabilities.h"
/*
 * Append the caps_<name> capability table to cnxk_caps (advancing
 * *cur_pos) iff at least one hardware engine group -- symmetric (SE),
 * IPsec (IE) or asymmetric (AE) -- reports the matching feature bit in
 * hw_caps. The do/while (0) wrapper (missing in the truncated
 * original) makes the macro expand to a single statement, so it is
 * safe inside un-braced if/else bodies.
 */
#define CPT_CAPS_ADD(cnxk_caps, cur_pos, hw_caps, name)                        \
	do {                                                                   \
		if ((hw_caps[CPT_ENG_TYPE_SE].name) ||                         \
		    (hw_caps[CPT_ENG_TYPE_IE].name) ||                         \
		    (hw_caps[CPT_ENG_TYPE_AE].name))                           \
			cpt_caps_add(cnxk_caps, cur_pos, caps_##name,          \
				     RTE_DIM(caps_##name));                    \
	} while (0)
/*
 * Auth capabilities: SHA1/SHA2 family and MD5, plain and HMAC.
 * NOTE(review): this chunk is a truncated extraction. The leading
 * numbers on each line are stale line numbers from the original file,
 * and the nested {.sym = { ... {.auth = { ... }} }} initializers
 * (block/key/digest size ranges), the per-entry braces and the closing
 * "};" of the array are missing from this view. Restore the full
 * initializers from the upstream source before building.
 */
21 static const struct rte_cryptodev_capabilities caps_sha1_sha2[] = {
/* SHA1 */
23 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
25 .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
27 .algo = RTE_CRYPTO_AUTH_SHA1,
/* SHA1 HMAC */
43 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
45 .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
47 .algo = RTE_CRYPTO_AUTH_SHA1_HMAC,
/* SHA224 */
63 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
65 .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
67 .algo = RTE_CRYPTO_AUTH_SHA224,
/* SHA224 HMAC */
83 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
85 .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
87 .algo = RTE_CRYPTO_AUTH_SHA224_HMAC,
/* SHA256 */
103 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
105 .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
107 .algo = RTE_CRYPTO_AUTH_SHA256,
/* SHA256 HMAC */
123 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
125 .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
127 .algo = RTE_CRYPTO_AUTH_SHA256_HMAC,
/* SHA384 */
143 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
145 .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
147 .algo = RTE_CRYPTO_AUTH_SHA384,
/* SHA384 HMAC */
163 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
165 .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
167 .algo = RTE_CRYPTO_AUTH_SHA384_HMAC,
/* SHA512 */
183 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
185 .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
187 .algo = RTE_CRYPTO_AUTH_SHA512,
/* SHA512 HMAC */
203 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
205 .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
207 .algo = RTE_CRYPTO_AUTH_SHA512_HMAC,
/* MD5 */
223 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
225 .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
227 .algo = RTE_CRYPTO_AUTH_MD5,
/* MD5 HMAC */
243 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
245 .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
247 .algo = RTE_CRYPTO_AUTH_MD5_HMAC,
/*
 * AEAD capability: ChaCha20-Poly1305.
 * NOTE(review): truncated extraction -- the nested .sym/.aead
 * initializers (key/digest/aad/iv size ranges), closing braces and the
 * terminating "};" are missing from this view; restore from upstream.
 */
264 static const struct rte_cryptodev_capabilities caps_chacha20[] = {
265 { /* Chacha20-Poly1305 */
266 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
268 .xform_type = RTE_CRYPTO_SYM_XFORM_AEAD,
270 .algo = RTE_CRYPTO_AEAD_CHACHA20_POLY1305,
/*
 * Wireless (3GPP) cipher/auth capabilities: SNOW 3G and ZUC.
 * NOTE(review): truncated extraction -- the nested .sym/.cipher/.auth
 * initializers (key/iv/digest size ranges), per-entry braces and the
 * closing "};" are missing from this view; restore from upstream.
 */
297 static const struct rte_cryptodev_capabilities caps_zuc_snow3g[] = {
298 { /* SNOW 3G (UEA2) */
299 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
301 .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
303 .algo = RTE_CRYPTO_CIPHER_SNOW3G_UEA2,
/* ZUC (EEA3) */
319 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
321 .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
323 .algo = RTE_CRYPTO_CIPHER_ZUC_EEA3,
338 { /* SNOW 3G (UIA2) */
339 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
341 .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
343 .algo = RTE_CRYPTO_AUTH_SNOW3G_UIA2,
/* ZUC (EIA3) */
364 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
366 .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
368 .algo = RTE_CRYPTO_AUTH_ZUC_EIA3,
/*
 * AES capabilities: GMAC auth, CBC/CTR/XTS ciphers and GCM AEAD.
 * NOTE(review): truncated extraction -- the nested
 * .sym/.auth/.cipher/.aead initializers (key/iv/digest/aad size
 * ranges), per-entry braces and the closing "};" are missing from this
 * view; restore from upstream.
 */
390 static const struct rte_cryptodev_capabilities caps_aes[] = {
391 { /* AES GMAC (AUTH) */
392 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
394 .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
396 .algo = RTE_CRYPTO_AUTH_AES_GMAC,
/* AES CBC */
417 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
419 .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
421 .algo = RTE_CRYPTO_CIPHER_AES_CBC,
/* AES CTR */
437 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
439 .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
441 .algo = RTE_CRYPTO_CIPHER_AES_CTR,
/* AES XTS */
457 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
459 .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
461 .algo = RTE_CRYPTO_CIPHER_AES_XTS,
/* AES GCM */
477 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
479 .xform_type = RTE_CRYPTO_SYM_XFORM_AEAD,
481 .algo = RTE_CRYPTO_AEAD_AES_GCM,
/*
 * KASUMI capabilities: F8 cipher and F9 auth.
 * NOTE(review): truncated extraction -- the nested .sym/.cipher/.auth
 * initializers (key/iv/digest size ranges), per-entry braces and the
 * closing "};" are missing from this view; restore from upstream.
 */
508 static const struct rte_cryptodev_capabilities caps_kasumi[] = {
/* KASUMI (F8) */
510 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
512 .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
514 .algo = RTE_CRYPTO_CIPHER_KASUMI_F8,
/* KASUMI (F9) */
530 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
532 .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
534 .algo = RTE_CRYPTO_AUTH_KASUMI_F9,
/*
 * DES/3DES cipher capabilities: 3DES-CBC, 3DES-ECB and DES-CBC.
 * NOTE(review): truncated extraction -- the nested .sym/.cipher
 * initializers (key/iv size ranges), per-entry braces and the closing
 * "};" are missing from this view; restore from upstream.
 */
551 static const struct rte_cryptodev_capabilities caps_des[] = {
/* 3DES CBC */
553 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
555 .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
557 .algo = RTE_CRYPTO_CIPHER_3DES_CBC,
/* 3DES ECB */
573 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
575 .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
577 .algo = RTE_CRYPTO_CIPHER_3DES_ECB,
/* DES CBC */
593 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
595 .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
597 .algo = RTE_CRYPTO_CIPHER_DES_CBC,
/*
 * NULL (pass-through) auth and cipher capabilities; always advertised
 * by crypto_caps_populate() regardless of hardware feature bits.
 * NOTE(review): truncated extraction -- the nested .sym/.auth/.cipher
 * initializers, per-entry braces and the closing "};" are missing from
 * this view; restore from upstream.
 */
614 static const struct rte_cryptodev_capabilities caps_null[] = {
/* NULL (AUTH) */
616 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
618 .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
620 .algo = RTE_CRYPTO_AUTH_NULL,
635 { /* NULL (CIPHER) */
636 .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
638 .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
640 .algo = RTE_CRYPTO_CIPHER_NULL,
657 static const struct rte_cryptodev_capabilities caps_end[] = {
658 RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
662 cpt_caps_add(struct rte_cryptodev_capabilities cnxk_caps[], int *cur_pos,
663 const struct rte_cryptodev_capabilities *caps, int nb_caps)
665 if (*cur_pos + nb_caps > CNXK_CPT_MAX_CAPS)
668 memcpy(&cnxk_caps[*cur_pos], caps, nb_caps * sizeof(caps[0]));
673 crypto_caps_populate(struct rte_cryptodev_capabilities cnxk_caps[],
674 union cpt_eng_caps *hw_caps)
678 CPT_CAPS_ADD(cnxk_caps, &cur_pos, hw_caps, sha1_sha2);
679 CPT_CAPS_ADD(cnxk_caps, &cur_pos, hw_caps, chacha20);
680 CPT_CAPS_ADD(cnxk_caps, &cur_pos, hw_caps, zuc_snow3g);
681 CPT_CAPS_ADD(cnxk_caps, &cur_pos, hw_caps, aes);
682 CPT_CAPS_ADD(cnxk_caps, &cur_pos, hw_caps, kasumi);
683 CPT_CAPS_ADD(cnxk_caps, &cur_pos, hw_caps, des);
685 cpt_caps_add(cnxk_caps, &cur_pos, caps_null, RTE_DIM(caps_null));
686 cpt_caps_add(cnxk_caps, &cur_pos, caps_end, RTE_DIM(caps_end));
689 const struct rte_cryptodev_capabilities *
690 cnxk_crypto_capabilities_get(struct cnxk_cpt_vf *vf)
692 return vf->crypto_caps;
696 cnxk_cpt_caps_populate(struct cnxk_cpt_vf *vf)
698 crypto_caps_populate(vf->crypto_caps, vf->cpt.hw_caps);