]> git.droids-corp.org - dpdk.git/commitdiff
cryptodev: add capabilities discovery
authorDeclan Doherty <declan.doherty@intel.com>
Fri, 11 Mar 2016 01:36:54 +0000 (01:36 +0000)
committerThomas Monjalon <thomas.monjalon@6wind.com>
Fri, 11 Mar 2016 09:43:09 +0000 (10:43 +0100)
This patch adds a mechanism for discovery of crypto device features and supported
crypto operations and algorithms. It also provides a method for a crypto PMD to
publish any data range limitations it may have for the operations and algorithms
it supports.

The parameter feature_flags added to rte_cryptodev struct is used to capture
features such as operations supported (symmetric crypto, operation chaining etc)
as well as parameters such as whether the device is hardware accelerated or uses
SIMD instructions.

The capabilities parameter allows a PMD to define an array of supported operations
with any limitations which that implementation may have.

Finally the rte_cryptodev_info struct has been extended to allow retrieval of
these parameters using the existing rte_cryptodev_info_get() API.

Signed-off-by: Declan Doherty <declan.doherty@intel.com>
Signed-off-by: Pablo de Lara <pablo.de.lara.guarch@intel.com>
Acked-by: Fiona Trahe <fiona.trahe@intel.com>
13 files changed:
drivers/crypto/aesni_gcm/aesni_gcm_pmd.c
drivers/crypto/aesni_gcm/aesni_gcm_pmd_ops.c
drivers/crypto/aesni_mb/rte_aesni_mb_pmd.c
drivers/crypto/aesni_mb/rte_aesni_mb_pmd_ops.c
drivers/crypto/null/null_crypto_pmd.c
drivers/crypto/null/null_crypto_pmd_ops.c
drivers/crypto/qat/qat_crypto.c
drivers/crypto/qat/rte_qat_cryptodev.c
drivers/crypto/snow3g/rte_snow3g_pmd.c
drivers/crypto/snow3g/rte_snow3g_pmd_ops.c
lib/librte_cryptodev/rte_cryptodev.c
lib/librte_cryptodev/rte_cryptodev.h
lib/librte_cryptodev/rte_cryptodev_version.map

index 83aa272d1d531315ae43524710e00eb193ea3b84..35577bca29aa596671796cbe9b7d807bca531d1a 100644 (file)
@@ -445,6 +445,24 @@ aesni_gcm_create(const char *name,
        dev->dequeue_burst = aesni_gcm_pmd_dequeue_burst;
        dev->enqueue_burst = aesni_gcm_pmd_enqueue_burst;
 
+       dev->feature_flags = RTE_CRYPTODEV_FF_SYMMETRIC_CRYPTO |
+                       RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING |
+                       RTE_CRYPTODEV_FF_CPU_AESNI;
+
+       switch (vector_mode) {
+       case RTE_AESNI_GCM_SSE:
+               dev->feature_flags |= RTE_CRYPTODEV_FF_CPU_SSE;
+               break;
+       case RTE_AESNI_GCM_AVX:
+               dev->feature_flags |= RTE_CRYPTODEV_FF_CPU_AVX;
+               break;
+       case RTE_AESNI_GCM_AVX2:
+               dev->feature_flags |= RTE_CRYPTODEV_FF_CPU_AVX2;
+               break;
+       default:
+               break;
+       }
+
        /* Set vector instructions mode supported */
        internals = dev->data->dev_private;
 
index f865e0dd6d0ec7d744b35640a7bd23371444b90c..387f8d100cd762df4f5db1da16d3662b8a8135f0 100644 (file)
 
 #include "aesni_gcm_pmd_private.h"
 
+/**
+ * Capability table reported via aesni_gcm_pmd_info_get(). One entry per
+ * supported transform; terminated by RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST().
+ * Sizes appear to be byte counts (key 16-32 step 8 => 128/192/256-bit AES
+ * keys) -- NOTE(review): confirm units against the capability struct docs.
+ */
+static const struct rte_cryptodev_capabilities aesni_gcm_pmd_capabilities[] = {
+       {       /* AES GCM (AUTH) */
+               .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+               .sym = {
+                       .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+                       .auth = {
+                               .algo = RTE_CRYPTO_AUTH_AES_GCM,
+                               .block_size = 16,
+                               .key_size = {
+                                       .min = 16,
+                                       .max = 32,
+                                       .increment = 8
+                               },
+                               .digest_size = {
+                                       .min = 8,
+                                       .max = 16,
+                                       .increment = 4
+                               },
+                               .aad_size = {
+                                       .min = 8,
+                                       .max = 12,
+                                       .increment = 4
+                               }
+                       }
+               }
+       },
+       {       /* AES GCM (CIPHER) */
+               .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+               .sym = {
+                       .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+                       .cipher = {
+                               .algo = RTE_CRYPTO_CIPHER_AES_GCM,
+                               .block_size = 16,
+                               .key_size = {
+                                       .min = 16,
+                                       .max = 32,
+                                       .increment = 8
+                               },
+                               .iv_size = {
+                                       .min = 16,
+                                       .max = 16,
+                                       .increment = 0
+                               }
+                       }
+               }
+       },
+       RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
+};
+
 /** Configure device */
 static int
 aesni_gcm_pmd_config(__rte_unused struct rte_cryptodev *dev)
@@ -107,6 +156,8 @@ aesni_gcm_pmd_info_get(struct rte_cryptodev *dev,
 
        if (dev_info != NULL) {
                dev_info->dev_type = dev->dev_type;
+                dev_info->feature_flags = dev->feature_flags;
+                dev_info->capabilities = aesni_gcm_pmd_capabilities;
 
                dev_info->max_nb_queue_pairs = internals->max_nb_queue_pairs;
                dev_info->sym.max_nb_sessions = internals->max_nb_sessions;
index 9599cc491d7cf74a0492515fc049cce2540a0245..4e3885cf80214342bf84f0902b96e941e2eddf84 100644 (file)
@@ -640,6 +640,24 @@ cryptodev_aesni_mb_create(const char *name, unsigned socket_id)
        dev->dequeue_burst = aesni_mb_pmd_dequeue_burst;
        dev->enqueue_burst = aesni_mb_pmd_enqueue_burst;
 
+       dev->feature_flags = RTE_CRYPTODEV_FF_SYMMETRIC_CRYPTO |
+                       RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING |
+                       RTE_CRYPTODEV_FF_CPU_AESNI;
+
+       switch (vector_mode) {
+       case RTE_AESNI_MB_SSE:
+               dev->feature_flags |= RTE_CRYPTODEV_FF_CPU_SSE;
+               break;
+       case RTE_AESNI_MB_AVX:
+               dev->feature_flags |= RTE_CRYPTODEV_FF_CPU_AVX;
+               break;
+       case RTE_AESNI_MB_AVX2:
+               dev->feature_flags |= RTE_CRYPTODEV_FF_CPU_AVX2;
+               break;
+       default:
+               break;
+       }
+
        /* Set vector instructions mode supported */
        internals = dev->data->dev_private;
 
index b1dd103a557a1081bb4729039ba3406f94877271..5a439e6b0c8079e3ed656a0985f1fe5ba4a4072b 100644 (file)
 
 #include "rte_aesni_mb_pmd_private.h"
 
+
+/**
+ * Capability table reported via aesni_mb_pmd_info_get(); terminated by
+ * RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST(). Fixed-size fields use
+ * min == max with increment 0. The HMAC digest sizes below are smaller than
+ * the full hash output (e.g. 12 for SHA1) -- presumably truncated MACs as
+ * produced by the multi-buffer library; verify against that library's docs.
+ */
+static const struct rte_cryptodev_capabilities aesni_mb_pmd_capabilities[] = {
+       {       /* MD5 HMAC */
+               .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+               .sym = {
+                       .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+                       .auth = {
+                               .algo = RTE_CRYPTO_AUTH_MD5_HMAC,
+                               .block_size = 64,
+                               .key_size = {
+                                       .min = 64,
+                                       .max = 64,
+                                       .increment = 0
+                               },
+                               .digest_size = {
+                                       .min = 12,
+                                       .max = 12,
+                                       .increment = 0
+                               },
+                               .aad_size = { 0 }
+                       }
+               }
+       },
+       {       /* SHA1 HMAC */
+               .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+               .sym = {
+                       .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+                       .auth = {
+                               .algo = RTE_CRYPTO_AUTH_SHA1_HMAC,
+                               .block_size = 64,
+                               .key_size = {
+                                       .min = 64,
+                                       .max = 64,
+                                       .increment = 0
+                               },
+                               .digest_size = {
+                                       .min = 12,
+                                       .max = 12,
+                                       .increment = 0
+                               },
+                               .aad_size = { 0 }
+                       }
+               }
+       },
+       {       /* SHA224 HMAC */
+               .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+               .sym = {
+                       .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+                       .auth = {
+                               .algo = RTE_CRYPTO_AUTH_SHA224_HMAC,
+                               .block_size = 64,
+                               .key_size = {
+                                       .min = 64,
+                                       .max = 64,
+                                       .increment = 0
+                               },
+                               .digest_size = {
+                                       .min = 14,
+                                       .max = 14,
+                                       .increment = 0
+                               },
+                               .aad_size = { 0 }
+                       }
+               }
+       },
+       {       /* SHA256 HMAC */
+               .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+               .sym = {
+                       .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+                       .auth = {
+                               .algo = RTE_CRYPTO_AUTH_SHA256_HMAC,
+                               .block_size = 64,
+                               .key_size = {
+                                       .min = 64,
+                                       .max = 64,
+                                       .increment = 0
+                               },
+                               .digest_size = {
+                                       .min = 16,
+                                       .max = 16,
+                                       .increment = 0
+                               },
+                               .aad_size = { 0 }
+                       }
+               }
+       },
+       {       /* SHA384 HMAC */
+               .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+               .sym = {
+                       .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+                       .auth = {
+                               .algo = RTE_CRYPTO_AUTH_SHA384_HMAC,
+                               .block_size = 128,
+                               .key_size = {
+                                       .min = 128,
+                                       .max = 128,
+                                       .increment = 0
+                               },
+                               .digest_size = {
+                                       .min = 24,
+                                       .max = 24,
+                                       .increment = 0
+                               },
+                               .aad_size = { 0 }
+                       }
+               }
+       },
+       {       /* SHA512 HMAC */
+               .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+               .sym = {
+                       .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+                       .auth = {
+                               .algo = RTE_CRYPTO_AUTH_SHA512_HMAC,
+                               .block_size = 128,
+                               .key_size = {
+                                       .min = 128,
+                                       .max = 128,
+                                       .increment = 0
+                               },
+                               .digest_size = {
+                                       .min = 32,
+                                       .max = 32,
+                                       .increment = 0
+                               },
+                               .aad_size = { 0 }
+                       }
+               }
+       },
+       {       /* AES XCBC HMAC */
+               .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+               .sym = {
+                       .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+                       .auth = {
+                               .algo = RTE_CRYPTO_AUTH_AES_XCBC_MAC,
+                               .block_size = 16,
+                               .key_size = {
+                                       .min = 16,
+                                       .max = 16,
+                                       .increment = 0
+                               },
+                               .digest_size = {
+                                       .min = 12,
+                                       .max = 12,
+                                       .increment = 0
+                               },
+                               .aad_size = { 0 }
+                       }
+               }
+       },
+       {       /* AES CBC */
+               .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+               .sym = {
+                       .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+                       .cipher = {
+                               .algo = RTE_CRYPTO_CIPHER_AES_CBC,
+                               .block_size = 16,
+                               .key_size = {
+                                       .min = 16,
+                                       .max = 32,
+                                       .increment = 8
+                               },
+                               .iv_size = {
+                                       .min = 16,
+                                       .max = 16,
+                                       .increment = 0
+                               }
+                       }
+               }
+       },
+       RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
+};
+
+
 /** Configure device */
 static int
 aesni_mb_pmd_config(__rte_unused struct rte_cryptodev *dev)
@@ -107,6 +280,8 @@ aesni_mb_pmd_info_get(struct rte_cryptodev *dev,
 
        if (dev_info != NULL) {
                dev_info->dev_type = dev->dev_type;
+               dev_info->feature_flags = dev->feature_flags;
+               dev_info->capabilities = aesni_mb_pmd_capabilities;
                dev_info->max_nb_queue_pairs = internals->max_nb_queue_pairs;
                dev_info->sym.max_nb_sessions = internals->max_nb_sessions;
        }
index a4bb15237442f53af7fe3cd09089f543029d2139..82b990c4be8b2eb65f43e45aa9ca4b59714d73e7 100644 (file)
@@ -215,6 +215,9 @@ cryptodev_null_create(const char *name,
        dev->dequeue_burst = null_crypto_pmd_dequeue_burst;
        dev->enqueue_burst = null_crypto_pmd_enqueue_burst;
 
+       dev->feature_flags = RTE_CRYPTODEV_FF_SYMMETRIC_CRYPTO |
+                       RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING;
+
        internals = dev->data->dev_private;
 
        internals->max_nb_qpairs = init_params->max_nb_queue_pairs;
index 6bf955ef75c8fb441814aff50a7a2e8cfe58a16d..39f8088d98d2c6f47c51cbd132833f4ac22a86c1 100644 (file)
 
 #include "null_crypto_pmd_private.h"
 
+/**
+ * Capability table reported via null_crypto_pmd_info_get(); terminated by
+ * RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST(). The NULL algorithms take no key,
+ * digest or IV, so every size range is the fixed value 0 (min == max,
+ * increment 0), matching the convention used by the other PMDs in this patch.
+ */
+static const struct rte_cryptodev_capabilities null_crypto_pmd_capabilities[] = {
+       {       /* NULL (AUTH) */
+               .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+               .sym = {
+                       .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+                       .auth = {
+                               .algo = RTE_CRYPTO_AUTH_NULL,
+                               .block_size = 1,
+                               .key_size = {
+                                       .min = 0,
+                                       .max = 0,
+                                       .increment = 0
+                               },
+                               .digest_size = {
+                                       .min = 0,
+                                       .max = 0,
+                                       .increment = 0
+                               },
+                               .aad_size = { 0 }
+                       }
+               }
+       },
+       {       /* NULL (CIPHER) */
+               .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+               .sym = {
+                       .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+                       .cipher = {
+                               .algo = RTE_CRYPTO_CIPHER_NULL,
+                               .block_size = 1,
+                               .key_size = {
+                                       .min = 0,
+                                       .max = 0,
+                                       /* was 8: meaningless for a fixed
+                                        * zero-length key range; 0 matches
+                                        * every other min == max entry */
+                                       .increment = 0
+                               },
+                               .iv_size = {
+                                       .min = 0,
+                                       .max = 0,
+                                       .increment = 0
+                               }
+                       }
+               }
+       },
+       RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
+};
+
 /** Configure device */
 static int
 null_crypto_pmd_config(__rte_unused struct rte_cryptodev *dev)
@@ -108,6 +153,8 @@ null_crypto_pmd_info_get(struct rte_cryptodev *dev,
                dev_info->dev_type = dev->dev_type;
                dev_info->max_nb_queue_pairs = internals->max_nb_qpairs;
                dev_info->sym.max_nb_sessions = internals->max_nb_sessions;
+               dev_info->feature_flags = dev->feature_flags;
+               dev_info->capabilities = null_crypto_pmd_capabilities;
        }
 }
 
index f267da5c4122147b890e36259f00d3463d75c846..29c1fe52766e9682c9e2b50998b9770b3950013d 100644 (file)
 
 #define BYTE_LENGTH    8
 
+/**
+ * Capability table reported via qat_dev_info_get(); terminated by
+ * RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST(). Fixed-size fields use
+ * min == max with increment 0; ranged fields give min/max/step.
+ */
+static const struct rte_cryptodev_capabilities qat_pmd_capabilities[] = {
+       {       /* SHA1 HMAC */
+               .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+               .sym = {
+                       .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+                       .auth = {
+                               .algo = RTE_CRYPTO_AUTH_SHA1_HMAC,
+                               .block_size = 64,
+                               .key_size = {
+                                       .min = 64,
+                                       .max = 64,
+                                       .increment = 0
+                               },
+                               .digest_size = {
+                                       .min = 20,
+                                       .max = 20,
+                                       .increment = 0
+                               },
+                               .aad_size = { 0 }
+                       }
+               }
+       },
+       {       /* SHA256 HMAC */
+               .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+               .sym = {
+                       .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+                       .auth = {
+                               .algo = RTE_CRYPTO_AUTH_SHA256_HMAC,
+                               .block_size = 64,
+                               .key_size = {
+                                       .min = 64,
+                                       .max = 64,
+                                       .increment = 0
+                               },
+                               .digest_size = {
+                                       .min = 32,
+                                       .max = 32,
+                                       .increment = 0
+                               },
+                               .aad_size = { 0 }
+                       }
+               }
+       },
+       {       /* SHA512 HMAC */
+               .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+               .sym = {
+                       .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+                       .auth = {
+                               .algo = RTE_CRYPTO_AUTH_SHA512_HMAC,
+                               .block_size = 128,
+                               .key_size = {
+                                       .min = 128,
+                                       .max = 128,
+                                       .increment = 0
+                               },
+                               .digest_size = {
+                                       .min = 64,
+                                       .max = 64,
+                                       .increment = 0
+                               },
+                               .aad_size = { 0 }
+                       }
+               }
+       },
+       {       /* AES XCBC MAC */
+               .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+               .sym = {
+                       .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+                       .auth = {
+                               .algo = RTE_CRYPTO_AUTH_AES_XCBC_MAC,
+                               .block_size = 16,
+                               .key_size = {
+                                       .min = 16,
+                                       .max = 16,
+                                       .increment = 0
+                               },
+                               .digest_size = {
+                                       .min = 16,
+                                       .max = 16,
+                                       .increment = 0
+                               },
+                               .aad_size = { 0 }
+                       }
+               }
+       },
+       {       /* AES GCM (AUTH) */
+               .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+               .sym = {
+                       .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+                       .auth = {
+                               .algo = RTE_CRYPTO_AUTH_AES_GCM,
+                               .block_size = 16,
+                               .key_size = {
+                                       .min = 16,
+                                       .max = 32,
+                                       .increment = 8
+                               },
+                               .digest_size = {
+                                       .min = 8,
+                                       .max = 16,
+                                       .increment = 4
+                               },
+                               .aad_size = {
+                                       .min = 8,
+                                       .max = 12,
+                                       .increment = 4
+                               }
+                       }
+               }
+       },
+       {       /* SNOW3G (UIA2) */
+               .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+               .sym = {
+                       .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+                       .auth = {
+                               .algo = RTE_CRYPTO_AUTH_SNOW3G_UIA2,
+                               .block_size = 16,
+                               .key_size = {
+                                       .min = 16,
+                                       .max = 16,
+                                       .increment = 0
+                               },
+                               .digest_size = {
+                                       .min = 4,
+                                       .max = 4,
+                                       .increment = 0
+                               },
+                               .aad_size = {
+                                       .min = 16,
+                                       .max = 16,
+                                       .increment = 0
+                               }
+                       }
+               }
+       },
+       {       /* AES GCM (CIPHER) */
+               .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+               .sym = {
+                       .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+                       .cipher = {
+                               .algo = RTE_CRYPTO_CIPHER_AES_GCM,
+                               .block_size = 16,
+                               .key_size = {
+                                       .min = 16,
+                                       .max = 32,
+                                       .increment = 8
+                               },
+                               .iv_size = {
+                                       .min = 16,
+                                       .max = 16,
+                                       .increment = 0
+                               }
+                       }
+               }
+       },
+       {       /* AES CBC */
+               .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+               .sym = {
+                       .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+                       .cipher = {
+                               /* use designated initializer like every
+                                * other entry; the value was previously
+                                * assigned positionally to .algo */
+                               .algo = RTE_CRYPTO_CIPHER_AES_CBC,
+                               .block_size = 16,
+                               .key_size = {
+                                       .min = 16,
+                                       .max = 32,
+                                       .increment = 8
+                               },
+                               .iv_size = {
+                                       .min = 16,
+                                       .max = 16,
+                                       .increment = 0
+                               }
+                       }
+               }
+       },
+       {       /* SNOW3G (UEA2) */
+               .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+               .sym = {
+                       .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+                       .cipher = {
+                               .algo = RTE_CRYPTO_CIPHER_SNOW3G_UEA2,
+                               .block_size = 16,
+                               .key_size = {
+                                       .min = 16,
+                                       .max = 16,
+                                       .increment = 0
+                               },
+                               .iv_size = {
+                                       .min = 16,
+                                       .max = 16,
+                                       .increment = 0
+                               }
+                       }
+               }
+       },
+       RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
+};
+
 static inline uint32_t
 adf_modulo(uint32_t data, uint32_t shift);
 
@@ -652,7 +850,8 @@ void qat_dev_info_get(__rte_unused struct rte_cryptodev *dev,
                info->max_nb_queue_pairs =
                                ADF_NUM_SYM_QPS_PER_BUNDLE *
                                ADF_NUM_BUNDLES_PER_DEV;
-
+               info->feature_flags = dev->feature_flags;
+               info->capabilities = qat_pmd_capabilities;
                info->sym.max_nb_sessions = internals->max_nb_sessions;
                info->dev_type = RTE_CRYPTODEV_QAT_SYM_PMD;
        }
index 5e51aca3c6e01436c7b89c4fbdc68c896f104a2c..a7912f5a23bdf94f772e014b73d93d5afe05e11f 100644 (file)
@@ -95,6 +95,9 @@ crypto_qat_dev_init(__attribute__((unused)) struct rte_cryptodev_driver *crypto_
        cryptodev->enqueue_burst = qat_pmd_enqueue_op_burst;
        cryptodev->dequeue_burst = qat_pmd_dequeue_op_burst;
 
+       cryptodev->feature_flags = RTE_CRYPTODEV_FF_SYMMETRIC_CRYPTO |
+                       RTE_CRYPTODEV_FF_HW_ACCELERATED |
+                       RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING;
 
        internals = cryptodev->data->dev_private;
        internals->max_nb_sessions = RTE_QAT_PMD_MAX_NB_SESSIONS;
index 6f84e5c494e29f1742ff079e78ba63d75170c1a3..97c7fb51373924121b0d2fcee3a113ed78c24aff 100644 (file)
@@ -490,6 +490,9 @@ cryptodev_snow3g_create(const char *name,
        dev->dequeue_burst = snow3g_pmd_dequeue_burst;
        dev->enqueue_burst = snow3g_pmd_enqueue_burst;
 
+       dev->feature_flags = RTE_CRYPTODEV_FF_SYMMETRIC_CRYPTO |
+                       RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING;
+
        internals = dev->data->dev_private;
 
        internals->max_nb_queue_pairs = init_params->max_nb_queue_pairs;
index 5643323ce411ccb8293c7619b86e02532bfe52b6..74eee23031f7afb1bf5207cfbcc43d2bdf09232d 100644 (file)
 
 #include "rte_snow3g_pmd_private.h"
 
+/**
+ * Capability table reported via snow3g_pmd_info_get(); terminated by
+ * RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST(). Entries mirror the SNOW3G
+ * UIA2/UEA2 entries of the QAT PMD capability table in this same patch.
+ */
+static const struct rte_cryptodev_capabilities snow3g_pmd_capabilities[] = {
+       {       /* SNOW3G (UIA2) */
+               .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+               .sym = {
+                       .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+                       .auth = {
+                               .algo = RTE_CRYPTO_AUTH_SNOW3G_UIA2,
+                               .block_size = 16,
+                               .key_size = {
+                                       .min = 16,
+                                       .max = 16,
+                                       .increment = 0
+                               },
+                               .digest_size = {
+                                       .min = 4,
+                                       .max = 4,
+                                       .increment = 0
+                               },
+                               .aad_size = {
+                                       .min = 16,
+                                       .max = 16,
+                                       .increment = 0
+                               }
+                       }
+               }
+       },
+       {       /* SNOW3G (UEA2) */
+               .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+               .sym = {
+                       .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+                       .cipher = {
+                               .algo = RTE_CRYPTO_CIPHER_SNOW3G_UEA2,
+                               .block_size = 16,
+                               .key_size = {
+                                       .min = 16,
+                                       .max = 16,
+                                       .increment = 0
+                               },
+                               .iv_size = {
+                                       .min = 16,
+                                       .max = 16,
+                                       .increment = 0
+                               }
+                       }
+               }
+       },
+       RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
+};
+
 /** Configure device */
 static int
 snow3g_pmd_config(__rte_unused struct rte_cryptodev *dev)
@@ -109,6 +158,8 @@ snow3g_pmd_info_get(struct rte_cryptodev *dev,
                dev_info->dev_type = dev->dev_type;
                dev_info->max_nb_queue_pairs = internals->max_nb_queue_pairs;
                dev_info->sym.max_nb_sessions = internals->max_nb_sessions;
+               dev_info->feature_flags = dev->feature_flags;
+               dev_info->capabilities = snow3g_pmd_capabilities;
        }
 }
 
index 4632ca3aadcedafda2d557069fe011e6c72fa252..2d72dd7ea1e806da6ef6db177c82e706ab928544 100644 (file)
@@ -102,6 +102,34 @@ struct rte_cryptodev_callback {
        uint32_t active;                        /**< Callback is executing */
 };
 
+
+/**
+ * Map a single feature flag to its human-readable name.
+ *
+ * @param flag one RTE_CRYPTODEV_FF_* value (a single flag, not an OR-ed
+ *             mask -- a combined mask matches no case and yields NULL)
+ *
+ * @return pointer to a static string naming the flag, or NULL if the
+ *         flag is unknown
+ */
+const char *
+rte_cryptodev_get_feature_name(uint64_t flag)
+{
+       switch (flag) {
+       case RTE_CRYPTODEV_FF_SYMMETRIC_CRYPTO:
+               return "SYMMETRIC_CRYPTO";
+       case RTE_CRYPTODEV_FF_ASYMMETRIC_CRYPTO:
+               return "ASYMMETRIC_CRYPTO";
+       case RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING:
+               return "SYM_OPERATION_CHAINING";
+       case RTE_CRYPTODEV_FF_CPU_SSE:
+               return "CPU_SSE";
+       case RTE_CRYPTODEV_FF_CPU_AVX:
+               return "CPU_AVX";
+       case RTE_CRYPTODEV_FF_CPU_AVX2:
+               return "CPU_AVX2";
+       case RTE_CRYPTODEV_FF_CPU_AESNI:
+               return "CPU_AESNI";
+       case RTE_CRYPTODEV_FF_HW_ACCELERATED:
+               return "HW_ACCELERATED";
+
+       default:
+               return NULL;
+       }
+}
+
+
 int
 rte_cryptodev_create_vdev(const char *name, const char *args)
 {
index eb85c46dccef2a90256a48856c08cce128dfef35..b599c9501e3d5d339568a512b6fec72556b8d011 100644 (file)
@@ -98,12 +98,145 @@ extern const char **rte_cyptodev_names;
 #define CDEV_PMD_TRACE(fmt, args...)
 #endif
 
+/**
+ * Symmetric Crypto Capability
+ *
+ * Describes one symmetric transform supported by a PMD, together
+ * with the parameter size ranges (in bytes) the implementation
+ * accepts for it.  The union is discriminated by xform_type:
+ * authentication transforms use .auth, cipher transforms use
+ * .cipher.
+ *
+ * NOTE(review): an increment of 0 appears to denote a single fixed
+ * size (min == max) rather than a range -- confirm against the PMD
+ * capability tables that instantiate this struct.
+ */
+struct rte_cryptodev_symmetric_capability {
+       enum rte_crypto_sym_xform_type xform_type;
+       /**< Transform type : Authentication / Cipher */
+       union {
+               struct {
+                       enum rte_crypto_auth_algorithm algo;
+                       /**< authentication algorithm */
+                       uint16_t block_size;
+                       /**< algorithm block size */
+                       struct {
+                               uint16_t min;   /**< minimum key size */
+                               uint16_t max;   /**< maximum key size */
+                               uint16_t increment;
+                               /**< if a range of sizes is supported,
+                                * this parameter is used to indicate
+                                * increments in byte size that are supported
+                                * between the minimum and maximum */
+                       } key_size;
+                       /**< auth key size range */
+                       struct {
+                               uint16_t min;   /**< minimum digest size */
+                               uint16_t max;   /**< maximum digest size */
+                               uint16_t increment;
+                               /**< if a range of sizes is supported,
+                                * this parameter is used to indicate
+                                * increments in byte size that are supported
+                                * between the minimum and maximum */
+                       } digest_size;
+                       /**< digest size range */
+                       struct {
+                               uint16_t min;   /**< minimum aad size */
+                               uint16_t max;   /**< maximum aad size */
+                               uint16_t increment;
+                               /**< if a range of sizes is supported,
+                                * this parameter is used to indicate
+                                * increments in byte size that are supported
+                                * between the minimum and maximum */
+                       } aad_size;
+                       /**< Additional authentication data size range */
+               } auth;
+               /**< Symmetric Authentication transform capabilities */
+               struct {
+                       enum rte_crypto_cipher_algorithm algo;
+                       /**< cipher algorithm */
+                       uint16_t block_size;
+                       /**< algorithm block size */
+                       struct {
+                               uint16_t min;   /**< minimum key size */
+                               uint16_t max;   /**< maximum key size */
+                               uint16_t increment;
+                               /**< if a range of sizes is supported,
+                                * this parameter is used to indicate
+                                * increments in byte size that are supported
+                                * between the minimum and maximum */
+                       } key_size;
+                       /**< cipher key size range */
+                       struct {
+                               uint16_t min;   /**< minimum iv size */
+                               uint16_t max;   /**< maximum iv size */
+                               uint16_t increment;
+                               /**< if a range of sizes is supported,
+                                * this parameter is used to indicate
+                                * increments in byte size that are supported
+                                * between the minimum and maximum */
+                       } iv_size;
+                       /**< Initialisation vector data size range */
+               } cipher;
+               /**< Symmetric Cipher transform capabilities */
+       };
+};
+
+/**
+ * Structure used to capture a capability of a crypto device.
+ *
+ * PMDs publish an array of these entries, terminated by
+ * RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST() (an entry whose op is
+ * RTE_CRYPTO_OP_TYPE_UNDEFINED).  The union member to read is
+ * selected by op.
+ */
+struct rte_cryptodev_capabilities {
+       enum rte_crypto_op_type op;
+       /**< Operation type */
+
+       union {
+               struct rte_cryptodev_symmetric_capability sym;
+               /**< Symmetric operation capability parameters */
+       };
+};
+
+/**
+ * Macro used at end of crypto PMD capabilities list: expands to a
+ * sentinel entry whose op is RTE_CRYPTO_OP_TYPE_UNDEFINED, marking
+ * the end of the array for consumers iterating over it.
+ */
+#define RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST() \
+       { RTE_CRYPTO_OP_TYPE_UNDEFINED }
+
+
+/**
+ * Crypto device supported feature flags
+ *
+ * Each flag occupies a distinct bit of the 64-bit feature_flags
+ * mask, so flags may be OR-ed together.
+ *
+ * Note:
+ * New features flags should be added to the end of the list
+ *
+ * Keep these flags synchronised with rte_cryptodev_get_feature_name()
+ */
+#define        RTE_CRYPTODEV_FF_SYMMETRIC_CRYPTO       (1ULL << 0)
+/**< Symmetric crypto operations are supported */
+#define        RTE_CRYPTODEV_FF_ASYMMETRIC_CRYPTO      (1ULL << 1)
+/**< Asymmetric crypto operations are supported */
+#define        RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING (1ULL << 2)
+/**< Chaining symmetric crypto operations are supported */
+#define        RTE_CRYPTODEV_FF_CPU_SSE                (1ULL << 3)
+/**< Utilises CPU SIMD SSE instructions */
+#define        RTE_CRYPTODEV_FF_CPU_AVX                (1ULL << 4)
+/**< Utilises CPU SIMD AVX instructions */
+#define        RTE_CRYPTODEV_FF_CPU_AVX2               (1ULL << 5)
+/**< Utilises CPU SIMD AVX2 instructions */
+#define        RTE_CRYPTODEV_FF_CPU_AESNI              (1ULL << 6)
+/**< Utilises CPU AES-NI instructions */
+#define        RTE_CRYPTODEV_FF_HW_ACCELERATED         (1ULL << 7)
+/**< Operations are off-loaded to an external hardware accelerator */
+
+
+/**
+ * Get the name of a crypto device feature flag
+ *
+ * @param      flag    The mask describing the flag.
+ *
+ * @return
+ *   The name of this flag, or NULL if it's not a valid feature flag.
+ *   Pass a single flag bit at a time: a combined mask matches no
+ *   flag and yields NULL.
+ */
+
+extern const char *
+rte_cryptodev_get_feature_name(uint64_t flag);
+
 /**  Crypto device information */
 struct rte_cryptodev_info {
        const char *driver_name;                /**< Driver name. */
        enum rte_cryptodev_type dev_type;       /**< Device type */
        struct rte_pci_device *pci_dev;         /**< PCI information. */
 
+       uint64_t feature_flags;                 /**< Feature flags */
+
+       const struct rte_cryptodev_capabilities *capabilities;
+       /**< Array of the device's supported capabilities */
+
        unsigned max_nb_queue_pairs;
        /**< Maximum number of queues pairs supported by device. */
 
@@ -540,6 +673,8 @@ typedef uint16_t (*enqueue_pkt_burst_t)(void *qp,
 /**< Enqueue packets for processing on queue pair of a device. */
 
 
+
+
 struct rte_cryptodev_callback;
 
 /** Structure to keep track of registered callbacks */
@@ -558,6 +693,8 @@ struct rte_cryptodev {
        /**< Pointer to device data */
        struct rte_cryptodev_ops *dev_ops;
        /**< Functions exported by PMD */
+       uint64_t feature_flags;
+       /**< Supported features */
        struct rte_pci_device *pci_dev;
        /**< PCI info. supplied by probing */
 
index 6de6c7de0a28dd5fc0601e709e0fcf23b952194e..41004e1c392dd559ce88c6f4df2eb138e5f24b86 100644 (file)
@@ -10,6 +10,7 @@ DPDK_16.04 {
        rte_cryptodev_configure;
        rte_cryptodev_create_vdev;
        rte_cryptodev_get_dev_id;
+       rte_cryptodev_get_feature_name;
        rte_cryptodev_info_get;
        rte_cryptodev_pmd_allocate;
        rte_cryptodev_pmd_callback_process;