From: Declan Doherty
Date: Fri, 11 Mar 2016 01:36:54 +0000 (+0000)
Subject: cryptodev: add capabilities discovery
X-Git-Tag: spdx-start~7490
X-Git-Url: http://git.droids-corp.org/?a=commitdiff_plain;h=26c2e4ad5ad4fd671576f6847e1cbb6e98c05682;p=dpdk.git

cryptodev: add capabilities discovery

This patch adds a mechanism for discovering crypto device features and
the crypto operations and algorithms a device supports. It also
provides a method for a crypto PMD to publish any data range
limitations it may have for the operations and algorithms it supports.

The feature_flags parameter added to the rte_cryptodev struct captures
features such as the operations supported (symmetric crypto, operation
chaining, etc.) as well as properties such as whether the device is
hardware accelerated or uses SIMD instructions.

The capabilities parameter allows a PMD to define an array of supported
operations together with any limitations that implementation may have.

Finally, the rte_cryptodev_info struct has been extended to allow
retrieval of these parameters through the existing
rte_cryptodev_info_get() API.

Signed-off-by: Declan Doherty
Signed-off-by: Pablo de Lara
Acked-by: Fiona Trahe
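As a usage illustration (an editor's sketch, not part of the patch): assuming an initialised EAL and at least one probed crypto PMD, an application might consume the new discovery fields as below. dump_crypto_dev() is a hypothetical helper; every struct member and constant it touches is introduced by this patch.

	#include <stdio.h>
	#include <inttypes.h>
	#include <rte_cryptodev.h>

	/* Hypothetical helper: report one device's feature flags and
	 * walk its advertised capability array. */
	static void
	dump_crypto_dev(uint8_t dev_id)
	{
		struct rte_cryptodev_info info;
		const struct rte_cryptodev_capabilities *cap;

		rte_cryptodev_info_get(dev_id, &info);

		printf("dev %u (%s): feature flags 0x%" PRIx64 "\n",
				(unsigned)dev_id, info.driver_name,
				info.feature_flags);

		if (info.feature_flags & RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING)
			printf("  symmetric operation chaining supported\n");

		/* the array is terminated by RTE_CRYPTO_OP_TYPE_UNDEFINED */
		for (cap = info.capabilities;
				cap->op != RTE_CRYPTO_OP_TYPE_UNDEFINED; cap++) {
			if (cap->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
				continue;
			if (cap->sym.xform_type == RTE_CRYPTO_SYM_XFORM_AUTH)
				printf("  auth algo %d: key %d-%d bytes\n",
						(int)cap->sym.auth.algo,
						cap->sym.auth.key_size.min,
						cap->sym.auth.key_size.max);
			else if (cap->sym.xform_type == RTE_CRYPTO_SYM_XFORM_CIPHER)
				printf("  cipher algo %d: key %d-%d bytes\n",
						(int)cap->sym.cipher.algo,
						cap->sym.cipher.key_size.min,
						cap->sym.cipher.key_size.max);
		}
	}

The RTE_CRYPTO_OP_TYPE_UNDEFINED sentinel comes from the RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST() macro that each PMD places at the end of its table in the diff below.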
---

diff --git a/drivers/crypto/aesni_gcm/aesni_gcm_pmd.c b/drivers/crypto/aesni_gcm/aesni_gcm_pmd.c
index 83aa272d1d..35577bca29 100644
--- a/drivers/crypto/aesni_gcm/aesni_gcm_pmd.c
+++ b/drivers/crypto/aesni_gcm/aesni_gcm_pmd.c
@@ -445,6 +445,24 @@ aesni_gcm_create(const char *name,
 	dev->dequeue_burst = aesni_gcm_pmd_dequeue_burst;
 	dev->enqueue_burst = aesni_gcm_pmd_enqueue_burst;
 
+	dev->feature_flags = RTE_CRYPTODEV_FF_SYMMETRIC_CRYPTO |
+			RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING |
+			RTE_CRYPTODEV_FF_CPU_AESNI;
+
+	switch (vector_mode) {
+	case RTE_AESNI_GCM_SSE:
+		dev->feature_flags |= RTE_CRYPTODEV_FF_CPU_SSE;
+		break;
+	case RTE_AESNI_GCM_AVX:
+		dev->feature_flags |= RTE_CRYPTODEV_FF_CPU_AVX;
+		break;
+	case RTE_AESNI_GCM_AVX2:
+		dev->feature_flags |= RTE_CRYPTODEV_FF_CPU_AVX2;
+		break;
+	default:
+		break;
+	}
+
 	/* Set vector instructions mode supported */
 	internals = dev->data->dev_private;

diff --git a/drivers/crypto/aesni_gcm/aesni_gcm_pmd_ops.c b/drivers/crypto/aesni_gcm/aesni_gcm_pmd_ops.c
index f865e0dd6d..387f8d100c 100644
--- a/drivers/crypto/aesni_gcm/aesni_gcm_pmd_ops.c
+++ b/drivers/crypto/aesni_gcm/aesni_gcm_pmd_ops.c
@@ -38,6 +38,55 @@
 
 #include "aesni_gcm_pmd_private.h"
 
+static const struct rte_cryptodev_capabilities aesni_gcm_pmd_capabilities[] = {
+	{	/* AES GCM (AUTH) */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.auth = {
+				.algo = RTE_CRYPTO_AUTH_AES_GCM,
+				.block_size = 16,
+				.key_size = {
+					.min = 16,
+					.max = 32,
+					.increment = 8
+				},
+				.digest_size = {
+					.min = 8,
+					.max = 16,
+					.increment = 4
+				},
+				.aad_size = {
+					.min = 8,
+					.max = 12,
+					.increment = 4
+				}
+			}
+		}
+	},
+	{	/* AES GCM (CIPHER) */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+			.cipher = {
+				.algo = RTE_CRYPTO_CIPHER_AES_GCM,
+				.block_size = 16,
+				.key_size = {
+					.min = 16,
+					.max = 32,
+					.increment = 8
+				},
+				.iv_size = {
+					.min = 16,
+					.max = 16,
+					.increment = 0
+				}
+			}
+		}
+	},
+	RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
+};
+
 /** Configure device */
 static int
 aesni_gcm_pmd_config(__rte_unused struct rte_cryptodev *dev)
@@ -107,6 +156,8 @@ aesni_gcm_pmd_info_get(struct rte_cryptodev *dev,
 	if (dev_info != NULL) {
 		dev_info->dev_type = dev->dev_type;
+		dev_info->feature_flags = dev->feature_flags;
+		dev_info->capabilities = aesni_gcm_pmd_capabilities;
 		dev_info->max_nb_queue_pairs = internals->max_nb_queue_pairs;
 		dev_info->sym.max_nb_sessions = internals->max_nb_sessions;

diff --git a/drivers/crypto/aesni_mb/rte_aesni_mb_pmd.c b/drivers/crypto/aesni_mb/rte_aesni_mb_pmd.c
index 9599cc491d..4e3885cf80 100644
--- a/drivers/crypto/aesni_mb/rte_aesni_mb_pmd.c
+++ b/drivers/crypto/aesni_mb/rte_aesni_mb_pmd.c
@@ -640,6 +640,24 @@ cryptodev_aesni_mb_create(const char *name, unsigned socket_id)
 	dev->dequeue_burst = aesni_mb_pmd_dequeue_burst;
 	dev->enqueue_burst = aesni_mb_pmd_enqueue_burst;
 
+	dev->feature_flags = RTE_CRYPTODEV_FF_SYMMETRIC_CRYPTO |
+			RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING |
+			RTE_CRYPTODEV_FF_CPU_AESNI;
+
+	switch (vector_mode) {
+	case RTE_AESNI_MB_SSE:
+		dev->feature_flags |= RTE_CRYPTODEV_FF_CPU_SSE;
+		break;
+	case RTE_AESNI_MB_AVX:
+		dev->feature_flags |= RTE_CRYPTODEV_FF_CPU_AVX;
+		break;
+	case RTE_AESNI_MB_AVX2:
+		dev->feature_flags |= RTE_CRYPTODEV_FF_CPU_AVX2;
+		break;
+	default:
+		break;
+	}
+
 	/* Set vector instructions mode supported */
 	internals = dev->data->dev_private;
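Since each software PMD now advertises the SIMD level it was built with, an application can select a device by feature mask rather than by name. A minimal sketch (not part of the patch), assuming devices have already been probed; find_aesni_dev() is hypothetical, and rte_cryptodev_count() is the library's existing device-count accessor:

	#include <stdint.h>
	#include <rte_cryptodev.h>

	/* Hypothetical selection loop: return the first device whose
	 * feature flags advertise both symmetric crypto and AES-NI. */
	static int
	find_aesni_dev(void)
	{
		const uint64_t req = RTE_CRYPTODEV_FF_SYMMETRIC_CRYPTO |
				RTE_CRYPTODEV_FF_CPU_AESNI;
		struct rte_cryptodev_info info;
		uint8_t dev_id;

		for (dev_id = 0; dev_id < rte_cryptodev_count(); dev_id++) {
			rte_cryptodev_info_get(dev_id, &info);
			if ((info.feature_flags & req) == req)
				return dev_id;
		}
		return -1;	/* no suitable device found */
	}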
diff --git a/drivers/crypto/aesni_mb/rte_aesni_mb_pmd_ops.c b/drivers/crypto/aesni_mb/rte_aesni_mb_pmd_ops.c
index b1dd103a55..5a439e6b0c 100644
--- a/drivers/crypto/aesni_mb/rte_aesni_mb_pmd_ops.c
+++ b/drivers/crypto/aesni_mb/rte_aesni_mb_pmd_ops.c
@@ -38,6 +38,179 @@
 
 #include "rte_aesni_mb_pmd_private.h"
 
+
+static const struct rte_cryptodev_capabilities aesni_mb_pmd_capabilities[] = {
+	{	/* MD5 HMAC */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.auth = {
+				.algo = RTE_CRYPTO_AUTH_MD5_HMAC,
+				.block_size = 64,
+				.key_size = {
+					.min = 64,
+					.max = 64,
+					.increment = 0
+				},
+				.digest_size = {
+					.min = 12,
+					.max = 12,
+					.increment = 0
+				},
+				.aad_size = { 0 }
+			}
+		}
+	},
+	{	/* SHA1 HMAC */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.auth = {
+				.algo = RTE_CRYPTO_AUTH_SHA1_HMAC,
+				.block_size = 64,
+				.key_size = {
+					.min = 64,
+					.max = 64,
+					.increment = 0
+				},
+				.digest_size = {
+					.min = 12,
+					.max = 12,
+					.increment = 0
+				},
+				.aad_size = { 0 }
+			}
+		}
+	},
+	{	/* SHA224 HMAC */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.auth = {
+				.algo = RTE_CRYPTO_AUTH_SHA224_HMAC,
+				.block_size = 64,
+				.key_size = {
+					.min = 64,
+					.max = 64,
+					.increment = 0
+				},
+				.digest_size = {
+					.min = 14,
+					.max = 14,
+					.increment = 0
+				},
+				.aad_size = { 0 }
+			}
+		}
+	},
+	{	/* SHA256 HMAC */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.auth = {
+				.algo = RTE_CRYPTO_AUTH_SHA256_HMAC,
+				.block_size = 64,
+				.key_size = {
+					.min = 64,
+					.max = 64,
+					.increment = 0
+				},
+				.digest_size = {
+					.min = 16,
+					.max = 16,
+					.increment = 0
+				},
+				.aad_size = { 0 }
+			}
+		}
+	},
+	{	/* SHA384 HMAC */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.auth = {
+				.algo = RTE_CRYPTO_AUTH_SHA384_HMAC,
+				.block_size = 128,
+				.key_size = {
+					.min = 128,
+					.max = 128,
+					.increment = 0
+				},
+				.digest_size = {
+					.min = 24,
+					.max = 24,
+					.increment = 0
+				},
+				.aad_size = { 0 }
+			}
+		}
+	},
+	{	/* SHA512 HMAC */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.auth = {
+				.algo = RTE_CRYPTO_AUTH_SHA512_HMAC,
+				.block_size = 128,
+				.key_size = {
+					.min = 128,
+					.max = 128,
+					.increment = 0
+				},
+				.digest_size = {
+					.min = 32,
+					.max = 32,
+					.increment = 0
+				},
+				.aad_size = { 0 }
+			}
+		}
+	},
+	{	/* AES XCBC MAC */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.auth = {
+				.algo = RTE_CRYPTO_AUTH_AES_XCBC_MAC,
+				.block_size = 16,
+				.key_size = {
+					.min = 16,
+					.max = 16,
+					.increment = 0
+				},
+				.digest_size = {
+					.min = 12,
+					.max = 12,
+					.increment = 0
+				},
+				.aad_size = { 0 }
+			}
+		}
+	},
+	{	/* AES CBC */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+			.cipher = {
+				.algo = RTE_CRYPTO_CIPHER_AES_CBC,
+				.block_size = 16,
+				.key_size = {
+					.min = 16,
+					.max = 32,
+					.increment = 8
+				},
+				.iv_size = {
+					.min = 16,
+					.max = 16,
+					.increment = 0
+				}
+			}
+		}
+	},
+	RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
+};
+
+
 /** Configure device */
 static int
 aesni_mb_pmd_config(__rte_unused struct rte_cryptodev *dev)
@@ -107,6 +280,8 @@ aesni_mb_pmd_info_get(struct rte_cryptodev *dev,
 	if (dev_info != NULL) {
 		dev_info->dev_type = dev->dev_type;
+		dev_info->feature_flags = dev->feature_flags;
+		dev_info->capabilities = aesni_mb_pmd_capabilities;
 		dev_info->max_nb_queue_pairs = internals->max_nb_queue_pairs;
 		dev_info->sym.max_nb_sessions = internals->max_nb_sessions;
 	}

diff --git a/drivers/crypto/null/null_crypto_pmd.c b/drivers/crypto/null/null_crypto_pmd.c
index a4bb152374..82b990c4be 100644
--- a/drivers/crypto/null/null_crypto_pmd.c
+++ b/drivers/crypto/null/null_crypto_pmd.c
@@ -215,6 +215,9 @@ cryptodev_null_create(const char *name,
 	dev->dequeue_burst = null_crypto_pmd_dequeue_burst;
 	dev->enqueue_burst = null_crypto_pmd_enqueue_burst;
 
+	dev->feature_flags = RTE_CRYPTODEV_FF_SYMMETRIC_CRYPTO |
+			RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING;
+
 	internals = dev->data->dev_private;
 	internals->max_nb_qpairs = init_params->max_nb_queue_pairs;

diff --git a/drivers/crypto/null/null_crypto_pmd_ops.c b/drivers/crypto/null/null_crypto_pmd_ops.c
index 6bf955ef75..39f8088d98 100644
--- a/drivers/crypto/null/null_crypto_pmd_ops.c
+++ b/drivers/crypto/null/null_crypto_pmd_ops.c
@@ -38,6 +38,51 @@
 
 #include "null_crypto_pmd_private.h"
 
+static const struct rte_cryptodev_capabilities null_crypto_pmd_capabilities[] = {
+	{	/* NULL (AUTH) */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.auth = {
+				.algo = RTE_CRYPTO_AUTH_NULL,
+				.block_size = 1,
+				.key_size = {
+					.min = 0,
+					.max = 0,
+					.increment = 0
+				},
+				.digest_size = {
+					.min = 0,
+					.max = 0,
+					.increment = 0
+				},
+				.aad_size = { 0 }
+			}
+		}
+	},
+	{	/* NULL (CIPHER) */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+			.cipher = {
+				.algo = RTE_CRYPTO_CIPHER_NULL,
+				.block_size = 1,
+				.key_size = {
+					.min = 0,
+					.max = 0,
+					.increment = 0
+				},
+				.iv_size = {
+					.min = 0,
+					.max = 0,
+					.increment = 0
+				}
+			}
+		}
+	},
+	RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
+};
+
 /** Configure device */
 static int
 null_crypto_pmd_config(__rte_unused struct rte_cryptodev *dev)
@@ -108,6 +153,8 @@ null_crypto_pmd_info_get(struct rte_cryptodev *dev,
 		dev_info->dev_type = dev->dev_type;
 		dev_info->max_nb_queue_pairs = internals->max_nb_qpairs;
 		dev_info->sym.max_nb_sessions = internals->max_nb_sessions;
+		dev_info->feature_flags = dev->feature_flags;
+		dev_info->capabilities = null_crypto_pmd_capabilities;
 	}
 }
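All of these tables describe sizes with {min, max, increment} triples, following the convention documented in the rte_cryptodev.h hunk later in this patch. A hypothetical checker for that convention (not part of the patch), where increment == 0 denotes a single fixed size:

	#include <stdbool.h>
	#include <stdint.h>

	/* Hypothetical validity test: a size is supported if it lies in
	 * [min, max] and, when increment is non-zero, is reachable from
	 * min in whole increments. */
	static bool
	size_in_range(uint16_t size, uint16_t min, uint16_t max,
			uint16_t increment)
	{
		if (size < min || size > max)
			return false;
		if (increment == 0)
			return size == min;	/* fixed size only */
		return ((size - min) % increment) == 0;
	}

For example, a 24-byte AES key tested against the {16, 32, 8} key_size range advertised above is accepted, while a 20-byte key is rejected.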
diff --git a/drivers/crypto/qat/qat_crypto.c b/drivers/crypto/qat/qat_crypto.c
index f267da5c41..29c1fe5276 100644
--- a/drivers/crypto/qat/qat_crypto.c
+++ b/drivers/crypto/qat/qat_crypto.c
@@ -68,6 +68,204 @@
 
 #define BYTE_LENGTH	8
 
+static const struct rte_cryptodev_capabilities qat_pmd_capabilities[] = {
+	{	/* SHA1 HMAC */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.auth = {
+				.algo = RTE_CRYPTO_AUTH_SHA1_HMAC,
+				.block_size = 64,
+				.key_size = {
+					.min = 64,
+					.max = 64,
+					.increment = 0
+				},
+				.digest_size = {
+					.min = 20,
+					.max = 20,
+					.increment = 0
+				},
+				.aad_size = { 0 }
+			}
+		}
+	},
+	{	/* SHA256 HMAC */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.auth = {
+				.algo = RTE_CRYPTO_AUTH_SHA256_HMAC,
+				.block_size = 64,
+				.key_size = {
+					.min = 64,
+					.max = 64,
+					.increment = 0
+				},
+				.digest_size = {
+					.min = 32,
+					.max = 32,
+					.increment = 0
+				},
+				.aad_size = { 0 }
+			}
+		}
+	},
+	{	/* SHA512 HMAC */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.auth = {
+				.algo = RTE_CRYPTO_AUTH_SHA512_HMAC,
+				.block_size = 128,
+				.key_size = {
+					.min = 128,
+					.max = 128,
+					.increment = 0
+				},
+				.digest_size = {
+					.min = 64,
+					.max = 64,
+					.increment = 0
+				},
+				.aad_size = { 0 }
+			}
+		}
+	},
+	{	/* AES XCBC MAC */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.auth = {
+				.algo = RTE_CRYPTO_AUTH_AES_XCBC_MAC,
+				.block_size = 16,
+				.key_size = {
+					.min = 16,
+					.max = 16,
+					.increment = 0
+				},
+				.digest_size = {
+					.min = 16,
+					.max = 16,
+					.increment = 0
+				},
+				.aad_size = { 0 }
+			}
+		}
+	},
+	{	/* AES GCM (AUTH) */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.auth = {
+				.algo = RTE_CRYPTO_AUTH_AES_GCM,
+				.block_size = 16,
+				.key_size = {
+					.min = 16,
+					.max = 32,
+					.increment = 8
+				},
+				.digest_size = {
+					.min = 8,
+					.max = 16,
+					.increment = 4
+				},
+				.aad_size = {
+					.min = 8,
+					.max = 12,
+					.increment = 4
+				}
+			}
+		}
+	},
+	{	/* SNOW3G (UIA2) */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.auth = {
+				.algo = RTE_CRYPTO_AUTH_SNOW3G_UIA2,
+				.block_size = 16,
+				.key_size = {
+					.min = 16,
+					.max = 16,
+					.increment = 0
+				},
+				.digest_size = {
+					.min = 4,
+					.max = 4,
+					.increment = 0
+				},
+				.aad_size = {
+					.min = 16,
+					.max = 16,
+					.increment = 0
+				}
+			}
+		}
+	},
+	{	/* AES GCM (CIPHER) */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+			.cipher = {
+				.algo = RTE_CRYPTO_CIPHER_AES_GCM,
+				.block_size = 16,
+				.key_size = {
+					.min = 16,
+					.max = 32,
+					.increment = 8
+				},
+				.iv_size = {
+					.min = 16,
+					.max = 16,
+					.increment = 0
+				}
+			}
+		}
+	},
+	{	/* AES CBC */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+			.cipher = {
+				.algo = RTE_CRYPTO_CIPHER_AES_CBC,
+				.block_size = 16,
+				.key_size = {
+					.min = 16,
+					.max = 32,
+					.increment = 8
+				},
+				.iv_size = {
+					.min = 16,
+					.max = 16,
+					.increment = 0
+				}
+			}
+		}
+	},
+	{	/* SNOW3G (UEA2) */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+			.cipher = {
+				.algo = RTE_CRYPTO_CIPHER_SNOW3G_UEA2,
+				.block_size = 16,
+				.key_size = {
+					.min = 16,
+					.max = 16,
+					.increment = 0
+				},
+				.iv_size = {
+					.min = 16,
+					.max = 16,
+					.increment = 0
+				}
+			}
+		}
+	},
+	RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
+};
+
 static inline uint32_t
 adf_modulo(uint32_t data, uint32_t shift);
@@ -652,7 +850,8 @@ void qat_dev_info_get(__rte_unused struct rte_cryptodev *dev,
 		info->max_nb_queue_pairs =
 				ADF_NUM_SYM_QPS_PER_BUNDLE *
 				ADF_NUM_BUNDLES_PER_DEV;
-
+		info->feature_flags = dev->feature_flags;
+		info->capabilities = qat_pmd_capabilities;
 		info->sym.max_nb_sessions = internals->max_nb_sessions;
 		info->dev_type = RTE_CRYPTODEV_QAT_SYM_PMD;
 	}
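Tables like the QAT one above are meant to be scanned, not indexed. A hypothetical lookup (not part of the patch) that finds a device's entry for a given authentication algorithm, e.g. to read the advertised SHA1-HMAC digest range:

	#include <stddef.h>
	#include <rte_cryptodev.h>

	/* Hypothetical helper: scan a device's capability array for a
	 * symmetric auth transform matching the requested algorithm. */
	static const struct rte_cryptodev_capabilities *
	find_auth_capability(const struct rte_cryptodev_info *info,
			enum rte_crypto_auth_algorithm algo)
	{
		const struct rte_cryptodev_capabilities *cap;

		for (cap = info->capabilities;
				cap->op != RTE_CRYPTO_OP_TYPE_UNDEFINED; cap++) {
			if (cap->op == RTE_CRYPTO_OP_TYPE_SYMMETRIC &&
					cap->sym.xform_type ==
						RTE_CRYPTO_SYM_XFORM_AUTH &&
					cap->sym.auth.algo == algo)
				return cap;
		}
		return NULL;	/* algorithm not supported */
	}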
diff --git a/drivers/crypto/qat/rte_qat_cryptodev.c b/drivers/crypto/qat/rte_qat_cryptodev.c
index 5e51aca3c6..a7912f5a23 100644
--- a/drivers/crypto/qat/rte_qat_cryptodev.c
+++ b/drivers/crypto/qat/rte_qat_cryptodev.c
@@ -95,6 +95,9 @@ crypto_qat_dev_init(__attribute__((unused)) struct rte_cryptodev_driver *crypto_
 	cryptodev->enqueue_burst = qat_pmd_enqueue_op_burst;
 	cryptodev->dequeue_burst = qat_pmd_dequeue_op_burst;
 
+	cryptodev->feature_flags = RTE_CRYPTODEV_FF_SYMMETRIC_CRYPTO |
+			RTE_CRYPTODEV_FF_HW_ACCELERATED |
+			RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING;
 
 	internals = cryptodev->data->dev_private;
 	internals->max_nb_sessions = RTE_QAT_PMD_MAX_NB_SESSIONS;

diff --git a/drivers/crypto/snow3g/rte_snow3g_pmd.c b/drivers/crypto/snow3g/rte_snow3g_pmd.c
index 6f84e5c494..97c7fb5137 100644
--- a/drivers/crypto/snow3g/rte_snow3g_pmd.c
+++ b/drivers/crypto/snow3g/rte_snow3g_pmd.c
@@ -490,6 +490,9 @@ cryptodev_snow3g_create(const char *name,
 	dev->dequeue_burst = snow3g_pmd_dequeue_burst;
 	dev->enqueue_burst = snow3g_pmd_enqueue_burst;
 
+	dev->feature_flags = RTE_CRYPTODEV_FF_SYMMETRIC_CRYPTO |
+			RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING;
+
 	internals = dev->data->dev_private;
 	internals->max_nb_queue_pairs = init_params->max_nb_queue_pairs;

diff --git a/drivers/crypto/snow3g/rte_snow3g_pmd_ops.c b/drivers/crypto/snow3g/rte_snow3g_pmd_ops.c
index 5643323ce4..74eee23031 100644
--- a/drivers/crypto/snow3g/rte_snow3g_pmd_ops.c
+++ b/drivers/crypto/snow3g/rte_snow3g_pmd_ops.c
@@ -38,6 +38,55 @@
 
 #include "rte_snow3g_pmd_private.h"
 
+static const struct rte_cryptodev_capabilities snow3g_pmd_capabilities[] = {
+	{	/* SNOW3G (UIA2) */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.auth = {
+				.algo = RTE_CRYPTO_AUTH_SNOW3G_UIA2,
+				.block_size = 16,
+				.key_size = {
+					.min = 16,
+					.max = 16,
+					.increment = 0
+				},
+				.digest_size = {
+					.min = 4,
+					.max = 4,
+					.increment = 0
+				},
+				.aad_size = {
+					.min = 16,
+					.max = 16,
+					.increment = 0
+				}
+			}
+		}
+	},
+	{	/* SNOW3G (UEA2) */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+			.cipher = {
+				.algo = RTE_CRYPTO_CIPHER_SNOW3G_UEA2,
+				.block_size = 16,
+				.key_size = {
+					.min = 16,
+					.max = 16,
+					.increment = 0
+				},
+				.iv_size = {
+					.min = 16,
+					.max = 16,
+					.increment = 0
+				}
+			}
+		}
+	},
+	RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
+};
+
 /** Configure device */
 static int
 snow3g_pmd_config(__rte_unused struct rte_cryptodev *dev)
@@ -109,6 +158,8 @@ snow3g_pmd_info_get(struct rte_cryptodev *dev,
 		dev_info->dev_type = dev->dev_type;
 		dev_info->max_nb_queue_pairs = internals->max_nb_queue_pairs;
 		dev_info->sym.max_nb_sessions = internals->max_nb_sessions;
+		dev_info->feature_flags = dev->feature_flags;
+		dev_info->capabilities = snow3g_pmd_capabilities;
 	}
 }
"CPU_SSE"; + case RTE_CRYPTODEV_FF_CPU_AVX: + return "CPU_AVX"; + case RTE_CRYPTODEV_FF_CPU_AVX2: + return "CPU_AVX2"; + case RTE_CRYPTODEV_FF_CPU_AESNI: + return "CPU_AESNI"; + case RTE_CRYPTODEV_FF_HW_ACCELERATED: + return "HW_ACCELERATED"; + + default: + return NULL; + } +} + + int rte_cryptodev_create_vdev(const char *name, const char *args) { diff --git a/lib/librte_cryptodev/rte_cryptodev.h b/lib/librte_cryptodev/rte_cryptodev.h index eb85c46dcc..b599c9501e 100644 --- a/lib/librte_cryptodev/rte_cryptodev.h +++ b/lib/librte_cryptodev/rte_cryptodev.h @@ -98,12 +98,145 @@ extern const char **rte_cyptodev_names; #define CDEV_PMD_TRACE(fmt, args...) #endif +/** + * Symmetric Crypto Capability + */ +struct rte_cryptodev_symmetric_capability { + enum rte_crypto_sym_xform_type xform_type; + /**< Transform type : Authentication / Cipher */ + union { + struct { + enum rte_crypto_auth_algorithm algo; + /**< authentication algorithm */ + uint16_t block_size; + /**< algorithm block size */ + struct { + uint16_t min; /**< minimum key size */ + uint16_t max; /**< maximum key size */ + uint16_t increment; + /**< if a range of sizes are supported, + * this parameter is used to indicate + * increments in byte size that are supported + * between the minimum and maximum */ + } key_size; + /**< auth key size range */ + struct { + uint16_t min; /**< minimum digest size */ + uint16_t max; /**< maximum digest size */ + uint16_t increment; + /**< if a range of sizes are supported, + * this parameter is used to indicate + * increments in byte size that are supported + * between the minimum and maximum */ + } digest_size; + /**< digest size range */ + struct { + uint16_t min; /**< minimum aad size */ + uint16_t max; /**< maximum aad size */ + uint16_t increment; + /**< if a range of sizes are supported, + * this parameter is used to indicate + * increments in byte size that are supported + * between the minimum and maximum */ + } aad_size; + /**< Additional authentication data size range */ + } auth; + /**< Symmetric Authentication transform capabilities */ + struct { + enum rte_crypto_cipher_algorithm algo; + /**< cipher algorithm */ + uint16_t block_size; + /**< algorithm block size */ + struct { + uint16_t min; /**< minimum key size */ + uint16_t max; /**< maximum key size */ + uint16_t increment; + /**< if a range of sizes are supported, + * this parameter is used to indicate + * increments in byte size that are supported + * between the minimum and maximum */ + } key_size; + /**< cipher key size range */ + struct { + uint16_t min; /**< minimum iv size */ + uint16_t max; /**< maximum iv size */ + uint16_t increment; + /**< if a range of sizes are supported, + * this parameter is used to indicate + * increments in byte size that are supported + * between the minimum and maximum */ + } iv_size; + /**< Initialisation vector data size range */ + } cipher; + /**< Symmetric Cipher transform capabilities */ + }; +}; + +/** Structure used to capture a capability of a crypto device */ +struct rte_cryptodev_capabilities { + enum rte_crypto_op_type op; + /**< Operation type */ + + union { + struct rte_cryptodev_symmetric_capability sym; + /**< Symmetric operation capability parameters */ + }; +}; + +/** Macro used at end of crypto PMD list */ +#define RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST() \ + { RTE_CRYPTO_OP_TYPE_UNDEFINED } + + +/** + * Crypto device supported feature flags + * + * Note: + * New features flags should be added to the end of the list + * + * Keep these flags synchronised with 
diff --git a/lib/librte_cryptodev/rte_cryptodev.h b/lib/librte_cryptodev/rte_cryptodev.h
index eb85c46dcc..b599c9501e 100644
--- a/lib/librte_cryptodev/rte_cryptodev.h
+++ b/lib/librte_cryptodev/rte_cryptodev.h
@@ -98,12 +98,145 @@ extern const char **rte_cyptodev_names;
 #define CDEV_PMD_TRACE(fmt, args...)
 #endif
 
+/**
+ * Symmetric Crypto Capability
+ */
+struct rte_cryptodev_symmetric_capability {
+	enum rte_crypto_sym_xform_type xform_type;
+	/**< Transform type: Authentication / Cipher */
+
+	union {
+		struct {
+			enum rte_crypto_auth_algorithm algo;
+			/**< authentication algorithm */
+			uint16_t block_size;
+			/**< algorithm block size */
+			struct {
+				uint16_t min;	/**< minimum key size */
+				uint16_t max;	/**< maximum key size */
+				uint16_t increment;
+				/**< if a range of sizes is supported,
+				 * this parameter is used to indicate
+				 * increments in byte size that are supported
+				 * between the minimum and maximum
+				 */
+			} key_size;
+			/**< auth key size range */
+			struct {
+				uint16_t min;	/**< minimum digest size */
+				uint16_t max;	/**< maximum digest size */
+				uint16_t increment;
+				/**< if a range of sizes is supported,
+				 * this parameter is used to indicate
+				 * increments in byte size that are supported
+				 * between the minimum and maximum
+				 */
+			} digest_size;
+			/**< digest size range */
+			struct {
+				uint16_t min;	/**< minimum aad size */
+				uint16_t max;	/**< maximum aad size */
+				uint16_t increment;
+				/**< if a range of sizes is supported,
+				 * this parameter is used to indicate
+				 * increments in byte size that are supported
+				 * between the minimum and maximum
+				 */
+			} aad_size;
+			/**< Additional authentication data size range */
+		} auth;
+		/**< Symmetric Authentication transform capabilities */
+		struct {
+			enum rte_crypto_cipher_algorithm algo;
+			/**< cipher algorithm */
+			uint16_t block_size;
+			/**< algorithm block size */
+			struct {
+				uint16_t min;	/**< minimum key size */
+				uint16_t max;	/**< maximum key size */
+				uint16_t increment;
+				/**< if a range of sizes is supported,
+				 * this parameter is used to indicate
+				 * increments in byte size that are supported
+				 * between the minimum and maximum
+				 */
+			} key_size;
+			/**< cipher key size range */
+			struct {
+				uint16_t min;	/**< minimum iv size */
+				uint16_t max;	/**< maximum iv size */
+				uint16_t increment;
+				/**< if a range of sizes is supported,
+				 * this parameter is used to indicate
+				 * increments in byte size that are supported
+				 * between the minimum and maximum
+				 */
+			} iv_size;
+			/**< Initialisation vector data size range */
+		} cipher;
+		/**< Symmetric Cipher transform capabilities */
+	};
+};
+
+/** Structure used to capture a capability of a crypto device */
+struct rte_cryptodev_capabilities {
+	enum rte_crypto_op_type op;
+	/**< Operation type */
+
+	union {
+		struct rte_cryptodev_symmetric_capability sym;
+		/**< Symmetric operation capability parameters */
+	};
+};
+
+/** Macro used at end of crypto PMD list */
+#define RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST() \
+	{ RTE_CRYPTO_OP_TYPE_UNDEFINED }
+
+
+/**
+ * Crypto device supported feature flags
+ *
+ * Note:
+ * New feature flags should be added to the end of the list
+ *
+ * Keep these flags synchronised with rte_cryptodev_get_feature_name()
+ */
+#define RTE_CRYPTODEV_FF_SYMMETRIC_CRYPTO	(1ULL << 0)
+/**< Symmetric crypto operations are supported */
+#define RTE_CRYPTODEV_FF_ASYMMETRIC_CRYPTO	(1ULL << 1)
+/**< Asymmetric crypto operations are supported */
+#define RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING	(1ULL << 2)
+/**< Chaining of symmetric crypto operations is supported */
+#define RTE_CRYPTODEV_FF_CPU_SSE		(1ULL << 3)
+/**< Utilises CPU SIMD SSE instructions */
+#define RTE_CRYPTODEV_FF_CPU_AVX		(1ULL << 4)
+/**< Utilises CPU SIMD AVX instructions */
+#define RTE_CRYPTODEV_FF_CPU_AVX2		(1ULL << 5)
+/**< Utilises CPU SIMD AVX2 instructions */
+#define RTE_CRYPTODEV_FF_CPU_AESNI		(1ULL << 6)
+/**< Utilises CPU AES-NI instructions */
+#define RTE_CRYPTODEV_FF_HW_ACCELERATED		(1ULL << 7)
+/**< Operations are off-loaded to an external hardware accelerator */
+
+
+/**
+ * Get the name of a crypto device feature flag
+ *
+ * @param	flag	The mask describing the flag.
+ *
+ * @return
+ *   The name of this flag, or NULL if it's not a valid feature flag.
+ */
+extern const char *
+rte_cryptodev_get_feature_name(uint64_t flag);
+
 /**  Crypto device information */
 struct rte_cryptodev_info {
 	const char *driver_name;		/**< Driver name. */
 	enum rte_cryptodev_type dev_type;	/**< Device type */
 	struct rte_pci_device *pci_dev;		/**< PCI information. */
 
+	uint64_t feature_flags;			/**< Feature flags */
+
+	const struct rte_cryptodev_capabilities *capabilities;
+	/**< Array of the device's supported capabilities */
+
 	unsigned max_nb_queue_pairs;
 	/**< Maximum number of queues pairs supported by device. */
@@ -540,6 +673,8 @@ typedef uint16_t (*enqueue_pkt_burst_t)(void *qp,
 /**< Enqueue packets for processing on queue pair of a device. */
 
+
+
 struct rte_cryptodev_callback;
 
 /** Structure to keep track of registered callbacks */
@@ -558,6 +693,8 @@ struct rte_cryptodev {
 	/**< Pointer to device data */
 	struct rte_cryptodev_ops *dev_ops;
 	/**< Functions exported by PMD */
+	uint64_t feature_flags;
+	/**< Supported features */
 	struct rte_pci_device *pci_dev;
 	/**< PCI info. supplied by probing */

diff --git a/lib/librte_cryptodev/rte_cryptodev_version.map b/lib/librte_cryptodev/rte_cryptodev_version.map
index 6de6c7de0a..41004e1c39 100644
--- a/lib/librte_cryptodev/rte_cryptodev_version.map
+++ b/lib/librte_cryptodev/rte_cryptodev_version.map
@@ -10,6 +10,7 @@ DPDK_16.04 {
 	rte_cryptodev_configure;
 	rte_cryptodev_create_vdev;
 	rte_cryptodev_get_dev_id;
+	rte_cryptodev_get_feature_name;
 	rte_cryptodev_info_get;
 	rte_cryptodev_pmd_allocate;
 	rte_cryptodev_pmd_callback_process;