diff --git a/lib/librte_cryptodev/rte_cryptodev.c b/lib/librte_cryptodev/rte_cryptodev.c
index 65d61a3ef5..e37b83afd2 100644
--- a/lib/librte_cryptodev/rte_cryptodev.c
+++ b/lib/librte_cryptodev/rte_cryptodev.c
@@ -36,10 +36,13 @@
 #include <rte_errno.h>
 #include <rte_spinlock.h>
 #include <rte_string_fns.h>
+#include <rte_compat.h>
+#include <rte_function_versioning.h>
 
 #include "rte_crypto.h"
 #include "rte_cryptodev.h"
 #include "rte_cryptodev_pmd.h"
+#include "rte_cryptodev_trace.h"
 
 static uint8_t nb_drivers;
@@ -56,6 +59,14 @@ static struct rte_cryptodev_global cryptodev_globals = {
 /* spinlock for crypto device callbacks */
 static rte_spinlock_t rte_cryptodev_cb_lock = RTE_SPINLOCK_INITIALIZER;
 
+static const struct rte_cryptodev_capabilities
+		cryptodev_undefined_capabilities[] = {
+		RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
+};
+
+static struct rte_cryptodev_capabilities
+		*capability_copy[RTE_CRYPTO_MAX_DEVS];
+static uint8_t is_capability_checked[RTE_CRYPTO_MAX_DEVS];
 
 /**
  * The user application callback description.
@@ -151,6 +162,7 @@ const char *
 rte_crypto_aead_algorithm_strings[] = {
 	[RTE_CRYPTO_AEAD_AES_CCM]	= "aes-ccm",
 	[RTE_CRYPTO_AEAD_AES_GCM]	= "aes-gcm",
+	[RTE_CRYPTO_AEAD_CHACHA20_POLY1305] = "chacha20-poly1305"
 };
 
 /**
@@ -279,15 +291,15 @@ rte_crypto_auth_operation_strings[] = {
 		[RTE_CRYPTO_AUTH_OP_GENERATE]	= "generate"
 };
 
-const struct rte_cryptodev_symmetric_capability *
-rte_cryptodev_sym_capability_get(uint8_t dev_id,
+const struct rte_cryptodev_symmetric_capability __vsym *
+rte_cryptodev_sym_capability_get_v20(uint8_t dev_id,
 		const struct rte_cryptodev_sym_capability_idx *idx)
 {
 	const struct rte_cryptodev_capabilities *capability;
 	struct rte_cryptodev_info dev_info;
 	int i = 0;
 
-	rte_cryptodev_info_get(dev_id, &dev_info);
+	rte_cryptodev_info_get_v20(dev_id, &dev_info);
 
 	while ((capability = &dev_info.capabilities[i++])->op !=
 			RTE_CRYPTO_OP_TYPE_UNDEFINED) {
@@ -311,8 +323,47 @@ rte_cryptodev_sym_capability_get(uint8_t dev_id,
 	}
 
 	return NULL;
+}
+VERSION_SYMBOL(rte_cryptodev_sym_capability_get, _v20, 20.0);
+
+const struct rte_cryptodev_symmetric_capability __vsym *
+rte_cryptodev_sym_capability_get_v21(uint8_t dev_id,
+		const struct rte_cryptodev_sym_capability_idx *idx)
+{
+	const struct rte_cryptodev_capabilities *capability;
+	struct rte_cryptodev_info dev_info;
+	int i = 0;
+
+	rte_cryptodev_info_get(dev_id, &dev_info);
+
+	while ((capability = &dev_info.capabilities[i++])->op !=
+			RTE_CRYPTO_OP_TYPE_UNDEFINED) {
+		if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
+			continue;
+
+		if (capability->sym.xform_type != idx->type)
+			continue;
+
+		if (idx->type == RTE_CRYPTO_SYM_XFORM_AUTH &&
+			capability->sym.auth.algo == idx->algo.auth)
+			return &capability->sym;
+
+		if (idx->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
+			capability->sym.cipher.algo == idx->algo.cipher)
+			return &capability->sym;
+
+		if (idx->type == RTE_CRYPTO_SYM_XFORM_AEAD &&
+			capability->sym.aead.algo == idx->algo.aead)
+			return &capability->sym;
+	}
+
+	return NULL;
 }
+MAP_STATIC_SYMBOL(const struct rte_cryptodev_symmetric_capability *
+		rte_cryptodev_sym_capability_get(uint8_t dev_id,
+		const struct rte_cryptodev_sym_capability_idx *idx),
+		rte_cryptodev_sym_capability_get_v21);
+BIND_DEFAULT_SYMBOL(rte_cryptodev_sym_capability_get, _v21, 21);
 
 static int
 param_range_check(uint16_t size, const struct
 		rte_crypto_param_range *range)
@@ -495,6 +546,12 @@ rte_cryptodev_get_feature_name(uint64_t flag)
 		return "DIGEST_ENCRYPTED";
 	case RTE_CRYPTODEV_FF_SYM_CPU_CRYPTO:
 		return "SYM_CPU_CRYPTO";
+	case RTE_CRYPTODEV_FF_ASYM_SESSIONLESS:
+		return "ASYM_SESSIONLESS";
+	case RTE_CRYPTODEV_FF_SYM_SESSIONLESS:
+		return "SYM_SESSIONLESS";
+	case RTE_CRYPTODEV_FF_NON_BYTE_ALIGNED_DATA:
+		return "NON_BYTE_ALIGNED_DATA";
 	default:
 		return NULL;
 	}
@@ -912,6 +969,7 @@ rte_cryptodev_configure(uint8_t dev_id, struct rte_cryptodev_config *config)
 		return diag;
 	}
 
+	rte_cryptodev_trace_configure(dev_id, config);
 	return (*dev->dev_ops->dev_configure)(dev, config);
 }
@@ -940,6 +998,7 @@ rte_cryptodev_start(uint8_t dev_id)
 	}
 
 	diag = (*dev->dev_ops->dev_start)(dev);
+	rte_cryptodev_trace_start(dev_id, diag);
 	if (diag == 0)
 		dev->data->dev_started = 1;
 	else
@@ -969,6 +1028,7 @@ rte_cryptodev_stop(uint8_t dev_id)
 	}
 
 	(*dev->dev_ops->dev_stop)(dev);
+	rte_cryptodev_trace_stop(dev_id);
 	dev->data->dev_started = 0;
 }
@@ -1005,6 +1065,13 @@ rte_cryptodev_close(uint8_t dev_id)
 
 	RTE_FUNC_PTR_OR_ERR_RET(*dev->dev_ops->dev_close, -ENOTSUP);
 	retval = (*dev->dev_ops->dev_close)(dev);
+	rte_cryptodev_trace_close(dev_id, retval);
+
+	if (capability_copy[dev_id]) {
+		free(capability_copy[dev_id]);
+		capability_copy[dev_id] = NULL;
+	}
+	is_capability_checked[dev_id] = 0;
 
 	if (retval < 0)
 		return retval;
@@ -1074,6 +1141,7 @@ rte_cryptodev_queue_pair_setup(uint8_t dev_id, uint16_t queue_pair_id,
 
 	RTE_FUNC_PTR_OR_ERR_RET(*dev->dev_ops->queue_pair_setup, -ENOTSUP);
 
+	rte_cryptodev_trace_queue_pair_setup(dev_id, queue_pair_id, qp_conf);
 	return (*dev->dev_ops->queue_pair_setup)(dev, queue_pair_id, qp_conf,
 			socket_id);
 }
@@ -1118,9 +1186,64 @@ rte_cryptodev_stats_reset(uint8_t dev_id)
 	(*dev->dev_ops->stats_reset)(dev);
 }
 
+static void
+get_v20_capabilities(uint8_t dev_id, struct rte_cryptodev_info *dev_info)
+{
+	const struct rte_cryptodev_capabilities *capability;
+	uint8_t found_invalid_capa = 0;
+	uint8_t counter = 0;
+
+	for (capability = dev_info->capabilities;
+			capability->op != RTE_CRYPTO_OP_TYPE_UNDEFINED;
+			++capability, ++counter) {
+		if (capability->op == RTE_CRYPTO_OP_TYPE_SYMMETRIC &&
+				capability->sym.xform_type ==
+					RTE_CRYPTO_SYM_XFORM_AEAD
+				&& capability->sym.aead.algo >=
+				RTE_CRYPTO_AEAD_CHACHA20_POLY1305) {
+			found_invalid_capa = 1;
+			counter--;
+		}
+	}
+	is_capability_checked[dev_id] = 1;
+	if (!found_invalid_capa)
+		return;
+	/* allocate one extra element for the end-of-list marker */
+	capability_copy[dev_id] = malloc((counter + 1) *
+		sizeof(struct rte_cryptodev_capabilities));
+	if (capability_copy[dev_id] == NULL) {
+		/*
+		 * error case - no memory to store the trimmed
+		 * list, so have to return an empty list
+		 */
+		dev_info->capabilities =
+			cryptodev_undefined_capabilities;
+		is_capability_checked[dev_id] = 0;
+	} else {
+		counter = 0;
+		for (capability = dev_info->capabilities;
+				capability->op !=
+				RTE_CRYPTO_OP_TYPE_UNDEFINED;
+				capability++) {
+			if (!(capability->op ==
+					RTE_CRYPTO_OP_TYPE_SYMMETRIC
+					&& capability->sym.xform_type ==
+					RTE_CRYPTO_SYM_XFORM_AEAD
+					&& capability->sym.aead.algo >=
+					RTE_CRYPTO_AEAD_CHACHA20_POLY1305)) {
+				capability_copy[dev_id][counter++] =
+					*capability;
+			}
+		}
+		/* copy the end-of-list marker to terminate the trimmed list */
+		capability_copy[dev_id][counter] = *capability;
+		dev_info->capabilities =
+			capability_copy[dev_id];
+	}
+}
 
-void
-rte_cryptodev_info_get(uint8_t dev_id, struct rte_cryptodev_info *dev_info)
+void __vsym
+rte_cryptodev_info_get_v20(uint8_t dev_id, struct rte_cryptodev_info *dev_info)
 {
 	struct rte_cryptodev *dev;
 
@@ -1136,10 +1256,40 @@ rte_cryptodev_info_get(uint8_t dev_id, struct rte_cryptodev_info *dev_info)
 	RTE_FUNC_PTR_OR_RET(*dev->dev_ops->dev_infos_get);
 	(*dev->dev_ops->dev_infos_get)(dev, dev_info);
 
+	if (capability_copy[dev_id] == NULL) {
+		if (!is_capability_checked[dev_id])
+			get_v20_capabilities(dev_id, dev_info);
+	} else
+		dev_info->capabilities = capability_copy[dev_id];
+
 	dev_info->driver_name = dev->device->driver->name;
 	dev_info->device = dev->device;
 }
+VERSION_SYMBOL(rte_cryptodev_info_get, _v20, 20.0);
+
+void __vsym
+rte_cryptodev_info_get_v21(uint8_t dev_id, struct rte_cryptodev_info *dev_info)
+{
+	struct rte_cryptodev *dev;
+
+	if (!rte_cryptodev_pmd_is_valid_dev(dev_id)) {
+		CDEV_LOG_ERR("Invalid dev_id=%d", dev_id);
+		return;
+	}
+
+	dev = &rte_crypto_devices[dev_id];
+
+	memset(dev_info, 0, sizeof(struct rte_cryptodev_info));
+
+	RTE_FUNC_PTR_OR_RET(*dev->dev_ops->dev_infos_get);
+	(*dev->dev_ops->dev_infos_get)(dev, dev_info);
+
+	dev_info->driver_name = dev->device->driver->name;
+	dev_info->device = dev->device;
+}
+MAP_STATIC_SYMBOL(void rte_cryptodev_info_get(uint8_t dev_id,
+		struct rte_cryptodev_info *dev_info), rte_cryptodev_info_get_v21);
+BIND_DEFAULT_SYMBOL(rte_cryptodev_info_get, _v21, 21);
 
 int
 rte_cryptodev_callback_register(uint8_t dev_id,
@@ -1295,6 +1445,7 @@ rte_cryptodev_sym_session_init(uint8_t dev_id,
 		}
 	}
 
+	rte_cryptodev_trace_sym_session_init(dev_id, sess, xforms, mp);
 	sess->sess_data[index].refcnt++;
 	return 0;
 }
@@ -1336,6 +1487,7 @@ rte_cryptodev_asym_session_init(uint8_t dev_id,
 		}
 	}
 
+	rte_cryptodev_trace_asym_session_init(dev_id, sess, xforms, mp);
 	return 0;
 }
@@ -1376,6 +1528,8 @@ rte_cryptodev_sym_session_pool_create(const char *name, uint32_t nb_elts,
 	pool_priv->nb_drivers = nb_drivers;
 	pool_priv->user_data_sz = user_data_size;
 
+	rte_cryptodev_trace_sym_session_pool_create(name, nb_elts,
+		elt_size, cache_size, user_data_size, mp);
 	return mp;
 }
@@ -1420,6 +1574,7 @@ rte_cryptodev_sym_session_create(struct rte_mempool *mp)
 	memset(sess->sess_data, 0,
 			rte_cryptodev_sym_session_data_size(sess));
 
+	rte_cryptodev_trace_sym_session_create(mp, sess);
 	return sess;
 }
@@ -1439,6 +1594,7 @@ rte_cryptodev_asym_session_create(struct rte_mempool *mp)
 	 */
 	memset(sess, 0, (sizeof(void *) * nb_drivers) + sizeof(uint8_t));
 
+	rte_cryptodev_trace_asym_session_create(mp, sess);
 	return sess;
 }
@@ -1469,6 +1625,7 @@ rte_cryptodev_sym_session_clear(uint8_t dev_id,
 
 	dev->dev_ops->sym_session_clear(dev, sess);
 
+	rte_cryptodev_trace_sym_session_clear(dev_id, sess);
 	return 0;
 }
@@ -1492,6 +1649,7 @@ rte_cryptodev_asym_session_clear(uint8_t dev_id,
 
 	dev->dev_ops->asym_session_clear(dev, sess);
 
+	rte_cryptodev_trace_asym_session_clear(dev_id, sess);
 	return 0;
 }
@@ -1514,6 +1672,7 @@ rte_cryptodev_sym_session_free(struct rte_cryptodev_sym_session *sess)
 
 	sess_mp = rte_mempool_from_obj(sess);
 	rte_mempool_put(sess_mp, sess);
 
+	rte_cryptodev_trace_sym_session_free(sess);
 	return 0;
 }
@@ -1538,6 +1697,7 @@ rte_cryptodev_asym_session_free(struct rte_cryptodev_asym_session *sess)
 
 	sess_mp = rte_mempool_from_obj(sess);
 	rte_mempool_put(sess_mp, sess);
 
+	rte_cryptodev_trace_asym_session_free(sess);
 	return 0;
 }
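
A minimal usage sketch (not part of the patch above), assuming a device already probed and configured at some dev_id: when linked against the default (v21) symbols, an application sees the new Chacha20-Poly1305 AEAD capability, while a binary bound to the 20.0 symbols resolves to rte_cryptodev_info_get_v20() and gets the trimmed list that hides it.

#include <rte_cryptodev.h>

/* Return non-zero when the device advertises AEAD Chacha20-Poly1305.
 * With the default ABI this resolves to
 * rte_cryptodev_sym_capability_get_v21(); callers bound to the 20.0
 * ABI can never see this capability.
 */
static int
chacha_poly_supported(uint8_t dev_id)
{
	const struct rte_cryptodev_sym_capability_idx idx = {
		.type = RTE_CRYPTO_SYM_XFORM_AEAD,
		.algo.aead = RTE_CRYPTO_AEAD_CHACHA20_POLY1305,
	};

	return rte_cryptodev_sym_capability_get(dev_id, &idx) != NULL;
}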