#include <rte_errno.h>
#include <rte_spinlock.h>
#include <rte_string_fns.h>
+#include <rte_compat.h>
+#include <rte_function_versioning.h>
#include "rte_crypto.h"
#include "rte_cryptodev.h"
/* spinlock for crypto device callbacks */
static rte_spinlock_t rte_cryptodev_cb_lock = RTE_SPINLOCK_INITIALIZER;
+static const struct rte_cryptodev_capabilities
+ cryptodev_undefined_capabilities[] = {
+ RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
+};
+
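+/*
+ * Per-device cache of capability lists trimmed for 20.0 ABI callers, and a
+ * flag recording whether a device's list has already been checked for
+ * entries that must be hidden from older binaries.
+ */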
+static struct rte_cryptodev_capabilities
+ *capability_copy[RTE_CRYPTO_MAX_DEVS];
+static uint8_t is_capability_checked[RTE_CRYPTO_MAX_DEVS];
/**
* The user application callback description.
[RTE_CRYPTO_AUTH_OP_GENERATE] = "generate"
};
-const struct rte_cryptodev_symmetric_capability *
-rte_cryptodev_sym_capability_get(uint8_t dev_id,
+const struct rte_cryptodev_symmetric_capability __vsym *
+rte_cryptodev_sym_capability_get_v20(uint8_t dev_id,
const struct rte_cryptodev_sym_capability_idx *idx)
{
const struct rte_cryptodev_capabilities *capability;
struct rte_cryptodev_info dev_info;
int i = 0;
- rte_cryptodev_info_get(dev_id, &dev_info);
+ rte_cryptodev_info_get_v20(dev_id, &dev_info);
while ((capability = &dev_info.capabilities[i++])->op !=
RTE_CRYPTO_OP_TYPE_UNDEFINED) {
}
return NULL;
+}
+VERSION_SYMBOL(rte_cryptodev_sym_capability_get, _v20, 20.0);
+
+const struct rte_cryptodev_symmetric_capability __vsym *
+rte_cryptodev_sym_capability_get_v21(uint8_t dev_id,
+ const struct rte_cryptodev_sym_capability_idx *idx)
+{
+ const struct rte_cryptodev_capabilities *capability;
+ struct rte_cryptodev_info dev_info;
+ int i = 0;
+
+ rte_cryptodev_info_get(dev_id, &dev_info);
+
+ while ((capability = &dev_info.capabilities[i++])->op !=
+ RTE_CRYPTO_OP_TYPE_UNDEFINED) {
+ if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
+ continue;
+ if (capability->sym.xform_type != idx->type)
+ continue;
+
+ if (idx->type == RTE_CRYPTO_SYM_XFORM_AUTH &&
+ capability->sym.auth.algo == idx->algo.auth)
+ return &capability->sym;
+
+ if (idx->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
+ capability->sym.cipher.algo == idx->algo.cipher)
+ return &capability->sym;
+
+ if (idx->type == RTE_CRYPTO_SYM_XFORM_AEAD &&
+ capability->sym.aead.algo == idx->algo.aead)
+ return &capability->sym;
+ }
+
+ return NULL;
}
+MAP_STATIC_SYMBOL(const struct rte_cryptodev_symmetric_capability *
+ rte_cryptodev_sym_capability_get(uint8_t dev_id,
+ const struct rte_cryptodev_sym_capability_idx *idx),
+ rte_cryptodev_sym_capability_get_v21);
+BIND_DEFAULT_SYMBOL(rte_cryptodev_sym_capability_get, _v21, 21);
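+
+/*
+ * Illustrative sketch, not part of this change: applications keep calling
+ * the unversioned name. Binaries built against DPDK 20.0 resolve it to the
+ * _v20 implementation at load time, while newly built binaries bind to
+ * _v21. The helper below is hypothetical.
+ */
+static __rte_unused int
+device_supports_aes_gcm(uint8_t dev_id)
+{
+ const struct rte_cryptodev_sym_capability_idx idx = {
+ .type = RTE_CRYPTO_SYM_XFORM_AEAD,
+ .algo.aead = RTE_CRYPTO_AEAD_AES_GCM,
+ };
+
+ return rte_cryptodev_sym_capability_get(dev_id, &idx) != NULL;
+}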
static int
param_range_check(uint16_t size, const struct rte_crypto_param_range *range)
retval = (*dev->dev_ops->dev_close)(dev);
rte_cryptodev_trace_close(dev_id, retval);
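+ /* drop any capability copy cached for 20.0 ABI callers */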
+ if (capability_copy[dev_id]) {
+ free(capability_copy[dev_id]);
+ capability_copy[dev_id] = NULL;
+ }
+ is_capability_checked[dev_id] = 0;
+
if (retval < 0)
return retval;
(*dev->dev_ops->stats_reset)(dev);
}
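+
+/*
+ * Build, and cache per device, a copy of the capability list with any
+ * Chacha20-Poly1305 AEAD entries removed, so that 20.0 ABI callers never
+ * see the new enum value.
+ */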
+static void
+get_v20_capabilities(uint8_t dev_id, struct rte_cryptodev_info *dev_info)
+{
+ const struct rte_cryptodev_capabilities *capability;
+ uint8_t found_invalid_capa = 0;
+ uint8_t counter = 0;
+
+ for (capability = dev_info->capabilities;
+ capability->op != RTE_CRYPTO_OP_TYPE_UNDEFINED;
+ ++capability, ++counter) {
+ if (capability->op == RTE_CRYPTO_OP_TYPE_SYMMETRIC &&
+ capability->sym.xform_type ==
+ RTE_CRYPTO_SYM_XFORM_AEAD
+ && capability->sym.aead.algo >=
+ RTE_CRYPTO_AEAD_CHACHA20_POLY1305) {
+ found_invalid_capa = 1;
+ counter--;
+ }
+ }
+ is_capability_checked[dev_id] = 1;
+ if (!found_invalid_capa)
+ return;
+ /* allocate one extra slot for the end-of-list marker */
+ capability_copy[dev_id] = malloc((counter + 1) *
+ sizeof(struct rte_cryptodev_capabilities));
+ if (capability_copy[dev_id] == NULL) {
+ /*
+ * error case - no memory to store the trimmed
+ * list, so have to return an empty list
+ */
+ dev_info->capabilities =
+ cryptodev_undefined_capabilities;
+ is_capability_checked[dev_id] = 0;
+ } else {
+ counter = 0;
+ for (capability = dev_info->capabilities;
+ capability->op !=
+ RTE_CRYPTO_OP_TYPE_UNDEFINED;
+ capability++) {
+ if (!(capability->op ==
+ RTE_CRYPTO_OP_TYPE_SYMMETRIC
+ && capability->sym.xform_type ==
+ RTE_CRYPTO_SYM_XFORM_AEAD
+ && capability->sym.aead.algo >=
+ RTE_CRYPTO_AEAD_CHACHA20_POLY1305)) {
+ capability_copy[dev_id][counter++] =
+ *capability;
+ }
+ }
+ /* terminate the trimmed copy */
+ capability_copy[dev_id][counter] =
+ cryptodev_undefined_capabilities[0];
+ dev_info->capabilities =
+ capability_copy[dev_id];
+ }
+}
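+
+/*
+ * Worked example (illustrative, not part of this change): a device
+ * reporting { AES-GCM, CHACHA20-POLY1305, END } is cached and returned to
+ * 20.0 ABI callers as { AES-GCM, END }; a device without a
+ * Chacha20-Poly1305 entry keeps its original list untouched.
+ */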
-void
-rte_cryptodev_info_get(uint8_t dev_id, struct rte_cryptodev_info *dev_info)
+void __vsym
+rte_cryptodev_info_get_v20(uint8_t dev_id, struct rte_cryptodev_info *dev_info)
{
struct rte_cryptodev *dev;
RTE_FUNC_PTR_OR_RET(*dev->dev_ops->dev_infos_get);
(*dev->dev_ops->dev_infos_get)(dev, dev_info);
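+ /*
+ * On the first call, inspect the list and build a trimmed copy if it
+ * contains the new algorithm; on later calls, reuse any cached copy.
+ */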
+ if (capability_copy[dev_id] == NULL) {
+ if (!is_capability_checked[dev_id])
+ get_v20_capabilities(dev_id, dev_info);
+ } else
+ dev_info->capabilities = capability_copy[dev_id];
+
dev_info->driver_name = dev->device->driver->name;
dev_info->device = dev->device;
}
+VERSION_SYMBOL(rte_cryptodev_info_get, _v20, 20.0);
+
+void __vsym
+rte_cryptodev_info_get_v21(uint8_t dev_id, struct rte_cryptodev_info *dev_info)
+{
+ struct rte_cryptodev *dev;
+
+ if (!rte_cryptodev_pmd_is_valid_dev(dev_id)) {
+ CDEV_LOG_ERR("Invalid dev_id=%d", dev_id);
+ return;
+ }
+
+ dev = &rte_crypto_devices[dev_id];
+
+ memset(dev_info, 0, sizeof(struct rte_cryptodev_info));
+
+ RTE_FUNC_PTR_OR_RET(*dev->dev_ops->dev_infos_get);
+ (*dev->dev_ops->dev_infos_get)(dev, dev_info);
+
+ dev_info->driver_name = dev->device->driver->name;
+ dev_info->device = dev->device;
+}
+MAP_STATIC_SYMBOL(void rte_cryptodev_info_get(uint8_t dev_id,
+ struct rte_cryptodev_info *dev_info), rte_cryptodev_info_get_v21);
+BIND_DEFAULT_SYMBOL(rte_cryptodev_info_get, _v21, 21);
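+
+/*
+ * Note (not shown in this excerpt): both versions must also be exported by
+ * the library's version script, e.g. a DPDK_21 node alongside the existing
+ * DPDK_20.0 node in rte_cryptodev_version.map, so the dynamic linker can
+ * resolve the versioned symbols.
+ */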
int
rte_cryptodev_callback_register(uint8_t dev_id,
 * - Return NULL if the capability does not exist.
*/
const struct rte_cryptodev_symmetric_capability *
+rte_cryptodev_sym_capability_get_v20(uint8_t dev_id,
+ const struct rte_cryptodev_sym_capability_idx *idx);
+
+const struct rte_cryptodev_symmetric_capability *
+rte_cryptodev_sym_capability_get_v21(uint8_t dev_id,
+ const struct rte_cryptodev_sym_capability_idx *idx);
+
+const struct rte_cryptodev_symmetric_capability *
rte_cryptodev_sym_capability_get(uint8_t dev_id,
const struct rte_cryptodev_sym_capability_idx *idx);
 * the last valid element has its op field set to
* RTE_CRYPTO_OP_TYPE_UNDEFINED.
*/
-extern void
+
+void
rte_cryptodev_info_get(uint8_t dev_id, struct rte_cryptodev_info *dev_info);
+/* A new element, RTE_CRYPTO_AEAD_CHACHA20_POLY1305, is added to
+ * enum rte_crypto_aead_algorithm, which also changes the value of
+ * RTE_CRYPTO_AEAD_LIST_END. To maintain ABI compatibility with applications
+ * linked against earlier versions, and to prevent them, for example, from
+ * picking up the new value and using it to index into an array sized too
+ * small for it, two versions of rte_cryptodev_info_get() are needed.
+ * The latest version simply returns the capabilities retrieved from the
+ * device as-is. The compatible version inspects the retrieved capabilities
+ * and returns them directly only if the new value is not included. If the
+ * new value is included, it allocates a copy of the device capabilities,
+ * trims the new value from that copy, and returns the copy. This only needs
+ * to be done once per device. For the corner case where the allocation
+ * fails, an empty capability list is returned, as there is no mechanism to
+ * report an error and adding one would itself be an ABI breakage.
+ * The compatible version can be removed after the next major ABI release.
+ */
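+
+/*
+ * Illustrative sketch of the hazard described above (hypothetical
+ * application code, not part of this change): a binary built against the
+ * 20.0 headers may have sized a table with the old list-end value,
+ *
+ *	const char *aead_name[RTE_CRYPTO_AEAD_LIST_END];
+ *
+ * and index it as aead_name[capability->sym.aead.algo]. Handing such a
+ * binary RTE_CRYPTO_AEAD_CHACHA20_POLY1305 would make that index read past
+ * the end of the table; the v20 path trims the entry instead.
+ */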
+
+void
+rte_cryptodev_info_get_v20(uint8_t dev_id, struct rte_cryptodev_info *dev_info);
+
+void
+rte_cryptodev_info_get_v21(uint8_t dev_id, struct rte_cryptodev_info *dev_info);
/**
* Register a callback function for specific device id.