#include <rte_errno.h>
#include <rte_spinlock.h>
#include <rte_string_fns.h>
-#include <rte_compat.h>
-#include <rte_function_versioning.h>
#include "rte_crypto.h"
#include "rte_cryptodev.h"
/* spinlock for crypto device callbacks */
static rte_spinlock_t rte_cryptodev_cb_lock = RTE_SPINLOCK_INITIALIZER;
-static const struct rte_cryptodev_capabilities
- cryptodev_undefined_capabilities[] = {
- RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
-};
-
-static struct rte_cryptodev_capabilities
- *capability_copy[RTE_CRYPTO_MAX_DEVS];
-static uint8_t is_capability_checked[RTE_CRYPTO_MAX_DEVS];
-
/**
* The user application callback description.
*
[RTE_CRYPTO_AUTH_OP_GENERATE] = "generate"
};
-const struct rte_cryptodev_symmetric_capability __vsym *
-rte_cryptodev_sym_capability_get_v20(uint8_t dev_id,
- const struct rte_cryptodev_sym_capability_idx *idx)
-{
- const struct rte_cryptodev_capabilities *capability;
- struct rte_cryptodev_info dev_info;
- int i = 0;
-
- rte_cryptodev_info_get_v20(dev_id, &dev_info);
-
- while ((capability = &dev_info.capabilities[i++])->op !=
- RTE_CRYPTO_OP_TYPE_UNDEFINED) {
- if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
- continue;
-
- if (capability->sym.xform_type != idx->type)
- continue;
-
- if (idx->type == RTE_CRYPTO_SYM_XFORM_AUTH &&
- capability->sym.auth.algo == idx->algo.auth)
- return &capability->sym;
-
- if (idx->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
- capability->sym.cipher.algo == idx->algo.cipher)
- return &capability->sym;
-
- if (idx->type == RTE_CRYPTO_SYM_XFORM_AEAD &&
- capability->sym.aead.algo == idx->algo.aead)
- return &capability->sym;
- }
-
- return NULL;
-}
-VERSION_SYMBOL(rte_cryptodev_sym_capability_get, _v20, 20.0);
-
-const struct rte_cryptodev_symmetric_capability __vsym *
-rte_cryptodev_sym_capability_get_v21(uint8_t dev_id,
+const struct rte_cryptodev_symmetric_capability *
+rte_cryptodev_sym_capability_get(uint8_t dev_id,
const struct rte_cryptodev_sym_capability_idx *idx)
{
const struct rte_cryptodev_capabilities *capability;
return NULL;
}
-MAP_STATIC_SYMBOL(const struct rte_cryptodev_symmetric_capability *
- rte_cryptodev_sym_capability_get(uint8_t dev_id,
- const struct rte_cryptodev_sym_capability_idx *idx),
- rte_cryptodev_sym_capability_get_v21);
-BIND_DEFAULT_SYMBOL(rte_cryptodev_sym_capability_get, _v21, 21);
static int
param_range_check(uint16_t size, const struct rte_crypto_param_range *range)
retval = (*dev->dev_ops->dev_close)(dev);
rte_cryptodev_trace_close(dev_id, retval);
- if (capability_copy[dev_id]) {
- free(capability_copy[dev_id]);
- capability_copy[dev_id] = NULL;
- }
- is_capability_checked[dev_id] = 0;
-
if (retval < 0)
return retval;
(*dev->dev_ops->stats_reset)(dev);
}
-static void
-get_v20_capabilities(uint8_t dev_id, struct rte_cryptodev_info *dev_info)
-{
- const struct rte_cryptodev_capabilities *capability;
- uint8_t found_invalid_capa = 0;
- uint8_t counter = 0;
-
- for (capability = dev_info->capabilities;
- capability->op != RTE_CRYPTO_OP_TYPE_UNDEFINED;
- ++capability, ++counter) {
- if (capability->op == RTE_CRYPTO_OP_TYPE_SYMMETRIC &&
- capability->sym.xform_type ==
- RTE_CRYPTO_SYM_XFORM_AEAD
- && capability->sym.aead.algo >=
- RTE_CRYPTO_AEAD_CHACHA20_POLY1305) {
- found_invalid_capa = 1;
- counter--;
- }
- }
- is_capability_checked[dev_id] = 1;
- if (!found_invalid_capa)
- return;
- capability_copy[dev_id] = malloc(counter *
- sizeof(struct rte_cryptodev_capabilities));
- if (capability_copy[dev_id] == NULL) {
- /*
- * error case - no memory to store the trimmed
- * list, so have to return an empty list
- */
- dev_info->capabilities =
- cryptodev_undefined_capabilities;
- is_capability_checked[dev_id] = 0;
- } else {
- counter = 0;
- for (capability = dev_info->capabilities;
- capability->op !=
- RTE_CRYPTO_OP_TYPE_UNDEFINED;
- capability++) {
- if (!(capability->op ==
- RTE_CRYPTO_OP_TYPE_SYMMETRIC
- && capability->sym.xform_type ==
- RTE_CRYPTO_SYM_XFORM_AEAD
- && capability->sym.aead.algo >=
- RTE_CRYPTO_AEAD_CHACHA20_POLY1305)) {
- capability_copy[dev_id][counter++] =
- *capability;
- }
- }
- dev_info->capabilities =
- capability_copy[dev_id];
- }
-}
-
-void __vsym
-rte_cryptodev_info_get_v20(uint8_t dev_id, struct rte_cryptodev_info *dev_info)
-{
- struct rte_cryptodev *dev;
-
- if (!rte_cryptodev_pmd_is_valid_dev(dev_id)) {
- CDEV_LOG_ERR("Invalid dev_id=%d", dev_id);
- return;
- }
-
- dev = &rte_crypto_devices[dev_id];
-
- memset(dev_info, 0, sizeof(struct rte_cryptodev_info));
-
- RTE_FUNC_PTR_OR_RET(*dev->dev_ops->dev_infos_get);
- (*dev->dev_ops->dev_infos_get)(dev, dev_info);
-
- if (capability_copy[dev_id] == NULL) {
- if (!is_capability_checked[dev_id])
- get_v20_capabilities(dev_id, dev_info);
- } else
- dev_info->capabilities = capability_copy[dev_id];
-
- dev_info->driver_name = dev->device->driver->name;
- dev_info->device = dev->device;
-}
-VERSION_SYMBOL(rte_cryptodev_info_get, _v20, 20.0);
-
-void __vsym
-rte_cryptodev_info_get_v21(uint8_t dev_id, struct rte_cryptodev_info *dev_info)
+void
+rte_cryptodev_info_get(uint8_t dev_id, struct rte_cryptodev_info *dev_info)
{
struct rte_cryptodev *dev;
dev_info->driver_name = dev->device->driver->name;
dev_info->device = dev->device;
}
-MAP_STATIC_SYMBOL(void rte_cryptodev_info_get(uint8_t dev_id,
- struct rte_cryptodev_info *dev_info), rte_cryptodev_info_get_v21);
-BIND_DEFAULT_SYMBOL(rte_cryptodev_info_get, _v21, 21);
int
rte_cryptodev_callback_register(uint8_t dev_id,
rte_spinlock_unlock(&rte_cryptodev_cb_lock);
}
-
int
rte_cryptodev_sym_session_init(uint8_t dev_id,
struct rte_cryptodev_sym_session *sess,
return dev->dev_ops->sym_cpu_process(dev, sess, ofs, vec);
}
+/*
+ * Return the total size in bytes needed for a raw data-path context on
+ * dev_id: sizeof(struct rte_crypto_raw_dp_ctx) plus the PMD-reported
+ * private size, rounded up to the next multiple of 8.
+ *
+ * Returns -EINVAL for an invalid device id, -ENOTSUP when the PMD does
+ * not implement the op or lacks RTE_CRYPTODEV_FF_SYM_RAW_DP, or when the
+ * PMD reports a negative private size.
+ */
+int
+rte_cryptodev_get_raw_dp_ctx_size(uint8_t dev_id)
+{
+	struct rte_cryptodev *dev;
+	int32_t size = sizeof(struct rte_crypto_raw_dp_ctx);
+	int32_t priv_size;
+
+	if (!rte_cryptodev_pmd_is_valid_dev(dev_id))
+		return -EINVAL;
+
+	dev = rte_cryptodev_pmd_get_dev(dev_id);
+
+	/* Raw DP requires both the driver op and the feature flag. */
+	if (*dev->dev_ops->sym_get_raw_dp_ctx_size == NULL ||
+		!(dev->feature_flags & RTE_CRYPTODEV_FF_SYM_RAW_DP)) {
+		return -ENOTSUP;
+	}
+
+	priv_size = (*dev->dev_ops->sym_get_raw_dp_ctx_size)(dev);
+	if (priv_size < 0)
+		return -ENOTSUP;
+
+	/* 8-byte alignment so the PMD private area is safely aligned. */
+	return RTE_ALIGN_CEIL((size + priv_size), 8);
+}
+
+/*
+ * Configure (or update, when is_update != 0) the raw data-path context
+ * ctx for queue pair qp_id of dev_id, binding it to the given session
+ * type/context. The actual setup is delegated to the PMD's
+ * sym_configure_raw_dp_ctx op.
+ *
+ * Returns -EINVAL when the queue pair status check fails (presumably the
+ * qp is not set up — confirm against rte_cryptodev_get_qp_status),
+ * -ENOTSUP when the PMD lacks RTE_CRYPTODEV_FF_SYM_RAW_DP or the op,
+ * otherwise the PMD op's return value.
+ */
+int
+rte_cryptodev_configure_raw_dp_ctx(uint8_t dev_id, uint16_t qp_id,
+	struct rte_crypto_raw_dp_ctx *ctx,
+	enum rte_crypto_op_sess_type sess_type,
+	union rte_cryptodev_session_ctx session_ctx,
+	uint8_t is_update)
+{
+	struct rte_cryptodev *dev;
+
+	if (!rte_cryptodev_get_qp_status(dev_id, qp_id))
+		return -EINVAL;
+
+	dev = rte_cryptodev_pmd_get_dev(dev_id);
+	if (!(dev->feature_flags & RTE_CRYPTODEV_FF_SYM_RAW_DP)
+			|| dev->dev_ops->sym_configure_raw_dp_ctx == NULL)
+		return -ENOTSUP;
+
+	return (*dev->dev_ops->sym_configure_raw_dp_ctx)(dev, qp_id, ctx,
+			sess_type, session_ctx, is_update);
+}
+
+/*
+ * Enqueue a burst of symmetric crypto vectors through the raw data-path
+ * context. Thin wrapper: forwards directly to the PMD callback stored in
+ * ctx->enqueue_burst; no validation is performed here, so ctx must have
+ * been configured via rte_cryptodev_configure_raw_dp_ctx first.
+ */
+uint32_t
+rte_cryptodev_raw_enqueue_burst(struct rte_crypto_raw_dp_ctx *ctx,
+	struct rte_crypto_sym_vec *vec, union rte_crypto_sym_ofs ofs,
+	void **user_data, int *enqueue_status)
+{
+	return (*ctx->enqueue_burst)(ctx->qp_data, ctx->drv_ctx_data, vec,
+			ofs, user_data, enqueue_status);
+}
+
+/*
+ * Notify the PMD that n previously enqueued raw data-path operations are
+ * final. Thin wrapper around ctx->enqueue_done; returns its result.
+ */
+int
+rte_cryptodev_raw_enqueue_done(struct rte_crypto_raw_dp_ctx *ctx,
+		uint32_t n)
+{
+	return (*ctx->enqueue_done)(ctx->qp_data, ctx->drv_ctx_data, n);
+}
+
+/*
+ * Dequeue a burst of completed operations through the raw data-path
+ * context. Thin wrapper: forwards the caller-supplied count and
+ * post-dequeue callbacks, the user-data output array and flags straight
+ * to the PMD callback stored in ctx->dequeue_burst.
+ */
+uint32_t
+rte_cryptodev_raw_dequeue_burst(struct rte_crypto_raw_dp_ctx *ctx,
+	rte_cryptodev_raw_get_dequeue_count_t get_dequeue_count,
+	rte_cryptodev_raw_post_dequeue_t post_dequeue,
+	void **out_user_data, uint8_t is_user_data_array,
+	uint32_t *n_success_jobs, int *status)
+{
+	return (*ctx->dequeue_burst)(ctx->qp_data, ctx->drv_ctx_data,
+			get_dequeue_count, post_dequeue, out_user_data,
+			is_user_data_array, n_success_jobs, status);
+}
+
+/*
+ * Notify the PMD that n dequeued raw data-path operations have been
+ * consumed. Thin wrapper around ctx->dequeue_done; returns its result.
+ */
+int
+rte_cryptodev_raw_dequeue_done(struct rte_crypto_raw_dp_ctx *ctx,
+		uint32_t n)
+{
+	return (*ctx->dequeue_done)(ctx->qp_data, ctx->drv_ctx_data, n);
+}
+
/** Initialise rte_crypto_op mempool element */
static void
rte_crypto_op_init(struct rte_mempool *mempool,