return 0;
}
+/*
+ * Last step of cryptodev probing. In a secondary process, point the
+ * per-device fast-path ops entry at the already-initialized device.
+ */
+void
+rte_cryptodev_pmd_probing_finish(struct rte_cryptodev *cryptodev)
+{
+ if (cryptodev == NULL)
+ return;
+ /*
+ * For a secondary process, the device is expected to be already
+ * 'usable' at this point: shared data and all fast-path devops
+ * function pointers must have been set up properly inside
+ * rte_cryptodev by the primary process.
+ */
+ if (rte_eal_process_type() == RTE_PROC_SECONDARY)
+ cryptodev_fp_ops_set(rte_crypto_fp_ops +
+ cryptodev->data->dev_id, cryptodev);
+}
+
static uint16_t
dummy_crypto_enqueue_burst(__rte_unused void *qp,
__rte_unused struct rte_crypto_op **ops,
uint8_t rte_cryptodev_allocate_driver(struct cryptodev_driver *crypto_drv,
const struct rte_driver *drv);
+/**
+ * @internal
+ * Last step of device probing. Must be called after the cryptodev has
+ * been allocated and initialized successfully; for secondary processes
+ * it sets up the fast-path function pointers for the device.
+ *
+ * @param dev Pointer to the cryptodev struct
+ *
+ * @return
+ *  void
+ */
+__rte_internal
+void
+rte_cryptodev_pmd_probing_finish(struct rte_cryptodev *dev);
#define RTE_PMD_REGISTER_CRYPTO_DRIVER(crypto_drv, drv, driver_id)\
RTE_INIT(init_ ##driver_id)\
rte_cryptodev_pmd_get_dev;
rte_cryptodev_pmd_get_named_dev;
rte_cryptodev_pmd_parse_input_args;
+ rte_cryptodev_pmd_probing_finish;
rte_cryptodev_pmd_release_device;
};