+++ /dev/null
-# SPDX-License-Identifier: BSD-3-Clause
-# Copyright(c) 2015-2018 Intel Corporation
-
-Makefile for crypto QAT PMD is in common/qat directory.
-The build for the QAT driver is done from there as only one library is built for the
-whole QAT pci device and that library includes all the services (crypto, compression)
-which are enabled on the device.
+++ /dev/null
-# SPDX-License-Identifier: BSD-3-Clause
-# Copyright(c) 2017-2018 Intel Corporation
-
-if is_windows
- build = false
- reason = 'not supported on Windows'
- subdir_done()
-endif
-
-# this does not build the QAT driver, instead that is done in the compression
-# driver which comes later. Here we just add our sources files to the list
-build = false
-reason = '' # sentinal value to suppress printout
-dep = dependency('libcrypto', required: false, method: 'pkg-config')
-qat_includes += include_directories('.')
-qat_deps += 'cryptodev'
-qat_deps += 'net'
-qat_deps += 'security'
-if dep.found()
- # Add our sources files to the list
- qat_sources += files(
- 'qat_asym.c',
- 'qat_asym_pmd.c',
- 'qat_sym.c',
- 'qat_sym_hw_dp.c',
- 'qat_sym_pmd.c',
- 'qat_sym_session.c',
- )
- qat_ext_deps += dep
- qat_cflags += '-DBUILD_QAT_SYM'
- qat_cflags += '-DBUILD_QAT_ASYM'
-endif
+++ /dev/null
-/* SPDX-License-Identifier: BSD-3-Clause
- * Copyright(c) 2019 Intel Corporation
- */
-
-#ifndef _QAT_ASYM_CAPABILITIES_H_
-#define _QAT_ASYM_CAPABILITIES_H_
-
-#define QAT_BASE_GEN1_ASYM_CAPABILITIES \
- { /* modexp */ \
- .op = RTE_CRYPTO_OP_TYPE_ASYMMETRIC, \
- {.asym = { \
- .xform_capa = { \
- .xform_type = RTE_CRYPTO_ASYM_XFORM_MODEX, \
- .op_types = 0, \
- { \
- .modlen = { \
- .min = 1, \
- .max = 512, \
- .increment = 1 \
- }, } \
- } \
- }, \
- } \
- }, \
- { /* modinv */ \
- .op = RTE_CRYPTO_OP_TYPE_ASYMMETRIC, \
- {.asym = { \
- .xform_capa = { \
- .xform_type = RTE_CRYPTO_ASYM_XFORM_MODINV, \
- .op_types = 0, \
- { \
- .modlen = { \
- .min = 1, \
- .max = 512, \
- .increment = 1 \
- }, } \
- } \
- }, \
- } \
- }, \
- { /* RSA */ \
- .op = RTE_CRYPTO_OP_TYPE_ASYMMETRIC, \
- {.asym = { \
- .xform_capa = { \
- .xform_type = RTE_CRYPTO_ASYM_XFORM_RSA, \
- .op_types = ((1 << RTE_CRYPTO_ASYM_OP_SIGN) | \
- (1 << RTE_CRYPTO_ASYM_OP_VERIFY) | \
- (1 << RTE_CRYPTO_ASYM_OP_ENCRYPT) | \
- (1 << RTE_CRYPTO_ASYM_OP_DECRYPT)), \
- { \
- .modlen = { \
- /* min length is based on openssl rsa keygen */ \
- .min = 64, \
- /* value 0 symbolizes no limit on max length */ \
- .max = 512, \
- .increment = 64 \
- }, } \
- } \
- }, \
- } \
- } \
-
-#endif /* _QAT_ASYM_CAPABILITIES_H_ */
#include "qat_crypto.h"
#include "qat_asym.h"
#include "qat_asym_pmd.h"
-#include "qat_sym_capabilities.h"
-#include "qat_asym_capabilities.h"
uint8_t qat_asym_driver_id;
-
-static const struct rte_cryptodev_capabilities qat_gen1_asym_capabilities[] = {
- QAT_BASE_GEN1_ASYM_CAPABILITIES,
- RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
-};
+struct qat_crypto_gen_dev_ops qat_asym_gen_dev_ops[QAT_N_GENS];
void
qat_asym_init_op_cookie(void *op_cookie)
.socket_id = qat_dev_instance->pci_dev->device.numa_node,
.private_data_size = sizeof(struct qat_cryptodev_private)
};
+ struct qat_capabilities_info capa_info;
+ const struct rte_cryptodev_capabilities *capabilities;
+ const struct qat_crypto_gen_dev_ops *gen_dev_ops =
+ &qat_asym_gen_dev_ops[qat_pci_dev->qat_dev_gen];
char name[RTE_CRYPTODEV_NAME_MAX_LEN];
char capa_memz_name[RTE_CRYPTODEV_NAME_MAX_LEN];
struct rte_cryptodev *cryptodev;
struct qat_cryptodev_private *internals;
+ uint64_t capa_size;
- if (qat_pci_dev->qat_dev_gen == QAT_GEN4) {
- QAT_LOG(ERR, "Asymmetric crypto PMD not supported on QAT 4xxx");
- return -EFAULT;
- }
- if (qat_pci_dev->qat_dev_gen == QAT_GEN3) {
- QAT_LOG(ERR, "Asymmetric crypto PMD not supported on QAT c4xxx");
- return -EFAULT;
- }
snprintf(name, RTE_CRYPTODEV_NAME_MAX_LEN, "%s_%s",
qat_pci_dev->name, "asym");
QAT_LOG(DEBUG, "Creating QAT ASYM device %s\n", name);
+ if (gen_dev_ops->cryptodev_ops == NULL) {
+ QAT_LOG(ERR, "Device %s does not support asymmetric crypto",
+ name);
+ return -EFAULT;
+ }
+
if (rte_eal_process_type() == RTE_PROC_PRIMARY) {
qat_pci_dev->qat_asym_driver_id =
qat_asym_driver_id;
cryptodev->enqueue_burst = qat_asym_pmd_enqueue_op_burst;
cryptodev->dequeue_burst = qat_asym_pmd_dequeue_op_burst;
- cryptodev->feature_flags = RTE_CRYPTODEV_FF_ASYMMETRIC_CRYPTO |
- RTE_CRYPTODEV_FF_HW_ACCELERATED |
- RTE_CRYPTODEV_FF_ASYM_SESSIONLESS |
- RTE_CRYPTODEV_FF_RSA_PRIV_OP_KEY_EXP |
- RTE_CRYPTODEV_FF_RSA_PRIV_OP_KEY_QT;
+
+ cryptodev->feature_flags = gen_dev_ops->get_feature_flags(qat_pci_dev);
if (rte_eal_process_type() != RTE_PROC_PRIMARY)
return 0;
internals = cryptodev->data->dev_private;
internals->qat_dev = qat_pci_dev;
internals->dev_id = cryptodev->data->dev_id;
- internals->qat_dev_capabilities = qat_gen1_asym_capabilities;
internals->service_type = QAT_SERVICE_ASYMMETRIC;
+ capa_info = gen_dev_ops->get_capabilities(qat_pci_dev);
+ capabilities = capa_info.data;
+ capa_size = capa_info.size;
+
internals->capa_mz = rte_memzone_lookup(capa_memz_name);
if (internals->capa_mz == NULL) {
internals->capa_mz = rte_memzone_reserve(capa_memz_name,
- sizeof(qat_gen1_asym_capabilities),
- rte_socket_id(), 0);
- }
- if (internals->capa_mz == NULL) {
- QAT_LOG(DEBUG,
- "Error allocating memzone for capabilities, destroying PMD for %s",
- name);
- rte_cryptodev_pmd_destroy(cryptodev);
- memset(&qat_dev_instance->asym_rte_dev, 0,
- sizeof(qat_dev_instance->asym_rte_dev));
- return -EFAULT;
+ capa_size, rte_socket_id(), 0);
+ if (internals->capa_mz == NULL) {
+ QAT_LOG(DEBUG,
+ "Error allocating memzone for capabilities, "
+ "destroying PMD for %s",
+ name);
+ rte_cryptodev_pmd_destroy(cryptodev);
+ memset(&qat_dev_instance->asym_rte_dev, 0,
+ sizeof(qat_dev_instance->asym_rte_dev));
+ return -EFAULT;
+ }
}
- memcpy(internals->capa_mz->addr, qat_gen1_asym_capabilities,
- sizeof(qat_gen1_asym_capabilities));
+ memcpy(internals->capa_mz->addr, capabilities, capa_size);
internals->qat_dev_capabilities = internals->capa_mz->addr;
while (1) {
#define _QAT_ASYM_PMD_H_
#include <rte_cryptodev.h>
+#include "qat_crypto.h"
#include "qat_device.h"
/** Intel(R) QAT Asymmetric Crypto PMD driver name */
#define CRYPTODEV_NAME_QAT_ASYM_PMD crypto_qat_asym
+/**
+ * Helper macro to add an asym capability
+ * <name> <op types> <modlen (min, max, increment)>
+ **/
+#define QAT_ASYM_CAP(n, o, l, r, i) \
+ { \
+ .op = RTE_CRYPTO_OP_TYPE_ASYMMETRIC, \
+ {.asym = { \
+ .xform_capa = { \
+ .xform_type = RTE_CRYPTO_ASYM_XFORM_##n,\
+ .op_types = o, \
+ { \
+ .modlen = { \
+ .min = l, \
+ .max = r, \
+ .increment = i \
+ }, } \
+ } \
+ }, \
+ } \
+ }
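+
+/*
+ * Illustrative usage only (not part of this patch): a generation-specific
+ * capability table could expand the MODEX entry removed from
+ * qat_asym_capabilities.h as
+ *
+ *	QAT_ASYM_CAP(MODEX, 0, 1, 512, 1),
+ *
+ * i.e. op_types = 0 and a modlen range of min 1, max 512, increment 1.
+ */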
+
extern uint8_t qat_asym_driver_id;
+extern struct qat_crypto_gen_dev_ops qat_asym_gen_dev_ops[];
+
void
qat_asym_init_op_cookie(void *op_cookie);
uint64_t size;
};
+typedef struct qat_capabilities_info (*get_capabilities_info_t)
+ (struct qat_pci_device *qat_dev);
+
+typedef uint64_t (*get_feature_flags_t)(struct qat_pci_device *qat_dev);
+
+typedef void * (*create_security_ctx_t)(void *cryptodev);
+
+struct qat_crypto_gen_dev_ops {
+ get_feature_flags_t get_feature_flags;
+ get_capabilities_info_t get_capabilities;
+ struct rte_cryptodev_ops *cryptodev_ops;
+#ifdef RTE_LIB_SECURITY
+ create_security_ctx_t create_security_ctx;
+#endif
+};
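+
+/*
+ * Sketch only, not part of this patch: each generation implementation is
+ * expected to fill in its slot of the per-service ops tables (for example
+ * qat_sym_gen_dev_ops[] declared in qat_sym_pmd.h). The gen1 symbol names
+ * below are hypothetical placeholders:
+ *
+ *	RTE_INIT(qat_sym_crypto_gen1_init)
+ *	{
+ *		qat_sym_gen_dev_ops[QAT_GEN1].cryptodev_ops =
+ *				&qat_sym_crypto_ops_gen1;
+ *		qat_sym_gen_dev_ops[QAT_GEN1].get_capabilities =
+ *				qat_sym_crypto_cap_get_gen1;
+ *		qat_sym_gen_dev_ops[QAT_GEN1].get_feature_flags =
+ *				qat_sym_crypto_feature_flags_get_gen1;
+ *	}
+ */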
+
int
qat_cryptodev_config(struct rte_cryptodev *dev,
struct rte_cryptodev_config *config);
+++ /dev/null
-/* SPDX-License-Identifier: BSD-3-Clause
- * Copyright(c) 2017-2019 Intel Corporation
- */
-
-#ifndef _QAT_SYM_CAPABILITIES_H_
-#define _QAT_SYM_CAPABILITIES_H_
-
-#define QAT_BASE_GEN1_SYM_CAPABILITIES \
- { /* SHA1 */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
- {.auth = { \
- .algo = RTE_CRYPTO_AUTH_SHA1, \
- .block_size = 64, \
- .key_size = { \
- .min = 0, \
- .max = 0, \
- .increment = 0 \
- }, \
- .digest_size = { \
- .min = 1, \
- .max = 20, \
- .increment = 1 \
- }, \
- .iv_size = { 0 } \
- }, } \
- }, } \
- }, \
- { /* SHA224 */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
- {.auth = { \
- .algo = RTE_CRYPTO_AUTH_SHA224, \
- .block_size = 64, \
- .key_size = { \
- .min = 0, \
- .max = 0, \
- .increment = 0 \
- }, \
- .digest_size = { \
- .min = 1, \
- .max = 28, \
- .increment = 1 \
- }, \
- .iv_size = { 0 } \
- }, } \
- }, } \
- }, \
- { /* SHA256 */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
- {.auth = { \
- .algo = RTE_CRYPTO_AUTH_SHA256, \
- .block_size = 64, \
- .key_size = { \
- .min = 0, \
- .max = 0, \
- .increment = 0 \
- }, \
- .digest_size = { \
- .min = 1, \
- .max = 32, \
- .increment = 1 \
- }, \
- .iv_size = { 0 } \
- }, } \
- }, } \
- }, \
- { /* SHA384 */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
- {.auth = { \
- .algo = RTE_CRYPTO_AUTH_SHA384, \
- .block_size = 128, \
- .key_size = { \
- .min = 0, \
- .max = 0, \
- .increment = 0 \
- }, \
- .digest_size = { \
- .min = 1, \
- .max = 48, \
- .increment = 1 \
- }, \
- .iv_size = { 0 } \
- }, } \
- }, } \
- }, \
- { /* SHA512 */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
- {.auth = { \
- .algo = RTE_CRYPTO_AUTH_SHA512, \
- .block_size = 128, \
- .key_size = { \
- .min = 0, \
- .max = 0, \
- .increment = 0 \
- }, \
- .digest_size = { \
- .min = 1, \
- .max = 64, \
- .increment = 1 \
- }, \
- .iv_size = { 0 } \
- }, } \
- }, } \
- }, \
- { /* SHA1 HMAC */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
- {.auth = { \
- .algo = RTE_CRYPTO_AUTH_SHA1_HMAC, \
- .block_size = 64, \
- .key_size = { \
- .min = 1, \
- .max = 64, \
- .increment = 1 \
- }, \
- .digest_size = { \
- .min = 1, \
- .max = 20, \
- .increment = 1 \
- }, \
- .iv_size = { 0 } \
- }, } \
- }, } \
- }, \
- { /* SHA224 HMAC */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
- {.auth = { \
- .algo = RTE_CRYPTO_AUTH_SHA224_HMAC, \
- .block_size = 64, \
- .key_size = { \
- .min = 1, \
- .max = 64, \
- .increment = 1 \
- }, \
- .digest_size = { \
- .min = 1, \
- .max = 28, \
- .increment = 1 \
- }, \
- .iv_size = { 0 } \
- }, } \
- }, } \
- }, \
- { /* SHA256 HMAC */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
- {.auth = { \
- .algo = RTE_CRYPTO_AUTH_SHA256_HMAC, \
- .block_size = 64, \
- .key_size = { \
- .min = 1, \
- .max = 64, \
- .increment = 1 \
- }, \
- .digest_size = { \
- .min = 1, \
- .max = 32, \
- .increment = 1 \
- }, \
- .iv_size = { 0 } \
- }, } \
- }, } \
- }, \
- { /* SHA384 HMAC */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
- {.auth = { \
- .algo = RTE_CRYPTO_AUTH_SHA384_HMAC, \
- .block_size = 128, \
- .key_size = { \
- .min = 1, \
- .max = 128, \
- .increment = 1 \
- }, \
- .digest_size = { \
- .min = 1, \
- .max = 48, \
- .increment = 1 \
- }, \
- .iv_size = { 0 } \
- }, } \
- }, } \
- }, \
- { /* SHA512 HMAC */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
- {.auth = { \
- .algo = RTE_CRYPTO_AUTH_SHA512_HMAC, \
- .block_size = 128, \
- .key_size = { \
- .min = 1, \
- .max = 128, \
- .increment = 1 \
- }, \
- .digest_size = { \
- .min = 1, \
- .max = 64, \
- .increment = 1 \
- }, \
- .iv_size = { 0 } \
- }, } \
- }, } \
- }, \
- { /* MD5 HMAC */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
- {.auth = { \
- .algo = RTE_CRYPTO_AUTH_MD5_HMAC, \
- .block_size = 64, \
- .key_size = { \
- .min = 1, \
- .max = 64, \
- .increment = 1 \
- }, \
- .digest_size = { \
- .min = 1, \
- .max = 16, \
- .increment = 1 \
- }, \
- .iv_size = { 0 } \
- }, } \
- }, } \
- }, \
- { /* AES XCBC MAC */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
- {.auth = { \
- .algo = RTE_CRYPTO_AUTH_AES_XCBC_MAC, \
- .block_size = 16, \
- .key_size = { \
- .min = 16, \
- .max = 16, \
- .increment = 0 \
- }, \
- .digest_size = { \
- .min = 12, \
- .max = 12, \
- .increment = 0 \
- }, \
- .aad_size = { 0 }, \
- .iv_size = { 0 } \
- }, } \
- }, } \
- }, \
- { /* AES CMAC */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
- {.auth = { \
- .algo = RTE_CRYPTO_AUTH_AES_CMAC, \
- .block_size = 16, \
- .key_size = { \
- .min = 16, \
- .max = 16, \
- .increment = 0 \
- }, \
- .digest_size = { \
- .min = 4, \
- .max = 16, \
- .increment = 4 \
- } \
- }, } \
- }, } \
- }, \
- { /* AES CCM */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_AEAD, \
- {.aead = { \
- .algo = RTE_CRYPTO_AEAD_AES_CCM, \
- .block_size = 16, \
- .key_size = { \
- .min = 16, \
- .max = 16, \
- .increment = 0 \
- }, \
- .digest_size = { \
- .min = 4, \
- .max = 16, \
- .increment = 2 \
- }, \
- .aad_size = { \
- .min = 0, \
- .max = 224, \
- .increment = 1 \
- }, \
- .iv_size = { \
- .min = 7, \
- .max = 13, \
- .increment = 1 \
- }, \
- }, } \
- }, } \
- }, \
- { /* AES GCM */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_AEAD, \
- {.aead = { \
- .algo = RTE_CRYPTO_AEAD_AES_GCM, \
- .block_size = 16, \
- .key_size = { \
- .min = 16, \
- .max = 32, \
- .increment = 8 \
- }, \
- .digest_size = { \
- .min = 8, \
- .max = 16, \
- .increment = 4 \
- }, \
- .aad_size = { \
- .min = 0, \
- .max = 240, \
- .increment = 1 \
- }, \
- .iv_size = { \
- .min = 0, \
- .max = 12, \
- .increment = 12 \
- }, \
- }, } \
- }, } \
- }, \
- { /* AES GMAC (AUTH) */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
- {.auth = { \
- .algo = RTE_CRYPTO_AUTH_AES_GMAC, \
- .block_size = 16, \
- .key_size = { \
- .min = 16, \
- .max = 32, \
- .increment = 8 \
- }, \
- .digest_size = { \
- .min = 8, \
- .max = 16, \
- .increment = 4 \
- }, \
- .iv_size = { \
- .min = 0, \
- .max = 12, \
- .increment = 12 \
- } \
- }, } \
- }, } \
- }, \
- { /* SNOW 3G (UIA2) */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
- {.auth = { \
- .algo = RTE_CRYPTO_AUTH_SNOW3G_UIA2, \
- .block_size = 16, \
- .key_size = { \
- .min = 16, \
- .max = 16, \
- .increment = 0 \
- }, \
- .digest_size = { \
- .min = 4, \
- .max = 4, \
- .increment = 0 \
- }, \
- .iv_size = { \
- .min = 16, \
- .max = 16, \
- .increment = 0 \
- } \
- }, } \
- }, } \
- }, \
- { /* AES CBC */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER, \
- {.cipher = { \
- .algo = RTE_CRYPTO_CIPHER_AES_CBC, \
- .block_size = 16, \
- .key_size = { \
- .min = 16, \
- .max = 32, \
- .increment = 8 \
- }, \
- .iv_size = { \
- .min = 16, \
- .max = 16, \
- .increment = 0 \
- } \
- }, } \
- }, } \
- }, \
- { /* AES XTS */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER, \
- {.cipher = { \
- .algo = RTE_CRYPTO_CIPHER_AES_XTS, \
- .block_size = 16, \
- .key_size = { \
- .min = 32, \
- .max = 64, \
- .increment = 32 \
- }, \
- .iv_size = { \
- .min = 16, \
- .max = 16, \
- .increment = 0 \
- } \
- }, } \
- }, } \
- }, \
- { /* AES DOCSIS BPI */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER, \
- {.cipher = { \
- .algo = RTE_CRYPTO_CIPHER_AES_DOCSISBPI,\
- .block_size = 16, \
- .key_size = { \
- .min = 16, \
- .max = 32, \
- .increment = 16 \
- }, \
- .iv_size = { \
- .min = 16, \
- .max = 16, \
- .increment = 0 \
- } \
- }, } \
- }, } \
- }, \
- { /* SNOW 3G (UEA2) */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER, \
- {.cipher = { \
- .algo = RTE_CRYPTO_CIPHER_SNOW3G_UEA2, \
- .block_size = 16, \
- .key_size = { \
- .min = 16, \
- .max = 16, \
- .increment = 0 \
- }, \
- .iv_size = { \
- .min = 16, \
- .max = 16, \
- .increment = 0 \
- } \
- }, } \
- }, } \
- }, \
- { /* AES CTR */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER, \
- {.cipher = { \
- .algo = RTE_CRYPTO_CIPHER_AES_CTR, \
- .block_size = 16, \
- .key_size = { \
- .min = 16, \
- .max = 32, \
- .increment = 8 \
- }, \
- .iv_size = { \
- .min = 16, \
- .max = 16, \
- .increment = 0 \
- } \
- }, } \
- }, } \
- }, \
- { /* NULL (AUTH) */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
- {.auth = { \
- .algo = RTE_CRYPTO_AUTH_NULL, \
- .block_size = 1, \
- .key_size = { \
- .min = 0, \
- .max = 0, \
- .increment = 0 \
- }, \
- .digest_size = { \
- .min = 0, \
- .max = 0, \
- .increment = 0 \
- }, \
- .iv_size = { 0 } \
- }, }, \
- }, }, \
- }, \
- { /* NULL (CIPHER) */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER, \
- {.cipher = { \
- .algo = RTE_CRYPTO_CIPHER_NULL, \
- .block_size = 1, \
- .key_size = { \
- .min = 0, \
- .max = 0, \
- .increment = 0 \
- }, \
- .iv_size = { \
- .min = 0, \
- .max = 0, \
- .increment = 0 \
- } \
- }, }, \
- }, } \
- }, \
- { /* KASUMI (F8) */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER, \
- {.cipher = { \
- .algo = RTE_CRYPTO_CIPHER_KASUMI_F8, \
- .block_size = 8, \
- .key_size = { \
- .min = 16, \
- .max = 16, \
- .increment = 0 \
- }, \
- .iv_size = { \
- .min = 8, \
- .max = 8, \
- .increment = 0 \
- } \
- }, } \
- }, } \
- }, \
- { /* KASUMI (F9) */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
- {.auth = { \
- .algo = RTE_CRYPTO_AUTH_KASUMI_F9, \
- .block_size = 8, \
- .key_size = { \
- .min = 16, \
- .max = 16, \
- .increment = 0 \
- }, \
- .digest_size = { \
- .min = 4, \
- .max = 4, \
- .increment = 0 \
- }, \
- .iv_size = { 0 } \
- }, } \
- }, } \
- }, \
- { /* 3DES CBC */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER, \
- {.cipher = { \
- .algo = RTE_CRYPTO_CIPHER_3DES_CBC, \
- .block_size = 8, \
- .key_size = { \
- .min = 8, \
- .max = 24, \
- .increment = 8 \
- }, \
- .iv_size = { \
- .min = 8, \
- .max = 8, \
- .increment = 0 \
- } \
- }, } \
- }, } \
- }, \
- { /* 3DES CTR */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER, \
- {.cipher = { \
- .algo = RTE_CRYPTO_CIPHER_3DES_CTR, \
- .block_size = 8, \
- .key_size = { \
- .min = 16, \
- .max = 24, \
- .increment = 8 \
- }, \
- .iv_size = { \
- .min = 8, \
- .max = 8, \
- .increment = 0 \
- } \
- }, } \
- }, } \
- }, \
- { /* DES CBC */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER, \
- {.cipher = { \
- .algo = RTE_CRYPTO_CIPHER_DES_CBC, \
- .block_size = 8, \
- .key_size = { \
- .min = 8, \
- .max = 8, \
- .increment = 0 \
- }, \
- .iv_size = { \
- .min = 8, \
- .max = 8, \
- .increment = 0 \
- } \
- }, } \
- }, } \
- }, \
- { /* DES DOCSISBPI */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER, \
- {.cipher = { \
- .algo = RTE_CRYPTO_CIPHER_DES_DOCSISBPI,\
- .block_size = 8, \
- .key_size = { \
- .min = 8, \
- .max = 8, \
- .increment = 0 \
- }, \
- .iv_size = { \
- .min = 8, \
- .max = 8, \
- .increment = 0 \
- } \
- }, } \
- }, } \
- }
-
-#define QAT_EXTRA_GEN2_SYM_CAPABILITIES \
- { /* ZUC (EEA3) */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER, \
- {.cipher = { \
- .algo = RTE_CRYPTO_CIPHER_ZUC_EEA3, \
- .block_size = 16, \
- .key_size = { \
- .min = 16, \
- .max = 16, \
- .increment = 0 \
- }, \
- .iv_size = { \
- .min = 16, \
- .max = 16, \
- .increment = 0 \
- } \
- }, } \
- }, } \
- }, \
- { /* ZUC (EIA3) */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
- {.auth = { \
- .algo = RTE_CRYPTO_AUTH_ZUC_EIA3, \
- .block_size = 16, \
- .key_size = { \
- .min = 16, \
- .max = 16, \
- .increment = 0 \
- }, \
- .digest_size = { \
- .min = 4, \
- .max = 4, \
- .increment = 0 \
- }, \
- .iv_size = { \
- .min = 16, \
- .max = 16, \
- .increment = 0 \
- } \
- }, } \
- }, } \
- }
-
-#define QAT_EXTRA_GEN3_SYM_CAPABILITIES \
- { /* Chacha20-Poly1305 */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_AEAD, \
- {.aead = { \
- .algo = RTE_CRYPTO_AEAD_CHACHA20_POLY1305, \
- .block_size = 64, \
- .key_size = { \
- .min = 32, \
- .max = 32, \
- .increment = 0 \
- }, \
- .digest_size = { \
- .min = 16, \
- .max = 16, \
- .increment = 0 \
- }, \
- .aad_size = { \
- .min = 0, \
- .max = 240, \
- .increment = 1 \
- }, \
- .iv_size = { \
- .min = 12, \
- .max = 12, \
- .increment = 0 \
- }, \
- }, } \
- }, } \
- }
-
-#define QAT_BASE_GEN4_SYM_CAPABILITIES \
- { /* AES CBC */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER, \
- {.cipher = { \
- .algo = RTE_CRYPTO_CIPHER_AES_CBC, \
- .block_size = 16, \
- .key_size = { \
- .min = 16, \
- .max = 32, \
- .increment = 8 \
- }, \
- .iv_size = { \
- .min = 16, \
- .max = 16, \
- .increment = 0 \
- } \
- }, } \
- }, } \
- }, \
- { /* SHA1 HMAC */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
- {.auth = { \
- .algo = RTE_CRYPTO_AUTH_SHA1_HMAC, \
- .block_size = 64, \
- .key_size = { \
- .min = 1, \
- .max = 64, \
- .increment = 1 \
- }, \
- .digest_size = { \
- .min = 1, \
- .max = 20, \
- .increment = 1 \
- }, \
- .iv_size = { 0 } \
- }, } \
- }, } \
- }, \
- { /* SHA224 HMAC */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
- {.auth = { \
- .algo = RTE_CRYPTO_AUTH_SHA224_HMAC, \
- .block_size = 64, \
- .key_size = { \
- .min = 1, \
- .max = 64, \
- .increment = 1 \
- }, \
- .digest_size = { \
- .min = 1, \
- .max = 28, \
- .increment = 1 \
- }, \
- .iv_size = { 0 } \
- }, } \
- }, } \
- }, \
- { /* SHA256 HMAC */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
- {.auth = { \
- .algo = RTE_CRYPTO_AUTH_SHA256_HMAC, \
- .block_size = 64, \
- .key_size = { \
- .min = 1, \
- .max = 64, \
- .increment = 1 \
- }, \
- .digest_size = { \
- .min = 1, \
- .max = 32, \
- .increment = 1 \
- }, \
- .iv_size = { 0 } \
- }, } \
- }, } \
- }, \
- { /* SHA384 HMAC */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
- {.auth = { \
- .algo = RTE_CRYPTO_AUTH_SHA384_HMAC, \
- .block_size = 128, \
- .key_size = { \
- .min = 1, \
- .max = 128, \
- .increment = 1 \
- }, \
- .digest_size = { \
- .min = 1, \
- .max = 48, \
- .increment = 1 \
- }, \
- .iv_size = { 0 } \
- }, } \
- }, } \
- }, \
- { /* SHA512 HMAC */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
- {.auth = { \
- .algo = RTE_CRYPTO_AUTH_SHA512_HMAC, \
- .block_size = 128, \
- .key_size = { \
- .min = 1, \
- .max = 128, \
- .increment = 1 \
- }, \
- .digest_size = { \
- .min = 1, \
- .max = 64, \
- .increment = 1 \
- }, \
- .iv_size = { 0 } \
- }, } \
- }, } \
- }, \
- { /* AES XCBC MAC */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
- {.auth = { \
- .algo = RTE_CRYPTO_AUTH_AES_XCBC_MAC, \
- .block_size = 16, \
- .key_size = { \
- .min = 16, \
- .max = 16, \
- .increment = 0 \
- }, \
- .digest_size = { \
- .min = 12, \
- .max = 12, \
- .increment = 0 \
- }, \
- .aad_size = { 0 }, \
- .iv_size = { 0 } \
- }, } \
- }, } \
- }, \
- { /* AES CMAC */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
- {.auth = { \
- .algo = RTE_CRYPTO_AUTH_AES_CMAC, \
- .block_size = 16, \
- .key_size = { \
- .min = 16, \
- .max = 16, \
- .increment = 0 \
- }, \
- .digest_size = { \
- .min = 4, \
- .max = 16, \
- .increment = 4 \
- } \
- }, } \
- }, } \
- }, \
- { /* AES DOCSIS BPI */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER, \
- {.cipher = { \
- .algo = RTE_CRYPTO_CIPHER_AES_DOCSISBPI,\
- .block_size = 16, \
- .key_size = { \
- .min = 16, \
- .max = 32, \
- .increment = 16 \
- }, \
- .iv_size = { \
- .min = 16, \
- .max = 16, \
- .increment = 0 \
- } \
- }, } \
- }, } \
- }, \
- { /* NULL (AUTH) */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
- {.auth = { \
- .algo = RTE_CRYPTO_AUTH_NULL, \
- .block_size = 1, \
- .key_size = { \
- .min = 0, \
- .max = 0, \
- .increment = 0 \
- }, \
- .digest_size = { \
- .min = 0, \
- .max = 0, \
- .increment = 0 \
- }, \
- .iv_size = { 0 } \
- }, }, \
- }, }, \
- }, \
- { /* NULL (CIPHER) */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER, \
- {.cipher = { \
- .algo = RTE_CRYPTO_CIPHER_NULL, \
- .block_size = 1, \
- .key_size = { \
- .min = 0, \
- .max = 0, \
- .increment = 0 \
- }, \
- .iv_size = { \
- .min = 0, \
- .max = 0, \
- .increment = 0 \
- } \
- }, }, \
- }, } \
- }, \
- { /* SHA1 */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
- {.auth = { \
- .algo = RTE_CRYPTO_AUTH_SHA1, \
- .block_size = 64, \
- .key_size = { \
- .min = 0, \
- .max = 0, \
- .increment = 0 \
- }, \
- .digest_size = { \
- .min = 1, \
- .max = 20, \
- .increment = 1 \
- }, \
- .iv_size = { 0 } \
- }, } \
- }, } \
- }, \
- { /* SHA224 */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
- {.auth = { \
- .algo = RTE_CRYPTO_AUTH_SHA224, \
- .block_size = 64, \
- .key_size = { \
- .min = 0, \
- .max = 0, \
- .increment = 0 \
- }, \
- .digest_size = { \
- .min = 1, \
- .max = 28, \
- .increment = 1 \
- }, \
- .iv_size = { 0 } \
- }, } \
- }, } \
- }, \
- { /* SHA256 */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
- {.auth = { \
- .algo = RTE_CRYPTO_AUTH_SHA256, \
- .block_size = 64, \
- .key_size = { \
- .min = 0, \
- .max = 0, \
- .increment = 0 \
- }, \
- .digest_size = { \
- .min = 1, \
- .max = 32, \
- .increment = 1 \
- }, \
- .iv_size = { 0 } \
- }, } \
- }, } \
- }, \
- { /* SHA384 */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
- {.auth = { \
- .algo = RTE_CRYPTO_AUTH_SHA384, \
- .block_size = 128, \
- .key_size = { \
- .min = 0, \
- .max = 0, \
- .increment = 0 \
- }, \
- .digest_size = { \
- .min = 1, \
- .max = 48, \
- .increment = 1 \
- }, \
- .iv_size = { 0 } \
- }, } \
- }, } \
- }, \
- { /* SHA512 */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
- {.auth = { \
- .algo = RTE_CRYPTO_AUTH_SHA512, \
- .block_size = 128, \
- .key_size = { \
- .min = 0, \
- .max = 0, \
- .increment = 0 \
- }, \
- .digest_size = { \
- .min = 1, \
- .max = 64, \
- .increment = 1 \
- }, \
- .iv_size = { 0 } \
- }, } \
- }, } \
- }, \
- { /* AES CTR */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER, \
- {.cipher = { \
- .algo = RTE_CRYPTO_CIPHER_AES_CTR, \
- .block_size = 16, \
- .key_size = { \
- .min = 16, \
- .max = 32, \
- .increment = 8 \
- }, \
- .iv_size = { \
- .min = 16, \
- .max = 16, \
- .increment = 0 \
- } \
- }, } \
- }, } \
- }, \
- { /* AES GCM */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_AEAD, \
- {.aead = { \
- .algo = RTE_CRYPTO_AEAD_AES_GCM, \
- .block_size = 16, \
- .key_size = { \
- .min = 16, \
- .max = 32, \
- .increment = 8 \
- }, \
- .digest_size = { \
- .min = 8, \
- .max = 16, \
- .increment = 4 \
- }, \
- .aad_size = { \
- .min = 0, \
- .max = 240, \
- .increment = 1 \
- }, \
- .iv_size = { \
- .min = 0, \
- .max = 12, \
- .increment = 12 \
- }, \
- }, } \
- }, } \
- }, \
- { /* AES CCM */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_AEAD, \
- {.aead = { \
- .algo = RTE_CRYPTO_AEAD_AES_CCM, \
- .block_size = 16, \
- .key_size = { \
- .min = 16, \
- .max = 16, \
- .increment = 0 \
- }, \
- .digest_size = { \
- .min = 4, \
- .max = 16, \
- .increment = 2 \
- }, \
- .aad_size = { \
- .min = 0, \
- .max = 224, \
- .increment = 1 \
- }, \
- .iv_size = { \
- .min = 7, \
- .max = 13, \
- .increment = 1 \
- }, \
- }, } \
- }, } \
- }, \
- { /* Chacha20-Poly1305 */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_AEAD, \
- {.aead = { \
- .algo = RTE_CRYPTO_AEAD_CHACHA20_POLY1305, \
- .block_size = 64, \
- .key_size = { \
- .min = 32, \
- .max = 32, \
- .increment = 0 \
- }, \
- .digest_size = { \
- .min = 16, \
- .max = 16, \
- .increment = 0 \
- }, \
- .aad_size = { \
- .min = 0, \
- .max = 240, \
- .increment = 1 \
- }, \
- .iv_size = { \
- .min = 12, \
- .max = 12, \
- .increment = 0 \
- }, \
- }, } \
- }, } \
- }, \
- { /* AES GMAC (AUTH) */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
- {.auth = { \
- .algo = RTE_CRYPTO_AUTH_AES_GMAC, \
- .block_size = 16, \
- .key_size = { \
- .min = 16, \
- .max = 32, \
- .increment = 8 \
- }, \
- .digest_size = { \
- .min = 8, \
- .max = 16, \
- .increment = 4 \
- }, \
- .iv_size = { \
- .min = 0, \
- .max = 12, \
- .increment = 12 \
- } \
- }, } \
- }, } \
- } \
-
-
-
-#ifdef RTE_LIB_SECURITY
-#define QAT_SECURITY_SYM_CAPABILITIES \
- { /* AES DOCSIS BPI */ \
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
- {.sym = { \
- .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER, \
- {.cipher = { \
- .algo = RTE_CRYPTO_CIPHER_AES_DOCSISBPI,\
- .block_size = 16, \
- .key_size = { \
- .min = 16, \
- .max = 32, \
- .increment = 16 \
- }, \
- .iv_size = { \
- .min = 16, \
- .max = 16, \
- .increment = 0 \
- } \
- }, } \
- }, } \
- }
-
-#define QAT_SECURITY_CAPABILITIES(sym) \
- [0] = { /* DOCSIS Uplink */ \
- .action = RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL, \
- .protocol = RTE_SECURITY_PROTOCOL_DOCSIS, \
- .docsis = { \
- .direction = RTE_SECURITY_DOCSIS_UPLINK \
- }, \
- .crypto_capabilities = (sym) \
- }, \
- [1] = { /* DOCSIS Downlink */ \
- .action = RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL, \
- .protocol = RTE_SECURITY_PROTOCOL_DOCSIS, \
- .docsis = { \
- .direction = RTE_SECURITY_DOCSIS_DOWNLINK \
- }, \
- .crypto_capabilities = (sym) \
- }
-#endif
-
-#endif /* _QAT_SYM_CAPABILITIES_H_ */
uint8_t qat_sym_driver_id;
-static const struct rte_cryptodev_capabilities qat_gen1_sym_capabilities[] = {
- QAT_BASE_GEN1_SYM_CAPABILITIES,
- RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
-};
-
-static const struct rte_cryptodev_capabilities qat_gen2_sym_capabilities[] = {
- QAT_BASE_GEN1_SYM_CAPABILITIES,
- QAT_EXTRA_GEN2_SYM_CAPABILITIES,
- RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
-};
-
-static const struct rte_cryptodev_capabilities qat_gen3_sym_capabilities[] = {
- QAT_BASE_GEN1_SYM_CAPABILITIES,
- QAT_EXTRA_GEN2_SYM_CAPABILITIES,
- QAT_EXTRA_GEN3_SYM_CAPABILITIES,
- RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
-};
-
-static const struct rte_cryptodev_capabilities qat_gen4_sym_capabilities[] = {
- QAT_BASE_GEN4_SYM_CAPABILITIES,
- RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
-};
-
-#ifdef RTE_LIB_SECURITY
-static const struct rte_cryptodev_capabilities
- qat_security_sym_capabilities[] = {
- QAT_SECURITY_SYM_CAPABILITIES,
- RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
-};
-
-static const struct rte_security_capability qat_security_capabilities[] = {
- QAT_SECURITY_CAPABILITIES(qat_security_sym_capabilities),
- {
- .action = RTE_SECURITY_ACTION_TYPE_NONE
- }
-};
-#endif
-
-static struct rte_cryptodev_ops crypto_qat_ops = {
-
- /* Device related operations */
- .dev_configure = qat_cryptodev_config,
- .dev_start = qat_cryptodev_start,
- .dev_stop = qat_cryptodev_stop,
- .dev_close = qat_cryptodev_close,
- .dev_infos_get = qat_cryptodev_info_get,
-
- .stats_get = qat_cryptodev_stats_get,
- .stats_reset = qat_cryptodev_stats_reset,
- .queue_pair_setup = qat_cryptodev_qp_setup,
- .queue_pair_release = qat_cryptodev_qp_release,
-
- /* Crypto related operations */
- .sym_session_get_size = qat_sym_session_get_private_size,
- .sym_session_configure = qat_sym_session_configure,
- .sym_session_clear = qat_sym_session_clear,
-
- /* Raw data-path API related operations */
- .sym_get_raw_dp_ctx_size = qat_sym_get_dp_ctx_size,
- .sym_configure_raw_dp_ctx = qat_sym_configure_dp_ctx,
-};
-
-#ifdef RTE_LIB_SECURITY
-static const struct rte_security_capability *
-qat_security_cap_get(void *device __rte_unused)
-{
- return qat_security_capabilities;
-}
-
-static struct rte_security_ops security_qat_ops = {
-
- .session_create = qat_security_session_create,
- .session_update = NULL,
- .session_stats_get = NULL,
- .session_destroy = qat_security_session_destroy,
- .set_pkt_metadata = NULL,
- .capabilities_get = qat_security_cap_get
-};
-#endif
+struct qat_crypto_gen_dev_ops qat_sym_gen_dev_ops[QAT_N_GENS];
void
qat_sym_init_op_cookie(void *op_cookie)
int i = 0, ret = 0;
struct qat_device_info *qat_dev_instance =
&qat_pci_devs[qat_pci_dev->qat_dev_id];
-
struct rte_cryptodev_pmd_init_params init_params = {
.name = "",
.socket_id = qat_dev_instance->pci_dev->device.numa_node,
char capa_memz_name[RTE_CRYPTODEV_NAME_MAX_LEN];
struct rte_cryptodev *cryptodev;
struct qat_cryptodev_private *internals;
+ struct qat_capabilities_info capa_info;
const struct rte_cryptodev_capabilities *capabilities;
+ const struct qat_crypto_gen_dev_ops *gen_dev_ops =
+ &qat_sym_gen_dev_ops[qat_pci_dev->qat_dev_gen];
uint64_t capa_size;
snprintf(name, RTE_CRYPTODEV_NAME_MAX_LEN, "%s_%s",
qat_pci_dev->name, "sym");
QAT_LOG(DEBUG, "Creating QAT SYM device %s", name);
+ if (gen_dev_ops->cryptodev_ops == NULL) {
+ QAT_LOG(ERR, "Device %s does not support symmetric crypto",
+ name);
+ return -EFAULT;
+ }
+
/*
* All processes must use same driver id so they can share sessions.
* Store driver_id so we can validate that all processes have the same
qat_dev_instance->sym_rte_dev.name = cryptodev->data->name;
cryptodev->driver_id = qat_sym_driver_id;
- cryptodev->dev_ops = &crypto_qat_ops;
+ cryptodev->dev_ops = gen_dev_ops->cryptodev_ops;
cryptodev->enqueue_burst = qat_sym_pmd_enqueue_op_burst;
cryptodev->dequeue_burst = qat_sym_pmd_dequeue_op_burst;
- cryptodev->feature_flags = RTE_CRYPTODEV_FF_SYMMETRIC_CRYPTO |
- RTE_CRYPTODEV_FF_HW_ACCELERATED |
- RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING |
- RTE_CRYPTODEV_FF_IN_PLACE_SGL |
- RTE_CRYPTODEV_FF_OOP_SGL_IN_SGL_OUT |
- RTE_CRYPTODEV_FF_OOP_SGL_IN_LB_OUT |
- RTE_CRYPTODEV_FF_OOP_LB_IN_SGL_OUT |
- RTE_CRYPTODEV_FF_OOP_LB_IN_LB_OUT |
- RTE_CRYPTODEV_FF_DIGEST_ENCRYPTED;
-
- if (qat_pci_dev->qat_dev_gen < QAT_GEN4)
- cryptodev->feature_flags |= RTE_CRYPTODEV_FF_SYM_RAW_DP;
+ cryptodev->feature_flags = gen_dev_ops->get_feature_flags(qat_pci_dev);
if (rte_eal_process_type() != RTE_PROC_PRIMARY)
return 0;
- snprintf(capa_memz_name, RTE_CRYPTODEV_NAME_MAX_LEN,
- "QAT_SYM_CAPA_GEN_%d",
- qat_pci_dev->qat_dev_gen);
-
#ifdef RTE_LIB_SECURITY
- struct rte_security_ctx *security_instance;
- security_instance = rte_malloc("qat_sec",
- sizeof(struct rte_security_ctx),
- RTE_CACHE_LINE_SIZE);
- if (security_instance == NULL) {
- QAT_LOG(ERR, "rte_security_ctx memory alloc failed");
- ret = -ENOMEM;
- goto error;
- }
+ if (gen_dev_ops->create_security_ctx) {
+ cryptodev->security_ctx =
+ gen_dev_ops->create_security_ctx((void *)cryptodev);
+ if (cryptodev->security_ctx == NULL) {
+ QAT_LOG(ERR, "rte_security_ctx memory alloc failed");
+ ret = -ENOMEM;
+ goto error;
+ }
+
+ cryptodev->feature_flags |= RTE_CRYPTODEV_FF_SECURITY;
+ QAT_LOG(INFO, "Device %s rte_security support enabled", name);
+ } else
+ QAT_LOG(INFO, "Device %s rte_security support disabled", name);
- security_instance->device = (void *)cryptodev;
- security_instance->ops = &security_qat_ops;
- security_instance->sess_cnt = 0;
- cryptodev->security_ctx = security_instance;
- cryptodev->feature_flags |= RTE_CRYPTODEV_FF_SECURITY;
#endif
+ snprintf(capa_memz_name, RTE_CRYPTODEV_NAME_MAX_LEN,
+ "QAT_SYM_CAPA_GEN_%d",
+ qat_pci_dev->qat_dev_gen);
internals = cryptodev->data->dev_private;
internals->qat_dev = qat_pci_dev;
internals->service_type = QAT_SERVICE_SYMMETRIC;
-
internals->dev_id = cryptodev->data->dev_id;
- switch (qat_pci_dev->qat_dev_gen) {
- case QAT_GEN1:
- capabilities = qat_gen1_sym_capabilities;
- capa_size = sizeof(qat_gen1_sym_capabilities);
- break;
- case QAT_GEN2:
- capabilities = qat_gen2_sym_capabilities;
- capa_size = sizeof(qat_gen2_sym_capabilities);
- break;
- case QAT_GEN3:
- capabilities = qat_gen3_sym_capabilities;
- capa_size = sizeof(qat_gen3_sym_capabilities);
- break;
- case QAT_GEN4:
- capabilities = qat_gen4_sym_capabilities;
- capa_size = sizeof(qat_gen4_sym_capabilities);
- break;
- default:
- QAT_LOG(DEBUG,
- "QAT gen %d capabilities unknown",
- qat_pci_dev->qat_dev_gen);
- ret = -(EINVAL);
- goto error;
- }
+
+ capa_info = gen_dev_ops->get_capabilities(qat_pci_dev);
+ capabilities = capa_info.data;
+ capa_size = capa_info.size;
internals->capa_mz = rte_memzone_lookup(capa_memz_name);
if (internals->capa_mz == NULL) {
internals->capa_mz = rte_memzone_reserve(capa_memz_name,
- capa_size,
- rte_socket_id(), 0);
- }
- if (internals->capa_mz == NULL) {
- QAT_LOG(DEBUG,
- "Error allocating memzone for capabilities, destroying "
- "PMD for %s",
- name);
- ret = -EFAULT;
- goto error;
+ capa_size, rte_socket_id(), 0);
+ if (internals->capa_mz == NULL) {
+ QAT_LOG(DEBUG,
+				"Error allocating capability memzone for %s",
+ name);
+ ret = -EFAULT;
+ goto error;
+ }
}
memcpy(internals->capa_mz->addr, capabilities, capa_size);
#include <rte_security.h>
#endif
-#include "qat_sym_capabilities.h"
#include "qat_crypto.h"
#include "qat_device.h"
#define QAT_SYM_CAP_MIXED_CRYPTO (1 << 0)
#define QAT_SYM_CAP_VALID (1 << 31)
+/**
+ * Helper macros to add sym capabilities
+ * <n: name> <b: block size> <k: key size> <d: digest size>
+ * <a: aad size> <i: iv size>
+ **/
+#define QAT_SYM_PLAIN_AUTH_CAP(n, b, d) \
+ { \
+ .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
+ {.sym = { \
+ .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
+ {.auth = { \
+ .algo = RTE_CRYPTO_AUTH_##n, \
+ b, d \
+ }, } \
+ }, } \
+ }
+
+#define QAT_SYM_AUTH_CAP(n, b, k, d, a, i) \
+ { \
+ .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
+ {.sym = { \
+ .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
+ {.auth = { \
+ .algo = RTE_CRYPTO_AUTH_##n, \
+ b, k, d, a, i \
+ }, } \
+ }, } \
+ }
+
+#define QAT_SYM_AEAD_CAP(n, b, k, d, a, i) \
+ { \
+ .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
+ {.sym = { \
+ .xform_type = RTE_CRYPTO_SYM_XFORM_AEAD, \
+ {.aead = { \
+ .algo = RTE_CRYPTO_AEAD_##n, \
+ b, k, d, a, i \
+ }, } \
+ }, } \
+ }
+
+#define QAT_SYM_CIPHER_CAP(n, b, k, i) \
+ { \
+ .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
+ {.sym = { \
+ .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER, \
+ {.cipher = { \
+ .algo = RTE_CRYPTO_CIPHER_##n, \
+ b, k, i \
+ }, } \
+ }, } \
+ }
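+
+/*
+ * Illustrative usage only (not part of this patch). Because the b/k/d/a/i
+ * arguments must each be a single preprocessor argument, range and scalar
+ * fields are typically wrapped in small helpers; assuming hypothetical
+ * helpers such as
+ *
+ *	#define CAP_SET(n, v)		.n = v
+ *	#define CAP_RNG(n, l, r, i)	.n = {.min = l, .max = r, .increment = i}
+ *
+ * the AES CBC entry removed from qat_sym_capabilities.h could be written as
+ *
+ *	QAT_SYM_CIPHER_CAP(AES_CBC,
+ *		CAP_SET(block_size, 16),
+ *		CAP_RNG(key_size, 16, 32, 8), CAP_RNG(iv_size, 16, 16, 0)),
+ */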
+
extern uint8_t qat_sym_driver_id;
+extern struct qat_crypto_gen_dev_ops qat_sym_gen_dev_ops[];
+
int
qat_sym_dev_create(struct qat_pci_device *qat_pci_dev,
struct qat_dev_cmd_param *qat_dev_cmd_param);