The CCP PMD supports offloading authentication operations to either the
CCP or the CPU. Earlier versions of this patch exposed that choice as a
compile-time option; this patch turns it into a run-time option.
Users can pass "ccp_auth_opt=1" as an additional argument to the vdev
parameter to perform authentication operations on the CPU.
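
For example, CPU authentication can be selected with the vdev argument
documented below:

    --vdev "crypto_ccp,ccp_auth_opt=1"
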
Signed-off-by: Ravi Kumar <ravi1.kumar@amd.com>
# Compile PMD for AMD CCP crypto device
#
CONFIG_RTE_LIBRTE_PMD_CCP=n
-CONFIG_RTE_LIBRTE_PMD_CCP_CPU_AUTH=n
#
# Compile PMD for Marvell Crypto device
Installation
------------
-To compile CCP PMD, it has to be enabled in the config/common_base file.
+To compile the CCP PMD, it has to be enabled in the config/common_base file and the
+openssl packages have to be installed in the build environment.
+
* ``CONFIG_RTE_LIBRTE_PMD_CCP=y``
-The CCP PMD also supports computing authentication over CPU with cipher offloaded
-to CCP. To enable this feature, enable following in the configuration.
-* ``CONFIG_RTE_LIBRTE_PMD_CCP_CPU_AUTH=y``
+For Ubuntu 16.04 LTS, use the following command to install openssl on the build system:
+
+.. code-block:: console
+
+ sudo apt-get install openssl
This code was verified on Ubuntu 16.04.
echo "1022 1456" > /sys/bus/pci/drivers/igb_uio/new_id
Another way to bind the CCP devices to DPDK UIO driver is by using the ``dpdk-devbind.py`` script.
-The following command assumes ``BFD`` of ``0000:09:00.2``::
+The following command assumes a ``BDF`` of ``0000:09:00.2``::
cd to the top-level DPDK directory
./usertools/dpdk-devbind.py -b igb_uio 0000:09:00.2
-To verify real traffic l2fwd-crypto example can be used with following command:
+In order to enable the CCP crypto PMD, the user must set CONFIG_RTE_LIBRTE_PMD_CCP=y in config/common_base.
+
+To use the PMD in an application, the user must:
+
+* Call rte_vdev_init("crypto_ccp") within the application.
+
+* Use --vdev="crypto_ccp" in the EAL options, which will call rte_vdev_init() internally.
+
+The following parameters (all optional) can be provided in the previous two calls, as shown in
+the example after this list:
+
+* socket_id: Specify the socket where the memory for the device is going to be allocated
+  (by default, socket_id is the socket on which the core that is creating the PMD is running).
+
+* max_nb_queue_pairs: Specify the maximum number of queue pairs in the device.
+
+* max_nb_sessions: Specify the maximum number of sessions that can be created (2048 by default).
+
+* ccp_auth_opt: Set to 1 to perform authentication operations on the CPU using openssl APIs
+  instead of on the CCP (0, the default).
+
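+As an illustration, the optional parameters can be combined in a single --vdev string (the
+values below are arbitrary examples, not recommended settings):
+
+.. code-block:: console
+
+   --vdev "crypto_ccp,socket_id=0,max_nb_queue_pairs=1,max_nb_sessions=2048,ccp_auth_opt=1"
+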
+To validate the CCP PMD, the l2fwd-crypto example can be used with the following command:
.. code-block:: console
:11:11:11:11:11:11:11:11:11:11:11:11:11:11:11:11:11:11:11:11:11:11:11:11:11
:11:11:11:11:11:11:11:11:11:11:11:11:11:11:11:11:11
+The CCP PMD also supports computing authentication on the CPU while the cipher is offloaded to
+the CCP. To enable this feature, pass ccp_auth_opt=1 as an additional argument to the --vdev
+parameter, as follows:
+
+.. code-block:: console
+
+ sudo ./build/l2fwd-crypto -l 1 -n 4 --vdev "crypto_ccp,ccp_auth_opt=1" -- -p 0x1
+ --chain CIPHER_HASH --cipher_op ENCRYPT --cipher_algo AES_CBC
+ --cipher_key 00:01:02:03:04:05:06:07:08:09:0a:0b:0c:0d:0e:0f
+ --iv 00:01:02:03:04:05:06:07:08:09:0a:0b:0c:0d:0e:ff
+ --auth_op GENERATE --auth_algo SHA1_HMAC
+ --auth_key 11:11:11:11:11:11:11:11:11:11:11:11:11:11:11:11:11:11:11:11:11:11
+ :11:11:11:11:11:11:11:11:11:11:11:11:11:11:11:11:11:11:11:11:11:11:11:11:11
+ :11:11:11:11:11:11:11:11:11:11:11:11:11:11:11:11:11
+
Limitations
-----------
-* Chained mbufs are not supported
-* MD5_HMAC is supported only if ``CONFIG_RTE_LIBRTE_PMD_CCP_CPU_AUTH=y`` is enabled in configuration
+* Chained mbufs are not supported.
+* MD5_HMAC is supported only for CPU-based authentication.
LDLIBS += -lrte_cryptodev
LDLIBS += -lrte_pci -lrte_bus_pci
LDLIBS += -lrte_bus_vdev
+LDLIBS += -lrte_kvargs
# versioning export map
EXPORT_MAP := rte_pmd_ccp_version.map
#include "ccp_pci.h"
#include "ccp_pmd_private.h"
-#ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
#include <openssl/conf.h>
#include <openssl/err.h>
#include <openssl/hmac.h>
-#endif
/* SHA initial context values */
static uint32_t ccp_sha1_init[SHA_COMMON_DIGEST_SIZE / sizeof(uint32_t)] = {
else
sess->auth.op = CCP_AUTH_OP_VERIFY;
switch (auth_xform->algo) {
-#ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
case RTE_CRYPTO_AUTH_MD5_HMAC:
- sess->auth.algo = CCP_AUTH_ALGO_MD5_HMAC;
- sess->auth.offset = (CCP_SB_BYTES << 1) - MD5_DIGEST_SIZE;
- sess->auth.key_length = auth_xform->key.length;
- sess->auth.block_size = MD5_BLOCK_SIZE;
- memset(sess->auth.key, 0, sess->auth.block_size);
- rte_memcpy(sess->auth.key, auth_xform->key.data,
- auth_xform->key.length);
+ if (sess->auth_opt) {
+ sess->auth.algo = CCP_AUTH_ALGO_MD5_HMAC;
+ sess->auth.offset = ((CCP_SB_BYTES << 1) -
+ MD5_DIGEST_SIZE);
+ sess->auth.key_length = auth_xform->key.length;
+ sess->auth.block_size = MD5_BLOCK_SIZE;
+ memset(sess->auth.key, 0, sess->auth.block_size);
+ rte_memcpy(sess->auth.key, auth_xform->key.data,
+ auth_xform->key.length);
+ } else
+ return -1; /* HMAC MD5 not supported on CCP */
break;
-#endif
case RTE_CRYPTO_AUTH_SHA1:
sess->auth.engine = CCP_ENGINE_SHA;
sess->auth.algo = CCP_AUTH_ALGO_SHA1;
sess->auth.offset = CCP_SB_BYTES - SHA1_DIGEST_SIZE;
break;
case RTE_CRYPTO_AUTH_SHA1_HMAC:
-#ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
- if (auth_xform->key.length > SHA1_BLOCK_SIZE)
- return -1;
- sess->auth.algo = CCP_AUTH_ALGO_SHA1_HMAC;
- sess->auth.offset = CCP_SB_BYTES - SHA1_DIGEST_SIZE;
- sess->auth.block_size = SHA1_BLOCK_SIZE;
- sess->auth.key_length = auth_xform->key.length;
- memset(sess->auth.key, 0, sess->auth.block_size);
- rte_memcpy(sess->auth.key, auth_xform->key.data,
- auth_xform->key.length);
-#else
- if (auth_xform->key.length > SHA1_BLOCK_SIZE)
- return -1;
- sess->auth.engine = CCP_ENGINE_SHA;
- sess->auth.algo = CCP_AUTH_ALGO_SHA1_HMAC;
- sess->auth.ut.sha_type = CCP_SHA_TYPE_1;
- sess->auth.ctx_len = CCP_SB_BYTES;
- sess->auth.offset = CCP_SB_BYTES - SHA1_DIGEST_SIZE;
- sess->auth.block_size = SHA1_BLOCK_SIZE;
- sess->auth.key_length = auth_xform->key.length;
- memset(sess->auth.key, 0, sess->auth.block_size);
- memset(sess->auth.pre_compute, 0, sess->auth.ctx_len << 1);
- rte_memcpy(sess->auth.key, auth_xform->key.data,
- auth_xform->key.length);
- if (generate_partial_hash(sess))
- return -1;
-#endif
+ if (sess->auth_opt) {
+ if (auth_xform->key.length > SHA1_BLOCK_SIZE)
+ return -1;
+ sess->auth.algo = CCP_AUTH_ALGO_SHA1_HMAC;
+ sess->auth.offset = CCP_SB_BYTES - SHA1_DIGEST_SIZE;
+ sess->auth.block_size = SHA1_BLOCK_SIZE;
+ sess->auth.key_length = auth_xform->key.length;
+ memset(sess->auth.key, 0, sess->auth.block_size);
+ rte_memcpy(sess->auth.key, auth_xform->key.data,
+ auth_xform->key.length);
+ } else {
+ if (auth_xform->key.length > SHA1_BLOCK_SIZE)
+ return -1;
+ sess->auth.engine = CCP_ENGINE_SHA;
+ sess->auth.algo = CCP_AUTH_ALGO_SHA1_HMAC;
+ sess->auth.ut.sha_type = CCP_SHA_TYPE_1;
+ sess->auth.ctx_len = CCP_SB_BYTES;
+ sess->auth.offset = CCP_SB_BYTES - SHA1_DIGEST_SIZE;
+ sess->auth.block_size = SHA1_BLOCK_SIZE;
+ sess->auth.key_length = auth_xform->key.length;
+ memset(sess->auth.key, 0, sess->auth.block_size);
+ memset(sess->auth.pre_compute, 0,
+ sess->auth.ctx_len << 1);
+ rte_memcpy(sess->auth.key, auth_xform->key.data,
+ auth_xform->key.length);
+ if (generate_partial_hash(sess))
+ return -1;
+ }
break;
case RTE_CRYPTO_AUTH_SHA224:
sess->auth.algo = CCP_AUTH_ALGO_SHA224;
sess->auth.offset = CCP_SB_BYTES - SHA224_DIGEST_SIZE;
break;
case RTE_CRYPTO_AUTH_SHA224_HMAC:
-#ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
- if (auth_xform->key.length > SHA224_BLOCK_SIZE)
- return -1;
- sess->auth.algo = CCP_AUTH_ALGO_SHA224_HMAC;
- sess->auth.offset = CCP_SB_BYTES - SHA224_DIGEST_SIZE;
- sess->auth.block_size = SHA224_BLOCK_SIZE;
- sess->auth.key_length = auth_xform->key.length;
- memset(sess->auth.key, 0, sess->auth.block_size);
- rte_memcpy(sess->auth.key, auth_xform->key.data,
- auth_xform->key.length);
-#else
- if (auth_xform->key.length > SHA224_BLOCK_SIZE)
- return -1;
- sess->auth.algo = CCP_AUTH_ALGO_SHA224_HMAC;
- sess->auth.engine = CCP_ENGINE_SHA;
- sess->auth.ut.sha_type = CCP_SHA_TYPE_224;
- sess->auth.ctx_len = CCP_SB_BYTES;
- sess->auth.offset = CCP_SB_BYTES - SHA224_DIGEST_SIZE;
- sess->auth.block_size = SHA224_BLOCK_SIZE;
- sess->auth.key_length = auth_xform->key.length;
- memset(sess->auth.key, 0, sess->auth.block_size);
- memset(sess->auth.pre_compute, 0, sess->auth.ctx_len << 1);
- rte_memcpy(sess->auth.key, auth_xform->key.data,
- auth_xform->key.length);
- if (generate_partial_hash(sess))
- return -1;
-#endif
+ if (sess->auth_opt) {
+ if (auth_xform->key.length > SHA224_BLOCK_SIZE)
+ return -1;
+ sess->auth.algo = CCP_AUTH_ALGO_SHA224_HMAC;
+ sess->auth.offset = CCP_SB_BYTES - SHA224_DIGEST_SIZE;
+ sess->auth.block_size = SHA224_BLOCK_SIZE;
+ sess->auth.key_length = auth_xform->key.length;
+ memset(sess->auth.key, 0, sess->auth.block_size);
+ rte_memcpy(sess->auth.key, auth_xform->key.data,
+ auth_xform->key.length);
+ } else {
+ if (auth_xform->key.length > SHA224_BLOCK_SIZE)
+ return -1;
+ sess->auth.algo = CCP_AUTH_ALGO_SHA224_HMAC;
+ sess->auth.engine = CCP_ENGINE_SHA;
+ sess->auth.ut.sha_type = CCP_SHA_TYPE_224;
+ sess->auth.ctx_len = CCP_SB_BYTES;
+ sess->auth.offset = CCP_SB_BYTES - SHA224_DIGEST_SIZE;
+ sess->auth.block_size = SHA224_BLOCK_SIZE;
+ sess->auth.key_length = auth_xform->key.length;
+ memset(sess->auth.key, 0, sess->auth.block_size);
+ memset(sess->auth.pre_compute, 0,
+ sess->auth.ctx_len << 1);
+ rte_memcpy(sess->auth.key, auth_xform->key.data,
+ auth_xform->key.length);
+ if (generate_partial_hash(sess))
+ return -1;
+ }
break;
case RTE_CRYPTO_AUTH_SHA3_224:
sess->auth.algo = CCP_AUTH_ALGO_SHA3_224;
sess->auth.offset = CCP_SB_BYTES - SHA256_DIGEST_SIZE;
break;
case RTE_CRYPTO_AUTH_SHA256_HMAC:
-#ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
- if (auth_xform->key.length > SHA256_BLOCK_SIZE)
- return -1;
- sess->auth.algo = CCP_AUTH_ALGO_SHA256_HMAC;
- sess->auth.offset = CCP_SB_BYTES - SHA256_DIGEST_SIZE;
- sess->auth.block_size = SHA256_BLOCK_SIZE;
- sess->auth.key_length = auth_xform->key.length;
- memset(sess->auth.key, 0, sess->auth.block_size);
- rte_memcpy(sess->auth.key, auth_xform->key.data,
- auth_xform->key.length);
-#else
- if (auth_xform->key.length > SHA256_BLOCK_SIZE)
- return -1;
- sess->auth.algo = CCP_AUTH_ALGO_SHA256_HMAC;
- sess->auth.engine = CCP_ENGINE_SHA;
- sess->auth.ut.sha_type = CCP_SHA_TYPE_256;
- sess->auth.ctx_len = CCP_SB_BYTES;
- sess->auth.offset = CCP_SB_BYTES - SHA256_DIGEST_SIZE;
- sess->auth.block_size = SHA256_BLOCK_SIZE;
- sess->auth.key_length = auth_xform->key.length;
- memset(sess->auth.key, 0, sess->auth.block_size);
- memset(sess->auth.pre_compute, 0, sess->auth.ctx_len << 1);
- rte_memcpy(sess->auth.key, auth_xform->key.data,
- auth_xform->key.length);
- if (generate_partial_hash(sess))
- return -1;
-#endif
+ if (sess->auth_opt) {
+ if (auth_xform->key.length > SHA256_BLOCK_SIZE)
+ return -1;
+ sess->auth.algo = CCP_AUTH_ALGO_SHA256_HMAC;
+ sess->auth.offset = CCP_SB_BYTES - SHA256_DIGEST_SIZE;
+ sess->auth.block_size = SHA256_BLOCK_SIZE;
+ sess->auth.key_length = auth_xform->key.length;
+ memset(sess->auth.key, 0, sess->auth.block_size);
+ rte_memcpy(sess->auth.key, auth_xform->key.data,
+ auth_xform->key.length);
+ } else {
+ if (auth_xform->key.length > SHA256_BLOCK_SIZE)
+ return -1;
+ sess->auth.algo = CCP_AUTH_ALGO_SHA256_HMAC;
+ sess->auth.engine = CCP_ENGINE_SHA;
+ sess->auth.ut.sha_type = CCP_SHA_TYPE_256;
+ sess->auth.ctx_len = CCP_SB_BYTES;
+ sess->auth.offset = CCP_SB_BYTES - SHA256_DIGEST_SIZE;
+ sess->auth.block_size = SHA256_BLOCK_SIZE;
+ sess->auth.key_length = auth_xform->key.length;
+ memset(sess->auth.key, 0, sess->auth.block_size);
+ memset(sess->auth.pre_compute, 0,
+ sess->auth.ctx_len << 1);
+ rte_memcpy(sess->auth.key, auth_xform->key.data,
+ auth_xform->key.length);
+ if (generate_partial_hash(sess))
+ return -1;
+ }
break;
case RTE_CRYPTO_AUTH_SHA3_256:
sess->auth.algo = CCP_AUTH_ALGO_SHA3_256;
sess->auth.offset = (CCP_SB_BYTES << 1) - SHA384_DIGEST_SIZE;
break;
case RTE_CRYPTO_AUTH_SHA384_HMAC:
-#ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
- if (auth_xform->key.length > SHA384_BLOCK_SIZE)
- return -1;
- sess->auth.algo = CCP_AUTH_ALGO_SHA384_HMAC;
- sess->auth.offset = (CCP_SB_BYTES << 1) - SHA384_DIGEST_SIZE;
- sess->auth.block_size = SHA384_BLOCK_SIZE;
- sess->auth.key_length = auth_xform->key.length;
- memset(sess->auth.key, 0, sess->auth.block_size);
- rte_memcpy(sess->auth.key, auth_xform->key.data,
- auth_xform->key.length);
-#else
- if (auth_xform->key.length > SHA384_BLOCK_SIZE)
- return -1;
- sess->auth.algo = CCP_AUTH_ALGO_SHA384_HMAC;
- sess->auth.engine = CCP_ENGINE_SHA;
- sess->auth.ut.sha_type = CCP_SHA_TYPE_384;
- sess->auth.ctx_len = CCP_SB_BYTES << 1;
- sess->auth.offset = (CCP_SB_BYTES << 1) - SHA384_DIGEST_SIZE;
- sess->auth.block_size = SHA384_BLOCK_SIZE;
- sess->auth.key_length = auth_xform->key.length;
- memset(sess->auth.key, 0, sess->auth.block_size);
- memset(sess->auth.pre_compute, 0, sess->auth.ctx_len << 1);
- rte_memcpy(sess->auth.key, auth_xform->key.data,
- auth_xform->key.length);
- if (generate_partial_hash(sess))
- return -1;
-#endif
+ if (sess->auth_opt) {
+ if (auth_xform->key.length > SHA384_BLOCK_SIZE)
+ return -1;
+ sess->auth.algo = CCP_AUTH_ALGO_SHA384_HMAC;
+ sess->auth.offset = ((CCP_SB_BYTES << 1) -
+ SHA384_DIGEST_SIZE);
+ sess->auth.block_size = SHA384_BLOCK_SIZE;
+ sess->auth.key_length = auth_xform->key.length;
+ memset(sess->auth.key, 0, sess->auth.block_size);
+ rte_memcpy(sess->auth.key, auth_xform->key.data,
+ auth_xform->key.length);
+ } else {
+ if (auth_xform->key.length > SHA384_BLOCK_SIZE)
+ return -1;
+ sess->auth.algo = CCP_AUTH_ALGO_SHA384_HMAC;
+ sess->auth.engine = CCP_ENGINE_SHA;
+ sess->auth.ut.sha_type = CCP_SHA_TYPE_384;
+ sess->auth.ctx_len = CCP_SB_BYTES << 1;
+ sess->auth.offset = ((CCP_SB_BYTES << 1) -
+ SHA384_DIGEST_SIZE);
+ sess->auth.block_size = SHA384_BLOCK_SIZE;
+ sess->auth.key_length = auth_xform->key.length;
+ memset(sess->auth.key, 0, sess->auth.block_size);
+ memset(sess->auth.pre_compute, 0,
+ sess->auth.ctx_len << 1);
+ rte_memcpy(sess->auth.key, auth_xform->key.data,
+ auth_xform->key.length);
+ if (generate_partial_hash(sess))
+ return -1;
+ }
break;
case RTE_CRYPTO_AUTH_SHA3_384:
sess->auth.algo = CCP_AUTH_ALGO_SHA3_384;
sess->auth.offset = (CCP_SB_BYTES << 1) - SHA512_DIGEST_SIZE;
break;
case RTE_CRYPTO_AUTH_SHA512_HMAC:
-#ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
- if (auth_xform->key.length > SHA512_BLOCK_SIZE)
- return -1;
- sess->auth.algo = CCP_AUTH_ALGO_SHA512_HMAC;
- sess->auth.offset = (CCP_SB_BYTES << 1) - SHA512_DIGEST_SIZE;
- sess->auth.block_size = SHA512_BLOCK_SIZE;
- sess->auth.key_length = auth_xform->key.length;
- memset(sess->auth.key, 0, sess->auth.block_size);
- rte_memcpy(sess->auth.key, auth_xform->key.data,
- auth_xform->key.length);
-#else
- if (auth_xform->key.length > SHA512_BLOCK_SIZE)
- return -1;
- sess->auth.algo = CCP_AUTH_ALGO_SHA512_HMAC;
- sess->auth.engine = CCP_ENGINE_SHA;
- sess->auth.ut.sha_type = CCP_SHA_TYPE_512;
- sess->auth.ctx_len = CCP_SB_BYTES << 1;
- sess->auth.offset = (CCP_SB_BYTES << 1) - SHA512_DIGEST_SIZE;
- sess->auth.block_size = SHA512_BLOCK_SIZE;
- sess->auth.key_length = auth_xform->key.length;
- memset(sess->auth.key, 0, sess->auth.block_size);
- memset(sess->auth.pre_compute, 0, sess->auth.ctx_len << 1);
- rte_memcpy(sess->auth.key, auth_xform->key.data,
- auth_xform->key.length);
- if (generate_partial_hash(sess))
- return -1;
-#endif
+ if (sess->auth_opt) {
+ if (auth_xform->key.length > SHA512_BLOCK_SIZE)
+ return -1;
+ sess->auth.algo = CCP_AUTH_ALGO_SHA512_HMAC;
+ sess->auth.offset = ((CCP_SB_BYTES << 1) -
+ SHA512_DIGEST_SIZE);
+ sess->auth.block_size = SHA512_BLOCK_SIZE;
+ sess->auth.key_length = auth_xform->key.length;
+ memset(sess->auth.key, 0, sess->auth.block_size);
+ rte_memcpy(sess->auth.key, auth_xform->key.data,
+ auth_xform->key.length);
+ } else {
+ if (auth_xform->key.length > SHA512_BLOCK_SIZE)
+ return -1;
+ sess->auth.algo = CCP_AUTH_ALGO_SHA512_HMAC;
+ sess->auth.engine = CCP_ENGINE_SHA;
+ sess->auth.ut.sha_type = CCP_SHA_TYPE_512;
+ sess->auth.ctx_len = CCP_SB_BYTES << 1;
+ sess->auth.offset = ((CCP_SB_BYTES << 1) -
+ SHA512_DIGEST_SIZE);
+ sess->auth.block_size = SHA512_BLOCK_SIZE;
+ sess->auth.key_length = auth_xform->key.length;
+ memset(sess->auth.key, 0, sess->auth.block_size);
+ memset(sess->auth.pre_compute, 0,
+ sess->auth.ctx_len << 1);
+ rte_memcpy(sess->auth.key, auth_xform->key.data,
+ auth_xform->key.length);
+ if (generate_partial_hash(sess))
+ return -1;
+ }
break;
case RTE_CRYPTO_AUTH_SHA3_512:
sess->auth.algo = CCP_AUTH_ALGO_SHA3_512;
int
ccp_set_session_parameters(struct ccp_session *sess,
- const struct rte_crypto_sym_xform *xform)
+ const struct rte_crypto_sym_xform *xform,
+ struct ccp_private *internals)
{
const struct rte_crypto_sym_xform *cipher_xform = NULL;
const struct rte_crypto_sym_xform *auth_xform = NULL;
const struct rte_crypto_sym_xform *aead_xform = NULL;
int ret = 0;
+ sess->auth_opt = internals->auth_opt;
sess->cmd_id = ccp_get_cmd_id(xform);
switch (sess->cmd_id) {
count = 3;
/**< op + lsb passthrough cpy to/from*/
break;
-#ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
case CCP_AUTH_ALGO_MD5_HMAC:
break;
-#endif
case CCP_AUTH_ALGO_SHA1_HMAC:
case CCP_AUTH_ALGO_SHA224_HMAC:
case CCP_AUTH_ALGO_SHA256_HMAC:
-#ifndef RTE_LIBRTE_PMD_CCP_CPU_AUTH
- count = 6;
-#endif
+ if (session->auth_opt == 0)
+ count = 6;
break;
case CCP_AUTH_ALGO_SHA384_HMAC:
case CCP_AUTH_ALGO_SHA512_HMAC:
-#ifndef RTE_LIBRTE_PMD_CCP_CPU_AUTH
- count = 7;
-#endif
/**
* 1. Load PHash1 = H(k ^ ipad); to LSB
* 2. generate IHash = H(hash on meassage with PHash1
* as init value);
* 6. Retrieve HMAC output from LSB to host memory
*/
+ if (session->auth_opt == 0)
+ count = 7;
break;
case CCP_AUTH_ALGO_SHA3_224:
case CCP_AUTH_ALGO_SHA3_256:
return count;
}
-#ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
static uint8_t
algo_select(int sessalgo,
const EVP_MD **algo)
EVP_PKEY_free(pkey);
return 0;
}
-#endif
static void
ccp_perform_passthru(struct ccp_passthru *pst,
result = ccp_perform_sha(op, cmd_q);
b_info->desccnt += 3;
break;
-#ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
case CCP_AUTH_ALGO_MD5_HMAC:
+ if (session->auth_opt == 0)
+ result = -1;
break;
-#endif
case CCP_AUTH_ALGO_SHA1_HMAC:
case CCP_AUTH_ALGO_SHA224_HMAC:
case CCP_AUTH_ALGO_SHA256_HMAC:
-#ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
- break;
-#endif
- result = ccp_perform_hmac(op, cmd_q);
- b_info->desccnt += 6;
+ if (session->auth_opt == 0) {
+ result = ccp_perform_hmac(op, cmd_q);
+ b_info->desccnt += 6;
+ }
break;
case CCP_AUTH_ALGO_SHA384_HMAC:
case CCP_AUTH_ALGO_SHA512_HMAC:
-#ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
- break;
-#endif
- result = ccp_perform_hmac(op, cmd_q);
- b_info->desccnt += 7;
+ if (session->auth_opt == 0) {
+ result = ccp_perform_hmac(op, cmd_q);
+ b_info->desccnt += 7;
+ }
break;
case CCP_AUTH_ALGO_SHA3_224:
case CCP_AUTH_ALGO_SHA3_256:
int i, result = 0;
struct ccp_batch_info *b_info;
struct ccp_session *session;
-#ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
EVP_MD_CTX *auth_ctx = NULL;
-#endif
if (rte_mempool_get(qp->batch_mp, (void **)&b_info)) {
CCP_LOG_ERR("batch info allocation failed");
return 0;
}
-#ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
+
auth_ctx = EVP_MD_CTX_create();
if (unlikely(!auth_ctx)) {
CCP_LOG_ERR("Unable to create auth ctx");
return 0;
}
b_info->auth_ctr = 0;
-#endif
+
/* populate batch info necessary for dequeue */
b_info->op_idx = 0;
b_info->lsb_buf_idx = 0;
result = ccp_crypto_cipher(op[i], cmd_q, b_info);
break;
case CCP_CMD_AUTH:
- result = ccp_crypto_auth(op[i], cmd_q, b_info);
-#ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
- b_info->auth_ctr++;
- result = cpu_crypto_auth(qp, op[i],
- session, auth_ctx);
-#endif
+ if (session->auth_opt) {
+ b_info->auth_ctr++;
+ result = cpu_crypto_auth(qp, op[i],
+ session, auth_ctx);
+ } else
+ result = ccp_crypto_auth(op[i], cmd_q, b_info);
break;
case CCP_CMD_CIPHER_HASH:
result = ccp_crypto_cipher(op[i], cmd_q, b_info);
result = ccp_crypto_auth(op[i], cmd_q, b_info);
break;
case CCP_CMD_HASH_CIPHER:
- result = ccp_crypto_auth(op[i], cmd_q, b_info);
-#ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
- result = cpu_crypto_auth(qp, op[i],
- session, auth_ctx);
- if (op[i]->status != RTE_CRYPTO_OP_STATUS_SUCCESS)
- continue;
-#endif
+ if (session->auth_opt) {
+ result = cpu_crypto_auth(qp, op[i],
+ session, auth_ctx);
+ if (op[i]->status !=
+ RTE_CRYPTO_OP_STATUS_SUCCESS)
+ continue;
+ } else
+ result = ccp_crypto_auth(op[i], cmd_q, b_info);
+
if (result)
break;
result = ccp_crypto_cipher(op[i], cmd_q, b_info);
rte_ring_enqueue(qp->processed_pkts, (void *)b_info);
-#ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
EVP_MD_CTX_destroy(auth_ctx);
-#endif
return i;
}
}
static int
-#ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
ccp_prepare_ops(struct ccp_qp *qp,
-#else
-ccp_prepare_ops(struct ccp_qp *qp __rte_unused,
-#endif
struct rte_crypto_op **op_d,
struct ccp_batch_info *b_info,
uint16_t nb_ops)
int i, min_ops;
struct ccp_session *session;
-#ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
EVP_MD_CTX *auth_ctx = NULL;
auth_ctx = EVP_MD_CTX_create();
CCP_LOG_ERR("Unable to create auth ctx");
return 0;
}
-#endif
min_ops = RTE_MIN(nb_ops, b_info->opcnt);
for (i = 0; i < min_ops; i++) {
op_d[i]->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
break;
case CCP_CMD_AUTH:
-#ifndef RTE_LIBRTE_PMD_CCP_CPU_AUTH
- ccp_auth_dq_prepare(op_d[i]);
-#endif
+ if (session->auth_opt == 0)
+ ccp_auth_dq_prepare(op_d[i]);
break;
case CCP_CMD_CIPHER_HASH:
-#ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
- cpu_crypto_auth(qp, op_d[i],
- session, auth_ctx);
-#else
- ccp_auth_dq_prepare(op_d[i]);
-#endif
+ if (session->auth_opt)
+ cpu_crypto_auth(qp, op_d[i],
+ session, auth_ctx);
+ else
+ ccp_auth_dq_prepare(op_d[i]);
break;
case CCP_CMD_HASH_CIPHER:
-#ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
- op_d[i]->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
-#else
- ccp_auth_dq_prepare(op_d[i]);
-#endif
+ if (session->auth_opt)
+ op_d[i]->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
+ else
+ ccp_auth_dq_prepare(op_d[i]);
break;
case CCP_CMD_COMBINED:
ccp_auth_dq_prepare(op_d[i]);
}
}
-#ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
EVP_MD_CTX_destroy(auth_ctx);
-#endif
b_info->opcnt -= min_ops;
return min_ops;
}
} else if (rte_ring_dequeue(qp->processed_pkts,
(void **)&b_info))
return 0;
-#ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
+
if (b_info->auth_ctr == b_info->opcnt)
goto success;
-#endif
cur_head_offset = CCP_READ_REG(b_info->cmd_q->reg_base,
CMD_Q_HEAD_LO_BASE);
#define HMAC_IPAD_VALUE 0x36
#define HMAC_OPAD_VALUE 0x5c
-#ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
+/* MD5 */
#define MD5_DIGEST_SIZE 16
#define MD5_BLOCK_SIZE 64
-#endif
/* SHA */
#define SHA_COMMON_DIGEST_SIZE 32
CCP_AUTH_ALGO_SHA3_512_HMAC,
CCP_AUTH_ALGO_AES_CMAC,
CCP_AUTH_ALGO_AES_GCM,
-#ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
CCP_AUTH_ALGO_MD5_HMAC,
-#endif
};
/**
/* CCP crypto private session structure */
struct ccp_session {
+ bool auth_opt;
enum ccp_cmd_order cmd_id;
/**< chain order mode */
struct {
extern uint8_t ccp_cryptodev_driver_id;
struct ccp_qp;
+struct ccp_private;
/**
* Set and validate CCP crypto session parameters
* @return 0 on success otherwise -1
*/
int ccp_set_session_parameters(struct ccp_session *sess,
- const struct rte_crypto_sym_xform *xform);
+ const struct rte_crypto_sym_xform *xform,
+ struct ccp_private *internals);
/**
* Find count of slots
#include "ccp_dev.h"
#include "ccp_crypto.h"
-static const struct rte_cryptodev_capabilities ccp_pmd_capabilities[] = {
-#ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
- { /* MD5 HMAC */
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
- {.sym = {
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
- {.auth = {
- .algo = RTE_CRYPTO_AUTH_MD5_HMAC,
- .block_size = 64,
- .key_size = {
- .min = 1,
- .max = 64,
- .increment = 1
- },
- .digest_size = {
- .min = 16,
- .max = 16,
- .increment = 0
- },
- .aad_size = { 0 }
- }, }
- }, }
- },
-#endif
- { /* SHA1 */
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
- {.sym = {
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
- {.auth = {
- .algo = RTE_CRYPTO_AUTH_SHA1,
- .block_size = 64,
- .key_size = {
- .min = 0,
- .max = 0,
- .increment = 0
- },
- .digest_size = {
- .min = 20,
- .max = 20,
- .increment = 0
- },
- .aad_size = { 0 }
- }, }
- }, }
- },
- { /* SHA1 HMAC */
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
- {.sym = {
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
- {.auth = {
- .algo = RTE_CRYPTO_AUTH_SHA1_HMAC,
- .block_size = 64,
- .key_size = {
- .min = 1,
- .max = 64,
- .increment = 1
- },
- .digest_size = {
- .min = 20,
- .max = 20,
- .increment = 0
- },
- .aad_size = { 0 }
- }, }
- }, }
- },
- { /* SHA224 */
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
- {.sym = {
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
- {.auth = {
- .algo = RTE_CRYPTO_AUTH_SHA224,
- .block_size = 64,
- .key_size = {
- .min = 0,
- .max = 0,
- .increment = 0
- },
- .digest_size = {
- .min = 28,
- .max = 28,
- .increment = 0
- },
- .aad_size = { 0 }
- }, }
- }, }
- },
- { /* SHA224 HMAC */
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
- {.sym = {
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
- {.auth = {
- .algo = RTE_CRYPTO_AUTH_SHA224_HMAC,
- .block_size = 64,
- .key_size = {
- .min = 1,
- .max = 64,
- .increment = 1
- },
- .digest_size = {
- .min = 28,
- .max = 28,
- .increment = 0
- },
- .aad_size = { 0 }
- }, }
- }, }
- },
- { /* SHA3-224 */
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
- {.sym = {
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
- {.auth = {
- .algo = RTE_CRYPTO_AUTH_SHA3_224,
- .block_size = 144,
- .key_size = {
- .min = 0,
- .max = 0,
- .increment = 0
- },
- .digest_size = {
- .min = 28,
- .max = 28,
- .increment = 0
- },
- .aad_size = { 0 }
- }, }
- }, }
- },
- { /* SHA3-224 HMAC*/
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
- {.sym = {
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
- {.auth = {
- .algo = RTE_CRYPTO_AUTH_SHA3_224_HMAC,
- .block_size = 144,
- .key_size = {
- .min = 1,
- .max = 144,
- .increment = 1
- },
- .digest_size = {
- .min = 28,
- .max = 28,
- .increment = 0
- },
- .aad_size = { 0 }
- }, }
- }, }
- },
- { /* SHA256 */
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
- {.sym = {
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
- {.auth = {
- .algo = RTE_CRYPTO_AUTH_SHA256,
- .block_size = 64,
- .key_size = {
- .min = 0,
- .max = 0,
- .increment = 0
- },
- .digest_size = {
- .min = 32,
- .max = 32,
- .increment = 0
- },
- .aad_size = { 0 }
- }, }
- }, }
- },
- { /* SHA256 HMAC */
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
- {.sym = {
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
- {.auth = {
- .algo = RTE_CRYPTO_AUTH_SHA256_HMAC,
- .block_size = 64,
- .key_size = {
- .min = 1,
- .max = 64,
- .increment = 1
- },
- .digest_size = {
- .min = 32,
- .max = 32,
- .increment = 0
- },
- .aad_size = { 0 }
- }, }
- }, }
- },
- { /* SHA3-256 */
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
- {.sym = {
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
- {.auth = {
- .algo = RTE_CRYPTO_AUTH_SHA3_256,
- .block_size = 136,
- .key_size = {
- .min = 0,
- .max = 0,
- .increment = 0
- },
- .digest_size = {
- .min = 32,
- .max = 32,
- .increment = 0
- },
- .aad_size = { 0 }
- }, }
- }, }
- },
- { /* SHA3-256-HMAC */
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
- {.sym = {
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
- {.auth = {
- .algo = RTE_CRYPTO_AUTH_SHA3_256_HMAC,
- .block_size = 136,
- .key_size = {
- .min = 1,
- .max = 136,
- .increment = 1
- },
- .digest_size = {
- .min = 32,
- .max = 32,
- .increment = 0
- },
- .aad_size = { 0 }
- }, }
- }, }
- },
- { /* SHA384 */
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
- {.sym = {
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
- {.auth = {
- .algo = RTE_CRYPTO_AUTH_SHA384,
- .block_size = 128,
- .key_size = {
- .min = 0,
- .max = 0,
- .increment = 0
- },
- .digest_size = {
- .min = 48,
- .max = 48,
- .increment = 0
- },
- .aad_size = { 0 }
- }, }
- }, }
- },
- { /* SHA384 HMAC */
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
- {.sym = {
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
- {.auth = {
- .algo = RTE_CRYPTO_AUTH_SHA384_HMAC,
- .block_size = 128,
- .key_size = {
- .min = 1,
- .max = 128,
- .increment = 1
- },
- .digest_size = {
- .min = 48,
- .max = 48,
- .increment = 0
- },
- .aad_size = { 0 }
- }, }
- }, }
- },
- { /* SHA3-384 */
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
- {.sym = {
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
- {.auth = {
- .algo = RTE_CRYPTO_AUTH_SHA3_384,
- .block_size = 104,
- .key_size = {
- .min = 0,
- .max = 0,
- .increment = 0
- },
- .digest_size = {
- .min = 48,
- .max = 48,
- .increment = 0
- },
- .aad_size = { 0 }
- }, }
- }, }
- },
- { /* SHA3-384-HMAC */
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
- {.sym = {
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
- {.auth = {
- .algo = RTE_CRYPTO_AUTH_SHA3_384_HMAC,
- .block_size = 104,
- .key_size = {
- .min = 1,
- .max = 104,
- .increment = 1
- },
- .digest_size = {
- .min = 48,
- .max = 48,
- .increment = 0
- },
- .aad_size = { 0 }
- }, }
- }, }
- },
- { /* SHA512 */
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
- {.sym = {
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
- {.auth = {
- .algo = RTE_CRYPTO_AUTH_SHA512,
- .block_size = 128,
- .key_size = {
- .min = 0,
- .max = 0,
- .increment = 0
- },
- .digest_size = {
- .min = 64,
- .max = 64,
- .increment = 0
- },
- .aad_size = { 0 }
- }, }
- }, }
- },
- { /* SHA512 HMAC */
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
- {.sym = {
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
- {.auth = {
- .algo = RTE_CRYPTO_AUTH_SHA512_HMAC,
- .block_size = 128,
- .key_size = {
- .min = 1,
- .max = 128,
- .increment = 1
- },
- .digest_size = {
- .min = 64,
- .max = 64,
- .increment = 0
- },
- .aad_size = { 0 }
- }, }
- }, }
- },
- { /* SHA3-512 */
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
- {.sym = {
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
- {.auth = {
- .algo = RTE_CRYPTO_AUTH_SHA3_512,
- .block_size = 72,
- .key_size = {
- .min = 0,
- .max = 0,
- .increment = 0
- },
- .digest_size = {
- .min = 64,
- .max = 64,
- .increment = 0
- },
- .aad_size = { 0 }
- }, }
- }, }
- },
- { /* SHA3-512-HMAC */
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
- {.sym = {
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
- {.auth = {
- .algo = RTE_CRYPTO_AUTH_SHA3_512_HMAC,
- .block_size = 72,
- .key_size = {
- .min = 1,
- .max = 72,
- .increment = 1
- },
- .digest_size = {
- .min = 64,
- .max = 64,
- .increment = 0
- },
- .aad_size = { 0 }
- }, }
- }, }
- },
- { /*AES-CMAC */
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
- {.sym = {
- .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
- {.auth = {
- .algo = RTE_CRYPTO_AUTH_AES_CMAC,
- .block_size = 16,
- .key_size = {
- .min = 16,
- .max = 32,
- .increment = 8
- },
- .digest_size = {
- .min = 16,
- .max = 16,
- .increment = 0
- },
- }, }
- }, }
- },
- { /* AES ECB */
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
- {.sym = {
- .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
- {.cipher = {
- .algo = RTE_CRYPTO_CIPHER_AES_ECB,
- .block_size = 16,
- .key_size = {
- .min = 16,
- .max = 32,
- .increment = 8
- },
- .iv_size = {
- .min = 0,
- .max = 0,
- .increment = 0
- }
- }, }
- }, }
- },
- { /* AES CBC */
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
- {.sym = {
- .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
- {.cipher = {
- .algo = RTE_CRYPTO_CIPHER_AES_CBC,
- .block_size = 16,
- .key_size = {
- .min = 16,
- .max = 32,
- .increment = 8
- },
- .iv_size = {
- .min = 16,
- .max = 16,
- .increment = 0
- }
- }, }
- }, }
- },
- { /* AES CTR */
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
- {.sym = {
- .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
- {.cipher = {
- .algo = RTE_CRYPTO_CIPHER_AES_CTR,
- .block_size = 16,
- .key_size = {
- .min = 16,
- .max = 32,
- .increment = 8
- },
- .iv_size = {
- .min = 16,
- .max = 16,
- .increment = 0
- }
- }, }
- }, }
- },
- { /* 3DES CBC */
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
- {.sym = {
- .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
- {.cipher = {
- .algo = RTE_CRYPTO_CIPHER_3DES_CBC,
- .block_size = 8,
- .key_size = {
- .min = 16,
- .max = 24,
- .increment = 8
- },
- .iv_size = {
- .min = 8,
- .max = 8,
- .increment = 0
- }
- }, }
- }, }
- },
- { /* AES GCM */
- .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
- {.sym = {
- .xform_type = RTE_CRYPTO_SYM_XFORM_AEAD,
- {.aead = {
- .algo = RTE_CRYPTO_AEAD_AES_GCM,
- .block_size = 16,
- .key_size = {
- .min = 16,
- .max = 32,
- .increment = 8
- },
- .digest_size = {
- .min = 16,
- .max = 16,
- .increment = 0
- },
- .aad_size = {
- .min = 0,
- .max = 65535,
- .increment = 1
- },
- .iv_size = {
- .min = 12,
- .max = 16,
- .increment = 4
- },
- }, }
- }, }
- },
+#define CCP_BASE_SYM_CRYPTO_CAPABILITIES \
+ { /* SHA1 */ \
+ .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
+ {.sym = { \
+ .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
+ {.auth = { \
+ .algo = RTE_CRYPTO_AUTH_SHA1, \
+ .block_size = 64, \
+ .key_size = { \
+ .min = 0, \
+ .max = 0, \
+ .increment = 0 \
+ }, \
+ .digest_size = { \
+ .min = 20, \
+ .max = 20, \
+ .increment = 0 \
+ }, \
+ .aad_size = { 0 } \
+ }, } \
+ }, } \
+ }, \
+ { /* SHA1 HMAC */ \
+ .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
+ {.sym = { \
+ .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
+ {.auth = { \
+ .algo = RTE_CRYPTO_AUTH_SHA1_HMAC, \
+ .block_size = 64, \
+ .key_size = { \
+ .min = 1, \
+ .max = 64, \
+ .increment = 1 \
+ }, \
+ .digest_size = { \
+ .min = 20, \
+ .max = 20, \
+ .increment = 0 \
+ }, \
+ .aad_size = { 0 } \
+ }, } \
+ }, } \
+ }, \
+ { /* SHA224 */ \
+ .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
+ {.sym = { \
+ .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
+ {.auth = { \
+ .algo = RTE_CRYPTO_AUTH_SHA224, \
+ .block_size = 64, \
+ .key_size = { \
+ .min = 0, \
+ .max = 0, \
+ .increment = 0 \
+ }, \
+ .digest_size = { \
+ .min = 28, \
+ .max = 28, \
+ .increment = 0 \
+ }, \
+ .aad_size = { 0 } \
+ }, } \
+ }, } \
+ }, \
+ { /* SHA224 HMAC */ \
+ .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
+ {.sym = { \
+ .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
+ {.auth = { \
+ .algo = RTE_CRYPTO_AUTH_SHA224_HMAC, \
+ .block_size = 64, \
+ .key_size = { \
+ .min = 1, \
+ .max = 64, \
+ .increment = 1 \
+ }, \
+ .digest_size = { \
+ .min = 28, \
+ .max = 28, \
+ .increment = 0 \
+ }, \
+ .aad_size = { 0 } \
+ }, } \
+ }, } \
+ }, \
+ { /* SHA3-224 */ \
+ .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
+ {.sym = { \
+ .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
+ {.auth = { \
+ .algo = RTE_CRYPTO_AUTH_SHA3_224, \
+ .block_size = 144, \
+ .key_size = { \
+ .min = 0, \
+ .max = 0, \
+ .increment = 0 \
+ }, \
+ .digest_size = { \
+ .min = 28, \
+ .max = 28, \
+ .increment = 0 \
+ }, \
+ .aad_size = { 0 } \
+ }, } \
+ }, } \
+ }, \
+ { /* SHA3-224 HMAC*/ \
+ .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
+ {.sym = { \
+ .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
+ {.auth = { \
+ .algo = RTE_CRYPTO_AUTH_SHA3_224_HMAC, \
+ .block_size = 144, \
+ .key_size = { \
+ .min = 1, \
+ .max = 144, \
+ .increment = 1 \
+ }, \
+ .digest_size = { \
+ .min = 28, \
+ .max = 28, \
+ .increment = 0 \
+ }, \
+ .aad_size = { 0 } \
+ }, } \
+ }, } \
+ }, \
+ { /* SHA256 */ \
+ .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
+ {.sym = { \
+ .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
+ {.auth = { \
+ .algo = RTE_CRYPTO_AUTH_SHA256, \
+ .block_size = 64, \
+ .key_size = { \
+ .min = 0, \
+ .max = 0, \
+ .increment = 0 \
+ }, \
+ .digest_size = { \
+ .min = 32, \
+ .max = 32, \
+ .increment = 0 \
+ }, \
+ .aad_size = { 0 } \
+ }, } \
+ }, } \
+ }, \
+ { /* SHA256 HMAC */ \
+ .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
+ {.sym = { \
+ .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
+ {.auth = { \
+ .algo = RTE_CRYPTO_AUTH_SHA256_HMAC, \
+ .block_size = 64, \
+ .key_size = { \
+ .min = 1, \
+ .max = 64, \
+ .increment = 1 \
+ }, \
+ .digest_size = { \
+ .min = 32, \
+ .max = 32, \
+ .increment = 0 \
+ }, \
+ .aad_size = { 0 } \
+ }, } \
+ }, } \
+ }, \
+ { /* SHA3-256 */ \
+ .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
+ {.sym = { \
+ .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
+ {.auth = { \
+ .algo = RTE_CRYPTO_AUTH_SHA3_256, \
+ .block_size = 136, \
+ .key_size = { \
+ .min = 0, \
+ .max = 0, \
+ .increment = 0 \
+ }, \
+ .digest_size = { \
+ .min = 32, \
+ .max = 32, \
+ .increment = 0 \
+ }, \
+ .aad_size = { 0 } \
+ }, } \
+ }, } \
+ }, \
+ { /* SHA3-256-HMAC */ \
+ .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
+ {.sym = { \
+ .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
+ {.auth = { \
+ .algo = RTE_CRYPTO_AUTH_SHA3_256_HMAC, \
+ .block_size = 136, \
+ .key_size = { \
+ .min = 1, \
+ .max = 136, \
+ .increment = 1 \
+ }, \
+ .digest_size = { \
+ .min = 32, \
+ .max = 32, \
+ .increment = 0 \
+ }, \
+ .aad_size = { 0 } \
+ }, } \
+ }, } \
+ }, \
+ { /* SHA384 */ \
+ .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
+ {.sym = { \
+ .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
+ {.auth = { \
+ .algo = RTE_CRYPTO_AUTH_SHA384, \
+ .block_size = 128, \
+ .key_size = { \
+ .min = 0, \
+ .max = 0, \
+ .increment = 0 \
+ }, \
+ .digest_size = { \
+ .min = 48, \
+ .max = 48, \
+ .increment = 0 \
+ }, \
+ .aad_size = { 0 } \
+ }, } \
+ }, } \
+ }, \
+ { /* SHA384 HMAC */ \
+ .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
+ {.sym = { \
+ .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
+ {.auth = { \
+ .algo = RTE_CRYPTO_AUTH_SHA384_HMAC, \
+ .block_size = 128, \
+ .key_size = { \
+ .min = 1, \
+ .max = 128, \
+ .increment = 1 \
+ }, \
+ .digest_size = { \
+ .min = 48, \
+ .max = 48, \
+ .increment = 0 \
+ }, \
+ .aad_size = { 0 } \
+ }, } \
+ }, } \
+ }, \
+ { /* SHA3-384 */ \
+ .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
+ {.sym = { \
+ .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
+ {.auth = { \
+ .algo = RTE_CRYPTO_AUTH_SHA3_384, \
+ .block_size = 104, \
+ .key_size = { \
+ .min = 0, \
+ .max = 0, \
+ .increment = 0 \
+ }, \
+ .digest_size = { \
+ .min = 48, \
+ .max = 48, \
+ .increment = 0 \
+ }, \
+ .aad_size = { 0 } \
+ }, } \
+ }, } \
+ }, \
+ { /* SHA3-384-HMAC */ \
+ .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
+ {.sym = { \
+ .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
+ {.auth = { \
+ .algo = RTE_CRYPTO_AUTH_SHA3_384_HMAC, \
+ .block_size = 104, \
+ .key_size = { \
+ .min = 1, \
+ .max = 104, \
+ .increment = 1 \
+ }, \
+ .digest_size = { \
+ .min = 48, \
+ .max = 48, \
+ .increment = 0 \
+ }, \
+ .aad_size = { 0 } \
+ }, } \
+ }, } \
+ }, \
+ { /* SHA512 */ \
+ .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
+ {.sym = { \
+ .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
+ {.auth = { \
+ .algo = RTE_CRYPTO_AUTH_SHA512, \
+ .block_size = 128, \
+ .key_size = { \
+ .min = 0, \
+ .max = 0, \
+ .increment = 0 \
+ }, \
+ .digest_size = { \
+ .min = 64, \
+ .max = 64, \
+ .increment = 0 \
+ }, \
+ .aad_size = { 0 } \
+ }, } \
+ }, } \
+ }, \
+ { /* SHA512 HMAC */ \
+ .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
+ {.sym = { \
+ .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
+ {.auth = { \
+ .algo = RTE_CRYPTO_AUTH_SHA512_HMAC, \
+ .block_size = 128, \
+ .key_size = { \
+ .min = 1, \
+ .max = 128, \
+ .increment = 1 \
+ }, \
+ .digest_size = { \
+ .min = 64, \
+ .max = 64, \
+ .increment = 0 \
+ }, \
+ .aad_size = { 0 } \
+ }, } \
+ }, } \
+ }, \
+ { /* SHA3-512 */ \
+ .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
+ {.sym = { \
+ .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
+ {.auth = { \
+ .algo = RTE_CRYPTO_AUTH_SHA3_512, \
+ .block_size = 72, \
+ .key_size = { \
+ .min = 0, \
+ .max = 0, \
+ .increment = 0 \
+ }, \
+ .digest_size = { \
+ .min = 64, \
+ .max = 64, \
+ .increment = 0 \
+ }, \
+ .aad_size = { 0 } \
+ }, } \
+ }, } \
+ }, \
+ { /* SHA3-512-HMAC */ \
+ .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
+ {.sym = { \
+ .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
+ {.auth = { \
+ .algo = RTE_CRYPTO_AUTH_SHA3_512_HMAC, \
+ .block_size = 72, \
+ .key_size = { \
+ .min = 1, \
+ .max = 72, \
+ .increment = 1 \
+ }, \
+ .digest_size = { \
+ .min = 64, \
+ .max = 64, \
+ .increment = 0 \
+ }, \
+ .aad_size = { 0 } \
+ }, } \
+ }, } \
+ }, \
+ { /*AES-CMAC */ \
+ .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
+ {.sym = { \
+ .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
+ {.auth = { \
+ .algo = RTE_CRYPTO_AUTH_AES_CMAC, \
+ .block_size = 16, \
+ .key_size = { \
+ .min = 16, \
+ .max = 32, \
+ .increment = 8 \
+ }, \
+ .digest_size = { \
+ .min = 16, \
+ .max = 16, \
+ .increment = 0 \
+ }, \
+ }, } \
+ }, } \
+ }, \
+ { /* AES ECB */ \
+ .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
+ {.sym = { \
+ .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER, \
+ {.cipher = { \
+ .algo = RTE_CRYPTO_CIPHER_AES_ECB, \
+ .block_size = 16, \
+ .key_size = { \
+ .min = 16, \
+ .max = 32, \
+ .increment = 8 \
+ }, \
+ .iv_size = { \
+ .min = 0, \
+ .max = 0, \
+ .increment = 0 \
+ } \
+ }, } \
+ }, } \
+ }, \
+ { /* AES CBC */ \
+ .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
+ {.sym = { \
+ .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER, \
+ {.cipher = { \
+ .algo = RTE_CRYPTO_CIPHER_AES_CBC, \
+ .block_size = 16, \
+ .key_size = { \
+ .min = 16, \
+ .max = 32, \
+ .increment = 8 \
+ }, \
+ .iv_size = { \
+ .min = 16, \
+ .max = 16, \
+ .increment = 0 \
+ } \
+ }, } \
+ }, } \
+ }, \
+ { /* AES CTR */ \
+ .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
+ {.sym = { \
+ .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER, \
+ {.cipher = { \
+ .algo = RTE_CRYPTO_CIPHER_AES_CTR, \
+ .block_size = 16, \
+ .key_size = { \
+ .min = 16, \
+ .max = 32, \
+ .increment = 8 \
+ }, \
+ .iv_size = { \
+ .min = 16, \
+ .max = 16, \
+ .increment = 0 \
+ } \
+ }, } \
+ }, } \
+ }, \
+ { /* 3DES CBC */ \
+ .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
+ {.sym = { \
+ .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER, \
+ {.cipher = { \
+ .algo = RTE_CRYPTO_CIPHER_3DES_CBC, \
+ .block_size = 8, \
+ .key_size = { \
+ .min = 16, \
+ .max = 24, \
+ .increment = 8 \
+ }, \
+ .iv_size = { \
+ .min = 8, \
+ .max = 8, \
+ .increment = 0 \
+ } \
+ }, } \
+ }, } \
+ }, \
+ { /* AES GCM */ \
+ .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
+ {.sym = { \
+ .xform_type = RTE_CRYPTO_SYM_XFORM_AEAD, \
+ {.aead = { \
+ .algo = RTE_CRYPTO_AEAD_AES_GCM, \
+ .block_size = 16, \
+ .key_size = { \
+ .min = 16, \
+ .max = 32, \
+ .increment = 8 \
+ }, \
+ .digest_size = { \
+ .min = 16, \
+ .max = 16, \
+ .increment = 0 \
+ }, \
+ .aad_size = { \
+ .min = 0, \
+ .max = 65535, \
+ .increment = 1 \
+ }, \
+ .iv_size = { \
+ .min = 12, \
+ .max = 16, \
+ .increment = 4 \
+ }, \
+ }, } \
+ }, } \
+ }
+
+#define CCP_EXTRA_SYM_CRYPTO_CAPABILITIES \
+ { /* MD5 HMAC */ \
+ .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
+ {.sym = { \
+ .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
+ {.auth = { \
+ .algo = RTE_CRYPTO_AUTH_MD5_HMAC, \
+ .block_size = 64, \
+ .key_size = { \
+ .min = 1, \
+ .max = 64, \
+ .increment = 1 \
+ }, \
+ .digest_size = { \
+ .min = 16, \
+ .max = 16, \
+ .increment = 0 \
+ }, \
+ .aad_size = { 0 } \
+ }, } \
+ }, } \
+ }
+
+static const struct rte_cryptodev_capabilities ccp_crypto_cap[] = {
+ CCP_BASE_SYM_CRYPTO_CAPABILITIES,
+ RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
+};
+
+static const struct rte_cryptodev_capabilities ccp_crypto_cap_complete[] = {
+ CCP_EXTRA_SYM_CRYPTO_CAPABILITIES,
+ CCP_BASE_SYM_CRYPTO_CAPABILITIES,
RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
};
if (dev_info != NULL) {
dev_info->driver_id = dev->driver_id;
dev_info->feature_flags = dev->feature_flags;
- dev_info->capabilities = ccp_pmd_capabilities;
+ dev_info->capabilities = ccp_crypto_cap;
+ if (internals->auth_opt == 1)
+ dev_info->capabilities = ccp_crypto_cap_complete;
dev_info->max_nb_queue_pairs = internals->max_nb_qpairs;
dev_info->sym.max_nb_sessions = internals->max_nb_sessions;
}
{
int ret;
void *sess_private_data;
+ struct ccp_private *internals;
if (unlikely(sess == NULL || xform == NULL)) {
CCP_LOG_ERR("Invalid session struct or xform");
CCP_LOG_ERR("Couldn't get object from session mempool");
return -ENOMEM;
}
- ret = ccp_set_session_parameters(sess_private_data, xform);
+ internals = (struct ccp_private *)dev->data->dev_private;
+ ret = ccp_set_session_parameters(sess_private_data, xform, internals);
if (ret != 0) {
CCP_LOG_ERR("failed configure session parameters");
unsigned int max_nb_qpairs; /**< Max number of queue pairs */
unsigned int max_nb_sessions; /**< Max number of sessions */
uint8_t crypto_num_dev; /**< Number of working crypto devices */
+ bool auth_opt; /**< Authentication offload option */
struct ccp_device *last_dev; /**< Last working crypto device */
};
phys_addr_t lsb_buf_phys;
/**< LSB intermediate buf for passthru */
int lsb_buf_idx;
-#ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
uint16_t auth_ctr;
- /**< auth only ops batch */
-#endif
+ /**< auth only ops batch for CPU based auth */
} __rte_cache_aligned;
/**< CCP crypto queue pair */
static unsigned int ccp_pmd_init_done;
uint8_t ccp_cryptodev_driver_id;
+struct ccp_pmd_init_params {
+ struct rte_cryptodev_pmd_init_params def_p;
+ bool auth_opt;
+};
+
+#define CCP_CRYPTODEV_PARAM_NAME ("name")
+#define CCP_CRYPTODEV_PARAM_SOCKET_ID ("socket_id")
+#define CCP_CRYPTODEV_PARAM_MAX_NB_QP ("max_nb_queue_pairs")
+#define CCP_CRYPTODEV_PARAM_MAX_NB_SESS ("max_nb_sessions")
+#define CCP_CRYPTODEV_PARAM_AUTH_OPT ("ccp_auth_opt")
+
+const char *ccp_pmd_valid_params[] = {
+ CCP_CRYPTODEV_PARAM_NAME,
+ CCP_CRYPTODEV_PARAM_SOCKET_ID,
+ CCP_CRYPTODEV_PARAM_MAX_NB_QP,
+ CCP_CRYPTODEV_PARAM_MAX_NB_SESS,
+ CCP_CRYPTODEV_PARAM_AUTH_OPT,
+};
+
+/** ccp pmd auth option */
+enum ccp_pmd_auth_opt {
+ CCP_PMD_AUTH_OPT_CCP = 0,
+ CCP_PMD_AUTH_OPT_CPU,
+};
+
+/** parse integer from integer argument */
+static int
+parse_integer_arg(const char *key __rte_unused,
+ const char *value, void *extra_args)
+{
+ int *i = (int *) extra_args;
+
+ *i = atoi(value);
+ if (*i < 0) {
+ CCP_LOG_ERR("Argument has to be positive.\n");
+ return -EINVAL;
+ }
+
+ return 0;
+}
+
+/** parse name argument */
+static int
+parse_name_arg(const char *key __rte_unused,
+ const char *value, void *extra_args)
+{
+ struct rte_cryptodev_pmd_init_params *params = extra_args;
+
+ if (strlen(value) >= RTE_CRYPTODEV_NAME_MAX_LEN - 1) {
+ CCP_LOG_ERR("Invalid name %s, should be less than "
+ "%u bytes.\n", value,
+ RTE_CRYPTODEV_NAME_MAX_LEN - 1);
+ return -EINVAL;
+ }
+
+ strncpy(params->name, value, RTE_CRYPTODEV_NAME_MAX_LEN);
+
+ return 0;
+}
+
+/** parse authentication operation option */
+static int
+parse_auth_opt_arg(const char *key __rte_unused,
+ const char *value, void *extra_args)
+{
+ struct ccp_pmd_init_params *params = extra_args;
+ int i;
+
+ i = atoi(value);
+ if (i < CCP_PMD_AUTH_OPT_CCP || i > CCP_PMD_AUTH_OPT_CPU) {
+ CCP_LOG_ERR("Invalid ccp pmd auth option. "
+ "0->auth on CCP(default), "
+ "1->auth on CPU\n");
+ return -EINVAL;
+ }
+ params->auth_opt = i;
+ return 0;
+}
+
+static int
+ccp_pmd_parse_input_args(struct ccp_pmd_init_params *params,
+ const char *input_args)
+{
+ struct rte_kvargs *kvlist = NULL;
+ int ret = 0;
+
+ if (params == NULL)
+ return -EINVAL;
+
+ if (input_args) {
+ kvlist = rte_kvargs_parse(input_args,
+ ccp_pmd_valid_params);
+ if (kvlist == NULL)
+ return -1;
+
+ ret = rte_kvargs_process(kvlist,
+ CCP_CRYPTODEV_PARAM_MAX_NB_QP,
+ &parse_integer_arg,
+ ¶ms->def_p.max_nb_queue_pairs);
+ if (ret < 0)
+ goto free_kvlist;
+
+ ret = rte_kvargs_process(kvlist,
+ CCP_CRYPTODEV_PARAM_MAX_NB_SESS,
+ &parse_integer_arg,
+ ¶ms->def_p.max_nb_sessions);
+ if (ret < 0)
+ goto free_kvlist;
+
+ ret = rte_kvargs_process(kvlist,
+ CCP_CRYPTODEV_PARAM_SOCKET_ID,
+ &parse_integer_arg,
+ ¶ms->def_p.socket_id);
+ if (ret < 0)
+ goto free_kvlist;
+
+ ret = rte_kvargs_process(kvlist,
+ CCP_CRYPTODEV_PARAM_NAME,
+ &parse_name_arg,
+ ¶ms->def_p);
+ if (ret < 0)
+ goto free_kvlist;
+
+ ret = rte_kvargs_process(kvlist,
+ CCP_CRYPTODEV_PARAM_AUTH_OPT,
+ &parse_auth_opt_arg,
+ params);
+ if (ret < 0)
+ goto free_kvlist;
+
+ }
+
+free_kvlist:
+ rte_kvargs_free(kvlist);
+ return ret;
+}
+
static struct ccp_session *
get_ccp_session(struct ccp_qp *qp, struct rte_crypto_op *op)
{
} else if (op->sess_type == RTE_CRYPTO_OP_SESSIONLESS) {
void *_sess;
void *_sess_private_data = NULL;
+ struct ccp_private *internals;
if (rte_mempool_get(qp->sess_mp, &_sess))
return NULL;
sess = (struct ccp_session *)_sess_private_data;
- if (unlikely(ccp_set_session_parameters(sess,
- op->sym->xform) != 0)) {
+ internals = (struct ccp_private *)qp->dev->data->dev_private;
+ if (unlikely(ccp_set_session_parameters(sess, op->sym->xform,
+ internals) != 0)) {
rte_mempool_put(qp->sess_mp, _sess);
rte_mempool_put(qp->sess_mp, _sess_private_data);
sess = NULL;
static int
cryptodev_ccp_create(const char *name,
struct rte_vdev_device *vdev,
- struct rte_cryptodev_pmd_init_params *init_params)
+ struct ccp_pmd_init_params *init_params)
{
struct rte_cryptodev *dev;
struct ccp_private *internals;
uint8_t cryptodev_cnt = 0;
- if (init_params->name[0] == '\0')
- snprintf(init_params->name, sizeof(init_params->name),
- "%s", name);
+ if (init_params->def_p.name[0] == '\0')
+ snprintf(init_params->def_p.name,
+ sizeof(init_params->def_p.name),
+ "%s", name);
- dev = rte_cryptodev_pmd_create(init_params->name,
+ dev = rte_cryptodev_pmd_create(init_params->def_p.name,
&vdev->device,
- init_params);
+ &init_params->def_p);
if (dev == NULL) {
CCP_LOG_ERR("failed to create cryptodev vdev");
goto init_error;
internals = dev->data->dev_private;
- internals->max_nb_qpairs = init_params->max_nb_queue_pairs;
- internals->max_nb_sessions = init_params->max_nb_sessions;
+ internals->max_nb_qpairs = init_params->def_p.max_nb_queue_pairs;
+ internals->max_nb_sessions = init_params->def_p.max_nb_sessions;
+ internals->auth_opt = init_params->auth_opt;
internals->crypto_num_dev = cryptodev_cnt;
return 0;
init_error:
CCP_LOG_ERR("driver %s: %s() failed",
- init_params->name, __func__);
+ init_params->def_p.name, __func__);
cryptodev_ccp_remove(vdev);
return -EFAULT;
{
int rc = 0;
const char *name;
- struct rte_cryptodev_pmd_init_params init_params = {
- "",
- sizeof(struct ccp_private),
- rte_socket_id(),
- CCP_PMD_MAX_QUEUE_PAIRS,
- RTE_CRYPTODEV_PMD_DEFAULT_MAX_NB_SESSIONS
+ struct ccp_pmd_init_params init_params = {
+ .def_p = {
+ "",
+ sizeof(struct ccp_private),
+ rte_socket_id(),
+ CCP_PMD_MAX_QUEUE_PAIRS,
+ RTE_CRYPTODEV_PMD_DEFAULT_MAX_NB_SESSIONS
+ },
+ .auth_opt = CCP_PMD_AUTH_OPT_CCP,
};
const char *input_args;
return -EINVAL;
input_args = rte_vdev_device_args(vdev);
- rte_cryptodev_pmd_parse_input_args(&init_params, input_args);
- init_params.max_nb_queue_pairs = CCP_PMD_MAX_QUEUE_PAIRS;
+ ccp_pmd_parse_input_args(&init_params, input_args);
+ init_params.def_p.max_nb_queue_pairs = CCP_PMD_MAX_QUEUE_PAIRS;
RTE_LOG(INFO, PMD, "Initialising %s on NUMA node %d\n", name,
- init_params.socket_id);
+ init_params.def_p.socket_id);
RTE_LOG(INFO, PMD, "Max number of queue pairs = %d\n",
- init_params.max_nb_queue_pairs);
+ init_params.def_p.max_nb_queue_pairs);
RTE_LOG(INFO, PMD, "Max number of sessions = %d\n",
- init_params.max_nb_sessions);
+ init_params.def_p.max_nb_sessions);
+ RTE_LOG(INFO, PMD, "Authentication offload to %s\n",
+ ((init_params.auth_opt == 0) ? "CCP" : "CPU"));
rc = cryptodev_ccp_create(name, vdev, &init_params);
if (rc)
RTE_PMD_REGISTER_VDEV(CRYPTODEV_NAME_CCP_PMD, cryptodev_ccp_pmd_drv);
RTE_PMD_REGISTER_PARAM_STRING(CRYPTODEV_NAME_CCP_PMD,
- "max_nb_queue_pairs=<int> max_nb_sessions=<int> socket_id=<int>");
+ "max_nb_queue_pairs=<int> "
+ "max_nb_sessions=<int> "
+ "socket_id=<int> "
+ "ccp_auth_opt=<int>");
RTE_PMD_REGISTER_CRYPTO_DRIVER(ccp_crypto_drv, cryptodev_ccp_pmd_drv.driver,
ccp_cryptodev_driver_id);