#include "ccp_pci.h"
#include "ccp_pmd_private.h"
+#include <openssl/conf.h>
+#include <openssl/err.h>
+#include <openssl/hmac.h>
+
/* SHA initial context values */
static uint32_t ccp_sha1_init[SHA_COMMON_DIGEST_SIZE / sizeof(uint32_t)] = {
SHA1_H4, SHA1_H3,
else
sess->auth.op = CCP_AUTH_OP_VERIFY;
switch (auth_xform->algo) {
+ case RTE_CRYPTO_AUTH_MD5_HMAC:
+ if (sess->auth_opt) {
+ sess->auth.algo = CCP_AUTH_ALGO_MD5_HMAC;
+ sess->auth.offset = ((CCP_SB_BYTES << 1) -
+ MD5_DIGEST_SIZE);
+ sess->auth.key_length = auth_xform->key.length;
+ sess->auth.block_size = MD5_BLOCK_SIZE;
+ memset(sess->auth.key, 0, sess->auth.block_size);
+ rte_memcpy(sess->auth.key, auth_xform->key.data,
+ auth_xform->key.length);
+ } else
+ return -1; /* HMAC MD5 not supported on CCP */
+ break;
case RTE_CRYPTO_AUTH_SHA1:
sess->auth.engine = CCP_ENGINE_SHA;
sess->auth.algo = CCP_AUTH_ALGO_SHA1;
sess->auth.offset = CCP_SB_BYTES - SHA1_DIGEST_SIZE;
break;
case RTE_CRYPTO_AUTH_SHA1_HMAC:
- if (auth_xform->key.length > SHA1_BLOCK_SIZE)
- return -1;
- sess->auth.engine = CCP_ENGINE_SHA;
- sess->auth.algo = CCP_AUTH_ALGO_SHA1_HMAC;
- sess->auth.ut.sha_type = CCP_SHA_TYPE_1;
- sess->auth.ctx_len = CCP_SB_BYTES;
- sess->auth.offset = CCP_SB_BYTES - SHA1_DIGEST_SIZE;
- sess->auth.block_size = SHA1_BLOCK_SIZE;
- sess->auth.key_length = auth_xform->key.length;
- memset(sess->auth.key, 0, sess->auth.block_size);
- memset(sess->auth.pre_compute, 0, sess->auth.ctx_len << 1);
- rte_memcpy(sess->auth.key, auth_xform->key.data,
- auth_xform->key.length);
- if (generate_partial_hash(sess))
- return -1;
+ if (sess->auth_opt) {
+ if (auth_xform->key.length > SHA1_BLOCK_SIZE)
+ return -1;
+ sess->auth.algo = CCP_AUTH_ALGO_SHA1_HMAC;
+ sess->auth.offset = CCP_SB_BYTES - SHA1_DIGEST_SIZE;
+ sess->auth.block_size = SHA1_BLOCK_SIZE;
+ sess->auth.key_length = auth_xform->key.length;
+ memset(sess->auth.key, 0, sess->auth.block_size);
+ rte_memcpy(sess->auth.key, auth_xform->key.data,
+ auth_xform->key.length);
+ } else {
+ if (auth_xform->key.length > SHA1_BLOCK_SIZE)
+ return -1;
+ sess->auth.engine = CCP_ENGINE_SHA;
+ sess->auth.algo = CCP_AUTH_ALGO_SHA1_HMAC;
+ sess->auth.ut.sha_type = CCP_SHA_TYPE_1;
+ sess->auth.ctx_len = CCP_SB_BYTES;
+ sess->auth.offset = CCP_SB_BYTES - SHA1_DIGEST_SIZE;
+ sess->auth.block_size = SHA1_BLOCK_SIZE;
+ sess->auth.key_length = auth_xform->key.length;
+ memset(sess->auth.key, 0, sess->auth.block_size);
+ memset(sess->auth.pre_compute, 0,
+ sess->auth.ctx_len << 1);
+ rte_memcpy(sess->auth.key, auth_xform->key.data,
+ auth_xform->key.length);
+ if (generate_partial_hash(sess))
+ return -1;
+ }
break;
case RTE_CRYPTO_AUTH_SHA224:
sess->auth.algo = CCP_AUTH_ALGO_SHA224;
sess->auth.offset = CCP_SB_BYTES - SHA224_DIGEST_SIZE;
break;
case RTE_CRYPTO_AUTH_SHA224_HMAC:
- if (auth_xform->key.length > SHA224_BLOCK_SIZE)
- return -1;
- sess->auth.algo = CCP_AUTH_ALGO_SHA224_HMAC;
- sess->auth.engine = CCP_ENGINE_SHA;
- sess->auth.ut.sha_type = CCP_SHA_TYPE_224;
- sess->auth.ctx_len = CCP_SB_BYTES;
- sess->auth.offset = CCP_SB_BYTES - SHA224_DIGEST_SIZE;
- sess->auth.block_size = SHA224_BLOCK_SIZE;
- sess->auth.key_length = auth_xform->key.length;
- memset(sess->auth.key, 0, sess->auth.block_size);
- memset(sess->auth.pre_compute, 0, sess->auth.ctx_len << 1);
- rte_memcpy(sess->auth.key, auth_xform->key.data,
- auth_xform->key.length);
- if (generate_partial_hash(sess))
- return -1;
+ if (sess->auth_opt) {
+ if (auth_xform->key.length > SHA224_BLOCK_SIZE)
+ return -1;
+ sess->auth.algo = CCP_AUTH_ALGO_SHA224_HMAC;
+ sess->auth.offset = CCP_SB_BYTES - SHA224_DIGEST_SIZE;
+ sess->auth.block_size = SHA224_BLOCK_SIZE;
+ sess->auth.key_length = auth_xform->key.length;
+ memset(sess->auth.key, 0, sess->auth.block_size);
+ rte_memcpy(sess->auth.key, auth_xform->key.data,
+ auth_xform->key.length);
+ } else {
+ if (auth_xform->key.length > SHA224_BLOCK_SIZE)
+ return -1;
+ sess->auth.algo = CCP_AUTH_ALGO_SHA224_HMAC;
+ sess->auth.engine = CCP_ENGINE_SHA;
+ sess->auth.ut.sha_type = CCP_SHA_TYPE_224;
+ sess->auth.ctx_len = CCP_SB_BYTES;
+ sess->auth.offset = CCP_SB_BYTES - SHA224_DIGEST_SIZE;
+ sess->auth.block_size = SHA224_BLOCK_SIZE;
+ sess->auth.key_length = auth_xform->key.length;
+ memset(sess->auth.key, 0, sess->auth.block_size);
+ memset(sess->auth.pre_compute, 0,
+ sess->auth.ctx_len << 1);
+ rte_memcpy(sess->auth.key, auth_xform->key.data,
+ auth_xform->key.length);
+ if (generate_partial_hash(sess))
+ return -1;
+ }
break;
case RTE_CRYPTO_AUTH_SHA3_224:
sess->auth.algo = CCP_AUTH_ALGO_SHA3_224;
sess->auth.offset = CCP_SB_BYTES - SHA256_DIGEST_SIZE;
break;
case RTE_CRYPTO_AUTH_SHA256_HMAC:
- if (auth_xform->key.length > SHA256_BLOCK_SIZE)
- return -1;
- sess->auth.algo = CCP_AUTH_ALGO_SHA256_HMAC;
- sess->auth.engine = CCP_ENGINE_SHA;
- sess->auth.ut.sha_type = CCP_SHA_TYPE_256;
- sess->auth.ctx_len = CCP_SB_BYTES;
- sess->auth.offset = CCP_SB_BYTES - SHA256_DIGEST_SIZE;
- sess->auth.block_size = SHA256_BLOCK_SIZE;
- sess->auth.key_length = auth_xform->key.length;
- memset(sess->auth.key, 0, sess->auth.block_size);
- memset(sess->auth.pre_compute, 0, sess->auth.ctx_len << 1);
- rte_memcpy(sess->auth.key, auth_xform->key.data,
- auth_xform->key.length);
- if (generate_partial_hash(sess))
- return -1;
+ if (sess->auth_opt) {
+ if (auth_xform->key.length > SHA256_BLOCK_SIZE)
+ return -1;
+ sess->auth.algo = CCP_AUTH_ALGO_SHA256_HMAC;
+ sess->auth.offset = CCP_SB_BYTES - SHA256_DIGEST_SIZE;
+ sess->auth.block_size = SHA256_BLOCK_SIZE;
+ sess->auth.key_length = auth_xform->key.length;
+ memset(sess->auth.key, 0, sess->auth.block_size);
+ rte_memcpy(sess->auth.key, auth_xform->key.data,
+ auth_xform->key.length);
+ } else {
+ if (auth_xform->key.length > SHA256_BLOCK_SIZE)
+ return -1;
+ sess->auth.algo = CCP_AUTH_ALGO_SHA256_HMAC;
+ sess->auth.engine = CCP_ENGINE_SHA;
+ sess->auth.ut.sha_type = CCP_SHA_TYPE_256;
+ sess->auth.ctx_len = CCP_SB_BYTES;
+ sess->auth.offset = CCP_SB_BYTES - SHA256_DIGEST_SIZE;
+ sess->auth.block_size = SHA256_BLOCK_SIZE;
+ sess->auth.key_length = auth_xform->key.length;
+ memset(sess->auth.key, 0, sess->auth.block_size);
+ memset(sess->auth.pre_compute, 0,
+ sess->auth.ctx_len << 1);
+ rte_memcpy(sess->auth.key, auth_xform->key.data,
+ auth_xform->key.length);
+ if (generate_partial_hash(sess))
+ return -1;
+ }
break;
case RTE_CRYPTO_AUTH_SHA3_256:
sess->auth.algo = CCP_AUTH_ALGO_SHA3_256;
sess->auth.offset = (CCP_SB_BYTES << 1) - SHA384_DIGEST_SIZE;
break;
case RTE_CRYPTO_AUTH_SHA384_HMAC:
- if (auth_xform->key.length > SHA384_BLOCK_SIZE)
- return -1;
- sess->auth.algo = CCP_AUTH_ALGO_SHA384_HMAC;
- sess->auth.engine = CCP_ENGINE_SHA;
- sess->auth.ut.sha_type = CCP_SHA_TYPE_384;
- sess->auth.ctx_len = CCP_SB_BYTES << 1;
- sess->auth.offset = (CCP_SB_BYTES << 1) - SHA384_DIGEST_SIZE;
- sess->auth.block_size = SHA384_BLOCK_SIZE;
- sess->auth.key_length = auth_xform->key.length;
- memset(sess->auth.key, 0, sess->auth.block_size);
- memset(sess->auth.pre_compute, 0, sess->auth.ctx_len << 1);
- rte_memcpy(sess->auth.key, auth_xform->key.data,
- auth_xform->key.length);
- if (generate_partial_hash(sess))
- return -1;
+ if (sess->auth_opt) {
+ if (auth_xform->key.length > SHA384_BLOCK_SIZE)
+ return -1;
+ sess->auth.algo = CCP_AUTH_ALGO_SHA384_HMAC;
+ sess->auth.offset = ((CCP_SB_BYTES << 1) -
+ SHA384_DIGEST_SIZE);
+ sess->auth.block_size = SHA384_BLOCK_SIZE;
+ sess->auth.key_length = auth_xform->key.length;
+ memset(sess->auth.key, 0, sess->auth.block_size);
+ rte_memcpy(sess->auth.key, auth_xform->key.data,
+ auth_xform->key.length);
+ } else {
+ if (auth_xform->key.length > SHA384_BLOCK_SIZE)
+ return -1;
+ sess->auth.algo = CCP_AUTH_ALGO_SHA384_HMAC;
+ sess->auth.engine = CCP_ENGINE_SHA;
+ sess->auth.ut.sha_type = CCP_SHA_TYPE_384;
+ sess->auth.ctx_len = CCP_SB_BYTES << 1;
+ sess->auth.offset = ((CCP_SB_BYTES << 1) -
+ SHA384_DIGEST_SIZE);
+ sess->auth.block_size = SHA384_BLOCK_SIZE;
+ sess->auth.key_length = auth_xform->key.length;
+ memset(sess->auth.key, 0, sess->auth.block_size);
+ memset(sess->auth.pre_compute, 0,
+ sess->auth.ctx_len << 1);
+ rte_memcpy(sess->auth.key, auth_xform->key.data,
+ auth_xform->key.length);
+ if (generate_partial_hash(sess))
+ return -1;
+ }
break;
case RTE_CRYPTO_AUTH_SHA3_384:
sess->auth.algo = CCP_AUTH_ALGO_SHA3_384;
sess->auth.offset = (CCP_SB_BYTES << 1) - SHA512_DIGEST_SIZE;
break;
case RTE_CRYPTO_AUTH_SHA512_HMAC:
- if (auth_xform->key.length > SHA512_BLOCK_SIZE)
- return -1;
- sess->auth.algo = CCP_AUTH_ALGO_SHA512_HMAC;
- sess->auth.engine = CCP_ENGINE_SHA;
- sess->auth.ut.sha_type = CCP_SHA_TYPE_512;
- sess->auth.ctx_len = CCP_SB_BYTES << 1;
- sess->auth.offset = (CCP_SB_BYTES << 1) - SHA512_DIGEST_SIZE;
- sess->auth.block_size = SHA512_BLOCK_SIZE;
- sess->auth.key_length = auth_xform->key.length;
- memset(sess->auth.key, 0, sess->auth.block_size);
- memset(sess->auth.pre_compute, 0, sess->auth.ctx_len << 1);
- rte_memcpy(sess->auth.key, auth_xform->key.data,
- auth_xform->key.length);
- if (generate_partial_hash(sess))
- return -1;
+ if (sess->auth_opt) {
+ if (auth_xform->key.length > SHA512_BLOCK_SIZE)
+ return -1;
+ sess->auth.algo = CCP_AUTH_ALGO_SHA512_HMAC;
+ sess->auth.offset = ((CCP_SB_BYTES << 1) -
+ SHA512_DIGEST_SIZE);
+ sess->auth.block_size = SHA512_BLOCK_SIZE;
+ sess->auth.key_length = auth_xform->key.length;
+ memset(sess->auth.key, 0, sess->auth.block_size);
+ rte_memcpy(sess->auth.key, auth_xform->key.data,
+ auth_xform->key.length);
+ } else {
+ if (auth_xform->key.length > SHA512_BLOCK_SIZE)
+ return -1;
+ sess->auth.algo = CCP_AUTH_ALGO_SHA512_HMAC;
+ sess->auth.engine = CCP_ENGINE_SHA;
+ sess->auth.ut.sha_type = CCP_SHA_TYPE_512;
+ sess->auth.ctx_len = CCP_SB_BYTES << 1;
+ sess->auth.offset = ((CCP_SB_BYTES << 1) -
+ SHA512_DIGEST_SIZE);
+ sess->auth.block_size = SHA512_BLOCK_SIZE;
+ sess->auth.key_length = auth_xform->key.length;
+ memset(sess->auth.key, 0, sess->auth.block_size);
+ memset(sess->auth.pre_compute, 0,
+ sess->auth.ctx_len << 1);
+ rte_memcpy(sess->auth.key, auth_xform->key.data,
+ auth_xform->key.length);
+ if (generate_partial_hash(sess))
+ return -1;
+ }
break;
case RTE_CRYPTO_AUTH_SHA3_512:
sess->auth.algo = CCP_AUTH_ALGO_SHA3_512;
sess->auth.engine = CCP_ENGINE_AES;
sess->auth.um.aes_mode = CCP_AES_MODE_CMAC;
sess->auth.key_length = auth_xform->key.length;
- /**<padding and hash result*/
+ /* padding and hash result */
sess->auth.ctx_len = CCP_SB_BYTES << 1;
sess->auth.offset = AES_BLOCK_SIZE;
sess->auth.block_size = AES_BLOCK_SIZE;
int
ccp_set_session_parameters(struct ccp_session *sess,
- const struct rte_crypto_sym_xform *xform)
+ const struct rte_crypto_sym_xform *xform,
+ struct ccp_private *internals)
{
const struct rte_crypto_sym_xform *cipher_xform = NULL;
const struct rte_crypto_sym_xform *auth_xform = NULL;
const struct rte_crypto_sym_xform *aead_xform = NULL;
int ret = 0;
+ sess->auth_opt = internals->auth_opt;
sess->cmd_id = ccp_get_cmd_id(xform);
switch (sess->cmd_id) {
count = 3;
/**< op + lsb passthrough cpy to/from*/
break;
+ case CCP_AUTH_ALGO_MD5_HMAC:
+ break;
case CCP_AUTH_ALGO_SHA1_HMAC:
case CCP_AUTH_ALGO_SHA224_HMAC:
case CCP_AUTH_ALGO_SHA256_HMAC:
- count = 6;
+ if (session->auth_opt == 0)
+ count = 6;
break;
case CCP_AUTH_ALGO_SHA384_HMAC:
case CCP_AUTH_ALGO_SHA512_HMAC:
- count = 7;
/**
* 1. Load PHash1 = H(k ^ ipad); to LSB
 * 2. generate IHash = H(hash on message with PHash1
* as init value);
* 6. Retrieve HMAC output from LSB to host memory
*/
+ if (session->auth_opt == 0)
+ count = 7;
break;
case CCP_AUTH_ALGO_SHA3_224:
case CCP_AUTH_ALGO_SHA3_256:
return count;
}
+/**
+ * Map a CCP HMAC auth algo enum to the corresponding OpenSSL EVP digest.
+ *
+ * @param sessalgo session auth algorithm (CCP_AUTH_ALGO_*)
+ * @param algo out: set to the matching EVP_MD on success; untouched otherwise
+ * @return 0 on success, non-zero if the algorithm has no OpenSSL mapping
+ *
+ * NOTE: the return type is uint8_t, so we report failure as a plain flag
+ * rather than -EINVAL — a negative errno would be silently truncated to an
+ * unrelated positive value (e.g. -22 -> 234) through the uint8_t return.
+ */
+static uint8_t
+algo_select(int sessalgo,
+	    const EVP_MD **algo)
+{
+	uint8_t res = 0;
+
+	switch (sessalgo) {
+	case CCP_AUTH_ALGO_MD5_HMAC:
+		*algo = EVP_md5();
+		break;
+	case CCP_AUTH_ALGO_SHA1_HMAC:
+		*algo = EVP_sha1();
+		break;
+	case CCP_AUTH_ALGO_SHA224_HMAC:
+		*algo = EVP_sha224();
+		break;
+	case CCP_AUTH_ALGO_SHA256_HMAC:
+		*algo = EVP_sha256();
+		break;
+	case CCP_AUTH_ALGO_SHA384_HMAC:
+		*algo = EVP_sha384();
+		break;
+	case CCP_AUTH_ALGO_SHA512_HMAC:
+		*algo = EVP_sha512();
+		break;
+	default:
+		res = 1; /* unsupported algo for the CPU (OpenSSL) path */
+		break;
+	}
+	return res;
+}
+
+/**
+ * Compute an HMAC over [src, src+srclen) on the CPU via OpenSSL and copy the
+ * first d_len bytes of the digest to dst.
+ *
+ * @param src input data
+ * @param dst output buffer, must hold at least d_len bytes
+ * @param iv unused (kept for call-signature symmetry with cipher helpers)
+ * @param pkey HMAC key wrapped as an EVP_PKEY (EVP_PKEY_HMAC)
+ * @param srclen input length in bytes
+ * @param ctx caller-provided EVP_MD_CTX, reused across ops
+ * @param algo digest to use (EVP_md5/EVP_sha1/... from algo_select())
+ * @param d_len number of digest bytes to emit (truncated-HMAC support)
+ * @return 0 on success, -EINVAL on any OpenSSL failure
+ */
+static int
+process_cpu_auth_hmac(uint8_t *src, uint8_t *dst,
+		      __rte_unused uint8_t *iv,
+		      EVP_PKEY *pkey,
+		      int srclen,
+		      EVP_MD_CTX *ctx,
+		      const EVP_MD *algo,
+		      uint16_t d_len)
+{
+	/* 64 bytes is the largest digest we support (SHA-512) */
+	unsigned char temp_dst[64];
+	/*
+	 * EVP_DigestSignFinal() requires *siglen to hold the output buffer
+	 * size on entry when the output pointer is non-NULL; leaving it
+	 * uninitialized is undefined per the OpenSSL API contract.
+	 */
+	size_t dstlen = sizeof(temp_dst);
+
+	if (EVP_DigestSignInit(ctx, NULL, algo, NULL, pkey) <= 0)
+		goto process_auth_err;
+
+	if (EVP_DigestSignUpdate(ctx, (char *)src, srclen) <= 0)
+		goto process_auth_err;
+
+	if (EVP_DigestSignFinal(ctx, temp_dst, &dstlen) <= 0)
+		goto process_auth_err;
+
+	/* never copy more than OpenSSL actually produced */
+	if (d_len > dstlen)
+		goto process_auth_err;
+
+	memcpy(dst, temp_dst, d_len);
+	return 0;
+process_auth_err:
+	CCP_LOG_ERR("Process cpu auth failed");
+	return -EINVAL;
+}
+
+/**
+ * Perform one authentication op on the CPU (OpenSSL HMAC) instead of the
+ * CCP engine. For GENERATE ops the digest is written to the op's digest
+ * buffer; for VERIFY ops it is computed into qp->temp_digest and compared
+ * against the digest supplied in the op.
+ *
+ * @param qp queue pair (provides temp_digest scratch for verify)
+ * @param op symmetric crypto op to process; op->status is set here
+ * @param sess session holding auth key/algo/digest length
+ * @param ctx caller-provided EVP_MD_CTX, reused across ops
+ * @return 0 on success (including auth-failed verify), negative errno on error
+ */
+static int cpu_crypto_auth(struct ccp_qp *qp,
+			   struct rte_crypto_op *op,
+			   struct ccp_session *sess,
+			   EVP_MD_CTX *ctx)
+{
+	uint8_t *src, *dst;
+	int srclen, status;
+	struct rte_mbuf *mbuf_src, *mbuf_dst;
+	const EVP_MD *algo = NULL;
+	EVP_PKEY *pkey;
+
+	/* a failed lookup would hand OpenSSL a NULL digest — fail early */
+	if (algo_select(sess->auth.algo, &algo)) {
+		op->status = RTE_CRYPTO_OP_STATUS_ERROR;
+		return -EINVAL;
+	}
+	pkey = EVP_PKEY_new_mac_key(EVP_PKEY_HMAC, NULL, sess->auth.key,
+				    sess->auth.key_length);
+	if (unlikely(pkey == NULL)) {
+		CCP_LOG_ERR("Failed to create HMAC key");
+		op->status = RTE_CRYPTO_OP_STATUS_ERROR;
+		return -ENOMEM;
+	}
+	mbuf_src = op->sym->m_src;
+	mbuf_dst = op->sym->m_dst ? op->sym->m_dst : op->sym->m_src;
+	srclen = op->sym->auth.data.length;
+	src = rte_pktmbuf_mtod_offset(mbuf_src, uint8_t *,
+				      op->sym->auth.data.offset);
+
+	if (sess->auth.op == CCP_AUTH_OP_VERIFY) {
+		dst = qp->temp_digest;
+	} else {
+		dst = op->sym->auth.digest.data;
+		if (dst == NULL) {
+			dst = rte_pktmbuf_mtod_offset(mbuf_dst, uint8_t *,
+						      op->sym->auth.data.offset +
+						      sess->auth.digest_length);
+		}
+	}
+	status = process_cpu_auth_hmac(src, dst, NULL,
+				       pkey, srclen,
+				       ctx,
+				       algo,
+				       sess->auth.digest_length);
+	if (status) {
+		op->status = RTE_CRYPTO_OP_STATUS_ERROR;
+		EVP_PKEY_free(pkey); /* don't leak the key on the error path */
+		return status;
+	}
+
+	if (sess->auth.op == CCP_AUTH_OP_VERIFY) {
+		/*
+		 * NOTE(review): plain memcmp is a timing side channel for MAC
+		 * verification — consider CRYPTO_memcmp(); confirm whether
+		 * this path can see attacker-controlled digests.
+		 */
+		if (memcmp(dst, op->sym->auth.digest.data,
+			   sess->auth.digest_length) != 0) {
+			op->status = RTE_CRYPTO_OP_STATUS_AUTH_FAILED;
+		} else {
+			op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
+		}
+	} else {
+		op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
+	}
+	EVP_PKEY_free(pkey);
+	return 0;
+}
+
static void
ccp_perform_passthru(struct ccp_passthru *pst,
struct ccp_queue *cmd_q)
void *append_ptr;
uint8_t *addr;
- session = (struct ccp_session *)get_session_private_data(
+ session = (struct ccp_session *)get_sym_session_private_data(
op->sym->session,
ccp_cryptodev_driver_id);
addr = session->auth.pre_compute;
void *append_ptr;
uint64_t auth_msg_bits;
- session = (struct ccp_session *)get_session_private_data(
+ session = (struct ccp_session *)get_sym_session_private_data(
op->sym->session,
ccp_cryptodev_driver_id);
uint32_t tail;
phys_addr_t src_addr, dest_addr, ctx_paddr, dest_addr_t;
- session = (struct ccp_session *)get_session_private_data(
+ session = (struct ccp_session *)get_sym_session_private_data(
op->sym->session,
ccp_cryptodev_driver_id);
uint32_t tail;
phys_addr_t src_addr, dest_addr, ctx_paddr;
- session = (struct ccp_session *)get_session_private_data(
+ session = (struct ccp_session *)get_sym_session_private_data(
op->sym->session,
ccp_cryptodev_driver_id);
phys_addr_t src_addr, dest_addr, key_addr;
int length, non_align_len;
- session = (struct ccp_session *)get_session_private_data(
+ session = (struct ccp_session *)get_sym_session_private_data(
op->sym->session,
ccp_cryptodev_driver_id);
key_addr = rte_mem_virt2phy(session->auth.key_ccp);
phys_addr_t src_addr, dest_addr, key_addr;
uint8_t *iv;
- session = (struct ccp_session *)get_session_private_data(
+ session = (struct ccp_session *)get_sym_session_private_data(
op->sym->session,
ccp_cryptodev_driver_id);
function.raw = 0;
uint8_t *iv;
phys_addr_t src_addr, dest_addr, key_addr;
- session = (struct ccp_session *)get_session_private_data(
+ session = (struct ccp_session *)get_sym_session_private_data(
op->sym->session,
ccp_cryptodev_driver_id);
phys_addr_t digest_dest_addr;
int length, non_align_len;
- session = (struct ccp_session *)get_session_private_data(
+ session = (struct ccp_session *)get_sym_session_private_data(
op->sym->session,
ccp_cryptodev_driver_id);
iv = rte_crypto_op_ctod_offset(op, uint8_t *, session->iv.offset);
int result = 0;
struct ccp_session *session;
- session = (struct ccp_session *)get_session_private_data(
+ session = (struct ccp_session *)get_sym_session_private_data(
op->sym->session,
ccp_cryptodev_driver_id);
int result = 0;
struct ccp_session *session;
- session = (struct ccp_session *)get_session_private_data(
+ session = (struct ccp_session *)get_sym_session_private_data(
op->sym->session,
ccp_cryptodev_driver_id);
result = ccp_perform_sha(op, cmd_q);
b_info->desccnt += 3;
break;
+ case CCP_AUTH_ALGO_MD5_HMAC:
+ if (session->auth_opt == 0)
+ result = -1;
+ break;
case CCP_AUTH_ALGO_SHA1_HMAC:
case CCP_AUTH_ALGO_SHA224_HMAC:
case CCP_AUTH_ALGO_SHA256_HMAC:
- result = ccp_perform_hmac(op, cmd_q);
- b_info->desccnt += 6;
+ if (session->auth_opt == 0) {
+ result = ccp_perform_hmac(op, cmd_q);
+ b_info->desccnt += 6;
+ }
break;
case CCP_AUTH_ALGO_SHA384_HMAC:
case CCP_AUTH_ALGO_SHA512_HMAC:
- result = ccp_perform_hmac(op, cmd_q);
- b_info->desccnt += 7;
+ if (session->auth_opt == 0) {
+ result = ccp_perform_hmac(op, cmd_q);
+ b_info->desccnt += 7;
+ }
break;
case CCP_AUTH_ALGO_SHA3_224:
case CCP_AUTH_ALGO_SHA3_256:
int result = 0;
struct ccp_session *session;
- session = (struct ccp_session *)get_session_private_data(
+ session = (struct ccp_session *)get_sym_session_private_data(
op->sym->session,
ccp_cryptodev_driver_id);
}
int
-process_ops_to_enqueue(const struct ccp_qp *qp,
+process_ops_to_enqueue(struct ccp_qp *qp,
struct rte_crypto_op **op,
struct ccp_queue *cmd_q,
uint16_t nb_ops,
int i, result = 0;
struct ccp_batch_info *b_info;
struct ccp_session *session;
+ EVP_MD_CTX *auth_ctx = NULL;
if (rte_mempool_get(qp->batch_mp, (void **)&b_info)) {
CCP_LOG_ERR("batch info allocation failed");
return 0;
}
+
+ auth_ctx = EVP_MD_CTX_create();
+ if (unlikely(!auth_ctx)) {
+ CCP_LOG_ERR("Unable to create auth ctx");
+ return 0;
+ }
+ b_info->auth_ctr = 0;
+
/* populate batch info necessary for dequeue */
b_info->op_idx = 0;
b_info->lsb_buf_idx = 0;
b_info->head_offset = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx *
Q_DESC_SIZE);
for (i = 0; i < nb_ops; i++) {
- session = (struct ccp_session *)get_session_private_data(
+ session = (struct ccp_session *)get_sym_session_private_data(
op[i]->sym->session,
ccp_cryptodev_driver_id);
switch (session->cmd_id) {
result = ccp_crypto_cipher(op[i], cmd_q, b_info);
break;
case CCP_CMD_AUTH:
- result = ccp_crypto_auth(op[i], cmd_q, b_info);
+ if (session->auth_opt) {
+ b_info->auth_ctr++;
+ result = cpu_crypto_auth(qp, op[i],
+ session, auth_ctx);
+ } else
+ result = ccp_crypto_auth(op[i], cmd_q, b_info);
break;
case CCP_CMD_CIPHER_HASH:
result = ccp_crypto_cipher(op[i], cmd_q, b_info);
result = ccp_crypto_auth(op[i], cmd_q, b_info);
break;
case CCP_CMD_HASH_CIPHER:
- result = ccp_crypto_auth(op[i], cmd_q, b_info);
+ if (session->auth_opt) {
+ result = cpu_crypto_auth(qp, op[i],
+ session, auth_ctx);
+ if (op[i]->status !=
+ RTE_CRYPTO_OP_STATUS_SUCCESS)
+ continue;
+ } else
+ result = ccp_crypto_auth(op[i], cmd_q, b_info);
+
if (result)
break;
result = ccp_crypto_cipher(op[i], cmd_q, b_info);
rte_ring_enqueue(qp->processed_pkts, (void *)b_info);
+ EVP_MD_CTX_destroy(auth_ctx);
return i;
}
int offset, digest_offset;
uint8_t digest_le[64];
- session = (struct ccp_session *)get_session_private_data(
+ session = (struct ccp_session *)get_sym_session_private_data(
op->sym->session,
ccp_cryptodev_driver_id);
}
static int
-ccp_prepare_ops(struct rte_crypto_op **op_d,
+ccp_prepare_ops(struct ccp_qp *qp,
+ struct rte_crypto_op **op_d,
struct ccp_batch_info *b_info,
uint16_t nb_ops)
{
int i, min_ops;
struct ccp_session *session;
+ EVP_MD_CTX *auth_ctx = NULL;
+
+ auth_ctx = EVP_MD_CTX_create();
+ if (unlikely(!auth_ctx)) {
+ CCP_LOG_ERR("Unable to create auth ctx");
+ return 0;
+ }
min_ops = RTE_MIN(nb_ops, b_info->opcnt);
for (i = 0; i < min_ops; i++) {
op_d[i] = b_info->op[b_info->op_idx++];
- session = (struct ccp_session *)get_session_private_data(
+ session = (struct ccp_session *)get_sym_session_private_data(
op_d[i]->sym->session,
ccp_cryptodev_driver_id);
switch (session->cmd_id) {
op_d[i]->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
break;
case CCP_CMD_AUTH:
+ if (session->auth_opt == 0)
+ ccp_auth_dq_prepare(op_d[i]);
+ break;
case CCP_CMD_CIPHER_HASH:
+ if (session->auth_opt)
+ cpu_crypto_auth(qp, op_d[i],
+ session, auth_ctx);
+ else
+ ccp_auth_dq_prepare(op_d[i]);
+ break;
case CCP_CMD_HASH_CIPHER:
+ if (session->auth_opt)
+ op_d[i]->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
+ else
+ ccp_auth_dq_prepare(op_d[i]);
+ break;
case CCP_CMD_COMBINED:
ccp_auth_dq_prepare(op_d[i]);
break;
}
}
+ EVP_MD_CTX_destroy(auth_ctx);
b_info->opcnt -= min_ops;
return min_ops;
}
} else if (rte_ring_dequeue(qp->processed_pkts,
(void **)&b_info))
return 0;
+
+ if (b_info->auth_ctr == b_info->opcnt)
+ goto success;
cur_head_offset = CCP_READ_REG(b_info->cmd_q->reg_base,
CMD_Q_HEAD_LO_BASE);
success:
- nb_ops = ccp_prepare_ops(op, b_info, nb_ops);
+ nb_ops = ccp_prepare_ops(qp, op, b_info, nb_ops);
rte_atomic64_add(&b_info->cmd_q->free_slots, b_info->desccnt);
b_info->desccnt = 0;
if (b_info->opcnt > 0) {