1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2018 Advanced Micro Devices, Inc. All rights reserved.
5 #define OPENSSL_API_COMPAT 0x10100000L
12 #include <sys/queue.h>
13 #include <sys/types.h>
15 #include <openssl/sha.h>
16 #include <openssl/cmac.h> /*sub key apis*/
17 #include <openssl/evp.h> /*sub key apis*/
19 #include <rte_hexdump.h>
20 #include <rte_memzone.h>
21 #include <rte_malloc.h>
22 #include <rte_memory.h>
23 #include <rte_spinlock.h>
24 #include <rte_string_fns.h>
25 #include <cryptodev_pmd.h>
28 #include "ccp_crypto.h"
30 #include "ccp_pmd_private.h"
32 #include <openssl/conf.h>
33 #include <openssl/err.h>
34 #include <openssl/hmac.h>
36 extern int iommu_mode;
38 /* SHA initial context values */
/* Initial hash state tables loaded into the CCP SHA engine as the starting
 * context for each digest size. Initializer values are elided in this view;
 * presumably the FIPS 180-4 initial H constants — TODO confirm against spec.
 */
39 uint32_t ccp_sha1_init[SHA_COMMON_DIGEST_SIZE / sizeof(uint32_t)] = {
46 uint32_t ccp_sha224_init[SHA256_DIGEST_SIZE / sizeof(uint32_t)] = {
53 uint32_t ccp_sha256_init[SHA256_DIGEST_SIZE / sizeof(uint32_t)] = {
60 uint64_t ccp_sha384_init[SHA512_DIGEST_SIZE / sizeof(uint64_t)] = {
67 uint64_t ccp_sha512_init[SHA512_DIGEST_SIZE / sizeof(uint64_t)] = {
/* SHA3_CONST is defined twice, presumably under opposite arms of an elided
 * #if (plain literal vs. L-suffixed) — TODO confirm against full source.
 */
75 #define SHA3_CONST(x) x
77 #define SHA3_CONST(x) x##L
80 /** 'Words' here refers to uint64_t */
81 #define SHA3_KECCAK_SPONGE_WORDS \
82 (((1600) / 8) / sizeof(uint64_t))
83 typedef struct sha3_context_ {
86 * The portion of the input message that we
90 uint64_t s[SHA3_KECCAK_SPONGE_WORDS];
92 uint8_t sb[SHA3_KECCAK_SPONGE_WORDS * 8];
93 /**total 200 ctx size**/
95 unsigned int byteIndex;
97 * 0..7--the next byte after the set one
98 * (starts from 0; 0--none are buffered)
100 unsigned int wordIndex;
102 * 0..24--the next word to integrate input
105 unsigned int capacityWords;
107 * the double size of the hash output in
108 * words (e.g. 16 for Keccak 512)
113 #define SHA3_ROTL64(x, y) \
114 (((x) << (y)) | ((x) >> ((sizeof(uint64_t)*8) - (y))))
/* Keccak-f[1600] round constants (iota step), one per round.
 * NOTE(review): values match the FIPS 202 round-constant table — confirm.
 */
117 static const uint64_t keccakf_rndc[24] = {
118 SHA3_CONST(0x0000000000000001UL), SHA3_CONST(0x0000000000008082UL),
119 SHA3_CONST(0x800000000000808aUL), SHA3_CONST(0x8000000080008000UL),
120 SHA3_CONST(0x000000000000808bUL), SHA3_CONST(0x0000000080000001UL),
121 SHA3_CONST(0x8000000080008081UL), SHA3_CONST(0x8000000000008009UL),
122 SHA3_CONST(0x000000000000008aUL), SHA3_CONST(0x0000000000000088UL),
123 SHA3_CONST(0x0000000080008009UL), SHA3_CONST(0x000000008000000aUL),
124 SHA3_CONST(0x000000008000808bUL), SHA3_CONST(0x800000000000008bUL),
125 SHA3_CONST(0x8000000000008089UL), SHA3_CONST(0x8000000000008003UL),
126 SHA3_CONST(0x8000000000008002UL), SHA3_CONST(0x8000000000000080UL),
127 SHA3_CONST(0x000000000000800aUL), SHA3_CONST(0x800000008000000aUL),
128 SHA3_CONST(0x8000000080008081UL), SHA3_CONST(0x8000000000008080UL),
129 SHA3_CONST(0x0000000080000001UL), SHA3_CONST(0x8000000080008008UL)
/* Per-lane rotation offsets (rho step). */
132 static const unsigned int keccakf_rotc[24] = {
133 1, 3, 6, 10, 15, 21, 28, 36, 45, 55, 2, 14, 27, 41, 56, 8, 25, 43, 62,
/* Lane permutation order (pi step). */
137 static const unsigned int keccakf_piln[24] = {
138 10, 7, 11, 17, 18, 3, 5, 16, 8, 21, 24, 4, 15, 23, 19, 13, 12, 2, 20,
/* Classify a symmetric crypto xform chain into a CCP command ordering:
 * auth-only, cipher-only, hash-then-cipher, cipher-then-hash, or combined
 * (AEAD). Returns CCP_CMD_NOT_SUPPORTED for unrecognized chains (the
 * default-return path is elided in this view).
 */
142 static enum ccp_cmd_order
143 ccp_get_cmd_id(const struct rte_crypto_sym_xform *xform)
145 enum ccp_cmd_order res = CCP_CMD_NOT_SUPPORTED;
149 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH) {
150 if (xform->next == NULL)
152 else if (xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
153 return CCP_CMD_HASH_CIPHER;
155 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER) {
156 if (xform->next == NULL)
157 return CCP_CMD_CIPHER;
158 else if (xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
159 return CCP_CMD_CIPHER_HASH;
161 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD)
162 return CCP_CMD_COMBINED;
166 /* partial hash using openssl */
/* Run one SHA-1 compression round over a single input block and copy the
 * raw internal state (not a finalized digest) to data_out; used to
 * precompute HMAC ipad/opad states for the CCP engine.
 */
167 static int partial_hash_sha1(uint8_t *data_in, uint8_t *data_out)
171 if (!SHA1_Init(&ctx))
173 SHA1_Transform(&ctx, data_in);
174 rte_memcpy(data_out, &ctx, SHA_DIGEST_LENGTH);
/* Same as partial_hash_sha1 but for SHA-224: one SHA-256 transform over a
 * single block, copying the raw intermediate state to data_out.
 */
178 static int partial_hash_sha224(uint8_t *data_in, uint8_t *data_out)
182 if (!SHA224_Init(&ctx))
184 SHA256_Transform(&ctx, data_in);
185 rte_memcpy(data_out, &ctx,
186 SHA256_DIGEST_LENGTH);
/* One SHA-256 transform over a single block; raw intermediate state is
 * copied to data_out for HMAC precompute.
 */
190 static int partial_hash_sha256(uint8_t *data_in, uint8_t *data_out)
194 if (!SHA256_Init(&ctx))
196 SHA256_Transform(&ctx, data_in);
197 rte_memcpy(data_out, &ctx,
198 SHA256_DIGEST_LENGTH);
/* One SHA-512 transform with SHA-384 initial state over a single block;
 * raw intermediate state copied to data_out.
 */
202 static int partial_hash_sha384(uint8_t *data_in, uint8_t *data_out)
206 if (!SHA384_Init(&ctx))
208 SHA512_Transform(&ctx, data_in);
209 rte_memcpy(data_out, &ctx,
210 SHA512_DIGEST_LENGTH);
/* One SHA-512 transform over a single block; raw intermediate state copied
 * to data_out for HMAC precompute.
 */
214 static int partial_hash_sha512(uint8_t *data_in, uint8_t *data_out)
218 if (!SHA512_Init(&ctx))
220 SHA512_Transform(&ctx, data_in);
221 rte_memcpy(data_out, &ctx,
222 SHA512_DIGEST_LENGTH);
/* Keccak-f[1600] permutation over the 25-lane (uint64_t) state: 24 rounds
 * of theta, rho+pi, chi, and iota. Several loop bodies are elided in this
 * view; the visible structure matches the reference Keccak implementation.
 */
227 keccakf(uint64_t s[25])
231 #define KECCAK_ROUNDS 24
233 for (round = 0; round < KECCAK_ROUNDS; round++) {
/* theta: column parities */
236 for (i = 0; i < 5; i++)
237 bc[i] = s[i] ^ s[i + 5] ^ s[i + 10] ^ s[i + 15] ^
240 for (i = 0; i < 5; i++) {
241 t = bc[(i + 4) % 5] ^ SHA3_ROTL64(bc[(i + 1) % 5], 1);
242 for (j = 0; j < 25; j += 5)
/* rho + pi: rotate and permute lanes */
248 for (i = 0; i < 24; i++) {
251 s[j] = SHA3_ROTL64(t, keccakf_rotc[i]);
/* chi: nonlinear row mixing */
256 for (j = 0; j < 25; j += 5) {
257 for (i = 0; i < 5; i++)
259 for (i = 0; i < 5; i++)
260 s[j + i] ^= (~bc[(i + 1) % 5]) &
/* iota: inject round constant */
265 s[0] ^= keccakf_rndc[round];
/* Initialize a sha3_context for SHA3-224: zero the state and set the
 * capacity to 2*224 bits expressed in 64-bit words.
 */
270 sha3_Init224(void *priv)
272 sha3_context *ctx = (sha3_context *) priv;
274 memset(ctx, 0, sizeof(*ctx));
275 ctx->capacityWords = 2 * 224 / (8 * sizeof(uint64_t));
/* Initialize a sha3_context for SHA3-256 (capacity = 2*256 bits in words). */
279 sha3_Init256(void *priv)
281 sha3_context *ctx = (sha3_context *) priv;
283 memset(ctx, 0, sizeof(*ctx));
284 ctx->capacityWords = 2 * 256 / (8 * sizeof(uint64_t));
/* Initialize a sha3_context for SHA3-384 (capacity = 2*384 bits in words). */
288 sha3_Init384(void *priv)
290 sha3_context *ctx = (sha3_context *) priv;
292 memset(ctx, 0, sizeof(*ctx));
293 ctx->capacityWords = 2 * 384 / (8 * sizeof(uint64_t));
/* Initialize a sha3_context for SHA3-512 (capacity = 2*512 bits in words). */
297 sha3_Init512(void *priv)
299 sha3_context *ctx = (sha3_context *) priv;
301 memset(ctx, 0, sizeof(*ctx));
302 ctx->capacityWords = 2 * 512 / (8 * sizeof(uint64_t));
306 /* This is simply the 'update' with the padding block.
307 * The padding block is 0x01 || 0x00* || 0x80. First 0x01 and last 0x80
308 * bytes are always present, but they can be the same byte.
/* Absorb len bytes of input into the sponge: finish any partially-buffered
 * word in ctx->saved, XOR full little-endian words into the state (running
 * keccakf whenever the rate portion fills — the keccakf call itself is
 * elided here), then buffer the trailing tail bytes.
 */
311 sha3_Update(void *priv, void const *bufIn, size_t len)
313 sha3_context *ctx = (sha3_context *) priv;
314 unsigned int old_tail = (8 - ctx->byteIndex) & 7;
318 const uint8_t *buf = bufIn;
/* Input too short to complete the buffered word: stash and return. */
320 if (len < old_tail) {
322 ctx->saved |= (uint64_t) (*(buf++)) <<
323 ((ctx->byteIndex++) * 8);
330 ctx->saved |= (uint64_t) (*(buf++)) <<
331 ((ctx->byteIndex++) * 8);
333 ctx->s[ctx->wordIndex] ^= ctx->saved;
336 if (++ctx->wordIndex ==
337 (SHA3_KECCAK_SPONGE_WORDS - ctx->capacityWords)) {
343 words = len / sizeof(uint64_t);
344 tail = len - words * sizeof(uint64_t);
/* Absorb whole 64-bit words, assembled little-endian byte by byte so the
 * code is endian-independent.
 */
346 for (i = 0; i < words; i++, buf += sizeof(uint64_t)) {
347 const uint64_t t = (uint64_t) (buf[0]) |
348 ((uint64_t) (buf[1]) << 8 * 1) |
349 ((uint64_t) (buf[2]) << 8 * 2) |
350 ((uint64_t) (buf[3]) << 8 * 3) |
351 ((uint64_t) (buf[4]) << 8 * 4) |
352 ((uint64_t) (buf[5]) << 8 * 5) |
353 ((uint64_t) (buf[6]) << 8 * 6) |
354 ((uint64_t) (buf[7]) << 8 * 7);
355 ctx->s[ctx->wordIndex] ^= t;
356 if (++ctx->wordIndex ==
357 (SHA3_KECCAK_SPONGE_WORDS - ctx->capacityWords)) {
/* Buffer remaining tail bytes into ctx->saved for the next call. */
364 ctx->saved |= (uint64_t) (*(buf++)) << ((ctx->byteIndex++) * 8);
/* Absorb one SHA3-224 block into a fresh sponge and copy the 200-byte
 * state out in reverse byte order (CCP context layout). Allocation via
 * rte_zmalloc; the error-return and free paths are elided in this view.
 */
367 int partial_hash_sha3_224(uint8_t *data_in, uint8_t *data_out)
372 ctx = rte_zmalloc("sha3-ctx", sizeof(sha3_context), 0);
374 CCP_LOG_ERR("sha3-ctx creation failed");
378 sha3_Update(ctx, data_in, SHA3_224_BLOCK_SIZE);
379 for (i = 0; i < CCP_SHA3_CTX_SIZE; i++, data_out++)
380 *data_out = ctx->sb[CCP_SHA3_CTX_SIZE - i - 1];
/* As partial_hash_sha3_224, but absorbing a SHA3-256 sized block. */
386 int partial_hash_sha3_256(uint8_t *data_in, uint8_t *data_out)
391 ctx = rte_zmalloc("sha3-ctx", sizeof(sha3_context), 0);
393 CCP_LOG_ERR("sha3-ctx creation failed");
397 sha3_Update(ctx, data_in, SHA3_256_BLOCK_SIZE);
398 for (i = 0; i < CCP_SHA3_CTX_SIZE; i++, data_out++)
399 *data_out = ctx->sb[CCP_SHA3_CTX_SIZE - i - 1];
/* As partial_hash_sha3_224, but absorbing a SHA3-384 sized block. */
405 int partial_hash_sha3_384(uint8_t *data_in, uint8_t *data_out)
410 ctx = rte_zmalloc("sha3-ctx", sizeof(sha3_context), 0);
412 CCP_LOG_ERR("sha3-ctx creation failed");
416 sha3_Update(ctx, data_in, SHA3_384_BLOCK_SIZE);
417 for (i = 0; i < CCP_SHA3_CTX_SIZE; i++, data_out++)
418 *data_out = ctx->sb[CCP_SHA3_CTX_SIZE - i - 1];
/* As partial_hash_sha3_224, but absorbing a SHA3-512 sized block. */
424 int partial_hash_sha3_512(uint8_t *data_in, uint8_t *data_out)
429 ctx = rte_zmalloc("sha3-ctx", sizeof(sha3_context), 0);
431 CCP_LOG_ERR("sha3-ctx creation failed");
435 sha3_Update(ctx, data_in, SHA3_512_BLOCK_SIZE);
436 for (i = 0; i < CCP_SHA3_CTX_SIZE; i++, data_out++)
437 *data_out = ctx->sb[CCP_SHA3_CTX_SIZE - i - 1];
/* Precompute HMAC inner/outer hash states for the session: XOR the key
 * with ipad/opad (the code assumes key length equals the algorithm block
 * size — keys are zero-padded by the callers), run one compression round
 * over each pad, and store both states into sess->auth.pre_compute.
 * For SHA1/224/256/384/512 the word order is reversed (CCP expects the
 * state big-endian-reversed); SHA3 variants store raw context bytes.
 * Error-return lines and `break`s are elided in this view.
 */
443 static int generate_partial_hash(struct ccp_session *sess)
446 uint8_t ipad[sess->auth.block_size];
447 uint8_t opad[sess->auth.block_size];
448 uint8_t *ipad_t, *opad_t;
449 uint32_t *hash_value_be32, hash_temp32[8];
450 uint64_t *hash_value_be64, hash_temp64[8];
452 uint8_t *hash_value_sha3;
454 opad_t = ipad_t = (uint8_t *)sess->auth.key;
456 hash_value_be32 = (uint32_t *)((uint8_t *)sess->auth.pre_compute);
457 hash_value_be64 = (uint64_t *)((uint8_t *)sess->auth.pre_compute);
459 /* considering key size is always equal to block size of algorithm */
460 for (i = 0; i < sess->auth.block_size; i++) {
461 ipad[i] = (ipad_t[i] ^ HMAC_IPAD_VALUE);
462 opad[i] = (opad_t[i] ^ HMAC_OPAD_VALUE);
465 switch (sess->auth.algo) {
466 case CCP_AUTH_ALGO_SHA1_HMAC:
467 count = SHA1_DIGEST_SIZE >> 2;
/* inner (ipad) state, word-reversed, at pre_compute[0] */
469 if (partial_hash_sha1(ipad, (uint8_t *)hash_temp32))
471 for (i = 0; i < count; i++, hash_value_be32++)
472 *hash_value_be32 = hash_temp32[count - 1 - i];
/* outer (opad) state at pre_compute + ctx_len */
474 hash_value_be32 = (uint32_t *)((uint8_t *)sess->auth.pre_compute
475 + sess->auth.ctx_len);
476 if (partial_hash_sha1(opad, (uint8_t *)hash_temp32))
478 for (i = 0; i < count; i++, hash_value_be32++)
479 *hash_value_be32 = hash_temp32[count - 1 - i];
481 case CCP_AUTH_ALGO_SHA224_HMAC:
482 count = SHA256_DIGEST_SIZE >> 2;
484 if (partial_hash_sha224(ipad, (uint8_t *)hash_temp32))
486 for (i = 0; i < count; i++, hash_value_be32++)
487 *hash_value_be32 = hash_temp32[count - 1 - i];
489 hash_value_be32 = (uint32_t *)((uint8_t *)sess->auth.pre_compute
490 + sess->auth.ctx_len);
491 if (partial_hash_sha224(opad, (uint8_t *)hash_temp32))
493 for (i = 0; i < count; i++, hash_value_be32++)
494 *hash_value_be32 = hash_temp32[count - 1 - i];
496 case CCP_AUTH_ALGO_SHA3_224_HMAC:
497 hash_value_sha3 = sess->auth.pre_compute;
498 if (partial_hash_sha3_224(ipad, hash_value_sha3))
501 hash_value_sha3 = (uint8_t *)(sess->auth.pre_compute
502 + sess->auth.ctx_len);
503 if (partial_hash_sha3_224(opad, hash_value_sha3))
506 case CCP_AUTH_ALGO_SHA256_HMAC:
507 count = SHA256_DIGEST_SIZE >> 2;
509 if (partial_hash_sha256(ipad, (uint8_t *)hash_temp32))
511 for (i = 0; i < count; i++, hash_value_be32++)
512 *hash_value_be32 = hash_temp32[count - 1 - i];
514 hash_value_be32 = (uint32_t *)((uint8_t *)sess->auth.pre_compute
515 + sess->auth.ctx_len);
516 if (partial_hash_sha256(opad, (uint8_t *)hash_temp32))
518 for (i = 0; i < count; i++, hash_value_be32++)
519 *hash_value_be32 = hash_temp32[count - 1 - i];
521 case CCP_AUTH_ALGO_SHA3_256_HMAC:
522 hash_value_sha3 = sess->auth.pre_compute;
523 if (partial_hash_sha3_256(ipad, hash_value_sha3))
526 hash_value_sha3 = (uint8_t *)(sess->auth.pre_compute
527 + sess->auth.ctx_len);
528 if (partial_hash_sha3_256(opad, hash_value_sha3))
531 case CCP_AUTH_ALGO_SHA384_HMAC:
532 count = SHA512_DIGEST_SIZE >> 3;
534 if (partial_hash_sha384(ipad, (uint8_t *)hash_temp64))
536 for (i = 0; i < count; i++, hash_value_be64++)
537 *hash_value_be64 = hash_temp64[count - 1 - i];
539 hash_value_be64 = (uint64_t *)((uint8_t *)sess->auth.pre_compute
540 + sess->auth.ctx_len);
541 if (partial_hash_sha384(opad, (uint8_t *)hash_temp64))
543 for (i = 0; i < count; i++, hash_value_be64++)
544 *hash_value_be64 = hash_temp64[count - 1 - i];
546 case CCP_AUTH_ALGO_SHA3_384_HMAC:
547 hash_value_sha3 = sess->auth.pre_compute;
548 if (partial_hash_sha3_384(ipad, hash_value_sha3))
551 hash_value_sha3 = (uint8_t *)(sess->auth.pre_compute
552 + sess->auth.ctx_len);
553 if (partial_hash_sha3_384(opad, hash_value_sha3))
556 case CCP_AUTH_ALGO_SHA512_HMAC:
557 count = SHA512_DIGEST_SIZE >> 3;
559 if (partial_hash_sha512(ipad, (uint8_t *)hash_temp64))
561 for (i = 0; i < count; i++, hash_value_be64++)
562 *hash_value_be64 = hash_temp64[count - 1 - i];
564 hash_value_be64 = (uint64_t *)((uint8_t *)sess->auth.pre_compute
565 + sess->auth.ctx_len);
566 if (partial_hash_sha512(opad, (uint8_t *)hash_temp64))
568 for (i = 0; i < count; i++, hash_value_be64++)
569 *hash_value_be64 = hash_temp64[count - 1 - i];
571 case CCP_AUTH_ALGO_SHA3_512_HMAC:
572 hash_value_sha3 = sess->auth.pre_compute;
573 if (partial_hash_sha3_512(ipad, hash_value_sha3))
576 hash_value_sha3 = (uint8_t *)(sess->auth.pre_compute
577 + sess->auth.ctx_len);
578 if (partial_hash_sha3_512(opad, hash_value_sha3))
582 CCP_LOG_ERR("Invalid auth algo");
587 /* prepare temporary keys K1 and K2 */
/* CMAC subkey derivation (NIST SP 800-38B): left-shift the bl-byte block l
 * into k with carry propagation; if the dropped MSB was set, XOR the final
 * byte with the field constant R (0x87 for 16-byte / AES blocks, 0x1b
 * otherwise).
 */
588 static void prepare_key(unsigned char *k, unsigned char *l, int bl)
591 /* Shift block to left, including carry */
592 for (i = 0; i < bl; i++) {
594 if (i < bl - 1 && l[i + 1] & 0x80)
597 /* If MSB set fixup with R */
599 k[bl - 1] ^= bl == 16 ? 0x87 : 0x1b;
602 /* subkeys K1 and K2 generation for CMAC */
/* Derive CMAC subkeys K1/K2 per SP 800-38B: AES-encrypt a zero block with
 * the session key via OpenSSL EVP (CBC mode with a zero IV is equivalent
 * to ECB for one block), derive K1 from the result and K2 from K1 with
 * prepare_key(), and store both byte-reversed into sess->auth.pre_compute
 * (CCP storage-block layout). On any EVP failure jumps to the cleanup
 * label; some return/free lines are elided in this view.
 */
604 generate_cmac_subkeys(struct ccp_session *sess)
606 const EVP_CIPHER *algo;
608 unsigned char *ccp_ctx;
611 unsigned char zero_iv[AES_BLOCK_SIZE] = {0};
612 unsigned char dst[2 * AES_BLOCK_SIZE] = {0};
613 unsigned char k1[AES_BLOCK_SIZE] = {0};
614 unsigned char k2[AES_BLOCK_SIZE] = {0};
616 if (sess->auth.ut.aes_type == CCP_AES_TYPE_128)
617 algo = EVP_aes_128_cbc();
618 else if (sess->auth.ut.aes_type == CCP_AES_TYPE_192)
619 algo = EVP_aes_192_cbc();
620 else if (sess->auth.ut.aes_type == CCP_AES_TYPE_256)
621 algo = EVP_aes_256_cbc();
623 CCP_LOG_ERR("Invalid CMAC type length");
627 ctx = EVP_CIPHER_CTX_new();
629 CCP_LOG_ERR("ctx creation failed");
632 if (EVP_EncryptInit(ctx, algo, (unsigned char *)sess->auth.key,
633 (unsigned char *)zero_iv) <= 0)
634 goto key_generate_err;
635 if (EVP_CIPHER_CTX_set_padding(ctx, 0) <= 0)
636 goto key_generate_err;
637 if (EVP_EncryptUpdate(ctx, dst, &dstlen, zero_iv,
638 AES_BLOCK_SIZE) <= 0)
639 goto key_generate_err;
640 if (EVP_EncryptFinal_ex(ctx, dst + dstlen, &totlen) <= 0)
641 goto key_generate_err;
643 memset(sess->auth.pre_compute, 0, CCP_SB_BYTES * 2);
/* K1: derived from E_K(0), stored byte-reversed in the first SB. */
645 ccp_ctx = (unsigned char *)(sess->auth.pre_compute + CCP_SB_BYTES - 1);
646 prepare_key(k1, dst, AES_BLOCK_SIZE);
647 for (i = 0; i < AES_BLOCK_SIZE; i++, ccp_ctx--)
/* K2: derived from K1, stored byte-reversed in the second SB. */
650 ccp_ctx = (unsigned char *)(sess->auth.pre_compute +
651 (2 * CCP_SB_BYTES) - 1);
652 prepare_key(k2, k1, AES_BLOCK_SIZE);
653 for (i = 0; i < AES_BLOCK_SIZE; i++, ccp_ctx--)
656 EVP_CIPHER_CTX_free(ctx);
661 CCP_LOG_ERR("CMAC Init failed");
665 /* configure session */
/* Populate the cipher half of a CCP session from a cipher xform: direction,
 * key, IV offset/length, algorithm/engine selection, key-length validation,
 * and byte-reversal of the key into key_ccp (CCP expects reversed key
 * bytes). Finally resolves physical/IOVA addresses for nonce and key
 * depending on the global iommu_mode. Break statements and the return are
 * elided in this view.
 */
667 ccp_configure_session_cipher(struct ccp_session *sess,
668 const struct rte_crypto_sym_xform *xform)
670 const struct rte_crypto_cipher_xform *cipher_xform = NULL;
673 cipher_xform = &xform->cipher;
675 /* set cipher direction */
676 if (cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
677 sess->cipher.dir = CCP_CIPHER_DIR_ENCRYPT;
679 sess->cipher.dir = CCP_CIPHER_DIR_DECRYPT;
682 sess->cipher.key_length = cipher_xform->key.length;
683 rte_memcpy(sess->cipher.key, cipher_xform->key.data,
684 cipher_xform->key.length);
686 /* set iv parameters */
687 sess->iv.offset = cipher_xform->iv.offset;
688 sess->iv.length = cipher_xform->iv.length;
690 switch (cipher_xform->algo) {
691 case RTE_CRYPTO_CIPHER_AES_CTR:
692 sess->cipher.algo = CCP_CIPHER_ALGO_AES_CTR;
693 sess->cipher.um.aes_mode = CCP_AES_MODE_CTR;
694 sess->cipher.engine = CCP_ENGINE_AES;
696 case RTE_CRYPTO_CIPHER_AES_ECB:
/* NOTE(review): ECB reuses the CBC algo id with ECB aes_mode — confirm
 * this is intentional in the full source. */
697 sess->cipher.algo = CCP_CIPHER_ALGO_AES_CBC;
698 sess->cipher.um.aes_mode = CCP_AES_MODE_ECB;
699 sess->cipher.engine = CCP_ENGINE_AES;
701 case RTE_CRYPTO_CIPHER_AES_CBC:
702 sess->cipher.algo = CCP_CIPHER_ALGO_AES_CBC;
703 sess->cipher.um.aes_mode = CCP_AES_MODE_CBC;
704 sess->cipher.engine = CCP_ENGINE_AES;
706 case RTE_CRYPTO_CIPHER_3DES_CBC:
707 sess->cipher.algo = CCP_CIPHER_ALGO_3DES_CBC;
708 sess->cipher.um.des_mode = CCP_DES_MODE_CBC;
709 sess->cipher.engine = CCP_ENGINE_3DES;
712 CCP_LOG_ERR("Unsupported cipher algo");
717 switch (sess->cipher.engine) {
719 if (sess->cipher.key_length == 16)
720 sess->cipher.ut.aes_type = CCP_AES_TYPE_128;
721 else if (sess->cipher.key_length == 24)
722 sess->cipher.ut.aes_type = CCP_AES_TYPE_192;
723 else if (sess->cipher.key_length == 32)
724 sess->cipher.ut.aes_type = CCP_AES_TYPE_256;
726 CCP_LOG_ERR("Invalid cipher key length");
/* CCP wants the AES key byte-reversed. */
729 for (i = 0; i < sess->cipher.key_length ; i++)
730 sess->cipher.key_ccp[sess->cipher.key_length - i - 1] =
733 case CCP_ENGINE_3DES:
734 if (sess->cipher.key_length == 16)
735 sess->cipher.ut.des_type = CCP_DES_TYPE_128;
736 else if (sess->cipher.key_length == 24)
737 sess->cipher.ut.des_type = CCP_DES_TYPE_192;
739 CCP_LOG_ERR("Invalid cipher key length");
/* 3DES: reverse each 8-byte DES key independently. */
742 for (j = 0, x = 0; j < sess->cipher.key_length/8; j++, x += 8)
743 for (i = 0; i < 8; i++)
744 sess->cipher.key_ccp[(8 + x) - i - 1] =
745 sess->cipher.key[i + x];
748 CCP_LOG_ERR("Invalid CCP Engine");
/* iommu_mode == 2 means IOVA-as-VA; otherwise use physical addresses. */
751 if (iommu_mode == 2) {
752 sess->cipher.nonce_phys = rte_mem_virt2iova(sess->cipher.nonce);
753 sess->cipher.key_phys = rte_mem_virt2iova(sess->cipher.key_ccp);
755 sess->cipher.nonce_phys = rte_mem_virt2phy(sess->cipher.nonce);
756 sess->cipher.key_phys = rte_mem_virt2phy(sess->cipher.key_ccp);
/* Populate the auth half of a CCP session from an auth xform. For each
 * supported algorithm it sets engine/algo/type, context length, digest
 * offset, block size, and key; for HMAC variants it validates the key
 * length against the block size, zero-pads the key, and precomputes the
 * ipad/opad states via generate_partial_hash(). When sess->auth_opt is set
 * the HMAC is computed in software (no CCP SHA engine fields). AES-CMAC
 * additionally derives K1/K2 subkeys. Break statements, error returns, and
 * the final return are elided in this view.
 */
762 ccp_configure_session_auth(struct ccp_session *sess,
763 const struct rte_crypto_sym_xform *xform)
765 const struct rte_crypto_auth_xform *auth_xform = NULL;
768 auth_xform = &xform->auth;
770 sess->auth.digest_length = auth_xform->digest_length;
771 if (auth_xform->op == RTE_CRYPTO_AUTH_OP_GENERATE)
772 sess->auth.op = CCP_AUTH_OP_GENERATE;
774 sess->auth.op = CCP_AUTH_OP_VERIFY;
775 switch (auth_xform->algo) {
776 case RTE_CRYPTO_AUTH_MD5_HMAC:
/* MD5-HMAC is only available via the software (auth_opt) path. */
777 if (sess->auth_opt) {
778 sess->auth.algo = CCP_AUTH_ALGO_MD5_HMAC;
779 sess->auth.offset = ((CCP_SB_BYTES << 1) -
781 sess->auth.key_length = auth_xform->key.length;
782 sess->auth.block_size = MD5_BLOCK_SIZE;
783 memset(sess->auth.key, 0, sess->auth.block_size);
784 rte_memcpy(sess->auth.key, auth_xform->key.data,
785 auth_xform->key.length);
787 return -1; /* HMAC MD5 not supported on CCP */
789 case RTE_CRYPTO_AUTH_SHA1:
790 sess->auth.engine = CCP_ENGINE_SHA;
791 sess->auth.algo = CCP_AUTH_ALGO_SHA1;
792 sess->auth.ut.sha_type = CCP_SHA_TYPE_1;
793 sess->auth.ctx = (void *)ccp_sha1_init;
794 sess->auth.ctx_len = CCP_SB_BYTES;
795 sess->auth.offset = CCP_SB_BYTES - SHA1_DIGEST_SIZE;
796 rte_memcpy(sha_ctx, sess->auth.ctx, SHA_COMMON_DIGEST_SIZE);
798 case RTE_CRYPTO_AUTH_SHA1_HMAC:
799 if (sess->auth_opt) {
800 if (auth_xform->key.length > SHA1_BLOCK_SIZE)
802 sess->auth.algo = CCP_AUTH_ALGO_SHA1_HMAC;
803 sess->auth.offset = CCP_SB_BYTES - SHA1_DIGEST_SIZE;
804 sess->auth.block_size = SHA1_BLOCK_SIZE;
805 sess->auth.key_length = auth_xform->key.length;
806 memset(sess->auth.key, 0, sess->auth.block_size);
807 rte_memcpy(sess->auth.key, auth_xform->key.data,
808 auth_xform->key.length);
810 if (auth_xform->key.length > SHA1_BLOCK_SIZE)
812 sess->auth.engine = CCP_ENGINE_SHA;
813 sess->auth.algo = CCP_AUTH_ALGO_SHA1_HMAC;
814 sess->auth.ut.sha_type = CCP_SHA_TYPE_1;
815 sess->auth.ctx_len = CCP_SB_BYTES;
816 sess->auth.offset = CCP_SB_BYTES - SHA1_DIGEST_SIZE;
817 sess->auth.block_size = SHA1_BLOCK_SIZE;
818 sess->auth.key_length = auth_xform->key.length;
819 memset(sess->auth.key, 0, sess->auth.block_size);
820 memset(sess->auth.pre_compute, 0,
821 sess->auth.ctx_len << 1);
822 rte_memcpy(sess->auth.key, auth_xform->key.data,
823 auth_xform->key.length);
824 if (generate_partial_hash(sess))
828 case RTE_CRYPTO_AUTH_SHA224:
829 sess->auth.algo = CCP_AUTH_ALGO_SHA224;
830 sess->auth.engine = CCP_ENGINE_SHA;
831 sess->auth.ut.sha_type = CCP_SHA_TYPE_224;
832 sess->auth.ctx = (void *)ccp_sha224_init;
833 sess->auth.ctx_len = CCP_SB_BYTES;
834 sess->auth.offset = CCP_SB_BYTES - SHA224_DIGEST_SIZE;
835 rte_memcpy(sha_ctx, sess->auth.ctx, SHA256_DIGEST_SIZE);
837 case RTE_CRYPTO_AUTH_SHA224_HMAC:
838 if (sess->auth_opt) {
839 if (auth_xform->key.length > SHA224_BLOCK_SIZE)
841 sess->auth.algo = CCP_AUTH_ALGO_SHA224_HMAC;
842 sess->auth.offset = CCP_SB_BYTES - SHA224_DIGEST_SIZE;
843 sess->auth.block_size = SHA224_BLOCK_SIZE;
844 sess->auth.key_length = auth_xform->key.length;
845 memset(sess->auth.key, 0, sess->auth.block_size);
846 rte_memcpy(sess->auth.key, auth_xform->key.data,
847 auth_xform->key.length);
849 if (auth_xform->key.length > SHA224_BLOCK_SIZE)
851 sess->auth.algo = CCP_AUTH_ALGO_SHA224_HMAC;
852 sess->auth.engine = CCP_ENGINE_SHA;
853 sess->auth.ut.sha_type = CCP_SHA_TYPE_224;
854 sess->auth.ctx_len = CCP_SB_BYTES;
855 sess->auth.offset = CCP_SB_BYTES - SHA224_DIGEST_SIZE;
856 sess->auth.block_size = SHA224_BLOCK_SIZE;
857 sess->auth.key_length = auth_xform->key.length;
858 memset(sess->auth.key, 0, sess->auth.block_size);
859 memset(sess->auth.pre_compute, 0,
860 sess->auth.ctx_len << 1);
861 rte_memcpy(sess->auth.key, auth_xform->key.data,
862 auth_xform->key.length);
863 if (generate_partial_hash(sess))
867 case RTE_CRYPTO_AUTH_SHA3_224:
868 sess->auth.algo = CCP_AUTH_ALGO_SHA3_224;
869 sess->auth.engine = CCP_ENGINE_SHA;
870 sess->auth.ut.sha_type = CCP_SHA3_TYPE_224;
871 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
872 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA224_DIGEST_SIZE;
874 case RTE_CRYPTO_AUTH_SHA3_224_HMAC:
875 if (auth_xform->key.length > SHA3_224_BLOCK_SIZE)
877 sess->auth.algo = CCP_AUTH_ALGO_SHA3_224_HMAC;
878 sess->auth.engine = CCP_ENGINE_SHA;
879 sess->auth.ut.sha_type = CCP_SHA3_TYPE_224;
880 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
881 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA224_DIGEST_SIZE;
882 sess->auth.block_size = SHA3_224_BLOCK_SIZE;
883 sess->auth.key_length = auth_xform->key.length;
884 memset(sess->auth.key, 0, sess->auth.block_size);
885 memset(sess->auth.pre_compute, 0, 2 * sess->auth.ctx_len);
886 rte_memcpy(sess->auth.key, auth_xform->key.data,
887 auth_xform->key.length);
888 if (generate_partial_hash(sess))
891 case RTE_CRYPTO_AUTH_SHA256:
892 sess->auth.algo = CCP_AUTH_ALGO_SHA256;
893 sess->auth.engine = CCP_ENGINE_SHA;
894 sess->auth.ut.sha_type = CCP_SHA_TYPE_256;
895 sess->auth.ctx = (void *)ccp_sha256_init;
896 sess->auth.ctx_len = CCP_SB_BYTES;
897 sess->auth.offset = CCP_SB_BYTES - SHA256_DIGEST_SIZE;
898 rte_memcpy(sha_ctx, sess->auth.ctx, SHA256_DIGEST_SIZE);
900 case RTE_CRYPTO_AUTH_SHA256_HMAC:
901 if (sess->auth_opt) {
902 if (auth_xform->key.length > SHA256_BLOCK_SIZE)
904 sess->auth.algo = CCP_AUTH_ALGO_SHA256_HMAC;
905 sess->auth.offset = CCP_SB_BYTES - SHA256_DIGEST_SIZE;
906 sess->auth.block_size = SHA256_BLOCK_SIZE;
907 sess->auth.key_length = auth_xform->key.length;
908 memset(sess->auth.key, 0, sess->auth.block_size);
909 rte_memcpy(sess->auth.key, auth_xform->key.data,
910 auth_xform->key.length);
912 if (auth_xform->key.length > SHA256_BLOCK_SIZE)
914 sess->auth.algo = CCP_AUTH_ALGO_SHA256_HMAC;
915 sess->auth.engine = CCP_ENGINE_SHA;
916 sess->auth.ut.sha_type = CCP_SHA_TYPE_256;
917 sess->auth.ctx_len = CCP_SB_BYTES;
918 sess->auth.offset = CCP_SB_BYTES - SHA256_DIGEST_SIZE;
919 sess->auth.block_size = SHA256_BLOCK_SIZE;
920 sess->auth.key_length = auth_xform->key.length;
921 memset(sess->auth.key, 0, sess->auth.block_size);
922 memset(sess->auth.pre_compute, 0,
923 sess->auth.ctx_len << 1);
924 rte_memcpy(sess->auth.key, auth_xform->key.data,
925 auth_xform->key.length);
926 if (generate_partial_hash(sess))
930 case RTE_CRYPTO_AUTH_SHA3_256:
931 sess->auth.algo = CCP_AUTH_ALGO_SHA3_256;
932 sess->auth.engine = CCP_ENGINE_SHA;
933 sess->auth.ut.sha_type = CCP_SHA3_TYPE_256;
934 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
935 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA256_DIGEST_SIZE;
937 case RTE_CRYPTO_AUTH_SHA3_256_HMAC:
938 if (auth_xform->key.length > SHA3_256_BLOCK_SIZE)
940 sess->auth.algo = CCP_AUTH_ALGO_SHA3_256_HMAC;
941 sess->auth.engine = CCP_ENGINE_SHA;
942 sess->auth.ut.sha_type = CCP_SHA3_TYPE_256;
943 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
944 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA256_DIGEST_SIZE;
945 sess->auth.block_size = SHA3_256_BLOCK_SIZE;
946 sess->auth.key_length = auth_xform->key.length;
947 memset(sess->auth.key, 0, sess->auth.block_size);
948 memset(sess->auth.pre_compute, 0, 2 * sess->auth.ctx_len);
949 rte_memcpy(sess->auth.key, auth_xform->key.data,
950 auth_xform->key.length);
951 if (generate_partial_hash(sess))
954 case RTE_CRYPTO_AUTH_SHA384:
955 sess->auth.algo = CCP_AUTH_ALGO_SHA384;
956 sess->auth.engine = CCP_ENGINE_SHA;
957 sess->auth.ut.sha_type = CCP_SHA_TYPE_384;
958 sess->auth.ctx = (void *)ccp_sha384_init;
959 sess->auth.ctx_len = CCP_SB_BYTES << 1;
960 sess->auth.offset = (CCP_SB_BYTES << 1) - SHA384_DIGEST_SIZE;
961 rte_memcpy(sha_ctx, sess->auth.ctx, SHA512_DIGEST_SIZE);
963 case RTE_CRYPTO_AUTH_SHA384_HMAC:
964 if (sess->auth_opt) {
965 if (auth_xform->key.length > SHA384_BLOCK_SIZE)
967 sess->auth.algo = CCP_AUTH_ALGO_SHA384_HMAC;
968 sess->auth.offset = ((CCP_SB_BYTES << 1) -
970 sess->auth.block_size = SHA384_BLOCK_SIZE;
971 sess->auth.key_length = auth_xform->key.length;
972 memset(sess->auth.key, 0, sess->auth.block_size);
973 rte_memcpy(sess->auth.key, auth_xform->key.data,
974 auth_xform->key.length);
976 if (auth_xform->key.length > SHA384_BLOCK_SIZE)
978 sess->auth.algo = CCP_AUTH_ALGO_SHA384_HMAC;
979 sess->auth.engine = CCP_ENGINE_SHA;
980 sess->auth.ut.sha_type = CCP_SHA_TYPE_384;
981 sess->auth.ctx_len = CCP_SB_BYTES << 1;
982 sess->auth.offset = ((CCP_SB_BYTES << 1) -
984 sess->auth.block_size = SHA384_BLOCK_SIZE;
985 sess->auth.key_length = auth_xform->key.length;
986 memset(sess->auth.key, 0, sess->auth.block_size);
987 memset(sess->auth.pre_compute, 0,
988 sess->auth.ctx_len << 1);
989 rte_memcpy(sess->auth.key, auth_xform->key.data,
990 auth_xform->key.length);
991 if (generate_partial_hash(sess))
995 case RTE_CRYPTO_AUTH_SHA3_384:
996 sess->auth.algo = CCP_AUTH_ALGO_SHA3_384;
997 sess->auth.engine = CCP_ENGINE_SHA;
998 sess->auth.ut.sha_type = CCP_SHA3_TYPE_384;
999 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
1000 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA384_DIGEST_SIZE;
1002 case RTE_CRYPTO_AUTH_SHA3_384_HMAC:
1003 if (auth_xform->key.length > SHA3_384_BLOCK_SIZE)
1005 sess->auth.algo = CCP_AUTH_ALGO_SHA3_384_HMAC;
1006 sess->auth.engine = CCP_ENGINE_SHA;
1007 sess->auth.ut.sha_type = CCP_SHA3_TYPE_384;
1008 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
1009 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA384_DIGEST_SIZE;
1010 sess->auth.block_size = SHA3_384_BLOCK_SIZE;
1011 sess->auth.key_length = auth_xform->key.length;
1012 memset(sess->auth.key, 0, sess->auth.block_size);
1013 memset(sess->auth.pre_compute, 0, 2 * sess->auth.ctx_len);
1014 rte_memcpy(sess->auth.key, auth_xform->key.data,
1015 auth_xform->key.length);
1016 if (generate_partial_hash(sess))
1019 case RTE_CRYPTO_AUTH_SHA512:
1020 sess->auth.algo = CCP_AUTH_ALGO_SHA512;
1021 sess->auth.engine = CCP_ENGINE_SHA;
1022 sess->auth.ut.sha_type = CCP_SHA_TYPE_512;
1023 sess->auth.ctx = (void *)ccp_sha512_init;
1024 sess->auth.ctx_len = CCP_SB_BYTES << 1;
1025 sess->auth.offset = (CCP_SB_BYTES << 1) - SHA512_DIGEST_SIZE;
1026 rte_memcpy(sha_ctx, sess->auth.ctx, SHA512_DIGEST_SIZE);
1028 case RTE_CRYPTO_AUTH_SHA512_HMAC:
1029 if (sess->auth_opt) {
1030 if (auth_xform->key.length > SHA512_BLOCK_SIZE)
1032 sess->auth.algo = CCP_AUTH_ALGO_SHA512_HMAC;
1033 sess->auth.offset = ((CCP_SB_BYTES << 1) -
1034 SHA512_DIGEST_SIZE);
1035 sess->auth.block_size = SHA512_BLOCK_SIZE;
1036 sess->auth.key_length = auth_xform->key.length;
1037 memset(sess->auth.key, 0, sess->auth.block_size);
1038 rte_memcpy(sess->auth.key, auth_xform->key.data,
1039 auth_xform->key.length);
1041 if (auth_xform->key.length > SHA512_BLOCK_SIZE)
1043 sess->auth.algo = CCP_AUTH_ALGO_SHA512_HMAC;
1044 sess->auth.engine = CCP_ENGINE_SHA;
1045 sess->auth.ut.sha_type = CCP_SHA_TYPE_512;
1046 sess->auth.ctx_len = CCP_SB_BYTES << 1;
1047 sess->auth.offset = ((CCP_SB_BYTES << 1) -
1048 SHA512_DIGEST_SIZE);
1049 sess->auth.block_size = SHA512_BLOCK_SIZE;
1050 sess->auth.key_length = auth_xform->key.length;
1051 memset(sess->auth.key, 0, sess->auth.block_size);
1052 memset(sess->auth.pre_compute, 0,
1053 sess->auth.ctx_len << 1);
1054 rte_memcpy(sess->auth.key, auth_xform->key.data,
1055 auth_xform->key.length);
1056 if (generate_partial_hash(sess))
1060 case RTE_CRYPTO_AUTH_SHA3_512:
1061 sess->auth.algo = CCP_AUTH_ALGO_SHA3_512;
1062 sess->auth.engine = CCP_ENGINE_SHA;
1063 sess->auth.ut.sha_type = CCP_SHA3_TYPE_512;
1064 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
1065 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA512_DIGEST_SIZE;
1067 case RTE_CRYPTO_AUTH_SHA3_512_HMAC:
1068 if (auth_xform->key.length > SHA3_512_BLOCK_SIZE)
1070 sess->auth.algo = CCP_AUTH_ALGO_SHA3_512_HMAC;
1071 sess->auth.engine = CCP_ENGINE_SHA;
1072 sess->auth.ut.sha_type = CCP_SHA3_TYPE_512;
1073 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
1074 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA512_DIGEST_SIZE;
1075 sess->auth.block_size = SHA3_512_BLOCK_SIZE;
1076 sess->auth.key_length = auth_xform->key.length;
1077 memset(sess->auth.key, 0, sess->auth.block_size);
1078 memset(sess->auth.pre_compute, 0, 2 * sess->auth.ctx_len);
1079 rte_memcpy(sess->auth.key, auth_xform->key.data,
1080 auth_xform->key.length);
1081 if (generate_partial_hash(sess))
1084 case RTE_CRYPTO_AUTH_AES_CMAC:
1085 sess->auth.algo = CCP_AUTH_ALGO_AES_CMAC;
1086 sess->auth.engine = CCP_ENGINE_AES;
1087 sess->auth.um.aes_mode = CCP_AES_MODE_CMAC;
1088 sess->auth.key_length = auth_xform->key.length;
1089 /* padding and hash result */
1090 sess->auth.ctx_len = CCP_SB_BYTES << 1;
1091 sess->auth.offset = AES_BLOCK_SIZE;
1092 sess->auth.block_size = AES_BLOCK_SIZE;
1093 if (sess->auth.key_length == 16)
1094 sess->auth.ut.aes_type = CCP_AES_TYPE_128;
1095 else if (sess->auth.key_length == 24)
1096 sess->auth.ut.aes_type = CCP_AES_TYPE_192;
1097 else if (sess->auth.key_length == 32)
1098 sess->auth.ut.aes_type = CCP_AES_TYPE_256;
1100 CCP_LOG_ERR("Invalid CMAC key length");
1103 rte_memcpy(sess->auth.key, auth_xform->key.data,
1104 sess->auth.key_length);
1105 for (i = 0; i < sess->auth.key_length; i++)
1106 sess->auth.key_ccp[sess->auth.key_length - i - 1] =
1108 if (generate_cmac_subkeys(sess))
1112 CCP_LOG_ERR("Unsupported hash algo");
/* Populate both cipher and auth halves of the session from an AEAD xform.
 * Only AES-GCM is supported: GCTR engine for the cipher part, GHASH for
 * the auth part, with the AES key byte-reversed into key_ccp. Resolves
 * nonce/key physical or IOVA addresses per the global iommu_mode. Break
 * statements and the return value are elided in this view.
 */
1119 ccp_configure_session_aead(struct ccp_session *sess,
1120 const struct rte_crypto_sym_xform *xform)
1122 const struct rte_crypto_aead_xform *aead_xform = NULL;
1125 aead_xform = &xform->aead;
1127 sess->cipher.key_length = aead_xform->key.length;
1128 rte_memcpy(sess->cipher.key, aead_xform->key.data,
1129 aead_xform->key.length);
1131 if (aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
1132 sess->cipher.dir = CCP_CIPHER_DIR_ENCRYPT;
1133 sess->auth.op = CCP_AUTH_OP_GENERATE;
1135 sess->cipher.dir = CCP_CIPHER_DIR_DECRYPT;
1136 sess->auth.op = CCP_AUTH_OP_VERIFY;
1138 sess->aead_algo = aead_xform->algo;
1139 sess->auth.aad_length = aead_xform->aad_length;
1140 sess->auth.digest_length = aead_xform->digest_length;
1142 /* set iv parameters */
1143 sess->iv.offset = aead_xform->iv.offset;
1144 sess->iv.length = aead_xform->iv.length;
1146 switch (aead_xform->algo) {
1147 case RTE_CRYPTO_AEAD_AES_GCM:
1148 sess->cipher.algo = CCP_CIPHER_ALGO_AES_GCM;
1149 sess->cipher.um.aes_mode = CCP_AES_MODE_GCTR;
1150 sess->cipher.engine = CCP_ENGINE_AES;
1151 if (sess->cipher.key_length == 16)
1152 sess->cipher.ut.aes_type = CCP_AES_TYPE_128;
1153 else if (sess->cipher.key_length == 24)
1154 sess->cipher.ut.aes_type = CCP_AES_TYPE_192;
1155 else if (sess->cipher.key_length == 32)
1156 sess->cipher.ut.aes_type = CCP_AES_TYPE_256;
1158 CCP_LOG_ERR("Invalid aead key length");
/* CCP wants the AES key byte-reversed. */
1161 for (i = 0; i < sess->cipher.key_length; i++)
1162 sess->cipher.key_ccp[sess->cipher.key_length - i - 1] =
1163 sess->cipher.key[i];
1164 sess->auth.algo = CCP_AUTH_ALGO_AES_GCM;
1165 sess->auth.engine = CCP_ENGINE_AES;
1166 sess->auth.um.aes_mode = CCP_AES_MODE_GHASH;
1167 sess->auth.ctx_len = CCP_SB_BYTES;
1168 sess->auth.offset = 0;
1169 sess->auth.block_size = AES_BLOCK_SIZE;
1170 sess->cmd_id = CCP_CMD_COMBINED;
1173 CCP_LOG_ERR("Unsupported aead algo");
/* iommu_mode == 2 means IOVA-as-VA; otherwise use physical addresses. */
1176 if (iommu_mode == 2) {
1177 sess->cipher.nonce_phys = rte_mem_virt2iova(sess->cipher.nonce);
1178 sess->cipher.key_phys = rte_mem_virt2iova(sess->cipher.key_ccp);
1180 sess->cipher.nonce_phys = rte_mem_virt2phy(sess->cipher.nonce);
1181 sess->cipher.key_phys = rte_mem_virt2phy(sess->cipher.key_ccp);
/*
 * Top-level session setup: classify the xform chain via ccp_get_cmd_id(),
 * pick out the cipher/auth/aead sub-xforms for the resulting command
 * kind, then delegate to the per-category configure helpers.
 * NOTE(review): break/return statements are elided from this listing.
 */
1187 ccp_set_session_parameters(struct ccp_session *sess,
1188 const struct rte_crypto_sym_xform *xform,
1189 struct ccp_private *internals)
1191 const struct rte_crypto_sym_xform *cipher_xform = NULL;
1192 const struct rte_crypto_sym_xform *auth_xform = NULL;
1193 const struct rte_crypto_sym_xform *aead_xform = NULL;
/* auth_opt selects CPU (OpenSSL) auth instead of the CCP engine. */
1196 sess->auth_opt = internals->auth_opt;
1197 sess->cmd_id = ccp_get_cmd_id(xform);
1199 switch (sess->cmd_id) {
1200 case CCP_CMD_CIPHER:
1201 cipher_xform = xform;
1206 case CCP_CMD_CIPHER_HASH:
1207 cipher_xform = xform;
1208 auth_xform = xform->next;
1210 case CCP_CMD_HASH_CIPHER:
1212 cipher_xform = xform->next;
1214 case CCP_CMD_COMBINED:
1218 CCP_LOG_ERR("Unsupported cmd_id");
1222 /* Default IV length = 0 */
1223 sess->iv.length = 0;
/* Each configure helper is invoked only when its xform was selected. */
1225 ret = ccp_configure_session_cipher(sess, cipher_xform);
1227 CCP_LOG_ERR("Invalid/unsupported cipher parameters");
1232 ret = ccp_configure_session_auth(sess, auth_xform);
1234 CCP_LOG_ERR("Invalid/unsupported auth parameters");
1239 ret = ccp_configure_session_aead(sess, aead_xform);
1241 CCP_LOG_ERR("Invalid/unsupported aead parameters");
1248 /* calculate CCP descriptors requirement */
/*
 * Return the number of CCP queue descriptors a cipher op of this
 * session will consume (the count assignments themselves are elided
 * from this listing; the inline comments state the intended counts).
 */
1250 ccp_cipher_slot(struct ccp_session *session)
1254 switch (session->cipher.algo) {
1255 case CCP_CIPHER_ALGO_AES_CBC:
1257 /**< op + passthrough for iv */
1259 case CCP_CIPHER_ALGO_AES_ECB:
1263 case CCP_CIPHER_ALGO_AES_CTR:
1265 /**< op + passthrough for iv */
1267 case CCP_CIPHER_ALGO_3DES_CBC:
1269 /**< op + passthrough for iv */
1272 CCP_LOG_ERR("Unsupported cipher algo %d",
1273 session->cipher.algo);
/*
 * Return the number of CCP queue descriptors an auth op of this session
 * will consume.  HMAC variants need the multi-step precompute sequence
 * described inline; the literal count assignments are elided from this
 * listing.
 */
1279 ccp_auth_slot(struct ccp_session *session)
1283 switch (session->auth.algo) {
1284 case CCP_AUTH_ALGO_SHA1:
1285 case CCP_AUTH_ALGO_SHA224:
1286 case CCP_AUTH_ALGO_SHA256:
1287 case CCP_AUTH_ALGO_SHA384:
1288 case CCP_AUTH_ALGO_SHA512:
1290 /**< op + lsb passthrough cpy to/from*/
1292 case CCP_AUTH_ALGO_MD5_HMAC:
/* auth_opt != 0 routes HMAC to the CPU path, needing no descriptors. */
1294 case CCP_AUTH_ALGO_SHA1_HMAC:
1295 case CCP_AUTH_ALGO_SHA224_HMAC:
1296 case CCP_AUTH_ALGO_SHA256_HMAC:
1297 if (session->auth_opt == 0)
1300 case CCP_AUTH_ALGO_SHA384_HMAC:
1301 case CCP_AUTH_ALGO_SHA512_HMAC:
1303 * 1. Load PHash1 = H(k ^ ipad); to LSB
1304 * 2. generate IHash = H(hash on message with PHash1
1306 * 3. Retrieve IHash 2 slots for 384/512
1307 * 4. Load Phash2 = H(k ^ opad); to LSB
1308 * 5. generate FHash = H(hash on Ihash with Phash2
1310 * 6. Retrieve HMAC output from LSB to host memory
1312 if (session->auth_opt == 0)
1315 case CCP_AUTH_ALGO_SHA3_224:
1316 case CCP_AUTH_ALGO_SHA3_256:
1317 case CCP_AUTH_ALGO_SHA3_384:
1318 case CCP_AUTH_ALGO_SHA3_512:
1320 /**< only op ctx and dst in host memory*/
1322 case CCP_AUTH_ALGO_SHA3_224_HMAC:
1323 case CCP_AUTH_ALGO_SHA3_256_HMAC:
1326 case CCP_AUTH_ALGO_SHA3_384_HMAC:
1327 case CCP_AUTH_ALGO_SHA3_512_HMAC:
1330 * 1. Op to Perform Ihash
1331 * 2. Retrieve result from LSB to host memory
1332 * 3. Perform final hash
1335 case CCP_AUTH_ALGO_AES_CMAC:
1339 * extra descriptor in padding case
1340 * (k1/k2(255:128) with iv(127:0))
1345 CCP_LOG_ERR("Unsupported auth algo %d",
1346 session->auth.algo);
/*
 * Return the descriptor count for a combined AEAD (AES-GCM) op.
 * The first switch validates the AEAD algorithm; the second sizes the
 * GCM sequence (counts themselves are elided from this listing).
 */
1353 ccp_aead_slot(struct ccp_session *session)
1357 switch (session->aead_algo) {
1358 case RTE_CRYPTO_AEAD_AES_GCM:
1361 CCP_LOG_ERR("Unsupported aead algo %d",
1362 session->aead_algo);
1364 switch (session->auth.algo) {
1365 case CCP_AUTH_ALGO_AES_GCM:
1371 * 4. Reload passthru
1376 CCP_LOG_ERR("Unsupported combined auth ALGO %d",
1377 session->auth.algo);
/*
 * Total CCP descriptor requirement for one op of this session,
 * dispatching to the cipher/auth/aead sizing helpers per cmd_id.
 * CIPHER_HASH and HASH_CIPHER chains need the sum of both parts.
 */
1383 ccp_compute_slot_count(struct ccp_session *session)
1387 switch (session->cmd_id) {
1388 case CCP_CMD_CIPHER:
1389 count = ccp_cipher_slot(session);
1392 count = ccp_auth_slot(session);
1394 case CCP_CMD_CIPHER_HASH:
1395 case CCP_CMD_HASH_CIPHER:
1396 count = ccp_cipher_slot(session);
1397 count += ccp_auth_slot(session);
1399 case CCP_CMD_COMBINED:
1400 count = ccp_aead_slot(session);
1403 CCP_LOG_ERR("Unsupported cmd_id");
/*
 * Map a CCP HMAC auth algorithm id to the matching OpenSSL EVP digest,
 * written through *algo.  Used by the CPU (auth_opt) HMAC path.
 * NOTE(review): the MD5/SHA1 assignments and the default case are
 * elided from this listing.
 */
1411 algo_select(int sessalgo,
1412 const EVP_MD **algo)
1417 case CCP_AUTH_ALGO_MD5_HMAC:
1420 case CCP_AUTH_ALGO_SHA1_HMAC:
1423 case CCP_AUTH_ALGO_SHA224_HMAC:
1424 *algo = EVP_sha224();
1426 case CCP_AUTH_ALGO_SHA256_HMAC:
1427 *algo = EVP_sha256();
1429 case CCP_AUTH_ALGO_SHA384_HMAC:
1430 *algo = EVP_sha384();
1432 case CCP_AUTH_ALGO_SHA512_HMAC:
1433 *algo = EVP_sha512();
/*
 * Compute an HMAC over src[0..srclen) on the CPU with the OpenSSL
 * EVP_DigestSign* API and copy the first d_len bytes of the digest to
 * dst.  Digest is staged in temp_dst (64 bytes: max SHA-512 output).
 * Jumps to the shared error label on any OpenSSL failure.
 */
1443 process_cpu_auth_hmac(uint8_t *src, uint8_t *dst,
1444 __rte_unused uint8_t *iv,
1452 unsigned char temp_dst[64];
1454 if (EVP_DigestSignInit(ctx, NULL, algo, NULL, pkey) <= 0)
1455 goto process_auth_err;
1457 if (EVP_DigestSignUpdate(ctx, (char *)src, srclen) <= 0)
1458 goto process_auth_err;
1460 if (EVP_DigestSignFinal(ctx, temp_dst, &dstlen) <= 0)
1461 goto process_auth_err;
/* Caller asks for d_len bytes (possibly a truncated digest). */
1463 memcpy(dst, temp_dst, d_len);
1466 CCP_LOG_ERR("Process cpu auth failed");
/*
 * CPU (OpenSSL) HMAC path for one crypto op: build an HMAC key from the
 * session, hash the auth region of m_src, then either write the digest
 * out (GENERATE) or compare it against the op's digest (VERIFY).
 * VERIFY computes into qp->temp_digest so the op's digest stays intact
 * for the memcmp.
 */
1470 static int cpu_crypto_auth(struct ccp_qp *qp,
1471 struct rte_crypto_op *op,
1472 struct ccp_session *sess,
1477 struct rte_mbuf *mbuf_src, *mbuf_dst;
1478 const EVP_MD *algo = NULL;
1481 algo_select(sess->auth.algo, &algo);
1482 pkey = EVP_PKEY_new_mac_key(EVP_PKEY_HMAC, NULL, sess->auth.key,
1483 sess->auth.key_length);
1484 mbuf_src = op->sym->m_src;
1485 mbuf_dst = op->sym->m_dst ? op->sym->m_dst : op->sym->m_src;
1486 srclen = op->sym->auth.data.length;
1487 src = rte_pktmbuf_mtod_offset(mbuf_src, uint8_t *,
1488 op->sym->auth.data.offset);
1490 if (sess->auth.op == CCP_AUTH_OP_VERIFY) {
1491 dst = qp->temp_digest;
1493 dst = op->sym->auth.digest.data;
/* No digest pointer supplied: place it just past the auth region. */
1495 dst = rte_pktmbuf_mtod_offset(mbuf_dst, uint8_t *,
1496 op->sym->auth.data.offset +
1497 sess->auth.digest_length);
1500 status = process_cpu_auth_hmac(src, dst, NULL,
1504 sess->auth.digest_length);
1506 op->status = RTE_CRYPTO_OP_STATUS_ERROR;
1510 if (sess->auth.op == CCP_AUTH_OP_VERIFY) {
1511 if (memcmp(dst, op->sym->auth.digest.data,
1512 sess->auth.digest_length) != 0) {
1513 op->status = RTE_CRYPTO_OP_STATUS_AUTH_FAILED;
1515 op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
1518 op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
1520 EVP_PKEY_free(pkey);
/*
 * Enqueue one PASSTHRU descriptor that copies pst->len bytes either
 * system memory -> LSB storage block, or LSB -> system memory (the
 * direction selector itself is elided from this listing; the two
 * SRC/DST programming sequences below correspond to the two cases).
 * Only advances qidx; the caller rings the queue doorbell.
 */
1525 ccp_perform_passthru(struct ccp_passthru *pst,
1526 struct ccp_queue *cmd_q)
1528 struct ccp_desc *desc;
1529 union ccp_function function;
1531 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1533 CCP_CMD_ENGINE(desc) = CCP_ENGINE_PASSTHRU;
1535 CCP_CMD_SOC(desc) = 0;
1536 CCP_CMD_IOC(desc) = 0;
1537 CCP_CMD_INIT(desc) = 0;
1538 CCP_CMD_EOM(desc) = 0;
1539 CCP_CMD_PROT(desc) = 0;
1542 CCP_PT_BYTESWAP(&function) = pst->byte_swap;
1543 CCP_PT_BITWISE(&function) = pst->bit_mod;
1544 CCP_CMD_FUNCTION(desc) = function.raw;
1546 CCP_CMD_LEN(desc) = pst->len;
/* Case 1: host memory source, LSB (storage block) destination. */
1549 CCP_CMD_SRC_LO(desc) = (uint32_t)(pst->src_addr);
1550 CCP_CMD_SRC_HI(desc) = high32_value(pst->src_addr);
1551 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1553 CCP_CMD_DST_LO(desc) = (uint32_t)(pst->dest_addr);
1554 CCP_CMD_DST_HI(desc) = 0;
1555 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SB;
/* Bitwise ops (e.g. key masking) also need the key LSB slot id. */
1557 if (pst->bit_mod != CCP_PASSTHRU_BITWISE_NOOP)
1558 CCP_CMD_LSB_ID(desc) = cmd_q->sb_key;
/* Case 2: LSB source, host memory destination. */
1561 CCP_CMD_SRC_LO(desc) = (uint32_t)(pst->src_addr);
1562 CCP_CMD_SRC_HI(desc) = 0;
1563 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SB;
1565 CCP_CMD_DST_LO(desc) = (uint32_t)(pst->dest_addr);
1566 CCP_CMD_DST_HI(desc) = high32_value(pst->dest_addr);
1567 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1570 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
/*
 * Full hardware HMAC: load the precomputed inner pad hash (PHash1) into
 * the LSB, hash the message to get the intermediate hash, copy it out to
 * scratch space appended to m_src, load the outer pad hash (PHash2),
 * hash the intermediate value, and finally retrieve the HMAC into the
 * appended area.  The op stays NOT_PROCESSED; completion is handled by
 * the response path.
 */
1574 ccp_perform_hmac(struct rte_crypto_op *op,
1575 struct ccp_queue *cmd_q)
1578 struct ccp_session *session;
1579 union ccp_function function;
1580 struct ccp_desc *desc;
1582 phys_addr_t src_addr, dest_addr, dest_addr_t;
1583 struct ccp_passthru pst;
1584 uint64_t auth_msg_bits;
1588 session = (struct ccp_session *)get_sym_session_private_data(
1590 ccp_cryptodev_driver_id);
/* pre_compute holds PHash1 followed by PHash2, each ctx_len bytes. */
1591 addr = session->auth.pre_compute;
1593 src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
1594 op->sym->auth.data.offset);
/* Scratch/result area appended to the mbuf tail. */
1595 append_ptr = (void *)rte_pktmbuf_append(op->sym->m_src,
1596 session->auth.ctx_len);
1597 if (iommu_mode == 2) {
1598 dest_addr = (phys_addr_t)rte_mem_virt2iova(append_ptr);
1599 pst.src_addr = (phys_addr_t)rte_mem_virt2iova((void *)addr);
1601 dest_addr = (phys_addr_t)rte_mem_virt2phy(append_ptr);
1602 pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *)addr);
1604 dest_addr_t = dest_addr;
1606 /** Load PHash1 to LSB*/
1607 pst.dest_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1608 pst.len = session->auth.ctx_len;
1610 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1611 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1612 ccp_perform_passthru(&pst, cmd_q);
1614 /**sha engine command descriptor for IntermediateHash*/
1616 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1617 memset(desc, 0, Q_DESC_SIZE);
1619 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
1621 CCP_CMD_SOC(desc) = 0;
1622 CCP_CMD_IOC(desc) = 0;
1623 CCP_CMD_INIT(desc) = 1;
1624 CCP_CMD_EOM(desc) = 1;
1625 CCP_CMD_PROT(desc) = 0;
1628 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
1629 CCP_CMD_FUNCTION(desc) = function.raw;
1631 CCP_CMD_LEN(desc) = op->sym->auth.data.length;
/* Bit length includes the already-hashed ipad block. */
1632 auth_msg_bits = (op->sym->auth.data.length +
1633 session->auth.block_size) * 8;
1635 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
1636 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
1637 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1639 CCP_CMD_LSB_ID(desc) = cmd_q->sb_sha;
1640 CCP_CMD_SHA_LO(desc) = ((uint32_t)auth_msg_bits);
1641 CCP_CMD_SHA_HI(desc) = high32_value(auth_msg_bits);
1643 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
/* Kick the queue: write new tail, re-enable RUN. */
1647 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1648 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1649 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1650 cmd_q->qcontrol | CMD_Q_RUN);
1652 /* Intermediate Hash value retrieve */
/* SHA-384/512 state spans two LSB slots; copy both halves. */
1653 if ((session->auth.ut.sha_type == CCP_SHA_TYPE_384) ||
1654 (session->auth.ut.sha_type == CCP_SHA_TYPE_512)) {
1657 (phys_addr_t)((cmd_q->sb_sha + 1) * CCP_SB_BYTES);
1658 pst.dest_addr = dest_addr_t;
1659 pst.len = CCP_SB_BYTES;
1661 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1662 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1663 ccp_perform_passthru(&pst, cmd_q);
1665 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1666 pst.dest_addr = dest_addr_t + CCP_SB_BYTES;
1667 pst.len = CCP_SB_BYTES;
1669 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1670 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1671 ccp_perform_passthru(&pst, cmd_q);
/* Smaller digests fit a single LSB slot. */
1674 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1675 pst.dest_addr = dest_addr_t;
1676 pst.len = session->auth.ctx_len;
1678 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1679 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1680 ccp_perform_passthru(&pst, cmd_q);
1684 /** Load PHash2 to LSB*/
1685 addr += session->auth.ctx_len;
1686 if (iommu_mode == 2)
1687 pst.src_addr = (phys_addr_t)rte_mem_virt2iova((void *)addr);
1689 pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *)addr);
1690 pst.dest_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1691 pst.len = session->auth.ctx_len;
1693 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1694 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1695 ccp_perform_passthru(&pst, cmd_q);
1697 /**sha engine command descriptor for FinalHash*/
1698 dest_addr_t += session->auth.offset;
1700 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1701 memset(desc, 0, Q_DESC_SIZE);
1703 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
1705 CCP_CMD_SOC(desc) = 0;
1706 CCP_CMD_IOC(desc) = 0;
1707 CCP_CMD_INIT(desc) = 1;
1708 CCP_CMD_EOM(desc) = 1;
1709 CCP_CMD_PROT(desc) = 0;
1712 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
1713 CCP_CMD_FUNCTION(desc) = function.raw;
1715 CCP_CMD_LEN(desc) = (session->auth.ctx_len -
1716 session->auth.offset);
1717 auth_msg_bits = (session->auth.block_size +
1718 session->auth.ctx_len -
1719 session->auth.offset) * 8;
1721 CCP_CMD_SRC_LO(desc) = (uint32_t)(dest_addr_t);
1722 CCP_CMD_SRC_HI(desc) = high32_value(dest_addr_t);
1723 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1725 CCP_CMD_LSB_ID(desc) = cmd_q->sb_sha;
1726 CCP_CMD_SHA_LO(desc) = ((uint32_t)auth_msg_bits);
1727 CCP_CMD_SHA_HI(desc) = high32_value(auth_msg_bits);
1729 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
1733 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1734 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1735 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1736 cmd_q->qcontrol | CMD_Q_RUN);
1738 /* Retrieve hmac output */
1739 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1740 pst.dest_addr = dest_addr;
1741 pst.len = session->auth.ctx_len;
1743 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
/* 384/512 results are copied without swapping; others need 256-bit swap. */
1744 if ((session->auth.ut.sha_type == CCP_SHA_TYPE_384) ||
1745 (session->auth.ut.sha_type == CCP_SHA_TYPE_512))
1746 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1748 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1749 ccp_perform_passthru(&pst, cmd_q);
1751 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
/*
 * Plain (non-HMAC) SHA on the CCP engine: load the algorithm's initial
 * context into the LSB, hash the auth region of m_src, then copy the
 * digest into scratch space appended to the mbuf.
 */
1757 ccp_perform_sha(struct rte_crypto_op *op,
1758 struct ccp_queue *cmd_q)
1760 struct ccp_session *session;
1761 union ccp_function function;
1762 struct ccp_desc *desc;
1764 phys_addr_t src_addr, dest_addr;
1765 struct ccp_passthru pst;
1767 uint64_t auth_msg_bits;
1769 session = (struct ccp_session *)get_sym_session_private_data(
1771 ccp_cryptodev_driver_id);
1773 src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
1774 op->sym->auth.data.offset);
1775 append_ptr = (void *)rte_pktmbuf_append(op->sym->m_src,
1776 session->auth.ctx_len);
1777 if (iommu_mode == 2) {
1778 dest_addr = (phys_addr_t)rte_mem_virt2iova(append_ptr);
/* sha_ctx appears to be a pre-resolved IOVA of the init values —
 * TODO(review): confirm; its setup is not visible in this listing. */
1779 pst.src_addr = (phys_addr_t)sha_ctx;
1781 dest_addr = (phys_addr_t)rte_mem_virt2phy(append_ptr);
1782 pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *)
1786 /** Passthru sha context*/
1788 pst.dest_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1789 pst.len = session->auth.ctx_len;
1791 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1792 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1793 ccp_perform_passthru(&pst, cmd_q);
1795 /**prepare sha command descriptor*/
1797 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1798 memset(desc, 0, Q_DESC_SIZE);
1800 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
1802 CCP_CMD_SOC(desc) = 0;
1803 CCP_CMD_IOC(desc) = 0;
1804 CCP_CMD_INIT(desc) = 1;
1805 CCP_CMD_EOM(desc) = 1;
1806 CCP_CMD_PROT(desc) = 0;
1809 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
1810 CCP_CMD_FUNCTION(desc) = function.raw;
1812 CCP_CMD_LEN(desc) = op->sym->auth.data.length;
1813 auth_msg_bits = op->sym->auth.data.length * 8;
1815 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
1816 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
1817 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1819 CCP_CMD_LSB_ID(desc) = cmd_q->sb_sha;
1820 CCP_CMD_SHA_LO(desc) = ((uint32_t)auth_msg_bits);
1821 CCP_CMD_SHA_HI(desc) = high32_value(auth_msg_bits);
1823 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
/* Ring the doorbell: new tail, RUN bit. */
1827 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1828 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1829 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1830 cmd_q->qcontrol | CMD_Q_RUN);
1832 /* Hash value retrieve */
1833 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1834 pst.dest_addr = dest_addr;
1835 pst.len = session->auth.ctx_len;
1837 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1838 if ((session->auth.ut.sha_type == CCP_SHA_TYPE_384) ||
1839 (session->auth.ut.sha_type == CCP_SHA_TYPE_512))
1840 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1842 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1843 ccp_perform_passthru(&pst, cmd_q);
1845 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
/*
 * SHA3 HMAC on the CCP engine: desc1 hashes the message keyed with the
 * first precomputed context (Ihash), the intermediate result is copied
 * from the LSB into the second half of the appended scratch area, then
 * desc2 hashes that with the second precomputed context to produce the
 * final HMAC at the start of the scratch area.
 */
1851 ccp_perform_sha3_hmac(struct rte_crypto_op *op,
1852 struct ccp_queue *cmd_q)
1854 struct ccp_session *session;
1855 struct ccp_passthru pst;
1856 union ccp_function function;
1857 struct ccp_desc *desc;
1858 uint8_t *append_ptr;
1860 phys_addr_t src_addr, dest_addr, ctx_paddr, dest_addr_t;
1862 session = (struct ccp_session *)get_sym_session_private_data(
1864 ccp_cryptodev_driver_id);
1866 src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
1867 op->sym->auth.data.offset);
1868 append_ptr = (uint8_t *)rte_pktmbuf_append(op->sym->m_src,
1869 session->auth.ctx_len);
1871 CCP_LOG_ERR("CCP MBUF append failed\n");
1874 if (iommu_mode == 2) {
1875 dest_addr = (phys_addr_t)rte_mem_virt2iova((void *)append_ptr);
1876 ctx_paddr = (phys_addr_t)rte_mem_virt2iova(
1877 session->auth.pre_compute);
1879 dest_addr = (phys_addr_t)rte_mem_virt2phy((void *)append_ptr);
1880 ctx_paddr = (phys_addr_t)rte_mem_virt2phy(
1881 session->auth.pre_compute);
/* Second half of the scratch area holds the intermediate hash. */
1883 dest_addr_t = dest_addr + (session->auth.ctx_len / 2);
1884 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1885 memset(desc, 0, Q_DESC_SIZE);
1887 /*desc1 for SHA3-Ihash operation */
1888 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
1889 CCP_CMD_INIT(desc) = 1;
1890 CCP_CMD_EOM(desc) = 1;
1893 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
1894 CCP_CMD_FUNCTION(desc) = function.raw;
1895 CCP_CMD_LEN(desc) = op->sym->auth.data.length;
1897 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
1898 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
1899 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
/* Intermediate hash lands in the queue's SHA LSB slot. */
1901 CCP_CMD_DST_LO(desc) = (cmd_q->sb_sha * CCP_SB_BYTES);
1902 CCP_CMD_DST_HI(desc) = 0;
1903 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SB;
/* KEY field carries the SHA3 precompute context address. */
1905 CCP_CMD_KEY_LO(desc) = ((uint32_t)ctx_paddr);
1906 CCP_CMD_KEY_HI(desc) = high32_value(ctx_paddr);
1907 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1909 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
1912 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1913 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1914 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1915 cmd_q->qcontrol | CMD_Q_RUN);
1917 /* Intermediate Hash value retrieve */
/* SHA3-384/512 intermediate state spans two LSB slots. */
1918 if ((session->auth.ut.sha_type == CCP_SHA3_TYPE_384) ||
1919 (session->auth.ut.sha_type == CCP_SHA3_TYPE_512)) {
1922 (phys_addr_t)((cmd_q->sb_sha + 1) * CCP_SB_BYTES);
1923 pst.dest_addr = dest_addr_t;
1924 pst.len = CCP_SB_BYTES;
1926 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1927 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1928 ccp_perform_passthru(&pst, cmd_q);
1930 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1931 pst.dest_addr = dest_addr_t + CCP_SB_BYTES;
1932 pst.len = CCP_SB_BYTES;
1934 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1935 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1936 ccp_perform_passthru(&pst, cmd_q);
1939 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1940 pst.dest_addr = dest_addr_t;
1941 pst.len = CCP_SB_BYTES;
1943 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1944 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1945 ccp_perform_passthru(&pst, cmd_q);
1948 /**sha engine command descriptor for FinalHash*/
/* Advance to the opad-side precompute context. */
1949 ctx_paddr += CCP_SHA3_CTX_SIZE;
1950 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1951 memset(desc, 0, Q_DESC_SIZE);
1953 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
1954 CCP_CMD_INIT(desc) = 1;
1955 CCP_CMD_EOM(desc) = 1;
1958 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
1959 CCP_CMD_FUNCTION(desc) = function.raw;
/* Digest smaller than an LSB slot sits at the slot's tail; adjust src. */
1961 if (session->auth.ut.sha_type == CCP_SHA3_TYPE_224) {
1962 dest_addr_t += (CCP_SB_BYTES - SHA224_DIGEST_SIZE);
1963 CCP_CMD_LEN(desc) = SHA224_DIGEST_SIZE;
1964 } else if (session->auth.ut.sha_type == CCP_SHA3_TYPE_256) {
1965 CCP_CMD_LEN(desc) = SHA256_DIGEST_SIZE;
1966 } else if (session->auth.ut.sha_type == CCP_SHA3_TYPE_384) {
1967 dest_addr_t += (2 * CCP_SB_BYTES - SHA384_DIGEST_SIZE);
1968 CCP_CMD_LEN(desc) = SHA384_DIGEST_SIZE;
1970 CCP_CMD_LEN(desc) = SHA512_DIGEST_SIZE;
1973 CCP_CMD_SRC_LO(desc) = ((uint32_t)dest_addr_t);
1974 CCP_CMD_SRC_HI(desc) = high32_value(dest_addr_t);
1975 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1977 CCP_CMD_DST_LO(desc) = (uint32_t)dest_addr;
1978 CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
1979 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1981 CCP_CMD_KEY_LO(desc) = ((uint32_t)ctx_paddr);
1982 CCP_CMD_KEY_HI(desc) = high32_value(ctx_paddr);
1983 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1985 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
1988 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1989 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1990 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1991 cmd_q->qcontrol | CMD_Q_RUN);
1993 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
/*
 * Plain SHA3 digest on the CCP SHA engine: one descriptor whose KEY
 * field points at the session's SHA3 context and whose DST is scratch
 * space appended to m_src.
 *
 * BUG FIX: ctx_addr was translated with rte_mem_virt2iova()/
 * rte_mem_virt2phy() while still NULL — its assignment from
 * session->auth.sha3_ctx only happened AFTER the translation, so
 * ctx_paddr was computed from a NULL pointer.  The assignment is moved
 * ahead of the iommu_mode branch; all other visible lines are unchanged.
 */
1998 ccp_perform_sha3(struct rte_crypto_op *op,
1999 struct ccp_queue *cmd_q)
2001 struct ccp_session *session;
2002 union ccp_function function;
2003 struct ccp_desc *desc;
2004 uint8_t *ctx_addr = NULL, *append_ptr = NULL;
2006 phys_addr_t src_addr, dest_addr, ctx_paddr;
2008 session = (struct ccp_session *)get_sym_session_private_data(
2010 ccp_cryptodev_driver_id);
2012 src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
2013 op->sym->auth.data.offset);
2014 append_ptr = (uint8_t *)rte_pktmbuf_append(op->sym->m_src,
2015 session->auth.ctx_len);
2017 CCP_LOG_ERR("CCP MBUF append failed\n");
/* Resolve the SHA3 context pointer BEFORE address translation. */
2028 ctx_addr = session->auth.sha3_ctx;
2020 if (iommu_mode == 2) {
2021 dest_addr = (phys_addr_t)rte_mem_virt2iova((void *)append_ptr);
2022 ctx_paddr = (phys_addr_t)rte_mem_virt2iova((void *)ctx_addr);
2024 dest_addr = (phys_addr_t)rte_mem_virt2phy((void *)append_ptr);
2025 ctx_paddr = (phys_addr_t)rte_mem_virt2phy((void *)ctx_addr);
2030 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2031 memset(desc, 0, Q_DESC_SIZE);
2033 /* prepare desc for SHA3 operation */
2034 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
2035 CCP_CMD_INIT(desc) = 1;
2036 CCP_CMD_EOM(desc) = 1;
2039 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
2040 CCP_CMD_FUNCTION(desc) = function.raw;
2042 CCP_CMD_LEN(desc) = op->sym->auth.data.length;
2044 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
2045 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
2046 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2048 CCP_CMD_DST_LO(desc) = ((uint32_t)dest_addr);
2049 CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
2050 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2052 CCP_CMD_KEY_LO(desc) = ((uint32_t)ctx_paddr);
2053 CCP_CMD_KEY_HI(desc) = high32_value(ctx_paddr);
2054 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2056 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2060 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2061 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2062 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2063 cmd_q->qcontrol | CMD_Q_RUN);
2065 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
/*
 * AES-CMAC on the CCP AES engine.  Block-aligned input: zero IV via
 * pre_compute, single descriptor over the whole message.  Unaligned
 * input: descriptor over the aligned prefix, then the last partial
 * block is padded (0x80 marker) into scratch space and processed with
 * a second descriptor using the K2 subkey area (pre_compute+SB_BYTES).
 * NOTE(review): key_addr and dest_addr are translated with
 * rte_mem_virt2phy() unconditionally, while sibling functions branch on
 * iommu_mode — verify against the IOVA configuration.
 */
2070 ccp_perform_aes_cmac(struct rte_crypto_op *op,
2071 struct ccp_queue *cmd_q)
2073 struct ccp_session *session;
2074 union ccp_function function;
2075 struct ccp_passthru pst;
2076 struct ccp_desc *desc;
2078 uint8_t *src_tb, *append_ptr, *ctx_addr;
2079 phys_addr_t src_addr, dest_addr, key_addr;
2080 int length, non_align_len;
2082 session = (struct ccp_session *)get_sym_session_private_data(
2084 ccp_cryptodev_driver_id);
2085 key_addr = rte_mem_virt2phy(session->auth.key_ccp);
2087 src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
2088 op->sym->auth.data.offset);
2089 append_ptr = (uint8_t *)rte_pktmbuf_append(op->sym->m_src,
2090 session->auth.ctx_len);
2091 dest_addr = (phys_addr_t)rte_mem_virt2phy((void *)append_ptr);
2094 CCP_AES_ENCRYPT(&function) = CCP_CIPHER_DIR_ENCRYPT;
2095 CCP_AES_MODE(&function) = session->auth.um.aes_mode;
2096 CCP_AES_TYPE(&function) = session->auth.ut.aes_type;
/* Aligned path: message length is a whole number of AES blocks. */
2098 if (op->sym->auth.data.length % session->auth.block_size == 0) {
2100 ctx_addr = session->auth.pre_compute;
2101 memset(ctx_addr, 0, AES_BLOCK_SIZE);
2102 if (iommu_mode == 2)
2103 pst.src_addr = (phys_addr_t)rte_mem_virt2iova(
2106 pst.src_addr = (phys_addr_t)rte_mem_virt2phy(
2109 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2110 pst.len = CCP_SB_BYTES;
2112 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2113 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
2114 ccp_perform_passthru(&pst, cmd_q);
2116 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2117 memset(desc, 0, Q_DESC_SIZE);
2119 /* prepare desc for aes-cmac command */
2120 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2121 CCP_CMD_EOM(desc) = 1;
2122 CCP_CMD_FUNCTION(desc) = function.raw;
2124 CCP_CMD_LEN(desc) = op->sym->auth.data.length;
2125 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
2126 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
2127 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2129 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2130 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2131 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2132 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2134 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2139 (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2140 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2141 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2142 cmd_q->qcontrol | CMD_Q_RUN);
/* Unaligned path: zero IV from the K2 half of the precompute area. */
2144 ctx_addr = session->auth.pre_compute + CCP_SB_BYTES;
2145 memset(ctx_addr, 0, AES_BLOCK_SIZE);
2146 if (iommu_mode == 2)
2147 pst.src_addr = (phys_addr_t)rte_mem_virt2iova(
2150 pst.src_addr = (phys_addr_t)rte_mem_virt2phy(
2152 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2153 pst.len = CCP_SB_BYTES;
2155 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2156 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
2157 ccp_perform_passthru(&pst, cmd_q);
/* Split into an aligned prefix and the trailing partial block. */
2159 length = (op->sym->auth.data.length / AES_BLOCK_SIZE);
2160 length *= AES_BLOCK_SIZE;
2161 non_align_len = op->sym->auth.data.length - length;
2162 /* prepare desc for aes-cmac command */
2164 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2165 memset(desc, 0, Q_DESC_SIZE);
2167 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2168 CCP_CMD_INIT(desc) = 1;
2169 CCP_CMD_FUNCTION(desc) = function.raw;
2171 CCP_CMD_LEN(desc) = length;
2172 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
2173 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
2174 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2176 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2177 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2178 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2179 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2181 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
/* Build the padded final block in scratch space: copy the tail bytes
 * and append the 0x80 CMAC padding marker. */
2184 append_ptr = append_ptr + CCP_SB_BYTES;
2185 memset(append_ptr, 0, AES_BLOCK_SIZE);
2186 src_tb = rte_pktmbuf_mtod_offset(op->sym->m_src,
2188 op->sym->auth.data.offset +
2190 rte_memcpy(append_ptr, src_tb, non_align_len);
2191 append_ptr[non_align_len] = CMAC_PAD_VALUE;
2193 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2194 memset(desc, 0, Q_DESC_SIZE);
2196 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2197 CCP_CMD_EOM(desc) = 1;
2198 CCP_CMD_FUNCTION(desc) = function.raw;
2199 CCP_CMD_LEN(desc) = AES_BLOCK_SIZE;
2201 CCP_CMD_SRC_LO(desc) = ((uint32_t)(dest_addr + CCP_SB_BYTES));
2202 CCP_CMD_SRC_HI(desc) = high32_value(dest_addr + CCP_SB_BYTES);
2203 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2205 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2206 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2207 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2208 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2210 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2214 (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2215 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2216 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2217 cmd_q->qcontrol | CMD_Q_RUN);
2219 /* Retrieve result */
2220 pst.dest_addr = dest_addr;
2221 pst.src_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2222 pst.len = CCP_SB_BYTES;
2224 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2225 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
2226 ccp_perform_passthru(&pst, cmd_q);
2228 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
/*
 * Enqueue one AES cipher descriptor.  For non-ECB modes the IV is first
 * staged: CTR appends the IV to the session nonce buffer, other modes
 * right-align the IV in a per-batch LSB staging buffer; either way it is
 * passthru-copied into the queue's IV LSB slot.  In-place operation when
 * m_dst is absent.  Doorbell is written elsewhere (batch submit).
 */
2233 ccp_perform_aes(struct rte_crypto_op *op,
2234 struct ccp_queue *cmd_q,
2235 struct ccp_batch_info *b_info)
2237 struct ccp_session *session;
2238 union ccp_function function;
2240 struct ccp_passthru pst = {0};
2241 struct ccp_desc *desc;
2242 phys_addr_t src_addr, dest_addr, key_addr;
2245 session = (struct ccp_session *)get_sym_session_private_data(
2247 ccp_cryptodev_driver_id);
2250 iv = rte_crypto_op_ctod_offset(op, uint8_t *, session->iv.offset);
2251 if (session->cipher.um.aes_mode != CCP_AES_MODE_ECB) {
2252 if (session->cipher.um.aes_mode == CCP_AES_MODE_CTR) {
2253 rte_memcpy(session->cipher.nonce + AES_BLOCK_SIZE,
2254 iv, session->iv.length);
2255 pst.src_addr = (phys_addr_t)session->cipher.nonce_phys;
/* 0x1F: AES_SIZE field for CTR per the visible usage here. */
2256 CCP_AES_SIZE(&function) = 0x1F;
/* Non-CTR: stage the IV right-aligned in this batch's LSB buffer. */
2259 &(b_info->lsb_buf[b_info->lsb_buf_idx*CCP_SB_BYTES]);
2260 rte_memcpy(lsb_buf +
2261 (CCP_SB_BYTES - session->iv.length),
2262 iv, session->iv.length);
2263 pst.src_addr = b_info->lsb_buf_phys +
2264 (b_info->lsb_buf_idx * CCP_SB_BYTES);
2265 b_info->lsb_buf_idx++;
2268 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2269 pst.len = CCP_SB_BYTES;
2271 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2272 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
2273 ccp_perform_passthru(&pst, cmd_q);
2276 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2278 src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
2279 op->sym->cipher.data.offset);
2280 if (likely(op->sym->m_dst != NULL))
2281 dest_addr = rte_pktmbuf_iova_offset(op->sym->m_dst,
2282 op->sym->cipher.data.offset);
2284 dest_addr = src_addr;
2285 key_addr = session->cipher.key_phys;
2287 /* prepare desc for aes command */
2288 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2289 CCP_CMD_INIT(desc) = 1;
2290 CCP_CMD_EOM(desc) = 1;
2292 CCP_AES_ENCRYPT(&function) = session->cipher.dir;
2293 CCP_AES_MODE(&function) = session->cipher.um.aes_mode;
2294 CCP_AES_TYPE(&function) = session->cipher.ut.aes_type;
2295 CCP_CMD_FUNCTION(desc) = function.raw;
2297 CCP_CMD_LEN(desc) = op->sym->cipher.data.length;
2299 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
2300 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
2301 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2303 CCP_CMD_DST_LO(desc) = ((uint32_t)dest_addr);
2304 CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
2305 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2307 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2308 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2309 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2311 if (session->cipher.um.aes_mode != CCP_AES_MODE_ECB)
2312 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2314 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2315 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
/*
 * Enqueue one 3DES descriptor.  Only CBC is supported: the IV is staged
 * right-aligned in the batch LSB buffer and passthru-copied into the
 * queue's IV slot; CFB/ECB are rejected.  In-place when m_dst is absent.
 * This path rings the queue doorbell itself.
 */
2320 ccp_perform_3des(struct rte_crypto_op *op,
2321 struct ccp_queue *cmd_q,
2322 struct ccp_batch_info *b_info)
2324 struct ccp_session *session;
2325 union ccp_function function;
2326 unsigned char *lsb_buf;
2327 struct ccp_passthru pst;
2328 struct ccp_desc *desc;
2331 phys_addr_t src_addr, dest_addr, key_addr;
2333 session = (struct ccp_session *)get_sym_session_private_data(
2335 ccp_cryptodev_driver_id);
2337 iv = rte_crypto_op_ctod_offset(op, uint8_t *, session->iv.offset);
2338 switch (session->cipher.um.des_mode) {
2339 case CCP_DES_MODE_CBC:
2340 lsb_buf = &(b_info->lsb_buf[b_info->lsb_buf_idx*CCP_SB_BYTES]);
2341 b_info->lsb_buf_idx++;
/* Right-align the IV inside the 32-byte staging slot. */
2343 rte_memcpy(lsb_buf + (CCP_SB_BYTES - session->iv.length),
2344 iv, session->iv.length);
2345 if (iommu_mode == 2)
2346 pst.src_addr = (phys_addr_t)rte_mem_virt2iova(
2349 pst.src_addr = (phys_addr_t)rte_mem_virt2phy(
2351 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2352 pst.len = CCP_SB_BYTES;
2354 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2355 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
2356 ccp_perform_passthru(&pst, cmd_q);
2358 case CCP_DES_MODE_CFB:
2359 case CCP_DES_MODE_ECB:
2360 CCP_LOG_ERR("Unsupported DES cipher mode");
2364 src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
2365 op->sym->cipher.data.offset);
2366 if (unlikely(op->sym->m_dst != NULL))
2368 rte_pktmbuf_iova_offset(op->sym->m_dst,
2369 op->sym->cipher.data.offset);
2371 dest_addr = src_addr;
2373 if (iommu_mode == 2)
2374 key_addr = rte_mem_virt2iova(session->cipher.key_ccp);
2376 key_addr = rte_mem_virt2phy(session->cipher.key_ccp);
2378 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2380 memset(desc, 0, Q_DESC_SIZE);
2382 /* prepare desc for des command */
2383 CCP_CMD_ENGINE(desc) = CCP_ENGINE_3DES;
2385 CCP_CMD_SOC(desc) = 0;
2386 CCP_CMD_IOC(desc) = 0;
2387 CCP_CMD_INIT(desc) = 1;
2388 CCP_CMD_EOM(desc) = 1;
2389 CCP_CMD_PROT(desc) = 0;
2392 CCP_DES_ENCRYPT(&function) = session->cipher.dir;
2393 CCP_DES_MODE(&function) = session->cipher.um.des_mode;
2394 CCP_DES_TYPE(&function) = session->cipher.ut.des_type;
2395 CCP_CMD_FUNCTION(desc) = function.raw;
2397 CCP_CMD_LEN(desc) = op->sym->cipher.data.length;
2399 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
2400 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
2401 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2403 CCP_CMD_DST_LO(desc) = ((uint32_t)dest_addr);
2404 CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
2405 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2407 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2408 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2409 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
/* NOTE(review): truthiness test — relies on the non-IV mode enum value
 * being 0; confirm CCP_DES_MODE_* definitions. */
2411 if (session->cipher.um.des_mode)
2412 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2414 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2418 /* Write the new tail address back to the queue register */
2419 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2420 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2421 /* Turn the queue back on using our cached control register */
2422 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2423 cmd_q->qcontrol | CMD_Q_RUN);
2425 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
2430 ccp_perform_aes_gcm(struct rte_crypto_op *op, struct ccp_queue *cmd_q)
2432 struct ccp_session *session;
2433 union ccp_function function;
2435 struct ccp_passthru pst;
2436 struct ccp_desc *desc;
2439 phys_addr_t src_addr, dest_addr, key_addr, aad_addr;
2440 phys_addr_t digest_dest_addr;
2441 int length, non_align_len;
2443 session = (struct ccp_session *)get_sym_session_private_data(
2445 ccp_cryptodev_driver_id);
2446 iv = rte_crypto_op_ctod_offset(op, uint8_t *, session->iv.offset);
2447 key_addr = session->cipher.key_phys;
2449 src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
2450 op->sym->aead.data.offset);
2451 if (unlikely(op->sym->m_dst != NULL))
2452 dest_addr = rte_pktmbuf_iova_offset(op->sym->m_dst,
2453 op->sym->aead.data.offset);
2455 dest_addr = src_addr;
2456 rte_pktmbuf_append(op->sym->m_src, session->auth.ctx_len);
2457 digest_dest_addr = op->sym->aead.digest.phys_addr;
2458 temp = (uint64_t *)(op->sym->aead.digest.data + AES_BLOCK_SIZE);
2459 *temp++ = rte_bswap64(session->auth.aad_length << 3);
2460 *temp = rte_bswap64(op->sym->aead.data.length << 3);
2462 non_align_len = op->sym->aead.data.length % AES_BLOCK_SIZE;
2463 length = CCP_ALIGN(op->sym->aead.data.length, AES_BLOCK_SIZE);
2465 aad_addr = op->sym->aead.aad.phys_addr;
2467 /* CMD1 IV Passthru */
2468 rte_memcpy(session->cipher.nonce + AES_BLOCK_SIZE, iv,
2469 session->iv.length);
2470 pst.src_addr = session->cipher.nonce_phys;
2471 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2472 pst.len = CCP_SB_BYTES;
2474 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2475 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
2476 ccp_perform_passthru(&pst, cmd_q);
2478 /* CMD2 GHASH-AAD */
2480 CCP_AES_ENCRYPT(&function) = CCP_AES_MODE_GHASH_AAD;
2481 CCP_AES_MODE(&function) = CCP_AES_MODE_GHASH;
2482 CCP_AES_TYPE(&function) = session->cipher.ut.aes_type;
2484 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2485 memset(desc, 0, Q_DESC_SIZE);
2487 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2488 CCP_CMD_INIT(desc) = 1;
2489 CCP_CMD_FUNCTION(desc) = function.raw;
2491 CCP_CMD_LEN(desc) = session->auth.aad_length;
2493 CCP_CMD_SRC_LO(desc) = ((uint32_t)aad_addr);
2494 CCP_CMD_SRC_HI(desc) = high32_value(aad_addr);
2495 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2497 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2498 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2499 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2501 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2503 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2506 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2507 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2508 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2509 cmd_q->qcontrol | CMD_Q_RUN);
2511 /* CMD3 : GCTR Plain text */
2513 CCP_AES_ENCRYPT(&function) = session->cipher.dir;
2514 CCP_AES_MODE(&function) = CCP_AES_MODE_GCTR;
2515 CCP_AES_TYPE(&function) = session->cipher.ut.aes_type;
2516 if (non_align_len == 0)
2517 CCP_AES_SIZE(&function) = (AES_BLOCK_SIZE << 3) - 1;
2519 CCP_AES_SIZE(&function) = (non_align_len << 3) - 1;
2522 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2523 memset(desc, 0, Q_DESC_SIZE);
2525 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2526 CCP_CMD_EOM(desc) = 1;
2527 CCP_CMD_FUNCTION(desc) = function.raw;
2529 CCP_CMD_LEN(desc) = length;
2531 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
2532 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
2533 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2535 CCP_CMD_DST_LO(desc) = ((uint32_t)dest_addr);
2536 CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
2537 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2539 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2540 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2541 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2543 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2545 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2548 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2549 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2550 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2551 cmd_q->qcontrol | CMD_Q_RUN);
2553 /* CMD4 : PT to copy IV */
2554 pst.src_addr = session->cipher.nonce_phys;
2555 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2556 pst.len = AES_BLOCK_SIZE;
2558 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2559 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
2560 ccp_perform_passthru(&pst, cmd_q);
2562 /* CMD5 : GHASH-Final */
2564 CCP_AES_ENCRYPT(&function) = CCP_AES_MODE_GHASH_FINAL;
2565 CCP_AES_MODE(&function) = CCP_AES_MODE_GHASH;
2566 CCP_AES_TYPE(&function) = session->cipher.ut.aes_type;
2568 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2569 memset(desc, 0, Q_DESC_SIZE);
2571 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2572 CCP_CMD_FUNCTION(desc) = function.raw;
2573 /* Last block (AAD_len || PT_len)*/
2574 CCP_CMD_LEN(desc) = AES_BLOCK_SIZE;
2576 CCP_CMD_SRC_LO(desc) = ((uint32_t)digest_dest_addr + AES_BLOCK_SIZE);
2577 CCP_CMD_SRC_HI(desc) = high32_value(digest_dest_addr + AES_BLOCK_SIZE);
2578 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2580 CCP_CMD_DST_LO(desc) = ((uint32_t)digest_dest_addr);
2581 CCP_CMD_DST_HI(desc) = high32_value(digest_dest_addr);
2582 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2584 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2585 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2586 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2588 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2590 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2593 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2594 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2595 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2596 cmd_q->qcontrol | CMD_Q_RUN);
2598 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
/*
 * ccp_crypto_cipher() - dispatch a cipher op to the proper CCP backend
 * based on session->cipher.algo, and account in b_info->desccnt the
 * number of hardware descriptors each backend enqueues (2 for the
 * IV-bearing AES/3DES CBC and AES CTR paths, 1 for AES ECB).
 *
 * NOTE(review): mangled paste — break/return lines, braces and the
 * result declaration are elided from this chunk.
 */
2603 ccp_crypto_cipher(struct rte_crypto_op *op,
2604 struct ccp_queue *cmd_q,
2605 struct ccp_batch_info *b_info)
2608 struct ccp_session *session;
2610 session = (struct ccp_session *)get_sym_session_private_data(
2612 ccp_cryptodev_driver_id);
2614 switch (session->cipher.algo) {
2615 case CCP_CIPHER_ALGO_AES_CBC:
2616 result = ccp_perform_aes(op, cmd_q, b_info);
2617 b_info->desccnt += 2;
2619 case CCP_CIPHER_ALGO_AES_CTR:
2620 result = ccp_perform_aes(op, cmd_q, b_info);
2621 b_info->desccnt += 2;
2623 case CCP_CIPHER_ALGO_AES_ECB:
/* ECB needs no IV passthru descriptor, hence only one slot. */
2624 result = ccp_perform_aes(op, cmd_q, b_info);
2625 b_info->desccnt += 1;
2627 case CCP_CIPHER_ALGO_3DES_CBC:
2628 result = ccp_perform_3des(op, cmd_q, b_info);
2629 b_info->desccnt += 2;
2632 CCP_LOG_ERR("Unsupported cipher algo %d",
2633 session->cipher.algo);
/*
 * ccp_crypto_auth() - dispatch an auth op to the matching CCP backend
 * based on session->auth.algo and account the per-algorithm descriptor
 * count in b_info->desccnt.
 *
 * HMAC cases gate on session->auth_opt == 0 (hardware HMAC path); the
 * auth_opt != 0 alternative is presumably handled on elided lines /
 * by the caller's CPU path — TODO confirm against the full file.
 *
 * NOTE(review): mangled paste — break/return lines, braces and the
 * result declaration are elided from this chunk.
 */
2640 ccp_crypto_auth(struct rte_crypto_op *op,
2641 struct ccp_queue *cmd_q,
2642 struct ccp_batch_info *b_info)
2646 struct ccp_session *session;
2648 session = (struct ccp_session *)get_sym_session_private_data(
2650 ccp_cryptodev_driver_id);
2652 switch (session->auth.algo) {
2653 case CCP_AUTH_ALGO_SHA1:
2654 case CCP_AUTH_ALGO_SHA224:
2655 case CCP_AUTH_ALGO_SHA256:
2656 case CCP_AUTH_ALGO_SHA384:
2657 case CCP_AUTH_ALGO_SHA512:
/* Plain SHA: 3 descriptors per op. */
2658 result = ccp_perform_sha(op, cmd_q);
2659 b_info->desccnt += 3;
2661 case CCP_AUTH_ALGO_MD5_HMAC:
2662 if (session->auth_opt == 0)
2665 case CCP_AUTH_ALGO_SHA1_HMAC:
2666 case CCP_AUTH_ALGO_SHA224_HMAC:
2667 case CCP_AUTH_ALGO_SHA256_HMAC:
/* HW HMAC (<= SHA-256): 6 descriptors per op. */
2668 if (session->auth_opt == 0) {
2669 result = ccp_perform_hmac(op, cmd_q);
2670 b_info->desccnt += 6;
2673 case CCP_AUTH_ALGO_SHA384_HMAC:
2674 case CCP_AUTH_ALGO_SHA512_HMAC:
/* HW HMAC (SHA-384/512): 7 descriptors per op. */
2675 if (session->auth_opt == 0) {
2676 result = ccp_perform_hmac(op, cmd_q);
2677 b_info->desccnt += 7;
2680 case CCP_AUTH_ALGO_SHA3_224:
2681 case CCP_AUTH_ALGO_SHA3_256:
2682 case CCP_AUTH_ALGO_SHA3_384:
2683 case CCP_AUTH_ALGO_SHA3_512:
2684 result = ccp_perform_sha3(op, cmd_q);
2685 b_info->desccnt += 1;
2687 case CCP_AUTH_ALGO_SHA3_224_HMAC:
2688 case CCP_AUTH_ALGO_SHA3_256_HMAC:
2689 result = ccp_perform_sha3_hmac(op, cmd_q);
2690 b_info->desccnt += 3;
2692 case CCP_AUTH_ALGO_SHA3_384_HMAC:
2693 case CCP_AUTH_ALGO_SHA3_512_HMAC:
2694 result = ccp_perform_sha3_hmac(op, cmd_q);
2695 b_info->desccnt += 4;
2697 case CCP_AUTH_ALGO_AES_CMAC:
2698 result = ccp_perform_aes_cmac(op, cmd_q);
2699 b_info->desccnt += 4;
2702 CCP_LOG_ERR("Unsupported auth algo %d",
2703 session->auth.algo);
/*
 * ccp_crypto_aead() - dispatch an AEAD op.  Only AES-GCM is supported;
 * it also sanity-checks that the session's cipher algo matches GCM
 * (a mismatched chain order is rejected).  The GCM path enqueues 5
 * descriptors, accounted in b_info->desccnt.
 *
 * NOTE(review): mangled paste — break/return lines and braces are
 * elided from this chunk.
 */
2711 ccp_crypto_aead(struct rte_crypto_op *op,
2712 struct ccp_queue *cmd_q,
2713 struct ccp_batch_info *b_info)
2716 struct ccp_session *session;
2718 session = (struct ccp_session *)get_sym_session_private_data(
2720 ccp_cryptodev_driver_id);
2722 switch (session->auth.algo) {
2723 case CCP_AUTH_ALGO_AES_GCM:
2724 if (session->cipher.algo != CCP_CIPHER_ALGO_AES_GCM) {
2725 CCP_LOG_ERR("Incorrect chain order");
2728 result = ccp_perform_aes_gcm(op, cmd_q);
2729 b_info->desccnt += 5;
2732 CCP_LOG_ERR("Unsupported aead algo %d",
2733 session->aead_algo);
/*
 * process_ops_to_enqueue() - enqueue a batch of crypto ops onto @cmd_q.
 *
 * Allocates a ccp_batch_info from the qp mempool, records everything a
 * later dequeue needs (head/tail queue offsets, descriptor count, op
 * pointers), dispatches each op by session->cmd_id to the cipher /
 * auth / aead builders (or the OpenSSL cpu_crypto_auth path when
 * session->auth_opt is set), then writes the tail register once for
 * the whole batch and pushes the batch onto qp->processed_pkts.
 *
 * An OpenSSL EVP_MD_CTX is created up front for the CPU-auth path and
 * destroyed at the end.  free_slots accounting: slots_req is debited
 * before building; on a builder failure the unused remainder
 * (slots_req - desccnt) is credited back.
 *
 * NOTE(review): mangled paste — parts of the signature (b_idx, nb_ops,
 * slots_req appear in the body but not in the visible parameter
 * lines), error-exit paths, else-branches and braces are elided.
 */
2740 process_ops_to_enqueue(struct ccp_qp *qp,
2741 struct rte_crypto_op **op,
2742 struct ccp_queue *cmd_q,
2744 uint16_t total_nb_ops,
2749 struct ccp_batch_info *b_info;
2750 struct ccp_session *session;
2751 EVP_MD_CTX *auth_ctx = NULL;
2753 if (rte_mempool_get(qp->batch_mp, (void **)&b_info)) {
2754 CCP_LOG_ERR("batch info allocation failed");
/* Context for the OpenSSL-based CPU auth fallback path. */
2758 auth_ctx = EVP_MD_CTX_create();
2759 if (unlikely(!auth_ctx)) {
2760 CCP_LOG_ERR("Unable to create auth ctx");
2763 b_info->auth_ctr = 0;
2765 /* populate batch info necessary for dequeue */
2768 b_info->lsb_buf_idx = 0;
2769 b_info->desccnt = 0;
2770 b_info->cmd_q = cmd_q;
/* iommu_mode == 2 -> IOVA translation; otherwise (elided else) phys. */
2771 if (iommu_mode == 2)
2772 b_info->lsb_buf_phys =
2773 (phys_addr_t)rte_mem_virt2iova((void *)b_info->lsb_buf);
2775 b_info->lsb_buf_phys =
2776 (phys_addr_t)rte_mem_virt2phy((void *)b_info->lsb_buf);
/* Reserve the requested descriptor slots up front. */
2778 rte_atomic64_sub(&b_info->cmd_q->free_slots, slots_req);
2780 b_info->head_offset = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx *
2782 for (i = b_idx; i < (nb_ops+b_idx); i++) {
2783 session = (struct ccp_session *)get_sym_session_private_data(
2784 op[i]->sym->session,
2785 ccp_cryptodev_driver_id);
2786 switch (session->cmd_id) {
2787 case CCP_CMD_CIPHER:
2788 result = ccp_crypto_cipher(op[i], cmd_q, b_info);
/* auth_opt set -> run the hash on the CPU via OpenSSL. */
2791 if (session->auth_opt) {
2793 result = cpu_crypto_auth(qp, op[i],
2796 result = ccp_crypto_auth(op[i], cmd_q, b_info);
2798 case CCP_CMD_CIPHER_HASH:
2799 result = ccp_crypto_cipher(op[i], cmd_q, b_info);
2802 result = ccp_crypto_auth(op[i], cmd_q, b_info);
2804 case CCP_CMD_HASH_CIPHER:
/* Hash-then-cipher: CPU auth (if enabled) runs before the cipher. */
2805 if (session->auth_opt) {
2806 result = cpu_crypto_auth(qp, op[i],
2808 if (op[i]->status !=
2809 RTE_CRYPTO_OP_STATUS_SUCCESS)
2810 CCP_LOG_ERR("RTE_CRYPTO_OP_STATUS_AUTH_FAILED");
2812 result = ccp_crypto_auth(op[i], cmd_q, b_info);
2816 result = ccp_crypto_cipher(op[i], cmd_q, b_info);
2818 case CCP_CMD_COMBINED:
2819 result = ccp_crypto_aead(op[i], cmd_q, b_info);
2822 CCP_LOG_ERR("Unsupported cmd_id");
2825 if (unlikely(result < 0)) {
/* Give back the slots we reserved but did not consume. */
2826 rte_atomic64_add(&b_info->cmd_q->free_slots,
2827 (slots_req - b_info->desccnt));
2830 b_info->op[i] = op[i];
2834 b_info->b_idx = b_idx;
2835 b_info->total_nb_ops = total_nb_ops;
2836 b_info->tail_offset = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx *
2840 /* Write the new tail address back to the queue register */
2841 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE,
2842 b_info->tail_offset);
2843 /* Turn the queue back on using our cached control register */
2844 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2845 cmd_q->qcontrol | CMD_Q_RUN);
/* Hand the batch to the dequeue side. */
2847 rte_ring_enqueue(qp->processed_pkts, (void *)b_info);
2849 EVP_MD_CTX_destroy(auth_ctx);
/*
 * ccp_auth_dq_prepare() - post-process an auth/AEAD op at dequeue time.
 *
 * The hardware wrote its digest context into the session->auth.ctx_len
 * bytes appended to the end of m_src's last segment at enqueue; this
 * locates the digest within that area (session->auth.offset), byte-
 * reverses it into digest_le for SHA-engine types other than
 * SHA-1/224/256, then either verifies it against the op's digest
 * (CCP_AUTH_OP_VERIFY -> AUTH_FAILED on mismatch) or copies it out to
 * the op's digest buffer, and finally trims the scratch area off m_src.
 *
 * NOTE(review): mangled paste — lines are elided in this chunk; the
 * byte-swap branch presumably redirects addr/offset to digest_le
 * (the memcmp/memcpy below read addr + offset) — TODO confirm
 * against the full file.
 */
2853 static inline void ccp_auth_dq_prepare(struct rte_crypto_op *op)
2855 struct ccp_session *session;
2856 uint8_t *digest_data, *addr;
2857 struct rte_mbuf *m_last;
2858 int offset, digest_offset;
2859 uint8_t digest_le[64];
2861 session = (struct ccp_session *)get_sym_session_private_data(
2863 ccp_cryptodev_driver_id);
/* AEAD ops carry their digest in the aead fields, others in auth. */
2865 if (session->cmd_id == CCP_CMD_COMBINED) {
2866 digest_data = op->sym->aead.digest.data;
2867 digest_offset = op->sym->aead.data.offset +
2868 op->sym->aead.data.length;
2870 digest_data = op->sym->auth.digest.data;
2871 digest_offset = op->sym->auth.data.offset +
2872 op->sym->auth.data.length;
/* Start of the appended digest-context scratch in the last segment. */
2874 m_last = rte_pktmbuf_lastseg(op->sym->m_src);
2875 addr = (uint8_t *)((char *)m_last->buf_addr + m_last->data_off +
2876 m_last->data_len - session->auth.ctx_len);
2879 offset = session->auth.offset;
2881 if (session->auth.engine == CCP_ENGINE_SHA)
2882 if ((session->auth.ut.sha_type != CCP_SHA_TYPE_1) &&
2883 (session->auth.ut.sha_type != CCP_SHA_TYPE_224) &&
2884 (session->auth.ut.sha_type != CCP_SHA_TYPE_256)) {
2885 /* All other algorithms require byte
/* Reverse the digest bytes into digest_le (host byte order). */
2890 offset = session->auth.ctx_len -
2891 session->auth.offset - 1;
2892 for (i = 0; i < session->auth.digest_length; i++)
2893 digest_le[i] = addr[offset - i];
2898 op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
2899 if (session->auth.op == CCP_AUTH_OP_VERIFY) {
2900 if (memcmp(addr + offset, digest_data,
2901 session->auth.digest_length) != 0)
2902 op->status = RTE_CRYPTO_OP_STATUS_AUTH_FAILED;
/* Generate path: fall back to m_dst when no digest buffer given. */
2905 if (unlikely(digest_data == 0))
2906 digest_data = rte_pktmbuf_mtod_offset(
2907 op->sym->m_dst, uint8_t *,
2909 rte_memcpy(digest_data, addr + offset,
2910 session->auth.digest_length);
2912 /* Trim area used for digest from mbuf. */
2913 rte_pktmbuf_trim(op->sym->m_src,
2914 session->auth.ctx_len);
/*
 * ccp_prepare_ops() - finalize up to min(nb_ops, b_info->opcnt)
 * completed ops from a batch into the caller's op_d array.
 *
 * Per session->cmd_id: pure ciphers are marked SUCCESS outright; hash
 * paths run ccp_auth_dq_prepare() (or the OpenSSL cpu_crypto_auth when
 * session->auth_opt is set) to verify/copy digests.  Decrements
 * b_info->opcnt by the number of ops handed out.
 *
 * NOTE(review): mangled paste — the signature tail (nb_ops), the i /
 * min_ops declarations, break lines, error-exit paths and braces are
 * elided; the EVP_MD_CTX created here is presumably passed to
 * cpu_crypto_auth on an elided argument line — TODO confirm.
 */
2918 ccp_prepare_ops(struct ccp_qp *qp,
2919 struct rte_crypto_op **op_d,
2920 struct ccp_batch_info *b_info,
2924 struct ccp_session *session;
2926 EVP_MD_CTX *auth_ctx = NULL;
2928 auth_ctx = EVP_MD_CTX_create();
2929 if (unlikely(!auth_ctx)) {
2930 CCP_LOG_ERR("Unable to create auth ctx");
/* Hand out at most what the caller asked for and the batch holds. */
2933 min_ops = RTE_MIN(nb_ops, b_info->opcnt);
2935 for (i = b_info->b_idx; i < min_ops; i++) {
/* op_idx tracks how far into the batch earlier calls progressed. */
2936 op_d[i] = b_info->op[b_info->b_idx + b_info->op_idx++];
2937 session = (struct ccp_session *)get_sym_session_private_data(
2938 op_d[i]->sym->session,
2939 ccp_cryptodev_driver_id);
2940 switch (session->cmd_id) {
2941 case CCP_CMD_CIPHER:
2942 op_d[i]->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
2945 if (session->auth_opt == 0)
2946 ccp_auth_dq_prepare(op_d[i]);
2948 case CCP_CMD_CIPHER_HASH:
2949 if (session->auth_opt)
2950 cpu_crypto_auth(qp, op_d[i],
2953 ccp_auth_dq_prepare(op_d[i]);
2955 case CCP_CMD_HASH_CIPHER:
/* CPU-auth already ran at enqueue for hash-then-cipher. */
2956 if (session->auth_opt)
2957 op_d[i]->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
2959 ccp_auth_dq_prepare(op_d[i]);
2961 case CCP_CMD_COMBINED:
2962 ccp_auth_dq_prepare(op_d[i]);
2965 CCP_LOG_ERR("Unsupported cmd_id");
2969 EVP_MD_CTX_destroy(auth_ctx);
2970 b_info->opcnt -= min_ops;
2975 process_ops_to_dequeue(struct ccp_qp *qp,
2976 struct rte_crypto_op **op,
2978 uint16_t *total_nb_ops)
2980 struct ccp_batch_info *b_info;
2981 uint32_t cur_head_offset;
2983 if (qp->b_info != NULL) {
2984 b_info = qp->b_info;
2985 if (unlikely(b_info->op_idx > 0))
2987 } else if (rte_ring_dequeue(qp->processed_pkts,
2991 if (b_info->auth_ctr == b_info->opcnt)
2993 *total_nb_ops = b_info->total_nb_ops;
2994 cur_head_offset = CCP_READ_REG(b_info->cmd_q->reg_base,
2995 CMD_Q_HEAD_LO_BASE);
2997 if (b_info->head_offset < b_info->tail_offset) {
2998 if ((cur_head_offset >= b_info->head_offset) &&
2999 (cur_head_offset < b_info->tail_offset)) {
3000 qp->b_info = b_info;
3003 } else if (b_info->tail_offset != b_info->head_offset) {
3004 if ((cur_head_offset >= b_info->head_offset) ||
3005 (cur_head_offset < b_info->tail_offset)) {
3006 qp->b_info = b_info;
3013 *total_nb_ops = b_info->total_nb_ops;
3014 nb_ops = ccp_prepare_ops(qp, op, b_info, nb_ops);
3015 rte_atomic64_add(&b_info->cmd_q->free_slots, b_info->desccnt);
3016 b_info->desccnt = 0;
3017 if (b_info->opcnt > 0) {
3018 qp->b_info = b_info;
3020 rte_mempool_put(qp->batch_mp, (void *)b_info);