1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2018 Advanced Micro Devices, Inc. All rights reserved.
10 #include <sys/queue.h>
11 #include <sys/types.h>
13 #include <openssl/sha.h>
14 #include <openssl/cmac.h> /*sub key apis*/
15 #include <openssl/evp.h> /*sub key apis*/
17 #include <rte_hexdump.h>
18 #include <rte_memzone.h>
19 #include <rte_malloc.h>
20 #include <rte_memory.h>
21 #include <rte_spinlock.h>
22 #include <rte_string_fns.h>
23 #include <cryptodev_pmd.h>
26 #include "ccp_crypto.h"
28 #include "ccp_pmd_private.h"
30 #include <openssl/conf.h>
31 #include <openssl/err.h>
32 #include <openssl/hmac.h>
/* Set by the PMD probe code; selects address translation for DMA below:
 * 2 => rte_mem_virt2iova(), otherwise rte_mem_virt2phy(). */
34 extern int iommu_mode;
36 /* SHA initial context values */
/* Per-algorithm initial hash state loaded into the CCP SHA engine as the
 * starting partial-hash context. NOTE(review): initializer values are
 * elided in this excerpt — confirm against the FIPS 180-4 H(0) constants. */
37 uint32_t ccp_sha1_init[SHA_COMMON_DIGEST_SIZE / sizeof(uint32_t)] = {
44 uint32_t ccp_sha224_init[SHA256_DIGEST_SIZE / sizeof(uint32_t)] = {
51 uint32_t ccp_sha256_init[SHA256_DIGEST_SIZE / sizeof(uint32_t)] = {
58 uint64_t ccp_sha384_init[SHA512_DIGEST_SIZE / sizeof(uint64_t)] = {
65 uint64_t ccp_sha512_init[SHA512_DIGEST_SIZE / sizeof(uint64_t)] = {
/* Two alternate definitions of SHA3_CONST; presumably selected by an elided
 * #if/#else on the platform's 64-bit literal suffix — TODO confirm. */
73 #define SHA3_CONST(x) x
75 #define SHA3_CONST(x) x##L
78 /** 'Words' here refers to uint64_t */
/* Keccak-f[1600] state width: 1600 bits = 200 bytes = 25 64-bit lanes. */
79 #define SHA3_KECCAK_SPONGE_WORDS \
80 (((1600) / 8) / sizeof(uint64_t))
/* Incremental SHA-3 hashing state. NOTE(review): several members (e.g. the
 * 'saved' partial-word buffer used by sha3_Update below) and the closing
 * "} sha3_context;" are elided from this excerpt. */
81 typedef struct sha3_context_ {
84 * The portion of the input message that we
/* 25-lane Keccak state; aliased byte-wise by 'sb' below. */
88 uint64_t s[SHA3_KECCAK_SPONGE_WORDS];
90 uint8_t sb[SHA3_KECCAK_SPONGE_WORDS * 8];
91 /**total 200 ctx size**/
/* Next byte position within the partially-assembled input word. */
93 unsigned int byteIndex;
95 * 0..7--the next byte after the set one
96 * (starts from 0; 0--none are buffered)
/* Next lane of 's' to absorb input into. */
98 unsigned int wordIndex;
100 * 0..24--the next word to integrate input
/* Capacity c in 64-bit words; rate r = SHA3_KECCAK_SPONGE_WORDS - c. */
103 unsigned int capacityWords;
105 * the double size of the hash output in
106 * words (e.g. 16 for Keccak 512)
/* 64-bit rotate-left. NOTE(review): y must be in 1..63 — a shift by the
 * full type width would be undefined behavior; the rotation tables below
 * only supply values in that range. */
111 #define SHA3_ROTL64(x, y) \
112 (((x) << (y)) | ((x) >> ((sizeof(uint64_t)*8) - (y))))
/* Keccak-f round constants (iota step), one per round of the 24 rounds.
 * NOTE(review): the closing "};" of each table is elided in this excerpt. */
115 static const uint64_t keccakf_rndc[24] = {
116 SHA3_CONST(0x0000000000000001UL), SHA3_CONST(0x0000000000008082UL),
117 SHA3_CONST(0x800000000000808aUL), SHA3_CONST(0x8000000080008000UL),
118 SHA3_CONST(0x000000000000808bUL), SHA3_CONST(0x0000000080000001UL),
119 SHA3_CONST(0x8000000080008081UL), SHA3_CONST(0x8000000000008009UL),
120 SHA3_CONST(0x000000000000008aUL), SHA3_CONST(0x0000000000000088UL),
121 SHA3_CONST(0x0000000080008009UL), SHA3_CONST(0x000000008000000aUL),
122 SHA3_CONST(0x000000008000808bUL), SHA3_CONST(0x800000000000008bUL),
123 SHA3_CONST(0x8000000000008089UL), SHA3_CONST(0x8000000000008003UL),
124 SHA3_CONST(0x8000000000008002UL), SHA3_CONST(0x8000000000000080UL),
125 SHA3_CONST(0x000000000000800aUL), SHA3_CONST(0x800000008000000aUL),
126 SHA3_CONST(0x8000000080008081UL), SHA3_CONST(0x8000000000008080UL),
127 SHA3_CONST(0x0000000080000001UL), SHA3_CONST(0x8000000080008008UL)
/* Per-step rotation offsets (rho step). */
130 static const unsigned int keccakf_rotc[24] = {
131 1, 3, 6, 10, 15, 21, 28, 36, 45, 55, 2, 14, 27, 41, 56, 8, 25, 43, 62,
/* Lane permutation order (pi step). */
135 static const unsigned int keccakf_piln[24] = {
136 10, 7, 11, 17, 18, 3, 5, 16, 8, 21, 24, 4, 15, 23, 19, 13, 12, 2, 20,
/* Map an rte_crypto transform chain onto the CCP command ordering:
 * AUTH alone / AUTH->CIPHER, CIPHER alone / CIPHER->AUTH, or AEAD.
 * 'res' stays CCP_CMD_NOT_SUPPORTED for anything else and is presumably
 * returned at the elided end of the function — TODO confirm. */
140 static enum ccp_cmd_order
141 ccp_get_cmd_id(const struct rte_crypto_sym_xform *xform)
143 enum ccp_cmd_order res = CCP_CMD_NOT_SUPPORTED;
147 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH) {
/* Auth-only return is on an elided line here. */
148 if (xform->next == NULL)
150 else if (xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
151 return CCP_CMD_HASH_CIPHER;
153 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER) {
154 if (xform->next == NULL)
155 return CCP_CMD_CIPHER;
156 else if (xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
157 return CCP_CMD_CIPHER_HASH;
159 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD)
160 return CCP_CMD_COMBINED;
164 /* partial hash using openssl */
/* Run one SHA-1 block transform over data_in and copy the raw OpenSSL
 * context state out as the partial hash (used for HMAC ipad/opad
 * precompute). Copying from &ctx relies on SHA_CTX keeping its digest
 * words at the start of the struct. Error-return lines are elided. */
165 static int partial_hash_sha1(uint8_t *data_in, uint8_t *data_out)
169 if (!SHA1_Init(&ctx))
171 SHA1_Transform(&ctx, data_in);
172 rte_memcpy(data_out, &ctx, SHA_DIGEST_LENGTH);
/* Same as partial_hash_sha1 but for SHA-224: SHA-224 shares the SHA-256
 * transform and context, so the full SHA256_DIGEST_LENGTH state is copied. */
176 static int partial_hash_sha224(uint8_t *data_in, uint8_t *data_out)
180 if (!SHA224_Init(&ctx))
182 SHA256_Transform(&ctx, data_in);
183 rte_memcpy(data_out, &ctx,
184 SHA256_DIGEST_LENGTH);
/* One SHA-256 block transform; raw context state copied out as the
 * partial hash for the HMAC precompute. */
188 static int partial_hash_sha256(uint8_t *data_in, uint8_t *data_out)
192 if (!SHA256_Init(&ctx))
194 SHA256_Transform(&ctx, data_in);
195 rte_memcpy(data_out, &ctx,
196 SHA256_DIGEST_LENGTH);
/* One SHA-384 block transform (shares the SHA-512 transform/context);
 * the full SHA512_DIGEST_LENGTH state is copied out. */
200 static int partial_hash_sha384(uint8_t *data_in, uint8_t *data_out)
204 if (!SHA384_Init(&ctx))
206 SHA512_Transform(&ctx, data_in);
207 rte_memcpy(data_out, &ctx,
208 SHA512_DIGEST_LENGTH);
/* One SHA-512 block transform; raw context state copied out as the
 * partial hash for the HMAC precompute. */
212 static int partial_hash_sha512(uint8_t *data_in, uint8_t *data_out)
216 if (!SHA512_Init(&ctx))
218 SHA512_Transform(&ctx, data_in);
219 rte_memcpy(data_out, &ctx,
220 SHA512_DIGEST_LENGTH);
/* Keccak-f[1600] permutation over the 25-lane state, 24 rounds of
 * theta, rho+pi, chi, iota. Several interior lines (temporaries, the
 * rho/pi lane swap body) are elided in this excerpt. */
225 keccakf(uint64_t s[25])
229 #define KECCAK_ROUNDS 24
231 for (round = 0; round < KECCAK_ROUNDS; round++) {
/* Theta: column parities ... */
234 for (i = 0; i < 5; i++)
235 bc[i] = s[i] ^ s[i + 5] ^ s[i + 10] ^ s[i + 15] ^
/* ... then mix each column with its neighbours' parity. */
238 for (i = 0; i < 5; i++) {
239 t = bc[(i + 4) % 5] ^ SHA3_ROTL64(bc[(i + 1) % 5], 1);
240 for (j = 0; j < 25; j += 5)
/* Rho + pi: rotate and permute lanes per the keccakf_rotc/piln tables. */
246 for (i = 0; i < 24; i++) {
249 s[j] = SHA3_ROTL64(t, keccakf_rotc[i]);
/* Chi: nonlinear row mixing. */
254 for (j = 0; j < 25; j += 5) {
255 for (i = 0; i < 5; i++)
257 for (i = 0; i < 5; i++)
258 s[j + i] ^= (~bc[(i + 1) % 5]) &
/* Iota: inject the round constant into lane 0. */
263 s[0] ^= keccakf_rndc[round];
/* Initialize a sha3_context for SHA3-224: zero the state and set the
 * capacity to 2*224 bits expressed in 64-bit words (= 7). */
268 sha3_Init224(void *priv)
270 sha3_context *ctx = (sha3_context *) priv;
272 memset(ctx, 0, sizeof(*ctx));
273 ctx->capacityWords = 2 * 224 / (8 * sizeof(uint64_t));
/* Initialize a sha3_context for SHA3-256 (capacity = 2*256 bits = 8 words). */
277 sha3_Init256(void *priv)
279 sha3_context *ctx = (sha3_context *) priv;
281 memset(ctx, 0, sizeof(*ctx));
282 ctx->capacityWords = 2 * 256 / (8 * sizeof(uint64_t));
/* Initialize a sha3_context for SHA3-384 (capacity = 2*384 bits = 12 words). */
286 sha3_Init384(void *priv)
288 sha3_context *ctx = (sha3_context *) priv;
290 memset(ctx, 0, sizeof(*ctx));
291 ctx->capacityWords = 2 * 384 / (8 * sizeof(uint64_t));
/* Initialize a sha3_context for SHA3-512 (capacity = 2*512 bits = 16 words). */
295 sha3_Init512(void *priv)
297 sha3_context *ctx = (sha3_context *) priv;
299 memset(ctx, 0, sizeof(*ctx));
300 ctx->capacityWords = 2 * 512 / (8 * sizeof(uint64_t));
304 /* This is simply the 'update' with the padding block.
305 * The padding block is 0x01 || 0x00* || 0x80. First 0x01 and last 0x80
306 * bytes are always present, but they can be the same byte.
/* Absorb 'len' bytes of input into the sponge. Partial words are staged
 * in ctx->saved, assembled little-endian; when a full rate's worth of
 * lanes has been XORed in, keccakf() is invoked (call sites elided in
 * this excerpt, inside the wordIndex-wrap branches). */
309 sha3_Update(void *priv, void const *bufIn, size_t len)
311 sha3_context *ctx = (sha3_context *) priv;
/* Bytes still needed to complete the currently-buffered word. */
312 unsigned int old_tail = (8 - ctx->byteIndex) & 7;
316 const uint8_t *buf = bufIn;
/* Input too short to even finish the buffered word: stash and return. */
318 if (len < old_tail) {
320 ctx->saved |= (uint64_t) (*(buf++)) <<
321 ((ctx->byteIndex++) * 8);
/* Finish the buffered word, fold it into the state... */
328 ctx->saved |= (uint64_t) (*(buf++)) <<
329 ((ctx->byteIndex++) * 8);
331 ctx->s[ctx->wordIndex] ^= ctx->saved;
/* ...and permute once the rate portion of the state is full. */
334 if (++ctx->wordIndex ==
335 (SHA3_KECCAK_SPONGE_WORDS - ctx->capacityWords)) {
/* Bulk path: whole 64-bit words, then a trailing remainder. */
341 words = len / sizeof(uint64_t);
342 tail = len - words * sizeof(uint64_t);
344 for (i = 0; i < words; i++, buf += sizeof(uint64_t)) {
/* Assemble each lane little-endian, independent of host byte order. */
345 const uint64_t t = (uint64_t) (buf[0]) |
346 ((uint64_t) (buf[1]) << 8 * 1) |
347 ((uint64_t) (buf[2]) << 8 * 2) |
348 ((uint64_t) (buf[3]) << 8 * 3) |
349 ((uint64_t) (buf[4]) << 8 * 4) |
350 ((uint64_t) (buf[5]) << 8 * 5) |
351 ((uint64_t) (buf[6]) << 8 * 6) |
352 ((uint64_t) (buf[7]) << 8 * 7);
353 ctx->s[ctx->wordIndex] ^= t;
354 if (++ctx->wordIndex ==
355 (SHA3_KECCAK_SPONGE_WORDS - ctx->capacityWords)) {
/* Buffer any trailing bytes into 'saved' for the next call. */
362 ctx->saved |= (uint64_t) (*(buf++)) << ((ctx->byteIndex++) * 8);
/* Compute a SHA3-224 partial hash of one block of data_in and emit the
 * 200-byte sponge state byte-reversed (as the CCP engine expects) into
 * data_out. The sha3_Init224() call and the rte_free(ctx)/return lines
 * are elided in this excerpt. */
365 int partial_hash_sha3_224(uint8_t *data_in, uint8_t *data_out)
370 ctx = rte_zmalloc("sha3-ctx", sizeof(sha3_context), 0);
372 CCP_LOG_ERR("sha3-ctx creation failed");
376 sha3_Update(ctx, data_in, SHA3_224_BLOCK_SIZE);
377 for (i = 0; i < CCP_SHA3_CTX_SIZE; i++, data_out++)
378 *data_out = ctx->sb[CCP_SHA3_CTX_SIZE - i - 1];
/* SHA3-256 variant of the partial-hash helper above: absorb one block,
 * emit the byte-reversed sponge state. Init/free lines elided. */
384 int partial_hash_sha3_256(uint8_t *data_in, uint8_t *data_out)
389 ctx = rte_zmalloc("sha3-ctx", sizeof(sha3_context), 0);
391 CCP_LOG_ERR("sha3-ctx creation failed");
395 sha3_Update(ctx, data_in, SHA3_256_BLOCK_SIZE);
396 for (i = 0; i < CCP_SHA3_CTX_SIZE; i++, data_out++)
397 *data_out = ctx->sb[CCP_SHA3_CTX_SIZE - i - 1];
/* SHA3-384 variant of the partial-hash helper above: absorb one block,
 * emit the byte-reversed sponge state. Init/free lines elided. */
403 int partial_hash_sha3_384(uint8_t *data_in, uint8_t *data_out)
408 ctx = rte_zmalloc("sha3-ctx", sizeof(sha3_context), 0);
410 CCP_LOG_ERR("sha3-ctx creation failed");
414 sha3_Update(ctx, data_in, SHA3_384_BLOCK_SIZE);
415 for (i = 0; i < CCP_SHA3_CTX_SIZE; i++, data_out++)
416 *data_out = ctx->sb[CCP_SHA3_CTX_SIZE - i - 1];
/* SHA3-512 variant of the partial-hash helper above: absorb one block,
 * emit the byte-reversed sponge state. Init/free lines elided. */
422 int partial_hash_sha3_512(uint8_t *data_in, uint8_t *data_out)
427 ctx = rte_zmalloc("sha3-ctx", sizeof(sha3_context), 0);
429 CCP_LOG_ERR("sha3-ctx creation failed");
433 sha3_Update(ctx, data_in, SHA3_512_BLOCK_SIZE);
434 for (i = 0; i < CCP_SHA3_CTX_SIZE; i++, data_out++)
435 *data_out = ctx->sb[CCP_SHA3_CTX_SIZE - i - 1];
/* Precompute the HMAC inner/outer partial hashes for the session:
 * build ipad/opad from the key, hash one block of each, and store the
 * results (word- or byte-reversed for the CCP engine) into
 * sess->auth.pre_compute — ipad hash at offset 0, opad hash at
 * sess->auth.ctx_len. Returns nonzero on failure (exact return/break
 * lines are elided from this excerpt). */
441 static int generate_partial_hash(struct ccp_session *sess)
444 uint8_t ipad[sess->auth.block_size];
445 uint8_t opad[sess->auth.block_size];
446 uint8_t *ipad_t, *opad_t;
447 uint32_t *hash_value_be32, hash_temp32[8];
448 uint64_t *hash_value_be64, hash_temp64[8];
450 uint8_t *hash_value_sha3;
452 opad_t = ipad_t = (uint8_t *)sess->auth.key;
454 hash_value_be32 = (uint32_t *)((uint8_t *)sess->auth.pre_compute);
455 hash_value_be64 = (uint64_t *)((uint8_t *)sess->auth.pre_compute);
457 /* considering key size is always equal to block size of algorithm */
458 for (i = 0; i < sess->auth.block_size; i++) {
459 ipad[i] = (ipad_t[i] ^ HMAC_IPAD_VALUE);
460 opad[i] = (opad_t[i] ^ HMAC_OPAD_VALUE);
/* SHA-1/224/256 store 32-bit words, SHA-384/512 64-bit words — in both
 * cases written word-reversed; SHA-3 helpers already emit CCP order. */
463 switch (sess->auth.algo) {
464 case CCP_AUTH_ALGO_SHA1_HMAC:
465 count = SHA1_DIGEST_SIZE >> 2;
467 if (partial_hash_sha1(ipad, (uint8_t *)hash_temp32))
469 for (i = 0; i < count; i++, hash_value_be32++)
470 *hash_value_be32 = hash_temp32[count - 1 - i];
472 hash_value_be32 = (uint32_t *)((uint8_t *)sess->auth.pre_compute
473 + sess->auth.ctx_len);
474 if (partial_hash_sha1(opad, (uint8_t *)hash_temp32))
476 for (i = 0; i < count; i++, hash_value_be32++)
477 *hash_value_be32 = hash_temp32[count - 1 - i];
479 case CCP_AUTH_ALGO_SHA224_HMAC:
480 count = SHA256_DIGEST_SIZE >> 2;
482 if (partial_hash_sha224(ipad, (uint8_t *)hash_temp32))
484 for (i = 0; i < count; i++, hash_value_be32++)
485 *hash_value_be32 = hash_temp32[count - 1 - i];
487 hash_value_be32 = (uint32_t *)((uint8_t *)sess->auth.pre_compute
488 + sess->auth.ctx_len);
489 if (partial_hash_sha224(opad, (uint8_t *)hash_temp32))
491 for (i = 0; i < count; i++, hash_value_be32++)
492 *hash_value_be32 = hash_temp32[count - 1 - i];
494 case CCP_AUTH_ALGO_SHA3_224_HMAC:
495 hash_value_sha3 = sess->auth.pre_compute;
496 if (partial_hash_sha3_224(ipad, hash_value_sha3))
499 hash_value_sha3 = (uint8_t *)(sess->auth.pre_compute
500 + sess->auth.ctx_len);
501 if (partial_hash_sha3_224(opad, hash_value_sha3))
504 case CCP_AUTH_ALGO_SHA256_HMAC:
505 count = SHA256_DIGEST_SIZE >> 2;
507 if (partial_hash_sha256(ipad, (uint8_t *)hash_temp32))
509 for (i = 0; i < count; i++, hash_value_be32++)
510 *hash_value_be32 = hash_temp32[count - 1 - i];
512 hash_value_be32 = (uint32_t *)((uint8_t *)sess->auth.pre_compute
513 + sess->auth.ctx_len);
514 if (partial_hash_sha256(opad, (uint8_t *)hash_temp32))
516 for (i = 0; i < count; i++, hash_value_be32++)
517 *hash_value_be32 = hash_temp32[count - 1 - i];
519 case CCP_AUTH_ALGO_SHA3_256_HMAC:
520 hash_value_sha3 = sess->auth.pre_compute;
521 if (partial_hash_sha3_256(ipad, hash_value_sha3))
524 hash_value_sha3 = (uint8_t *)(sess->auth.pre_compute
525 + sess->auth.ctx_len);
526 if (partial_hash_sha3_256(opad, hash_value_sha3))
529 case CCP_AUTH_ALGO_SHA384_HMAC:
530 count = SHA512_DIGEST_SIZE >> 3;
532 if (partial_hash_sha384(ipad, (uint8_t *)hash_temp64))
534 for (i = 0; i < count; i++, hash_value_be64++)
535 *hash_value_be64 = hash_temp64[count - 1 - i];
537 hash_value_be64 = (uint64_t *)((uint8_t *)sess->auth.pre_compute
538 + sess->auth.ctx_len);
539 if (partial_hash_sha384(opad, (uint8_t *)hash_temp64))
541 for (i = 0; i < count; i++, hash_value_be64++)
542 *hash_value_be64 = hash_temp64[count - 1 - i];
544 case CCP_AUTH_ALGO_SHA3_384_HMAC:
545 hash_value_sha3 = sess->auth.pre_compute;
546 if (partial_hash_sha3_384(ipad, hash_value_sha3))
549 hash_value_sha3 = (uint8_t *)(sess->auth.pre_compute
550 + sess->auth.ctx_len);
551 if (partial_hash_sha3_384(opad, hash_value_sha3))
554 case CCP_AUTH_ALGO_SHA512_HMAC:
555 count = SHA512_DIGEST_SIZE >> 3;
557 if (partial_hash_sha512(ipad, (uint8_t *)hash_temp64))
559 for (i = 0; i < count; i++, hash_value_be64++)
560 *hash_value_be64 = hash_temp64[count - 1 - i];
562 hash_value_be64 = (uint64_t *)((uint8_t *)sess->auth.pre_compute
563 + sess->auth.ctx_len);
564 if (partial_hash_sha512(opad, (uint8_t *)hash_temp64))
566 for (i = 0; i < count; i++, hash_value_be64++)
567 *hash_value_be64 = hash_temp64[count - 1 - i];
569 case CCP_AUTH_ALGO_SHA3_512_HMAC:
570 hash_value_sha3 = sess->auth.pre_compute;
571 if (partial_hash_sha3_512(ipad, hash_value_sha3))
574 hash_value_sha3 = (uint8_t *)(sess->auth.pre_compute
575 + sess->auth.ctx_len);
576 if (partial_hash_sha3_512(opad, hash_value_sha3))
/* default: unsupported algorithm. */
580 CCP_LOG_ERR("Invalid auth algo");
585 /* prepare temporary keys K1 and K2 */
/* CMAC subkey derivation (NIST SP 800-38B): k = l << 1 (with per-byte
 * carry propagation), then if l's MSB was set, XOR the constant Rb into
 * the last byte — 0x87 for a 16-byte (AES) block, 0x1b otherwise.
 * The shift-assignment and MSB-test lines are elided in this excerpt. */
586 static void prepare_key(unsigned char *k, unsigned char *l, int bl)
589 /* Shift block to left, including carry */
590 for (i = 0; i < bl; i++) {
592 if (i < bl - 1 && l[i + 1] & 0x80)
595 /* If MSB set fixup with R */
597 k[bl - 1] ^= bl == 16 ? 0x87 : 0x1b;
600 /* subkeys K1 and K2 generation for CMAC */
/* Derive CMAC subkeys K1/K2: encrypt a zero block with the session key
 * (AES-CBC with a zero IV == plain AES of the zero block) to get L,
 * then K1 = prepare_key(L), K2 = prepare_key(K1). Both are stored
 * byte-reversed into sess->auth.pre_compute, one per CCP_SB_BYTES slot.
 * Return statements and the loop-body assignments are elided here. */
602 generate_cmac_subkeys(struct ccp_session *sess)
604 const EVP_CIPHER *algo;
606 unsigned char *ccp_ctx;
609 unsigned char zero_iv[AES_BLOCK_SIZE] = {0};
610 unsigned char dst[2 * AES_BLOCK_SIZE] = {0};
611 unsigned char k1[AES_BLOCK_SIZE] = {0};
612 unsigned char k2[AES_BLOCK_SIZE] = {0};
614 if (sess->auth.ut.aes_type == CCP_AES_TYPE_128)
615 algo = EVP_aes_128_cbc();
616 else if (sess->auth.ut.aes_type == CCP_AES_TYPE_192)
617 algo = EVP_aes_192_cbc();
618 else if (sess->auth.ut.aes_type == CCP_AES_TYPE_256)
619 algo = EVP_aes_256_cbc();
621 CCP_LOG_ERR("Invalid CMAC type length");
625 ctx = EVP_CIPHER_CTX_new();
627 CCP_LOG_ERR("ctx creation failed");
630 if (EVP_EncryptInit(ctx, algo, (unsigned char *)sess->auth.key,
631 (unsigned char *)zero_iv) <= 0)
632 goto key_generate_err;
/* Disable padding: we encrypt exactly one full block. */
633 if (EVP_CIPHER_CTX_set_padding(ctx, 0) <= 0)
634 goto key_generate_err;
635 if (EVP_EncryptUpdate(ctx, dst, &dstlen, zero_iv,
636 AES_BLOCK_SIZE) <= 0)
637 goto key_generate_err;
638 if (EVP_EncryptFinal_ex(ctx, dst + dstlen, &totlen) <= 0)
639 goto key_generate_err;
641 memset(sess->auth.pre_compute, 0, CCP_SB_BYTES * 2);
/* Store K1 byte-reversed at the end of the first SB slot. */
643 ccp_ctx = (unsigned char *)(sess->auth.pre_compute + CCP_SB_BYTES - 1);
644 prepare_key(k1, dst, AES_BLOCK_SIZE);
645 for (i = 0; i < AES_BLOCK_SIZE; i++, ccp_ctx--)
/* Store K2 byte-reversed at the end of the second SB slot. */
648 ccp_ctx = (unsigned char *)(sess->auth.pre_compute +
649 (2 * CCP_SB_BYTES) - 1);
650 prepare_key(k2, k1, AES_BLOCK_SIZE);
651 for (i = 0; i < AES_BLOCK_SIZE; i++, ccp_ctx--)
654 EVP_CIPHER_CTX_free(ctx);
/* key_generate_err: cleanup path (label elided in this excerpt). */
659 CCP_LOG_ERR("CMAC Init failed");
663 /* configure session */
/* Fill in the cipher half of a CCP session from a CIPHER xform:
 * direction, key (also byte-reversed into key_ccp for the engine),
 * IV parameters, algorithm/mode/engine selection, and the DMA addresses
 * of the nonce and reversed key. Returns nonzero on unsupported
 * parameters (return lines elided in this excerpt). */
665 ccp_configure_session_cipher(struct ccp_session *sess,
666 const struct rte_crypto_sym_xform *xform)
668 const struct rte_crypto_cipher_xform *cipher_xform = NULL;
671 cipher_xform = &xform->cipher;
673 /* set cipher direction */
674 if (cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
675 sess->cipher.dir = CCP_CIPHER_DIR_ENCRYPT;
677 sess->cipher.dir = CCP_CIPHER_DIR_DECRYPT;
680 sess->cipher.key_length = cipher_xform->key.length;
681 rte_memcpy(sess->cipher.key, cipher_xform->key.data,
682 cipher_xform->key.length);
684 /* set iv parameters */
685 sess->iv.offset = cipher_xform->iv.offset;
686 sess->iv.length = cipher_xform->iv.length;
688 switch (cipher_xform->algo) {
689 case RTE_CRYPTO_CIPHER_AES_CTR:
690 sess->cipher.algo = CCP_CIPHER_ALGO_AES_CTR;
691 sess->cipher.um.aes_mode = CCP_AES_MODE_CTR;
692 sess->cipher.engine = CCP_ENGINE_AES;
694 case RTE_CRYPTO_CIPHER_AES_ECB:
/* NOTE(review): ECB reuses the CBC algo enum here with aes_mode=ECB —
 * presumably intentional (algo field reused); confirm against the
 * request-prepare path. */
695 sess->cipher.algo = CCP_CIPHER_ALGO_AES_CBC;
696 sess->cipher.um.aes_mode = CCP_AES_MODE_ECB;
697 sess->cipher.engine = CCP_ENGINE_AES;
699 case RTE_CRYPTO_CIPHER_AES_CBC:
700 sess->cipher.algo = CCP_CIPHER_ALGO_AES_CBC;
701 sess->cipher.um.aes_mode = CCP_AES_MODE_CBC;
702 sess->cipher.engine = CCP_ENGINE_AES;
704 case RTE_CRYPTO_CIPHER_3DES_CBC:
705 sess->cipher.algo = CCP_CIPHER_ALGO_3DES_CBC;
706 sess->cipher.um.des_mode = CCP_DES_MODE_CBC;
707 sess->cipher.engine = CCP_ENGINE_3DES;
710 CCP_LOG_ERR("Unsupported cipher algo");
/* Per-engine key-size validation and key byte-reversal for the engine. */
715 switch (sess->cipher.engine) {
717 if (sess->cipher.key_length == 16)
718 sess->cipher.ut.aes_type = CCP_AES_TYPE_128;
719 else if (sess->cipher.key_length == 24)
720 sess->cipher.ut.aes_type = CCP_AES_TYPE_192;
721 else if (sess->cipher.key_length == 32)
722 sess->cipher.ut.aes_type = CCP_AES_TYPE_256;
724 CCP_LOG_ERR("Invalid cipher key length");
727 for (i = 0; i < sess->cipher.key_length ; i++)
728 sess->cipher.key_ccp[sess->cipher.key_length - i - 1] =
731 case CCP_ENGINE_3DES:
732 if (sess->cipher.key_length == 16)
733 sess->cipher.ut.des_type = CCP_DES_TYPE_128;
734 else if (sess->cipher.key_length == 24)
735 sess->cipher.ut.des_type = CCP_DES_TYPE_192;
737 CCP_LOG_ERR("Invalid cipher key length");
/* 3DES: reverse bytes within each 8-byte subkey independently. */
740 for (j = 0, x = 0; j < sess->cipher.key_length/8; j++, x += 8)
741 for (i = 0; i < 8; i++)
742 sess->cipher.key_ccp[(8 + x) - i - 1] =
743 sess->cipher.key[i + x];
746 CCP_LOG_ERR("Invalid CCP Engine");
/* Resolve DMA addresses per the global iommu_mode. */
749 if (iommu_mode == 2) {
750 sess->cipher.nonce_phys = rte_mem_virt2iova(sess->cipher.nonce);
751 sess->cipher.key_phys = rte_mem_virt2iova(sess->cipher.key_ccp);
753 sess->cipher.nonce_phys = rte_mem_virt2phy(sess->cipher.nonce);
754 sess->cipher.key_phys = rte_mem_virt2phy(sess->cipher.key_ccp);
/* Fill in the auth half of a CCP session from an AUTH xform. For each
 * supported algorithm this sets engine/algo/type, context length and
 * digest offset, block size, and the key; HMAC variants additionally
 * precompute ipad/opad partial hashes via generate_partial_hash(), and
 * AES-CMAC derives K1/K2 via generate_cmac_subkeys(). When
 * sess->auth_opt is set, SHA-1/224/256/384/512 HMAC is handled in
 * software (no CCP engine fields set). Returns nonzero on error
 * (return/break lines elided in this excerpt; 'sha_ctx' is presumably a
 * file-scope staging buffer declared outside this view — TODO confirm). */
760 ccp_configure_session_auth(struct ccp_session *sess,
761 const struct rte_crypto_sym_xform *xform)
763 const struct rte_crypto_auth_xform *auth_xform = NULL;
766 auth_xform = &xform->auth;
768 sess->auth.digest_length = auth_xform->digest_length;
769 if (auth_xform->op == RTE_CRYPTO_AUTH_OP_GENERATE)
770 sess->auth.op = CCP_AUTH_OP_GENERATE;
772 sess->auth.op = CCP_AUTH_OP_VERIFY;
773 switch (auth_xform->algo) {
774 case RTE_CRYPTO_AUTH_MD5_HMAC:
775 if (sess->auth_opt) {
776 sess->auth.algo = CCP_AUTH_ALGO_MD5_HMAC;
777 sess->auth.offset = ((CCP_SB_BYTES << 1) -
779 sess->auth.key_length = auth_xform->key.length;
780 sess->auth.block_size = MD5_BLOCK_SIZE;
781 memset(sess->auth.key, 0, sess->auth.block_size);
782 rte_memcpy(sess->auth.key, auth_xform->key.data,
783 auth_xform->key.length);
785 return -1; /* HMAC MD5 not supported on CCP */
787 case RTE_CRYPTO_AUTH_SHA1:
788 sess->auth.engine = CCP_ENGINE_SHA;
789 sess->auth.algo = CCP_AUTH_ALGO_SHA1;
790 sess->auth.ut.sha_type = CCP_SHA_TYPE_1;
791 sess->auth.ctx = (void *)ccp_sha1_init;
792 sess->auth.ctx_len = CCP_SB_BYTES;
793 sess->auth.offset = CCP_SB_BYTES - SHA1_DIGEST_SIZE;
794 rte_memcpy(sha_ctx, sess->auth.ctx, SHA_COMMON_DIGEST_SIZE);
796 case RTE_CRYPTO_AUTH_SHA1_HMAC:
/* auth_opt: software HMAC path — key only, no engine precompute. */
797 if (sess->auth_opt) {
798 if (auth_xform->key.length > SHA1_BLOCK_SIZE)
800 sess->auth.algo = CCP_AUTH_ALGO_SHA1_HMAC;
801 sess->auth.offset = CCP_SB_BYTES - SHA1_DIGEST_SIZE;
802 sess->auth.block_size = SHA1_BLOCK_SIZE;
803 sess->auth.key_length = auth_xform->key.length;
804 memset(sess->auth.key, 0, sess->auth.block_size);
805 rte_memcpy(sess->auth.key, auth_xform->key.data,
806 auth_xform->key.length);
808 if (auth_xform->key.length > SHA1_BLOCK_SIZE)
810 sess->auth.engine = CCP_ENGINE_SHA;
811 sess->auth.algo = CCP_AUTH_ALGO_SHA1_HMAC;
812 sess->auth.ut.sha_type = CCP_SHA_TYPE_1;
813 sess->auth.ctx_len = CCP_SB_BYTES;
814 sess->auth.offset = CCP_SB_BYTES - SHA1_DIGEST_SIZE;
815 sess->auth.block_size = SHA1_BLOCK_SIZE;
816 sess->auth.key_length = auth_xform->key.length;
817 memset(sess->auth.key, 0, sess->auth.block_size);
818 memset(sess->auth.pre_compute, 0,
819 sess->auth.ctx_len << 1);
820 rte_memcpy(sess->auth.key, auth_xform->key.data,
821 auth_xform->key.length);
822 if (generate_partial_hash(sess))
826 case RTE_CRYPTO_AUTH_SHA224:
827 sess->auth.algo = CCP_AUTH_ALGO_SHA224;
828 sess->auth.engine = CCP_ENGINE_SHA;
829 sess->auth.ut.sha_type = CCP_SHA_TYPE_224;
830 sess->auth.ctx = (void *)ccp_sha224_init;
831 sess->auth.ctx_len = CCP_SB_BYTES;
832 sess->auth.offset = CCP_SB_BYTES - SHA224_DIGEST_SIZE;
833 rte_memcpy(sha_ctx, sess->auth.ctx, SHA256_DIGEST_SIZE);
835 case RTE_CRYPTO_AUTH_SHA224_HMAC:
836 if (sess->auth_opt) {
837 if (auth_xform->key.length > SHA224_BLOCK_SIZE)
839 sess->auth.algo = CCP_AUTH_ALGO_SHA224_HMAC;
840 sess->auth.offset = CCP_SB_BYTES - SHA224_DIGEST_SIZE;
841 sess->auth.block_size = SHA224_BLOCK_SIZE;
842 sess->auth.key_length = auth_xform->key.length;
843 memset(sess->auth.key, 0, sess->auth.block_size);
844 rte_memcpy(sess->auth.key, auth_xform->key.data,
845 auth_xform->key.length);
847 if (auth_xform->key.length > SHA224_BLOCK_SIZE)
849 sess->auth.algo = CCP_AUTH_ALGO_SHA224_HMAC;
850 sess->auth.engine = CCP_ENGINE_SHA;
851 sess->auth.ut.sha_type = CCP_SHA_TYPE_224;
852 sess->auth.ctx_len = CCP_SB_BYTES;
853 sess->auth.offset = CCP_SB_BYTES - SHA224_DIGEST_SIZE;
854 sess->auth.block_size = SHA224_BLOCK_SIZE;
855 sess->auth.key_length = auth_xform->key.length;
856 memset(sess->auth.key, 0, sess->auth.block_size);
857 memset(sess->auth.pre_compute, 0,
858 sess->auth.ctx_len << 1);
859 rte_memcpy(sess->auth.key, auth_xform->key.data,
860 auth_xform->key.length);
861 if (generate_partial_hash(sess))
/* SHA-3 plain digests need no precompute — context is all-zero state. */
865 case RTE_CRYPTO_AUTH_SHA3_224:
866 sess->auth.algo = CCP_AUTH_ALGO_SHA3_224;
867 sess->auth.engine = CCP_ENGINE_SHA;
868 sess->auth.ut.sha_type = CCP_SHA3_TYPE_224;
869 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
870 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA224_DIGEST_SIZE;
872 case RTE_CRYPTO_AUTH_SHA3_224_HMAC:
873 if (auth_xform->key.length > SHA3_224_BLOCK_SIZE)
875 sess->auth.algo = CCP_AUTH_ALGO_SHA3_224_HMAC;
876 sess->auth.engine = CCP_ENGINE_SHA;
877 sess->auth.ut.sha_type = CCP_SHA3_TYPE_224;
878 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
879 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA224_DIGEST_SIZE;
880 sess->auth.block_size = SHA3_224_BLOCK_SIZE;
881 sess->auth.key_length = auth_xform->key.length;
882 memset(sess->auth.key, 0, sess->auth.block_size);
883 memset(sess->auth.pre_compute, 0, 2 * sess->auth.ctx_len);
884 rte_memcpy(sess->auth.key, auth_xform->key.data,
885 auth_xform->key.length);
886 if (generate_partial_hash(sess))
889 case RTE_CRYPTO_AUTH_SHA256:
890 sess->auth.algo = CCP_AUTH_ALGO_SHA256;
891 sess->auth.engine = CCP_ENGINE_SHA;
892 sess->auth.ut.sha_type = CCP_SHA_TYPE_256;
893 sess->auth.ctx = (void *)ccp_sha256_init;
894 sess->auth.ctx_len = CCP_SB_BYTES;
895 sess->auth.offset = CCP_SB_BYTES - SHA256_DIGEST_SIZE;
896 rte_memcpy(sha_ctx, sess->auth.ctx, SHA256_DIGEST_SIZE);
898 case RTE_CRYPTO_AUTH_SHA256_HMAC:
899 if (sess->auth_opt) {
900 if (auth_xform->key.length > SHA256_BLOCK_SIZE)
902 sess->auth.algo = CCP_AUTH_ALGO_SHA256_HMAC;
903 sess->auth.offset = CCP_SB_BYTES - SHA256_DIGEST_SIZE;
904 sess->auth.block_size = SHA256_BLOCK_SIZE;
905 sess->auth.key_length = auth_xform->key.length;
906 memset(sess->auth.key, 0, sess->auth.block_size);
907 rte_memcpy(sess->auth.key, auth_xform->key.data,
908 auth_xform->key.length);
910 if (auth_xform->key.length > SHA256_BLOCK_SIZE)
912 sess->auth.algo = CCP_AUTH_ALGO_SHA256_HMAC;
913 sess->auth.engine = CCP_ENGINE_SHA;
914 sess->auth.ut.sha_type = CCP_SHA_TYPE_256;
915 sess->auth.ctx_len = CCP_SB_BYTES;
916 sess->auth.offset = CCP_SB_BYTES - SHA256_DIGEST_SIZE;
917 sess->auth.block_size = SHA256_BLOCK_SIZE;
918 sess->auth.key_length = auth_xform->key.length;
919 memset(sess->auth.key, 0, sess->auth.block_size);
920 memset(sess->auth.pre_compute, 0,
921 sess->auth.ctx_len << 1);
922 rte_memcpy(sess->auth.key, auth_xform->key.data,
923 auth_xform->key.length);
924 if (generate_partial_hash(sess))
928 case RTE_CRYPTO_AUTH_SHA3_256:
929 sess->auth.algo = CCP_AUTH_ALGO_SHA3_256;
930 sess->auth.engine = CCP_ENGINE_SHA;
931 sess->auth.ut.sha_type = CCP_SHA3_TYPE_256;
932 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
933 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA256_DIGEST_SIZE;
935 case RTE_CRYPTO_AUTH_SHA3_256_HMAC:
936 if (auth_xform->key.length > SHA3_256_BLOCK_SIZE)
938 sess->auth.algo = CCP_AUTH_ALGO_SHA3_256_HMAC;
939 sess->auth.engine = CCP_ENGINE_SHA;
940 sess->auth.ut.sha_type = CCP_SHA3_TYPE_256;
941 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
942 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA256_DIGEST_SIZE;
943 sess->auth.block_size = SHA3_256_BLOCK_SIZE;
944 sess->auth.key_length = auth_xform->key.length;
945 memset(sess->auth.key, 0, sess->auth.block_size);
946 memset(sess->auth.pre_compute, 0, 2 * sess->auth.ctx_len);
947 rte_memcpy(sess->auth.key, auth_xform->key.data,
948 auth_xform->key.length);
949 if (generate_partial_hash(sess))
/* SHA-384/512 use a double-size (2 * CCP_SB_BYTES) context. */
952 case RTE_CRYPTO_AUTH_SHA384:
953 sess->auth.algo = CCP_AUTH_ALGO_SHA384;
954 sess->auth.engine = CCP_ENGINE_SHA;
955 sess->auth.ut.sha_type = CCP_SHA_TYPE_384;
956 sess->auth.ctx = (void *)ccp_sha384_init;
957 sess->auth.ctx_len = CCP_SB_BYTES << 1;
958 sess->auth.offset = (CCP_SB_BYTES << 1) - SHA384_DIGEST_SIZE;
959 rte_memcpy(sha_ctx, sess->auth.ctx, SHA512_DIGEST_SIZE);
961 case RTE_CRYPTO_AUTH_SHA384_HMAC:
962 if (sess->auth_opt) {
963 if (auth_xform->key.length > SHA384_BLOCK_SIZE)
965 sess->auth.algo = CCP_AUTH_ALGO_SHA384_HMAC;
966 sess->auth.offset = ((CCP_SB_BYTES << 1) -
968 sess->auth.block_size = SHA384_BLOCK_SIZE;
969 sess->auth.key_length = auth_xform->key.length;
970 memset(sess->auth.key, 0, sess->auth.block_size);
971 rte_memcpy(sess->auth.key, auth_xform->key.data,
972 auth_xform->key.length);
974 if (auth_xform->key.length > SHA384_BLOCK_SIZE)
976 sess->auth.algo = CCP_AUTH_ALGO_SHA384_HMAC;
977 sess->auth.engine = CCP_ENGINE_SHA;
978 sess->auth.ut.sha_type = CCP_SHA_TYPE_384;
979 sess->auth.ctx_len = CCP_SB_BYTES << 1;
980 sess->auth.offset = ((CCP_SB_BYTES << 1) -
982 sess->auth.block_size = SHA384_BLOCK_SIZE;
983 sess->auth.key_length = auth_xform->key.length;
984 memset(sess->auth.key, 0, sess->auth.block_size);
985 memset(sess->auth.pre_compute, 0,
986 sess->auth.ctx_len << 1);
987 rte_memcpy(sess->auth.key, auth_xform->key.data,
988 auth_xform->key.length);
989 if (generate_partial_hash(sess))
993 case RTE_CRYPTO_AUTH_SHA3_384:
994 sess->auth.algo = CCP_AUTH_ALGO_SHA3_384;
995 sess->auth.engine = CCP_ENGINE_SHA;
996 sess->auth.ut.sha_type = CCP_SHA3_TYPE_384;
997 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
998 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA384_DIGEST_SIZE;
1000 case RTE_CRYPTO_AUTH_SHA3_384_HMAC:
1001 if (auth_xform->key.length > SHA3_384_BLOCK_SIZE)
1003 sess->auth.algo = CCP_AUTH_ALGO_SHA3_384_HMAC;
1004 sess->auth.engine = CCP_ENGINE_SHA;
1005 sess->auth.ut.sha_type = CCP_SHA3_TYPE_384;
1006 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
1007 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA384_DIGEST_SIZE;
1008 sess->auth.block_size = SHA3_384_BLOCK_SIZE;
1009 sess->auth.key_length = auth_xform->key.length;
1010 memset(sess->auth.key, 0, sess->auth.block_size);
1011 memset(sess->auth.pre_compute, 0, 2 * sess->auth.ctx_len);
1012 rte_memcpy(sess->auth.key, auth_xform->key.data,
1013 auth_xform->key.length);
1014 if (generate_partial_hash(sess))
1017 case RTE_CRYPTO_AUTH_SHA512:
1018 sess->auth.algo = CCP_AUTH_ALGO_SHA512;
1019 sess->auth.engine = CCP_ENGINE_SHA;
1020 sess->auth.ut.sha_type = CCP_SHA_TYPE_512;
1021 sess->auth.ctx = (void *)ccp_sha512_init;
1022 sess->auth.ctx_len = CCP_SB_BYTES << 1;
1023 sess->auth.offset = (CCP_SB_BYTES << 1) - SHA512_DIGEST_SIZE;
1024 rte_memcpy(sha_ctx, sess->auth.ctx, SHA512_DIGEST_SIZE);
1026 case RTE_CRYPTO_AUTH_SHA512_HMAC:
1027 if (sess->auth_opt) {
1028 if (auth_xform->key.length > SHA512_BLOCK_SIZE)
1030 sess->auth.algo = CCP_AUTH_ALGO_SHA512_HMAC;
1031 sess->auth.offset = ((CCP_SB_BYTES << 1) -
1032 SHA512_DIGEST_SIZE);
1033 sess->auth.block_size = SHA512_BLOCK_SIZE;
1034 sess->auth.key_length = auth_xform->key.length;
1035 memset(sess->auth.key, 0, sess->auth.block_size);
1036 rte_memcpy(sess->auth.key, auth_xform->key.data,
1037 auth_xform->key.length);
1039 if (auth_xform->key.length > SHA512_BLOCK_SIZE)
1041 sess->auth.algo = CCP_AUTH_ALGO_SHA512_HMAC;
1042 sess->auth.engine = CCP_ENGINE_SHA;
1043 sess->auth.ut.sha_type = CCP_SHA_TYPE_512;
1044 sess->auth.ctx_len = CCP_SB_BYTES << 1;
1045 sess->auth.offset = ((CCP_SB_BYTES << 1) -
1046 SHA512_DIGEST_SIZE);
1047 sess->auth.block_size = SHA512_BLOCK_SIZE;
1048 sess->auth.key_length = auth_xform->key.length;
1049 memset(sess->auth.key, 0, sess->auth.block_size);
1050 memset(sess->auth.pre_compute, 0,
1051 sess->auth.ctx_len << 1);
1052 rte_memcpy(sess->auth.key, auth_xform->key.data,
1053 auth_xform->key.length);
1054 if (generate_partial_hash(sess))
1058 case RTE_CRYPTO_AUTH_SHA3_512:
1059 sess->auth.algo = CCP_AUTH_ALGO_SHA3_512;
1060 sess->auth.engine = CCP_ENGINE_SHA;
1061 sess->auth.ut.sha_type = CCP_SHA3_TYPE_512;
1062 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
1063 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA512_DIGEST_SIZE;
1065 case RTE_CRYPTO_AUTH_SHA3_512_HMAC:
1066 if (auth_xform->key.length > SHA3_512_BLOCK_SIZE)
1068 sess->auth.algo = CCP_AUTH_ALGO_SHA3_512_HMAC;
1069 sess->auth.engine = CCP_ENGINE_SHA;
1070 sess->auth.ut.sha_type = CCP_SHA3_TYPE_512;
1071 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
1072 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA512_DIGEST_SIZE;
1073 sess->auth.block_size = SHA3_512_BLOCK_SIZE;
1074 sess->auth.key_length = auth_xform->key.length;
1075 memset(sess->auth.key, 0, sess->auth.block_size);
1076 memset(sess->auth.pre_compute, 0, 2 * sess->auth.ctx_len);
1077 rte_memcpy(sess->auth.key, auth_xform->key.data,
1078 auth_xform->key.length);
1079 if (generate_partial_hash(sess))
1082 case RTE_CRYPTO_AUTH_AES_CMAC:
1083 sess->auth.algo = CCP_AUTH_ALGO_AES_CMAC;
1084 sess->auth.engine = CCP_ENGINE_AES;
1085 sess->auth.um.aes_mode = CCP_AES_MODE_CMAC;
1086 sess->auth.key_length = auth_xform->key.length;
1087 /* padding and hash result */
1088 sess->auth.ctx_len = CCP_SB_BYTES << 1;
1089 sess->auth.offset = AES_BLOCK_SIZE;
1090 sess->auth.block_size = AES_BLOCK_SIZE;
1091 if (sess->auth.key_length == 16)
1092 sess->auth.ut.aes_type = CCP_AES_TYPE_128;
1093 else if (sess->auth.key_length == 24)
1094 sess->auth.ut.aes_type = CCP_AES_TYPE_192;
1095 else if (sess->auth.key_length == 32)
1096 sess->auth.ut.aes_type = CCP_AES_TYPE_256;
1098 CCP_LOG_ERR("Invalid CMAC key length");
/* Key is byte-reversed into key_ccp for the AES engine. */
1101 rte_memcpy(sess->auth.key, auth_xform->key.data,
1102 sess->auth.key_length);
1103 for (i = 0; i < sess->auth.key_length; i++)
1104 sess->auth.key_ccp[sess->auth.key_length - i - 1] =
1106 if (generate_cmac_subkeys(sess))
1110 CCP_LOG_ERR("Unsupported hash algo");
/* Fill in both cipher and auth halves of a CCP session from an AEAD
 * xform (only AES-GCM supported): key (byte-reversed for the engine),
 * direction, AAD/digest lengths, IV parameters, GCTR cipher + GHASH
 * auth engine settings, and DMA addresses. Returns nonzero on
 * unsupported parameters (return lines elided in this excerpt). */
1117 ccp_configure_session_aead(struct ccp_session *sess,
1118 const struct rte_crypto_sym_xform *xform)
1120 const struct rte_crypto_aead_xform *aead_xform = NULL;
1123 aead_xform = &xform->aead;
1125 sess->cipher.key_length = aead_xform->key.length;
1126 rte_memcpy(sess->cipher.key, aead_xform->key.data,
1127 aead_xform->key.length);
/* AEAD encrypt implies digest generation; decrypt implies verify. */
1129 if (aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
1130 sess->cipher.dir = CCP_CIPHER_DIR_ENCRYPT;
1131 sess->auth.op = CCP_AUTH_OP_GENERATE;
1133 sess->cipher.dir = CCP_CIPHER_DIR_DECRYPT;
1134 sess->auth.op = CCP_AUTH_OP_VERIFY;
1136 sess->aead_algo = aead_xform->algo;
1137 sess->auth.aad_length = aead_xform->aad_length;
1138 sess->auth.digest_length = aead_xform->digest_length;
1140 /* set iv parameters */
1141 sess->iv.offset = aead_xform->iv.offset;
1142 sess->iv.length = aead_xform->iv.length;
1144 switch (aead_xform->algo) {
1145 case RTE_CRYPTO_AEAD_AES_GCM:
1146 sess->cipher.algo = CCP_CIPHER_ALGO_AES_GCM;
1147 sess->cipher.um.aes_mode = CCP_AES_MODE_GCTR;
1148 sess->cipher.engine = CCP_ENGINE_AES;
1149 if (sess->cipher.key_length == 16)
1150 sess->cipher.ut.aes_type = CCP_AES_TYPE_128;
1151 else if (sess->cipher.key_length == 24)
1152 sess->cipher.ut.aes_type = CCP_AES_TYPE_192;
1153 else if (sess->cipher.key_length == 32)
1154 sess->cipher.ut.aes_type = CCP_AES_TYPE_256;
1156 CCP_LOG_ERR("Invalid aead key length");
1159 for (i = 0; i < sess->cipher.key_length; i++)
1160 sess->cipher.key_ccp[sess->cipher.key_length - i - 1] =
1161 sess->cipher.key[i];
1162 sess->auth.algo = CCP_AUTH_ALGO_AES_GCM;
1163 sess->auth.engine = CCP_ENGINE_AES;
1164 sess->auth.um.aes_mode = CCP_AES_MODE_GHASH;
1165 sess->auth.ctx_len = CCP_SB_BYTES;
1166 sess->auth.offset = 0;
1167 sess->auth.block_size = AES_BLOCK_SIZE;
1168 sess->cmd_id = CCP_CMD_COMBINED;
1171 CCP_LOG_ERR("Unsupported aead algo");
/* Resolve DMA addresses per the global iommu_mode. */
1174 if (iommu_mode == 2) {
1175 sess->cipher.nonce_phys = rte_mem_virt2iova(sess->cipher.nonce);
1176 sess->cipher.key_phys = rte_mem_virt2iova(sess->cipher.key_ccp);
1178 sess->cipher.nonce_phys = rte_mem_virt2phy(sess->cipher.nonce);
1179 sess->cipher.key_phys = rte_mem_virt2phy(sess->cipher.key_ccp);
1185 ccp_set_session_parameters(struct ccp_session *sess,
1186 const struct rte_crypto_sym_xform *xform,
1187 struct ccp_private *internals)
1189 const struct rte_crypto_sym_xform *cipher_xform = NULL;
1190 const struct rte_crypto_sym_xform *auth_xform = NULL;
1191 const struct rte_crypto_sym_xform *aead_xform = NULL;
1194 sess->auth_opt = internals->auth_opt;
1195 sess->cmd_id = ccp_get_cmd_id(xform);
1197 switch (sess->cmd_id) {
1198 case CCP_CMD_CIPHER:
1199 cipher_xform = xform;
1204 case CCP_CMD_CIPHER_HASH:
1205 cipher_xform = xform;
1206 auth_xform = xform->next;
1208 case CCP_CMD_HASH_CIPHER:
1210 cipher_xform = xform->next;
1212 case CCP_CMD_COMBINED:
1216 CCP_LOG_ERR("Unsupported cmd_id");
1220 /* Default IV length = 0 */
1221 sess->iv.length = 0;
1223 ret = ccp_configure_session_cipher(sess, cipher_xform);
1225 CCP_LOG_ERR("Invalid/unsupported cipher parameters");
1230 ret = ccp_configure_session_auth(sess, auth_xform);
1232 CCP_LOG_ERR("Invalid/unsupported auth parameters");
1237 ret = ccp_configure_session_aead(sess, aead_xform);
1239 CCP_LOG_ERR("Invalid/unsupported aead parameters");
1246 /* calculate CCP descriptors requirement */
1248 ccp_cipher_slot(struct ccp_session *session)
1252 switch (session->cipher.algo) {
1253 case CCP_CIPHER_ALGO_AES_CBC:
1255 /**< op + passthrough for iv */
1257 case CCP_CIPHER_ALGO_AES_ECB:
1261 case CCP_CIPHER_ALGO_AES_CTR:
1263 /**< op + passthrough for iv */
1265 case CCP_CIPHER_ALGO_3DES_CBC:
1267 /**< op + passthrough for iv */
1270 CCP_LOG_ERR("Unsupported cipher algo %d",
1271 session->cipher.algo);
1277 ccp_auth_slot(struct ccp_session *session)
1281 switch (session->auth.algo) {
1282 case CCP_AUTH_ALGO_SHA1:
1283 case CCP_AUTH_ALGO_SHA224:
1284 case CCP_AUTH_ALGO_SHA256:
1285 case CCP_AUTH_ALGO_SHA384:
1286 case CCP_AUTH_ALGO_SHA512:
1288 /**< op + lsb passthrough cpy to/from*/
1290 case CCP_AUTH_ALGO_MD5_HMAC:
1292 case CCP_AUTH_ALGO_SHA1_HMAC:
1293 case CCP_AUTH_ALGO_SHA224_HMAC:
1294 case CCP_AUTH_ALGO_SHA256_HMAC:
1295 if (session->auth_opt == 0)
1298 case CCP_AUTH_ALGO_SHA384_HMAC:
1299 case CCP_AUTH_ALGO_SHA512_HMAC:
1301 * 1. Load PHash1 = H(k ^ ipad); to LSB
1302 * 2. generate IHash = H(hash on meassage with PHash1
1304 * 3. Retrieve IHash 2 slots for 384/512
1305 * 4. Load Phash2 = H(k ^ opad); to LSB
1306 * 5. generate FHash = H(hash on Ihash with Phash2
1308 * 6. Retrieve HMAC output from LSB to host memory
1310 if (session->auth_opt == 0)
1313 case CCP_AUTH_ALGO_SHA3_224:
1314 case CCP_AUTH_ALGO_SHA3_256:
1315 case CCP_AUTH_ALGO_SHA3_384:
1316 case CCP_AUTH_ALGO_SHA3_512:
1318 /**< only op ctx and dst in host memory*/
1320 case CCP_AUTH_ALGO_SHA3_224_HMAC:
1321 case CCP_AUTH_ALGO_SHA3_256_HMAC:
1324 case CCP_AUTH_ALGO_SHA3_384_HMAC:
1325 case CCP_AUTH_ALGO_SHA3_512_HMAC:
1328 * 1. Op to Perform Ihash
1329 * 2. Retrieve result from LSB to host memory
1330 * 3. Perform final hash
1333 case CCP_AUTH_ALGO_AES_CMAC:
1337 * extra descriptor in padding case
1338 * (k1/k2(255:128) with iv(127:0))
1343 CCP_LOG_ERR("Unsupported auth algo %d",
1344 session->auth.algo);
1351 ccp_aead_slot(struct ccp_session *session)
1355 switch (session->aead_algo) {
1356 case RTE_CRYPTO_AEAD_AES_GCM:
1359 CCP_LOG_ERR("Unsupported aead algo %d",
1360 session->aead_algo);
1362 switch (session->auth.algo) {
1363 case CCP_AUTH_ALGO_AES_GCM:
1369 * 4. Reload passthru
1374 CCP_LOG_ERR("Unsupported combined auth ALGO %d",
1375 session->auth.algo);
1381 ccp_compute_slot_count(struct ccp_session *session)
1385 switch (session->cmd_id) {
1386 case CCP_CMD_CIPHER:
1387 count = ccp_cipher_slot(session);
1390 count = ccp_auth_slot(session);
1392 case CCP_CMD_CIPHER_HASH:
1393 case CCP_CMD_HASH_CIPHER:
1394 count = ccp_cipher_slot(session);
1395 count += ccp_auth_slot(session);
1397 case CCP_CMD_COMBINED:
1398 count = ccp_aead_slot(session);
1401 CCP_LOG_ERR("Unsupported cmd_id");
1409 algo_select(int sessalgo,
1410 const EVP_MD **algo)
1415 case CCP_AUTH_ALGO_MD5_HMAC:
1418 case CCP_AUTH_ALGO_SHA1_HMAC:
1421 case CCP_AUTH_ALGO_SHA224_HMAC:
1422 *algo = EVP_sha224();
1424 case CCP_AUTH_ALGO_SHA256_HMAC:
1425 *algo = EVP_sha256();
1427 case CCP_AUTH_ALGO_SHA384_HMAC:
1428 *algo = EVP_sha384();
1430 case CCP_AUTH_ALGO_SHA512_HMAC:
1431 *algo = EVP_sha512();
1441 process_cpu_auth_hmac(uint8_t *src, uint8_t *dst,
1442 __rte_unused uint8_t *iv,
1450 unsigned char temp_dst[64];
1452 if (EVP_DigestSignInit(ctx, NULL, algo, NULL, pkey) <= 0)
1453 goto process_auth_err;
1455 if (EVP_DigestSignUpdate(ctx, (char *)src, srclen) <= 0)
1456 goto process_auth_err;
1458 if (EVP_DigestSignFinal(ctx, temp_dst, &dstlen) <= 0)
1459 goto process_auth_err;
1461 memcpy(dst, temp_dst, d_len);
1464 CCP_LOG_ERR("Process cpu auth failed");
1468 static int cpu_crypto_auth(struct ccp_qp *qp,
1469 struct rte_crypto_op *op,
1470 struct ccp_session *sess,
1475 struct rte_mbuf *mbuf_src, *mbuf_dst;
1476 const EVP_MD *algo = NULL;
1479 algo_select(sess->auth.algo, &algo);
1480 pkey = EVP_PKEY_new_mac_key(EVP_PKEY_HMAC, NULL, sess->auth.key,
1481 sess->auth.key_length);
1482 mbuf_src = op->sym->m_src;
1483 mbuf_dst = op->sym->m_dst ? op->sym->m_dst : op->sym->m_src;
1484 srclen = op->sym->auth.data.length;
1485 src = rte_pktmbuf_mtod_offset(mbuf_src, uint8_t *,
1486 op->sym->auth.data.offset);
1488 if (sess->auth.op == CCP_AUTH_OP_VERIFY) {
1489 dst = qp->temp_digest;
1491 dst = op->sym->auth.digest.data;
1493 dst = rte_pktmbuf_mtod_offset(mbuf_dst, uint8_t *,
1494 op->sym->auth.data.offset +
1495 sess->auth.digest_length);
1498 status = process_cpu_auth_hmac(src, dst, NULL,
1502 sess->auth.digest_length);
1504 op->status = RTE_CRYPTO_OP_STATUS_ERROR;
1508 if (sess->auth.op == CCP_AUTH_OP_VERIFY) {
1509 if (memcmp(dst, op->sym->auth.digest.data,
1510 sess->auth.digest_length) != 0) {
1511 op->status = RTE_CRYPTO_OP_STATUS_AUTH_FAILED;
1513 op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
1516 op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
1518 EVP_PKEY_free(pkey);
1523 ccp_perform_passthru(struct ccp_passthru *pst,
1524 struct ccp_queue *cmd_q)
1526 struct ccp_desc *desc;
1527 union ccp_function function;
1529 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1531 CCP_CMD_ENGINE(desc) = CCP_ENGINE_PASSTHRU;
1533 CCP_CMD_SOC(desc) = 0;
1534 CCP_CMD_IOC(desc) = 0;
1535 CCP_CMD_INIT(desc) = 0;
1536 CCP_CMD_EOM(desc) = 0;
1537 CCP_CMD_PROT(desc) = 0;
1540 CCP_PT_BYTESWAP(&function) = pst->byte_swap;
1541 CCP_PT_BITWISE(&function) = pst->bit_mod;
1542 CCP_CMD_FUNCTION(desc) = function.raw;
1544 CCP_CMD_LEN(desc) = pst->len;
1547 CCP_CMD_SRC_LO(desc) = (uint32_t)(pst->src_addr);
1548 CCP_CMD_SRC_HI(desc) = high32_value(pst->src_addr);
1549 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1551 CCP_CMD_DST_LO(desc) = (uint32_t)(pst->dest_addr);
1552 CCP_CMD_DST_HI(desc) = 0;
1553 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SB;
1555 if (pst->bit_mod != CCP_PASSTHRU_BITWISE_NOOP)
1556 CCP_CMD_LSB_ID(desc) = cmd_q->sb_key;
1559 CCP_CMD_SRC_LO(desc) = (uint32_t)(pst->src_addr);
1560 CCP_CMD_SRC_HI(desc) = 0;
1561 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SB;
1563 CCP_CMD_DST_LO(desc) = (uint32_t)(pst->dest_addr);
1564 CCP_CMD_DST_HI(desc) = high32_value(pst->dest_addr);
1565 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1568 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
1572 ccp_perform_hmac(struct rte_crypto_op *op,
1573 struct ccp_queue *cmd_q)
1576 struct ccp_session *session;
1577 union ccp_function function;
1578 struct ccp_desc *desc;
1580 phys_addr_t src_addr, dest_addr, dest_addr_t;
1581 struct ccp_passthru pst;
1582 uint64_t auth_msg_bits;
1586 session = (struct ccp_session *)get_sym_session_private_data(
1588 ccp_cryptodev_driver_id);
1589 addr = session->auth.pre_compute;
1591 src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
1592 op->sym->auth.data.offset);
1593 append_ptr = (void *)rte_pktmbuf_append(op->sym->m_src,
1594 session->auth.ctx_len);
1595 if (iommu_mode == 2) {
1596 dest_addr = (phys_addr_t)rte_mem_virt2iova(append_ptr);
1597 pst.src_addr = (phys_addr_t)rte_mem_virt2iova((void *)addr);
1599 dest_addr = (phys_addr_t)rte_mem_virt2phy(append_ptr);
1600 pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *)addr);
1602 dest_addr_t = dest_addr;
1604 /** Load PHash1 to LSB*/
1605 pst.dest_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1606 pst.len = session->auth.ctx_len;
1608 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1609 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1610 ccp_perform_passthru(&pst, cmd_q);
1612 /**sha engine command descriptor for IntermediateHash*/
1614 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1615 memset(desc, 0, Q_DESC_SIZE);
1617 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
1619 CCP_CMD_SOC(desc) = 0;
1620 CCP_CMD_IOC(desc) = 0;
1621 CCP_CMD_INIT(desc) = 1;
1622 CCP_CMD_EOM(desc) = 1;
1623 CCP_CMD_PROT(desc) = 0;
1626 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
1627 CCP_CMD_FUNCTION(desc) = function.raw;
1629 CCP_CMD_LEN(desc) = op->sym->auth.data.length;
1630 auth_msg_bits = (op->sym->auth.data.length +
1631 session->auth.block_size) * 8;
1633 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
1634 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
1635 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1637 CCP_CMD_LSB_ID(desc) = cmd_q->sb_sha;
1638 CCP_CMD_SHA_LO(desc) = ((uint32_t)auth_msg_bits);
1639 CCP_CMD_SHA_HI(desc) = high32_value(auth_msg_bits);
1641 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
1645 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1646 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1647 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1648 cmd_q->qcontrol | CMD_Q_RUN);
1650 /* Intermediate Hash value retrieve */
1651 if ((session->auth.ut.sha_type == CCP_SHA_TYPE_384) ||
1652 (session->auth.ut.sha_type == CCP_SHA_TYPE_512)) {
1655 (phys_addr_t)((cmd_q->sb_sha + 1) * CCP_SB_BYTES);
1656 pst.dest_addr = dest_addr_t;
1657 pst.len = CCP_SB_BYTES;
1659 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1660 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1661 ccp_perform_passthru(&pst, cmd_q);
1663 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1664 pst.dest_addr = dest_addr_t + CCP_SB_BYTES;
1665 pst.len = CCP_SB_BYTES;
1667 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1668 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1669 ccp_perform_passthru(&pst, cmd_q);
1672 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1673 pst.dest_addr = dest_addr_t;
1674 pst.len = session->auth.ctx_len;
1676 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1677 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1678 ccp_perform_passthru(&pst, cmd_q);
1682 /** Load PHash2 to LSB*/
1683 addr += session->auth.ctx_len;
1684 if (iommu_mode == 2)
1685 pst.src_addr = (phys_addr_t)rte_mem_virt2iova((void *)addr);
1687 pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *)addr);
1688 pst.dest_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1689 pst.len = session->auth.ctx_len;
1691 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1692 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1693 ccp_perform_passthru(&pst, cmd_q);
1695 /**sha engine command descriptor for FinalHash*/
1696 dest_addr_t += session->auth.offset;
1698 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1699 memset(desc, 0, Q_DESC_SIZE);
1701 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
1703 CCP_CMD_SOC(desc) = 0;
1704 CCP_CMD_IOC(desc) = 0;
1705 CCP_CMD_INIT(desc) = 1;
1706 CCP_CMD_EOM(desc) = 1;
1707 CCP_CMD_PROT(desc) = 0;
1710 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
1711 CCP_CMD_FUNCTION(desc) = function.raw;
1713 CCP_CMD_LEN(desc) = (session->auth.ctx_len -
1714 session->auth.offset);
1715 auth_msg_bits = (session->auth.block_size +
1716 session->auth.ctx_len -
1717 session->auth.offset) * 8;
1719 CCP_CMD_SRC_LO(desc) = (uint32_t)(dest_addr_t);
1720 CCP_CMD_SRC_HI(desc) = high32_value(dest_addr_t);
1721 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1723 CCP_CMD_LSB_ID(desc) = cmd_q->sb_sha;
1724 CCP_CMD_SHA_LO(desc) = ((uint32_t)auth_msg_bits);
1725 CCP_CMD_SHA_HI(desc) = high32_value(auth_msg_bits);
1727 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
1731 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1732 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1733 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1734 cmd_q->qcontrol | CMD_Q_RUN);
1736 /* Retrieve hmac output */
1737 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1738 pst.dest_addr = dest_addr;
1739 pst.len = session->auth.ctx_len;
1741 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1742 if ((session->auth.ut.sha_type == CCP_SHA_TYPE_384) ||
1743 (session->auth.ut.sha_type == CCP_SHA_TYPE_512))
1744 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1746 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1747 ccp_perform_passthru(&pst, cmd_q);
1749 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
1755 ccp_perform_sha(struct rte_crypto_op *op,
1756 struct ccp_queue *cmd_q)
1758 struct ccp_session *session;
1759 union ccp_function function;
1760 struct ccp_desc *desc;
1762 phys_addr_t src_addr, dest_addr;
1763 struct ccp_passthru pst;
1765 uint64_t auth_msg_bits;
1767 session = (struct ccp_session *)get_sym_session_private_data(
1769 ccp_cryptodev_driver_id);
1771 src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
1772 op->sym->auth.data.offset);
1773 append_ptr = (void *)rte_pktmbuf_append(op->sym->m_src,
1774 session->auth.ctx_len);
1775 if (iommu_mode == 2) {
1776 dest_addr = (phys_addr_t)rte_mem_virt2iova(append_ptr);
1777 pst.src_addr = (phys_addr_t)sha_ctx;
1779 dest_addr = (phys_addr_t)rte_mem_virt2phy(append_ptr);
1780 pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *)
1784 /** Passthru sha context*/
1786 pst.dest_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1787 pst.len = session->auth.ctx_len;
1789 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1790 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1791 ccp_perform_passthru(&pst, cmd_q);
1793 /**prepare sha command descriptor*/
1795 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1796 memset(desc, 0, Q_DESC_SIZE);
1798 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
1800 CCP_CMD_SOC(desc) = 0;
1801 CCP_CMD_IOC(desc) = 0;
1802 CCP_CMD_INIT(desc) = 1;
1803 CCP_CMD_EOM(desc) = 1;
1804 CCP_CMD_PROT(desc) = 0;
1807 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
1808 CCP_CMD_FUNCTION(desc) = function.raw;
1810 CCP_CMD_LEN(desc) = op->sym->auth.data.length;
1811 auth_msg_bits = op->sym->auth.data.length * 8;
1813 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
1814 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
1815 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1817 CCP_CMD_LSB_ID(desc) = cmd_q->sb_sha;
1818 CCP_CMD_SHA_LO(desc) = ((uint32_t)auth_msg_bits);
1819 CCP_CMD_SHA_HI(desc) = high32_value(auth_msg_bits);
1821 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
1825 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1826 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1827 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1828 cmd_q->qcontrol | CMD_Q_RUN);
1830 /* Hash value retrieve */
1831 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1832 pst.dest_addr = dest_addr;
1833 pst.len = session->auth.ctx_len;
1835 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1836 if ((session->auth.ut.sha_type == CCP_SHA_TYPE_384) ||
1837 (session->auth.ut.sha_type == CCP_SHA_TYPE_512))
1838 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1840 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1841 ccp_perform_passthru(&pst, cmd_q);
1843 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
1849 ccp_perform_sha3_hmac(struct rte_crypto_op *op,
1850 struct ccp_queue *cmd_q)
1852 struct ccp_session *session;
1853 struct ccp_passthru pst;
1854 union ccp_function function;
1855 struct ccp_desc *desc;
1856 uint8_t *append_ptr;
1858 phys_addr_t src_addr, dest_addr, ctx_paddr, dest_addr_t;
1860 session = (struct ccp_session *)get_sym_session_private_data(
1862 ccp_cryptodev_driver_id);
1864 src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
1865 op->sym->auth.data.offset);
1866 append_ptr = (uint8_t *)rte_pktmbuf_append(op->sym->m_src,
1867 session->auth.ctx_len);
1869 CCP_LOG_ERR("CCP MBUF append failed\n");
1872 if (iommu_mode == 2) {
1873 dest_addr = (phys_addr_t)rte_mem_virt2iova((void *)append_ptr);
1874 ctx_paddr = (phys_addr_t)rte_mem_virt2iova(
1875 session->auth.pre_compute);
1877 dest_addr = (phys_addr_t)rte_mem_virt2phy((void *)append_ptr);
1878 ctx_paddr = (phys_addr_t)rte_mem_virt2phy(
1879 session->auth.pre_compute);
1881 dest_addr_t = dest_addr + (session->auth.ctx_len / 2);
1882 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1883 memset(desc, 0, Q_DESC_SIZE);
1885 /*desc1 for SHA3-Ihash operation */
1886 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
1887 CCP_CMD_INIT(desc) = 1;
1888 CCP_CMD_EOM(desc) = 1;
1891 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
1892 CCP_CMD_FUNCTION(desc) = function.raw;
1893 CCP_CMD_LEN(desc) = op->sym->auth.data.length;
1895 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
1896 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
1897 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1899 CCP_CMD_DST_LO(desc) = (cmd_q->sb_sha * CCP_SB_BYTES);
1900 CCP_CMD_DST_HI(desc) = 0;
1901 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SB;
1903 CCP_CMD_KEY_LO(desc) = ((uint32_t)ctx_paddr);
1904 CCP_CMD_KEY_HI(desc) = high32_value(ctx_paddr);
1905 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1907 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
1910 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1911 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1912 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1913 cmd_q->qcontrol | CMD_Q_RUN);
1915 /* Intermediate Hash value retrieve */
1916 if ((session->auth.ut.sha_type == CCP_SHA3_TYPE_384) ||
1917 (session->auth.ut.sha_type == CCP_SHA3_TYPE_512)) {
1920 (phys_addr_t)((cmd_q->sb_sha + 1) * CCP_SB_BYTES);
1921 pst.dest_addr = dest_addr_t;
1922 pst.len = CCP_SB_BYTES;
1924 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1925 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1926 ccp_perform_passthru(&pst, cmd_q);
1928 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1929 pst.dest_addr = dest_addr_t + CCP_SB_BYTES;
1930 pst.len = CCP_SB_BYTES;
1932 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1933 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1934 ccp_perform_passthru(&pst, cmd_q);
1937 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1938 pst.dest_addr = dest_addr_t;
1939 pst.len = CCP_SB_BYTES;
1941 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1942 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1943 ccp_perform_passthru(&pst, cmd_q);
1946 /**sha engine command descriptor for FinalHash*/
1947 ctx_paddr += CCP_SHA3_CTX_SIZE;
1948 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1949 memset(desc, 0, Q_DESC_SIZE);
1951 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
1952 CCP_CMD_INIT(desc) = 1;
1953 CCP_CMD_EOM(desc) = 1;
1956 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
1957 CCP_CMD_FUNCTION(desc) = function.raw;
1959 if (session->auth.ut.sha_type == CCP_SHA3_TYPE_224) {
1960 dest_addr_t += (CCP_SB_BYTES - SHA224_DIGEST_SIZE);
1961 CCP_CMD_LEN(desc) = SHA224_DIGEST_SIZE;
1962 } else if (session->auth.ut.sha_type == CCP_SHA3_TYPE_256) {
1963 CCP_CMD_LEN(desc) = SHA256_DIGEST_SIZE;
1964 } else if (session->auth.ut.sha_type == CCP_SHA3_TYPE_384) {
1965 dest_addr_t += (2 * CCP_SB_BYTES - SHA384_DIGEST_SIZE);
1966 CCP_CMD_LEN(desc) = SHA384_DIGEST_SIZE;
1968 CCP_CMD_LEN(desc) = SHA512_DIGEST_SIZE;
1971 CCP_CMD_SRC_LO(desc) = ((uint32_t)dest_addr_t);
1972 CCP_CMD_SRC_HI(desc) = high32_value(dest_addr_t);
1973 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1975 CCP_CMD_DST_LO(desc) = (uint32_t)dest_addr;
1976 CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
1977 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1979 CCP_CMD_KEY_LO(desc) = ((uint32_t)ctx_paddr);
1980 CCP_CMD_KEY_HI(desc) = high32_value(ctx_paddr);
1981 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1983 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
1986 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1987 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1988 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1989 cmd_q->qcontrol | CMD_Q_RUN);
1991 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
1996 ccp_perform_sha3(struct rte_crypto_op *op,
1997 struct ccp_queue *cmd_q)
1999 struct ccp_session *session;
2000 union ccp_function function;
2001 struct ccp_desc *desc;
2002 uint8_t *ctx_addr = NULL, *append_ptr = NULL;
2004 phys_addr_t src_addr, dest_addr, ctx_paddr;
2006 session = (struct ccp_session *)get_sym_session_private_data(
2008 ccp_cryptodev_driver_id);
2010 src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
2011 op->sym->auth.data.offset);
2012 append_ptr = (uint8_t *)rte_pktmbuf_append(op->sym->m_src,
2013 session->auth.ctx_len);
2015 CCP_LOG_ERR("CCP MBUF append failed\n");
2018 if (iommu_mode == 2) {
2019 dest_addr = (phys_addr_t)rte_mem_virt2iova((void *)append_ptr);
2020 ctx_paddr = (phys_addr_t)rte_mem_virt2iova((void *)ctx_addr);
2022 dest_addr = (phys_addr_t)rte_mem_virt2phy((void *)append_ptr);
2023 ctx_paddr = (phys_addr_t)rte_mem_virt2phy((void *)ctx_addr);
2026 ctx_addr = session->auth.sha3_ctx;
2028 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2029 memset(desc, 0, Q_DESC_SIZE);
2031 /* prepare desc for SHA3 operation */
2032 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
2033 CCP_CMD_INIT(desc) = 1;
2034 CCP_CMD_EOM(desc) = 1;
2037 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
2038 CCP_CMD_FUNCTION(desc) = function.raw;
2040 CCP_CMD_LEN(desc) = op->sym->auth.data.length;
2042 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
2043 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
2044 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2046 CCP_CMD_DST_LO(desc) = ((uint32_t)dest_addr);
2047 CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
2048 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2050 CCP_CMD_KEY_LO(desc) = ((uint32_t)ctx_paddr);
2051 CCP_CMD_KEY_HI(desc) = high32_value(ctx_paddr);
2052 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2054 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2058 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2059 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2060 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2061 cmd_q->qcontrol | CMD_Q_RUN);
2063 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
2068 ccp_perform_aes_cmac(struct rte_crypto_op *op,
2069 struct ccp_queue *cmd_q)
2071 struct ccp_session *session;
2072 union ccp_function function;
2073 struct ccp_passthru pst;
2074 struct ccp_desc *desc;
2076 uint8_t *src_tb, *append_ptr, *ctx_addr;
2077 phys_addr_t src_addr, dest_addr, key_addr;
2078 int length, non_align_len;
2080 session = (struct ccp_session *)get_sym_session_private_data(
2082 ccp_cryptodev_driver_id);
2083 key_addr = rte_mem_virt2phy(session->auth.key_ccp);
2085 src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
2086 op->sym->auth.data.offset);
2087 append_ptr = (uint8_t *)rte_pktmbuf_append(op->sym->m_src,
2088 session->auth.ctx_len);
2089 dest_addr = (phys_addr_t)rte_mem_virt2phy((void *)append_ptr);
2092 CCP_AES_ENCRYPT(&function) = CCP_CIPHER_DIR_ENCRYPT;
2093 CCP_AES_MODE(&function) = session->auth.um.aes_mode;
2094 CCP_AES_TYPE(&function) = session->auth.ut.aes_type;
2096 if (op->sym->auth.data.length % session->auth.block_size == 0) {
2098 ctx_addr = session->auth.pre_compute;
2099 memset(ctx_addr, 0, AES_BLOCK_SIZE);
2100 if (iommu_mode == 2)
2101 pst.src_addr = (phys_addr_t)rte_mem_virt2iova(
2104 pst.src_addr = (phys_addr_t)rte_mem_virt2phy(
2107 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2108 pst.len = CCP_SB_BYTES;
2110 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2111 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
2112 ccp_perform_passthru(&pst, cmd_q);
2114 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2115 memset(desc, 0, Q_DESC_SIZE);
2117 /* prepare desc for aes-cmac command */
2118 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2119 CCP_CMD_EOM(desc) = 1;
2120 CCP_CMD_FUNCTION(desc) = function.raw;
2122 CCP_CMD_LEN(desc) = op->sym->auth.data.length;
2123 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
2124 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
2125 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2127 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2128 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2129 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2130 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2132 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2137 (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2138 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2139 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2140 cmd_q->qcontrol | CMD_Q_RUN);
2142 ctx_addr = session->auth.pre_compute + CCP_SB_BYTES;
2143 memset(ctx_addr, 0, AES_BLOCK_SIZE);
2144 if (iommu_mode == 2)
2145 pst.src_addr = (phys_addr_t)rte_mem_virt2iova(
2148 pst.src_addr = (phys_addr_t)rte_mem_virt2phy(
2150 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2151 pst.len = CCP_SB_BYTES;
2153 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2154 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
2155 ccp_perform_passthru(&pst, cmd_q);
2157 length = (op->sym->auth.data.length / AES_BLOCK_SIZE);
2158 length *= AES_BLOCK_SIZE;
2159 non_align_len = op->sym->auth.data.length - length;
2160 /* prepare desc for aes-cmac command */
2162 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2163 memset(desc, 0, Q_DESC_SIZE);
2165 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2166 CCP_CMD_INIT(desc) = 1;
2167 CCP_CMD_FUNCTION(desc) = function.raw;
2169 CCP_CMD_LEN(desc) = length;
2170 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
2171 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
2172 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2174 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2175 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2176 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2177 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2179 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2182 append_ptr = append_ptr + CCP_SB_BYTES;
2183 memset(append_ptr, 0, AES_BLOCK_SIZE);
2184 src_tb = rte_pktmbuf_mtod_offset(op->sym->m_src,
2186 op->sym->auth.data.offset +
2188 rte_memcpy(append_ptr, src_tb, non_align_len);
2189 append_ptr[non_align_len] = CMAC_PAD_VALUE;
2191 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2192 memset(desc, 0, Q_DESC_SIZE);
2194 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2195 CCP_CMD_EOM(desc) = 1;
2196 CCP_CMD_FUNCTION(desc) = function.raw;
2197 CCP_CMD_LEN(desc) = AES_BLOCK_SIZE;
2199 CCP_CMD_SRC_LO(desc) = ((uint32_t)(dest_addr + CCP_SB_BYTES));
2200 CCP_CMD_SRC_HI(desc) = high32_value(dest_addr + CCP_SB_BYTES);
2201 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2203 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2204 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2205 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2206 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2208 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2212 (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2213 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2214 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2215 cmd_q->qcontrol | CMD_Q_RUN);
2217 /* Retrieve result */
2218 pst.dest_addr = dest_addr;
2219 pst.src_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2220 pst.len = CCP_SB_BYTES;
2222 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2223 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
2224 ccp_perform_passthru(&pst, cmd_q);
2226 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
2231 ccp_perform_aes(struct rte_crypto_op *op,
2232 struct ccp_queue *cmd_q,
2233 struct ccp_batch_info *b_info)
2235 struct ccp_session *session;
2236 union ccp_function function;
2238 struct ccp_passthru pst = {0};
2239 struct ccp_desc *desc;
2240 phys_addr_t src_addr, dest_addr, key_addr;
2243 session = (struct ccp_session *)get_sym_session_private_data(
2245 ccp_cryptodev_driver_id);
2248 iv = rte_crypto_op_ctod_offset(op, uint8_t *, session->iv.offset);
2249 if (session->cipher.um.aes_mode != CCP_AES_MODE_ECB) {
2250 if (session->cipher.um.aes_mode == CCP_AES_MODE_CTR) {
2251 rte_memcpy(session->cipher.nonce + AES_BLOCK_SIZE,
2252 iv, session->iv.length);
2253 pst.src_addr = (phys_addr_t)session->cipher.nonce_phys;
2254 CCP_AES_SIZE(&function) = 0x1F;
2257 &(b_info->lsb_buf[b_info->lsb_buf_idx*CCP_SB_BYTES]);
2258 rte_memcpy(lsb_buf +
2259 (CCP_SB_BYTES - session->iv.length),
2260 iv, session->iv.length);
2261 pst.src_addr = b_info->lsb_buf_phys +
2262 (b_info->lsb_buf_idx * CCP_SB_BYTES);
2263 b_info->lsb_buf_idx++;
2266 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2267 pst.len = CCP_SB_BYTES;
2269 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2270 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
2271 ccp_perform_passthru(&pst, cmd_q);
2274 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2276 src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
2277 op->sym->cipher.data.offset);
2278 if (likely(op->sym->m_dst != NULL))
2279 dest_addr = rte_pktmbuf_iova_offset(op->sym->m_dst,
2280 op->sym->cipher.data.offset);
2282 dest_addr = src_addr;
2283 key_addr = session->cipher.key_phys;
2285 /* prepare desc for aes command */
2286 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2287 CCP_CMD_INIT(desc) = 1;
2288 CCP_CMD_EOM(desc) = 1;
2290 CCP_AES_ENCRYPT(&function) = session->cipher.dir;
2291 CCP_AES_MODE(&function) = session->cipher.um.aes_mode;
2292 CCP_AES_TYPE(&function) = session->cipher.ut.aes_type;
2293 CCP_CMD_FUNCTION(desc) = function.raw;
2295 CCP_CMD_LEN(desc) = op->sym->cipher.data.length;
2297 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
2298 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
2299 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2301 CCP_CMD_DST_LO(desc) = ((uint32_t)dest_addr);
2302 CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
2303 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2305 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2306 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2307 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2309 if (session->cipher.um.aes_mode != CCP_AES_MODE_ECB)
2310 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2312 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2313 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
2318 ccp_perform_3des(struct rte_crypto_op *op,
2319 struct ccp_queue *cmd_q,
2320 struct ccp_batch_info *b_info)
2322 struct ccp_session *session;
2323 union ccp_function function;
2324 unsigned char *lsb_buf;
2325 struct ccp_passthru pst;
2326 struct ccp_desc *desc;
2329 phys_addr_t src_addr, dest_addr, key_addr;
2331 session = (struct ccp_session *)get_sym_session_private_data(
2333 ccp_cryptodev_driver_id);
2335 iv = rte_crypto_op_ctod_offset(op, uint8_t *, session->iv.offset);
2336 switch (session->cipher.um.des_mode) {
2337 case CCP_DES_MODE_CBC:
2338 lsb_buf = &(b_info->lsb_buf[b_info->lsb_buf_idx*CCP_SB_BYTES]);
2339 b_info->lsb_buf_idx++;
2341 rte_memcpy(lsb_buf + (CCP_SB_BYTES - session->iv.length),
2342 iv, session->iv.length);
2343 if (iommu_mode == 2)
2344 pst.src_addr = (phys_addr_t)rte_mem_virt2iova(
2347 pst.src_addr = (phys_addr_t)rte_mem_virt2phy(
2349 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2350 pst.len = CCP_SB_BYTES;
2352 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2353 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
2354 ccp_perform_passthru(&pst, cmd_q);
2356 case CCP_DES_MODE_CFB:
2357 case CCP_DES_MODE_ECB:
2358 CCP_LOG_ERR("Unsupported DES cipher mode");
2362 src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
2363 op->sym->cipher.data.offset);
2364 if (unlikely(op->sym->m_dst != NULL))
2366 rte_pktmbuf_iova_offset(op->sym->m_dst,
2367 op->sym->cipher.data.offset);
2369 dest_addr = src_addr;
2371 if (iommu_mode == 2)
2372 key_addr = rte_mem_virt2iova(session->cipher.key_ccp);
2374 key_addr = rte_mem_virt2phy(session->cipher.key_ccp);
2376 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2378 memset(desc, 0, Q_DESC_SIZE);
2380 /* prepare desc for des command */
2381 CCP_CMD_ENGINE(desc) = CCP_ENGINE_3DES;
2383 CCP_CMD_SOC(desc) = 0;
2384 CCP_CMD_IOC(desc) = 0;
2385 CCP_CMD_INIT(desc) = 1;
2386 CCP_CMD_EOM(desc) = 1;
2387 CCP_CMD_PROT(desc) = 0;
2390 CCP_DES_ENCRYPT(&function) = session->cipher.dir;
2391 CCP_DES_MODE(&function) = session->cipher.um.des_mode;
2392 CCP_DES_TYPE(&function) = session->cipher.ut.des_type;
2393 CCP_CMD_FUNCTION(desc) = function.raw;
2395 CCP_CMD_LEN(desc) = op->sym->cipher.data.length;
2397 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
2398 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
2399 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2401 CCP_CMD_DST_LO(desc) = ((uint32_t)dest_addr);
2402 CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
2403 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2405 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2406 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2407 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2409 if (session->cipher.um.des_mode)
2410 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2412 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2416 /* Write the new tail address back to the queue register */
2417 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2418 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2419 /* Turn the queue back on using our cached control register */
2420 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2421 cmd_q->qcontrol | CMD_Q_RUN);
2423 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
/*
 * Queue an AES-GCM AEAD operation as five CCP commands:
 *   CMD1 passthru: load nonce+IV into the queue's sb_iv LSB slot
 *   CMD2 AES/GHASH-AAD: hash the AAD into the GHASH state
 *   CMD3 AES/GCTR: encrypt/decrypt the payload (block-aligned length)
 *   CMD4 passthru: reload the IV for the final GHASH
 *   CMD5 AES/GHASH-FINAL: fold (AAD_len || PT_len) and emit the tag
 * The tag scratch area (ctx_len bytes) is appended to m_src and trimmed
 * again at dequeue time (see ccp_auth_dq_prepare).
 *
 * NOTE(review): this listing is elided (numbers are original file lines);
 * declarations for iv/temp/tail and some braces fall in the gaps.
 */
2428 ccp_perform_aes_gcm(struct rte_crypto_op *op, struct ccp_queue *cmd_q)
2430 struct ccp_session *session;
2431 union ccp_function function;
2433 struct ccp_passthru pst;
2434 struct ccp_desc *desc;
2437 phys_addr_t src_addr, dest_addr, key_addr, aad_addr;
2438 phys_addr_t digest_dest_addr;
2439 int length, non_align_len;
2441 session = (struct ccp_session *)get_sym_session_private_data(
2443 ccp_cryptodev_driver_id);
2444 iv = rte_crypto_op_ctod_offset(op, uint8_t *, session->iv.offset);
2445 key_addr = session->cipher.key_phys;
2447 src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
2448 op->sym->aead.data.offset);
2449 if (unlikely(op->sym->m_dst != NULL))
2450 dest_addr = rte_pktmbuf_iova_offset(op->sym->m_dst,
2451 op->sym->aead.data.offset);
2453 dest_addr = src_addr;
/* Reserve tag/length scratch space at the tail of m_src.
 * NOTE(review): rte_pktmbuf_append() can return NULL on insufficient
 * tailroom; the return value is not checked here — confirm upstream. */
2454 rte_pktmbuf_append(op->sym->m_src, session->auth.ctx_len);
2455 digest_dest_addr = op->sym->aead.digest.phys_addr;
/* Write the GHASH length block (AAD_len || PT_len), both in BITS,
 * big-endian, into the second 16-byte block of the digest area. */
2456 temp = (uint64_t *)(op->sym->aead.digest.data + AES_BLOCK_SIZE);
2457 *temp++ = rte_bswap64(session->auth.aad_length << 3);
2458 *temp = rte_bswap64(op->sym->aead.data.length << 3);
2460 non_align_len = op->sym->aead.data.length % AES_BLOCK_SIZE;
2461 length = CCP_ALIGN(op->sym->aead.data.length, AES_BLOCK_SIZE);
2463 aad_addr = op->sym->aead.aad.phys_addr;
2465 /* CMD1 IV Passthru */
/* Session nonce buffer holds counter prefix; IV is appended after it. */
2466 rte_memcpy(session->cipher.nonce + AES_BLOCK_SIZE, iv,
2467 session->iv.length);
2468 pst.src_addr = session->cipher.nonce_phys;
2469 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2470 pst.len = CCP_SB_BYTES;
2472 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2473 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
2474 ccp_perform_passthru(&pst, cmd_q);
2476 /* CMD2 GHASH-AAD */
/* The GHASH sub-mode (AAD vs FINAL) is carried in the encrypt field
 * of the function word — mirrored by CMD5 below. */
2478 CCP_AES_ENCRYPT(&function) = CCP_AES_MODE_GHASH_AAD;
2479 CCP_AES_MODE(&function) = CCP_AES_MODE_GHASH;
2480 CCP_AES_TYPE(&function) = session->cipher.ut.aes_type;
2482 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2483 memset(desc, 0, Q_DESC_SIZE);
2485 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2486 CCP_CMD_INIT(desc) = 1;
2487 CCP_CMD_FUNCTION(desc) = function.raw;
2489 CCP_CMD_LEN(desc) = session->auth.aad_length;
2491 CCP_CMD_SRC_LO(desc) = ((uint32_t)aad_addr);
2492 CCP_CMD_SRC_HI(desc) = high32_value(aad_addr);
2493 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2495 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2496 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2497 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2499 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2501 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
/* Ring doorbell: advance tail and restart the queue. */
2504 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2505 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2506 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2507 cmd_q->qcontrol | CMD_Q_RUN);
2509 /* CMD3 : GCTR Plain text */
2511 CCP_AES_ENCRYPT(&function) = session->cipher.dir;
2512 CCP_AES_MODE(&function) = CCP_AES_MODE_GCTR;
2513 CCP_AES_TYPE(&function) = session->cipher.ut.aes_type;
/* AES_SIZE encodes (bits of final partial block) - 1; a full block
 * when the payload is block-aligned. */
2514 if (non_align_len == 0)
2515 CCP_AES_SIZE(&function) = (AES_BLOCK_SIZE << 3) - 1;
2517 CCP_AES_SIZE(&function) = (non_align_len << 3) - 1;
2520 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2521 memset(desc, 0, Q_DESC_SIZE);
2523 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2524 CCP_CMD_EOM(desc) = 1;
2525 CCP_CMD_FUNCTION(desc) = function.raw;
2527 CCP_CMD_LEN(desc) = length;
2529 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
2530 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
2531 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2533 CCP_CMD_DST_LO(desc) = ((uint32_t)dest_addr);
2534 CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
/* NOTE(review): SRC_MEM is assigned a second time here after DST_LO/HI;
 * this looks like a copy-paste for CCP_CMD_DST_MEM, which is left 0
 * from the memset. Works only if memtype 0 == CCP_MEMTYPE_SYSTEM —
 * confirm against the CCP descriptor layout before changing. */
2535 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2537 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2538 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2539 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2541 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2543 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2546 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2547 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2548 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2549 cmd_q->qcontrol | CMD_Q_RUN);
2551 /* CMD4 : PT to copy IV */
/* GCTR advanced the counter in sb_iv; reload the original J0 for the
 * final GHASH/tag computation. */
2552 pst.src_addr = session->cipher.nonce_phys;
2553 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2554 pst.len = AES_BLOCK_SIZE;
2556 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2557 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
2558 ccp_perform_passthru(&pst, cmd_q);
2560 /* CMD5 : GHASH-Final */
2562 CCP_AES_ENCRYPT(&function) = CCP_AES_MODE_GHASH_FINAL;
2563 CCP_AES_MODE(&function) = CCP_AES_MODE_GHASH;
2564 CCP_AES_TYPE(&function) = session->cipher.ut.aes_type;
2566 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2567 memset(desc, 0, Q_DESC_SIZE);
2569 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2570 CCP_CMD_FUNCTION(desc) = function.raw;
2571 /* Last block (AAD_len || PT_len)*/
2572 CCP_CMD_LEN(desc) = AES_BLOCK_SIZE;
/* Source = the length block written at 2456-2458; dest = tag slot. */
2574 CCP_CMD_SRC_LO(desc) = ((uint32_t)digest_dest_addr + AES_BLOCK_SIZE);
2575 CCP_CMD_SRC_HI(desc) = high32_value(digest_dest_addr + AES_BLOCK_SIZE);
2576 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2578 CCP_CMD_DST_LO(desc) = ((uint32_t)digest_dest_addr);
2579 CCP_CMD_DST_HI(desc) = high32_value(digest_dest_addr);
/* NOTE(review): same suspected SRC_MEM/DST_MEM copy-paste as at 2535. */
2580 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2582 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2583 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2584 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2586 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2588 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2591 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2592 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2593 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2594 cmd_q->qcontrol | CMD_Q_RUN);
2596 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
/*
 * Dispatch one cipher-only op to the engine-specific descriptor builder
 * and account for the number of queue descriptors it consumes
 * (2 for AES-CBC/CTR: IV passthru + cipher; 1 for AES-ECB; 2 for 3DES-CBC).
 *
 * NOTE(review): listing is elided — `break;` lines and the return fall
 * in the numbering gaps (2616, 2620, 2624, 2628, ...).
 */
2601 ccp_crypto_cipher(struct rte_crypto_op *op,
2602 struct ccp_queue *cmd_q,
2603 struct ccp_batch_info *b_info)
2606 struct ccp_session *session;
2608 session = (struct ccp_session *)get_sym_session_private_data(
2610 ccp_cryptodev_driver_id);
2612 switch (session->cipher.algo) {
2613 case CCP_CIPHER_ALGO_AES_CBC:
2614 result = ccp_perform_aes(op, cmd_q, b_info);
2615 b_info->desccnt += 2;
2617 case CCP_CIPHER_ALGO_AES_CTR:
2618 result = ccp_perform_aes(op, cmd_q, b_info);
2619 b_info->desccnt += 2;
2621 case CCP_CIPHER_ALGO_AES_ECB:
/* ECB needs no IV passthru: single descriptor. */
2622 result = ccp_perform_aes(op, cmd_q, b_info);
2623 b_info->desccnt += 1;
2625 case CCP_CIPHER_ALGO_3DES_CBC:
2626 result = ccp_perform_3des(op, cmd_q, b_info);
2627 b_info->desccnt += 2;
2630 CCP_LOG_ERR("Unsupported cipher algo %d",
2631 session->cipher.algo);
/*
 * Dispatch one auth op to the matching hash engine and record how many
 * queue descriptors each variant consumes (plain SHA: 3; SHA1/224/256
 * HMAC: 6; SHA384/512 HMAC: 7; SHA3: 1; SHA3 HMAC: 3 or 4; CMAC: 4).
 * For HMAC variants, session->auth_opt selects the software (OpenSSL)
 * path instead of the CCP engine — those branches are elided here.
 *
 * NOTE(review): listing is elided — `break;`/`else` lines fall in the
 * numbering gaps.
 */
2638 ccp_crypto_auth(struct rte_crypto_op *op,
2639 struct ccp_queue *cmd_q,
2640 struct ccp_batch_info *b_info)
2644 struct ccp_session *session;
2646 session = (struct ccp_session *)get_sym_session_private_data(
2648 ccp_cryptodev_driver_id);
2650 switch (session->auth.algo) {
2651 case CCP_AUTH_ALGO_SHA1:
2652 case CCP_AUTH_ALGO_SHA224:
2653 case CCP_AUTH_ALGO_SHA256:
2654 case CCP_AUTH_ALGO_SHA384:
2655 case CCP_AUTH_ALGO_SHA512:
2656 result = ccp_perform_sha(op, cmd_q);
2657 b_info->desccnt += 3;
2659 case CCP_AUTH_ALGO_MD5_HMAC:
/* MD5-HMAC has no CCP engine path; only valid with auth_opt set. */
2660 if (session->auth_opt == 0)
2663 case CCP_AUTH_ALGO_SHA1_HMAC:
2664 case CCP_AUTH_ALGO_SHA224_HMAC:
2665 case CCP_AUTH_ALGO_SHA256_HMAC:
2666 if (session->auth_opt == 0) {
2667 result = ccp_perform_hmac(op, cmd_q);
2668 b_info->desccnt += 6;
2671 case CCP_AUTH_ALGO_SHA384_HMAC:
2672 case CCP_AUTH_ALGO_SHA512_HMAC:
2673 if (session->auth_opt == 0) {
2674 result = ccp_perform_hmac(op, cmd_q);
/* Wider digests need one extra descriptor than SHA1/224/256 HMAC. */
2675 b_info->desccnt += 7;
2678 case CCP_AUTH_ALGO_SHA3_224:
2679 case CCP_AUTH_ALGO_SHA3_256:
2680 case CCP_AUTH_ALGO_SHA3_384:
2681 case CCP_AUTH_ALGO_SHA3_512:
2682 result = ccp_perform_sha3(op, cmd_q);
2683 b_info->desccnt += 1;
2685 case CCP_AUTH_ALGO_SHA3_224_HMAC:
2686 case CCP_AUTH_ALGO_SHA3_256_HMAC:
2687 result = ccp_perform_sha3_hmac(op, cmd_q);
2688 b_info->desccnt += 3;
2690 case CCP_AUTH_ALGO_SHA3_384_HMAC:
2691 case CCP_AUTH_ALGO_SHA3_512_HMAC:
2692 result = ccp_perform_sha3_hmac(op, cmd_q);
2693 b_info->desccnt += 4;
2695 case CCP_AUTH_ALGO_AES_CMAC:
2696 result = ccp_perform_aes_cmac(op, cmd_q);
2697 b_info->desccnt += 4;
2700 CCP_LOG_ERR("Unsupported auth algo %d",
2701 session->auth.algo);
/*
 * Dispatch one AEAD op. Only AES-GCM is supported; the session must have
 * both auth and cipher algos set to their GCM variants ("chain order"
 * check), and a GCM op costs 5 queue descriptors (see ccp_perform_aes_gcm).
 *
 * NOTE(review): listing is elided — error-return and `break;` lines fall
 * in the numbering gaps.
 */
2709 ccp_crypto_aead(struct rte_crypto_op *op,
2710 struct ccp_queue *cmd_q,
2711 struct ccp_batch_info *b_info)
2714 struct ccp_session *session;
2716 session = (struct ccp_session *)get_sym_session_private_data(
2718 ccp_cryptodev_driver_id);
2720 switch (session->auth.algo) {
2721 case CCP_AUTH_ALGO_AES_GCM:
2722 if (session->cipher.algo != CCP_CIPHER_ALGO_AES_GCM) {
2723 CCP_LOG_ERR("Incorrect chain order");
2726 result = ccp_perform_aes_gcm(op, cmd_q);
2727 b_info->desccnt += 5;
2730 CCP_LOG_ERR("Unsupported aead algo %d",
2731 session->aead_algo);
/*
 * Enqueue a batch of crypto ops onto a CCP hardware queue.
 *
 * Allocates a ccp_batch_info from the QP mempool, records head/tail queue
 * offsets so dequeue can poll hardware progress, dispatches each op by
 * session->cmd_id (cipher / auth / cipher+hash / hash+cipher / combined),
 * then rings the tail doorbell ONCE for the whole batch and publishes
 * b_info on qp->processed_pkts for the dequeue side.
 *
 * The OpenSSL EVP_MD_CTX is only needed for the software-auth (auth_opt)
 * path via cpu_crypto_auth(); it is created up front and destroyed on exit.
 *
 * NOTE(review): listing is elided (numbers are original file lines) —
 * parameter lines (b_idx, nb_ops, slots_req), braces, `break;` and the
 * error/return paths fall in the numbering gaps.
 */
2738 process_ops_to_enqueue(struct ccp_qp *qp,
2739 struct rte_crypto_op **op,
2740 struct ccp_queue *cmd_q,
2742 uint16_t total_nb_ops,
2747 struct ccp_batch_info *b_info;
2748 struct ccp_session *session;
2749 EVP_MD_CTX *auth_ctx = NULL;
2751 if (rte_mempool_get(qp->batch_mp, (void **)&b_info)) {
2752 CCP_LOG_ERR("batch info allocation failed");
2756 auth_ctx = EVP_MD_CTX_create();
2757 if (unlikely(!auth_ctx)) {
2758 CCP_LOG_ERR("Unable to create auth ctx");
2761 b_info->auth_ctr = 0;
2763 /* populate batch info necessary for dequeue */
2766 b_info->lsb_buf_idx = 0;
2767 b_info->desccnt = 0;
2768 b_info->cmd_q = cmd_q;
/* iommu_mode==2: use IOVA translation for the LSB staging buffer. */
2769 if (iommu_mode == 2)
2770 b_info->lsb_buf_phys =
2771 (phys_addr_t)rte_mem_virt2iova((void *)b_info->lsb_buf);
2773 b_info->lsb_buf_phys =
2774 (phys_addr_t)rte_mem_virt2phy((void *)b_info->lsb_buf);
/* Reserve queue slots up front; unused ones are refunded on failure. */
2776 rte_atomic64_sub(&b_info->cmd_q->free_slots, slots_req);
2778 b_info->head_offset = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx *
2780 for (i = b_idx; i < (nb_ops+b_idx); i++) {
2781 session = (struct ccp_session *)get_sym_session_private_data(
2782 op[i]->sym->session,
2783 ccp_cryptodev_driver_id);
2784 switch (session->cmd_id) {
2785 case CCP_CMD_CIPHER:
2786 result = ccp_crypto_cipher(op[i], cmd_q, b_info);
/* Auth-only: software path when auth_opt, else CCP engine. */
2789 if (session->auth_opt) {
2791 result = cpu_crypto_auth(qp, op[i],
2794 result = ccp_crypto_auth(op[i], cmd_q, b_info);
2796 case CCP_CMD_CIPHER_HASH:
2797 result = ccp_crypto_cipher(op[i], cmd_q, b_info);
2800 result = ccp_crypto_auth(op[i], cmd_q, b_info);
2802 case CCP_CMD_HASH_CIPHER:
/* Hash-then-cipher with software auth: verify digest immediately,
 * then queue the cipher on hardware. */
2803 if (session->auth_opt) {
2804 result = cpu_crypto_auth(qp, op[i],
2806 if (op[i]->status !=
2807 RTE_CRYPTO_OP_STATUS_SUCCESS)
2808 CCP_LOG_ERR("RTE_CRYPTO_OP_STATUS_AUTH_FAILED");
2810 result = ccp_crypto_auth(op[i], cmd_q, b_info);
2814 result = ccp_crypto_cipher(op[i], cmd_q, b_info);
2816 case CCP_CMD_COMBINED:
2817 result = ccp_crypto_aead(op[i], cmd_q, b_info);
2820 CCP_LOG_ERR("Unsupported cmd_id");
2823 if (unlikely(result < 0)) {
/* Refund slots that were reserved but never consumed. */
2824 rte_atomic64_add(&b_info->cmd_q->free_slots,
2825 (slots_req - b_info->desccnt));
2828 b_info->op[i] = op[i];
2832 b_info->b_idx = b_idx;
2833 b_info->total_nb_ops = total_nb_ops;
2834 b_info->tail_offset = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx *
2838 /* Write the new tail address back to the queue register */
2839 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE,
2840 b_info->tail_offset);
2841 /* Turn the queue back on using our cached control register */
2842 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2843 cmd_q->qcontrol | CMD_Q_RUN);
2845 rte_ring_enqueue(qp->processed_pkts, (void *)b_info);
2847 EVP_MD_CTX_destroy(auth_ctx);
/*
 * Post-process an auth/AEAD op at dequeue time.
 *
 * The hardware wrote its digest into a ctx_len-byte scratch area appended
 * to the tail of m_src at enqueue; locate it, byte-reverse it for engines
 * that emit little-endian state (everything except SHA1/224/256), then
 * either VERIFY (memcmp against the caller's digest, set AUTH_FAILED on
 * mismatch) or GENERATE (copy out to digest_data). Finally trim the
 * scratch area off the mbuf.
 *
 * NOTE(review): listing is elided — braces/else lines and the use of
 * digest_le after the reversal loop fall in the numbering gaps.
 */
2851 static inline void ccp_auth_dq_prepare(struct rte_crypto_op *op)
2853 struct ccp_session *session;
2854 uint8_t *digest_data, *addr;
2855 struct rte_mbuf *m_last;
2856 int offset, digest_offset;
2857 uint8_t digest_le[64];
2859 session = (struct ccp_session *)get_sym_session_private_data(
2861 ccp_cryptodev_driver_id);
/* AEAD ops carry the digest in aead.*, chained ops in auth.*. */
2863 if (session->cmd_id == CCP_CMD_COMBINED) {
2864 digest_data = op->sym->aead.digest.data;
2865 digest_offset = op->sym->aead.data.offset +
2866 op->sym->aead.data.length;
2868 digest_data = op->sym->auth.digest.data;
2869 digest_offset = op->sym->auth.data.offset +
2870 op->sym->auth.data.length;
/* Hardware context lives in the last ctx_len bytes of the last seg. */
2872 m_last = rte_pktmbuf_lastseg(op->sym->m_src);
2873 addr = (uint8_t *)((char *)m_last->buf_addr + m_last->data_off +
2874 m_last->data_len - session->auth.ctx_len);
2877 offset = session->auth.offset;
2879 if (session->auth.engine == CCP_ENGINE_SHA)
2880 if ((session->auth.ut.sha_type != CCP_SHA_TYPE_1) &&
2881 (session->auth.ut.sha_type != CCP_SHA_TYPE_224) &&
2882 (session->auth.ut.sha_type != CCP_SHA_TYPE_256)) {
2883 /* All other algorithms require byte
2888 offset = session->auth.ctx_len -
2889 session->auth.offset - 1;
/* Reverse the digest bytes into digest_le (offset counts down). */
2890 for (i = 0; i < session->auth.digest_length; i++)
2891 digest_le[i] = addr[offset - i];
2896 op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
2897 if (session->auth.op == CCP_AUTH_OP_VERIFY) {
2898 if (memcmp(addr + offset, digest_data,
2899 session->auth.digest_length) != 0)
2900 op->status = RTE_CRYPTO_OP_STATUS_AUTH_FAILED;
/* GENERATE with no digest buffer: write into m_dst at digest_offset. */
2903 if (unlikely(digest_data == 0))
2904 digest_data = rte_pktmbuf_mtod_offset(
2905 op->sym->m_dst, uint8_t *,
2907 rte_memcpy(digest_data, addr + offset,
2908 session->auth.digest_length);
2910 /* Trim area used for digest from mbuf. */
2911 rte_pktmbuf_trim(op->sym->m_src,
2912 session->auth.ctx_len);
/*
 * Convert up to nb_ops completed ops from a batch into caller-visible
 * results: copy op pointers into op_d[], set per-op status, and run the
 * digest post-processing (ccp_auth_dq_prepare) or software auth
 * (cpu_crypto_auth, when session->auth_opt) as required by cmd_id.
 * Decrements b_info->opcnt by the number of ops handed back.
 *
 * NOTE(review): listing is elided — the nb_ops parameter, min_ops/i
 * declarations, `break;` lines and the return fall in the numbering gaps.
 */
2916 ccp_prepare_ops(struct ccp_qp *qp,
2917 struct rte_crypto_op **op_d,
2918 struct ccp_batch_info *b_info,
2922 struct ccp_session *session;
/* Needed only by the software-auth path in cpu_crypto_auth(). */
2924 EVP_MD_CTX *auth_ctx = NULL;
2926 auth_ctx = EVP_MD_CTX_create();
2927 if (unlikely(!auth_ctx)) {
2928 CCP_LOG_ERR("Unable to create auth ctx");
2931 min_ops = RTE_MIN(nb_ops, b_info->opcnt);
2933 for (i = b_info->b_idx; i < min_ops; i++) {
/* op_idx persists across partial dequeues of the same batch. */
2934 op_d[i] = b_info->op[b_info->b_idx + b_info->op_idx++];
2935 session = (struct ccp_session *)get_sym_session_private_data(
2936 op_d[i]->sym->session,
2937 ccp_cryptodev_driver_id);
2938 switch (session->cmd_id) {
2939 case CCP_CMD_CIPHER:
/* Cipher-only: no digest to post-process. */
2940 op_d[i]->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
2943 if (session->auth_opt == 0)
2944 ccp_auth_dq_prepare(op_d[i]);
2946 case CCP_CMD_CIPHER_HASH:
2947 if (session->auth_opt)
2948 cpu_crypto_auth(qp, op_d[i],
2951 ccp_auth_dq_prepare(op_d[i]);
2953 case CCP_CMD_HASH_CIPHER:
/* Software auth already verified at enqueue for this order. */
2954 if (session->auth_opt)
2955 op_d[i]->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
2957 ccp_auth_dq_prepare(op_d[i]);
2959 case CCP_CMD_COMBINED:
2960 ccp_auth_dq_prepare(op_d[i]);
2963 CCP_LOG_ERR("Unsupported cmd_id");
2967 EVP_MD_CTX_destroy(auth_ctx);
2968 b_info->opcnt -= min_ops;
2973 process_ops_to_dequeue(struct ccp_qp *qp,
2974 struct rte_crypto_op **op,
2976 uint16_t *total_nb_ops)
2978 struct ccp_batch_info *b_info;
2979 uint32_t cur_head_offset;
2981 if (qp->b_info != NULL) {
2982 b_info = qp->b_info;
2983 if (unlikely(b_info->op_idx > 0))
2985 } else if (rte_ring_dequeue(qp->processed_pkts,
2989 if (b_info->auth_ctr == b_info->opcnt)
2991 *total_nb_ops = b_info->total_nb_ops;
2992 cur_head_offset = CCP_READ_REG(b_info->cmd_q->reg_base,
2993 CMD_Q_HEAD_LO_BASE);
2995 if (b_info->head_offset < b_info->tail_offset) {
2996 if ((cur_head_offset >= b_info->head_offset) &&
2997 (cur_head_offset < b_info->tail_offset)) {
2998 qp->b_info = b_info;
3001 } else if (b_info->tail_offset != b_info->head_offset) {
3002 if ((cur_head_offset >= b_info->head_offset) ||
3003 (cur_head_offset < b_info->tail_offset)) {
3004 qp->b_info = b_info;
3011 *total_nb_ops = b_info->total_nb_ops;
3012 nb_ops = ccp_prepare_ops(qp, op, b_info, nb_ops);
3013 rte_atomic64_add(&b_info->cmd_q->free_slots, b_info->desccnt);
3014 b_info->desccnt = 0;
3015 if (b_info->opcnt > 0) {
3016 qp->b_info = b_info;
3018 rte_mempool_put(qp->batch_mp, (void *)b_info);