1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2018 Advanced Micro Devices, Inc. All rights reserved.
10 #include <sys/queue.h>
11 #include <sys/types.h>
13 #include <openssl/sha.h>
14 #include <openssl/cmac.h> /* CMAC sub-key generation APIs */
15 #include <openssl/evp.h> /* EVP cipher APIs used for CMAC sub-key derivation */
17 #include <rte_hexdump.h>
18 #include <rte_memzone.h>
19 #include <rte_malloc.h>
20 #include <rte_memory.h>
21 #include <rte_spinlock.h>
22 #include <rte_string_fns.h>
23 #include <rte_cryptodev_pmd.h>
26 #include "ccp_crypto.h"
28 #include "ccp_pmd_private.h"
30 #ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
31 #include <openssl/conf.h>
32 #include <openssl/err.h>
33 #include <openssl/hmac.h>
36 /* SHA initial context values */
/*
 * Initial hash-state vectors loaded into the CCP engine before a plain
 * SHA operation (see sess->auth.ctx assignments in
 * ccp_configure_session_auth).  Initializer words are on lines not
 * visible in this view.
 */
37 static uint32_t ccp_sha1_init[SHA_COMMON_DIGEST_SIZE / sizeof(uint32_t)] = {
/* SHA-224 shares the SHA-256 state width but has its own initial words. */
44 uint32_t ccp_sha224_init[SHA256_DIGEST_SIZE / sizeof(uint32_t)] = {
51 uint32_t ccp_sha256_init[SHA256_DIGEST_SIZE / sizeof(uint32_t)] = {
/* SHA-384 shares the SHA-512 state width but has its own initial words. */
58 uint64_t ccp_sha384_init[SHA512_DIGEST_SIZE / sizeof(uint64_t)] = {
65 uint64_t ccp_sha512_init[SHA512_DIGEST_SIZE / sizeof(uint64_t)] = {
/*
 * SHA3_CONST wraps the 64-bit Keccak round constants.  The two
 * definitions sit on opposite arms of a preprocessor conditional (the
 * #if itself is not visible in this view); one adds an L suffix for
 * targets that need it.
 */
73 #define SHA3_CONST(x) x
75 #define SHA3_CONST(x) x##L
78 /** 'Words' here refers to uint64_t */
/* Keccak-f[1600] state size: 1600 bits = 25 64-bit lanes. */
79 #define SHA3_KECCAK_SPONGE_WORDS \
80 (((1600) / 8) / sizeof(uint64_t))
/*
 * SHA-3 (Keccak) incremental-hash state used by the CPU-side partial
 * hash helpers below.  The union of s[] (lane view) and sb[] (byte
 * view) lets the precompute code read the raw 200-byte sponge state.
 * Several comment delimiters/members fall on lines not visible here.
 */
81 typedef struct sha3_context_ {
84 * The portion of the input message that we
88 uint64_t s[SHA3_KECCAK_SPONGE_WORDS];
/* Byte-addressed view of the same 25-lane sponge state. */
90 uint8_t sb[SHA3_KECCAK_SPONGE_WORDS * 8];
91 /** total 200-byte sponge state (25 * 8) **/
93 unsigned int byteIndex;
95 * 0..7--the next byte after the set one
96 * (starts from 0; 0--none are buffered)
98 unsigned int wordIndex;
100 * 0..24--the next word to integrate input
103 unsigned int capacityWords;
105 * the double size of the hash output in
106 * words (e.g. 16 for Keccak 512)
/*
 * 64-bit rotate-left.  Callers pass rotation counts from keccakf_rotc
 * (1..62) or the constant 1, so the (64 - y) right shift never hits
 * the undefined shift-by-64 case.
 */
111 #define SHA3_ROTL64(x, y) \
112 (((x) << (y)) | ((x) >> ((sizeof(uint64_t)*8) - (y))))
/* Keccak-f[1600] round constants, XORed into lane s[0] in the iota
 * step of keccakf() (one constant per round, 24 rounds).
 */
115 static const uint64_t keccakf_rndc[24] = {
116 SHA3_CONST(0x0000000000000001UL), SHA3_CONST(0x0000000000008082UL),
117 SHA3_CONST(0x800000000000808aUL), SHA3_CONST(0x8000000080008000UL),
118 SHA3_CONST(0x000000000000808bUL), SHA3_CONST(0x0000000080000001UL),
119 SHA3_CONST(0x8000000080008081UL), SHA3_CONST(0x8000000000008009UL),
120 SHA3_CONST(0x000000000000008aUL), SHA3_CONST(0x0000000000000088UL),
121 SHA3_CONST(0x0000000080008009UL), SHA3_CONST(0x000000008000000aUL),
122 SHA3_CONST(0x000000008000808bUL), SHA3_CONST(0x800000000000008bUL),
123 SHA3_CONST(0x8000000000008089UL), SHA3_CONST(0x8000000000008003UL),
124 SHA3_CONST(0x8000000000008002UL), SHA3_CONST(0x8000000000000080UL),
125 SHA3_CONST(0x000000000000800aUL), SHA3_CONST(0x800000008000000aUL),
126 SHA3_CONST(0x8000000080008081UL), SHA3_CONST(0x8000000000008080UL),
127 SHA3_CONST(0x0000000080000001UL), SHA3_CONST(0x8000000080008008UL)
/* Per-lane rotation offsets used by the rho step of keccakf()
 * (remaining entries and the closing brace are outside this view).
 */
130 static const unsigned int keccakf_rotc[24] = {
131 1, 3, 6, 10, 15, 21, 28, 36, 45, 55, 2, 14, 27, 41, 56, 8, 25, 43, 62,
/* Lane permutation order — presumably the pi-step index sequence
 * consumed together with keccakf_rotc in keccakf()'s rho/pi loop
 * (the use site is on lines outside this view; verify against it).
 */
135 static const unsigned int keccakf_piln[24] = {
136 10, 7, 11, 17, 18, 3, 5, 16, 8, 21, 24, 4, 15, 23, 19, 13, 12, 2, 20,
/*
 * Classify a crypto xform chain into the CCP command ordering this PMD
 * supports: plain cipher, plain auth, cipher-then-hash, hash-then-cipher,
 * or combined (AEAD).  Unrecognised chains fall through to the default
 * CCP_CMD_NOT_SUPPORTED result.
 */
140 static enum ccp_cmd_order
141 ccp_get_cmd_id(const struct rte_crypto_sym_xform *xform)
143 enum ccp_cmd_order res = CCP_CMD_NOT_SUPPORTED;
/* Auth first: alone, or chained with a following cipher. */
147 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH) {
148 if (xform->next == NULL)
150 else if (xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
151 return CCP_CMD_HASH_CIPHER;
/* Cipher first: alone, or chained with a following auth. */
153 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER) {
154 if (xform->next == NULL)
155 return CCP_CMD_CIPHER;
156 else if (xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
157 return CCP_CMD_CIPHER_HASH;
/* AEAD maps onto the combined cipher+hash command. */
159 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD)
160 return CCP_CMD_COMBINED;
164 /* partial hash using openssl */
/*
 * Run a single SHA-1 compression over one input block (no padding, no
 * finalization) and copy the raw intermediate state out.  Used by
 * generate_partial_hash() to precompute HMAC ipad/opad states for the
 * CCP engine.  data_in must be one full SHA-1 block.
 */
165 static int partial_hash_sha1(uint8_t *data_in, uint8_t *data_out)
169 if (!SHA1_Init(&ctx))
171 SHA1_Transform(&ctx, data_in);
/* Copy the leading state words of SHA_CTX, not a finalized digest. */
172 rte_memcpy(data_out, &ctx, SHA_DIGEST_LENGTH);
/*
 * Single SHA-224 compression over one block; SHA-224 shares the
 * SHA-256 compression function, hence SHA256_Transform.  Copies the
 * raw intermediate state (SHA256_DIGEST_LENGTH bytes) for the HMAC
 * ipad/opad precompute.
 */
176 static int partial_hash_sha224(uint8_t *data_in, uint8_t *data_out)
180 if (!SHA224_Init(&ctx))
182 SHA256_Transform(&ctx, data_in);
183 rte_memcpy(data_out, &ctx,
184 SHA256_DIGEST_LENGTH);
/*
 * Single SHA-256 compression over one block; copies the raw
 * intermediate state out for the HMAC ipad/opad precompute.
 */
188 static int partial_hash_sha256(uint8_t *data_in, uint8_t *data_out)
192 if (!SHA256_Init(&ctx))
194 SHA256_Transform(&ctx, data_in);
195 rte_memcpy(data_out, &ctx,
196 SHA256_DIGEST_LENGTH);
/*
 * Single SHA-384 compression over one block; SHA-384 shares the
 * SHA-512 compression function, hence SHA512_Transform.  Copies the
 * raw intermediate state (SHA512_DIGEST_LENGTH bytes) out.
 */
200 static int partial_hash_sha384(uint8_t *data_in, uint8_t *data_out)
204 if (!SHA384_Init(&ctx))
206 SHA512_Transform(&ctx, data_in);
207 rte_memcpy(data_out, &ctx,
208 SHA512_DIGEST_LENGTH);
/*
 * Single SHA-512 compression over one block; copies the raw
 * intermediate state out for the HMAC ipad/opad precompute.
 */
212 static int partial_hash_sha512(uint8_t *data_in, uint8_t *data_out)
216 if (!SHA512_Init(&ctx))
218 SHA512_Transform(&ctx, data_in);
219 rte_memcpy(data_out, &ctx,
220 SHA512_DIGEST_LENGTH);
/*
 * Keccak-f[1600] permutation over the 25-lane state, 24 rounds of
 * theta, rho+pi, chi, and iota (FIPS 202).  Several statements fall on
 * lines outside this view.
 */
225 keccakf(uint64_t s[25])
229 #define KECCAK_ROUNDS 24
231 for (round = 0; round < KECCAK_ROUNDS; round++) {
/* theta: column parities ... */
234 for (i = 0; i < 5; i++)
235 bc[i] = s[i] ^ s[i + 5] ^ s[i + 10] ^ s[i + 15] ^
/* ... then fold each column's neighbours back into the lanes. */
238 for (i = 0; i < 5; i++) {
239 t = bc[(i + 4) % 5] ^ SHA3_ROTL64(bc[(i + 1) % 5], 1);
240 for (j = 0; j < 25; j += 5)
/* rho + pi: rotate lanes and permute their positions
 * (j is presumably taken from keccakf_piln[i] on a line not
 * visible here -- verify against the full source).
 */
246 for (i = 0; i < 24; i++) {
249 s[j] = SHA3_ROTL64(t, keccakf_rotc[i]);
/* chi: non-linear row mixing. */
254 for (j = 0; j < 25; j += 5) {
255 for (i = 0; i < 5; i++)
257 for (i = 0; i < 5; i++)
258 s[j + i] ^= (~bc[(i + 1) % 5]) &
/* iota: inject the per-round constant into lane 0. */
263 s[0] ^= keccakf_rndc[round];
/*
 * Reset ctx for SHA3-224: zero the sponge and set the capacity to
 * 2*224 bits expressed in 64-bit words (rate = 25 - capacityWords).
 */
268 sha3_Init224(void *priv)
270 sha3_context *ctx = (sha3_context *) priv;
272 memset(ctx, 0, sizeof(*ctx));
273 ctx->capacityWords = 2 * 224 / (8 * sizeof(uint64_t));
/*
 * Reset ctx for SHA3-256: zero the sponge and set the capacity to
 * 2*256 bits expressed in 64-bit words.
 */
277 sha3_Init256(void *priv)
279 sha3_context *ctx = (sha3_context *) priv;
281 memset(ctx, 0, sizeof(*ctx));
282 ctx->capacityWords = 2 * 256 / (8 * sizeof(uint64_t));
/*
 * Reset ctx for SHA3-384: zero the sponge and set the capacity to
 * 2*384 bits expressed in 64-bit words.
 */
286 sha3_Init384(void *priv)
288 sha3_context *ctx = (sha3_context *) priv;
290 memset(ctx, 0, sizeof(*ctx));
291 ctx->capacityWords = 2 * 384 / (8 * sizeof(uint64_t));
/*
 * Reset ctx for SHA3-512: zero the sponge and set the capacity to
 * 2*512 bits expressed in 64-bit words.
 */
295 sha3_Init512(void *priv)
297 sha3_context *ctx = (sha3_context *) priv;
299 memset(ctx, 0, sizeof(*ctx));
300 ctx->capacityWords = 2 * 512 / (8 * sizeof(uint64_t));
304 /* This is simply the 'update' with the padding block.
305 * The padding block is 0x01 || 0x00* || 0x80. First 0x01 and last 0x80
306 * bytes are always present, but they can be the same byte.
/*
 * Absorb len bytes of bufIn into the sponge: first top up any
 * partially-filled lane buffered in ctx->saved, then XOR whole
 * little-endian 64-bit words into the state (permuting with keccakf
 * whenever the rate is full), and finally buffer the trailing bytes.
 */
309 sha3_Update(void *priv, void const *bufIn, size_t len)
311 sha3_context *ctx = (sha3_context *) priv;
/* Bytes still needed to complete the partially-buffered lane. */
312 unsigned int old_tail = (8 - ctx->byteIndex) & 7;
316 const uint8_t *buf = bufIn;
/* Too short to complete the buffered lane: stash bytes and return. */
318 if (len < old_tail) {
320 ctx->saved |= (uint64_t) (*(buf++)) <<
321 ((ctx->byteIndex++) * 8);
/* Complete the buffered lane and fold it into the state. */
328 ctx->saved |= (uint64_t) (*(buf++)) <<
329 ((ctx->byteIndex++) * 8);
331 ctx->s[ctx->wordIndex] ^= ctx->saved;
/* Rate block full (25 - capacity lanes absorbed): permute. */
334 if (++ctx->wordIndex ==
335 (SHA3_KECCAK_SPONGE_WORDS - ctx->capacityWords)) {
/* Bulk phase: absorb whole 64-bit words, little-endian. */
341 words = len / sizeof(uint64_t);
342 tail = len - words * sizeof(uint64_t);
344 for (i = 0; i < words; i++, buf += sizeof(uint64_t)) {
345 const uint64_t t = (uint64_t) (buf[0]) |
346 ((uint64_t) (buf[1]) << 8 * 1) |
347 ((uint64_t) (buf[2]) << 8 * 2) |
348 ((uint64_t) (buf[3]) << 8 * 3) |
349 ((uint64_t) (buf[4]) << 8 * 4) |
350 ((uint64_t) (buf[5]) << 8 * 5) |
351 ((uint64_t) (buf[6]) << 8 * 6) |
352 ((uint64_t) (buf[7]) << 8 * 7);
353 ctx->s[ctx->wordIndex] ^= t;
354 if (++ctx->wordIndex ==
355 (SHA3_KECCAK_SPONGE_WORDS - ctx->capacityWords)) {
/* Buffer the remaining tail bytes for the next call. */
362 ctx->saved |= (uint64_t) (*(buf++)) << ((ctx->byteIndex++) * 8);
/*
 * Absorb exactly one SHA3-224 rate block of data_in into a fresh
 * sponge, then copy the 200-byte sponge state out byte-reversed
 * (the CCP engine expects the context in this order).  data_out must
 * hold CCP_SHA3_CTX_SIZE bytes.  Used for HMAC ipad/opad precompute.
 */
365 int partial_hash_sha3_224(uint8_t *data_in, uint8_t *data_out)
370 ctx = rte_zmalloc("sha3-ctx", sizeof(sha3_context), 0);
372 CCP_LOG_ERR("sha3-ctx creation failed");
376 sha3_Update(ctx, data_in, SHA3_224_BLOCK_SIZE);
/* Emit the raw sponge bytes in reverse order for the engine. */
377 for (i = 0; i < CCP_SHA3_CTX_SIZE; i++, data_out++)
378 *data_out = ctx->sb[CCP_SHA3_CTX_SIZE - i - 1];
/*
 * SHA3-256 variant of partial_hash_sha3_224(): absorb one rate block
 * and copy the 200-byte sponge state out byte-reversed.
 */
384 int partial_hash_sha3_256(uint8_t *data_in, uint8_t *data_out)
389 ctx = rte_zmalloc("sha3-ctx", sizeof(sha3_context), 0);
391 CCP_LOG_ERR("sha3-ctx creation failed");
395 sha3_Update(ctx, data_in, SHA3_256_BLOCK_SIZE);
396 for (i = 0; i < CCP_SHA3_CTX_SIZE; i++, data_out++)
397 *data_out = ctx->sb[CCP_SHA3_CTX_SIZE - i - 1];
/*
 * SHA3-384 variant: absorb one rate block and copy the 200-byte
 * sponge state out byte-reversed.
 */
403 int partial_hash_sha3_384(uint8_t *data_in, uint8_t *data_out)
408 ctx = rte_zmalloc("sha3-ctx", sizeof(sha3_context), 0);
410 CCP_LOG_ERR("sha3-ctx creation failed");
414 sha3_Update(ctx, data_in, SHA3_384_BLOCK_SIZE);
415 for (i = 0; i < CCP_SHA3_CTX_SIZE; i++, data_out++)
416 *data_out = ctx->sb[CCP_SHA3_CTX_SIZE - i - 1];
/*
 * SHA3-512 variant: absorb one rate block and copy the 200-byte
 * sponge state out byte-reversed.
 */
422 int partial_hash_sha3_512(uint8_t *data_in, uint8_t *data_out)
427 ctx = rte_zmalloc("sha3-ctx", sizeof(sha3_context), 0);
429 CCP_LOG_ERR("sha3-ctx creation failed");
433 sha3_Update(ctx, data_in, SHA3_512_BLOCK_SIZE);
434 for (i = 0; i < CCP_SHA3_CTX_SIZE; i++, data_out++)
435 *data_out = ctx->sb[CCP_SHA3_CTX_SIZE - i - 1];
/*
 * Precompute the HMAC inner/outer hash states for the session's auth
 * algorithm: XOR the (block-size-padded) key with the ipad/opad
 * constants, run one compression over each, and store both states in
 * sess->auth.pre_compute -- ipad state first, opad state at offset
 * sess->auth.ctx_len.  For SHA-1/224/256/384/512 the state words are
 * stored word-reversed (CCP expects big-endian order); for SHA-3 the
 * partial_hash_sha3_* helpers already emit engine byte order.
 */
441 static int generate_partial_hash(struct ccp_session *sess)
444 uint8_t ipad[sess->auth.block_size];
445 uint8_t opad[sess->auth.block_size];
446 uint8_t *ipad_t, *opad_t;
447 uint32_t *hash_value_be32, hash_temp32[8];
448 uint64_t *hash_value_be64, hash_temp64[8];
450 uint8_t *hash_value_sha3;
452 opad_t = ipad_t = (uint8_t *)sess->auth.key;
454 hash_value_be32 = (uint32_t *)((uint8_t *)sess->auth.pre_compute);
455 hash_value_be64 = (uint64_t *)((uint8_t *)sess->auth.pre_compute);
457 /* considering key size is always equal to block size of algorithm */
458 for (i = 0; i < sess->auth.block_size; i++) {
459 ipad[i] = (ipad_t[i] ^ HMAC_IPAD_VALUE);
460 opad[i] = (opad_t[i] ^ HMAC_OPAD_VALUE);
463 switch (sess->auth.algo) {
464 case CCP_AUTH_ALGO_SHA1_HMAC:
/* count = state words: digest size / 4 for 32-bit word hashes. */
465 count = SHA1_DIGEST_SIZE >> 2;
467 if (partial_hash_sha1(ipad, (uint8_t *)hash_temp32))
/* Word-reverse the ipad state into pre_compute. */
469 for (i = 0; i < count; i++, hash_value_be32++)
470 *hash_value_be32 = hash_temp32[count - 1 - i];
/* Opad state goes at ctx_len offset. */
472 hash_value_be32 = (uint32_t *)((uint8_t *)sess->auth.pre_compute
473 + sess->auth.ctx_len);
474 if (partial_hash_sha1(opad, (uint8_t *)hash_temp32))
476 for (i = 0; i < count; i++, hash_value_be32++)
477 *hash_value_be32 = hash_temp32[count - 1 - i];
479 case CCP_AUTH_ALGO_SHA224_HMAC:
480 count = SHA256_DIGEST_SIZE >> 2;
482 if (partial_hash_sha224(ipad, (uint8_t *)hash_temp32))
484 for (i = 0; i < count; i++, hash_value_be32++)
485 *hash_value_be32 = hash_temp32[count - 1 - i];
487 hash_value_be32 = (uint32_t *)((uint8_t *)sess->auth.pre_compute
488 + sess->auth.ctx_len);
489 if (partial_hash_sha224(opad, (uint8_t *)hash_temp32))
491 for (i = 0; i < count; i++, hash_value_be32++)
492 *hash_value_be32 = hash_temp32[count - 1 - i];
/* SHA-3 helpers write the engine-ordered state directly. */
494 case CCP_AUTH_ALGO_SHA3_224_HMAC:
495 hash_value_sha3 = sess->auth.pre_compute;
496 if (partial_hash_sha3_224(ipad, hash_value_sha3))
499 hash_value_sha3 = (uint8_t *)(sess->auth.pre_compute
500 + sess->auth.ctx_len);
501 if (partial_hash_sha3_224(opad, hash_value_sha3))
504 case CCP_AUTH_ALGO_SHA256_HMAC:
505 count = SHA256_DIGEST_SIZE >> 2;
507 if (partial_hash_sha256(ipad, (uint8_t *)hash_temp32))
509 for (i = 0; i < count; i++, hash_value_be32++)
510 *hash_value_be32 = hash_temp32[count - 1 - i];
512 hash_value_be32 = (uint32_t *)((uint8_t *)sess->auth.pre_compute
513 + sess->auth.ctx_len);
514 if (partial_hash_sha256(opad, (uint8_t *)hash_temp32))
516 for (i = 0; i < count; i++, hash_value_be32++)
517 *hash_value_be32 = hash_temp32[count - 1 - i];
519 case CCP_AUTH_ALGO_SHA3_256_HMAC:
520 hash_value_sha3 = sess->auth.pre_compute;
521 if (partial_hash_sha3_256(ipad, hash_value_sha3))
524 hash_value_sha3 = (uint8_t *)(sess->auth.pre_compute
525 + sess->auth.ctx_len);
526 if (partial_hash_sha3_256(opad, hash_value_sha3))
529 case CCP_AUTH_ALGO_SHA384_HMAC:
/* 64-bit word hashes: digest size / 8 state words. */
530 count = SHA512_DIGEST_SIZE >> 3;
532 if (partial_hash_sha384(ipad, (uint8_t *)hash_temp64))
534 for (i = 0; i < count; i++, hash_value_be64++)
535 *hash_value_be64 = hash_temp64[count - 1 - i];
537 hash_value_be64 = (uint64_t *)((uint8_t *)sess->auth.pre_compute
538 + sess->auth.ctx_len);
539 if (partial_hash_sha384(opad, (uint8_t *)hash_temp64))
541 for (i = 0; i < count; i++, hash_value_be64++)
542 *hash_value_be64 = hash_temp64[count - 1 - i];
544 case CCP_AUTH_ALGO_SHA3_384_HMAC:
545 hash_value_sha3 = sess->auth.pre_compute;
546 if (partial_hash_sha3_384(ipad, hash_value_sha3))
549 hash_value_sha3 = (uint8_t *)(sess->auth.pre_compute
550 + sess->auth.ctx_len);
551 if (partial_hash_sha3_384(opad, hash_value_sha3))
554 case CCP_AUTH_ALGO_SHA512_HMAC:
555 count = SHA512_DIGEST_SIZE >> 3;
557 if (partial_hash_sha512(ipad, (uint8_t *)hash_temp64))
559 for (i = 0; i < count; i++, hash_value_be64++)
560 *hash_value_be64 = hash_temp64[count - 1 - i];
562 hash_value_be64 = (uint64_t *)((uint8_t *)sess->auth.pre_compute
563 + sess->auth.ctx_len);
564 if (partial_hash_sha512(opad, (uint8_t *)hash_temp64))
566 for (i = 0; i < count; i++, hash_value_be64++)
567 *hash_value_be64 = hash_temp64[count - 1 - i];
569 case CCP_AUTH_ALGO_SHA3_512_HMAC:
570 hash_value_sha3 = sess->auth.pre_compute;
571 if (partial_hash_sha3_512(ipad, hash_value_sha3))
574 hash_value_sha3 = (uint8_t *)(sess->auth.pre_compute
575 + sess->auth.ctx_len);
576 if (partial_hash_sha3_512(opad, hash_value_sha3))
580 CCP_LOG_ERR("Invalid auth algo");
585 /* prepare temporary keys K1 and K2 */
/*
 * CMAC subkey derivation step (NIST SP 800-38B): k = l << 1 (bitwise,
 * carrying across bytes); if l's MSB was set, XOR the last byte with
 * the field constant Rb (0x87 for 128-bit blocks, 0x1b for 64-bit).
 */
586 static void prepare_key(unsigned char *k, unsigned char *l, int bl)
589 /* Shift block to left, including carry */
590 for (i = 0; i < bl; i++) {
/* Propagate the carry bit from the next byte's MSB. */
592 if (i < bl - 1 && l[i + 1] & 0x80)
595 /* If MSB set fixup with R */
597 k[bl - 1] ^= bl == 16 ? 0x87 : 0x1b;
600 /* subkeys K1 and K2 generation for CMAC */
/*
 * Derive the CMAC subkeys K1/K2 per SP 800-38B: encrypt a zero block
 * with the session key via OpenSSL EVP (CBC with a zero IV over one
 * block is equivalent to raw ECB here), then left-shift/conditionally
 * XOR twice via prepare_key().  Both subkeys are stored byte-reversed
 * into sess->auth.pre_compute (one CCP_SB_BYTES slot each) for the
 * CCP engine.  Cleanup paths fall on lines outside this view.
 */
602 generate_cmac_subkeys(struct ccp_session *sess)
604 const EVP_CIPHER *algo;
606 unsigned char *ccp_ctx;
609 unsigned char zero_iv[AES_BLOCK_SIZE] = {0};
610 unsigned char dst[2 * AES_BLOCK_SIZE] = {0};
611 unsigned char k1[AES_BLOCK_SIZE] = {0};
612 unsigned char k2[AES_BLOCK_SIZE] = {0};
/* Pick the EVP cipher matching the session's AES key size. */
614 if (sess->auth.ut.aes_type == CCP_AES_TYPE_128)
615 algo = EVP_aes_128_cbc();
616 else if (sess->auth.ut.aes_type == CCP_AES_TYPE_192)
617 algo = EVP_aes_192_cbc();
618 else if (sess->auth.ut.aes_type == CCP_AES_TYPE_256)
619 algo = EVP_aes_256_cbc();
621 CCP_LOG_ERR("Invalid CMAC type length");
625 ctx = EVP_CIPHER_CTX_new();
627 CCP_LOG_ERR("ctx creation failed");
/* dst = E_K(0^128), the L value of SP 800-38B. */
630 if (EVP_EncryptInit(ctx, algo, (unsigned char *)sess->auth.key,
631 (unsigned char *)zero_iv) <= 0)
632 goto key_generate_err;
633 if (EVP_CIPHER_CTX_set_padding(ctx, 0) <= 0)
634 goto key_generate_err;
635 if (EVP_EncryptUpdate(ctx, dst, &dstlen, zero_iv,
636 AES_BLOCK_SIZE) <= 0)
637 goto key_generate_err;
638 if (EVP_EncryptFinal_ex(ctx, dst + dstlen, &totlen) <= 0)
639 goto key_generate_err;
641 memset(sess->auth.pre_compute, 0, CCP_SB_BYTES * 2);
/* K1 = prepare_key(L), written byte-reversed into slot 0. */
643 ccp_ctx = (unsigned char *)(sess->auth.pre_compute + CCP_SB_BYTES - 1);
644 prepare_key(k1, dst, AES_BLOCK_SIZE);
645 for (i = 0; i < AES_BLOCK_SIZE; i++, ccp_ctx--)
/* K2 = prepare_key(K1), written byte-reversed into slot 1. */
648 ccp_ctx = (unsigned char *)(sess->auth.pre_compute +
649 (2 * CCP_SB_BYTES) - 1);
650 prepare_key(k2, k1, AES_BLOCK_SIZE);
651 for (i = 0; i < AES_BLOCK_SIZE; i++, ccp_ctx--)
654 EVP_CIPHER_CTX_free(ctx);
659 CCP_LOG_ERR("CMAC Init failed");
663 /* configure session */
/*
 * Fill the cipher half of a CCP session from a cipher xform:
 * direction, key, IV parameters, algorithm/engine selection, and the
 * byte-reversed key copy (key_ccp) the engine consumes.  Error-return
 * statements fall on lines outside this view.
 */
665 ccp_configure_session_cipher(struct ccp_session *sess,
666 const struct rte_crypto_sym_xform *xform)
668 const struct rte_crypto_cipher_xform *cipher_xform = NULL;
671 cipher_xform = &xform->cipher;
673 /* set cipher direction */
674 if (cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
675 sess->cipher.dir = CCP_CIPHER_DIR_ENCRYPT;
677 sess->cipher.dir = CCP_CIPHER_DIR_DECRYPT;
/* set cipher key */
680 sess->cipher.key_length = cipher_xform->key.length;
681 rte_memcpy(sess->cipher.key, cipher_xform->key.data,
682 cipher_xform->key.length);
684 /* set iv parameters */
685 sess->iv.offset = cipher_xform->iv.offset;
686 sess->iv.length = cipher_xform->iv.length;
688 switch (cipher_xform->algo) {
689 case RTE_CRYPTO_CIPHER_AES_CTR:
690 sess->cipher.algo = CCP_CIPHER_ALGO_AES_CTR;
691 sess->cipher.um.aes_mode = CCP_AES_MODE_CTR;
692 sess->cipher.engine = CCP_ENGINE_AES;
694 case RTE_CRYPTO_CIPHER_AES_ECB:
/* NOTE(review): ECB deliberately reuses the CBC algo id while the
 * engine mode is set to ECB -- confirm against ccp_crypto.h.
 */
695 sess->cipher.algo = CCP_CIPHER_ALGO_AES_CBC;
696 sess->cipher.um.aes_mode = CCP_AES_MODE_ECB;
697 sess->cipher.engine = CCP_ENGINE_AES;
699 case RTE_CRYPTO_CIPHER_AES_CBC:
700 sess->cipher.algo = CCP_CIPHER_ALGO_AES_CBC;
701 sess->cipher.um.aes_mode = CCP_AES_MODE_CBC;
702 sess->cipher.engine = CCP_ENGINE_AES;
704 case RTE_CRYPTO_CIPHER_3DES_CBC:
705 sess->cipher.algo = CCP_CIPHER_ALGO_3DES_CBC;
706 sess->cipher.um.des_mode = CCP_DES_MODE_CBC;
707 sess->cipher.engine = CCP_ENGINE_3DES;
710 CCP_LOG_ERR("Unsupported cipher algo");
/* Per-engine key-size validation and engine-order key copy. */
715 switch (sess->cipher.engine) {
717 if (sess->cipher.key_length == 16)
718 sess->cipher.ut.aes_type = CCP_AES_TYPE_128;
719 else if (sess->cipher.key_length == 24)
720 sess->cipher.ut.aes_type = CCP_AES_TYPE_192;
721 else if (sess->cipher.key_length == 32)
722 sess->cipher.ut.aes_type = CCP_AES_TYPE_256;
724 CCP_LOG_ERR("Invalid cipher key length");
/* AES: whole key reversed byte-by-byte. */
727 for (i = 0; i < sess->cipher.key_length ; i++)
728 sess->cipher.key_ccp[sess->cipher.key_length - i - 1] =
731 case CCP_ENGINE_3DES:
732 if (sess->cipher.key_length == 16)
733 sess->cipher.ut.des_type = CCP_DES_TYPE_128;
734 else if (sess->cipher.key_length == 24)
735 sess->cipher.ut.des_type = CCP_DES_TYPE_192;
737 CCP_LOG_ERR("Invalid cipher key length");
/* 3DES: each 8-byte subkey reversed independently. */
740 for (j = 0, x = 0; j < sess->cipher.key_length/8; j++, x += 8)
741 for (i = 0; i < 8; i++)
742 sess->cipher.key_ccp[(8 + x) - i - 1] =
743 sess->cipher.key[i + x];
746 CCP_LOG_ERR("Invalid CCP Engine");
/* Physical addresses for the engine's DMA descriptors. */
749 sess->cipher.nonce_phys = rte_mem_virt2phy(sess->cipher.nonce);
750 sess->cipher.key_phys = rte_mem_virt2phy(sess->cipher.key_ccp);
/*
 * Fill the auth half of a CCP session from an auth xform.  For each
 * supported algorithm it records the CCP engine/type, the context
 * length and digest offset within the LSB slot, and the block size;
 * HMAC variants additionally store the key and call
 * generate_partial_hash() to precompute the ipad/opad states, and
 * AES-CMAC calls generate_cmac_subkeys().  When
 * RTE_LIBRTE_PMD_CCP_CPU_AUTH is defined, several algorithms are
 * instead configured for CPU-side (OpenSSL) processing and skip the
 * engine fields.  break/return statements fall on lines outside this
 * view.
 */
755 ccp_configure_session_auth(struct ccp_session *sess,
756 const struct rte_crypto_sym_xform *xform)
758 const struct rte_crypto_auth_xform *auth_xform = NULL;
761 auth_xform = &xform->auth;
763 sess->auth.digest_length = auth_xform->digest_length;
764 if (auth_xform->op == RTE_CRYPTO_AUTH_OP_GENERATE)
765 sess->auth.op = CCP_AUTH_OP_GENERATE;
767 sess->auth.op = CCP_AUTH_OP_VERIFY;
768 switch (auth_xform->algo) {
/* MD5-HMAC is only available via the CPU-auth path. */
769 #ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
770 case RTE_CRYPTO_AUTH_MD5_HMAC:
771 sess->auth.algo = CCP_AUTH_ALGO_MD5_HMAC;
772 sess->auth.offset = (CCP_SB_BYTES << 1) - MD5_DIGEST_SIZE;
773 sess->auth.key_length = auth_xform->key.length;
774 sess->auth.block_size = MD5_BLOCK_SIZE;
775 memset(sess->auth.key, 0, sess->auth.block_size);
776 rte_memcpy(sess->auth.key, auth_xform->key.data,
777 auth_xform->key.length);
780 case RTE_CRYPTO_AUTH_SHA1:
781 sess->auth.engine = CCP_ENGINE_SHA;
782 sess->auth.algo = CCP_AUTH_ALGO_SHA1;
783 sess->auth.ut.sha_type = CCP_SHA_TYPE_1;
784 sess->auth.ctx = (void *)ccp_sha1_init;
785 sess->auth.ctx_len = CCP_SB_BYTES;
786 sess->auth.offset = CCP_SB_BYTES - SHA1_DIGEST_SIZE;
788 case RTE_CRYPTO_AUTH_SHA1_HMAC:
/* CPU-auth build: store the key only; OpenSSL does the HMAC. */
789 #ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
790 if (auth_xform->key.length > SHA1_BLOCK_SIZE)
792 sess->auth.algo = CCP_AUTH_ALGO_SHA1_HMAC;
793 sess->auth.offset = CCP_SB_BYTES - SHA1_DIGEST_SIZE;
794 sess->auth.block_size = SHA1_BLOCK_SIZE;
795 sess->auth.key_length = auth_xform->key.length;
796 memset(sess->auth.key, 0, sess->auth.block_size);
797 rte_memcpy(sess->auth.key, auth_xform->key.data,
798 auth_xform->key.length);
/* Engine build: also precompute ipad/opad partial hashes. */
800 if (auth_xform->key.length > SHA1_BLOCK_SIZE)
802 sess->auth.engine = CCP_ENGINE_SHA;
803 sess->auth.algo = CCP_AUTH_ALGO_SHA1_HMAC;
804 sess->auth.ut.sha_type = CCP_SHA_TYPE_1;
805 sess->auth.ctx_len = CCP_SB_BYTES;
806 sess->auth.offset = CCP_SB_BYTES - SHA1_DIGEST_SIZE;
807 sess->auth.block_size = SHA1_BLOCK_SIZE;
808 sess->auth.key_length = auth_xform->key.length;
809 memset(sess->auth.key, 0, sess->auth.block_size);
810 memset(sess->auth.pre_compute, 0, sess->auth.ctx_len << 1);
811 rte_memcpy(sess->auth.key, auth_xform->key.data,
812 auth_xform->key.length);
813 if (generate_partial_hash(sess))
817 case RTE_CRYPTO_AUTH_SHA224:
818 sess->auth.algo = CCP_AUTH_ALGO_SHA224;
819 sess->auth.engine = CCP_ENGINE_SHA;
820 sess->auth.ut.sha_type = CCP_SHA_TYPE_224;
821 sess->auth.ctx = (void *)ccp_sha224_init;
822 sess->auth.ctx_len = CCP_SB_BYTES;
823 sess->auth.offset = CCP_SB_BYTES - SHA224_DIGEST_SIZE;
825 case RTE_CRYPTO_AUTH_SHA224_HMAC:
826 #ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
827 if (auth_xform->key.length > SHA224_BLOCK_SIZE)
829 sess->auth.algo = CCP_AUTH_ALGO_SHA224_HMAC;
830 sess->auth.offset = CCP_SB_BYTES - SHA224_DIGEST_SIZE;
831 sess->auth.block_size = SHA224_BLOCK_SIZE;
832 sess->auth.key_length = auth_xform->key.length;
833 memset(sess->auth.key, 0, sess->auth.block_size);
834 rte_memcpy(sess->auth.key, auth_xform->key.data,
835 auth_xform->key.length);
837 if (auth_xform->key.length > SHA224_BLOCK_SIZE)
839 sess->auth.algo = CCP_AUTH_ALGO_SHA224_HMAC;
840 sess->auth.engine = CCP_ENGINE_SHA;
841 sess->auth.ut.sha_type = CCP_SHA_TYPE_224;
842 sess->auth.ctx_len = CCP_SB_BYTES;
843 sess->auth.offset = CCP_SB_BYTES - SHA224_DIGEST_SIZE;
844 sess->auth.block_size = SHA224_BLOCK_SIZE;
845 sess->auth.key_length = auth_xform->key.length;
846 memset(sess->auth.key, 0, sess->auth.block_size);
847 memset(sess->auth.pre_compute, 0, sess->auth.ctx_len << 1);
848 rte_memcpy(sess->auth.key, auth_xform->key.data,
849 auth_xform->key.length);
850 if (generate_partial_hash(sess))
/* SHA-3 variants use the full 200-byte sponge as context. */
854 case RTE_CRYPTO_AUTH_SHA3_224:
855 sess->auth.algo = CCP_AUTH_ALGO_SHA3_224;
856 sess->auth.engine = CCP_ENGINE_SHA;
857 sess->auth.ut.sha_type = CCP_SHA3_TYPE_224;
858 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
859 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA224_DIGEST_SIZE;
861 case RTE_CRYPTO_AUTH_SHA3_224_HMAC:
862 if (auth_xform->key.length > SHA3_224_BLOCK_SIZE)
864 sess->auth.algo = CCP_AUTH_ALGO_SHA3_224_HMAC;
865 sess->auth.engine = CCP_ENGINE_SHA;
866 sess->auth.ut.sha_type = CCP_SHA3_TYPE_224;
867 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
868 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA224_DIGEST_SIZE;
869 sess->auth.block_size = SHA3_224_BLOCK_SIZE;
870 sess->auth.key_length = auth_xform->key.length;
871 memset(sess->auth.key, 0, sess->auth.block_size);
872 memset(sess->auth.pre_compute, 0, 2 * sess->auth.ctx_len);
873 rte_memcpy(sess->auth.key, auth_xform->key.data,
874 auth_xform->key.length);
875 if (generate_partial_hash(sess))
878 case RTE_CRYPTO_AUTH_SHA256:
879 sess->auth.algo = CCP_AUTH_ALGO_SHA256;
880 sess->auth.engine = CCP_ENGINE_SHA;
881 sess->auth.ut.sha_type = CCP_SHA_TYPE_256;
882 sess->auth.ctx = (void *)ccp_sha256_init;
883 sess->auth.ctx_len = CCP_SB_BYTES;
884 sess->auth.offset = CCP_SB_BYTES - SHA256_DIGEST_SIZE;
886 case RTE_CRYPTO_AUTH_SHA256_HMAC:
887 #ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
888 if (auth_xform->key.length > SHA256_BLOCK_SIZE)
890 sess->auth.algo = CCP_AUTH_ALGO_SHA256_HMAC;
891 sess->auth.offset = CCP_SB_BYTES - SHA256_DIGEST_SIZE;
892 sess->auth.block_size = SHA256_BLOCK_SIZE;
893 sess->auth.key_length = auth_xform->key.length;
894 memset(sess->auth.key, 0, sess->auth.block_size);
895 rte_memcpy(sess->auth.key, auth_xform->key.data,
896 auth_xform->key.length);
898 if (auth_xform->key.length > SHA256_BLOCK_SIZE)
900 sess->auth.algo = CCP_AUTH_ALGO_SHA256_HMAC;
901 sess->auth.engine = CCP_ENGINE_SHA;
902 sess->auth.ut.sha_type = CCP_SHA_TYPE_256;
903 sess->auth.ctx_len = CCP_SB_BYTES;
904 sess->auth.offset = CCP_SB_BYTES - SHA256_DIGEST_SIZE;
905 sess->auth.block_size = SHA256_BLOCK_SIZE;
906 sess->auth.key_length = auth_xform->key.length;
907 memset(sess->auth.key, 0, sess->auth.block_size);
908 memset(sess->auth.pre_compute, 0, sess->auth.ctx_len << 1);
909 rte_memcpy(sess->auth.key, auth_xform->key.data,
910 auth_xform->key.length);
911 if (generate_partial_hash(sess))
915 case RTE_CRYPTO_AUTH_SHA3_256:
916 sess->auth.algo = CCP_AUTH_ALGO_SHA3_256;
917 sess->auth.engine = CCP_ENGINE_SHA;
918 sess->auth.ut.sha_type = CCP_SHA3_TYPE_256;
919 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
920 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA256_DIGEST_SIZE;
922 case RTE_CRYPTO_AUTH_SHA3_256_HMAC:
923 if (auth_xform->key.length > SHA3_256_BLOCK_SIZE)
925 sess->auth.algo = CCP_AUTH_ALGO_SHA3_256_HMAC;
926 sess->auth.engine = CCP_ENGINE_SHA;
927 sess->auth.ut.sha_type = CCP_SHA3_TYPE_256;
928 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
929 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA256_DIGEST_SIZE;
930 sess->auth.block_size = SHA3_256_BLOCK_SIZE;
931 sess->auth.key_length = auth_xform->key.length;
932 memset(sess->auth.key, 0, sess->auth.block_size);
933 memset(sess->auth.pre_compute, 0, 2 * sess->auth.ctx_len);
934 rte_memcpy(sess->auth.key, auth_xform->key.data,
935 auth_xform->key.length);
936 if (generate_partial_hash(sess))
/* SHA-384/512 need a double-width (two-slot) context. */
939 case RTE_CRYPTO_AUTH_SHA384:
940 sess->auth.algo = CCP_AUTH_ALGO_SHA384;
941 sess->auth.engine = CCP_ENGINE_SHA;
942 sess->auth.ut.sha_type = CCP_SHA_TYPE_384;
943 sess->auth.ctx = (void *)ccp_sha384_init;
944 sess->auth.ctx_len = CCP_SB_BYTES << 1;
945 sess->auth.offset = (CCP_SB_BYTES << 1) - SHA384_DIGEST_SIZE;
947 case RTE_CRYPTO_AUTH_SHA384_HMAC:
948 #ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
949 if (auth_xform->key.length > SHA384_BLOCK_SIZE)
951 sess->auth.algo = CCP_AUTH_ALGO_SHA384_HMAC;
952 sess->auth.offset = (CCP_SB_BYTES << 1) - SHA384_DIGEST_SIZE;
953 sess->auth.block_size = SHA384_BLOCK_SIZE;
954 sess->auth.key_length = auth_xform->key.length;
955 memset(sess->auth.key, 0, sess->auth.block_size);
956 rte_memcpy(sess->auth.key, auth_xform->key.data,
957 auth_xform->key.length);
959 if (auth_xform->key.length > SHA384_BLOCK_SIZE)
961 sess->auth.algo = CCP_AUTH_ALGO_SHA384_HMAC;
962 sess->auth.engine = CCP_ENGINE_SHA;
963 sess->auth.ut.sha_type = CCP_SHA_TYPE_384;
964 sess->auth.ctx_len = CCP_SB_BYTES << 1;
965 sess->auth.offset = (CCP_SB_BYTES << 1) - SHA384_DIGEST_SIZE;
966 sess->auth.block_size = SHA384_BLOCK_SIZE;
967 sess->auth.key_length = auth_xform->key.length;
968 memset(sess->auth.key, 0, sess->auth.block_size);
969 memset(sess->auth.pre_compute, 0, sess->auth.ctx_len << 1);
970 rte_memcpy(sess->auth.key, auth_xform->key.data,
971 auth_xform->key.length);
972 if (generate_partial_hash(sess))
976 case RTE_CRYPTO_AUTH_SHA3_384:
977 sess->auth.algo = CCP_AUTH_ALGO_SHA3_384;
978 sess->auth.engine = CCP_ENGINE_SHA;
979 sess->auth.ut.sha_type = CCP_SHA3_TYPE_384;
980 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
981 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA384_DIGEST_SIZE;
983 case RTE_CRYPTO_AUTH_SHA3_384_HMAC:
984 if (auth_xform->key.length > SHA3_384_BLOCK_SIZE)
986 sess->auth.algo = CCP_AUTH_ALGO_SHA3_384_HMAC;
987 sess->auth.engine = CCP_ENGINE_SHA;
988 sess->auth.ut.sha_type = CCP_SHA3_TYPE_384;
989 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
990 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA384_DIGEST_SIZE;
991 sess->auth.block_size = SHA3_384_BLOCK_SIZE;
992 sess->auth.key_length = auth_xform->key.length;
993 memset(sess->auth.key, 0, sess->auth.block_size);
994 memset(sess->auth.pre_compute, 0, 2 * sess->auth.ctx_len);
995 rte_memcpy(sess->auth.key, auth_xform->key.data,
996 auth_xform->key.length);
997 if (generate_partial_hash(sess))
1000 case RTE_CRYPTO_AUTH_SHA512:
1001 sess->auth.algo = CCP_AUTH_ALGO_SHA512;
1002 sess->auth.engine = CCP_ENGINE_SHA;
1003 sess->auth.ut.sha_type = CCP_SHA_TYPE_512;
1004 sess->auth.ctx = (void *)ccp_sha512_init;
1005 sess->auth.ctx_len = CCP_SB_BYTES << 1;
1006 sess->auth.offset = (CCP_SB_BYTES << 1) - SHA512_DIGEST_SIZE;
1008 case RTE_CRYPTO_AUTH_SHA512_HMAC:
1009 #ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
1010 if (auth_xform->key.length > SHA512_BLOCK_SIZE)
1012 sess->auth.algo = CCP_AUTH_ALGO_SHA512_HMAC;
1013 sess->auth.offset = (CCP_SB_BYTES << 1) - SHA512_DIGEST_SIZE;
1014 sess->auth.block_size = SHA512_BLOCK_SIZE;
1015 sess->auth.key_length = auth_xform->key.length;
1016 memset(sess->auth.key, 0, sess->auth.block_size);
1017 rte_memcpy(sess->auth.key, auth_xform->key.data,
1018 auth_xform->key.length);
1020 if (auth_xform->key.length > SHA512_BLOCK_SIZE)
1022 sess->auth.algo = CCP_AUTH_ALGO_SHA512_HMAC;
1023 sess->auth.engine = CCP_ENGINE_SHA;
1024 sess->auth.ut.sha_type = CCP_SHA_TYPE_512;
1025 sess->auth.ctx_len = CCP_SB_BYTES << 1;
1026 sess->auth.offset = (CCP_SB_BYTES << 1) - SHA512_DIGEST_SIZE;
1027 sess->auth.block_size = SHA512_BLOCK_SIZE;
1028 sess->auth.key_length = auth_xform->key.length;
1029 memset(sess->auth.key, 0, sess->auth.block_size);
1030 memset(sess->auth.pre_compute, 0, sess->auth.ctx_len << 1);
1031 rte_memcpy(sess->auth.key, auth_xform->key.data,
1032 auth_xform->key.length);
1033 if (generate_partial_hash(sess))
1037 case RTE_CRYPTO_AUTH_SHA3_512:
1038 sess->auth.algo = CCP_AUTH_ALGO_SHA3_512;
1039 sess->auth.engine = CCP_ENGINE_SHA;
1040 sess->auth.ut.sha_type = CCP_SHA3_TYPE_512;
1041 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
1042 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA512_DIGEST_SIZE;
1044 case RTE_CRYPTO_AUTH_SHA3_512_HMAC:
1045 if (auth_xform->key.length > SHA3_512_BLOCK_SIZE)
1047 sess->auth.algo = CCP_AUTH_ALGO_SHA3_512_HMAC;
1048 sess->auth.engine = CCP_ENGINE_SHA;
1049 sess->auth.ut.sha_type = CCP_SHA3_TYPE_512;
1050 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
1051 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA512_DIGEST_SIZE;
1052 sess->auth.block_size = SHA3_512_BLOCK_SIZE;
1053 sess->auth.key_length = auth_xform->key.length;
1054 memset(sess->auth.key, 0, sess->auth.block_size);
1055 memset(sess->auth.pre_compute, 0, 2 * sess->auth.ctx_len);
1056 rte_memcpy(sess->auth.key, auth_xform->key.data,
1057 auth_xform->key.length);
1058 if (generate_partial_hash(sess))
/* AES-CMAC runs on the AES engine with precomputed K1/K2. */
1061 case RTE_CRYPTO_AUTH_AES_CMAC:
1062 sess->auth.algo = CCP_AUTH_ALGO_AES_CMAC;
1063 sess->auth.engine = CCP_ENGINE_AES;
1064 sess->auth.um.aes_mode = CCP_AES_MODE_CMAC;
1065 sess->auth.key_length = auth_xform->key.length;
1066 /* padding and hash result */
1067 sess->auth.ctx_len = CCP_SB_BYTES << 1;
1068 sess->auth.offset = AES_BLOCK_SIZE;
1069 sess->auth.block_size = AES_BLOCK_SIZE;
1070 if (sess->auth.key_length == 16)
1071 sess->auth.ut.aes_type = CCP_AES_TYPE_128;
1072 else if (sess->auth.key_length == 24)
1073 sess->auth.ut.aes_type = CCP_AES_TYPE_192;
1074 else if (sess->auth.key_length == 32)
1075 sess->auth.ut.aes_type = CCP_AES_TYPE_256;
1077 CCP_LOG_ERR("Invalid CMAC key length");
1080 rte_memcpy(sess->auth.key, auth_xform->key.data,
1081 sess->auth.key_length)
/* Byte-reversed key copy for the engine, as in the cipher path. */
1082 for (i = 0; i < sess->auth.key_length; i++)
1083 sess->auth.key_ccp[sess->auth.key_length - i - 1] =
1085 if (generate_cmac_subkeys(sess))
1089 CCP_LOG_ERR("Unsupported hash algo");
/*
 * Configure both halves of the session from an AEAD xform (AES-GCM
 * only): direction maps encrypt->generate / decrypt->verify, the
 * cipher side runs GCTR and the auth side GHASH on the AES engine,
 * and the command is forced to CCP_CMD_COMBINED.  break/return
 * statements fall on lines outside this view.
 */
1096 ccp_configure_session_aead(struct ccp_session *sess,
1097 const struct rte_crypto_sym_xform *xform)
1099 const struct rte_crypto_aead_xform *aead_xform = NULL;
1102 aead_xform = &xform->aead;
1104 sess->cipher.key_length = aead_xform->key.length;
1105 rte_memcpy(sess->cipher.key, aead_xform->key.data,
1106 aead_xform->key.length);
/* AEAD direction implies both cipher dir and auth op. */
1108 if (aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
1109 sess->cipher.dir = CCP_CIPHER_DIR_ENCRYPT;
1110 sess->auth.op = CCP_AUTH_OP_GENERATE;
1112 sess->cipher.dir = CCP_CIPHER_DIR_DECRYPT;
1113 sess->auth.op = CCP_AUTH_OP_VERIFY;
1115 sess->aead_algo = aead_xform->algo;
1116 sess->auth.aad_length = aead_xform->aad_length;
1117 sess->auth.digest_length = aead_xform->digest_length;
1119 /* set iv parameters */
1120 sess->iv.offset = aead_xform->iv.offset;
1121 sess->iv.length = aead_xform->iv.length;
1123 switch (aead_xform->algo) {
1124 case RTE_CRYPTO_AEAD_AES_GCM:
1125 sess->cipher.algo = CCP_CIPHER_ALGO_AES_GCM;
1126 sess->cipher.um.aes_mode = CCP_AES_MODE_GCTR;
1127 sess->cipher.engine = CCP_ENGINE_AES;
1128 if (sess->cipher.key_length == 16)
1129 sess->cipher.ut.aes_type = CCP_AES_TYPE_128;
1130 else if (sess->cipher.key_length == 24)
1131 sess->cipher.ut.aes_type = CCP_AES_TYPE_192;
1132 else if (sess->cipher.key_length == 32)
1133 sess->cipher.ut.aes_type = CCP_AES_TYPE_256;
1135 CCP_LOG_ERR("Invalid aead key length");
/* Byte-reversed key copy for the engine. */
1138 for (i = 0; i < sess->cipher.key_length; i++)
1139 sess->cipher.key_ccp[sess->cipher.key_length - i - 1] =
1140 sess->cipher.key[i];
1141 sess->auth.algo = CCP_AUTH_ALGO_AES_GCM;
1142 sess->auth.engine = CCP_ENGINE_AES;
1143 sess->auth.um.aes_mode = CCP_AES_MODE_GHASH;
1144 sess->auth.ctx_len = CCP_SB_BYTES;
1145 sess->auth.offset = 0;
1146 sess->auth.block_size = AES_BLOCK_SIZE;
1147 sess->cmd_id = CCP_CMD_COMBINED;
1150 CCP_LOG_ERR("Unsupported aead algo");
/* Physical addresses for the engine's DMA descriptors. */
1153 sess->cipher.nonce_phys = rte_mem_virt2phy(sess->cipher.nonce);
1154 sess->cipher.key_phys = rte_mem_virt2phy(sess->cipher.key_ccp);
/*
 * Top-level session setup: classify the xform chain via
 * ccp_get_cmd_id(), pick which xforms feed the cipher/auth/aead
 * configuration helpers, and run each applicable helper, logging and
 * propagating any failure.
 */
1159 ccp_set_session_parameters(struct ccp_session *sess,
1160 const struct rte_crypto_sym_xform *xform)
1162 const struct rte_crypto_sym_xform *cipher_xform = NULL;
1163 const struct rte_crypto_sym_xform *auth_xform = NULL;
1164 const struct rte_crypto_sym_xform *aead_xform = NULL;
1167 sess->cmd_id = ccp_get_cmd_id(xform);
1169 switch (sess->cmd_id) {
1170 case CCP_CMD_CIPHER:
1171 cipher_xform = xform;
1176 case CCP_CMD_CIPHER_HASH:
1177 cipher_xform = xform;
1178 auth_xform = xform->next;
1180 case CCP_CMD_HASH_CIPHER:
1182 cipher_xform = xform->next;
1184 case CCP_CMD_COMBINED:
1188 CCP_LOG_ERR("Unsupported cmd_id");
1192 /* Default IV length = 0 */
1193 sess->iv.length = 0;
/* Configure each selected stage; any failure aborts setup. */
1195 ret = ccp_configure_session_cipher(sess, cipher_xform);
1197 CCP_LOG_ERR("Invalid/unsupported cipher parameters");
1202 ret = ccp_configure_session_auth(sess, auth_xform);
1204 CCP_LOG_ERR("Invalid/unsupported auth parameters");
1209 ret = ccp_configure_session_aead(sess, aead_xform);
1211 CCP_LOG_ERR("Invalid/unsupported aead parameters");
1218 /* calculate CCP descriptors requirement */
/*
 * Return the number of CCP queue descriptors needed for one cipher op
 * of this session.  CBC/CTR modes need an extra descriptor to pass the
 * IV through to the LSB.
 * NOTE(review): the per-case count assignments and the return are
 * elided from this listing.
 */
1220 ccp_cipher_slot(struct ccp_session *session)
1224 switch (session->cipher.algo) {
1225 case CCP_CIPHER_ALGO_AES_CBC:
1227 /**< op + passthrough for iv */
1229 case CCP_CIPHER_ALGO_AES_ECB:
1233 case CCP_CIPHER_ALGO_AES_CTR:
1235 /**< op + passthrough for iv */
1237 case CCP_CIPHER_ALGO_3DES_CBC:
1239 /**< op + passthrough for iv */
1242 CCP_LOG_ERR("Unsupported cipher algo %d",
1243 session->cipher.algo);
/*
 * Return the number of CCP queue descriptors needed for one auth op of
 * this session.  HMAC-on-hardware takes the most slots (two hash passes
 * plus LSB loads/retrieves); when RTE_LIBRTE_PMD_CCP_CPU_AUTH is set,
 * several HMAC algos are computed on the CPU instead and need fewer
 * (or zero) descriptors.
 * NOTE(review): the per-case count values, #else arms and the return
 * are elided from this listing.
 */
1249 ccp_auth_slot(struct ccp_session *session)
1253 switch (session->auth.algo) {
1254 case CCP_AUTH_ALGO_SHA1:
1255 case CCP_AUTH_ALGO_SHA224:
1256 case CCP_AUTH_ALGO_SHA256:
1257 case CCP_AUTH_ALGO_SHA384:
1258 case CCP_AUTH_ALGO_SHA512:
1260 /**< op + lsb passthrough cpy to/from*/
1262 #ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
1263 case CCP_AUTH_ALGO_MD5_HMAC:
1266 case CCP_AUTH_ALGO_SHA1_HMAC:
1267 case CCP_AUTH_ALGO_SHA224_HMAC:
1268 case CCP_AUTH_ALGO_SHA256_HMAC:
1269 #ifndef RTE_LIBRTE_PMD_CCP_CPU_AUTH
1273 case CCP_AUTH_ALGO_SHA384_HMAC:
1274 case CCP_AUTH_ALGO_SHA512_HMAC:
1275 #ifndef RTE_LIBRTE_PMD_CCP_CPU_AUTH
/* hardware HMAC flow: two SHA passes bracketed by LSB loads/retrieves */
1279 * 1. Load PHash1 = H(k ^ ipad); to LSB
1280 * 2. generate IHash = H(hash on meassage with PHash1
1282 * 3. Retrieve IHash 2 slots for 384/512
1283 * 4. Load Phash2 = H(k ^ opad); to LSB
1284 * 5. generate FHash = H(hash on Ihash with Phash2
1286 * 6. Retrieve HMAC output from LSB to host memory
1289 case CCP_AUTH_ALGO_SHA3_224:
1290 case CCP_AUTH_ALGO_SHA3_256:
1291 case CCP_AUTH_ALGO_SHA3_384:
1292 case CCP_AUTH_ALGO_SHA3_512:
1294 /**< only op ctx and dst in host memory*/
1296 case CCP_AUTH_ALGO_SHA3_224_HMAC:
1297 case CCP_AUTH_ALGO_SHA3_256_HMAC:
1300 case CCP_AUTH_ALGO_SHA3_384_HMAC:
1301 case CCP_AUTH_ALGO_SHA3_512_HMAC:
1304 * 1. Op to Perform Ihash
1305 * 2. Retrieve result from LSB to host memory
1306 * 3. Perform final hash
1309 case CCP_AUTH_ALGO_AES_CMAC:
1313 * extra descriptor in padding case
1314 * (k1/k2(255:128) with iv(127:0))
1319 CCP_LOG_ERR("Unsupported auth algo %d",
1320 session->auth.algo);
/*
 * Return the number of CCP queue descriptors needed for one AEAD
 * (combined cipher+auth) op of this session.  Only AES-GCM is
 * supported; the inner switch accounts for the GHASH/GCTR passes.
 * NOTE(review): count assignments, braces and return are elided from
 * this listing.
 */
1327 ccp_aead_slot(struct ccp_session *session)
1331 switch (session->aead_algo) {
1332 case RTE_CRYPTO_AEAD_AES_GCM:
1335 CCP_LOG_ERR("Unsupported aead algo %d",
1336 session->aead_algo);
1338 switch (session->auth.algo) {
1339 case CCP_AUTH_ALGO_AES_GCM:
1345 * 4. Reload passthru
1350 CCP_LOG_ERR("Unsupported combined auth ALGO %d",
1351 session->auth.algo);
/*
 * Total CCP descriptor count for one op of this session: dispatches on
 * the session command id and sums the cipher/auth slot requirements
 * (cipher+hash chains need both).
 * NOTE(review): the CCP_CMD_AUTH case label before line 1366 and the
 * final return are elided from this listing.
 */
1357 ccp_compute_slot_count(struct ccp_session *session)
1361 switch (session->cmd_id) {
1362 case CCP_CMD_CIPHER:
1363 count = ccp_cipher_slot(session);
1366 count = ccp_auth_slot(session);
1368 case CCP_CMD_CIPHER_HASH:
1369 case CCP_CMD_HASH_CIPHER:
1370 count = ccp_cipher_slot(session);
1371 count += ccp_auth_slot(session);
1373 case CCP_CMD_COMBINED:
1374 count = ccp_aead_slot(session);
1377 CCP_LOG_ERR("Unsupported cmd_id");
1384 #ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
/*
 * Map a CCP HMAC auth-algo enum to the corresponding OpenSSL EVP_MD,
 * written through *algo.  Used by the CPU-auth fallback path
 * (RTE_LIBRTE_PMD_CCP_CPU_AUTH).
 * NOTE(review): the MD5/SHA1 EVP assignments, break statements, the
 * default case and the return value are elided from this listing.
 */
1386 algo_select(int sessalgo,
1387 const EVP_MD **algo)
1392 case CCP_AUTH_ALGO_MD5_HMAC:
1395 case CCP_AUTH_ALGO_SHA1_HMAC:
1398 case CCP_AUTH_ALGO_SHA224_HMAC:
1399 *algo = EVP_sha224();
1401 case CCP_AUTH_ALGO_SHA256_HMAC:
1402 *algo = EVP_sha256();
1404 case CCP_AUTH_ALGO_SHA384_HMAC:
1405 *algo = EVP_sha384();
1407 case CCP_AUTH_ALGO_SHA512_HMAC:
1408 *algo = EVP_sha512();
/*
 * Compute an HMAC on the CPU via OpenSSL's one-shot EVP_DigestSign
 * interface and copy the first d_len bytes of the digest into dst.
 * temp_dst is 64 bytes — large enough for digests up to SHA-512.
 * NOTE(review): parameters (ctx, algo, pkey, srclen, d_len, dstlen)
 * and the success/error returns are elided from this listing; iv is
 * unused here.
 */
1418 process_cpu_auth_hmac(uint8_t *src, uint8_t *dst,
1419 __rte_unused uint8_t *iv,
1427 unsigned char temp_dst[64];
1429 if (EVP_DigestSignInit(ctx, NULL, algo, NULL, pkey) <= 0)
1430 goto process_auth_err;
1432 if (EVP_DigestSignUpdate(ctx, (char *)src, srclen) <= 0)
1433 goto process_auth_err;
1435 if (EVP_DigestSignFinal(ctx, temp_dst, &dstlen) <= 0)
1436 goto process_auth_err;
/* truncate to the session digest length requested by the caller */
1438 memcpy(dst, temp_dst, d_len);
1441 CCP_LOG_ERR("Process cpu auth failed");
/*
 * CPU-side HMAC processing for one crypto op (CPU-auth fallback).
 * Selects the EVP digest for the session algo, builds an HMAC key,
 * hashes the auth region of the source mbuf, and either writes the
 * digest out (GENERATE) or compares it against the op's digest
 * (VERIFY), setting op->status accordingly.
 * NOTE(review): several declarations, branch keywords (else), and the
 * EVP_MD_CTX setup/teardown are elided from this listing.
 */
static int cpu_crypto_auth(struct ccp_qp *qp,
1446 struct rte_crypto_op *op,
1447 struct ccp_session *sess,
1452 struct rte_mbuf *mbuf_src, *mbuf_dst;
1453 const EVP_MD *algo = NULL;
1456 algo_select(sess->auth.algo, &algo);
1457 pkey = EVP_PKEY_new_mac_key(EVP_PKEY_HMAC, NULL, sess->auth.key,
1458 sess->auth.key_length);
1459 mbuf_src = op->sym->m_src;
1460 mbuf_dst = op->sym->m_dst ? op->sym->m_dst : op->sym->m_src;
1461 srclen = op->sym->auth.data.length;
1462 src = rte_pktmbuf_mtod_offset(mbuf_src, uint8_t *,
1463 op->sym->auth.data.offset);
/* verify mode hashes into a scratch buffer so the op digest is untouched */
1465 if (sess->auth.op == CCP_AUTH_OP_VERIFY) {
1466 dst = qp->temp_digest;
1468 dst = op->sym->auth.digest.data;
1470 dst = rte_pktmbuf_mtod_offset(mbuf_dst, uint8_t *,
1471 op->sym->auth.data.offset +
1472 sess->auth.digest_length);
1475 status = process_cpu_auth_hmac(src, dst, NULL,
1479 sess->auth.digest_length);
1481 op->status = RTE_CRYPTO_OP_STATUS_ERROR;
/* constant-time compare not used here; memcmp mirrors upstream code */
1485 if (sess->auth.op == CCP_AUTH_OP_VERIFY) {
1486 if (memcmp(dst, op->sym->auth.digest.data,
1487 sess->auth.digest_length) != 0) {
1488 op->status = RTE_CRYPTO_OP_STATUS_AUTH_FAILED;
1490 op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
1493 op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
1495 EVP_PKEY_free(pkey);
/*
 * Enqueue one PASSTHRU descriptor on the CCP queue to copy pst->len
 * bytes between system memory and the LSB (storage-block) area.  The
 * pst->dir flag (test elided around line 1524) chooses system->SB or
 * SB->system addressing.  Only advances the queue index; the caller
 * rings the doorbell.
 * NOTE(review): the memset of the descriptor and the dir if/else
 * keywords are elided from this listing.
 */
1501 ccp_perform_passthru(struct ccp_passthru *pst,
1502 struct ccp_queue *cmd_q)
1504 struct ccp_desc *desc;
1505 union ccp_function function;
1507 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1509 CCP_CMD_ENGINE(desc) = CCP_ENGINE_PASSTHRU;
1511 CCP_CMD_SOC(desc) = 0;
1512 CCP_CMD_IOC(desc) = 0;
1513 CCP_CMD_INIT(desc) = 0;
1514 CCP_CMD_EOM(desc) = 0;
1515 CCP_CMD_PROT(desc) = 0;
1518 CCP_PT_BYTESWAP(&function) = pst->byte_swap;
1519 CCP_PT_BITWISE(&function) = pst->bit_mod;
1520 CCP_CMD_FUNCTION(desc) = function.raw;
1522 CCP_CMD_LEN(desc) = pst->len;
/* system memory -> LSB storage block */
1525 CCP_CMD_SRC_LO(desc) = (uint32_t)(pst->src_addr);
1526 CCP_CMD_SRC_HI(desc) = high32_value(pst->src_addr);
1527 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1529 CCP_CMD_DST_LO(desc) = (uint32_t)(pst->dest_addr);
1530 CCP_CMD_DST_HI(desc) = 0;
1531 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SB;
1533 if (pst->bit_mod != CCP_PASSTHRU_BITWISE_NOOP)
1534 CCP_CMD_LSB_ID(desc) = cmd_q->sb_key;
/* LSB storage block -> system memory */
1537 CCP_CMD_SRC_LO(desc) = (uint32_t)(pst->src_addr);
1538 CCP_CMD_SRC_HI(desc) = 0;
1539 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SB;
1541 CCP_CMD_DST_LO(desc) = (uint32_t)(pst->dest_addr);
1542 CCP_CMD_DST_HI(desc) = high32_value(pst->dest_addr);
1543 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1546 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
/*
 * Build the descriptor chain for a hardware HMAC:
 *   1. load precomputed PHash1 = H(key ^ ipad) into the LSB,
 *   2. SHA pass over the message -> intermediate hash (IHash),
 *   3. retrieve IHash to an appended scratch area in the mbuf
 *      (two SB slots for SHA-384/512),
 *   4. load precomputed PHash2 = H(key ^ opad),
 *   5. SHA pass over IHash -> final HMAC,
 *   6. retrieve the HMAC to the scratch area.
 * The queue doorbell is rung after each SHA submission.
 * NOTE(review): variable declarations (addr, append_ptr, tail),
 * braces and the return are elided from this listing; the append
 * result is not NULL-checked in the visible lines.
 */
1550 ccp_perform_hmac(struct rte_crypto_op *op,
1551 struct ccp_queue *cmd_q)
1554 struct ccp_session *session;
1555 union ccp_function function;
1556 struct ccp_desc *desc;
1558 phys_addr_t src_addr, dest_addr, dest_addr_t;
1559 struct ccp_passthru pst;
1560 uint64_t auth_msg_bits;
1564 session = (struct ccp_session *)get_session_private_data(
1566 ccp_cryptodev_driver_id);
1567 addr = session->auth.pre_compute;
1569 src_addr = rte_pktmbuf_mtophys_offset(op->sym->m_src,
1570 op->sym->auth.data.offset);
/* scratch space for IHash/HMAC is appended to the source mbuf */
1571 append_ptr = (void *)rte_pktmbuf_append(op->sym->m_src,
1572 session->auth.ctx_len);
1573 dest_addr = (phys_addr_t)rte_mem_virt2phy(append_ptr);
1574 dest_addr_t = dest_addr;
1576 /** Load PHash1 to LSB*/
1577 pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *)addr);
1578 pst.dest_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1579 pst.len = session->auth.ctx_len;
1581 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1582 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1583 ccp_perform_passthru(&pst, cmd_q);
1585 /**sha engine command descriptor for IntermediateHash*/
1587 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1588 memset(desc, 0, Q_DESC_SIZE);
1590 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
1592 CCP_CMD_SOC(desc) = 0;
1593 CCP_CMD_IOC(desc) = 0;
1594 CCP_CMD_INIT(desc) = 1;
1595 CCP_CMD_EOM(desc) = 1;
1596 CCP_CMD_PROT(desc) = 0;
1599 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
1600 CCP_CMD_FUNCTION(desc) = function.raw;
1602 CCP_CMD_LEN(desc) = op->sym->auth.data.length;
/* bit length includes the ipad block already absorbed into PHash1 */
1603 auth_msg_bits = (op->sym->auth.data.length +
1604 session->auth.block_size) * 8;
1606 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
1607 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
1608 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1610 CCP_CMD_LSB_ID(desc) = cmd_q->sb_sha;
1611 CCP_CMD_SHA_LO(desc) = ((uint32_t)auth_msg_bits);
1612 CCP_CMD_SHA_HI(desc) = high32_value(auth_msg_bits);
1614 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
/* ring the doorbell: publish tail and restart the queue */
1618 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1619 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1620 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1621 cmd_q->qcontrol | CMD_Q_RUN);
1623 /* Intermediate Hash value retrieve */
1624 if ((session->auth.ut.sha_type == CCP_SHA_TYPE_384) ||
1625 (session->auth.ut.sha_type == CCP_SHA_TYPE_512)) {
/* 384/512-bit state spans two SB slots; copy high slot then low slot */
1628 (phys_addr_t)((cmd_q->sb_sha + 1) * CCP_SB_BYTES);
1629 pst.dest_addr = dest_addr_t;
1630 pst.len = CCP_SB_BYTES;
1632 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1633 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1634 ccp_perform_passthru(&pst, cmd_q);
1636 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1637 pst.dest_addr = dest_addr_t + CCP_SB_BYTES;
1638 pst.len = CCP_SB_BYTES;
1640 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1641 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1642 ccp_perform_passthru(&pst, cmd_q);
/* non-384/512: single-slot retrieve (else keyword elided) */
1645 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1646 pst.dest_addr = dest_addr_t;
1647 pst.len = session->auth.ctx_len;
1649 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1650 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1651 ccp_perform_passthru(&pst, cmd_q);
1655 /** Load PHash2 to LSB*/
1656 addr += session->auth.ctx_len;
1657 pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *)addr);
1658 pst.dest_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1659 pst.len = session->auth.ctx_len;
1661 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1662 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1663 ccp_perform_passthru(&pst, cmd_q);
1665 /**sha engine command descriptor for FinalHash*/
1666 dest_addr_t += session->auth.offset;
1668 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1669 memset(desc, 0, Q_DESC_SIZE);
1671 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
1673 CCP_CMD_SOC(desc) = 0;
1674 CCP_CMD_IOC(desc) = 0;
1675 CCP_CMD_INIT(desc) = 1;
1676 CCP_CMD_EOM(desc) = 1;
1677 CCP_CMD_PROT(desc) = 0;
1680 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
1681 CCP_CMD_FUNCTION(desc) = function.raw;
1683 CCP_CMD_LEN(desc) = (session->auth.ctx_len -
1684 session->auth.offset);
1685 auth_msg_bits = (session->auth.block_size +
1686 session->auth.ctx_len -
1687 session->auth.offset) * 8;
1689 CCP_CMD_SRC_LO(desc) = (uint32_t)(dest_addr_t);
1690 CCP_CMD_SRC_HI(desc) = high32_value(dest_addr_t);
1691 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1693 CCP_CMD_LSB_ID(desc) = cmd_q->sb_sha;
1694 CCP_CMD_SHA_LO(desc) = ((uint32_t)auth_msg_bits);
1695 CCP_CMD_SHA_HI(desc) = high32_value(auth_msg_bits);
1697 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
1701 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1702 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1703 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1704 cmd_q->qcontrol | CMD_Q_RUN);
1706 /* Retrieve hmac output */
1707 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1708 pst.dest_addr = dest_addr;
1709 pst.len = session->auth.ctx_len;
1711 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
/* 384/512 state was already byte-swapped on retrieve; skip swap here */
1712 if ((session->auth.ut.sha_type == CCP_SHA_TYPE_384) ||
1713 (session->auth.ut.sha_type == CCP_SHA_TYPE_512))
1714 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1716 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1717 ccp_perform_passthru(&pst, cmd_q);
1719 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
/*
 * Build the descriptor chain for a plain (non-HMAC) SHA digest:
 * load the algorithm's initial context into the LSB, submit one SHA
 * descriptor over the auth region, ring the doorbell, then retrieve
 * the digest into scratch space appended to the source mbuf.
 * NOTE(review): declarations (append_ptr, tail), braces, the ctx
 * source expression at line 1750-1752, and the return are elided from
 * this listing; the append result is not NULL-checked in the visible
 * lines.
 */
1725 ccp_perform_sha(struct rte_crypto_op *op,
1726 struct ccp_queue *cmd_q)
1728 struct ccp_session *session;
1729 union ccp_function function;
1730 struct ccp_desc *desc;
1732 phys_addr_t src_addr, dest_addr;
1733 struct ccp_passthru pst;
1735 uint64_t auth_msg_bits;
1737 session = (struct ccp_session *)get_session_private_data(
1739 ccp_cryptodev_driver_id);
1741 src_addr = rte_pktmbuf_mtophys_offset(op->sym->m_src,
1742 op->sym->auth.data.offset);
1744 append_ptr = (void *)rte_pktmbuf_append(op->sym->m_src,
1745 session->auth.ctx_len);
1746 dest_addr = (phys_addr_t)rte_mem_virt2phy(append_ptr);
1748 /** Passthru sha context*/
1750 pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *)
1752 pst.dest_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1753 pst.len = session->auth.ctx_len;
1755 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1756 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1757 ccp_perform_passthru(&pst, cmd_q);
1759 /**prepare sha command descriptor*/
1761 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1762 memset(desc, 0, Q_DESC_SIZE);
1764 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
1766 CCP_CMD_SOC(desc) = 0;
1767 CCP_CMD_IOC(desc) = 0;
1768 CCP_CMD_INIT(desc) = 1;
1769 CCP_CMD_EOM(desc) = 1;
1770 CCP_CMD_PROT(desc) = 0;
1773 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
1774 CCP_CMD_FUNCTION(desc) = function.raw;
1776 CCP_CMD_LEN(desc) = op->sym->auth.data.length;
/* plain hash: message bit length only, no HMAC block adjustment */
1777 auth_msg_bits = op->sym->auth.data.length * 8;
1779 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
1780 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
1781 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1783 CCP_CMD_LSB_ID(desc) = cmd_q->sb_sha;
1784 CCP_CMD_SHA_LO(desc) = ((uint32_t)auth_msg_bits);
1785 CCP_CMD_SHA_HI(desc) = high32_value(auth_msg_bits);
1787 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
/* ring the doorbell: publish tail and restart the queue */
1791 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1792 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1793 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1794 cmd_q->qcontrol | CMD_Q_RUN);
1796 /* Hash value retrieve */
1797 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1798 pst.dest_addr = dest_addr;
1799 pst.len = session->auth.ctx_len;
1801 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1802 if ((session->auth.ut.sha_type == CCP_SHA_TYPE_384) ||
1803 (session->auth.ut.sha_type == CCP_SHA_TYPE_512))
1804 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1806 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1807 ccp_perform_passthru(&pst, cmd_q);
1809 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
/*
 * Build the descriptor chain for a SHA3-family HMAC:
 *   desc1: SHA3 over the message, keyed with the precomputed inner
 *          context (KEY pointer), intermediate state into the LSB;
 *   then retrieve the intermediate hash to mbuf scratch space
 *   (two SB slots for SHA3-384/512, one otherwise);
 *   desc2: SHA3 over the intermediate hash with the precomputed outer
 *          context -> final HMAC written straight to system memory.
 * NOTE(review): declarations (tail), braces, the if around the append
 * NULL check, and the return are elided from this listing.
 */
1815 ccp_perform_sha3_hmac(struct rte_crypto_op *op,
1816 struct ccp_queue *cmd_q)
1818 struct ccp_session *session;
1819 struct ccp_passthru pst;
1820 union ccp_function function;
1821 struct ccp_desc *desc;
1822 uint8_t *append_ptr;
1824 phys_addr_t src_addr, dest_addr, ctx_paddr, dest_addr_t;
1826 session = (struct ccp_session *)get_session_private_data(
1828 ccp_cryptodev_driver_id);
1830 src_addr = rte_pktmbuf_mtophys_offset(op->sym->m_src,
1831 op->sym->auth.data.offset);
1832 append_ptr = (uint8_t *)rte_pktmbuf_append(op->sym->m_src,
1833 session->auth.ctx_len);
1835 CCP_LOG_ERR("CCP MBUF append failed\n");
1838 dest_addr = (phys_addr_t)rte_mem_virt2phy((void *)append_ptr);
/* second half of the scratch area holds the intermediate hash */
1839 dest_addr_t = dest_addr + (session->auth.ctx_len / 2);
1840 ctx_paddr = (phys_addr_t)rte_mem_virt2phy((void
1841 *)session->auth.pre_compute);
1842 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1843 memset(desc, 0, Q_DESC_SIZE);
1845 /*desc1 for SHA3-Ihash operation */
1846 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
1847 CCP_CMD_INIT(desc) = 1;
1848 CCP_CMD_EOM(desc) = 1;
1851 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
1852 CCP_CMD_FUNCTION(desc) = function.raw;
1853 CCP_CMD_LEN(desc) = op->sym->auth.data.length;
1855 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
1856 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
1857 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1859 CCP_CMD_DST_LO(desc) = (cmd_q->sb_sha * CCP_SB_BYTES);
1860 CCP_CMD_DST_HI(desc) = 0;
1861 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SB;
/* KEY pointer carries the precomputed inner SHA3 context */
1863 CCP_CMD_KEY_LO(desc) = ((uint32_t)ctx_paddr);
1864 CCP_CMD_KEY_HI(desc) = high32_value(ctx_paddr);
1865 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1867 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
1870 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1871 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1872 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1873 cmd_q->qcontrol | CMD_Q_RUN);
1875 /* Intermediate Hash value retrieve */
1876 if ((session->auth.ut.sha_type == CCP_SHA3_TYPE_384) ||
1877 (session->auth.ut.sha_type == CCP_SHA3_TYPE_512)) {
1880 (phys_addr_t)((cmd_q->sb_sha + 1) * CCP_SB_BYTES);
1881 pst.dest_addr = dest_addr_t;
1882 pst.len = CCP_SB_BYTES;
1884 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1885 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1886 ccp_perform_passthru(&pst, cmd_q);
1888 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1889 pst.dest_addr = dest_addr_t + CCP_SB_BYTES;
1890 pst.len = CCP_SB_BYTES;
1892 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1893 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1894 ccp_perform_passthru(&pst, cmd_q);
/* smaller SHA3 variants need a single-slot retrieve (else elided) */
1897 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1898 pst.dest_addr = dest_addr_t;
1899 pst.len = CCP_SB_BYTES;
1901 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1902 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1903 ccp_perform_passthru(&pst, cmd_q);
1906 /**sha engine command descriptor for FinalHash*/
/* advance to the precomputed outer SHA3 context */
1907 ctx_paddr += CCP_SHA3_CTX_SIZE;
1908 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1909 memset(desc, 0, Q_DESC_SIZE);
1911 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
1912 CCP_CMD_INIT(desc) = 1;
1913 CCP_CMD_EOM(desc) = 1;
1916 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
1917 CCP_CMD_FUNCTION(desc) = function.raw;
/* digest-size-specific source offset within the retrieved state */
1919 if (session->auth.ut.sha_type == CCP_SHA3_TYPE_224) {
1920 dest_addr_t += (CCP_SB_BYTES - SHA224_DIGEST_SIZE);
1921 CCP_CMD_LEN(desc) = SHA224_DIGEST_SIZE;
1922 } else if (session->auth.ut.sha_type == CCP_SHA3_TYPE_256) {
1923 CCP_CMD_LEN(desc) = SHA256_DIGEST_SIZE;
1924 } else if (session->auth.ut.sha_type == CCP_SHA3_TYPE_384) {
1925 dest_addr_t += (2 * CCP_SB_BYTES - SHA384_DIGEST_SIZE);
1926 CCP_CMD_LEN(desc) = SHA384_DIGEST_SIZE;
1928 CCP_CMD_LEN(desc) = SHA512_DIGEST_SIZE;
1931 CCP_CMD_SRC_LO(desc) = ((uint32_t)dest_addr_t);
1932 CCP_CMD_SRC_HI(desc) = high32_value(dest_addr_t);
1933 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1935 CCP_CMD_DST_LO(desc) = (uint32_t)dest_addr;
1936 CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
1937 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1939 CCP_CMD_KEY_LO(desc) = ((uint32_t)ctx_paddr);
1940 CCP_CMD_KEY_HI(desc) = high32_value(ctx_paddr);
1941 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1943 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
1946 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1947 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1948 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1949 cmd_q->qcontrol | CMD_Q_RUN);
1951 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
/*
 * Build one SHA3 descriptor: hash the auth region of the source mbuf,
 * seeding from session->auth.sha3_ctx (via the KEY pointer) and
 * writing the digest directly to scratch space appended to the mbuf.
 * Rings the doorbell and leaves op->status NOT_PROCESSED for the
 * completion path.
 * NOTE(review): declarations (tail), the if around the append NULL
 * check, braces and the return are elided from this listing.
 */
1956 ccp_perform_sha3(struct rte_crypto_op *op,
1957 struct ccp_queue *cmd_q)
1959 struct ccp_session *session;
1960 union ccp_function function;
1961 struct ccp_desc *desc;
1962 uint8_t *ctx_addr, *append_ptr;
1964 phys_addr_t src_addr, dest_addr, ctx_paddr;
1966 session = (struct ccp_session *)get_session_private_data(
1968 ccp_cryptodev_driver_id);
1970 src_addr = rte_pktmbuf_mtophys_offset(op->sym->m_src,
1971 op->sym->auth.data.offset);
1972 append_ptr = (uint8_t *)rte_pktmbuf_append(op->sym->m_src,
1973 session->auth.ctx_len);
1975 CCP_LOG_ERR("CCP MBUF append failed\n");
1978 dest_addr = (phys_addr_t)rte_mem_virt2phy((void *)append_ptr);
1979 ctx_addr = session->auth.sha3_ctx;
1980 ctx_paddr = (phys_addr_t)rte_mem_virt2phy((void *)ctx_addr);
1982 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1983 memset(desc, 0, Q_DESC_SIZE);
1985 /* prepare desc for SHA3 operation */
1986 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
1987 CCP_CMD_INIT(desc) = 1;
1988 CCP_CMD_EOM(desc) = 1;
1991 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
1992 CCP_CMD_FUNCTION(desc) = function.raw;
1994 CCP_CMD_LEN(desc) = op->sym->auth.data.length;
1996 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
1997 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
1998 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
/* SHA3 digest goes straight to system memory, not the LSB */
2000 CCP_CMD_DST_LO(desc) = ((uint32_t)dest_addr);
2001 CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
2002 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2004 CCP_CMD_KEY_LO(desc) = ((uint32_t)ctx_paddr);
2005 CCP_CMD_KEY_HI(desc) = high32_value(ctx_paddr);
2006 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2008 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2012 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2013 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2014 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2015 cmd_q->qcontrol | CMD_Q_RUN);
2017 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
/*
 * Build the descriptor chain for AES-CMAC.
 * Aligned-length messages take the single-descriptor path (zero IV,
 * one AES pass).  Non-aligned messages are split: a first AES pass
 * over the block-aligned prefix, then the remaining bytes are padded
 * with CMAC_PAD_VALUE into scratch space and processed by a final
 * AES pass.  The CMAC result is then retrieved from the LSB IV slot.
 * NOTE(review): declarations (tail), if/else keywords around the
 * aligned/non-aligned split, braces and the return are elided from
 * this listing; the append result is not NULL-checked in the visible
 * lines.
 */
2022 ccp_perform_aes_cmac(struct rte_crypto_op *op,
2023 struct ccp_queue *cmd_q)
2025 struct ccp_session *session;
2026 union ccp_function function;
2027 struct ccp_passthru pst;
2028 struct ccp_desc *desc;
2030 uint8_t *src_tb, *append_ptr, *ctx_addr;
2031 phys_addr_t src_addr, dest_addr, key_addr;
2032 int length, non_align_len;
2034 session = (struct ccp_session *)get_session_private_data(
2036 ccp_cryptodev_driver_id);
2037 key_addr = rte_mem_virt2phy(session->auth.key_ccp);
2039 src_addr = rte_pktmbuf_mtophys_offset(op->sym->m_src,
2040 op->sym->auth.data.offset);
2041 append_ptr = (uint8_t *)rte_pktmbuf_append(op->sym->m_src,
2042 session->auth.ctx_len);
2043 dest_addr = (phys_addr_t)rte_mem_virt2phy((void *)append_ptr);
2046 CCP_AES_ENCRYPT(&function) = CCP_CIPHER_DIR_ENCRYPT;
2047 CCP_AES_MODE(&function) = session->auth.um.aes_mode;
2048 CCP_AES_TYPE(&function) = session->auth.ut.aes_type;
/* aligned path: zero IV, single AES pass over the whole message */
2050 if (op->sym->auth.data.length % session->auth.block_size == 0) {
2052 ctx_addr = session->auth.pre_compute;
2053 memset(ctx_addr, 0, AES_BLOCK_SIZE);
2054 pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *)ctx_addr);
2055 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2056 pst.len = CCP_SB_BYTES;
2058 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2059 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
2060 ccp_perform_passthru(&pst, cmd_q);
2062 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2063 memset(desc, 0, Q_DESC_SIZE);
2065 /* prepare desc for aes-cmac command */
2066 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2067 CCP_CMD_EOM(desc) = 1;
2068 CCP_CMD_FUNCTION(desc) = function.raw;
2070 CCP_CMD_LEN(desc) = op->sym->auth.data.length;
2071 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
2072 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
2073 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2075 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2076 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2077 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2078 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2080 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2085 (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2086 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2087 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2088 cmd_q->qcontrol | CMD_Q_RUN);
/* non-aligned path (else elided): zero IV from second scratch slot */
2090 ctx_addr = session->auth.pre_compute + CCP_SB_BYTES;
2091 memset(ctx_addr, 0, AES_BLOCK_SIZE);
2092 pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *)ctx_addr);
2093 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2094 pst.len = CCP_SB_BYTES;
2096 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2097 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
2098 ccp_perform_passthru(&pst, cmd_q);
/* split message into an aligned prefix and a padded tail block */
2100 length = (op->sym->auth.data.length / AES_BLOCK_SIZE);
2101 length *= AES_BLOCK_SIZE;
2102 non_align_len = op->sym->auth.data.length - length;
2103 /* prepare desc for aes-cmac command */
2105 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2106 memset(desc, 0, Q_DESC_SIZE);
2108 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2109 CCP_CMD_INIT(desc) = 1;
2110 CCP_CMD_FUNCTION(desc) = function.raw;
2112 CCP_CMD_LEN(desc) = length;
2113 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
2114 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
2115 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2117 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2118 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2119 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2120 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2122 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
/* copy the unaligned tail into scratch and pad per CMAC */
2125 append_ptr = append_ptr + CCP_SB_BYTES;
2126 memset(append_ptr, 0, AES_BLOCK_SIZE);
2127 src_tb = rte_pktmbuf_mtod_offset(op->sym->m_src,
2129 op->sym->auth.data.offset +
2131 rte_memcpy(append_ptr, src_tb, non_align_len);
2132 append_ptr[non_align_len] = CMAC_PAD_VALUE;
2134 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2135 memset(desc, 0, Q_DESC_SIZE);
2137 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2138 CCP_CMD_EOM(desc) = 1;
2139 CCP_CMD_FUNCTION(desc) = function.raw;
2140 CCP_CMD_LEN(desc) = AES_BLOCK_SIZE;
2142 CCP_CMD_SRC_LO(desc) = ((uint32_t)(dest_addr + CCP_SB_BYTES));
2143 CCP_CMD_SRC_HI(desc) = high32_value(dest_addr + CCP_SB_BYTES);
2144 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2146 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2147 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2148 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2149 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2151 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2155 (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2156 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2157 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2158 cmd_q->qcontrol | CMD_Q_RUN);
2160 /* Retrieve result */
2161 pst.dest_addr = dest_addr;
2162 pst.src_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2163 pst.len = CCP_SB_BYTES;
2165 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2166 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
2167 ccp_perform_passthru(&pst, cmd_q);
2169 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
/*
 * Build one AES cipher descriptor for the op.  For non-ECB modes the
 * IV is first staged into the LSB: CTR uses the session nonce buffer
 * (nonce || IV), other modes right-align the IV inside a per-batch
 * lsb_buf slot.  In-place operation when m_dst is NULL.  The caller
 * rings the doorbell for the batch.
 * NOTE(review): declarations (iv, lsb_buf), else keywords, braces and
 * the return are elided from this listing.
 */
2174 ccp_perform_aes(struct rte_crypto_op *op,
2175 struct ccp_queue *cmd_q,
2176 struct ccp_batch_info *b_info)
2178 struct ccp_session *session;
2179 union ccp_function function;
2181 struct ccp_passthru pst = {0};
2182 struct ccp_desc *desc;
2183 phys_addr_t src_addr, dest_addr, key_addr;
2186 session = (struct ccp_session *)get_session_private_data(
2188 ccp_cryptodev_driver_id);
2191 iv = rte_crypto_op_ctod_offset(op, uint8_t *, session->iv.offset);
2192 if (session->cipher.um.aes_mode != CCP_AES_MODE_ECB) {
2193 if (session->cipher.um.aes_mode == CCP_AES_MODE_CTR) {
/* CTR: IV appended after the 16-byte nonce in the session buffer */
2194 rte_memcpy(session->cipher.nonce + AES_BLOCK_SIZE,
2195 iv, session->iv.length);
2196 pst.src_addr = (phys_addr_t)session->cipher.nonce_phys;
2197 CCP_AES_SIZE(&function) = 0x1F;
/* non-CTR (else elided): IV right-aligned in a batch lsb_buf slot */
2200 &(b_info->lsb_buf[b_info->lsb_buf_idx*CCP_SB_BYTES]);
2201 rte_memcpy(lsb_buf +
2202 (CCP_SB_BYTES - session->iv.length),
2203 iv, session->iv.length);
2204 pst.src_addr = b_info->lsb_buf_phys +
2205 (b_info->lsb_buf_idx * CCP_SB_BYTES);
2206 b_info->lsb_buf_idx++;
2209 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2210 pst.len = CCP_SB_BYTES;
2212 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2213 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
2214 ccp_perform_passthru(&pst, cmd_q);
2217 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2219 src_addr = rte_pktmbuf_mtophys_offset(op->sym->m_src,
2220 op->sym->cipher.data.offset);
2221 if (likely(op->sym->m_dst != NULL))
2222 dest_addr = rte_pktmbuf_mtophys_offset(op->sym->m_dst,
2223 op->sym->cipher.data.offset);
/* in-place when no separate destination mbuf (else elided) */
2225 dest_addr = src_addr;
2226 key_addr = session->cipher.key_phys;
2228 /* prepare desc for aes command */
2229 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2230 CCP_CMD_INIT(desc) = 1;
2231 CCP_CMD_EOM(desc) = 1;
2233 CCP_AES_ENCRYPT(&function) = session->cipher.dir;
2234 CCP_AES_MODE(&function) = session->cipher.um.aes_mode;
2235 CCP_AES_TYPE(&function) = session->cipher.ut.aes_type;
2236 CCP_CMD_FUNCTION(desc) = function.raw;
2238 CCP_CMD_LEN(desc) = op->sym->cipher.data.length;
2240 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
2241 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
2242 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2244 CCP_CMD_DST_LO(desc) = ((uint32_t)dest_addr);
2245 CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
2246 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2248 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2249 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2250 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2252 if (session->cipher.um.aes_mode != CCP_AES_MODE_ECB)
2253 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2255 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2256 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
/*
 * Build one 3DES descriptor for the op.  CBC mode stages the IV
 * (right-aligned) into a batch lsb_buf slot and passes it to the LSB;
 * CFB/ECB are rejected.  In-place operation when m_dst is NULL.  The
 * doorbell is rung at the end of this function.
 * NOTE(review): declarations (iv, tail), break/else keywords, braces
 * and the return are elided from this listing.
 */
2261 ccp_perform_3des(struct rte_crypto_op *op,
2262 struct ccp_queue *cmd_q,
2263 struct ccp_batch_info *b_info)
2265 struct ccp_session *session;
2266 union ccp_function function;
2267 unsigned char *lsb_buf;
2268 struct ccp_passthru pst;
2269 struct ccp_desc *desc;
2272 phys_addr_t src_addr, dest_addr, key_addr;
2274 session = (struct ccp_session *)get_session_private_data(
2276 ccp_cryptodev_driver_id);
2278 iv = rte_crypto_op_ctod_offset(op, uint8_t *, session->iv.offset);
2279 switch (session->cipher.um.des_mode) {
2280 case CCP_DES_MODE_CBC:
2281 lsb_buf = &(b_info->lsb_buf[b_info->lsb_buf_idx*CCP_SB_BYTES]);
2282 b_info->lsb_buf_idx++;
/* IV right-aligned within the 32-byte SB slot */
2284 rte_memcpy(lsb_buf + (CCP_SB_BYTES - session->iv.length),
2285 iv, session->iv.length);
2287 pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *) lsb_buf);
2288 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2289 pst.len = CCP_SB_BYTES;
2291 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2292 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
2293 ccp_perform_passthru(&pst, cmd_q);
2295 case CCP_DES_MODE_CFB:
2296 case CCP_DES_MODE_ECB:
2297 CCP_LOG_ERR("Unsupported DES cipher mode");
2301 src_addr = rte_pktmbuf_mtophys_offset(op->sym->m_src,
2302 op->sym->cipher.data.offset);
2303 if (unlikely(op->sym->m_dst != NULL))
2305 rte_pktmbuf_mtophys_offset(op->sym->m_dst,
2306 op->sym->cipher.data.offset);
/* in-place when no separate destination mbuf (else elided) */
2308 dest_addr = src_addr;
2310 key_addr = rte_mem_virt2phy(session->cipher.key_ccp);
2312 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2314 memset(desc, 0, Q_DESC_SIZE);
2316 /* prepare desc for des command */
2317 CCP_CMD_ENGINE(desc) = CCP_ENGINE_3DES;
2319 CCP_CMD_SOC(desc) = 0;
2320 CCP_CMD_IOC(desc) = 0;
2321 CCP_CMD_INIT(desc) = 1;
2322 CCP_CMD_EOM(desc) = 1;
2323 CCP_CMD_PROT(desc) = 0;
2326 CCP_DES_ENCRYPT(&function) = session->cipher.dir;
2327 CCP_DES_MODE(&function) = session->cipher.um.des_mode;
2328 CCP_DES_TYPE(&function) = session->cipher.ut.des_type;
2329 CCP_CMD_FUNCTION(desc) = function.raw;
2331 CCP_CMD_LEN(desc) = op->sym->cipher.data.length;
2333 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
2334 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
2335 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2337 CCP_CMD_DST_LO(desc) = ((uint32_t)dest_addr);
2338 CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
2339 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2341 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2342 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2343 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
/* relies on CCP_DES_MODE_ECB being 0 — any non-ECB mode needs the IV slot */
2345 if (session->cipher.um.des_mode)
2346 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2348 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2352 /* Write the new tail address back to the queue register */
2353 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2354 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2355 /* Turn the queue back on using our cached control register */
2356 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2357 cmd_q->qcontrol | CMD_Q_RUN);
2359 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
2364 ccp_perform_aes_gcm(struct rte_crypto_op *op, struct ccp_queue *cmd_q)
2366 struct ccp_session *session;
2367 union ccp_function function;
2369 struct ccp_passthru pst;
2370 struct ccp_desc *desc;
2373 phys_addr_t src_addr, dest_addr, key_addr, aad_addr;
2374 phys_addr_t digest_dest_addr;
2375 int length, non_align_len;
2377 session = (struct ccp_session *)get_session_private_data(
2379 ccp_cryptodev_driver_id);
2380 iv = rte_crypto_op_ctod_offset(op, uint8_t *, session->iv.offset);
2381 key_addr = session->cipher.key_phys;
2383 src_addr = rte_pktmbuf_mtophys_offset(op->sym->m_src,
2384 op->sym->aead.data.offset);
2385 if (unlikely(op->sym->m_dst != NULL))
2386 dest_addr = rte_pktmbuf_mtophys_offset(op->sym->m_dst,
2387 op->sym->aead.data.offset);
2389 dest_addr = src_addr;
2390 rte_pktmbuf_append(op->sym->m_src, session->auth.ctx_len);
2391 digest_dest_addr = op->sym->aead.digest.phys_addr;
2392 temp = (uint64_t *)(op->sym->aead.digest.data + AES_BLOCK_SIZE);
2393 *temp++ = rte_bswap64(session->auth.aad_length << 3);
2394 *temp = rte_bswap64(op->sym->aead.data.length << 3);
2396 non_align_len = op->sym->aead.data.length % AES_BLOCK_SIZE;
2397 length = CCP_ALIGN(op->sym->aead.data.length, AES_BLOCK_SIZE);
2399 aad_addr = op->sym->aead.aad.phys_addr;
2401 /* CMD1 IV Passthru */
2402 rte_memcpy(session->cipher.nonce + AES_BLOCK_SIZE, iv,
2403 session->iv.length);
2404 pst.src_addr = session->cipher.nonce_phys;
2405 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2406 pst.len = CCP_SB_BYTES;
2408 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2409 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
2410 ccp_perform_passthru(&pst, cmd_q);
2412 /* CMD2 GHASH-AAD */
2414 CCP_AES_ENCRYPT(&function) = CCP_AES_MODE_GHASH_AAD;
2415 CCP_AES_MODE(&function) = CCP_AES_MODE_GHASH;
2416 CCP_AES_TYPE(&function) = session->cipher.ut.aes_type;
2418 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2419 memset(desc, 0, Q_DESC_SIZE);
2421 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2422 CCP_CMD_INIT(desc) = 1;
2423 CCP_CMD_FUNCTION(desc) = function.raw;
2425 CCP_CMD_LEN(desc) = session->auth.aad_length;
2427 CCP_CMD_SRC_LO(desc) = ((uint32_t)aad_addr);
2428 CCP_CMD_SRC_HI(desc) = high32_value(aad_addr);
2429 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2431 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2432 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2433 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2435 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2437 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2440 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2441 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2442 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2443 cmd_q->qcontrol | CMD_Q_RUN);
2445 /* CMD3 : GCTR Plain text */
2447 CCP_AES_ENCRYPT(&function) = session->cipher.dir;
2448 CCP_AES_MODE(&function) = CCP_AES_MODE_GCTR;
2449 CCP_AES_TYPE(&function) = session->cipher.ut.aes_type;
2450 if (non_align_len == 0)
2451 CCP_AES_SIZE(&function) = (AES_BLOCK_SIZE << 3) - 1;
2453 CCP_AES_SIZE(&function) = (non_align_len << 3) - 1;
2456 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2457 memset(desc, 0, Q_DESC_SIZE);
2459 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2460 CCP_CMD_EOM(desc) = 1;
2461 CCP_CMD_FUNCTION(desc) = function.raw;
2463 CCP_CMD_LEN(desc) = length;
2465 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
2466 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
2467 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2469 CCP_CMD_DST_LO(desc) = ((uint32_t)dest_addr);
2470 CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
2471 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2473 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2474 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2475 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2477 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2479 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2482 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2483 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2484 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2485 cmd_q->qcontrol | CMD_Q_RUN);
2487 /* CMD4 : PT to copy IV */
2488 pst.src_addr = session->cipher.nonce_phys;
2489 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2490 pst.len = AES_BLOCK_SIZE;
2492 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2493 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
2494 ccp_perform_passthru(&pst, cmd_q);
2496 /* CMD5 : GHASH-Final */
2498 CCP_AES_ENCRYPT(&function) = CCP_AES_MODE_GHASH_FINAL;
2499 CCP_AES_MODE(&function) = CCP_AES_MODE_GHASH;
2500 CCP_AES_TYPE(&function) = session->cipher.ut.aes_type;
2502 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2503 memset(desc, 0, Q_DESC_SIZE);
2505 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2506 CCP_CMD_FUNCTION(desc) = function.raw;
2507 /* Last block (AAD_len || PT_len)*/
2508 CCP_CMD_LEN(desc) = AES_BLOCK_SIZE;
2510 CCP_CMD_SRC_LO(desc) = ((uint32_t)digest_dest_addr + AES_BLOCK_SIZE);
2511 CCP_CMD_SRC_HI(desc) = high32_value(digest_dest_addr + AES_BLOCK_SIZE);
2512 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2514 CCP_CMD_DST_LO(desc) = ((uint32_t)digest_dest_addr);
2515 CCP_CMD_DST_HI(desc) = high32_value(digest_dest_addr);
2516 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2518 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2519 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2520 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2522 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2524 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2527 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2528 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2529 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2530 cmd_q->qcontrol | CMD_Q_RUN);
2532 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
2537 ccp_crypto_cipher(struct rte_crypto_op *op,
2538 struct ccp_queue *cmd_q,
2539 struct ccp_batch_info *b_info)
2542 struct ccp_session *session;
2544 session = (struct ccp_session *)get_session_private_data(
2546 ccp_cryptodev_driver_id);
2548 switch (session->cipher.algo) {
2549 case CCP_CIPHER_ALGO_AES_CBC:
2550 result = ccp_perform_aes(op, cmd_q, b_info);
2551 b_info->desccnt += 2;
2553 case CCP_CIPHER_ALGO_AES_CTR:
2554 result = ccp_perform_aes(op, cmd_q, b_info);
2555 b_info->desccnt += 2;
2557 case CCP_CIPHER_ALGO_AES_ECB:
2558 result = ccp_perform_aes(op, cmd_q, b_info);
2559 b_info->desccnt += 1;
2561 case CCP_CIPHER_ALGO_3DES_CBC:
2562 result = ccp_perform_3des(op, cmd_q, b_info);
2563 b_info->desccnt += 2;
2566 CCP_LOG_ERR("Unsupported cipher algo %d",
2567 session->cipher.algo);
/*
 * Dispatch an auth-only op to the matching CCP enqueue routine and
 * account the descriptors consumed in b_info.
 *
 * NOTE(review): when RTE_LIBRTE_PMD_CCP_CPU_AUTH is defined, several
 * HMAC cases appear to fall through without enqueuing hardware work
 * (handled later on the CPU via OpenSSL) — the exact #else/#endif
 * structure should be confirmed against the full file.
 */
ccp_crypto_auth(struct rte_crypto_op *op,
		struct ccp_queue *cmd_q,
		struct ccp_batch_info *b_info)

	struct ccp_session *session;

	session = (struct ccp_session *)get_session_private_data(
					ccp_cryptodev_driver_id);

	switch (session->auth.algo) {
	case CCP_AUTH_ALGO_SHA1:
	case CCP_AUTH_ALGO_SHA224:
	case CCP_AUTH_ALGO_SHA256:
	case CCP_AUTH_ALGO_SHA384:
	case CCP_AUTH_ALGO_SHA512:
		/* Plain SHA runs on the CCP SHA engine: 3 descriptors. */
		result = ccp_perform_sha(op, cmd_q);
		b_info->desccnt += 3;
#ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
	case CCP_AUTH_ALGO_MD5_HMAC:
	case CCP_AUTH_ALGO_SHA1_HMAC:
	case CCP_AUTH_ALGO_SHA224_HMAC:
	case CCP_AUTH_ALGO_SHA256_HMAC:
#ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
		/* Hardware HMAC for <=SHA-256: 6 descriptors. */
		result = ccp_perform_hmac(op, cmd_q);
		b_info->desccnt += 6;
	case CCP_AUTH_ALGO_SHA384_HMAC:
	case CCP_AUTH_ALGO_SHA512_HMAC:
#ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
		/* SHA-384/512 HMAC uses a wider context: 7 descriptors. */
		result = ccp_perform_hmac(op, cmd_q);
		b_info->desccnt += 7;
	case CCP_AUTH_ALGO_SHA3_224:
	case CCP_AUTH_ALGO_SHA3_256:
	case CCP_AUTH_ALGO_SHA3_384:
	case CCP_AUTH_ALGO_SHA3_512:
		result = ccp_perform_sha3(op, cmd_q);
		b_info->desccnt += 1;
	case CCP_AUTH_ALGO_SHA3_224_HMAC:
	case CCP_AUTH_ALGO_SHA3_256_HMAC:
		result = ccp_perform_sha3_hmac(op, cmd_q);
		b_info->desccnt += 3;
	case CCP_AUTH_ALGO_SHA3_384_HMAC:
	case CCP_AUTH_ALGO_SHA3_512_HMAC:
		result = ccp_perform_sha3_hmac(op, cmd_q);
		b_info->desccnt += 4;
	case CCP_AUTH_ALGO_AES_CMAC:
		result = ccp_perform_aes_cmac(op, cmd_q);
		b_info->desccnt += 4;
		/* Unrecognised algorithm: log and reject the op. */
		CCP_LOG_ERR("Unsupported auth algo %d",
			    session->auth.algo);
2647 ccp_crypto_aead(struct rte_crypto_op *op,
2648 struct ccp_queue *cmd_q,
2649 struct ccp_batch_info *b_info)
2652 struct ccp_session *session;
2654 session = (struct ccp_session *)get_session_private_data(
2656 ccp_cryptodev_driver_id);
2658 switch (session->auth.algo) {
2659 case CCP_AUTH_ALGO_AES_GCM:
2660 if (session->cipher.algo != CCP_CIPHER_ALGO_AES_GCM) {
2661 CCP_LOG_ERR("Incorrect chain order");
2664 result = ccp_perform_aes_gcm(op, cmd_q);
2665 b_info->desccnt += 5;
2668 CCP_LOG_ERR("Unsupported aead algo %d",
2669 session->aead_algo);
/*
 * Enqueue a burst of crypto ops onto one CCP hardware queue.
 *
 * Allocates a batch-info record from qp->batch_mp, dispatches each op
 * by session cmd_id to the cipher/auth/aead enqueue helpers, records
 * head/tail queue offsets for later completion polling, kicks the
 * hardware, and posts the batch to qp->processed_pkts for dequeue.
 *
 * NOTE(review): with RTE_LIBRTE_PMD_CCP_CPU_AUTH, auth work appears to
 * be redirected to cpu_crypto_auth() using an OpenSSL EVP_MD_CTX —
 * confirm the elided #else/#endif structure against the full file.
 */
process_ops_to_enqueue(struct ccp_qp *qp,
		       struct rte_crypto_op **op,
		       struct ccp_queue *cmd_q,
	struct ccp_batch_info *b_info;
	struct ccp_session *session;
#ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
	EVP_MD_CTX *auth_ctx = NULL;

	if (rte_mempool_get(qp->batch_mp, (void **)&b_info)) {
		CCP_LOG_ERR("batch info allocation failed");
#ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
	auth_ctx = EVP_MD_CTX_create();
	if (unlikely(!auth_ctx)) {
		CCP_LOG_ERR("Unable to create auth ctx");
	b_info->auth_ctr = 0;

	/* populate batch info necessary for dequeue */
	b_info->lsb_buf_idx = 0;
	b_info->desccnt = 0;
	b_info->cmd_q = cmd_q;
	b_info->lsb_buf_phys =
		(phys_addr_t)rte_mem_virt2phy((void *)b_info->lsb_buf);
	/* Reserve the slots up front; returned on any per-op failure. */
	rte_atomic64_sub(&b_info->cmd_q->free_slots, slots_req);

	b_info->head_offset = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx *
	for (i = 0; i < nb_ops; i++) {
		session = (struct ccp_session *)get_session_private_data(
						op[i]->sym->session,
						ccp_cryptodev_driver_id);
		switch (session->cmd_id) {
		case CCP_CMD_CIPHER:
			result = ccp_crypto_cipher(op[i], cmd_q, b_info);
			result = ccp_crypto_auth(op[i], cmd_q, b_info);
#ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
			result = cpu_crypto_auth(qp, op[i],
		case CCP_CMD_CIPHER_HASH:
			/* Cipher first, then hash over the result. */
			result = ccp_crypto_cipher(op[i], cmd_q, b_info);
			result = ccp_crypto_auth(op[i], cmd_q, b_info);
		case CCP_CMD_HASH_CIPHER:
			/* Hash first; cipher only if auth succeeded. */
			result = ccp_crypto_auth(op[i], cmd_q, b_info);
#ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
			result = cpu_crypto_auth(qp, op[i],
			if (op[i]->status != RTE_CRYPTO_OP_STATUS_SUCCESS)
			result = ccp_crypto_cipher(op[i], cmd_q, b_info);
		case CCP_CMD_COMBINED:
			result = ccp_crypto_aead(op[i], cmd_q, b_info);
		default:
			CCP_LOG_ERR("Unsupported cmd_id");
		if (unlikely(result < 0)) {
			/* Give back the slots this batch did not consume. */
			rte_atomic64_add(&b_info->cmd_q->free_slots,
					 (slots_req - b_info->desccnt));
		b_info->op[i] = op[i];

	b_info->tail_offset = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx *

	/* Write the new tail address back to the queue register */
	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE,
		      b_info->tail_offset);
	/* Turn the queue back on using our cached control register */
	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
		      cmd_q->qcontrol | CMD_Q_RUN);

	rte_ring_enqueue(qp->processed_pkts, (void *)b_info);

#ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
	EVP_MD_CTX_destroy(auth_ctx);
2781 static inline void ccp_auth_dq_prepare(struct rte_crypto_op *op)
2783 struct ccp_session *session;
2784 uint8_t *digest_data, *addr;
2785 struct rte_mbuf *m_last;
2786 int offset, digest_offset;
2787 uint8_t digest_le[64];
2789 session = (struct ccp_session *)get_session_private_data(
2791 ccp_cryptodev_driver_id);
2793 if (session->cmd_id == CCP_CMD_COMBINED) {
2794 digest_data = op->sym->aead.digest.data;
2795 digest_offset = op->sym->aead.data.offset +
2796 op->sym->aead.data.length;
2798 digest_data = op->sym->auth.digest.data;
2799 digest_offset = op->sym->auth.data.offset +
2800 op->sym->auth.data.length;
2802 m_last = rte_pktmbuf_lastseg(op->sym->m_src);
2803 addr = (uint8_t *)((char *)m_last->buf_addr + m_last->data_off +
2804 m_last->data_len - session->auth.ctx_len);
2807 offset = session->auth.offset;
2809 if (session->auth.engine == CCP_ENGINE_SHA)
2810 if ((session->auth.ut.sha_type != CCP_SHA_TYPE_1) &&
2811 (session->auth.ut.sha_type != CCP_SHA_TYPE_224) &&
2812 (session->auth.ut.sha_type != CCP_SHA_TYPE_256)) {
2813 /* All other algorithms require byte
2818 offset = session->auth.ctx_len -
2819 session->auth.offset - 1;
2820 for (i = 0; i < session->auth.digest_length; i++)
2821 digest_le[i] = addr[offset - i];
2826 op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
2827 if (session->auth.op == CCP_AUTH_OP_VERIFY) {
2828 if (memcmp(addr + offset, digest_data,
2829 session->auth.digest_length) != 0)
2830 op->status = RTE_CRYPTO_OP_STATUS_AUTH_FAILED;
2833 if (unlikely(digest_data == 0))
2834 digest_data = rte_pktmbuf_mtod_offset(
2835 op->sym->m_dst, uint8_t *,
2837 rte_memcpy(digest_data, addr + offset,
2838 session->auth.digest_length);
2840 /* Trim area used for digest from mbuf. */
2841 rte_pktmbuf_trim(op->sym->m_src,
2842 session->auth.ctx_len);
/*
 * Copy up to nb_ops completed ops from a batch into op_d[], running the
 * per-op dequeue fixups (digest copy/verify via ccp_auth_dq_prepare,
 * or CPU-side auth when RTE_LIBRTE_PMD_CCP_CPU_AUTH is set).
 *
 * NOTE(review): the two signatures below are alternates selected by
 * the CPU_AUTH config — qp is only used in the CPU-auth build;
 * confirm the elided #else/#endif structure against the full file.
 */
#ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
ccp_prepare_ops(struct ccp_qp *qp,
ccp_prepare_ops(struct ccp_qp *qp __rte_unused,
		struct rte_crypto_op **op_d,
		struct ccp_batch_info *b_info,
	struct ccp_session *session;

#ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
	EVP_MD_CTX *auth_ctx = NULL;

	auth_ctx = EVP_MD_CTX_create();
	if (unlikely(!auth_ctx)) {
		CCP_LOG_ERR("Unable to create auth ctx");
	/* Never hand back more ops than the batch still holds. */
	min_ops = RTE_MIN(nb_ops, b_info->opcnt);

	for (i = 0; i < min_ops; i++) {
		op_d[i] = b_info->op[b_info->op_idx++];
		session = (struct ccp_session *)get_session_private_data(
						op_d[i]->sym->session,
						ccp_cryptodev_driver_id);
		switch (session->cmd_id) {
		case CCP_CMD_CIPHER:
			/* Cipher-only ops need no digest post-processing. */
			op_d[i]->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
#ifndef RTE_LIBRTE_PMD_CCP_CPU_AUTH
			ccp_auth_dq_prepare(op_d[i]);
		case CCP_CMD_CIPHER_HASH:
#ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
			cpu_crypto_auth(qp, op_d[i],
			ccp_auth_dq_prepare(op_d[i]);
		case CCP_CMD_HASH_CIPHER:
#ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
			op_d[i]->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
			ccp_auth_dq_prepare(op_d[i]);
		case CCP_CMD_COMBINED:
			ccp_auth_dq_prepare(op_d[i]);
		default:
			CCP_LOG_ERR("Unsupported cmd_id");

#ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
	EVP_MD_CTX_destroy(auth_ctx);
	b_info->opcnt -= min_ops;
2914 process_ops_to_dequeue(struct ccp_qp *qp,
2915 struct rte_crypto_op **op,
2918 struct ccp_batch_info *b_info;
2919 uint32_t cur_head_offset;
2921 if (qp->b_info != NULL) {
2922 b_info = qp->b_info;
2923 if (unlikely(b_info->op_idx > 0))
2925 } else if (rte_ring_dequeue(qp->processed_pkts,
2928 #ifdef RTE_LIBRTE_PMD_CCP_CPU_AUTH
2929 if (b_info->auth_ctr == b_info->opcnt)
2932 cur_head_offset = CCP_READ_REG(b_info->cmd_q->reg_base,
2933 CMD_Q_HEAD_LO_BASE);
2935 if (b_info->head_offset < b_info->tail_offset) {
2936 if ((cur_head_offset >= b_info->head_offset) &&
2937 (cur_head_offset < b_info->tail_offset)) {
2938 qp->b_info = b_info;
2942 if ((cur_head_offset >= b_info->head_offset) ||
2943 (cur_head_offset < b_info->tail_offset)) {
2944 qp->b_info = b_info;
2951 nb_ops = ccp_prepare_ops(qp, op, b_info, nb_ops);
2952 rte_atomic64_add(&b_info->cmd_q->free_slots, b_info->desccnt);
2953 b_info->desccnt = 0;
2954 if (b_info->opcnt > 0) {
2955 qp->b_info = b_info;
2957 rte_mempool_put(qp->batch_mp, (void *)b_info);