1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2018 Advanced Micro Devices, Inc. All rights reserved.
10 #include <sys/queue.h>
11 #include <sys/types.h>
13 #include <openssl/sha.h>
14 #include <openssl/cmac.h> /*sub key apis*/
15 #include <openssl/evp.h> /*sub key apis*/
17 #include <rte_hexdump.h>
18 #include <rte_memzone.h>
19 #include <rte_malloc.h>
20 #include <rte_memory.h>
21 #include <rte_spinlock.h>
22 #include <rte_string_fns.h>
23 #include <rte_cryptodev_pmd.h>
26 #include "ccp_crypto.h"
28 #include "ccp_pmd_private.h"
30 /* SHA initial context values */
/*
 * Initial hash-state words preloaded into the CCP engine before a SHA
 * operation; ccp_sha1_init is file-local, the others are referenced by
 * ccp_configure_session_auth() below.
 * NOTE(review): the initializer bodies are elided in this listing; only
 * the declarations are visible — confirm values against FIPS 180-4.
 */
31 static uint32_t ccp_sha1_init[SHA_COMMON_DIGEST_SIZE / sizeof(uint32_t)] = {
38 uint32_t ccp_sha224_init[SHA256_DIGEST_SIZE / sizeof(uint32_t)] = {
45 uint32_t ccp_sha256_init[SHA256_DIGEST_SIZE / sizeof(uint32_t)] = {
52 uint64_t ccp_sha384_init[SHA512_DIGEST_SIZE / sizeof(uint64_t)] = {
59 uint64_t ccp_sha512_init[SHA512_DIGEST_SIZE / sizeof(uint64_t)] = {
/*
 * SHA3_CONST appears twice; the original file selects one definition via a
 * preprocessor conditional whose #if/#else/#endif lines are elided here
 * (plain constant vs. `L`-suffixed for targets needing an explicit long
 * suffix).
 */
67 #define SHA3_CONST(x) x
69 #define SHA3_CONST(x) x##L
72 /** 'Words' here refers to uint64_t */
/* Keccak-f[1600] state is 1600 bits = 25 64-bit words. */
73 #define SHA3_KECCAK_SPONGE_WORDS \
74 (((1600) / 8) / sizeof(uint64_t))
/* Incremental SHA-3 (Keccak) hashing context. */
75 typedef struct sha3_context_ {
/* `saved` (declared on an elided line) buffers partial input bytes. */
78 * The portion of the input message that we
/* 25-word sponge state; `sb` is the same 200 bytes viewed as raw bytes
 * (the two overlay each other in the original via a union — the union
 * keyword is on an elided line; verify).
 */
82 uint64_t s[SHA3_KECCAK_SPONGE_WORDS];
84 uint8_t sb[SHA3_KECCAK_SPONGE_WORDS * 8];
85 /**total 200 ctx size**/
87 unsigned int byteIndex;
89 * 0..7--the next byte after the set one
90 * (starts from 0; 0--none are buffered)
92 unsigned int wordIndex;
94 * 0..24--the next word to integrate input
/* capacityWords = 2 * digest_bits / 64, set by the sha3_Init* helpers. */
97 unsigned int capacityWords;
99 * the double size of the hash output in
100 * words (e.g. 16 for Keccak 512)
/*
 * 64-bit rotate-left. Undefined behavior if y is 0 or 64; all call sites
 * in this file pass rotation counts in 1..62 (keccakf_rotc and the
 * constant 1), so the macro is safe as used.
 */
105 #define SHA3_ROTL64(x, y) \
106 (((x) << (y)) | ((x) >> ((sizeof(uint64_t)*8) - (y))))
/* Keccak-f[1600] iota-step round constants, one per round (24 rounds). */
109 static const uint64_t keccakf_rndc[24] = {
110 SHA3_CONST(0x0000000000000001UL), SHA3_CONST(0x0000000000008082UL),
111 SHA3_CONST(0x800000000000808aUL), SHA3_CONST(0x8000000080008000UL),
112 SHA3_CONST(0x000000000000808bUL), SHA3_CONST(0x0000000080000001UL),
113 SHA3_CONST(0x8000000080008081UL), SHA3_CONST(0x8000000000008009UL),
114 SHA3_CONST(0x000000000000008aUL), SHA3_CONST(0x0000000000000088UL),
115 SHA3_CONST(0x0000000080008009UL), SHA3_CONST(0x000000008000000aUL),
116 SHA3_CONST(0x000000008000808bUL), SHA3_CONST(0x800000000000008bUL),
117 SHA3_CONST(0x8000000000008089UL), SHA3_CONST(0x8000000000008003UL),
118 SHA3_CONST(0x8000000000008002UL), SHA3_CONST(0x8000000000000080UL),
119 SHA3_CONST(0x000000000000800aUL), SHA3_CONST(0x800000008000000aUL),
120 SHA3_CONST(0x8000000080008081UL), SHA3_CONST(0x8000000000008080UL),
121 SHA3_CONST(0x0000000080000001UL), SHA3_CONST(0x8000000080008008UL)
/* Rho-step rotation offsets (last elements are on elided lines). */
124 static const unsigned int keccakf_rotc[24] = {
125 1, 3, 6, 10, 15, 21, 28, 36, 45, 55, 2, 14, 27, 41, 56, 8, 25, 43, 62,
/* Pi-step lane permutation indices (last elements are on elided lines). */
129 static const unsigned int keccakf_piln[24] = {
130 10, 7, 11, 17, 18, 3, 5, 16, 8, 21, 24, 4, 15, 23, 19, 13, 12, 2, 20,
/*
 * Classify an xform chain into a CCP command ordering:
 * auth-only, cipher-only, auth-then-cipher, cipher-then-auth, or
 * combined (AEAD). Returns CCP_CMD_NOT_SUPPORTED for anything else.
 */
134 static enum ccp_cmd_order
135 ccp_get_cmd_id(const struct rte_crypto_sym_xform *xform)
137 enum ccp_cmd_order res = CCP_CMD_NOT_SUPPORTED;
141 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH) {
/* auth-only return (CCP_CMD_AUTH) is on an elided line */
142 if (xform->next == NULL)
144 else if (xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
145 return CCP_CMD_HASH_CIPHER;
147 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER) {
148 if (xform->next == NULL)
149 return CCP_CMD_CIPHER;
150 else if (xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
151 return CCP_CMD_CIPHER_HASH;
153 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD)
154 return CCP_CMD_COMBINED;
158 /* partial hash using openssl */
/*
 * Run one SHA-1 compression over a single 64-byte block (the HMAC
 * ipad/opad-padded key) and copy the raw intermediate state out of the
 * OpenSSL context. Relies on SHA_CTX starting with the h0..h4 state
 * words — the copy takes the first SHA_DIGEST_LENGTH bytes of the struct.
 */
159 static int partial_hash_sha1(uint8_t *data_in, uint8_t *data_out)
163 if (!SHA1_Init(&ctx))
165 SHA1_Transform(&ctx, data_in);
166 rte_memcpy(data_out, &ctx, SHA_DIGEST_LENGTH);
/*
 * Single-block SHA-224 partial hash: init with SHA-224 IVs, run one
 * SHA-256 compression, copy the full 8-word intermediate state (SHA-224
 * shares the SHA-256 state layout).
 */
170 static int partial_hash_sha224(uint8_t *data_in, uint8_t *data_out)
174 if (!SHA224_Init(&ctx))
176 SHA256_Transform(&ctx, data_in);
177 rte_memcpy(data_out, &ctx,
178 SHA256_DIGEST_LENGTH);
/* Single-block SHA-256 partial hash; see partial_hash_sha1 for the idea. */
182 static int partial_hash_sha256(uint8_t *data_in, uint8_t *data_out)
186 if (!SHA256_Init(&ctx))
188 SHA256_Transform(&ctx, data_in);
189 rte_memcpy(data_out, &ctx,
190 SHA256_DIGEST_LENGTH);
/*
 * Single-block (128-byte) SHA-384 partial hash; SHA-384 shares the
 * SHA-512 context layout, so the full 8x64-bit state is copied out.
 */
194 static int partial_hash_sha384(uint8_t *data_in, uint8_t *data_out)
198 if (!SHA384_Init(&ctx))
200 SHA512_Transform(&ctx, data_in);
201 rte_memcpy(data_out, &ctx,
202 SHA512_DIGEST_LENGTH);
/* Single-block (128-byte) SHA-512 partial hash. */
206 static int partial_hash_sha512(uint8_t *data_in, uint8_t *data_out)
210 if (!SHA512_Init(&ctx))
212 SHA512_Transform(&ctx, data_in);
213 rte_memcpy(data_out, &ctx,
214 SHA512_DIGEST_LENGTH);
/*
 * Keccak-f[1600] permutation over the 25-word state, 24 rounds of
 * theta / rho+pi / chi / iota. Several loop bodies (theta column parity
 * tail, rho-pi lane swap, chi row copy) are on elided lines.
 */
219 keccakf(uint64_t s[25])
223 #define KECCAK_ROUNDS 24
225 for (round = 0; round < KECCAK_ROUNDS; round++) {
/* theta: column parities ... */
228 for (i = 0; i < 5; i++)
229 bc[i] = s[i] ^ s[i + 5] ^ s[i + 10] ^ s[i + 15] ^
232 for (i = 0; i < 5; i++) {
233 t = bc[(i + 4) % 5] ^ SHA3_ROTL64(bc[(i + 1) % 5], 1);
234 for (j = 0; j < 25; j += 5)
/* rho + pi: rotate each lane and permute via keccakf_piln */
240 for (i = 0; i < 24; i++) {
243 s[j] = SHA3_ROTL64(t, keccakf_rotc[i]);
/* chi: non-linear row mixing */
248 for (j = 0; j < 25; j += 5) {
249 for (i = 0; i < 5; i++)
251 for (i = 0; i < 5; i++)
252 s[j + i] ^= (~bc[(i + 1) % 5]) &
/* iota: inject the round constant */
257 s[0] ^= keccakf_rndc[round];
/* Reset context for SHA3-224: capacity = 2*224 bits = 7 sponge words. */
262 sha3_Init224(void *priv)
264 sha3_context *ctx = (sha3_context *) priv;
266 memset(ctx, 0, sizeof(*ctx));
267 ctx->capacityWords = 2 * 224 / (8 * sizeof(uint64_t));
/* Reset context for SHA3-256: capacity = 2*256 bits = 8 sponge words. */
271 sha3_Init256(void *priv)
273 sha3_context *ctx = (sha3_context *) priv;
275 memset(ctx, 0, sizeof(*ctx));
276 ctx->capacityWords = 2 * 256 / (8 * sizeof(uint64_t));
/* Reset context for SHA3-384: capacity = 2*384 bits = 12 sponge words. */
280 sha3_Init384(void *priv)
282 sha3_context *ctx = (sha3_context *) priv;
284 memset(ctx, 0, sizeof(*ctx));
285 ctx->capacityWords = 2 * 384 / (8 * sizeof(uint64_t));
/* Reset context for SHA3-512: capacity = 2*512 bits = 16 sponge words. */
289 sha3_Init512(void *priv)
291 sha3_context *ctx = (sha3_context *) priv;
293 memset(ctx, 0, sizeof(*ctx));
294 ctx->capacityWords = 2 * 512 / (8 * sizeof(uint64_t));
/*
 * NOTE(review): the comment below describes the 0x01..0x80 padding block,
 * which is applied by sha3_Finalize in the original file; it appears to
 * have drifted onto sha3_Update in this listing — verify placement.
 */
298 /* This is simply the 'update' with the padding block.
299 * The padding block is 0x01 || 0x00* || 0x80. First 0x01 and last 0x80
300 * bytes are always present, but they can be the same byte.
/*
 * Absorb `len` bytes into the sponge: first top up any partially filled
 * word buffered in ctx->saved, then XOR whole little-endian words into
 * the state (running keccakf each time the rate is full), then buffer
 * the trailing sub-word bytes. Some control-flow lines (returns,
 * keccakf() calls, wordIndex reset) are elided in this listing.
 */
303 sha3_Update(void *priv, void const *bufIn, size_t len)
305 sha3_context *ctx = (sha3_context *) priv;
306 unsigned int old_tail = (8 - ctx->byteIndex) & 7;
310 const uint8_t *buf = bufIn;
/* too few bytes to complete the buffered word: just stash them */
312 if (len < old_tail) {
314 ctx->saved |= (uint64_t) (*(buf++)) <<
315 ((ctx->byteIndex++) * 8);
/* complete the previously buffered word and absorb it */
322 ctx->saved |= (uint64_t) (*(buf++)) <<
323 ((ctx->byteIndex++) * 8);
325 ctx->s[ctx->wordIndex] ^= ctx->saved;
328 if (++ctx->wordIndex ==
329 (SHA3_KECCAK_SPONGE_WORDS - ctx->capacityWords)) {
335 words = len / sizeof(uint64_t);
336 tail = len - words * sizeof(uint64_t);
/* absorb full words, assembling each little-endian for portability */
338 for (i = 0; i < words; i++, buf += sizeof(uint64_t)) {
339 const uint64_t t = (uint64_t) (buf[0]) |
340 ((uint64_t) (buf[1]) << 8 * 1) |
341 ((uint64_t) (buf[2]) << 8 * 2) |
342 ((uint64_t) (buf[3]) << 8 * 3) |
343 ((uint64_t) (buf[4]) << 8 * 4) |
344 ((uint64_t) (buf[5]) << 8 * 5) |
345 ((uint64_t) (buf[6]) << 8 * 6) |
346 ((uint64_t) (buf[7]) << 8 * 7);
347 ctx->s[ctx->wordIndex] ^= t;
348 if (++ctx->wordIndex ==
349 (SHA3_KECCAK_SPONGE_WORDS - ctx->capacityWords)) {
/* buffer the remaining tail bytes for the next call */
356 ctx->saved |= (uint64_t) (*(buf++)) << ((ctx->byteIndex++) * 8);
/*
 * SHA3-224 partial hash of one rate-sized block: absorb the block, then
 * emit the full 200-byte sponge state byte-reversed (CCP LSB ordering).
 * NOTE(review): no rte_free(ctx) is visible in this listing — verify the
 * context is released on the elided lines to avoid a leak.
 */
359 int partial_hash_sha3_224(uint8_t *data_in, uint8_t *data_out)
364 ctx = rte_zmalloc("sha3-ctx", sizeof(sha3_context), 0);
366 CCP_LOG_ERR("sha3-ctx creation failed");
370 sha3_Update(ctx, data_in, SHA3_224_BLOCK_SIZE);
371 for (i = 0; i < CCP_SHA3_CTX_SIZE; i++, data_out++)
372 *data_out = ctx->sb[CCP_SHA3_CTX_SIZE - i - 1];
/*
 * SHA3-256 partial hash of one rate-sized block; state is copied out
 * byte-reversed for the CCP. NOTE(review): rte_free(ctx) not visible
 * here — verify on the elided lines.
 */
378 int partial_hash_sha3_256(uint8_t *data_in, uint8_t *data_out)
383 ctx = rte_zmalloc("sha3-ctx", sizeof(sha3_context), 0);
385 CCP_LOG_ERR("sha3-ctx creation failed");
389 sha3_Update(ctx, data_in, SHA3_256_BLOCK_SIZE);
390 for (i = 0; i < CCP_SHA3_CTX_SIZE; i++, data_out++)
391 *data_out = ctx->sb[CCP_SHA3_CTX_SIZE - i - 1];
/*
 * SHA3-384 partial hash of one rate-sized block; state is copied out
 * byte-reversed for the CCP. NOTE(review): rte_free(ctx) not visible
 * here — verify on the elided lines.
 */
397 int partial_hash_sha3_384(uint8_t *data_in, uint8_t *data_out)
402 ctx = rte_zmalloc("sha3-ctx", sizeof(sha3_context), 0);
404 CCP_LOG_ERR("sha3-ctx creation failed");
408 sha3_Update(ctx, data_in, SHA3_384_BLOCK_SIZE);
409 for (i = 0; i < CCP_SHA3_CTX_SIZE; i++, data_out++)
410 *data_out = ctx->sb[CCP_SHA3_CTX_SIZE - i - 1];
/*
 * SHA3-512 partial hash of one rate-sized block; state is copied out
 * byte-reversed for the CCP. NOTE(review): rte_free(ctx) not visible
 * here — verify on the elided lines.
 */
416 int partial_hash_sha3_512(uint8_t *data_in, uint8_t *data_out)
421 ctx = rte_zmalloc("sha3-ctx", sizeof(sha3_context), 0);
423 CCP_LOG_ERR("sha3-ctx creation failed");
427 sha3_Update(ctx, data_in, SHA3_512_BLOCK_SIZE);
428 for (i = 0; i < CCP_SHA3_CTX_SIZE; i++, data_out++)
429 *data_out = ctx->sb[CCP_SHA3_CTX_SIZE - i - 1];
/*
 * Precompute the HMAC inner/outer partial hashes for the session:
 * H(key ^ ipad) is stored at pre_compute[0], H(key ^ opad) at
 * pre_compute[ctx_len]. SHA-1/2 results are stored word-reversed for the
 * CCP LSB layout; SHA-3 helpers already emit CCP byte order. Assumes the
 * key buffer was zero-padded to the algorithm block size by the caller.
 * `break`s, error returns and the final `return 0` are on elided lines.
 */
435 static int generate_partial_hash(struct ccp_session *sess)
438 uint8_t ipad[sess->auth.block_size];
439 uint8_t opad[sess->auth.block_size];
440 uint8_t *ipad_t, *opad_t;
441 uint32_t *hash_value_be32, hash_temp32[8];
442 uint64_t *hash_value_be64, hash_temp64[8];
444 uint8_t *hash_value_sha3;
446 opad_t = ipad_t = (uint8_t *)sess->auth.key;
448 hash_value_be32 = (uint32_t *)((uint8_t *)sess->auth.pre_compute);
449 hash_value_be64 = (uint64_t *)((uint8_t *)sess->auth.pre_compute);
451 /* considering key size is always equal to block size of algorithm */
452 for (i = 0; i < sess->auth.block_size; i++) {
453 ipad[i] = (ipad_t[i] ^ HMAC_IPAD_VALUE);
454 opad[i] = (opad_t[i] ^ HMAC_OPAD_VALUE);
457 switch (sess->auth.algo) {
458 case CCP_AUTH_ALGO_SHA1_HMAC:
/* count = number of 32-bit state words to reverse */
459 count = SHA1_DIGEST_SIZE >> 2;
461 if (partial_hash_sha1(ipad, (uint8_t *)hash_temp32))
463 for (i = 0; i < count; i++, hash_value_be32++)
464 *hash_value_be32 = hash_temp32[count - 1 - i];
466 hash_value_be32 = (uint32_t *)((uint8_t *)sess->auth.pre_compute
467 + sess->auth.ctx_len);
468 if (partial_hash_sha1(opad, (uint8_t *)hash_temp32))
470 for (i = 0; i < count; i++, hash_value_be32++)
471 *hash_value_be32 = hash_temp32[count - 1 - i];
473 case CCP_AUTH_ALGO_SHA224_HMAC:
474 count = SHA256_DIGEST_SIZE >> 2;
476 if (partial_hash_sha224(ipad, (uint8_t *)hash_temp32))
478 for (i = 0; i < count; i++, hash_value_be32++)
479 *hash_value_be32 = hash_temp32[count - 1 - i];
481 hash_value_be32 = (uint32_t *)((uint8_t *)sess->auth.pre_compute
482 + sess->auth.ctx_len);
483 if (partial_hash_sha224(opad, (uint8_t *)hash_temp32))
485 for (i = 0; i < count; i++, hash_value_be32++)
486 *hash_value_be32 = hash_temp32[count - 1 - i];
488 case CCP_AUTH_ALGO_SHA3_224_HMAC:
/* SHA-3: helper writes directly into pre_compute, no reversal here */
489 hash_value_sha3 = sess->auth.pre_compute;
490 if (partial_hash_sha3_224(ipad, hash_value_sha3))
493 hash_value_sha3 = (uint8_t *)(sess->auth.pre_compute
494 + sess->auth.ctx_len);
495 if (partial_hash_sha3_224(opad, hash_value_sha3))
498 case CCP_AUTH_ALGO_SHA256_HMAC:
499 count = SHA256_DIGEST_SIZE >> 2;
501 if (partial_hash_sha256(ipad, (uint8_t *)hash_temp32))
503 for (i = 0; i < count; i++, hash_value_be32++)
504 *hash_value_be32 = hash_temp32[count - 1 - i];
506 hash_value_be32 = (uint32_t *)((uint8_t *)sess->auth.pre_compute
507 + sess->auth.ctx_len);
508 if (partial_hash_sha256(opad, (uint8_t *)hash_temp32))
510 for (i = 0; i < count; i++, hash_value_be32++)
511 *hash_value_be32 = hash_temp32[count - 1 - i];
513 case CCP_AUTH_ALGO_SHA3_256_HMAC:
514 hash_value_sha3 = sess->auth.pre_compute;
515 if (partial_hash_sha3_256(ipad, hash_value_sha3))
518 hash_value_sha3 = (uint8_t *)(sess->auth.pre_compute
519 + sess->auth.ctx_len);
520 if (partial_hash_sha3_256(opad, hash_value_sha3))
523 case CCP_AUTH_ALGO_SHA384_HMAC:
/* 64-bit state words for SHA-384/512 */
524 count = SHA512_DIGEST_SIZE >> 3;
526 if (partial_hash_sha384(ipad, (uint8_t *)hash_temp64))
528 for (i = 0; i < count; i++, hash_value_be64++)
529 *hash_value_be64 = hash_temp64[count - 1 - i];
531 hash_value_be64 = (uint64_t *)((uint8_t *)sess->auth.pre_compute
532 + sess->auth.ctx_len);
533 if (partial_hash_sha384(opad, (uint8_t *)hash_temp64))
535 for (i = 0; i < count; i++, hash_value_be64++)
536 *hash_value_be64 = hash_temp64[count - 1 - i];
538 case CCP_AUTH_ALGO_SHA3_384_HMAC:
539 hash_value_sha3 = sess->auth.pre_compute;
540 if (partial_hash_sha3_384(ipad, hash_value_sha3))
543 hash_value_sha3 = (uint8_t *)(sess->auth.pre_compute
544 + sess->auth.ctx_len);
545 if (partial_hash_sha3_384(opad, hash_value_sha3))
548 case CCP_AUTH_ALGO_SHA512_HMAC:
549 count = SHA512_DIGEST_SIZE >> 3;
551 if (partial_hash_sha512(ipad, (uint8_t *)hash_temp64))
553 for (i = 0; i < count; i++, hash_value_be64++)
554 *hash_value_be64 = hash_temp64[count - 1 - i];
556 hash_value_be64 = (uint64_t *)((uint8_t *)sess->auth.pre_compute
557 + sess->auth.ctx_len);
558 if (partial_hash_sha512(opad, (uint8_t *)hash_temp64))
560 for (i = 0; i < count; i++, hash_value_be64++)
561 *hash_value_be64 = hash_temp64[count - 1 - i];
563 case CCP_AUTH_ALGO_SHA3_512_HMAC:
564 hash_value_sha3 = sess->auth.pre_compute;
565 if (partial_hash_sha3_512(ipad, hash_value_sha3))
568 hash_value_sha3 = (uint8_t *)(sess->auth.pre_compute
569 + sess->auth.ctx_len);
570 if (partial_hash_sha3_512(opad, hash_value_sha3))
574 CCP_LOG_ERR("Invalid auth algo");
579 /* prepare temporary keys K1 and K2 */
/*
 * CMAC subkey derivation (NIST SP 800-38B): k = l << 1 in GF(2^b), i.e.
 * shift left by one bit with carry, then XOR the constant Rb into the
 * last byte when the MSB of l was set (0x87 for 16-byte AES blocks,
 * 0x1b otherwise). The MSB test and shift line are elided here.
 */
580 static void prepare_key(unsigned char *k, unsigned char *l, int bl)
583 /* Shift block to left, including carry */
584 for (i = 0; i < bl; i++) {
586 if (i < bl - 1 && l[i + 1] & 0x80)
589 /* If MSB set fixup with R */
591 k[bl - 1] ^= bl == 16 ? 0x87 : 0x1b;
594 /* subkeys K1 and K2 generation for CMAC */
/*
 * Derive CMAC subkeys K1/K2: encrypt a zero block with the session AES
 * key via OpenSSL EVP, derive K1 from that ciphertext and K2 from K1
 * (prepare_key), and store both byte-reversed into the session's
 * pre_compute area (CCP LSB layout).
 * NOTE(review): whether EVP_CIPHER_CTX_free is called on the
 * key_generate_err path is not visible in this listing — verify no ctx
 * leak on the elided error-handling lines.
 */
596 generate_cmac_subkeys(struct ccp_session *sess)
598 const EVP_CIPHER *algo;
600 unsigned char *ccp_ctx;
603 unsigned char zero_iv[AES_BLOCK_SIZE] = {0};
604 unsigned char dst[2 * AES_BLOCK_SIZE] = {0};
605 unsigned char k1[AES_BLOCK_SIZE] = {0};
606 unsigned char k2[AES_BLOCK_SIZE] = {0};
608 if (sess->auth.ut.aes_type == CCP_AES_TYPE_128)
609 algo = EVP_aes_128_cbc();
610 else if (sess->auth.ut.aes_type == CCP_AES_TYPE_192)
611 algo = EVP_aes_192_cbc();
612 else if (sess->auth.ut.aes_type == CCP_AES_TYPE_256)
613 algo = EVP_aes_256_cbc();
615 CCP_LOG_ERR("Invalid CMAC type length");
619 ctx = EVP_CIPHER_CTX_new();
621 CCP_LOG_ERR("ctx creation failed");
/* L = E_K(0^128): CBC of a zero block with a zero IV equals raw ECB */
624 if (EVP_EncryptInit(ctx, algo, (unsigned char *)sess->auth.key,
625 (unsigned char *)zero_iv) <= 0)
626 goto key_generate_err;
627 if (EVP_CIPHER_CTX_set_padding(ctx, 0) <= 0)
628 goto key_generate_err;
629 if (EVP_EncryptUpdate(ctx, dst, &dstlen, zero_iv,
630 AES_BLOCK_SIZE) <= 0)
631 goto key_generate_err;
632 if (EVP_EncryptFinal_ex(ctx, dst + dstlen, &totlen) <= 0)
633 goto key_generate_err;
635 memset(sess->auth.pre_compute, 0, CCP_SB_BYTES * 2);
/* store K1 byte-reversed in the first SB slot */
637 ccp_ctx = (unsigned char *)(sess->auth.pre_compute + CCP_SB_BYTES - 1);
638 prepare_key(k1, dst, AES_BLOCK_SIZE);
639 for (i = 0; i < AES_BLOCK_SIZE; i++, ccp_ctx--)
/* store K2 byte-reversed in the second SB slot */
642 ccp_ctx = (unsigned char *)(sess->auth.pre_compute +
643 (2 * CCP_SB_BYTES) - 1);
644 prepare_key(k2, k1, AES_BLOCK_SIZE);
645 for (i = 0; i < AES_BLOCK_SIZE; i++, ccp_ctx--)
648 EVP_CIPHER_CTX_free(ctx);
653 CCP_LOG_ERR("CMAC Init failed");
657 /* configure session */
/*
 * Fill the session's cipher half from a cipher xform: direction, key,
 * IV geometry, algorithm/engine selection, and the key stored
 * byte-reversed (AES) or per-8-byte-word-reversed (3DES) for the CCP.
 * Error returns and `break`s are on elided lines.
 */
659 ccp_configure_session_cipher(struct ccp_session *sess,
660 const struct rte_crypto_sym_xform *xform)
662 const struct rte_crypto_cipher_xform *cipher_xform = NULL;
665 cipher_xform = &xform->cipher;
667 /* set cipher direction */
668 if (cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
669 sess->cipher.dir = CCP_CIPHER_DIR_ENCRYPT;
671 sess->cipher.dir = CCP_CIPHER_DIR_DECRYPT;
674 sess->cipher.key_length = cipher_xform->key.length;
675 rte_memcpy(sess->cipher.key, cipher_xform->key.data,
676 cipher_xform->key.length);
678 /* set iv parameters */
679 sess->iv.offset = cipher_xform->iv.offset;
680 sess->iv.length = cipher_xform->iv.length;
682 switch (cipher_xform->algo) {
683 case RTE_CRYPTO_CIPHER_AES_CTR:
684 sess->cipher.algo = CCP_CIPHER_ALGO_AES_CTR;
685 sess->cipher.um.aes_mode = CCP_AES_MODE_CTR;
686 sess->cipher.engine = CCP_ENGINE_AES;
688 case RTE_CRYPTO_CIPHER_AES_ECB:
/*
 * NOTE(review): ECB reuses the CBC algo enum; the actual hardware
 * mode is distinguished by aes_mode = CCP_AES_MODE_ECB below —
 * confirm this enum reuse is intentional.
 */
689 sess->cipher.algo = CCP_CIPHER_ALGO_AES_CBC;
690 sess->cipher.um.aes_mode = CCP_AES_MODE_ECB;
691 sess->cipher.engine = CCP_ENGINE_AES;
693 case RTE_CRYPTO_CIPHER_AES_CBC:
694 sess->cipher.algo = CCP_CIPHER_ALGO_AES_CBC;
695 sess->cipher.um.aes_mode = CCP_AES_MODE_CBC;
696 sess->cipher.engine = CCP_ENGINE_AES;
698 case RTE_CRYPTO_CIPHER_3DES_CBC:
699 sess->cipher.algo = CCP_CIPHER_ALGO_3DES_CBC;
700 sess->cipher.um.des_mode = CCP_DES_MODE_CBC;
701 sess->cipher.engine = CCP_ENGINE_3DES;
704 CCP_LOG_ERR("Unsupported cipher algo");
/* engine-specific key-size validation and CCP key layout */
709 switch (sess->cipher.engine) {
711 if (sess->cipher.key_length == 16)
712 sess->cipher.ut.aes_type = CCP_AES_TYPE_128;
713 else if (sess->cipher.key_length == 24)
714 sess->cipher.ut.aes_type = CCP_AES_TYPE_192;
715 else if (sess->cipher.key_length == 32)
716 sess->cipher.ut.aes_type = CCP_AES_TYPE_256;
718 CCP_LOG_ERR("Invalid cipher key length");
/* AES key is stored fully byte-reversed for the CCP */
721 for (i = 0; i < sess->cipher.key_length ; i++)
722 sess->cipher.key_ccp[sess->cipher.key_length - i - 1] =
725 case CCP_ENGINE_3DES:
726 if (sess->cipher.key_length == 16)
727 sess->cipher.ut.des_type = CCP_DES_TYPE_128;
728 else if (sess->cipher.key_length == 24)
729 sess->cipher.ut.des_type = CCP_DES_TYPE_192;
731 CCP_LOG_ERR("Invalid cipher key length");
/* 3DES key is byte-reversed within each 8-byte subkey */
734 for (j = 0, x = 0; j < sess->cipher.key_length/8; j++, x += 8)
735 for (i = 0; i < 8; i++)
736 sess->cipher.key_ccp[(8 + x) - i - 1] =
737 sess->cipher.key[i + x];
740 CCP_LOG_ERR("Invalid CCP Engine");
/* cache physical addresses for descriptor programming */
743 sess->cipher.nonce_phys = rte_mem_virt2phy(sess->cipher.nonce);
744 sess->cipher.key_phys = rte_mem_virt2phy(sess->cipher.key_ccp);
/*
 * Fill the session's auth half from an auth xform. Plain-hash cases set
 * the precomputed init state (ctx/ctx_len/offset); HMAC cases validate
 * the key length against the block size, zero-pad the key, and build the
 * ipad/opad partial hashes via generate_partial_hash(); AES-CMAC derives
 * its K1/K2 subkeys via generate_cmac_subkeys(). `offset` is the byte
 * position of the digest inside the ctx area. `break`s and error
 * returns are on elided lines.
 */
749 ccp_configure_session_auth(struct ccp_session *sess,
750 const struct rte_crypto_sym_xform *xform)
752 const struct rte_crypto_auth_xform *auth_xform = NULL;
755 auth_xform = &xform->auth;
757 sess->auth.digest_length = auth_xform->digest_length;
758 if (auth_xform->op == RTE_CRYPTO_AUTH_OP_GENERATE)
759 sess->auth.op = CCP_AUTH_OP_GENERATE;
761 sess->auth.op = CCP_AUTH_OP_VERIFY;
762 switch (auth_xform->algo) {
763 case RTE_CRYPTO_AUTH_SHA1:
764 sess->auth.engine = CCP_ENGINE_SHA;
765 sess->auth.algo = CCP_AUTH_ALGO_SHA1;
766 sess->auth.ut.sha_type = CCP_SHA_TYPE_1;
767 sess->auth.ctx = (void *)ccp_sha1_init;
768 sess->auth.ctx_len = CCP_SB_BYTES;
769 sess->auth.offset = CCP_SB_BYTES - SHA1_DIGEST_SIZE;
771 case RTE_CRYPTO_AUTH_SHA1_HMAC:
/* key longer than the block size is rejected (elided return) */
772 if (auth_xform->key.length > SHA1_BLOCK_SIZE)
774 sess->auth.engine = CCP_ENGINE_SHA;
775 sess->auth.algo = CCP_AUTH_ALGO_SHA1_HMAC;
776 sess->auth.ut.sha_type = CCP_SHA_TYPE_1;
777 sess->auth.ctx_len = CCP_SB_BYTES;
778 sess->auth.offset = CCP_SB_BYTES - SHA1_DIGEST_SIZE;
779 sess->auth.block_size = SHA1_BLOCK_SIZE;
780 sess->auth.key_length = auth_xform->key.length;
781 memset(sess->auth.key, 0, sess->auth.block_size);
782 memset(sess->auth.pre_compute, 0, sess->auth.ctx_len << 1);
783 rte_memcpy(sess->auth.key, auth_xform->key.data,
784 auth_xform->key.length);
785 if (generate_partial_hash(sess))
788 case RTE_CRYPTO_AUTH_SHA224:
789 sess->auth.algo = CCP_AUTH_ALGO_SHA224;
790 sess->auth.engine = CCP_ENGINE_SHA;
791 sess->auth.ut.sha_type = CCP_SHA_TYPE_224;
792 sess->auth.ctx = (void *)ccp_sha224_init;
793 sess->auth.ctx_len = CCP_SB_BYTES;
794 sess->auth.offset = CCP_SB_BYTES - SHA224_DIGEST_SIZE;
796 case RTE_CRYPTO_AUTH_SHA224_HMAC:
797 if (auth_xform->key.length > SHA224_BLOCK_SIZE)
799 sess->auth.algo = CCP_AUTH_ALGO_SHA224_HMAC;
800 sess->auth.engine = CCP_ENGINE_SHA;
801 sess->auth.ut.sha_type = CCP_SHA_TYPE_224;
802 sess->auth.ctx_len = CCP_SB_BYTES;
803 sess->auth.offset = CCP_SB_BYTES - SHA224_DIGEST_SIZE;
804 sess->auth.block_size = SHA224_BLOCK_SIZE;
805 sess->auth.key_length = auth_xform->key.length;
806 memset(sess->auth.key, 0, sess->auth.block_size);
807 memset(sess->auth.pre_compute, 0, sess->auth.ctx_len << 1);
808 rte_memcpy(sess->auth.key, auth_xform->key.data,
809 auth_xform->key.length);
810 if (generate_partial_hash(sess))
813 case RTE_CRYPTO_AUTH_SHA3_224:
814 sess->auth.algo = CCP_AUTH_ALGO_SHA3_224;
815 sess->auth.engine = CCP_ENGINE_SHA;
816 sess->auth.ut.sha_type = CCP_SHA3_TYPE_224;
817 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
818 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA224_DIGEST_SIZE;
820 case RTE_CRYPTO_AUTH_SHA3_224_HMAC:
821 if (auth_xform->key.length > SHA3_224_BLOCK_SIZE)
823 sess->auth.algo = CCP_AUTH_ALGO_SHA3_224_HMAC;
824 sess->auth.engine = CCP_ENGINE_SHA;
825 sess->auth.ut.sha_type = CCP_SHA3_TYPE_224;
826 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
827 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA224_DIGEST_SIZE;
828 sess->auth.block_size = SHA3_224_BLOCK_SIZE;
829 sess->auth.key_length = auth_xform->key.length;
830 memset(sess->auth.key, 0, sess->auth.block_size);
831 memset(sess->auth.pre_compute, 0, 2 * sess->auth.ctx_len);
832 rte_memcpy(sess->auth.key, auth_xform->key.data,
833 auth_xform->key.length);
834 if (generate_partial_hash(sess))
837 case RTE_CRYPTO_AUTH_SHA256:
838 sess->auth.algo = CCP_AUTH_ALGO_SHA256;
839 sess->auth.engine = CCP_ENGINE_SHA;
840 sess->auth.ut.sha_type = CCP_SHA_TYPE_256;
841 sess->auth.ctx = (void *)ccp_sha256_init;
842 sess->auth.ctx_len = CCP_SB_BYTES;
843 sess->auth.offset = CCP_SB_BYTES - SHA256_DIGEST_SIZE;
845 case RTE_CRYPTO_AUTH_SHA256_HMAC:
846 if (auth_xform->key.length > SHA256_BLOCK_SIZE)
848 sess->auth.algo = CCP_AUTH_ALGO_SHA256_HMAC;
849 sess->auth.engine = CCP_ENGINE_SHA;
850 sess->auth.ut.sha_type = CCP_SHA_TYPE_256;
851 sess->auth.ctx_len = CCP_SB_BYTES;
852 sess->auth.offset = CCP_SB_BYTES - SHA256_DIGEST_SIZE;
853 sess->auth.block_size = SHA256_BLOCK_SIZE;
854 sess->auth.key_length = auth_xform->key.length;
855 memset(sess->auth.key, 0, sess->auth.block_size);
856 memset(sess->auth.pre_compute, 0, sess->auth.ctx_len << 1);
857 rte_memcpy(sess->auth.key, auth_xform->key.data,
858 auth_xform->key.length);
859 if (generate_partial_hash(sess))
862 case RTE_CRYPTO_AUTH_SHA3_256:
863 sess->auth.algo = CCP_AUTH_ALGO_SHA3_256;
864 sess->auth.engine = CCP_ENGINE_SHA;
865 sess->auth.ut.sha_type = CCP_SHA3_TYPE_256;
866 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
867 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA256_DIGEST_SIZE;
869 case RTE_CRYPTO_AUTH_SHA3_256_HMAC:
870 if (auth_xform->key.length > SHA3_256_BLOCK_SIZE)
872 sess->auth.algo = CCP_AUTH_ALGO_SHA3_256_HMAC;
873 sess->auth.engine = CCP_ENGINE_SHA;
874 sess->auth.ut.sha_type = CCP_SHA3_TYPE_256;
875 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
876 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA256_DIGEST_SIZE;
877 sess->auth.block_size = SHA3_256_BLOCK_SIZE;
878 sess->auth.key_length = auth_xform->key.length;
879 memset(sess->auth.key, 0, sess->auth.block_size);
880 memset(sess->auth.pre_compute, 0, 2 * sess->auth.ctx_len);
881 rte_memcpy(sess->auth.key, auth_xform->key.data,
882 auth_xform->key.length);
883 if (generate_partial_hash(sess))
886 case RTE_CRYPTO_AUTH_SHA384:
887 sess->auth.algo = CCP_AUTH_ALGO_SHA384;
888 sess->auth.engine = CCP_ENGINE_SHA;
889 sess->auth.ut.sha_type = CCP_SHA_TYPE_384;
890 sess->auth.ctx = (void *)ccp_sha384_init;
/* 384/512-bit state needs two storage-block slots */
891 sess->auth.ctx_len = CCP_SB_BYTES << 1;
892 sess->auth.offset = (CCP_SB_BYTES << 1) - SHA384_DIGEST_SIZE;
894 case RTE_CRYPTO_AUTH_SHA384_HMAC:
895 if (auth_xform->key.length > SHA384_BLOCK_SIZE)
897 sess->auth.algo = CCP_AUTH_ALGO_SHA384_HMAC;
898 sess->auth.engine = CCP_ENGINE_SHA;
899 sess->auth.ut.sha_type = CCP_SHA_TYPE_384;
900 sess->auth.ctx_len = CCP_SB_BYTES << 1;
901 sess->auth.offset = (CCP_SB_BYTES << 1) - SHA384_DIGEST_SIZE;
902 sess->auth.block_size = SHA384_BLOCK_SIZE;
903 sess->auth.key_length = auth_xform->key.length;
904 memset(sess->auth.key, 0, sess->auth.block_size);
905 memset(sess->auth.pre_compute, 0, sess->auth.ctx_len << 1);
906 rte_memcpy(sess->auth.key, auth_xform->key.data,
907 auth_xform->key.length);
908 if (generate_partial_hash(sess))
911 case RTE_CRYPTO_AUTH_SHA3_384:
912 sess->auth.algo = CCP_AUTH_ALGO_SHA3_384;
913 sess->auth.engine = CCP_ENGINE_SHA;
914 sess->auth.ut.sha_type = CCP_SHA3_TYPE_384;
915 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
916 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA384_DIGEST_SIZE;
918 case RTE_CRYPTO_AUTH_SHA3_384_HMAC:
919 if (auth_xform->key.length > SHA3_384_BLOCK_SIZE)
921 sess->auth.algo = CCP_AUTH_ALGO_SHA3_384_HMAC;
922 sess->auth.engine = CCP_ENGINE_SHA;
923 sess->auth.ut.sha_type = CCP_SHA3_TYPE_384;
924 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
925 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA384_DIGEST_SIZE;
926 sess->auth.block_size = SHA3_384_BLOCK_SIZE;
927 sess->auth.key_length = auth_xform->key.length;
928 memset(sess->auth.key, 0, sess->auth.block_size);
929 memset(sess->auth.pre_compute, 0, 2 * sess->auth.ctx_len);
930 rte_memcpy(sess->auth.key, auth_xform->key.data,
931 auth_xform->key.length);
932 if (generate_partial_hash(sess))
935 case RTE_CRYPTO_AUTH_SHA512:
936 sess->auth.algo = CCP_AUTH_ALGO_SHA512;
937 sess->auth.engine = CCP_ENGINE_SHA;
938 sess->auth.ut.sha_type = CCP_SHA_TYPE_512;
939 sess->auth.ctx = (void *)ccp_sha512_init;
940 sess->auth.ctx_len = CCP_SB_BYTES << 1;
941 sess->auth.offset = (CCP_SB_BYTES << 1) - SHA512_DIGEST_SIZE;
943 case RTE_CRYPTO_AUTH_SHA512_HMAC:
944 if (auth_xform->key.length > SHA512_BLOCK_SIZE)
946 sess->auth.algo = CCP_AUTH_ALGO_SHA512_HMAC;
947 sess->auth.engine = CCP_ENGINE_SHA;
948 sess->auth.ut.sha_type = CCP_SHA_TYPE_512;
949 sess->auth.ctx_len = CCP_SB_BYTES << 1;
950 sess->auth.offset = (CCP_SB_BYTES << 1) - SHA512_DIGEST_SIZE;
951 sess->auth.block_size = SHA512_BLOCK_SIZE;
952 sess->auth.key_length = auth_xform->key.length;
953 memset(sess->auth.key, 0, sess->auth.block_size);
954 memset(sess->auth.pre_compute, 0, sess->auth.ctx_len << 1);
955 rte_memcpy(sess->auth.key, auth_xform->key.data,
956 auth_xform->key.length);
957 if (generate_partial_hash(sess))
960 case RTE_CRYPTO_AUTH_SHA3_512:
961 sess->auth.algo = CCP_AUTH_ALGO_SHA3_512;
962 sess->auth.engine = CCP_ENGINE_SHA;
963 sess->auth.ut.sha_type = CCP_SHA3_TYPE_512;
964 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
965 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA512_DIGEST_SIZE;
967 case RTE_CRYPTO_AUTH_SHA3_512_HMAC:
968 if (auth_xform->key.length > SHA3_512_BLOCK_SIZE)
970 sess->auth.algo = CCP_AUTH_ALGO_SHA3_512_HMAC;
971 sess->auth.engine = CCP_ENGINE_SHA;
972 sess->auth.ut.sha_type = CCP_SHA3_TYPE_512;
973 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
974 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA512_DIGEST_SIZE;
975 sess->auth.block_size = SHA3_512_BLOCK_SIZE;
976 sess->auth.key_length = auth_xform->key.length;
977 memset(sess->auth.key, 0, sess->auth.block_size);
978 memset(sess->auth.pre_compute, 0, 2 * sess->auth.ctx_len);
979 rte_memcpy(sess->auth.key, auth_xform->key.data,
980 auth_xform->key.length);
981 if (generate_partial_hash(sess))
984 case RTE_CRYPTO_AUTH_AES_CMAC:
985 sess->auth.algo = CCP_AUTH_ALGO_AES_CMAC;
986 sess->auth.engine = CCP_ENGINE_AES;
987 sess->auth.um.aes_mode = CCP_AES_MODE_CMAC;
988 sess->auth.key_length = auth_xform->key.length;
989 /**<padding and hash result*/
990 sess->auth.ctx_len = CCP_SB_BYTES << 1;
991 sess->auth.offset = AES_BLOCK_SIZE;
992 sess->auth.block_size = AES_BLOCK_SIZE;
993 if (sess->auth.key_length == 16)
994 sess->auth.ut.aes_type = CCP_AES_TYPE_128;
995 else if (sess->auth.key_length == 24)
996 sess->auth.ut.aes_type = CCP_AES_TYPE_192;
997 else if (sess->auth.key_length == 32)
998 sess->auth.ut.aes_type = CCP_AES_TYPE_256;
1000 CCP_LOG_ERR("Invalid CMAC key length");
/* CMAC key stored byte-reversed, like the cipher AES key */
1003 rte_memcpy(sess->auth.key, auth_xform->key.data,
1004 sess->auth.key_length);
1005 for (i = 0; i < sess->auth.key_length; i++)
1006 sess->auth.key_ccp[sess->auth.key_length - i - 1] =
1008 if (generate_cmac_subkeys(sess))
1012 CCP_LOG_ERR("Unsupported hash algo");
/*
 * Fill both cipher and auth halves of the session from an AEAD xform.
 * Only AES-GCM is supported: GCTR for the cipher part, GHASH for the
 * auth part, with the key stored byte-reversed for the CCP.
 * Error returns and `break`s are on elided lines.
 */
1019 ccp_configure_session_aead(struct ccp_session *sess,
1020 const struct rte_crypto_sym_xform *xform)
1022 const struct rte_crypto_aead_xform *aead_xform = NULL;
1025 aead_xform = &xform->aead;
1027 sess->cipher.key_length = aead_xform->key.length;
1028 rte_memcpy(sess->cipher.key, aead_xform->key.data,
1029 aead_xform->key.length);
/* encrypt implies tag generation; decrypt implies tag verification */
1031 if (aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
1032 sess->cipher.dir = CCP_CIPHER_DIR_ENCRYPT;
1033 sess->auth.op = CCP_AUTH_OP_GENERATE;
1035 sess->cipher.dir = CCP_CIPHER_DIR_DECRYPT;
1036 sess->auth.op = CCP_AUTH_OP_VERIFY;
1038 sess->aead_algo = aead_xform->algo;
1039 sess->auth.aad_length = aead_xform->aad_length;
1040 sess->auth.digest_length = aead_xform->digest_length;
1042 /* set iv parameters */
1043 sess->iv.offset = aead_xform->iv.offset;
1044 sess->iv.length = aead_xform->iv.length;
1046 switch (aead_xform->algo) {
1047 case RTE_CRYPTO_AEAD_AES_GCM:
1048 sess->cipher.algo = CCP_CIPHER_ALGO_AES_GCM;
1049 sess->cipher.um.aes_mode = CCP_AES_MODE_GCTR;
1050 sess->cipher.engine = CCP_ENGINE_AES;
1051 if (sess->cipher.key_length == 16)
1052 sess->cipher.ut.aes_type = CCP_AES_TYPE_128;
1053 else if (sess->cipher.key_length == 24)
1054 sess->cipher.ut.aes_type = CCP_AES_TYPE_192;
1055 else if (sess->cipher.key_length == 32)
1056 sess->cipher.ut.aes_type = CCP_AES_TYPE_256;
1058 CCP_LOG_ERR("Invalid aead key length");
/* AES key stored byte-reversed for the CCP */
1061 for (i = 0; i < sess->cipher.key_length; i++)
1062 sess->cipher.key_ccp[sess->cipher.key_length - i - 1] =
1063 sess->cipher.key[i];
1064 sess->auth.algo = CCP_AUTH_ALGO_AES_GCM;
1065 sess->auth.engine = CCP_ENGINE_AES;
1066 sess->auth.um.aes_mode = CCP_AES_MODE_GHASH;
1067 sess->auth.ctx_len = CCP_SB_BYTES;
1068 sess->auth.offset = 0;
1069 sess->auth.block_size = AES_BLOCK_SIZE;
1070 sess->cmd_id = CCP_CMD_COMBINED;
1073 CCP_LOG_ERR("Unsupported aead algo");
1076 sess->cipher.nonce_phys = rte_mem_virt2phy(sess->cipher.nonce);
1077 sess->cipher.key_phys = rte_mem_virt2phy(sess->cipher.key_ccp);
/*
 * Top-level session setup: classify the xform chain, pick which xforms
 * feed the cipher/auth/aead configuration helpers, and run each helper
 * that applies. Assignments for the auth-only and combined cases, the
 * NULL-checks guarding each helper call, and the final return are on
 * elided lines.
 */
1082 ccp_set_session_parameters(struct ccp_session *sess,
1083 const struct rte_crypto_sym_xform *xform)
1085 const struct rte_crypto_sym_xform *cipher_xform = NULL;
1086 const struct rte_crypto_sym_xform *auth_xform = NULL;
1087 const struct rte_crypto_sym_xform *aead_xform = NULL;
1090 sess->cmd_id = ccp_get_cmd_id(xform);
1092 switch (sess->cmd_id) {
1093 case CCP_CMD_CIPHER:
1094 cipher_xform = xform;
1099 case CCP_CMD_CIPHER_HASH:
1100 cipher_xform = xform;
1101 auth_xform = xform->next;
1103 case CCP_CMD_HASH_CIPHER:
1105 cipher_xform = xform->next;
1107 case CCP_CMD_COMBINED:
1111 CCP_LOG_ERR("Unsupported cmd_id");
1115 /* Default IV length = 0 */
1116 sess->iv.length = 0;
1118 ret = ccp_configure_session_cipher(sess, cipher_xform);
1120 CCP_LOG_ERR("Invalid/unsupported cipher parameters");
1125 ret = ccp_configure_session_auth(sess, auth_xform);
1127 CCP_LOG_ERR("Invalid/unsupported auth parameters");
1132 ret = ccp_configure_session_aead(sess, aead_xform);
1134 CCP_LOG_ERR("Invalid/unsupported aead parameters");
1141 /* calculate CCP descriptors requirement */
/*
 * Number of CCP queue descriptors needed for one cipher operation of
 * this session's algorithm (count assignments are on elided lines).
 */
1143 ccp_cipher_slot(struct ccp_session *session)
1147 switch (session->cipher.algo) {
1148 case CCP_CIPHER_ALGO_AES_CBC:
1150 /**< op + passthrough for iv */
1152 case CCP_CIPHER_ALGO_AES_ECB:
1156 case CCP_CIPHER_ALGO_AES_CTR:
1158 /**< op + passthrough for iv */
1160 case CCP_CIPHER_ALGO_3DES_CBC:
1162 /**< op + passthrough for iv */
1165 CCP_LOG_ERR("Unsupported cipher algo %d",
1166 session->cipher.algo)
/*
 * Number of CCP queue descriptors needed for one auth operation of this
 * session's algorithm (count assignments are on elided lines).
 */
1172 ccp_auth_slot(struct ccp_session *session)
1176 switch (session->auth.algo) {
1177 case CCP_AUTH_ALGO_SHA1:
1178 case CCP_AUTH_ALGO_SHA224:
1179 case CCP_AUTH_ALGO_SHA256:
1180 case CCP_AUTH_ALGO_SHA384:
1181 case CCP_AUTH_ALGO_SHA512:
1183 /**< op + lsb passthrough cpy to/from*/
1185 case CCP_AUTH_ALGO_SHA1_HMAC:
1186 case CCP_AUTH_ALGO_SHA224_HMAC:
1187 case CCP_AUTH_ALGO_SHA256_HMAC:
1190 case CCP_AUTH_ALGO_SHA384_HMAC:
1191 case CCP_AUTH_ALGO_SHA512_HMAC:
/* full HMAC pipeline on the device: */
1194 * 1. Load PHash1 = H(k ^ ipad); to LSB
1195 * 2. generate IHash = H(hash on message with PHash1
1197 * 3. Retrieve IHash 2 slots for 384/512
1198 * 4. Load Phash2 = H(k ^ opad); to LSB
1199 * 5. generate FHash = H(hash on Ihash with Phash2
1201 * 6. Retrieve HMAC output from LSB to host memory
1204 case CCP_AUTH_ALGO_SHA3_224:
1205 case CCP_AUTH_ALGO_SHA3_256:
1206 case CCP_AUTH_ALGO_SHA3_384:
1207 case CCP_AUTH_ALGO_SHA3_512:
1209 /**< only op ctx and dst in host memory*/
1211 case CCP_AUTH_ALGO_SHA3_224_HMAC:
1212 case CCP_AUTH_ALGO_SHA3_256_HMAC:
1215 case CCP_AUTH_ALGO_SHA3_384_HMAC:
1216 case CCP_AUTH_ALGO_SHA3_512_HMAC:
1219 * 1. Op to Perform Ihash
1220 * 2. Retrieve result from LSB to host memory
1221 * 3. Perform final hash
1224 case CCP_AUTH_ALGO_AES_CMAC:
1228 * extra descriptor in padding case
1229 * (k1/k2(255:128) with iv(127:0))
1234 CCP_LOG_ERR("Unsupported auth algo %d",
1235 session->auth.algo);
/*
 * ccp_aead_slot() - return the number of CCP queue descriptors needed for
 * the session's AEAD algorithm.  Only AES-GCM is accepted; the count is
 * then decided by the combined auth algorithm.
 */
1242 ccp_aead_slot(struct ccp_session *session)
1246 switch (session->aead_algo) {
1247 case RTE_CRYPTO_AEAD_AES_GCM:
1250 CCP_LOG_ERR("Unsupported aead algo %d",
1251 session->aead_algo);
1253 switch (session->auth.algo) {
1254 case CCP_AUTH_ALGO_AES_GCM:
1260 * 4. Reload passthru
1265 CCP_LOG_ERR("Unsupported combined auth ALGO %d",
1266 session->auth.algo);
/*
 * ccp_compute_slot_count() - total CCP descriptor count for a session,
 * dispatching on cmd_id: cipher-only, auth-only, chained cipher+auth
 * (sum of both), or combined AEAD.
 */
1272 ccp_compute_slot_count(struct ccp_session *session)
1276 switch (session->cmd_id) {
1277 case CCP_CMD_CIPHER:
1278 count = ccp_cipher_slot(session);
1281 count = ccp_auth_slot(session);
1283 case CCP_CMD_CIPHER_HASH:
1284 case CCP_CMD_HASH_CIPHER:
1285 count = ccp_cipher_slot(session);
1286 count += ccp_auth_slot(session);
1288 case CCP_CMD_COMBINED:
1289 count = ccp_aead_slot(session);
1292 CCP_LOG_ERR("Unsupported cmd_id");
/*
 * ccp_perform_passthru() - build one PASSTHRU-engine descriptor in the
 * queue that copies pst->len bytes either from host (system) memory into
 * the CCP LSB scratch area, or from the LSB back out to host memory
 * (direction selected by a condition elided from this view).  Advances
 * the queue index but does not ring the queue doorbell itself.
 */
1300 ccp_perform_passthru(struct ccp_passthru *pst,
1301 struct ccp_queue *cmd_q)
1303 struct ccp_desc *desc;
1304 union ccp_function function;
1306 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1308 CCP_CMD_ENGINE(desc) = CCP_ENGINE_PASSTHRU;
1310 CCP_CMD_SOC(desc) = 0;
1311 CCP_CMD_IOC(desc) = 0;
1312 CCP_CMD_INIT(desc) = 0;
1313 CCP_CMD_EOM(desc) = 0;
1314 CCP_CMD_PROT(desc) = 0;
1317 CCP_PT_BYTESWAP(&function) = pst->byte_swap;
1318 CCP_PT_BITWISE(&function) = pst->bit_mod;
1319 CCP_CMD_FUNCTION(desc) = function.raw;
1321 CCP_CMD_LEN(desc) = pst->len;
/* Host -> LSB direction: source in system memory, destination in SB. */
1324 CCP_CMD_SRC_LO(desc) = (uint32_t)(pst->src_addr);
1325 CCP_CMD_SRC_HI(desc) = high32_value(pst->src_addr);
1326 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1328 CCP_CMD_DST_LO(desc) = (uint32_t)(pst->dest_addr);
1329 CCP_CMD_DST_HI(desc) = 0;
1330 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SB;
/* Bitwise ops (AND/OR/XOR masks) take the key from the queue's key slot. */
1332 if (pst->bit_mod != CCP_PASSTHRU_BITWISE_NOOP)
1333 CCP_CMD_LSB_ID(desc) = cmd_q->sb_key;
/* LSB -> host direction: source in SB, destination in system memory. */
1336 CCP_CMD_SRC_LO(desc) = (uint32_t)(pst->src_addr);
1337 CCP_CMD_SRC_HI(desc) = 0;
1338 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SB;
1340 CCP_CMD_DST_LO(desc) = (uint32_t)(pst->dest_addr);
1341 CCP_CMD_DST_HI(desc) = high32_value(pst->dest_addr);
1342 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1345 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
/*
 * ccp_perform_hmac() - run a full two-pass HMAC on the CCP SHA engine:
 *   1. passthrough the precomputed PHash1 = H(key ^ ipad) into the LSB,
 *   2. SHA over the message (bit length accounts for the ipad block),
 *   3. pull the intermediate hash back to host memory (two SB slots
 *      for SHA-384/512, one otherwise),
 *   4. passthrough PHash2 = H(key ^ opad) into the LSB,
 *   5. SHA over the intermediate hash,
 *   6. passthrough the final digest out to host memory.
 * The digest scratch space is appended to m_src via rte_pktmbuf_append().
 * NOTE(review): append_ptr is used without a visible NULL check here,
 * unlike the SHA3 paths below — confirm upstream.
 */
1349 ccp_perform_hmac(struct rte_crypto_op *op,
1350 struct ccp_queue *cmd_q)
1353 struct ccp_session *session;
1354 union ccp_function function;
1355 struct ccp_desc *desc;
1357 phys_addr_t src_addr, dest_addr, dest_addr_t;
1358 struct ccp_passthru pst;
1359 uint64_t auth_msg_bits;
1363 session = (struct ccp_session *)get_session_private_data(
1365 ccp_cryptodev_driver_id);
1366 addr = session->auth.pre_compute;
1368 src_addr = rte_pktmbuf_mtophys_offset(op->sym->m_src,
1369 op->sym->auth.data.offset);
1370 append_ptr = (void *)rte_pktmbuf_append(op->sym->m_src,
1371 session->auth.ctx_len);
1372 dest_addr = (phys_addr_t)rte_mem_virt2phy(append_ptr);
1373 dest_addr_t = dest_addr;
1375 /** Load PHash1 to LSB*/
1376 pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *)addr);
1377 pst.dest_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1378 pst.len = session->auth.ctx_len;
1380 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1381 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1382 ccp_perform_passthru(&pst, cmd_q);
1384 /**sha engine command descriptor for IntermediateHash*/
1386 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1387 memset(desc, 0, Q_DESC_SIZE);
1389 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
1391 CCP_CMD_SOC(desc) = 0;
1392 CCP_CMD_IOC(desc) = 0;
1393 CCP_CMD_INIT(desc) = 1;
1394 CCP_CMD_EOM(desc) = 1;
1395 CCP_CMD_PROT(desc) = 0;
1398 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
1399 CCP_CMD_FUNCTION(desc) = function.raw;
1401 CCP_CMD_LEN(desc) = op->sym->auth.data.length;
/* Message bit count includes one block of ipad already hashed into PHash1. */
1402 auth_msg_bits = (op->sym->auth.data.length +
1403 session->auth.block_size) * 8;
1405 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
1406 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
1407 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1409 CCP_CMD_LSB_ID(desc) = cmd_q->sb_sha;
1410 CCP_CMD_SHA_LO(desc) = ((uint32_t)auth_msg_bits);
1411 CCP_CMD_SHA_HI(desc) = high32_value(auth_msg_bits);
1413 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
/* Ring the doorbell: publish new tail and keep the queue running. */
1417 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1418 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1419 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1420 cmd_q->qcontrol | CMD_Q_RUN);
1422 /* Intermediate Hash value retrieve */
1423 if ((session->auth.ut.sha_type == CCP_SHA_TYPE_384) ||
1424 (session->auth.ut.sha_type == CCP_SHA_TYPE_512)) {
/* 384/512-bit state spans two SB slots; copy high slot then low slot. */
1427 (phys_addr_t)((cmd_q->sb_sha + 1) * CCP_SB_BYTES);
1428 pst.dest_addr = dest_addr_t;
1429 pst.len = CCP_SB_BYTES;
1431 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1432 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1433 ccp_perform_passthru(&pst, cmd_q);
1435 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1436 pst.dest_addr = dest_addr_t + CCP_SB_BYTES;
1437 pst.len = CCP_SB_BYTES;
1439 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1440 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1441 ccp_perform_passthru(&pst, cmd_q);
/* <= 256-bit state fits one passthrough of ctx_len bytes. */
1444 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1445 pst.dest_addr = dest_addr_t;
1446 pst.len = session->auth.ctx_len;
1448 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1449 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1450 ccp_perform_passthru(&pst, cmd_q);
1454 /** Load PHash2 to LSB*/
1455 addr += session->auth.ctx_len;
1456 pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *)addr);
1457 pst.dest_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1458 pst.len = session->auth.ctx_len;
1460 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1461 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1462 ccp_perform_passthru(&pst, cmd_q);
1464 /**sha engine command descriptor for FinalHash*/
1465 dest_addr_t += session->auth.offset;
1467 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1468 memset(desc, 0, Q_DESC_SIZE);
1470 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
1472 CCP_CMD_SOC(desc) = 0;
1473 CCP_CMD_IOC(desc) = 0;
1474 CCP_CMD_INIT(desc) = 1;
1475 CCP_CMD_EOM(desc) = 1;
1476 CCP_CMD_PROT(desc) = 0;
1479 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
1480 CCP_CMD_FUNCTION(desc) = function.raw;
/* Second pass hashes only the digest-sized portion of the retrieved state. */
1482 CCP_CMD_LEN(desc) = (session->auth.ctx_len -
1483 session->auth.offset);
1484 auth_msg_bits = (session->auth.block_size +
1485 session->auth.ctx_len -
1486 session->auth.offset) * 8;
1488 CCP_CMD_SRC_LO(desc) = (uint32_t)(dest_addr_t);
1489 CCP_CMD_SRC_HI(desc) = high32_value(dest_addr_t);
1490 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1492 CCP_CMD_LSB_ID(desc) = cmd_q->sb_sha;
1493 CCP_CMD_SHA_LO(desc) = ((uint32_t)auth_msg_bits);
1494 CCP_CMD_SHA_HI(desc) = high32_value(auth_msg_bits);
1496 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
/* Ring the doorbell for the final-hash descriptor. */
1500 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1501 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1502 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1503 cmd_q->qcontrol | CMD_Q_RUN);
1505 /* Retrieve hmac output */
1506 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1507 pst.dest_addr = dest_addr;
1508 pst.len = session->auth.ctx_len;
1510 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
/* 384/512 output is copied as-is; smaller digests are byte-swapped. */
1511 if ((session->auth.ut.sha_type == CCP_SHA_TYPE_384) ||
1512 (session->auth.ut.sha_type == CCP_SHA_TYPE_512))
1513 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1515 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1516 ccp_perform_passthru(&pst, cmd_q);
/* Completion and digest copy-back happen later in the poll path. */
1518 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
/*
 * ccp_perform_sha() - plain (non-HMAC) SHA digest on the CCP SHA engine:
 * passthrough the initial SHA context into the LSB, run one SHA
 * descriptor over the message, then passthrough the resulting digest
 * back to scratch space appended to m_src.
 */
1524 ccp_perform_sha(struct rte_crypto_op *op,
1525 struct ccp_queue *cmd_q)
1527 struct ccp_session *session;
1528 union ccp_function function;
1529 struct ccp_desc *desc;
1531 phys_addr_t src_addr, dest_addr;
1532 struct ccp_passthru pst;
1534 uint64_t auth_msg_bits;
1536 session = (struct ccp_session *)get_session_private_data(
1538 ccp_cryptodev_driver_id);
1540 src_addr = rte_pktmbuf_mtophys_offset(op->sym->m_src,
1541 op->sym->auth.data.offset);
1543 append_ptr = (void *)rte_pktmbuf_append(op->sym->m_src,
1544 session->auth.ctx_len);
1545 dest_addr = (phys_addr_t)rte_mem_virt2phy(append_ptr);
1547 /** Passthru sha context*/
1549 pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *)
1551 pst.dest_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1552 pst.len = session->auth.ctx_len;
1554 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1555 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1556 ccp_perform_passthru(&pst, cmd_q);
1558 /**prepare sha command descriptor*/
1560 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1561 memset(desc, 0, Q_DESC_SIZE);
1563 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
1565 CCP_CMD_SOC(desc) = 0;
1566 CCP_CMD_IOC(desc) = 0;
1567 CCP_CMD_INIT(desc) = 1;
1568 CCP_CMD_EOM(desc) = 1;
1569 CCP_CMD_PROT(desc) = 0;
1572 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
1573 CCP_CMD_FUNCTION(desc) = function.raw;
1575 CCP_CMD_LEN(desc) = op->sym->auth.data.length;
/* Total message length in bits, required by the SHA engine for padding. */
1576 auth_msg_bits = op->sym->auth.data.length * 8;
1578 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
1579 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
1580 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1582 CCP_CMD_LSB_ID(desc) = cmd_q->sb_sha;
1583 CCP_CMD_SHA_LO(desc) = ((uint32_t)auth_msg_bits);
1584 CCP_CMD_SHA_HI(desc) = high32_value(auth_msg_bits);
1586 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
/* Ring the doorbell: publish new tail and keep the queue running. */
1590 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1591 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1592 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1593 cmd_q->qcontrol | CMD_Q_RUN);
1595 /* Hash value retrieve */
1596 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1597 pst.dest_addr = dest_addr;
1598 pst.len = session->auth.ctx_len;
1600 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
/* 384/512 digest is copied as-is; smaller digests are byte-swapped. */
1601 if ((session->auth.ut.sha_type == CCP_SHA_TYPE_384) ||
1602 (session->auth.ut.sha_type == CCP_SHA_TYPE_512))
1603 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1605 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1606 ccp_perform_passthru(&pst, cmd_q);
/* Completion and digest copy-back happen later in the poll path. */
1608 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
/*
 * ccp_perform_sha3_hmac() - two-pass HMAC using the SHA3 engine.  The
 * first descriptor hashes the message with the ipad precompute context
 * (passed via the KEY pointer), landing the intermediate hash in the SB;
 * it is then retrieved to host memory and a second descriptor hashes it
 * with the opad context, writing the final digest directly to host
 * memory.  Scratch space is appended to m_src via rte_pktmbuf_append().
 */
1614 ccp_perform_sha3_hmac(struct rte_crypto_op *op,
1615 struct ccp_queue *cmd_q)
1617 struct ccp_session *session;
1618 struct ccp_passthru pst;
1619 union ccp_function function;
1620 struct ccp_desc *desc;
1621 uint8_t *append_ptr;
1623 phys_addr_t src_addr, dest_addr, ctx_paddr, dest_addr_t;
1625 session = (struct ccp_session *)get_session_private_data(
1627 ccp_cryptodev_driver_id);
1629 src_addr = rte_pktmbuf_mtophys_offset(op->sym->m_src,
1630 op->sym->auth.data.offset);
1631 append_ptr = (uint8_t *)rte_pktmbuf_append(op->sym->m_src,
1632 session->auth.ctx_len);
1634 CCP_LOG_ERR("CCP MBUF append failed\n");
1637 dest_addr = (phys_addr_t)rte_mem_virt2phy((void *)append_ptr);
/* Second half of the scratch area holds the intermediate hash. */
1638 dest_addr_t = dest_addr + (session->auth.ctx_len / 2);
1639 ctx_paddr = (phys_addr_t)rte_mem_virt2phy((void
1640 *)session->auth.pre_compute);
1641 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1642 memset(desc, 0, Q_DESC_SIZE);
1644 /*desc1 for SHA3-Ihash operation */
1645 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
1646 CCP_CMD_INIT(desc) = 1;
1647 CCP_CMD_EOM(desc) = 1;
1650 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
1651 CCP_CMD_FUNCTION(desc) = function.raw;
1652 CCP_CMD_LEN(desc) = op->sym->auth.data.length;
1654 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
1655 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
1656 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
/* Intermediate hash is written into the queue's SB slot. */
1658 CCP_CMD_DST_LO(desc) = (cmd_q->sb_sha * CCP_SB_BYTES);
1659 CCP_CMD_DST_HI(desc) = 0;
1660 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SB;
/* SHA3 engine takes the precomputed (ipad) context via the KEY pointer. */
1662 CCP_CMD_KEY_LO(desc) = ((uint32_t)ctx_paddr);
1663 CCP_CMD_KEY_HI(desc) = high32_value(ctx_paddr);
1664 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1666 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
/* Ring the doorbell for the Ihash descriptor. */
1669 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1670 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1671 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1672 cmd_q->qcontrol | CMD_Q_RUN);
1674 /* Intermediate Hash value retrieve */
1675 if ((session->auth.ut.sha_type == CCP_SHA3_TYPE_384) ||
1676 (session->auth.ut.sha_type == CCP_SHA3_TYPE_512)) {
/* 384/512-bit state spans two SB slots; copy high slot then low slot. */
1679 (phys_addr_t)((cmd_q->sb_sha + 1) * CCP_SB_BYTES);
1680 pst.dest_addr = dest_addr_t;
1681 pst.len = CCP_SB_BYTES;
1683 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1684 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1685 ccp_perform_passthru(&pst, cmd_q);
1687 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1688 pst.dest_addr = dest_addr_t + CCP_SB_BYTES;
1689 pst.len = CCP_SB_BYTES;
1691 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1692 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1693 ccp_perform_passthru(&pst, cmd_q);
1696 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1697 pst.dest_addr = dest_addr_t;
1698 pst.len = CCP_SB_BYTES;
1700 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1701 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1702 ccp_perform_passthru(&pst, cmd_q);
1705 /**sha engine command descriptor for FinalHash*/
/* Advance to the opad precompute context for the outer hash. */
1706 ctx_paddr += CCP_SHA3_CTX_SIZE;
1707 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1708 memset(desc, 0, Q_DESC_SIZE);
1710 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
1711 CCP_CMD_INIT(desc) = 1;
1712 CCP_CMD_EOM(desc) = 1;
1715 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
1716 CCP_CMD_FUNCTION(desc) = function.raw;
/* Digest lands right-aligned inside the SB slots; offset the source
 * pointer so only the digest bytes feed the final hash. */
1718 if (session->auth.ut.sha_type == CCP_SHA3_TYPE_224) {
1719 dest_addr_t += (CCP_SB_BYTES - SHA224_DIGEST_SIZE);
1720 CCP_CMD_LEN(desc) = SHA224_DIGEST_SIZE;
1721 } else if (session->auth.ut.sha_type == CCP_SHA3_TYPE_256) {
1722 CCP_CMD_LEN(desc) = SHA256_DIGEST_SIZE;
1723 } else if (session->auth.ut.sha_type == CCP_SHA3_TYPE_384) {
1724 dest_addr_t += (2 * CCP_SB_BYTES - SHA384_DIGEST_SIZE);
1725 CCP_CMD_LEN(desc) = SHA384_DIGEST_SIZE;
1727 CCP_CMD_LEN(desc) = SHA512_DIGEST_SIZE;
1730 CCP_CMD_SRC_LO(desc) = ((uint32_t)dest_addr_t);
1731 CCP_CMD_SRC_HI(desc) = high32_value(dest_addr_t);
1732 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
/* Final HMAC digest is written directly to host memory. */
1734 CCP_CMD_DST_LO(desc) = (uint32_t)dest_addr;
1735 CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
1736 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1738 CCP_CMD_KEY_LO(desc) = ((uint32_t)ctx_paddr);
1739 CCP_CMD_KEY_HI(desc) = high32_value(ctx_paddr);
1740 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1742 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
/* Ring the doorbell for the final-hash descriptor. */
1745 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1746 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1747 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1748 cmd_q->qcontrol | CMD_Q_RUN);
1750 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
/*
 * ccp_perform_sha3() - plain SHA3 digest: a single SHA-engine descriptor
 * that reads the message from system memory, takes the initial SHA3
 * context through the KEY pointer, and writes the digest directly to
 * scratch space appended to m_src (no LSB round-trip needed).
 */
1755 ccp_perform_sha3(struct rte_crypto_op *op,
1756 struct ccp_queue *cmd_q)
1758 struct ccp_session *session;
1759 union ccp_function function;
1760 struct ccp_desc *desc;
1761 uint8_t *ctx_addr, *append_ptr;
1763 phys_addr_t src_addr, dest_addr, ctx_paddr;
1765 session = (struct ccp_session *)get_session_private_data(
1767 ccp_cryptodev_driver_id);
1769 src_addr = rte_pktmbuf_mtophys_offset(op->sym->m_src,
1770 op->sym->auth.data.offset);
1771 append_ptr = (uint8_t *)rte_pktmbuf_append(op->sym->m_src,
1772 session->auth.ctx_len);
1774 CCP_LOG_ERR("CCP MBUF append failed\n");
1777 dest_addr = (phys_addr_t)rte_mem_virt2phy((void *)append_ptr);
1778 ctx_addr = session->auth.sha3_ctx;
1779 ctx_paddr = (phys_addr_t)rte_mem_virt2phy((void *)ctx_addr);
1781 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1782 memset(desc, 0, Q_DESC_SIZE);
1784 /* prepare desc for SHA3 operation */
1785 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
1786 CCP_CMD_INIT(desc) = 1;
1787 CCP_CMD_EOM(desc) = 1;
1790 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
1791 CCP_CMD_FUNCTION(desc) = function.raw;
1793 CCP_CMD_LEN(desc) = op->sym->auth.data.length;
1795 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
1796 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
1797 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1799 CCP_CMD_DST_LO(desc) = ((uint32_t)dest_addr);
1800 CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
1801 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;
/* SHA3 initial context is supplied via the KEY pointer. */
1803 CCP_CMD_KEY_LO(desc) = ((uint32_t)ctx_paddr);
1804 CCP_CMD_KEY_HI(desc) = high32_value(ctx_paddr);
1805 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1807 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
/* Ring the doorbell: publish new tail and keep the queue running. */
1811 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1812 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1813 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1814 cmd_q->qcontrol | CMD_Q_RUN);
1816 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
/*
 * ccp_perform_aes_cmac() - AES-CMAC on the CCP AES engine.  Two paths:
 * block-aligned input runs a single CMAC descriptor with a zero IV;
 * non-aligned input runs the aligned prefix first, then pads the last
 * partial block (0x80 pad byte) in scratch memory appended to m_src and
 * runs it as a final one-block descriptor.  The MAC is then retrieved
 * from the LSB IV slot back to host memory.
 */
1821 ccp_perform_aes_cmac(struct rte_crypto_op *op,
1822 struct ccp_queue *cmd_q)
1824 struct ccp_session *session;
1825 union ccp_function function;
1826 struct ccp_passthru pst;
1827 struct ccp_desc *desc;
1829 uint8_t *src_tb, *append_ptr, *ctx_addr;
1830 phys_addr_t src_addr, dest_addr, key_addr;
1831 int length, non_align_len;
1833 session = (struct ccp_session *)get_session_private_data(
1835 ccp_cryptodev_driver_id);
1836 key_addr = rte_mem_virt2phy(session->auth.key_ccp);
1838 src_addr = rte_pktmbuf_mtophys_offset(op->sym->m_src,
1839 op->sym->auth.data.offset);
1840 append_ptr = (uint8_t *)rte_pktmbuf_append(op->sym->m_src,
1841 session->auth.ctx_len);
1842 dest_addr = (phys_addr_t)rte_mem_virt2phy((void *)append_ptr);
1845 CCP_AES_ENCRYPT(&function) = CCP_CIPHER_DIR_ENCRYPT;
1846 CCP_AES_MODE(&function) = session->auth.um.aes_mode;
1847 CCP_AES_TYPE(&function) = session->auth.ut.aes_type;
/* Aligned path: whole message is a multiple of the AES block size. */
1849 if (op->sym->auth.data.length % session->auth.block_size == 0) {
/* Zero IV staged into the LSB before the CMAC op. */
1851 ctx_addr = session->auth.pre_compute;
1852 memset(ctx_addr, 0, AES_BLOCK_SIZE);
1853 pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *)ctx_addr);
1854 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
1855 pst.len = CCP_SB_BYTES;
1857 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1858 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1859 ccp_perform_passthru(&pst, cmd_q);
1861 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1862 memset(desc, 0, Q_DESC_SIZE);
1864 /* prepare desc for aes-cmac command */
1865 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
1866 CCP_CMD_EOM(desc) = 1;
1867 CCP_CMD_FUNCTION(desc) = function.raw;
1869 CCP_CMD_LEN(desc) = op->sym->auth.data.length;
1870 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
1871 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
1872 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1874 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
1875 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
1876 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1877 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
1879 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
/* Ring the doorbell for the single aligned-path descriptor. */
1884 (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1885 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1886 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1887 cmd_q->qcontrol | CMD_Q_RUN);
/* Non-aligned path: zero IV from the second precompute slot. */
1889 ctx_addr = session->auth.pre_compute + CCP_SB_BYTES;
1890 memset(ctx_addr, 0, AES_BLOCK_SIZE);
1891 pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *)ctx_addr);
1892 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
1893 pst.len = CCP_SB_BYTES;
1895 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1896 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1897 ccp_perform_passthru(&pst, cmd_q);
/* Split the message into a block-aligned prefix and the remainder. */
1899 length = (op->sym->auth.data.length / AES_BLOCK_SIZE);
1900 length *= AES_BLOCK_SIZE;
1901 non_align_len = op->sym->auth.data.length - length;
1902 /* prepare desc for aes-cmac command */
1904 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1905 memset(desc, 0, Q_DESC_SIZE);
1907 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
1908 CCP_CMD_INIT(desc) = 1;
1909 CCP_CMD_FUNCTION(desc) = function.raw;
1911 CCP_CMD_LEN(desc) = length;
1912 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
1913 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
1914 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1916 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
1917 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
1918 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1919 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
1921 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
/* Copy the partial tail block into scratch and pad it (0x80 marker). */
1924 append_ptr = append_ptr + CCP_SB_BYTES;
1925 memset(append_ptr, 0, AES_BLOCK_SIZE);
1926 src_tb = rte_pktmbuf_mtod_offset(op->sym->m_src,
1928 op->sym->auth.data.offset +
1930 rte_memcpy(append_ptr, src_tb, non_align_len);
1931 append_ptr[non_align_len] = CMAC_PAD_VALUE;
1933 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1934 memset(desc, 0, Q_DESC_SIZE);
/* Final one-block descriptor over the padded tail. */
1936 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
1937 CCP_CMD_EOM(desc) = 1;
1938 CCP_CMD_FUNCTION(desc) = function.raw;
1939 CCP_CMD_LEN(desc) = AES_BLOCK_SIZE;
1941 CCP_CMD_SRC_LO(desc) = ((uint32_t)(dest_addr + CCP_SB_BYTES));
1942 CCP_CMD_SRC_HI(desc) = high32_value(dest_addr + CCP_SB_BYTES);
1943 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1945 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
1946 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
1947 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1948 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
1950 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
/* Ring the doorbell for the non-aligned path descriptors. */
1954 (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1955 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1956 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1957 cmd_q->qcontrol | CMD_Q_RUN);
1959 /* Retrieve result */
1960 pst.dest_addr = dest_addr;
1961 pst.src_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
1962 pst.len = CCP_SB_BYTES;
1964 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1965 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1966 ccp_perform_passthru(&pst, cmd_q);
1968 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
/*
 * ccp_perform_aes() - enqueue one AES cipher descriptor (CBC/CTR/ECB).
 * For IV-bearing modes the IV is first staged into the LSB via a
 * passthrough: CTR uses the session nonce buffer, CBC a per-batch lsb_buf
 * slot.  In-place operation when m_dst is NULL.  Note: no doorbell write
 * here — presumably the caller batches descriptors and rings the tail
 * once (TODO confirm).
 */
1973 ccp_perform_aes(struct rte_crypto_op *op,
1974 struct ccp_queue *cmd_q,
1975 struct ccp_batch_info *b_info)
1977 struct ccp_session *session;
1978 union ccp_function function;
1980 struct ccp_passthru pst = {0};
1981 struct ccp_desc *desc;
1982 phys_addr_t src_addr, dest_addr, key_addr;
1985 session = (struct ccp_session *)get_session_private_data(
1987 ccp_cryptodev_driver_id);
1990 iv = rte_crypto_op_ctod_offset(op, uint8_t *, session->iv.offset);
1991 if (session->cipher.um.aes_mode != CCP_AES_MODE_ECB) {
1992 if (session->cipher.um.aes_mode == CCP_AES_MODE_CTR) {
/* CTR: IV follows the nonce in the session's nonce buffer. */
1993 rte_memcpy(session->cipher.nonce + AES_BLOCK_SIZE,
1994 iv, session->iv.length);
1995 pst.src_addr = (phys_addr_t)session->cipher.nonce_phys;
1996 CCP_AES_SIZE(&function) = 0x1F;
/* CBC: right-align the IV in a per-batch LSB staging slot. */
1999 &(b_info->lsb_buf[b_info->lsb_buf_idx*CCP_SB_BYTES]);
2000 rte_memcpy(lsb_buf +
2001 (CCP_SB_BYTES - session->iv.length),
2002 iv, session->iv.length);
2003 pst.src_addr = b_info->lsb_buf_phys +
2004 (b_info->lsb_buf_idx * CCP_SB_BYTES);
2005 b_info->lsb_buf_idx++;
2008 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2009 pst.len = CCP_SB_BYTES;
2011 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2012 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
2013 ccp_perform_passthru(&pst, cmd_q);
2016 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2018 src_addr = rte_pktmbuf_mtophys_offset(op->sym->m_src,
2019 op->sym->cipher.data.offset);
2020 if (likely(op->sym->m_dst != NULL))
2021 dest_addr = rte_pktmbuf_mtophys_offset(op->sym->m_dst,
2022 op->sym->cipher.data.offset);
2024 dest_addr = src_addr;
2025 key_addr = session->cipher.key_phys;
2027 /* prepare desc for aes command */
2028 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2029 CCP_CMD_INIT(desc) = 1;
2030 CCP_CMD_EOM(desc) = 1;
2032 CCP_AES_ENCRYPT(&function) = session->cipher.dir;
2033 CCP_AES_MODE(&function) = session->cipher.um.aes_mode;
2034 CCP_AES_TYPE(&function) = session->cipher.ut.aes_type;
2035 CCP_CMD_FUNCTION(desc) = function.raw;
2037 CCP_CMD_LEN(desc) = op->sym->cipher.data.length;
2039 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
2040 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
2041 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2043 CCP_CMD_DST_LO(desc) = ((uint32_t)dest_addr);
2044 CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
2045 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2047 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2048 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2049 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
/* ECB takes no IV, so only IV modes reference the LSB slot. */
2051 if (session->cipher.um.aes_mode != CCP_AES_MODE_ECB)
2052 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2054 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2055 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
/*
 * ccp_perform_3des() - enqueue one 3DES cipher descriptor.  Only CBC is
 * supported (CFB/ECB are rejected); the IV is right-aligned into a
 * per-batch lsb_buf slot and staged into the LSB via passthrough.
 * In-place operation when m_dst is NULL.  Rings the queue doorbell.
 */
2060 ccp_perform_3des(struct rte_crypto_op *op,
2061 struct ccp_queue *cmd_q,
2062 struct ccp_batch_info *b_info)
2064 struct ccp_session *session;
2065 union ccp_function function;
2066 unsigned char *lsb_buf;
2067 struct ccp_passthru pst;
2068 struct ccp_desc *desc;
2071 phys_addr_t src_addr, dest_addr, key_addr;
2073 session = (struct ccp_session *)get_session_private_data(
2075 ccp_cryptodev_driver_id);
2077 iv = rte_crypto_op_ctod_offset(op, uint8_t *, session->iv.offset);
2078 switch (session->cipher.um.des_mode) {
2079 case CCP_DES_MODE_CBC:
2080 lsb_buf = &(b_info->lsb_buf[b_info->lsb_buf_idx*CCP_SB_BYTES]);
2081 b_info->lsb_buf_idx++;
/* Right-align the IV within the 32-byte SB staging slot. */
2083 rte_memcpy(lsb_buf + (CCP_SB_BYTES - session->iv.length),
2084 iv, session->iv.length);
2086 pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *) lsb_buf);
2087 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2088 pst.len = CCP_SB_BYTES;
2090 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2091 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
2092 ccp_perform_passthru(&pst, cmd_q);
2094 case CCP_DES_MODE_CFB:
2095 case CCP_DES_MODE_ECB:
2096 CCP_LOG_ERR("Unsupported DES cipher mode");
2100 src_addr = rte_pktmbuf_mtophys_offset(op->sym->m_src,
2101 op->sym->cipher.data.offset);
2102 if (unlikely(op->sym->m_dst != NULL))
2104 rte_pktmbuf_mtophys_offset(op->sym->m_dst,
2105 op->sym->cipher.data.offset);
2107 dest_addr = src_addr;
2109 key_addr = rte_mem_virt2phy(session->cipher.key_ccp);
2111 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2113 memset(desc, 0, Q_DESC_SIZE);
2115 /* prepare desc for des command */
2116 CCP_CMD_ENGINE(desc) = CCP_ENGINE_3DES;
2118 CCP_CMD_SOC(desc) = 0;
2119 CCP_CMD_IOC(desc) = 0;
2120 CCP_CMD_INIT(desc) = 1;
2121 CCP_CMD_EOM(desc) = 1;
2122 CCP_CMD_PROT(desc) = 0;
2125 CCP_DES_ENCRYPT(&function) = session->cipher.dir;
2126 CCP_DES_MODE(&function) = session->cipher.um.des_mode;
2127 CCP_DES_TYPE(&function) = session->cipher.ut.des_type;
2128 CCP_CMD_FUNCTION(desc) = function.raw;
2130 CCP_CMD_LEN(desc) = op->sym->cipher.data.length;
2132 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
2133 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
2134 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2136 CCP_CMD_DST_LO(desc) = ((uint32_t)dest_addr);
2137 CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
2138 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2140 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2141 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2142 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
/* Truthiness test: presumably relies on non-IV modes enumerating to 0 —
 * TODO confirm against the CCP_DES_MODE_* enum values. */
2144 if (session->cipher.um.des_mode)
2145 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2147 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2151 /* Write the new tail address back to the queue register */
2152 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2153 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2154 /* Turn the queue back on using our cached control register */
2155 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2156 cmd_q->qcontrol | CMD_Q_RUN);
2158 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
/*
 * ccp_perform_aes_gcm() - AES-GCM as a five-descriptor CCP sequence:
 *   CMD1: passthrough nonce||IV into the LSB,
 *   CMD2: GHASH over the AAD,
 *   CMD3: GCTR over the (block-aligned) plaintext,
 *   CMD4: passthrough to reload the IV,
 *   CMD5: GHASH-final over the length block (AAD_len || PT_len),
 * producing the tag at the digest address.  The big-endian bit lengths
 * of AAD and plaintext are written just past the digest as the GHASH
 * length block.  In-place operation when m_dst is NULL.
 */
2163 ccp_perform_aes_gcm(struct rte_crypto_op *op, struct ccp_queue *cmd_q)
2165 struct ccp_session *session;
2166 union ccp_function function;
2168 struct ccp_passthru pst;
2169 struct ccp_desc *desc;
2172 phys_addr_t src_addr, dest_addr, key_addr, aad_addr;
2173 phys_addr_t digest_dest_addr;
2174 int length, non_align_len;
2176 session = (struct ccp_session *)get_session_private_data(
2178 ccp_cryptodev_driver_id);
2179 iv = rte_crypto_op_ctod_offset(op, uint8_t *, session->iv.offset);
2180 key_addr = session->cipher.key_phys;
2182 src_addr = rte_pktmbuf_mtophys_offset(op->sym->m_src,
2183 op->sym->aead.data.offset);
2184 if (unlikely(op->sym->m_dst != NULL))
2185 dest_addr = rte_pktmbuf_mtophys_offset(op->sym->m_dst,
2186 op->sym->aead.data.offset);
2188 dest_addr = src_addr;
2189 rte_pktmbuf_append(op->sym->m_src, session->auth.ctx_len);
2190 digest_dest_addr = op->sym->aead.digest.phys_addr;
/* Build the GHASH length block (bit lengths, big-endian) after the tag. */
2191 temp = (uint64_t *)(op->sym->aead.digest.data + AES_BLOCK_SIZE);
2192 *temp++ = rte_bswap64(session->auth.aad_length << 3);
2193 *temp = rte_bswap64(op->sym->aead.data.length << 3);
2195 non_align_len = op->sym->aead.data.length % AES_BLOCK_SIZE;
2196 length = CCP_ALIGN(op->sym->aead.data.length, AES_BLOCK_SIZE);
2198 aad_addr = op->sym->aead.aad.phys_addr;
2200 /* CMD1 IV Passthru */
2201 rte_memcpy(session->cipher.nonce + AES_BLOCK_SIZE, iv,
2202 session->iv.length);
2203 pst.src_addr = session->cipher.nonce_phys;
2204 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2205 pst.len = CCP_SB_BYTES;
2207 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2208 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
2209 ccp_perform_passthru(&pst, cmd_q);
2211 /* CMD2 GHASH-AAD */
2213 CCP_AES_ENCRYPT(&function) = CCP_AES_MODE_GHASH_AAD;
2214 CCP_AES_MODE(&function) = CCP_AES_MODE_GHASH;
2215 CCP_AES_TYPE(&function) = session->cipher.ut.aes_type;
2217 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2218 memset(desc, 0, Q_DESC_SIZE);
2220 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2221 CCP_CMD_INIT(desc) = 1;
2222 CCP_CMD_FUNCTION(desc) = function.raw;
2224 CCP_CMD_LEN(desc) = session->auth.aad_length;
2226 CCP_CMD_SRC_LO(desc) = ((uint32_t)aad_addr);
2227 CCP_CMD_SRC_HI(desc) = high32_value(aad_addr);
2228 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2230 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2231 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2232 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2234 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2236 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
/* Ring the doorbell for the GHASH-AAD descriptor. */
2239 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2240 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2241 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2242 cmd_q->qcontrol | CMD_Q_RUN);
2244 /* CMD3 : GCTR Plain text */
2246 CCP_AES_ENCRYPT(&function) = session->cipher.dir;
2247 CCP_AES_MODE(&function) = CCP_AES_MODE_GCTR;
2248 CCP_AES_TYPE(&function) = session->cipher.ut.aes_type;
/* AES_SIZE encodes the final-block bit count minus one. */
2249 if (non_align_len == 0)
2250 CCP_AES_SIZE(&function) = (AES_BLOCK_SIZE << 3) - 1;
2252 CCP_AES_SIZE(&function) = (non_align_len << 3) - 1;
2255 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2256 memset(desc, 0, Q_DESC_SIZE);
2258 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2259 CCP_CMD_EOM(desc) = 1;
2260 CCP_CMD_FUNCTION(desc) = function.raw;
2262 CCP_CMD_LEN(desc) = length;
2264 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
2265 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
2266 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2268 CCP_CMD_DST_LO(desc) = ((uint32_t)dest_addr);
2269 CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
/* NOTE(review): SRC_MEM is assigned a second time right after DST_LO/HI;
 * this looks like a copy-paste — CCP_CMD_DST_MEM was almost certainly
 * intended here.  Confirm against upstream before changing. */
2270 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2272 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2273 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2274 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2276 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2278 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
/* Ring the doorbell for the GCTR descriptor. */
2281 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2282 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2283 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2284 cmd_q->qcontrol | CMD_Q_RUN);
2286 /* CMD4 : PT to copy IV */
2287 pst.src_addr = session->cipher.nonce_phys;
2288 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2289 pst.len = AES_BLOCK_SIZE;
2291 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2292 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
2293 ccp_perform_passthru(&pst, cmd_q);
2295 /* CMD5 : GHASH-Final */
2297 CCP_AES_ENCRYPT(&function) = CCP_AES_MODE_GHASH_FINAL;
2298 CCP_AES_MODE(&function) = CCP_AES_MODE_GHASH;
2299 CCP_AES_TYPE(&function) = session->cipher.ut.aes_type;
2301 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2302 memset(desc, 0, Q_DESC_SIZE);
2304 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2305 CCP_CMD_FUNCTION(desc) = function.raw;
2306 /* Last block (AAD_len || PT_len)*/
2307 CCP_CMD_LEN(desc) = AES_BLOCK_SIZE;
2309 CCP_CMD_SRC_LO(desc) = ((uint32_t)digest_dest_addr + AES_BLOCK_SIZE);
2310 CCP_CMD_SRC_HI(desc) = high32_value(digest_dest_addr + AES_BLOCK_SIZE);
2311 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2313 CCP_CMD_DST_LO(desc) = ((uint32_t)digest_dest_addr);
2314 CCP_CMD_DST_HI(desc) = high32_value(digest_dest_addr);
/* NOTE(review): same suspected copy-paste as CMD3 above — SRC_MEM
 * reassigned where CCP_CMD_DST_MEM appears intended.  Confirm. */
2315 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2317 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2318 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2319 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2321 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2323 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
/* Ring the doorbell for the GHASH-final descriptor. */
2326 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2327 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2328 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2329 cmd_q->qcontrol | CMD_Q_RUN);
2331 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
2336 ccp_crypto_cipher(struct rte_crypto_op *op,
2337 struct ccp_queue *cmd_q,
2338 struct ccp_batch_info *b_info)
2341 struct ccp_session *session;
2343 session = (struct ccp_session *)get_session_private_data(
2345 ccp_cryptodev_driver_id);
2347 switch (session->cipher.algo) {
2348 case CCP_CIPHER_ALGO_AES_CBC:
2349 result = ccp_perform_aes(op, cmd_q, b_info);
2350 b_info->desccnt += 2;
2352 case CCP_CIPHER_ALGO_AES_CTR:
2353 result = ccp_perform_aes(op, cmd_q, b_info);
2354 b_info->desccnt += 2;
2356 case CCP_CIPHER_ALGO_AES_ECB:
2357 result = ccp_perform_aes(op, cmd_q, b_info);
2358 b_info->desccnt += 1;
2360 case CCP_CIPHER_ALGO_3DES_CBC:
2361 result = ccp_perform_3des(op, cmd_q, b_info);
2362 b_info->desccnt += 2;
2365 CCP_LOG_ERR("Unsupported cipher algo %d",
2366 session->cipher.algo);
2373 ccp_crypto_auth(struct rte_crypto_op *op,
2374 struct ccp_queue *cmd_q,
2375 struct ccp_batch_info *b_info)
2379 struct ccp_session *session;
2381 session = (struct ccp_session *)get_session_private_data(
2383 ccp_cryptodev_driver_id);
2385 switch (session->auth.algo) {
2386 case CCP_AUTH_ALGO_SHA1:
2387 case CCP_AUTH_ALGO_SHA224:
2388 case CCP_AUTH_ALGO_SHA256:
2389 case CCP_AUTH_ALGO_SHA384:
2390 case CCP_AUTH_ALGO_SHA512:
2391 result = ccp_perform_sha(op, cmd_q);
2392 b_info->desccnt += 3;
2394 case CCP_AUTH_ALGO_SHA1_HMAC:
2395 case CCP_AUTH_ALGO_SHA224_HMAC:
2396 case CCP_AUTH_ALGO_SHA256_HMAC:
2397 result = ccp_perform_hmac(op, cmd_q);
2398 b_info->desccnt += 6;
2400 case CCP_AUTH_ALGO_SHA384_HMAC:
2401 case CCP_AUTH_ALGO_SHA512_HMAC:
2402 result = ccp_perform_hmac(op, cmd_q);
2403 b_info->desccnt += 7;
2405 case CCP_AUTH_ALGO_SHA3_224:
2406 case CCP_AUTH_ALGO_SHA3_256:
2407 case CCP_AUTH_ALGO_SHA3_384:
2408 case CCP_AUTH_ALGO_SHA3_512:
2409 result = ccp_perform_sha3(op, cmd_q);
2410 b_info->desccnt += 1;
2412 case CCP_AUTH_ALGO_SHA3_224_HMAC:
2413 case CCP_AUTH_ALGO_SHA3_256_HMAC:
2414 result = ccp_perform_sha3_hmac(op, cmd_q);
2415 b_info->desccnt += 3;
2417 case CCP_AUTH_ALGO_SHA3_384_HMAC:
2418 case CCP_AUTH_ALGO_SHA3_512_HMAC:
2419 result = ccp_perform_sha3_hmac(op, cmd_q);
2420 b_info->desccnt += 4;
2422 case CCP_AUTH_ALGO_AES_CMAC:
2423 result = ccp_perform_aes_cmac(op, cmd_q);
2424 b_info->desccnt += 4;
2427 CCP_LOG_ERR("Unsupported auth algo %d",
2428 session->auth.algo);
2436 ccp_crypto_aead(struct rte_crypto_op *op,
2437 struct ccp_queue *cmd_q,
2438 struct ccp_batch_info *b_info)
2441 struct ccp_session *session;
2443 session = (struct ccp_session *)get_session_private_data(
2445 ccp_cryptodev_driver_id);
2447 switch (session->auth.algo) {
2448 case CCP_AUTH_ALGO_AES_GCM:
2449 if (session->cipher.algo != CCP_CIPHER_ALGO_AES_GCM) {
2450 CCP_LOG_ERR("Incorrect chain order");
2453 result = ccp_perform_aes_gcm(op, cmd_q);
2454 b_info->desccnt += 5;
2457 CCP_LOG_ERR("Unsupported aead algo %d",
2458 session->aead_algo);
/*
 * Build and submit one batch of crypto ops to the CCP hardware queue.
 *
 * A ccp_batch_info record is taken from the qp mempool to track this
 * burst for the dequeue path: it remembers the command queue, the
 * head/tail descriptor offsets and the ops themselves.  Each op is
 * dispatched to the cipher/auth/aead submit helper chosen by the
 * session's cmd_id, then the new tail offset is written to the queue
 * register and the queue is (re)started with CMD_Q_RUN.
 *
 * NOTE(review): the trailing parameters (op count / requested slots)
 * and the end of this function fall outside the visible chunk —
 * confirm the full signature against the complete file.
 */
process_ops_to_enqueue(const struct ccp_qp *qp,
		       struct rte_crypto_op **op,
		       struct ccp_queue *cmd_q,
	struct ccp_batch_info *b_info;
	struct ccp_session *session;

	if (rte_mempool_get(qp->batch_mp, (void **)&b_info)) {
		CCP_LOG_ERR("batch info allocation failed");
	/* populate batch info necessary for dequeue */
	b_info->lsb_buf_idx = 0;
	b_info->desccnt = 0;
	b_info->cmd_q = cmd_q;
	b_info->lsb_buf_phys =
		(phys_addr_t)rte_mem_virt2phy((void *)b_info->lsb_buf);
	/* reserve the requested descriptor slots up front; partially
	 * unused slots are returned below on a submit failure
	 */
	rte_atomic64_sub(&b_info->cmd_q->free_slots, slots_req);

	/* head = offset of the first descriptor this batch occupies */
	b_info->head_offset = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx *
	for (i = 0; i < nb_ops; i++) {
		session = (struct ccp_session *)get_session_private_data(
				op[i]->sym->session,
				ccp_cryptodev_driver_id);
		/* dispatch by the session's configured command chain */
		switch (session->cmd_id) {
		case CCP_CMD_CIPHER:
			result = ccp_crypto_cipher(op[i], cmd_q, b_info);
			result = ccp_crypto_auth(op[i], cmd_q, b_info);
		case CCP_CMD_CIPHER_HASH:
			/* cipher first, then hash */
			result = ccp_crypto_cipher(op[i], cmd_q, b_info);
			result = ccp_crypto_auth(op[i], cmd_q, b_info);
		case CCP_CMD_HASH_CIPHER:
			/* hash first, then cipher */
			result = ccp_crypto_auth(op[i], cmd_q, b_info);
			result = ccp_crypto_cipher(op[i], cmd_q, b_info);
		case CCP_CMD_COMBINED:
			result = ccp_crypto_aead(op[i], cmd_q, b_info);
			CCP_LOG_ERR("Unsupported cmd_id");
		if (unlikely(result < 0)) {
			/* give back the slots this batch did not consume */
			rte_atomic64_add(&b_info->cmd_q->free_slots,
					 (slots_req - b_info->desccnt));
		b_info->op[i] = op[i];
	/* tail = offset just past the last descriptor queued */
	b_info->tail_offset = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx *
	/* Write the new tail address back to the queue register */
	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE,
		      b_info->tail_offset);
	/* Turn the queue back on using our cached control register */
	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
		      cmd_q->qcontrol | CMD_Q_RUN);
	/* hand the batch record to the dequeue side */
	rte_ring_enqueue(qp->processed_pkts, (void *)b_info);
/*
 * Post-process one completed auth/AEAD op at dequeue time.
 *
 * The CCP writes its hash context to a scratch area reserved at the
 * end of the last mbuf segment (session->auth.ctx_len bytes).  This
 * helper locates the digest inside that area, byte-swaps it for the
 * algorithms the engine emits big-endian, then either verifies it
 * against the caller-supplied digest (CCP_AUTH_OP_VERIFY) or copies
 * it out to the op's digest pointer, and finally trims the scratch
 * area off the mbuf.
 */
static inline void ccp_auth_dq_prepare(struct rte_crypto_op *op)
	struct ccp_session *session;
	uint8_t *digest_data, *addr;
	struct rte_mbuf *m_last;
	int offset, digest_offset;
	uint8_t digest_le[64];

	session = (struct ccp_session *)get_session_private_data(
			ccp_cryptodev_driver_id);

	/* AEAD and plain-auth ops keep digest info in different unions */
	if (session->cmd_id == CCP_CMD_COMBINED) {
		digest_data = op->sym->aead.digest.data;
		digest_offset = op->sym->aead.data.offset +
					op->sym->aead.data.length;
		digest_data = op->sym->auth.digest.data;
		digest_offset = op->sym->auth.data.offset +
					op->sym->auth.data.length;
	/* scratch context lives at the tail of the last segment */
	m_last = rte_pktmbuf_lastseg(op->sym->m_src);
	addr = (uint8_t *)((char *)m_last->buf_addr + m_last->data_off +
			   m_last->data_len - session->auth.ctx_len);

	offset = session->auth.offset;

	if (session->auth.engine == CCP_ENGINE_SHA)
		if ((session->auth.ut.sha_type != CCP_SHA_TYPE_1) &&
		    (session->auth.ut.sha_type != CCP_SHA_TYPE_224) &&
		    (session->auth.ut.sha_type != CCP_SHA_TYPE_256)) {
			/* All other algorithms require a byte-swapped
			 * copy of the digest (engine output is stored
			 * in reverse byte order)
			 */
			offset = session->auth.ctx_len -
				session->auth.offset - 1;
			for (i = 0; i < session->auth.digest_length; i++)
				digest_le[i] = addr[offset - i];

	op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
	if (session->auth.op == CCP_AUTH_OP_VERIFY) {
		if (memcmp(addr + offset, digest_data,
			   session->auth.digest_length) != 0)
			op->status = RTE_CRYPTO_OP_STATUS_AUTH_FAILED;
		/* generate path: fall back to in-mbuf digest location
		 * when the caller gave no digest pointer
		 */
		if (unlikely(digest_data == 0))
			digest_data = rte_pktmbuf_mtod_offset(
					op->sym->m_dst, uint8_t *,
		rte_memcpy(digest_data, addr + offset,
			   session->auth.digest_length);
	/* Trim area used for digest from mbuf. */
	rte_pktmbuf_trim(op->sym->m_src,
			 session->auth.ctx_len);
/*
 * Copy up to nb_ops completed ops out of a batch into the caller's
 * array, finalising each one according to its session cmd_id:
 * cipher-only ops are simply marked successful, anything involving
 * an auth stage goes through ccp_auth_dq_prepare() for digest
 * verify/copy.  Decrements b_info->opcnt by the number handed out.
 *
 * NOTE(review): the remaining parameter(s) and the return statement
 * fall outside the visible chunk — presumably returns min_ops.
 */
ccp_prepare_ops(struct rte_crypto_op **op_d,
		struct ccp_batch_info *b_info,
	struct ccp_session *session;

	min_ops = RTE_MIN(nb_ops, b_info->opcnt);

	for (i = 0; i < min_ops; i++) {
		/* hand out ops in submission order */
		op_d[i] = b_info->op[b_info->op_idx++];
		session = (struct ccp_session *)get_session_private_data(
				op_d[i]->sym->session,
				ccp_cryptodev_driver_id);
		switch (session->cmd_id) {
		case CCP_CMD_CIPHER:
			/* no digest to post-process */
			op_d[i]->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
		case CCP_CMD_CIPHER_HASH:
		case CCP_CMD_HASH_CIPHER:
		case CCP_CMD_COMBINED:
			/* digest verify/copy + mbuf trim */
			ccp_auth_dq_prepare(op_d[i]);
			CCP_LOG_ERR("Unsupported cmd_id");

	b_info->opcnt -= min_ops;
2644 process_ops_to_dequeue(struct ccp_qp *qp,
2645 struct rte_crypto_op **op,
2648 struct ccp_batch_info *b_info;
2649 uint32_t cur_head_offset;
2651 if (qp->b_info != NULL) {
2652 b_info = qp->b_info;
2653 if (unlikely(b_info->op_idx > 0))
2655 } else if (rte_ring_dequeue(qp->processed_pkts,
2658 cur_head_offset = CCP_READ_REG(b_info->cmd_q->reg_base,
2659 CMD_Q_HEAD_LO_BASE);
2661 if (b_info->head_offset < b_info->tail_offset) {
2662 if ((cur_head_offset >= b_info->head_offset) &&
2663 (cur_head_offset < b_info->tail_offset)) {
2664 qp->b_info = b_info;
2668 if ((cur_head_offset >= b_info->head_offset) ||
2669 (cur_head_offset < b_info->tail_offset)) {
2670 qp->b_info = b_info;
2677 nb_ops = ccp_prepare_ops(op, b_info, nb_ops);
2678 rte_atomic64_add(&b_info->cmd_q->free_slots, b_info->desccnt);
2679 b_info->desccnt = 0;
2680 if (b_info->opcnt > 0) {
2681 qp->b_info = b_info;
2683 rte_mempool_put(qp->batch_mp, (void *)b_info);