1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2018 Advanced Micro Devices, Inc. All rights reserved.
10 #include <sys/queue.h>
11 #include <sys/types.h>
13 #include <openssl/sha.h>
14 #include <openssl/cmac.h> /*sub key apis*/
15 #include <openssl/evp.h> /*sub key apis*/
17 #include <rte_hexdump.h>
18 #include <rte_memzone.h>
19 #include <rte_malloc.h>
20 #include <rte_memory.h>
21 #include <rte_spinlock.h>
22 #include <rte_string_fns.h>
23 #include <rte_cryptodev_pmd.h>
26 #include "ccp_crypto.h"
28 #include "ccp_pmd_private.h"
30 #include <openssl/conf.h>
31 #include <openssl/err.h>
32 #include <openssl/hmac.h>
34 /* SHA initial context values */
35 static uint32_t ccp_sha1_init[SHA_COMMON_DIGEST_SIZE / sizeof(uint32_t)] = {
42 uint32_t ccp_sha224_init[SHA256_DIGEST_SIZE / sizeof(uint32_t)] = {
49 uint32_t ccp_sha256_init[SHA256_DIGEST_SIZE / sizeof(uint32_t)] = {
56 uint64_t ccp_sha384_init[SHA512_DIGEST_SIZE / sizeof(uint64_t)] = {
63 uint64_t ccp_sha512_init[SHA512_DIGEST_SIZE / sizeof(uint64_t)] = {
/* SHA3_CONST: suffix 64-bit round constants; MSVC rejects the L suffix
 * form used elsewhere, hence the conditional definition.
 */
#if defined(_MSC_VER)
#define SHA3_CONST(x) x
#else
#define SHA3_CONST(x) x##L
#endif

/** 'Words' here refers to uint64_t: 1600-bit Keccak state = 25 lanes */
#define SHA3_KECCAK_SPONGE_WORDS \
	(((1600) / 8) / sizeof(uint64_t))
79 typedef struct sha3_context_ {
82 * The portion of the input message that we
86 uint64_t s[SHA3_KECCAK_SPONGE_WORDS];
88 uint8_t sb[SHA3_KECCAK_SPONGE_WORDS * 8];
89 /**total 200 ctx size**/
91 unsigned int byteIndex;
93 * 0..7--the next byte after the set one
94 * (starts from 0; 0--none are buffered)
96 unsigned int wordIndex;
98 * 0..24--the next word to integrate input
101 unsigned int capacityWords;
103 * the double size of the hash output in
104 * words (e.g. 16 for Keccak 512)
/* 64-bit rotate-left; y must be in 1..63 (callers pass keccakf_rotc values,
 * all within that range), so the (64 - y) shift never hits UB.
 */
#define SHA3_ROTL64(x, y) \
	(((x) << (y)) | ((x) >> ((sizeof(uint64_t)*8) - (y))))
113 static const uint64_t keccakf_rndc[24] = {
114 SHA3_CONST(0x0000000000000001UL), SHA3_CONST(0x0000000000008082UL),
115 SHA3_CONST(0x800000000000808aUL), SHA3_CONST(0x8000000080008000UL),
116 SHA3_CONST(0x000000000000808bUL), SHA3_CONST(0x0000000080000001UL),
117 SHA3_CONST(0x8000000080008081UL), SHA3_CONST(0x8000000000008009UL),
118 SHA3_CONST(0x000000000000008aUL), SHA3_CONST(0x0000000000000088UL),
119 SHA3_CONST(0x0000000080008009UL), SHA3_CONST(0x000000008000000aUL),
120 SHA3_CONST(0x000000008000808bUL), SHA3_CONST(0x800000000000008bUL),
121 SHA3_CONST(0x8000000000008089UL), SHA3_CONST(0x8000000000008003UL),
122 SHA3_CONST(0x8000000000008002UL), SHA3_CONST(0x8000000000000080UL),
123 SHA3_CONST(0x000000000000800aUL), SHA3_CONST(0x800000008000000aUL),
124 SHA3_CONST(0x8000000080008081UL), SHA3_CONST(0x8000000000008080UL),
125 SHA3_CONST(0x0000000080000001UL), SHA3_CONST(0x8000000080008008UL)
/* Keccak rho-step rotation offsets, in pi-permutation traversal order. */
static const unsigned int keccakf_rotc[24] = {
	1, 3, 6, 10, 15, 21, 28, 36, 45, 55, 2, 14, 27, 41, 56, 8, 25, 43, 62,
	18, 39, 61, 20, 44
};
/* Keccak pi-step lane index permutation. */
static const unsigned int keccakf_piln[24] = {
	10, 7, 11, 17, 18, 3, 5, 16, 8, 21, 24, 4, 15, 23, 19, 13, 12, 2, 20,
	14, 22, 9, 6, 1
};
138 static enum ccp_cmd_order
139 ccp_get_cmd_id(const struct rte_crypto_sym_xform *xform)
141 enum ccp_cmd_order res = CCP_CMD_NOT_SUPPORTED;
145 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH) {
146 if (xform->next == NULL)
148 else if (xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
149 return CCP_CMD_HASH_CIPHER;
151 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER) {
152 if (xform->next == NULL)
153 return CCP_CMD_CIPHER;
154 else if (xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
155 return CCP_CMD_CIPHER_HASH;
157 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD)
158 return CCP_CMD_COMBINED;
162 /* partial hash using openssl */
163 static int partial_hash_sha1(uint8_t *data_in, uint8_t *data_out)
167 if (!SHA1_Init(&ctx))
169 SHA1_Transform(&ctx, data_in);
170 rte_memcpy(data_out, &ctx, SHA_DIGEST_LENGTH);
174 static int partial_hash_sha224(uint8_t *data_in, uint8_t *data_out)
178 if (!SHA224_Init(&ctx))
180 SHA256_Transform(&ctx, data_in);
181 rte_memcpy(data_out, &ctx,
182 SHA256_DIGEST_LENGTH);
186 static int partial_hash_sha256(uint8_t *data_in, uint8_t *data_out)
190 if (!SHA256_Init(&ctx))
192 SHA256_Transform(&ctx, data_in);
193 rte_memcpy(data_out, &ctx,
194 SHA256_DIGEST_LENGTH);
198 static int partial_hash_sha384(uint8_t *data_in, uint8_t *data_out)
202 if (!SHA384_Init(&ctx))
204 SHA512_Transform(&ctx, data_in);
205 rte_memcpy(data_out, &ctx,
206 SHA512_DIGEST_LENGTH);
210 static int partial_hash_sha512(uint8_t *data_in, uint8_t *data_out)
214 if (!SHA512_Init(&ctx))
216 SHA512_Transform(&ctx, data_in);
217 rte_memcpy(data_out, &ctx,
218 SHA512_DIGEST_LENGTH);
223 keccakf(uint64_t s[25])
227 #define KECCAK_ROUNDS 24
229 for (round = 0; round < KECCAK_ROUNDS; round++) {
232 for (i = 0; i < 5; i++)
233 bc[i] = s[i] ^ s[i + 5] ^ s[i + 10] ^ s[i + 15] ^
236 for (i = 0; i < 5; i++) {
237 t = bc[(i + 4) % 5] ^ SHA3_ROTL64(bc[(i + 1) % 5], 1);
238 for (j = 0; j < 25; j += 5)
244 for (i = 0; i < 24; i++) {
247 s[j] = SHA3_ROTL64(t, keccakf_rotc[i]);
252 for (j = 0; j < 25; j += 5) {
253 for (i = 0; i < 5; i++)
255 for (i = 0; i < 5; i++)
256 s[j + i] ^= (~bc[(i + 1) % 5]) &
261 s[0] ^= keccakf_rndc[round];
266 sha3_Init224(void *priv)
268 sha3_context *ctx = (sha3_context *) priv;
270 memset(ctx, 0, sizeof(*ctx));
271 ctx->capacityWords = 2 * 224 / (8 * sizeof(uint64_t));
275 sha3_Init256(void *priv)
277 sha3_context *ctx = (sha3_context *) priv;
279 memset(ctx, 0, sizeof(*ctx));
280 ctx->capacityWords = 2 * 256 / (8 * sizeof(uint64_t));
284 sha3_Init384(void *priv)
286 sha3_context *ctx = (sha3_context *) priv;
288 memset(ctx, 0, sizeof(*ctx));
289 ctx->capacityWords = 2 * 384 / (8 * sizeof(uint64_t));
293 sha3_Init512(void *priv)
295 sha3_context *ctx = (sha3_context *) priv;
297 memset(ctx, 0, sizeof(*ctx));
298 ctx->capacityWords = 2 * 512 / (8 * sizeof(uint64_t));
302 /* This is simply the 'update' with the padding block.
303 * The padding block is 0x01 || 0x00* || 0x80. First 0x01 and last 0x80
304 * bytes are always present, but they can be the same byte.
307 sha3_Update(void *priv, void const *bufIn, size_t len)
309 sha3_context *ctx = (sha3_context *) priv;
310 unsigned int old_tail = (8 - ctx->byteIndex) & 7;
314 const uint8_t *buf = bufIn;
316 if (len < old_tail) {
318 ctx->saved |= (uint64_t) (*(buf++)) <<
319 ((ctx->byteIndex++) * 8);
326 ctx->saved |= (uint64_t) (*(buf++)) <<
327 ((ctx->byteIndex++) * 8);
329 ctx->s[ctx->wordIndex] ^= ctx->saved;
332 if (++ctx->wordIndex ==
333 (SHA3_KECCAK_SPONGE_WORDS - ctx->capacityWords)) {
339 words = len / sizeof(uint64_t);
340 tail = len - words * sizeof(uint64_t);
342 for (i = 0; i < words; i++, buf += sizeof(uint64_t)) {
343 const uint64_t t = (uint64_t) (buf[0]) |
344 ((uint64_t) (buf[1]) << 8 * 1) |
345 ((uint64_t) (buf[2]) << 8 * 2) |
346 ((uint64_t) (buf[3]) << 8 * 3) |
347 ((uint64_t) (buf[4]) << 8 * 4) |
348 ((uint64_t) (buf[5]) << 8 * 5) |
349 ((uint64_t) (buf[6]) << 8 * 6) |
350 ((uint64_t) (buf[7]) << 8 * 7);
351 ctx->s[ctx->wordIndex] ^= t;
352 if (++ctx->wordIndex ==
353 (SHA3_KECCAK_SPONGE_WORDS - ctx->capacityWords)) {
360 ctx->saved |= (uint64_t) (*(buf++)) << ((ctx->byteIndex++) * 8);
363 int partial_hash_sha3_224(uint8_t *data_in, uint8_t *data_out)
368 ctx = rte_zmalloc("sha3-ctx", sizeof(sha3_context), 0);
370 CCP_LOG_ERR("sha3-ctx creation failed");
374 sha3_Update(ctx, data_in, SHA3_224_BLOCK_SIZE);
375 for (i = 0; i < CCP_SHA3_CTX_SIZE; i++, data_out++)
376 *data_out = ctx->sb[CCP_SHA3_CTX_SIZE - i - 1];
382 int partial_hash_sha3_256(uint8_t *data_in, uint8_t *data_out)
387 ctx = rte_zmalloc("sha3-ctx", sizeof(sha3_context), 0);
389 CCP_LOG_ERR("sha3-ctx creation failed");
393 sha3_Update(ctx, data_in, SHA3_256_BLOCK_SIZE);
394 for (i = 0; i < CCP_SHA3_CTX_SIZE; i++, data_out++)
395 *data_out = ctx->sb[CCP_SHA3_CTX_SIZE - i - 1];
401 int partial_hash_sha3_384(uint8_t *data_in, uint8_t *data_out)
406 ctx = rte_zmalloc("sha3-ctx", sizeof(sha3_context), 0);
408 CCP_LOG_ERR("sha3-ctx creation failed");
412 sha3_Update(ctx, data_in, SHA3_384_BLOCK_SIZE);
413 for (i = 0; i < CCP_SHA3_CTX_SIZE; i++, data_out++)
414 *data_out = ctx->sb[CCP_SHA3_CTX_SIZE - i - 1];
420 int partial_hash_sha3_512(uint8_t *data_in, uint8_t *data_out)
425 ctx = rte_zmalloc("sha3-ctx", sizeof(sha3_context), 0);
427 CCP_LOG_ERR("sha3-ctx creation failed");
431 sha3_Update(ctx, data_in, SHA3_512_BLOCK_SIZE);
432 for (i = 0; i < CCP_SHA3_CTX_SIZE; i++, data_out++)
433 *data_out = ctx->sb[CCP_SHA3_CTX_SIZE - i - 1];
439 static int generate_partial_hash(struct ccp_session *sess)
442 uint8_t ipad[sess->auth.block_size];
443 uint8_t opad[sess->auth.block_size];
444 uint8_t *ipad_t, *opad_t;
445 uint32_t *hash_value_be32, hash_temp32[8];
446 uint64_t *hash_value_be64, hash_temp64[8];
448 uint8_t *hash_value_sha3;
450 opad_t = ipad_t = (uint8_t *)sess->auth.key;
452 hash_value_be32 = (uint32_t *)((uint8_t *)sess->auth.pre_compute);
453 hash_value_be64 = (uint64_t *)((uint8_t *)sess->auth.pre_compute);
455 /* considering key size is always equal to block size of algorithm */
456 for (i = 0; i < sess->auth.block_size; i++) {
457 ipad[i] = (ipad_t[i] ^ HMAC_IPAD_VALUE);
458 opad[i] = (opad_t[i] ^ HMAC_OPAD_VALUE);
461 switch (sess->auth.algo) {
462 case CCP_AUTH_ALGO_SHA1_HMAC:
463 count = SHA1_DIGEST_SIZE >> 2;
465 if (partial_hash_sha1(ipad, (uint8_t *)hash_temp32))
467 for (i = 0; i < count; i++, hash_value_be32++)
468 *hash_value_be32 = hash_temp32[count - 1 - i];
470 hash_value_be32 = (uint32_t *)((uint8_t *)sess->auth.pre_compute
471 + sess->auth.ctx_len);
472 if (partial_hash_sha1(opad, (uint8_t *)hash_temp32))
474 for (i = 0; i < count; i++, hash_value_be32++)
475 *hash_value_be32 = hash_temp32[count - 1 - i];
477 case CCP_AUTH_ALGO_SHA224_HMAC:
478 count = SHA256_DIGEST_SIZE >> 2;
480 if (partial_hash_sha224(ipad, (uint8_t *)hash_temp32))
482 for (i = 0; i < count; i++, hash_value_be32++)
483 *hash_value_be32 = hash_temp32[count - 1 - i];
485 hash_value_be32 = (uint32_t *)((uint8_t *)sess->auth.pre_compute
486 + sess->auth.ctx_len);
487 if (partial_hash_sha224(opad, (uint8_t *)hash_temp32))
489 for (i = 0; i < count; i++, hash_value_be32++)
490 *hash_value_be32 = hash_temp32[count - 1 - i];
492 case CCP_AUTH_ALGO_SHA3_224_HMAC:
493 hash_value_sha3 = sess->auth.pre_compute;
494 if (partial_hash_sha3_224(ipad, hash_value_sha3))
497 hash_value_sha3 = (uint8_t *)(sess->auth.pre_compute
498 + sess->auth.ctx_len);
499 if (partial_hash_sha3_224(opad, hash_value_sha3))
502 case CCP_AUTH_ALGO_SHA256_HMAC:
503 count = SHA256_DIGEST_SIZE >> 2;
505 if (partial_hash_sha256(ipad, (uint8_t *)hash_temp32))
507 for (i = 0; i < count; i++, hash_value_be32++)
508 *hash_value_be32 = hash_temp32[count - 1 - i];
510 hash_value_be32 = (uint32_t *)((uint8_t *)sess->auth.pre_compute
511 + sess->auth.ctx_len);
512 if (partial_hash_sha256(opad, (uint8_t *)hash_temp32))
514 for (i = 0; i < count; i++, hash_value_be32++)
515 *hash_value_be32 = hash_temp32[count - 1 - i];
517 case CCP_AUTH_ALGO_SHA3_256_HMAC:
518 hash_value_sha3 = sess->auth.pre_compute;
519 if (partial_hash_sha3_256(ipad, hash_value_sha3))
522 hash_value_sha3 = (uint8_t *)(sess->auth.pre_compute
523 + sess->auth.ctx_len);
524 if (partial_hash_sha3_256(opad, hash_value_sha3))
527 case CCP_AUTH_ALGO_SHA384_HMAC:
528 count = SHA512_DIGEST_SIZE >> 3;
530 if (partial_hash_sha384(ipad, (uint8_t *)hash_temp64))
532 for (i = 0; i < count; i++, hash_value_be64++)
533 *hash_value_be64 = hash_temp64[count - 1 - i];
535 hash_value_be64 = (uint64_t *)((uint8_t *)sess->auth.pre_compute
536 + sess->auth.ctx_len);
537 if (partial_hash_sha384(opad, (uint8_t *)hash_temp64))
539 for (i = 0; i < count; i++, hash_value_be64++)
540 *hash_value_be64 = hash_temp64[count - 1 - i];
542 case CCP_AUTH_ALGO_SHA3_384_HMAC:
543 hash_value_sha3 = sess->auth.pre_compute;
544 if (partial_hash_sha3_384(ipad, hash_value_sha3))
547 hash_value_sha3 = (uint8_t *)(sess->auth.pre_compute
548 + sess->auth.ctx_len);
549 if (partial_hash_sha3_384(opad, hash_value_sha3))
552 case CCP_AUTH_ALGO_SHA512_HMAC:
553 count = SHA512_DIGEST_SIZE >> 3;
555 if (partial_hash_sha512(ipad, (uint8_t *)hash_temp64))
557 for (i = 0; i < count; i++, hash_value_be64++)
558 *hash_value_be64 = hash_temp64[count - 1 - i];
560 hash_value_be64 = (uint64_t *)((uint8_t *)sess->auth.pre_compute
561 + sess->auth.ctx_len);
562 if (partial_hash_sha512(opad, (uint8_t *)hash_temp64))
564 for (i = 0; i < count; i++, hash_value_be64++)
565 *hash_value_be64 = hash_temp64[count - 1 - i];
567 case CCP_AUTH_ALGO_SHA3_512_HMAC:
568 hash_value_sha3 = sess->auth.pre_compute;
569 if (partial_hash_sha3_512(ipad, hash_value_sha3))
572 hash_value_sha3 = (uint8_t *)(sess->auth.pre_compute
573 + sess->auth.ctx_len);
574 if (partial_hash_sha3_512(opad, hash_value_sha3))
578 CCP_LOG_ERR("Invalid auth algo");
/* prepare temporary keys K1 and K2 */
/* CMAC subkey derivation (NIST SP 800-38B): k = l << 1, and if the MSB of
 * l was set, XOR the last byte with the block-size constant Rb
 * (0x87 for 16-byte AES blocks, 0x1b for 8-byte blocks).
 */
static void prepare_key(unsigned char *k, unsigned char *l, int bl)
{
	int i;
	/* Shift block to left, including carry */
	for (i = 0; i < bl; i++) {
		k[i] = l[i] << 1;
		if (i < bl - 1 && l[i + 1] & 0x80)
			k[i] |= 0x1;
	}
	/* If MSB set fixup with R */
	if (l[0] & 0x80)
		k[bl - 1] ^= bl == 16 ? 0x87 : 0x1b;
}
598 /* subkeys K1 and K2 generation for CMAC */
600 generate_cmac_subkeys(struct ccp_session *sess)
602 const EVP_CIPHER *algo;
604 unsigned char *ccp_ctx;
607 unsigned char zero_iv[AES_BLOCK_SIZE] = {0};
608 unsigned char dst[2 * AES_BLOCK_SIZE] = {0};
609 unsigned char k1[AES_BLOCK_SIZE] = {0};
610 unsigned char k2[AES_BLOCK_SIZE] = {0};
612 if (sess->auth.ut.aes_type == CCP_AES_TYPE_128)
613 algo = EVP_aes_128_cbc();
614 else if (sess->auth.ut.aes_type == CCP_AES_TYPE_192)
615 algo = EVP_aes_192_cbc();
616 else if (sess->auth.ut.aes_type == CCP_AES_TYPE_256)
617 algo = EVP_aes_256_cbc();
619 CCP_LOG_ERR("Invalid CMAC type length");
623 ctx = EVP_CIPHER_CTX_new();
625 CCP_LOG_ERR("ctx creation failed");
628 if (EVP_EncryptInit(ctx, algo, (unsigned char *)sess->auth.key,
629 (unsigned char *)zero_iv) <= 0)
630 goto key_generate_err;
631 if (EVP_CIPHER_CTX_set_padding(ctx, 0) <= 0)
632 goto key_generate_err;
633 if (EVP_EncryptUpdate(ctx, dst, &dstlen, zero_iv,
634 AES_BLOCK_SIZE) <= 0)
635 goto key_generate_err;
636 if (EVP_EncryptFinal_ex(ctx, dst + dstlen, &totlen) <= 0)
637 goto key_generate_err;
639 memset(sess->auth.pre_compute, 0, CCP_SB_BYTES * 2);
641 ccp_ctx = (unsigned char *)(sess->auth.pre_compute + CCP_SB_BYTES - 1);
642 prepare_key(k1, dst, AES_BLOCK_SIZE);
643 for (i = 0; i < AES_BLOCK_SIZE; i++, ccp_ctx--)
646 ccp_ctx = (unsigned char *)(sess->auth.pre_compute +
647 (2 * CCP_SB_BYTES) - 1);
648 prepare_key(k2, k1, AES_BLOCK_SIZE);
649 for (i = 0; i < AES_BLOCK_SIZE; i++, ccp_ctx--)
652 EVP_CIPHER_CTX_free(ctx);
657 CCP_LOG_ERR("CMAC Init failed");
661 /* configure session */
663 ccp_configure_session_cipher(struct ccp_session *sess,
664 const struct rte_crypto_sym_xform *xform)
666 const struct rte_crypto_cipher_xform *cipher_xform = NULL;
669 cipher_xform = &xform->cipher;
671 /* set cipher direction */
672 if (cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
673 sess->cipher.dir = CCP_CIPHER_DIR_ENCRYPT;
675 sess->cipher.dir = CCP_CIPHER_DIR_DECRYPT;
678 sess->cipher.key_length = cipher_xform->key.length;
679 rte_memcpy(sess->cipher.key, cipher_xform->key.data,
680 cipher_xform->key.length);
682 /* set iv parameters */
683 sess->iv.offset = cipher_xform->iv.offset;
684 sess->iv.length = cipher_xform->iv.length;
686 switch (cipher_xform->algo) {
687 case RTE_CRYPTO_CIPHER_AES_CTR:
688 sess->cipher.algo = CCP_CIPHER_ALGO_AES_CTR;
689 sess->cipher.um.aes_mode = CCP_AES_MODE_CTR;
690 sess->cipher.engine = CCP_ENGINE_AES;
692 case RTE_CRYPTO_CIPHER_AES_ECB:
693 sess->cipher.algo = CCP_CIPHER_ALGO_AES_CBC;
694 sess->cipher.um.aes_mode = CCP_AES_MODE_ECB;
695 sess->cipher.engine = CCP_ENGINE_AES;
697 case RTE_CRYPTO_CIPHER_AES_CBC:
698 sess->cipher.algo = CCP_CIPHER_ALGO_AES_CBC;
699 sess->cipher.um.aes_mode = CCP_AES_MODE_CBC;
700 sess->cipher.engine = CCP_ENGINE_AES;
702 case RTE_CRYPTO_CIPHER_3DES_CBC:
703 sess->cipher.algo = CCP_CIPHER_ALGO_3DES_CBC;
704 sess->cipher.um.des_mode = CCP_DES_MODE_CBC;
705 sess->cipher.engine = CCP_ENGINE_3DES;
708 CCP_LOG_ERR("Unsupported cipher algo");
713 switch (sess->cipher.engine) {
715 if (sess->cipher.key_length == 16)
716 sess->cipher.ut.aes_type = CCP_AES_TYPE_128;
717 else if (sess->cipher.key_length == 24)
718 sess->cipher.ut.aes_type = CCP_AES_TYPE_192;
719 else if (sess->cipher.key_length == 32)
720 sess->cipher.ut.aes_type = CCP_AES_TYPE_256;
722 CCP_LOG_ERR("Invalid cipher key length");
725 for (i = 0; i < sess->cipher.key_length ; i++)
726 sess->cipher.key_ccp[sess->cipher.key_length - i - 1] =
729 case CCP_ENGINE_3DES:
730 if (sess->cipher.key_length == 16)
731 sess->cipher.ut.des_type = CCP_DES_TYPE_128;
732 else if (sess->cipher.key_length == 24)
733 sess->cipher.ut.des_type = CCP_DES_TYPE_192;
735 CCP_LOG_ERR("Invalid cipher key length");
738 for (j = 0, x = 0; j < sess->cipher.key_length/8; j++, x += 8)
739 for (i = 0; i < 8; i++)
740 sess->cipher.key_ccp[(8 + x) - i - 1] =
741 sess->cipher.key[i + x];
744 CCP_LOG_ERR("Invalid CCP Engine");
747 sess->cipher.nonce_phys = rte_mem_virt2phy(sess->cipher.nonce);
748 sess->cipher.key_phys = rte_mem_virt2phy(sess->cipher.key_ccp);
753 ccp_configure_session_auth(struct ccp_session *sess,
754 const struct rte_crypto_sym_xform *xform)
756 const struct rte_crypto_auth_xform *auth_xform = NULL;
759 auth_xform = &xform->auth;
761 sess->auth.digest_length = auth_xform->digest_length;
762 if (auth_xform->op == RTE_CRYPTO_AUTH_OP_GENERATE)
763 sess->auth.op = CCP_AUTH_OP_GENERATE;
765 sess->auth.op = CCP_AUTH_OP_VERIFY;
766 switch (auth_xform->algo) {
767 case RTE_CRYPTO_AUTH_MD5_HMAC:
768 if (sess->auth_opt) {
769 sess->auth.algo = CCP_AUTH_ALGO_MD5_HMAC;
770 sess->auth.offset = ((CCP_SB_BYTES << 1) -
772 sess->auth.key_length = auth_xform->key.length;
773 sess->auth.block_size = MD5_BLOCK_SIZE;
774 memset(sess->auth.key, 0, sess->auth.block_size);
775 rte_memcpy(sess->auth.key, auth_xform->key.data,
776 auth_xform->key.length);
778 return -1; /* HMAC MD5 not supported on CCP */
780 case RTE_CRYPTO_AUTH_SHA1:
781 sess->auth.engine = CCP_ENGINE_SHA;
782 sess->auth.algo = CCP_AUTH_ALGO_SHA1;
783 sess->auth.ut.sha_type = CCP_SHA_TYPE_1;
784 sess->auth.ctx = (void *)ccp_sha1_init;
785 sess->auth.ctx_len = CCP_SB_BYTES;
786 sess->auth.offset = CCP_SB_BYTES - SHA1_DIGEST_SIZE;
788 case RTE_CRYPTO_AUTH_SHA1_HMAC:
789 if (sess->auth_opt) {
790 if (auth_xform->key.length > SHA1_BLOCK_SIZE)
792 sess->auth.algo = CCP_AUTH_ALGO_SHA1_HMAC;
793 sess->auth.offset = CCP_SB_BYTES - SHA1_DIGEST_SIZE;
794 sess->auth.block_size = SHA1_BLOCK_SIZE;
795 sess->auth.key_length = auth_xform->key.length;
796 memset(sess->auth.key, 0, sess->auth.block_size);
797 rte_memcpy(sess->auth.key, auth_xform->key.data,
798 auth_xform->key.length);
800 if (auth_xform->key.length > SHA1_BLOCK_SIZE)
802 sess->auth.engine = CCP_ENGINE_SHA;
803 sess->auth.algo = CCP_AUTH_ALGO_SHA1_HMAC;
804 sess->auth.ut.sha_type = CCP_SHA_TYPE_1;
805 sess->auth.ctx_len = CCP_SB_BYTES;
806 sess->auth.offset = CCP_SB_BYTES - SHA1_DIGEST_SIZE;
807 sess->auth.block_size = SHA1_BLOCK_SIZE;
808 sess->auth.key_length = auth_xform->key.length;
809 memset(sess->auth.key, 0, sess->auth.block_size);
810 memset(sess->auth.pre_compute, 0,
811 sess->auth.ctx_len << 1);
812 rte_memcpy(sess->auth.key, auth_xform->key.data,
813 auth_xform->key.length);
814 if (generate_partial_hash(sess))
818 case RTE_CRYPTO_AUTH_SHA224:
819 sess->auth.algo = CCP_AUTH_ALGO_SHA224;
820 sess->auth.engine = CCP_ENGINE_SHA;
821 sess->auth.ut.sha_type = CCP_SHA_TYPE_224;
822 sess->auth.ctx = (void *)ccp_sha224_init;
823 sess->auth.ctx_len = CCP_SB_BYTES;
824 sess->auth.offset = CCP_SB_BYTES - SHA224_DIGEST_SIZE;
826 case RTE_CRYPTO_AUTH_SHA224_HMAC:
827 if (sess->auth_opt) {
828 if (auth_xform->key.length > SHA224_BLOCK_SIZE)
830 sess->auth.algo = CCP_AUTH_ALGO_SHA224_HMAC;
831 sess->auth.offset = CCP_SB_BYTES - SHA224_DIGEST_SIZE;
832 sess->auth.block_size = SHA224_BLOCK_SIZE;
833 sess->auth.key_length = auth_xform->key.length;
834 memset(sess->auth.key, 0, sess->auth.block_size);
835 rte_memcpy(sess->auth.key, auth_xform->key.data,
836 auth_xform->key.length);
838 if (auth_xform->key.length > SHA224_BLOCK_SIZE)
840 sess->auth.algo = CCP_AUTH_ALGO_SHA224_HMAC;
841 sess->auth.engine = CCP_ENGINE_SHA;
842 sess->auth.ut.sha_type = CCP_SHA_TYPE_224;
843 sess->auth.ctx_len = CCP_SB_BYTES;
844 sess->auth.offset = CCP_SB_BYTES - SHA224_DIGEST_SIZE;
845 sess->auth.block_size = SHA224_BLOCK_SIZE;
846 sess->auth.key_length = auth_xform->key.length;
847 memset(sess->auth.key, 0, sess->auth.block_size);
848 memset(sess->auth.pre_compute, 0,
849 sess->auth.ctx_len << 1);
850 rte_memcpy(sess->auth.key, auth_xform->key.data,
851 auth_xform->key.length);
852 if (generate_partial_hash(sess))
856 case RTE_CRYPTO_AUTH_SHA3_224:
857 sess->auth.algo = CCP_AUTH_ALGO_SHA3_224;
858 sess->auth.engine = CCP_ENGINE_SHA;
859 sess->auth.ut.sha_type = CCP_SHA3_TYPE_224;
860 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
861 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA224_DIGEST_SIZE;
863 case RTE_CRYPTO_AUTH_SHA3_224_HMAC:
864 if (auth_xform->key.length > SHA3_224_BLOCK_SIZE)
866 sess->auth.algo = CCP_AUTH_ALGO_SHA3_224_HMAC;
867 sess->auth.engine = CCP_ENGINE_SHA;
868 sess->auth.ut.sha_type = CCP_SHA3_TYPE_224;
869 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
870 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA224_DIGEST_SIZE;
871 sess->auth.block_size = SHA3_224_BLOCK_SIZE;
872 sess->auth.key_length = auth_xform->key.length;
873 memset(sess->auth.key, 0, sess->auth.block_size);
874 memset(sess->auth.pre_compute, 0, 2 * sess->auth.ctx_len);
875 rte_memcpy(sess->auth.key, auth_xform->key.data,
876 auth_xform->key.length);
877 if (generate_partial_hash(sess))
880 case RTE_CRYPTO_AUTH_SHA256:
881 sess->auth.algo = CCP_AUTH_ALGO_SHA256;
882 sess->auth.engine = CCP_ENGINE_SHA;
883 sess->auth.ut.sha_type = CCP_SHA_TYPE_256;
884 sess->auth.ctx = (void *)ccp_sha256_init;
885 sess->auth.ctx_len = CCP_SB_BYTES;
886 sess->auth.offset = CCP_SB_BYTES - SHA256_DIGEST_SIZE;
888 case RTE_CRYPTO_AUTH_SHA256_HMAC:
889 if (sess->auth_opt) {
890 if (auth_xform->key.length > SHA256_BLOCK_SIZE)
892 sess->auth.algo = CCP_AUTH_ALGO_SHA256_HMAC;
893 sess->auth.offset = CCP_SB_BYTES - SHA256_DIGEST_SIZE;
894 sess->auth.block_size = SHA256_BLOCK_SIZE;
895 sess->auth.key_length = auth_xform->key.length;
896 memset(sess->auth.key, 0, sess->auth.block_size);
897 rte_memcpy(sess->auth.key, auth_xform->key.data,
898 auth_xform->key.length);
900 if (auth_xform->key.length > SHA256_BLOCK_SIZE)
902 sess->auth.algo = CCP_AUTH_ALGO_SHA256_HMAC;
903 sess->auth.engine = CCP_ENGINE_SHA;
904 sess->auth.ut.sha_type = CCP_SHA_TYPE_256;
905 sess->auth.ctx_len = CCP_SB_BYTES;
906 sess->auth.offset = CCP_SB_BYTES - SHA256_DIGEST_SIZE;
907 sess->auth.block_size = SHA256_BLOCK_SIZE;
908 sess->auth.key_length = auth_xform->key.length;
909 memset(sess->auth.key, 0, sess->auth.block_size);
910 memset(sess->auth.pre_compute, 0,
911 sess->auth.ctx_len << 1);
912 rte_memcpy(sess->auth.key, auth_xform->key.data,
913 auth_xform->key.length);
914 if (generate_partial_hash(sess))
918 case RTE_CRYPTO_AUTH_SHA3_256:
919 sess->auth.algo = CCP_AUTH_ALGO_SHA3_256;
920 sess->auth.engine = CCP_ENGINE_SHA;
921 sess->auth.ut.sha_type = CCP_SHA3_TYPE_256;
922 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
923 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA256_DIGEST_SIZE;
925 case RTE_CRYPTO_AUTH_SHA3_256_HMAC:
926 if (auth_xform->key.length > SHA3_256_BLOCK_SIZE)
928 sess->auth.algo = CCP_AUTH_ALGO_SHA3_256_HMAC;
929 sess->auth.engine = CCP_ENGINE_SHA;
930 sess->auth.ut.sha_type = CCP_SHA3_TYPE_256;
931 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
932 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA256_DIGEST_SIZE;
933 sess->auth.block_size = SHA3_256_BLOCK_SIZE;
934 sess->auth.key_length = auth_xform->key.length;
935 memset(sess->auth.key, 0, sess->auth.block_size);
936 memset(sess->auth.pre_compute, 0, 2 * sess->auth.ctx_len);
937 rte_memcpy(sess->auth.key, auth_xform->key.data,
938 auth_xform->key.length);
939 if (generate_partial_hash(sess))
942 case RTE_CRYPTO_AUTH_SHA384:
943 sess->auth.algo = CCP_AUTH_ALGO_SHA384;
944 sess->auth.engine = CCP_ENGINE_SHA;
945 sess->auth.ut.sha_type = CCP_SHA_TYPE_384;
946 sess->auth.ctx = (void *)ccp_sha384_init;
947 sess->auth.ctx_len = CCP_SB_BYTES << 1;
948 sess->auth.offset = (CCP_SB_BYTES << 1) - SHA384_DIGEST_SIZE;
950 case RTE_CRYPTO_AUTH_SHA384_HMAC:
951 if (sess->auth_opt) {
952 if (auth_xform->key.length > SHA384_BLOCK_SIZE)
954 sess->auth.algo = CCP_AUTH_ALGO_SHA384_HMAC;
955 sess->auth.offset = ((CCP_SB_BYTES << 1) -
957 sess->auth.block_size = SHA384_BLOCK_SIZE;
958 sess->auth.key_length = auth_xform->key.length;
959 memset(sess->auth.key, 0, sess->auth.block_size);
960 rte_memcpy(sess->auth.key, auth_xform->key.data,
961 auth_xform->key.length);
963 if (auth_xform->key.length > SHA384_BLOCK_SIZE)
965 sess->auth.algo = CCP_AUTH_ALGO_SHA384_HMAC;
966 sess->auth.engine = CCP_ENGINE_SHA;
967 sess->auth.ut.sha_type = CCP_SHA_TYPE_384;
968 sess->auth.ctx_len = CCP_SB_BYTES << 1;
969 sess->auth.offset = ((CCP_SB_BYTES << 1) -
971 sess->auth.block_size = SHA384_BLOCK_SIZE;
972 sess->auth.key_length = auth_xform->key.length;
973 memset(sess->auth.key, 0, sess->auth.block_size);
974 memset(sess->auth.pre_compute, 0,
975 sess->auth.ctx_len << 1);
976 rte_memcpy(sess->auth.key, auth_xform->key.data,
977 auth_xform->key.length);
978 if (generate_partial_hash(sess))
982 case RTE_CRYPTO_AUTH_SHA3_384:
983 sess->auth.algo = CCP_AUTH_ALGO_SHA3_384;
984 sess->auth.engine = CCP_ENGINE_SHA;
985 sess->auth.ut.sha_type = CCP_SHA3_TYPE_384;
986 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
987 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA384_DIGEST_SIZE;
989 case RTE_CRYPTO_AUTH_SHA3_384_HMAC:
990 if (auth_xform->key.length > SHA3_384_BLOCK_SIZE)
992 sess->auth.algo = CCP_AUTH_ALGO_SHA3_384_HMAC;
993 sess->auth.engine = CCP_ENGINE_SHA;
994 sess->auth.ut.sha_type = CCP_SHA3_TYPE_384;
995 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
996 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA384_DIGEST_SIZE;
997 sess->auth.block_size = SHA3_384_BLOCK_SIZE;
998 sess->auth.key_length = auth_xform->key.length;
999 memset(sess->auth.key, 0, sess->auth.block_size);
1000 memset(sess->auth.pre_compute, 0, 2 * sess->auth.ctx_len);
1001 rte_memcpy(sess->auth.key, auth_xform->key.data,
1002 auth_xform->key.length);
1003 if (generate_partial_hash(sess))
1006 case RTE_CRYPTO_AUTH_SHA512:
1007 sess->auth.algo = CCP_AUTH_ALGO_SHA512;
1008 sess->auth.engine = CCP_ENGINE_SHA;
1009 sess->auth.ut.sha_type = CCP_SHA_TYPE_512;
1010 sess->auth.ctx = (void *)ccp_sha512_init;
1011 sess->auth.ctx_len = CCP_SB_BYTES << 1;
1012 sess->auth.offset = (CCP_SB_BYTES << 1) - SHA512_DIGEST_SIZE;
1014 case RTE_CRYPTO_AUTH_SHA512_HMAC:
1015 if (sess->auth_opt) {
1016 if (auth_xform->key.length > SHA512_BLOCK_SIZE)
1018 sess->auth.algo = CCP_AUTH_ALGO_SHA512_HMAC;
1019 sess->auth.offset = ((CCP_SB_BYTES << 1) -
1020 SHA512_DIGEST_SIZE);
1021 sess->auth.block_size = SHA512_BLOCK_SIZE;
1022 sess->auth.key_length = auth_xform->key.length;
1023 memset(sess->auth.key, 0, sess->auth.block_size);
1024 rte_memcpy(sess->auth.key, auth_xform->key.data,
1025 auth_xform->key.length);
1027 if (auth_xform->key.length > SHA512_BLOCK_SIZE)
1029 sess->auth.algo = CCP_AUTH_ALGO_SHA512_HMAC;
1030 sess->auth.engine = CCP_ENGINE_SHA;
1031 sess->auth.ut.sha_type = CCP_SHA_TYPE_512;
1032 sess->auth.ctx_len = CCP_SB_BYTES << 1;
1033 sess->auth.offset = ((CCP_SB_BYTES << 1) -
1034 SHA512_DIGEST_SIZE);
1035 sess->auth.block_size = SHA512_BLOCK_SIZE;
1036 sess->auth.key_length = auth_xform->key.length;
1037 memset(sess->auth.key, 0, sess->auth.block_size);
1038 memset(sess->auth.pre_compute, 0,
1039 sess->auth.ctx_len << 1);
1040 rte_memcpy(sess->auth.key, auth_xform->key.data,
1041 auth_xform->key.length);
1042 if (generate_partial_hash(sess))
1046 case RTE_CRYPTO_AUTH_SHA3_512:
1047 sess->auth.algo = CCP_AUTH_ALGO_SHA3_512;
1048 sess->auth.engine = CCP_ENGINE_SHA;
1049 sess->auth.ut.sha_type = CCP_SHA3_TYPE_512;
1050 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
1051 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA512_DIGEST_SIZE;
1053 case RTE_CRYPTO_AUTH_SHA3_512_HMAC:
1054 if (auth_xform->key.length > SHA3_512_BLOCK_SIZE)
1056 sess->auth.algo = CCP_AUTH_ALGO_SHA3_512_HMAC;
1057 sess->auth.engine = CCP_ENGINE_SHA;
1058 sess->auth.ut.sha_type = CCP_SHA3_TYPE_512;
1059 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
1060 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA512_DIGEST_SIZE;
1061 sess->auth.block_size = SHA3_512_BLOCK_SIZE;
1062 sess->auth.key_length = auth_xform->key.length;
1063 memset(sess->auth.key, 0, sess->auth.block_size);
1064 memset(sess->auth.pre_compute, 0, 2 * sess->auth.ctx_len);
1065 rte_memcpy(sess->auth.key, auth_xform->key.data,
1066 auth_xform->key.length);
1067 if (generate_partial_hash(sess))
1070 case RTE_CRYPTO_AUTH_AES_CMAC:
1071 sess->auth.algo = CCP_AUTH_ALGO_AES_CMAC;
1072 sess->auth.engine = CCP_ENGINE_AES;
1073 sess->auth.um.aes_mode = CCP_AES_MODE_CMAC;
1074 sess->auth.key_length = auth_xform->key.length;
1075 /* padding and hash result */
1076 sess->auth.ctx_len = CCP_SB_BYTES << 1;
1077 sess->auth.offset = AES_BLOCK_SIZE;
1078 sess->auth.block_size = AES_BLOCK_SIZE;
1079 if (sess->auth.key_length == 16)
1080 sess->auth.ut.aes_type = CCP_AES_TYPE_128;
1081 else if (sess->auth.key_length == 24)
1082 sess->auth.ut.aes_type = CCP_AES_TYPE_192;
1083 else if (sess->auth.key_length == 32)
1084 sess->auth.ut.aes_type = CCP_AES_TYPE_256;
1086 CCP_LOG_ERR("Invalid CMAC key length");
1089 rte_memcpy(sess->auth.key, auth_xform->key.data,
1090 sess->auth.key_length);
1091 for (i = 0; i < sess->auth.key_length; i++)
1092 sess->auth.key_ccp[sess->auth.key_length - i - 1] =
1094 if (generate_cmac_subkeys(sess))
1098 CCP_LOG_ERR("Unsupported hash algo");
1105 ccp_configure_session_aead(struct ccp_session *sess,
1106 const struct rte_crypto_sym_xform *xform)
1108 const struct rte_crypto_aead_xform *aead_xform = NULL;
1111 aead_xform = &xform->aead;
1113 sess->cipher.key_length = aead_xform->key.length;
1114 rte_memcpy(sess->cipher.key, aead_xform->key.data,
1115 aead_xform->key.length);
1117 if (aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
1118 sess->cipher.dir = CCP_CIPHER_DIR_ENCRYPT;
1119 sess->auth.op = CCP_AUTH_OP_GENERATE;
1121 sess->cipher.dir = CCP_CIPHER_DIR_DECRYPT;
1122 sess->auth.op = CCP_AUTH_OP_VERIFY;
1124 sess->aead_algo = aead_xform->algo;
1125 sess->auth.aad_length = aead_xform->aad_length;
1126 sess->auth.digest_length = aead_xform->digest_length;
1128 /* set iv parameters */
1129 sess->iv.offset = aead_xform->iv.offset;
1130 sess->iv.length = aead_xform->iv.length;
1132 switch (aead_xform->algo) {
1133 case RTE_CRYPTO_AEAD_AES_GCM:
1134 sess->cipher.algo = CCP_CIPHER_ALGO_AES_GCM;
1135 sess->cipher.um.aes_mode = CCP_AES_MODE_GCTR;
1136 sess->cipher.engine = CCP_ENGINE_AES;
1137 if (sess->cipher.key_length == 16)
1138 sess->cipher.ut.aes_type = CCP_AES_TYPE_128;
1139 else if (sess->cipher.key_length == 24)
1140 sess->cipher.ut.aes_type = CCP_AES_TYPE_192;
1141 else if (sess->cipher.key_length == 32)
1142 sess->cipher.ut.aes_type = CCP_AES_TYPE_256;
1144 CCP_LOG_ERR("Invalid aead key length");
1147 for (i = 0; i < sess->cipher.key_length; i++)
1148 sess->cipher.key_ccp[sess->cipher.key_length - i - 1] =
1149 sess->cipher.key[i];
1150 sess->auth.algo = CCP_AUTH_ALGO_AES_GCM;
1151 sess->auth.engine = CCP_ENGINE_AES;
1152 sess->auth.um.aes_mode = CCP_AES_MODE_GHASH;
1153 sess->auth.ctx_len = CCP_SB_BYTES;
1154 sess->auth.offset = 0;
1155 sess->auth.block_size = AES_BLOCK_SIZE;
1156 sess->cmd_id = CCP_CMD_COMBINED;
1159 CCP_LOG_ERR("Unsupported aead algo");
1162 sess->cipher.nonce_phys = rte_mem_virt2phy(sess->cipher.nonce);
1163 sess->cipher.key_phys = rte_mem_virt2phy(sess->cipher.key_ccp);
/*
 * Populate a CCP session from a symmetric transform chain.
 *
 * ccp_get_cmd_id() classifies the xform chain (cipher-only, auth-only,
 * cipher+hash in either order, or combined AEAD) and the switch below
 * routes each xform to the matching configure helper.
 *
 * NOTE(review): this extraction is missing the `break`/`return`
 * statements and error-path braces of the original; comments describe
 * only the visible logic.
 */
1168 ccp_set_session_parameters(struct ccp_session *sess,
1169 const struct rte_crypto_sym_xform *xform,
1170 struct ccp_private *internals)
1172 const struct rte_crypto_sym_xform *cipher_xform = NULL;
1173 const struct rte_crypto_sym_xform *auth_xform = NULL;
1174 const struct rte_crypto_sym_xform *aead_xform = NULL;
/* auth_opt selects CPU (OpenSSL) auth instead of CCP engine — device option */
1177 sess->auth_opt = internals->auth_opt;
1178 sess->cmd_id = ccp_get_cmd_id(xform);
1180 switch (sess->cmd_id) {
1181 case CCP_CMD_CIPHER:
1182 cipher_xform = xform;
1187 case CCP_CMD_CIPHER_HASH:
1188 cipher_xform = xform;
1189 auth_xform = xform->next;
1191 case CCP_CMD_HASH_CIPHER:
/* hash-then-cipher: cipher is the second xform in the chain */
1193 cipher_xform = xform->next;
1195 case CCP_CMD_COMBINED:
1199 CCP_LOG_ERR("Unsupported cmd_id");
1203 /* Default IV length = 0 */
1204 sess->iv.length = 0;
/* each configure helper below presumably runs only when its xform is
 * non-NULL — guard lines are missing from this extraction */
1206 ret = ccp_configure_session_cipher(sess, cipher_xform);
1208 CCP_LOG_ERR("Invalid/unsupported cipher parameters");
1213 ret = ccp_configure_session_auth(sess, auth_xform);
1215 CCP_LOG_ERR("Invalid/unsupported auth parameters");
1220 ret = ccp_configure_session_aead(sess, aead_xform);
1222 CCP_LOG_ERR("Invalid/unsupported aead parameters");
1229 /* calculate CCP descriptors requirement */
/*
 * Return the number of CCP queue descriptors ("slots") a cipher-only
 * operation consumes for this session. CBC/CTR modes need an extra
 * descriptor for the IV passthrough; the actual count assignments are
 * missing from this extraction.
 */
1231 ccp_cipher_slot(struct ccp_session *session)
1235 switch (session->cipher.algo) {
1236 case CCP_CIPHER_ALGO_AES_CBC:
1238 /**< op + passthrough for iv */
1240 case CCP_CIPHER_ALGO_AES_ECB:
1244 case CCP_CIPHER_ALGO_AES_CTR:
1246 /**< op + passthrough for iv */
1248 case CCP_CIPHER_ALGO_3DES_CBC:
1250 /**< op + passthrough for iv */
1253 CCP_LOG_ERR("Unsupported cipher algo %d",
1254 session->cipher.algo);
/*
 * Return the descriptor count needed for the auth portion of a session.
 * Plain SHA needs an op plus LSB passthroughs; HMAC needs the full
 * two-pass (ipad/opad) sequence unless auth_opt selects CPU auth;
 * SHA3 runs directly to host memory; AES-CMAC may need an extra
 * descriptor for the padded final block. Count assignments are missing
 * from this extraction.
 */
1260 ccp_auth_slot(struct ccp_session *session)
1264 switch (session->auth.algo) {
1265 case CCP_AUTH_ALGO_SHA1:
1266 case CCP_AUTH_ALGO_SHA224:
1267 case CCP_AUTH_ALGO_SHA256:
1268 case CCP_AUTH_ALGO_SHA384:
1269 case CCP_AUTH_ALGO_SHA512:
1271 /**< op + lsb passthrough cpy to/from*/
1273 case CCP_AUTH_ALGO_MD5_HMAC:
1275 case CCP_AUTH_ALGO_SHA1_HMAC:
1276 case CCP_AUTH_ALGO_SHA224_HMAC:
1277 case CCP_AUTH_ALGO_SHA256_HMAC:
/* auth_opt == 0: HMAC done on the CCP engine, so slots are needed */
1278 if (session->auth_opt == 0)
1281 case CCP_AUTH_ALGO_SHA384_HMAC:
1282 case CCP_AUTH_ALGO_SHA512_HMAC:
1284 * 1. Load PHash1 = H(k ^ ipad); to LSB
1285 * 2. generate IHash = H(hash on meassage with PHash1
1287 * 3. Retrieve IHash 2 slots for 384/512
1288 * 4. Load Phash2 = H(k ^ opad); to LSB
1289 * 5. generate FHash = H(hash on Ihash with Phash2
1291 * 6. Retrieve HMAC output from LSB to host memory
1293 if (session->auth_opt == 0)
1296 case CCP_AUTH_ALGO_SHA3_224:
1297 case CCP_AUTH_ALGO_SHA3_256:
1298 case CCP_AUTH_ALGO_SHA3_384:
1299 case CCP_AUTH_ALGO_SHA3_512:
1301 /**< only op ctx and dst in host memory*/
1303 case CCP_AUTH_ALGO_SHA3_224_HMAC:
1304 case CCP_AUTH_ALGO_SHA3_256_HMAC:
1307 case CCP_AUTH_ALGO_SHA3_384_HMAC:
1308 case CCP_AUTH_ALGO_SHA3_512_HMAC:
1311 * 1. Op to Perform Ihash
1312 * 2. Retrieve result from LSB to host memory
1313 * 3. Perform final hash
1316 case CCP_AUTH_ALGO_AES_CMAC:
1320 * extra descriptor in padding case
1321 * (k1/k2(255:128) with iv(127:0))
1326 CCP_LOG_ERR("Unsupported auth algo %d",
1327 session->auth.algo);
/*
 * Return the descriptor count for a combined (AEAD) session.
 * Only AES-GCM is supported; the inner switch sizes the GHASH/GCTR
 * sequence. Count assignments are missing from this extraction.
 */
1334 ccp_aead_slot(struct ccp_session *session)
1338 switch (session->aead_algo) {
1339 case RTE_CRYPTO_AEAD_AES_GCM:
1342 CCP_LOG_ERR("Unsupported aead algo %d",
1343 session->aead_algo);
1345 switch (session->auth.algo) {
1346 case CCP_AUTH_ALGO_AES_GCM:
1352 * 4. Reload passthru
1357 CCP_LOG_ERR("Unsupported combined auth ALGO %d",
1358 session->auth.algo);
/*
 * Total CCP descriptor requirement for one op on this session,
 * dispatched by cmd_id: cipher-only, auth-only, cipher+hash (sum of
 * both), or combined AEAD.
 */
1364 ccp_compute_slot_count(struct ccp_session *session)
1368 switch (session->cmd_id) {
1369 case CCP_CMD_CIPHER:
1370 count = ccp_cipher_slot(session);
1373 count = ccp_auth_slot(session);
1375 case CCP_CMD_CIPHER_HASH:
1376 case CCP_CMD_HASH_CIPHER:
1377 count = ccp_cipher_slot(session);
1378 count += ccp_auth_slot(session);
1380 case CCP_CMD_COMBINED:
1381 count = ccp_aead_slot(session);
1384 CCP_LOG_ERR("Unsupported cmd_id");
/*
 * Map a CCP HMAC auth algo to the matching OpenSSL EVP_MD, written
 * through *algo. Used by the CPU-auth (auth_opt) path.
 */
1392 algo_select(int sessalgo,
1393 const EVP_MD **algo)
1398 case CCP_AUTH_ALGO_MD5_HMAC:
1401 case CCP_AUTH_ALGO_SHA1_HMAC:
1404 case CCP_AUTH_ALGO_SHA224_HMAC:
1405 *algo = EVP_sha224();
1407 case CCP_AUTH_ALGO_SHA256_HMAC:
1408 *algo = EVP_sha256();
1410 case CCP_AUTH_ALGO_SHA384_HMAC:
1411 *algo = EVP_sha384();
1413 case CCP_AUTH_ALGO_SHA512_HMAC:
1414 *algo = EVP_sha512();
/*
 * Compute an HMAC over src on the CPU via OpenSSL EVP_DigestSign and
 * copy the first d_len bytes of the digest into dst.
 * temp_dst (64 bytes) holds the full digest; 64 covers up to SHA-512.
 * Jumps to process_auth_err on any OpenSSL failure (error path logs
 * and, presumably, returns non-zero — tail missing from extraction).
 */
1424 process_cpu_auth_hmac(uint8_t *src, uint8_t *dst,
1425 __rte_unused uint8_t *iv,
1433 unsigned char temp_dst[64];
1435 if (EVP_DigestSignInit(ctx, NULL, algo, NULL, pkey) <= 0)
1436 goto process_auth_err;
1438 if (EVP_DigestSignUpdate(ctx, (char *)src, srclen) <= 0)
1439 goto process_auth_err;
1441 if (EVP_DigestSignFinal(ctx, temp_dst, &dstlen) <= 0)
1442 goto process_auth_err;
/* copy only the session's digest length, not the full hash */
1444 memcpy(dst, temp_dst, d_len);
1447 CCP_LOG_ERR("Process cpu auth failed");
/*
 * CPU-side (OpenSSL) auth path for one crypto op.
 * GENERATE: writes the digest into the op's digest buffer (or inline
 * after the auth region when digest.data is NULL).
 * VERIFY: computes into qp->temp_digest and memcmp()s against the
 * op-supplied digest, setting AUTH_FAILED on mismatch.
 */
1451 static int cpu_crypto_auth(struct ccp_qp *qp,
1452 struct rte_crypto_op *op,
1453 struct ccp_session *sess,
1458 struct rte_mbuf *mbuf_src, *mbuf_dst;
1459 const EVP_MD *algo = NULL;
1462 algo_select(sess->auth.algo, &algo);
/* one-shot HMAC key object; freed below */
1463 pkey = EVP_PKEY_new_mac_key(EVP_PKEY_HMAC, NULL, sess->auth.key,
1464 sess->auth.key_length);
1465 mbuf_src = op->sym->m_src;
1466 mbuf_dst = op->sym->m_dst ? op->sym->m_dst : op->sym->m_src;
1467 srclen = op->sym->auth.data.length;
1468 src = rte_pktmbuf_mtod_offset(mbuf_src, uint8_t *,
1469 op->sym->auth.data.offset);
/* verify: compute into a scratch digest so the reference is untouched */
1471 if (sess->auth.op == CCP_AUTH_OP_VERIFY) {
1472 dst = qp->temp_digest;
1474 dst = op->sym->auth.digest.data;
1476 dst = rte_pktmbuf_mtod_offset(mbuf_dst, uint8_t *,
1477 op->sym->auth.data.offset +
1478 sess->auth.digest_length);
1481 status = process_cpu_auth_hmac(src, dst, NULL,
1485 sess->auth.digest_length);
1487 op->status = RTE_CRYPTO_OP_STATUS_ERROR;
1491 if (sess->auth.op == CCP_AUTH_OP_VERIFY) {
1492 if (memcmp(dst, op->sym->auth.digest.data,
1493 sess->auth.digest_length) != 0) {
1494 op->status = RTE_CRYPTO_OP_STATUS_AUTH_FAILED;
1496 op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
1499 op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
1501 EVP_PKEY_free(pkey);
/*
 * Build one PASSTHRU descriptor moving pst->len bytes between system
 * memory and the CCP local storage block (LSB).
 * Direction is chosen by pst->dir (the branch condition is missing
 * from this extraction): one arm is SYSTEM -> SB, the other SB ->
 * SYSTEM. SB addresses are LSB slot offsets, so their HI word is 0.
 * Only advances qidx; the caller rings the queue doorbell.
 */
1506 ccp_perform_passthru(struct ccp_passthru *pst,
1507 struct ccp_queue *cmd_q)
1509 struct ccp_desc *desc;
1510 union ccp_function function;
1512 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1514 CCP_CMD_ENGINE(desc) = CCP_ENGINE_PASSTHRU;
1516 CCP_CMD_SOC(desc) = 0;
1517 CCP_CMD_IOC(desc) = 0;
1518 CCP_CMD_INIT(desc) = 0;
1519 CCP_CMD_EOM(desc) = 0;
1520 CCP_CMD_PROT(desc) = 0;
1523 CCP_PT_BYTESWAP(&function) = pst->byte_swap;
1524 CCP_PT_BITWISE(&function) = pst->bit_mod;
1525 CCP_CMD_FUNCTION(desc) = function.raw;
1527 CCP_CMD_LEN(desc) = pst->len;
/* host -> LSB copy */
1530 CCP_CMD_SRC_LO(desc) = (uint32_t)(pst->src_addr);
1531 CCP_CMD_SRC_HI(desc) = high32_value(pst->src_addr);
1532 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1534 CCP_CMD_DST_LO(desc) = (uint32_t)(pst->dest_addr);
1535 CCP_CMD_DST_HI(desc) = 0;
1536 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SB;
/* bitwise ops (e.g. key XOR) source their mask from the key LSB slot */
1538 if (pst->bit_mod != CCP_PASSTHRU_BITWISE_NOOP)
1539 CCP_CMD_LSB_ID(desc) = cmd_q->sb_key;
/* LSB -> host copy */
1542 CCP_CMD_SRC_LO(desc) = (uint32_t)(pst->src_addr);
1543 CCP_CMD_SRC_HI(desc) = 0;
1544 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SB;
1546 CCP_CMD_DST_LO(desc) = (uint32_t)(pst->dest_addr);
1547 CCP_CMD_DST_HI(desc) = high32_value(pst->dest_addr);
1548 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1551 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
/*
 * Enqueue the full HMAC sequence on the CCP SHA engine:
 *   1. load precomputed PHash1 = H(key ^ ipad) into the LSB,
 *   2. SHA op over the message -> intermediate hash (IHash) in LSB,
 *   3. copy IHash out to scratch space appended to the mbuf
 *      (two LSB slots for SHA-384/512, one otherwise),
 *   4. load precomputed PHash2 = H(key ^ opad) into the LSB,
 *   5. SHA op over IHash -> final HMAC in LSB,
 *   6. copy the HMAC back to host memory.
 * session->auth.pre_compute holds PHash1 followed by PHash2, each
 * ctx_len bytes.
 */
1555 ccp_perform_hmac(struct rte_crypto_op *op,
1556 struct ccp_queue *cmd_q)
1559 struct ccp_session *session;
1560 union ccp_function function;
1561 struct ccp_desc *desc;
1563 phys_addr_t src_addr, dest_addr, dest_addr_t;
1564 struct ccp_passthru pst;
1565 uint64_t auth_msg_bits;
1569 session = (struct ccp_session *)get_sym_session_private_data(
1571 ccp_cryptodev_driver_id);
1572 addr = session->auth.pre_compute;
1574 src_addr = rte_pktmbuf_mtophys_offset(op->sym->m_src,
1575 op->sym->auth.data.offset);
/* scratch area appended to the mbuf for intermediate/final results */
1576 append_ptr = (void *)rte_pktmbuf_append(op->sym->m_src,
1577 session->auth.ctx_len);
1578 dest_addr = (phys_addr_t)rte_mem_virt2phy(append_ptr);
1579 dest_addr_t = dest_addr;
1581 /** Load PHash1 to LSB*/
1582 pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *)addr);
1583 pst.dest_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1584 pst.len = session->auth.ctx_len;
1586 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1587 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1588 ccp_perform_passthru(&pst, cmd_q);
1590 /**sha engine command descriptor for IntermediateHash*/
1592 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1593 memset(desc, 0, Q_DESC_SIZE);
1595 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
1597 CCP_CMD_SOC(desc) = 0;
1598 CCP_CMD_IOC(desc) = 0;
1599 CCP_CMD_INIT(desc) = 1;
1600 CCP_CMD_EOM(desc) = 1;
1601 CCP_CMD_PROT(desc) = 0;
1604 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
1605 CCP_CMD_FUNCTION(desc) = function.raw;
1607 CCP_CMD_LEN(desc) = op->sym->auth.data.length;
/* total bit length includes the already-consumed ipad block */
1608 auth_msg_bits = (op->sym->auth.data.length +
1609 session->auth.block_size) * 8;
1611 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
1612 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
1613 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1615 CCP_CMD_LSB_ID(desc) = cmd_q->sb_sha;
1616 CCP_CMD_SHA_LO(desc) = ((uint32_t)auth_msg_bits);
1617 CCP_CMD_SHA_HI(desc) = high32_value(auth_msg_bits);
1619 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
/* kick the queue so the SHA op completes before IHash retrieval */
1623 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1624 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1625 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1626 cmd_q->qcontrol | CMD_Q_RUN);
1628 /* Intermediate Hash value retrieve */
1629 if ((session->auth.ut.sha_type == CCP_SHA_TYPE_384) ||
1630 (session->auth.ut.sha_type == CCP_SHA_TYPE_512)) {
/* 384/512 state spans two LSB slots; copy high slot then low slot */
1633 (phys_addr_t)((cmd_q->sb_sha + 1) * CCP_SB_BYTES);
1634 pst.dest_addr = dest_addr_t;
1635 pst.len = CCP_SB_BYTES;
1637 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1638 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1639 ccp_perform_passthru(&pst, cmd_q);
1641 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1642 pst.dest_addr = dest_addr_t + CCP_SB_BYTES;
1643 pst.len = CCP_SB_BYTES;
1645 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1646 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1647 ccp_perform_passthru(&pst, cmd_q);
/* <= SHA-256: single-slot state */
1650 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1651 pst.dest_addr = dest_addr_t;
1652 pst.len = session->auth.ctx_len;
1654 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1655 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1656 ccp_perform_passthru(&pst, cmd_q);
1660 /** Load PHash2 to LSB*/
/* PHash2 sits ctx_len bytes after PHash1 in pre_compute */
1661 addr += session->auth.ctx_len;
1662 pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *)addr);
1663 pst.dest_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1664 pst.len = session->auth.ctx_len;
1666 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1667 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1668 ccp_perform_passthru(&pst, cmd_q);
1670 /**sha engine command descriptor for FinalHash*/
/* auth.offset skips leading pad in the stored context before hashing */
1671 dest_addr_t += session->auth.offset;
1673 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1674 memset(desc, 0, Q_DESC_SIZE);
1676 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
1678 CCP_CMD_SOC(desc) = 0;
1679 CCP_CMD_IOC(desc) = 0;
1680 CCP_CMD_INIT(desc) = 1;
1681 CCP_CMD_EOM(desc) = 1;
1682 CCP_CMD_PROT(desc) = 0;
1685 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
1686 CCP_CMD_FUNCTION(desc) = function.raw;
1688 CCP_CMD_LEN(desc) = (session->auth.ctx_len -
1689 session->auth.offset);
1690 auth_msg_bits = (session->auth.block_size +
1691 session->auth.ctx_len -
1692 session->auth.offset) * 8;
1694 CCP_CMD_SRC_LO(desc) = (uint32_t)(dest_addr_t);
1695 CCP_CMD_SRC_HI(desc) = high32_value(dest_addr_t);
1696 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1698 CCP_CMD_LSB_ID(desc) = cmd_q->sb_sha;
1699 CCP_CMD_SHA_LO(desc) = ((uint32_t)auth_msg_bits);
1700 CCP_CMD_SHA_HI(desc) = high32_value(auth_msg_bits);
1702 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
1706 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1707 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1708 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1709 cmd_q->qcontrol | CMD_Q_RUN);
1711 /* Retrieve hmac output */
1712 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1713 pst.dest_addr = dest_addr;
1714 pst.len = session->auth.ctx_len;
1716 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
/* 384/512 context is already in the expected byte order; smaller
 * digests need the 256-bit byteswap on the way out */
1717 if ((session->auth.ut.sha_type == CCP_SHA_TYPE_384) ||
1718 (session->auth.ut.sha_type == CCP_SHA_TYPE_512))
1719 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1721 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1722 ccp_perform_passthru(&pst, cmd_q);
1724 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
/*
 * Enqueue a plain (non-HMAC) SHA digest op: load the algorithm's
 * initial hash state into the LSB, run one SHA descriptor over the
 * auth region, then copy the resulting state back to scratch space
 * appended to the mbuf.
 */
1730 ccp_perform_sha(struct rte_crypto_op *op,
1731 struct ccp_queue *cmd_q)
1733 struct ccp_session *session;
1734 union ccp_function function;
1735 struct ccp_desc *desc;
1737 phys_addr_t src_addr, dest_addr;
1738 struct ccp_passthru pst;
1740 uint64_t auth_msg_bits;
1742 session = (struct ccp_session *)get_sym_session_private_data(
1744 ccp_cryptodev_driver_id);
1746 src_addr = rte_pktmbuf_mtophys_offset(op->sym->m_src,
1747 op->sym->auth.data.offset);
1749 append_ptr = (void *)rte_pktmbuf_append(op->sym->m_src,
1750 session->auth.ctx_len);
1751 dest_addr = (phys_addr_t)rte_mem_virt2phy(append_ptr);
1753 /** Passthru sha context*/
/* source is the session's initial SHA context (truncated in this
 * extraction) */
1755 pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *)
1757 pst.dest_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1758 pst.len = session->auth.ctx_len;
1760 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1761 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1762 ccp_perform_passthru(&pst, cmd_q);
1764 /**prepare sha command descriptor*/
1766 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1767 memset(desc, 0, Q_DESC_SIZE);
1769 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
1771 CCP_CMD_SOC(desc) = 0;
1772 CCP_CMD_IOC(desc) = 0;
1773 CCP_CMD_INIT(desc) = 1;
1774 CCP_CMD_EOM(desc) = 1;
1775 CCP_CMD_PROT(desc) = 0;
1778 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
1779 CCP_CMD_FUNCTION(desc) = function.raw;
1781 CCP_CMD_LEN(desc) = op->sym->auth.data.length;
/* message length in bits, required by SHA padding */
1782 auth_msg_bits = op->sym->auth.data.length * 8;
1784 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
1785 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
1786 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1788 CCP_CMD_LSB_ID(desc) = cmd_q->sb_sha;
1789 CCP_CMD_SHA_LO(desc) = ((uint32_t)auth_msg_bits);
1790 CCP_CMD_SHA_HI(desc) = high32_value(auth_msg_bits);
1792 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
/* ring doorbell: advance tail and re-run the queue */
1796 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1797 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1798 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1799 cmd_q->qcontrol | CMD_Q_RUN);
1801 /* Hash value retrieve */
1802 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1803 pst.dest_addr = dest_addr;
1804 pst.len = session->auth.ctx_len;
1806 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
/* same byteswap rule as HMAC retrieval: only <= SHA-256 needs a swap */
1807 if ((session->auth.ut.sha_type == CCP_SHA_TYPE_384) ||
1808 (session->auth.ut.sha_type == CCP_SHA_TYPE_512))
1809 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1811 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1812 ccp_perform_passthru(&pst, cmd_q);
1814 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
/*
 * Enqueue SHA3 HMAC as two SHA3 engine descriptors:
 *   desc1: inner hash over the message, keyed context (ipad state)
 *          fed via the KEY pointer, result left in LSB;
 *   desc2: outer hash over the retrieved inner digest, using the
 *          opad context located CCP_SHA3_CTX_SIZE after the first.
 * The intermediate digest is staged in the second half of scratch
 * space appended to the mbuf (dest_addr_t); the final HMAC lands at
 * dest_addr.
 */
1820 ccp_perform_sha3_hmac(struct rte_crypto_op *op,
1821 struct ccp_queue *cmd_q)
1823 struct ccp_session *session;
1824 struct ccp_passthru pst;
1825 union ccp_function function;
1826 struct ccp_desc *desc;
1827 uint8_t *append_ptr;
1829 phys_addr_t src_addr, dest_addr, ctx_paddr, dest_addr_t;
1831 session = (struct ccp_session *)get_sym_session_private_data(
1833 ccp_cryptodev_driver_id);
1835 src_addr = rte_pktmbuf_mtophys_offset(op->sym->m_src,
1836 op->sym->auth.data.offset);
1837 append_ptr = (uint8_t *)rte_pktmbuf_append(op->sym->m_src,
1838 session->auth.ctx_len);
1840 CCP_LOG_ERR("CCP MBUF append failed\n");
1843 dest_addr = (phys_addr_t)rte_mem_virt2phy((void *)append_ptr);
1844 dest_addr_t = dest_addr + (session->auth.ctx_len / 2);
1845 ctx_paddr = (phys_addr_t)rte_mem_virt2phy((void
1846 *)session->auth.pre_compute);
1847 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1848 memset(desc, 0, Q_DESC_SIZE);
1850 /*desc1 for SHA3-Ihash operation */
1851 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
1852 CCP_CMD_INIT(desc) = 1;
1853 CCP_CMD_EOM(desc) = 1;
1856 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
1857 CCP_CMD_FUNCTION(desc) = function.raw;
1858 CCP_CMD_LEN(desc) = op->sym->auth.data.length;
1860 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
1861 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
1862 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
/* inner digest written to the queue's SHA LSB slot */
1864 CCP_CMD_DST_LO(desc) = (cmd_q->sb_sha * CCP_SB_BYTES);
1865 CCP_CMD_DST_HI(desc) = 0;
1866 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SB;
/* for SHA3 the precomputed keyed context rides in the KEY field */
1868 CCP_CMD_KEY_LO(desc) = ((uint32_t)ctx_paddr);
1869 CCP_CMD_KEY_HI(desc) = high32_value(ctx_paddr);
1870 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1872 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
1875 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1876 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1877 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1878 cmd_q->qcontrol | CMD_Q_RUN);
1880 /* Intermediate Hash value retrieve */
1881 if ((session->auth.ut.sha_type == CCP_SHA3_TYPE_384) ||
1882 (session->auth.ut.sha_type == CCP_SHA3_TYPE_512)) {
/* 384/512 digests span two LSB slots */
1885 (phys_addr_t)((cmd_q->sb_sha + 1) * CCP_SB_BYTES);
1886 pst.dest_addr = dest_addr_t;
1887 pst.len = CCP_SB_BYTES;
1889 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1890 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1891 ccp_perform_passthru(&pst, cmd_q);
1893 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1894 pst.dest_addr = dest_addr_t + CCP_SB_BYTES;
1895 pst.len = CCP_SB_BYTES;
1897 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1898 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1899 ccp_perform_passthru(&pst, cmd_q);
1902 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1903 pst.dest_addr = dest_addr_t;
1904 pst.len = CCP_SB_BYTES;
1906 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1907 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1908 ccp_perform_passthru(&pst, cmd_q);
1911 /**sha engine command descriptor for FinalHash*/
/* advance to the opad (outer) precomputed context */
1912 ctx_paddr += CCP_SHA3_CTX_SIZE;
1913 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1914 memset(desc, 0, Q_DESC_SIZE);
1916 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
1917 CCP_CMD_INIT(desc) = 1;
1918 CCP_CMD_EOM(desc) = 1;
1921 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
1922 CCP_CMD_FUNCTION(desc) = function.raw;
/* digest bytes are right-aligned in the slot(s); skip the pad so the
 * outer hash reads exactly digest-size bytes */
1924 if (session->auth.ut.sha_type == CCP_SHA3_TYPE_224) {
1925 dest_addr_t += (CCP_SB_BYTES - SHA224_DIGEST_SIZE);
1926 CCP_CMD_LEN(desc) = SHA224_DIGEST_SIZE;
1927 } else if (session->auth.ut.sha_type == CCP_SHA3_TYPE_256) {
1928 CCP_CMD_LEN(desc) = SHA256_DIGEST_SIZE;
1929 } else if (session->auth.ut.sha_type == CCP_SHA3_TYPE_384) {
1930 dest_addr_t += (2 * CCP_SB_BYTES - SHA384_DIGEST_SIZE);
1931 CCP_CMD_LEN(desc) = SHA384_DIGEST_SIZE;
1933 CCP_CMD_LEN(desc) = SHA512_DIGEST_SIZE;
1936 CCP_CMD_SRC_LO(desc) = ((uint32_t)dest_addr_t);
1937 CCP_CMD_SRC_HI(desc) = high32_value(dest_addr_t);
1938 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1940 CCP_CMD_DST_LO(desc) = (uint32_t)dest_addr;
1941 CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
1942 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1944 CCP_CMD_KEY_LO(desc) = ((uint32_t)ctx_paddr);
1945 CCP_CMD_KEY_HI(desc) = high32_value(ctx_paddr);
1946 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1948 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
1951 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1952 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1953 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1954 cmd_q->qcontrol | CMD_Q_RUN);
1956 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
/*
 * Enqueue a single plain-SHA3 descriptor: message from the mbuf auth
 * region, digest written directly to scratch space appended to the
 * mbuf, and the session's SHA3 context supplied via the KEY pointer.
 * Unlike SHA1/2 there is no LSB staging or retrieval passthrough.
 */
1961 ccp_perform_sha3(struct rte_crypto_op *op,
1962 struct ccp_queue *cmd_q)
1964 struct ccp_session *session;
1965 union ccp_function function;
1966 struct ccp_desc *desc;
1967 uint8_t *ctx_addr, *append_ptr;
1969 phys_addr_t src_addr, dest_addr, ctx_paddr;
1971 session = (struct ccp_session *)get_sym_session_private_data(
1973 ccp_cryptodev_driver_id);
1975 src_addr = rte_pktmbuf_mtophys_offset(op->sym->m_src,
1976 op->sym->auth.data.offset);
1977 append_ptr = (uint8_t *)rte_pktmbuf_append(op->sym->m_src,
1978 session->auth.ctx_len);
1980 CCP_LOG_ERR("CCP MBUF append failed\n");
1983 dest_addr = (phys_addr_t)rte_mem_virt2phy((void *)append_ptr);
1984 ctx_addr = session->auth.sha3_ctx;
1985 ctx_paddr = (phys_addr_t)rte_mem_virt2phy((void *)ctx_addr);
1987 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1988 memset(desc, 0, Q_DESC_SIZE);
1990 /* prepare desc for SHA3 operation */
1991 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
1992 CCP_CMD_INIT(desc) = 1;
1993 CCP_CMD_EOM(desc) = 1;
1996 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
1997 CCP_CMD_FUNCTION(desc) = function.raw;
1999 CCP_CMD_LEN(desc) = op->sym->auth.data.length;
2001 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
2002 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
2003 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
/* digest goes straight to host memory */
2005 CCP_CMD_DST_LO(desc) = ((uint32_t)dest_addr);
2006 CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
2007 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2009 CCP_CMD_KEY_LO(desc) = ((uint32_t)ctx_paddr);
2010 CCP_CMD_KEY_HI(desc) = high32_value(ctx_paddr);
2011 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2013 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2017 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2018 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2019 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2020 cmd_q->qcontrol | CMD_Q_RUN);
2022 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
/*
 * Enqueue AES-CMAC. Two cases on the auth length:
 *   - block-aligned: zero IV passthrough + one AES descriptor over the
 *     whole message (K1 already folded into session state — TODO
 *     confirm; key scheduling lines are outside this chunk);
 *   - unaligned: one descriptor for the aligned prefix, then the tail
 *     is copied to scratch, padded with CMAC_PAD_VALUE (0x80), and a
 *     second descriptor processes that final padded block.
 * The MAC is finally copied from the IV LSB slot to scratch memory
 * appended to the mbuf.
 */
2027 ccp_perform_aes_cmac(struct rte_crypto_op *op,
2028 struct ccp_queue *cmd_q)
2030 struct ccp_session *session;
2031 union ccp_function function;
2032 struct ccp_passthru pst;
2033 struct ccp_desc *desc;
2035 uint8_t *src_tb, *append_ptr, *ctx_addr;
2036 phys_addr_t src_addr, dest_addr, key_addr;
2037 int length, non_align_len;
2039 session = (struct ccp_session *)get_sym_session_private_data(
2041 ccp_cryptodev_driver_id);
2042 key_addr = rte_mem_virt2phy(session->auth.key_ccp);
2044 src_addr = rte_pktmbuf_mtophys_offset(op->sym->m_src,
2045 op->sym->auth.data.offset);
2046 append_ptr = (uint8_t *)rte_pktmbuf_append(op->sym->m_src,
2047 session->auth.ctx_len);
2048 dest_addr = (phys_addr_t)rte_mem_virt2phy((void *)append_ptr);
2051 CCP_AES_ENCRYPT(&function) = CCP_CIPHER_DIR_ENCRYPT;
2052 CCP_AES_MODE(&function) = session->auth.um.aes_mode;
2053 CCP_AES_TYPE(&function) = session->auth.ut.aes_type;
2055 if (op->sym->auth.data.length % session->auth.block_size == 0) {
/* aligned case: start from an all-zero IV in the LSB */
2057 ctx_addr = session->auth.pre_compute;
2058 memset(ctx_addr, 0, AES_BLOCK_SIZE);
2059 pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *)ctx_addr);
2060 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2061 pst.len = CCP_SB_BYTES;
2063 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2064 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
2065 ccp_perform_passthru(&pst, cmd_q);
2067 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2068 memset(desc, 0, Q_DESC_SIZE);
2070 /* prepare desc for aes-cmac command */
2071 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2072 CCP_CMD_EOM(desc) = 1;
2073 CCP_CMD_FUNCTION(desc) = function.raw;
2075 CCP_CMD_LEN(desc) = op->sym->auth.data.length;
2076 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
2077 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
2078 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2080 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2081 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2082 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2083 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2085 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2090 (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2091 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2092 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2093 cmd_q->qcontrol | CMD_Q_RUN);
/* unaligned case: zero the IV slot from the second pre_compute block */
2095 ctx_addr = session->auth.pre_compute + CCP_SB_BYTES;
2096 memset(ctx_addr, 0, AES_BLOCK_SIZE);
2097 pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *)ctx_addr);
2098 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2099 pst.len = CCP_SB_BYTES;
2101 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2102 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
2103 ccp_perform_passthru(&pst, cmd_q);
/* split into the AES-block-aligned prefix and the leftover tail */
2105 length = (op->sym->auth.data.length / AES_BLOCK_SIZE);
2106 length *= AES_BLOCK_SIZE;
2107 non_align_len = op->sym->auth.data.length - length;
2108 /* prepare desc for aes-cmac command */
2110 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2111 memset(desc, 0, Q_DESC_SIZE);
2113 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2114 CCP_CMD_INIT(desc) = 1;
2115 CCP_CMD_FUNCTION(desc) = function.raw;
2117 CCP_CMD_LEN(desc) = length;
2118 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
2119 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
2120 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2122 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2123 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2124 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2125 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2127 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
/* build the padded final block in scratch (after the MAC slot) */
2130 append_ptr = append_ptr + CCP_SB_BYTES;
2131 memset(append_ptr, 0, AES_BLOCK_SIZE);
2132 src_tb = rte_pktmbuf_mtod_offset(op->sym->m_src,
2134 op->sym->auth.data.offset +
2136 rte_memcpy(append_ptr, src_tb, non_align_len);
/* 10* padding: single marker byte after the message tail */
2137 append_ptr[non_align_len] = CMAC_PAD_VALUE;
2139 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2140 memset(desc, 0, Q_DESC_SIZE);
2142 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2143 CCP_CMD_EOM(desc) = 1;
2144 CCP_CMD_FUNCTION(desc) = function.raw;
2145 CCP_CMD_LEN(desc) = AES_BLOCK_SIZE;
2147 CCP_CMD_SRC_LO(desc) = ((uint32_t)(dest_addr + CCP_SB_BYTES));
2148 CCP_CMD_SRC_HI(desc) = high32_value(dest_addr + CCP_SB_BYTES);
2149 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2151 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2152 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2153 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2154 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2156 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2160 (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2161 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2162 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2163 cmd_q->qcontrol | CMD_Q_RUN);
2165 /* Retrieve result */
2166 pst.dest_addr = dest_addr;
2167 pst.src_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2168 pst.len = CCP_SB_BYTES;
2170 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2171 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
2172 ccp_perform_passthru(&pst, cmd_q);
2174 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
/*
 * Enqueue one AES cipher descriptor (CBC/CTR/ECB).
 * Non-ECB modes first stage the IV into the LSB via passthrough:
 * CTR places the IV after the session nonce (CCP expects
 * nonce||counter layout — TODO confirm against nonce setup code,
 * which is outside this chunk) and sets the counter-wrap size field
 * to 0x1F; other modes right-align the IV in a per-batch lsb_buf
 * slot. In-place operation when m_dst is NULL. Does not ring the
 * doorbell — the batch submitter does.
 */
2179 ccp_perform_aes(struct rte_crypto_op *op,
2180 struct ccp_queue *cmd_q,
2181 struct ccp_batch_info *b_info)
2183 struct ccp_session *session;
2184 union ccp_function function;
2186 struct ccp_passthru pst = {0};
2187 struct ccp_desc *desc;
2188 phys_addr_t src_addr, dest_addr, key_addr;
2191 session = (struct ccp_session *)get_sym_session_private_data(
2193 ccp_cryptodev_driver_id);
2196 iv = rte_crypto_op_ctod_offset(op, uint8_t *, session->iv.offset);
2197 if (session->cipher.um.aes_mode != CCP_AES_MODE_ECB) {
2198 if (session->cipher.um.aes_mode == CCP_AES_MODE_CTR) {
2199 rte_memcpy(session->cipher.nonce + AES_BLOCK_SIZE,
2200 iv, session->iv.length);
2201 pst.src_addr = (phys_addr_t)session->cipher.nonce_phys;
/* 0x1F: AES size field for CTR counter width */
2202 CCP_AES_SIZE(&function) = 0x1F;
2205 &(b_info->lsb_buf[b_info->lsb_buf_idx*CCP_SB_BYTES]);
/* right-align the IV within the 32-byte LSB slot */
2206 rte_memcpy(lsb_buf +
2207 (CCP_SB_BYTES - session->iv.length),
2208 iv, session->iv.length);
2209 pst.src_addr = b_info->lsb_buf_phys +
2210 (b_info->lsb_buf_idx * CCP_SB_BYTES);
2211 b_info->lsb_buf_idx++;
2214 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2215 pst.len = CCP_SB_BYTES;
2217 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2218 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
2219 ccp_perform_passthru(&pst, cmd_q);
2222 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2224 src_addr = rte_pktmbuf_mtophys_offset(op->sym->m_src,
2225 op->sym->cipher.data.offset);
2226 if (likely(op->sym->m_dst != NULL))
2227 dest_addr = rte_pktmbuf_mtophys_offset(op->sym->m_dst,
2228 op->sym->cipher.data.offset);
2230 dest_addr = src_addr;
2231 key_addr = session->cipher.key_phys;
2233 /* prepare desc for aes command */
2234 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2235 CCP_CMD_INIT(desc) = 1;
2236 CCP_CMD_EOM(desc) = 1;
2238 CCP_AES_ENCRYPT(&function) = session->cipher.dir;
2239 CCP_AES_MODE(&function) = session->cipher.um.aes_mode;
2240 CCP_AES_TYPE(&function) = session->cipher.ut.aes_type;
2241 CCP_CMD_FUNCTION(desc) = function.raw;
2243 CCP_CMD_LEN(desc) = op->sym->cipher.data.length;
2245 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
2246 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
2247 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2249 CCP_CMD_DST_LO(desc) = ((uint32_t)dest_addr);
2250 CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
2251 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2253 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2254 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2255 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
/* ECB has no IV, so no LSB slot reference */
2257 if (session->cipher.um.aes_mode != CCP_AES_MODE_ECB)
2258 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2260 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2261 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
/*
 * Enqueue one 3DES cipher descriptor. Only CBC mode is supported:
 * the IV is right-aligned in a per-batch lsb_buf slot and staged to
 * the IV LSB; CFB/ECB are rejected. In-place when m_dst is NULL.
 * This path rings the doorbell itself (unlike ccp_perform_aes —
 * TODO confirm this asymmetry is intended).
 */
2266 ccp_perform_3des(struct rte_crypto_op *op,
2267 struct ccp_queue *cmd_q,
2268 struct ccp_batch_info *b_info)
2270 struct ccp_session *session;
2271 union ccp_function function;
2272 unsigned char *lsb_buf;
2273 struct ccp_passthru pst;
2274 struct ccp_desc *desc;
2277 phys_addr_t src_addr, dest_addr, key_addr;
2279 session = (struct ccp_session *)get_sym_session_private_data(
2281 ccp_cryptodev_driver_id);
2283 iv = rte_crypto_op_ctod_offset(op, uint8_t *, session->iv.offset);
2284 switch (session->cipher.um.des_mode) {
2285 case CCP_DES_MODE_CBC:
2286 lsb_buf = &(b_info->lsb_buf[b_info->lsb_buf_idx*CCP_SB_BYTES]);
2287 b_info->lsb_buf_idx++;
/* right-align the 8-byte IV in the 32-byte slot */
2289 rte_memcpy(lsb_buf + (CCP_SB_BYTES - session->iv.length),
2290 iv, session->iv.length);
2292 pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *) lsb_buf);
2293 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2294 pst.len = CCP_SB_BYTES;
2296 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2297 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
2298 ccp_perform_passthru(&pst, cmd_q);
2300 case CCP_DES_MODE_CFB:
2301 case CCP_DES_MODE_ECB:
2302 CCP_LOG_ERR("Unsupported DES cipher mode");
2306 src_addr = rte_pktmbuf_mtophys_offset(op->sym->m_src,
2307 op->sym->cipher.data.offset);
2308 if (unlikely(op->sym->m_dst != NULL))
2310 rte_pktmbuf_mtophys_offset(op->sym->m_dst,
2311 op->sym->cipher.data.offset);
2313 dest_addr = src_addr;
2315 key_addr = rte_mem_virt2phy(session->cipher.key_ccp);
2317 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2319 memset(desc, 0, Q_DESC_SIZE);
2321 /* prepare desc for des command */
2322 CCP_CMD_ENGINE(desc) = CCP_ENGINE_3DES;
2324 CCP_CMD_SOC(desc) = 0;
2325 CCP_CMD_IOC(desc) = 0;
2326 CCP_CMD_INIT(desc) = 1;
2327 CCP_CMD_EOM(desc) = 1;
2328 CCP_CMD_PROT(desc) = 0;
2331 CCP_DES_ENCRYPT(&function) = session->cipher.dir;
2332 CCP_DES_MODE(&function) = session->cipher.um.des_mode;
2333 CCP_DES_TYPE(&function) = session->cipher.ut.des_type;
2334 CCP_CMD_FUNCTION(desc) = function.raw;
2336 CCP_CMD_LEN(desc) = op->sym->cipher.data.length;
2338 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
2339 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
2340 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2342 CCP_CMD_DST_LO(desc) = ((uint32_t)dest_addr);
2343 CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
2344 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2346 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2347 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2348 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
/* non-zero des_mode (i.e. not ECB) needs the IV LSB slot */
2350 if (session->cipher.um.des_mode)
2351 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2353 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2357 /* Write the new tail address back to the queue register */
2358 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2359 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2360 /* Turn the queue back on using our cached control register */
2361 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2362 cmd_q->qcontrol | CMD_Q_RUN);
2364 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
2369 ccp_perform_aes_gcm(struct rte_crypto_op *op, struct ccp_queue *cmd_q)
2371 struct ccp_session *session;
2372 union ccp_function function;
2374 struct ccp_passthru pst;
2375 struct ccp_desc *desc;
2378 phys_addr_t src_addr, dest_addr, key_addr, aad_addr;
2379 phys_addr_t digest_dest_addr;
2380 int length, non_align_len;
2382 session = (struct ccp_session *)get_sym_session_private_data(
2384 ccp_cryptodev_driver_id);
2385 iv = rte_crypto_op_ctod_offset(op, uint8_t *, session->iv.offset);
2386 key_addr = session->cipher.key_phys;
2388 src_addr = rte_pktmbuf_mtophys_offset(op->sym->m_src,
2389 op->sym->aead.data.offset);
2390 if (unlikely(op->sym->m_dst != NULL))
2391 dest_addr = rte_pktmbuf_mtophys_offset(op->sym->m_dst,
2392 op->sym->aead.data.offset);
2394 dest_addr = src_addr;
2395 rte_pktmbuf_append(op->sym->m_src, session->auth.ctx_len);
2396 digest_dest_addr = op->sym->aead.digest.phys_addr;
2397 temp = (uint64_t *)(op->sym->aead.digest.data + AES_BLOCK_SIZE);
2398 *temp++ = rte_bswap64(session->auth.aad_length << 3);
2399 *temp = rte_bswap64(op->sym->aead.data.length << 3);
2401 non_align_len = op->sym->aead.data.length % AES_BLOCK_SIZE;
2402 length = CCP_ALIGN(op->sym->aead.data.length, AES_BLOCK_SIZE);
2404 aad_addr = op->sym->aead.aad.phys_addr;
2406 /* CMD1 IV Passthru */
2407 rte_memcpy(session->cipher.nonce + AES_BLOCK_SIZE, iv,
2408 session->iv.length);
2409 pst.src_addr = session->cipher.nonce_phys;
2410 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2411 pst.len = CCP_SB_BYTES;
2413 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2414 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
2415 ccp_perform_passthru(&pst, cmd_q);
2417 /* CMD2 GHASH-AAD */
2419 CCP_AES_ENCRYPT(&function) = CCP_AES_MODE_GHASH_AAD;
2420 CCP_AES_MODE(&function) = CCP_AES_MODE_GHASH;
2421 CCP_AES_TYPE(&function) = session->cipher.ut.aes_type;
2423 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2424 memset(desc, 0, Q_DESC_SIZE);
2426 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2427 CCP_CMD_INIT(desc) = 1;
2428 CCP_CMD_FUNCTION(desc) = function.raw;
2430 CCP_CMD_LEN(desc) = session->auth.aad_length;
2432 CCP_CMD_SRC_LO(desc) = ((uint32_t)aad_addr);
2433 CCP_CMD_SRC_HI(desc) = high32_value(aad_addr);
2434 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2436 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2437 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2438 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2440 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2442 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2445 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2446 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2447 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2448 cmd_q->qcontrol | CMD_Q_RUN);
2450 /* CMD3 : GCTR Plain text */
2452 CCP_AES_ENCRYPT(&function) = session->cipher.dir;
2453 CCP_AES_MODE(&function) = CCP_AES_MODE_GCTR;
2454 CCP_AES_TYPE(&function) = session->cipher.ut.aes_type;
2455 if (non_align_len == 0)
2456 CCP_AES_SIZE(&function) = (AES_BLOCK_SIZE << 3) - 1;
2458 CCP_AES_SIZE(&function) = (non_align_len << 3) - 1;
2461 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2462 memset(desc, 0, Q_DESC_SIZE);
2464 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2465 CCP_CMD_EOM(desc) = 1;
2466 CCP_CMD_FUNCTION(desc) = function.raw;
2468 CCP_CMD_LEN(desc) = length;
2470 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
2471 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
2472 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2474 CCP_CMD_DST_LO(desc) = ((uint32_t)dest_addr);
2475 CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
2476 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2478 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2479 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2480 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2482 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2484 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2487 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2488 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2489 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2490 cmd_q->qcontrol | CMD_Q_RUN);
2492 /* CMD4 : PT to copy IV */
2493 pst.src_addr = session->cipher.nonce_phys;
2494 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2495 pst.len = AES_BLOCK_SIZE;
2497 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2498 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
2499 ccp_perform_passthru(&pst, cmd_q);
2501 /* CMD5 : GHASH-Final */
2503 CCP_AES_ENCRYPT(&function) = CCP_AES_MODE_GHASH_FINAL;
2504 CCP_AES_MODE(&function) = CCP_AES_MODE_GHASH;
2505 CCP_AES_TYPE(&function) = session->cipher.ut.aes_type;
2507 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2508 memset(desc, 0, Q_DESC_SIZE);
2510 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2511 CCP_CMD_FUNCTION(desc) = function.raw;
2512 /* Last block (AAD_len || PT_len)*/
2513 CCP_CMD_LEN(desc) = AES_BLOCK_SIZE;
2515 CCP_CMD_SRC_LO(desc) = ((uint32_t)digest_dest_addr + AES_BLOCK_SIZE);
2516 CCP_CMD_SRC_HI(desc) = high32_value(digest_dest_addr + AES_BLOCK_SIZE);
2517 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2519 CCP_CMD_DST_LO(desc) = ((uint32_t)digest_dest_addr);
2520 CCP_CMD_DST_HI(desc) = high32_value(digest_dest_addr);
2521 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2523 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2524 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2525 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2527 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2529 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2532 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2533 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2534 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2535 cmd_q->qcontrol | CMD_Q_RUN);
2537 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
/*
 * Dispatch a cipher-only (CCP_CMD_CIPHER) op to the engine-specific
 * perform routine and account, in b_info->desccnt, for the number of
 * hardware descriptors the chosen algorithm enqueued.
 */
2542 ccp_crypto_cipher(struct rte_crypto_op *op,
2543 struct ccp_queue *cmd_q,
2544 struct ccp_batch_info *b_info)
2547 struct ccp_session *session;
2549 session = (struct ccp_session *)get_sym_session_private_data(
2551 ccp_cryptodev_driver_id);
2553 switch (session->cipher.algo) {
2554 case CCP_CIPHER_ALGO_AES_CBC:
2555 result = ccp_perform_aes(op, cmd_q, b_info);
/* 2 descriptors: presumably IV passthru + cipher, as in the 3DES path */
2556 b_info->desccnt += 2;
2558 case CCP_CIPHER_ALGO_AES_CTR:
2559 result = ccp_perform_aes(op, cmd_q, b_info);
2560 b_info->desccnt += 2;
2562 case CCP_CIPHER_ALGO_AES_ECB:
/* ECB uses no IV, hence a single descriptor */
2563 result = ccp_perform_aes(op, cmd_q, b_info);
2564 b_info->desccnt += 1;
2566 case CCP_CIPHER_ALGO_3DES_CBC:
2567 result = ccp_perform_3des(op, cmd_q, b_info);
2568 b_info->desccnt += 2;
/* Unknown algorithm: log and fall out as an error */
2571 CCP_LOG_ERR("Unsupported cipher algo %d",
2572 session->cipher.algo);
/*
 * Dispatch an auth-only (CCP_CMD_HASH) op to the matching perform
 * routine, updating b_info->desccnt with the per-algorithm descriptor
 * cost.  session->auth_opt == 0 selects the hardware path; when
 * auth_opt is set, authentication is done on the CPU elsewhere
 * (see cpu_crypto_auth callers) and no descriptors are issued here.
 */
2579 ccp_crypto_auth(struct rte_crypto_op *op,
2580 struct ccp_queue *cmd_q,
2581 struct ccp_batch_info *b_info)
2585 struct ccp_session *session;
2587 session = (struct ccp_session *)get_sym_session_private_data(
2589 ccp_cryptodev_driver_id);
2591 switch (session->auth.algo) {
2592 case CCP_AUTH_ALGO_SHA1:
2593 case CCP_AUTH_ALGO_SHA224:
2594 case CCP_AUTH_ALGO_SHA256:
2595 case CCP_AUTH_ALGO_SHA384:
2596 case CCP_AUTH_ALGO_SHA512:
/* Plain SHA: 3 descriptors per op */
2597 result = ccp_perform_sha(op, cmd_q);
2598 b_info->desccnt += 3;
2600 case CCP_AUTH_ALGO_MD5_HMAC:
/* NOTE(review): MD5-HMAC appears CPU-only; the auth_opt == 0 branch
 * body is not visible here — confirm it rejects the op. */
2601 if (session->auth_opt == 0)
2604 case CCP_AUTH_ALGO_SHA1_HMAC:
2605 case CCP_AUTH_ALGO_SHA224_HMAC:
2606 case CCP_AUTH_ALGO_SHA256_HMAC:
2607 if (session->auth_opt == 0) {
/* HW HMAC for <= SHA-256: 6 descriptors */
2608 result = ccp_perform_hmac(op, cmd_q);
2609 b_info->desccnt += 6;
2612 case CCP_AUTH_ALGO_SHA384_HMAC:
2613 case CCP_AUTH_ALGO_SHA512_HMAC:
2614 if (session->auth_opt == 0) {
/* HW HMAC for SHA-384/512 costs one extra descriptor */
2615 result = ccp_perform_hmac(op, cmd_q);
2616 b_info->desccnt += 7;
2619 case CCP_AUTH_ALGO_SHA3_224:
2620 case CCP_AUTH_ALGO_SHA3_256:
2621 case CCP_AUTH_ALGO_SHA3_384:
2622 case CCP_AUTH_ALGO_SHA3_512:
2623 result = ccp_perform_sha3(op, cmd_q);
2624 b_info->desccnt += 1;
2626 case CCP_AUTH_ALGO_SHA3_224_HMAC:
2627 case CCP_AUTH_ALGO_SHA3_256_HMAC:
2628 result = ccp_perform_sha3_hmac(op, cmd_q);
2629 b_info->desccnt += 3;
2631 case CCP_AUTH_ALGO_SHA3_384_HMAC:
2632 case CCP_AUTH_ALGO_SHA3_512_HMAC:
2633 result = ccp_perform_sha3_hmac(op, cmd_q);
2634 b_info->desccnt += 4;
2636 case CCP_AUTH_ALGO_AES_CMAC:
2637 result = ccp_perform_aes_cmac(op, cmd_q);
2638 b_info->desccnt += 4;
2641 CCP_LOG_ERR("Unsupported auth algo %d",
2642 session->auth.algo);
/*
 * Dispatch an AEAD (CCP_CMD_COMBINED) op.  Only AES-GCM is supported;
 * the session must have been configured with the matching AES-GCM
 * cipher transform, otherwise the chain order is rejected.
 * A GCM op consumes 5 descriptors (see ccp_perform_aes_gcm).
 */
2650 ccp_crypto_aead(struct rte_crypto_op *op,
2651 struct ccp_queue *cmd_q,
2652 struct ccp_batch_info *b_info)
2655 struct ccp_session *session;
2657 session = (struct ccp_session *)get_sym_session_private_data(
2659 ccp_cryptodev_driver_id);
2661 switch (session->auth.algo) {
2662 case CCP_AUTH_ALGO_AES_GCM:
/* Auth and cipher halves of the session must both be AES-GCM */
2663 if (session->cipher.algo != CCP_CIPHER_ALGO_AES_GCM) {
2664 CCP_LOG_ERR("Incorrect chain order");
2667 result = ccp_perform_aes_gcm(op, cmd_q);
2668 b_info->desccnt += 5;
2671 CCP_LOG_ERR("Unsupported aead algo %d",
2672 session->aead_algo);
/*
 * Build one batch from up to nb_ops crypto ops: enqueue hardware
 * descriptors per op according to the session's cmd_id, record the
 * batch bookkeeping (head/tail offset, desccnt, per-op pointers) in a
 * ccp_batch_info taken from qp->batch_mp, then ring the queue doorbell
 * once for the whole batch and push the batch onto qp->processed_pkts
 * for later dequeue.  An EVP MD context is created up front for the
 * CPU-auth (auth_opt) ops in the batch and destroyed on exit.
 */
2679 process_ops_to_enqueue(struct ccp_qp *qp,
2680 struct rte_crypto_op **op,
2681 struct ccp_queue *cmd_q,
2686 struct ccp_batch_info *b_info;
2687 struct ccp_session *session;
2688 EVP_MD_CTX *auth_ctx = NULL;
2690 if (rte_mempool_get(qp->batch_mp, (void **)&b_info)) {
2691 CCP_LOG_ERR("batch info allocation failed");
2695 auth_ctx = EVP_MD_CTX_create();
2696 if (unlikely(!auth_ctx)) {
2697 CCP_LOG_ERR("Unable to create auth ctx");
2700 b_info->auth_ctr = 0;
2702 /* populate batch info necessary for dequeue */
2704 b_info->lsb_buf_idx = 0;
2705 b_info->desccnt = 0;
2706 b_info->cmd_q = cmd_q;
2707 b_info->lsb_buf_phys =
2708 (phys_addr_t)rte_mem_virt2phy((void *)b_info->lsb_buf);
/* Reserve the queue slots for this batch up front */
2709 rte_atomic64_sub(&b_info->cmd_q->free_slots, slots_req);
/* Remember where this batch starts in the descriptor ring */
2711 b_info->head_offset = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx *
2713 for (i = 0; i < nb_ops; i++) {
2714 session = (struct ccp_session *)get_sym_session_private_data(
2715 op[i]->sym->session,
2716 ccp_cryptodev_driver_id);
2717 switch (session->cmd_id) {
2718 case CCP_CMD_CIPHER:
2719 result = ccp_crypto_cipher(op[i], cmd_q, b_info);
/* Hash-only: CPU path when auth_opt is set, HW otherwise */
2722 if (session->auth_opt) {
2724 result = cpu_crypto_auth(qp, op[i],
2727 result = ccp_crypto_auth(op[i], cmd_q, b_info);
2729 case CCP_CMD_CIPHER_HASH:
2730 result = ccp_crypto_cipher(op[i], cmd_q, b_info);
2733 result = ccp_crypto_auth(op[i], cmd_q, b_info);
2735 case CCP_CMD_HASH_CIPHER:
/* Hash first; only cipher if the CPU auth step succeeded */
2736 if (session->auth_opt) {
2737 result = cpu_crypto_auth(qp, op[i],
2739 if (op[i]->status !=
2740 RTE_CRYPTO_OP_STATUS_SUCCESS)
2743 result = ccp_crypto_auth(op[i], cmd_q, b_info);
2747 result = ccp_crypto_cipher(op[i], cmd_q, b_info);
2749 case CCP_CMD_COMBINED:
2750 result = ccp_crypto_aead(op[i], cmd_q, b_info);
2753 CCP_LOG_ERR("Unsupported cmd_id");
2756 if (unlikely(result < 0)) {
/* Give back the slots we reserved but did not consume */
2757 rte_atomic64_add(&b_info->cmd_q->free_slots,
2758 (slots_req - b_info->desccnt));
2761 b_info->op[i] = op[i];
2765 b_info->tail_offset = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx *
2769 /* Write the new tail address back to the queue register */
2770 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE,
2771 b_info->tail_offset);
2772 /* Turn the queue back on using our cached control register */
2773 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2774 cmd_q->qcontrol | CMD_Q_RUN);
/* Hand the batch to the dequeue side */
2776 rte_ring_enqueue(qp->processed_pkts, (void *)b_info);
2778 EVP_MD_CTX_destroy(auth_ctx);
/*
 * Post-process an authenticated op after the hardware has completed:
 * locate the digest the CCP wrote at the end of the last mbuf segment,
 * byte-reverse it where the engine emits it in reverse order, then
 * either verify it against the supplied digest (AUTH_OP_VERIFY) or
 * copy it out to the caller's digest buffer, and finally trim the
 * scratch context area off m_src again.
 */
2782 static inline void ccp_auth_dq_prepare(struct rte_crypto_op *op)
2784 struct ccp_session *session;
2785 uint8_t *digest_data, *addr;
2786 struct rte_mbuf *m_last;
2787 int offset, digest_offset;
2788 uint8_t digest_le[64];
2790 session = (struct ccp_session *)get_sym_session_private_data(
2792 ccp_cryptodev_driver_id);
2794 if (session->cmd_id == CCP_CMD_COMBINED) {
2795 digest_data = op->sym->aead.digest.data;
2796 digest_offset = op->sym->aead.data.offset +
2797 op->sym->aead.data.length;
2799 digest_data = op->sym->auth.digest.data;
2800 digest_offset = op->sym->auth.data.offset +
2801 op->sym->auth.data.length;
/* The hash context/digest lives in the last ctx_len bytes appended
 * to the last segment at enqueue time */
2803 m_last = rte_pktmbuf_lastseg(op->sym->m_src);
2804 addr = (uint8_t *)((char *)m_last->buf_addr + m_last->data_off +
2805 m_last->data_len - session->auth.ctx_len);
2808 offset = session->auth.offset;
2810 if (session->auth.engine == CCP_ENGINE_SHA)
2811 if ((session->auth.ut.sha_type != CCP_SHA_TYPE_1) &&
2812 (session->auth.ut.sha_type != CCP_SHA_TYPE_224) &&
2813 (session->auth.ut.sha_type != CCP_SHA_TYPE_256)) {
2814 /* All other algorithms require byte
/* Reverse the digest bytes into digest_le; the lines consuming
 * digest_le are not visible here — presumably addr/offset are
 * redirected to it afterwards (TODO confirm). */
2819 offset = session->auth.ctx_len -
2820 session->auth.offset - 1;
2821 for (i = 0; i < session->auth.digest_length; i++)
2822 digest_le[i] = addr[offset - i];
2827 op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
2828 if (session->auth.op == CCP_AUTH_OP_VERIFY) {
/* Verify mode: compare computed digest with the one supplied */
2829 if (memcmp(addr + offset, digest_data,
2830 session->auth.digest_length) != 0)
2831 op->status = RTE_CRYPTO_OP_STATUS_AUTH_FAILED;
/* Generate mode: fall back to an mbuf-relative digest location
 * when no explicit digest pointer was given */
2834 if (unlikely(digest_data == 0))
2835 digest_data = rte_pktmbuf_mtod_offset(
2836 op->sym->m_dst, uint8_t *,
2838 rte_memcpy(digest_data, addr + offset,
2839 session->auth.digest_length);
2841 /* Trim area used for digest from mbuf. */
2842 rte_pktmbuf_trim(op->sym->m_src,
2843 session->auth.ctx_len);
/*
 * Finalize up to nb_ops completed ops from a batch for return to the
 * application: set per-op status and run digest post-processing
 * (ccp_auth_dq_prepare) or the CPU auth step where the session's
 * cmd_id requires it.  Decrements b_info->opcnt by the number of ops
 * handed out.  An EVP MD context is created for CPU-auth ops and
 * destroyed before returning.
 */
2847 ccp_prepare_ops(struct ccp_qp *qp,
2848 struct rte_crypto_op **op_d,
2849 struct ccp_batch_info *b_info,
2853 struct ccp_session *session;
2855 EVP_MD_CTX *auth_ctx = NULL;
2857 auth_ctx = EVP_MD_CTX_create();
2858 if (unlikely(!auth_ctx)) {
2859 CCP_LOG_ERR("Unable to create auth ctx");
/* Never hand out more ops than remain in the batch */
2862 min_ops = RTE_MIN(nb_ops, b_info->opcnt);
2864 for (i = 0; i < min_ops; i++) {
2865 op_d[i] = b_info->op[b_info->op_idx++];
2866 session = (struct ccp_session *)get_sym_session_private_data(
2867 op_d[i]->sym->session,
2868 ccp_cryptodev_driver_id);
2869 switch (session->cmd_id) {
2870 case CCP_CMD_CIPHER:
/* Cipher-only: nothing to post-process */
2871 op_d[i]->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
/* HW-auth ops need digest fix-up; CPU-auth already done */
2874 if (session->auth_opt == 0)
2875 ccp_auth_dq_prepare(op_d[i]);
2877 case CCP_CMD_CIPHER_HASH:
/* Cipher ran on HW; auth runs on CPU now if auth_opt set */
2878 if (session->auth_opt)
2879 cpu_crypto_auth(qp, op_d[i],
2882 ccp_auth_dq_prepare(op_d[i]);
2884 case CCP_CMD_HASH_CIPHER:
/* CPU auth already happened at enqueue for auth_opt sessions */
2885 if (session->auth_opt)
2886 op_d[i]->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
2888 ccp_auth_dq_prepare(op_d[i]);
2890 case CCP_CMD_COMBINED:
2891 ccp_auth_dq_prepare(op_d[i]);
2894 CCP_LOG_ERR("Unsupported cmd_id");
2898 EVP_MD_CTX_destroy(auth_ctx);
2899 b_info->opcnt -= min_ops;
2904 process_ops_to_dequeue(struct ccp_qp *qp,
2905 struct rte_crypto_op **op,
2908 struct ccp_batch_info *b_info;
2909 uint32_t cur_head_offset;
2911 if (qp->b_info != NULL) {
2912 b_info = qp->b_info;
2913 if (unlikely(b_info->op_idx > 0))
2915 } else if (rte_ring_dequeue(qp->processed_pkts,
2919 if (b_info->auth_ctr == b_info->opcnt)
2921 cur_head_offset = CCP_READ_REG(b_info->cmd_q->reg_base,
2922 CMD_Q_HEAD_LO_BASE);
2924 if (b_info->head_offset < b_info->tail_offset) {
2925 if ((cur_head_offset >= b_info->head_offset) &&
2926 (cur_head_offset < b_info->tail_offset)) {
2927 qp->b_info = b_info;
2931 if ((cur_head_offset >= b_info->head_offset) ||
2932 (cur_head_offset < b_info->tail_offset)) {
2933 qp->b_info = b_info;
2940 nb_ops = ccp_prepare_ops(qp, op, b_info, nb_ops);
2941 rte_atomic64_add(&b_info->cmd_q->free_slots, b_info->desccnt);
2942 b_info->desccnt = 0;
2943 if (b_info->opcnt > 0) {
2944 qp->b_info = b_info;
2946 rte_mempool_put(qp->batch_mp, (void *)b_info);