1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2018 Advanced Micro Devices, Inc. All rights reserved.
10 #include <sys/queue.h>
11 #include <sys/types.h>
13 #include <openssl/sha.h>
14 #include <openssl/cmac.h> /*sub key apis*/
15 #include <openssl/evp.h> /*sub key apis*/
17 #include <rte_hexdump.h>
18 #include <rte_memzone.h>
19 #include <rte_malloc.h>
20 #include <rte_memory.h>
21 #include <rte_spinlock.h>
22 #include <rte_string_fns.h>
23 #include <rte_cryptodev_pmd.h>
26 #include "ccp_crypto.h"
28 #include "ccp_pmd_private.h"
30 /* SHA initial context values */
31 static uint32_t ccp_sha1_init[SHA_COMMON_DIGEST_SIZE / sizeof(uint32_t)] = {
38 uint32_t ccp_sha224_init[SHA256_DIGEST_SIZE / sizeof(uint32_t)] = {
45 uint32_t ccp_sha256_init[SHA256_DIGEST_SIZE / sizeof(uint32_t)] = {
52 uint64_t ccp_sha384_init[SHA512_DIGEST_SIZE / sizeof(uint64_t)] = {
59 uint64_t ccp_sha512_init[SHA512_DIGEST_SIZE / sizeof(uint64_t)] = {
66 static enum ccp_cmd_order
67 ccp_get_cmd_id(const struct rte_crypto_sym_xform *xform)
69 enum ccp_cmd_order res = CCP_CMD_NOT_SUPPORTED;
73 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH) {
74 if (xform->next == NULL)
76 else if (xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
77 return CCP_CMD_HASH_CIPHER;
79 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER) {
80 if (xform->next == NULL)
81 return CCP_CMD_CIPHER;
82 else if (xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
83 return CCP_CMD_CIPHER_HASH;
85 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD)
86 return CCP_CMD_COMBINED;
90 /* partial hash using openssl */
91 static int partial_hash_sha1(uint8_t *data_in, uint8_t *data_out)
97 SHA1_Transform(&ctx, data_in);
98 rte_memcpy(data_out, &ctx, SHA_DIGEST_LENGTH);
102 static int partial_hash_sha224(uint8_t *data_in, uint8_t *data_out)
106 if (!SHA224_Init(&ctx))
108 SHA256_Transform(&ctx, data_in);
109 rte_memcpy(data_out, &ctx,
110 SHA256_DIGEST_LENGTH);
114 static int partial_hash_sha256(uint8_t *data_in, uint8_t *data_out)
118 if (!SHA256_Init(&ctx))
120 SHA256_Transform(&ctx, data_in);
121 rte_memcpy(data_out, &ctx,
122 SHA256_DIGEST_LENGTH);
126 static int partial_hash_sha384(uint8_t *data_in, uint8_t *data_out)
130 if (!SHA384_Init(&ctx))
132 SHA512_Transform(&ctx, data_in);
133 rte_memcpy(data_out, &ctx,
134 SHA512_DIGEST_LENGTH);
138 static int partial_hash_sha512(uint8_t *data_in, uint8_t *data_out)
142 if (!SHA512_Init(&ctx))
144 SHA512_Transform(&ctx, data_in);
145 rte_memcpy(data_out, &ctx,
146 SHA512_DIGEST_LENGTH);
150 static int generate_partial_hash(struct ccp_session *sess)
153 uint8_t ipad[sess->auth.block_size];
154 uint8_t opad[sess->auth.block_size];
155 uint8_t *ipad_t, *opad_t;
156 uint32_t *hash_value_be32, hash_temp32[8];
157 uint64_t *hash_value_be64, hash_temp64[8];
160 opad_t = ipad_t = (uint8_t *)sess->auth.key;
162 hash_value_be32 = (uint32_t *)((uint8_t *)sess->auth.pre_compute);
163 hash_value_be64 = (uint64_t *)((uint8_t *)sess->auth.pre_compute);
165 /* considering key size is always equal to block size of algorithm */
166 for (i = 0; i < sess->auth.block_size; i++) {
167 ipad[i] = (ipad_t[i] ^ HMAC_IPAD_VALUE);
168 opad[i] = (opad_t[i] ^ HMAC_OPAD_VALUE);
171 switch (sess->auth.algo) {
172 case CCP_AUTH_ALGO_SHA1_HMAC:
173 count = SHA1_DIGEST_SIZE >> 2;
175 if (partial_hash_sha1(ipad, (uint8_t *)hash_temp32))
177 for (i = 0; i < count; i++, hash_value_be32++)
178 *hash_value_be32 = hash_temp32[count - 1 - i];
180 hash_value_be32 = (uint32_t *)((uint8_t *)sess->auth.pre_compute
181 + sess->auth.ctx_len);
182 if (partial_hash_sha1(opad, (uint8_t *)hash_temp32))
184 for (i = 0; i < count; i++, hash_value_be32++)
185 *hash_value_be32 = hash_temp32[count - 1 - i];
187 case CCP_AUTH_ALGO_SHA224_HMAC:
188 count = SHA256_DIGEST_SIZE >> 2;
190 if (partial_hash_sha224(ipad, (uint8_t *)hash_temp32))
192 for (i = 0; i < count; i++, hash_value_be32++)
193 *hash_value_be32 = hash_temp32[count - 1 - i];
195 hash_value_be32 = (uint32_t *)((uint8_t *)sess->auth.pre_compute
196 + sess->auth.ctx_len);
197 if (partial_hash_sha224(opad, (uint8_t *)hash_temp32))
199 for (i = 0; i < count; i++, hash_value_be32++)
200 *hash_value_be32 = hash_temp32[count - 1 - i];
202 case CCP_AUTH_ALGO_SHA256_HMAC:
203 count = SHA256_DIGEST_SIZE >> 2;
205 if (partial_hash_sha256(ipad, (uint8_t *)hash_temp32))
207 for (i = 0; i < count; i++, hash_value_be32++)
208 *hash_value_be32 = hash_temp32[count - 1 - i];
210 hash_value_be32 = (uint32_t *)((uint8_t *)sess->auth.pre_compute
211 + sess->auth.ctx_len);
212 if (partial_hash_sha256(opad, (uint8_t *)hash_temp32))
214 for (i = 0; i < count; i++, hash_value_be32++)
215 *hash_value_be32 = hash_temp32[count - 1 - i];
217 case CCP_AUTH_ALGO_SHA384_HMAC:
218 count = SHA512_DIGEST_SIZE >> 3;
220 if (partial_hash_sha384(ipad, (uint8_t *)hash_temp64))
222 for (i = 0; i < count; i++, hash_value_be64++)
223 *hash_value_be64 = hash_temp64[count - 1 - i];
225 hash_value_be64 = (uint64_t *)((uint8_t *)sess->auth.pre_compute
226 + sess->auth.ctx_len);
227 if (partial_hash_sha384(opad, (uint8_t *)hash_temp64))
229 for (i = 0; i < count; i++, hash_value_be64++)
230 *hash_value_be64 = hash_temp64[count - 1 - i];
232 case CCP_AUTH_ALGO_SHA512_HMAC:
233 count = SHA512_DIGEST_SIZE >> 3;
235 if (partial_hash_sha512(ipad, (uint8_t *)hash_temp64))
237 for (i = 0; i < count; i++, hash_value_be64++)
238 *hash_value_be64 = hash_temp64[count - 1 - i];
240 hash_value_be64 = (uint64_t *)((uint8_t *)sess->auth.pre_compute
241 + sess->auth.ctx_len);
242 if (partial_hash_sha512(opad, (uint8_t *)hash_temp64))
244 for (i = 0; i < count; i++, hash_value_be64++)
245 *hash_value_be64 = hash_temp64[count - 1 - i];
248 CCP_LOG_ERR("Invalid auth algo");
/* prepare temporary keys K1 and K2 */
/*
 * CMAC subkey derivation step (NIST SP 800-38B): k = l << 1 (as a bl-byte
 * big-endian value); if the MSB of l was set, XOR the last byte with the
 * field constant R (0x87 for 128-bit blocks, 0x1b for 64-bit blocks).
 *
 * @param k   output subkey, bl bytes
 * @param l   input block, bl bytes (unchanged)
 * @param bl  block length in bytes (16 for AES, 8 for DES-sized blocks)
 */
static void prepare_key(unsigned char *k, unsigned char *l, int bl)
{
	int i;

	/* Shift block to left, including carry */
	for (i = 0; i < bl; i++) {
		k[i] = l[i] << 1;
		/* propagate the top bit of the next byte into this one */
		if (i < bl - 1 && l[i + 1] & 0x80)
			k[i] |= 1;
	}
	/* If MSB set fixup with R */
	if (l[0] & 0x80)
		k[bl - 1] ^= bl == 16 ? 0x87 : 0x1b;
}
268 /* subkeys K1 and K2 generation for CMAC */
270 generate_cmac_subkeys(struct ccp_session *sess)
272 const EVP_CIPHER *algo;
274 unsigned char *ccp_ctx;
277 unsigned char zero_iv[AES_BLOCK_SIZE] = {0};
278 unsigned char dst[2 * AES_BLOCK_SIZE] = {0};
279 unsigned char k1[AES_BLOCK_SIZE] = {0};
280 unsigned char k2[AES_BLOCK_SIZE] = {0};
282 if (sess->auth.ut.aes_type == CCP_AES_TYPE_128)
283 algo = EVP_aes_128_cbc();
284 else if (sess->auth.ut.aes_type == CCP_AES_TYPE_192)
285 algo = EVP_aes_192_cbc();
286 else if (sess->auth.ut.aes_type == CCP_AES_TYPE_256)
287 algo = EVP_aes_256_cbc();
289 CCP_LOG_ERR("Invalid CMAC type length");
293 ctx = EVP_CIPHER_CTX_new();
295 CCP_LOG_ERR("ctx creation failed");
298 if (EVP_EncryptInit(ctx, algo, (unsigned char *)sess->auth.key,
299 (unsigned char *)zero_iv) <= 0)
300 goto key_generate_err;
301 if (EVP_CIPHER_CTX_set_padding(ctx, 0) <= 0)
302 goto key_generate_err;
303 if (EVP_EncryptUpdate(ctx, dst, &dstlen, zero_iv,
304 AES_BLOCK_SIZE) <= 0)
305 goto key_generate_err;
306 if (EVP_EncryptFinal_ex(ctx, dst + dstlen, &totlen) <= 0)
307 goto key_generate_err;
309 memset(sess->auth.pre_compute, 0, CCP_SB_BYTES * 2);
311 ccp_ctx = (unsigned char *)(sess->auth.pre_compute + CCP_SB_BYTES - 1);
312 prepare_key(k1, dst, AES_BLOCK_SIZE);
313 for (i = 0; i < AES_BLOCK_SIZE; i++, ccp_ctx--)
316 ccp_ctx = (unsigned char *)(sess->auth.pre_compute +
317 (2 * CCP_SB_BYTES) - 1);
318 prepare_key(k2, k1, AES_BLOCK_SIZE);
319 for (i = 0; i < AES_BLOCK_SIZE; i++, ccp_ctx--)
322 EVP_CIPHER_CTX_free(ctx);
327 CCP_LOG_ERR("CMAC Init failed");
331 /* configure session */
333 ccp_configure_session_cipher(struct ccp_session *sess,
334 const struct rte_crypto_sym_xform *xform)
336 const struct rte_crypto_cipher_xform *cipher_xform = NULL;
339 cipher_xform = &xform->cipher;
341 /* set cipher direction */
342 if (cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
343 sess->cipher.dir = CCP_CIPHER_DIR_ENCRYPT;
345 sess->cipher.dir = CCP_CIPHER_DIR_DECRYPT;
348 sess->cipher.key_length = cipher_xform->key.length;
349 rte_memcpy(sess->cipher.key, cipher_xform->key.data,
350 cipher_xform->key.length);
352 /* set iv parameters */
353 sess->iv.offset = cipher_xform->iv.offset;
354 sess->iv.length = cipher_xform->iv.length;
356 switch (cipher_xform->algo) {
357 case RTE_CRYPTO_CIPHER_AES_CTR:
358 sess->cipher.algo = CCP_CIPHER_ALGO_AES_CTR;
359 sess->cipher.um.aes_mode = CCP_AES_MODE_CTR;
360 sess->cipher.engine = CCP_ENGINE_AES;
362 case RTE_CRYPTO_CIPHER_AES_ECB:
363 sess->cipher.algo = CCP_CIPHER_ALGO_AES_CBC;
364 sess->cipher.um.aes_mode = CCP_AES_MODE_ECB;
365 sess->cipher.engine = CCP_ENGINE_AES;
367 case RTE_CRYPTO_CIPHER_AES_CBC:
368 sess->cipher.algo = CCP_CIPHER_ALGO_AES_CBC;
369 sess->cipher.um.aes_mode = CCP_AES_MODE_CBC;
370 sess->cipher.engine = CCP_ENGINE_AES;
372 case RTE_CRYPTO_CIPHER_3DES_CBC:
373 sess->cipher.algo = CCP_CIPHER_ALGO_3DES_CBC;
374 sess->cipher.um.des_mode = CCP_DES_MODE_CBC;
375 sess->cipher.engine = CCP_ENGINE_3DES;
378 CCP_LOG_ERR("Unsupported cipher algo");
383 switch (sess->cipher.engine) {
385 if (sess->cipher.key_length == 16)
386 sess->cipher.ut.aes_type = CCP_AES_TYPE_128;
387 else if (sess->cipher.key_length == 24)
388 sess->cipher.ut.aes_type = CCP_AES_TYPE_192;
389 else if (sess->cipher.key_length == 32)
390 sess->cipher.ut.aes_type = CCP_AES_TYPE_256;
392 CCP_LOG_ERR("Invalid cipher key length");
395 for (i = 0; i < sess->cipher.key_length ; i++)
396 sess->cipher.key_ccp[sess->cipher.key_length - i - 1] =
399 case CCP_ENGINE_3DES:
400 if (sess->cipher.key_length == 16)
401 sess->cipher.ut.des_type = CCP_DES_TYPE_128;
402 else if (sess->cipher.key_length == 24)
403 sess->cipher.ut.des_type = CCP_DES_TYPE_192;
405 CCP_LOG_ERR("Invalid cipher key length");
408 for (j = 0, x = 0; j < sess->cipher.key_length/8; j++, x += 8)
409 for (i = 0; i < 8; i++)
410 sess->cipher.key_ccp[(8 + x) - i - 1] =
411 sess->cipher.key[i + x];
414 CCP_LOG_ERR("Invalid CCP Engine");
417 sess->cipher.nonce_phys = rte_mem_virt2phy(sess->cipher.nonce);
418 sess->cipher.key_phys = rte_mem_virt2phy(sess->cipher.key_ccp);
423 ccp_configure_session_auth(struct ccp_session *sess,
424 const struct rte_crypto_sym_xform *xform)
426 const struct rte_crypto_auth_xform *auth_xform = NULL;
429 auth_xform = &xform->auth;
431 sess->auth.digest_length = auth_xform->digest_length;
432 if (auth_xform->op == RTE_CRYPTO_AUTH_OP_GENERATE)
433 sess->auth.op = CCP_AUTH_OP_GENERATE;
435 sess->auth.op = CCP_AUTH_OP_VERIFY;
436 switch (auth_xform->algo) {
437 case RTE_CRYPTO_AUTH_SHA1:
438 sess->auth.engine = CCP_ENGINE_SHA;
439 sess->auth.algo = CCP_AUTH_ALGO_SHA1;
440 sess->auth.ut.sha_type = CCP_SHA_TYPE_1;
441 sess->auth.ctx = (void *)ccp_sha1_init;
442 sess->auth.ctx_len = CCP_SB_BYTES;
443 sess->auth.offset = CCP_SB_BYTES - SHA1_DIGEST_SIZE;
445 case RTE_CRYPTO_AUTH_SHA1_HMAC:
446 if (auth_xform->key.length > SHA1_BLOCK_SIZE)
448 sess->auth.engine = CCP_ENGINE_SHA;
449 sess->auth.algo = CCP_AUTH_ALGO_SHA1_HMAC;
450 sess->auth.ut.sha_type = CCP_SHA_TYPE_1;
451 sess->auth.ctx_len = CCP_SB_BYTES;
452 sess->auth.offset = CCP_SB_BYTES - SHA1_DIGEST_SIZE;
453 sess->auth.block_size = SHA1_BLOCK_SIZE;
454 sess->auth.key_length = auth_xform->key.length;
455 memset(sess->auth.key, 0, sess->auth.block_size);
456 memset(sess->auth.pre_compute, 0, sess->auth.ctx_len << 1);
457 rte_memcpy(sess->auth.key, auth_xform->key.data,
458 auth_xform->key.length);
459 if (generate_partial_hash(sess))
462 case RTE_CRYPTO_AUTH_SHA224:
463 sess->auth.algo = CCP_AUTH_ALGO_SHA224;
464 sess->auth.engine = CCP_ENGINE_SHA;
465 sess->auth.ut.sha_type = CCP_SHA_TYPE_224;
466 sess->auth.ctx = (void *)ccp_sha224_init;
467 sess->auth.ctx_len = CCP_SB_BYTES;
468 sess->auth.offset = CCP_SB_BYTES - SHA224_DIGEST_SIZE;
470 case RTE_CRYPTO_AUTH_SHA224_HMAC:
471 if (auth_xform->key.length > SHA224_BLOCK_SIZE)
473 sess->auth.algo = CCP_AUTH_ALGO_SHA224_HMAC;
474 sess->auth.engine = CCP_ENGINE_SHA;
475 sess->auth.ut.sha_type = CCP_SHA_TYPE_224;
476 sess->auth.ctx_len = CCP_SB_BYTES;
477 sess->auth.offset = CCP_SB_BYTES - SHA224_DIGEST_SIZE;
478 sess->auth.block_size = SHA224_BLOCK_SIZE;
479 sess->auth.key_length = auth_xform->key.length;
480 memset(sess->auth.key, 0, sess->auth.block_size);
481 memset(sess->auth.pre_compute, 0, sess->auth.ctx_len << 1);
482 rte_memcpy(sess->auth.key, auth_xform->key.data,
483 auth_xform->key.length);
484 if (generate_partial_hash(sess))
487 case RTE_CRYPTO_AUTH_SHA256:
488 sess->auth.algo = CCP_AUTH_ALGO_SHA256;
489 sess->auth.engine = CCP_ENGINE_SHA;
490 sess->auth.ut.sha_type = CCP_SHA_TYPE_256;
491 sess->auth.ctx = (void *)ccp_sha256_init;
492 sess->auth.ctx_len = CCP_SB_BYTES;
493 sess->auth.offset = CCP_SB_BYTES - SHA256_DIGEST_SIZE;
495 case RTE_CRYPTO_AUTH_SHA256_HMAC:
496 if (auth_xform->key.length > SHA256_BLOCK_SIZE)
498 sess->auth.algo = CCP_AUTH_ALGO_SHA256_HMAC;
499 sess->auth.engine = CCP_ENGINE_SHA;
500 sess->auth.ut.sha_type = CCP_SHA_TYPE_256;
501 sess->auth.ctx_len = CCP_SB_BYTES;
502 sess->auth.offset = CCP_SB_BYTES - SHA256_DIGEST_SIZE;
503 sess->auth.block_size = SHA256_BLOCK_SIZE;
504 sess->auth.key_length = auth_xform->key.length;
505 memset(sess->auth.key, 0, sess->auth.block_size);
506 memset(sess->auth.pre_compute, 0, sess->auth.ctx_len << 1);
507 rte_memcpy(sess->auth.key, auth_xform->key.data,
508 auth_xform->key.length);
509 if (generate_partial_hash(sess))
512 case RTE_CRYPTO_AUTH_SHA384:
513 sess->auth.algo = CCP_AUTH_ALGO_SHA384;
514 sess->auth.engine = CCP_ENGINE_SHA;
515 sess->auth.ut.sha_type = CCP_SHA_TYPE_384;
516 sess->auth.ctx = (void *)ccp_sha384_init;
517 sess->auth.ctx_len = CCP_SB_BYTES << 1;
518 sess->auth.offset = (CCP_SB_BYTES << 1) - SHA384_DIGEST_SIZE;
520 case RTE_CRYPTO_AUTH_SHA384_HMAC:
521 if (auth_xform->key.length > SHA384_BLOCK_SIZE)
523 sess->auth.algo = CCP_AUTH_ALGO_SHA384_HMAC;
524 sess->auth.engine = CCP_ENGINE_SHA;
525 sess->auth.ut.sha_type = CCP_SHA_TYPE_384;
526 sess->auth.ctx_len = CCP_SB_BYTES << 1;
527 sess->auth.offset = (CCP_SB_BYTES << 1) - SHA384_DIGEST_SIZE;
528 sess->auth.block_size = SHA384_BLOCK_SIZE;
529 sess->auth.key_length = auth_xform->key.length;
530 memset(sess->auth.key, 0, sess->auth.block_size);
531 memset(sess->auth.pre_compute, 0, sess->auth.ctx_len << 1);
532 rte_memcpy(sess->auth.key, auth_xform->key.data,
533 auth_xform->key.length);
534 if (generate_partial_hash(sess))
537 case RTE_CRYPTO_AUTH_SHA512:
538 sess->auth.algo = CCP_AUTH_ALGO_SHA512;
539 sess->auth.engine = CCP_ENGINE_SHA;
540 sess->auth.ut.sha_type = CCP_SHA_TYPE_512;
541 sess->auth.ctx = (void *)ccp_sha512_init;
542 sess->auth.ctx_len = CCP_SB_BYTES << 1;
543 sess->auth.offset = (CCP_SB_BYTES << 1) - SHA512_DIGEST_SIZE;
545 case RTE_CRYPTO_AUTH_SHA512_HMAC:
546 if (auth_xform->key.length > SHA512_BLOCK_SIZE)
548 sess->auth.algo = CCP_AUTH_ALGO_SHA512_HMAC;
549 sess->auth.engine = CCP_ENGINE_SHA;
550 sess->auth.ut.sha_type = CCP_SHA_TYPE_512;
551 sess->auth.ctx_len = CCP_SB_BYTES << 1;
552 sess->auth.offset = (CCP_SB_BYTES << 1) - SHA512_DIGEST_SIZE;
553 sess->auth.block_size = SHA512_BLOCK_SIZE;
554 sess->auth.key_length = auth_xform->key.length;
555 memset(sess->auth.key, 0, sess->auth.block_size);
556 memset(sess->auth.pre_compute, 0, sess->auth.ctx_len << 1);
557 rte_memcpy(sess->auth.key, auth_xform->key.data,
558 auth_xform->key.length);
559 if (generate_partial_hash(sess))
563 case RTE_CRYPTO_AUTH_AES_CMAC:
564 sess->auth.algo = CCP_AUTH_ALGO_AES_CMAC;
565 sess->auth.engine = CCP_ENGINE_AES;
566 sess->auth.um.aes_mode = CCP_AES_MODE_CMAC;
567 sess->auth.key_length = auth_xform->key.length;
568 /**<padding and hash result*/
569 sess->auth.ctx_len = CCP_SB_BYTES << 1;
570 sess->auth.offset = AES_BLOCK_SIZE;
571 sess->auth.block_size = AES_BLOCK_SIZE;
572 if (sess->auth.key_length == 16)
573 sess->auth.ut.aes_type = CCP_AES_TYPE_128;
574 else if (sess->auth.key_length == 24)
575 sess->auth.ut.aes_type = CCP_AES_TYPE_192;
576 else if (sess->auth.key_length == 32)
577 sess->auth.ut.aes_type = CCP_AES_TYPE_256;
579 CCP_LOG_ERR("Invalid CMAC key length");
582 rte_memcpy(sess->auth.key, auth_xform->key.data,
583 sess->auth.key_length);
584 for (i = 0; i < sess->auth.key_length; i++)
585 sess->auth.key_ccp[sess->auth.key_length - i - 1] =
587 if (generate_cmac_subkeys(sess))
591 CCP_LOG_ERR("Unsupported hash algo");
598 ccp_configure_session_aead(struct ccp_session *sess,
599 const struct rte_crypto_sym_xform *xform)
601 const struct rte_crypto_aead_xform *aead_xform = NULL;
604 aead_xform = &xform->aead;
606 sess->cipher.key_length = aead_xform->key.length;
607 rte_memcpy(sess->cipher.key, aead_xform->key.data,
608 aead_xform->key.length);
610 if (aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
611 sess->cipher.dir = CCP_CIPHER_DIR_ENCRYPT;
612 sess->auth.op = CCP_AUTH_OP_GENERATE;
614 sess->cipher.dir = CCP_CIPHER_DIR_DECRYPT;
615 sess->auth.op = CCP_AUTH_OP_VERIFY;
617 sess->aead_algo = aead_xform->algo;
618 sess->auth.aad_length = aead_xform->aad_length;
619 sess->auth.digest_length = aead_xform->digest_length;
621 /* set iv parameters */
622 sess->iv.offset = aead_xform->iv.offset;
623 sess->iv.length = aead_xform->iv.length;
625 switch (aead_xform->algo) {
626 case RTE_CRYPTO_AEAD_AES_GCM:
627 sess->cipher.algo = CCP_CIPHER_ALGO_AES_GCM;
628 sess->cipher.um.aes_mode = CCP_AES_MODE_GCTR;
629 sess->cipher.engine = CCP_ENGINE_AES;
630 if (sess->cipher.key_length == 16)
631 sess->cipher.ut.aes_type = CCP_AES_TYPE_128;
632 else if (sess->cipher.key_length == 24)
633 sess->cipher.ut.aes_type = CCP_AES_TYPE_192;
634 else if (sess->cipher.key_length == 32)
635 sess->cipher.ut.aes_type = CCP_AES_TYPE_256;
637 CCP_LOG_ERR("Invalid aead key length");
640 for (i = 0; i < sess->cipher.key_length; i++)
641 sess->cipher.key_ccp[sess->cipher.key_length - i - 1] =
643 sess->auth.algo = CCP_AUTH_ALGO_AES_GCM;
644 sess->auth.engine = CCP_ENGINE_AES;
645 sess->auth.um.aes_mode = CCP_AES_MODE_GHASH;
646 sess->auth.ctx_len = CCP_SB_BYTES;
647 sess->auth.offset = 0;
648 sess->auth.block_size = AES_BLOCK_SIZE;
649 sess->cmd_id = CCP_CMD_COMBINED;
652 CCP_LOG_ERR("Unsupported aead algo");
655 sess->cipher.nonce_phys = rte_mem_virt2phy(sess->cipher.nonce);
656 sess->cipher.key_phys = rte_mem_virt2phy(sess->cipher.key_ccp);
661 ccp_set_session_parameters(struct ccp_session *sess,
662 const struct rte_crypto_sym_xform *xform)
664 const struct rte_crypto_sym_xform *cipher_xform = NULL;
665 const struct rte_crypto_sym_xform *auth_xform = NULL;
666 const struct rte_crypto_sym_xform *aead_xform = NULL;
669 sess->cmd_id = ccp_get_cmd_id(xform);
671 switch (sess->cmd_id) {
673 cipher_xform = xform;
678 case CCP_CMD_CIPHER_HASH:
679 cipher_xform = xform;
680 auth_xform = xform->next;
682 case CCP_CMD_HASH_CIPHER:
684 cipher_xform = xform->next;
686 case CCP_CMD_COMBINED:
690 CCP_LOG_ERR("Unsupported cmd_id");
694 /* Default IV length = 0 */
697 ret = ccp_configure_session_cipher(sess, cipher_xform);
699 CCP_LOG_ERR("Invalid/unsupported cipher parameters");
704 ret = ccp_configure_session_auth(sess, auth_xform);
706 CCP_LOG_ERR("Invalid/unsupported auth parameters");
711 ret = ccp_configure_session_aead(sess, aead_xform);
713 CCP_LOG_ERR("Invalid/unsupported aead parameters");
720 /* calculate CCP descriptors requirement */
722 ccp_cipher_slot(struct ccp_session *session)
726 switch (session->cipher.algo) {
727 case CCP_CIPHER_ALGO_AES_CBC:
729 /**< op + passthrough for iv */
731 case CCP_CIPHER_ALGO_AES_ECB:
735 case CCP_CIPHER_ALGO_AES_CTR:
737 /**< op + passthrough for iv */
739 case CCP_CIPHER_ALGO_3DES_CBC:
741 /**< op + passthrough for iv */
744 CCP_LOG_ERR("Unsupported cipher algo %d",
745 session->cipher.algo);
751 ccp_auth_slot(struct ccp_session *session)
755 switch (session->auth.algo) {
756 case CCP_AUTH_ALGO_SHA1:
757 case CCP_AUTH_ALGO_SHA224:
758 case CCP_AUTH_ALGO_SHA256:
759 case CCP_AUTH_ALGO_SHA384:
760 case CCP_AUTH_ALGO_SHA512:
762 /**< op + lsb passthrough cpy to/from*/
764 case CCP_AUTH_ALGO_SHA1_HMAC:
765 case CCP_AUTH_ALGO_SHA224_HMAC:
766 case CCP_AUTH_ALGO_SHA256_HMAC:
769 case CCP_AUTH_ALGO_SHA384_HMAC:
770 case CCP_AUTH_ALGO_SHA512_HMAC:
773 * 1. Load PHash1 = H(k ^ ipad); to LSB
774 * 2. generate IHash = H(hash on meassage with PHash1
776 * 3. Retrieve IHash 2 slots for 384/512
777 * 4. Load Phash2 = H(k ^ opad); to LSB
778 * 5. generate FHash = H(hash on Ihash with Phash2
780 * 6. Retrieve HMAC output from LSB to host memory
783 case CCP_AUTH_ALGO_AES_CMAC:
787 * extra descriptor in padding case
788 * (k1/k2(255:128) with iv(127:0))
793 CCP_LOG_ERR("Unsupported auth algo %d",
801 ccp_aead_slot(struct ccp_session *session)
805 switch (session->aead_algo) {
806 case RTE_CRYPTO_AEAD_AES_GCM:
809 CCP_LOG_ERR("Unsupported aead algo %d",
812 switch (session->auth.algo) {
813 case CCP_AUTH_ALGO_AES_GCM:
824 CCP_LOG_ERR("Unsupported combined auth ALGO %d",
831 ccp_compute_slot_count(struct ccp_session *session)
835 switch (session->cmd_id) {
837 count = ccp_cipher_slot(session);
840 count = ccp_auth_slot(session);
842 case CCP_CMD_CIPHER_HASH:
843 case CCP_CMD_HASH_CIPHER:
844 count = ccp_cipher_slot(session);
845 count += ccp_auth_slot(session);
847 case CCP_CMD_COMBINED:
848 count = ccp_aead_slot(session);
851 CCP_LOG_ERR("Unsupported cmd_id");
859 ccp_perform_passthru(struct ccp_passthru *pst,
860 struct ccp_queue *cmd_q)
862 struct ccp_desc *desc;
863 union ccp_function function;
865 desc = &cmd_q->qbase_desc[cmd_q->qidx];
867 CCP_CMD_ENGINE(desc) = CCP_ENGINE_PASSTHRU;
869 CCP_CMD_SOC(desc) = 0;
870 CCP_CMD_IOC(desc) = 0;
871 CCP_CMD_INIT(desc) = 0;
872 CCP_CMD_EOM(desc) = 0;
873 CCP_CMD_PROT(desc) = 0;
876 CCP_PT_BYTESWAP(&function) = pst->byte_swap;
877 CCP_PT_BITWISE(&function) = pst->bit_mod;
878 CCP_CMD_FUNCTION(desc) = function.raw;
880 CCP_CMD_LEN(desc) = pst->len;
883 CCP_CMD_SRC_LO(desc) = (uint32_t)(pst->src_addr);
884 CCP_CMD_SRC_HI(desc) = high32_value(pst->src_addr);
885 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
887 CCP_CMD_DST_LO(desc) = (uint32_t)(pst->dest_addr);
888 CCP_CMD_DST_HI(desc) = 0;
889 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SB;
891 if (pst->bit_mod != CCP_PASSTHRU_BITWISE_NOOP)
892 CCP_CMD_LSB_ID(desc) = cmd_q->sb_key;
895 CCP_CMD_SRC_LO(desc) = (uint32_t)(pst->src_addr);
896 CCP_CMD_SRC_HI(desc) = 0;
897 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SB;
899 CCP_CMD_DST_LO(desc) = (uint32_t)(pst->dest_addr);
900 CCP_CMD_DST_HI(desc) = high32_value(pst->dest_addr);
901 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;
904 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
908 ccp_perform_hmac(struct rte_crypto_op *op,
909 struct ccp_queue *cmd_q)
912 struct ccp_session *session;
913 union ccp_function function;
914 struct ccp_desc *desc;
916 phys_addr_t src_addr, dest_addr, dest_addr_t;
917 struct ccp_passthru pst;
918 uint64_t auth_msg_bits;
922 session = (struct ccp_session *)get_session_private_data(
924 ccp_cryptodev_driver_id);
925 addr = session->auth.pre_compute;
927 src_addr = rte_pktmbuf_mtophys_offset(op->sym->m_src,
928 op->sym->auth.data.offset);
929 append_ptr = (void *)rte_pktmbuf_append(op->sym->m_src,
930 session->auth.ctx_len);
931 dest_addr = (phys_addr_t)rte_mem_virt2phy(append_ptr);
932 dest_addr_t = dest_addr;
934 /** Load PHash1 to LSB*/
935 pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *)addr);
936 pst.dest_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
937 pst.len = session->auth.ctx_len;
939 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
940 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
941 ccp_perform_passthru(&pst, cmd_q);
943 /**sha engine command descriptor for IntermediateHash*/
945 desc = &cmd_q->qbase_desc[cmd_q->qidx];
946 memset(desc, 0, Q_DESC_SIZE);
948 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
950 CCP_CMD_SOC(desc) = 0;
951 CCP_CMD_IOC(desc) = 0;
952 CCP_CMD_INIT(desc) = 1;
953 CCP_CMD_EOM(desc) = 1;
954 CCP_CMD_PROT(desc) = 0;
957 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
958 CCP_CMD_FUNCTION(desc) = function.raw;
960 CCP_CMD_LEN(desc) = op->sym->auth.data.length;
961 auth_msg_bits = (op->sym->auth.data.length +
962 session->auth.block_size) * 8;
964 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
965 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
966 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
968 CCP_CMD_LSB_ID(desc) = cmd_q->sb_sha;
969 CCP_CMD_SHA_LO(desc) = ((uint32_t)auth_msg_bits);
970 CCP_CMD_SHA_HI(desc) = high32_value(auth_msg_bits);
972 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
976 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
977 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
978 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
979 cmd_q->qcontrol | CMD_Q_RUN);
981 /* Intermediate Hash value retrieve */
982 if ((session->auth.ut.sha_type == CCP_SHA_TYPE_384) ||
983 (session->auth.ut.sha_type == CCP_SHA_TYPE_512)) {
986 (phys_addr_t)((cmd_q->sb_sha + 1) * CCP_SB_BYTES);
987 pst.dest_addr = dest_addr_t;
988 pst.len = CCP_SB_BYTES;
990 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
991 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
992 ccp_perform_passthru(&pst, cmd_q);
994 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
995 pst.dest_addr = dest_addr_t + CCP_SB_BYTES;
996 pst.len = CCP_SB_BYTES;
998 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
999 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1000 ccp_perform_passthru(&pst, cmd_q);
1003 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1004 pst.dest_addr = dest_addr_t;
1005 pst.len = session->auth.ctx_len;
1007 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1008 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1009 ccp_perform_passthru(&pst, cmd_q);
1013 /** Load PHash2 to LSB*/
1014 addr += session->auth.ctx_len;
1015 pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *)addr);
1016 pst.dest_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1017 pst.len = session->auth.ctx_len;
1019 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1020 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1021 ccp_perform_passthru(&pst, cmd_q);
1023 /**sha engine command descriptor for FinalHash*/
1024 dest_addr_t += session->auth.offset;
1026 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1027 memset(desc, 0, Q_DESC_SIZE);
1029 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
1031 CCP_CMD_SOC(desc) = 0;
1032 CCP_CMD_IOC(desc) = 0;
1033 CCP_CMD_INIT(desc) = 1;
1034 CCP_CMD_EOM(desc) = 1;
1035 CCP_CMD_PROT(desc) = 0;
1038 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
1039 CCP_CMD_FUNCTION(desc) = function.raw;
1041 CCP_CMD_LEN(desc) = (session->auth.ctx_len -
1042 session->auth.offset);
1043 auth_msg_bits = (session->auth.block_size +
1044 session->auth.ctx_len -
1045 session->auth.offset) * 8;
1047 CCP_CMD_SRC_LO(desc) = (uint32_t)(dest_addr_t);
1048 CCP_CMD_SRC_HI(desc) = high32_value(dest_addr_t);
1049 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1051 CCP_CMD_LSB_ID(desc) = cmd_q->sb_sha;
1052 CCP_CMD_SHA_LO(desc) = ((uint32_t)auth_msg_bits);
1053 CCP_CMD_SHA_HI(desc) = high32_value(auth_msg_bits);
1055 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
1059 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1060 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1061 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1062 cmd_q->qcontrol | CMD_Q_RUN);
1064 /* Retrieve hmac output */
1065 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1066 pst.dest_addr = dest_addr;
1067 pst.len = session->auth.ctx_len;
1069 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1070 if ((session->auth.ut.sha_type == CCP_SHA_TYPE_384) ||
1071 (session->auth.ut.sha_type == CCP_SHA_TYPE_512))
1072 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1074 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1075 ccp_perform_passthru(&pst, cmd_q);
1077 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
1083 ccp_perform_sha(struct rte_crypto_op *op,
1084 struct ccp_queue *cmd_q)
1086 struct ccp_session *session;
1087 union ccp_function function;
1088 struct ccp_desc *desc;
1090 phys_addr_t src_addr, dest_addr;
1091 struct ccp_passthru pst;
1093 uint64_t auth_msg_bits;
1095 session = (struct ccp_session *)get_session_private_data(
1097 ccp_cryptodev_driver_id);
1099 src_addr = rte_pktmbuf_mtophys_offset(op->sym->m_src,
1100 op->sym->auth.data.offset);
1102 append_ptr = (void *)rte_pktmbuf_append(op->sym->m_src,
1103 session->auth.ctx_len);
1104 dest_addr = (phys_addr_t)rte_mem_virt2phy(append_ptr);
1106 /** Passthru sha context*/
1108 pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *)
1110 pst.dest_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1111 pst.len = session->auth.ctx_len;
1113 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1114 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1115 ccp_perform_passthru(&pst, cmd_q);
1117 /**prepare sha command descriptor*/
1119 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1120 memset(desc, 0, Q_DESC_SIZE);
1122 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
1124 CCP_CMD_SOC(desc) = 0;
1125 CCP_CMD_IOC(desc) = 0;
1126 CCP_CMD_INIT(desc) = 1;
1127 CCP_CMD_EOM(desc) = 1;
1128 CCP_CMD_PROT(desc) = 0;
1131 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
1132 CCP_CMD_FUNCTION(desc) = function.raw;
1134 CCP_CMD_LEN(desc) = op->sym->auth.data.length;
1135 auth_msg_bits = op->sym->auth.data.length * 8;
1137 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
1138 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
1139 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1141 CCP_CMD_LSB_ID(desc) = cmd_q->sb_sha;
1142 CCP_CMD_SHA_LO(desc) = ((uint32_t)auth_msg_bits);
1143 CCP_CMD_SHA_HI(desc) = high32_value(auth_msg_bits);
1145 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
1149 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1150 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1151 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1152 cmd_q->qcontrol | CMD_Q_RUN);
1154 /* Hash value retrieve */
1155 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1156 pst.dest_addr = dest_addr;
1157 pst.len = session->auth.ctx_len;
1159 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1160 if ((session->auth.ut.sha_type == CCP_SHA_TYPE_384) ||
1161 (session->auth.ut.sha_type == CCP_SHA_TYPE_512))
1162 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1164 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1165 ccp_perform_passthru(&pst, cmd_q);
1167 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
1173 ccp_perform_aes_cmac(struct rte_crypto_op *op,
1174 struct ccp_queue *cmd_q)
1176 struct ccp_session *session;
1177 union ccp_function function;
1178 struct ccp_passthru pst;
1179 struct ccp_desc *desc;
1181 uint8_t *src_tb, *append_ptr, *ctx_addr;
1182 phys_addr_t src_addr, dest_addr, key_addr;
1183 int length, non_align_len;
1185 session = (struct ccp_session *)get_session_private_data(
1187 ccp_cryptodev_driver_id);
1188 key_addr = rte_mem_virt2phy(session->auth.key_ccp);
1190 src_addr = rte_pktmbuf_mtophys_offset(op->sym->m_src,
1191 op->sym->auth.data.offset);
1192 append_ptr = (uint8_t *)rte_pktmbuf_append(op->sym->m_src,
1193 session->auth.ctx_len);
1194 dest_addr = (phys_addr_t)rte_mem_virt2phy((void *)append_ptr);
1197 CCP_AES_ENCRYPT(&function) = CCP_CIPHER_DIR_ENCRYPT;
1198 CCP_AES_MODE(&function) = session->auth.um.aes_mode;
1199 CCP_AES_TYPE(&function) = session->auth.ut.aes_type;
1201 if (op->sym->auth.data.length % session->auth.block_size == 0) {
1203 ctx_addr = session->auth.pre_compute;
1204 memset(ctx_addr, 0, AES_BLOCK_SIZE);
1205 pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *)ctx_addr);
1206 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
1207 pst.len = CCP_SB_BYTES;
1209 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1210 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1211 ccp_perform_passthru(&pst, cmd_q);
1213 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1214 memset(desc, 0, Q_DESC_SIZE);
1216 /* prepare desc for aes-cmac command */
1217 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
1218 CCP_CMD_EOM(desc) = 1;
1219 CCP_CMD_FUNCTION(desc) = function.raw;
1221 CCP_CMD_LEN(desc) = op->sym->auth.data.length;
1222 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
1223 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
1224 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1226 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
1227 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
1228 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1229 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
1231 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
1236 (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1237 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1238 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1239 cmd_q->qcontrol | CMD_Q_RUN);
1241 ctx_addr = session->auth.pre_compute + CCP_SB_BYTES;
1242 memset(ctx_addr, 0, AES_BLOCK_SIZE);
1243 pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *)ctx_addr);
1244 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
1245 pst.len = CCP_SB_BYTES;
1247 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1248 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1249 ccp_perform_passthru(&pst, cmd_q);
1251 length = (op->sym->auth.data.length / AES_BLOCK_SIZE);
1252 length *= AES_BLOCK_SIZE;
1253 non_align_len = op->sym->auth.data.length - length;
1254 /* prepare desc for aes-cmac command */
1256 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1257 memset(desc, 0, Q_DESC_SIZE);
1259 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
1260 CCP_CMD_INIT(desc) = 1;
1261 CCP_CMD_FUNCTION(desc) = function.raw;
1263 CCP_CMD_LEN(desc) = length;
1264 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
1265 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
1266 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1268 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
1269 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
1270 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1271 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
1273 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
1276 append_ptr = append_ptr + CCP_SB_BYTES;
1277 memset(append_ptr, 0, AES_BLOCK_SIZE);
1278 src_tb = rte_pktmbuf_mtod_offset(op->sym->m_src,
1280 op->sym->auth.data.offset +
1282 rte_memcpy(append_ptr, src_tb, non_align_len);
1283 append_ptr[non_align_len] = CMAC_PAD_VALUE;
1285 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1286 memset(desc, 0, Q_DESC_SIZE);
1288 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
1289 CCP_CMD_EOM(desc) = 1;
1290 CCP_CMD_FUNCTION(desc) = function.raw;
1291 CCP_CMD_LEN(desc) = AES_BLOCK_SIZE;
1293 CCP_CMD_SRC_LO(desc) = ((uint32_t)(dest_addr + CCP_SB_BYTES));
1294 CCP_CMD_SRC_HI(desc) = high32_value(dest_addr + CCP_SB_BYTES);
1295 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1297 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
1298 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
1299 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1300 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
1302 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
1306 (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1307 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1308 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1309 cmd_q->qcontrol | CMD_Q_RUN);
1311 /* Retrieve result */
1312 pst.dest_addr = dest_addr;
1313 pst.src_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
1314 pst.len = CCP_SB_BYTES;
1316 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1317 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1318 ccp_perform_passthru(&pst, cmd_q);
1320 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
1325 ccp_perform_aes(struct rte_crypto_op *op,
1326 struct ccp_queue *cmd_q,
1327 struct ccp_batch_info *b_info)
1329 struct ccp_session *session;
1330 union ccp_function function;
1332 struct ccp_passthru pst = {0};
1333 struct ccp_desc *desc;
1334 phys_addr_t src_addr, dest_addr, key_addr;
1337 session = (struct ccp_session *)get_session_private_data(
1339 ccp_cryptodev_driver_id);
1342 iv = rte_crypto_op_ctod_offset(op, uint8_t *, session->iv.offset);
1343 if (session->cipher.um.aes_mode != CCP_AES_MODE_ECB) {
1344 if (session->cipher.um.aes_mode == CCP_AES_MODE_CTR) {
1345 rte_memcpy(session->cipher.nonce + AES_BLOCK_SIZE,
1346 iv, session->iv.length);
1347 pst.src_addr = (phys_addr_t)session->cipher.nonce_phys;
1348 CCP_AES_SIZE(&function) = 0x1F;
1351 &(b_info->lsb_buf[b_info->lsb_buf_idx*CCP_SB_BYTES]);
1352 rte_memcpy(lsb_buf +
1353 (CCP_SB_BYTES - session->iv.length),
1354 iv, session->iv.length);
1355 pst.src_addr = b_info->lsb_buf_phys +
1356 (b_info->lsb_buf_idx * CCP_SB_BYTES);
1357 b_info->lsb_buf_idx++;
1360 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
1361 pst.len = CCP_SB_BYTES;
1363 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1364 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1365 ccp_perform_passthru(&pst, cmd_q);
1368 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1370 src_addr = rte_pktmbuf_mtophys_offset(op->sym->m_src,
1371 op->sym->cipher.data.offset);
1372 if (likely(op->sym->m_dst != NULL))
1373 dest_addr = rte_pktmbuf_mtophys_offset(op->sym->m_dst,
1374 op->sym->cipher.data.offset);
1376 dest_addr = src_addr;
1377 key_addr = session->cipher.key_phys;
1379 /* prepare desc for aes command */
1380 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
1381 CCP_CMD_INIT(desc) = 1;
1382 CCP_CMD_EOM(desc) = 1;
1384 CCP_AES_ENCRYPT(&function) = session->cipher.dir;
1385 CCP_AES_MODE(&function) = session->cipher.um.aes_mode;
1386 CCP_AES_TYPE(&function) = session->cipher.ut.aes_type;
1387 CCP_CMD_FUNCTION(desc) = function.raw;
1389 CCP_CMD_LEN(desc) = op->sym->cipher.data.length;
1391 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
1392 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
1393 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1395 CCP_CMD_DST_LO(desc) = ((uint32_t)dest_addr);
1396 CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
1397 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1399 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
1400 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
1401 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1403 if (session->cipher.um.aes_mode != CCP_AES_MODE_ECB)
1404 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
1406 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
1407 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
1412 ccp_perform_3des(struct rte_crypto_op *op,
1413 struct ccp_queue *cmd_q,
1414 struct ccp_batch_info *b_info)
1416 struct ccp_session *session;
1417 union ccp_function function;
1418 unsigned char *lsb_buf;
1419 struct ccp_passthru pst;
1420 struct ccp_desc *desc;
1423 phys_addr_t src_addr, dest_addr, key_addr;
1425 session = (struct ccp_session *)get_session_private_data(
1427 ccp_cryptodev_driver_id);
1429 iv = rte_crypto_op_ctod_offset(op, uint8_t *, session->iv.offset);
1430 switch (session->cipher.um.des_mode) {
1431 case CCP_DES_MODE_CBC:
1432 lsb_buf = &(b_info->lsb_buf[b_info->lsb_buf_idx*CCP_SB_BYTES]);
1433 b_info->lsb_buf_idx++;
1435 rte_memcpy(lsb_buf + (CCP_SB_BYTES - session->iv.length),
1436 iv, session->iv.length);
1438 pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *) lsb_buf);
1439 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
1440 pst.len = CCP_SB_BYTES;
1442 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1443 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1444 ccp_perform_passthru(&pst, cmd_q);
1446 case CCP_DES_MODE_CFB:
1447 case CCP_DES_MODE_ECB:
1448 CCP_LOG_ERR("Unsupported DES cipher mode");
1452 src_addr = rte_pktmbuf_mtophys_offset(op->sym->m_src,
1453 op->sym->cipher.data.offset);
1454 if (unlikely(op->sym->m_dst != NULL))
1456 rte_pktmbuf_mtophys_offset(op->sym->m_dst,
1457 op->sym->cipher.data.offset);
1459 dest_addr = src_addr;
1461 key_addr = rte_mem_virt2phy(session->cipher.key_ccp);
1463 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1465 memset(desc, 0, Q_DESC_SIZE);
1467 /* prepare desc for des command */
1468 CCP_CMD_ENGINE(desc) = CCP_ENGINE_3DES;
1470 CCP_CMD_SOC(desc) = 0;
1471 CCP_CMD_IOC(desc) = 0;
1472 CCP_CMD_INIT(desc) = 1;
1473 CCP_CMD_EOM(desc) = 1;
1474 CCP_CMD_PROT(desc) = 0;
1477 CCP_DES_ENCRYPT(&function) = session->cipher.dir;
1478 CCP_DES_MODE(&function) = session->cipher.um.des_mode;
1479 CCP_DES_TYPE(&function) = session->cipher.ut.des_type;
1480 CCP_CMD_FUNCTION(desc) = function.raw;
1482 CCP_CMD_LEN(desc) = op->sym->cipher.data.length;
1484 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
1485 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
1486 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1488 CCP_CMD_DST_LO(desc) = ((uint32_t)dest_addr);
1489 CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
1490 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1492 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
1493 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
1494 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1496 if (session->cipher.um.des_mode)
1497 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
1499 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
1503 /* Write the new tail address back to the queue register */
1504 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1505 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1506 /* Turn the queue back on using our cached control register */
1507 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1508 cmd_q->qcontrol | CMD_Q_RUN);
1510 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
1515 ccp_perform_aes_gcm(struct rte_crypto_op *op, struct ccp_queue *cmd_q)
1517 struct ccp_session *session;
1518 union ccp_function function;
1520 struct ccp_passthru pst;
1521 struct ccp_desc *desc;
1524 phys_addr_t src_addr, dest_addr, key_addr, aad_addr;
1525 phys_addr_t digest_dest_addr;
1526 int length, non_align_len;
1528 session = (struct ccp_session *)get_session_private_data(
1530 ccp_cryptodev_driver_id);
1531 iv = rte_crypto_op_ctod_offset(op, uint8_t *, session->iv.offset);
1532 key_addr = session->cipher.key_phys;
1534 src_addr = rte_pktmbuf_mtophys_offset(op->sym->m_src,
1535 op->sym->aead.data.offset);
1536 if (unlikely(op->sym->m_dst != NULL))
1537 dest_addr = rte_pktmbuf_mtophys_offset(op->sym->m_dst,
1538 op->sym->aead.data.offset);
1540 dest_addr = src_addr;
1541 rte_pktmbuf_append(op->sym->m_src, session->auth.ctx_len);
1542 digest_dest_addr = op->sym->aead.digest.phys_addr;
1543 temp = (uint64_t *)(op->sym->aead.digest.data + AES_BLOCK_SIZE);
1544 *temp++ = rte_bswap64(session->auth.aad_length << 3);
1545 *temp = rte_bswap64(op->sym->aead.data.length << 3);
1547 non_align_len = op->sym->aead.data.length % AES_BLOCK_SIZE;
1548 length = CCP_ALIGN(op->sym->aead.data.length, AES_BLOCK_SIZE);
1550 aad_addr = op->sym->aead.aad.phys_addr;
1552 /* CMD1 IV Passthru */
1553 rte_memcpy(session->cipher.nonce + AES_BLOCK_SIZE, iv,
1554 session->iv.length);
1555 pst.src_addr = session->cipher.nonce_phys;
1556 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
1557 pst.len = CCP_SB_BYTES;
1559 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1560 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1561 ccp_perform_passthru(&pst, cmd_q);
1563 /* CMD2 GHASH-AAD */
1565 CCP_AES_ENCRYPT(&function) = CCP_AES_MODE_GHASH_AAD;
1566 CCP_AES_MODE(&function) = CCP_AES_MODE_GHASH;
1567 CCP_AES_TYPE(&function) = session->cipher.ut.aes_type;
1569 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1570 memset(desc, 0, Q_DESC_SIZE);
1572 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
1573 CCP_CMD_INIT(desc) = 1;
1574 CCP_CMD_FUNCTION(desc) = function.raw;
1576 CCP_CMD_LEN(desc) = session->auth.aad_length;
1578 CCP_CMD_SRC_LO(desc) = ((uint32_t)aad_addr);
1579 CCP_CMD_SRC_HI(desc) = high32_value(aad_addr);
1580 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1582 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
1583 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
1584 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1586 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
1588 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
1591 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1592 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1593 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1594 cmd_q->qcontrol | CMD_Q_RUN);
1596 /* CMD3 : GCTR Plain text */
1598 CCP_AES_ENCRYPT(&function) = session->cipher.dir;
1599 CCP_AES_MODE(&function) = CCP_AES_MODE_GCTR;
1600 CCP_AES_TYPE(&function) = session->cipher.ut.aes_type;
1601 if (non_align_len == 0)
1602 CCP_AES_SIZE(&function) = (AES_BLOCK_SIZE << 3) - 1;
1604 CCP_AES_SIZE(&function) = (non_align_len << 3) - 1;
1607 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1608 memset(desc, 0, Q_DESC_SIZE);
1610 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
1611 CCP_CMD_EOM(desc) = 1;
1612 CCP_CMD_FUNCTION(desc) = function.raw;
1614 CCP_CMD_LEN(desc) = length;
1616 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
1617 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
1618 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1620 CCP_CMD_DST_LO(desc) = ((uint32_t)dest_addr);
1621 CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
1622 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1624 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
1625 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
1626 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1628 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
1630 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
1633 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1634 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1635 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1636 cmd_q->qcontrol | CMD_Q_RUN);
1638 /* CMD4 : PT to copy IV */
1639 pst.src_addr = session->cipher.nonce_phys;
1640 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
1641 pst.len = AES_BLOCK_SIZE;
1643 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1644 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1645 ccp_perform_passthru(&pst, cmd_q);
1647 /* CMD5 : GHASH-Final */
1649 CCP_AES_ENCRYPT(&function) = CCP_AES_MODE_GHASH_FINAL;
1650 CCP_AES_MODE(&function) = CCP_AES_MODE_GHASH;
1651 CCP_AES_TYPE(&function) = session->cipher.ut.aes_type;
1653 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1654 memset(desc, 0, Q_DESC_SIZE);
1656 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
1657 CCP_CMD_FUNCTION(desc) = function.raw;
1658 /* Last block (AAD_len || PT_len)*/
1659 CCP_CMD_LEN(desc) = AES_BLOCK_SIZE;
1661 CCP_CMD_SRC_LO(desc) = ((uint32_t)digest_dest_addr + AES_BLOCK_SIZE);
1662 CCP_CMD_SRC_HI(desc) = high32_value(digest_dest_addr + AES_BLOCK_SIZE);
1663 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1665 CCP_CMD_DST_LO(desc) = ((uint32_t)digest_dest_addr);
1666 CCP_CMD_DST_HI(desc) = high32_value(digest_dest_addr);
1667 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1669 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
1670 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
1671 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1673 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
1675 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
1678 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1679 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1680 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1681 cmd_q->qcontrol | CMD_Q_RUN);
1683 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
1688 ccp_crypto_cipher(struct rte_crypto_op *op,
1689 struct ccp_queue *cmd_q,
1690 struct ccp_batch_info *b_info)
1693 struct ccp_session *session;
1695 session = (struct ccp_session *)get_session_private_data(
1697 ccp_cryptodev_driver_id);
1699 switch (session->cipher.algo) {
1700 case CCP_CIPHER_ALGO_AES_CBC:
1701 result = ccp_perform_aes(op, cmd_q, b_info);
1702 b_info->desccnt += 2;
1704 case CCP_CIPHER_ALGO_AES_CTR:
1705 result = ccp_perform_aes(op, cmd_q, b_info);
1706 b_info->desccnt += 2;
1708 case CCP_CIPHER_ALGO_AES_ECB:
1709 result = ccp_perform_aes(op, cmd_q, b_info);
1710 b_info->desccnt += 1;
1712 case CCP_CIPHER_ALGO_3DES_CBC:
1713 result = ccp_perform_3des(op, cmd_q, b_info);
1714 b_info->desccnt += 2;
1717 CCP_LOG_ERR("Unsupported cipher algo %d",
1718 session->cipher.algo);
1725 ccp_crypto_auth(struct rte_crypto_op *op,
1726 struct ccp_queue *cmd_q,
1727 struct ccp_batch_info *b_info)
1731 struct ccp_session *session;
1733 session = (struct ccp_session *)get_session_private_data(
1735 ccp_cryptodev_driver_id);
1737 switch (session->auth.algo) {
1738 case CCP_AUTH_ALGO_SHA1:
1739 case CCP_AUTH_ALGO_SHA224:
1740 case CCP_AUTH_ALGO_SHA256:
1741 case CCP_AUTH_ALGO_SHA384:
1742 case CCP_AUTH_ALGO_SHA512:
1743 result = ccp_perform_sha(op, cmd_q);
1744 b_info->desccnt += 3;
1746 case CCP_AUTH_ALGO_SHA1_HMAC:
1747 case CCP_AUTH_ALGO_SHA224_HMAC:
1748 case CCP_AUTH_ALGO_SHA256_HMAC:
1749 result = ccp_perform_hmac(op, cmd_q);
1750 b_info->desccnt += 6;
1752 case CCP_AUTH_ALGO_SHA384_HMAC:
1753 case CCP_AUTH_ALGO_SHA512_HMAC:
1754 result = ccp_perform_hmac(op, cmd_q);
1755 b_info->desccnt += 7;
1757 case CCP_AUTH_ALGO_AES_CMAC:
1758 result = ccp_perform_aes_cmac(op, cmd_q);
1759 b_info->desccnt += 4;
1762 CCP_LOG_ERR("Unsupported auth algo %d",
1763 session->auth.algo);
1771 ccp_crypto_aead(struct rte_crypto_op *op,
1772 struct ccp_queue *cmd_q,
1773 struct ccp_batch_info *b_info)
1776 struct ccp_session *session;
1778 session = (struct ccp_session *)get_session_private_data(
1780 ccp_cryptodev_driver_id);
1782 switch (session->auth.algo) {
1783 case CCP_AUTH_ALGO_AES_GCM:
1784 if (session->cipher.algo != CCP_CIPHER_ALGO_AES_GCM) {
1785 CCP_LOG_ERR("Incorrect chain order");
1788 result = ccp_perform_aes_gcm(op, cmd_q);
1789 b_info->desccnt += 5;
1792 CCP_LOG_ERR("Unsupported aead algo %d",
1793 session->aead_algo);
1800 process_ops_to_enqueue(const struct ccp_qp *qp,
1801 struct rte_crypto_op **op,
1802 struct ccp_queue *cmd_q,
1807 struct ccp_batch_info *b_info;
1808 struct ccp_session *session;
1810 if (rte_mempool_get(qp->batch_mp, (void **)&b_info)) {
1811 CCP_LOG_ERR("batch info allocation failed");
1814 /* populate batch info necessary for dequeue */
1816 b_info->lsb_buf_idx = 0;
1817 b_info->desccnt = 0;
1818 b_info->cmd_q = cmd_q;
1819 b_info->lsb_buf_phys =
1820 (phys_addr_t)rte_mem_virt2phy((void *)b_info->lsb_buf);
1821 rte_atomic64_sub(&b_info->cmd_q->free_slots, slots_req);
1823 b_info->head_offset = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx *
1825 for (i = 0; i < nb_ops; i++) {
1826 session = (struct ccp_session *)get_session_private_data(
1827 op[i]->sym->session,
1828 ccp_cryptodev_driver_id);
1829 switch (session->cmd_id) {
1830 case CCP_CMD_CIPHER:
1831 result = ccp_crypto_cipher(op[i], cmd_q, b_info);
1834 result = ccp_crypto_auth(op[i], cmd_q, b_info);
1836 case CCP_CMD_CIPHER_HASH:
1837 result = ccp_crypto_cipher(op[i], cmd_q, b_info);
1840 result = ccp_crypto_auth(op[i], cmd_q, b_info);
1842 case CCP_CMD_HASH_CIPHER:
1843 result = ccp_crypto_auth(op[i], cmd_q, b_info);
1846 result = ccp_crypto_cipher(op[i], cmd_q, b_info);
1848 case CCP_CMD_COMBINED:
1849 result = ccp_crypto_aead(op[i], cmd_q, b_info);
1852 CCP_LOG_ERR("Unsupported cmd_id");
1855 if (unlikely(result < 0)) {
1856 rte_atomic64_add(&b_info->cmd_q->free_slots,
1857 (slots_req - b_info->desccnt));
1860 b_info->op[i] = op[i];
1864 b_info->tail_offset = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx *
1868 /* Write the new tail address back to the queue register */
1869 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE,
1870 b_info->tail_offset);
1871 /* Turn the queue back on using our cached control register */
1872 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1873 cmd_q->qcontrol | CMD_Q_RUN);
1875 rte_ring_enqueue(qp->processed_pkts, (void *)b_info);
1880 static inline void ccp_auth_dq_prepare(struct rte_crypto_op *op)
1882 struct ccp_session *session;
1883 uint8_t *digest_data, *addr;
1884 struct rte_mbuf *m_last;
1885 int offset, digest_offset;
1886 uint8_t digest_le[64];
1888 session = (struct ccp_session *)get_session_private_data(
1890 ccp_cryptodev_driver_id);
1892 if (session->cmd_id == CCP_CMD_COMBINED) {
1893 digest_data = op->sym->aead.digest.data;
1894 digest_offset = op->sym->aead.data.offset +
1895 op->sym->aead.data.length;
1897 digest_data = op->sym->auth.digest.data;
1898 digest_offset = op->sym->auth.data.offset +
1899 op->sym->auth.data.length;
1901 m_last = rte_pktmbuf_lastseg(op->sym->m_src);
1902 addr = (uint8_t *)((char *)m_last->buf_addr + m_last->data_off +
1903 m_last->data_len - session->auth.ctx_len);
1906 offset = session->auth.offset;
1908 if (session->auth.engine == CCP_ENGINE_SHA)
1909 if ((session->auth.ut.sha_type != CCP_SHA_TYPE_1) &&
1910 (session->auth.ut.sha_type != CCP_SHA_TYPE_224) &&
1911 (session->auth.ut.sha_type != CCP_SHA_TYPE_256)) {
1912 /* All other algorithms require byte
1917 offset = session->auth.ctx_len -
1918 session->auth.offset - 1;
1919 for (i = 0; i < session->auth.digest_length; i++)
1920 digest_le[i] = addr[offset - i];
1925 op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
1926 if (session->auth.op == CCP_AUTH_OP_VERIFY) {
1927 if (memcmp(addr + offset, digest_data,
1928 session->auth.digest_length) != 0)
1929 op->status = RTE_CRYPTO_OP_STATUS_AUTH_FAILED;
1932 if (unlikely(digest_data == 0))
1933 digest_data = rte_pktmbuf_mtod_offset(
1934 op->sym->m_dst, uint8_t *,
1936 rte_memcpy(digest_data, addr + offset,
1937 session->auth.digest_length);
1939 /* Trim area used for digest from mbuf. */
1940 rte_pktmbuf_trim(op->sym->m_src,
1941 session->auth.ctx_len);
1945 ccp_prepare_ops(struct rte_crypto_op **op_d,
1946 struct ccp_batch_info *b_info,
1950 struct ccp_session *session;
1952 min_ops = RTE_MIN(nb_ops, b_info->opcnt);
1954 for (i = 0; i < min_ops; i++) {
1955 op_d[i] = b_info->op[b_info->op_idx++];
1956 session = (struct ccp_session *)get_session_private_data(
1957 op_d[i]->sym->session,
1958 ccp_cryptodev_driver_id);
1959 switch (session->cmd_id) {
1960 case CCP_CMD_CIPHER:
1961 op_d[i]->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
1964 case CCP_CMD_CIPHER_HASH:
1965 case CCP_CMD_HASH_CIPHER:
1966 case CCP_CMD_COMBINED:
1967 ccp_auth_dq_prepare(op_d[i]);
1970 CCP_LOG_ERR("Unsupported cmd_id");
1974 b_info->opcnt -= min_ops;
1979 process_ops_to_dequeue(struct ccp_qp *qp,
1980 struct rte_crypto_op **op,
1983 struct ccp_batch_info *b_info;
1984 uint32_t cur_head_offset;
1986 if (qp->b_info != NULL) {
1987 b_info = qp->b_info;
1988 if (unlikely(b_info->op_idx > 0))
1990 } else if (rte_ring_dequeue(qp->processed_pkts,
1993 cur_head_offset = CCP_READ_REG(b_info->cmd_q->reg_base,
1994 CMD_Q_HEAD_LO_BASE);
1996 if (b_info->head_offset < b_info->tail_offset) {
1997 if ((cur_head_offset >= b_info->head_offset) &&
1998 (cur_head_offset < b_info->tail_offset)) {
1999 qp->b_info = b_info;
2003 if ((cur_head_offset >= b_info->head_offset) ||
2004 (cur_head_offset < b_info->tail_offset)) {
2005 qp->b_info = b_info;
2012 nb_ops = ccp_prepare_ops(op, b_info, nb_ops);
2013 rte_atomic64_add(&b_info->cmd_q->free_slots, b_info->desccnt);
2014 b_info->desccnt = 0;
2015 if (b_info->opcnt > 0) {
2016 qp->b_info = b_info;
2018 rte_mempool_put(qp->batch_mp, (void *)b_info);