1 /* SPDX-License-Identifier: (BSD-3-Clause OR GPL-2.0)
2 * Copyright(c) 2015-2019 Intel Corporation
5 #include <openssl/sha.h> /* Needed to calculate pre-compute values */
6 #include <openssl/aes.h> /* Needed to calculate pre-compute values */
7 #include <openssl/md5.h> /* Needed to calculate pre-compute values */
8 #include <openssl/evp.h> /* Needed for bpi runt block processing */
10 #include <rte_memcpy.h>
11 #include <rte_common.h>
12 #include <rte_spinlock.h>
13 #include <rte_byteorder.h>
15 #include <rte_malloc.h>
16 #include <rte_crypto_sym.h>
19 #include "qat_sym_session.h"
20 #include "qat_sym_pmd.h"
22 /** Frees a context previously created
23 * Depends on openssl libcrypto
/* NOTE(review): listing is truncated (embedded line numbers jump 23->26->29);
 * the return type and any NULL guard are not visible — confirm against the
 * full source before relying on this excerpt.
 */
26 bpi_cipher_ctx_free(void *bpi_ctx)
/* Release the OpenSSL EVP cipher context used for DOCSIS BPI runt-block
 * processing; created by bpi_cipher_ctx_init() below.
 */
29 EVP_CIPHER_CTX_free((EVP_CIPHER_CTX *)bpi_ctx);
32 /** Creates a context in either AES or DES in ECB mode
33 * Depends on openssl libcrypto
/* NOTE(review): excerpt is gappy — the return type, the NULL-ctx check after
 * EVP_CIPHER_CTX_new(), the DES branch body (presumably EVP_des_ecb()) and
 * the return statements fall on elided lines; verify in the full source.
 */
36 bpi_cipher_ctx_init(enum rte_crypto_cipher_algorithm cryptodev_algo,
37 enum rte_crypto_cipher_operation direction __rte_unused,
38 uint8_t *key, void **ctx)
40 const EVP_CIPHER *algo = NULL;
42 *ctx = EVP_CIPHER_CTX_new();
/* Select ECB primitive by cryptodev algo: DES-DOCSISBPI branch body is
 * elided here; the visible assignment covers the AES-DOCSISBPI path.
 */
49 if (cryptodev_algo == RTE_CRYPTO_CIPHER_DES_DOCSISBPI)
52 algo = EVP_aes_128_ecb();
54 /* IV will be ECB encrypted whether direction is encrypt or decrypt*/
55 if (EVP_EncryptInit_ex(*ctx, algo, NULL, key, 0) != 1) {
/* Error path: free the freshly created context so the caller does not leak */
64 EVP_CIPHER_CTX_free(*ctx);
/* Check whether a cipher algorithm appears in this device's capability table.
 * NOTE(review): return type, the index variable declaration and the
 * continue/return statements fall on elided lines of this truncated listing.
 */
69 qat_is_cipher_alg_supported(enum rte_crypto_cipher_algorithm algo,
70 struct qat_sym_dev_private *internals)
73 const struct rte_cryptodev_capabilities *capability;
/* Linear scan; the capability array is terminated by an entry whose op is
 * RTE_CRYPTO_OP_TYPE_UNDEFINED.
 */
75 while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
76 RTE_CRYPTO_OP_TYPE_UNDEFINED) {
/* Skip non-symmetric entries and non-cipher xform entries */
77 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
80 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_CIPHER)
83 if (capability->sym.cipher.algo == algo)
/* Check whether an auth algorithm appears in this device's capability table.
 * Mirrors qat_is_cipher_alg_supported() but matches AUTH xform entries.
 * NOTE(review): return type, index declaration and continue/return lines
 * are elided in this truncated listing.
 */
90 qat_is_auth_alg_supported(enum rte_crypto_auth_algorithm algo,
91 struct qat_sym_dev_private *internals)
94 const struct rte_cryptodev_capabilities *capability;
/* Scan until the RTE_CRYPTO_OP_TYPE_UNDEFINED terminator entry */
96 while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
97 RTE_CRYPTO_OP_TYPE_UNDEFINED) {
98 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
101 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_AUTH)
104 if (capability->sym.auth.algo == algo)
/* Tear down the driver-private part of a symmetric session: free the
 * OpenSSL BPI context (if any), scrub the private data, detach it from the
 * generic session and return it to its mempool.
 * NOTE(review): the NULL check on sess_priv that presumably guards this body
 * falls on an elided line of this truncated listing.
 */
111 qat_sym_session_clear(struct rte_cryptodev *dev,
112 struct rte_cryptodev_sym_session *sess)
114 uint8_t index = dev->driver_id;
115 void *sess_priv = get_sym_session_private_data(sess, index);
116 struct qat_sym_session *s = (struct qat_sym_session *)sess_priv;
120 bpi_cipher_ctx_free(s->bpi_ctx);
/* Zeroize before returning to the pool so key material does not linger */
121 memset(s, 0, qat_sym_session_get_private_size(dev));
122 struct rte_mempool *sess_mp = rte_mempool_from_obj(sess_priv);
124 set_sym_session_private_data(sess, index, NULL);
125 rte_mempool_put(sess_mp, sess_priv);
/* Map an rte_crypto xform chain onto a QAT firmware LA command id.
 * Chains handled: cipher-only, auth-only, single AEAD xform, and two-element
 * cipher+auth chains in either order. NOTE(review): the return type, the
 * "Cipher Only" leading comment and the error-return for unsupported chains
 * fall on elided lines of this truncated listing.
 */
130 qat_get_cmd_id(const struct rte_crypto_sym_xform *xform)
133 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER && xform->next == NULL)
134 return ICP_QAT_FW_LA_CMD_CIPHER;
136 /* Authentication Only */
137 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH && xform->next == NULL)
138 return ICP_QAT_FW_LA_CMD_AUTH;
141 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
142 /* AES-GCM and AES-CCM works with different direction
143 * GCM first encrypts and generate hash where AES-CCM
144 * first generate hash and encrypts. Similar relation
145 * applies to decryption.
147 if (xform->aead.op == RTE_CRYPTO_AEAD_OP_ENCRYPT)
148 if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
149 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
/* else branches of the nested ifs above (CCM-encrypt / decrypt paths) */
151 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
153 if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
154 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
156 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
/* A single non-AEAD xform that is neither cipher- nor auth-only is invalid */
159 if (xform->next == NULL)
162 /* Cipher then Authenticate */
163 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
164 xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
165 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
167 /* Authenticate then Cipher */
168 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH &&
169 xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
170 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
/* Walk the xform chain and return the first AUTH xform.
 * NOTE(review): the return statement, the chain-walking loop (if any) and the
 * not-found path are on elided lines of this truncated listing — confirm.
 */
175 static struct rte_crypto_auth_xform *
176 qat_get_auth_xform(struct rte_crypto_sym_xform *xform)
179 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH)
/* Walk the xform chain and return the first CIPHER xform.
 * NOTE(review): the chain-walking/not-found logic (lines 194+) is elided in
 * this truncated listing — only the matching case is visible.
 */
188 static struct rte_crypto_cipher_xform *
189 qat_get_cipher_xform(struct rte_crypto_sym_xform *xform)
192 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
193 return &xform->cipher;
/* Configure the cipher half of a QAT session from a cipher xform: record the
 * IV placement, validate the key for the chosen algorithm, pick the QAT HW
 * cipher mode, set the direction and build the cipher content descriptor.
 * NOTE(review): this listing is subsampled — `ret` declaration, `break`s,
 * `return` statements and error labels of the switch all fall on elided
 * lines; per-case comments below describe only the visible statements.
 */
202 qat_sym_session_configure_cipher(struct rte_cryptodev *dev,
203 struct rte_crypto_sym_xform *xform,
204 struct qat_sym_session *session)
206 struct qat_sym_dev_private *internals = dev->data->dev_private;
207 struct rte_crypto_cipher_xform *cipher_xform = NULL;
210 /* Get cipher xform from crypto xform chain */
211 cipher_xform = qat_get_cipher_xform(xform);
213 session->cipher_iv.offset = cipher_xform->iv.offset;
214 session->cipher_iv.length = cipher_xform->iv.length;
/* Each case validates the key length (which also selects qat_cipher_alg)
 * and then sets the matching QAT hardware cipher mode.
 */
216 switch (cipher_xform->algo) {
217 case RTE_CRYPTO_CIPHER_AES_CBC:
218 if (qat_sym_validate_aes_key(cipher_xform->key.length,
219 &session->qat_cipher_alg) != 0) {
220 QAT_LOG(ERR, "Invalid AES cipher key size");
224 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
226 case RTE_CRYPTO_CIPHER_AES_CTR:
227 if (qat_sym_validate_aes_key(cipher_xform->key.length,
228 &session->qat_cipher_alg) != 0) {
229 QAT_LOG(ERR, "Invalid AES cipher key size");
233 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
235 case RTE_CRYPTO_CIPHER_SNOW3G_UEA2:
236 if (qat_sym_validate_snow3g_key(cipher_xform->key.length,
237 &session->qat_cipher_alg) != 0) {
238 QAT_LOG(ERR, "Invalid SNOW 3G cipher key size");
242 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
244 case RTE_CRYPTO_CIPHER_NULL:
245 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
247 case RTE_CRYPTO_CIPHER_KASUMI_F8:
248 if (qat_sym_validate_kasumi_key(cipher_xform->key.length,
249 &session->qat_cipher_alg) != 0) {
250 QAT_LOG(ERR, "Invalid KASUMI cipher key size");
254 session->qat_mode = ICP_QAT_HW_CIPHER_F8_MODE;
256 case RTE_CRYPTO_CIPHER_3DES_CBC:
257 if (qat_sym_validate_3des_key(cipher_xform->key.length,
258 &session->qat_cipher_alg) != 0) {
259 QAT_LOG(ERR, "Invalid 3DES cipher key size");
263 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
265 case RTE_CRYPTO_CIPHER_DES_CBC:
266 if (qat_sym_validate_des_key(cipher_xform->key.length,
267 &session->qat_cipher_alg) != 0) {
268 QAT_LOG(ERR, "Invalid DES cipher key size");
272 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
274 case RTE_CRYPTO_CIPHER_3DES_CTR:
275 if (qat_sym_validate_3des_key(cipher_xform->key.length,
276 &session->qat_cipher_alg) != 0) {
277 QAT_LOG(ERR, "Invalid 3DES cipher key size");
281 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
/* DOCSIS BPI modes additionally create an OpenSSL ECB context used for the
 * trailing "runt" block; middle arguments of the init call are elided here.
 */
283 case RTE_CRYPTO_CIPHER_DES_DOCSISBPI:
284 ret = bpi_cipher_ctx_init(
287 cipher_xform->key.data,
290 QAT_LOG(ERR, "failed to create DES BPI ctx");
293 if (qat_sym_validate_des_key(cipher_xform->key.length,
294 &session->qat_cipher_alg) != 0) {
295 QAT_LOG(ERR, "Invalid DES cipher key size");
299 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
301 case RTE_CRYPTO_CIPHER_AES_DOCSISBPI:
302 ret = bpi_cipher_ctx_init(
305 cipher_xform->key.data,
308 QAT_LOG(ERR, "failed to create AES BPI ctx");
311 if (qat_sym_validate_aes_docsisbpi_key(cipher_xform->key.length,
312 &session->qat_cipher_alg) != 0) {
313 QAT_LOG(ERR, "Invalid AES DOCSISBPI key size");
317 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
/* ZUC availability depends on device generation, so check capabilities */
319 case RTE_CRYPTO_CIPHER_ZUC_EEA3:
320 if (!qat_is_cipher_alg_supported(
321 cipher_xform->algo, internals)) {
322 QAT_LOG(ERR, "%s not supported on this device",
323 rte_crypto_cipher_algorithm_strings
324 [cipher_xform->algo]);
328 if (qat_sym_validate_zuc_key(cipher_xform->key.length,
329 &session->qat_cipher_alg) != 0) {
330 QAT_LOG(ERR, "Invalid ZUC cipher key size");
334 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
/* XTS keys hold two AES keys, hence the halved length in validation */
336 case RTE_CRYPTO_CIPHER_AES_XTS:
337 if ((cipher_xform->key.length/2) == ICP_QAT_HW_AES_192_KEY_SZ) {
338 QAT_LOG(ERR, "AES-XTS-192 not supported");
342 if (qat_sym_validate_aes_key((cipher_xform->key.length/2),
343 &session->qat_cipher_alg) != 0) {
344 QAT_LOG(ERR, "Invalid AES-XTS cipher key size");
348 session->qat_mode = ICP_QAT_HW_CIPHER_XTS_MODE;
/* Algorithms the QAT PMD knowingly does not implement */
350 case RTE_CRYPTO_CIPHER_3DES_ECB:
351 case RTE_CRYPTO_CIPHER_AES_ECB:
352 case RTE_CRYPTO_CIPHER_AES_F8:
353 case RTE_CRYPTO_CIPHER_ARC4:
354 QAT_LOG(ERR, "Crypto QAT PMD: Unsupported Cipher alg %u",
359 QAT_LOG(ERR, "Crypto: Undefined Cipher specified %u\n",
365 if (cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
366 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
368 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
/* Build the cipher content descriptor; failure falls through to cleanup */
370 if (qat_sym_session_aead_create_cd_cipher(session,
371 cipher_xform->key.data,
372 cipher_xform->key.length)) {
/* Error cleanup: release the BPI context so the session does not leak it */
380 if (session->bpi_ctx) {
381 bpi_cipher_ctx_free(session->bpi_ctx);
382 session->bpi_ctx = NULL;
/* Public session-configure entry point: allocate private data from the
 * session mempool, populate it via qat_sym_session_set_parameters(), and
 * attach it to the generic session under this driver's id.
 * NOTE(review): return type, `ret` declaration and return statements are on
 * elided lines of this truncated listing.
 */
388 qat_sym_session_configure(struct rte_cryptodev *dev,
389 struct rte_crypto_sym_xform *xform,
390 struct rte_cryptodev_sym_session *sess,
391 struct rte_mempool *mempool)
393 void *sess_private_data;
396 if (rte_mempool_get(mempool, &sess_private_data)) {
398 "Couldn't get object from session mempool");
402 ret = qat_sym_session_set_parameters(dev, xform, sess_private_data);
405 "Crypto QAT PMD: failed to configure session parameters");
407 /* Return session to mempool */
408 rte_mempool_put(mempool, sess_private_data);
412 set_sym_session_private_data(sess, dev->driver_id,
/* Fill in a qat_sym_session from the xform chain: compute the content
 * descriptor IOVA, derive the FW command id, then dispatch to the cipher /
 * auth / AEAD configure helpers in the order the command requires.
 * NOTE(review): listing is subsampled — ret checks, break statements and the
 * arguments of the chained configure calls are partially elided.
 */
419 qat_sym_session_set_parameters(struct rte_cryptodev *dev,
420 struct rte_crypto_sym_xform *xform, void *session_private)
422 struct qat_sym_session *session = session_private;
426 /* Set context descriptor physical address */
427 session->cd_paddr = rte_mempool_virt2iova(session) +
428 offsetof(struct qat_sym_session, cd);
430 session->min_qat_dev_gen = QAT_GEN1;
432 /* Get requested QAT command id */
433 qat_cmd_id = qat_get_cmd_id(xform);
434 if (qat_cmd_id < 0 || qat_cmd_id >= ICP_QAT_FW_LA_CMD_DELIMITER) {
435 QAT_LOG(ERR, "Unsupported xform chain requested");
438 session->qat_cmd = (enum icp_qat_fw_la_cmd_id)qat_cmd_id;
439 switch (session->qat_cmd) {
440 case ICP_QAT_FW_LA_CMD_CIPHER:
441 ret = qat_sym_session_configure_cipher(dev, xform, session);
445 case ICP_QAT_FW_LA_CMD_AUTH:
446 ret = qat_sym_session_configure_auth(dev, xform, session);
/* Chained commands: AEAD xforms use the dedicated AEAD helper, otherwise
 * configure cipher and auth parts in command order.
 */
450 case ICP_QAT_FW_LA_CMD_CIPHER_HASH:
451 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
452 ret = qat_sym_session_configure_aead(xform,
457 ret = qat_sym_session_configure_cipher(dev,
461 ret = qat_sym_session_configure_auth(dev,
467 case ICP_QAT_FW_LA_CMD_HASH_CIPHER:
468 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
469 ret = qat_sym_session_configure_aead(xform,
474 ret = qat_sym_session_configure_auth(dev,
478 ret = qat_sym_session_configure_cipher(dev,
/* Commands the LA firmware defines but this PMD does not service */
484 case ICP_QAT_FW_LA_CMD_TRNG_GET_RANDOM:
485 case ICP_QAT_FW_LA_CMD_TRNG_TEST:
486 case ICP_QAT_FW_LA_CMD_SSL3_KEY_DERIVE:
487 case ICP_QAT_FW_LA_CMD_TLS_V1_1_KEY_DERIVE:
488 case ICP_QAT_FW_LA_CMD_TLS_V1_2_KEY_DERIVE:
489 case ICP_QAT_FW_LA_CMD_MGF1:
490 case ICP_QAT_FW_LA_CMD_AUTH_PRE_COMP:
491 case ICP_QAT_FW_LA_CMD_CIPHER_PRE_COMP:
492 case ICP_QAT_FW_LA_CMD_DELIMITER:
493 QAT_LOG(ERR, "Unsupported Service %u",
497 QAT_LOG(ERR, "Unsupported Service %u",
/* Configure the auth half of a QAT session: map the rte auth algorithm to a
 * QAT HW hash algorithm, record IV/digest parameters, and build the auth
 * (and, for GMAC, also cipher) content descriptors.
 * NOTE(review): listing is subsampled — break/return statements and several
 * call arguments (IV pointers, op codes) fall on elided lines.
 */
506 qat_sym_session_configure_auth(struct rte_cryptodev *dev,
507 struct rte_crypto_sym_xform *xform,
508 struct qat_sym_session *session)
510 struct rte_crypto_auth_xform *auth_xform = qat_get_auth_xform(xform);
511 struct qat_sym_dev_private *internals = dev->data->dev_private;
512 uint8_t *key_data = auth_xform->key.data;
513 uint8_t key_length = auth_xform->key.length;
514 session->aes_cmac = 0;
516 switch (auth_xform->algo) {
517 case RTE_CRYPTO_AUTH_SHA1_HMAC:
518 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA1;
520 case RTE_CRYPTO_AUTH_SHA224_HMAC:
521 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA224;
523 case RTE_CRYPTO_AUTH_SHA256_HMAC:
524 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA256;
526 case RTE_CRYPTO_AUTH_SHA384_HMAC:
527 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA384;
529 case RTE_CRYPTO_AUTH_SHA512_HMAC:
530 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA512;
532 case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
533 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
/* CMAC reuses the XCBC_MAC hardware path; aes_cmac flags the CMAC-specific
 * subkey precompute in qat_sym_do_precomputes().
 */
535 case RTE_CRYPTO_AUTH_AES_CMAC:
536 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
537 session->aes_cmac = 1;
/* GMAC rides on the GCM engine, so it also needs cipher alg/mode set */
539 case RTE_CRYPTO_AUTH_AES_GMAC:
540 if (qat_sym_validate_aes_key(auth_xform->key.length,
541 &session->qat_cipher_alg) != 0) {
542 QAT_LOG(ERR, "Invalid AES key size");
545 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
546 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
549 case RTE_CRYPTO_AUTH_SNOW3G_UIA2:
550 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2;
552 case RTE_CRYPTO_AUTH_MD5_HMAC:
553 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_MD5;
555 case RTE_CRYPTO_AUTH_NULL:
556 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_NULL;
558 case RTE_CRYPTO_AUTH_KASUMI_F9:
559 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_KASUMI_F9;
/* ZUC EIA3 availability is generation-dependent; consult capabilities */
561 case RTE_CRYPTO_AUTH_ZUC_EIA3:
562 if (!qat_is_auth_alg_supported(auth_xform->algo, internals)) {
563 QAT_LOG(ERR, "%s not supported on this device",
564 rte_crypto_auth_algorithm_strings
568 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3;
/* Plain (non-HMAC) hashes are not offered by this PMD */
570 case RTE_CRYPTO_AUTH_SHA1:
571 case RTE_CRYPTO_AUTH_SHA256:
572 case RTE_CRYPTO_AUTH_SHA512:
573 case RTE_CRYPTO_AUTH_SHA224:
574 case RTE_CRYPTO_AUTH_SHA384:
575 case RTE_CRYPTO_AUTH_MD5:
576 case RTE_CRYPTO_AUTH_AES_CBC_MAC:
577 QAT_LOG(ERR, "Crypto: Unsupported hash alg %u",
581 QAT_LOG(ERR, "Crypto: Undefined Hash algo %u specified",
586 session->auth_iv.offset = auth_xform->iv.offset;
587 session->auth_iv.length = auth_xform->iv.length;
/* GMAC: descriptor build order depends on direction — cipher first for
 * generate (encrypt), auth first for verify (decrypt).
 */
589 if (auth_xform->algo == RTE_CRYPTO_AUTH_AES_GMAC) {
590 if (auth_xform->op == RTE_CRYPTO_AUTH_OP_GENERATE) {
591 session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER_HASH;
592 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
594 * It needs to create cipher desc content first,
595 * then authentication
598 if (qat_sym_session_aead_create_cd_cipher(session,
599 auth_xform->key.data,
600 auth_xform->key.length))
603 if (qat_sym_session_aead_create_cd_auth(session,
607 auth_xform->digest_length,
611 session->qat_cmd = ICP_QAT_FW_LA_CMD_HASH_CIPHER;
612 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
614 * It needs to create authentication desc content first,
618 if (qat_sym_session_aead_create_cd_auth(session,
622 auth_xform->digest_length,
626 if (qat_sym_session_aead_create_cd_cipher(session,
627 auth_xform->key.data,
628 auth_xform->key.length))
631 /* Restore to authentication only only */
632 session->qat_cmd = ICP_QAT_FW_LA_CMD_AUTH;
/* Non-GMAC algorithms: single auth content descriptor */
634 if (qat_sym_session_aead_create_cd_auth(session,
638 auth_xform->digest_length,
643 session->digest_length = auth_xform->digest_length;
/* Configure a session for AEAD (AES-GCM / AES-CCM): store IV as cipher IV,
 * validate the key, then build cipher and auth content descriptors in the
 * order the algorithm/direction combination requires.
 * NOTE(review): listing is subsampled — return statements, break lines and
 * some call arguments are elided.
 */
648 qat_sym_session_configure_aead(struct rte_crypto_sym_xform *xform,
649 struct qat_sym_session *session)
651 struct rte_crypto_aead_xform *aead_xform = &xform->aead;
652 enum rte_crypto_auth_operation crypto_operation;
655 * Store AEAD IV parameters as cipher IV,
656 * to avoid unnecessary memory usage
658 session->cipher_iv.offset = xform->aead.iv.offset;
659 session->cipher_iv.length = xform->aead.iv.length;
661 switch (aead_xform->algo) {
662 case RTE_CRYPTO_AEAD_AES_GCM:
663 if (qat_sym_validate_aes_key(aead_xform->key.length,
664 &session->qat_cipher_alg) != 0) {
665 QAT_LOG(ERR, "Invalid AES key size");
668 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
669 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
671 case RTE_CRYPTO_AEAD_AES_CCM:
672 if (qat_sym_validate_aes_key(aead_xform->key.length,
673 &session->qat_cipher_alg) != 0) {
674 QAT_LOG(ERR, "Invalid AES key size");
677 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
678 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC;
681 QAT_LOG(ERR, "Crypto: Undefined AEAD specified %u\n",
/* GCM-encrypt and CCM-decrypt run the cipher slice first (see
 * qat_get_cmd_id() for the rationale); the mirrored combinations below
 * run auth first.
 */
686 if ((aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT &&
687 aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) ||
688 (aead_xform->op == RTE_CRYPTO_AEAD_OP_DECRYPT &&
689 aead_xform->algo == RTE_CRYPTO_AEAD_AES_CCM)) {
690 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
692 * It needs to create cipher desc content first,
693 * then authentication
695 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
696 RTE_CRYPTO_AUTH_OP_GENERATE : RTE_CRYPTO_AUTH_OP_VERIFY;
698 if (qat_sym_session_aead_create_cd_cipher(session,
699 aead_xform->key.data,
700 aead_xform->key.length))
703 if (qat_sym_session_aead_create_cd_auth(session,
704 aead_xform->key.data,
705 aead_xform->key.length,
706 aead_xform->aad_length,
707 aead_xform->digest_length,
711 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
713 * It needs to create authentication desc content first,
717 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
718 RTE_CRYPTO_AUTH_OP_VERIFY : RTE_CRYPTO_AUTH_OP_GENERATE;
720 if (qat_sym_session_aead_create_cd_auth(session,
721 aead_xform->key.data,
722 aead_xform->key.length,
723 aead_xform->aad_length,
724 aead_xform->digest_length,
728 if (qat_sym_session_aead_create_cd_cipher(session,
729 aead_xform->key.data,
730 aead_xform->key.length))
734 session->digest_length = aead_xform->digest_length;
/* Size of the driver-private session object, rounded up to an 8-byte
 * multiple so mempool objects keep the alignment the descriptors expect.
 */
738 unsigned int qat_sym_session_get_private_size(
739 struct rte_cryptodev *dev __rte_unused)
741 return RTE_ALIGN_CEIL(sizeof(struct qat_sym_session), 8);
744 /* returns block size in bytes per cipher algo */
/* NOTE(review): return statements for the error path (after the QAT_LOG on
 * line 757) are on elided lines of this truncated listing.
 */
745 int qat_cipher_get_block_size(enum icp_qat_hw_cipher_algo qat_cipher_alg)
747 switch (qat_cipher_alg) {
748 case ICP_QAT_HW_CIPHER_ALGO_DES:
749 return ICP_QAT_HW_DES_BLK_SZ;
750 case ICP_QAT_HW_CIPHER_ALGO_3DES:
751 return ICP_QAT_HW_3DES_BLK_SZ;
/* All AES key sizes share the 16-byte AES block size */
752 case ICP_QAT_HW_CIPHER_ALGO_AES128:
753 case ICP_QAT_HW_CIPHER_ALGO_AES192:
754 case ICP_QAT_HW_CIPHER_ALGO_AES256:
755 return ICP_QAT_HW_AES_BLK_SZ;
757 QAT_LOG(ERR, "invalid block cipher alg %u", qat_cipher_alg);
764 * Returns size in bytes per hash algo for state1 size field in cd_ctrl
765 * This is digest size rounded up to nearest quadword
/* NOTE(review): the doc-comment opener (line 763) and the error-path return
 * are on elided lines of this truncated listing.
 */
767 static int qat_hash_get_state1_size(enum icp_qat_hw_auth_algo qat_hash_alg)
769 switch (qat_hash_alg) {
770 case ICP_QAT_HW_AUTH_ALGO_SHA1:
771 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA1_STATE1_SZ,
772 QAT_HW_DEFAULT_ALIGNMENT);
773 case ICP_QAT_HW_AUTH_ALGO_SHA224:
774 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA224_STATE1_SZ,
775 QAT_HW_DEFAULT_ALIGNMENT);
776 case ICP_QAT_HW_AUTH_ALGO_SHA256:
777 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA256_STATE1_SZ,
778 QAT_HW_DEFAULT_ALIGNMENT);
779 case ICP_QAT_HW_AUTH_ALGO_SHA384:
780 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA384_STATE1_SZ,
781 QAT_HW_DEFAULT_ALIGNMENT);
782 case ICP_QAT_HW_AUTH_ALGO_SHA512:
783 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
784 QAT_HW_DEFAULT_ALIGNMENT);
785 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
786 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ,
787 QAT_HW_DEFAULT_ALIGNMENT);
788 case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
789 case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
790 return QAT_HW_ROUND_UP(ICP_QAT_HW_GALOIS_128_STATE1_SZ,
791 QAT_HW_DEFAULT_ALIGNMENT);
792 case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
793 return QAT_HW_ROUND_UP(ICP_QAT_HW_ZUC_3G_EIA3_STATE1_SZ,
794 QAT_HW_DEFAULT_ALIGNMENT);
795 case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
796 return QAT_HW_ROUND_UP(ICP_QAT_HW_SNOW_3G_UIA2_STATE1_SZ,
797 QAT_HW_DEFAULT_ALIGNMENT);
798 case ICP_QAT_HW_AUTH_ALGO_MD5:
799 return QAT_HW_ROUND_UP(ICP_QAT_HW_MD5_STATE1_SZ,
800 QAT_HW_DEFAULT_ALIGNMENT);
801 case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
802 return QAT_HW_ROUND_UP(ICP_QAT_HW_KASUMI_F9_STATE1_SZ,
803 QAT_HW_DEFAULT_ALIGNMENT);
804 case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
805 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_CBC_MAC_STATE1_SZ,
806 QAT_HW_DEFAULT_ALIGNMENT);
807 case ICP_QAT_HW_AUTH_ALGO_NULL:
808 return QAT_HW_ROUND_UP(ICP_QAT_HW_NULL_STATE1_SZ,
809 QAT_HW_DEFAULT_ALIGNMENT);
810 case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
811 /* return maximum state1 size in this case */
812 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
813 QAT_HW_DEFAULT_ALIGNMENT);
815 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
821 /* returns digest size in bytes per hash algo */
/* NOTE(review): the default/error return is on an elided line of this
 * truncated listing. Digest size equals the STATE1 size for these algos.
 */
822 static int qat_hash_get_digest_size(enum icp_qat_hw_auth_algo qat_hash_alg)
824 switch (qat_hash_alg) {
825 case ICP_QAT_HW_AUTH_ALGO_SHA1:
826 return ICP_QAT_HW_SHA1_STATE1_SZ;
827 case ICP_QAT_HW_AUTH_ALGO_SHA224:
828 return ICP_QAT_HW_SHA224_STATE1_SZ;
829 case ICP_QAT_HW_AUTH_ALGO_SHA256:
830 return ICP_QAT_HW_SHA256_STATE1_SZ;
831 case ICP_QAT_HW_AUTH_ALGO_SHA384:
832 return ICP_QAT_HW_SHA384_STATE1_SZ;
833 case ICP_QAT_HW_AUTH_ALGO_SHA512:
834 return ICP_QAT_HW_SHA512_STATE1_SZ;
835 case ICP_QAT_HW_AUTH_ALGO_MD5:
836 return ICP_QAT_HW_MD5_STATE1_SZ;
837 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
838 return ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
839 case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
840 /* return maximum digest size in this case */
841 return ICP_QAT_HW_SHA512_STATE1_SZ;
843 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
849 /* returns block size in byes per hash algo */
/* NOTE(review): return values for SHA1 (line 854), GALOIS (line 864) and MD5
 * (line 868) cases, and the error return, are on elided lines of this
 * truncated listing. Block sizes come from OpenSSL *_CBLOCK constants.
 */
850 static int qat_hash_get_block_size(enum icp_qat_hw_auth_algo qat_hash_alg)
852 switch (qat_hash_alg) {
853 case ICP_QAT_HW_AUTH_ALGO_SHA1:
855 case ICP_QAT_HW_AUTH_ALGO_SHA224:
856 return SHA256_CBLOCK;
857 case ICP_QAT_HW_AUTH_ALGO_SHA256:
858 return SHA256_CBLOCK;
859 case ICP_QAT_HW_AUTH_ALGO_SHA384:
860 return SHA512_CBLOCK;
861 case ICP_QAT_HW_AUTH_ALGO_SHA512:
862 return SHA512_CBLOCK;
863 case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
865 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
866 return ICP_QAT_HW_AES_BLK_SZ;
867 case ICP_QAT_HW_AUTH_ALGO_MD5:
869 case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
870 /* return maximum block size in this case */
871 return SHA512_CBLOCK;
873 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
/* Run one SHA-1 compression round over a single input block and copy the raw
 * internal state out (used as the HMAC ipad/opad precompute).
 * NOTE(review): the SHA_CTX declaration and return statements are on elided
 * lines of this truncated listing.
 */
879 static int partial_hash_sha1(uint8_t *data_in, uint8_t *data_out)
883 if (!SHA1_Init(&ctx))
885 SHA1_Transform(&ctx, data_in);
/* Copies the leading h0..h4 state words; relies on SHA_CTX layout */
886 rte_memcpy(data_out, &ctx, SHA_DIGEST_LENGTH);
/* One SHA-224 compression round over a single block; SHA-224 shares the
 * SHA-256 transform, so the full 32-byte state is copied out.
 * NOTE(review): ctx declaration and returns are on elided lines.
 */
890 static int partial_hash_sha224(uint8_t *data_in, uint8_t *data_out)
894 if (!SHA224_Init(&ctx))
896 SHA256_Transform(&ctx, data_in);
897 rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
/* One SHA-256 compression round over a single block; raw state copied out.
 * NOTE(review): ctx declaration and returns are on elided lines.
 */
901 static int partial_hash_sha256(uint8_t *data_in, uint8_t *data_out)
905 if (!SHA256_Init(&ctx))
907 SHA256_Transform(&ctx, data_in);
908 rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
/* One SHA-384 compression round; SHA-384 shares the SHA-512 transform, so
 * the full 64-byte state is copied out.
 * NOTE(review): ctx declaration and returns are on elided lines.
 */
912 static int partial_hash_sha384(uint8_t *data_in, uint8_t *data_out)
916 if (!SHA384_Init(&ctx))
918 SHA512_Transform(&ctx, data_in);
919 rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
/* One SHA-512 compression round over a single block; raw state copied out.
 * NOTE(review): ctx declaration and returns are on elided lines.
 */
923 static int partial_hash_sha512(uint8_t *data_in, uint8_t *data_out)
927 if (!SHA512_Init(&ctx))
929 SHA512_Transform(&ctx, data_in);
930 rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
/* One MD5 compression round over a single block; raw state copied out.
 * NOTE(review): the ctx declaration, MD5_Init call/check and return are on
 * elided lines of this truncated listing.
 */
934 static int partial_hash_md5(uint8_t *data_in, uint8_t *data_out)
940 MD5_Transform(&ctx, data_in);
941 rte_memcpy(data_out, &ctx, MD5_DIGEST_LENGTH);
/* Dispatch a one-block partial hash for the given algorithm and write the
 * state into data_out with each word byte-swapped to big-endian (32-bit
 * words for SHA-1/224/256, 64-bit for SHA-384/512). MD5 state is written
 * directly with no swap.
 * NOTE(review): parameter lines 947-948 (data_in/data_out), the `i` and
 * digest_size declarations, break statements and returns are on elided
 * lines of this truncated listing.
 */
946 static int partial_hash_compute(enum icp_qat_hw_auth_algo hash_alg,
/* Scratch digest sized for the largest supported algorithm (SHA-512) */
951 uint8_t digest[qat_hash_get_digest_size(
952 ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
953 uint32_t *hash_state_out_be32;
954 uint64_t *hash_state_out_be64;
957 digest_size = qat_hash_get_digest_size(hash_alg);
958 if (digest_size <= 0)
961 hash_state_out_be32 = (uint32_t *)data_out;
962 hash_state_out_be64 = (uint64_t *)data_out;
965 case ICP_QAT_HW_AUTH_ALGO_SHA1:
966 if (partial_hash_sha1(data_in, digest))
968 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
969 *hash_state_out_be32 =
970 rte_bswap32(*(((uint32_t *)digest)+i));
972 case ICP_QAT_HW_AUTH_ALGO_SHA224:
973 if (partial_hash_sha224(data_in, digest))
975 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
976 *hash_state_out_be32 =
977 rte_bswap32(*(((uint32_t *)digest)+i));
979 case ICP_QAT_HW_AUTH_ALGO_SHA256:
980 if (partial_hash_sha256(data_in, digest))
982 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
983 *hash_state_out_be32 =
984 rte_bswap32(*(((uint32_t *)digest)+i))
986 case ICP_QAT_HW_AUTH_ALGO_SHA384:
987 if (partial_hash_sha384(data_in, digest))
989 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
990 *hash_state_out_be64 =
991 rte_bswap64(*(((uint64_t *)digest)+i));
993 case ICP_QAT_HW_AUTH_ALGO_SHA512:
994 if (partial_hash_sha512(data_in, digest))
996 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
997 *hash_state_out_be64 =
998 rte_bswap64(*(((uint64_t *)digest)+i));
1000 case ICP_QAT_HW_AUTH_ALGO_MD5:
1001 if (partial_hash_md5(data_in, data_out))
1005 QAT_LOG(ERR, "invalid hash alg %u", hash_alg);
/* HMAC inner/outer pad bytes (RFC 2104) used for ipad/opad precomputes */
1011 #define HMAC_IPAD_VALUE 0x36
1012 #define HMAC_OPAD_VALUE 0x5c
/* AES-XCBC-MAC derives three subkeys (K1, K2, K3) */
1013 #define HASH_XCBC_PRECOMP_KEY_NUM 3
/* All-zero block; encrypting it with the auth key yields the CMAC L value
 * from which subkeys are derived (RFC 4493).
 */
1015 static const uint8_t AES_CMAC_SEED[ICP_QAT_HW_AES_128_KEY_SZ];
/* CMAC subkey derivation (RFC 4493): left-shift the 128-bit `base` value by
 * one bit into `derived`, then XOR the constant Rb into the last byte.
 * NOTE(review): the `i` declaration and the conditional guarding the Rb XOR
 * (applied only when base's MSB is set, per RFC 4493) fall on elided lines
 * of this truncated listing — confirm in the full source.
 */
1017 static void aes_cmac_key_derive(uint8_t *base, uint8_t *derived)
1021 derived[0] = base[0] << 1;
/* Shift each byte left, propagating the carry bit into the previous byte */
1022 for (i = 1; i < ICP_QAT_HW_AES_BLK_SZ ; i++) {
1023 derived[i] = base[i] << 1;
1024 derived[i - 1] |= base[i] >> 7;
1028 derived[ICP_QAT_HW_AES_BLK_SZ - 1] ^= QAT_AES_CMAC_CONST_RB;
/* Build the per-session hash precompute state QAT firmware needs:
 *  - AES-XCBC-MAC with aes_cmac set: derive CMAC subkeys K1/K2 from
 *    L = AES-K(0^128) per RFC 4493;
 *  - AES-XCBC-MAC proper: encrypt the three 0x01/0x02/0x03 seed blocks;
 *  - GCM/GMAC (GALOIS_128/64): compute the GHASH key H = AES-K(0^128);
 *  - everything else (HMAC family): one-block partial hashes of
 *    key XOR ipad and key XOR opad (RFC 2104).
 * NOTE(review): listing is subsampled — `i`/`block_size` declarations, the
 * aes_cmac flag test around line 1043, AES_KEY declarations, several error
 * returns/frees and the final `return 0` are on elided lines.
 */
1031 static int qat_sym_do_precomputes(enum icp_qat_hw_auth_algo hash_alg,
1032 const uint8_t *auth_key,
1033 uint16_t auth_keylen,
1034 uint8_t *p_state_buf,
1035 uint16_t *p_state_len,
/* ipad/opad scratch sized for the largest block size (SHA-512, 128 bytes) */
1039 uint8_t ipad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1040 uint8_t opad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1043 if (hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC) {
/* --- AES-CMAC path (aes_cmac flag): RFC 4493 subkey generation --- */
1049 uint8_t k0[ICP_QAT_HW_AES_128_KEY_SZ];
1052 auth_keylen = ICP_QAT_HW_AES_128_KEY_SZ;
1054 in = rte_zmalloc("AES CMAC K1",
1055 ICP_QAT_HW_AES_128_KEY_SZ, 16);
1058 QAT_LOG(ERR, "Failed to alloc memory");
1062 rte_memcpy(in, AES_CMAC_SEED,
1063 ICP_QAT_HW_AES_128_KEY_SZ);
1064 rte_memcpy(p_state_buf, auth_key, auth_keylen);
1066 if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
/* k0 = AES-K(0^128), i.e. the CMAC "L" value */
1072 AES_encrypt(in, k0, &enc_key);
1074 k1 = p_state_buf + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1075 k2 = k1 + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1077 aes_cmac_key_derive(k0, k1);
1078 aes_cmac_key_derive(k1, k2);
/* Scrub the intermediate L value — key material */
1080 memset(k0, 0, ICP_QAT_HW_AES_128_KEY_SZ);
1081 *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
/* --- Plain AES-XCBC-MAC path: RFC 3566 K1/K2/K3 seed blocks --- */
1085 static uint8_t qat_aes_xcbc_key_seed[
1086 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ] = {
1087 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1088 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1089 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1090 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1091 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1092 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1096 uint8_t *out = p_state_buf;
1100 in = rte_zmalloc("working mem for key",
1101 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ, 16);
1103 QAT_LOG(ERR, "Failed to alloc memory");
1107 rte_memcpy(in, qat_aes_xcbc_key_seed,
1108 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
/* Encrypt each seed block with the auth key to produce K1..K3 */
1109 for (x = 0; x < HASH_XCBC_PRECOMP_KEY_NUM; x++) {
1110 if (AES_set_encrypt_key(auth_key,
1114 (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ));
1116 (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ),
1117 0, ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1120 AES_encrypt(in, out, &enc_key);
1121 in += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1122 out += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1124 *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
/* `in` was advanced in the loop; rewind to the allocation base to free */
1125 rte_free(in - x*ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ);
1129 } else if ((hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_128) ||
1130 (hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_64)) {
/* --- GCM/GMAC path: precompute GHASH key H = AES-K(0^128) --- */
1132 uint8_t *out = p_state_buf;
1135 memset(p_state_buf, 0, ICP_QAT_HW_GALOIS_H_SZ +
1136 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1137 ICP_QAT_HW_GALOIS_E_CTR0_SZ);
1138 in = rte_zmalloc("working mem for key",
1139 ICP_QAT_HW_GALOIS_H_SZ, 16);
1141 QAT_LOG(ERR, "Failed to alloc memory");
1145 memset(in, 0, ICP_QAT_HW_GALOIS_H_SZ);
1146 if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1150 AES_encrypt(in, out, &enc_key);
1151 *p_state_len = ICP_QAT_HW_GALOIS_H_SZ +
1152 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1153 ICP_QAT_HW_GALOIS_E_CTR0_SZ;
/* --- HMAC path (all remaining algorithms) --- */
1158 block_size = qat_hash_get_block_size(hash_alg);
1161 /* init ipad and opad from key and xor with fixed values */
1162 memset(ipad, 0, block_size);
1163 memset(opad, 0, block_size);
1165 if (auth_keylen > (unsigned int)block_size) {
1166 QAT_LOG(ERR, "invalid keylen %u", auth_keylen);
1169 rte_memcpy(ipad, auth_key, auth_keylen);
1170 rte_memcpy(opad, auth_key, auth_keylen);
1172 for (i = 0; i < block_size; i++) {
1173 uint8_t *ipad_ptr = ipad + i;
1174 uint8_t *opad_ptr = opad + i;
1175 *ipad_ptr ^= HMAC_IPAD_VALUE;
1176 *opad_ptr ^= HMAC_OPAD_VALUE;
1179 /* do partial hash of ipad and copy to state1 */
1180 if (partial_hash_compute(hash_alg, ipad, p_state_buf)) {
1181 memset(ipad, 0, block_size);
1182 memset(opad, 0, block_size);
1183 QAT_LOG(ERR, "ipad precompute failed");
1188 * State len is a multiple of 8, so may be larger than the digest.
1189 * Put the partial hash of opad state_len bytes after state1
1191 *p_state_len = qat_hash_get_state1_size(hash_alg);
1192 if (partial_hash_compute(hash_alg, opad, p_state_buf + *p_state_len)) {
1193 memset(ipad, 0, block_size);
1194 memset(opad, 0, block_size);
1195 QAT_LOG(ERR, "opad precompute failed");
1199 /* don't leave data lying around */
1200 memset(ipad, 0, block_size);
1201 memset(opad, 0, block_size);
/* Initialize the common firmware request header shared by all LA requests:
 * service type, flat-pointer/64-bit-CD flags, no-partials, 16-byte IV field,
 * and the protocol flag (none/CCM/GCM/SNOW3G/ZUC) for the session.
 * NOTE(review): the return type line and break statements between the switch
 * cases are on elided lines of this truncated listing.
 */
1206 qat_sym_session_init_common_hdr(struct icp_qat_fw_comn_req_hdr *header,
1207 enum qat_sym_proto_flag proto_flags)
1210 ICP_QAT_FW_COMN_HDR_FLAGS_BUILD(ICP_QAT_FW_COMN_REQ_FLAG_SET);
1211 header->service_type = ICP_QAT_FW_COMN_REQ_CPM_FW_LA;
1212 header->comn_req_flags =
1213 ICP_QAT_FW_COMN_FLAGS_BUILD(QAT_COMN_CD_FLD_TYPE_64BIT_ADR,
1214 QAT_COMN_PTR_TYPE_FLAT);
1215 ICP_QAT_FW_LA_PARTIAL_SET(header->serv_specif_flags,
1216 ICP_QAT_FW_LA_PARTIAL_NONE);
1217 ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(header->serv_specif_flags,
1218 ICP_QAT_FW_CIPH_IV_16BYTE_DATA);
1220 switch (proto_flags) {
1221 case QAT_CRYPTO_PROTO_FLAG_NONE:
1222 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1223 ICP_QAT_FW_LA_NO_PROTO);
1225 case QAT_CRYPTO_PROTO_FLAG_CCM:
1226 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1227 ICP_QAT_FW_LA_CCM_PROTO);
1229 case QAT_CRYPTO_PROTO_FLAG_GCM:
1230 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1231 ICP_QAT_FW_LA_GCM_PROTO);
1233 case QAT_CRYPTO_PROTO_FLAG_SNOW3G:
1234 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1235 ICP_QAT_FW_LA_SNOW_3G_PROTO);
/* ZUC uses its own flag field, distinct from the generic PROTO bits */
1237 case QAT_CRYPTO_PROTO_FLAG_ZUC:
1238 ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(header->serv_specif_flags,
1239 ICP_QAT_FW_LA_ZUC_3G_PROTO);
1243 ICP_QAT_FW_LA_UPDATE_STATE_SET(header->serv_specif_flags,
1244 ICP_QAT_FW_LA_NO_UPDATE_STATE);
1245 ICP_QAT_FW_LA_DIGEST_IN_BUFFER_SET(header->serv_specif_flags,
1246 ICP_QAT_FW_LA_NO_DIGEST_IN_BUFFER);
1250 * Snow3G and ZUC should never use this function
1251 * and set its protocol flag in both cipher and auth part of content
1252 * descriptor building function
/* Translate the PROTO bits already present in serv_specif_flags back into a
 * qat_sym_proto_flag (GCM/CCM, defaulting to NONE).
 * NOTE(review): the switch keyword (line ~1261), break statements and the
 * doc-comment opener are on elided lines of this truncated listing.
 */
1254 static enum qat_sym_proto_flag
1255 qat_get_crypto_proto_flag(uint16_t flags)
1257 int proto = ICP_QAT_FW_LA_PROTO_GET(flags);
1258 enum qat_sym_proto_flag qat_proto_flag =
1259 QAT_CRYPTO_PROTO_FLAG_NONE;
1262 case ICP_QAT_FW_LA_GCM_PROTO:
1263 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
1265 case ICP_QAT_FW_LA_CCM_PROTO:
1266 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
1270 return qat_proto_flag;
/*
 * Build the cipher portion of the QAT content descriptor (CD) for this
 * session: chain the firmware slices for the requested command, select
 * key conversion, write the cipher config word and key material at
 * cdesc->cd_cur_ptr (advancing it), and record the resulting CD size.
 * NOTE(review): several structural lines (braces, 'else' branches,
 * error returns) appear to have been lost when this listing was
 * extracted; the comments below describe only the visible code.
 */
1273 int qat_sym_session_aead_create_cd_cipher(struct qat_sym_session *cdesc,
1275 uint32_t cipherkeylen)
/* Local views into the FW request template embedded in the session. */
1277 struct icp_qat_hw_cipher_algo_blk *cipher;
1278 struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
1279 struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
1280 struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
1281 void *ptr = &req_tmpl->cd_ctrl;
1282 struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
1283 struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
1284 enum icp_qat_hw_cipher_convert key_convert;
1285 enum qat_sym_proto_flag qat_proto_flag =
1286 QAT_CRYPTO_PROTO_FLAG_NONE;
1287 uint32_t total_key_size;
1288 uint16_t cipher_offset, cd_size;
1289 uint32_t wordIndex = 0;
1290 uint32_t *temp_key = NULL;
/*
 * Chain the FW processing slices for this command type:
 * cipher-only -> DRAM write; cipher+hash -> auth -> DRAM write.
 * A command that carries no cipher stage is rejected with an error log.
 */
1292 if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER) {
1293 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1294 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1295 ICP_QAT_FW_SLICE_CIPHER);
1296 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1297 ICP_QAT_FW_SLICE_DRAM_WR);
/* Cipher-only: no auth result is returned or compared by the HW. */
1298 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1299 ICP_QAT_FW_LA_NO_RET_AUTH_RES);
1300 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1301 ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
1302 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1303 } else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
1304 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1305 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1306 ICP_QAT_FW_SLICE_CIPHER);
1307 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1308 ICP_QAT_FW_SLICE_AUTH);
1309 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1310 ICP_QAT_FW_SLICE_AUTH);
1311 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1312 ICP_QAT_FW_SLICE_DRAM_WR);
1313 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1314 } else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
1315 QAT_LOG(ERR, "Invalid param, must be a cipher command.");
/*
 * Choose whether the HW must convert (expand) the key: CTR-mode
 * streaming ciphers always run as encrypt with no conversion;
 * SNOW 3G / ZUC always require conversion; otherwise it depends
 * on the cipher direction.
 */
1319 if (cdesc->qat_mode == ICP_QAT_HW_CIPHER_CTR_MODE) {
1321 * CTR Streaming ciphers are a special case. Decrypt = encrypt
1322 * Overriding default values previously set
1324 cdesc->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
1325 key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
1326 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2
1327 || cdesc->qat_cipher_alg ==
1328 ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)
1329 key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
1330 else if (cdesc->qat_dir == ICP_QAT_HW_CIPHER_ENCRYPT)
1331 key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
1333 key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
/*
 * Per-algorithm key/state sizing. total_key_size is the size the HW
 * expects in the CD (it may exceed the caller-supplied key — see the
 * padding below); cipher_state_sz is in quad-words (>> 3 == /8 bytes).
 */
1335 if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
1336 total_key_size = ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ +
1337 ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
1338 cipher_cd_ctrl->cipher_state_sz =
1339 ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
1340 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
1342 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
1343 total_key_size = ICP_QAT_HW_KASUMI_F8_KEY_SZ;
1344 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_KASUMI_BLK_SZ >> 3;
1345 cipher_cd_ctrl->cipher_padding_sz =
1346 (2 * ICP_QAT_HW_KASUMI_BLK_SZ) >> 3;
1347 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES) {
1348 total_key_size = ICP_QAT_HW_3DES_KEY_SZ;
1349 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_3DES_BLK_SZ >> 3;
1351 qat_get_crypto_proto_flag(header->serv_specif_flags);
1352 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_DES) {
1353 total_key_size = ICP_QAT_HW_DES_KEY_SZ;
1354 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_DES_BLK_SZ >> 3;
1356 qat_get_crypto_proto_flag(header->serv_specif_flags);
1357 } else if (cdesc->qat_cipher_alg ==
1358 ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
1359 total_key_size = ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ +
1360 ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
1361 cipher_cd_ctrl->cipher_state_sz =
1362 ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
1363 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
/* ZUC is only supported from QAT generation 2 devices onward. */
1364 cdesc->min_qat_dev_gen = QAT_GEN2;
1366 total_key_size = cipherkeylen;
1367 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_AES_BLK_SZ >> 3;
1369 qat_get_crypto_proto_flag(header->serv_specif_flags);
/* CD control fields are expressed in 8-byte (quad-word) units. */
1371 cipher_cd_ctrl->cipher_key_sz = total_key_size >> 3;
1372 cipher_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
1373 cipher_cd_ctrl->cipher_cfg_offset = cipher_offset >> 3;
/* Populate the common request header for this protocol/command. */
1375 header->service_cmd_id = cdesc->qat_cmd;
1376 qat_sym_session_init_common_hdr(header, qat_proto_flag);
/* Write the cipher config word at the current CD position. */
1378 cipher = (struct icp_qat_hw_cipher_algo_blk *)cdesc->cd_cur_ptr;
1379 cipher->cipher_config.val =
1380 ICP_QAT_HW_CIPHER_CONFIG_BUILD(cdesc->qat_mode,
1381 cdesc->qat_cipher_alg, key_convert,
/*
 * KASUMI F8 stores the key twice: the plain key followed by the key
 * XOR-ed with the F8 key modifier, each cipherkeylen bytes long.
 */
1384 if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
1385 temp_key = (uint32_t *)(cdesc->cd_cur_ptr +
1386 sizeof(struct icp_qat_hw_cipher_config)
1388 memcpy(cipher->key, cipherkey, cipherkeylen);
1389 memcpy(temp_key, cipherkey, cipherkeylen);
1391 /* XOR Key with KASUMI F8 key modifier at 4 bytes level */
1392 for (wordIndex = 0; wordIndex < (cipherkeylen >> 2);
1394 temp_key[wordIndex] ^= KASUMI_F8_KEY_MODIFIER_4_BYTES;
1396 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1397 cipherkeylen + cipherkeylen;
1399 memcpy(cipher->key, cipherkey, cipherkeylen);
1400 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
/*
 * Pad the CD key area when the HW expects more key bytes than were
 * supplied: the 3DES two-key / one-key options replicate K1 into the
 * missing positions; every other algorithm is zero-padded.
 */
1404 if (total_key_size > cipherkeylen) {
1405 uint32_t padding_size = total_key_size-cipherkeylen;
1406 if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
1407 && (cipherkeylen == QAT_3DES_KEY_SZ_OPT2)) {
1408 /* K3 not provided so use K1 = K3*/
1409 memcpy(cdesc->cd_cur_ptr, cipherkey, padding_size);
1410 } else if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
1411 && (cipherkeylen == QAT_3DES_KEY_SZ_OPT3)) {
1412 /* K2 and K3 not provided so use K1 = K2 = K3*/
1413 memcpy(cdesc->cd_cur_ptr, cipherkey,
1415 memcpy(cdesc->cd_cur_ptr+cipherkeylen,
1416 cipherkey, cipherkeylen);
1418 memset(cdesc->cd_cur_ptr, 0, padding_size);
1420 cdesc->cd_cur_ptr += padding_size;
/* Record the total CD size, 8-byte aligned, in quad-words. */
1422 cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
1423 cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;
/*
 * Build the auth (hash) portion of the QAT content descriptor for this
 * session: chain the FW slices, set generate/verify digest flags, write
 * the inner hash setup and the per-algorithm state1/state2 precomputes
 * at cdesc->cd_cur_ptr, then fill the auth CD control header and record
 * the final CD address/size.
 * NOTE(review): some structural lines (braces, 'break's, error returns,
 * and at least one declaration — pTempKey) were lost when this listing
 * was extracted; the comments below describe only the visible code.
 */
1428 int qat_sym_session_aead_create_cd_auth(struct qat_sym_session *cdesc,
1430 uint32_t authkeylen,
1431 uint32_t aad_length,
1432 uint32_t digestsize,
1433 unsigned int operation)
/* Local views into the FW request template embedded in the session. */
1435 struct icp_qat_hw_auth_setup *hash;
1436 struct icp_qat_hw_cipher_algo_blk *cipherconfig;
1437 struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
1438 struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
1439 struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
1440 void *ptr = &req_tmpl->cd_ctrl;
1441 struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
1442 struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
/* Auth request params live right after the cipher request params. */
1443 struct icp_qat_fw_la_auth_req_params *auth_param =
1444 (struct icp_qat_fw_la_auth_req_params *)
1445 ((char *)&req_tmpl->serv_specif_rqpars +
1446 sizeof(struct icp_qat_fw_la_cipher_req_params));
1447 uint16_t state1_size = 0, state2_size = 0;
1448 uint16_t hash_offset, cd_size;
1449 uint32_t *aad_len = NULL;
1450 uint32_t wordIndex = 0;
1452 enum qat_sym_proto_flag qat_proto_flag =
1453 QAT_CRYPTO_PROTO_FLAG_NONE;
/*
 * Chain the FW processing slices for this command type:
 * auth-only -> DRAM write; hash+cipher -> cipher -> DRAM write.
 * A command that carries no hash stage is rejected with an error log.
 */
1455 if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_AUTH) {
1456 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1457 ICP_QAT_FW_SLICE_AUTH);
1458 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1459 ICP_QAT_FW_SLICE_DRAM_WR);
1460 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1461 } else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
1462 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1463 ICP_QAT_FW_SLICE_AUTH);
1464 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1465 ICP_QAT_FW_SLICE_CIPHER);
1466 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1467 ICP_QAT_FW_SLICE_CIPHER);
1468 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1469 ICP_QAT_FW_SLICE_DRAM_WR);
1470 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1471 } else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
1472 QAT_LOG(ERR, "Invalid param, must be a hash command.");
/*
 * VERIFY: the HW compares the digest and does not return it;
 * otherwise (generate) the digest is returned and not compared.
 */
1476 if (operation == RTE_CRYPTO_AUTH_OP_VERIFY) {
1477 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1478 ICP_QAT_FW_LA_NO_RET_AUTH_RES);
1479 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1480 ICP_QAT_FW_LA_CMP_AUTH_RES);
1481 cdesc->auth_op = ICP_QAT_HW_AUTH_VERIFY;
1483 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1484 ICP_QAT_FW_LA_RET_AUTH_RES);
1485 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1486 ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
1487 cdesc->auth_op = ICP_QAT_HW_AUTH_GENERATE;
1491 * Setup the inner hash config
/* Inner hash setup: record its CD offset and build the MODE1 config. */
1493 hash_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
1494 hash = (struct icp_qat_hw_auth_setup *)cdesc->cd_cur_ptr;
1495 hash->auth_config.reserved = 0;
1496 hash->auth_config.config =
1497 ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE1,
1498 cdesc->qat_hash_alg, digestsize);
/*
 * Algorithms with no HMAC-style block counter leave the counter at
 * zero; the HMAC-capable hashes store the byte-swapped block size.
 */
1500 if (cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2
1501 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_KASUMI_F9
1502 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3
1503 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC
1504 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC
1505 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_NULL
1507 hash->auth_counter.counter = 0;
1509 int block_size = qat_hash_get_block_size(cdesc->qat_hash_alg);
1513 hash->auth_counter.counter = rte_bswap32(block_size);
1516 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_auth_setup);
1519 * cd_cur_ptr now points at the state1 information.
/* Per-algorithm state1/state2 (precompute) setup follows. */
1521 switch (cdesc->qat_hash_alg) {
1522 case ICP_QAT_HW_AUTH_ALGO_SHA1:
1523 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA1, authkey,
1524 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1526 QAT_LOG(ERR, "(SHA)precompute failed");
1529 state2_size = RTE_ALIGN_CEIL(ICP_QAT_HW_SHA1_STATE2_SZ, 8);
1531 case ICP_QAT_HW_AUTH_ALGO_SHA224:
1532 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA224, authkey,
1533 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1535 QAT_LOG(ERR, "(SHA)precompute failed");
1538 state2_size = ICP_QAT_HW_SHA224_STATE2_SZ;
1540 case ICP_QAT_HW_AUTH_ALGO_SHA256:
1541 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA256, authkey,
1542 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1544 QAT_LOG(ERR, "(SHA)precompute failed");
1547 state2_size = ICP_QAT_HW_SHA256_STATE2_SZ;
1549 case ICP_QAT_HW_AUTH_ALGO_SHA384:
1550 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA384, authkey,
1551 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1553 QAT_LOG(ERR, "(SHA)precompute failed");
1556 state2_size = ICP_QAT_HW_SHA384_STATE2_SZ;
1558 case ICP_QAT_HW_AUTH_ALGO_SHA512:
1559 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA512, authkey,
1560 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1562 QAT_LOG(ERR, "(SHA)precompute failed");
1565 state2_size = ICP_QAT_HW_SHA512_STATE2_SZ;
/* XCBC-MAC / CMAC share a state1 size; CMAC zeroes state1 first. */
1567 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
1568 state1_size = ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1570 if (cdesc->aes_cmac)
1571 memset(cdesc->cd_cur_ptr, 0, state1_size);
1572 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC,
1573 authkey, authkeylen, cdesc->cd_cur_ptr + state1_size,
1574 &state2_size, cdesc->aes_cmac)) {
1575 cdesc->aes_cmac ? QAT_LOG(ERR,
1576 "(CMAC)precompute failed")
1578 "(XCBC)precompute failed");
1582 case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
1583 case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
1584 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
1585 state1_size = ICP_QAT_HW_GALOIS_128_STATE1_SZ;
1586 if (qat_sym_do_precomputes(cdesc->qat_hash_alg, authkey,
1587 authkeylen, cdesc->cd_cur_ptr + state1_size,
1588 &state2_size, cdesc->aes_cmac)) {
1589 QAT_LOG(ERR, "(GCM)precompute failed");
1593 * Write (the length of AAD) into bytes 16-19 of state2
1594 * in big-endian format. This field is 8 bytes
1596 auth_param->u2.aad_sz =
1597 RTE_ALIGN_CEIL(aad_length, 16);
1598 auth_param->hash_state_sz = (auth_param->u2.aad_sz) >> 3;
1600 aad_len = (uint32_t *)(cdesc->cd_cur_ptr +
1601 ICP_QAT_HW_GALOIS_128_STATE1_SZ +
1602 ICP_QAT_HW_GALOIS_H_SZ);
1603 *aad_len = rte_bswap32(aad_length);
1604 cdesc->aad_len = aad_length;
/* SNOW 3G UIA2: the auth key is placed as an ECB cipher config blob. */
1606 case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
1607 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
1608 state1_size = qat_hash_get_state1_size(
1609 ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2);
1610 state2_size = ICP_QAT_HW_SNOW_3G_UIA2_STATE2_SZ;
1611 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
1613 cipherconfig = (struct icp_qat_hw_cipher_algo_blk *)
1614 (cdesc->cd_cur_ptr + state1_size + state2_size);
1615 cipherconfig->cipher_config.val =
1616 ICP_QAT_HW_CIPHER_CONFIG_BUILD(ICP_QAT_HW_CIPHER_ECB_MODE,
1617 ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2,
1618 ICP_QAT_HW_CIPHER_KEY_CONVERT,
1619 ICP_QAT_HW_CIPHER_ENCRYPT);
1620 memcpy(cipherconfig->key, authkey, authkeylen);
1621 memset(cipherconfig->key + authkeylen,
1622 0, ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ);
1623 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1624 authkeylen + ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
1625 auth_param->hash_state_sz = ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
/* ZUC EIA3 overrides the config to MODE0 and needs GEN2+ devices. */
1627 case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
1628 hash->auth_config.config =
1629 ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE0,
1630 cdesc->qat_hash_alg, digestsize);
1631 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
1632 state1_size = qat_hash_get_state1_size(
1633 ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3);
1634 state2_size = ICP_QAT_HW_ZUC_3G_EIA3_STATE2_SZ;
1635 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size
1636 + ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ);
1638 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1639 cdesc->cd_cur_ptr += state1_size + state2_size
1640 + ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
1641 auth_param->hash_state_sz = ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
1642 cdesc->min_qat_dev_gen = QAT_GEN2;
1645 case ICP_QAT_HW_AUTH_ALGO_MD5:
1646 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_MD5, authkey,
1647 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1649 QAT_LOG(ERR, "(MD5)precompute failed");
1652 state2_size = ICP_QAT_HW_MD5_STATE2_SZ;
1654 case ICP_QAT_HW_AUTH_ALGO_NULL:
1655 state1_size = qat_hash_get_state1_size(
1656 ICP_QAT_HW_AUTH_ALGO_NULL);
1657 state2_size = ICP_QAT_HW_NULL_STATE2_SZ;
/*
 * CCM (AES-CBC-MAC): when AAD is present in the request it is preceded
 * by the B0 block and the AAD length field; aad_sz is rounded up to
 * the CCM AAD alignment.
 */
1659 case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
1660 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
1661 state1_size = qat_hash_get_state1_size(
1662 ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC);
1663 state2_size = ICP_QAT_HW_AES_CBC_MAC_KEY_SZ +
1664 ICP_QAT_HW_AES_CCM_CBC_E_CTR0_SZ;
1666 if (aad_length > 0) {
1667 aad_length += ICP_QAT_HW_CCM_AAD_B0_LEN +
1668 ICP_QAT_HW_CCM_AAD_LEN_INFO;
1669 auth_param->u2.aad_sz =
1670 RTE_ALIGN_CEIL(aad_length,
1671 ICP_QAT_HW_CCM_AAD_ALIGNMENT);
1673 auth_param->u2.aad_sz = ICP_QAT_HW_CCM_AAD_B0_LEN;
1675 cdesc->aad_len = aad_length;
1676 hash->auth_counter.counter = 0;
1678 hash_cd_ctrl->outer_prefix_sz = digestsize;
1679 auth_param->hash_state_sz = digestsize;
1681 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1683 case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
1684 state1_size = qat_hash_get_state1_size(
1685 ICP_QAT_HW_AUTH_ALGO_KASUMI_F9);
1686 state2_size = ICP_QAT_HW_KASUMI_F9_STATE2_SZ;
1687 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
/* NOTE(review): pTempKey's declaration is not visible in this excerpt. */
1688 pTempKey = (uint32_t *)(cdesc->cd_cur_ptr + state1_size
1691 * The Inner Hash Initial State2 block must contain IK
1692 * (Initialisation Key), followed by IK XOR-ed with KM
1693 * (Key Modifier): IK||(IK^KM).
1695 /* write the auth key */
1696 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1697 /* initialise temp key with auth key */
1698 memcpy(pTempKey, authkey, authkeylen)  ;
1699 /* XOR Key with KASUMI F9 key modifier at 4 bytes level */
1700 for (wordIndex = 0; wordIndex < (authkeylen >> 2); wordIndex++)
1701 pTempKey[wordIndex] ^= KASUMI_F9_KEY_MODIFIER_4_BYTES;
1704 QAT_LOG(ERR, "Invalid HASH alg %u", cdesc->qat_hash_alg);
1708 /* Request template setup */
1709 qat_sym_session_init_common_hdr(header, qat_proto_flag);
1710 header->service_cmd_id = cdesc->qat_cmd;
1712 /* Auth CD config setup */
/* All hash CD control offsets/sizes below are in quad-words (>> 3). */
1713 hash_cd_ctrl->hash_cfg_offset = hash_offset >> 3;
1714 hash_cd_ctrl->hash_flags = ICP_QAT_FW_AUTH_HDR_FLAG_NO_NESTED;
1715 hash_cd_ctrl->inner_res_sz = digestsize;
1716 hash_cd_ctrl->final_sz = digestsize;
1717 hash_cd_ctrl->inner_state1_sz = state1_size;
1718 auth_param->auth_res_sz = digestsize;
1720 hash_cd_ctrl->inner_state2_sz = state2_size;
/* state2 follows the 8-byte-aligned state1 region after the setup. */
1721 hash_cd_ctrl->inner_state2_offset = hash_cd_ctrl->hash_cfg_offset +
1722 ((sizeof(struct icp_qat_hw_auth_setup) +
1723 RTE_ALIGN_CEIL(hash_cd_ctrl->inner_state1_sz, 8))
1726 cdesc->cd_cur_ptr += state1_size + state2_size;
1727 cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
1729 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1730 cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;
1735 int qat_sym_validate_aes_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1738 case ICP_QAT_HW_AES_128_KEY_SZ:
1739 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
1741 case ICP_QAT_HW_AES_192_KEY_SZ:
1742 *alg = ICP_QAT_HW_CIPHER_ALGO_AES192;
1744 case ICP_QAT_HW_AES_256_KEY_SZ:
1745 *alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
1753 int qat_sym_validate_aes_docsisbpi_key(int key_len,
1754 enum icp_qat_hw_cipher_algo *alg)
1757 case ICP_QAT_HW_AES_128_KEY_SZ:
1758 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
1766 int qat_sym_validate_snow3g_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1769 case ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ:
1770 *alg = ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2;
1778 int qat_sym_validate_kasumi_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1781 case ICP_QAT_HW_KASUMI_KEY_SZ:
1782 *alg = ICP_QAT_HW_CIPHER_ALGO_KASUMI;
1790 int qat_sym_validate_des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1793 case ICP_QAT_HW_DES_KEY_SZ:
1794 *alg = ICP_QAT_HW_CIPHER_ALGO_DES;
1802 int qat_sym_validate_3des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1805 case QAT_3DES_KEY_SZ_OPT1:
1806 case QAT_3DES_KEY_SZ_OPT2:
1807 case QAT_3DES_KEY_SZ_OPT3:
1808 *alg = ICP_QAT_HW_CIPHER_ALGO_3DES;
1816 int qat_sym_validate_zuc_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1819 case ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ:
1820 *alg = ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3;