/* SPDX-License-Identifier: (BSD-3-Clause OR GPL-2.0)
 * Copyright(c) 2015-2019 Intel Corporation
 */
5 #include <openssl/sha.h> /* Needed to calculate pre-compute values */
6 #include <openssl/aes.h> /* Needed to calculate pre-compute values */
7 #include <openssl/md5.h> /* Needed to calculate pre-compute values */
8 #include <openssl/evp.h> /* Needed for bpi runt block processing */
10 #include <rte_memcpy.h>
11 #include <rte_common.h>
12 #include <rte_spinlock.h>
13 #include <rte_byteorder.h>
15 #include <rte_malloc.h>
16 #include <rte_crypto_sym.h>
19 #include "qat_sym_session.h"
20 #include "qat_sym_pmd.h"
22 /** Frees a context previously created
23 * Depends on openssl libcrypto
26 bpi_cipher_ctx_free(void *bpi_ctx)
29 EVP_CIPHER_CTX_free((EVP_CIPHER_CTX *)bpi_ctx);
32 /** Creates a context in either AES or DES in ECB mode
33 * Depends on openssl libcrypto
36 bpi_cipher_ctx_init(enum rte_crypto_cipher_algorithm cryptodev_algo,
37 enum rte_crypto_cipher_operation direction __rte_unused,
38 uint8_t *key, void **ctx)
40 const EVP_CIPHER *algo = NULL;
42 *ctx = EVP_CIPHER_CTX_new();
49 if (cryptodev_algo == RTE_CRYPTO_CIPHER_DES_DOCSISBPI)
52 algo = EVP_aes_128_ecb();
54 /* IV will be ECB encrypted whether direction is encrypt or decrypt*/
55 if (EVP_EncryptInit_ex(*ctx, algo, NULL, key, 0) != 1) {
64 EVP_CIPHER_CTX_free(*ctx);
69 qat_is_cipher_alg_supported(enum rte_crypto_cipher_algorithm algo,
70 struct qat_sym_dev_private *internals)
73 const struct rte_cryptodev_capabilities *capability;
75 while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
76 RTE_CRYPTO_OP_TYPE_UNDEFINED) {
77 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
80 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_CIPHER)
83 if (capability->sym.cipher.algo == algo)
90 qat_is_auth_alg_supported(enum rte_crypto_auth_algorithm algo,
91 struct qat_sym_dev_private *internals)
94 const struct rte_cryptodev_capabilities *capability;
96 while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
97 RTE_CRYPTO_OP_TYPE_UNDEFINED) {
98 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
101 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_AUTH)
104 if (capability->sym.auth.algo == algo)
111 qat_sym_session_clear(struct rte_cryptodev *dev,
112 struct rte_cryptodev_sym_session *sess)
114 uint8_t index = dev->driver_id;
115 void *sess_priv = get_sym_session_private_data(sess, index);
116 struct qat_sym_session *s = (struct qat_sym_session *)sess_priv;
120 bpi_cipher_ctx_free(s->bpi_ctx);
121 memset(s, 0, qat_sym_session_get_private_size(dev));
122 struct rte_mempool *sess_mp = rte_mempool_from_obj(sess_priv);
124 set_sym_session_private_data(sess, index, NULL);
125 rte_mempool_put(sess_mp, sess_priv);
130 qat_get_cmd_id(const struct rte_crypto_sym_xform *xform)
133 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER && xform->next == NULL)
134 return ICP_QAT_FW_LA_CMD_CIPHER;
136 /* Authentication Only */
137 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH && xform->next == NULL)
138 return ICP_QAT_FW_LA_CMD_AUTH;
141 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
142 /* AES-GCM and AES-CCM works with different direction
143 * GCM first encrypts and generate hash where AES-CCM
144 * first generate hash and encrypts. Similar relation
145 * applies to decryption.
147 if (xform->aead.op == RTE_CRYPTO_AEAD_OP_ENCRYPT)
148 if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
149 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
151 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
153 if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
154 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
156 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
159 if (xform->next == NULL)
162 /* Cipher then Authenticate */
163 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
164 xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
165 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
167 /* Authenticate then Cipher */
168 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH &&
169 xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
170 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
175 static struct rte_crypto_auth_xform *
176 qat_get_auth_xform(struct rte_crypto_sym_xform *xform)
179 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH)
188 static struct rte_crypto_cipher_xform *
189 qat_get_cipher_xform(struct rte_crypto_sym_xform *xform)
192 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
193 return &xform->cipher;
202 qat_sym_session_configure_cipher(struct rte_cryptodev *dev,
203 struct rte_crypto_sym_xform *xform,
204 struct qat_sym_session *session)
206 struct qat_sym_dev_private *internals = dev->data->dev_private;
207 struct rte_crypto_cipher_xform *cipher_xform = NULL;
210 /* Get cipher xform from crypto xform chain */
211 cipher_xform = qat_get_cipher_xform(xform);
213 session->cipher_iv.offset = cipher_xform->iv.offset;
214 session->cipher_iv.length = cipher_xform->iv.length;
216 switch (cipher_xform->algo) {
217 case RTE_CRYPTO_CIPHER_AES_CBC:
218 if (qat_sym_validate_aes_key(cipher_xform->key.length,
219 &session->qat_cipher_alg) != 0) {
220 QAT_LOG(ERR, "Invalid AES cipher key size");
224 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
226 case RTE_CRYPTO_CIPHER_AES_CTR:
227 if (qat_sym_validate_aes_key(cipher_xform->key.length,
228 &session->qat_cipher_alg) != 0) {
229 QAT_LOG(ERR, "Invalid AES cipher key size");
233 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
235 case RTE_CRYPTO_CIPHER_SNOW3G_UEA2:
236 if (qat_sym_validate_snow3g_key(cipher_xform->key.length,
237 &session->qat_cipher_alg) != 0) {
238 QAT_LOG(ERR, "Invalid SNOW 3G cipher key size");
242 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
244 case RTE_CRYPTO_CIPHER_NULL:
245 session->qat_cipher_alg = ICP_QAT_HW_CIPHER_ALGO_NULL;
246 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
248 case RTE_CRYPTO_CIPHER_KASUMI_F8:
249 if (qat_sym_validate_kasumi_key(cipher_xform->key.length,
250 &session->qat_cipher_alg) != 0) {
251 QAT_LOG(ERR, "Invalid KASUMI cipher key size");
255 session->qat_mode = ICP_QAT_HW_CIPHER_F8_MODE;
257 case RTE_CRYPTO_CIPHER_3DES_CBC:
258 if (qat_sym_validate_3des_key(cipher_xform->key.length,
259 &session->qat_cipher_alg) != 0) {
260 QAT_LOG(ERR, "Invalid 3DES cipher key size");
264 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
266 case RTE_CRYPTO_CIPHER_DES_CBC:
267 if (qat_sym_validate_des_key(cipher_xform->key.length,
268 &session->qat_cipher_alg) != 0) {
269 QAT_LOG(ERR, "Invalid DES cipher key size");
273 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
275 case RTE_CRYPTO_CIPHER_3DES_CTR:
276 if (qat_sym_validate_3des_key(cipher_xform->key.length,
277 &session->qat_cipher_alg) != 0) {
278 QAT_LOG(ERR, "Invalid 3DES cipher key size");
282 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
284 case RTE_CRYPTO_CIPHER_DES_DOCSISBPI:
285 ret = bpi_cipher_ctx_init(
288 cipher_xform->key.data,
291 QAT_LOG(ERR, "failed to create DES BPI ctx");
294 if (qat_sym_validate_des_key(cipher_xform->key.length,
295 &session->qat_cipher_alg) != 0) {
296 QAT_LOG(ERR, "Invalid DES cipher key size");
300 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
302 case RTE_CRYPTO_CIPHER_AES_DOCSISBPI:
303 ret = bpi_cipher_ctx_init(
306 cipher_xform->key.data,
309 QAT_LOG(ERR, "failed to create AES BPI ctx");
312 if (qat_sym_validate_aes_docsisbpi_key(cipher_xform->key.length,
313 &session->qat_cipher_alg) != 0) {
314 QAT_LOG(ERR, "Invalid AES DOCSISBPI key size");
318 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
320 case RTE_CRYPTO_CIPHER_ZUC_EEA3:
321 if (!qat_is_cipher_alg_supported(
322 cipher_xform->algo, internals)) {
323 QAT_LOG(ERR, "%s not supported on this device",
324 rte_crypto_cipher_algorithm_strings
325 [cipher_xform->algo]);
329 if (qat_sym_validate_zuc_key(cipher_xform->key.length,
330 &session->qat_cipher_alg) != 0) {
331 QAT_LOG(ERR, "Invalid ZUC cipher key size");
335 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
337 case RTE_CRYPTO_CIPHER_AES_XTS:
338 if ((cipher_xform->key.length/2) == ICP_QAT_HW_AES_192_KEY_SZ) {
339 QAT_LOG(ERR, "AES-XTS-192 not supported");
343 if (qat_sym_validate_aes_key((cipher_xform->key.length/2),
344 &session->qat_cipher_alg) != 0) {
345 QAT_LOG(ERR, "Invalid AES-XTS cipher key size");
349 session->qat_mode = ICP_QAT_HW_CIPHER_XTS_MODE;
351 case RTE_CRYPTO_CIPHER_3DES_ECB:
352 case RTE_CRYPTO_CIPHER_AES_ECB:
353 case RTE_CRYPTO_CIPHER_AES_F8:
354 case RTE_CRYPTO_CIPHER_ARC4:
355 QAT_LOG(ERR, "Crypto QAT PMD: Unsupported Cipher alg %u",
360 QAT_LOG(ERR, "Crypto: Undefined Cipher specified %u\n",
366 if (cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
367 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
369 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
371 if (qat_sym_session_aead_create_cd_cipher(session,
372 cipher_xform->key.data,
373 cipher_xform->key.length)) {
381 if (session->bpi_ctx) {
382 bpi_cipher_ctx_free(session->bpi_ctx);
383 session->bpi_ctx = NULL;
389 qat_sym_session_configure(struct rte_cryptodev *dev,
390 struct rte_crypto_sym_xform *xform,
391 struct rte_cryptodev_sym_session *sess,
392 struct rte_mempool *mempool)
394 void *sess_private_data;
397 if (rte_mempool_get(mempool, &sess_private_data)) {
399 "Couldn't get object from session mempool");
403 ret = qat_sym_session_set_parameters(dev, xform, sess_private_data);
406 "Crypto QAT PMD: failed to configure session parameters");
408 /* Return session to mempool */
409 rte_mempool_put(mempool, sess_private_data);
413 set_sym_session_private_data(sess, dev->driver_id,
420 qat_sym_session_set_parameters(struct rte_cryptodev *dev,
421 struct rte_crypto_sym_xform *xform, void *session_private)
423 struct qat_sym_session *session = session_private;
427 /* Set context descriptor physical address */
428 session->cd_paddr = rte_mempool_virt2iova(session) +
429 offsetof(struct qat_sym_session, cd);
431 session->min_qat_dev_gen = QAT_GEN1;
433 /* Get requested QAT command id */
434 qat_cmd_id = qat_get_cmd_id(xform);
435 if (qat_cmd_id < 0 || qat_cmd_id >= ICP_QAT_FW_LA_CMD_DELIMITER) {
436 QAT_LOG(ERR, "Unsupported xform chain requested");
439 session->qat_cmd = (enum icp_qat_fw_la_cmd_id)qat_cmd_id;
440 switch (session->qat_cmd) {
441 case ICP_QAT_FW_LA_CMD_CIPHER:
442 ret = qat_sym_session_configure_cipher(dev, xform, session);
446 case ICP_QAT_FW_LA_CMD_AUTH:
447 ret = qat_sym_session_configure_auth(dev, xform, session);
451 case ICP_QAT_FW_LA_CMD_CIPHER_HASH:
452 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
453 ret = qat_sym_session_configure_aead(xform,
458 ret = qat_sym_session_configure_cipher(dev,
462 ret = qat_sym_session_configure_auth(dev,
468 case ICP_QAT_FW_LA_CMD_HASH_CIPHER:
469 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
470 ret = qat_sym_session_configure_aead(xform,
475 ret = qat_sym_session_configure_auth(dev,
479 ret = qat_sym_session_configure_cipher(dev,
485 case ICP_QAT_FW_LA_CMD_TRNG_GET_RANDOM:
486 case ICP_QAT_FW_LA_CMD_TRNG_TEST:
487 case ICP_QAT_FW_LA_CMD_SSL3_KEY_DERIVE:
488 case ICP_QAT_FW_LA_CMD_TLS_V1_1_KEY_DERIVE:
489 case ICP_QAT_FW_LA_CMD_TLS_V1_2_KEY_DERIVE:
490 case ICP_QAT_FW_LA_CMD_MGF1:
491 case ICP_QAT_FW_LA_CMD_AUTH_PRE_COMP:
492 case ICP_QAT_FW_LA_CMD_CIPHER_PRE_COMP:
493 case ICP_QAT_FW_LA_CMD_DELIMITER:
494 QAT_LOG(ERR, "Unsupported Service %u",
498 QAT_LOG(ERR, "Unsupported Service %u",
507 qat_sym_session_configure_auth(struct rte_cryptodev *dev,
508 struct rte_crypto_sym_xform *xform,
509 struct qat_sym_session *session)
511 struct rte_crypto_auth_xform *auth_xform = qat_get_auth_xform(xform);
512 struct qat_sym_dev_private *internals = dev->data->dev_private;
513 uint8_t *key_data = auth_xform->key.data;
514 uint8_t key_length = auth_xform->key.length;
515 session->aes_cmac = 0;
517 switch (auth_xform->algo) {
518 case RTE_CRYPTO_AUTH_SHA1_HMAC:
519 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA1;
521 case RTE_CRYPTO_AUTH_SHA224_HMAC:
522 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA224;
524 case RTE_CRYPTO_AUTH_SHA256_HMAC:
525 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA256;
527 case RTE_CRYPTO_AUTH_SHA384_HMAC:
528 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA384;
530 case RTE_CRYPTO_AUTH_SHA512_HMAC:
531 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA512;
533 case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
534 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
536 case RTE_CRYPTO_AUTH_AES_CMAC:
537 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
538 session->aes_cmac = 1;
540 case RTE_CRYPTO_AUTH_AES_GMAC:
541 if (qat_sym_validate_aes_key(auth_xform->key.length,
542 &session->qat_cipher_alg) != 0) {
543 QAT_LOG(ERR, "Invalid AES key size");
546 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
547 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
550 case RTE_CRYPTO_AUTH_SNOW3G_UIA2:
551 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2;
553 case RTE_CRYPTO_AUTH_MD5_HMAC:
554 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_MD5;
556 case RTE_CRYPTO_AUTH_NULL:
557 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_NULL;
559 case RTE_CRYPTO_AUTH_KASUMI_F9:
560 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_KASUMI_F9;
562 case RTE_CRYPTO_AUTH_ZUC_EIA3:
563 if (!qat_is_auth_alg_supported(auth_xform->algo, internals)) {
564 QAT_LOG(ERR, "%s not supported on this device",
565 rte_crypto_auth_algorithm_strings
569 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3;
571 case RTE_CRYPTO_AUTH_SHA1:
572 case RTE_CRYPTO_AUTH_SHA256:
573 case RTE_CRYPTO_AUTH_SHA512:
574 case RTE_CRYPTO_AUTH_SHA224:
575 case RTE_CRYPTO_AUTH_SHA384:
576 case RTE_CRYPTO_AUTH_MD5:
577 case RTE_CRYPTO_AUTH_AES_CBC_MAC:
578 QAT_LOG(ERR, "Crypto: Unsupported hash alg %u",
582 QAT_LOG(ERR, "Crypto: Undefined Hash algo %u specified",
587 session->auth_iv.offset = auth_xform->iv.offset;
588 session->auth_iv.length = auth_xform->iv.length;
590 if (auth_xform->algo == RTE_CRYPTO_AUTH_AES_GMAC) {
591 if (auth_xform->op == RTE_CRYPTO_AUTH_OP_GENERATE) {
592 session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER_HASH;
593 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
595 * It needs to create cipher desc content first,
596 * then authentication
599 if (qat_sym_session_aead_create_cd_cipher(session,
600 auth_xform->key.data,
601 auth_xform->key.length))
604 if (qat_sym_session_aead_create_cd_auth(session,
608 auth_xform->digest_length,
612 session->qat_cmd = ICP_QAT_FW_LA_CMD_HASH_CIPHER;
613 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
615 * It needs to create authentication desc content first,
619 if (qat_sym_session_aead_create_cd_auth(session,
623 auth_xform->digest_length,
627 if (qat_sym_session_aead_create_cd_cipher(session,
628 auth_xform->key.data,
629 auth_xform->key.length))
632 /* Restore to authentication only only */
633 session->qat_cmd = ICP_QAT_FW_LA_CMD_AUTH;
635 if (qat_sym_session_aead_create_cd_auth(session,
639 auth_xform->digest_length,
644 session->digest_length = auth_xform->digest_length;
649 qat_sym_session_configure_aead(struct rte_crypto_sym_xform *xform,
650 struct qat_sym_session *session)
652 struct rte_crypto_aead_xform *aead_xform = &xform->aead;
653 enum rte_crypto_auth_operation crypto_operation;
656 * Store AEAD IV parameters as cipher IV,
657 * to avoid unnecessary memory usage
659 session->cipher_iv.offset = xform->aead.iv.offset;
660 session->cipher_iv.length = xform->aead.iv.length;
662 switch (aead_xform->algo) {
663 case RTE_CRYPTO_AEAD_AES_GCM:
664 if (qat_sym_validate_aes_key(aead_xform->key.length,
665 &session->qat_cipher_alg) != 0) {
666 QAT_LOG(ERR, "Invalid AES key size");
669 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
670 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
672 case RTE_CRYPTO_AEAD_AES_CCM:
673 if (qat_sym_validate_aes_key(aead_xform->key.length,
674 &session->qat_cipher_alg) != 0) {
675 QAT_LOG(ERR, "Invalid AES key size");
678 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
679 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC;
682 QAT_LOG(ERR, "Crypto: Undefined AEAD specified %u\n",
687 if ((aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT &&
688 aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) ||
689 (aead_xform->op == RTE_CRYPTO_AEAD_OP_DECRYPT &&
690 aead_xform->algo == RTE_CRYPTO_AEAD_AES_CCM)) {
691 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
693 * It needs to create cipher desc content first,
694 * then authentication
696 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
697 RTE_CRYPTO_AUTH_OP_GENERATE : RTE_CRYPTO_AUTH_OP_VERIFY;
699 if (qat_sym_session_aead_create_cd_cipher(session,
700 aead_xform->key.data,
701 aead_xform->key.length))
704 if (qat_sym_session_aead_create_cd_auth(session,
705 aead_xform->key.data,
706 aead_xform->key.length,
707 aead_xform->aad_length,
708 aead_xform->digest_length,
712 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
714 * It needs to create authentication desc content first,
718 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
719 RTE_CRYPTO_AUTH_OP_VERIFY : RTE_CRYPTO_AUTH_OP_GENERATE;
721 if (qat_sym_session_aead_create_cd_auth(session,
722 aead_xform->key.data,
723 aead_xform->key.length,
724 aead_xform->aad_length,
725 aead_xform->digest_length,
729 if (qat_sym_session_aead_create_cd_cipher(session,
730 aead_xform->key.data,
731 aead_xform->key.length))
735 session->digest_length = aead_xform->digest_length;
739 unsigned int qat_sym_session_get_private_size(
740 struct rte_cryptodev *dev __rte_unused)
742 return RTE_ALIGN_CEIL(sizeof(struct qat_sym_session), 8);
745 /* returns block size in bytes per cipher algo */
746 int qat_cipher_get_block_size(enum icp_qat_hw_cipher_algo qat_cipher_alg)
748 switch (qat_cipher_alg) {
749 case ICP_QAT_HW_CIPHER_ALGO_DES:
750 return ICP_QAT_HW_DES_BLK_SZ;
751 case ICP_QAT_HW_CIPHER_ALGO_3DES:
752 return ICP_QAT_HW_3DES_BLK_SZ;
753 case ICP_QAT_HW_CIPHER_ALGO_AES128:
754 case ICP_QAT_HW_CIPHER_ALGO_AES192:
755 case ICP_QAT_HW_CIPHER_ALGO_AES256:
756 return ICP_QAT_HW_AES_BLK_SZ;
758 QAT_LOG(ERR, "invalid block cipher alg %u", qat_cipher_alg);
765 * Returns size in bytes per hash algo for state1 size field in cd_ctrl
766 * This is digest size rounded up to nearest quadword
768 static int qat_hash_get_state1_size(enum icp_qat_hw_auth_algo qat_hash_alg)
770 switch (qat_hash_alg) {
771 case ICP_QAT_HW_AUTH_ALGO_SHA1:
772 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA1_STATE1_SZ,
773 QAT_HW_DEFAULT_ALIGNMENT);
774 case ICP_QAT_HW_AUTH_ALGO_SHA224:
775 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA224_STATE1_SZ,
776 QAT_HW_DEFAULT_ALIGNMENT);
777 case ICP_QAT_HW_AUTH_ALGO_SHA256:
778 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA256_STATE1_SZ,
779 QAT_HW_DEFAULT_ALIGNMENT);
780 case ICP_QAT_HW_AUTH_ALGO_SHA384:
781 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA384_STATE1_SZ,
782 QAT_HW_DEFAULT_ALIGNMENT);
783 case ICP_QAT_HW_AUTH_ALGO_SHA512:
784 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
785 QAT_HW_DEFAULT_ALIGNMENT);
786 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
787 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ,
788 QAT_HW_DEFAULT_ALIGNMENT);
789 case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
790 case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
791 return QAT_HW_ROUND_UP(ICP_QAT_HW_GALOIS_128_STATE1_SZ,
792 QAT_HW_DEFAULT_ALIGNMENT);
793 case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
794 return QAT_HW_ROUND_UP(ICP_QAT_HW_ZUC_3G_EIA3_STATE1_SZ,
795 QAT_HW_DEFAULT_ALIGNMENT);
796 case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
797 return QAT_HW_ROUND_UP(ICP_QAT_HW_SNOW_3G_UIA2_STATE1_SZ,
798 QAT_HW_DEFAULT_ALIGNMENT);
799 case ICP_QAT_HW_AUTH_ALGO_MD5:
800 return QAT_HW_ROUND_UP(ICP_QAT_HW_MD5_STATE1_SZ,
801 QAT_HW_DEFAULT_ALIGNMENT);
802 case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
803 return QAT_HW_ROUND_UP(ICP_QAT_HW_KASUMI_F9_STATE1_SZ,
804 QAT_HW_DEFAULT_ALIGNMENT);
805 case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
806 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_CBC_MAC_STATE1_SZ,
807 QAT_HW_DEFAULT_ALIGNMENT);
808 case ICP_QAT_HW_AUTH_ALGO_NULL:
809 return QAT_HW_ROUND_UP(ICP_QAT_HW_NULL_STATE1_SZ,
810 QAT_HW_DEFAULT_ALIGNMENT);
811 case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
812 /* return maximum state1 size in this case */
813 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
814 QAT_HW_DEFAULT_ALIGNMENT);
816 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
822 /* returns digest size in bytes per hash algo */
823 static int qat_hash_get_digest_size(enum icp_qat_hw_auth_algo qat_hash_alg)
825 switch (qat_hash_alg) {
826 case ICP_QAT_HW_AUTH_ALGO_SHA1:
827 return ICP_QAT_HW_SHA1_STATE1_SZ;
828 case ICP_QAT_HW_AUTH_ALGO_SHA224:
829 return ICP_QAT_HW_SHA224_STATE1_SZ;
830 case ICP_QAT_HW_AUTH_ALGO_SHA256:
831 return ICP_QAT_HW_SHA256_STATE1_SZ;
832 case ICP_QAT_HW_AUTH_ALGO_SHA384:
833 return ICP_QAT_HW_SHA384_STATE1_SZ;
834 case ICP_QAT_HW_AUTH_ALGO_SHA512:
835 return ICP_QAT_HW_SHA512_STATE1_SZ;
836 case ICP_QAT_HW_AUTH_ALGO_MD5:
837 return ICP_QAT_HW_MD5_STATE1_SZ;
838 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
839 return ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
840 case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
841 /* return maximum digest size in this case */
842 return ICP_QAT_HW_SHA512_STATE1_SZ;
844 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
850 /* returns block size in byes per hash algo */
851 static int qat_hash_get_block_size(enum icp_qat_hw_auth_algo qat_hash_alg)
853 switch (qat_hash_alg) {
854 case ICP_QAT_HW_AUTH_ALGO_SHA1:
856 case ICP_QAT_HW_AUTH_ALGO_SHA224:
857 return SHA256_CBLOCK;
858 case ICP_QAT_HW_AUTH_ALGO_SHA256:
859 return SHA256_CBLOCK;
860 case ICP_QAT_HW_AUTH_ALGO_SHA384:
861 return SHA512_CBLOCK;
862 case ICP_QAT_HW_AUTH_ALGO_SHA512:
863 return SHA512_CBLOCK;
864 case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
866 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
867 return ICP_QAT_HW_AES_BLK_SZ;
868 case ICP_QAT_HW_AUTH_ALGO_MD5:
870 case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
871 /* return maximum block size in this case */
872 return SHA512_CBLOCK;
874 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
880 static int partial_hash_sha1(uint8_t *data_in, uint8_t *data_out)
884 if (!SHA1_Init(&ctx))
886 SHA1_Transform(&ctx, data_in);
887 rte_memcpy(data_out, &ctx, SHA_DIGEST_LENGTH);
891 static int partial_hash_sha224(uint8_t *data_in, uint8_t *data_out)
895 if (!SHA224_Init(&ctx))
897 SHA256_Transform(&ctx, data_in);
898 rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
902 static int partial_hash_sha256(uint8_t *data_in, uint8_t *data_out)
906 if (!SHA256_Init(&ctx))
908 SHA256_Transform(&ctx, data_in);
909 rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
913 static int partial_hash_sha384(uint8_t *data_in, uint8_t *data_out)
917 if (!SHA384_Init(&ctx))
919 SHA512_Transform(&ctx, data_in);
920 rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
924 static int partial_hash_sha512(uint8_t *data_in, uint8_t *data_out)
928 if (!SHA512_Init(&ctx))
930 SHA512_Transform(&ctx, data_in);
931 rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
935 static int partial_hash_md5(uint8_t *data_in, uint8_t *data_out)
941 MD5_Transform(&ctx, data_in);
942 rte_memcpy(data_out, &ctx, MD5_DIGEST_LENGTH);
947 static int partial_hash_compute(enum icp_qat_hw_auth_algo hash_alg,
952 uint8_t digest[qat_hash_get_digest_size(
953 ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
954 uint32_t *hash_state_out_be32;
955 uint64_t *hash_state_out_be64;
958 digest_size = qat_hash_get_digest_size(hash_alg);
959 if (digest_size <= 0)
962 hash_state_out_be32 = (uint32_t *)data_out;
963 hash_state_out_be64 = (uint64_t *)data_out;
966 case ICP_QAT_HW_AUTH_ALGO_SHA1:
967 if (partial_hash_sha1(data_in, digest))
969 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
970 *hash_state_out_be32 =
971 rte_bswap32(*(((uint32_t *)digest)+i));
973 case ICP_QAT_HW_AUTH_ALGO_SHA224:
974 if (partial_hash_sha224(data_in, digest))
976 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
977 *hash_state_out_be32 =
978 rte_bswap32(*(((uint32_t *)digest)+i));
980 case ICP_QAT_HW_AUTH_ALGO_SHA256:
981 if (partial_hash_sha256(data_in, digest))
983 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
984 *hash_state_out_be32 =
985 rte_bswap32(*(((uint32_t *)digest)+i));
987 case ICP_QAT_HW_AUTH_ALGO_SHA384:
988 if (partial_hash_sha384(data_in, digest))
990 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
991 *hash_state_out_be64 =
992 rte_bswap64(*(((uint64_t *)digest)+i));
994 case ICP_QAT_HW_AUTH_ALGO_SHA512:
995 if (partial_hash_sha512(data_in, digest))
997 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
998 *hash_state_out_be64 =
999 rte_bswap64(*(((uint64_t *)digest)+i));
1001 case ICP_QAT_HW_AUTH_ALGO_MD5:
1002 if (partial_hash_md5(data_in, data_out))
1006 QAT_LOG(ERR, "invalid hash alg %u", hash_alg);
1012 #define HMAC_IPAD_VALUE 0x36
1013 #define HMAC_OPAD_VALUE 0x5c
1014 #define HASH_XCBC_PRECOMP_KEY_NUM 3
1016 static const uint8_t AES_CMAC_SEED[ICP_QAT_HW_AES_128_KEY_SZ];
1018 static void aes_cmac_key_derive(uint8_t *base, uint8_t *derived)
1022 derived[0] = base[0] << 1;
1023 for (i = 1; i < ICP_QAT_HW_AES_BLK_SZ ; i++) {
1024 derived[i] = base[i] << 1;
1025 derived[i - 1] |= base[i] >> 7;
1029 derived[ICP_QAT_HW_AES_BLK_SZ - 1] ^= QAT_AES_CMAC_CONST_RB;
1032 static int qat_sym_do_precomputes(enum icp_qat_hw_auth_algo hash_alg,
1033 const uint8_t *auth_key,
1034 uint16_t auth_keylen,
1035 uint8_t *p_state_buf,
1036 uint16_t *p_state_len,
1040 uint8_t ipad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1041 uint8_t opad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1044 if (hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC) {
1050 uint8_t k0[ICP_QAT_HW_AES_128_KEY_SZ];
1053 auth_keylen = ICP_QAT_HW_AES_128_KEY_SZ;
1055 in = rte_zmalloc("AES CMAC K1",
1056 ICP_QAT_HW_AES_128_KEY_SZ, 16);
1059 QAT_LOG(ERR, "Failed to alloc memory");
1063 rte_memcpy(in, AES_CMAC_SEED,
1064 ICP_QAT_HW_AES_128_KEY_SZ);
1065 rte_memcpy(p_state_buf, auth_key, auth_keylen);
1067 if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1073 AES_encrypt(in, k0, &enc_key);
1075 k1 = p_state_buf + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1076 k2 = k1 + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1078 aes_cmac_key_derive(k0, k1);
1079 aes_cmac_key_derive(k1, k2);
1081 memset(k0, 0, ICP_QAT_HW_AES_128_KEY_SZ);
1082 *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
1086 static uint8_t qat_aes_xcbc_key_seed[
1087 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ] = {
1088 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1089 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1090 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1091 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1092 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1093 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1097 uint8_t *out = p_state_buf;
1101 in = rte_zmalloc("working mem for key",
1102 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ, 16);
1104 QAT_LOG(ERR, "Failed to alloc memory");
1108 rte_memcpy(in, qat_aes_xcbc_key_seed,
1109 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1110 for (x = 0; x < HASH_XCBC_PRECOMP_KEY_NUM; x++) {
1111 if (AES_set_encrypt_key(auth_key,
1115 (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ));
1117 (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ),
1118 0, ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1121 AES_encrypt(in, out, &enc_key);
1122 in += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1123 out += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1125 *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
1126 rte_free(in - x*ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ);
1130 } else if ((hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_128) ||
1131 (hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_64)) {
1133 uint8_t *out = p_state_buf;
1136 memset(p_state_buf, 0, ICP_QAT_HW_GALOIS_H_SZ +
1137 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1138 ICP_QAT_HW_GALOIS_E_CTR0_SZ);
1139 in = rte_zmalloc("working mem for key",
1140 ICP_QAT_HW_GALOIS_H_SZ, 16);
1142 QAT_LOG(ERR, "Failed to alloc memory");
1146 memset(in, 0, ICP_QAT_HW_GALOIS_H_SZ);
1147 if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1151 AES_encrypt(in, out, &enc_key);
1152 *p_state_len = ICP_QAT_HW_GALOIS_H_SZ +
1153 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1154 ICP_QAT_HW_GALOIS_E_CTR0_SZ;
1159 block_size = qat_hash_get_block_size(hash_alg);
1162 /* init ipad and opad from key and xor with fixed values */
1163 memset(ipad, 0, block_size);
1164 memset(opad, 0, block_size);
1166 if (auth_keylen > (unsigned int)block_size) {
1167 QAT_LOG(ERR, "invalid keylen %u", auth_keylen);
1170 rte_memcpy(ipad, auth_key, auth_keylen);
1171 rte_memcpy(opad, auth_key, auth_keylen);
1173 for (i = 0; i < block_size; i++) {
1174 uint8_t *ipad_ptr = ipad + i;
1175 uint8_t *opad_ptr = opad + i;
1176 *ipad_ptr ^= HMAC_IPAD_VALUE;
1177 *opad_ptr ^= HMAC_OPAD_VALUE;
1180 /* do partial hash of ipad and copy to state1 */
1181 if (partial_hash_compute(hash_alg, ipad, p_state_buf)) {
1182 memset(ipad, 0, block_size);
1183 memset(opad, 0, block_size);
1184 QAT_LOG(ERR, "ipad precompute failed");
1189 * State len is a multiple of 8, so may be larger than the digest.
1190 * Put the partial hash of opad state_len bytes after state1
1192 *p_state_len = qat_hash_get_state1_size(hash_alg);
1193 if (partial_hash_compute(hash_alg, opad, p_state_buf + *p_state_len)) {
1194 memset(ipad, 0, block_size);
1195 memset(opad, 0, block_size);
1196 QAT_LOG(ERR, "opad precompute failed");
1200 /* don't leave data lying around */
1201 memset(ipad, 0, block_size);
1202 memset(opad, 0, block_size);
1207 qat_sym_session_init_common_hdr(struct icp_qat_fw_comn_req_hdr *header,
1208 enum qat_sym_proto_flag proto_flags)
1211 ICP_QAT_FW_COMN_HDR_FLAGS_BUILD(ICP_QAT_FW_COMN_REQ_FLAG_SET);
1212 header->service_type = ICP_QAT_FW_COMN_REQ_CPM_FW_LA;
1213 header->comn_req_flags =
1214 ICP_QAT_FW_COMN_FLAGS_BUILD(QAT_COMN_CD_FLD_TYPE_64BIT_ADR,
1215 QAT_COMN_PTR_TYPE_FLAT);
1216 ICP_QAT_FW_LA_PARTIAL_SET(header->serv_specif_flags,
1217 ICP_QAT_FW_LA_PARTIAL_NONE);
1218 ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(header->serv_specif_flags,
1219 ICP_QAT_FW_CIPH_IV_16BYTE_DATA);
1221 switch (proto_flags) {
1222 case QAT_CRYPTO_PROTO_FLAG_NONE:
1223 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1224 ICP_QAT_FW_LA_NO_PROTO);
1226 case QAT_CRYPTO_PROTO_FLAG_CCM:
1227 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1228 ICP_QAT_FW_LA_CCM_PROTO);
1230 case QAT_CRYPTO_PROTO_FLAG_GCM:
1231 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1232 ICP_QAT_FW_LA_GCM_PROTO);
1234 case QAT_CRYPTO_PROTO_FLAG_SNOW3G:
1235 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1236 ICP_QAT_FW_LA_SNOW_3G_PROTO);
1238 case QAT_CRYPTO_PROTO_FLAG_ZUC:
1239 ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(header->serv_specif_flags,
1240 ICP_QAT_FW_LA_ZUC_3G_PROTO);
1244 ICP_QAT_FW_LA_UPDATE_STATE_SET(header->serv_specif_flags,
1245 ICP_QAT_FW_LA_NO_UPDATE_STATE);
1246 ICP_QAT_FW_LA_DIGEST_IN_BUFFER_SET(header->serv_specif_flags,
1247 ICP_QAT_FW_LA_NO_DIGEST_IN_BUFFER);
1251 * Snow3G and ZUC should never use this function
1252 * and set its protocol flag in both cipher and auth part of content
1253 * descriptor building function
1255 static enum qat_sym_proto_flag
1256 qat_get_crypto_proto_flag(uint16_t flags)
1258 int proto = ICP_QAT_FW_LA_PROTO_GET(flags);
1259 enum qat_sym_proto_flag qat_proto_flag =
1260 QAT_CRYPTO_PROTO_FLAG_NONE;
1263 case ICP_QAT_FW_LA_GCM_PROTO:
1264 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
1266 case ICP_QAT_FW_LA_CCM_PROTO:
1267 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
1271 return qat_proto_flag;
/** Build the cipher portion of the QAT content descriptor.
 *
 * Writes the cipher config word and key material into the session's
 * content descriptor memory (cdesc->cd) and fills in the cipher control
 * header of the firmware request template (cdesc->fw_req).
 *
 * @param cdesc        session holding the request template and CD memory
 * @param cipherkey    cipher key bytes supplied by the application
 * @param cipherkeylen length of @cipherkey in bytes
 * @return 0 on success, -EFAULT if the session's service command is not
 *         a cipher-capable command
 */
int qat_sym_session_aead_create_cd_cipher(struct qat_sym_session *cdesc,
						const uint8_t *cipherkey,
						uint32_t cipherkeylen)
{
	struct icp_qat_hw_cipher_algo_blk *cipher;
	struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
	struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
	struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
	/* cd_ctrl is a union-like area: viewed as cipher or auth ctrl hdr */
	void *ptr = &req_tmpl->cd_ctrl;
	struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
	struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
	enum icp_qat_hw_cipher_convert key_convert;
	enum qat_sym_proto_flag qat_proto_flag =
		QAT_CRYPTO_PROTO_FLAG_NONE;
	uint32_t total_key_size;
	uint16_t cipher_offset, cd_size;
	uint32_t wordIndex = 0;
	uint32_t *temp_key = NULL;

	/* Chain the firmware slices according to the service command. */
	if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER) {
		cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
		ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_DRAM_WR);
		/* Cipher-only: no auth result to return or compare */
		ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_RET_AUTH_RES);
		ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
		cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
		ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_DRAM_WR);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
		QAT_LOG(ERR, "Invalid param, must be a cipher command.");
		return -EFAULT;
	}

	if (cdesc->qat_mode == ICP_QAT_HW_CIPHER_CTR_MODE) {
		/*
		 * CTR Streaming ciphers are a special case. Decrypt = encrypt
		 * Overriding default values previously set
		 */
		cdesc->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
		key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2
		|| cdesc->qat_cipher_alg ==
			ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)
		key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
	else if (cdesc->qat_dir == ICP_QAT_HW_CIPHER_ENCRYPT)
		key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
	else
		key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;

	/* Per-algorithm key/state sizing (sizes programmed in 8-byte words) */
	if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
		total_key_size = ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ +
			ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
		cipher_cd_ctrl->cipher_state_sz =
			ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;

	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
		total_key_size = ICP_QAT_HW_KASUMI_F8_KEY_SZ;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_KASUMI_BLK_SZ >> 3;
		cipher_cd_ctrl->cipher_padding_sz =
					(2 * ICP_QAT_HW_KASUMI_BLK_SZ) >> 3;
	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES) {
		total_key_size = ICP_QAT_HW_3DES_KEY_SZ;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_3DES_BLK_SZ >> 3;
		qat_proto_flag =
			qat_get_crypto_proto_flag(header->serv_specif_flags);
	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_DES) {
		total_key_size = ICP_QAT_HW_DES_KEY_SZ;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_DES_BLK_SZ >> 3;
		qat_proto_flag =
			qat_get_crypto_proto_flag(header->serv_specif_flags);
	} else if (cdesc->qat_cipher_alg ==
		ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
		total_key_size = ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ +
			ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
		cipher_cd_ctrl->cipher_state_sz =
			ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
		cdesc->min_qat_dev_gen = QAT_GEN2;
	} else {
		total_key_size = cipherkeylen;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_AES_BLK_SZ >> 3;
		qat_proto_flag =
			qat_get_crypto_proto_flag(header->serv_specif_flags);
	}
	cipher_cd_ctrl->cipher_key_sz = total_key_size >> 3;
	cipher_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
	cipher_cd_ctrl->cipher_cfg_offset = cipher_offset >> 3;

	header->service_cmd_id = cdesc->qat_cmd;
	qat_sym_session_init_common_hdr(header, qat_proto_flag);

	cipher = (struct icp_qat_hw_cipher_algo_blk *)cdesc->cd_cur_ptr;
	cipher->cipher_config.val =
	    ICP_QAT_HW_CIPHER_CONFIG_BUILD(cdesc->qat_mode,
					cdesc->qat_cipher_alg, key_convert,
					cdesc->qat_dir);

	if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
		/* KASUMI F8 needs the key twice: plain, then XOR-ed with KM */
		temp_key = (uint32_t *)(cdesc->cd_cur_ptr +
					sizeof(struct icp_qat_hw_cipher_config)
					+ cipherkeylen);
		memcpy(cipher->key, cipherkey, cipherkeylen);
		memcpy(temp_key, cipherkey, cipherkeylen);

		/* XOR Key with KASUMI F8 key modifier at 4 bytes level */
		for (wordIndex = 0; wordIndex < (cipherkeylen >> 2);
								wordIndex++)
			temp_key[wordIndex] ^= KASUMI_F8_KEY_MODIFIER_4_BYTES;

		cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
					cipherkeylen + cipherkeylen;
	} else {
		memcpy(cipher->key, cipherkey, cipherkeylen);
		cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
					cipherkeylen;
	}

	/* Pad out to the hardware key size for short-keyed algorithms */
	if (total_key_size > cipherkeylen) {
		uint32_t padding_size = total_key_size-cipherkeylen;
		if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
			&& (cipherkeylen == QAT_3DES_KEY_SZ_OPT2)) {
			/* K3 not provided so use K1 = K3*/
			memcpy(cdesc->cd_cur_ptr, cipherkey, padding_size);
		} else if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
			&& (cipherkeylen == QAT_3DES_KEY_SZ_OPT3)) {
			/* K2 and K3 not provided so use K1 = K2 = K3*/
			memcpy(cdesc->cd_cur_ptr, cipherkey,
				cipherkeylen);
			memcpy(cdesc->cd_cur_ptr+cipherkeylen,
				cipherkey, cipherkeylen);
		} else
			memset(cdesc->cd_cur_ptr, 0, padding_size);

		cdesc->cd_cur_ptr += padding_size;
	}
	cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
	cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;

	return 0;
}
/** Build the auth portion of the QAT content descriptor.
 *
 * Writes the inner hash setup, state1/state2 precompute data and any
 * algorithm-specific trailing blocks into the session's content
 * descriptor (cdesc->cd), and fills in the auth control header and the
 * auth request parameters of the firmware request template.
 *
 * @param cdesc      session holding the request template and CD memory
 * @param authkey    authentication key bytes supplied by the application
 * @param authkeylen length of @authkey in bytes
 * @param aad_length AAD length for GCM/CCM algorithms, in bytes
 * @param digestsize digest length in bytes
 * @param operation  RTE_CRYPTO_AUTH_OP_GENERATE or _VERIFY
 * @return 0 on success, -EFAULT on invalid command/algorithm, or a
 *         negative value propagated from qat_hash_get_block_size()
 */
int qat_sym_session_aead_create_cd_auth(struct qat_sym_session *cdesc,
						const uint8_t *authkey,
						uint32_t authkeylen,
						uint32_t aad_length,
						uint32_t digestsize,
						unsigned int operation)
{
	struct icp_qat_hw_auth_setup *hash;
	struct icp_qat_hw_cipher_algo_blk *cipherconfig;
	struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
	struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
	struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
	/* cd_ctrl is a union-like area: viewed as cipher or auth ctrl hdr */
	void *ptr = &req_tmpl->cd_ctrl;
	struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
	struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
	/* Auth request params live right after the cipher request params */
	struct icp_qat_fw_la_auth_req_params *auth_param =
		(struct icp_qat_fw_la_auth_req_params *)
		((char *)&req_tmpl->serv_specif_rqpars +
		sizeof(struct icp_qat_fw_la_cipher_req_params));
	uint16_t state1_size = 0, state2_size = 0;
	uint16_t hash_offset, cd_size;
	uint32_t *aad_len = NULL;
	uint32_t wordIndex = 0;
	uint32_t *pTempKey;
	enum qat_sym_proto_flag qat_proto_flag =
		QAT_CRYPTO_PROTO_FLAG_NONE;

	/* Chain the firmware slices according to the service command. */
	if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_AUTH) {
		ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_DRAM_WR);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
		ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_DRAM_WR);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
		QAT_LOG(ERR, "Invalid param, must be a hash command.");
		return -EFAULT;
	}

	/* VERIFY: firmware compares the digest; GENERATE: it returns one */
	if (operation == RTE_CRYPTO_AUTH_OP_VERIFY) {
		ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_NO_RET_AUTH_RES);
		ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_CMP_AUTH_RES);
		cdesc->auth_op = ICP_QAT_HW_AUTH_VERIFY;
	} else {
		ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
					   ICP_QAT_FW_LA_RET_AUTH_RES);
		ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
					   ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
		cdesc->auth_op = ICP_QAT_HW_AUTH_GENERATE;
	}

	/*
	 * Setup the inner hash config
	 */
	hash_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
	hash = (struct icp_qat_hw_auth_setup *)cdesc->cd_cur_ptr;
	hash->auth_config.reserved = 0;
	hash->auth_config.config =
			ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE1,
				cdesc->qat_hash_alg, digestsize);

	/* Stateless/keyed-block algorithms don't use the byte counter */
	if (cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_KASUMI_F9
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_NULL
			)
		hash->auth_counter.counter = 0;
	else {
		int block_size = qat_hash_get_block_size(cdesc->qat_hash_alg);

		if (block_size < 0)
			return block_size;
		hash->auth_counter.counter = rte_bswap32(block_size);
	}

	cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_auth_setup);

	/*
	 * cd_cur_ptr now points at the state1 information.
	 */
	switch (cdesc->qat_hash_alg) {
	case ICP_QAT_HW_AUTH_ALGO_SHA1:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA1, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = RTE_ALIGN_CEIL(ICP_QAT_HW_SHA1_STATE2_SZ, 8);
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA224:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA224, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_SHA224_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA256:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA256, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_SHA256_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA384:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA384, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_SHA384_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA512:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA512, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_SHA512_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
		state1_size = ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;

		if (cdesc->aes_cmac)
			memset(cdesc->cd_cur_ptr, 0, state1_size);
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC,
			authkey, authkeylen, cdesc->cd_cur_ptr + state1_size,
			&state2_size, cdesc->aes_cmac)) {
			cdesc->aes_cmac ? QAT_LOG(ERR,
					"(CMAC)precompute failed")
					: QAT_LOG(ERR,
					"(XCBC)precompute failed");
			return -EFAULT;
		}
		break;
	case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
	case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
		state1_size = ICP_QAT_HW_GALOIS_128_STATE1_SZ;
		if (qat_sym_do_precomputes(cdesc->qat_hash_alg, authkey,
			authkeylen, cdesc->cd_cur_ptr + state1_size,
			&state2_size, cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(GCM)precompute failed");
			return -EFAULT;
		}
		/*
		 * Write (the length of AAD) into bytes 16-19 of state2
		 * in big-endian format. This field is 8 bytes
		 */
		auth_param->u2.aad_sz =
				RTE_ALIGN_CEIL(aad_length, 16);
		auth_param->hash_state_sz = (auth_param->u2.aad_sz) >> 3;

		aad_len = (uint32_t *)(cdesc->cd_cur_ptr +
					ICP_QAT_HW_GALOIS_128_STATE1_SZ +
					ICP_QAT_HW_GALOIS_H_SZ);
		*aad_len = rte_bswap32(aad_length);
		cdesc->aad_len = aad_length;
		break;
	case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2);
		state2_size = ICP_QAT_HW_SNOW_3G_UIA2_STATE2_SZ;
		memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);

		/* A cipher config block follows the hash state for UIA2 */
		cipherconfig = (struct icp_qat_hw_cipher_algo_blk *)
				(cdesc->cd_cur_ptr + state1_size + state2_size);
		cipherconfig->cipher_config.val =
		ICP_QAT_HW_CIPHER_CONFIG_BUILD(ICP_QAT_HW_CIPHER_ECB_MODE,
			ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2,
			ICP_QAT_HW_CIPHER_KEY_CONVERT,
			ICP_QAT_HW_CIPHER_ENCRYPT);
		memcpy(cipherconfig->key, authkey, authkeylen);
		memset(cipherconfig->key + authkeylen,
				0, ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ);
		cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
				authkeylen + ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
		auth_param->hash_state_sz = ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
		break;
	case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
		/* ZUC EIA3 runs in MODE0 rather than the default MODE1 */
		hash->auth_config.config =
			ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE0,
				cdesc->qat_hash_alg, digestsize);
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3);
		state2_size = ICP_QAT_HW_ZUC_3G_EIA3_STATE2_SZ;
		memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size
			+ ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ);

		memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
		cdesc->cd_cur_ptr += state1_size + state2_size
			+ ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
		auth_param->hash_state_sz = ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
		cdesc->min_qat_dev_gen = QAT_GEN2;

		break;
	case ICP_QAT_HW_AUTH_ALGO_MD5:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_MD5, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(MD5)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_MD5_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_NULL:
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_NULL);
		state2_size = ICP_QAT_HW_NULL_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC);
		state2_size = ICP_QAT_HW_AES_CBC_MAC_KEY_SZ +
				ICP_QAT_HW_AES_CCM_CBC_E_CTR0_SZ;

		/* CCM AAD is prefixed with B0 and the length-info bytes */
		if (aad_length > 0) {
			aad_length += ICP_QAT_HW_CCM_AAD_B0_LEN +
			ICP_QAT_HW_CCM_AAD_LEN_INFO;
			auth_param->u2.aad_sz =
			RTE_ALIGN_CEIL(aad_length,
			ICP_QAT_HW_CCM_AAD_ALIGNMENT);
		} else {
			auth_param->u2.aad_sz = ICP_QAT_HW_CCM_AAD_B0_LEN;
		}
		cdesc->aad_len = aad_length;
		hash->auth_counter.counter = 0;

		hash_cd_ctrl->outer_prefix_sz = digestsize;
		auth_param->hash_state_sz = digestsize;

		memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
		break;
	case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_KASUMI_F9);
		state2_size = ICP_QAT_HW_KASUMI_F9_STATE2_SZ;
		memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
		pTempKey = (uint32_t *)(cdesc->cd_cur_ptr + state1_size
							+ authkeylen);
		/*
		* The Inner Hash Initial State2 block must contain IK
		* (Initialisation Key), followed by IK XOR-ed with KM
		* (Key Modifier): IK||(IK^KM).
		*/
		/* write the auth key */
		memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
		/* initialise temp key with auth key */
		memcpy(pTempKey, authkey, authkeylen);
		/* XOR Key with KASUMI F9 key modifier at 4 bytes level */
		for (wordIndex = 0; wordIndex < (authkeylen >> 2); wordIndex++)
			pTempKey[wordIndex] ^= KASUMI_F9_KEY_MODIFIER_4_BYTES;
		break;
	default:
		QAT_LOG(ERR, "Invalid HASH alg %u", cdesc->qat_hash_alg);
		return -EFAULT;
	}

	/* Request template setup */
	qat_sym_session_init_common_hdr(header, qat_proto_flag);
	header->service_cmd_id = cdesc->qat_cmd;

	/* Auth CD config setup */
	hash_cd_ctrl->hash_cfg_offset = hash_offset >> 3;
	hash_cd_ctrl->hash_flags = ICP_QAT_FW_AUTH_HDR_FLAG_NO_NESTED;
	hash_cd_ctrl->inner_res_sz = digestsize;
	hash_cd_ctrl->final_sz = digestsize;
	hash_cd_ctrl->inner_state1_sz = state1_size;
	auth_param->auth_res_sz = digestsize;

	hash_cd_ctrl->inner_state2_sz = state2_size;
	hash_cd_ctrl->inner_state2_offset = hash_cd_ctrl->hash_cfg_offset +
			((sizeof(struct icp_qat_hw_auth_setup) +
			 RTE_ALIGN_CEIL(hash_cd_ctrl->inner_state1_sz, 8))
					>> 3);

	cdesc->cd_cur_ptr += state1_size + state2_size;
	cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;

	cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
	cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;

	return 0;
}
1736 int qat_sym_validate_aes_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1739 case ICP_QAT_HW_AES_128_KEY_SZ:
1740 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
1742 case ICP_QAT_HW_AES_192_KEY_SZ:
1743 *alg = ICP_QAT_HW_CIPHER_ALGO_AES192;
1745 case ICP_QAT_HW_AES_256_KEY_SZ:
1746 *alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
1754 int qat_sym_validate_aes_docsisbpi_key(int key_len,
1755 enum icp_qat_hw_cipher_algo *alg)
1758 case ICP_QAT_HW_AES_128_KEY_SZ:
1759 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
1767 int qat_sym_validate_snow3g_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1770 case ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ:
1771 *alg = ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2;
1779 int qat_sym_validate_kasumi_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1782 case ICP_QAT_HW_KASUMI_KEY_SZ:
1783 *alg = ICP_QAT_HW_CIPHER_ALGO_KASUMI;
1791 int qat_sym_validate_des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1794 case ICP_QAT_HW_DES_KEY_SZ:
1795 *alg = ICP_QAT_HW_CIPHER_ALGO_DES;
1803 int qat_sym_validate_3des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1806 case QAT_3DES_KEY_SZ_OPT1:
1807 case QAT_3DES_KEY_SZ_OPT2:
1808 case QAT_3DES_KEY_SZ_OPT3:
1809 *alg = ICP_QAT_HW_CIPHER_ALGO_3DES;
1817 int qat_sym_validate_zuc_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1820 case ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ:
1821 *alg = ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3;