1 /* SPDX-License-Identifier: (BSD-3-Clause OR GPL-2.0)
2 * Copyright(c) 2015-2018 Intel Corporation
5 #include <openssl/sha.h> /* Needed to calculate pre-compute values */
6 #include <openssl/aes.h> /* Needed to calculate pre-compute values */
7 #include <openssl/md5.h> /* Needed to calculate pre-compute values */
8 #include <openssl/evp.h> /* Needed for bpi runt block processing */
10 #include <rte_memcpy.h>
11 #include <rte_common.h>
12 #include <rte_spinlock.h>
13 #include <rte_byteorder.h>
15 #include <rte_malloc.h>
16 #include <rte_crypto_sym.h>
19 #include "qat_sym_session.h"
20 #include "qat_sym_pmd.h"
22 /** Frees a context previously created
23 * Depends on openssl libcrypto
26 bpi_cipher_ctx_free(void *bpi_ctx)
29 EVP_CIPHER_CTX_free((EVP_CIPHER_CTX *)bpi_ctx);
32 /** Creates a context in either AES or DES in ECB mode
33 * Depends on openssl libcrypto
36 bpi_cipher_ctx_init(enum rte_crypto_cipher_algorithm cryptodev_algo,
37 enum rte_crypto_cipher_operation direction __rte_unused,
38 uint8_t *key, void **ctx)
40 const EVP_CIPHER *algo = NULL;
42 *ctx = EVP_CIPHER_CTX_new();
49 if (cryptodev_algo == RTE_CRYPTO_CIPHER_DES_DOCSISBPI)
52 algo = EVP_aes_128_ecb();
54 /* IV will be ECB encrypted whether direction is encrypt or decrypt*/
55 if (EVP_EncryptInit_ex(*ctx, algo, NULL, key, 0) != 1) {
64 EVP_CIPHER_CTX_free(*ctx);
69 qat_is_cipher_alg_supported(enum rte_crypto_cipher_algorithm algo,
70 struct qat_sym_dev_private *internals)
73 const struct rte_cryptodev_capabilities *capability;
75 while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
76 RTE_CRYPTO_OP_TYPE_UNDEFINED) {
77 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
80 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_CIPHER)
83 if (capability->sym.cipher.algo == algo)
90 qat_is_auth_alg_supported(enum rte_crypto_auth_algorithm algo,
91 struct qat_sym_dev_private *internals)
94 const struct rte_cryptodev_capabilities *capability;
96 while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
97 RTE_CRYPTO_OP_TYPE_UNDEFINED) {
98 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
101 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_AUTH)
104 if (capability->sym.auth.algo == algo)
111 qat_sym_session_clear(struct rte_cryptodev *dev,
112 struct rte_cryptodev_sym_session *sess)
114 uint8_t index = dev->driver_id;
115 void *sess_priv = get_sym_session_private_data(sess, index);
116 struct qat_sym_session *s = (struct qat_sym_session *)sess_priv;
120 bpi_cipher_ctx_free(s->bpi_ctx);
121 memset(s, 0, qat_sym_session_get_private_size(dev));
122 struct rte_mempool *sess_mp = rte_mempool_from_obj(sess_priv);
124 set_sym_session_private_data(sess, index, NULL);
125 rte_mempool_put(sess_mp, sess_priv);
130 qat_get_cmd_id(const struct rte_crypto_sym_xform *xform)
133 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER && xform->next == NULL)
134 return ICP_QAT_FW_LA_CMD_CIPHER;
136 /* Authentication Only */
137 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH && xform->next == NULL)
138 return ICP_QAT_FW_LA_CMD_AUTH;
141 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
142 /* AES-GCM and AES-CCM works with different direction
143 * GCM first encrypts and generate hash where AES-CCM
144 * first generate hash and encrypts. Similar relation
145 * applies to decryption.
147 if (xform->aead.op == RTE_CRYPTO_AEAD_OP_ENCRYPT)
148 if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
149 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
151 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
153 if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
154 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
156 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
159 if (xform->next == NULL)
162 /* Cipher then Authenticate */
163 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
164 xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
165 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
167 /* Authenticate then Cipher */
168 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH &&
169 xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
170 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
175 static struct rte_crypto_auth_xform *
176 qat_get_auth_xform(struct rte_crypto_sym_xform *xform)
179 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH)
188 static struct rte_crypto_cipher_xform *
189 qat_get_cipher_xform(struct rte_crypto_sym_xform *xform)
192 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
193 return &xform->cipher;
202 qat_sym_session_configure_cipher(struct rte_cryptodev *dev,
203 struct rte_crypto_sym_xform *xform,
204 struct qat_sym_session *session)
206 struct qat_sym_dev_private *internals = dev->data->dev_private;
207 struct rte_crypto_cipher_xform *cipher_xform = NULL;
210 /* Get cipher xform from crypto xform chain */
211 cipher_xform = qat_get_cipher_xform(xform);
213 session->cipher_iv.offset = cipher_xform->iv.offset;
214 session->cipher_iv.length = cipher_xform->iv.length;
216 switch (cipher_xform->algo) {
217 case RTE_CRYPTO_CIPHER_AES_CBC:
218 if (qat_sym_validate_aes_key(cipher_xform->key.length,
219 &session->qat_cipher_alg) != 0) {
220 QAT_LOG(ERR, "Invalid AES cipher key size");
224 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
226 case RTE_CRYPTO_CIPHER_AES_CTR:
227 if (qat_sym_validate_aes_key(cipher_xform->key.length,
228 &session->qat_cipher_alg) != 0) {
229 QAT_LOG(ERR, "Invalid AES cipher key size");
233 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
235 case RTE_CRYPTO_CIPHER_SNOW3G_UEA2:
236 if (qat_sym_validate_snow3g_key(cipher_xform->key.length,
237 &session->qat_cipher_alg) != 0) {
238 QAT_LOG(ERR, "Invalid SNOW 3G cipher key size");
242 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
244 case RTE_CRYPTO_CIPHER_NULL:
245 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
247 case RTE_CRYPTO_CIPHER_KASUMI_F8:
248 if (qat_sym_validate_kasumi_key(cipher_xform->key.length,
249 &session->qat_cipher_alg) != 0) {
250 QAT_LOG(ERR, "Invalid KASUMI cipher key size");
254 session->qat_mode = ICP_QAT_HW_CIPHER_F8_MODE;
256 case RTE_CRYPTO_CIPHER_3DES_CBC:
257 if (qat_sym_validate_3des_key(cipher_xform->key.length,
258 &session->qat_cipher_alg) != 0) {
259 QAT_LOG(ERR, "Invalid 3DES cipher key size");
263 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
265 case RTE_CRYPTO_CIPHER_DES_CBC:
266 if (qat_sym_validate_des_key(cipher_xform->key.length,
267 &session->qat_cipher_alg) != 0) {
268 QAT_LOG(ERR, "Invalid DES cipher key size");
272 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
274 case RTE_CRYPTO_CIPHER_3DES_CTR:
275 if (qat_sym_validate_3des_key(cipher_xform->key.length,
276 &session->qat_cipher_alg) != 0) {
277 QAT_LOG(ERR, "Invalid 3DES cipher key size");
281 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
283 case RTE_CRYPTO_CIPHER_DES_DOCSISBPI:
284 ret = bpi_cipher_ctx_init(
287 cipher_xform->key.data,
290 QAT_LOG(ERR, "failed to create DES BPI ctx");
293 if (qat_sym_validate_des_key(cipher_xform->key.length,
294 &session->qat_cipher_alg) != 0) {
295 QAT_LOG(ERR, "Invalid DES cipher key size");
299 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
301 case RTE_CRYPTO_CIPHER_AES_DOCSISBPI:
302 ret = bpi_cipher_ctx_init(
305 cipher_xform->key.data,
308 QAT_LOG(ERR, "failed to create AES BPI ctx");
311 if (qat_sym_validate_aes_docsisbpi_key(cipher_xform->key.length,
312 &session->qat_cipher_alg) != 0) {
313 QAT_LOG(ERR, "Invalid AES DOCSISBPI key size");
317 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
319 case RTE_CRYPTO_CIPHER_ZUC_EEA3:
320 if (!qat_is_cipher_alg_supported(
321 cipher_xform->algo, internals)) {
322 QAT_LOG(ERR, "%s not supported on this device",
323 rte_crypto_cipher_algorithm_strings
324 [cipher_xform->algo]);
328 if (qat_sym_validate_zuc_key(cipher_xform->key.length,
329 &session->qat_cipher_alg) != 0) {
330 QAT_LOG(ERR, "Invalid ZUC cipher key size");
334 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
336 case RTE_CRYPTO_CIPHER_3DES_ECB:
337 case RTE_CRYPTO_CIPHER_AES_ECB:
338 case RTE_CRYPTO_CIPHER_AES_F8:
339 case RTE_CRYPTO_CIPHER_AES_XTS:
340 case RTE_CRYPTO_CIPHER_ARC4:
341 QAT_LOG(ERR, "Crypto QAT PMD: Unsupported Cipher alg %u",
346 QAT_LOG(ERR, "Crypto: Undefined Cipher specified %u\n",
352 if (cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
353 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
355 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
357 if (qat_sym_session_aead_create_cd_cipher(session,
358 cipher_xform->key.data,
359 cipher_xform->key.length)) {
367 if (session->bpi_ctx) {
368 bpi_cipher_ctx_free(session->bpi_ctx);
369 session->bpi_ctx = NULL;
375 qat_sym_session_configure(struct rte_cryptodev *dev,
376 struct rte_crypto_sym_xform *xform,
377 struct rte_cryptodev_sym_session *sess,
378 struct rte_mempool *mempool)
380 void *sess_private_data;
383 if (rte_mempool_get(mempool, &sess_private_data)) {
385 "Couldn't get object from session mempool");
389 ret = qat_sym_session_set_parameters(dev, xform, sess_private_data);
392 "Crypto QAT PMD: failed to configure session parameters");
394 /* Return session to mempool */
395 rte_mempool_put(mempool, sess_private_data);
399 set_sym_session_private_data(sess, dev->driver_id,
406 qat_sym_session_set_parameters(struct rte_cryptodev *dev,
407 struct rte_crypto_sym_xform *xform, void *session_private)
409 struct qat_sym_session *session = session_private;
413 /* Set context descriptor physical address */
414 session->cd_paddr = rte_mempool_virt2iova(session) +
415 offsetof(struct qat_sym_session, cd);
417 session->min_qat_dev_gen = QAT_GEN1;
419 /* Get requested QAT command id */
420 qat_cmd_id = qat_get_cmd_id(xform);
421 if (qat_cmd_id < 0 || qat_cmd_id >= ICP_QAT_FW_LA_CMD_DELIMITER) {
422 QAT_LOG(ERR, "Unsupported xform chain requested");
425 session->qat_cmd = (enum icp_qat_fw_la_cmd_id)qat_cmd_id;
426 switch (session->qat_cmd) {
427 case ICP_QAT_FW_LA_CMD_CIPHER:
428 ret = qat_sym_session_configure_cipher(dev, xform, session);
432 case ICP_QAT_FW_LA_CMD_AUTH:
433 ret = qat_sym_session_configure_auth(dev, xform, session);
437 case ICP_QAT_FW_LA_CMD_CIPHER_HASH:
438 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
439 ret = qat_sym_session_configure_aead(xform,
444 ret = qat_sym_session_configure_cipher(dev,
448 ret = qat_sym_session_configure_auth(dev,
454 case ICP_QAT_FW_LA_CMD_HASH_CIPHER:
455 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
456 ret = qat_sym_session_configure_aead(xform,
461 ret = qat_sym_session_configure_auth(dev,
465 ret = qat_sym_session_configure_cipher(dev,
471 case ICP_QAT_FW_LA_CMD_TRNG_GET_RANDOM:
472 case ICP_QAT_FW_LA_CMD_TRNG_TEST:
473 case ICP_QAT_FW_LA_CMD_SSL3_KEY_DERIVE:
474 case ICP_QAT_FW_LA_CMD_TLS_V1_1_KEY_DERIVE:
475 case ICP_QAT_FW_LA_CMD_TLS_V1_2_KEY_DERIVE:
476 case ICP_QAT_FW_LA_CMD_MGF1:
477 case ICP_QAT_FW_LA_CMD_AUTH_PRE_COMP:
478 case ICP_QAT_FW_LA_CMD_CIPHER_PRE_COMP:
479 case ICP_QAT_FW_LA_CMD_DELIMITER:
480 QAT_LOG(ERR, "Unsupported Service %u",
484 QAT_LOG(ERR, "Unsupported Service %u",
493 qat_sym_session_configure_auth(struct rte_cryptodev *dev,
494 struct rte_crypto_sym_xform *xform,
495 struct qat_sym_session *session)
497 struct rte_crypto_auth_xform *auth_xform = qat_get_auth_xform(xform);
498 struct qat_sym_dev_private *internals = dev->data->dev_private;
499 uint8_t *key_data = auth_xform->key.data;
500 uint8_t key_length = auth_xform->key.length;
501 session->aes_cmac = 0;
503 switch (auth_xform->algo) {
504 case RTE_CRYPTO_AUTH_SHA1_HMAC:
505 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA1;
507 case RTE_CRYPTO_AUTH_SHA224_HMAC:
508 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA224;
510 case RTE_CRYPTO_AUTH_SHA256_HMAC:
511 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA256;
513 case RTE_CRYPTO_AUTH_SHA384_HMAC:
514 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA384;
516 case RTE_CRYPTO_AUTH_SHA512_HMAC:
517 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA512;
519 case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
520 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
522 case RTE_CRYPTO_AUTH_AES_CMAC:
523 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
524 session->aes_cmac = 1;
526 case RTE_CRYPTO_AUTH_AES_GMAC:
527 if (qat_sym_validate_aes_key(auth_xform->key.length,
528 &session->qat_cipher_alg) != 0) {
529 QAT_LOG(ERR, "Invalid AES key size");
532 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
533 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
536 case RTE_CRYPTO_AUTH_SNOW3G_UIA2:
537 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2;
539 case RTE_CRYPTO_AUTH_MD5_HMAC:
540 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_MD5;
542 case RTE_CRYPTO_AUTH_NULL:
543 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_NULL;
545 case RTE_CRYPTO_AUTH_KASUMI_F9:
546 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_KASUMI_F9;
548 case RTE_CRYPTO_AUTH_ZUC_EIA3:
549 if (!qat_is_auth_alg_supported(auth_xform->algo, internals)) {
550 QAT_LOG(ERR, "%s not supported on this device",
551 rte_crypto_auth_algorithm_strings
555 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3;
557 case RTE_CRYPTO_AUTH_SHA1:
558 case RTE_CRYPTO_AUTH_SHA256:
559 case RTE_CRYPTO_AUTH_SHA512:
560 case RTE_CRYPTO_AUTH_SHA224:
561 case RTE_CRYPTO_AUTH_SHA384:
562 case RTE_CRYPTO_AUTH_MD5:
563 case RTE_CRYPTO_AUTH_AES_CBC_MAC:
564 QAT_LOG(ERR, "Crypto: Unsupported hash alg %u",
568 QAT_LOG(ERR, "Crypto: Undefined Hash algo %u specified",
573 session->auth_iv.offset = auth_xform->iv.offset;
574 session->auth_iv.length = auth_xform->iv.length;
576 if (auth_xform->algo == RTE_CRYPTO_AUTH_AES_GMAC) {
577 if (auth_xform->op == RTE_CRYPTO_AUTH_OP_GENERATE) {
578 session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER_HASH;
579 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
581 * It needs to create cipher desc content first,
582 * then authentication
585 if (qat_sym_session_aead_create_cd_cipher(session,
586 auth_xform->key.data,
587 auth_xform->key.length))
590 if (qat_sym_session_aead_create_cd_auth(session,
594 auth_xform->digest_length,
598 session->qat_cmd = ICP_QAT_FW_LA_CMD_HASH_CIPHER;
599 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
601 * It needs to create authentication desc content first,
605 if (qat_sym_session_aead_create_cd_auth(session,
609 auth_xform->digest_length,
613 if (qat_sym_session_aead_create_cd_cipher(session,
614 auth_xform->key.data,
615 auth_xform->key.length))
618 /* Restore to authentication only only */
619 session->qat_cmd = ICP_QAT_FW_LA_CMD_AUTH;
621 if (qat_sym_session_aead_create_cd_auth(session,
625 auth_xform->digest_length,
630 session->digest_length = auth_xform->digest_length;
635 qat_sym_session_configure_aead(struct rte_crypto_sym_xform *xform,
636 struct qat_sym_session *session)
638 struct rte_crypto_aead_xform *aead_xform = &xform->aead;
639 enum rte_crypto_auth_operation crypto_operation;
642 * Store AEAD IV parameters as cipher IV,
643 * to avoid unnecessary memory usage
645 session->cipher_iv.offset = xform->aead.iv.offset;
646 session->cipher_iv.length = xform->aead.iv.length;
648 switch (aead_xform->algo) {
649 case RTE_CRYPTO_AEAD_AES_GCM:
650 if (qat_sym_validate_aes_key(aead_xform->key.length,
651 &session->qat_cipher_alg) != 0) {
652 QAT_LOG(ERR, "Invalid AES key size");
655 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
656 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
658 case RTE_CRYPTO_AEAD_AES_CCM:
659 if (qat_sym_validate_aes_key(aead_xform->key.length,
660 &session->qat_cipher_alg) != 0) {
661 QAT_LOG(ERR, "Invalid AES key size");
664 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
665 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC;
668 QAT_LOG(ERR, "Crypto: Undefined AEAD specified %u\n",
673 if ((aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT &&
674 aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) ||
675 (aead_xform->op == RTE_CRYPTO_AEAD_OP_DECRYPT &&
676 aead_xform->algo == RTE_CRYPTO_AEAD_AES_CCM)) {
677 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
679 * It needs to create cipher desc content first,
680 * then authentication
682 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
683 RTE_CRYPTO_AUTH_OP_GENERATE : RTE_CRYPTO_AUTH_OP_VERIFY;
685 if (qat_sym_session_aead_create_cd_cipher(session,
686 aead_xform->key.data,
687 aead_xform->key.length))
690 if (qat_sym_session_aead_create_cd_auth(session,
691 aead_xform->key.data,
692 aead_xform->key.length,
693 aead_xform->aad_length,
694 aead_xform->digest_length,
698 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
700 * It needs to create authentication desc content first,
704 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
705 RTE_CRYPTO_AUTH_OP_VERIFY : RTE_CRYPTO_AUTH_OP_GENERATE;
707 if (qat_sym_session_aead_create_cd_auth(session,
708 aead_xform->key.data,
709 aead_xform->key.length,
710 aead_xform->aad_length,
711 aead_xform->digest_length,
715 if (qat_sym_session_aead_create_cd_cipher(session,
716 aead_xform->key.data,
717 aead_xform->key.length))
721 session->digest_length = aead_xform->digest_length;
725 unsigned int qat_sym_session_get_private_size(
726 struct rte_cryptodev *dev __rte_unused)
728 return RTE_ALIGN_CEIL(sizeof(struct qat_sym_session), 8);
731 /* returns block size in bytes per cipher algo */
732 int qat_cipher_get_block_size(enum icp_qat_hw_cipher_algo qat_cipher_alg)
734 switch (qat_cipher_alg) {
735 case ICP_QAT_HW_CIPHER_ALGO_DES:
736 return ICP_QAT_HW_DES_BLK_SZ;
737 case ICP_QAT_HW_CIPHER_ALGO_3DES:
738 return ICP_QAT_HW_3DES_BLK_SZ;
739 case ICP_QAT_HW_CIPHER_ALGO_AES128:
740 case ICP_QAT_HW_CIPHER_ALGO_AES192:
741 case ICP_QAT_HW_CIPHER_ALGO_AES256:
742 return ICP_QAT_HW_AES_BLK_SZ;
744 QAT_LOG(ERR, "invalid block cipher alg %u", qat_cipher_alg);
751 * Returns size in bytes per hash algo for state1 size field in cd_ctrl
752 * This is digest size rounded up to nearest quadword
754 static int qat_hash_get_state1_size(enum icp_qat_hw_auth_algo qat_hash_alg)
756 switch (qat_hash_alg) {
757 case ICP_QAT_HW_AUTH_ALGO_SHA1:
758 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA1_STATE1_SZ,
759 QAT_HW_DEFAULT_ALIGNMENT);
760 case ICP_QAT_HW_AUTH_ALGO_SHA224:
761 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA224_STATE1_SZ,
762 QAT_HW_DEFAULT_ALIGNMENT);
763 case ICP_QAT_HW_AUTH_ALGO_SHA256:
764 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA256_STATE1_SZ,
765 QAT_HW_DEFAULT_ALIGNMENT);
766 case ICP_QAT_HW_AUTH_ALGO_SHA384:
767 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA384_STATE1_SZ,
768 QAT_HW_DEFAULT_ALIGNMENT);
769 case ICP_QAT_HW_AUTH_ALGO_SHA512:
770 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
771 QAT_HW_DEFAULT_ALIGNMENT);
772 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
773 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ,
774 QAT_HW_DEFAULT_ALIGNMENT);
775 case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
776 case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
777 return QAT_HW_ROUND_UP(ICP_QAT_HW_GALOIS_128_STATE1_SZ,
778 QAT_HW_DEFAULT_ALIGNMENT);
779 case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
780 return QAT_HW_ROUND_UP(ICP_QAT_HW_ZUC_3G_EIA3_STATE1_SZ,
781 QAT_HW_DEFAULT_ALIGNMENT);
782 case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
783 return QAT_HW_ROUND_UP(ICP_QAT_HW_SNOW_3G_UIA2_STATE1_SZ,
784 QAT_HW_DEFAULT_ALIGNMENT);
785 case ICP_QAT_HW_AUTH_ALGO_MD5:
786 return QAT_HW_ROUND_UP(ICP_QAT_HW_MD5_STATE1_SZ,
787 QAT_HW_DEFAULT_ALIGNMENT);
788 case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
789 return QAT_HW_ROUND_UP(ICP_QAT_HW_KASUMI_F9_STATE1_SZ,
790 QAT_HW_DEFAULT_ALIGNMENT);
791 case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
792 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_CBC_MAC_STATE1_SZ,
793 QAT_HW_DEFAULT_ALIGNMENT);
794 case ICP_QAT_HW_AUTH_ALGO_NULL:
795 return QAT_HW_ROUND_UP(ICP_QAT_HW_NULL_STATE1_SZ,
796 QAT_HW_DEFAULT_ALIGNMENT);
797 case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
798 /* return maximum state1 size in this case */
799 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
800 QAT_HW_DEFAULT_ALIGNMENT);
802 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
808 /* returns digest size in bytes per hash algo */
809 static int qat_hash_get_digest_size(enum icp_qat_hw_auth_algo qat_hash_alg)
811 switch (qat_hash_alg) {
812 case ICP_QAT_HW_AUTH_ALGO_SHA1:
813 return ICP_QAT_HW_SHA1_STATE1_SZ;
814 case ICP_QAT_HW_AUTH_ALGO_SHA224:
815 return ICP_QAT_HW_SHA224_STATE1_SZ;
816 case ICP_QAT_HW_AUTH_ALGO_SHA256:
817 return ICP_QAT_HW_SHA256_STATE1_SZ;
818 case ICP_QAT_HW_AUTH_ALGO_SHA384:
819 return ICP_QAT_HW_SHA384_STATE1_SZ;
820 case ICP_QAT_HW_AUTH_ALGO_SHA512:
821 return ICP_QAT_HW_SHA512_STATE1_SZ;
822 case ICP_QAT_HW_AUTH_ALGO_MD5:
823 return ICP_QAT_HW_MD5_STATE1_SZ;
824 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
825 return ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
826 case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
827 /* return maximum digest size in this case */
828 return ICP_QAT_HW_SHA512_STATE1_SZ;
830 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
836 /* returns block size in byes per hash algo */
837 static int qat_hash_get_block_size(enum icp_qat_hw_auth_algo qat_hash_alg)
839 switch (qat_hash_alg) {
840 case ICP_QAT_HW_AUTH_ALGO_SHA1:
842 case ICP_QAT_HW_AUTH_ALGO_SHA224:
843 return SHA256_CBLOCK;
844 case ICP_QAT_HW_AUTH_ALGO_SHA256:
845 return SHA256_CBLOCK;
846 case ICP_QAT_HW_AUTH_ALGO_SHA384:
847 return SHA512_CBLOCK;
848 case ICP_QAT_HW_AUTH_ALGO_SHA512:
849 return SHA512_CBLOCK;
850 case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
852 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
853 return ICP_QAT_HW_AES_BLK_SZ;
854 case ICP_QAT_HW_AUTH_ALGO_MD5:
856 case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
857 /* return maximum block size in this case */
858 return SHA512_CBLOCK;
860 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
866 static int partial_hash_sha1(uint8_t *data_in, uint8_t *data_out)
870 if (!SHA1_Init(&ctx))
872 SHA1_Transform(&ctx, data_in);
873 rte_memcpy(data_out, &ctx, SHA_DIGEST_LENGTH);
877 static int partial_hash_sha224(uint8_t *data_in, uint8_t *data_out)
881 if (!SHA224_Init(&ctx))
883 SHA256_Transform(&ctx, data_in);
884 rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
888 static int partial_hash_sha256(uint8_t *data_in, uint8_t *data_out)
892 if (!SHA256_Init(&ctx))
894 SHA256_Transform(&ctx, data_in);
895 rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
899 static int partial_hash_sha384(uint8_t *data_in, uint8_t *data_out)
903 if (!SHA384_Init(&ctx))
905 SHA512_Transform(&ctx, data_in);
906 rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
910 static int partial_hash_sha512(uint8_t *data_in, uint8_t *data_out)
914 if (!SHA512_Init(&ctx))
916 SHA512_Transform(&ctx, data_in);
917 rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
921 static int partial_hash_md5(uint8_t *data_in, uint8_t *data_out)
927 MD5_Transform(&ctx, data_in);
928 rte_memcpy(data_out, &ctx, MD5_DIGEST_LENGTH);
933 static int partial_hash_compute(enum icp_qat_hw_auth_algo hash_alg,
938 uint8_t digest[qat_hash_get_digest_size(
939 ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
940 uint32_t *hash_state_out_be32;
941 uint64_t *hash_state_out_be64;
944 digest_size = qat_hash_get_digest_size(hash_alg);
945 if (digest_size <= 0)
948 hash_state_out_be32 = (uint32_t *)data_out;
949 hash_state_out_be64 = (uint64_t *)data_out;
952 case ICP_QAT_HW_AUTH_ALGO_SHA1:
953 if (partial_hash_sha1(data_in, digest))
955 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
956 *hash_state_out_be32 =
957 rte_bswap32(*(((uint32_t *)digest)+i));
959 case ICP_QAT_HW_AUTH_ALGO_SHA224:
960 if (partial_hash_sha224(data_in, digest))
962 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
963 *hash_state_out_be32 =
964 rte_bswap32(*(((uint32_t *)digest)+i));
966 case ICP_QAT_HW_AUTH_ALGO_SHA256:
967 if (partial_hash_sha256(data_in, digest))
969 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
970 *hash_state_out_be32 =
971 rte_bswap32(*(((uint32_t *)digest)+i));
973 case ICP_QAT_HW_AUTH_ALGO_SHA384:
974 if (partial_hash_sha384(data_in, digest))
976 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
977 *hash_state_out_be64 =
978 rte_bswap64(*(((uint64_t *)digest)+i));
980 case ICP_QAT_HW_AUTH_ALGO_SHA512:
981 if (partial_hash_sha512(data_in, digest))
983 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
984 *hash_state_out_be64 =
985 rte_bswap64(*(((uint64_t *)digest)+i));
987 case ICP_QAT_HW_AUTH_ALGO_MD5:
988 if (partial_hash_md5(data_in, data_out))
992 QAT_LOG(ERR, "invalid hash alg %u", hash_alg);
/* HMAC inner/outer pad bytes (RFC 2104) XORed over the padded key */
#define HMAC_IPAD_VALUE	0x36
#define HMAC_OPAD_VALUE	0x5c
/* AES-XCBC-MAC derives three subkeys (K1, K2, K3) */
#define HASH_XCBC_PRECOMP_KEY_NUM 3

/* Zero-filled block encrypted with the session key to seed CMAC subkey
 * derivation; static const so it is all-zero by C initialisation rules.
 */
static const uint8_t AES_CMAC_SEED[ICP_QAT_HW_AES_128_KEY_SZ];
1004 static void aes_cmac_key_derive(uint8_t *base, uint8_t *derived)
1008 derived[0] = base[0] << 1;
1009 for (i = 1; i < ICP_QAT_HW_AES_BLK_SZ ; i++) {
1010 derived[i] = base[i] << 1;
1011 derived[i - 1] |= base[i] >> 7;
1015 derived[ICP_QAT_HW_AES_BLK_SZ - 1] ^= QAT_AES_CMAC_CONST_RB;
1018 static int qat_sym_do_precomputes(enum icp_qat_hw_auth_algo hash_alg,
1019 const uint8_t *auth_key,
1020 uint16_t auth_keylen,
1021 uint8_t *p_state_buf,
1022 uint16_t *p_state_len,
1026 uint8_t ipad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1027 uint8_t opad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1030 if (hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC) {
1036 uint8_t k0[ICP_QAT_HW_AES_128_KEY_SZ];
1039 auth_keylen = ICP_QAT_HW_AES_128_KEY_SZ;
1041 in = rte_zmalloc("AES CMAC K1",
1042 ICP_QAT_HW_AES_128_KEY_SZ, 16);
1045 QAT_LOG(ERR, "Failed to alloc memory");
1049 rte_memcpy(in, AES_CMAC_SEED,
1050 ICP_QAT_HW_AES_128_KEY_SZ);
1051 rte_memcpy(p_state_buf, auth_key, auth_keylen);
1053 if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1059 AES_encrypt(in, k0, &enc_key);
1061 k1 = p_state_buf + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1062 k2 = k1 + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1064 aes_cmac_key_derive(k0, k1);
1065 aes_cmac_key_derive(k1, k2);
1067 memset(k0, 0, ICP_QAT_HW_AES_128_KEY_SZ);
1068 *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
1072 static uint8_t qat_aes_xcbc_key_seed[
1073 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ] = {
1074 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1075 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1076 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1077 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1078 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1079 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1083 uint8_t *out = p_state_buf;
1087 in = rte_zmalloc("working mem for key",
1088 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ, 16);
1090 QAT_LOG(ERR, "Failed to alloc memory");
1094 rte_memcpy(in, qat_aes_xcbc_key_seed,
1095 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1096 for (x = 0; x < HASH_XCBC_PRECOMP_KEY_NUM; x++) {
1097 if (AES_set_encrypt_key(auth_key,
1101 (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ));
1103 (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ),
1104 0, ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1107 AES_encrypt(in, out, &enc_key);
1108 in += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1109 out += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1111 *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
1112 rte_free(in - x*ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ);
1116 } else if ((hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_128) ||
1117 (hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_64)) {
1119 uint8_t *out = p_state_buf;
1122 memset(p_state_buf, 0, ICP_QAT_HW_GALOIS_H_SZ +
1123 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1124 ICP_QAT_HW_GALOIS_E_CTR0_SZ);
1125 in = rte_zmalloc("working mem for key",
1126 ICP_QAT_HW_GALOIS_H_SZ, 16);
1128 QAT_LOG(ERR, "Failed to alloc memory");
1132 memset(in, 0, ICP_QAT_HW_GALOIS_H_SZ);
1133 if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1137 AES_encrypt(in, out, &enc_key);
1138 *p_state_len = ICP_QAT_HW_GALOIS_H_SZ +
1139 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1140 ICP_QAT_HW_GALOIS_E_CTR0_SZ;
1145 block_size = qat_hash_get_block_size(hash_alg);
1148 /* init ipad and opad from key and xor with fixed values */
1149 memset(ipad, 0, block_size);
1150 memset(opad, 0, block_size);
1152 if (auth_keylen > (unsigned int)block_size) {
1153 QAT_LOG(ERR, "invalid keylen %u", auth_keylen);
1156 rte_memcpy(ipad, auth_key, auth_keylen);
1157 rte_memcpy(opad, auth_key, auth_keylen);
1159 for (i = 0; i < block_size; i++) {
1160 uint8_t *ipad_ptr = ipad + i;
1161 uint8_t *opad_ptr = opad + i;
1162 *ipad_ptr ^= HMAC_IPAD_VALUE;
1163 *opad_ptr ^= HMAC_OPAD_VALUE;
1166 /* do partial hash of ipad and copy to state1 */
1167 if (partial_hash_compute(hash_alg, ipad, p_state_buf)) {
1168 memset(ipad, 0, block_size);
1169 memset(opad, 0, block_size);
1170 QAT_LOG(ERR, "ipad precompute failed");
1175 * State len is a multiple of 8, so may be larger than the digest.
1176 * Put the partial hash of opad state_len bytes after state1
1178 *p_state_len = qat_hash_get_state1_size(hash_alg);
1179 if (partial_hash_compute(hash_alg, opad, p_state_buf + *p_state_len)) {
1180 memset(ipad, 0, block_size);
1181 memset(opad, 0, block_size);
1182 QAT_LOG(ERR, "opad precompute failed");
1186 /* don't leave data lying around */
1187 memset(ipad, 0, block_size);
1188 memset(opad, 0, block_size);
1193 qat_sym_session_init_common_hdr(struct icp_qat_fw_comn_req_hdr *header,
1194 enum qat_sym_proto_flag proto_flags)
1197 ICP_QAT_FW_COMN_HDR_FLAGS_BUILD(ICP_QAT_FW_COMN_REQ_FLAG_SET);
1198 header->service_type = ICP_QAT_FW_COMN_REQ_CPM_FW_LA;
1199 header->comn_req_flags =
1200 ICP_QAT_FW_COMN_FLAGS_BUILD(QAT_COMN_CD_FLD_TYPE_64BIT_ADR,
1201 QAT_COMN_PTR_TYPE_FLAT);
1202 ICP_QAT_FW_LA_PARTIAL_SET(header->serv_specif_flags,
1203 ICP_QAT_FW_LA_PARTIAL_NONE);
1204 ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(header->serv_specif_flags,
1205 ICP_QAT_FW_CIPH_IV_16BYTE_DATA);
1207 switch (proto_flags) {
1208 case QAT_CRYPTO_PROTO_FLAG_NONE:
1209 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1210 ICP_QAT_FW_LA_NO_PROTO);
1212 case QAT_CRYPTO_PROTO_FLAG_CCM:
1213 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1214 ICP_QAT_FW_LA_CCM_PROTO);
1216 case QAT_CRYPTO_PROTO_FLAG_GCM:
1217 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1218 ICP_QAT_FW_LA_GCM_PROTO);
1220 case QAT_CRYPTO_PROTO_FLAG_SNOW3G:
1221 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1222 ICP_QAT_FW_LA_SNOW_3G_PROTO);
1224 case QAT_CRYPTO_PROTO_FLAG_ZUC:
1225 ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(header->serv_specif_flags,
1226 ICP_QAT_FW_LA_ZUC_3G_PROTO);
1230 ICP_QAT_FW_LA_UPDATE_STATE_SET(header->serv_specif_flags,
1231 ICP_QAT_FW_LA_NO_UPDATE_STATE);
1232 ICP_QAT_FW_LA_DIGEST_IN_BUFFER_SET(header->serv_specif_flags,
1233 ICP_QAT_FW_LA_NO_DIGEST_IN_BUFFER);
1237 * Snow3G and ZUC should never use this function
1238 * and set its protocol flag in both cipher and auth part of content
1239 * descriptor building function
1241 static enum qat_sym_proto_flag
1242 qat_get_crypto_proto_flag(uint16_t flags)
1244 int proto = ICP_QAT_FW_LA_PROTO_GET(flags);
1245 enum qat_sym_proto_flag qat_proto_flag =
1246 QAT_CRYPTO_PROTO_FLAG_NONE;
1249 case ICP_QAT_FW_LA_GCM_PROTO:
1250 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
1252 case ICP_QAT_FW_LA_CCM_PROTO:
1253 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
1257 return qat_proto_flag;
1260 int qat_sym_session_aead_create_cd_cipher(struct qat_sym_session *cdesc,
1262 uint32_t cipherkeylen)
1264 struct icp_qat_hw_cipher_algo_blk *cipher;
1265 struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
1266 struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
1267 struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
1268 void *ptr = &req_tmpl->cd_ctrl;
1269 struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
1270 struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
1271 enum icp_qat_hw_cipher_convert key_convert;
1272 enum qat_sym_proto_flag qat_proto_flag =
1273 QAT_CRYPTO_PROTO_FLAG_NONE;
1274 uint32_t total_key_size;
1275 uint16_t cipher_offset, cd_size;
1276 uint32_t wordIndex = 0;
1277 uint32_t *temp_key = NULL;
1279 if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER) {
1280 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1281 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1282 ICP_QAT_FW_SLICE_CIPHER);
1283 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1284 ICP_QAT_FW_SLICE_DRAM_WR);
1285 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1286 ICP_QAT_FW_LA_NO_RET_AUTH_RES);
1287 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1288 ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
1289 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1290 } else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
1291 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1292 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1293 ICP_QAT_FW_SLICE_CIPHER);
1294 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1295 ICP_QAT_FW_SLICE_AUTH);
1296 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1297 ICP_QAT_FW_SLICE_AUTH);
1298 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1299 ICP_QAT_FW_SLICE_DRAM_WR);
1300 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1301 } else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
1302 QAT_LOG(ERR, "Invalid param, must be a cipher command.");
1306 if (cdesc->qat_mode == ICP_QAT_HW_CIPHER_CTR_MODE) {
1308 * CTR Streaming ciphers are a special case. Decrypt = encrypt
1309 * Overriding default values previously set
1311 cdesc->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
1312 key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
1313 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2
1314 || cdesc->qat_cipher_alg ==
1315 ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)
1316 key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
1317 else if (cdesc->qat_dir == ICP_QAT_HW_CIPHER_ENCRYPT)
1318 key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
1320 key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
1322 if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
1323 total_key_size = ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ +
1324 ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
1325 cipher_cd_ctrl->cipher_state_sz =
1326 ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
1327 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
1329 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
1330 total_key_size = ICP_QAT_HW_KASUMI_F8_KEY_SZ;
1331 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_KASUMI_BLK_SZ >> 3;
1332 cipher_cd_ctrl->cipher_padding_sz =
1333 (2 * ICP_QAT_HW_KASUMI_BLK_SZ) >> 3;
1334 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES) {
1335 total_key_size = ICP_QAT_HW_3DES_KEY_SZ;
1336 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_3DES_BLK_SZ >> 3;
1338 qat_get_crypto_proto_flag(header->serv_specif_flags);
1339 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_DES) {
1340 total_key_size = ICP_QAT_HW_DES_KEY_SZ;
1341 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_DES_BLK_SZ >> 3;
1343 qat_get_crypto_proto_flag(header->serv_specif_flags);
1344 } else if (cdesc->qat_cipher_alg ==
1345 ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
1346 total_key_size = ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ +
1347 ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
1348 cipher_cd_ctrl->cipher_state_sz =
1349 ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
1350 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
1351 cdesc->min_qat_dev_gen = QAT_GEN2;
1353 total_key_size = cipherkeylen;
1354 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_AES_BLK_SZ >> 3;
1356 qat_get_crypto_proto_flag(header->serv_specif_flags);
1358 cipher_cd_ctrl->cipher_key_sz = total_key_size >> 3;
1359 cipher_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
1360 cipher_cd_ctrl->cipher_cfg_offset = cipher_offset >> 3;
1362 header->service_cmd_id = cdesc->qat_cmd;
1363 qat_sym_session_init_common_hdr(header, qat_proto_flag);
1365 cipher = (struct icp_qat_hw_cipher_algo_blk *)cdesc->cd_cur_ptr;
1366 cipher->cipher_config.val =
1367 ICP_QAT_HW_CIPHER_CONFIG_BUILD(cdesc->qat_mode,
1368 cdesc->qat_cipher_alg, key_convert,
1371 if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
1372 temp_key = (uint32_t *)(cdesc->cd_cur_ptr +
1373 sizeof(struct icp_qat_hw_cipher_config)
1375 memcpy(cipher->key, cipherkey, cipherkeylen);
1376 memcpy(temp_key, cipherkey, cipherkeylen);
1378 /* XOR Key with KASUMI F8 key modifier at 4 bytes level */
1379 for (wordIndex = 0; wordIndex < (cipherkeylen >> 2);
1381 temp_key[wordIndex] ^= KASUMI_F8_KEY_MODIFIER_4_BYTES;
1383 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1384 cipherkeylen + cipherkeylen;
1386 memcpy(cipher->key, cipherkey, cipherkeylen);
1387 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1391 if (total_key_size > cipherkeylen) {
1392 uint32_t padding_size = total_key_size-cipherkeylen;
1393 if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
1394 && (cipherkeylen == QAT_3DES_KEY_SZ_OPT2)) {
1395 /* K3 not provided so use K1 = K3*/
1396 memcpy(cdesc->cd_cur_ptr, cipherkey, padding_size);
1397 } else if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
1398 && (cipherkeylen == QAT_3DES_KEY_SZ_OPT3)) {
1399 /* K2 and K3 not provided so use K1 = K2 = K3*/
1400 memcpy(cdesc->cd_cur_ptr, cipherkey,
1402 memcpy(cdesc->cd_cur_ptr+cipherkeylen,
1403 cipherkey, cipherkeylen);
1405 memset(cdesc->cd_cur_ptr, 0, padding_size);
1407 cdesc->cd_cur_ptr += padding_size;
1409 cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
1410 cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;
1415 int qat_sym_session_aead_create_cd_auth(struct qat_sym_session *cdesc,
1417 uint32_t authkeylen,
1418 uint32_t aad_length,
1419 uint32_t digestsize,
1420 unsigned int operation)
1422 struct icp_qat_hw_auth_setup *hash;
1423 struct icp_qat_hw_cipher_algo_blk *cipherconfig;
1424 struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
1425 struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
1426 struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
1427 void *ptr = &req_tmpl->cd_ctrl;
1428 struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
1429 struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
1430 struct icp_qat_fw_la_auth_req_params *auth_param =
1431 (struct icp_qat_fw_la_auth_req_params *)
1432 ((char *)&req_tmpl->serv_specif_rqpars +
1433 sizeof(struct icp_qat_fw_la_cipher_req_params));
1434 uint16_t state1_size = 0, state2_size = 0;
1435 uint16_t hash_offset, cd_size;
1436 uint32_t *aad_len = NULL;
1437 uint32_t wordIndex = 0;
1439 enum qat_sym_proto_flag qat_proto_flag =
1440 QAT_CRYPTO_PROTO_FLAG_NONE;
1442 if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_AUTH) {
1443 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1444 ICP_QAT_FW_SLICE_AUTH);
1445 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1446 ICP_QAT_FW_SLICE_DRAM_WR);
1447 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1448 } else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
1449 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1450 ICP_QAT_FW_SLICE_AUTH);
1451 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1452 ICP_QAT_FW_SLICE_CIPHER);
1453 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1454 ICP_QAT_FW_SLICE_CIPHER);
1455 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1456 ICP_QAT_FW_SLICE_DRAM_WR);
1457 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1458 } else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
1459 QAT_LOG(ERR, "Invalid param, must be a hash command.");
1463 if (operation == RTE_CRYPTO_AUTH_OP_VERIFY) {
1464 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1465 ICP_QAT_FW_LA_NO_RET_AUTH_RES);
1466 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1467 ICP_QAT_FW_LA_CMP_AUTH_RES);
1468 cdesc->auth_op = ICP_QAT_HW_AUTH_VERIFY;
1470 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1471 ICP_QAT_FW_LA_RET_AUTH_RES);
1472 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1473 ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
1474 cdesc->auth_op = ICP_QAT_HW_AUTH_GENERATE;
1478 * Setup the inner hash config
1480 hash_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
1481 hash = (struct icp_qat_hw_auth_setup *)cdesc->cd_cur_ptr;
1482 hash->auth_config.reserved = 0;
1483 hash->auth_config.config =
1484 ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE1,
1485 cdesc->qat_hash_alg, digestsize);
1487 if (cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2
1488 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_KASUMI_F9
1489 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3
1490 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC
1491 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC
1492 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_NULL
1494 hash->auth_counter.counter = 0;
1496 int block_size = qat_hash_get_block_size(cdesc->qat_hash_alg);
1500 hash->auth_counter.counter = rte_bswap32(block_size);
1503 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_auth_setup);
1506 * cd_cur_ptr now points at the state1 information.
1508 switch (cdesc->qat_hash_alg) {
1509 case ICP_QAT_HW_AUTH_ALGO_SHA1:
1510 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA1, authkey,
1511 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1513 QAT_LOG(ERR, "(SHA)precompute failed");
1516 state2_size = RTE_ALIGN_CEIL(ICP_QAT_HW_SHA1_STATE2_SZ, 8);
1518 case ICP_QAT_HW_AUTH_ALGO_SHA224:
1519 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA224, authkey,
1520 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1522 QAT_LOG(ERR, "(SHA)precompute failed");
1525 state2_size = ICP_QAT_HW_SHA224_STATE2_SZ;
1527 case ICP_QAT_HW_AUTH_ALGO_SHA256:
1528 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA256, authkey,
1529 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1531 QAT_LOG(ERR, "(SHA)precompute failed");
1534 state2_size = ICP_QAT_HW_SHA256_STATE2_SZ;
1536 case ICP_QAT_HW_AUTH_ALGO_SHA384:
1537 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA384, authkey,
1538 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1540 QAT_LOG(ERR, "(SHA)precompute failed");
1543 state2_size = ICP_QAT_HW_SHA384_STATE2_SZ;
1545 case ICP_QAT_HW_AUTH_ALGO_SHA512:
1546 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA512, authkey,
1547 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1549 QAT_LOG(ERR, "(SHA)precompute failed");
1552 state2_size = ICP_QAT_HW_SHA512_STATE2_SZ;
1554 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
1555 state1_size = ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1557 if (cdesc->aes_cmac)
1558 memset(cdesc->cd_cur_ptr, 0, state1_size);
1559 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC,
1560 authkey, authkeylen, cdesc->cd_cur_ptr + state1_size,
1561 &state2_size, cdesc->aes_cmac)) {
1562 cdesc->aes_cmac ? QAT_LOG(ERR,
1563 "(CMAC)precompute failed")
1565 "(XCBC)precompute failed");
1569 case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
1570 case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
1571 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
1572 state1_size = ICP_QAT_HW_GALOIS_128_STATE1_SZ;
1573 if (qat_sym_do_precomputes(cdesc->qat_hash_alg, authkey,
1574 authkeylen, cdesc->cd_cur_ptr + state1_size,
1575 &state2_size, cdesc->aes_cmac)) {
1576 QAT_LOG(ERR, "(GCM)precompute failed");
1580 * Write (the length of AAD) into bytes 16-19 of state2
1581 * in big-endian format. This field is 8 bytes
1583 auth_param->u2.aad_sz =
1584 RTE_ALIGN_CEIL(aad_length, 16);
1585 auth_param->hash_state_sz = (auth_param->u2.aad_sz) >> 3;
1587 aad_len = (uint32_t *)(cdesc->cd_cur_ptr +
1588 ICP_QAT_HW_GALOIS_128_STATE1_SZ +
1589 ICP_QAT_HW_GALOIS_H_SZ);
1590 *aad_len = rte_bswap32(aad_length);
1591 cdesc->aad_len = aad_length;
1593 case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
1594 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
1595 state1_size = qat_hash_get_state1_size(
1596 ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2);
1597 state2_size = ICP_QAT_HW_SNOW_3G_UIA2_STATE2_SZ;
1598 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
1600 cipherconfig = (struct icp_qat_hw_cipher_algo_blk *)
1601 (cdesc->cd_cur_ptr + state1_size + state2_size);
1602 cipherconfig->cipher_config.val =
1603 ICP_QAT_HW_CIPHER_CONFIG_BUILD(ICP_QAT_HW_CIPHER_ECB_MODE,
1604 ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2,
1605 ICP_QAT_HW_CIPHER_KEY_CONVERT,
1606 ICP_QAT_HW_CIPHER_ENCRYPT);
1607 memcpy(cipherconfig->key, authkey, authkeylen);
1608 memset(cipherconfig->key + authkeylen,
1609 0, ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ);
1610 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1611 authkeylen + ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
1612 auth_param->hash_state_sz = ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
1614 case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
1615 hash->auth_config.config =
1616 ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE0,
1617 cdesc->qat_hash_alg, digestsize);
1618 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
1619 state1_size = qat_hash_get_state1_size(
1620 ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3);
1621 state2_size = ICP_QAT_HW_ZUC_3G_EIA3_STATE2_SZ;
1622 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size
1623 + ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ);
1625 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1626 cdesc->cd_cur_ptr += state1_size + state2_size
1627 + ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
1628 auth_param->hash_state_sz = ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
1629 cdesc->min_qat_dev_gen = QAT_GEN2;
1632 case ICP_QAT_HW_AUTH_ALGO_MD5:
1633 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_MD5, authkey,
1634 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1636 QAT_LOG(ERR, "(MD5)precompute failed");
1639 state2_size = ICP_QAT_HW_MD5_STATE2_SZ;
1641 case ICP_QAT_HW_AUTH_ALGO_NULL:
1642 state1_size = qat_hash_get_state1_size(
1643 ICP_QAT_HW_AUTH_ALGO_NULL);
1644 state2_size = ICP_QAT_HW_NULL_STATE2_SZ;
1646 case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
1647 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
1648 state1_size = qat_hash_get_state1_size(
1649 ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC);
1650 state2_size = ICP_QAT_HW_AES_CBC_MAC_KEY_SZ +
1651 ICP_QAT_HW_AES_CCM_CBC_E_CTR0_SZ;
1653 if (aad_length > 0) {
1654 aad_length += ICP_QAT_HW_CCM_AAD_B0_LEN +
1655 ICP_QAT_HW_CCM_AAD_LEN_INFO;
1656 auth_param->u2.aad_sz =
1657 RTE_ALIGN_CEIL(aad_length,
1658 ICP_QAT_HW_CCM_AAD_ALIGNMENT);
1660 auth_param->u2.aad_sz = ICP_QAT_HW_CCM_AAD_B0_LEN;
1662 cdesc->aad_len = aad_length;
1663 hash->auth_counter.counter = 0;
1665 hash_cd_ctrl->outer_prefix_sz = digestsize;
1666 auth_param->hash_state_sz = digestsize;
1668 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1670 case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
1671 state1_size = qat_hash_get_state1_size(
1672 ICP_QAT_HW_AUTH_ALGO_KASUMI_F9);
1673 state2_size = ICP_QAT_HW_KASUMI_F9_STATE2_SZ;
1674 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
1675 pTempKey = (uint32_t *)(cdesc->cd_cur_ptr + state1_size
1678 * The Inner Hash Initial State2 block must contain IK
1679 * (Initialisation Key), followed by IK XOR-ed with KM
1680 * (Key Modifier): IK||(IK^KM).
1682 /* write the auth key */
1683 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1684 /* initialise temp key with auth key */
1685 memcpy(pTempKey, authkey, authkeylen);
1686 /* XOR Key with KASUMI F9 key modifier at 4 bytes level */
1687 for (wordIndex = 0; wordIndex < (authkeylen >> 2); wordIndex++)
1688 pTempKey[wordIndex] ^= KASUMI_F9_KEY_MODIFIER_4_BYTES;
1691 QAT_LOG(ERR, "Invalid HASH alg %u", cdesc->qat_hash_alg);
1695 /* Request template setup */
1696 qat_sym_session_init_common_hdr(header, qat_proto_flag);
1697 header->service_cmd_id = cdesc->qat_cmd;
1699 /* Auth CD config setup */
1700 hash_cd_ctrl->hash_cfg_offset = hash_offset >> 3;
1701 hash_cd_ctrl->hash_flags = ICP_QAT_FW_AUTH_HDR_FLAG_NO_NESTED;
1702 hash_cd_ctrl->inner_res_sz = digestsize;
1703 hash_cd_ctrl->final_sz = digestsize;
1704 hash_cd_ctrl->inner_state1_sz = state1_size;
1705 auth_param->auth_res_sz = digestsize;
1707 hash_cd_ctrl->inner_state2_sz = state2_size;
1708 hash_cd_ctrl->inner_state2_offset = hash_cd_ctrl->hash_cfg_offset +
1709 ((sizeof(struct icp_qat_hw_auth_setup) +
1710 RTE_ALIGN_CEIL(hash_cd_ctrl->inner_state1_sz, 8))
1713 cdesc->cd_cur_ptr += state1_size + state2_size;
1714 cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
1716 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1717 cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;
1722 int qat_sym_validate_aes_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1725 case ICP_QAT_HW_AES_128_KEY_SZ:
1726 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
1728 case ICP_QAT_HW_AES_192_KEY_SZ:
1729 *alg = ICP_QAT_HW_CIPHER_ALGO_AES192;
1731 case ICP_QAT_HW_AES_256_KEY_SZ:
1732 *alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
1740 int qat_sym_validate_aes_docsisbpi_key(int key_len,
1741 enum icp_qat_hw_cipher_algo *alg)
1744 case ICP_QAT_HW_AES_128_KEY_SZ:
1745 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
1753 int qat_sym_validate_snow3g_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1756 case ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ:
1757 *alg = ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2;
1765 int qat_sym_validate_kasumi_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1768 case ICP_QAT_HW_KASUMI_KEY_SZ:
1769 *alg = ICP_QAT_HW_CIPHER_ALGO_KASUMI;
1777 int qat_sym_validate_des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1780 case ICP_QAT_HW_DES_KEY_SZ:
1781 *alg = ICP_QAT_HW_CIPHER_ALGO_DES;
1789 int qat_sym_validate_3des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1792 case QAT_3DES_KEY_SZ_OPT1:
1793 case QAT_3DES_KEY_SZ_OPT2:
1794 case QAT_3DES_KEY_SZ_OPT3:
1795 *alg = ICP_QAT_HW_CIPHER_ALGO_3DES;
1803 int qat_sym_validate_zuc_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1806 case ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ:
1807 *alg = ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3;