/* SPDX-License-Identifier: (BSD-3-Clause OR GPL-2.0)
 * Copyright(c) 2015-2018 Intel Corporation
 */
4 #include <rte_memcpy.h>
5 #include <rte_common.h>
6 #include <rte_spinlock.h>
7 #include <rte_byteorder.h>
9 #include <rte_malloc.h>
10 #include <rte_crypto_sym.h>
13 #include "qat_device.h"
15 #include <openssl/sha.h> /* Needed to calculate pre-compute values */
16 #include <openssl/aes.h> /* Needed to calculate pre-compute values */
17 #include <openssl/md5.h> /* Needed to calculate pre-compute values */
18 #include <openssl/evp.h>
20 #include "qat_sym_session.h"
22 /** Frees a context previously created
23 * Depends on openssl libcrypto
26 bpi_cipher_ctx_free(void *bpi_ctx)
29 EVP_CIPHER_CTX_free((EVP_CIPHER_CTX *)bpi_ctx);
33 bpi_cipher_ctx_init(enum rte_crypto_cipher_algorithm cryptodev_algo,
34 enum rte_crypto_cipher_operation direction __rte_unused,
35 uint8_t *key, void **ctx)
37 const EVP_CIPHER *algo = NULL;
39 *ctx = EVP_CIPHER_CTX_new();
46 if (cryptodev_algo == RTE_CRYPTO_CIPHER_DES_DOCSISBPI)
49 algo = EVP_aes_128_ecb();
51 /* IV will be ECB encrypted whether direction is encrypt or decrypt*/
52 if (EVP_EncryptInit_ex(*ctx, algo, NULL, key, 0) != 1) {
61 EVP_CIPHER_CTX_free(*ctx);
66 qat_is_cipher_alg_supported(enum rte_crypto_cipher_algorithm algo,
67 struct qat_pmd_private *internals)
70 const struct rte_cryptodev_capabilities *capability;
72 while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
73 RTE_CRYPTO_OP_TYPE_UNDEFINED) {
74 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
77 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_CIPHER)
80 if (capability->sym.cipher.algo == algo)
87 qat_is_auth_alg_supported(enum rte_crypto_auth_algorithm algo,
88 struct qat_pmd_private *internals)
91 const struct rte_cryptodev_capabilities *capability;
93 while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
94 RTE_CRYPTO_OP_TYPE_UNDEFINED) {
95 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
98 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_AUTH)
101 if (capability->sym.auth.algo == algo)
108 qat_crypto_sym_clear_session(struct rte_cryptodev *dev,
109 struct rte_cryptodev_sym_session *sess)
111 PMD_INIT_FUNC_TRACE();
112 uint8_t index = dev->driver_id;
113 void *sess_priv = get_session_private_data(sess, index);
114 struct qat_session *s = (struct qat_session *)sess_priv;
118 bpi_cipher_ctx_free(s->bpi_ctx);
119 memset(s, 0, qat_crypto_sym_get_session_private_size(dev));
120 struct rte_mempool *sess_mp = rte_mempool_from_obj(sess_priv);
122 set_session_private_data(sess, index, NULL);
123 rte_mempool_put(sess_mp, sess_priv);
128 qat_get_cmd_id(const struct rte_crypto_sym_xform *xform)
131 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER && xform->next == NULL)
132 return ICP_QAT_FW_LA_CMD_CIPHER;
134 /* Authentication Only */
135 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH && xform->next == NULL)
136 return ICP_QAT_FW_LA_CMD_AUTH;
139 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
140 /* AES-GCM and AES-CCM works with different direction
141 * GCM first encrypts and generate hash where AES-CCM
142 * first generate hash and encrypts. Similar relation
143 * applies to decryption.
145 if (xform->aead.op == RTE_CRYPTO_AEAD_OP_ENCRYPT)
146 if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
147 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
149 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
151 if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
152 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
154 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
157 if (xform->next == NULL)
160 /* Cipher then Authenticate */
161 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
162 xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
163 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
165 /* Authenticate then Cipher */
166 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH &&
167 xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
168 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
173 static struct rte_crypto_auth_xform *
174 qat_get_auth_xform(struct rte_crypto_sym_xform *xform)
177 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH)
186 static struct rte_crypto_cipher_xform *
187 qat_get_cipher_xform(struct rte_crypto_sym_xform *xform)
190 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
191 return &xform->cipher;
200 qat_crypto_sym_configure_session_cipher(struct rte_cryptodev *dev,
201 struct rte_crypto_sym_xform *xform,
202 struct qat_session *session)
204 struct qat_pmd_private *internals = dev->data->dev_private;
205 struct rte_crypto_cipher_xform *cipher_xform = NULL;
208 /* Get cipher xform from crypto xform chain */
209 cipher_xform = qat_get_cipher_xform(xform);
211 session->cipher_iv.offset = cipher_xform->iv.offset;
212 session->cipher_iv.length = cipher_xform->iv.length;
214 switch (cipher_xform->algo) {
215 case RTE_CRYPTO_CIPHER_AES_CBC:
216 if (qat_alg_validate_aes_key(cipher_xform->key.length,
217 &session->qat_cipher_alg) != 0) {
218 PMD_DRV_LOG(ERR, "Invalid AES cipher key size");
222 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
224 case RTE_CRYPTO_CIPHER_AES_CTR:
225 if (qat_alg_validate_aes_key(cipher_xform->key.length,
226 &session->qat_cipher_alg) != 0) {
227 PMD_DRV_LOG(ERR, "Invalid AES cipher key size");
231 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
233 case RTE_CRYPTO_CIPHER_SNOW3G_UEA2:
234 if (qat_alg_validate_snow3g_key(cipher_xform->key.length,
235 &session->qat_cipher_alg) != 0) {
236 PMD_DRV_LOG(ERR, "Invalid SNOW 3G cipher key size");
240 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
242 case RTE_CRYPTO_CIPHER_NULL:
243 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
245 case RTE_CRYPTO_CIPHER_KASUMI_F8:
246 if (qat_alg_validate_kasumi_key(cipher_xform->key.length,
247 &session->qat_cipher_alg) != 0) {
248 PMD_DRV_LOG(ERR, "Invalid KASUMI cipher key size");
252 session->qat_mode = ICP_QAT_HW_CIPHER_F8_MODE;
254 case RTE_CRYPTO_CIPHER_3DES_CBC:
255 if (qat_alg_validate_3des_key(cipher_xform->key.length,
256 &session->qat_cipher_alg) != 0) {
257 PMD_DRV_LOG(ERR, "Invalid 3DES cipher key size");
261 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
263 case RTE_CRYPTO_CIPHER_DES_CBC:
264 if (qat_alg_validate_des_key(cipher_xform->key.length,
265 &session->qat_cipher_alg) != 0) {
266 PMD_DRV_LOG(ERR, "Invalid DES cipher key size");
270 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
272 case RTE_CRYPTO_CIPHER_3DES_CTR:
273 if (qat_alg_validate_3des_key(cipher_xform->key.length,
274 &session->qat_cipher_alg) != 0) {
275 PMD_DRV_LOG(ERR, "Invalid 3DES cipher key size");
279 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
281 case RTE_CRYPTO_CIPHER_DES_DOCSISBPI:
282 ret = bpi_cipher_ctx_init(
285 cipher_xform->key.data,
288 PMD_DRV_LOG(ERR, "failed to create DES BPI ctx");
291 if (qat_alg_validate_des_key(cipher_xform->key.length,
292 &session->qat_cipher_alg) != 0) {
293 PMD_DRV_LOG(ERR, "Invalid DES cipher key size");
297 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
299 case RTE_CRYPTO_CIPHER_AES_DOCSISBPI:
300 ret = bpi_cipher_ctx_init(
303 cipher_xform->key.data,
306 PMD_DRV_LOG(ERR, "failed to create AES BPI ctx");
309 if (qat_alg_validate_aes_docsisbpi_key(cipher_xform->key.length,
310 &session->qat_cipher_alg) != 0) {
311 PMD_DRV_LOG(ERR, "Invalid AES DOCSISBPI key size");
315 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
317 case RTE_CRYPTO_CIPHER_ZUC_EEA3:
318 if (!qat_is_cipher_alg_supported(
319 cipher_xform->algo, internals)) {
320 PMD_DRV_LOG(ERR, "%s not supported on this device",
321 rte_crypto_cipher_algorithm_strings
322 [cipher_xform->algo]);
326 if (qat_alg_validate_zuc_key(cipher_xform->key.length,
327 &session->qat_cipher_alg) != 0) {
328 PMD_DRV_LOG(ERR, "Invalid ZUC cipher key size");
332 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
334 case RTE_CRYPTO_CIPHER_3DES_ECB:
335 case RTE_CRYPTO_CIPHER_AES_ECB:
336 case RTE_CRYPTO_CIPHER_AES_F8:
337 case RTE_CRYPTO_CIPHER_AES_XTS:
338 case RTE_CRYPTO_CIPHER_ARC4:
339 PMD_DRV_LOG(ERR, "Crypto QAT PMD: Unsupported Cipher alg %u",
344 PMD_DRV_LOG(ERR, "Crypto: Undefined Cipher specified %u\n",
350 if (cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
351 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
353 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
355 if (qat_alg_aead_session_create_content_desc_cipher(session,
356 cipher_xform->key.data,
357 cipher_xform->key.length)) {
365 if (session->bpi_ctx) {
366 bpi_cipher_ctx_free(session->bpi_ctx);
367 session->bpi_ctx = NULL;
373 qat_crypto_sym_configure_session(struct rte_cryptodev *dev,
374 struct rte_crypto_sym_xform *xform,
375 struct rte_cryptodev_sym_session *sess,
376 struct rte_mempool *mempool)
378 void *sess_private_data;
381 if (rte_mempool_get(mempool, &sess_private_data)) {
383 "Couldn't get object from session mempool");
387 ret = qat_crypto_set_session_parameters(dev, xform, sess_private_data);
390 "Crypto QAT PMD: failed to configure session parameters");
392 /* Return session to mempool */
393 rte_mempool_put(mempool, sess_private_data);
397 set_session_private_data(sess, dev->driver_id,
404 qat_crypto_set_session_parameters(struct rte_cryptodev *dev,
405 struct rte_crypto_sym_xform *xform, void *session_private)
407 struct qat_session *session = session_private;
411 PMD_INIT_FUNC_TRACE();
413 /* Set context descriptor physical address */
414 session->cd_paddr = rte_mempool_virt2iova(session) +
415 offsetof(struct qat_session, cd);
417 session->min_qat_dev_gen = QAT_GEN1;
419 /* Get requested QAT command id */
420 qat_cmd_id = qat_get_cmd_id(xform);
421 if (qat_cmd_id < 0 || qat_cmd_id >= ICP_QAT_FW_LA_CMD_DELIMITER) {
422 PMD_DRV_LOG(ERR, "Unsupported xform chain requested");
425 session->qat_cmd = (enum icp_qat_fw_la_cmd_id)qat_cmd_id;
426 switch (session->qat_cmd) {
427 case ICP_QAT_FW_LA_CMD_CIPHER:
428 ret = qat_crypto_sym_configure_session_cipher(dev,
433 case ICP_QAT_FW_LA_CMD_AUTH:
434 ret = qat_crypto_sym_configure_session_auth(dev,
439 case ICP_QAT_FW_LA_CMD_CIPHER_HASH:
440 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
441 ret = qat_crypto_sym_configure_session_aead(xform,
446 ret = qat_crypto_sym_configure_session_cipher(dev,
450 ret = qat_crypto_sym_configure_session_auth(dev,
456 case ICP_QAT_FW_LA_CMD_HASH_CIPHER:
457 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
458 ret = qat_crypto_sym_configure_session_aead(xform,
463 ret = qat_crypto_sym_configure_session_auth(dev,
467 ret = qat_crypto_sym_configure_session_cipher(dev,
473 case ICP_QAT_FW_LA_CMD_TRNG_GET_RANDOM:
474 case ICP_QAT_FW_LA_CMD_TRNG_TEST:
475 case ICP_QAT_FW_LA_CMD_SSL3_KEY_DERIVE:
476 case ICP_QAT_FW_LA_CMD_TLS_V1_1_KEY_DERIVE:
477 case ICP_QAT_FW_LA_CMD_TLS_V1_2_KEY_DERIVE:
478 case ICP_QAT_FW_LA_CMD_MGF1:
479 case ICP_QAT_FW_LA_CMD_AUTH_PRE_COMP:
480 case ICP_QAT_FW_LA_CMD_CIPHER_PRE_COMP:
481 case ICP_QAT_FW_LA_CMD_DELIMITER:
482 PMD_DRV_LOG(ERR, "Unsupported Service %u",
486 PMD_DRV_LOG(ERR, "Unsupported Service %u",
495 qat_crypto_sym_configure_session_auth(struct rte_cryptodev *dev,
496 struct rte_crypto_sym_xform *xform,
497 struct qat_session *session)
499 struct rte_crypto_auth_xform *auth_xform = qat_get_auth_xform(xform);
500 struct qat_pmd_private *internals = dev->data->dev_private;
501 uint8_t *key_data = auth_xform->key.data;
502 uint8_t key_length = auth_xform->key.length;
504 switch (auth_xform->algo) {
505 case RTE_CRYPTO_AUTH_SHA1_HMAC:
506 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA1;
508 case RTE_CRYPTO_AUTH_SHA224_HMAC:
509 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA224;
511 case RTE_CRYPTO_AUTH_SHA256_HMAC:
512 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA256;
514 case RTE_CRYPTO_AUTH_SHA384_HMAC:
515 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA384;
517 case RTE_CRYPTO_AUTH_SHA512_HMAC:
518 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA512;
520 case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
521 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
523 case RTE_CRYPTO_AUTH_AES_GMAC:
524 if (qat_alg_validate_aes_key(auth_xform->key.length,
525 &session->qat_cipher_alg) != 0) {
526 PMD_DRV_LOG(ERR, "Invalid AES key size");
529 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
530 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
533 case RTE_CRYPTO_AUTH_SNOW3G_UIA2:
534 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2;
536 case RTE_CRYPTO_AUTH_MD5_HMAC:
537 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_MD5;
539 case RTE_CRYPTO_AUTH_NULL:
540 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_NULL;
542 case RTE_CRYPTO_AUTH_KASUMI_F9:
543 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_KASUMI_F9;
545 case RTE_CRYPTO_AUTH_ZUC_EIA3:
546 if (!qat_is_auth_alg_supported(auth_xform->algo, internals)) {
547 PMD_DRV_LOG(ERR, "%s not supported on this device",
548 rte_crypto_auth_algorithm_strings
552 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3;
554 case RTE_CRYPTO_AUTH_SHA1:
555 case RTE_CRYPTO_AUTH_SHA256:
556 case RTE_CRYPTO_AUTH_SHA512:
557 case RTE_CRYPTO_AUTH_SHA224:
558 case RTE_CRYPTO_AUTH_SHA384:
559 case RTE_CRYPTO_AUTH_MD5:
560 case RTE_CRYPTO_AUTH_AES_CMAC:
561 case RTE_CRYPTO_AUTH_AES_CBC_MAC:
562 PMD_DRV_LOG(ERR, "Crypto: Unsupported hash alg %u",
566 PMD_DRV_LOG(ERR, "Crypto: Undefined Hash algo %u specified",
571 session->auth_iv.offset = auth_xform->iv.offset;
572 session->auth_iv.length = auth_xform->iv.length;
574 if (auth_xform->algo == RTE_CRYPTO_AUTH_AES_GMAC) {
575 if (auth_xform->op == RTE_CRYPTO_AUTH_OP_GENERATE) {
576 session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER_HASH;
577 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
579 * It needs to create cipher desc content first,
580 * then authentication
582 if (qat_alg_aead_session_create_content_desc_cipher(
584 auth_xform->key.data,
585 auth_xform->key.length))
588 if (qat_alg_aead_session_create_content_desc_auth(
593 auth_xform->digest_length,
597 session->qat_cmd = ICP_QAT_FW_LA_CMD_HASH_CIPHER;
598 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
600 * It needs to create authentication desc content first,
603 if (qat_alg_aead_session_create_content_desc_auth(
608 auth_xform->digest_length,
612 if (qat_alg_aead_session_create_content_desc_cipher(
614 auth_xform->key.data,
615 auth_xform->key.length))
618 /* Restore to authentication only only */
619 session->qat_cmd = ICP_QAT_FW_LA_CMD_AUTH;
621 if (qat_alg_aead_session_create_content_desc_auth(session,
625 auth_xform->digest_length,
630 session->digest_length = auth_xform->digest_length;
635 qat_crypto_sym_configure_session_aead(struct rte_crypto_sym_xform *xform,
636 struct qat_session *session)
638 struct rte_crypto_aead_xform *aead_xform = &xform->aead;
639 enum rte_crypto_auth_operation crypto_operation;
642 * Store AEAD IV parameters as cipher IV,
643 * to avoid unnecessary memory usage
645 session->cipher_iv.offset = xform->aead.iv.offset;
646 session->cipher_iv.length = xform->aead.iv.length;
648 switch (aead_xform->algo) {
649 case RTE_CRYPTO_AEAD_AES_GCM:
650 if (qat_alg_validate_aes_key(aead_xform->key.length,
651 &session->qat_cipher_alg) != 0) {
652 PMD_DRV_LOG(ERR, "Invalid AES key size");
655 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
656 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
658 case RTE_CRYPTO_AEAD_AES_CCM:
659 if (qat_alg_validate_aes_key(aead_xform->key.length,
660 &session->qat_cipher_alg) != 0) {
661 PMD_DRV_LOG(ERR, "Invalid AES key size");
664 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
665 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC;
668 PMD_DRV_LOG(ERR, "Crypto: Undefined AEAD specified %u\n",
673 if ((aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT &&
674 aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) ||
675 (aead_xform->op == RTE_CRYPTO_AEAD_OP_DECRYPT &&
676 aead_xform->algo == RTE_CRYPTO_AEAD_AES_CCM)) {
677 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
679 * It needs to create cipher desc content first,
680 * then authentication
682 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
683 RTE_CRYPTO_AUTH_OP_GENERATE : RTE_CRYPTO_AUTH_OP_VERIFY;
685 if (qat_alg_aead_session_create_content_desc_cipher(session,
686 aead_xform->key.data,
687 aead_xform->key.length))
690 if (qat_alg_aead_session_create_content_desc_auth(session,
691 aead_xform->key.data,
692 aead_xform->key.length,
693 aead_xform->aad_length,
694 aead_xform->digest_length,
698 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
700 * It needs to create authentication desc content first,
704 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
705 RTE_CRYPTO_AUTH_OP_VERIFY : RTE_CRYPTO_AUTH_OP_GENERATE;
707 if (qat_alg_aead_session_create_content_desc_auth(session,
708 aead_xform->key.data,
709 aead_xform->key.length,
710 aead_xform->aad_length,
711 aead_xform->digest_length,
715 if (qat_alg_aead_session_create_content_desc_cipher(session,
716 aead_xform->key.data,
717 aead_xform->key.length))
721 session->digest_length = aead_xform->digest_length;
725 unsigned int qat_crypto_sym_get_session_private_size(
726 struct rte_cryptodev *dev __rte_unused)
728 return RTE_ALIGN_CEIL(sizeof(struct qat_session), 8);
731 /* returns block size in bytes per cipher algo */
732 int qat_cipher_get_block_size(enum icp_qat_hw_cipher_algo qat_cipher_alg)
734 switch (qat_cipher_alg) {
735 case ICP_QAT_HW_CIPHER_ALGO_DES:
736 return ICP_QAT_HW_DES_BLK_SZ;
737 case ICP_QAT_HW_CIPHER_ALGO_3DES:
738 return ICP_QAT_HW_3DES_BLK_SZ;
739 case ICP_QAT_HW_CIPHER_ALGO_AES128:
740 case ICP_QAT_HW_CIPHER_ALGO_AES192:
741 case ICP_QAT_HW_CIPHER_ALGO_AES256:
742 return ICP_QAT_HW_AES_BLK_SZ;
744 PMD_DRV_LOG(ERR, "invalid block cipher alg %u", qat_cipher_alg);
751 * Returns size in bytes per hash algo for state1 size field in cd_ctrl
752 * This is digest size rounded up to nearest quadword
754 static int qat_hash_get_state1_size(enum icp_qat_hw_auth_algo qat_hash_alg)
756 switch (qat_hash_alg) {
757 case ICP_QAT_HW_AUTH_ALGO_SHA1:
758 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA1_STATE1_SZ,
759 QAT_HW_DEFAULT_ALIGNMENT);
760 case ICP_QAT_HW_AUTH_ALGO_SHA224:
761 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA224_STATE1_SZ,
762 QAT_HW_DEFAULT_ALIGNMENT);
763 case ICP_QAT_HW_AUTH_ALGO_SHA256:
764 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA256_STATE1_SZ,
765 QAT_HW_DEFAULT_ALIGNMENT);
766 case ICP_QAT_HW_AUTH_ALGO_SHA384:
767 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA384_STATE1_SZ,
768 QAT_HW_DEFAULT_ALIGNMENT);
769 case ICP_QAT_HW_AUTH_ALGO_SHA512:
770 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
771 QAT_HW_DEFAULT_ALIGNMENT);
772 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
773 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ,
774 QAT_HW_DEFAULT_ALIGNMENT);
775 case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
776 case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
777 return QAT_HW_ROUND_UP(ICP_QAT_HW_GALOIS_128_STATE1_SZ,
778 QAT_HW_DEFAULT_ALIGNMENT);
779 case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
780 return QAT_HW_ROUND_UP(ICP_QAT_HW_ZUC_3G_EIA3_STATE1_SZ,
781 QAT_HW_DEFAULT_ALIGNMENT);
782 case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
783 return QAT_HW_ROUND_UP(ICP_QAT_HW_SNOW_3G_UIA2_STATE1_SZ,
784 QAT_HW_DEFAULT_ALIGNMENT);
785 case ICP_QAT_HW_AUTH_ALGO_MD5:
786 return QAT_HW_ROUND_UP(ICP_QAT_HW_MD5_STATE1_SZ,
787 QAT_HW_DEFAULT_ALIGNMENT);
788 case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
789 return QAT_HW_ROUND_UP(ICP_QAT_HW_KASUMI_F9_STATE1_SZ,
790 QAT_HW_DEFAULT_ALIGNMENT);
791 case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
792 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_CBC_MAC_STATE1_SZ,
793 QAT_HW_DEFAULT_ALIGNMENT);
794 case ICP_QAT_HW_AUTH_ALGO_NULL:
795 return QAT_HW_ROUND_UP(ICP_QAT_HW_NULL_STATE1_SZ,
796 QAT_HW_DEFAULT_ALIGNMENT);
797 case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
798 /* return maximum state1 size in this case */
799 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
800 QAT_HW_DEFAULT_ALIGNMENT);
802 PMD_DRV_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
808 /* returns digest size in bytes per hash algo */
809 static int qat_hash_get_digest_size(enum icp_qat_hw_auth_algo qat_hash_alg)
811 switch (qat_hash_alg) {
812 case ICP_QAT_HW_AUTH_ALGO_SHA1:
813 return ICP_QAT_HW_SHA1_STATE1_SZ;
814 case ICP_QAT_HW_AUTH_ALGO_SHA224:
815 return ICP_QAT_HW_SHA224_STATE1_SZ;
816 case ICP_QAT_HW_AUTH_ALGO_SHA256:
817 return ICP_QAT_HW_SHA256_STATE1_SZ;
818 case ICP_QAT_HW_AUTH_ALGO_SHA384:
819 return ICP_QAT_HW_SHA384_STATE1_SZ;
820 case ICP_QAT_HW_AUTH_ALGO_SHA512:
821 return ICP_QAT_HW_SHA512_STATE1_SZ;
822 case ICP_QAT_HW_AUTH_ALGO_MD5:
823 return ICP_QAT_HW_MD5_STATE1_SZ;
824 case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
825 /* return maximum digest size in this case */
826 return ICP_QAT_HW_SHA512_STATE1_SZ;
828 PMD_DRV_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
834 /* returns block size in byes per hash algo */
835 static int qat_hash_get_block_size(enum icp_qat_hw_auth_algo qat_hash_alg)
837 switch (qat_hash_alg) {
838 case ICP_QAT_HW_AUTH_ALGO_SHA1:
840 case ICP_QAT_HW_AUTH_ALGO_SHA224:
841 return SHA256_CBLOCK;
842 case ICP_QAT_HW_AUTH_ALGO_SHA256:
843 return SHA256_CBLOCK;
844 case ICP_QAT_HW_AUTH_ALGO_SHA384:
845 return SHA512_CBLOCK;
846 case ICP_QAT_HW_AUTH_ALGO_SHA512:
847 return SHA512_CBLOCK;
848 case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
850 case ICP_QAT_HW_AUTH_ALGO_MD5:
852 case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
853 /* return maximum block size in this case */
854 return SHA512_CBLOCK;
856 PMD_DRV_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
862 static int partial_hash_sha1(uint8_t *data_in, uint8_t *data_out)
866 if (!SHA1_Init(&ctx))
868 SHA1_Transform(&ctx, data_in);
869 rte_memcpy(data_out, &ctx, SHA_DIGEST_LENGTH);
873 static int partial_hash_sha224(uint8_t *data_in, uint8_t *data_out)
877 if (!SHA224_Init(&ctx))
879 SHA256_Transform(&ctx, data_in);
880 rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
884 static int partial_hash_sha256(uint8_t *data_in, uint8_t *data_out)
888 if (!SHA256_Init(&ctx))
890 SHA256_Transform(&ctx, data_in);
891 rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
895 static int partial_hash_sha384(uint8_t *data_in, uint8_t *data_out)
899 if (!SHA384_Init(&ctx))
901 SHA512_Transform(&ctx, data_in);
902 rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
906 static int partial_hash_sha512(uint8_t *data_in, uint8_t *data_out)
910 if (!SHA512_Init(&ctx))
912 SHA512_Transform(&ctx, data_in);
913 rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
917 static int partial_hash_md5(uint8_t *data_in, uint8_t *data_out)
923 MD5_Transform(&ctx, data_in);
924 rte_memcpy(data_out, &ctx, MD5_DIGEST_LENGTH);
929 static int partial_hash_compute(enum icp_qat_hw_auth_algo hash_alg,
934 uint8_t digest[qat_hash_get_digest_size(
935 ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
936 uint32_t *hash_state_out_be32;
937 uint64_t *hash_state_out_be64;
940 PMD_INIT_FUNC_TRACE();
941 digest_size = qat_hash_get_digest_size(hash_alg);
942 if (digest_size <= 0)
945 hash_state_out_be32 = (uint32_t *)data_out;
946 hash_state_out_be64 = (uint64_t *)data_out;
949 case ICP_QAT_HW_AUTH_ALGO_SHA1:
950 if (partial_hash_sha1(data_in, digest))
952 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
953 *hash_state_out_be32 =
954 rte_bswap32(*(((uint32_t *)digest)+i));
956 case ICP_QAT_HW_AUTH_ALGO_SHA224:
957 if (partial_hash_sha224(data_in, digest))
959 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
960 *hash_state_out_be32 =
961 rte_bswap32(*(((uint32_t *)digest)+i));
963 case ICP_QAT_HW_AUTH_ALGO_SHA256:
964 if (partial_hash_sha256(data_in, digest))
966 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
967 *hash_state_out_be32 =
968 rte_bswap32(*(((uint32_t *)digest)+i));
970 case ICP_QAT_HW_AUTH_ALGO_SHA384:
971 if (partial_hash_sha384(data_in, digest))
973 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
974 *hash_state_out_be64 =
975 rte_bswap64(*(((uint64_t *)digest)+i));
977 case ICP_QAT_HW_AUTH_ALGO_SHA512:
978 if (partial_hash_sha512(data_in, digest))
980 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
981 *hash_state_out_be64 =
982 rte_bswap64(*(((uint64_t *)digest)+i));
984 case ICP_QAT_HW_AUTH_ALGO_MD5:
985 if (partial_hash_md5(data_in, data_out))
989 PMD_DRV_LOG(ERR, "invalid hash alg %u", hash_alg);
995 #define HMAC_IPAD_VALUE 0x36
996 #define HMAC_OPAD_VALUE 0x5c
997 #define HASH_XCBC_PRECOMP_KEY_NUM 3
999 static int qat_alg_do_precomputes(enum icp_qat_hw_auth_algo hash_alg,
1000 const uint8_t *auth_key,
1001 uint16_t auth_keylen,
1002 uint8_t *p_state_buf,
1003 uint16_t *p_state_len)
1006 uint8_t ipad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1007 uint8_t opad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1010 PMD_INIT_FUNC_TRACE();
1011 if (hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC) {
1012 static uint8_t qat_aes_xcbc_key_seed[
1013 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ] = {
1014 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1015 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1016 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1017 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1018 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1019 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1023 uint8_t *out = p_state_buf;
1027 in = rte_zmalloc("working mem for key",
1028 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ, 16);
1030 PMD_DRV_LOG(ERR, "Failed to alloc memory");
1034 rte_memcpy(in, qat_aes_xcbc_key_seed,
1035 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1036 for (x = 0; x < HASH_XCBC_PRECOMP_KEY_NUM; x++) {
1037 if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1040 (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ));
1042 (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ),
1043 0, ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1046 AES_encrypt(in, out, &enc_key);
1047 in += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1048 out += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1050 *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
1051 rte_free(in - x*ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ);
1053 } else if ((hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_128) ||
1054 (hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_64)) {
1056 uint8_t *out = p_state_buf;
1059 memset(p_state_buf, 0, ICP_QAT_HW_GALOIS_H_SZ +
1060 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1061 ICP_QAT_HW_GALOIS_E_CTR0_SZ);
1062 in = rte_zmalloc("working mem for key",
1063 ICP_QAT_HW_GALOIS_H_SZ, 16);
1065 PMD_DRV_LOG(ERR, "Failed to alloc memory");
1069 memset(in, 0, ICP_QAT_HW_GALOIS_H_SZ);
1070 if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1074 AES_encrypt(in, out, &enc_key);
1075 *p_state_len = ICP_QAT_HW_GALOIS_H_SZ +
1076 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1077 ICP_QAT_HW_GALOIS_E_CTR0_SZ;
1082 block_size = qat_hash_get_block_size(hash_alg);
1083 if (block_size <= 0)
1085 /* init ipad and opad from key and xor with fixed values */
1086 memset(ipad, 0, block_size);
1087 memset(opad, 0, block_size);
1089 if (auth_keylen > (unsigned int)block_size) {
1090 PMD_DRV_LOG(ERR, "invalid keylen %u", auth_keylen);
1093 rte_memcpy(ipad, auth_key, auth_keylen);
1094 rte_memcpy(opad, auth_key, auth_keylen);
1096 for (i = 0; i < block_size; i++) {
1097 uint8_t *ipad_ptr = ipad + i;
1098 uint8_t *opad_ptr = opad + i;
1099 *ipad_ptr ^= HMAC_IPAD_VALUE;
1100 *opad_ptr ^= HMAC_OPAD_VALUE;
1103 /* do partial hash of ipad and copy to state1 */
1104 if (partial_hash_compute(hash_alg, ipad, p_state_buf)) {
1105 memset(ipad, 0, block_size);
1106 memset(opad, 0, block_size);
1107 PMD_DRV_LOG(ERR, "ipad precompute failed");
1112 * State len is a multiple of 8, so may be larger than the digest.
1113 * Put the partial hash of opad state_len bytes after state1
1115 *p_state_len = qat_hash_get_state1_size(hash_alg);
1116 if (partial_hash_compute(hash_alg, opad, p_state_buf + *p_state_len)) {
1117 memset(ipad, 0, block_size);
1118 memset(opad, 0, block_size);
1119 PMD_DRV_LOG(ERR, "opad precompute failed");
1123 /* don't leave data lying around */
1124 memset(ipad, 0, block_size);
1125 memset(opad, 0, block_size);
1129 void qat_alg_init_common_hdr(struct icp_qat_fw_comn_req_hdr *header,
1130 enum qat_crypto_proto_flag proto_flags)
1132 PMD_INIT_FUNC_TRACE();
1134 ICP_QAT_FW_COMN_HDR_FLAGS_BUILD(ICP_QAT_FW_COMN_REQ_FLAG_SET);
1135 header->service_type = ICP_QAT_FW_COMN_REQ_CPM_FW_LA;
1136 header->comn_req_flags =
1137 ICP_QAT_FW_COMN_FLAGS_BUILD(QAT_COMN_CD_FLD_TYPE_64BIT_ADR,
1138 QAT_COMN_PTR_TYPE_FLAT);
1139 ICP_QAT_FW_LA_PARTIAL_SET(header->serv_specif_flags,
1140 ICP_QAT_FW_LA_PARTIAL_NONE);
1141 ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(header->serv_specif_flags,
1142 ICP_QAT_FW_CIPH_IV_16BYTE_DATA);
1144 switch (proto_flags) {
1145 case QAT_CRYPTO_PROTO_FLAG_NONE:
1146 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1147 ICP_QAT_FW_LA_NO_PROTO);
1149 case QAT_CRYPTO_PROTO_FLAG_CCM:
1150 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1151 ICP_QAT_FW_LA_CCM_PROTO);
1153 case QAT_CRYPTO_PROTO_FLAG_GCM:
1154 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1155 ICP_QAT_FW_LA_GCM_PROTO);
1157 case QAT_CRYPTO_PROTO_FLAG_SNOW3G:
1158 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1159 ICP_QAT_FW_LA_SNOW_3G_PROTO);
1161 case QAT_CRYPTO_PROTO_FLAG_ZUC:
1162 ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(header->serv_specif_flags,
1163 ICP_QAT_FW_LA_ZUC_3G_PROTO);
1167 ICP_QAT_FW_LA_UPDATE_STATE_SET(header->serv_specif_flags,
1168 ICP_QAT_FW_LA_NO_UPDATE_STATE);
1169 ICP_QAT_FW_LA_DIGEST_IN_BUFFER_SET(header->serv_specif_flags,
1170 ICP_QAT_FW_LA_NO_DIGEST_IN_BUFFER);
1174 * Snow3G and ZUC should never use this function
1175 * and set its protocol flag in both cipher and auth part of content
1176 * descriptor building function
1178 static enum qat_crypto_proto_flag
1179 qat_get_crypto_proto_flag(uint16_t flags)
1181 int proto = ICP_QAT_FW_LA_PROTO_GET(flags);
1182 enum qat_crypto_proto_flag qat_proto_flag =
1183 QAT_CRYPTO_PROTO_FLAG_NONE;
1186 case ICP_QAT_FW_LA_GCM_PROTO:
1187 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
1189 case ICP_QAT_FW_LA_CCM_PROTO:
1190 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
1194 return qat_proto_flag;
1197 int qat_alg_aead_session_create_content_desc_cipher(struct qat_session *cdesc,
1199 uint32_t cipherkeylen)
1201 struct icp_qat_hw_cipher_algo_blk *cipher;
1202 struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
1203 struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
1204 struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
1205 void *ptr = &req_tmpl->cd_ctrl;
1206 struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
1207 struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
1208 enum icp_qat_hw_cipher_convert key_convert;
1209 enum qat_crypto_proto_flag qat_proto_flag =
1210 QAT_CRYPTO_PROTO_FLAG_NONE;
1211 uint32_t total_key_size;
1212 uint16_t cipher_offset, cd_size;
1213 uint32_t wordIndex = 0;
1214 uint32_t *temp_key = NULL;
1215 PMD_INIT_FUNC_TRACE();
1217 if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER) {
1218 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1219 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1220 ICP_QAT_FW_SLICE_CIPHER);
1221 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1222 ICP_QAT_FW_SLICE_DRAM_WR);
1223 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1224 ICP_QAT_FW_LA_NO_RET_AUTH_RES);
1225 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1226 ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
1227 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1228 } else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
1229 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1230 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1231 ICP_QAT_FW_SLICE_CIPHER);
1232 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1233 ICP_QAT_FW_SLICE_AUTH);
1234 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1235 ICP_QAT_FW_SLICE_AUTH);
1236 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1237 ICP_QAT_FW_SLICE_DRAM_WR);
1238 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1239 } else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
1240 PMD_DRV_LOG(ERR, "Invalid param, must be a cipher command.");
1244 if (cdesc->qat_mode == ICP_QAT_HW_CIPHER_CTR_MODE) {
1246 * CTR Streaming ciphers are a special case. Decrypt = encrypt
1247 * Overriding default values previously set
1249 cdesc->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
1250 key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
1251 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2
1252 || cdesc->qat_cipher_alg ==
1253 ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)
1254 key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
1255 else if (cdesc->qat_dir == ICP_QAT_HW_CIPHER_ENCRYPT)
1256 key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
1258 key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
1260 if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
1261 total_key_size = ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ +
1262 ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
1263 cipher_cd_ctrl->cipher_state_sz =
1264 ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
1265 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
1267 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
1268 total_key_size = ICP_QAT_HW_KASUMI_F8_KEY_SZ;
1269 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_KASUMI_BLK_SZ >> 3;
1270 cipher_cd_ctrl->cipher_padding_sz =
1271 (2 * ICP_QAT_HW_KASUMI_BLK_SZ) >> 3;
1272 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES) {
1273 total_key_size = ICP_QAT_HW_3DES_KEY_SZ;
1274 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_3DES_BLK_SZ >> 3;
1276 qat_get_crypto_proto_flag(header->serv_specif_flags);
1277 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_DES) {
1278 total_key_size = ICP_QAT_HW_DES_KEY_SZ;
1279 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_DES_BLK_SZ >> 3;
1281 qat_get_crypto_proto_flag(header->serv_specif_flags);
1282 } else if (cdesc->qat_cipher_alg ==
1283 ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
1284 total_key_size = ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ +
1285 ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
1286 cipher_cd_ctrl->cipher_state_sz =
1287 ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
1288 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
1289 cdesc->min_qat_dev_gen = QAT_GEN2;
1291 total_key_size = cipherkeylen;
1292 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_AES_BLK_SZ >> 3;
1294 qat_get_crypto_proto_flag(header->serv_specif_flags);
1296 cipher_cd_ctrl->cipher_key_sz = total_key_size >> 3;
1297 cipher_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
1298 cipher_cd_ctrl->cipher_cfg_offset = cipher_offset >> 3;
1300 header->service_cmd_id = cdesc->qat_cmd;
1301 qat_alg_init_common_hdr(header, qat_proto_flag);
1303 cipher = (struct icp_qat_hw_cipher_algo_blk *)cdesc->cd_cur_ptr;
1304 cipher->cipher_config.val =
1305 ICP_QAT_HW_CIPHER_CONFIG_BUILD(cdesc->qat_mode,
1306 cdesc->qat_cipher_alg, key_convert,
1309 if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
1310 temp_key = (uint32_t *)(cdesc->cd_cur_ptr +
1311 sizeof(struct icp_qat_hw_cipher_config)
1313 memcpy(cipher->key, cipherkey, cipherkeylen);
1314 memcpy(temp_key, cipherkey, cipherkeylen);
1316 /* XOR Key with KASUMI F8 key modifier at 4 bytes level */
1317 for (wordIndex = 0; wordIndex < (cipherkeylen >> 2);
1319 temp_key[wordIndex] ^= KASUMI_F8_KEY_MODIFIER_4_BYTES;
1321 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1322 cipherkeylen + cipherkeylen;
1324 memcpy(cipher->key, cipherkey, cipherkeylen);
1325 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1329 if (total_key_size > cipherkeylen) {
1330 uint32_t padding_size = total_key_size-cipherkeylen;
1331 if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
1332 && (cipherkeylen == QAT_3DES_KEY_SZ_OPT2))
1333 /* K3 not provided so use K1 = K3*/
1334 memcpy(cdesc->cd_cur_ptr, cipherkey, padding_size);
1336 memset(cdesc->cd_cur_ptr, 0, padding_size);
1337 cdesc->cd_cur_ptr += padding_size;
1339 cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
1340 cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;
/**
 * Build the authentication (hash) portion of the QAT content descriptor
 * for an auth-only or cipher+hash chained session.
 *
 * Writes the icp_qat_hw_auth_setup block, the per-algorithm state1/state2
 * precompute data, and the auth CD control fields into cdesc->cd, advancing
 * cdesc->cd_cur_ptr as each region is appended.
 *
 * NOTE(review): this listing appears to have lines elided by extraction --
 * e.g. 'pTempKey' is used below without a visible declaration, and several
 * switch 'break' / error-return / closing-brace lines are absent. Verify
 * control flow against the complete source before relying on it.
 */
1345 int qat_alg_aead_session_create_content_desc_auth(struct qat_session *cdesc,
1347 						uint32_t authkeylen,
1348 						uint32_t aad_length,
1349 						uint32_t digestsize,
1350 						unsigned int operation)
1352 struct icp_qat_hw_auth_setup *hash;
1353 struct icp_qat_hw_cipher_algo_blk *cipherconfig;
1354 struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
1355 struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
1356 struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
/* Both CD ctrl views alias the same cd_ctrl storage in the request template */
1357 void *ptr = &req_tmpl->cd_ctrl;
1358 struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
1359 struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
/* Auth request params sit immediately after the cipher request params */
1360 struct icp_qat_fw_la_auth_req_params *auth_param =
1361 	(struct icp_qat_fw_la_auth_req_params *)
1362 	((char *)&req_tmpl->serv_specif_rqpars +
1363 	sizeof(struct icp_qat_fw_la_cipher_req_params));
1364 uint16_t state1_size = 0, state2_size = 0;
1365 uint16_t hash_offset, cd_size;
1366 uint32_t *aad_len = NULL;
1367 uint32_t wordIndex = 0;
1369 enum qat_crypto_proto_flag qat_proto_flag =
1370 	QAT_CRYPTO_PROTO_FLAG_NONE;
1372 PMD_INIT_FUNC_TRACE();
/*
 * Chain the firmware processing slices according to the session command:
 * AUTH -> DRAM_WR for auth-only, AUTH -> CIPHER (and CIPHER -> DRAM_WR)
 * for hash-then-cipher. CIPHER_HASH chaining is set up by the cipher-side
 * builder; anything else is rejected.
 */
1374 if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_AUTH) {
1375 	ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1376 		ICP_QAT_FW_SLICE_AUTH);
1377 	ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1378 		ICP_QAT_FW_SLICE_DRAM_WR);
1379 	cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1380 } else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
1381 	ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1382 		ICP_QAT_FW_SLICE_AUTH);
1383 	ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1384 		ICP_QAT_FW_SLICE_CIPHER);
1385 	ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1386 		ICP_QAT_FW_SLICE_CIPHER);
1387 	ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1388 		ICP_QAT_FW_SLICE_DRAM_WR);
1389 	cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1390 } else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
1391 	PMD_DRV_LOG(ERR, "Invalid param, must be a hash command.");
/*
 * VERIFY: the HW compares the digest and does not return it;
 * GENERATE: the HW returns the digest and does no comparison.
 */
1395 if (operation == RTE_CRYPTO_AUTH_OP_VERIFY) {
1396 	ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1397 		ICP_QAT_FW_LA_NO_RET_AUTH_RES);
1398 	ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1399 		ICP_QAT_FW_LA_CMP_AUTH_RES);
1400 	cdesc->auth_op = ICP_QAT_HW_AUTH_VERIFY;
1402 	ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1403 		ICP_QAT_FW_LA_RET_AUTH_RES);
1404 	ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1405 		ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
1406 	cdesc->auth_op = ICP_QAT_HW_AUTH_GENERATE;
1410  * Setup the inner hash config
/* hash_offset records where the auth setup lands within the CD buffer */
1412 hash_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
1413 hash = (struct icp_qat_hw_auth_setup *)cdesc->cd_cur_ptr;
1414 hash->auth_config.reserved = 0;
1415 hash->auth_config.config =
1416 	ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE1,
1417 	cdesc->qat_hash_alg, digestsize);
/*
 * 3GPP algos (SNOW3G UIA2, KASUMI F9, ZUC EIA3) start the counter at 0;
 * all others preload the byte-swapped hash block size.
 */
1419 if (cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2
1420 	|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_KASUMI_F9
1421 	|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3)
1422 	hash->auth_counter.counter = 0;
1424 	hash->auth_counter.counter = rte_bswap32(
1425 		qat_hash_get_block_size(cdesc->qat_hash_alg));
1427 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_auth_setup);
1430  * cd_cur_ptr now points at the state1 information.
/*
 * Per-algorithm: fill state1 (and state2 where applicable) -- HMAC-style
 * algos run an ipad/opad precompute from the auth key.
 */
1432 switch (cdesc->qat_hash_alg) {
1433 case ICP_QAT_HW_AUTH_ALGO_SHA1:
1434 	if (qat_alg_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA1,
1435 		authkey, authkeylen, cdesc->cd_cur_ptr, &state1_size)) {
1436 		PMD_DRV_LOG(ERR, "(SHA)precompute failed");
1439 	state2_size = RTE_ALIGN_CEIL(ICP_QAT_HW_SHA1_STATE2_SZ, 8);
1441 case ICP_QAT_HW_AUTH_ALGO_SHA224:
1442 	if (qat_alg_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA224,
1443 		authkey, authkeylen, cdesc->cd_cur_ptr, &state1_size)) {
1444 		PMD_DRV_LOG(ERR, "(SHA)precompute failed");
1447 	state2_size = ICP_QAT_HW_SHA224_STATE2_SZ;
1449 case ICP_QAT_HW_AUTH_ALGO_SHA256:
1450 	if (qat_alg_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA256,
1451 		authkey, authkeylen, cdesc->cd_cur_ptr, &state1_size)) {
1452 		PMD_DRV_LOG(ERR, "(SHA)precompute failed");
1455 	state2_size = ICP_QAT_HW_SHA256_STATE2_SZ;
1457 case ICP_QAT_HW_AUTH_ALGO_SHA384:
1458 	if (qat_alg_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA384,
1459 		authkey, authkeylen, cdesc->cd_cur_ptr, &state1_size)) {
1460 		PMD_DRV_LOG(ERR, "(SHA)precompute failed");
1463 	state2_size = ICP_QAT_HW_SHA384_STATE2_SZ;
1465 case ICP_QAT_HW_AUTH_ALGO_SHA512:
1466 	if (qat_alg_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA512,
1467 		authkey, authkeylen, cdesc->cd_cur_ptr, &state1_size)) {
1468 		PMD_DRV_LOG(ERR, "(SHA)precompute failed");
1471 	state2_size = ICP_QAT_HW_SHA512_STATE2_SZ;
1473 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
1474 	state1_size = ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1475 	if (qat_alg_do_precomputes(ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC,
1476 		authkey, authkeylen, cdesc->cd_cur_ptr + state1_size,
1478 		PMD_DRV_LOG(ERR, "(XCBC)precompute failed");
/* GCM/GMAC: precompute the hash subkey H into state2, then record AAD size */
1482 case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
1483 case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
1484 	qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
1485 	state1_size = ICP_QAT_HW_GALOIS_128_STATE1_SZ;
1486 	if (qat_alg_do_precomputes(cdesc->qat_hash_alg,
1487 		authkey, authkeylen, cdesc->cd_cur_ptr + state1_size,
1489 		PMD_DRV_LOG(ERR, "(GCM)precompute failed");
1493 	 * Write (the length of AAD) into bytes 16-19 of state2
1494 	 * in big-endian format. This field is 8 bytes
1496 	auth_param->u2.aad_sz =
1497 		RTE_ALIGN_CEIL(aad_length, 16);
1498 	auth_param->hash_state_sz = (auth_param->u2.aad_sz) >> 3;
1500 	aad_len = (uint32_t *)(cdesc->cd_cur_ptr +
1501 		ICP_QAT_HW_GALOIS_128_STATE1_SZ +
1502 		ICP_QAT_HW_GALOIS_H_SZ);
1503 	*aad_len = rte_bswap32(aad_length);
1504 	cdesc->aad_len = aad_length;
/* SNOW3G UIA2: a cipher config + key + zeroed IV is appended after state2 */
1506 case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
1507 	qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
1508 	state1_size = qat_hash_get_state1_size(
1509 		ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2);
1510 	state2_size = ICP_QAT_HW_SNOW_3G_UIA2_STATE2_SZ;
1511 	memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
1513 	cipherconfig = (struct icp_qat_hw_cipher_algo_blk *)
1514 		(cdesc->cd_cur_ptr + state1_size + state2_size);
1515 	cipherconfig->cipher_config.val =
1516 	ICP_QAT_HW_CIPHER_CONFIG_BUILD(ICP_QAT_HW_CIPHER_ECB_MODE,
1517 		ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2,
1518 		ICP_QAT_HW_CIPHER_KEY_CONVERT,
1519 		ICP_QAT_HW_CIPHER_ENCRYPT);
1520 	memcpy(cipherconfig->key, authkey, authkeylen);
1521 	memset(cipherconfig->key + authkeylen,
1522 		0, ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ);
1523 	cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1524 		authkeylen + ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
1525 	auth_param->hash_state_sz = ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
/* ZUC EIA3: uses MODE0 (overrides the MODE1 config set above); GEN2+ only */
1527 case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
1528 	hash->auth_config.config =
1529 		ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE0,
1530 		cdesc->qat_hash_alg, digestsize);
1531 	qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
1532 	state1_size = qat_hash_get_state1_size(
1533 		ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3);
1534 	state2_size = ICP_QAT_HW_ZUC_3G_EIA3_STATE2_SZ;
1535 	memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size
1536 		+ ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ);
1538 	memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1539 	cdesc->cd_cur_ptr += state1_size + state2_size
1540 		+ ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
1541 	auth_param->hash_state_sz = ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
1542 	cdesc->min_qat_dev_gen = QAT_GEN2;
1545 case ICP_QAT_HW_AUTH_ALGO_MD5:
1546 	if (qat_alg_do_precomputes(ICP_QAT_HW_AUTH_ALGO_MD5,
1547 		authkey, authkeylen, cdesc->cd_cur_ptr,
1549 		PMD_DRV_LOG(ERR, "(MD5)precompute failed");
1552 	state2_size = ICP_QAT_HW_MD5_STATE2_SZ;
1554 case ICP_QAT_HW_AUTH_ALGO_NULL:
1555 	state1_size = qat_hash_get_state1_size(
1556 		ICP_QAT_HW_AUTH_ALGO_NULL);
1557 	state2_size = ICP_QAT_HW_NULL_STATE2_SZ;
/* CCM: AAD length is padded with B0 + length-info blocks per the spec */
1559 case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
1560 	qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
1561 	state1_size = qat_hash_get_state1_size(
1562 		ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC);
1563 	state2_size = ICP_QAT_HW_AES_CBC_MAC_KEY_SZ +
1564 		ICP_QAT_HW_AES_CCM_CBC_E_CTR0_SZ;
1566 	if (aad_length > 0) {
1567 		aad_length += ICP_QAT_HW_CCM_AAD_B0_LEN +
1568 		ICP_QAT_HW_CCM_AAD_LEN_INFO;
1569 		auth_param->u2.aad_sz =
1570 		RTE_ALIGN_CEIL(aad_length,
1571 		ICP_QAT_HW_CCM_AAD_ALIGNMENT);
1573 		auth_param->u2.aad_sz = ICP_QAT_HW_CCM_AAD_B0_LEN;
1575 	cdesc->aad_len = aad_length;
1576 	hash->auth_counter.counter = 0;
1578 	hash_cd_ctrl->outer_prefix_sz = digestsize;
1579 	auth_param->hash_state_sz = digestsize;
1581 	memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1583 case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
1584 	state1_size = qat_hash_get_state1_size(
1585 		ICP_QAT_HW_AUTH_ALGO_KASUMI_F9);
1586 	state2_size = ICP_QAT_HW_KASUMI_F9_STATE2_SZ;
1587 	memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
	/* NOTE(review): pTempKey is not declared in this listing --
	 * presumably 'uint32_t *pTempKey' was elided above; confirm. */
1588 	pTempKey = (uint32_t *)(cdesc->cd_cur_ptr + state1_size
1591 	 * The Inner Hash Initial State2 block must contain IK
1592 	 * (Initialisation Key), followed by IK XOR-ed with KM
1593 	 * (Key Modifier): IK||(IK^KM).
1595 	/* write the auth key */
1596 	memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1597 	/* initialise temp key with auth key */
1598 	memcpy(pTempKey, authkey, authkeylen);
1599 	/* XOR Key with KASUMI F9 key modifier at 4 bytes level */
1600 	for (wordIndex = 0; wordIndex < (authkeylen >> 2); wordIndex++)
1601 		pTempKey[wordIndex] ^= KASUMI_F9_KEY_MODIFIER_4_BYTES;
1604 	PMD_DRV_LOG(ERR, "Invalid HASH alg %u", cdesc->qat_hash_alg);
1608 /* Request template setup */
1609 qat_alg_init_common_hdr(header, qat_proto_flag);
1610 header->service_cmd_id = cdesc->qat_cmd;
1612 /* Auth CD config setup */
/* Offsets/sizes are expressed in 8-byte (quad-word) units, hence ">> 3" */
1613 hash_cd_ctrl->hash_cfg_offset = hash_offset >> 3;
1614 hash_cd_ctrl->hash_flags = ICP_QAT_FW_AUTH_HDR_FLAG_NO_NESTED;
1615 hash_cd_ctrl->inner_res_sz = digestsize;
1616 hash_cd_ctrl->final_sz = digestsize;
1617 hash_cd_ctrl->inner_state1_sz = state1_size;
1618 auth_param->auth_res_sz = digestsize;
1620 hash_cd_ctrl->inner_state2_sz = state2_size;
1621 hash_cd_ctrl->inner_state2_offset = hash_cd_ctrl->hash_cfg_offset +
1622 		((sizeof(struct icp_qat_hw_auth_setup) +
1623 		RTE_ALIGN_CEIL(hash_cd_ctrl->inner_state1_sz, 8))
/* Finalize: total CD size rounded up to quad-words for the firmware */
1626 cdesc->cd_cur_ptr += state1_size + state2_size;
1627 cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
1629 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1630 cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;
1635 int qat_alg_validate_aes_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1638 case ICP_QAT_HW_AES_128_KEY_SZ:
1639 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
1641 case ICP_QAT_HW_AES_192_KEY_SZ:
1642 *alg = ICP_QAT_HW_CIPHER_ALGO_AES192;
1644 case ICP_QAT_HW_AES_256_KEY_SZ:
1645 *alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
1653 int qat_alg_validate_aes_docsisbpi_key(int key_len,
1654 enum icp_qat_hw_cipher_algo *alg)
1657 case ICP_QAT_HW_AES_128_KEY_SZ:
1658 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
1666 int qat_alg_validate_snow3g_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1669 case ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ:
1670 *alg = ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2;
1678 int qat_alg_validate_kasumi_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1681 case ICP_QAT_HW_KASUMI_KEY_SZ:
1682 *alg = ICP_QAT_HW_CIPHER_ALGO_KASUMI;
1690 int qat_alg_validate_des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1693 case ICP_QAT_HW_DES_KEY_SZ:
1694 *alg = ICP_QAT_HW_CIPHER_ALGO_DES;
1702 int qat_alg_validate_3des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1705 case QAT_3DES_KEY_SZ_OPT1:
1706 case QAT_3DES_KEY_SZ_OPT2:
1707 *alg = ICP_QAT_HW_CIPHER_ALGO_3DES;
1715 int qat_alg_validate_zuc_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1718 case ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ:
1719 *alg = ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3;