1 /* SPDX-License-Identifier: (BSD-3-Clause OR GPL-2.0)
2 * Copyright(c) 2015-2018 Intel Corporation
4 #include <rte_memcpy.h>
5 #include <rte_common.h>
6 #include <rte_spinlock.h>
7 #include <rte_byteorder.h>
9 #include <rte_malloc.h>
10 #include <rte_crypto_sym.h>
13 #include "qat_device.h"
15 #include <openssl/sha.h> /* Needed to calculate pre-compute values */
16 #include <openssl/aes.h> /* Needed to calculate pre-compute values */
17 #include <openssl/md5.h> /* Needed to calculate pre-compute values */
18 #include <openssl/evp.h>
20 #include "qat_sym_session.h"
22 /** Frees a context previously created
23 * Depends on openssl libcrypto
26 bpi_cipher_ctx_free(void *bpi_ctx)
29 EVP_CIPHER_CTX_free((EVP_CIPHER_CTX *)bpi_ctx);
33 bpi_cipher_ctx_init(enum rte_crypto_cipher_algorithm cryptodev_algo,
34 enum rte_crypto_cipher_operation direction __rte_unused,
35 uint8_t *key, void **ctx)
37 const EVP_CIPHER *algo = NULL;
39 *ctx = EVP_CIPHER_CTX_new();
46 if (cryptodev_algo == RTE_CRYPTO_CIPHER_DES_DOCSISBPI)
49 algo = EVP_aes_128_ecb();
51 /* IV will be ECB encrypted whether direction is encrypt or decrypt*/
52 if (EVP_EncryptInit_ex(*ctx, algo, NULL, key, 0) != 1) {
61 EVP_CIPHER_CTX_free(*ctx);
66 qat_is_cipher_alg_supported(enum rte_crypto_cipher_algorithm algo,
67 struct qat_pmd_private *internals)
70 const struct rte_cryptodev_capabilities *capability;
72 while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
73 RTE_CRYPTO_OP_TYPE_UNDEFINED) {
74 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
77 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_CIPHER)
80 if (capability->sym.cipher.algo == algo)
87 qat_is_auth_alg_supported(enum rte_crypto_auth_algorithm algo,
88 struct qat_pmd_private *internals)
91 const struct rte_cryptodev_capabilities *capability;
93 while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
94 RTE_CRYPTO_OP_TYPE_UNDEFINED) {
95 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
98 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_AUTH)
101 if (capability->sym.auth.algo == algo)
108 qat_sym_session_clear(struct rte_cryptodev *dev,
109 struct rte_cryptodev_sym_session *sess)
111 PMD_INIT_FUNC_TRACE();
112 uint8_t index = dev->driver_id;
113 void *sess_priv = get_session_private_data(sess, index);
114 struct qat_session *s = (struct qat_session *)sess_priv;
118 bpi_cipher_ctx_free(s->bpi_ctx);
119 memset(s, 0, qat_sym_session_get_private_size(dev));
120 struct rte_mempool *sess_mp = rte_mempool_from_obj(sess_priv);
122 set_session_private_data(sess, index, NULL);
123 rte_mempool_put(sess_mp, sess_priv);
128 qat_get_cmd_id(const struct rte_crypto_sym_xform *xform)
131 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER && xform->next == NULL)
132 return ICP_QAT_FW_LA_CMD_CIPHER;
134 /* Authentication Only */
135 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH && xform->next == NULL)
136 return ICP_QAT_FW_LA_CMD_AUTH;
139 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
140 /* AES-GCM and AES-CCM works with different direction
141 * GCM first encrypts and generate hash where AES-CCM
142 * first generate hash and encrypts. Similar relation
143 * applies to decryption.
145 if (xform->aead.op == RTE_CRYPTO_AEAD_OP_ENCRYPT)
146 if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
147 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
149 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
151 if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
152 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
154 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
157 if (xform->next == NULL)
160 /* Cipher then Authenticate */
161 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
162 xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
163 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
165 /* Authenticate then Cipher */
166 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH &&
167 xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
168 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
173 static struct rte_crypto_auth_xform *
174 qat_get_auth_xform(struct rte_crypto_sym_xform *xform)
177 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH)
186 static struct rte_crypto_cipher_xform *
187 qat_get_cipher_xform(struct rte_crypto_sym_xform *xform)
190 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
191 return &xform->cipher;
200 qat_sym_session_configure_cipher(struct rte_cryptodev *dev,
201 struct rte_crypto_sym_xform *xform,
202 struct qat_session *session)
204 struct qat_pmd_private *internals = dev->data->dev_private;
205 struct rte_crypto_cipher_xform *cipher_xform = NULL;
208 /* Get cipher xform from crypto xform chain */
209 cipher_xform = qat_get_cipher_xform(xform);
211 session->cipher_iv.offset = cipher_xform->iv.offset;
212 session->cipher_iv.length = cipher_xform->iv.length;
214 switch (cipher_xform->algo) {
215 case RTE_CRYPTO_CIPHER_AES_CBC:
216 if (qat_sym_validate_aes_key(cipher_xform->key.length,
217 &session->qat_cipher_alg) != 0) {
218 PMD_DRV_LOG(ERR, "Invalid AES cipher key size");
222 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
224 case RTE_CRYPTO_CIPHER_AES_CTR:
225 if (qat_sym_validate_aes_key(cipher_xform->key.length,
226 &session->qat_cipher_alg) != 0) {
227 PMD_DRV_LOG(ERR, "Invalid AES cipher key size");
231 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
233 case RTE_CRYPTO_CIPHER_SNOW3G_UEA2:
234 if (qat_sym_validate_snow3g_key(cipher_xform->key.length,
235 &session->qat_cipher_alg) != 0) {
236 PMD_DRV_LOG(ERR, "Invalid SNOW 3G cipher key size");
240 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
242 case RTE_CRYPTO_CIPHER_NULL:
243 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
245 case RTE_CRYPTO_CIPHER_KASUMI_F8:
246 if (qat_sym_validate_kasumi_key(cipher_xform->key.length,
247 &session->qat_cipher_alg) != 0) {
248 PMD_DRV_LOG(ERR, "Invalid KASUMI cipher key size");
252 session->qat_mode = ICP_QAT_HW_CIPHER_F8_MODE;
254 case RTE_CRYPTO_CIPHER_3DES_CBC:
255 if (qat_sym_validate_3des_key(cipher_xform->key.length,
256 &session->qat_cipher_alg) != 0) {
257 PMD_DRV_LOG(ERR, "Invalid 3DES cipher key size");
261 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
263 case RTE_CRYPTO_CIPHER_DES_CBC:
264 if (qat_sym_validate_des_key(cipher_xform->key.length,
265 &session->qat_cipher_alg) != 0) {
266 PMD_DRV_LOG(ERR, "Invalid DES cipher key size");
270 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
272 case RTE_CRYPTO_CIPHER_3DES_CTR:
273 if (qat_sym_validate_3des_key(cipher_xform->key.length,
274 &session->qat_cipher_alg) != 0) {
275 PMD_DRV_LOG(ERR, "Invalid 3DES cipher key size");
279 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
281 case RTE_CRYPTO_CIPHER_DES_DOCSISBPI:
282 ret = bpi_cipher_ctx_init(
285 cipher_xform->key.data,
288 PMD_DRV_LOG(ERR, "failed to create DES BPI ctx");
291 if (qat_sym_validate_des_key(cipher_xform->key.length,
292 &session->qat_cipher_alg) != 0) {
293 PMD_DRV_LOG(ERR, "Invalid DES cipher key size");
297 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
299 case RTE_CRYPTO_CIPHER_AES_DOCSISBPI:
300 ret = bpi_cipher_ctx_init(
303 cipher_xform->key.data,
306 PMD_DRV_LOG(ERR, "failed to create AES BPI ctx");
309 if (qat_sym_validate_aes_docsisbpi_key(cipher_xform->key.length,
310 &session->qat_cipher_alg) != 0) {
311 PMD_DRV_LOG(ERR, "Invalid AES DOCSISBPI key size");
315 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
317 case RTE_CRYPTO_CIPHER_ZUC_EEA3:
318 if (!qat_is_cipher_alg_supported(
319 cipher_xform->algo, internals)) {
320 PMD_DRV_LOG(ERR, "%s not supported on this device",
321 rte_crypto_cipher_algorithm_strings
322 [cipher_xform->algo]);
326 if (qat_sym_validate_zuc_key(cipher_xform->key.length,
327 &session->qat_cipher_alg) != 0) {
328 PMD_DRV_LOG(ERR, "Invalid ZUC cipher key size");
332 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
334 case RTE_CRYPTO_CIPHER_3DES_ECB:
335 case RTE_CRYPTO_CIPHER_AES_ECB:
336 case RTE_CRYPTO_CIPHER_AES_F8:
337 case RTE_CRYPTO_CIPHER_AES_XTS:
338 case RTE_CRYPTO_CIPHER_ARC4:
339 PMD_DRV_LOG(ERR, "Crypto QAT PMD: Unsupported Cipher alg %u",
344 PMD_DRV_LOG(ERR, "Crypto: Undefined Cipher specified %u\n",
350 if (cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
351 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
353 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
355 if (qat_sym_session_aead_create_cd_cipher(session,
356 cipher_xform->key.data,
357 cipher_xform->key.length)) {
365 if (session->bpi_ctx) {
366 bpi_cipher_ctx_free(session->bpi_ctx);
367 session->bpi_ctx = NULL;
373 qat_sym_session_configure(struct rte_cryptodev *dev,
374 struct rte_crypto_sym_xform *xform,
375 struct rte_cryptodev_sym_session *sess,
376 struct rte_mempool *mempool)
378 void *sess_private_data;
381 if (rte_mempool_get(mempool, &sess_private_data)) {
383 "Couldn't get object from session mempool");
387 ret = qat_sym_session_set_parameters(dev, xform, sess_private_data);
390 "Crypto QAT PMD: failed to configure session parameters");
392 /* Return session to mempool */
393 rte_mempool_put(mempool, sess_private_data);
397 set_session_private_data(sess, dev->driver_id,
404 qat_sym_session_set_parameters(struct rte_cryptodev *dev,
405 struct rte_crypto_sym_xform *xform, void *session_private)
407 struct qat_session *session = session_private;
411 PMD_INIT_FUNC_TRACE();
413 /* Set context descriptor physical address */
414 session->cd_paddr = rte_mempool_virt2iova(session) +
415 offsetof(struct qat_session, cd);
417 session->min_qat_dev_gen = QAT_GEN1;
419 /* Get requested QAT command id */
420 qat_cmd_id = qat_get_cmd_id(xform);
421 if (qat_cmd_id < 0 || qat_cmd_id >= ICP_QAT_FW_LA_CMD_DELIMITER) {
422 PMD_DRV_LOG(ERR, "Unsupported xform chain requested");
425 session->qat_cmd = (enum icp_qat_fw_la_cmd_id)qat_cmd_id;
426 switch (session->qat_cmd) {
427 case ICP_QAT_FW_LA_CMD_CIPHER:
428 ret = qat_sym_session_configure_cipher(dev, xform, session);
432 case ICP_QAT_FW_LA_CMD_AUTH:
433 ret = qat_sym_session_configure_auth(dev, xform, session);
437 case ICP_QAT_FW_LA_CMD_CIPHER_HASH:
438 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
439 ret = qat_sym_session_configure_aead(xform,
444 ret = qat_sym_session_configure_cipher(dev,
448 ret = qat_sym_session_configure_auth(dev,
454 case ICP_QAT_FW_LA_CMD_HASH_CIPHER:
455 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
456 ret = qat_sym_session_configure_aead(xform,
461 ret = qat_sym_session_configure_auth(dev,
465 ret = qat_sym_session_configure_cipher(dev,
471 case ICP_QAT_FW_LA_CMD_TRNG_GET_RANDOM:
472 case ICP_QAT_FW_LA_CMD_TRNG_TEST:
473 case ICP_QAT_FW_LA_CMD_SSL3_KEY_DERIVE:
474 case ICP_QAT_FW_LA_CMD_TLS_V1_1_KEY_DERIVE:
475 case ICP_QAT_FW_LA_CMD_TLS_V1_2_KEY_DERIVE:
476 case ICP_QAT_FW_LA_CMD_MGF1:
477 case ICP_QAT_FW_LA_CMD_AUTH_PRE_COMP:
478 case ICP_QAT_FW_LA_CMD_CIPHER_PRE_COMP:
479 case ICP_QAT_FW_LA_CMD_DELIMITER:
480 PMD_DRV_LOG(ERR, "Unsupported Service %u",
484 PMD_DRV_LOG(ERR, "Unsupported Service %u",
493 qat_sym_session_configure_auth(struct rte_cryptodev *dev,
494 struct rte_crypto_sym_xform *xform,
495 struct qat_session *session)
497 struct rte_crypto_auth_xform *auth_xform = qat_get_auth_xform(xform);
498 struct qat_pmd_private *internals = dev->data->dev_private;
499 uint8_t *key_data = auth_xform->key.data;
500 uint8_t key_length = auth_xform->key.length;
502 switch (auth_xform->algo) {
503 case RTE_CRYPTO_AUTH_SHA1_HMAC:
504 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA1;
506 case RTE_CRYPTO_AUTH_SHA224_HMAC:
507 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA224;
509 case RTE_CRYPTO_AUTH_SHA256_HMAC:
510 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA256;
512 case RTE_CRYPTO_AUTH_SHA384_HMAC:
513 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA384;
515 case RTE_CRYPTO_AUTH_SHA512_HMAC:
516 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA512;
518 case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
519 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
521 case RTE_CRYPTO_AUTH_AES_GMAC:
522 if (qat_sym_validate_aes_key(auth_xform->key.length,
523 &session->qat_cipher_alg) != 0) {
524 PMD_DRV_LOG(ERR, "Invalid AES key size");
527 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
528 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
531 case RTE_CRYPTO_AUTH_SNOW3G_UIA2:
532 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2;
534 case RTE_CRYPTO_AUTH_MD5_HMAC:
535 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_MD5;
537 case RTE_CRYPTO_AUTH_NULL:
538 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_NULL;
540 case RTE_CRYPTO_AUTH_KASUMI_F9:
541 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_KASUMI_F9;
543 case RTE_CRYPTO_AUTH_ZUC_EIA3:
544 if (!qat_is_auth_alg_supported(auth_xform->algo, internals)) {
545 PMD_DRV_LOG(ERR, "%s not supported on this device",
546 rte_crypto_auth_algorithm_strings
550 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3;
552 case RTE_CRYPTO_AUTH_SHA1:
553 case RTE_CRYPTO_AUTH_SHA256:
554 case RTE_CRYPTO_AUTH_SHA512:
555 case RTE_CRYPTO_AUTH_SHA224:
556 case RTE_CRYPTO_AUTH_SHA384:
557 case RTE_CRYPTO_AUTH_MD5:
558 case RTE_CRYPTO_AUTH_AES_CMAC:
559 case RTE_CRYPTO_AUTH_AES_CBC_MAC:
560 PMD_DRV_LOG(ERR, "Crypto: Unsupported hash alg %u",
564 PMD_DRV_LOG(ERR, "Crypto: Undefined Hash algo %u specified",
569 session->auth_iv.offset = auth_xform->iv.offset;
570 session->auth_iv.length = auth_xform->iv.length;
572 if (auth_xform->algo == RTE_CRYPTO_AUTH_AES_GMAC) {
573 if (auth_xform->op == RTE_CRYPTO_AUTH_OP_GENERATE) {
574 session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER_HASH;
575 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
577 * It needs to create cipher desc content first,
578 * then authentication
581 if (qat_sym_session_aead_create_cd_cipher(session,
582 auth_xform->key.data,
583 auth_xform->key.length))
586 if (qat_sym_session_aead_create_cd_auth(session,
590 auth_xform->digest_length,
594 session->qat_cmd = ICP_QAT_FW_LA_CMD_HASH_CIPHER;
595 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
597 * It needs to create authentication desc content first,
601 if (qat_sym_session_aead_create_cd_auth(session,
605 auth_xform->digest_length,
609 if (qat_sym_session_aead_create_cd_cipher(session,
610 auth_xform->key.data,
611 auth_xform->key.length))
614 /* Restore to authentication only only */
615 session->qat_cmd = ICP_QAT_FW_LA_CMD_AUTH;
617 if (qat_sym_session_aead_create_cd_auth(session,
621 auth_xform->digest_length,
626 session->digest_length = auth_xform->digest_length;
631 qat_sym_session_configure_aead(struct rte_crypto_sym_xform *xform,
632 struct qat_session *session)
634 struct rte_crypto_aead_xform *aead_xform = &xform->aead;
635 enum rte_crypto_auth_operation crypto_operation;
638 * Store AEAD IV parameters as cipher IV,
639 * to avoid unnecessary memory usage
641 session->cipher_iv.offset = xform->aead.iv.offset;
642 session->cipher_iv.length = xform->aead.iv.length;
644 switch (aead_xform->algo) {
645 case RTE_CRYPTO_AEAD_AES_GCM:
646 if (qat_sym_validate_aes_key(aead_xform->key.length,
647 &session->qat_cipher_alg) != 0) {
648 PMD_DRV_LOG(ERR, "Invalid AES key size");
651 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
652 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
654 case RTE_CRYPTO_AEAD_AES_CCM:
655 if (qat_sym_validate_aes_key(aead_xform->key.length,
656 &session->qat_cipher_alg) != 0) {
657 PMD_DRV_LOG(ERR, "Invalid AES key size");
660 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
661 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC;
664 PMD_DRV_LOG(ERR, "Crypto: Undefined AEAD specified %u\n",
669 if ((aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT &&
670 aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) ||
671 (aead_xform->op == RTE_CRYPTO_AEAD_OP_DECRYPT &&
672 aead_xform->algo == RTE_CRYPTO_AEAD_AES_CCM)) {
673 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
675 * It needs to create cipher desc content first,
676 * then authentication
678 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
679 RTE_CRYPTO_AUTH_OP_GENERATE : RTE_CRYPTO_AUTH_OP_VERIFY;
681 if (qat_sym_session_aead_create_cd_cipher(session,
682 aead_xform->key.data,
683 aead_xform->key.length))
686 if (qat_sym_session_aead_create_cd_auth(session,
687 aead_xform->key.data,
688 aead_xform->key.length,
689 aead_xform->aad_length,
690 aead_xform->digest_length,
694 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
696 * It needs to create authentication desc content first,
700 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
701 RTE_CRYPTO_AUTH_OP_VERIFY : RTE_CRYPTO_AUTH_OP_GENERATE;
703 if (qat_sym_session_aead_create_cd_auth(session,
704 aead_xform->key.data,
705 aead_xform->key.length,
706 aead_xform->aad_length,
707 aead_xform->digest_length,
711 if (qat_sym_session_aead_create_cd_cipher(session,
712 aead_xform->key.data,
713 aead_xform->key.length))
717 session->digest_length = aead_xform->digest_length;
721 unsigned int qat_sym_session_get_private_size(
722 struct rte_cryptodev *dev __rte_unused)
724 return RTE_ALIGN_CEIL(sizeof(struct qat_session), 8);
727 /* returns block size in bytes per cipher algo */
728 int qat_cipher_get_block_size(enum icp_qat_hw_cipher_algo qat_cipher_alg)
730 switch (qat_cipher_alg) {
731 case ICP_QAT_HW_CIPHER_ALGO_DES:
732 return ICP_QAT_HW_DES_BLK_SZ;
733 case ICP_QAT_HW_CIPHER_ALGO_3DES:
734 return ICP_QAT_HW_3DES_BLK_SZ;
735 case ICP_QAT_HW_CIPHER_ALGO_AES128:
736 case ICP_QAT_HW_CIPHER_ALGO_AES192:
737 case ICP_QAT_HW_CIPHER_ALGO_AES256:
738 return ICP_QAT_HW_AES_BLK_SZ;
740 PMD_DRV_LOG(ERR, "invalid block cipher alg %u", qat_cipher_alg);
747 * Returns size in bytes per hash algo for state1 size field in cd_ctrl
748 * This is digest size rounded up to nearest quadword
750 static int qat_hash_get_state1_size(enum icp_qat_hw_auth_algo qat_hash_alg)
752 switch (qat_hash_alg) {
753 case ICP_QAT_HW_AUTH_ALGO_SHA1:
754 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA1_STATE1_SZ,
755 QAT_HW_DEFAULT_ALIGNMENT);
756 case ICP_QAT_HW_AUTH_ALGO_SHA224:
757 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA224_STATE1_SZ,
758 QAT_HW_DEFAULT_ALIGNMENT);
759 case ICP_QAT_HW_AUTH_ALGO_SHA256:
760 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA256_STATE1_SZ,
761 QAT_HW_DEFAULT_ALIGNMENT);
762 case ICP_QAT_HW_AUTH_ALGO_SHA384:
763 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA384_STATE1_SZ,
764 QAT_HW_DEFAULT_ALIGNMENT);
765 case ICP_QAT_HW_AUTH_ALGO_SHA512:
766 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
767 QAT_HW_DEFAULT_ALIGNMENT);
768 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
769 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ,
770 QAT_HW_DEFAULT_ALIGNMENT);
771 case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
772 case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
773 return QAT_HW_ROUND_UP(ICP_QAT_HW_GALOIS_128_STATE1_SZ,
774 QAT_HW_DEFAULT_ALIGNMENT);
775 case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
776 return QAT_HW_ROUND_UP(ICP_QAT_HW_ZUC_3G_EIA3_STATE1_SZ,
777 QAT_HW_DEFAULT_ALIGNMENT);
778 case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
779 return QAT_HW_ROUND_UP(ICP_QAT_HW_SNOW_3G_UIA2_STATE1_SZ,
780 QAT_HW_DEFAULT_ALIGNMENT);
781 case ICP_QAT_HW_AUTH_ALGO_MD5:
782 return QAT_HW_ROUND_UP(ICP_QAT_HW_MD5_STATE1_SZ,
783 QAT_HW_DEFAULT_ALIGNMENT);
784 case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
785 return QAT_HW_ROUND_UP(ICP_QAT_HW_KASUMI_F9_STATE1_SZ,
786 QAT_HW_DEFAULT_ALIGNMENT);
787 case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
788 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_CBC_MAC_STATE1_SZ,
789 QAT_HW_DEFAULT_ALIGNMENT);
790 case ICP_QAT_HW_AUTH_ALGO_NULL:
791 return QAT_HW_ROUND_UP(ICP_QAT_HW_NULL_STATE1_SZ,
792 QAT_HW_DEFAULT_ALIGNMENT);
793 case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
794 /* return maximum state1 size in this case */
795 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
796 QAT_HW_DEFAULT_ALIGNMENT);
798 PMD_DRV_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
804 /* returns digest size in bytes per hash algo */
805 static int qat_hash_get_digest_size(enum icp_qat_hw_auth_algo qat_hash_alg)
807 switch (qat_hash_alg) {
808 case ICP_QAT_HW_AUTH_ALGO_SHA1:
809 return ICP_QAT_HW_SHA1_STATE1_SZ;
810 case ICP_QAT_HW_AUTH_ALGO_SHA224:
811 return ICP_QAT_HW_SHA224_STATE1_SZ;
812 case ICP_QAT_HW_AUTH_ALGO_SHA256:
813 return ICP_QAT_HW_SHA256_STATE1_SZ;
814 case ICP_QAT_HW_AUTH_ALGO_SHA384:
815 return ICP_QAT_HW_SHA384_STATE1_SZ;
816 case ICP_QAT_HW_AUTH_ALGO_SHA512:
817 return ICP_QAT_HW_SHA512_STATE1_SZ;
818 case ICP_QAT_HW_AUTH_ALGO_MD5:
819 return ICP_QAT_HW_MD5_STATE1_SZ;
820 case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
821 /* return maximum digest size in this case */
822 return ICP_QAT_HW_SHA512_STATE1_SZ;
824 PMD_DRV_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
830 /* returns block size in byes per hash algo */
831 static int qat_hash_get_block_size(enum icp_qat_hw_auth_algo qat_hash_alg)
833 switch (qat_hash_alg) {
834 case ICP_QAT_HW_AUTH_ALGO_SHA1:
836 case ICP_QAT_HW_AUTH_ALGO_SHA224:
837 return SHA256_CBLOCK;
838 case ICP_QAT_HW_AUTH_ALGO_SHA256:
839 return SHA256_CBLOCK;
840 case ICP_QAT_HW_AUTH_ALGO_SHA384:
841 return SHA512_CBLOCK;
842 case ICP_QAT_HW_AUTH_ALGO_SHA512:
843 return SHA512_CBLOCK;
844 case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
846 case ICP_QAT_HW_AUTH_ALGO_MD5:
848 case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
849 /* return maximum block size in this case */
850 return SHA512_CBLOCK;
852 PMD_DRV_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
858 static int partial_hash_sha1(uint8_t *data_in, uint8_t *data_out)
862 if (!SHA1_Init(&ctx))
864 SHA1_Transform(&ctx, data_in);
865 rte_memcpy(data_out, &ctx, SHA_DIGEST_LENGTH);
869 static int partial_hash_sha224(uint8_t *data_in, uint8_t *data_out)
873 if (!SHA224_Init(&ctx))
875 SHA256_Transform(&ctx, data_in);
876 rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
880 static int partial_hash_sha256(uint8_t *data_in, uint8_t *data_out)
884 if (!SHA256_Init(&ctx))
886 SHA256_Transform(&ctx, data_in);
887 rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
891 static int partial_hash_sha384(uint8_t *data_in, uint8_t *data_out)
895 if (!SHA384_Init(&ctx))
897 SHA512_Transform(&ctx, data_in);
898 rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
902 static int partial_hash_sha512(uint8_t *data_in, uint8_t *data_out)
906 if (!SHA512_Init(&ctx))
908 SHA512_Transform(&ctx, data_in);
909 rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
913 static int partial_hash_md5(uint8_t *data_in, uint8_t *data_out)
919 MD5_Transform(&ctx, data_in);
920 rte_memcpy(data_out, &ctx, MD5_DIGEST_LENGTH);
925 static int partial_hash_compute(enum icp_qat_hw_auth_algo hash_alg,
930 uint8_t digest[qat_hash_get_digest_size(
931 ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
932 uint32_t *hash_state_out_be32;
933 uint64_t *hash_state_out_be64;
936 PMD_INIT_FUNC_TRACE();
937 digest_size = qat_hash_get_digest_size(hash_alg);
938 if (digest_size <= 0)
941 hash_state_out_be32 = (uint32_t *)data_out;
942 hash_state_out_be64 = (uint64_t *)data_out;
945 case ICP_QAT_HW_AUTH_ALGO_SHA1:
946 if (partial_hash_sha1(data_in, digest))
948 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
949 *hash_state_out_be32 =
950 rte_bswap32(*(((uint32_t *)digest)+i));
952 case ICP_QAT_HW_AUTH_ALGO_SHA224:
953 if (partial_hash_sha224(data_in, digest))
955 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
956 *hash_state_out_be32 =
957 rte_bswap32(*(((uint32_t *)digest)+i));
959 case ICP_QAT_HW_AUTH_ALGO_SHA256:
960 if (partial_hash_sha256(data_in, digest))
962 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
963 *hash_state_out_be32 =
964 rte_bswap32(*(((uint32_t *)digest)+i));
966 case ICP_QAT_HW_AUTH_ALGO_SHA384:
967 if (partial_hash_sha384(data_in, digest))
969 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
970 *hash_state_out_be64 =
971 rte_bswap64(*(((uint64_t *)digest)+i));
973 case ICP_QAT_HW_AUTH_ALGO_SHA512:
974 if (partial_hash_sha512(data_in, digest))
976 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
977 *hash_state_out_be64 =
978 rte_bswap64(*(((uint64_t *)digest)+i));
980 case ICP_QAT_HW_AUTH_ALGO_MD5:
981 if (partial_hash_md5(data_in, data_out))
985 PMD_DRV_LOG(ERR, "invalid hash alg %u", hash_alg);
991 #define HMAC_IPAD_VALUE 0x36
992 #define HMAC_OPAD_VALUE 0x5c
993 #define HASH_XCBC_PRECOMP_KEY_NUM 3
995 static int qat_sym_do_precomputes(enum icp_qat_hw_auth_algo hash_alg,
996 const uint8_t *auth_key,
997 uint16_t auth_keylen,
998 uint8_t *p_state_buf,
999 uint16_t *p_state_len)
1002 uint8_t ipad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1003 uint8_t opad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1006 PMD_INIT_FUNC_TRACE();
1007 if (hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC) {
1008 static uint8_t qat_aes_xcbc_key_seed[
1009 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ] = {
1010 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1011 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1012 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1013 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1014 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1015 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1019 uint8_t *out = p_state_buf;
1023 in = rte_zmalloc("working mem for key",
1024 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ, 16);
1026 PMD_DRV_LOG(ERR, "Failed to alloc memory");
1030 rte_memcpy(in, qat_aes_xcbc_key_seed,
1031 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1032 for (x = 0; x < HASH_XCBC_PRECOMP_KEY_NUM; x++) {
1033 if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1036 (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ));
1038 (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ),
1039 0, ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1042 AES_encrypt(in, out, &enc_key);
1043 in += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1044 out += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1046 *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
1047 rte_free(in - x*ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ);
1049 } else if ((hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_128) ||
1050 (hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_64)) {
1052 uint8_t *out = p_state_buf;
1055 memset(p_state_buf, 0, ICP_QAT_HW_GALOIS_H_SZ +
1056 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1057 ICP_QAT_HW_GALOIS_E_CTR0_SZ);
1058 in = rte_zmalloc("working mem for key",
1059 ICP_QAT_HW_GALOIS_H_SZ, 16);
1061 PMD_DRV_LOG(ERR, "Failed to alloc memory");
1065 memset(in, 0, ICP_QAT_HW_GALOIS_H_SZ);
1066 if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1070 AES_encrypt(in, out, &enc_key);
1071 *p_state_len = ICP_QAT_HW_GALOIS_H_SZ +
1072 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1073 ICP_QAT_HW_GALOIS_E_CTR0_SZ;
1078 block_size = qat_hash_get_block_size(hash_alg);
1079 if (block_size <= 0)
1081 /* init ipad and opad from key and xor with fixed values */
1082 memset(ipad, 0, block_size);
1083 memset(opad, 0, block_size);
1085 if (auth_keylen > (unsigned int)block_size) {
1086 PMD_DRV_LOG(ERR, "invalid keylen %u", auth_keylen);
1089 rte_memcpy(ipad, auth_key, auth_keylen);
1090 rte_memcpy(opad, auth_key, auth_keylen);
1092 for (i = 0; i < block_size; i++) {
1093 uint8_t *ipad_ptr = ipad + i;
1094 uint8_t *opad_ptr = opad + i;
1095 *ipad_ptr ^= HMAC_IPAD_VALUE;
1096 *opad_ptr ^= HMAC_OPAD_VALUE;
1099 /* do partial hash of ipad and copy to state1 */
1100 if (partial_hash_compute(hash_alg, ipad, p_state_buf)) {
1101 memset(ipad, 0, block_size);
1102 memset(opad, 0, block_size);
1103 PMD_DRV_LOG(ERR, "ipad precompute failed");
1108 * State len is a multiple of 8, so may be larger than the digest.
1109 * Put the partial hash of opad state_len bytes after state1
1111 *p_state_len = qat_hash_get_state1_size(hash_alg);
1112 if (partial_hash_compute(hash_alg, opad, p_state_buf + *p_state_len)) {
1113 memset(ipad, 0, block_size);
1114 memset(opad, 0, block_size);
1115 PMD_DRV_LOG(ERR, "opad precompute failed");
1119 /* don't leave data lying around */
1120 memset(ipad, 0, block_size);
1121 memset(opad, 0, block_size);
1126 qat_sym_session_init_common_hdr(struct icp_qat_fw_comn_req_hdr *header,
1127 enum qat_crypto_proto_flag proto_flags)
1129 PMD_INIT_FUNC_TRACE();
1131 ICP_QAT_FW_COMN_HDR_FLAGS_BUILD(ICP_QAT_FW_COMN_REQ_FLAG_SET);
1132 header->service_type = ICP_QAT_FW_COMN_REQ_CPM_FW_LA;
1133 header->comn_req_flags =
1134 ICP_QAT_FW_COMN_FLAGS_BUILD(QAT_COMN_CD_FLD_TYPE_64BIT_ADR,
1135 QAT_COMN_PTR_TYPE_FLAT);
1136 ICP_QAT_FW_LA_PARTIAL_SET(header->serv_specif_flags,
1137 ICP_QAT_FW_LA_PARTIAL_NONE);
1138 ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(header->serv_specif_flags,
1139 ICP_QAT_FW_CIPH_IV_16BYTE_DATA);
1141 switch (proto_flags) {
1142 case QAT_CRYPTO_PROTO_FLAG_NONE:
1143 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1144 ICP_QAT_FW_LA_NO_PROTO);
1146 case QAT_CRYPTO_PROTO_FLAG_CCM:
1147 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1148 ICP_QAT_FW_LA_CCM_PROTO);
1150 case QAT_CRYPTO_PROTO_FLAG_GCM:
1151 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1152 ICP_QAT_FW_LA_GCM_PROTO);
1154 case QAT_CRYPTO_PROTO_FLAG_SNOW3G:
1155 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1156 ICP_QAT_FW_LA_SNOW_3G_PROTO);
1158 case QAT_CRYPTO_PROTO_FLAG_ZUC:
1159 ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(header->serv_specif_flags,
1160 ICP_QAT_FW_LA_ZUC_3G_PROTO);
1164 ICP_QAT_FW_LA_UPDATE_STATE_SET(header->serv_specif_flags,
1165 ICP_QAT_FW_LA_NO_UPDATE_STATE);
1166 ICP_QAT_FW_LA_DIGEST_IN_BUFFER_SET(header->serv_specif_flags,
1167 ICP_QAT_FW_LA_NO_DIGEST_IN_BUFFER);
1171 * Snow3G and ZUC should never use this function
1172 * and set its protocol flag in both cipher and auth part of content
1173 * descriptor building function
1175 static enum qat_crypto_proto_flag
1176 qat_get_crypto_proto_flag(uint16_t flags)
1178 int proto = ICP_QAT_FW_LA_PROTO_GET(flags);
1179 enum qat_crypto_proto_flag qat_proto_flag =
1180 QAT_CRYPTO_PROTO_FLAG_NONE;
1183 case ICP_QAT_FW_LA_GCM_PROTO:
1184 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
1186 case ICP_QAT_FW_LA_CCM_PROTO:
1187 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
1191 return qat_proto_flag;
1194 int qat_sym_session_aead_create_cd_cipher(struct qat_session *cdesc,
1196 uint32_t cipherkeylen)
1198 struct icp_qat_hw_cipher_algo_blk *cipher;
1199 struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
1200 struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
1201 struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
1202 void *ptr = &req_tmpl->cd_ctrl;
1203 struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
1204 struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
1205 enum icp_qat_hw_cipher_convert key_convert;
1206 enum qat_crypto_proto_flag qat_proto_flag =
1207 QAT_CRYPTO_PROTO_FLAG_NONE;
1208 uint32_t total_key_size;
1209 uint16_t cipher_offset, cd_size;
1210 uint32_t wordIndex = 0;
1211 uint32_t *temp_key = NULL;
1212 PMD_INIT_FUNC_TRACE();
1214 if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER) {
1215 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1216 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1217 ICP_QAT_FW_SLICE_CIPHER);
1218 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1219 ICP_QAT_FW_SLICE_DRAM_WR);
1220 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1221 ICP_QAT_FW_LA_NO_RET_AUTH_RES);
1222 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1223 ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
1224 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1225 } else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
1226 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1227 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1228 ICP_QAT_FW_SLICE_CIPHER);
1229 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1230 ICP_QAT_FW_SLICE_AUTH);
1231 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1232 ICP_QAT_FW_SLICE_AUTH);
1233 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1234 ICP_QAT_FW_SLICE_DRAM_WR);
1235 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1236 } else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
1237 PMD_DRV_LOG(ERR, "Invalid param, must be a cipher command.");
1241 if (cdesc->qat_mode == ICP_QAT_HW_CIPHER_CTR_MODE) {
1243 * CTR Streaming ciphers are a special case. Decrypt = encrypt
1244 * Overriding default values previously set
1246 cdesc->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
1247 key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
1248 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2
1249 || cdesc->qat_cipher_alg ==
1250 ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)
1251 key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
1252 else if (cdesc->qat_dir == ICP_QAT_HW_CIPHER_ENCRYPT)
1253 key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
1255 key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
1257 if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
1258 total_key_size = ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ +
1259 ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
1260 cipher_cd_ctrl->cipher_state_sz =
1261 ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
1262 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
1264 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
1265 total_key_size = ICP_QAT_HW_KASUMI_F8_KEY_SZ;
1266 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_KASUMI_BLK_SZ >> 3;
1267 cipher_cd_ctrl->cipher_padding_sz =
1268 (2 * ICP_QAT_HW_KASUMI_BLK_SZ) >> 3;
1269 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES) {
1270 total_key_size = ICP_QAT_HW_3DES_KEY_SZ;
1271 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_3DES_BLK_SZ >> 3;
1273 qat_get_crypto_proto_flag(header->serv_specif_flags);
1274 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_DES) {
1275 total_key_size = ICP_QAT_HW_DES_KEY_SZ;
1276 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_DES_BLK_SZ >> 3;
1278 qat_get_crypto_proto_flag(header->serv_specif_flags);
1279 } else if (cdesc->qat_cipher_alg ==
1280 ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
1281 total_key_size = ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ +
1282 ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
1283 cipher_cd_ctrl->cipher_state_sz =
1284 ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
1285 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
1286 cdesc->min_qat_dev_gen = QAT_GEN2;
1288 total_key_size = cipherkeylen;
1289 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_AES_BLK_SZ >> 3;
1291 qat_get_crypto_proto_flag(header->serv_specif_flags);
1293 cipher_cd_ctrl->cipher_key_sz = total_key_size >> 3;
1294 cipher_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
1295 cipher_cd_ctrl->cipher_cfg_offset = cipher_offset >> 3;
1297 header->service_cmd_id = cdesc->qat_cmd;
1298 qat_sym_session_init_common_hdr(header, qat_proto_flag);
1300 cipher = (struct icp_qat_hw_cipher_algo_blk *)cdesc->cd_cur_ptr;
1301 cipher->cipher_config.val =
1302 ICP_QAT_HW_CIPHER_CONFIG_BUILD(cdesc->qat_mode,
1303 cdesc->qat_cipher_alg, key_convert,
1306 if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
1307 temp_key = (uint32_t *)(cdesc->cd_cur_ptr +
1308 sizeof(struct icp_qat_hw_cipher_config)
1310 memcpy(cipher->key, cipherkey, cipherkeylen);
1311 memcpy(temp_key, cipherkey, cipherkeylen);
1313 /* XOR Key with KASUMI F8 key modifier at 4 bytes level */
1314 for (wordIndex = 0; wordIndex < (cipherkeylen >> 2);
1316 temp_key[wordIndex] ^= KASUMI_F8_KEY_MODIFIER_4_BYTES;
1318 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1319 cipherkeylen + cipherkeylen;
1321 memcpy(cipher->key, cipherkey, cipherkeylen);
1322 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1326 if (total_key_size > cipherkeylen) {
1327 uint32_t padding_size = total_key_size-cipherkeylen;
1328 if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
1329 && (cipherkeylen == QAT_3DES_KEY_SZ_OPT2))
1330 /* K3 not provided so use K1 = K3*/
1331 memcpy(cdesc->cd_cur_ptr, cipherkey, padding_size);
1333 memset(cdesc->cd_cur_ptr, 0, padding_size);
1334 cdesc->cd_cur_ptr += padding_size;
1336 cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
1337 cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;
/*
 * Build the authentication (hash) section of the QAT content descriptor
 * for session 'cdesc': selects the firmware slice chain for the session
 * command, runs any required hash precomputes into the state1/state2
 * areas, and fills in the auth CD control header and the auth request
 * parameter template.
 *
 * NOTE(review): this chunk appears to have lost occasional lines in
 * extraction (opening brace, 'break;' / 'return -EFAULT;' statements,
 * an 'else', closing braces) - verify against the upstream file before
 * relying on the exact text below.
 *
 * Returns 0 on success, -EFAULT on an invalid command or hash algorithm.
 */
int qat_sym_session_aead_create_cd_auth(struct qat_session *cdesc,
					uint32_t authkeylen,
					uint32_t aad_length,
					uint32_t digestsize,
					unsigned int operation
	struct icp_qat_hw_auth_setup *hash;
	struct icp_qat_hw_cipher_algo_blk *cipherconfig;
	struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
	struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
	struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
	/* cipher and auth cd_ctrl share the same storage in the template */
	void *ptr = &req_tmpl->cd_ctrl;
	struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
	struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
	/* auth request params sit directly after the cipher request params
	 * in the service-specific area of the request template
	 */
	struct icp_qat_fw_la_auth_req_params *auth_param =
		(struct icp_qat_fw_la_auth_req_params *)
		((char *)&req_tmpl->serv_specif_rqpars +
		sizeof(struct icp_qat_fw_la_cipher_req_params));
	uint16_t state1_size = 0, state2_size = 0;
	uint16_t hash_offset, cd_size;
	uint32_t *aad_len = NULL;
	uint32_t wordIndex = 0;
	enum qat_crypto_proto_flag qat_proto_flag =
		QAT_CRYPTO_PROTO_FLAG_NONE;
	PMD_INIT_FUNC_TRACE();
	/* Select the firmware slice chain for this session command */
	if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_AUTH) {
		/* Auth only: hash slice writes its result straight to DRAM */
		ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_DRAM_WR);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
		/* Hash-then-cipher: chain the auth slice into the cipher
		 * slice, cipher slice writes to DRAM
		 */
		ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_DRAM_WR);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
		/* CIPHER_HASH keeps the slice chain set by the cipher half */
		PMD_DRV_LOG(ERR, "Invalid param, must be a hash command.");
	if (operation == RTE_CRYPTO_AUTH_OP_VERIFY) {
		/* Verify: device compares the digest, does not return it */
		ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_NO_RET_AUTH_RES);
		ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_CMP_AUTH_RES);
		cdesc->auth_op = ICP_QAT_HW_AUTH_VERIFY;
		/* Generate: device returns the digest, no compare */
		ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_RET_AUTH_RES);
		ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
		cdesc->auth_op = ICP_QAT_HW_AUTH_GENERATE;

	/* Setup the inner hash config */
	hash_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
	hash = (struct icp_qat_hw_auth_setup *)cdesc->cd_cur_ptr;
	hash->auth_config.reserved = 0;
	hash->auth_config.config =
			ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE1,
				cdesc->qat_hash_alg, digestsize);

	/* 3GPP stream-cipher based MACs use a zero counter; other hashes
	 * use the algorithm block size, byte-swapped, as initial counter
	 */
	if (cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_KASUMI_F9
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3)
		hash->auth_counter.counter = 0;
		hash->auth_counter.counter = rte_bswap32(
				qat_hash_get_block_size(cdesc->qat_hash_alg));

	cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_auth_setup);

	/* cd_cur_ptr now points at the state1 information. */
	switch (cdesc->qat_hash_alg) {
	case ICP_QAT_HW_AUTH_ALGO_SHA1:
		/* HMAC-SHA1: precompute ipad/opad digests into state1 */
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA1,
			authkey, authkeylen, cdesc->cd_cur_ptr, &state1_size)) {
			PMD_DRV_LOG(ERR, "(SHA)precompute failed");
		state2_size = RTE_ALIGN_CEIL(ICP_QAT_HW_SHA1_STATE2_SZ, 8);
	case ICP_QAT_HW_AUTH_ALGO_SHA224:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA224,
			authkey, authkeylen, cdesc->cd_cur_ptr, &state1_size)) {
			PMD_DRV_LOG(ERR, "(SHA)precompute failed");
		state2_size = ICP_QAT_HW_SHA224_STATE2_SZ;
	case ICP_QAT_HW_AUTH_ALGO_SHA256:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA256,
			authkey, authkeylen, cdesc->cd_cur_ptr, &state1_size)) {
			PMD_DRV_LOG(ERR, "(SHA)precompute failed");
		state2_size = ICP_QAT_HW_SHA256_STATE2_SZ;
	case ICP_QAT_HW_AUTH_ALGO_SHA384:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA384,
			authkey, authkeylen, cdesc->cd_cur_ptr, &state1_size)) {
			PMD_DRV_LOG(ERR, "(SHA)precompute failed");
		state2_size = ICP_QAT_HW_SHA384_STATE2_SZ;
	case ICP_QAT_HW_AUTH_ALGO_SHA512:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA512,
			authkey, authkeylen, cdesc->cd_cur_ptr, &state1_size)) {
			PMD_DRV_LOG(ERR, "(SHA)precompute failed");
		state2_size = ICP_QAT_HW_SHA512_STATE2_SZ;
	case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
		state1_size = ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
		/* derived keys go into state2, after state1 */
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC,
			authkey, authkeylen, cdesc->cd_cur_ptr + state1_size,
			PMD_DRV_LOG(ERR, "(XCBC)precompute failed");
	case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
	case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
		/* GCM/GMAC: H value is precomputed into state2 */
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
		state1_size = ICP_QAT_HW_GALOIS_128_STATE1_SZ;
		if (qat_sym_do_precomputes(cdesc->qat_hash_alg,
			authkey, authkeylen, cdesc->cd_cur_ptr + state1_size,
			PMD_DRV_LOG(ERR, "(GCM)precompute failed");
		/*
		 * Write (the length of AAD) into bytes 16-19 of state2
		 * in big-endian format. This field is 8 bytes
		 */
		auth_param->u2.aad_sz =
				RTE_ALIGN_CEIL(aad_length, 16);
		auth_param->hash_state_sz = (auth_param->u2.aad_sz) >> 3;
		aad_len = (uint32_t *)(cdesc->cd_cur_ptr +
					ICP_QAT_HW_GALOIS_128_STATE1_SZ +
					ICP_QAT_HW_GALOIS_H_SZ);
		*aad_len = rte_bswap32(aad_length);
		cdesc->aad_len = aad_length;
	case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2);
		state2_size = ICP_QAT_HW_SNOW_3G_UIA2_STATE2_SZ;
		memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
		/* an extra cipher config block (key-converted SNOW3G in ECB
		 * mode) is appended after state2 for the UIA2 operation
		 */
		cipherconfig = (struct icp_qat_hw_cipher_algo_blk *)
				(cdesc->cd_cur_ptr + state1_size + state2_size);
		cipherconfig->cipher_config.val =
		ICP_QAT_HW_CIPHER_CONFIG_BUILD(ICP_QAT_HW_CIPHER_ECB_MODE,
			ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2,
			ICP_QAT_HW_CIPHER_KEY_CONVERT,
			ICP_QAT_HW_CIPHER_ENCRYPT);
		memcpy(cipherconfig->key, authkey, authkeylen);
		/* zero the IV area following the key */
		memset(cipherconfig->key + authkeylen,
				0, ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ);
		cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
				authkeylen + ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
		auth_param->hash_state_sz = ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
	case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
		/* ZUC EIA3 uses auth MODE0, overriding the MODE1 default */
		hash->auth_config.config =
			ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE0,
				cdesc->qat_hash_alg, digestsize);
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3);
		state2_size = ICP_QAT_HW_ZUC_3G_EIA3_STATE2_SZ;
		memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size
			+ ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ);
		/* key is placed in state2; IV area after it stays zeroed */
		memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
		cdesc->cd_cur_ptr += state1_size + state2_size
			+ ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
		auth_param->hash_state_sz = ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
		/* ZUC requires a second-generation QAT device */
		cdesc->min_qat_dev_gen = QAT_GEN2;
	case ICP_QAT_HW_AUTH_ALGO_MD5:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_MD5,
			authkey, authkeylen, cdesc->cd_cur_ptr,
			PMD_DRV_LOG(ERR, "(MD5)precompute failed");
		state2_size = ICP_QAT_HW_MD5_STATE2_SZ;
	case ICP_QAT_HW_AUTH_ALGO_NULL:
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_NULL);
		state2_size = ICP_QAT_HW_NULL_STATE2_SZ;
	case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
		/* AES-CBC-MAC is the auth half of CCM */
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC);
		state2_size = ICP_QAT_HW_AES_CBC_MAC_KEY_SZ +
				ICP_QAT_HW_AES_CCM_CBC_E_CTR0_SZ;
		/* CCM AAD is prefixed by the B0 block and length info,
		 * then padded to the CCM AAD alignment
		 */
		if (aad_length > 0) {
			aad_length += ICP_QAT_HW_CCM_AAD_B0_LEN +
			ICP_QAT_HW_CCM_AAD_LEN_INFO;
			auth_param->u2.aad_sz =
			RTE_ALIGN_CEIL(aad_length,
			ICP_QAT_HW_CCM_AAD_ALIGNMENT);
			auth_param->u2.aad_sz = ICP_QAT_HW_CCM_AAD_B0_LEN;
		cdesc->aad_len = aad_length;
		hash->auth_counter.counter = 0;

		hash_cd_ctrl->outer_prefix_sz = digestsize;
		auth_param->hash_state_sz = digestsize;

		memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
	case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_KASUMI_F9);
		state2_size = ICP_QAT_HW_KASUMI_F9_STATE2_SZ;
		memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
		pTempKey = (uint32_t *)(cdesc->cd_cur_ptr + state1_size
		/*
		 * The Inner Hash Initial State2 block must contain IK
		 * (Initialisation Key), followed by IK XOR-ed with KM
		 * (Key Modifier): IK||(IK^KM).
		 */
		/* write the auth key */
		memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
		/* initialise temp key with auth key */
		memcpy(pTempKey, authkey, authkeylen);
		/* XOR Key with KASUMI F9 key modifier at 4 bytes level */
		for (wordIndex = 0; wordIndex < (authkeylen >> 2); wordIndex++)
			pTempKey[wordIndex] ^= KASUMI_F9_KEY_MODIFIER_4_BYTES;
		PMD_DRV_LOG(ERR, "Invalid HASH alg %u", cdesc->qat_hash_alg);

	/* Request template setup */
	qat_sym_session_init_common_hdr(header, qat_proto_flag);
	header->service_cmd_id = cdesc->qat_cmd;

	/* Auth CD config setup */
	hash_cd_ctrl->hash_cfg_offset = hash_offset >> 3;
	hash_cd_ctrl->hash_flags = ICP_QAT_FW_AUTH_HDR_FLAG_NO_NESTED;
	hash_cd_ctrl->inner_res_sz = digestsize;
	hash_cd_ctrl->final_sz = digestsize;
	hash_cd_ctrl->inner_state1_sz = state1_size;
	auth_param->auth_res_sz = digestsize;

	hash_cd_ctrl->inner_state2_sz = state2_size;
	/* state2 follows the auth setup block and the 8-byte-aligned state1 */
	hash_cd_ctrl->inner_state2_offset = hash_cd_ctrl->hash_cfg_offset +
			((sizeof(struct icp_qat_hw_auth_setup) +
			 RTE_ALIGN_CEIL(hash_cd_ctrl->inner_state1_sz, 8))
	cdesc->cd_cur_ptr += state1_size + state2_size;
	cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;

	cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
	/* descriptor size is expressed in 8-byte (quad) words */
	cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;
1632 int qat_sym_validate_aes_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1635 case ICP_QAT_HW_AES_128_KEY_SZ:
1636 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
1638 case ICP_QAT_HW_AES_192_KEY_SZ:
1639 *alg = ICP_QAT_HW_CIPHER_ALGO_AES192;
1641 case ICP_QAT_HW_AES_256_KEY_SZ:
1642 *alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
1650 int qat_sym_validate_aes_docsisbpi_key(int key_len,
1651 enum icp_qat_hw_cipher_algo *alg)
1654 case ICP_QAT_HW_AES_128_KEY_SZ:
1655 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
1663 int qat_sym_validate_snow3g_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1666 case ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ:
1667 *alg = ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2;
1675 int qat_sym_validate_kasumi_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1678 case ICP_QAT_HW_KASUMI_KEY_SZ:
1679 *alg = ICP_QAT_HW_CIPHER_ALGO_KASUMI;
1687 int qat_sym_validate_des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1690 case ICP_QAT_HW_DES_KEY_SZ:
1691 *alg = ICP_QAT_HW_CIPHER_ALGO_DES;
1699 int qat_sym_validate_3des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1702 case QAT_3DES_KEY_SZ_OPT1:
1703 case QAT_3DES_KEY_SZ_OPT2:
1704 *alg = ICP_QAT_HW_CIPHER_ALGO_3DES;
1712 int qat_sym_validate_zuc_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1715 case ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ:
1716 *alg = ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3;