1 /* SPDX-License-Identifier: (BSD-3-Clause OR GPL-2.0)
2 * Copyright(c) 2015-2019 Intel Corporation
5 #include <openssl/sha.h> /* Needed to calculate pre-compute values */
6 #include <openssl/aes.h> /* Needed to calculate pre-compute values */
7 #include <openssl/md5.h> /* Needed to calculate pre-compute values */
8 #include <openssl/evp.h> /* Needed for bpi runt block processing */
10 #include <rte_memcpy.h>
11 #include <rte_common.h>
12 #include <rte_spinlock.h>
13 #include <rte_byteorder.h>
15 #include <rte_malloc.h>
16 #include <rte_crypto_sym.h>
19 #include "qat_sym_session.h"
20 #include "qat_sym_pmd.h"
22 /** Frees a context previously created
23 * Depends on openssl libcrypto
26 bpi_cipher_ctx_free(void *bpi_ctx)
29 EVP_CIPHER_CTX_free((EVP_CIPHER_CTX *)bpi_ctx);
32 /** Creates a context in either AES or DES in ECB mode
33 * Depends on openssl libcrypto
36 bpi_cipher_ctx_init(enum rte_crypto_cipher_algorithm cryptodev_algo,
37 enum rte_crypto_cipher_operation direction __rte_unused,
38 const uint8_t *key, void **ctx)
40 const EVP_CIPHER *algo = NULL;
42 *ctx = EVP_CIPHER_CTX_new();
49 if (cryptodev_algo == RTE_CRYPTO_CIPHER_DES_DOCSISBPI)
52 algo = EVP_aes_128_ecb();
54 /* IV will be ECB encrypted whether direction is encrypt or decrypt*/
55 if (EVP_EncryptInit_ex(*ctx, algo, NULL, key, 0) != 1) {
64 EVP_CIPHER_CTX_free(*ctx);
69 qat_is_cipher_alg_supported(enum rte_crypto_cipher_algorithm algo,
70 struct qat_sym_dev_private *internals)
73 const struct rte_cryptodev_capabilities *capability;
75 while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
76 RTE_CRYPTO_OP_TYPE_UNDEFINED) {
77 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
80 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_CIPHER)
83 if (capability->sym.cipher.algo == algo)
90 qat_is_auth_alg_supported(enum rte_crypto_auth_algorithm algo,
91 struct qat_sym_dev_private *internals)
94 const struct rte_cryptodev_capabilities *capability;
96 while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
97 RTE_CRYPTO_OP_TYPE_UNDEFINED) {
98 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
101 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_AUTH)
104 if (capability->sym.auth.algo == algo)
111 qat_sym_session_clear(struct rte_cryptodev *dev,
112 struct rte_cryptodev_sym_session *sess)
114 uint8_t index = dev->driver_id;
115 void *sess_priv = get_sym_session_private_data(sess, index);
116 struct qat_sym_session *s = (struct qat_sym_session *)sess_priv;
120 bpi_cipher_ctx_free(s->bpi_ctx);
121 memset(s, 0, qat_sym_session_get_private_size(dev));
122 struct rte_mempool *sess_mp = rte_mempool_from_obj(sess_priv);
124 set_sym_session_private_data(sess, index, NULL);
125 rte_mempool_put(sess_mp, sess_priv);
130 qat_get_cmd_id(const struct rte_crypto_sym_xform *xform)
133 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER && xform->next == NULL)
134 return ICP_QAT_FW_LA_CMD_CIPHER;
136 /* Authentication Only */
137 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH && xform->next == NULL)
138 return ICP_QAT_FW_LA_CMD_AUTH;
141 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
142 /* AES-GCM and AES-CCM works with different direction
143 * GCM first encrypts and generate hash where AES-CCM
144 * first generate hash and encrypts. Similar relation
145 * applies to decryption.
147 if (xform->aead.op == RTE_CRYPTO_AEAD_OP_ENCRYPT)
148 if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
149 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
151 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
153 if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
154 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
156 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
159 if (xform->next == NULL)
162 /* Cipher then Authenticate */
163 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
164 xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
165 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
167 /* Authenticate then Cipher */
168 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH &&
169 xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
170 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
175 static struct rte_crypto_auth_xform *
176 qat_get_auth_xform(struct rte_crypto_sym_xform *xform)
179 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH)
188 static struct rte_crypto_cipher_xform *
189 qat_get_cipher_xform(struct rte_crypto_sym_xform *xform)
192 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
193 return &xform->cipher;
202 qat_sym_session_configure_cipher(struct rte_cryptodev *dev,
203 struct rte_crypto_sym_xform *xform,
204 struct qat_sym_session *session)
206 struct qat_sym_dev_private *internals = dev->data->dev_private;
207 struct rte_crypto_cipher_xform *cipher_xform = NULL;
210 /* Get cipher xform from crypto xform chain */
211 cipher_xform = qat_get_cipher_xform(xform);
213 session->cipher_iv.offset = cipher_xform->iv.offset;
214 session->cipher_iv.length = cipher_xform->iv.length;
216 switch (cipher_xform->algo) {
217 case RTE_CRYPTO_CIPHER_AES_CBC:
218 if (qat_sym_validate_aes_key(cipher_xform->key.length,
219 &session->qat_cipher_alg) != 0) {
220 QAT_LOG(ERR, "Invalid AES cipher key size");
224 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
226 case RTE_CRYPTO_CIPHER_AES_CTR:
227 if (qat_sym_validate_aes_key(cipher_xform->key.length,
228 &session->qat_cipher_alg) != 0) {
229 QAT_LOG(ERR, "Invalid AES cipher key size");
233 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
235 case RTE_CRYPTO_CIPHER_SNOW3G_UEA2:
236 if (qat_sym_validate_snow3g_key(cipher_xform->key.length,
237 &session->qat_cipher_alg) != 0) {
238 QAT_LOG(ERR, "Invalid SNOW 3G cipher key size");
242 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
244 case RTE_CRYPTO_CIPHER_NULL:
245 session->qat_cipher_alg = ICP_QAT_HW_CIPHER_ALGO_NULL;
246 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
248 case RTE_CRYPTO_CIPHER_KASUMI_F8:
249 if (qat_sym_validate_kasumi_key(cipher_xform->key.length,
250 &session->qat_cipher_alg) != 0) {
251 QAT_LOG(ERR, "Invalid KASUMI cipher key size");
255 session->qat_mode = ICP_QAT_HW_CIPHER_F8_MODE;
257 case RTE_CRYPTO_CIPHER_3DES_CBC:
258 if (qat_sym_validate_3des_key(cipher_xform->key.length,
259 &session->qat_cipher_alg) != 0) {
260 QAT_LOG(ERR, "Invalid 3DES cipher key size");
264 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
266 case RTE_CRYPTO_CIPHER_DES_CBC:
267 if (qat_sym_validate_des_key(cipher_xform->key.length,
268 &session->qat_cipher_alg) != 0) {
269 QAT_LOG(ERR, "Invalid DES cipher key size");
273 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
275 case RTE_CRYPTO_CIPHER_3DES_CTR:
276 if (qat_sym_validate_3des_key(cipher_xform->key.length,
277 &session->qat_cipher_alg) != 0) {
278 QAT_LOG(ERR, "Invalid 3DES cipher key size");
282 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
284 case RTE_CRYPTO_CIPHER_DES_DOCSISBPI:
285 ret = bpi_cipher_ctx_init(
288 cipher_xform->key.data,
291 QAT_LOG(ERR, "failed to create DES BPI ctx");
294 if (qat_sym_validate_des_key(cipher_xform->key.length,
295 &session->qat_cipher_alg) != 0) {
296 QAT_LOG(ERR, "Invalid DES cipher key size");
300 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
302 case RTE_CRYPTO_CIPHER_AES_DOCSISBPI:
303 ret = bpi_cipher_ctx_init(
306 cipher_xform->key.data,
309 QAT_LOG(ERR, "failed to create AES BPI ctx");
312 if (qat_sym_validate_aes_docsisbpi_key(cipher_xform->key.length,
313 &session->qat_cipher_alg) != 0) {
314 QAT_LOG(ERR, "Invalid AES DOCSISBPI key size");
318 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
320 case RTE_CRYPTO_CIPHER_ZUC_EEA3:
321 if (!qat_is_cipher_alg_supported(
322 cipher_xform->algo, internals)) {
323 QAT_LOG(ERR, "%s not supported on this device",
324 rte_crypto_cipher_algorithm_strings
325 [cipher_xform->algo]);
329 if (qat_sym_validate_zuc_key(cipher_xform->key.length,
330 &session->qat_cipher_alg) != 0) {
331 QAT_LOG(ERR, "Invalid ZUC cipher key size");
335 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
337 case RTE_CRYPTO_CIPHER_AES_XTS:
338 if ((cipher_xform->key.length/2) == ICP_QAT_HW_AES_192_KEY_SZ) {
339 QAT_LOG(ERR, "AES-XTS-192 not supported");
343 if (qat_sym_validate_aes_key((cipher_xform->key.length/2),
344 &session->qat_cipher_alg) != 0) {
345 QAT_LOG(ERR, "Invalid AES-XTS cipher key size");
349 session->qat_mode = ICP_QAT_HW_CIPHER_XTS_MODE;
351 case RTE_CRYPTO_CIPHER_3DES_ECB:
352 case RTE_CRYPTO_CIPHER_AES_ECB:
353 case RTE_CRYPTO_CIPHER_AES_F8:
354 case RTE_CRYPTO_CIPHER_ARC4:
355 QAT_LOG(ERR, "Crypto QAT PMD: Unsupported Cipher alg %u",
360 QAT_LOG(ERR, "Crypto: Undefined Cipher specified %u\n",
366 if (cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
367 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
369 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
371 if (qat_sym_session_aead_create_cd_cipher(session,
372 cipher_xform->key.data,
373 cipher_xform->key.length)) {
381 if (session->bpi_ctx) {
382 bpi_cipher_ctx_free(session->bpi_ctx);
383 session->bpi_ctx = NULL;
389 qat_sym_session_configure(struct rte_cryptodev *dev,
390 struct rte_crypto_sym_xform *xform,
391 struct rte_cryptodev_sym_session *sess,
392 struct rte_mempool *mempool)
394 void *sess_private_data;
397 if (rte_mempool_get(mempool, &sess_private_data)) {
399 "Couldn't get object from session mempool");
403 ret = qat_sym_session_set_parameters(dev, xform, sess_private_data);
406 "Crypto QAT PMD: failed to configure session parameters");
408 /* Return session to mempool */
409 rte_mempool_put(mempool, sess_private_data);
413 set_sym_session_private_data(sess, dev->driver_id,
420 qat_sym_session_set_ext_hash_flags(struct qat_sym_session *session,
423 struct icp_qat_fw_comn_req_hdr *header = &session->fw_req.comn_hdr;
424 struct icp_qat_fw_cipher_auth_cd_ctrl_hdr *cd_ctrl =
425 (struct icp_qat_fw_cipher_auth_cd_ctrl_hdr *)
426 session->fw_req.cd_ctrl.content_desc_ctrl_lw;
428 /* Set the Use Extended Protocol Flags bit in LW 1 */
429 QAT_FIELD_SET(header->comn_req_flags,
430 QAT_COMN_EXT_FLAGS_USED,
431 QAT_COMN_EXT_FLAGS_BITPOS,
432 QAT_COMN_EXT_FLAGS_MASK);
434 /* Set Hash Flags in LW 28 */
435 cd_ctrl->hash_flags |= hash_flag;
437 /* Set proto flags in LW 1 */
438 switch (session->qat_cipher_alg) {
439 case ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2:
440 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
441 ICP_QAT_FW_LA_SNOW_3G_PROTO);
442 ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
443 header->serv_specif_flags, 0);
445 case ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3:
446 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
447 ICP_QAT_FW_LA_NO_PROTO);
448 ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
449 header->serv_specif_flags,
450 ICP_QAT_FW_LA_ZUC_3G_PROTO);
453 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
454 ICP_QAT_FW_LA_NO_PROTO);
455 ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
456 header->serv_specif_flags, 0);
462 qat_sym_session_handle_mixed(struct qat_sym_session *session)
464 if (session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3 &&
465 session->qat_cipher_alg !=
466 ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
467 session->min_qat_dev_gen = QAT_GEN3;
468 qat_sym_session_set_ext_hash_flags(session,
469 1 << ICP_QAT_FW_AUTH_HDR_FLAG_ZUC_EIA3_BITPOS);
470 } else if (session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2 &&
471 session->qat_cipher_alg !=
472 ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
473 session->min_qat_dev_gen = QAT_GEN3;
474 qat_sym_session_set_ext_hash_flags(session,
475 1 << ICP_QAT_FW_AUTH_HDR_FLAG_SNOW3G_UIA2_BITPOS);
476 } else if ((session->aes_cmac ||
477 session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_NULL) &&
478 (session->qat_cipher_alg ==
479 ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2 ||
480 session->qat_cipher_alg ==
481 ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)) {
482 session->min_qat_dev_gen = QAT_GEN3;
483 qat_sym_session_set_ext_hash_flags(session, 0);
488 qat_sym_session_set_parameters(struct rte_cryptodev *dev,
489 struct rte_crypto_sym_xform *xform, void *session_private)
491 struct qat_sym_session *session = session_private;
495 /* Set context descriptor physical address */
496 session->cd_paddr = rte_mempool_virt2iova(session) +
497 offsetof(struct qat_sym_session, cd);
499 session->min_qat_dev_gen = QAT_GEN1;
501 /* Get requested QAT command id */
502 qat_cmd_id = qat_get_cmd_id(xform);
503 if (qat_cmd_id < 0 || qat_cmd_id >= ICP_QAT_FW_LA_CMD_DELIMITER) {
504 QAT_LOG(ERR, "Unsupported xform chain requested");
507 session->qat_cmd = (enum icp_qat_fw_la_cmd_id)qat_cmd_id;
508 switch (session->qat_cmd) {
509 case ICP_QAT_FW_LA_CMD_CIPHER:
510 ret = qat_sym_session_configure_cipher(dev, xform, session);
514 case ICP_QAT_FW_LA_CMD_AUTH:
515 ret = qat_sym_session_configure_auth(dev, xform, session);
519 case ICP_QAT_FW_LA_CMD_CIPHER_HASH:
520 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
521 ret = qat_sym_session_configure_aead(dev, xform,
526 ret = qat_sym_session_configure_cipher(dev,
530 ret = qat_sym_session_configure_auth(dev,
534 /* Special handling of mixed hash+cipher algorithms */
535 qat_sym_session_handle_mixed(session);
538 case ICP_QAT_FW_LA_CMD_HASH_CIPHER:
539 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
540 ret = qat_sym_session_configure_aead(dev, xform,
545 ret = qat_sym_session_configure_auth(dev,
549 ret = qat_sym_session_configure_cipher(dev,
553 /* Special handling of mixed hash+cipher algorithms */
554 qat_sym_session_handle_mixed(session);
557 case ICP_QAT_FW_LA_CMD_TRNG_GET_RANDOM:
558 case ICP_QAT_FW_LA_CMD_TRNG_TEST:
559 case ICP_QAT_FW_LA_CMD_SSL3_KEY_DERIVE:
560 case ICP_QAT_FW_LA_CMD_TLS_V1_1_KEY_DERIVE:
561 case ICP_QAT_FW_LA_CMD_TLS_V1_2_KEY_DERIVE:
562 case ICP_QAT_FW_LA_CMD_MGF1:
563 case ICP_QAT_FW_LA_CMD_AUTH_PRE_COMP:
564 case ICP_QAT_FW_LA_CMD_CIPHER_PRE_COMP:
565 case ICP_QAT_FW_LA_CMD_DELIMITER:
566 QAT_LOG(ERR, "Unsupported Service %u",
570 QAT_LOG(ERR, "Unsupported Service %u",
579 qat_sym_session_handle_single_pass(struct qat_sym_session *session,
580 struct rte_crypto_aead_xform *aead_xform)
582 struct icp_qat_fw_la_cipher_req_params *cipher_param =
583 (void *) &session->fw_req.serv_specif_rqpars;
585 session->is_single_pass = 1;
586 session->min_qat_dev_gen = QAT_GEN3;
587 session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER;
588 if (aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) {
589 session->qat_mode = ICP_QAT_HW_CIPHER_AEAD_MODE;
590 ICP_QAT_FW_LA_GCM_IV_LEN_FLAG_SET(
591 session->fw_req.comn_hdr.serv_specif_flags,
592 ICP_QAT_FW_LA_GCM_IV_LEN_12_OCTETS);
594 /* Chacha-Poly is special case that use QAT CTR mode */
595 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
597 session->cipher_iv.offset = aead_xform->iv.offset;
598 session->cipher_iv.length = aead_xform->iv.length;
599 if (qat_sym_session_aead_create_cd_cipher(session,
600 aead_xform->key.data, aead_xform->key.length))
602 session->aad_len = aead_xform->aad_length;
603 session->digest_length = aead_xform->digest_length;
604 if (aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
605 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
606 session->auth_op = ICP_QAT_HW_AUTH_GENERATE;
607 ICP_QAT_FW_LA_RET_AUTH_SET(
608 session->fw_req.comn_hdr.serv_specif_flags,
609 ICP_QAT_FW_LA_RET_AUTH_RES);
611 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
612 session->auth_op = ICP_QAT_HW_AUTH_VERIFY;
613 ICP_QAT_FW_LA_CMP_AUTH_SET(
614 session->fw_req.comn_hdr.serv_specif_flags,
615 ICP_QAT_FW_LA_CMP_AUTH_RES);
617 ICP_QAT_FW_LA_SINGLE_PASS_PROTO_FLAG_SET(
618 session->fw_req.comn_hdr.serv_specif_flags,
619 ICP_QAT_FW_LA_SINGLE_PASS_PROTO);
620 ICP_QAT_FW_LA_PROTO_SET(
621 session->fw_req.comn_hdr.serv_specif_flags,
622 ICP_QAT_FW_LA_NO_PROTO);
623 session->fw_req.comn_hdr.service_cmd_id =
624 ICP_QAT_FW_LA_CMD_CIPHER;
625 session->cd.cipher.cipher_config.val =
626 ICP_QAT_HW_CIPHER_CONFIG_BUILD(
627 ICP_QAT_HW_CIPHER_AEAD_MODE,
628 session->qat_cipher_alg,
629 ICP_QAT_HW_CIPHER_NO_CONVERT,
631 QAT_FIELD_SET(session->cd.cipher.cipher_config.val,
632 aead_xform->digest_length,
633 QAT_CIPHER_AEAD_HASH_CMP_LEN_BITPOS,
634 QAT_CIPHER_AEAD_HASH_CMP_LEN_MASK);
635 session->cd.cipher.cipher_config.reserved =
636 ICP_QAT_HW_CIPHER_CONFIG_BUILD_UPPER(
637 aead_xform->aad_length);
638 cipher_param->spc_aad_sz = aead_xform->aad_length;
639 cipher_param->spc_auth_res_sz = aead_xform->digest_length;
645 qat_sym_session_configure_auth(struct rte_cryptodev *dev,
646 struct rte_crypto_sym_xform *xform,
647 struct qat_sym_session *session)
649 struct rte_crypto_auth_xform *auth_xform = qat_get_auth_xform(xform);
650 struct qat_sym_dev_private *internals = dev->data->dev_private;
651 const uint8_t *key_data = auth_xform->key.data;
652 uint8_t key_length = auth_xform->key.length;
653 session->aes_cmac = 0;
655 switch (auth_xform->algo) {
656 case RTE_CRYPTO_AUTH_SHA1_HMAC:
657 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA1;
659 case RTE_CRYPTO_AUTH_SHA224_HMAC:
660 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA224;
662 case RTE_CRYPTO_AUTH_SHA256_HMAC:
663 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA256;
665 case RTE_CRYPTO_AUTH_SHA384_HMAC:
666 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA384;
668 case RTE_CRYPTO_AUTH_SHA512_HMAC:
669 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA512;
671 case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
672 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
674 case RTE_CRYPTO_AUTH_AES_CMAC:
675 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
676 session->aes_cmac = 1;
678 case RTE_CRYPTO_AUTH_AES_GMAC:
679 if (qat_sym_validate_aes_key(auth_xform->key.length,
680 &session->qat_cipher_alg) != 0) {
681 QAT_LOG(ERR, "Invalid AES key size");
684 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
685 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
688 case RTE_CRYPTO_AUTH_SNOW3G_UIA2:
689 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2;
691 case RTE_CRYPTO_AUTH_MD5_HMAC:
692 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_MD5;
694 case RTE_CRYPTO_AUTH_NULL:
695 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_NULL;
697 case RTE_CRYPTO_AUTH_KASUMI_F9:
698 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_KASUMI_F9;
700 case RTE_CRYPTO_AUTH_ZUC_EIA3:
701 if (!qat_is_auth_alg_supported(auth_xform->algo, internals)) {
702 QAT_LOG(ERR, "%s not supported on this device",
703 rte_crypto_auth_algorithm_strings
707 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3;
709 case RTE_CRYPTO_AUTH_SHA1:
710 case RTE_CRYPTO_AUTH_SHA256:
711 case RTE_CRYPTO_AUTH_SHA512:
712 case RTE_CRYPTO_AUTH_SHA224:
713 case RTE_CRYPTO_AUTH_SHA384:
714 case RTE_CRYPTO_AUTH_MD5:
715 case RTE_CRYPTO_AUTH_AES_CBC_MAC:
716 QAT_LOG(ERR, "Crypto: Unsupported hash alg %u",
720 QAT_LOG(ERR, "Crypto: Undefined Hash algo %u specified",
725 session->auth_iv.offset = auth_xform->iv.offset;
726 session->auth_iv.length = auth_xform->iv.length;
728 if (auth_xform->algo == RTE_CRYPTO_AUTH_AES_GMAC) {
729 if (auth_xform->op == RTE_CRYPTO_AUTH_OP_GENERATE) {
730 session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER_HASH;
731 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
733 * It needs to create cipher desc content first,
734 * then authentication
737 if (qat_sym_session_aead_create_cd_cipher(session,
738 auth_xform->key.data,
739 auth_xform->key.length))
742 if (qat_sym_session_aead_create_cd_auth(session,
746 auth_xform->digest_length,
750 session->qat_cmd = ICP_QAT_FW_LA_CMD_HASH_CIPHER;
751 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
753 * It needs to create authentication desc content first,
757 if (qat_sym_session_aead_create_cd_auth(session,
761 auth_xform->digest_length,
765 if (qat_sym_session_aead_create_cd_cipher(session,
766 auth_xform->key.data,
767 auth_xform->key.length))
770 /* Restore to authentication only only */
771 session->qat_cmd = ICP_QAT_FW_LA_CMD_AUTH;
773 if (qat_sym_session_aead_create_cd_auth(session,
777 auth_xform->digest_length,
782 session->digest_length = auth_xform->digest_length;
787 qat_sym_session_configure_aead(struct rte_cryptodev *dev,
788 struct rte_crypto_sym_xform *xform,
789 struct qat_sym_session *session)
791 struct rte_crypto_aead_xform *aead_xform = &xform->aead;
792 enum rte_crypto_auth_operation crypto_operation;
793 struct qat_sym_dev_private *internals =
794 dev->data->dev_private;
795 enum qat_device_gen qat_dev_gen =
796 internals->qat_dev->qat_dev_gen;
799 * Store AEAD IV parameters as cipher IV,
800 * to avoid unnecessary memory usage
802 session->cipher_iv.offset = xform->aead.iv.offset;
803 session->cipher_iv.length = xform->aead.iv.length;
805 session->is_single_pass = 0;
806 switch (aead_xform->algo) {
807 case RTE_CRYPTO_AEAD_AES_GCM:
808 if (qat_sym_validate_aes_key(aead_xform->key.length,
809 &session->qat_cipher_alg) != 0) {
810 QAT_LOG(ERR, "Invalid AES key size");
813 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
814 session->qat_hash_alg =
815 ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
816 if (qat_dev_gen > QAT_GEN2 && aead_xform->iv.length ==
817 QAT_AES_GCM_SPC_IV_SIZE) {
818 return qat_sym_session_handle_single_pass(session,
822 case RTE_CRYPTO_AEAD_AES_CCM:
823 if (qat_sym_validate_aes_key(aead_xform->key.length,
824 &session->qat_cipher_alg) != 0) {
825 QAT_LOG(ERR, "Invalid AES key size");
828 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
829 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC;
831 case RTE_CRYPTO_AEAD_CHACHA20_POLY1305:
832 if (aead_xform->key.length != ICP_QAT_HW_CHACHAPOLY_KEY_SZ)
834 session->qat_cipher_alg =
835 ICP_QAT_HW_CIPHER_ALGO_CHACHA20_POLY1305;
836 return qat_sym_session_handle_single_pass(session,
839 QAT_LOG(ERR, "Crypto: Undefined AEAD specified %u\n",
844 if ((aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT &&
845 aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) ||
846 (aead_xform->op == RTE_CRYPTO_AEAD_OP_DECRYPT &&
847 aead_xform->algo == RTE_CRYPTO_AEAD_AES_CCM)) {
848 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
850 * It needs to create cipher desc content first,
851 * then authentication
853 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
854 RTE_CRYPTO_AUTH_OP_GENERATE : RTE_CRYPTO_AUTH_OP_VERIFY;
856 if (qat_sym_session_aead_create_cd_cipher(session,
857 aead_xform->key.data,
858 aead_xform->key.length))
861 if (qat_sym_session_aead_create_cd_auth(session,
862 aead_xform->key.data,
863 aead_xform->key.length,
864 aead_xform->aad_length,
865 aead_xform->digest_length,
869 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
871 * It needs to create authentication desc content first,
875 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
876 RTE_CRYPTO_AUTH_OP_VERIFY : RTE_CRYPTO_AUTH_OP_GENERATE;
878 if (qat_sym_session_aead_create_cd_auth(session,
879 aead_xform->key.data,
880 aead_xform->key.length,
881 aead_xform->aad_length,
882 aead_xform->digest_length,
886 if (qat_sym_session_aead_create_cd_cipher(session,
887 aead_xform->key.data,
888 aead_xform->key.length))
892 session->digest_length = aead_xform->digest_length;
896 unsigned int qat_sym_session_get_private_size(
897 struct rte_cryptodev *dev __rte_unused)
899 return RTE_ALIGN_CEIL(sizeof(struct qat_sym_session), 8);
902 /* returns block size in bytes per cipher algo */
903 int qat_cipher_get_block_size(enum icp_qat_hw_cipher_algo qat_cipher_alg)
905 switch (qat_cipher_alg) {
906 case ICP_QAT_HW_CIPHER_ALGO_DES:
907 return ICP_QAT_HW_DES_BLK_SZ;
908 case ICP_QAT_HW_CIPHER_ALGO_3DES:
909 return ICP_QAT_HW_3DES_BLK_SZ;
910 case ICP_QAT_HW_CIPHER_ALGO_AES128:
911 case ICP_QAT_HW_CIPHER_ALGO_AES192:
912 case ICP_QAT_HW_CIPHER_ALGO_AES256:
913 return ICP_QAT_HW_AES_BLK_SZ;
915 QAT_LOG(ERR, "invalid block cipher alg %u", qat_cipher_alg);
922 * Returns size in bytes per hash algo for state1 size field in cd_ctrl
923 * This is digest size rounded up to nearest quadword
925 static int qat_hash_get_state1_size(enum icp_qat_hw_auth_algo qat_hash_alg)
927 switch (qat_hash_alg) {
928 case ICP_QAT_HW_AUTH_ALGO_SHA1:
929 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA1_STATE1_SZ,
930 QAT_HW_DEFAULT_ALIGNMENT);
931 case ICP_QAT_HW_AUTH_ALGO_SHA224:
932 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA224_STATE1_SZ,
933 QAT_HW_DEFAULT_ALIGNMENT);
934 case ICP_QAT_HW_AUTH_ALGO_SHA256:
935 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA256_STATE1_SZ,
936 QAT_HW_DEFAULT_ALIGNMENT);
937 case ICP_QAT_HW_AUTH_ALGO_SHA384:
938 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA384_STATE1_SZ,
939 QAT_HW_DEFAULT_ALIGNMENT);
940 case ICP_QAT_HW_AUTH_ALGO_SHA512:
941 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
942 QAT_HW_DEFAULT_ALIGNMENT);
943 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
944 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ,
945 QAT_HW_DEFAULT_ALIGNMENT);
946 case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
947 case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
948 return QAT_HW_ROUND_UP(ICP_QAT_HW_GALOIS_128_STATE1_SZ,
949 QAT_HW_DEFAULT_ALIGNMENT);
950 case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
951 return QAT_HW_ROUND_UP(ICP_QAT_HW_ZUC_3G_EIA3_STATE1_SZ,
952 QAT_HW_DEFAULT_ALIGNMENT);
953 case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
954 return QAT_HW_ROUND_UP(ICP_QAT_HW_SNOW_3G_UIA2_STATE1_SZ,
955 QAT_HW_DEFAULT_ALIGNMENT);
956 case ICP_QAT_HW_AUTH_ALGO_MD5:
957 return QAT_HW_ROUND_UP(ICP_QAT_HW_MD5_STATE1_SZ,
958 QAT_HW_DEFAULT_ALIGNMENT);
959 case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
960 return QAT_HW_ROUND_UP(ICP_QAT_HW_KASUMI_F9_STATE1_SZ,
961 QAT_HW_DEFAULT_ALIGNMENT);
962 case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
963 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_CBC_MAC_STATE1_SZ,
964 QAT_HW_DEFAULT_ALIGNMENT);
965 case ICP_QAT_HW_AUTH_ALGO_NULL:
966 return QAT_HW_ROUND_UP(ICP_QAT_HW_NULL_STATE1_SZ,
967 QAT_HW_DEFAULT_ALIGNMENT);
968 case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
969 /* return maximum state1 size in this case */
970 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
971 QAT_HW_DEFAULT_ALIGNMENT);
973 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
979 /* returns digest size in bytes per hash algo */
980 static int qat_hash_get_digest_size(enum icp_qat_hw_auth_algo qat_hash_alg)
982 switch (qat_hash_alg) {
983 case ICP_QAT_HW_AUTH_ALGO_SHA1:
984 return ICP_QAT_HW_SHA1_STATE1_SZ;
985 case ICP_QAT_HW_AUTH_ALGO_SHA224:
986 return ICP_QAT_HW_SHA224_STATE1_SZ;
987 case ICP_QAT_HW_AUTH_ALGO_SHA256:
988 return ICP_QAT_HW_SHA256_STATE1_SZ;
989 case ICP_QAT_HW_AUTH_ALGO_SHA384:
990 return ICP_QAT_HW_SHA384_STATE1_SZ;
991 case ICP_QAT_HW_AUTH_ALGO_SHA512:
992 return ICP_QAT_HW_SHA512_STATE1_SZ;
993 case ICP_QAT_HW_AUTH_ALGO_MD5:
994 return ICP_QAT_HW_MD5_STATE1_SZ;
995 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
996 return ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
997 case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
998 /* return maximum digest size in this case */
999 return ICP_QAT_HW_SHA512_STATE1_SZ;
1001 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
1007 /* returns block size in byes per hash algo */
1008 static int qat_hash_get_block_size(enum icp_qat_hw_auth_algo qat_hash_alg)
1010 switch (qat_hash_alg) {
1011 case ICP_QAT_HW_AUTH_ALGO_SHA1:
1013 case ICP_QAT_HW_AUTH_ALGO_SHA224:
1014 return SHA256_CBLOCK;
1015 case ICP_QAT_HW_AUTH_ALGO_SHA256:
1016 return SHA256_CBLOCK;
1017 case ICP_QAT_HW_AUTH_ALGO_SHA384:
1018 return SHA512_CBLOCK;
1019 case ICP_QAT_HW_AUTH_ALGO_SHA512:
1020 return SHA512_CBLOCK;
1021 case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
1023 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
1024 return ICP_QAT_HW_AES_BLK_SZ;
1025 case ICP_QAT_HW_AUTH_ALGO_MD5:
1027 case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
1028 /* return maximum block size in this case */
1029 return SHA512_CBLOCK;
1031 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
1037 static int partial_hash_sha1(uint8_t *data_in, uint8_t *data_out)
1041 if (!SHA1_Init(&ctx))
1043 SHA1_Transform(&ctx, data_in);
1044 rte_memcpy(data_out, &ctx, SHA_DIGEST_LENGTH);
1048 static int partial_hash_sha224(uint8_t *data_in, uint8_t *data_out)
1052 if (!SHA224_Init(&ctx))
1054 SHA256_Transform(&ctx, data_in);
1055 rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
1059 static int partial_hash_sha256(uint8_t *data_in, uint8_t *data_out)
1063 if (!SHA256_Init(&ctx))
1065 SHA256_Transform(&ctx, data_in);
1066 rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
1070 static int partial_hash_sha384(uint8_t *data_in, uint8_t *data_out)
1074 if (!SHA384_Init(&ctx))
1076 SHA512_Transform(&ctx, data_in);
1077 rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
1081 static int partial_hash_sha512(uint8_t *data_in, uint8_t *data_out)
1085 if (!SHA512_Init(&ctx))
1087 SHA512_Transform(&ctx, data_in);
1088 rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
1092 static int partial_hash_md5(uint8_t *data_in, uint8_t *data_out)
1096 if (!MD5_Init(&ctx))
1098 MD5_Transform(&ctx, data_in);
1099 rte_memcpy(data_out, &ctx, MD5_DIGEST_LENGTH);
1104 static int partial_hash_compute(enum icp_qat_hw_auth_algo hash_alg,
1109 uint8_t digest[qat_hash_get_digest_size(
1110 ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1111 uint32_t *hash_state_out_be32;
1112 uint64_t *hash_state_out_be64;
1115 digest_size = qat_hash_get_digest_size(hash_alg);
1116 if (digest_size <= 0)
1119 hash_state_out_be32 = (uint32_t *)data_out;
1120 hash_state_out_be64 = (uint64_t *)data_out;
1123 case ICP_QAT_HW_AUTH_ALGO_SHA1:
1124 if (partial_hash_sha1(data_in, digest))
1126 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
1127 *hash_state_out_be32 =
1128 rte_bswap32(*(((uint32_t *)digest)+i));
1130 case ICP_QAT_HW_AUTH_ALGO_SHA224:
1131 if (partial_hash_sha224(data_in, digest))
1133 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
1134 *hash_state_out_be32 =
1135 rte_bswap32(*(((uint32_t *)digest)+i));
1137 case ICP_QAT_HW_AUTH_ALGO_SHA256:
1138 if (partial_hash_sha256(data_in, digest))
1140 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
1141 *hash_state_out_be32 =
1142 rte_bswap32(*(((uint32_t *)digest)+i));
1144 case ICP_QAT_HW_AUTH_ALGO_SHA384:
1145 if (partial_hash_sha384(data_in, digest))
1147 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
1148 *hash_state_out_be64 =
1149 rte_bswap64(*(((uint64_t *)digest)+i));
1151 case ICP_QAT_HW_AUTH_ALGO_SHA512:
1152 if (partial_hash_sha512(data_in, digest))
1154 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
1155 *hash_state_out_be64 =
1156 rte_bswap64(*(((uint64_t *)digest)+i));
1158 case ICP_QAT_HW_AUTH_ALGO_MD5:
1159 if (partial_hash_md5(data_in, data_out))
1163 QAT_LOG(ERR, "invalid hash alg %u", hash_alg);
1169 #define HMAC_IPAD_VALUE 0x36
1170 #define HMAC_OPAD_VALUE 0x5c
1171 #define HASH_XCBC_PRECOMP_KEY_NUM 3
1173 static const uint8_t AES_CMAC_SEED[ICP_QAT_HW_AES_128_KEY_SZ];
1175 static void aes_cmac_key_derive(uint8_t *base, uint8_t *derived)
1179 derived[0] = base[0] << 1;
1180 for (i = 1; i < ICP_QAT_HW_AES_BLK_SZ ; i++) {
1181 derived[i] = base[i] << 1;
1182 derived[i - 1] |= base[i] >> 7;
1186 derived[ICP_QAT_HW_AES_BLK_SZ - 1] ^= QAT_AES_CMAC_CONST_RB;
/*
 * Precompute the hardware hash state written into the content descriptor.
 *
 * hash_alg     QAT hardware auth algorithm selector
 * auth_key     raw authentication key from the session xform
 * auth_keylen  length of auth_key in bytes
 * p_state_buf  output: state1/state2 precompute area inside the CD
 * p_state_len  output: number of state bytes written
 *
 * Three paths are visible here:
 *  - AES-XCBC-MAC / AES-CMAC: derive K1..K3 with OpenSSL AES and
 *    aes_cmac_key_derive()
 *  - GCM (GALOIS_128/64): compute the hash key H = E_K(0^128)
 *  - default: HMAC ipad/opad partial hashes
 * Returns 0 on success, negative on failure (error paths partially elided
 * in this excerpt).
 */
1189 static int qat_sym_do_precomputes(enum icp_qat_hw_auth_algo hash_alg,
1190 const uint8_t *auth_key,
1191 uint16_t auth_keylen,
1192 uint8_t *p_state_buf,
1193 uint16_t *p_state_len,
/* pads sized for the largest supported hash block (DELIMITER = max) */
1197 uint8_t ipad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1198 uint8_t opad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1201 if (hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC) {
/* AES-CMAC branch: K0 = E_K(0), then derive K1/K2 by RFC 4493 shifts */
1207 uint8_t k0[ICP_QAT_HW_AES_128_KEY_SZ];
1210 auth_keylen = ICP_QAT_HW_AES_128_KEY_SZ;
/* 16-byte-aligned scratch; rte_zmalloc zero-fills */
1212 in = rte_zmalloc("AES CMAC K1",
1213 ICP_QAT_HW_AES_128_KEY_SZ, 16);
1216 QAT_LOG(ERR, "Failed to alloc memory");
1220 rte_memcpy(in, AES_CMAC_SEED,
1221 ICP_QAT_HW_AES_128_KEY_SZ);
/* raw key is placed first; derived K1/K2 follow it in the state buffer */
1222 rte_memcpy(p_state_buf, auth_key, auth_keylen);
/* keylen << 3: OpenSSL expects the key size in bits */
1224 if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1230 AES_encrypt(in, k0, &enc_key);
1232 k1 = p_state_buf + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1233 k2 = k1 + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1235 aes_cmac_key_derive(k0, k1);
1236 aes_cmac_key_derive(k1, k2);
/* scrub the intermediate K0 before returning */
1238 memset(k0, 0, ICP_QAT_HW_AES_128_KEY_SZ);
1239 *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
/* XCBC (non-CMAC) branch: K1/K2/K3 = E_K(0x01..), E_K(0x02..), E_K(0x03..) */
1243 static uint8_t qat_aes_xcbc_key_seed[
1244 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ] = {
1245 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1246 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1247 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1248 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1249 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1250 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1254 uint8_t *out = p_state_buf;
1258 in = rte_zmalloc("working mem for key",
1259 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ, 16);
1261 QAT_LOG(ERR, "Failed to alloc memory");
1265 rte_memcpy(in, qat_aes_xcbc_key_seed,
1266 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1267 for (x = 0; x < HASH_XCBC_PRECOMP_KEY_NUM; x++) {
1268 if (AES_set_encrypt_key(auth_key,
/* on key-schedule failure the already-produced output is wiped
 * (error-return lines elided in this excerpt)
 */
1272 (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ));
1274 (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ),
1275 0, ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1278 AES_encrypt(in, out, &enc_key);
1279 in += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1280 out += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1282 *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
/* 'in' was advanced x times inside the loop; rewind to free the base ptr */
1283 rte_free(in - x*ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ);
1287 } else if ((hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_128) ||
1288 (hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_64)) {
/* GCM branch: hash key H = AES-encrypt of the all-zero block */
1290 uint8_t *out = p_state_buf;
1293 memset(p_state_buf, 0, ICP_QAT_HW_GALOIS_H_SZ +
1294 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1295 ICP_QAT_HW_GALOIS_E_CTR0_SZ);
1296 in = rte_zmalloc("working mem for key",
1297 ICP_QAT_HW_GALOIS_H_SZ, 16);
1299 QAT_LOG(ERR, "Failed to alloc memory");
1303 memset(in, 0, ICP_QAT_HW_GALOIS_H_SZ);
1304 if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1308 AES_encrypt(in, out, &enc_key);
1309 *p_state_len = ICP_QAT_HW_GALOIS_H_SZ +
1310 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1311 ICP_QAT_HW_GALOIS_E_CTR0_SZ;
/* default branch: HMAC precompute via ipad/opad partial hashes */
1316 block_size = qat_hash_get_block_size(hash_alg);
1319 /* init ipad and opad from key and xor with fixed values */
1320 memset(ipad, 0, block_size);
1321 memset(opad, 0, block_size);
/* HMAC keys longer than the block are rejected here, not pre-hashed */
1323 if (auth_keylen > (unsigned int)block_size) {
1324 QAT_LOG(ERR, "invalid keylen %u", auth_keylen);
1327 rte_memcpy(ipad, auth_key, auth_keylen);
1328 rte_memcpy(opad, auth_key, auth_keylen);
1330 for (i = 0; i < block_size; i++) {
1331 uint8_t *ipad_ptr = ipad + i;
1332 uint8_t *opad_ptr = opad + i;
1333 *ipad_ptr ^= HMAC_IPAD_VALUE;
1334 *opad_ptr ^= HMAC_OPAD_VALUE;
1337 /* do partial hash of ipad and copy to state1 */
1338 if (partial_hash_compute(hash_alg, ipad, p_state_buf)) {
/* scrub key-derived pads on the error path too */
1339 memset(ipad, 0, block_size);
1340 memset(opad, 0, block_size);
1341 QAT_LOG(ERR, "ipad precompute failed");
1346 * State len is a multiple of 8, so may be larger than the digest.
1347 * Put the partial hash of opad state_len bytes after state1
1349 *p_state_len = qat_hash_get_state1_size(hash_alg);
1350 if (partial_hash_compute(hash_alg, opad, p_state_buf + *p_state_len)) {
1351 memset(ipad, 0, block_size);
1352 memset(opad, 0, block_size);
1353 QAT_LOG(ERR, "opad precompute failed");
1357 /* don't leave data lying around */
1358 memset(ipad, 0, block_size);
1359 memset(opad, 0, block_size);
/*
 * Populate the common LA request header in the firmware request template:
 * service type, flat-pointer/64-bit-CD request flags, no-partial processing,
 * 16-byte IV field, and the protocol flag selected by 'proto_flags'
 * (NONE/CCM/GCM/SNOW3G use LA_PROTO_SET; ZUC uses its dedicated flag macro).
 * NOTE(review): the per-case 'break' statements are not visible in this
 * excerpt — confirm against the full source.
 */
1364 qat_sym_session_init_common_hdr(struct icp_qat_fw_comn_req_hdr *header,
1365 enum qat_sym_proto_flag proto_flags)
1368 ICP_QAT_FW_COMN_HDR_FLAGS_BUILD(ICP_QAT_FW_COMN_REQ_FLAG_SET);
1369 header->service_type = ICP_QAT_FW_COMN_REQ_CPM_FW_LA;
1370 header->comn_req_flags =
1371 ICP_QAT_FW_COMN_FLAGS_BUILD(QAT_COMN_CD_FLD_TYPE_64BIT_ADR,
1372 QAT_COMN_PTR_TYPE_FLAT);
1373 ICP_QAT_FW_LA_PARTIAL_SET(header->serv_specif_flags,
1374 ICP_QAT_FW_LA_PARTIAL_NONE);
1375 ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(header->serv_specif_flags,
1376 ICP_QAT_FW_CIPH_IV_16BYTE_DATA);
/* translate the session-level protocol flag into firmware header bits */
1378 switch (proto_flags) {
1379 case QAT_CRYPTO_PROTO_FLAG_NONE:
1380 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1381 ICP_QAT_FW_LA_NO_PROTO);
1383 case QAT_CRYPTO_PROTO_FLAG_CCM:
1384 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1385 ICP_QAT_FW_LA_CCM_PROTO);
1387 case QAT_CRYPTO_PROTO_FLAG_GCM:
1388 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1389 ICP_QAT_FW_LA_GCM_PROTO);
1391 case QAT_CRYPTO_PROTO_FLAG_SNOW3G:
1392 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1393 ICP_QAT_FW_LA_SNOW_3G_PROTO);
1395 case QAT_CRYPTO_PROTO_FLAG_ZUC:
/* ZUC has a dedicated flag field distinct from the generic PROTO bits */
1396 ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(header->serv_specif_flags,
1397 ICP_QAT_FW_LA_ZUC_3G_PROTO);
/* stateless operation: no state update, digest not in buffer */
1401 ICP_QAT_FW_LA_UPDATE_STATE_SET(header->serv_specif_flags,
1402 ICP_QAT_FW_LA_NO_UPDATE_STATE);
1403 ICP_QAT_FW_LA_DIGEST_IN_BUFFER_SET(header->serv_specif_flags,
1404 ICP_QAT_FW_LA_NO_DIGEST_IN_BUFFER);
/*
 * Snow3G and ZUC should never use this function:
 * they set their protocol flag in both the cipher and auth parts of the
 * content-descriptor building functions instead.
 */
/*
 * Map the LA_PROTO bits already present in the header flags back to the
 * session-level qat_sym_proto_flag (GCM/CCM), defaulting to NONE.
 */
1412 static enum qat_sym_proto_flag
1413 qat_get_crypto_proto_flag(uint16_t flags)
1415 int proto = ICP_QAT_FW_LA_PROTO_GET(flags);
1416 enum qat_sym_proto_flag qat_proto_flag =
1417 QAT_CRYPTO_PROTO_FLAG_NONE;
1420 case ICP_QAT_FW_LA_GCM_PROTO:
1421 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
1423 case ICP_QAT_FW_LA_CCM_PROTO:
1424 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
1428 return qat_proto_flag;
/*
 * Build the cipher portion of the QAT content descriptor for the session.
 *
 * cdesc         session being configured (CD memory, command id, alg/mode)
 * cipherkey     raw cipher key
 * cipherkeylen  key length in bytes
 *
 * Sets up slice chaining for CIPHER / CIPHER_HASH commands, decides
 * key-convert behavior, writes the cipher config + key into the CD
 * (with KASUMI F8 key-modifier and 3DES key-replication special cases),
 * and finalizes cd_pars sizes. Returns 0 on success, negative on bad
 * command (error-return lines elided in this excerpt).
 */
1431 int qat_sym_session_aead_create_cd_cipher(struct qat_sym_session *cdesc,
1432 const uint8_t *cipherkey,
1433 uint32_t cipherkeylen)
1435 struct icp_qat_hw_cipher_algo_blk *cipher;
1436 struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
1437 struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
1438 struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
/* cd_ctrl is a union-like area: viewed as cipher or auth ctrl header */
1439 void *ptr = &req_tmpl->cd_ctrl;
1440 struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
1441 struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
1442 enum icp_qat_hw_cipher_convert key_convert;
1443 enum qat_sym_proto_flag qat_proto_flag =
1444 QAT_CRYPTO_PROTO_FLAG_NONE;
1445 uint32_t total_key_size;
1446 uint16_t cipher_offset, cd_size;
1447 uint32_t wordIndex = 0;
1448 uint32_t *temp_key = NULL;
/* cipher-only command: cipher slice then straight to DRAM write-back */
1450 if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER) {
1451 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1452 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1453 ICP_QAT_FW_SLICE_CIPHER);
1454 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1455 ICP_QAT_FW_SLICE_DRAM_WR);
1456 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1457 ICP_QAT_FW_LA_NO_RET_AUTH_RES);
1458 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1459 ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
1460 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
/* cipher-then-hash: chain cipher slice into auth slice */
1461 } else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
1462 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1463 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1464 ICP_QAT_FW_SLICE_CIPHER);
1465 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1466 ICP_QAT_FW_SLICE_AUTH);
1467 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1468 ICP_QAT_FW_SLICE_AUTH);
1469 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1470 ICP_QAT_FW_SLICE_DRAM_WR);
1471 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
/* HASH_CIPHER is configured by the auth-CD builder; anything else is bad */
1472 } else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
1473 QAT_LOG(ERR, "Invalid param, must be a cipher command.");
1477 if (cdesc->qat_mode == ICP_QAT_HW_CIPHER_CTR_MODE) {
1479 * CTR Streaming ciphers are a special case. Decrypt = encrypt
1480 * Overriding default values previously set
1482 cdesc->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
1483 key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
1484 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2
1485 || cdesc->qat_cipher_alg ==
1486 ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)
1487 key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
/* block ciphers: encrypt uses the raw key, decrypt needs key conversion */
1488 else if (cdesc->qat_dir == ICP_QAT_HW_CIPHER_ENCRYPT)
1489 key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
1491 key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
/* per-algorithm key/state sizing and protocol flag selection */
1493 if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
1494 total_key_size = ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ +
1495 ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
1496 cipher_cd_ctrl->cipher_state_sz =
1497 ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
1498 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
1500 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
1501 total_key_size = ICP_QAT_HW_KASUMI_F8_KEY_SZ;
1502 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_KASUMI_BLK_SZ >> 3;
1503 cipher_cd_ctrl->cipher_padding_sz =
1504 (2 * ICP_QAT_HW_KASUMI_BLK_SZ) >> 3;
1505 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES) {
1506 total_key_size = ICP_QAT_HW_3DES_KEY_SZ;
1507 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_3DES_BLK_SZ >> 3;
/* keep whatever proto (CCM/GCM) an earlier auth setup already chose */
1509 qat_get_crypto_proto_flag(header->serv_specif_flags);
1510 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_DES) {
1511 total_key_size = ICP_QAT_HW_DES_KEY_SZ;
1512 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_DES_BLK_SZ >> 3;
1514 qat_get_crypto_proto_flag(header->serv_specif_flags);
1515 } else if (cdesc->qat_cipher_alg ==
1516 ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
1517 total_key_size = ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ +
1518 ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
1519 cipher_cd_ctrl->cipher_state_sz =
1520 ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
1521 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
/* ZUC requires at least a GEN2 QAT device */
1522 cdesc->min_qat_dev_gen = QAT_GEN2;
1524 total_key_size = cipherkeylen;
1525 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_AES_BLK_SZ >> 3;
1527 qat_get_crypto_proto_flag(header->serv_specif_flags);
/* firmware fields are expressed in 8-byte (quad-word) units, hence >> 3 */
1529 cipher_cd_ctrl->cipher_key_sz = total_key_size >> 3;
1530 cipher_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
1531 cipher_cd_ctrl->cipher_cfg_offset = cipher_offset >> 3;
1533 header->service_cmd_id = cdesc->qat_cmd;
1534 qat_sym_session_init_common_hdr(header, qat_proto_flag);
1536 cipher = (struct icp_qat_hw_cipher_algo_blk *)cdesc->cd_cur_ptr;
1537 cipher->cipher_config.val =
1538 ICP_QAT_HW_CIPHER_CONFIG_BUILD(cdesc->qat_mode,
1539 cdesc->qat_cipher_alg, key_convert,
/* KASUMI F8 stores the key twice: raw, then XOR-ed with the modifier */
1542 if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
1543 temp_key = (uint32_t *)(cdesc->cd_cur_ptr +
1544 sizeof(struct icp_qat_hw_cipher_config)
1546 memcpy(cipher->key, cipherkey, cipherkeylen);
1547 memcpy(temp_key, cipherkey, cipherkeylen);
1549 /* XOR Key with KASUMI F8 key modifier at 4 bytes level */
1550 for (wordIndex = 0; wordIndex < (cipherkeylen >> 2);
1552 temp_key[wordIndex] ^= KASUMI_F8_KEY_MODIFIER_4_BYTES;
1554 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1555 cipherkeylen + cipherkeylen;
1557 memcpy(cipher->key, cipherkey, cipherkeylen);
1558 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
/* pad the key region up to the size the hardware expects */
1562 if (total_key_size > cipherkeylen) {
1563 uint32_t padding_size = total_key_size-cipherkeylen;
1564 if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
1565 && (cipherkeylen == QAT_3DES_KEY_SZ_OPT2)) {
1566 /* K3 not provided so use K1 = K3*/
1567 memcpy(cdesc->cd_cur_ptr, cipherkey, padding_size);
1568 } else if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
1569 && (cipherkeylen == QAT_3DES_KEY_SZ_OPT3)) {
1570 /* K2 and K3 not provided so use K1 = K2 = K3*/
1571 memcpy(cdesc->cd_cur_ptr, cipherkey,
1573 memcpy(cdesc->cd_cur_ptr+cipherkeylen,
1574 cipherkey, cipherkeylen);
1576 memset(cdesc->cd_cur_ptr, 0, padding_size);
1578 cdesc->cd_cur_ptr += padding_size;
/* CD params size is in 8-byte units, rounded up */
1580 cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
1581 cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;
/*
 * Build the auth portion of the QAT content descriptor for the session.
 *
 * cdesc       session being configured
 * authkey     raw authentication key
 * authkeylen  key length in bytes
 * aad_length  additional-authenticated-data length (GCM/CCM)
 * digestsize  digest length in bytes
 * operation   RTE_CRYPTO_AUTH_OP_GENERATE or _VERIFY
 *
 * Configures slice chaining for AUTH / HASH_CIPHER commands, writes the
 * inner hash setup, runs qat_sym_do_precomputes() for HMAC/XCBC/GCM
 * algorithms, handles per-algorithm state layouts (SNOW3G, ZUC, CCM,
 * KASUMI F9), then fills the auth CD control fields. Returns 0 on
 * success, negative on error (error-return lines elided in this excerpt).
 */
1586 int qat_sym_session_aead_create_cd_auth(struct qat_sym_session *cdesc,
1587 const uint8_t *authkey,
1588 uint32_t authkeylen,
1589 uint32_t aad_length,
1590 uint32_t digestsize,
1591 unsigned int operation)
1593 struct icp_qat_hw_auth_setup *hash;
1594 struct icp_qat_hw_cipher_algo_blk *cipherconfig;
1595 struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
1596 struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
1597 struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
1598 void *ptr = &req_tmpl->cd_ctrl;
1599 struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
1600 struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
/* hash request parameters live at a fixed offset in serv_specif_rqpars */
1601 struct icp_qat_fw_la_auth_req_params *auth_param =
1602 (struct icp_qat_fw_la_auth_req_params *)
1603 ((char *)&req_tmpl->serv_specif_rqpars +
1604 ICP_QAT_FW_HASH_REQUEST_PARAMETERS_OFFSET);
1605 uint16_t state1_size = 0, state2_size = 0;
1606 uint16_t hash_offset, cd_size;
1607 uint32_t *aad_len = NULL;
1608 uint32_t wordIndex = 0;
1610 enum qat_sym_proto_flag qat_proto_flag =
1611 QAT_CRYPTO_PROTO_FLAG_NONE;
/* auth-only command: auth slice straight to DRAM write-back */
1613 if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_AUTH) {
1614 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1615 ICP_QAT_FW_SLICE_AUTH);
1616 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1617 ICP_QAT_FW_SLICE_DRAM_WR);
1618 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
/* hash-then-cipher: chain auth slice into cipher slice */
1619 } else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
1620 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1621 ICP_QAT_FW_SLICE_AUTH);
1622 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1623 ICP_QAT_FW_SLICE_CIPHER);
1624 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1625 ICP_QAT_FW_SLICE_CIPHER);
1626 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1627 ICP_QAT_FW_SLICE_DRAM_WR);
1628 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
/* CIPHER_HASH chaining is done by the cipher-CD builder */
1629 } else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
1630 QAT_LOG(ERR, "Invalid param, must be a hash command.");
/* verify: compare digest in hardware; generate: return digest */
1634 if (operation == RTE_CRYPTO_AUTH_OP_VERIFY) {
1635 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1636 ICP_QAT_FW_LA_NO_RET_AUTH_RES);
1637 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1638 ICP_QAT_FW_LA_CMP_AUTH_RES);
1639 cdesc->auth_op = ICP_QAT_HW_AUTH_VERIFY;
1641 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1642 ICP_QAT_FW_LA_RET_AUTH_RES);
1643 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1644 ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
1645 cdesc->auth_op = ICP_QAT_HW_AUTH_GENERATE;
1649 * Setup the inner hash config
1651 hash_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
1652 hash = (struct icp_qat_hw_auth_setup *)cdesc->cd_cur_ptr;
1653 hash->auth_config.reserved = 0;
1654 hash->auth_config.config =
1655 ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE1,
1656 cdesc->qat_hash_alg, digestsize);
/* non-HMAC algorithms don't use the block-size counter */
1658 if (cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2
1659 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_KASUMI_F9
1660 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3
1661 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC
1662 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC
1663 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_NULL
1665 hash->auth_counter.counter = 0;
1667 int block_size = qat_hash_get_block_size(cdesc->qat_hash_alg);
/* counter is stored big-endian for the firmware */
1671 hash->auth_counter.counter = rte_bswap32(block_size);
1674 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_auth_setup);
1677 * cd_cur_ptr now points at the state1 information.
1679 switch (cdesc->qat_hash_alg) {
1680 case ICP_QAT_HW_AUTH_ALGO_SHA1:
1681 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA1, authkey,
1682 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1684 QAT_LOG(ERR, "(SHA)precompute failed");
/* SHA1 state2 must be padded to an 8-byte boundary */
1687 state2_size = RTE_ALIGN_CEIL(ICP_QAT_HW_SHA1_STATE2_SZ, 8);
1689 case ICP_QAT_HW_AUTH_ALGO_SHA224:
1690 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA224, authkey,
1691 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1693 QAT_LOG(ERR, "(SHA)precompute failed");
1696 state2_size = ICP_QAT_HW_SHA224_STATE2_SZ;
1698 case ICP_QAT_HW_AUTH_ALGO_SHA256:
1699 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA256, authkey,
1700 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1702 QAT_LOG(ERR, "(SHA)precompute failed");
1705 state2_size = ICP_QAT_HW_SHA256_STATE2_SZ;
1707 case ICP_QAT_HW_AUTH_ALGO_SHA384:
1708 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA384, authkey,
1709 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1711 QAT_LOG(ERR, "(SHA)precompute failed");
1714 state2_size = ICP_QAT_HW_SHA384_STATE2_SZ;
1716 case ICP_QAT_HW_AUTH_ALGO_SHA512:
1717 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA512, authkey,
1718 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1720 QAT_LOG(ERR, "(SHA)precompute failed");
1723 state2_size = ICP_QAT_HW_SHA512_STATE2_SZ;
1725 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
1726 state1_size = ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
/* CMAC shares the XCBC algo id; it zeroes state1 first */
1728 if (cdesc->aes_cmac)
1729 memset(cdesc->cd_cur_ptr, 0, state1_size);
1730 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC,
1731 authkey, authkeylen, cdesc->cd_cur_ptr + state1_size,
1732 &state2_size, cdesc->aes_cmac)) {
1733 cdesc->aes_cmac ? QAT_LOG(ERR,
1734 "(CMAC)precompute failed")
1736 "(XCBC)precompute failed");
1740 case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
1741 case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
1742 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
1743 state1_size = ICP_QAT_HW_GALOIS_128_STATE1_SZ;
1744 if (qat_sym_do_precomputes(cdesc->qat_hash_alg, authkey,
1745 authkeylen, cdesc->cd_cur_ptr + state1_size,
1746 &state2_size, cdesc->aes_cmac)) {
1747 QAT_LOG(ERR, "(GCM)precompute failed");
1751 * Write (the length of AAD) into bytes 16-19 of state2
1752 * in big-endian format. This field is 8 bytes
1754 auth_param->u2.aad_sz =
1755 RTE_ALIGN_CEIL(aad_length, 16);
1756 auth_param->hash_state_sz = (auth_param->u2.aad_sz) >> 3;
1758 aad_len = (uint32_t *)(cdesc->cd_cur_ptr +
1759 ICP_QAT_HW_GALOIS_128_STATE1_SZ +
1760 ICP_QAT_HW_GALOIS_H_SZ);
1761 *aad_len = rte_bswap32(aad_length);
1762 cdesc->aad_len = aad_length;
1764 case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
1765 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
1766 state1_size = qat_hash_get_state1_size(
1767 ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2);
1768 state2_size = ICP_QAT_HW_SNOW_3G_UIA2_STATE2_SZ;
1769 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
/* SNOW3G auth also needs a cipher config + key after the hash state */
1771 cipherconfig = (struct icp_qat_hw_cipher_algo_blk *)
1772 (cdesc->cd_cur_ptr + state1_size + state2_size);
1773 cipherconfig->cipher_config.val =
1774 ICP_QAT_HW_CIPHER_CONFIG_BUILD(ICP_QAT_HW_CIPHER_ECB_MODE,
1775 ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2,
1776 ICP_QAT_HW_CIPHER_KEY_CONVERT,
1777 ICP_QAT_HW_CIPHER_ENCRYPT);
1778 memcpy(cipherconfig->key, authkey, authkeylen);
1779 memset(cipherconfig->key + authkeylen,
1780 0, ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ);
1781 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1782 authkeylen + ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
1783 auth_param->hash_state_sz = ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
1785 case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
/* ZUC uses MODE0, overriding the MODE1 config written above */
1786 hash->auth_config.config =
1787 ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE0,
1788 cdesc->qat_hash_alg, digestsize);
1789 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
1790 state1_size = qat_hash_get_state1_size(
1791 ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3);
1792 state2_size = ICP_QAT_HW_ZUC_3G_EIA3_STATE2_SZ;
1793 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size
1794 + ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ);
1796 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1797 cdesc->cd_cur_ptr += state1_size + state2_size
1798 + ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
1799 auth_param->hash_state_sz = ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
/* ZUC requires at least a GEN2 QAT device */
1800 cdesc->min_qat_dev_gen = QAT_GEN2;
1803 case ICP_QAT_HW_AUTH_ALGO_MD5:
1804 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_MD5, authkey,
1805 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1807 QAT_LOG(ERR, "(MD5)precompute failed");
1810 state2_size = ICP_QAT_HW_MD5_STATE2_SZ;
1812 case ICP_QAT_HW_AUTH_ALGO_NULL:
1813 state1_size = qat_hash_get_state1_size(
1814 ICP_QAT_HW_AUTH_ALGO_NULL);
1815 state2_size = ICP_QAT_HW_NULL_STATE2_SZ;
1817 case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
/* CCM path: B0 block and AAD length info are accounted into aad_sz */
1818 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
1819 state1_size = qat_hash_get_state1_size(
1820 ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC);
1821 state2_size = ICP_QAT_HW_AES_CBC_MAC_KEY_SZ +
1822 ICP_QAT_HW_AES_CCM_CBC_E_CTR0_SZ;
1824 if (aad_length > 0) {
1825 aad_length += ICP_QAT_HW_CCM_AAD_B0_LEN +
1826 ICP_QAT_HW_CCM_AAD_LEN_INFO;
1827 auth_param->u2.aad_sz =
1828 RTE_ALIGN_CEIL(aad_length,
1829 ICP_QAT_HW_CCM_AAD_ALIGNMENT);
1831 auth_param->u2.aad_sz = ICP_QAT_HW_CCM_AAD_B0_LEN;
1833 cdesc->aad_len = aad_length;
1834 hash->auth_counter.counter = 0;
1836 hash_cd_ctrl->outer_prefix_sz = digestsize;
1837 auth_param->hash_state_sz = digestsize;
1839 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1841 case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
1842 state1_size = qat_hash_get_state1_size(
1843 ICP_QAT_HW_AUTH_ALGO_KASUMI_F9);
1844 state2_size = ICP_QAT_HW_KASUMI_F9_STATE2_SZ;
1845 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
1846 pTempKey = (uint32_t *)(cdesc->cd_cur_ptr + state1_size
1849 * The Inner Hash Initial State2 block must contain IK
1850 * (Initialisation Key), followed by IK XOR-ed with KM
1851 * (Key Modifier): IK||(IK^KM).
1853 /* write the auth key */
1854 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1855 /* initialise temp key with auth key */
1856 memcpy(pTempKey, authkey, authkeylen);
1857 /* XOR Key with KASUMI F9 key modifier at 4 bytes level */
1858 for (wordIndex = 0; wordIndex < (authkeylen >> 2); wordIndex++)
1859 pTempKey[wordIndex] ^= KASUMI_F9_KEY_MODIFIER_4_BYTES;
1862 QAT_LOG(ERR, "Invalid HASH alg %u", cdesc->qat_hash_alg);
1866 /* Request template setup */
1867 qat_sym_session_init_common_hdr(header, qat_proto_flag);
1868 header->service_cmd_id = cdesc->qat_cmd;
1870 /* Auth CD config setup */
1871 hash_cd_ctrl->hash_cfg_offset = hash_offset >> 3;
1872 hash_cd_ctrl->hash_flags = ICP_QAT_FW_AUTH_HDR_FLAG_NO_NESTED;
1873 hash_cd_ctrl->inner_res_sz = digestsize;
1874 hash_cd_ctrl->final_sz = digestsize;
1875 hash_cd_ctrl->inner_state1_sz = state1_size;
1876 auth_param->auth_res_sz = digestsize;
1878 hash_cd_ctrl->inner_state2_sz = state2_size;
/* state2 follows the auth setup + 8-byte-aligned state1, in qword units */
1879 hash_cd_ctrl->inner_state2_offset = hash_cd_ctrl->hash_cfg_offset +
1880 ((sizeof(struct icp_qat_hw_auth_setup) +
1881 RTE_ALIGN_CEIL(hash_cd_ctrl->inner_state1_sz, 8))
1884 cdesc->cd_cur_ptr += state1_size + state2_size;
1885 cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
1887 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1888 cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;
/*
 * Map an AES key length to the matching QAT cipher algorithm enum.
 * Returns 0 with *alg set on a valid length; error return for other
 * lengths is elided in this excerpt.
 */
1893 int qat_sym_validate_aes_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1896 case ICP_QAT_HW_AES_128_KEY_SZ:
1897 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
1899 case ICP_QAT_HW_AES_192_KEY_SZ:
1900 *alg = ICP_QAT_HW_CIPHER_ALGO_AES192;
1902 case ICP_QAT_HW_AES_256_KEY_SZ:
1903 *alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
/* Validate an AES-DOCSIS-BPI key length: only AES-128 is accepted here. */
1911 int qat_sym_validate_aes_docsisbpi_key(int key_len,
1912 enum icp_qat_hw_cipher_algo *alg)
1915 case ICP_QAT_HW_AES_128_KEY_SZ:
1916 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
/* Validate a SNOW3G UEA2 key length and select the matching algorithm. */
1924 int qat_sym_validate_snow3g_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1927 case ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ:
1928 *alg = ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2;
/* Validate a KASUMI key length and select the matching algorithm. */
1936 int qat_sym_validate_kasumi_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1939 case ICP_QAT_HW_KASUMI_KEY_SZ:
1940 *alg = ICP_QAT_HW_CIPHER_ALGO_KASUMI;
/* Validate a DES key length and select the matching algorithm. */
1948 int qat_sym_validate_des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1951 case ICP_QAT_HW_DES_KEY_SZ:
1952 *alg = ICP_QAT_HW_CIPHER_ALGO_DES;
/*
 * Validate a 3DES key length: three sizes are accepted (full three-key,
 * two-key, and single-key variants — the short forms are later padded by
 * key replication in qat_sym_session_aead_create_cd_cipher()).
 */
1960 int qat_sym_validate_3des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1963 case QAT_3DES_KEY_SZ_OPT1:
1964 case QAT_3DES_KEY_SZ_OPT2:
1965 case QAT_3DES_KEY_SZ_OPT3:
1966 *alg = ICP_QAT_HW_CIPHER_ALGO_3DES;
1974 int qat_sym_validate_zuc_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1977 case ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ:
1978 *alg = ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3;