/* SPDX-License-Identifier: (BSD-3-Clause OR GPL-2.0)
 * Copyright(c) 2015-2019 Intel Corporation
 */

#include <openssl/sha.h>	/* Needed to calculate pre-compute values */
#include <openssl/aes.h>	/* Needed to calculate pre-compute values */
#include <openssl/md5.h>	/* Needed to calculate pre-compute values */
#include <openssl/evp.h>	/* Needed for bpi runt block processing */

#include <rte_memcpy.h>
#include <rte_common.h>
#include <rte_spinlock.h>
#include <rte_byteorder.h>
#include <rte_log.h>
#include <rte_malloc.h>
#include <rte_crypto_sym.h>

#include "qat_logs.h"
#include "qat_sym_session.h"
#include "qat_sym_pmd.h"
/** Frees a context previously created
 *  Depends on openssl libcrypto
 */
static void
bpi_cipher_ctx_free(void *bpi_ctx)
{
	if (bpi_ctx != NULL)
		EVP_CIPHER_CTX_free((EVP_CIPHER_CTX *)bpi_ctx);
}
/** Creates a context in either AES or DES in ECB mode
 *  Depends on openssl libcrypto
 */
static int
bpi_cipher_ctx_init(enum rte_crypto_cipher_algorithm cryptodev_algo,
		enum rte_crypto_cipher_operation direction __rte_unused,
		const uint8_t *key, void **ctx)
{
	const EVP_CIPHER *algo = NULL;
	int ret;

	*ctx = EVP_CIPHER_CTX_new();
	if (*ctx == NULL) {
		ret = -ENOMEM;
		goto ctx_init_err;
	}

	if (cryptodev_algo == RTE_CRYPTO_CIPHER_DES_DOCSISBPI)
		algo = EVP_des_ecb();
	else
		algo = EVP_aes_128_ecb();

	/* IV will be ECB encrypted whether direction is encrypt or decrypt */
	if (EVP_EncryptInit_ex(*ctx, algo, NULL, key, NULL) != 1) {
		ret = -EINVAL;
		goto ctx_init_err;
	}

	return 0;

ctx_init_err:
	if (*ctx != NULL) {
		EVP_CIPHER_CTX_free(*ctx);
		*ctx = NULL;
	}
	return ret;
}
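/*
 * A minimal usage sketch (hypothetical key value) showing the expected
 * pairing of the two helpers above:
 *
 *	void *bpi_ctx = NULL;
 *	uint8_t key[ICP_QAT_HW_AES_128_KEY_SZ] = { 0 };
 *
 *	if (bpi_cipher_ctx_init(RTE_CRYPTO_CIPHER_AES_DOCSISBPI,
 *			RTE_CRYPTO_CIPHER_OP_ENCRYPT, key, &bpi_ctx) == 0)
 *		bpi_cipher_ctx_free(bpi_ctx);
 */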
static int
qat_is_cipher_alg_supported(enum rte_crypto_cipher_algorithm algo,
		struct qat_sym_dev_private *internals)
{
	int i = 0;
	const struct rte_cryptodev_capabilities *capability;

	while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
			RTE_CRYPTO_OP_TYPE_UNDEFINED) {
		if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
			continue;

		if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_CIPHER)
			continue;

		if (capability->sym.cipher.algo == algo)
			return 1;
	}
	return 0;
}
static int
qat_is_auth_alg_supported(enum rte_crypto_auth_algorithm algo,
		struct qat_sym_dev_private *internals)
{
	int i = 0;
	const struct rte_cryptodev_capabilities *capability;

	while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
			RTE_CRYPTO_OP_TYPE_UNDEFINED) {
		if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
			continue;

		if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_AUTH)
			continue;

		if (capability->sym.auth.algo == algo)
			return 1;
	}
	return 0;
}
void
qat_sym_session_clear(struct rte_cryptodev *dev,
		struct rte_cryptodev_sym_session *sess)
{
	uint8_t index = dev->driver_id;
	void *sess_priv = get_sym_session_private_data(sess, index);
	struct qat_sym_session *s = (struct qat_sym_session *)sess_priv;

	if (sess_priv) {
		if (s->bpi_ctx)
			bpi_cipher_ctx_free(s->bpi_ctx);
		memset(s, 0, qat_sym_session_get_private_size(dev));
		struct rte_mempool *sess_mp = rte_mempool_from_obj(sess_priv);

		set_sym_session_private_data(sess, index, NULL);
		rte_mempool_put(sess_mp, sess_priv);
	}
}
static int
qat_get_cmd_id(const struct rte_crypto_sym_xform *xform)
{
	/* Cipher Only */
	if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER && xform->next == NULL)
		return ICP_QAT_FW_LA_CMD_CIPHER;

	/* Authentication Only */
	if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH && xform->next == NULL)
		return ICP_QAT_FW_LA_CMD_AUTH;

	/* AEAD */
	if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
		/* AES-GCM and AES-CCM work in opposite orders:
		 * GCM first encrypts and then generates the hash, whereas
		 * AES-CCM first generates the hash and then encrypts.
		 * A similar relation applies to decryption.
		 */
		if (xform->aead.op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
			if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
				return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
			else
				return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
		} else {
			if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
				return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
			else
				return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
		}
	}

	if (xform->next == NULL)
		return -1;

	/* Cipher then Authenticate */
	if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
			xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
		return ICP_QAT_FW_LA_CMD_CIPHER_HASH;

	/* Authenticate then Cipher */
	if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH &&
			xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
		return ICP_QAT_FW_LA_CMD_HASH_CIPHER;

	return -1;
}
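/*
 * Example mappings implied by the checks above: a lone cipher xform maps to
 * ICP_QAT_FW_LA_CMD_CIPHER; cipher chained to auth maps to
 * ICP_QAT_FW_LA_CMD_CIPHER_HASH; auth chained to cipher maps to
 * ICP_QAT_FW_LA_CMD_HASH_CIPHER. For AEAD the order flips with direction,
 * e.g. AES-GCM encrypt -> CIPHER_HASH but AES-GCM decrypt -> HASH_CIPHER.
 */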
static struct rte_crypto_auth_xform *
qat_get_auth_xform(struct rte_crypto_sym_xform *xform)
{
	do {
		if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH)
			return &xform->auth;

		xform = xform->next;
	} while (xform);

	return NULL;
}
static struct rte_crypto_cipher_xform *
qat_get_cipher_xform(struct rte_crypto_sym_xform *xform)
{
	do {
		if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
			return &xform->cipher;

		xform = xform->next;
	} while (xform);

	return NULL;
}
static int
qat_sym_session_configure_cipher(struct rte_cryptodev *dev,
		struct rte_crypto_sym_xform *xform,
		struct qat_sym_session *session)
{
	struct qat_sym_dev_private *internals = dev->data->dev_private;
	struct rte_crypto_cipher_xform *cipher_xform = NULL;
	int ret;

	/* Get cipher xform from crypto xform chain */
	cipher_xform = qat_get_cipher_xform(xform);

	session->cipher_iv.offset = cipher_xform->iv.offset;
	session->cipher_iv.length = cipher_xform->iv.length;

	switch (cipher_xform->algo) {
	case RTE_CRYPTO_CIPHER_AES_CBC:
		if (qat_sym_validate_aes_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_AES_CTR:
		if (qat_sym_validate_aes_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		break;
	case RTE_CRYPTO_CIPHER_SNOW3G_UEA2:
		if (qat_sym_validate_snow3g_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid SNOW 3G cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
		break;
	case RTE_CRYPTO_CIPHER_NULL:
		session->qat_cipher_alg = ICP_QAT_HW_CIPHER_ALGO_NULL;
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		break;
	case RTE_CRYPTO_CIPHER_KASUMI_F8:
		if (qat_sym_validate_kasumi_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid KASUMI cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_F8_MODE;
		break;
	case RTE_CRYPTO_CIPHER_3DES_CBC:
		if (qat_sym_validate_3des_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid 3DES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_DES_CBC:
		if (qat_sym_validate_des_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid DES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_3DES_CTR:
		if (qat_sym_validate_3des_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid 3DES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		break;
	case RTE_CRYPTO_CIPHER_DES_DOCSISBPI:
		ret = bpi_cipher_ctx_init(
					cipher_xform->algo,
					cipher_xform->op,
					cipher_xform->key.data,
					&session->bpi_ctx);
		if (ret != 0) {
			QAT_LOG(ERR, "failed to create DES BPI ctx");
			goto error_out;
		}
		if (qat_sym_validate_des_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid DES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_AES_DOCSISBPI:
		ret = bpi_cipher_ctx_init(
					cipher_xform->algo,
					cipher_xform->op,
					cipher_xform->key.data,
					&session->bpi_ctx);
		if (ret != 0) {
			QAT_LOG(ERR, "failed to create AES BPI ctx");
			goto error_out;
		}
		if (qat_sym_validate_aes_docsisbpi_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES DOCSISBPI key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_ZUC_EEA3:
		if (!qat_is_cipher_alg_supported(
			cipher_xform->algo, internals)) {
			QAT_LOG(ERR, "%s not supported on this device",
				rte_crypto_cipher_algorithm_strings
					[cipher_xform->algo]);
			ret = -ENOTSUP;
			goto error_out;
		}
		if (qat_sym_validate_zuc_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid ZUC cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
		break;
	case RTE_CRYPTO_CIPHER_AES_XTS:
		if ((cipher_xform->key.length/2) == ICP_QAT_HW_AES_192_KEY_SZ) {
			QAT_LOG(ERR, "AES-XTS-192 not supported");
			ret = -EINVAL;
			goto error_out;
		}
		if (qat_sym_validate_aes_key((cipher_xform->key.length/2),
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES-XTS cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_XTS_MODE;
		break;
	case RTE_CRYPTO_CIPHER_3DES_ECB:
	case RTE_CRYPTO_CIPHER_AES_ECB:
	case RTE_CRYPTO_CIPHER_AES_F8:
	case RTE_CRYPTO_CIPHER_ARC4:
		QAT_LOG(ERR, "Crypto QAT PMD: Unsupported Cipher alg %u",
				cipher_xform->algo);
		ret = -ENOTSUP;
		goto error_out;
	default:
		QAT_LOG(ERR, "Crypto: Undefined Cipher specified %u",
				cipher_xform->algo);
		ret = -EINVAL;
		goto error_out;
	}

	if (cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
		session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
	else
		session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;

	if (qat_sym_session_aead_create_cd_cipher(session,
						cipher_xform->key.data,
						cipher_xform->key.length)) {
		ret = -EINVAL;
		goto error_out;
	}

	return 0;

error_out:
	if (session->bpi_ctx) {
		bpi_cipher_ctx_free(session->bpi_ctx);
		session->bpi_ctx = NULL;
	}
	return ret;
}
int
qat_sym_session_configure(struct rte_cryptodev *dev,
		struct rte_crypto_sym_xform *xform,
		struct rte_cryptodev_sym_session *sess,
		struct rte_mempool *mempool)
{
	void *sess_private_data;
	int ret;

	if (rte_mempool_get(mempool, &sess_private_data)) {
		CDEV_LOG_ERR(
			"Couldn't get object from session mempool");
		return -ENOMEM;
	}

	ret = qat_sym_session_set_parameters(dev, xform, sess_private_data);
	if (ret != 0) {
		QAT_LOG(ERR,
		    "Crypto QAT PMD: failed to configure session parameters");

		/* Return session to mempool */
		rte_mempool_put(mempool, sess_private_data);
		return ret;
	}

	set_sym_session_private_data(sess, dev->driver_id,
		sess_private_data);

	return 0;
}
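/*
 * Session lifecycle note: qat_sym_session_configure() takes the private
 * data object from the caller's mempool and attaches it to the generic
 * session; qat_sym_session_clear() above is its inverse, zeroing the
 * private data and returning it to the mempool it came from.
 */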
static void
qat_sym_session_set_ext_hash_flags(struct qat_sym_session *session,
		uint8_t hash_flag)
{
	struct icp_qat_fw_comn_req_hdr *header = &session->fw_req.comn_hdr;
	struct icp_qat_fw_cipher_auth_cd_ctrl_hdr *cd_ctrl =
			(struct icp_qat_fw_cipher_auth_cd_ctrl_hdr *)
			session->fw_req.cd_ctrl.content_desc_ctrl_lw;

	/* Set the Use Extended Protocol Flags bit in LW 1 */
	QAT_FIELD_SET(header->comn_req_flags,
			QAT_COMN_EXT_FLAGS_USED,
			QAT_COMN_EXT_FLAGS_BITPOS,
			QAT_COMN_EXT_FLAGS_MASK);

	/* Set Hash Flags in LW 28 */
	cd_ctrl->hash_flags |= hash_flag;

	/* Set proto flags in LW 1 */
	switch (session->qat_cipher_alg) {
	case ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_SNOW_3G_PROTO);
		ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
				header->serv_specif_flags, 0);
		break;
	case ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_NO_PROTO);
		ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
				header->serv_specif_flags,
				ICP_QAT_FW_LA_ZUC_3G_PROTO);
		break;
	default:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_NO_PROTO);
		ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
				header->serv_specif_flags, 0);
		break;
	}
}
static void
qat_sym_session_handle_mixed(struct qat_sym_session *session)
{
	if (session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3 &&
			session->qat_cipher_alg !=
			ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
		session->min_qat_dev_gen = QAT_GEN3;
		qat_sym_session_set_ext_hash_flags(session,
			1 << ICP_QAT_FW_AUTH_HDR_FLAG_ZUC_EIA3_BITPOS);
	} else if (session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2 &&
			session->qat_cipher_alg !=
			ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
		session->min_qat_dev_gen = QAT_GEN3;
		qat_sym_session_set_ext_hash_flags(session,
			1 << ICP_QAT_FW_AUTH_HDR_FLAG_SNOW3G_UIA2_BITPOS);
	} else if ((session->aes_cmac ||
			session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_NULL) &&
			(session->qat_cipher_alg ==
			ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2 ||
			session->qat_cipher_alg ==
			ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)) {
		session->min_qat_dev_gen = QAT_GEN3;
		qat_sym_session_set_ext_hash_flags(session, 0);
	}
}
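/*
 * "Mixed" here means a wireless hash (SNOW 3G UIA2 / ZUC EIA3) combined
 * with a different cipher, or a wireless cipher combined with AES-CMAC or
 * a NULL hash. Such combinations need the extended protocol flags, which
 * is why each branch above raises min_qat_dev_gen to QAT_GEN3 before
 * setting them.
 */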
int
qat_sym_session_set_parameters(struct rte_cryptodev *dev,
		struct rte_crypto_sym_xform *xform, void *session_private)
{
	struct qat_sym_session *session = session_private;
	int ret;
	int qat_cmd_id;

	/* Set context descriptor physical address */
	session->cd_paddr = rte_mempool_virt2iova(session) +
			offsetof(struct qat_sym_session, cd);

	session->min_qat_dev_gen = QAT_GEN1;

	/* Get requested QAT command id */
	qat_cmd_id = qat_get_cmd_id(xform);
	if (qat_cmd_id < 0 || qat_cmd_id >= ICP_QAT_FW_LA_CMD_DELIMITER) {
		QAT_LOG(ERR, "Unsupported xform chain requested");
		return -ENOTSUP;
	}
	session->qat_cmd = (enum icp_qat_fw_la_cmd_id)qat_cmd_id;
	switch (session->qat_cmd) {
	case ICP_QAT_FW_LA_CMD_CIPHER:
		ret = qat_sym_session_configure_cipher(dev, xform, session);
		if (ret < 0)
			return ret;
		break;
	case ICP_QAT_FW_LA_CMD_AUTH:
		ret = qat_sym_session_configure_auth(dev, xform, session);
		if (ret < 0)
			return ret;
		break;
	case ICP_QAT_FW_LA_CMD_CIPHER_HASH:
		if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
			ret = qat_sym_session_configure_aead(dev, xform,
					session);
			if (ret < 0)
				return ret;
		} else {
			ret = qat_sym_session_configure_cipher(dev,
					xform, session);
			if (ret < 0)
				return ret;
			ret = qat_sym_session_configure_auth(dev,
					xform, session);
			if (ret < 0)
				return ret;
			/* Special handling of mixed hash+cipher algorithms */
			qat_sym_session_handle_mixed(session);
		}
		break;
	case ICP_QAT_FW_LA_CMD_HASH_CIPHER:
		if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
			ret = qat_sym_session_configure_aead(dev, xform,
					session);
			if (ret < 0)
				return ret;
		} else {
			ret = qat_sym_session_configure_auth(dev,
					xform, session);
			if (ret < 0)
				return ret;
			ret = qat_sym_session_configure_cipher(dev,
					xform, session);
			if (ret < 0)
				return ret;
			/* Special handling of mixed hash+cipher algorithms */
			qat_sym_session_handle_mixed(session);
		}
		break;
	case ICP_QAT_FW_LA_CMD_TRNG_GET_RANDOM:
	case ICP_QAT_FW_LA_CMD_TRNG_TEST:
	case ICP_QAT_FW_LA_CMD_SSL3_KEY_DERIVE:
	case ICP_QAT_FW_LA_CMD_TLS_V1_1_KEY_DERIVE:
	case ICP_QAT_FW_LA_CMD_TLS_V1_2_KEY_DERIVE:
	case ICP_QAT_FW_LA_CMD_MGF1:
	case ICP_QAT_FW_LA_CMD_AUTH_PRE_COMP:
	case ICP_QAT_FW_LA_CMD_CIPHER_PRE_COMP:
	case ICP_QAT_FW_LA_CMD_DELIMITER:
		QAT_LOG(ERR, "Unsupported Service %u",
				session->qat_cmd);
		return -ENOTSUP;
	default:
		QAT_LOG(ERR, "Unsupported Service %u",
				session->qat_cmd);
		return -ENOTSUP;
	}

	return 0;
}
static int
qat_sym_session_handle_single_pass(struct qat_sym_dev_private *internals,
		struct qat_sym_session *session,
		struct rte_crypto_aead_xform *aead_xform)
{
	enum qat_device_gen qat_dev_gen = internals->qat_dev->qat_dev_gen;

	if (qat_dev_gen == QAT_GEN3 &&
			aead_xform->iv.length == QAT_AES_GCM_SPC_IV_SIZE) {
		/* Use faster Single-Pass GCM */
		struct icp_qat_fw_la_cipher_req_params *cipher_param =
				(void *) &session->fw_req.serv_specif_rqpars;

		session->is_single_pass = 1;
		session->min_qat_dev_gen = QAT_GEN3;
		session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER;
		session->qat_mode = ICP_QAT_HW_CIPHER_AEAD_MODE;
		session->cipher_iv.offset = aead_xform->iv.offset;
		session->cipher_iv.length = aead_xform->iv.length;
		if (qat_sym_session_aead_create_cd_cipher(session,
				aead_xform->key.data, aead_xform->key.length))
			return -EINVAL;
		session->aad_len = aead_xform->aad_length;
		session->digest_length = aead_xform->digest_length;
		if (aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
			session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
			session->auth_op = ICP_QAT_HW_AUTH_GENERATE;
			ICP_QAT_FW_LA_RET_AUTH_SET(
				session->fw_req.comn_hdr.serv_specif_flags,
				ICP_QAT_FW_LA_RET_AUTH_RES);
		} else {
			session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
			session->auth_op = ICP_QAT_HW_AUTH_VERIFY;
			ICP_QAT_FW_LA_CMP_AUTH_SET(
				session->fw_req.comn_hdr.serv_specif_flags,
				ICP_QAT_FW_LA_CMP_AUTH_RES);
		}
		ICP_QAT_FW_LA_SINGLE_PASS_PROTO_FLAG_SET(
				session->fw_req.comn_hdr.serv_specif_flags,
				ICP_QAT_FW_LA_SINGLE_PASS_PROTO);
		ICP_QAT_FW_LA_PROTO_SET(
				session->fw_req.comn_hdr.serv_specif_flags,
				ICP_QAT_FW_LA_NO_PROTO);
		ICP_QAT_FW_LA_GCM_IV_LEN_FLAG_SET(
				session->fw_req.comn_hdr.serv_specif_flags,
				ICP_QAT_FW_LA_GCM_IV_LEN_12_OCTETS);
		session->fw_req.comn_hdr.service_cmd_id =
				ICP_QAT_FW_LA_CMD_CIPHER;
		session->cd.cipher.cipher_config.val =
				ICP_QAT_HW_CIPHER_CONFIG_BUILD(
					ICP_QAT_HW_CIPHER_AEAD_MODE,
					session->qat_cipher_alg,
					ICP_QAT_HW_CIPHER_NO_CONVERT,
					session->qat_dir);
		QAT_FIELD_SET(session->cd.cipher.cipher_config.val,
				aead_xform->digest_length,
				QAT_CIPHER_AEAD_HASH_CMP_LEN_BITPOS,
				QAT_CIPHER_AEAD_HASH_CMP_LEN_MASK);
		session->cd.cipher.cipher_config.reserved =
				ICP_QAT_HW_CIPHER_CONFIG_BUILD_UPPER(
					aead_xform->aad_length);
		cipher_param->spc_aad_sz = aead_xform->aad_length;
		cipher_param->spc_auth_res_sz = aead_xform->digest_length;
	}
	return 0;
}
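/*
 * Single-pass GCM runs the whole AEAD operation in the cipher slice
 * (ICP_QAT_HW_CIPHER_AEAD_MODE) instead of chaining cipher and auth
 * slices. It is only attempted on GEN3 devices and only for the 12-byte
 * IV case checked above; otherwise the session falls back to the regular
 * chained CIPHER_HASH/HASH_CIPHER path.
 */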
static int
qat_sym_session_configure_auth(struct rte_cryptodev *dev,
				struct rte_crypto_sym_xform *xform,
				struct qat_sym_session *session)
{
	struct rte_crypto_auth_xform *auth_xform = qat_get_auth_xform(xform);
	struct qat_sym_dev_private *internals = dev->data->dev_private;
	const uint8_t *key_data = auth_xform->key.data;
	uint8_t key_length = auth_xform->key.length;

	session->aes_cmac = 0;

	switch (auth_xform->algo) {
	case RTE_CRYPTO_AUTH_SHA1_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA1;
		break;
	case RTE_CRYPTO_AUTH_SHA224_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA224;
		break;
	case RTE_CRYPTO_AUTH_SHA256_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA256;
		break;
	case RTE_CRYPTO_AUTH_SHA384_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA384;
		break;
	case RTE_CRYPTO_AUTH_SHA512_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA512;
		break;
	case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
		break;
	case RTE_CRYPTO_AUTH_AES_CMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
		session->aes_cmac = 1;
		break;
	case RTE_CRYPTO_AUTH_AES_GMAC:
		if (qat_sym_validate_aes_key(auth_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES key size");
			return -EINVAL;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
		break;
	case RTE_CRYPTO_AUTH_SNOW3G_UIA2:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2;
		break;
	case RTE_CRYPTO_AUTH_MD5_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_MD5;
		break;
	case RTE_CRYPTO_AUTH_NULL:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_NULL;
		break;
	case RTE_CRYPTO_AUTH_KASUMI_F9:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_KASUMI_F9;
		break;
	case RTE_CRYPTO_AUTH_ZUC_EIA3:
		if (!qat_is_auth_alg_supported(auth_xform->algo, internals)) {
			QAT_LOG(ERR, "%s not supported on this device",
				rte_crypto_auth_algorithm_strings
				[auth_xform->algo]);
			return -ENOTSUP;
		}
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3;
		break;
	case RTE_CRYPTO_AUTH_SHA1:
	case RTE_CRYPTO_AUTH_SHA256:
	case RTE_CRYPTO_AUTH_SHA512:
	case RTE_CRYPTO_AUTH_SHA224:
	case RTE_CRYPTO_AUTH_SHA384:
	case RTE_CRYPTO_AUTH_MD5:
	case RTE_CRYPTO_AUTH_AES_CBC_MAC:
		QAT_LOG(ERR, "Crypto: Unsupported hash alg %u",
				auth_xform->algo);
		return -ENOTSUP;
	default:
		QAT_LOG(ERR, "Crypto: Undefined Hash algo %u specified",
				auth_xform->algo);
		return -EINVAL;
	}

	session->auth_iv.offset = auth_xform->iv.offset;
	session->auth_iv.length = auth_xform->iv.length;

	if (auth_xform->algo == RTE_CRYPTO_AUTH_AES_GMAC) {
		if (auth_xform->op == RTE_CRYPTO_AUTH_OP_GENERATE) {
			session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER_HASH;
			session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
			/*
			 * It needs to create cipher desc content first,
			 * then authentication
			 */

			if (qat_sym_session_aead_create_cd_cipher(session,
						auth_xform->key.data,
						auth_xform->key.length))
				return -EINVAL;

			if (qat_sym_session_aead_create_cd_auth(session,
						key_data,
						key_length,
						0,
						auth_xform->digest_length,
						auth_xform->op))
				return -EINVAL;
		} else {
			session->qat_cmd = ICP_QAT_FW_LA_CMD_HASH_CIPHER;
			session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
			/*
			 * It needs to create authentication desc content
			 * first, then cipher
			 */

			if (qat_sym_session_aead_create_cd_auth(session,
						key_data,
						key_length,
						0,
						auth_xform->digest_length,
						auth_xform->op))
				return -EINVAL;

			if (qat_sym_session_aead_create_cd_cipher(session,
						auth_xform->key.data,
						auth_xform->key.length))
				return -EINVAL;
		}
		/* Restore to authentication only */
		session->qat_cmd = ICP_QAT_FW_LA_CMD_AUTH;
	} else {
		if (qat_sym_session_aead_create_cd_auth(session,
				key_data,
				key_length,
				0,
				auth_xform->digest_length,
				auth_xform->op))
			return -EINVAL;
	}

	session->digest_length = auth_xform->digest_length;
	return 0;
}
static int
qat_sym_session_configure_aead(struct rte_cryptodev *dev,
				struct rte_crypto_sym_xform *xform,
				struct qat_sym_session *session)
{
	struct rte_crypto_aead_xform *aead_xform = &xform->aead;
	enum rte_crypto_auth_operation crypto_operation;

	/*
	 * Store AEAD IV parameters as cipher IV,
	 * to avoid unnecessary memory usage
	 */
	session->cipher_iv.offset = xform->aead.iv.offset;
	session->cipher_iv.length = xform->aead.iv.length;

	switch (aead_xform->algo) {
	case RTE_CRYPTO_AEAD_AES_GCM:
		if (qat_sym_validate_aes_key(aead_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES key size");
			return -EINVAL;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
		break;
	case RTE_CRYPTO_AEAD_AES_CCM:
		if (qat_sym_validate_aes_key(aead_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES key size");
			return -EINVAL;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC;
		break;
	default:
		QAT_LOG(ERR, "Crypto: Undefined AEAD specified %u",
				aead_xform->algo);
		return -EINVAL;
	}

	session->is_single_pass = 0;
	if (aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) {
		/* Use faster Single-Pass GCM if possible */
		int res = qat_sym_session_handle_single_pass(
				dev->data->dev_private, session, aead_xform);
		if (res < 0)
			return res;
		if (session->is_single_pass)
			return 0;
	}

	if ((aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT &&
			aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) ||
			(aead_xform->op == RTE_CRYPTO_AEAD_OP_DECRYPT &&
			aead_xform->algo == RTE_CRYPTO_AEAD_AES_CCM)) {
		session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
		/*
		 * It needs to create cipher desc content first,
		 * then authentication
		 */
		crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
			RTE_CRYPTO_AUTH_OP_GENERATE : RTE_CRYPTO_AUTH_OP_VERIFY;

		if (qat_sym_session_aead_create_cd_cipher(session,
					aead_xform->key.data,
					aead_xform->key.length))
			return -EINVAL;

		if (qat_sym_session_aead_create_cd_auth(session,
					aead_xform->key.data,
					aead_xform->key.length,
					aead_xform->aad_length,
					aead_xform->digest_length,
					crypto_operation))
			return -EINVAL;
	} else {
		session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
		/*
		 * It needs to create authentication desc content first,
		 * then cipher
		 */
		crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
			RTE_CRYPTO_AUTH_OP_VERIFY : RTE_CRYPTO_AUTH_OP_GENERATE;

		if (qat_sym_session_aead_create_cd_auth(session,
					aead_xform->key.data,
					aead_xform->key.length,
					aead_xform->aad_length,
					aead_xform->digest_length,
					crypto_operation))
			return -EINVAL;

		if (qat_sym_session_aead_create_cd_cipher(session,
					aead_xform->key.data,
					aead_xform->key.length))
			return -EINVAL;
	}

	session->digest_length = aead_xform->digest_length;
	return 0;
}
unsigned int qat_sym_session_get_private_size(
		struct rte_cryptodev *dev __rte_unused)
{
	return RTE_ALIGN_CEIL(sizeof(struct qat_sym_session), 8);
}
/* returns block size in bytes per cipher algo */
int qat_cipher_get_block_size(enum icp_qat_hw_cipher_algo qat_cipher_alg)
{
	switch (qat_cipher_alg) {
	case ICP_QAT_HW_CIPHER_ALGO_DES:
		return ICP_QAT_HW_DES_BLK_SZ;
	case ICP_QAT_HW_CIPHER_ALGO_3DES:
		return ICP_QAT_HW_3DES_BLK_SZ;
	case ICP_QAT_HW_CIPHER_ALGO_AES128:
	case ICP_QAT_HW_CIPHER_ALGO_AES192:
	case ICP_QAT_HW_CIPHER_ALGO_AES256:
		return ICP_QAT_HW_AES_BLK_SZ;
	default:
		QAT_LOG(ERR, "invalid block cipher alg %u", qat_cipher_alg);
		return -EFAULT;
	}
}
/*
 * Returns size in bytes per hash algo for state1 size field in cd_ctrl.
 * This is the digest size rounded up to the nearest quadword.
 */
static int qat_hash_get_state1_size(enum icp_qat_hw_auth_algo qat_hash_alg)
{
	switch (qat_hash_alg) {
	case ICP_QAT_HW_AUTH_ALGO_SHA1:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA1_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_SHA224:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA224_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_SHA256:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA256_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_SHA384:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA384_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_SHA512:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
	case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_GALOIS_128_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_ZUC_3G_EIA3_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_SNOW_3G_UIA2_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_MD5:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_MD5_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_KASUMI_F9_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_CBC_MAC_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_NULL:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_NULL_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
		/* return maximum state1 size in this case */
		return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	default:
		QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
		return -EFAULT;
	}
}
/* returns digest size in bytes per hash algo */
static int qat_hash_get_digest_size(enum icp_qat_hw_auth_algo qat_hash_alg)
{
	switch (qat_hash_alg) {
	case ICP_QAT_HW_AUTH_ALGO_SHA1:
		return ICP_QAT_HW_SHA1_STATE1_SZ;
	case ICP_QAT_HW_AUTH_ALGO_SHA224:
		return ICP_QAT_HW_SHA224_STATE1_SZ;
	case ICP_QAT_HW_AUTH_ALGO_SHA256:
		return ICP_QAT_HW_SHA256_STATE1_SZ;
	case ICP_QAT_HW_AUTH_ALGO_SHA384:
		return ICP_QAT_HW_SHA384_STATE1_SZ;
	case ICP_QAT_HW_AUTH_ALGO_SHA512:
		return ICP_QAT_HW_SHA512_STATE1_SZ;
	case ICP_QAT_HW_AUTH_ALGO_MD5:
		return ICP_QAT_HW_MD5_STATE1_SZ;
	case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
		return ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
	case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
		/* return maximum digest size in this case */
		return ICP_QAT_HW_SHA512_STATE1_SZ;
	default:
		QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
		return -EFAULT;
	}
}
/* returns block size in bytes per hash algo */
static int qat_hash_get_block_size(enum icp_qat_hw_auth_algo qat_hash_alg)
{
	switch (qat_hash_alg) {
	case ICP_QAT_HW_AUTH_ALGO_SHA1:
		return SHA_CBLOCK;
	case ICP_QAT_HW_AUTH_ALGO_SHA224:
		return SHA256_CBLOCK;
	case ICP_QAT_HW_AUTH_ALGO_SHA256:
		return SHA256_CBLOCK;
	case ICP_QAT_HW_AUTH_ALGO_SHA384:
		return SHA512_CBLOCK;
	case ICP_QAT_HW_AUTH_ALGO_SHA512:
		return SHA512_CBLOCK;
	case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
		return 16;
	case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
		return ICP_QAT_HW_AES_BLK_SZ;
	case ICP_QAT_HW_AUTH_ALGO_MD5:
		return MD5_CBLOCK;
	case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
		/* return maximum block size in this case */
		return SHA512_CBLOCK;
	default:
		QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
		return -EFAULT;
	}
}
static int partial_hash_sha1(uint8_t *data_in, uint8_t *data_out)
{
	SHA_CTX ctx;

	if (!SHA1_Init(&ctx))
		return -EFAULT;
	SHA1_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx, SHA_DIGEST_LENGTH);
	return 0;
}

static int partial_hash_sha224(uint8_t *data_in, uint8_t *data_out)
{
	SHA256_CTX ctx;

	if (!SHA224_Init(&ctx))
		return -EFAULT;
	SHA256_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
	return 0;
}

static int partial_hash_sha256(uint8_t *data_in, uint8_t *data_out)
{
	SHA256_CTX ctx;

	if (!SHA256_Init(&ctx))
		return -EFAULT;
	SHA256_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
	return 0;
}

static int partial_hash_sha384(uint8_t *data_in, uint8_t *data_out)
{
	SHA512_CTX ctx;

	if (!SHA384_Init(&ctx))
		return -EFAULT;
	SHA512_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
	return 0;
}

static int partial_hash_sha512(uint8_t *data_in, uint8_t *data_out)
{
	SHA512_CTX ctx;

	if (!SHA512_Init(&ctx))
		return -EFAULT;
	SHA512_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
	return 0;
}

static int partial_hash_md5(uint8_t *data_in, uint8_t *data_out)
{
	MD5_CTX ctx;

	if (!MD5_Init(&ctx))
		return -EFAULT;
	MD5_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx, MD5_DIGEST_LENGTH);
	return 0;
}
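/*
 * Each partial_hash_* helper above runs exactly one compression-function
 * call (SHAx_Transform()/MD5_Transform()) over a single input block and
 * then copies the raw internal state words out of the OpenSSL context.
 * No length padding or finalisation is applied: the QAT firmware wants
 * these intermediate states, not finished digests, for its HMAC
 * precomputes.
 */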
static int partial_hash_compute(enum icp_qat_hw_auth_algo hash_alg,
			uint8_t *data_in,
			uint8_t *data_out)
{
	int digest_size;
	uint8_t digest[qat_hash_get_digest_size(
			ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
	uint32_t *hash_state_out_be32;
	uint64_t *hash_state_out_be64;
	int i;

	digest_size = qat_hash_get_digest_size(hash_alg);
	if (digest_size <= 0)
		return -EFAULT;

	hash_state_out_be32 = (uint32_t *)data_out;
	hash_state_out_be64 = (uint64_t *)data_out;

	switch (hash_alg) {
	case ICP_QAT_HW_AUTH_ALGO_SHA1:
		if (partial_hash_sha1(data_in, digest))
			return -EFAULT;
		for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
			*hash_state_out_be32 =
				rte_bswap32(*(((uint32_t *)digest)+i));
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA224:
		if (partial_hash_sha224(data_in, digest))
			return -EFAULT;
		for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
			*hash_state_out_be32 =
				rte_bswap32(*(((uint32_t *)digest)+i));
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA256:
		if (partial_hash_sha256(data_in, digest))
			return -EFAULT;
		for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
			*hash_state_out_be32 =
				rte_bswap32(*(((uint32_t *)digest)+i));
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA384:
		if (partial_hash_sha384(data_in, digest))
			return -EFAULT;
		for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
			*hash_state_out_be64 =
				rte_bswap64(*(((uint64_t *)digest)+i));
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA512:
		if (partial_hash_sha512(data_in, digest))
			return -EFAULT;
		for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
			*hash_state_out_be64 =
				rte_bswap64(*(((uint64_t *)digest)+i));
		break;
	case ICP_QAT_HW_AUTH_ALGO_MD5:
		if (partial_hash_md5(data_in, data_out))
			return -EFAULT;
		break;
	default:
		QAT_LOG(ERR, "invalid hash alg %u", hash_alg);
		return -EFAULT;
	}

	return 0;
}
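/*
 * The byte swapping above exists because QAT expects the hash state in
 * big-endian word order, while OpenSSL keeps SHA-1/224/256 state as native
 * 32-bit words and SHA-384/512 state as native 64-bit words. MD5 state is
 * defined little-endian, so that branch copies the state unmodified.
 */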
#define HMAC_IPAD_VALUE	0x36
#define HMAC_OPAD_VALUE	0x5c
#define HASH_XCBC_PRECOMP_KEY_NUM 3
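/*
 * HMAC_IPAD_VALUE/HMAC_OPAD_VALUE are the standard inner/outer pad bytes
 * from RFC 2104. HASH_XCBC_PRECOMP_KEY_NUM is the number of derived keys
 * (K1, K2, K3) that AES-XCBC-MAC (RFC 3566) requires.
 */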
static const uint8_t AES_CMAC_SEED[ICP_QAT_HW_AES_128_KEY_SZ];

static void aes_cmac_key_derive(uint8_t *base, uint8_t *derived)
{
	int i;

	derived[0] = base[0] << 1;
	for (i = 1; i < ICP_QAT_HW_AES_BLK_SZ ; i++) {
		derived[i] = base[i] << 1;
		derived[i - 1] |= base[i] >> 7;
	}

	if (base[0] & 0x80)
		derived[ICP_QAT_HW_AES_BLK_SZ - 1] ^= QAT_AES_CMAC_CONST_RB;
}
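/*
 * The loop above is the GF(2^128) "doubling" step of AES-CMAC subkey
 * generation (RFC 4493 / NIST SP 800-38B): shift the 128-bit block left
 * by one bit and, if the bit shifted out was set, XOR the constant
 * Rb (0x87) into the last byte. K1 is derived from AES-128(K, 0^128) and
 * K2 from K1, as done by the CMAC branch of the precompute below.
 */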
static int qat_sym_do_precomputes(enum icp_qat_hw_auth_algo hash_alg,
				const uint8_t *auth_key,
				uint16_t auth_keylen,
				uint8_t *p_state_buf,
				uint16_t *p_state_len,
				uint8_t aes_cmac)
{
	int block_size;
	uint8_t ipad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
	uint8_t opad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
	int i;

	if (hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC) {

		/* CMAC */
		if (aes_cmac) {
			AES_KEY enc_key;
			uint8_t *in = NULL;
			uint8_t k0[ICP_QAT_HW_AES_128_KEY_SZ];
			uint8_t *k1, *k2;

			auth_keylen = ICP_QAT_HW_AES_128_KEY_SZ;

			in = rte_zmalloc("AES CMAC K1",
					 ICP_QAT_HW_AES_128_KEY_SZ, 16);
			if (in == NULL) {
				QAT_LOG(ERR, "Failed to alloc memory");
				return -ENOMEM;
			}

			rte_memcpy(in, AES_CMAC_SEED,
				   ICP_QAT_HW_AES_128_KEY_SZ);
			rte_memcpy(p_state_buf, auth_key, auth_keylen);

			if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
				&enc_key) != 0) {
				rte_free(in);
				return -EFAULT;
			}

			AES_encrypt(in, k0, &enc_key);

			k1 = p_state_buf + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
			k2 = k1 + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;

			aes_cmac_key_derive(k0, k1);
			aes_cmac_key_derive(k1, k2);

			memset(k0, 0, ICP_QAT_HW_AES_128_KEY_SZ);
			*p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
			rte_free(in);
			return 0;
		} else {
			static uint8_t qat_aes_xcbc_key_seed[
					ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ] = {
				0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
				0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
				0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
				0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
				0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
				0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
			};

			uint8_t *in = NULL;
			uint8_t *out = p_state_buf;
			int x;
			AES_KEY enc_key;

			in = rte_zmalloc("working mem for key",
					ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ, 16);
			if (in == NULL) {
				QAT_LOG(ERR, "Failed to alloc memory");
				return -ENOMEM;
			}

			rte_memcpy(in, qat_aes_xcbc_key_seed,
					ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
			for (x = 0; x < HASH_XCBC_PRECOMP_KEY_NUM; x++) {
				if (AES_set_encrypt_key(auth_key,
							auth_keylen << 3,
							&enc_key) != 0) {
					rte_free(in -
					  (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ));
					memset(out -
					   (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ),
					  0, ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
					return -EFAULT;
				}
				AES_encrypt(in, out, &enc_key);
				in += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
				out += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
			}
			*p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
			rte_free(in - x*ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ);
			return 0;
		}

	} else if ((hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_128) ||
		(hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_64)) {
		uint8_t *in = NULL;
		uint8_t *out = p_state_buf;
		AES_KEY enc_key;

		memset(p_state_buf, 0, ICP_QAT_HW_GALOIS_H_SZ +
				ICP_QAT_HW_GALOIS_LEN_A_SZ +
				ICP_QAT_HW_GALOIS_E_CTR0_SZ);
		in = rte_zmalloc("working mem for key",
				ICP_QAT_HW_GALOIS_H_SZ, 16);
		if (in == NULL) {
			QAT_LOG(ERR, "Failed to alloc memory");
			return -ENOMEM;
		}

		memset(in, 0, ICP_QAT_HW_GALOIS_H_SZ);
		if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
			&enc_key) != 0) {
			rte_free(in);
			return -EFAULT;
		}
		AES_encrypt(in, out, &enc_key);
		*p_state_len = ICP_QAT_HW_GALOIS_H_SZ +
				ICP_QAT_HW_GALOIS_LEN_A_SZ +
				ICP_QAT_HW_GALOIS_E_CTR0_SZ;
		rte_free(in);
		return 0;
	}

	block_size = qat_hash_get_block_size(hash_alg);
	if (block_size < 0)
		return block_size;

	/* init ipad and opad from key and xor with fixed values */
	memset(ipad, 0, block_size);
	memset(opad, 0, block_size);

	if (auth_keylen > (unsigned int)block_size) {
		QAT_LOG(ERR, "invalid keylen %u", auth_keylen);
		return -EFAULT;
	}
	rte_memcpy(ipad, auth_key, auth_keylen);
	rte_memcpy(opad, auth_key, auth_keylen);

	for (i = 0; i < block_size; i++) {
		uint8_t *ipad_ptr = ipad + i;
		uint8_t *opad_ptr = opad + i;
		*ipad_ptr ^= HMAC_IPAD_VALUE;
		*opad_ptr ^= HMAC_OPAD_VALUE;
	}

	/* do partial hash of ipad and copy to state1 */
	if (partial_hash_compute(hash_alg, ipad, p_state_buf)) {
		memset(ipad, 0, block_size);
		memset(opad, 0, block_size);
		QAT_LOG(ERR, "ipad precompute failed");
		return -EFAULT;
	}

	/*
	 * State len is a multiple of 8, so may be larger than the digest.
	 * Put the partial hash of opad state_len bytes after state1
	 */
	*p_state_len = qat_hash_get_state1_size(hash_alg);
	if (partial_hash_compute(hash_alg, opad, p_state_buf + *p_state_len)) {
		memset(ipad, 0, block_size);
		memset(opad, 0, block_size);
		QAT_LOG(ERR, "opad precompute failed");
		return -EFAULT;
	}

	/* don't leave data lying around */
	memset(ipad, 0, block_size);
	memset(opad, 0, block_size);
	return 0;
}
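/*
 * Summary of the HMAC path above: state1 receives the partial hash of
 * (key ^ ipad) and state2 the partial hash of (key ^ opad), i.e. the two
 * fixed compression-function states in
 * HMAC(K, m) = H((K ^ opad) || H((K ^ ipad) || m)),
 * so the firmware can resume from them instead of re-hashing the padded
 * key on every request.
 */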
static void
qat_sym_session_init_common_hdr(struct icp_qat_fw_comn_req_hdr *header,
		enum qat_sym_proto_flag proto_flags)
{
	header->hdr_flags =
		ICP_QAT_FW_COMN_HDR_FLAGS_BUILD(ICP_QAT_FW_COMN_REQ_FLAG_SET);
	header->service_type = ICP_QAT_FW_COMN_REQ_CPM_FW_LA;
	header->comn_req_flags =
		ICP_QAT_FW_COMN_FLAGS_BUILD(QAT_COMN_CD_FLD_TYPE_64BIT_ADR,
					QAT_COMN_PTR_TYPE_FLAT);
	ICP_QAT_FW_LA_PARTIAL_SET(header->serv_specif_flags,
				  ICP_QAT_FW_LA_PARTIAL_NONE);
	ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(header->serv_specif_flags,
					ICP_QAT_FW_CIPH_IV_16BYTE_DATA);

	switch (proto_flags) {
	case QAT_CRYPTO_PROTO_FLAG_NONE:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_PROTO);
		break;
	case QAT_CRYPTO_PROTO_FLAG_CCM:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_CCM_PROTO);
		break;
	case QAT_CRYPTO_PROTO_FLAG_GCM:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_GCM_PROTO);
		break;
	case QAT_CRYPTO_PROTO_FLAG_SNOW3G:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_SNOW_3G_PROTO);
		break;
	case QAT_CRYPTO_PROTO_FLAG_ZUC:
		ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(header->serv_specif_flags,
			ICP_QAT_FW_LA_ZUC_3G_PROTO);
		break;
	}

	ICP_QAT_FW_LA_UPDATE_STATE_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_UPDATE_STATE);
	ICP_QAT_FW_LA_DIGEST_IN_BUFFER_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_DIGEST_IN_BUFFER);
}
/*
 * SNOW 3G and ZUC should never use this function; they set their protocol
 * flags in both the cipher and auth parts of the content descriptor
 * building functions.
 */
static enum qat_sym_proto_flag
qat_get_crypto_proto_flag(uint16_t flags)
{
	int proto = ICP_QAT_FW_LA_PROTO_GET(flags);
	enum qat_sym_proto_flag qat_proto_flag =
			QAT_CRYPTO_PROTO_FLAG_NONE;

	switch (proto) {
	case ICP_QAT_FW_LA_GCM_PROTO:
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
		break;
	case ICP_QAT_FW_LA_CCM_PROTO:
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
		break;
	}

	return qat_proto_flag;
}
int qat_sym_session_aead_create_cd_cipher(struct qat_sym_session *cdesc,
						const uint8_t *cipherkey,
						uint32_t cipherkeylen)
{
	struct icp_qat_hw_cipher_algo_blk *cipher;
	struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
	struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
	struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
	void *ptr = &req_tmpl->cd_ctrl;
	struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
	struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
	enum icp_qat_hw_cipher_convert key_convert;
	enum qat_sym_proto_flag qat_proto_flag =
		QAT_CRYPTO_PROTO_FLAG_NONE;
	uint32_t total_key_size;
	uint16_t cipher_offset, cd_size;
	uint32_t wordIndex = 0;
	uint32_t *temp_key = NULL;

	if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER) {
		cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
		ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_DRAM_WR);
		ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_RET_AUTH_RES);
		ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
		cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
		ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_DRAM_WR);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
		QAT_LOG(ERR, "Invalid param, must be a cipher command.");
		return -EFAULT;
	}

	if (cdesc->qat_mode == ICP_QAT_HW_CIPHER_CTR_MODE) {
		/*
		 * CTR Streaming ciphers are a special case. Decrypt = encrypt
		 * Overriding default values previously set
		 */
		cdesc->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
		key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2
		|| cdesc->qat_cipher_alg ==
			ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)
		key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
	else if (cdesc->qat_dir == ICP_QAT_HW_CIPHER_ENCRYPT)
		key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
	else
		key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;

	if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
		total_key_size = ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ +
			ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
		cipher_cd_ctrl->cipher_state_sz =
			ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;

	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
		total_key_size = ICP_QAT_HW_KASUMI_F8_KEY_SZ;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_KASUMI_BLK_SZ >> 3;
		cipher_cd_ctrl->cipher_padding_sz =
					(2 * ICP_QAT_HW_KASUMI_BLK_SZ) >> 3;
	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES) {
		total_key_size = ICP_QAT_HW_3DES_KEY_SZ;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_3DES_BLK_SZ >> 3;
		qat_proto_flag =
			qat_get_crypto_proto_flag(header->serv_specif_flags);
	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_DES) {
		total_key_size = ICP_QAT_HW_DES_KEY_SZ;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_DES_BLK_SZ >> 3;
		qat_proto_flag =
			qat_get_crypto_proto_flag(header->serv_specif_flags);
	} else if (cdesc->qat_cipher_alg ==
		ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
		total_key_size = ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ +
			ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
		cipher_cd_ctrl->cipher_state_sz =
			ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
		cdesc->min_qat_dev_gen = QAT_GEN2;
	} else {
		total_key_size = cipherkeylen;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_AES_BLK_SZ >> 3;
		qat_proto_flag =
			qat_get_crypto_proto_flag(header->serv_specif_flags);
	}
	cipher_cd_ctrl->cipher_key_sz = total_key_size >> 3;
	cipher_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
	cipher_cd_ctrl->cipher_cfg_offset = cipher_offset >> 3;

	header->service_cmd_id = cdesc->qat_cmd;
	qat_sym_session_init_common_hdr(header, qat_proto_flag);

	cipher = (struct icp_qat_hw_cipher_algo_blk *)cdesc->cd_cur_ptr;
	cipher->cipher_config.val =
	    ICP_QAT_HW_CIPHER_CONFIG_BUILD(cdesc->qat_mode,
					cdesc->qat_cipher_alg, key_convert,
					cdesc->qat_dir);

	if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
		temp_key = (uint32_t *)(cdesc->cd_cur_ptr +
					sizeof(struct icp_qat_hw_cipher_config)
					+ cipherkeylen);
		memcpy(cipher->key, cipherkey, cipherkeylen);
		memcpy(temp_key, cipherkey, cipherkeylen);

		/* XOR Key with KASUMI F8 key modifier at 4 bytes level */
		for (wordIndex = 0; wordIndex < (cipherkeylen >> 2);
								wordIndex++)
			temp_key[wordIndex] ^= KASUMI_F8_KEY_MODIFIER_4_BYTES;

		cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
					cipherkeylen + cipherkeylen;
	} else {
		memcpy(cipher->key, cipherkey, cipherkeylen);
		cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
					cipherkeylen;
	}

	if (total_key_size > cipherkeylen) {
		uint32_t padding_size = total_key_size-cipherkeylen;
		if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
			&& (cipherkeylen == QAT_3DES_KEY_SZ_OPT2)) {
			/* K3 not provided so use K1 = K3 */
			memcpy(cdesc->cd_cur_ptr, cipherkey, padding_size);
		} else if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
			&& (cipherkeylen == QAT_3DES_KEY_SZ_OPT3)) {
			/* K2 and K3 not provided so use K1 = K2 = K3 */
			memcpy(cdesc->cd_cur_ptr, cipherkey,
				cipherkeylen);
			memcpy(cdesc->cd_cur_ptr+cipherkeylen,
				cipherkey, cipherkeylen);
		} else
			memset(cdesc->cd_cur_ptr, 0, padding_size);

		cdesc->cd_cur_ptr += padding_size;
	}
	cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
	cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;

	return 0;
}
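/*
 * Note on units: the cipher_key_sz/cipher_cfg_offset/content_desc_params_sz
 * fields written above are all expressed in 8-byte quadwords, hence the
 * ">> 3" shifts, and the content descriptor laid out at cd_cur_ptr is
 * cipher config + key (+ any 3DES/KASUMI key padding) rounded up to a
 * multiple of 8 bytes.
 */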
int qat_sym_session_aead_create_cd_auth(struct qat_sym_session *cdesc,
						const uint8_t *authkey,
						uint32_t authkeylen,
						uint32_t aad_length,
						uint32_t digestsize,
						unsigned int operation)
{
	struct icp_qat_hw_auth_setup *hash;
	struct icp_qat_hw_cipher_algo_blk *cipherconfig;
	struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
	struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
	struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
	void *ptr = &req_tmpl->cd_ctrl;
	struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
	struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
	struct icp_qat_fw_la_auth_req_params *auth_param =
		(struct icp_qat_fw_la_auth_req_params *)
		((char *)&req_tmpl->serv_specif_rqpars +
		ICP_QAT_FW_HASH_REQUEST_PARAMETERS_OFFSET);
	uint16_t state1_size = 0, state2_size = 0;
	uint16_t hash_offset, cd_size;
	uint32_t *aad_len = NULL;
	uint32_t wordIndex = 0;
	uint32_t *pTempKey;
	enum qat_sym_proto_flag qat_proto_flag =
		QAT_CRYPTO_PROTO_FLAG_NONE;

	if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_AUTH) {
		ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_DRAM_WR);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
		ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
				ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
				ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
				ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
				ICP_QAT_FW_SLICE_DRAM_WR);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
		QAT_LOG(ERR, "Invalid param, must be a hash command.");
		return -EFAULT;
	}

	if (operation == RTE_CRYPTO_AUTH_OP_VERIFY) {
		ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_NO_RET_AUTH_RES);
		ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_CMP_AUTH_RES);
		cdesc->auth_op = ICP_QAT_HW_AUTH_VERIFY;
	} else {
		ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_RET_AUTH_RES);
		ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
		cdesc->auth_op = ICP_QAT_HW_AUTH_GENERATE;
	}

	/*
	 * Setup the inner hash config
	 */
	hash_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
	hash = (struct icp_qat_hw_auth_setup *)cdesc->cd_cur_ptr;
	hash->auth_config.reserved = 0;
	hash->auth_config.config =
			ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE1,
				cdesc->qat_hash_alg, digestsize);

	if (cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_KASUMI_F9
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_NULL
			)
		hash->auth_counter.counter = 0;
	else {
		int block_size = qat_hash_get_block_size(cdesc->qat_hash_alg);

		if (block_size < 0)
			return block_size;
		hash->auth_counter.counter = rte_bswap32(block_size);
	}

	cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_auth_setup);

	/*
	 * cd_cur_ptr now points at the state1 information.
	 */
	switch (cdesc->qat_hash_alg) {
	case ICP_QAT_HW_AUTH_ALGO_SHA1:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA1, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = RTE_ALIGN_CEIL(ICP_QAT_HW_SHA1_STATE2_SZ, 8);
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA224:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA224, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_SHA224_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA256:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA256, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_SHA256_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA384:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA384, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_SHA384_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA512:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA512, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_SHA512_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
		state1_size = ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;

		if (cdesc->aes_cmac)
			memset(cdesc->cd_cur_ptr, 0, state1_size);
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC,
			authkey, authkeylen, cdesc->cd_cur_ptr + state1_size,
			&state2_size, cdesc->aes_cmac)) {
			cdesc->aes_cmac ? QAT_LOG(ERR,
					"(CMAC)precompute failed")
				: QAT_LOG(ERR,
					"(XCBC)precompute failed");
			return -EFAULT;
		}
		break;
	case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
	case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
		state1_size = ICP_QAT_HW_GALOIS_128_STATE1_SZ;
		if (qat_sym_do_precomputes(cdesc->qat_hash_alg, authkey,
			authkeylen, cdesc->cd_cur_ptr + state1_size,
			&state2_size, cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(GCM)precompute failed");
			return -EFAULT;
		}
		/*
		 * Write (the length of AAD) into bytes 16-19 of state2
		 * in big-endian format. This field is 8 bytes
		 */
		auth_param->u2.aad_sz =
				RTE_ALIGN_CEIL(aad_length, 16);
		auth_param->hash_state_sz = (auth_param->u2.aad_sz) >> 3;

		aad_len = (uint32_t *)(cdesc->cd_cur_ptr +
					ICP_QAT_HW_GALOIS_128_STATE1_SZ +
					ICP_QAT_HW_GALOIS_H_SZ);
		*aad_len = rte_bswap32(aad_length);
		cdesc->aad_len = aad_length;
		break;
	case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2);
		state2_size = ICP_QAT_HW_SNOW_3G_UIA2_STATE2_SZ;
		memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);

		cipherconfig = (struct icp_qat_hw_cipher_algo_blk *)
				(cdesc->cd_cur_ptr + state1_size + state2_size);
		cipherconfig->cipher_config.val =
		ICP_QAT_HW_CIPHER_CONFIG_BUILD(ICP_QAT_HW_CIPHER_ECB_MODE,
			ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2,
			ICP_QAT_HW_CIPHER_KEY_CONVERT,
			ICP_QAT_HW_CIPHER_ENCRYPT);
		memcpy(cipherconfig->key, authkey, authkeylen);
		memset(cipherconfig->key + authkeylen,
				0, ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ);
		cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
				authkeylen + ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
		auth_param->hash_state_sz = ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
		break;
	case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
		hash->auth_config.config =
			ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE0,
				cdesc->qat_hash_alg, digestsize);
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3);
		state2_size = ICP_QAT_HW_ZUC_3G_EIA3_STATE2_SZ;
		memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size
			+ ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ);

		memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
		cdesc->cd_cur_ptr += state1_size + state2_size
			+ ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
		auth_param->hash_state_sz = ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
		cdesc->min_qat_dev_gen = QAT_GEN2;

		break;
	case ICP_QAT_HW_AUTH_ALGO_MD5:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_MD5, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(MD5)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_MD5_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_NULL:
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_NULL);
		state2_size = ICP_QAT_HW_NULL_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC);
		state2_size = ICP_QAT_HW_AES_CBC_MAC_KEY_SZ +
				ICP_QAT_HW_AES_CCM_CBC_E_CTR0_SZ;

		if (aad_length > 0) {
			aad_length += ICP_QAT_HW_CCM_AAD_B0_LEN +
				ICP_QAT_HW_CCM_AAD_LEN_INFO;
			auth_param->u2.aad_sz =
				RTE_ALIGN_CEIL(aad_length,
					ICP_QAT_HW_CCM_AAD_ALIGNMENT);
		} else {
			auth_param->u2.aad_sz = ICP_QAT_HW_CCM_AAD_B0_LEN;
		}
		cdesc->aad_len = aad_length;
		hash->auth_counter.counter = 0;

		hash_cd_ctrl->outer_prefix_sz = digestsize;
		auth_param->hash_state_sz = digestsize;

		memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
		break;
	case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_KASUMI_F9);
		state2_size = ICP_QAT_HW_KASUMI_F9_STATE2_SZ;
		memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
		pTempKey = (uint32_t *)(cdesc->cd_cur_ptr + state1_size
							+ authkeylen);
		/*
		 * The Inner Hash Initial State2 block must contain IK
		 * (Initialisation Key), followed by IK XOR-ed with KM
		 * (Key Modifier): IK||(IK^KM).
		 */
		/* write the auth key */
		memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
		/* initialise temp key with auth key */
		memcpy(pTempKey, authkey, authkeylen);
		/* XOR Key with KASUMI F9 key modifier at 4 bytes level */
		for (wordIndex = 0; wordIndex < (authkeylen >> 2); wordIndex++)
			pTempKey[wordIndex] ^= KASUMI_F9_KEY_MODIFIER_4_BYTES;
		break;
	default:
		QAT_LOG(ERR, "Invalid HASH alg %u", cdesc->qat_hash_alg);
		return -EFAULT;
	}

	/* Request template setup */
	qat_sym_session_init_common_hdr(header, qat_proto_flag);
	header->service_cmd_id = cdesc->qat_cmd;

	/* Auth CD config setup */
	hash_cd_ctrl->hash_cfg_offset = hash_offset >> 3;
	hash_cd_ctrl->hash_flags = ICP_QAT_FW_AUTH_HDR_FLAG_NO_NESTED;
	hash_cd_ctrl->inner_res_sz = digestsize;
	hash_cd_ctrl->final_sz = digestsize;
	hash_cd_ctrl->inner_state1_sz = state1_size;
	auth_param->auth_res_sz = digestsize;

	hash_cd_ctrl->inner_state2_sz = state2_size;
	hash_cd_ctrl->inner_state2_offset = hash_cd_ctrl->hash_cfg_offset +
			((sizeof(struct icp_qat_hw_auth_setup) +
			 RTE_ALIGN_CEIL(hash_cd_ctrl->inner_state1_sz, 8))
					>> 3);

	cdesc->cd_cur_ptr += state1_size + state2_size;
	cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;

	cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
	cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;

	return 0;
}
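/*
 * As in the cipher case, hash_cfg_offset, inner_state2_offset and
 * content_desc_params_sz above are in 8-byte quadwords. state1 holds the
 * (precomputed) inner hash state and state2 the outer state or key
 * material, which is why cd_cur_ptr advances by state1_size + state2_size.
 */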
int qat_sym_validate_aes_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
{
	switch (key_len) {
	case ICP_QAT_HW_AES_128_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
		break;
	case ICP_QAT_HW_AES_192_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_AES192;
		break;
	case ICP_QAT_HW_AES_256_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

int qat_sym_validate_aes_docsisbpi_key(int key_len,
		enum icp_qat_hw_cipher_algo *alg)
{
	switch (key_len) {
	case ICP_QAT_HW_AES_128_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

int qat_sym_validate_snow3g_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
{
	switch (key_len) {
	case ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

int qat_sym_validate_kasumi_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
{
	switch (key_len) {
	case ICP_QAT_HW_KASUMI_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_KASUMI;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

int qat_sym_validate_des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
{
	switch (key_len) {
	case ICP_QAT_HW_DES_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_DES;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

int qat_sym_validate_3des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
{
	switch (key_len) {
	case QAT_3DES_KEY_SZ_OPT1:
	case QAT_3DES_KEY_SZ_OPT2:
	case QAT_3DES_KEY_SZ_OPT3:
		*alg = ICP_QAT_HW_CIPHER_ALGO_3DES;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

int qat_sym_validate_zuc_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
{
	switch (key_len) {
	case ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}