1 /* SPDX-License-Identifier: (BSD-3-Clause OR GPL-2.0)
2 * Copyright(c) 2015-2019 Intel Corporation
5 #include <openssl/sha.h> /* Needed to calculate pre-compute values */
6 #include <openssl/aes.h> /* Needed to calculate pre-compute values */
7 #include <openssl/md5.h> /* Needed to calculate pre-compute values */
8 #include <openssl/evp.h> /* Needed for bpi runt block processing */
10 #include <rte_memcpy.h>
11 #include <rte_common.h>
12 #include <rte_spinlock.h>
13 #include <rte_byteorder.h>
15 #include <rte_malloc.h>
16 #include <rte_crypto_sym.h>
19 #include "qat_sym_session.h"
20 #include "qat_sym_pmd.h"
22 /** Frees a context previously created
23 * Depends on openssl libcrypto
 * Releases the EVP cipher context allocated by bpi_cipher_ctx_init()
 * (EVP_CIPHER_CTX_free() is documented as a no-op for NULL, so a
 * never-initialized ctx is safe to pass).
 */
26 bpi_cipher_ctx_free(void *bpi_ctx)
29 EVP_CIPHER_CTX_free((EVP_CIPHER_CTX *)bpi_ctx);
32 /** Creates a context in either AES or DES in ECB mode
33 * Depends on openssl libcrypto
 *
 * Allocates an EVP_CIPHER_CTX into *ctx and initializes it for ECB
 * encryption of the DOCSIS BPI "runt" blocks. The cipher is selected by
 * the cryptodev algorithm (DES_DOCSISBPI vs AES_DOCSISBPI) and, for AES,
 * by the key length (128 vs 256 bit).
 * NOTE(review): return statements and the DES branch body are elided in
 * this view; presumably returns 0 on success and a negative errno on
 * failure — confirm against the full source.
 */
36 bpi_cipher_ctx_init(enum rte_crypto_cipher_algorithm cryptodev_algo,
37 enum rte_crypto_cipher_operation direction __rte_unused,
38 const uint8_t *key, uint16_t key_length, void **ctx)
40 const EVP_CIPHER *algo = NULL;
42 *ctx = EVP_CIPHER_CTX_new();
 /* DES branch body elided here; presumably selects EVP_des_ecb(). */
49 if (cryptodev_algo == RTE_CRYPTO_CIPHER_DES_DOCSISBPI)
52 if (key_length == ICP_QAT_HW_AES_128_KEY_SZ)
53 algo = EVP_aes_128_ecb();
55 algo = EVP_aes_256_ecb();
57 /* IV will be ECB encrypted whether direction is encrypt or decrypt*/
58 if (EVP_EncryptInit_ex(*ctx, algo, NULL, key, 0) != 1) {
 /* Failure path: release the context so the caller never sees a
  * half-initialized *ctx. */
67 EVP_CIPHER_CTX_free(*ctx);
/* Scan this device's capability table for a symmetric-cipher entry
 * matching 'algo'. The table is terminated by an entry whose op is
 * RTE_CRYPTO_OP_TYPE_UNDEFINED. Non-symmetric and non-cipher entries
 * are skipped (continue statements elided in this view). Presumably
 * returns non-zero when found, 0 otherwise — returns are elided.
 */
72 qat_is_cipher_alg_supported(enum rte_crypto_cipher_algorithm algo,
73 struct qat_sym_dev_private *internals)
76 const struct rte_cryptodev_capabilities *capability;
78 while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
79 RTE_CRYPTO_OP_TYPE_UNDEFINED) {
80 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
83 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_CIPHER)
86 if (capability->sym.cipher.algo == algo)
/* Auth-side twin of qat_is_cipher_alg_supported(): walk the capability
 * table (terminated by RTE_CRYPTO_OP_TYPE_UNDEFINED) looking for a
 * symmetric AUTH xform entry whose algorithm matches 'algo'.
 */
93 qat_is_auth_alg_supported(enum rte_crypto_auth_algorithm algo,
94 struct qat_sym_dev_private *internals)
97 const struct rte_cryptodev_capabilities *capability;
99 while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
100 RTE_CRYPTO_OP_TYPE_UNDEFINED) {
101 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
104 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_AUTH)
107 if (capability->sym.auth.algo == algo)
/* Tear down the per-driver private data of a symmetric session:
 * free the optional DOCSIS BPI OpenSSL context, zeroize the session
 * (which may hold key material), detach it from the generic session
 * object and return the memory to its mempool.
 * NOTE(review): a NULL check on sess_priv appears elided in this view —
 * confirm the guard exists before the s->bpi_ctx dereference.
 */
114 qat_sym_session_clear(struct rte_cryptodev *dev,
115 struct rte_cryptodev_sym_session *sess)
117 uint8_t index = dev->driver_id;
118 void *sess_priv = get_sym_session_private_data(sess, index);
119 struct qat_sym_session *s = (struct qat_sym_session *)sess_priv;
123 bpi_cipher_ctx_free(s->bpi_ctx);
 /* Zeroize before returning to the pool so keys do not linger. */
124 memset(s, 0, qat_sym_session_get_private_size(dev));
125 struct rte_mempool *sess_mp = rte_mempool_from_obj(sess_priv);
127 set_sym_session_private_data(sess, index, NULL);
128 rte_mempool_put(sess_mp, sess_priv);
/* Map an rte_crypto xform chain onto a QAT firmware LA command id:
 * cipher-only, auth-only, AEAD (order depends on algo and direction),
 * or a two-element cipher+auth chain in either order. Presumably
 * returns a negative value for an unrecognized chain (the fallthrough
 * return is elided in this view).
 */
133 qat_get_cmd_id(const struct rte_crypto_sym_xform *xform)
136 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER && xform->next == NULL)
137 return ICP_QAT_FW_LA_CMD_CIPHER;
139 /* Authentication Only */
140 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH && xform->next == NULL)
141 return ICP_QAT_FW_LA_CMD_AUTH;
144 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
145 /* AES-GCM and AES-CCM works with different direction
146 * GCM first encrypts and generate hash where AES-CCM
147 * first generate hash and encrypts. Similar relation
148 * applies to decryption.
150 if (xform->aead.op == RTE_CRYPTO_AEAD_OP_ENCRYPT)
151 if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
152 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
154 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
 /* Decrypt direction: the order of the two passes is reversed. */
156 if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
157 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
159 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
162 if (xform->next == NULL)
165 /* Cipher then Authenticate */
166 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
167 xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
168 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
170 /* Authenticate then Cipher */
171 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH &&
172 xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
173 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
/* Locate the auth xform within a (possibly chained) xform list and
 * return a pointer to its auth member. NOTE(review): the return and the
 * handling of chained/absent AUTH entries are elided in this view.
 */
178 static struct rte_crypto_auth_xform *
179 qat_get_auth_xform(struct rte_crypto_sym_xform *xform)
182 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH)
/* Locate the cipher xform within a (possibly chained) xform list and
 * return a pointer to its cipher member. Handling of non-CIPHER heads
 * is elided in this view.
 */
191 static struct rte_crypto_cipher_xform *
192 qat_get_cipher_xform(struct rte_crypto_sym_xform *xform)
195 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
196 return &xform->cipher;
/* Populate the cipher half of a QAT session from the cipher xform:
 * IV offset/length, validated HW algorithm id (session->qat_cipher_alg),
 * HW mode, direction, and finally the cipher content descriptor.
 * For the DOCSIS BPI algorithms an OpenSSL ECB context is also created
 * (session->bpi_ctx) for software runt-block processing; it is freed on
 * the error path at the bottom. 'break' statements and 'goto error_out'
 * jumps between cases are elided in this view.
 */
205 qat_sym_session_configure_cipher(struct rte_cryptodev *dev,
206 struct rte_crypto_sym_xform *xform,
207 struct qat_sym_session *session)
209 struct qat_sym_dev_private *internals = dev->data->dev_private;
210 struct rte_crypto_cipher_xform *cipher_xform = NULL;
213 /* Get cipher xform from crypto xform chain */
214 cipher_xform = qat_get_cipher_xform(xform);
216 session->cipher_iv.offset = cipher_xform->iv.offset;
217 session->cipher_iv.length = cipher_xform->iv.length;
219 switch (cipher_xform->algo) {
220 case RTE_CRYPTO_CIPHER_AES_CBC:
221 if (qat_sym_validate_aes_key(cipher_xform->key.length,
222 &session->qat_cipher_alg) != 0) {
223 QAT_LOG(ERR, "Invalid AES cipher key size");
227 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
229 case RTE_CRYPTO_CIPHER_AES_CTR:
230 if (qat_sym_validate_aes_key(cipher_xform->key.length,
231 &session->qat_cipher_alg) != 0) {
232 QAT_LOG(ERR, "Invalid AES cipher key size");
236 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
238 case RTE_CRYPTO_CIPHER_SNOW3G_UEA2:
239 if (qat_sym_validate_snow3g_key(cipher_xform->key.length,
240 &session->qat_cipher_alg) != 0) {
241 QAT_LOG(ERR, "Invalid SNOW 3G cipher key size");
 /* Stream ciphers (SNOW 3G / ZUC below) use the HW ECB mode setting. */
245 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
247 case RTE_CRYPTO_CIPHER_NULL:
248 session->qat_cipher_alg = ICP_QAT_HW_CIPHER_ALGO_NULL;
249 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
251 case RTE_CRYPTO_CIPHER_KASUMI_F8:
252 if (qat_sym_validate_kasumi_key(cipher_xform->key.length,
253 &session->qat_cipher_alg) != 0) {
254 QAT_LOG(ERR, "Invalid KASUMI cipher key size");
258 session->qat_mode = ICP_QAT_HW_CIPHER_F8_MODE;
260 case RTE_CRYPTO_CIPHER_3DES_CBC:
261 if (qat_sym_validate_3des_key(cipher_xform->key.length,
262 &session->qat_cipher_alg) != 0) {
263 QAT_LOG(ERR, "Invalid 3DES cipher key size");
267 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
269 case RTE_CRYPTO_CIPHER_DES_CBC:
270 if (qat_sym_validate_des_key(cipher_xform->key.length,
271 &session->qat_cipher_alg) != 0) {
272 QAT_LOG(ERR, "Invalid DES cipher key size");
276 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
278 case RTE_CRYPTO_CIPHER_3DES_CTR:
279 if (qat_sym_validate_3des_key(cipher_xform->key.length,
280 &session->qat_cipher_alg) != 0) {
281 QAT_LOG(ERR, "Invalid 3DES cipher key size");
285 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
 /* DOCSIS BPI: build the software ECB context first, then validate
  * the key for the HW CBC path. */
287 case RTE_CRYPTO_CIPHER_DES_DOCSISBPI:
288 ret = bpi_cipher_ctx_init(
291 cipher_xform->key.data,
292 cipher_xform->key.length,
295 QAT_LOG(ERR, "failed to create DES BPI ctx");
298 if (qat_sym_validate_des_key(cipher_xform->key.length,
299 &session->qat_cipher_alg) != 0) {
300 QAT_LOG(ERR, "Invalid DES cipher key size");
304 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
306 case RTE_CRYPTO_CIPHER_AES_DOCSISBPI:
307 ret = bpi_cipher_ctx_init(
310 cipher_xform->key.data,
311 cipher_xform->key.length,
314 QAT_LOG(ERR, "failed to create AES BPI ctx");
317 if (qat_sym_validate_aes_docsisbpi_key(cipher_xform->key.length,
318 &session->qat_cipher_alg) != 0) {
319 QAT_LOG(ERR, "Invalid AES DOCSISBPI key size");
323 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
 /* ZUC is only present on some devices: consult the capability table. */
325 case RTE_CRYPTO_CIPHER_ZUC_EEA3:
326 if (!qat_is_cipher_alg_supported(
327 cipher_xform->algo, internals)) {
328 QAT_LOG(ERR, "%s not supported on this device",
329 rte_crypto_cipher_algorithm_strings
330 [cipher_xform->algo]);
334 if (qat_sym_validate_zuc_key(cipher_xform->key.length,
335 &session->qat_cipher_alg) != 0) {
336 QAT_LOG(ERR, "Invalid ZUC cipher key size");
340 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
342 case RTE_CRYPTO_CIPHER_AES_XTS:
 /* XTS keys are two concatenated AES keys, hence length/2. */
343 if ((cipher_xform->key.length/2) == ICP_QAT_HW_AES_192_KEY_SZ) {
344 QAT_LOG(ERR, "AES-XTS-192 not supported");
348 if (qat_sym_validate_aes_key((cipher_xform->key.length/2),
349 &session->qat_cipher_alg) != 0) {
350 QAT_LOG(ERR, "Invalid AES-XTS cipher key size");
354 session->qat_mode = ICP_QAT_HW_CIPHER_XTS_MODE;
356 case RTE_CRYPTO_CIPHER_3DES_ECB:
357 case RTE_CRYPTO_CIPHER_AES_ECB:
358 case RTE_CRYPTO_CIPHER_AES_F8:
359 case RTE_CRYPTO_CIPHER_ARC4:
360 QAT_LOG(ERR, "Crypto QAT PMD: Unsupported Cipher alg %u",
365 QAT_LOG(ERR, "Crypto: Undefined Cipher specified %u\n",
371 if (cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
372 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
374 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
376 if (qat_sym_session_aead_create_cd_cipher(session,
377 cipher_xform->key.data,
378 cipher_xform->key.length)) {
 /* Error path: release the BPI context so it is not leaked. */
386 if (session->bpi_ctx) {
387 bpi_cipher_ctx_free(session->bpi_ctx);
388 session->bpi_ctx = NULL;
/* cryptodev session-configure entry point: draw a private-data object
 * from the mempool, fill it via qat_sym_session_set_parameters(), and
 * attach it to the generic session under this driver's id. The object
 * is returned to the mempool if parameter setup fails.
 */
394 qat_sym_session_configure(struct rte_cryptodev *dev,
395 struct rte_crypto_sym_xform *xform,
396 struct rte_cryptodev_sym_session *sess,
397 struct rte_mempool *mempool)
399 void *sess_private_data;
402 if (rte_mempool_get(mempool, &sess_private_data)) {
404 "Couldn't get object from session mempool");
408 ret = qat_sym_session_set_parameters(dev, xform, sess_private_data);
411 "Crypto QAT PMD: failed to configure session parameters");
413 /* Return session to mempool */
414 rte_mempool_put(mempool, sess_private_data);
418 set_sym_session_private_data(sess, dev->driver_id,
/* Enable the extended-protocol-flags mechanism in the session's
 * firmware request template and program the given hash flag, then set
 * the LW-1 proto flags to match the session's cipher algorithm (needed
 * for "mixed" wireless cipher/hash combinations; see
 * qat_sym_session_handle_mixed()).
 */
425 qat_sym_session_set_ext_hash_flags(struct qat_sym_session *session,
428 struct icp_qat_fw_comn_req_hdr *header = &session->fw_req.comn_hdr;
429 struct icp_qat_fw_cipher_auth_cd_ctrl_hdr *cd_ctrl =
430 (struct icp_qat_fw_cipher_auth_cd_ctrl_hdr *)
431 session->fw_req.cd_ctrl.content_desc_ctrl_lw;
433 /* Set the Use Extended Protocol Flags bit in LW 1 */
434 QAT_FIELD_SET(header->comn_req_flags,
435 QAT_COMN_EXT_FLAGS_USED,
436 QAT_COMN_EXT_FLAGS_BITPOS,
437 QAT_COMN_EXT_FLAGS_MASK);
439 /* Set Hash Flags in LW 28 */
440 cd_ctrl->hash_flags |= hash_flag;
442 /* Set proto flags in LW 1 */
443 switch (session->qat_cipher_alg) {
444 case ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2:
445 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
446 ICP_QAT_FW_LA_SNOW_3G_PROTO);
447 ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
448 header->serv_specif_flags, 0);
450 case ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3:
451 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
452 ICP_QAT_FW_LA_NO_PROTO);
453 ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
454 header->serv_specif_flags,
455 ICP_QAT_FW_LA_ZUC_3G_PROTO);
 /* default: neither SNOW 3G nor ZUC proto — clear both flags. */
458 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
459 ICP_QAT_FW_LA_NO_PROTO);
460 ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
461 header->serv_specif_flags, 0);
/* Detect "mixed" wireless cipher/hash combinations (e.g. ZUC auth with
 * a non-ZUC cipher) which require extended hash flags and a minimum
 * device generation: GEN2 when the device advertises the mixed-crypto
 * capability, otherwise GEN3.
 */
467 qat_sym_session_handle_mixed(const struct rte_cryptodev *dev,
468 struct qat_sym_session *session)
470 const struct qat_sym_dev_private *qat_private = dev->data->dev_private;
471 enum qat_device_gen min_dev_gen = (qat_private->internal_capabilities &
472 QAT_SYM_CAP_MIXED_CRYPTO) ? QAT_GEN2 : QAT_GEN3;
 /* ZUC hash paired with a non-ZUC cipher. */
474 if (session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3 &&
475 session->qat_cipher_alg !=
476 ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
477 session->min_qat_dev_gen = min_dev_gen;
478 qat_sym_session_set_ext_hash_flags(session,
479 1 << ICP_QAT_FW_AUTH_HDR_FLAG_ZUC_EIA3_BITPOS);
 /* SNOW 3G hash paired with a non-SNOW-3G cipher. */
480 } else if (session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2 &&
481 session->qat_cipher_alg !=
482 ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
483 session->min_qat_dev_gen = min_dev_gen;
484 qat_sym_session_set_ext_hash_flags(session,
485 1 << ICP_QAT_FW_AUTH_HDR_FLAG_SNOW3G_UIA2_BITPOS);
 /* AES-CMAC or NULL hash paired with a wireless cipher: ext flags
  * needed but no per-hash flag bit. */
486 } else if ((session->aes_cmac ||
487 session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_NULL) &&
488 (session->qat_cipher_alg ==
489 ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2 ||
490 session->qat_cipher_alg ==
491 ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)) {
492 session->min_qat_dev_gen = min_dev_gen;
493 qat_sym_session_set_ext_hash_flags(session, 0);
/* Fill a qat_sym_session from the xform chain: compute the content
 * descriptor's physical address, map the chain to a firmware command id,
 * then dispatch to the cipher/auth/AEAD configure helpers in the order
 * implied by the command. For chained (non-AEAD) cipher+hash sessions,
 * mixed-algorithm handling is applied afterwards. Error 'return's
 * between the visible lines are elided in this view.
 */
498 qat_sym_session_set_parameters(struct rte_cryptodev *dev,
499 struct rte_crypto_sym_xform *xform, void *session_private)
501 struct qat_sym_session *session = session_private;
505 /* Set context descriptor physical address */
506 session->cd_paddr = rte_mempool_virt2iova(session) +
507 offsetof(struct qat_sym_session, cd);
509 session->min_qat_dev_gen = QAT_GEN1;
511 /* Get requested QAT command id */
512 qat_cmd_id = qat_get_cmd_id(xform);
513 if (qat_cmd_id < 0 || qat_cmd_id >= ICP_QAT_FW_LA_CMD_DELIMITER) {
514 QAT_LOG(ERR, "Unsupported xform chain requested");
517 session->qat_cmd = (enum icp_qat_fw_la_cmd_id)qat_cmd_id;
518 switch (session->qat_cmd) {
519 case ICP_QAT_FW_LA_CMD_CIPHER:
520 ret = qat_sym_session_configure_cipher(dev, xform, session);
524 case ICP_QAT_FW_LA_CMD_AUTH:
525 ret = qat_sym_session_configure_auth(dev, xform, session);
529 case ICP_QAT_FW_LA_CMD_CIPHER_HASH:
530 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
531 ret = qat_sym_session_configure_aead(dev, xform,
 /* Non-AEAD chain: configure cipher first, then auth. */
536 ret = qat_sym_session_configure_cipher(dev,
540 ret = qat_sym_session_configure_auth(dev,
544 /* Special handling of mixed hash+cipher algorithms */
545 qat_sym_session_handle_mixed(dev, session);
548 case ICP_QAT_FW_LA_CMD_HASH_CIPHER:
549 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
550 ret = qat_sym_session_configure_aead(dev, xform,
 /* Non-AEAD chain: configure auth first, then cipher. */
555 ret = qat_sym_session_configure_auth(dev,
559 ret = qat_sym_session_configure_cipher(dev,
563 /* Special handling of mixed hash+cipher algorithms */
564 qat_sym_session_handle_mixed(dev, session);
 /* Remaining LA commands are not implemented by this PMD. */
567 case ICP_QAT_FW_LA_CMD_TRNG_GET_RANDOM:
568 case ICP_QAT_FW_LA_CMD_TRNG_TEST:
569 case ICP_QAT_FW_LA_CMD_SSL3_KEY_DERIVE:
570 case ICP_QAT_FW_LA_CMD_TLS_V1_1_KEY_DERIVE:
571 case ICP_QAT_FW_LA_CMD_TLS_V1_2_KEY_DERIVE:
572 case ICP_QAT_FW_LA_CMD_MGF1:
573 case ICP_QAT_FW_LA_CMD_AUTH_PRE_COMP:
574 case ICP_QAT_FW_LA_CMD_CIPHER_PRE_COMP:
575 case ICP_QAT_FW_LA_CMD_DELIMITER:
576 QAT_LOG(ERR, "Unsupported Service %u",
580 QAT_LOG(ERR, "Unsupported Service %u",
/* On GEN3 devices with a 12-byte (96-bit) GCM IV, reconfigure the AEAD
 * session to use the faster Single-Pass (SPC) GCM path: the request
 * becomes a plain CIPHER command in AEAD cipher mode with the digest
 * generation/verification and AAD sizes carried in the cipher config
 * and request parameters instead of a separate hash pass.
 */
589 qat_sym_session_handle_single_pass(struct qat_sym_dev_private *internals,
590 struct qat_sym_session *session,
591 struct rte_crypto_aead_xform *aead_xform)
593 enum qat_device_gen qat_dev_gen = internals->qat_dev->qat_dev_gen;
595 if (qat_dev_gen == QAT_GEN3 &&
596 aead_xform->iv.length == QAT_AES_GCM_SPC_IV_SIZE) {
597 /* Use faster Single-Pass GCM */
598 struct icp_qat_fw_la_cipher_req_params *cipher_param =
599 (void *) &session->fw_req.serv_specif_rqpars;
601 session->is_single_pass = 1;
602 session->min_qat_dev_gen = QAT_GEN3;
603 session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER;
604 session->qat_mode = ICP_QAT_HW_CIPHER_AEAD_MODE;
605 session->cipher_iv.offset = aead_xform->iv.offset;
606 session->cipher_iv.length = aead_xform->iv.length;
607 if (qat_sym_session_aead_create_cd_cipher(session,
608 aead_xform->key.data, aead_xform->key.length))
610 session->aad_len = aead_xform->aad_length;
611 session->digest_length = aead_xform->digest_length;
 /* Direction decides whether the digest is generated (and returned)
  * or verified (and compared) by the firmware. */
612 if (aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
613 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
614 session->auth_op = ICP_QAT_HW_AUTH_GENERATE;
615 ICP_QAT_FW_LA_RET_AUTH_SET(
616 session->fw_req.comn_hdr.serv_specif_flags,
617 ICP_QAT_FW_LA_RET_AUTH_RES);
619 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
620 session->auth_op = ICP_QAT_HW_AUTH_VERIFY;
621 ICP_QAT_FW_LA_CMP_AUTH_SET(
622 session->fw_req.comn_hdr.serv_specif_flags,
623 ICP_QAT_FW_LA_CMP_AUTH_RES);
625 ICP_QAT_FW_LA_SINGLE_PASS_PROTO_FLAG_SET(
626 session->fw_req.comn_hdr.serv_specif_flags,
627 ICP_QAT_FW_LA_SINGLE_PASS_PROTO);
628 ICP_QAT_FW_LA_PROTO_SET(
629 session->fw_req.comn_hdr.serv_specif_flags,
630 ICP_QAT_FW_LA_NO_PROTO);
631 ICP_QAT_FW_LA_GCM_IV_LEN_FLAG_SET(
632 session->fw_req.comn_hdr.serv_specif_flags,
633 ICP_QAT_FW_LA_GCM_IV_LEN_12_OCTETS);
634 session->fw_req.comn_hdr.service_cmd_id =
635 ICP_QAT_FW_LA_CMD_CIPHER;
636 session->cd.cipher.cipher_config.val =
637 ICP_QAT_HW_CIPHER_CONFIG_BUILD(
638 ICP_QAT_HW_CIPHER_AEAD_MODE,
639 session->qat_cipher_alg,
640 ICP_QAT_HW_CIPHER_NO_CONVERT,
 /* Digest length is packed into the cipher config word for SPC. */
642 QAT_FIELD_SET(session->cd.cipher.cipher_config.val,
643 aead_xform->digest_length,
644 QAT_CIPHER_AEAD_HASH_CMP_LEN_BITPOS,
645 QAT_CIPHER_AEAD_HASH_CMP_LEN_MASK);
646 session->cd.cipher.cipher_config.reserved =
647 ICP_QAT_HW_CIPHER_CONFIG_BUILD_UPPER(
648 aead_xform->aad_length);
649 cipher_param->spc_aad_sz = aead_xform->aad_length;
650 cipher_param->spc_auth_res_sz = aead_xform->digest_length;
/* Populate the auth half of a QAT session: map the rte auth algorithm
 * to a HW hash id, then build the auth content descriptor. AES-GMAC is
 * special-cased as GCM with a zero-length cipher: it builds both a
 * cipher and an auth descriptor in direction-dependent order, then
 * restores the command to auth-only. 'break'/'return' lines between the
 * visible statements are elided in this view.
 */
656 qat_sym_session_configure_auth(struct rte_cryptodev *dev,
657 struct rte_crypto_sym_xform *xform,
658 struct qat_sym_session *session)
660 struct rte_crypto_auth_xform *auth_xform = qat_get_auth_xform(xform);
661 struct qat_sym_dev_private *internals = dev->data->dev_private;
662 const uint8_t *key_data = auth_xform->key.data;
663 uint8_t key_length = auth_xform->key.length;
664 session->aes_cmac = 0;
666 session->auth_iv.offset = auth_xform->iv.offset;
667 session->auth_iv.length = auth_xform->iv.length;
669 switch (auth_xform->algo) {
670 case RTE_CRYPTO_AUTH_SHA1_HMAC:
671 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA1;
673 case RTE_CRYPTO_AUTH_SHA224_HMAC:
674 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA224;
676 case RTE_CRYPTO_AUTH_SHA256_HMAC:
677 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA256;
679 case RTE_CRYPTO_AUTH_SHA384_HMAC:
680 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA384;
682 case RTE_CRYPTO_AUTH_SHA512_HMAC:
683 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA512;
685 case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
686 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
 /* CMAC reuses the XCBC-MAC HW algo; aes_cmac selects the CMAC
  * key-derivation in the precompute path. */
688 case RTE_CRYPTO_AUTH_AES_CMAC:
689 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
690 session->aes_cmac = 1;
692 case RTE_CRYPTO_AUTH_AES_GMAC:
693 if (qat_sym_validate_aes_key(auth_xform->key.length,
694 &session->qat_cipher_alg) != 0) {
695 QAT_LOG(ERR, "Invalid AES key size");
698 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
699 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
 /* Zero IV length means the default GCM J0 (12-byte) IV is used. */
700 if (session->auth_iv.length == 0)
701 session->auth_iv.length = AES_GCM_J0_LEN;
704 case RTE_CRYPTO_AUTH_SNOW3G_UIA2:
705 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2;
707 case RTE_CRYPTO_AUTH_MD5_HMAC:
708 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_MD5;
710 case RTE_CRYPTO_AUTH_NULL:
711 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_NULL;
713 case RTE_CRYPTO_AUTH_KASUMI_F9:
714 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_KASUMI_F9;
716 case RTE_CRYPTO_AUTH_ZUC_EIA3:
717 if (!qat_is_auth_alg_supported(auth_xform->algo, internals)) {
718 QAT_LOG(ERR, "%s not supported on this device",
719 rte_crypto_auth_algorithm_strings
723 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3;
 /* Plain (non-HMAC) hashes are not offered by this PMD. */
725 case RTE_CRYPTO_AUTH_SHA1:
726 case RTE_CRYPTO_AUTH_SHA256:
727 case RTE_CRYPTO_AUTH_SHA512:
728 case RTE_CRYPTO_AUTH_SHA224:
729 case RTE_CRYPTO_AUTH_SHA384:
730 case RTE_CRYPTO_AUTH_MD5:
731 case RTE_CRYPTO_AUTH_AES_CBC_MAC:
732 QAT_LOG(ERR, "Crypto: Unsupported hash alg %u",
736 QAT_LOG(ERR, "Crypto: Undefined Hash algo %u specified",
741 if (auth_xform->algo == RTE_CRYPTO_AUTH_AES_GMAC) {
742 if (auth_xform->op == RTE_CRYPTO_AUTH_OP_GENERATE) {
743 session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER_HASH;
744 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
746 * It needs to create cipher desc content first,
747 * then authentication
750 if (qat_sym_session_aead_create_cd_cipher(session,
751 auth_xform->key.data,
752 auth_xform->key.length))
755 if (qat_sym_session_aead_create_cd_auth(session,
759 auth_xform->digest_length,
 /* GMAC verify: auth descriptor first, then cipher. */
763 session->qat_cmd = ICP_QAT_FW_LA_CMD_HASH_CIPHER;
764 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
766 * It needs to create authentication desc content first,
770 if (qat_sym_session_aead_create_cd_auth(session,
774 auth_xform->digest_length,
778 if (qat_sym_session_aead_create_cd_cipher(session,
779 auth_xform->key.data,
780 auth_xform->key.length))
783 /* Restore to authentication only only */
784 session->qat_cmd = ICP_QAT_FW_LA_CMD_AUTH;
 /* Non-GMAC path: a single auth content descriptor. */
786 if (qat_sym_session_aead_create_cd_auth(session,
790 auth_xform->digest_length,
795 session->digest_length = auth_xform->digest_length;
/* Populate a QAT session from an AEAD xform (AES-GCM or AES-CCM).
 * The AEAD IV is stored in the session's cipher_iv fields. For GCM,
 * the faster GEN3 single-pass path is attempted first; otherwise the
 * cipher and auth content descriptors are built in an order determined
 * by algorithm and direction (GCM encrypts-then-hashes, CCM the
 * opposite). Error 'return's are elided in this view.
 */
800 qat_sym_session_configure_aead(struct rte_cryptodev *dev,
801 struct rte_crypto_sym_xform *xform,
802 struct qat_sym_session *session)
804 struct rte_crypto_aead_xform *aead_xform = &xform->aead;
805 enum rte_crypto_auth_operation crypto_operation;
808 * Store AEAD IV parameters as cipher IV,
809 * to avoid unnecessary memory usage
811 session->cipher_iv.offset = xform->aead.iv.offset;
812 session->cipher_iv.length = xform->aead.iv.length;
814 switch (aead_xform->algo) {
815 case RTE_CRYPTO_AEAD_AES_GCM:
816 if (qat_sym_validate_aes_key(aead_xform->key.length,
817 &session->qat_cipher_alg) != 0) {
818 QAT_LOG(ERR, "Invalid AES key size");
821 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
822 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
 /* Zero IV length selects the default GCM J0 (12-byte) IV. */
823 if (session->cipher_iv.length == 0)
824 session->cipher_iv.length = AES_GCM_J0_LEN;
827 case RTE_CRYPTO_AEAD_AES_CCM:
828 if (qat_sym_validate_aes_key(aead_xform->key.length,
829 &session->qat_cipher_alg) != 0) {
830 QAT_LOG(ERR, "Invalid AES key size");
833 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
834 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC;
837 QAT_LOG(ERR, "Crypto: Undefined AEAD specified %u\n",
842 session->is_single_pass = 0;
843 if (aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) {
844 /* Use faster Single-Pass GCM if possible */
845 int res = qat_sym_session_handle_single_pass(
846 dev->data->dev_private, session, aead_xform);
 /* Single-pass path already built the descriptors; nothing more. */
849 if (session->is_single_pass)
853 if ((aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT &&
854 aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) ||
855 (aead_xform->op == RTE_CRYPTO_AEAD_OP_DECRYPT &&
856 aead_xform->algo == RTE_CRYPTO_AEAD_AES_CCM)) {
857 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
859 * It needs to create cipher desc content first,
860 * then authentication
862 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
863 RTE_CRYPTO_AUTH_OP_GENERATE : RTE_CRYPTO_AUTH_OP_VERIFY;
865 if (qat_sym_session_aead_create_cd_cipher(session,
866 aead_xform->key.data,
867 aead_xform->key.length))
870 if (qat_sym_session_aead_create_cd_auth(session,
871 aead_xform->key.data,
872 aead_xform->key.length,
873 aead_xform->aad_length,
874 aead_xform->digest_length,
 /* Opposite direction/algo combination: auth desc first. */
878 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
880 * It needs to create authentication desc content first,
884 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
885 RTE_CRYPTO_AUTH_OP_VERIFY : RTE_CRYPTO_AUTH_OP_GENERATE;
887 if (qat_sym_session_aead_create_cd_auth(session,
888 aead_xform->key.data,
889 aead_xform->key.length,
890 aead_xform->aad_length,
891 aead_xform->digest_length,
895 if (qat_sym_session_aead_create_cd_cipher(session,
896 aead_xform->key.data,
897 aead_xform->key.length))
901 session->digest_length = aead_xform->digest_length;
/* Size of the per-session private data, rounded up to an 8-byte
 * multiple; used for mempool element sizing and for zeroizing in
 * qat_sym_session_clear().
 */
905 unsigned int qat_sym_session_get_private_size(
906 struct rte_cryptodev *dev __rte_unused)
908 return RTE_ALIGN_CEIL(sizeof(struct qat_sym_session), 8);
911 /* returns block size in bytes per cipher algo */
/* (DES/3DES/AES only; other HW algo ids fall through to the error log —
 * the negative return after it is elided in this view.) */
912 int qat_cipher_get_block_size(enum icp_qat_hw_cipher_algo qat_cipher_alg)
914 switch (qat_cipher_alg) {
915 case ICP_QAT_HW_CIPHER_ALGO_DES:
916 return ICP_QAT_HW_DES_BLK_SZ;
917 case ICP_QAT_HW_CIPHER_ALGO_3DES:
918 return ICP_QAT_HW_3DES_BLK_SZ;
919 case ICP_QAT_HW_CIPHER_ALGO_AES128:
920 case ICP_QAT_HW_CIPHER_ALGO_AES192:
921 case ICP_QAT_HW_CIPHER_ALGO_AES256:
922 return ICP_QAT_HW_AES_BLK_SZ;
924 QAT_LOG(ERR, "invalid block cipher alg %u", qat_cipher_alg);
931 * Returns size in bytes per hash algo for state1 size field in cd_ctrl
932 * This is digest size rounded up to nearest quadword
934 static int qat_hash_get_state1_size(enum icp_qat_hw_auth_algo qat_hash_alg)
936 switch (qat_hash_alg) {
937 case ICP_QAT_HW_AUTH_ALGO_SHA1:
938 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA1_STATE1_SZ,
939 QAT_HW_DEFAULT_ALIGNMENT);
940 case ICP_QAT_HW_AUTH_ALGO_SHA224:
941 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA224_STATE1_SZ,
942 QAT_HW_DEFAULT_ALIGNMENT);
943 case ICP_QAT_HW_AUTH_ALGO_SHA256:
944 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA256_STATE1_SZ,
945 QAT_HW_DEFAULT_ALIGNMENT);
946 case ICP_QAT_HW_AUTH_ALGO_SHA384:
947 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA384_STATE1_SZ,
948 QAT_HW_DEFAULT_ALIGNMENT);
949 case ICP_QAT_HW_AUTH_ALGO_SHA512:
950 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
951 QAT_HW_DEFAULT_ALIGNMENT);
952 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
953 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ,
954 QAT_HW_DEFAULT_ALIGNMENT);
/* GALOIS_64 shares the 128-bit state1 size with GALOIS_128. */
955 case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
956 case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
957 return QAT_HW_ROUND_UP(ICP_QAT_HW_GALOIS_128_STATE1_SZ,
958 QAT_HW_DEFAULT_ALIGNMENT);
959 case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
960 return QAT_HW_ROUND_UP(ICP_QAT_HW_ZUC_3G_EIA3_STATE1_SZ,
961 QAT_HW_DEFAULT_ALIGNMENT);
962 case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
963 return QAT_HW_ROUND_UP(ICP_QAT_HW_SNOW_3G_UIA2_STATE1_SZ,
964 QAT_HW_DEFAULT_ALIGNMENT);
965 case ICP_QAT_HW_AUTH_ALGO_MD5:
966 return QAT_HW_ROUND_UP(ICP_QAT_HW_MD5_STATE1_SZ,
967 QAT_HW_DEFAULT_ALIGNMENT);
968 case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
969 return QAT_HW_ROUND_UP(ICP_QAT_HW_KASUMI_F9_STATE1_SZ,
970 QAT_HW_DEFAULT_ALIGNMENT);
971 case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
972 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_CBC_MAC_STATE1_SZ,
973 QAT_HW_DEFAULT_ALIGNMENT);
974 case ICP_QAT_HW_AUTH_ALGO_NULL:
975 return QAT_HW_ROUND_UP(ICP_QAT_HW_NULL_STATE1_SZ,
976 QAT_HW_DEFAULT_ALIGNMENT);
977 case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
978 /* return maximum state1 size in this case */
979 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
980 QAT_HW_DEFAULT_ALIGNMENT);
982 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
988 /* returns digest size in bytes per hash algo */
/* DELIMITER (a sentinel, not a real algo) yields the maximum digest
 * size, so callers can size worst-case buffers with it. The negative
 * return after the error log is elided in this view. */
989 static int qat_hash_get_digest_size(enum icp_qat_hw_auth_algo qat_hash_alg)
991 switch (qat_hash_alg) {
992 case ICP_QAT_HW_AUTH_ALGO_SHA1:
993 return ICP_QAT_HW_SHA1_STATE1_SZ;
994 case ICP_QAT_HW_AUTH_ALGO_SHA224:
995 return ICP_QAT_HW_SHA224_STATE1_SZ;
996 case ICP_QAT_HW_AUTH_ALGO_SHA256:
997 return ICP_QAT_HW_SHA256_STATE1_SZ;
998 case ICP_QAT_HW_AUTH_ALGO_SHA384:
999 return ICP_QAT_HW_SHA384_STATE1_SZ;
1000 case ICP_QAT_HW_AUTH_ALGO_SHA512:
1001 return ICP_QAT_HW_SHA512_STATE1_SZ;
1002 case ICP_QAT_HW_AUTH_ALGO_MD5:
1003 return ICP_QAT_HW_MD5_STATE1_SZ;
1004 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
1005 return ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1006 case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
1007 /* return maximum digest size in this case */
1008 return ICP_QAT_HW_SHA512_STATE1_SZ;
1010 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
1016 /* returns block size in byes per hash algo */
/* Uses the OpenSSL *_CBLOCK constants: SHA-224 shares SHA-256's 64-byte
 * block and SHA-384 shares SHA-512's 128-byte block. Returns for the
 * SHA1, GALOIS_128 and MD5 cases are elided in this view. */
1017 static int qat_hash_get_block_size(enum icp_qat_hw_auth_algo qat_hash_alg)
1019 switch (qat_hash_alg) {
1020 case ICP_QAT_HW_AUTH_ALGO_SHA1:
1022 case ICP_QAT_HW_AUTH_ALGO_SHA224:
1023 return SHA256_CBLOCK;
1024 case ICP_QAT_HW_AUTH_ALGO_SHA256:
1025 return SHA256_CBLOCK;
1026 case ICP_QAT_HW_AUTH_ALGO_SHA384:
1027 return SHA512_CBLOCK;
1028 case ICP_QAT_HW_AUTH_ALGO_SHA512:
1029 return SHA512_CBLOCK;
1030 case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
1032 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
1033 return ICP_QAT_HW_AES_BLK_SZ;
1034 case ICP_QAT_HW_AUTH_ALGO_MD5:
1036 case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
1037 /* return maximum block size in this case */
1038 return SHA512_CBLOCK;
1040 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
/* Run one SHA-1 compression round over a single input block and copy
 * the raw internal state (not a finalized digest) to data_out — used to
 * precompute HMAC ipad/opad states for the QAT content descriptor.
 */
1046 static int partial_hash_sha1(uint8_t *data_in, uint8_t *data_out)
1050 if (!SHA1_Init(&ctx))
1052 SHA1_Transform(&ctx, data_in);
1053 rte_memcpy(data_out, &ctx, SHA_DIGEST_LENGTH)
/* SHA-224 partial hash: init with SHA224 constants, transform one
 * block, and copy the full SHA-256-sized internal state (SHA-224 keeps
 * SHA-256's 32-byte state even though its digest is truncated).
 */
1057 static int partial_hash_sha224(uint8_t *data_in, uint8_t *data_out)
1061 if (!SHA224_Init(&ctx))
1063 SHA256_Transform(&ctx, data_in);
1064 rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
/* SHA-256 partial hash: one compression round over a single block,
 * raw internal state copied out for HMAC precompute.
 */
1068 static int partial_hash_sha256(uint8_t *data_in, uint8_t *data_out)
1072 if (!SHA256_Init(&ctx))
1074 SHA256_Transform(&ctx, data_in);
1075 rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
/* SHA-384 partial hash: SHA384 init constants with the shared SHA-512
 * transform; copies the full 64-byte SHA-512-sized internal state.
 */
1079 static int partial_hash_sha384(uint8_t *data_in, uint8_t *data_out)
1083 if (!SHA384_Init(&ctx))
1085 SHA512_Transform(&ctx, data_in);
1086 rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
/* SHA-512 partial hash: one compression round over a single block,
 * raw internal state copied out for HMAC precompute.
 */
1090 static int partial_hash_sha512(uint8_t *data_in, uint8_t *data_out)
1094 if (!SHA512_Init(&ctx))
1096 SHA512_Transform(&ctx, data_in);
1097 rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
/* MD5 partial hash: one compression round over a single block, raw
 * internal state copied out (MD5 state is little-endian, so unlike the
 * SHA variants the caller does not byte-swap it — see
 * partial_hash_compute()).
 */
1101 static int partial_hash_md5(uint8_t *data_in, uint8_t *data_out)
1105 if (!MD5_Init(&ctx))
1107 MD5_Transform(&ctx, data_in);
1108 rte_memcpy(data_out, &ctx, MD5_DIGEST_LENGTH);
/* Dispatch to the per-algorithm partial-hash helper and store the
 * intermediate state in the byte order the QAT firmware expects:
 * SHA-1/224/256 as big-endian 32-bit words, SHA-384/512 as big-endian
 * 64-bit words, MD5 verbatim (its state is already little-endian).
 * The VLA 'digest' is sized by the maximum digest size (DELIMITER).
 * 'break' statements and error returns are elided in this view.
 */
1113 static int partial_hash_compute(enum icp_qat_hw_auth_algo hash_alg,
1118 uint8_t digest[qat_hash_get_digest_size(
1119 ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1120 uint32_t *hash_state_out_be32;
1121 uint64_t *hash_state_out_be64;
1124 digest_size = qat_hash_get_digest_size(hash_alg);
1125 if (digest_size <= 0)
1128 hash_state_out_be32 = (uint32_t *)data_out;
1129 hash_state_out_be64 = (uint64_t *)data_out;
1132 case ICP_QAT_HW_AUTH_ALGO_SHA1:
1133 if (partial_hash_sha1(data_in, digest))
1135 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
1136 *hash_state_out_be32 =
1137 rte_bswap32(*(((uint32_t *)digest)+i));
1139 case ICP_QAT_HW_AUTH_ALGO_SHA224:
1140 if (partial_hash_sha224(data_in, digest))
1142 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
1143 *hash_state_out_be32 =
1144 rte_bswap32(*(((uint32_t *)digest)+i));
1146 case ICP_QAT_HW_AUTH_ALGO_SHA256:
1147 if (partial_hash_sha256(data_in, digest))
1149 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
1150 *hash_state_out_be32 =
1151 rte_bswap32(*(((uint32_t *)digest)+i));
 /* 64-bit state words for the SHA-2 512-family. */
1153 case ICP_QAT_HW_AUTH_ALGO_SHA384:
1154 if (partial_hash_sha384(data_in, digest))
1156 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
1157 *hash_state_out_be64 =
1158 rte_bswap64(*(((uint64_t *)digest)+i));
1160 case ICP_QAT_HW_AUTH_ALGO_SHA512:
1161 if (partial_hash_sha512(data_in, digest))
1163 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
1164 *hash_state_out_be64 =
1165 rte_bswap64(*(((uint64_t *)digest)+i));
 /* MD5 writes straight to data_out — no byte swap needed. */
1167 case ICP_QAT_HW_AUTH_ALGO_MD5:
1168 if (partial_hash_md5(data_in, data_out))
1172 QAT_LOG(ERR, "invalid hash alg %u", hash_alg);
/* HMAC inner/outer pad bytes (RFC 2104) and the number of derived keys
 * (K1/K2/K3) precomputed for AES-XCBC-MAC. AES_CMAC_SEED is an all-zero
 * 16-byte block: encrypting it with the CMAC key yields subkey L.
 */
1178 #define HMAC_IPAD_VALUE 0x36
1179 #define HMAC_OPAD_VALUE 0x5c
1180 #define HASH_XCBC_PRECOMP_KEY_NUM 3
1182 static const uint8_t AES_CMAC_SEED[ICP_QAT_HW_AES_128_KEY_SZ];
/* CMAC subkey derivation (NIST SP 800-38B): left-shift the 128-bit
 * 'base' value by one bit into 'derived'. Each iteration shifts byte i
 * and then ORs its carried-out top bit into the previous byte.
 * NOTE(review): the standard conditionally XORs Rb only when base's MSB
 * was set; the guarding 'if' around the final XOR appears elided in
 * this view — confirm against the full source.
 */
1184 static void aes_cmac_key_derive(uint8_t *base, uint8_t *derived)
1188 derived[0] = base[0] << 1;
1189 for (i = 1; i < ICP_QAT_HW_AES_BLK_SZ ; i++) {
1190 derived[i] = base[i] << 1;
1191 derived[i - 1] |= base[i] >> 7;
1195 derived[ICP_QAT_HW_AES_BLK_SZ - 1] ^= QAT_AES_CMAC_CONST_RB;
/*
 * Fill @p_state_buf with the hash precompute values the QAT hardware
 * expects for @hash_alg, and report their length via @p_state_len.
 *  - AES-XCBC-MAC / AES-CMAC: AES-ECB encrypt a seed with the auth key
 *    to derive the per-round keys (K1..K3 / CMAC subkeys).
 *  - GALOIS_128 / GALOIS_64 (GCM): compute H = AES-ECB(key, 0^128) and
 *    zero the len(A)/E(CTR0) areas.
 *  - all other (HMAC) algorithms: partial-hash the key-derived ipad and
 *    opad blocks into state1 and state2.
 * NOTE(review): this excerpt elides many lines (returns, error paths,
 * closing braces) — confirm control flow against the full source.
 */
1198 static int qat_sym_do_precomputes(enum icp_qat_hw_auth_algo hash_alg,
1199 const uint8_t *auth_key,
1200 uint16_t auth_keylen,
1201 uint8_t *p_state_buf,
1202 uint16_t *p_state_len,
/* ipad/opad sized for the largest supported block size */
1206 uint8_t ipad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1207 uint8_t opad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1210 if (hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC) {
/* AES-CMAC path: K0 = AES(key, AES_CMAC_SEED), then subkeys K1, K2 */
1216 uint8_t k0[ICP_QAT_HW_AES_128_KEY_SZ];
1219 auth_keylen = ICP_QAT_HW_AES_128_KEY_SZ;
1221 in = rte_zmalloc("AES CMAC K1",
1222 ICP_QAT_HW_AES_128_KEY_SZ, 16);
1225 QAT_LOG(ERR, "Failed to alloc memory");
1229 rte_memcpy(in, AES_CMAC_SEED,
1230 ICP_QAT_HW_AES_128_KEY_SZ);
/* raw auth key goes into state1 */
1231 rte_memcpy(p_state_buf, auth_key, auth_keylen);
1233 if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1239 AES_encrypt(in, k0, &enc_key);
/* K1 and K2 are laid out immediately after state1 */
1241 k1 = p_state_buf + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1242 k2 = k1 + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1244 aes_cmac_key_derive(k0, k1);
1245 aes_cmac_key_derive(k1, k2);
/* scrub the intermediate key material */
1247 memset(k0, 0, ICP_QAT_HW_AES_128_KEY_SZ);
1248 *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
/* AES-XCBC path: encrypt the 0x01/0x02/0x03 seed blocks (RFC 3566) */
1252 static uint8_t qat_aes_xcbc_key_seed[
1253 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ] = {
1254 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1255 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1256 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1257 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1258 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1259 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1263 uint8_t *out = p_state_buf;
1267 in = rte_zmalloc("working mem for key",
1268 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ, 16);
1270 QAT_LOG(ERR, "Failed to alloc memory");
1274 rte_memcpy(in, qat_aes_xcbc_key_seed,
1275 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
/* one AES-ECB encryption per derived key K1..K3 */
1276 for (x = 0; x < HASH_XCBC_PRECOMP_KEY_NUM; x++) {
1277 if (AES_set_encrypt_key(auth_key,
1281 (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ));
1283 (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ),
1284 0, ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1287 AES_encrypt(in, out, &enc_key);
1288 in += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1289 out += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1291 *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
/* 'in' was advanced x times in the loop; rewind to free the base */
1292 rte_free(in - x*ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ);
1296 } else if ((hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_128) ||
1297 (hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_64)) {
/* GCM/GMAC path: H = AES(key, all-zero block); len(A) and E(CTR0)
 * regions zeroed here, filled in later by the session builder */
1299 uint8_t *out = p_state_buf;
1302 memset(p_state_buf, 0, ICP_QAT_HW_GALOIS_H_SZ +
1303 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1304 ICP_QAT_HW_GALOIS_E_CTR0_SZ);
1305 in = rte_zmalloc("working mem for key",
1306 ICP_QAT_HW_GALOIS_H_SZ, 16);
1308 QAT_LOG(ERR, "Failed to alloc memory");
1312 memset(in, 0, ICP_QAT_HW_GALOIS_H_SZ);
1313 if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1317 AES_encrypt(in, out, &enc_key);
1318 *p_state_len = ICP_QAT_HW_GALOIS_H_SZ +
1319 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1320 ICP_QAT_HW_GALOIS_E_CTR0_SZ;
/* default HMAC path from here on */
1325 block_size = qat_hash_get_block_size(hash_alg);
1328 /* init ipad and opad from key and xor with fixed values */
1329 memset(ipad, 0, block_size);
1330 memset(opad, 0, block_size);
/* key longer than the hash block size is rejected (no pre-hashing) */
1332 if (auth_keylen > (unsigned int)block_size) {
1333 QAT_LOG(ERR, "invalid keylen %u", auth_keylen);
1336 rte_memcpy(ipad, auth_key, auth_keylen);
1337 rte_memcpy(opad, auth_key, auth_keylen);
/* XOR every pad byte with the RFC 2104 constants */
1339 for (i = 0; i < block_size; i++) {
1340 uint8_t *ipad_ptr = ipad + i;
1341 uint8_t *opad_ptr = opad + i;
1342 *ipad_ptr ^= HMAC_IPAD_VALUE;
1343 *opad_ptr ^= HMAC_OPAD_VALUE;
1346 /* do partial hash of ipad and copy to state1 */
1347 if (partial_hash_compute(hash_alg, ipad, p_state_buf)) {
/* scrub key-derived pads before bailing out */
1348 memset(ipad, 0, block_size);
1349 memset(opad, 0, block_size);
1350 QAT_LOG(ERR, "ipad precompute failed");
1355 * State len is a multiple of 8, so may be larger than the digest.
1356 * Put the partial hash of opad state_len bytes after state1
1358 *p_state_len = qat_hash_get_state1_size(hash_alg);
1359 if (partial_hash_compute(hash_alg, opad, p_state_buf + *p_state_len)) {
1360 memset(ipad, 0, block_size);
1361 memset(opad, 0, block_size);
1362 QAT_LOG(ERR, "opad precompute failed");
1366 /* don't leave data lying around */
1367 memset(ipad, 0, block_size);
1368 memset(opad, 0, block_size);
/*
 * Initialise the common part of the QAT LA request header for a session:
 * service type, pointer/CD field flags, partial-mode off, 16-byte IV
 * field, and the protocol flag selected by @proto_flags.
 * NOTE(review): the per-case break statements are elided in this
 * excerpt — confirm against the full source.
 */
1373 qat_sym_session_init_common_hdr(struct icp_qat_fw_comn_req_hdr *header,
1374 enum qat_sym_proto_flag proto_flags)
1377 ICP_QAT_FW_COMN_HDR_FLAGS_BUILD(ICP_QAT_FW_COMN_REQ_FLAG_SET);
1378 header->service_type = ICP_QAT_FW_COMN_REQ_CPM_FW_LA;
/* flat (non-SGL) buffers with 64-bit content-descriptor addressing */
1379 header->comn_req_flags =
1380 ICP_QAT_FW_COMN_FLAGS_BUILD(QAT_COMN_CD_FLD_TYPE_64BIT_ADR,
1381 QAT_COMN_PTR_TYPE_FLAT);
1382 ICP_QAT_FW_LA_PARTIAL_SET(header->serv_specif_flags,
1383 ICP_QAT_FW_LA_PARTIAL_NONE);
1384 ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(header->serv_specif_flags,
1385 ICP_QAT_FW_CIPH_IV_16BYTE_DATA);
/* map the session protocol flag onto the firmware proto bits */
1387 switch (proto_flags) {
1388 case QAT_CRYPTO_PROTO_FLAG_NONE:
1389 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1390 ICP_QAT_FW_LA_NO_PROTO);
1392 case QAT_CRYPTO_PROTO_FLAG_CCM:
1393 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1394 ICP_QAT_FW_LA_CCM_PROTO);
1396 case QAT_CRYPTO_PROTO_FLAG_GCM:
1397 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1398 ICP_QAT_FW_LA_GCM_PROTO);
1400 case QAT_CRYPTO_PROTO_FLAG_SNOW3G:
1401 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1402 ICP_QAT_FW_LA_SNOW_3G_PROTO);
/* ZUC uses its own dedicated flag setter, not the generic PROTO field */
1404 case QAT_CRYPTO_PROTO_FLAG_ZUC:
1405 ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(header->serv_specif_flags,
1406 ICP_QAT_FW_LA_ZUC_3G_PROTO);
/* sessions built here never carry partial state or in-buffer digest */
1410 ICP_QAT_FW_LA_UPDATE_STATE_SET(header->serv_specif_flags,
1411 ICP_QAT_FW_LA_NO_UPDATE_STATE);
1412 ICP_QAT_FW_LA_DIGEST_IN_BUFFER_SET(header->serv_specif_flags,
1413 ICP_QAT_FW_LA_NO_DIGEST_IN_BUFFER);
1417  * Snow3G and ZUC should never use this function;
1418  * they set their protocol flag in both the cipher and the auth part of
1419  * the content descriptor in the respective builder functions
/*
 * Read the proto bits already set in the request header @flags and map
 * them back to the session-level qat_sym_proto_flag (GCM or CCM);
 * anything else yields QAT_CRYPTO_PROTO_FLAG_NONE.
 * NOTE(review): the switch opening and break lines are elided in this
 * excerpt — confirm against the full source.
 */
1421 static enum qat_sym_proto_flag
1422 qat_get_crypto_proto_flag(uint16_t flags)
1424 int proto = ICP_QAT_FW_LA_PROTO_GET(flags);
1425 enum qat_sym_proto_flag qat_proto_flag =
1426 QAT_CRYPTO_PROTO_FLAG_NONE;
1429 case ICP_QAT_FW_LA_GCM_PROTO:
1430 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
1432 case ICP_QAT_FW_LA_CCM_PROTO:
1433 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
1437 return qat_proto_flag;
/*
 * Build the cipher part of the session content descriptor:
 * set up slice chaining in the request template according to qat_cmd,
 * choose key-convert mode, write the cipher config block and key
 * (with algorithm-specific padding / transformation) at cd_cur_ptr,
 * then record the CD sizes in the request parameters.
 * NOTE(review): many lines (returns, braces, some else branches) are
 * elided in this excerpt — confirm control flow against full source.
 */
1440 int qat_sym_session_aead_create_cd_cipher(struct qat_sym_session *cdesc,
1441 const uint8_t *cipherkey,
1442 uint32_t cipherkeylen)
1444 struct icp_qat_hw_cipher_algo_blk *cipher;
1445 struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
1446 struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
1447 struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
/* cd_ctrl is viewed as either cipher or auth control header */
1448 void *ptr = &req_tmpl->cd_ctrl;
1449 struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
1450 struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
1451 enum icp_qat_hw_cipher_convert key_convert;
1452 enum qat_sym_proto_flag qat_proto_flag =
1453 QAT_CRYPTO_PROTO_FLAG_NONE;
1454 uint32_t total_key_size;
1455 uint16_t cipher_offset, cd_size;
1456 uint32_t wordIndex = 0;
1457 uint32_t *temp_key = NULL;
/* cipher-only command: cipher slice straight to DRAM write, no auth */
1459 if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER) {
1460 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1461 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1462 ICP_QAT_FW_SLICE_CIPHER);
1463 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1464 ICP_QAT_FW_SLICE_DRAM_WR);
1465 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1466 ICP_QAT_FW_LA_NO_RET_AUTH_RES);
1467 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1468 ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
1469 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
/* cipher-then-hash: chain cipher slice into the auth slice */
1470 } else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
1471 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1472 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1473 ICP_QAT_FW_SLICE_CIPHER);
1474 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1475 ICP_QAT_FW_SLICE_AUTH);
1476 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1477 ICP_QAT_FW_SLICE_AUTH);
1478 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1479 ICP_QAT_FW_SLICE_DRAM_WR);
1480 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
/* HASH_CIPHER falls through (slices set by the auth builder);
 * any other command is invalid here */
1481 } else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
1482 QAT_LOG(ERR, "Invalid param, must be a cipher command.");
1486 if (cdesc->qat_mode == ICP_QAT_HW_CIPHER_CTR_MODE) {
1488 * CTR Streaming ciphers are a special case. Decrypt = encrypt
1489 * Overriding default values previously set
1491 cdesc->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
1492 key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
1493 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2
1494 || cdesc->qat_cipher_alg ==
1495 ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)
1496 key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
/* block ciphers: convert the key only for the decrypt direction */
1497 else if (cdesc->qat_dir == ICP_QAT_HW_CIPHER_ENCRYPT)
1498 key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
1500 key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
/* per-algorithm key/state sizing and protocol flag selection */
1502 if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
1503 total_key_size = ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ +
1504 ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
1505 cipher_cd_ctrl->cipher_state_sz =
1506 ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
1507 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
1509 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
1510 total_key_size = ICP_QAT_HW_KASUMI_F8_KEY_SZ;
1511 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_KASUMI_BLK_SZ >> 3;
1512 cipher_cd_ctrl->cipher_padding_sz =
1513 (2 * ICP_QAT_HW_KASUMI_BLK_SZ) >> 3;
1514 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES) {
1515 total_key_size = ICP_QAT_HW_3DES_KEY_SZ;
1516 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_3DES_BLK_SZ >> 3;
/* keep whatever proto the header already carries (e.g. from auth) */
1518 qat_get_crypto_proto_flag(header->serv_specif_flags);
1519 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_DES) {
1520 total_key_size = ICP_QAT_HW_DES_KEY_SZ;
1521 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_DES_BLK_SZ >> 3;
1523 qat_get_crypto_proto_flag(header->serv_specif_flags);
1524 } else if (cdesc->qat_cipher_alg ==
1525 ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
1526 total_key_size = ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ +
1527 ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
1528 cipher_cd_ctrl->cipher_state_sz =
1529 ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
1530 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
/* ZUC cipher requires at least a GEN2 QAT device */
1531 cdesc->min_qat_dev_gen = QAT_GEN2;
/* default (e.g. AES): key size as supplied */
1533 total_key_size = cipherkeylen;
1534 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_AES_BLK_SZ >> 3;
1536 qat_get_crypto_proto_flag(header->serv_specif_flags);
/* sizes/offsets in the CD control header are in 8-byte words */
1538 cipher_cd_ctrl->cipher_key_sz = total_key_size >> 3;
1539 cipher_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
1540 cipher_cd_ctrl->cipher_cfg_offset = cipher_offset >> 3;
1542 header->service_cmd_id = cdesc->qat_cmd;
1543 qat_sym_session_init_common_hdr(header, qat_proto_flag);
/* write the cipher config block at the current CD position */
1545 cipher = (struct icp_qat_hw_cipher_algo_blk *)cdesc->cd_cur_ptr;
1546 cipher->cipher_config.val =
1547 ICP_QAT_HW_CIPHER_CONFIG_BUILD(cdesc->qat_mode,
1548 cdesc->qat_cipher_alg, key_convert,
/* KASUMI F8 needs the key twice: plain, then XORed with the modifier */
1551 if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
1552 temp_key = (uint32_t *)(cdesc->cd_cur_ptr +
1553 sizeof(struct icp_qat_hw_cipher_config)
1555 memcpy(cipher->key, cipherkey, cipherkeylen);
1556 memcpy(temp_key, cipherkey, cipherkeylen);
1558 /* XOR Key with KASUMI F8 key modifier at 4 bytes level */
1559 for (wordIndex = 0; wordIndex < (cipherkeylen >> 2);
1561 temp_key[wordIndex] ^= KASUMI_F8_KEY_MODIFIER_4_BYTES;
1563 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1564 cipherkeylen + cipherkeylen;
/* all other algorithms: copy the key once */
1566 memcpy(cipher->key, cipherkey, cipherkeylen);
1567 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
/* pad the key area up to the hardware-expected total size */
1571 if (total_key_size > cipherkeylen) {
1572 uint32_t padding_size = total_key_size-cipherkeylen;
1573 if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
1574 && (cipherkeylen == QAT_3DES_KEY_SZ_OPT2)) {
1575 /* K3 not provided so use K1 = K3*/
1576 memcpy(cdesc->cd_cur_ptr, cipherkey, padding_size);
1577 } else if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
1578 && (cipherkeylen == QAT_3DES_KEY_SZ_OPT3)) {
1579 /* K2 and K3 not provided so use K1 = K2 = K3*/
1580 memcpy(cdesc->cd_cur_ptr, cipherkey,
1582 memcpy(cdesc->cd_cur_ptr+cipherkeylen,
1583 cipherkey, cipherkeylen);
/* otherwise zero-fill the padding */
1585 memset(cdesc->cd_cur_ptr, 0, padding_size);
1587 cdesc->cd_cur_ptr += padding_size;
/* CD params size is in 8-byte words, rounded up */
1589 cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
1590 cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;
/*
 * Build the auth part of the session content descriptor:
 * set slice chaining for auth/hash-cipher commands, configure
 * generate-vs-verify flags, write the inner hash setup block, run the
 * algorithm-specific precompute (state1/state2) at cd_cur_ptr, then
 * fill the auth CD control header and request parameter sizes.
 * NOTE(review): this excerpt elides many lines (breaks, returns,
 * braces) — confirm control flow against the full source.
 */
1595 int qat_sym_session_aead_create_cd_auth(struct qat_sym_session *cdesc,
1596 const uint8_t *authkey,
1597 uint32_t authkeylen,
1598 uint32_t aad_length,
1599 uint32_t digestsize,
1600 unsigned int operation)
1602 struct icp_qat_hw_auth_setup *hash;
1603 struct icp_qat_hw_cipher_algo_blk *cipherconfig;
1604 struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
1605 struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
1606 struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
/* cd_ctrl is viewed as either cipher or auth control header */
1607 void *ptr = &req_tmpl->cd_ctrl;
1608 struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
1609 struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
/* auth request parameters live at a fixed offset in serv_specif_rqpars */
1610 struct icp_qat_fw_la_auth_req_params *auth_param =
1611 (struct icp_qat_fw_la_auth_req_params *)
1612 ((char *)&req_tmpl->serv_specif_rqpars +
1613 ICP_QAT_FW_HASH_REQUEST_PARAMETERS_OFFSET);
1614 uint16_t state1_size = 0, state2_size = 0;
1615 uint16_t hash_offset, cd_size;
1616 uint32_t *aad_len = NULL;
1617 uint32_t wordIndex = 0;
1619 enum qat_sym_proto_flag qat_proto_flag =
1620 QAT_CRYPTO_PROTO_FLAG_NONE;
/* auth-only: auth slice straight to DRAM write */
1622 if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_AUTH) {
1623 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1624 ICP_QAT_FW_SLICE_AUTH);
1625 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1626 ICP_QAT_FW_SLICE_DRAM_WR);
1627 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
/* hash-then-cipher: chain auth slice into the cipher slice */
1628 } else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
1629 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1630 ICP_QAT_FW_SLICE_AUTH);
1631 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1632 ICP_QAT_FW_SLICE_CIPHER);
1633 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1634 ICP_QAT_FW_SLICE_CIPHER);
1635 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1636 ICP_QAT_FW_SLICE_DRAM_WR);
1637 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
/* CIPHER_HASH is handled by the cipher builder; reject the rest */
1638 } else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
1639 QAT_LOG(ERR, "Invalid param, must be a hash command.");
/* verify: compare the digest; generate: return it */
1643 if (operation == RTE_CRYPTO_AUTH_OP_VERIFY) {
1644 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1645 ICP_QAT_FW_LA_NO_RET_AUTH_RES);
1646 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1647 ICP_QAT_FW_LA_CMP_AUTH_RES);
1648 cdesc->auth_op = ICP_QAT_HW_AUTH_VERIFY;
1650 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1651 ICP_QAT_FW_LA_RET_AUTH_RES);
1652 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1653 ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
1654 cdesc->auth_op = ICP_QAT_HW_AUTH_GENERATE;
1658 * Setup the inner hash config
1660 hash_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
1661 hash = (struct icp_qat_hw_auth_setup *)cdesc->cd_cur_ptr;
1662 hash->auth_config.reserved = 0;
1663 hash->auth_config.config =
1664 ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE1,
1665 cdesc->qat_hash_alg, digestsize);
/* non-HMAC algorithms have no block counter; HMAC stores block size */
1667 if (cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2
1668 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_KASUMI_F9
1669 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3
1670 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC
1671 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC
1672 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_NULL
1674 hash->auth_counter.counter = 0;
1676 int block_size = qat_hash_get_block_size(cdesc->qat_hash_alg);
/* counter is stored big-endian for the hardware */
1680 hash->auth_counter.counter = rte_bswap32(block_size);
1683 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_auth_setup);
1686 * cd_cur_ptr now points at the state1 information.
1688 switch (cdesc->qat_hash_alg) {
1689 case ICP_QAT_HW_AUTH_ALGO_SHA1:
1690 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA1, authkey,
1691 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1693 QAT_LOG(ERR, "(SHA)precompute failed");
/* SHA1 state2 is rounded up to an 8-byte multiple */
1696 state2_size = RTE_ALIGN_CEIL(ICP_QAT_HW_SHA1_STATE2_SZ, 8);
1698 case ICP_QAT_HW_AUTH_ALGO_SHA224:
1699 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA224, authkey,
1700 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1702 QAT_LOG(ERR, "(SHA)precompute failed");
1705 state2_size = ICP_QAT_HW_SHA224_STATE2_SZ;
1707 case ICP_QAT_HW_AUTH_ALGO_SHA256:
1708 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA256, authkey,
1709 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1711 QAT_LOG(ERR, "(SHA)precompute failed");
1714 state2_size = ICP_QAT_HW_SHA256_STATE2_SZ;
1716 case ICP_QAT_HW_AUTH_ALGO_SHA384:
1717 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA384, authkey,
1718 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1720 QAT_LOG(ERR, "(SHA)precompute failed");
1723 state2_size = ICP_QAT_HW_SHA384_STATE2_SZ;
1725 case ICP_QAT_HW_AUTH_ALGO_SHA512:
1726 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA512, authkey,
1727 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1729 QAT_LOG(ERR, "(SHA)precompute failed");
1732 state2_size = ICP_QAT_HW_SHA512_STATE2_SZ;
1734 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
1735 state1_size = ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
/* CMAC shares the XCBC algo id; its state1 area is zeroed */
1737 if (cdesc->aes_cmac)
1738 memset(cdesc->cd_cur_ptr, 0, state1_size);
1739 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC,
1740 authkey, authkeylen, cdesc->cd_cur_ptr + state1_size,
1741 &state2_size, cdesc->aes_cmac)) {
1742 cdesc->aes_cmac ? QAT_LOG(ERR,
1743 "(CMAC)precompute failed")
1745 "(XCBC)precompute failed");
1749 case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
1750 case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
1751 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
1752 state1_size = ICP_QAT_HW_GALOIS_128_STATE1_SZ;
/* precompute writes H (and zeroed len/ctr areas) after state1 */
1753 if (qat_sym_do_precomputes(cdesc->qat_hash_alg, authkey,
1754 authkeylen, cdesc->cd_cur_ptr + state1_size,
1755 &state2_size, cdesc->aes_cmac)) {
1756 QAT_LOG(ERR, "(GCM)precompute failed");
1760 * Write (the length of AAD) into bytes 16-19 of state2
1761 * in big-endian format. This field is 8 bytes
1763 auth_param->u2.aad_sz =
1764 RTE_ALIGN_CEIL(aad_length, 16);
1765 auth_param->hash_state_sz = (auth_param->u2.aad_sz) >> 3;
1767 aad_len = (uint32_t *)(cdesc->cd_cur_ptr +
1768 ICP_QAT_HW_GALOIS_128_STATE1_SZ +
1769 ICP_QAT_HW_GALOIS_H_SZ);
1770 *aad_len = rte_bswap32(aad_length);
1771 cdesc->aad_len = aad_length;
1773 case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
1774 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
1775 state1_size = qat_hash_get_state1_size(
1776 ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2);
1777 state2_size = ICP_QAT_HW_SNOW_3G_UIA2_STATE2_SZ;
1778 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
/* SNOW3G auth also carries an embedded cipher config + key after
 * state2; the key is followed by a zeroed IV-sized area */
1780 cipherconfig = (struct icp_qat_hw_cipher_algo_blk *)
1781 (cdesc->cd_cur_ptr + state1_size + state2_size);
1782 cipherconfig->cipher_config.val =
1783 ICP_QAT_HW_CIPHER_CONFIG_BUILD(ICP_QAT_HW_CIPHER_ECB_MODE,
1784 ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2,
1785 ICP_QAT_HW_CIPHER_KEY_CONVERT,
1786 ICP_QAT_HW_CIPHER_ENCRYPT);
1787 memcpy(cipherconfig->key, authkey, authkeylen);
1788 memset(cipherconfig->key + authkeylen,
1789 0, ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ);
1790 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1791 authkeylen + ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
1792 auth_param->hash_state_sz = ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
1794 case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
/* ZUC EIA3 uses auth MODE0, overriding the MODE1 config above */
1795 hash->auth_config.config =
1796 ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE0,
1797 cdesc->qat_hash_alg, digestsize);
1798 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
1799 state1_size = qat_hash_get_state1_size(
1800 ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3);
1801 state2_size = ICP_QAT_HW_ZUC_3G_EIA3_STATE2_SZ;
1802 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size
1803 + ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ);
1805 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1806 cdesc->cd_cur_ptr += state1_size + state2_size
1807 + ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
1808 auth_param->hash_state_sz = ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
/* ZUC auth requires at least a GEN2 QAT device */
1809 cdesc->min_qat_dev_gen = QAT_GEN2;
1812 case ICP_QAT_HW_AUTH_ALGO_MD5:
1813 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_MD5, authkey,
1814 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1816 QAT_LOG(ERR, "(MD5)precompute failed");
1819 state2_size = ICP_QAT_HW_MD5_STATE2_SZ;
1821 case ICP_QAT_HW_AUTH_ALGO_NULL:
1822 state1_size = qat_hash_get_state1_size(
1823 ICP_QAT_HW_AUTH_ALGO_NULL);
1824 state2_size = ICP_QAT_HW_NULL_STATE2_SZ;
1826 case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
1827 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
1828 state1_size = qat_hash_get_state1_size(
1829 ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC);
1830 state2_size = ICP_QAT_HW_AES_CBC_MAC_KEY_SZ +
1831 ICP_QAT_HW_AES_CCM_CBC_E_CTR0_SZ;
/* CCM AAD is prefixed with B0 and the AAD-length info block */
1833 if (aad_length > 0) {
1834 aad_length += ICP_QAT_HW_CCM_AAD_B0_LEN +
1835 ICP_QAT_HW_CCM_AAD_LEN_INFO;
1836 auth_param->u2.aad_sz =
1837 RTE_ALIGN_CEIL(aad_length,
1838 ICP_QAT_HW_CCM_AAD_ALIGNMENT);
1840 auth_param->u2.aad_sz = ICP_QAT_HW_CCM_AAD_B0_LEN;
1842 cdesc->aad_len = aad_length;
1843 hash->auth_counter.counter = 0;
1845 hash_cd_ctrl->outer_prefix_sz = digestsize;
1846 auth_param->hash_state_sz = digestsize;
1848 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1850 case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
1851 state1_size = qat_hash_get_state1_size(
1852 ICP_QAT_HW_AUTH_ALGO_KASUMI_F9);
1853 state2_size = ICP_QAT_HW_KASUMI_F9_STATE2_SZ;
1854 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
1855 pTempKey = (uint32_t *)(cdesc->cd_cur_ptr + state1_size
1858 * The Inner Hash Initial State2 block must contain IK
1859 * (Initialisation Key), followed by IK XOR-ed with KM
1860 * (Key Modifier): IK||(IK^KM).
1862 /* write the auth key */
1863 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1864 /* initialise temp key with auth key */
1865 memcpy(pTempKey, authkey, authkeylen);
1866 /* XOR Key with KASUMI F9 key modifier at 4 bytes level */
1867 for (wordIndex = 0; wordIndex < (authkeylen >> 2); wordIndex++)
1868 pTempKey[wordIndex] ^= KASUMI_F9_KEY_MODIFIER_4_BYTES;
1871 QAT_LOG(ERR, "Invalid HASH alg %u", cdesc->qat_hash_alg);
1875 /* Request template setup */
1876 qat_sym_session_init_common_hdr(header, qat_proto_flag);
1877 header->service_cmd_id = cdesc->qat_cmd;
1879 /* Auth CD config setup */
1880 hash_cd_ctrl->hash_cfg_offset = hash_offset >> 3;
1881 hash_cd_ctrl->hash_flags = ICP_QAT_FW_AUTH_HDR_FLAG_NO_NESTED;
1882 hash_cd_ctrl->inner_res_sz = digestsize;
1883 hash_cd_ctrl->final_sz = digestsize;
1884 hash_cd_ctrl->inner_state1_sz = state1_size;
1885 auth_param->auth_res_sz = digestsize;
1887 hash_cd_ctrl->inner_state2_sz = state2_size;
/* state2 follows the auth setup + 8-byte-aligned state1 (in qwords) */
1888 hash_cd_ctrl->inner_state2_offset = hash_cd_ctrl->hash_cfg_offset +
1889 ((sizeof(struct icp_qat_hw_auth_setup) +
1890 RTE_ALIGN_CEIL(hash_cd_ctrl->inner_state1_sz, 8))
1893 cdesc->cd_cur_ptr += state1_size + state2_size;
1894 cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
1896 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1897 cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;
/* Map an AES key length to the matching QAT cipher algorithm enum.
 * NOTE(review): the default/return lines are elided in this excerpt. */
1902 int qat_sym_validate_aes_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1905 case ICP_QAT_HW_AES_128_KEY_SZ:
1906 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
1908 case ICP_QAT_HW_AES_192_KEY_SZ:
1909 *alg = ICP_QAT_HW_CIPHER_ALGO_AES192;
1911 case ICP_QAT_HW_AES_256_KEY_SZ:
1912 *alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
/* Map an AES DOCSIS BPI key length (128/256 only — no 192) to the
 * matching QAT cipher algorithm enum.
 * NOTE(review): the default/return lines are elided in this excerpt. */
1920 int qat_sym_validate_aes_docsisbpi_key(int key_len,
1921 enum icp_qat_hw_cipher_algo *alg)
1924 case ICP_QAT_HW_AES_128_KEY_SZ:
1925 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
1927 case ICP_QAT_HW_AES_256_KEY_SZ:
1928 *alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
/* Accept only the SNOW 3G UEA2 key size and report the algorithm enum.
 * NOTE(review): the default/return lines are elided in this excerpt. */
1936 int qat_sym_validate_snow3g_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1939 case ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ:
1940 *alg = ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2;
/* Accept only the KASUMI key size and report the algorithm enum.
 * NOTE(review): the default/return lines are elided in this excerpt. */
1948 int qat_sym_validate_kasumi_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1951 case ICP_QAT_HW_KASUMI_KEY_SZ:
1952 *alg = ICP_QAT_HW_CIPHER_ALGO_KASUMI;
/* Accept only the single-DES key size and report the algorithm enum.
 * NOTE(review): the default/return lines are elided in this excerpt. */
1960 int qat_sym_validate_des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1963 case ICP_QAT_HW_DES_KEY_SZ:
1964 *alg = ICP_QAT_HW_CIPHER_ALGO_DES;
/* Accept any of the three 3DES keying options (1-, 2- or 3-key) and
 * report the algorithm enum.
 * NOTE(review): the default/return lines are elided in this excerpt. */
1972 int qat_sym_validate_3des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1975 case QAT_3DES_KEY_SZ_OPT1:
1976 case QAT_3DES_KEY_SZ_OPT2:
1977 case QAT_3DES_KEY_SZ_OPT3:
1978 *alg = ICP_QAT_HW_CIPHER_ALGO_3DES;
1986 int qat_sym_validate_zuc_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1989 case ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ:
1990 *alg = ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3;