/* SPDX-License-Identifier: (BSD-3-Clause OR GPL-2.0)
 * Copyright(c) 2015-2019 Intel Corporation
 */

#include <openssl/sha.h>	/* Needed to calculate pre-compute values */
#include <openssl/aes.h>	/* Needed to calculate pre-compute values */
#include <openssl/md5.h>	/* Needed to calculate pre-compute values */
#include <openssl/evp.h>	/* Needed for bpi runt block processing */

#include <rte_memcpy.h>
#include <rte_common.h>
#include <rte_spinlock.h>
#include <rte_byteorder.h>
#include <rte_malloc.h>
#include <rte_crypto_sym.h>

#include "qat_sym_session.h"
#include "qat_sym_pmd.h"

/** Frees a context previously created
 *  Depends on openssl libcrypto
 */
bpi_cipher_ctx_free(void *bpi_ctx)
	EVP_CIPHER_CTX_free((EVP_CIPHER_CTX *)bpi_ctx);

/** Creates a context in either AES or DES in ECB mode
 *  Depends on openssl libcrypto
 */
bpi_cipher_ctx_init(enum rte_crypto_cipher_algorithm cryptodev_algo,
		enum rte_crypto_cipher_operation direction __rte_unused,
		const uint8_t *key, uint16_t key_length, void **ctx)
	const EVP_CIPHER *algo = NULL;

	*ctx = EVP_CIPHER_CTX_new();

	if (cryptodev_algo == RTE_CRYPTO_CIPHER_DES_DOCSISBPI)

	if (key_length == ICP_QAT_HW_AES_128_KEY_SZ)
		algo = EVP_aes_128_ecb();
		algo = EVP_aes_256_ecb();

	/* IV will be ECB encrypted whether direction is encrypt or decrypt */
	if (EVP_EncryptInit_ex(*ctx, algo, NULL, key, 0) != 1) {
		EVP_CIPHER_CTX_free(*ctx);
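
/* Illustrative usage sketch only (not code called by the driver as written;
 * the key length below is an example value):
 *
 *	void *bpi_ctx = NULL;
 *	if (bpi_cipher_ctx_init(RTE_CRYPTO_CIPHER_AES_DOCSISBPI,
 *			RTE_CRYPTO_CIPHER_OP_ENCRYPT, key,
 *			ICP_QAT_HW_AES_128_KEY_SZ, &bpi_ctx) != 0)
 *		return -EINVAL;
 *	...
 *	bpi_cipher_ctx_free(bpi_ctx);
 */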
qat_is_cipher_alg_supported(enum rte_crypto_cipher_algorithm algo,
		struct qat_sym_dev_private *internals)
	const struct rte_cryptodev_capabilities *capability;

	while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
			RTE_CRYPTO_OP_TYPE_UNDEFINED) {
		if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)

		if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_CIPHER)

		if (capability->sym.cipher.algo == algo)

qat_is_auth_alg_supported(enum rte_crypto_auth_algorithm algo,
		struct qat_sym_dev_private *internals)
	const struct rte_cryptodev_capabilities *capability;

	while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
			RTE_CRYPTO_OP_TYPE_UNDEFINED) {
		if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)

		if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_AUTH)

		if (capability->sym.auth.algo == algo)
qat_sym_session_clear(struct rte_cryptodev *dev,
		struct rte_cryptodev_sym_session *sess)
	uint8_t index = dev->driver_id;
	void *sess_priv = get_sym_session_private_data(sess, index);
	struct qat_sym_session *s = (struct qat_sym_session *)sess_priv;

		bpi_cipher_ctx_free(s->bpi_ctx);
		memset(s, 0, qat_sym_session_get_private_size(dev));
		struct rte_mempool *sess_mp = rte_mempool_from_obj(sess_priv);

		set_sym_session_private_data(sess, index, NULL);
		rte_mempool_put(sess_mp, sess_priv);
qat_get_cmd_id(const struct rte_crypto_sym_xform *xform)

	if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER && xform->next == NULL)
		return ICP_QAT_FW_LA_CMD_CIPHER;

	/* Authentication Only */
	if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH && xform->next == NULL)
		return ICP_QAT_FW_LA_CMD_AUTH;

	if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
		/* AES-GCM and AES-CCM work in different orders:
		 * GCM first encrypts and then generates the hash, whereas
		 * AES-CCM first generates the hash and then encrypts.
		 * A similar relation applies to decryption.
		 */
		if (xform->aead.op == RTE_CRYPTO_AEAD_OP_ENCRYPT)
			if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
				return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
				return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
			if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
				return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
				return ICP_QAT_FW_LA_CMD_CIPHER_HASH;

	if (xform->next == NULL)

	/* Cipher then Authenticate */
	if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
			xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
		return ICP_QAT_FW_LA_CMD_CIPHER_HASH;

	/* Authenticate then Cipher */
	if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH &&
			xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
		return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
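
/* Summary of the xform-chain to firmware command mapping implemented above
 * (derived directly from the checks in qat_get_cmd_id()):
 *
 *	cipher only                     -> ICP_QAT_FW_LA_CMD_CIPHER
 *	auth only                       -> ICP_QAT_FW_LA_CMD_AUTH
 *	AES-GCM encrypt / AES-CCM decrypt -> ICP_QAT_FW_LA_CMD_CIPHER_HASH
 *	AES-GCM decrypt / AES-CCM encrypt -> ICP_QAT_FW_LA_CMD_HASH_CIPHER
 *	cipher then auth chain          -> ICP_QAT_FW_LA_CMD_CIPHER_HASH
 *	auth then cipher chain          -> ICP_QAT_FW_LA_CMD_HASH_CIPHER
 */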
static struct rte_crypto_auth_xform *
qat_get_auth_xform(struct rte_crypto_sym_xform *xform)

	if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH)

static struct rte_crypto_cipher_xform *
qat_get_cipher_xform(struct rte_crypto_sym_xform *xform)

	if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
		return &xform->cipher;
qat_sym_session_configure_cipher(struct rte_cryptodev *dev,
		struct rte_crypto_sym_xform *xform,
		struct qat_sym_session *session)
	struct qat_sym_dev_private *internals = dev->data->dev_private;
	struct rte_crypto_cipher_xform *cipher_xform = NULL;

	/* Get cipher xform from crypto xform chain */
	cipher_xform = qat_get_cipher_xform(xform);

	session->cipher_iv.offset = cipher_xform->iv.offset;
	session->cipher_iv.length = cipher_xform->iv.length;

	switch (cipher_xform->algo) {
	case RTE_CRYPTO_CIPHER_AES_CBC:
		if (qat_sym_validate_aes_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES cipher key size");
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
	case RTE_CRYPTO_CIPHER_AES_CTR:
		if (qat_sym_validate_aes_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES cipher key size");
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
	case RTE_CRYPTO_CIPHER_SNOW3G_UEA2:
		if (qat_sym_validate_snow3g_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid SNOW 3G cipher key size");
		session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
	case RTE_CRYPTO_CIPHER_NULL:
		session->qat_cipher_alg = ICP_QAT_HW_CIPHER_ALGO_NULL;
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
	case RTE_CRYPTO_CIPHER_KASUMI_F8:
		if (qat_sym_validate_kasumi_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid KASUMI cipher key size");
		session->qat_mode = ICP_QAT_HW_CIPHER_F8_MODE;
	case RTE_CRYPTO_CIPHER_3DES_CBC:
		if (qat_sym_validate_3des_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid 3DES cipher key size");
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
	case RTE_CRYPTO_CIPHER_DES_CBC:
		if (qat_sym_validate_des_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid DES cipher key size");
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
	case RTE_CRYPTO_CIPHER_3DES_CTR:
		if (qat_sym_validate_3des_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid 3DES cipher key size");
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
	case RTE_CRYPTO_CIPHER_DES_DOCSISBPI:
		ret = bpi_cipher_ctx_init(
				cipher_xform->key.data,
				cipher_xform->key.length,
			QAT_LOG(ERR, "failed to create DES BPI ctx");
		if (qat_sym_validate_des_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid DES cipher key size");
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
	case RTE_CRYPTO_CIPHER_AES_DOCSISBPI:
		ret = bpi_cipher_ctx_init(
				cipher_xform->key.data,
				cipher_xform->key.length,
			QAT_LOG(ERR, "failed to create AES BPI ctx");
		if (qat_sym_validate_aes_docsisbpi_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES DOCSISBPI key size");
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
	case RTE_CRYPTO_CIPHER_ZUC_EEA3:
		if (!qat_is_cipher_alg_supported(
				cipher_xform->algo, internals)) {
			QAT_LOG(ERR, "%s not supported on this device",
				rte_crypto_cipher_algorithm_strings
					[cipher_xform->algo]);
		if (qat_sym_validate_zuc_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid ZUC cipher key size");
		session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
	case RTE_CRYPTO_CIPHER_AES_XTS:
		if ((cipher_xform->key.length/2) == ICP_QAT_HW_AES_192_KEY_SZ) {
			QAT_LOG(ERR, "AES-XTS-192 not supported");
		if (qat_sym_validate_aes_key((cipher_xform->key.length/2),
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES-XTS cipher key size");
		session->qat_mode = ICP_QAT_HW_CIPHER_XTS_MODE;
	case RTE_CRYPTO_CIPHER_3DES_ECB:
	case RTE_CRYPTO_CIPHER_AES_ECB:
	case RTE_CRYPTO_CIPHER_AES_F8:
	case RTE_CRYPTO_CIPHER_ARC4:
		QAT_LOG(ERR, "Crypto QAT PMD: Unsupported Cipher alg %u",
		QAT_LOG(ERR, "Crypto: Undefined Cipher specified %u\n",

	if (cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
		session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
		session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;

	if (qat_sym_session_aead_create_cd_cipher(session,
			cipher_xform->key.data,
			cipher_xform->key.length)) {

	if (session->bpi_ctx) {
		bpi_cipher_ctx_free(session->bpi_ctx);
		session->bpi_ctx = NULL;
qat_sym_session_configure(struct rte_cryptodev *dev,
		struct rte_crypto_sym_xform *xform,
		struct rte_cryptodev_sym_session *sess,
		struct rte_mempool *mempool)
	void *sess_private_data;

	if (rte_mempool_get(mempool, &sess_private_data)) {
			"Couldn't get object from session mempool");

	ret = qat_sym_session_set_parameters(dev, xform, sess_private_data);
			"Crypto QAT PMD: failed to configure session parameters");

		/* Return session to mempool */
		rte_mempool_put(mempool, sess_private_data);

	set_sym_session_private_data(sess, dev->driver_id,
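
/* Minimal application-side usage sketch (assumptions: dev_id is a started QAT
 * cryptodev and mempool is a session mempool created by the application;
 * error handling elided):
 *
 *	struct rte_cryptodev_sym_session *sess =
 *		rte_cryptodev_sym_session_create(mempool);
 *	rte_cryptodev_sym_session_init(dev_id, sess, &xform, mempool);
 *	// the init call ends up in qat_sym_session_configure() above
 */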
qat_sym_session_set_ext_hash_flags(struct qat_sym_session *session,
	struct icp_qat_fw_comn_req_hdr *header = &session->fw_req.comn_hdr;
	struct icp_qat_fw_cipher_auth_cd_ctrl_hdr *cd_ctrl =
			(struct icp_qat_fw_cipher_auth_cd_ctrl_hdr *)
			session->fw_req.cd_ctrl.content_desc_ctrl_lw;

	/* Set the Use Extended Protocol Flags bit in LW 1 */
	QAT_FIELD_SET(header->comn_req_flags,
			QAT_COMN_EXT_FLAGS_USED,
			QAT_COMN_EXT_FLAGS_BITPOS,
			QAT_COMN_EXT_FLAGS_MASK);

	/* Set Hash Flags in LW 28 */
	cd_ctrl->hash_flags |= hash_flag;

	/* Set proto flags in LW 1 */
	switch (session->qat_cipher_alg) {
	case ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_SNOW_3G_PROTO);
		ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
				header->serv_specif_flags, 0);
	case ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_NO_PROTO);
		ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
				header->serv_specif_flags,
				ICP_QAT_FW_LA_ZUC_3G_PROTO);
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_NO_PROTO);
		ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
				header->serv_specif_flags, 0);
qat_sym_session_handle_mixed(struct qat_sym_session *session)

	if (session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3 &&
			session->qat_cipher_alg !=
			ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
		session->min_qat_dev_gen = QAT_GEN3;
		qat_sym_session_set_ext_hash_flags(session,
			1 << ICP_QAT_FW_AUTH_HDR_FLAG_ZUC_EIA3_BITPOS);
	} else if (session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2 &&
			session->qat_cipher_alg !=
			ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
		session->min_qat_dev_gen = QAT_GEN3;
		qat_sym_session_set_ext_hash_flags(session,
			1 << ICP_QAT_FW_AUTH_HDR_FLAG_SNOW3G_UIA2_BITPOS);
	} else if ((session->aes_cmac ||
			session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_NULL) &&
			(session->qat_cipher_alg ==
			ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2 ||
			session->qat_cipher_alg ==
			ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)) {
		session->min_qat_dev_gen = QAT_GEN3;
		qat_sym_session_set_ext_hash_flags(session, 0);
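
/* Note on the "mixed" combinations handled above: pairing a wireless hash
 * (SNOW 3G UIA2 or ZUC EIA3) with a different cipher, or an AES-CMAC/NULL
 * hash with a SNOW 3G/ZUC cipher, relies on the extended protocol flags set
 * by qat_sym_session_set_ext_hash_flags(), so such sessions are restricted to
 * QAT_GEN3 and newer devices via min_qat_dev_gen.
 */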
qat_sym_session_set_parameters(struct rte_cryptodev *dev,
		struct rte_crypto_sym_xform *xform, void *session_private)
	struct qat_sym_session *session = session_private;

	/* Set context descriptor physical address */
	session->cd_paddr = rte_mempool_virt2iova(session) +
			offsetof(struct qat_sym_session, cd);

	session->min_qat_dev_gen = QAT_GEN1;

	/* Get requested QAT command id */
	qat_cmd_id = qat_get_cmd_id(xform);
	if (qat_cmd_id < 0 || qat_cmd_id >= ICP_QAT_FW_LA_CMD_DELIMITER) {
		QAT_LOG(ERR, "Unsupported xform chain requested");
	session->qat_cmd = (enum icp_qat_fw_la_cmd_id)qat_cmd_id;
	switch (session->qat_cmd) {
	case ICP_QAT_FW_LA_CMD_CIPHER:
		ret = qat_sym_session_configure_cipher(dev, xform, session);
	case ICP_QAT_FW_LA_CMD_AUTH:
		ret = qat_sym_session_configure_auth(dev, xform, session);
	case ICP_QAT_FW_LA_CMD_CIPHER_HASH:
		if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
			ret = qat_sym_session_configure_aead(dev, xform,
			ret = qat_sym_session_configure_cipher(dev,
			ret = qat_sym_session_configure_auth(dev,
			/* Special handling of mixed hash+cipher algorithms */
			qat_sym_session_handle_mixed(session);
	case ICP_QAT_FW_LA_CMD_HASH_CIPHER:
		if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
			ret = qat_sym_session_configure_aead(dev, xform,
			ret = qat_sym_session_configure_auth(dev,
			ret = qat_sym_session_configure_cipher(dev,
			/* Special handling of mixed hash+cipher algorithms */
			qat_sym_session_handle_mixed(session);
	case ICP_QAT_FW_LA_CMD_TRNG_GET_RANDOM:
	case ICP_QAT_FW_LA_CMD_TRNG_TEST:
	case ICP_QAT_FW_LA_CMD_SSL3_KEY_DERIVE:
	case ICP_QAT_FW_LA_CMD_TLS_V1_1_KEY_DERIVE:
	case ICP_QAT_FW_LA_CMD_TLS_V1_2_KEY_DERIVE:
	case ICP_QAT_FW_LA_CMD_MGF1:
	case ICP_QAT_FW_LA_CMD_AUTH_PRE_COMP:
	case ICP_QAT_FW_LA_CMD_CIPHER_PRE_COMP:
	case ICP_QAT_FW_LA_CMD_DELIMITER:
		QAT_LOG(ERR, "Unsupported Service %u",
		QAT_LOG(ERR, "Unsupported Service %u",
qat_sym_session_handle_single_pass(struct qat_sym_dev_private *internals,
		struct qat_sym_session *session,
		struct rte_crypto_aead_xform *aead_xform)
	enum qat_device_gen qat_dev_gen = internals->qat_dev->qat_dev_gen;

	if (qat_dev_gen == QAT_GEN3 &&
			aead_xform->iv.length == QAT_AES_GCM_SPC_IV_SIZE) {
		/* Use faster Single-Pass GCM */
		struct icp_qat_fw_la_cipher_req_params *cipher_param =
				(void *) &session->fw_req.serv_specif_rqpars;

		session->is_single_pass = 1;
		session->min_qat_dev_gen = QAT_GEN3;
		session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER;
		session->qat_mode = ICP_QAT_HW_CIPHER_AEAD_MODE;
		session->cipher_iv.offset = aead_xform->iv.offset;
		session->cipher_iv.length = aead_xform->iv.length;
		if (qat_sym_session_aead_create_cd_cipher(session,
				aead_xform->key.data, aead_xform->key.length))
		session->aad_len = aead_xform->aad_length;
		session->digest_length = aead_xform->digest_length;
		if (aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
			session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
			session->auth_op = ICP_QAT_HW_AUTH_GENERATE;
			ICP_QAT_FW_LA_RET_AUTH_SET(
				session->fw_req.comn_hdr.serv_specif_flags,
				ICP_QAT_FW_LA_RET_AUTH_RES);
			session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
			session->auth_op = ICP_QAT_HW_AUTH_VERIFY;
			ICP_QAT_FW_LA_CMP_AUTH_SET(
				session->fw_req.comn_hdr.serv_specif_flags,
				ICP_QAT_FW_LA_CMP_AUTH_RES);
		ICP_QAT_FW_LA_SINGLE_PASS_PROTO_FLAG_SET(
			session->fw_req.comn_hdr.serv_specif_flags,
			ICP_QAT_FW_LA_SINGLE_PASS_PROTO);
		ICP_QAT_FW_LA_PROTO_SET(
			session->fw_req.comn_hdr.serv_specif_flags,
			ICP_QAT_FW_LA_NO_PROTO);
		ICP_QAT_FW_LA_GCM_IV_LEN_FLAG_SET(
			session->fw_req.comn_hdr.serv_specif_flags,
			ICP_QAT_FW_LA_GCM_IV_LEN_12_OCTETS);
		session->fw_req.comn_hdr.service_cmd_id =
			ICP_QAT_FW_LA_CMD_CIPHER;
		session->cd.cipher.cipher_config.val =
			ICP_QAT_HW_CIPHER_CONFIG_BUILD(
				ICP_QAT_HW_CIPHER_AEAD_MODE,
				session->qat_cipher_alg,
				ICP_QAT_HW_CIPHER_NO_CONVERT,
		QAT_FIELD_SET(session->cd.cipher.cipher_config.val,
			aead_xform->digest_length,
			QAT_CIPHER_AEAD_HASH_CMP_LEN_BITPOS,
			QAT_CIPHER_AEAD_HASH_CMP_LEN_MASK);
		session->cd.cipher.cipher_config.reserved =
			ICP_QAT_HW_CIPHER_CONFIG_BUILD_UPPER(
				aead_xform->aad_length);
		cipher_param->spc_aad_sz = aead_xform->aad_length;
		cipher_param->spc_auth_res_sz = aead_xform->digest_length;
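
/* Single-pass AES-GCM is only selected when the device is QAT_GEN3 and the
 * AEAD IV length equals QAT_AES_GCM_SPC_IV_SIZE (12 bytes, matching the
 * ICP_QAT_FW_LA_GCM_IV_LEN_12_OCTETS flag set above); any other IV length
 * falls back to the regular CIPHER_HASH / HASH_CIPHER path in
 * qat_sym_session_configure_aead().
 */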
qat_sym_session_configure_auth(struct rte_cryptodev *dev,
				struct rte_crypto_sym_xform *xform,
				struct qat_sym_session *session)
	struct rte_crypto_auth_xform *auth_xform = qat_get_auth_xform(xform);
	struct qat_sym_dev_private *internals = dev->data->dev_private;
	const uint8_t *key_data = auth_xform->key.data;
	uint8_t key_length = auth_xform->key.length;
	session->aes_cmac = 0;

	switch (auth_xform->algo) {
	case RTE_CRYPTO_AUTH_SHA1_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA1;
	case RTE_CRYPTO_AUTH_SHA224_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA224;
	case RTE_CRYPTO_AUTH_SHA256_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA256;
	case RTE_CRYPTO_AUTH_SHA384_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA384;
	case RTE_CRYPTO_AUTH_SHA512_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA512;
	case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
	case RTE_CRYPTO_AUTH_AES_CMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
		session->aes_cmac = 1;
	case RTE_CRYPTO_AUTH_AES_GMAC:
		if (qat_sym_validate_aes_key(auth_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES key size");
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
	case RTE_CRYPTO_AUTH_SNOW3G_UIA2:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2;
	case RTE_CRYPTO_AUTH_MD5_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_MD5;
	case RTE_CRYPTO_AUTH_NULL:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_NULL;
	case RTE_CRYPTO_AUTH_KASUMI_F9:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_KASUMI_F9;
	case RTE_CRYPTO_AUTH_ZUC_EIA3:
		if (!qat_is_auth_alg_supported(auth_xform->algo, internals)) {
			QAT_LOG(ERR, "%s not supported on this device",
				rte_crypto_auth_algorithm_strings
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3;
	case RTE_CRYPTO_AUTH_SHA1:
	case RTE_CRYPTO_AUTH_SHA256:
	case RTE_CRYPTO_AUTH_SHA512:
	case RTE_CRYPTO_AUTH_SHA224:
	case RTE_CRYPTO_AUTH_SHA384:
	case RTE_CRYPTO_AUTH_MD5:
	case RTE_CRYPTO_AUTH_AES_CBC_MAC:
		QAT_LOG(ERR, "Crypto: Unsupported hash alg %u",
		QAT_LOG(ERR, "Crypto: Undefined Hash algo %u specified",
	session->auth_iv.offset = auth_xform->iv.offset;
	session->auth_iv.length = auth_xform->iv.length;

	if (auth_xform->algo == RTE_CRYPTO_AUTH_AES_GMAC) {
		if (auth_xform->op == RTE_CRYPTO_AUTH_OP_GENERATE) {
			session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER_HASH;
			session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
			/*
			 * It needs to create cipher desc content first,
			 * then authentication
			 */
			if (qat_sym_session_aead_create_cd_cipher(session,
					auth_xform->key.data,
					auth_xform->key.length))

			if (qat_sym_session_aead_create_cd_auth(session,
					auth_xform->digest_length,
			session->qat_cmd = ICP_QAT_FW_LA_CMD_HASH_CIPHER;
			session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
			/*
			 * It needs to create authentication desc content first,
			 * then cipher
			 */
			if (qat_sym_session_aead_create_cd_auth(session,
					auth_xform->digest_length,
			if (qat_sym_session_aead_create_cd_cipher(session,
					auth_xform->key.data,
					auth_xform->key.length))

		/* Restore to authentication only */
		session->qat_cmd = ICP_QAT_FW_LA_CMD_AUTH;
		if (qat_sym_session_aead_create_cd_auth(session,
				auth_xform->digest_length,

	session->digest_length = auth_xform->digest_length;
qat_sym_session_configure_aead(struct rte_cryptodev *dev,
				struct rte_crypto_sym_xform *xform,
				struct qat_sym_session *session)
	struct rte_crypto_aead_xform *aead_xform = &xform->aead;
	enum rte_crypto_auth_operation crypto_operation;

	/*
	 * Store AEAD IV parameters as cipher IV,
	 * to avoid unnecessary memory usage
	 */
	session->cipher_iv.offset = xform->aead.iv.offset;
	session->cipher_iv.length = xform->aead.iv.length;

	switch (aead_xform->algo) {
	case RTE_CRYPTO_AEAD_AES_GCM:
		if (qat_sym_validate_aes_key(aead_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES key size");
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
	case RTE_CRYPTO_AEAD_AES_CCM:
		if (qat_sym_validate_aes_key(aead_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES key size");
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC;
		QAT_LOG(ERR, "Crypto: Undefined AEAD specified %u\n",

	session->is_single_pass = 0;
	if (aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) {
		/* Use faster Single-Pass GCM if possible */
		int res = qat_sym_session_handle_single_pass(
			dev->data->dev_private, session, aead_xform);
		if (session->is_single_pass)

	if ((aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT &&
			aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) ||
			(aead_xform->op == RTE_CRYPTO_AEAD_OP_DECRYPT &&
			aead_xform->algo == RTE_CRYPTO_AEAD_AES_CCM)) {
		session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
		/*
		 * It needs to create cipher desc content first,
		 * then authentication
		 */
		crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
			RTE_CRYPTO_AUTH_OP_GENERATE : RTE_CRYPTO_AUTH_OP_VERIFY;

		if (qat_sym_session_aead_create_cd_cipher(session,
				aead_xform->key.data,
				aead_xform->key.length))

		if (qat_sym_session_aead_create_cd_auth(session,
				aead_xform->key.data,
				aead_xform->key.length,
				aead_xform->aad_length,
				aead_xform->digest_length,
		session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
		/*
		 * It needs to create authentication desc content first,
		 * then cipher
		 */
		crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
			RTE_CRYPTO_AUTH_OP_VERIFY : RTE_CRYPTO_AUTH_OP_GENERATE;

		if (qat_sym_session_aead_create_cd_auth(session,
				aead_xform->key.data,
				aead_xform->key.length,
				aead_xform->aad_length,
				aead_xform->digest_length,
		if (qat_sym_session_aead_create_cd_cipher(session,
				aead_xform->key.data,
				aead_xform->key.length))

	session->digest_length = aead_xform->digest_length;
unsigned int qat_sym_session_get_private_size(
		struct rte_cryptodev *dev __rte_unused)

	return RTE_ALIGN_CEIL(sizeof(struct qat_sym_session), 8);

/* returns block size in bytes per cipher algo */
int qat_cipher_get_block_size(enum icp_qat_hw_cipher_algo qat_cipher_alg)

	switch (qat_cipher_alg) {
	case ICP_QAT_HW_CIPHER_ALGO_DES:
		return ICP_QAT_HW_DES_BLK_SZ;
	case ICP_QAT_HW_CIPHER_ALGO_3DES:
		return ICP_QAT_HW_3DES_BLK_SZ;
	case ICP_QAT_HW_CIPHER_ALGO_AES128:
	case ICP_QAT_HW_CIPHER_ALGO_AES192:
	case ICP_QAT_HW_CIPHER_ALGO_AES256:
		return ICP_QAT_HW_AES_BLK_SZ;
		QAT_LOG(ERR, "invalid block cipher alg %u", qat_cipher_alg);
/*
 * Returns size in bytes per hash algo for state1 size field in cd_ctrl
 * This is digest size rounded up to nearest quadword
 */
static int qat_hash_get_state1_size(enum icp_qat_hw_auth_algo qat_hash_alg)

	switch (qat_hash_alg) {
	case ICP_QAT_HW_AUTH_ALGO_SHA1:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA1_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_SHA224:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA224_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_SHA256:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA256_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_SHA384:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA384_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_SHA512:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
	case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_GALOIS_128_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_ZUC_3G_EIA3_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_SNOW_3G_UIA2_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_MD5:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_MD5_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_KASUMI_F9_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_CBC_MAC_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_NULL:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_NULL_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
		/* return maximum state1 size in this case */
		return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
		QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
/* returns digest size in bytes per hash algo */
static int qat_hash_get_digest_size(enum icp_qat_hw_auth_algo qat_hash_alg)

	switch (qat_hash_alg) {
	case ICP_QAT_HW_AUTH_ALGO_SHA1:
		return ICP_QAT_HW_SHA1_STATE1_SZ;
	case ICP_QAT_HW_AUTH_ALGO_SHA224:
		return ICP_QAT_HW_SHA224_STATE1_SZ;
	case ICP_QAT_HW_AUTH_ALGO_SHA256:
		return ICP_QAT_HW_SHA256_STATE1_SZ;
	case ICP_QAT_HW_AUTH_ALGO_SHA384:
		return ICP_QAT_HW_SHA384_STATE1_SZ;
	case ICP_QAT_HW_AUTH_ALGO_SHA512:
		return ICP_QAT_HW_SHA512_STATE1_SZ;
	case ICP_QAT_HW_AUTH_ALGO_MD5:
		return ICP_QAT_HW_MD5_STATE1_SZ;
	case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
		return ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
	case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
		/* return maximum digest size in this case */
		return ICP_QAT_HW_SHA512_STATE1_SZ;
		QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
/* returns block size in bytes per hash algo */
static int qat_hash_get_block_size(enum icp_qat_hw_auth_algo qat_hash_alg)

	switch (qat_hash_alg) {
	case ICP_QAT_HW_AUTH_ALGO_SHA1:
	case ICP_QAT_HW_AUTH_ALGO_SHA224:
		return SHA256_CBLOCK;
	case ICP_QAT_HW_AUTH_ALGO_SHA256:
		return SHA256_CBLOCK;
	case ICP_QAT_HW_AUTH_ALGO_SHA384:
		return SHA512_CBLOCK;
	case ICP_QAT_HW_AUTH_ALGO_SHA512:
		return SHA512_CBLOCK;
	case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
	case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
		return ICP_QAT_HW_AES_BLK_SZ;
	case ICP_QAT_HW_AUTH_ALGO_MD5:
	case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
		/* return maximum block size in this case */
		return SHA512_CBLOCK;
		QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
static int partial_hash_sha1(uint8_t *data_in, uint8_t *data_out)

	if (!SHA1_Init(&ctx))
	SHA1_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx, SHA_DIGEST_LENGTH);

static int partial_hash_sha224(uint8_t *data_in, uint8_t *data_out)

	if (!SHA224_Init(&ctx))
	SHA256_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);

static int partial_hash_sha256(uint8_t *data_in, uint8_t *data_out)

	if (!SHA256_Init(&ctx))
	SHA256_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);

static int partial_hash_sha384(uint8_t *data_in, uint8_t *data_out)

	if (!SHA384_Init(&ctx))
	SHA512_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);

static int partial_hash_sha512(uint8_t *data_in, uint8_t *data_out)

	if (!SHA512_Init(&ctx))
	SHA512_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);

static int partial_hash_md5(uint8_t *data_in, uint8_t *data_out)

	if (!MD5_Init(&ctx))
	MD5_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx, MD5_DIGEST_LENGTH);
static int partial_hash_compute(enum icp_qat_hw_auth_algo hash_alg,
	uint8_t digest[qat_hash_get_digest_size(
			ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
	uint32_t *hash_state_out_be32;
	uint64_t *hash_state_out_be64;

	digest_size = qat_hash_get_digest_size(hash_alg);
	if (digest_size <= 0)

	hash_state_out_be32 = (uint32_t *)data_out;
	hash_state_out_be64 = (uint64_t *)data_out;

	case ICP_QAT_HW_AUTH_ALGO_SHA1:
		if (partial_hash_sha1(data_in, digest))
		for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
			*hash_state_out_be32 =
				rte_bswap32(*(((uint32_t *)digest)+i));
	case ICP_QAT_HW_AUTH_ALGO_SHA224:
		if (partial_hash_sha224(data_in, digest))
		for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
			*hash_state_out_be32 =
				rte_bswap32(*(((uint32_t *)digest)+i));
	case ICP_QAT_HW_AUTH_ALGO_SHA256:
		if (partial_hash_sha256(data_in, digest))
		for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
			*hash_state_out_be32 =
				rte_bswap32(*(((uint32_t *)digest)+i));
	case ICP_QAT_HW_AUTH_ALGO_SHA384:
		if (partial_hash_sha384(data_in, digest))
		for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
			*hash_state_out_be64 =
				rte_bswap64(*(((uint64_t *)digest)+i));
	case ICP_QAT_HW_AUTH_ALGO_SHA512:
		if (partial_hash_sha512(data_in, digest))
		for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
			*hash_state_out_be64 =
				rte_bswap64(*(((uint64_t *)digest)+i));
	case ICP_QAT_HW_AUTH_ALGO_MD5:
		if (partial_hash_md5(data_in, data_out))
		QAT_LOG(ERR, "invalid hash alg %u", hash_alg);
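
/* The partial_hash_* helpers above run a single compression-function step and
 * copy the raw OpenSSL context state out; partial_hash_compute() then
 * byte-swaps that state into big-endian words (32-bit words for
 * SHA-1/224/256, 64-bit words for SHA-384/512) as written into the content
 * descriptor, while the MD5 state is copied through unswapped.
 */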
#define HMAC_IPAD_VALUE	0x36
#define HMAC_OPAD_VALUE	0x5c
#define HASH_XCBC_PRECOMP_KEY_NUM 3

static const uint8_t AES_CMAC_SEED[ICP_QAT_HW_AES_128_KEY_SZ];

static void aes_cmac_key_derive(uint8_t *base, uint8_t *derived)

	derived[0] = base[0] << 1;
	for (i = 1; i < ICP_QAT_HW_AES_BLK_SZ ; i++) {
		derived[i] = base[i] << 1;
		derived[i - 1] |= base[i] >> 7;

	derived[ICP_QAT_HW_AES_BLK_SZ - 1] ^= QAT_AES_CMAC_CONST_RB;
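
/* aes_cmac_key_derive() is the AES-CMAC subkey derivation of NIST SP 800-38B:
 * the 16-byte block is multiplied by x in GF(2^128), i.e. shifted left by one
 * bit, and per the standard the final byte is XORed with the Rb constant
 * (0x87 for a 128-bit block, which is the value QAT_AES_CMAC_CONST_RB is
 * expected to hold) only when the most significant bit of the input block
 * is 1.
 */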
static int qat_sym_do_precomputes(enum icp_qat_hw_auth_algo hash_alg,
			const uint8_t *auth_key,
			uint16_t auth_keylen,
			uint8_t *p_state_buf,
			uint16_t *p_state_len,
	uint8_t ipad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
	uint8_t opad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];

	if (hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC) {
		uint8_t k0[ICP_QAT_HW_AES_128_KEY_SZ];

		auth_keylen = ICP_QAT_HW_AES_128_KEY_SZ;

		in = rte_zmalloc("AES CMAC K1",
				ICP_QAT_HW_AES_128_KEY_SZ, 16);
			QAT_LOG(ERR, "Failed to alloc memory");

		rte_memcpy(in, AES_CMAC_SEED,
				ICP_QAT_HW_AES_128_KEY_SZ);
		rte_memcpy(p_state_buf, auth_key, auth_keylen);

		if (AES_set_encrypt_key(auth_key, auth_keylen << 3,

		AES_encrypt(in, k0, &enc_key);

		k1 = p_state_buf + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
		k2 = k1 + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;

		aes_cmac_key_derive(k0, k1);
		aes_cmac_key_derive(k1, k2);

		memset(k0, 0, ICP_QAT_HW_AES_128_KEY_SZ);
		*p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;

		static uint8_t qat_aes_xcbc_key_seed[
				ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ] = {
			0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
			0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
			0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
			0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
			0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
			0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,

		uint8_t *out = p_state_buf;

		in = rte_zmalloc("working mem for key",
				ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ, 16);
			QAT_LOG(ERR, "Failed to alloc memory");

		rte_memcpy(in, qat_aes_xcbc_key_seed,
				ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
		for (x = 0; x < HASH_XCBC_PRECOMP_KEY_NUM; x++) {
			if (AES_set_encrypt_key(auth_key,
					(x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ));
					(x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ),
					0, ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);

			AES_encrypt(in, out, &enc_key);
			in += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
			out += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
		*p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
		rte_free(in - x*ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ);
	} else if ((hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_128) ||
			(hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_64)) {
		uint8_t *out = p_state_buf;

		memset(p_state_buf, 0, ICP_QAT_HW_GALOIS_H_SZ +
				ICP_QAT_HW_GALOIS_LEN_A_SZ +
				ICP_QAT_HW_GALOIS_E_CTR0_SZ);
		in = rte_zmalloc("working mem for key",
				ICP_QAT_HW_GALOIS_H_SZ, 16);
			QAT_LOG(ERR, "Failed to alloc memory");

		memset(in, 0, ICP_QAT_HW_GALOIS_H_SZ);
		if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
		AES_encrypt(in, out, &enc_key);
		*p_state_len = ICP_QAT_HW_GALOIS_H_SZ +
				ICP_QAT_HW_GALOIS_LEN_A_SZ +
				ICP_QAT_HW_GALOIS_E_CTR0_SZ;

	block_size = qat_hash_get_block_size(hash_alg);

	/* init ipad and opad from key and xor with fixed values */
	memset(ipad, 0, block_size);
	memset(opad, 0, block_size);

	if (auth_keylen > (unsigned int)block_size) {
		QAT_LOG(ERR, "invalid keylen %u", auth_keylen);
	rte_memcpy(ipad, auth_key, auth_keylen);
	rte_memcpy(opad, auth_key, auth_keylen);

	for (i = 0; i < block_size; i++) {
		uint8_t *ipad_ptr = ipad + i;
		uint8_t *opad_ptr = opad + i;
		*ipad_ptr ^= HMAC_IPAD_VALUE;
		*opad_ptr ^= HMAC_OPAD_VALUE;

	/* do partial hash of ipad and copy to state1 */
	if (partial_hash_compute(hash_alg, ipad, p_state_buf)) {
		memset(ipad, 0, block_size);
		memset(opad, 0, block_size);
		QAT_LOG(ERR, "ipad precompute failed");

	/*
	 * State len is a multiple of 8, so may be larger than the digest.
	 * Put the partial hash of opad state_len bytes after state1
	 */
	*p_state_len = qat_hash_get_state1_size(hash_alg);
	if (partial_hash_compute(hash_alg, opad, p_state_buf + *p_state_len)) {
		memset(ipad, 0, block_size);
		memset(opad, 0, block_size);
		QAT_LOG(ERR, "opad precompute failed");

	/* don't leave data lying around */
	memset(ipad, 0, block_size);
	memset(opad, 0, block_size);
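
/* Resulting HMAC precompute layout, as derived from the code above: state1
 * holds the partial hash of (key XOR ipad) and the partial hash of
 * (key XOR opad) is written immediately after it, at an offset of
 * qat_hash_get_state1_size() bytes, so the content descriptor carries only
 * precomputed hash state rather than the raw authentication key.
 */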
qat_sym_session_init_common_hdr(struct icp_qat_fw_comn_req_hdr *header,
		enum qat_sym_proto_flag proto_flags)
		ICP_QAT_FW_COMN_HDR_FLAGS_BUILD(ICP_QAT_FW_COMN_REQ_FLAG_SET);
	header->service_type = ICP_QAT_FW_COMN_REQ_CPM_FW_LA;
	header->comn_req_flags =
		ICP_QAT_FW_COMN_FLAGS_BUILD(QAT_COMN_CD_FLD_TYPE_64BIT_ADR,
					QAT_COMN_PTR_TYPE_FLAT);
	ICP_QAT_FW_LA_PARTIAL_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_PARTIAL_NONE);
	ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(header->serv_specif_flags,
					ICP_QAT_FW_CIPH_IV_16BYTE_DATA);

	switch (proto_flags) {
	case QAT_CRYPTO_PROTO_FLAG_NONE:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_PROTO);
	case QAT_CRYPTO_PROTO_FLAG_CCM:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_CCM_PROTO);
	case QAT_CRYPTO_PROTO_FLAG_GCM:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_GCM_PROTO);
	case QAT_CRYPTO_PROTO_FLAG_SNOW3G:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_SNOW_3G_PROTO);
	case QAT_CRYPTO_PROTO_FLAG_ZUC:
		ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_ZUC_3G_PROTO);

	ICP_QAT_FW_LA_UPDATE_STATE_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_UPDATE_STATE);
	ICP_QAT_FW_LA_DIGEST_IN_BUFFER_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_DIGEST_IN_BUFFER);
/*
 * Snow3G and ZUC should never use this function; they set their protocol
 * flags in both the cipher and auth parts of the content descriptor
 * building functions instead.
 */
static enum qat_sym_proto_flag
qat_get_crypto_proto_flag(uint16_t flags)

	int proto = ICP_QAT_FW_LA_PROTO_GET(flags);
	enum qat_sym_proto_flag qat_proto_flag =
			QAT_CRYPTO_PROTO_FLAG_NONE;

	case ICP_QAT_FW_LA_GCM_PROTO:
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
	case ICP_QAT_FW_LA_CCM_PROTO:
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;

	return qat_proto_flag;
int qat_sym_session_aead_create_cd_cipher(struct qat_sym_session *cdesc,
						const uint8_t *cipherkey,
						uint32_t cipherkeylen)
	struct icp_qat_hw_cipher_algo_blk *cipher;
	struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
	struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
	struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
	void *ptr = &req_tmpl->cd_ctrl;
	struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
	struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
	enum icp_qat_hw_cipher_convert key_convert;
	enum qat_sym_proto_flag qat_proto_flag =
		QAT_CRYPTO_PROTO_FLAG_NONE;
	uint32_t total_key_size;
	uint16_t cipher_offset, cd_size;
	uint32_t wordIndex = 0;
	uint32_t *temp_key = NULL;

	if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER) {
		cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
		ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_DRAM_WR);
		ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_RET_AUTH_RES);
		ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
		cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
		ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_DRAM_WR);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
		QAT_LOG(ERR, "Invalid param, must be a cipher command.");

	if (cdesc->qat_mode == ICP_QAT_HW_CIPHER_CTR_MODE) {
		/*
		 * CTR Streaming ciphers are a special case. Decrypt = encrypt
		 * Overriding default values previously set
		 */
		cdesc->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
		key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2
		|| cdesc->qat_cipher_alg ==
			ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)
		key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
	else if (cdesc->qat_dir == ICP_QAT_HW_CIPHER_ENCRYPT)
		key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
		key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
	if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
		total_key_size = ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ +
			ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
		cipher_cd_ctrl->cipher_state_sz =
			ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;

	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
		total_key_size = ICP_QAT_HW_KASUMI_F8_KEY_SZ;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_KASUMI_BLK_SZ >> 3;
		cipher_cd_ctrl->cipher_padding_sz =
					(2 * ICP_QAT_HW_KASUMI_BLK_SZ) >> 3;
	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES) {
		total_key_size = ICP_QAT_HW_3DES_KEY_SZ;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_3DES_BLK_SZ >> 3;
			qat_get_crypto_proto_flag(header->serv_specif_flags);
	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_DES) {
		total_key_size = ICP_QAT_HW_DES_KEY_SZ;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_DES_BLK_SZ >> 3;
			qat_get_crypto_proto_flag(header->serv_specif_flags);
	} else if (cdesc->qat_cipher_alg ==
		ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
		total_key_size = ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ +
			ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
		cipher_cd_ctrl->cipher_state_sz =
			ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
		cdesc->min_qat_dev_gen = QAT_GEN2;
		total_key_size = cipherkeylen;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_AES_BLK_SZ >> 3;
			qat_get_crypto_proto_flag(header->serv_specif_flags);
	cipher_cd_ctrl->cipher_key_sz = total_key_size >> 3;
	cipher_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
	cipher_cd_ctrl->cipher_cfg_offset = cipher_offset >> 3;

	header->service_cmd_id = cdesc->qat_cmd;
	qat_sym_session_init_common_hdr(header, qat_proto_flag);

	cipher = (struct icp_qat_hw_cipher_algo_blk *)cdesc->cd_cur_ptr;
	cipher->cipher_config.val =
		ICP_QAT_HW_CIPHER_CONFIG_BUILD(cdesc->qat_mode,
					cdesc->qat_cipher_alg, key_convert,

	if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
		temp_key = (uint32_t *)(cdesc->cd_cur_ptr +
					sizeof(struct icp_qat_hw_cipher_config)
		memcpy(cipher->key, cipherkey, cipherkeylen);
		memcpy(temp_key, cipherkey, cipherkeylen);

		/* XOR Key with KASUMI F8 key modifier at 4 bytes level */
		for (wordIndex = 0; wordIndex < (cipherkeylen >> 2);
			temp_key[wordIndex] ^= KASUMI_F8_KEY_MODIFIER_4_BYTES;

		cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
					cipherkeylen + cipherkeylen;
		memcpy(cipher->key, cipherkey, cipherkeylen);
		cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +

	if (total_key_size > cipherkeylen) {
		uint32_t padding_size = total_key_size-cipherkeylen;
		if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
			&& (cipherkeylen == QAT_3DES_KEY_SZ_OPT2)) {
			/* K3 not provided so use K1 = K3 */
			memcpy(cdesc->cd_cur_ptr, cipherkey, padding_size);
		} else if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
			&& (cipherkeylen == QAT_3DES_KEY_SZ_OPT3)) {
			/* K2 and K3 not provided so use K1 = K2 = K3 */
			memcpy(cdesc->cd_cur_ptr, cipherkey,
			memcpy(cdesc->cd_cur_ptr+cipherkeylen,
				cipherkey, cipherkeylen);
			memset(cdesc->cd_cur_ptr, 0, padding_size);

		cdesc->cd_cur_ptr += padding_size;
	cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
	cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;
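
/* The cipher content descriptor assembled above is laid out as an
 * icp_qat_hw_cipher_config block followed by the key material (padded, or
 * duplicated for the 2-key/1-key 3DES options handled just before), and its
 * size is reported to the firmware in 8-byte words, hence
 * RTE_ALIGN_CEIL(cd_size, 8) >> 3.
 */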
int qat_sym_session_aead_create_cd_auth(struct qat_sym_session *cdesc,
						const uint8_t *authkey,
						uint32_t authkeylen,
						uint32_t aad_length,
						uint32_t digestsize,
						unsigned int operation)
	struct icp_qat_hw_auth_setup *hash;
	struct icp_qat_hw_cipher_algo_blk *cipherconfig;
	struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
	struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
	struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
	void *ptr = &req_tmpl->cd_ctrl;
	struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
	struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
	struct icp_qat_fw_la_auth_req_params *auth_param =
		(struct icp_qat_fw_la_auth_req_params *)
		((char *)&req_tmpl->serv_specif_rqpars +
		ICP_QAT_FW_HASH_REQUEST_PARAMETERS_OFFSET);
	uint16_t state1_size = 0, state2_size = 0;
	uint16_t hash_offset, cd_size;
	uint32_t *aad_len = NULL;
	uint32_t wordIndex = 0;

	enum qat_sym_proto_flag qat_proto_flag =
		QAT_CRYPTO_PROTO_FLAG_NONE;

	if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_AUTH) {
		ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_DRAM_WR);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
		ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_DRAM_WR);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
		QAT_LOG(ERR, "Invalid param, must be a hash command.");

	if (operation == RTE_CRYPTO_AUTH_OP_VERIFY) {
		ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_NO_RET_AUTH_RES);
		ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_CMP_AUTH_RES);
		cdesc->auth_op = ICP_QAT_HW_AUTH_VERIFY;
		ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_RET_AUTH_RES);
		ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
		cdesc->auth_op = ICP_QAT_HW_AUTH_GENERATE;
	/*
	 * Setup the inner hash config
	 */
	hash_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
	hash = (struct icp_qat_hw_auth_setup *)cdesc->cd_cur_ptr;
	hash->auth_config.reserved = 0;
	hash->auth_config.config =
			ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE1,
				cdesc->qat_hash_alg, digestsize);

	if (cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_KASUMI_F9
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_NULL
		hash->auth_counter.counter = 0;
		int block_size = qat_hash_get_block_size(cdesc->qat_hash_alg);

		hash->auth_counter.counter = rte_bswap32(block_size);

	cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_auth_setup);

	/*
	 * cd_cur_ptr now points at the state1 information.
	 */
	switch (cdesc->qat_hash_alg) {
	case ICP_QAT_HW_AUTH_ALGO_SHA1:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA1, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			QAT_LOG(ERR, "(SHA)precompute failed");
		state2_size = RTE_ALIGN_CEIL(ICP_QAT_HW_SHA1_STATE2_SZ, 8);
	case ICP_QAT_HW_AUTH_ALGO_SHA224:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA224, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			QAT_LOG(ERR, "(SHA)precompute failed");
		state2_size = ICP_QAT_HW_SHA224_STATE2_SZ;
	case ICP_QAT_HW_AUTH_ALGO_SHA256:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA256, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			QAT_LOG(ERR, "(SHA)precompute failed");
		state2_size = ICP_QAT_HW_SHA256_STATE2_SZ;
	case ICP_QAT_HW_AUTH_ALGO_SHA384:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA384, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			QAT_LOG(ERR, "(SHA)precompute failed");
		state2_size = ICP_QAT_HW_SHA384_STATE2_SZ;
	case ICP_QAT_HW_AUTH_ALGO_SHA512:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA512, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			QAT_LOG(ERR, "(SHA)precompute failed");
		state2_size = ICP_QAT_HW_SHA512_STATE2_SZ;
	case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
		state1_size = ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;

		if (cdesc->aes_cmac)
			memset(cdesc->cd_cur_ptr, 0, state1_size);
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC,
			authkey, authkeylen, cdesc->cd_cur_ptr + state1_size,
			&state2_size, cdesc->aes_cmac)) {
			cdesc->aes_cmac ? QAT_LOG(ERR,
					"(CMAC)precompute failed")
					"(XCBC)precompute failed");
	case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
	case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
		state1_size = ICP_QAT_HW_GALOIS_128_STATE1_SZ;
		if (qat_sym_do_precomputes(cdesc->qat_hash_alg, authkey,
			authkeylen, cdesc->cd_cur_ptr + state1_size,
			&state2_size, cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(GCM)precompute failed");
		/*
		 * Write (the length of AAD) into bytes 16-19 of state2
		 * in big-endian format. This field is 8 bytes
		 */
		auth_param->u2.aad_sz =
				RTE_ALIGN_CEIL(aad_length, 16);
		auth_param->hash_state_sz = (auth_param->u2.aad_sz) >> 3;

		aad_len = (uint32_t *)(cdesc->cd_cur_ptr +
					ICP_QAT_HW_GALOIS_128_STATE1_SZ +
					ICP_QAT_HW_GALOIS_H_SZ);
		*aad_len = rte_bswap32(aad_length);
		cdesc->aad_len = aad_length;
	case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2);
		state2_size = ICP_QAT_HW_SNOW_3G_UIA2_STATE2_SZ;
		memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);

		cipherconfig = (struct icp_qat_hw_cipher_algo_blk *)
				(cdesc->cd_cur_ptr + state1_size + state2_size);
		cipherconfig->cipher_config.val =
		ICP_QAT_HW_CIPHER_CONFIG_BUILD(ICP_QAT_HW_CIPHER_ECB_MODE,
			ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2,
			ICP_QAT_HW_CIPHER_KEY_CONVERT,
			ICP_QAT_HW_CIPHER_ENCRYPT);
		memcpy(cipherconfig->key, authkey, authkeylen);
		memset(cipherconfig->key + authkeylen,
				0, ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ);
		cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
				authkeylen + ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
		auth_param->hash_state_sz = ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
	case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
		hash->auth_config.config =
			ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE0,
				cdesc->qat_hash_alg, digestsize);
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3);
		state2_size = ICP_QAT_HW_ZUC_3G_EIA3_STATE2_SZ;
		memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size
			+ ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ);

		memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
		cdesc->cd_cur_ptr += state1_size + state2_size
			+ ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
		auth_param->hash_state_sz = ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
		cdesc->min_qat_dev_gen = QAT_GEN2;

	case ICP_QAT_HW_AUTH_ALGO_MD5:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_MD5, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			QAT_LOG(ERR, "(MD5)precompute failed");
		state2_size = ICP_QAT_HW_MD5_STATE2_SZ;
	case ICP_QAT_HW_AUTH_ALGO_NULL:
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_NULL);
		state2_size = ICP_QAT_HW_NULL_STATE2_SZ;
	case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC);
		state2_size = ICP_QAT_HW_AES_CBC_MAC_KEY_SZ +
				ICP_QAT_HW_AES_CCM_CBC_E_CTR0_SZ;

		if (aad_length > 0) {
			aad_length += ICP_QAT_HW_CCM_AAD_B0_LEN +
			ICP_QAT_HW_CCM_AAD_LEN_INFO;
			auth_param->u2.aad_sz =
			RTE_ALIGN_CEIL(aad_length,
			ICP_QAT_HW_CCM_AAD_ALIGNMENT);
			auth_param->u2.aad_sz = ICP_QAT_HW_CCM_AAD_B0_LEN;
		cdesc->aad_len = aad_length;
		hash->auth_counter.counter = 0;

		hash_cd_ctrl->outer_prefix_sz = digestsize;
		auth_param->hash_state_sz = digestsize;

		memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
	case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_KASUMI_F9);
		state2_size = ICP_QAT_HW_KASUMI_F9_STATE2_SZ;
		memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
		pTempKey = (uint32_t *)(cdesc->cd_cur_ptr + state1_size
		/*
		 * The Inner Hash Initial State2 block must contain IK
		 * (Initialisation Key), followed by IK XOR-ed with KM
		 * (Key Modifier): IK||(IK^KM).
		 */
		/* write the auth key */
		memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
		/* initialise temp key with auth key */
		memcpy(pTempKey, authkey, authkeylen);
		/* XOR Key with KASUMI F9 key modifier at 4 bytes level */
		for (wordIndex = 0; wordIndex < (authkeylen >> 2); wordIndex++)
			pTempKey[wordIndex] ^= KASUMI_F9_KEY_MODIFIER_4_BYTES;
		QAT_LOG(ERR, "Invalid HASH alg %u", cdesc->qat_hash_alg);

	/* Request template setup */
	qat_sym_session_init_common_hdr(header, qat_proto_flag);
	header->service_cmd_id = cdesc->qat_cmd;

	/* Auth CD config setup */
	hash_cd_ctrl->hash_cfg_offset = hash_offset >> 3;
	hash_cd_ctrl->hash_flags = ICP_QAT_FW_AUTH_HDR_FLAG_NO_NESTED;
	hash_cd_ctrl->inner_res_sz = digestsize;
	hash_cd_ctrl->final_sz = digestsize;
	hash_cd_ctrl->inner_state1_sz = state1_size;
	auth_param->auth_res_sz = digestsize;

	hash_cd_ctrl->inner_state2_sz = state2_size;
	hash_cd_ctrl->inner_state2_offset = hash_cd_ctrl->hash_cfg_offset +
			((sizeof(struct icp_qat_hw_auth_setup) +
			RTE_ALIGN_CEIL(hash_cd_ctrl->inner_state1_sz, 8))

	cdesc->cd_cur_ptr += state1_size + state2_size;
	cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;

	cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
	cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;
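
/* As derived from the code above, inner_state2_offset is expressed in 8-byte
 * words from the start of the content descriptor: the hash config offset plus
 * the icp_qat_hw_auth_setup block and state1 rounded up to 8 bytes, matching
 * the memory written through cd_cur_ptr and the params size reported to the
 * firmware.
 */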
int qat_sym_validate_aes_key(int key_len, enum icp_qat_hw_cipher_algo *alg)

	case ICP_QAT_HW_AES_128_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
	case ICP_QAT_HW_AES_192_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_AES192;
	case ICP_QAT_HW_AES_256_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_AES256;

int qat_sym_validate_aes_docsisbpi_key(int key_len,
		enum icp_qat_hw_cipher_algo *alg)

	case ICP_QAT_HW_AES_128_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
	case ICP_QAT_HW_AES_256_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_AES256;

int qat_sym_validate_snow3g_key(int key_len, enum icp_qat_hw_cipher_algo *alg)

	case ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2;

int qat_sym_validate_kasumi_key(int key_len, enum icp_qat_hw_cipher_algo *alg)

	case ICP_QAT_HW_KASUMI_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_KASUMI;

int qat_sym_validate_des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)

	case ICP_QAT_HW_DES_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_DES;

int qat_sym_validate_3des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)

	case QAT_3DES_KEY_SZ_OPT1:
	case QAT_3DES_KEY_SZ_OPT2:
	case QAT_3DES_KEY_SZ_OPT3:
		*alg = ICP_QAT_HW_CIPHER_ALGO_3DES;

int qat_sym_validate_zuc_key(int key_len, enum icp_qat_hw_cipher_algo *alg)

	case ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3;