/* SPDX-License-Identifier: (BSD-3-Clause OR GPL-2.0)
 * Copyright(c) 2015-2019 Intel Corporation
 */

#include <openssl/sha.h>	/* Needed to calculate pre-compute values */
#include <openssl/aes.h>	/* Needed to calculate pre-compute values */
#include <openssl/md5.h>	/* Needed to calculate pre-compute values */
#include <openssl/evp.h>	/* Needed for bpi runt block processing */

#include <rte_memcpy.h>
#include <rte_common.h>
#include <rte_spinlock.h>
#include <rte_byteorder.h>
#include <rte_malloc.h>
#include <rte_crypto_sym.h>

#include "qat_logs.h"
#include "qat_sym_session.h"
#include "qat_sym_pmd.h"
/** Frees a context previously created
 *  Depends on openssl libcrypto
 */
static void
bpi_cipher_ctx_free(void *bpi_ctx)
{
	if (bpi_ctx != NULL)
		EVP_CIPHER_CTX_free((EVP_CIPHER_CTX *)bpi_ctx);
}

/** Creates a context in either AES or DES in ECB mode
 *  Depends on openssl libcrypto
 */
static int
bpi_cipher_ctx_init(enum rte_crypto_cipher_algorithm cryptodev_algo,
		enum rte_crypto_cipher_operation direction __rte_unused,
		const uint8_t *key, uint16_t key_length, void **ctx)
{
	const EVP_CIPHER *algo = NULL;
	int ret;

	*ctx = EVP_CIPHER_CTX_new();
	if (*ctx == NULL) {
		ret = -ENOMEM;
		goto ctx_init_err;
	}

	if (cryptodev_algo == RTE_CRYPTO_CIPHER_DES_DOCSISBPI)
		algo = EVP_des_ecb();
	else if (key_length == ICP_QAT_HW_AES_128_KEY_SZ)
		algo = EVP_aes_128_ecb();
	else
		algo = EVP_aes_256_ecb();

	/* IV will be ECB encrypted whether direction is encrypt or decrypt */
	if (EVP_EncryptInit_ex(*ctx, algo, NULL, key, 0) != 1) {
		ret = -EINVAL;
		goto ctx_init_err;
	}

	return 0;

ctx_init_err:
	if (*ctx != NULL)
		EVP_CIPHER_CTX_free(*ctx);
	return ret;
}
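
/*
 * Illustrative usage sketch (not part of the driver): creating and
 * releasing a DOCSIS BPI context for an AES-128 key. The all-zero key
 * here is a made-up example value.
 */
static __rte_unused int
bpi_cipher_ctx_example(void)
{
	static const uint8_t key[ICP_QAT_HW_AES_128_KEY_SZ] = { 0 };
	void *bpi_ctx = NULL;
	int ret;

	ret = bpi_cipher_ctx_init(RTE_CRYPTO_CIPHER_AES_DOCSISBPI,
			RTE_CRYPTO_CIPHER_OP_ENCRYPT, key,
			ICP_QAT_HW_AES_128_KEY_SZ, &bpi_ctx);
	if (ret != 0)
		return ret;

	/* ... use the EVP context for runt-block processing ... */

	bpi_cipher_ctx_free(bpi_ctx);
	return 0;
}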
static int
qat_is_cipher_alg_supported(enum rte_crypto_cipher_algorithm algo,
		struct qat_sym_dev_private *internals)
{
	int i = 0;
	const struct rte_cryptodev_capabilities *capability;

	while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
			RTE_CRYPTO_OP_TYPE_UNDEFINED) {
		if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
			continue;

		if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_CIPHER)
			continue;

		if (capability->sym.cipher.algo == algo)
			return 1;
	}
	return 0;
}

static int
qat_is_auth_alg_supported(enum rte_crypto_auth_algorithm algo,
		struct qat_sym_dev_private *internals)
{
	int i = 0;
	const struct rte_cryptodev_capabilities *capability;

	while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
			RTE_CRYPTO_OP_TYPE_UNDEFINED) {
		if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
			continue;

		if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_AUTH)
			continue;

		if (capability->sym.auth.algo == algo)
			return 1;
	}
	return 0;
}
void
qat_sym_session_clear(struct rte_cryptodev *dev,
		struct rte_cryptodev_sym_session *sess)
{
	uint8_t index = dev->driver_id;
	void *sess_priv = get_sym_session_private_data(sess, index);
	struct qat_sym_session *s = (struct qat_sym_session *)sess_priv;

	if (sess_priv) {
		if (s->bpi_ctx)
			bpi_cipher_ctx_free(s->bpi_ctx);
		memset(s, 0, qat_sym_session_get_private_size(dev));
		struct rte_mempool *sess_mp = rte_mempool_from_obj(sess_priv);

		set_sym_session_private_data(sess, index, NULL);
		rte_mempool_put(sess_mp, sess_priv);
	}
}
static int
qat_get_cmd_id(const struct rte_crypto_sym_xform *xform)
{
	/* Cipher Only */
	if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER && xform->next == NULL)
		return ICP_QAT_FW_LA_CMD_CIPHER;

	/* Authentication Only */
	if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH && xform->next == NULL)
		return ICP_QAT_FW_LA_CMD_AUTH;

	/* AEAD */
	if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
		/* AES-GCM and AES-CCM work in opposite orders:
		 * GCM encrypts first and then generates the hash, whereas
		 * AES-CCM generates the hash first and then encrypts.
		 * A similar relation applies to decryption.
		 */
		if (xform->aead.op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
			if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
				return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
			else
				return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
		} else {
			if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
				return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
			else
				return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
		}
	}

	if (xform->next == NULL)
		return -1;

	/* Cipher then Authenticate */
	if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
			xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
		return ICP_QAT_FW_LA_CMD_CIPHER_HASH;

	/* Authenticate then Cipher */
	if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH &&
			xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
		return ICP_QAT_FW_LA_CMD_HASH_CIPHER;

	return -1;
}
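
/*
 * For reference, the xform chains above map to QAT commands as follows
 * (derived from qat_get_cmd_id()):
 *
 *	cipher only			-> ICP_QAT_FW_LA_CMD_CIPHER
 *	auth only			-> ICP_QAT_FW_LA_CMD_AUTH
 *	AES-GCM encrypt / CCM decrypt	-> ICP_QAT_FW_LA_CMD_CIPHER_HASH
 *	AES-GCM decrypt / CCM encrypt	-> ICP_QAT_FW_LA_CMD_HASH_CIPHER
 *	cipher then auth		-> ICP_QAT_FW_LA_CMD_CIPHER_HASH
 *	auth then cipher		-> ICP_QAT_FW_LA_CMD_HASH_CIPHER
 */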
static struct rte_crypto_auth_xform *
qat_get_auth_xform(struct rte_crypto_sym_xform *xform)
{
	do {
		if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH)
			return &xform->auth;

		xform = xform->next;
	} while (xform);

	return NULL;
}

static struct rte_crypto_cipher_xform *
qat_get_cipher_xform(struct rte_crypto_sym_xform *xform)
{
	do {
		if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
			return &xform->cipher;

		xform = xform->next;
	} while (xform);

	return NULL;
}
static int
qat_sym_session_configure_cipher(struct rte_cryptodev *dev,
		struct rte_crypto_sym_xform *xform,
		struct qat_sym_session *session)
{
	struct qat_sym_dev_private *internals = dev->data->dev_private;
	struct rte_crypto_cipher_xform *cipher_xform = NULL;
	int ret;

	/* Get cipher xform from crypto xform chain */
	cipher_xform = qat_get_cipher_xform(xform);

	session->cipher_iv.offset = cipher_xform->iv.offset;
	session->cipher_iv.length = cipher_xform->iv.length;

	switch (cipher_xform->algo) {
	case RTE_CRYPTO_CIPHER_AES_CBC:
		if (qat_sym_validate_aes_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_AES_CTR:
		if (qat_sym_validate_aes_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		break;
	case RTE_CRYPTO_CIPHER_SNOW3G_UEA2:
		if (qat_sym_validate_snow3g_key(cipher_xform->key.length,
					&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid SNOW 3G cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
		break;
	case RTE_CRYPTO_CIPHER_NULL:
		session->qat_cipher_alg = ICP_QAT_HW_CIPHER_ALGO_NULL;
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		break;
	case RTE_CRYPTO_CIPHER_KASUMI_F8:
		if (qat_sym_validate_kasumi_key(cipher_xform->key.length,
					&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid KASUMI cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_F8_MODE;
		break;
	case RTE_CRYPTO_CIPHER_3DES_CBC:
		if (qat_sym_validate_3des_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid 3DES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_DES_CBC:
		if (qat_sym_validate_des_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid DES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_3DES_CTR:
		if (qat_sym_validate_3des_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid 3DES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		break;
	case RTE_CRYPTO_CIPHER_DES_DOCSISBPI:
		ret = bpi_cipher_ctx_init(
					cipher_xform->algo,
					cipher_xform->op,
					cipher_xform->key.data,
					cipher_xform->key.length,
					&session->bpi_ctx);
		if (ret != 0) {
			QAT_LOG(ERR, "failed to create DES BPI ctx");
			goto error_out;
		}
		if (qat_sym_validate_des_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid DES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_AES_DOCSISBPI:
		ret = bpi_cipher_ctx_init(
					cipher_xform->algo,
					cipher_xform->op,
					cipher_xform->key.data,
					cipher_xform->key.length,
					&session->bpi_ctx);
		if (ret != 0) {
			QAT_LOG(ERR, "failed to create AES BPI ctx");
			goto error_out;
		}
		if (qat_sym_validate_aes_docsisbpi_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES DOCSISBPI key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_ZUC_EEA3:
		if (!qat_is_cipher_alg_supported(
			cipher_xform->algo, internals)) {
			QAT_LOG(ERR, "%s not supported on this device",
				rte_crypto_cipher_algorithm_strings
					[cipher_xform->algo]);
			ret = -ENOTSUP;
			goto error_out;
		}
		if (qat_sym_validate_zuc_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid ZUC cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
		break;
	case RTE_CRYPTO_CIPHER_AES_XTS:
		if ((cipher_xform->key.length/2) == ICP_QAT_HW_AES_192_KEY_SZ) {
			QAT_LOG(ERR, "AES-XTS-192 not supported");
			ret = -EINVAL;
			goto error_out;
		}
		if (qat_sym_validate_aes_key((cipher_xform->key.length/2),
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES-XTS cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_XTS_MODE;
		break;
	case RTE_CRYPTO_CIPHER_3DES_ECB:
	case RTE_CRYPTO_CIPHER_AES_ECB:
	case RTE_CRYPTO_CIPHER_AES_F8:
	case RTE_CRYPTO_CIPHER_ARC4:
		QAT_LOG(ERR, "Crypto QAT PMD: Unsupported Cipher alg %u",
				cipher_xform->algo);
		ret = -ENOTSUP;
		goto error_out;
	default:
		QAT_LOG(ERR, "Crypto: Undefined Cipher specified %u\n",
				cipher_xform->algo);
		ret = -EINVAL;
		goto error_out;
	}

	if (cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
		session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
	else
		session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;

	if (qat_sym_session_aead_create_cd_cipher(session,
						cipher_xform->key.data,
						cipher_xform->key.length)) {
		ret = -EINVAL;
		goto error_out;
	}

	return 0;

error_out:
	if (session->bpi_ctx) {
		bpi_cipher_ctx_free(session->bpi_ctx);
		session->bpi_ctx = NULL;
	}
	return ret;
}
int
qat_sym_session_configure(struct rte_cryptodev *dev,
		struct rte_crypto_sym_xform *xform,
		struct rte_cryptodev_sym_session *sess,
		struct rte_mempool *mempool)
{
	void *sess_private_data;
	int ret;

	if (rte_mempool_get(mempool, &sess_private_data)) {
		QAT_LOG(ERR,
			"Couldn't get object from session mempool");
		return -ENOMEM;
	}

	ret = qat_sym_session_set_parameters(dev, xform, sess_private_data);
	if (ret != 0) {
		QAT_LOG(ERR,
		    "Crypto QAT PMD: failed to configure session parameters");

		/* Return session to mempool */
		rte_mempool_put(mempool, sess_private_data);
		return ret;
	}

	set_sym_session_private_data(sess, dev->driver_id,
		sess_private_data);

	return 0;
}
static void
qat_sym_session_set_ext_hash_flags(struct qat_sym_session *session,
		uint8_t hash_flag)
{
	struct icp_qat_fw_comn_req_hdr *header = &session->fw_req.comn_hdr;
	struct icp_qat_fw_cipher_auth_cd_ctrl_hdr *cd_ctrl =
			(struct icp_qat_fw_cipher_auth_cd_ctrl_hdr *)
			session->fw_req.cd_ctrl.content_desc_ctrl_lw;

	/* Set the Use Extended Protocol Flags bit in LW 1 */
	QAT_FIELD_SET(header->comn_req_flags,
			QAT_COMN_EXT_FLAGS_USED,
			QAT_COMN_EXT_FLAGS_BITPOS,
			QAT_COMN_EXT_FLAGS_MASK);

	/* Set Hash Flags in LW 28 */
	cd_ctrl->hash_flags |= hash_flag;

	/* Set proto flags in LW 1 */
	switch (session->qat_cipher_alg) {
	case ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_SNOW_3G_PROTO);
		ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
				header->serv_specif_flags, 0);
		break;
	case ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_NO_PROTO);
		ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
				header->serv_specif_flags,
				ICP_QAT_FW_LA_ZUC_3G_PROTO);
		break;
	default:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_NO_PROTO);
		ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
				header->serv_specif_flags, 0);
		break;
	}
}
static void
qat_sym_session_handle_mixed(struct qat_sym_session *session)
{
	if (session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3 &&
			session->qat_cipher_alg !=
			ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
		session->min_qat_dev_gen = QAT_GEN3;
		qat_sym_session_set_ext_hash_flags(session,
			1 << ICP_QAT_FW_AUTH_HDR_FLAG_ZUC_EIA3_BITPOS);
	} else if (session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2 &&
			session->qat_cipher_alg !=
			ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
		session->min_qat_dev_gen = QAT_GEN3;
		qat_sym_session_set_ext_hash_flags(session,
			1 << ICP_QAT_FW_AUTH_HDR_FLAG_SNOW3G_UIA2_BITPOS);
	} else if ((session->aes_cmac ||
			session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_NULL) &&
			(session->qat_cipher_alg ==
			ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2 ||
			session->qat_cipher_alg ==
			ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)) {
		session->min_qat_dev_gen = QAT_GEN3;
		qat_sym_session_set_ext_hash_flags(session, 0);
	}
}
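
/*
 * Example of a "mixed" combination handled above: a session chaining a
 * SNOW 3G (UEA2) cipher with a ZUC (EIA3) auth uses different wireless
 * algorithms in the two slices, so it needs a GEN3 device and the
 * extended ZUC EIA3 hash flag; a matched SNOW 3G cipher+auth session
 * does not take any of these paths.
 */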
int
qat_sym_session_set_parameters(struct rte_cryptodev *dev,
		struct rte_crypto_sym_xform *xform, void *session_private)
{
	struct qat_sym_session *session = session_private;
	int ret;
	int qat_cmd_id;

	/* Set context descriptor physical address */
	session->cd_paddr = rte_mempool_virt2iova(session) +
			offsetof(struct qat_sym_session, cd);

	session->min_qat_dev_gen = QAT_GEN1;

	/* Get requested QAT command id */
	qat_cmd_id = qat_get_cmd_id(xform);
	if (qat_cmd_id < 0 || qat_cmd_id >= ICP_QAT_FW_LA_CMD_DELIMITER) {
		QAT_LOG(ERR, "Unsupported xform chain requested");
		return -ENOTSUP;
	}
	session->qat_cmd = (enum icp_qat_fw_la_cmd_id)qat_cmd_id;
	switch (session->qat_cmd) {
	case ICP_QAT_FW_LA_CMD_CIPHER:
		ret = qat_sym_session_configure_cipher(dev, xform, session);
		if (ret < 0)
			return ret;
		break;
	case ICP_QAT_FW_LA_CMD_AUTH:
		ret = qat_sym_session_configure_auth(dev, xform, session);
		if (ret < 0)
			return ret;
		break;
	case ICP_QAT_FW_LA_CMD_CIPHER_HASH:
		if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
			ret = qat_sym_session_configure_aead(dev, xform,
					session);
			if (ret < 0)
				return ret;
		} else {
			ret = qat_sym_session_configure_cipher(dev,
					xform, session);
			if (ret < 0)
				return ret;
			ret = qat_sym_session_configure_auth(dev,
					xform, session);
			if (ret < 0)
				return ret;
			/* Special handling of mixed hash+cipher algorithms */
			qat_sym_session_handle_mixed(session);
		}
		break;
	case ICP_QAT_FW_LA_CMD_HASH_CIPHER:
		if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
			ret = qat_sym_session_configure_aead(dev, xform,
					session);
			if (ret < 0)
				return ret;
		} else {
			ret = qat_sym_session_configure_auth(dev,
					xform, session);
			if (ret < 0)
				return ret;
			ret = qat_sym_session_configure_cipher(dev,
					xform, session);
			if (ret < 0)
				return ret;
			/* Special handling of mixed hash+cipher algorithms */
			qat_sym_session_handle_mixed(session);
		}
		break;
	case ICP_QAT_FW_LA_CMD_TRNG_GET_RANDOM:
	case ICP_QAT_FW_LA_CMD_TRNG_TEST:
	case ICP_QAT_FW_LA_CMD_SSL3_KEY_DERIVE:
	case ICP_QAT_FW_LA_CMD_TLS_V1_1_KEY_DERIVE:
	case ICP_QAT_FW_LA_CMD_TLS_V1_2_KEY_DERIVE:
	case ICP_QAT_FW_LA_CMD_MGF1:
	case ICP_QAT_FW_LA_CMD_AUTH_PRE_COMP:
	case ICP_QAT_FW_LA_CMD_CIPHER_PRE_COMP:
	case ICP_QAT_FW_LA_CMD_DELIMITER:
		QAT_LOG(ERR, "Unsupported Service %u",
				session->qat_cmd);
		return -ENOTSUP;
	default:
		QAT_LOG(ERR, "Unsupported Service %u",
				session->qat_cmd);
		return -ENOTSUP;
	}

	return 0;
}
static int
qat_sym_session_handle_single_pass(struct qat_sym_dev_private *internals,
		struct qat_sym_session *session,
		struct rte_crypto_aead_xform *aead_xform)
{
	enum qat_device_gen qat_dev_gen = internals->qat_dev->qat_dev_gen;

	if (qat_dev_gen == QAT_GEN3 &&
			aead_xform->iv.length == QAT_AES_GCM_SPC_IV_SIZE) {
		/* Use faster Single-Pass GCM */
		struct icp_qat_fw_la_cipher_req_params *cipher_param =
				(void *) &session->fw_req.serv_specif_rqpars;

		session->is_single_pass = 1;
		session->min_qat_dev_gen = QAT_GEN3;
		session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER;
		session->qat_mode = ICP_QAT_HW_CIPHER_AEAD_MODE;
		session->cipher_iv.offset = aead_xform->iv.offset;
		session->cipher_iv.length = aead_xform->iv.length;
		if (qat_sym_session_aead_create_cd_cipher(session,
				aead_xform->key.data, aead_xform->key.length))
			return -EINVAL;
		session->aad_len = aead_xform->aad_length;
		session->digest_length = aead_xform->digest_length;
		if (aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
			session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
			session->auth_op = ICP_QAT_HW_AUTH_GENERATE;
			ICP_QAT_FW_LA_RET_AUTH_SET(
				session->fw_req.comn_hdr.serv_specif_flags,
				ICP_QAT_FW_LA_RET_AUTH_RES);
		} else {
			session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
			session->auth_op = ICP_QAT_HW_AUTH_VERIFY;
			ICP_QAT_FW_LA_CMP_AUTH_SET(
				session->fw_req.comn_hdr.serv_specif_flags,
				ICP_QAT_FW_LA_CMP_AUTH_RES);
		}
		ICP_QAT_FW_LA_SINGLE_PASS_PROTO_FLAG_SET(
				session->fw_req.comn_hdr.serv_specif_flags,
				ICP_QAT_FW_LA_SINGLE_PASS_PROTO);
		ICP_QAT_FW_LA_PROTO_SET(
				session->fw_req.comn_hdr.serv_specif_flags,
				ICP_QAT_FW_LA_NO_PROTO);
		ICP_QAT_FW_LA_GCM_IV_LEN_FLAG_SET(
				session->fw_req.comn_hdr.serv_specif_flags,
				ICP_QAT_FW_LA_GCM_IV_LEN_12_OCTETS);
		session->fw_req.comn_hdr.service_cmd_id =
				ICP_QAT_FW_LA_CMD_CIPHER;
		session->cd.cipher.cipher_config.val =
				ICP_QAT_HW_CIPHER_CONFIG_BUILD(
					ICP_QAT_HW_CIPHER_AEAD_MODE,
					session->qat_cipher_alg,
					ICP_QAT_HW_CIPHER_NO_CONVERT,
					session->qat_dir);
		QAT_FIELD_SET(session->cd.cipher.cipher_config.val,
				aead_xform->digest_length,
				QAT_CIPHER_AEAD_HASH_CMP_LEN_BITPOS,
				QAT_CIPHER_AEAD_HASH_CMP_LEN_MASK);
		session->cd.cipher.cipher_config.reserved =
				ICP_QAT_HW_CIPHER_CONFIG_BUILD_UPPER(
					aead_xform->aad_length);
		cipher_param->spc_aad_sz = aead_xform->aad_length;
		cipher_param->spc_auth_res_sz = aead_xform->digest_length;
	}
	return 0;
}
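
/*
 * Note: the single-pass path above applies only on GEN3 devices and only
 * for the standard 12-byte GCM IV (QAT_AES_GCM_SPC_IV_SIZE). In that case
 * cipher and authentication are fused into one ICP_QAT_FW_LA_CMD_CIPHER
 * pass, so no separate auth content descriptor is built. Other IV lengths
 * fall back to the two-pass CIPHER_HASH/HASH_CIPHER flow built in
 * qat_sym_session_configure_aead() below.
 */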
int
qat_sym_session_configure_auth(struct rte_cryptodev *dev,
				struct rte_crypto_sym_xform *xform,
				struct qat_sym_session *session)
{
	struct rte_crypto_auth_xform *auth_xform = qat_get_auth_xform(xform);
	struct qat_sym_dev_private *internals = dev->data->dev_private;
	const uint8_t *key_data = auth_xform->key.data;
	uint8_t key_length = auth_xform->key.length;
	session->aes_cmac = 0;

	session->auth_iv.offset = auth_xform->iv.offset;
	session->auth_iv.length = auth_xform->iv.length;

	switch (auth_xform->algo) {
	case RTE_CRYPTO_AUTH_SHA1_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA1;
		break;
	case RTE_CRYPTO_AUTH_SHA224_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA224;
		break;
	case RTE_CRYPTO_AUTH_SHA256_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA256;
		break;
	case RTE_CRYPTO_AUTH_SHA384_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA384;
		break;
	case RTE_CRYPTO_AUTH_SHA512_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA512;
		break;
	case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
		break;
	case RTE_CRYPTO_AUTH_AES_CMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
		session->aes_cmac = 1;
		break;
	case RTE_CRYPTO_AUTH_AES_GMAC:
		if (qat_sym_validate_aes_key(auth_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES key size");
			return -EINVAL;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
		if (session->auth_iv.length == 0)
			session->auth_iv.length = AES_GCM_J0_LEN;

		break;
	case RTE_CRYPTO_AUTH_SNOW3G_UIA2:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2;
		break;
	case RTE_CRYPTO_AUTH_MD5_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_MD5;
		break;
	case RTE_CRYPTO_AUTH_NULL:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_NULL;
		break;
	case RTE_CRYPTO_AUTH_KASUMI_F9:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_KASUMI_F9;
		break;
	case RTE_CRYPTO_AUTH_ZUC_EIA3:
		if (!qat_is_auth_alg_supported(auth_xform->algo, internals)) {
			QAT_LOG(ERR, "%s not supported on this device",
				rte_crypto_auth_algorithm_strings
				[auth_xform->algo]);
			return -ENOTSUP;
		}
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3;
		break;
	case RTE_CRYPTO_AUTH_SHA1:
	case RTE_CRYPTO_AUTH_SHA256:
	case RTE_CRYPTO_AUTH_SHA512:
	case RTE_CRYPTO_AUTH_SHA224:
	case RTE_CRYPTO_AUTH_SHA384:
	case RTE_CRYPTO_AUTH_MD5:
	case RTE_CRYPTO_AUTH_AES_CBC_MAC:
		QAT_LOG(ERR, "Crypto: Unsupported hash alg %u",
				auth_xform->algo);
		return -ENOTSUP;
	default:
		QAT_LOG(ERR, "Crypto: Undefined Hash algo %u specified",
				auth_xform->algo);
		return -EINVAL;
	}

	if (auth_xform->algo == RTE_CRYPTO_AUTH_AES_GMAC) {
		if (auth_xform->op == RTE_CRYPTO_AUTH_OP_GENERATE) {
			session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER_HASH;
			session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
			/*
			 * It needs to create cipher desc content first,
			 * then authentication
			 */
			if (qat_sym_session_aead_create_cd_cipher(session,
						auth_xform->key.data,
						auth_xform->key.length))
				return -EINVAL;

			if (qat_sym_session_aead_create_cd_auth(session,
						key_data,
						key_length,
						0,
						auth_xform->digest_length,
						auth_xform->op))
				return -EINVAL;
		} else {
			session->qat_cmd = ICP_QAT_FW_LA_CMD_HASH_CIPHER;
			session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
			/*
			 * It needs to create authentication desc content first,
			 * then cipher
			 */
			if (qat_sym_session_aead_create_cd_auth(session,
						key_data,
						key_length,
						0,
						auth_xform->digest_length,
						auth_xform->op))
				return -EINVAL;

			if (qat_sym_session_aead_create_cd_cipher(session,
						auth_xform->key.data,
						auth_xform->key.length))
				return -EINVAL;
		}
		/* Restore to authentication only */
		session->qat_cmd = ICP_QAT_FW_LA_CMD_AUTH;
	} else {
		if (qat_sym_session_aead_create_cd_auth(session,
				key_data,
				key_length,
				0,
				auth_xform->digest_length,
				auth_xform->op))
			return -EINVAL;
	}

	session->digest_length = auth_xform->digest_length;
	return 0;
}
int
qat_sym_session_configure_aead(struct rte_cryptodev *dev,
				struct rte_crypto_sym_xform *xform,
				struct qat_sym_session *session)
{
	struct rte_crypto_aead_xform *aead_xform = &xform->aead;
	enum rte_crypto_auth_operation crypto_operation;

	/*
	 * Store AEAD IV parameters as cipher IV,
	 * to avoid unnecessary memory usage
	 */
	session->cipher_iv.offset = xform->aead.iv.offset;
	session->cipher_iv.length = xform->aead.iv.length;

	switch (aead_xform->algo) {
	case RTE_CRYPTO_AEAD_AES_GCM:
		if (qat_sym_validate_aes_key(aead_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES key size");
			return -EINVAL;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
		if (session->cipher_iv.length == 0)
			session->cipher_iv.length = AES_GCM_J0_LEN;

		break;
	case RTE_CRYPTO_AEAD_AES_CCM:
		if (qat_sym_validate_aes_key(aead_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES key size");
			return -EINVAL;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC;
		break;
	default:
		QAT_LOG(ERR, "Crypto: Undefined AEAD specified %u\n",
				aead_xform->algo);
		return -EINVAL;
	}

	session->is_single_pass = 0;
	if (aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) {
		/* Use faster Single-Pass GCM if possible */
		int res = qat_sym_session_handle_single_pass(
				dev->data->dev_private, session, aead_xform);
		if (res < 0)
			return res;
		if (session->is_single_pass)
			return 0;
	}

	if ((aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT &&
			aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) ||
			(aead_xform->op == RTE_CRYPTO_AEAD_OP_DECRYPT &&
			aead_xform->algo == RTE_CRYPTO_AEAD_AES_CCM)) {
		session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
		/*
		 * It needs to create cipher desc content first,
		 * then authentication
		 */
		crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
			RTE_CRYPTO_AUTH_OP_GENERATE : RTE_CRYPTO_AUTH_OP_VERIFY;

		if (qat_sym_session_aead_create_cd_cipher(session,
					aead_xform->key.data,
					aead_xform->key.length))
			return -EINVAL;

		if (qat_sym_session_aead_create_cd_auth(session,
					aead_xform->key.data,
					aead_xform->key.length,
					aead_xform->aad_length,
					aead_xform->digest_length,
					crypto_operation))
			return -EINVAL;
	} else {
		session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
		/*
		 * It needs to create authentication desc content first,
		 * then cipher
		 */
		crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
			RTE_CRYPTO_AUTH_OP_VERIFY : RTE_CRYPTO_AUTH_OP_GENERATE;

		if (qat_sym_session_aead_create_cd_auth(session,
					aead_xform->key.data,
					aead_xform->key.length,
					aead_xform->aad_length,
					aead_xform->digest_length,
					crypto_operation))
			return -EINVAL;

		if (qat_sym_session_aead_create_cd_cipher(session,
					aead_xform->key.data,
					aead_xform->key.length))
			return -EINVAL;
	}

	session->digest_length = aead_xform->digest_length;
	return 0;
}
unsigned int qat_sym_session_get_private_size(
		struct rte_cryptodev *dev __rte_unused)
{
	return RTE_ALIGN_CEIL(sizeof(struct qat_sym_session), 8);
}

/* returns block size in bytes per cipher algo */
int qat_cipher_get_block_size(enum icp_qat_hw_cipher_algo qat_cipher_alg)
{
	switch (qat_cipher_alg) {
	case ICP_QAT_HW_CIPHER_ALGO_DES:
		return ICP_QAT_HW_DES_BLK_SZ;
	case ICP_QAT_HW_CIPHER_ALGO_3DES:
		return ICP_QAT_HW_3DES_BLK_SZ;
	case ICP_QAT_HW_CIPHER_ALGO_AES128:
	case ICP_QAT_HW_CIPHER_ALGO_AES192:
	case ICP_QAT_HW_CIPHER_ALGO_AES256:
		return ICP_QAT_HW_AES_BLK_SZ;
	default:
		QAT_LOG(ERR, "invalid block cipher alg %u", qat_cipher_alg);
		return -EFAULT;
	}
}
/*
 * Returns size in bytes per hash algo for state1 size field in cd_ctrl
 * This is digest size rounded up to nearest quadword
 */
static int qat_hash_get_state1_size(enum icp_qat_hw_auth_algo qat_hash_alg)
{
	switch (qat_hash_alg) {
	case ICP_QAT_HW_AUTH_ALGO_SHA1:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA1_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_SHA224:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA224_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_SHA256:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA256_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_SHA384:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA384_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_SHA512:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
	case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_GALOIS_128_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_ZUC_3G_EIA3_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_SNOW_3G_UIA2_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_MD5:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_MD5_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_KASUMI_F9_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_CBC_MAC_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_NULL:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_NULL_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
		/* return maximum state1 size in this case */
		return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	default:
		QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
		return -EFAULT;
	}
}
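
/*
 * Worked example of the quadword rounding above: SHA-1 keeps a 20-byte
 * state (ICP_QAT_HW_SHA1_STATE1_SZ), which QAT_HW_ROUND_UP() pads to 24
 * bytes with the QAT_HW_DEFAULT_ALIGNMENT of 8; SHA-256's 32-byte state
 * is already a multiple of 8 and is returned unchanged.
 */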
/* returns digest size in bytes per hash algo */
static int qat_hash_get_digest_size(enum icp_qat_hw_auth_algo qat_hash_alg)
{
	switch (qat_hash_alg) {
	case ICP_QAT_HW_AUTH_ALGO_SHA1:
		return ICP_QAT_HW_SHA1_STATE1_SZ;
	case ICP_QAT_HW_AUTH_ALGO_SHA224:
		return ICP_QAT_HW_SHA224_STATE1_SZ;
	case ICP_QAT_HW_AUTH_ALGO_SHA256:
		return ICP_QAT_HW_SHA256_STATE1_SZ;
	case ICP_QAT_HW_AUTH_ALGO_SHA384:
		return ICP_QAT_HW_SHA384_STATE1_SZ;
	case ICP_QAT_HW_AUTH_ALGO_SHA512:
		return ICP_QAT_HW_SHA512_STATE1_SZ;
	case ICP_QAT_HW_AUTH_ALGO_MD5:
		return ICP_QAT_HW_MD5_STATE1_SZ;
	case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
		return ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
	case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
		/* return maximum digest size in this case */
		return ICP_QAT_HW_SHA512_STATE1_SZ;
	default:
		QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
		return -EFAULT;
	}
}
/* returns block size in bytes per hash algo */
static int qat_hash_get_block_size(enum icp_qat_hw_auth_algo qat_hash_alg)
{
	switch (qat_hash_alg) {
	case ICP_QAT_HW_AUTH_ALGO_SHA1:
		return SHA_CBLOCK;
	case ICP_QAT_HW_AUTH_ALGO_SHA224:
		return SHA256_CBLOCK;
	case ICP_QAT_HW_AUTH_ALGO_SHA256:
		return SHA256_CBLOCK;
	case ICP_QAT_HW_AUTH_ALGO_SHA384:
		return SHA512_CBLOCK;
	case ICP_QAT_HW_AUTH_ALGO_SHA512:
		return SHA512_CBLOCK;
	case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
		return 16;
	case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
		return ICP_QAT_HW_AES_BLK_SZ;
	case ICP_QAT_HW_AUTH_ALGO_MD5:
		return MD5_CBLOCK;
	case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
		/* return maximum block size in this case */
		return SHA512_CBLOCK;
	default:
		QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
		return -EFAULT;
	}
}
static int partial_hash_sha1(uint8_t *data_in, uint8_t *data_out)
{
	SHA_CTX ctx;

	if (!SHA1_Init(&ctx))
		return -EFAULT;
	SHA1_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx, SHA_DIGEST_LENGTH);
	return 0;
}

static int partial_hash_sha224(uint8_t *data_in, uint8_t *data_out)
{
	SHA256_CTX ctx;

	if (!SHA224_Init(&ctx))
		return -EFAULT;
	SHA256_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
	return 0;
}

static int partial_hash_sha256(uint8_t *data_in, uint8_t *data_out)
{
	SHA256_CTX ctx;

	if (!SHA256_Init(&ctx))
		return -EFAULT;
	SHA256_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
	return 0;
}

static int partial_hash_sha384(uint8_t *data_in, uint8_t *data_out)
{
	SHA512_CTX ctx;

	if (!SHA384_Init(&ctx))
		return -EFAULT;
	SHA512_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
	return 0;
}

static int partial_hash_sha512(uint8_t *data_in, uint8_t *data_out)
{
	SHA512_CTX ctx;

	if (!SHA512_Init(&ctx))
		return -EFAULT;
	SHA512_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
	return 0;
}

static int partial_hash_md5(uint8_t *data_in, uint8_t *data_out)
{
	MD5_CTX ctx;

	if (!MD5_Init(&ctx))
		return -EFAULT;
	MD5_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx, MD5_DIGEST_LENGTH);
	return 0;
}
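
/*
 * The helpers above deliberately run only the compression function
 * (e.g. SHA256_Transform()) on a single input block, with no padding or
 * length finalisation, and then copy the raw chaining variables out of
 * the OpenSSL context. This "partial hash" is what the QAT hardware
 * expects as an initial hash state; it is not the same as a finalised
 * digest of the same block, e.g.:
 *
 *	uint8_t block[SHA256_CBLOCK] = { 0 };
 *	uint8_t full[SHA256_DIGEST_LENGTH];
 *	SHA256(block, sizeof(block), full);	(padded and finalised)
 *
 * would not produce the same bytes as partial_hash_sha256(block, out).
 */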
static int partial_hash_compute(enum icp_qat_hw_auth_algo hash_alg,
			uint8_t *data_in,
			uint8_t *data_out)
{
	int digest_size;
	uint8_t digest[qat_hash_get_digest_size(
			ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
	uint32_t *hash_state_out_be32;
	uint64_t *hash_state_out_be64;
	int i;

	digest_size = qat_hash_get_digest_size(hash_alg);
	if (digest_size <= 0)
		return -EFAULT;

	hash_state_out_be32 = (uint32_t *)data_out;
	hash_state_out_be64 = (uint64_t *)data_out;

	switch (hash_alg) {
	case ICP_QAT_HW_AUTH_ALGO_SHA1:
		if (partial_hash_sha1(data_in, digest))
			return -EFAULT;
		for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
			*hash_state_out_be32 =
				rte_bswap32(*(((uint32_t *)digest)+i));
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA224:
		if (partial_hash_sha224(data_in, digest))
			return -EFAULT;
		for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
			*hash_state_out_be32 =
				rte_bswap32(*(((uint32_t *)digest)+i));
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA256:
		if (partial_hash_sha256(data_in, digest))
			return -EFAULT;
		for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
			*hash_state_out_be32 =
				rte_bswap32(*(((uint32_t *)digest)+i));
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA384:
		if (partial_hash_sha384(data_in, digest))
			return -EFAULT;
		for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
			*hash_state_out_be64 =
				rte_bswap64(*(((uint64_t *)digest)+i));
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA512:
		if (partial_hash_sha512(data_in, digest))
			return -EFAULT;
		for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
			*hash_state_out_be64 =
				rte_bswap64(*(((uint64_t *)digest)+i));
		break;
	case ICP_QAT_HW_AUTH_ALGO_MD5:
		if (partial_hash_md5(data_in, data_out))
			return -EFAULT;
		break;
	default:
		QAT_LOG(ERR, "invalid hash alg %u", hash_alg);
		return -EFAULT;
	}

	return 0;
}
#define HMAC_IPAD_VALUE	0x36
#define HMAC_OPAD_VALUE	0x5c
#define HASH_XCBC_PRECOMP_KEY_NUM 3

static const uint8_t AES_CMAC_SEED[ICP_QAT_HW_AES_128_KEY_SZ];

static void aes_cmac_key_derive(uint8_t *base, uint8_t *derived)
{
	int i;

	/* Treat the array as a big-endian integer and shift left by one */
	derived[0] = base[0] << 1;
	for (i = 1; i < ICP_QAT_HW_AES_BLK_SZ; i++) {
		derived[i] = base[i] << 1;
		derived[i - 1] |= base[i] >> 7;
	}

	/* If the shifted-out bit was set, fold in the CMAC constant Rb */
	if (base[0] & 0x80)
		derived[ICP_QAT_HW_AES_BLK_SZ - 1] ^= QAT_AES_CMAC_CONST_RB;
}
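
/*
 * The derivation above is the CMAC subkey schedule from RFC 4493 /
 * NIST SP 800-38B: each subkey is the previous value doubled in
 * GF(2^128), i.e. shifted left one bit with the constant Rb
 * (QAT_AES_CMAC_CONST_RB) XOR-ed into the last byte when the
 * shifted-out bit was set. K1 = double(AES-128(K, 0^128)) and
 * K2 = double(K1), matching the k1/k2 values that
 * qat_sym_do_precomputes() below writes into the content descriptor.
 */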
static int qat_sym_do_precomputes(enum icp_qat_hw_auth_algo hash_alg,
				const uint8_t *auth_key,
				uint16_t auth_keylen,
				uint8_t *p_state_buf,
				uint16_t *p_state_len,
				uint8_t aes_cmac)
{
	int block_size;
	uint8_t ipad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
	uint8_t opad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
	int i;

	if (hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC) {

		/* CMAC */
		if (aes_cmac) {
			AES_KEY enc_key;
			uint8_t *in = NULL;
			uint8_t k0[ICP_QAT_HW_AES_128_KEY_SZ];
			uint8_t *k1, *k2;

			auth_keylen = ICP_QAT_HW_AES_128_KEY_SZ;

			in = rte_zmalloc("AES CMAC K1",
					 ICP_QAT_HW_AES_128_KEY_SZ, 16);
			if (in == NULL) {
				QAT_LOG(ERR, "Failed to alloc memory");
				return -ENOMEM;
			}

			rte_memcpy(in, AES_CMAC_SEED,
				   ICP_QAT_HW_AES_128_KEY_SZ);
			rte_memcpy(p_state_buf, auth_key, auth_keylen);

			if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
				&enc_key) != 0) {
				rte_free(in);
				return -EFAULT;
			}

			AES_encrypt(in, k0, &enc_key);

			k1 = p_state_buf + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
			k2 = k1 + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;

			aes_cmac_key_derive(k0, k1);
			aes_cmac_key_derive(k1, k2);

			memset(k0, 0, ICP_QAT_HW_AES_128_KEY_SZ);
			*p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
			rte_free(in);
			return 0;
		}

		static uint8_t qat_aes_xcbc_key_seed[
				ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ] = {
			0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
			0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
			0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
			0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
			0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
			0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
		};

		uint8_t *in = NULL;
		uint8_t *out = p_state_buf;
		int x;
		AES_KEY enc_key;

		in = rte_zmalloc("working mem for key",
				ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ, 16);
		if (in == NULL) {
			QAT_LOG(ERR, "Failed to alloc memory");
			return -ENOMEM;
		}

		rte_memcpy(in, qat_aes_xcbc_key_seed,
				ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
		for (x = 0; x < HASH_XCBC_PRECOMP_KEY_NUM; x++) {
			if (AES_set_encrypt_key(auth_key,
						auth_keylen << 3,
						&enc_key) != 0) {
				rte_free(in -
					(x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ));
				memset(out -
					(x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ),
					0, ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
				return -EFAULT;
			}
			AES_encrypt(in, out, &enc_key);
			in += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
			out += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
		}
		*p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
		rte_free(in - x*ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ);

		return 0;

	} else if ((hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_128) ||
		(hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_64)) {
		uint8_t *in = NULL;
		uint8_t *out = p_state_buf;
		AES_KEY enc_key;

		memset(p_state_buf, 0, ICP_QAT_HW_GALOIS_H_SZ +
				ICP_QAT_HW_GALOIS_LEN_A_SZ +
				ICP_QAT_HW_GALOIS_E_CTR0_SZ);
		in = rte_zmalloc("working mem for key",
				ICP_QAT_HW_GALOIS_H_SZ, 16);
		if (in == NULL) {
			QAT_LOG(ERR, "Failed to alloc memory");
			return -ENOMEM;
		}

		memset(in, 0, ICP_QAT_HW_GALOIS_H_SZ);
		if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
			&enc_key) != 0) {
			rte_free(in);
			return -EFAULT;
		}
		AES_encrypt(in, out, &enc_key);
		*p_state_len = ICP_QAT_HW_GALOIS_H_SZ +
				ICP_QAT_HW_GALOIS_LEN_A_SZ +
				ICP_QAT_HW_GALOIS_E_CTR0_SZ;
		rte_free(in);
		return 0;
	}

	block_size = qat_hash_get_block_size(hash_alg);
	if (block_size < 0)
		return block_size;
	/* init ipad and opad from key and xor with fixed values */
	memset(ipad, 0, block_size);
	memset(opad, 0, block_size);

	if (auth_keylen > (unsigned int)block_size) {
		QAT_LOG(ERR, "invalid keylen %u", auth_keylen);
		return -EFAULT;
	}
	rte_memcpy(ipad, auth_key, auth_keylen);
	rte_memcpy(opad, auth_key, auth_keylen);

	for (i = 0; i < block_size; i++) {
		uint8_t *ipad_ptr = ipad + i;
		uint8_t *opad_ptr = opad + i;
		*ipad_ptr ^= HMAC_IPAD_VALUE;
		*opad_ptr ^= HMAC_OPAD_VALUE;
	}

	/* do partial hash of ipad and copy to state1 */
	if (partial_hash_compute(hash_alg, ipad, p_state_buf)) {
		memset(ipad, 0, block_size);
		memset(opad, 0, block_size);
		QAT_LOG(ERR, "ipad precompute failed");
		return -EFAULT;
	}

	/*
	 * State len is a multiple of 8, so may be larger than the digest.
	 * Put the partial hash of opad state_len bytes after state1
	 */
	*p_state_len = qat_hash_get_state1_size(hash_alg);
	if (partial_hash_compute(hash_alg, opad, p_state_buf + *p_state_len)) {
		memset(ipad, 0, block_size);
		memset(opad, 0, block_size);
		QAT_LOG(ERR, "opad precompute failed");
		return -EFAULT;
	}

	/* don't leave data lying around */
	memset(ipad, 0, block_size);
	memset(opad, 0, block_size);
	return 0;
}
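
/*
 * For the HMAC branch above, the precompute implements the standard
 * split of HMAC into two fixed compression-function states:
 *
 *	state1 = Compress(IV, K ^ ipad)   (inner state)
 *	state2 = Compress(IV, K ^ opad)   (outer state)
 *
 * so that HMAC(K, m) = H((K ^ opad) || H((K ^ ipad) || m)) can be
 * resumed by the hardware from state1/state2 without it ever seeing
 * the key again.
 */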
static void
qat_sym_session_init_common_hdr(struct icp_qat_fw_comn_req_hdr *header,
		enum qat_sym_proto_flag proto_flags)
{
	header->hdr_flags =
		ICP_QAT_FW_COMN_HDR_FLAGS_BUILD(ICP_QAT_FW_COMN_REQ_FLAG_SET);
	header->service_type = ICP_QAT_FW_COMN_REQ_CPM_FW_LA;
	header->comn_req_flags =
		ICP_QAT_FW_COMN_FLAGS_BUILD(QAT_COMN_CD_FLD_TYPE_64BIT_ADR,
					QAT_COMN_PTR_TYPE_FLAT);
	ICP_QAT_FW_LA_PARTIAL_SET(header->serv_specif_flags,
				  ICP_QAT_FW_LA_PARTIAL_NONE);
	ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(header->serv_specif_flags,
					ICP_QAT_FW_CIPH_IV_16BYTE_DATA);

	switch (proto_flags) {
	case QAT_CRYPTO_PROTO_FLAG_NONE:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_PROTO);
		break;
	case QAT_CRYPTO_PROTO_FLAG_CCM:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_CCM_PROTO);
		break;
	case QAT_CRYPTO_PROTO_FLAG_GCM:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_GCM_PROTO);
		break;
	case QAT_CRYPTO_PROTO_FLAG_SNOW3G:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_SNOW_3G_PROTO);
		break;
	case QAT_CRYPTO_PROTO_FLAG_ZUC:
		ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(header->serv_specif_flags,
			ICP_QAT_FW_LA_ZUC_3G_PROTO);
		break;
	}

	ICP_QAT_FW_LA_UPDATE_STATE_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_UPDATE_STATE);
	ICP_QAT_FW_LA_DIGEST_IN_BUFFER_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_DIGEST_IN_BUFFER);
}
/*
 * Snow3G and ZUC should never use this function
 * and should set their protocol flag in both the cipher and the auth part
 * of the content descriptor building function
 */
static enum qat_sym_proto_flag
qat_get_crypto_proto_flag(uint16_t flags)
{
	int proto = ICP_QAT_FW_LA_PROTO_GET(flags);
	enum qat_sym_proto_flag qat_proto_flag =
			QAT_CRYPTO_PROTO_FLAG_NONE;

	switch (proto) {
	case ICP_QAT_FW_LA_GCM_PROTO:
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
		break;
	case ICP_QAT_FW_LA_CCM_PROTO:
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
		break;
	}

	return qat_proto_flag;
}
int qat_sym_session_aead_create_cd_cipher(struct qat_sym_session *cdesc,
						const uint8_t *cipherkey,
						uint32_t cipherkeylen)
{
	struct icp_qat_hw_cipher_algo_blk *cipher;
	struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
	struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
	struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
	void *ptr = &req_tmpl->cd_ctrl;
	struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
	struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
	enum icp_qat_hw_cipher_convert key_convert;
	enum qat_sym_proto_flag qat_proto_flag =
		QAT_CRYPTO_PROTO_FLAG_NONE;
	uint32_t total_key_size;
	uint16_t cipher_offset, cd_size;
	uint32_t wordIndex = 0;
	uint32_t *temp_key = NULL;

	if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER) {
		cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
		ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_DRAM_WR);
		ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_RET_AUTH_RES);
		ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
		cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
		ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_DRAM_WR);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
		QAT_LOG(ERR, "Invalid param, must be a cipher command.");
		return -EFAULT;
	}

	if (cdesc->qat_mode == ICP_QAT_HW_CIPHER_CTR_MODE) {
		/*
		 * CTR Streaming ciphers are a special case. Decrypt = encrypt
		 * Overriding default values previously set
		 */
		cdesc->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
		key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2
		|| cdesc->qat_cipher_alg ==
			ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)
		key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
	else if (cdesc->qat_dir == ICP_QAT_HW_CIPHER_ENCRYPT)
		key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
	else
		key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;

	if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
		total_key_size = ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ +
			ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
		cipher_cd_ctrl->cipher_state_sz =
			ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;

	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
		total_key_size = ICP_QAT_HW_KASUMI_F8_KEY_SZ;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_KASUMI_BLK_SZ >> 3;
		cipher_cd_ctrl->cipher_padding_sz =
					(2 * ICP_QAT_HW_KASUMI_BLK_SZ) >> 3;
	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES) {
		total_key_size = ICP_QAT_HW_3DES_KEY_SZ;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_3DES_BLK_SZ >> 3;
		qat_proto_flag =
			qat_get_crypto_proto_flag(header->serv_specif_flags);
	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_DES) {
		total_key_size = ICP_QAT_HW_DES_KEY_SZ;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_DES_BLK_SZ >> 3;
		qat_proto_flag =
			qat_get_crypto_proto_flag(header->serv_specif_flags);
	} else if (cdesc->qat_cipher_alg ==
		ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
		total_key_size = ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ +
			ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
		cipher_cd_ctrl->cipher_state_sz =
			ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
		cdesc->min_qat_dev_gen = QAT_GEN2;
	} else {
		total_key_size = cipherkeylen;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_AES_BLK_SZ >> 3;
		qat_proto_flag =
			qat_get_crypto_proto_flag(header->serv_specif_flags);
	}
	cipher_cd_ctrl->cipher_key_sz = total_key_size >> 3;
	cipher_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
	cipher_cd_ctrl->cipher_cfg_offset = cipher_offset >> 3;

	header->service_cmd_id = cdesc->qat_cmd;
	qat_sym_session_init_common_hdr(header, qat_proto_flag);

	cipher = (struct icp_qat_hw_cipher_algo_blk *)cdesc->cd_cur_ptr;
	cipher->cipher_config.val =
	    ICP_QAT_HW_CIPHER_CONFIG_BUILD(cdesc->qat_mode,
					cdesc->qat_cipher_alg, key_convert,
					cdesc->qat_dir);

	if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
		temp_key = (uint32_t *)(cdesc->cd_cur_ptr +
					sizeof(struct icp_qat_hw_cipher_config)
					+ cipherkeylen);
		memcpy(cipher->key, cipherkey, cipherkeylen);
		memcpy(temp_key, cipherkey, cipherkeylen);

		/* XOR Key with KASUMI F8 key modifier at 4 bytes level */
		for (wordIndex = 0; wordIndex < (cipherkeylen >> 2);
								wordIndex++)
			temp_key[wordIndex] ^= KASUMI_F8_KEY_MODIFIER_4_BYTES;

		cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
					cipherkeylen + cipherkeylen;
	} else {
		memcpy(cipher->key, cipherkey, cipherkeylen);
		cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
					cipherkeylen;
	}

	if (total_key_size > cipherkeylen) {
		uint32_t padding_size = total_key_size-cipherkeylen;
		if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
			&& (cipherkeylen == QAT_3DES_KEY_SZ_OPT2)) {
			/* K3 not provided so use K1 = K3 */
			memcpy(cdesc->cd_cur_ptr, cipherkey, padding_size);
		} else if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
			&& (cipherkeylen == QAT_3DES_KEY_SZ_OPT3)) {
			/* K2 and K3 not provided so use K1 = K2 = K3 */
			memcpy(cdesc->cd_cur_ptr, cipherkey,
				cipherkeylen);
			memcpy(cdesc->cd_cur_ptr+cipherkeylen,
				cipherkey, cipherkeylen);
		} else
			memset(cdesc->cd_cur_ptr, 0, padding_size);

		cdesc->cd_cur_ptr += padding_size;
	}
	cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
	cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;

	return 0;
}
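
/*
 * Layout of the cipher content descriptor built above, within the
 * session's cd buffer (offsets handed to firmware in quadwords):
 *
 *	+ icp_qat_hw_cipher_config		(at cipher_cfg_offset)
 *	+ cipher key				(cipherkeylen bytes)
 *	+ optional second key copy		(KASUMI F8: key ^ modifier)
 *	+ optional padding up to total_key_size	(3DES K2/K3, IV space)
 */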
int qat_sym_session_aead_create_cd_auth(struct qat_sym_session *cdesc,
						const uint8_t *authkey,
						uint32_t authkeylen,
						uint32_t aad_length,
						uint32_t digestsize,
						unsigned int operation)
{
	struct icp_qat_hw_auth_setup *hash;
	struct icp_qat_hw_cipher_algo_blk *cipherconfig;
	struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
	struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
	struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
	void *ptr = &req_tmpl->cd_ctrl;
	struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
	struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
	struct icp_qat_fw_la_auth_req_params *auth_param =
		(struct icp_qat_fw_la_auth_req_params *)
		((char *)&req_tmpl->serv_specif_rqpars +
		ICP_QAT_FW_HASH_REQUEST_PARAMETERS_OFFSET);
	uint16_t state1_size = 0, state2_size = 0;
	uint16_t hash_offset, cd_size;
	uint32_t *aad_len = NULL;
	uint32_t wordIndex = 0;
	uint32_t *pTempKey;
	enum qat_sym_proto_flag qat_proto_flag =
		QAT_CRYPTO_PROTO_FLAG_NONE;

	if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_AUTH) {
		ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_DRAM_WR);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
		ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
				ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
				ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
				ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
				ICP_QAT_FW_SLICE_DRAM_WR);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
		QAT_LOG(ERR, "Invalid param, must be a hash command.");
		return -EFAULT;
	}

	if (operation == RTE_CRYPTO_AUTH_OP_VERIFY) {
		ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_NO_RET_AUTH_RES);
		ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_CMP_AUTH_RES);
		cdesc->auth_op = ICP_QAT_HW_AUTH_VERIFY;
	} else {
		ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_RET_AUTH_RES);
		ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
		cdesc->auth_op = ICP_QAT_HW_AUTH_GENERATE;
	}

	/*
	 * Setup the inner hash config
	 */
	hash_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
	hash = (struct icp_qat_hw_auth_setup *)cdesc->cd_cur_ptr;
	hash->auth_config.reserved = 0;
	hash->auth_config.config =
			ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE1,
				cdesc->qat_hash_alg, digestsize);

	if (cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_KASUMI_F9
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_NULL
			)
		hash->auth_counter.counter = 0;
	else {
		int block_size = qat_hash_get_block_size(cdesc->qat_hash_alg);

		if (block_size < 0)
			return block_size;
		hash->auth_counter.counter = rte_bswap32(block_size);
	}

	cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_auth_setup);

	/*
	 * cd_cur_ptr now points at the state1 information.
	 */
	switch (cdesc->qat_hash_alg) {
	case ICP_QAT_HW_AUTH_ALGO_SHA1:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA1, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = RTE_ALIGN_CEIL(ICP_QAT_HW_SHA1_STATE2_SZ, 8);
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA224:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA224, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_SHA224_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA256:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA256, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_SHA256_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA384:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA384, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_SHA384_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA512:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA512, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_SHA512_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
		state1_size = ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;

		if (cdesc->aes_cmac)
			memset(cdesc->cd_cur_ptr, 0, state1_size);
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC,
			authkey, authkeylen, cdesc->cd_cur_ptr + state1_size,
			&state2_size, cdesc->aes_cmac)) {
			cdesc->aes_cmac ? QAT_LOG(ERR,
						"(CMAC)precompute failed")
					: QAT_LOG(ERR,
						"(XCBC)precompute failed");
			return -EFAULT;
		}
		break;
	case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
	case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
		state1_size = ICP_QAT_HW_GALOIS_128_STATE1_SZ;
		if (qat_sym_do_precomputes(cdesc->qat_hash_alg, authkey,
			authkeylen, cdesc->cd_cur_ptr + state1_size,
			&state2_size, cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(GCM)precompute failed");
			return -EFAULT;
		}
		/*
		 * Write (the length of AAD) into bytes 16-19 of state2
		 * in big-endian format. This field is 8 bytes
		 */
		auth_param->u2.aad_sz =
				RTE_ALIGN_CEIL(aad_length, 16);
		auth_param->hash_state_sz = (auth_param->u2.aad_sz) >> 3;

		aad_len = (uint32_t *)(cdesc->cd_cur_ptr +
					ICP_QAT_HW_GALOIS_128_STATE1_SZ +
					ICP_QAT_HW_GALOIS_H_SZ);
		*aad_len = rte_bswap32(aad_length);
		cdesc->aad_len = aad_length;
		break;
	case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2);
		state2_size = ICP_QAT_HW_SNOW_3G_UIA2_STATE2_SZ;
		memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);

		cipherconfig = (struct icp_qat_hw_cipher_algo_blk *)
				(cdesc->cd_cur_ptr + state1_size + state2_size);
		cipherconfig->cipher_config.val =
		ICP_QAT_HW_CIPHER_CONFIG_BUILD(ICP_QAT_HW_CIPHER_ECB_MODE,
			ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2,
			ICP_QAT_HW_CIPHER_KEY_CONVERT,
			ICP_QAT_HW_CIPHER_ENCRYPT);
		memcpy(cipherconfig->key, authkey, authkeylen);
		memset(cipherconfig->key + authkeylen,
				0, ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ);
		cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
				authkeylen + ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
		auth_param->hash_state_sz = ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
		break;
	case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
		hash->auth_config.config =
			ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE0,
				cdesc->qat_hash_alg, digestsize);
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3);
		state2_size = ICP_QAT_HW_ZUC_3G_EIA3_STATE2_SZ;
		memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size
			+ ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ);

		memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
		cdesc->cd_cur_ptr += state1_size + state2_size
			+ ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
		auth_param->hash_state_sz = ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
		cdesc->min_qat_dev_gen = QAT_GEN2;

		break;
	case ICP_QAT_HW_AUTH_ALGO_MD5:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_MD5, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(MD5)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_MD5_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_NULL:
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_NULL);
		state2_size = ICP_QAT_HW_NULL_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC);
		state2_size = ICP_QAT_HW_AES_CBC_MAC_KEY_SZ +
				ICP_QAT_HW_AES_CCM_CBC_E_CTR0_SZ;

		if (aad_length > 0) {
			aad_length += ICP_QAT_HW_CCM_AAD_B0_LEN +
				ICP_QAT_HW_CCM_AAD_LEN_INFO;
			auth_param->u2.aad_sz =
				RTE_ALIGN_CEIL(aad_length,
					ICP_QAT_HW_CCM_AAD_ALIGNMENT);
		} else {
			auth_param->u2.aad_sz = ICP_QAT_HW_CCM_AAD_B0_LEN;
		}
		cdesc->aad_len = aad_length;
		hash->auth_counter.counter = 0;

		hash_cd_ctrl->outer_prefix_sz = digestsize;
		auth_param->hash_state_sz = digestsize;

		memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
		break;
	case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_KASUMI_F9);
		state2_size = ICP_QAT_HW_KASUMI_F9_STATE2_SZ;
		memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
		pTempKey = (uint32_t *)(cdesc->cd_cur_ptr + state1_size
							+ authkeylen);
		/*
		 * The Inner Hash Initial State2 block must contain IK
		 * (Initialisation Key), followed by IK XOR-ed with KM
		 * (Key Modifier): IK||(IK^KM).
		 */
		/* write the auth key */
		memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
		/* initialise temp key with auth key */
		memcpy(pTempKey, authkey, authkeylen);
		/* XOR Key with KASUMI F9 key modifier at 4 bytes level */
		for (wordIndex = 0; wordIndex < (authkeylen >> 2); wordIndex++)
			pTempKey[wordIndex] ^= KASUMI_F9_KEY_MODIFIER_4_BYTES;
		break;
	default:
		QAT_LOG(ERR, "Invalid HASH alg %u", cdesc->qat_hash_alg);
		return -EFAULT;
	}

	/* Request template setup */
	qat_sym_session_init_common_hdr(header, qat_proto_flag);
	header->service_cmd_id = cdesc->qat_cmd;

	/* Auth CD config setup */
	hash_cd_ctrl->hash_cfg_offset = hash_offset >> 3;
	hash_cd_ctrl->hash_flags = ICP_QAT_FW_AUTH_HDR_FLAG_NO_NESTED;
	hash_cd_ctrl->inner_res_sz = digestsize;
	hash_cd_ctrl->final_sz = digestsize;
	hash_cd_ctrl->inner_state1_sz = state1_size;
	auth_param->auth_res_sz = digestsize;

	hash_cd_ctrl->inner_state2_sz = state2_size;
	hash_cd_ctrl->inner_state2_offset = hash_cd_ctrl->hash_cfg_offset +
			((sizeof(struct icp_qat_hw_auth_setup) +
			 RTE_ALIGN_CEIL(hash_cd_ctrl->inner_state1_sz, 8))
					>> 3);

	cdesc->cd_cur_ptr += state1_size + state2_size;
	cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;

	cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
	cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;

	return 0;
}
int qat_sym_validate_aes_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
{
	switch (key_len) {
	case ICP_QAT_HW_AES_128_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
		break;
	case ICP_QAT_HW_AES_192_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_AES192;
		break;
	case ICP_QAT_HW_AES_256_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

int qat_sym_validate_aes_docsisbpi_key(int key_len,
		enum icp_qat_hw_cipher_algo *alg)
{
	switch (key_len) {
	case ICP_QAT_HW_AES_128_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
		break;
	case ICP_QAT_HW_AES_256_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

int qat_sym_validate_snow3g_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
{
	switch (key_len) {
	case ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

int qat_sym_validate_kasumi_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
{
	switch (key_len) {
	case ICP_QAT_HW_KASUMI_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_KASUMI;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

int qat_sym_validate_des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
{
	switch (key_len) {
	case ICP_QAT_HW_DES_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_DES;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

int qat_sym_validate_3des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
{
	switch (key_len) {
	case QAT_3DES_KEY_SZ_OPT1:
	case QAT_3DES_KEY_SZ_OPT2:
	case QAT_3DES_KEY_SZ_OPT3:
		*alg = ICP_QAT_HW_CIPHER_ALGO_3DES;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

int qat_sym_validate_zuc_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
{
	switch (key_len) {
	case ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}