1 /* SPDX-License-Identifier: (BSD-3-Clause OR GPL-2.0)
2 * Copyright(c) 2015-2019 Intel Corporation
5 #include <openssl/sha.h> /* Needed to calculate pre-compute values */
6 #include <openssl/aes.h> /* Needed to calculate pre-compute values */
7 #include <openssl/md5.h> /* Needed to calculate pre-compute values */
8 #include <openssl/evp.h> /* Needed for bpi runt block processing */
10 #include <rte_memcpy.h>
11 #include <rte_common.h>
12 #include <rte_spinlock.h>
13 #include <rte_byteorder.h>
15 #include <rte_malloc.h>
16 #include <rte_crypto_sym.h>
19 #include "qat_sym_session.h"
20 #include "qat_sym_pmd.h"
/*
 * Release the OpenSSL EVP cipher context used for DOCSIS BPI runt-block
 * processing. Per OpenSSL docs EVP_CIPHER_CTX_free(NULL) is a no-op, so a
 * NULL bpi_ctx is safe here.
 * NOTE(review): listing appears to have lines elided (non-contiguous
 * original line numbers); code kept byte-identical.
 */
22 /** Frees a context previously created
23 * Depends on openssl libcrypto
26 bpi_cipher_ctx_free(void *bpi_ctx)
29 EVP_CIPHER_CTX_free((EVP_CIPHER_CTX *)bpi_ctx);
/*
 * Allocate and initialise an OpenSSL ECB-mode context (AES-128 or DES,
 * chosen from the cryptodev algorithm) used to pre-encrypt the IV for
 * DOCSIS BPI runt-block handling. 'direction' is intentionally unused:
 * the IV is always ECB *encrypted* regardless of cipher direction (see
 * comment at original line 54). On EVP_EncryptInit_ex failure the newly
 * created context is freed before returning (error path, lines elided).
 */
32 /** Creates a context in either AES or DES in ECB mode
33 * Depends on openssl libcrypto
36 bpi_cipher_ctx_init(enum rte_crypto_cipher_algorithm cryptodev_algo,
37 enum rte_crypto_cipher_operation direction __rte_unused,
38 const uint8_t *key, void **ctx)
40 const EVP_CIPHER *algo = NULL;
42 *ctx = EVP_CIPHER_CTX_new();
/* DES_DOCSISBPI selects EVP_des_ecb() in the elided branch; otherwise
 * AES-128 ECB is used. */
49 if (cryptodev_algo == RTE_CRYPTO_CIPHER_DES_DOCSISBPI)
52 algo = EVP_aes_128_ecb();
54 /* IV will be ECB encrypted whether direction is encrypt or decrypt*/
55 if (EVP_EncryptInit_ex(*ctx, algo, NULL, key, 0) != 1) {
64 EVP_CIPHER_CTX_free(*ctx);
/*
 * Scan the device's capability table (terminated by an entry whose op is
 * RTE_CRYPTO_OP_TYPE_UNDEFINED) for a symmetric CIPHER capability whose
 * algorithm matches 'algo'. Returns non-zero when found (return
 * statements elided from this listing).
 */
69 qat_is_cipher_alg_supported(enum rte_crypto_cipher_algorithm algo,
70 struct qat_sym_dev_private *internals)
73 const struct rte_cryptodev_capabilities *capability;
75 while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
76 RTE_CRYPTO_OP_TYPE_UNDEFINED) {
/* Skip non-symmetric and non-cipher entries (continue statements elided). */
77 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
80 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_CIPHER)
83 if (capability->sym.cipher.algo == algo)
/*
 * Same capability-table scan as qat_is_cipher_alg_supported(), but for
 * symmetric AUTH capabilities: returns non-zero when 'algo' appears in
 * the device's advertised auth algorithms.
 */
90 qat_is_auth_alg_supported(enum rte_crypto_auth_algorithm algo,
91 struct qat_sym_dev_private *internals)
94 const struct rte_cryptodev_capabilities *capability;
96 while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
97 RTE_CRYPTO_OP_TYPE_UNDEFINED) {
98 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
101 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_AUTH)
104 if (capability->sym.auth.algo == algo)
/*
 * Tear down this driver's private data attached to a cryptodev session:
 * free the OpenSSL BPI context (if any), zero the private area so key
 * material does not linger in the mempool, detach it from the session,
 * and return the object to its mempool.
 */
111 qat_sym_session_clear(struct rte_cryptodev *dev,
112 struct rte_cryptodev_sym_session *sess)
114 uint8_t index = dev->driver_id;
115 void *sess_priv = get_sym_session_private_data(sess, index);
116 struct qat_sym_session *s = (struct qat_sym_session *)sess_priv;
/* Guard on sess_priv != NULL is elided from this listing. */
120 bpi_cipher_ctx_free(s->bpi_ctx);
/* Scrub the whole private area (includes cached keys) before recycling. */
121 memset(s, 0, qat_sym_session_get_private_size(dev));
122 struct rte_mempool *sess_mp = rte_mempool_from_obj(sess_priv);
124 set_sym_session_private_data(sess, index, NULL);
125 rte_mempool_put(sess_mp, sess_priv);
/*
 * Map an rte_crypto transform chain onto a QAT firmware LA command id:
 * cipher-only, auth-only, AEAD (direction-dependent ordering), or a
 * two-element cipher+auth chain. Returns a negative value for an
 * unsupported chain (that return is elided from this listing).
 */
130 qat_get_cmd_id(const struct rte_crypto_sym_xform *xform)
/* Cipher Only */
133 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER && xform->next == NULL)
134 return ICP_QAT_FW_LA_CMD_CIPHER;
136 /* Authentication Only */
137 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH && xform->next == NULL)
138 return ICP_QAT_FW_LA_CMD_AUTH;
141 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
142 /* AES-GCM and AES-CCM works with different direction
143 * GCM first encrypts and generate hash where AES-CCM
144 * first generate hash and encrypts. Similar relation
145 * applies to decryption.
147 if (xform->aead.op == RTE_CRYPTO_AEAD_OP_ENCRYPT)
148 if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
149 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
/* else (CCM encrypt): hash first, then cipher. */
151 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
/* Decrypt direction: ordering is mirrored. */
153 if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
154 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
156 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
/* A single non-cipher/non-auth/non-AEAD xform is unsupported
 * (error return elided). */
159 if (xform->next == NULL)
162 /* Cipher then Authenticate */
163 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
164 xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
165 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
167 /* Authenticate then Cipher */
168 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH &&
169 xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
170 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
/*
 * Walk the transform chain and return a pointer to the first AUTH
 * xform's auth parameters (the chain walk and NULL fallback are elided
 * from this listing).
 */
175 static struct rte_crypto_auth_xform *
176 qat_get_auth_xform(struct rte_crypto_sym_xform *xform)
179 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH)
/*
 * Walk the transform chain and return a pointer to the first CIPHER
 * xform's cipher parameters (chain walk / NULL fallback elided from
 * this listing).
 */
188 static struct rte_crypto_cipher_xform *
189 qat_get_cipher_xform(struct rte_crypto_sym_xform *xform)
192 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
193 return &xform->cipher;
/*
 * Translate the cipher part of a transform chain into QAT session state:
 * validate the key length per algorithm, select the QAT cipher algorithm
 * and mode, set direction, and build the cipher content descriptor.
 * For the DOCSIS BPI algorithms an OpenSSL ECB context is also created
 * for runt-block processing. Error returns and 'break' statements are
 * elided from this listing; code kept byte-identical.
 */
202 qat_sym_session_configure_cipher(struct rte_cryptodev *dev,
203 struct rte_crypto_sym_xform *xform,
204 struct qat_sym_session *session)
206 struct qat_sym_dev_private *internals = dev->data->dev_private;
207 struct rte_crypto_cipher_xform *cipher_xform = NULL;
210 /* Get cipher xform from crypto xform chain */
211 cipher_xform = qat_get_cipher_xform(xform);
213 session->cipher_iv.offset = cipher_xform->iv.offset;
214 session->cipher_iv.length = cipher_xform->iv.length;
216 switch (cipher_xform->algo) {
217 case RTE_CRYPTO_CIPHER_AES_CBC:
218 if (qat_sym_validate_aes_key(cipher_xform->key.length,
219 &session->qat_cipher_alg) != 0) {
220 QAT_LOG(ERR, "Invalid AES cipher key size");
224 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
226 case RTE_CRYPTO_CIPHER_AES_CTR:
227 if (qat_sym_validate_aes_key(cipher_xform->key.length,
228 &session->qat_cipher_alg) != 0) {
229 QAT_LOG(ERR, "Invalid AES cipher key size");
233 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
235 case RTE_CRYPTO_CIPHER_SNOW3G_UEA2:
236 if (qat_sym_validate_snow3g_key(cipher_xform->key.length,
237 &session->qat_cipher_alg) != 0) {
238 QAT_LOG(ERR, "Invalid SNOW 3G cipher key size");
/* SNOW 3G is driven in ECB mode at the QAT HW level. */
242 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
244 case RTE_CRYPTO_CIPHER_NULL:
245 session->qat_cipher_alg = ICP_QAT_HW_CIPHER_ALGO_NULL;
246 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
248 case RTE_CRYPTO_CIPHER_KASUMI_F8:
249 if (qat_sym_validate_kasumi_key(cipher_xform->key.length,
250 &session->qat_cipher_alg) != 0) {
251 QAT_LOG(ERR, "Invalid KASUMI cipher key size");
255 session->qat_mode = ICP_QAT_HW_CIPHER_F8_MODE;
257 case RTE_CRYPTO_CIPHER_3DES_CBC:
258 if (qat_sym_validate_3des_key(cipher_xform->key.length,
259 &session->qat_cipher_alg) != 0) {
260 QAT_LOG(ERR, "Invalid 3DES cipher key size");
264 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
266 case RTE_CRYPTO_CIPHER_DES_CBC:
267 if (qat_sym_validate_des_key(cipher_xform->key.length,
268 &session->qat_cipher_alg) != 0) {
269 QAT_LOG(ERR, "Invalid DES cipher key size");
273 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
275 case RTE_CRYPTO_CIPHER_3DES_CTR:
276 if (qat_sym_validate_3des_key(cipher_xform->key.length,
277 &session->qat_cipher_alg) != 0) {
278 QAT_LOG(ERR, "Invalid 3DES cipher key size");
282 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
/* DOCSIS BPI: build an OpenSSL ECB context (for runt blocks) in
 * addition to the normal QAT CBC configuration. */
284 case RTE_CRYPTO_CIPHER_DES_DOCSISBPI:
285 ret = bpi_cipher_ctx_init(
288 cipher_xform->key.data,
291 QAT_LOG(ERR, "failed to create DES BPI ctx");
294 if (qat_sym_validate_des_key(cipher_xform->key.length,
295 &session->qat_cipher_alg) != 0) {
296 QAT_LOG(ERR, "Invalid DES cipher key size");
300 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
302 case RTE_CRYPTO_CIPHER_AES_DOCSISBPI:
303 ret = bpi_cipher_ctx_init(
306 cipher_xform->key.data,
309 QAT_LOG(ERR, "failed to create AES BPI ctx");
312 if (qat_sym_validate_aes_docsisbpi_key(cipher_xform->key.length,
313 &session->qat_cipher_alg) != 0) {
314 QAT_LOG(ERR, "Invalid AES DOCSISBPI key size");
318 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
/* ZUC is optional per device generation; checked against the
 * capability table first. */
320 case RTE_CRYPTO_CIPHER_ZUC_EEA3:
321 if (!qat_is_cipher_alg_supported(
322 cipher_xform->algo, internals)) {
323 QAT_LOG(ERR, "%s not supported on this device",
324 rte_crypto_cipher_algorithm_strings
325 [cipher_xform->algo]);
329 if (qat_sym_validate_zuc_key(cipher_xform->key.length,
330 &session->qat_cipher_alg) != 0) {
331 QAT_LOG(ERR, "Invalid ZUC cipher key size");
335 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
/* XTS keys are double-length: validate each half; 192-bit halves
 * (AES-XTS-192) are explicitly rejected. */
337 case RTE_CRYPTO_CIPHER_AES_XTS:
338 if ((cipher_xform->key.length/2) == ICP_QAT_HW_AES_192_KEY_SZ) {
339 QAT_LOG(ERR, "AES-XTS-192 not supported");
343 if (qat_sym_validate_aes_key((cipher_xform->key.length/2),
344 &session->qat_cipher_alg) != 0) {
345 QAT_LOG(ERR, "Invalid AES-XTS cipher key size");
349 session->qat_mode = ICP_QAT_HW_CIPHER_XTS_MODE;
351 case RTE_CRYPTO_CIPHER_3DES_ECB:
352 case RTE_CRYPTO_CIPHER_AES_ECB:
353 case RTE_CRYPTO_CIPHER_AES_F8:
354 case RTE_CRYPTO_CIPHER_ARC4:
355 QAT_LOG(ERR, "Crypto QAT PMD: Unsupported Cipher alg %u",
360 QAT_LOG(ERR, "Crypto: Undefined Cipher specified %u\n",
366 if (cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
367 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
369 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
/* Build the cipher content descriptor from the (validated) key. */
371 if (qat_sym_session_aead_create_cd_cipher(session,
372 cipher_xform->key.data,
373 cipher_xform->key.length)) {
/* Error path: release the BPI context allocated above, if any. */
381 if (session->bpi_ctx) {
382 bpi_cipher_ctx_free(session->bpi_ctx);
383 session->bpi_ctx = NULL;
/*
 * rte_cryptodev session-configure entry point: take a private-data
 * object from the session mempool, fill it via
 * qat_sym_session_set_parameters(), and attach it to the generic
 * session under this driver's id. On parameter failure the object is
 * returned to the mempool (error returns elided from this listing).
 */
389 qat_sym_session_configure(struct rte_cryptodev *dev,
390 struct rte_crypto_sym_xform *xform,
391 struct rte_cryptodev_sym_session *sess,
392 struct rte_mempool *mempool)
394 void *sess_private_data;
397 if (rte_mempool_get(mempool, &sess_private_data)) {
399 "Couldn't get object from session mempool");
403 ret = qat_sym_session_set_parameters(dev, xform, sess_private_data);
406 "Crypto QAT PMD: failed to configure session parameters");
408 /* Return session to mempool */
409 rte_mempool_put(mempool, sess_private_data);
413 set_sym_session_private_data(sess, dev->driver_id,
/*
 * Fill a qat_sym_session from the transform chain: record the content
 * descriptor's IOVA, derive the firmware command id via
 * qat_get_cmd_id(), then dispatch to the cipher/auth/AEAD configure
 * helpers in the order the command id implies. All key-derivation and
 * similar service commands are rejected. Error returns elided.
 */
420 qat_sym_session_set_parameters(struct rte_cryptodev *dev,
421 struct rte_crypto_sym_xform *xform, void *session_private)
423 struct qat_sym_session *session = session_private;
427 /* Set context descriptor physical address */
428 session->cd_paddr = rte_mempool_virt2iova(session) +
429 offsetof(struct qat_sym_session, cd);
431 session->min_qat_dev_gen = QAT_GEN1;
433 /* Get requested QAT command id */
434 qat_cmd_id = qat_get_cmd_id(xform);
435 if (qat_cmd_id < 0 || qat_cmd_id >= ICP_QAT_FW_LA_CMD_DELIMITER) {
436 QAT_LOG(ERR, "Unsupported xform chain requested");
439 session->qat_cmd = (enum icp_qat_fw_la_cmd_id)qat_cmd_id;
440 switch (session->qat_cmd) {
441 case ICP_QAT_FW_LA_CMD_CIPHER:
442 ret = qat_sym_session_configure_cipher(dev, xform, session);
446 case ICP_QAT_FW_LA_CMD_AUTH:
447 ret = qat_sym_session_configure_auth(dev, xform, session);
/* CIPHER_HASH: AEAD xform handled whole; otherwise configure cipher
 * first then auth, matching the command's processing order. */
451 case ICP_QAT_FW_LA_CMD_CIPHER_HASH:
452 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
453 ret = qat_sym_session_configure_aead(dev, xform,
458 ret = qat_sym_session_configure_cipher(dev,
462 ret = qat_sym_session_configure_auth(dev,
/* HASH_CIPHER: mirror order — auth first, then cipher. */
468 case ICP_QAT_FW_LA_CMD_HASH_CIPHER:
469 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
470 ret = qat_sym_session_configure_aead(dev, xform,
475 ret = qat_sym_session_configure_auth(dev,
479 ret = qat_sym_session_configure_cipher(dev,
485 case ICP_QAT_FW_LA_CMD_TRNG_GET_RANDOM:
486 case ICP_QAT_FW_LA_CMD_TRNG_TEST:
487 case ICP_QAT_FW_LA_CMD_SSL3_KEY_DERIVE:
488 case ICP_QAT_FW_LA_CMD_TLS_V1_1_KEY_DERIVE:
489 case ICP_QAT_FW_LA_CMD_TLS_V1_2_KEY_DERIVE:
490 case ICP_QAT_FW_LA_CMD_MGF1:
491 case ICP_QAT_FW_LA_CMD_AUTH_PRE_COMP:
492 case ICP_QAT_FW_LA_CMD_CIPHER_PRE_COMP:
493 case ICP_QAT_FW_LA_CMD_DELIMITER:
494 QAT_LOG(ERR, "Unsupported Service %u",
498 QAT_LOG(ERR, "Unsupported Service %u",
/*
 * On GEN3 devices with a 12-byte (96-bit) GCM IV, reconfigure the
 * session to use the faster single-pass AES-GCM path: the request
 * becomes a plain CIPHER command in AEAD mode, with AAD/digest sizes
 * folded into the cipher config and request parameters instead of a
 * separate hash setup. Leaves the session untouched on other devices.
 */
507 qat_sym_session_handle_single_pass(struct qat_sym_dev_private *internals,
508 struct qat_sym_session *session,
509 struct rte_crypto_aead_xform *aead_xform)
511 enum qat_device_gen qat_dev_gen = internals->qat_dev->qat_dev_gen;
513 if (qat_dev_gen == QAT_GEN3 &&
514 aead_xform->iv.length == QAT_AES_GCM_SPC_IV_SIZE) {
515 /* Use faster Single-Pass GCM */
516 struct icp_qat_fw_la_cipher_req_params *cipher_param =
517 (void *) &session->fw_req.serv_specif_rqpars;
519 session->is_single_pass = 1;
520 session->min_qat_dev_gen = QAT_GEN3;
521 session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER;
522 session->qat_mode = ICP_QAT_HW_CIPHER_AEAD_MODE;
523 session->cipher_iv.offset = aead_xform->iv.offset;
524 session->cipher_iv.length = aead_xform->iv.length;
525 if (qat_sym_session_aead_create_cd_cipher(session,
526 aead_xform->key.data, aead_xform->key.length))
528 session->aad_len = aead_xform->aad_length;
529 session->digest_length = aead_xform->digest_length;
/* Encrypt generates and returns the tag; decrypt verifies it. */
530 if (aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
531 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
532 session->auth_op = ICP_QAT_HW_AUTH_GENERATE;
533 ICP_QAT_FW_LA_RET_AUTH_SET(
534 session->fw_req.comn_hdr.serv_specif_flags,
535 ICP_QAT_FW_LA_RET_AUTH_RES);
537 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
538 session->auth_op = ICP_QAT_HW_AUTH_VERIFY;
539 ICP_QAT_FW_LA_CMP_AUTH_SET(
540 session->fw_req.comn_hdr.serv_specif_flags,
541 ICP_QAT_FW_LA_CMP_AUTH_RES);
/* Single-pass proto flag replaces the normal GCM proto flag. */
543 ICP_QAT_FW_LA_SINGLE_PASS_PROTO_FLAG_SET(
544 session->fw_req.comn_hdr.serv_specif_flags,
545 ICP_QAT_FW_LA_SINGLE_PASS_PROTO);
546 ICP_QAT_FW_LA_PROTO_SET(
547 session->fw_req.comn_hdr.serv_specif_flags,
548 ICP_QAT_FW_LA_NO_PROTO);
549 ICP_QAT_FW_LA_GCM_IV_LEN_FLAG_SET(
550 session->fw_req.comn_hdr.serv_specif_flags,
551 ICP_QAT_FW_LA_GCM_IV_LEN_12_OCTETS);
552 session->fw_req.comn_hdr.service_cmd_id =
553 ICP_QAT_FW_LA_CMD_CIPHER;
554 session->cd.cipher.cipher_config.val =
555 ICP_QAT_HW_CIPHER_CONFIG_BUILD(
556 ICP_QAT_HW_CIPHER_AEAD_MODE,
557 session->qat_cipher_alg,
558 ICP_QAT_HW_CIPHER_NO_CONVERT,
/* Digest length is encoded into the cipher config word for SPC. */
560 QAT_FIELD_SET(session->cd.cipher.cipher_config.val,
561 aead_xform->digest_length,
562 QAT_CIPHER_AEAD_HASH_CMP_LEN_BITPOS,
563 QAT_CIPHER_AEAD_HASH_CMP_LEN_MASK);
564 session->cd.cipher.cipher_config.reserved =
565 ICP_QAT_HW_CIPHER_CONFIG_BUILD_UPPER(
566 aead_xform->aad_length);
567 cipher_param->spc_aad_sz = aead_xform->aad_length;
568 cipher_param->spc_auth_res_sz = aead_xform->digest_length;
/*
 * Translate the auth part of a transform chain into QAT session state:
 * select the QAT hash algorithm, then build the auth content
 * descriptor. AES-GMAC is special-cased: it is implemented as
 * GCM-with-no-payload, so a cipher content descriptor is also built and
 * the command becomes CIPHER_HASH / HASH_CIPHER depending on
 * generate/verify. 'break' statements and error returns are elided.
 */
574 qat_sym_session_configure_auth(struct rte_cryptodev *dev,
575 struct rte_crypto_sym_xform *xform,
576 struct qat_sym_session *session)
578 struct rte_crypto_auth_xform *auth_xform = qat_get_auth_xform(xform);
579 struct qat_sym_dev_private *internals = dev->data->dev_private;
580 const uint8_t *key_data = auth_xform->key.data;
581 uint8_t key_length = auth_xform->key.length;
582 session->aes_cmac = 0;
584 switch (auth_xform->algo) {
585 case RTE_CRYPTO_AUTH_SHA1_HMAC:
586 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA1;
588 case RTE_CRYPTO_AUTH_SHA224_HMAC:
589 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA224;
591 case RTE_CRYPTO_AUTH_SHA256_HMAC:
592 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA256;
594 case RTE_CRYPTO_AUTH_SHA384_HMAC:
595 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA384;
597 case RTE_CRYPTO_AUTH_SHA512_HMAC:
598 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA512;
600 case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
601 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
/* CMAC reuses the XCBC-MAC HW algo; aes_cmac flag switches the
 * precompute path (see qat_sym_do_precomputes). */
603 case RTE_CRYPTO_AUTH_AES_CMAC:
604 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
605 session->aes_cmac = 1;
607 case RTE_CRYPTO_AUTH_AES_GMAC:
608 if (qat_sym_validate_aes_key(auth_xform->key.length,
609 &session->qat_cipher_alg) != 0) {
610 QAT_LOG(ERR, "Invalid AES key size");
613 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
614 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
617 case RTE_CRYPTO_AUTH_SNOW3G_UIA2:
618 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2;
620 case RTE_CRYPTO_AUTH_MD5_HMAC:
621 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_MD5;
623 case RTE_CRYPTO_AUTH_NULL:
624 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_NULL;
626 case RTE_CRYPTO_AUTH_KASUMI_F9:
627 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_KASUMI_F9;
/* ZUC EIA3 is optional per device; capability-checked first. */
629 case RTE_CRYPTO_AUTH_ZUC_EIA3:
630 if (!qat_is_auth_alg_supported(auth_xform->algo, internals)) {
631 QAT_LOG(ERR, "%s not supported on this device",
632 rte_crypto_auth_algorithm_strings
636 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3;
638 case RTE_CRYPTO_AUTH_SHA1:
639 case RTE_CRYPTO_AUTH_SHA256:
640 case RTE_CRYPTO_AUTH_SHA512:
641 case RTE_CRYPTO_AUTH_SHA224:
642 case RTE_CRYPTO_AUTH_SHA384:
643 case RTE_CRYPTO_AUTH_MD5:
644 case RTE_CRYPTO_AUTH_AES_CBC_MAC:
645 QAT_LOG(ERR, "Crypto: Unsupported hash alg %u",
649 QAT_LOG(ERR, "Crypto: Undefined Hash algo %u specified",
654 session->auth_iv.offset = auth_xform->iv.offset;
655 session->auth_iv.length = auth_xform->iv.length;
657 if (auth_xform->algo == RTE_CRYPTO_AUTH_AES_GMAC) {
658 if (auth_xform->op == RTE_CRYPTO_AUTH_OP_GENERATE) {
659 session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER_HASH;
660 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
662 * It needs to create cipher desc content first,
663 * then authentication
666 if (qat_sym_session_aead_create_cd_cipher(session,
667 auth_xform->key.data,
668 auth_xform->key.length))
671 if (qat_sym_session_aead_create_cd_auth(session,
675 auth_xform->digest_length,
/* GMAC verify: descriptors built in the reverse order. */
679 session->qat_cmd = ICP_QAT_FW_LA_CMD_HASH_CIPHER;
680 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
682 * It needs to create authentication desc content first,
686 if (qat_sym_session_aead_create_cd_auth(session,
690 auth_xform->digest_length,
694 if (qat_sym_session_aead_create_cd_cipher(session,
695 auth_xform->key.data,
696 auth_xform->key.length))
699 /* Restore to authentication only only */
700 session->qat_cmd = ICP_QAT_FW_LA_CMD_AUTH;
/* Non-GMAC algorithms: auth content descriptor only. */
702 if (qat_sym_session_aead_create_cd_auth(session,
706 auth_xform->digest_length,
711 session->digest_length = auth_xform->digest_length;
/*
 * Configure an AEAD (AES-GCM or AES-CCM) session: validate key size,
 * pick QAT cipher/hash algorithms, optionally switch to the GEN3
 * single-pass GCM path, then build cipher and auth content descriptors
 * in the order dictated by algorithm+direction (GCM encrypts first,
 * CCM hashes first). Error returns elided from this listing.
 */
716 qat_sym_session_configure_aead(struct rte_cryptodev *dev,
717 struct rte_crypto_sym_xform *xform,
718 struct qat_sym_session *session)
720 struct rte_crypto_aead_xform *aead_xform = &xform->aead;
721 enum rte_crypto_auth_operation crypto_operation;
724 * Store AEAD IV parameters as cipher IV,
725 * to avoid unnecessary memory usage
727 session->cipher_iv.offset = xform->aead.iv.offset;
728 session->cipher_iv.length = xform->aead.iv.length;
730 switch (aead_xform->algo) {
731 case RTE_CRYPTO_AEAD_AES_GCM:
732 if (qat_sym_validate_aes_key(aead_xform->key.length,
733 &session->qat_cipher_alg) != 0) {
734 QAT_LOG(ERR, "Invalid AES key size");
737 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
738 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
740 case RTE_CRYPTO_AEAD_AES_CCM:
741 if (qat_sym_validate_aes_key(aead_xform->key.length,
742 &session->qat_cipher_alg) != 0) {
743 QAT_LOG(ERR, "Invalid AES key size");
746 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
747 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC;
750 QAT_LOG(ERR, "Crypto: Undefined AEAD specified %u\n",
755 session->is_single_pass = 0;
756 if (aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) {
757 /* Use faster Single-Pass GCM if possible */
758 int res = qat_sym_session_handle_single_pass(
759 dev->data->dev_private, session, aead_xform);
/* Single-pass path fully configured the session; nothing more to do. */
762 if (session->is_single_pass)
/* GCM-encrypt and CCM-decrypt both run the cipher stage first. */
766 if ((aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT &&
767 aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) ||
768 (aead_xform->op == RTE_CRYPTO_AEAD_OP_DECRYPT &&
769 aead_xform->algo == RTE_CRYPTO_AEAD_AES_CCM)) {
770 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
772 * It needs to create cipher desc content first,
773 * then authentication
775 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
776 RTE_CRYPTO_AUTH_OP_GENERATE : RTE_CRYPTO_AUTH_OP_VERIFY;
778 if (qat_sym_session_aead_create_cd_cipher(session,
779 aead_xform->key.data,
780 aead_xform->key.length))
783 if (qat_sym_session_aead_create_cd_auth(session,
784 aead_xform->key.data,
785 aead_xform->key.length,
786 aead_xform->aad_length,
787 aead_xform->digest_length,
/* Opposite direction: auth descriptor first, then cipher. */
791 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
793 * It needs to create authentication desc content first,
797 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
798 RTE_CRYPTO_AUTH_OP_VERIFY : RTE_CRYPTO_AUTH_OP_GENERATE;
800 if (qat_sym_session_aead_create_cd_auth(session,
801 aead_xform->key.data,
802 aead_xform->key.length,
803 aead_xform->aad_length,
804 aead_xform->digest_length,
808 if (qat_sym_session_aead_create_cd_cipher(session,
809 aead_xform->key.data,
810 aead_xform->key.length))
814 session->digest_length = aead_xform->digest_length;
/*
 * Report the per-session private-data size this PMD needs from the
 * session mempool: sizeof(struct qat_sym_session) rounded up to an
 * 8-byte boundary.
 */
818 unsigned int qat_sym_session_get_private_size(
819 struct rte_cryptodev *dev __rte_unused)
821 return RTE_ALIGN_CEIL(sizeof(struct qat_sym_session), 8);
824 /* returns block size in bytes per cipher algo */
/* Returns a negative value (elided from this listing) for algorithms
 * without a defined block size. */
825 int qat_cipher_get_block_size(enum icp_qat_hw_cipher_algo qat_cipher_alg)
827 switch (qat_cipher_alg) {
828 case ICP_QAT_HW_CIPHER_ALGO_DES:
829 return ICP_QAT_HW_DES_BLK_SZ;
830 case ICP_QAT_HW_CIPHER_ALGO_3DES:
831 return ICP_QAT_HW_3DES_BLK_SZ;
/* All AES key sizes share the 16-byte block size. */
832 case ICP_QAT_HW_CIPHER_ALGO_AES128:
833 case ICP_QAT_HW_CIPHER_ALGO_AES192:
834 case ICP_QAT_HW_CIPHER_ALGO_AES256:
835 return ICP_QAT_HW_AES_BLK_SZ;
837 QAT_LOG(ERR, "invalid block cipher alg %u", qat_cipher_alg);
/*
 * State1 is the first hash-state region in the QAT content descriptor;
 * its size is the algorithm's state size rounded up to the HW alignment
 * (QAT_HW_DEFAULT_ALIGNMENT). The DELIMITER case yields the maximum so
 * callers can size worst-case buffers (e.g. on-stack digest arrays).
 */
844 * Returns size in bytes per hash algo for state1 size field in cd_ctrl
845 * This is digest size rounded up to nearest quadword
847 static int qat_hash_get_state1_size(enum icp_qat_hw_auth_algo qat_hash_alg)
849 switch (qat_hash_alg) {
850 case ICP_QAT_HW_AUTH_ALGO_SHA1:
851 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA1_STATE1_SZ,
852 QAT_HW_DEFAULT_ALIGNMENT);
853 case ICP_QAT_HW_AUTH_ALGO_SHA224:
854 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA224_STATE1_SZ,
855 QAT_HW_DEFAULT_ALIGNMENT);
856 case ICP_QAT_HW_AUTH_ALGO_SHA256:
857 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA256_STATE1_SZ,
858 QAT_HW_DEFAULT_ALIGNMENT);
859 case ICP_QAT_HW_AUTH_ALGO_SHA384:
860 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA384_STATE1_SZ,
861 QAT_HW_DEFAULT_ALIGNMENT);
862 case ICP_QAT_HW_AUTH_ALGO_SHA512:
863 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
864 QAT_HW_DEFAULT_ALIGNMENT);
865 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
866 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ,
867 QAT_HW_DEFAULT_ALIGNMENT);
868 case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
869 case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
870 return QAT_HW_ROUND_UP(ICP_QAT_HW_GALOIS_128_STATE1_SZ,
871 QAT_HW_DEFAULT_ALIGNMENT);
872 case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
873 return QAT_HW_ROUND_UP(ICP_QAT_HW_ZUC_3G_EIA3_STATE1_SZ,
874 QAT_HW_DEFAULT_ALIGNMENT);
875 case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
876 return QAT_HW_ROUND_UP(ICP_QAT_HW_SNOW_3G_UIA2_STATE1_SZ,
877 QAT_HW_DEFAULT_ALIGNMENT);
878 case ICP_QAT_HW_AUTH_ALGO_MD5:
879 return QAT_HW_ROUND_UP(ICP_QAT_HW_MD5_STATE1_SZ,
880 QAT_HW_DEFAULT_ALIGNMENT);
881 case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
882 return QAT_HW_ROUND_UP(ICP_QAT_HW_KASUMI_F9_STATE1_SZ,
883 QAT_HW_DEFAULT_ALIGNMENT);
884 case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
885 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_CBC_MAC_STATE1_SZ,
886 QAT_HW_DEFAULT_ALIGNMENT);
887 case ICP_QAT_HW_AUTH_ALGO_NULL:
888 return QAT_HW_ROUND_UP(ICP_QAT_HW_NULL_STATE1_SZ,
889 QAT_HW_DEFAULT_ALIGNMENT);
890 case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
891 /* return maximum state1 size in this case */
892 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
893 QAT_HW_DEFAULT_ALIGNMENT);
895 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
901 /* returns digest size in bytes per hash algo */
/* Digest size equals the STATE1 size constant for these algorithms;
 * DELIMITER returns the maximum (SHA-512) for worst-case buffers.
 * Negative return for unknown algorithms is elided from this listing. */
902 static int qat_hash_get_digest_size(enum icp_qat_hw_auth_algo qat_hash_alg)
904 switch (qat_hash_alg) {
905 case ICP_QAT_HW_AUTH_ALGO_SHA1:
906 return ICP_QAT_HW_SHA1_STATE1_SZ;
907 case ICP_QAT_HW_AUTH_ALGO_SHA224:
908 return ICP_QAT_HW_SHA224_STATE1_SZ;
909 case ICP_QAT_HW_AUTH_ALGO_SHA256:
910 return ICP_QAT_HW_SHA256_STATE1_SZ;
911 case ICP_QAT_HW_AUTH_ALGO_SHA384:
912 return ICP_QAT_HW_SHA384_STATE1_SZ;
913 case ICP_QAT_HW_AUTH_ALGO_SHA512:
914 return ICP_QAT_HW_SHA512_STATE1_SZ;
915 case ICP_QAT_HW_AUTH_ALGO_MD5:
916 return ICP_QAT_HW_MD5_STATE1_SZ;
917 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
918 return ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
919 case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
920 /* return maximum digest size in this case */
921 return ICP_QAT_HW_SHA512_STATE1_SZ;
923 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
929 /* returns block size in byes per hash algo */
/* Block sizes come from OpenSSL's *_CBLOCK constants (SHA-224 shares
 * SHA-256's 64-byte block; SHA-384 shares SHA-512's 128-byte block).
 * SHA1/GALOIS/MD5 returns and the error return are elided from this
 * listing. DELIMITER returns the maximum for worst-case buffers. */
930 static int qat_hash_get_block_size(enum icp_qat_hw_auth_algo qat_hash_alg)
932 switch (qat_hash_alg) {
933 case ICP_QAT_HW_AUTH_ALGO_SHA1:
935 case ICP_QAT_HW_AUTH_ALGO_SHA224:
936 return SHA256_CBLOCK;
937 case ICP_QAT_HW_AUTH_ALGO_SHA256:
938 return SHA256_CBLOCK;
939 case ICP_QAT_HW_AUTH_ALGO_SHA384:
940 return SHA512_CBLOCK;
941 case ICP_QAT_HW_AUTH_ALGO_SHA512:
942 return SHA512_CBLOCK;
943 case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
945 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
946 return ICP_QAT_HW_AES_BLK_SZ;
947 case ICP_QAT_HW_AUTH_ALGO_MD5:
949 case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
950 /* return maximum block size in this case */
951 return SHA512_CBLOCK;
953 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
/*
 * Run one SHA-1 compression round over a single input block and copy
 * the raw internal state (not a finalized digest) to data_out — used to
 * precompute HMAC ipad/opad states. Failure return on SHA1_Init error
 * is elided from this listing.
 */
959 static int partial_hash_sha1(uint8_t *data_in, uint8_t *data_out)
963 if (!SHA1_Init(&ctx))
965 SHA1_Transform(&ctx, data_in);
/* Copies the leading hash-state words from the SHA_CTX; assumes the
 * state is at the start of the struct — matches OpenSSL's layout. */
966 rte_memcpy(data_out, &ctx, SHA_DIGEST_LENGTH);
/*
 * One SHA-224 compression round over a single block; SHA-224 uses the
 * SHA-256 context/transform with different initial values, and the full
 * SHA-256-sized state is copied out (QAT wants the whole state).
 */
970 static int partial_hash_sha224(uint8_t *data_in, uint8_t *data_out)
974 if (!SHA224_Init(&ctx))
976 SHA256_Transform(&ctx, data_in);
977 rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
/*
 * One SHA-256 compression round over a single block; copies the raw
 * internal state out for HMAC precompute.
 */
981 static int partial_hash_sha256(uint8_t *data_in, uint8_t *data_out)
985 if (!SHA256_Init(&ctx))
987 SHA256_Transform(&ctx, data_in);
988 rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
/*
 * One SHA-384 compression round; SHA-384 shares the SHA-512 context and
 * transform, and the full SHA-512-sized state is copied out.
 */
992 static int partial_hash_sha384(uint8_t *data_in, uint8_t *data_out)
996 if (!SHA384_Init(&ctx))
998 SHA512_Transform(&ctx, data_in);
999 rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
/*
 * One SHA-512 compression round over a single block; copies the raw
 * internal state out for HMAC precompute.
 */
1003 static int partial_hash_sha512(uint8_t *data_in, uint8_t *data_out)
1007 if (!SHA512_Init(&ctx))
1009 SHA512_Transform(&ctx, data_in);
1010 rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
/*
 * One MD5 compression round over a single block. Note: unlike the SHA
 * variants, MD5's state is little-endian, so partial_hash_compute()
 * writes this result directly without byte-swapping.
 */
1014 static int partial_hash_md5(uint8_t *data_in, uint8_t *data_out)
1018 if (!MD5_Init(&ctx))
1020 MD5_Transform(&ctx, data_in)
1021 rte_memcpy(data_out, &ctx, MD5_DIGEST_LENGTH);
/*
 * Dispatch to the per-algorithm partial-hash helper and byte-swap the
 * resulting state into the big-endian layout QAT firmware expects:
 * 32-bit word swaps for SHA-1/224/256, 64-bit for SHA-384/512, and no
 * swap for MD5 (written directly to data_out). 'break' statements and
 * error returns are elided from this listing.
 */
1026 static int partial_hash_compute(enum icp_qat_hw_auth_algo hash_alg,
/* VLA sized to the maximum digest (DELIMITER case) so one buffer fits
 * every algorithm. */
1031 uint8_t digest[qat_hash_get_digest_size(
1032 ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1033 uint32_t *hash_state_out_be32;
1034 uint64_t *hash_state_out_be64;
1037 digest_size = qat_hash_get_digest_size(hash_alg);
1038 if (digest_size <= 0)
1041 hash_state_out_be32 = (uint32_t *)data_out;
1042 hash_state_out_be64 = (uint64_t *)data_out;
1045 case ICP_QAT_HW_AUTH_ALGO_SHA1:
1046 if (partial_hash_sha1(data_in, digest))
1048 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
1049 *hash_state_out_be32 =
1050 rte_bswap32(*(((uint32_t *)digest)+i));
1052 case ICP_QAT_HW_AUTH_ALGO_SHA224:
1053 if (partial_hash_sha224(data_in, digest))
1055 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
1056 *hash_state_out_be32 =
1057 rte_bswap32(*(((uint32_t *)digest)+i));
1059 case ICP_QAT_HW_AUTH_ALGO_SHA256:
1060 if (partial_hash_sha256(data_in, digest))
1062 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
1063 *hash_state_out_be32 =
1064 rte_bswap32(*(((uint32_t *)digest)+i));
1066 case ICP_QAT_HW_AUTH_ALGO_SHA384:
1067 if (partial_hash_sha384(data_in, digest))
1069 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
1070 *hash_state_out_be64 =
1071 rte_bswap64(*(((uint64_t *)digest)+i));
1073 case ICP_QAT_HW_AUTH_ALGO_SHA512:
1074 if (partial_hash_sha512(data_in, digest))
1076 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
1077 *hash_state_out_be64 =
1078 rte_bswap64(*(((uint64_t *)digest)+i));
/* MD5 state is little-endian already: write straight to data_out. */
1080 case ICP_QAT_HW_AUTH_ALGO_MD5:
1081 if (partial_hash_md5(data_in, data_out))
1085 QAT_LOG(ERR, "invalid hash alg %u", hash_alg);
/* Standard HMAC inner/outer pad bytes (RFC 2104). */
1091 #define HMAC_IPAD_VALUE 0x36
1092 #define HMAC_OPAD_VALUE 0x5c
/* AES-XCBC-MAC derives three subkeys (K1..K3) from fixed seeds. */
1093 #define HASH_XCBC_PRECOMP_KEY_NUM 3
/* All-zero 128-bit block (zero-initialized by C rules): encrypted with
 * the CMAC key to obtain L, from which subkeys K1/K2 are derived. */
1095 static const uint8_t AES_CMAC_SEED[ICP_QAT_HW_AES_128_KEY_SZ];
/*
 * CMAC subkey derivation (RFC 4493 generate_subkey): 'derived' is
 * 'base' left-shifted by one bit across the 16-byte block; per the RFC
 * the Rb constant is XORed into the last byte only when base's MSB was
 * set — the conditional guard appears to be in lines elided from this
 * listing (NOTE(review): confirm against full source).
 */
1097 static void aes_cmac_key_derive(uint8_t *base, uint8_t *derived)
1101 derived[0] = base[0] << 1;
1102 for (i = 1; i < ICP_QAT_HW_AES_BLK_SZ ; i++) {
1103 derived[i] = base[i] << 1;
/* Carry the MSB of each byte into the previous byte's LSB. */
1104 derived[i - 1] |= base[i] >> 7;
1108 derived[ICP_QAT_HW_AES_BLK_SZ - 1] ^= QAT_AES_CMAC_CONST_RB;
/*
 * Populate the hash-state precompute region (p_state_buf) the QAT
 * firmware needs, and report its length via p_state_len. Four paths:
 *  - AES-CMAC (aes_cmac flag via XCBC algo): derive K1/K2 per RFC 4493
 *    using OpenSSL AES, store key + subkeys;
 *  - AES-XCBC-MAC: encrypt the three fixed 0x01/0x02/0x03 seed blocks
 *    with the auth key to get K1..K3;
 *  - GCM/GMAC (GALOIS): compute the hash subkey H = AES_k(0^128) and
 *    zero the len_a / E(ctr0) areas;
 *  - HMAC (everything else): build ipad/opad from the key and store the
 *    partial-hash state of each (state1 then opad at state1-size
 *    offset). Intermediate buffers holding key material are zeroed /
 *    freed before return. Error paths are elided from this listing.
 */
1111 static int qat_sym_do_precomputes(enum icp_qat_hw_auth_algo hash_alg,
1112 const uint8_t *auth_key,
1113 uint16_t auth_keylen,
1114 uint8_t *p_state_buf,
1115 uint16_t *p_state_len,
/* ipad/opad sized for the largest supported hash block. */
1119 uint8_t ipad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1120 uint8_t opad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1123 if (hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC) {
/* --- AES-CMAC branch (selected by elided aes_cmac check) --- */
1129 uint8_t k0[ICP_QAT_HW_AES_128_KEY_SZ];
1132 auth_keylen = ICP_QAT_HW_AES_128_KEY_SZ;
1134 in = rte_zmalloc("AES CMAC K1",
1135 ICP_QAT_HW_AES_128_KEY_SZ, 16);
1138 QAT_LOG(ERR, "Failed to alloc memory");
1142 rte_memcpy(in, AES_CMAC_SEED,
1143 ICP_QAT_HW_AES_128_KEY_SZ);
1144 rte_memcpy(p_state_buf, auth_key, auth_keylen);
1146 if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
/* k0 = AES_k(0^128); K1/K2 derived from it per RFC 4493. */
1152 AES_encrypt(in, k0, &enc_key);
1154 k1 = p_state_buf + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1155 k2 = k1 + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1157 aes_cmac_key_derive(k0, k1);
1158 aes_cmac_key_derive(k1, k2);
/* Scrub the intermediate subkey material. */
1160 memset(k0, 0, ICP_QAT_HW_AES_128_KEY_SZ);
1161 *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
/* --- AES-XCBC-MAC branch: fixed seeds per RFC 3566 --- */
1165 static uint8_t qat_aes_xcbc_key_seed[
1166 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ] = {
1167 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1168 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1169 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1170 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1171 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1172 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1176 uint8_t *out = p_state_buf;
1180 in = rte_zmalloc("working mem for key",
1181 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ, 16);
1183 QAT_LOG(ERR, "Failed to alloc memory");
1187 rte_memcpy(in, qat_aes_xcbc_key_seed,
1188 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1189 for (x = 0; x < HASH_XCBC_PRECOMP_KEY_NUM; x++) {
1190 if (AES_set_encrypt_key(auth_key,
/* On key-schedule failure the partially written output and the
 * working buffer are scrubbed (cleanup lines partly elided). */
1194 (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ));
1196 (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ),
1197 0, ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1200 AES_encrypt(in, out, &enc_key);
1201 in += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1202 out += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1204 *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
/* 'in' was advanced in the loop; rewind to free the original pointer. */
1205 rte_free(in - x*ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ);
1209 } else if ((hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_128) ||
1210 (hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_64)) {
/* --- GCM/GMAC branch: H = AES_k(0^128) --- */
1212 uint8_t *out = p_state_buf;
1215 memset(p_state_buf, 0, ICP_QAT_HW_GALOIS_H_SZ +
1216 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1217 ICP_QAT_HW_GALOIS_E_CTR0_SZ);
1218 in = rte_zmalloc("working mem for key",
1219 ICP_QAT_HW_GALOIS_H_SZ, 16);
1221 QAT_LOG(ERR, "Failed to alloc memory");
1225 memset(in, 0, ICP_QAT_HW_GALOIS_H_SZ);
1226 if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1230 AES_encrypt(in, out, &enc_key);
1231 *p_state_len = ICP_QAT_HW_GALOIS_H_SZ +
1232 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1233 ICP_QAT_HW_GALOIS_E_CTR0_SZ;
/* --- HMAC branch (all remaining algorithms) --- */
1238 block_size = qat_hash_get_block_size(hash_alg);
1241 /* init ipad and opad from key and xor with fixed values */
1242 memset(ipad, 0, block_size);
1243 memset(opad, 0, block_size);
/* Keys longer than the block are rejected here rather than pre-hashed
 * (callers are expected to supply block-sized-or-shorter keys). */
1245 if (auth_keylen > (unsigned int)block_size) {
1246 QAT_LOG(ERR, "invalid keylen %u", auth_keylen);
1249 rte_memcpy(ipad, auth_key, auth_keylen);
1250 rte_memcpy(opad, auth_key, auth_keylen);
1252 for (i = 0; i < block_size; i++) {
1253 uint8_t *ipad_ptr = ipad + i;
1254 uint8_t *opad_ptr = opad + i;
1255 *ipad_ptr ^= HMAC_IPAD_VALUE;
1256 *opad_ptr ^= HMAC_OPAD_VALUE;
1259 /* do partial hash of ipad and copy to state1 */
1260 if (partial_hash_compute(hash_alg, ipad, p_state_buf)) {
1261 memset(ipad, 0, block_size);
1262 memset(opad, 0, block_size);
1263 QAT_LOG(ERR, "ipad precompute failed");
1268 * State len is a multiple of 8, so may be larger than the digest.
1269 * Put the partial hash of opad state_len bytes after state1
1271 *p_state_len = qat_hash_get_state1_size(hash_alg);
1272 if (partial_hash_compute(hash_alg, opad, p_state_buf + *p_state_len)) {
1273 memset(ipad, 0, block_size);
1274 memset(opad, 0, block_size);
1275 QAT_LOG(ERR, "opad precompute failed");
1279 /* don't leave data lying around */
1280 memset(ipad, 0, block_size);
1281 memset(opad, 0, block_size);
/*
 * Initialise the common firmware request header shared by all LA
 * requests for this session: service type, flat-pointer/64-bit-CD
 * flags, no-partial processing, 16-byte IV field, the protocol flag
 * selected by proto_flags, no inter-request state update, and digest
 * placed outside the buffer. 'break' statements are elided.
 */
1286 qat_sym_session_init_common_hdr(struct icp_qat_fw_comn_req_hdr *header,
1287 enum qat_sym_proto_flag proto_flags)
1290 ICP_QAT_FW_COMN_HDR_FLAGS_BUILD(ICP_QAT_FW_COMN_REQ_FLAG_SET);
1291 header->service_type = ICP_QAT_FW_COMN_REQ_CPM_FW_LA;
1292 header->comn_req_flags =
1293 ICP_QAT_FW_COMN_FLAGS_BUILD(QAT_COMN_CD_FLD_TYPE_64BIT_ADR,
1294 QAT_COMN_PTR_TYPE_FLAT);
1295 ICP_QAT_FW_LA_PARTIAL_SET(header->serv_specif_flags,
1296 ICP_QAT_FW_LA_PARTIAL_NONE);
1297 ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(header->serv_specif_flags,
1298 ICP_QAT_FW_CIPH_IV_16BYTE_DATA);
1300 switch (proto_flags) {
1301 case QAT_CRYPTO_PROTO_FLAG_NONE:
1302 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1303 ICP_QAT_FW_LA_NO_PROTO);
1305 case QAT_CRYPTO_PROTO_FLAG_CCM:
1306 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1307 ICP_QAT_FW_LA_CCM_PROTO);
1309 case QAT_CRYPTO_PROTO_FLAG_GCM:
1310 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1311 ICP_QAT_FW_LA_GCM_PROTO);
1313 case QAT_CRYPTO_PROTO_FLAG_SNOW3G:
1314 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1315 ICP_QAT_FW_LA_SNOW_3G_PROTO);
/* ZUC uses a dedicated flag field, not the generic PROTO field. */
1317 case QAT_CRYPTO_PROTO_FLAG_ZUC:
1318 ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(header->serv_specif_flags,
1319 ICP_QAT_FW_LA_ZUC_3G_PROTO);
1323 ICP_QAT_FW_LA_UPDATE_STATE_SET(header->serv_specif_flags,
1324 ICP_QAT_FW_LA_NO_UPDATE_STATE);
1325 ICP_QAT_FW_LA_DIGEST_IN_BUFFER_SET(header->serv_specif_flags,
1326 ICP_QAT_FW_LA_NO_DIGEST_IN_BUFFER);
1330 * Snow3G and ZUC must never use this function; they set their
1331 * protocol flags directly in both the cipher and auth parts of the
1332 * content-descriptor building functions
/*
 * Extract the protocol bits already programmed into the request header
 * flags and map them back to the driver-level proto flag enum.
 * Returns QAT_CRYPTO_PROTO_FLAG_NONE for anything other than GCM/CCM.
 *
 * NOTE(review): this listing is elided — the switch statement line,
 * break statements and closing braces are not visible in this excerpt.
 */
1334 static enum qat_sym_proto_flag
1335 qat_get_crypto_proto_flag(uint16_t flags)
1337 int proto = ICP_QAT_FW_LA_PROTO_GET(flags);
1338 enum qat_sym_proto_flag qat_proto_flag =
1339 QAT_CRYPTO_PROTO_FLAG_NONE;
1342 case ICP_QAT_FW_LA_GCM_PROTO:
1343 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
1345 case ICP_QAT_FW_LA_CCM_PROTO:
1346 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
1350 return qat_proto_flag;
/*
 * Populate the cipher section of a session's content descriptor (CD)
 * and the matching fields of the firmware request template.
 *
 * cdesc        - session under construction; cdesc->cd_cur_ptr tracks the
 *                write position inside cdesc->cd and is advanced past the
 *                cipher config and key material written here.
 * cipherkey    - raw cipher key copied into the CD.
 * cipherkeylen - length of cipherkey in bytes.
 *
 * NOTE(review): this listing is elided — several return statements,
 * braces and blank lines of the original file are not visible in this
 * excerpt; code lines below are reproduced unchanged.
 */
1353 int qat_sym_session_aead_create_cd_cipher(struct qat_sym_session *cdesc,
1354 const uint8_t *cipherkey,
1355 uint32_t cipherkeylen)
1357 struct icp_qat_hw_cipher_algo_blk *cipher;
1358 struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
1359 struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
1360 struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
1361 void *ptr = &req_tmpl->cd_ctrl;
1362 struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
1363 struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
1364 enum icp_qat_hw_cipher_convert key_convert;
1365 enum qat_sym_proto_flag qat_proto_flag =
1366 QAT_CRYPTO_PROTO_FLAG_NONE;
1367 uint32_t total_key_size;
1368 uint16_t cipher_offset, cd_size;
1369 uint32_t wordIndex = 0;
1370 uint32_t *temp_key = NULL;
/* Chain the CD slices according to the command type */
1372 if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER) {
1373 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1374 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1375 ICP_QAT_FW_SLICE_CIPHER);
1376 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1377 ICP_QAT_FW_SLICE_DRAM_WR);
/* Cipher-only: no auth result to return or compare */
1378 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1379 ICP_QAT_FW_LA_NO_RET_AUTH_RES);
1380 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1381 ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
1382 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1383 } else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
1384 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1385 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1386 ICP_QAT_FW_SLICE_CIPHER);
1387 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1388 ICP_QAT_FW_SLICE_AUTH);
1389 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1390 ICP_QAT_FW_SLICE_AUTH);
1391 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1392 ICP_QAT_FW_SLICE_DRAM_WR);
1393 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1394 } else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
1395 QAT_LOG(ERR, "Invalid param, must be a cipher command.");
/* Decide whether firmware must convert (expand) the key */
1399 if (cdesc->qat_mode == ICP_QAT_HW_CIPHER_CTR_MODE) {
1401 * CTR Streaming ciphers are a special case. Decrypt = encrypt
1402 * Overriding default values previously set
1404 cdesc->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
1405 key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
1406 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2
1407 || cdesc->qat_cipher_alg ==
1408 ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)
1409 key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
1410 else if (cdesc->qat_dir == ICP_QAT_HW_CIPHER_ENCRYPT)
1411 key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
1413 key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
/* Per-algorithm total key size, cipher state size and protocol flag */
1415 if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
1416 total_key_size = ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ +
1417 ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
1418 cipher_cd_ctrl->cipher_state_sz =
1419 ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
1420 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
1422 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
1423 total_key_size = ICP_QAT_HW_KASUMI_F8_KEY_SZ;
1424 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_KASUMI_BLK_SZ >> 3;
1425 cipher_cd_ctrl->cipher_padding_sz =
1426 (2 * ICP_QAT_HW_KASUMI_BLK_SZ) >> 3;
1427 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES) {
1428 total_key_size = ICP_QAT_HW_3DES_KEY_SZ;
1429 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_3DES_BLK_SZ >> 3;
1431 qat_get_crypto_proto_flag(header->serv_specif_flags);
1432 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_DES) {
1433 total_key_size = ICP_QAT_HW_DES_KEY_SZ;
1434 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_DES_BLK_SZ >> 3;
1436 qat_get_crypto_proto_flag(header->serv_specif_flags);
1437 } else if (cdesc->qat_cipher_alg ==
1438 ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
1439 total_key_size = ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ +
1440 ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
1441 cipher_cd_ctrl->cipher_state_sz =
1442 ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
1443 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
/* ZUC cipher requires at least a GEN2 QAT device */
1444 cdesc->min_qat_dev_gen = QAT_GEN2;
1446 total_key_size = cipherkeylen;
1447 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_AES_BLK_SZ >> 3;
1449 qat_get_crypto_proto_flag(header->serv_specif_flags);
/* Sizes/offsets in the CD control header are in units of 8 bytes */
1451 cipher_cd_ctrl->cipher_key_sz = total_key_size >> 3;
1452 cipher_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
1453 cipher_cd_ctrl->cipher_cfg_offset = cipher_offset >> 3;
1455 header->service_cmd_id = cdesc->qat_cmd;
1456 qat_sym_session_init_common_hdr(header, qat_proto_flag);
/* Write the cipher config word followed by the key material */
1458 cipher = (struct icp_qat_hw_cipher_algo_blk *)cdesc->cd_cur_ptr;
1459 cipher->cipher_config.val =
1460 ICP_QAT_HW_CIPHER_CONFIG_BUILD(cdesc->qat_mode,
1461 cdesc->qat_cipher_alg, key_convert,
/* KASUMI F8: store the key twice — plain copy plus modifier-XORed copy */
1464 if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
1465 temp_key = (uint32_t *)(cdesc->cd_cur_ptr +
1466 sizeof(struct icp_qat_hw_cipher_config)
1468 memcpy(cipher->key, cipherkey, cipherkeylen);
1469 memcpy(temp_key, cipherkey, cipherkeylen);
1471 /* XOR Key with KASUMI F8 key modifier at 4 bytes level */
1472 for (wordIndex = 0; wordIndex < (cipherkeylen >> 2);
1474 temp_key[wordIndex] ^= KASUMI_F8_KEY_MODIFIER_4_BYTES;
1476 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1477 cipherkeylen + cipherkeylen;
1479 memcpy(cipher->key, cipherkey, cipherkeylen);
1480 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
/* Pad short keys: 3DES K1=K3 / K1=K2=K3 options, otherwise zero-fill */
1484 if (total_key_size > cipherkeylen) {
1485 uint32_t padding_size = total_key_size-cipherkeylen;
1486 if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
1487 && (cipherkeylen == QAT_3DES_KEY_SZ_OPT2)) {
1488 /* K3 not provided so use K1 = K3*/
1489 memcpy(cdesc->cd_cur_ptr, cipherkey, padding_size);
1490 } else if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
1491 && (cipherkeylen == QAT_3DES_KEY_SZ_OPT3)) {
1492 /* K2 and K3 not provided so use K1 = K2 = K3*/
1493 memcpy(cdesc->cd_cur_ptr, cipherkey,
1495 memcpy(cdesc->cd_cur_ptr+cipherkeylen,
1496 cipherkey, cipherkeylen);
1498 memset(cdesc->cd_cur_ptr, 0, padding_size);
1500 cdesc->cd_cur_ptr += padding_size;
/* Final CD size, rounded up to an 8-byte multiple, in 8-byte units */
1502 cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
1503 cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;
/*
 * Populate the auth (hash) section of a session's content descriptor
 * and the matching fields of the firmware request template.
 *
 * authkey/authkeylen - authentication key; copied directly or run
 *                      through qat_sym_do_precomputes() depending on
 *                      the hash algorithm.
 * aad_length  - additional authenticated data length (used by the
 *               GCM and CCM/CBC-MAC cases).
 * digestsize  - digest size programmed into the hash config and into
 *               the request's auth result size.
 * operation   - RTE_CRYPTO_AUTH_OP_VERIFY selects compare-auth mode;
 *               any other value generates and returns the digest.
 *
 * NOTE(review): this listing is elided — return statements, braces,
 * switch break statements and some declarations (e.g. pTempKey) are
 * not visible in this excerpt; code lines below are unchanged.
 */
1508 int qat_sym_session_aead_create_cd_auth(struct qat_sym_session *cdesc,
1509 const uint8_t *authkey,
1510 uint32_t authkeylen,
1511 uint32_t aad_length,
1512 uint32_t digestsize,
1513 unsigned int operation)
1515 struct icp_qat_hw_auth_setup *hash;
1516 struct icp_qat_hw_cipher_algo_blk *cipherconfig;
1517 struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
1518 struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
1519 struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
1520 void *ptr = &req_tmpl->cd_ctrl;
1521 struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
1522 struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
1523 struct icp_qat_fw_la_auth_req_params *auth_param =
1524 (struct icp_qat_fw_la_auth_req_params *)
1525 ((char *)&req_tmpl->serv_specif_rqpars +
1526 ICP_QAT_FW_HASH_REQUEST_PARAMETERS_OFFSET);
1527 uint16_t state1_size = 0, state2_size = 0;
1528 uint16_t hash_offset, cd_size;
1529 uint32_t *aad_len = NULL;
1530 uint32_t wordIndex = 0;
1532 enum qat_sym_proto_flag qat_proto_flag =
1533 QAT_CRYPTO_PROTO_FLAG_NONE;
/* Chain the CD slices according to the command type */
1535 if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_AUTH) {
1536 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1537 ICP_QAT_FW_SLICE_AUTH);
1538 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1539 ICP_QAT_FW_SLICE_DRAM_WR);
1540 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1541 } else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
1542 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1543 ICP_QAT_FW_SLICE_AUTH);
1544 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1545 ICP_QAT_FW_SLICE_CIPHER);
1546 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1547 ICP_QAT_FW_SLICE_CIPHER);
1548 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1549 ICP_QAT_FW_SLICE_DRAM_WR);
1550 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1551 } else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
1552 QAT_LOG(ERR, "Invalid param, must be a hash command.");
/* Program digest-return vs. digest-compare behaviour */
1556 if (operation == RTE_CRYPTO_AUTH_OP_VERIFY) {
1557 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1558 ICP_QAT_FW_LA_NO_RET_AUTH_RES);
1559 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1560 ICP_QAT_FW_LA_CMP_AUTH_RES);
1561 cdesc->auth_op = ICP_QAT_HW_AUTH_VERIFY;
1563 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1564 ICP_QAT_FW_LA_RET_AUTH_RES);
1565 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1566 ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
1567 cdesc->auth_op = ICP_QAT_HW_AUTH_GENERATE;
1571 * Setup the inner hash config
1573 hash_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
1574 hash = (struct icp_qat_hw_auth_setup *)cdesc->cd_cur_ptr;
1575 hash->auth_config.reserved = 0;
1576 hash->auth_config.config =
1577 ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE1,
1578 cdesc->qat_hash_alg, digestsize);
/* Algorithms without an HMAC-style block counter get counter = 0 */
1580 if (cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2
1581 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_KASUMI_F9
1582 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3
1583 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC
1584 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC
1585 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_NULL
1587 hash->auth_counter.counter = 0;
1589 int block_size = qat_hash_get_block_size(cdesc->qat_hash_alg);
/* Block size is stored big-endian in the auth counter */
1593 hash->auth_counter.counter = rte_bswap32(block_size);
1596 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_auth_setup);
1599 * cd_cur_ptr now points at the state1 information.
1601 switch (cdesc->qat_hash_alg) {
1602 case ICP_QAT_HW_AUTH_ALGO_SHA1:
1603 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA1, authkey,
1604 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1606 QAT_LOG(ERR, "(SHA)precompute failed");
1609 state2_size = RTE_ALIGN_CEIL(ICP_QAT_HW_SHA1_STATE2_SZ, 8);
1611 case ICP_QAT_HW_AUTH_ALGO_SHA224:
1612 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA224, authkey,
1613 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1615 QAT_LOG(ERR, "(SHA)precompute failed");
1618 state2_size = ICP_QAT_HW_SHA224_STATE2_SZ;
1620 case ICP_QAT_HW_AUTH_ALGO_SHA256:
1621 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA256, authkey,
1622 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1624 QAT_LOG(ERR, "(SHA)precompute failed");
1627 state2_size = ICP_QAT_HW_SHA256_STATE2_SZ;
1629 case ICP_QAT_HW_AUTH_ALGO_SHA384:
1630 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA384, authkey,
1631 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1633 QAT_LOG(ERR, "(SHA)precompute failed");
1636 state2_size = ICP_QAT_HW_SHA384_STATE2_SZ;
1638 case ICP_QAT_HW_AUTH_ALGO_SHA512:
1639 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA512, authkey,
1640 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1642 QAT_LOG(ERR, "(SHA)precompute failed");
1645 state2_size = ICP_QAT_HW_SHA512_STATE2_SZ;
1647 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
1648 state1_size = ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
/* Same HW algo serves both XCBC-MAC and CMAC; aes_cmac selects */
1650 if (cdesc->aes_cmac)
1651 memset(cdesc->cd_cur_ptr, 0, state1_size);
1652 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC,
1653 authkey, authkeylen, cdesc->cd_cur_ptr + state1_size,
1654 &state2_size, cdesc->aes_cmac)) {
1655 cdesc->aes_cmac ? QAT_LOG(ERR,
1656 "(CMAC)precompute failed")
1658 "(XCBC)precompute failed");
1662 case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
1663 case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
1664 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
1665 state1_size = ICP_QAT_HW_GALOIS_128_STATE1_SZ;
1666 if (qat_sym_do_precomputes(cdesc->qat_hash_alg, authkey,
1667 authkeylen, cdesc->cd_cur_ptr + state1_size,
1668 &state2_size, cdesc->aes_cmac)) {
1669 QAT_LOG(ERR, "(GCM)precompute failed");
1673 * Write (the length of AAD) into bytes 16-19 of state2
1674 * in big-endian format. This field is 8 bytes
1676 auth_param->u2.aad_sz =
1677 RTE_ALIGN_CEIL(aad_length, 16);
1678 auth_param->hash_state_sz = (auth_param->u2.aad_sz) >> 3;
1680 aad_len = (uint32_t *)(cdesc->cd_cur_ptr +
1681 ICP_QAT_HW_GALOIS_128_STATE1_SZ +
1682 ICP_QAT_HW_GALOIS_H_SZ);
1683 *aad_len = rte_bswap32(aad_length);
1684 cdesc->aad_len = aad_length;
1686 case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
1687 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
1688 state1_size = qat_hash_get_state1_size(
1689 ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2);
1690 state2_size = ICP_QAT_HW_SNOW_3G_UIA2_STATE2_SZ;
1691 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
/* A cipher config + key/IV block follows state2 for SNOW3G auth */
1693 cipherconfig = (struct icp_qat_hw_cipher_algo_blk *)
1694 (cdesc->cd_cur_ptr + state1_size + state2_size);
1695 cipherconfig->cipher_config.val =
1696 ICP_QAT_HW_CIPHER_CONFIG_BUILD(ICP_QAT_HW_CIPHER_ECB_MODE,
1697 ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2,
1698 ICP_QAT_HW_CIPHER_KEY_CONVERT,
1699 ICP_QAT_HW_CIPHER_ENCRYPT);
1700 memcpy(cipherconfig->key, authkey, authkeylen);
1701 memset(cipherconfig->key + authkeylen,
1702 0, ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ);
1703 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1704 authkeylen + ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
1705 auth_param->hash_state_sz = ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
1707 case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
/* ZUC EIA3 uses auth MODE0, overriding the MODE1 config above */
1708 hash->auth_config.config =
1709 ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE0,
1710 cdesc->qat_hash_alg, digestsize);
1711 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
1712 state1_size = qat_hash_get_state1_size(
1713 ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3);
1714 state2_size = ICP_QAT_HW_ZUC_3G_EIA3_STATE2_SZ;
1715 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size
1716 + ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ);
1718 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1719 cdesc->cd_cur_ptr += state1_size + state2_size
1720 + ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
1721 auth_param->hash_state_sz = ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
/* ZUC auth requires at least a GEN2 QAT device */
1722 cdesc->min_qat_dev_gen = QAT_GEN2;
1725 case ICP_QAT_HW_AUTH_ALGO_MD5:
1726 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_MD5, authkey,
1727 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1729 QAT_LOG(ERR, "(MD5)precompute failed");
1732 state2_size = ICP_QAT_HW_MD5_STATE2_SZ;
1734 case ICP_QAT_HW_AUTH_ALGO_NULL:
1735 state1_size = qat_hash_get_state1_size(
1736 ICP_QAT_HW_AUTH_ALGO_NULL);
1737 state2_size = ICP_QAT_HW_NULL_STATE2_SZ;
1739 case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
1740 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
1741 state1_size = qat_hash_get_state1_size(
1742 ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC);
1743 state2_size = ICP_QAT_HW_AES_CBC_MAC_KEY_SZ +
1744 ICP_QAT_HW_AES_CCM_CBC_E_CTR0_SZ;
/* CCM: AAD is prefixed with B0 + length info and aligned */
1746 if (aad_length > 0) {
1747 aad_length += ICP_QAT_HW_CCM_AAD_B0_LEN +
1748 ICP_QAT_HW_CCM_AAD_LEN_INFO;
1749 auth_param->u2.aad_sz =
1750 RTE_ALIGN_CEIL(aad_length,
1751 ICP_QAT_HW_CCM_AAD_ALIGNMENT);
1753 auth_param->u2.aad_sz = ICP_QAT_HW_CCM_AAD_B0_LEN;
1755 cdesc->aad_len = aad_length;
1756 hash->auth_counter.counter = 0;
1758 hash_cd_ctrl->outer_prefix_sz = digestsize;
1759 auth_param->hash_state_sz = digestsize;
1761 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1763 case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
1764 state1_size = qat_hash_get_state1_size(
1765 ICP_QAT_HW_AUTH_ALGO_KASUMI_F9);
1766 state2_size = ICP_QAT_HW_KASUMI_F9_STATE2_SZ;
1767 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
1768 pTempKey = (uint32_t *)(cdesc->cd_cur_ptr + state1_size
1771 * The Inner Hash Initial State2 block must contain IK
1772 * (Initialisation Key), followed by IK XOR-ed with KM
1773 * (Key Modifier): IK||(IK^KM).
1775 /* write the auth key */
1776 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1777 /* initialise temp key with auth key */
1778 memcpy(pTempKey, authkey, authkeylen);
1779 /* XOR Key with KASUMI F9 key modifier at 4 bytes level */
1780 for (wordIndex = 0; wordIndex < (authkeylen >> 2); wordIndex++)
1781 pTempKey[wordIndex] ^= KASUMI_F9_KEY_MODIFIER_4_BYTES;
1784 QAT_LOG(ERR, "Invalid HASH alg %u", cdesc->qat_hash_alg);
1788 /* Request template setup */
1789 qat_sym_session_init_common_hdr(header, qat_proto_flag);
1790 header->service_cmd_id = cdesc->qat_cmd;
1792 /* Auth CD config setup */
1793 hash_cd_ctrl->hash_cfg_offset = hash_offset >> 3;
1794 hash_cd_ctrl->hash_flags = ICP_QAT_FW_AUTH_HDR_FLAG_NO_NESTED;
1795 hash_cd_ctrl->inner_res_sz = digestsize;
1796 hash_cd_ctrl->final_sz = digestsize;
1797 hash_cd_ctrl->inner_state1_sz = state1_size;
1798 auth_param->auth_res_sz = digestsize;
1800 hash_cd_ctrl->inner_state2_sz = state2_size;
/* state2 follows the auth setup + 8-byte-aligned state1 region */
1801 hash_cd_ctrl->inner_state2_offset = hash_cd_ctrl->hash_cfg_offset +
1802 ((sizeof(struct icp_qat_hw_auth_setup) +
1803 RTE_ALIGN_CEIL(hash_cd_ctrl->inner_state1_sz, 8))
1806 cdesc->cd_cur_ptr += state1_size + state2_size;
1807 cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
1809 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1810 cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;
/*
 * Map an AES key length (in bytes) to the matching QAT HW cipher
 * algorithm enum, written through *alg.
 * NOTE(review): switch scaffolding (braces, breaks, default case and
 * return statements) is elided from this listing.
 */
1815 int qat_sym_validate_aes_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1818 case ICP_QAT_HW_AES_128_KEY_SZ:
1819 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
1821 case ICP_QAT_HW_AES_192_KEY_SZ:
1822 *alg = ICP_QAT_HW_CIPHER_ALGO_AES192;
1824 case ICP_QAT_HW_AES_256_KEY_SZ:
1825 *alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
/*
 * Validate an AES DOCSIS BPI key length; only AES-128 is accepted
 * in the visible case. Writes the HW algorithm through *alg.
 * NOTE(review): switch scaffolding is elided from this listing.
 */
1833 int qat_sym_validate_aes_docsisbpi_key(int key_len,
1834 enum icp_qat_hw_cipher_algo *alg)
1837 case ICP_QAT_HW_AES_128_KEY_SZ:
1838 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
/*
 * Validate a SNOW 3G UEA2 key length and select the HW algorithm.
 * NOTE(review): switch scaffolding is elided from this listing.
 */
1846 int qat_sym_validate_snow3g_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1849 case ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ:
1850 *alg = ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2;
/*
 * Validate a KASUMI key length and select the HW algorithm.
 * NOTE(review): switch scaffolding is elided from this listing.
 */
1858 int qat_sym_validate_kasumi_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1861 case ICP_QAT_HW_KASUMI_KEY_SZ:
1862 *alg = ICP_QAT_HW_CIPHER_ALGO_KASUMI;
/*
 * Validate a DES key length and select the HW algorithm.
 * NOTE(review): switch scaffolding is elided from this listing.
 */
1870 int qat_sym_validate_des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1873 case ICP_QAT_HW_DES_KEY_SZ:
1874 *alg = ICP_QAT_HW_CIPHER_ALGO_DES;
/*
 * Validate a 3DES key length: three key-size options are accepted
 * (full K1K2K3, K1K2 with K3=K1, or K1 only with K2=K3=K1 — padding
 * is performed later in qat_sym_session_aead_create_cd_cipher).
 * NOTE(review): switch scaffolding is elided from this listing.
 */
1882 int qat_sym_validate_3des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1885 case QAT_3DES_KEY_SZ_OPT1:
1886 case QAT_3DES_KEY_SZ_OPT2:
1887 case QAT_3DES_KEY_SZ_OPT3:
1888 *alg = ICP_QAT_HW_CIPHER_ALGO_3DES;
1896 int qat_sym_validate_zuc_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1899 case ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ:
1900 *alg = ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3;