1 /* SPDX-License-Identifier: (BSD-3-Clause OR GPL-2.0)
2 * Copyright(c) 2015-2019 Intel Corporation
 */
5 #include <openssl/sha.h> /* Needed to calculate pre-compute values */
6 #include <openssl/aes.h> /* Needed to calculate pre-compute values */
7 #include <openssl/md5.h> /* Needed to calculate pre-compute values */
8 #include <openssl/evp.h> /* Needed for bpi runt block processing */
10 #include <rte_memcpy.h>
11 #include <rte_common.h>
12 #include <rte_spinlock.h>
13 #include <rte_byteorder.h>
15 #include <rte_malloc.h>
16 #include <rte_crypto_sym.h>
19 #include "qat_sym_session.h"
20 #include "qat_sym_pmd.h"
22 /* SHA1 - 20 bytes - Initialiser state can be found in FIPS stds 180-2 */
/* Big-endian byte dump of the five 32-bit SHA-1 IV words
 * (0x67452301 0xefcdab89 0x98badcfe 0x10325476 0xc3d2e1f0),
 * loaded into the QAT hash state1 for plain (non-HMAC) SHA-1. */
23 static const uint8_t sha1InitialState[] = {
24 0x67, 0x45, 0x23, 0x01, 0xef, 0xcd, 0xab, 0x89, 0x98, 0xba,
25 0xdc, 0xfe, 0x10, 0x32, 0x54, 0x76, 0xc3, 0xd2, 0xe1, 0xf0};
27 /* SHA 224 - 32 bytes - Initialiser state can be found in FIPS stds 180-2 */
/* Eight 32-bit SHA-224 IV words serialised big-endian. */
28 static const uint8_t sha224InitialState[] = {
29 0xc1, 0x05, 0x9e, 0xd8, 0x36, 0x7c, 0xd5, 0x07, 0x30, 0x70, 0xdd,
30 0x17, 0xf7, 0x0e, 0x59, 0x39, 0xff, 0xc0, 0x0b, 0x31, 0x68, 0x58,
31 0x15, 0x11, 0x64, 0xf9, 0x8f, 0xa7, 0xbe, 0xfa, 0x4f, 0xa4};
33 /* SHA 256 - 32 bytes - Initialiser state can be found in FIPS stds 180-2 */
/* Eight 32-bit SHA-256 IV words serialised big-endian. */
34 static const uint8_t sha256InitialState[] = {
35 0x6a, 0x09, 0xe6, 0x67, 0xbb, 0x67, 0xae, 0x85, 0x3c, 0x6e, 0xf3,
36 0x72, 0xa5, 0x4f, 0xf5, 0x3a, 0x51, 0x0e, 0x52, 0x7f, 0x9b, 0x05,
37 0x68, 0x8c, 0x1f, 0x83, 0xd9, 0xab, 0x5b, 0xe0, 0xcd, 0x19};
39 /* SHA 384 - 64 bytes - Initialiser state can be found in FIPS stds 180-2 */
/* Eight 64-bit SHA-384 IV words serialised big-endian. */
40 static const uint8_t sha384InitialState[] = {
41 0xcb, 0xbb, 0x9d, 0x5d, 0xc1, 0x05, 0x9e, 0xd8, 0x62, 0x9a, 0x29,
42 0x2a, 0x36, 0x7c, 0xd5, 0x07, 0x91, 0x59, 0x01, 0x5a, 0x30, 0x70,
43 0xdd, 0x17, 0x15, 0x2f, 0xec, 0xd8, 0xf7, 0x0e, 0x59, 0x39, 0x67,
44 0x33, 0x26, 0x67, 0xff, 0xc0, 0x0b, 0x31, 0x8e, 0xb4, 0x4a, 0x87,
45 0x68, 0x58, 0x15, 0x11, 0xdb, 0x0c, 0x2e, 0x0d, 0x64, 0xf9, 0x8f,
46 0xa7, 0x47, 0xb5, 0x48, 0x1d, 0xbe, 0xfa, 0x4f, 0xa4};
48 /* SHA 512 - 64 bytes - Initialiser state can be found in FIPS stds 180-2 */
/* Eight 64-bit SHA-512 IV words serialised big-endian. */
49 static const uint8_t sha512InitialState[] = {
50 0x6a, 0x09, 0xe6, 0x67, 0xf3, 0xbc, 0xc9, 0x08, 0xbb, 0x67, 0xae,
51 0x85, 0x84, 0xca, 0xa7, 0x3b, 0x3c, 0x6e, 0xf3, 0x72, 0xfe, 0x94,
52 0xf8, 0x2b, 0xa5, 0x4f, 0xf5, 0x3a, 0x5f, 0x1d, 0x36, 0xf1, 0x51,
53 0x0e, 0x52, 0x7f, 0xad, 0xe6, 0x82, 0xd1, 0x9b, 0x05, 0x68, 0x8c,
54 0x2b, 0x3e, 0x6c, 0x1f, 0x1f, 0x83, 0xd9, 0xab, 0xfb, 0x41, 0xbd,
55 0x6b, 0x5b, 0xe0, 0xcd, 0x19, 0x13, 0x7e, 0x21, 0x79};
57 /** Frees a context previously created
58 * Depends on openssl libcrypto
/* NOTE(review): the return type, braces and any NULL guard fall on lines
 * elided from this extract; code below retained verbatim.
 * EVP_CIPHER_CTX_free(NULL) is a documented no-op in OpenSSL, so a NULL
 * bpi_ctx is safe here. */
61 bpi_cipher_ctx_free(void *bpi_ctx)
64 EVP_CIPHER_CTX_free((EVP_CIPHER_CTX *)bpi_ctx);
67 /** Creates a context in either AES or DES in ECB mode
68 * Depends on openssl libcrypto
/* Allocates *ctx, selects the EVP ECB cipher from the algorithm and key
 * length, and initialises it for encryption. NOTE(review): the DES
 * selection body, the NULL check on EVP_CIPHER_CTX_new(), the AES
 * key-length "else" and the error/return paths are on elided lines --
 * confirm against the full source. */
71 bpi_cipher_ctx_init(enum rte_crypto_cipher_algorithm cryptodev_algo,
72 enum rte_crypto_cipher_operation direction __rte_unused,
73 const uint8_t *key, uint16_t key_length, void **ctx)
75 const EVP_CIPHER *algo = NULL;
77 *ctx = EVP_CIPHER_CTX_new();
84 if (cryptodev_algo == RTE_CRYPTO_CIPHER_DES_DOCSISBPI)
87 if (key_length == ICP_QAT_HW_AES_128_KEY_SZ)
88 algo = EVP_aes_128_ecb();
90 algo = EVP_aes_256_ecb();
/* Always the encrypt direction: BPI derives the runt-block keystream by
 * ECB-encrypting the IV regardless of cipher direction. */
92 /* IV will be ECB encrypted whether direction is encrypt or decrypt*/
93 if (EVP_EncryptInit_ex(*ctx, algo, NULL, key, 0) != 1) {
/* Init failed: release the context allocated above before bailing out. */
102 EVP_CIPHER_CTX_free(*ctx);
/* Scan the device capability table (terminated by an entry with op ==
 * RTE_CRYPTO_OP_TYPE_UNDEFINED) for a symmetric cipher capability matching
 * @algo. NOTE(review): the "continue" statements and the return values for
 * found/not-found are on elided lines; presumably returns non-zero when the
 * algorithm is listed -- confirm against the full source. */
107 qat_is_cipher_alg_supported(enum rte_crypto_cipher_algorithm algo,
108 struct qat_sym_dev_private *internals)
111 const struct rte_cryptodev_capabilities *capability;
113 while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
114 RTE_CRYPTO_OP_TYPE_UNDEFINED) {
115 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
118 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_CIPHER)
121 if (capability->sym.cipher.algo == algo)
/* Auth-side twin of qat_is_cipher_alg_supported(): walk the capability
 * table until the UNDEFINED terminator, matching symmetric AUTH entries
 * against @algo. NOTE(review): loop-body continues and return statements
 * are on elided lines; retained verbatim. */
128 qat_is_auth_alg_supported(enum rte_crypto_auth_algorithm algo,
129 struct qat_sym_dev_private *internals)
132 const struct rte_cryptodev_capabilities *capability;
134 while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
135 RTE_CRYPTO_OP_TYPE_UNDEFINED) {
136 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
139 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_AUTH)
142 if (capability->sym.auth.algo == algo)
/* Tear down the per-driver private data of a symmetric session: free the
 * OpenSSL BPI context, scrub the private area, detach it from the generic
 * session and return it to its mempool. NOTE(review): the guard that skips
 * all of this when sess_priv is NULL appears to be on an elided line --
 * confirm against the full source. */
149 qat_sym_session_clear(struct rte_cryptodev *dev,
150 struct rte_cryptodev_sym_session *sess)
152 uint8_t index = dev->driver_id;
153 void *sess_priv = get_sym_session_private_data(sess, index);
154 struct qat_sym_session *s = (struct qat_sym_session *)sess_priv;
158 bpi_cipher_ctx_free(s->bpi_ctx);
/* Zero the whole private area so key material is not handed to the next
 * user of this mempool object. */
159 memset(s, 0, qat_sym_session_get_private_size(dev));
160 struct rte_mempool *sess_mp = rte_mempool_from_obj(sess_priv);
162 set_sym_session_private_data(sess, index, NULL);
163 rte_mempool_put(sess_mp, sess_priv);
/* Map an xform chain onto a QAT firmware LA command id:
 *  - single CIPHER/AUTH xform -> plain CIPHER / AUTH command;
 *  - AEAD -> CIPHER_HASH or HASH_CIPHER depending on algorithm+direction
 *    (GCM encrypts then hashes, CCM hashes then encrypts, and vice versa
 *    for decrypt);
 *  - two-element chains -> command matching the chain order.
 * NOTE(review): braces, "else" keywords and the fall-through error return
 * (presumably -1) are on elided lines; logic retained verbatim. */
168 qat_get_cmd_id(const struct rte_crypto_sym_xform *xform)
171 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER && xform->next == NULL)
172 return ICP_QAT_FW_LA_CMD_CIPHER;
174 /* Authentication Only */
175 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH && xform->next == NULL)
176 return ICP_QAT_FW_LA_CMD_AUTH;
179 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
180 /* AES-GCM and AES-CCM works with different direction
181 * GCM first encrypts and generate hash where AES-CCM
182 * first generate hash and encrypts. Similar relation
183 * applies to decryption.
185 if (xform->aead.op == RTE_CRYPTO_AEAD_OP_ENCRYPT)
186 if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
187 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
189 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
191 if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
192 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
194 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
197 if (xform->next == NULL)
200 /* Cipher then Authenticate */
201 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
202 xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
203 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
205 /* Authenticate then Cipher */
206 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH &&
207 xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
208 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
/* Find the AUTH xform in a (possibly chained) xform list.
 * NOTE(review): the return of &xform->auth and the chain-walk (if any)
 * are on elided lines; only the type test is visible here. */
213 static struct rte_crypto_auth_xform *
214 qat_get_auth_xform(struct rte_crypto_sym_xform *xform)
217 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH)
/* Find the CIPHER xform in a (possibly chained) xform list and return a
 * pointer to its cipher member. NOTE(review): the not-found path and any
 * chain-walk are on elided lines. */
226 static struct rte_crypto_cipher_xform *
227 qat_get_cipher_xform(struct rte_crypto_sym_xform *xform)
230 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
231 return &xform->cipher;
/* Populate the cipher half of a QAT session from the CIPHER xform:
 * validate the key size, select qat_cipher_alg + qat_mode per algorithm,
 * record IV offset/length and direction, then build the cipher content
 * descriptor. DOCSIS BPI algorithms additionally create an OpenSSL ECB
 * context (session->bpi_ctx) for runt-block processing.
 * NOTE(review): "break" statements, "ret = -EINVAL"/goto-error lines and
 * the error label plumbing sit on lines elided from this extract; the
 * visible logic is retained verbatim. */
240 qat_sym_session_configure_cipher(struct rte_cryptodev *dev,
241 struct rte_crypto_sym_xform *xform,
242 struct qat_sym_session *session)
244 struct qat_sym_dev_private *internals = dev->data->dev_private;
245 struct rte_crypto_cipher_xform *cipher_xform = NULL;
248 /* Get cipher xform from crypto xform chain */
249 cipher_xform = qat_get_cipher_xform(xform);
251 session->cipher_iv.offset = cipher_xform->iv.offset;
252 session->cipher_iv.length = cipher_xform->iv.length;
254 switch (cipher_xform->algo) {
255 case RTE_CRYPTO_CIPHER_AES_CBC:
256 if (qat_sym_validate_aes_key(cipher_xform->key.length,
257 &session->qat_cipher_alg) != 0) {
258 QAT_LOG(ERR, "Invalid AES cipher key size");
262 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
264 case RTE_CRYPTO_CIPHER_AES_CTR:
265 if (qat_sym_validate_aes_key(cipher_xform->key.length,
266 &session->qat_cipher_alg) != 0) {
267 QAT_LOG(ERR, "Invalid AES cipher key size");
271 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
273 case RTE_CRYPTO_CIPHER_SNOW3G_UEA2:
274 if (qat_sym_validate_snow3g_key(cipher_xform->key.length,
275 &session->qat_cipher_alg) != 0) {
276 QAT_LOG(ERR, "Invalid SNOW 3G cipher key size");
280 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
282 case RTE_CRYPTO_CIPHER_NULL:
283 session->qat_cipher_alg = ICP_QAT_HW_CIPHER_ALGO_NULL;
284 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
286 case RTE_CRYPTO_CIPHER_KASUMI_F8:
287 if (qat_sym_validate_kasumi_key(cipher_xform->key.length,
288 &session->qat_cipher_alg) != 0) {
289 QAT_LOG(ERR, "Invalid KASUMI cipher key size");
293 session->qat_mode = ICP_QAT_HW_CIPHER_F8_MODE;
295 case RTE_CRYPTO_CIPHER_3DES_CBC:
296 if (qat_sym_validate_3des_key(cipher_xform->key.length,
297 &session->qat_cipher_alg) != 0) {
298 QAT_LOG(ERR, "Invalid 3DES cipher key size");
302 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
304 case RTE_CRYPTO_CIPHER_DES_CBC:
305 if (qat_sym_validate_des_key(cipher_xform->key.length,
306 &session->qat_cipher_alg) != 0) {
307 QAT_LOG(ERR, "Invalid DES cipher key size");
311 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
313 case RTE_CRYPTO_CIPHER_3DES_CTR:
314 if (qat_sym_validate_3des_key(cipher_xform->key.length,
315 &session->qat_cipher_alg) != 0) {
316 QAT_LOG(ERR, "Invalid 3DES cipher key size");
320 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
/* DOCSIS BPI: an OpenSSL ECB context is created first for software
 * handling of the trailing runt block. */
322 case RTE_CRYPTO_CIPHER_DES_DOCSISBPI:
323 ret = bpi_cipher_ctx_init(
326 cipher_xform->key.data,
327 cipher_xform->key.length,
330 QAT_LOG(ERR, "failed to create DES BPI ctx");
333 if (qat_sym_validate_des_key(cipher_xform->key.length,
334 &session->qat_cipher_alg) != 0) {
335 QAT_LOG(ERR, "Invalid DES cipher key size");
339 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
341 case RTE_CRYPTO_CIPHER_AES_DOCSISBPI:
342 ret = bpi_cipher_ctx_init(
345 cipher_xform->key.data,
346 cipher_xform->key.length,
349 QAT_LOG(ERR, "failed to create AES BPI ctx");
352 if (qat_sym_validate_aes_docsisbpi_key(cipher_xform->key.length,
353 &session->qat_cipher_alg) != 0) {
354 QAT_LOG(ERR, "Invalid AES DOCSISBPI key size");
358 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
/* ZUC support is generation-dependent, so check device capabilities
 * before accepting the key. */
360 case RTE_CRYPTO_CIPHER_ZUC_EEA3:
361 if (!qat_is_cipher_alg_supported(
362 cipher_xform->algo, internals)) {
363 QAT_LOG(ERR, "%s not supported on this device",
364 rte_crypto_cipher_algorithm_strings
365 [cipher_xform->algo]);
369 if (qat_sym_validate_zuc_key(cipher_xform->key.length,
370 &session->qat_cipher_alg) != 0) {
371 QAT_LOG(ERR, "Invalid ZUC cipher key size");
375 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
/* XTS keys are twice the AES key size (two half-keys); 192-bit halves
 * are explicitly rejected. */
377 case RTE_CRYPTO_CIPHER_AES_XTS:
378 if ((cipher_xform->key.length/2) == ICP_QAT_HW_AES_192_KEY_SZ) {
379 QAT_LOG(ERR, "AES-XTS-192 not supported");
383 if (qat_sym_validate_aes_key((cipher_xform->key.length/2),
384 &session->qat_cipher_alg) != 0) {
385 QAT_LOG(ERR, "Invalid AES-XTS cipher key size");
389 session->qat_mode = ICP_QAT_HW_CIPHER_XTS_MODE;
391 case RTE_CRYPTO_CIPHER_3DES_ECB:
392 case RTE_CRYPTO_CIPHER_AES_ECB:
393 case RTE_CRYPTO_CIPHER_AES_F8:
394 case RTE_CRYPTO_CIPHER_ARC4:
395 QAT_LOG(ERR, "Crypto QAT PMD: Unsupported Cipher alg %u",
400 QAT_LOG(ERR, "Crypto: Undefined Cipher specified %u\n",
406 if (cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
407 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
409 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
/* Build the cipher content descriptor from the validated key. */
411 if (qat_sym_session_aead_create_cd_cipher(session,
412 cipher_xform->key.data,
413 cipher_xform->key.length))
/* Error path: release the BPI context so the session does not leak it. */
421 if (session->bpi_ctx) {
422 bpi_cipher_ctx_free(session->bpi_ctx);
423 session->bpi_ctx = NULL;
/* Generic session-configure entry point: take a private-data object from
 * the mempool, fill it via qat_sym_session_set_parameters(), and attach it
 * to the generic session under this driver's id. On parameter failure the
 * object is returned to the pool. NOTE(review): return statements and the
 * final success return are on elided lines. */
429 qat_sym_session_configure(struct rte_cryptodev *dev,
430 struct rte_crypto_sym_xform *xform,
431 struct rte_cryptodev_sym_session *sess,
432 struct rte_mempool *mempool)
434 void *sess_private_data;
437 if (rte_mempool_get(mempool, &sess_private_data)) {
439 "Couldn't get object from session mempool");
443 ret = qat_sym_session_set_parameters(dev, xform, sess_private_data);
446 "Crypto QAT PMD: failed to configure session parameters");
448 /* Return session to mempool */
449 rte_mempool_put(mempool, sess_private_data);
453 set_sym_session_private_data(sess, dev->driver_id,
/* Enable the extended-protocol hash flags used for mixed wireless
 * hash+cipher sessions: set the "extended flags used" bit in request
 * header LW 1, OR the given hash flag into cd_ctrl LW 28, then fix the
 * proto / ZUC-proto flag pair according to the session's cipher algorithm.
 * NOTE(review): the hash_flag parameter declaration, "break" statements
 * and the default label are on elided lines; retained verbatim. */
460 qat_sym_session_set_ext_hash_flags(struct qat_sym_session *session,
463 struct icp_qat_fw_comn_req_hdr *header = &session->fw_req.comn_hdr;
464 struct icp_qat_fw_cipher_auth_cd_ctrl_hdr *cd_ctrl =
465 (struct icp_qat_fw_cipher_auth_cd_ctrl_hdr *)
466 session->fw_req.cd_ctrl.content_desc_ctrl_lw;
468 /* Set the Use Extended Protocol Flags bit in LW 1 */
469 QAT_FIELD_SET(header->comn_req_flags,
470 QAT_COMN_EXT_FLAGS_USED,
471 QAT_COMN_EXT_FLAGS_BITPOS,
472 QAT_COMN_EXT_FLAGS_MASK);
474 /* Set Hash Flags in LW 28 */
475 cd_ctrl->hash_flags |= hash_flag;
477 /* Set proto flags in LW 1 */
478 switch (session->qat_cipher_alg) {
479 case ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2:
480 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
481 ICP_QAT_FW_LA_SNOW_3G_PROTO);
482 ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
483 header->serv_specif_flags, 0);
485 case ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3:
486 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
487 ICP_QAT_FW_LA_NO_PROTO);
488 ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
489 header->serv_specif_flags,
490 ICP_QAT_FW_LA_ZUC_3G_PROTO);
/* Any other cipher: clear both protocol flag fields. */
493 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
494 ICP_QAT_FW_LA_NO_PROTO);
495 ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
496 header->serv_specif_flags, 0);
/* Detect "mixed" wireless algorithm combinations (ZUC or SNOW3G hash
 * paired with a different cipher, or AES-CMAC/NULL hash with a wireless
 * cipher). Such sessions need the extended hash flags and a minimum
 * device generation: GEN2 when the device advertises the mixed-crypto
 * capability, otherwise GEN3. */
502 qat_sym_session_handle_mixed(const struct rte_cryptodev *dev,
503 struct qat_sym_session *session)
505 const struct qat_sym_dev_private *qat_private = dev->data->dev_private;
506 enum qat_device_gen min_dev_gen = (qat_private->internal_capabilities &
507 QAT_SYM_CAP_MIXED_CRYPTO) ? QAT_GEN2 : QAT_GEN3;
509 if (session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3 &&
510 session->qat_cipher_alg !=
511 ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
512 session->min_qat_dev_gen = min_dev_gen;
513 qat_sym_session_set_ext_hash_flags(session,
514 1 << ICP_QAT_FW_AUTH_HDR_FLAG_ZUC_EIA3_BITPOS);
515 } else if (session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2 &&
516 session->qat_cipher_alg !=
517 ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
518 session->min_qat_dev_gen = min_dev_gen;
519 qat_sym_session_set_ext_hash_flags(session,
520 1 << ICP_QAT_FW_AUTH_HDR_FLAG_SNOW3G_UIA2_BITPOS);
521 } else if ((session->aes_cmac ||
522 session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_NULL) &&
523 (session->qat_cipher_alg ==
524 ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2 ||
525 session->qat_cipher_alg ==
526 ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)) {
527 session->min_qat_dev_gen = min_dev_gen;
/* No extra hash flag needed, but ext flags/proto bits must still be set. */
528 qat_sym_session_set_ext_hash_flags(session, 0);
/* Fill a qat_sym_session from the xform chain: resolve the physical
 * address of the content descriptor inside the session object, pick the
 * firmware command id, then dispatch to the cipher / auth / AEAD
 * configure helpers (handling mixed hash+cipher combinations for chained
 * commands). NOTE(review): return statements, "break"s and the
 * configure-order details between the visible lines are elided. */
533 qat_sym_session_set_parameters(struct rte_cryptodev *dev,
534 struct rte_crypto_sym_xform *xform, void *session_private)
536 struct qat_sym_session *session = session_private;
540 /* Verify the session physical address is known */
541 rte_iova_t session_paddr = rte_mempool_virt2iova(session);
542 if (session_paddr == 0 || session_paddr == RTE_BAD_IOVA) {
544 "Session physical address unknown. Bad memory pool.");
548 /* Set context descriptor physical address */
549 session->cd_paddr = session_paddr +
550 offsetof(struct qat_sym_session, cd);
/* Default; individual algorithms may raise this later. */
552 session->min_qat_dev_gen = QAT_GEN1;
554 /* Get requested QAT command id */
555 qat_cmd_id = qat_get_cmd_id(xform);
556 if (qat_cmd_id < 0 || qat_cmd_id >= ICP_QAT_FW_LA_CMD_DELIMITER) {
557 QAT_LOG(ERR, "Unsupported xform chain requested");
560 session->qat_cmd = (enum icp_qat_fw_la_cmd_id)qat_cmd_id;
561 switch (session->qat_cmd) {
562 case ICP_QAT_FW_LA_CMD_CIPHER:
563 ret = qat_sym_session_configure_cipher(dev, xform, session);
567 case ICP_QAT_FW_LA_CMD_AUTH:
568 ret = qat_sym_session_configure_auth(dev, xform, session);
/* Chained commands: AEAD xforms go to the AEAD helper; otherwise the
 * cipher and auth halves are configured in command order. */
572 case ICP_QAT_FW_LA_CMD_CIPHER_HASH:
573 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
574 ret = qat_sym_session_configure_aead(dev, xform,
579 ret = qat_sym_session_configure_cipher(dev,
583 ret = qat_sym_session_configure_auth(dev,
587 /* Special handling of mixed hash+cipher algorithms */
588 qat_sym_session_handle_mixed(dev, session);
591 case ICP_QAT_FW_LA_CMD_HASH_CIPHER:
592 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
593 ret = qat_sym_session_configure_aead(dev, xform,
598 ret = qat_sym_session_configure_auth(dev,
602 ret = qat_sym_session_configure_cipher(dev,
606 /* Special handling of mixed hash+cipher algorithms */
607 qat_sym_session_handle_mixed(dev, session);
/* All remaining firmware services are unimplemented by this PMD. */
610 case ICP_QAT_FW_LA_CMD_TRNG_GET_RANDOM:
611 case ICP_QAT_FW_LA_CMD_TRNG_TEST:
612 case ICP_QAT_FW_LA_CMD_SSL3_KEY_DERIVE:
613 case ICP_QAT_FW_LA_CMD_TLS_V1_1_KEY_DERIVE:
614 case ICP_QAT_FW_LA_CMD_TLS_V1_2_KEY_DERIVE:
615 case ICP_QAT_FW_LA_CMD_MGF1:
616 case ICP_QAT_FW_LA_CMD_AUTH_PRE_COMP:
617 case ICP_QAT_FW_LA_CMD_CIPHER_PRE_COMP:
618 case ICP_QAT_FW_LA_CMD_DELIMITER:
619 QAT_LOG(ERR, "Unsupported Service %u",
623 QAT_LOG(ERR, "Unsupported Service %u",
/* On GEN3 devices with a 12-byte (96-bit) GCM IV, convert an AES-GCM AEAD
 * session into the faster Single-Pass mode: the whole operation becomes
 * one CIPHER command in AEAD cipher mode, with AAD/digest sizes carried in
 * the cipher config and request parameters instead of a separate hash
 * setup. NOTE(review): returns, braces and the trailing "else"/success
 * return are on elided lines; retained verbatim. */
632 qat_sym_session_handle_single_pass(struct qat_sym_dev_private *internals,
633 struct qat_sym_session *session,
634 struct rte_crypto_aead_xform *aead_xform)
636 enum qat_device_gen qat_dev_gen = internals->qat_dev->qat_dev_gen;
638 if (qat_dev_gen == QAT_GEN3 &&
639 aead_xform->iv.length == QAT_AES_GCM_SPC_IV_SIZE) {
640 /* Use faster Single-Pass GCM */
641 struct icp_qat_fw_la_cipher_req_params *cipher_param =
642 (void *) &session->fw_req.serv_specif_rqpars;
644 session->is_single_pass = 1;
645 session->min_qat_dev_gen = QAT_GEN3;
646 session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER;
647 session->qat_mode = ICP_QAT_HW_CIPHER_AEAD_MODE;
648 session->cipher_iv.offset = aead_xform->iv.offset;
649 session->cipher_iv.length = aead_xform->iv.length;
650 if (qat_sym_session_aead_create_cd_cipher(session,
651 aead_xform->key.data, aead_xform->key.length))
653 session->aad_len = aead_xform->aad_length;
654 session->digest_length = aead_xform->digest_length;
/* Direction also decides whether the device generates or verifies
 * the auth tag, and which result flag the firmware returns. */
655 if (aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
656 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
657 session->auth_op = ICP_QAT_HW_AUTH_GENERATE;
658 ICP_QAT_FW_LA_RET_AUTH_SET(
659 session->fw_req.comn_hdr.serv_specif_flags,
660 ICP_QAT_FW_LA_RET_AUTH_RES);
662 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
663 session->auth_op = ICP_QAT_HW_AUTH_VERIFY;
664 ICP_QAT_FW_LA_CMP_AUTH_SET(
665 session->fw_req.comn_hdr.serv_specif_flags,
666 ICP_QAT_FW_LA_CMP_AUTH_RES);
668 ICP_QAT_FW_LA_SINGLE_PASS_PROTO_FLAG_SET(
669 session->fw_req.comn_hdr.serv_specif_flags,
670 ICP_QAT_FW_LA_SINGLE_PASS_PROTO);
671 ICP_QAT_FW_LA_PROTO_SET(
672 session->fw_req.comn_hdr.serv_specif_flags,
673 ICP_QAT_FW_LA_NO_PROTO);
674 ICP_QAT_FW_LA_GCM_IV_LEN_FLAG_SET(
675 session->fw_req.comn_hdr.serv_specif_flags,
676 ICP_QAT_FW_LA_GCM_IV_LEN_12_OCTETS);
677 session->fw_req.comn_hdr.service_cmd_id =
678 ICP_QAT_FW_LA_CMD_CIPHER;
679 session->cd.cipher.cipher_config.val =
680 ICP_QAT_HW_CIPHER_CONFIG_BUILD(
681 ICP_QAT_HW_CIPHER_AEAD_MODE,
682 session->qat_cipher_alg,
683 ICP_QAT_HW_CIPHER_NO_CONVERT,
/* Digest length is packed into the cipher config word in SPC mode. */
685 QAT_FIELD_SET(session->cd.cipher.cipher_config.val,
686 aead_xform->digest_length,
687 QAT_CIPHER_AEAD_HASH_CMP_LEN_BITPOS,
688 QAT_CIPHER_AEAD_HASH_CMP_LEN_MASK);
689 session->cd.cipher.cipher_config.reserved =
690 ICP_QAT_HW_CIPHER_CONFIG_BUILD_UPPER(
691 aead_xform->aad_length);
692 cipher_param->spc_aad_sz = aead_xform->aad_length;
693 cipher_param->spc_auth_res_sz = aead_xform->digest_length;
/* Populate the auth half of a QAT session from the AUTH xform: select
 * qat_hash_alg (MODE0 for plain digests, MODE1 for keyed), then build the
 * auth content descriptor. AES-GMAC is special-cased: it is implemented
 * as a GCM cipher+hash pair using the auth key as the cipher key, with
 * descriptor build order depending on generate vs verify.
 * NOTE(review): "break"s, error returns and several create_cd_auth
 * arguments fall on lines elided from this extract; retained verbatim. */
699 qat_sym_session_configure_auth(struct rte_cryptodev *dev,
700 struct rte_crypto_sym_xform *xform,
701 struct qat_sym_session *session)
703 struct rte_crypto_auth_xform *auth_xform = qat_get_auth_xform(xform);
704 struct qat_sym_dev_private *internals = dev->data->dev_private;
705 const uint8_t *key_data = auth_xform->key.data;
706 uint8_t key_length = auth_xform->key.length;
707 session->aes_cmac = 0;
709 session->auth_iv.offset = auth_xform->iv.offset;
710 session->auth_iv.length = auth_xform->iv.length;
/* Default keyed mode; plain-hash cases below override to MODE0. */
711 session->auth_mode = ICP_QAT_HW_AUTH_MODE1;
713 switch (auth_xform->algo) {
714 case RTE_CRYPTO_AUTH_SHA1:
715 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA1;
716 session->auth_mode = ICP_QAT_HW_AUTH_MODE0;
718 case RTE_CRYPTO_AUTH_SHA224:
719 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA224;
720 session->auth_mode = ICP_QAT_HW_AUTH_MODE0;
722 case RTE_CRYPTO_AUTH_SHA256:
723 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA256;
724 session->auth_mode = ICP_QAT_HW_AUTH_MODE0;
726 case RTE_CRYPTO_AUTH_SHA384:
727 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA384;
728 session->auth_mode = ICP_QAT_HW_AUTH_MODE0;
730 case RTE_CRYPTO_AUTH_SHA512:
731 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA512;
732 session->auth_mode = ICP_QAT_HW_AUTH_MODE0;
734 case RTE_CRYPTO_AUTH_SHA1_HMAC:
735 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA1;
737 case RTE_CRYPTO_AUTH_SHA224_HMAC:
738 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA224;
740 case RTE_CRYPTO_AUTH_SHA256_HMAC:
741 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA256;
743 case RTE_CRYPTO_AUTH_SHA384_HMAC:
744 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA384;
746 case RTE_CRYPTO_AUTH_SHA512_HMAC:
747 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA512;
749 case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
750 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
/* CMAC reuses the XCBC hardware path; aes_cmac selects the CMAC
 * key-derivation in the precompute step. */
752 case RTE_CRYPTO_AUTH_AES_CMAC:
753 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
754 session->aes_cmac = 1;
756 case RTE_CRYPTO_AUTH_AES_GMAC:
757 if (qat_sym_validate_aes_key(auth_xform->key.length,
758 &session->qat_cipher_alg) != 0) {
759 QAT_LOG(ERR, "Invalid AES key size");
762 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
763 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
/* Zero-length IV means the default GCM J0 (12-byte) IV length. */
764 if (session->auth_iv.length == 0)
765 session->auth_iv.length = AES_GCM_J0_LEN;
768 case RTE_CRYPTO_AUTH_SNOW3G_UIA2:
769 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2;
771 case RTE_CRYPTO_AUTH_MD5_HMAC:
772 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_MD5;
774 case RTE_CRYPTO_AUTH_NULL:
775 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_NULL;
777 case RTE_CRYPTO_AUTH_KASUMI_F9:
778 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_KASUMI_F9;
780 case RTE_CRYPTO_AUTH_ZUC_EIA3:
781 if (!qat_is_auth_alg_supported(auth_xform->algo, internals)) {
782 QAT_LOG(ERR, "%s not supported on this device",
783 rte_crypto_auth_algorithm_strings
787 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3;
789 case RTE_CRYPTO_AUTH_MD5:
790 case RTE_CRYPTO_AUTH_AES_CBC_MAC:
791 QAT_LOG(ERR, "Crypto: Unsupported hash alg %u",
795 QAT_LOG(ERR, "Crypto: Undefined Hash algo %u specified",
/* GMAC: build a GCM cipher CD + auth CD; CD build order follows the
 * generate (cipher-then-hash) vs verify (hash-then-cipher) direction. */
800 if (auth_xform->algo == RTE_CRYPTO_AUTH_AES_GMAC) {
801 if (auth_xform->op == RTE_CRYPTO_AUTH_OP_GENERATE) {
802 session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER_HASH;
803 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
805 * It needs to create cipher desc content first,
806 * then authentication
809 if (qat_sym_session_aead_create_cd_cipher(session,
810 auth_xform->key.data,
811 auth_xform->key.length))
814 if (qat_sym_session_aead_create_cd_auth(session,
818 auth_xform->digest_length,
822 session->qat_cmd = ICP_QAT_FW_LA_CMD_HASH_CIPHER;
823 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
825 * It needs to create authentication desc content first,
829 if (qat_sym_session_aead_create_cd_auth(session,
833 auth_xform->digest_length,
837 if (qat_sym_session_aead_create_cd_cipher(session,
838 auth_xform->key.data,
839 auth_xform->key.length))
842 /* Restore to authentication only only */
843 session->qat_cmd = ICP_QAT_FW_LA_CMD_AUTH;
845 if (qat_sym_session_aead_create_cd_auth(session,
849 auth_xform->digest_length,
854 session->digest_length = auth_xform->digest_length;
/* Populate a QAT session from an AEAD xform (AES-GCM or AES-CCM): the
 * AEAD IV is stored in the cipher IV slot, key size is validated, then --
 * unless single-pass GCM takes over on GEN3 -- the cipher and auth
 * content descriptors are built in the order implied by algorithm and
 * direction (GCM encrypt / CCM decrypt run cipher-then-hash; the
 * opposite combinations run hash-then-cipher).
 * NOTE(review): "break"s, error returns and the single-pass early-return
 * plumbing are on elided lines; retained verbatim. */
859 qat_sym_session_configure_aead(struct rte_cryptodev *dev,
860 struct rte_crypto_sym_xform *xform,
861 struct qat_sym_session *session)
863 struct rte_crypto_aead_xform *aead_xform = &xform->aead;
864 enum rte_crypto_auth_operation crypto_operation;
867 * Store AEAD IV parameters as cipher IV,
868 * to avoid unnecessary memory usage
870 session->cipher_iv.offset = xform->aead.iv.offset;
871 session->cipher_iv.length = xform->aead.iv.length;
873 session->auth_mode = ICP_QAT_HW_AUTH_MODE1;
875 switch (aead_xform->algo) {
876 case RTE_CRYPTO_AEAD_AES_GCM:
877 if (qat_sym_validate_aes_key(aead_xform->key.length,
878 &session->qat_cipher_alg) != 0) {
879 QAT_LOG(ERR, "Invalid AES key size");
882 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
883 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
/* Zero-length IV means the default GCM J0 (12-byte) IV length. */
884 if (session->cipher_iv.length == 0)
885 session->cipher_iv.length = AES_GCM_J0_LEN;
888 case RTE_CRYPTO_AEAD_AES_CCM:
889 if (qat_sym_validate_aes_key(aead_xform->key.length,
890 &session->qat_cipher_alg) != 0) {
891 QAT_LOG(ERR, "Invalid AES key size");
894 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
895 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC;
898 QAT_LOG(ERR, "Crypto: Undefined AEAD specified %u\n",
903 session->is_single_pass = 0;
904 if (aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) {
905 /* Use faster Single-Pass GCM if possible */
906 int res = qat_sym_session_handle_single_pass(
907 dev->data->dev_private, session, aead_xform);
/* Single-pass configured everything; skip the two-CD path below. */
910 if (session->is_single_pass)
914 if ((aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT &&
915 aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) ||
916 (aead_xform->op == RTE_CRYPTO_AEAD_OP_DECRYPT &&
917 aead_xform->algo == RTE_CRYPTO_AEAD_AES_CCM)) {
918 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
920 * It needs to create cipher desc content first,
921 * then authentication
923 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
924 RTE_CRYPTO_AUTH_OP_GENERATE : RTE_CRYPTO_AUTH_OP_VERIFY;
926 if (qat_sym_session_aead_create_cd_cipher(session,
927 aead_xform->key.data,
928 aead_xform->key.length))
931 if (qat_sym_session_aead_create_cd_auth(session,
932 aead_xform->key.data,
933 aead_xform->key.length,
934 aead_xform->aad_length,
935 aead_xform->digest_length,
939 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
941 * It needs to create authentication desc content first,
945 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
946 RTE_CRYPTO_AUTH_OP_VERIFY : RTE_CRYPTO_AUTH_OP_GENERATE;
948 if (qat_sym_session_aead_create_cd_auth(session,
949 aead_xform->key.data,
950 aead_xform->key.length,
951 aead_xform->aad_length,
952 aead_xform->digest_length,
956 if (qat_sym_session_aead_create_cd_cipher(session,
957 aead_xform->key.data,
958 aead_xform->key.length))
962 session->digest_length = aead_xform->digest_length;
/* Size of the per-session private data, rounded up to an 8-byte multiple
 * so mempool objects keep the structure's alignment. */
966 unsigned int qat_sym_session_get_private_size(
967 struct rte_cryptodev *dev __rte_unused)
969 return RTE_ALIGN_CEIL(sizeof(struct qat_sym_session), 8);
972 /* returns block size in bytes per cipher algo */
/* NOTE(review): the default-case return (an error code) is on an elided
 * line; retained verbatim. */
973 int qat_cipher_get_block_size(enum icp_qat_hw_cipher_algo qat_cipher_alg)
975 switch (qat_cipher_alg) {
976 case ICP_QAT_HW_CIPHER_ALGO_DES:
977 return ICP_QAT_HW_DES_BLK_SZ;
978 case ICP_QAT_HW_CIPHER_ALGO_3DES:
979 return ICP_QAT_HW_3DES_BLK_SZ;
980 case ICP_QAT_HW_CIPHER_ALGO_AES128:
981 case ICP_QAT_HW_CIPHER_ALGO_AES192:
982 case ICP_QAT_HW_CIPHER_ALGO_AES256:
983 return ICP_QAT_HW_AES_BLK_SZ;
985 QAT_LOG(ERR, "invalid block cipher alg %u", qat_cipher_alg);
992 * Returns size in bytes per hash algo for state1 size field in cd_ctrl
993 * This is digest size rounded up to nearest quadword
/* DELIMITER is used by callers that need the worst-case (maximum) size,
 * e.g. for sizing stack buffers. */
995 static int qat_hash_get_state1_size(enum icp_qat_hw_auth_algo qat_hash_alg)
997 switch (qat_hash_alg) {
998 case ICP_QAT_HW_AUTH_ALGO_SHA1:
999 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA1_STATE1_SZ,
1000 QAT_HW_DEFAULT_ALIGNMENT);
1001 case ICP_QAT_HW_AUTH_ALGO_SHA224:
1002 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA224_STATE1_SZ,
1003 QAT_HW_DEFAULT_ALIGNMENT);
1004 case ICP_QAT_HW_AUTH_ALGO_SHA256:
1005 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA256_STATE1_SZ,
1006 QAT_HW_DEFAULT_ALIGNMENT);
1007 case ICP_QAT_HW_AUTH_ALGO_SHA384:
1008 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA384_STATE1_SZ,
1009 QAT_HW_DEFAULT_ALIGNMENT);
1010 case ICP_QAT_HW_AUTH_ALGO_SHA512:
1011 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
1012 QAT_HW_DEFAULT_ALIGNMENT);
1013 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
1014 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ,
1015 QAT_HW_DEFAULT_ALIGNMENT);
1016 case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
1017 case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
1018 return QAT_HW_ROUND_UP(ICP_QAT_HW_GALOIS_128_STATE1_SZ,
1019 QAT_HW_DEFAULT_ALIGNMENT);
1020 case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
1021 return QAT_HW_ROUND_UP(ICP_QAT_HW_ZUC_3G_EIA3_STATE1_SZ,
1022 QAT_HW_DEFAULT_ALIGNMENT);
1023 case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
1024 return QAT_HW_ROUND_UP(ICP_QAT_HW_SNOW_3G_UIA2_STATE1_SZ,
1025 QAT_HW_DEFAULT_ALIGNMENT);
1026 case ICP_QAT_HW_AUTH_ALGO_MD5:
1027 return QAT_HW_ROUND_UP(ICP_QAT_HW_MD5_STATE1_SZ,
1028 QAT_HW_DEFAULT_ALIGNMENT);
1029 case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
1030 return QAT_HW_ROUND_UP(ICP_QAT_HW_KASUMI_F9_STATE1_SZ,
1031 QAT_HW_DEFAULT_ALIGNMENT);
1032 case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
1033 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_CBC_MAC_STATE1_SZ,
1034 QAT_HW_DEFAULT_ALIGNMENT);
1035 case ICP_QAT_HW_AUTH_ALGO_NULL:
1036 return QAT_HW_ROUND_UP(ICP_QAT_HW_NULL_STATE1_SZ,
1037 QAT_HW_DEFAULT_ALIGNMENT);
1038 case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
1039 /* return maximum state1 size in this case */
1040 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
1041 QAT_HW_DEFAULT_ALIGNMENT);
1043 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
1049 /* returns digest size in bytes per hash algo */
/* DELIMITER returns the maximum (SHA-512) size for worst-case buffers.
 * NOTE(review): the default-case error return is on an elided line. */
1050 static int qat_hash_get_digest_size(enum icp_qat_hw_auth_algo qat_hash_alg)
1052 switch (qat_hash_alg) {
1053 case ICP_QAT_HW_AUTH_ALGO_SHA1:
1054 return ICP_QAT_HW_SHA1_STATE1_SZ;
1055 case ICP_QAT_HW_AUTH_ALGO_SHA224:
1056 return ICP_QAT_HW_SHA224_STATE1_SZ;
1057 case ICP_QAT_HW_AUTH_ALGO_SHA256:
1058 return ICP_QAT_HW_SHA256_STATE1_SZ;
1059 case ICP_QAT_HW_AUTH_ALGO_SHA384:
1060 return ICP_QAT_HW_SHA384_STATE1_SZ;
1061 case ICP_QAT_HW_AUTH_ALGO_SHA512:
1062 return ICP_QAT_HW_SHA512_STATE1_SZ;
1063 case ICP_QAT_HW_AUTH_ALGO_MD5:
1064 return ICP_QAT_HW_MD5_STATE1_SZ;
1065 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
1066 return ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1067 case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
1068 /* return maximum digest size in this case */
1069 return ICP_QAT_HW_SHA512_STATE1_SZ;
1071 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
1077 /* returns block size in byes per hash algo */
/* Uses OpenSSL *_CBLOCK constants; SHA-1/GALOIS/MD5 return values sit on
 * elided lines. DELIMITER returns the maximum (SHA-512) block size. */
1078 static int qat_hash_get_block_size(enum icp_qat_hw_auth_algo qat_hash_alg)
1080 switch (qat_hash_alg) {
1081 case ICP_QAT_HW_AUTH_ALGO_SHA1:
1083 case ICP_QAT_HW_AUTH_ALGO_SHA224:
1084 return SHA256_CBLOCK;
1085 case ICP_QAT_HW_AUTH_ALGO_SHA256:
1086 return SHA256_CBLOCK;
1087 case ICP_QAT_HW_AUTH_ALGO_SHA384:
1088 return SHA512_CBLOCK;
1089 case ICP_QAT_HW_AUTH_ALGO_SHA512:
1090 return SHA512_CBLOCK;
1091 case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
1093 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
1094 return ICP_QAT_HW_AES_BLK_SZ;
1095 case ICP_QAT_HW_AUTH_ALGO_MD5:
1097 case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
1098 /* return maximum block size in this case */
1099 return SHA512_CBLOCK;
1101 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
/* Run one SHA-1 compression over a single block and copy the raw internal
 * state (not a finalised digest) to data_out -- used for HMAC ipad/opad
 * precomputes. NOTE(review): ctx declaration, braces and returns are on
 * elided lines. */
1107 static int partial_hash_sha1(uint8_t *data_in, uint8_t *data_out)
1111 if (!SHA1_Init(&ctx))
1113 SHA1_Transform(&ctx, data_in);
1114 rte_memcpy(data_out, &ctx, SHA_DIGEST_LENGTH);
/* SHA-224 partial hash: one compression, then export the full 32-byte
 * internal state (SHA256_DIGEST_LENGTH, not the truncated 28-byte
 * digest) -- the QAT precompute needs the whole state. */
1118 static int partial_hash_sha224(uint8_t *data_in, uint8_t *data_out)
1122 if (!SHA224_Init(&ctx))
1124 SHA256_Transform(&ctx, data_in);
1125 rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
/* SHA-256 partial hash: one compression over a single block, exporting
 * the 32-byte internal state. */
1129 static int partial_hash_sha256(uint8_t *data_in, uint8_t *data_out)
1133 if (!SHA256_Init(&ctx))
1135 SHA256_Transform(&ctx, data_in);
1136 rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
/* SHA-384 partial hash: one compression, exporting the full 64-byte
 * internal state (SHA512_DIGEST_LENGTH -- SHA-384 shares SHA-512's
 * state width). */
1140 static int partial_hash_sha384(uint8_t *data_in, uint8_t *data_out)
1144 if (!SHA384_Init(&ctx))
1146 SHA512_Transform(&ctx, data_in);
1147 rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
/* SHA-512 partial hash: one compression over a single block, exporting
 * the 64-byte internal state. */
1151 static int partial_hash_sha512(uint8_t *data_in, uint8_t *data_out)
1155 if (!SHA512_Init(&ctx))
1157 SHA512_Transform(&ctx, data_in);
1158 rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
/* MD5 partial hash: one compression over a single block, exporting the
 * 16-byte internal state. MD5 state is little-endian, so (unlike SHA)
 * callers do not byte-swap it -- see partial_hash_compute(). */
1162 static int partial_hash_md5(uint8_t *data_in, uint8_t *data_out)
1166 if (!MD5_Init(&ctx))
1168 MD5_Transform(&ctx, data_in);
1169 rte_memcpy(data_out, &ctx, MD5_DIGEST_LENGTH);
/* Dispatch to the per-algorithm partial-hash helper and convert the
 * exported state to the big-endian layout QAT expects: 32-bit word swaps
 * for SHA-1/224/256, 64-bit word swaps for SHA-384/512, and no swap for
 * MD5 (written straight to data_out). NOTE(review): parameter list,
 * "break"s and return statements are on elided lines; retained verbatim. */
1174 static int partial_hash_compute(enum icp_qat_hw_auth_algo hash_alg,
/* VLA sized by the maximum digest size (DELIMITER case). */
1179 uint8_t digest[qat_hash_get_digest_size(
1180 ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1181 uint32_t *hash_state_out_be32;
1182 uint64_t *hash_state_out_be64;
1185 digest_size = qat_hash_get_digest_size(hash_alg);
1186 if (digest_size <= 0)
1189 hash_state_out_be32 = (uint32_t *)data_out;
1190 hash_state_out_be64 = (uint64_t *)data_out;
1193 case ICP_QAT_HW_AUTH_ALGO_SHA1:
1194 if (partial_hash_sha1(data_in, digest))
1196 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
1197 *hash_state_out_be32 =
1198 rte_bswap32(*(((uint32_t *)digest)+i));
1200 case ICP_QAT_HW_AUTH_ALGO_SHA224:
1201 if (partial_hash_sha224(data_in, digest))
1203 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
1204 *hash_state_out_be32 =
1205 rte_bswap32(*(((uint32_t *)digest)+i));
1207 case ICP_QAT_HW_AUTH_ALGO_SHA256:
1208 if (partial_hash_sha256(data_in, digest))
1210 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
1211 *hash_state_out_be32 =
1212 rte_bswap32(*(((uint32_t *)digest)+i))
1214 case ICP_QAT_HW_AUTH_ALGO_SHA384:
1215 if (partial_hash_sha384(data_in, digest))
1217 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
1218 *hash_state_out_be64 =
1219 rte_bswap64(*(((uint64_t *)digest)+i));
1221 case ICP_QAT_HW_AUTH_ALGO_SHA512:
1222 if (partial_hash_sha512(data_in, digest))
1224 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
1225 *hash_state_out_be64 =
1226 rte_bswap64(*(((uint64_t *)digest)+i));
1228 case ICP_QAT_HW_AUTH_ALGO_MD5:
1229 if (partial_hash_md5(data_in, data_out))
1233 QAT_LOG(ERR, "invalid hash alg %u", hash_alg);
/* HMAC inner/outer pad bytes (RFC 2104) and the number of precomputed
 * AES-XCBC keys; AES_CMAC_SEED is the all-zero block encrypted to derive
 * the CMAC subkeys K1/K2 (RFC 4493).
 */
1239 #define HMAC_IPAD_VALUE 0x36
1240 #define HMAC_OPAD_VALUE 0x5c
1241 #define HASH_XCBC_PRECOMP_KEY_NUM 3
1243 static const uint8_t AES_CMAC_SEED[ICP_QAT_HW_AES_128_KEY_SZ];
1245 static void aes_cmac_key_derive(uint8_t *base, uint8_t *derived)
1249 derived[0] = base[0] << 1;
1250 for (i = 1; i < ICP_QAT_HW_AES_BLK_SZ ; i++) {
1251 derived[i] = base[i] << 1;
1252 derived[i - 1] |= base[i] >> 7;
1256 derived[ICP_QAT_HW_AES_BLK_SZ - 1] ^= QAT_AES_CMAC_CONST_RB;
1259 static int qat_sym_do_precomputes(enum icp_qat_hw_auth_algo hash_alg,
1260 const uint8_t *auth_key,
1261 uint16_t auth_keylen,
1262 uint8_t *p_state_buf,
1263 uint16_t *p_state_len,
1267 uint8_t ipad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1268 uint8_t opad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1271 if (hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC) {
1277 uint8_t k0[ICP_QAT_HW_AES_128_KEY_SZ];
1280 auth_keylen = ICP_QAT_HW_AES_128_KEY_SZ;
1282 in = rte_zmalloc("AES CMAC K1",
1283 ICP_QAT_HW_AES_128_KEY_SZ, 16);
1286 QAT_LOG(ERR, "Failed to alloc memory");
1290 rte_memcpy(in, AES_CMAC_SEED,
1291 ICP_QAT_HW_AES_128_KEY_SZ);
1292 rte_memcpy(p_state_buf, auth_key, auth_keylen);
1294 if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1300 AES_encrypt(in, k0, &enc_key);
1302 k1 = p_state_buf + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1303 k2 = k1 + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1305 aes_cmac_key_derive(k0, k1);
1306 aes_cmac_key_derive(k1, k2);
1308 memset(k0, 0, ICP_QAT_HW_AES_128_KEY_SZ);
1309 *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
1313 static uint8_t qat_aes_xcbc_key_seed[
1314 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ] = {
1315 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1316 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1317 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1318 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1319 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1320 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1324 uint8_t *out = p_state_buf;
1328 in = rte_zmalloc("working mem for key",
1329 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ, 16);
1331 QAT_LOG(ERR, "Failed to alloc memory");
1335 rte_memcpy(in, qat_aes_xcbc_key_seed,
1336 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1337 for (x = 0; x < HASH_XCBC_PRECOMP_KEY_NUM; x++) {
1338 if (AES_set_encrypt_key(auth_key,
1342 (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ));
1344 (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ),
1345 0, ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1348 AES_encrypt(in, out, &enc_key);
1349 in += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1350 out += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1352 *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
1353 rte_free(in - x*ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ);
1357 } else if ((hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_128) ||
1358 (hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_64)) {
1360 uint8_t *out = p_state_buf;
1363 memset(p_state_buf, 0, ICP_QAT_HW_GALOIS_H_SZ +
1364 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1365 ICP_QAT_HW_GALOIS_E_CTR0_SZ);
1366 in = rte_zmalloc("working mem for key",
1367 ICP_QAT_HW_GALOIS_H_SZ, 16);
1369 QAT_LOG(ERR, "Failed to alloc memory");
1373 memset(in, 0, ICP_QAT_HW_GALOIS_H_SZ);
1374 if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1378 AES_encrypt(in, out, &enc_key);
1379 *p_state_len = ICP_QAT_HW_GALOIS_H_SZ +
1380 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1381 ICP_QAT_HW_GALOIS_E_CTR0_SZ;
1386 block_size = qat_hash_get_block_size(hash_alg);
1389 /* init ipad and opad from key and xor with fixed values */
1390 memset(ipad, 0, block_size);
1391 memset(opad, 0, block_size);
1393 if (auth_keylen > (unsigned int)block_size) {
1394 QAT_LOG(ERR, "invalid keylen %u", auth_keylen);
1397 rte_memcpy(ipad, auth_key, auth_keylen);
1398 rte_memcpy(opad, auth_key, auth_keylen);
1400 for (i = 0; i < block_size; i++) {
1401 uint8_t *ipad_ptr = ipad + i;
1402 uint8_t *opad_ptr = opad + i;
1403 *ipad_ptr ^= HMAC_IPAD_VALUE;
1404 *opad_ptr ^= HMAC_OPAD_VALUE;
1407 /* do partial hash of ipad and copy to state1 */
1408 if (partial_hash_compute(hash_alg, ipad, p_state_buf)) {
1409 memset(ipad, 0, block_size);
1410 memset(opad, 0, block_size);
1411 QAT_LOG(ERR, "ipad precompute failed");
1416 * State len is a multiple of 8, so may be larger than the digest.
1417 * Put the partial hash of opad state_len bytes after state1
1419 *p_state_len = qat_hash_get_state1_size(hash_alg);
1420 if (partial_hash_compute(hash_alg, opad, p_state_buf + *p_state_len)) {
1421 memset(ipad, 0, block_size);
1422 memset(opad, 0, block_size);
1423 QAT_LOG(ERR, "opad precompute failed");
1427 /* don't leave data lying around */
1428 memset(ipad, 0, block_size);
1429 memset(opad, 0, block_size);
1434 qat_sym_session_init_common_hdr(struct icp_qat_fw_comn_req_hdr *header,
1435 enum qat_sym_proto_flag proto_flags)
1438 ICP_QAT_FW_COMN_HDR_FLAGS_BUILD(ICP_QAT_FW_COMN_REQ_FLAG_SET);
1439 header->service_type = ICP_QAT_FW_COMN_REQ_CPM_FW_LA;
1440 header->comn_req_flags =
1441 ICP_QAT_FW_COMN_FLAGS_BUILD(QAT_COMN_CD_FLD_TYPE_64BIT_ADR,
1442 QAT_COMN_PTR_TYPE_FLAT);
1443 ICP_QAT_FW_LA_PARTIAL_SET(header->serv_specif_flags,
1444 ICP_QAT_FW_LA_PARTIAL_NONE);
1445 ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(header->serv_specif_flags,
1446 ICP_QAT_FW_CIPH_IV_16BYTE_DATA);
1448 switch (proto_flags) {
1449 case QAT_CRYPTO_PROTO_FLAG_NONE:
1450 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1451 ICP_QAT_FW_LA_NO_PROTO);
1453 case QAT_CRYPTO_PROTO_FLAG_CCM:
1454 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1455 ICP_QAT_FW_LA_CCM_PROTO);
1457 case QAT_CRYPTO_PROTO_FLAG_GCM:
1458 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1459 ICP_QAT_FW_LA_GCM_PROTO);
1461 case QAT_CRYPTO_PROTO_FLAG_SNOW3G:
1462 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1463 ICP_QAT_FW_LA_SNOW_3G_PROTO);
1465 case QAT_CRYPTO_PROTO_FLAG_ZUC:
1466 ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(header->serv_specif_flags,
1467 ICP_QAT_FW_LA_ZUC_3G_PROTO);
1471 ICP_QAT_FW_LA_UPDATE_STATE_SET(header->serv_specif_flags,
1472 ICP_QAT_FW_LA_NO_UPDATE_STATE);
1473 ICP_QAT_FW_LA_DIGEST_IN_BUFFER_SET(header->serv_specif_flags,
1474 ICP_QAT_FW_LA_NO_DIGEST_IN_BUFFER);
1478 * Snow3G and ZUC should never use this function
1479 * and set its protocol flag in both cipher and auth part of content
1480 * descriptor building function
1482 static enum qat_sym_proto_flag
1483 qat_get_crypto_proto_flag(uint16_t flags)
1485 int proto = ICP_QAT_FW_LA_PROTO_GET(flags);
1486 enum qat_sym_proto_flag qat_proto_flag =
1487 QAT_CRYPTO_PROTO_FLAG_NONE;
1490 case ICP_QAT_FW_LA_GCM_PROTO:
1491 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
1493 case ICP_QAT_FW_LA_CCM_PROTO:
1494 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
1498 return qat_proto_flag;
1501 int qat_sym_session_aead_create_cd_cipher(struct qat_sym_session *cdesc,
1502 const uint8_t *cipherkey,
1503 uint32_t cipherkeylen)
1505 struct icp_qat_hw_cipher_algo_blk *cipher;
1506 struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
1507 struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
1508 struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
1509 void *ptr = &req_tmpl->cd_ctrl;
1510 struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
1511 struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
1512 enum icp_qat_hw_cipher_convert key_convert;
1513 enum qat_sym_proto_flag qat_proto_flag =
1514 QAT_CRYPTO_PROTO_FLAG_NONE;
1515 uint32_t total_key_size;
1516 uint16_t cipher_offset, cd_size;
1517 uint32_t wordIndex = 0;
1518 uint32_t *temp_key = NULL;
1520 if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER) {
1521 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1522 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1523 ICP_QAT_FW_SLICE_CIPHER);
1524 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1525 ICP_QAT_FW_SLICE_DRAM_WR);
1526 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1527 ICP_QAT_FW_LA_NO_RET_AUTH_RES);
1528 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1529 ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
1530 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1531 } else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
1532 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1533 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1534 ICP_QAT_FW_SLICE_CIPHER);
1535 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1536 ICP_QAT_FW_SLICE_AUTH);
1537 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1538 ICP_QAT_FW_SLICE_AUTH);
1539 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1540 ICP_QAT_FW_SLICE_DRAM_WR);
1541 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1542 } else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
1543 QAT_LOG(ERR, "Invalid param, must be a cipher command.");
1547 if (cdesc->qat_mode == ICP_QAT_HW_CIPHER_CTR_MODE) {
1549 * CTR Streaming ciphers are a special case. Decrypt = encrypt
1550 * Overriding default values previously set
1552 cdesc->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
1553 key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
1554 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2
1555 || cdesc->qat_cipher_alg ==
1556 ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)
1557 key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
1558 else if (cdesc->qat_dir == ICP_QAT_HW_CIPHER_ENCRYPT)
1559 key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
1561 key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
1563 if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
1564 total_key_size = ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ +
1565 ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
1566 cipher_cd_ctrl->cipher_state_sz =
1567 ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
1568 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
1570 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
1571 total_key_size = ICP_QAT_HW_KASUMI_F8_KEY_SZ;
1572 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_KASUMI_BLK_SZ >> 3;
1573 cipher_cd_ctrl->cipher_padding_sz =
1574 (2 * ICP_QAT_HW_KASUMI_BLK_SZ) >> 3;
1575 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES) {
1576 total_key_size = ICP_QAT_HW_3DES_KEY_SZ;
1577 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_3DES_BLK_SZ >> 3;
1579 qat_get_crypto_proto_flag(header->serv_specif_flags);
1580 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_DES) {
1581 total_key_size = ICP_QAT_HW_DES_KEY_SZ;
1582 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_DES_BLK_SZ >> 3;
1584 qat_get_crypto_proto_flag(header->serv_specif_flags);
1585 } else if (cdesc->qat_cipher_alg ==
1586 ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
1587 total_key_size = ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ +
1588 ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
1589 cipher_cd_ctrl->cipher_state_sz =
1590 ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
1591 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
1592 cdesc->min_qat_dev_gen = QAT_GEN2;
1594 total_key_size = cipherkeylen;
1595 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_AES_BLK_SZ >> 3;
1597 qat_get_crypto_proto_flag(header->serv_specif_flags);
1599 cipher_cd_ctrl->cipher_key_sz = total_key_size >> 3;
1600 cipher_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
1601 cipher_cd_ctrl->cipher_cfg_offset = cipher_offset >> 3;
1603 header->service_cmd_id = cdesc->qat_cmd;
1604 qat_sym_session_init_common_hdr(header, qat_proto_flag);
1606 cipher = (struct icp_qat_hw_cipher_algo_blk *)cdesc->cd_cur_ptr;
1607 cipher->cipher_config.val =
1608 ICP_QAT_HW_CIPHER_CONFIG_BUILD(cdesc->qat_mode,
1609 cdesc->qat_cipher_alg, key_convert,
1612 if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
1613 temp_key = (uint32_t *)(cdesc->cd_cur_ptr +
1614 sizeof(struct icp_qat_hw_cipher_config)
1616 memcpy(cipher->key, cipherkey, cipherkeylen);
1617 memcpy(temp_key, cipherkey, cipherkeylen);
1619 /* XOR Key with KASUMI F8 key modifier at 4 bytes level */
1620 for (wordIndex = 0; wordIndex < (cipherkeylen >> 2);
1622 temp_key[wordIndex] ^= KASUMI_F8_KEY_MODIFIER_4_BYTES;
1624 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1625 cipherkeylen + cipherkeylen;
1627 memcpy(cipher->key, cipherkey, cipherkeylen);
1628 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1632 if (total_key_size > cipherkeylen) {
1633 uint32_t padding_size = total_key_size-cipherkeylen;
1634 if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
1635 && (cipherkeylen == QAT_3DES_KEY_SZ_OPT2)) {
1636 /* K3 not provided so use K1 = K3*/
1637 memcpy(cdesc->cd_cur_ptr, cipherkey, padding_size);
1638 } else if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
1639 && (cipherkeylen == QAT_3DES_KEY_SZ_OPT3)) {
1640 /* K2 and K3 not provided so use K1 = K2 = K3*/
1641 memcpy(cdesc->cd_cur_ptr, cipherkey,
1643 memcpy(cdesc->cd_cur_ptr+cipherkeylen,
1644 cipherkey, cipherkeylen);
1646 memset(cdesc->cd_cur_ptr, 0, padding_size);
1648 cdesc->cd_cur_ptr += padding_size;
1650 cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
1651 cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;
1656 int qat_sym_session_aead_create_cd_auth(struct qat_sym_session *cdesc,
1657 const uint8_t *authkey,
1658 uint32_t authkeylen,
1659 uint32_t aad_length,
1660 uint32_t digestsize,
1661 unsigned int operation)
1663 struct icp_qat_hw_auth_setup *hash;
1664 struct icp_qat_hw_cipher_algo_blk *cipherconfig;
1665 struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
1666 struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
1667 struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
1668 void *ptr = &req_tmpl->cd_ctrl;
1669 struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
1670 struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
1671 struct icp_qat_fw_la_auth_req_params *auth_param =
1672 (struct icp_qat_fw_la_auth_req_params *)
1673 ((char *)&req_tmpl->serv_specif_rqpars +
1674 ICP_QAT_FW_HASH_REQUEST_PARAMETERS_OFFSET);
1675 uint16_t state1_size = 0, state2_size = 0, cd_extra_size = 0;
1676 uint16_t hash_offset, cd_size;
1677 uint32_t *aad_len = NULL;
1678 uint32_t wordIndex = 0;
1680 enum qat_sym_proto_flag qat_proto_flag =
1681 QAT_CRYPTO_PROTO_FLAG_NONE;
1683 if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_AUTH) {
1684 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1685 ICP_QAT_FW_SLICE_AUTH);
1686 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1687 ICP_QAT_FW_SLICE_DRAM_WR);
1688 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1689 } else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
1690 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1691 ICP_QAT_FW_SLICE_AUTH);
1692 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1693 ICP_QAT_FW_SLICE_CIPHER);
1694 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1695 ICP_QAT_FW_SLICE_CIPHER);
1696 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1697 ICP_QAT_FW_SLICE_DRAM_WR);
1698 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1699 } else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
1700 QAT_LOG(ERR, "Invalid param, must be a hash command.");
1704 if (operation == RTE_CRYPTO_AUTH_OP_VERIFY) {
1705 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1706 ICP_QAT_FW_LA_NO_RET_AUTH_RES);
1707 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1708 ICP_QAT_FW_LA_CMP_AUTH_RES);
1709 cdesc->auth_op = ICP_QAT_HW_AUTH_VERIFY;
1711 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1712 ICP_QAT_FW_LA_RET_AUTH_RES);
1713 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1714 ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
1715 cdesc->auth_op = ICP_QAT_HW_AUTH_GENERATE;
1719 * Setup the inner hash config
1721 hash_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
1722 hash = (struct icp_qat_hw_auth_setup *)cdesc->cd_cur_ptr;
1723 hash->auth_config.reserved = 0;
1724 hash->auth_config.config =
1725 ICP_QAT_HW_AUTH_CONFIG_BUILD(cdesc->auth_mode,
1726 cdesc->qat_hash_alg, digestsize);
1728 if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0
1729 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2
1730 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_KASUMI_F9
1731 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3
1732 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC
1733 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC
1734 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_NULL
1736 hash->auth_counter.counter = 0;
1738 int block_size = qat_hash_get_block_size(cdesc->qat_hash_alg);
1742 hash->auth_counter.counter = rte_bswap32(block_size);
1745 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_auth_setup);
1748 * cd_cur_ptr now points at the state1 information.
1750 switch (cdesc->qat_hash_alg) {
1751 case ICP_QAT_HW_AUTH_ALGO_SHA1:
1752 if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0) {
1754 rte_memcpy(cdesc->cd_cur_ptr, sha1InitialState,
1755 sizeof(sha1InitialState));
1756 state1_size = qat_hash_get_state1_size(
1757 cdesc->qat_hash_alg);
1761 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA1, authkey,
1762 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1764 QAT_LOG(ERR, "(SHA)precompute failed");
1767 state2_size = RTE_ALIGN_CEIL(ICP_QAT_HW_SHA1_STATE2_SZ, 8);
1769 case ICP_QAT_HW_AUTH_ALGO_SHA224:
1770 if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0) {
1772 rte_memcpy(cdesc->cd_cur_ptr, sha224InitialState,
1773 sizeof(sha224InitialState));
1774 state1_size = qat_hash_get_state1_size(
1775 cdesc->qat_hash_alg);
1779 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA224, authkey,
1780 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1782 QAT_LOG(ERR, "(SHA)precompute failed");
1785 state2_size = ICP_QAT_HW_SHA224_STATE2_SZ;
1787 case ICP_QAT_HW_AUTH_ALGO_SHA256:
1788 if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0) {
1790 rte_memcpy(cdesc->cd_cur_ptr, sha256InitialState,
1791 sizeof(sha256InitialState));
1792 state1_size = qat_hash_get_state1_size(
1793 cdesc->qat_hash_alg);
1797 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA256, authkey,
1798 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1800 QAT_LOG(ERR, "(SHA)precompute failed");
1803 state2_size = ICP_QAT_HW_SHA256_STATE2_SZ;
1805 case ICP_QAT_HW_AUTH_ALGO_SHA384:
1806 if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0) {
1808 rte_memcpy(cdesc->cd_cur_ptr, sha384InitialState,
1809 sizeof(sha384InitialState));
1810 state1_size = qat_hash_get_state1_size(
1811 cdesc->qat_hash_alg);
1815 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA384, authkey,
1816 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1818 QAT_LOG(ERR, "(SHA)precompute failed");
1821 state2_size = ICP_QAT_HW_SHA384_STATE2_SZ;
1823 case ICP_QAT_HW_AUTH_ALGO_SHA512:
1824 if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0) {
1826 rte_memcpy(cdesc->cd_cur_ptr, sha512InitialState,
1827 sizeof(sha512InitialState));
1828 state1_size = qat_hash_get_state1_size(
1829 cdesc->qat_hash_alg);
1833 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA512, authkey,
1834 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1836 QAT_LOG(ERR, "(SHA)precompute failed");
1839 state2_size = ICP_QAT_HW_SHA512_STATE2_SZ;
1841 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
1842 state1_size = ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1844 if (cdesc->aes_cmac)
1845 memset(cdesc->cd_cur_ptr, 0, state1_size);
1846 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC,
1847 authkey, authkeylen, cdesc->cd_cur_ptr + state1_size,
1848 &state2_size, cdesc->aes_cmac)) {
1849 cdesc->aes_cmac ? QAT_LOG(ERR,
1850 "(CMAC)precompute failed")
1852 "(XCBC)precompute failed");
1856 case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
1857 case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
1858 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
1859 state1_size = ICP_QAT_HW_GALOIS_128_STATE1_SZ;
1860 if (qat_sym_do_precomputes(cdesc->qat_hash_alg, authkey,
1861 authkeylen, cdesc->cd_cur_ptr + state1_size,
1862 &state2_size, cdesc->aes_cmac)) {
1863 QAT_LOG(ERR, "(GCM)precompute failed");
1867 * Write (the length of AAD) into bytes 16-19 of state2
1868 * in big-endian format. This field is 8 bytes
1870 auth_param->u2.aad_sz =
1871 RTE_ALIGN_CEIL(aad_length, 16);
1872 auth_param->hash_state_sz = (auth_param->u2.aad_sz) >> 3;
1874 aad_len = (uint32_t *)(cdesc->cd_cur_ptr +
1875 ICP_QAT_HW_GALOIS_128_STATE1_SZ +
1876 ICP_QAT_HW_GALOIS_H_SZ);
1877 *aad_len = rte_bswap32(aad_length);
1878 cdesc->aad_len = aad_length;
1880 case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
1881 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
1882 state1_size = qat_hash_get_state1_size(
1883 ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2);
1884 state2_size = ICP_QAT_HW_SNOW_3G_UIA2_STATE2_SZ;
1885 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
1887 cipherconfig = (struct icp_qat_hw_cipher_algo_blk *)
1888 (cdesc->cd_cur_ptr + state1_size + state2_size);
1889 cipherconfig->cipher_config.val =
1890 ICP_QAT_HW_CIPHER_CONFIG_BUILD(ICP_QAT_HW_CIPHER_ECB_MODE,
1891 ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2,
1892 ICP_QAT_HW_CIPHER_KEY_CONVERT,
1893 ICP_QAT_HW_CIPHER_ENCRYPT);
1894 memcpy(cipherconfig->key, authkey, authkeylen);
1895 memset(cipherconfig->key + authkeylen,
1896 0, ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ);
1897 cd_extra_size += sizeof(struct icp_qat_hw_cipher_config) +
1898 authkeylen + ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
1899 auth_param->hash_state_sz = ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
1901 case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
1902 hash->auth_config.config =
1903 ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE0,
1904 cdesc->qat_hash_alg, digestsize);
1905 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
1906 state1_size = qat_hash_get_state1_size(
1907 ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3);
1908 state2_size = ICP_QAT_HW_ZUC_3G_EIA3_STATE2_SZ;
1909 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size
1910 + ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ);
1912 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1913 cd_extra_size += ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
1914 auth_param->hash_state_sz = ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
1915 cdesc->min_qat_dev_gen = QAT_GEN2;
1918 case ICP_QAT_HW_AUTH_ALGO_MD5:
1919 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_MD5, authkey,
1920 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1922 QAT_LOG(ERR, "(MD5)precompute failed");
1925 state2_size = ICP_QAT_HW_MD5_STATE2_SZ;
1927 case ICP_QAT_HW_AUTH_ALGO_NULL:
1928 state1_size = qat_hash_get_state1_size(
1929 ICP_QAT_HW_AUTH_ALGO_NULL);
1930 state2_size = ICP_QAT_HW_NULL_STATE2_SZ;
1932 case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
1933 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
1934 state1_size = qat_hash_get_state1_size(
1935 ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC);
1936 state2_size = ICP_QAT_HW_AES_CBC_MAC_KEY_SZ +
1937 ICP_QAT_HW_AES_CCM_CBC_E_CTR0_SZ;
1939 if (aad_length > 0) {
1940 aad_length += ICP_QAT_HW_CCM_AAD_B0_LEN +
1941 ICP_QAT_HW_CCM_AAD_LEN_INFO;
1942 auth_param->u2.aad_sz =
1943 RTE_ALIGN_CEIL(aad_length,
1944 ICP_QAT_HW_CCM_AAD_ALIGNMENT);
1946 auth_param->u2.aad_sz = ICP_QAT_HW_CCM_AAD_B0_LEN;
1948 cdesc->aad_len = aad_length;
1949 hash->auth_counter.counter = 0;
1951 hash_cd_ctrl->outer_prefix_sz = digestsize;
1952 auth_param->hash_state_sz = digestsize;
1954 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1956 case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
1957 state1_size = qat_hash_get_state1_size(
1958 ICP_QAT_HW_AUTH_ALGO_KASUMI_F9);
1959 state2_size = ICP_QAT_HW_KASUMI_F9_STATE2_SZ;
1960 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
1961 pTempKey = (uint32_t *)(cdesc->cd_cur_ptr + state1_size
1964 * The Inner Hash Initial State2 block must contain IK
1965 * (Initialisation Key), followed by IK XOR-ed with KM
1966 * (Key Modifier): IK||(IK^KM).
1968 /* write the auth key */
1969 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1970 /* initialise temp key with auth key */
1971 memcpy(pTempKey, authkey, authkeylen);
1972 /* XOR Key with KASUMI F9 key modifier at 4 bytes level */
1973 for (wordIndex = 0; wordIndex < (authkeylen >> 2); wordIndex++)
1974 pTempKey[wordIndex] ^= KASUMI_F9_KEY_MODIFIER_4_BYTES;
1977 QAT_LOG(ERR, "Invalid HASH alg %u", cdesc->qat_hash_alg);
1981 /* Request template setup */
1982 qat_sym_session_init_common_hdr(header, qat_proto_flag);
1983 header->service_cmd_id = cdesc->qat_cmd;
1985 /* Auth CD config setup */
1986 hash_cd_ctrl->hash_cfg_offset = hash_offset >> 3;
1987 hash_cd_ctrl->hash_flags = ICP_QAT_FW_AUTH_HDR_FLAG_NO_NESTED;
1988 hash_cd_ctrl->inner_res_sz = digestsize;
1989 hash_cd_ctrl->final_sz = digestsize;
1990 hash_cd_ctrl->inner_state1_sz = state1_size;
1991 auth_param->auth_res_sz = digestsize;
1993 hash_cd_ctrl->inner_state2_sz = state2_size;
1994 hash_cd_ctrl->inner_state2_offset = hash_cd_ctrl->hash_cfg_offset +
1995 ((sizeof(struct icp_qat_hw_auth_setup) +
1996 RTE_ALIGN_CEIL(hash_cd_ctrl->inner_state1_sz, 8))
1999 cdesc->cd_cur_ptr += state1_size + state2_size + cd_extra_size;
2000 cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
2002 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
2003 cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;
2008 int qat_sym_validate_aes_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
2011 case ICP_QAT_HW_AES_128_KEY_SZ:
2012 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
2014 case ICP_QAT_HW_AES_192_KEY_SZ:
2015 *alg = ICP_QAT_HW_CIPHER_ALGO_AES192;
2017 case ICP_QAT_HW_AES_256_KEY_SZ:
2018 *alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
2026 int qat_sym_validate_aes_docsisbpi_key(int key_len,
2027 enum icp_qat_hw_cipher_algo *alg)
2030 case ICP_QAT_HW_AES_128_KEY_SZ:
2031 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
2033 case ICP_QAT_HW_AES_256_KEY_SZ:
2034 *alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
2042 int qat_sym_validate_snow3g_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
2045 case ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ:
2046 *alg = ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2;
2054 int qat_sym_validate_kasumi_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
2057 case ICP_QAT_HW_KASUMI_KEY_SZ:
2058 *alg = ICP_QAT_HW_CIPHER_ALGO_KASUMI;
2066 int qat_sym_validate_des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
2069 case ICP_QAT_HW_DES_KEY_SZ:
2070 *alg = ICP_QAT_HW_CIPHER_ALGO_DES;
2078 int qat_sym_validate_3des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
2081 case QAT_3DES_KEY_SZ_OPT1:
2082 case QAT_3DES_KEY_SZ_OPT2:
2083 case QAT_3DES_KEY_SZ_OPT3:
2084 *alg = ICP_QAT_HW_CIPHER_ALGO_3DES;
2092 int qat_sym_validate_zuc_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
2095 case ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ:
2096 *alg = ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3;