/* SPDX-License-Identifier: (BSD-3-Clause OR GPL-2.0)
 * Copyright(c) 2015-2019 Intel Corporation
 */
#include <errno.h>

#include <openssl/sha.h>	/* Needed to calculate pre-compute values */
#include <openssl/aes.h>	/* Needed to calculate pre-compute values */
#include <openssl/md5.h>	/* Needed to calculate pre-compute values */
#include <openssl/evp.h>	/* Needed for bpi runt block processing */

#include <rte_memcpy.h>
#include <rte_common.h>
#include <rte_spinlock.h>
#include <rte_byteorder.h>
#include <rte_log.h>
#include <rte_malloc.h>
#include <rte_crypto_sym.h>

#include "qat_logs.h"
#include "qat_sym_session.h"
#include "qat_sym_pmd.h"
/* SHA1 - 20 bytes - Initialiser state can be found in FIPS stds 180-2 */
static const uint8_t sha1InitialState[] = {
	0x67, 0x45, 0x23, 0x01, 0xef, 0xcd, 0xab, 0x89, 0x98, 0xba,
	0xdc, 0xfe, 0x10, 0x32, 0x54, 0x76, 0xc3, 0xd2, 0xe1, 0xf0};

/* SHA 224 - 32 bytes - Initialiser state can be found in FIPS stds 180-2 */
static const uint8_t sha224InitialState[] = {
	0xc1, 0x05, 0x9e, 0xd8, 0x36, 0x7c, 0xd5, 0x07, 0x30, 0x70, 0xdd,
	0x17, 0xf7, 0x0e, 0x59, 0x39, 0xff, 0xc0, 0x0b, 0x31, 0x68, 0x58,
	0x15, 0x11, 0x64, 0xf9, 0x8f, 0xa7, 0xbe, 0xfa, 0x4f, 0xa4};

/* SHA 256 - 32 bytes - Initialiser state can be found in FIPS stds 180-2 */
static const uint8_t sha256InitialState[] = {
	0x6a, 0x09, 0xe6, 0x67, 0xbb, 0x67, 0xae, 0x85, 0x3c, 0x6e, 0xf3,
	0x72, 0xa5, 0x4f, 0xf5, 0x3a, 0x51, 0x0e, 0x52, 0x7f, 0x9b, 0x05,
	0x68, 0x8c, 0x1f, 0x83, 0xd9, 0xab, 0x5b, 0xe0, 0xcd, 0x19};

/* SHA 384 - 64 bytes - Initialiser state can be found in FIPS stds 180-2 */
static const uint8_t sha384InitialState[] = {
	0xcb, 0xbb, 0x9d, 0x5d, 0xc1, 0x05, 0x9e, 0xd8, 0x62, 0x9a, 0x29,
	0x2a, 0x36, 0x7c, 0xd5, 0x07, 0x91, 0x59, 0x01, 0x5a, 0x30, 0x70,
	0xdd, 0x17, 0x15, 0x2f, 0xec, 0xd8, 0xf7, 0x0e, 0x59, 0x39, 0x67,
	0x33, 0x26, 0x67, 0xff, 0xc0, 0x0b, 0x31, 0x8e, 0xb4, 0x4a, 0x87,
	0x68, 0x58, 0x15, 0x11, 0xdb, 0x0c, 0x2e, 0x0d, 0x64, 0xf9, 0x8f,
	0xa7, 0x47, 0xb5, 0x48, 0x1d, 0xbe, 0xfa, 0x4f, 0xa4};

/* SHA 512 - 64 bytes - Initialiser state can be found in FIPS stds 180-2 */
static const uint8_t sha512InitialState[] = {
	0x6a, 0x09, 0xe6, 0x67, 0xf3, 0xbc, 0xc9, 0x08, 0xbb, 0x67, 0xae,
	0x85, 0x84, 0xca, 0xa7, 0x3b, 0x3c, 0x6e, 0xf3, 0x72, 0xfe, 0x94,
	0xf8, 0x2b, 0xa5, 0x4f, 0xf5, 0x3a, 0x5f, 0x1d, 0x36, 0xf1, 0x51,
	0x0e, 0x52, 0x7f, 0xad, 0xe6, 0x82, 0xd1, 0x9b, 0x05, 0x68, 0x8c,
	0x2b, 0x3e, 0x6c, 0x1f, 0x1f, 0x83, 0xd9, 0xab, 0xfb, 0x41, 0xbd,
	0x6b, 0x5b, 0xe0, 0xcd, 0x19, 0x13, 0x7e, 0x21, 0x79};
57 /** Frees a context previously created
58 * Depends on openssl libcrypto
61 bpi_cipher_ctx_free(void *bpi_ctx)
64 EVP_CIPHER_CTX_free((EVP_CIPHER_CTX *)bpi_ctx);
67 /** Creates a context in either AES or DES in ECB mode
68 * Depends on openssl libcrypto
71 bpi_cipher_ctx_init(enum rte_crypto_cipher_algorithm cryptodev_algo,
72 enum rte_crypto_cipher_operation direction __rte_unused,
73 const uint8_t *key, uint16_t key_length, void **ctx)
75 const EVP_CIPHER *algo = NULL;
77 *ctx = EVP_CIPHER_CTX_new();
84 if (cryptodev_algo == RTE_CRYPTO_CIPHER_DES_DOCSISBPI)
87 if (key_length == ICP_QAT_HW_AES_128_KEY_SZ)
88 algo = EVP_aes_128_ecb();
90 algo = EVP_aes_256_ecb();
92 /* IV will be ECB encrypted whether direction is encrypt or decrypt*/
93 if (EVP_EncryptInit_ex(*ctx, algo, NULL, key, 0) != 1) {
102 EVP_CIPHER_CTX_free(*ctx);
107 qat_is_cipher_alg_supported(enum rte_crypto_cipher_algorithm algo,
108 struct qat_sym_dev_private *internals)
111 const struct rte_cryptodev_capabilities *capability;
113 while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
114 RTE_CRYPTO_OP_TYPE_UNDEFINED) {
115 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
118 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_CIPHER)
121 if (capability->sym.cipher.algo == algo)
128 qat_is_auth_alg_supported(enum rte_crypto_auth_algorithm algo,
129 struct qat_sym_dev_private *internals)
132 const struct rte_cryptodev_capabilities *capability;
134 while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
135 RTE_CRYPTO_OP_TYPE_UNDEFINED) {
136 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
139 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_AUTH)
142 if (capability->sym.auth.algo == algo)
149 qat_sym_session_clear(struct rte_cryptodev *dev,
150 struct rte_cryptodev_sym_session *sess)
152 uint8_t index = dev->driver_id;
153 void *sess_priv = get_sym_session_private_data(sess, index);
154 struct qat_sym_session *s = (struct qat_sym_session *)sess_priv;
158 bpi_cipher_ctx_free(s->bpi_ctx);
159 memset(s, 0, qat_sym_session_get_private_size(dev));
160 struct rte_mempool *sess_mp = rte_mempool_from_obj(sess_priv);
162 set_sym_session_private_data(sess, index, NULL);
163 rte_mempool_put(sess_mp, sess_priv);
168 qat_get_cmd_id(const struct rte_crypto_sym_xform *xform)
171 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER && xform->next == NULL)
172 return ICP_QAT_FW_LA_CMD_CIPHER;
174 /* Authentication Only */
175 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH && xform->next == NULL)
176 return ICP_QAT_FW_LA_CMD_AUTH;
179 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
180 /* AES-GCM and AES-CCM works with different direction
181 * GCM first encrypts and generate hash where AES-CCM
182 * first generate hash and encrypts. Similar relation
183 * applies to decryption.
185 if (xform->aead.op == RTE_CRYPTO_AEAD_OP_ENCRYPT)
186 if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
187 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
189 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
191 if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
192 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
194 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
197 if (xform->next == NULL)
200 /* Cipher then Authenticate */
201 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
202 xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
203 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
205 /* Authenticate then Cipher */
206 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH &&
207 xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
208 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
213 static struct rte_crypto_auth_xform *
214 qat_get_auth_xform(struct rte_crypto_sym_xform *xform)
217 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH)
226 static struct rte_crypto_cipher_xform *
227 qat_get_cipher_xform(struct rte_crypto_sym_xform *xform)
230 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
231 return &xform->cipher;
240 qat_sym_session_configure_cipher(struct rte_cryptodev *dev,
241 struct rte_crypto_sym_xform *xform,
242 struct qat_sym_session *session)
244 struct qat_sym_dev_private *internals = dev->data->dev_private;
245 struct rte_crypto_cipher_xform *cipher_xform = NULL;
248 /* Get cipher xform from crypto xform chain */
249 cipher_xform = qat_get_cipher_xform(xform);
251 session->cipher_iv.offset = cipher_xform->iv.offset;
252 session->cipher_iv.length = cipher_xform->iv.length;
254 switch (cipher_xform->algo) {
255 case RTE_CRYPTO_CIPHER_AES_CBC:
256 if (qat_sym_validate_aes_key(cipher_xform->key.length,
257 &session->qat_cipher_alg) != 0) {
258 QAT_LOG(ERR, "Invalid AES cipher key size");
262 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
264 case RTE_CRYPTO_CIPHER_AES_CTR:
265 if (qat_sym_validate_aes_key(cipher_xform->key.length,
266 &session->qat_cipher_alg) != 0) {
267 QAT_LOG(ERR, "Invalid AES cipher key size");
271 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
273 case RTE_CRYPTO_CIPHER_SNOW3G_UEA2:
274 if (qat_sym_validate_snow3g_key(cipher_xform->key.length,
275 &session->qat_cipher_alg) != 0) {
276 QAT_LOG(ERR, "Invalid SNOW 3G cipher key size");
280 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
282 case RTE_CRYPTO_CIPHER_NULL:
283 session->qat_cipher_alg = ICP_QAT_HW_CIPHER_ALGO_NULL;
284 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
286 case RTE_CRYPTO_CIPHER_KASUMI_F8:
287 if (qat_sym_validate_kasumi_key(cipher_xform->key.length,
288 &session->qat_cipher_alg) != 0) {
289 QAT_LOG(ERR, "Invalid KASUMI cipher key size");
293 session->qat_mode = ICP_QAT_HW_CIPHER_F8_MODE;
295 case RTE_CRYPTO_CIPHER_3DES_CBC:
296 if (qat_sym_validate_3des_key(cipher_xform->key.length,
297 &session->qat_cipher_alg) != 0) {
298 QAT_LOG(ERR, "Invalid 3DES cipher key size");
302 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
304 case RTE_CRYPTO_CIPHER_DES_CBC:
305 if (qat_sym_validate_des_key(cipher_xform->key.length,
306 &session->qat_cipher_alg) != 0) {
307 QAT_LOG(ERR, "Invalid DES cipher key size");
311 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
313 case RTE_CRYPTO_CIPHER_3DES_CTR:
314 if (qat_sym_validate_3des_key(cipher_xform->key.length,
315 &session->qat_cipher_alg) != 0) {
316 QAT_LOG(ERR, "Invalid 3DES cipher key size");
320 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
322 case RTE_CRYPTO_CIPHER_DES_DOCSISBPI:
323 ret = bpi_cipher_ctx_init(
326 cipher_xform->key.data,
327 cipher_xform->key.length,
330 QAT_LOG(ERR, "failed to create DES BPI ctx");
333 if (qat_sym_validate_des_key(cipher_xform->key.length,
334 &session->qat_cipher_alg) != 0) {
335 QAT_LOG(ERR, "Invalid DES cipher key size");
339 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
341 case RTE_CRYPTO_CIPHER_AES_DOCSISBPI:
342 ret = bpi_cipher_ctx_init(
345 cipher_xform->key.data,
346 cipher_xform->key.length,
349 QAT_LOG(ERR, "failed to create AES BPI ctx");
352 if (qat_sym_validate_aes_docsisbpi_key(cipher_xform->key.length,
353 &session->qat_cipher_alg) != 0) {
354 QAT_LOG(ERR, "Invalid AES DOCSISBPI key size");
358 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
360 case RTE_CRYPTO_CIPHER_ZUC_EEA3:
361 if (!qat_is_cipher_alg_supported(
362 cipher_xform->algo, internals)) {
363 QAT_LOG(ERR, "%s not supported on this device",
364 rte_crypto_cipher_algorithm_strings
365 [cipher_xform->algo]);
369 if (qat_sym_validate_zuc_key(cipher_xform->key.length,
370 &session->qat_cipher_alg) != 0) {
371 QAT_LOG(ERR, "Invalid ZUC cipher key size");
375 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
377 case RTE_CRYPTO_CIPHER_AES_XTS:
378 if ((cipher_xform->key.length/2) == ICP_QAT_HW_AES_192_KEY_SZ) {
379 QAT_LOG(ERR, "AES-XTS-192 not supported");
383 if (qat_sym_validate_aes_key((cipher_xform->key.length/2),
384 &session->qat_cipher_alg) != 0) {
385 QAT_LOG(ERR, "Invalid AES-XTS cipher key size");
389 session->qat_mode = ICP_QAT_HW_CIPHER_XTS_MODE;
391 case RTE_CRYPTO_CIPHER_3DES_ECB:
392 case RTE_CRYPTO_CIPHER_AES_ECB:
393 case RTE_CRYPTO_CIPHER_AES_F8:
394 case RTE_CRYPTO_CIPHER_ARC4:
395 QAT_LOG(ERR, "Crypto QAT PMD: Unsupported Cipher alg %u",
400 QAT_LOG(ERR, "Crypto: Undefined Cipher specified %u\n",
406 if (cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
407 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
409 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
411 if (qat_sym_session_aead_create_cd_cipher(session,
412 cipher_xform->key.data,
413 cipher_xform->key.length)) {
421 if (session->bpi_ctx) {
422 bpi_cipher_ctx_free(session->bpi_ctx);
423 session->bpi_ctx = NULL;
429 qat_sym_session_configure(struct rte_cryptodev *dev,
430 struct rte_crypto_sym_xform *xform,
431 struct rte_cryptodev_sym_session *sess,
432 struct rte_mempool *mempool)
434 void *sess_private_data;
437 if (rte_mempool_get(mempool, &sess_private_data)) {
439 "Couldn't get object from session mempool");
443 ret = qat_sym_session_set_parameters(dev, xform, sess_private_data);
446 "Crypto QAT PMD: failed to configure session parameters");
448 /* Return session to mempool */
449 rte_mempool_put(mempool, sess_private_data);
453 set_sym_session_private_data(sess, dev->driver_id,
460 qat_sym_session_set_ext_hash_flags(struct qat_sym_session *session,
463 struct icp_qat_fw_comn_req_hdr *header = &session->fw_req.comn_hdr;
464 struct icp_qat_fw_cipher_auth_cd_ctrl_hdr *cd_ctrl =
465 (struct icp_qat_fw_cipher_auth_cd_ctrl_hdr *)
466 session->fw_req.cd_ctrl.content_desc_ctrl_lw;
468 /* Set the Use Extended Protocol Flags bit in LW 1 */
469 QAT_FIELD_SET(header->comn_req_flags,
470 QAT_COMN_EXT_FLAGS_USED,
471 QAT_COMN_EXT_FLAGS_BITPOS,
472 QAT_COMN_EXT_FLAGS_MASK);
474 /* Set Hash Flags in LW 28 */
475 cd_ctrl->hash_flags |= hash_flag;
477 /* Set proto flags in LW 1 */
478 switch (session->qat_cipher_alg) {
479 case ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2:
480 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
481 ICP_QAT_FW_LA_SNOW_3G_PROTO);
482 ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
483 header->serv_specif_flags, 0);
485 case ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3:
486 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
487 ICP_QAT_FW_LA_NO_PROTO);
488 ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
489 header->serv_specif_flags,
490 ICP_QAT_FW_LA_ZUC_3G_PROTO);
493 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
494 ICP_QAT_FW_LA_NO_PROTO);
495 ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
496 header->serv_specif_flags, 0);
502 qat_sym_session_handle_mixed(const struct rte_cryptodev *dev,
503 struct qat_sym_session *session)
505 const struct qat_sym_dev_private *qat_private = dev->data->dev_private;
506 enum qat_device_gen min_dev_gen = (qat_private->internal_capabilities &
507 QAT_SYM_CAP_MIXED_CRYPTO) ? QAT_GEN2 : QAT_GEN3;
509 if (session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3 &&
510 session->qat_cipher_alg !=
511 ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
512 session->min_qat_dev_gen = min_dev_gen;
513 qat_sym_session_set_ext_hash_flags(session,
514 1 << ICP_QAT_FW_AUTH_HDR_FLAG_ZUC_EIA3_BITPOS);
515 } else if (session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2 &&
516 session->qat_cipher_alg !=
517 ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
518 session->min_qat_dev_gen = min_dev_gen;
519 qat_sym_session_set_ext_hash_flags(session,
520 1 << ICP_QAT_FW_AUTH_HDR_FLAG_SNOW3G_UIA2_BITPOS);
521 } else if ((session->aes_cmac ||
522 session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_NULL) &&
523 (session->qat_cipher_alg ==
524 ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2 ||
525 session->qat_cipher_alg ==
526 ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)) {
527 session->min_qat_dev_gen = min_dev_gen;
528 qat_sym_session_set_ext_hash_flags(session, 0);
533 qat_sym_session_set_parameters(struct rte_cryptodev *dev,
534 struct rte_crypto_sym_xform *xform, void *session_private)
536 struct qat_sym_session *session = session_private;
540 /* Set context descriptor physical address */
541 session->cd_paddr = rte_mempool_virt2iova(session) +
542 offsetof(struct qat_sym_session, cd);
544 session->min_qat_dev_gen = QAT_GEN1;
546 /* Get requested QAT command id */
547 qat_cmd_id = qat_get_cmd_id(xform);
548 if (qat_cmd_id < 0 || qat_cmd_id >= ICP_QAT_FW_LA_CMD_DELIMITER) {
549 QAT_LOG(ERR, "Unsupported xform chain requested");
552 session->qat_cmd = (enum icp_qat_fw_la_cmd_id)qat_cmd_id;
553 switch (session->qat_cmd) {
554 case ICP_QAT_FW_LA_CMD_CIPHER:
555 ret = qat_sym_session_configure_cipher(dev, xform, session);
559 case ICP_QAT_FW_LA_CMD_AUTH:
560 ret = qat_sym_session_configure_auth(dev, xform, session);
564 case ICP_QAT_FW_LA_CMD_CIPHER_HASH:
565 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
566 ret = qat_sym_session_configure_aead(dev, xform,
571 ret = qat_sym_session_configure_cipher(dev,
575 ret = qat_sym_session_configure_auth(dev,
579 /* Special handling of mixed hash+cipher algorithms */
580 qat_sym_session_handle_mixed(dev, session);
583 case ICP_QAT_FW_LA_CMD_HASH_CIPHER:
584 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
585 ret = qat_sym_session_configure_aead(dev, xform,
590 ret = qat_sym_session_configure_auth(dev,
594 ret = qat_sym_session_configure_cipher(dev,
598 /* Special handling of mixed hash+cipher algorithms */
599 qat_sym_session_handle_mixed(dev, session);
602 case ICP_QAT_FW_LA_CMD_TRNG_GET_RANDOM:
603 case ICP_QAT_FW_LA_CMD_TRNG_TEST:
604 case ICP_QAT_FW_LA_CMD_SSL3_KEY_DERIVE:
605 case ICP_QAT_FW_LA_CMD_TLS_V1_1_KEY_DERIVE:
606 case ICP_QAT_FW_LA_CMD_TLS_V1_2_KEY_DERIVE:
607 case ICP_QAT_FW_LA_CMD_MGF1:
608 case ICP_QAT_FW_LA_CMD_AUTH_PRE_COMP:
609 case ICP_QAT_FW_LA_CMD_CIPHER_PRE_COMP:
610 case ICP_QAT_FW_LA_CMD_DELIMITER:
611 QAT_LOG(ERR, "Unsupported Service %u",
615 QAT_LOG(ERR, "Unsupported Service %u",
624 qat_sym_session_handle_single_pass(struct qat_sym_dev_private *internals,
625 struct qat_sym_session *session,
626 struct rte_crypto_aead_xform *aead_xform)
628 enum qat_device_gen qat_dev_gen = internals->qat_dev->qat_dev_gen;
630 if (qat_dev_gen == QAT_GEN3 &&
631 aead_xform->iv.length == QAT_AES_GCM_SPC_IV_SIZE) {
632 /* Use faster Single-Pass GCM */
633 struct icp_qat_fw_la_cipher_req_params *cipher_param =
634 (void *) &session->fw_req.serv_specif_rqpars;
636 session->is_single_pass = 1;
637 session->min_qat_dev_gen = QAT_GEN3;
638 session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER;
639 session->qat_mode = ICP_QAT_HW_CIPHER_AEAD_MODE;
640 session->cipher_iv.offset = aead_xform->iv.offset;
641 session->cipher_iv.length = aead_xform->iv.length;
642 if (qat_sym_session_aead_create_cd_cipher(session,
643 aead_xform->key.data, aead_xform->key.length))
645 session->aad_len = aead_xform->aad_length;
646 session->digest_length = aead_xform->digest_length;
647 if (aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
648 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
649 session->auth_op = ICP_QAT_HW_AUTH_GENERATE;
650 ICP_QAT_FW_LA_RET_AUTH_SET(
651 session->fw_req.comn_hdr.serv_specif_flags,
652 ICP_QAT_FW_LA_RET_AUTH_RES);
654 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
655 session->auth_op = ICP_QAT_HW_AUTH_VERIFY;
656 ICP_QAT_FW_LA_CMP_AUTH_SET(
657 session->fw_req.comn_hdr.serv_specif_flags,
658 ICP_QAT_FW_LA_CMP_AUTH_RES);
660 ICP_QAT_FW_LA_SINGLE_PASS_PROTO_FLAG_SET(
661 session->fw_req.comn_hdr.serv_specif_flags,
662 ICP_QAT_FW_LA_SINGLE_PASS_PROTO);
663 ICP_QAT_FW_LA_PROTO_SET(
664 session->fw_req.comn_hdr.serv_specif_flags,
665 ICP_QAT_FW_LA_NO_PROTO);
666 ICP_QAT_FW_LA_GCM_IV_LEN_FLAG_SET(
667 session->fw_req.comn_hdr.serv_specif_flags,
668 ICP_QAT_FW_LA_GCM_IV_LEN_12_OCTETS);
669 session->fw_req.comn_hdr.service_cmd_id =
670 ICP_QAT_FW_LA_CMD_CIPHER;
671 session->cd.cipher.cipher_config.val =
672 ICP_QAT_HW_CIPHER_CONFIG_BUILD(
673 ICP_QAT_HW_CIPHER_AEAD_MODE,
674 session->qat_cipher_alg,
675 ICP_QAT_HW_CIPHER_NO_CONVERT,
677 QAT_FIELD_SET(session->cd.cipher.cipher_config.val,
678 aead_xform->digest_length,
679 QAT_CIPHER_AEAD_HASH_CMP_LEN_BITPOS,
680 QAT_CIPHER_AEAD_HASH_CMP_LEN_MASK);
681 session->cd.cipher.cipher_config.reserved =
682 ICP_QAT_HW_CIPHER_CONFIG_BUILD_UPPER(
683 aead_xform->aad_length);
684 cipher_param->spc_aad_sz = aead_xform->aad_length;
685 cipher_param->spc_auth_res_sz = aead_xform->digest_length;
691 qat_sym_session_configure_auth(struct rte_cryptodev *dev,
692 struct rte_crypto_sym_xform *xform,
693 struct qat_sym_session *session)
695 struct rte_crypto_auth_xform *auth_xform = qat_get_auth_xform(xform);
696 struct qat_sym_dev_private *internals = dev->data->dev_private;
697 const uint8_t *key_data = auth_xform->key.data;
698 uint8_t key_length = auth_xform->key.length;
699 session->aes_cmac = 0;
701 session->auth_iv.offset = auth_xform->iv.offset;
702 session->auth_iv.length = auth_xform->iv.length;
703 session->auth_mode = ICP_QAT_HW_AUTH_MODE1;
705 switch (auth_xform->algo) {
706 case RTE_CRYPTO_AUTH_SHA1:
707 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA1;
708 session->auth_mode = ICP_QAT_HW_AUTH_MODE0;
710 case RTE_CRYPTO_AUTH_SHA224:
711 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA224;
712 session->auth_mode = ICP_QAT_HW_AUTH_MODE0;
714 case RTE_CRYPTO_AUTH_SHA256:
715 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA256;
716 session->auth_mode = ICP_QAT_HW_AUTH_MODE0;
718 case RTE_CRYPTO_AUTH_SHA384:
719 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA384;
720 session->auth_mode = ICP_QAT_HW_AUTH_MODE0;
722 case RTE_CRYPTO_AUTH_SHA512:
723 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA512;
724 session->auth_mode = ICP_QAT_HW_AUTH_MODE0;
726 case RTE_CRYPTO_AUTH_SHA1_HMAC:
727 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA1;
729 case RTE_CRYPTO_AUTH_SHA224_HMAC:
730 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA224;
732 case RTE_CRYPTO_AUTH_SHA256_HMAC:
733 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA256;
735 case RTE_CRYPTO_AUTH_SHA384_HMAC:
736 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA384;
738 case RTE_CRYPTO_AUTH_SHA512_HMAC:
739 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA512;
741 case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
742 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
744 case RTE_CRYPTO_AUTH_AES_CMAC:
745 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
746 session->aes_cmac = 1;
748 case RTE_CRYPTO_AUTH_AES_GMAC:
749 if (qat_sym_validate_aes_key(auth_xform->key.length,
750 &session->qat_cipher_alg) != 0) {
751 QAT_LOG(ERR, "Invalid AES key size");
754 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
755 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
756 if (session->auth_iv.length == 0)
757 session->auth_iv.length = AES_GCM_J0_LEN;
760 case RTE_CRYPTO_AUTH_SNOW3G_UIA2:
761 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2;
763 case RTE_CRYPTO_AUTH_MD5_HMAC:
764 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_MD5;
766 case RTE_CRYPTO_AUTH_NULL:
767 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_NULL;
769 case RTE_CRYPTO_AUTH_KASUMI_F9:
770 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_KASUMI_F9;
772 case RTE_CRYPTO_AUTH_ZUC_EIA3:
773 if (!qat_is_auth_alg_supported(auth_xform->algo, internals)) {
774 QAT_LOG(ERR, "%s not supported on this device",
775 rte_crypto_auth_algorithm_strings
779 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3;
781 case RTE_CRYPTO_AUTH_MD5:
782 case RTE_CRYPTO_AUTH_AES_CBC_MAC:
783 QAT_LOG(ERR, "Crypto: Unsupported hash alg %u",
787 QAT_LOG(ERR, "Crypto: Undefined Hash algo %u specified",
792 if (auth_xform->algo == RTE_CRYPTO_AUTH_AES_GMAC) {
793 if (auth_xform->op == RTE_CRYPTO_AUTH_OP_GENERATE) {
794 session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER_HASH;
795 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
797 * It needs to create cipher desc content first,
798 * then authentication
801 if (qat_sym_session_aead_create_cd_cipher(session,
802 auth_xform->key.data,
803 auth_xform->key.length))
806 if (qat_sym_session_aead_create_cd_auth(session,
810 auth_xform->digest_length,
814 session->qat_cmd = ICP_QAT_FW_LA_CMD_HASH_CIPHER;
815 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
817 * It needs to create authentication desc content first,
821 if (qat_sym_session_aead_create_cd_auth(session,
825 auth_xform->digest_length,
829 if (qat_sym_session_aead_create_cd_cipher(session,
830 auth_xform->key.data,
831 auth_xform->key.length))
834 /* Restore to authentication only only */
835 session->qat_cmd = ICP_QAT_FW_LA_CMD_AUTH;
837 if (qat_sym_session_aead_create_cd_auth(session,
841 auth_xform->digest_length,
846 session->digest_length = auth_xform->digest_length;
851 qat_sym_session_configure_aead(struct rte_cryptodev *dev,
852 struct rte_crypto_sym_xform *xform,
853 struct qat_sym_session *session)
855 struct rte_crypto_aead_xform *aead_xform = &xform->aead;
856 enum rte_crypto_auth_operation crypto_operation;
859 * Store AEAD IV parameters as cipher IV,
860 * to avoid unnecessary memory usage
862 session->cipher_iv.offset = xform->aead.iv.offset;
863 session->cipher_iv.length = xform->aead.iv.length;
865 session->auth_mode = ICP_QAT_HW_AUTH_MODE1;
867 switch (aead_xform->algo) {
868 case RTE_CRYPTO_AEAD_AES_GCM:
869 if (qat_sym_validate_aes_key(aead_xform->key.length,
870 &session->qat_cipher_alg) != 0) {
871 QAT_LOG(ERR, "Invalid AES key size");
874 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
875 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
876 if (session->cipher_iv.length == 0)
877 session->cipher_iv.length = AES_GCM_J0_LEN;
880 case RTE_CRYPTO_AEAD_AES_CCM:
881 if (qat_sym_validate_aes_key(aead_xform->key.length,
882 &session->qat_cipher_alg) != 0) {
883 QAT_LOG(ERR, "Invalid AES key size");
886 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
887 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC;
890 QAT_LOG(ERR, "Crypto: Undefined AEAD specified %u\n",
895 session->is_single_pass = 0;
896 if (aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) {
897 /* Use faster Single-Pass GCM if possible */
898 int res = qat_sym_session_handle_single_pass(
899 dev->data->dev_private, session, aead_xform);
902 if (session->is_single_pass)
906 if ((aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT &&
907 aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) ||
908 (aead_xform->op == RTE_CRYPTO_AEAD_OP_DECRYPT &&
909 aead_xform->algo == RTE_CRYPTO_AEAD_AES_CCM)) {
910 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
912 * It needs to create cipher desc content first,
913 * then authentication
915 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
916 RTE_CRYPTO_AUTH_OP_GENERATE : RTE_CRYPTO_AUTH_OP_VERIFY;
918 if (qat_sym_session_aead_create_cd_cipher(session,
919 aead_xform->key.data,
920 aead_xform->key.length))
923 if (qat_sym_session_aead_create_cd_auth(session,
924 aead_xform->key.data,
925 aead_xform->key.length,
926 aead_xform->aad_length,
927 aead_xform->digest_length,
931 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
933 * It needs to create authentication desc content first,
937 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
938 RTE_CRYPTO_AUTH_OP_VERIFY : RTE_CRYPTO_AUTH_OP_GENERATE;
940 if (qat_sym_session_aead_create_cd_auth(session,
941 aead_xform->key.data,
942 aead_xform->key.length,
943 aead_xform->aad_length,
944 aead_xform->digest_length,
948 if (qat_sym_session_aead_create_cd_cipher(session,
949 aead_xform->key.data,
950 aead_xform->key.length))
954 session->digest_length = aead_xform->digest_length;
958 unsigned int qat_sym_session_get_private_size(
959 struct rte_cryptodev *dev __rte_unused)
961 return RTE_ALIGN_CEIL(sizeof(struct qat_sym_session), 8);
964 /* returns block size in bytes per cipher algo */
965 int qat_cipher_get_block_size(enum icp_qat_hw_cipher_algo qat_cipher_alg)
967 switch (qat_cipher_alg) {
968 case ICP_QAT_HW_CIPHER_ALGO_DES:
969 return ICP_QAT_HW_DES_BLK_SZ;
970 case ICP_QAT_HW_CIPHER_ALGO_3DES:
971 return ICP_QAT_HW_3DES_BLK_SZ;
972 case ICP_QAT_HW_CIPHER_ALGO_AES128:
973 case ICP_QAT_HW_CIPHER_ALGO_AES192:
974 case ICP_QAT_HW_CIPHER_ALGO_AES256:
975 return ICP_QAT_HW_AES_BLK_SZ;
977 QAT_LOG(ERR, "invalid block cipher alg %u", qat_cipher_alg);
984 * Returns size in bytes per hash algo for state1 size field in cd_ctrl
985 * This is digest size rounded up to nearest quadword
987 static int qat_hash_get_state1_size(enum icp_qat_hw_auth_algo qat_hash_alg)
989 switch (qat_hash_alg) {
990 case ICP_QAT_HW_AUTH_ALGO_SHA1:
991 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA1_STATE1_SZ,
992 QAT_HW_DEFAULT_ALIGNMENT);
993 case ICP_QAT_HW_AUTH_ALGO_SHA224:
994 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA224_STATE1_SZ,
995 QAT_HW_DEFAULT_ALIGNMENT);
996 case ICP_QAT_HW_AUTH_ALGO_SHA256:
997 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA256_STATE1_SZ,
998 QAT_HW_DEFAULT_ALIGNMENT);
999 case ICP_QAT_HW_AUTH_ALGO_SHA384:
1000 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA384_STATE1_SZ,
1001 QAT_HW_DEFAULT_ALIGNMENT);
1002 case ICP_QAT_HW_AUTH_ALGO_SHA512:
1003 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
1004 QAT_HW_DEFAULT_ALIGNMENT);
1005 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
1006 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ,
1007 QAT_HW_DEFAULT_ALIGNMENT);
1008 case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
1009 case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
1010 return QAT_HW_ROUND_UP(ICP_QAT_HW_GALOIS_128_STATE1_SZ,
1011 QAT_HW_DEFAULT_ALIGNMENT);
1012 case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
1013 return QAT_HW_ROUND_UP(ICP_QAT_HW_ZUC_3G_EIA3_STATE1_SZ,
1014 QAT_HW_DEFAULT_ALIGNMENT);
1015 case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
1016 return QAT_HW_ROUND_UP(ICP_QAT_HW_SNOW_3G_UIA2_STATE1_SZ,
1017 QAT_HW_DEFAULT_ALIGNMENT);
1018 case ICP_QAT_HW_AUTH_ALGO_MD5:
1019 return QAT_HW_ROUND_UP(ICP_QAT_HW_MD5_STATE1_SZ,
1020 QAT_HW_DEFAULT_ALIGNMENT);
1021 case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
1022 return QAT_HW_ROUND_UP(ICP_QAT_HW_KASUMI_F9_STATE1_SZ,
1023 QAT_HW_DEFAULT_ALIGNMENT);
1024 case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
1025 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_CBC_MAC_STATE1_SZ,
1026 QAT_HW_DEFAULT_ALIGNMENT);
1027 case ICP_QAT_HW_AUTH_ALGO_NULL:
1028 return QAT_HW_ROUND_UP(ICP_QAT_HW_NULL_STATE1_SZ,
1029 QAT_HW_DEFAULT_ALIGNMENT);
1030 case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
1031 /* return maximum state1 size in this case */
1032 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
1033 QAT_HW_DEFAULT_ALIGNMENT);
1035 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
1041 /* returns digest size in bytes per hash algo */
1042 static int qat_hash_get_digest_size(enum icp_qat_hw_auth_algo qat_hash_alg)
1044 switch (qat_hash_alg) {
1045 case ICP_QAT_HW_AUTH_ALGO_SHA1:
1046 return ICP_QAT_HW_SHA1_STATE1_SZ;
1047 case ICP_QAT_HW_AUTH_ALGO_SHA224:
1048 return ICP_QAT_HW_SHA224_STATE1_SZ;
1049 case ICP_QAT_HW_AUTH_ALGO_SHA256:
1050 return ICP_QAT_HW_SHA256_STATE1_SZ;
1051 case ICP_QAT_HW_AUTH_ALGO_SHA384:
1052 return ICP_QAT_HW_SHA384_STATE1_SZ;
1053 case ICP_QAT_HW_AUTH_ALGO_SHA512:
1054 return ICP_QAT_HW_SHA512_STATE1_SZ;
1055 case ICP_QAT_HW_AUTH_ALGO_MD5:
1056 return ICP_QAT_HW_MD5_STATE1_SZ;
1057 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
1058 return ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1059 case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
1060 /* return maximum digest size in this case */
1061 return ICP_QAT_HW_SHA512_STATE1_SZ;
1063 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
1069 /* returns block size in byes per hash algo */
1070 static int qat_hash_get_block_size(enum icp_qat_hw_auth_algo qat_hash_alg)
1072 switch (qat_hash_alg) {
1073 case ICP_QAT_HW_AUTH_ALGO_SHA1:
1075 case ICP_QAT_HW_AUTH_ALGO_SHA224:
1076 return SHA256_CBLOCK;
1077 case ICP_QAT_HW_AUTH_ALGO_SHA256:
1078 return SHA256_CBLOCK;
1079 case ICP_QAT_HW_AUTH_ALGO_SHA384:
1080 return SHA512_CBLOCK;
1081 case ICP_QAT_HW_AUTH_ALGO_SHA512:
1082 return SHA512_CBLOCK;
1083 case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
1085 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
1086 return ICP_QAT_HW_AES_BLK_SZ;
1087 case ICP_QAT_HW_AUTH_ALGO_MD5:
1089 case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
1090 /* return maximum block size in this case */
1091 return SHA512_CBLOCK;
1093 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
1099 static int partial_hash_sha1(uint8_t *data_in, uint8_t *data_out)
1103 if (!SHA1_Init(&ctx))
1105 SHA1_Transform(&ctx, data_in);
1106 rte_memcpy(data_out, &ctx, SHA_DIGEST_LENGTH);
1110 static int partial_hash_sha224(uint8_t *data_in, uint8_t *data_out)
1114 if (!SHA224_Init(&ctx))
1116 SHA256_Transform(&ctx, data_in);
1117 rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
1121 static int partial_hash_sha256(uint8_t *data_in, uint8_t *data_out)
1125 if (!SHA256_Init(&ctx))
1127 SHA256_Transform(&ctx, data_in);
1128 rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
1132 static int partial_hash_sha384(uint8_t *data_in, uint8_t *data_out)
1136 if (!SHA384_Init(&ctx))
1138 SHA512_Transform(&ctx, data_in);
1139 rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
1143 static int partial_hash_sha512(uint8_t *data_in, uint8_t *data_out)
1147 if (!SHA512_Init(&ctx))
1149 SHA512_Transform(&ctx, data_in);
1150 rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
1154 static int partial_hash_md5(uint8_t *data_in, uint8_t *data_out)
1158 if (!MD5_Init(&ctx))
1160 MD5_Transform(&ctx, data_in);
1161 rte_memcpy(data_out, &ctx, MD5_DIGEST_LENGTH);
1166 static int partial_hash_compute(enum icp_qat_hw_auth_algo hash_alg,
1171 uint8_t digest[qat_hash_get_digest_size(
1172 ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1173 uint32_t *hash_state_out_be32;
1174 uint64_t *hash_state_out_be64;
1177 digest_size = qat_hash_get_digest_size(hash_alg);
1178 if (digest_size <= 0)
1181 hash_state_out_be32 = (uint32_t *)data_out;
1182 hash_state_out_be64 = (uint64_t *)data_out;
1185 case ICP_QAT_HW_AUTH_ALGO_SHA1:
1186 if (partial_hash_sha1(data_in, digest))
1188 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
1189 *hash_state_out_be32 =
1190 rte_bswap32(*(((uint32_t *)digest)+i));
1192 case ICP_QAT_HW_AUTH_ALGO_SHA224:
1193 if (partial_hash_sha224(data_in, digest))
1195 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
1196 *hash_state_out_be32 =
1197 rte_bswap32(*(((uint32_t *)digest)+i));
1199 case ICP_QAT_HW_AUTH_ALGO_SHA256:
1200 if (partial_hash_sha256(data_in, digest))
1202 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
1203 *hash_state_out_be32 =
1204 rte_bswap32(*(((uint32_t *)digest)+i));
1206 case ICP_QAT_HW_AUTH_ALGO_SHA384:
1207 if (partial_hash_sha384(data_in, digest))
1209 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
1210 *hash_state_out_be64 =
1211 rte_bswap64(*(((uint64_t *)digest)+i));
1213 case ICP_QAT_HW_AUTH_ALGO_SHA512:
1214 if (partial_hash_sha512(data_in, digest))
1216 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
1217 *hash_state_out_be64 =
1218 rte_bswap64(*(((uint64_t *)digest)+i));
1220 case ICP_QAT_HW_AUTH_ALGO_MD5:
1221 if (partial_hash_md5(data_in, data_out))
1225 QAT_LOG(ERR, "invalid hash alg %u", hash_alg);
1231 #define HMAC_IPAD_VALUE 0x36
1232 #define HMAC_OPAD_VALUE 0x5c
1233 #define HASH_XCBC_PRECOMP_KEY_NUM 3
1235 static const uint8_t AES_CMAC_SEED[ICP_QAT_HW_AES_128_KEY_SZ];
1237 static void aes_cmac_key_derive(uint8_t *base, uint8_t *derived)
1241 derived[0] = base[0] << 1;
1242 for (i = 1; i < ICP_QAT_HW_AES_BLK_SZ ; i++) {
1243 derived[i] = base[i] << 1;
1244 derived[i - 1] |= base[i] >> 7;
1248 derived[ICP_QAT_HW_AES_BLK_SZ - 1] ^= QAT_AES_CMAC_CONST_RB;
1251 static int qat_sym_do_precomputes(enum icp_qat_hw_auth_algo hash_alg,
1252 const uint8_t *auth_key,
1253 uint16_t auth_keylen,
1254 uint8_t *p_state_buf,
1255 uint16_t *p_state_len,
1259 uint8_t ipad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1260 uint8_t opad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1263 if (hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC) {
1269 uint8_t k0[ICP_QAT_HW_AES_128_KEY_SZ];
1272 auth_keylen = ICP_QAT_HW_AES_128_KEY_SZ;
1274 in = rte_zmalloc("AES CMAC K1",
1275 ICP_QAT_HW_AES_128_KEY_SZ, 16);
1278 QAT_LOG(ERR, "Failed to alloc memory");
1282 rte_memcpy(in, AES_CMAC_SEED,
1283 ICP_QAT_HW_AES_128_KEY_SZ);
1284 rte_memcpy(p_state_buf, auth_key, auth_keylen);
1286 if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1292 AES_encrypt(in, k0, &enc_key);
1294 k1 = p_state_buf + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1295 k2 = k1 + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1297 aes_cmac_key_derive(k0, k1);
1298 aes_cmac_key_derive(k1, k2);
1300 memset(k0, 0, ICP_QAT_HW_AES_128_KEY_SZ);
1301 *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
1305 static uint8_t qat_aes_xcbc_key_seed[
1306 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ] = {
1307 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1308 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1309 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1310 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1311 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1312 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1316 uint8_t *out = p_state_buf;
1320 in = rte_zmalloc("working mem for key",
1321 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ, 16);
1323 QAT_LOG(ERR, "Failed to alloc memory");
1327 rte_memcpy(in, qat_aes_xcbc_key_seed,
1328 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1329 for (x = 0; x < HASH_XCBC_PRECOMP_KEY_NUM; x++) {
1330 if (AES_set_encrypt_key(auth_key,
1334 (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ));
1336 (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ),
1337 0, ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1340 AES_encrypt(in, out, &enc_key);
1341 in += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1342 out += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1344 *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
1345 rte_free(in - x*ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ);
1349 } else if ((hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_128) ||
1350 (hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_64)) {
1352 uint8_t *out = p_state_buf;
1355 memset(p_state_buf, 0, ICP_QAT_HW_GALOIS_H_SZ +
1356 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1357 ICP_QAT_HW_GALOIS_E_CTR0_SZ);
1358 in = rte_zmalloc("working mem for key",
1359 ICP_QAT_HW_GALOIS_H_SZ, 16);
1361 QAT_LOG(ERR, "Failed to alloc memory");
1365 memset(in, 0, ICP_QAT_HW_GALOIS_H_SZ);
1366 if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1370 AES_encrypt(in, out, &enc_key);
1371 *p_state_len = ICP_QAT_HW_GALOIS_H_SZ +
1372 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1373 ICP_QAT_HW_GALOIS_E_CTR0_SZ;
1378 block_size = qat_hash_get_block_size(hash_alg);
1381 /* init ipad and opad from key and xor with fixed values */
1382 memset(ipad, 0, block_size);
1383 memset(opad, 0, block_size);
1385 if (auth_keylen > (unsigned int)block_size) {
1386 QAT_LOG(ERR, "invalid keylen %u", auth_keylen);
1389 rte_memcpy(ipad, auth_key, auth_keylen);
1390 rte_memcpy(opad, auth_key, auth_keylen);
1392 for (i = 0; i < block_size; i++) {
1393 uint8_t *ipad_ptr = ipad + i;
1394 uint8_t *opad_ptr = opad + i;
1395 *ipad_ptr ^= HMAC_IPAD_VALUE;
1396 *opad_ptr ^= HMAC_OPAD_VALUE;
1399 /* do partial hash of ipad and copy to state1 */
1400 if (partial_hash_compute(hash_alg, ipad, p_state_buf)) {
1401 memset(ipad, 0, block_size);
1402 memset(opad, 0, block_size);
1403 QAT_LOG(ERR, "ipad precompute failed");
1408 * State len is a multiple of 8, so may be larger than the digest.
1409 * Put the partial hash of opad state_len bytes after state1
1411 *p_state_len = qat_hash_get_state1_size(hash_alg);
1412 if (partial_hash_compute(hash_alg, opad, p_state_buf + *p_state_len)) {
1413 memset(ipad, 0, block_size);
1414 memset(opad, 0, block_size);
1415 QAT_LOG(ERR, "opad precompute failed");
1419 /* don't leave data lying around */
1420 memset(ipad, 0, block_size);
1421 memset(opad, 0, block_size);
1426 qat_sym_session_init_common_hdr(struct icp_qat_fw_comn_req_hdr *header,
1427 enum qat_sym_proto_flag proto_flags)
1430 ICP_QAT_FW_COMN_HDR_FLAGS_BUILD(ICP_QAT_FW_COMN_REQ_FLAG_SET);
1431 header->service_type = ICP_QAT_FW_COMN_REQ_CPM_FW_LA;
1432 header->comn_req_flags =
1433 ICP_QAT_FW_COMN_FLAGS_BUILD(QAT_COMN_CD_FLD_TYPE_64BIT_ADR,
1434 QAT_COMN_PTR_TYPE_FLAT);
1435 ICP_QAT_FW_LA_PARTIAL_SET(header->serv_specif_flags,
1436 ICP_QAT_FW_LA_PARTIAL_NONE);
1437 ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(header->serv_specif_flags,
1438 ICP_QAT_FW_CIPH_IV_16BYTE_DATA);
1440 switch (proto_flags) {
1441 case QAT_CRYPTO_PROTO_FLAG_NONE:
1442 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1443 ICP_QAT_FW_LA_NO_PROTO);
1445 case QAT_CRYPTO_PROTO_FLAG_CCM:
1446 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1447 ICP_QAT_FW_LA_CCM_PROTO);
1449 case QAT_CRYPTO_PROTO_FLAG_GCM:
1450 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1451 ICP_QAT_FW_LA_GCM_PROTO);
1453 case QAT_CRYPTO_PROTO_FLAG_SNOW3G:
1454 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1455 ICP_QAT_FW_LA_SNOW_3G_PROTO);
1457 case QAT_CRYPTO_PROTO_FLAG_ZUC:
1458 ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(header->serv_specif_flags,
1459 ICP_QAT_FW_LA_ZUC_3G_PROTO);
1463 ICP_QAT_FW_LA_UPDATE_STATE_SET(header->serv_specif_flags,
1464 ICP_QAT_FW_LA_NO_UPDATE_STATE);
1465 ICP_QAT_FW_LA_DIGEST_IN_BUFFER_SET(header->serv_specif_flags,
1466 ICP_QAT_FW_LA_NO_DIGEST_IN_BUFFER);
/*
 * Snow3G and ZUC should never use this function: they set their protocol
 * flag in both the cipher and auth parts of the content descriptor
 * building functions instead.
 */
1474 static enum qat_sym_proto_flag
1475 qat_get_crypto_proto_flag(uint16_t flags)
1477 int proto = ICP_QAT_FW_LA_PROTO_GET(flags);
1478 enum qat_sym_proto_flag qat_proto_flag =
1479 QAT_CRYPTO_PROTO_FLAG_NONE;
1482 case ICP_QAT_FW_LA_GCM_PROTO:
1483 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
1485 case ICP_QAT_FW_LA_CCM_PROTO:
1486 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
1490 return qat_proto_flag;
1493 int qat_sym_session_aead_create_cd_cipher(struct qat_sym_session *cdesc,
1494 const uint8_t *cipherkey,
1495 uint32_t cipherkeylen)
1497 struct icp_qat_hw_cipher_algo_blk *cipher;
1498 struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
1499 struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
1500 struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
1501 void *ptr = &req_tmpl->cd_ctrl;
1502 struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
1503 struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
1504 enum icp_qat_hw_cipher_convert key_convert;
1505 enum qat_sym_proto_flag qat_proto_flag =
1506 QAT_CRYPTO_PROTO_FLAG_NONE;
1507 uint32_t total_key_size;
1508 uint16_t cipher_offset, cd_size;
1509 uint32_t wordIndex = 0;
1510 uint32_t *temp_key = NULL;
1512 if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER) {
1513 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1514 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1515 ICP_QAT_FW_SLICE_CIPHER);
1516 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1517 ICP_QAT_FW_SLICE_DRAM_WR);
1518 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1519 ICP_QAT_FW_LA_NO_RET_AUTH_RES);
1520 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1521 ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
1522 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1523 } else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
1524 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1525 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1526 ICP_QAT_FW_SLICE_CIPHER);
1527 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1528 ICP_QAT_FW_SLICE_AUTH);
1529 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1530 ICP_QAT_FW_SLICE_AUTH);
1531 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1532 ICP_QAT_FW_SLICE_DRAM_WR);
1533 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1534 } else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
1535 QAT_LOG(ERR, "Invalid param, must be a cipher command.");
1539 if (cdesc->qat_mode == ICP_QAT_HW_CIPHER_CTR_MODE) {
1541 * CTR Streaming ciphers are a special case. Decrypt = encrypt
1542 * Overriding default values previously set
1544 cdesc->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
1545 key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
1546 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2
1547 || cdesc->qat_cipher_alg ==
1548 ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)
1549 key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
1550 else if (cdesc->qat_dir == ICP_QAT_HW_CIPHER_ENCRYPT)
1551 key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
1553 key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
1555 if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
1556 total_key_size = ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ +
1557 ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
1558 cipher_cd_ctrl->cipher_state_sz =
1559 ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
1560 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
1562 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
1563 total_key_size = ICP_QAT_HW_KASUMI_F8_KEY_SZ;
1564 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_KASUMI_BLK_SZ >> 3;
1565 cipher_cd_ctrl->cipher_padding_sz =
1566 (2 * ICP_QAT_HW_KASUMI_BLK_SZ) >> 3;
1567 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES) {
1568 total_key_size = ICP_QAT_HW_3DES_KEY_SZ;
1569 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_3DES_BLK_SZ >> 3;
1571 qat_get_crypto_proto_flag(header->serv_specif_flags);
1572 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_DES) {
1573 total_key_size = ICP_QAT_HW_DES_KEY_SZ;
1574 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_DES_BLK_SZ >> 3;
1576 qat_get_crypto_proto_flag(header->serv_specif_flags);
1577 } else if (cdesc->qat_cipher_alg ==
1578 ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
1579 total_key_size = ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ +
1580 ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
1581 cipher_cd_ctrl->cipher_state_sz =
1582 ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
1583 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
1584 cdesc->min_qat_dev_gen = QAT_GEN2;
1586 total_key_size = cipherkeylen;
1587 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_AES_BLK_SZ >> 3;
1589 qat_get_crypto_proto_flag(header->serv_specif_flags);
1591 cipher_cd_ctrl->cipher_key_sz = total_key_size >> 3;
1592 cipher_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
1593 cipher_cd_ctrl->cipher_cfg_offset = cipher_offset >> 3;
1595 header->service_cmd_id = cdesc->qat_cmd;
1596 qat_sym_session_init_common_hdr(header, qat_proto_flag);
1598 cipher = (struct icp_qat_hw_cipher_algo_blk *)cdesc->cd_cur_ptr;
1599 cipher->cipher_config.val =
1600 ICP_QAT_HW_CIPHER_CONFIG_BUILD(cdesc->qat_mode,
1601 cdesc->qat_cipher_alg, key_convert,
1604 if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
1605 temp_key = (uint32_t *)(cdesc->cd_cur_ptr +
1606 sizeof(struct icp_qat_hw_cipher_config)
1608 memcpy(cipher->key, cipherkey, cipherkeylen);
1609 memcpy(temp_key, cipherkey, cipherkeylen);
1611 /* XOR Key with KASUMI F8 key modifier at 4 bytes level */
1612 for (wordIndex = 0; wordIndex < (cipherkeylen >> 2);
1614 temp_key[wordIndex] ^= KASUMI_F8_KEY_MODIFIER_4_BYTES;
1616 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1617 cipherkeylen + cipherkeylen;
1619 memcpy(cipher->key, cipherkey, cipherkeylen);
1620 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1624 if (total_key_size > cipherkeylen) {
1625 uint32_t padding_size = total_key_size-cipherkeylen;
1626 if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
1627 && (cipherkeylen == QAT_3DES_KEY_SZ_OPT2)) {
1628 /* K3 not provided so use K1 = K3*/
1629 memcpy(cdesc->cd_cur_ptr, cipherkey, padding_size);
1630 } else if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
1631 && (cipherkeylen == QAT_3DES_KEY_SZ_OPT3)) {
1632 /* K2 and K3 not provided so use K1 = K2 = K3*/
1633 memcpy(cdesc->cd_cur_ptr, cipherkey,
1635 memcpy(cdesc->cd_cur_ptr+cipherkeylen,
1636 cipherkey, cipherkeylen);
1638 memset(cdesc->cd_cur_ptr, 0, padding_size);
1640 cdesc->cd_cur_ptr += padding_size;
1642 cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
1643 cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;
1648 int qat_sym_session_aead_create_cd_auth(struct qat_sym_session *cdesc,
1649 const uint8_t *authkey,
1650 uint32_t authkeylen,
1651 uint32_t aad_length,
1652 uint32_t digestsize,
1653 unsigned int operation)
1655 struct icp_qat_hw_auth_setup *hash;
1656 struct icp_qat_hw_cipher_algo_blk *cipherconfig;
1657 struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
1658 struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
1659 struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
1660 void *ptr = &req_tmpl->cd_ctrl;
1661 struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
1662 struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
1663 struct icp_qat_fw_la_auth_req_params *auth_param =
1664 (struct icp_qat_fw_la_auth_req_params *)
1665 ((char *)&req_tmpl->serv_specif_rqpars +
1666 ICP_QAT_FW_HASH_REQUEST_PARAMETERS_OFFSET);
1667 uint16_t state1_size = 0, state2_size = 0;
1668 uint16_t hash_offset, cd_size;
1669 uint32_t *aad_len = NULL;
1670 uint32_t wordIndex = 0;
1672 enum qat_sym_proto_flag qat_proto_flag =
1673 QAT_CRYPTO_PROTO_FLAG_NONE;
1675 if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_AUTH) {
1676 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1677 ICP_QAT_FW_SLICE_AUTH);
1678 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1679 ICP_QAT_FW_SLICE_DRAM_WR);
1680 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1681 } else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
1682 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1683 ICP_QAT_FW_SLICE_AUTH);
1684 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1685 ICP_QAT_FW_SLICE_CIPHER);
1686 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1687 ICP_QAT_FW_SLICE_CIPHER);
1688 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1689 ICP_QAT_FW_SLICE_DRAM_WR);
1690 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1691 } else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
1692 QAT_LOG(ERR, "Invalid param, must be a hash command.");
1696 if (operation == RTE_CRYPTO_AUTH_OP_VERIFY) {
1697 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1698 ICP_QAT_FW_LA_NO_RET_AUTH_RES);
1699 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1700 ICP_QAT_FW_LA_CMP_AUTH_RES);
1701 cdesc->auth_op = ICP_QAT_HW_AUTH_VERIFY;
1703 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1704 ICP_QAT_FW_LA_RET_AUTH_RES);
1705 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1706 ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
1707 cdesc->auth_op = ICP_QAT_HW_AUTH_GENERATE;
1711 * Setup the inner hash config
1713 hash_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
1714 hash = (struct icp_qat_hw_auth_setup *)cdesc->cd_cur_ptr;
1715 hash->auth_config.reserved = 0;
1716 hash->auth_config.config =
1717 ICP_QAT_HW_AUTH_CONFIG_BUILD(cdesc->auth_mode,
1718 cdesc->qat_hash_alg, digestsize);
1720 if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0
1721 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2
1722 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_KASUMI_F9
1723 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3
1724 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC
1725 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC
1726 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_NULL
1728 hash->auth_counter.counter = 0;
1730 int block_size = qat_hash_get_block_size(cdesc->qat_hash_alg);
1734 hash->auth_counter.counter = rte_bswap32(block_size);
1737 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_auth_setup);
1740 * cd_cur_ptr now points at the state1 information.
1742 switch (cdesc->qat_hash_alg) {
1743 case ICP_QAT_HW_AUTH_ALGO_SHA1:
1744 if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0) {
1746 rte_memcpy(cdesc->cd_cur_ptr, sha1InitialState,
1747 sizeof(sha1InitialState));
1748 state1_size = qat_hash_get_state1_size(
1749 cdesc->qat_hash_alg);
1753 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA1, authkey,
1754 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1756 QAT_LOG(ERR, "(SHA)precompute failed");
1759 state2_size = RTE_ALIGN_CEIL(ICP_QAT_HW_SHA1_STATE2_SZ, 8);
1761 case ICP_QAT_HW_AUTH_ALGO_SHA224:
1762 if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0) {
1764 rte_memcpy(cdesc->cd_cur_ptr, sha224InitialState,
1765 sizeof(sha224InitialState));
1766 state1_size = qat_hash_get_state1_size(
1767 cdesc->qat_hash_alg);
1771 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA224, authkey,
1772 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1774 QAT_LOG(ERR, "(SHA)precompute failed");
1777 state2_size = ICP_QAT_HW_SHA224_STATE2_SZ;
1779 case ICP_QAT_HW_AUTH_ALGO_SHA256:
1780 if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0) {
1782 rte_memcpy(cdesc->cd_cur_ptr, sha256InitialState,
1783 sizeof(sha256InitialState));
1784 state1_size = qat_hash_get_state1_size(
1785 cdesc->qat_hash_alg);
1789 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA256, authkey,
1790 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1792 QAT_LOG(ERR, "(SHA)precompute failed");
1795 state2_size = ICP_QAT_HW_SHA256_STATE2_SZ;
1797 case ICP_QAT_HW_AUTH_ALGO_SHA384:
1798 if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0) {
1800 rte_memcpy(cdesc->cd_cur_ptr, sha384InitialState,
1801 sizeof(sha384InitialState));
1802 state1_size = qat_hash_get_state1_size(
1803 cdesc->qat_hash_alg);
1807 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA384, authkey,
1808 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1810 QAT_LOG(ERR, "(SHA)precompute failed");
1813 state2_size = ICP_QAT_HW_SHA384_STATE2_SZ;
1815 case ICP_QAT_HW_AUTH_ALGO_SHA512:
1816 if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0) {
1818 rte_memcpy(cdesc->cd_cur_ptr, sha512InitialState,
1819 sizeof(sha512InitialState));
1820 state1_size = qat_hash_get_state1_size(
1821 cdesc->qat_hash_alg);
1825 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA512, authkey,
1826 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1828 QAT_LOG(ERR, "(SHA)precompute failed");
1831 state2_size = ICP_QAT_HW_SHA512_STATE2_SZ;
1833 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
1834 state1_size = ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1836 if (cdesc->aes_cmac)
1837 memset(cdesc->cd_cur_ptr, 0, state1_size);
1838 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC,
1839 authkey, authkeylen, cdesc->cd_cur_ptr + state1_size,
1840 &state2_size, cdesc->aes_cmac)) {
1841 cdesc->aes_cmac ? QAT_LOG(ERR,
1842 "(CMAC)precompute failed")
1844 "(XCBC)precompute failed");
1848 case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
1849 case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
1850 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
1851 state1_size = ICP_QAT_HW_GALOIS_128_STATE1_SZ;
1852 if (qat_sym_do_precomputes(cdesc->qat_hash_alg, authkey,
1853 authkeylen, cdesc->cd_cur_ptr + state1_size,
1854 &state2_size, cdesc->aes_cmac)) {
1855 QAT_LOG(ERR, "(GCM)precompute failed");
1859 * Write (the length of AAD) into bytes 16-19 of state2
1860 * in big-endian format. This field is 8 bytes
1862 auth_param->u2.aad_sz =
1863 RTE_ALIGN_CEIL(aad_length, 16);
1864 auth_param->hash_state_sz = (auth_param->u2.aad_sz) >> 3;
1866 aad_len = (uint32_t *)(cdesc->cd_cur_ptr +
1867 ICP_QAT_HW_GALOIS_128_STATE1_SZ +
1868 ICP_QAT_HW_GALOIS_H_SZ);
1869 *aad_len = rte_bswap32(aad_length);
1870 cdesc->aad_len = aad_length;
1872 case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
1873 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
1874 state1_size = qat_hash_get_state1_size(
1875 ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2);
1876 state2_size = ICP_QAT_HW_SNOW_3G_UIA2_STATE2_SZ;
1877 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
1879 cipherconfig = (struct icp_qat_hw_cipher_algo_blk *)
1880 (cdesc->cd_cur_ptr + state1_size + state2_size);
1881 cipherconfig->cipher_config.val =
1882 ICP_QAT_HW_CIPHER_CONFIG_BUILD(ICP_QAT_HW_CIPHER_ECB_MODE,
1883 ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2,
1884 ICP_QAT_HW_CIPHER_KEY_CONVERT,
1885 ICP_QAT_HW_CIPHER_ENCRYPT);
1886 memcpy(cipherconfig->key, authkey, authkeylen);
1887 memset(cipherconfig->key + authkeylen,
1888 0, ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ);
1889 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1890 authkeylen + ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
1891 auth_param->hash_state_sz = ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
1893 case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
1894 hash->auth_config.config =
1895 ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE0,
1896 cdesc->qat_hash_alg, digestsize);
1897 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
1898 state1_size = qat_hash_get_state1_size(
1899 ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3);
1900 state2_size = ICP_QAT_HW_ZUC_3G_EIA3_STATE2_SZ;
1901 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size
1902 + ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ);
1904 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1905 cdesc->cd_cur_ptr += state1_size + state2_size
1906 + ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
1907 auth_param->hash_state_sz = ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
1908 cdesc->min_qat_dev_gen = QAT_GEN2;
1911 case ICP_QAT_HW_AUTH_ALGO_MD5:
1912 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_MD5, authkey,
1913 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1915 QAT_LOG(ERR, "(MD5)precompute failed");
1918 state2_size = ICP_QAT_HW_MD5_STATE2_SZ;
1920 case ICP_QAT_HW_AUTH_ALGO_NULL:
1921 state1_size = qat_hash_get_state1_size(
1922 ICP_QAT_HW_AUTH_ALGO_NULL);
1923 state2_size = ICP_QAT_HW_NULL_STATE2_SZ;
1925 case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
1926 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
1927 state1_size = qat_hash_get_state1_size(
1928 ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC);
1929 state2_size = ICP_QAT_HW_AES_CBC_MAC_KEY_SZ +
1930 ICP_QAT_HW_AES_CCM_CBC_E_CTR0_SZ;
1932 if (aad_length > 0) {
1933 aad_length += ICP_QAT_HW_CCM_AAD_B0_LEN +
1934 ICP_QAT_HW_CCM_AAD_LEN_INFO;
1935 auth_param->u2.aad_sz =
1936 RTE_ALIGN_CEIL(aad_length,
1937 ICP_QAT_HW_CCM_AAD_ALIGNMENT);
1939 auth_param->u2.aad_sz = ICP_QAT_HW_CCM_AAD_B0_LEN;
1941 cdesc->aad_len = aad_length;
1942 hash->auth_counter.counter = 0;
1944 hash_cd_ctrl->outer_prefix_sz = digestsize;
1945 auth_param->hash_state_sz = digestsize;
1947 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1949 case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
1950 state1_size = qat_hash_get_state1_size(
1951 ICP_QAT_HW_AUTH_ALGO_KASUMI_F9);
1952 state2_size = ICP_QAT_HW_KASUMI_F9_STATE2_SZ;
1953 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
1954 pTempKey = (uint32_t *)(cdesc->cd_cur_ptr + state1_size
1957 * The Inner Hash Initial State2 block must contain IK
1958 * (Initialisation Key), followed by IK XOR-ed with KM
1959 * (Key Modifier): IK||(IK^KM).
1961 /* write the auth key */
1962 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1963 /* initialise temp key with auth key */
1964 memcpy(pTempKey, authkey, authkeylen);
1965 /* XOR Key with KASUMI F9 key modifier at 4 bytes level */
1966 for (wordIndex = 0; wordIndex < (authkeylen >> 2); wordIndex++)
1967 pTempKey[wordIndex] ^= KASUMI_F9_KEY_MODIFIER_4_BYTES;
1970 QAT_LOG(ERR, "Invalid HASH alg %u", cdesc->qat_hash_alg);
1974 /* Request template setup */
1975 qat_sym_session_init_common_hdr(header, qat_proto_flag);
1976 header->service_cmd_id = cdesc->qat_cmd;
1978 /* Auth CD config setup */
1979 hash_cd_ctrl->hash_cfg_offset = hash_offset >> 3;
1980 hash_cd_ctrl->hash_flags = ICP_QAT_FW_AUTH_HDR_FLAG_NO_NESTED;
1981 hash_cd_ctrl->inner_res_sz = digestsize;
1982 hash_cd_ctrl->final_sz = digestsize;
1983 hash_cd_ctrl->inner_state1_sz = state1_size;
1984 auth_param->auth_res_sz = digestsize;
1986 hash_cd_ctrl->inner_state2_sz = state2_size;
1987 hash_cd_ctrl->inner_state2_offset = hash_cd_ctrl->hash_cfg_offset +
1988 ((sizeof(struct icp_qat_hw_auth_setup) +
1989 RTE_ALIGN_CEIL(hash_cd_ctrl->inner_state1_sz, 8))
1992 cdesc->cd_cur_ptr += state1_size + state2_size;
1993 cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
1995 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1996 cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;
2001 int qat_sym_validate_aes_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
2004 case ICP_QAT_HW_AES_128_KEY_SZ:
2005 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
2007 case ICP_QAT_HW_AES_192_KEY_SZ:
2008 *alg = ICP_QAT_HW_CIPHER_ALGO_AES192;
2010 case ICP_QAT_HW_AES_256_KEY_SZ:
2011 *alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
2019 int qat_sym_validate_aes_docsisbpi_key(int key_len,
2020 enum icp_qat_hw_cipher_algo *alg)
2023 case ICP_QAT_HW_AES_128_KEY_SZ:
2024 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
2026 case ICP_QAT_HW_AES_256_KEY_SZ:
2027 *alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
2035 int qat_sym_validate_snow3g_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
2038 case ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ:
2039 *alg = ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2;
2047 int qat_sym_validate_kasumi_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
2050 case ICP_QAT_HW_KASUMI_KEY_SZ:
2051 *alg = ICP_QAT_HW_CIPHER_ALGO_KASUMI;
2059 int qat_sym_validate_des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
2062 case ICP_QAT_HW_DES_KEY_SZ:
2063 *alg = ICP_QAT_HW_CIPHER_ALGO_DES;
2071 int qat_sym_validate_3des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
2074 case QAT_3DES_KEY_SZ_OPT1:
2075 case QAT_3DES_KEY_SZ_OPT2:
2076 case QAT_3DES_KEY_SZ_OPT3:
2077 *alg = ICP_QAT_HW_CIPHER_ALGO_3DES;
2085 int qat_sym_validate_zuc_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
2088 case ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ:
2089 *alg = ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3;