/* SPDX-License-Identifier: (BSD-3-Clause OR GPL-2.0)
 * Copyright(c) 2015-2019 Intel Corporation
 */

#include <openssl/sha.h>	/* Needed to calculate pre-compute values */
#include <openssl/aes.h>	/* Needed to calculate pre-compute values */
#include <openssl/md5.h>	/* Needed to calculate pre-compute values */
#include <openssl/evp.h>	/* Needed for bpi runt block processing */

#include <rte_memcpy.h>
#include <rte_common.h>
#include <rte_spinlock.h>
#include <rte_byteorder.h>
#include <rte_malloc.h>
#include <rte_crypto_sym.h>
#ifdef RTE_LIB_SECURITY
#include <rte_security.h>
#endif

#include "qat_sym_session.h"
#include "qat_sym_pmd.h"
/* SHA1 - 20 bytes - Initialiser state can be found in FIPS stds 180-2 */
static const uint8_t sha1InitialState[] = {
	0x67, 0x45, 0x23, 0x01, 0xef, 0xcd, 0xab, 0x89, 0x98, 0xba,
	0xdc, 0xfe, 0x10, 0x32, 0x54, 0x76, 0xc3, 0xd2, 0xe1, 0xf0};

/* SHA 224 - 32 bytes - Initialiser state can be found in FIPS stds 180-2 */
static const uint8_t sha224InitialState[] = {
	0xc1, 0x05, 0x9e, 0xd8, 0x36, 0x7c, 0xd5, 0x07, 0x30, 0x70, 0xdd,
	0x17, 0xf7, 0x0e, 0x59, 0x39, 0xff, 0xc0, 0x0b, 0x31, 0x68, 0x58,
	0x15, 0x11, 0x64, 0xf9, 0x8f, 0xa7, 0xbe, 0xfa, 0x4f, 0xa4};

/* SHA 256 - 32 bytes - Initialiser state can be found in FIPS stds 180-2 */
static const uint8_t sha256InitialState[] = {
	0x6a, 0x09, 0xe6, 0x67, 0xbb, 0x67, 0xae, 0x85, 0x3c, 0x6e, 0xf3,
	0x72, 0xa5, 0x4f, 0xf5, 0x3a, 0x51, 0x0e, 0x52, 0x7f, 0x9b, 0x05,
	0x68, 0x8c, 0x1f, 0x83, 0xd9, 0xab, 0x5b, 0xe0, 0xcd, 0x19};

/* SHA 384 - 64 bytes - Initialiser state can be found in FIPS stds 180-2 */
static const uint8_t sha384InitialState[] = {
	0xcb, 0xbb, 0x9d, 0x5d, 0xc1, 0x05, 0x9e, 0xd8, 0x62, 0x9a, 0x29,
	0x2a, 0x36, 0x7c, 0xd5, 0x07, 0x91, 0x59, 0x01, 0x5a, 0x30, 0x70,
	0xdd, 0x17, 0x15, 0x2f, 0xec, 0xd8, 0xf7, 0x0e, 0x59, 0x39, 0x67,
	0x33, 0x26, 0x67, 0xff, 0xc0, 0x0b, 0x31, 0x8e, 0xb4, 0x4a, 0x87,
	0x68, 0x58, 0x15, 0x11, 0xdb, 0x0c, 0x2e, 0x0d, 0x64, 0xf9, 0x8f,
	0xa7, 0x47, 0xb5, 0x48, 0x1d, 0xbe, 0xfa, 0x4f, 0xa4};

/* SHA 512 - 64 bytes - Initialiser state can be found in FIPS stds 180-2 */
static const uint8_t sha512InitialState[] = {
	0x6a, 0x09, 0xe6, 0x67, 0xf3, 0xbc, 0xc9, 0x08, 0xbb, 0x67, 0xae,
	0x85, 0x84, 0xca, 0xa7, 0x3b, 0x3c, 0x6e, 0xf3, 0x72, 0xfe, 0x94,
	0xf8, 0x2b, 0xa5, 0x4f, 0xf5, 0x3a, 0x5f, 0x1d, 0x36, 0xf1, 0x51,
	0x0e, 0x52, 0x7f, 0xad, 0xe6, 0x82, 0xd1, 0x9b, 0x05, 0x68, 0x8c,
	0x2b, 0x3e, 0x6c, 0x1f, 0x1f, 0x83, 0xd9, 0xab, 0xfb, 0x41, 0xbd,
	0x6b, 0x5b, 0xe0, 0xcd, 0x19, 0x13, 0x7e, 0x21, 0x79};
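
/* Note: each table above is simply the algorithm's H(0) initial hash value
 * from FIPS 180-2, serialised in big-endian byte order (e.g. the first
 * SHA-256 word 0x6a09e667 is the fractional part of sqrt(2)). The driver
 * copies these byte strings directly into state1 for plain-hash (MODE0)
 * sessions further below.
 */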
/** Frees a context previously created
 *  Depends on openssl libcrypto
 */
static void
bpi_cipher_ctx_free(void *bpi_ctx)
{
	if (bpi_ctx != NULL)
		EVP_CIPHER_CTX_free((EVP_CIPHER_CTX *)bpi_ctx);
}

/** Creates a context in either AES or DES in ECB mode
 *  Depends on openssl libcrypto
 */
static int
bpi_cipher_ctx_init(enum rte_crypto_cipher_algorithm cryptodev_algo,
		enum rte_crypto_cipher_operation direction __rte_unused,
		const uint8_t *key, uint16_t key_length, void **ctx)
{
	const EVP_CIPHER *algo = NULL;
	int ret;

	*ctx = EVP_CIPHER_CTX_new();
	if (*ctx == NULL) {
		ret = -ENOMEM;
		goto ctx_init_err;
	}
	if (cryptodev_algo == RTE_CRYPTO_CIPHER_DES_DOCSISBPI)
		algo = EVP_des_ecb();
	else if (key_length == ICP_QAT_HW_AES_128_KEY_SZ)
		algo = EVP_aes_128_ecb();
	else
		algo = EVP_aes_256_ecb();

	/* IV will be ECB encrypted whether direction is encrypt or decrypt */
	if (EVP_EncryptInit_ex(*ctx, algo, NULL, key, 0) != 1) {
		ret = -EINVAL;
		goto ctx_init_err;
	}
	return 0;

ctx_init_err:
	if (*ctx != NULL)
		EVP_CIPHER_CTX_free(*ctx);
	return ret;
}
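
/* Illustrative only, not part of the driver: a minimal sketch of how the
 * two BPI ECB helpers above pair up. `key` is an assumed caller-provided
 * AES-128 DOCSIS BPI key.
 *
 *	void *bpi_ctx = NULL;
 *	if (bpi_cipher_ctx_init(RTE_CRYPTO_CIPHER_AES_DOCSISBPI,
 *			RTE_CRYPTO_CIPHER_OP_ENCRYPT, key,
 *			ICP_QAT_HW_AES_128_KEY_SZ, &bpi_ctx) == 0) {
 *		// bpi_ctx can now ECB-encrypt IVs for runt block handling
 *		bpi_cipher_ctx_free(bpi_ctx);
 *	}
 */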
static int
qat_is_cipher_alg_supported(enum rte_crypto_cipher_algorithm algo,
		struct qat_sym_dev_private *internals)
{
	int i = 0;
	const struct rte_cryptodev_capabilities *capability;

	while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
			RTE_CRYPTO_OP_TYPE_UNDEFINED) {
		if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
			continue;

		if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_CIPHER)
			continue;

		if (capability->sym.cipher.algo == algo)
			return 1;
	}
	return 0;
}

static int
qat_is_auth_alg_supported(enum rte_crypto_auth_algorithm algo,
		struct qat_sym_dev_private *internals)
{
	int i = 0;
	const struct rte_cryptodev_capabilities *capability;

	while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
			RTE_CRYPTO_OP_TYPE_UNDEFINED) {
		if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
			continue;

		if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_AUTH)
			continue;

		if (capability->sym.auth.algo == algo)
			return 1;
	}
	return 0;
}

void
qat_sym_session_clear(struct rte_cryptodev *dev,
		struct rte_cryptodev_sym_session *sess)
{
	uint8_t index = dev->driver_id;
	void *sess_priv = get_sym_session_private_data(sess, index);
	struct qat_sym_session *s = (struct qat_sym_session *)sess_priv;

	if (sess_priv) {
		if (s->bpi_ctx)
			bpi_cipher_ctx_free(s->bpi_ctx);
		memset(s, 0, qat_sym_session_get_private_size(dev));
		struct rte_mempool *sess_mp = rte_mempool_from_obj(sess_priv);

		set_sym_session_private_data(sess, index, NULL);
		rte_mempool_put(sess_mp, sess_priv);
	}
}
int
qat_get_cmd_id(const struct rte_crypto_sym_xform *xform)
{
	/* Cipher Only */
	if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER && xform->next == NULL)
		return ICP_QAT_FW_LA_CMD_CIPHER;

	/* Authentication Only */
	if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH && xform->next == NULL)
		return ICP_QAT_FW_LA_CMD_AUTH;

	/* AEAD */
	if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
		/* AES-GCM and AES-CCM work in opposite orders: GCM
		 * encrypts first and then generates the hash, whereas
		 * AES-CCM generates the hash first and then encrypts.
		 * A similar relation applies to decryption.
		 */
		if (xform->aead.op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
			if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
				return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
			else
				return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
		} else {
			if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
				return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
			else
				return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
		}
	}

	if (xform->next == NULL)
		return -1;

	/* Cipher then Authenticate */
	if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
			xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
		return ICP_QAT_FW_LA_CMD_CIPHER_HASH;

	/* Authenticate then Cipher */
	if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH &&
			xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
		return ICP_QAT_FW_LA_CMD_HASH_CIPHER;

	return -1;
}
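
/* Illustrative only: how qat_get_cmd_id() classifies a cipher-then-auth
 * xform chain. Field values other than the types/links are omitted.
 *
 *	struct rte_crypto_sym_xform auth_xf = {
 *		.type = RTE_CRYPTO_SYM_XFORM_AUTH, .next = NULL };
 *	struct rte_crypto_sym_xform cipher_xf = {
 *		.type = RTE_CRYPTO_SYM_XFORM_CIPHER, .next = &auth_xf };
 *
 *	qat_get_cmd_id(&cipher_xf) -> ICP_QAT_FW_LA_CMD_CIPHER_HASH
 *	qat_get_cmd_id(&auth_xf)   -> ICP_QAT_FW_LA_CMD_AUTH
 */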
static struct rte_crypto_auth_xform *
qat_get_auth_xform(struct rte_crypto_sym_xform *xform)
{
	do {
		if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH)
			return &xform->auth;

		xform = xform->next;
	} while (xform);

	return NULL;
}

static struct rte_crypto_cipher_xform *
qat_get_cipher_xform(struct rte_crypto_sym_xform *xform)
{
	do {
		if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
			return &xform->cipher;

		xform = xform->next;
	} while (xform);

	return NULL;
}
static int
qat_sym_session_configure_cipher(struct rte_cryptodev *dev,
		struct rte_crypto_sym_xform *xform,
		struct qat_sym_session *session)
{
	struct qat_sym_dev_private *internals = dev->data->dev_private;
	struct rte_crypto_cipher_xform *cipher_xform = NULL;
	int ret;

	/* Get cipher xform from crypto xform chain */
	cipher_xform = qat_get_cipher_xform(xform);

	session->cipher_iv.offset = cipher_xform->iv.offset;
	session->cipher_iv.length = cipher_xform->iv.length;

	switch (cipher_xform->algo) {
	case RTE_CRYPTO_CIPHER_AES_CBC:
		if (qat_sym_validate_aes_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_AES_CTR:
		if (qat_sym_validate_aes_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		break;
	case RTE_CRYPTO_CIPHER_SNOW3G_UEA2:
		if (qat_sym_validate_snow3g_key(cipher_xform->key.length,
					&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid SNOW 3G cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
		break;
	case RTE_CRYPTO_CIPHER_NULL:
		session->qat_cipher_alg = ICP_QAT_HW_CIPHER_ALGO_NULL;
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		break;
	case RTE_CRYPTO_CIPHER_KASUMI_F8:
		if (qat_sym_validate_kasumi_key(cipher_xform->key.length,
					&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid KASUMI cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_F8_MODE;
		break;
	case RTE_CRYPTO_CIPHER_3DES_CBC:
		if (qat_sym_validate_3des_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid 3DES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_DES_CBC:
		if (qat_sym_validate_des_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid DES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_3DES_CTR:
		if (qat_sym_validate_3des_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid 3DES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		break;
	case RTE_CRYPTO_CIPHER_DES_DOCSISBPI:
		ret = bpi_cipher_ctx_init(
					cipher_xform->algo,
					cipher_xform->op,
					cipher_xform->key.data,
					cipher_xform->key.length,
					&session->bpi_ctx);
		if (ret != 0) {
			QAT_LOG(ERR, "failed to create DES BPI ctx");
			goto error_out;
		}
		if (qat_sym_validate_des_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid DES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_AES_DOCSISBPI:
		ret = bpi_cipher_ctx_init(
					cipher_xform->algo,
					cipher_xform->op,
					cipher_xform->key.data,
					cipher_xform->key.length,
					&session->bpi_ctx);
		if (ret != 0) {
			QAT_LOG(ERR, "failed to create AES BPI ctx");
			goto error_out;
		}
		if (qat_sym_validate_aes_docsisbpi_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES DOCSISBPI key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_ZUC_EEA3:
		if (!qat_is_cipher_alg_supported(
			cipher_xform->algo, internals)) {
			QAT_LOG(ERR, "%s not supported on this device",
				rte_crypto_cipher_algorithm_strings
					[cipher_xform->algo]);
			ret = -ENOTSUP;
			goto error_out;
		}
		if (qat_sym_validate_zuc_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid ZUC cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
		break;
	case RTE_CRYPTO_CIPHER_AES_XTS:
		if ((cipher_xform->key.length/2) == ICP_QAT_HW_AES_192_KEY_SZ) {
			QAT_LOG(ERR, "AES-XTS-192 not supported");
			ret = -EINVAL;
			goto error_out;
		}
		if (qat_sym_validate_aes_key((cipher_xform->key.length/2),
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES-XTS cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_XTS_MODE;
		break;
	case RTE_CRYPTO_CIPHER_3DES_ECB:
	case RTE_CRYPTO_CIPHER_AES_ECB:
	case RTE_CRYPTO_CIPHER_AES_F8:
	case RTE_CRYPTO_CIPHER_ARC4:
		QAT_LOG(ERR, "Crypto QAT PMD: Unsupported Cipher alg %u",
				cipher_xform->algo);
		ret = -ENOTSUP;
		goto error_out;
	default:
		QAT_LOG(ERR, "Crypto: Undefined Cipher specified %u\n",
				cipher_xform->algo);
		ret = -EINVAL;
		goto error_out;
	}

	if (cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
		session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
	else
		session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;

	if (qat_sym_session_aead_create_cd_cipher(session,
						cipher_xform->key.data,
						cipher_xform->key.length)) {
		ret = -EINVAL;
		goto error_out;
	}

	return 0;

error_out:
	if (session->bpi_ctx) {
		bpi_cipher_ctx_free(session->bpi_ctx);
		session->bpi_ctx = NULL;
	}
	return ret;
}
int
qat_sym_session_configure(struct rte_cryptodev *dev,
		struct rte_crypto_sym_xform *xform,
		struct rte_cryptodev_sym_session *sess,
		struct rte_mempool *mempool)
{
	void *sess_private_data;
	int ret;

	if (rte_mempool_get(mempool, &sess_private_data)) {
		QAT_LOG(ERR,
			"Couldn't get object from session mempool");
		return -ENOMEM;
	}

	ret = qat_sym_session_set_parameters(dev, xform, sess_private_data);
	if (ret != 0) {
		QAT_LOG(ERR,
		    "Crypto QAT PMD: failed to configure session parameters");

		/* Return session to mempool */
		rte_mempool_put(mempool, sess_private_data);
		return ret;
	}

	set_sym_session_private_data(sess, dev->driver_id,
			sess_private_data);

	return 0;
}
static void
qat_sym_session_set_ext_hash_flags(struct qat_sym_session *session,
		uint8_t hash_flag)
{
	struct icp_qat_fw_comn_req_hdr *header = &session->fw_req.comn_hdr;
	struct icp_qat_fw_cipher_auth_cd_ctrl_hdr *cd_ctrl =
			(struct icp_qat_fw_cipher_auth_cd_ctrl_hdr *)
			session->fw_req.cd_ctrl.content_desc_ctrl_lw;

	/* Set the Use Extended Protocol Flags bit in LW 1 */
	QAT_FIELD_SET(header->comn_req_flags,
			QAT_COMN_EXT_FLAGS_USED,
			QAT_COMN_EXT_FLAGS_BITPOS,
			QAT_COMN_EXT_FLAGS_MASK);

	/* Set Hash Flags in LW 28 */
	cd_ctrl->hash_flags |= hash_flag;

	/* Set proto flags in LW 1 */
	switch (session->qat_cipher_alg) {
	case ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_SNOW_3G_PROTO);
		ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
				header->serv_specif_flags, 0);
		break;
	case ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_NO_PROTO);
		ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
				header->serv_specif_flags,
				ICP_QAT_FW_LA_ZUC_3G_PROTO);
		break;
	default:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_NO_PROTO);
		ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
				header->serv_specif_flags, 0);
		break;
	}
}
static void
qat_sym_session_handle_mixed(const struct rte_cryptodev *dev,
		struct qat_sym_session *session)
{
	const struct qat_sym_dev_private *qat_private = dev->data->dev_private;
	enum qat_device_gen min_dev_gen = (qat_private->internal_capabilities &
			QAT_SYM_CAP_MIXED_CRYPTO) ? QAT_GEN2 : QAT_GEN3;

	if (session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3 &&
			session->qat_cipher_alg !=
			ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
		session->min_qat_dev_gen = min_dev_gen;
		qat_sym_session_set_ext_hash_flags(session,
			1 << ICP_QAT_FW_AUTH_HDR_FLAG_ZUC_EIA3_BITPOS);
	} else if (session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2 &&
			session->qat_cipher_alg !=
			ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
		session->min_qat_dev_gen = min_dev_gen;
		qat_sym_session_set_ext_hash_flags(session,
			1 << ICP_QAT_FW_AUTH_HDR_FLAG_SNOW3G_UIA2_BITPOS);
	} else if ((session->aes_cmac ||
			session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_NULL) &&
			(session->qat_cipher_alg ==
			ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2 ||
			session->qat_cipher_alg ==
			ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)) {
		session->min_qat_dev_gen = min_dev_gen;
		qat_sym_session_set_ext_hash_flags(session, 0);
	}
}
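
/* A short note on the "mixed" cases above (summarising the checks, not
 * adding new behaviour): a session is mixed when the hash comes from one
 * 3GPP suite (ZUC EIA3 or SNOW 3G UIA2) while the cipher does not come
 * from the matching suite, or when AES-CMAC/NULL auth is paired with a
 * SNOW 3G/ZUC cipher. Such sessions need the extended protocol flags and
 * a device generation that advertises QAT_SYM_CAP_MIXED_CRYPTO (GEN2 with
 * that capability, otherwise GEN3).
 */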
int
qat_sym_session_set_parameters(struct rte_cryptodev *dev,
		struct rte_crypto_sym_xform *xform, void *session_private)
{
	struct qat_sym_session *session = session_private;
	struct qat_sym_dev_private *internals = dev->data->dev_private;
	enum qat_device_gen qat_dev_gen = internals->qat_dev->qat_dev_gen;
	int ret;
	int qat_cmd_id;

	/* Verify the session physical address is known */
	rte_iova_t session_paddr = rte_mempool_virt2iova(session);
	if (session_paddr == 0 || session_paddr == RTE_BAD_IOVA) {
		QAT_LOG(ERR,
			"Session physical address unknown. Bad memory pool.");
		return -EINVAL;
	}

	/* Set context descriptor physical address */
	session->cd_paddr = session_paddr +
			offsetof(struct qat_sym_session, cd);

	session->min_qat_dev_gen = QAT_GEN1;

	/* Get requested QAT command id */
	qat_cmd_id = qat_get_cmd_id(xform);
	if (qat_cmd_id < 0 || qat_cmd_id >= ICP_QAT_FW_LA_CMD_DELIMITER) {
		QAT_LOG(ERR, "Unsupported xform chain requested");
		return -ENOTSUP;
	}
	session->qat_cmd = (enum icp_qat_fw_la_cmd_id)qat_cmd_id;
	switch (session->qat_cmd) {
	case ICP_QAT_FW_LA_CMD_CIPHER:
		ret = qat_sym_session_configure_cipher(dev, xform, session);
		if (ret < 0)
			return ret;
		break;
	case ICP_QAT_FW_LA_CMD_AUTH:
		ret = qat_sym_session_configure_auth(dev, xform, session);
		if (ret < 0)
			return ret;
		session->is_single_pass_gmac =
			qat_dev_gen == QAT_GEN3 &&
			xform->auth.algo == RTE_CRYPTO_AUTH_AES_GMAC &&
			xform->auth.iv.length == QAT_AES_GCM_SPC_IV_SIZE;
		break;
	case ICP_QAT_FW_LA_CMD_CIPHER_HASH:
		if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
			ret = qat_sym_session_configure_aead(dev, xform,
					session);
			if (ret < 0)
				return ret;
		} else {
			ret = qat_sym_session_configure_cipher(dev,
					xform, session);
			if (ret < 0)
				return ret;
			ret = qat_sym_session_configure_auth(dev,
					xform, session);
			if (ret < 0)
				return ret;
			/* Special handling of mixed hash+cipher algorithms */
			qat_sym_session_handle_mixed(dev, session);
		}
		break;
	case ICP_QAT_FW_LA_CMD_HASH_CIPHER:
		if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
			ret = qat_sym_session_configure_aead(dev, xform,
					session);
			if (ret < 0)
				return ret;
		} else {
			ret = qat_sym_session_configure_auth(dev,
					xform, session);
			if (ret < 0)
				return ret;
			ret = qat_sym_session_configure_cipher(dev,
					xform, session);
			if (ret < 0)
				return ret;
			/* Special handling of mixed hash+cipher algorithms */
			qat_sym_session_handle_mixed(dev, session);
		}
		break;
	case ICP_QAT_FW_LA_CMD_TRNG_GET_RANDOM:
	case ICP_QAT_FW_LA_CMD_TRNG_TEST:
	case ICP_QAT_FW_LA_CMD_SSL3_KEY_DERIVE:
	case ICP_QAT_FW_LA_CMD_TLS_V1_1_KEY_DERIVE:
	case ICP_QAT_FW_LA_CMD_TLS_V1_2_KEY_DERIVE:
	case ICP_QAT_FW_LA_CMD_MGF1:
	case ICP_QAT_FW_LA_CMD_AUTH_PRE_COMP:
	case ICP_QAT_FW_LA_CMD_CIPHER_PRE_COMP:
	case ICP_QAT_FW_LA_CMD_DELIMITER:
		QAT_LOG(ERR, "Unsupported Service %u",
				session->qat_cmd);
		return -ENOTSUP;
	default:
		QAT_LOG(ERR, "Unsupported Service %u",
				session->qat_cmd);
		return -ENOTSUP;
	}

	return 0;
}
static int
qat_sym_session_handle_single_pass(struct qat_sym_session *session,
		struct rte_crypto_aead_xform *aead_xform)
{
	struct icp_qat_fw_la_cipher_req_params *cipher_param =
			(void *) &session->fw_req.serv_specif_rqpars;

	session->is_single_pass = 1;
	session->min_qat_dev_gen = QAT_GEN3;
	session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER;
	if (aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) {
		session->qat_mode = ICP_QAT_HW_CIPHER_AEAD_MODE;
		ICP_QAT_FW_LA_GCM_IV_LEN_FLAG_SET(
			session->fw_req.comn_hdr.serv_specif_flags,
			ICP_QAT_FW_LA_GCM_IV_LEN_12_OCTETS);
	} else {
		/* Chacha-Poly is a special case that uses QAT CTR mode */
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
	}
	session->cipher_iv.offset = aead_xform->iv.offset;
	session->cipher_iv.length = aead_xform->iv.length;
	if (qat_sym_session_aead_create_cd_cipher(session,
			aead_xform->key.data, aead_xform->key.length))
		return -EINVAL;
	session->aad_len = aead_xform->aad_length;
	session->digest_length = aead_xform->digest_length;
	if (aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
		session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
		session->auth_op = ICP_QAT_HW_AUTH_GENERATE;
		ICP_QAT_FW_LA_RET_AUTH_SET(
			session->fw_req.comn_hdr.serv_specif_flags,
			ICP_QAT_FW_LA_RET_AUTH_RES);
	} else {
		session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
		session->auth_op = ICP_QAT_HW_AUTH_VERIFY;
		ICP_QAT_FW_LA_CMP_AUTH_SET(
			session->fw_req.comn_hdr.serv_specif_flags,
			ICP_QAT_FW_LA_CMP_AUTH_RES);
	}
	ICP_QAT_FW_LA_SINGLE_PASS_PROTO_FLAG_SET(
		session->fw_req.comn_hdr.serv_specif_flags,
		ICP_QAT_FW_LA_SINGLE_PASS_PROTO);
	ICP_QAT_FW_LA_PROTO_SET(
		session->fw_req.comn_hdr.serv_specif_flags,
		ICP_QAT_FW_LA_NO_PROTO);
	session->fw_req.comn_hdr.service_cmd_id =
			ICP_QAT_FW_LA_CMD_CIPHER;
	session->cd.cipher.cipher_config.val =
			ICP_QAT_HW_CIPHER_CONFIG_BUILD(
				ICP_QAT_HW_CIPHER_AEAD_MODE,
				session->qat_cipher_alg,
				ICP_QAT_HW_CIPHER_NO_CONVERT,
				session->qat_dir);
	QAT_FIELD_SET(session->cd.cipher.cipher_config.val,
			aead_xform->digest_length,
			QAT_CIPHER_AEAD_HASH_CMP_LEN_BITPOS,
			QAT_CIPHER_AEAD_HASH_CMP_LEN_MASK);
	session->cd.cipher.cipher_config.reserved =
			ICP_QAT_HW_CIPHER_CONFIG_BUILD_UPPER(
				aead_xform->aad_length);
	cipher_param->spc_aad_sz = aead_xform->aad_length;
	cipher_param->spc_auth_res_sz = aead_xform->digest_length;

	return 0;
}
static int
qat_sym_session_configure_auth(struct rte_cryptodev *dev,
				struct rte_crypto_sym_xform *xform,
				struct qat_sym_session *session)
{
	struct rte_crypto_auth_xform *auth_xform = qat_get_auth_xform(xform);
	struct qat_sym_dev_private *internals = dev->data->dev_private;
	const uint8_t *key_data = auth_xform->key.data;
	uint8_t key_length = auth_xform->key.length;

	session->aes_cmac = 0;
	session->auth_key_length = auth_xform->key.length;
	session->auth_iv.offset = auth_xform->iv.offset;
	session->auth_iv.length = auth_xform->iv.length;
	session->auth_mode = ICP_QAT_HW_AUTH_MODE1;

	switch (auth_xform->algo) {
	case RTE_CRYPTO_AUTH_SHA1:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA1;
		session->auth_mode = ICP_QAT_HW_AUTH_MODE0;
		break;
	case RTE_CRYPTO_AUTH_SHA224:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA224;
		session->auth_mode = ICP_QAT_HW_AUTH_MODE0;
		break;
	case RTE_CRYPTO_AUTH_SHA256:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA256;
		session->auth_mode = ICP_QAT_HW_AUTH_MODE0;
		break;
	case RTE_CRYPTO_AUTH_SHA384:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA384;
		session->auth_mode = ICP_QAT_HW_AUTH_MODE0;
		break;
	case RTE_CRYPTO_AUTH_SHA512:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA512;
		session->auth_mode = ICP_QAT_HW_AUTH_MODE0;
		break;
	case RTE_CRYPTO_AUTH_SHA1_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA1;
		break;
	case RTE_CRYPTO_AUTH_SHA224_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA224;
		break;
	case RTE_CRYPTO_AUTH_SHA256_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA256;
		break;
	case RTE_CRYPTO_AUTH_SHA384_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA384;
		break;
	case RTE_CRYPTO_AUTH_SHA512_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA512;
		break;
	case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
		break;
	case RTE_CRYPTO_AUTH_AES_CMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
		session->aes_cmac = 1;
		break;
	case RTE_CRYPTO_AUTH_AES_GMAC:
		if (qat_sym_validate_aes_key(auth_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES key size");
			return -EINVAL;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
		if (session->auth_iv.length == 0)
			session->auth_iv.length = AES_GCM_J0_LEN;
		break;
	case RTE_CRYPTO_AUTH_SNOW3G_UIA2:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2;
		break;
	case RTE_CRYPTO_AUTH_MD5_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_MD5;
		break;
	case RTE_CRYPTO_AUTH_NULL:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_NULL;
		break;
	case RTE_CRYPTO_AUTH_KASUMI_F9:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_KASUMI_F9;
		break;
	case RTE_CRYPTO_AUTH_ZUC_EIA3:
		if (!qat_is_auth_alg_supported(auth_xform->algo, internals)) {
			QAT_LOG(ERR, "%s not supported on this device",
				rte_crypto_auth_algorithm_strings
				[auth_xform->algo]);
			return -ENOTSUP;
		}
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3;
		break;
	case RTE_CRYPTO_AUTH_MD5:
	case RTE_CRYPTO_AUTH_AES_CBC_MAC:
		QAT_LOG(ERR, "Crypto: Unsupported hash alg %u",
				auth_xform->algo);
		return -ENOTSUP;
	default:
		QAT_LOG(ERR, "Crypto: Undefined Hash algo %u specified",
				auth_xform->algo);
		return -EINVAL;
	}

	if (auth_xform->algo == RTE_CRYPTO_AUTH_AES_GMAC) {
		if (auth_xform->op == RTE_CRYPTO_AUTH_OP_GENERATE) {
			session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER_HASH;
			session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
			/*
			 * It needs to create cipher desc content first,
			 * then authentication
			 */
			if (qat_sym_session_aead_create_cd_cipher(session,
						auth_xform->key.data,
						auth_xform->key.length))
				return -EINVAL;

			if (qat_sym_session_aead_create_cd_auth(session,
						key_data,
						key_length,
						0,
						auth_xform->digest_length,
						auth_xform->op))
				return -EINVAL;
		} else {
			session->qat_cmd = ICP_QAT_FW_LA_CMD_HASH_CIPHER;
			session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
			/*
			 * It needs to create authentication desc content
			 * first, then cipher
			 */
			if (qat_sym_session_aead_create_cd_auth(session,
						key_data,
						key_length,
						0,
						auth_xform->digest_length,
						auth_xform->op))
				return -EINVAL;

			if (qat_sym_session_aead_create_cd_cipher(session,
						auth_xform->key.data,
						auth_xform->key.length))
				return -EINVAL;
		}
		/* Restore to authentication only */
		session->qat_cmd = ICP_QAT_FW_LA_CMD_AUTH;
	} else {
		if (qat_sym_session_aead_create_cd_auth(session,
				key_data,
				key_length,
				0,
				auth_xform->digest_length,
				auth_xform->op))
			return -EINVAL;
	}

	session->digest_length = auth_xform->digest_length;
	return 0;
}
static int
qat_sym_session_configure_aead(struct rte_cryptodev *dev,
				struct rte_crypto_sym_xform *xform,
				struct qat_sym_session *session)
{
	struct rte_crypto_aead_xform *aead_xform = &xform->aead;
	enum rte_crypto_auth_operation crypto_operation;
	struct qat_sym_dev_private *internals =
			dev->data->dev_private;
	enum qat_device_gen qat_dev_gen =
			internals->qat_dev->qat_dev_gen;

	/*
	 * Store AEAD IV parameters as cipher IV,
	 * to avoid unnecessary memory usage
	 */
	session->cipher_iv.offset = xform->aead.iv.offset;
	session->cipher_iv.length = xform->aead.iv.length;

	session->auth_mode = ICP_QAT_HW_AUTH_MODE1;

	session->is_single_pass = 0;
	switch (aead_xform->algo) {
	case RTE_CRYPTO_AEAD_AES_GCM:
		if (qat_sym_validate_aes_key(aead_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES key size");
			return -EINVAL;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
		if (qat_dev_gen > QAT_GEN2 && aead_xform->iv.length ==
				QAT_AES_GCM_SPC_IV_SIZE) {
			return qat_sym_session_handle_single_pass(session,
					aead_xform);
		}
		if (session->cipher_iv.length == 0)
			session->cipher_iv.length = AES_GCM_J0_LEN;
		break;
	case RTE_CRYPTO_AEAD_AES_CCM:
		if (qat_sym_validate_aes_key(aead_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES key size");
			return -EINVAL;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC;
		break;
	case RTE_CRYPTO_AEAD_CHACHA20_POLY1305:
		if (aead_xform->key.length != ICP_QAT_HW_CHACHAPOLY_KEY_SZ)
			return -EINVAL;
		session->qat_cipher_alg =
				ICP_QAT_HW_CIPHER_ALGO_CHACHA20_POLY1305;
		return qat_sym_session_handle_single_pass(session,
						aead_xform);
	default:
		QAT_LOG(ERR, "Crypto: Undefined AEAD specified %u\n",
				aead_xform->algo);
		return -EINVAL;
	}

	if ((aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT &&
			aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) ||
			(aead_xform->op == RTE_CRYPTO_AEAD_OP_DECRYPT &&
			aead_xform->algo == RTE_CRYPTO_AEAD_AES_CCM)) {
		session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
		/*
		 * It needs to create cipher desc content first,
		 * then authentication
		 */
		crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
			RTE_CRYPTO_AUTH_OP_GENERATE : RTE_CRYPTO_AUTH_OP_VERIFY;

		if (qat_sym_session_aead_create_cd_cipher(session,
					aead_xform->key.data,
					aead_xform->key.length))
			return -EINVAL;

		if (qat_sym_session_aead_create_cd_auth(session,
					aead_xform->key.data,
					aead_xform->key.length,
					aead_xform->aad_length,
					aead_xform->digest_length,
					crypto_operation))
			return -EINVAL;
	} else {
		session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
		/*
		 * It needs to create authentication desc content first,
		 * then cipher
		 */
		crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
			RTE_CRYPTO_AUTH_OP_VERIFY : RTE_CRYPTO_AUTH_OP_GENERATE;

		if (qat_sym_session_aead_create_cd_auth(session,
					aead_xform->key.data,
					aead_xform->key.length,
					aead_xform->aad_length,
					aead_xform->digest_length,
					crypto_operation))
			return -EINVAL;

		if (qat_sym_session_aead_create_cd_cipher(session,
					aead_xform->key.data,
					aead_xform->key.length))
			return -EINVAL;
	}

	session->digest_length = aead_xform->digest_length;
	return 0;
}
unsigned int qat_sym_session_get_private_size(
		struct rte_cryptodev *dev __rte_unused)
{
	return RTE_ALIGN_CEIL(sizeof(struct qat_sym_session), 8);
}

/* returns block size in bytes per cipher algo */
int qat_cipher_get_block_size(enum icp_qat_hw_cipher_algo qat_cipher_alg)
{
	switch (qat_cipher_alg) {
	case ICP_QAT_HW_CIPHER_ALGO_DES:
		return ICP_QAT_HW_DES_BLK_SZ;
	case ICP_QAT_HW_CIPHER_ALGO_3DES:
		return ICP_QAT_HW_3DES_BLK_SZ;
	case ICP_QAT_HW_CIPHER_ALGO_AES128:
	case ICP_QAT_HW_CIPHER_ALGO_AES192:
	case ICP_QAT_HW_CIPHER_ALGO_AES256:
		return ICP_QAT_HW_AES_BLK_SZ;
	default:
		QAT_LOG(ERR, "invalid block cipher alg %u", qat_cipher_alg);
		return -EFAULT;
	}
}
/*
 * Returns size in bytes per hash algo for state1 size field in cd_ctrl
 * This is digest size rounded up to nearest quadword
 */
static int qat_hash_get_state1_size(enum icp_qat_hw_auth_algo qat_hash_alg)
{
	switch (qat_hash_alg) {
	case ICP_QAT_HW_AUTH_ALGO_SHA1:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA1_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_SHA224:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA224_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_SHA256:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA256_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_SHA384:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA384_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_SHA512:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
	case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_GALOIS_128_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_ZUC_3G_EIA3_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_SNOW_3G_UIA2_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_MD5:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_MD5_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_KASUMI_F9_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_CBC_MAC_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_NULL:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_NULL_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
		/* return maximum state1 size in this case */
		return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	default:
		QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
		return -EFAULT;
	}
}
/* returns digest size in bytes per hash algo */
static int qat_hash_get_digest_size(enum icp_qat_hw_auth_algo qat_hash_alg)
{
	switch (qat_hash_alg) {
	case ICP_QAT_HW_AUTH_ALGO_SHA1:
		return ICP_QAT_HW_SHA1_STATE1_SZ;
	case ICP_QAT_HW_AUTH_ALGO_SHA224:
		return ICP_QAT_HW_SHA224_STATE1_SZ;
	case ICP_QAT_HW_AUTH_ALGO_SHA256:
		return ICP_QAT_HW_SHA256_STATE1_SZ;
	case ICP_QAT_HW_AUTH_ALGO_SHA384:
		return ICP_QAT_HW_SHA384_STATE1_SZ;
	case ICP_QAT_HW_AUTH_ALGO_SHA512:
		return ICP_QAT_HW_SHA512_STATE1_SZ;
	case ICP_QAT_HW_AUTH_ALGO_MD5:
		return ICP_QAT_HW_MD5_STATE1_SZ;
	case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
		return ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
	case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
		/* return maximum digest size in this case */
		return ICP_QAT_HW_SHA512_STATE1_SZ;
	default:
		QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
		return -EFAULT;
	}
}
/* returns block size in bytes per hash algo */
static int qat_hash_get_block_size(enum icp_qat_hw_auth_algo qat_hash_alg)
{
	switch (qat_hash_alg) {
	case ICP_QAT_HW_AUTH_ALGO_SHA1:
		return SHA_CBLOCK;
	case ICP_QAT_HW_AUTH_ALGO_SHA224:
		return SHA256_CBLOCK;
	case ICP_QAT_HW_AUTH_ALGO_SHA256:
		return SHA256_CBLOCK;
	case ICP_QAT_HW_AUTH_ALGO_SHA384:
		return SHA512_CBLOCK;
	case ICP_QAT_HW_AUTH_ALGO_SHA512:
		return SHA512_CBLOCK;
	case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
		return 16;
	case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
		return ICP_QAT_HW_AES_BLK_SZ;
	case ICP_QAT_HW_AUTH_ALGO_MD5:
		return MD5_CBLOCK;
	case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
		/* return maximum block size in this case */
		return SHA512_CBLOCK;
	default:
		QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
		return -EFAULT;
	}
}
static int partial_hash_sha1(uint8_t *data_in, uint8_t *data_out)
{
	SHA_CTX ctx;

	if (!SHA1_Init(&ctx))
		return -EFAULT;
	SHA1_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx, SHA_DIGEST_LENGTH);
	return 0;
}

static int partial_hash_sha224(uint8_t *data_in, uint8_t *data_out)
{
	SHA256_CTX ctx;

	if (!SHA224_Init(&ctx))
		return -EFAULT;
	SHA256_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
	return 0;
}

static int partial_hash_sha256(uint8_t *data_in, uint8_t *data_out)
{
	SHA256_CTX ctx;

	if (!SHA256_Init(&ctx))
		return -EFAULT;
	SHA256_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
	return 0;
}

static int partial_hash_sha384(uint8_t *data_in, uint8_t *data_out)
{
	SHA512_CTX ctx;

	if (!SHA384_Init(&ctx))
		return -EFAULT;
	SHA512_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
	return 0;
}

static int partial_hash_sha512(uint8_t *data_in, uint8_t *data_out)
{
	SHA512_CTX ctx;

	if (!SHA512_Init(&ctx))
		return -EFAULT;
	SHA512_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
	return 0;
}

static int partial_hash_md5(uint8_t *data_in, uint8_t *data_out)
{
	MD5_CTX ctx;

	if (!MD5_Init(&ctx))
		return -EFAULT;
	MD5_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx, MD5_DIGEST_LENGTH);
	return 0;
}
static int partial_hash_compute(enum icp_qat_hw_auth_algo hash_alg,
			uint8_t *data_in,
			uint8_t *data_out)
{
	int digest_size;
	uint8_t digest[qat_hash_get_digest_size(
			ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
	uint32_t *hash_state_out_be32;
	uint64_t *hash_state_out_be64;
	int i;

	/* Initialize to avoid gcc warning */
	memset(digest, 0, sizeof(digest));

	digest_size = qat_hash_get_digest_size(hash_alg);
	if (digest_size <= 0)
		return -EFAULT;

	hash_state_out_be32 = (uint32_t *)data_out;
	hash_state_out_be64 = (uint64_t *)data_out;

	switch (hash_alg) {
	case ICP_QAT_HW_AUTH_ALGO_SHA1:
		if (partial_hash_sha1(data_in, digest))
			return -EFAULT;
		for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
			*hash_state_out_be32 =
				rte_bswap32(*(((uint32_t *)digest)+i));
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA224:
		if (partial_hash_sha224(data_in, digest))
			return -EFAULT;
		for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
			*hash_state_out_be32 =
				rte_bswap32(*(((uint32_t *)digest)+i));
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA256:
		if (partial_hash_sha256(data_in, digest))
			return -EFAULT;
		for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
			*hash_state_out_be32 =
				rte_bswap32(*(((uint32_t *)digest)+i));
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA384:
		if (partial_hash_sha384(data_in, digest))
			return -EFAULT;
		for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
			*hash_state_out_be64 =
				rte_bswap64(*(((uint64_t *)digest)+i));
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA512:
		if (partial_hash_sha512(data_in, digest))
			return -EFAULT;
		for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
			*hash_state_out_be64 =
				rte_bswap64(*(((uint64_t *)digest)+i));
		break;
	case ICP_QAT_HW_AUTH_ALGO_MD5:
		if (partial_hash_md5(data_in, data_out))
			return -EFAULT;
		break;
	default:
		QAT_LOG(ERR, "invalid hash alg %u", hash_alg);
		return -EFAULT;
	}

	return 0;
}
#define HMAC_IPAD_VALUE	0x36
#define HMAC_OPAD_VALUE	0x5c
#define HASH_XCBC_PRECOMP_KEY_NUM 3

static const uint8_t AES_CMAC_SEED[ICP_QAT_HW_AES_128_KEY_SZ];

static void aes_cmac_key_derive(uint8_t *base, uint8_t *derived)
{
	int i;

	/* Derive k1/k2: left-shift the 128-bit base block by one bit */
	derived[0] = base[0] << 1;
	for (i = 1; i < ICP_QAT_HW_AES_BLK_SZ ; i++) {
		derived[i] = base[i] << 1;
		derived[i - 1] |= base[i] >> 7;
	}

	if (base[0] & 0x80)
		derived[ICP_QAT_HW_AES_BLK_SZ - 1] ^= QAT_AES_CMAC_CONST_RB;
}
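
/* Background note (informational): the derivation above is the RFC 4493
 * subkey generation, a doubling in GF(2^128) with reduction constant Rb.
 * Per the RFC 4493 test vectors, with key
 * 2b7e1516 28aed2a6 abf71588 09cf4f3c, AES-128(key, 0^128) is
 * 7df76b0c 1ab899b3 3e42f047 b91b546f, from which
 *	K1 = fbeed618 35713366 7c85e08f 7236a8de
 *	K2 = f7ddac30 6ae266cc f90bc11e e46d513b
 * i.e. K1 = aes_cmac_key_derive(E(0)) and K2 = aes_cmac_key_derive(K1).
 */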
static int qat_sym_do_precomputes(enum icp_qat_hw_auth_algo hash_alg,
				const uint8_t *auth_key,
				uint16_t auth_keylen,
				uint8_t *p_state_buf,
				uint16_t *p_state_len,
				uint8_t aes_cmac)
{
	int block_size;
	uint8_t ipad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
	uint8_t opad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
	int i;

	if (hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC) {

		/* CMAC */
		if (aes_cmac) {
			AES_KEY enc_key;
			uint8_t *in = NULL;
			uint8_t k0[ICP_QAT_HW_AES_128_KEY_SZ];
			uint8_t *k1, *k2;

			auth_keylen = ICP_QAT_HW_AES_128_KEY_SZ;

			in = rte_zmalloc("AES CMAC K1",
					 ICP_QAT_HW_AES_128_KEY_SZ, 16);
			if (in == NULL) {
				QAT_LOG(ERR, "Failed to alloc memory");
				return -ENOMEM;
			}

			rte_memcpy(in, AES_CMAC_SEED,
				   ICP_QAT_HW_AES_128_KEY_SZ);
			rte_memcpy(p_state_buf, auth_key, auth_keylen);

			if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
				&enc_key) != 0) {
				rte_free(in);
				return -EFAULT;
			}

			AES_encrypt(in, k0, &enc_key);

			k1 = p_state_buf + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
			k2 = k1 + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;

			aes_cmac_key_derive(k0, k1);
			aes_cmac_key_derive(k1, k2);

			memset(k0, 0, ICP_QAT_HW_AES_128_KEY_SZ);
			*p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
			rte_free(in);
			return 0;
		} else {
			static uint8_t qat_aes_xcbc_key_seed[
					ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ] = {
				0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
				0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
				0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
				0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
				0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
				0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
			};

			uint8_t *in = NULL;
			uint8_t *out = p_state_buf;
			int x;
			AES_KEY enc_key;

			in = rte_zmalloc("working mem for key",
					ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ, 16);
			if (in == NULL) {
				QAT_LOG(ERR, "Failed to alloc memory");
				return -ENOMEM;
			}

			rte_memcpy(in, qat_aes_xcbc_key_seed,
					ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
			for (x = 0; x < HASH_XCBC_PRECOMP_KEY_NUM; x++) {
				if (AES_set_encrypt_key(auth_key,
							auth_keylen << 3,
							&enc_key) != 0) {
					rte_free(in -
					   (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ));
					memset(out -
					   (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ),
					   0, ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
					return -EFAULT;
				}
				AES_encrypt(in, out, &enc_key);
				in += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
				out += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
			}
			*p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
			rte_free(in - x*ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ);
			return 0;
		}

	} else if ((hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_128) ||
		(hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_64)) {
		uint8_t *in = NULL;
		uint8_t *out = p_state_buf;
		AES_KEY enc_key;

		memset(p_state_buf, 0, ICP_QAT_HW_GALOIS_H_SZ +
				ICP_QAT_HW_GALOIS_LEN_A_SZ +
				ICP_QAT_HW_GALOIS_E_CTR0_SZ);
		in = rte_zmalloc("working mem for key",
				ICP_QAT_HW_GALOIS_H_SZ, 16);
		if (in == NULL) {
			QAT_LOG(ERR, "Failed to alloc memory");
			return -ENOMEM;
		}

		memset(in, 0, ICP_QAT_HW_GALOIS_H_SZ);
		if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
			&enc_key) != 0) {
			rte_free(in);
			return -EFAULT;
		}
		AES_encrypt(in, out, &enc_key);
		*p_state_len = ICP_QAT_HW_GALOIS_H_SZ +
				ICP_QAT_HW_GALOIS_LEN_A_SZ +
				ICP_QAT_HW_GALOIS_E_CTR0_SZ;
		rte_free(in);
		return 0;
	}
	block_size = qat_hash_get_block_size(hash_alg);
	if (block_size < 0)
		return block_size;

	/* init ipad and opad from key and xor with fixed values */
	memset(ipad, 0, block_size);
	memset(opad, 0, block_size);

	if (auth_keylen > (unsigned int)block_size) {
		QAT_LOG(ERR, "invalid keylen %u", auth_keylen);
		return -EFAULT;
	}

	rte_memcpy(ipad, auth_key, auth_keylen);
	rte_memcpy(opad, auth_key, auth_keylen);

	for (i = 0; i < block_size; i++) {
		uint8_t *ipad_ptr = ipad + i;
		uint8_t *opad_ptr = opad + i;
		*ipad_ptr ^= HMAC_IPAD_VALUE;
		*opad_ptr ^= HMAC_OPAD_VALUE;
	}

	/* do partial hash of ipad and copy to state1 */
	if (partial_hash_compute(hash_alg, ipad, p_state_buf)) {
		memset(ipad, 0, block_size);
		memset(opad, 0, block_size);
		QAT_LOG(ERR, "ipad precompute failed");
		return -EFAULT;
	}

	/*
	 * State len is a multiple of 8, so may be larger than the digest.
	 * Put the partial hash of opad state_len bytes after state1
	 */
	*p_state_len = qat_hash_get_state1_size(hash_alg);
	if (partial_hash_compute(hash_alg, opad, p_state_buf + *p_state_len)) {
		memset(ipad, 0, block_size);
		memset(opad, 0, block_size);
		QAT_LOG(ERR, "opad precompute failed");
		return -EFAULT;
	}

	/* don't leave data lying around */
	memset(ipad, 0, block_size);
	memset(opad, 0, block_size);
	return 0;
}
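
/* Why the ipad/opad precompute above is sufficient (informational):
 * HMAC(K, m) = H((K ^ opad) || H((K ^ ipad) || m)). Both (K ^ ipad) and
 * (K ^ opad) are exactly one compression-function block long, so hashing
 * just that first block once and saving the two intermediate chaining
 * states lets the hardware resume each hash per packet without
 * re-deriving anything from the key.
 */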
static void
qat_sym_session_init_common_hdr(struct icp_qat_fw_comn_req_hdr *header,
		enum qat_sym_proto_flag proto_flags)
{
	header->hdr_flags =
		ICP_QAT_FW_COMN_HDR_FLAGS_BUILD(ICP_QAT_FW_COMN_REQ_FLAG_SET);
	header->service_type = ICP_QAT_FW_COMN_REQ_CPM_FW_LA;
	header->comn_req_flags =
		ICP_QAT_FW_COMN_FLAGS_BUILD(QAT_COMN_CD_FLD_TYPE_64BIT_ADR,
					QAT_COMN_PTR_TYPE_FLAT);
	ICP_QAT_FW_LA_PARTIAL_SET(header->serv_specif_flags,
				  ICP_QAT_FW_LA_PARTIAL_NONE);
	ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(header->serv_specif_flags,
					ICP_QAT_FW_CIPH_IV_16BYTE_DATA);

	switch (proto_flags) {
	case QAT_CRYPTO_PROTO_FLAG_NONE:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_PROTO);
		break;
	case QAT_CRYPTO_PROTO_FLAG_CCM:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_CCM_PROTO);
		break;
	case QAT_CRYPTO_PROTO_FLAG_GCM:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_GCM_PROTO);
		break;
	case QAT_CRYPTO_PROTO_FLAG_SNOW3G:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_SNOW_3G_PROTO);
		break;
	case QAT_CRYPTO_PROTO_FLAG_ZUC:
		ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_ZUC_3G_PROTO);
		break;
	}

	ICP_QAT_FW_LA_UPDATE_STATE_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_UPDATE_STATE);
	ICP_QAT_FW_LA_DIGEST_IN_BUFFER_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_DIGEST_IN_BUFFER);
}
/*
 * SNOW 3G and ZUC should never use this function; they set their protocol
 * flags in both the cipher and auth parts of the content descriptor
 * building functions instead.
 */
static enum qat_sym_proto_flag
qat_get_crypto_proto_flag(uint16_t flags)
{
	int proto = ICP_QAT_FW_LA_PROTO_GET(flags);
	enum qat_sym_proto_flag qat_proto_flag =
			QAT_CRYPTO_PROTO_FLAG_NONE;

	switch (proto) {
	case ICP_QAT_FW_LA_GCM_PROTO:
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
		break;
	case ICP_QAT_FW_LA_CCM_PROTO:
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
		break;
	}

	return qat_proto_flag;
}
int qat_sym_session_aead_create_cd_cipher(struct qat_sym_session *cdesc,
						const uint8_t *cipherkey,
						uint32_t cipherkeylen)
{
	struct icp_qat_hw_cipher_algo_blk *cipher;
	struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
	struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
	struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
	void *ptr = &req_tmpl->cd_ctrl;
	struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
	struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
	enum icp_qat_hw_cipher_convert key_convert;
	enum qat_sym_proto_flag qat_proto_flag =
			QAT_CRYPTO_PROTO_FLAG_NONE;
	uint32_t total_key_size;
	uint16_t cipher_offset, cd_size;
	uint32_t wordIndex = 0;
	uint32_t *temp_key = NULL;

	if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER) {
		cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
		ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_DRAM_WR);
		ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_RET_AUTH_RES);
		ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
		cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
		ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_DRAM_WR);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
		QAT_LOG(ERR, "Invalid param, must be a cipher command.");
		return -EFAULT;
	}

	if (cdesc->qat_mode == ICP_QAT_HW_CIPHER_CTR_MODE) {
		/*
		 * CTR Streaming ciphers are a special case. Decrypt = encrypt
		 * Overriding default values previously set
		 */
		cdesc->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
		key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2
		|| cdesc->qat_cipher_alg ==
			ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)
		key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
	else if (cdesc->qat_dir == ICP_QAT_HW_CIPHER_ENCRYPT)
		key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
	else
		key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;

	if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
		total_key_size = ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ +
			ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
		cipher_cd_ctrl->cipher_state_sz =
			ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;

	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
		total_key_size = ICP_QAT_HW_KASUMI_F8_KEY_SZ;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_KASUMI_BLK_SZ >> 3;
		cipher_cd_ctrl->cipher_padding_sz =
					(2 * ICP_QAT_HW_KASUMI_BLK_SZ) >> 3;
	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES) {
		total_key_size = ICP_QAT_HW_3DES_KEY_SZ;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_3DES_BLK_SZ >> 3;
		qat_proto_flag =
			qat_get_crypto_proto_flag(header->serv_specif_flags);
	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_DES) {
		total_key_size = ICP_QAT_HW_DES_KEY_SZ;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_DES_BLK_SZ >> 3;
		qat_proto_flag =
			qat_get_crypto_proto_flag(header->serv_specif_flags);
	} else if (cdesc->qat_cipher_alg ==
		ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
		total_key_size = ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ +
			ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
		cipher_cd_ctrl->cipher_state_sz =
			ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
		cdesc->min_qat_dev_gen = QAT_GEN2;
	} else {
		total_key_size = cipherkeylen;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_AES_BLK_SZ >> 3;
		qat_proto_flag =
			qat_get_crypto_proto_flag(header->serv_specif_flags);
	}
	cipher_cd_ctrl->cipher_key_sz = total_key_size >> 3;
	cipher_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
	cipher_cd_ctrl->cipher_cfg_offset = cipher_offset >> 3;

	header->service_cmd_id = cdesc->qat_cmd;
	qat_sym_session_init_common_hdr(header, qat_proto_flag);

	cipher = (struct icp_qat_hw_cipher_algo_blk *)cdesc->cd_cur_ptr;
	cipher->cipher_config.val =
	    ICP_QAT_HW_CIPHER_CONFIG_BUILD(cdesc->qat_mode,
					cdesc->qat_cipher_alg, key_convert,
					cdesc->qat_dir);

	if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
		temp_key = (uint32_t *)(cdesc->cd_cur_ptr +
					sizeof(struct icp_qat_hw_cipher_config)
					+ cipherkeylen);
		memcpy(cipher->key, cipherkey, cipherkeylen);
		memcpy(temp_key, cipherkey, cipherkeylen);

		/* XOR Key with KASUMI F8 key modifier at 4 bytes level */
		for (wordIndex = 0; wordIndex < (cipherkeylen >> 2);
								wordIndex++)
			temp_key[wordIndex] ^= KASUMI_F8_KEY_MODIFIER_4_BYTES;

		cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
					cipherkeylen + cipherkeylen;
	} else {
		memcpy(cipher->key, cipherkey, cipherkeylen);
		cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
					cipherkeylen;
	}

	if (total_key_size > cipherkeylen) {
		uint32_t padding_size = total_key_size-cipherkeylen;
		if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
			&& (cipherkeylen == QAT_3DES_KEY_SZ_OPT2)) {
			/* K3 not provided so use K1 = K3 */
			memcpy(cdesc->cd_cur_ptr, cipherkey, padding_size);
		} else if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
			&& (cipherkeylen == QAT_3DES_KEY_SZ_OPT3)) {
			/* K2 and K3 not provided so use K1 = K2 = K3 */
			memcpy(cdesc->cd_cur_ptr, cipherkey,
				cipherkeylen);
			memcpy(cdesc->cd_cur_ptr+cipherkeylen,
				cipherkey, cipherkeylen);
		} else
			memset(cdesc->cd_cur_ptr, 0, padding_size);

		cdesc->cd_cur_ptr += padding_size;
	}
	cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
	cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;

	return 0;
}
int qat_sym_session_aead_create_cd_auth(struct qat_sym_session *cdesc,
						const uint8_t *authkey,
						uint32_t authkeylen,
						uint32_t aad_length,
						uint32_t digestsize,
						unsigned int operation)
{
	struct icp_qat_hw_auth_setup *hash;
	struct icp_qat_hw_cipher_algo_blk *cipherconfig;
	struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
	struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
	struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
	void *ptr = &req_tmpl->cd_ctrl;
	struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
	struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
	struct icp_qat_fw_la_auth_req_params *auth_param =
		(struct icp_qat_fw_la_auth_req_params *)
		((char *)&req_tmpl->serv_specif_rqpars +
		ICP_QAT_FW_HASH_REQUEST_PARAMETERS_OFFSET);
	uint16_t state1_size = 0, state2_size = 0, cd_extra_size = 0;
	uint16_t hash_offset, cd_size;
	uint32_t *aad_len = NULL;
	uint32_t wordIndex = 0;
	uint32_t *pTempKey;
	enum qat_sym_proto_flag qat_proto_flag =
		QAT_CRYPTO_PROTO_FLAG_NONE;

	if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_AUTH) {
		ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_DRAM_WR);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
		ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
				ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
				ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
				ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
				ICP_QAT_FW_SLICE_DRAM_WR);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
		QAT_LOG(ERR, "Invalid param, must be a hash command.");
		return -EFAULT;
	}

	if (operation == RTE_CRYPTO_AUTH_OP_VERIFY) {
		ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_NO_RET_AUTH_RES);
		ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_CMP_AUTH_RES);
		cdesc->auth_op = ICP_QAT_HW_AUTH_VERIFY;
	} else {
		ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_RET_AUTH_RES);
		ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
		cdesc->auth_op = ICP_QAT_HW_AUTH_GENERATE;
	}

	/*
	 * Setup the inner hash config
	 */
	hash_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
	hash = (struct icp_qat_hw_auth_setup *)cdesc->cd_cur_ptr;
	hash->auth_config.reserved = 0;
	hash->auth_config.config =
			ICP_QAT_HW_AUTH_CONFIG_BUILD(cdesc->auth_mode,
				cdesc->qat_hash_alg, digestsize);

	if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_KASUMI_F9
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_NULL
			)
		hash->auth_counter.counter = 0;
	else {
		int block_size = qat_hash_get_block_size(cdesc->qat_hash_alg);

		if (block_size < 0)
			return block_size;
		hash->auth_counter.counter = rte_bswap32(block_size);
	}

	cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_auth_setup);
	/*
	 * cd_cur_ptr now points at the state1 information.
	 */
	switch (cdesc->qat_hash_alg) {
	case ICP_QAT_HW_AUTH_ALGO_SHA1:
		if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0) {
			/* Plain SHA-1 */
			rte_memcpy(cdesc->cd_cur_ptr, sha1InitialState,
					sizeof(sha1InitialState));
			state1_size = qat_hash_get_state1_size(
					cdesc->qat_hash_alg);
			break;
		}
		/* SHA-1 HMAC */
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA1, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = RTE_ALIGN_CEIL(ICP_QAT_HW_SHA1_STATE2_SZ, 8);
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA224:
		if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0) {
			/* Plain SHA-224 */
			rte_memcpy(cdesc->cd_cur_ptr, sha224InitialState,
					sizeof(sha224InitialState));
			state1_size = qat_hash_get_state1_size(
					cdesc->qat_hash_alg);
			break;
		}
		/* SHA-224 HMAC */
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA224, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_SHA224_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA256:
		if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0) {
			/* Plain SHA-256 */
			rte_memcpy(cdesc->cd_cur_ptr, sha256InitialState,
					sizeof(sha256InitialState));
			state1_size = qat_hash_get_state1_size(
					cdesc->qat_hash_alg);
			break;
		}
		/* SHA-256 HMAC */
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA256, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_SHA256_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA384:
		if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0) {
			/* Plain SHA-384 */
			rte_memcpy(cdesc->cd_cur_ptr, sha384InitialState,
					sizeof(sha384InitialState));
			state1_size = qat_hash_get_state1_size(
					cdesc->qat_hash_alg);
			break;
		}
		/* SHA-384 HMAC */
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA384, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_SHA384_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA512:
		if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0) {
			/* Plain SHA-512 */
			rte_memcpy(cdesc->cd_cur_ptr, sha512InitialState,
					sizeof(sha512InitialState));
			state1_size = qat_hash_get_state1_size(
					cdesc->qat_hash_alg);
			break;
		}
		/* SHA-512 HMAC */
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA512, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_SHA512_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
		state1_size = ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;

		if (cdesc->aes_cmac)
			memset(cdesc->cd_cur_ptr, 0, state1_size);
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC,
			authkey, authkeylen, cdesc->cd_cur_ptr + state1_size,
			&state2_size, cdesc->aes_cmac)) {
			cdesc->aes_cmac ? QAT_LOG(ERR,
						  "(CMAC)precompute failed")
					: QAT_LOG(ERR,
						  "(XCBC)precompute failed");
			return -EFAULT;
		}
		break;
	case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
	case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
		state1_size = ICP_QAT_HW_GALOIS_128_STATE1_SZ;
		if (qat_sym_do_precomputes(cdesc->qat_hash_alg, authkey,
			authkeylen, cdesc->cd_cur_ptr + state1_size,
			&state2_size, cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(GCM)precompute failed");
			return -EFAULT;
		}
		/*
		 * Write (the length of AAD) into bytes 16-19 of state2
		 * in big-endian format. This field is 8 bytes
		 */
		auth_param->u2.aad_sz =
				RTE_ALIGN_CEIL(aad_length, 16);
		auth_param->hash_state_sz = (auth_param->u2.aad_sz) >> 3;

		aad_len = (uint32_t *)(cdesc->cd_cur_ptr +
					ICP_QAT_HW_GALOIS_128_STATE1_SZ +
					ICP_QAT_HW_GALOIS_H_SZ);
		*aad_len = rte_bswap32(aad_length);
		cdesc->aad_len = aad_length;
		break;
	case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2);
		state2_size = ICP_QAT_HW_SNOW_3G_UIA2_STATE2_SZ;
		memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);

		cipherconfig = (struct icp_qat_hw_cipher_algo_blk *)
				(cdesc->cd_cur_ptr + state1_size + state2_size);
		cipherconfig->cipher_config.val =
		ICP_QAT_HW_CIPHER_CONFIG_BUILD(ICP_QAT_HW_CIPHER_ECB_MODE,
			ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2,
			ICP_QAT_HW_CIPHER_KEY_CONVERT,
			ICP_QAT_HW_CIPHER_ENCRYPT);
		memcpy(cipherconfig->key, authkey, authkeylen);
		memset(cipherconfig->key + authkeylen,
				0, ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ);
		cd_extra_size += sizeof(struct icp_qat_hw_cipher_config) +
				authkeylen + ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
		auth_param->hash_state_sz = ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
		break;
	case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
		hash->auth_config.config =
			ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE0,
				cdesc->qat_hash_alg, digestsize);
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3);
		state2_size = ICP_QAT_HW_ZUC_3G_EIA3_STATE2_SZ;
		memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size
			+ ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ);

		memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
		cd_extra_size += ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
		auth_param->hash_state_sz = ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
		cdesc->min_qat_dev_gen = QAT_GEN2;

		break;
	case ICP_QAT_HW_AUTH_ALGO_MD5:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_MD5, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(MD5)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_MD5_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_NULL:
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_NULL);
		state2_size = ICP_QAT_HW_NULL_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC);
		state2_size = ICP_QAT_HW_AES_CBC_MAC_KEY_SZ +
				ICP_QAT_HW_AES_CCM_CBC_E_CTR0_SZ;

		if (aad_length > 0) {
			aad_length += ICP_QAT_HW_CCM_AAD_B0_LEN +
				ICP_QAT_HW_CCM_AAD_LEN_INFO;
			auth_param->u2.aad_sz =
				RTE_ALIGN_CEIL(aad_length,
				ICP_QAT_HW_CCM_AAD_ALIGNMENT);
		} else {
			auth_param->u2.aad_sz = ICP_QAT_HW_CCM_AAD_B0_LEN;
		}
		cdesc->aad_len = aad_length;
		hash->auth_counter.counter = 0;

		hash_cd_ctrl->outer_prefix_sz = digestsize;
		auth_param->hash_state_sz = digestsize;

		memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
		break;
	case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_KASUMI_F9);
		state2_size = ICP_QAT_HW_KASUMI_F9_STATE2_SZ;
		memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
		pTempKey = (uint32_t *)(cdesc->cd_cur_ptr + state1_size
							+ authkeylen);
		/*
		 * The Inner Hash Initial State2 block must contain IK
		 * (Initialisation Key), followed by IK XOR-ed with KM
		 * (Key Modifier): IK||(IK^KM).
		 */
		/* write the auth key */
		memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
		/* initialise temp key with auth key */
		memcpy(pTempKey, authkey, authkeylen);
		/* XOR Key with KASUMI F9 key modifier at 4 bytes level */
		for (wordIndex = 0; wordIndex < (authkeylen >> 2); wordIndex++)
			pTempKey[wordIndex] ^= KASUMI_F9_KEY_MODIFIER_4_BYTES;
		break;
	default:
		QAT_LOG(ERR, "Invalid HASH alg %u", cdesc->qat_hash_alg);
		return -EFAULT;
	}

	/* Request template setup */
	qat_sym_session_init_common_hdr(header, qat_proto_flag);
	header->service_cmd_id = cdesc->qat_cmd;

	/* Auth CD config setup */
	hash_cd_ctrl->hash_cfg_offset = hash_offset >> 3;
	hash_cd_ctrl->hash_flags = ICP_QAT_FW_AUTH_HDR_FLAG_NO_NESTED;
	hash_cd_ctrl->inner_res_sz = digestsize;
	hash_cd_ctrl->final_sz = digestsize;
	hash_cd_ctrl->inner_state1_sz = state1_size;
	auth_param->auth_res_sz = digestsize;

	hash_cd_ctrl->inner_state2_sz = state2_size;
	hash_cd_ctrl->inner_state2_offset = hash_cd_ctrl->hash_cfg_offset +
			((sizeof(struct icp_qat_hw_auth_setup) +
			 RTE_ALIGN_CEIL(hash_cd_ctrl->inner_state1_sz, 8))
					>> 3);

	cdesc->cd_cur_ptr += state1_size + state2_size + cd_extra_size;
	cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;

	cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
	cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;

	return 0;
}
int qat_sym_validate_aes_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
{
	switch (key_len) {
	case ICP_QAT_HW_AES_128_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
		break;
	case ICP_QAT_HW_AES_192_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_AES192;
		break;
	case ICP_QAT_HW_AES_256_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

int qat_sym_validate_aes_docsisbpi_key(int key_len,
		enum icp_qat_hw_cipher_algo *alg)
{
	switch (key_len) {
	case ICP_QAT_HW_AES_128_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
		break;
	case ICP_QAT_HW_AES_256_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

int qat_sym_validate_snow3g_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
{
	switch (key_len) {
	case ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

int qat_sym_validate_kasumi_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
{
	switch (key_len) {
	case ICP_QAT_HW_KASUMI_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_KASUMI;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

int qat_sym_validate_des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
{
	switch (key_len) {
	case ICP_QAT_HW_DES_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_DES;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

int qat_sym_validate_3des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
{
	switch (key_len) {
	case QAT_3DES_KEY_SZ_OPT1:
	case QAT_3DES_KEY_SZ_OPT2:
	case QAT_3DES_KEY_SZ_OPT3:
		*alg = ICP_QAT_HW_CIPHER_ALGO_3DES;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

int qat_sym_validate_zuc_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
{
	switch (key_len) {
	case ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}
#ifdef RTE_LIB_SECURITY
static int
qat_sec_session_check_docsis(struct rte_security_session_conf *conf)
{
	struct rte_crypto_sym_xform *crypto_sym = conf->crypto_xform;
	struct rte_security_docsis_xform *docsis = &conf->docsis;

	/* CRC generate -> Cipher encrypt */
	if (docsis->direction == RTE_SECURITY_DOCSIS_DOWNLINK) {

		if (crypto_sym != NULL &&
		    crypto_sym->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
		    crypto_sym->cipher.op == RTE_CRYPTO_CIPHER_OP_ENCRYPT &&
		    crypto_sym->cipher.algo ==
					RTE_CRYPTO_CIPHER_AES_DOCSISBPI &&
		    (crypto_sym->cipher.key.length ==
					ICP_QAT_HW_AES_128_KEY_SZ ||
		     crypto_sym->cipher.key.length ==
					ICP_QAT_HW_AES_256_KEY_SZ) &&
		    crypto_sym->cipher.iv.length == ICP_QAT_HW_AES_BLK_SZ &&
		    crypto_sym->next == NULL) {
			return 0;
		}
	/* Cipher decrypt -> CRC verify */
	} else if (docsis->direction == RTE_SECURITY_DOCSIS_UPLINK) {

		if (crypto_sym != NULL &&
		    crypto_sym->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
		    crypto_sym->cipher.op == RTE_CRYPTO_CIPHER_OP_DECRYPT &&
		    crypto_sym->cipher.algo ==
					RTE_CRYPTO_CIPHER_AES_DOCSISBPI &&
		    (crypto_sym->cipher.key.length ==
					ICP_QAT_HW_AES_128_KEY_SZ ||
		     crypto_sym->cipher.key.length ==
					ICP_QAT_HW_AES_256_KEY_SZ) &&
		    crypto_sym->cipher.iv.length == ICP_QAT_HW_AES_BLK_SZ &&
		    crypto_sym->next == NULL) {
			return 0;
		}
	}

	return -EINVAL;
}
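
/* Illustrative only: the shape of a DOCSIS downlink configuration that
 * qat_sec_session_check_docsis() accepts. `key` and `iv_offset` are
 * assumed caller-provided placeholders.
 *
 *	struct rte_crypto_sym_xform cipher_xf = {
 *		.type = RTE_CRYPTO_SYM_XFORM_CIPHER,
 *		.next = NULL,
 *		.cipher = {
 *			.op = RTE_CRYPTO_CIPHER_OP_ENCRYPT,
 *			.algo = RTE_CRYPTO_CIPHER_AES_DOCSISBPI,
 *			.key = { .data = key,
 *				 .length = ICP_QAT_HW_AES_128_KEY_SZ },
 *			.iv = { .offset = iv_offset,
 *				.length = ICP_QAT_HW_AES_BLK_SZ },
 *		},
 *	};
 *	struct rte_security_session_conf conf = {
 *		.action_type = RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
 *		.protocol = RTE_SECURITY_PROTOCOL_DOCSIS,
 *		.docsis = { .direction = RTE_SECURITY_DOCSIS_DOWNLINK },
 *		.crypto_xform = &cipher_xf,
 *	};
 */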
static int
qat_sec_session_set_docsis_parameters(struct rte_cryptodev *dev,
		struct rte_security_session_conf *conf, void *session_private)
{
	int ret;
	int qat_cmd_id;
	struct rte_crypto_sym_xform *xform = NULL;
	struct qat_sym_session *session = session_private;

	/* Clear the session */
	memset(session, 0, qat_sym_session_get_private_size(dev));

	ret = qat_sec_session_check_docsis(conf);
	if (ret) {
		QAT_LOG(ERR, "Unsupported DOCSIS security configuration");
		return ret;
	}

	xform = conf->crypto_xform;

	/* Verify the session physical address is known */
	rte_iova_t session_paddr = rte_mempool_virt2iova(session);
	if (session_paddr == 0 || session_paddr == RTE_BAD_IOVA) {
		QAT_LOG(ERR,
			"Session physical address unknown. Bad memory pool.");
		return -EINVAL;
	}

	/* Set context descriptor physical address */
	session->cd_paddr = session_paddr +
			offsetof(struct qat_sym_session, cd);

	session->min_qat_dev_gen = QAT_GEN1;

	/* Get requested QAT command id - should be cipher */
	qat_cmd_id = qat_get_cmd_id(xform);
	if (qat_cmd_id != ICP_QAT_FW_LA_CMD_CIPHER) {
		QAT_LOG(ERR, "Unsupported xform chain requested");
		return -ENOTSUP;
	}
	session->qat_cmd = (enum icp_qat_fw_la_cmd_id)qat_cmd_id;

	ret = qat_sym_session_configure_cipher(dev, xform, session);
	if (ret < 0)
		return ret;

	return 0;
}
static int
qat_security_session_create(void *dev,
		struct rte_security_session_conf *conf,
		struct rte_security_session *sess,
		struct rte_mempool *mempool)
{
	void *sess_private_data;
	struct rte_cryptodev *cdev = (struct rte_cryptodev *)dev;
	int ret;

	if (conf->action_type != RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL ||
			conf->protocol != RTE_SECURITY_PROTOCOL_DOCSIS) {
		QAT_LOG(ERR, "Invalid security protocol");
		return -EINVAL;
	}

	if (rte_mempool_get(mempool, &sess_private_data)) {
		QAT_LOG(ERR, "Couldn't get object from session mempool");
		return -ENOMEM;
	}

	ret = qat_sec_session_set_docsis_parameters(cdev, conf,
			sess_private_data);
	if (ret != 0) {
		QAT_LOG(ERR, "Failed to configure session parameters");
		/* Return session to mempool */
		rte_mempool_put(mempool, sess_private_data);
		return ret;
	}

	set_sec_session_private_data(sess, sess_private_data);

	return 0;
}

static int
qat_security_session_destroy(void *dev __rte_unused,
		struct rte_security_session *sess)
{
	void *sess_priv = get_sec_session_private_data(sess);
	struct qat_sym_session *s = (struct qat_sym_session *)sess_priv;

	if (sess_priv) {
		if (s->bpi_ctx)
			bpi_cipher_ctx_free(s->bpi_ctx);
		memset(s, 0, qat_sym_session_get_private_size(dev));
		struct rte_mempool *sess_mp = rte_mempool_from_obj(sess_priv);

		set_sec_session_private_data(sess, NULL);
		rte_mempool_put(sess_mp, sess_priv);
	}
	return 0;
}
#endif