1 /* SPDX-License-Identifier: (BSD-3-Clause OR GPL-2.0)
2 * Copyright(c) 2015-2019 Intel Corporation
5 #include <openssl/sha.h> /* Needed to calculate pre-compute values */
6 #include <openssl/aes.h> /* Needed to calculate pre-compute values */
7 #include <openssl/md5.h> /* Needed to calculate pre-compute values */
8 #include <openssl/evp.h> /* Needed for bpi runt block processing */
10 #include <rte_memcpy.h>
11 #include <rte_common.h>
12 #include <rte_spinlock.h>
13 #include <rte_byteorder.h>
15 #include <rte_malloc.h>
16 #include <rte_crypto_sym.h>
17 #ifdef RTE_LIB_SECURITY
18 #include <rte_security.h>
22 #include "qat_sym_session.h"
23 #include "qat_sym_pmd.h"
25 /* SHA1 - 20 bytes - Initialiser state can be found in FIPS standard 180-2 */
26 static const uint8_t sha1InitialState[] = {
27 0x67, 0x45, 0x23, 0x01, 0xef, 0xcd, 0xab, 0x89, 0x98, 0xba,
28 0xdc, 0xfe, 0x10, 0x32, 0x54, 0x76, 0xc3, 0xd2, 0xe1, 0xf0};
30 /* SHA 224 - 32 bytes - Initialiser state can be found in FIPS standard 180-2 */
31 static const uint8_t sha224InitialState[] = {
32 0xc1, 0x05, 0x9e, 0xd8, 0x36, 0x7c, 0xd5, 0x07, 0x30, 0x70, 0xdd,
33 0x17, 0xf7, 0x0e, 0x59, 0x39, 0xff, 0xc0, 0x0b, 0x31, 0x68, 0x58,
34 0x15, 0x11, 0x64, 0xf9, 0x8f, 0xa7, 0xbe, 0xfa, 0x4f, 0xa4};
36 /* SHA 256 - 32 bytes - Initialiser state can be found in FIPS standard 180-2 */
37 static const uint8_t sha256InitialState[] = {
38 0x6a, 0x09, 0xe6, 0x67, 0xbb, 0x67, 0xae, 0x85, 0x3c, 0x6e, 0xf3,
39 0x72, 0xa5, 0x4f, 0xf5, 0x3a, 0x51, 0x0e, 0x52, 0x7f, 0x9b, 0x05,
40 0x68, 0x8c, 0x1f, 0x83, 0xd9, 0xab, 0x5b, 0xe0, 0xcd, 0x19};
42 /* SHA 384 - 64 bytes - Initialiser state can be found in FIPS standard 180-2 */
43 static const uint8_t sha384InitialState[] = {
44 0xcb, 0xbb, 0x9d, 0x5d, 0xc1, 0x05, 0x9e, 0xd8, 0x62, 0x9a, 0x29,
45 0x2a, 0x36, 0x7c, 0xd5, 0x07, 0x91, 0x59, 0x01, 0x5a, 0x30, 0x70,
46 0xdd, 0x17, 0x15, 0x2f, 0xec, 0xd8, 0xf7, 0x0e, 0x59, 0x39, 0x67,
47 0x33, 0x26, 0x67, 0xff, 0xc0, 0x0b, 0x31, 0x8e, 0xb4, 0x4a, 0x87,
48 0x68, 0x58, 0x15, 0x11, 0xdb, 0x0c, 0x2e, 0x0d, 0x64, 0xf9, 0x8f,
49 0xa7, 0x47, 0xb5, 0x48, 0x1d, 0xbe, 0xfa, 0x4f, 0xa4};
51 /* SHA 512 - 64 bytes - Initialiser state can be found in FIPS standard 180-2 */
52 static const uint8_t sha512InitialState[] = {
53 0x6a, 0x09, 0xe6, 0x67, 0xf3, 0xbc, 0xc9, 0x08, 0xbb, 0x67, 0xae,
54 0x85, 0x84, 0xca, 0xa7, 0x3b, 0x3c, 0x6e, 0xf3, 0x72, 0xfe, 0x94,
55 0xf8, 0x2b, 0xa5, 0x4f, 0xf5, 0x3a, 0x5f, 0x1d, 0x36, 0xf1, 0x51,
56 0x0e, 0x52, 0x7f, 0xad, 0xe6, 0x82, 0xd1, 0x9b, 0x05, 0x68, 0x8c,
57 0x2b, 0x3e, 0x6c, 0x1f, 0x1f, 0x83, 0xd9, 0xab, 0xfb, 0x41, 0xbd,
58 0x6b, 0x5b, 0xe0, 0xcd, 0x19, 0x13, 0x7e, 0x21, 0x79};
61 qat_sym_cd_cipher_set(struct qat_sym_session *cd,
62 const uint8_t *enckey,
66 qat_sym_cd_auth_set(struct qat_sym_session *cdesc,
67 const uint8_t *authkey,
71 unsigned int operation);
73 /** Frees a context previously created
74 * Depends on openssl libcrypto
77 bpi_cipher_ctx_free(void *bpi_ctx)
80 EVP_CIPHER_CTX_free((EVP_CIPHER_CTX *)bpi_ctx);
83 /** Creates an AES or DES cipher context in ECB mode
84 * Depends on openssl libcrypto
87 bpi_cipher_ctx_init(enum rte_crypto_cipher_algorithm cryptodev_algo,
88 enum rte_crypto_cipher_operation direction __rte_unused,
89 const uint8_t *key, uint16_t key_length, void **ctx)
91 const EVP_CIPHER *algo = NULL;
93 *ctx = EVP_CIPHER_CTX_new();
100 if (cryptodev_algo == RTE_CRYPTO_CIPHER_DES_DOCSISBPI)
101 algo = EVP_des_ecb();
103 if (key_length == ICP_QAT_HW_AES_128_KEY_SZ)
104 algo = EVP_aes_128_ecb();
106 algo = EVP_aes_256_ecb();
108 /* IV will be ECB-encrypted whether the direction is encrypt or decrypt */
109 if (EVP_EncryptInit_ex(*ctx, algo, NULL, key, 0) != 1) {
118 EVP_CIPHER_CTX_free(*ctx);
123 qat_is_cipher_alg_supported(enum rte_crypto_cipher_algorithm algo,
124 struct qat_sym_dev_private *internals)
127 const struct rte_cryptodev_capabilities *capability;
129 while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
130 RTE_CRYPTO_OP_TYPE_UNDEFINED) {
131 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
134 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_CIPHER)
137 if (capability->sym.cipher.algo == algo)
144 qat_is_auth_alg_supported(enum rte_crypto_auth_algorithm algo,
145 struct qat_sym_dev_private *internals)
148 const struct rte_cryptodev_capabilities *capability;
150 while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
151 RTE_CRYPTO_OP_TYPE_UNDEFINED) {
152 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
155 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_AUTH)
158 if (capability->sym.auth.algo == algo)
165 qat_sym_session_clear(struct rte_cryptodev *dev,
166 struct rte_cryptodev_sym_session *sess)
168 uint8_t index = dev->driver_id;
169 void *sess_priv = get_sym_session_private_data(sess, index);
170 struct qat_sym_session *s = (struct qat_sym_session *)sess_priv;
174 bpi_cipher_ctx_free(s->bpi_ctx);
175 memset(s, 0, qat_sym_session_get_private_size(dev));
176 struct rte_mempool *sess_mp = rte_mempool_from_obj(sess_priv);
178 set_sym_session_private_data(sess, index, NULL);
179 rte_mempool_put(sess_mp, sess_priv);
184 qat_get_cmd_id(const struct rte_crypto_sym_xform *xform)
187 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER && xform->next == NULL)
188 return ICP_QAT_FW_LA_CMD_CIPHER;
190 /* Authentication Only */
191 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH && xform->next == NULL)
192 return ICP_QAT_FW_LA_CMD_AUTH;
195 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
196 /* AES-GCM and AES-CCM work in different orders:
197  * GCM first encrypts and then generates the hash, whereas AES-CCM
198  * first generates the hash and then encrypts. A similar relation
199  * applies to decryption.
201 if (xform->aead.op == RTE_CRYPTO_AEAD_OP_ENCRYPT)
202 if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
203 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
205 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
207 if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
208 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
210 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
213 if (xform->next == NULL)
216 /* Cipher then Authenticate */
217 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
218 xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
219 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
221 /* Authenticate then Cipher */
222 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH &&
223 xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
224 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
229 static struct rte_crypto_auth_xform *
230 qat_get_auth_xform(struct rte_crypto_sym_xform *xform)
233 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH)
242 static struct rte_crypto_cipher_xform *
243 qat_get_cipher_xform(struct rte_crypto_sym_xform *xform)
246 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
247 return &xform->cipher;
256 qat_sym_session_configure_cipher(struct rte_cryptodev *dev,
257 struct rte_crypto_sym_xform *xform,
258 struct qat_sym_session *session)
260 struct qat_sym_dev_private *internals = dev->data->dev_private;
261 struct rte_crypto_cipher_xform *cipher_xform = NULL;
262 enum qat_device_gen qat_dev_gen =
263 internals->qat_dev->qat_dev_gen;
266 /* Get cipher xform from crypto xform chain */
267 cipher_xform = qat_get_cipher_xform(xform);
269 session->cipher_iv.offset = cipher_xform->iv.offset;
270 session->cipher_iv.length = cipher_xform->iv.length;
272 switch (cipher_xform->algo) {
273 case RTE_CRYPTO_CIPHER_AES_CBC:
274 if (qat_sym_validate_aes_key(cipher_xform->key.length,
275 &session->qat_cipher_alg) != 0) {
276 QAT_LOG(ERR, "Invalid AES cipher key size");
280 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
282 case RTE_CRYPTO_CIPHER_AES_CTR:
283 if (qat_sym_validate_aes_key(cipher_xform->key.length,
284 &session->qat_cipher_alg) != 0) {
285 QAT_LOG(ERR, "Invalid AES cipher key size");
289 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
290 if (qat_dev_gen == QAT_GEN4) {
291 /* TODO: Filter WCP */
292 ICP_QAT_FW_LA_SLICE_TYPE_SET(
293 session->fw_req.comn_hdr.serv_specif_flags,
294 ICP_QAT_FW_LA_USE_UCS_SLICE_TYPE);
298 case RTE_CRYPTO_CIPHER_SNOW3G_UEA2:
299 if (qat_sym_validate_snow3g_key(cipher_xform->key.length,
300 &session->qat_cipher_alg) != 0) {
301 QAT_LOG(ERR, "Invalid SNOW 3G cipher key size");
305 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
307 case RTE_CRYPTO_CIPHER_NULL:
308 session->qat_cipher_alg = ICP_QAT_HW_CIPHER_ALGO_NULL;
309 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
311 case RTE_CRYPTO_CIPHER_KASUMI_F8:
312 if (qat_sym_validate_kasumi_key(cipher_xform->key.length,
313 &session->qat_cipher_alg) != 0) {
314 QAT_LOG(ERR, "Invalid KASUMI cipher key size");
318 session->qat_mode = ICP_QAT_HW_CIPHER_F8_MODE;
320 case RTE_CRYPTO_CIPHER_3DES_CBC:
321 if (qat_sym_validate_3des_key(cipher_xform->key.length,
322 &session->qat_cipher_alg) != 0) {
323 QAT_LOG(ERR, "Invalid 3DES cipher key size");
327 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
329 case RTE_CRYPTO_CIPHER_DES_CBC:
330 if (qat_sym_validate_des_key(cipher_xform->key.length,
331 &session->qat_cipher_alg) != 0) {
332 QAT_LOG(ERR, "Invalid DES cipher key size");
336 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
338 case RTE_CRYPTO_CIPHER_3DES_CTR:
339 if (qat_sym_validate_3des_key(cipher_xform->key.length,
340 &session->qat_cipher_alg) != 0) {
341 QAT_LOG(ERR, "Invalid 3DES cipher key size");
345 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
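/*
 * DOCSIS BPI ciphers: full blocks are handled by QAT in CBC mode, while
 * any trailing partial ("runt") block is handled in software using the
 * OpenSSL ECB context created below (see bpi_cipher_ctx_init and the
 * "bpi runt block processing" note next to the openssl/evp.h include).
 */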
347 case RTE_CRYPTO_CIPHER_DES_DOCSISBPI:
348 ret = bpi_cipher_ctx_init(
351 cipher_xform->key.data,
352 cipher_xform->key.length,
355 QAT_LOG(ERR, "failed to create DES BPI ctx");
358 if (qat_sym_validate_des_key(cipher_xform->key.length,
359 &session->qat_cipher_alg) != 0) {
360 QAT_LOG(ERR, "Invalid DES cipher key size");
364 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
366 case RTE_CRYPTO_CIPHER_AES_DOCSISBPI:
367 ret = bpi_cipher_ctx_init(
370 cipher_xform->key.data,
371 cipher_xform->key.length,
374 QAT_LOG(ERR, "failed to create AES BPI ctx");
377 if (qat_sym_validate_aes_docsisbpi_key(cipher_xform->key.length,
378 &session->qat_cipher_alg) != 0) {
379 QAT_LOG(ERR, "Invalid AES DOCSISBPI key size");
383 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
385 case RTE_CRYPTO_CIPHER_ZUC_EEA3:
386 if (!qat_is_cipher_alg_supported(
387 cipher_xform->algo, internals)) {
388 QAT_LOG(ERR, "%s not supported on this device",
389 rte_crypto_cipher_algorithm_strings
390 [cipher_xform->algo]);
394 if (qat_sym_validate_zuc_key(cipher_xform->key.length,
395 &session->qat_cipher_alg) != 0) {
396 QAT_LOG(ERR, "Invalid ZUC cipher key size");
400 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
402 case RTE_CRYPTO_CIPHER_AES_XTS:
403 if ((cipher_xform->key.length/2) == ICP_QAT_HW_AES_192_KEY_SZ) {
404 QAT_LOG(ERR, "AES-XTS-192 not supported");
408 if (qat_sym_validate_aes_key((cipher_xform->key.length/2),
409 &session->qat_cipher_alg) != 0) {
410 QAT_LOG(ERR, "Invalid AES-XTS cipher key size");
414 session->qat_mode = ICP_QAT_HW_CIPHER_XTS_MODE;
416 case RTE_CRYPTO_CIPHER_3DES_ECB:
417 case RTE_CRYPTO_CIPHER_AES_ECB:
418 case RTE_CRYPTO_CIPHER_AES_F8:
419 case RTE_CRYPTO_CIPHER_ARC4:
420 QAT_LOG(ERR, "Crypto QAT PMD: Unsupported Cipher alg %u",
425 QAT_LOG(ERR, "Crypto: Undefined Cipher specified %u\n",
431 if (cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
432 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
434 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
436 if (qat_sym_cd_cipher_set(session,
437 cipher_xform->key.data,
438 cipher_xform->key.length)) {
446 if (session->bpi_ctx) {
447 bpi_cipher_ctx_free(session->bpi_ctx);
448 session->bpi_ctx = NULL;
454 qat_sym_session_configure(struct rte_cryptodev *dev,
455 struct rte_crypto_sym_xform *xform,
456 struct rte_cryptodev_sym_session *sess,
457 struct rte_mempool *mempool)
459 void *sess_private_data;
462 if (rte_mempool_get(mempool, &sess_private_data)) {
464 "Couldn't get object from session mempool");
468 ret = qat_sym_session_set_parameters(dev, xform, sess_private_data);
471 "Crypto QAT PMD: failed to configure session parameters");
473 /* Return session to mempool */
474 rte_mempool_put(mempool, sess_private_data);
478 set_sym_session_private_data(sess, dev->driver_id,
485 qat_sym_session_set_ext_hash_flags(struct qat_sym_session *session,
488 struct icp_qat_fw_comn_req_hdr *header = &session->fw_req.comn_hdr;
489 struct icp_qat_fw_cipher_auth_cd_ctrl_hdr *cd_ctrl =
490 (struct icp_qat_fw_cipher_auth_cd_ctrl_hdr *)
491 session->fw_req.cd_ctrl.content_desc_ctrl_lw;
493 /* Set the Use Extended Protocol Flags bit in LW 1 */
494 QAT_FIELD_SET(header->comn_req_flags,
495 QAT_COMN_EXT_FLAGS_USED,
496 QAT_COMN_EXT_FLAGS_BITPOS,
497 QAT_COMN_EXT_FLAGS_MASK);
499 /* Set Hash Flags in LW 28 */
500 cd_ctrl->hash_flags |= hash_flag;
502 /* Set proto flags in LW 1 */
503 switch (session->qat_cipher_alg) {
504 case ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2:
505 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
506 ICP_QAT_FW_LA_SNOW_3G_PROTO);
507 ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
508 header->serv_specif_flags, 0);
510 case ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3:
511 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
512 ICP_QAT_FW_LA_NO_PROTO);
513 ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
514 header->serv_specif_flags,
515 ICP_QAT_FW_LA_ZUC_3G_PROTO);
518 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
519 ICP_QAT_FW_LA_NO_PROTO);
520 ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
521 header->serv_specif_flags, 0);
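/*
 * A "mixed" session pairs a wireless hash (SNOW 3G UIA2 / ZUC EIA3) with a
 * non-matching cipher, or an AES-CMAC/NULL hash with a wireless cipher.
 * Such combinations need the extended hash flags set by the helper above
 * and a minimum device generation: GEN2 when the device advertises
 * QAT_SYM_CAP_MIXED_CRYPTO, otherwise GEN3.
 */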
527 qat_sym_session_handle_mixed(const struct rte_cryptodev *dev,
528 struct qat_sym_session *session)
530 const struct qat_sym_dev_private *qat_private = dev->data->dev_private;
531 enum qat_device_gen min_dev_gen = (qat_private->internal_capabilities &
532 QAT_SYM_CAP_MIXED_CRYPTO) ? QAT_GEN2 : QAT_GEN3;
534 if (session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3 &&
535 session->qat_cipher_alg !=
536 ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
537 session->min_qat_dev_gen = min_dev_gen;
538 qat_sym_session_set_ext_hash_flags(session,
539 1 << ICP_QAT_FW_AUTH_HDR_FLAG_ZUC_EIA3_BITPOS);
540 } else if (session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2 &&
541 session->qat_cipher_alg !=
542 ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
543 session->min_qat_dev_gen = min_dev_gen;
544 qat_sym_session_set_ext_hash_flags(session,
545 1 << ICP_QAT_FW_AUTH_HDR_FLAG_SNOW3G_UIA2_BITPOS);
546 } else if ((session->aes_cmac ||
547 session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_NULL) &&
548 (session->qat_cipher_alg ==
549 ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2 ||
550 session->qat_cipher_alg ==
551 ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)) {
552 session->min_qat_dev_gen = min_dev_gen;
553 qat_sym_session_set_ext_hash_flags(session, 0);
558 qat_sym_session_set_parameters(struct rte_cryptodev *dev,
559 struct rte_crypto_sym_xform *xform, void *session_private)
561 struct qat_sym_session *session = session_private;
562 struct qat_sym_dev_private *internals = dev->data->dev_private;
563 enum qat_device_gen qat_dev_gen = internals->qat_dev->qat_dev_gen;
567 /* Verify the session physical address is known */
568 rte_iova_t session_paddr = rte_mempool_virt2iova(session);
569 if (session_paddr == 0 || session_paddr == RTE_BAD_IOVA) {
571 "Session physical address unknown. Bad memory pool.");
575 memset(session, 0, sizeof(*session));
576 /* Set context descriptor physical address */
577 session->cd_paddr = session_paddr +
578 offsetof(struct qat_sym_session, cd);
580 session->min_qat_dev_gen = QAT_GEN1;
583 /* Get requested QAT command id */
584 qat_cmd_id = qat_get_cmd_id(xform);
585 if (qat_cmd_id < 0 || qat_cmd_id >= ICP_QAT_FW_LA_CMD_DELIMITER) {
586 QAT_LOG(ERR, "Unsupported xform chain requested");
589 session->qat_cmd = (enum icp_qat_fw_la_cmd_id)qat_cmd_id;
590 switch (session->qat_cmd) {
591 case ICP_QAT_FW_LA_CMD_CIPHER:
592 ret = qat_sym_session_configure_cipher(dev, xform, session);
596 case ICP_QAT_FW_LA_CMD_AUTH:
597 ret = qat_sym_session_configure_auth(dev, xform, session);
600 session->is_single_pass_gmac =
601 qat_dev_gen == QAT_GEN3 &&
602 xform->auth.algo == RTE_CRYPTO_AUTH_AES_GMAC &&
603 xform->auth.iv.length == QAT_AES_GCM_SPC_IV_SIZE;
605 case ICP_QAT_FW_LA_CMD_CIPHER_HASH:
606 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
607 ret = qat_sym_session_configure_aead(dev, xform,
612 ret = qat_sym_session_configure_cipher(dev,
616 ret = qat_sym_session_configure_auth(dev,
620 /* Special handling of mixed hash+cipher algorithms */
621 qat_sym_session_handle_mixed(dev, session);
624 case ICP_QAT_FW_LA_CMD_HASH_CIPHER:
625 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
626 ret = qat_sym_session_configure_aead(dev, xform,
631 ret = qat_sym_session_configure_auth(dev,
635 ret = qat_sym_session_configure_cipher(dev,
639 /* Special handling of mixed hash+cipher algorithms */
640 qat_sym_session_handle_mixed(dev, session);
643 case ICP_QAT_FW_LA_CMD_TRNG_GET_RANDOM:
644 case ICP_QAT_FW_LA_CMD_TRNG_TEST:
645 case ICP_QAT_FW_LA_CMD_SSL3_KEY_DERIVE:
646 case ICP_QAT_FW_LA_CMD_TLS_V1_1_KEY_DERIVE:
647 case ICP_QAT_FW_LA_CMD_TLS_V1_2_KEY_DERIVE:
648 case ICP_QAT_FW_LA_CMD_MGF1:
649 case ICP_QAT_FW_LA_CMD_AUTH_PRE_COMP:
650 case ICP_QAT_FW_LA_CMD_CIPHER_PRE_COMP:
651 case ICP_QAT_FW_LA_CMD_DELIMITER:
652 QAT_LOG(ERR, "Unsupported Service %u",
656 QAT_LOG(ERR, "Unsupported Service %u",
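/*
 * Single-pass AEAD: on devices that support it, the cipher slice performs
 * encryption and authentication in one pass, so the session is programmed
 * as a plain CIPHER command with the AEAD parameters folded into the
 * cipher config and request parameters below.
 */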
665 qat_sym_session_handle_single_pass(struct qat_sym_session *session,
666 struct rte_crypto_aead_xform *aead_xform)
668 struct icp_qat_fw_la_cipher_req_params *cipher_param =
669 (void *) &session->fw_req.serv_specif_rqpars;
671 session->is_single_pass = 1;
672 session->min_qat_dev_gen = QAT_GEN3;
673 session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER;
674 if (aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) {
675 session->qat_mode = ICP_QAT_HW_CIPHER_AEAD_MODE;
676 ICP_QAT_FW_LA_GCM_IV_LEN_FLAG_SET(
677 session->fw_req.comn_hdr.serv_specif_flags,
678 ICP_QAT_FW_LA_GCM_IV_LEN_12_OCTETS);
680 /* Chacha-Poly is a special case that uses QAT CTR mode */
681 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
683 session->cipher_iv.offset = aead_xform->iv.offset;
684 session->cipher_iv.length = aead_xform->iv.length;
685 if (qat_sym_cd_cipher_set(session,
686 aead_xform->key.data, aead_xform->key.length))
688 session->aad_len = aead_xform->aad_length;
689 session->digest_length = aead_xform->digest_length;
690 if (aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
691 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
692 session->auth_op = ICP_QAT_HW_AUTH_GENERATE;
693 ICP_QAT_FW_LA_RET_AUTH_SET(
694 session->fw_req.comn_hdr.serv_specif_flags,
695 ICP_QAT_FW_LA_RET_AUTH_RES);
697 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
698 session->auth_op = ICP_QAT_HW_AUTH_VERIFY;
699 ICP_QAT_FW_LA_CMP_AUTH_SET(
700 session->fw_req.comn_hdr.serv_specif_flags,
701 ICP_QAT_FW_LA_CMP_AUTH_RES);
703 ICP_QAT_FW_LA_SINGLE_PASS_PROTO_FLAG_SET(
704 session->fw_req.comn_hdr.serv_specif_flags,
705 ICP_QAT_FW_LA_SINGLE_PASS_PROTO);
706 ICP_QAT_FW_LA_PROTO_SET(
707 session->fw_req.comn_hdr.serv_specif_flags,
708 ICP_QAT_FW_LA_NO_PROTO);
709 session->fw_req.comn_hdr.service_cmd_id =
710 ICP_QAT_FW_LA_CMD_CIPHER;
711 session->cd.cipher.cipher_config.val =
712 ICP_QAT_HW_CIPHER_CONFIG_BUILD(
713 ICP_QAT_HW_CIPHER_AEAD_MODE,
714 session->qat_cipher_alg,
715 ICP_QAT_HW_CIPHER_NO_CONVERT,
717 QAT_FIELD_SET(session->cd.cipher.cipher_config.val,
718 aead_xform->digest_length,
719 QAT_CIPHER_AEAD_HASH_CMP_LEN_BITPOS,
720 QAT_CIPHER_AEAD_HASH_CMP_LEN_MASK);
721 session->cd.cipher.cipher_config.reserved =
722 ICP_QAT_HW_CIPHER_CONFIG_BUILD_UPPER(
723 aead_xform->aad_length);
724 cipher_param->spc_aad_sz = aead_xform->aad_length;
725 cipher_param->spc_auth_res_sz = aead_xform->digest_length;
731 qat_sym_session_configure_auth(struct rte_cryptodev *dev,
732 struct rte_crypto_sym_xform *xform,
733 struct qat_sym_session *session)
735 struct rte_crypto_auth_xform *auth_xform = qat_get_auth_xform(xform);
736 struct qat_sym_dev_private *internals = dev->data->dev_private;
737 const uint8_t *key_data = auth_xform->key.data;
738 uint8_t key_length = auth_xform->key.length;
740 session->aes_cmac = 0;
741 session->auth_key_length = auth_xform->key.length;
742 session->auth_iv.offset = auth_xform->iv.offset;
743 session->auth_iv.length = auth_xform->iv.length;
744 session->auth_mode = ICP_QAT_HW_AUTH_MODE1;
746 switch (auth_xform->algo) {
747 case RTE_CRYPTO_AUTH_SHA1:
748 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA1;
749 session->auth_mode = ICP_QAT_HW_AUTH_MODE0;
751 case RTE_CRYPTO_AUTH_SHA224:
752 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA224;
753 session->auth_mode = ICP_QAT_HW_AUTH_MODE0;
755 case RTE_CRYPTO_AUTH_SHA256:
756 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA256;
757 session->auth_mode = ICP_QAT_HW_AUTH_MODE0;
759 case RTE_CRYPTO_AUTH_SHA384:
760 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA384;
761 session->auth_mode = ICP_QAT_HW_AUTH_MODE0;
763 case RTE_CRYPTO_AUTH_SHA512:
764 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA512;
765 session->auth_mode = ICP_QAT_HW_AUTH_MODE0;
767 case RTE_CRYPTO_AUTH_SHA1_HMAC:
768 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA1;
770 case RTE_CRYPTO_AUTH_SHA224_HMAC:
771 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA224;
773 case RTE_CRYPTO_AUTH_SHA256_HMAC:
774 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA256;
776 case RTE_CRYPTO_AUTH_SHA384_HMAC:
777 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA384;
779 case RTE_CRYPTO_AUTH_SHA512_HMAC:
780 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA512;
782 case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
783 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
785 case RTE_CRYPTO_AUTH_AES_CMAC:
786 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
787 session->aes_cmac = 1;
789 case RTE_CRYPTO_AUTH_AES_GMAC:
790 if (qat_sym_validate_aes_key(auth_xform->key.length,
791 &session->qat_cipher_alg) != 0) {
792 QAT_LOG(ERR, "Invalid AES key size");
795 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
796 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
797 if (session->auth_iv.length == 0)
798 session->auth_iv.length = AES_GCM_J0_LEN;
800 case RTE_CRYPTO_AUTH_SNOW3G_UIA2:
801 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2;
803 case RTE_CRYPTO_AUTH_MD5_HMAC:
804 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_MD5;
806 case RTE_CRYPTO_AUTH_NULL:
807 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_NULL;
809 case RTE_CRYPTO_AUTH_KASUMI_F9:
810 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_KASUMI_F9;
812 case RTE_CRYPTO_AUTH_ZUC_EIA3:
813 if (!qat_is_auth_alg_supported(auth_xform->algo, internals)) {
814 QAT_LOG(ERR, "%s not supported on this device",
815 rte_crypto_auth_algorithm_strings
819 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3;
821 case RTE_CRYPTO_AUTH_MD5:
822 case RTE_CRYPTO_AUTH_AES_CBC_MAC:
823 QAT_LOG(ERR, "Crypto: Unsupported hash alg %u",
827 QAT_LOG(ERR, "Crypto: Undefined Hash algo %u specified",
832 if (auth_xform->algo == RTE_CRYPTO_AUTH_AES_GMAC) {
833 if (auth_xform->op == RTE_CRYPTO_AUTH_OP_GENERATE) {
834 session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER_HASH;
835 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
837  * The cipher descriptor content needs to be created first,
838  * then the authentication content
841 if (qat_sym_cd_cipher_set(session,
842 auth_xform->key.data,
843 auth_xform->key.length))
846 if (qat_sym_cd_auth_set(session,
850 auth_xform->digest_length,
854 session->qat_cmd = ICP_QAT_FW_LA_CMD_HASH_CIPHER;
855 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
857  * The authentication descriptor content needs to be created first,
861 if (qat_sym_cd_auth_set(session,
865 auth_xform->digest_length,
869 if (qat_sym_cd_cipher_set(session,
870 auth_xform->key.data,
871 auth_xform->key.length))
874 /* Restore to authentication only */
875 session->qat_cmd = ICP_QAT_FW_LA_CMD_AUTH;
877 if (qat_sym_cd_auth_set(session,
881 auth_xform->digest_length,
886 session->digest_length = auth_xform->digest_length;
891 qat_sym_session_configure_aead(struct rte_cryptodev *dev,
892 struct rte_crypto_sym_xform *xform,
893 struct qat_sym_session *session)
895 struct rte_crypto_aead_xform *aead_xform = &xform->aead;
896 enum rte_crypto_auth_operation crypto_operation;
897 struct qat_sym_dev_private *internals =
898 dev->data->dev_private;
899 enum qat_device_gen qat_dev_gen =
900 internals->qat_dev->qat_dev_gen;
903 * Store AEAD IV parameters as cipher IV,
904 * to avoid unnecessary memory usage
906 session->cipher_iv.offset = xform->aead.iv.offset;
907 session->cipher_iv.length = xform->aead.iv.length;
909 session->auth_mode = ICP_QAT_HW_AUTH_MODE1;
911 session->is_single_pass = 0;
912 switch (aead_xform->algo) {
913 case RTE_CRYPTO_AEAD_AES_GCM:
914 if (qat_sym_validate_aes_key(aead_xform->key.length,
915 &session->qat_cipher_alg) != 0) {
916 QAT_LOG(ERR, "Invalid AES key size");
919 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
920 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
921 if (qat_dev_gen > QAT_GEN2 && aead_xform->iv.length ==
922 QAT_AES_GCM_SPC_IV_SIZE) {
923 return qat_sym_session_handle_single_pass(session,
926 if (session->cipher_iv.length == 0)
927 session->cipher_iv.length = AES_GCM_J0_LEN;
930 case RTE_CRYPTO_AEAD_AES_CCM:
931 if (qat_sym_validate_aes_key(aead_xform->key.length,
932 &session->qat_cipher_alg) != 0) {
933 QAT_LOG(ERR, "Invalid AES key size");
936 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
937 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC;
939 case RTE_CRYPTO_AEAD_CHACHA20_POLY1305:
940 if (aead_xform->key.length != ICP_QAT_HW_CHACHAPOLY_KEY_SZ)
942 session->qat_cipher_alg =
943 ICP_QAT_HW_CIPHER_ALGO_CHACHA20_POLY1305;
944 return qat_sym_session_handle_single_pass(session,
947 QAT_LOG(ERR, "Crypto: Undefined AEAD specified %u\n",
952 if ((aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT &&
953 aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) ||
954 (aead_xform->op == RTE_CRYPTO_AEAD_OP_DECRYPT &&
955 aead_xform->algo == RTE_CRYPTO_AEAD_AES_CCM)) {
956 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
958  * The cipher descriptor content needs to be created first,
959  * then the authentication content
961 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
962 RTE_CRYPTO_AUTH_OP_GENERATE : RTE_CRYPTO_AUTH_OP_VERIFY;
964 if (qat_sym_cd_cipher_set(session,
965 aead_xform->key.data,
966 aead_xform->key.length))
969 if (qat_sym_cd_auth_set(session,
970 aead_xform->key.data,
971 aead_xform->key.length,
972 aead_xform->aad_length,
973 aead_xform->digest_length,
977 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
979  * The authentication descriptor content needs to be created first,
983 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
984 RTE_CRYPTO_AUTH_OP_VERIFY : RTE_CRYPTO_AUTH_OP_GENERATE;
986 if (qat_sym_cd_auth_set(session,
987 aead_xform->key.data,
988 aead_xform->key.length,
989 aead_xform->aad_length,
990 aead_xform->digest_length,
994 if (qat_sym_cd_cipher_set(session,
995 aead_xform->key.data,
996 aead_xform->key.length))
1000 session->digest_length = aead_xform->digest_length;
1004 unsigned int qat_sym_session_get_private_size(
1005 struct rte_cryptodev *dev __rte_unused)
1007 return RTE_ALIGN_CEIL(sizeof(struct qat_sym_session), 8);
1010 /* returns block size in bytes per cipher algo */
1011 int qat_cipher_get_block_size(enum icp_qat_hw_cipher_algo qat_cipher_alg)
1013 switch (qat_cipher_alg) {
1014 case ICP_QAT_HW_CIPHER_ALGO_DES:
1015 return ICP_QAT_HW_DES_BLK_SZ;
1016 case ICP_QAT_HW_CIPHER_ALGO_3DES:
1017 return ICP_QAT_HW_3DES_BLK_SZ;
1018 case ICP_QAT_HW_CIPHER_ALGO_AES128:
1019 case ICP_QAT_HW_CIPHER_ALGO_AES192:
1020 case ICP_QAT_HW_CIPHER_ALGO_AES256:
1021 return ICP_QAT_HW_AES_BLK_SZ;
1023 QAT_LOG(ERR, "invalid block cipher alg %u", qat_cipher_alg);
1030 * Returns size in bytes per hash algo for state1 size field in cd_ctrl
1031 * This is digest size rounded up to nearest quadword
1033 static int qat_hash_get_state1_size(enum icp_qat_hw_auth_algo qat_hash_alg)
1035 switch (qat_hash_alg) {
1036 case ICP_QAT_HW_AUTH_ALGO_SHA1:
1037 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA1_STATE1_SZ,
1038 QAT_HW_DEFAULT_ALIGNMENT);
1039 case ICP_QAT_HW_AUTH_ALGO_SHA224:
1040 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA224_STATE1_SZ,
1041 QAT_HW_DEFAULT_ALIGNMENT);
1042 case ICP_QAT_HW_AUTH_ALGO_SHA256:
1043 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA256_STATE1_SZ,
1044 QAT_HW_DEFAULT_ALIGNMENT);
1045 case ICP_QAT_HW_AUTH_ALGO_SHA384:
1046 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA384_STATE1_SZ,
1047 QAT_HW_DEFAULT_ALIGNMENT);
1048 case ICP_QAT_HW_AUTH_ALGO_SHA512:
1049 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
1050 QAT_HW_DEFAULT_ALIGNMENT);
1051 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
1052 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ,
1053 QAT_HW_DEFAULT_ALIGNMENT);
1054 case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
1055 case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
1056 return QAT_HW_ROUND_UP(ICP_QAT_HW_GALOIS_128_STATE1_SZ,
1057 QAT_HW_DEFAULT_ALIGNMENT);
1058 case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
1059 return QAT_HW_ROUND_UP(ICP_QAT_HW_ZUC_3G_EIA3_STATE1_SZ,
1060 QAT_HW_DEFAULT_ALIGNMENT);
1061 case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
1062 return QAT_HW_ROUND_UP(ICP_QAT_HW_SNOW_3G_UIA2_STATE1_SZ,
1063 QAT_HW_DEFAULT_ALIGNMENT);
1064 case ICP_QAT_HW_AUTH_ALGO_MD5:
1065 return QAT_HW_ROUND_UP(ICP_QAT_HW_MD5_STATE1_SZ,
1066 QAT_HW_DEFAULT_ALIGNMENT);
1067 case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
1068 return QAT_HW_ROUND_UP(ICP_QAT_HW_KASUMI_F9_STATE1_SZ,
1069 QAT_HW_DEFAULT_ALIGNMENT);
1070 case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
1071 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_CBC_MAC_STATE1_SZ,
1072 QAT_HW_DEFAULT_ALIGNMENT);
1073 case ICP_QAT_HW_AUTH_ALGO_NULL:
1074 return QAT_HW_ROUND_UP(ICP_QAT_HW_NULL_STATE1_SZ,
1075 QAT_HW_DEFAULT_ALIGNMENT);
1076 case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
1077 /* return maximum state1 size in this case */
1078 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
1079 QAT_HW_DEFAULT_ALIGNMENT);
1081 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
1087 /* returns digest size in bytes per hash algo */
1088 static int qat_hash_get_digest_size(enum icp_qat_hw_auth_algo qat_hash_alg)
1090 switch (qat_hash_alg) {
1091 case ICP_QAT_HW_AUTH_ALGO_SHA1:
1092 return ICP_QAT_HW_SHA1_STATE1_SZ;
1093 case ICP_QAT_HW_AUTH_ALGO_SHA224:
1094 return ICP_QAT_HW_SHA224_STATE1_SZ;
1095 case ICP_QAT_HW_AUTH_ALGO_SHA256:
1096 return ICP_QAT_HW_SHA256_STATE1_SZ;
1097 case ICP_QAT_HW_AUTH_ALGO_SHA384:
1098 return ICP_QAT_HW_SHA384_STATE1_SZ;
1099 case ICP_QAT_HW_AUTH_ALGO_SHA512:
1100 return ICP_QAT_HW_SHA512_STATE1_SZ;
1101 case ICP_QAT_HW_AUTH_ALGO_MD5:
1102 return ICP_QAT_HW_MD5_STATE1_SZ;
1103 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
1104 return ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1105 case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
1106 /* return maximum digest size in this case */
1107 return ICP_QAT_HW_SHA512_STATE1_SZ;
1109 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
1115 /* returns block size in bytes per hash algo */
1116 static int qat_hash_get_block_size(enum icp_qat_hw_auth_algo qat_hash_alg)
1118 switch (qat_hash_alg) {
1119 case ICP_QAT_HW_AUTH_ALGO_SHA1:
1121 case ICP_QAT_HW_AUTH_ALGO_SHA224:
1122 return SHA256_CBLOCK;
1123 case ICP_QAT_HW_AUTH_ALGO_SHA256:
1124 return SHA256_CBLOCK;
1125 case ICP_QAT_HW_AUTH_ALGO_SHA384:
1126 return SHA512_CBLOCK;
1127 case ICP_QAT_HW_AUTH_ALGO_SHA512:
1128 return SHA512_CBLOCK;
1129 case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
1131 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
1132 return ICP_QAT_HW_AES_BLK_SZ;
1133 case ICP_QAT_HW_AUTH_ALGO_MD5:
1135 case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
1136 /* return maximum block size in this case */
1137 return SHA512_CBLOCK;
1139 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
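/*
 * The partial_hash_* helpers below run a single compression-function round
 * over one input block and copy out the raw (non-finalised) internal hash
 * state; partial_hash_compute() then converts that state to big-endian
 * words (except for MD5) before it is written into the content descriptor.
 */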
1145 static int partial_hash_sha1(uint8_t *data_in, uint8_t *data_out)
1149 if (!SHA1_Init(&ctx))
1151 SHA1_Transform(&ctx, data_in);
1152 rte_memcpy(data_out, &ctx, SHA_DIGEST_LENGTH);
1156 static int partial_hash_sha224(uint8_t *data_in, uint8_t *data_out)
1160 if (!SHA224_Init(&ctx))
1162 SHA256_Transform(&ctx, data_in);
1163 rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
1167 static int partial_hash_sha256(uint8_t *data_in, uint8_t *data_out)
1171 if (!SHA256_Init(&ctx))
1173 SHA256_Transform(&ctx, data_in);
1174 rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
1178 static int partial_hash_sha384(uint8_t *data_in, uint8_t *data_out)
1182 if (!SHA384_Init(&ctx))
1184 SHA512_Transform(&ctx, data_in);
1185 rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
1189 static int partial_hash_sha512(uint8_t *data_in, uint8_t *data_out)
1193 if (!SHA512_Init(&ctx))
1195 SHA512_Transform(&ctx, data_in);
1196 rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
1200 static int partial_hash_md5(uint8_t *data_in, uint8_t *data_out)
1204 if (!MD5_Init(&ctx))
1206 MD5_Transform(&ctx, data_in);
1207 rte_memcpy(data_out, &ctx, MD5_DIGEST_LENGTH);
1212 static int partial_hash_compute(enum icp_qat_hw_auth_algo hash_alg,
1217 uint8_t digest[qat_hash_get_digest_size(
1218 ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1219 uint32_t *hash_state_out_be32;
1220 uint64_t *hash_state_out_be64;
1223 /* Initialize to avoid gcc warning */
1224 memset(digest, 0, sizeof(digest));
1226 digest_size = qat_hash_get_digest_size(hash_alg);
1227 if (digest_size <= 0)
1230 hash_state_out_be32 = (uint32_t *)data_out;
1231 hash_state_out_be64 = (uint64_t *)data_out;
1234 case ICP_QAT_HW_AUTH_ALGO_SHA1:
1235 if (partial_hash_sha1(data_in, digest))
1237 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
1238 *hash_state_out_be32 =
1239 rte_bswap32(*(((uint32_t *)digest)+i));
1241 case ICP_QAT_HW_AUTH_ALGO_SHA224:
1242 if (partial_hash_sha224(data_in, digest))
1244 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
1245 *hash_state_out_be32 =
1246 rte_bswap32(*(((uint32_t *)digest)+i));
1248 case ICP_QAT_HW_AUTH_ALGO_SHA256:
1249 if (partial_hash_sha256(data_in, digest))
1251 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
1252 *hash_state_out_be32 =
1253 rte_bswap32(*(((uint32_t *)digest)+i));
1255 case ICP_QAT_HW_AUTH_ALGO_SHA384:
1256 if (partial_hash_sha384(data_in, digest))
1258 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
1259 *hash_state_out_be64 =
1260 rte_bswap64(*(((uint64_t *)digest)+i));
1262 case ICP_QAT_HW_AUTH_ALGO_SHA512:
1263 if (partial_hash_sha512(data_in, digest))
1265 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
1266 *hash_state_out_be64 =
1267 rte_bswap64(*(((uint64_t *)digest)+i));
1269 case ICP_QAT_HW_AUTH_ALGO_MD5:
1270 if (partial_hash_md5(data_in, data_out))
1274 QAT_LOG(ERR, "invalid hash alg %u", hash_alg);
1280 #define HMAC_IPAD_VALUE 0x36
1281 #define HMAC_OPAD_VALUE 0x5c
1282 #define HASH_XCBC_PRECOMP_KEY_NUM 3
1284 static const uint8_t AES_CMAC_SEED[ICP_QAT_HW_AES_128_KEY_SZ];
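/*
 * CMAC subkey derivation (NIST SP 800-38B / RFC 4493): the derived key is
 * the base key doubled in GF(2^128), i.e. shifted left by one bit with the
 * constant Rb (0x87) conditionally XORed into the last byte.
 */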
1286 static void aes_cmac_key_derive(uint8_t *base, uint8_t *derived)
1290 derived[0] = base[0] << 1;
1291 for (i = 1; i < ICP_QAT_HW_AES_BLK_SZ ; i++) {
1292 derived[i] = base[i] << 1;
1293 derived[i - 1] |= base[i] >> 7;
1297 derived[ICP_QAT_HW_AES_BLK_SZ - 1] ^= QAT_AES_CMAC_CONST_RB;
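/*
 * Precompute the algorithm-specific state the firmware expects in the
 * content descriptor: derived keys for XCBC/CMAC, the GHASH H value for
 * GCM/GMAC, or the HMAC ipad/opad partial hashes for the other algorithms.
 */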
1300 static int qat_sym_do_precomputes(enum icp_qat_hw_auth_algo hash_alg,
1301 const uint8_t *auth_key,
1302 uint16_t auth_keylen,
1303 uint8_t *p_state_buf,
1304 uint16_t *p_state_len,
1308 uint8_t ipad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1309 uint8_t opad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1312 if (hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC) {
1318 uint8_t k0[ICP_QAT_HW_AES_128_KEY_SZ];
1321 auth_keylen = ICP_QAT_HW_AES_128_KEY_SZ;
1323 in = rte_zmalloc("AES CMAC K1",
1324 ICP_QAT_HW_AES_128_KEY_SZ, 16);
1327 QAT_LOG(ERR, "Failed to alloc memory");
1331 rte_memcpy(in, AES_CMAC_SEED,
1332 ICP_QAT_HW_AES_128_KEY_SZ);
1333 rte_memcpy(p_state_buf, auth_key, auth_keylen);
1335 if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1341 AES_encrypt(in, k0, &enc_key);
1343 k1 = p_state_buf + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1344 k2 = k1 + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1346 aes_cmac_key_derive(k0, k1);
1347 aes_cmac_key_derive(k1, k2);
1349 memset(k0, 0, ICP_QAT_HW_AES_128_KEY_SZ);
1350 *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
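/*
 * AES-XCBC-MAC (RFC 3566): K1, K2 and K3 are derived by AES-encrypting the
 * constant 0x01.., 0x02.. and 0x03.. blocks below with the authentication
 * key.
 */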
1354 static uint8_t qat_aes_xcbc_key_seed[
1355 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ] = {
1356 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1357 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1358 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1359 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1360 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1361 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1365 uint8_t *out = p_state_buf;
1369 in = rte_zmalloc("working mem for key",
1370 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ, 16);
1372 QAT_LOG(ERR, "Failed to alloc memory");
1376 rte_memcpy(in, qat_aes_xcbc_key_seed,
1377 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1378 for (x = 0; x < HASH_XCBC_PRECOMP_KEY_NUM; x++) {
1379 if (AES_set_encrypt_key(auth_key,
1383 (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ));
1385 (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ),
1386 0, ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1389 AES_encrypt(in, out, &enc_key);
1390 in += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1391 out += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1393 *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
1394 rte_free(in - x*ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ);
1398 } else if ((hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_128) ||
1399 (hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_64)) {
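/*
 * For GCM/GMAC the hash key H is the AES encryption of the all-zero block
 * under the cipher key; it is stored here followed by zeroed len(A) and
 * E(K, CTR0) fields.
 */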
1401 uint8_t *out = p_state_buf;
1404 memset(p_state_buf, 0, ICP_QAT_HW_GALOIS_H_SZ +
1405 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1406 ICP_QAT_HW_GALOIS_E_CTR0_SZ);
1407 in = rte_zmalloc("working mem for key",
1408 ICP_QAT_HW_GALOIS_H_SZ, 16);
1410 QAT_LOG(ERR, "Failed to alloc memory");
1414 memset(in, 0, ICP_QAT_HW_GALOIS_H_SZ);
1415 if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1419 AES_encrypt(in, out, &enc_key);
1420 *p_state_len = ICP_QAT_HW_GALOIS_H_SZ +
1421 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1422 ICP_QAT_HW_GALOIS_E_CTR0_SZ;
1427 block_size = qat_hash_get_block_size(hash_alg);
1430 /* init ipad and opad from key and xor with fixed values */
1431 memset(ipad, 0, block_size);
1432 memset(opad, 0, block_size);
1434 if (auth_keylen > (unsigned int)block_size) {
1435 QAT_LOG(ERR, "invalid keylen %u", auth_keylen);
1438 rte_memcpy(ipad, auth_key, auth_keylen);
1439 rte_memcpy(opad, auth_key, auth_keylen);
1441 for (i = 0; i < block_size; i++) {
1442 uint8_t *ipad_ptr = ipad + i;
1443 uint8_t *opad_ptr = opad + i;
1444 *ipad_ptr ^= HMAC_IPAD_VALUE;
1445 *opad_ptr ^= HMAC_OPAD_VALUE;
1448 /* do partial hash of ipad and copy to state1 */
1449 if (partial_hash_compute(hash_alg, ipad, p_state_buf)) {
1450 memset(ipad, 0, block_size);
1451 memset(opad, 0, block_size);
1452 QAT_LOG(ERR, "ipad precompute failed");
1457  * State length is a multiple of 8, so it may be larger than the digest.
1458  * Put the partial hash of opad state_len bytes after state1.
1460 *p_state_len = qat_hash_get_state1_size(hash_alg);
1461 if (partial_hash_compute(hash_alg, opad, p_state_buf + *p_state_len)) {
1462 memset(ipad, 0, block_size);
1463 memset(opad, 0, block_size);
1464 QAT_LOG(ERR, "opad precompute failed");
1468 /* don't leave data lying around */
1469 memset(ipad, 0, block_size);
1470 memset(opad, 0, block_size);
1475 qat_sym_session_init_common_hdr(struct icp_qat_fw_comn_req_hdr *header,
1476 enum qat_sym_proto_flag proto_flags)
1479 ICP_QAT_FW_COMN_HDR_FLAGS_BUILD(ICP_QAT_FW_COMN_REQ_FLAG_SET);
1480 header->service_type = ICP_QAT_FW_COMN_REQ_CPM_FW_LA;
1481 header->comn_req_flags =
1482 ICP_QAT_FW_COMN_FLAGS_BUILD(QAT_COMN_CD_FLD_TYPE_64BIT_ADR,
1483 QAT_COMN_PTR_TYPE_FLAT);
1484 ICP_QAT_FW_LA_PARTIAL_SET(header->serv_specif_flags,
1485 ICP_QAT_FW_LA_PARTIAL_NONE);
1486 ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(header->serv_specif_flags,
1487 ICP_QAT_FW_CIPH_IV_16BYTE_DATA);
1489 switch (proto_flags) {
1490 case QAT_CRYPTO_PROTO_FLAG_NONE:
1491 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1492 ICP_QAT_FW_LA_NO_PROTO);
1494 case QAT_CRYPTO_PROTO_FLAG_CCM:
1495 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1496 ICP_QAT_FW_LA_CCM_PROTO);
1498 case QAT_CRYPTO_PROTO_FLAG_GCM:
1499 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1500 ICP_QAT_FW_LA_GCM_PROTO);
1502 case QAT_CRYPTO_PROTO_FLAG_SNOW3G:
1503 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1504 ICP_QAT_FW_LA_SNOW_3G_PROTO);
1506 case QAT_CRYPTO_PROTO_FLAG_ZUC:
1507 ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(header->serv_specif_flags,
1508 ICP_QAT_FW_LA_ZUC_3G_PROTO);
1512 ICP_QAT_FW_LA_UPDATE_STATE_SET(header->serv_specif_flags,
1513 ICP_QAT_FW_LA_NO_UPDATE_STATE);
1514 ICP_QAT_FW_LA_DIGEST_IN_BUFFER_SET(header->serv_specif_flags,
1515 ICP_QAT_FW_LA_NO_DIGEST_IN_BUFFER);
1519  * Snow3G and ZUC should never use this function; they set their
1520  * protocol flags in both the cipher and auth parts of the content
1521  * descriptor building functions instead.
1523 static enum qat_sym_proto_flag
1524 qat_get_crypto_proto_flag(uint16_t flags)
1526 int proto = ICP_QAT_FW_LA_PROTO_GET(flags);
1527 enum qat_sym_proto_flag qat_proto_flag =
1528 QAT_CRYPTO_PROTO_FLAG_NONE;
1531 case ICP_QAT_FW_LA_GCM_PROTO:
1532 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
1534 case ICP_QAT_FW_LA_CCM_PROTO:
1535 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
1539 return qat_proto_flag;
1542 int qat_sym_cd_cipher_set(struct qat_sym_session *cdesc,
1543 const uint8_t *cipherkey,
1544 uint32_t cipherkeylen)
1546 struct icp_qat_hw_cipher_algo_blk *cipher;
1547 struct icp_qat_hw_cipher_algo_blk20 *cipher20;
1548 struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
1549 struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
1550 struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
1551 void *ptr = &req_tmpl->cd_ctrl;
1552 struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
1553 struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
1554 enum icp_qat_hw_cipher_convert key_convert;
1555 enum qat_sym_proto_flag qat_proto_flag =
1556 QAT_CRYPTO_PROTO_FLAG_NONE;
1557 uint32_t total_key_size;
1558 uint16_t cipher_offset, cd_size;
1559 uint32_t wordIndex = 0;
1560 uint32_t *temp_key = NULL;
1562 if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER) {
1563 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1564 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1565 ICP_QAT_FW_SLICE_CIPHER);
1566 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1567 ICP_QAT_FW_SLICE_DRAM_WR);
1568 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1569 ICP_QAT_FW_LA_NO_RET_AUTH_RES);
1570 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1571 ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
1572 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1573 } else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
1574 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1575 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1576 ICP_QAT_FW_SLICE_CIPHER);
1577 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1578 ICP_QAT_FW_SLICE_AUTH);
1579 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1580 ICP_QAT_FW_SLICE_AUTH);
1581 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1582 ICP_QAT_FW_SLICE_DRAM_WR);
1583 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1584 } else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
1585 QAT_LOG(ERR, "Invalid param, must be a cipher command.");
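/*
 * Choose whether the firmware uses the key as supplied (NO_CONVERT) or
 * converts it before use (KEY_CONVERT), e.g. for CBC decryption and for
 * SNOW 3G / ZUC keys.
 */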
1589 if (cdesc->qat_mode == ICP_QAT_HW_CIPHER_CTR_MODE) {
1591  * CTR streaming ciphers are a special case: decrypt = encrypt.
1592  * Override the default values previously set.
1594 cdesc->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
1595 key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
1596 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2
1597 || cdesc->qat_cipher_alg ==
1598 ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)
1599 key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
1600 else if (cdesc->qat_dir == ICP_QAT_HW_CIPHER_ENCRYPT)
1601 key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
1603 key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
1605 if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
1606 total_key_size = ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ +
1607 ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
1608 cipher_cd_ctrl->cipher_state_sz =
1609 ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
1610 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
1612 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
1613 total_key_size = ICP_QAT_HW_KASUMI_F8_KEY_SZ;
1614 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_KASUMI_BLK_SZ >> 3;
1615 cipher_cd_ctrl->cipher_padding_sz =
1616 (2 * ICP_QAT_HW_KASUMI_BLK_SZ) >> 3;
1617 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES) {
1618 total_key_size = ICP_QAT_HW_3DES_KEY_SZ;
1619 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_3DES_BLK_SZ >> 3;
1621 qat_get_crypto_proto_flag(header->serv_specif_flags);
1622 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_DES) {
1623 total_key_size = ICP_QAT_HW_DES_KEY_SZ;
1624 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_DES_BLK_SZ >> 3;
1626 qat_get_crypto_proto_flag(header->serv_specif_flags);
1627 } else if (cdesc->qat_cipher_alg ==
1628 ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
1629 total_key_size = ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ +
1630 ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
1631 cipher_cd_ctrl->cipher_state_sz =
1632 ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
1633 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
1634 cdesc->min_qat_dev_gen = QAT_GEN2;
1636 total_key_size = cipherkeylen;
1637 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_AES_BLK_SZ >> 3;
1639 qat_get_crypto_proto_flag(header->serv_specif_flags);
1641 cipher_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
1642 cipher_cd_ctrl->cipher_cfg_offset = cipher_offset >> 3;
1644 header->service_cmd_id = cdesc->qat_cmd;
1645 qat_sym_session_init_common_hdr(header, qat_proto_flag);
1647 cipher = (struct icp_qat_hw_cipher_algo_blk *)cdesc->cd_cur_ptr;
1648 cipher20 = (struct icp_qat_hw_cipher_algo_blk20 *)cdesc->cd_cur_ptr;
1649 cipher->cipher_config.val =
1650 ICP_QAT_HW_CIPHER_CONFIG_BUILD(cdesc->qat_mode,
1651 cdesc->qat_cipher_alg, key_convert,
1654 if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
1655 temp_key = (uint32_t *)(cdesc->cd_cur_ptr +
1656 sizeof(struct icp_qat_hw_cipher_config)
1658 memcpy(cipher->key, cipherkey, cipherkeylen);
1659 memcpy(temp_key, cipherkey, cipherkeylen);
1661 /* XOR the key with the KASUMI F8 key modifier, 4 bytes at a time */
1662 for (wordIndex = 0; wordIndex < (cipherkeylen >> 2);
1664 temp_key[wordIndex] ^= KASUMI_F8_KEY_MODIFIER_4_BYTES;
1666 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1667 cipherkeylen + cipherkeylen;
1668 } else if (cdesc->is_ucs) {
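/*
 * UCS slice path (newer device generations): the key is placed in an
 * icp_qat_hw_ucs_cipher_config block and total_key_size is rounded up to
 * a multiple of the AES-128 key size.
 */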
1669 const uint8_t *final_key = cipherkey;
1671 total_key_size = RTE_ALIGN_CEIL(cipherkeylen,
1672 ICP_QAT_HW_AES_128_KEY_SZ);
1673 cipher20->cipher_config.reserved[0] = 0;
1674 cipher20->cipher_config.reserved[1] = 0;
1675 cipher20->cipher_config.reserved[2] = 0;
1677 rte_memcpy(cipher20->key, final_key, cipherkeylen);
1678 cdesc->cd_cur_ptr +=
1679 sizeof(struct icp_qat_hw_ucs_cipher_config) +
1682 memcpy(cipher->key, cipherkey, cipherkeylen);
1683 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1687 if (total_key_size > cipherkeylen) {
1688 uint32_t padding_size = total_key_size-cipherkeylen;
1689 if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
1690 && (cipherkeylen == QAT_3DES_KEY_SZ_OPT2)) {
1691 /* K3 not provided so use K3 = K1 */
1692 memcpy(cdesc->cd_cur_ptr, cipherkey, padding_size);
1693 } else if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
1694 && (cipherkeylen == QAT_3DES_KEY_SZ_OPT3)) {
1695 /* K2 and K3 not provided so use K2 = K3 = K1 */
1696 memcpy(cdesc->cd_cur_ptr, cipherkey,
1698 memcpy(cdesc->cd_cur_ptr+cipherkeylen,
1699 cipherkey, cipherkeylen);
1701 memset(cdesc->cd_cur_ptr, 0, padding_size);
1703 cdesc->cd_cur_ptr += padding_size;
1705 cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
1706 cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;
1707 cipher_cd_ctrl->cipher_key_sz = total_key_size >> 3;
1712 int qat_sym_cd_auth_set(struct qat_sym_session *cdesc,
1713 const uint8_t *authkey,
1714 uint32_t authkeylen,
1715 uint32_t aad_length,
1716 uint32_t digestsize,
1717 unsigned int operation)
1719 struct icp_qat_hw_auth_setup *hash;
1720 struct icp_qat_hw_cipher_algo_blk *cipherconfig;
1721 struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
1722 struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
1723 struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
1724 void *ptr = &req_tmpl->cd_ctrl;
1725 struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
1726 struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
1727 struct icp_qat_fw_la_auth_req_params *auth_param =
1728 (struct icp_qat_fw_la_auth_req_params *)
1729 ((char *)&req_tmpl->serv_specif_rqpars +
1730 ICP_QAT_FW_HASH_REQUEST_PARAMETERS_OFFSET);
1731 uint16_t state1_size = 0, state2_size = 0, cd_extra_size = 0;
1732 uint16_t hash_offset, cd_size;
1733 uint32_t *aad_len = NULL;
1734 uint32_t wordIndex = 0;
1736 enum qat_sym_proto_flag qat_proto_flag =
1737 QAT_CRYPTO_PROTO_FLAG_NONE;
1739 if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_AUTH) {
1740 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1741 ICP_QAT_FW_SLICE_AUTH);
1742 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1743 ICP_QAT_FW_SLICE_DRAM_WR);
1744 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1745 } else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
1746 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1747 ICP_QAT_FW_SLICE_AUTH);
1748 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1749 ICP_QAT_FW_SLICE_CIPHER);
1750 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1751 ICP_QAT_FW_SLICE_CIPHER);
1752 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1753 ICP_QAT_FW_SLICE_DRAM_WR);
1754 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1755 } else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
1756 QAT_LOG(ERR, "Invalid param, must be a hash command.");
1760 if (operation == RTE_CRYPTO_AUTH_OP_VERIFY) {
1761 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1762 ICP_QAT_FW_LA_NO_RET_AUTH_RES);
1763 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1764 ICP_QAT_FW_LA_CMP_AUTH_RES);
1765 cdesc->auth_op = ICP_QAT_HW_AUTH_VERIFY;
1767 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1768 ICP_QAT_FW_LA_RET_AUTH_RES);
1769 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1770 ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
1771 cdesc->auth_op = ICP_QAT_HW_AUTH_GENERATE;
1775  * Set up the inner hash config
1777 hash_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
1778 hash = (struct icp_qat_hw_auth_setup *)cdesc->cd_cur_ptr;
1779 hash->auth_config.reserved = 0;
1780 hash->auth_config.config =
1781 ICP_QAT_HW_AUTH_CONFIG_BUILD(cdesc->auth_mode,
1782 cdesc->qat_hash_alg, digestsize);
1784 if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0
1785 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2
1786 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_KASUMI_F9
1787 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3
1788 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC
1789 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC
1790 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_NULL
1792 hash->auth_counter.counter = 0;
1794 int block_size = qat_hash_get_block_size(cdesc->qat_hash_alg);
1798 hash->auth_counter.counter = rte_bswap32(block_size);
1801 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_auth_setup);
1804 * cd_cur_ptr now points at the state1 information.
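 * state1 holds the inner hash initial state (the ipad partial hash for
 * HMAC, or the plain algorithm IV for mode 0); state2 carries
 * algorithm-specific material such as the opad partial hash, the GHASH H
 * value and AAD length for GCM, or derived keys for XCBC/CMAC and CBC-MAC.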
1806 switch (cdesc->qat_hash_alg) {
1807 case ICP_QAT_HW_AUTH_ALGO_SHA1:
1808 if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0) {
1810 rte_memcpy(cdesc->cd_cur_ptr, sha1InitialState,
1811 sizeof(sha1InitialState));
1812 state1_size = qat_hash_get_state1_size(
1813 cdesc->qat_hash_alg);
1817 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA1, authkey,
1818 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1820 QAT_LOG(ERR, "(SHA)precompute failed");
1823 state2_size = RTE_ALIGN_CEIL(ICP_QAT_HW_SHA1_STATE2_SZ, 8);
1825 case ICP_QAT_HW_AUTH_ALGO_SHA224:
1826 if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0) {
1828 rte_memcpy(cdesc->cd_cur_ptr, sha224InitialState,
1829 sizeof(sha224InitialState));
1830 state1_size = qat_hash_get_state1_size(
1831 cdesc->qat_hash_alg);
1835 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA224, authkey,
1836 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1838 QAT_LOG(ERR, "(SHA)precompute failed");
1841 state2_size = ICP_QAT_HW_SHA224_STATE2_SZ;
1843 case ICP_QAT_HW_AUTH_ALGO_SHA256:
1844 if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0) {
1846 rte_memcpy(cdesc->cd_cur_ptr, sha256InitialState,
1847 sizeof(sha256InitialState));
1848 state1_size = qat_hash_get_state1_size(
1849 cdesc->qat_hash_alg);
1853 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA256, authkey,
1854 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1856 QAT_LOG(ERR, "(SHA)precompute failed");
1859 state2_size = ICP_QAT_HW_SHA256_STATE2_SZ;
1861 case ICP_QAT_HW_AUTH_ALGO_SHA384:
1862 if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0) {
1864 rte_memcpy(cdesc->cd_cur_ptr, sha384InitialState,
1865 sizeof(sha384InitialState));
1866 state1_size = qat_hash_get_state1_size(
1867 cdesc->qat_hash_alg);
1871 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA384, authkey,
1872 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1874 QAT_LOG(ERR, "(SHA)precompute failed");
1877 state2_size = ICP_QAT_HW_SHA384_STATE2_SZ;
1879 case ICP_QAT_HW_AUTH_ALGO_SHA512:
1880 if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0) {
1882 rte_memcpy(cdesc->cd_cur_ptr, sha512InitialState,
1883 sizeof(sha512InitialState));
1884 state1_size = qat_hash_get_state1_size(
1885 cdesc->qat_hash_alg);
1889 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA512, authkey,
1890 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1892 QAT_LOG(ERR, "(SHA)precompute failed");
1895 state2_size = ICP_QAT_HW_SHA512_STATE2_SZ;
1897 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
1898 state1_size = ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1900 if (cdesc->aes_cmac)
1901 memset(cdesc->cd_cur_ptr, 0, state1_size);
1902 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC,
1903 authkey, authkeylen, cdesc->cd_cur_ptr + state1_size,
1904 &state2_size, cdesc->aes_cmac)) {
1905 cdesc->aes_cmac ? QAT_LOG(ERR,
1906 "(CMAC)precompute failed")
1908 "(XCBC)precompute failed");
1912 case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
1913 case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
1914 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
1915 state1_size = ICP_QAT_HW_GALOIS_128_STATE1_SZ;
1916 if (qat_sym_do_precomputes(cdesc->qat_hash_alg, authkey,
1917 authkeylen, cdesc->cd_cur_ptr + state1_size,
1918 &state2_size, cdesc->aes_cmac)) {
1919 QAT_LOG(ERR, "(GCM)precompute failed");
1923  * Write the length of the AAD into bytes 16-19 of state2
1924  * in big-endian format. This field is 8 bytes wide.
1926 auth_param->u2.aad_sz =
1927 RTE_ALIGN_CEIL(aad_length, 16);
1928 auth_param->hash_state_sz = (auth_param->u2.aad_sz) >> 3;
1930 aad_len = (uint32_t *)(cdesc->cd_cur_ptr +
1931 ICP_QAT_HW_GALOIS_128_STATE1_SZ +
1932 ICP_QAT_HW_GALOIS_H_SZ);
1933 *aad_len = rte_bswap32(aad_length);
1934 cdesc->aad_len = aad_length;
1936 case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
1937 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
1938 state1_size = qat_hash_get_state1_size(
1939 ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2);
1940 state2_size = ICP_QAT_HW_SNOW_3G_UIA2_STATE2_SZ;
1941 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
1943 cipherconfig = (struct icp_qat_hw_cipher_algo_blk *)
1944 (cdesc->cd_cur_ptr + state1_size + state2_size);
1945 cipherconfig->cipher_config.val =
1946 ICP_QAT_HW_CIPHER_CONFIG_BUILD(ICP_QAT_HW_CIPHER_ECB_MODE,
1947 ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2,
1948 ICP_QAT_HW_CIPHER_KEY_CONVERT,
1949 ICP_QAT_HW_CIPHER_ENCRYPT);
1950 memcpy(cipherconfig->key, authkey, authkeylen);
1951 memset(cipherconfig->key + authkeylen,
1952 0, ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ);
1953 cd_extra_size += sizeof(struct icp_qat_hw_cipher_config) +
1954 authkeylen + ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
1955 auth_param->hash_state_sz = ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
1957 case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
1958 hash->auth_config.config =
1959 ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE0,
1960 cdesc->qat_hash_alg, digestsize);
1961 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
1962 state1_size = qat_hash_get_state1_size(
1963 ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3);
1964 state2_size = ICP_QAT_HW_ZUC_3G_EIA3_STATE2_SZ;
1965 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size
1966 + ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ);
1968 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1969 cd_extra_size += ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
1970 auth_param->hash_state_sz = ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
1971 cdesc->min_qat_dev_gen = QAT_GEN2;
1974 case ICP_QAT_HW_AUTH_ALGO_MD5:
1975 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_MD5, authkey,
1976 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1978 QAT_LOG(ERR, "(MD5)precompute failed");
1981 state2_size = ICP_QAT_HW_MD5_STATE2_SZ;
1983 case ICP_QAT_HW_AUTH_ALGO_NULL:
1984 state1_size = qat_hash_get_state1_size(
1985 ICP_QAT_HW_AUTH_ALGO_NULL);
1986 state2_size = ICP_QAT_HW_NULL_STATE2_SZ;
	case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC);
		state2_size = ICP_QAT_HW_AES_CBC_MAC_KEY_SZ +
				ICP_QAT_HW_AES_CCM_CBC_E_CTR0_SZ;

		if (aad_length > 0) {
			aad_length += ICP_QAT_HW_CCM_AAD_B0_LEN +
				ICP_QAT_HW_CCM_AAD_LEN_INFO;
			auth_param->u2.aad_sz =
				RTE_ALIGN_CEIL(aad_length,
					ICP_QAT_HW_CCM_AAD_ALIGNMENT);
		} else {
			auth_param->u2.aad_sz = ICP_QAT_HW_CCM_AAD_B0_LEN;
		}
		cdesc->aad_len = aad_length;
		hash->auth_counter.counter = 0;

		hash_cd_ctrl->outer_prefix_sz = digestsize;
		auth_param->hash_state_sz = digestsize;

		memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
		break;
	case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_KASUMI_F9);
		state2_size = ICP_QAT_HW_KASUMI_F9_STATE2_SZ;
		memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
		pTempKey = (uint32_t *)(cdesc->cd_cur_ptr + state1_size
				+ authkeylen);
		/*
		 * The Inner Hash Initial State2 block must contain IK
		 * (Initialisation Key), followed by IK XOR-ed with KM
		 * (Key Modifier): IK||(IK^KM).
		 */
		/* write the auth key */
		memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
		/* initialise temp key with auth key */
		memcpy(pTempKey, authkey, authkeylen);
		/* XOR the key with the KASUMI F9 key modifier, 4 bytes at a time */
		for (wordIndex = 0; wordIndex < (authkeylen >> 2); wordIndex++)
			pTempKey[wordIndex] ^= KASUMI_F9_KEY_MODIFIER_4_BYTES;
		break;
	default:
		QAT_LOG(ERR, "Invalid HASH alg %u", cdesc->qat_hash_alg);
		return -EFAULT;
	}
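
	/*
	 * The hash_cfg_offset, inner_state2_offset and content_desc_params_sz
	 * fields below are expressed in 8-byte (quad-word) units, hence the
	 * ">> 3" conversions; state and digest sizes remain in bytes.
	 */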
	/* Request template setup */
	qat_sym_session_init_common_hdr(header, qat_proto_flag);
	header->service_cmd_id = cdesc->qat_cmd;

	/* Auth CD config setup */
	hash_cd_ctrl->hash_cfg_offset = hash_offset >> 3;
	hash_cd_ctrl->hash_flags = ICP_QAT_FW_AUTH_HDR_FLAG_NO_NESTED;
	hash_cd_ctrl->inner_res_sz = digestsize;
	hash_cd_ctrl->final_sz = digestsize;
	hash_cd_ctrl->inner_state1_sz = state1_size;
	auth_param->auth_res_sz = digestsize;

	hash_cd_ctrl->inner_state2_sz = state2_size;
	hash_cd_ctrl->inner_state2_offset = hash_cd_ctrl->hash_cfg_offset +
			((sizeof(struct icp_qat_hw_auth_setup) +
			RTE_ALIGN_CEIL(hash_cd_ctrl->inner_state1_sz, 8))
					>> 3);

	cdesc->cd_cur_ptr += state1_size + state2_size + cd_extra_size;
	cd_size = cdesc->cd_cur_ptr - (uint8_t *)&cdesc->cd;

	cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
	cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;

	return 0;
}
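
/*
 * Key-size validation helpers: map a key length in bytes to the matching
 * QAT hardware cipher algorithm enum; unsupported lengths return -EINVAL.
 */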
int qat_sym_validate_aes_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
{
	switch (key_len) {
	case ICP_QAT_HW_AES_128_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
		break;
	case ICP_QAT_HW_AES_192_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_AES192;
		break;
	case ICP_QAT_HW_AES_256_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

int qat_sym_validate_aes_docsisbpi_key(int key_len,
		enum icp_qat_hw_cipher_algo *alg)
{
	switch (key_len) {
	case ICP_QAT_HW_AES_128_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
		break;
	case ICP_QAT_HW_AES_256_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

int qat_sym_validate_snow3g_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
{
	switch (key_len) {
	case ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

int qat_sym_validate_kasumi_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
{
	switch (key_len) {
	case ICP_QAT_HW_KASUMI_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_KASUMI;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

int qat_sym_validate_des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
{
	switch (key_len) {
	case ICP_QAT_HW_DES_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_DES;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

int qat_sym_validate_3des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
{
	switch (key_len) {
	case QAT_3DES_KEY_SZ_OPT1:
	case QAT_3DES_KEY_SZ_OPT2:
	case QAT_3DES_KEY_SZ_OPT3:
		*alg = ICP_QAT_HW_CIPHER_ALGO_3DES;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

int qat_sym_validate_zuc_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
{
	switch (key_len) {
	case ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}
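
/*
 * DOCSIS (rte_security) session handling: only a lookaside-protocol DOCSIS
 * session built from a single AES-DOCSISBPI cipher xform is supported.
 */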
#ifdef RTE_LIB_SECURITY
static int
qat_sec_session_check_docsis(struct rte_security_session_conf *conf)
{
	struct rte_crypto_sym_xform *crypto_sym = conf->crypto_xform;
	struct rte_security_docsis_xform *docsis = &conf->docsis;

	/* CRC generate -> Cipher encrypt */
	if (docsis->direction == RTE_SECURITY_DOCSIS_DOWNLINK) {

		if (crypto_sym != NULL &&
			crypto_sym->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
			crypto_sym->cipher.op == RTE_CRYPTO_CIPHER_OP_ENCRYPT &&
			crypto_sym->cipher.algo ==
					RTE_CRYPTO_CIPHER_AES_DOCSISBPI &&
			(crypto_sym->cipher.key.length ==
					ICP_QAT_HW_AES_128_KEY_SZ ||
			crypto_sym->cipher.key.length ==
					ICP_QAT_HW_AES_256_KEY_SZ) &&
			crypto_sym->cipher.iv.length == ICP_QAT_HW_AES_BLK_SZ &&
			crypto_sym->next == NULL) {
			return 0;
		}
	/* Cipher decrypt -> CRC verify */
	} else if (docsis->direction == RTE_SECURITY_DOCSIS_UPLINK) {

		if (crypto_sym != NULL &&
			crypto_sym->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
			crypto_sym->cipher.op == RTE_CRYPTO_CIPHER_OP_DECRYPT &&
			crypto_sym->cipher.algo ==
					RTE_CRYPTO_CIPHER_AES_DOCSISBPI &&
			(crypto_sym->cipher.key.length ==
					ICP_QAT_HW_AES_128_KEY_SZ ||
			crypto_sym->cipher.key.length ==
					ICP_QAT_HW_AES_256_KEY_SZ) &&
			crypto_sym->cipher.iv.length == ICP_QAT_HW_AES_BLK_SZ &&
			crypto_sym->next == NULL) {
			return 0;
		}
	}

	return -EINVAL;
}

static int
qat_sec_session_set_docsis_parameters(struct rte_cryptodev *dev,
		struct rte_security_session_conf *conf, void *session_private)
{
	int ret;
	int qat_cmd_id;
	struct rte_crypto_sym_xform *xform = NULL;
	struct qat_sym_session *session = session_private;

	/* Clear the session */
	memset(session, 0, qat_sym_session_get_private_size(dev));

	ret = qat_sec_session_check_docsis(conf);
	if (ret) {
		QAT_LOG(ERR, "Unsupported DOCSIS security configuration");
		return ret;
	}

	xform = conf->crypto_xform;

	/* Verify the session physical address is known */
	rte_iova_t session_paddr = rte_mempool_virt2iova(session);
	if (session_paddr == 0 || session_paddr == RTE_BAD_IOVA) {
		QAT_LOG(ERR,
			"Session physical address unknown. Bad memory pool.");
		return -EINVAL;
	}

	/* Set context descriptor physical address */
	session->cd_paddr = session_paddr +
			offsetof(struct qat_sym_session, cd);

	session->min_qat_dev_gen = QAT_GEN1;

	/* Get requested QAT command id - should be cipher */
	qat_cmd_id = qat_get_cmd_id(xform);
	if (qat_cmd_id != ICP_QAT_FW_LA_CMD_CIPHER) {
		QAT_LOG(ERR, "Unsupported xform chain requested");
		return -ENOTSUP;
	}
	session->qat_cmd = (enum icp_qat_fw_la_cmd_id)qat_cmd_id;

	ret = qat_sym_session_configure_cipher(dev, xform, session);
	if (ret < 0)
		return ret;

	return 0;
}
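
/*
 * Create a DOCSIS security session: take a private-data object from the
 * session mempool and populate it from the supplied configuration.
 */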
int
qat_security_session_create(void *dev,
		struct rte_security_session_conf *conf,
		struct rte_security_session *sess,
		struct rte_mempool *mempool)
{
	void *sess_private_data;
	struct rte_cryptodev *cdev = (struct rte_cryptodev *)dev;
	int ret;

	if (conf->action_type != RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL ||
			conf->protocol != RTE_SECURITY_PROTOCOL_DOCSIS) {
		QAT_LOG(ERR, "Invalid security protocol");
		return -EINVAL;
	}

	if (rte_mempool_get(mempool, &sess_private_data)) {
		QAT_LOG(ERR, "Couldn't get object from session mempool");
		return -ENOMEM;
	}

	ret = qat_sec_session_set_docsis_parameters(cdev, conf,
			sess_private_data);
	if (ret != 0) {
		QAT_LOG(ERR, "Failed to configure session parameters");
		/* Return session to mempool */
		rte_mempool_put(mempool, sess_private_data);
		return ret;
	}

	set_sec_session_private_data(sess, sess_private_data);

	return 0;
}
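
/*
 * Destroy a DOCSIS security session: free the BPI runt-block cipher context
 * if one was allocated, clear the private data and return it to its mempool.
 */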
int
qat_security_session_destroy(void *dev __rte_unused,
		struct rte_security_session *sess)
{
	void *sess_priv = get_sec_session_private_data(sess);
	struct qat_sym_session *s = (struct qat_sym_session *)sess_priv;

	if (sess_priv) {
		if (s->bpi_ctx)
			bpi_cipher_ctx_free(s->bpi_ctx);
		memset(s, 0, qat_sym_session_get_private_size(dev));
		struct rte_mempool *sess_mp = rte_mempool_from_obj(sess_priv);

		set_sec_session_private_data(sess, NULL);
		rte_mempool_put(sess_mp, sess_priv);
	}
	return 0;
}
#endif