/* SPDX-License-Identifier: (BSD-3-Clause OR GPL-2.0)
 * Copyright(c) 2015-2019 Intel Corporation
 */

#include <openssl/sha.h>	/* Needed to calculate pre-compute values */
#include <openssl/aes.h>	/* Needed to calculate pre-compute values */
#include <openssl/md5.h>	/* Needed to calculate pre-compute values */
#include <openssl/evp.h>	/* Needed for bpi runt block processing */

#include <rte_memcpy.h>
#include <rte_common.h>
#include <rte_spinlock.h>
#include <rte_byteorder.h>
#include <rte_log.h>
#include <rte_malloc.h>
#include <rte_crypto_sym.h>
#ifdef RTE_LIBRTE_SECURITY
#include <rte_security.h>
#endif

#include "qat_logs.h"
#include "qat_sym_session.h"
#include "qat_sym_pmd.h"

/* SHA1 - 20 bytes - Initialiser state can be found in FIPS stds 180-2 */
static const uint8_t sha1InitialState[] = {
    0x67, 0x45, 0x23, 0x01, 0xef, 0xcd, 0xab, 0x89, 0x98, 0xba,
    0xdc, 0xfe, 0x10, 0x32, 0x54, 0x76, 0xc3, 0xd2, 0xe1, 0xf0};

/* SHA 224 - 32 bytes - Initialiser state can be found in FIPS stds 180-2 */
static const uint8_t sha224InitialState[] = {
    0xc1, 0x05, 0x9e, 0xd8, 0x36, 0x7c, 0xd5, 0x07, 0x30, 0x70, 0xdd,
    0x17, 0xf7, 0x0e, 0x59, 0x39, 0xff, 0xc0, 0x0b, 0x31, 0x68, 0x58,
    0x15, 0x11, 0x64, 0xf9, 0x8f, 0xa7, 0xbe, 0xfa, 0x4f, 0xa4};

/* SHA 256 - 32 bytes - Initialiser state can be found in FIPS stds 180-2 */
static const uint8_t sha256InitialState[] = {
    0x6a, 0x09, 0xe6, 0x67, 0xbb, 0x67, 0xae, 0x85, 0x3c, 0x6e, 0xf3,
    0x72, 0xa5, 0x4f, 0xf5, 0x3a, 0x51, 0x0e, 0x52, 0x7f, 0x9b, 0x05,
    0x68, 0x8c, 0x1f, 0x83, 0xd9, 0xab, 0x5b, 0xe0, 0xcd, 0x19};

/* SHA 384 - 64 bytes - Initialiser state can be found in FIPS stds 180-2 */
static const uint8_t sha384InitialState[] = {
    0xcb, 0xbb, 0x9d, 0x5d, 0xc1, 0x05, 0x9e, 0xd8, 0x62, 0x9a, 0x29,
    0x2a, 0x36, 0x7c, 0xd5, 0x07, 0x91, 0x59, 0x01, 0x5a, 0x30, 0x70,
    0xdd, 0x17, 0x15, 0x2f, 0xec, 0xd8, 0xf7, 0x0e, 0x59, 0x39, 0x67,
    0x33, 0x26, 0x67, 0xff, 0xc0, 0x0b, 0x31, 0x8e, 0xb4, 0x4a, 0x87,
    0x68, 0x58, 0x15, 0x11, 0xdb, 0x0c, 0x2e, 0x0d, 0x64, 0xf9, 0x8f,
    0xa7, 0x47, 0xb5, 0x48, 0x1d, 0xbe, 0xfa, 0x4f, 0xa4};

/* SHA 512 - 64 bytes - Initialiser state can be found in FIPS stds 180-2 */
static const uint8_t sha512InitialState[] = {
    0x6a, 0x09, 0xe6, 0x67, 0xf3, 0xbc, 0xc9, 0x08, 0xbb, 0x67, 0xae,
    0x85, 0x84, 0xca, 0xa7, 0x3b, 0x3c, 0x6e, 0xf3, 0x72, 0xfe, 0x94,
    0xf8, 0x2b, 0xa5, 0x4f, 0xf5, 0x3a, 0x5f, 0x1d, 0x36, 0xf1, 0x51,
    0x0e, 0x52, 0x7f, 0xad, 0xe6, 0x82, 0xd1, 0x9b, 0x05, 0x68, 0x8c,
    0x2b, 0x3e, 0x6c, 0x1f, 0x1f, 0x83, 0xd9, 0xab, 0xfb, 0x41, 0xbd,
    0x6b, 0x5b, 0xe0, 0xcd, 0x19, 0x13, 0x7e, 0x21, 0x79};

/** Frees a context previously created
 *  Depends on openssl libcrypto
 */
static void
bpi_cipher_ctx_free(void *bpi_ctx)
{
    if (bpi_ctx != NULL)
        EVP_CIPHER_CTX_free((EVP_CIPHER_CTX *)bpi_ctx);
}

/** Creates a context in either AES or DES in ECB mode
 *  Depends on openssl libcrypto
 */
static int
bpi_cipher_ctx_init(enum rte_crypto_cipher_algorithm cryptodev_algo,
        enum rte_crypto_cipher_operation direction __rte_unused,
        const uint8_t *key, uint16_t key_length, void **ctx)
{
    const EVP_CIPHER *algo = NULL;
    int ret;

    *ctx = EVP_CIPHER_CTX_new();
    if (*ctx == NULL) {
        ret = -ENOMEM;
        goto ctx_init_err;
    }

    if (cryptodev_algo == RTE_CRYPTO_CIPHER_DES_DOCSISBPI)
        algo = EVP_des_ecb();
    else
        if (key_length == ICP_QAT_HW_AES_128_KEY_SZ)
            algo = EVP_aes_128_ecb();
        else
            algo = EVP_aes_256_ecb();

    /* IV will be ECB encrypted whether direction is encrypt or decrypt */
    if (EVP_EncryptInit_ex(*ctx, algo, NULL, key, 0) != 1) {
        ret = -EINVAL;
        goto ctx_init_err;
    }

    return 0;

ctx_init_err:
    if (*ctx != NULL)
        EVP_CIPHER_CTX_free(*ctx);
    return ret;
}
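
/*
 * Scan the device's capability list for a symmetric cipher capability
 * matching the requested algorithm; returns 1 if supported, 0 otherwise.
 */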
static int
qat_is_cipher_alg_supported(enum rte_crypto_cipher_algorithm algo,
        struct qat_sym_dev_private *internals)
{
    int i = 0;
    const struct rte_cryptodev_capabilities *capability;

    while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
            RTE_CRYPTO_OP_TYPE_UNDEFINED) {
        if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
            continue;

        if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_CIPHER)
            continue;

        if (capability->sym.cipher.algo == algo)
            return 1;
    }
    return 0;
}

static int
qat_is_auth_alg_supported(enum rte_crypto_auth_algorithm algo,
        struct qat_sym_dev_private *internals)
{
    int i = 0;
    const struct rte_cryptodev_capabilities *capability;

    while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
            RTE_CRYPTO_OP_TYPE_UNDEFINED) {
        if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
            continue;

        if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_AUTH)
            continue;

        if (capability->sym.auth.algo == algo)
            return 1;
    }
    return 0;
}
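
/*
 * Clear a symmetric session: free the OpenSSL BPI context if one was
 * allocated, zero the private data and return it to its mempool.
 */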
void
qat_sym_session_clear(struct rte_cryptodev *dev,
        struct rte_cryptodev_sym_session *sess)
{
    uint8_t index = dev->driver_id;
    void *sess_priv = get_sym_session_private_data(sess, index);
    struct qat_sym_session *s = (struct qat_sym_session *)sess_priv;

    if (sess_priv) {
        if (s->bpi_ctx)
            bpi_cipher_ctx_free(s->bpi_ctx);
        memset(s, 0, qat_sym_session_get_private_size(dev));
        struct rte_mempool *sess_mp = rte_mempool_from_obj(sess_priv);

        set_sym_session_private_data(sess, index, NULL);
        rte_mempool_put(sess_mp, sess_priv);
    }
}
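
/*
 * Map a crypto xform chain onto a QAT firmware LA command id:
 * cipher only, auth only, AEAD, cipher-then-hash or hash-then-cipher.
 */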
static int
qat_get_cmd_id(const struct rte_crypto_sym_xform *xform)
{
    /* Cipher Only */
    if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER && xform->next == NULL)
        return ICP_QAT_FW_LA_CMD_CIPHER;

    /* Authentication Only */
    if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH && xform->next == NULL)
        return ICP_QAT_FW_LA_CMD_AUTH;

    /* AEAD */
    if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
        /* AES-GCM and AES-CCM work in opposite orders: GCM first
         * encrypts and then generates the hash, whereas AES-CCM
         * first generates the hash and then encrypts. A similar
         * relation applies to decryption.
         */
        if (xform->aead.op == RTE_CRYPTO_AEAD_OP_ENCRYPT)
            if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
                return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
            else
                return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
        else
            if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
                return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
            else
                return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
    }

    if (xform->next == NULL)
        return -1;

    /* Cipher then Authenticate */
    if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
            xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
        return ICP_QAT_FW_LA_CMD_CIPHER_HASH;

    /* Authenticate then Cipher */
    if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH &&
            xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
        return ICP_QAT_FW_LA_CMD_HASH_CIPHER;

    return -1;
}

static struct rte_crypto_auth_xform *
qat_get_auth_xform(struct rte_crypto_sym_xform *xform)
{
    do {
        if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH)
            return &xform->auth;

        xform = xform->next;
    } while (xform);

    return NULL;
}

static struct rte_crypto_cipher_xform *
qat_get_cipher_xform(struct rte_crypto_sym_xform *xform)
{
    do {
        if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
            return &xform->cipher;

        xform = xform->next;
    } while (xform);

    return NULL;
}
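
/*
 * Translate the cipher part of an xform chain into session fields
 * (algorithm, mode, direction, IV layout) and build the cipher half of
 * the content descriptor.
 */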
int
qat_sym_session_configure_cipher(struct rte_cryptodev *dev,
        struct rte_crypto_sym_xform *xform,
        struct qat_sym_session *session)
{
    struct qat_sym_dev_private *internals = dev->data->dev_private;
    struct rte_crypto_cipher_xform *cipher_xform = NULL;
    int ret;

    /* Get cipher xform from crypto xform chain */
    cipher_xform = qat_get_cipher_xform(xform);

    session->cipher_iv.offset = cipher_xform->iv.offset;
    session->cipher_iv.length = cipher_xform->iv.length;

    switch (cipher_xform->algo) {
    case RTE_CRYPTO_CIPHER_AES_CBC:
        if (qat_sym_validate_aes_key(cipher_xform->key.length,
                &session->qat_cipher_alg) != 0) {
            QAT_LOG(ERR, "Invalid AES cipher key size");
            ret = -EINVAL;
            goto error_out;
        }
        session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
        break;
    case RTE_CRYPTO_CIPHER_AES_CTR:
        if (qat_sym_validate_aes_key(cipher_xform->key.length,
                &session->qat_cipher_alg) != 0) {
            QAT_LOG(ERR, "Invalid AES cipher key size");
            ret = -EINVAL;
            goto error_out;
        }
        session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
        break;
    case RTE_CRYPTO_CIPHER_SNOW3G_UEA2:
        if (qat_sym_validate_snow3g_key(cipher_xform->key.length,
                &session->qat_cipher_alg) != 0) {
            QAT_LOG(ERR, "Invalid SNOW 3G cipher key size");
            ret = -EINVAL;
            goto error_out;
        }
        session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
        break;
    case RTE_CRYPTO_CIPHER_NULL:
        session->qat_cipher_alg = ICP_QAT_HW_CIPHER_ALGO_NULL;
        session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
        break;
    case RTE_CRYPTO_CIPHER_KASUMI_F8:
        if (qat_sym_validate_kasumi_key(cipher_xform->key.length,
                &session->qat_cipher_alg) != 0) {
            QAT_LOG(ERR, "Invalid KASUMI cipher key size");
            ret = -EINVAL;
            goto error_out;
        }
        session->qat_mode = ICP_QAT_HW_CIPHER_F8_MODE;
        break;
    case RTE_CRYPTO_CIPHER_3DES_CBC:
        if (qat_sym_validate_3des_key(cipher_xform->key.length,
                &session->qat_cipher_alg) != 0) {
            QAT_LOG(ERR, "Invalid 3DES cipher key size");
            ret = -EINVAL;
            goto error_out;
        }
        session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
        break;
    case RTE_CRYPTO_CIPHER_DES_CBC:
        if (qat_sym_validate_des_key(cipher_xform->key.length,
                &session->qat_cipher_alg) != 0) {
            QAT_LOG(ERR, "Invalid DES cipher key size");
            ret = -EINVAL;
            goto error_out;
        }
        session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
        break;
    case RTE_CRYPTO_CIPHER_3DES_CTR:
        if (qat_sym_validate_3des_key(cipher_xform->key.length,
                &session->qat_cipher_alg) != 0) {
            QAT_LOG(ERR, "Invalid 3DES cipher key size");
            ret = -EINVAL;
            goto error_out;
        }
        session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
        break;
    case RTE_CRYPTO_CIPHER_DES_DOCSISBPI:
        ret = bpi_cipher_ctx_init(
                cipher_xform->algo,
                cipher_xform->op,
                cipher_xform->key.data,
                cipher_xform->key.length,
                &session->bpi_ctx);
        if (ret != 0) {
            QAT_LOG(ERR, "failed to create DES BPI ctx");
            goto error_out;
        }
        if (qat_sym_validate_des_key(cipher_xform->key.length,
                &session->qat_cipher_alg) != 0) {
            QAT_LOG(ERR, "Invalid DES cipher key size");
            ret = -EINVAL;
            goto error_out;
        }
        session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
        break;
    case RTE_CRYPTO_CIPHER_AES_DOCSISBPI:
        ret = bpi_cipher_ctx_init(
                cipher_xform->algo,
                cipher_xform->op,
                cipher_xform->key.data,
                cipher_xform->key.length,
                &session->bpi_ctx);
        if (ret != 0) {
            QAT_LOG(ERR, "failed to create AES BPI ctx");
            goto error_out;
        }
        if (qat_sym_validate_aes_docsisbpi_key(cipher_xform->key.length,
                &session->qat_cipher_alg) != 0) {
            QAT_LOG(ERR, "Invalid AES DOCSISBPI key size");
            ret = -EINVAL;
            goto error_out;
        }
        session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
        break;
    case RTE_CRYPTO_CIPHER_ZUC_EEA3:
        if (!qat_is_cipher_alg_supported(
                cipher_xform->algo, internals)) {
            QAT_LOG(ERR, "%s not supported on this device",
                rte_crypto_cipher_algorithm_strings
                    [cipher_xform->algo]);
            ret = -ENOTSUP;
            goto error_out;
        }
        if (qat_sym_validate_zuc_key(cipher_xform->key.length,
                &session->qat_cipher_alg) != 0) {
            QAT_LOG(ERR, "Invalid ZUC cipher key size");
            ret = -EINVAL;
            goto error_out;
        }
        session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
        break;
    case RTE_CRYPTO_CIPHER_AES_XTS:
        if ((cipher_xform->key.length/2) == ICP_QAT_HW_AES_192_KEY_SZ) {
            QAT_LOG(ERR, "AES-XTS-192 not supported");
            ret = -EINVAL;
            goto error_out;
        }
        if (qat_sym_validate_aes_key((cipher_xform->key.length/2),
                &session->qat_cipher_alg) != 0) {
            QAT_LOG(ERR, "Invalid AES-XTS cipher key size");
            ret = -EINVAL;
            goto error_out;
        }
        session->qat_mode = ICP_QAT_HW_CIPHER_XTS_MODE;
        break;
    case RTE_CRYPTO_CIPHER_3DES_ECB:
    case RTE_CRYPTO_CIPHER_AES_ECB:
    case RTE_CRYPTO_CIPHER_AES_F8:
    case RTE_CRYPTO_CIPHER_ARC4:
        QAT_LOG(ERR, "Crypto QAT PMD: Unsupported Cipher alg %u",
                cipher_xform->algo);
        ret = -ENOTSUP;
        goto error_out;
    default:
        QAT_LOG(ERR, "Crypto: Undefined Cipher specified %u\n",
                cipher_xform->algo);
        ret = -EINVAL;
        goto error_out;
    }

    if (cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
        session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
    else
        session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;

    if (qat_sym_session_aead_create_cd_cipher(session,
                cipher_xform->key.data,
                cipher_xform->key.length)) {
        ret = -EINVAL;
        goto error_out;
    }

    return 0;

error_out:
    if (session->bpi_ctx) {
        bpi_cipher_ctx_free(session->bpi_ctx);
        session->bpi_ctx = NULL;
    }
    return ret;
}

int
qat_sym_session_configure(struct rte_cryptodev *dev,
        struct rte_crypto_sym_xform *xform,
        struct rte_cryptodev_sym_session *sess,
        struct rte_mempool *mempool)
{
    void *sess_private_data;
    int ret;

    if (rte_mempool_get(mempool, &sess_private_data)) {
        CDEV_LOG_ERR(
            "Couldn't get object from session mempool");
        return -ENOMEM;
    }

    ret = qat_sym_session_set_parameters(dev, xform, sess_private_data);
    if (ret != 0) {
        QAT_LOG(ERR,
            "Crypto QAT PMD: failed to configure session parameters");

        /* Return session to mempool */
        rte_mempool_put(mempool, sess_private_data);
        return ret;
    }

    set_sym_session_private_data(sess, dev->driver_id,
        sess_private_data);

    return 0;
}
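
/*
 * Set the Use Extended Protocol Flags bit in the common header plus the
 * per-algorithm hash and proto flags needed by mixed crypto sessions.
 */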
static void
qat_sym_session_set_ext_hash_flags(struct qat_sym_session *session,
        uint8_t hash_flag)
{
    struct icp_qat_fw_comn_req_hdr *header = &session->fw_req.comn_hdr;
    struct icp_qat_fw_cipher_auth_cd_ctrl_hdr *cd_ctrl =
            (struct icp_qat_fw_cipher_auth_cd_ctrl_hdr *)
            session->fw_req.cd_ctrl.content_desc_ctrl_lw;

    /* Set the Use Extended Protocol Flags bit in LW 1 */
    QAT_FIELD_SET(header->comn_req_flags,
            QAT_COMN_EXT_FLAGS_USED,
            QAT_COMN_EXT_FLAGS_BITPOS,
            QAT_COMN_EXT_FLAGS_MASK);

    /* Set Hash Flags in LW 28 */
    cd_ctrl->hash_flags |= hash_flag;

    /* Set proto flags in LW 1 */
    switch (session->qat_cipher_alg) {
    case ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2:
        ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
                ICP_QAT_FW_LA_SNOW_3G_PROTO);
        ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
                header->serv_specif_flags, 0);
        break;
    case ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3:
        ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
                ICP_QAT_FW_LA_NO_PROTO);
        ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
                header->serv_specif_flags,
                ICP_QAT_FW_LA_ZUC_3G_PROTO);
        break;
    default:
        ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
                ICP_QAT_FW_LA_NO_PROTO);
        ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
                header->serv_specif_flags, 0);
        break;
    }
}
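
/*
 * Mixed cipher/hash combinations (a wireless algorithm on only one side,
 * or AES-CMAC/NULL auth paired with a SNOW 3G or ZUC cipher) need the
 * extended hash flags, and a minimum device generation that depends on
 * whether the device advertises the mixed-crypto capability.
 */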
static void
qat_sym_session_handle_mixed(const struct rte_cryptodev *dev,
        struct qat_sym_session *session)
{
    const struct qat_sym_dev_private *qat_private = dev->data->dev_private;
    enum qat_device_gen min_dev_gen = (qat_private->internal_capabilities &
            QAT_SYM_CAP_MIXED_CRYPTO) ? QAT_GEN2 : QAT_GEN3;

    if (session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3 &&
            session->qat_cipher_alg !=
            ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
        session->min_qat_dev_gen = min_dev_gen;
        qat_sym_session_set_ext_hash_flags(session,
            1 << ICP_QAT_FW_AUTH_HDR_FLAG_ZUC_EIA3_BITPOS);
    } else if (session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2 &&
            session->qat_cipher_alg !=
            ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
        session->min_qat_dev_gen = min_dev_gen;
        qat_sym_session_set_ext_hash_flags(session,
            1 << ICP_QAT_FW_AUTH_HDR_FLAG_SNOW3G_UIA2_BITPOS);
    } else if ((session->aes_cmac ||
            session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_NULL) &&
            (session->qat_cipher_alg ==
            ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2 ||
            session->qat_cipher_alg ==
            ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)) {
        session->min_qat_dev_gen = min_dev_gen;
        qat_sym_session_set_ext_hash_flags(session, 0);
    }
}

int
qat_sym_session_set_parameters(struct rte_cryptodev *dev,
        struct rte_crypto_sym_xform *xform, void *session_private)
{
    struct qat_sym_session *session = session_private;
    int ret;
    int qat_cmd_id;

    /* Verify the session physical address is known */
    rte_iova_t session_paddr = rte_mempool_virt2iova(session);
    if (session_paddr == 0 || session_paddr == RTE_BAD_IOVA) {
        QAT_LOG(ERR,
            "Session physical address unknown. Bad memory pool.");
        return -EINVAL;
    }

    /* Set context descriptor physical address */
    session->cd_paddr = session_paddr +
            offsetof(struct qat_sym_session, cd);

    session->min_qat_dev_gen = QAT_GEN1;

    /* Get requested QAT command id */
    qat_cmd_id = qat_get_cmd_id(xform);
    if (qat_cmd_id < 0 || qat_cmd_id >= ICP_QAT_FW_LA_CMD_DELIMITER) {
        QAT_LOG(ERR, "Unsupported xform chain requested");
        return -ENOTSUP;
    }
    session->qat_cmd = (enum icp_qat_fw_la_cmd_id)qat_cmd_id;
    switch (session->qat_cmd) {
    case ICP_QAT_FW_LA_CMD_CIPHER:
        ret = qat_sym_session_configure_cipher(dev, xform, session);
        if (ret < 0)
            return ret;
        break;
    case ICP_QAT_FW_LA_CMD_AUTH:
        ret = qat_sym_session_configure_auth(dev, xform, session);
        if (ret < 0)
            return ret;
        break;
    case ICP_QAT_FW_LA_CMD_CIPHER_HASH:
        if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
            ret = qat_sym_session_configure_aead(dev, xform,
                    session);
            if (ret < 0)
                return ret;
        } else {
            ret = qat_sym_session_configure_cipher(dev,
                    xform, session);
            if (ret < 0)
                return ret;
            ret = qat_sym_session_configure_auth(dev,
                    xform, session);
            if (ret < 0)
                return ret;
            /* Special handling of mixed hash+cipher algorithms */
            qat_sym_session_handle_mixed(dev, session);
        }
        break;
    case ICP_QAT_FW_LA_CMD_HASH_CIPHER:
        if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
            ret = qat_sym_session_configure_aead(dev, xform,
                    session);
            if (ret < 0)
                return ret;
        } else {
            ret = qat_sym_session_configure_auth(dev,
                    xform, session);
            if (ret < 0)
                return ret;
            ret = qat_sym_session_configure_cipher(dev,
                    xform, session);
            if (ret < 0)
                return ret;
            /* Special handling of mixed hash+cipher algorithms */
            qat_sym_session_handle_mixed(dev, session);
        }
        break;
    case ICP_QAT_FW_LA_CMD_TRNG_GET_RANDOM:
    case ICP_QAT_FW_LA_CMD_TRNG_TEST:
    case ICP_QAT_FW_LA_CMD_SSL3_KEY_DERIVE:
    case ICP_QAT_FW_LA_CMD_TLS_V1_1_KEY_DERIVE:
    case ICP_QAT_FW_LA_CMD_TLS_V1_2_KEY_DERIVE:
    case ICP_QAT_FW_LA_CMD_MGF1:
    case ICP_QAT_FW_LA_CMD_AUTH_PRE_COMP:
    case ICP_QAT_FW_LA_CMD_CIPHER_PRE_COMP:
    case ICP_QAT_FW_LA_CMD_DELIMITER:
        QAT_LOG(ERR, "Unsupported Service %u",
                session->qat_cmd);
        return -ENOTSUP;
    default:
        QAT_LOG(ERR, "Unsupported Service %u",
                session->qat_cmd);
        return -ENOTSUP;
    }

    return 0;
}
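
/*
 * Configure a single-pass AEAD session (GEN3 and later): the cipher
 * slice performs encryption and authentication in one pass, so the
 * request becomes a plain CIPHER command with the single-pass protocol
 * flag set and the AAD/digest sizes carried in the cipher request params.
 */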
static int
qat_sym_session_handle_single_pass(struct qat_sym_session *session,
        struct rte_crypto_aead_xform *aead_xform)
{
    struct icp_qat_fw_la_cipher_req_params *cipher_param =
            (void *) &session->fw_req.serv_specif_rqpars;

    session->is_single_pass = 1;
    session->min_qat_dev_gen = QAT_GEN3;
    session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER;
    if (aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) {
        session->qat_mode = ICP_QAT_HW_CIPHER_AEAD_MODE;
        ICP_QAT_FW_LA_GCM_IV_LEN_FLAG_SET(
            session->fw_req.comn_hdr.serv_specif_flags,
            ICP_QAT_FW_LA_GCM_IV_LEN_12_OCTETS);
    } else {
        /* Chacha-Poly is a special case that uses QAT CTR mode */
        session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
    }
    session->cipher_iv.offset = aead_xform->iv.offset;
    session->cipher_iv.length = aead_xform->iv.length;
    if (qat_sym_session_aead_create_cd_cipher(session,
            aead_xform->key.data, aead_xform->key.length))
        return -EINVAL;
    session->aad_len = aead_xform->aad_length;
    session->digest_length = aead_xform->digest_length;
    if (aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
        session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
        session->auth_op = ICP_QAT_HW_AUTH_GENERATE;
        ICP_QAT_FW_LA_RET_AUTH_SET(
            session->fw_req.comn_hdr.serv_specif_flags,
            ICP_QAT_FW_LA_RET_AUTH_RES);
    } else {
        session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
        session->auth_op = ICP_QAT_HW_AUTH_VERIFY;
        ICP_QAT_FW_LA_CMP_AUTH_SET(
            session->fw_req.comn_hdr.serv_specif_flags,
            ICP_QAT_FW_LA_CMP_AUTH_RES);
    }
    ICP_QAT_FW_LA_SINGLE_PASS_PROTO_FLAG_SET(
        session->fw_req.comn_hdr.serv_specif_flags,
        ICP_QAT_FW_LA_SINGLE_PASS_PROTO);
    ICP_QAT_FW_LA_PROTO_SET(
        session->fw_req.comn_hdr.serv_specif_flags,
        ICP_QAT_FW_LA_NO_PROTO);
    session->fw_req.comn_hdr.service_cmd_id =
            ICP_QAT_FW_LA_CMD_CIPHER;
    session->cd.cipher.cipher_config.val =
            ICP_QAT_HW_CIPHER_CONFIG_BUILD(
                ICP_QAT_HW_CIPHER_AEAD_MODE,
                session->qat_cipher_alg,
                ICP_QAT_HW_CIPHER_NO_CONVERT,
                session->qat_dir);
    QAT_FIELD_SET(session->cd.cipher.cipher_config.val,
            aead_xform->digest_length,
            QAT_CIPHER_AEAD_HASH_CMP_LEN_BITPOS,
            QAT_CIPHER_AEAD_HASH_CMP_LEN_MASK);
    session->cd.cipher.cipher_config.reserved =
            ICP_QAT_HW_CIPHER_CONFIG_BUILD_UPPER(
                aead_xform->aad_length);
    cipher_param->spc_aad_sz = aead_xform->aad_length;
    cipher_param->spc_auth_res_sz = aead_xform->digest_length;

    return 0;
}

int
qat_sym_session_configure_auth(struct rte_cryptodev *dev,
                struct rte_crypto_sym_xform *xform,
                struct qat_sym_session *session)
{
    struct rte_crypto_auth_xform *auth_xform = qat_get_auth_xform(xform);
    struct qat_sym_dev_private *internals = dev->data->dev_private;
    const uint8_t *key_data = auth_xform->key.data;
    uint8_t key_length = auth_xform->key.length;

    session->aes_cmac = 0;
    session->auth_iv.offset = auth_xform->iv.offset;
    session->auth_iv.length = auth_xform->iv.length;
    session->auth_mode = ICP_QAT_HW_AUTH_MODE1;

    switch (auth_xform->algo) {
    case RTE_CRYPTO_AUTH_SHA1:
        session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA1;
        session->auth_mode = ICP_QAT_HW_AUTH_MODE0;
        break;
    case RTE_CRYPTO_AUTH_SHA224:
        session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA224;
        session->auth_mode = ICP_QAT_HW_AUTH_MODE0;
        break;
    case RTE_CRYPTO_AUTH_SHA256:
        session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA256;
        session->auth_mode = ICP_QAT_HW_AUTH_MODE0;
        break;
    case RTE_CRYPTO_AUTH_SHA384:
        session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA384;
        session->auth_mode = ICP_QAT_HW_AUTH_MODE0;
        break;
    case RTE_CRYPTO_AUTH_SHA512:
        session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA512;
        session->auth_mode = ICP_QAT_HW_AUTH_MODE0;
        break;
    case RTE_CRYPTO_AUTH_SHA1_HMAC:
        session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA1;
        break;
    case RTE_CRYPTO_AUTH_SHA224_HMAC:
        session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA224;
        break;
    case RTE_CRYPTO_AUTH_SHA256_HMAC:
        session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA256;
        break;
    case RTE_CRYPTO_AUTH_SHA384_HMAC:
        session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA384;
        break;
    case RTE_CRYPTO_AUTH_SHA512_HMAC:
        session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA512;
        break;
    case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
        session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
        break;
    case RTE_CRYPTO_AUTH_AES_CMAC:
        session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
        session->aes_cmac = 1;
        break;
    case RTE_CRYPTO_AUTH_AES_GMAC:
        if (qat_sym_validate_aes_key(auth_xform->key.length,
                &session->qat_cipher_alg) != 0) {
            QAT_LOG(ERR, "Invalid AES key size");
            return -EINVAL;
        }
        session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
        session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
        if (session->auth_iv.length == 0)
            session->auth_iv.length = AES_GCM_J0_LEN;
        break;
    case RTE_CRYPTO_AUTH_SNOW3G_UIA2:
        session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2;
        break;
    case RTE_CRYPTO_AUTH_MD5_HMAC:
        session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_MD5;
        break;
    case RTE_CRYPTO_AUTH_NULL:
        session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_NULL;
        break;
    case RTE_CRYPTO_AUTH_KASUMI_F9:
        session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_KASUMI_F9;
        break;
    case RTE_CRYPTO_AUTH_ZUC_EIA3:
        if (!qat_is_auth_alg_supported(auth_xform->algo, internals)) {
            QAT_LOG(ERR, "%s not supported on this device",
                rte_crypto_auth_algorithm_strings
                [auth_xform->algo]);
            return -ENOTSUP;
        }
        session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3;
        break;
    case RTE_CRYPTO_AUTH_MD5:
    case RTE_CRYPTO_AUTH_AES_CBC_MAC:
        QAT_LOG(ERR, "Crypto: Unsupported hash alg %u",
                auth_xform->algo);
        return -ENOTSUP;
    default:
        QAT_LOG(ERR, "Crypto: Undefined Hash algo %u specified",
                auth_xform->algo);
        return -EINVAL;
    }

    if (auth_xform->algo == RTE_CRYPTO_AUTH_AES_GMAC) {
        if (auth_xform->op == RTE_CRYPTO_AUTH_OP_GENERATE) {
            session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER_HASH;
            session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
            /*
             * The cipher desc content must be created first,
             * then the authentication
             */
            if (qat_sym_session_aead_create_cd_cipher(session,
                        auth_xform->key.data,
                        auth_xform->key.length))
                return -EINVAL;

            if (qat_sym_session_aead_create_cd_auth(session,
                        key_data,
                        key_length,
                        0,
                        auth_xform->digest_length,
                        auth_xform->op))
                return -EINVAL;
        } else {
            session->qat_cmd = ICP_QAT_FW_LA_CMD_HASH_CIPHER;
            session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
            /*
             * The authentication desc content must be created
             * first, then the cipher
             */
            if (qat_sym_session_aead_create_cd_auth(session,
                        key_data,
                        key_length,
                        0,
                        auth_xform->digest_length,
                        auth_xform->op))
                return -EINVAL;

            if (qat_sym_session_aead_create_cd_cipher(session,
                        auth_xform->key.data,
                        auth_xform->key.length))
                return -EINVAL;
        }
        /* Restore to authentication only */
        session->qat_cmd = ICP_QAT_FW_LA_CMD_AUTH;
    } else {
        if (qat_sym_session_aead_create_cd_auth(session,
                key_data,
                key_length,
                0,
                auth_xform->digest_length,
                auth_xform->op))
            return -EINVAL;
    }

    session->digest_length = auth_xform->digest_length;
    return 0;
}
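
/*
 * Translate an AEAD xform into session fields. AES-GCM with a 12-byte IV
 * on GEN3+ devices and Chacha20-Poly1305 take the single-pass path; the
 * remaining cases build chained cipher and auth content descriptors.
 */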
int
qat_sym_session_configure_aead(struct rte_cryptodev *dev,
                struct rte_crypto_sym_xform *xform,
                struct qat_sym_session *session)
{
    struct rte_crypto_aead_xform *aead_xform = &xform->aead;
    enum rte_crypto_auth_operation crypto_operation;
    struct qat_sym_dev_private *internals =
            dev->data->dev_private;
    enum qat_device_gen qat_dev_gen =
            internals->qat_dev->qat_dev_gen;

    /*
     * Store AEAD IV parameters as cipher IV,
     * to avoid unnecessary memory usage
     */
    session->cipher_iv.offset = xform->aead.iv.offset;
    session->cipher_iv.length = xform->aead.iv.length;

    session->auth_mode = ICP_QAT_HW_AUTH_MODE1;

    session->is_single_pass = 0;
    switch (aead_xform->algo) {
    case RTE_CRYPTO_AEAD_AES_GCM:
        if (qat_sym_validate_aes_key(aead_xform->key.length,
                &session->qat_cipher_alg) != 0) {
            QAT_LOG(ERR, "Invalid AES key size");
            return -EINVAL;
        }
        session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
        session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
        if (qat_dev_gen > QAT_GEN2 && aead_xform->iv.length ==
                QAT_AES_GCM_SPC_IV_SIZE) {
            return qat_sym_session_handle_single_pass(session,
                    aead_xform);
        }
        if (session->cipher_iv.length == 0)
            session->cipher_iv.length = AES_GCM_J0_LEN;
        break;
    case RTE_CRYPTO_AEAD_AES_CCM:
        if (qat_sym_validate_aes_key(aead_xform->key.length,
                &session->qat_cipher_alg) != 0) {
            QAT_LOG(ERR, "Invalid AES key size");
            return -EINVAL;
        }
        session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
        session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC;
        break;
    case RTE_CRYPTO_AEAD_CHACHA20_POLY1305:
        if (aead_xform->key.length != ICP_QAT_HW_CHACHAPOLY_KEY_SZ)
            return -EINVAL;
        session->qat_cipher_alg =
                ICP_QAT_HW_CIPHER_ALGO_CHACHA20_POLY1305;
        return qat_sym_session_handle_single_pass(session,
                aead_xform);
    default:
        QAT_LOG(ERR, "Crypto: Undefined AEAD specified %u\n",
                aead_xform->algo);
        return -EINVAL;
    }

    if ((aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT &&
            aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) ||
            (aead_xform->op == RTE_CRYPTO_AEAD_OP_DECRYPT &&
            aead_xform->algo == RTE_CRYPTO_AEAD_AES_CCM)) {
        session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
        /*
         * The cipher desc content must be created first,
         * then the authentication
         */
        crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
            RTE_CRYPTO_AUTH_OP_GENERATE : RTE_CRYPTO_AUTH_OP_VERIFY;

        if (qat_sym_session_aead_create_cd_cipher(session,
                    aead_xform->key.data,
                    aead_xform->key.length))
            return -EINVAL;

        if (qat_sym_session_aead_create_cd_auth(session,
                    aead_xform->key.data,
                    aead_xform->key.length,
                    aead_xform->aad_length,
                    aead_xform->digest_length,
                    crypto_operation))
            return -EINVAL;
    } else {
        session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
        /*
         * The authentication desc content must be created first,
         * then the cipher
         */
        crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
            RTE_CRYPTO_AUTH_OP_VERIFY : RTE_CRYPTO_AUTH_OP_GENERATE;

        if (qat_sym_session_aead_create_cd_auth(session,
                    aead_xform->key.data,
                    aead_xform->key.length,
                    aead_xform->aad_length,
                    aead_xform->digest_length,
                    crypto_operation))
            return -EINVAL;

        if (qat_sym_session_aead_create_cd_cipher(session,
                    aead_xform->key.data,
                    aead_xform->key.length))
            return -EINVAL;
    }

    session->digest_length = aead_xform->digest_length;
    return 0;
}

unsigned int qat_sym_session_get_private_size(
        struct rte_cryptodev *dev __rte_unused)
{
    return RTE_ALIGN_CEIL(sizeof(struct qat_sym_session), 8);
}

/* returns block size in bytes per cipher algo */
int qat_cipher_get_block_size(enum icp_qat_hw_cipher_algo qat_cipher_alg)
{
    switch (qat_cipher_alg) {
    case ICP_QAT_HW_CIPHER_ALGO_DES:
        return ICP_QAT_HW_DES_BLK_SZ;
    case ICP_QAT_HW_CIPHER_ALGO_3DES:
        return ICP_QAT_HW_3DES_BLK_SZ;
    case ICP_QAT_HW_CIPHER_ALGO_AES128:
    case ICP_QAT_HW_CIPHER_ALGO_AES192:
    case ICP_QAT_HW_CIPHER_ALGO_AES256:
        return ICP_QAT_HW_AES_BLK_SZ;
    default:
        QAT_LOG(ERR, "invalid block cipher alg %u", qat_cipher_alg);
        return -EFAULT;
    }
    return -EFAULT;
}

/*
 * Returns size in bytes per hash algo for state1 size field in cd_ctrl
 * This is digest size rounded up to nearest quadword
 */
static int qat_hash_get_state1_size(enum icp_qat_hw_auth_algo qat_hash_alg)
{
    switch (qat_hash_alg) {
    case ICP_QAT_HW_AUTH_ALGO_SHA1:
        return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA1_STATE1_SZ,
                QAT_HW_DEFAULT_ALIGNMENT);
    case ICP_QAT_HW_AUTH_ALGO_SHA224:
        return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA224_STATE1_SZ,
                QAT_HW_DEFAULT_ALIGNMENT);
    case ICP_QAT_HW_AUTH_ALGO_SHA256:
        return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA256_STATE1_SZ,
                QAT_HW_DEFAULT_ALIGNMENT);
    case ICP_QAT_HW_AUTH_ALGO_SHA384:
        return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA384_STATE1_SZ,
                QAT_HW_DEFAULT_ALIGNMENT);
    case ICP_QAT_HW_AUTH_ALGO_SHA512:
        return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
                QAT_HW_DEFAULT_ALIGNMENT);
    case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
        return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ,
                QAT_HW_DEFAULT_ALIGNMENT);
    case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
    case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
        return QAT_HW_ROUND_UP(ICP_QAT_HW_GALOIS_128_STATE1_SZ,
                QAT_HW_DEFAULT_ALIGNMENT);
    case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
        return QAT_HW_ROUND_UP(ICP_QAT_HW_ZUC_3G_EIA3_STATE1_SZ,
                QAT_HW_DEFAULT_ALIGNMENT);
    case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
        return QAT_HW_ROUND_UP(ICP_QAT_HW_SNOW_3G_UIA2_STATE1_SZ,
                QAT_HW_DEFAULT_ALIGNMENT);
    case ICP_QAT_HW_AUTH_ALGO_MD5:
        return QAT_HW_ROUND_UP(ICP_QAT_HW_MD5_STATE1_SZ,
                QAT_HW_DEFAULT_ALIGNMENT);
    case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
        return QAT_HW_ROUND_UP(ICP_QAT_HW_KASUMI_F9_STATE1_SZ,
                QAT_HW_DEFAULT_ALIGNMENT);
    case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
        return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_CBC_MAC_STATE1_SZ,
                QAT_HW_DEFAULT_ALIGNMENT);
    case ICP_QAT_HW_AUTH_ALGO_NULL:
        return QAT_HW_ROUND_UP(ICP_QAT_HW_NULL_STATE1_SZ,
                QAT_HW_DEFAULT_ALIGNMENT);
    case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
        /* return maximum state1 size in this case */
        return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
                QAT_HW_DEFAULT_ALIGNMENT);
    default:
        QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
        return -EFAULT;
    }
    return -EFAULT;
}

/* returns digest size in bytes per hash algo */
static int qat_hash_get_digest_size(enum icp_qat_hw_auth_algo qat_hash_alg)
{
    switch (qat_hash_alg) {
    case ICP_QAT_HW_AUTH_ALGO_SHA1:
        return ICP_QAT_HW_SHA1_STATE1_SZ;
    case ICP_QAT_HW_AUTH_ALGO_SHA224:
        return ICP_QAT_HW_SHA224_STATE1_SZ;
    case ICP_QAT_HW_AUTH_ALGO_SHA256:
        return ICP_QAT_HW_SHA256_STATE1_SZ;
    case ICP_QAT_HW_AUTH_ALGO_SHA384:
        return ICP_QAT_HW_SHA384_STATE1_SZ;
    case ICP_QAT_HW_AUTH_ALGO_SHA512:
        return ICP_QAT_HW_SHA512_STATE1_SZ;
    case ICP_QAT_HW_AUTH_ALGO_MD5:
        return ICP_QAT_HW_MD5_STATE1_SZ;
    case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
        return ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
    case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
        /* return maximum digest size in this case */
        return ICP_QAT_HW_SHA512_STATE1_SZ;
    default:
        QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
        return -EFAULT;
    }
    return -EFAULT;
}

/* returns block size in bytes per hash algo */
static int qat_hash_get_block_size(enum icp_qat_hw_auth_algo qat_hash_alg)
{
    switch (qat_hash_alg) {
    case ICP_QAT_HW_AUTH_ALGO_SHA1:
        return SHA_CBLOCK;
    case ICP_QAT_HW_AUTH_ALGO_SHA224:
        return SHA256_CBLOCK;
    case ICP_QAT_HW_AUTH_ALGO_SHA256:
        return SHA256_CBLOCK;
    case ICP_QAT_HW_AUTH_ALGO_SHA384:
        return SHA512_CBLOCK;
    case ICP_QAT_HW_AUTH_ALGO_SHA512:
        return SHA512_CBLOCK;
    case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
        return 16;
    case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
        return ICP_QAT_HW_AES_BLK_SZ;
    case ICP_QAT_HW_AUTH_ALGO_MD5:
        return MD5_CBLOCK;
    case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
        /* return maximum block size in this case */
        return SHA512_CBLOCK;
    default:
        QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
        return -EFAULT;
    }
    return -EFAULT;
}
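
/*
 * The partial_hash_* helpers run a single compression-function pass
 * (one *_Transform call) over exactly one input block and copy the raw
 * intermediate state out of the OpenSSL context. QAT wants this
 * intermediate state, not a finalised digest, for the HMAC ipad/opad
 * precomputes below.
 */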
static int partial_hash_sha1(uint8_t *data_in, uint8_t *data_out)
{
    SHA_CTX ctx;

    if (!SHA1_Init(&ctx))
        return -EFAULT;
    SHA1_Transform(&ctx, data_in);
    rte_memcpy(data_out, &ctx, SHA_DIGEST_LENGTH);
    return 0;
}

static int partial_hash_sha224(uint8_t *data_in, uint8_t *data_out)
{
    SHA256_CTX ctx;

    if (!SHA224_Init(&ctx))
        return -EFAULT;
    SHA256_Transform(&ctx, data_in);
    rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
    return 0;
}

static int partial_hash_sha256(uint8_t *data_in, uint8_t *data_out)
{
    SHA256_CTX ctx;

    if (!SHA256_Init(&ctx))
        return -EFAULT;
    SHA256_Transform(&ctx, data_in);
    rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
    return 0;
}

static int partial_hash_sha384(uint8_t *data_in, uint8_t *data_out)
{
    SHA512_CTX ctx;

    if (!SHA384_Init(&ctx))
        return -EFAULT;
    SHA512_Transform(&ctx, data_in);
    rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
    return 0;
}

static int partial_hash_sha512(uint8_t *data_in, uint8_t *data_out)
{
    SHA512_CTX ctx;

    if (!SHA512_Init(&ctx))
        return -EFAULT;
    SHA512_Transform(&ctx, data_in);
    rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
    return 0;
}

static int partial_hash_md5(uint8_t *data_in, uint8_t *data_out)
{
    MD5_CTX ctx;

    if (!MD5_Init(&ctx))
        return -EFAULT;
    MD5_Transform(&ctx, data_in);
    rte_memcpy(data_out, &ctx, MD5_DIGEST_LENGTH);
    return 0;
}

static int partial_hash_compute(enum icp_qat_hw_auth_algo hash_alg,
            uint8_t *data_in,
            uint8_t *data_out)
{
    int digest_size;
    uint8_t digest[qat_hash_get_digest_size(
            ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
    uint32_t *hash_state_out_be32;
    uint64_t *hash_state_out_be64;
    int i;

    digest_size = qat_hash_get_digest_size(hash_alg);
    if (digest_size <= 0)
        return -EFAULT;

    hash_state_out_be32 = (uint32_t *)data_out;
    hash_state_out_be64 = (uint64_t *)data_out;

    switch (hash_alg) {
    case ICP_QAT_HW_AUTH_ALGO_SHA1:
        if (partial_hash_sha1(data_in, digest))
            return -EFAULT;
        for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
            *hash_state_out_be32 =
                rte_bswap32(*(((uint32_t *)digest)+i));
        break;
    case ICP_QAT_HW_AUTH_ALGO_SHA224:
        if (partial_hash_sha224(data_in, digest))
            return -EFAULT;
        for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
            *hash_state_out_be32 =
                rte_bswap32(*(((uint32_t *)digest)+i));
        break;
    case ICP_QAT_HW_AUTH_ALGO_SHA256:
        if (partial_hash_sha256(data_in, digest))
            return -EFAULT;
        for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
            *hash_state_out_be32 =
                rte_bswap32(*(((uint32_t *)digest)+i));
        break;
    case ICP_QAT_HW_AUTH_ALGO_SHA384:
        if (partial_hash_sha384(data_in, digest))
            return -EFAULT;
        for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
            *hash_state_out_be64 =
                rte_bswap64(*(((uint64_t *)digest)+i));
        break;
    case ICP_QAT_HW_AUTH_ALGO_SHA512:
        if (partial_hash_sha512(data_in, digest))
            return -EFAULT;
        for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
            *hash_state_out_be64 =
                rte_bswap64(*(((uint64_t *)digest)+i));
        break;
    case ICP_QAT_HW_AUTH_ALGO_MD5:
        if (partial_hash_md5(data_in, data_out))
            return -EFAULT;
        break;
    default:
        QAT_LOG(ERR, "invalid hash alg %u", hash_alg);
        return -EFAULT;
    }

    return 0;
}

#define HMAC_IPAD_VALUE 0x36
#define HMAC_OPAD_VALUE 0x5c
#define HASH_XCBC_PRECOMP_KEY_NUM 3

static const uint8_t AES_CMAC_SEED[ICP_QAT_HW_AES_128_KEY_SZ];
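
/*
 * CMAC subkey derivation (RFC 4493): left-shift the 16-byte base block
 * by one bit and, if the shifted-out MSB was set, XOR the constant Rb
 * into the last byte.
 */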
static void aes_cmac_key_derive(uint8_t *base, uint8_t *derived)
{
    int i;

    derived[0] = base[0] << 1;
    for (i = 1; i < ICP_QAT_HW_AES_BLK_SZ ; i++) {
        derived[i] = base[i] << 1;
        derived[i - 1] |= base[i] >> 7;
    }

    if (base[0] & 0x80)
        derived[ICP_QAT_HW_AES_BLK_SZ - 1] ^= QAT_AES_CMAC_CONST_RB;
}
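
/*
 * Pre-compute the hardware state for the chosen auth algorithm: the
 * CMAC/XCBC key schedule, the GHASH H value for GCM/GMAC, or the HMAC
 * ipad/opad partial hashes for the MD5/SHA family.
 */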
static int qat_sym_do_precomputes(enum icp_qat_hw_auth_algo hash_alg,
                const uint8_t *auth_key,
                uint16_t auth_keylen,
                uint8_t *p_state_buf,
                uint16_t *p_state_len,
                uint8_t aes_cmac)
{
    int block_size;
    uint8_t ipad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
    uint8_t opad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
    int i;

    if (hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC) {

        /* CMAC */
        if (aes_cmac) {
            AES_KEY enc_key;
            uint8_t *in = NULL;
            uint8_t k0[ICP_QAT_HW_AES_128_KEY_SZ];
            uint8_t *k1, *k2;

            auth_keylen = ICP_QAT_HW_AES_128_KEY_SZ;

            in = rte_zmalloc("AES CMAC K1",
                    ICP_QAT_HW_AES_128_KEY_SZ, 16);
            if (in == NULL) {
                QAT_LOG(ERR, "Failed to alloc memory");
                return -ENOMEM;
            }

            rte_memcpy(in, AES_CMAC_SEED,
                    ICP_QAT_HW_AES_128_KEY_SZ);
            rte_memcpy(p_state_buf, auth_key, auth_keylen);

            if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
                    &enc_key) != 0) {
                rte_free(in);
                return -EFAULT;
            }

            AES_encrypt(in, k0, &enc_key);

            k1 = p_state_buf + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
            k2 = k1 + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;

            aes_cmac_key_derive(k0, k1);
            aes_cmac_key_derive(k1, k2);

            memset(k0, 0, ICP_QAT_HW_AES_128_KEY_SZ);
            *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
            rte_free(in);
            return 0;
        } else {
            static uint8_t qat_aes_xcbc_key_seed[
                    ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ] = {
                0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
                0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
                0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
                0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
                0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
                0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
            };

            uint8_t *in = NULL;
            uint8_t *out = p_state_buf;
            int x;
            AES_KEY enc_key;

            in = rte_zmalloc("working mem for key",
                    ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ, 16);
            if (in == NULL) {
                QAT_LOG(ERR, "Failed to alloc memory");
                return -ENOMEM;
            }

            rte_memcpy(in, qat_aes_xcbc_key_seed,
                    ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
            for (x = 0; x < HASH_XCBC_PRECOMP_KEY_NUM; x++) {
                if (AES_set_encrypt_key(auth_key,
                        auth_keylen << 3,
                        &enc_key) != 0) {
                    rte_free(in -
                        (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ));
                    memset(out -
                        (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ),
                        0, ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
                    return -EFAULT;
                }
                AES_encrypt(in, out, &enc_key);
                in += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
                out += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
            }
            *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
            rte_free(in - x*ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ);
            return 0;
        }

    } else if ((hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_128) ||
        (hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_64)) {
        uint8_t *in = NULL;
        uint8_t *out = p_state_buf;
        AES_KEY enc_key;

        memset(p_state_buf, 0, ICP_QAT_HW_GALOIS_H_SZ +
                ICP_QAT_HW_GALOIS_LEN_A_SZ +
                ICP_QAT_HW_GALOIS_E_CTR0_SZ);
        in = rte_zmalloc("working mem for key",
                ICP_QAT_HW_GALOIS_H_SZ, 16);
        if (in == NULL) {
            QAT_LOG(ERR, "Failed to alloc memory");
            return -ENOMEM;
        }

        memset(in, 0, ICP_QAT_HW_GALOIS_H_SZ);
        if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
                &enc_key) != 0) {
            rte_free(in);
            return -EFAULT;
        }
        AES_encrypt(in, out, &enc_key);
        *p_state_len = ICP_QAT_HW_GALOIS_H_SZ +
                ICP_QAT_HW_GALOIS_LEN_A_SZ +
                ICP_QAT_HW_GALOIS_E_CTR0_SZ;
        rte_free(in);
        return 0;
    }

    block_size = qat_hash_get_block_size(hash_alg);
    if (block_size < 0)
        return block_size;
    /* init ipad and opad from key and xor with fixed values */
    memset(ipad, 0, block_size);
    memset(opad, 0, block_size);

    if (auth_keylen > (unsigned int)block_size) {
        QAT_LOG(ERR, "invalid keylen %u", auth_keylen);
        return -EFAULT;
    }
    rte_memcpy(ipad, auth_key, auth_keylen);
    rte_memcpy(opad, auth_key, auth_keylen);

    for (i = 0; i < block_size; i++) {
        uint8_t *ipad_ptr = ipad + i;
        uint8_t *opad_ptr = opad + i;
        *ipad_ptr ^= HMAC_IPAD_VALUE;
        *opad_ptr ^= HMAC_OPAD_VALUE;
    }

    /* do partial hash of ipad and copy to state1 */
    if (partial_hash_compute(hash_alg, ipad, p_state_buf)) {
        memset(ipad, 0, block_size);
        memset(opad, 0, block_size);
        QAT_LOG(ERR, "ipad precompute failed");
        return -EFAULT;
    }

    /*
     * State len is a multiple of 8, so may be larger than the digest.
     * Put the partial hash of opad state_len bytes after state1
     */
    *p_state_len = qat_hash_get_state1_size(hash_alg);
    if (partial_hash_compute(hash_alg, opad, p_state_buf + *p_state_len)) {
        memset(ipad, 0, block_size);
        memset(opad, 0, block_size);
        QAT_LOG(ERR, "opad precompute failed");
        return -EFAULT;
    }

    /* don't leave data lying around */
    memset(ipad, 0, block_size);
    memset(opad, 0, block_size);
    return 0;
}
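
/*
 * Fill in the request-header fields common to all symmetric sessions,
 * including the protocol flag for CCM/GCM/SNOW 3G/ZUC requests.
 */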
static void
qat_sym_session_init_common_hdr(struct icp_qat_fw_comn_req_hdr *header,
        enum qat_sym_proto_flag proto_flags)
{
    header->hdr_flags =
        ICP_QAT_FW_COMN_HDR_FLAGS_BUILD(ICP_QAT_FW_COMN_REQ_FLAG_SET);
    header->service_type = ICP_QAT_FW_COMN_REQ_CPM_FW_LA;
    header->comn_req_flags =
        ICP_QAT_FW_COMN_FLAGS_BUILD(QAT_COMN_CD_FLD_TYPE_64BIT_ADR,
                    QAT_COMN_PTR_TYPE_FLAT);
    ICP_QAT_FW_LA_PARTIAL_SET(header->serv_specif_flags,
                    ICP_QAT_FW_LA_PARTIAL_NONE);
    ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(header->serv_specif_flags,
                    ICP_QAT_FW_CIPH_IV_16BYTE_DATA);

    switch (proto_flags) {
    case QAT_CRYPTO_PROTO_FLAG_NONE:
        ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
                    ICP_QAT_FW_LA_NO_PROTO);
        break;
    case QAT_CRYPTO_PROTO_FLAG_CCM:
        ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
                    ICP_QAT_FW_LA_CCM_PROTO);
        break;
    case QAT_CRYPTO_PROTO_FLAG_GCM:
        ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
                    ICP_QAT_FW_LA_GCM_PROTO);
        break;
    case QAT_CRYPTO_PROTO_FLAG_SNOW3G:
        ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
                    ICP_QAT_FW_LA_SNOW_3G_PROTO);
        break;
    case QAT_CRYPTO_PROTO_FLAG_ZUC:
        ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(header->serv_specif_flags,
                    ICP_QAT_FW_LA_ZUC_3G_PROTO);
        break;
    }

    ICP_QAT_FW_LA_UPDATE_STATE_SET(header->serv_specif_flags,
                    ICP_QAT_FW_LA_NO_UPDATE_STATE);
    ICP_QAT_FW_LA_DIGEST_IN_BUFFER_SET(header->serv_specif_flags,
                    ICP_QAT_FW_LA_NO_DIGEST_IN_BUFFER);
}

/*
 * Snow3G and ZUC should never use this function; they set their protocol
 * flags in both the cipher and auth parts of the content descriptor
 * building functions.
 */
static enum qat_sym_proto_flag
qat_get_crypto_proto_flag(uint16_t flags)
{
    int proto = ICP_QAT_FW_LA_PROTO_GET(flags);
    enum qat_sym_proto_flag qat_proto_flag =
            QAT_CRYPTO_PROTO_FLAG_NONE;

    switch (proto) {
    case ICP_QAT_FW_LA_GCM_PROTO:
        qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
        break;
    case ICP_QAT_FW_LA_CCM_PROTO:
        qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
        break;
    }

    return qat_proto_flag;
}
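
/*
 * Build the cipher half of the content descriptor: slice chaining,
 * key-conversion mode, the cipher config word and the (possibly padded
 * or key-modified) cipher key.
 */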
int qat_sym_session_aead_create_cd_cipher(struct qat_sym_session *cdesc,
                const uint8_t *cipherkey,
                uint32_t cipherkeylen)
{
    struct icp_qat_hw_cipher_algo_blk *cipher;
    struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
    struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
    struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
    void *ptr = &req_tmpl->cd_ctrl;
    struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
    struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
    enum icp_qat_hw_cipher_convert key_convert;
    enum qat_sym_proto_flag qat_proto_flag =
        QAT_CRYPTO_PROTO_FLAG_NONE;
    uint32_t total_key_size;
    uint16_t cipher_offset, cd_size;
    uint32_t wordIndex = 0;
    uint32_t *temp_key = NULL;

    if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER) {
        cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
        ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
                    ICP_QAT_FW_SLICE_CIPHER);
        ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
                    ICP_QAT_FW_SLICE_DRAM_WR);
        ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
                    ICP_QAT_FW_LA_NO_RET_AUTH_RES);
        ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
                    ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
        cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
    } else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
        cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
        ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
                    ICP_QAT_FW_SLICE_CIPHER);
        ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
                    ICP_QAT_FW_SLICE_AUTH);
        ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
                    ICP_QAT_FW_SLICE_AUTH);
        ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
                    ICP_QAT_FW_SLICE_DRAM_WR);
        cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
    } else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
        QAT_LOG(ERR, "Invalid param, must be a cipher command.");
        return -EFAULT;
    }

    if (cdesc->qat_mode == ICP_QAT_HW_CIPHER_CTR_MODE) {
        /*
         * CTR Streaming ciphers are a special case. Decrypt = encrypt
         * Overriding default values previously set
         */
        cdesc->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
        key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
    } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2
        || cdesc->qat_cipher_alg ==
            ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)
        key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
    else if (cdesc->qat_dir == ICP_QAT_HW_CIPHER_ENCRYPT)
        key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
    else
        key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;

    if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
        total_key_size = ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ +
            ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
        cipher_cd_ctrl->cipher_state_sz =
            ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
        qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;

    } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
        total_key_size = ICP_QAT_HW_KASUMI_F8_KEY_SZ;
        cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_KASUMI_BLK_SZ >> 3;
        cipher_cd_ctrl->cipher_padding_sz =
                (2 * ICP_QAT_HW_KASUMI_BLK_SZ) >> 3;
    } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES) {
        total_key_size = ICP_QAT_HW_3DES_KEY_SZ;
        cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_3DES_BLK_SZ >> 3;
        qat_proto_flag =
            qat_get_crypto_proto_flag(header->serv_specif_flags);
    } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_DES) {
        total_key_size = ICP_QAT_HW_DES_KEY_SZ;
        cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_DES_BLK_SZ >> 3;
        qat_proto_flag =
            qat_get_crypto_proto_flag(header->serv_specif_flags);
    } else if (cdesc->qat_cipher_alg ==
        ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
        total_key_size = ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ +
            ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
        cipher_cd_ctrl->cipher_state_sz =
            ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
        qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
        cdesc->min_qat_dev_gen = QAT_GEN2;
    } else {
        total_key_size = cipherkeylen;
        cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_AES_BLK_SZ >> 3;
        qat_proto_flag =
            qat_get_crypto_proto_flag(header->serv_specif_flags);
    }
    cipher_cd_ctrl->cipher_key_sz = total_key_size >> 3;
    cipher_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
    cipher_cd_ctrl->cipher_cfg_offset = cipher_offset >> 3;

    header->service_cmd_id = cdesc->qat_cmd;
    qat_sym_session_init_common_hdr(header, qat_proto_flag);

    cipher = (struct icp_qat_hw_cipher_algo_blk *)cdesc->cd_cur_ptr;
    cipher->cipher_config.val =
        ICP_QAT_HW_CIPHER_CONFIG_BUILD(cdesc->qat_mode,
                    cdesc->qat_cipher_alg, key_convert,
                    cdesc->qat_dir);

    if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
        temp_key = (uint32_t *)(cdesc->cd_cur_ptr +
                    sizeof(struct icp_qat_hw_cipher_config)
                    + cipherkeylen);
        memcpy(cipher->key, cipherkey, cipherkeylen);
        memcpy(temp_key, cipherkey, cipherkeylen);

        /* XOR Key with KASUMI F8 key modifier at 4 bytes level */
        for (wordIndex = 0; wordIndex < (cipherkeylen >> 2);
                                wordIndex++)
            temp_key[wordIndex] ^= KASUMI_F8_KEY_MODIFIER_4_BYTES;

        cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
                    cipherkeylen + cipherkeylen;
    } else {
        memcpy(cipher->key, cipherkey, cipherkeylen);
        cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
                    cipherkeylen;
    }

    if (total_key_size > cipherkeylen) {
        uint32_t padding_size = total_key_size-cipherkeylen;
        if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
            && (cipherkeylen == QAT_3DES_KEY_SZ_OPT2)) {
            /* K3 not provided so use K1 = K3 */
            memcpy(cdesc->cd_cur_ptr, cipherkey, padding_size);
        } else if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
            && (cipherkeylen == QAT_3DES_KEY_SZ_OPT3)) {
            /* K2 and K3 not provided so use K1 = K2 = K3 */
            memcpy(cdesc->cd_cur_ptr, cipherkey,
                cipherkeylen);
            memcpy(cdesc->cd_cur_ptr+cipherkeylen,
                cipherkey, cipherkeylen);
        } else
            memset(cdesc->cd_cur_ptr, 0, padding_size);

        cdesc->cd_cur_ptr += padding_size;
    }
    cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
    cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;

    return 0;
}
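
/*
 * Build the auth half of the content descriptor: inner hash config,
 * state1/state2 precomputes (or initial states for plain hashes) and
 * the hash slice chaining and size fields.
 */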
int qat_sym_session_aead_create_cd_auth(struct qat_sym_session *cdesc,
                const uint8_t *authkey,
                uint32_t authkeylen,
                uint32_t aad_length,
                uint32_t digestsize,
                unsigned int operation)
{
    struct icp_qat_hw_auth_setup *hash;
    struct icp_qat_hw_cipher_algo_blk *cipherconfig;
    struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
    struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
    struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
    void *ptr = &req_tmpl->cd_ctrl;
    struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
    struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
    struct icp_qat_fw_la_auth_req_params *auth_param =
        (struct icp_qat_fw_la_auth_req_params *)
        ((char *)&req_tmpl->serv_specif_rqpars +
        ICP_QAT_FW_HASH_REQUEST_PARAMETERS_OFFSET);
    uint16_t state1_size = 0, state2_size = 0, cd_extra_size = 0;
    uint16_t hash_offset, cd_size;
    uint32_t *aad_len = NULL;
    uint32_t wordIndex = 0;
    uint32_t *pTempKey;

    enum qat_sym_proto_flag qat_proto_flag =
        QAT_CRYPTO_PROTO_FLAG_NONE;

    if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_AUTH) {
        ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
                    ICP_QAT_FW_SLICE_AUTH);
        ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
                    ICP_QAT_FW_SLICE_DRAM_WR);
        cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
    } else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
        ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
                ICP_QAT_FW_SLICE_AUTH);
        ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
                ICP_QAT_FW_SLICE_CIPHER);
        ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
                ICP_QAT_FW_SLICE_CIPHER);
        ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
                ICP_QAT_FW_SLICE_DRAM_WR);
        cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
    } else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
        QAT_LOG(ERR, "Invalid param, must be a hash command.");
        return -EFAULT;
    }

    if (operation == RTE_CRYPTO_AUTH_OP_VERIFY) {
        ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
                ICP_QAT_FW_LA_NO_RET_AUTH_RES);
        ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
                ICP_QAT_FW_LA_CMP_AUTH_RES);
        cdesc->auth_op = ICP_QAT_HW_AUTH_VERIFY;
    } else {
        ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
                ICP_QAT_FW_LA_RET_AUTH_RES);
        ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
                ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
        cdesc->auth_op = ICP_QAT_HW_AUTH_GENERATE;
    }

    /*
     * Setup the inner hash config
     */
    hash_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
    hash = (struct icp_qat_hw_auth_setup *)cdesc->cd_cur_ptr;
    hash->auth_config.reserved = 0;
    hash->auth_config.config =
            ICP_QAT_HW_AUTH_CONFIG_BUILD(cdesc->auth_mode,
                cdesc->qat_hash_alg, digestsize);

    if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0
        || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2
        || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_KASUMI_F9
        || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3
        || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC
        || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC
        || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_NULL
            )
        hash->auth_counter.counter = 0;
    else {
        int block_size = qat_hash_get_block_size(cdesc->qat_hash_alg);

        if (block_size < 0)
            return block_size;
        hash->auth_counter.counter = rte_bswap32(block_size);
    }

    cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_auth_setup);

    /*
     * cd_cur_ptr now points at the state1 information.
     */
    switch (cdesc->qat_hash_alg) {
    case ICP_QAT_HW_AUTH_ALGO_SHA1:
        if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0) {
            /* Plain SHA-1 */
            rte_memcpy(cdesc->cd_cur_ptr, sha1InitialState,
                    sizeof(sha1InitialState));
            state1_size = qat_hash_get_state1_size(
                    cdesc->qat_hash_alg);
            break;
        }
        /* SHA-1 HMAC */
        if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA1, authkey,
                authkeylen, cdesc->cd_cur_ptr, &state1_size,
                cdesc->aes_cmac)) {
            QAT_LOG(ERR, "(SHA)precompute failed");
            return -EFAULT;
        }
        state2_size = RTE_ALIGN_CEIL(ICP_QAT_HW_SHA1_STATE2_SZ, 8);
        break;
    case ICP_QAT_HW_AUTH_ALGO_SHA224:
        if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0) {
            /* Plain SHA-224 */
            rte_memcpy(cdesc->cd_cur_ptr, sha224InitialState,
                    sizeof(sha224InitialState));
            state1_size = qat_hash_get_state1_size(
                    cdesc->qat_hash_alg);
            break;
        }
        /* SHA-224 HMAC */
        if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA224, authkey,
                authkeylen, cdesc->cd_cur_ptr, &state1_size,
                cdesc->aes_cmac)) {
            QAT_LOG(ERR, "(SHA)precompute failed");
            return -EFAULT;
        }
        state2_size = ICP_QAT_HW_SHA224_STATE2_SZ;
        break;
    case ICP_QAT_HW_AUTH_ALGO_SHA256:
        if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0) {
            /* Plain SHA-256 */
            rte_memcpy(cdesc->cd_cur_ptr, sha256InitialState,
                    sizeof(sha256InitialState));
            state1_size = qat_hash_get_state1_size(
                    cdesc->qat_hash_alg);
            break;
        }
        /* SHA-256 HMAC */
        if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA256, authkey,
                authkeylen, cdesc->cd_cur_ptr, &state1_size,
                cdesc->aes_cmac)) {
            QAT_LOG(ERR, "(SHA)precompute failed");
            return -EFAULT;
        }
        state2_size = ICP_QAT_HW_SHA256_STATE2_SZ;
        break;
    case ICP_QAT_HW_AUTH_ALGO_SHA384:
        if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0) {
            /* Plain SHA-384 */
            rte_memcpy(cdesc->cd_cur_ptr, sha384InitialState,
                    sizeof(sha384InitialState));
            state1_size = qat_hash_get_state1_size(
                    cdesc->qat_hash_alg);
            break;
        }
        /* SHA-384 HMAC */
        if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA384, authkey,
                authkeylen, cdesc->cd_cur_ptr, &state1_size,
                cdesc->aes_cmac)) {
            QAT_LOG(ERR, "(SHA)precompute failed");
            return -EFAULT;
        }
        state2_size = ICP_QAT_HW_SHA384_STATE2_SZ;
        break;
    case ICP_QAT_HW_AUTH_ALGO_SHA512:
        if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0) {
            /* Plain SHA-512 */
            rte_memcpy(cdesc->cd_cur_ptr, sha512InitialState,
                    sizeof(sha512InitialState));
            state1_size = qat_hash_get_state1_size(
                    cdesc->qat_hash_alg);
            break;
        }
        /* SHA-512 HMAC */
        if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA512, authkey,
                authkeylen, cdesc->cd_cur_ptr, &state1_size,
                cdesc->aes_cmac)) {
            QAT_LOG(ERR, "(SHA)precompute failed");
            return -EFAULT;
        }
        state2_size = ICP_QAT_HW_SHA512_STATE2_SZ;
        break;
    case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
        state1_size = ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;

        if (cdesc->aes_cmac)
            memset(cdesc->cd_cur_ptr, 0, state1_size);
        if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC,
                authkey, authkeylen, cdesc->cd_cur_ptr + state1_size,
                &state2_size, cdesc->aes_cmac)) {
            cdesc->aes_cmac ? QAT_LOG(ERR,
                    "(CMAC)precompute failed")
                    : QAT_LOG(ERR,
                    "(XCBC)precompute failed");
            return -EFAULT;
        }
        break;
    case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
    case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
        qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
        state1_size = ICP_QAT_HW_GALOIS_128_STATE1_SZ;
        if (qat_sym_do_precomputes(cdesc->qat_hash_alg, authkey,
                authkeylen, cdesc->cd_cur_ptr + state1_size,
                &state2_size, cdesc->aes_cmac)) {
            QAT_LOG(ERR, "(GCM)precompute failed");
            return -EFAULT;
        }
        /*
         * Write (the length of AAD) into bytes 16-19 of state2
         * in big-endian format. This field is 8 bytes
         */
        auth_param->u2.aad_sz =
                RTE_ALIGN_CEIL(aad_length, 16);
        auth_param->hash_state_sz = (auth_param->u2.aad_sz) >> 3;

        aad_len = (uint32_t *)(cdesc->cd_cur_ptr +
                    ICP_QAT_HW_GALOIS_128_STATE1_SZ +
                    ICP_QAT_HW_GALOIS_H_SZ);
        *aad_len = rte_bswap32(aad_length);
        cdesc->aad_len = aad_length;
        break;
    case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
        qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
        state1_size = qat_hash_get_state1_size(
                ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2);
        state2_size = ICP_QAT_HW_SNOW_3G_UIA2_STATE2_SZ;
        memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);

        cipherconfig = (struct icp_qat_hw_cipher_algo_blk *)
                (cdesc->cd_cur_ptr + state1_size + state2_size);
        cipherconfig->cipher_config.val =
        ICP_QAT_HW_CIPHER_CONFIG_BUILD(ICP_QAT_HW_CIPHER_ECB_MODE,
            ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2,
            ICP_QAT_HW_CIPHER_KEY_CONVERT,
            ICP_QAT_HW_CIPHER_ENCRYPT);
        memcpy(cipherconfig->key, authkey, authkeylen);
        memset(cipherconfig->key + authkeylen,
                0, ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ);
        cd_extra_size += sizeof(struct icp_qat_hw_cipher_config) +
                authkeylen + ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
        auth_param->hash_state_sz = ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
        break;
    case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
        hash->auth_config.config =
            ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE0,
                cdesc->qat_hash_alg, digestsize);
        qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
        state1_size = qat_hash_get_state1_size(
                ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3);
        state2_size = ICP_QAT_HW_ZUC_3G_EIA3_STATE2_SZ;
        memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size
            + ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ);

        memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
        cd_extra_size += ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
        auth_param->hash_state_sz = ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
        cdesc->min_qat_dev_gen = QAT_GEN2;

        break;
    case ICP_QAT_HW_AUTH_ALGO_MD5:
        if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_MD5, authkey,
                authkeylen, cdesc->cd_cur_ptr, &state1_size,
                cdesc->aes_cmac)) {
            QAT_LOG(ERR, "(MD5)precompute failed");
            return -EFAULT;
        }
        state2_size = ICP_QAT_HW_MD5_STATE2_SZ;
        break;
    case ICP_QAT_HW_AUTH_ALGO_NULL:
        state1_size = qat_hash_get_state1_size(
                ICP_QAT_HW_AUTH_ALGO_NULL);
        state2_size = ICP_QAT_HW_NULL_STATE2_SZ;
        break;
    case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
        qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
        state1_size = qat_hash_get_state1_size(
                ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC);
        state2_size = ICP_QAT_HW_AES_CBC_MAC_KEY_SZ +
                ICP_QAT_HW_AES_CCM_CBC_E_CTR0_SZ;

        if (aad_length > 0) {
            aad_length += ICP_QAT_HW_CCM_AAD_B0_LEN +
                ICP_QAT_HW_CCM_AAD_LEN_INFO;
            auth_param->u2.aad_sz =
                RTE_ALIGN_CEIL(aad_length,
                    ICP_QAT_HW_CCM_AAD_ALIGNMENT);
        } else {
            auth_param->u2.aad_sz = ICP_QAT_HW_CCM_AAD_B0_LEN;
        }
        cdesc->aad_len = aad_length;
        hash->auth_counter.counter = 0;

        hash_cd_ctrl->outer_prefix_sz = digestsize;
        auth_param->hash_state_sz = digestsize;

        memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
        break;
    case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
        state1_size = qat_hash_get_state1_size(
                ICP_QAT_HW_AUTH_ALGO_KASUMI_F9);
        state2_size = ICP_QAT_HW_KASUMI_F9_STATE2_SZ;
        memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
        pTempKey = (uint32_t *)(cdesc->cd_cur_ptr + state1_size
                            + authkeylen);
        /*
         * The Inner Hash Initial State2 block must contain IK
         * (Initialisation Key), followed by IK XOR-ed with KM
         * (Key Modifier): IK||(IK^KM).
         */
        /* write the auth key */
        memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
        /* initialise temp key with auth key */
        memcpy(pTempKey, authkey, authkeylen);
        /* XOR Key with KASUMI F9 key modifier at 4 bytes level */
        for (wordIndex = 0; wordIndex < (authkeylen >> 2); wordIndex++)
            pTempKey[wordIndex] ^= KASUMI_F9_KEY_MODIFIER_4_BYTES;
        break;
    default:
        QAT_LOG(ERR, "Invalid HASH alg %u", cdesc->qat_hash_alg);
        return -EFAULT;
    }

    /* Request template setup */
    qat_sym_session_init_common_hdr(header, qat_proto_flag);
    header->service_cmd_id = cdesc->qat_cmd;

    /* Auth CD config setup */
    hash_cd_ctrl->hash_cfg_offset = hash_offset >> 3;
    hash_cd_ctrl->hash_flags = ICP_QAT_FW_AUTH_HDR_FLAG_NO_NESTED;
    hash_cd_ctrl->inner_res_sz = digestsize;
    hash_cd_ctrl->final_sz = digestsize;
    hash_cd_ctrl->inner_state1_sz = state1_size;
    auth_param->auth_res_sz = digestsize;

    hash_cd_ctrl->inner_state2_sz = state2_size;
    hash_cd_ctrl->inner_state2_offset = hash_cd_ctrl->hash_cfg_offset +
            ((sizeof(struct icp_qat_hw_auth_setup) +
             RTE_ALIGN_CEIL(hash_cd_ctrl->inner_state1_sz, 8))
                    >> 3);

    cdesc->cd_cur_ptr += state1_size + state2_size + cd_extra_size;
    cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;

    cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
    cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;

    return 0;
}
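
/*
 * The qat_sym_validate_*_key helpers map a key length onto the
 * corresponding QAT hardware cipher algorithm enum, rejecting
 * unsupported sizes.
 */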
int qat_sym_validate_aes_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
{
    switch (key_len) {
    case ICP_QAT_HW_AES_128_KEY_SZ:
        *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
        break;
    case ICP_QAT_HW_AES_192_KEY_SZ:
        *alg = ICP_QAT_HW_CIPHER_ALGO_AES192;
        break;
    case ICP_QAT_HW_AES_256_KEY_SZ:
        *alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
        break;
    default:
        return -EINVAL;
    }
    return 0;
}

int qat_sym_validate_aes_docsisbpi_key(int key_len,
        enum icp_qat_hw_cipher_algo *alg)
{
    switch (key_len) {
    case ICP_QAT_HW_AES_128_KEY_SZ:
        *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
        break;
    case ICP_QAT_HW_AES_256_KEY_SZ:
        *alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
        break;
    default:
        return -EINVAL;
    }
    return 0;
}

int qat_sym_validate_snow3g_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
{
    switch (key_len) {
    case ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ:
        *alg = ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2;
        break;
    default:
        return -EINVAL;
    }
    return 0;
}

int qat_sym_validate_kasumi_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
{
    switch (key_len) {
    case ICP_QAT_HW_KASUMI_KEY_SZ:
        *alg = ICP_QAT_HW_CIPHER_ALGO_KASUMI;
        break;
    default:
        return -EINVAL;
    }
    return 0;
}

int qat_sym_validate_des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
{
    switch (key_len) {
    case ICP_QAT_HW_DES_KEY_SZ:
        *alg = ICP_QAT_HW_CIPHER_ALGO_DES;
        break;
    default:
        return -EINVAL;
    }
    return 0;
}

int qat_sym_validate_3des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
{
    switch (key_len) {
    case QAT_3DES_KEY_SZ_OPT1:
    case QAT_3DES_KEY_SZ_OPT2:
    case QAT_3DES_KEY_SZ_OPT3:
        *alg = ICP_QAT_HW_CIPHER_ALGO_3DES;
        break;
    default:
        return -EINVAL;
    }
    return 0;
}

int qat_sym_validate_zuc_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
{
    switch (key_len) {
    case ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ:
        *alg = ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3;
        break;
    default:
        return -EINVAL;
    }
    return 0;
}

#ifdef RTE_LIBRTE_SECURITY
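
/*
 * DOCSIS (rte_security) support: the session must contain a single
 * AES-DOCSISBPI cipher xform whose direction matches the uplink or
 * downlink CRC handling.
 */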
static int
qat_sec_session_check_docsis(struct rte_security_session_conf *conf)
{
    struct rte_crypto_sym_xform *crypto_sym = conf->crypto_xform;
    struct rte_security_docsis_xform *docsis = &conf->docsis;

    /* CRC generate -> Cipher encrypt */
    if (docsis->direction == RTE_SECURITY_DOCSIS_DOWNLINK) {

        if (crypto_sym != NULL &&
            crypto_sym->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
            crypto_sym->cipher.op == RTE_CRYPTO_CIPHER_OP_ENCRYPT &&
            crypto_sym->cipher.algo ==
                RTE_CRYPTO_CIPHER_AES_DOCSISBPI &&
            (crypto_sym->cipher.key.length ==
                ICP_QAT_HW_AES_128_KEY_SZ ||
             crypto_sym->cipher.key.length ==
                ICP_QAT_HW_AES_256_KEY_SZ) &&
            crypto_sym->cipher.iv.length == ICP_QAT_HW_AES_BLK_SZ &&
            crypto_sym->next == NULL) {
            return 0;
        }
    /* Cipher decrypt -> CRC verify */
    } else if (docsis->direction == RTE_SECURITY_DOCSIS_UPLINK) {

        if (crypto_sym != NULL &&
            crypto_sym->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
            crypto_sym->cipher.op == RTE_CRYPTO_CIPHER_OP_DECRYPT &&
            crypto_sym->cipher.algo ==
                RTE_CRYPTO_CIPHER_AES_DOCSISBPI &&
            (crypto_sym->cipher.key.length ==
                ICP_QAT_HW_AES_128_KEY_SZ ||
             crypto_sym->cipher.key.length ==
                ICP_QAT_HW_AES_256_KEY_SZ) &&
            crypto_sym->cipher.iv.length == ICP_QAT_HW_AES_BLK_SZ &&
            crypto_sym->next == NULL) {
            return 0;
        }
    }

    return -EINVAL;
}

static int
qat_sec_session_set_docsis_parameters(struct rte_cryptodev *dev,
        struct rte_security_session_conf *conf, void *session_private)
{
    int ret;
    int qat_cmd_id;
    struct rte_crypto_sym_xform *xform = NULL;
    struct qat_sym_session *session = session_private;

    ret = qat_sec_session_check_docsis(conf);
    if (ret) {
        QAT_LOG(ERR, "Unsupported DOCSIS security configuration");
        return ret;
    }

    xform = conf->crypto_xform;

    /* Verify the session physical address is known */
    rte_iova_t session_paddr = rte_mempool_virt2iova(session);
    if (session_paddr == 0 || session_paddr == RTE_BAD_IOVA) {
        QAT_LOG(ERR,
            "Session physical address unknown. Bad memory pool.");
        return -EINVAL;
    }

    /* Set context descriptor physical address */
    session->cd_paddr = session_paddr +
            offsetof(struct qat_sym_session, cd);

    session->min_qat_dev_gen = QAT_GEN1;

    /* Get requested QAT command id */
    qat_cmd_id = qat_get_cmd_id(xform);
    if (qat_cmd_id < 0 || qat_cmd_id >= ICP_QAT_FW_LA_CMD_DELIMITER) {
        QAT_LOG(ERR, "Unsupported xform chain requested");
        return -ENOTSUP;
    }
    session->qat_cmd = (enum icp_qat_fw_la_cmd_id)qat_cmd_id;
    switch (session->qat_cmd) {
    case ICP_QAT_FW_LA_CMD_CIPHER:
        ret = qat_sym_session_configure_cipher(dev, xform, session);
        if (ret < 0)
            return ret;
        break;
    default:
        QAT_LOG(ERR, "Unsupported Service %u", session->qat_cmd);
        return -ENOTSUP;
    }

    return 0;
}

int
qat_security_session_create(void *dev,
                struct rte_security_session_conf *conf,
                struct rte_security_session *sess,
                struct rte_mempool *mempool)
{
    void *sess_private_data;
    struct rte_cryptodev *cdev = (struct rte_cryptodev *)dev;
    int ret;

    if (rte_mempool_get(mempool, &sess_private_data)) {
        QAT_LOG(ERR, "Couldn't get object from session mempool");
        return -ENOMEM;
    }

    if (conf->protocol != RTE_SECURITY_PROTOCOL_DOCSIS) {
        QAT_LOG(ERR, "Invalid security protocol");
        /* Return session to mempool */
        rte_mempool_put(mempool, sess_private_data);
        return -EINVAL;
    }

    ret = qat_sec_session_set_docsis_parameters(cdev, conf,
            sess_private_data);
    if (ret != 0) {
        QAT_LOG(ERR, "Failed to configure session parameters");
        /* Return session to mempool */
        rte_mempool_put(mempool, sess_private_data);
        return ret;
    }

    set_sec_session_private_data(sess, sess_private_data);

    return 0;
}

int
qat_security_session_destroy(void *dev __rte_unused,
                struct rte_security_session *sess)
{
    void *sess_priv = get_sec_session_private_data(sess);
    struct qat_sym_session *s = (struct qat_sym_session *)sess_priv;

    if (sess_priv) {
        if (s->bpi_ctx)
            bpi_cipher_ctx_free(s->bpi_ctx);
        memset(s, 0, qat_sym_session_get_private_size(dev));
        struct rte_mempool *sess_mp = rte_mempool_from_obj(sess_priv);

        set_sec_session_private_data(sess, NULL);
        rte_mempool_put(sess_mp, sess_priv);
    }
    return 0;
}
#endif