/* SPDX-License-Identifier: (BSD-3-Clause OR GPL-2.0)
 * Copyright(c) 2015-2019 Intel Corporation
 */

#include <openssl/sha.h>	/* Needed to calculate pre-compute values */
#include <openssl/aes.h>	/* Needed to calculate pre-compute values */
#include <openssl/md5.h>	/* Needed to calculate pre-compute values */
#include <openssl/evp.h>	/* Needed for bpi runt block processing */

#include <rte_memcpy.h>
#include <rte_common.h>
#include <rte_spinlock.h>
#include <rte_byteorder.h>
#include <rte_log.h>
#include <rte_malloc.h>
#include <rte_crypto_sym.h>
#ifdef RTE_LIB_SECURITY
#include <rte_security.h>
#endif

#include "qat_logs.h"
#include "qat_sym_session.h"
#include "qat_sym_pmd.h"

/* SHA1 - 20 bytes - Initialiser state can be found in FIPS stds 180-2 */
static const uint8_t sha1InitialState[] = {
	0x67, 0x45, 0x23, 0x01, 0xef, 0xcd, 0xab, 0x89, 0x98, 0xba,
	0xdc, 0xfe, 0x10, 0x32, 0x54, 0x76, 0xc3, 0xd2, 0xe1, 0xf0};

/* SHA 224 - 32 bytes - Initialiser state can be found in FIPS stds 180-2 */
static const uint8_t sha224InitialState[] = {
	0xc1, 0x05, 0x9e, 0xd8, 0x36, 0x7c, 0xd5, 0x07, 0x30, 0x70, 0xdd,
	0x17, 0xf7, 0x0e, 0x59, 0x39, 0xff, 0xc0, 0x0b, 0x31, 0x68, 0x58,
	0x15, 0x11, 0x64, 0xf9, 0x8f, 0xa7, 0xbe, 0xfa, 0x4f, 0xa4};

/* SHA 256 - 32 bytes - Initialiser state can be found in FIPS stds 180-2 */
static const uint8_t sha256InitialState[] = {
	0x6a, 0x09, 0xe6, 0x67, 0xbb, 0x67, 0xae, 0x85, 0x3c, 0x6e, 0xf3,
	0x72, 0xa5, 0x4f, 0xf5, 0x3a, 0x51, 0x0e, 0x52, 0x7f, 0x9b, 0x05,
	0x68, 0x8c, 0x1f, 0x83, 0xd9, 0xab, 0x5b, 0xe0, 0xcd, 0x19};

/* SHA 384 - 64 bytes - Initialiser state can be found in FIPS stds 180-2 */
static const uint8_t sha384InitialState[] = {
	0xcb, 0xbb, 0x9d, 0x5d, 0xc1, 0x05, 0x9e, 0xd8, 0x62, 0x9a, 0x29,
	0x2a, 0x36, 0x7c, 0xd5, 0x07, 0x91, 0x59, 0x01, 0x5a, 0x30, 0x70,
	0xdd, 0x17, 0x15, 0x2f, 0xec, 0xd8, 0xf7, 0x0e, 0x59, 0x39, 0x67,
	0x33, 0x26, 0x67, 0xff, 0xc0, 0x0b, 0x31, 0x8e, 0xb4, 0x4a, 0x87,
	0x68, 0x58, 0x15, 0x11, 0xdb, 0x0c, 0x2e, 0x0d, 0x64, 0xf9, 0x8f,
	0xa7, 0x47, 0xb5, 0x48, 0x1d, 0xbe, 0xfa, 0x4f, 0xa4};

/* SHA 512 - 64 bytes - Initialiser state can be found in FIPS stds 180-2 */
static const uint8_t sha512InitialState[] = {
	0x6a, 0x09, 0xe6, 0x67, 0xf3, 0xbc, 0xc9, 0x08, 0xbb, 0x67, 0xae,
	0x85, 0x84, 0xca, 0xa7, 0x3b, 0x3c, 0x6e, 0xf3, 0x72, 0xfe, 0x94,
	0xf8, 0x2b, 0xa5, 0x4f, 0xf5, 0x3a, 0x5f, 0x1d, 0x36, 0xf1, 0x51,
	0x0e, 0x52, 0x7f, 0xad, 0xe6, 0x82, 0xd1, 0x9b, 0x05, 0x68, 0x8c,
	0x2b, 0x3e, 0x6c, 0x1f, 0x1f, 0x83, 0xd9, 0xab, 0xfb, 0x41, 0xbd,
	0x6b, 0x5b, 0xe0, 0xcd, 0x19, 0x13, 0x7e, 0x21, 0x79};
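
/*
 * Note: each array above is the byte-serialised, big-endian form of the
 * algorithm's initial hash words (H0..H7) from FIPS 180-2. For plain-hash
 * (ICP_QAT_HW_AUTH_MODE0) sessions these bytes are copied verbatim into the
 * state1 region of the content descriptor, so the hardware starts from the
 * standard initial state rather than a key-derived one.
 */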

/** Frees a context previously created
 * Depends on openssl libcrypto
 */
static void
bpi_cipher_ctx_free(void *bpi_ctx)
{
	if (bpi_ctx != NULL)
		EVP_CIPHER_CTX_free((EVP_CIPHER_CTX *)bpi_ctx);
}

/** Creates a context in either AES or DES in ECB mode
 * Depends on openssl libcrypto
 */
static int
bpi_cipher_ctx_init(enum rte_crypto_cipher_algorithm cryptodev_algo,
		enum rte_crypto_cipher_operation direction __rte_unused,
		const uint8_t *key, uint16_t key_length, void **ctx)
{
	const EVP_CIPHER *algo = NULL;
	int ret;

	*ctx = EVP_CIPHER_CTX_new();
	if (*ctx == NULL) {
		ret = -ENOMEM;
		goto ctx_init_err;
	}

	if (cryptodev_algo == RTE_CRYPTO_CIPHER_DES_DOCSISBPI)
		algo = EVP_des_ecb();
	else
		if (key_length == ICP_QAT_HW_AES_128_KEY_SZ)
			algo = EVP_aes_128_ecb();
		else
			algo = EVP_aes_256_ecb();

	/* IV will be ECB encrypted whether direction is encrypt or decrypt */
	if (EVP_EncryptInit_ex(*ctx, algo, NULL, key, 0) != 1) {
		ret = -EINVAL;
		goto ctx_init_err;
	}

	return 0;

ctx_init_err:
	if (*ctx != NULL)
		EVP_CIPHER_CTX_free(*ctx);
	return ret;
}
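
/*
 * Note: the ECB context created above is not used to cipher payload data.
 * For DOCSIS BPI the bulk of a packet goes to the QAT device in CBC mode;
 * a trailing "runt" block shorter than the cipher block size is handled in
 * software by ECB-encrypting the preceding IV/ciphertext block and using
 * the result as a keystream (DOCSIS residual block termination). The
 * encrypt-only init is deliberate: the same ECB encryption is needed for
 * both the encrypt and decrypt directions.
 */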

static int
qat_is_cipher_alg_supported(enum rte_crypto_cipher_algorithm algo,
		struct qat_sym_dev_private *internals)
{
	int i = 0;
	const struct rte_cryptodev_capabilities *capability;

	while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
			RTE_CRYPTO_OP_TYPE_UNDEFINED) {
		if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
			continue;

		if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_CIPHER)
			continue;

		if (capability->sym.cipher.algo == algo)
			return 1;
	}
	return 0;
}

static int
qat_is_auth_alg_supported(enum rte_crypto_auth_algorithm algo,
		struct qat_sym_dev_private *internals)
{
	int i = 0;
	const struct rte_cryptodev_capabilities *capability;

	while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
			RTE_CRYPTO_OP_TYPE_UNDEFINED) {
		if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
			continue;

		if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_AUTH)
			continue;

		if (capability->sym.auth.algo == algo)
			return 1;
	}
	return 0;
}

void
qat_sym_session_clear(struct rte_cryptodev *dev,
		struct rte_cryptodev_sym_session *sess)
{
	uint8_t index = dev->driver_id;
	void *sess_priv = get_sym_session_private_data(sess, index);
	struct qat_sym_session *s = (struct qat_sym_session *)sess_priv;

	if (sess_priv) {
		if (s->bpi_ctx)
			bpi_cipher_ctx_free(s->bpi_ctx);
		memset(s, 0, qat_sym_session_get_private_size(dev));
		struct rte_mempool *sess_mp = rte_mempool_from_obj(sess_priv);

		set_sym_session_private_data(sess, index, NULL);
		rte_mempool_put(sess_mp, sess_priv);
	}
}

static int
qat_get_cmd_id(const struct rte_crypto_sym_xform *xform)
{
	/* Cipher Only */
	if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER && xform->next == NULL)
		return ICP_QAT_FW_LA_CMD_CIPHER;

	/* Authentication Only */
	if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH && xform->next == NULL)
		return ICP_QAT_FW_LA_CMD_AUTH;

	/* AEAD */
	if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
		/* AES-GCM and AES-CCM work in different directions:
		 * GCM first encrypts and then generates the hash, whereas
		 * AES-CCM first generates the hash and then encrypts.
		 * A similar relation applies to decryption.
		 */
		if (xform->aead.op == RTE_CRYPTO_AEAD_OP_ENCRYPT)
			if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
				return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
			else
				return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
		else
			if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
				return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
			else
				return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
	}

	if (xform->next == NULL)
		return -1;

	/* Cipher then Authenticate */
	if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
			xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
		return ICP_QAT_FW_LA_CMD_CIPHER_HASH;

	/* Authenticate then Cipher */
	if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH &&
			xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
		return ICP_QAT_FW_LA_CMD_HASH_CIPHER;

	return -1;
}
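
/*
 * Summary of the mapping implemented above:
 *	cipher-only xform	-> ICP_QAT_FW_LA_CMD_CIPHER
 *	auth-only xform		-> ICP_QAT_FW_LA_CMD_AUTH
 *	cipher then auth	-> ICP_QAT_FW_LA_CMD_CIPHER_HASH
 *	auth then cipher	-> ICP_QAT_FW_LA_CMD_HASH_CIPHER
 * AEAD picks whichever order matches the algorithm: GCM encrypt and CCM
 * decrypt run cipher-then-hash; GCM decrypt and CCM encrypt run
 * hash-then-cipher.
 */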

static struct rte_crypto_auth_xform *
qat_get_auth_xform(struct rte_crypto_sym_xform *xform)
{
	do {
		if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH)
			return &xform->auth;

		xform = xform->next;
	} while (xform);

	return NULL;
}

static struct rte_crypto_cipher_xform *
qat_get_cipher_xform(struct rte_crypto_sym_xform *xform)
{
	do {
		if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
			return &xform->cipher;

		xform = xform->next;
	} while (xform);

	return NULL;
}

int
qat_sym_session_configure_cipher(struct rte_cryptodev *dev,
		struct rte_crypto_sym_xform *xform,
		struct qat_sym_session *session)
{
	struct qat_sym_dev_private *internals = dev->data->dev_private;
	struct rte_crypto_cipher_xform *cipher_xform = NULL;
	int ret;

	/* Get cipher xform from crypto xform chain */
	cipher_xform = qat_get_cipher_xform(xform);

	session->cipher_iv.offset = cipher_xform->iv.offset;
	session->cipher_iv.length = cipher_xform->iv.length;

	switch (cipher_xform->algo) {
	case RTE_CRYPTO_CIPHER_AES_CBC:
		if (qat_sym_validate_aes_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_AES_CTR:
		if (qat_sym_validate_aes_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		break;
	case RTE_CRYPTO_CIPHER_SNOW3G_UEA2:
		if (qat_sym_validate_snow3g_key(cipher_xform->key.length,
					&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid SNOW 3G cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
		break;
	case RTE_CRYPTO_CIPHER_NULL:
		session->qat_cipher_alg = ICP_QAT_HW_CIPHER_ALGO_NULL;
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		break;
	case RTE_CRYPTO_CIPHER_KASUMI_F8:
		if (qat_sym_validate_kasumi_key(cipher_xform->key.length,
					&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid KASUMI cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_F8_MODE;
		break;
	case RTE_CRYPTO_CIPHER_3DES_CBC:
		if (qat_sym_validate_3des_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid 3DES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_DES_CBC:
		if (qat_sym_validate_des_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid DES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_3DES_CTR:
		if (qat_sym_validate_3des_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid 3DES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		break;
	case RTE_CRYPTO_CIPHER_DES_DOCSISBPI:
		ret = bpi_cipher_ctx_init(
					cipher_xform->algo,
					cipher_xform->op,
					cipher_xform->key.data,
					cipher_xform->key.length,
					&session->bpi_ctx);
		if (ret != 0) {
			QAT_LOG(ERR, "failed to create DES BPI ctx");
			goto error_out;
		}
		if (qat_sym_validate_des_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid DES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_AES_DOCSISBPI:
		ret = bpi_cipher_ctx_init(
					cipher_xform->algo,
					cipher_xform->op,
					cipher_xform->key.data,
					cipher_xform->key.length,
					&session->bpi_ctx);
		if (ret != 0) {
			QAT_LOG(ERR, "failed to create AES BPI ctx");
			goto error_out;
		}
		if (qat_sym_validate_aes_docsisbpi_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES DOCSISBPI key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_ZUC_EEA3:
		if (!qat_is_cipher_alg_supported(
			cipher_xform->algo, internals)) {
			QAT_LOG(ERR, "%s not supported on this device",
				rte_crypto_cipher_algorithm_strings
					[cipher_xform->algo]);
			ret = -ENOTSUP;
			goto error_out;
		}
		if (qat_sym_validate_zuc_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid ZUC cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
		break;
	case RTE_CRYPTO_CIPHER_AES_XTS:
		if ((cipher_xform->key.length/2) == ICP_QAT_HW_AES_192_KEY_SZ) {
			QAT_LOG(ERR, "AES-XTS-192 not supported");
			ret = -EINVAL;
			goto error_out;
		}
		if (qat_sym_validate_aes_key((cipher_xform->key.length/2),
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES-XTS cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_XTS_MODE;
		break;
	case RTE_CRYPTO_CIPHER_3DES_ECB:
	case RTE_CRYPTO_CIPHER_AES_ECB:
	case RTE_CRYPTO_CIPHER_AES_F8:
	case RTE_CRYPTO_CIPHER_ARC4:
		QAT_LOG(ERR, "Crypto QAT PMD: Unsupported Cipher alg %u",
				cipher_xform->algo);
		ret = -ENOTSUP;
		goto error_out;
	default:
		QAT_LOG(ERR, "Crypto: Undefined Cipher specified %u\n",
				cipher_xform->algo);
		ret = -EINVAL;
		goto error_out;
	}

	if (cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
		session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
	else
		session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;

	if (qat_sym_session_aead_create_cd_cipher(session,
						cipher_xform->key.data,
						cipher_xform->key.length)) {
		ret = -EINVAL;
		goto error_out;
	}

	return 0;

error_out:
	if (session->bpi_ctx) {
		bpi_cipher_ctx_free(session->bpi_ctx);
		session->bpi_ctx = NULL;
	}
	return ret;
}

int
qat_sym_session_configure(struct rte_cryptodev *dev,
		struct rte_crypto_sym_xform *xform,
		struct rte_cryptodev_sym_session *sess,
		struct rte_mempool *mempool)
{
	void *sess_private_data;
	int ret;

	if (rte_mempool_get(mempool, &sess_private_data)) {
		QAT_LOG(CRIT,
			"Couldn't get object from session mempool");
		return -ENOMEM;
	}

	ret = qat_sym_session_set_parameters(dev, xform, sess_private_data);
	if (ret != 0) {
		QAT_LOG(ERR,
		    "Crypto QAT PMD: failed to configure session parameters");

		/* Return session to mempool */
		rte_mempool_put(mempool, sess_private_data);
		return ret;
	}

	set_sym_session_private_data(sess, dev->driver_id,
		sess_private_data);

	return 0;
}

static void
qat_sym_session_set_ext_hash_flags(struct qat_sym_session *session,
		uint8_t hash_flag)
{
	struct icp_qat_fw_comn_req_hdr *header = &session->fw_req.comn_hdr;
	struct icp_qat_fw_cipher_auth_cd_ctrl_hdr *cd_ctrl =
			(struct icp_qat_fw_cipher_auth_cd_ctrl_hdr *)
			session->fw_req.cd_ctrl.content_desc_ctrl_lw;

	/* Set the Use Extended Protocol Flags bit in LW 1 */
	QAT_FIELD_SET(header->comn_req_flags,
			QAT_COMN_EXT_FLAGS_USED,
			QAT_COMN_EXT_FLAGS_BITPOS,
			QAT_COMN_EXT_FLAGS_MASK);

	/* Set Hash Flags in LW 28 */
	cd_ctrl->hash_flags |= hash_flag;

	/* Set proto flags in LW 1 */
	switch (session->qat_cipher_alg) {
	case ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_SNOW_3G_PROTO);
		ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
				header->serv_specif_flags, 0);
		break;
	case ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_NO_PROTO);
		ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
				header->serv_specif_flags,
				ICP_QAT_FW_LA_ZUC_3G_PROTO);
		break;
	default:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_NO_PROTO);
		ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
				header->serv_specif_flags, 0);
		break;
	}
}
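
/*
 * The extended hash flags above are only consumed by firmware generations
 * that support "mixed" wireless algorithm pairs (see
 * qat_sym_session_handle_mixed() below): the hash algorithm is signalled
 * through the LW 28 hash flags of the content descriptor control block,
 * while the LW 1 protocol flags are left to describe the cipher side only.
 */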

static void
qat_sym_session_handle_mixed(const struct rte_cryptodev *dev,
		struct qat_sym_session *session)
{
	const struct qat_sym_dev_private *qat_private = dev->data->dev_private;
	enum qat_device_gen min_dev_gen = (qat_private->internal_capabilities &
			QAT_SYM_CAP_MIXED_CRYPTO) ? QAT_GEN2 : QAT_GEN3;

	if (session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3 &&
			session->qat_cipher_alg !=
			ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
		session->min_qat_dev_gen = min_dev_gen;
		qat_sym_session_set_ext_hash_flags(session,
			1 << ICP_QAT_FW_AUTH_HDR_FLAG_ZUC_EIA3_BITPOS);
	} else if (session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2 &&
			session->qat_cipher_alg !=
			ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
		session->min_qat_dev_gen = min_dev_gen;
		qat_sym_session_set_ext_hash_flags(session,
			1 << ICP_QAT_FW_AUTH_HDR_FLAG_SNOW3G_UIA2_BITPOS);
	} else if ((session->aes_cmac ||
			session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_NULL) &&
			(session->qat_cipher_alg ==
			ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2 ||
			session->qat_cipher_alg ==
			ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)) {
		session->min_qat_dev_gen = min_dev_gen;
		qat_sym_session_set_ext_hash_flags(session, 0);
	}
}
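
/*
 * Example of a "mixed" pair handled above (illustrative): SNOW 3G UEA2
 * cipher chained with AES-CMAC authentication, or ZUC-EIA3 authentication
 * chained with a non-ZUC cipher. Such combinations need either a GEN2
 * device that advertises QAT_SYM_CAP_MIXED_CRYPTO or a GEN3 device, and
 * the extended hash flags must identify the wireless hash explicitly
 * because LW 1 can carry only one protocol at a time.
 */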

int
qat_sym_session_set_parameters(struct rte_cryptodev *dev,
		struct rte_crypto_sym_xform *xform, void *session_private)
{
	struct qat_sym_session *session = session_private;
	struct qat_sym_dev_private *internals = dev->data->dev_private;
	enum qat_device_gen qat_dev_gen = internals->qat_dev->qat_dev_gen;
	int ret;
	int qat_cmd_id;

	/* Verify the session physical address is known */
	rte_iova_t session_paddr = rte_mempool_virt2iova(session);
	if (session_paddr == 0 || session_paddr == RTE_BAD_IOVA) {
		QAT_LOG(ERR,
			"Session physical address unknown. Bad memory pool.");
		return -EINVAL;
	}

	/* Set context descriptor physical address */
	session->cd_paddr = session_paddr +
			offsetof(struct qat_sym_session, cd);

	session->min_qat_dev_gen = QAT_GEN1;

	/* Get requested QAT command id */
	qat_cmd_id = qat_get_cmd_id(xform);
	if (qat_cmd_id < 0 || qat_cmd_id >= ICP_QAT_FW_LA_CMD_DELIMITER) {
		QAT_LOG(ERR, "Unsupported xform chain requested");
		return -ENOTSUP;
	}
	session->qat_cmd = (enum icp_qat_fw_la_cmd_id)qat_cmd_id;
	switch (session->qat_cmd) {
	case ICP_QAT_FW_LA_CMD_CIPHER:
		ret = qat_sym_session_configure_cipher(dev, xform, session);
		if (ret < 0)
			return ret;
		break;
	case ICP_QAT_FW_LA_CMD_AUTH:
		ret = qat_sym_session_configure_auth(dev, xform, session);
		if (ret < 0)
			return ret;
		session->is_single_pass_gmac =
				qat_dev_gen == QAT_GEN3 &&
				xform->auth.algo == RTE_CRYPTO_AUTH_AES_GMAC &&
				xform->auth.iv.length == QAT_AES_GCM_SPC_IV_SIZE;
		break;
	case ICP_QAT_FW_LA_CMD_CIPHER_HASH:
		if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
			ret = qat_sym_session_configure_aead(dev, xform,
					session);
			if (ret < 0)
				return ret;
		} else {
			ret = qat_sym_session_configure_cipher(dev,
					xform, session);
			if (ret < 0)
				return ret;
			ret = qat_sym_session_configure_auth(dev,
					xform, session);
			if (ret < 0)
				return ret;
			/* Special handling of mixed hash+cipher algorithms */
			qat_sym_session_handle_mixed(dev, session);
		}
		break;
	case ICP_QAT_FW_LA_CMD_HASH_CIPHER:
		if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
			ret = qat_sym_session_configure_aead(dev, xform,
					session);
			if (ret < 0)
				return ret;
		} else {
			ret = qat_sym_session_configure_auth(dev,
					xform, session);
			if (ret < 0)
				return ret;
			ret = qat_sym_session_configure_cipher(dev,
					xform, session);
			if (ret < 0)
				return ret;
			/* Special handling of mixed hash+cipher algorithms */
			qat_sym_session_handle_mixed(dev, session);
		}
		break;
	case ICP_QAT_FW_LA_CMD_TRNG_GET_RANDOM:
	case ICP_QAT_FW_LA_CMD_TRNG_TEST:
	case ICP_QAT_FW_LA_CMD_SSL3_KEY_DERIVE:
	case ICP_QAT_FW_LA_CMD_TLS_V1_1_KEY_DERIVE:
	case ICP_QAT_FW_LA_CMD_TLS_V1_2_KEY_DERIVE:
	case ICP_QAT_FW_LA_CMD_MGF1:
	case ICP_QAT_FW_LA_CMD_AUTH_PRE_COMP:
	case ICP_QAT_FW_LA_CMD_CIPHER_PRE_COMP:
	case ICP_QAT_FW_LA_CMD_DELIMITER:
		QAT_LOG(ERR, "Unsupported Service %u",
				session->qat_cmd);
		return -ENOTSUP;
	default:
		QAT_LOG(ERR, "Unsupported Service %u",
				session->qat_cmd);
		return -ENOTSUP;
	}

	return 0;
}
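
/*
 * Illustrative use from an application's perspective (hypothetical values,
 * not part of this driver): a cipher+auth chain that resolves above to
 * ICP_QAT_FW_LA_CMD_CIPHER_HASH might be built as
 *
 *	struct rte_crypto_sym_xform auth = {
 *		.type = RTE_CRYPTO_SYM_XFORM_AUTH,
 *		.auth = { .algo = RTE_CRYPTO_AUTH_SHA256_HMAC },
 *	};
 *	struct rte_crypto_sym_xform cipher = {
 *		.type = RTE_CRYPTO_SYM_XFORM_CIPHER,
 *		.next = &auth,
 *		.cipher = { .algo = RTE_CRYPTO_CIPHER_AES_CBC },
 *	};
 *
 * and passed in through rte_cryptodev_sym_session_init(), which lands here
 * via qat_sym_session_configure().
 */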

static int
qat_sym_session_handle_single_pass(struct qat_sym_session *session,
		struct rte_crypto_aead_xform *aead_xform)
{
	struct icp_qat_fw_la_cipher_req_params *cipher_param =
			(void *) &session->fw_req.serv_specif_rqpars;

	session->is_single_pass = 1;
	session->min_qat_dev_gen = QAT_GEN3;
	session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER;
	if (aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) {
		session->qat_mode = ICP_QAT_HW_CIPHER_AEAD_MODE;
		ICP_QAT_FW_LA_GCM_IV_LEN_FLAG_SET(
			session->fw_req.comn_hdr.serv_specif_flags,
			ICP_QAT_FW_LA_GCM_IV_LEN_12_OCTETS);
	} else {
		/* Chacha-Poly is a special case that uses QAT CTR mode */
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
	}
	session->cipher_iv.offset = aead_xform->iv.offset;
	session->cipher_iv.length = aead_xform->iv.length;
	if (qat_sym_session_aead_create_cd_cipher(session,
			aead_xform->key.data, aead_xform->key.length))
		return -EINVAL;
	session->aad_len = aead_xform->aad_length;
	session->digest_length = aead_xform->digest_length;
	if (aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
		session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
		session->auth_op = ICP_QAT_HW_AUTH_GENERATE;
		ICP_QAT_FW_LA_RET_AUTH_SET(
			session->fw_req.comn_hdr.serv_specif_flags,
			ICP_QAT_FW_LA_RET_AUTH_RES);
	} else {
		session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
		session->auth_op = ICP_QAT_HW_AUTH_VERIFY;
		ICP_QAT_FW_LA_CMP_AUTH_SET(
			session->fw_req.comn_hdr.serv_specif_flags,
			ICP_QAT_FW_LA_CMP_AUTH_RES);
	}
	ICP_QAT_FW_LA_SINGLE_PASS_PROTO_FLAG_SET(
			session->fw_req.comn_hdr.serv_specif_flags,
			ICP_QAT_FW_LA_SINGLE_PASS_PROTO);
	ICP_QAT_FW_LA_PROTO_SET(
			session->fw_req.comn_hdr.serv_specif_flags,
			ICP_QAT_FW_LA_NO_PROTO);
	session->fw_req.comn_hdr.service_cmd_id =
			ICP_QAT_FW_LA_CMD_CIPHER;
	session->cd.cipher.cipher_config.val =
			ICP_QAT_HW_CIPHER_CONFIG_BUILD(
				ICP_QAT_HW_CIPHER_AEAD_MODE,
				session->qat_cipher_alg,
				ICP_QAT_HW_CIPHER_NO_CONVERT,
				session->qat_dir);
	QAT_FIELD_SET(session->cd.cipher.cipher_config.val,
			aead_xform->digest_length,
			QAT_CIPHER_AEAD_HASH_CMP_LEN_BITPOS,
			QAT_CIPHER_AEAD_HASH_CMP_LEN_MASK);
	session->cd.cipher.cipher_config.reserved =
			ICP_QAT_HW_CIPHER_CONFIG_BUILD_UPPER(
				aead_xform->aad_length);
	cipher_param->spc_aad_sz = aead_xform->aad_length;
	cipher_param->spc_auth_res_sz = aead_xform->digest_length;

	return 0;
}
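
/*
 * Single-pass (SPC) sessions program the request as a plain CIPHER command:
 * on GEN3+ hardware the AEAD-capable cipher slice computes the digest in
 * the same pass, so no separate auth slice is chained. The digest length
 * is packed into the cipher config word and the AAD length into its upper
 * word; both are echoed in the request parameters
 * (spc_aad_sz/spc_auth_res_sz) above.
 */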

int
qat_sym_session_configure_auth(struct rte_cryptodev *dev,
				struct rte_crypto_sym_xform *xform,
				struct qat_sym_session *session)
{
	struct rte_crypto_auth_xform *auth_xform = qat_get_auth_xform(xform);
	struct qat_sym_dev_private *internals = dev->data->dev_private;
	const uint8_t *key_data = auth_xform->key.data;
	uint8_t key_length = auth_xform->key.length;

	session->aes_cmac = 0;
	session->auth_key_length = auth_xform->key.length;
	session->auth_iv.offset = auth_xform->iv.offset;
	session->auth_iv.length = auth_xform->iv.length;
	session->auth_mode = ICP_QAT_HW_AUTH_MODE1;

	switch (auth_xform->algo) {
	case RTE_CRYPTO_AUTH_SHA1:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA1;
		session->auth_mode = ICP_QAT_HW_AUTH_MODE0;
		break;
	case RTE_CRYPTO_AUTH_SHA224:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA224;
		session->auth_mode = ICP_QAT_HW_AUTH_MODE0;
		break;
	case RTE_CRYPTO_AUTH_SHA256:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA256;
		session->auth_mode = ICP_QAT_HW_AUTH_MODE0;
		break;
	case RTE_CRYPTO_AUTH_SHA384:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA384;
		session->auth_mode = ICP_QAT_HW_AUTH_MODE0;
		break;
	case RTE_CRYPTO_AUTH_SHA512:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA512;
		session->auth_mode = ICP_QAT_HW_AUTH_MODE0;
		break;
	case RTE_CRYPTO_AUTH_SHA1_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA1;
		break;
	case RTE_CRYPTO_AUTH_SHA224_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA224;
		break;
	case RTE_CRYPTO_AUTH_SHA256_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA256;
		break;
	case RTE_CRYPTO_AUTH_SHA384_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA384;
		break;
	case RTE_CRYPTO_AUTH_SHA512_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA512;
		break;
	case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
		break;
	case RTE_CRYPTO_AUTH_AES_CMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
		session->aes_cmac = 1;
		break;
	case RTE_CRYPTO_AUTH_AES_GMAC:
		if (qat_sym_validate_aes_key(auth_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES key size");
			return -EINVAL;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
		if (session->auth_iv.length == 0)
			session->auth_iv.length = AES_GCM_J0_LEN;
		break;
	case RTE_CRYPTO_AUTH_SNOW3G_UIA2:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2;
		break;
	case RTE_CRYPTO_AUTH_MD5_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_MD5;
		break;
	case RTE_CRYPTO_AUTH_NULL:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_NULL;
		break;
	case RTE_CRYPTO_AUTH_KASUMI_F9:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_KASUMI_F9;
		break;
	case RTE_CRYPTO_AUTH_ZUC_EIA3:
		if (!qat_is_auth_alg_supported(auth_xform->algo, internals)) {
			QAT_LOG(ERR, "%s not supported on this device",
				rte_crypto_auth_algorithm_strings
				[auth_xform->algo]);
			return -ENOTSUP;
		}
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3;
		break;
	case RTE_CRYPTO_AUTH_MD5:
	case RTE_CRYPTO_AUTH_AES_CBC_MAC:
		QAT_LOG(ERR, "Crypto: Unsupported hash alg %u",
				auth_xform->algo);
		return -ENOTSUP;
	default:
		QAT_LOG(ERR, "Crypto: Undefined Hash algo %u specified",
				auth_xform->algo);
		return -EINVAL;
	}

	if (auth_xform->algo == RTE_CRYPTO_AUTH_AES_GMAC) {
		if (auth_xform->op == RTE_CRYPTO_AUTH_OP_GENERATE) {
			session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER_HASH;
			session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
			/*
			 * It needs to create cipher desc content first,
			 * then authentication
			 */

			if (qat_sym_session_aead_create_cd_cipher(session,
						auth_xform->key.data,
						auth_xform->key.length))
				return -EINVAL;

			if (qat_sym_session_aead_create_cd_auth(session,
						key_data,
						key_length,
						0,
						auth_xform->digest_length,
						auth_xform->op))
				return -EINVAL;
		} else {
			session->qat_cmd = ICP_QAT_FW_LA_CMD_HASH_CIPHER;
			session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
			/*
			 * It needs to create authentication desc content
			 * first, then cipher
			 */

			if (qat_sym_session_aead_create_cd_auth(session,
						key_data,
						key_length,
						0,
						auth_xform->digest_length,
						auth_xform->op))
				return -EINVAL;

			if (qat_sym_session_aead_create_cd_cipher(session,
						auth_xform->key.data,
						auth_xform->key.length))
				return -EINVAL;
		}
		/* Restore to authentication only */
		session->qat_cmd = ICP_QAT_FW_LA_CMD_AUTH;
	} else {
		if (qat_sym_session_aead_create_cd_auth(session,
				key_data,
				key_length,
				0,
				auth_xform->digest_length,
				auth_xform->op))
			return -EINVAL;
	}

	session->digest_length = auth_xform->digest_length;
	return 0;
}
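
/*
 * Note on AES-GMAC above: the hardware handles GMAC through the GCM
 * machinery, so both a cipher and an auth content descriptor are built
 * from the same key before the command id is restored to plain AUTH. The
 * Galois hash key (H) is derived from the AES key by the GCM branch of
 * qat_sym_do_precomputes().
 */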

int
qat_sym_session_configure_aead(struct rte_cryptodev *dev,
				struct rte_crypto_sym_xform *xform,
				struct qat_sym_session *session)
{
	struct rte_crypto_aead_xform *aead_xform = &xform->aead;
	enum rte_crypto_auth_operation crypto_operation;
	struct qat_sym_dev_private *internals =
			dev->data->dev_private;
	enum qat_device_gen qat_dev_gen =
			internals->qat_dev->qat_dev_gen;

	/*
	 * Store AEAD IV parameters as cipher IV,
	 * to avoid unnecessary memory usage
	 */
	session->cipher_iv.offset = xform->aead.iv.offset;
	session->cipher_iv.length = xform->aead.iv.length;

	session->auth_mode = ICP_QAT_HW_AUTH_MODE1;

	session->is_single_pass = 0;
	switch (aead_xform->algo) {
	case RTE_CRYPTO_AEAD_AES_GCM:
		if (qat_sym_validate_aes_key(aead_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES key size");
			return -EINVAL;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
		if (qat_dev_gen > QAT_GEN2 && aead_xform->iv.length ==
				QAT_AES_GCM_SPC_IV_SIZE) {
			return qat_sym_session_handle_single_pass(session,
					aead_xform);
		}
		if (session->cipher_iv.length == 0)
			session->cipher_iv.length = AES_GCM_J0_LEN;
		break;
	case RTE_CRYPTO_AEAD_AES_CCM:
		if (qat_sym_validate_aes_key(aead_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES key size");
			return -EINVAL;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC;
		break;
	case RTE_CRYPTO_AEAD_CHACHA20_POLY1305:
		if (aead_xform->key.length != ICP_QAT_HW_CHACHAPOLY_KEY_SZ)
			return -EINVAL;
		session->qat_cipher_alg =
				ICP_QAT_HW_CIPHER_ALGO_CHACHA20_POLY1305;
		return qat_sym_session_handle_single_pass(session,
						aead_xform);
	default:
		QAT_LOG(ERR, "Crypto: Undefined AEAD specified %u\n",
				aead_xform->algo);
		return -EINVAL;
	}

	if ((aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT &&
			aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) ||
			(aead_xform->op == RTE_CRYPTO_AEAD_OP_DECRYPT &&
			aead_xform->algo == RTE_CRYPTO_AEAD_AES_CCM)) {
		session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
		/*
		 * It needs to create cipher desc content first,
		 * then authentication
		 */
		crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
			RTE_CRYPTO_AUTH_OP_GENERATE : RTE_CRYPTO_AUTH_OP_VERIFY;

		if (qat_sym_session_aead_create_cd_cipher(session,
					aead_xform->key.data,
					aead_xform->key.length))
			return -EINVAL;

		if (qat_sym_session_aead_create_cd_auth(session,
					aead_xform->key.data,
					aead_xform->key.length,
					aead_xform->aad_length,
					aead_xform->digest_length,
					crypto_operation))
			return -EINVAL;
	} else {
		session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
		/*
		 * It needs to create authentication desc content first,
		 * then cipher
		 */

		crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
			RTE_CRYPTO_AUTH_OP_VERIFY : RTE_CRYPTO_AUTH_OP_GENERATE;

		if (qat_sym_session_aead_create_cd_auth(session,
					aead_xform->key.data,
					aead_xform->key.length,
					aead_xform->aad_length,
					aead_xform->digest_length,
					crypto_operation))
			return -EINVAL;

		if (qat_sym_session_aead_create_cd_cipher(session,
					aead_xform->key.data,
					aead_xform->key.length))
			return -EINVAL;
	}

	session->digest_length = aead_xform->digest_length;
	return 0;
}

unsigned int qat_sym_session_get_private_size(
		struct rte_cryptodev *dev __rte_unused)
{
	return RTE_ALIGN_CEIL(sizeof(struct qat_sym_session), 8);
}

/* returns block size in bytes per cipher algo */
int qat_cipher_get_block_size(enum icp_qat_hw_cipher_algo qat_cipher_alg)
{
	switch (qat_cipher_alg) {
	case ICP_QAT_HW_CIPHER_ALGO_DES:
		return ICP_QAT_HW_DES_BLK_SZ;
	case ICP_QAT_HW_CIPHER_ALGO_3DES:
		return ICP_QAT_HW_3DES_BLK_SZ;
	case ICP_QAT_HW_CIPHER_ALGO_AES128:
	case ICP_QAT_HW_CIPHER_ALGO_AES192:
	case ICP_QAT_HW_CIPHER_ALGO_AES256:
		return ICP_QAT_HW_AES_BLK_SZ;
	default:
		QAT_LOG(ERR, "invalid block cipher alg %u", qat_cipher_alg);
		return -EFAULT;
	}
}

/*
 * Returns size in bytes per hash algo for state1 size field in cd_ctrl
 * This is digest size rounded up to nearest quadword
 */
static int qat_hash_get_state1_size(enum icp_qat_hw_auth_algo qat_hash_alg)
{
	switch (qat_hash_alg) {
	case ICP_QAT_HW_AUTH_ALGO_SHA1:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA1_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_SHA224:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA224_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_SHA256:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA256_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_SHA384:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA384_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_SHA512:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
	case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_GALOIS_128_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_ZUC_3G_EIA3_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_SNOW_3G_UIA2_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_MD5:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_MD5_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_KASUMI_F9_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_CBC_MAC_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_NULL:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_NULL_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
		/* return maximum state1 size in this case */
		return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	default:
		QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
		return -EFAULT;
	}
}

/* returns digest size in bytes per hash algo */
static int qat_hash_get_digest_size(enum icp_qat_hw_auth_algo qat_hash_alg)
{
	switch (qat_hash_alg) {
	case ICP_QAT_HW_AUTH_ALGO_SHA1:
		return ICP_QAT_HW_SHA1_STATE1_SZ;
	case ICP_QAT_HW_AUTH_ALGO_SHA224:
		return ICP_QAT_HW_SHA224_STATE1_SZ;
	case ICP_QAT_HW_AUTH_ALGO_SHA256:
		return ICP_QAT_HW_SHA256_STATE1_SZ;
	case ICP_QAT_HW_AUTH_ALGO_SHA384:
		return ICP_QAT_HW_SHA384_STATE1_SZ;
	case ICP_QAT_HW_AUTH_ALGO_SHA512:
		return ICP_QAT_HW_SHA512_STATE1_SZ;
	case ICP_QAT_HW_AUTH_ALGO_MD5:
		return ICP_QAT_HW_MD5_STATE1_SZ;
	case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
		return ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
	case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
		/* return maximum digest size in this case */
		return ICP_QAT_HW_SHA512_STATE1_SZ;
	default:
		QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
		return -EFAULT;
	}
}

/* returns block size in bytes per hash algo */
static int qat_hash_get_block_size(enum icp_qat_hw_auth_algo qat_hash_alg)
{
	switch (qat_hash_alg) {
	case ICP_QAT_HW_AUTH_ALGO_SHA1:
		return SHA_CBLOCK;
	case ICP_QAT_HW_AUTH_ALGO_SHA224:
		return SHA256_CBLOCK;
	case ICP_QAT_HW_AUTH_ALGO_SHA256:
		return SHA256_CBLOCK;
	case ICP_QAT_HW_AUTH_ALGO_SHA384:
		return SHA512_CBLOCK;
	case ICP_QAT_HW_AUTH_ALGO_SHA512:
		return SHA512_CBLOCK;
	case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
		return 16;
	case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
		return ICP_QAT_HW_AES_BLK_SZ;
	case ICP_QAT_HW_AUTH_ALGO_MD5:
		return MD5_CBLOCK;
	case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
		/* return maximum block size in this case */
		return SHA512_CBLOCK;
	default:
		QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
		return -EFAULT;
	}
}

static int partial_hash_sha1(uint8_t *data_in, uint8_t *data_out)
{
	SHA_CTX ctx;

	if (!SHA1_Init(&ctx))
		return -EFAULT;
	SHA1_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx, SHA_DIGEST_LENGTH);
	return 0;
}

static int partial_hash_sha224(uint8_t *data_in, uint8_t *data_out)
{
	SHA256_CTX ctx;

	if (!SHA224_Init(&ctx))
		return -EFAULT;
	SHA256_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
	return 0;
}

static int partial_hash_sha256(uint8_t *data_in, uint8_t *data_out)
{
	SHA256_CTX ctx;

	if (!SHA256_Init(&ctx))
		return -EFAULT;
	SHA256_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
	return 0;
}

static int partial_hash_sha384(uint8_t *data_in, uint8_t *data_out)
{
	SHA512_CTX ctx;

	if (!SHA384_Init(&ctx))
		return -EFAULT;
	SHA512_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
	return 0;
}

static int partial_hash_sha512(uint8_t *data_in, uint8_t *data_out)
{
	SHA512_CTX ctx;

	if (!SHA512_Init(&ctx))
		return -EFAULT;
	SHA512_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
	return 0;
}

static int partial_hash_md5(uint8_t *data_in, uint8_t *data_out)
{
	MD5_CTX ctx;

	if (!MD5_Init(&ctx))
		return -EFAULT;
	MD5_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx, MD5_DIGEST_LENGTH);
	return 0;
}
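
/*
 * The partial_hash_* helpers above exploit the layout of the OpenSSL
 * context structures: after one Transform() call over a single input
 * block, the leading words of SHA_CTX/SHA256_CTX/SHA512_CTX/MD5_CTX hold
 * the updated chaining variables, which is exactly the "partial state"
 * the QAT hardware expects. Copying DIGEST_LENGTH bytes from the start of
 * the context therefore extracts that state without any finalisation
 * (no padding, no length block).
 */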

static int partial_hash_compute(enum icp_qat_hw_auth_algo hash_alg,
			uint8_t *data_in,
			uint8_t *data_out)
{
	int digest_size;
	uint8_t digest[qat_hash_get_digest_size(
			ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
	uint32_t *hash_state_out_be32;
	uint64_t *hash_state_out_be64;
	int i;

	digest_size = qat_hash_get_digest_size(hash_alg);
	if (digest_size <= 0)
		return -EFAULT;

	hash_state_out_be32 = (uint32_t *)data_out;
	hash_state_out_be64 = (uint64_t *)data_out;

	switch (hash_alg) {
	case ICP_QAT_HW_AUTH_ALGO_SHA1:
		if (partial_hash_sha1(data_in, digest))
			return -EFAULT;
		for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
			*hash_state_out_be32 =
				rte_bswap32(*(((uint32_t *)digest)+i));
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA224:
		if (partial_hash_sha224(data_in, digest))
			return -EFAULT;
		for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
			*hash_state_out_be32 =
				rte_bswap32(*(((uint32_t *)digest)+i));
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA256:
		if (partial_hash_sha256(data_in, digest))
			return -EFAULT;
		for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
			*hash_state_out_be32 =
				rte_bswap32(*(((uint32_t *)digest)+i));
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA384:
		if (partial_hash_sha384(data_in, digest))
			return -EFAULT;
		for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
			*hash_state_out_be64 =
				rte_bswap64(*(((uint64_t *)digest)+i));
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA512:
		if (partial_hash_sha512(data_in, digest))
			return -EFAULT;
		for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
			*hash_state_out_be64 =
				rte_bswap64(*(((uint64_t *)digest)+i));
		break;
	case ICP_QAT_HW_AUTH_ALGO_MD5:
		if (partial_hash_md5(data_in, data_out))
			return -EFAULT;
		break;
	default:
		QAT_LOG(ERR, "invalid hash alg %u", hash_alg);
		return -EFAULT;
	}

	return 0;
}

#define HMAC_IPAD_VALUE	0x36
#define HMAC_OPAD_VALUE	0x5c
#define HASH_XCBC_PRECOMP_KEY_NUM 3

static const uint8_t AES_CMAC_SEED[ICP_QAT_HW_AES_128_KEY_SZ];

static void aes_cmac_key_derive(uint8_t *base, uint8_t *derived)
{
	int i;

	derived[0] = base[0] << 1;
	for (i = 1; i < ICP_QAT_HW_AES_BLK_SZ ; i++) {
		derived[i] = base[i] << 1;
		derived[i - 1] |= base[i] >> 7;
	}

	if (base[0] & 0x80)
		derived[ICP_QAT_HW_AES_BLK_SZ - 1] ^= QAT_AES_CMAC_CONST_RB;
}
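
/*
 * aes_cmac_key_derive() implements the subkey derivation from NIST
 * SP 800-38B: derived = base << 1 (treated as a 128-bit big-endian value),
 * with the constant Rb (QAT_AES_CMAC_CONST_RB, 0x87) XOR-ed into the last
 * byte when the shifted-out MSB was 1. Applied to L = AES-K(0^128) it
 * yields K1, and applied to K1 it yields K2.
 */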

static int qat_sym_do_precomputes(enum icp_qat_hw_auth_algo hash_alg,
				const uint8_t *auth_key,
				uint16_t auth_keylen,
				uint8_t *p_state_buf,
				uint16_t *p_state_len,
				uint8_t aes_cmac)
{
	int block_size;
	uint8_t ipad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
	uint8_t opad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
	int i;

	if (hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC) {

		/* CMAC */
		if (aes_cmac) {
			AES_KEY enc_key;
			uint8_t *in = NULL;
			uint8_t k0[ICP_QAT_HW_AES_128_KEY_SZ];
			uint8_t *k1, *k2;

			auth_keylen = ICP_QAT_HW_AES_128_KEY_SZ;

			in = rte_zmalloc("AES CMAC K1",
					 ICP_QAT_HW_AES_128_KEY_SZ, 16);

			if (in == NULL) {
				QAT_LOG(ERR, "Failed to alloc memory");
				return -ENOMEM;
			}

			rte_memcpy(in, AES_CMAC_SEED,
				   ICP_QAT_HW_AES_128_KEY_SZ);
			rte_memcpy(p_state_buf, auth_key, auth_keylen);

			if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
				&enc_key) != 0) {
				rte_free(in);
				return -EFAULT;
			}

			AES_encrypt(in, k0, &enc_key);

			k1 = p_state_buf + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
			k2 = k1 + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;

			aes_cmac_key_derive(k0, k1);
			aes_cmac_key_derive(k1, k2);

			memset(k0, 0, ICP_QAT_HW_AES_128_KEY_SZ);
			*p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
			rte_free(in);
			return 0;
		} else {
			static uint8_t qat_aes_xcbc_key_seed[
					ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ] = {
				0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
				0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
				0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
				0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
				0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
				0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
			};

			uint8_t *in = NULL;
			uint8_t *out = p_state_buf;
			int x;
			AES_KEY enc_key;

			in = rte_zmalloc("working mem for key",
					ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ, 16);
			if (in == NULL) {
				QAT_LOG(ERR, "Failed to alloc memory");
				return -ENOMEM;
			}

			rte_memcpy(in, qat_aes_xcbc_key_seed,
					ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
			for (x = 0; x < HASH_XCBC_PRECOMP_KEY_NUM; x++) {
				if (AES_set_encrypt_key(auth_key,
							auth_keylen << 3,
							&enc_key) != 0) {
					rte_free(in -
					  (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ));
					memset(out -
					   (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ),
					  0, ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
					return -EFAULT;
				}
				AES_encrypt(in, out, &enc_key);
				in += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
				out += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
			}
			*p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
			rte_free(in - x*ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ);
			return 0;
		}

	} else if ((hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_128) ||
		(hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_64)) {
		uint8_t *in = NULL;
		uint8_t *out = p_state_buf;
		AES_KEY enc_key;

		memset(p_state_buf, 0, ICP_QAT_HW_GALOIS_H_SZ +
				ICP_QAT_HW_GALOIS_LEN_A_SZ +
				ICP_QAT_HW_GALOIS_E_CTR0_SZ);
		in = rte_zmalloc("working mem for key",
				ICP_QAT_HW_GALOIS_H_SZ, 16);
		if (in == NULL) {
			QAT_LOG(ERR, "Failed to alloc memory");
			return -ENOMEM;
		}

		memset(in, 0, ICP_QAT_HW_GALOIS_H_SZ);
		if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
			&enc_key) != 0) {
			rte_free(in);
			return -EFAULT;
		}
		AES_encrypt(in, out, &enc_key);
		*p_state_len = ICP_QAT_HW_GALOIS_H_SZ +
				ICP_QAT_HW_GALOIS_LEN_A_SZ +
				ICP_QAT_HW_GALOIS_E_CTR0_SZ;
		rte_free(in);
		return 0;
	}

	block_size = qat_hash_get_block_size(hash_alg);
	if (block_size < 0)
		return block_size;
	/* init ipad and opad from key and xor with fixed values */
	memset(ipad, 0, block_size);
	memset(opad, 0, block_size);

	if (auth_keylen > (unsigned int)block_size) {
		QAT_LOG(ERR, "invalid keylen %u", auth_keylen);
		return -EFAULT;
	}
	rte_memcpy(ipad, auth_key, auth_keylen);
	rte_memcpy(opad, auth_key, auth_keylen);

	for (i = 0; i < block_size; i++) {
		uint8_t *ipad_ptr = ipad + i;
		uint8_t *opad_ptr = opad + i;
		*ipad_ptr ^= HMAC_IPAD_VALUE;
		*opad_ptr ^= HMAC_OPAD_VALUE;
	}

	/* do partial hash of ipad and copy to state1 */
	if (partial_hash_compute(hash_alg, ipad, p_state_buf)) {
		memset(ipad, 0, block_size);
		memset(opad, 0, block_size);
		QAT_LOG(ERR, "ipad precompute failed");
		return -EFAULT;
	}

	/*
	 * State len is a multiple of 8, so may be larger than the digest.
	 * Put the partial hash of opad state_len bytes after state1
	 */
	*p_state_len = qat_hash_get_state1_size(hash_alg);
	if (partial_hash_compute(hash_alg, opad, p_state_buf + *p_state_len)) {
		memset(ipad, 0, block_size);
		memset(opad, 0, block_size);
		QAT_LOG(ERR, "opad precompute failed");
		return -EFAULT;
	}

	/* don't leave data lying around */
	memset(ipad, 0, block_size);
	memset(opad, 0, block_size);
	return 0;
}
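
/*
 * For HMAC algorithms the precompute above stores the partial hash of
 * (key XOR ipad) as state1 and the partial hash of (key XOR opad)
 * immediately after it, so the hardware can resume both the inner and
 * outer hashes per request instead of re-hashing the padded key each
 * time. XCBC/CMAC and GCM take the earlier branches, which instead derive
 * and store the AES subkeys (K1..K3) or the Galois hash value H.
 */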

static void
qat_sym_session_init_common_hdr(struct icp_qat_fw_comn_req_hdr *header,
		enum qat_sym_proto_flag proto_flags)
{
	header->hdr_flags =
		ICP_QAT_FW_COMN_HDR_FLAGS_BUILD(ICP_QAT_FW_COMN_REQ_FLAG_SET);
	header->service_type = ICP_QAT_FW_COMN_REQ_CPM_FW_LA;
	header->comn_req_flags =
		ICP_QAT_FW_COMN_FLAGS_BUILD(QAT_COMN_CD_FLD_TYPE_64BIT_ADR,
					QAT_COMN_PTR_TYPE_FLAT);
	ICP_QAT_FW_LA_PARTIAL_SET(header->serv_specif_flags,
				  ICP_QAT_FW_LA_PARTIAL_NONE);
	ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(header->serv_specif_flags,
					   ICP_QAT_FW_CIPH_IV_16BYTE_DATA);

	switch (proto_flags) {
	case QAT_CRYPTO_PROTO_FLAG_NONE:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_PROTO);
		break;
	case QAT_CRYPTO_PROTO_FLAG_CCM:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_CCM_PROTO);
		break;
	case QAT_CRYPTO_PROTO_FLAG_GCM:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_GCM_PROTO);
		break;
	case QAT_CRYPTO_PROTO_FLAG_SNOW3G:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_SNOW_3G_PROTO);
		break;
	case QAT_CRYPTO_PROTO_FLAG_ZUC:
		ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(header->serv_specif_flags,
			ICP_QAT_FW_LA_ZUC_3G_PROTO);
		break;
	}

	ICP_QAT_FW_LA_UPDATE_STATE_SET(header->serv_specif_flags,
					   ICP_QAT_FW_LA_NO_UPDATE_STATE);
	ICP_QAT_FW_LA_DIGEST_IN_BUFFER_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_DIGEST_IN_BUFFER);
}

/*
 * Snow3G and ZUC should never use this function
 * and set its protocol flag in both cipher and auth part of content
 * descriptor building function
 */
static enum qat_sym_proto_flag
qat_get_crypto_proto_flag(uint16_t flags)
{
	int proto = ICP_QAT_FW_LA_PROTO_GET(flags);
	enum qat_sym_proto_flag qat_proto_flag =
			QAT_CRYPTO_PROTO_FLAG_NONE;

	switch (proto) {
	case ICP_QAT_FW_LA_GCM_PROTO:
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
		break;
	case ICP_QAT_FW_LA_CCM_PROTO:
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
		break;
	}

	return qat_proto_flag;
}

int qat_sym_session_aead_create_cd_cipher(struct qat_sym_session *cdesc,
						const uint8_t *cipherkey,
						uint32_t cipherkeylen)
{
	struct icp_qat_hw_cipher_algo_blk *cipher;
	struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
	struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
	struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
	void *ptr = &req_tmpl->cd_ctrl;
	struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
	struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
	enum icp_qat_hw_cipher_convert key_convert;
	enum qat_sym_proto_flag qat_proto_flag =
		QAT_CRYPTO_PROTO_FLAG_NONE;
	uint32_t total_key_size;
	uint16_t cipher_offset, cd_size;
	uint32_t wordIndex = 0;
	uint32_t *temp_key = NULL;

	if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER) {
		cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
		ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_DRAM_WR);
		ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_RET_AUTH_RES);
		ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
		cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
		ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_DRAM_WR);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
		QAT_LOG(ERR, "Invalid param, must be a cipher command.");
		return -EFAULT;
	}

	if (cdesc->qat_mode == ICP_QAT_HW_CIPHER_CTR_MODE) {
		/*
		 * CTR Streaming ciphers are a special case. Decrypt = encrypt
		 * Overriding default values previously set
		 */
		cdesc->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
		key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2
		|| cdesc->qat_cipher_alg ==
			ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)
		key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
	else if (cdesc->qat_dir == ICP_QAT_HW_CIPHER_ENCRYPT)
		key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
	else
		key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;

	if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
		total_key_size = ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ +
			ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
		cipher_cd_ctrl->cipher_state_sz =
			ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;

	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
		total_key_size = ICP_QAT_HW_KASUMI_F8_KEY_SZ;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_KASUMI_BLK_SZ >> 3;
		cipher_cd_ctrl->cipher_padding_sz =
					(2 * ICP_QAT_HW_KASUMI_BLK_SZ) >> 3;
	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES) {
		total_key_size = ICP_QAT_HW_3DES_KEY_SZ;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_3DES_BLK_SZ >> 3;
		qat_proto_flag =
			qat_get_crypto_proto_flag(header->serv_specif_flags);
	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_DES) {
		total_key_size = ICP_QAT_HW_DES_KEY_SZ;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_DES_BLK_SZ >> 3;
		qat_proto_flag =
			qat_get_crypto_proto_flag(header->serv_specif_flags);
	} else if (cdesc->qat_cipher_alg ==
		ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
		total_key_size = ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ +
			ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
		cipher_cd_ctrl->cipher_state_sz =
			ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
		cdesc->min_qat_dev_gen = QAT_GEN2;
	} else {
		total_key_size = cipherkeylen;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_AES_BLK_SZ >> 3;
		qat_proto_flag =
			qat_get_crypto_proto_flag(header->serv_specif_flags);
	}
	cipher_cd_ctrl->cipher_key_sz = total_key_size >> 3;
	cipher_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
	cipher_cd_ctrl->cipher_cfg_offset = cipher_offset >> 3;

	header->service_cmd_id = cdesc->qat_cmd;
	qat_sym_session_init_common_hdr(header, qat_proto_flag);

	cipher = (struct icp_qat_hw_cipher_algo_blk *)cdesc->cd_cur_ptr;
	cipher->cipher_config.val =
	    ICP_QAT_HW_CIPHER_CONFIG_BUILD(cdesc->qat_mode,
					cdesc->qat_cipher_alg, key_convert,
					cdesc->qat_dir);

	if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
		temp_key = (uint32_t *)(cdesc->cd_cur_ptr +
					sizeof(struct icp_qat_hw_cipher_config)
					+ cipherkeylen);
		memcpy(cipher->key, cipherkey, cipherkeylen);
		memcpy(temp_key, cipherkey, cipherkeylen);

		/* XOR Key with KASUMI F8 key modifier at 4 bytes level */
		for (wordIndex = 0; wordIndex < (cipherkeylen >> 2);
								wordIndex++)
			temp_key[wordIndex] ^= KASUMI_F8_KEY_MODIFIER_4_BYTES;

		cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
					cipherkeylen + cipherkeylen;
	} else {
		memcpy(cipher->key, cipherkey, cipherkeylen);
		cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
					cipherkeylen;
	}

	if (total_key_size > cipherkeylen) {
		uint32_t padding_size = total_key_size-cipherkeylen;
		if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
			&& (cipherkeylen == QAT_3DES_KEY_SZ_OPT2)) {
			/* K3 not provided so use K1 = K3 */
			memcpy(cdesc->cd_cur_ptr, cipherkey, padding_size);
		} else if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
			&& (cipherkeylen == QAT_3DES_KEY_SZ_OPT3)) {
			/* K2 and K3 not provided so use K1 = K2 = K3 */
			memcpy(cdesc->cd_cur_ptr, cipherkey,
				cipherkeylen);
			memcpy(cdesc->cd_cur_ptr+cipherkeylen,
				cipherkey, cipherkeylen);
		} else
			memset(cdesc->cd_cur_ptr, 0, padding_size);

		cdesc->cd_cur_ptr += padding_size;
	}
	cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
	cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;

	return 0;
}

int qat_sym_session_aead_create_cd_auth(struct qat_sym_session *cdesc,
						const uint8_t *authkey,
						uint32_t authkeylen,
						uint32_t aad_length,
						uint32_t digestsize,
						unsigned int operation)
{
	struct icp_qat_hw_auth_setup *hash;
	struct icp_qat_hw_cipher_algo_blk *cipherconfig;
	struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
	struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
	struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
	void *ptr = &req_tmpl->cd_ctrl;
	struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
	struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
	struct icp_qat_fw_la_auth_req_params *auth_param =
		(struct icp_qat_fw_la_auth_req_params *)
		((char *)&req_tmpl->serv_specif_rqpars +
		ICP_QAT_FW_HASH_REQUEST_PARAMETERS_OFFSET);
	uint16_t state1_size = 0, state2_size = 0, cd_extra_size = 0;
	uint16_t hash_offset, cd_size;
	uint32_t *aad_len = NULL;
	uint32_t wordIndex = 0;
	uint32_t *pTempKey;
	enum qat_sym_proto_flag qat_proto_flag =
		QAT_CRYPTO_PROTO_FLAG_NONE;

	if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_AUTH) {
		ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_DRAM_WR);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
		ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
				ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
				ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
				ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
				ICP_QAT_FW_SLICE_DRAM_WR);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
		QAT_LOG(ERR, "Invalid param, must be a hash command.");
		return -EFAULT;
	}

	if (operation == RTE_CRYPTO_AUTH_OP_VERIFY) {
		ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_NO_RET_AUTH_RES);
		ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_CMP_AUTH_RES);
		cdesc->auth_op = ICP_QAT_HW_AUTH_VERIFY;
	} else {
		ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
					   ICP_QAT_FW_LA_RET_AUTH_RES);
		ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
					   ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
		cdesc->auth_op = ICP_QAT_HW_AUTH_GENERATE;
	}

	/*
	 * Setup the inner hash config
	 */
	hash_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
	hash = (struct icp_qat_hw_auth_setup *)cdesc->cd_cur_ptr;
	hash->auth_config.reserved = 0;
	hash->auth_config.config =
			ICP_QAT_HW_AUTH_CONFIG_BUILD(cdesc->auth_mode,
				cdesc->qat_hash_alg, digestsize);

	if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_KASUMI_F9
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_NULL)
		hash->auth_counter.counter = 0;
	else {
		int block_size = qat_hash_get_block_size(cdesc->qat_hash_alg);

		if (block_size < 0)
			return block_size;
		hash->auth_counter.counter = rte_bswap32(block_size);
	}

	cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_auth_setup);

	/*
	 * cd_cur_ptr now points at the state1 information.
	 */
	switch (cdesc->qat_hash_alg) {
	case ICP_QAT_HW_AUTH_ALGO_SHA1:
		if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0) {
			/* Plain SHA-1 */
			rte_memcpy(cdesc->cd_cur_ptr, sha1InitialState,
					sizeof(sha1InitialState));
			state1_size = qat_hash_get_state1_size(
					cdesc->qat_hash_alg);
			break;
		}
		/* SHA-1 HMAC */
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA1, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = RTE_ALIGN_CEIL(ICP_QAT_HW_SHA1_STATE2_SZ, 8);
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA224:
		if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0) {
			/* Plain SHA-224 */
			rte_memcpy(cdesc->cd_cur_ptr, sha224InitialState,
					sizeof(sha224InitialState));
			state1_size = qat_hash_get_state1_size(
					cdesc->qat_hash_alg);
			break;
		}
		/* SHA-224 HMAC */
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA224, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_SHA224_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA256:
		if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0) {
			/* Plain SHA-256 */
			rte_memcpy(cdesc->cd_cur_ptr, sha256InitialState,
					sizeof(sha256InitialState));
			state1_size = qat_hash_get_state1_size(
					cdesc->qat_hash_alg);
			break;
		}
		/* SHA-256 HMAC */
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA256, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_SHA256_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA384:
		if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0) {
			/* Plain SHA-384 */
			rte_memcpy(cdesc->cd_cur_ptr, sha384InitialState,
					sizeof(sha384InitialState));
			state1_size = qat_hash_get_state1_size(
					cdesc->qat_hash_alg);
			break;
		}
		/* SHA-384 HMAC */
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA384, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_SHA384_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA512:
		if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0) {
			/* Plain SHA-512 */
			rte_memcpy(cdesc->cd_cur_ptr, sha512InitialState,
					sizeof(sha512InitialState));
			state1_size = qat_hash_get_state1_size(
					cdesc->qat_hash_alg);
			break;
		}
		/* SHA-512 HMAC */
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA512, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_SHA512_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
		state1_size = ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;

		if (cdesc->aes_cmac)
			memset(cdesc->cd_cur_ptr, 0, state1_size);
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC,
			authkey, authkeylen, cdesc->cd_cur_ptr + state1_size,
			&state2_size, cdesc->aes_cmac)) {
			cdesc->aes_cmac ? QAT_LOG(ERR,
						  "(CMAC)precompute failed")
					: QAT_LOG(ERR,
						  "(XCBC)precompute failed");
			return -EFAULT;
		}
		break;
	case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
	case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
		state1_size = ICP_QAT_HW_GALOIS_128_STATE1_SZ;
		if (qat_sym_do_precomputes(cdesc->qat_hash_alg, authkey,
			authkeylen, cdesc->cd_cur_ptr + state1_size,
			&state2_size, cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(GCM)precompute failed");
			return -EFAULT;
		}
		/*
		 * Write (the length of AAD) into bytes 16-19 of state2
		 * in big-endian format. This field is 8 bytes
		 */
		auth_param->u2.aad_sz =
				RTE_ALIGN_CEIL(aad_length, 16);
		auth_param->hash_state_sz = (auth_param->u2.aad_sz) >> 3;

		aad_len = (uint32_t *)(cdesc->cd_cur_ptr +
					ICP_QAT_HW_GALOIS_128_STATE1_SZ +
					ICP_QAT_HW_GALOIS_H_SZ);
		*aad_len = rte_bswap32(aad_length);
		cdesc->aad_len = aad_length;
		break;
	case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2);
		state2_size = ICP_QAT_HW_SNOW_3G_UIA2_STATE2_SZ;
		memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);

		cipherconfig = (struct icp_qat_hw_cipher_algo_blk *)
				(cdesc->cd_cur_ptr + state1_size + state2_size);
		cipherconfig->cipher_config.val =
		ICP_QAT_HW_CIPHER_CONFIG_BUILD(ICP_QAT_HW_CIPHER_ECB_MODE,
			ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2,
			ICP_QAT_HW_CIPHER_KEY_CONVERT,
			ICP_QAT_HW_CIPHER_ENCRYPT);
		memcpy(cipherconfig->key, authkey, authkeylen);
		memset(cipherconfig->key + authkeylen,
				0, ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ);
		cd_extra_size += sizeof(struct icp_qat_hw_cipher_config) +
				authkeylen + ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
		auth_param->hash_state_sz = ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
		break;
	case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
		hash->auth_config.config =
			ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE0,
				cdesc->qat_hash_alg, digestsize);
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3);
		state2_size = ICP_QAT_HW_ZUC_3G_EIA3_STATE2_SZ;
		memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size
			+ ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ);

		memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
		cd_extra_size += ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
		auth_param->hash_state_sz = ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
		cdesc->min_qat_dev_gen = QAT_GEN2;

		break;
	case ICP_QAT_HW_AUTH_ALGO_MD5:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_MD5, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(MD5)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_MD5_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_NULL:
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_NULL);
		state2_size = ICP_QAT_HW_NULL_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC);
		state2_size = ICP_QAT_HW_AES_CBC_MAC_KEY_SZ +
				ICP_QAT_HW_AES_CCM_CBC_E_CTR0_SZ;

		if (aad_length > 0) {
			aad_length += ICP_QAT_HW_CCM_AAD_B0_LEN +
				ICP_QAT_HW_CCM_AAD_LEN_INFO;
			auth_param->u2.aad_sz =
				RTE_ALIGN_CEIL(aad_length,
					ICP_QAT_HW_CCM_AAD_ALIGNMENT);
		} else {
			auth_param->u2.aad_sz = ICP_QAT_HW_CCM_AAD_B0_LEN;
		}
		cdesc->aad_len = aad_length;
		hash->auth_counter.counter = 0;

		hash_cd_ctrl->outer_prefix_sz = digestsize;
		auth_param->hash_state_sz = digestsize;

		memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
		break;
	case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_KASUMI_F9);
		state2_size = ICP_QAT_HW_KASUMI_F9_STATE2_SZ;
		memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
		pTempKey = (uint32_t *)(cdesc->cd_cur_ptr + state1_size
							+ authkeylen);
		/*
		 * The Inner Hash Initial State2 block must contain IK
		 * (Initialisation Key), followed by IK XOR-ed with KM
		 * (Key Modifier): IK||(IK^KM).
		 */
		/* write the auth key */
		memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
		/* initialise temp key with auth key */
		memcpy(pTempKey, authkey, authkeylen);
		/* XOR Key with KASUMI F9 key modifier at 4 bytes level */
		for (wordIndex = 0; wordIndex < (authkeylen >> 2); wordIndex++)
			pTempKey[wordIndex] ^= KASUMI_F9_KEY_MODIFIER_4_BYTES;
		break;
	default:
		QAT_LOG(ERR, "Invalid HASH alg %u", cdesc->qat_hash_alg);
		return -EFAULT;
	}

	/* Request template setup */
	qat_sym_session_init_common_hdr(header, qat_proto_flag);
	header->service_cmd_id = cdesc->qat_cmd;

	/* Auth CD config setup */
	hash_cd_ctrl->hash_cfg_offset = hash_offset >> 3;
	hash_cd_ctrl->hash_flags = ICP_QAT_FW_AUTH_HDR_FLAG_NO_NESTED;
	hash_cd_ctrl->inner_res_sz = digestsize;
	hash_cd_ctrl->final_sz = digestsize;
	hash_cd_ctrl->inner_state1_sz = state1_size;
	auth_param->auth_res_sz = digestsize;

	hash_cd_ctrl->inner_state2_sz = state2_size;
	hash_cd_ctrl->inner_state2_offset = hash_cd_ctrl->hash_cfg_offset +
			((sizeof(struct icp_qat_hw_auth_setup) +
			 RTE_ALIGN_CEIL(hash_cd_ctrl->inner_state1_sz, 8))
					>> 3);

	cdesc->cd_cur_ptr += state1_size + state2_size + cd_extra_size;
	cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;

	cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
	cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;

	return 0;
}
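
/*
 * Resulting content descriptor layout for an auth (or chained) session,
 * as built above:
 *
 *	[icp_qat_hw_auth_setup][state1][state2][optional extra config]
 *
 * state1 is the (aligned) inner partial-hash state, state2 holds
 * algorithm-specific material (outer HMAC state, the GCM H value and AAD
 * length, CMAC/XCBC subkeys, wireless keys), and cd_extra_size accounts
 * for the trailing cipher config needed by SNOW 3G / ZUC authentication.
 */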

int qat_sym_validate_aes_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
{
	switch (key_len) {
	case ICP_QAT_HW_AES_128_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
		break;
	case ICP_QAT_HW_AES_192_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_AES192;
		break;
	case ICP_QAT_HW_AES_256_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

int qat_sym_validate_aes_docsisbpi_key(int key_len,
		enum icp_qat_hw_cipher_algo *alg)
{
	switch (key_len) {
	case ICP_QAT_HW_AES_128_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
		break;
	case ICP_QAT_HW_AES_256_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

int qat_sym_validate_snow3g_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
{
	switch (key_len) {
	case ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

int qat_sym_validate_kasumi_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
{
	switch (key_len) {
	case ICP_QAT_HW_KASUMI_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_KASUMI;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

int qat_sym_validate_des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
{
	switch (key_len) {
	case ICP_QAT_HW_DES_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_DES;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

int qat_sym_validate_3des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
{
	switch (key_len) {
	case QAT_3DES_KEY_SZ_OPT1:
	case QAT_3DES_KEY_SZ_OPT2:
	case QAT_3DES_KEY_SZ_OPT3:
		*alg = ICP_QAT_HW_CIPHER_ALGO_3DES;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

int qat_sym_validate_zuc_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
{
	switch (key_len) {
	case ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

#ifdef RTE_LIB_SECURITY
static int
qat_sec_session_check_docsis(struct rte_security_session_conf *conf)
{
	struct rte_crypto_sym_xform *crypto_sym = conf->crypto_xform;
	struct rte_security_docsis_xform *docsis = &conf->docsis;

	/* CRC generate -> Cipher encrypt */
	if (docsis->direction == RTE_SECURITY_DOCSIS_DOWNLINK) {

		if (crypto_sym != NULL &&
		    crypto_sym->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
		    crypto_sym->cipher.op == RTE_CRYPTO_CIPHER_OP_ENCRYPT &&
		    crypto_sym->cipher.algo ==
					RTE_CRYPTO_CIPHER_AES_DOCSISBPI &&
		    (crypto_sym->cipher.key.length ==
					ICP_QAT_HW_AES_128_KEY_SZ ||
		     crypto_sym->cipher.key.length ==
					ICP_QAT_HW_AES_256_KEY_SZ) &&
		    crypto_sym->cipher.iv.length == ICP_QAT_HW_AES_BLK_SZ &&
		    crypto_sym->next == NULL) {
			return 0;
		}
	/* Cipher decrypt -> CRC verify */
	} else if (docsis->direction == RTE_SECURITY_DOCSIS_UPLINK) {

		if (crypto_sym != NULL &&
		    crypto_sym->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
		    crypto_sym->cipher.op == RTE_CRYPTO_CIPHER_OP_DECRYPT &&
		    crypto_sym->cipher.algo ==
					RTE_CRYPTO_CIPHER_AES_DOCSISBPI &&
		    (crypto_sym->cipher.key.length ==
					ICP_QAT_HW_AES_128_KEY_SZ ||
		     crypto_sym->cipher.key.length ==
					ICP_QAT_HW_AES_256_KEY_SZ) &&
		    crypto_sym->cipher.iv.length == ICP_QAT_HW_AES_BLK_SZ &&
		    crypto_sym->next == NULL) {
			return 0;
		}
	}

	return -EINVAL;
}

static int
qat_sec_session_set_docsis_parameters(struct rte_cryptodev *dev,
		struct rte_security_session_conf *conf, void *session_private)
{
	int ret;
	int qat_cmd_id;
	struct rte_crypto_sym_xform *xform = NULL;
	struct qat_sym_session *session = session_private;

	/* Clear the session */
	memset(session, 0, qat_sym_session_get_private_size(dev));

	ret = qat_sec_session_check_docsis(conf);
	if (ret) {
		QAT_LOG(ERR, "Unsupported DOCSIS security configuration");
		return ret;
	}

	xform = conf->crypto_xform;

	/* Verify the session physical address is known */
	rte_iova_t session_paddr = rte_mempool_virt2iova(session);
	if (session_paddr == 0 || session_paddr == RTE_BAD_IOVA) {
		QAT_LOG(ERR,
			"Session physical address unknown. Bad memory pool.");
		return -EINVAL;
	}

	/* Set context descriptor physical address */
	session->cd_paddr = session_paddr +
			offsetof(struct qat_sym_session, cd);

	session->min_qat_dev_gen = QAT_GEN1;

	/* Get requested QAT command id - should be cipher */
	qat_cmd_id = qat_get_cmd_id(xform);
	if (qat_cmd_id != ICP_QAT_FW_LA_CMD_CIPHER) {
		QAT_LOG(ERR, "Unsupported xform chain requested");
		return -ENOTSUP;
	}
	session->qat_cmd = (enum icp_qat_fw_la_cmd_id)qat_cmd_id;

	ret = qat_sym_session_configure_cipher(dev, xform, session);
	if (ret < 0)
		return ret;

	return 0;
}

int
qat_security_session_create(void *dev,
				struct rte_security_session_conf *conf,
				struct rte_security_session *sess,
				struct rte_mempool *mempool)
{
	void *sess_private_data;
	struct rte_cryptodev *cdev = (struct rte_cryptodev *)dev;
	int ret;

	if (conf->action_type != RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL ||
			conf->protocol != RTE_SECURITY_PROTOCOL_DOCSIS) {
		QAT_LOG(ERR, "Invalid security protocol");
		return -EINVAL;
	}

	if (rte_mempool_get(mempool, &sess_private_data)) {
		QAT_LOG(ERR, "Couldn't get object from session mempool");
		return -ENOMEM;
	}

	ret = qat_sec_session_set_docsis_parameters(cdev, conf,
			sess_private_data);
	if (ret != 0) {
		QAT_LOG(ERR, "Failed to configure session parameters");
		/* Return session to mempool */
		rte_mempool_put(mempool, sess_private_data);
		return ret;
	}

	set_sec_session_private_data(sess, sess_private_data);

	return 0;
}

int
qat_security_session_destroy(void *dev __rte_unused,
				 struct rte_security_session *sess)
{
	void *sess_priv = get_sec_session_private_data(sess);
	struct qat_sym_session *s = (struct qat_sym_session *)sess_priv;

	if (sess_priv) {
		if (s->bpi_ctx)
			bpi_cipher_ctx_free(s->bpi_ctx);
		memset(s, 0, qat_sym_session_get_private_size(dev));
		struct rte_mempool *sess_mp = rte_mempool_from_obj(sess_priv);

		set_sec_session_private_data(sess, NULL);
		rte_mempool_put(sess_mp, sess_priv);
	}
	return 0;
}
#endif