1 /* SPDX-License-Identifier: (BSD-3-Clause OR GPL-2.0)
2 * Copyright(c) 2015-2018 Intel Corporation
5 #include <openssl/sha.h> /* Needed to calculate pre-compute values */
6 #include <openssl/aes.h> /* Needed to calculate pre-compute values */
7 #include <openssl/md5.h> /* Needed to calculate pre-compute values */
8 #include <openssl/evp.h> /* Needed for bpi runt block processing */
10 #include <rte_memcpy.h>
11 #include <rte_common.h>
12 #include <rte_spinlock.h>
13 #include <rte_byteorder.h>
15 #include <rte_malloc.h>
16 #include <rte_crypto_sym.h>
19 #include "qat_sym_session.h"
20 #include "qat_sym_pmd.h"
22 /** Frees a context previously created
23 * Depends on openssl libcrypto
26 bpi_cipher_ctx_free(void *bpi_ctx)
29 EVP_CIPHER_CTX_free((EVP_CIPHER_CTX *)bpi_ctx);
32 /** Creates a context in either AES or DES in ECB mode
33 * Depends on openssl libcrypto
36 bpi_cipher_ctx_init(enum rte_crypto_cipher_algorithm cryptodev_algo,
37 enum rte_crypto_cipher_operation direction __rte_unused,
38 uint8_t *key, void **ctx)
40 const EVP_CIPHER *algo = NULL;
42 *ctx = EVP_CIPHER_CTX_new();
49 if (cryptodev_algo == RTE_CRYPTO_CIPHER_DES_DOCSISBPI)
52 algo = EVP_aes_128_ecb();
54 /* IV will be ECB encrypted whether direction is encrypt or decrypt*/
55 if (EVP_EncryptInit_ex(*ctx, algo, NULL, key, 0) != 1) {
64 EVP_CIPHER_CTX_free(*ctx);
69 qat_is_cipher_alg_supported(enum rte_crypto_cipher_algorithm algo,
70 struct qat_sym_dev_private *internals)
73 const struct rte_cryptodev_capabilities *capability;
75 while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
76 RTE_CRYPTO_OP_TYPE_UNDEFINED) {
77 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
80 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_CIPHER)
83 if (capability->sym.cipher.algo == algo)
90 qat_is_auth_alg_supported(enum rte_crypto_auth_algorithm algo,
91 struct qat_sym_dev_private *internals)
94 const struct rte_cryptodev_capabilities *capability;
96 while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
97 RTE_CRYPTO_OP_TYPE_UNDEFINED) {
98 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
101 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_AUTH)
104 if (capability->sym.auth.algo == algo)
111 qat_sym_session_clear(struct rte_cryptodev *dev,
112 struct rte_cryptodev_sym_session *sess)
114 uint8_t index = dev->driver_id;
115 void *sess_priv = get_session_private_data(sess, index);
116 struct qat_sym_session *s = (struct qat_sym_session *)sess_priv;
120 bpi_cipher_ctx_free(s->bpi_ctx);
121 memset(s, 0, qat_sym_session_get_private_size(dev));
122 struct rte_mempool *sess_mp = rte_mempool_from_obj(sess_priv);
124 set_session_private_data(sess, index, NULL);
125 rte_mempool_put(sess_mp, sess_priv);
130 qat_get_cmd_id(const struct rte_crypto_sym_xform *xform)
133 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER && xform->next == NULL)
134 return ICP_QAT_FW_LA_CMD_CIPHER;
136 /* Authentication Only */
137 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH && xform->next == NULL)
138 return ICP_QAT_FW_LA_CMD_AUTH;
141 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
142 /* AES-GCM and AES-CCM works with different direction
143 * GCM first encrypts and generate hash where AES-CCM
144 * first generate hash and encrypts. Similar relation
145 * applies to decryption.
147 if (xform->aead.op == RTE_CRYPTO_AEAD_OP_ENCRYPT)
148 if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
149 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
151 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
153 if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
154 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
156 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
159 if (xform->next == NULL)
162 /* Cipher then Authenticate */
163 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
164 xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
165 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
167 /* Authenticate then Cipher */
168 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH &&
169 xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
170 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
175 static struct rte_crypto_auth_xform *
176 qat_get_auth_xform(struct rte_crypto_sym_xform *xform)
179 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH)
188 static struct rte_crypto_cipher_xform *
189 qat_get_cipher_xform(struct rte_crypto_sym_xform *xform)
192 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
193 return &xform->cipher;
/*
 * qat_sym_session_configure_cipher() - parse the cipher xform and fill in
 * the session's cipher parameters (qat_cipher_alg, qat_mode, qat_dir, IV
 * offset/length), then build the cipher content descriptor.
 *
 * NOTE(review): this extract is missing interleaved lines (error returns,
 * `break` statements, BPI key-size arguments, closing braces) — gaps in
 * the embedded numbering are truncation artifacts, not the on-disk code.
 */
202 qat_sym_session_configure_cipher(struct rte_cryptodev *dev,
203 struct rte_crypto_sym_xform *xform,
204 struct qat_sym_session *session)
206 struct qat_sym_dev_private *internals = dev->data->dev_private;
207 struct rte_crypto_cipher_xform *cipher_xform = NULL;
210 /* Get cipher xform from crypto xform chain */
211 cipher_xform = qat_get_cipher_xform(xform);
/* IV location/length are remembered for per-op IV retrieval */
213 session->cipher_iv.offset = cipher_xform->iv.offset;
214 session->cipher_iv.length = cipher_xform->iv.length;
/* Per-algorithm key validation and QAT HW mode selection */
216 switch (cipher_xform->algo) {
217 case RTE_CRYPTO_CIPHER_AES_CBC:
218 if (qat_sym_validate_aes_key(cipher_xform->key.length,
219 &session->qat_cipher_alg) != 0) {
220 QAT_LOG(ERR, "Invalid AES cipher key size");
224 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
226 case RTE_CRYPTO_CIPHER_AES_CTR:
227 if (qat_sym_validate_aes_key(cipher_xform->key.length,
228 &session->qat_cipher_alg) != 0) {
229 QAT_LOG(ERR, "Invalid AES cipher key size");
233 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
235 case RTE_CRYPTO_CIPHER_SNOW3G_UEA2:
236 if (qat_sym_validate_snow3g_key(cipher_xform->key.length,
237 &session->qat_cipher_alg) != 0) {
238 QAT_LOG(ERR, "Invalid SNOW 3G cipher key size");
242 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
244 case RTE_CRYPTO_CIPHER_NULL:
245 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
247 case RTE_CRYPTO_CIPHER_KASUMI_F8:
248 if (qat_sym_validate_kasumi_key(cipher_xform->key.length,
249 &session->qat_cipher_alg) != 0) {
250 QAT_LOG(ERR, "Invalid KASUMI cipher key size");
254 session->qat_mode = ICP_QAT_HW_CIPHER_F8_MODE;
256 case RTE_CRYPTO_CIPHER_3DES_CBC:
257 if (qat_sym_validate_3des_key(cipher_xform->key.length,
258 &session->qat_cipher_alg) != 0) {
259 QAT_LOG(ERR, "Invalid 3DES cipher key size");
263 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
265 case RTE_CRYPTO_CIPHER_DES_CBC:
266 if (qat_sym_validate_des_key(cipher_xform->key.length,
267 &session->qat_cipher_alg) != 0) {
268 QAT_LOG(ERR, "Invalid DES cipher key size");
272 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
274 case RTE_CRYPTO_CIPHER_3DES_CTR:
275 if (qat_sym_validate_3des_key(cipher_xform->key.length,
276 &session->qat_cipher_alg) != 0) {
277 QAT_LOG(ERR, "Invalid 3DES cipher key size");
281 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
/* DOCSIS BPI variants additionally allocate an openssl ECB context
 * (session->bpi_ctx) for runt-block processing
 */
283 case RTE_CRYPTO_CIPHER_DES_DOCSISBPI:
284 ret = bpi_cipher_ctx_init(
287 cipher_xform->key.data,
290 QAT_LOG(ERR, "failed to create DES BPI ctx");
293 if (qat_sym_validate_des_key(cipher_xform->key.length,
294 &session->qat_cipher_alg) != 0) {
295 QAT_LOG(ERR, "Invalid DES cipher key size");
299 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
301 case RTE_CRYPTO_CIPHER_AES_DOCSISBPI:
302 ret = bpi_cipher_ctx_init(
305 cipher_xform->key.data,
308 QAT_LOG(ERR, "failed to create AES BPI ctx");
311 if (qat_sym_validate_aes_docsisbpi_key(cipher_xform->key.length,
312 &session->qat_cipher_alg) != 0) {
313 QAT_LOG(ERR, "Invalid AES DOCSISBPI key size");
317 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
/* ZUC is device-generation dependent, so capability is checked first */
319 case RTE_CRYPTO_CIPHER_ZUC_EEA3:
320 if (!qat_is_cipher_alg_supported(
321 cipher_xform->algo, internals)) {
322 QAT_LOG(ERR, "%s not supported on this device",
323 rte_crypto_cipher_algorithm_strings
324 [cipher_xform->algo]);
328 if (qat_sym_validate_zuc_key(cipher_xform->key.length,
329 &session->qat_cipher_alg) != 0) {
330 QAT_LOG(ERR, "Invalid ZUC cipher key size");
334 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
/* Algorithms the API defines but this PMD does not implement */
336 case RTE_CRYPTO_CIPHER_3DES_ECB:
337 case RTE_CRYPTO_CIPHER_AES_ECB:
338 case RTE_CRYPTO_CIPHER_AES_F8:
339 case RTE_CRYPTO_CIPHER_AES_XTS:
340 case RTE_CRYPTO_CIPHER_ARC4:
341 QAT_LOG(ERR, "Crypto QAT PMD: Unsupported Cipher alg %u",
346 QAT_LOG(ERR, "Crypto: Undefined Cipher specified %u\n",
/* Direction, then build the cipher part of the content descriptor */
352 if (cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
353 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
355 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
357 if (qat_sym_session_aead_create_cd_cipher(session,
358 cipher_xform->key.data,
359 cipher_xform->key.length)) {
/* error path: release BPI ctx so the session holds no openssl state */
367 if (session->bpi_ctx) {
368 bpi_cipher_ctx_free(session->bpi_ctx);
369 session->bpi_ctx = NULL;
375 qat_sym_session_configure(struct rte_cryptodev *dev,
376 struct rte_crypto_sym_xform *xform,
377 struct rte_cryptodev_sym_session *sess,
378 struct rte_mempool *mempool)
380 void *sess_private_data;
383 if (rte_mempool_get(mempool, &sess_private_data)) {
385 "Couldn't get object from session mempool");
389 ret = qat_sym_session_set_parameters(dev, xform, sess_private_data);
392 "Crypto QAT PMD: failed to configure session parameters");
394 /* Return session to mempool */
395 rte_mempool_put(mempool, sess_private_data);
399 set_session_private_data(sess, dev->driver_id,
/*
 * qat_sym_session_set_parameters() - fill in an already-allocated private
 * session object: record the content-descriptor IOVA, map the xform chain
 * to a firmware command id and dispatch to the cipher/auth/aead
 * configuration helpers in the order the command requires.
 *
 * NOTE(review): extract is missing interleaved lines (returns, breaks,
 * trailing arguments) — embedded-number gaps are truncation artifacts.
 */
406 qat_sym_session_set_parameters(struct rte_cryptodev *dev,
407 struct rte_crypto_sym_xform *xform, void *session_private)
409 struct qat_sym_session *session = session_private;
413 /* Set context descriptor physical address */
414 session->cd_paddr = rte_mempool_virt2iova(session) +
415 offsetof(struct qat_sym_session, cd);
417 session->min_qat_dev_gen = QAT_GEN1;
419 /* Get requested QAT command id */
420 qat_cmd_id = qat_get_cmd_id(xform);
421 if (qat_cmd_id < 0 || qat_cmd_id >= ICP_QAT_FW_LA_CMD_DELIMITER) {
422 QAT_LOG(ERR, "Unsupported xform chain requested");
425 session->qat_cmd = (enum icp_qat_fw_la_cmd_id)qat_cmd_id;
426 switch (session->qat_cmd) {
427 case ICP_QAT_FW_LA_CMD_CIPHER:
428 ret = qat_sym_session_configure_cipher(dev, xform, session);
432 case ICP_QAT_FW_LA_CMD_AUTH:
433 ret = qat_sym_session_configure_auth(dev, xform, session);
/* chained commands: AEAD xforms take the single-call aead path,
 * otherwise cipher and auth are configured in command order
 */
437 case ICP_QAT_FW_LA_CMD_CIPHER_HASH:
438 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
439 ret = qat_sym_session_configure_aead(xform,
444 ret = qat_sym_session_configure_cipher(dev,
448 ret = qat_sym_session_configure_auth(dev,
454 case ICP_QAT_FW_LA_CMD_HASH_CIPHER:
455 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
456 ret = qat_sym_session_configure_aead(xform,
461 ret = qat_sym_session_configure_auth(dev,
465 ret = qat_sym_session_configure_cipher(dev,
/* firmware services this PMD deliberately does not expose */
471 case ICP_QAT_FW_LA_CMD_TRNG_GET_RANDOM:
472 case ICP_QAT_FW_LA_CMD_TRNG_TEST:
473 case ICP_QAT_FW_LA_CMD_SSL3_KEY_DERIVE:
474 case ICP_QAT_FW_LA_CMD_TLS_V1_1_KEY_DERIVE:
475 case ICP_QAT_FW_LA_CMD_TLS_V1_2_KEY_DERIVE:
476 case ICP_QAT_FW_LA_CMD_MGF1:
477 case ICP_QAT_FW_LA_CMD_AUTH_PRE_COMP:
478 case ICP_QAT_FW_LA_CMD_CIPHER_PRE_COMP:
479 case ICP_QAT_FW_LA_CMD_DELIMITER:
480 QAT_LOG(ERR, "Unsupported Service %u",
484 QAT_LOG(ERR, "Unsupported Service %u",
/*
 * qat_sym_session_configure_auth() - parse the auth xform, select the QAT
 * hash algorithm, record auth IV and digest length, and build the auth
 * (and, for GMAC, also cipher) content descriptor.
 *
 * NOTE(review): extract is missing interleaved lines (breaks, returns,
 * some call arguments) — embedded-number gaps are truncation artifacts.
 */
493 qat_sym_session_configure_auth(struct rte_cryptodev *dev,
494 struct rte_crypto_sym_xform *xform,
495 struct qat_sym_session *session)
497 struct rte_crypto_auth_xform *auth_xform = qat_get_auth_xform(xform);
498 struct qat_sym_dev_private *internals = dev->data->dev_private;
499 uint8_t *key_data = auth_xform->key.data;
500 uint8_t key_length = auth_xform->key.length;
/* map cryptodev auth algo to QAT HW hash algo */
502 switch (auth_xform->algo) {
503 case RTE_CRYPTO_AUTH_SHA1_HMAC:
504 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA1;
506 case RTE_CRYPTO_AUTH_SHA224_HMAC:
507 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA224;
509 case RTE_CRYPTO_AUTH_SHA256_HMAC:
510 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA256;
512 case RTE_CRYPTO_AUTH_SHA384_HMAC:
513 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA384;
515 case RTE_CRYPTO_AUTH_SHA512_HMAC:
516 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA512;
518 case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
519 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
/* GMAC also needs a cipher (CTR-mode Galois) configuration */
521 case RTE_CRYPTO_AUTH_AES_GMAC:
522 if (qat_sym_validate_aes_key(auth_xform->key.length,
523 &session->qat_cipher_alg) != 0) {
524 QAT_LOG(ERR, "Invalid AES key size");
527 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
528 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
531 case RTE_CRYPTO_AUTH_SNOW3G_UIA2:
532 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2;
534 case RTE_CRYPTO_AUTH_MD5_HMAC:
535 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_MD5;
537 case RTE_CRYPTO_AUTH_NULL:
538 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_NULL;
540 case RTE_CRYPTO_AUTH_KASUMI_F9:
541 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_KASUMI_F9;
/* ZUC EIA3 is device-generation dependent; check capabilities first */
543 case RTE_CRYPTO_AUTH_ZUC_EIA3:
544 if (!qat_is_auth_alg_supported(auth_xform->algo, internals)) {
545 QAT_LOG(ERR, "%s not supported on this device",
546 rte_crypto_auth_algorithm_strings
550 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3;
/* plain (non-HMAC) hashes and CMAC variants are not supported */
552 case RTE_CRYPTO_AUTH_SHA1:
553 case RTE_CRYPTO_AUTH_SHA256:
554 case RTE_CRYPTO_AUTH_SHA512:
555 case RTE_CRYPTO_AUTH_SHA224:
556 case RTE_CRYPTO_AUTH_SHA384:
557 case RTE_CRYPTO_AUTH_MD5:
558 case RTE_CRYPTO_AUTH_AES_CMAC:
559 case RTE_CRYPTO_AUTH_AES_CBC_MAC:
560 QAT_LOG(ERR, "Crypto: Unsupported hash alg %u",
564 QAT_LOG(ERR, "Crypto: Undefined Hash algo %u specified",
569 session->auth_iv.offset = auth_xform->iv.offset;
570 session->auth_iv.length = auth_xform->iv.length;
/* GMAC: descriptor order depends on generate vs verify; the command id
 * is temporarily switched and restored to AUTH afterwards
 */
572 if (auth_xform->algo == RTE_CRYPTO_AUTH_AES_GMAC) {
573 if (auth_xform->op == RTE_CRYPTO_AUTH_OP_GENERATE) {
574 session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER_HASH;
575 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
577 * It needs to create cipher desc content first,
578 * then authentication
581 if (qat_sym_session_aead_create_cd_cipher(session,
582 auth_xform->key.data,
583 auth_xform->key.length))
586 if (qat_sym_session_aead_create_cd_auth(session,
590 auth_xform->digest_length,
594 session->qat_cmd = ICP_QAT_FW_LA_CMD_HASH_CIPHER;
595 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
597 * It needs to create authentication desc content first,
601 if (qat_sym_session_aead_create_cd_auth(session,
605 auth_xform->digest_length,
609 if (qat_sym_session_aead_create_cd_cipher(session,
610 auth_xform->key.data,
611 auth_xform->key.length))
614 /* Restore to authentication only only */
615 session->qat_cmd = ICP_QAT_FW_LA_CMD_AUTH;
617 if (qat_sym_session_aead_create_cd_auth(session,
621 auth_xform->digest_length,
626 session->digest_length = auth_xform->digest_length;
/*
 * qat_sym_session_configure_aead() - parse the AEAD xform (GCM/CCM),
 * select QAT cipher mode and hash algorithm, and build the cipher and
 * auth content descriptors in the order the algorithm/direction requires
 * (GCM encrypt and CCM decrypt run the cipher slice first).
 *
 * NOTE(review): extract is missing interleaved lines (breaks, returns,
 * trailing call arguments) — embedded-number gaps are truncation
 * artifacts.
 */
631 qat_sym_session_configure_aead(struct rte_crypto_sym_xform *xform,
632 struct qat_sym_session *session)
634 struct rte_crypto_aead_xform *aead_xform = &xform->aead;
635 enum rte_crypto_auth_operation crypto_operation;
638 * Store AEAD IV parameters as cipher IV,
639 * to avoid unnecessary memory usage
641 session->cipher_iv.offset = xform->aead.iv.offset;
642 session->cipher_iv.length = xform->aead.iv.length;
644 switch (aead_xform->algo) {
645 case RTE_CRYPTO_AEAD_AES_GCM:
646 if (qat_sym_validate_aes_key(aead_xform->key.length,
647 &session->qat_cipher_alg) != 0) {
648 QAT_LOG(ERR, "Invalid AES key size");
651 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
652 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
654 case RTE_CRYPTO_AEAD_AES_CCM:
655 if (qat_sym_validate_aes_key(aead_xform->key.length,
656 &session->qat_cipher_alg) != 0) {
657 QAT_LOG(ERR, "Invalid AES key size");
660 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
661 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC;
664 QAT_LOG(ERR, "Crypto: Undefined AEAD specified %u\n",
/* cipher-first path: GCM encrypt or CCM decrypt */
669 if ((aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT &&
670 aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) ||
671 (aead_xform->op == RTE_CRYPTO_AEAD_OP_DECRYPT &&
672 aead_xform->algo == RTE_CRYPTO_AEAD_AES_CCM)) {
673 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
675 * It needs to create cipher desc content first,
676 * then authentication
678 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
679 RTE_CRYPTO_AUTH_OP_GENERATE : RTE_CRYPTO_AUTH_OP_VERIFY;
681 if (qat_sym_session_aead_create_cd_cipher(session,
682 aead_xform->key.data,
683 aead_xform->key.length))
686 if (qat_sym_session_aead_create_cd_auth(session,
687 aead_xform->key.data,
688 aead_xform->key.length,
689 aead_xform->aad_length,
690 aead_xform->digest_length,
/* auth-first path: GCM decrypt or CCM encrypt */
694 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
696 * It needs to create authentication desc content first,
700 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
701 RTE_CRYPTO_AUTH_OP_VERIFY : RTE_CRYPTO_AUTH_OP_GENERATE;
703 if (qat_sym_session_aead_create_cd_auth(session,
704 aead_xform->key.data,
705 aead_xform->key.length,
706 aead_xform->aad_length,
707 aead_xform->digest_length,
711 if (qat_sym_session_aead_create_cd_cipher(session,
712 aead_xform->key.data,
713 aead_xform->key.length))
717 session->digest_length = aead_xform->digest_length;
721 unsigned int qat_sym_session_get_private_size(
722 struct rte_cryptodev *dev __rte_unused)
724 return RTE_ALIGN_CEIL(sizeof(struct qat_sym_session), 8);
727 /* returns block size in bytes per cipher algo */
728 int qat_cipher_get_block_size(enum icp_qat_hw_cipher_algo qat_cipher_alg)
730 switch (qat_cipher_alg) {
731 case ICP_QAT_HW_CIPHER_ALGO_DES:
732 return ICP_QAT_HW_DES_BLK_SZ;
733 case ICP_QAT_HW_CIPHER_ALGO_3DES:
734 return ICP_QAT_HW_3DES_BLK_SZ;
735 case ICP_QAT_HW_CIPHER_ALGO_AES128:
736 case ICP_QAT_HW_CIPHER_ALGO_AES192:
737 case ICP_QAT_HW_CIPHER_ALGO_AES256:
738 return ICP_QAT_HW_AES_BLK_SZ;
740 QAT_LOG(ERR, "invalid block cipher alg %u", qat_cipher_alg);
747 * Returns size in bytes per hash algo for state1 size field in cd_ctrl
748 * This is digest size rounded up to nearest quadword
750 static int qat_hash_get_state1_size(enum icp_qat_hw_auth_algo qat_hash_alg)
752 switch (qat_hash_alg) {
753 case ICP_QAT_HW_AUTH_ALGO_SHA1:
754 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA1_STATE1_SZ,
755 QAT_HW_DEFAULT_ALIGNMENT);
756 case ICP_QAT_HW_AUTH_ALGO_SHA224:
757 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA224_STATE1_SZ,
758 QAT_HW_DEFAULT_ALIGNMENT);
759 case ICP_QAT_HW_AUTH_ALGO_SHA256:
760 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA256_STATE1_SZ,
761 QAT_HW_DEFAULT_ALIGNMENT);
762 case ICP_QAT_HW_AUTH_ALGO_SHA384:
763 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA384_STATE1_SZ,
764 QAT_HW_DEFAULT_ALIGNMENT);
765 case ICP_QAT_HW_AUTH_ALGO_SHA512:
766 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
767 QAT_HW_DEFAULT_ALIGNMENT);
768 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
769 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ,
770 QAT_HW_DEFAULT_ALIGNMENT);
771 case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
772 case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
773 return QAT_HW_ROUND_UP(ICP_QAT_HW_GALOIS_128_STATE1_SZ,
774 QAT_HW_DEFAULT_ALIGNMENT);
775 case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
776 return QAT_HW_ROUND_UP(ICP_QAT_HW_ZUC_3G_EIA3_STATE1_SZ,
777 QAT_HW_DEFAULT_ALIGNMENT);
778 case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
779 return QAT_HW_ROUND_UP(ICP_QAT_HW_SNOW_3G_UIA2_STATE1_SZ,
780 QAT_HW_DEFAULT_ALIGNMENT);
781 case ICP_QAT_HW_AUTH_ALGO_MD5:
782 return QAT_HW_ROUND_UP(ICP_QAT_HW_MD5_STATE1_SZ,
783 QAT_HW_DEFAULT_ALIGNMENT);
784 case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
785 return QAT_HW_ROUND_UP(ICP_QAT_HW_KASUMI_F9_STATE1_SZ,
786 QAT_HW_DEFAULT_ALIGNMENT);
787 case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
788 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_CBC_MAC_STATE1_SZ,
789 QAT_HW_DEFAULT_ALIGNMENT);
790 case ICP_QAT_HW_AUTH_ALGO_NULL:
791 return QAT_HW_ROUND_UP(ICP_QAT_HW_NULL_STATE1_SZ,
792 QAT_HW_DEFAULT_ALIGNMENT);
793 case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
794 /* return maximum state1 size in this case */
795 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
796 QAT_HW_DEFAULT_ALIGNMENT);
798 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
804 /* returns digest size in bytes per hash algo */
805 static int qat_hash_get_digest_size(enum icp_qat_hw_auth_algo qat_hash_alg)
807 switch (qat_hash_alg) {
808 case ICP_QAT_HW_AUTH_ALGO_SHA1:
809 return ICP_QAT_HW_SHA1_STATE1_SZ;
810 case ICP_QAT_HW_AUTH_ALGO_SHA224:
811 return ICP_QAT_HW_SHA224_STATE1_SZ;
812 case ICP_QAT_HW_AUTH_ALGO_SHA256:
813 return ICP_QAT_HW_SHA256_STATE1_SZ;
814 case ICP_QAT_HW_AUTH_ALGO_SHA384:
815 return ICP_QAT_HW_SHA384_STATE1_SZ;
816 case ICP_QAT_HW_AUTH_ALGO_SHA512:
817 return ICP_QAT_HW_SHA512_STATE1_SZ;
818 case ICP_QAT_HW_AUTH_ALGO_MD5:
819 return ICP_QAT_HW_MD5_STATE1_SZ;
820 case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
821 /* return maximum digest size in this case */
822 return ICP_QAT_HW_SHA512_STATE1_SZ;
824 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
830 /* returns block size in byes per hash algo */
831 static int qat_hash_get_block_size(enum icp_qat_hw_auth_algo qat_hash_alg)
833 switch (qat_hash_alg) {
834 case ICP_QAT_HW_AUTH_ALGO_SHA1:
836 case ICP_QAT_HW_AUTH_ALGO_SHA224:
837 return SHA256_CBLOCK;
838 case ICP_QAT_HW_AUTH_ALGO_SHA256:
839 return SHA256_CBLOCK;
840 case ICP_QAT_HW_AUTH_ALGO_SHA384:
841 return SHA512_CBLOCK;
842 case ICP_QAT_HW_AUTH_ALGO_SHA512:
843 return SHA512_CBLOCK;
844 case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
846 case ICP_QAT_HW_AUTH_ALGO_MD5:
848 case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
849 /* return maximum block size in this case */
850 return SHA512_CBLOCK;
852 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
858 static int partial_hash_sha1(uint8_t *data_in, uint8_t *data_out)
862 if (!SHA1_Init(&ctx))
864 SHA1_Transform(&ctx, data_in);
865 rte_memcpy(data_out, &ctx, SHA_DIGEST_LENGTH);
869 static int partial_hash_sha224(uint8_t *data_in, uint8_t *data_out)
873 if (!SHA224_Init(&ctx))
875 SHA256_Transform(&ctx, data_in);
876 rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
880 static int partial_hash_sha256(uint8_t *data_in, uint8_t *data_out)
884 if (!SHA256_Init(&ctx))
886 SHA256_Transform(&ctx, data_in);
887 rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
891 static int partial_hash_sha384(uint8_t *data_in, uint8_t *data_out)
895 if (!SHA384_Init(&ctx))
897 SHA512_Transform(&ctx, data_in);
898 rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
902 static int partial_hash_sha512(uint8_t *data_in, uint8_t *data_out)
906 if (!SHA512_Init(&ctx))
908 SHA512_Transform(&ctx, data_in);
909 rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
913 static int partial_hash_md5(uint8_t *data_in, uint8_t *data_out)
919 MD5_Transform(&ctx, data_in);
920 rte_memcpy(data_out, &ctx, MD5_DIGEST_LENGTH);
/*
 * partial_hash_compute() - run one compression round of @hash_alg over
 * @data_in and store the resulting internal state into @data_out with the
 * word order byte-swapped to big-endian, as QAT firmware expects state1.
 * MD5 state is copied unswapped straight into data_out.
 *
 * NOTE(review): extract is missing interleaved lines (breaks, returns,
 * some declarations) — embedded-number gaps are truncation artifacts.
 */
925 static int partial_hash_compute(enum icp_qat_hw_auth_algo hash_alg,
/* scratch sized for the largest supported digest (DELIMITER = max) */
930 uint8_t digest[qat_hash_get_digest_size(
931 ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
932 uint32_t *hash_state_out_be32;
933 uint64_t *hash_state_out_be64;
936 digest_size = qat_hash_get_digest_size(hash_alg);
937 if (digest_size <= 0)
/* same output buffer viewed as 32-bit (SHA1/224/256) or 64-bit
 * (SHA384/512) words for the byte-swap loops below
 */
940 hash_state_out_be32 = (uint32_t *)data_out;
941 hash_state_out_be64 = (uint64_t *)data_out;
944 case ICP_QAT_HW_AUTH_ALGO_SHA1:
945 if (partial_hash_sha1(data_in, digest))
947 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
948 *hash_state_out_be32 =
949 rte_bswap32(*(((uint32_t *)digest)+i));
951 case ICP_QAT_HW_AUTH_ALGO_SHA224:
952 if (partial_hash_sha224(data_in, digest))
954 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
955 *hash_state_out_be32 =
956 rte_bswap32(*(((uint32_t *)digest)+i));
958 case ICP_QAT_HW_AUTH_ALGO_SHA256:
959 if (partial_hash_sha256(data_in, digest))
961 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
962 *hash_state_out_be32 =
963 rte_bswap32(*(((uint32_t *)digest)+i));
965 case ICP_QAT_HW_AUTH_ALGO_SHA384:
966 if (partial_hash_sha384(data_in, digest))
968 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
969 *hash_state_out_be64 =
970 rte_bswap64(*(((uint64_t *)digest)+i));
972 case ICP_QAT_HW_AUTH_ALGO_SHA512:
973 if (partial_hash_sha512(data_in, digest))
975 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
976 *hash_state_out_be64 =
977 rte_bswap64(*(((uint64_t *)digest)+i));
/* MD5 state needs no swap: written directly to data_out */
979 case ICP_QAT_HW_AUTH_ALGO_MD5:
980 if (partial_hash_md5(data_in, data_out))
984 QAT_LOG(ERR, "invalid hash alg %u", hash_alg);
990 #define HMAC_IPAD_VALUE 0x36
991 #define HMAC_OPAD_VALUE 0x5c
992 #define HASH_XCBC_PRECOMP_KEY_NUM 3
/*
 * qat_sym_do_precomputes() - build the precomputed authentication state
 * (state1/state2) the QAT firmware needs, writing it to @p_state_buf and
 * its length to @p_state_len. Three regimes:
 *  - AES-XCBC-MAC: derive K1/K2/K3 by AES-encrypting fixed seeds;
 *  - GCM/GMAC (GALOIS_128/64): compute hash key H = AES_K(0^128);
 *  - everything else: HMAC ipad/opad partial hashes.
 *
 * NOTE(review): extract is missing interleaved lines (error returns,
 * closing braces, some declarations) — embedded-number gaps are
 * truncation artifacts.
 */
994 static int qat_sym_do_precomputes(enum icp_qat_hw_auth_algo hash_alg,
995 const uint8_t *auth_key,
996 uint16_t auth_keylen,
997 uint8_t *p_state_buf,
998 uint16_t *p_state_len)
/* pads sized for the largest supported block (DELIMITER = max) */
1001 uint8_t ipad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1002 uint8_t opad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1005 if (hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC) {
/* RFC 3566 constant seeds for deriving K1, K2, K3 */
1006 static uint8_t qat_aes_xcbc_key_seed[
1007 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ] = {
1008 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1009 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1010 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1011 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1012 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1013 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1017 uint8_t *out = p_state_buf;
1021 in = rte_zmalloc("working mem for key",
1022 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ, 16);
1024 QAT_LOG(ERR, "Failed to alloc memory");
1028 rte_memcpy(in, qat_aes_xcbc_key_seed,
1029 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
/* encrypt each seed under the user key to derive the subkeys */
1030 for (x = 0; x < HASH_XCBC_PRECOMP_KEY_NUM; x++) {
1031 if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1034 (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ));
1036 (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ),
1037 0, ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1040 AES_encrypt(in, out, &enc_key);
1041 in += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1042 out += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1044 *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
/* 'in' was advanced in the loop; rewind to free the original pointer */
1045 rte_free(in - x*ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ);
1047 } else if ((hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_128) ||
1048 (hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_64)) {
1050 uint8_t *out = p_state_buf;
1053 memset(p_state_buf, 0, ICP_QAT_HW_GALOIS_H_SZ +
1054 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1055 ICP_QAT_HW_GALOIS_E_CTR0_SZ);
1056 in = rte_zmalloc("working mem for key",
1057 ICP_QAT_HW_GALOIS_H_SZ, 16);
1059 QAT_LOG(ERR, "Failed to alloc memory");
/* GHASH key H = AES_K(0^128): encrypt an all-zero block */
1063 memset(in, 0, ICP_QAT_HW_GALOIS_H_SZ);
1064 if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1068 AES_encrypt(in, out, &enc_key);
1069 *p_state_len = ICP_QAT_HW_GALOIS_H_SZ +
1070 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1071 ICP_QAT_HW_GALOIS_E_CTR0_SZ;
/* HMAC case from here on */
1076 block_size = qat_hash_get_block_size(hash_alg);
1077 if (block_size <= 0)
1079 /* init ipad and opad from key and xor with fixed values */
1080 memset(ipad, 0, block_size);
1081 memset(opad, 0, block_size);
1083 if (auth_keylen > (unsigned int)block_size) {
1084 QAT_LOG(ERR, "invalid keylen %u", auth_keylen);
1087 rte_memcpy(ipad, auth_key, auth_keylen);
1088 rte_memcpy(opad, auth_key, auth_keylen);
1090 for (i = 0; i < block_size; i++) {
1091 uint8_t *ipad_ptr = ipad + i;
1092 uint8_t *opad_ptr = opad + i;
1093 *ipad_ptr ^= HMAC_IPAD_VALUE;
1094 *opad_ptr ^= HMAC_OPAD_VALUE;
1097 /* do partial hash of ipad and copy to state1 */
1098 if (partial_hash_compute(hash_alg, ipad, p_state_buf)) {
1099 memset(ipad, 0, block_size);
1100 memset(opad, 0, block_size);
1101 QAT_LOG(ERR, "ipad precompute failed");
1106 * State len is a multiple of 8, so may be larger than the digest.
1107 * Put the partial hash of opad state_len bytes after state1
1109 *p_state_len = qat_hash_get_state1_size(hash_alg);
1110 if (partial_hash_compute(hash_alg, opad, p_state_buf + *p_state_len)) {
1111 memset(ipad, 0, block_size);
1112 memset(opad, 0, block_size);
1113 QAT_LOG(ERR, "opad precompute failed");
1117 /* don't leave data lying around */
1118 memset(ipad, 0, block_size);
1119 memset(opad, 0, block_size);
/*
 * qat_sym_session_init_common_hdr() - initialise the common firmware
 * request header for a LA (lookaside) service request: service type,
 * pointer-type flags, no-partial processing, 16-byte IV field, protocol
 * flag per @proto_flags, and state/digest handling defaults.
 *
 * NOTE(review): extract is missing interleaved lines (break statements,
 * the flags lvalue on the first build line) — embedded-number gaps are
 * truncation artifacts.
 */
1124 qat_sym_session_init_common_hdr(struct icp_qat_fw_comn_req_hdr *header,
1125 enum qat_sym_proto_flag proto_flags)
1128 ICP_QAT_FW_COMN_HDR_FLAGS_BUILD(ICP_QAT_FW_COMN_REQ_FLAG_SET);
1129 header->service_type = ICP_QAT_FW_COMN_REQ_CPM_FW_LA;
1130 header->comn_req_flags =
1131 ICP_QAT_FW_COMN_FLAGS_BUILD(QAT_COMN_CD_FLD_TYPE_64BIT_ADR,
1132 QAT_COMN_PTR_TYPE_FLAT);
1133 ICP_QAT_FW_LA_PARTIAL_SET(header->serv_specif_flags,
1134 ICP_QAT_FW_LA_PARTIAL_NONE);
1135 ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(header->serv_specif_flags,
1136 ICP_QAT_FW_CIPH_IV_16BYTE_DATA);
/* translate the session's protocol flag into firmware header bits */
1138 switch (proto_flags) {
1139 case QAT_CRYPTO_PROTO_FLAG_NONE:
1140 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1141 ICP_QAT_FW_LA_NO_PROTO);
1143 case QAT_CRYPTO_PROTO_FLAG_CCM:
1144 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1145 ICP_QAT_FW_LA_CCM_PROTO);
1147 case QAT_CRYPTO_PROTO_FLAG_GCM:
1148 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1149 ICP_QAT_FW_LA_GCM_PROTO);
1151 case QAT_CRYPTO_PROTO_FLAG_SNOW3G:
1152 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1153 ICP_QAT_FW_LA_SNOW_3G_PROTO);
/* ZUC uses a dedicated flag field, not the generic PROTO bits */
1155 case QAT_CRYPTO_PROTO_FLAG_ZUC:
1156 ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(header->serv_specif_flags,
1157 ICP_QAT_FW_LA_ZUC_3G_PROTO);
1161 ICP_QAT_FW_LA_UPDATE_STATE_SET(header->serv_specif_flags,
1162 ICP_QAT_FW_LA_NO_UPDATE_STATE);
1163 ICP_QAT_FW_LA_DIGEST_IN_BUFFER_SET(header->serv_specif_flags,
1164 ICP_QAT_FW_LA_NO_DIGEST_IN_BUFFER);
1168 * Snow3G and ZUC should never use this function
1169 * and set its protocol flag in both cipher and auth part of content
1170 * descriptor building function
1172 static enum qat_sym_proto_flag
1173 qat_get_crypto_proto_flag(uint16_t flags)
1175 int proto = ICP_QAT_FW_LA_PROTO_GET(flags);
1176 enum qat_sym_proto_flag qat_proto_flag =
1177 QAT_CRYPTO_PROTO_FLAG_NONE;
1180 case ICP_QAT_FW_LA_GCM_PROTO:
1181 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
1183 case ICP_QAT_FW_LA_CCM_PROTO:
1184 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
1188 return qat_proto_flag;
1191 int qat_sym_session_aead_create_cd_cipher(struct qat_sym_session *cdesc,
1193 uint32_t cipherkeylen)
1195 struct icp_qat_hw_cipher_algo_blk *cipher;
1196 struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
1197 struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
1198 struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
1199 void *ptr = &req_tmpl->cd_ctrl;
1200 struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
1201 struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
1202 enum icp_qat_hw_cipher_convert key_convert;
1203 enum qat_sym_proto_flag qat_proto_flag =
1204 QAT_CRYPTO_PROTO_FLAG_NONE;
1205 uint32_t total_key_size;
1206 uint16_t cipher_offset, cd_size;
1207 uint32_t wordIndex = 0;
1208 uint32_t *temp_key = NULL;
1210 if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER) {
1211 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1212 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1213 ICP_QAT_FW_SLICE_CIPHER);
1214 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1215 ICP_QAT_FW_SLICE_DRAM_WR);
1216 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1217 ICP_QAT_FW_LA_NO_RET_AUTH_RES);
1218 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1219 ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
1220 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1221 } else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
1222 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1223 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1224 ICP_QAT_FW_SLICE_CIPHER);
1225 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1226 ICP_QAT_FW_SLICE_AUTH);
1227 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1228 ICP_QAT_FW_SLICE_AUTH);
1229 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1230 ICP_QAT_FW_SLICE_DRAM_WR);
1231 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1232 } else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
1233 QAT_LOG(ERR, "Invalid param, must be a cipher command.");
1237 if (cdesc->qat_mode == ICP_QAT_HW_CIPHER_CTR_MODE) {
1239 * CTR Streaming ciphers are a special case. Decrypt = encrypt
1240 * Overriding default values previously set
1242 cdesc->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
1243 key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
1244 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2
1245 || cdesc->qat_cipher_alg ==
1246 ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)
1247 key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
1248 else if (cdesc->qat_dir == ICP_QAT_HW_CIPHER_ENCRYPT)
1249 key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
1251 key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
1253 if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
1254 total_key_size = ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ +
1255 ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
1256 cipher_cd_ctrl->cipher_state_sz =
1257 ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
1258 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
1260 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
1261 total_key_size = ICP_QAT_HW_KASUMI_F8_KEY_SZ;
1262 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_KASUMI_BLK_SZ >> 3;
1263 cipher_cd_ctrl->cipher_padding_sz =
1264 (2 * ICP_QAT_HW_KASUMI_BLK_SZ) >> 3;
1265 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES) {
1266 total_key_size = ICP_QAT_HW_3DES_KEY_SZ;
1267 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_3DES_BLK_SZ >> 3;
1269 qat_get_crypto_proto_flag(header->serv_specif_flags);
1270 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_DES) {
1271 total_key_size = ICP_QAT_HW_DES_KEY_SZ;
1272 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_DES_BLK_SZ >> 3;
1274 qat_get_crypto_proto_flag(header->serv_specif_flags);
1275 } else if (cdesc->qat_cipher_alg ==
1276 ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
1277 total_key_size = ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ +
1278 ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
1279 cipher_cd_ctrl->cipher_state_sz =
1280 ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
1281 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
1282 cdesc->min_qat_dev_gen = QAT_GEN2;
1284 total_key_size = cipherkeylen;
1285 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_AES_BLK_SZ >> 3;
1287 qat_get_crypto_proto_flag(header->serv_specif_flags);
1289 cipher_cd_ctrl->cipher_key_sz = total_key_size >> 3;
1290 cipher_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
1291 cipher_cd_ctrl->cipher_cfg_offset = cipher_offset >> 3;
1293 header->service_cmd_id = cdesc->qat_cmd;
1294 qat_sym_session_init_common_hdr(header, qat_proto_flag);
1296 cipher = (struct icp_qat_hw_cipher_algo_blk *)cdesc->cd_cur_ptr;
1297 cipher->cipher_config.val =
1298 ICP_QAT_HW_CIPHER_CONFIG_BUILD(cdesc->qat_mode,
1299 cdesc->qat_cipher_alg, key_convert,
1302 if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
1303 temp_key = (uint32_t *)(cdesc->cd_cur_ptr +
1304 sizeof(struct icp_qat_hw_cipher_config)
1306 memcpy(cipher->key, cipherkey, cipherkeylen);
1307 memcpy(temp_key, cipherkey, cipherkeylen);
1309 /* XOR Key with KASUMI F8 key modifier at 4 bytes level */
1310 for (wordIndex = 0; wordIndex < (cipherkeylen >> 2);
1312 temp_key[wordIndex] ^= KASUMI_F8_KEY_MODIFIER_4_BYTES;
1314 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1315 cipherkeylen + cipherkeylen;
1317 memcpy(cipher->key, cipherkey, cipherkeylen);
1318 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1322 if (total_key_size > cipherkeylen) {
1323 uint32_t padding_size = total_key_size-cipherkeylen;
1324 if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
1325 && (cipherkeylen == QAT_3DES_KEY_SZ_OPT2))
1326 /* K3 not provided so use K1 = K3*/
1327 memcpy(cdesc->cd_cur_ptr, cipherkey, padding_size);
1329 memset(cdesc->cd_cur_ptr, 0, padding_size);
1330 cdesc->cd_cur_ptr += padding_size;
1332 cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
1333 cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;
1338 int qat_sym_session_aead_create_cd_auth(struct qat_sym_session *cdesc,
1340 uint32_t authkeylen,
1341 uint32_t aad_length,
1342 uint32_t digestsize,
1343 unsigned int operation)
1345 struct icp_qat_hw_auth_setup *hash;
1346 struct icp_qat_hw_cipher_algo_blk *cipherconfig;
1347 struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
1348 struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
1349 struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
1350 void *ptr = &req_tmpl->cd_ctrl;
1351 struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
1352 struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
1353 struct icp_qat_fw_la_auth_req_params *auth_param =
1354 (struct icp_qat_fw_la_auth_req_params *)
1355 ((char *)&req_tmpl->serv_specif_rqpars +
1356 sizeof(struct icp_qat_fw_la_cipher_req_params));
1357 uint16_t state1_size = 0, state2_size = 0;
1358 uint16_t hash_offset, cd_size;
1359 uint32_t *aad_len = NULL;
1360 uint32_t wordIndex = 0;
1362 enum qat_sym_proto_flag qat_proto_flag =
1363 QAT_CRYPTO_PROTO_FLAG_NONE;
1365 if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_AUTH) {
1366 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1367 ICP_QAT_FW_SLICE_AUTH);
1368 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1369 ICP_QAT_FW_SLICE_DRAM_WR);
1370 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1371 } else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
1372 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1373 ICP_QAT_FW_SLICE_AUTH);
1374 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1375 ICP_QAT_FW_SLICE_CIPHER);
1376 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1377 ICP_QAT_FW_SLICE_CIPHER);
1378 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1379 ICP_QAT_FW_SLICE_DRAM_WR);
1380 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1381 } else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
1382 QAT_LOG(ERR, "Invalid param, must be a hash command.");
1386 if (operation == RTE_CRYPTO_AUTH_OP_VERIFY) {
1387 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1388 ICP_QAT_FW_LA_NO_RET_AUTH_RES);
1389 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1390 ICP_QAT_FW_LA_CMP_AUTH_RES);
1391 cdesc->auth_op = ICP_QAT_HW_AUTH_VERIFY;
1393 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1394 ICP_QAT_FW_LA_RET_AUTH_RES);
1395 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1396 ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
1397 cdesc->auth_op = ICP_QAT_HW_AUTH_GENERATE;
1401 * Setup the inner hash config
1403 hash_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
1404 hash = (struct icp_qat_hw_auth_setup *)cdesc->cd_cur_ptr;
1405 hash->auth_config.reserved = 0;
1406 hash->auth_config.config =
1407 ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE1,
1408 cdesc->qat_hash_alg, digestsize);
1410 if (cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2
1411 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_KASUMI_F9
1412 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3)
1413 hash->auth_counter.counter = 0;
1415 hash->auth_counter.counter = rte_bswap32(
1416 qat_hash_get_block_size(cdesc->qat_hash_alg));
1418 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_auth_setup);
1421 * cd_cur_ptr now points at the state1 information.
1423 switch (cdesc->qat_hash_alg) {
1424 case ICP_QAT_HW_AUTH_ALGO_SHA1:
1425 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA1,
1426 authkey, authkeylen, cdesc->cd_cur_ptr, &state1_size)) {
1427 QAT_LOG(ERR, "(SHA)precompute failed");
1430 state2_size = RTE_ALIGN_CEIL(ICP_QAT_HW_SHA1_STATE2_SZ, 8);
1432 case ICP_QAT_HW_AUTH_ALGO_SHA224:
1433 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA224,
1434 authkey, authkeylen, cdesc->cd_cur_ptr, &state1_size)) {
1435 QAT_LOG(ERR, "(SHA)precompute failed");
1438 state2_size = ICP_QAT_HW_SHA224_STATE2_SZ;
1440 case ICP_QAT_HW_AUTH_ALGO_SHA256:
1441 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA256,
1442 authkey, authkeylen, cdesc->cd_cur_ptr, &state1_size)) {
1443 QAT_LOG(ERR, "(SHA)precompute failed");
1446 state2_size = ICP_QAT_HW_SHA256_STATE2_SZ;
1448 case ICP_QAT_HW_AUTH_ALGO_SHA384:
1449 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA384,
1450 authkey, authkeylen, cdesc->cd_cur_ptr, &state1_size)) {
1451 QAT_LOG(ERR, "(SHA)precompute failed");
1454 state2_size = ICP_QAT_HW_SHA384_STATE2_SZ;
1456 case ICP_QAT_HW_AUTH_ALGO_SHA512:
1457 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA512,
1458 authkey, authkeylen, cdesc->cd_cur_ptr, &state1_size)) {
1459 QAT_LOG(ERR, "(SHA)precompute failed");
1462 state2_size = ICP_QAT_HW_SHA512_STATE2_SZ;
1464 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
1465 state1_size = ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1466 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC,
1467 authkey, authkeylen, cdesc->cd_cur_ptr + state1_size,
1469 QAT_LOG(ERR, "(XCBC)precompute failed");
1473 case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
1474 case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
1475 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
1476 state1_size = ICP_QAT_HW_GALOIS_128_STATE1_SZ;
1477 if (qat_sym_do_precomputes(cdesc->qat_hash_alg,
1478 authkey, authkeylen, cdesc->cd_cur_ptr + state1_size,
1480 QAT_LOG(ERR, "(GCM)precompute failed");
1484 * Write (the length of AAD) into bytes 16-19 of state2
1485 * in big-endian format. This field is 8 bytes
1487 auth_param->u2.aad_sz =
1488 RTE_ALIGN_CEIL(aad_length, 16);
1489 auth_param->hash_state_sz = (auth_param->u2.aad_sz) >> 3;
1491 aad_len = (uint32_t *)(cdesc->cd_cur_ptr +
1492 ICP_QAT_HW_GALOIS_128_STATE1_SZ +
1493 ICP_QAT_HW_GALOIS_H_SZ);
1494 *aad_len = rte_bswap32(aad_length);
1495 cdesc->aad_len = aad_length;
1497 case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
1498 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
1499 state1_size = qat_hash_get_state1_size(
1500 ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2);
1501 state2_size = ICP_QAT_HW_SNOW_3G_UIA2_STATE2_SZ;
1502 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
1504 cipherconfig = (struct icp_qat_hw_cipher_algo_blk *)
1505 (cdesc->cd_cur_ptr + state1_size + state2_size);
1506 cipherconfig->cipher_config.val =
1507 ICP_QAT_HW_CIPHER_CONFIG_BUILD(ICP_QAT_HW_CIPHER_ECB_MODE,
1508 ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2,
1509 ICP_QAT_HW_CIPHER_KEY_CONVERT,
1510 ICP_QAT_HW_CIPHER_ENCRYPT);
1511 memcpy(cipherconfig->key, authkey, authkeylen);
1512 memset(cipherconfig->key + authkeylen,
1513 0, ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ);
1514 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1515 authkeylen + ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
1516 auth_param->hash_state_sz = ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
1518 case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
1519 hash->auth_config.config =
1520 ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE0,
1521 cdesc->qat_hash_alg, digestsize);
1522 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
1523 state1_size = qat_hash_get_state1_size(
1524 ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3);
1525 state2_size = ICP_QAT_HW_ZUC_3G_EIA3_STATE2_SZ;
1526 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size
1527 + ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ);
1529 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1530 cdesc->cd_cur_ptr += state1_size + state2_size
1531 + ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
1532 auth_param->hash_state_sz = ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
1533 cdesc->min_qat_dev_gen = QAT_GEN2;
1536 case ICP_QAT_HW_AUTH_ALGO_MD5:
1537 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_MD5,
1538 authkey, authkeylen, cdesc->cd_cur_ptr,
1540 QAT_LOG(ERR, "(MD5)precompute failed");
1543 state2_size = ICP_QAT_HW_MD5_STATE2_SZ;
1545 case ICP_QAT_HW_AUTH_ALGO_NULL:
1546 state1_size = qat_hash_get_state1_size(
1547 ICP_QAT_HW_AUTH_ALGO_NULL);
1548 state2_size = ICP_QAT_HW_NULL_STATE2_SZ;
1550 case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
1551 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
1552 state1_size = qat_hash_get_state1_size(
1553 ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC);
1554 state2_size = ICP_QAT_HW_AES_CBC_MAC_KEY_SZ +
1555 ICP_QAT_HW_AES_CCM_CBC_E_CTR0_SZ;
1557 if (aad_length > 0) {
1558 aad_length += ICP_QAT_HW_CCM_AAD_B0_LEN +
1559 ICP_QAT_HW_CCM_AAD_LEN_INFO;
1560 auth_param->u2.aad_sz =
1561 RTE_ALIGN_CEIL(aad_length,
1562 ICP_QAT_HW_CCM_AAD_ALIGNMENT);
1564 auth_param->u2.aad_sz = ICP_QAT_HW_CCM_AAD_B0_LEN;
1566 cdesc->aad_len = aad_length;
1567 hash->auth_counter.counter = 0;
1569 hash_cd_ctrl->outer_prefix_sz = digestsize;
1570 auth_param->hash_state_sz = digestsize;
1572 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1574 case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
1575 state1_size = qat_hash_get_state1_size(
1576 ICP_QAT_HW_AUTH_ALGO_KASUMI_F9);
1577 state2_size = ICP_QAT_HW_KASUMI_F9_STATE2_SZ;
1578 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
1579 pTempKey = (uint32_t *)(cdesc->cd_cur_ptr + state1_size
1582 * The Inner Hash Initial State2 block must contain IK
1583 * (Initialisation Key), followed by IK XOR-ed with KM
1584 * (Key Modifier): IK||(IK^KM).
1586 /* write the auth key */
1587 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1588 /* initialise temp key with auth key */
1589 memcpy(pTempKey, authkey, authkeylen);
1590 /* XOR Key with KASUMI F9 key modifier at 4 bytes level */
1591 for (wordIndex = 0; wordIndex < (authkeylen >> 2); wordIndex++)
1592 pTempKey[wordIndex] ^= KASUMI_F9_KEY_MODIFIER_4_BYTES;
1595 QAT_LOG(ERR, "Invalid HASH alg %u", cdesc->qat_hash_alg);
1599 /* Request template setup */
1600 qat_sym_session_init_common_hdr(header, qat_proto_flag);
1601 header->service_cmd_id = cdesc->qat_cmd;
1603 /* Auth CD config setup */
1604 hash_cd_ctrl->hash_cfg_offset = hash_offset >> 3;
1605 hash_cd_ctrl->hash_flags = ICP_QAT_FW_AUTH_HDR_FLAG_NO_NESTED;
1606 hash_cd_ctrl->inner_res_sz = digestsize;
1607 hash_cd_ctrl->final_sz = digestsize;
1608 hash_cd_ctrl->inner_state1_sz = state1_size;
1609 auth_param->auth_res_sz = digestsize;
1611 hash_cd_ctrl->inner_state2_sz = state2_size;
1612 hash_cd_ctrl->inner_state2_offset = hash_cd_ctrl->hash_cfg_offset +
1613 ((sizeof(struct icp_qat_hw_auth_setup) +
1614 RTE_ALIGN_CEIL(hash_cd_ctrl->inner_state1_sz, 8))
1617 cdesc->cd_cur_ptr += state1_size + state2_size;
1618 cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
1620 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1621 cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;
1626 int qat_sym_validate_aes_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1629 case ICP_QAT_HW_AES_128_KEY_SZ:
1630 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
1632 case ICP_QAT_HW_AES_192_KEY_SZ:
1633 *alg = ICP_QAT_HW_CIPHER_ALGO_AES192;
1635 case ICP_QAT_HW_AES_256_KEY_SZ:
1636 *alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
1644 int qat_sym_validate_aes_docsisbpi_key(int key_len,
1645 enum icp_qat_hw_cipher_algo *alg)
1648 case ICP_QAT_HW_AES_128_KEY_SZ:
1649 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
1657 int qat_sym_validate_snow3g_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1660 case ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ:
1661 *alg = ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2;
1669 int qat_sym_validate_kasumi_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1672 case ICP_QAT_HW_KASUMI_KEY_SZ:
1673 *alg = ICP_QAT_HW_CIPHER_ALGO_KASUMI;
1681 int qat_sym_validate_des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1684 case ICP_QAT_HW_DES_KEY_SZ:
1685 *alg = ICP_QAT_HW_CIPHER_ALGO_DES;
1693 int qat_sym_validate_3des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1696 case QAT_3DES_KEY_SZ_OPT1:
1697 case QAT_3DES_KEY_SZ_OPT2:
1698 *alg = ICP_QAT_HW_CIPHER_ALGO_3DES;
1706 int qat_sym_validate_zuc_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1709 case ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ:
1710 *alg = ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3;