/* SPDX-License-Identifier: (BSD-3-Clause OR GPL-2.0)
 * Copyright(c) 2015-2022 Intel Corporation
 */

#define OPENSSL_API_COMPAT 0x10100000L

#include <openssl/sha.h>	/* Needed to calculate pre-compute values */
#include <openssl/aes.h>	/* Needed to calculate pre-compute values */
#include <openssl/md5.h>	/* Needed to calculate pre-compute values */
#include <openssl/evp.h>	/* Needed for bpi runt block processing */

#include <rte_memcpy.h>
#include <rte_common.h>
#include <rte_spinlock.h>
#include <rte_byteorder.h>
#include <rte_log.h>
#include <rte_malloc.h>
#include <rte_crypto_sym.h>
#ifdef RTE_LIB_SECURITY
#include <rte_security.h>
#endif

#include "qat_logs.h"
#include "qat_sym_session.h"
#include "qat_sym.h"
/* SHA1 - 20 bytes - Initialiser state can be found in FIPS stds 180-2 */
static const uint8_t sha1InitialState[] = {
	0x67, 0x45, 0x23, 0x01, 0xef, 0xcd, 0xab, 0x89, 0x98, 0xba,
	0xdc, 0xfe, 0x10, 0x32, 0x54, 0x76, 0xc3, 0xd2, 0xe1, 0xf0};

/* SHA 224 - 32 bytes - Initialiser state can be found in FIPS stds 180-2 */
static const uint8_t sha224InitialState[] = {
	0xc1, 0x05, 0x9e, 0xd8, 0x36, 0x7c, 0xd5, 0x07, 0x30, 0x70, 0xdd,
	0x17, 0xf7, 0x0e, 0x59, 0x39, 0xff, 0xc0, 0x0b, 0x31, 0x68, 0x58,
	0x15, 0x11, 0x64, 0xf9, 0x8f, 0xa7, 0xbe, 0xfa, 0x4f, 0xa4};

/* SHA 256 - 32 bytes - Initialiser state can be found in FIPS stds 180-2 */
static const uint8_t sha256InitialState[] = {
	0x6a, 0x09, 0xe6, 0x67, 0xbb, 0x67, 0xae, 0x85, 0x3c, 0x6e, 0xf3,
	0x72, 0xa5, 0x4f, 0xf5, 0x3a, 0x51, 0x0e, 0x52, 0x7f, 0x9b, 0x05,
	0x68, 0x8c, 0x1f, 0x83, 0xd9, 0xab, 0x5b, 0xe0, 0xcd, 0x19};

/* SHA 384 - 64 bytes - Initialiser state can be found in FIPS stds 180-2 */
static const uint8_t sha384InitialState[] = {
	0xcb, 0xbb, 0x9d, 0x5d, 0xc1, 0x05, 0x9e, 0xd8, 0x62, 0x9a, 0x29,
	0x2a, 0x36, 0x7c, 0xd5, 0x07, 0x91, 0x59, 0x01, 0x5a, 0x30, 0x70,
	0xdd, 0x17, 0x15, 0x2f, 0xec, 0xd8, 0xf7, 0x0e, 0x59, 0x39, 0x67,
	0x33, 0x26, 0x67, 0xff, 0xc0, 0x0b, 0x31, 0x8e, 0xb4, 0x4a, 0x87,
	0x68, 0x58, 0x15, 0x11, 0xdb, 0x0c, 0x2e, 0x0d, 0x64, 0xf9, 0x8f,
	0xa7, 0x47, 0xb5, 0x48, 0x1d, 0xbe, 0xfa, 0x4f, 0xa4};

/* SHA 512 - 64 bytes - Initialiser state can be found in FIPS stds 180-2 */
static const uint8_t sha512InitialState[] = {
	0x6a, 0x09, 0xe6, 0x67, 0xf3, 0xbc, 0xc9, 0x08, 0xbb, 0x67, 0xae,
	0x85, 0x84, 0xca, 0xa7, 0x3b, 0x3c, 0x6e, 0xf3, 0x72, 0xfe, 0x94,
	0xf8, 0x2b, 0xa5, 0x4f, 0xf5, 0x3a, 0x5f, 0x1d, 0x36, 0xf1, 0x51,
	0x0e, 0x52, 0x7f, 0xad, 0xe6, 0x82, 0xd1, 0x9b, 0x05, 0x68, 0x8c,
	0x2b, 0x3e, 0x6c, 0x1f, 0x1f, 0x83, 0xd9, 0xab, 0xfb, 0x41, 0xbd,
	0x6b, 0x5b, 0xe0, 0xcd, 0x19, 0x13, 0x7e, 0x21, 0x79};
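/*
 * Each array above is the standard initial hash value H(0) for the
 * algorithm (FIPS 180-2), stored as big-endian words. It is copied into
 * state1 of the content descriptor for plain-hash (auth MODE0) sessions,
 * where no HMAC key precompute is required.
 */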
static int
qat_sym_cd_cipher_set(struct qat_sym_session *cd,
						const uint8_t *enckey,
						uint32_t enckeylen);

static int
qat_sym_cd_auth_set(struct qat_sym_session *cdesc,
						const uint8_t *authkey,
						uint32_t authkeylen,
						uint32_t aad_length,
						uint32_t digestsize,
						unsigned int operation);

static void
qat_sym_session_init_common_hdr(struct qat_sym_session *session);

/* Req/cd init functions */
static void
qat_sym_session_finalize(struct qat_sym_session *session)
{
	qat_sym_session_init_common_hdr(session);
}
/** Frees a context previously created
 *  Depends on openssl libcrypto
 */
static void
bpi_cipher_ctx_free(void *bpi_ctx)
{
	if (bpi_ctx != NULL)
		EVP_CIPHER_CTX_free((EVP_CIPHER_CTX *)bpi_ctx);
}

/** Creates a context in either AES or DES in ECB mode
 *  Depends on openssl libcrypto
 */
static int
bpi_cipher_ctx_init(enum rte_crypto_cipher_algorithm cryptodev_algo,
		enum rte_crypto_cipher_operation direction __rte_unused,
		const uint8_t *key, uint16_t key_length, void **ctx)
{
	const EVP_CIPHER *algo = NULL;
	int ret;

	*ctx = EVP_CIPHER_CTX_new();
	if (*ctx == NULL) {
		ret = -ENOMEM;
		goto ctx_init_err;
	}

	if (cryptodev_algo == RTE_CRYPTO_CIPHER_DES_DOCSISBPI)
		algo = EVP_des_ecb();
	else
		if (key_length == ICP_QAT_HW_AES_128_KEY_SZ)
			algo = EVP_aes_128_ecb();
		else
			algo = EVP_aes_256_ecb();

	/* IV will be ECB encrypted whether direction is encrypt or decrypt */
	if (EVP_EncryptInit_ex(*ctx, algo, NULL, key, 0) != 1) {
		ret = -EINVAL;
		goto ctx_init_err;
	}

	return 0;

ctx_init_err:
	if (*ctx != NULL) {
		EVP_CIPHER_CTX_free(*ctx);
		*ctx = NULL;
	}
	return ret;
}
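/*
 * For DOCSIS BPI, QAT processes the full cipher blocks and the driver
 * handles any trailing partial ("runt") block on the host: the runt is
 * ciphered CFB-style, using the block cipher applied to the IV/previous
 * ciphertext block. Both directions ECB-*encrypt* that block, which is
 * why only an encrypt context is ever created above.
 */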
static int
qat_is_cipher_alg_supported(enum rte_crypto_cipher_algorithm algo,
		struct qat_cryptodev_private *internals)
{
	int i = 0;
	const struct rte_cryptodev_capabilities *capability;

	while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
			RTE_CRYPTO_OP_TYPE_UNDEFINED) {
		if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
			continue;

		if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_CIPHER)
			continue;

		if (capability->sym.cipher.algo == algo)
			return 1;
	}
	return 0;
}

static int
qat_is_auth_alg_supported(enum rte_crypto_auth_algorithm algo,
		struct qat_cryptodev_private *internals)
{
	int i = 0;
	const struct rte_cryptodev_capabilities *capability;

	while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
			RTE_CRYPTO_OP_TYPE_UNDEFINED) {
		if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
			continue;

		if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_AUTH)
			continue;

		if (capability->sym.auth.algo == algo)
			return 1;
	}
	return 0;
}
void
qat_sym_session_clear(struct rte_cryptodev *dev,
		struct rte_cryptodev_sym_session *sess)
{
	uint8_t index = dev->driver_id;
	void *sess_priv = get_sym_session_private_data(sess, index);
	struct qat_sym_session *s = (struct qat_sym_session *)sess_priv;

	if (sess_priv) {
		if (s->bpi_ctx)
			bpi_cipher_ctx_free(s->bpi_ctx);
		memset(s, 0, qat_sym_session_get_private_size(dev));
		struct rte_mempool *sess_mp = rte_mempool_from_obj(sess_priv);

		set_sym_session_private_data(sess, index, NULL);
		rte_mempool_put(sess_mp, sess_priv);
	}
}
static int
qat_get_cmd_id(const struct rte_crypto_sym_xform *xform)
{
	/* Cipher Only */
	if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER && xform->next == NULL)
		return ICP_QAT_FW_LA_CMD_CIPHER;

	/* Authentication Only */
	if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH && xform->next == NULL)
		return ICP_QAT_FW_LA_CMD_AUTH;

	/* AEAD */
	if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
		/* AES-GCM and AES-CCM work in opposite orders: GCM first
		 * encrypts and then generates the hash, whereas AES-CCM
		 * first generates the hash and then encrypts. A similar
		 * relation applies to decryption.
		 */
		if (xform->aead.op == RTE_CRYPTO_AEAD_OP_ENCRYPT)
			if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
				return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
			else
				return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
		else
			if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
				return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
			else
				return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
	}

	if (xform->next == NULL)
		return -1;

	/* Cipher then Authenticate */
	if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
			xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
		return ICP_QAT_FW_LA_CMD_CIPHER_HASH;

	/* Authenticate then Cipher */
	if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH &&
			xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
		return ICP_QAT_FW_LA_CMD_HASH_CIPHER;

	return -1;
}
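/*
 * Summary of the mapping implemented above:
 *   cipher only                     -> ICP_QAT_FW_LA_CMD_CIPHER
 *   auth only                       -> ICP_QAT_FW_LA_CMD_AUTH
 *   AEAD GCM encrypt / CCM decrypt  -> ICP_QAT_FW_LA_CMD_CIPHER_HASH
 *   AEAD GCM decrypt / CCM encrypt  -> ICP_QAT_FW_LA_CMD_HASH_CIPHER
 *   cipher then auth chain          -> ICP_QAT_FW_LA_CMD_CIPHER_HASH
 *   auth then cipher chain          -> ICP_QAT_FW_LA_CMD_HASH_CIPHER
 * Anything else is rejected with -1.
 */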
static struct rte_crypto_auth_xform *
qat_get_auth_xform(struct rte_crypto_sym_xform *xform)
{
	do {
		if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH)
			return &xform->auth;

		xform = xform->next;
	} while (xform);

	return NULL;
}

static struct rte_crypto_cipher_xform *
qat_get_cipher_xform(struct rte_crypto_sym_xform *xform)
{
	do {
		if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
			return &xform->cipher;

		xform = xform->next;
	} while (xform);

	return NULL;
}
int
qat_sym_session_configure_cipher(struct rte_cryptodev *dev,
		struct rte_crypto_sym_xform *xform,
		struct qat_sym_session *session)
{
	struct qat_cryptodev_private *internals = dev->data->dev_private;
	struct rte_crypto_cipher_xform *cipher_xform = NULL;
	enum qat_device_gen qat_dev_gen =
				internals->qat_dev->qat_dev_gen;
	int ret;

	/* Get cipher xform from crypto xform chain */
	cipher_xform = qat_get_cipher_xform(xform);

	session->cipher_iv.offset = cipher_xform->iv.offset;
	session->cipher_iv.length = cipher_xform->iv.length;

	switch (cipher_xform->algo) {
	case RTE_CRYPTO_CIPHER_AES_CBC:
		if (qat_sym_validate_aes_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_AES_CTR:
		if (qat_sym_validate_aes_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		if (qat_dev_gen == QAT_GEN4)
			session->is_ucs = 1;
		break;
	case RTE_CRYPTO_CIPHER_SNOW3G_UEA2:
		if (qat_sym_validate_snow3g_key(cipher_xform->key.length,
					&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid SNOW 3G cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
		break;
	case RTE_CRYPTO_CIPHER_NULL:
		session->qat_cipher_alg = ICP_QAT_HW_CIPHER_ALGO_NULL;
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		break;
	case RTE_CRYPTO_CIPHER_KASUMI_F8:
		if (qat_sym_validate_kasumi_key(cipher_xform->key.length,
					&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid KASUMI cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_F8_MODE;
		break;
	case RTE_CRYPTO_CIPHER_3DES_CBC:
		if (qat_sym_validate_3des_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid 3DES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_DES_CBC:
		if (qat_sym_validate_des_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid DES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_3DES_CTR:
		if (qat_sym_validate_3des_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid 3DES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		break;
	case RTE_CRYPTO_CIPHER_DES_DOCSISBPI:
		ret = bpi_cipher_ctx_init(
					cipher_xform->algo,
					cipher_xform->op,
					cipher_xform->key.data,
					cipher_xform->key.length,
					&session->bpi_ctx);
		if (ret != 0) {
			QAT_LOG(ERR, "failed to create DES BPI ctx");
			goto error_out;
		}
		if (qat_sym_validate_des_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid DES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_AES_DOCSISBPI:
		ret = bpi_cipher_ctx_init(
					cipher_xform->algo,
					cipher_xform->op,
					cipher_xform->key.data,
					cipher_xform->key.length,
					&session->bpi_ctx);
		if (ret != 0) {
			QAT_LOG(ERR, "failed to create AES BPI ctx");
			goto error_out;
		}
		if (qat_sym_validate_aes_docsisbpi_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES DOCSISBPI key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_ZUC_EEA3:
		if (!qat_is_cipher_alg_supported(
			cipher_xform->algo, internals)) {
			QAT_LOG(ERR, "%s not supported on this device",
				rte_crypto_cipher_algorithm_strings
					[cipher_xform->algo]);
			ret = -ENOTSUP;
			goto error_out;
		}
		if (qat_sym_validate_zuc_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid ZUC cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
		break;
	case RTE_CRYPTO_CIPHER_AES_XTS:
		if ((cipher_xform->key.length / 2) == ICP_QAT_HW_AES_192_KEY_SZ) {
			QAT_LOG(ERR, "AES-XTS-192 not supported");
			ret = -EINVAL;
			goto error_out;
		}
		if (qat_sym_validate_aes_key((cipher_xform->key.length / 2),
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES-XTS cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_XTS_MODE;
		break;
	case RTE_CRYPTO_CIPHER_3DES_ECB:
	case RTE_CRYPTO_CIPHER_AES_ECB:
	case RTE_CRYPTO_CIPHER_AES_F8:
	case RTE_CRYPTO_CIPHER_ARC4:
		QAT_LOG(ERR, "Crypto QAT PMD: Unsupported Cipher alg %u",
				cipher_xform->algo);
		ret = -ENOTSUP;
		goto error_out;
	default:
		QAT_LOG(ERR, "Crypto: Undefined Cipher specified %u",
				cipher_xform->algo);
		ret = -EINVAL;
		goto error_out;
	}

	if (cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
		session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
	else
		session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;

	if (qat_sym_cd_cipher_set(session,
						cipher_xform->key.data,
						cipher_xform->key.length)) {
		ret = -EINVAL;
		goto error_out;
	}

	return 0;

error_out:
	if (session->bpi_ctx) {
		bpi_cipher_ctx_free(session->bpi_ctx);
		session->bpi_ctx = NULL;
	}
	return ret;
}
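/*
 * Session configure entry point, registered as the PMD's
 * sym_session_configure op and reached through the cryptodev library
 * with the session mempool the application supplied.
 */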
int
qat_sym_session_configure(struct rte_cryptodev *dev,
		struct rte_crypto_sym_xform *xform,
		struct rte_cryptodev_sym_session *sess,
		struct rte_mempool *mempool)
{
	void *sess_private_data;
	int ret;

	if (rte_mempool_get(mempool, &sess_private_data)) {
		QAT_LOG(ERR,
			"Couldn't get object from session mempool");
		return -ENOMEM;
	}

	ret = qat_sym_session_set_parameters(dev, xform, sess_private_data);
	if (ret != 0) {
		QAT_LOG(ERR,
		    "Crypto QAT PMD: failed to configure session parameters");

		/* Return session to mempool */
		rte_mempool_put(mempool, sess_private_data);
		return ret;
	}

	set_sym_session_private_data(sess, dev->driver_id,
		sess_private_data);

	return 0;
}
int
qat_sym_session_set_parameters(struct rte_cryptodev *dev,
		struct rte_crypto_sym_xform *xform, void *session_private)
{
	struct qat_sym_session *session = session_private;
	struct qat_cryptodev_private *internals = dev->data->dev_private;
	enum qat_device_gen qat_dev_gen = internals->qat_dev->qat_dev_gen;
	int ret;
	int qat_cmd_id;

	/* Verify the session physical address is known */
	rte_iova_t session_paddr = rte_mempool_virt2iova(session);
	if (session_paddr == 0 || session_paddr == RTE_BAD_IOVA) {
		QAT_LOG(ERR,
			"Session physical address unknown. Bad memory pool.");
		return -EINVAL;
	}

	memset(session, 0, sizeof(*session));
	/* Set context descriptor physical address */
	session->cd_paddr = session_paddr +
			offsetof(struct qat_sym_session, cd);

	session->dev_id = internals->dev_id;
	session->qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_NONE;

	/* Get requested QAT command id */
	qat_cmd_id = qat_get_cmd_id(xform);
	if (qat_cmd_id < 0 || qat_cmd_id >= ICP_QAT_FW_LA_CMD_DELIMITER) {
		QAT_LOG(ERR, "Unsupported xform chain requested");
		return -ENOTSUP;
	}
	session->qat_cmd = (enum icp_qat_fw_la_cmd_id)qat_cmd_id;
	switch (session->qat_cmd) {
	case ICP_QAT_FW_LA_CMD_CIPHER:
		ret = qat_sym_session_configure_cipher(dev, xform, session);
		if (ret < 0)
			return ret;
		break;
	case ICP_QAT_FW_LA_CMD_AUTH:
		ret = qat_sym_session_configure_auth(dev, xform, session);
		if (ret < 0)
			return ret;
		session->is_single_pass_gmac =
				qat_dev_gen == QAT_GEN3 &&
				xform->auth.algo == RTE_CRYPTO_AUTH_AES_GMAC &&
				xform->auth.iv.length == QAT_AES_GCM_SPC_IV_SIZE;
		break;
	case ICP_QAT_FW_LA_CMD_CIPHER_HASH:
		if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
			ret = qat_sym_session_configure_aead(dev, xform,
					session);
			if (ret < 0)
				return ret;
		} else {
			ret = qat_sym_session_configure_cipher(dev,
					xform, session);
			if (ret < 0)
				return ret;
			ret = qat_sym_session_configure_auth(dev,
					xform, session);
			if (ret < 0)
				return ret;
		}
		break;
	case ICP_QAT_FW_LA_CMD_HASH_CIPHER:
		if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
			ret = qat_sym_session_configure_aead(dev, xform,
					session);
			if (ret < 0)
				return ret;
		} else {
			ret = qat_sym_session_configure_auth(dev,
					xform, session);
			if (ret < 0)
				return ret;
			ret = qat_sym_session_configure_cipher(dev,
					xform, session);
			if (ret < 0)
				return ret;
		}
		break;
	case ICP_QAT_FW_LA_CMD_TRNG_GET_RANDOM:
	case ICP_QAT_FW_LA_CMD_TRNG_TEST:
	case ICP_QAT_FW_LA_CMD_SSL3_KEY_DERIVE:
	case ICP_QAT_FW_LA_CMD_TLS_V1_1_KEY_DERIVE:
	case ICP_QAT_FW_LA_CMD_TLS_V1_2_KEY_DERIVE:
	case ICP_QAT_FW_LA_CMD_MGF1:
	case ICP_QAT_FW_LA_CMD_AUTH_PRE_COMP:
	case ICP_QAT_FW_LA_CMD_CIPHER_PRE_COMP:
	case ICP_QAT_FW_LA_CMD_DELIMITER:
		QAT_LOG(ERR, "Unsupported Service %u",
				session->qat_cmd);
		return -ENOTSUP;
	default:
		QAT_LOG(ERR, "Unsupported Service %u",
				session->qat_cmd);
		return -ENOTSUP;
	}
	qat_sym_session_finalize(session);

	return qat_sym_gen_dev_ops[qat_dev_gen].set_session((void *)dev,
			(void *)session);
}
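/*
 * Single-pass (SPC) sessions run the whole AEAD operation in the cipher
 * slice, so the request is issued as a plain CIPHER command rather than a
 * chained CIPHER_HASH/HASH_CIPHER pair; AAD length, digest length and
 * direction are carried in the cipher config and request parameters.
 */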
static int
qat_sym_session_handle_single_pass(struct qat_sym_session *session,
		const struct rte_crypto_aead_xform *aead_xform)
{
	session->is_single_pass = 1;
	session->is_auth = 1;
	session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER;
	/* Chacha-Poly is a special case that uses QAT CTR mode */
	if (aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM)
		session->qat_mode = ICP_QAT_HW_CIPHER_AEAD_MODE;
	else
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;

	session->cipher_iv.offset = aead_xform->iv.offset;
	session->cipher_iv.length = aead_xform->iv.length;
	session->aad_len = aead_xform->aad_length;
	session->digest_length = aead_xform->digest_length;

	if (aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
		session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
		session->auth_op = ICP_QAT_HW_AUTH_GENERATE;
	} else {
		session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
		session->auth_op = ICP_QAT_HW_AUTH_VERIFY;
	}

	return 0;
}
int
qat_sym_session_configure_auth(struct rte_cryptodev *dev,
				struct rte_crypto_sym_xform *xform,
				struct qat_sym_session *session)
{
	struct rte_crypto_auth_xform *auth_xform = qat_get_auth_xform(xform);
	struct qat_cryptodev_private *internals = dev->data->dev_private;
	const uint8_t *key_data = auth_xform->key.data;
	uint8_t key_length = auth_xform->key.length;
	enum qat_device_gen qat_dev_gen =
			internals->qat_dev->qat_dev_gen;

	session->aes_cmac = 0;
	session->auth_key_length = auth_xform->key.length;
	session->auth_iv.offset = auth_xform->iv.offset;
	session->auth_iv.length = auth_xform->iv.length;
	session->auth_mode = ICP_QAT_HW_AUTH_MODE1;
	session->is_auth = 1;
	session->digest_length = auth_xform->digest_length;

	switch (auth_xform->algo) {
	case RTE_CRYPTO_AUTH_SHA1:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA1;
		session->auth_mode = ICP_QAT_HW_AUTH_MODE0;
		break;
	case RTE_CRYPTO_AUTH_SHA224:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA224;
		session->auth_mode = ICP_QAT_HW_AUTH_MODE0;
		break;
	case RTE_CRYPTO_AUTH_SHA256:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA256;
		session->auth_mode = ICP_QAT_HW_AUTH_MODE0;
		break;
	case RTE_CRYPTO_AUTH_SHA384:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA384;
		session->auth_mode = ICP_QAT_HW_AUTH_MODE0;
		break;
	case RTE_CRYPTO_AUTH_SHA512:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA512;
		session->auth_mode = ICP_QAT_HW_AUTH_MODE0;
		break;
	case RTE_CRYPTO_AUTH_SHA1_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA1;
		break;
	case RTE_CRYPTO_AUTH_SHA224_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA224;
		break;
	case RTE_CRYPTO_AUTH_SHA256_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA256;
		break;
	case RTE_CRYPTO_AUTH_SHA384_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA384;
		break;
	case RTE_CRYPTO_AUTH_SHA512_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA512;
		break;
	case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
		break;
	case RTE_CRYPTO_AUTH_AES_CMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
		session->aes_cmac = 1;
		break;
	case RTE_CRYPTO_AUTH_AES_GMAC:
		if (qat_sym_validate_aes_key(auth_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES key size");
			return -EINVAL;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
		if (session->auth_iv.length == 0)
			session->auth_iv.length = AES_GCM_J0_LEN;
		else
			session->is_iv12B = 1;
		if (qat_dev_gen == QAT_GEN4) {
			session->is_cnt_zero = 1;
			session->is_ucs = 1;
		}
		break;
	case RTE_CRYPTO_AUTH_SNOW3G_UIA2:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2;
		break;
	case RTE_CRYPTO_AUTH_MD5_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_MD5;
		break;
	case RTE_CRYPTO_AUTH_NULL:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_NULL;
		break;
	case RTE_CRYPTO_AUTH_KASUMI_F9:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_KASUMI_F9;
		break;
	case RTE_CRYPTO_AUTH_ZUC_EIA3:
		if (!qat_is_auth_alg_supported(auth_xform->algo, internals)) {
			QAT_LOG(ERR, "%s not supported on this device",
				rte_crypto_auth_algorithm_strings
				[auth_xform->algo]);
			return -ENOTSUP;
		}
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3;
		break;
	case RTE_CRYPTO_AUTH_MD5:
	case RTE_CRYPTO_AUTH_AES_CBC_MAC:
		QAT_LOG(ERR, "Crypto: Unsupported hash alg %u",
				auth_xform->algo);
		return -ENOTSUP;
	default:
		QAT_LOG(ERR, "Crypto: Undefined Hash algo %u specified",
				auth_xform->algo);
		return -EINVAL;
	}

	if (auth_xform->algo == RTE_CRYPTO_AUTH_AES_GMAC) {
		session->is_gmac = 1;
		if (auth_xform->op == RTE_CRYPTO_AUTH_OP_GENERATE) {
			session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER_HASH;
			session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
			/*
			 * It needs to create cipher desc content first,
			 * then authentication
			 */
			if (qat_sym_cd_cipher_set(session,
						auth_xform->key.data,
						auth_xform->key.length))
				return -EINVAL;

			if (qat_sym_cd_auth_set(session,
						key_data,
						key_length,
						0,
						auth_xform->digest_length,
						auth_xform->op))
				return -EINVAL;
		} else {
			session->qat_cmd = ICP_QAT_FW_LA_CMD_HASH_CIPHER;
			session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
			/*
			 * It needs to create authentication desc content
			 * first, then cipher
			 */
			if (qat_sym_cd_auth_set(session,
					key_data,
					key_length,
					0,
					auth_xform->digest_length,
					auth_xform->op))
				return -EINVAL;

			if (qat_sym_cd_cipher_set(session,
						auth_xform->key.data,
						auth_xform->key.length))
				return -EINVAL;
		}
	} else {
		if (qat_sym_cd_auth_set(session,
				key_data,
				key_length,
				0,
				auth_xform->digest_length,
				auth_xform->op))
			return -EINVAL;
	}

	return 0;
}
int
qat_sym_session_configure_aead(struct rte_cryptodev *dev,
				struct rte_crypto_sym_xform *xform,
				struct qat_sym_session *session)
{
	struct rte_crypto_aead_xform *aead_xform = &xform->aead;
	enum rte_crypto_auth_operation crypto_operation;
	struct qat_cryptodev_private *internals =
			dev->data->dev_private;
	enum qat_device_gen qat_dev_gen =
			internals->qat_dev->qat_dev_gen;

	/*
	 * Store AEAD IV parameters as cipher IV,
	 * to avoid unnecessary memory usage
	 */
	session->cipher_iv.offset = xform->aead.iv.offset;
	session->cipher_iv.length = xform->aead.iv.length;

	session->auth_mode = ICP_QAT_HW_AUTH_MODE1;
	session->is_auth = 1;
	session->digest_length = aead_xform->digest_length;

	session->is_single_pass = 0;
	switch (aead_xform->algo) {
	case RTE_CRYPTO_AEAD_AES_GCM:
		if (qat_sym_validate_aes_key(aead_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES key size");
			return -EINVAL;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;

		if (qat_dev_gen == QAT_GEN4)
			session->is_ucs = 1;
		if (session->cipher_iv.length == 0) {
			session->cipher_iv.length = AES_GCM_J0_LEN;
			break;
		}
		session->is_iv12B = 1;
		if (qat_dev_gen < QAT_GEN3)
			break;
		qat_sym_session_handle_single_pass(session,
				aead_xform);
		break;
	case RTE_CRYPTO_AEAD_AES_CCM:
		if (qat_sym_validate_aes_key(aead_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES key size");
			return -EINVAL;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC;
		if (qat_dev_gen == QAT_GEN4)
			session->is_ucs = 1;
		break;
	case RTE_CRYPTO_AEAD_CHACHA20_POLY1305:
		if (aead_xform->key.length != ICP_QAT_HW_CHACHAPOLY_KEY_SZ)
			return -EINVAL;
		if (qat_dev_gen == QAT_GEN4)
			session->is_ucs = 1;
		session->qat_cipher_alg =
				ICP_QAT_HW_CIPHER_ALGO_CHACHA20_POLY1305;
		qat_sym_session_handle_single_pass(session,
						aead_xform);
		break;
	default:
		QAT_LOG(ERR, "Crypto: Undefined AEAD specified %u",
				aead_xform->algo);
		return -EINVAL;
	}

	if (session->is_single_pass) {
		if (qat_sym_cd_cipher_set(session,
				aead_xform->key.data, aead_xform->key.length))
			return -EINVAL;
	} else if ((aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT &&
			aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) ||
			(aead_xform->op == RTE_CRYPTO_AEAD_OP_DECRYPT &&
			aead_xform->algo == RTE_CRYPTO_AEAD_AES_CCM)) {
		session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
		/*
		 * It needs to create cipher desc content first,
		 * then authentication
		 */
		crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
			RTE_CRYPTO_AUTH_OP_GENERATE : RTE_CRYPTO_AUTH_OP_VERIFY;

		if (qat_sym_cd_cipher_set(session,
					aead_xform->key.data,
					aead_xform->key.length))
			return -EINVAL;

		if (qat_sym_cd_auth_set(session,
					aead_xform->key.data,
					aead_xform->key.length,
					aead_xform->aad_length,
					aead_xform->digest_length,
					crypto_operation))
			return -EINVAL;
	} else {
		session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
		/*
		 * It needs to create authentication desc content first,
		 * then cipher
		 */
		crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
			RTE_CRYPTO_AUTH_OP_VERIFY : RTE_CRYPTO_AUTH_OP_GENERATE;

		if (qat_sym_cd_auth_set(session,
					aead_xform->key.data,
					aead_xform->key.length,
					aead_xform->aad_length,
					aead_xform->digest_length,
					crypto_operation))
			return -EINVAL;

		if (qat_sym_cd_cipher_set(session,
					aead_xform->key.data,
					aead_xform->key.length))
			return -EINVAL;
	}

	return 0;
}
unsigned int qat_sym_session_get_private_size(
		struct rte_cryptodev *dev __rte_unused)
{
	return RTE_ALIGN_CEIL(sizeof(struct qat_sym_session), 8);
}

/* returns block size in bytes per cipher algo */
int qat_cipher_get_block_size(enum icp_qat_hw_cipher_algo qat_cipher_alg)
{
	switch (qat_cipher_alg) {
	case ICP_QAT_HW_CIPHER_ALGO_DES:
		return ICP_QAT_HW_DES_BLK_SZ;
	case ICP_QAT_HW_CIPHER_ALGO_3DES:
		return ICP_QAT_HW_3DES_BLK_SZ;
	case ICP_QAT_HW_CIPHER_ALGO_AES128:
	case ICP_QAT_HW_CIPHER_ALGO_AES192:
	case ICP_QAT_HW_CIPHER_ALGO_AES256:
		return ICP_QAT_HW_AES_BLK_SZ;
	default:
		QAT_LOG(ERR, "invalid block cipher alg %u", qat_cipher_alg);
		return -EFAULT;
	}
}
/*
 * Returns size in bytes per hash algo for state1 size field in cd_ctrl
 * This is digest size rounded up to nearest quadword
 */
static int qat_hash_get_state1_size(enum icp_qat_hw_auth_algo qat_hash_alg)
{
	switch (qat_hash_alg) {
	case ICP_QAT_HW_AUTH_ALGO_SHA1:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA1_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_SHA224:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA224_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_SHA256:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA256_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_SHA384:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA384_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_SHA512:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
	case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_GALOIS_128_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_ZUC_3G_EIA3_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_SNOW_3G_UIA2_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_MD5:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_MD5_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_KASUMI_F9_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_CBC_MAC_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_NULL:
		return QAT_HW_ROUND_UP(ICP_QAT_HW_NULL_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
		/* return maximum state1 size in this case */
		return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
						QAT_HW_DEFAULT_ALIGNMENT);
	default:
		QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
		return -EFAULT;
	}
}
/* returns digest size in bytes per hash algo */
static int qat_hash_get_digest_size(enum icp_qat_hw_auth_algo qat_hash_alg)
{
	switch (qat_hash_alg) {
	case ICP_QAT_HW_AUTH_ALGO_SHA1:
		return ICP_QAT_HW_SHA1_STATE1_SZ;
	case ICP_QAT_HW_AUTH_ALGO_SHA224:
		return ICP_QAT_HW_SHA224_STATE1_SZ;
	case ICP_QAT_HW_AUTH_ALGO_SHA256:
		return ICP_QAT_HW_SHA256_STATE1_SZ;
	case ICP_QAT_HW_AUTH_ALGO_SHA384:
		return ICP_QAT_HW_SHA384_STATE1_SZ;
	case ICP_QAT_HW_AUTH_ALGO_SHA512:
		return ICP_QAT_HW_SHA512_STATE1_SZ;
	case ICP_QAT_HW_AUTH_ALGO_MD5:
		return ICP_QAT_HW_MD5_STATE1_SZ;
	case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
		return ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
	case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
		/* return maximum digest size in this case */
		return ICP_QAT_HW_SHA512_STATE1_SZ;
	default:
		QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
		return -EFAULT;
	}
}
/* returns block size in bytes per hash algo */
static int qat_hash_get_block_size(enum icp_qat_hw_auth_algo qat_hash_alg)
{
	switch (qat_hash_alg) {
	case ICP_QAT_HW_AUTH_ALGO_SHA1:
		return SHA_CBLOCK;
	case ICP_QAT_HW_AUTH_ALGO_SHA224:
		return SHA256_CBLOCK;
	case ICP_QAT_HW_AUTH_ALGO_SHA256:
		return SHA256_CBLOCK;
	case ICP_QAT_HW_AUTH_ALGO_SHA384:
		return SHA512_CBLOCK;
	case ICP_QAT_HW_AUTH_ALGO_SHA512:
		return SHA512_CBLOCK;
	case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
		return 16;
	case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
		return ICP_QAT_HW_AES_BLK_SZ;
	case ICP_QAT_HW_AUTH_ALGO_MD5:
		return MD5_CBLOCK;
	case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
		/* return maximum block size in this case */
		return SHA512_CBLOCK;
	default:
		QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
		return -EFAULT;
	}
}
static int partial_hash_sha1(uint8_t *data_in, uint8_t *data_out)
{
	SHA_CTX ctx;

	if (!SHA1_Init(&ctx))
		return -EFAULT;
	SHA1_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx, SHA_DIGEST_LENGTH);
	return 0;
}

static int partial_hash_sha224(uint8_t *data_in, uint8_t *data_out)
{
	SHA256_CTX ctx;

	if (!SHA224_Init(&ctx))
		return -EFAULT;
	SHA256_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
	return 0;
}

static int partial_hash_sha256(uint8_t *data_in, uint8_t *data_out)
{
	SHA256_CTX ctx;

	if (!SHA256_Init(&ctx))
		return -EFAULT;
	SHA256_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
	return 0;
}

static int partial_hash_sha384(uint8_t *data_in, uint8_t *data_out)
{
	SHA512_CTX ctx;

	if (!SHA384_Init(&ctx))
		return -EFAULT;
	SHA512_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
	return 0;
}

static int partial_hash_sha512(uint8_t *data_in, uint8_t *data_out)
{
	SHA512_CTX ctx;

	if (!SHA512_Init(&ctx))
		return -EFAULT;
	SHA512_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
	return 0;
}

static int partial_hash_md5(uint8_t *data_in, uint8_t *data_out)
{
	MD5_CTX ctx;

	if (!MD5_Init(&ctx))
		return -EFAULT;
	MD5_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx, MD5_DIGEST_LENGTH);
	return 0;
}
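/*
 * Each partial_hash_* helper above runs exactly one compression-function
 * round (the *_Transform call) over a single input block, with no padding
 * or length processing, then copies the raw chaining variables from the
 * head of the OpenSSL context. These intermediate states are what the QAT
 * hash slice consumes as HMAC precomputes.
 */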
static int
partial_hash_compute(enum icp_qat_hw_auth_algo hash_alg,
		uint8_t *data_in, uint8_t *data_out)
{
	int digest_size;
	uint8_t digest[qat_hash_get_digest_size(
			ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
	uint32_t *hash_state_out_be32;
	uint64_t *hash_state_out_be64;
	int i;

	/* Initialize to avoid gcc warning */
	memset(digest, 0, sizeof(digest));

	digest_size = qat_hash_get_digest_size(hash_alg);
	if (digest_size <= 0)
		return -EFAULT;

	hash_state_out_be32 = (uint32_t *)data_out;
	hash_state_out_be64 = (uint64_t *)data_out;

	switch (hash_alg) {
	case ICP_QAT_HW_AUTH_ALGO_SHA1:
		if (partial_hash_sha1(data_in, digest))
			return -EFAULT;
		for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
			*hash_state_out_be32 =
				rte_bswap32(*(((uint32_t *)digest)+i));
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA224:
		if (partial_hash_sha224(data_in, digest))
			return -EFAULT;
		for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
			*hash_state_out_be32 =
				rte_bswap32(*(((uint32_t *)digest)+i));
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA256:
		if (partial_hash_sha256(data_in, digest))
			return -EFAULT;
		for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
			*hash_state_out_be32 =
				rte_bswap32(*(((uint32_t *)digest)+i));
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA384:
		if (partial_hash_sha384(data_in, digest))
			return -EFAULT;
		for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
			*hash_state_out_be64 =
				rte_bswap64(*(((uint64_t *)digest)+i));
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA512:
		if (partial_hash_sha512(data_in, digest))
			return -EFAULT;
		for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
			*hash_state_out_be64 =
				rte_bswap64(*(((uint64_t *)digest)+i));
		break;
	case ICP_QAT_HW_AUTH_ALGO_MD5:
		if (partial_hash_md5(data_in, data_out))
			return -EFAULT;
		break;
	default:
		QAT_LOG(ERR, "invalid hash alg %u", hash_alg);
		return -EFAULT;
	}

	return 0;
}
#define HMAC_IPAD_VALUE	0x36
#define HMAC_OPAD_VALUE	0x5c
#define HASH_XCBC_PRECOMP_KEY_NUM 3

static const uint8_t AES_CMAC_SEED[ICP_QAT_HW_AES_128_KEY_SZ];
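/*
 * CMAC subkey derivation (RFC 4493 Generate_Subkey): the derived key is
 * the base key doubled in GF(2^128), i.e. shifted left by one bit, with
 * the constant Rb (0x87) XOR-ed into the last byte when the shifted-out
 * MSB was set.
 */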
static void aes_cmac_key_derive(uint8_t *base, uint8_t *derived)
{
	int i;

	derived[0] = base[0] << 1;
	for (i = 1; i < ICP_QAT_HW_AES_BLK_SZ; i++) {
		derived[i] = base[i] << 1;
		derived[i - 1] |= base[i] >> 7;
	}

	if (base[0] & 0x80)
		derived[ICP_QAT_HW_AES_BLK_SZ - 1] ^= QAT_AES_CMAC_CONST_RB;
}
static int qat_sym_do_precomputes(enum icp_qat_hw_auth_algo hash_alg,
				const uint8_t *auth_key,
				uint16_t auth_keylen,
				uint8_t *p_state_buf,
				uint16_t *p_state_len,
				uint8_t aes_cmac)
{
	int block_size;
	uint8_t ipad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
	uint8_t opad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
	int i;

	if (hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC) {

		/* CMAC */
		if (aes_cmac) {
			AES_KEY enc_key;
			uint8_t *in = NULL;
			uint8_t k0[ICP_QAT_HW_AES_128_KEY_SZ];
			uint8_t *k1, *k2;

			auth_keylen = ICP_QAT_HW_AES_128_KEY_SZ;

			in = rte_zmalloc("AES CMAC K1",
					ICP_QAT_HW_AES_128_KEY_SZ, 16);
			if (in == NULL) {
				QAT_LOG(ERR, "Failed to alloc memory");
				return -ENOMEM;
			}

			rte_memcpy(in, AES_CMAC_SEED,
				ICP_QAT_HW_AES_128_KEY_SZ);
			rte_memcpy(p_state_buf, auth_key, auth_keylen);

			if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
				&enc_key) != 0) {
				rte_free(in);
				return -EFAULT;
			}

			AES_encrypt(in, k0, &enc_key);

			k1 = p_state_buf + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
			k2 = k1 + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;

			aes_cmac_key_derive(k0, k1);
			aes_cmac_key_derive(k1, k2);

			memset(k0, 0, ICP_QAT_HW_AES_128_KEY_SZ);
			*p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
			rte_free(in);
			return 0;
		} else {
			static uint8_t qat_aes_xcbc_key_seed[
					ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ] = {
				0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
				0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
				0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
				0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
				0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
				0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
			};

			uint8_t *in = NULL;
			uint8_t *out = p_state_buf;
			int x;
			AES_KEY enc_key;

			in = rte_zmalloc("working mem for key",
					ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ, 16);
			if (in == NULL) {
				QAT_LOG(ERR, "Failed to alloc memory");
				return -ENOMEM;
			}

			rte_memcpy(in, qat_aes_xcbc_key_seed,
					ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
			for (x = 0; x < HASH_XCBC_PRECOMP_KEY_NUM; x++) {
				if (AES_set_encrypt_key(auth_key,
							auth_keylen << 3,
							&enc_key) != 0) {
					rte_free(in -
					 (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ));
					memset(out -
					 (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ),
					 0, ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
					return -EFAULT;
				}
				AES_encrypt(in, out, &enc_key);
				in += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
				out += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
			}
			*p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
			rte_free(in - x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ);
			return 0;
		}

	} else if ((hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_128) ||
		(hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_64)) {
		uint8_t *in = NULL;
		uint8_t *out = p_state_buf;
		AES_KEY enc_key;

		memset(p_state_buf, 0, ICP_QAT_HW_GALOIS_H_SZ +
				ICP_QAT_HW_GALOIS_LEN_A_SZ +
				ICP_QAT_HW_GALOIS_E_CTR0_SZ);
		in = rte_zmalloc("working mem for key",
				ICP_QAT_HW_GALOIS_H_SZ, 16);
		if (in == NULL) {
			QAT_LOG(ERR, "Failed to alloc memory");
			return -ENOMEM;
		}

		memset(in, 0, ICP_QAT_HW_GALOIS_H_SZ);
		if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
			&enc_key) != 0) {
			rte_free(in);
			return -EFAULT;
		}
		AES_encrypt(in, out, &enc_key);
		*p_state_len = ICP_QAT_HW_GALOIS_H_SZ +
				ICP_QAT_HW_GALOIS_LEN_A_SZ +
				ICP_QAT_HW_GALOIS_E_CTR0_SZ;
		rte_free(in);
		return 0;
	}

	block_size = qat_hash_get_block_size(hash_alg);
	if (block_size < 0)
		return block_size;

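	/*
	 * HMAC precompute: hash one block of (key XOR ipad) and one block of
	 * (key XOR opad) and store the two intermediate states. The hardware
	 * can then finish HMAC(K, msg) = H((K ^ opad) || H((K ^ ipad) || msg))
	 * for any message without needing the key itself.
	 */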
	/* init ipad and opad from key and xor with fixed values */
	memset(ipad, 0, block_size);
	memset(opad, 0, block_size);

	if (auth_keylen > (unsigned int)block_size) {
		QAT_LOG(ERR, "invalid keylen %u", auth_keylen);
		return -EFAULT;
	}

	rte_memcpy(ipad, auth_key, auth_keylen);
	rte_memcpy(opad, auth_key, auth_keylen);

	for (i = 0; i < block_size; i++) {
		uint8_t *ipad_ptr = ipad + i;
		uint8_t *opad_ptr = opad + i;
		*ipad_ptr ^= HMAC_IPAD_VALUE;
		*opad_ptr ^= HMAC_OPAD_VALUE;
	}

	/* do partial hash of ipad and copy to state1 */
	if (partial_hash_compute(hash_alg, ipad, p_state_buf)) {
		memset(ipad, 0, block_size);
		memset(opad, 0, block_size);
		QAT_LOG(ERR, "ipad precompute failed");
		return -EFAULT;
	}

	/*
	 * State len is a multiple of 8, so may be larger than the digest.
	 * Put the partial hash of opad state_len bytes after state1
	 */
	*p_state_len = qat_hash_get_state1_size(hash_alg);
	if (partial_hash_compute(hash_alg, opad, p_state_buf + *p_state_len)) {
		memset(ipad, 0, block_size);
		memset(opad, 0, block_size);
		QAT_LOG(ERR, "opad precompute failed");
		return -EFAULT;
	}

	/* don't leave data lying around */
	memset(ipad, 0, block_size);
	memset(opad, 0, block_size);
	return 0;
}
static void
qat_sym_session_init_common_hdr(struct qat_sym_session *session)
{
	struct icp_qat_fw_la_bulk_req *req_tmpl = &session->fw_req;
	struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
	enum qat_sym_proto_flag proto_flags = session->qat_proto_flag;
	uint32_t slice_flags = session->slice_types;

	header->hdr_flags =
		ICP_QAT_FW_COMN_HDR_FLAGS_BUILD(ICP_QAT_FW_COMN_REQ_FLAG_SET);
	header->service_type = ICP_QAT_FW_COMN_REQ_CPM_FW_LA;
	header->service_cmd_id = session->qat_cmd;
	header->comn_req_flags =
		ICP_QAT_FW_COMN_FLAGS_BUILD(QAT_COMN_CD_FLD_TYPE_64BIT_ADR,
					QAT_COMN_PTR_TYPE_FLAT);
	ICP_QAT_FW_LA_PARTIAL_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_PARTIAL_NONE);
	ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(header->serv_specif_flags,
					ICP_QAT_FW_CIPH_IV_16BYTE_DATA);

	switch (proto_flags) {
	case QAT_CRYPTO_PROTO_FLAG_NONE:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_PROTO);
		break;
	case QAT_CRYPTO_PROTO_FLAG_CCM:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_CCM_PROTO);
		break;
	case QAT_CRYPTO_PROTO_FLAG_GCM:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_GCM_PROTO);
		break;
	case QAT_CRYPTO_PROTO_FLAG_SNOW3G:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_SNOW_3G_PROTO);
		break;
	case QAT_CRYPTO_PROTO_FLAG_ZUC:
		ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(header->serv_specif_flags,
			ICP_QAT_FW_LA_ZUC_3G_PROTO);
		break;
	}

	/* More than one of the following flags can be set at once */
	if (QAT_SESSION_IS_SLICE_SET(slice_flags, QAT_CRYPTO_SLICE_SPC)) {
		ICP_QAT_FW_LA_SINGLE_PASS_PROTO_FLAG_SET(
			header->serv_specif_flags,
			ICP_QAT_FW_LA_SINGLE_PASS_PROTO);
	}
	if (QAT_SESSION_IS_SLICE_SET(slice_flags, QAT_CRYPTO_SLICE_UCS)) {
		ICP_QAT_FW_LA_SLICE_TYPE_SET(
			header->serv_specif_flags,
			ICP_QAT_FW_LA_USE_UCS_SLICE_TYPE);
	}

	if (session->is_auth) {
		if (session->auth_op == ICP_QAT_HW_AUTH_VERIFY) {
			ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_RET_AUTH_RES);
			ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_CMP_AUTH_RES);
		} else if (session->auth_op == ICP_QAT_HW_AUTH_GENERATE) {
			ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_RET_AUTH_RES);
			ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
		}
	} else {
		ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_RET_AUTH_RES);
		ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
	}

	if (session->is_iv12B) {
		ICP_QAT_FW_LA_GCM_IV_LEN_FLAG_SET(
			header->serv_specif_flags,
			ICP_QAT_FW_LA_GCM_IV_LEN_12_OCTETS);
	}

	ICP_QAT_FW_LA_UPDATE_STATE_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_UPDATE_STATE);
	ICP_QAT_FW_LA_DIGEST_IN_BUFFER_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_DIGEST_IN_BUFFER);
}
int qat_sym_cd_cipher_set(struct qat_sym_session *cdesc,
						const uint8_t *cipherkey,
						uint32_t cipherkeylen)
{
	struct icp_qat_hw_cipher_algo_blk *cipher;
	struct icp_qat_hw_cipher_algo_blk20 *cipher20;
	struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
	struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
	struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
	void *ptr = &req_tmpl->cd_ctrl;
	struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
	struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
	enum icp_qat_hw_cipher_convert key_convert;
	struct icp_qat_fw_la_cipher_20_req_params *req_ucs =
			(struct icp_qat_fw_la_cipher_20_req_params *)
			&cdesc->fw_req.serv_specif_rqpars;
	struct icp_qat_fw_la_cipher_req_params *req_cipher =
			(struct icp_qat_fw_la_cipher_req_params *)
			&cdesc->fw_req.serv_specif_rqpars;
	uint32_t total_key_size;
	uint16_t cipher_offset, cd_size;
	uint32_t wordIndex = 0;
	uint32_t *temp_key = NULL;

	if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER) {
		cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
		ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_DRAM_WR);
		ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_RET_AUTH_RES);
		ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
		cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
		ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_DRAM_WR);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
		QAT_LOG(ERR, "Invalid param, must be a cipher command.");
		return -EFAULT;
	}

	if (cdesc->qat_mode == ICP_QAT_HW_CIPHER_CTR_MODE) {
		/*
		 * CTR streaming ciphers are a special case: decrypt = encrypt.
		 * Overriding default values previously set.
		 * Chacha20-Poly1305 is a further special case: CTR but
		 * single-pass, so both directions need to be used.
		 */
		cdesc->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
		if (cdesc->qat_cipher_alg ==
			ICP_QAT_HW_CIPHER_ALGO_CHACHA20_POLY1305 &&
			cdesc->auth_op == ICP_QAT_HW_AUTH_VERIFY) {
			cdesc->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
		}
		key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2
		|| cdesc->qat_cipher_alg ==
			ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)
		key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
	else if (cdesc->qat_dir == ICP_QAT_HW_CIPHER_ENCRYPT)
		key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
	else if (cdesc->qat_mode == ICP_QAT_HW_CIPHER_AEAD_MODE)
		key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
	else
		key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;

	if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
		total_key_size = ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ +
			ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
		cipher_cd_ctrl->cipher_state_sz =
			ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
		cdesc->qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;

	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
		total_key_size = ICP_QAT_HW_KASUMI_F8_KEY_SZ;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_KASUMI_BLK_SZ >> 3;
		cipher_cd_ctrl->cipher_padding_sz =
					(2 * ICP_QAT_HW_KASUMI_BLK_SZ) >> 3;
	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES) {
		total_key_size = ICP_QAT_HW_3DES_KEY_SZ;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_3DES_BLK_SZ >> 3;
	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_DES) {
		total_key_size = ICP_QAT_HW_DES_KEY_SZ;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_DES_BLK_SZ >> 3;
	} else if (cdesc->qat_cipher_alg ==
		ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
		total_key_size = ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ +
			ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
		cipher_cd_ctrl->cipher_state_sz =
			ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
		cdesc->qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
	} else {
		total_key_size = cipherkeylen;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_AES_BLK_SZ >> 3;
	}
	cipher_offset = cdesc->cd_cur_ptr - ((uint8_t *)&cdesc->cd);
	cipher_cd_ctrl->cipher_cfg_offset = cipher_offset >> 3;

	cipher = (struct icp_qat_hw_cipher_algo_blk *)cdesc->cd_cur_ptr;
	cipher20 = (struct icp_qat_hw_cipher_algo_blk20 *)cdesc->cd_cur_ptr;
	cipher->cipher_config.val =
	    ICP_QAT_HW_CIPHER_CONFIG_BUILD(cdesc->qat_mode,
					cdesc->qat_cipher_alg, key_convert,
					cdesc->qat_dir);

	if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
		temp_key = (uint32_t *)(cdesc->cd_cur_ptr +
					sizeof(struct icp_qat_hw_cipher_config)
					+ cipherkeylen);
		memcpy(cipher->key, cipherkey, cipherkeylen);
		memcpy(temp_key, cipherkey, cipherkeylen);

		/* XOR Key with KASUMI F8 key modifier at 4 bytes level */
		for (wordIndex = 0; wordIndex < (cipherkeylen >> 2);
								wordIndex++)
			temp_key[wordIndex] ^= KASUMI_F8_KEY_MODIFIER_4_BYTES;

		cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
					cipherkeylen + cipherkeylen;
	} else if (cdesc->is_ucs) {
		const uint8_t *final_key = cipherkey;

		cdesc->slice_types |= QAT_CRYPTO_SLICE_UCS;
		total_key_size = RTE_ALIGN_CEIL(cipherkeylen,
			ICP_QAT_HW_AES_128_KEY_SZ);
		cipher20->cipher_config.reserved[0] = 0;
		cipher20->cipher_config.reserved[1] = 0;
		cipher20->cipher_config.reserved[2] = 0;

		rte_memcpy(cipher20->key, final_key, cipherkeylen);
		cdesc->cd_cur_ptr +=
			sizeof(struct icp_qat_hw_ucs_cipher_config) +
					cipherkeylen;
	} else {
		memcpy(cipher->key, cipherkey, cipherkeylen);
		cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
					cipherkeylen;
	}

	if (cdesc->is_single_pass) {
		QAT_FIELD_SET(cipher->cipher_config.val,
			cdesc->digest_length,
			QAT_CIPHER_AEAD_HASH_CMP_LEN_BITPOS,
			QAT_CIPHER_AEAD_HASH_CMP_LEN_MASK);
		/* UCS and SPC 1.8/2.0 share configuration of 2nd config word */
		cdesc->cd.cipher.cipher_config.reserved =
				ICP_QAT_HW_CIPHER_CONFIG_BUILD_UPPER(
					cdesc->aad_len);
		cdesc->slice_types |= QAT_CRYPTO_SLICE_SPC;
	}

	if (total_key_size > cipherkeylen) {
		uint32_t padding_size = total_key_size - cipherkeylen;

		if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
			&& (cipherkeylen == QAT_3DES_KEY_SZ_OPT2)) {
			/* K3 not provided so use K1 = K3 */
			memcpy(cdesc->cd_cur_ptr, cipherkey, padding_size);
		} else if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
			&& (cipherkeylen == QAT_3DES_KEY_SZ_OPT3)) {
			/* K2 and K3 not provided so use K1 = K2 = K3 */
			memcpy(cdesc->cd_cur_ptr, cipherkey,
				cipherkeylen);
			memcpy(cdesc->cd_cur_ptr + cipherkeylen,
				cipherkey, cipherkeylen);
		} else
			memset(cdesc->cd_cur_ptr, 0, padding_size);

		cdesc->cd_cur_ptr += padding_size;
	}
	if (cdesc->is_ucs) {
		/*
		 * These fields sit at the same offsets as the corresponding
		 * auth slice request fields
		 */
		req_ucs->spc_auth_res_sz = cdesc->digest_length;
		if (!cdesc->is_gmac) {
			req_ucs->spc_aad_sz = cdesc->aad_len;
			req_ucs->spc_aad_offset = 0;
		}
	} else if (cdesc->is_single_pass) {
		req_cipher->spc_aad_sz = cdesc->aad_len;
		req_cipher->spc_auth_res_sz = cdesc->digest_length;
	}
	cd_size = cdesc->cd_cur_ptr - (uint8_t *)&cdesc->cd;
	cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;
	cipher_cd_ctrl->cipher_key_sz = total_key_size >> 3;

	return 0;
}
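/*
 * Note: the content descriptor offsets and sizes programmed above
 * (cipher_cfg_offset, cipher_key_sz, content_desc_params_sz) are all in
 * 8-byte (quadword) units, hence the ">> 3" conversions.
 */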
int qat_sym_cd_auth_set(struct qat_sym_session *cdesc,
						const uint8_t *authkey,
						uint32_t authkeylen,
						uint32_t aad_length,
						uint32_t digestsize,
						unsigned int operation)
{
	struct icp_qat_hw_auth_setup *hash;
	struct icp_qat_hw_cipher_algo_blk *cipherconfig;
	struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
	struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
	void *ptr = &req_tmpl->cd_ctrl;
	struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
	struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
	struct icp_qat_fw_la_auth_req_params *auth_param =
		(struct icp_qat_fw_la_auth_req_params *)
		((char *)&req_tmpl->serv_specif_rqpars +
		ICP_QAT_FW_HASH_REQUEST_PARAMETERS_OFFSET);
	uint16_t state1_size = 0, state2_size = 0, cd_extra_size = 0;
	uint16_t hash_offset, cd_size;
	uint32_t *aad_len = NULL;
	uint32_t wordIndex = 0;
	uint32_t *pTempKey;

	if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_AUTH) {
		ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_DRAM_WR);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
		ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_DRAM_WR);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
		QAT_LOG(ERR, "Invalid param, must be a hash command.");
		return -EFAULT;
	}

	if (operation == RTE_CRYPTO_AUTH_OP_VERIFY)
		cdesc->auth_op = ICP_QAT_HW_AUTH_VERIFY;
	else
		cdesc->auth_op = ICP_QAT_HW_AUTH_GENERATE;

	/*
	 * Setup the inner hash config
	 */
	hash_offset = cdesc->cd_cur_ptr - ((uint8_t *)&cdesc->cd);
	hash = (struct icp_qat_hw_auth_setup *)cdesc->cd_cur_ptr;
	hash->auth_config.reserved = 0;
	hash->auth_config.config =
			ICP_QAT_HW_AUTH_CONFIG_BUILD(cdesc->auth_mode,
				cdesc->qat_hash_alg, digestsize);

	if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_KASUMI_F9
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_NULL
		|| cdesc->is_cnt_zero
			)
		hash->auth_counter.counter = 0;
	else {
		int block_size = qat_hash_get_block_size(cdesc->qat_hash_alg);

		if (block_size < 0)
			return block_size;
		hash->auth_counter.counter = rte_bswap32(block_size);
	}

	cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_auth_setup);

	/*
	 * cd_cur_ptr now points at the state1 information.
	 */
	switch (cdesc->qat_hash_alg) {
	case ICP_QAT_HW_AUTH_ALGO_SHA1:
		if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0) {
			/* Plain SHA-1 */
			rte_memcpy(cdesc->cd_cur_ptr, sha1InitialState,
					sizeof(sha1InitialState));
			state1_size = qat_hash_get_state1_size(
					cdesc->qat_hash_alg);
			break;
		}
		/* SHA-1 HMAC */
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA1, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = RTE_ALIGN_CEIL(ICP_QAT_HW_SHA1_STATE2_SZ, 8);
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA224:
		if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0) {
			/* Plain SHA-224 */
			rte_memcpy(cdesc->cd_cur_ptr, sha224InitialState,
					sizeof(sha224InitialState));
			state1_size = qat_hash_get_state1_size(
					cdesc->qat_hash_alg);
			break;
		}
		/* SHA-224 HMAC */
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA224, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_SHA224_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA256:
		if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0) {
			/* Plain SHA-256 */
			rte_memcpy(cdesc->cd_cur_ptr, sha256InitialState,
					sizeof(sha256InitialState));
			state1_size = qat_hash_get_state1_size(
					cdesc->qat_hash_alg);
			break;
		}
		/* SHA-256 HMAC */
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA256, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_SHA256_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA384:
		if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0) {
			/* Plain SHA-384 */
			rte_memcpy(cdesc->cd_cur_ptr, sha384InitialState,
					sizeof(sha384InitialState));
			state1_size = qat_hash_get_state1_size(
					cdesc->qat_hash_alg);
			break;
		}
		/* SHA-384 HMAC */
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA384, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_SHA384_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA512:
		if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0) {
			/* Plain SHA-512 */
			rte_memcpy(cdesc->cd_cur_ptr, sha512InitialState,
					sizeof(sha512InitialState));
			state1_size = qat_hash_get_state1_size(
					cdesc->qat_hash_alg);
			break;
		}
		/* SHA-512 HMAC */
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA512, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_SHA512_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
		state1_size = ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;

		if (cdesc->aes_cmac)
			memset(cdesc->cd_cur_ptr, 0, state1_size);
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC,
			authkey, authkeylen, cdesc->cd_cur_ptr + state1_size,
			&state2_size, cdesc->aes_cmac)) {
			cdesc->aes_cmac ? QAT_LOG(ERR,
						"(CMAC)precompute failed")
					: QAT_LOG(ERR,
						"(XCBC)precompute failed");
			return -EFAULT;
		}
		break;
	case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
	case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
		cdesc->qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
		state1_size = ICP_QAT_HW_GALOIS_128_STATE1_SZ;
		if (qat_sym_do_precomputes(cdesc->qat_hash_alg, authkey,
			authkeylen, cdesc->cd_cur_ptr + state1_size,
			&state2_size, cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(GCM)precompute failed");
			return -EFAULT;
		}
		/*
		 * Write (the length of AAD) into bytes 16-19 of state2
		 * in big-endian format. This field is 8 bytes
		 */
		auth_param->u2.aad_sz =
				RTE_ALIGN_CEIL(aad_length, 16);
		auth_param->hash_state_sz = (auth_param->u2.aad_sz) >> 3;

		aad_len = (uint32_t *)(cdesc->cd_cur_ptr +
					ICP_QAT_HW_GALOIS_128_STATE1_SZ +
					ICP_QAT_HW_GALOIS_H_SZ);
		*aad_len = rte_bswap32(aad_length);
		cdesc->aad_len = aad_length;
		break;
	case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
		cdesc->qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2);
		state2_size = ICP_QAT_HW_SNOW_3G_UIA2_STATE2_SZ;
		memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);

		cipherconfig = (struct icp_qat_hw_cipher_algo_blk *)
				(cdesc->cd_cur_ptr + state1_size + state2_size);
		cipherconfig->cipher_config.val =
		ICP_QAT_HW_CIPHER_CONFIG_BUILD(ICP_QAT_HW_CIPHER_ECB_MODE,
			ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2,
			ICP_QAT_HW_CIPHER_KEY_CONVERT,
			ICP_QAT_HW_CIPHER_ENCRYPT);
		memcpy(cipherconfig->key, authkey, authkeylen);
		memset(cipherconfig->key + authkeylen,
				0, ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ);
		cd_extra_size += sizeof(struct icp_qat_hw_cipher_config) +
				authkeylen + ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
		auth_param->hash_state_sz = ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
		break;
	case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
		hash->auth_config.config =
			ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE0,
				cdesc->qat_hash_alg, digestsize);
		cdesc->qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3);
		state2_size = ICP_QAT_HW_ZUC_3G_EIA3_STATE2_SZ;
		memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size
			+ ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ);

		memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
		cd_extra_size += ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
		auth_param->hash_state_sz = ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;

		break;
	case ICP_QAT_HW_AUTH_ALGO_MD5:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_MD5, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(MD5)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_MD5_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_NULL:
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_NULL);
		state2_size = ICP_QAT_HW_NULL_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
		cdesc->qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC);
		state2_size = ICP_QAT_HW_AES_CBC_MAC_KEY_SZ +
				ICP_QAT_HW_AES_CCM_CBC_E_CTR0_SZ;

		if (aad_length > 0) {
			aad_length += ICP_QAT_HW_CCM_AAD_B0_LEN +
				ICP_QAT_HW_CCM_AAD_LEN_INFO;
			auth_param->u2.aad_sz =
				RTE_ALIGN_CEIL(aad_length,
					ICP_QAT_HW_CCM_AAD_ALIGNMENT);
		} else {
			auth_param->u2.aad_sz = ICP_QAT_HW_CCM_AAD_B0_LEN;
		}
		cdesc->aad_len = aad_length;
		hash->auth_counter.counter = 0;

		hash_cd_ctrl->outer_prefix_sz = digestsize;
		auth_param->hash_state_sz = digestsize;

		memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
		break;
	case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_KASUMI_F9);
		state2_size = ICP_QAT_HW_KASUMI_F9_STATE2_SZ;
		memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
		pTempKey = (uint32_t *)(cdesc->cd_cur_ptr + state1_size
							+ authkeylen);
		/*
		 * The Inner Hash Initial State2 block must contain IK
		 * (Initialisation Key), followed by IK XOR-ed with KM
		 * (Key Modifier): IK||(IK^KM).
		 */
		/* write the auth key */
		memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
		/* initialise temp key with auth key */
		memcpy(pTempKey, authkey, authkeylen);
		/* XOR Key with KASUMI F9 key modifier at 4 bytes level */
		for (wordIndex = 0; wordIndex < (authkeylen >> 2); wordIndex++)
			pTempKey[wordIndex] ^= KASUMI_F9_KEY_MODIFIER_4_BYTES;
		break;
	default:
		QAT_LOG(ERR, "Invalid HASH alg %u", cdesc->qat_hash_alg);
		return -EFAULT;
	}

	/* Auth CD config setup */
	hash_cd_ctrl->hash_cfg_offset = hash_offset >> 3;
	hash_cd_ctrl->hash_flags = ICP_QAT_FW_AUTH_HDR_FLAG_NO_NESTED;
	hash_cd_ctrl->inner_res_sz = digestsize;
	hash_cd_ctrl->final_sz = digestsize;
	hash_cd_ctrl->inner_state1_sz = state1_size;
	auth_param->auth_res_sz = digestsize;

	hash_cd_ctrl->inner_state2_sz = state2_size;
	hash_cd_ctrl->inner_state2_offset = hash_cd_ctrl->hash_cfg_offset +
			((sizeof(struct icp_qat_hw_auth_setup) +
			RTE_ALIGN_CEIL(hash_cd_ctrl->inner_state1_sz, 8))
					>> 3);

	cdesc->cd_cur_ptr += state1_size + state2_size + cd_extra_size;
	cd_size = cdesc->cd_cur_ptr - (uint8_t *)&cdesc->cd;

	cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
	cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;

	return 0;
}
int qat_sym_validate_aes_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
{
	switch (key_len) {
	case ICP_QAT_HW_AES_128_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
		break;
	case ICP_QAT_HW_AES_192_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_AES192;
		break;
	case ICP_QAT_HW_AES_256_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

int qat_sym_validate_aes_docsisbpi_key(int key_len,
		enum icp_qat_hw_cipher_algo *alg)
{
	switch (key_len) {
	case ICP_QAT_HW_AES_128_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
		break;
	case ICP_QAT_HW_AES_256_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

int qat_sym_validate_snow3g_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
{
	switch (key_len) {
	case ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

int qat_sym_validate_kasumi_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
{
	switch (key_len) {
	case ICP_QAT_HW_KASUMI_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_KASUMI;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

int qat_sym_validate_des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
{
	switch (key_len) {
	case ICP_QAT_HW_DES_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_DES;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

int qat_sym_validate_3des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
{
	switch (key_len) {
	case QAT_3DES_KEY_SZ_OPT1:
	case QAT_3DES_KEY_SZ_OPT2:
	case QAT_3DES_KEY_SZ_OPT3:
		*alg = ICP_QAT_HW_CIPHER_ALGO_3DES;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

int qat_sym_validate_zuc_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
{
	switch (key_len) {
	case ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ:
		*alg = ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}
#ifdef RTE_LIB_SECURITY
static int
qat_sec_session_check_docsis(struct rte_security_session_conf *conf)
{
	struct rte_crypto_sym_xform *crypto_sym = conf->crypto_xform;
	struct rte_security_docsis_xform *docsis = &conf->docsis;

	/* CRC generate -> Cipher encrypt */
	if (docsis->direction == RTE_SECURITY_DOCSIS_DOWNLINK) {

		if (crypto_sym != NULL &&
			crypto_sym->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
			crypto_sym->cipher.op == RTE_CRYPTO_CIPHER_OP_ENCRYPT &&
			crypto_sym->cipher.algo ==
					RTE_CRYPTO_CIPHER_AES_DOCSISBPI &&
			(crypto_sym->cipher.key.length ==
					ICP_QAT_HW_AES_128_KEY_SZ ||
			crypto_sym->cipher.key.length ==
					ICP_QAT_HW_AES_256_KEY_SZ) &&
			crypto_sym->cipher.iv.length == ICP_QAT_HW_AES_BLK_SZ &&
			crypto_sym->next == NULL) {
			return 0;
		}
	/* Cipher decrypt -> CRC verify */
	} else if (docsis->direction == RTE_SECURITY_DOCSIS_UPLINK) {

		if (crypto_sym != NULL &&
			crypto_sym->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
			crypto_sym->cipher.op == RTE_CRYPTO_CIPHER_OP_DECRYPT &&
			crypto_sym->cipher.algo ==
					RTE_CRYPTO_CIPHER_AES_DOCSISBPI &&
			(crypto_sym->cipher.key.length ==
					ICP_QAT_HW_AES_128_KEY_SZ ||
			crypto_sym->cipher.key.length ==
					ICP_QAT_HW_AES_256_KEY_SZ) &&
			crypto_sym->cipher.iv.length == ICP_QAT_HW_AES_BLK_SZ &&
			crypto_sym->next == NULL) {
			return 0;
		}
	}

	return -EINVAL;
}
static int
qat_sec_session_set_docsis_parameters(struct rte_cryptodev *dev,
		struct rte_security_session_conf *conf, void *session_private)
{
	int ret;
	int qat_cmd_id;
	struct rte_crypto_sym_xform *xform = NULL;
	struct qat_sym_session *session = session_private;

	/* Clear the session */
	memset(session, 0, qat_sym_session_get_private_size(dev));

	ret = qat_sec_session_check_docsis(conf);
	if (ret) {
		QAT_LOG(ERR, "Unsupported DOCSIS security configuration");
		return ret;
	}

	xform = conf->crypto_xform;

	/* Verify the session physical address is known */
	rte_iova_t session_paddr = rte_mempool_virt2iova(session);
	if (session_paddr == 0 || session_paddr == RTE_BAD_IOVA) {
		QAT_LOG(ERR,
			"Session physical address unknown. Bad memory pool.");
		return -EINVAL;
	}

	/* Set context descriptor physical address */
	session->cd_paddr = session_paddr +
			offsetof(struct qat_sym_session, cd);

	/* Get requested QAT command id - should be cipher */
	qat_cmd_id = qat_get_cmd_id(xform);
	if (qat_cmd_id != ICP_QAT_FW_LA_CMD_CIPHER) {
		QAT_LOG(ERR, "Unsupported xform chain requested");
		return -ENOTSUP;
	}
	session->qat_cmd = (enum icp_qat_fw_la_cmd_id)qat_cmd_id;

	ret = qat_sym_session_configure_cipher(dev, xform, session);
	if (ret < 0)
		return ret;
	qat_sym_session_finalize(session);

	return 0;
}
int
qat_security_session_create(void *dev,
		struct rte_security_session_conf *conf,
		struct rte_security_session *sess,
		struct rte_mempool *mempool)
{
	void *sess_private_data;
	struct rte_cryptodev *cdev = (struct rte_cryptodev *)dev;
	struct qat_cryptodev_private *internals = cdev->data->dev_private;
	enum qat_device_gen qat_dev_gen = internals->qat_dev->qat_dev_gen;
	struct qat_sym_session *sym_session = NULL;
	int ret;

	if (conf->action_type != RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL ||
			conf->protocol != RTE_SECURITY_PROTOCOL_DOCSIS) {
		QAT_LOG(ERR, "Invalid security protocol");
		return -EINVAL;
	}

	if (rte_mempool_get(mempool, &sess_private_data)) {
		QAT_LOG(ERR, "Couldn't get object from session mempool");
		return -ENOMEM;
	}

	ret = qat_sec_session_set_docsis_parameters(cdev, conf,
			sess_private_data);
	if (ret != 0) {
		QAT_LOG(ERR, "Failed to configure session parameters");
		/* Return session to mempool */
		rte_mempool_put(mempool, sess_private_data);
		return ret;
	}

	set_sec_session_private_data(sess, sess_private_data);
	sym_session = (struct qat_sym_session *)sess_private_data;
	sym_session->dev_id = internals->dev_id;

	return qat_sym_gen_dev_ops[qat_dev_gen].set_session((void *)cdev,
			sess_private_data);
}

int
qat_security_session_destroy(void *dev __rte_unused,
		struct rte_security_session *sess)
{
	void *sess_priv = get_sec_session_private_data(sess);
	struct qat_sym_session *s = (struct qat_sym_session *)sess_priv;

	if (sess_priv) {
		if (s->bpi_ctx)
			bpi_cipher_ctx_free(s->bpi_ctx);
		memset(s, 0, qat_sym_session_get_private_size(dev));
		struct rte_mempool *sess_mp = rte_mempool_from_obj(sess_priv);

		set_sec_session_private_data(sess, NULL);
		rte_mempool_put(sess_mp, sess_priv);
	}
	return 0;
}
#endif