1 /* SPDX-License-Identifier: (BSD-3-Clause OR GPL-2.0)
2 * Copyright(c) 2015-2022 Intel Corporation
5 #include <openssl/sha.h> /* Needed to calculate pre-compute values */
6 #include <openssl/aes.h> /* Needed to calculate pre-compute values */
7 #include <openssl/md5.h> /* Needed to calculate pre-compute values */
8 #include <openssl/evp.h> /* Needed for bpi runt block processing */
10 #include <rte_memcpy.h>
11 #include <rte_common.h>
12 #include <rte_spinlock.h>
13 #include <rte_byteorder.h>
15 #include <rte_malloc.h>
16 #include <rte_crypto_sym.h>
17 #ifdef RTE_LIB_SECURITY
18 #include <rte_security.h>
22 #include "qat_sym_session.h"
25 /* SHA1 - 20 bytes - Initialiser state can be found in FIPS standard 180-2 */
26 static const uint8_t sha1InitialState[] = {
27 0x67, 0x45, 0x23, 0x01, 0xef, 0xcd, 0xab, 0x89, 0x98, 0xba,
28 0xdc, 0xfe, 0x10, 0x32, 0x54, 0x76, 0xc3, 0xd2, 0xe1, 0xf0};
30 /* SHA 224 - 32 bytes - Initialiser state can be found in FIPS standard 180-2 */
31 static const uint8_t sha224InitialState[] = {
32 0xc1, 0x05, 0x9e, 0xd8, 0x36, 0x7c, 0xd5, 0x07, 0x30, 0x70, 0xdd,
33 0x17, 0xf7, 0x0e, 0x59, 0x39, 0xff, 0xc0, 0x0b, 0x31, 0x68, 0x58,
34 0x15, 0x11, 0x64, 0xf9, 0x8f, 0xa7, 0xbe, 0xfa, 0x4f, 0xa4};
36 /* SHA 256 - 32 bytes - Initialiser state can be found in FIPS standard 180-2 */
37 static const uint8_t sha256InitialState[] = {
38 0x6a, 0x09, 0xe6, 0x67, 0xbb, 0x67, 0xae, 0x85, 0x3c, 0x6e, 0xf3,
39 0x72, 0xa5, 0x4f, 0xf5, 0x3a, 0x51, 0x0e, 0x52, 0x7f, 0x9b, 0x05,
40 0x68, 0x8c, 0x1f, 0x83, 0xd9, 0xab, 0x5b, 0xe0, 0xcd, 0x19};
42 /* SHA 384 - 64 bytes - Initialiser state can be found in FIPS standard 180-2 */
43 static const uint8_t sha384InitialState[] = {
44 0xcb, 0xbb, 0x9d, 0x5d, 0xc1, 0x05, 0x9e, 0xd8, 0x62, 0x9a, 0x29,
45 0x2a, 0x36, 0x7c, 0xd5, 0x07, 0x91, 0x59, 0x01, 0x5a, 0x30, 0x70,
46 0xdd, 0x17, 0x15, 0x2f, 0xec, 0xd8, 0xf7, 0x0e, 0x59, 0x39, 0x67,
47 0x33, 0x26, 0x67, 0xff, 0xc0, 0x0b, 0x31, 0x8e, 0xb4, 0x4a, 0x87,
48 0x68, 0x58, 0x15, 0x11, 0xdb, 0x0c, 0x2e, 0x0d, 0x64, 0xf9, 0x8f,
49 0xa7, 0x47, 0xb5, 0x48, 0x1d, 0xbe, 0xfa, 0x4f, 0xa4};
51 /* SHA 512 - 64 bytes - Initialiser state can be found in FIPS standard 180-2 */
52 static const uint8_t sha512InitialState[] = {
53 0x6a, 0x09, 0xe6, 0x67, 0xf3, 0xbc, 0xc9, 0x08, 0xbb, 0x67, 0xae,
54 0x85, 0x84, 0xca, 0xa7, 0x3b, 0x3c, 0x6e, 0xf3, 0x72, 0xfe, 0x94,
55 0xf8, 0x2b, 0xa5, 0x4f, 0xf5, 0x3a, 0x5f, 0x1d, 0x36, 0xf1, 0x51,
56 0x0e, 0x52, 0x7f, 0xad, 0xe6, 0x82, 0xd1, 0x9b, 0x05, 0x68, 0x8c,
57 0x2b, 0x3e, 0x6c, 0x1f, 0x1f, 0x83, 0xd9, 0xab, 0xfb, 0x41, 0xbd,
58 0x6b, 0x5b, 0xe0, 0xcd, 0x19, 0x13, 0x7e, 0x21, 0x79};
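/*
 * Illustrative sketch (not part of the driver): the arrays above are
 * simply the FIPS 180-2 H(0) words serialized big-endian. For example,
 * sha256InitialState can be regenerated from the well-known eight
 * 32-bit words as follows.
 */
static const uint32_t sha256_h0_words[8] = {
	0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a,
	0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19
};

static inline void
sha256_h0_serialize_example(uint8_t out[32])
{
	unsigned int i;

	for (i = 0; i < 8; i++) {
		/* big-endian byte order, matching sha256InitialState */
		out[4 * i] = (uint8_t)(sha256_h0_words[i] >> 24);
		out[4 * i + 1] = (uint8_t)(sha256_h0_words[i] >> 16);
		out[4 * i + 2] = (uint8_t)(sha256_h0_words[i] >> 8);
		out[4 * i + 3] = (uint8_t)(sha256_h0_words[i]);
	}
}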
61 qat_sym_cd_cipher_set(struct qat_sym_session *cd,
62 const uint8_t *enckey,
66 qat_sym_cd_auth_set(struct qat_sym_session *cdesc,
67 const uint8_t *authkey,
71 unsigned int operation);
73 qat_sym_session_init_common_hdr(struct qat_sym_session *session);
75 /* Req/cd init functions */
78 qat_sym_session_finalize(struct qat_sym_session *session)
80 qat_sym_session_init_common_hdr(session);
83 /** Frees a context previously created
84 * Depends on openssl libcrypto
87 bpi_cipher_ctx_free(void *bpi_ctx)
90 EVP_CIPHER_CTX_free((EVP_CIPHER_CTX *)bpi_ctx);
93 /** Creates a context for either AES or DES in ECB mode
94 * Depends on openssl libcrypto
97 bpi_cipher_ctx_init(enum rte_crypto_cipher_algorithm cryptodev_algo,
98 enum rte_crypto_cipher_operation direction __rte_unused,
99 const uint8_t *key, uint16_t key_length, void **ctx)
101 const EVP_CIPHER *algo = NULL;
103 *ctx = EVP_CIPHER_CTX_new();
110 if (cryptodev_algo == RTE_CRYPTO_CIPHER_DES_DOCSISBPI)
111 algo = EVP_des_ecb();
113 if (key_length == ICP_QAT_HW_AES_128_KEY_SZ)
114 algo = EVP_aes_128_ecb();
116 algo = EVP_aes_256_ecb();
118 /* IV will be ECB-encrypted whether the direction is encrypt or decrypt */
119 if (EVP_EncryptInit_ex(*ctx, algo, NULL, key, 0) != 1) {
128 EVP_CIPHER_CTX_free(*ctx);
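/*
 * Illustrative sketch (assumption, not driver code): the ECB context
 * created above is used for DOCSIS BPI runt-block processing. A
 * trailing partial block is handled CFB-style: ECB-encrypt the IV to
 * get a keystream block and XOR it with the runt bytes. Because only
 * the forward (encrypt) primitive is needed, the same context serves
 * both directions.
 */
static int
bpi_runt_block_example(EVP_CIPHER_CTX *ctx, const uint8_t *iv,
		int block_len, const uint8_t *in, uint8_t *out, int runt_len)
{
	uint8_t keystream[16]; /* big enough for AES (16) and DES (8) */
	int out_len = sizeof(keystream);
	int i;

	if (runt_len > block_len || block_len > (int)sizeof(keystream))
		return -1;
	if (EVP_EncryptUpdate(ctx, keystream, &out_len, iv, block_len) != 1)
		return -1;
	for (i = 0; i < runt_len; i++)
		out[i] = in[i] ^ keystream[i];
	return 0;
}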
133 qat_is_cipher_alg_supported(enum rte_crypto_cipher_algorithm algo,
134 struct qat_cryptodev_private *internals)
137 const struct rte_cryptodev_capabilities *capability;
139 while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
140 RTE_CRYPTO_OP_TYPE_UNDEFINED) {
141 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
144 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_CIPHER)
147 if (capability->sym.cipher.algo == algo)
154 qat_is_auth_alg_supported(enum rte_crypto_auth_algorithm algo,
155 struct qat_cryptodev_private *internals)
158 const struct rte_cryptodev_capabilities *capability;
160 while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
161 RTE_CRYPTO_OP_TYPE_UNDEFINED) {
162 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
165 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_AUTH)
168 if (capability->sym.auth.algo == algo)
175 qat_sym_session_clear(struct rte_cryptodev *dev,
176 struct rte_cryptodev_sym_session *sess)
178 uint8_t index = dev->driver_id;
179 void *sess_priv = get_sym_session_private_data(sess, index);
180 struct qat_sym_session *s = (struct qat_sym_session *)sess_priv;
184 bpi_cipher_ctx_free(s->bpi_ctx);
185 memset(s, 0, qat_sym_session_get_private_size(dev));
186 struct rte_mempool *sess_mp = rte_mempool_from_obj(sess_priv);
188 set_sym_session_private_data(sess, index, NULL);
189 rte_mempool_put(sess_mp, sess_priv);
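/*
 * Application-level lifecycle sketch (assumption: the pre-22.11
 * two-mempool session API this file is written against). The
 * framework routes rte_cryptodev_sym_session_init() to
 * qat_sym_session_configure() below, and session teardown to
 * qat_sym_session_clear() above.
 */
static struct rte_cryptodev_sym_session *
qat_session_lifecycle_example(uint8_t dev_id,
		struct rte_crypto_sym_xform *xform,
		struct rte_mempool *sess_mp, struct rte_mempool *priv_mp)
{
	struct rte_cryptodev_sym_session *sess =
		rte_cryptodev_sym_session_create(sess_mp);

	if (sess == NULL)
		return NULL;
	if (rte_cryptodev_sym_session_init(dev_id, sess, xform,
			priv_mp) < 0) {
		rte_cryptodev_sym_session_free(sess);
		return NULL;
	}
	return sess;
}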
194 qat_get_cmd_id(const struct rte_crypto_sym_xform *xform)
197 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER && xform->next == NULL)
198 return ICP_QAT_FW_LA_CMD_CIPHER;
200 /* Authentication Only */
201 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH && xform->next == NULL)
202 return ICP_QAT_FW_LA_CMD_AUTH;
205 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
206 /* AES-GCM and AES-CCM work in opposite directions:
207 * GCM first encrypts and then generates the hash, whereas
208 * AES-CCM first generates the hash and then encrypts.
209 * A similar relation applies to decryption.
211 if (xform->aead.op == RTE_CRYPTO_AEAD_OP_ENCRYPT)
212 if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
213 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
215 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
217 if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
218 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
220 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
223 if (xform->next == NULL)
226 /* Cipher then Authenticate */
227 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
228 xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
229 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
231 /* Authenticate then Cipher */
232 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH &&
233 xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
234 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
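/*
 * Illustrative example (not driver code): a cipher xform chained to an
 * auth xform maps to CIPHER_HASH, the reverse order to HASH_CIPHER,
 * and a lone xform to plain CIPHER or AUTH.
 */
static void
qat_cmd_id_example(void)
{
	struct rte_crypto_sym_xform cipher = { 0 };
	struct rte_crypto_sym_xform auth = { 0 };

	cipher.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
	auth.type = RTE_CRYPTO_SYM_XFORM_AUTH;

	cipher.next = &auth;
	auth.next = NULL;
	/* qat_get_cmd_id(&cipher) == ICP_QAT_FW_LA_CMD_CIPHER_HASH */

	cipher.next = NULL;
	/* qat_get_cmd_id(&cipher) == ICP_QAT_FW_LA_CMD_CIPHER */
}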
239 static struct rte_crypto_auth_xform *
240 qat_get_auth_xform(struct rte_crypto_sym_xform *xform)
243 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH)
252 static struct rte_crypto_cipher_xform *
253 qat_get_cipher_xform(struct rte_crypto_sym_xform *xform)
256 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
257 return &xform->cipher;
266 qat_sym_session_configure_cipher(struct rte_cryptodev *dev,
267 struct rte_crypto_sym_xform *xform,
268 struct qat_sym_session *session)
270 struct qat_cryptodev_private *internals = dev->data->dev_private;
271 struct rte_crypto_cipher_xform *cipher_xform = NULL;
272 enum qat_device_gen qat_dev_gen =
273 internals->qat_dev->qat_dev_gen;
276 /* Get cipher xform from crypto xform chain */
277 cipher_xform = qat_get_cipher_xform(xform);
279 session->cipher_iv.offset = cipher_xform->iv.offset;
280 session->cipher_iv.length = cipher_xform->iv.length;
282 switch (cipher_xform->algo) {
283 case RTE_CRYPTO_CIPHER_AES_CBC:
284 if (qat_sym_validate_aes_key(cipher_xform->key.length,
285 &session->qat_cipher_alg) != 0) {
286 QAT_LOG(ERR, "Invalid AES cipher key size");
290 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
292 case RTE_CRYPTO_CIPHER_AES_CTR:
293 if (qat_sym_validate_aes_key(cipher_xform->key.length,
294 &session->qat_cipher_alg) != 0) {
295 QAT_LOG(ERR, "Invalid AES cipher key size");
299 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
300 if (qat_dev_gen == QAT_GEN4)
303 case RTE_CRYPTO_CIPHER_SNOW3G_UEA2:
304 if (qat_sym_validate_snow3g_key(cipher_xform->key.length,
305 &session->qat_cipher_alg) != 0) {
306 QAT_LOG(ERR, "Invalid SNOW 3G cipher key size");
310 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
312 case RTE_CRYPTO_CIPHER_NULL:
313 session->qat_cipher_alg = ICP_QAT_HW_CIPHER_ALGO_NULL;
314 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
316 case RTE_CRYPTO_CIPHER_KASUMI_F8:
317 if (qat_sym_validate_kasumi_key(cipher_xform->key.length,
318 &session->qat_cipher_alg) != 0) {
319 QAT_LOG(ERR, "Invalid KASUMI cipher key size");
323 session->qat_mode = ICP_QAT_HW_CIPHER_F8_MODE;
325 case RTE_CRYPTO_CIPHER_3DES_CBC:
326 if (qat_sym_validate_3des_key(cipher_xform->key.length,
327 &session->qat_cipher_alg) != 0) {
328 QAT_LOG(ERR, "Invalid 3DES cipher key size");
332 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
334 case RTE_CRYPTO_CIPHER_DES_CBC:
335 if (qat_sym_validate_des_key(cipher_xform->key.length,
336 &session->qat_cipher_alg) != 0) {
337 QAT_LOG(ERR, "Invalid DES cipher key size");
341 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
343 case RTE_CRYPTO_CIPHER_3DES_CTR:
344 if (qat_sym_validate_3des_key(cipher_xform->key.length,
345 &session->qat_cipher_alg) != 0) {
346 QAT_LOG(ERR, "Invalid 3DES cipher key size");
350 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
352 case RTE_CRYPTO_CIPHER_DES_DOCSISBPI:
353 ret = bpi_cipher_ctx_init(
356 cipher_xform->key.data,
357 cipher_xform->key.length,
360 QAT_LOG(ERR, "failed to create DES BPI ctx");
363 if (qat_sym_validate_des_key(cipher_xform->key.length,
364 &session->qat_cipher_alg) != 0) {
365 QAT_LOG(ERR, "Invalid DES cipher key size");
369 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
371 case RTE_CRYPTO_CIPHER_AES_DOCSISBPI:
372 ret = bpi_cipher_ctx_init(
375 cipher_xform->key.data,
376 cipher_xform->key.length,
379 QAT_LOG(ERR, "failed to create AES BPI ctx");
382 if (qat_sym_validate_aes_docsisbpi_key(cipher_xform->key.length,
383 &session->qat_cipher_alg) != 0) {
384 QAT_LOG(ERR, "Invalid AES DOCSISBPI key size");
388 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
390 case RTE_CRYPTO_CIPHER_ZUC_EEA3:
391 if (!qat_is_cipher_alg_supported(
392 cipher_xform->algo, internals)) {
393 QAT_LOG(ERR, "%s not supported on this device",
394 rte_crypto_cipher_algorithm_strings
395 [cipher_xform->algo]);
399 if (qat_sym_validate_zuc_key(cipher_xform->key.length,
400 &session->qat_cipher_alg) != 0) {
401 QAT_LOG(ERR, "Invalid ZUC cipher key size");
405 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
407 case RTE_CRYPTO_CIPHER_AES_XTS:
408 if ((cipher_xform->key.length/2) == ICP_QAT_HW_AES_192_KEY_SZ) {
409 QAT_LOG(ERR, "AES-XTS-192 not supported");
413 if (qat_sym_validate_aes_key((cipher_xform->key.length/2),
414 &session->qat_cipher_alg) != 0) {
415 QAT_LOG(ERR, "Invalid AES-XTS cipher key size");
419 session->qat_mode = ICP_QAT_HW_CIPHER_XTS_MODE;
421 case RTE_CRYPTO_CIPHER_3DES_ECB:
422 case RTE_CRYPTO_CIPHER_AES_ECB:
423 case RTE_CRYPTO_CIPHER_AES_F8:
424 case RTE_CRYPTO_CIPHER_ARC4:
425 QAT_LOG(ERR, "Crypto QAT PMD: Unsupported Cipher alg %u",
430 QAT_LOG(ERR, "Crypto: Undefined Cipher specified %u",
436 if (cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
437 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
439 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
441 if (qat_sym_cd_cipher_set(session,
442 cipher_xform->key.data,
443 cipher_xform->key.length)) {
451 if (session->bpi_ctx) {
452 bpi_cipher_ctx_free(session->bpi_ctx);
453 session->bpi_ctx = NULL;
459 qat_sym_session_configure(struct rte_cryptodev *dev,
460 struct rte_crypto_sym_xform *xform,
461 struct rte_cryptodev_sym_session *sess,
462 struct rte_mempool *mempool)
464 void *sess_private_data;
467 if (rte_mempool_get(mempool, &sess_private_data)) {
469 "Couldn't get object from session mempool");
473 ret = qat_sym_session_set_parameters(dev, xform, sess_private_data);
476 "Crypto QAT PMD: failed to configure session parameters");
478 /* Return session to mempool */
479 rte_mempool_put(mempool, sess_private_data);
483 set_sym_session_private_data(sess, dev->driver_id,
490 qat_sym_session_set_parameters(struct rte_cryptodev *dev,
491 struct rte_crypto_sym_xform *xform, void *session_private)
493 struct qat_sym_session *session = session_private;
494 struct qat_cryptodev_private *internals = dev->data->dev_private;
495 enum qat_device_gen qat_dev_gen = internals->qat_dev->qat_dev_gen;
499 /* Verify the session physical address is known */
500 rte_iova_t session_paddr = rte_mempool_virt2iova(session);
501 if (session_paddr == 0 || session_paddr == RTE_BAD_IOVA) {
503 "Session physical address unknown. Bad memory pool.");
507 memset(session, 0, sizeof(*session));
508 /* Set context descriptor physical address */
509 session->cd_paddr = session_paddr +
510 offsetof(struct qat_sym_session, cd);
512 session->dev_id = internals->dev_id;
513 session->qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_NONE;
516 /* Get requested QAT command id */
517 qat_cmd_id = qat_get_cmd_id(xform);
518 if (qat_cmd_id < 0 || qat_cmd_id >= ICP_QAT_FW_LA_CMD_DELIMITER) {
519 QAT_LOG(ERR, "Unsupported xform chain requested");
522 session->qat_cmd = (enum icp_qat_fw_la_cmd_id)qat_cmd_id;
523 switch (session->qat_cmd) {
524 case ICP_QAT_FW_LA_CMD_CIPHER:
525 ret = qat_sym_session_configure_cipher(dev, xform, session);
529 case ICP_QAT_FW_LA_CMD_AUTH:
530 ret = qat_sym_session_configure_auth(dev, xform, session);
533 session->is_single_pass_gmac =
534 qat_dev_gen == QAT_GEN3 &&
535 xform->auth.algo == RTE_CRYPTO_AUTH_AES_GMAC &&
536 xform->auth.iv.length == QAT_AES_GCM_SPC_IV_SIZE;
538 case ICP_QAT_FW_LA_CMD_CIPHER_HASH:
539 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
540 ret = qat_sym_session_configure_aead(dev, xform,
545 ret = qat_sym_session_configure_cipher(dev,
549 ret = qat_sym_session_configure_auth(dev,
555 case ICP_QAT_FW_LA_CMD_HASH_CIPHER:
556 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
557 ret = qat_sym_session_configure_aead(dev, xform,
562 ret = qat_sym_session_configure_auth(dev,
566 ret = qat_sym_session_configure_cipher(dev,
572 case ICP_QAT_FW_LA_CMD_TRNG_GET_RANDOM:
573 case ICP_QAT_FW_LA_CMD_TRNG_TEST:
574 case ICP_QAT_FW_LA_CMD_SSL3_KEY_DERIVE:
575 case ICP_QAT_FW_LA_CMD_TLS_V1_1_KEY_DERIVE:
576 case ICP_QAT_FW_LA_CMD_TLS_V1_2_KEY_DERIVE:
577 case ICP_QAT_FW_LA_CMD_MGF1:
578 case ICP_QAT_FW_LA_CMD_AUTH_PRE_COMP:
579 case ICP_QAT_FW_LA_CMD_CIPHER_PRE_COMP:
580 case ICP_QAT_FW_LA_CMD_DELIMITER:
581 QAT_LOG(ERR, "Unsupported Service %u",
585 QAT_LOG(ERR, "Unsupported Service %u",
589 qat_sym_session_finalize(session);
591 return qat_sym_gen_dev_ops[qat_dev_gen].set_session((void *)dev,
596 qat_sym_session_handle_single_pass(struct qat_sym_session *session,
597 const struct rte_crypto_aead_xform *aead_xform)
599 session->is_single_pass = 1;
600 session->is_auth = 1;
601 session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER;
602 /* Chacha-Poly is a special case that uses QAT CTR mode */
603 if (aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM)
604 session->qat_mode = ICP_QAT_HW_CIPHER_AEAD_MODE;
606 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
608 session->cipher_iv.offset = aead_xform->iv.offset;
609 session->cipher_iv.length = aead_xform->iv.length;
610 session->aad_len = aead_xform->aad_length;
611 session->digest_length = aead_xform->digest_length;
613 if (aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
614 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
615 session->auth_op = ICP_QAT_HW_AUTH_GENERATE;
617 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
618 session->auth_op = ICP_QAT_HW_AUTH_VERIFY;
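/*
 * Illustrative helper (hypothetical, not driver code): this mirrors,
 * roughly, the conditions under which qat_sym_session_configure_aead()
 * below routes AES-GCM into the single-pass path - a 12-byte IV on a
 * GEN3 or newer device.
 */
static int
would_use_single_pass_example(enum qat_device_gen gen,
		const struct rte_crypto_aead_xform *aead)
{
	return aead->algo == RTE_CRYPTO_AEAD_AES_GCM &&
		aead->iv.length == 12 && gen >= QAT_GEN3;
}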
625 qat_sym_session_configure_auth(struct rte_cryptodev *dev,
626 struct rte_crypto_sym_xform *xform,
627 struct qat_sym_session *session)
629 struct rte_crypto_auth_xform *auth_xform = qat_get_auth_xform(xform);
630 struct qat_cryptodev_private *internals = dev->data->dev_private;
631 const uint8_t *key_data = auth_xform->key.data;
632 uint8_t key_length = auth_xform->key.length;
633 enum qat_device_gen qat_dev_gen =
634 internals->qat_dev->qat_dev_gen;
636 session->aes_cmac = 0;
637 session->auth_key_length = auth_xform->key.length;
638 session->auth_iv.offset = auth_xform->iv.offset;
639 session->auth_iv.length = auth_xform->iv.length;
640 session->auth_mode = ICP_QAT_HW_AUTH_MODE1;
641 session->is_auth = 1;
642 session->digest_length = auth_xform->digest_length;
644 switch (auth_xform->algo) {
645 case RTE_CRYPTO_AUTH_SHA1:
646 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA1;
647 session->auth_mode = ICP_QAT_HW_AUTH_MODE0;
649 case RTE_CRYPTO_AUTH_SHA224:
650 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA224;
651 session->auth_mode = ICP_QAT_HW_AUTH_MODE0;
653 case RTE_CRYPTO_AUTH_SHA256:
654 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA256;
655 session->auth_mode = ICP_QAT_HW_AUTH_MODE0;
657 case RTE_CRYPTO_AUTH_SHA384:
658 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA384;
659 session->auth_mode = ICP_QAT_HW_AUTH_MODE0;
661 case RTE_CRYPTO_AUTH_SHA512:
662 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA512;
663 session->auth_mode = ICP_QAT_HW_AUTH_MODE0;
665 case RTE_CRYPTO_AUTH_SHA1_HMAC:
666 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA1;
668 case RTE_CRYPTO_AUTH_SHA224_HMAC:
669 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA224;
671 case RTE_CRYPTO_AUTH_SHA256_HMAC:
672 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA256;
674 case RTE_CRYPTO_AUTH_SHA384_HMAC:
675 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA384;
677 case RTE_CRYPTO_AUTH_SHA512_HMAC:
678 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA512;
680 case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
681 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
683 case RTE_CRYPTO_AUTH_AES_CMAC:
684 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
685 session->aes_cmac = 1;
687 case RTE_CRYPTO_AUTH_AES_GMAC:
688 if (qat_sym_validate_aes_key(auth_xform->key.length,
689 &session->qat_cipher_alg) != 0) {
690 QAT_LOG(ERR, "Invalid AES key size");
693 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
694 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
695 if (session->auth_iv.length == 0)
696 session->auth_iv.length = AES_GCM_J0_LEN;
698 session->is_iv12B = 1;
699 if (qat_dev_gen == QAT_GEN4) {
700 session->is_cnt_zero = 1;
704 case RTE_CRYPTO_AUTH_SNOW3G_UIA2:
705 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2;
707 case RTE_CRYPTO_AUTH_MD5_HMAC:
708 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_MD5;
710 case RTE_CRYPTO_AUTH_NULL:
711 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_NULL;
713 case RTE_CRYPTO_AUTH_KASUMI_F9:
714 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_KASUMI_F9;
716 case RTE_CRYPTO_AUTH_ZUC_EIA3:
717 if (!qat_is_auth_alg_supported(auth_xform->algo, internals)) {
718 QAT_LOG(ERR, "%s not supported on this device",
719 rte_crypto_auth_algorithm_strings
723 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3;
725 case RTE_CRYPTO_AUTH_MD5:
726 case RTE_CRYPTO_AUTH_AES_CBC_MAC:
727 QAT_LOG(ERR, "Crypto: Unsupported hash alg %u",
731 QAT_LOG(ERR, "Crypto: Undefined Hash algo %u specified",
736 if (auth_xform->algo == RTE_CRYPTO_AUTH_AES_GMAC) {
737 session->is_gmac = 1;
738 if (auth_xform->op == RTE_CRYPTO_AUTH_OP_GENERATE) {
739 session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER_HASH;
740 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
742 * The cipher descriptor content must be created first,
743 * then the authentication content
745 if (qat_sym_cd_cipher_set(session,
746 auth_xform->key.data,
747 auth_xform->key.length))
750 if (qat_sym_cd_auth_set(session,
754 auth_xform->digest_length,
758 session->qat_cmd = ICP_QAT_FW_LA_CMD_HASH_CIPHER;
759 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
761 * The authentication descriptor content must be created first,
765 if (qat_sym_cd_auth_set(session,
769 auth_xform->digest_length,
773 if (qat_sym_cd_cipher_set(session,
774 auth_xform->key.data,
775 auth_xform->key.length))
779 if (qat_sym_cd_auth_set(session,
783 auth_xform->digest_length,
792 qat_sym_session_configure_aead(struct rte_cryptodev *dev,
793 struct rte_crypto_sym_xform *xform,
794 struct qat_sym_session *session)
796 struct rte_crypto_aead_xform *aead_xform = &xform->aead;
797 enum rte_crypto_auth_operation crypto_operation;
798 struct qat_cryptodev_private *internals =
799 dev->data->dev_private;
800 enum qat_device_gen qat_dev_gen =
801 internals->qat_dev->qat_dev_gen;
804 * Store AEAD IV parameters as cipher IV,
805 * to avoid unnecessary memory usage
807 session->cipher_iv.offset = xform->aead.iv.offset;
808 session->cipher_iv.length = xform->aead.iv.length;
810 session->auth_mode = ICP_QAT_HW_AUTH_MODE1;
811 session->is_auth = 1;
812 session->digest_length = aead_xform->digest_length;
814 session->is_single_pass = 0;
815 switch (aead_xform->algo) {
816 case RTE_CRYPTO_AEAD_AES_GCM:
817 if (qat_sym_validate_aes_key(aead_xform->key.length,
818 &session->qat_cipher_alg) != 0) {
819 QAT_LOG(ERR, "Invalid AES key size");
822 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
823 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
825 if (qat_dev_gen == QAT_GEN4)
827 if (session->cipher_iv.length == 0) {
828 session->cipher_iv.length = AES_GCM_J0_LEN;
831 session->is_iv12B = 1;
832 if (qat_dev_gen < QAT_GEN3)
834 qat_sym_session_handle_single_pass(session,
837 case RTE_CRYPTO_AEAD_AES_CCM:
838 if (qat_sym_validate_aes_key(aead_xform->key.length,
839 &session->qat_cipher_alg) != 0) {
840 QAT_LOG(ERR, "Invalid AES key size");
843 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
844 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC;
845 if (qat_dev_gen == QAT_GEN4)
848 case RTE_CRYPTO_AEAD_CHACHA20_POLY1305:
849 if (aead_xform->key.length != ICP_QAT_HW_CHACHAPOLY_KEY_SZ)
851 if (qat_dev_gen == QAT_GEN4)
853 session->qat_cipher_alg =
854 ICP_QAT_HW_CIPHER_ALGO_CHACHA20_POLY1305;
855 qat_sym_session_handle_single_pass(session,
859 QAT_LOG(ERR, "Crypto: Undefined AEAD specified %u",
864 if (session->is_single_pass) {
865 if (qat_sym_cd_cipher_set(session,
866 aead_xform->key.data, aead_xform->key.length))
868 } else if ((aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT &&
869 aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) ||
870 (aead_xform->op == RTE_CRYPTO_AEAD_OP_DECRYPT &&
871 aead_xform->algo == RTE_CRYPTO_AEAD_AES_CCM)) {
872 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
874 * The cipher descriptor content must be created first,
875 * then the authentication content
877 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
878 RTE_CRYPTO_AUTH_OP_GENERATE : RTE_CRYPTO_AUTH_OP_VERIFY;
880 if (qat_sym_cd_cipher_set(session,
881 aead_xform->key.data,
882 aead_xform->key.length))
885 if (qat_sym_cd_auth_set(session,
886 aead_xform->key.data,
887 aead_xform->key.length,
888 aead_xform->aad_length,
889 aead_xform->digest_length,
893 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
895 * The authentication descriptor content must be created first,
899 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
900 RTE_CRYPTO_AUTH_OP_VERIFY : RTE_CRYPTO_AUTH_OP_GENERATE;
902 if (qat_sym_cd_auth_set(session,
903 aead_xform->key.data,
904 aead_xform->key.length,
905 aead_xform->aad_length,
906 aead_xform->digest_length,
910 if (qat_sym_cd_cipher_set(session,
911 aead_xform->key.data,
912 aead_xform->key.length))
919 unsigned int qat_sym_session_get_private_size(
920 struct rte_cryptodev *dev __rte_unused)
922 return RTE_ALIGN_CEIL(sizeof(struct qat_sym_session), 8);
925 /* returns block size in bytes per cipher algo */
926 int qat_cipher_get_block_size(enum icp_qat_hw_cipher_algo qat_cipher_alg)
928 switch (qat_cipher_alg) {
929 case ICP_QAT_HW_CIPHER_ALGO_DES:
930 return ICP_QAT_HW_DES_BLK_SZ;
931 case ICP_QAT_HW_CIPHER_ALGO_3DES:
932 return ICP_QAT_HW_3DES_BLK_SZ;
933 case ICP_QAT_HW_CIPHER_ALGO_AES128:
934 case ICP_QAT_HW_CIPHER_ALGO_AES192:
935 case ICP_QAT_HW_CIPHER_ALGO_AES256:
936 return ICP_QAT_HW_AES_BLK_SZ;
938 QAT_LOG(ERR, "invalid block cipher alg %u", qat_cipher_alg);
945 * Returns the size in bytes per hash algo for the state1 size field in cd_ctrl.
946 * This is the digest size rounded up to the nearest quadword
948 static int qat_hash_get_state1_size(enum icp_qat_hw_auth_algo qat_hash_alg)
950 switch (qat_hash_alg) {
951 case ICP_QAT_HW_AUTH_ALGO_SHA1:
952 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA1_STATE1_SZ,
953 QAT_HW_DEFAULT_ALIGNMENT);
954 case ICP_QAT_HW_AUTH_ALGO_SHA224:
955 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA224_STATE1_SZ,
956 QAT_HW_DEFAULT_ALIGNMENT);
957 case ICP_QAT_HW_AUTH_ALGO_SHA256:
958 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA256_STATE1_SZ,
959 QAT_HW_DEFAULT_ALIGNMENT);
960 case ICP_QAT_HW_AUTH_ALGO_SHA384:
961 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA384_STATE1_SZ,
962 QAT_HW_DEFAULT_ALIGNMENT);
963 case ICP_QAT_HW_AUTH_ALGO_SHA512:
964 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
965 QAT_HW_DEFAULT_ALIGNMENT);
966 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
967 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ,
968 QAT_HW_DEFAULT_ALIGNMENT);
969 case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
970 case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
971 return QAT_HW_ROUND_UP(ICP_QAT_HW_GALOIS_128_STATE1_SZ,
972 QAT_HW_DEFAULT_ALIGNMENT);
973 case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
974 return QAT_HW_ROUND_UP(ICP_QAT_HW_ZUC_3G_EIA3_STATE1_SZ,
975 QAT_HW_DEFAULT_ALIGNMENT);
976 case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
977 return QAT_HW_ROUND_UP(ICP_QAT_HW_SNOW_3G_UIA2_STATE1_SZ,
978 QAT_HW_DEFAULT_ALIGNMENT);
979 case ICP_QAT_HW_AUTH_ALGO_MD5:
980 return QAT_HW_ROUND_UP(ICP_QAT_HW_MD5_STATE1_SZ,
981 QAT_HW_DEFAULT_ALIGNMENT);
982 case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
983 return QAT_HW_ROUND_UP(ICP_QAT_HW_KASUMI_F9_STATE1_SZ,
984 QAT_HW_DEFAULT_ALIGNMENT);
985 case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
986 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_CBC_MAC_STATE1_SZ,
987 QAT_HW_DEFAULT_ALIGNMENT);
988 case ICP_QAT_HW_AUTH_ALGO_NULL:
989 return QAT_HW_ROUND_UP(ICP_QAT_HW_NULL_STATE1_SZ,
990 QAT_HW_DEFAULT_ALIGNMENT);
991 case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
992 /* return maximum state1 size in this case */
993 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
994 QAT_HW_DEFAULT_ALIGNMENT);
996 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
1002 /* returns digest size in bytes per hash algo */
1003 static int qat_hash_get_digest_size(enum icp_qat_hw_auth_algo qat_hash_alg)
1005 switch (qat_hash_alg) {
1006 case ICP_QAT_HW_AUTH_ALGO_SHA1:
1007 return ICP_QAT_HW_SHA1_STATE1_SZ;
1008 case ICP_QAT_HW_AUTH_ALGO_SHA224:
1009 return ICP_QAT_HW_SHA224_STATE1_SZ;
1010 case ICP_QAT_HW_AUTH_ALGO_SHA256:
1011 return ICP_QAT_HW_SHA256_STATE1_SZ;
1012 case ICP_QAT_HW_AUTH_ALGO_SHA384:
1013 return ICP_QAT_HW_SHA384_STATE1_SZ;
1014 case ICP_QAT_HW_AUTH_ALGO_SHA512:
1015 return ICP_QAT_HW_SHA512_STATE1_SZ;
1016 case ICP_QAT_HW_AUTH_ALGO_MD5:
1017 return ICP_QAT_HW_MD5_STATE1_SZ;
1018 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
1019 return ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1020 case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
1021 /* return maximum digest size in this case */
1022 return ICP_QAT_HW_SHA512_STATE1_SZ;
1024 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
1030 /* returns block size in bytes per hash algo */
1031 static int qat_hash_get_block_size(enum icp_qat_hw_auth_algo qat_hash_alg)
1033 switch (qat_hash_alg) {
1034 case ICP_QAT_HW_AUTH_ALGO_SHA1:
1036 case ICP_QAT_HW_AUTH_ALGO_SHA224:
1037 return SHA256_CBLOCK;
1038 case ICP_QAT_HW_AUTH_ALGO_SHA256:
1039 return SHA256_CBLOCK;
1040 case ICP_QAT_HW_AUTH_ALGO_SHA384:
1041 return SHA512_CBLOCK;
1042 case ICP_QAT_HW_AUTH_ALGO_SHA512:
1043 return SHA512_CBLOCK;
1044 case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
1046 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
1047 return ICP_QAT_HW_AES_BLK_SZ;
1048 case ICP_QAT_HW_AUTH_ALGO_MD5:
1050 case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
1051 /* return maximum block size in this case */
1052 return SHA512_CBLOCK;
1054 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
1060 static int partial_hash_sha1(uint8_t *data_in, uint8_t *data_out)
1064 if (!SHA1_Init(&ctx))
1066 SHA1_Transform(&ctx, data_in);
1067 rte_memcpy(data_out, &ctx, SHA_DIGEST_LENGTH);
1071 static int partial_hash_sha224(uint8_t *data_in, uint8_t *data_out)
1075 if (!SHA224_Init(&ctx))
1077 SHA256_Transform(&ctx, data_in);
1078 rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
1082 static int partial_hash_sha256(uint8_t *data_in, uint8_t *data_out)
1086 if (!SHA256_Init(&ctx))
1088 SHA256_Transform(&ctx, data_in);
1089 rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
1093 static int partial_hash_sha384(uint8_t *data_in, uint8_t *data_out)
1097 if (!SHA384_Init(&ctx))
1099 SHA512_Transform(&ctx, data_in);
1100 rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
1104 static int partial_hash_sha512(uint8_t *data_in, uint8_t *data_out)
1108 if (!SHA512_Init(&ctx))
1110 SHA512_Transform(&ctx, data_in);
1111 rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
1115 static int partial_hash_md5(uint8_t *data_in, uint8_t *data_out)
1119 if (!MD5_Init(&ctx))
1121 MD5_Transform(&ctx, data_in);
1122 rte_memcpy(data_out, &ctx, MD5_DIGEST_LENGTH);
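/*
 * Illustrative check (sketch, not driver code; assumes <string.h> for
 * memcmp): the *_Transform() calls above run the raw compression
 * function on exactly one block, with no padding or length encoding,
 * so the copied-out state is a resumable IV rather than a final
 * digest. A one-shot SHA1() over the same block gives a different
 * value.
 */
static int
partial_vs_full_sha1_example(uint8_t block[SHA_CBLOCK])
{
	uint8_t partial[SHA_DIGEST_LENGTH];
	uint8_t full[SHA_DIGEST_LENGTH];

	if (partial_hash_sha1(block, partial))
		return -1;
	SHA1(block, SHA_CBLOCK, full); /* padded and finalized */
	return memcmp(partial, full, SHA_DIGEST_LENGTH) != 0; /* 1 */
}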
1128 partial_hash_compute(enum icp_qat_hw_auth_algo hash_alg,
1129 uint8_t *data_in, uint8_t *data_out)
1132 uint8_t digest[qat_hash_get_digest_size(
1133 ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1134 uint32_t *hash_state_out_be32;
1135 uint64_t *hash_state_out_be64;
1138 /* Initialize to avoid gcc warning */
1139 memset(digest, 0, sizeof(digest));
1141 digest_size = qat_hash_get_digest_size(hash_alg);
1142 if (digest_size <= 0)
1145 hash_state_out_be32 = (uint32_t *)data_out;
1146 hash_state_out_be64 = (uint64_t *)data_out;
1149 case ICP_QAT_HW_AUTH_ALGO_SHA1:
1150 if (partial_hash_sha1(data_in, digest))
1152 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
1153 *hash_state_out_be32 =
1154 rte_bswap32(*(((uint32_t *)digest)+i));
1156 case ICP_QAT_HW_AUTH_ALGO_SHA224:
1157 if (partial_hash_sha224(data_in, digest))
1159 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
1160 *hash_state_out_be32 =
1161 rte_bswap32(*(((uint32_t *)digest)+i));
1163 case ICP_QAT_HW_AUTH_ALGO_SHA256:
1164 if (partial_hash_sha256(data_in, digest))
1166 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
1167 *hash_state_out_be32 =
1168 rte_bswap32(*(((uint32_t *)digest)+i));
1170 case ICP_QAT_HW_AUTH_ALGO_SHA384:
1171 if (partial_hash_sha384(data_in, digest))
1173 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
1174 *hash_state_out_be64 =
1175 rte_bswap64(*(((uint64_t *)digest)+i));
1177 case ICP_QAT_HW_AUTH_ALGO_SHA512:
1178 if (partial_hash_sha512(data_in, digest))
1180 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
1181 *hash_state_out_be64 =
1182 rte_bswap64(*(((uint64_t *)digest)+i));
1184 case ICP_QAT_HW_AUTH_ALGO_MD5:
1185 if (partial_hash_md5(data_in, data_out))
1189 QAT_LOG(ERR, "invalid hash alg %u", hash_alg);
1195 #define HMAC_IPAD_VALUE 0x36
1196 #define HMAC_OPAD_VALUE 0x5c
1197 #define HASH_XCBC_PRECOMP_KEY_NUM 3
1199 static const uint8_t AES_CMAC_SEED[ICP_QAT_HW_AES_128_KEY_SZ];
1201 static void aes_cmac_key_derive(uint8_t *base, uint8_t *derived)
1205 derived[0] = base[0] << 1;
1206 for (i = 1; i < ICP_QAT_HW_AES_BLK_SZ ; i++) {
1207 derived[i] = base[i] << 1;
1208 derived[i - 1] |= base[i] >> 7;
1209 }
1211 if (base[0] & 0x80)
1212 derived[ICP_QAT_HW_AES_BLK_SZ - 1] ^= QAT_AES_CMAC_CONST_RB;
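/*
 * Worked example (RFC 4493 Appendix A vectors, quoted for illustration
 * only): starting from L = AES-128(K, 0^128) for
 * K = 2b7e1516 28aed2a6 abf71588 09cf4f3c, the MSB of L is 0, so
 * K1 = L << 1 with no Rb reduction; the MSB of K1 is 1, so
 * K2 = (K1 << 1) ^ Rb.
 */
static void
aes_cmac_subkey_example(void)
{
	uint8_t l[ICP_QAT_HW_AES_BLK_SZ] = {
		0x7d, 0xf7, 0x6b, 0x0c, 0x1a, 0xb8, 0x99, 0xb3,
		0x3e, 0x42, 0xf0, 0x47, 0xb9, 0x1b, 0x54, 0x6f
	};
	uint8_t k1[ICP_QAT_HW_AES_BLK_SZ];
	uint8_t k2[ICP_QAT_HW_AES_BLK_SZ];

	aes_cmac_key_derive(l, k1);  /* fbeed618 35713366 7c85e08f 7236a8de */
	aes_cmac_key_derive(k1, k2); /* f7ddac30 6ae266cc f90bc11e e46d513b */
}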
1215 static int qat_sym_do_precomputes(enum icp_qat_hw_auth_algo hash_alg,
1216 const uint8_t *auth_key,
1217 uint16_t auth_keylen,
1218 uint8_t *p_state_buf,
1219 uint16_t *p_state_len,
1223 uint8_t ipad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1224 uint8_t opad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1227 if (hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC) {
1233 uint8_t k0[ICP_QAT_HW_AES_128_KEY_SZ];
1236 auth_keylen = ICP_QAT_HW_AES_128_KEY_SZ;
1238 in = rte_zmalloc("AES CMAC K1",
1239 ICP_QAT_HW_AES_128_KEY_SZ, 16);
1242 QAT_LOG(ERR, "Failed to alloc memory");
1246 rte_memcpy(in, AES_CMAC_SEED,
1247 ICP_QAT_HW_AES_128_KEY_SZ);
1248 rte_memcpy(p_state_buf, auth_key, auth_keylen);
1250 if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1256 AES_encrypt(in, k0, &enc_key);
1258 k1 = p_state_buf + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1259 k2 = k1 + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1261 aes_cmac_key_derive(k0, k1);
1262 aes_cmac_key_derive(k1, k2);
1264 memset(k0, 0, ICP_QAT_HW_AES_128_KEY_SZ);
1265 *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
1269 static uint8_t qat_aes_xcbc_key_seed[
1270 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ] = {
1271 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1272 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1273 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1274 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1275 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1276 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1280 uint8_t *out = p_state_buf;
1284 in = rte_zmalloc("working mem for key",
1285 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ, 16);
1287 QAT_LOG(ERR, "Failed to alloc memory");
1291 rte_memcpy(in, qat_aes_xcbc_key_seed,
1292 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1293 for (x = 0; x < HASH_XCBC_PRECOMP_KEY_NUM; x++) {
1294 if (AES_set_encrypt_key(auth_key,
1298 (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ));
1300 (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ),
1301 0, ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1304 AES_encrypt(in, out, &enc_key);
1305 in += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1306 out += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1308 *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
1309 rte_free(in - x*ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ);
1313 } else if ((hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_128) ||
1314 (hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_64)) {
1316 uint8_t *out = p_state_buf;
1319 memset(p_state_buf, 0, ICP_QAT_HW_GALOIS_H_SZ +
1320 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1321 ICP_QAT_HW_GALOIS_E_CTR0_SZ);
1322 in = rte_zmalloc("working mem for key",
1323 ICP_QAT_HW_GALOIS_H_SZ, 16);
1325 QAT_LOG(ERR, "Failed to alloc memory");
1329 memset(in, 0, ICP_QAT_HW_GALOIS_H_SZ);
1330 if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1334 AES_encrypt(in, out, &enc_key);
1335 *p_state_len = ICP_QAT_HW_GALOIS_H_SZ +
1336 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1337 ICP_QAT_HW_GALOIS_E_CTR0_SZ;
1342 block_size = qat_hash_get_block_size(hash_alg);
1345 /* init ipad and opad from key and xor with fixed values */
1346 memset(ipad, 0, block_size);
1347 memset(opad, 0, block_size);
1349 if (auth_keylen > (unsigned int)block_size) {
1350 QAT_LOG(ERR, "invalid keylen %u", auth_keylen);
1353 rte_memcpy(ipad, auth_key, auth_keylen);
1354 rte_memcpy(opad, auth_key, auth_keylen);
1356 for (i = 0; i < block_size; i++) {
1357 uint8_t *ipad_ptr = ipad + i;
1358 uint8_t *opad_ptr = opad + i;
1359 *ipad_ptr ^= HMAC_IPAD_VALUE;
1360 *opad_ptr ^= HMAC_OPAD_VALUE;
1363 /* do partial hash of ipad and copy to state1 */
1364 if (partial_hash_compute(hash_alg, ipad, p_state_buf)) {
1365 memset(ipad, 0, block_size);
1366 memset(opad, 0, block_size);
1367 QAT_LOG(ERR, "ipad precompute failed");
1372 * State length is a multiple of 8, so it may be larger than the digest.
1373 * Put the partial hash of opad state_len bytes after state1
1375 *p_state_len = qat_hash_get_state1_size(hash_alg);
1376 if (partial_hash_compute(hash_alg, opad, p_state_buf + *p_state_len)) {
1377 memset(ipad, 0, block_size);
1378 memset(opad, 0, block_size);
1379 QAT_LOG(ERR, "opad precompute failed");
1383 /* don't leave data lying around */
1384 memset(ipad, 0, block_size);
1385 memset(opad, 0, block_size);
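/*
 * Cross-check sketch (assumption, not driver code; needs
 * <openssl/hmac.h>): the two partial hashes written above are the
 * compression states after one block of (key ^ ipad) and (key ^ opad);
 * firmware resumes from them so that
 * HMAC(K, m) = H((K ^ opad) || H((K ^ ipad) || m)) without re-hashing
 * the padded key per request. A one-shot libcrypto HMAC over the same
 * key and message can validate the firmware result.
 */
static int
hmac_sha1_reference_example(const uint8_t *key, int keylen,
		const uint8_t *msg, size_t msglen,
		uint8_t digest[SHA_DIGEST_LENGTH])
{
	unsigned int dlen = 0;

	if (HMAC(EVP_sha1(), key, keylen, msg, msglen,
			digest, &dlen) == NULL)
		return -1;
	return (int)dlen; /* 20 for SHA-1 */
}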
1390 qat_sym_session_init_common_hdr(struct qat_sym_session *session)
1392 struct icp_qat_fw_la_bulk_req *req_tmpl = &session->fw_req;
1393 struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
1394 enum qat_sym_proto_flag proto_flags = session->qat_proto_flag;
1395 uint32_t slice_flags = session->slice_types;
1398 ICP_QAT_FW_COMN_HDR_FLAGS_BUILD(ICP_QAT_FW_COMN_REQ_FLAG_SET);
1399 header->service_type = ICP_QAT_FW_COMN_REQ_CPM_FW_LA;
1400 header->service_cmd_id = session->qat_cmd;
1401 header->comn_req_flags =
1402 ICP_QAT_FW_COMN_FLAGS_BUILD(QAT_COMN_CD_FLD_TYPE_64BIT_ADR,
1403 QAT_COMN_PTR_TYPE_FLAT);
1404 ICP_QAT_FW_LA_PARTIAL_SET(header->serv_specif_flags,
1405 ICP_QAT_FW_LA_PARTIAL_NONE);
1406 ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(header->serv_specif_flags,
1407 ICP_QAT_FW_CIPH_IV_16BYTE_DATA);
1409 switch (proto_flags) {
1410 case QAT_CRYPTO_PROTO_FLAG_NONE:
1411 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1412 ICP_QAT_FW_LA_NO_PROTO);
1414 case QAT_CRYPTO_PROTO_FLAG_CCM:
1415 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1416 ICP_QAT_FW_LA_CCM_PROTO);
1418 case QAT_CRYPTO_PROTO_FLAG_GCM:
1419 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1420 ICP_QAT_FW_LA_GCM_PROTO);
1422 case QAT_CRYPTO_PROTO_FLAG_SNOW3G:
1423 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1424 ICP_QAT_FW_LA_SNOW_3G_PROTO);
1426 case QAT_CRYPTO_PROTO_FLAG_ZUC:
1427 ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(header->serv_specif_flags,
1428 ICP_QAT_FW_LA_ZUC_3G_PROTO);
1432 /* More than one of the following flags can be set at once */
1433 if (QAT_SESSION_IS_SLICE_SET(slice_flags, QAT_CRYPTO_SLICE_SPC)) {
1434 ICP_QAT_FW_LA_SINGLE_PASS_PROTO_FLAG_SET(
1435 header->serv_specif_flags,
1436 ICP_QAT_FW_LA_SINGLE_PASS_PROTO);
1438 if (QAT_SESSION_IS_SLICE_SET(slice_flags, QAT_CRYPTO_SLICE_UCS)) {
1439 ICP_QAT_FW_LA_SLICE_TYPE_SET(
1440 header->serv_specif_flags,
1441 ICP_QAT_FW_LA_USE_UCS_SLICE_TYPE);
1444 if (session->is_auth) {
1445 if (session->auth_op == ICP_QAT_HW_AUTH_VERIFY) {
1446 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1447 ICP_QAT_FW_LA_NO_RET_AUTH_RES);
1448 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1449 ICP_QAT_FW_LA_CMP_AUTH_RES);
1450 } else if (session->auth_op == ICP_QAT_HW_AUTH_GENERATE) {
1451 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1452 ICP_QAT_FW_LA_RET_AUTH_RES);
1453 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1454 ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
1457 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1458 ICP_QAT_FW_LA_NO_RET_AUTH_RES);
1459 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1460 ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
1463 if (session->is_iv12B) {
1464 ICP_QAT_FW_LA_GCM_IV_LEN_FLAG_SET(
1465 header->serv_specif_flags,
1466 ICP_QAT_FW_LA_GCM_IV_LEN_12_OCTETS);
1469 ICP_QAT_FW_LA_UPDATE_STATE_SET(header->serv_specif_flags,
1470 ICP_QAT_FW_LA_NO_UPDATE_STATE);
1471 ICP_QAT_FW_LA_DIGEST_IN_BUFFER_SET(header->serv_specif_flags,
1472 ICP_QAT_FW_LA_NO_DIGEST_IN_BUFFER);
1475 int qat_sym_cd_cipher_set(struct qat_sym_session *cdesc,
1476 const uint8_t *cipherkey,
1477 uint32_t cipherkeylen)
1479 struct icp_qat_hw_cipher_algo_blk *cipher;
1480 struct icp_qat_hw_cipher_algo_blk20 *cipher20;
1481 struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
1482 struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
1483 struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
1484 void *ptr = &req_tmpl->cd_ctrl;
1485 struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
1486 struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
1487 enum icp_qat_hw_cipher_convert key_convert;
1488 struct icp_qat_fw_la_cipher_20_req_params *req_ucs =
1489 (struct icp_qat_fw_la_cipher_20_req_params *)
1490 &cdesc->fw_req.serv_specif_rqpars;
1491 struct icp_qat_fw_la_cipher_req_params *req_cipher =
1492 (struct icp_qat_fw_la_cipher_req_params *)
1493 &cdesc->fw_req.serv_specif_rqpars;
1494 uint32_t total_key_size;
1495 uint16_t cipher_offset, cd_size;
1496 uint32_t wordIndex = 0;
1497 uint32_t *temp_key = NULL;
1499 if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER) {
1500 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1501 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1502 ICP_QAT_FW_SLICE_CIPHER);
1503 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1504 ICP_QAT_FW_SLICE_DRAM_WR);
1505 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1506 ICP_QAT_FW_LA_NO_RET_AUTH_RES);
1507 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1508 ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
1509 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1510 } else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
1511 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1512 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1513 ICP_QAT_FW_SLICE_CIPHER);
1514 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1515 ICP_QAT_FW_SLICE_AUTH);
1516 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1517 ICP_QAT_FW_SLICE_AUTH);
1518 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1519 ICP_QAT_FW_SLICE_DRAM_WR);
1520 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1521 } else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
1522 QAT_LOG(ERR, "Invalid param, must be a cipher command.");
1526 if (cdesc->qat_mode == ICP_QAT_HW_CIPHER_CTR_MODE) {
1528 * CTR streaming ciphers are a special case: decrypt = encrypt,
1529 * overriding the default values set previously.
1530 * Chacha20-Poly1305 is a special case: CTR mode but single-pass,
1531 * so both directions need to be handled.
1533 cdesc->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
1534 if (cdesc->qat_cipher_alg ==
1535 ICP_QAT_HW_CIPHER_ALGO_CHACHA20_POLY1305 &&
1536 cdesc->auth_op == ICP_QAT_HW_AUTH_VERIFY) {
1537 cdesc->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
1539 key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
1540 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2
1541 || cdesc->qat_cipher_alg ==
1542 ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)
1543 key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
1544 else if (cdesc->qat_dir == ICP_QAT_HW_CIPHER_ENCRYPT)
1545 key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
1546 else if (cdesc->qat_mode == ICP_QAT_HW_CIPHER_AEAD_MODE)
1547 key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
1549 key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
1551 if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
1552 total_key_size = ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ +
1553 ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
1554 cipher_cd_ctrl->cipher_state_sz =
1555 ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
1556 cdesc->qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
1558 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
1559 total_key_size = ICP_QAT_HW_KASUMI_F8_KEY_SZ;
1560 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_KASUMI_BLK_SZ >> 3;
1561 cipher_cd_ctrl->cipher_padding_sz =
1562 (2 * ICP_QAT_HW_KASUMI_BLK_SZ) >> 3;
1563 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES) {
1564 total_key_size = ICP_QAT_HW_3DES_KEY_SZ;
1565 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_3DES_BLK_SZ >> 3;
1566 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_DES) {
1567 total_key_size = ICP_QAT_HW_DES_KEY_SZ;
1568 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_DES_BLK_SZ >> 3;
1569 } else if (cdesc->qat_cipher_alg ==
1570 ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
1571 total_key_size = ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ +
1572 ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
1573 cipher_cd_ctrl->cipher_state_sz =
1574 ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
1575 cdesc->qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
1577 total_key_size = cipherkeylen;
1578 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_AES_BLK_SZ >> 3;
1580 cipher_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
1581 cipher_cd_ctrl->cipher_cfg_offset = cipher_offset >> 3;
1583 cipher = (struct icp_qat_hw_cipher_algo_blk *)cdesc->cd_cur_ptr;
1584 cipher20 = (struct icp_qat_hw_cipher_algo_blk20 *)cdesc->cd_cur_ptr;
1585 cipher->cipher_config.val =
1586 ICP_QAT_HW_CIPHER_CONFIG_BUILD(cdesc->qat_mode,
1587 cdesc->qat_cipher_alg, key_convert,
1590 if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
1591 temp_key = (uint32_t *)(cdesc->cd_cur_ptr +
1592 sizeof(struct icp_qat_hw_cipher_config)
1594 memcpy(cipher->key, cipherkey, cipherkeylen);
1595 memcpy(temp_key, cipherkey, cipherkeylen);
1597 /* XOR the key with the KASUMI F8 key modifier, 4 bytes at a time */
1598 for (wordIndex = 0; wordIndex < (cipherkeylen >> 2);
1600 temp_key[wordIndex] ^= KASUMI_F8_KEY_MODIFIER_4_BYTES;
1602 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1603 cipherkeylen + cipherkeylen;
1604 } else if (cdesc->is_ucs) {
1605 const uint8_t *final_key = cipherkey;
1607 cdesc->slice_types |= QAT_CRYPTO_SLICE_UCS;
1608 total_key_size = RTE_ALIGN_CEIL(cipherkeylen,
1609 ICP_QAT_HW_AES_128_KEY_SZ);
1610 cipher20->cipher_config.reserved[0] = 0;
1611 cipher20->cipher_config.reserved[1] = 0;
1612 cipher20->cipher_config.reserved[2] = 0;
1614 rte_memcpy(cipher20->key, final_key, cipherkeylen);
1615 cdesc->cd_cur_ptr +=
1616 sizeof(struct icp_qat_hw_ucs_cipher_config) +
1619 memcpy(cipher->key, cipherkey, cipherkeylen);
1620 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1624 if (cdesc->is_single_pass) {
1625 QAT_FIELD_SET(cipher->cipher_config.val,
1626 cdesc->digest_length,
1627 QAT_CIPHER_AEAD_HASH_CMP_LEN_BITPOS,
1628 QAT_CIPHER_AEAD_HASH_CMP_LEN_MASK);
1629 /* UCS and SPC 1.8/2.0 share the layout of the 2nd config word */
1630 cdesc->cd.cipher.cipher_config.reserved =
1631 ICP_QAT_HW_CIPHER_CONFIG_BUILD_UPPER(
1633 cdesc->slice_types |= QAT_CRYPTO_SLICE_SPC;
1636 if (total_key_size > cipherkeylen) {
1637 uint32_t padding_size = total_key_size-cipherkeylen;
1638 if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
1639 && (cipherkeylen == QAT_3DES_KEY_SZ_OPT2)) {
1640 /* K3 not provided, so set K3 = K1 */
1641 memcpy(cdesc->cd_cur_ptr, cipherkey, padding_size);
1642 } else if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
1643 && (cipherkeylen == QAT_3DES_KEY_SZ_OPT3)) {
1644 /* K2 and K3 not provided, so set K2 = K3 = K1 */
1645 memcpy(cdesc->cd_cur_ptr, cipherkey,
1647 memcpy(cdesc->cd_cur_ptr+cipherkeylen,
1648 cipherkey, cipherkeylen);
1650 memset(cdesc->cd_cur_ptr, 0, padding_size);
1652 cdesc->cd_cur_ptr += padding_size;
1654 if (cdesc->is_ucs) {
1656 * These fields occupy the same positions as the
1657 * corresponding auth slice request fields
1659 req_ucs->spc_auth_res_sz = cdesc->digest_length;
1660 if (!cdesc->is_gmac) {
1661 req_ucs->spc_aad_sz = cdesc->aad_len;
1662 req_ucs->spc_aad_offset = 0;
1664 } else if (cdesc->is_single_pass) {
1665 req_cipher->spc_aad_sz = cdesc->aad_len;
1666 req_cipher->spc_auth_res_sz = cdesc->digest_length;
1668 cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
1669 cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;
1670 cipher_cd_ctrl->cipher_key_sz = total_key_size >> 3;
1675 int qat_sym_cd_auth_set(struct qat_sym_session *cdesc,
1676 const uint8_t *authkey,
1677 uint32_t authkeylen,
1678 uint32_t aad_length,
1679 uint32_t digestsize,
1680 unsigned int operation)
1682 struct icp_qat_hw_auth_setup *hash;
1683 struct icp_qat_hw_cipher_algo_blk *cipherconfig;
1684 struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
1685 struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
1686 void *ptr = &req_tmpl->cd_ctrl;
1687 struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
1688 struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
1689 struct icp_qat_fw_la_auth_req_params *auth_param =
1690 (struct icp_qat_fw_la_auth_req_params *)
1691 ((char *)&req_tmpl->serv_specif_rqpars +
1692 ICP_QAT_FW_HASH_REQUEST_PARAMETERS_OFFSET);
1693 uint16_t state1_size = 0, state2_size = 0, cd_extra_size = 0;
1694 uint16_t hash_offset, cd_size;
1695 uint32_t *aad_len = NULL;
1696 uint32_t wordIndex = 0;
1699 if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_AUTH) {
1700 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1701 ICP_QAT_FW_SLICE_AUTH);
1702 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1703 ICP_QAT_FW_SLICE_DRAM_WR);
1704 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1705 } else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
1706 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1707 ICP_QAT_FW_SLICE_AUTH);
1708 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1709 ICP_QAT_FW_SLICE_CIPHER);
1710 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1711 ICP_QAT_FW_SLICE_CIPHER);
1712 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1713 ICP_QAT_FW_SLICE_DRAM_WR);
1714 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1715 } else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
1716 QAT_LOG(ERR, "Invalid param, must be a hash command.");
1720 if (operation == RTE_CRYPTO_AUTH_OP_VERIFY)
1721 cdesc->auth_op = ICP_QAT_HW_AUTH_VERIFY;
1723 cdesc->auth_op = ICP_QAT_HW_AUTH_GENERATE;
1726 * Set up the inner hash config
1728 hash_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
1729 hash = (struct icp_qat_hw_auth_setup *)cdesc->cd_cur_ptr;
1730 hash->auth_config.reserved = 0;
1731 hash->auth_config.config =
1732 ICP_QAT_HW_AUTH_CONFIG_BUILD(cdesc->auth_mode,
1733 cdesc->qat_hash_alg, digestsize);
1735 if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0
1736 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2
1737 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_KASUMI_F9
1738 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3
1739 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC
1740 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC
1741 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_NULL
1742 || cdesc->is_cnt_zero
1744 hash->auth_counter.counter = 0;
1746 int block_size = qat_hash_get_block_size(cdesc->qat_hash_alg);
1750 hash->auth_counter.counter = rte_bswap32(block_size);
1753 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_auth_setup);
1756 * cd_cur_ptr now points at the state1 information.
1758 switch (cdesc->qat_hash_alg) {
1759 case ICP_QAT_HW_AUTH_ALGO_SHA1:
1760 if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0) {
1762 rte_memcpy(cdesc->cd_cur_ptr, sha1InitialState,
1763 sizeof(sha1InitialState));
1764 state1_size = qat_hash_get_state1_size(
1765 cdesc->qat_hash_alg);
1769 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA1, authkey,
1770 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1772 QAT_LOG(ERR, "(SHA)precompute failed");
1775 state2_size = RTE_ALIGN_CEIL(ICP_QAT_HW_SHA1_STATE2_SZ, 8);
1777 case ICP_QAT_HW_AUTH_ALGO_SHA224:
1778 if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0) {
1780 rte_memcpy(cdesc->cd_cur_ptr, sha224InitialState,
1781 sizeof(sha224InitialState));
1782 state1_size = qat_hash_get_state1_size(
1783 cdesc->qat_hash_alg);
1787 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA224, authkey,
1788 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1790 QAT_LOG(ERR, "(SHA)precompute failed");
1793 state2_size = ICP_QAT_HW_SHA224_STATE2_SZ;
1795 case ICP_QAT_HW_AUTH_ALGO_SHA256:
1796 if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0) {
1798 rte_memcpy(cdesc->cd_cur_ptr, sha256InitialState,
1799 sizeof(sha256InitialState));
1800 state1_size = qat_hash_get_state1_size(
1801 cdesc->qat_hash_alg);
1805 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA256, authkey,
1806 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1808 QAT_LOG(ERR, "(SHA)precompute failed");
1811 state2_size = ICP_QAT_HW_SHA256_STATE2_SZ;
1813 case ICP_QAT_HW_AUTH_ALGO_SHA384:
1814 if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0) {
1816 rte_memcpy(cdesc->cd_cur_ptr, sha384InitialState,
1817 sizeof(sha384InitialState));
1818 state1_size = qat_hash_get_state1_size(
1819 cdesc->qat_hash_alg);
1823 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA384, authkey,
1824 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1826 QAT_LOG(ERR, "(SHA)precompute failed");
1829 state2_size = ICP_QAT_HW_SHA384_STATE2_SZ;
1831 case ICP_QAT_HW_AUTH_ALGO_SHA512:
1832 if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0) {
1834 rte_memcpy(cdesc->cd_cur_ptr, sha512InitialState,
1835 sizeof(sha512InitialState));
1836 state1_size = qat_hash_get_state1_size(
1837 cdesc->qat_hash_alg);
1841 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA512, authkey,
1842 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1844 QAT_LOG(ERR, "(SHA)precompute failed");
1847 state2_size = ICP_QAT_HW_SHA512_STATE2_SZ;
1849 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
1850 state1_size = ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1852 if (cdesc->aes_cmac)
1853 memset(cdesc->cd_cur_ptr, 0, state1_size);
1854 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC,
1855 authkey, authkeylen, cdesc->cd_cur_ptr + state1_size,
1856 &state2_size, cdesc->aes_cmac)) {
1857 cdesc->aes_cmac ? QAT_LOG(ERR,
1858 "(CMAC)precompute failed")
1860 "(XCBC)precompute failed");
1864 case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
1865 case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
1866 cdesc->qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
1867 state1_size = ICP_QAT_HW_GALOIS_128_STATE1_SZ;
1868 if (qat_sym_do_precomputes(cdesc->qat_hash_alg, authkey,
1869 authkeylen, cdesc->cd_cur_ptr + state1_size,
1870 &state2_size, cdesc->aes_cmac)) {
1871 QAT_LOG(ERR, "(GCM)precompute failed");
1875 * Write the AAD length into bytes 16-19 of state2
1876 * in big-endian format. This field is 8 bytes
1878 auth_param->u2.aad_sz =
1879 RTE_ALIGN_CEIL(aad_length, 16);
1880 auth_param->hash_state_sz = (auth_param->u2.aad_sz) >> 3;
1882 aad_len = (uint32_t *)(cdesc->cd_cur_ptr +
1883 ICP_QAT_HW_GALOIS_128_STATE1_SZ +
1884 ICP_QAT_HW_GALOIS_H_SZ);
1885 *aad_len = rte_bswap32(aad_length);
1886 cdesc->aad_len = aad_length;
1888 case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
1889 cdesc->qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
1890 state1_size = qat_hash_get_state1_size(
1891 ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2);
1892 state2_size = ICP_QAT_HW_SNOW_3G_UIA2_STATE2_SZ;
1893 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
1895 cipherconfig = (struct icp_qat_hw_cipher_algo_blk *)
1896 (cdesc->cd_cur_ptr + state1_size + state2_size);
1897 cipherconfig->cipher_config.val =
1898 ICP_QAT_HW_CIPHER_CONFIG_BUILD(ICP_QAT_HW_CIPHER_ECB_MODE,
1899 ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2,
1900 ICP_QAT_HW_CIPHER_KEY_CONVERT,
1901 ICP_QAT_HW_CIPHER_ENCRYPT);
1902 memcpy(cipherconfig->key, authkey, authkeylen);
1903 memset(cipherconfig->key + authkeylen,
1904 0, ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ);
1905 cd_extra_size += sizeof(struct icp_qat_hw_cipher_config) +
1906 authkeylen + ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
1907 auth_param->hash_state_sz = ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
1909 case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
1910 hash->auth_config.config =
1911 ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE0,
1912 cdesc->qat_hash_alg, digestsize);
1913 cdesc->qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
1914 state1_size = qat_hash_get_state1_size(
1915 ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3);
1916 state2_size = ICP_QAT_HW_ZUC_3G_EIA3_STATE2_SZ;
1917 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size
1918 + ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ);
1920 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1921 cd_extra_size += ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
1922 auth_param->hash_state_sz = ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
1925 case ICP_QAT_HW_AUTH_ALGO_MD5:
1926 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_MD5, authkey,
1927 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1929 QAT_LOG(ERR, "(MD5)precompute failed");
1932 state2_size = ICP_QAT_HW_MD5_STATE2_SZ;
1934 case ICP_QAT_HW_AUTH_ALGO_NULL:
1935 state1_size = qat_hash_get_state1_size(
1936 ICP_QAT_HW_AUTH_ALGO_NULL);
1937 state2_size = ICP_QAT_HW_NULL_STATE2_SZ;
1939 case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
1940 cdesc->qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
1941 state1_size = qat_hash_get_state1_size(
1942 ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC);
1943 state2_size = ICP_QAT_HW_AES_CBC_MAC_KEY_SZ +
1944 ICP_QAT_HW_AES_CCM_CBC_E_CTR0_SZ;
1946 if (aad_length > 0) {
1947 aad_length += ICP_QAT_HW_CCM_AAD_B0_LEN +
1948 ICP_QAT_HW_CCM_AAD_LEN_INFO;
1949 auth_param->u2.aad_sz =
1950 RTE_ALIGN_CEIL(aad_length,
1951 ICP_QAT_HW_CCM_AAD_ALIGNMENT);
1953 auth_param->u2.aad_sz = ICP_QAT_HW_CCM_AAD_B0_LEN;
1955 cdesc->aad_len = aad_length;
1956 hash->auth_counter.counter = 0;
1958 hash_cd_ctrl->outer_prefix_sz = digestsize;
1959 auth_param->hash_state_sz = digestsize;
1961 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1963 case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
1964 state1_size = qat_hash_get_state1_size(
1965 ICP_QAT_HW_AUTH_ALGO_KASUMI_F9);
1966 state2_size = ICP_QAT_HW_KASUMI_F9_STATE2_SZ;
1967 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
1968 pTempKey = (uint32_t *)(cdesc->cd_cur_ptr + state1_size
1971 * The Inner Hash Initial State2 block must contain IK
1972 * (Initialisation Key), followed by IK XOR-ed with KM
1973 * (Key Modifier): IK||(IK^KM).
1975 /* write the auth key */
1976 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1977 /* initialise temp key with auth key */
1978 memcpy(pTempKey, authkey, authkeylen);
1979 /* XOR the key with the KASUMI F9 key modifier, 4 bytes at a time */
1980 for (wordIndex = 0; wordIndex < (authkeylen >> 2); wordIndex++)
1981 pTempKey[wordIndex] ^= KASUMI_F9_KEY_MODIFIER_4_BYTES;
1984 QAT_LOG(ERR, "Invalid HASH alg %u", cdesc->qat_hash_alg);
1988 /* Auth CD config setup */
1989 hash_cd_ctrl->hash_cfg_offset = hash_offset >> 3;
1990 hash_cd_ctrl->hash_flags = ICP_QAT_FW_AUTH_HDR_FLAG_NO_NESTED;
1991 hash_cd_ctrl->inner_res_sz = digestsize;
1992 hash_cd_ctrl->final_sz = digestsize;
1993 hash_cd_ctrl->inner_state1_sz = state1_size;
1994 auth_param->auth_res_sz = digestsize;
1996 hash_cd_ctrl->inner_state2_sz = state2_size;
1997 hash_cd_ctrl->inner_state2_offset = hash_cd_ctrl->hash_cfg_offset +
1998 ((sizeof(struct icp_qat_hw_auth_setup) +
1999 RTE_ALIGN_CEIL(hash_cd_ctrl->inner_state1_sz, 8))
2002 cdesc->cd_cur_ptr += state1_size + state2_size + cd_extra_size;
2003 cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
2005 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
2006 cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;
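/*
 * Worked example (illustrative; assumes a 16-byte
 * icp_qat_hw_auth_setup header): for SHA1-HMAC, state1 is 20 bytes
 * rounded up to 24, so inner_state2_offset ends up
 * (16 + 24) >> 3 = 5 quadwords past hash_cfg_offset.
 */
static uint16_t
sha1_hmac_state2_offset_example(uint16_t hash_cfg_offset)
{
	const uint16_t setup_sz = 16;  /* assumed header size */
	const uint16_t state1_sz = 24; /* 20-byte SHA1 state1, 8-aligned */

	return hash_cfg_offset + ((setup_sz + state1_sz) >> 3);
}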
2011 int qat_sym_validate_aes_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
2014 case ICP_QAT_HW_AES_128_KEY_SZ:
2015 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
2017 case ICP_QAT_HW_AES_192_KEY_SZ:
2018 *alg = ICP_QAT_HW_CIPHER_ALGO_AES192;
2020 case ICP_QAT_HW_AES_256_KEY_SZ:
2021 *alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
2029 int qat_sym_validate_aes_docsisbpi_key(int key_len,
2030 enum icp_qat_hw_cipher_algo *alg)
2033 case ICP_QAT_HW_AES_128_KEY_SZ:
2034 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
2036 case ICP_QAT_HW_AES_256_KEY_SZ:
2037 *alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
2045 int qat_sym_validate_snow3g_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
2048 case ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ:
2049 *alg = ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2;
2057 int qat_sym_validate_kasumi_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
2060 case ICP_QAT_HW_KASUMI_KEY_SZ:
2061 *alg = ICP_QAT_HW_CIPHER_ALGO_KASUMI;
2069 int qat_sym_validate_des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
2072 case ICP_QAT_HW_DES_KEY_SZ:
2073 *alg = ICP_QAT_HW_CIPHER_ALGO_DES;
2081 int qat_sym_validate_3des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
2084 case QAT_3DES_KEY_SZ_OPT1:
2085 case QAT_3DES_KEY_SZ_OPT2:
2086 case QAT_3DES_KEY_SZ_OPT3:
2087 *alg = ICP_QAT_HW_CIPHER_ALGO_3DES;
2095 int qat_sym_validate_zuc_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
2098 case ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ:
2099 *alg = ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3;
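/*
 * Usage sketch (illustrative): the validators above both check the key
 * length and resolve the hardware algorithm id in one call.
 */
static int
pick_aes_alg_example(void)
{
	enum icp_qat_hw_cipher_algo alg;

	if (qat_sym_validate_aes_key(ICP_QAT_HW_AES_256_KEY_SZ, &alg) != 0)
		return -1;
	/* alg == ICP_QAT_HW_CIPHER_ALGO_AES256 */
	return 0;
}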
2107 #ifdef RTE_LIB_SECURITY
2109 qat_sec_session_check_docsis(struct rte_security_session_conf *conf)
2111 struct rte_crypto_sym_xform *crypto_sym = conf->crypto_xform;
2112 struct rte_security_docsis_xform *docsis = &conf->docsis;
2114 /* CRC generate -> Cipher encrypt */
2115 if (docsis->direction == RTE_SECURITY_DOCSIS_DOWNLINK) {
2117 if (crypto_sym != NULL &&
2118 crypto_sym->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
2119 crypto_sym->cipher.op == RTE_CRYPTO_CIPHER_OP_ENCRYPT &&
2120 crypto_sym->cipher.algo ==
2121 RTE_CRYPTO_CIPHER_AES_DOCSISBPI &&
2122 (crypto_sym->cipher.key.length ==
2123 ICP_QAT_HW_AES_128_KEY_SZ ||
2124 crypto_sym->cipher.key.length ==
2125 ICP_QAT_HW_AES_256_KEY_SZ) &&
2126 crypto_sym->cipher.iv.length == ICP_QAT_HW_AES_BLK_SZ &&
2127 crypto_sym->next == NULL) {
2130 /* Cipher decrypt -> CRC verify */
2131 } else if (docsis->direction == RTE_SECURITY_DOCSIS_UPLINK) {
2133 if (crypto_sym != NULL &&
2134 crypto_sym->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
2135 crypto_sym->cipher.op == RTE_CRYPTO_CIPHER_OP_DECRYPT &&
2136 crypto_sym->cipher.algo ==
2137 RTE_CRYPTO_CIPHER_AES_DOCSISBPI &&
2138 (crypto_sym->cipher.key.length ==
2139 ICP_QAT_HW_AES_128_KEY_SZ ||
2140 crypto_sym->cipher.key.length ==
2141 ICP_QAT_HW_AES_256_KEY_SZ) &&
2142 crypto_sym->cipher.iv.length == ICP_QAT_HW_AES_BLK_SZ &&
2143 crypto_sym->next == NULL) {
2152 qat_sec_session_set_docsis_parameters(struct rte_cryptodev *dev,
2153 struct rte_security_session_conf *conf, void *session_private)
2157 struct rte_crypto_sym_xform *xform = NULL;
2158 struct qat_sym_session *session = session_private;
2160 /* Clear the session */
2161 memset(session, 0, qat_sym_session_get_private_size(dev));
2163 ret = qat_sec_session_check_docsis(conf);
2165 QAT_LOG(ERR, "Unsupported DOCSIS security configuration");
2169 xform = conf->crypto_xform;
2171 /* Verify the session physical address is known */
2172 rte_iova_t session_paddr = rte_mempool_virt2iova(session);
2173 if (session_paddr == 0 || session_paddr == RTE_BAD_IOVA) {
2175 "Session physical address unknown. Bad memory pool.");
2179 /* Set context descriptor physical address */
2180 session->cd_paddr = session_paddr +
2181 offsetof(struct qat_sym_session, cd);
2183 /* Get requested QAT command id - should be cipher */
2184 qat_cmd_id = qat_get_cmd_id(xform);
2185 if (qat_cmd_id != ICP_QAT_FW_LA_CMD_CIPHER) {
2186 QAT_LOG(ERR, "Unsupported xform chain requested");
2189 session->qat_cmd = (enum icp_qat_fw_la_cmd_id)qat_cmd_id;
2191 ret = qat_sym_session_configure_cipher(dev, xform, session);
2194 qat_sym_session_finalize(session);
2200 qat_security_session_create(void *dev,
2201 struct rte_security_session_conf *conf,
2202 struct rte_security_session *sess,
2203 struct rte_mempool *mempool)
2205 void *sess_private_data;
2206 struct rte_cryptodev *cdev = (struct rte_cryptodev *)dev;
2207 struct qat_cryptodev_private *internals = cdev->data->dev_private;
2208 enum qat_device_gen qat_dev_gen = internals->qat_dev->qat_dev_gen;
2209 struct qat_sym_session *sym_session = NULL;
2212 if (conf->action_type != RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL ||
2213 conf->protocol != RTE_SECURITY_PROTOCOL_DOCSIS) {
2214 QAT_LOG(ERR, "Invalid security protocol");
2218 if (rte_mempool_get(mempool, &sess_private_data)) {
2219 QAT_LOG(ERR, "Couldn't get object from session mempool");
2223 ret = qat_sec_session_set_docsis_parameters(cdev, conf,
2226 QAT_LOG(ERR, "Failed to configure session parameters");
2227 /* Return session to mempool */
2228 rte_mempool_put(mempool, sess_private_data);
2232 set_sec_session_private_data(sess, sess_private_data);
2233 sym_session = (struct qat_sym_session *)sess_private_data;
2234 sym_session->dev_id = internals->dev_id;
2236 return qat_sym_gen_dev_ops[qat_dev_gen].set_session((void *)cdev,
2241 qat_security_session_destroy(void *dev __rte_unused,
2242 struct rte_security_session *sess)
2244 void *sess_priv = get_sec_session_private_data(sess);
2245 struct qat_sym_session *s = (struct qat_sym_session *)sess_priv;
2249 bpi_cipher_ctx_free(s->bpi_ctx);
2250 memset(s, 0, qat_sym_session_get_private_size(dev));
2251 struct rte_mempool *sess_mp = rte_mempool_from_obj(sess_priv);
2253 set_sec_session_private_data(sess, NULL);
2254 rte_mempool_put(sess_mp, sess_priv);
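/*
 * Application-level usage sketch (assumption: the four-argument
 * rte_security_session_create() of this file's era): create a DOCSIS
 * session on a QAT device; the framework then invokes
 * qat_security_session_create() above.
 */
static struct rte_security_session *
docsis_session_example(struct rte_security_ctx *sec_ctx,
		struct rte_security_session_conf *conf,
		struct rte_mempool *sess_mp, struct rte_mempool *priv_mp)
{
	conf->action_type = RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL;
	conf->protocol = RTE_SECURITY_PROTOCOL_DOCSIS;
	return rte_security_session_create(sec_ctx, conf, sess_mp, priv_mp);
}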