/* SPDX-License-Identifier: (BSD-3-Clause OR GPL-2.0)
 * Copyright(c) 2015-2018 Intel Corporation
 */
4 #include <rte_memcpy.h>
5 #include <rte_common.h>
6 #include <rte_spinlock.h>
7 #include <rte_byteorder.h>
9 #include <rte_malloc.h>
10 #include <rte_crypto_sym.h>
12 #include "../qat_logs.h"
14 #include <openssl/sha.h> /* Needed to calculate pre-compute values */
15 #include <openssl/aes.h> /* Needed to calculate pre-compute values */
16 #include <openssl/md5.h> /* Needed to calculate pre-compute values */
20 /* returns block size in bytes per cipher algo */
21 int qat_cipher_get_block_size(enum icp_qat_hw_cipher_algo qat_cipher_alg)
23 switch (qat_cipher_alg) {
24 case ICP_QAT_HW_CIPHER_ALGO_DES:
25 return ICP_QAT_HW_DES_BLK_SZ;
26 case ICP_QAT_HW_CIPHER_ALGO_3DES:
27 return ICP_QAT_HW_3DES_BLK_SZ;
28 case ICP_QAT_HW_CIPHER_ALGO_AES128:
29 case ICP_QAT_HW_CIPHER_ALGO_AES192:
30 case ICP_QAT_HW_CIPHER_ALGO_AES256:
31 return ICP_QAT_HW_AES_BLK_SZ;
33 PMD_DRV_LOG(ERR, "invalid block cipher alg %u", qat_cipher_alg);
40 * Returns size in bytes per hash algo for state1 size field in cd_ctrl
41 * This is digest size rounded up to nearest quadword
43 static int qat_hash_get_state1_size(enum icp_qat_hw_auth_algo qat_hash_alg)
45 switch (qat_hash_alg) {
46 case ICP_QAT_HW_AUTH_ALGO_SHA1:
47 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA1_STATE1_SZ,
48 QAT_HW_DEFAULT_ALIGNMENT);
49 case ICP_QAT_HW_AUTH_ALGO_SHA224:
50 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA224_STATE1_SZ,
51 QAT_HW_DEFAULT_ALIGNMENT);
52 case ICP_QAT_HW_AUTH_ALGO_SHA256:
53 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA256_STATE1_SZ,
54 QAT_HW_DEFAULT_ALIGNMENT);
55 case ICP_QAT_HW_AUTH_ALGO_SHA384:
56 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA384_STATE1_SZ,
57 QAT_HW_DEFAULT_ALIGNMENT);
58 case ICP_QAT_HW_AUTH_ALGO_SHA512:
59 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
60 QAT_HW_DEFAULT_ALIGNMENT);
61 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
62 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ,
63 QAT_HW_DEFAULT_ALIGNMENT);
64 case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
65 case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
66 return QAT_HW_ROUND_UP(ICP_QAT_HW_GALOIS_128_STATE1_SZ,
67 QAT_HW_DEFAULT_ALIGNMENT);
68 case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
69 return QAT_HW_ROUND_UP(ICP_QAT_HW_ZUC_3G_EIA3_STATE1_SZ,
70 QAT_HW_DEFAULT_ALIGNMENT);
71 case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
72 return QAT_HW_ROUND_UP(ICP_QAT_HW_SNOW_3G_UIA2_STATE1_SZ,
73 QAT_HW_DEFAULT_ALIGNMENT);
74 case ICP_QAT_HW_AUTH_ALGO_MD5:
75 return QAT_HW_ROUND_UP(ICP_QAT_HW_MD5_STATE1_SZ,
76 QAT_HW_DEFAULT_ALIGNMENT);
77 case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
78 return QAT_HW_ROUND_UP(ICP_QAT_HW_KASUMI_F9_STATE1_SZ,
79 QAT_HW_DEFAULT_ALIGNMENT);
80 case ICP_QAT_HW_AUTH_ALGO_NULL:
81 return QAT_HW_ROUND_UP(ICP_QAT_HW_NULL_STATE1_SZ,
82 QAT_HW_DEFAULT_ALIGNMENT);
83 case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
84 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_CBC_MAC_STATE1_SZ,
85 QAT_HW_DEFAULT_ALIGNMENT);
86 case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
87 /* return maximum state1 size in this case */
88 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
89 QAT_HW_DEFAULT_ALIGNMENT);
91 PMD_DRV_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
97 /* returns digest size in bytes per hash algo */
98 static int qat_hash_get_digest_size(enum icp_qat_hw_auth_algo qat_hash_alg)
100 switch (qat_hash_alg) {
101 case ICP_QAT_HW_AUTH_ALGO_SHA1:
102 return ICP_QAT_HW_SHA1_STATE1_SZ;
103 case ICP_QAT_HW_AUTH_ALGO_SHA224:
104 return ICP_QAT_HW_SHA224_STATE1_SZ;
105 case ICP_QAT_HW_AUTH_ALGO_SHA256:
106 return ICP_QAT_HW_SHA256_STATE1_SZ;
107 case ICP_QAT_HW_AUTH_ALGO_SHA384:
108 return ICP_QAT_HW_SHA384_STATE1_SZ;
109 case ICP_QAT_HW_AUTH_ALGO_SHA512:
110 return ICP_QAT_HW_SHA512_STATE1_SZ;
111 case ICP_QAT_HW_AUTH_ALGO_MD5:
112 return ICP_QAT_HW_MD5_STATE1_SZ;
113 case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
114 /* return maximum digest size in this case */
115 return ICP_QAT_HW_SHA512_STATE1_SZ;
117 PMD_DRV_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
123 /* returns block size in byes per hash algo */
124 static int qat_hash_get_block_size(enum icp_qat_hw_auth_algo qat_hash_alg)
126 switch (qat_hash_alg) {
127 case ICP_QAT_HW_AUTH_ALGO_SHA1:
129 case ICP_QAT_HW_AUTH_ALGO_SHA224:
130 return SHA256_CBLOCK;
131 case ICP_QAT_HW_AUTH_ALGO_SHA256:
132 return SHA256_CBLOCK;
133 case ICP_QAT_HW_AUTH_ALGO_SHA384:
134 return SHA512_CBLOCK;
135 case ICP_QAT_HW_AUTH_ALGO_SHA512:
136 return SHA512_CBLOCK;
137 case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
139 case ICP_QAT_HW_AUTH_ALGO_MD5:
141 case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
142 /* return maximum block size in this case */
143 return SHA512_CBLOCK;
145 PMD_DRV_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
151 static int partial_hash_sha1(uint8_t *data_in, uint8_t *data_out)
155 if (!SHA1_Init(&ctx))
157 SHA1_Transform(&ctx, data_in);
158 rte_memcpy(data_out, &ctx, SHA_DIGEST_LENGTH);
162 static int partial_hash_sha224(uint8_t *data_in, uint8_t *data_out)
166 if (!SHA224_Init(&ctx))
168 SHA256_Transform(&ctx, data_in);
169 rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
173 static int partial_hash_sha256(uint8_t *data_in, uint8_t *data_out)
177 if (!SHA256_Init(&ctx))
179 SHA256_Transform(&ctx, data_in);
180 rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
184 static int partial_hash_sha384(uint8_t *data_in, uint8_t *data_out)
188 if (!SHA384_Init(&ctx))
190 SHA512_Transform(&ctx, data_in);
191 rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
195 static int partial_hash_sha512(uint8_t *data_in, uint8_t *data_out)
199 if (!SHA512_Init(&ctx))
201 SHA512_Transform(&ctx, data_in);
202 rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
206 static int partial_hash_md5(uint8_t *data_in, uint8_t *data_out)
212 MD5_Transform(&ctx, data_in);
213 rte_memcpy(data_out, &ctx, MD5_DIGEST_LENGTH);
218 static int partial_hash_compute(enum icp_qat_hw_auth_algo hash_alg,
223 uint8_t digest[qat_hash_get_digest_size(
224 ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
225 uint32_t *hash_state_out_be32;
226 uint64_t *hash_state_out_be64;
229 PMD_INIT_FUNC_TRACE();
230 digest_size = qat_hash_get_digest_size(hash_alg);
231 if (digest_size <= 0)
234 hash_state_out_be32 = (uint32_t *)data_out;
235 hash_state_out_be64 = (uint64_t *)data_out;
238 case ICP_QAT_HW_AUTH_ALGO_SHA1:
239 if (partial_hash_sha1(data_in, digest))
241 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
242 *hash_state_out_be32 =
243 rte_bswap32(*(((uint32_t *)digest)+i));
245 case ICP_QAT_HW_AUTH_ALGO_SHA224:
246 if (partial_hash_sha224(data_in, digest))
248 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
249 *hash_state_out_be32 =
250 rte_bswap32(*(((uint32_t *)digest)+i));
252 case ICP_QAT_HW_AUTH_ALGO_SHA256:
253 if (partial_hash_sha256(data_in, digest))
255 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
256 *hash_state_out_be32 =
257 rte_bswap32(*(((uint32_t *)digest)+i));
259 case ICP_QAT_HW_AUTH_ALGO_SHA384:
260 if (partial_hash_sha384(data_in, digest))
262 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
263 *hash_state_out_be64 =
264 rte_bswap64(*(((uint64_t *)digest)+i));
266 case ICP_QAT_HW_AUTH_ALGO_SHA512:
267 if (partial_hash_sha512(data_in, digest))
269 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
270 *hash_state_out_be64 =
271 rte_bswap64(*(((uint64_t *)digest)+i));
273 case ICP_QAT_HW_AUTH_ALGO_MD5:
274 if (partial_hash_md5(data_in, data_out))
278 PMD_DRV_LOG(ERR, "invalid hash alg %u", hash_alg);
284 #define HMAC_IPAD_VALUE 0x36
285 #define HMAC_OPAD_VALUE 0x5c
286 #define HASH_XCBC_PRECOMP_KEY_NUM 3
288 static int qat_alg_do_precomputes(enum icp_qat_hw_auth_algo hash_alg,
289 const uint8_t *auth_key,
290 uint16_t auth_keylen,
291 uint8_t *p_state_buf,
292 uint16_t *p_state_len)
295 uint8_t ipad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
296 uint8_t opad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
299 PMD_INIT_FUNC_TRACE();
300 if (hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC) {
301 static uint8_t qat_aes_xcbc_key_seed[
302 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ] = {
303 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
304 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
305 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
306 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
307 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
308 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
312 uint8_t *out = p_state_buf;
316 in = rte_zmalloc("working mem for key",
317 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ, 16);
319 PMD_DRV_LOG(ERR, "Failed to alloc memory");
323 rte_memcpy(in, qat_aes_xcbc_key_seed,
324 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
325 for (x = 0; x < HASH_XCBC_PRECOMP_KEY_NUM; x++) {
326 if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
329 (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ));
331 (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ),
332 0, ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
335 AES_encrypt(in, out, &enc_key);
336 in += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
337 out += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
339 *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
340 rte_free(in - x*ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ);
342 } else if ((hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_128) ||
343 (hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_64)) {
345 uint8_t *out = p_state_buf;
348 memset(p_state_buf, 0, ICP_QAT_HW_GALOIS_H_SZ +
349 ICP_QAT_HW_GALOIS_LEN_A_SZ +
350 ICP_QAT_HW_GALOIS_E_CTR0_SZ);
351 in = rte_zmalloc("working mem for key",
352 ICP_QAT_HW_GALOIS_H_SZ, 16);
354 PMD_DRV_LOG(ERR, "Failed to alloc memory");
358 memset(in, 0, ICP_QAT_HW_GALOIS_H_SZ);
359 if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
363 AES_encrypt(in, out, &enc_key);
364 *p_state_len = ICP_QAT_HW_GALOIS_H_SZ +
365 ICP_QAT_HW_GALOIS_LEN_A_SZ +
366 ICP_QAT_HW_GALOIS_E_CTR0_SZ;
371 block_size = qat_hash_get_block_size(hash_alg);
374 /* init ipad and opad from key and xor with fixed values */
375 memset(ipad, 0, block_size);
376 memset(opad, 0, block_size);
378 if (auth_keylen > (unsigned int)block_size) {
379 PMD_DRV_LOG(ERR, "invalid keylen %u", auth_keylen);
382 rte_memcpy(ipad, auth_key, auth_keylen);
383 rte_memcpy(opad, auth_key, auth_keylen);
385 for (i = 0; i < block_size; i++) {
386 uint8_t *ipad_ptr = ipad + i;
387 uint8_t *opad_ptr = opad + i;
388 *ipad_ptr ^= HMAC_IPAD_VALUE;
389 *opad_ptr ^= HMAC_OPAD_VALUE;
392 /* do partial hash of ipad and copy to state1 */
393 if (partial_hash_compute(hash_alg, ipad, p_state_buf)) {
394 memset(ipad, 0, block_size);
395 memset(opad, 0, block_size);
396 PMD_DRV_LOG(ERR, "ipad precompute failed");
401 * State len is a multiple of 8, so may be larger than the digest.
402 * Put the partial hash of opad state_len bytes after state1
404 *p_state_len = qat_hash_get_state1_size(hash_alg);
405 if (partial_hash_compute(hash_alg, opad, p_state_buf + *p_state_len)) {
406 memset(ipad, 0, block_size);
407 memset(opad, 0, block_size);
408 PMD_DRV_LOG(ERR, "opad precompute failed");
412 /* don't leave data lying around */
413 memset(ipad, 0, block_size);
414 memset(opad, 0, block_size);
418 void qat_alg_init_common_hdr(struct icp_qat_fw_comn_req_hdr *header,
419 enum qat_crypto_proto_flag proto_flags)
421 PMD_INIT_FUNC_TRACE();
423 ICP_QAT_FW_COMN_HDR_FLAGS_BUILD(ICP_QAT_FW_COMN_REQ_FLAG_SET);
424 header->service_type = ICP_QAT_FW_COMN_REQ_CPM_FW_LA;
425 header->comn_req_flags =
426 ICP_QAT_FW_COMN_FLAGS_BUILD(QAT_COMN_CD_FLD_TYPE_64BIT_ADR,
427 QAT_COMN_PTR_TYPE_FLAT);
428 ICP_QAT_FW_LA_PARTIAL_SET(header->serv_specif_flags,
429 ICP_QAT_FW_LA_PARTIAL_NONE);
430 ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(header->serv_specif_flags,
431 ICP_QAT_FW_CIPH_IV_16BYTE_DATA);
433 switch (proto_flags) {
434 case QAT_CRYPTO_PROTO_FLAG_NONE:
435 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
436 ICP_QAT_FW_LA_NO_PROTO);
438 case QAT_CRYPTO_PROTO_FLAG_CCM:
439 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
440 ICP_QAT_FW_LA_CCM_PROTO);
442 case QAT_CRYPTO_PROTO_FLAG_GCM:
443 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
444 ICP_QAT_FW_LA_GCM_PROTO);
446 case QAT_CRYPTO_PROTO_FLAG_SNOW3G:
447 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
448 ICP_QAT_FW_LA_SNOW_3G_PROTO);
450 case QAT_CRYPTO_PROTO_FLAG_ZUC:
451 ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(header->serv_specif_flags,
452 ICP_QAT_FW_LA_ZUC_3G_PROTO);
456 ICP_QAT_FW_LA_UPDATE_STATE_SET(header->serv_specif_flags,
457 ICP_QAT_FW_LA_NO_UPDATE_STATE);
458 ICP_QAT_FW_LA_DIGEST_IN_BUFFER_SET(header->serv_specif_flags,
459 ICP_QAT_FW_LA_NO_DIGEST_IN_BUFFER);
463 * Snow3G and ZUC should never use this function
464 * and set its protocol flag in both cipher and auth part of content
465 * descriptor building function
467 static enum qat_crypto_proto_flag
468 qat_get_crypto_proto_flag(uint16_t flags)
470 int proto = ICP_QAT_FW_LA_PROTO_GET(flags);
471 enum qat_crypto_proto_flag qat_proto_flag =
472 QAT_CRYPTO_PROTO_FLAG_NONE;
475 case ICP_QAT_FW_LA_GCM_PROTO:
476 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
478 case ICP_QAT_FW_LA_CCM_PROTO:
479 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
483 return qat_proto_flag;
/*
 * Build the cipher portion of the session content descriptor and set up
 * the cipher-related fields of the firmware request template in cdesc.
 * Writes the (possibly converted/padded) cipher key into the CD and
 * advances cdesc->cd_cur_ptr past the cipher config block.
 * NOTE(review): negative return is presumed on invalid command — confirm
 * against the error paths.
 */
int qat_alg_aead_session_create_content_desc_cipher(struct qat_session *cdesc,
		uint32_t cipherkeylen)
{
	struct icp_qat_hw_cipher_algo_blk *cipher;
	struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
	struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
	struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
	/* cipher and auth cd_ctrl views alias the same cd_ctrl storage */
	void *ptr = &req_tmpl->cd_ctrl;
	struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
	struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
	enum icp_qat_hw_cipher_convert key_convert;
	enum qat_crypto_proto_flag qat_proto_flag =
		QAT_CRYPTO_PROTO_FLAG_NONE;
	uint32_t total_key_size;
	uint16_t cipher_offset, cd_size;
	uint32_t wordIndex  = 0;
	uint32_t *temp_key = NULL;
	PMD_INIT_FUNC_TRACE();

	/* Set up the slice chain in cd_ctrl according to the session cmd */
	if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER) {
		cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
		ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_DRAM_WR);
		/* cipher-only: no auth result to return or compare */
		ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_RET_AUTH_RES);
		ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
		cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
		ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_DRAM_WR);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
		PMD_DRV_LOG(ERR, "Invalid param, must be a cipher command.");

	if (cdesc->qat_mode == ICP_QAT_HW_CIPHER_CTR_MODE) {
		/*
		 * CTR Streaming ciphers are a special case. Decrypt = encrypt
		 * Overriding default values previously set
		 */
		cdesc->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
		key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2
		|| cdesc->qat_cipher_alg ==
			ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)
		key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
	else if (cdesc->qat_dir == ICP_QAT_HW_CIPHER_ENCRYPT)
		key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
	else
		key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;

	/* Per-algo key/state sizes and protocol flag selection */
	if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
		total_key_size = ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ +
			ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
		cipher_cd_ctrl->cipher_state_sz =
			ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;

	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
		total_key_size = ICP_QAT_HW_KASUMI_F8_KEY_SZ;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_KASUMI_BLK_SZ >> 3;
		cipher_cd_ctrl->cipher_padding_sz =
					(2 * ICP_QAT_HW_KASUMI_BLK_SZ) >> 3;
	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES) {
		total_key_size = ICP_QAT_HW_3DES_KEY_SZ;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_3DES_BLK_SZ >> 3;
			qat_get_crypto_proto_flag(header->serv_specif_flags);
	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_DES) {
		total_key_size = ICP_QAT_HW_DES_KEY_SZ;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_DES_BLK_SZ >> 3;
			qat_get_crypto_proto_flag(header->serv_specif_flags);
	} else if (cdesc->qat_cipher_alg ==
		ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
		total_key_size = ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ +
			ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
		cipher_cd_ctrl->cipher_state_sz =
			ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
		/* ZUC requires at least a GEN2 QAT device */
		cdesc->min_qat_dev_gen = QAT_GEN2;
		total_key_size = cipherkeylen;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_AES_BLK_SZ >> 3;
			qat_get_crypto_proto_flag(header->serv_specif_flags);
	cipher_cd_ctrl->cipher_key_sz = total_key_size >> 3;
	cipher_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
	cipher_cd_ctrl->cipher_cfg_offset = cipher_offset >> 3;

	header->service_cmd_id = cdesc->qat_cmd;
	qat_alg_init_common_hdr(header, qat_proto_flag);

	cipher = (struct icp_qat_hw_cipher_algo_blk *)cdesc->cd_cur_ptr;
	cipher->cipher_config.val =
	    ICP_QAT_HW_CIPHER_CONFIG_BUILD(cdesc->qat_mode,
					cdesc->qat_cipher_alg, key_convert,

	if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
		/*
		 * KASUMI F8 stores the key twice: the plain key followed by
		 * the key XORed with the F8 key modifier.
		 */
		temp_key = (uint32_t *)(cdesc->cd_cur_ptr +
					sizeof(struct icp_qat_hw_cipher_config)
		memcpy(cipher->key, cipherkey, cipherkeylen);
		memcpy(temp_key, cipherkey, cipherkeylen);

		/* XOR Key with KASUMI F8 key modifier at 4 bytes level */
		for (wordIndex = 0; wordIndex < (cipherkeylen >> 2);
			temp_key[wordIndex] ^= KASUMI_F8_KEY_MODIFIER_4_BYTES;

		cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
					cipherkeylen + cipherkeylen;
		memcpy(cipher->key, cipherkey, cipherkeylen);
		cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +

	if (total_key_size > cipherkeylen) {
		uint32_t padding_size =  total_key_size-cipherkeylen;
		if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
			&& (cipherkeylen == QAT_3DES_KEY_SZ_OPT2))
			/* K3 not provided so use K1 = K3*/
			memcpy(cdesc->cd_cur_ptr, cipherkey, padding_size);
		else
			memset(cdesc->cd_cur_ptr, 0, padding_size);
		cdesc->cd_cur_ptr += padding_size;
	cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
	cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;
/*
 * Build the auth portion of the session content descriptor: inner hash
 * config, state1/state2 precomputes for the selected auth algorithm, and
 * the auth-related fields of the firmware request template.
 * Advances cdesc->cd_cur_ptr past the written state data and finalises
 * the CD address/size in cd_pars.
 */
int qat_alg_aead_session_create_content_desc_auth(struct qat_session *cdesc,
		unsigned int operation)
{
	struct icp_qat_hw_auth_setup *hash;
	struct icp_qat_hw_cipher_algo_blk *cipherconfig;
	struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
	struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
	struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
	/* cipher and auth cd_ctrl views alias the same cd_ctrl storage */
	void *ptr = &req_tmpl->cd_ctrl;
	struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
	struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
	/* auth request params live just after the cipher request params */
	struct icp_qat_fw_la_auth_req_params *auth_param =
		(struct icp_qat_fw_la_auth_req_params *)
		((char *)&req_tmpl->serv_specif_rqpars +
		sizeof(struct icp_qat_fw_la_cipher_req_params));
	uint16_t state1_size = 0, state2_size = 0;
	uint16_t hash_offset, cd_size;
	uint32_t *aad_len = NULL;
	uint32_t wordIndex  = 0;

	enum qat_crypto_proto_flag qat_proto_flag =
		QAT_CRYPTO_PROTO_FLAG_NONE;

	PMD_INIT_FUNC_TRACE();

	/* Slice chain setup based on the session command */
	if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_AUTH) {
		ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_DRAM_WR);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
		ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
				ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
				ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
				ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
				ICP_QAT_FW_SLICE_DRAM_WR);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
		PMD_DRV_LOG(ERR, "Invalid param, must be a hash command.");

	/* verify: hardware compares digest; generate: digest returned */
	if (operation == RTE_CRYPTO_AUTH_OP_VERIFY) {
		ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_NO_RET_AUTH_RES);
		ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_CMP_AUTH_RES);
		cdesc->auth_op = ICP_QAT_HW_AUTH_VERIFY;
		ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
					   ICP_QAT_FW_LA_RET_AUTH_RES);
		ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
					   ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
		cdesc->auth_op = ICP_QAT_HW_AUTH_GENERATE;

	/*
	 * Setup the inner hash config
	 */
	hash_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
	hash = (struct icp_qat_hw_auth_setup *)cdesc->cd_cur_ptr;
	hash->auth_config.reserved = 0;
	hash->auth_config.config =
			ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE1,
				cdesc->qat_hash_alg, digestsize);

	/* stream-mode algos do not use the block-size byte counter */
	if (cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_KASUMI_F9
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3)
		hash->auth_counter.counter = 0;
		hash->auth_counter.counter = rte_bswap32(
				qat_hash_get_block_size(cdesc->qat_hash_alg));

	cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_auth_setup);

	/*
	 * cd_cur_ptr now points at the state1 information.
	 */
	switch (cdesc->qat_hash_alg) {
	case ICP_QAT_HW_AUTH_ALGO_SHA1:
		if (qat_alg_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA1,
			authkey, authkeylen, cdesc->cd_cur_ptr,	&state1_size)) {
			PMD_DRV_LOG(ERR, "(SHA)precompute failed");
		state2_size = RTE_ALIGN_CEIL(ICP_QAT_HW_SHA1_STATE2_SZ, 8);
	case ICP_QAT_HW_AUTH_ALGO_SHA224:
		if (qat_alg_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA224,
			authkey, authkeylen, cdesc->cd_cur_ptr, &state1_size)) {
			PMD_DRV_LOG(ERR, "(SHA)precompute failed");
		state2_size = ICP_QAT_HW_SHA224_STATE2_SZ;
	case ICP_QAT_HW_AUTH_ALGO_SHA256:
		if (qat_alg_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA256,
			authkey, authkeylen, cdesc->cd_cur_ptr,	&state1_size)) {
			PMD_DRV_LOG(ERR, "(SHA)precompute failed");
		state2_size = ICP_QAT_HW_SHA256_STATE2_SZ;
	case ICP_QAT_HW_AUTH_ALGO_SHA384:
		if (qat_alg_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA384,
			authkey, authkeylen, cdesc->cd_cur_ptr, &state1_size)) {
			PMD_DRV_LOG(ERR, "(SHA)precompute failed");
		state2_size = ICP_QAT_HW_SHA384_STATE2_SZ;
	case ICP_QAT_HW_AUTH_ALGO_SHA512:
		if (qat_alg_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA512,
			authkey, authkeylen, cdesc->cd_cur_ptr,	&state1_size)) {
			PMD_DRV_LOG(ERR, "(SHA)precompute failed");
		state2_size = ICP_QAT_HW_SHA512_STATE2_SZ;
	case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
		state1_size = ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
		if (qat_alg_do_precomputes(ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC,
			authkey, authkeylen, cdesc->cd_cur_ptr + state1_size,
			PMD_DRV_LOG(ERR, "(XCBC)precompute failed");

	case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
	case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
		state1_size = ICP_QAT_HW_GALOIS_128_STATE1_SZ;
		if (qat_alg_do_precomputes(cdesc->qat_hash_alg,
			authkey, authkeylen, cdesc->cd_cur_ptr + state1_size,
			PMD_DRV_LOG(ERR, "(GCM)precompute failed");
		/*
		 * Write (the length of AAD) into bytes 16-19 of state2
		 * in big-endian format. This field is 8 bytes
		 */
		auth_param->u2.aad_sz =
				RTE_ALIGN_CEIL(aad_length, 16);
		auth_param->hash_state_sz = (auth_param->u2.aad_sz) >> 3;

		aad_len = (uint32_t *)(cdesc->cd_cur_ptr +
					ICP_QAT_HW_GALOIS_128_STATE1_SZ +
					ICP_QAT_HW_GALOIS_H_SZ);
		*aad_len = rte_bswap32(aad_length);
		cdesc->aad_len = aad_length;
	case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2);
		state2_size = ICP_QAT_HW_SNOW_3G_UIA2_STATE2_SZ;
		memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);

		/* Snow3G auth also embeds a UEA2 cipher config + key + IV */
		cipherconfig = (struct icp_qat_hw_cipher_algo_blk *)
				(cdesc->cd_cur_ptr + state1_size + state2_size);
		cipherconfig->cipher_config.val =
		ICP_QAT_HW_CIPHER_CONFIG_BUILD(ICP_QAT_HW_CIPHER_ECB_MODE,
			ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2,
			ICP_QAT_HW_CIPHER_KEY_CONVERT,
			ICP_QAT_HW_CIPHER_ENCRYPT);
		memcpy(cipherconfig->key, authkey, authkeylen);
		memset(cipherconfig->key + authkeylen,
				0, ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ);
		cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
				authkeylen + ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
		auth_param->hash_state_sz = ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
	case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
		/* ZUC EIA3 uses MODE0 instead of the default MODE1 */
		hash->auth_config.config =
			ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE0,
				cdesc->qat_hash_alg, digestsize);
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3);
		state2_size = ICP_QAT_HW_ZUC_3G_EIA3_STATE2_SZ;
		memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size
			+ ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ);

		memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
		cdesc->cd_cur_ptr += state1_size + state2_size
			+ ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
		auth_param->hash_state_sz = ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
		cdesc->min_qat_dev_gen = QAT_GEN2;

	case ICP_QAT_HW_AUTH_ALGO_MD5:
		if (qat_alg_do_precomputes(ICP_QAT_HW_AUTH_ALGO_MD5,
			authkey, authkeylen, cdesc->cd_cur_ptr,
			PMD_DRV_LOG(ERR, "(MD5)precompute failed");
		state2_size = ICP_QAT_HW_MD5_STATE2_SZ;
	case ICP_QAT_HW_AUTH_ALGO_NULL:
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_NULL);
		state2_size = ICP_QAT_HW_NULL_STATE2_SZ;
	case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC);
		state2_size = ICP_QAT_HW_AES_CBC_MAC_KEY_SZ +
				ICP_QAT_HW_AES_CCM_CBC_E_CTR0_SZ;

		/* CCM: AAD, when present, is prefixed by B0 + length info */
		if (aad_length > 0) {
			aad_length += ICP_QAT_HW_CCM_AAD_B0_LEN +
			ICP_QAT_HW_CCM_AAD_LEN_INFO;
			auth_param->u2.aad_sz =
			RTE_ALIGN_CEIL(aad_length,
			ICP_QAT_HW_CCM_AAD_ALIGNMENT);
			auth_param->u2.aad_sz = ICP_QAT_HW_CCM_AAD_B0_LEN;

		cdesc->aad_len = aad_length;
		hash->auth_counter.counter = 0;

		hash_cd_ctrl->outer_prefix_sz = digestsize;
		auth_param->hash_state_sz = digestsize;

		memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
	case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_KASUMI_F9);
		state2_size = ICP_QAT_HW_KASUMI_F9_STATE2_SZ;
		memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
		pTempKey = (uint32_t *)(cdesc->cd_cur_ptr + state1_size
		/*
		 * The Inner Hash Initial State2 block must contain IK
		 * (Initialisation Key), followed by IK XOR-ed with KM
		 * (Key Modifier): IK||(IK^KM).
		 */
		/* write the auth key */
		memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
		/* initialise temp key with auth key */
		memcpy(pTempKey, authkey, authkeylen);
		/* XOR Key with KASUMI F9 key modifier at 4 bytes level */
		for (wordIndex = 0; wordIndex < (authkeylen >> 2); wordIndex++)
			pTempKey[wordIndex] ^= KASUMI_F9_KEY_MODIFIER_4_BYTES;
		PMD_DRV_LOG(ERR, "Invalid HASH alg %u", cdesc->qat_hash_alg);

	/* Request template setup */
	qat_alg_init_common_hdr(header, qat_proto_flag);
	header->service_cmd_id = cdesc->qat_cmd;

	/* Auth CD config setup */
	hash_cd_ctrl->hash_cfg_offset = hash_offset >> 3;
	hash_cd_ctrl->hash_flags = ICP_QAT_FW_AUTH_HDR_FLAG_NO_NESTED;
	hash_cd_ctrl->inner_res_sz = digestsize;
	hash_cd_ctrl->final_sz = digestsize;
	hash_cd_ctrl->inner_state1_sz = state1_size;
	auth_param->auth_res_sz = digestsize;

	hash_cd_ctrl->inner_state2_sz  = state2_size;
	hash_cd_ctrl->inner_state2_offset = hash_cd_ctrl->hash_cfg_offset +
			((sizeof(struct icp_qat_hw_auth_setup) +
			 RTE_ALIGN_CEIL(hash_cd_ctrl->inner_state1_sz, 8))

	cdesc->cd_cur_ptr += state1_size + state2_size;
	cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;

	cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
	cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;
925 int qat_alg_validate_aes_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
928 case ICP_QAT_HW_AES_128_KEY_SZ:
929 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
931 case ICP_QAT_HW_AES_192_KEY_SZ:
932 *alg = ICP_QAT_HW_CIPHER_ALGO_AES192;
934 case ICP_QAT_HW_AES_256_KEY_SZ:
935 *alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
943 int qat_alg_validate_aes_docsisbpi_key(int key_len,
944 enum icp_qat_hw_cipher_algo *alg)
947 case ICP_QAT_HW_AES_128_KEY_SZ:
948 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
956 int qat_alg_validate_snow3g_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
959 case ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ:
960 *alg = ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2;
968 int qat_alg_validate_kasumi_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
971 case ICP_QAT_HW_KASUMI_KEY_SZ:
972 *alg = ICP_QAT_HW_CIPHER_ALGO_KASUMI;
980 int qat_alg_validate_des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
983 case ICP_QAT_HW_DES_KEY_SZ:
984 *alg = ICP_QAT_HW_CIPHER_ALGO_DES;
992 int qat_alg_validate_3des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
995 case QAT_3DES_KEY_SZ_OPT1:
996 case QAT_3DES_KEY_SZ_OPT2:
997 *alg = ICP_QAT_HW_CIPHER_ALGO_3DES;
1005 int qat_alg_validate_zuc_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1008 case ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ:
1009 *alg = ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3;