crypto/qat: use SPDX license
[dpdk.git] / drivers / crypto / qat / qat_adf / qat_algs_build_desc.c
1 /* SPDX-License-Identifier: (BSD-3-Clause OR GPL-2.0)
2  * Copyright(c) 2015-2018 Intel Corporation
3  */
4 #include <rte_memcpy.h>
5 #include <rte_common.h>
6 #include <rte_spinlock.h>
7 #include <rte_byteorder.h>
8 #include <rte_log.h>
9 #include <rte_malloc.h>
10 #include <rte_crypto_sym.h>
11
12 #include "../qat_logs.h"
13
14 #include <openssl/sha.h>        /* Needed to calculate pre-compute values */
15 #include <openssl/aes.h>        /* Needed to calculate pre-compute values */
16 #include <openssl/md5.h>        /* Needed to calculate pre-compute values */
17
18 #include "qat_algs.h"
19
20 /* returns block size in bytes per cipher algo */
21 int qat_cipher_get_block_size(enum icp_qat_hw_cipher_algo qat_cipher_alg)
22 {
23         switch (qat_cipher_alg) {
24         case ICP_QAT_HW_CIPHER_ALGO_DES:
25                 return ICP_QAT_HW_DES_BLK_SZ;
26         case ICP_QAT_HW_CIPHER_ALGO_3DES:
27                 return ICP_QAT_HW_3DES_BLK_SZ;
28         case ICP_QAT_HW_CIPHER_ALGO_AES128:
29         case ICP_QAT_HW_CIPHER_ALGO_AES192:
30         case ICP_QAT_HW_CIPHER_ALGO_AES256:
31                 return ICP_QAT_HW_AES_BLK_SZ;
32         default:
33                 PMD_DRV_LOG(ERR, "invalid block cipher alg %u", qat_cipher_alg);
34                 return -EFAULT;
35         };
36         return -EFAULT;
37 }
38
39 /*
40  * Returns size in bytes per hash algo for state1 size field in cd_ctrl
41  * This is digest size rounded up to nearest quadword
42  */
43 static int qat_hash_get_state1_size(enum icp_qat_hw_auth_algo qat_hash_alg)
44 {
45         switch (qat_hash_alg) {
46         case ICP_QAT_HW_AUTH_ALGO_SHA1:
47                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA1_STATE1_SZ,
48                                                 QAT_HW_DEFAULT_ALIGNMENT);
49         case ICP_QAT_HW_AUTH_ALGO_SHA224:
50                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA224_STATE1_SZ,
51                                                 QAT_HW_DEFAULT_ALIGNMENT);
52         case ICP_QAT_HW_AUTH_ALGO_SHA256:
53                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA256_STATE1_SZ,
54                                                 QAT_HW_DEFAULT_ALIGNMENT);
55         case ICP_QAT_HW_AUTH_ALGO_SHA384:
56                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA384_STATE1_SZ,
57                                                 QAT_HW_DEFAULT_ALIGNMENT);
58         case ICP_QAT_HW_AUTH_ALGO_SHA512:
59                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
60                                                 QAT_HW_DEFAULT_ALIGNMENT);
61         case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
62                 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ,
63                                                 QAT_HW_DEFAULT_ALIGNMENT);
64         case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
65         case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
66                 return QAT_HW_ROUND_UP(ICP_QAT_HW_GALOIS_128_STATE1_SZ,
67                                                 QAT_HW_DEFAULT_ALIGNMENT);
68         case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
69                 return QAT_HW_ROUND_UP(ICP_QAT_HW_ZUC_3G_EIA3_STATE1_SZ,
70                                                 QAT_HW_DEFAULT_ALIGNMENT);
71         case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
72                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SNOW_3G_UIA2_STATE1_SZ,
73                                                 QAT_HW_DEFAULT_ALIGNMENT);
74         case ICP_QAT_HW_AUTH_ALGO_MD5:
75                 return QAT_HW_ROUND_UP(ICP_QAT_HW_MD5_STATE1_SZ,
76                                                 QAT_HW_DEFAULT_ALIGNMENT);
77         case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
78                 return QAT_HW_ROUND_UP(ICP_QAT_HW_KASUMI_F9_STATE1_SZ,
79                                                 QAT_HW_DEFAULT_ALIGNMENT);
80         case ICP_QAT_HW_AUTH_ALGO_NULL:
81                 return QAT_HW_ROUND_UP(ICP_QAT_HW_NULL_STATE1_SZ,
82                                                 QAT_HW_DEFAULT_ALIGNMENT);
83         case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
84                 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_CBC_MAC_STATE1_SZ,
85                                                 QAT_HW_DEFAULT_ALIGNMENT);
86         case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
87                 /* return maximum state1 size in this case */
88                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
89                                                 QAT_HW_DEFAULT_ALIGNMENT);
90         default:
91                 PMD_DRV_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
92                 return -EFAULT;
93         };
94         return -EFAULT;
95 }
96
97 /* returns digest size in bytes  per hash algo */
98 static int qat_hash_get_digest_size(enum icp_qat_hw_auth_algo qat_hash_alg)
99 {
100         switch (qat_hash_alg) {
101         case ICP_QAT_HW_AUTH_ALGO_SHA1:
102                 return ICP_QAT_HW_SHA1_STATE1_SZ;
103         case ICP_QAT_HW_AUTH_ALGO_SHA224:
104                 return ICP_QAT_HW_SHA224_STATE1_SZ;
105         case ICP_QAT_HW_AUTH_ALGO_SHA256:
106                 return ICP_QAT_HW_SHA256_STATE1_SZ;
107         case ICP_QAT_HW_AUTH_ALGO_SHA384:
108                 return ICP_QAT_HW_SHA384_STATE1_SZ;
109         case ICP_QAT_HW_AUTH_ALGO_SHA512:
110                 return ICP_QAT_HW_SHA512_STATE1_SZ;
111         case ICP_QAT_HW_AUTH_ALGO_MD5:
112                 return ICP_QAT_HW_MD5_STATE1_SZ;
113         case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
114                 /* return maximum digest size in this case */
115                 return ICP_QAT_HW_SHA512_STATE1_SZ;
116         default:
117                 PMD_DRV_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
118                 return -EFAULT;
119         };
120         return -EFAULT;
121 }
122
123 /* returns block size in byes per hash algo */
124 static int qat_hash_get_block_size(enum icp_qat_hw_auth_algo qat_hash_alg)
125 {
126         switch (qat_hash_alg) {
127         case ICP_QAT_HW_AUTH_ALGO_SHA1:
128                 return SHA_CBLOCK;
129         case ICP_QAT_HW_AUTH_ALGO_SHA224:
130                 return SHA256_CBLOCK;
131         case ICP_QAT_HW_AUTH_ALGO_SHA256:
132                 return SHA256_CBLOCK;
133         case ICP_QAT_HW_AUTH_ALGO_SHA384:
134                 return SHA512_CBLOCK;
135         case ICP_QAT_HW_AUTH_ALGO_SHA512:
136                 return SHA512_CBLOCK;
137         case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
138                 return 16;
139         case ICP_QAT_HW_AUTH_ALGO_MD5:
140                 return MD5_CBLOCK;
141         case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
142                 /* return maximum block size in this case */
143                 return SHA512_CBLOCK;
144         default:
145                 PMD_DRV_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
146                 return -EFAULT;
147         };
148         return -EFAULT;
149 }
150
151 static int partial_hash_sha1(uint8_t *data_in, uint8_t *data_out)
152 {
153         SHA_CTX ctx;
154
155         if (!SHA1_Init(&ctx))
156                 return -EFAULT;
157         SHA1_Transform(&ctx, data_in);
158         rte_memcpy(data_out, &ctx, SHA_DIGEST_LENGTH);
159         return 0;
160 }
161
162 static int partial_hash_sha224(uint8_t *data_in, uint8_t *data_out)
163 {
164         SHA256_CTX ctx;
165
166         if (!SHA224_Init(&ctx))
167                 return -EFAULT;
168         SHA256_Transform(&ctx, data_in);
169         rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
170         return 0;
171 }
172
173 static int partial_hash_sha256(uint8_t *data_in, uint8_t *data_out)
174 {
175         SHA256_CTX ctx;
176
177         if (!SHA256_Init(&ctx))
178                 return -EFAULT;
179         SHA256_Transform(&ctx, data_in);
180         rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
181         return 0;
182 }
183
184 static int partial_hash_sha384(uint8_t *data_in, uint8_t *data_out)
185 {
186         SHA512_CTX ctx;
187
188         if (!SHA384_Init(&ctx))
189                 return -EFAULT;
190         SHA512_Transform(&ctx, data_in);
191         rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
192         return 0;
193 }
194
195 static int partial_hash_sha512(uint8_t *data_in, uint8_t *data_out)
196 {
197         SHA512_CTX ctx;
198
199         if (!SHA512_Init(&ctx))
200                 return -EFAULT;
201         SHA512_Transform(&ctx, data_in);
202         rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
203         return 0;
204 }
205
206 static int partial_hash_md5(uint8_t *data_in, uint8_t *data_out)
207 {
208         MD5_CTX ctx;
209
210         if (!MD5_Init(&ctx))
211                 return -EFAULT;
212         MD5_Transform(&ctx, data_in);
213         rte_memcpy(data_out, &ctx, MD5_DIGEST_LENGTH);
214
215         return 0;
216 }
217
218 static int partial_hash_compute(enum icp_qat_hw_auth_algo hash_alg,
219                         uint8_t *data_in,
220                         uint8_t *data_out)
221 {
222         int digest_size;
223         uint8_t digest[qat_hash_get_digest_size(
224                         ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
225         uint32_t *hash_state_out_be32;
226         uint64_t *hash_state_out_be64;
227         int i;
228
229         PMD_INIT_FUNC_TRACE();
230         digest_size = qat_hash_get_digest_size(hash_alg);
231         if (digest_size <= 0)
232                 return -EFAULT;
233
234         hash_state_out_be32 = (uint32_t *)data_out;
235         hash_state_out_be64 = (uint64_t *)data_out;
236
237         switch (hash_alg) {
238         case ICP_QAT_HW_AUTH_ALGO_SHA1:
239                 if (partial_hash_sha1(data_in, digest))
240                         return -EFAULT;
241                 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
242                         *hash_state_out_be32 =
243                                 rte_bswap32(*(((uint32_t *)digest)+i));
244                 break;
245         case ICP_QAT_HW_AUTH_ALGO_SHA224:
246                 if (partial_hash_sha224(data_in, digest))
247                         return -EFAULT;
248                 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
249                         *hash_state_out_be32 =
250                                 rte_bswap32(*(((uint32_t *)digest)+i));
251                 break;
252         case ICP_QAT_HW_AUTH_ALGO_SHA256:
253                 if (partial_hash_sha256(data_in, digest))
254                         return -EFAULT;
255                 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
256                         *hash_state_out_be32 =
257                                 rte_bswap32(*(((uint32_t *)digest)+i));
258                 break;
259         case ICP_QAT_HW_AUTH_ALGO_SHA384:
260                 if (partial_hash_sha384(data_in, digest))
261                         return -EFAULT;
262                 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
263                         *hash_state_out_be64 =
264                                 rte_bswap64(*(((uint64_t *)digest)+i));
265                 break;
266         case ICP_QAT_HW_AUTH_ALGO_SHA512:
267                 if (partial_hash_sha512(data_in, digest))
268                         return -EFAULT;
269                 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
270                         *hash_state_out_be64 =
271                                 rte_bswap64(*(((uint64_t *)digest)+i));
272                 break;
273         case ICP_QAT_HW_AUTH_ALGO_MD5:
274                 if (partial_hash_md5(data_in, data_out))
275                         return -EFAULT;
276                 break;
277         default:
278                 PMD_DRV_LOG(ERR, "invalid hash alg %u", hash_alg);
279                 return -EFAULT;
280         }
281
282         return 0;
283 }
284 #define HMAC_IPAD_VALUE 0x36
285 #define HMAC_OPAD_VALUE 0x5c
286 #define HASH_XCBC_PRECOMP_KEY_NUM 3
287
288 static int qat_alg_do_precomputes(enum icp_qat_hw_auth_algo hash_alg,
289                                 const uint8_t *auth_key,
290                                 uint16_t auth_keylen,
291                                 uint8_t *p_state_buf,
292                                 uint16_t *p_state_len)
293 {
294         int block_size;
295         uint8_t ipad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
296         uint8_t opad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
297         int i;
298
299         PMD_INIT_FUNC_TRACE();
300         if (hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC) {
301                 static uint8_t qat_aes_xcbc_key_seed[
302                                         ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ] = {
303                         0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
304                         0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
305                         0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
306                         0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
307                         0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
308                         0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
309                 };
310
311                 uint8_t *in = NULL;
312                 uint8_t *out = p_state_buf;
313                 int x;
314                 AES_KEY enc_key;
315
316                 in = rte_zmalloc("working mem for key",
317                                 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ, 16);
318                 if (in == NULL) {
319                         PMD_DRV_LOG(ERR, "Failed to alloc memory");
320                         return -ENOMEM;
321                 }
322
323                 rte_memcpy(in, qat_aes_xcbc_key_seed,
324                                 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
325                 for (x = 0; x < HASH_XCBC_PRECOMP_KEY_NUM; x++) {
326                         if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
327                                 &enc_key) != 0) {
328                                 rte_free(in -
329                                         (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ));
330                                 memset(out -
331                                         (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ),
332                                         0, ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
333                                 return -EFAULT;
334                         }
335                         AES_encrypt(in, out, &enc_key);
336                         in += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
337                         out += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
338                 }
339                 *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
340                 rte_free(in - x*ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ);
341                 return 0;
342         } else if ((hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_128) ||
343                 (hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_64)) {
344                 uint8_t *in = NULL;
345                 uint8_t *out = p_state_buf;
346                 AES_KEY enc_key;
347
348                 memset(p_state_buf, 0, ICP_QAT_HW_GALOIS_H_SZ +
349                                 ICP_QAT_HW_GALOIS_LEN_A_SZ +
350                                 ICP_QAT_HW_GALOIS_E_CTR0_SZ);
351                 in = rte_zmalloc("working mem for key",
352                                 ICP_QAT_HW_GALOIS_H_SZ, 16);
353                 if (in == NULL) {
354                         PMD_DRV_LOG(ERR, "Failed to alloc memory");
355                         return -ENOMEM;
356                 }
357
358                 memset(in, 0, ICP_QAT_HW_GALOIS_H_SZ);
359                 if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
360                         &enc_key) != 0) {
361                         return -EFAULT;
362                 }
363                 AES_encrypt(in, out, &enc_key);
364                 *p_state_len = ICP_QAT_HW_GALOIS_H_SZ +
365                                 ICP_QAT_HW_GALOIS_LEN_A_SZ +
366                                 ICP_QAT_HW_GALOIS_E_CTR0_SZ;
367                 rte_free(in);
368                 return 0;
369         }
370
371         block_size = qat_hash_get_block_size(hash_alg);
372         if (block_size <= 0)
373                 return -EFAULT;
374         /* init ipad and opad from key and xor with fixed values */
375         memset(ipad, 0, block_size);
376         memset(opad, 0, block_size);
377
378         if (auth_keylen > (unsigned int)block_size) {
379                 PMD_DRV_LOG(ERR, "invalid keylen %u", auth_keylen);
380                 return -EFAULT;
381         }
382         rte_memcpy(ipad, auth_key, auth_keylen);
383         rte_memcpy(opad, auth_key, auth_keylen);
384
385         for (i = 0; i < block_size; i++) {
386                 uint8_t *ipad_ptr = ipad + i;
387                 uint8_t *opad_ptr = opad + i;
388                 *ipad_ptr ^= HMAC_IPAD_VALUE;
389                 *opad_ptr ^= HMAC_OPAD_VALUE;
390         }
391
392         /* do partial hash of ipad and copy to state1 */
393         if (partial_hash_compute(hash_alg, ipad, p_state_buf)) {
394                 memset(ipad, 0, block_size);
395                 memset(opad, 0, block_size);
396                 PMD_DRV_LOG(ERR, "ipad precompute failed");
397                 return -EFAULT;
398         }
399
400         /*
401          * State len is a multiple of 8, so may be larger than the digest.
402          * Put the partial hash of opad state_len bytes after state1
403          */
404         *p_state_len = qat_hash_get_state1_size(hash_alg);
405         if (partial_hash_compute(hash_alg, opad, p_state_buf + *p_state_len)) {
406                 memset(ipad, 0, block_size);
407                 memset(opad, 0, block_size);
408                 PMD_DRV_LOG(ERR, "opad precompute failed");
409                 return -EFAULT;
410         }
411
412         /*  don't leave data lying around */
413         memset(ipad, 0, block_size);
414         memset(opad, 0, block_size);
415         return 0;
416 }
417
/*
 * Initialise the common request header shared by all QAT lookaside (LA)
 * requests: flat pointer type, 64-bit content descriptor addressing,
 * no partial processing, and the firmware protocol bits matching the
 * session's proto_flags.
 */
void qat_alg_init_common_hdr(struct icp_qat_fw_comn_req_hdr *header,
		enum qat_crypto_proto_flag proto_flags)
{
	PMD_INIT_FUNC_TRACE();
	header->hdr_flags =
		ICP_QAT_FW_COMN_HDR_FLAGS_BUILD(ICP_QAT_FW_COMN_REQ_FLAG_SET);
	header->service_type = ICP_QAT_FW_COMN_REQ_CPM_FW_LA;
	header->comn_req_flags =
		ICP_QAT_FW_COMN_FLAGS_BUILD(QAT_COMN_CD_FLD_TYPE_64BIT_ADR,
					QAT_COMN_PTR_TYPE_FLAT);
	ICP_QAT_FW_LA_PARTIAL_SET(header->serv_specif_flags,
				  ICP_QAT_FW_LA_PARTIAL_NONE);
	ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(header->serv_specif_flags,
					   ICP_QAT_FW_CIPH_IV_16BYTE_DATA);

	/*
	 * Map the session protocol flag onto the firmware flag bits.
	 * ZUC has its own dedicated flag-set macro; NOTE(review): there
	 * is no default case — presumably every enum value is handled.
	 */
	switch (proto_flags)		{
	case QAT_CRYPTO_PROTO_FLAG_NONE:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_PROTO);
		break;
	case QAT_CRYPTO_PROTO_FLAG_CCM:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_CCM_PROTO);
		break;
	case QAT_CRYPTO_PROTO_FLAG_GCM:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_GCM_PROTO);
		break;
	case QAT_CRYPTO_PROTO_FLAG_SNOW3G:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_SNOW_3G_PROTO);
		break;
	case QAT_CRYPTO_PROTO_FLAG_ZUC:
		ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(header->serv_specif_flags,
			ICP_QAT_FW_LA_ZUC_3G_PROTO);
		break;
	}

	/* no state update between requests, digest returned separately */
	ICP_QAT_FW_LA_UPDATE_STATE_SET(header->serv_specif_flags,
					   ICP_QAT_FW_LA_NO_UPDATE_STATE);
	ICP_QAT_FW_LA_DIGEST_IN_BUFFER_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_DIGEST_IN_BUFFER);
}
461
462 /*
463  *      Snow3G and ZUC should never use this function
464  *      and set its protocol flag in both cipher and auth part of content
465  *      descriptor building function
466  */
467 static enum qat_crypto_proto_flag
468 qat_get_crypto_proto_flag(uint16_t flags)
469 {
470         int proto = ICP_QAT_FW_LA_PROTO_GET(flags);
471         enum qat_crypto_proto_flag qat_proto_flag =
472                         QAT_CRYPTO_PROTO_FLAG_NONE;
473
474         switch (proto) {
475         case ICP_QAT_FW_LA_GCM_PROTO:
476                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
477                 break;
478         case ICP_QAT_FW_LA_CCM_PROTO:
479                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
480                 break;
481         }
482
483         return qat_proto_flag;
484 }
485
/*
 * Build the cipher part of the session's content descriptor and request
 * template: slice chaining, cipher hardware config word, key material
 * (including KASUMI F8 derived key and 3DES key padding) and descriptor
 * size fields. Returns 0 on success, -EFAULT for an invalid command.
 */
int qat_alg_aead_session_create_content_desc_cipher(struct qat_session *cdesc,
						uint8_t *cipherkey,
						uint32_t cipherkeylen)
{
	struct icp_qat_hw_cipher_algo_blk *cipher;
	struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
	struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
	struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
	/* cipher and auth cd_ctrl views alias the same cd_ctrl storage */
	void *ptr = &req_tmpl->cd_ctrl;
	struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
	struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
	enum icp_qat_hw_cipher_convert key_convert;
	enum qat_crypto_proto_flag qat_proto_flag =
		QAT_CRYPTO_PROTO_FLAG_NONE;
	uint32_t total_key_size;
	uint16_t cipher_offset, cd_size;
	uint32_t wordIndex  = 0;
	uint32_t *temp_key = NULL;
	PMD_INIT_FUNC_TRACE();

	/* configure slice chaining based on the command type */
	if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER) {
		/* cipher only: cipher slice -> DRAM write, no auth result */
		cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
		ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_DRAM_WR);
		ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_RET_AUTH_RES);
		ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
		/* cipher then hash: cipher slice -> auth slice -> DRAM */
		cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
		ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_DRAM_WR);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
		/* HASH_CIPHER is accepted without setup here — presumably
		 * the auth-side builder does the chaining; verify caller
		 */
		PMD_DRV_LOG(ERR, "Invalid param, must be a cipher command.");
		return -EFAULT;
	}

	if (cdesc->qat_mode == ICP_QAT_HW_CIPHER_CTR_MODE) {
		/*
		 * CTR Streaming ciphers are a special case. Decrypt = encrypt
		 * Overriding default values previously set
		 */
		cdesc->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
		key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2
		|| cdesc->qat_cipher_alg ==
			ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)
		key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
	else if (cdesc->qat_dir == ICP_QAT_HW_CIPHER_ENCRYPT)
		key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
	else
		key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;

	/* per-algorithm key size, state size and protocol flag */
	if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
		/* key material includes the IV area for Snow3G */
		total_key_size = ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ +
			ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
		cipher_cd_ctrl->cipher_state_sz =
			ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;

	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
		total_key_size = ICP_QAT_HW_KASUMI_F8_KEY_SZ;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_KASUMI_BLK_SZ >> 3;
		cipher_cd_ctrl->cipher_padding_sz =
					(2 * ICP_QAT_HW_KASUMI_BLK_SZ) >> 3;
	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES) {
		total_key_size = ICP_QAT_HW_3DES_KEY_SZ;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_3DES_BLK_SZ >> 3;
		/* keep any CCM/GCM proto bits already in the header */
		qat_proto_flag =
			qat_get_crypto_proto_flag(header->serv_specif_flags);
	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_DES) {
		total_key_size = ICP_QAT_HW_DES_KEY_SZ;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_DES_BLK_SZ >> 3;
		qat_proto_flag =
			qat_get_crypto_proto_flag(header->serv_specif_flags);
	} else if (cdesc->qat_cipher_alg ==
		ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
		total_key_size = ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ +
			ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
		cipher_cd_ctrl->cipher_state_sz =
			ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
		/* ZUC requires a gen2 or newer QAT device */
		cdesc->min_qat_dev_gen = QAT_GEN2;
	} else {
		/* AES and other block ciphers: key size as supplied */
		total_key_size = cipherkeylen;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_AES_BLK_SZ >> 3;
		qat_proto_flag =
			qat_get_crypto_proto_flag(header->serv_specif_flags);
	}
	/* sizes/offsets in the cd_ctrl are expressed in quadwords */
	cipher_cd_ctrl->cipher_key_sz = total_key_size >> 3;
	cipher_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
	cipher_cd_ctrl->cipher_cfg_offset = cipher_offset >> 3;

	header->service_cmd_id = cdesc->qat_cmd;
	qat_alg_init_common_hdr(header, qat_proto_flag);

	/* write the cipher config word and key at the current cd offset */
	cipher = (struct icp_qat_hw_cipher_algo_blk *)cdesc->cd_cur_ptr;
	cipher->cipher_config.val =
	    ICP_QAT_HW_CIPHER_CONFIG_BUILD(cdesc->qat_mode,
					cdesc->qat_cipher_alg, key_convert,
					cdesc->qat_dir);

	if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
		/* KASUMI F8 needs a second, modified copy of the key */
		temp_key = (uint32_t *)(cdesc->cd_cur_ptr +
					sizeof(struct icp_qat_hw_cipher_config)
					+ cipherkeylen);
		memcpy(cipher->key, cipherkey, cipherkeylen);
		memcpy(temp_key, cipherkey, cipherkeylen);

		/* XOR Key with KASUMI F8 key modifier at 4 bytes level */
		for (wordIndex = 0; wordIndex < (cipherkeylen >> 2);
								wordIndex++)
			temp_key[wordIndex] ^= KASUMI_F8_KEY_MODIFIER_4_BYTES;

		cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
					cipherkeylen + cipherkeylen;
	} else {
		memcpy(cipher->key, cipherkey, cipherkeylen);
		cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
					cipherkeylen;
	}

	/* pad the key area up to the hardware's expected key size */
	if (total_key_size > cipherkeylen) {
		uint32_t padding_size =  total_key_size-cipherkeylen;
		if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
			&& (cipherkeylen == QAT_3DES_KEY_SZ_OPT2))
			/* K3 not provided so use K1 = K3*/
			memcpy(cdesc->cd_cur_ptr, cipherkey, padding_size);
		else
			memset(cdesc->cd_cur_ptr, 0, padding_size);
		cdesc->cd_cur_ptr += padding_size;
	}
	/* total content descriptor size, rounded up to quadwords */
	cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
	cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;

	return 0;
}
633
634 int qat_alg_aead_session_create_content_desc_auth(struct qat_session *cdesc,
635                                                 uint8_t *authkey,
636                                                 uint32_t authkeylen,
637                                                 uint32_t aad_length,
638                                                 uint32_t digestsize,
639                                                 unsigned int operation)
640 {
641         struct icp_qat_hw_auth_setup *hash;
642         struct icp_qat_hw_cipher_algo_blk *cipherconfig;
643         struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
644         struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
645         struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
646         void *ptr = &req_tmpl->cd_ctrl;
647         struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
648         struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
649         struct icp_qat_fw_la_auth_req_params *auth_param =
650                 (struct icp_qat_fw_la_auth_req_params *)
651                 ((char *)&req_tmpl->serv_specif_rqpars +
652                 sizeof(struct icp_qat_fw_la_cipher_req_params));
653         uint16_t state1_size = 0, state2_size = 0;
654         uint16_t hash_offset, cd_size;
655         uint32_t *aad_len = NULL;
656         uint32_t wordIndex  = 0;
657         uint32_t *pTempKey;
658         enum qat_crypto_proto_flag qat_proto_flag =
659                 QAT_CRYPTO_PROTO_FLAG_NONE;
660
661         PMD_INIT_FUNC_TRACE();
662
663         if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_AUTH) {
664                 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
665                                         ICP_QAT_FW_SLICE_AUTH);
666                 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
667                                         ICP_QAT_FW_SLICE_DRAM_WR);
668                 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
669         } else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
670                 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
671                                 ICP_QAT_FW_SLICE_AUTH);
672                 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
673                                 ICP_QAT_FW_SLICE_CIPHER);
674                 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
675                                 ICP_QAT_FW_SLICE_CIPHER);
676                 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
677                                 ICP_QAT_FW_SLICE_DRAM_WR);
678                 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
679         } else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
680                 PMD_DRV_LOG(ERR, "Invalid param, must be a hash command.");
681                 return -EFAULT;
682         }
683
684         if (operation == RTE_CRYPTO_AUTH_OP_VERIFY) {
685                 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
686                                 ICP_QAT_FW_LA_NO_RET_AUTH_RES);
687                 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
688                                 ICP_QAT_FW_LA_CMP_AUTH_RES);
689                 cdesc->auth_op = ICP_QAT_HW_AUTH_VERIFY;
690         } else {
691                 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
692                                            ICP_QAT_FW_LA_RET_AUTH_RES);
693                 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
694                                            ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
695                 cdesc->auth_op = ICP_QAT_HW_AUTH_GENERATE;
696         }
697
698         /*
699          * Setup the inner hash config
700          */
701         hash_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
702         hash = (struct icp_qat_hw_auth_setup *)cdesc->cd_cur_ptr;
703         hash->auth_config.reserved = 0;
704         hash->auth_config.config =
705                         ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE1,
706                                 cdesc->qat_hash_alg, digestsize);
707
708         if (cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2
709                 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_KASUMI_F9
710                 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3)
711                 hash->auth_counter.counter = 0;
712         else
713                 hash->auth_counter.counter = rte_bswap32(
714                                 qat_hash_get_block_size(cdesc->qat_hash_alg));
715
716         cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_auth_setup);
717
718         /*
719          * cd_cur_ptr now points at the state1 information.
720          */
721         switch (cdesc->qat_hash_alg) {
722         case ICP_QAT_HW_AUTH_ALGO_SHA1:
723                 if (qat_alg_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA1,
724                         authkey, authkeylen, cdesc->cd_cur_ptr, &state1_size)) {
725                         PMD_DRV_LOG(ERR, "(SHA)precompute failed");
726                         return -EFAULT;
727                 }
728                 state2_size = RTE_ALIGN_CEIL(ICP_QAT_HW_SHA1_STATE2_SZ, 8);
729                 break;
730         case ICP_QAT_HW_AUTH_ALGO_SHA224:
731                 if (qat_alg_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA224,
732                         authkey, authkeylen, cdesc->cd_cur_ptr, &state1_size)) {
733                         PMD_DRV_LOG(ERR, "(SHA)precompute failed");
734                         return -EFAULT;
735                 }
736                 state2_size = ICP_QAT_HW_SHA224_STATE2_SZ;
737                 break;
738         case ICP_QAT_HW_AUTH_ALGO_SHA256:
739                 if (qat_alg_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA256,
740                         authkey, authkeylen, cdesc->cd_cur_ptr, &state1_size)) {
741                         PMD_DRV_LOG(ERR, "(SHA)precompute failed");
742                         return -EFAULT;
743                 }
744                 state2_size = ICP_QAT_HW_SHA256_STATE2_SZ;
745                 break;
746         case ICP_QAT_HW_AUTH_ALGO_SHA384:
747                 if (qat_alg_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA384,
748                         authkey, authkeylen, cdesc->cd_cur_ptr, &state1_size)) {
749                         PMD_DRV_LOG(ERR, "(SHA)precompute failed");
750                         return -EFAULT;
751                 }
752                 state2_size = ICP_QAT_HW_SHA384_STATE2_SZ;
753                 break;
754         case ICP_QAT_HW_AUTH_ALGO_SHA512:
755                 if (qat_alg_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA512,
756                         authkey, authkeylen, cdesc->cd_cur_ptr, &state1_size)) {
757                         PMD_DRV_LOG(ERR, "(SHA)precompute failed");
758                         return -EFAULT;
759                 }
760                 state2_size = ICP_QAT_HW_SHA512_STATE2_SZ;
761                 break;
762         case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
763                 state1_size = ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
764                 if (qat_alg_do_precomputes(ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC,
765                         authkey, authkeylen, cdesc->cd_cur_ptr + state1_size,
766                         &state2_size)) {
767                         PMD_DRV_LOG(ERR, "(XCBC)precompute failed");
768                         return -EFAULT;
769                 }
770                 break;
771         case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
772         case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
773                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
774                 state1_size = ICP_QAT_HW_GALOIS_128_STATE1_SZ;
775                 if (qat_alg_do_precomputes(cdesc->qat_hash_alg,
776                         authkey, authkeylen, cdesc->cd_cur_ptr + state1_size,
777                         &state2_size)) {
778                         PMD_DRV_LOG(ERR, "(GCM)precompute failed");
779                         return -EFAULT;
780                 }
781                 /*
782                  * Write (the length of AAD) into bytes 16-19 of state2
783                  * in big-endian format. This field is 8 bytes
784                  */
785                 auth_param->u2.aad_sz =
786                                 RTE_ALIGN_CEIL(aad_length, 16);
787                 auth_param->hash_state_sz = (auth_param->u2.aad_sz) >> 3;
788
789                 aad_len = (uint32_t *)(cdesc->cd_cur_ptr +
790                                         ICP_QAT_HW_GALOIS_128_STATE1_SZ +
791                                         ICP_QAT_HW_GALOIS_H_SZ);
792                 *aad_len = rte_bswap32(aad_length);
793                 cdesc->aad_len = aad_length;
794                 break;
795         case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
796                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
797                 state1_size = qat_hash_get_state1_size(
798                                 ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2);
799                 state2_size = ICP_QAT_HW_SNOW_3G_UIA2_STATE2_SZ;
800                 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
801
802                 cipherconfig = (struct icp_qat_hw_cipher_algo_blk *)
803                                 (cdesc->cd_cur_ptr + state1_size + state2_size);
804                 cipherconfig->cipher_config.val =
805                 ICP_QAT_HW_CIPHER_CONFIG_BUILD(ICP_QAT_HW_CIPHER_ECB_MODE,
806                         ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2,
807                         ICP_QAT_HW_CIPHER_KEY_CONVERT,
808                         ICP_QAT_HW_CIPHER_ENCRYPT);
809                 memcpy(cipherconfig->key, authkey, authkeylen);
810                 memset(cipherconfig->key + authkeylen,
811                                 0, ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ);
812                 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
813                                 authkeylen + ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
814                 auth_param->hash_state_sz = ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
815                 break;
816         case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
817                 hash->auth_config.config =
818                         ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE0,
819                                 cdesc->qat_hash_alg, digestsize);
820                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
821                 state1_size = qat_hash_get_state1_size(
822                                 ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3);
823                 state2_size = ICP_QAT_HW_ZUC_3G_EIA3_STATE2_SZ;
824                 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size
825                         + ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ);
826
827                 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
828                 cdesc->cd_cur_ptr += state1_size + state2_size
829                         + ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
830                 auth_param->hash_state_sz = ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
831                 cdesc->min_qat_dev_gen = QAT_GEN2;
832
833                 break;
834         case ICP_QAT_HW_AUTH_ALGO_MD5:
835                 if (qat_alg_do_precomputes(ICP_QAT_HW_AUTH_ALGO_MD5,
836                         authkey, authkeylen, cdesc->cd_cur_ptr,
837                         &state1_size)) {
838                         PMD_DRV_LOG(ERR, "(MD5)precompute failed");
839                         return -EFAULT;
840                 }
841                 state2_size = ICP_QAT_HW_MD5_STATE2_SZ;
842                 break;
843         case ICP_QAT_HW_AUTH_ALGO_NULL:
844                 state1_size = qat_hash_get_state1_size(
845                                 ICP_QAT_HW_AUTH_ALGO_NULL);
846                 state2_size = ICP_QAT_HW_NULL_STATE2_SZ;
847                 break;
848         case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
849                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
850                 state1_size = qat_hash_get_state1_size(
851                                 ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC);
852                 state2_size = ICP_QAT_HW_AES_CBC_MAC_KEY_SZ +
853                                 ICP_QAT_HW_AES_CCM_CBC_E_CTR0_SZ;
854
855                 if (aad_length > 0) {
856                         aad_length += ICP_QAT_HW_CCM_AAD_B0_LEN +
857                                 ICP_QAT_HW_CCM_AAD_LEN_INFO;
858                         auth_param->u2.aad_sz =
859                                         RTE_ALIGN_CEIL(aad_length,
860                                         ICP_QAT_HW_CCM_AAD_ALIGNMENT);
861                 } else {
862                         auth_param->u2.aad_sz = ICP_QAT_HW_CCM_AAD_B0_LEN;
863                 }
864
865                 cdesc->aad_len = aad_length;
866                 hash->auth_counter.counter = 0;
867
868                 hash_cd_ctrl->outer_prefix_sz = digestsize;
869                 auth_param->hash_state_sz = digestsize;
870
871                 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
872                 break;
873         case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
874                 state1_size = qat_hash_get_state1_size(
875                                 ICP_QAT_HW_AUTH_ALGO_KASUMI_F9);
876                 state2_size = ICP_QAT_HW_KASUMI_F9_STATE2_SZ;
877                 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
878                 pTempKey = (uint32_t *)(cdesc->cd_cur_ptr + state1_size
879                                                         + authkeylen);
880                 /*
881                 * The Inner Hash Initial State2 block must contain IK
882                 * (Initialisation Key), followed by IK XOR-ed with KM
883                 * (Key Modifier): IK||(IK^KM).
884                 */
885                 /* write the auth key */
886                 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
887                 /* initialise temp key with auth key */
888                 memcpy(pTempKey, authkey, authkeylen);
889                 /* XOR Key with KASUMI F9 key modifier at 4 bytes level */
890                 for (wordIndex = 0; wordIndex < (authkeylen >> 2); wordIndex++)
891                         pTempKey[wordIndex] ^= KASUMI_F9_KEY_MODIFIER_4_BYTES;
892                 break;
893         default:
894                 PMD_DRV_LOG(ERR, "Invalid HASH alg %u", cdesc->qat_hash_alg);
895                 return -EFAULT;
896         }
897
898         /* Request template setup */
899         qat_alg_init_common_hdr(header, qat_proto_flag);
900         header->service_cmd_id = cdesc->qat_cmd;
901
902         /* Auth CD config setup */
903         hash_cd_ctrl->hash_cfg_offset = hash_offset >> 3;
904         hash_cd_ctrl->hash_flags = ICP_QAT_FW_AUTH_HDR_FLAG_NO_NESTED;
905         hash_cd_ctrl->inner_res_sz = digestsize;
906         hash_cd_ctrl->final_sz = digestsize;
907         hash_cd_ctrl->inner_state1_sz = state1_size;
908         auth_param->auth_res_sz = digestsize;
909
910         hash_cd_ctrl->inner_state2_sz  = state2_size;
911         hash_cd_ctrl->inner_state2_offset = hash_cd_ctrl->hash_cfg_offset +
912                         ((sizeof(struct icp_qat_hw_auth_setup) +
913                          RTE_ALIGN_CEIL(hash_cd_ctrl->inner_state1_sz, 8))
914                                         >> 3);
915
916         cdesc->cd_cur_ptr += state1_size + state2_size;
917         cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
918
919         cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
920         cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;
921
922         return 0;
923 }
924
925 int qat_alg_validate_aes_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
926 {
927         switch (key_len) {
928         case ICP_QAT_HW_AES_128_KEY_SZ:
929                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
930                 break;
931         case ICP_QAT_HW_AES_192_KEY_SZ:
932                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES192;
933                 break;
934         case ICP_QAT_HW_AES_256_KEY_SZ:
935                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
936                 break;
937         default:
938                 return -EINVAL;
939         }
940         return 0;
941 }
942
943 int qat_alg_validate_aes_docsisbpi_key(int key_len,
944                 enum icp_qat_hw_cipher_algo *alg)
945 {
946         switch (key_len) {
947         case ICP_QAT_HW_AES_128_KEY_SZ:
948                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
949                 break;
950         default:
951                 return -EINVAL;
952         }
953         return 0;
954 }
955
956 int qat_alg_validate_snow3g_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
957 {
958         switch (key_len) {
959         case ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ:
960                 *alg = ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2;
961                 break;
962         default:
963                 return -EINVAL;
964         }
965         return 0;
966 }
967
968 int qat_alg_validate_kasumi_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
969 {
970         switch (key_len) {
971         case ICP_QAT_HW_KASUMI_KEY_SZ:
972                 *alg = ICP_QAT_HW_CIPHER_ALGO_KASUMI;
973                 break;
974         default:
975                 return -EINVAL;
976         }
977         return 0;
978 }
979
980 int qat_alg_validate_des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
981 {
982         switch (key_len) {
983         case ICP_QAT_HW_DES_KEY_SZ:
984                 *alg = ICP_QAT_HW_CIPHER_ALGO_DES;
985                 break;
986         default:
987                 return -EINVAL;
988         }
989         return 0;
990 }
991
992 int qat_alg_validate_3des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
993 {
994         switch (key_len) {
995         case QAT_3DES_KEY_SZ_OPT1:
996         case QAT_3DES_KEY_SZ_OPT2:
997                 *alg = ICP_QAT_HW_CIPHER_ALGO_3DES;
998                 break;
999         default:
1000                 return -EINVAL;
1001         }
1002         return 0;
1003 }
1004
1005 int qat_alg_validate_zuc_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1006 {
1007         switch (key_len) {
1008         case ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ:
1009                 *alg = ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3;
1010                 break;
1011         default:
1012                 return -EINVAL;
1013         }
1014         return 0;
1015 }