11b459ee825b19bb69e094499df4d16c86d40358
[dpdk.git] / drivers / crypto / qat / qat_sym_session.c
1 /* SPDX-License-Identifier: (BSD-3-Clause OR GPL-2.0)
2  * Copyright(c) 2015-2019 Intel Corporation
3  */
4
5 #include <openssl/sha.h>        /* Needed to calculate pre-compute values */
6 #include <openssl/aes.h>        /* Needed to calculate pre-compute values */
7 #include <openssl/md5.h>        /* Needed to calculate pre-compute values */
8 #include <openssl/evp.h>        /* Needed for bpi runt block processing */
9
10 #include <rte_memcpy.h>
11 #include <rte_common.h>
12 #include <rte_spinlock.h>
13 #include <rte_byteorder.h>
14 #include <rte_log.h>
15 #include <rte_malloc.h>
16 #include <rte_crypto_sym.h>
17
18 #include "qat_logs.h"
19 #include "qat_sym_session.h"
20 #include "qat_sym_pmd.h"
21
22 /* SHA1 - 20 bytes - Initialiser state can be found in FIPS stds 180-2 */
23 static const uint8_t sha1InitialState[] = {
24         0x67, 0x45, 0x23, 0x01, 0xef, 0xcd, 0xab, 0x89, 0x98, 0xba,
25         0xdc, 0xfe, 0x10, 0x32, 0x54, 0x76, 0xc3, 0xd2, 0xe1, 0xf0};
26
27 /* SHA 224 - 32 bytes - Initialiser state can be found in FIPS stds 180-2 */
28 static const uint8_t sha224InitialState[] = {
29         0xc1, 0x05, 0x9e, 0xd8, 0x36, 0x7c, 0xd5, 0x07, 0x30, 0x70, 0xdd,
30         0x17, 0xf7, 0x0e, 0x59, 0x39, 0xff, 0xc0, 0x0b, 0x31, 0x68, 0x58,
31         0x15, 0x11, 0x64, 0xf9, 0x8f, 0xa7, 0xbe, 0xfa, 0x4f, 0xa4};
32
33 /* SHA 256 - 32 bytes - Initialiser state can be found in FIPS stds 180-2 */
34 static const uint8_t sha256InitialState[] = {
35         0x6a, 0x09, 0xe6, 0x67, 0xbb, 0x67, 0xae, 0x85, 0x3c, 0x6e, 0xf3,
36         0x72, 0xa5, 0x4f, 0xf5, 0x3a, 0x51, 0x0e, 0x52, 0x7f, 0x9b, 0x05,
37         0x68, 0x8c, 0x1f, 0x83, 0xd9, 0xab, 0x5b, 0xe0, 0xcd, 0x19};
38
39 /* SHA 384 - 64 bytes - Initialiser state can be found in FIPS stds 180-2 */
40 static const uint8_t sha384InitialState[] = {
41         0xcb, 0xbb, 0x9d, 0x5d, 0xc1, 0x05, 0x9e, 0xd8, 0x62, 0x9a, 0x29,
42         0x2a, 0x36, 0x7c, 0xd5, 0x07, 0x91, 0x59, 0x01, 0x5a, 0x30, 0x70,
43         0xdd, 0x17, 0x15, 0x2f, 0xec, 0xd8, 0xf7, 0x0e, 0x59, 0x39, 0x67,
44         0x33, 0x26, 0x67, 0xff, 0xc0, 0x0b, 0x31, 0x8e, 0xb4, 0x4a, 0x87,
45         0x68, 0x58, 0x15, 0x11, 0xdb, 0x0c, 0x2e, 0x0d, 0x64, 0xf9, 0x8f,
46         0xa7, 0x47, 0xb5, 0x48, 0x1d, 0xbe, 0xfa, 0x4f, 0xa4};
47
48 /* SHA 512 - 64 bytes - Initialiser state can be found in FIPS stds 180-2 */
49 static const uint8_t sha512InitialState[] = {
50         0x6a, 0x09, 0xe6, 0x67, 0xf3, 0xbc, 0xc9, 0x08, 0xbb, 0x67, 0xae,
51         0x85, 0x84, 0xca, 0xa7, 0x3b, 0x3c, 0x6e, 0xf3, 0x72, 0xfe, 0x94,
52         0xf8, 0x2b, 0xa5, 0x4f, 0xf5, 0x3a, 0x5f, 0x1d, 0x36, 0xf1, 0x51,
53         0x0e, 0x52, 0x7f, 0xad, 0xe6, 0x82, 0xd1, 0x9b, 0x05, 0x68, 0x8c,
54         0x2b, 0x3e, 0x6c, 0x1f, 0x1f, 0x83, 0xd9, 0xab, 0xfb, 0x41, 0xbd,
55         0x6b, 0x5b, 0xe0, 0xcd, 0x19, 0x13, 0x7e, 0x21, 0x79};
56
57 /** Frees a context previously created
58  *  Depends on openssl libcrypto
59  */
60 static void
61 bpi_cipher_ctx_free(void *bpi_ctx)
62 {
63         if (bpi_ctx != NULL)
64                 EVP_CIPHER_CTX_free((EVP_CIPHER_CTX *)bpi_ctx);
65 }
66
67 /** Creates a context in either AES or DES in ECB mode
68  *  Depends on openssl libcrypto
69  */
70 static int
71 bpi_cipher_ctx_init(enum rte_crypto_cipher_algorithm cryptodev_algo,
72                 enum rte_crypto_cipher_operation direction __rte_unused,
73                 const uint8_t *key, uint16_t key_length, void **ctx)
74 {
75         const EVP_CIPHER *algo = NULL;
76         int ret;
77         *ctx = EVP_CIPHER_CTX_new();
78
79         if (*ctx == NULL) {
80                 ret = -ENOMEM;
81                 goto ctx_init_err;
82         }
83
84         if (cryptodev_algo == RTE_CRYPTO_CIPHER_DES_DOCSISBPI)
85                 algo = EVP_des_ecb();
86         else
87                 if (key_length == ICP_QAT_HW_AES_128_KEY_SZ)
88                         algo = EVP_aes_128_ecb();
89                 else
90                         algo = EVP_aes_256_ecb();
91
92         /* IV will be ECB encrypted whether direction is encrypt or decrypt*/
93         if (EVP_EncryptInit_ex(*ctx, algo, NULL, key, 0) != 1) {
94                 ret = -EINVAL;
95                 goto ctx_init_err;
96         }
97
98         return 0;
99
100 ctx_init_err:
101         if (*ctx != NULL)
102                 EVP_CIPHER_CTX_free(*ctx);
103         return ret;
104 }
105
106 static int
107 qat_is_cipher_alg_supported(enum rte_crypto_cipher_algorithm algo,
108                 struct qat_sym_dev_private *internals)
109 {
110         int i = 0;
111         const struct rte_cryptodev_capabilities *capability;
112
113         while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
114                         RTE_CRYPTO_OP_TYPE_UNDEFINED) {
115                 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
116                         continue;
117
118                 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_CIPHER)
119                         continue;
120
121                 if (capability->sym.cipher.algo == algo)
122                         return 1;
123         }
124         return 0;
125 }
126
127 static int
128 qat_is_auth_alg_supported(enum rte_crypto_auth_algorithm algo,
129                 struct qat_sym_dev_private *internals)
130 {
131         int i = 0;
132         const struct rte_cryptodev_capabilities *capability;
133
134         while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
135                         RTE_CRYPTO_OP_TYPE_UNDEFINED) {
136                 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
137                         continue;
138
139                 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_AUTH)
140                         continue;
141
142                 if (capability->sym.auth.algo == algo)
143                         return 1;
144         }
145         return 0;
146 }
147
/** Clear (and free) the QAT private data attached to a cryptodev session.
 *
 *  Frees any OpenSSL BPI context held by the session, zeroes the private
 *  area, detaches it from the generic session and returns it to the
 *  mempool it came from. No-op if no private data is attached for this
 *  driver.
 */
void
qat_sym_session_clear(struct rte_cryptodev *dev,
		struct rte_cryptodev_sym_session *sess)
{
	uint8_t index = dev->driver_id;
	void *sess_priv = get_sym_session_private_data(sess, index);
	struct qat_sym_session *s = (struct qat_sym_session *)sess_priv;

	if (sess_priv) {
		if (s->bpi_ctx)
			bpi_cipher_ctx_free(s->bpi_ctx);
		/* Wipe keys/state before handing memory back to the pool */
		memset(s, 0, qat_sym_session_get_private_size(dev));
		struct rte_mempool *sess_mp = rte_mempool_from_obj(sess_priv);

		set_sym_session_private_data(sess, index, NULL);
		rte_mempool_put(sess_mp, sess_priv);
	}
}
166
167 static int
168 qat_get_cmd_id(const struct rte_crypto_sym_xform *xform)
169 {
170         /* Cipher Only */
171         if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER && xform->next == NULL)
172                 return ICP_QAT_FW_LA_CMD_CIPHER;
173
174         /* Authentication Only */
175         if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH && xform->next == NULL)
176                 return ICP_QAT_FW_LA_CMD_AUTH;
177
178         /* AEAD */
179         if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
180                 /* AES-GCM and AES-CCM works with different direction
181                  * GCM first encrypts and generate hash where AES-CCM
182                  * first generate hash and encrypts. Similar relation
183                  * applies to decryption.
184                  */
185                 if (xform->aead.op == RTE_CRYPTO_AEAD_OP_ENCRYPT)
186                         if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
187                                 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
188                         else
189                                 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
190                 else
191                         if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
192                                 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
193                         else
194                                 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
195         }
196
197         if (xform->next == NULL)
198                 return -1;
199
200         /* Cipher then Authenticate */
201         if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
202                         xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
203                 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
204
205         /* Authenticate then Cipher */
206         if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH &&
207                         xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
208                 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
209
210         return -1;
211 }
212
213 static struct rte_crypto_auth_xform *
214 qat_get_auth_xform(struct rte_crypto_sym_xform *xform)
215 {
216         do {
217                 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH)
218                         return &xform->auth;
219
220                 xform = xform->next;
221         } while (xform);
222
223         return NULL;
224 }
225
226 static struct rte_crypto_cipher_xform *
227 qat_get_cipher_xform(struct rte_crypto_sym_xform *xform)
228 {
229         do {
230                 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
231                         return &xform->cipher;
232
233                 xform = xform->next;
234         } while (xform);
235
236         return NULL;
237 }
238
/** Fill in the cipher half of a QAT session from the CIPHER xform found in
 *  the supplied transform chain: QAT cipher algorithm id, mode, direction,
 *  IV offset/length, and the cipher content descriptor.
 *
 *  For DOCSIS BPI ciphers an OpenSSL ECB context is also created in
 *  session->bpi_ctx (used for runt-block processing).
 *
 *  @return 0 on success; -EINVAL for invalid key sizes or undefined
 *          algorithms, -ENOTSUP for algorithms this PMD/device does not
 *          support. On error any created bpi_ctx is freed again.
 */
int
qat_sym_session_configure_cipher(struct rte_cryptodev *dev,
		struct rte_crypto_sym_xform *xform,
		struct qat_sym_session *session)
{
	struct qat_sym_dev_private *internals = dev->data->dev_private;
	struct rte_crypto_cipher_xform *cipher_xform = NULL;
	int ret;

	/* Get cipher xform from crypto xform chain.
	 * NOTE(review): assumes the chain contains a CIPHER xform (callers
	 * only reach here for cipher-bearing command ids); otherwise
	 * cipher_xform would be NULL and dereferenced below — confirm
	 * against callers.
	 */
	cipher_xform = qat_get_cipher_xform(xform);

	session->cipher_iv.offset = cipher_xform->iv.offset;
	session->cipher_iv.length = cipher_xform->iv.length;

	/* Each case validates the key size (which also selects the QAT
	 * algorithm id) and sets the QAT block-cipher mode.
	 */
	switch (cipher_xform->algo) {
	case RTE_CRYPTO_CIPHER_AES_CBC:
		if (qat_sym_validate_aes_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_AES_CTR:
		if (qat_sym_validate_aes_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		break;
	case RTE_CRYPTO_CIPHER_SNOW3G_UEA2:
		if (qat_sym_validate_snow3g_key(cipher_xform->key.length,
					&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid SNOW 3G cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
		break;
	case RTE_CRYPTO_CIPHER_NULL:
		/* NULL cipher runs through the CTR engine with a NULL alg */
		session->qat_cipher_alg = ICP_QAT_HW_CIPHER_ALGO_NULL;
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		break;
	case RTE_CRYPTO_CIPHER_KASUMI_F8:
		if (qat_sym_validate_kasumi_key(cipher_xform->key.length,
					&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid KASUMI cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_F8_MODE;
		break;
	case RTE_CRYPTO_CIPHER_3DES_CBC:
		if (qat_sym_validate_3des_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid 3DES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_DES_CBC:
		if (qat_sym_validate_des_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid DES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_3DES_CTR:
		if (qat_sym_validate_3des_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid 3DES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		break;
	case RTE_CRYPTO_CIPHER_DES_DOCSISBPI:
		/* DOCSIS BPI needs an OpenSSL ECB context for runt blocks,
		 * built before the key-size check so error_out can clean it.
		 */
		ret = bpi_cipher_ctx_init(
					cipher_xform->algo,
					cipher_xform->op,
					cipher_xform->key.data,
					cipher_xform->key.length,
					&session->bpi_ctx);
		if (ret != 0) {
			QAT_LOG(ERR, "failed to create DES BPI ctx");
			goto error_out;
		}
		if (qat_sym_validate_des_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid DES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_AES_DOCSISBPI:
		ret = bpi_cipher_ctx_init(
					cipher_xform->algo,
					cipher_xform->op,
					cipher_xform->key.data,
					cipher_xform->key.length,
					&session->bpi_ctx);
		if (ret != 0) {
			QAT_LOG(ERR, "failed to create AES BPI ctx");
			goto error_out;
		}
		if (qat_sym_validate_aes_docsisbpi_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES DOCSISBPI key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_ZUC_EEA3:
		/* ZUC is only present on some devices; gate on capabilities */
		if (!qat_is_cipher_alg_supported(
			cipher_xform->algo, internals)) {
			QAT_LOG(ERR, "%s not supported on this device",
				rte_crypto_cipher_algorithm_strings
					[cipher_xform->algo]);
			ret = -ENOTSUP;
			goto error_out;
		}
		if (qat_sym_validate_zuc_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid ZUC cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
		break;
	case RTE_CRYPTO_CIPHER_AES_XTS:
		/* XTS key material holds both halves, so validate half of
		 * key.length; the 192-bit variant is not supported by QAT.
		 */
		if ((cipher_xform->key.length/2) == ICP_QAT_HW_AES_192_KEY_SZ) {
			QAT_LOG(ERR, "AES-XTS-192 not supported");
			ret = -EINVAL;
			goto error_out;
		}
		if (qat_sym_validate_aes_key((cipher_xform->key.length/2),
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES-XTS cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_XTS_MODE;
		break;
	case RTE_CRYPTO_CIPHER_3DES_ECB:
	case RTE_CRYPTO_CIPHER_AES_ECB:
	case RTE_CRYPTO_CIPHER_AES_F8:
	case RTE_CRYPTO_CIPHER_ARC4:
		/* Known algorithms this PMD deliberately does not implement */
		QAT_LOG(ERR, "Crypto QAT PMD: Unsupported Cipher alg %u",
				cipher_xform->algo);
		ret = -ENOTSUP;
		goto error_out;
	default:
		QAT_LOG(ERR, "Crypto: Undefined Cipher specified %u\n",
				cipher_xform->algo);
		ret = -EINVAL;
		goto error_out;
	}

	if (cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
		session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
	else
		session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;

	/* Build the cipher part of the content descriptor from the key */
	if (qat_sym_session_aead_create_cd_cipher(session,
						cipher_xform->key.data,
						cipher_xform->key.length)) {
		ret = -EINVAL;
		goto error_out;
	}

	return 0;

error_out:
	/* Release any BPI context created above so the session is clean */
	if (session->bpi_ctx) {
		bpi_cipher_ctx_free(session->bpi_ctx);
		session->bpi_ctx = NULL;
	}
	return ret;
}
427
428 int
429 qat_sym_session_configure(struct rte_cryptodev *dev,
430                 struct rte_crypto_sym_xform *xform,
431                 struct rte_cryptodev_sym_session *sess,
432                 struct rte_mempool *mempool)
433 {
434         void *sess_private_data;
435         int ret;
436
437         if (rte_mempool_get(mempool, &sess_private_data)) {
438                 CDEV_LOG_ERR(
439                         "Couldn't get object from session mempool");
440                 return -ENOMEM;
441         }
442
443         ret = qat_sym_session_set_parameters(dev, xform, sess_private_data);
444         if (ret != 0) {
445                 QAT_LOG(ERR,
446                     "Crypto QAT PMD: failed to configure session parameters");
447
448                 /* Return session to mempool */
449                 rte_mempool_put(mempool, sess_private_data);
450                 return ret;
451         }
452
453         set_sym_session_private_data(sess, dev->driver_id,
454                 sess_private_data);
455
456         return 0;
457 }
458
/** Enable the extended-protocol-flags mechanism on the session's cached FW
 *  request and set the given hash flag; used by mixed wireless
 *  (SNOW 3G / ZUC) cipher+hash combinations.
 *
 *  @param session    session whose FW request template is modified
 *  @param hash_flag  hash flag bit(s) to OR into content-desc ctrl LW 28
 */
static void
qat_sym_session_set_ext_hash_flags(struct qat_sym_session *session,
		uint8_t hash_flag)
{
	struct icp_qat_fw_comn_req_hdr *header = &session->fw_req.comn_hdr;
	struct icp_qat_fw_cipher_auth_cd_ctrl_hdr *cd_ctrl =
			(struct icp_qat_fw_cipher_auth_cd_ctrl_hdr *)
			session->fw_req.cd_ctrl.content_desc_ctrl_lw;

	/* Set the Use Extended Protocol Flags bit in LW 1 */
	QAT_FIELD_SET(header->comn_req_flags,
			QAT_COMN_EXT_FLAGS_USED,
			QAT_COMN_EXT_FLAGS_BITPOS,
			QAT_COMN_EXT_FLAGS_MASK);

	/* Set Hash Flags in LW 28 */
	cd_ctrl->hash_flags |= hash_flag;

	/* Set proto flags in LW 1, keyed off the *cipher* algorithm:
	 * SNOW 3G uses the legacy proto field, ZUC uses the dedicated
	 * ZUC proto flag, and anything else clears both.
	 */
	switch (session->qat_cipher_alg) {
	case ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_SNOW_3G_PROTO);
		ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
				header->serv_specif_flags, 0);
		break;
	case ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_NO_PROTO);
		ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
				header->serv_specif_flags,
				ICP_QAT_FW_LA_ZUC_3G_PROTO);
		break;
	default:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_NO_PROTO);
		ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
				header->serv_specif_flags, 0);
		break;
	}
}
500
/** Apply extra FW configuration for "mixed" cipher+hash sessions where one
 *  side is a wireless algorithm (SNOW 3G / ZUC) and the other is not.
 *  Such combinations need either a GEN2 device advertising the
 *  mixed-crypto capability or a GEN3 device, so min_qat_dev_gen is raised
 *  accordingly.
 */
static void
qat_sym_session_handle_mixed(const struct rte_cryptodev *dev,
		struct qat_sym_session *session)
{
	const struct qat_sym_dev_private *qat_private = dev->data->dev_private;
	/* GEN2 is enough only if the device advertises mixed crypto */
	enum qat_device_gen min_dev_gen = (qat_private->internal_capabilities &
			QAT_SYM_CAP_MIXED_CRYPTO) ? QAT_GEN2 : QAT_GEN3;

	if (session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3 &&
			session->qat_cipher_alg !=
			ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
		/* ZUC auth paired with a non-ZUC cipher */
		session->min_qat_dev_gen = min_dev_gen;
		qat_sym_session_set_ext_hash_flags(session,
			1 << ICP_QAT_FW_AUTH_HDR_FLAG_ZUC_EIA3_BITPOS);
	} else if (session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2 &&
			session->qat_cipher_alg !=
			ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
		/* SNOW 3G auth paired with a non-SNOW 3G cipher */
		session->min_qat_dev_gen = min_dev_gen;
		qat_sym_session_set_ext_hash_flags(session,
			1 << ICP_QAT_FW_AUTH_HDR_FLAG_SNOW3G_UIA2_BITPOS);
	} else if ((session->aes_cmac ||
			session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_NULL) &&
			(session->qat_cipher_alg ==
			ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2 ||
			session->qat_cipher_alg ==
			ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)) {
		/* Wireless cipher with AES-CMAC or NULL auth: extended
		 * flags are needed but no extra hash flag bit.
		 */
		session->min_qat_dev_gen = min_dev_gen;
		qat_sym_session_set_ext_hash_flags(session, 0);
	}
}
531
/** Translate a crypto transform chain into a fully populated QAT session.
 *
 *  Derives the FW command id from the chain shape, then configures the
 *  cipher and/or auth (or AEAD) halves of the session accordingly.
 *
 *  @param dev              cryptodev owning the session
 *  @param xform            transform chain from the application
 *  @param session_private  session memory from a mempool; must be
 *                          IOVA-addressable (checked below)
 *  @return 0 on success, -EINVAL / -ENOTSUP (or a callee's error) on failure
 */
int
qat_sym_session_set_parameters(struct rte_cryptodev *dev,
		struct rte_crypto_sym_xform *xform, void *session_private)
{
	struct qat_sym_session *session = session_private;
	int ret;
	int qat_cmd_id;

	/* Verify the session physical address is known */
	rte_iova_t session_paddr = rte_mempool_virt2iova(session);
	if (session_paddr == 0 || session_paddr == RTE_BAD_IOVA) {
		QAT_LOG(ERR,
			"Session physical address unknown. Bad memory pool.");
		return -EINVAL;
	}

	/* Set context descriptor physical address */
	session->cd_paddr = session_paddr +
			offsetof(struct qat_sym_session, cd);

	/* Default; individual algorithm setups may raise this later */
	session->min_qat_dev_gen = QAT_GEN1;

	/* Get requested QAT command id */
	qat_cmd_id = qat_get_cmd_id(xform);
	if (qat_cmd_id < 0 || qat_cmd_id >= ICP_QAT_FW_LA_CMD_DELIMITER) {
		QAT_LOG(ERR, "Unsupported xform chain requested");
		return -ENOTSUP;
	}
	session->qat_cmd = (enum icp_qat_fw_la_cmd_id)qat_cmd_id;
	switch (session->qat_cmd) {
	case ICP_QAT_FW_LA_CMD_CIPHER:
		ret = qat_sym_session_configure_cipher(dev, xform, session);
		if (ret < 0)
			return ret;
		break;
	case ICP_QAT_FW_LA_CMD_AUTH:
		ret = qat_sym_session_configure_auth(dev, xform, session);
		if (ret < 0)
			return ret;
		break;
	case ICP_QAT_FW_LA_CMD_CIPHER_HASH:
		/* An AEAD xform also maps to CIPHER_HASH/HASH_CIPHER, so
		 * distinguish true AEAD from a cipher+auth chain here.
		 */
		if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
			ret = qat_sym_session_configure_aead(dev, xform,
					session);
			if (ret < 0)
				return ret;
		} else {
			ret = qat_sym_session_configure_cipher(dev,
					xform, session);
			if (ret < 0)
				return ret;
			ret = qat_sym_session_configure_auth(dev,
					xform, session);
			if (ret < 0)
				return ret;
			/* Special handling of mixed hash+cipher algorithms */
			qat_sym_session_handle_mixed(dev, session);
		}
		break;
	case ICP_QAT_FW_LA_CMD_HASH_CIPHER:
		if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
			ret = qat_sym_session_configure_aead(dev, xform,
					session);
			if (ret < 0)
				return ret;
		} else {
			ret = qat_sym_session_configure_auth(dev,
					xform, session);
			if (ret < 0)
				return ret;
			ret = qat_sym_session_configure_cipher(dev,
					xform, session);
			if (ret < 0)
				return ret;
			/* Special handling of mixed hash+cipher algorithms */
			qat_sym_session_handle_mixed(dev, session);
		}
		break;
	/* The remaining FW services are not exposed through cryptodev */
	case ICP_QAT_FW_LA_CMD_TRNG_GET_RANDOM:
	case ICP_QAT_FW_LA_CMD_TRNG_TEST:
	case ICP_QAT_FW_LA_CMD_SSL3_KEY_DERIVE:
	case ICP_QAT_FW_LA_CMD_TLS_V1_1_KEY_DERIVE:
	case ICP_QAT_FW_LA_CMD_TLS_V1_2_KEY_DERIVE:
	case ICP_QAT_FW_LA_CMD_MGF1:
	case ICP_QAT_FW_LA_CMD_AUTH_PRE_COMP:
	case ICP_QAT_FW_LA_CMD_CIPHER_PRE_COMP:
	case ICP_QAT_FW_LA_CMD_DELIMITER:
		QAT_LOG(ERR, "Unsupported Service %u",
			session->qat_cmd);
		return -ENOTSUP;
	default:
		QAT_LOG(ERR, "Unsupported Service %u",
			session->qat_cmd);
		return -ENOTSUP;
	}

	return 0;
}
630
/** On GEN3 hardware with a 12-byte (96-bit) GCM IV, convert an AES-GCM
 *  AEAD session into the faster Single-Pass (SPC) variant: the request
 *  becomes a plain CIPHER command in AEAD cipher mode, with digest/AAD
 *  sizes carried in the cipher config word and request parameters.
 *  On other generations or IV sizes the session is left untouched.
 *
 *  @return 0 on success (including the no-op case), -EINVAL if the cipher
 *          content descriptor cannot be built.
 */
static int
qat_sym_session_handle_single_pass(struct qat_sym_dev_private *internals,
		struct qat_sym_session *session,
		struct rte_crypto_aead_xform *aead_xform)
{
	enum qat_device_gen qat_dev_gen = internals->qat_dev->qat_dev_gen;

	if (qat_dev_gen == QAT_GEN3 &&
			aead_xform->iv.length == QAT_AES_GCM_SPC_IV_SIZE) {
		/* Use faster Single-Pass GCM */
		struct icp_qat_fw_la_cipher_req_params *cipher_param =
				(void *) &session->fw_req.serv_specif_rqpars;

		session->is_single_pass = 1;
		session->min_qat_dev_gen = QAT_GEN3;
		session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER;
		session->qat_mode = ICP_QAT_HW_CIPHER_AEAD_MODE;
		session->cipher_iv.offset = aead_xform->iv.offset;
		session->cipher_iv.length = aead_xform->iv.length;
		if (qat_sym_session_aead_create_cd_cipher(session,
				aead_xform->key.data, aead_xform->key.length))
			return -EINVAL;
		session->aad_len = aead_xform->aad_length;
		session->digest_length = aead_xform->digest_length;
		if (aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
			/* Encrypt: generate the auth tag and return it */
			session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
			session->auth_op = ICP_QAT_HW_AUTH_GENERATE;
			ICP_QAT_FW_LA_RET_AUTH_SET(
				session->fw_req.comn_hdr.serv_specif_flags,
				ICP_QAT_FW_LA_RET_AUTH_RES);
		} else {
			/* Decrypt: compare against the supplied auth tag */
			session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
			session->auth_op = ICP_QAT_HW_AUTH_VERIFY;
			ICP_QAT_FW_LA_CMP_AUTH_SET(
				session->fw_req.comn_hdr.serv_specif_flags,
				ICP_QAT_FW_LA_CMP_AUTH_RES);
		}
		/* Single-pass proto flag replaces the normal GCM proto */
		ICP_QAT_FW_LA_SINGLE_PASS_PROTO_FLAG_SET(
				session->fw_req.comn_hdr.serv_specif_flags,
				ICP_QAT_FW_LA_SINGLE_PASS_PROTO);
		ICP_QAT_FW_LA_PROTO_SET(
				session->fw_req.comn_hdr.serv_specif_flags,
				ICP_QAT_FW_LA_NO_PROTO);
		ICP_QAT_FW_LA_GCM_IV_LEN_FLAG_SET(
				session->fw_req.comn_hdr.serv_specif_flags,
				ICP_QAT_FW_LA_GCM_IV_LEN_12_OCTETS);
		session->fw_req.comn_hdr.service_cmd_id =
				ICP_QAT_FW_LA_CMD_CIPHER;
		session->cd.cipher.cipher_config.val =
				ICP_QAT_HW_CIPHER_CONFIG_BUILD(
					ICP_QAT_HW_CIPHER_AEAD_MODE,
					session->qat_cipher_alg,
					ICP_QAT_HW_CIPHER_NO_CONVERT,
					session->qat_dir);
		/* In AEAD mode the digest length is packed into the upper
		 * bits of the cipher config word.
		 */
		QAT_FIELD_SET(session->cd.cipher.cipher_config.val,
				aead_xform->digest_length,
				QAT_CIPHER_AEAD_HASH_CMP_LEN_BITPOS,
				QAT_CIPHER_AEAD_HASH_CMP_LEN_MASK);
		session->cd.cipher.cipher_config.reserved =
				ICP_QAT_HW_CIPHER_CONFIG_BUILD_UPPER(
					aead_xform->aad_length);
		cipher_param->spc_aad_sz = aead_xform->aad_length;
		cipher_param->spc_auth_res_sz = aead_xform->digest_length;
	}
	return 0;
}
697
/*
 * Parse an auth transform and fill in the QAT session fields that drive
 * hash processing: algorithm id, auth mode, IV params, digest length and
 * the hash content descriptor (precomputed via the create_cd_auth helper).
 *
 * @param dev      cryptodev owning the session (used for capability checks)
 * @param xform    crypto transform chain; the auth xform is extracted from it
 * @param session  QAT session to populate
 * @return 0 on success, -EINVAL on bad parameters, -ENOTSUP for algorithms
 *         this device cannot do
 */
int
qat_sym_session_configure_auth(struct rte_cryptodev *dev,
				struct rte_crypto_sym_xform *xform,
				struct qat_sym_session *session)
{
	struct rte_crypto_auth_xform *auth_xform = qat_get_auth_xform(xform);
	struct qat_sym_dev_private *internals = dev->data->dev_private;
	const uint8_t *key_data = auth_xform->key.data;
	uint8_t key_length = auth_xform->key.length;
	session->aes_cmac = 0;

	session->auth_iv.offset = auth_xform->iv.offset;
	session->auth_iv.length = auth_xform->iv.length;
	/* MODE1 is the default; the plain (unkeyed) hashes below override
	 * it to MODE0.
	 */
	session->auth_mode = ICP_QAT_HW_AUTH_MODE1;

	switch (auth_xform->algo) {
	case RTE_CRYPTO_AUTH_SHA1:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA1;
		session->auth_mode = ICP_QAT_HW_AUTH_MODE0;
		break;
	case RTE_CRYPTO_AUTH_SHA224:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA224;
		session->auth_mode = ICP_QAT_HW_AUTH_MODE0;
		break;
	case RTE_CRYPTO_AUTH_SHA256:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA256;
		session->auth_mode = ICP_QAT_HW_AUTH_MODE0;
		break;
	case RTE_CRYPTO_AUTH_SHA384:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA384;
		session->auth_mode = ICP_QAT_HW_AUTH_MODE0;
		break;
	case RTE_CRYPTO_AUTH_SHA512:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA512;
		session->auth_mode = ICP_QAT_HW_AUTH_MODE0;
		break;
	case RTE_CRYPTO_AUTH_SHA1_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA1;
		break;
	case RTE_CRYPTO_AUTH_SHA224_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA224;
		break;
	case RTE_CRYPTO_AUTH_SHA256_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA256;
		break;
	case RTE_CRYPTO_AUTH_SHA384_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA384;
		break;
	case RTE_CRYPTO_AUTH_SHA512_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA512;
		break;
	case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
		break;
	case RTE_CRYPTO_AUTH_AES_CMAC:
		/* CMAC reuses the XCBC-MAC hardware algo id; aes_cmac
		 * selects CMAC-style subkey derivation in the precompute
		 * path instead of the XCBC key seeds.
		 */
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
		session->aes_cmac = 1;
		break;
	case RTE_CRYPTO_AUTH_AES_GMAC:
		/* GMAC needs a cipher algo/mode too, since it is run as a
		 * GCM operation with no cipher payload.
		 */
		if (qat_sym_validate_aes_key(auth_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES key size");
			return -EINVAL;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
		/* Zero-length IV from the app means: use the default
		 * GCM J0 length.
		 */
		if (session->auth_iv.length == 0)
			session->auth_iv.length = AES_GCM_J0_LEN;

		break;
	case RTE_CRYPTO_AUTH_SNOW3G_UIA2:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2;
		break;
	case RTE_CRYPTO_AUTH_MD5_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_MD5;
		break;
	case RTE_CRYPTO_AUTH_NULL:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_NULL;
		break;
	case RTE_CRYPTO_AUTH_KASUMI_F9:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_KASUMI_F9;
		break;
	case RTE_CRYPTO_AUTH_ZUC_EIA3:
		/* ZUC is only available on some QAT generations */
		if (!qat_is_auth_alg_supported(auth_xform->algo, internals)) {
			QAT_LOG(ERR, "%s not supported on this device",
				rte_crypto_auth_algorithm_strings
				[auth_xform->algo]);
			return -ENOTSUP;
		}
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3;
		break;
	case RTE_CRYPTO_AUTH_MD5:
	case RTE_CRYPTO_AUTH_AES_CBC_MAC:
		/* known algorithms that this PMD does not implement */
		QAT_LOG(ERR, "Crypto: Unsupported hash alg %u",
				auth_xform->algo);
		return -ENOTSUP;
	default:
		QAT_LOG(ERR, "Crypto: Undefined Hash algo %u specified",
				auth_xform->algo);
		return -EINVAL;
	}

	if (auth_xform->algo == RTE_CRYPTO_AUTH_AES_GMAC) {
		/* GMAC builds both cipher and auth content descriptors;
		 * the build order depends on the direction.
		 */
		if (auth_xform->op == RTE_CRYPTO_AUTH_OP_GENERATE) {
			session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER_HASH;
			session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
			/*
			 * It needs to create cipher desc content first,
			 * then authentication
			 */

			if (qat_sym_session_aead_create_cd_cipher(session,
						auth_xform->key.data,
						auth_xform->key.length))
				return -EINVAL;

			if (qat_sym_session_aead_create_cd_auth(session,
						key_data,
						key_length,
						0,
						auth_xform->digest_length,
						auth_xform->op))
				return -EINVAL;
		} else {
			session->qat_cmd = ICP_QAT_FW_LA_CMD_HASH_CIPHER;
			session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
			/*
			 * It needs to create authentication desc content first,
			 * then cipher
			 */

			if (qat_sym_session_aead_create_cd_auth(session,
					key_data,
					key_length,
					0,
					auth_xform->digest_length,
					auth_xform->op))
				return -EINVAL;

			if (qat_sym_session_aead_create_cd_cipher(session,
						auth_xform->key.data,
						auth_xform->key.length))
				return -EINVAL;
		}
		/* Restore to authentication only: the cipher/hash commands
		 * above were only needed while building the descriptors.
		 */
		session->qat_cmd = ICP_QAT_FW_LA_CMD_AUTH;
	} else {
		if (qat_sym_session_aead_create_cd_auth(session,
				key_data,
				key_length,
				0,
				auth_xform->digest_length,
				auth_xform->op))
			return -EINVAL;
	}

	session->digest_length = auth_xform->digest_length;
	return 0;
}
857
/*
 * Parse an AEAD transform (AES-GCM / AES-CCM) and fill in the QAT session:
 * cipher/hash algorithm ids, IV params, digest length and both content
 * descriptors.  For GCM, the faster single-pass path is used when the
 * device supports it.
 *
 * @param dev      cryptodev owning the session
 * @param xform    transform holding the AEAD parameters
 * @param session  QAT session to populate
 * @return 0 on success, negative errno on failure
 */
int
qat_sym_session_configure_aead(struct rte_cryptodev *dev,
				struct rte_crypto_sym_xform *xform,
				struct qat_sym_session *session)
{
	struct rte_crypto_aead_xform *aead_xform = &xform->aead;
	enum rte_crypto_auth_operation crypto_operation;

	/*
	 * Store AEAD IV parameters as cipher IV,
	 * to avoid unnecessary memory usage
	 */
	session->cipher_iv.offset = xform->aead.iv.offset;
	session->cipher_iv.length = xform->aead.iv.length;

	session->auth_mode = ICP_QAT_HW_AUTH_MODE1;

	switch (aead_xform->algo) {
	case RTE_CRYPTO_AEAD_AES_GCM:
		if (qat_sym_validate_aes_key(aead_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES key size");
			return -EINVAL;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
		/* Zero-length IV from the app means: use the default
		 * GCM J0 length.
		 */
		if (session->cipher_iv.length == 0)
			session->cipher_iv.length = AES_GCM_J0_LEN;

		break;
	case RTE_CRYPTO_AEAD_AES_CCM:
		if (qat_sym_validate_aes_key(aead_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES key size");
			return -EINVAL;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC;
		break;
	default:
		QAT_LOG(ERR, "Crypto: Undefined AEAD specified %u\n",
				aead_xform->algo);
		return -EINVAL;
	}

	session->is_single_pass = 0;
	if (aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) {
		/* Use faster Single-Pass GCM if possible */
		int res = qat_sym_session_handle_single_pass(
				dev->data->dev_private, session, aead_xform);
		if (res < 0)
			return res;
		/* Single-pass setup built the session fully; nothing
		 * further to do.
		 */
		if (session->is_single_pass)
			return 0;
	}

	/* Descriptor build order depends on the data-path direction:
	 * GCM-encrypt and CCM-decrypt run cipher before hash.
	 */
	if ((aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT &&
			aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) ||
			(aead_xform->op == RTE_CRYPTO_AEAD_OP_DECRYPT &&
			aead_xform->algo == RTE_CRYPTO_AEAD_AES_CCM)) {
		session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
		/*
		 * It needs to create cipher desc content first,
		 * then authentication
		 */
		crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
			RTE_CRYPTO_AUTH_OP_GENERATE : RTE_CRYPTO_AUTH_OP_VERIFY;

		if (qat_sym_session_aead_create_cd_cipher(session,
					aead_xform->key.data,
					aead_xform->key.length))
			return -EINVAL;

		if (qat_sym_session_aead_create_cd_auth(session,
					aead_xform->key.data,
					aead_xform->key.length,
					aead_xform->aad_length,
					aead_xform->digest_length,
					crypto_operation))
			return -EINVAL;
	} else {
		session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
		/*
		 * It needs to create authentication desc content first,
		 * then cipher
		 */

		crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
			RTE_CRYPTO_AUTH_OP_VERIFY : RTE_CRYPTO_AUTH_OP_GENERATE;

		if (qat_sym_session_aead_create_cd_auth(session,
					aead_xform->key.data,
					aead_xform->key.length,
					aead_xform->aad_length,
					aead_xform->digest_length,
					crypto_operation))
			return -EINVAL;

		if (qat_sym_session_aead_create_cd_cipher(session,
					aead_xform->key.data,
					aead_xform->key.length))
			return -EINVAL;
	}

	session->digest_length = aead_xform->digest_length;
	return 0;
}
965
966 unsigned int qat_sym_session_get_private_size(
967                 struct rte_cryptodev *dev __rte_unused)
968 {
969         return RTE_ALIGN_CEIL(sizeof(struct qat_sym_session), 8);
970 }
971
972 /* returns block size in bytes per cipher algo */
973 int qat_cipher_get_block_size(enum icp_qat_hw_cipher_algo qat_cipher_alg)
974 {
975         switch (qat_cipher_alg) {
976         case ICP_QAT_HW_CIPHER_ALGO_DES:
977                 return ICP_QAT_HW_DES_BLK_SZ;
978         case ICP_QAT_HW_CIPHER_ALGO_3DES:
979                 return ICP_QAT_HW_3DES_BLK_SZ;
980         case ICP_QAT_HW_CIPHER_ALGO_AES128:
981         case ICP_QAT_HW_CIPHER_ALGO_AES192:
982         case ICP_QAT_HW_CIPHER_ALGO_AES256:
983                 return ICP_QAT_HW_AES_BLK_SZ;
984         default:
985                 QAT_LOG(ERR, "invalid block cipher alg %u", qat_cipher_alg);
986                 return -EFAULT;
987         };
988         return -EFAULT;
989 }
990
991 /*
992  * Returns size in bytes per hash algo for state1 size field in cd_ctrl
993  * This is digest size rounded up to nearest quadword
994  */
995 static int qat_hash_get_state1_size(enum icp_qat_hw_auth_algo qat_hash_alg)
996 {
997         switch (qat_hash_alg) {
998         case ICP_QAT_HW_AUTH_ALGO_SHA1:
999                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA1_STATE1_SZ,
1000                                                 QAT_HW_DEFAULT_ALIGNMENT);
1001         case ICP_QAT_HW_AUTH_ALGO_SHA224:
1002                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA224_STATE1_SZ,
1003                                                 QAT_HW_DEFAULT_ALIGNMENT);
1004         case ICP_QAT_HW_AUTH_ALGO_SHA256:
1005                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA256_STATE1_SZ,
1006                                                 QAT_HW_DEFAULT_ALIGNMENT);
1007         case ICP_QAT_HW_AUTH_ALGO_SHA384:
1008                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA384_STATE1_SZ,
1009                                                 QAT_HW_DEFAULT_ALIGNMENT);
1010         case ICP_QAT_HW_AUTH_ALGO_SHA512:
1011                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
1012                                                 QAT_HW_DEFAULT_ALIGNMENT);
1013         case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
1014                 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ,
1015                                                 QAT_HW_DEFAULT_ALIGNMENT);
1016         case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
1017         case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
1018                 return QAT_HW_ROUND_UP(ICP_QAT_HW_GALOIS_128_STATE1_SZ,
1019                                                 QAT_HW_DEFAULT_ALIGNMENT);
1020         case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
1021                 return QAT_HW_ROUND_UP(ICP_QAT_HW_ZUC_3G_EIA3_STATE1_SZ,
1022                                                 QAT_HW_DEFAULT_ALIGNMENT);
1023         case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
1024                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SNOW_3G_UIA2_STATE1_SZ,
1025                                                 QAT_HW_DEFAULT_ALIGNMENT);
1026         case ICP_QAT_HW_AUTH_ALGO_MD5:
1027                 return QAT_HW_ROUND_UP(ICP_QAT_HW_MD5_STATE1_SZ,
1028                                                 QAT_HW_DEFAULT_ALIGNMENT);
1029         case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
1030                 return QAT_HW_ROUND_UP(ICP_QAT_HW_KASUMI_F9_STATE1_SZ,
1031                                                 QAT_HW_DEFAULT_ALIGNMENT);
1032         case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
1033                 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_CBC_MAC_STATE1_SZ,
1034                                                 QAT_HW_DEFAULT_ALIGNMENT);
1035         case ICP_QAT_HW_AUTH_ALGO_NULL:
1036                 return QAT_HW_ROUND_UP(ICP_QAT_HW_NULL_STATE1_SZ,
1037                                                 QAT_HW_DEFAULT_ALIGNMENT);
1038         case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
1039                 /* return maximum state1 size in this case */
1040                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
1041                                                 QAT_HW_DEFAULT_ALIGNMENT);
1042         default:
1043                 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
1044                 return -EFAULT;
1045         };
1046         return -EFAULT;
1047 }
1048
1049 /* returns digest size in bytes  per hash algo */
1050 static int qat_hash_get_digest_size(enum icp_qat_hw_auth_algo qat_hash_alg)
1051 {
1052         switch (qat_hash_alg) {
1053         case ICP_QAT_HW_AUTH_ALGO_SHA1:
1054                 return ICP_QAT_HW_SHA1_STATE1_SZ;
1055         case ICP_QAT_HW_AUTH_ALGO_SHA224:
1056                 return ICP_QAT_HW_SHA224_STATE1_SZ;
1057         case ICP_QAT_HW_AUTH_ALGO_SHA256:
1058                 return ICP_QAT_HW_SHA256_STATE1_SZ;
1059         case ICP_QAT_HW_AUTH_ALGO_SHA384:
1060                 return ICP_QAT_HW_SHA384_STATE1_SZ;
1061         case ICP_QAT_HW_AUTH_ALGO_SHA512:
1062                 return ICP_QAT_HW_SHA512_STATE1_SZ;
1063         case ICP_QAT_HW_AUTH_ALGO_MD5:
1064                 return ICP_QAT_HW_MD5_STATE1_SZ;
1065         case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
1066                 return ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1067         case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
1068                 /* return maximum digest size in this case */
1069                 return ICP_QAT_HW_SHA512_STATE1_SZ;
1070         default:
1071                 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
1072                 return -EFAULT;
1073         };
1074         return -EFAULT;
1075 }
1076
1077 /* returns block size in byes per hash algo */
1078 static int qat_hash_get_block_size(enum icp_qat_hw_auth_algo qat_hash_alg)
1079 {
1080         switch (qat_hash_alg) {
1081         case ICP_QAT_HW_AUTH_ALGO_SHA1:
1082                 return SHA_CBLOCK;
1083         case ICP_QAT_HW_AUTH_ALGO_SHA224:
1084                 return SHA256_CBLOCK;
1085         case ICP_QAT_HW_AUTH_ALGO_SHA256:
1086                 return SHA256_CBLOCK;
1087         case ICP_QAT_HW_AUTH_ALGO_SHA384:
1088                 return SHA512_CBLOCK;
1089         case ICP_QAT_HW_AUTH_ALGO_SHA512:
1090                 return SHA512_CBLOCK;
1091         case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
1092                 return 16;
1093         case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
1094                 return ICP_QAT_HW_AES_BLK_SZ;
1095         case ICP_QAT_HW_AUTH_ALGO_MD5:
1096                 return MD5_CBLOCK;
1097         case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
1098                 /* return maximum block size in this case */
1099                 return SHA512_CBLOCK;
1100         default:
1101                 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
1102                 return -EFAULT;
1103         };
1104         return -EFAULT;
1105 }
1106
/*
 * Run one SHA1 compression round over a single input block and copy the
 * intermediate (mid) state out, for HMAC ipad/opad precomputation.
 * Copies the raw SHA_CTX — assumes OpenSSL lays the h0..h4 state words
 * out at the start of the context (TODO confirm on upgrade).
 */
static int partial_hash_sha1(uint8_t *data_in, uint8_t *data_out)
{
	SHA_CTX ctx;

	if (!SHA1_Init(&ctx))
		return -EFAULT;
	SHA1_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx, SHA_DIGEST_LENGTH);
	return 0;
}
1117
/*
 * One SHA224 compression round over a single block; copies the mid state
 * out of the raw SHA256_CTX (SHA224 shares SHA256's context).  Note the
 * full SHA256_DIGEST_LENGTH bytes of state are copied, not the truncated
 * SHA224 digest size.
 */
static int partial_hash_sha224(uint8_t *data_in, uint8_t *data_out)
{
	SHA256_CTX ctx;

	if (!SHA224_Init(&ctx))
		return -EFAULT;
	SHA256_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
	return 0;
}
1128
/*
 * One SHA256 compression round over a single block; copies the mid state
 * out of the raw SHA256_CTX — assumes the state words sit at the start
 * of the context (TODO confirm on upgrade).
 */
static int partial_hash_sha256(uint8_t *data_in, uint8_t *data_out)
{
	SHA256_CTX ctx;

	if (!SHA256_Init(&ctx))
		return -EFAULT;
	SHA256_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
	return 0;
}
1139
/*
 * One SHA384 compression round over a single block; copies the mid state
 * out of the raw SHA512_CTX (SHA384 shares SHA512's context).  Note the
 * full SHA512_DIGEST_LENGTH bytes of state are copied, not the truncated
 * SHA384 digest size.
 */
static int partial_hash_sha384(uint8_t *data_in, uint8_t *data_out)
{
	SHA512_CTX ctx;

	if (!SHA384_Init(&ctx))
		return -EFAULT;
	SHA512_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
	return 0;
}
1150
/*
 * One SHA512 compression round over a single block; copies the mid state
 * out of the raw SHA512_CTX — assumes the state words sit at the start
 * of the context (TODO confirm on upgrade).
 */
static int partial_hash_sha512(uint8_t *data_in, uint8_t *data_out)
{
	SHA512_CTX ctx;

	if (!SHA512_Init(&ctx))
		return -EFAULT;
	SHA512_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
	return 0;
}
1161
/*
 * One MD5 compression round over a single block; copies the mid state
 * out of the raw MD5_CTX.  Unlike the SHA helpers, the caller stores
 * this state without byte-swapping (see partial_hash_compute).
 */
static int partial_hash_md5(uint8_t *data_in, uint8_t *data_out)
{
	MD5_CTX ctx;

	if (!MD5_Init(&ctx))
		return -EFAULT;
	MD5_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx, MD5_DIGEST_LENGTH);

	return 0;
}
1173
/*
 * Compute a partial (mid-state) hash of one block of data and store it in
 * the byte order the QAT hardware expects.
 *
 * @param hash_alg  QAT hash algorithm id
 * @param data_in   exactly one hash-block of input (e.g. HMAC ipad/opad)
 * @param data_out  receives digest_size bytes of mid state
 * @return 0 on success, -EFAULT on unknown algo or OpenSSL failure
 *
 * SHA1/224/256 state is byte-swapped as 32-bit words, SHA384/512 as
 * 64-bit words; MD5 state is written through unswapped — presumably
 * because MD5 state is little-endian by definition (NOTE(review)).
 */
static int partial_hash_compute(enum icp_qat_hw_auth_algo hash_alg,
			uint8_t *data_in,
			uint8_t *data_out)
{
	int digest_size;
	/* scratch sized for the largest supported digest (DELIMITER = max) */
	uint8_t digest[qat_hash_get_digest_size(
			ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
	uint32_t *hash_state_out_be32;
	uint64_t *hash_state_out_be64;
	int i;

	digest_size = qat_hash_get_digest_size(hash_alg);
	if (digest_size <= 0)
		return -EFAULT;

	/* data_out is written as 32/64-bit words; assumes the caller's
	 * buffer is suitably aligned — TODO confirm at call sites.
	 */
	hash_state_out_be32 = (uint32_t *)data_out;
	hash_state_out_be64 = (uint64_t *)data_out;

	switch (hash_alg) {
	case ICP_QAT_HW_AUTH_ALGO_SHA1:
		if (partial_hash_sha1(data_in, digest))
			return -EFAULT;
		/* swap each 32-bit state word before storing */
		for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
			*hash_state_out_be32 =
				rte_bswap32(*(((uint32_t *)digest)+i));
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA224:
		if (partial_hash_sha224(data_in, digest))
			return -EFAULT;
		for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
			*hash_state_out_be32 =
				rte_bswap32(*(((uint32_t *)digest)+i));
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA256:
		if (partial_hash_sha256(data_in, digest))
			return -EFAULT;
		for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
			*hash_state_out_be32 =
				rte_bswap32(*(((uint32_t *)digest)+i));
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA384:
		if (partial_hash_sha384(data_in, digest))
			return -EFAULT;
		/* SHA384/512 state words are 64 bits wide */
		for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
			*hash_state_out_be64 =
				rte_bswap64(*(((uint64_t *)digest)+i));
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA512:
		if (partial_hash_sha512(data_in, digest))
			return -EFAULT;
		for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
			*hash_state_out_be64 =
				rte_bswap64(*(((uint64_t *)digest)+i));
		break;
	case ICP_QAT_HW_AUTH_ALGO_MD5:
		/* MD5 state goes straight to data_out, no byte swap */
		if (partial_hash_md5(data_in, data_out))
			return -EFAULT;
		break;
	default:
		QAT_LOG(ERR, "invalid hash alg %u", hash_alg);
		return -EFAULT;
	}

	return 0;
}
/* HMAC inner/outer pad bytes (RFC 2104) */
#define HMAC_IPAD_VALUE 0x36
#define HMAC_OPAD_VALUE 0x5c
/* Number of derived keys precomputed for AES-XCBC-MAC (K1, K2, K3) */
#define HASH_XCBC_PRECOMP_KEY_NUM 3

/* All-zero block encrypted with the user key to derive CMAC subkeys */
static const uint8_t AES_CMAC_SEED[ICP_QAT_HW_AES_128_KEY_SZ];
1244
1245 static void aes_cmac_key_derive(uint8_t *base, uint8_t *derived)
1246 {
1247         int i;
1248
1249         derived[0] = base[0] << 1;
1250         for (i = 1; i < ICP_QAT_HW_AES_BLK_SZ ; i++) {
1251                 derived[i] = base[i] << 1;
1252                 derived[i - 1] |= base[i] >> 7;
1253         }
1254
1255         if (base[0] & 0x80)
1256                 derived[ICP_QAT_HW_AES_BLK_SZ - 1] ^= QAT_AES_CMAC_CONST_RB;
1257 }
1258
1259 static int qat_sym_do_precomputes(enum icp_qat_hw_auth_algo hash_alg,
1260                                 const uint8_t *auth_key,
1261                                 uint16_t auth_keylen,
1262                                 uint8_t *p_state_buf,
1263                                 uint16_t *p_state_len,
1264                                 uint8_t aes_cmac)
1265 {
1266         int block_size;
1267         uint8_t ipad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1268         uint8_t opad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1269         int i;
1270
1271         if (hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC) {
1272
1273                 /* CMAC */
1274                 if (aes_cmac) {
1275                         AES_KEY enc_key;
1276                         uint8_t *in = NULL;
1277                         uint8_t k0[ICP_QAT_HW_AES_128_KEY_SZ];
1278                         uint8_t *k1, *k2;
1279
1280                         auth_keylen = ICP_QAT_HW_AES_128_KEY_SZ;
1281
1282                         in = rte_zmalloc("AES CMAC K1",
1283                                          ICP_QAT_HW_AES_128_KEY_SZ, 16);
1284
1285                         if (in == NULL) {
1286                                 QAT_LOG(ERR, "Failed to alloc memory");
1287                                 return -ENOMEM;
1288                         }
1289
1290                         rte_memcpy(in, AES_CMAC_SEED,
1291                                    ICP_QAT_HW_AES_128_KEY_SZ);
1292                         rte_memcpy(p_state_buf, auth_key, auth_keylen);
1293
1294                         if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1295                                 &enc_key) != 0) {
1296                                 rte_free(in);
1297                                 return -EFAULT;
1298                         }
1299
1300                         AES_encrypt(in, k0, &enc_key);
1301
1302                         k1 = p_state_buf + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1303                         k2 = k1 + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1304
1305                         aes_cmac_key_derive(k0, k1);
1306                         aes_cmac_key_derive(k1, k2);
1307
1308                         memset(k0, 0, ICP_QAT_HW_AES_128_KEY_SZ);
1309                         *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
1310                         rte_free(in);
1311                         return 0;
1312                 } else {
1313                         static uint8_t qat_aes_xcbc_key_seed[
1314                                         ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ] = {
1315                                 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1316                                 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1317                                 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1318                                 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1319                                 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1320                                 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1321                         };
1322
1323                         uint8_t *in = NULL;
1324                         uint8_t *out = p_state_buf;
1325                         int x;
1326                         AES_KEY enc_key;
1327
1328                         in = rte_zmalloc("working mem for key",
1329                                         ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ, 16);
1330                         if (in == NULL) {
1331                                 QAT_LOG(ERR, "Failed to alloc memory");
1332                                 return -ENOMEM;
1333                         }
1334
1335                         rte_memcpy(in, qat_aes_xcbc_key_seed,
1336                                         ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1337                         for (x = 0; x < HASH_XCBC_PRECOMP_KEY_NUM; x++) {
1338                                 if (AES_set_encrypt_key(auth_key,
1339                                                         auth_keylen << 3,
1340                                                         &enc_key) != 0) {
1341                                         rte_free(in -
1342                                           (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ));
1343                                         memset(out -
1344                                            (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ),
1345                                           0, ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1346                                         return -EFAULT;
1347                                 }
1348                                 AES_encrypt(in, out, &enc_key);
1349                                 in += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1350                                 out += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1351                         }
1352                         *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
1353                         rte_free(in - x*ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ);
1354                         return 0;
1355                 }
1356
1357         } else if ((hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_128) ||
1358                 (hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_64)) {
1359                 uint8_t *in = NULL;
1360                 uint8_t *out = p_state_buf;
1361                 AES_KEY enc_key;
1362
1363                 memset(p_state_buf, 0, ICP_QAT_HW_GALOIS_H_SZ +
1364                                 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1365                                 ICP_QAT_HW_GALOIS_E_CTR0_SZ);
1366                 in = rte_zmalloc("working mem for key",
1367                                 ICP_QAT_HW_GALOIS_H_SZ, 16);
1368                 if (in == NULL) {
1369                         QAT_LOG(ERR, "Failed to alloc memory");
1370                         return -ENOMEM;
1371                 }
1372
1373                 memset(in, 0, ICP_QAT_HW_GALOIS_H_SZ);
1374                 if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1375                         &enc_key) != 0) {
1376                         return -EFAULT;
1377                 }
1378                 AES_encrypt(in, out, &enc_key);
1379                 *p_state_len = ICP_QAT_HW_GALOIS_H_SZ +
1380                                 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1381                                 ICP_QAT_HW_GALOIS_E_CTR0_SZ;
1382                 rte_free(in);
1383                 return 0;
1384         }
1385
1386         block_size = qat_hash_get_block_size(hash_alg);
1387         if (block_size < 0)
1388                 return block_size;
1389         /* init ipad and opad from key and xor with fixed values */
1390         memset(ipad, 0, block_size);
1391         memset(opad, 0, block_size);
1392
1393         if (auth_keylen > (unsigned int)block_size) {
1394                 QAT_LOG(ERR, "invalid keylen %u", auth_keylen);
1395                 return -EFAULT;
1396         }
1397         rte_memcpy(ipad, auth_key, auth_keylen);
1398         rte_memcpy(opad, auth_key, auth_keylen);
1399
1400         for (i = 0; i < block_size; i++) {
1401                 uint8_t *ipad_ptr = ipad + i;
1402                 uint8_t *opad_ptr = opad + i;
1403                 *ipad_ptr ^= HMAC_IPAD_VALUE;
1404                 *opad_ptr ^= HMAC_OPAD_VALUE;
1405         }
1406
1407         /* do partial hash of ipad and copy to state1 */
1408         if (partial_hash_compute(hash_alg, ipad, p_state_buf)) {
1409                 memset(ipad, 0, block_size);
1410                 memset(opad, 0, block_size);
1411                 QAT_LOG(ERR, "ipad precompute failed");
1412                 return -EFAULT;
1413         }
1414
1415         /*
1416          * State len is a multiple of 8, so may be larger than the digest.
1417          * Put the partial hash of opad state_len bytes after state1
1418          */
1419         *p_state_len = qat_hash_get_state1_size(hash_alg);
1420         if (partial_hash_compute(hash_alg, opad, p_state_buf + *p_state_len)) {
1421                 memset(ipad, 0, block_size);
1422                 memset(opad, 0, block_size);
1423                 QAT_LOG(ERR, "opad precompute failed");
1424                 return -EFAULT;
1425         }
1426
1427         /*  don't leave data lying around */
1428         memset(ipad, 0, block_size);
1429         memset(opad, 0, block_size);
1430         return 0;
1431 }
1432
1433 static void
1434 qat_sym_session_init_common_hdr(struct icp_qat_fw_comn_req_hdr *header,
1435                 enum qat_sym_proto_flag proto_flags)
1436 {
1437         header->hdr_flags =
1438                 ICP_QAT_FW_COMN_HDR_FLAGS_BUILD(ICP_QAT_FW_COMN_REQ_FLAG_SET);
1439         header->service_type = ICP_QAT_FW_COMN_REQ_CPM_FW_LA;
1440         header->comn_req_flags =
1441                 ICP_QAT_FW_COMN_FLAGS_BUILD(QAT_COMN_CD_FLD_TYPE_64BIT_ADR,
1442                                         QAT_COMN_PTR_TYPE_FLAT);
1443         ICP_QAT_FW_LA_PARTIAL_SET(header->serv_specif_flags,
1444                                   ICP_QAT_FW_LA_PARTIAL_NONE);
1445         ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(header->serv_specif_flags,
1446                                            ICP_QAT_FW_CIPH_IV_16BYTE_DATA);
1447
1448         switch (proto_flags)            {
1449         case QAT_CRYPTO_PROTO_FLAG_NONE:
1450                 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1451                                         ICP_QAT_FW_LA_NO_PROTO);
1452                 break;
1453         case QAT_CRYPTO_PROTO_FLAG_CCM:
1454                 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1455                                         ICP_QAT_FW_LA_CCM_PROTO);
1456                 break;
1457         case QAT_CRYPTO_PROTO_FLAG_GCM:
1458                 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1459                                         ICP_QAT_FW_LA_GCM_PROTO);
1460                 break;
1461         case QAT_CRYPTO_PROTO_FLAG_SNOW3G:
1462                 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1463                                         ICP_QAT_FW_LA_SNOW_3G_PROTO);
1464                 break;
1465         case QAT_CRYPTO_PROTO_FLAG_ZUC:
1466                 ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(header->serv_specif_flags,
1467                         ICP_QAT_FW_LA_ZUC_3G_PROTO);
1468                 break;
1469         }
1470
1471         ICP_QAT_FW_LA_UPDATE_STATE_SET(header->serv_specif_flags,
1472                                            ICP_QAT_FW_LA_NO_UPDATE_STATE);
1473         ICP_QAT_FW_LA_DIGEST_IN_BUFFER_SET(header->serv_specif_flags,
1474                                         ICP_QAT_FW_LA_NO_DIGEST_IN_BUFFER);
1475 }
1476
/*
 * Snow3G and ZUC must never use this function; their protocol flags are
 * set directly in both the cipher and the auth parts of the
 * content-descriptor building functions.
 */
1482 static enum qat_sym_proto_flag
1483 qat_get_crypto_proto_flag(uint16_t flags)
1484 {
1485         int proto = ICP_QAT_FW_LA_PROTO_GET(flags);
1486         enum qat_sym_proto_flag qat_proto_flag =
1487                         QAT_CRYPTO_PROTO_FLAG_NONE;
1488
1489         switch (proto) {
1490         case ICP_QAT_FW_LA_GCM_PROTO:
1491                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
1492                 break;
1493         case ICP_QAT_FW_LA_CCM_PROTO:
1494                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
1495                 break;
1496         }
1497
1498         return qat_proto_flag;
1499 }
1500
/*
 * Build the cipher section of the QAT content descriptor for a session
 * and initialise the cipher-related fields of the firmware request
 * template (cdesc->fw_req).
 *
 * @cdesc        session being configured; cdesc->cd_cur_ptr is advanced
 *               past the cipher config plus key material written here
 * @cipherkey    cipher key supplied by the application
 * @cipherkeylen length of @cipherkey in bytes
 *
 * Return 0 on success, -EFAULT if cdesc->qat_cmd is not a command that
 * includes a cipher operation.
 */
int qat_sym_session_aead_create_cd_cipher(struct qat_sym_session *cdesc,
						const uint8_t *cipherkey,
						uint32_t cipherkeylen)
{
	struct icp_qat_hw_cipher_algo_blk *cipher;
	struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
	struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
	struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
	/* cd_ctrl is accessed through both control-header layouts below */
	void *ptr = &req_tmpl->cd_ctrl;
	struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
	struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
	enum icp_qat_hw_cipher_convert key_convert;
	enum qat_sym_proto_flag qat_proto_flag =
		QAT_CRYPTO_PROTO_FLAG_NONE;
	uint32_t total_key_size;
	uint16_t cipher_offset, cd_size;
	uint32_t wordIndex  = 0;
	uint32_t *temp_key = NULL;

	if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER) {
		/* Cipher-only: CIPHER slice writes straight to DRAM; no
		 * auth result is returned or compared.
		 */
		cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
		ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_DRAM_WR);
		ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_RET_AUTH_RES);
		ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
		/* Cipher-then-hash: chain CIPHER -> AUTH -> DRAM write */
		cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
		ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_DRAM_WR);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
		/* HASH_CIPHER is accepted but configured elsewhere: its
		 * slice chaining is set up in
		 * qat_sym_session_aead_create_cd_auth().
		 */
		QAT_LOG(ERR, "Invalid param, must be a cipher command.");
		return -EFAULT;
	}

	if (cdesc->qat_mode == ICP_QAT_HW_CIPHER_CTR_MODE) {
		/*
		 * CTR Streaming ciphers are a special case. Decrypt = encrypt
		 * Overriding default values previously set
		 */
		cdesc->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
		key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2
		|| cdesc->qat_cipher_alg ==
			ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)
		/* SNOW3G/ZUC always use the key-convert path */
		key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
	else if (cdesc->qat_dir == ICP_QAT_HW_CIPHER_ENCRYPT)
		key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
	else
		key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;

	/* Per-algorithm key/state sizing; hardware fields below are
	 * expressed in 8-byte words (hence the >> 3 shifts).
	 */
	if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
		total_key_size = ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ +
			ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
		cipher_cd_ctrl->cipher_state_sz =
			ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;

	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
		total_key_size = ICP_QAT_HW_KASUMI_F8_KEY_SZ;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_KASUMI_BLK_SZ >> 3;
		cipher_cd_ctrl->cipher_padding_sz =
					(2 * ICP_QAT_HW_KASUMI_BLK_SZ) >> 3;
	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES) {
		total_key_size = ICP_QAT_HW_3DES_KEY_SZ;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_3DES_BLK_SZ >> 3;
		/* proto flag may already carry GCM/CCM from header flags */
		qat_proto_flag =
			qat_get_crypto_proto_flag(header->serv_specif_flags);
	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_DES) {
		total_key_size = ICP_QAT_HW_DES_KEY_SZ;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_DES_BLK_SZ >> 3;
		qat_proto_flag =
			qat_get_crypto_proto_flag(header->serv_specif_flags);
	} else if (cdesc->qat_cipher_alg ==
		ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
		total_key_size = ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ +
			ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
		cipher_cd_ctrl->cipher_state_sz =
			ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
		/* ZUC requires a gen-2 or newer QAT device */
		cdesc->min_qat_dev_gen = QAT_GEN2;
	} else {
		/* Default (AES and other block ciphers): key as supplied */
		total_key_size = cipherkeylen;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_AES_BLK_SZ >> 3;
		qat_proto_flag =
			qat_get_crypto_proto_flag(header->serv_specif_flags);
	}
	cipher_cd_ctrl->cipher_key_sz = total_key_size >> 3;
	cipher_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
	cipher_cd_ctrl->cipher_cfg_offset = cipher_offset >> 3;

	header->service_cmd_id = cdesc->qat_cmd;
	qat_sym_session_init_common_hdr(header, qat_proto_flag);

	/* Write the cipher config word followed by the key material */
	cipher = (struct icp_qat_hw_cipher_algo_blk *)cdesc->cd_cur_ptr;
	cipher->cipher_config.val =
	    ICP_QAT_HW_CIPHER_CONFIG_BUILD(cdesc->qat_mode,
					cdesc->qat_cipher_alg, key_convert,
					cdesc->qat_dir);

	if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
		/* KASUMI F8 stores the key twice: once plain and once
		 * XOR-ed with the F8 key modifier.
		 */
		temp_key = (uint32_t *)(cdesc->cd_cur_ptr +
					sizeof(struct icp_qat_hw_cipher_config)
					+ cipherkeylen);
		memcpy(cipher->key, cipherkey, cipherkeylen);
		memcpy(temp_key, cipherkey, cipherkeylen);

		/* XOR Key with KASUMI F8 key modifier at 4 bytes level */
		for (wordIndex = 0; wordIndex < (cipherkeylen >> 2);
								wordIndex++)
			temp_key[wordIndex] ^= KASUMI_F8_KEY_MODIFIER_4_BYTES;

		cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
					cipherkeylen + cipherkeylen;
	} else {
		memcpy(cipher->key, cipherkey, cipherkeylen);
		cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
					cipherkeylen;
	}

	if (total_key_size > cipherkeylen) {
		/* Pad the key area out to the size hardware expects */
		uint32_t padding_size =  total_key_size-cipherkeylen;
		if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
			&& (cipherkeylen == QAT_3DES_KEY_SZ_OPT2)) {
			/* K3 not provided so use K1 = K3*/
			memcpy(cdesc->cd_cur_ptr, cipherkey, padding_size);
		} else if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
			&& (cipherkeylen == QAT_3DES_KEY_SZ_OPT3)) {
			/* K2 and K3 not provided so use K1 = K2 = K3*/
			memcpy(cdesc->cd_cur_ptr, cipherkey,
				cipherkeylen);
			memcpy(cdesc->cd_cur_ptr+cipherkeylen,
				cipherkey, cipherkeylen);
		} else
			memset(cdesc->cd_cur_ptr, 0, padding_size);

		cdesc->cd_cur_ptr += padding_size;
	}
	/* Content descriptor size is reported in 8-byte words, rounded up */
	cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
	cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;

	return 0;
}
1655
/*
 * Build the auth section of the QAT content descriptor for a session:
 * the hash setup block, the state1/state2 precomputed material, and the
 * auth-related fields of the firmware request template (cdesc->fw_req).
 *
 * @cdesc      session being configured; cdesc->cd_cur_ptr is advanced
 *             past the data written here
 * @authkey    authentication key supplied by the application
 * @authkeylen length of @authkey in bytes
 * @aad_length additional-authenticated-data length (used by GCM/CCM)
 * @digestsize digest length in bytes
 * @operation  RTE_CRYPTO_AUTH_OP_VERIFY selects verify mode; any other
 *             value selects generate mode
 *
 * Return 0 on success, -EFAULT on an invalid command/hash algorithm or
 * a failed precompute; may also propagate a negative return from
 * qat_hash_get_block_size().
 */
int qat_sym_session_aead_create_cd_auth(struct qat_sym_session *cdesc,
						const uint8_t *authkey,
						uint32_t authkeylen,
						uint32_t aad_length,
						uint32_t digestsize,
						unsigned int operation)
{
	struct icp_qat_hw_auth_setup *hash;
	struct icp_qat_hw_cipher_algo_blk *cipherconfig;
	struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
	struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
	struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
	/* cd_ctrl is accessed through both control-header layouts below */
	void *ptr = &req_tmpl->cd_ctrl;
	struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
	struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
	/* Hash request parameters live at a fixed offset inside the
	 * service-specific request-parameter area.
	 */
	struct icp_qat_fw_la_auth_req_params *auth_param =
		(struct icp_qat_fw_la_auth_req_params *)
		((char *)&req_tmpl->serv_specif_rqpars +
		ICP_QAT_FW_HASH_REQUEST_PARAMETERS_OFFSET);
	uint16_t state1_size = 0, state2_size = 0, cd_extra_size = 0;
	uint16_t hash_offset, cd_size;
	uint32_t *aad_len = NULL;
	uint32_t wordIndex  = 0;
	uint32_t *pTempKey;
	enum qat_sym_proto_flag qat_proto_flag =
		QAT_CRYPTO_PROTO_FLAG_NONE;

	if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_AUTH) {
		/* Auth-only: AUTH slice writes straight to DRAM */
		ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_DRAM_WR);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
		/* Hash-then-cipher: chain AUTH -> CIPHER -> DRAM write */
		ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
				ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
				ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
				ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
				ICP_QAT_FW_SLICE_DRAM_WR);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
		/* CIPHER_HASH is accepted but its slice chaining is set up
		 * in qat_sym_session_aead_create_cd_cipher().
		 */
		QAT_LOG(ERR, "Invalid param, must be a hash command.");
		return -EFAULT;
	}

	if (operation == RTE_CRYPTO_AUTH_OP_VERIFY) {
		/* Verify: hardware compares the digest, does not return it */
		ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_NO_RET_AUTH_RES);
		ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_CMP_AUTH_RES);
		cdesc->auth_op = ICP_QAT_HW_AUTH_VERIFY;
	} else {
		/* Generate: hardware returns the digest, no comparison */
		ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
					   ICP_QAT_FW_LA_RET_AUTH_RES);
		ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
					   ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
		cdesc->auth_op = ICP_QAT_HW_AUTH_GENERATE;
	}

	/*
	 * Setup the inner hash config
	 */
	hash_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
	hash = (struct icp_qat_hw_auth_setup *)cdesc->cd_cur_ptr;
	hash->auth_config.reserved = 0;
	hash->auth_config.config =
			ICP_QAT_HW_AUTH_CONFIG_BUILD(cdesc->auth_mode,
				cdesc->qat_hash_alg, digestsize);

	/* MODE0 and the algorithms listed below use a zero counter; the
	 * remaining (HMAC-style) algorithms use the hash block size,
	 * byte-swapped.
	 */
	if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_KASUMI_F9
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_NULL
			)
		hash->auth_counter.counter = 0;
	else {
		int block_size = qat_hash_get_block_size(cdesc->qat_hash_alg);

		if (block_size < 0)
			return block_size;
		hash->auth_counter.counter = rte_bswap32(block_size);
	}

	cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_auth_setup);

	/*
	 * cd_cur_ptr now points at the state1 information.
	 * Each case below writes the algorithm's state1 (and, where
	 * applicable, state2) material and records their sizes.
	 */
	switch (cdesc->qat_hash_alg) {
	case ICP_QAT_HW_AUTH_ALGO_SHA1:
		if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0) {
			/* Plain SHA-1 */
			rte_memcpy(cdesc->cd_cur_ptr, sha1InitialState,
					sizeof(sha1InitialState));
			state1_size = qat_hash_get_state1_size(
					cdesc->qat_hash_alg);
			break;
		}
		/* SHA-1 HMAC */
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA1, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = RTE_ALIGN_CEIL(ICP_QAT_HW_SHA1_STATE2_SZ, 8);
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA224:
		if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0) {
			/* Plain SHA-224 */
			rte_memcpy(cdesc->cd_cur_ptr, sha224InitialState,
					sizeof(sha224InitialState));
			state1_size = qat_hash_get_state1_size(
					cdesc->qat_hash_alg);
			break;
		}
		/* SHA-224 HMAC */
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA224, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_SHA224_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA256:
		if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0) {
			/* Plain SHA-256 */
			rte_memcpy(cdesc->cd_cur_ptr, sha256InitialState,
					sizeof(sha256InitialState));
			state1_size = qat_hash_get_state1_size(
					cdesc->qat_hash_alg);
			break;
		}
		/* SHA-256 HMAC */
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA256, authkey,
			authkeylen, cdesc->cd_cur_ptr,  &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_SHA256_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA384:
		if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0) {
			/* Plain SHA-384 */
			rte_memcpy(cdesc->cd_cur_ptr, sha384InitialState,
					sizeof(sha384InitialState));
			state1_size = qat_hash_get_state1_size(
					cdesc->qat_hash_alg);
			break;
		}
		/* SHA-384 HMAC */
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA384, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_SHA384_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA512:
		if (cdesc->auth_mode == ICP_QAT_HW_AUTH_MODE0) {
			/* Plain SHA-512 */
			rte_memcpy(cdesc->cd_cur_ptr, sha512InitialState,
					sizeof(sha512InitialState));
			state1_size = qat_hash_get_state1_size(
					cdesc->qat_hash_alg);
			break;
		}
		/* SHA-512 HMAC */
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA512, authkey,
			authkeylen, cdesc->cd_cur_ptr,  &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_SHA512_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
		/* Shared by AES-XCBC-MAC and AES-CMAC; cdesc->aes_cmac
		 * selects CMAC behaviour in the precompute.
		 */
		state1_size = ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;

		if (cdesc->aes_cmac)
			memset(cdesc->cd_cur_ptr, 0, state1_size);
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC,
			authkey, authkeylen, cdesc->cd_cur_ptr + state1_size,
			&state2_size, cdesc->aes_cmac)) {
			cdesc->aes_cmac ? QAT_LOG(ERR,
						  "(CMAC)precompute failed")
					: QAT_LOG(ERR,
						  "(XCBC)precompute failed");
			return -EFAULT;
		}
		break;
	case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
	case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
		state1_size = ICP_QAT_HW_GALOIS_128_STATE1_SZ;
		if (qat_sym_do_precomputes(cdesc->qat_hash_alg, authkey,
			authkeylen, cdesc->cd_cur_ptr + state1_size,
			&state2_size, cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(GCM)precompute failed");
			return -EFAULT;
		}
		/*
		 * Write (the length of AAD) into bytes 16-19 of state2
		 * in big-endian format. This field is 8 bytes
		 */
		auth_param->u2.aad_sz =
				RTE_ALIGN_CEIL(aad_length, 16);
		auth_param->hash_state_sz = (auth_param->u2.aad_sz) >> 3;

		aad_len = (uint32_t *)(cdesc->cd_cur_ptr +
					ICP_QAT_HW_GALOIS_128_STATE1_SZ +
					ICP_QAT_HW_GALOIS_H_SZ);
		*aad_len = rte_bswap32(aad_length);
		cdesc->aad_len = aad_length;
		break;
	case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2);
		state2_size = ICP_QAT_HW_SNOW_3G_UIA2_STATE2_SZ;
		memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);

		/* An extra cipher config block (key + zeroed IV) is
		 * appended after state2 for the UIA2 key.
		 */
		cipherconfig = (struct icp_qat_hw_cipher_algo_blk *)
				(cdesc->cd_cur_ptr + state1_size + state2_size);
		cipherconfig->cipher_config.val =
		ICP_QAT_HW_CIPHER_CONFIG_BUILD(ICP_QAT_HW_CIPHER_ECB_MODE,
			ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2,
			ICP_QAT_HW_CIPHER_KEY_CONVERT,
			ICP_QAT_HW_CIPHER_ENCRYPT);
		memcpy(cipherconfig->key, authkey, authkeylen);
		memset(cipherconfig->key + authkeylen,
				0, ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ);
		cd_extra_size += sizeof(struct icp_qat_hw_cipher_config) +
				authkeylen + ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
		auth_param->hash_state_sz = ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
		break;
	case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
		/* ZUC forces MODE0 in the hash config */
		hash->auth_config.config =
			ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE0,
				cdesc->qat_hash_alg, digestsize);
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3);
		state2_size = ICP_QAT_HW_ZUC_3G_EIA3_STATE2_SZ;
		memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size
			+ ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ);

		memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
		cd_extra_size += ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
		auth_param->hash_state_sz = ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
		/* ZUC requires a gen-2 or newer QAT device */
		cdesc->min_qat_dev_gen = QAT_GEN2;

		break;
	case ICP_QAT_HW_AUTH_ALGO_MD5:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_MD5, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(MD5)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_MD5_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_NULL:
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_NULL);
		state2_size = ICP_QAT_HW_NULL_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC);
		state2_size = ICP_QAT_HW_AES_CBC_MAC_KEY_SZ +
				ICP_QAT_HW_AES_CCM_CBC_E_CTR0_SZ;

		if (aad_length > 0) {
			/* Account for the CCM B0 block and the encoded
			 * AAD-length field, then round up to the CCM
			 * AAD alignment.
			 */
			aad_length += ICP_QAT_HW_CCM_AAD_B0_LEN +
			ICP_QAT_HW_CCM_AAD_LEN_INFO;
			auth_param->u2.aad_sz =
			RTE_ALIGN_CEIL(aad_length,
			ICP_QAT_HW_CCM_AAD_ALIGNMENT);
		} else {
			auth_param->u2.aad_sz = ICP_QAT_HW_CCM_AAD_B0_LEN;
		}
		cdesc->aad_len = aad_length;
		hash->auth_counter.counter = 0;

		hash_cd_ctrl->outer_prefix_sz = digestsize;
		auth_param->hash_state_sz = digestsize;

		memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
		break;
	case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_KASUMI_F9);
		state2_size = ICP_QAT_HW_KASUMI_F9_STATE2_SZ;
		memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
		pTempKey = (uint32_t *)(cdesc->cd_cur_ptr + state1_size
							+ authkeylen);
		/*
		 * The Inner Hash Initial State2 block must contain IK
		 * (Initialisation Key), followed by IK XOR-ed with KM
		 * (Key Modifier): IK||(IK^KM).
		 */
		/* write the auth key */
		memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
		/* initialise temp key with auth key */
		memcpy(pTempKey, authkey, authkeylen);
		/* XOR Key with KASUMI F9 key modifier at 4 bytes level */
		for (wordIndex = 0; wordIndex < (authkeylen >> 2); wordIndex++)
			pTempKey[wordIndex] ^= KASUMI_F9_KEY_MODIFIER_4_BYTES;
		break;
	default:
		QAT_LOG(ERR, "Invalid HASH alg %u", cdesc->qat_hash_alg);
		return -EFAULT;
	}

	/* Request template setup */
	qat_sym_session_init_common_hdr(header, qat_proto_flag);
	header->service_cmd_id = cdesc->qat_cmd;

	/* Auth CD config setup; offsets/sizes are in 8-byte words */
	hash_cd_ctrl->hash_cfg_offset = hash_offset >> 3;
	hash_cd_ctrl->hash_flags = ICP_QAT_FW_AUTH_HDR_FLAG_NO_NESTED;
	hash_cd_ctrl->inner_res_sz = digestsize;
	hash_cd_ctrl->final_sz = digestsize;
	hash_cd_ctrl->inner_state1_sz = state1_size;
	auth_param->auth_res_sz = digestsize;

	hash_cd_ctrl->inner_state2_sz  = state2_size;
	hash_cd_ctrl->inner_state2_offset = hash_cd_ctrl->hash_cfg_offset +
			((sizeof(struct icp_qat_hw_auth_setup) +
			 RTE_ALIGN_CEIL(hash_cd_ctrl->inner_state1_sz, 8))
					>> 3);

	cdesc->cd_cur_ptr += state1_size + state2_size + cd_extra_size;
	cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;

	cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
	cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;

	return 0;
}
2007
2008 int qat_sym_validate_aes_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
2009 {
2010         switch (key_len) {
2011         case ICP_QAT_HW_AES_128_KEY_SZ:
2012                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
2013                 break;
2014         case ICP_QAT_HW_AES_192_KEY_SZ:
2015                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES192;
2016                 break;
2017         case ICP_QAT_HW_AES_256_KEY_SZ:
2018                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
2019                 break;
2020         default:
2021                 return -EINVAL;
2022         }
2023         return 0;
2024 }
2025
2026 int qat_sym_validate_aes_docsisbpi_key(int key_len,
2027                 enum icp_qat_hw_cipher_algo *alg)
2028 {
2029         switch (key_len) {
2030         case ICP_QAT_HW_AES_128_KEY_SZ:
2031                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
2032                 break;
2033         case ICP_QAT_HW_AES_256_KEY_SZ:
2034                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
2035                 break;
2036         default:
2037                 return -EINVAL;
2038         }
2039         return 0;
2040 }
2041
2042 int qat_sym_validate_snow3g_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
2043 {
2044         switch (key_len) {
2045         case ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ:
2046                 *alg = ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2;
2047                 break;
2048         default:
2049                 return -EINVAL;
2050         }
2051         return 0;
2052 }
2053
2054 int qat_sym_validate_kasumi_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
2055 {
2056         switch (key_len) {
2057         case ICP_QAT_HW_KASUMI_KEY_SZ:
2058                 *alg = ICP_QAT_HW_CIPHER_ALGO_KASUMI;
2059                 break;
2060         default:
2061                 return -EINVAL;
2062         }
2063         return 0;
2064 }
2065
2066 int qat_sym_validate_des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
2067 {
2068         switch (key_len) {
2069         case ICP_QAT_HW_DES_KEY_SZ:
2070                 *alg = ICP_QAT_HW_CIPHER_ALGO_DES;
2071                 break;
2072         default:
2073                 return -EINVAL;
2074         }
2075         return 0;
2076 }
2077
2078 int qat_sym_validate_3des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
2079 {
2080         switch (key_len) {
2081         case QAT_3DES_KEY_SZ_OPT1:
2082         case QAT_3DES_KEY_SZ_OPT2:
2083         case QAT_3DES_KEY_SZ_OPT3:
2084                 *alg = ICP_QAT_HW_CIPHER_ALGO_3DES;
2085                 break;
2086         default:
2087                 return -EINVAL;
2088         }
2089         return 0;
2090 }
2091
2092 int qat_sym_validate_zuc_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
2093 {
2094         switch (key_len) {
2095         case ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ:
2096                 *alg = ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3;
2097                 break;
2098         default:
2099                 return -EINVAL;
2100         }
2101         return 0;
2102 }