1adef8faf4bcff8043cc31a982384929630ecee2
[dpdk.git] / drivers / crypto / qat / qat_sym_session.c
1 /* SPDX-License-Identifier: (BSD-3-Clause OR GPL-2.0)
2  * Copyright(c) 2015-2019 Intel Corporation
3  */
4
5 #include <openssl/sha.h>        /* Needed to calculate pre-compute values */
6 #include <openssl/aes.h>        /* Needed to calculate pre-compute values */
7 #include <openssl/md5.h>        /* Needed to calculate pre-compute values */
8 #include <openssl/evp.h>        /* Needed for bpi runt block processing */
9
10 #include <rte_memcpy.h>
11 #include <rte_common.h>
12 #include <rte_spinlock.h>
13 #include <rte_byteorder.h>
14 #include <rte_log.h>
15 #include <rte_malloc.h>
16 #include <rte_crypto_sym.h>
17
18 #include "qat_logs.h"
19 #include "qat_sym_session.h"
20 #include "qat_sym_pmd.h"
21
22 /** Frees a context previously created
23  *  Depends on openssl libcrypto
24  */
25 static void
26 bpi_cipher_ctx_free(void *bpi_ctx)
27 {
28         if (bpi_ctx != NULL)
29                 EVP_CIPHER_CTX_free((EVP_CIPHER_CTX *)bpi_ctx);
30 }
31
32 /** Creates a context in either AES or DES in ECB mode
33  *  Depends on openssl libcrypto
34  */
35 static int
36 bpi_cipher_ctx_init(enum rte_crypto_cipher_algorithm cryptodev_algo,
37                 enum rte_crypto_cipher_operation direction __rte_unused,
38                 const uint8_t *key, void **ctx)
39 {
40         const EVP_CIPHER *algo = NULL;
41         int ret;
42         *ctx = EVP_CIPHER_CTX_new();
43
44         if (*ctx == NULL) {
45                 ret = -ENOMEM;
46                 goto ctx_init_err;
47         }
48
49         if (cryptodev_algo == RTE_CRYPTO_CIPHER_DES_DOCSISBPI)
50                 algo = EVP_des_ecb();
51         else
52                 algo = EVP_aes_128_ecb();
53
54         /* IV will be ECB encrypted whether direction is encrypt or decrypt*/
55         if (EVP_EncryptInit_ex(*ctx, algo, NULL, key, 0) != 1) {
56                 ret = -EINVAL;
57                 goto ctx_init_err;
58         }
59
60         return 0;
61
62 ctx_init_err:
63         if (*ctx != NULL)
64                 EVP_CIPHER_CTX_free(*ctx);
65         return ret;
66 }
67
68 static int
69 qat_is_cipher_alg_supported(enum rte_crypto_cipher_algorithm algo,
70                 struct qat_sym_dev_private *internals)
71 {
72         int i = 0;
73         const struct rte_cryptodev_capabilities *capability;
74
75         while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
76                         RTE_CRYPTO_OP_TYPE_UNDEFINED) {
77                 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
78                         continue;
79
80                 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_CIPHER)
81                         continue;
82
83                 if (capability->sym.cipher.algo == algo)
84                         return 1;
85         }
86         return 0;
87 }
88
89 static int
90 qat_is_auth_alg_supported(enum rte_crypto_auth_algorithm algo,
91                 struct qat_sym_dev_private *internals)
92 {
93         int i = 0;
94         const struct rte_cryptodev_capabilities *capability;
95
96         while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
97                         RTE_CRYPTO_OP_TYPE_UNDEFINED) {
98                 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
99                         continue;
100
101                 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_AUTH)
102                         continue;
103
104                 if (capability->sym.auth.algo == algo)
105                         return 1;
106         }
107         return 0;
108 }
109
/** Release this driver's private data attached to @sess.
 *
 *  Frees the DOCSIS BPI OpenSSL context if one was created, zeroes
 *  the private session area, detaches it from the generic session and
 *  returns it to its mempool. No-op when no private data is set for
 *  this driver id.
 */
void
qat_sym_session_clear(struct rte_cryptodev *dev,
		struct rte_cryptodev_sym_session *sess)
{
	uint8_t index = dev->driver_id;
	void *sess_priv = get_sym_session_private_data(sess, index);
	struct qat_sym_session *s = (struct qat_sym_session *)sess_priv;

	if (sess_priv) {
		if (s->bpi_ctx)
			bpi_cipher_ctx_free(s->bpi_ctx);
		/* Wipe key material before the object goes back to the pool */
		memset(s, 0, qat_sym_session_get_private_size(dev));
		struct rte_mempool *sess_mp = rte_mempool_from_obj(sess_priv);

		set_sym_session_private_data(sess, index, NULL);
		rte_mempool_put(sess_mp, sess_priv);
	}
}
128
129 static int
130 qat_get_cmd_id(const struct rte_crypto_sym_xform *xform)
131 {
132         /* Cipher Only */
133         if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER && xform->next == NULL)
134                 return ICP_QAT_FW_LA_CMD_CIPHER;
135
136         /* Authentication Only */
137         if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH && xform->next == NULL)
138                 return ICP_QAT_FW_LA_CMD_AUTH;
139
140         /* AEAD */
141         if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
142                 /* AES-GCM and AES-CCM works with different direction
143                  * GCM first encrypts and generate hash where AES-CCM
144                  * first generate hash and encrypts. Similar relation
145                  * applies to decryption.
146                  */
147                 if (xform->aead.op == RTE_CRYPTO_AEAD_OP_ENCRYPT)
148                         if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
149                                 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
150                         else
151                                 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
152                 else
153                         if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
154                                 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
155                         else
156                                 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
157         }
158
159         if (xform->next == NULL)
160                 return -1;
161
162         /* Cipher then Authenticate */
163         if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
164                         xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
165                 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
166
167         /* Authenticate then Cipher */
168         if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH &&
169                         xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
170                 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
171
172         return -1;
173 }
174
175 static struct rte_crypto_auth_xform *
176 qat_get_auth_xform(struct rte_crypto_sym_xform *xform)
177 {
178         do {
179                 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH)
180                         return &xform->auth;
181
182                 xform = xform->next;
183         } while (xform);
184
185         return NULL;
186 }
187
188 static struct rte_crypto_cipher_xform *
189 qat_get_cipher_xform(struct rte_crypto_sym_xform *xform)
190 {
191         do {
192                 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
193                         return &xform->cipher;
194
195                 xform = xform->next;
196         } while (xform);
197
198         return NULL;
199 }
200
/** Configure the cipher part of a QAT session from a crypto xform chain.
 *
 *  Picks the first cipher transform in the chain, validates the key
 *  size for the requested algorithm, and fills in the session's QAT
 *  cipher algorithm, mode, direction and IV placement. For DOCSIS BPI
 *  algorithms an OpenSSL ECB context is additionally created for runt
 *  block processing. Finally the cipher content descriptor is built.
 *
 *  Returns 0 on success; -EINVAL on bad key size or undefined
 *  algorithm, -ENOTSUP for algorithms QAT does not support (or that
 *  this device generation lacks). On any error a previously created
 *  BPI context is freed.
 */
int
qat_sym_session_configure_cipher(struct rte_cryptodev *dev,
		struct rte_crypto_sym_xform *xform,
		struct qat_sym_session *session)
{
	struct qat_sym_dev_private *internals = dev->data->dev_private;
	struct rte_crypto_cipher_xform *cipher_xform = NULL;
	int ret;

	/* Get cipher xform from crypto xform chain */
	cipher_xform = qat_get_cipher_xform(xform);

	session->cipher_iv.offset = cipher_xform->iv.offset;
	session->cipher_iv.length = cipher_xform->iv.length;

	switch (cipher_xform->algo) {
	case RTE_CRYPTO_CIPHER_AES_CBC:
		if (qat_sym_validate_aes_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_AES_CTR:
		if (qat_sym_validate_aes_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		break;
	case RTE_CRYPTO_CIPHER_SNOW3G_UEA2:
		if (qat_sym_validate_snow3g_key(cipher_xform->key.length,
					&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid SNOW 3G cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
		break;
	case RTE_CRYPTO_CIPHER_NULL:
		session->qat_cipher_alg = ICP_QAT_HW_CIPHER_ALGO_NULL;
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		break;
	case RTE_CRYPTO_CIPHER_KASUMI_F8:
		if (qat_sym_validate_kasumi_key(cipher_xform->key.length,
					&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid KASUMI cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_F8_MODE;
		break;
	case RTE_CRYPTO_CIPHER_3DES_CBC:
		if (qat_sym_validate_3des_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid 3DES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_DES_CBC:
		if (qat_sym_validate_des_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid DES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_3DES_CTR:
		if (qat_sym_validate_3des_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid 3DES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		break;
	case RTE_CRYPTO_CIPHER_DES_DOCSISBPI:
		/* BPI needs an OpenSSL ECB ctx for runt-block handling */
		ret = bpi_cipher_ctx_init(
					cipher_xform->algo,
					cipher_xform->op,
					cipher_xform->key.data,
					&session->bpi_ctx);
		if (ret != 0) {
			QAT_LOG(ERR, "failed to create DES BPI ctx");
			goto error_out;
		}
		if (qat_sym_validate_des_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid DES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_AES_DOCSISBPI:
		/* BPI needs an OpenSSL ECB ctx for runt-block handling */
		ret = bpi_cipher_ctx_init(
					cipher_xform->algo,
					cipher_xform->op,
					cipher_xform->key.data,
					&session->bpi_ctx);
		if (ret != 0) {
			QAT_LOG(ERR, "failed to create AES BPI ctx");
			goto error_out;
		}
		if (qat_sym_validate_aes_docsisbpi_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES DOCSISBPI key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_ZUC_EEA3:
		/* ZUC is only advertised on device generations that have it */
		if (!qat_is_cipher_alg_supported(
			cipher_xform->algo, internals)) {
			QAT_LOG(ERR, "%s not supported on this device",
				rte_crypto_cipher_algorithm_strings
					[cipher_xform->algo]);
			ret = -ENOTSUP;
			goto error_out;
		}
		if (qat_sym_validate_zuc_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid ZUC cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
		break;
	case RTE_CRYPTO_CIPHER_AES_XTS:
		/* XTS keys are two concatenated AES keys; validate one half */
		if ((cipher_xform->key.length/2) == ICP_QAT_HW_AES_192_KEY_SZ) {
			QAT_LOG(ERR, "AES-XTS-192 not supported");
			ret = -EINVAL;
			goto error_out;
		}
		if (qat_sym_validate_aes_key((cipher_xform->key.length/2),
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES-XTS cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_XTS_MODE;
		break;
	case RTE_CRYPTO_CIPHER_3DES_ECB:
	case RTE_CRYPTO_CIPHER_AES_ECB:
	case RTE_CRYPTO_CIPHER_AES_F8:
	case RTE_CRYPTO_CIPHER_ARC4:
		QAT_LOG(ERR, "Crypto QAT PMD: Unsupported Cipher alg %u",
				cipher_xform->algo);
		ret = -ENOTSUP;
		goto error_out;
	default:
		QAT_LOG(ERR, "Crypto: Undefined Cipher specified %u\n",
				cipher_xform->algo);
		ret = -EINVAL;
		goto error_out;
	}

	if (cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
		session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
	else
		session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;

	/* Build the cipher content descriptor from the selected settings */
	if (qat_sym_session_aead_create_cd_cipher(session,
						cipher_xform->key.data,
						cipher_xform->key.length)) {
		ret = -EINVAL;
		goto error_out;
	}

	return 0;

error_out:
	/* Drop any BPI ctx created above so the session holds no
	 * dangling OpenSSL resources on failure.
	 */
	if (session->bpi_ctx) {
		bpi_cipher_ctx_free(session->bpi_ctx);
		session->bpi_ctx = NULL;
	}
	return ret;
}
387
388 int
389 qat_sym_session_configure(struct rte_cryptodev *dev,
390                 struct rte_crypto_sym_xform *xform,
391                 struct rte_cryptodev_sym_session *sess,
392                 struct rte_mempool *mempool)
393 {
394         void *sess_private_data;
395         int ret;
396
397         if (rte_mempool_get(mempool, &sess_private_data)) {
398                 CDEV_LOG_ERR(
399                         "Couldn't get object from session mempool");
400                 return -ENOMEM;
401         }
402
403         ret = qat_sym_session_set_parameters(dev, xform, sess_private_data);
404         if (ret != 0) {
405                 QAT_LOG(ERR,
406                     "Crypto QAT PMD: failed to configure session parameters");
407
408                 /* Return session to mempool */
409                 rte_mempool_put(mempool, sess_private_data);
410                 return ret;
411         }
412
413         set_sym_session_private_data(sess, dev->driver_id,
414                 sess_private_data);
415
416         return 0;
417 }
418
/** Enable the extended-protocol hash flags in the session's firmware
 *  request template.
 *
 *  Used for "mixed" cipher/hash combinations (e.g. ZUC auth with a
 *  non-ZUC cipher): sets the Use Extended Protocol Flags bit in LW 1,
 *  ORs @hash_flag into the hash flags in LW 28, and selects the LW 1
 *  proto flags appropriate for the session's cipher algorithm.
 */
static void
qat_sym_session_set_ext_hash_flags(struct qat_sym_session *session,
		uint8_t hash_flag)
{
	struct icp_qat_fw_comn_req_hdr *header = &session->fw_req.comn_hdr;
	struct icp_qat_fw_cipher_auth_cd_ctrl_hdr *cd_ctrl =
			(struct icp_qat_fw_cipher_auth_cd_ctrl_hdr *)
			session->fw_req.cd_ctrl.content_desc_ctrl_lw;

	/* Set the Use Extended Protocol Flags bit in LW 1 */
	QAT_FIELD_SET(header->comn_req_flags,
			QAT_COMN_EXT_FLAGS_USED,
			QAT_COMN_EXT_FLAGS_BITPOS,
			QAT_COMN_EXT_FLAGS_MASK);

	/* Set Hash Flags in LW 28 */
	cd_ctrl->hash_flags |= hash_flag;

	/* Set proto flags in LW 1 */
	switch (session->qat_cipher_alg) {
	case ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2:
		/* SNOW 3G cipher: legacy SNOW 3G proto, ZUC proto off */
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_SNOW_3G_PROTO);
		ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
				header->serv_specif_flags, 0);
		break;
	case ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3:
		/* ZUC cipher: dedicated ZUC proto flag instead */
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_NO_PROTO);
		ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
				header->serv_specif_flags,
				ICP_QAT_FW_LA_ZUC_3G_PROTO);
		break;
	default:
		/* Any other cipher: clear both proto selections */
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_NO_PROTO);
		ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
				header->serv_specif_flags, 0);
		break;
	}
}
460
/** Detect "mixed" hash+cipher algorithm combinations and configure the
 *  extended hash flags they require.
 *
 *  Mixed means the wireless auth and cipher algorithms do not pair up
 *  (ZUC auth with non-ZUC cipher, SNOW 3G auth with non-SNOW 3G
 *  cipher, or CMAC/NULL auth with a SNOW 3G/ZUC cipher). Such
 *  combinations need the extended-protocol flags and are only
 *  supported from QAT GEN3 on, so min_qat_dev_gen is raised too.
 */
static void
qat_sym_session_handle_mixed(struct qat_sym_session *session)
{
	if (session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3 &&
			session->qat_cipher_alg !=
			ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
		session->min_qat_dev_gen = QAT_GEN3;
		qat_sym_session_set_ext_hash_flags(session,
			1 << ICP_QAT_FW_AUTH_HDR_FLAG_ZUC_EIA3_BITPOS);
	} else if (session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2 &&
			session->qat_cipher_alg !=
			ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
		session->min_qat_dev_gen = QAT_GEN3;
		qat_sym_session_set_ext_hash_flags(session,
			1 << ICP_QAT_FW_AUTH_HDR_FLAG_SNOW3G_UIA2_BITPOS);
	} else if ((session->aes_cmac ||
			session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_NULL) &&
			(session->qat_cipher_alg ==
			ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2 ||
			session->qat_cipher_alg ==
			ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)) {
		session->min_qat_dev_gen = QAT_GEN3;
		/* No extra hash flag needed, but ext flags must be enabled */
		qat_sym_session_set_ext_hash_flags(session, 0);
	}
}
486
/** Fill in the private QAT session from a crypto xform chain.
 *
 *  Derives the firmware LA command id from the chain shape, then
 *  configures the cipher and/or auth halves in the order the command
 *  requires (cipher first for CIPHER_HASH, auth first for
 *  HASH_CIPHER; AEAD xforms take the dedicated AEAD path instead).
 *
 *  Returns 0 on success, -ENOTSUP for unsupported chains/services, or
 *  the error from the configure helpers.
 */
int
qat_sym_session_set_parameters(struct rte_cryptodev *dev,
		struct rte_crypto_sym_xform *xform, void *session_private)
{
	struct qat_sym_session *session = session_private;
	int ret;
	int qat_cmd_id;

	/* Set context descriptor physical address */
	session->cd_paddr = rte_mempool_virt2iova(session) +
			offsetof(struct qat_sym_session, cd);

	/* Default; may be raised by mixed-algorithm handling below */
	session->min_qat_dev_gen = QAT_GEN1;

	/* Get requested QAT command id */
	qat_cmd_id = qat_get_cmd_id(xform);
	if (qat_cmd_id < 0 || qat_cmd_id >= ICP_QAT_FW_LA_CMD_DELIMITER) {
		QAT_LOG(ERR, "Unsupported xform chain requested");
		return -ENOTSUP;
	}
	session->qat_cmd = (enum icp_qat_fw_la_cmd_id)qat_cmd_id;
	switch (session->qat_cmd) {
	case ICP_QAT_FW_LA_CMD_CIPHER:
		ret = qat_sym_session_configure_cipher(dev, xform, session);
		if (ret < 0)
			return ret;
		break;
	case ICP_QAT_FW_LA_CMD_AUTH:
		ret = qat_sym_session_configure_auth(dev, xform, session);
		if (ret < 0)
			return ret;
		break;
	case ICP_QAT_FW_LA_CMD_CIPHER_HASH:
		if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
			ret = qat_sym_session_configure_aead(dev, xform,
					session);
			if (ret < 0)
				return ret;
		} else {
			/* Chained xforms: cipher descriptor first */
			ret = qat_sym_session_configure_cipher(dev,
					xform, session);
			if (ret < 0)
				return ret;
			ret = qat_sym_session_configure_auth(dev,
					xform, session);
			if (ret < 0)
				return ret;
			/* Special handling of mixed hash+cipher algorithms */
			qat_sym_session_handle_mixed(session);
		}
		break;
	case ICP_QAT_FW_LA_CMD_HASH_CIPHER:
		if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
			ret = qat_sym_session_configure_aead(dev, xform,
					session);
			if (ret < 0)
				return ret;
		} else {
			/* Chained xforms: auth descriptor first */
			ret = qat_sym_session_configure_auth(dev,
					xform, session);
			if (ret < 0)
				return ret;
			ret = qat_sym_session_configure_cipher(dev,
					xform, session);
			if (ret < 0)
				return ret;
			/* Special handling of mixed hash+cipher algorithms */
			qat_sym_session_handle_mixed(session);
		}
		break;
	/* All remaining LA services are not exposed by this PMD */
	case ICP_QAT_FW_LA_CMD_TRNG_GET_RANDOM:
	case ICP_QAT_FW_LA_CMD_TRNG_TEST:
	case ICP_QAT_FW_LA_CMD_SSL3_KEY_DERIVE:
	case ICP_QAT_FW_LA_CMD_TLS_V1_1_KEY_DERIVE:
	case ICP_QAT_FW_LA_CMD_TLS_V1_2_KEY_DERIVE:
	case ICP_QAT_FW_LA_CMD_MGF1:
	case ICP_QAT_FW_LA_CMD_AUTH_PRE_COMP:
	case ICP_QAT_FW_LA_CMD_CIPHER_PRE_COMP:
	case ICP_QAT_FW_LA_CMD_DELIMITER:
	QAT_LOG(ERR, "Unsupported Service %u",
		session->qat_cmd);
		return -ENOTSUP;
	default:
	QAT_LOG(ERR, "Unsupported Service %u",
		session->qat_cmd);
		return -ENOTSUP;
	}

	return 0;
}
577
/** Configure an AEAD session to use single-pass (SPC) processing.
 *
 *  Single-pass turns the AEAD operation into one CIPHER firmware
 *  command that also computes/verifies the digest, instead of a
 *  chained cipher+hash. Requires QAT GEN3 or later. Builds the cipher
 *  content descriptor, encodes the digest length and AAD length into
 *  the cipher config words, and sets the SPC proto flag in the
 *  request header.
 *
 *  Returns 0 on success, -EINVAL if the cipher descriptor cannot be
 *  built.
 */
static int
qat_sym_session_handle_single_pass(struct qat_sym_session *session,
		struct rte_crypto_aead_xform *aead_xform)
{
	/* Request-parameter area doubles as cipher params for SPC */
	struct icp_qat_fw_la_cipher_req_params *cipher_param =
			(void *) &session->fw_req.serv_specif_rqpars;

	session->is_single_pass = 1;
	session->min_qat_dev_gen = QAT_GEN3;
	session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER;
	if (aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) {
		session->qat_mode = ICP_QAT_HW_CIPHER_AEAD_MODE;
		/* Tell firmware the GCM IV is the 12-octet form */
		ICP_QAT_FW_LA_GCM_IV_LEN_FLAG_SET(
			session->fw_req.comn_hdr.serv_specif_flags,
			ICP_QAT_FW_LA_GCM_IV_LEN_12_OCTETS);
	} else {
		/* Chacha-Poly is special case that use QAT CTR mode */
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
	}
	session->cipher_iv.offset = aead_xform->iv.offset;
	session->cipher_iv.length = aead_xform->iv.length;
	if (qat_sym_session_aead_create_cd_cipher(session,
			aead_xform->key.data, aead_xform->key.length))
		return -EINVAL;
	session->aad_len = aead_xform->aad_length;
	session->digest_length = aead_xform->digest_length;
	if (aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
		session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
		session->auth_op = ICP_QAT_HW_AUTH_GENERATE;
		/* Encrypt: firmware appends the computed digest */
		ICP_QAT_FW_LA_RET_AUTH_SET(
			session->fw_req.comn_hdr.serv_specif_flags,
			ICP_QAT_FW_LA_RET_AUTH_RES);
	} else {
		session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
		session->auth_op = ICP_QAT_HW_AUTH_VERIFY;
		/* Decrypt: firmware compares against the supplied digest */
		ICP_QAT_FW_LA_CMP_AUTH_SET(
			session->fw_req.comn_hdr.serv_specif_flags,
			ICP_QAT_FW_LA_CMP_AUTH_RES);
	}
	ICP_QAT_FW_LA_SINGLE_PASS_PROTO_FLAG_SET(
			session->fw_req.comn_hdr.serv_specif_flags,
			ICP_QAT_FW_LA_SINGLE_PASS_PROTO);
	ICP_QAT_FW_LA_PROTO_SET(
			session->fw_req.comn_hdr.serv_specif_flags,
			ICP_QAT_FW_LA_NO_PROTO);
	session->fw_req.comn_hdr.service_cmd_id =
			ICP_QAT_FW_LA_CMD_CIPHER;
	/* Rebuild cipher config: AEAD mode plus digest length field */
	session->cd.cipher.cipher_config.val =
			ICP_QAT_HW_CIPHER_CONFIG_BUILD(
				ICP_QAT_HW_CIPHER_AEAD_MODE,
				session->qat_cipher_alg,
				ICP_QAT_HW_CIPHER_NO_CONVERT,
				session->qat_dir);
	QAT_FIELD_SET(session->cd.cipher.cipher_config.val,
			aead_xform->digest_length,
			QAT_CIPHER_AEAD_HASH_CMP_LEN_BITPOS,
			QAT_CIPHER_AEAD_HASH_CMP_LEN_MASK);
	/* AAD length lives in the upper config word for SPC */
	session->cd.cipher.cipher_config.reserved =
			ICP_QAT_HW_CIPHER_CONFIG_BUILD_UPPER(
				aead_xform->aad_length);
	cipher_param->spc_aad_sz = aead_xform->aad_length;
	cipher_param->spc_auth_res_sz = aead_xform->digest_length;

	return 0;
}
643
644 int
645 qat_sym_session_configure_auth(struct rte_cryptodev *dev,
646                                 struct rte_crypto_sym_xform *xform,
647                                 struct qat_sym_session *session)
648 {
649         struct rte_crypto_auth_xform *auth_xform = qat_get_auth_xform(xform);
650         struct qat_sym_dev_private *internals = dev->data->dev_private;
651         const uint8_t *key_data = auth_xform->key.data;
652         uint8_t key_length = auth_xform->key.length;
653         session->aes_cmac = 0;
654
655         switch (auth_xform->algo) {
656         case RTE_CRYPTO_AUTH_SHA1_HMAC:
657                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA1;
658                 break;
659         case RTE_CRYPTO_AUTH_SHA224_HMAC:
660                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA224;
661                 break;
662         case RTE_CRYPTO_AUTH_SHA256_HMAC:
663                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA256;
664                 break;
665         case RTE_CRYPTO_AUTH_SHA384_HMAC:
666                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA384;
667                 break;
668         case RTE_CRYPTO_AUTH_SHA512_HMAC:
669                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA512;
670                 break;
671         case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
672                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
673                 break;
674         case RTE_CRYPTO_AUTH_AES_CMAC:
675                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
676                 session->aes_cmac = 1;
677                 break;
678         case RTE_CRYPTO_AUTH_AES_GMAC:
679                 if (qat_sym_validate_aes_key(auth_xform->key.length,
680                                 &session->qat_cipher_alg) != 0) {
681                         QAT_LOG(ERR, "Invalid AES key size");
682                         return -EINVAL;
683                 }
684                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
685                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
686
687                 break;
688         case RTE_CRYPTO_AUTH_SNOW3G_UIA2:
689                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2;
690                 break;
691         case RTE_CRYPTO_AUTH_MD5_HMAC:
692                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_MD5;
693                 break;
694         case RTE_CRYPTO_AUTH_NULL:
695                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_NULL;
696                 break;
697         case RTE_CRYPTO_AUTH_KASUMI_F9:
698                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_KASUMI_F9;
699                 break;
700         case RTE_CRYPTO_AUTH_ZUC_EIA3:
701                 if (!qat_is_auth_alg_supported(auth_xform->algo, internals)) {
702                         QAT_LOG(ERR, "%s not supported on this device",
703                                 rte_crypto_auth_algorithm_strings
704                                 [auth_xform->algo]);
705                         return -ENOTSUP;
706                 }
707                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3;
708                 break;
709         case RTE_CRYPTO_AUTH_SHA1:
710         case RTE_CRYPTO_AUTH_SHA256:
711         case RTE_CRYPTO_AUTH_SHA512:
712         case RTE_CRYPTO_AUTH_SHA224:
713         case RTE_CRYPTO_AUTH_SHA384:
714         case RTE_CRYPTO_AUTH_MD5:
715         case RTE_CRYPTO_AUTH_AES_CBC_MAC:
716                 QAT_LOG(ERR, "Crypto: Unsupported hash alg %u",
717                                 auth_xform->algo);
718                 return -ENOTSUP;
719         default:
720                 QAT_LOG(ERR, "Crypto: Undefined Hash algo %u specified",
721                                 auth_xform->algo);
722                 return -EINVAL;
723         }
724
725         session->auth_iv.offset = auth_xform->iv.offset;
726         session->auth_iv.length = auth_xform->iv.length;
727
728         if (auth_xform->algo == RTE_CRYPTO_AUTH_AES_GMAC) {
729                 if (auth_xform->op == RTE_CRYPTO_AUTH_OP_GENERATE) {
730                         session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER_HASH;
731                         session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
732                         /*
733                          * It needs to create cipher desc content first,
734                          * then authentication
735                          */
736
737                         if (qat_sym_session_aead_create_cd_cipher(session,
738                                                 auth_xform->key.data,
739                                                 auth_xform->key.length))
740                                 return -EINVAL;
741
742                         if (qat_sym_session_aead_create_cd_auth(session,
743                                                 key_data,
744                                                 key_length,
745                                                 0,
746                                                 auth_xform->digest_length,
747                                                 auth_xform->op))
748                                 return -EINVAL;
749                 } else {
750                         session->qat_cmd = ICP_QAT_FW_LA_CMD_HASH_CIPHER;
751                         session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
752                         /*
753                          * It needs to create authentication desc content first,
754                          * then cipher
755                          */
756
757                         if (qat_sym_session_aead_create_cd_auth(session,
758                                         key_data,
759                                         key_length,
760                                         0,
761                                         auth_xform->digest_length,
762                                         auth_xform->op))
763                                 return -EINVAL;
764
765                         if (qat_sym_session_aead_create_cd_cipher(session,
766                                                 auth_xform->key.data,
767                                                 auth_xform->key.length))
768                                 return -EINVAL;
769                 }
		/* Restore to authentication only */
771                 session->qat_cmd = ICP_QAT_FW_LA_CMD_AUTH;
772         } else {
773                 if (qat_sym_session_aead_create_cd_auth(session,
774                                 key_data,
775                                 key_length,
776                                 0,
777                                 auth_xform->digest_length,
778                                 auth_xform->op))
779                         return -EINVAL;
780         }
781
782         session->digest_length = auth_xform->digest_length;
783         return 0;
784 }
785
786 int
787 qat_sym_session_configure_aead(struct rte_cryptodev *dev,
788                                 struct rte_crypto_sym_xform *xform,
789                                 struct qat_sym_session *session)
790 {
791         struct rte_crypto_aead_xform *aead_xform = &xform->aead;
792         enum rte_crypto_auth_operation crypto_operation;
793         struct qat_sym_dev_private *internals =
794                         dev->data->dev_private;
795         enum qat_device_gen qat_dev_gen =
796                         internals->qat_dev->qat_dev_gen;
797
798         /*
799          * Store AEAD IV parameters as cipher IV,
800          * to avoid unnecessary memory usage
801          */
802         session->cipher_iv.offset = xform->aead.iv.offset;
803         session->cipher_iv.length = xform->aead.iv.length;
804
805         session->is_single_pass = 0;
806         switch (aead_xform->algo) {
807         case RTE_CRYPTO_AEAD_AES_GCM:
808                 if (qat_sym_validate_aes_key(aead_xform->key.length,
809                                 &session->qat_cipher_alg) != 0) {
810                         QAT_LOG(ERR, "Invalid AES key size");
811                         return -EINVAL;
812                 }
813                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
814                 session->qat_hash_alg =
815                                 ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
816                 if (qat_dev_gen > QAT_GEN2 && aead_xform->iv.length ==
817                                 QAT_AES_GCM_SPC_IV_SIZE) {
818                         return qat_sym_session_handle_single_pass(session,
819                                                 aead_xform);
820                 }
821                 break;
822         case RTE_CRYPTO_AEAD_AES_CCM:
823                 if (qat_sym_validate_aes_key(aead_xform->key.length,
824                                 &session->qat_cipher_alg) != 0) {
825                         QAT_LOG(ERR, "Invalid AES key size");
826                         return -EINVAL;
827                 }
828                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
829                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC;
830                 break;
831         case RTE_CRYPTO_AEAD_CHACHA20_POLY1305:
832                 if (aead_xform->key.length != ICP_QAT_HW_CHACHAPOLY_KEY_SZ)
833                         return -EINVAL;
834                 session->qat_cipher_alg =
835                                 ICP_QAT_HW_CIPHER_ALGO_CHACHA20_POLY1305;
836                 return qat_sym_session_handle_single_pass(session,
837                                                 aead_xform);
838         default:
839                 QAT_LOG(ERR, "Crypto: Undefined AEAD specified %u\n",
840                                 aead_xform->algo);
841                 return -EINVAL;
842         }
843
844         if ((aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT &&
845                         aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) ||
846                         (aead_xform->op == RTE_CRYPTO_AEAD_OP_DECRYPT &&
847                         aead_xform->algo == RTE_CRYPTO_AEAD_AES_CCM)) {
848                 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
849                 /*
850                  * It needs to create cipher desc content first,
851                  * then authentication
852                  */
853                 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
854                         RTE_CRYPTO_AUTH_OP_GENERATE : RTE_CRYPTO_AUTH_OP_VERIFY;
855
856                 if (qat_sym_session_aead_create_cd_cipher(session,
857                                         aead_xform->key.data,
858                                         aead_xform->key.length))
859                         return -EINVAL;
860
861                 if (qat_sym_session_aead_create_cd_auth(session,
862                                         aead_xform->key.data,
863                                         aead_xform->key.length,
864                                         aead_xform->aad_length,
865                                         aead_xform->digest_length,
866                                         crypto_operation))
867                         return -EINVAL;
868         } else {
869                 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
870                 /*
871                  * It needs to create authentication desc content first,
872                  * then cipher
873                  */
874
875                 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
876                         RTE_CRYPTO_AUTH_OP_VERIFY : RTE_CRYPTO_AUTH_OP_GENERATE;
877
878                 if (qat_sym_session_aead_create_cd_auth(session,
879                                         aead_xform->key.data,
880                                         aead_xform->key.length,
881                                         aead_xform->aad_length,
882                                         aead_xform->digest_length,
883                                         crypto_operation))
884                         return -EINVAL;
885
886                 if (qat_sym_session_aead_create_cd_cipher(session,
887                                         aead_xform->key.data,
888                                         aead_xform->key.length))
889                         return -EINVAL;
890         }
891
892         session->digest_length = aead_xform->digest_length;
893         return 0;
894 }
895
896 unsigned int qat_sym_session_get_private_size(
897                 struct rte_cryptodev *dev __rte_unused)
898 {
899         return RTE_ALIGN_CEIL(sizeof(struct qat_sym_session), 8);
900 }
901
902 /* returns block size in bytes per cipher algo */
903 int qat_cipher_get_block_size(enum icp_qat_hw_cipher_algo qat_cipher_alg)
904 {
905         switch (qat_cipher_alg) {
906         case ICP_QAT_HW_CIPHER_ALGO_DES:
907                 return ICP_QAT_HW_DES_BLK_SZ;
908         case ICP_QAT_HW_CIPHER_ALGO_3DES:
909                 return ICP_QAT_HW_3DES_BLK_SZ;
910         case ICP_QAT_HW_CIPHER_ALGO_AES128:
911         case ICP_QAT_HW_CIPHER_ALGO_AES192:
912         case ICP_QAT_HW_CIPHER_ALGO_AES256:
913                 return ICP_QAT_HW_AES_BLK_SZ;
914         default:
915                 QAT_LOG(ERR, "invalid block cipher alg %u", qat_cipher_alg);
916                 return -EFAULT;
917         };
918         return -EFAULT;
919 }
920
921 /*
922  * Returns size in bytes per hash algo for state1 size field in cd_ctrl
923  * This is digest size rounded up to nearest quadword
924  */
925 static int qat_hash_get_state1_size(enum icp_qat_hw_auth_algo qat_hash_alg)
926 {
927         switch (qat_hash_alg) {
928         case ICP_QAT_HW_AUTH_ALGO_SHA1:
929                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA1_STATE1_SZ,
930                                                 QAT_HW_DEFAULT_ALIGNMENT);
931         case ICP_QAT_HW_AUTH_ALGO_SHA224:
932                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA224_STATE1_SZ,
933                                                 QAT_HW_DEFAULT_ALIGNMENT);
934         case ICP_QAT_HW_AUTH_ALGO_SHA256:
935                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA256_STATE1_SZ,
936                                                 QAT_HW_DEFAULT_ALIGNMENT);
937         case ICP_QAT_HW_AUTH_ALGO_SHA384:
938                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA384_STATE1_SZ,
939                                                 QAT_HW_DEFAULT_ALIGNMENT);
940         case ICP_QAT_HW_AUTH_ALGO_SHA512:
941                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
942                                                 QAT_HW_DEFAULT_ALIGNMENT);
943         case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
944                 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ,
945                                                 QAT_HW_DEFAULT_ALIGNMENT);
946         case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
947         case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
948                 return QAT_HW_ROUND_UP(ICP_QAT_HW_GALOIS_128_STATE1_SZ,
949                                                 QAT_HW_DEFAULT_ALIGNMENT);
950         case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
951                 return QAT_HW_ROUND_UP(ICP_QAT_HW_ZUC_3G_EIA3_STATE1_SZ,
952                                                 QAT_HW_DEFAULT_ALIGNMENT);
953         case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
954                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SNOW_3G_UIA2_STATE1_SZ,
955                                                 QAT_HW_DEFAULT_ALIGNMENT);
956         case ICP_QAT_HW_AUTH_ALGO_MD5:
957                 return QAT_HW_ROUND_UP(ICP_QAT_HW_MD5_STATE1_SZ,
958                                                 QAT_HW_DEFAULT_ALIGNMENT);
959         case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
960                 return QAT_HW_ROUND_UP(ICP_QAT_HW_KASUMI_F9_STATE1_SZ,
961                                                 QAT_HW_DEFAULT_ALIGNMENT);
962         case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
963                 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_CBC_MAC_STATE1_SZ,
964                                                 QAT_HW_DEFAULT_ALIGNMENT);
965         case ICP_QAT_HW_AUTH_ALGO_NULL:
966                 return QAT_HW_ROUND_UP(ICP_QAT_HW_NULL_STATE1_SZ,
967                                                 QAT_HW_DEFAULT_ALIGNMENT);
968         case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
969                 /* return maximum state1 size in this case */
970                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
971                                                 QAT_HW_DEFAULT_ALIGNMENT);
972         default:
973                 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
974                 return -EFAULT;
975         };
976         return -EFAULT;
977 }
978
979 /* returns digest size in bytes  per hash algo */
980 static int qat_hash_get_digest_size(enum icp_qat_hw_auth_algo qat_hash_alg)
981 {
982         switch (qat_hash_alg) {
983         case ICP_QAT_HW_AUTH_ALGO_SHA1:
984                 return ICP_QAT_HW_SHA1_STATE1_SZ;
985         case ICP_QAT_HW_AUTH_ALGO_SHA224:
986                 return ICP_QAT_HW_SHA224_STATE1_SZ;
987         case ICP_QAT_HW_AUTH_ALGO_SHA256:
988                 return ICP_QAT_HW_SHA256_STATE1_SZ;
989         case ICP_QAT_HW_AUTH_ALGO_SHA384:
990                 return ICP_QAT_HW_SHA384_STATE1_SZ;
991         case ICP_QAT_HW_AUTH_ALGO_SHA512:
992                 return ICP_QAT_HW_SHA512_STATE1_SZ;
993         case ICP_QAT_HW_AUTH_ALGO_MD5:
994                 return ICP_QAT_HW_MD5_STATE1_SZ;
995         case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
996                 return ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
997         case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
998                 /* return maximum digest size in this case */
999                 return ICP_QAT_HW_SHA512_STATE1_SZ;
1000         default:
1001                 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
1002                 return -EFAULT;
1003         };
1004         return -EFAULT;
1005 }
1006
1007 /* returns block size in byes per hash algo */
1008 static int qat_hash_get_block_size(enum icp_qat_hw_auth_algo qat_hash_alg)
1009 {
1010         switch (qat_hash_alg) {
1011         case ICP_QAT_HW_AUTH_ALGO_SHA1:
1012                 return SHA_CBLOCK;
1013         case ICP_QAT_HW_AUTH_ALGO_SHA224:
1014                 return SHA256_CBLOCK;
1015         case ICP_QAT_HW_AUTH_ALGO_SHA256:
1016                 return SHA256_CBLOCK;
1017         case ICP_QAT_HW_AUTH_ALGO_SHA384:
1018                 return SHA512_CBLOCK;
1019         case ICP_QAT_HW_AUTH_ALGO_SHA512:
1020                 return SHA512_CBLOCK;
1021         case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
1022                 return 16;
1023         case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
1024                 return ICP_QAT_HW_AES_BLK_SZ;
1025         case ICP_QAT_HW_AUTH_ALGO_MD5:
1026                 return MD5_CBLOCK;
1027         case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
1028                 /* return maximum block size in this case */
1029                 return SHA512_CBLOCK;
1030         default:
1031                 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
1032                 return -EFAULT;
1033         };
1034         return -EFAULT;
1035 }
1036
1037 static int partial_hash_sha1(uint8_t *data_in, uint8_t *data_out)
1038 {
1039         SHA_CTX ctx;
1040
1041         if (!SHA1_Init(&ctx))
1042                 return -EFAULT;
1043         SHA1_Transform(&ctx, data_in);
1044         rte_memcpy(data_out, &ctx, SHA_DIGEST_LENGTH);
1045         return 0;
1046 }
1047
1048 static int partial_hash_sha224(uint8_t *data_in, uint8_t *data_out)
1049 {
1050         SHA256_CTX ctx;
1051
1052         if (!SHA224_Init(&ctx))
1053                 return -EFAULT;
1054         SHA256_Transform(&ctx, data_in);
1055         rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
1056         return 0;
1057 }
1058
1059 static int partial_hash_sha256(uint8_t *data_in, uint8_t *data_out)
1060 {
1061         SHA256_CTX ctx;
1062
1063         if (!SHA256_Init(&ctx))
1064                 return -EFAULT;
1065         SHA256_Transform(&ctx, data_in);
1066         rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
1067         return 0;
1068 }
1069
1070 static int partial_hash_sha384(uint8_t *data_in, uint8_t *data_out)
1071 {
1072         SHA512_CTX ctx;
1073
1074         if (!SHA384_Init(&ctx))
1075                 return -EFAULT;
1076         SHA512_Transform(&ctx, data_in);
1077         rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
1078         return 0;
1079 }
1080
1081 static int partial_hash_sha512(uint8_t *data_in, uint8_t *data_out)
1082 {
1083         SHA512_CTX ctx;
1084
1085         if (!SHA512_Init(&ctx))
1086                 return -EFAULT;
1087         SHA512_Transform(&ctx, data_in);
1088         rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
1089         return 0;
1090 }
1091
1092 static int partial_hash_md5(uint8_t *data_in, uint8_t *data_out)
1093 {
1094         MD5_CTX ctx;
1095
1096         if (!MD5_Init(&ctx))
1097                 return -EFAULT;
1098         MD5_Transform(&ctx, data_in);
1099         rte_memcpy(data_out, &ctx, MD5_DIGEST_LENGTH);
1100
1101         return 0;
1102 }
1103
1104 static int partial_hash_compute(enum icp_qat_hw_auth_algo hash_alg,
1105                         uint8_t *data_in,
1106                         uint8_t *data_out)
1107 {
1108         int digest_size;
1109         uint8_t digest[qat_hash_get_digest_size(
1110                         ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1111         uint32_t *hash_state_out_be32;
1112         uint64_t *hash_state_out_be64;
1113         int i;
1114
1115         digest_size = qat_hash_get_digest_size(hash_alg);
1116         if (digest_size <= 0)
1117                 return -EFAULT;
1118
1119         hash_state_out_be32 = (uint32_t *)data_out;
1120         hash_state_out_be64 = (uint64_t *)data_out;
1121
1122         switch (hash_alg) {
1123         case ICP_QAT_HW_AUTH_ALGO_SHA1:
1124                 if (partial_hash_sha1(data_in, digest))
1125                         return -EFAULT;
1126                 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
1127                         *hash_state_out_be32 =
1128                                 rte_bswap32(*(((uint32_t *)digest)+i));
1129                 break;
1130         case ICP_QAT_HW_AUTH_ALGO_SHA224:
1131                 if (partial_hash_sha224(data_in, digest))
1132                         return -EFAULT;
1133                 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
1134                         *hash_state_out_be32 =
1135                                 rte_bswap32(*(((uint32_t *)digest)+i));
1136                 break;
1137         case ICP_QAT_HW_AUTH_ALGO_SHA256:
1138                 if (partial_hash_sha256(data_in, digest))
1139                         return -EFAULT;
1140                 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
1141                         *hash_state_out_be32 =
1142                                 rte_bswap32(*(((uint32_t *)digest)+i));
1143                 break;
1144         case ICP_QAT_HW_AUTH_ALGO_SHA384:
1145                 if (partial_hash_sha384(data_in, digest))
1146                         return -EFAULT;
1147                 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
1148                         *hash_state_out_be64 =
1149                                 rte_bswap64(*(((uint64_t *)digest)+i));
1150                 break;
1151         case ICP_QAT_HW_AUTH_ALGO_SHA512:
1152                 if (partial_hash_sha512(data_in, digest))
1153                         return -EFAULT;
1154                 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
1155                         *hash_state_out_be64 =
1156                                 rte_bswap64(*(((uint64_t *)digest)+i));
1157                 break;
1158         case ICP_QAT_HW_AUTH_ALGO_MD5:
1159                 if (partial_hash_md5(data_in, data_out))
1160                         return -EFAULT;
1161                 break;
1162         default:
1163                 QAT_LOG(ERR, "invalid hash alg %u", hash_alg);
1164                 return -EFAULT;
1165         }
1166
1167         return 0;
1168 }
/* HMAC inner/outer pad bytes (the standard 0x36/0x5c HMAC constants) */
#define HMAC_IPAD_VALUE 0x36
#define HMAC_OPAD_VALUE 0x5c
/* Number of AES-XCBC precomputed keys (K1, K2, K3) */
#define HASH_XCBC_PRECOMP_KEY_NUM 3

/* All-zero 16-byte seed block encrypted to derive the CMAC K0 value */
static const uint8_t AES_CMAC_SEED[ICP_QAT_HW_AES_128_KEY_SZ];
1174
1175 static void aes_cmac_key_derive(uint8_t *base, uint8_t *derived)
1176 {
1177         int i;
1178
1179         derived[0] = base[0] << 1;
1180         for (i = 1; i < ICP_QAT_HW_AES_BLK_SZ ; i++) {
1181                 derived[i] = base[i] << 1;
1182                 derived[i - 1] |= base[i] >> 7;
1183         }
1184
1185         if (base[0] & 0x80)
1186                 derived[ICP_QAT_HW_AES_BLK_SZ - 1] ^= QAT_AES_CMAC_CONST_RB;
1187 }
1188
1189 static int qat_sym_do_precomputes(enum icp_qat_hw_auth_algo hash_alg,
1190                                 const uint8_t *auth_key,
1191                                 uint16_t auth_keylen,
1192                                 uint8_t *p_state_buf,
1193                                 uint16_t *p_state_len,
1194                                 uint8_t aes_cmac)
1195 {
1196         int block_size;
1197         uint8_t ipad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1198         uint8_t opad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1199         int i;
1200
1201         if (hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC) {
1202
1203                 /* CMAC */
1204                 if (aes_cmac) {
1205                         AES_KEY enc_key;
1206                         uint8_t *in = NULL;
1207                         uint8_t k0[ICP_QAT_HW_AES_128_KEY_SZ];
1208                         uint8_t *k1, *k2;
1209
1210                         auth_keylen = ICP_QAT_HW_AES_128_KEY_SZ;
1211
1212                         in = rte_zmalloc("AES CMAC K1",
1213                                          ICP_QAT_HW_AES_128_KEY_SZ, 16);
1214
1215                         if (in == NULL) {
1216                                 QAT_LOG(ERR, "Failed to alloc memory");
1217                                 return -ENOMEM;
1218                         }
1219
1220                         rte_memcpy(in, AES_CMAC_SEED,
1221                                    ICP_QAT_HW_AES_128_KEY_SZ);
1222                         rte_memcpy(p_state_buf, auth_key, auth_keylen);
1223
1224                         if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1225                                 &enc_key) != 0) {
1226                                 rte_free(in);
1227                                 return -EFAULT;
1228                         }
1229
1230                         AES_encrypt(in, k0, &enc_key);
1231
1232                         k1 = p_state_buf + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1233                         k2 = k1 + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1234
1235                         aes_cmac_key_derive(k0, k1);
1236                         aes_cmac_key_derive(k1, k2);
1237
1238                         memset(k0, 0, ICP_QAT_HW_AES_128_KEY_SZ);
1239                         *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
1240                         rte_free(in);
1241                         return 0;
1242                 } else {
1243                         static uint8_t qat_aes_xcbc_key_seed[
1244                                         ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ] = {
1245                                 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1246                                 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1247                                 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1248                                 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1249                                 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1250                                 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1251                         };
1252
1253                         uint8_t *in = NULL;
1254                         uint8_t *out = p_state_buf;
1255                         int x;
1256                         AES_KEY enc_key;
1257
1258                         in = rte_zmalloc("working mem for key",
1259                                         ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ, 16);
1260                         if (in == NULL) {
1261                                 QAT_LOG(ERR, "Failed to alloc memory");
1262                                 return -ENOMEM;
1263                         }
1264
1265                         rte_memcpy(in, qat_aes_xcbc_key_seed,
1266                                         ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1267                         for (x = 0; x < HASH_XCBC_PRECOMP_KEY_NUM; x++) {
1268                                 if (AES_set_encrypt_key(auth_key,
1269                                                         auth_keylen << 3,
1270                                                         &enc_key) != 0) {
1271                                         rte_free(in -
1272                                           (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ));
1273                                         memset(out -
1274                                            (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ),
1275                                           0, ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1276                                         return -EFAULT;
1277                                 }
1278                                 AES_encrypt(in, out, &enc_key);
1279                                 in += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1280                                 out += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1281                         }
1282                         *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
1283                         rte_free(in - x*ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ);
1284                         return 0;
1285                 }
1286
1287         } else if ((hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_128) ||
1288                 (hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_64)) {
1289                 uint8_t *in = NULL;
1290                 uint8_t *out = p_state_buf;
1291                 AES_KEY enc_key;
1292
1293                 memset(p_state_buf, 0, ICP_QAT_HW_GALOIS_H_SZ +
1294                                 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1295                                 ICP_QAT_HW_GALOIS_E_CTR0_SZ);
1296                 in = rte_zmalloc("working mem for key",
1297                                 ICP_QAT_HW_GALOIS_H_SZ, 16);
1298                 if (in == NULL) {
1299                         QAT_LOG(ERR, "Failed to alloc memory");
1300                         return -ENOMEM;
1301                 }
1302
1303                 memset(in, 0, ICP_QAT_HW_GALOIS_H_SZ);
1304                 if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1305                         &enc_key) != 0) {
1306                         return -EFAULT;
1307                 }
1308                 AES_encrypt(in, out, &enc_key);
1309                 *p_state_len = ICP_QAT_HW_GALOIS_H_SZ +
1310                                 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1311                                 ICP_QAT_HW_GALOIS_E_CTR0_SZ;
1312                 rte_free(in);
1313                 return 0;
1314         }
1315
1316         block_size = qat_hash_get_block_size(hash_alg);
1317         if (block_size < 0)
1318                 return block_size;
1319         /* init ipad and opad from key and xor with fixed values */
1320         memset(ipad, 0, block_size);
1321         memset(opad, 0, block_size);
1322
1323         if (auth_keylen > (unsigned int)block_size) {
1324                 QAT_LOG(ERR, "invalid keylen %u", auth_keylen);
1325                 return -EFAULT;
1326         }
1327         rte_memcpy(ipad, auth_key, auth_keylen);
1328         rte_memcpy(opad, auth_key, auth_keylen);
1329
1330         for (i = 0; i < block_size; i++) {
1331                 uint8_t *ipad_ptr = ipad + i;
1332                 uint8_t *opad_ptr = opad + i;
1333                 *ipad_ptr ^= HMAC_IPAD_VALUE;
1334                 *opad_ptr ^= HMAC_OPAD_VALUE;
1335         }
1336
1337         /* do partial hash of ipad and copy to state1 */
1338         if (partial_hash_compute(hash_alg, ipad, p_state_buf)) {
1339                 memset(ipad, 0, block_size);
1340                 memset(opad, 0, block_size);
1341                 QAT_LOG(ERR, "ipad precompute failed");
1342                 return -EFAULT;
1343         }
1344
1345         /*
1346          * State len is a multiple of 8, so may be larger than the digest.
1347          * Put the partial hash of opad state_len bytes after state1
1348          */
1349         *p_state_len = qat_hash_get_state1_size(hash_alg);
1350         if (partial_hash_compute(hash_alg, opad, p_state_buf + *p_state_len)) {
1351                 memset(ipad, 0, block_size);
1352                 memset(opad, 0, block_size);
1353                 QAT_LOG(ERR, "opad precompute failed");
1354                 return -EFAULT;
1355         }
1356
1357         /*  don't leave data lying around */
1358         memset(ipad, 0, block_size);
1359         memset(opad, 0, block_size);
1360         return 0;
1361 }
1362
/*
 * Initialise the common firmware request header fields shared by all
 * QAT symmetric-crypto requests, and encode the protocol flag
 * (none/CCM/GCM/Snow3G/ZUC) into the service-specific flags.
 */
static void
qat_sym_session_init_common_hdr(struct icp_qat_fw_comn_req_hdr *header,
		enum qat_sym_proto_flag proto_flags)
{
	header->hdr_flags =
		ICP_QAT_FW_COMN_HDR_FLAGS_BUILD(ICP_QAT_FW_COMN_REQ_FLAG_SET);
	header->service_type = ICP_QAT_FW_COMN_REQ_CPM_FW_LA;
	header->comn_req_flags =
		ICP_QAT_FW_COMN_FLAGS_BUILD(QAT_COMN_CD_FLD_TYPE_64BIT_ADR,
					QAT_COMN_PTR_TYPE_FLAT);
	ICP_QAT_FW_LA_PARTIAL_SET(header->serv_specif_flags,
				  ICP_QAT_FW_LA_PARTIAL_NONE);
	ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(header->serv_specif_flags,
					   ICP_QAT_FW_CIPH_IV_16BYTE_DATA);

	/* ZUC uses its own flag-set macro; the others share PROTO_SET */
	switch (proto_flags)		{
	case QAT_CRYPTO_PROTO_FLAG_NONE:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_PROTO);
		break;
	case QAT_CRYPTO_PROTO_FLAG_CCM:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_CCM_PROTO);
		break;
	case QAT_CRYPTO_PROTO_FLAG_GCM:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_GCM_PROTO);
		break;
	case QAT_CRYPTO_PROTO_FLAG_SNOW3G:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_SNOW_3G_PROTO);
		break;
	case QAT_CRYPTO_PROTO_FLAG_ZUC:
		ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(header->serv_specif_flags,
			ICP_QAT_FW_LA_ZUC_3G_PROTO);
		break;
	}

	/* no partial-state updates and digest appended out-of-buffer */
	ICP_QAT_FW_LA_UPDATE_STATE_SET(header->serv_specif_flags,
					   ICP_QAT_FW_LA_NO_UPDATE_STATE);
	ICP_QAT_FW_LA_DIGEST_IN_BUFFER_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_DIGEST_IN_BUFFER);
}
1406
1407 /*
1408  *      Snow3G and ZUC should never use this function
1409  *      and set its protocol flag in both cipher and auth part of content
1410  *      descriptor building function
1411  */
1412 static enum qat_sym_proto_flag
1413 qat_get_crypto_proto_flag(uint16_t flags)
1414 {
1415         int proto = ICP_QAT_FW_LA_PROTO_GET(flags);
1416         enum qat_sym_proto_flag qat_proto_flag =
1417                         QAT_CRYPTO_PROTO_FLAG_NONE;
1418
1419         switch (proto) {
1420         case ICP_QAT_FW_LA_GCM_PROTO:
1421                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
1422                 break;
1423         case ICP_QAT_FW_LA_CCM_PROTO:
1424                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
1425                 break;
1426         }
1427
1428         return qat_proto_flag;
1429 }
1430
/**
 * Build the cipher portion of the QAT content descriptor for a session.
 *
 * Writes the cipher config word and key material at cdesc->cd_cur_ptr,
 * programs the cipher CD control header and the common request header
 * template inside cdesc->fw_req, then advances cd_cur_ptr past what was
 * written and records the total CD size in the request parameters.
 *
 * @param cdesc        session being built; qat_cmd, qat_mode, qat_dir and
 *                     qat_cipher_alg must already be set by the caller
 * @param cipherkey    raw cipher key supplied by the application
 * @param cipherkeylen length of @cipherkey in bytes
 * @return 0 on success, -EFAULT if qat_cmd is not a cipher-capable command
 */
int qat_sym_session_aead_create_cd_cipher(struct qat_sym_session *cdesc,
						const uint8_t *cipherkey,
						uint32_t cipherkeylen)
{
	struct icp_qat_hw_cipher_algo_blk *cipher;
	struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
	struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
	struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
	/* cd_ctrl is a union-like area: viewed as cipher or auth ctrl hdr */
	void *ptr = &req_tmpl->cd_ctrl;
	struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
	struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
	enum icp_qat_hw_cipher_convert key_convert;
	enum qat_sym_proto_flag qat_proto_flag =
		QAT_CRYPTO_PROTO_FLAG_NONE;
	uint32_t total_key_size;
	uint16_t cipher_offset, cd_size;
	uint32_t wordIndex  = 0;
	uint32_t *temp_key = NULL;

	/*
	 * Set up the firmware slice chain according to the command type:
	 * cipher-only ends at DRAM write, cipher-then-hash chains into the
	 * auth slice. For HASH_CIPHER the chain was already set by the auth
	 * CD builder, so nothing is done here.
	 */
	if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER) {
		cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
		ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_DRAM_WR);
		/* no auth result for a cipher-only command */
		ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_RET_AUTH_RES);
		ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
		cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
		ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_DRAM_WR);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
		QAT_LOG(ERR, "Invalid param, must be a cipher command.");
		return -EFAULT;
	}

	/*
	 * Decide whether the HW should convert (expand) the key.
	 */
	if (cdesc->qat_mode == ICP_QAT_HW_CIPHER_CTR_MODE) {
		/*
		 * CTR Streaming ciphers are a special case. Decrypt = encrypt
		 * Overriding default values previously set
		 */
		cdesc->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
		key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2
		|| cdesc->qat_cipher_alg ==
			ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)
		key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
	else if (cdesc->qat_dir == ICP_QAT_HW_CIPHER_ENCRYPT)
		key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
	else
		key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;

	/*
	 * Per-algorithm key/state sizing. Note that some protocol flags
	 * (SNOW3G, ZUC) are forced here, while for 3DES/DES/AES the flag
	 * is recovered from whatever was set earlier in the header.
	 * All *_cd_ctrl sizes are in 8-byte (quad-word) units, hence >> 3.
	 */
	if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
		total_key_size = ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ +
			ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
		cipher_cd_ctrl->cipher_state_sz =
			ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;

	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
		total_key_size = ICP_QAT_HW_KASUMI_F8_KEY_SZ;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_KASUMI_BLK_SZ >> 3;
		cipher_cd_ctrl->cipher_padding_sz =
					(2 * ICP_QAT_HW_KASUMI_BLK_SZ) >> 3;
	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES) {
		total_key_size = ICP_QAT_HW_3DES_KEY_SZ;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_3DES_BLK_SZ >> 3;
		qat_proto_flag =
			qat_get_crypto_proto_flag(header->serv_specif_flags);
	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_DES) {
		total_key_size = ICP_QAT_HW_DES_KEY_SZ;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_DES_BLK_SZ >> 3;
		qat_proto_flag =
			qat_get_crypto_proto_flag(header->serv_specif_flags);
	} else if (cdesc->qat_cipher_alg ==
		ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
		total_key_size = ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ +
			ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
		cipher_cd_ctrl->cipher_state_sz =
			ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
		/* ZUC is only supported from GEN2 devices onwards */
		cdesc->min_qat_dev_gen = QAT_GEN2;
	} else {
		/* default (AES family): key size is exactly what was given */
		total_key_size = cipherkeylen;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_AES_BLK_SZ >> 3;
		qat_proto_flag =
			qat_get_crypto_proto_flag(header->serv_specif_flags);
	}
	cipher_cd_ctrl->cipher_key_sz = total_key_size >> 3;
	/* offset of the cipher config block within the content descriptor */
	cipher_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
	cipher_cd_ctrl->cipher_cfg_offset = cipher_offset >> 3;

	header->service_cmd_id = cdesc->qat_cmd;
	qat_sym_session_init_common_hdr(header, qat_proto_flag);

	/* Emit the HW cipher config word followed by the key material */
	cipher = (struct icp_qat_hw_cipher_algo_blk *)cdesc->cd_cur_ptr;
	cipher->cipher_config.val =
	    ICP_QAT_HW_CIPHER_CONFIG_BUILD(cdesc->qat_mode,
					cdesc->qat_cipher_alg, key_convert,
					cdesc->qat_dir);

	if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
		/*
		 * KASUMI F8 needs two copies of the key: the key itself,
		 * then the key XOR-ed with the F8 key modifier.
		 */
		temp_key = (uint32_t *)(cdesc->cd_cur_ptr +
					sizeof(struct icp_qat_hw_cipher_config)
					+ cipherkeylen);
		memcpy(cipher->key, cipherkey, cipherkeylen);
		memcpy(temp_key, cipherkey, cipherkeylen);

		/* XOR Key with KASUMI F8 key modifier at 4 bytes level */
		for (wordIndex = 0; wordIndex < (cipherkeylen >> 2);
								wordIndex++)
			temp_key[wordIndex] ^= KASUMI_F8_KEY_MODIFIER_4_BYTES;

		cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
					cipherkeylen + cipherkeylen;
	} else {
		memcpy(cipher->key, cipherkey, cipherkeylen);
		cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
					cipherkeylen;
	}

	/*
	 * Pad out to total_key_size. For the short 3DES key options the
	 * missing keys are duplicated from K1 rather than zero-filled.
	 */
	if (total_key_size > cipherkeylen) {
		uint32_t padding_size =  total_key_size-cipherkeylen;
		if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
			&& (cipherkeylen == QAT_3DES_KEY_SZ_OPT2)) {
			/* K3 not provided so use K1 = K3*/
			memcpy(cdesc->cd_cur_ptr, cipherkey, padding_size);
		} else if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
			&& (cipherkeylen == QAT_3DES_KEY_SZ_OPT3)) {
			/* K2 and K3 not provided so use K1 = K2 = K3*/
			memcpy(cdesc->cd_cur_ptr, cipherkey,
				cipherkeylen);
			memcpy(cdesc->cd_cur_ptr+cipherkeylen,
				cipherkey, cipherkeylen);
		} else
			memset(cdesc->cd_cur_ptr, 0, padding_size);

		cdesc->cd_cur_ptr += padding_size;
	}
	/* record total CD size (in quad-words, rounded up) for the request */
	cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
	cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;

	return 0;
}
1585
/**
 * Build the auth (hash) portion of the QAT content descriptor for a session.
 *
 * Writes the icp_qat_hw_auth_setup block followed by the algorithm-specific
 * state1/state2 data at cdesc->cd_cur_ptr (precomputed HMAC ipad/opad,
 * GHASH H value, raw keys, etc., depending on qat_hash_alg), fills in the
 * auth CD control header and the auth request parameters, and advances
 * cd_cur_ptr past the state data.
 *
 * @param cdesc      session being built; qat_cmd and qat_hash_alg must
 *                   already be set by the caller
 * @param authkey    raw authentication key supplied by the application
 * @param authkeylen length of @authkey in bytes
 * @param aad_length AAD length in bytes (GCM/CCM only)
 * @param digestsize requested digest length in bytes
 * @param operation  RTE_CRYPTO_AUTH_OP_VERIFY or generate
 * @return 0 on success, -EFAULT on invalid command/algorithm or failed
 *         precompute, or a negative error from qat_hash_get_block_size()
 */
int qat_sym_session_aead_create_cd_auth(struct qat_sym_session *cdesc,
						const uint8_t *authkey,
						uint32_t authkeylen,
						uint32_t aad_length,
						uint32_t digestsize,
						unsigned int operation)
{
	struct icp_qat_hw_auth_setup *hash;
	struct icp_qat_hw_cipher_algo_blk *cipherconfig;
	struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
	struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
	struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
	/* cd_ctrl is a union-like area: viewed as cipher or auth ctrl hdr */
	void *ptr = &req_tmpl->cd_ctrl;
	struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
	struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
	struct icp_qat_fw_la_auth_req_params *auth_param =
		(struct icp_qat_fw_la_auth_req_params *)
		((char *)&req_tmpl->serv_specif_rqpars +
		ICP_QAT_FW_HASH_REQUEST_PARAMETERS_OFFSET);
	uint16_t state1_size = 0, state2_size = 0;
	uint16_t hash_offset, cd_size;
	uint32_t *aad_len = NULL;
	uint32_t wordIndex  = 0;
	uint32_t *pTempKey;
	enum qat_sym_proto_flag qat_proto_flag =
		QAT_CRYPTO_PROTO_FLAG_NONE;

	/*
	 * Set up the firmware slice chain according to the command type:
	 * auth-only ends at DRAM write, hash-then-cipher chains into the
	 * cipher slice. For CIPHER_HASH the chain was already set by the
	 * cipher CD builder, so nothing is done here.
	 */
	if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_AUTH) {
		ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_DRAM_WR);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
		ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
				ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
				ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
				ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
				ICP_QAT_FW_SLICE_DRAM_WR);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
		QAT_LOG(ERR, "Invalid param, must be a hash command.");
		return -EFAULT;
	}

	/*
	 * VERIFY: HW compares against the supplied digest and does not
	 * return one; GENERATE: HW returns the digest and skips compare.
	 */
	if (operation == RTE_CRYPTO_AUTH_OP_VERIFY) {
		ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_NO_RET_AUTH_RES);
		ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_CMP_AUTH_RES);
		cdesc->auth_op = ICP_QAT_HW_AUTH_VERIFY;
	} else {
		ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
					   ICP_QAT_FW_LA_RET_AUTH_RES);
		ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
					   ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
		cdesc->auth_op = ICP_QAT_HW_AUTH_GENERATE;
	}

	/*
	 * Setup the inner hash config
	 */
	hash_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
	hash = (struct icp_qat_hw_auth_setup *)cdesc->cd_cur_ptr;
	hash->auth_config.reserved = 0;
	hash->auth_config.config =
			ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE1,
				cdesc->qat_hash_alg, digestsize);

	/*
	 * Algorithms that do not use an HMAC-style block counter get 0;
	 * everything else stores the (byte-swapped) hash block size.
	 */
	if (cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_KASUMI_F9
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_NULL
			)
		hash->auth_counter.counter = 0;
	else {
		int block_size = qat_hash_get_block_size(cdesc->qat_hash_alg);

		if (block_size < 0)
			return block_size;
		hash->auth_counter.counter = rte_bswap32(block_size);
	}

	cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_auth_setup);

	/*
	 * cd_cur_ptr now points at the state1 information.
	 */
	switch (cdesc->qat_hash_alg) {
	case ICP_QAT_HW_AUTH_ALGO_SHA1:
		/* HMAC: precompute ipad/opad partial hashes into state1 */
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA1, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = RTE_ALIGN_CEIL(ICP_QAT_HW_SHA1_STATE2_SZ, 8);
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA224:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA224, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_SHA224_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA256:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA256, authkey,
			authkeylen, cdesc->cd_cur_ptr,  &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_SHA256_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA384:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA384, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_SHA384_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA512:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA512, authkey,
			authkeylen, cdesc->cd_cur_ptr,  &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_SHA512_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
		/* same HW algo is used for both AES-XCBC-MAC and AES-CMAC;
		 * aes_cmac selects the CMAC variant of the precompute
		 */
		state1_size = ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;

		if (cdesc->aes_cmac)
			memset(cdesc->cd_cur_ptr, 0, state1_size);
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC,
			authkey, authkeylen, cdesc->cd_cur_ptr + state1_size,
			&state2_size, cdesc->aes_cmac)) {
			cdesc->aes_cmac ? QAT_LOG(ERR,
						  "(CMAC)precompute failed")
					: QAT_LOG(ERR,
						  "(XCBC)precompute failed");
			return -EFAULT;
		}
		break;
	case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
	case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
		/* GCM/GMAC: precompute puts the GHASH H value into state2 */
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
		state1_size = ICP_QAT_HW_GALOIS_128_STATE1_SZ;
		if (qat_sym_do_precomputes(cdesc->qat_hash_alg, authkey,
			authkeylen, cdesc->cd_cur_ptr + state1_size,
			&state2_size, cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(GCM)precompute failed");
			return -EFAULT;
		}
		/*
		 * Write (the length of AAD) into bytes 16-19 of state2
		 * in big-endian format. This field is 8 bytes
		 */
		auth_param->u2.aad_sz =
				RTE_ALIGN_CEIL(aad_length, 16);
		auth_param->hash_state_sz = (auth_param->u2.aad_sz) >> 3;

		aad_len = (uint32_t *)(cdesc->cd_cur_ptr +
					ICP_QAT_HW_GALOIS_128_STATE1_SZ +
					ICP_QAT_HW_GALOIS_H_SZ);
		*aad_len = rte_bswap32(aad_length);
		cdesc->aad_len = aad_length;
		break;
	case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2);
		state2_size = ICP_QAT_HW_SNOW_3G_UIA2_STATE2_SZ;
		memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);

		/* an embedded cipher config block follows the hash state,
		 * holding the UIA2 key plus a zeroed IV area
		 */
		cipherconfig = (struct icp_qat_hw_cipher_algo_blk *)
				(cdesc->cd_cur_ptr + state1_size + state2_size);
		cipherconfig->cipher_config.val =
		ICP_QAT_HW_CIPHER_CONFIG_BUILD(ICP_QAT_HW_CIPHER_ECB_MODE,
			ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2,
			ICP_QAT_HW_CIPHER_KEY_CONVERT,
			ICP_QAT_HW_CIPHER_ENCRYPT);
		memcpy(cipherconfig->key, authkey, authkeylen);
		memset(cipherconfig->key + authkeylen,
				0, ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ);
		cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
				authkeylen + ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
		auth_param->hash_state_sz = ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
		break;
	case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
		/* ZUC uses MODE0, so rebuild the auth config word */
		hash->auth_config.config =
			ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE0,
				cdesc->qat_hash_alg, digestsize);
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3);
		state2_size = ICP_QAT_HW_ZUC_3G_EIA3_STATE2_SZ;
		memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size
			+ ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ);

		memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
		cdesc->cd_cur_ptr += state1_size + state2_size
			+ ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
		auth_param->hash_state_sz = ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
		/* ZUC is only supported from GEN2 devices onwards */
		cdesc->min_qat_dev_gen = QAT_GEN2;

		break;
	case ICP_QAT_HW_AUTH_ALGO_MD5:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_MD5, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(MD5)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_MD5_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_NULL:
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_NULL);
		state2_size = ICP_QAT_HW_NULL_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
		/* used by CCM: AAD, when present, is prefixed by the B0
		 * block and the AAD-length info field
		 */
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC);
		state2_size = ICP_QAT_HW_AES_CBC_MAC_KEY_SZ +
				ICP_QAT_HW_AES_CCM_CBC_E_CTR0_SZ;

		if (aad_length > 0) {
			aad_length += ICP_QAT_HW_CCM_AAD_B0_LEN +
			ICP_QAT_HW_CCM_AAD_LEN_INFO;
			auth_param->u2.aad_sz =
			RTE_ALIGN_CEIL(aad_length,
			ICP_QAT_HW_CCM_AAD_ALIGNMENT);
		} else {
			auth_param->u2.aad_sz = ICP_QAT_HW_CCM_AAD_B0_LEN;
		}
		cdesc->aad_len = aad_length;
		hash->auth_counter.counter = 0;

		hash_cd_ctrl->outer_prefix_sz = digestsize;
		auth_param->hash_state_sz = digestsize;

		memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
		break;
	case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_KASUMI_F9);
		state2_size = ICP_QAT_HW_KASUMI_F9_STATE2_SZ;
		memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
		pTempKey = (uint32_t *)(cdesc->cd_cur_ptr + state1_size
							+ authkeylen);
		/*
		* The Inner Hash Initial State2 block must contain IK
		* (Initialisation Key), followed by IK XOR-ed with KM
		* (Key Modifier): IK||(IK^KM).
		*/
		/* write the auth key */
		memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
		/* initialise temp key with auth key */
		memcpy(pTempKey, authkey, authkeylen);
		/* XOR Key with KASUMI F9 key modifier at 4 bytes level */
		for (wordIndex = 0; wordIndex < (authkeylen >> 2); wordIndex++)
			pTempKey[wordIndex] ^= KASUMI_F9_KEY_MODIFIER_4_BYTES;
		break;
	default:
		QAT_LOG(ERR, "Invalid HASH alg %u", cdesc->qat_hash_alg);
		return -EFAULT;
	}

	/* Request template setup */
	qat_sym_session_init_common_hdr(header, qat_proto_flag);
	header->service_cmd_id = cdesc->qat_cmd;

	/* Auth CD config setup */
	hash_cd_ctrl->hash_cfg_offset = hash_offset >> 3;
	hash_cd_ctrl->hash_flags = ICP_QAT_FW_AUTH_HDR_FLAG_NO_NESTED;
	hash_cd_ctrl->inner_res_sz = digestsize;
	hash_cd_ctrl->final_sz = digestsize;
	hash_cd_ctrl->inner_state1_sz = state1_size;
	auth_param->auth_res_sz = digestsize;

	/* state2 follows state1 (aligned to 8 bytes) in the CD; offsets
	 * and sizes handed to firmware are in quad-words, hence >> 3
	 */
	hash_cd_ctrl->inner_state2_sz  = state2_size;
	hash_cd_ctrl->inner_state2_offset = hash_cd_ctrl->hash_cfg_offset +
			((sizeof(struct icp_qat_hw_auth_setup) +
			 RTE_ALIGN_CEIL(hash_cd_ctrl->inner_state1_sz, 8))
					>> 3);

	cdesc->cd_cur_ptr += state1_size + state2_size;
	cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;

	cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
	cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;

	return 0;
}
1892
1893 int qat_sym_validate_aes_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1894 {
1895         switch (key_len) {
1896         case ICP_QAT_HW_AES_128_KEY_SZ:
1897                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
1898                 break;
1899         case ICP_QAT_HW_AES_192_KEY_SZ:
1900                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES192;
1901                 break;
1902         case ICP_QAT_HW_AES_256_KEY_SZ:
1903                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
1904                 break;
1905         default:
1906                 return -EINVAL;
1907         }
1908         return 0;
1909 }
1910
1911 int qat_sym_validate_aes_docsisbpi_key(int key_len,
1912                 enum icp_qat_hw_cipher_algo *alg)
1913 {
1914         switch (key_len) {
1915         case ICP_QAT_HW_AES_128_KEY_SZ:
1916                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
1917                 break;
1918         default:
1919                 return -EINVAL;
1920         }
1921         return 0;
1922 }
1923
1924 int qat_sym_validate_snow3g_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1925 {
1926         switch (key_len) {
1927         case ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ:
1928                 *alg = ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2;
1929                 break;
1930         default:
1931                 return -EINVAL;
1932         }
1933         return 0;
1934 }
1935
1936 int qat_sym_validate_kasumi_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1937 {
1938         switch (key_len) {
1939         case ICP_QAT_HW_KASUMI_KEY_SZ:
1940                 *alg = ICP_QAT_HW_CIPHER_ALGO_KASUMI;
1941                 break;
1942         default:
1943                 return -EINVAL;
1944         }
1945         return 0;
1946 }
1947
1948 int qat_sym_validate_des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1949 {
1950         switch (key_len) {
1951         case ICP_QAT_HW_DES_KEY_SZ:
1952                 *alg = ICP_QAT_HW_CIPHER_ALGO_DES;
1953                 break;
1954         default:
1955                 return -EINVAL;
1956         }
1957         return 0;
1958 }
1959
1960 int qat_sym_validate_3des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1961 {
1962         switch (key_len) {
1963         case QAT_3DES_KEY_SZ_OPT1:
1964         case QAT_3DES_KEY_SZ_OPT2:
1965         case QAT_3DES_KEY_SZ_OPT3:
1966                 *alg = ICP_QAT_HW_CIPHER_ALGO_3DES;
1967                 break;
1968         default:
1969                 return -EINVAL;
1970         }
1971         return 0;
1972 }
1973
1974 int qat_sym_validate_zuc_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1975 {
1976         switch (key_len) {
1977         case ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ:
1978                 *alg = ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3;
1979                 break;
1980         default:
1981                 return -EINVAL;
1982         }
1983         return 0;
1984 }