crypto/qat: optimise check for chained mbufs
[dpdk.git] / drivers / crypto / qat / qat_sym_session.c
1 /* SPDX-License-Identifier: (BSD-3-Clause OR GPL-2.0)
2  * Copyright(c) 2015-2019 Intel Corporation
3  */
4
5 #include <openssl/sha.h>        /* Needed to calculate pre-compute values */
6 #include <openssl/aes.h>        /* Needed to calculate pre-compute values */
7 #include <openssl/md5.h>        /* Needed to calculate pre-compute values */
8 #include <openssl/evp.h>        /* Needed for bpi runt block processing */
9
10 #include <rte_memcpy.h>
11 #include <rte_common.h>
12 #include <rte_spinlock.h>
13 #include <rte_byteorder.h>
14 #include <rte_log.h>
15 #include <rte_malloc.h>
16 #include <rte_crypto_sym.h>
17
18 #include "qat_logs.h"
19 #include "qat_sym_session.h"
20 #include "qat_sym_pmd.h"
21
22 /** Frees a context previously created
23  *  Depends on openssl libcrypto
24  */
25 static void
26 bpi_cipher_ctx_free(void *bpi_ctx)
27 {
28         if (bpi_ctx != NULL)
29                 EVP_CIPHER_CTX_free((EVP_CIPHER_CTX *)bpi_ctx);
30 }
31
32 /** Creates a context in either AES or DES in ECB mode
33  *  Depends on openssl libcrypto
34  */
35 static int
36 bpi_cipher_ctx_init(enum rte_crypto_cipher_algorithm cryptodev_algo,
37                 enum rte_crypto_cipher_operation direction __rte_unused,
38                 const uint8_t *key, uint16_t key_length, void **ctx)
39 {
40         const EVP_CIPHER *algo = NULL;
41         int ret;
42         *ctx = EVP_CIPHER_CTX_new();
43
44         if (*ctx == NULL) {
45                 ret = -ENOMEM;
46                 goto ctx_init_err;
47         }
48
49         if (cryptodev_algo == RTE_CRYPTO_CIPHER_DES_DOCSISBPI)
50                 algo = EVP_des_ecb();
51         else
52                 if (key_length == ICP_QAT_HW_AES_128_KEY_SZ)
53                         algo = EVP_aes_128_ecb();
54                 else
55                         algo = EVP_aes_256_ecb();
56
57         /* IV will be ECB encrypted whether direction is encrypt or decrypt*/
58         if (EVP_EncryptInit_ex(*ctx, algo, NULL, key, 0) != 1) {
59                 ret = -EINVAL;
60                 goto ctx_init_err;
61         }
62
63         return 0;
64
65 ctx_init_err:
66         if (*ctx != NULL)
67                 EVP_CIPHER_CTX_free(*ctx);
68         return ret;
69 }
70
71 static int
72 qat_is_cipher_alg_supported(enum rte_crypto_cipher_algorithm algo,
73                 struct qat_sym_dev_private *internals)
74 {
75         int i = 0;
76         const struct rte_cryptodev_capabilities *capability;
77
78         while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
79                         RTE_CRYPTO_OP_TYPE_UNDEFINED) {
80                 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
81                         continue;
82
83                 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_CIPHER)
84                         continue;
85
86                 if (capability->sym.cipher.algo == algo)
87                         return 1;
88         }
89         return 0;
90 }
91
92 static int
93 qat_is_auth_alg_supported(enum rte_crypto_auth_algorithm algo,
94                 struct qat_sym_dev_private *internals)
95 {
96         int i = 0;
97         const struct rte_cryptodev_capabilities *capability;
98
99         while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
100                         RTE_CRYPTO_OP_TYPE_UNDEFINED) {
101                 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
102                         continue;
103
104                 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_AUTH)
105                         continue;
106
107                 if (capability->sym.auth.algo == algo)
108                         return 1;
109         }
110         return 0;
111 }
112
113 void
114 qat_sym_session_clear(struct rte_cryptodev *dev,
115                 struct rte_cryptodev_sym_session *sess)
116 {
117         uint8_t index = dev->driver_id;
118         void *sess_priv = get_sym_session_private_data(sess, index);
119         struct qat_sym_session *s = (struct qat_sym_session *)sess_priv;
120
121         if (sess_priv) {
122                 if (s->bpi_ctx)
123                         bpi_cipher_ctx_free(s->bpi_ctx);
124                 memset(s, 0, qat_sym_session_get_private_size(dev));
125                 struct rte_mempool *sess_mp = rte_mempool_from_obj(sess_priv);
126
127                 set_sym_session_private_data(sess, index, NULL);
128                 rte_mempool_put(sess_mp, sess_priv);
129         }
130 }
131
132 static int
133 qat_get_cmd_id(const struct rte_crypto_sym_xform *xform)
134 {
135         /* Cipher Only */
136         if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER && xform->next == NULL)
137                 return ICP_QAT_FW_LA_CMD_CIPHER;
138
139         /* Authentication Only */
140         if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH && xform->next == NULL)
141                 return ICP_QAT_FW_LA_CMD_AUTH;
142
143         /* AEAD */
144         if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
145                 /* AES-GCM and AES-CCM works with different direction
146                  * GCM first encrypts and generate hash where AES-CCM
147                  * first generate hash and encrypts. Similar relation
148                  * applies to decryption.
149                  */
150                 if (xform->aead.op == RTE_CRYPTO_AEAD_OP_ENCRYPT)
151                         if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
152                                 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
153                         else
154                                 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
155                 else
156                         if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
157                                 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
158                         else
159                                 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
160         }
161
162         if (xform->next == NULL)
163                 return -1;
164
165         /* Cipher then Authenticate */
166         if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
167                         xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
168                 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
169
170         /* Authenticate then Cipher */
171         if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH &&
172                         xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
173                 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
174
175         return -1;
176 }
177
178 static struct rte_crypto_auth_xform *
179 qat_get_auth_xform(struct rte_crypto_sym_xform *xform)
180 {
181         do {
182                 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH)
183                         return &xform->auth;
184
185                 xform = xform->next;
186         } while (xform);
187
188         return NULL;
189 }
190
191 static struct rte_crypto_cipher_xform *
192 qat_get_cipher_xform(struct rte_crypto_sym_xform *xform)
193 {
194         do {
195                 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
196                         return &xform->cipher;
197
198                 xform = xform->next;
199         } while (xform);
200
201         return NULL;
202 }
203
/**
 * Configure the cipher part of a QAT session from a crypto xform chain.
 *
 * Extracts the cipher xform, records the IV offset/length, validates the
 * key size for the requested algorithm, selects the QAT cipher mode and
 * direction, and builds the cipher content descriptor.
 *
 * @param dev      cryptodev; capabilities are consulted for
 *                 device-dependent algorithms (ZUC)
 * @param xform    xform chain expected to contain a cipher transform
 * @param session  QAT session to populate
 * @return 0 on success; -EINVAL on bad key size / CD build failure,
 *         -ENOTSUP for algorithms this device or PMD does not support.
 *         On error any allocated BPI context is freed and cleared.
 */
int
qat_sym_session_configure_cipher(struct rte_cryptodev *dev,
		struct rte_crypto_sym_xform *xform,
		struct qat_sym_session *session)
{
	struct qat_sym_dev_private *internals = dev->data->dev_private;
	struct rte_crypto_cipher_xform *cipher_xform = NULL;
	int ret;

	/* Get cipher xform from crypto xform chain */
	cipher_xform = qat_get_cipher_xform(xform);

	session->cipher_iv.offset = cipher_xform->iv.offset;
	session->cipher_iv.length = cipher_xform->iv.length;

	switch (cipher_xform->algo) {
	case RTE_CRYPTO_CIPHER_AES_CBC:
		if (qat_sym_validate_aes_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_AES_CTR:
		if (qat_sym_validate_aes_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		break;
	case RTE_CRYPTO_CIPHER_SNOW3G_UEA2:
		if (qat_sym_validate_snow3g_key(cipher_xform->key.length,
					&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid SNOW 3G cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
		break;
	case RTE_CRYPTO_CIPHER_NULL:
		session->qat_cipher_alg = ICP_QAT_HW_CIPHER_ALGO_NULL;
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		break;
	case RTE_CRYPTO_CIPHER_KASUMI_F8:
		if (qat_sym_validate_kasumi_key(cipher_xform->key.length,
					&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid KASUMI cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_F8_MODE;
		break;
	case RTE_CRYPTO_CIPHER_3DES_CBC:
		if (qat_sym_validate_3des_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid 3DES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_DES_CBC:
		if (qat_sym_validate_des_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid DES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_3DES_CTR:
		if (qat_sym_validate_3des_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid 3DES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		break;
	case RTE_CRYPTO_CIPHER_DES_DOCSISBPI:
		/* DOCSIS BPI needs an OpenSSL ECB context to process
		 * runt (partial final) blocks in software.
		 */
		ret = bpi_cipher_ctx_init(
					cipher_xform->algo,
					cipher_xform->op,
					cipher_xform->key.data,
					cipher_xform->key.length,
					&session->bpi_ctx);
		if (ret != 0) {
			QAT_LOG(ERR, "failed to create DES BPI ctx");
			goto error_out;
		}
		if (qat_sym_validate_des_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid DES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_AES_DOCSISBPI:
		/* Same runt-block handling as DES DOCSIS BPI above */
		ret = bpi_cipher_ctx_init(
					cipher_xform->algo,
					cipher_xform->op,
					cipher_xform->key.data,
					cipher_xform->key.length,
					&session->bpi_ctx);
		if (ret != 0) {
			QAT_LOG(ERR, "failed to create AES BPI ctx");
			goto error_out;
		}
		if (qat_sym_validate_aes_docsisbpi_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES DOCSISBPI key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_ZUC_EEA3:
		/* ZUC is only present on some QAT devices; check caps */
		if (!qat_is_cipher_alg_supported(
			cipher_xform->algo, internals)) {
			QAT_LOG(ERR, "%s not supported on this device",
				rte_crypto_cipher_algorithm_strings
					[cipher_xform->algo]);
			ret = -ENOTSUP;
			goto error_out;
		}
		if (qat_sym_validate_zuc_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid ZUC cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
		break;
	case RTE_CRYPTO_CIPHER_AES_XTS:
		/* XTS keys are double-length: half data key, half tweak key */
		if ((cipher_xform->key.length/2) == ICP_QAT_HW_AES_192_KEY_SZ) {
			QAT_LOG(ERR, "AES-XTS-192 not supported");
			ret = -EINVAL;
			goto error_out;
		}
		if (qat_sym_validate_aes_key((cipher_xform->key.length/2),
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES-XTS cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_XTS_MODE;
		break;
	case RTE_CRYPTO_CIPHER_3DES_ECB:
	case RTE_CRYPTO_CIPHER_AES_ECB:
	case RTE_CRYPTO_CIPHER_AES_F8:
	case RTE_CRYPTO_CIPHER_ARC4:
		/* Known algorithms that this PMD does not implement */
		QAT_LOG(ERR, "Crypto QAT PMD: Unsupported Cipher alg %u",
				cipher_xform->algo);
		ret = -ENOTSUP;
		goto error_out;
	default:
		QAT_LOG(ERR, "Crypto: Undefined Cipher specified %u\n",
				cipher_xform->algo);
		ret = -EINVAL;
		goto error_out;
	}

	if (cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
		session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
	else
		session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;

	/* Build the cipher content descriptor with the selected settings */
	if (qat_sym_session_aead_create_cd_cipher(session,
						cipher_xform->key.data,
						cipher_xform->key.length)) {
		ret = -EINVAL;
		goto error_out;
	}

	return 0;

error_out:
	/* Do not leak the BPI context allocated earlier in this call */
	if (session->bpi_ctx) {
		bpi_cipher_ctx_free(session->bpi_ctx);
		session->bpi_ctx = NULL;
	}
	return ret;
}
392
393 int
394 qat_sym_session_configure(struct rte_cryptodev *dev,
395                 struct rte_crypto_sym_xform *xform,
396                 struct rte_cryptodev_sym_session *sess,
397                 struct rte_mempool *mempool)
398 {
399         void *sess_private_data;
400         int ret;
401
402         if (rte_mempool_get(mempool, &sess_private_data)) {
403                 CDEV_LOG_ERR(
404                         "Couldn't get object from session mempool");
405                 return -ENOMEM;
406         }
407
408         ret = qat_sym_session_set_parameters(dev, xform, sess_private_data);
409         if (ret != 0) {
410                 QAT_LOG(ERR,
411                     "Crypto QAT PMD: failed to configure session parameters");
412
413                 /* Return session to mempool */
414                 rte_mempool_put(mempool, sess_private_data);
415                 return ret;
416         }
417
418         set_sym_session_private_data(sess, dev->driver_id,
419                 sess_private_data);
420
421         return 0;
422 }
423
/**
 * Enable the extended-protocol hash flags in the firmware request
 * template for "mixed" cipher/hash sessions.
 *
 * Sets the Use Extended Protocol Flags bit (LW 1), ORs @hash_flag into
 * the hash flags (LW 28), and re-derives the proto flags in LW 1 from
 * the session's cipher algorithm.
 *
 * @param session    session whose fw_req template is modified in place
 * @param hash_flag  hash flag bit(s) to set, e.g. the ZUC EIA3 or
 *                   SNOW3G UIA2 auth-header flag; may be 0
 */
static void
qat_sym_session_set_ext_hash_flags(struct qat_sym_session *session,
		uint8_t hash_flag)
{
	struct icp_qat_fw_comn_req_hdr *header = &session->fw_req.comn_hdr;
	struct icp_qat_fw_cipher_auth_cd_ctrl_hdr *cd_ctrl =
			(struct icp_qat_fw_cipher_auth_cd_ctrl_hdr *)
			session->fw_req.cd_ctrl.content_desc_ctrl_lw;

	/* Set the Use Extended Protocol Flags bit in LW 1 */
	QAT_FIELD_SET(header->comn_req_flags,
			QAT_COMN_EXT_FLAGS_USED,
			QAT_COMN_EXT_FLAGS_BITPOS,
			QAT_COMN_EXT_FLAGS_MASK);

	/* Set Hash Flags in LW 28 */
	cd_ctrl->hash_flags |= hash_flag;

	/* Set proto flags in LW 1 */
	switch (session->qat_cipher_alg) {
	case ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2:
		/* SNOW 3G proto on, ZUC proto off */
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_SNOW_3G_PROTO);
		ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
				header->serv_specif_flags, 0);
		break;
	case ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3:
		/* ZUC proto on, generic proto off */
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_NO_PROTO);
		ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
				header->serv_specif_flags,
				ICP_QAT_FW_LA_ZUC_3G_PROTO);
		break;
	default:
		/* No protocol-specific processing for other ciphers */
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_NO_PROTO);
		ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
				header->serv_specif_flags, 0);
		break;
	}
}
465
466 static void
467 qat_sym_session_handle_mixed(struct qat_sym_session *session)
468 {
469         if (session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3 &&
470                         session->qat_cipher_alg !=
471                         ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
472                 session->min_qat_dev_gen = QAT_GEN3;
473                 qat_sym_session_set_ext_hash_flags(session,
474                         1 << ICP_QAT_FW_AUTH_HDR_FLAG_ZUC_EIA3_BITPOS);
475         } else if (session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2 &&
476                         session->qat_cipher_alg !=
477                         ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
478                 session->min_qat_dev_gen = QAT_GEN3;
479                 qat_sym_session_set_ext_hash_flags(session,
480                         1 << ICP_QAT_FW_AUTH_HDR_FLAG_SNOW3G_UIA2_BITPOS);
481         } else if ((session->aes_cmac ||
482                         session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_NULL) &&
483                         (session->qat_cipher_alg ==
484                         ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2 ||
485                         session->qat_cipher_alg ==
486                         ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)) {
487                 session->min_qat_dev_gen = QAT_GEN3;
488                 qat_sym_session_set_ext_hash_flags(session, 0);
489         }
490 }
491
492 int
493 qat_sym_session_set_parameters(struct rte_cryptodev *dev,
494                 struct rte_crypto_sym_xform *xform, void *session_private)
495 {
496         struct qat_sym_session *session = session_private;
497         int ret;
498         int qat_cmd_id;
499
500         /* Set context descriptor physical address */
501         session->cd_paddr = rte_mempool_virt2iova(session) +
502                         offsetof(struct qat_sym_session, cd);
503
504         session->min_qat_dev_gen = QAT_GEN1;
505
506         /* Get requested QAT command id */
507         qat_cmd_id = qat_get_cmd_id(xform);
508         if (qat_cmd_id < 0 || qat_cmd_id >= ICP_QAT_FW_LA_CMD_DELIMITER) {
509                 QAT_LOG(ERR, "Unsupported xform chain requested");
510                 return -ENOTSUP;
511         }
512         session->qat_cmd = (enum icp_qat_fw_la_cmd_id)qat_cmd_id;
513         switch (session->qat_cmd) {
514         case ICP_QAT_FW_LA_CMD_CIPHER:
515                 ret = qat_sym_session_configure_cipher(dev, xform, session);
516                 if (ret < 0)
517                         return ret;
518                 break;
519         case ICP_QAT_FW_LA_CMD_AUTH:
520                 ret = qat_sym_session_configure_auth(dev, xform, session);
521                 if (ret < 0)
522                         return ret;
523                 break;
524         case ICP_QAT_FW_LA_CMD_CIPHER_HASH:
525                 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
526                         ret = qat_sym_session_configure_aead(dev, xform,
527                                         session);
528                         if (ret < 0)
529                                 return ret;
530                 } else {
531                         ret = qat_sym_session_configure_cipher(dev,
532                                         xform, session);
533                         if (ret < 0)
534                                 return ret;
535                         ret = qat_sym_session_configure_auth(dev,
536                                         xform, session);
537                         if (ret < 0)
538                                 return ret;
539                         /* Special handling of mixed hash+cipher algorithms */
540                         qat_sym_session_handle_mixed(session);
541                 }
542                 break;
543         case ICP_QAT_FW_LA_CMD_HASH_CIPHER:
544                 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
545                         ret = qat_sym_session_configure_aead(dev, xform,
546                                         session);
547                         if (ret < 0)
548                                 return ret;
549                 } else {
550                         ret = qat_sym_session_configure_auth(dev,
551                                         xform, session);
552                         if (ret < 0)
553                                 return ret;
554                         ret = qat_sym_session_configure_cipher(dev,
555                                         xform, session);
556                         if (ret < 0)
557                                 return ret;
558                         /* Special handling of mixed hash+cipher algorithms */
559                         qat_sym_session_handle_mixed(session);
560                 }
561                 break;
562         case ICP_QAT_FW_LA_CMD_TRNG_GET_RANDOM:
563         case ICP_QAT_FW_LA_CMD_TRNG_TEST:
564         case ICP_QAT_FW_LA_CMD_SSL3_KEY_DERIVE:
565         case ICP_QAT_FW_LA_CMD_TLS_V1_1_KEY_DERIVE:
566         case ICP_QAT_FW_LA_CMD_TLS_V1_2_KEY_DERIVE:
567         case ICP_QAT_FW_LA_CMD_MGF1:
568         case ICP_QAT_FW_LA_CMD_AUTH_PRE_COMP:
569         case ICP_QAT_FW_LA_CMD_CIPHER_PRE_COMP:
570         case ICP_QAT_FW_LA_CMD_DELIMITER:
571         QAT_LOG(ERR, "Unsupported Service %u",
572                 session->qat_cmd);
573                 return -ENOTSUP;
574         default:
575         QAT_LOG(ERR, "Unsupported Service %u",
576                 session->qat_cmd);
577                 return -ENOTSUP;
578         }
579
580         return 0;
581 }
582
/**
 * Switch an AES-GCM session to the faster Single-Pass (SPC) mode when
 * the device and IV length allow it.
 *
 * Only applies on QAT GEN3 with a 12-byte (96-bit) GCM IV; otherwise the
 * session is left unchanged and 0 is returned. When applied, the session
 * is converted to a plain CIPHER command in AEAD mode and the firmware
 * request template is rewritten for single-pass operation.
 *
 * @param internals   device private data (used for the device generation)
 * @param session     session to (possibly) convert; modified in place
 * @param aead_xform  AEAD transform supplying key, IV, AAD and digest info
 * @return 0 on success or if single-pass is not applicable,
 *         -EINVAL if building the cipher content descriptor fails
 */
static int
qat_sym_session_handle_single_pass(struct qat_sym_dev_private *internals,
		struct qat_sym_session *session,
		struct rte_crypto_aead_xform *aead_xform)
{
	enum qat_device_gen qat_dev_gen = internals->qat_dev->qat_dev_gen;

	if (qat_dev_gen == QAT_GEN3 &&
			aead_xform->iv.length == QAT_AES_GCM_SPC_IV_SIZE) {
		/* Use faster Single-Pass GCM */
		struct icp_qat_fw_la_cipher_req_params *cipher_param =
				(void *) &session->fw_req.serv_specif_rqpars;

		session->is_single_pass = 1;
		session->min_qat_dev_gen = QAT_GEN3;
		session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER;
		session->qat_mode = ICP_QAT_HW_CIPHER_AEAD_MODE;
		session->cipher_iv.offset = aead_xform->iv.offset;
		session->cipher_iv.length = aead_xform->iv.length;
		if (qat_sym_session_aead_create_cd_cipher(session,
				aead_xform->key.data, aead_xform->key.length))
			return -EINVAL;
		session->aad_len = aead_xform->aad_length;
		session->digest_length = aead_xform->digest_length;
		if (aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
			/* Encrypt: firmware generates and returns the tag */
			session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
			session->auth_op = ICP_QAT_HW_AUTH_GENERATE;
			ICP_QAT_FW_LA_RET_AUTH_SET(
				session->fw_req.comn_hdr.serv_specif_flags,
				ICP_QAT_FW_LA_RET_AUTH_RES);
		} else {
			/* Decrypt: firmware compares against received tag */
			session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
			session->auth_op = ICP_QAT_HW_AUTH_VERIFY;
			ICP_QAT_FW_LA_CMP_AUTH_SET(
				session->fw_req.comn_hdr.serv_specif_flags,
				ICP_QAT_FW_LA_CMP_AUTH_RES);
		}
		/* Mark the request as single-pass, no extra protocol */
		ICP_QAT_FW_LA_SINGLE_PASS_PROTO_FLAG_SET(
				session->fw_req.comn_hdr.serv_specif_flags,
				ICP_QAT_FW_LA_SINGLE_PASS_PROTO);
		ICP_QAT_FW_LA_PROTO_SET(
				session->fw_req.comn_hdr.serv_specif_flags,
				ICP_QAT_FW_LA_NO_PROTO);
		ICP_QAT_FW_LA_GCM_IV_LEN_FLAG_SET(
				session->fw_req.comn_hdr.serv_specif_flags,
				ICP_QAT_FW_LA_GCM_IV_LEN_12_OCTETS);
		session->fw_req.comn_hdr.service_cmd_id =
				ICP_QAT_FW_LA_CMD_CIPHER;
		session->cd.cipher.cipher_config.val =
				ICP_QAT_HW_CIPHER_CONFIG_BUILD(
					ICP_QAT_HW_CIPHER_AEAD_MODE,
					session->qat_cipher_alg,
					ICP_QAT_HW_CIPHER_NO_CONVERT,
					session->qat_dir);
		/* Embed the digest length in the cipher config word */
		QAT_FIELD_SET(session->cd.cipher.cipher_config.val,
				aead_xform->digest_length,
				QAT_CIPHER_AEAD_HASH_CMP_LEN_BITPOS,
				QAT_CIPHER_AEAD_HASH_CMP_LEN_MASK);
		session->cd.cipher.cipher_config.reserved =
				ICP_QAT_HW_CIPHER_CONFIG_BUILD_UPPER(
					aead_xform->aad_length);
		cipher_param->spc_aad_sz = aead_xform->aad_length;
		cipher_param->spc_auth_res_sz = aead_xform->digest_length;
	}
	return 0;
}
649
650 int
651 qat_sym_session_configure_auth(struct rte_cryptodev *dev,
652                                 struct rte_crypto_sym_xform *xform,
653                                 struct qat_sym_session *session)
654 {
655         struct rte_crypto_auth_xform *auth_xform = qat_get_auth_xform(xform);
656         struct qat_sym_dev_private *internals = dev->data->dev_private;
657         const uint8_t *key_data = auth_xform->key.data;
658         uint8_t key_length = auth_xform->key.length;
659         session->aes_cmac = 0;
660
661         switch (auth_xform->algo) {
662         case RTE_CRYPTO_AUTH_SHA1_HMAC:
663                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA1;
664                 break;
665         case RTE_CRYPTO_AUTH_SHA224_HMAC:
666                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA224;
667                 break;
668         case RTE_CRYPTO_AUTH_SHA256_HMAC:
669                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA256;
670                 break;
671         case RTE_CRYPTO_AUTH_SHA384_HMAC:
672                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA384;
673                 break;
674         case RTE_CRYPTO_AUTH_SHA512_HMAC:
675                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA512;
676                 break;
677         case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
678                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
679                 break;
680         case RTE_CRYPTO_AUTH_AES_CMAC:
681                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
682                 session->aes_cmac = 1;
683                 break;
684         case RTE_CRYPTO_AUTH_AES_GMAC:
685                 if (qat_sym_validate_aes_key(auth_xform->key.length,
686                                 &session->qat_cipher_alg) != 0) {
687                         QAT_LOG(ERR, "Invalid AES key size");
688                         return -EINVAL;
689                 }
690                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
691                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
692
693                 break;
694         case RTE_CRYPTO_AUTH_SNOW3G_UIA2:
695                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2;
696                 break;
697         case RTE_CRYPTO_AUTH_MD5_HMAC:
698                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_MD5;
699                 break;
700         case RTE_CRYPTO_AUTH_NULL:
701                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_NULL;
702                 break;
703         case RTE_CRYPTO_AUTH_KASUMI_F9:
704                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_KASUMI_F9;
705                 break;
706         case RTE_CRYPTO_AUTH_ZUC_EIA3:
707                 if (!qat_is_auth_alg_supported(auth_xform->algo, internals)) {
708                         QAT_LOG(ERR, "%s not supported on this device",
709                                 rte_crypto_auth_algorithm_strings
710                                 [auth_xform->algo]);
711                         return -ENOTSUP;
712                 }
713                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3;
714                 break;
715         case RTE_CRYPTO_AUTH_SHA1:
716         case RTE_CRYPTO_AUTH_SHA256:
717         case RTE_CRYPTO_AUTH_SHA512:
718         case RTE_CRYPTO_AUTH_SHA224:
719         case RTE_CRYPTO_AUTH_SHA384:
720         case RTE_CRYPTO_AUTH_MD5:
721         case RTE_CRYPTO_AUTH_AES_CBC_MAC:
722                 QAT_LOG(ERR, "Crypto: Unsupported hash alg %u",
723                                 auth_xform->algo);
724                 return -ENOTSUP;
725         default:
726                 QAT_LOG(ERR, "Crypto: Undefined Hash algo %u specified",
727                                 auth_xform->algo);
728                 return -EINVAL;
729         }
730
731         session->auth_iv.offset = auth_xform->iv.offset;
732         session->auth_iv.length = auth_xform->iv.length;
733
734         if (auth_xform->algo == RTE_CRYPTO_AUTH_AES_GMAC) {
735                 if (auth_xform->op == RTE_CRYPTO_AUTH_OP_GENERATE) {
736                         session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER_HASH;
737                         session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
738                         /*
739                          * It needs to create cipher desc content first,
740                          * then authentication
741                          */
742
743                         if (qat_sym_session_aead_create_cd_cipher(session,
744                                                 auth_xform->key.data,
745                                                 auth_xform->key.length))
746                                 return -EINVAL;
747
748                         if (qat_sym_session_aead_create_cd_auth(session,
749                                                 key_data,
750                                                 key_length,
751                                                 0,
752                                                 auth_xform->digest_length,
753                                                 auth_xform->op))
754                                 return -EINVAL;
755                 } else {
756                         session->qat_cmd = ICP_QAT_FW_LA_CMD_HASH_CIPHER;
757                         session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
758                         /*
759                          * It needs to create authentication desc content first,
760                          * then cipher
761                          */
762
763                         if (qat_sym_session_aead_create_cd_auth(session,
764                                         key_data,
765                                         key_length,
766                                         0,
767                                         auth_xform->digest_length,
768                                         auth_xform->op))
769                                 return -EINVAL;
770
771                         if (qat_sym_session_aead_create_cd_cipher(session,
772                                                 auth_xform->key.data,
773                                                 auth_xform->key.length))
774                                 return -EINVAL;
775                 }
776                 /* Restore to authentication only only */
777                 session->qat_cmd = ICP_QAT_FW_LA_CMD_AUTH;
778         } else {
779                 if (qat_sym_session_aead_create_cd_auth(session,
780                                 key_data,
781                                 key_length,
782                                 0,
783                                 auth_xform->digest_length,
784                                 auth_xform->op))
785                         return -EINVAL;
786         }
787
788         session->digest_length = auth_xform->digest_length;
789         return 0;
790 }
791
792 int
793 qat_sym_session_configure_aead(struct rte_cryptodev *dev,
794                                 struct rte_crypto_sym_xform *xform,
795                                 struct qat_sym_session *session)
796 {
797         struct rte_crypto_aead_xform *aead_xform = &xform->aead;
798         enum rte_crypto_auth_operation crypto_operation;
799
800         /*
801          * Store AEAD IV parameters as cipher IV,
802          * to avoid unnecessary memory usage
803          */
804         session->cipher_iv.offset = xform->aead.iv.offset;
805         session->cipher_iv.length = xform->aead.iv.length;
806
807         switch (aead_xform->algo) {
808         case RTE_CRYPTO_AEAD_AES_GCM:
809                 if (qat_sym_validate_aes_key(aead_xform->key.length,
810                                 &session->qat_cipher_alg) != 0) {
811                         QAT_LOG(ERR, "Invalid AES key size");
812                         return -EINVAL;
813                 }
814                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
815                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
816                 break;
817         case RTE_CRYPTO_AEAD_AES_CCM:
818                 if (qat_sym_validate_aes_key(aead_xform->key.length,
819                                 &session->qat_cipher_alg) != 0) {
820                         QAT_LOG(ERR, "Invalid AES key size");
821                         return -EINVAL;
822                 }
823                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
824                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC;
825                 break;
826         default:
827                 QAT_LOG(ERR, "Crypto: Undefined AEAD specified %u\n",
828                                 aead_xform->algo);
829                 return -EINVAL;
830         }
831
832         session->is_single_pass = 0;
833         if (aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) {
834                 /* Use faster Single-Pass GCM if possible */
835                 int res = qat_sym_session_handle_single_pass(
836                                 dev->data->dev_private, session, aead_xform);
837                 if (res < 0)
838                         return res;
839                 if (session->is_single_pass)
840                         return 0;
841         }
842
843         if ((aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT &&
844                         aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) ||
845                         (aead_xform->op == RTE_CRYPTO_AEAD_OP_DECRYPT &&
846                         aead_xform->algo == RTE_CRYPTO_AEAD_AES_CCM)) {
847                 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
848                 /*
849                  * It needs to create cipher desc content first,
850                  * then authentication
851                  */
852                 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
853                         RTE_CRYPTO_AUTH_OP_GENERATE : RTE_CRYPTO_AUTH_OP_VERIFY;
854
855                 if (qat_sym_session_aead_create_cd_cipher(session,
856                                         aead_xform->key.data,
857                                         aead_xform->key.length))
858                         return -EINVAL;
859
860                 if (qat_sym_session_aead_create_cd_auth(session,
861                                         aead_xform->key.data,
862                                         aead_xform->key.length,
863                                         aead_xform->aad_length,
864                                         aead_xform->digest_length,
865                                         crypto_operation))
866                         return -EINVAL;
867         } else {
868                 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
869                 /*
870                  * It needs to create authentication desc content first,
871                  * then cipher
872                  */
873
874                 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
875                         RTE_CRYPTO_AUTH_OP_VERIFY : RTE_CRYPTO_AUTH_OP_GENERATE;
876
877                 if (qat_sym_session_aead_create_cd_auth(session,
878                                         aead_xform->key.data,
879                                         aead_xform->key.length,
880                                         aead_xform->aad_length,
881                                         aead_xform->digest_length,
882                                         crypto_operation))
883                         return -EINVAL;
884
885                 if (qat_sym_session_aead_create_cd_cipher(session,
886                                         aead_xform->key.data,
887                                         aead_xform->key.length))
888                         return -EINVAL;
889         }
890
891         session->digest_length = aead_xform->digest_length;
892         return 0;
893 }
894
895 unsigned int qat_sym_session_get_private_size(
896                 struct rte_cryptodev *dev __rte_unused)
897 {
898         return RTE_ALIGN_CEIL(sizeof(struct qat_sym_session), 8);
899 }
900
901 /* returns block size in bytes per cipher algo */
902 int qat_cipher_get_block_size(enum icp_qat_hw_cipher_algo qat_cipher_alg)
903 {
904         switch (qat_cipher_alg) {
905         case ICP_QAT_HW_CIPHER_ALGO_DES:
906                 return ICP_QAT_HW_DES_BLK_SZ;
907         case ICP_QAT_HW_CIPHER_ALGO_3DES:
908                 return ICP_QAT_HW_3DES_BLK_SZ;
909         case ICP_QAT_HW_CIPHER_ALGO_AES128:
910         case ICP_QAT_HW_CIPHER_ALGO_AES192:
911         case ICP_QAT_HW_CIPHER_ALGO_AES256:
912                 return ICP_QAT_HW_AES_BLK_SZ;
913         default:
914                 QAT_LOG(ERR, "invalid block cipher alg %u", qat_cipher_alg);
915                 return -EFAULT;
916         };
917         return -EFAULT;
918 }
919
920 /*
921  * Returns size in bytes per hash algo for state1 size field in cd_ctrl
922  * This is digest size rounded up to nearest quadword
923  */
924 static int qat_hash_get_state1_size(enum icp_qat_hw_auth_algo qat_hash_alg)
925 {
926         switch (qat_hash_alg) {
927         case ICP_QAT_HW_AUTH_ALGO_SHA1:
928                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA1_STATE1_SZ,
929                                                 QAT_HW_DEFAULT_ALIGNMENT);
930         case ICP_QAT_HW_AUTH_ALGO_SHA224:
931                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA224_STATE1_SZ,
932                                                 QAT_HW_DEFAULT_ALIGNMENT);
933         case ICP_QAT_HW_AUTH_ALGO_SHA256:
934                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA256_STATE1_SZ,
935                                                 QAT_HW_DEFAULT_ALIGNMENT);
936         case ICP_QAT_HW_AUTH_ALGO_SHA384:
937                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA384_STATE1_SZ,
938                                                 QAT_HW_DEFAULT_ALIGNMENT);
939         case ICP_QAT_HW_AUTH_ALGO_SHA512:
940                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
941                                                 QAT_HW_DEFAULT_ALIGNMENT);
942         case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
943                 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ,
944                                                 QAT_HW_DEFAULT_ALIGNMENT);
945         case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
946         case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
947                 return QAT_HW_ROUND_UP(ICP_QAT_HW_GALOIS_128_STATE1_SZ,
948                                                 QAT_HW_DEFAULT_ALIGNMENT);
949         case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
950                 return QAT_HW_ROUND_UP(ICP_QAT_HW_ZUC_3G_EIA3_STATE1_SZ,
951                                                 QAT_HW_DEFAULT_ALIGNMENT);
952         case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
953                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SNOW_3G_UIA2_STATE1_SZ,
954                                                 QAT_HW_DEFAULT_ALIGNMENT);
955         case ICP_QAT_HW_AUTH_ALGO_MD5:
956                 return QAT_HW_ROUND_UP(ICP_QAT_HW_MD5_STATE1_SZ,
957                                                 QAT_HW_DEFAULT_ALIGNMENT);
958         case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
959                 return QAT_HW_ROUND_UP(ICP_QAT_HW_KASUMI_F9_STATE1_SZ,
960                                                 QAT_HW_DEFAULT_ALIGNMENT);
961         case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
962                 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_CBC_MAC_STATE1_SZ,
963                                                 QAT_HW_DEFAULT_ALIGNMENT);
964         case ICP_QAT_HW_AUTH_ALGO_NULL:
965                 return QAT_HW_ROUND_UP(ICP_QAT_HW_NULL_STATE1_SZ,
966                                                 QAT_HW_DEFAULT_ALIGNMENT);
967         case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
968                 /* return maximum state1 size in this case */
969                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
970                                                 QAT_HW_DEFAULT_ALIGNMENT);
971         default:
972                 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
973                 return -EFAULT;
974         };
975         return -EFAULT;
976 }
977
978 /* returns digest size in bytes  per hash algo */
979 static int qat_hash_get_digest_size(enum icp_qat_hw_auth_algo qat_hash_alg)
980 {
981         switch (qat_hash_alg) {
982         case ICP_QAT_HW_AUTH_ALGO_SHA1:
983                 return ICP_QAT_HW_SHA1_STATE1_SZ;
984         case ICP_QAT_HW_AUTH_ALGO_SHA224:
985                 return ICP_QAT_HW_SHA224_STATE1_SZ;
986         case ICP_QAT_HW_AUTH_ALGO_SHA256:
987                 return ICP_QAT_HW_SHA256_STATE1_SZ;
988         case ICP_QAT_HW_AUTH_ALGO_SHA384:
989                 return ICP_QAT_HW_SHA384_STATE1_SZ;
990         case ICP_QAT_HW_AUTH_ALGO_SHA512:
991                 return ICP_QAT_HW_SHA512_STATE1_SZ;
992         case ICP_QAT_HW_AUTH_ALGO_MD5:
993                 return ICP_QAT_HW_MD5_STATE1_SZ;
994         case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
995                 return ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
996         case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
997                 /* return maximum digest size in this case */
998                 return ICP_QAT_HW_SHA512_STATE1_SZ;
999         default:
1000                 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
1001                 return -EFAULT;
1002         };
1003         return -EFAULT;
1004 }
1005
1006 /* returns block size in byes per hash algo */
1007 static int qat_hash_get_block_size(enum icp_qat_hw_auth_algo qat_hash_alg)
1008 {
1009         switch (qat_hash_alg) {
1010         case ICP_QAT_HW_AUTH_ALGO_SHA1:
1011                 return SHA_CBLOCK;
1012         case ICP_QAT_HW_AUTH_ALGO_SHA224:
1013                 return SHA256_CBLOCK;
1014         case ICP_QAT_HW_AUTH_ALGO_SHA256:
1015                 return SHA256_CBLOCK;
1016         case ICP_QAT_HW_AUTH_ALGO_SHA384:
1017                 return SHA512_CBLOCK;
1018         case ICP_QAT_HW_AUTH_ALGO_SHA512:
1019                 return SHA512_CBLOCK;
1020         case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
1021                 return 16;
1022         case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
1023                 return ICP_QAT_HW_AES_BLK_SZ;
1024         case ICP_QAT_HW_AUTH_ALGO_MD5:
1025                 return MD5_CBLOCK;
1026         case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
1027                 /* return maximum block size in this case */
1028                 return SHA512_CBLOCK;
1029         default:
1030                 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
1031                 return -EFAULT;
1032         };
1033         return -EFAULT;
1034 }
1035
1036 static int partial_hash_sha1(uint8_t *data_in, uint8_t *data_out)
1037 {
1038         SHA_CTX ctx;
1039
1040         if (!SHA1_Init(&ctx))
1041                 return -EFAULT;
1042         SHA1_Transform(&ctx, data_in);
1043         rte_memcpy(data_out, &ctx, SHA_DIGEST_LENGTH);
1044         return 0;
1045 }
1046
1047 static int partial_hash_sha224(uint8_t *data_in, uint8_t *data_out)
1048 {
1049         SHA256_CTX ctx;
1050
1051         if (!SHA224_Init(&ctx))
1052                 return -EFAULT;
1053         SHA256_Transform(&ctx, data_in);
1054         rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
1055         return 0;
1056 }
1057
1058 static int partial_hash_sha256(uint8_t *data_in, uint8_t *data_out)
1059 {
1060         SHA256_CTX ctx;
1061
1062         if (!SHA256_Init(&ctx))
1063                 return -EFAULT;
1064         SHA256_Transform(&ctx, data_in);
1065         rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
1066         return 0;
1067 }
1068
1069 static int partial_hash_sha384(uint8_t *data_in, uint8_t *data_out)
1070 {
1071         SHA512_CTX ctx;
1072
1073         if (!SHA384_Init(&ctx))
1074                 return -EFAULT;
1075         SHA512_Transform(&ctx, data_in);
1076         rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
1077         return 0;
1078 }
1079
1080 static int partial_hash_sha512(uint8_t *data_in, uint8_t *data_out)
1081 {
1082         SHA512_CTX ctx;
1083
1084         if (!SHA512_Init(&ctx))
1085                 return -EFAULT;
1086         SHA512_Transform(&ctx, data_in);
1087         rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
1088         return 0;
1089 }
1090
1091 static int partial_hash_md5(uint8_t *data_in, uint8_t *data_out)
1092 {
1093         MD5_CTX ctx;
1094
1095         if (!MD5_Init(&ctx))
1096                 return -EFAULT;
1097         MD5_Transform(&ctx, data_in);
1098         rte_memcpy(data_out, &ctx, MD5_DIGEST_LENGTH);
1099
1100         return 0;
1101 }
1102
1103 static int partial_hash_compute(enum icp_qat_hw_auth_algo hash_alg,
1104                         uint8_t *data_in,
1105                         uint8_t *data_out)
1106 {
1107         int digest_size;
1108         uint8_t digest[qat_hash_get_digest_size(
1109                         ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1110         uint32_t *hash_state_out_be32;
1111         uint64_t *hash_state_out_be64;
1112         int i;
1113
1114         digest_size = qat_hash_get_digest_size(hash_alg);
1115         if (digest_size <= 0)
1116                 return -EFAULT;
1117
1118         hash_state_out_be32 = (uint32_t *)data_out;
1119         hash_state_out_be64 = (uint64_t *)data_out;
1120
1121         switch (hash_alg) {
1122         case ICP_QAT_HW_AUTH_ALGO_SHA1:
1123                 if (partial_hash_sha1(data_in, digest))
1124                         return -EFAULT;
1125                 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
1126                         *hash_state_out_be32 =
1127                                 rte_bswap32(*(((uint32_t *)digest)+i));
1128                 break;
1129         case ICP_QAT_HW_AUTH_ALGO_SHA224:
1130                 if (partial_hash_sha224(data_in, digest))
1131                         return -EFAULT;
1132                 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
1133                         *hash_state_out_be32 =
1134                                 rte_bswap32(*(((uint32_t *)digest)+i));
1135                 break;
1136         case ICP_QAT_HW_AUTH_ALGO_SHA256:
1137                 if (partial_hash_sha256(data_in, digest))
1138                         return -EFAULT;
1139                 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
1140                         *hash_state_out_be32 =
1141                                 rte_bswap32(*(((uint32_t *)digest)+i));
1142                 break;
1143         case ICP_QAT_HW_AUTH_ALGO_SHA384:
1144                 if (partial_hash_sha384(data_in, digest))
1145                         return -EFAULT;
1146                 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
1147                         *hash_state_out_be64 =
1148                                 rte_bswap64(*(((uint64_t *)digest)+i));
1149                 break;
1150         case ICP_QAT_HW_AUTH_ALGO_SHA512:
1151                 if (partial_hash_sha512(data_in, digest))
1152                         return -EFAULT;
1153                 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
1154                         *hash_state_out_be64 =
1155                                 rte_bswap64(*(((uint64_t *)digest)+i));
1156                 break;
1157         case ICP_QAT_HW_AUTH_ALGO_MD5:
1158                 if (partial_hash_md5(data_in, data_out))
1159                         return -EFAULT;
1160                 break;
1161         default:
1162                 QAT_LOG(ERR, "invalid hash alg %u", hash_alg);
1163                 return -EFAULT;
1164         }
1165
1166         return 0;
1167 }
1168 #define HMAC_IPAD_VALUE 0x36
1169 #define HMAC_OPAD_VALUE 0x5c
1170 #define HASH_XCBC_PRECOMP_KEY_NUM 3
1171
1172 static const uint8_t AES_CMAC_SEED[ICP_QAT_HW_AES_128_KEY_SZ];
1173
1174 static void aes_cmac_key_derive(uint8_t *base, uint8_t *derived)
1175 {
1176         int i;
1177
1178         derived[0] = base[0] << 1;
1179         for (i = 1; i < ICP_QAT_HW_AES_BLK_SZ ; i++) {
1180                 derived[i] = base[i] << 1;
1181                 derived[i - 1] |= base[i] >> 7;
1182         }
1183
1184         if (base[0] & 0x80)
1185                 derived[ICP_QAT_HW_AES_BLK_SZ - 1] ^= QAT_AES_CMAC_CONST_RB;
1186 }
1187
1188 static int qat_sym_do_precomputes(enum icp_qat_hw_auth_algo hash_alg,
1189                                 const uint8_t *auth_key,
1190                                 uint16_t auth_keylen,
1191                                 uint8_t *p_state_buf,
1192                                 uint16_t *p_state_len,
1193                                 uint8_t aes_cmac)
1194 {
1195         int block_size;
1196         uint8_t ipad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1197         uint8_t opad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1198         int i;
1199
1200         if (hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC) {
1201
1202                 /* CMAC */
1203                 if (aes_cmac) {
1204                         AES_KEY enc_key;
1205                         uint8_t *in = NULL;
1206                         uint8_t k0[ICP_QAT_HW_AES_128_KEY_SZ];
1207                         uint8_t *k1, *k2;
1208
1209                         auth_keylen = ICP_QAT_HW_AES_128_KEY_SZ;
1210
1211                         in = rte_zmalloc("AES CMAC K1",
1212                                          ICP_QAT_HW_AES_128_KEY_SZ, 16);
1213
1214                         if (in == NULL) {
1215                                 QAT_LOG(ERR, "Failed to alloc memory");
1216                                 return -ENOMEM;
1217                         }
1218
1219                         rte_memcpy(in, AES_CMAC_SEED,
1220                                    ICP_QAT_HW_AES_128_KEY_SZ);
1221                         rte_memcpy(p_state_buf, auth_key, auth_keylen);
1222
1223                         if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1224                                 &enc_key) != 0) {
1225                                 rte_free(in);
1226                                 return -EFAULT;
1227                         }
1228
1229                         AES_encrypt(in, k0, &enc_key);
1230
1231                         k1 = p_state_buf + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1232                         k2 = k1 + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1233
1234                         aes_cmac_key_derive(k0, k1);
1235                         aes_cmac_key_derive(k1, k2);
1236
1237                         memset(k0, 0, ICP_QAT_HW_AES_128_KEY_SZ);
1238                         *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
1239                         rte_free(in);
1240                         return 0;
1241                 } else {
1242                         static uint8_t qat_aes_xcbc_key_seed[
1243                                         ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ] = {
1244                                 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1245                                 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1246                                 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1247                                 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1248                                 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1249                                 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1250                         };
1251
1252                         uint8_t *in = NULL;
1253                         uint8_t *out = p_state_buf;
1254                         int x;
1255                         AES_KEY enc_key;
1256
1257                         in = rte_zmalloc("working mem for key",
1258                                         ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ, 16);
1259                         if (in == NULL) {
1260                                 QAT_LOG(ERR, "Failed to alloc memory");
1261                                 return -ENOMEM;
1262                         }
1263
1264                         rte_memcpy(in, qat_aes_xcbc_key_seed,
1265                                         ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1266                         for (x = 0; x < HASH_XCBC_PRECOMP_KEY_NUM; x++) {
1267                                 if (AES_set_encrypt_key(auth_key,
1268                                                         auth_keylen << 3,
1269                                                         &enc_key) != 0) {
1270                                         rte_free(in -
1271                                           (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ));
1272                                         memset(out -
1273                                            (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ),
1274                                           0, ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1275                                         return -EFAULT;
1276                                 }
1277                                 AES_encrypt(in, out, &enc_key);
1278                                 in += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1279                                 out += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1280                         }
1281                         *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
1282                         rte_free(in - x*ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ);
1283                         return 0;
1284                 }
1285
1286         } else if ((hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_128) ||
1287                 (hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_64)) {
1288                 uint8_t *in = NULL;
1289                 uint8_t *out = p_state_buf;
1290                 AES_KEY enc_key;
1291
1292                 memset(p_state_buf, 0, ICP_QAT_HW_GALOIS_H_SZ +
1293                                 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1294                                 ICP_QAT_HW_GALOIS_E_CTR0_SZ);
1295                 in = rte_zmalloc("working mem for key",
1296                                 ICP_QAT_HW_GALOIS_H_SZ, 16);
1297                 if (in == NULL) {
1298                         QAT_LOG(ERR, "Failed to alloc memory");
1299                         return -ENOMEM;
1300                 }
1301
1302                 memset(in, 0, ICP_QAT_HW_GALOIS_H_SZ);
1303                 if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1304                         &enc_key) != 0) {
1305                         return -EFAULT;
1306                 }
1307                 AES_encrypt(in, out, &enc_key);
1308                 *p_state_len = ICP_QAT_HW_GALOIS_H_SZ +
1309                                 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1310                                 ICP_QAT_HW_GALOIS_E_CTR0_SZ;
1311                 rte_free(in);
1312                 return 0;
1313         }
1314
1315         block_size = qat_hash_get_block_size(hash_alg);
1316         if (block_size < 0)
1317                 return block_size;
1318         /* init ipad and opad from key and xor with fixed values */
1319         memset(ipad, 0, block_size);
1320         memset(opad, 0, block_size);
1321
1322         if (auth_keylen > (unsigned int)block_size) {
1323                 QAT_LOG(ERR, "invalid keylen %u", auth_keylen);
1324                 return -EFAULT;
1325         }
1326         rte_memcpy(ipad, auth_key, auth_keylen);
1327         rte_memcpy(opad, auth_key, auth_keylen);
1328
1329         for (i = 0; i < block_size; i++) {
1330                 uint8_t *ipad_ptr = ipad + i;
1331                 uint8_t *opad_ptr = opad + i;
1332                 *ipad_ptr ^= HMAC_IPAD_VALUE;
1333                 *opad_ptr ^= HMAC_OPAD_VALUE;
1334         }
1335
1336         /* do partial hash of ipad and copy to state1 */
1337         if (partial_hash_compute(hash_alg, ipad, p_state_buf)) {
1338                 memset(ipad, 0, block_size);
1339                 memset(opad, 0, block_size);
1340                 QAT_LOG(ERR, "ipad precompute failed");
1341                 return -EFAULT;
1342         }
1343
1344         /*
1345          * State len is a multiple of 8, so may be larger than the digest.
1346          * Put the partial hash of opad state_len bytes after state1
1347          */
1348         *p_state_len = qat_hash_get_state1_size(hash_alg);
1349         if (partial_hash_compute(hash_alg, opad, p_state_buf + *p_state_len)) {
1350                 memset(ipad, 0, block_size);
1351                 memset(opad, 0, block_size);
1352                 QAT_LOG(ERR, "opad precompute failed");
1353                 return -EFAULT;
1354         }
1355
1356         /*  don't leave data lying around */
1357         memset(ipad, 0, block_size);
1358         memset(opad, 0, block_size);
1359         return 0;
1360 }
1361
/* Fills in the common LA request header template for a session and
 * translates the session protocol flag into the corresponding firmware
 * protocol bits in serv_specif_flags.
 */
static void
qat_sym_session_init_common_hdr(struct icp_qat_fw_comn_req_hdr *header,
		enum qat_sym_proto_flag proto_flags)
{
	header->hdr_flags =
		ICP_QAT_FW_COMN_HDR_FLAGS_BUILD(ICP_QAT_FW_COMN_REQ_FLAG_SET);
	header->service_type = ICP_QAT_FW_COMN_REQ_CPM_FW_LA;
	header->comn_req_flags =
		ICP_QAT_FW_COMN_FLAGS_BUILD(QAT_COMN_CD_FLD_TYPE_64BIT_ADR,
					QAT_COMN_PTR_TYPE_FLAT);
	/* No partial-packet processing: each request stands alone */
	ICP_QAT_FW_LA_PARTIAL_SET(header->serv_specif_flags,
				  ICP_QAT_FW_LA_PARTIAL_NONE);
	ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(header->serv_specif_flags,
					   ICP_QAT_FW_CIPH_IV_16BYTE_DATA);

	/* Set the FW protocol bits; note ZUC uses its own flag macro.
	 * No default case: all enum values are handled above/below.
	 */
	switch (proto_flags)		{
	case QAT_CRYPTO_PROTO_FLAG_NONE:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_PROTO);
		break;
	case QAT_CRYPTO_PROTO_FLAG_CCM:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_CCM_PROTO);
		break;
	case QAT_CRYPTO_PROTO_FLAG_GCM:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_GCM_PROTO);
		break;
	case QAT_CRYPTO_PROTO_FLAG_SNOW3G:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_SNOW_3G_PROTO);
		break;
	case QAT_CRYPTO_PROTO_FLAG_ZUC:
		ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(header->serv_specif_flags,
			ICP_QAT_FW_LA_ZUC_3G_PROTO);
		break;
	}

	/* Session templates never carry partial state between requests */
	ICP_QAT_FW_LA_UPDATE_STATE_SET(header->serv_specif_flags,
					   ICP_QAT_FW_LA_NO_UPDATE_STATE);
	ICP_QAT_FW_LA_DIGEST_IN_BUFFER_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_DIGEST_IN_BUFFER);
}
1405
1406 /*
1407  *      Snow3G and ZUC should never use this function
1408  *      and set its protocol flag in both cipher and auth part of content
1409  *      descriptor building function
1410  */
1411 static enum qat_sym_proto_flag
1412 qat_get_crypto_proto_flag(uint16_t flags)
1413 {
1414         int proto = ICP_QAT_FW_LA_PROTO_GET(flags);
1415         enum qat_sym_proto_flag qat_proto_flag =
1416                         QAT_CRYPTO_PROTO_FLAG_NONE;
1417
1418         switch (proto) {
1419         case ICP_QAT_FW_LA_GCM_PROTO:
1420                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
1421                 break;
1422         case ICP_QAT_FW_LA_CCM_PROTO:
1423                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
1424                 break;
1425         }
1426
1427         return qat_proto_flag;
1428 }
1429
/**
 * Build the cipher portion of a session's content descriptor.
 *
 * Writes the cipher config word and key material at cdesc->cd_cur_ptr
 * (advancing it), programs the cipher slice chaining and key/state sizes
 * in the firmware request template, and initialises the common header.
 *
 * @param cdesc         session descriptor under construction
 * @param cipherkey     cipher key bytes supplied by the application
 * @param cipherkeylen  length of cipherkey in bytes
 * @return 0 on success, -EFAULT if the session command is not a cipher one
 */
int qat_sym_session_aead_create_cd_cipher(struct qat_sym_session *cdesc,
						const uint8_t *cipherkey,
						uint32_t cipherkeylen)
{
	struct icp_qat_hw_cipher_algo_blk *cipher;
	struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
	struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
	struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
	/* cipher and auth cd_ctrl views alias the same cd_ctrl field */
	void *ptr = &req_tmpl->cd_ctrl;
	struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
	struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
	enum icp_qat_hw_cipher_convert key_convert;
	enum qat_sym_proto_flag qat_proto_flag =
		QAT_CRYPTO_PROTO_FLAG_NONE;
	uint32_t total_key_size;
	uint16_t cipher_offset, cd_size;
	uint32_t wordIndex  = 0;
	uint32_t *temp_key = NULL;

	if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER) {
		/* Cipher-only: cipher slice writes result straight to DRAM */
		cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
		ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_DRAM_WR);
		ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_RET_AUTH_RES);
		ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
		/* Cipher-then-hash: chain cipher slice into the auth slice */
		cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
		ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_DRAM_WR);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
		/* HASH_CIPHER slice chaining is set up by the auth CD builder */
		QAT_LOG(ERR, "Invalid param, must be a cipher command.");
		return -EFAULT;
	}

	if (cdesc->qat_mode == ICP_QAT_HW_CIPHER_CTR_MODE) {
		/*
		 * CTR Streaming ciphers are a special case. Decrypt = encrypt
		 * Overriding default values previously set
		 */
		cdesc->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
		key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2
		|| cdesc->qat_cipher_alg ==
			ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)
		key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
	else if (cdesc->qat_dir == ICP_QAT_HW_CIPHER_ENCRYPT)
		key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
	else
		key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;

	/* Per-algorithm total key size, cipher state size and proto flag */
	if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
		total_key_size = ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ +
			ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
		cipher_cd_ctrl->cipher_state_sz =
			ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;

	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
		total_key_size = ICP_QAT_HW_KASUMI_F8_KEY_SZ;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_KASUMI_BLK_SZ >> 3;
		cipher_cd_ctrl->cipher_padding_sz =
					(2 * ICP_QAT_HW_KASUMI_BLK_SZ) >> 3;
	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES) {
		total_key_size = ICP_QAT_HW_3DES_KEY_SZ;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_3DES_BLK_SZ >> 3;
		qat_proto_flag =
			qat_get_crypto_proto_flag(header->serv_specif_flags);
	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_DES) {
		total_key_size = ICP_QAT_HW_DES_KEY_SZ;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_DES_BLK_SZ >> 3;
		qat_proto_flag =
			qat_get_crypto_proto_flag(header->serv_specif_flags);
	} else if (cdesc->qat_cipher_alg ==
		ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
		total_key_size = ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ +
			ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
		cipher_cd_ctrl->cipher_state_sz =
			ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
		cdesc->min_qat_dev_gen = QAT_GEN2;
	} else {
		total_key_size = cipherkeylen;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_AES_BLK_SZ >> 3;
		qat_proto_flag =
			qat_get_crypto_proto_flag(header->serv_specif_flags);
	}
	/* Firmware sizes/offsets are expressed in 8-byte (quadword) units */
	cipher_cd_ctrl->cipher_key_sz = total_key_size >> 3;
	cipher_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
	cipher_cd_ctrl->cipher_cfg_offset = cipher_offset >> 3;

	header->service_cmd_id = cdesc->qat_cmd;
	qat_sym_session_init_common_hdr(header, qat_proto_flag);

	cipher = (struct icp_qat_hw_cipher_algo_blk *)cdesc->cd_cur_ptr;
	cipher->cipher_config.val =
	    ICP_QAT_HW_CIPHER_CONFIG_BUILD(cdesc->qat_mode,
					cdesc->qat_cipher_alg, key_convert,
					cdesc->qat_dir);

	if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
		/* KASUMI F8 needs key || (key ^ modifier) in the CD */
		temp_key = (uint32_t *)(cdesc->cd_cur_ptr +
					sizeof(struct icp_qat_hw_cipher_config)
					+ cipherkeylen);
		memcpy(cipher->key, cipherkey, cipherkeylen);
		memcpy(temp_key, cipherkey, cipherkeylen);

		/* XOR Key with KASUMI F8 key modifier at 4 bytes level */
		for (wordIndex = 0; wordIndex < (cipherkeylen >> 2);
								wordIndex++)
			temp_key[wordIndex] ^= KASUMI_F8_KEY_MODIFIER_4_BYTES;

		cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
					cipherkeylen + cipherkeylen;
	} else {
		memcpy(cipher->key, cipherkey, cipherkeylen);
		cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
					cipherkeylen;
	}

	if (total_key_size > cipherkeylen) {
		/* Pad the key area out to the HW-required total size */
		uint32_t padding_size =  total_key_size-cipherkeylen;
		if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
			&& (cipherkeylen == QAT_3DES_KEY_SZ_OPT2)) {
			/* K3 not provided so use K1 = K3*/
			memcpy(cdesc->cd_cur_ptr, cipherkey, padding_size);
		} else if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
			&& (cipherkeylen == QAT_3DES_KEY_SZ_OPT3)) {
			/* K2 and K3 not provided so use K1 = K2 = K3*/
			memcpy(cdesc->cd_cur_ptr, cipherkey,
				cipherkeylen);
			memcpy(cdesc->cd_cur_ptr+cipherkeylen,
				cipherkey, cipherkeylen);
		} else
			memset(cdesc->cd_cur_ptr, 0, padding_size);

		cdesc->cd_cur_ptr += padding_size;
	}
	cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
	cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;

	return 0;
}
1584
/**
 * Build the auth (hash) portion of a session's content descriptor.
 *
 * Writes the auth setup block plus the algorithm-specific state1/state2
 * (precomputed prefixes, keys, IV areas) at cdesc->cd_cur_ptr (advancing
 * it), programs the auth slice chaining, digest verify/generate flags,
 * AAD lengths and state sizes in the firmware request template, and
 * initialises the common header.
 *
 * @param cdesc      session descriptor under construction
 * @param authkey    auth key bytes supplied by the application
 * @param authkeylen length of authkey in bytes
 * @param aad_length AAD length in bytes (GCM/CCM only)
 * @param digestsize digest length in bytes
 * @param operation  RTE_CRYPTO_AUTH_OP_VERIFY or _GENERATE
 * @return 0 on success, -EFAULT on invalid command/hash algorithm,
 *         or a negative error from qat_hash_get_block_size()
 */
int qat_sym_session_aead_create_cd_auth(struct qat_sym_session *cdesc,
						const uint8_t *authkey,
						uint32_t authkeylen,
						uint32_t aad_length,
						uint32_t digestsize,
						unsigned int operation)
{
	struct icp_qat_hw_auth_setup *hash;
	struct icp_qat_hw_cipher_algo_blk *cipherconfig;
	struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
	struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
	struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
	/* cipher and auth cd_ctrl views alias the same cd_ctrl field */
	void *ptr = &req_tmpl->cd_ctrl;
	struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
	struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
	struct icp_qat_fw_la_auth_req_params *auth_param =
		(struct icp_qat_fw_la_auth_req_params *)
		((char *)&req_tmpl->serv_specif_rqpars +
		ICP_QAT_FW_HASH_REQUEST_PARAMETERS_OFFSET);
	uint16_t state1_size = 0, state2_size = 0;
	uint16_t hash_offset, cd_size;
	uint32_t *aad_len = NULL;
	uint32_t wordIndex  = 0;
	uint32_t *pTempKey;
	enum qat_sym_proto_flag qat_proto_flag =
		QAT_CRYPTO_PROTO_FLAG_NONE;

	if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_AUTH) {
		/* Auth-only: auth slice writes result straight to DRAM */
		ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_DRAM_WR);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
		/* Hash-then-cipher: chain auth slice into the cipher slice */
		ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
				ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
				ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
				ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
				ICP_QAT_FW_SLICE_DRAM_WR);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
		/* CIPHER_HASH slice chaining was set by the cipher CD builder */
		QAT_LOG(ERR, "Invalid param, must be a hash command.");
		return -EFAULT;
	}

	if (operation == RTE_CRYPTO_AUTH_OP_VERIFY) {
		/* Verify: firmware compares the digest, does not return it */
		ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_NO_RET_AUTH_RES);
		ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_CMP_AUTH_RES);
		cdesc->auth_op = ICP_QAT_HW_AUTH_VERIFY;
	} else {
		/* Generate: firmware returns the digest, no comparison */
		ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
					   ICP_QAT_FW_LA_RET_AUTH_RES);
		ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
					   ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
		cdesc->auth_op = ICP_QAT_HW_AUTH_GENERATE;
	}

	/*
	 * Setup the inner hash config
	 */
	hash_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
	hash = (struct icp_qat_hw_auth_setup *)cdesc->cd_cur_ptr;
	hash->auth_config.reserved = 0;
	hash->auth_config.config =
			ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE1,
				cdesc->qat_hash_alg, digestsize);

	/* Algorithms with no HMAC-style block counter use counter = 0 */
	if (cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_KASUMI_F9
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_NULL
			)
		hash->auth_counter.counter = 0;
	else {
		int block_size = qat_hash_get_block_size(cdesc->qat_hash_alg);

		if (block_size < 0)
			return block_size;
		/* counter is stored big-endian for the firmware */
		hash->auth_counter.counter = rte_bswap32(block_size);
	}

	cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_auth_setup);

	/*
	 * cd_cur_ptr now points at the state1 information.
	 */
	switch (cdesc->qat_hash_alg) {
	case ICP_QAT_HW_AUTH_ALGO_SHA1:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA1, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = RTE_ALIGN_CEIL(ICP_QAT_HW_SHA1_STATE2_SZ, 8);
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA224:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA224, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_SHA224_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA256:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA256, authkey,
			authkeylen, cdesc->cd_cur_ptr,  &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_SHA256_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA384:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA384, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_SHA384_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA512:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA512, authkey,
			authkeylen, cdesc->cd_cur_ptr,  &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_SHA512_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
		/* Also covers AES-CMAC when cdesc->aes_cmac is set */
		state1_size = ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;

		if (cdesc->aes_cmac)
			memset(cdesc->cd_cur_ptr, 0, state1_size);
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC,
			authkey, authkeylen, cdesc->cd_cur_ptr + state1_size,
			&state2_size, cdesc->aes_cmac)) {
			cdesc->aes_cmac ? QAT_LOG(ERR,
						  "(CMAC)precompute failed")
					: QAT_LOG(ERR,
						  "(XCBC)precompute failed");
			return -EFAULT;
		}
		break;
	case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
	case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
		state1_size = ICP_QAT_HW_GALOIS_128_STATE1_SZ;
		if (qat_sym_do_precomputes(cdesc->qat_hash_alg, authkey,
			authkeylen, cdesc->cd_cur_ptr + state1_size,
			&state2_size, cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(GCM)precompute failed");
			return -EFAULT;
		}
		/*
		 * Write (the length of AAD) into bytes 16-19 of state2
		 * in big-endian format. This field is 8 bytes
		 */
		auth_param->u2.aad_sz =
				RTE_ALIGN_CEIL(aad_length, 16);
		auth_param->hash_state_sz = (auth_param->u2.aad_sz) >> 3;

		aad_len = (uint32_t *)(cdesc->cd_cur_ptr +
					ICP_QAT_HW_GALOIS_128_STATE1_SZ +
					ICP_QAT_HW_GALOIS_H_SZ);
		*aad_len = rte_bswap32(aad_length);
		cdesc->aad_len = aad_length;
		break;
	case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2);
		state2_size = ICP_QAT_HW_SNOW_3G_UIA2_STATE2_SZ;
		memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);

		/* Embedded cipher config follows state2: key + zeroed IV */
		cipherconfig = (struct icp_qat_hw_cipher_algo_blk *)
				(cdesc->cd_cur_ptr + state1_size + state2_size);
		cipherconfig->cipher_config.val =
		ICP_QAT_HW_CIPHER_CONFIG_BUILD(ICP_QAT_HW_CIPHER_ECB_MODE,
			ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2,
			ICP_QAT_HW_CIPHER_KEY_CONVERT,
			ICP_QAT_HW_CIPHER_ENCRYPT);
		memcpy(cipherconfig->key, authkey, authkeylen);
		memset(cipherconfig->key + authkeylen,
				0, ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ);
		cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
				authkeylen + ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
		auth_param->hash_state_sz = ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
		break;
	case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
		/* ZUC uses MODE0 instead of the MODE1 set above */
		hash->auth_config.config =
			ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE0,
				cdesc->qat_hash_alg, digestsize);
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3);
		state2_size = ICP_QAT_HW_ZUC_3G_EIA3_STATE2_SZ;
		memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size
			+ ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ);

		memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
		cdesc->cd_cur_ptr += state1_size + state2_size
			+ ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
		auth_param->hash_state_sz = ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
		cdesc->min_qat_dev_gen = QAT_GEN2;

		break;
	case ICP_QAT_HW_AUTH_ALGO_MD5:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_MD5, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(MD5)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_MD5_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_NULL:
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_NULL);
		state2_size = ICP_QAT_HW_NULL_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
		/* CBC-MAC is used for CCM */
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC);
		state2_size = ICP_QAT_HW_AES_CBC_MAC_KEY_SZ +
				ICP_QAT_HW_AES_CCM_CBC_E_CTR0_SZ;

		if (aad_length > 0) {
			/* Account for the CCM B0 block and AAD length info */
			aad_length += ICP_QAT_HW_CCM_AAD_B0_LEN +
			ICP_QAT_HW_CCM_AAD_LEN_INFO;
			auth_param->u2.aad_sz =
			RTE_ALIGN_CEIL(aad_length,
			ICP_QAT_HW_CCM_AAD_ALIGNMENT);
		} else {
			auth_param->u2.aad_sz = ICP_QAT_HW_CCM_AAD_B0_LEN;
		}
		cdesc->aad_len = aad_length;
		hash->auth_counter.counter = 0;

		hash_cd_ctrl->outer_prefix_sz = digestsize;
		auth_param->hash_state_sz = digestsize;

		memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
		break;
	case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_KASUMI_F9);
		state2_size = ICP_QAT_HW_KASUMI_F9_STATE2_SZ;
		memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
		pTempKey = (uint32_t *)(cdesc->cd_cur_ptr + state1_size
							+ authkeylen);
		/*
		* The Inner Hash Initial State2 block must contain IK
		* (Initialisation Key), followed by IK XOR-ed with KM
		* (Key Modifier): IK||(IK^KM).
		*/
		/* write the auth key */
		memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
		/* initialise temp key with auth key */
		memcpy(pTempKey, authkey, authkeylen);
		/* XOR Key with KASUMI F9 key modifier at 4 bytes level */
		for (wordIndex = 0; wordIndex < (authkeylen >> 2); wordIndex++)
			pTempKey[wordIndex] ^= KASUMI_F9_KEY_MODIFIER_4_BYTES;
		break;
	default:
		QAT_LOG(ERR, "Invalid HASH alg %u", cdesc->qat_hash_alg);
		return -EFAULT;
	}

	/* Request template setup */
	qat_sym_session_init_common_hdr(header, qat_proto_flag);
	header->service_cmd_id = cdesc->qat_cmd;

	/* Auth CD config setup */
	hash_cd_ctrl->hash_cfg_offset = hash_offset >> 3;
	hash_cd_ctrl->hash_flags = ICP_QAT_FW_AUTH_HDR_FLAG_NO_NESTED;
	hash_cd_ctrl->inner_res_sz = digestsize;
	hash_cd_ctrl->final_sz = digestsize;
	hash_cd_ctrl->inner_state1_sz = state1_size;
	auth_param->auth_res_sz = digestsize;

	hash_cd_ctrl->inner_state2_sz  = state2_size;
	hash_cd_ctrl->inner_state2_offset = hash_cd_ctrl->hash_cfg_offset +
			((sizeof(struct icp_qat_hw_auth_setup) +
			 RTE_ALIGN_CEIL(hash_cd_ctrl->inner_state1_sz, 8))
					>> 3);

	cdesc->cd_cur_ptr += state1_size + state2_size;
	cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;

	cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
	cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;

	return 0;
}
1891
1892 int qat_sym_validate_aes_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1893 {
1894         switch (key_len) {
1895         case ICP_QAT_HW_AES_128_KEY_SZ:
1896                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
1897                 break;
1898         case ICP_QAT_HW_AES_192_KEY_SZ:
1899                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES192;
1900                 break;
1901         case ICP_QAT_HW_AES_256_KEY_SZ:
1902                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
1903                 break;
1904         default:
1905                 return -EINVAL;
1906         }
1907         return 0;
1908 }
1909
1910 int qat_sym_validate_aes_docsisbpi_key(int key_len,
1911                 enum icp_qat_hw_cipher_algo *alg)
1912 {
1913         switch (key_len) {
1914         case ICP_QAT_HW_AES_128_KEY_SZ:
1915                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
1916                 break;
1917         case ICP_QAT_HW_AES_256_KEY_SZ:
1918                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
1919                 break;
1920         default:
1921                 return -EINVAL;
1922         }
1923         return 0;
1924 }
1925
1926 int qat_sym_validate_snow3g_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1927 {
1928         switch (key_len) {
1929         case ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ:
1930                 *alg = ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2;
1931                 break;
1932         default:
1933                 return -EINVAL;
1934         }
1935         return 0;
1936 }
1937
1938 int qat_sym_validate_kasumi_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1939 {
1940         switch (key_len) {
1941         case ICP_QAT_HW_KASUMI_KEY_SZ:
1942                 *alg = ICP_QAT_HW_CIPHER_ALGO_KASUMI;
1943                 break;
1944         default:
1945                 return -EINVAL;
1946         }
1947         return 0;
1948 }
1949
1950 int qat_sym_validate_des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1951 {
1952         switch (key_len) {
1953         case ICP_QAT_HW_DES_KEY_SZ:
1954                 *alg = ICP_QAT_HW_CIPHER_ALGO_DES;
1955                 break;
1956         default:
1957                 return -EINVAL;
1958         }
1959         return 0;
1960 }
1961
1962 int qat_sym_validate_3des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1963 {
1964         switch (key_len) {
1965         case QAT_3DES_KEY_SZ_OPT1:
1966         case QAT_3DES_KEY_SZ_OPT2:
1967         case QAT_3DES_KEY_SZ_OPT3:
1968                 *alg = ICP_QAT_HW_CIPHER_ALGO_3DES;
1969                 break;
1970         default:
1971                 return -EINVAL;
1972         }
1973         return 0;
1974 }
1975
1976 int qat_sym_validate_zuc_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1977 {
1978         switch (key_len) {
1979         case ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ:
1980                 *alg = ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3;
1981                 break;
1982         default:
1983                 return -EINVAL;
1984         }
1985         return 0;
1986 }