cryptodev: revert Chacha20-Poly1305 AEAD algorithm
[dpdk.git] / drivers / crypto / qat / qat_sym_session.c
1 /* SPDX-License-Identifier: (BSD-3-Clause OR GPL-2.0)
2  * Copyright(c) 2015-2019 Intel Corporation
3  */
4
5 #include <openssl/sha.h>        /* Needed to calculate pre-compute values */
6 #include <openssl/aes.h>        /* Needed to calculate pre-compute values */
7 #include <openssl/md5.h>        /* Needed to calculate pre-compute values */
8 #include <openssl/evp.h>        /* Needed for bpi runt block processing */
9
10 #include <rte_memcpy.h>
11 #include <rte_common.h>
12 #include <rte_spinlock.h>
13 #include <rte_byteorder.h>
14 #include <rte_log.h>
15 #include <rte_malloc.h>
16 #include <rte_crypto_sym.h>
17
18 #include "qat_logs.h"
19 #include "qat_sym_session.h"
20 #include "qat_sym_pmd.h"
21
22 /** Frees a context previously created
23  *  Depends on openssl libcrypto
24  */
25 static void
26 bpi_cipher_ctx_free(void *bpi_ctx)
27 {
28         if (bpi_ctx != NULL)
29                 EVP_CIPHER_CTX_free((EVP_CIPHER_CTX *)bpi_ctx);
30 }
31
32 /** Creates a context in either AES or DES in ECB mode
33  *  Depends on openssl libcrypto
34  */
35 static int
36 bpi_cipher_ctx_init(enum rte_crypto_cipher_algorithm cryptodev_algo,
37                 enum rte_crypto_cipher_operation direction __rte_unused,
38                 const uint8_t *key, void **ctx)
39 {
40         const EVP_CIPHER *algo = NULL;
41         int ret;
42         *ctx = EVP_CIPHER_CTX_new();
43
44         if (*ctx == NULL) {
45                 ret = -ENOMEM;
46                 goto ctx_init_err;
47         }
48
49         if (cryptodev_algo == RTE_CRYPTO_CIPHER_DES_DOCSISBPI)
50                 algo = EVP_des_ecb();
51         else
52                 algo = EVP_aes_128_ecb();
53
54         /* IV will be ECB encrypted whether direction is encrypt or decrypt*/
55         if (EVP_EncryptInit_ex(*ctx, algo, NULL, key, 0) != 1) {
56                 ret = -EINVAL;
57                 goto ctx_init_err;
58         }
59
60         return 0;
61
62 ctx_init_err:
63         if (*ctx != NULL)
64                 EVP_CIPHER_CTX_free(*ctx);
65         return ret;
66 }
67
68 static int
69 qat_is_cipher_alg_supported(enum rte_crypto_cipher_algorithm algo,
70                 struct qat_sym_dev_private *internals)
71 {
72         int i = 0;
73         const struct rte_cryptodev_capabilities *capability;
74
75         while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
76                         RTE_CRYPTO_OP_TYPE_UNDEFINED) {
77                 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
78                         continue;
79
80                 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_CIPHER)
81                         continue;
82
83                 if (capability->sym.cipher.algo == algo)
84                         return 1;
85         }
86         return 0;
87 }
88
89 static int
90 qat_is_auth_alg_supported(enum rte_crypto_auth_algorithm algo,
91                 struct qat_sym_dev_private *internals)
92 {
93         int i = 0;
94         const struct rte_cryptodev_capabilities *capability;
95
96         while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
97                         RTE_CRYPTO_OP_TYPE_UNDEFINED) {
98                 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
99                         continue;
100
101                 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_AUTH)
102                         continue;
103
104                 if (capability->sym.auth.algo == algo)
105                         return 1;
106         }
107         return 0;
108 }
109
110 void
111 qat_sym_session_clear(struct rte_cryptodev *dev,
112                 struct rte_cryptodev_sym_session *sess)
113 {
114         uint8_t index = dev->driver_id;
115         void *sess_priv = get_sym_session_private_data(sess, index);
116         struct qat_sym_session *s = (struct qat_sym_session *)sess_priv;
117
118         if (sess_priv) {
119                 if (s->bpi_ctx)
120                         bpi_cipher_ctx_free(s->bpi_ctx);
121                 memset(s, 0, qat_sym_session_get_private_size(dev));
122                 struct rte_mempool *sess_mp = rte_mempool_from_obj(sess_priv);
123
124                 set_sym_session_private_data(sess, index, NULL);
125                 rte_mempool_put(sess_mp, sess_priv);
126         }
127 }
128
129 static int
130 qat_get_cmd_id(const struct rte_crypto_sym_xform *xform)
131 {
132         /* Cipher Only */
133         if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER && xform->next == NULL)
134                 return ICP_QAT_FW_LA_CMD_CIPHER;
135
136         /* Authentication Only */
137         if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH && xform->next == NULL)
138                 return ICP_QAT_FW_LA_CMD_AUTH;
139
140         /* AEAD */
141         if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
142                 /* AES-GCM and AES-CCM works with different direction
143                  * GCM first encrypts and generate hash where AES-CCM
144                  * first generate hash and encrypts. Similar relation
145                  * applies to decryption.
146                  */
147                 if (xform->aead.op == RTE_CRYPTO_AEAD_OP_ENCRYPT)
148                         if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
149                                 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
150                         else
151                                 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
152                 else
153                         if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
154                                 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
155                         else
156                                 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
157         }
158
159         if (xform->next == NULL)
160                 return -1;
161
162         /* Cipher then Authenticate */
163         if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
164                         xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
165                 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
166
167         /* Authenticate then Cipher */
168         if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH &&
169                         xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
170                 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
171
172         return -1;
173 }
174
175 static struct rte_crypto_auth_xform *
176 qat_get_auth_xform(struct rte_crypto_sym_xform *xform)
177 {
178         do {
179                 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH)
180                         return &xform->auth;
181
182                 xform = xform->next;
183         } while (xform);
184
185         return NULL;
186 }
187
188 static struct rte_crypto_cipher_xform *
189 qat_get_cipher_xform(struct rte_crypto_sym_xform *xform)
190 {
191         do {
192                 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
193                         return &xform->cipher;
194
195                 xform = xform->next;
196         } while (xform);
197
198         return NULL;
199 }
200
/** Configure the cipher half of a QAT session from a cipher xform.
 *
 *  Maps the cryptodev cipher algorithm onto the QAT HW algorithm/mode pair,
 *  validates the key size, records the IV offset/length, sets the cipher
 *  direction and builds the cipher content descriptor.
 *
 *  @param dev     cryptodev owning the session (used for capability checks)
 *  @param xform   xform chain; the first cipher xform in it is used
 *  @param session session to populate
 *  @return 0 on success; -EINVAL, -ENOTSUP on failure. On failure any
 *          BPI context created here is freed and session->bpi_ctx reset.
 */
int
qat_sym_session_configure_cipher(struct rte_cryptodev *dev,
		struct rte_crypto_sym_xform *xform,
		struct qat_sym_session *session)
{
	struct qat_sym_dev_private *internals = dev->data->dev_private;
	struct rte_crypto_cipher_xform *cipher_xform = NULL;
	int ret;

	/* Get cipher xform from crypto xform chain */
	cipher_xform = qat_get_cipher_xform(xform);

	session->cipher_iv.offset = cipher_xform->iv.offset;
	session->cipher_iv.length = cipher_xform->iv.length;

	switch (cipher_xform->algo) {
	case RTE_CRYPTO_CIPHER_AES_CBC:
		if (qat_sym_validate_aes_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_AES_CTR:
		if (qat_sym_validate_aes_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		break;
	case RTE_CRYPTO_CIPHER_SNOW3G_UEA2:
		if (qat_sym_validate_snow3g_key(cipher_xform->key.length,
					&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid SNOW 3G cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
		break;
	case RTE_CRYPTO_CIPHER_NULL:
		session->qat_cipher_alg = ICP_QAT_HW_CIPHER_ALGO_NULL;
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		break;
	case RTE_CRYPTO_CIPHER_KASUMI_F8:
		if (qat_sym_validate_kasumi_key(cipher_xform->key.length,
					&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid KASUMI cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_F8_MODE;
		break;
	case RTE_CRYPTO_CIPHER_3DES_CBC:
		if (qat_sym_validate_3des_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid 3DES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_DES_CBC:
		if (qat_sym_validate_des_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid DES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_3DES_CTR:
		if (qat_sym_validate_3des_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid 3DES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		break;
	case RTE_CRYPTO_CIPHER_DES_DOCSISBPI:
		/* DOCSIS BPI needs an extra software (openssl) ECB context
		 * for runt-block processing, created before key validation.
		 */
		ret = bpi_cipher_ctx_init(
					cipher_xform->algo,
					cipher_xform->op,
					cipher_xform->key.data,
					&session->bpi_ctx);
		if (ret != 0) {
			QAT_LOG(ERR, "failed to create DES BPI ctx");
			goto error_out;
		}
		if (qat_sym_validate_des_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid DES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_AES_DOCSISBPI:
		ret = bpi_cipher_ctx_init(
					cipher_xform->algo,
					cipher_xform->op,
					cipher_xform->key.data,
					&session->bpi_ctx);
		if (ret != 0) {
			QAT_LOG(ERR, "failed to create AES BPI ctx");
			goto error_out;
		}
		if (qat_sym_validate_aes_docsisbpi_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES DOCSISBPI key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_ZUC_EEA3:
		/* ZUC is only present on some devices: check capabilities */
		if (!qat_is_cipher_alg_supported(
			cipher_xform->algo, internals)) {
			QAT_LOG(ERR, "%s not supported on this device",
				rte_crypto_cipher_algorithm_strings
					[cipher_xform->algo]);
			ret = -ENOTSUP;
			goto error_out;
		}
		if (qat_sym_validate_zuc_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid ZUC cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
		break;
	case RTE_CRYPTO_CIPHER_AES_XTS:
		/* XTS keys are double length; validate each half */
		if ((cipher_xform->key.length/2) == ICP_QAT_HW_AES_192_KEY_SZ) {
			QAT_LOG(ERR, "AES-XTS-192 not supported");
			ret = -EINVAL;
			goto error_out;
		}
		if (qat_sym_validate_aes_key((cipher_xform->key.length/2),
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES-XTS cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_XTS_MODE;
		break;
	case RTE_CRYPTO_CIPHER_3DES_ECB:
	case RTE_CRYPTO_CIPHER_AES_ECB:
	case RTE_CRYPTO_CIPHER_AES_F8:
	case RTE_CRYPTO_CIPHER_ARC4:
		/* Known algorithms that this PMD does not implement */
		QAT_LOG(ERR, "Crypto QAT PMD: Unsupported Cipher alg %u",
				cipher_xform->algo);
		ret = -ENOTSUP;
		goto error_out;
	default:
		QAT_LOG(ERR, "Crypto: Undefined Cipher specified %u\n",
				cipher_xform->algo);
		ret = -EINVAL;
		goto error_out;
	}

	if (cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
		session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
	else
		session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;

	/* Build the cipher content descriptor with the validated key */
	if (qat_sym_session_aead_create_cd_cipher(session,
						cipher_xform->key.data,
						cipher_xform->key.length)) {
		ret = -EINVAL;
		goto error_out;
	}

	return 0;

error_out:
	/* Undo BPI ctx allocation so session teardown cannot double-free */
	if (session->bpi_ctx) {
		bpi_cipher_ctx_free(session->bpi_ctx);
		session->bpi_ctx = NULL;
	}
	return ret;
}
387
388 int
389 qat_sym_session_configure(struct rte_cryptodev *dev,
390                 struct rte_crypto_sym_xform *xform,
391                 struct rte_cryptodev_sym_session *sess,
392                 struct rte_mempool *mempool)
393 {
394         void *sess_private_data;
395         int ret;
396
397         if (rte_mempool_get(mempool, &sess_private_data)) {
398                 CDEV_LOG_ERR(
399                         "Couldn't get object from session mempool");
400                 return -ENOMEM;
401         }
402
403         ret = qat_sym_session_set_parameters(dev, xform, sess_private_data);
404         if (ret != 0) {
405                 QAT_LOG(ERR,
406                     "Crypto QAT PMD: failed to configure session parameters");
407
408                 /* Return session to mempool */
409                 rte_mempool_put(mempool, sess_private_data);
410                 return ret;
411         }
412
413         set_sym_session_private_data(sess, dev->driver_id,
414                 sess_private_data);
415
416         return 0;
417 }
418
/* Enable the extended-protocol hash flags in the firmware request header
 * for mixed cipher/hash (wireless) sessions.
 *
 * @param session   session whose prebuilt fw_req template is patched
 * @param hash_flag hash flag bit(s) OR-ed into LW 28 of the content
 *                  descriptor control block (0 just enables ext flags)
 */
static void
qat_sym_session_set_ext_hash_flags(struct qat_sym_session *session,
		uint8_t hash_flag)
{
	struct icp_qat_fw_comn_req_hdr *header = &session->fw_req.comn_hdr;
	struct icp_qat_fw_cipher_auth_cd_ctrl_hdr *cd_ctrl =
			(struct icp_qat_fw_cipher_auth_cd_ctrl_hdr *)
			session->fw_req.cd_ctrl.content_desc_ctrl_lw;

	/* Set the Use Extended Protocol Flags bit in LW 1 */
	QAT_FIELD_SET(header->comn_req_flags,
			QAT_COMN_EXT_FLAGS_USED,
			QAT_COMN_EXT_FLAGS_BITPOS,
			QAT_COMN_EXT_FLAGS_MASK);

	/* Set Hash Flags in LW 28 */
	cd_ctrl->hash_flags |= hash_flag;

	/* Set proto flags in LW 1: the proto flag must match the *cipher*
	 * algorithm; both the generic and the ZUC-specific flag are always
	 * written so stale values from a previous setup cannot survive.
	 */
	switch (session->qat_cipher_alg) {
	case ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_SNOW_3G_PROTO);
		ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
				header->serv_specif_flags, 0);
		break;
	case ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_NO_PROTO);
		ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
				header->serv_specif_flags,
				ICP_QAT_FW_LA_ZUC_3G_PROTO);
		break;
	default:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_NO_PROTO);
		ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
				header->serv_specif_flags, 0);
		break;
	}
}
460
/* Detect "mixed" wireless cipher/hash combinations (e.g. ZUC auth with a
 * non-ZUC cipher) that need extended hash flags and only work on GEN3+
 * devices; plain (matched) combinations are left untouched.
 */
static void
qat_sym_session_handle_mixed(struct qat_sym_session *session)
{
	/* ZUC EIA3 auth paired with a non-ZUC cipher */
	if (session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3 &&
			session->qat_cipher_alg !=
			ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
		session->min_qat_dev_gen = QAT_GEN3;
		qat_sym_session_set_ext_hash_flags(session,
			1 << ICP_QAT_FW_AUTH_HDR_FLAG_ZUC_EIA3_BITPOS);
	/* SNOW 3G UIA2 auth paired with a non-SNOW 3G cipher */
	} else if (session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2 &&
			session->qat_cipher_alg !=
			ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
		session->min_qat_dev_gen = QAT_GEN3;
		qat_sym_session_set_ext_hash_flags(session,
			1 << ICP_QAT_FW_AUTH_HDR_FLAG_SNOW3G_UIA2_BITPOS);
	/* AES-CMAC or NULL auth over a wireless cipher: ext flags only,
	 * no extra hash flag bit needed.
	 */
	} else if ((session->aes_cmac ||
			session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_NULL) &&
			(session->qat_cipher_alg ==
			ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2 ||
			session->qat_cipher_alg ==
			ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)) {
		session->min_qat_dev_gen = QAT_GEN3;
		qat_sym_session_set_ext_hash_flags(session, 0);
	}
}
486
487 int
488 qat_sym_session_set_parameters(struct rte_cryptodev *dev,
489                 struct rte_crypto_sym_xform *xform, void *session_private)
490 {
491         struct qat_sym_session *session = session_private;
492         int ret;
493         int qat_cmd_id;
494
495         /* Set context descriptor physical address */
496         session->cd_paddr = rte_mempool_virt2iova(session) +
497                         offsetof(struct qat_sym_session, cd);
498
499         session->min_qat_dev_gen = QAT_GEN1;
500
501         /* Get requested QAT command id */
502         qat_cmd_id = qat_get_cmd_id(xform);
503         if (qat_cmd_id < 0 || qat_cmd_id >= ICP_QAT_FW_LA_CMD_DELIMITER) {
504                 QAT_LOG(ERR, "Unsupported xform chain requested");
505                 return -ENOTSUP;
506         }
507         session->qat_cmd = (enum icp_qat_fw_la_cmd_id)qat_cmd_id;
508         switch (session->qat_cmd) {
509         case ICP_QAT_FW_LA_CMD_CIPHER:
510                 ret = qat_sym_session_configure_cipher(dev, xform, session);
511                 if (ret < 0)
512                         return ret;
513                 break;
514         case ICP_QAT_FW_LA_CMD_AUTH:
515                 ret = qat_sym_session_configure_auth(dev, xform, session);
516                 if (ret < 0)
517                         return ret;
518                 break;
519         case ICP_QAT_FW_LA_CMD_CIPHER_HASH:
520                 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
521                         ret = qat_sym_session_configure_aead(dev, xform,
522                                         session);
523                         if (ret < 0)
524                                 return ret;
525                 } else {
526                         ret = qat_sym_session_configure_cipher(dev,
527                                         xform, session);
528                         if (ret < 0)
529                                 return ret;
530                         ret = qat_sym_session_configure_auth(dev,
531                                         xform, session);
532                         if (ret < 0)
533                                 return ret;
534                         /* Special handling of mixed hash+cipher algorithms */
535                         qat_sym_session_handle_mixed(session);
536                 }
537                 break;
538         case ICP_QAT_FW_LA_CMD_HASH_CIPHER:
539                 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
540                         ret = qat_sym_session_configure_aead(dev, xform,
541                                         session);
542                         if (ret < 0)
543                                 return ret;
544                 } else {
545                         ret = qat_sym_session_configure_auth(dev,
546                                         xform, session);
547                         if (ret < 0)
548                                 return ret;
549                         ret = qat_sym_session_configure_cipher(dev,
550                                         xform, session);
551                         if (ret < 0)
552                                 return ret;
553                         /* Special handling of mixed hash+cipher algorithms */
554                         qat_sym_session_handle_mixed(session);
555                 }
556                 break;
557         case ICP_QAT_FW_LA_CMD_TRNG_GET_RANDOM:
558         case ICP_QAT_FW_LA_CMD_TRNG_TEST:
559         case ICP_QAT_FW_LA_CMD_SSL3_KEY_DERIVE:
560         case ICP_QAT_FW_LA_CMD_TLS_V1_1_KEY_DERIVE:
561         case ICP_QAT_FW_LA_CMD_TLS_V1_2_KEY_DERIVE:
562         case ICP_QAT_FW_LA_CMD_MGF1:
563         case ICP_QAT_FW_LA_CMD_AUTH_PRE_COMP:
564         case ICP_QAT_FW_LA_CMD_CIPHER_PRE_COMP:
565         case ICP_QAT_FW_LA_CMD_DELIMITER:
566         QAT_LOG(ERR, "Unsupported Service %u",
567                 session->qat_cmd);
568                 return -ENOTSUP;
569         default:
570         QAT_LOG(ERR, "Unsupported Service %u",
571                 session->qat_cmd);
572                 return -ENOTSUP;
573         }
574
575         return 0;
576 }
577
/* Switch an AES-GCM AEAD session to the faster single-pass (SPC) mode.
 *
 * Only applies on GEN3 devices with the 12-byte (96-bit) GCM IV; in every
 * other case the session is left as configured and 0 is returned. When it
 * applies, the session is rewritten as a CIPHER command in AEAD cipher
 * mode and the firmware header flags / cipher config are set accordingly.
 *
 * @return 0 on success (including "not applicable"), -EINVAL if building
 *         the cipher content descriptor fails.
 */
static int
qat_sym_session_handle_single_pass(struct qat_sym_dev_private *internals,
		struct qat_sym_session *session,
		struct rte_crypto_aead_xform *aead_xform)
{
	enum qat_device_gen qat_dev_gen = internals->qat_dev->qat_dev_gen;

	if (qat_dev_gen == QAT_GEN3 &&
			aead_xform->iv.length == QAT_AES_GCM_SPC_IV_SIZE) {
		/* Use faster Single-Pass GCM */
		struct icp_qat_fw_la_cipher_req_params *cipher_param =
				(void *) &session->fw_req.serv_specif_rqpars;

		session->is_single_pass = 1;
		session->min_qat_dev_gen = QAT_GEN3;
		/* SPC runs the whole AEAD as one cipher command */
		session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER;
		session->qat_mode = ICP_QAT_HW_CIPHER_AEAD_MODE;
		session->cipher_iv.offset = aead_xform->iv.offset;
		session->cipher_iv.length = aead_xform->iv.length;
		if (qat_sym_session_aead_create_cd_cipher(session,
				aead_xform->key.data, aead_xform->key.length))
			return -EINVAL;
		session->aad_len = aead_xform->aad_length;
		session->digest_length = aead_xform->digest_length;
		if (aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
			/* Encrypt: generate and return the auth tag */
			session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
			session->auth_op = ICP_QAT_HW_AUTH_GENERATE;
			ICP_QAT_FW_LA_RET_AUTH_SET(
				session->fw_req.comn_hdr.serv_specif_flags,
				ICP_QAT_FW_LA_RET_AUTH_RES);
		} else {
			/* Decrypt: compare against the supplied auth tag */
			session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
			session->auth_op = ICP_QAT_HW_AUTH_VERIFY;
			ICP_QAT_FW_LA_CMP_AUTH_SET(
				session->fw_req.comn_hdr.serv_specif_flags,
				ICP_QAT_FW_LA_CMP_AUTH_RES);
		}
		ICP_QAT_FW_LA_SINGLE_PASS_PROTO_FLAG_SET(
				session->fw_req.comn_hdr.serv_specif_flags,
				ICP_QAT_FW_LA_SINGLE_PASS_PROTO);
		ICP_QAT_FW_LA_PROTO_SET(
				session->fw_req.comn_hdr.serv_specif_flags,
				ICP_QAT_FW_LA_NO_PROTO);
		ICP_QAT_FW_LA_GCM_IV_LEN_FLAG_SET(
				session->fw_req.comn_hdr.serv_specif_flags,
				ICP_QAT_FW_LA_GCM_IV_LEN_12_OCTETS);
		session->fw_req.comn_hdr.service_cmd_id =
				ICP_QAT_FW_LA_CMD_CIPHER;
		session->cd.cipher.cipher_config.val =
				ICP_QAT_HW_CIPHER_CONFIG_BUILD(
					ICP_QAT_HW_CIPHER_AEAD_MODE,
					session->qat_cipher_alg,
					ICP_QAT_HW_CIPHER_NO_CONVERT,
					session->qat_dir);
		/* Digest compare length lives inside the cipher config word */
		QAT_FIELD_SET(session->cd.cipher.cipher_config.val,
				aead_xform->digest_length,
				QAT_CIPHER_AEAD_HASH_CMP_LEN_BITPOS,
				QAT_CIPHER_AEAD_HASH_CMP_LEN_MASK);
		session->cd.cipher.cipher_config.reserved =
				ICP_QAT_HW_CIPHER_CONFIG_BUILD_UPPER(
					aead_xform->aad_length);
		cipher_param->spc_aad_sz = aead_xform->aad_length;
		cipher_param->spc_auth_res_sz = aead_xform->digest_length;
	}
	return 0;
}
644
645 int
646 qat_sym_session_configure_auth(struct rte_cryptodev *dev,
647                                 struct rte_crypto_sym_xform *xform,
648                                 struct qat_sym_session *session)
649 {
650         struct rte_crypto_auth_xform *auth_xform = qat_get_auth_xform(xform);
651         struct qat_sym_dev_private *internals = dev->data->dev_private;
652         const uint8_t *key_data = auth_xform->key.data;
653         uint8_t key_length = auth_xform->key.length;
654         session->aes_cmac = 0;
655
656         switch (auth_xform->algo) {
657         case RTE_CRYPTO_AUTH_SHA1_HMAC:
658                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA1;
659                 break;
660         case RTE_CRYPTO_AUTH_SHA224_HMAC:
661                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA224;
662                 break;
663         case RTE_CRYPTO_AUTH_SHA256_HMAC:
664                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA256;
665                 break;
666         case RTE_CRYPTO_AUTH_SHA384_HMAC:
667                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA384;
668                 break;
669         case RTE_CRYPTO_AUTH_SHA512_HMAC:
670                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA512;
671                 break;
672         case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
673                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
674                 break;
675         case RTE_CRYPTO_AUTH_AES_CMAC:
676                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
677                 session->aes_cmac = 1;
678                 break;
679         case RTE_CRYPTO_AUTH_AES_GMAC:
680                 if (qat_sym_validate_aes_key(auth_xform->key.length,
681                                 &session->qat_cipher_alg) != 0) {
682                         QAT_LOG(ERR, "Invalid AES key size");
683                         return -EINVAL;
684                 }
685                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
686                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
687
688                 break;
689         case RTE_CRYPTO_AUTH_SNOW3G_UIA2:
690                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2;
691                 break;
692         case RTE_CRYPTO_AUTH_MD5_HMAC:
693                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_MD5;
694                 break;
695         case RTE_CRYPTO_AUTH_NULL:
696                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_NULL;
697                 break;
698         case RTE_CRYPTO_AUTH_KASUMI_F9:
699                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_KASUMI_F9;
700                 break;
701         case RTE_CRYPTO_AUTH_ZUC_EIA3:
702                 if (!qat_is_auth_alg_supported(auth_xform->algo, internals)) {
703                         QAT_LOG(ERR, "%s not supported on this device",
704                                 rte_crypto_auth_algorithm_strings
705                                 [auth_xform->algo]);
706                         return -ENOTSUP;
707                 }
708                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3;
709                 break;
710         case RTE_CRYPTO_AUTH_SHA1:
711         case RTE_CRYPTO_AUTH_SHA256:
712         case RTE_CRYPTO_AUTH_SHA512:
713         case RTE_CRYPTO_AUTH_SHA224:
714         case RTE_CRYPTO_AUTH_SHA384:
715         case RTE_CRYPTO_AUTH_MD5:
716         case RTE_CRYPTO_AUTH_AES_CBC_MAC:
717                 QAT_LOG(ERR, "Crypto: Unsupported hash alg %u",
718                                 auth_xform->algo);
719                 return -ENOTSUP;
720         default:
721                 QAT_LOG(ERR, "Crypto: Undefined Hash algo %u specified",
722                                 auth_xform->algo);
723                 return -EINVAL;
724         }
725
726         session->auth_iv.offset = auth_xform->iv.offset;
727         session->auth_iv.length = auth_xform->iv.length;
728
729         if (auth_xform->algo == RTE_CRYPTO_AUTH_AES_GMAC) {
730                 if (auth_xform->op == RTE_CRYPTO_AUTH_OP_GENERATE) {
731                         session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER_HASH;
732                         session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
733                         /*
734                          * It needs to create cipher desc content first,
735                          * then authentication
736                          */
737
738                         if (qat_sym_session_aead_create_cd_cipher(session,
739                                                 auth_xform->key.data,
740                                                 auth_xform->key.length))
741                                 return -EINVAL;
742
743                         if (qat_sym_session_aead_create_cd_auth(session,
744                                                 key_data,
745                                                 key_length,
746                                                 0,
747                                                 auth_xform->digest_length,
748                                                 auth_xform->op))
749                                 return -EINVAL;
750                 } else {
751                         session->qat_cmd = ICP_QAT_FW_LA_CMD_HASH_CIPHER;
752                         session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
753                         /*
754                          * It needs to create authentication desc content first,
755                          * then cipher
756                          */
757
758                         if (qat_sym_session_aead_create_cd_auth(session,
759                                         key_data,
760                                         key_length,
761                                         0,
762                                         auth_xform->digest_length,
763                                         auth_xform->op))
764                                 return -EINVAL;
765
766                         if (qat_sym_session_aead_create_cd_cipher(session,
767                                                 auth_xform->key.data,
768                                                 auth_xform->key.length))
769                                 return -EINVAL;
770                 }
771                 /* Restore to authentication only only */
772                 session->qat_cmd = ICP_QAT_FW_LA_CMD_AUTH;
773         } else {
774                 if (qat_sym_session_aead_create_cd_auth(session,
775                                 key_data,
776                                 key_length,
777                                 0,
778                                 auth_xform->digest_length,
779                                 auth_xform->op))
780                         return -EINVAL;
781         }
782
783         session->digest_length = auth_xform->digest_length;
784         return 0;
785 }
786
787 int
788 qat_sym_session_configure_aead(struct rte_cryptodev *dev,
789                                 struct rte_crypto_sym_xform *xform,
790                                 struct qat_sym_session *session)
791 {
792         struct rte_crypto_aead_xform *aead_xform = &xform->aead;
793         enum rte_crypto_auth_operation crypto_operation;
794
795         /*
796          * Store AEAD IV parameters as cipher IV,
797          * to avoid unnecessary memory usage
798          */
799         session->cipher_iv.offset = xform->aead.iv.offset;
800         session->cipher_iv.length = xform->aead.iv.length;
801
802         switch (aead_xform->algo) {
803         case RTE_CRYPTO_AEAD_AES_GCM:
804                 if (qat_sym_validate_aes_key(aead_xform->key.length,
805                                 &session->qat_cipher_alg) != 0) {
806                         QAT_LOG(ERR, "Invalid AES key size");
807                         return -EINVAL;
808                 }
809                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
810                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
811                 break;
812         case RTE_CRYPTO_AEAD_AES_CCM:
813                 if (qat_sym_validate_aes_key(aead_xform->key.length,
814                                 &session->qat_cipher_alg) != 0) {
815                         QAT_LOG(ERR, "Invalid AES key size");
816                         return -EINVAL;
817                 }
818                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
819                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC;
820                 break;
821         default:
822                 QAT_LOG(ERR, "Crypto: Undefined AEAD specified %u\n",
823                                 aead_xform->algo);
824                 return -EINVAL;
825         }
826
827         session->is_single_pass = 0;
828         if (aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) {
829                 /* Use faster Single-Pass GCM if possible */
830                 int res = qat_sym_session_handle_single_pass(
831                                 dev->data->dev_private, session, aead_xform);
832                 if (res < 0)
833                         return res;
834                 if (session->is_single_pass)
835                         return 0;
836         }
837
838         if ((aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT &&
839                         aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) ||
840                         (aead_xform->op == RTE_CRYPTO_AEAD_OP_DECRYPT &&
841                         aead_xform->algo == RTE_CRYPTO_AEAD_AES_CCM)) {
842                 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
843                 /*
844                  * It needs to create cipher desc content first,
845                  * then authentication
846                  */
847                 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
848                         RTE_CRYPTO_AUTH_OP_GENERATE : RTE_CRYPTO_AUTH_OP_VERIFY;
849
850                 if (qat_sym_session_aead_create_cd_cipher(session,
851                                         aead_xform->key.data,
852                                         aead_xform->key.length))
853                         return -EINVAL;
854
855                 if (qat_sym_session_aead_create_cd_auth(session,
856                                         aead_xform->key.data,
857                                         aead_xform->key.length,
858                                         aead_xform->aad_length,
859                                         aead_xform->digest_length,
860                                         crypto_operation))
861                         return -EINVAL;
862         } else {
863                 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
864                 /*
865                  * It needs to create authentication desc content first,
866                  * then cipher
867                  */
868
869                 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
870                         RTE_CRYPTO_AUTH_OP_VERIFY : RTE_CRYPTO_AUTH_OP_GENERATE;
871
872                 if (qat_sym_session_aead_create_cd_auth(session,
873                                         aead_xform->key.data,
874                                         aead_xform->key.length,
875                                         aead_xform->aad_length,
876                                         aead_xform->digest_length,
877                                         crypto_operation))
878                         return -EINVAL;
879
880                 if (qat_sym_session_aead_create_cd_cipher(session,
881                                         aead_xform->key.data,
882                                         aead_xform->key.length))
883                         return -EINVAL;
884         }
885
886         session->digest_length = aead_xform->digest_length;
887         return 0;
888 }
889
890 unsigned int qat_sym_session_get_private_size(
891                 struct rte_cryptodev *dev __rte_unused)
892 {
893         return RTE_ALIGN_CEIL(sizeof(struct qat_sym_session), 8);
894 }
895
896 /* returns block size in bytes per cipher algo */
897 int qat_cipher_get_block_size(enum icp_qat_hw_cipher_algo qat_cipher_alg)
898 {
899         switch (qat_cipher_alg) {
900         case ICP_QAT_HW_CIPHER_ALGO_DES:
901                 return ICP_QAT_HW_DES_BLK_SZ;
902         case ICP_QAT_HW_CIPHER_ALGO_3DES:
903                 return ICP_QAT_HW_3DES_BLK_SZ;
904         case ICP_QAT_HW_CIPHER_ALGO_AES128:
905         case ICP_QAT_HW_CIPHER_ALGO_AES192:
906         case ICP_QAT_HW_CIPHER_ALGO_AES256:
907                 return ICP_QAT_HW_AES_BLK_SZ;
908         default:
909                 QAT_LOG(ERR, "invalid block cipher alg %u", qat_cipher_alg);
910                 return -EFAULT;
911         };
912         return -EFAULT;
913 }
914
915 /*
916  * Returns size in bytes per hash algo for state1 size field in cd_ctrl
917  * This is digest size rounded up to nearest quadword
918  */
919 static int qat_hash_get_state1_size(enum icp_qat_hw_auth_algo qat_hash_alg)
920 {
921         switch (qat_hash_alg) {
922         case ICP_QAT_HW_AUTH_ALGO_SHA1:
923                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA1_STATE1_SZ,
924                                                 QAT_HW_DEFAULT_ALIGNMENT);
925         case ICP_QAT_HW_AUTH_ALGO_SHA224:
926                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA224_STATE1_SZ,
927                                                 QAT_HW_DEFAULT_ALIGNMENT);
928         case ICP_QAT_HW_AUTH_ALGO_SHA256:
929                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA256_STATE1_SZ,
930                                                 QAT_HW_DEFAULT_ALIGNMENT);
931         case ICP_QAT_HW_AUTH_ALGO_SHA384:
932                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA384_STATE1_SZ,
933                                                 QAT_HW_DEFAULT_ALIGNMENT);
934         case ICP_QAT_HW_AUTH_ALGO_SHA512:
935                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
936                                                 QAT_HW_DEFAULT_ALIGNMENT);
937         case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
938                 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ,
939                                                 QAT_HW_DEFAULT_ALIGNMENT);
940         case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
941         case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
942                 return QAT_HW_ROUND_UP(ICP_QAT_HW_GALOIS_128_STATE1_SZ,
943                                                 QAT_HW_DEFAULT_ALIGNMENT);
944         case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
945                 return QAT_HW_ROUND_UP(ICP_QAT_HW_ZUC_3G_EIA3_STATE1_SZ,
946                                                 QAT_HW_DEFAULT_ALIGNMENT);
947         case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
948                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SNOW_3G_UIA2_STATE1_SZ,
949                                                 QAT_HW_DEFAULT_ALIGNMENT);
950         case ICP_QAT_HW_AUTH_ALGO_MD5:
951                 return QAT_HW_ROUND_UP(ICP_QAT_HW_MD5_STATE1_SZ,
952                                                 QAT_HW_DEFAULT_ALIGNMENT);
953         case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
954                 return QAT_HW_ROUND_UP(ICP_QAT_HW_KASUMI_F9_STATE1_SZ,
955                                                 QAT_HW_DEFAULT_ALIGNMENT);
956         case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
957                 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_CBC_MAC_STATE1_SZ,
958                                                 QAT_HW_DEFAULT_ALIGNMENT);
959         case ICP_QAT_HW_AUTH_ALGO_NULL:
960                 return QAT_HW_ROUND_UP(ICP_QAT_HW_NULL_STATE1_SZ,
961                                                 QAT_HW_DEFAULT_ALIGNMENT);
962         case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
963                 /* return maximum state1 size in this case */
964                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
965                                                 QAT_HW_DEFAULT_ALIGNMENT);
966         default:
967                 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
968                 return -EFAULT;
969         };
970         return -EFAULT;
971 }
972
973 /* returns digest size in bytes  per hash algo */
974 static int qat_hash_get_digest_size(enum icp_qat_hw_auth_algo qat_hash_alg)
975 {
976         switch (qat_hash_alg) {
977         case ICP_QAT_HW_AUTH_ALGO_SHA1:
978                 return ICP_QAT_HW_SHA1_STATE1_SZ;
979         case ICP_QAT_HW_AUTH_ALGO_SHA224:
980                 return ICP_QAT_HW_SHA224_STATE1_SZ;
981         case ICP_QAT_HW_AUTH_ALGO_SHA256:
982                 return ICP_QAT_HW_SHA256_STATE1_SZ;
983         case ICP_QAT_HW_AUTH_ALGO_SHA384:
984                 return ICP_QAT_HW_SHA384_STATE1_SZ;
985         case ICP_QAT_HW_AUTH_ALGO_SHA512:
986                 return ICP_QAT_HW_SHA512_STATE1_SZ;
987         case ICP_QAT_HW_AUTH_ALGO_MD5:
988                 return ICP_QAT_HW_MD5_STATE1_SZ;
989         case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
990                 return ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
991         case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
992                 /* return maximum digest size in this case */
993                 return ICP_QAT_HW_SHA512_STATE1_SZ;
994         default:
995                 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
996                 return -EFAULT;
997         };
998         return -EFAULT;
999 }
1000
1001 /* returns block size in byes per hash algo */
1002 static int qat_hash_get_block_size(enum icp_qat_hw_auth_algo qat_hash_alg)
1003 {
1004         switch (qat_hash_alg) {
1005         case ICP_QAT_HW_AUTH_ALGO_SHA1:
1006                 return SHA_CBLOCK;
1007         case ICP_QAT_HW_AUTH_ALGO_SHA224:
1008                 return SHA256_CBLOCK;
1009         case ICP_QAT_HW_AUTH_ALGO_SHA256:
1010                 return SHA256_CBLOCK;
1011         case ICP_QAT_HW_AUTH_ALGO_SHA384:
1012                 return SHA512_CBLOCK;
1013         case ICP_QAT_HW_AUTH_ALGO_SHA512:
1014                 return SHA512_CBLOCK;
1015         case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
1016                 return 16;
1017         case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
1018                 return ICP_QAT_HW_AES_BLK_SZ;
1019         case ICP_QAT_HW_AUTH_ALGO_MD5:
1020                 return MD5_CBLOCK;
1021         case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
1022                 /* return maximum block size in this case */
1023                 return SHA512_CBLOCK;
1024         default:
1025                 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
1026                 return -EFAULT;
1027         };
1028         return -EFAULT;
1029 }
1030
1031 static int partial_hash_sha1(uint8_t *data_in, uint8_t *data_out)
1032 {
1033         SHA_CTX ctx;
1034
1035         if (!SHA1_Init(&ctx))
1036                 return -EFAULT;
1037         SHA1_Transform(&ctx, data_in);
1038         rte_memcpy(data_out, &ctx, SHA_DIGEST_LENGTH);
1039         return 0;
1040 }
1041
1042 static int partial_hash_sha224(uint8_t *data_in, uint8_t *data_out)
1043 {
1044         SHA256_CTX ctx;
1045
1046         if (!SHA224_Init(&ctx))
1047                 return -EFAULT;
1048         SHA256_Transform(&ctx, data_in);
1049         rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
1050         return 0;
1051 }
1052
1053 static int partial_hash_sha256(uint8_t *data_in, uint8_t *data_out)
1054 {
1055         SHA256_CTX ctx;
1056
1057         if (!SHA256_Init(&ctx))
1058                 return -EFAULT;
1059         SHA256_Transform(&ctx, data_in);
1060         rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
1061         return 0;
1062 }
1063
1064 static int partial_hash_sha384(uint8_t *data_in, uint8_t *data_out)
1065 {
1066         SHA512_CTX ctx;
1067
1068         if (!SHA384_Init(&ctx))
1069                 return -EFAULT;
1070         SHA512_Transform(&ctx, data_in);
1071         rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
1072         return 0;
1073 }
1074
1075 static int partial_hash_sha512(uint8_t *data_in, uint8_t *data_out)
1076 {
1077         SHA512_CTX ctx;
1078
1079         if (!SHA512_Init(&ctx))
1080                 return -EFAULT;
1081         SHA512_Transform(&ctx, data_in);
1082         rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
1083         return 0;
1084 }
1085
1086 static int partial_hash_md5(uint8_t *data_in, uint8_t *data_out)
1087 {
1088         MD5_CTX ctx;
1089
1090         if (!MD5_Init(&ctx))
1091                 return -EFAULT;
1092         MD5_Transform(&ctx, data_in);
1093         rte_memcpy(data_out, &ctx, MD5_DIGEST_LENGTH);
1094
1095         return 0;
1096 }
1097
1098 static int partial_hash_compute(enum icp_qat_hw_auth_algo hash_alg,
1099                         uint8_t *data_in,
1100                         uint8_t *data_out)
1101 {
1102         int digest_size;
1103         uint8_t digest[qat_hash_get_digest_size(
1104                         ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1105         uint32_t *hash_state_out_be32;
1106         uint64_t *hash_state_out_be64;
1107         int i;
1108
1109         digest_size = qat_hash_get_digest_size(hash_alg);
1110         if (digest_size <= 0)
1111                 return -EFAULT;
1112
1113         hash_state_out_be32 = (uint32_t *)data_out;
1114         hash_state_out_be64 = (uint64_t *)data_out;
1115
1116         switch (hash_alg) {
1117         case ICP_QAT_HW_AUTH_ALGO_SHA1:
1118                 if (partial_hash_sha1(data_in, digest))
1119                         return -EFAULT;
1120                 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
1121                         *hash_state_out_be32 =
1122                                 rte_bswap32(*(((uint32_t *)digest)+i));
1123                 break;
1124         case ICP_QAT_HW_AUTH_ALGO_SHA224:
1125                 if (partial_hash_sha224(data_in, digest))
1126                         return -EFAULT;
1127                 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
1128                         *hash_state_out_be32 =
1129                                 rte_bswap32(*(((uint32_t *)digest)+i));
1130                 break;
1131         case ICP_QAT_HW_AUTH_ALGO_SHA256:
1132                 if (partial_hash_sha256(data_in, digest))
1133                         return -EFAULT;
1134                 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
1135                         *hash_state_out_be32 =
1136                                 rte_bswap32(*(((uint32_t *)digest)+i));
1137                 break;
1138         case ICP_QAT_HW_AUTH_ALGO_SHA384:
1139                 if (partial_hash_sha384(data_in, digest))
1140                         return -EFAULT;
1141                 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
1142                         *hash_state_out_be64 =
1143                                 rte_bswap64(*(((uint64_t *)digest)+i));
1144                 break;
1145         case ICP_QAT_HW_AUTH_ALGO_SHA512:
1146                 if (partial_hash_sha512(data_in, digest))
1147                         return -EFAULT;
1148                 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
1149                         *hash_state_out_be64 =
1150                                 rte_bswap64(*(((uint64_t *)digest)+i));
1151                 break;
1152         case ICP_QAT_HW_AUTH_ALGO_MD5:
1153                 if (partial_hash_md5(data_in, data_out))
1154                         return -EFAULT;
1155                 break;
1156         default:
1157                 QAT_LOG(ERR, "invalid hash alg %u", hash_alg);
1158                 return -EFAULT;
1159         }
1160
1161         return 0;
1162 }
1163 #define HMAC_IPAD_VALUE 0x36
1164 #define HMAC_OPAD_VALUE 0x5c
1165 #define HASH_XCBC_PRECOMP_KEY_NUM 3
1166
1167 static const uint8_t AES_CMAC_SEED[ICP_QAT_HW_AES_128_KEY_SZ];
1168
1169 static void aes_cmac_key_derive(uint8_t *base, uint8_t *derived)
1170 {
1171         int i;
1172
1173         derived[0] = base[0] << 1;
1174         for (i = 1; i < ICP_QAT_HW_AES_BLK_SZ ; i++) {
1175                 derived[i] = base[i] << 1;
1176                 derived[i - 1] |= base[i] >> 7;
1177         }
1178
1179         if (base[0] & 0x80)
1180                 derived[ICP_QAT_HW_AES_BLK_SZ - 1] ^= QAT_AES_CMAC_CONST_RB;
1181 }
1182
1183 static int qat_sym_do_precomputes(enum icp_qat_hw_auth_algo hash_alg,
1184                                 const uint8_t *auth_key,
1185                                 uint16_t auth_keylen,
1186                                 uint8_t *p_state_buf,
1187                                 uint16_t *p_state_len,
1188                                 uint8_t aes_cmac)
1189 {
1190         int block_size;
1191         uint8_t ipad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1192         uint8_t opad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1193         int i;
1194
1195         if (hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC) {
1196
1197                 /* CMAC */
1198                 if (aes_cmac) {
1199                         AES_KEY enc_key;
1200                         uint8_t *in = NULL;
1201                         uint8_t k0[ICP_QAT_HW_AES_128_KEY_SZ];
1202                         uint8_t *k1, *k2;
1203
1204                         auth_keylen = ICP_QAT_HW_AES_128_KEY_SZ;
1205
1206                         in = rte_zmalloc("AES CMAC K1",
1207                                          ICP_QAT_HW_AES_128_KEY_SZ, 16);
1208
1209                         if (in == NULL) {
1210                                 QAT_LOG(ERR, "Failed to alloc memory");
1211                                 return -ENOMEM;
1212                         }
1213
1214                         rte_memcpy(in, AES_CMAC_SEED,
1215                                    ICP_QAT_HW_AES_128_KEY_SZ);
1216                         rte_memcpy(p_state_buf, auth_key, auth_keylen);
1217
1218                         if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1219                                 &enc_key) != 0) {
1220                                 rte_free(in);
1221                                 return -EFAULT;
1222                         }
1223
1224                         AES_encrypt(in, k0, &enc_key);
1225
1226                         k1 = p_state_buf + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1227                         k2 = k1 + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1228
1229                         aes_cmac_key_derive(k0, k1);
1230                         aes_cmac_key_derive(k1, k2);
1231
1232                         memset(k0, 0, ICP_QAT_HW_AES_128_KEY_SZ);
1233                         *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
1234                         rte_free(in);
1235                         return 0;
1236                 } else {
1237                         static uint8_t qat_aes_xcbc_key_seed[
1238                                         ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ] = {
1239                                 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1240                                 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1241                                 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1242                                 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1243                                 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1244                                 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1245                         };
1246
1247                         uint8_t *in = NULL;
1248                         uint8_t *out = p_state_buf;
1249                         int x;
1250                         AES_KEY enc_key;
1251
1252                         in = rte_zmalloc("working mem for key",
1253                                         ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ, 16);
1254                         if (in == NULL) {
1255                                 QAT_LOG(ERR, "Failed to alloc memory");
1256                                 return -ENOMEM;
1257                         }
1258
1259                         rte_memcpy(in, qat_aes_xcbc_key_seed,
1260                                         ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1261                         for (x = 0; x < HASH_XCBC_PRECOMP_KEY_NUM; x++) {
1262                                 if (AES_set_encrypt_key(auth_key,
1263                                                         auth_keylen << 3,
1264                                                         &enc_key) != 0) {
1265                                         rte_free(in -
1266                                           (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ));
1267                                         memset(out -
1268                                            (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ),
1269                                           0, ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1270                                         return -EFAULT;
1271                                 }
1272                                 AES_encrypt(in, out, &enc_key);
1273                                 in += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1274                                 out += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1275                         }
1276                         *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
1277                         rte_free(in - x*ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ);
1278                         return 0;
1279                 }
1280
1281         } else if ((hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_128) ||
1282                 (hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_64)) {
1283                 uint8_t *in = NULL;
1284                 uint8_t *out = p_state_buf;
1285                 AES_KEY enc_key;
1286
1287                 memset(p_state_buf, 0, ICP_QAT_HW_GALOIS_H_SZ +
1288                                 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1289                                 ICP_QAT_HW_GALOIS_E_CTR0_SZ);
1290                 in = rte_zmalloc("working mem for key",
1291                                 ICP_QAT_HW_GALOIS_H_SZ, 16);
1292                 if (in == NULL) {
1293                         QAT_LOG(ERR, "Failed to alloc memory");
1294                         return -ENOMEM;
1295                 }
1296
1297                 memset(in, 0, ICP_QAT_HW_GALOIS_H_SZ);
1298                 if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1299                         &enc_key) != 0) {
1300                         return -EFAULT;
1301                 }
1302                 AES_encrypt(in, out, &enc_key);
1303                 *p_state_len = ICP_QAT_HW_GALOIS_H_SZ +
1304                                 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1305                                 ICP_QAT_HW_GALOIS_E_CTR0_SZ;
1306                 rte_free(in);
1307                 return 0;
1308         }
1309
1310         block_size = qat_hash_get_block_size(hash_alg);
1311         if (block_size < 0)
1312                 return block_size;
1313         /* init ipad and opad from key and xor with fixed values */
1314         memset(ipad, 0, block_size);
1315         memset(opad, 0, block_size);
1316
1317         if (auth_keylen > (unsigned int)block_size) {
1318                 QAT_LOG(ERR, "invalid keylen %u", auth_keylen);
1319                 return -EFAULT;
1320         }
1321         rte_memcpy(ipad, auth_key, auth_keylen);
1322         rte_memcpy(opad, auth_key, auth_keylen);
1323
1324         for (i = 0; i < block_size; i++) {
1325                 uint8_t *ipad_ptr = ipad + i;
1326                 uint8_t *opad_ptr = opad + i;
1327                 *ipad_ptr ^= HMAC_IPAD_VALUE;
1328                 *opad_ptr ^= HMAC_OPAD_VALUE;
1329         }
1330
1331         /* do partial hash of ipad and copy to state1 */
1332         if (partial_hash_compute(hash_alg, ipad, p_state_buf)) {
1333                 memset(ipad, 0, block_size);
1334                 memset(opad, 0, block_size);
1335                 QAT_LOG(ERR, "ipad precompute failed");
1336                 return -EFAULT;
1337         }
1338
1339         /*
1340          * State len is a multiple of 8, so may be larger than the digest.
1341          * Put the partial hash of opad state_len bytes after state1
1342          */
1343         *p_state_len = qat_hash_get_state1_size(hash_alg);
1344         if (partial_hash_compute(hash_alg, opad, p_state_buf + *p_state_len)) {
1345                 memset(ipad, 0, block_size);
1346                 memset(opad, 0, block_size);
1347                 QAT_LOG(ERR, "opad precompute failed");
1348                 return -EFAULT;
1349         }
1350
1351         /*  don't leave data lying around */
1352         memset(ipad, 0, block_size);
1353         memset(opad, 0, block_size);
1354         return 0;
1355 }
1356
1357 static void
1358 qat_sym_session_init_common_hdr(struct icp_qat_fw_comn_req_hdr *header,
1359                 enum qat_sym_proto_flag proto_flags)
1360 {
1361         header->hdr_flags =
1362                 ICP_QAT_FW_COMN_HDR_FLAGS_BUILD(ICP_QAT_FW_COMN_REQ_FLAG_SET);
1363         header->service_type = ICP_QAT_FW_COMN_REQ_CPM_FW_LA;
1364         header->comn_req_flags =
1365                 ICP_QAT_FW_COMN_FLAGS_BUILD(QAT_COMN_CD_FLD_TYPE_64BIT_ADR,
1366                                         QAT_COMN_PTR_TYPE_FLAT);
1367         ICP_QAT_FW_LA_PARTIAL_SET(header->serv_specif_flags,
1368                                   ICP_QAT_FW_LA_PARTIAL_NONE);
1369         ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(header->serv_specif_flags,
1370                                            ICP_QAT_FW_CIPH_IV_16BYTE_DATA);
1371
1372         switch (proto_flags)            {
1373         case QAT_CRYPTO_PROTO_FLAG_NONE:
1374                 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1375                                         ICP_QAT_FW_LA_NO_PROTO);
1376                 break;
1377         case QAT_CRYPTO_PROTO_FLAG_CCM:
1378                 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1379                                         ICP_QAT_FW_LA_CCM_PROTO);
1380                 break;
1381         case QAT_CRYPTO_PROTO_FLAG_GCM:
1382                 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1383                                         ICP_QAT_FW_LA_GCM_PROTO);
1384                 break;
1385         case QAT_CRYPTO_PROTO_FLAG_SNOW3G:
1386                 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1387                                         ICP_QAT_FW_LA_SNOW_3G_PROTO);
1388                 break;
1389         case QAT_CRYPTO_PROTO_FLAG_ZUC:
1390                 ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(header->serv_specif_flags,
1391                         ICP_QAT_FW_LA_ZUC_3G_PROTO);
1392                 break;
1393         }
1394
1395         ICP_QAT_FW_LA_UPDATE_STATE_SET(header->serv_specif_flags,
1396                                            ICP_QAT_FW_LA_NO_UPDATE_STATE);
1397         ICP_QAT_FW_LA_DIGEST_IN_BUFFER_SET(header->serv_specif_flags,
1398                                         ICP_QAT_FW_LA_NO_DIGEST_IN_BUFFER);
1399 }
1400
1401 /*
1402  *      Snow3G and ZUC should never use this function
1403  *      and set its protocol flag in both cipher and auth part of content
1404  *      descriptor building function
1405  */
1406 static enum qat_sym_proto_flag
1407 qat_get_crypto_proto_flag(uint16_t flags)
1408 {
1409         int proto = ICP_QAT_FW_LA_PROTO_GET(flags);
1410         enum qat_sym_proto_flag qat_proto_flag =
1411                         QAT_CRYPTO_PROTO_FLAG_NONE;
1412
1413         switch (proto) {
1414         case ICP_QAT_FW_LA_GCM_PROTO:
1415                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
1416                 break;
1417         case ICP_QAT_FW_LA_CCM_PROTO:
1418                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
1419                 break;
1420         }
1421
1422         return qat_proto_flag;
1423 }
1424
/**
 * Build the cipher portion of a QAT session content descriptor.
 *
 * Writes the cipher config word and key material into cdesc->cd at
 * cdesc->cd_cur_ptr, and fills in the cipher CD control header plus the
 * common request-template header in cdesc->fw_req.
 *
 * @param cdesc		session descriptor; caller must have set qat_cmd,
 *			qat_mode, qat_dir, qat_cipher_alg, cd_paddr
 * @param cipherkey	cipher key supplied by the application
 * @param cipherkeylen	length of cipherkey in bytes
 * @return 0 on success, -EFAULT if cdesc->qat_cmd is not a cipher-capable
 *	command
 */
int qat_sym_session_aead_create_cd_cipher(struct qat_sym_session *cdesc,
						const uint8_t *cipherkey,
						uint32_t cipherkeylen)
{
	struct icp_qat_hw_cipher_algo_blk *cipher;
	struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
	struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
	struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
	/* cd_ctrl is one shared area, viewed as cipher or auth ctrl header */
	void *ptr = &req_tmpl->cd_ctrl;
	struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
	struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
	enum icp_qat_hw_cipher_convert key_convert;
	enum qat_sym_proto_flag qat_proto_flag =
		QAT_CRYPTO_PROTO_FLAG_NONE;
	uint32_t total_key_size;
	uint16_t cipher_offset, cd_size;
	uint32_t wordIndex  = 0;
	uint32_t *temp_key = NULL;

	/* Chain firmware slices according to the requested command. */
	if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER) {
		/* Cipher-only: cipher slice writes straight to DRAM,
		 * no auth result is produced or compared.
		 */
		cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
		ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_DRAM_WR);
		ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_RET_AUTH_RES);
		ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
		/* Cipher-then-hash: cipher -> auth -> DRAM. */
		cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
		ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
					ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_DRAM_WR);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
		/* HASH_CIPHER chaining is set up by the auth CD builder;
		 * anything else is not a cipher command.
		 */
		QAT_LOG(ERR, "Invalid param, must be a cipher command.");
		return -EFAULT;
	}

	if (cdesc->qat_mode == ICP_QAT_HW_CIPHER_CTR_MODE) {
		/*
		 * CTR Streaming ciphers are a special case. Decrypt = encrypt
		 * Overriding default values previously set
		 */
		cdesc->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
		key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2
		|| cdesc->qat_cipher_alg ==
			ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)
		/* SNOW 3G / ZUC always need HW key conversion */
		key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
	else if (cdesc->qat_dir == ICP_QAT_HW_CIPHER_ENCRYPT)
		key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
	else
		/* Decrypt in non-CTR modes requires key conversion */
		key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;

	/* Per-algorithm key/state sizing; state sizes are in quad-words. */
	if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
		total_key_size = ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ +
			ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
		cipher_cd_ctrl->cipher_state_sz =
			ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;

	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
		total_key_size = ICP_QAT_HW_KASUMI_F8_KEY_SZ;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_KASUMI_BLK_SZ >> 3;
		cipher_cd_ctrl->cipher_padding_sz =
					(2 * ICP_QAT_HW_KASUMI_BLK_SZ) >> 3;
	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES) {
		total_key_size = ICP_QAT_HW_3DES_KEY_SZ;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_3DES_BLK_SZ >> 3;
		qat_proto_flag =
			qat_get_crypto_proto_flag(header->serv_specif_flags);
	} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_DES) {
		total_key_size = ICP_QAT_HW_DES_KEY_SZ;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_DES_BLK_SZ >> 3;
		qat_proto_flag =
			qat_get_crypto_proto_flag(header->serv_specif_flags);
	} else if (cdesc->qat_cipher_alg ==
		ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
		total_key_size = ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ +
			ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
		cipher_cd_ctrl->cipher_state_sz =
			ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
		/* ZUC requires at least a GEN2 QAT device */
		cdesc->min_qat_dev_gen = QAT_GEN2;
	} else {
		/* Default: AES-family, key is used as supplied */
		total_key_size = cipherkeylen;
		cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_AES_BLK_SZ >> 3;
		qat_proto_flag =
			qat_get_crypto_proto_flag(header->serv_specif_flags);
	}
	cipher_cd_ctrl->cipher_key_sz = total_key_size >> 3;
	/* Offset of the cipher config within the CD, in quad-words */
	cipher_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
	cipher_cd_ctrl->cipher_cfg_offset = cipher_offset >> 3;

	header->service_cmd_id = cdesc->qat_cmd;
	qat_sym_session_init_common_hdr(header, qat_proto_flag);

	/* Write the cipher config word at the current CD position */
	cipher = (struct icp_qat_hw_cipher_algo_blk *)cdesc->cd_cur_ptr;
	cipher->cipher_config.val =
	    ICP_QAT_HW_CIPHER_CONFIG_BUILD(cdesc->qat_mode,
					cdesc->qat_cipher_alg, key_convert,
					cdesc->qat_dir);

	if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
		/* KASUMI F8 needs key || (key ^ modifier) in the CD */
		temp_key = (uint32_t *)(cdesc->cd_cur_ptr +
					sizeof(struct icp_qat_hw_cipher_config)
					+ cipherkeylen);
		memcpy(cipher->key, cipherkey, cipherkeylen);
		memcpy(temp_key, cipherkey, cipherkeylen);

		/* XOR Key with KASUMI F8 key modifier at 4 bytes level */
		for (wordIndex = 0; wordIndex < (cipherkeylen >> 2);
								wordIndex++)
			temp_key[wordIndex] ^= KASUMI_F8_KEY_MODIFIER_4_BYTES;

		cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
					cipherkeylen + cipherkeylen;
	} else {
		memcpy(cipher->key, cipherkey, cipherkeylen);
		cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
					cipherkeylen;
	}

	/* Pad the CD out to the full HW key size if the user key is short */
	if (total_key_size > cipherkeylen) {
		uint32_t padding_size =  total_key_size-cipherkeylen;
		if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
			&& (cipherkeylen == QAT_3DES_KEY_SZ_OPT2)) {
			/* K3 not provided so use K1 = K3*/
			memcpy(cdesc->cd_cur_ptr, cipherkey, padding_size);
		} else if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
			&& (cipherkeylen == QAT_3DES_KEY_SZ_OPT3)) {
			/* K2 and K3 not provided so use K1 = K2 = K3*/
			memcpy(cdesc->cd_cur_ptr, cipherkey,
				cipherkeylen);
			memcpy(cdesc->cd_cur_ptr+cipherkeylen,
				cipherkey, cipherkeylen);
		} else
			memset(cdesc->cd_cur_ptr, 0, padding_size);

		cdesc->cd_cur_ptr += padding_size;
	}
	/* Total CD size so far, rounded up to quad-words for the firmware */
	cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
	cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;

	return 0;
}
1579
/**
 * Build the hash/auth portion of a QAT session content descriptor.
 *
 * Computes per-algorithm state1/state2 blocks (including HMAC/XCBC/GCM
 * precomputes) into cdesc->cd at cdesc->cd_cur_ptr and fills the auth CD
 * control header, the auth request parameters and the common header in
 * cdesc->fw_req.
 *
 * @param cdesc		session descriptor; caller must have set qat_cmd,
 *			qat_hash_alg, cd_paddr (and aes_cmac for XCBC/CMAC)
 * @param authkey	authentication key from the application
 * @param authkeylen	length of authkey in bytes
 * @param aad_length	AAD length in bytes (used by GCM and CCM paths)
 * @param digestsize	requested digest length in bytes
 * @param operation	RTE_CRYPTO_AUTH_OP_GENERATE or _VERIFY
 * @return 0 on success; -EFAULT on invalid command, unknown hash alg or
 *	precompute failure; a negative block-size error code otherwise
 */
int qat_sym_session_aead_create_cd_auth(struct qat_sym_session *cdesc,
						const uint8_t *authkey,
						uint32_t authkeylen,
						uint32_t aad_length,
						uint32_t digestsize,
						unsigned int operation)
{
	struct icp_qat_hw_auth_setup *hash;
	struct icp_qat_hw_cipher_algo_blk *cipherconfig;
	struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
	struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
	struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
	/* cd_ctrl is one shared area, viewed as cipher or auth ctrl header */
	void *ptr = &req_tmpl->cd_ctrl;
	struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
	struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
	struct icp_qat_fw_la_auth_req_params *auth_param =
		(struct icp_qat_fw_la_auth_req_params *)
		((char *)&req_tmpl->serv_specif_rqpars +
		ICP_QAT_FW_HASH_REQUEST_PARAMETERS_OFFSET);
	uint16_t state1_size = 0, state2_size = 0;
	uint16_t hash_offset, cd_size;
	uint32_t *aad_len = NULL;
	uint32_t wordIndex  = 0;
	uint32_t *pTempKey;
	enum qat_sym_proto_flag qat_proto_flag =
		QAT_CRYPTO_PROTO_FLAG_NONE;

	/* Chain firmware slices according to the requested command. */
	if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_AUTH) {
		/* Auth-only: auth slice writes straight to DRAM. */
		ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_DRAM_WR);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
		/* Hash-then-cipher: auth -> cipher -> DRAM. */
		ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
				ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
				ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
				ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
				ICP_QAT_FW_SLICE_DRAM_WR);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
		/* CIPHER_HASH chaining is set up by the cipher CD builder;
		 * anything else is not a hash command.
		 */
		QAT_LOG(ERR, "Invalid param, must be a hash command.");
		return -EFAULT;
	}

	if (operation == RTE_CRYPTO_AUTH_OP_VERIFY) {
		/* Verify: HW compares the digest, does not return it */
		ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_NO_RET_AUTH_RES);
		ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_CMP_AUTH_RES);
		cdesc->auth_op = ICP_QAT_HW_AUTH_VERIFY;
	} else {
		/* Generate: HW returns the digest, no comparison */
		ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
					   ICP_QAT_FW_LA_RET_AUTH_RES);
		ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
					   ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
		cdesc->auth_op = ICP_QAT_HW_AUTH_GENERATE;
	}

	/*
	 * Setup the inner hash config
	 */
	hash_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
	hash = (struct icp_qat_hw_auth_setup *)cdesc->cd_cur_ptr;
	hash->auth_config.reserved = 0;
	hash->auth_config.config =
			ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE1,
				cdesc->qat_hash_alg, digestsize);

	/* Non-HMAC style algorithms use a zero counter; HMAC-style ones
	 * carry the hash block size (big-endian) in the counter field.
	 */
	if (cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_KASUMI_F9
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_NULL
			)
		hash->auth_counter.counter = 0;
	else {
		int block_size = qat_hash_get_block_size(cdesc->qat_hash_alg);

		if (block_size < 0)
			return block_size;
		hash->auth_counter.counter = rte_bswap32(block_size);
	}

	cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_auth_setup);

	/*
	 * cd_cur_ptr now points at the state1 information.
	 * Each case below writes state1 (and usually state2) for its
	 * algorithm and sets the sizes the CD control header needs.
	 */
	switch (cdesc->qat_hash_alg) {
	case ICP_QAT_HW_AUTH_ALGO_SHA1:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA1, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = RTE_ALIGN_CEIL(ICP_QAT_HW_SHA1_STATE2_SZ, 8);
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA224:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA224, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_SHA224_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA256:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA256, authkey,
			authkeylen, cdesc->cd_cur_ptr,  &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_SHA256_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA384:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA384, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_SHA384_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA512:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA512, authkey,
			authkeylen, cdesc->cd_cur_ptr,  &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_SHA512_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
		/* aes_cmac selects CMAC vs XCBC in the same precompute */
		state1_size = ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;

		if (cdesc->aes_cmac)
			memset(cdesc->cd_cur_ptr, 0, state1_size);
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC,
			authkey, authkeylen, cdesc->cd_cur_ptr + state1_size,
			&state2_size, cdesc->aes_cmac)) {
			cdesc->aes_cmac ? QAT_LOG(ERR,
						  "(CMAC)precompute failed")
					: QAT_LOG(ERR,
						  "(XCBC)precompute failed");
			return -EFAULT;
		}
		break;
	case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
	case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
		state1_size = ICP_QAT_HW_GALOIS_128_STATE1_SZ;
		if (qat_sym_do_precomputes(cdesc->qat_hash_alg, authkey,
			authkeylen, cdesc->cd_cur_ptr + state1_size,
			&state2_size, cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(GCM)precompute failed");
			return -EFAULT;
		}
		/*
		 * Write (the length of AAD) into bytes 16-19 of state2
		 * in big-endian format. This field is 8 bytes
		 */
		auth_param->u2.aad_sz =
				RTE_ALIGN_CEIL(aad_length, 16);
		auth_param->hash_state_sz = (auth_param->u2.aad_sz) >> 3;

		aad_len = (uint32_t *)(cdesc->cd_cur_ptr +
					ICP_QAT_HW_GALOIS_128_STATE1_SZ +
					ICP_QAT_HW_GALOIS_H_SZ);
		*aad_len = rte_bswap32(aad_length);
		cdesc->aad_len = aad_length;
		break;
	case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2);
		state2_size = ICP_QAT_HW_SNOW_3G_UIA2_STATE2_SZ;
		memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);

		/* A cipher config block (UEA2, key conversion enabled) is
		 * appended after state2 for the UIA2 auth key + zeroed IV.
		 */
		cipherconfig = (struct icp_qat_hw_cipher_algo_blk *)
				(cdesc->cd_cur_ptr + state1_size + state2_size);
		cipherconfig->cipher_config.val =
		ICP_QAT_HW_CIPHER_CONFIG_BUILD(ICP_QAT_HW_CIPHER_ECB_MODE,
			ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2,
			ICP_QAT_HW_CIPHER_KEY_CONVERT,
			ICP_QAT_HW_CIPHER_ENCRYPT);
		memcpy(cipherconfig->key, authkey, authkeylen);
		memset(cipherconfig->key + authkeylen,
				0, ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ);
		cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
				authkeylen + ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
		auth_param->hash_state_sz = ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
		break;
	case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
		/* ZUC uses MODE0 instead of the MODE1 set above */
		hash->auth_config.config =
			ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE0,
				cdesc->qat_hash_alg, digestsize);
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3);
		state2_size = ICP_QAT_HW_ZUC_3G_EIA3_STATE2_SZ;
		memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size
			+ ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ);

		/* Key lives in state2; IV area after it stays zeroed */
		memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
		cdesc->cd_cur_ptr += state1_size + state2_size
			+ ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
		auth_param->hash_state_sz = ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
		/* ZUC requires at least a GEN2 QAT device */
		cdesc->min_qat_dev_gen = QAT_GEN2;

		break;
	case ICP_QAT_HW_AUTH_ALGO_MD5:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_MD5, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(MD5)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_MD5_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_NULL:
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_NULL);
		state2_size = ICP_QAT_HW_NULL_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
		/* CCM: key goes after state1; AAD size must account for the
		 * B0 block and the encoded-length bytes when AAD is present.
		 */
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC);
		state2_size = ICP_QAT_HW_AES_CBC_MAC_KEY_SZ +
				ICP_QAT_HW_AES_CCM_CBC_E_CTR0_SZ;

		if (aad_length > 0) {
			aad_length += ICP_QAT_HW_CCM_AAD_B0_LEN +
			ICP_QAT_HW_CCM_AAD_LEN_INFO;
			auth_param->u2.aad_sz =
			RTE_ALIGN_CEIL(aad_length,
			ICP_QAT_HW_CCM_AAD_ALIGNMENT);
		} else {
			auth_param->u2.aad_sz = ICP_QAT_HW_CCM_AAD_B0_LEN;
		}
		cdesc->aad_len = aad_length;
		hash->auth_counter.counter = 0;

		hash_cd_ctrl->outer_prefix_sz = digestsize;
		auth_param->hash_state_sz = digestsize;

		memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
		break;
	case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_KASUMI_F9);
		state2_size = ICP_QAT_HW_KASUMI_F9_STATE2_SZ;
		memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
		pTempKey = (uint32_t *)(cdesc->cd_cur_ptr + state1_size
							+ authkeylen);
		/*
		* The Inner Hash Initial State2 block must contain IK
		* (Initialisation Key), followed by IK XOR-ed with KM
		* (Key Modifier): IK||(IK^KM).
		*/
		/* write the auth key */
		memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
		/* initialise temp key with auth key */
		memcpy(pTempKey, authkey, authkeylen);
		/* XOR Key with KASUMI F9 key modifier at 4 bytes level */
		for (wordIndex = 0; wordIndex < (authkeylen >> 2); wordIndex++)
			pTempKey[wordIndex] ^= KASUMI_F9_KEY_MODIFIER_4_BYTES;
		break;
	default:
		QAT_LOG(ERR, "Invalid HASH alg %u", cdesc->qat_hash_alg);
		return -EFAULT;
	}

	/* Request template setup */
	qat_sym_session_init_common_hdr(header, qat_proto_flag);
	header->service_cmd_id = cdesc->qat_cmd;

	/* Auth CD config setup */
	hash_cd_ctrl->hash_cfg_offset = hash_offset >> 3;
	hash_cd_ctrl->hash_flags = ICP_QAT_FW_AUTH_HDR_FLAG_NO_NESTED;
	hash_cd_ctrl->inner_res_sz = digestsize;
	hash_cd_ctrl->final_sz = digestsize;
	hash_cd_ctrl->inner_state1_sz = state1_size;
	auth_param->auth_res_sz = digestsize;

	/* state2 follows state1 (aligned to 8 bytes), offsets in quad-words */
	hash_cd_ctrl->inner_state2_sz  = state2_size;
	hash_cd_ctrl->inner_state2_offset = hash_cd_ctrl->hash_cfg_offset +
			((sizeof(struct icp_qat_hw_auth_setup) +
			 RTE_ALIGN_CEIL(hash_cd_ctrl->inner_state1_sz, 8))
					>> 3);

	cdesc->cd_cur_ptr += state1_size + state2_size;
	cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;

	cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
	cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;

	return 0;
}
1886
1887 int qat_sym_validate_aes_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1888 {
1889         switch (key_len) {
1890         case ICP_QAT_HW_AES_128_KEY_SZ:
1891                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
1892                 break;
1893         case ICP_QAT_HW_AES_192_KEY_SZ:
1894                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES192;
1895                 break;
1896         case ICP_QAT_HW_AES_256_KEY_SZ:
1897                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
1898                 break;
1899         default:
1900                 return -EINVAL;
1901         }
1902         return 0;
1903 }
1904
1905 int qat_sym_validate_aes_docsisbpi_key(int key_len,
1906                 enum icp_qat_hw_cipher_algo *alg)
1907 {
1908         switch (key_len) {
1909         case ICP_QAT_HW_AES_128_KEY_SZ:
1910                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
1911                 break;
1912         default:
1913                 return -EINVAL;
1914         }
1915         return 0;
1916 }
1917
1918 int qat_sym_validate_snow3g_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1919 {
1920         switch (key_len) {
1921         case ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ:
1922                 *alg = ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2;
1923                 break;
1924         default:
1925                 return -EINVAL;
1926         }
1927         return 0;
1928 }
1929
1930 int qat_sym_validate_kasumi_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1931 {
1932         switch (key_len) {
1933         case ICP_QAT_HW_KASUMI_KEY_SZ:
1934                 *alg = ICP_QAT_HW_CIPHER_ALGO_KASUMI;
1935                 break;
1936         default:
1937                 return -EINVAL;
1938         }
1939         return 0;
1940 }
1941
1942 int qat_sym_validate_des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1943 {
1944         switch (key_len) {
1945         case ICP_QAT_HW_DES_KEY_SZ:
1946                 *alg = ICP_QAT_HW_CIPHER_ALGO_DES;
1947                 break;
1948         default:
1949                 return -EINVAL;
1950         }
1951         return 0;
1952 }
1953
1954 int qat_sym_validate_3des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1955 {
1956         switch (key_len) {
1957         case QAT_3DES_KEY_SZ_OPT1:
1958         case QAT_3DES_KEY_SZ_OPT2:
1959         case QAT_3DES_KEY_SZ_OPT3:
1960                 *alg = ICP_QAT_HW_CIPHER_ALGO_3DES;
1961                 break;
1962         default:
1963                 return -EINVAL;
1964         }
1965         return 0;
1966 }
1967
1968 int qat_sym_validate_zuc_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1969 {
1970         switch (key_len) {
1971         case ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ:
1972                 *alg = ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3;
1973                 break;
1974         default:
1975                 return -EINVAL;
1976         }
1977         return 0;
1978 }