drivers/crypto/qat/qat_sym_session.c (dpdk.git)
1 /* SPDX-License-Identifier: (BSD-3-Clause OR GPL-2.0)
2  * Copyright(c) 2015-2019 Intel Corporation
3  */
4
5 #include <openssl/sha.h>        /* Needed to calculate pre-compute values */
6 #include <openssl/aes.h>        /* Needed to calculate pre-compute values */
7 #include <openssl/md5.h>        /* Needed to calculate pre-compute values */
8 #include <openssl/evp.h>        /* Needed for bpi runt block processing */
9
10 #include <rte_memcpy.h>
11 #include <rte_common.h>
12 #include <rte_spinlock.h>
13 #include <rte_byteorder.h>
14 #include <rte_log.h>
15 #include <rte_malloc.h>
16 #include <rte_crypto_sym.h>
17
18 #include "qat_logs.h"
19 #include "qat_sym_session.h"
20 #include "qat_sym_pmd.h"
21
22 /** Frees a BPI cipher context previously created by bpi_cipher_ctx_init().
23  *  Depends on OpenSSL libcrypto.
24  */
25 static void
26 bpi_cipher_ctx_free(void *bpi_ctx)
27 {
28         if (bpi_ctx != NULL)
29                 EVP_CIPHER_CTX_free((EVP_CIPHER_CTX *)bpi_ctx);
30 }
31
32 /** Creates a cipher context for AES or DES in ECB mode.
33  *  Depends on OpenSSL libcrypto.
34  */
35 static int
36 bpi_cipher_ctx_init(enum rte_crypto_cipher_algorithm cryptodev_algo,
37                 enum rte_crypto_cipher_operation direction __rte_unused,
38                 const uint8_t *key, uint16_t key_length, void **ctx)
39 {
40         const EVP_CIPHER *algo = NULL;
41         int ret;
42         *ctx = EVP_CIPHER_CTX_new();
43
44         if (*ctx == NULL) {
45                 ret = -ENOMEM;
46                 goto ctx_init_err;
47         }
48
49         if (cryptodev_algo == RTE_CRYPTO_CIPHER_DES_DOCSISBPI)
50                 algo = EVP_des_ecb();
51         else
52                 if (key_length == ICP_QAT_HW_AES_128_KEY_SZ)
53                         algo = EVP_aes_128_ecb();
54                 else
55                         algo = EVP_aes_256_ecb();
56
57         /* IV will be ECB-encrypted whether the direction is encrypt or decrypt */
58         if (EVP_EncryptInit_ex(*ctx, algo, NULL, key, 0) != 1) {
59                 ret = -EINVAL;
60                 goto ctx_init_err;
61         }
62
63         return 0;
64
65 ctx_init_err:
66         if (*ctx != NULL)
67                 EVP_CIPHER_CTX_free(*ctx);
68         return ret;
69 }
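
/*
 * Illustrative sketch only (not part of the upstream driver): how a context
 * created by bpi_cipher_ctx_init() could be used to ECB-encrypt a single
 * block, e.g. the IV needed for DOCSIS BPI runt-block processing. The
 * function name and its use here are hypothetical.
 */
static __rte_unused int
bpi_cipher_encrypt_block_sketch(void *bpi_ctx, const uint8_t *in,
                uint8_t *out, int blk_len)
{
        int out_len = 0;

        /* Reuses the key schedule set up by EVP_EncryptInit_ex() above */
        if (EVP_EncryptUpdate((EVP_CIPHER_CTX *)bpi_ctx, out, &out_len,
                        in, blk_len) != 1)
                return -EINVAL;

        return (out_len == blk_len) ? 0 : -EINVAL;
}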
70
71 static int
72 qat_is_cipher_alg_supported(enum rte_crypto_cipher_algorithm algo,
73                 struct qat_sym_dev_private *internals)
74 {
75         int i = 0;
76         const struct rte_cryptodev_capabilities *capability;
77
78         while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
79                         RTE_CRYPTO_OP_TYPE_UNDEFINED) {
80                 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
81                         continue;
82
83                 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_CIPHER)
84                         continue;
85
86                 if (capability->sym.cipher.algo == algo)
87                         return 1;
88         }
89         return 0;
90 }
91
92 static int
93 qat_is_auth_alg_supported(enum rte_crypto_auth_algorithm algo,
94                 struct qat_sym_dev_private *internals)
95 {
96         int i = 0;
97         const struct rte_cryptodev_capabilities *capability;
98
99         while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
100                         RTE_CRYPTO_OP_TYPE_UNDEFINED) {
101                 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
102                         continue;
103
104                 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_AUTH)
105                         continue;
106
107                 if (capability->sym.auth.algo == algo)
108                         return 1;
109         }
110         return 0;
111 }
112
113 void
114 qat_sym_session_clear(struct rte_cryptodev *dev,
115                 struct rte_cryptodev_sym_session *sess)
116 {
117         uint8_t index = dev->driver_id;
118         void *sess_priv = get_sym_session_private_data(sess, index);
119         struct qat_sym_session *s = (struct qat_sym_session *)sess_priv;
120
121         if (sess_priv) {
122                 if (s->bpi_ctx)
123                         bpi_cipher_ctx_free(s->bpi_ctx);
124                 memset(s, 0, qat_sym_session_get_private_size(dev));
125                 struct rte_mempool *sess_mp = rte_mempool_from_obj(sess_priv);
126
127                 set_sym_session_private_data(sess, index, NULL);
128                 rte_mempool_put(sess_mp, sess_priv);
129         }
130 }
131
132 static int
133 qat_get_cmd_id(const struct rte_crypto_sym_xform *xform)
134 {
135         /* Cipher Only */
136         if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER && xform->next == NULL)
137                 return ICP_QAT_FW_LA_CMD_CIPHER;
138
139         /* Authentication Only */
140         if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH && xform->next == NULL)
141                 return ICP_QAT_FW_LA_CMD_AUTH;
142
143         /* AEAD */
144         if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
145                 /* AES-GCM and AES-CCM use different orderings:
146                  * GCM encrypts first and then generates the hash, whereas
147                  * CCM generates the hash first and then encrypts.
148                  * A similar relation applies to decryption.
149                  */
150                 if (xform->aead.op == RTE_CRYPTO_AEAD_OP_ENCRYPT)
151                         if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
152                                 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
153                         else
154                                 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
155                 else
156                         if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
157                                 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
158                         else
159                                 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
160         }
161
162         if (xform->next == NULL)
163                 return -1;
164
165         /* Cipher then Authenticate */
166         if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
167                         xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
168                 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
169
170         /* Authenticate then Cipher */
171         if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH &&
172                         xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
173                 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
174
175         return -1;
176 }
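
/*
 * Illustrative sketch only (hypothetical helper, not used by the driver):
 * a cipher xform chained to an auth xform is mapped by qat_get_cmd_id()
 * to ICP_QAT_FW_LA_CMD_CIPHER_HASH.
 */
static __rte_unused int
qat_get_cmd_id_chain_sketch(void)
{
        struct rte_crypto_sym_xform auth_xform = {
                .type = RTE_CRYPTO_SYM_XFORM_AUTH,
                .next = NULL,
        };
        struct rte_crypto_sym_xform cipher_xform = {
                .type = RTE_CRYPTO_SYM_XFORM_CIPHER,
                .next = &auth_xform,
        };

        /* Cipher followed by auth -> CIPHER_HASH firmware command */
        return qat_get_cmd_id(&cipher_xform) == ICP_QAT_FW_LA_CMD_CIPHER_HASH;
}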
177
178 static struct rte_crypto_auth_xform *
179 qat_get_auth_xform(struct rte_crypto_sym_xform *xform)
180 {
181         do {
182                 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH)
183                         return &xform->auth;
184
185                 xform = xform->next;
186         } while (xform);
187
188         return NULL;
189 }
190
191 static struct rte_crypto_cipher_xform *
192 qat_get_cipher_xform(struct rte_crypto_sym_xform *xform)
193 {
194         do {
195                 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
196                         return &xform->cipher;
197
198                 xform = xform->next;
199         } while (xform);
200
201         return NULL;
202 }
203
204 int
205 qat_sym_session_configure_cipher(struct rte_cryptodev *dev,
206                 struct rte_crypto_sym_xform *xform,
207                 struct qat_sym_session *session)
208 {
209         struct qat_sym_dev_private *internals = dev->data->dev_private;
210         struct rte_crypto_cipher_xform *cipher_xform = NULL;
211         int ret;
212
213         /* Get cipher xform from crypto xform chain */
214         cipher_xform = qat_get_cipher_xform(xform);
215
216         session->cipher_iv.offset = cipher_xform->iv.offset;
217         session->cipher_iv.length = cipher_xform->iv.length;
218
219         switch (cipher_xform->algo) {
220         case RTE_CRYPTO_CIPHER_AES_CBC:
221                 if (qat_sym_validate_aes_key(cipher_xform->key.length,
222                                 &session->qat_cipher_alg) != 0) {
223                         QAT_LOG(ERR, "Invalid AES cipher key size");
224                         ret = -EINVAL;
225                         goto error_out;
226                 }
227                 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
228                 break;
229         case RTE_CRYPTO_CIPHER_AES_CTR:
230                 if (qat_sym_validate_aes_key(cipher_xform->key.length,
231                                 &session->qat_cipher_alg) != 0) {
232                         QAT_LOG(ERR, "Invalid AES cipher key size");
233                         ret = -EINVAL;
234                         goto error_out;
235                 }
236                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
237                 break;
238         case RTE_CRYPTO_CIPHER_SNOW3G_UEA2:
239                 if (qat_sym_validate_snow3g_key(cipher_xform->key.length,
240                                         &session->qat_cipher_alg) != 0) {
241                         QAT_LOG(ERR, "Invalid SNOW 3G cipher key size");
242                         ret = -EINVAL;
243                         goto error_out;
244                 }
245                 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
246                 break;
247         case RTE_CRYPTO_CIPHER_NULL:
248                 session->qat_cipher_alg = ICP_QAT_HW_CIPHER_ALGO_NULL;
249                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
250                 break;
251         case RTE_CRYPTO_CIPHER_KASUMI_F8:
252                 if (qat_sym_validate_kasumi_key(cipher_xform->key.length,
253                                         &session->qat_cipher_alg) != 0) {
254                         QAT_LOG(ERR, "Invalid KASUMI cipher key size");
255                         ret = -EINVAL;
256                         goto error_out;
257                 }
258                 session->qat_mode = ICP_QAT_HW_CIPHER_F8_MODE;
259                 break;
260         case RTE_CRYPTO_CIPHER_3DES_CBC:
261                 if (qat_sym_validate_3des_key(cipher_xform->key.length,
262                                 &session->qat_cipher_alg) != 0) {
263                         QAT_LOG(ERR, "Invalid 3DES cipher key size");
264                         ret = -EINVAL;
265                         goto error_out;
266                 }
267                 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
268                 break;
269         case RTE_CRYPTO_CIPHER_DES_CBC:
270                 if (qat_sym_validate_des_key(cipher_xform->key.length,
271                                 &session->qat_cipher_alg) != 0) {
272                         QAT_LOG(ERR, "Invalid DES cipher key size");
273                         ret = -EINVAL;
274                         goto error_out;
275                 }
276                 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
277                 break;
278         case RTE_CRYPTO_CIPHER_3DES_CTR:
279                 if (qat_sym_validate_3des_key(cipher_xform->key.length,
280                                 &session->qat_cipher_alg) != 0) {
281                         QAT_LOG(ERR, "Invalid 3DES cipher key size");
282                         ret = -EINVAL;
283                         goto error_out;
284                 }
285                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
286                 break;
287         case RTE_CRYPTO_CIPHER_DES_DOCSISBPI:
288                 ret = bpi_cipher_ctx_init(
289                                         cipher_xform->algo,
290                                         cipher_xform->op,
291                                         cipher_xform->key.data,
292                                         cipher_xform->key.length,
293                                         &session->bpi_ctx);
294                 if (ret != 0) {
295                         QAT_LOG(ERR, "failed to create DES BPI ctx");
296                         goto error_out;
297                 }
298                 if (qat_sym_validate_des_key(cipher_xform->key.length,
299                                 &session->qat_cipher_alg) != 0) {
300                         QAT_LOG(ERR, "Invalid DES cipher key size");
301                         ret = -EINVAL;
302                         goto error_out;
303                 }
304                 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
305                 break;
306         case RTE_CRYPTO_CIPHER_AES_DOCSISBPI:
307                 ret = bpi_cipher_ctx_init(
308                                         cipher_xform->algo,
309                                         cipher_xform->op,
310                                         cipher_xform->key.data,
311                                         cipher_xform->key.length,
312                                         &session->bpi_ctx);
313                 if (ret != 0) {
314                         QAT_LOG(ERR, "failed to create AES BPI ctx");
315                         goto error_out;
316                 }
317                 if (qat_sym_validate_aes_docsisbpi_key(cipher_xform->key.length,
318                                 &session->qat_cipher_alg) != 0) {
319                         QAT_LOG(ERR, "Invalid AES DOCSISBPI key size");
320                         ret = -EINVAL;
321                         goto error_out;
322                 }
323                 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
324                 break;
325         case RTE_CRYPTO_CIPHER_ZUC_EEA3:
326                 if (!qat_is_cipher_alg_supported(
327                         cipher_xform->algo, internals)) {
328                         QAT_LOG(ERR, "%s not supported on this device",
329                                 rte_crypto_cipher_algorithm_strings
330                                         [cipher_xform->algo]);
331                         ret = -ENOTSUP;
332                         goto error_out;
333                 }
334                 if (qat_sym_validate_zuc_key(cipher_xform->key.length,
335                                 &session->qat_cipher_alg) != 0) {
336                         QAT_LOG(ERR, "Invalid ZUC cipher key size");
337                         ret = -EINVAL;
338                         goto error_out;
339                 }
340                 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
341                 break;
342         case RTE_CRYPTO_CIPHER_AES_XTS:
343                 if ((cipher_xform->key.length/2) == ICP_QAT_HW_AES_192_KEY_SZ) {
344                         QAT_LOG(ERR, "AES-XTS-192 not supported");
345                         ret = -EINVAL;
346                         goto error_out;
347                 }
348                 if (qat_sym_validate_aes_key((cipher_xform->key.length/2),
349                                 &session->qat_cipher_alg) != 0) {
350                         QAT_LOG(ERR, "Invalid AES-XTS cipher key size");
351                         ret = -EINVAL;
352                         goto error_out;
353                 }
354                 session->qat_mode = ICP_QAT_HW_CIPHER_XTS_MODE;
355                 break;
356         case RTE_CRYPTO_CIPHER_3DES_ECB:
357         case RTE_CRYPTO_CIPHER_AES_ECB:
358         case RTE_CRYPTO_CIPHER_AES_F8:
359         case RTE_CRYPTO_CIPHER_ARC4:
360                 QAT_LOG(ERR, "Crypto QAT PMD: Unsupported Cipher alg %u",
361                                 cipher_xform->algo);
362                 ret = -ENOTSUP;
363                 goto error_out;
364         default:
365                 QAT_LOG(ERR, "Crypto: Undefined Cipher specified %u",
366                                 cipher_xform->algo);
367                 ret = -EINVAL;
368                 goto error_out;
369         }
370
371         if (cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
372                 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
373         else
374                 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
375
376         if (qat_sym_session_aead_create_cd_cipher(session,
377                                                 cipher_xform->key.data,
378                                                 cipher_xform->key.length)) {
379                 ret = -EINVAL;
380                 goto error_out;
381         }
382
383         return 0;
384
385 error_out:
386         if (session->bpi_ctx) {
387                 bpi_cipher_ctx_free(session->bpi_ctx);
388                 session->bpi_ctx = NULL;
389         }
390         return ret;
391 }
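
/*
 * Illustrative sketch only (hypothetical values): the cipher xform fields
 * consumed by qat_sym_session_configure_cipher() for AES-128-CBC. The IV
 * offset/length follow the usual rte_cryptodev conventions.
 */
static __rte_unused void
qat_cipher_xform_aes_cbc_sketch(struct rte_crypto_sym_xform *xform,
                const uint8_t *key)
{
        xform->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
        xform->next = NULL;
        xform->cipher.algo = RTE_CRYPTO_CIPHER_AES_CBC;
        xform->cipher.op = RTE_CRYPTO_CIPHER_OP_ENCRYPT;
        xform->cipher.key.data = key;
        xform->cipher.key.length = ICP_QAT_HW_AES_128_KEY_SZ;
        xform->cipher.iv.offset = 0;    /* app-defined offset into the op */
        xform->cipher.iv.length = 16;   /* AES block size */
}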
392
393 int
394 qat_sym_session_configure(struct rte_cryptodev *dev,
395                 struct rte_crypto_sym_xform *xform,
396                 struct rte_cryptodev_sym_session *sess,
397                 struct rte_mempool *mempool)
398 {
399         void *sess_private_data;
400         int ret;
401
402         if (rte_mempool_get(mempool, &sess_private_data)) {
403                 CDEV_LOG_ERR(
404                         "Couldn't get object from session mempool");
405                 return -ENOMEM;
406         }
407
408         ret = qat_sym_session_set_parameters(dev, xform, sess_private_data);
409         if (ret != 0) {
410                 QAT_LOG(ERR,
411                     "Crypto QAT PMD: failed to configure session parameters");
412
413                 /* Return session to mempool */
414                 rte_mempool_put(mempool, sess_private_data);
415                 return ret;
416         }
417
418         set_sym_session_private_data(sess, dev->driver_id,
419                 sess_private_data);
420
421         return 0;
422 }
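
/*
 * Illustrative sketch only: the configure/clear pairing as the cryptodev
 * framework drives it. Real applications go through
 * rte_cryptodev_sym_session_init()/clear(); this hypothetical helper just
 * shows the PMD-level lifecycle of the private session data.
 */
static __rte_unused int
qat_sym_session_lifecycle_sketch(struct rte_cryptodev *dev,
                struct rte_crypto_sym_xform *xform,
                struct rte_cryptodev_sym_session *sess,
                struct rte_mempool *session_priv_pool)
{
        int ret = qat_sym_session_configure(dev, xform, sess,
                        session_priv_pool);

        if (ret != 0)
                return ret;     /* private data already returned to the pool */

        /* ... build and enqueue symmetric crypto ops using sess ... */

        qat_sym_session_clear(dev, sess);       /* frees the private data */
        return 0;
}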
423
424 static void
425 qat_sym_session_set_ext_hash_flags(struct qat_sym_session *session,
426                 uint8_t hash_flag)
427 {
428         struct icp_qat_fw_comn_req_hdr *header = &session->fw_req.comn_hdr;
429         struct icp_qat_fw_cipher_auth_cd_ctrl_hdr *cd_ctrl =
430                         (struct icp_qat_fw_cipher_auth_cd_ctrl_hdr *)
431                         session->fw_req.cd_ctrl.content_desc_ctrl_lw;
432
433         /* Set the Use Extended Protocol Flags bit in LW 1 */
434         QAT_FIELD_SET(header->comn_req_flags,
435                         QAT_COMN_EXT_FLAGS_USED,
436                         QAT_COMN_EXT_FLAGS_BITPOS,
437                         QAT_COMN_EXT_FLAGS_MASK);
438
439         /* Set Hash Flags in LW 28 */
440         cd_ctrl->hash_flags |= hash_flag;
441
442         /* Set proto flags in LW 1 */
443         switch (session->qat_cipher_alg) {
444         case ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2:
445                 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
446                                 ICP_QAT_FW_LA_SNOW_3G_PROTO);
447                 ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
448                                 header->serv_specif_flags, 0);
449                 break;
450         case ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3:
451                 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
452                                 ICP_QAT_FW_LA_NO_PROTO);
453                 ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
454                                 header->serv_specif_flags,
455                                 ICP_QAT_FW_LA_ZUC_3G_PROTO);
456                 break;
457         default:
458                 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
459                                 ICP_QAT_FW_LA_NO_PROTO);
460                 ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
461                                 header->serv_specif_flags, 0);
462                 break;
463         }
464 }
465
466 static void
467 qat_sym_session_handle_mixed(struct qat_sym_session *session)
468 {
469         if (session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3 &&
470                         session->qat_cipher_alg !=
471                         ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
472                 session->min_qat_dev_gen = QAT_GEN3;
473                 qat_sym_session_set_ext_hash_flags(session,
474                         1 << ICP_QAT_FW_AUTH_HDR_FLAG_ZUC_EIA3_BITPOS);
475         } else if (session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2 &&
476                         session->qat_cipher_alg !=
477                         ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
478                 session->min_qat_dev_gen = QAT_GEN3;
479                 qat_sym_session_set_ext_hash_flags(session,
480                         1 << ICP_QAT_FW_AUTH_HDR_FLAG_SNOW3G_UIA2_BITPOS);
481         } else if ((session->aes_cmac ||
482                         session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_NULL) &&
483                         (session->qat_cipher_alg ==
484                         ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2 ||
485                         session->qat_cipher_alg ==
486                         ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)) {
487                 session->min_qat_dev_gen = QAT_GEN3;
488                 qat_sym_session_set_ext_hash_flags(session, 0);
489         }
490 }
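
/*
 * Illustrative sketch only (hypothetical): one "mixed" algorithm pair that
 * takes the extended-flags path above, SNOW 3G UEA2 cipher with AES-CMAC
 * auth. Such sessions are flagged as requiring a GEN3 device.
 */
static __rte_unused void
qat_sym_mixed_algs_sketch(struct qat_sym_session *session)
{
        session->qat_cipher_alg = ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2;
        session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
        session->aes_cmac = 1;

        qat_sym_session_handle_mixed(session);
        /* session->min_qat_dev_gen is now QAT_GEN3 */
}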
491
492 int
493 qat_sym_session_set_parameters(struct rte_cryptodev *dev,
494                 struct rte_crypto_sym_xform *xform, void *session_private)
495 {
496         struct qat_sym_session *session = session_private;
497         int ret;
498         int qat_cmd_id;
499
500         /* Set context descriptor physical address */
501         session->cd_paddr = rte_mempool_virt2iova(session) +
502                         offsetof(struct qat_sym_session, cd);
503
504         session->min_qat_dev_gen = QAT_GEN1;
505
506         /* Get requested QAT command id */
507         qat_cmd_id = qat_get_cmd_id(xform);
508         if (qat_cmd_id < 0 || qat_cmd_id >= ICP_QAT_FW_LA_CMD_DELIMITER) {
509                 QAT_LOG(ERR, "Unsupported xform chain requested");
510                 return -ENOTSUP;
511         }
512         session->qat_cmd = (enum icp_qat_fw_la_cmd_id)qat_cmd_id;
513         switch (session->qat_cmd) {
514         case ICP_QAT_FW_LA_CMD_CIPHER:
515                 ret = qat_sym_session_configure_cipher(dev, xform, session);
516                 if (ret < 0)
517                         return ret;
518                 break;
519         case ICP_QAT_FW_LA_CMD_AUTH:
520                 ret = qat_sym_session_configure_auth(dev, xform, session);
521                 if (ret < 0)
522                         return ret;
523                 break;
524         case ICP_QAT_FW_LA_CMD_CIPHER_HASH:
525                 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
526                         ret = qat_sym_session_configure_aead(dev, xform,
527                                         session);
528                         if (ret < 0)
529                                 return ret;
530                 } else {
531                         ret = qat_sym_session_configure_cipher(dev,
532                                         xform, session);
533                         if (ret < 0)
534                                 return ret;
535                         ret = qat_sym_session_configure_auth(dev,
536                                         xform, session);
537                         if (ret < 0)
538                                 return ret;
539                         /* Special handling of mixed hash+cipher algorithms */
540                         qat_sym_session_handle_mixed(session);
541                 }
542                 break;
543         case ICP_QAT_FW_LA_CMD_HASH_CIPHER:
544                 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
545                         ret = qat_sym_session_configure_aead(dev, xform,
546                                         session);
547                         if (ret < 0)
548                                 return ret;
549                 } else {
550                         ret = qat_sym_session_configure_auth(dev,
551                                         xform, session);
552                         if (ret < 0)
553                                 return ret;
554                         ret = qat_sym_session_configure_cipher(dev,
555                                         xform, session);
556                         if (ret < 0)
557                                 return ret;
558                         /* Special handling of mixed hash+cipher algorithms */
559                         qat_sym_session_handle_mixed(session);
560                 }
561                 break;
562         case ICP_QAT_FW_LA_CMD_TRNG_GET_RANDOM:
563         case ICP_QAT_FW_LA_CMD_TRNG_TEST:
564         case ICP_QAT_FW_LA_CMD_SSL3_KEY_DERIVE:
565         case ICP_QAT_FW_LA_CMD_TLS_V1_1_KEY_DERIVE:
566         case ICP_QAT_FW_LA_CMD_TLS_V1_2_KEY_DERIVE:
567         case ICP_QAT_FW_LA_CMD_MGF1:
568         case ICP_QAT_FW_LA_CMD_AUTH_PRE_COMP:
569         case ICP_QAT_FW_LA_CMD_CIPHER_PRE_COMP:
570         case ICP_QAT_FW_LA_CMD_DELIMITER:
571                 QAT_LOG(ERR, "Unsupported Service %u",
572                                 session->qat_cmd);
573                 return -ENOTSUP;
574         default:
575                 QAT_LOG(ERR, "Unsupported Service %u",
576                                 session->qat_cmd);
577                 return -ENOTSUP;
578         }
579
580         return 0;
581 }
582
583 static int
584 qat_sym_session_handle_single_pass(struct qat_sym_dev_private *internals,
585                 struct qat_sym_session *session,
586                 struct rte_crypto_aead_xform *aead_xform)
587 {
588         enum qat_device_gen qat_dev_gen = internals->qat_dev->qat_dev_gen;
589
590         if (qat_dev_gen == QAT_GEN3 &&
591                         aead_xform->iv.length == QAT_AES_GCM_SPC_IV_SIZE) {
592                 /* Use faster Single-Pass GCM */
593                 struct icp_qat_fw_la_cipher_req_params *cipher_param =
594                                 (void *) &session->fw_req.serv_specif_rqpars;
595
596                 session->is_single_pass = 1;
597                 session->min_qat_dev_gen = QAT_GEN3;
598                 session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER;
599                 session->qat_mode = ICP_QAT_HW_CIPHER_AEAD_MODE;
600                 session->cipher_iv.offset = aead_xform->iv.offset;
601                 session->cipher_iv.length = aead_xform->iv.length;
602                 if (qat_sym_session_aead_create_cd_cipher(session,
603                                 aead_xform->key.data, aead_xform->key.length))
604                         return -EINVAL;
605                 session->aad_len = aead_xform->aad_length;
606                 session->digest_length = aead_xform->digest_length;
607                 if (aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
608                         session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
609                         session->auth_op = ICP_QAT_HW_AUTH_GENERATE;
610                         ICP_QAT_FW_LA_RET_AUTH_SET(
611                                 session->fw_req.comn_hdr.serv_specif_flags,
612                                 ICP_QAT_FW_LA_RET_AUTH_RES);
613                 } else {
614                         session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
615                         session->auth_op = ICP_QAT_HW_AUTH_VERIFY;
616                         ICP_QAT_FW_LA_CMP_AUTH_SET(
617                                 session->fw_req.comn_hdr.serv_specif_flags,
618                                 ICP_QAT_FW_LA_CMP_AUTH_RES);
619                 }
620                 ICP_QAT_FW_LA_SINGLE_PASS_PROTO_FLAG_SET(
621                                 session->fw_req.comn_hdr.serv_specif_flags,
622                                 ICP_QAT_FW_LA_SINGLE_PASS_PROTO);
623                 ICP_QAT_FW_LA_PROTO_SET(
624                                 session->fw_req.comn_hdr.serv_specif_flags,
625                                 ICP_QAT_FW_LA_NO_PROTO);
626                 ICP_QAT_FW_LA_GCM_IV_LEN_FLAG_SET(
627                                 session->fw_req.comn_hdr.serv_specif_flags,
628                                 ICP_QAT_FW_LA_GCM_IV_LEN_12_OCTETS);
629                 session->fw_req.comn_hdr.service_cmd_id =
630                                 ICP_QAT_FW_LA_CMD_CIPHER;
631                 session->cd.cipher.cipher_config.val =
632                                 ICP_QAT_HW_CIPHER_CONFIG_BUILD(
633                                         ICP_QAT_HW_CIPHER_AEAD_MODE,
634                                         session->qat_cipher_alg,
635                                         ICP_QAT_HW_CIPHER_NO_CONVERT,
636                                         session->qat_dir);
637                 QAT_FIELD_SET(session->cd.cipher.cipher_config.val,
638                                 aead_xform->digest_length,
639                                 QAT_CIPHER_AEAD_HASH_CMP_LEN_BITPOS,
640                                 QAT_CIPHER_AEAD_HASH_CMP_LEN_MASK);
641                 session->cd.cipher.cipher_config.reserved =
642                                 ICP_QAT_HW_CIPHER_CONFIG_BUILD_UPPER(
643                                         aead_xform->aad_length);
644                 cipher_param->spc_aad_sz = aead_xform->aad_length;
645                 cipher_param->spc_auth_res_sz = aead_xform->digest_length;
646         }
647         return 0;
648 }
649
650 int
651 qat_sym_session_configure_auth(struct rte_cryptodev *dev,
652                                 struct rte_crypto_sym_xform *xform,
653                                 struct qat_sym_session *session)
654 {
655         struct rte_crypto_auth_xform *auth_xform = qat_get_auth_xform(xform);
656         struct qat_sym_dev_private *internals = dev->data->dev_private;
657         const uint8_t *key_data = auth_xform->key.data;
658         uint8_t key_length = auth_xform->key.length;
659         session->aes_cmac = 0;
660
661         session->auth_iv.offset = auth_xform->iv.offset;
662         session->auth_iv.length = auth_xform->iv.length;
663
664         switch (auth_xform->algo) {
665         case RTE_CRYPTO_AUTH_SHA1_HMAC:
666                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA1;
667                 break;
668         case RTE_CRYPTO_AUTH_SHA224_HMAC:
669                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA224;
670                 break;
671         case RTE_CRYPTO_AUTH_SHA256_HMAC:
672                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA256;
673                 break;
674         case RTE_CRYPTO_AUTH_SHA384_HMAC:
675                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA384;
676                 break;
677         case RTE_CRYPTO_AUTH_SHA512_HMAC:
678                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA512;
679                 break;
680         case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
681                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
682                 break;
683         case RTE_CRYPTO_AUTH_AES_CMAC:
684                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
685                 session->aes_cmac = 1;
686                 break;
687         case RTE_CRYPTO_AUTH_AES_GMAC:
688                 if (qat_sym_validate_aes_key(auth_xform->key.length,
689                                 &session->qat_cipher_alg) != 0) {
690                         QAT_LOG(ERR, "Invalid AES key size");
691                         return -EINVAL;
692                 }
693                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
694                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
695                 if (session->auth_iv.length == 0)
696                         session->auth_iv.length = AES_GCM_J0_LEN;
697
698                 break;
699         case RTE_CRYPTO_AUTH_SNOW3G_UIA2:
700                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2;
701                 break;
702         case RTE_CRYPTO_AUTH_MD5_HMAC:
703                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_MD5;
704                 break;
705         case RTE_CRYPTO_AUTH_NULL:
706                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_NULL;
707                 break;
708         case RTE_CRYPTO_AUTH_KASUMI_F9:
709                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_KASUMI_F9;
710                 break;
711         case RTE_CRYPTO_AUTH_ZUC_EIA3:
712                 if (!qat_is_auth_alg_supported(auth_xform->algo, internals)) {
713                         QAT_LOG(ERR, "%s not supported on this device",
714                                 rte_crypto_auth_algorithm_strings
715                                 [auth_xform->algo]);
716                         return -ENOTSUP;
717                 }
718                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3;
719                 break;
720         case RTE_CRYPTO_AUTH_SHA1:
721         case RTE_CRYPTO_AUTH_SHA256:
722         case RTE_CRYPTO_AUTH_SHA512:
723         case RTE_CRYPTO_AUTH_SHA224:
724         case RTE_CRYPTO_AUTH_SHA384:
725         case RTE_CRYPTO_AUTH_MD5:
726         case RTE_CRYPTO_AUTH_AES_CBC_MAC:
727                 QAT_LOG(ERR, "Crypto: Unsupported hash alg %u",
728                                 auth_xform->algo);
729                 return -ENOTSUP;
730         default:
731                 QAT_LOG(ERR, "Crypto: Undefined Hash algo %u specified",
732                                 auth_xform->algo);
733                 return -EINVAL;
734         }
735
736         if (auth_xform->algo == RTE_CRYPTO_AUTH_AES_GMAC) {
737                 if (auth_xform->op == RTE_CRYPTO_AUTH_OP_GENERATE) {
738                         session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER_HASH;
739                         session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
740                         /*
741                          * The cipher descriptor content must be created
742                          * first, then the authentication content.
743                          */
744
745                         if (qat_sym_session_aead_create_cd_cipher(session,
746                                                 auth_xform->key.data,
747                                                 auth_xform->key.length))
748                                 return -EINVAL;
749
750                         if (qat_sym_session_aead_create_cd_auth(session,
751                                                 key_data,
752                                                 key_length,
753                                                 0,
754                                                 auth_xform->digest_length,
755                                                 auth_xform->op))
756                                 return -EINVAL;
757                 } else {
758                         session->qat_cmd = ICP_QAT_FW_LA_CMD_HASH_CIPHER;
759                         session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
760                         /*
761                          * The authentication descriptor content must be
762                          * created first, then the cipher content.
763                          */
764
765                         if (qat_sym_session_aead_create_cd_auth(session,
766                                         key_data,
767                                         key_length,
768                                         0,
769                                         auth_xform->digest_length,
770                                         auth_xform->op))
771                                 return -EINVAL;
772
773                         if (qat_sym_session_aead_create_cd_cipher(session,
774                                                 auth_xform->key.data,
775                                                 auth_xform->key.length))
776                                 return -EINVAL;
777                 }
778                 /* Restore to authentication only */
779                 session->qat_cmd = ICP_QAT_FW_LA_CMD_AUTH;
780         } else {
781                 if (qat_sym_session_aead_create_cd_auth(session,
782                                 key_data,
783                                 key_length,
784                                 0,
785                                 auth_xform->digest_length,
786                                 auth_xform->op))
787                         return -EINVAL;
788         }
789
790         session->digest_length = auth_xform->digest_length;
791         return 0;
792 }
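
/*
 * Illustrative sketch only (hypothetical values): the auth xform fields
 * consumed by qat_sym_session_configure_auth() for SHA-256 HMAC.
 */
static __rte_unused void
qat_auth_xform_sha256_hmac_sketch(struct rte_crypto_sym_xform *xform,
                const uint8_t *key, uint16_t key_len)
{
        xform->type = RTE_CRYPTO_SYM_XFORM_AUTH;
        xform->next = NULL;
        xform->auth.algo = RTE_CRYPTO_AUTH_SHA256_HMAC;
        xform->auth.op = RTE_CRYPTO_AUTH_OP_GENERATE;
        xform->auth.key.data = key;
        xform->auth.key.length = key_len;
        xform->auth.digest_length = 32; /* full SHA-256 digest */
        xform->auth.iv.offset = 0;      /* unused for HMAC algorithms */
        xform->auth.iv.length = 0;
}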
793
794 int
795 qat_sym_session_configure_aead(struct rte_cryptodev *dev,
796                                 struct rte_crypto_sym_xform *xform,
797                                 struct qat_sym_session *session)
798 {
799         struct rte_crypto_aead_xform *aead_xform = &xform->aead;
800         enum rte_crypto_auth_operation crypto_operation;
801
802         /*
803          * Store AEAD IV parameters as cipher IV,
804          * to avoid unnecessary memory usage
805          */
806         session->cipher_iv.offset = xform->aead.iv.offset;
807         session->cipher_iv.length = xform->aead.iv.length;
808
809         switch (aead_xform->algo) {
810         case RTE_CRYPTO_AEAD_AES_GCM:
811                 if (qat_sym_validate_aes_key(aead_xform->key.length,
812                                 &session->qat_cipher_alg) != 0) {
813                         QAT_LOG(ERR, "Invalid AES key size");
814                         return -EINVAL;
815                 }
816                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
817                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
818                 if (session->cipher_iv.length == 0)
819                         session->cipher_iv.length = AES_GCM_J0_LEN;
820
821                 break;
822         case RTE_CRYPTO_AEAD_AES_CCM:
823                 if (qat_sym_validate_aes_key(aead_xform->key.length,
824                                 &session->qat_cipher_alg) != 0) {
825                         QAT_LOG(ERR, "Invalid AES key size");
826                         return -EINVAL;
827                 }
828                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
829                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC;
830                 break;
831         default:
832                 QAT_LOG(ERR, "Crypto: Undefined AEAD specified %u",
833                                 aead_xform->algo);
834                 return -EINVAL;
835         }
836
837         session->is_single_pass = 0;
838         if (aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) {
839                 /* Use faster Single-Pass GCM if possible */
840                 int res = qat_sym_session_handle_single_pass(
841                                 dev->data->dev_private, session, aead_xform);
842                 if (res < 0)
843                         return res;
844                 if (session->is_single_pass)
845                         return 0;
846         }
847
848         if ((aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT &&
849                         aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) ||
850                         (aead_xform->op == RTE_CRYPTO_AEAD_OP_DECRYPT &&
851                         aead_xform->algo == RTE_CRYPTO_AEAD_AES_CCM)) {
852                 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
853                 /*
854                  * The cipher descriptor content must be created first,
855                  * then the authentication content.
856                  */
857                 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
858                         RTE_CRYPTO_AUTH_OP_GENERATE : RTE_CRYPTO_AUTH_OP_VERIFY;
859
860                 if (qat_sym_session_aead_create_cd_cipher(session,
861                                         aead_xform->key.data,
862                                         aead_xform->key.length))
863                         return -EINVAL;
864
865                 if (qat_sym_session_aead_create_cd_auth(session,
866                                         aead_xform->key.data,
867                                         aead_xform->key.length,
868                                         aead_xform->aad_length,
869                                         aead_xform->digest_length,
870                                         crypto_operation))
871                         return -EINVAL;
872         } else {
873                 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
874                 /*
875                  * The authentication descriptor content must be created
876                  * first, then the cipher content.
877                  */
878
879                 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
880                         RTE_CRYPTO_AUTH_OP_VERIFY : RTE_CRYPTO_AUTH_OP_GENERATE;
881
882                 if (qat_sym_session_aead_create_cd_auth(session,
883                                         aead_xform->key.data,
884                                         aead_xform->key.length,
885                                         aead_xform->aad_length,
886                                         aead_xform->digest_length,
887                                         crypto_operation))
888                         return -EINVAL;
889
890                 if (qat_sym_session_aead_create_cd_cipher(session,
891                                         aead_xform->key.data,
892                                         aead_xform->key.length))
893                         return -EINVAL;
894         }
895
896         session->digest_length = aead_xform->digest_length;
897         return 0;
898 }
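
/*
 * Illustrative sketch only (hypothetical values): an AES-128-GCM AEAD xform
 * as consumed by qat_sym_session_configure_aead(). With a 12-byte IV on a
 * GEN3 device the session takes the Single-Pass path above.
 */
static __rte_unused void
qat_aead_xform_aes_gcm_sketch(struct rte_crypto_sym_xform *xform,
                const uint8_t *key)
{
        xform->type = RTE_CRYPTO_SYM_XFORM_AEAD;
        xform->next = NULL;
        xform->aead.algo = RTE_CRYPTO_AEAD_AES_GCM;
        xform->aead.op = RTE_CRYPTO_AEAD_OP_ENCRYPT;
        xform->aead.key.data = key;
        xform->aead.key.length = ICP_QAT_HW_AES_128_KEY_SZ;
        xform->aead.iv.offset = 0;      /* app-defined offset into the op */
        xform->aead.iv.length = 12;     /* QAT_AES_GCM_SPC_IV_SIZE */
        xform->aead.aad_length = 16;
        xform->aead.digest_length = 16;
}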
899
900 unsigned int qat_sym_session_get_private_size(
901                 struct rte_cryptodev *dev __rte_unused)
902 {
903         return RTE_ALIGN_CEIL(sizeof(struct qat_sym_session), 8);
904 }
905
906 /* returns block size in bytes per cipher algo */
907 int qat_cipher_get_block_size(enum icp_qat_hw_cipher_algo qat_cipher_alg)
908 {
909         switch (qat_cipher_alg) {
910         case ICP_QAT_HW_CIPHER_ALGO_DES:
911                 return ICP_QAT_HW_DES_BLK_SZ;
912         case ICP_QAT_HW_CIPHER_ALGO_3DES:
913                 return ICP_QAT_HW_3DES_BLK_SZ;
914         case ICP_QAT_HW_CIPHER_ALGO_AES128:
915         case ICP_QAT_HW_CIPHER_ALGO_AES192:
916         case ICP_QAT_HW_CIPHER_ALGO_AES256:
917                 return ICP_QAT_HW_AES_BLK_SZ;
918         default:
919                 QAT_LOG(ERR, "invalid block cipher alg %u", qat_cipher_alg);
920                 return -EFAULT;
921         };
922         return -EFAULT;
923 }
924
925 /*
926  * Returns size in bytes per hash algo for the state1 size field in cd_ctrl.
927  * This is the digest size rounded up to the nearest quadword.
928  */
929 static int qat_hash_get_state1_size(enum icp_qat_hw_auth_algo qat_hash_alg)
930 {
931         switch (qat_hash_alg) {
932         case ICP_QAT_HW_AUTH_ALGO_SHA1:
933                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA1_STATE1_SZ,
934                                                 QAT_HW_DEFAULT_ALIGNMENT);
935         case ICP_QAT_HW_AUTH_ALGO_SHA224:
936                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA224_STATE1_SZ,
937                                                 QAT_HW_DEFAULT_ALIGNMENT);
938         case ICP_QAT_HW_AUTH_ALGO_SHA256:
939                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA256_STATE1_SZ,
940                                                 QAT_HW_DEFAULT_ALIGNMENT);
941         case ICP_QAT_HW_AUTH_ALGO_SHA384:
942                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA384_STATE1_SZ,
943                                                 QAT_HW_DEFAULT_ALIGNMENT);
944         case ICP_QAT_HW_AUTH_ALGO_SHA512:
945                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
946                                                 QAT_HW_DEFAULT_ALIGNMENT);
947         case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
948                 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ,
949                                                 QAT_HW_DEFAULT_ALIGNMENT);
950         case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
951         case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
952                 return QAT_HW_ROUND_UP(ICP_QAT_HW_GALOIS_128_STATE1_SZ,
953                                                 QAT_HW_DEFAULT_ALIGNMENT);
954         case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
955                 return QAT_HW_ROUND_UP(ICP_QAT_HW_ZUC_3G_EIA3_STATE1_SZ,
956                                                 QAT_HW_DEFAULT_ALIGNMENT);
957         case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
958                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SNOW_3G_UIA2_STATE1_SZ,
959                                                 QAT_HW_DEFAULT_ALIGNMENT);
960         case ICP_QAT_HW_AUTH_ALGO_MD5:
961                 return QAT_HW_ROUND_UP(ICP_QAT_HW_MD5_STATE1_SZ,
962                                                 QAT_HW_DEFAULT_ALIGNMENT);
963         case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
964                 return QAT_HW_ROUND_UP(ICP_QAT_HW_KASUMI_F9_STATE1_SZ,
965                                                 QAT_HW_DEFAULT_ALIGNMENT);
966         case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
967                 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_CBC_MAC_STATE1_SZ,
968                                                 QAT_HW_DEFAULT_ALIGNMENT);
969         case ICP_QAT_HW_AUTH_ALGO_NULL:
970                 return QAT_HW_ROUND_UP(ICP_QAT_HW_NULL_STATE1_SZ,
971                                                 QAT_HW_DEFAULT_ALIGNMENT);
972         case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
973                 /* return maximum state1 size in this case */
974                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
975                                                 QAT_HW_DEFAULT_ALIGNMENT);
976         default:
977                 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
978                 return -EFAULT;
979         };
980         return -EFAULT;
981 }
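
/*
 * Worked example (illustrative): for SHA-1 the state1 size is the 20-byte
 * digest state rounded up to QAT_HW_DEFAULT_ALIGNMENT (8 bytes), i.e.
 * QAT_HW_ROUND_UP(20, 8) = 24 bytes in the content descriptor control block.
 */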
982
983 /* returns digest size in bytes per hash algo */
984 static int qat_hash_get_digest_size(enum icp_qat_hw_auth_algo qat_hash_alg)
985 {
986         switch (qat_hash_alg) {
987         case ICP_QAT_HW_AUTH_ALGO_SHA1:
988                 return ICP_QAT_HW_SHA1_STATE1_SZ;
989         case ICP_QAT_HW_AUTH_ALGO_SHA224:
990                 return ICP_QAT_HW_SHA224_STATE1_SZ;
991         case ICP_QAT_HW_AUTH_ALGO_SHA256:
992                 return ICP_QAT_HW_SHA256_STATE1_SZ;
993         case ICP_QAT_HW_AUTH_ALGO_SHA384:
994                 return ICP_QAT_HW_SHA384_STATE1_SZ;
995         case ICP_QAT_HW_AUTH_ALGO_SHA512:
996                 return ICP_QAT_HW_SHA512_STATE1_SZ;
997         case ICP_QAT_HW_AUTH_ALGO_MD5:
998                 return ICP_QAT_HW_MD5_STATE1_SZ;
999         case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
1000                 return ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1001         case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
1002                 /* return maximum digest size in this case */
1003                 return ICP_QAT_HW_SHA512_STATE1_SZ;
1004         default:
1005                 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
1006                 return -EFAULT;
1007         };
1008         return -EFAULT;
1009 }
1010
1011 /* returns block size in bytes per hash algo */
1012 static int qat_hash_get_block_size(enum icp_qat_hw_auth_algo qat_hash_alg)
1013 {
1014         switch (qat_hash_alg) {
1015         case ICP_QAT_HW_AUTH_ALGO_SHA1:
1016                 return SHA_CBLOCK;
1017         case ICP_QAT_HW_AUTH_ALGO_SHA224:
1018                 return SHA256_CBLOCK;
1019         case ICP_QAT_HW_AUTH_ALGO_SHA256:
1020                 return SHA256_CBLOCK;
1021         case ICP_QAT_HW_AUTH_ALGO_SHA384:
1022                 return SHA512_CBLOCK;
1023         case ICP_QAT_HW_AUTH_ALGO_SHA512:
1024                 return SHA512_CBLOCK;
1025         case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
1026                 return 16;
1027         case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
1028                 return ICP_QAT_HW_AES_BLK_SZ;
1029         case ICP_QAT_HW_AUTH_ALGO_MD5:
1030                 return MD5_CBLOCK;
1031         case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
1032                 /* return maximum block size in this case */
1033                 return SHA512_CBLOCK;
1034         default:
1035                 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
1036                 return -EFAULT;
1037         };
1038         return -EFAULT;
1039 }
1040
1041 static int partial_hash_sha1(uint8_t *data_in, uint8_t *data_out)
1042 {
1043         SHA_CTX ctx;
1044
1045         if (!SHA1_Init(&ctx))
1046                 return -EFAULT;
1047         SHA1_Transform(&ctx, data_in);
1048         rte_memcpy(data_out, &ctx, SHA_DIGEST_LENGTH);
1049         return 0;
1050 }
1051
1052 static int partial_hash_sha224(uint8_t *data_in, uint8_t *data_out)
1053 {
1054         SHA256_CTX ctx;
1055
1056         if (!SHA224_Init(&ctx))
1057                 return -EFAULT;
1058         SHA256_Transform(&ctx, data_in);
1059         rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
1060         return 0;
1061 }
1062
1063 static int partial_hash_sha256(uint8_t *data_in, uint8_t *data_out)
1064 {
1065         SHA256_CTX ctx;
1066
1067         if (!SHA256_Init(&ctx))
1068                 return -EFAULT;
1069         SHA256_Transform(&ctx, data_in);
1070         rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
1071         return 0;
1072 }
1073
1074 static int partial_hash_sha384(uint8_t *data_in, uint8_t *data_out)
1075 {
1076         SHA512_CTX ctx;
1077
1078         if (!SHA384_Init(&ctx))
1079                 return -EFAULT;
1080         SHA512_Transform(&ctx, data_in);
1081         rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
1082         return 0;
1083 }
1084
1085 static int partial_hash_sha512(uint8_t *data_in, uint8_t *data_out)
1086 {
1087         SHA512_CTX ctx;
1088
1089         if (!SHA512_Init(&ctx))
1090                 return -EFAULT;
1091         SHA512_Transform(&ctx, data_in);
1092         rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
1093         return 0;
1094 }
1095
1096 static int partial_hash_md5(uint8_t *data_in, uint8_t *data_out)
1097 {
1098         MD5_CTX ctx;
1099
1100         if (!MD5_Init(&ctx))
1101                 return -EFAULT;
1102         MD5_Transform(&ctx, data_in);
1103         rte_memcpy(data_out, &ctx, MD5_DIGEST_LENGTH);
1104
1105         return 0;
1106 }
1107
1108 static int partial_hash_compute(enum icp_qat_hw_auth_algo hash_alg,
1109                         uint8_t *data_in,
1110                         uint8_t *data_out)
1111 {
1112         int digest_size;
1113         uint8_t digest[qat_hash_get_digest_size(
1114                         ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1115         uint32_t *hash_state_out_be32;
1116         uint64_t *hash_state_out_be64;
1117         int i;
1118
1119         digest_size = qat_hash_get_digest_size(hash_alg);
1120         if (digest_size <= 0)
1121                 return -EFAULT;
1122
1123         hash_state_out_be32 = (uint32_t *)data_out;
1124         hash_state_out_be64 = (uint64_t *)data_out;
1125
1126         switch (hash_alg) {
1127         case ICP_QAT_HW_AUTH_ALGO_SHA1:
1128                 if (partial_hash_sha1(data_in, digest))
1129                         return -EFAULT;
1130                 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
1131                         *hash_state_out_be32 =
1132                                 rte_bswap32(*(((uint32_t *)digest)+i));
1133                 break;
1134         case ICP_QAT_HW_AUTH_ALGO_SHA224:
1135                 if (partial_hash_sha224(data_in, digest))
1136                         return -EFAULT;
1137                 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
1138                         *hash_state_out_be32 =
1139                                 rte_bswap32(*(((uint32_t *)digest)+i));
1140                 break;
1141         case ICP_QAT_HW_AUTH_ALGO_SHA256:
1142                 if (partial_hash_sha256(data_in, digest))
1143                         return -EFAULT;
1144                 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
1145                         *hash_state_out_be32 =
1146                                 rte_bswap32(*(((uint32_t *)digest)+i));
1147                 break;
1148         case ICP_QAT_HW_AUTH_ALGO_SHA384:
1149                 if (partial_hash_sha384(data_in, digest))
1150                         return -EFAULT;
1151                 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
1152                         *hash_state_out_be64 =
1153                                 rte_bswap64(*(((uint64_t *)digest)+i));
1154                 break;
1155         case ICP_QAT_HW_AUTH_ALGO_SHA512:
1156                 if (partial_hash_sha512(data_in, digest))
1157                         return -EFAULT;
1158                 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
1159                         *hash_state_out_be64 =
1160                                 rte_bswap64(*(((uint64_t *)digest)+i));
1161                 break;
1162         case ICP_QAT_HW_AUTH_ALGO_MD5:
1163                 if (partial_hash_md5(data_in, data_out))
1164                         return -EFAULT;
1165                 break;
1166         default:
1167                 QAT_LOG(ERR, "invalid hash alg %u", hash_alg);
1168                 return -EFAULT;
1169         }
1170
1171         return 0;
1172 }
1173 #define HMAC_IPAD_VALUE 0x36
1174 #define HMAC_OPAD_VALUE 0x5c
1175 #define HASH_XCBC_PRECOMP_KEY_NUM 3
1176
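
/*
 * Illustrative sketch only (hypothetical helper): computing the HMAC inner
 * ("ipad") partial state for SHA-1, the same idea the precompute code below
 * applies for both ipad and opad. state_out must hold at least the SHA-1
 * digest size; keys longer than one block would first be hashed down.
 */
static __rte_unused int
hmac_sha1_ipad_state_sketch(const uint8_t *key, uint16_t key_len,
                uint8_t *state_out)
{
        uint8_t ipad[SHA_CBLOCK];
        unsigned int i;

        if (key_len > sizeof(ipad))
                return -EINVAL;

        /* Pad the key to a full block and XOR with the ipad constant */
        memset(ipad, HMAC_IPAD_VALUE, sizeof(ipad));
        for (i = 0; i < key_len; i++)
                ipad[i] ^= key[i];

        /* One compression-function pass, byte-swapped for the QAT layout */
        return partial_hash_compute(ICP_QAT_HW_AUTH_ALGO_SHA1, ipad,
                        state_out);
}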
1177 static const uint8_t AES_CMAC_SEED[ICP_QAT_HW_AES_128_KEY_SZ];
1178
1179 static void aes_cmac_key_derive(uint8_t *base, uint8_t *derived)
1180 {
1181         int i;
1182
1183         derived[0] = base[0] << 1;
1184         for (i = 1; i < ICP_QAT_HW_AES_BLK_SZ ; i++) {
1185                 derived[i] = base[i] << 1;
1186                 derived[i - 1] |= base[i] >> 7;
1187         }
1188
1189         if (base[0] & 0x80)
1190                 derived[ICP_QAT_HW_AES_BLK_SZ - 1] ^= QAT_AES_CMAC_CONST_RB;
1191 }
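
/*
 * Illustrative sketch only (hypothetical helper): deriving both CMAC
 * subkeys from L = AES-128(K, 0^128), per NIST SP 800-38B. K1 is L shifted
 * left by one bit, XORed with Rb (0x87 for 128-bit blocks) when the MSB of
 * L was set; K2 is derived from K1 the same way.
 */
static __rte_unused void
aes_cmac_subkeys_sketch(uint8_t *l_value, uint8_t *k1, uint8_t *k2)
{
        aes_cmac_key_derive(l_value, k1);       /* K1 from L  */
        aes_cmac_key_derive(k1, k2);            /* K2 from K1 */
}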
1192
1193 static int qat_sym_do_precomputes(enum icp_qat_hw_auth_algo hash_alg,
1194                                 const uint8_t *auth_key,
1195                                 uint16_t auth_keylen,
1196                                 uint8_t *p_state_buf,
1197                                 uint16_t *p_state_len,
1198                                 uint8_t aes_cmac)
1199 {
1200         int block_size;
1201         uint8_t ipad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1202         uint8_t opad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1203         int i;
1204
1205         if (hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC) {
1206
1207                 /* CMAC */
1208                 if (aes_cmac) {
1209                         AES_KEY enc_key;
1210                         uint8_t *in = NULL;
1211                         uint8_t k0[ICP_QAT_HW_AES_128_KEY_SZ];
1212                         uint8_t *k1, *k2;
1213
1214                         auth_keylen = ICP_QAT_HW_AES_128_KEY_SZ;
1215
1216                         in = rte_zmalloc("AES CMAC K1",
1217                                          ICP_QAT_HW_AES_128_KEY_SZ, 16);
1218
1219                         if (in == NULL) {
1220                                 QAT_LOG(ERR, "Failed to alloc memory");
1221                                 return -ENOMEM;
1222                         }
1223
1224                         rte_memcpy(in, AES_CMAC_SEED,
1225                                    ICP_QAT_HW_AES_128_KEY_SZ);
1226                         rte_memcpy(p_state_buf, auth_key, auth_keylen);
1227
1228                         if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1229                                 &enc_key) != 0) {
1230                                 rte_free(in);
1231                                 return -EFAULT;
1232                         }
1233
1234                         AES_encrypt(in, k0, &enc_key);
1235
1236                         k1 = p_state_buf + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1237                         k2 = k1 + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1238
1239                         aes_cmac_key_derive(k0, k1);
1240                         aes_cmac_key_derive(k1, k2);
1241
1242                         memset(k0, 0, ICP_QAT_HW_AES_128_KEY_SZ);
1243                         *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
1244                         rte_free(in);
1245                         return 0;
1246                 } else {
1247                         static uint8_t qat_aes_xcbc_key_seed[
1248                                         ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ] = {
1249                                 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1250                                 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1251                                 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1252                                 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1253                                 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1254                                 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1255                         };
1256
1257                         uint8_t *in = NULL;
1258                         uint8_t *out = p_state_buf;
1259                         int x;
1260                         AES_KEY enc_key;
1261
1262                         in = rte_zmalloc("working mem for key",
1263                                         ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ, 16);
1264                         if (in == NULL) {
1265                                 QAT_LOG(ERR, "Failed to alloc memory");
1266                                 return -ENOMEM;
1267                         }
1268
1269                         rte_memcpy(in, qat_aes_xcbc_key_seed,
1270                                         ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1271                         for (x = 0; x < HASH_XCBC_PRECOMP_KEY_NUM; x++) {
1272                                 if (AES_set_encrypt_key(auth_key,
1273                                                         auth_keylen << 3,
1274                                                         &enc_key) != 0) {
1275                                         rte_free(in -
1276                                           (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ));
1277                                         memset(out -
1278                                            (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ),
1279                                           0, ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1280                                         return -EFAULT;
1281                                 }
1282                                 AES_encrypt(in, out, &enc_key);
1283                                 in += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1284                                 out += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1285                         }
1286                         *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
1287                         rte_free(in - x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ);
1288                         return 0;
1289                 }
1290
1291         } else if ((hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_128) ||
1292                 (hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_64)) {
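                /*
                 * GCM/GMAC: precompute the GHASH key H = AES(K, 0^128) and
                 * store it at the start of state2; the trailing LEN_A and
                 * E(CTR0) areas are left zeroed here.
                 */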
1293                 uint8_t *in = NULL;
1294                 uint8_t *out = p_state_buf;
1295                 AES_KEY enc_key;
1296
1297                 memset(p_state_buf, 0, ICP_QAT_HW_GALOIS_H_SZ +
1298                                 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1299                                 ICP_QAT_HW_GALOIS_E_CTR0_SZ);
1300                 in = rte_zmalloc("working mem for key",
1301                                 ICP_QAT_HW_GALOIS_H_SZ, 16);
1302                 if (in == NULL) {
1303                         QAT_LOG(ERR, "Failed to alloc memory");
1304                         return -ENOMEM;
1305                 }
1306
1307                 memset(in, 0, ICP_QAT_HW_GALOIS_H_SZ);
1308                 if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1309                         &enc_key) != 0) {
1310                         rte_free(in);
                             return -EFAULT;
1311                 }
1312                 AES_encrypt(in, out, &enc_key);
1313                 *p_state_len = ICP_QAT_HW_GALOIS_H_SZ +
1314                                 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1315                                 ICP_QAT_HW_GALOIS_E_CTR0_SZ;
1316                 rte_free(in);
1317                 return 0;
1318         }
1319
1320         block_size = qat_hash_get_block_size(hash_alg);
1321         if (block_size < 0)
1322                 return block_size;
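        /*
         * HMAC (RFC 2104) precompute: the hardware resumes two partial
         * hashes, one of (key ^ ipad) and one of (key ^ opad), where ipad and
         * opad are the 0x36 and 0x5c pad bytes repeated to the block size.
         */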
1323         /* init ipad and opad from key and xor with fixed values */
1324         memset(ipad, 0, block_size);
1325         memset(opad, 0, block_size);
1326
1327         if (auth_keylen > (unsigned int)block_size) {
1328                 QAT_LOG(ERR, "invalid keylen %u", auth_keylen);
1329                 return -EFAULT;
1330         }
1331         rte_memcpy(ipad, auth_key, auth_keylen);
1332         rte_memcpy(opad, auth_key, auth_keylen);
1333
1334         for (i = 0; i < block_size; i++) {
1335                 uint8_t *ipad_ptr = ipad + i;
1336                 uint8_t *opad_ptr = opad + i;
1337                 *ipad_ptr ^= HMAC_IPAD_VALUE;
1338                 *opad_ptr ^= HMAC_OPAD_VALUE;
1339         }
1340
1341         /* do partial hash of ipad and copy to state1 */
1342         if (partial_hash_compute(hash_alg, ipad, p_state_buf)) {
1343                 memset(ipad, 0, block_size);
1344                 memset(opad, 0, block_size);
1345                 QAT_LOG(ERR, "ipad precompute failed");
1346                 return -EFAULT;
1347         }
1348
1349         /*
1350          * The state length is a multiple of 8, so it may be larger than the
1351          * digest; place the partial hash of opad state_len bytes after state1.
1352          */
1353         *p_state_len = qat_hash_get_state1_size(hash_alg);
1354         if (partial_hash_compute(hash_alg, opad, p_state_buf + *p_state_len)) {
1355                 memset(ipad, 0, block_size);
1356                 memset(opad, 0, block_size);
1357                 QAT_LOG(ERR, "opad precompute failed");
1358                 return -EFAULT;
1359         }
1360
1361         /* don't leave key material lying around */
1362         memset(ipad, 0, block_size);
1363         memset(opad, 0, block_size);
1364         return 0;
1365 }
1366
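/*
 * Initialise the common LA request header: flat buffers, 64-bit content
 * descriptor address, no partial packets, 16-byte IV field, and the
 * CCM/GCM/SNOW 3G/ZUC protocol flag requested by the caller.
 */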
1367 static void
1368 qat_sym_session_init_common_hdr(struct icp_qat_fw_comn_req_hdr *header,
1369                 enum qat_sym_proto_flag proto_flags)
1370 {
1371         header->hdr_flags =
1372                 ICP_QAT_FW_COMN_HDR_FLAGS_BUILD(ICP_QAT_FW_COMN_REQ_FLAG_SET);
1373         header->service_type = ICP_QAT_FW_COMN_REQ_CPM_FW_LA;
1374         header->comn_req_flags =
1375                 ICP_QAT_FW_COMN_FLAGS_BUILD(QAT_COMN_CD_FLD_TYPE_64BIT_ADR,
1376                                         QAT_COMN_PTR_TYPE_FLAT);
1377         ICP_QAT_FW_LA_PARTIAL_SET(header->serv_specif_flags,
1378                                   ICP_QAT_FW_LA_PARTIAL_NONE);
1379         ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(header->serv_specif_flags,
1380                                            ICP_QAT_FW_CIPH_IV_16BYTE_DATA);
1381
1382         switch (proto_flags) {
1383         case QAT_CRYPTO_PROTO_FLAG_NONE:
1384                 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1385                                         ICP_QAT_FW_LA_NO_PROTO);
1386                 break;
1387         case QAT_CRYPTO_PROTO_FLAG_CCM:
1388                 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1389                                         ICP_QAT_FW_LA_CCM_PROTO);
1390                 break;
1391         case QAT_CRYPTO_PROTO_FLAG_GCM:
1392                 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1393                                         ICP_QAT_FW_LA_GCM_PROTO);
1394                 break;
1395         case QAT_CRYPTO_PROTO_FLAG_SNOW3G:
1396                 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1397                                         ICP_QAT_FW_LA_SNOW_3G_PROTO);
1398                 break;
1399         case QAT_CRYPTO_PROTO_FLAG_ZUC:
1400                 ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(header->serv_specif_flags,
1401                         ICP_QAT_FW_LA_ZUC_3G_PROTO);
1402                 break;
1403         }
1404
1405         ICP_QAT_FW_LA_UPDATE_STATE_SET(header->serv_specif_flags,
1406                                            ICP_QAT_FW_LA_NO_UPDATE_STATE);
1407         ICP_QAT_FW_LA_DIGEST_IN_BUFFER_SET(header->serv_specif_flags,
1408                                         ICP_QAT_FW_LA_NO_DIGEST_IN_BUFFER);
1409 }
1410
1411 /*
1412  * SNOW 3G and ZUC must never use this function; their protocol flag is set
1413  * directly in both the cipher and auth parts of the content descriptor
1414  * building functions.
1415  */
1416 static enum qat_sym_proto_flag
1417 qat_get_crypto_proto_flag(uint16_t flags)
1418 {
1419         int proto = ICP_QAT_FW_LA_PROTO_GET(flags);
1420         enum qat_sym_proto_flag qat_proto_flag =
1421                         QAT_CRYPTO_PROTO_FLAG_NONE;
1422
1423         switch (proto) {
1424         case ICP_QAT_FW_LA_GCM_PROTO:
1425                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
1426                 break;
1427         case ICP_QAT_FW_LA_CCM_PROTO:
1428                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
1429                 break;
1430         }
1431
1432         return qat_proto_flag;
1433 }
1434
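/*
 * Build the cipher half of the content descriptor: program the slice chain
 * for cipher-only or cipher-then-hash commands, decide whether the hardware
 * should expand the key (key convert), and copy the key material, padding or
 * duplicating it where the algorithm needs more key bytes than were supplied.
 */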
1435 int qat_sym_session_aead_create_cd_cipher(struct qat_sym_session *cdesc,
1436                                                 const uint8_t *cipherkey,
1437                                                 uint32_t cipherkeylen)
1438 {
1439         struct icp_qat_hw_cipher_algo_blk *cipher;
1440         struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
1441         struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
1442         struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
1443         void *ptr = &req_tmpl->cd_ctrl;
1444         struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
1445         struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
1446         enum icp_qat_hw_cipher_convert key_convert;
1447         enum qat_sym_proto_flag qat_proto_flag =
1448                 QAT_CRYPTO_PROTO_FLAG_NONE;
1449         uint32_t total_key_size;
1450         uint16_t cipher_offset, cd_size;
1451         uint32_t wordIndex  = 0;
1452         uint32_t *temp_key = NULL;
1453
1454         if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER) {
1455                 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1456                 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1457                                         ICP_QAT_FW_SLICE_CIPHER);
1458                 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1459                                         ICP_QAT_FW_SLICE_DRAM_WR);
1460                 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1461                                         ICP_QAT_FW_LA_NO_RET_AUTH_RES);
1462                 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1463                                         ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
1464                 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1465         } else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
1466                 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1467                 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1468                                         ICP_QAT_FW_SLICE_CIPHER);
1469                 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1470                                         ICP_QAT_FW_SLICE_AUTH);
1471                 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1472                                         ICP_QAT_FW_SLICE_AUTH);
1473                 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1474                                         ICP_QAT_FW_SLICE_DRAM_WR);
1475                 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1476         } else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
1477                 QAT_LOG(ERR, "Invalid param, must be a cipher command.");
1478                 return -EFAULT;
1479         }
1480
1481         if (cdesc->qat_mode == ICP_QAT_HW_CIPHER_CTR_MODE) {
1482                 /*
1483                  * CTR-mode stream ciphers are a special case: decrypt is the
1484                  * same as encrypt, so override the defaults set previously.
1485                  */
1486                 cdesc->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
1487                 key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
1488         } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2
1489                 || cdesc->qat_cipher_alg ==
1490                         ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)
1491                 key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
1492         else if (cdesc->qat_dir == ICP_QAT_HW_CIPHER_ENCRYPT)
1493                 key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
1494         else
1495                 key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
1496
1497         if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
1498                 total_key_size = ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ +
1499                         ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
1500                 cipher_cd_ctrl->cipher_state_sz =
1501                         ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
1502                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
1503
1504         } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
1505                 total_key_size = ICP_QAT_HW_KASUMI_F8_KEY_SZ;
1506                 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_KASUMI_BLK_SZ >> 3;
1507                 cipher_cd_ctrl->cipher_padding_sz =
1508                                         (2 * ICP_QAT_HW_KASUMI_BLK_SZ) >> 3;
1509         } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES) {
1510                 total_key_size = ICP_QAT_HW_3DES_KEY_SZ;
1511                 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_3DES_BLK_SZ >> 3;
1512                 qat_proto_flag =
1513                         qat_get_crypto_proto_flag(header->serv_specif_flags);
1514         } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_DES) {
1515                 total_key_size = ICP_QAT_HW_DES_KEY_SZ;
1516                 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_DES_BLK_SZ >> 3;
1517                 qat_proto_flag =
1518                         qat_get_crypto_proto_flag(header->serv_specif_flags);
1519         } else if (cdesc->qat_cipher_alg ==
1520                 ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
1521                 total_key_size = ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ +
1522                         ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
1523                 cipher_cd_ctrl->cipher_state_sz =
1524                         ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
1525                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
1526                 cdesc->min_qat_dev_gen = QAT_GEN2;
1527         } else {
1528                 total_key_size = cipherkeylen;
1529                 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_AES_BLK_SZ >> 3;
1530                 qat_proto_flag =
1531                         qat_get_crypto_proto_flag(header->serv_specif_flags);
1532         }
1533         cipher_cd_ctrl->cipher_key_sz = total_key_size >> 3;
1534         cipher_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
1535         cipher_cd_ctrl->cipher_cfg_offset = cipher_offset >> 3;
1536
1537         header->service_cmd_id = cdesc->qat_cmd;
1538         qat_sym_session_init_common_hdr(header, qat_proto_flag);
1539
1540         cipher = (struct icp_qat_hw_cipher_algo_blk *)cdesc->cd_cur_ptr;
1541         cipher->cipher_config.val =
1542             ICP_QAT_HW_CIPHER_CONFIG_BUILD(cdesc->qat_mode,
1543                                         cdesc->qat_cipher_alg, key_convert,
1544                                         cdesc->qat_dir);
1545
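        /*
         * KASUMI F8 expects the content descriptor to hold the cipher key
         * followed by the key XOR-ed with the F8 key modifier
         * (KASUMI_F8_KEY_MODIFIER_4_BYTES in every 32-bit word), hence the
         * doubled key area below.
         */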
1546         if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
1547                 temp_key = (uint32_t *)(cdesc->cd_cur_ptr +
1548                                         sizeof(struct icp_qat_hw_cipher_config)
1549                                         + cipherkeylen);
1550                 memcpy(cipher->key, cipherkey, cipherkeylen);
1551                 memcpy(temp_key, cipherkey, cipherkeylen);
1552
1553                 /* XOR Key with KASUMI F8 key modifier at 4 bytes level */
1554                 for (wordIndex = 0; wordIndex < (cipherkeylen >> 2);
1555                                                                 wordIndex++)
1556                         temp_key[wordIndex] ^= KASUMI_F8_KEY_MODIFIER_4_BYTES;
1557
1558                 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1559                                         cipherkeylen + cipherkeylen;
1560         } else {
1561                 memcpy(cipher->key, cipherkey, cipherkeylen);
1562                 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1563                                         cipherkeylen;
1564         }
1565
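        /*
         * Pad out to the hardware key size: for two-key 3DES reuse K1 as K3,
         * for single-key 3DES replicate K1 as K2 and K3, otherwise zero-fill
         * the remaining key bytes.
         */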
1566         if (total_key_size > cipherkeylen) {
1567                 uint32_t padding_size = total_key_size - cipherkeylen;
1568                 if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
1569                         && (cipherkeylen == QAT_3DES_KEY_SZ_OPT2)) {
1570                         /* K3 not provided so use K1 as K3 */
1571                         memcpy(cdesc->cd_cur_ptr, cipherkey, padding_size);
1572                 } else if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
1573                         && (cipherkeylen == QAT_3DES_KEY_SZ_OPT3)) {
1574                         /* K2 and K3 not provided so use K1 as K2 and K3 */
1575                         memcpy(cdesc->cd_cur_ptr, cipherkey,
1576                                 cipherkeylen);
1577                         memcpy(cdesc->cd_cur_ptr+cipherkeylen,
1578                                 cipherkey, cipherkeylen);
1579                 } else
1580                         memset(cdesc->cd_cur_ptr, 0, padding_size);
1581
1582                 cdesc->cd_cur_ptr += padding_size;
1583         }
1584         cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
1585         cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;
1586
1587         return 0;
1588 }
1589
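/*
 * Build the auth half of the content descriptor: program the slice chain,
 * select generate/verify behaviour, write the inner hash config and the
 * algorithm-specific state1/state2 pre-computes, then fill in the auth CD
 * control fields of the request template.
 */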
1590 int qat_sym_session_aead_create_cd_auth(struct qat_sym_session *cdesc,
1591                                                 const uint8_t *authkey,
1592                                                 uint32_t authkeylen,
1593                                                 uint32_t aad_length,
1594                                                 uint32_t digestsize,
1595                                                 unsigned int operation)
1596 {
1597         struct icp_qat_hw_auth_setup *hash;
1598         struct icp_qat_hw_cipher_algo_blk *cipherconfig;
1599         struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
1600         struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
1601         struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
1602         void *ptr = &req_tmpl->cd_ctrl;
1603         struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
1604         struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
1605         struct icp_qat_fw_la_auth_req_params *auth_param =
1606                 (struct icp_qat_fw_la_auth_req_params *)
1607                 ((char *)&req_tmpl->serv_specif_rqpars +
1608                 ICP_QAT_FW_HASH_REQUEST_PARAMETERS_OFFSET);
1609         uint16_t state1_size = 0, state2_size = 0;
1610         uint16_t hash_offset, cd_size;
1611         uint32_t *aad_len = NULL;
1612         uint32_t wordIndex  = 0;
1613         uint32_t *pTempKey;
1614         enum qat_sym_proto_flag qat_proto_flag =
1615                 QAT_CRYPTO_PROTO_FLAG_NONE;
1616
1617         if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_AUTH) {
1618                 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1619                                         ICP_QAT_FW_SLICE_AUTH);
1620                 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1621                                         ICP_QAT_FW_SLICE_DRAM_WR);
1622                 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1623         } else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
1624                 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1625                                 ICP_QAT_FW_SLICE_AUTH);
1626                 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1627                                 ICP_QAT_FW_SLICE_CIPHER);
1628                 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1629                                 ICP_QAT_FW_SLICE_CIPHER);
1630                 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1631                                 ICP_QAT_FW_SLICE_DRAM_WR);
1632                 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1633         } else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
1634                 QAT_LOG(ERR, "Invalid param, must be a hash command.");
1635                 return -EFAULT;
1636         }
1637
1638         if (operation == RTE_CRYPTO_AUTH_OP_VERIFY) {
1639                 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1640                                 ICP_QAT_FW_LA_NO_RET_AUTH_RES);
1641                 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1642                                 ICP_QAT_FW_LA_CMP_AUTH_RES);
1643                 cdesc->auth_op = ICP_QAT_HW_AUTH_VERIFY;
1644         } else {
1645                 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1646                                            ICP_QAT_FW_LA_RET_AUTH_RES);
1647                 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1648                                            ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
1649                 cdesc->auth_op = ICP_QAT_HW_AUTH_GENERATE;
1650         }
1651
1652         /*
1653          * Set up the inner hash config
1654          */
1655         hash_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
1656         hash = (struct icp_qat_hw_auth_setup *)cdesc->cd_cur_ptr;
1657         hash->auth_config.reserved = 0;
1658         hash->auth_config.config =
1659                         ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE1,
1660                                 cdesc->qat_hash_alg, digestsize);
1661
1662         if (cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2
1663                 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_KASUMI_F9
1664                 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3
1665                 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC
1666                 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC
1667                 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_NULL
1668                         )
1669                 hash->auth_counter.counter = 0;
1670         else {
1671                 int block_size = qat_hash_get_block_size(cdesc->qat_hash_alg);
1672
1673                 if (block_size < 0)
1674                         return block_size;
1675                 hash->auth_counter.counter = rte_bswap32(block_size);
1676         }
1677
1678         cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_auth_setup);
1679
1680         /*
1681          * cd_cur_ptr now points at the state1 information.
1682          */
1683         switch (cdesc->qat_hash_alg) {
1684         case ICP_QAT_HW_AUTH_ALGO_SHA1:
1685                 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA1, authkey,
1686                         authkeylen, cdesc->cd_cur_ptr, &state1_size,
1687                         cdesc->aes_cmac)) {
1688                         QAT_LOG(ERR, "(SHA)precompute failed");
1689                         return -EFAULT;
1690                 }
1691                 state2_size = RTE_ALIGN_CEIL(ICP_QAT_HW_SHA1_STATE2_SZ, 8);
1692                 break;
1693         case ICP_QAT_HW_AUTH_ALGO_SHA224:
1694                 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA224, authkey,
1695                         authkeylen, cdesc->cd_cur_ptr, &state1_size,
1696                         cdesc->aes_cmac)) {
1697                         QAT_LOG(ERR, "(SHA)precompute failed");
1698                         return -EFAULT;
1699                 }
1700                 state2_size = ICP_QAT_HW_SHA224_STATE2_SZ;
1701                 break;
1702         case ICP_QAT_HW_AUTH_ALGO_SHA256:
1703                 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA256, authkey,
1704                         authkeylen, cdesc->cd_cur_ptr,  &state1_size,
1705                         cdesc->aes_cmac)) {
1706                         QAT_LOG(ERR, "(SHA)precompute failed");
1707                         return -EFAULT;
1708                 }
1709                 state2_size = ICP_QAT_HW_SHA256_STATE2_SZ;
1710                 break;
1711         case ICP_QAT_HW_AUTH_ALGO_SHA384:
1712                 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA384, authkey,
1713                         authkeylen, cdesc->cd_cur_ptr, &state1_size,
1714                         cdesc->aes_cmac)) {
1715                         QAT_LOG(ERR, "(SHA)precompute failed");
1716                         return -EFAULT;
1717                 }
1718                 state2_size = ICP_QAT_HW_SHA384_STATE2_SZ;
1719                 break;
1720         case ICP_QAT_HW_AUTH_ALGO_SHA512:
1721                 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA512, authkey,
1722                         authkeylen, cdesc->cd_cur_ptr,  &state1_size,
1723                         cdesc->aes_cmac)) {
1724                         QAT_LOG(ERR, "(SHA)precompute failed");
1725                         return -EFAULT;
1726                 }
1727                 state2_size = ICP_QAT_HW_SHA512_STATE2_SZ;
1728                 break;
1729         case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
1730                 state1_size = ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1731
1732                 if (cdesc->aes_cmac)
1733                         memset(cdesc->cd_cur_ptr, 0, state1_size);
1734                 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC,
1735                         authkey, authkeylen, cdesc->cd_cur_ptr + state1_size,
1736                         &state2_size, cdesc->aes_cmac)) {
1737                         if (cdesc->aes_cmac)
1738                                 QAT_LOG(ERR, "(CMAC)precompute failed");
1739                         else
1740                                 QAT_LOG(ERR, "(XCBC)precompute failed");
1741                         return -EFAULT;
1742                 }
1743                 break;
1744         case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
1745         case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
1746                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
1747                 state1_size = ICP_QAT_HW_GALOIS_128_STATE1_SZ;
1748                 if (qat_sym_do_precomputes(cdesc->qat_hash_alg, authkey,
1749                         authkeylen, cdesc->cd_cur_ptr + state1_size,
1750                         &state2_size, cdesc->aes_cmac)) {
1751                         QAT_LOG(ERR, "(GCM)precompute failed");
1752                         return -EFAULT;
1753                 }
1754                 /*
1755                  * Write the AAD length into bytes 16-19 of state2 in
1756                  * big-endian format; the field itself is 8 bytes wide.
1757                  */
1758                 auth_param->u2.aad_sz =
1759                                 RTE_ALIGN_CEIL(aad_length, 16);
1760                 auth_param->hash_state_sz = (auth_param->u2.aad_sz) >> 3;
1761
1762                 aad_len = (uint32_t *)(cdesc->cd_cur_ptr +
1763                                         ICP_QAT_HW_GALOIS_128_STATE1_SZ +
1764                                         ICP_QAT_HW_GALOIS_H_SZ);
1765                 *aad_len = rte_bswap32(aad_length);
1766                 cdesc->aad_len = aad_length;
1767                 break;
1768         case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
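                /*
                 * SNOW 3G UIA2: zero state1/state2 and append a cipher config
                 * block carrying the integrity key (followed by a zeroed
                 * IV-sized area); this is how the integrity key is passed to
                 * the firmware for this algorithm.
                 */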
1769                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
1770                 state1_size = qat_hash_get_state1_size(
1771                                 ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2);
1772                 state2_size = ICP_QAT_HW_SNOW_3G_UIA2_STATE2_SZ;
1773                 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
1774
1775                 cipherconfig = (struct icp_qat_hw_cipher_algo_blk *)
1776                                 (cdesc->cd_cur_ptr + state1_size + state2_size);
1777                 cipherconfig->cipher_config.val =
1778                 ICP_QAT_HW_CIPHER_CONFIG_BUILD(ICP_QAT_HW_CIPHER_ECB_MODE,
1779                         ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2,
1780                         ICP_QAT_HW_CIPHER_KEY_CONVERT,
1781                         ICP_QAT_HW_CIPHER_ENCRYPT);
1782                 memcpy(cipherconfig->key, authkey, authkeylen);
1783                 memset(cipherconfig->key + authkeylen,
1784                                 0, ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ);
1785                 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1786                                 authkeylen + ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
1787                 auth_param->hash_state_sz = ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
1788                 break;
1789         case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
1790                 hash->auth_config.config =
1791                         ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE0,
1792                                 cdesc->qat_hash_alg, digestsize);
1793                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
1794                 state1_size = qat_hash_get_state1_size(
1795                                 ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3);
1796                 state2_size = ICP_QAT_HW_ZUC_3G_EIA3_STATE2_SZ;
1797                 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size
1798                         + ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ);
1799
1800                 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1801                 cdesc->cd_cur_ptr += state1_size + state2_size
1802                         + ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
1803                 auth_param->hash_state_sz = ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
1804                 cdesc->min_qat_dev_gen = QAT_GEN2;
1805
1806                 break;
1807         case ICP_QAT_HW_AUTH_ALGO_MD5:
1808                 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_MD5, authkey,
1809                         authkeylen, cdesc->cd_cur_ptr, &state1_size,
1810                         cdesc->aes_cmac)) {
1811                         QAT_LOG(ERR, "(MD5)precompute failed");
1812                         return -EFAULT;
1813                 }
1814                 state2_size = ICP_QAT_HW_MD5_STATE2_SZ;
1815                 break;
1816         case ICP_QAT_HW_AUTH_ALGO_NULL:
1817                 state1_size = qat_hash_get_state1_size(
1818                                 ICP_QAT_HW_AUTH_ALGO_NULL);
1819                 state2_size = ICP_QAT_HW_NULL_STATE2_SZ;
1820                 break;
1821         case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
1822                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
1823                 state1_size = qat_hash_get_state1_size(
1824                                 ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC);
1825                 state2_size = ICP_QAT_HW_AES_CBC_MAC_KEY_SZ +
1826                                 ICP_QAT_HW_AES_CCM_CBC_E_CTR0_SZ;
1827
1828                 if (aad_length > 0) {
1829                         aad_length += ICP_QAT_HW_CCM_AAD_B0_LEN +
1830                                         ICP_QAT_HW_CCM_AAD_LEN_INFO;
1831                         auth_param->u2.aad_sz =
1832                                 RTE_ALIGN_CEIL(aad_length,
1833                                         ICP_QAT_HW_CCM_AAD_ALIGNMENT);
1834                 } else {
1835                         auth_param->u2.aad_sz = ICP_QAT_HW_CCM_AAD_B0_LEN;
1836                 }
1837                 cdesc->aad_len = aad_length;
1838                 hash->auth_counter.counter = 0;
1839
1840                 hash_cd_ctrl->outer_prefix_sz = digestsize;
1841                 auth_param->hash_state_sz = digestsize;
1842
1843                 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1844                 break;
1845         case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
1846                 state1_size = qat_hash_get_state1_size(
1847                                 ICP_QAT_HW_AUTH_ALGO_KASUMI_F9);
1848                 state2_size = ICP_QAT_HW_KASUMI_F9_STATE2_SZ;
1849                 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
1850                 pTempKey = (uint32_t *)(cdesc->cd_cur_ptr + state1_size
1851                                                         + authkeylen);
1852                 /*
1853                  * The Inner Hash Initial State2 block must contain IK
1854                  * (Initialisation Key), followed by IK XOR-ed with KM
1855                  * (Key Modifier): IK||(IK^KM).
1856                  */
1857                 /* write the auth key */
1858                 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1859                 /* initialise temp key with auth key */
1860                 memcpy(pTempKey, authkey, authkeylen);
1861                 /* XOR Key with KASUMI F9 key modifier at 4 bytes level */
1862                 for (wordIndex = 0; wordIndex < (authkeylen >> 2); wordIndex++)
1863                         pTempKey[wordIndex] ^= KASUMI_F9_KEY_MODIFIER_4_BYTES;
1864                 break;
1865         default:
1866                 QAT_LOG(ERR, "Invalid HASH alg %u", cdesc->qat_hash_alg);
1867                 return -EFAULT;
1868         }
1869
1870         /* Request template setup */
1871         qat_sym_session_init_common_hdr(header, qat_proto_flag);
1872         header->service_cmd_id = cdesc->qat_cmd;
1873
1874         /* Auth CD config setup */
1875         hash_cd_ctrl->hash_cfg_offset = hash_offset >> 3;
1876         hash_cd_ctrl->hash_flags = ICP_QAT_FW_AUTH_HDR_FLAG_NO_NESTED;
1877         hash_cd_ctrl->inner_res_sz = digestsize;
1878         hash_cd_ctrl->final_sz = digestsize;
1879         hash_cd_ctrl->inner_state1_sz = state1_size;
1880         auth_param->auth_res_sz = digestsize;
1881
1882         hash_cd_ctrl->inner_state2_sz  = state2_size;
1883         hash_cd_ctrl->inner_state2_offset = hash_cd_ctrl->hash_cfg_offset +
1884                         ((sizeof(struct icp_qat_hw_auth_setup) +
1885                          RTE_ALIGN_CEIL(hash_cd_ctrl->inner_state1_sz, 8))
1886                                         >> 3);
1887
1888         cdesc->cd_cur_ptr += state1_size + state2_size;
1889         cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
1890
1891         cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1892         cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;
1893
1894         return 0;
1895 }
1896
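/*
 * Key length validation helpers: map a key length in bytes to the matching
 * QAT hardware cipher algorithm enum, or return -EINVAL if the length is not
 * supported for that algorithm.
 */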
1897 int qat_sym_validate_aes_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1898 {
1899         switch (key_len) {
1900         case ICP_QAT_HW_AES_128_KEY_SZ:
1901                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
1902                 break;
1903         case ICP_QAT_HW_AES_192_KEY_SZ:
1904                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES192;
1905                 break;
1906         case ICP_QAT_HW_AES_256_KEY_SZ:
1907                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
1908                 break;
1909         default:
1910                 return -EINVAL;
1911         }
1912         return 0;
1913 }
1914
1915 int qat_sym_validate_aes_docsisbpi_key(int key_len,
1916                 enum icp_qat_hw_cipher_algo *alg)
1917 {
1918         switch (key_len) {
1919         case ICP_QAT_HW_AES_128_KEY_SZ:
1920                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
1921                 break;
1922         case ICP_QAT_HW_AES_256_KEY_SZ:
1923                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
1924                 break;
1925         default:
1926                 return -EINVAL;
1927         }
1928         return 0;
1929 }
1930
1931 int qat_sym_validate_snow3g_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1932 {
1933         switch (key_len) {
1934         case ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ:
1935                 *alg = ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2;
1936                 break;
1937         default:
1938                 return -EINVAL;
1939         }
1940         return 0;
1941 }
1942
1943 int qat_sym_validate_kasumi_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1944 {
1945         switch (key_len) {
1946         case ICP_QAT_HW_KASUMI_KEY_SZ:
1947                 *alg = ICP_QAT_HW_CIPHER_ALGO_KASUMI;
1948                 break;
1949         default:
1950                 return -EINVAL;
1951         }
1952         return 0;
1953 }
1954
1955 int qat_sym_validate_des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1956 {
1957         switch (key_len) {
1958         case ICP_QAT_HW_DES_KEY_SZ:
1959                 *alg = ICP_QAT_HW_CIPHER_ALGO_DES;
1960                 break;
1961         default:
1962                 return -EINVAL;
1963         }
1964         return 0;
1965 }
1966
1967 int qat_sym_validate_3des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1968 {
1969         switch (key_len) {
1970         case QAT_3DES_KEY_SZ_OPT1:
1971         case QAT_3DES_KEY_SZ_OPT2:
1972         case QAT_3DES_KEY_SZ_OPT3:
1973                 *alg = ICP_QAT_HW_CIPHER_ALGO_3DES;
1974                 break;
1975         default:
1976                 return -EINVAL;
1977         }
1978         return 0;
1979 }
1980
1981 int qat_sym_validate_zuc_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1982 {
1983         switch (key_len) {
1984         case ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ:
1985                 *alg = ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3;
1986                 break;
1987         default:
1988                 return -EINVAL;
1989         }
1990         return 0;
1991 }