fd2cc382e6d352d9bba8b6eb455c63eac3084a00
[dpdk.git] / drivers / crypto / qat / qat_sym_session.c
1 /* SPDX-License-Identifier: (BSD-3-Clause OR GPL-2.0)
2  * Copyright(c) 2015-2019 Intel Corporation
3  */
4
5 #include <openssl/sha.h>        /* Needed to calculate pre-compute values */
6 #include <openssl/aes.h>        /* Needed to calculate pre-compute values */
7 #include <openssl/md5.h>        /* Needed to calculate pre-compute values */
8 #include <openssl/evp.h>        /* Needed for bpi runt block processing */
9
10 #include <rte_memcpy.h>
11 #include <rte_common.h>
12 #include <rte_spinlock.h>
13 #include <rte_byteorder.h>
14 #include <rte_log.h>
15 #include <rte_malloc.h>
16 #include <rte_crypto_sym.h>
17
18 #include "qat_logs.h"
19 #include "qat_sym_session.h"
20 #include "qat_sym_pmd.h"
21
22 /** Frees a context previously created
23  *  Depends on openssl libcrypto
24  */
25 static void
26 bpi_cipher_ctx_free(void *bpi_ctx)
27 {
28         if (bpi_ctx != NULL)
29                 EVP_CIPHER_CTX_free((EVP_CIPHER_CTX *)bpi_ctx);
30 }
31
32 /** Creates a context in either AES or DES in ECB mode
33  *  Depends on openssl libcrypto
34  */
35 static int
36 bpi_cipher_ctx_init(enum rte_crypto_cipher_algorithm cryptodev_algo,
37                 enum rte_crypto_cipher_operation direction __rte_unused,
38                 const uint8_t *key, uint16_t key_length, void **ctx)
39 {
40         const EVP_CIPHER *algo = NULL;
41         int ret;
42         *ctx = EVP_CIPHER_CTX_new();
43
44         if (*ctx == NULL) {
45                 ret = -ENOMEM;
46                 goto ctx_init_err;
47         }
48
49         if (cryptodev_algo == RTE_CRYPTO_CIPHER_DES_DOCSISBPI)
50                 algo = EVP_des_ecb();
51         else
52                 if (key_length == ICP_QAT_HW_AES_128_KEY_SZ)
53                         algo = EVP_aes_128_ecb();
54                 else
55                         algo = EVP_aes_256_ecb();
56
57         /* IV will be ECB encrypted whether direction is encrypt or decrypt*/
58         if (EVP_EncryptInit_ex(*ctx, algo, NULL, key, 0) != 1) {
59                 ret = -EINVAL;
60                 goto ctx_init_err;
61         }
62
63         return 0;
64
65 ctx_init_err:
66         if (*ctx != NULL)
67                 EVP_CIPHER_CTX_free(*ctx);
68         return ret;
69 }
70
71 static int
72 qat_is_cipher_alg_supported(enum rte_crypto_cipher_algorithm algo,
73                 struct qat_sym_dev_private *internals)
74 {
75         int i = 0;
76         const struct rte_cryptodev_capabilities *capability;
77
78         while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
79                         RTE_CRYPTO_OP_TYPE_UNDEFINED) {
80                 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
81                         continue;
82
83                 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_CIPHER)
84                         continue;
85
86                 if (capability->sym.cipher.algo == algo)
87                         return 1;
88         }
89         return 0;
90 }
91
92 static int
93 qat_is_auth_alg_supported(enum rte_crypto_auth_algorithm algo,
94                 struct qat_sym_dev_private *internals)
95 {
96         int i = 0;
97         const struct rte_cryptodev_capabilities *capability;
98
99         while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
100                         RTE_CRYPTO_OP_TYPE_UNDEFINED) {
101                 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
102                         continue;
103
104                 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_AUTH)
105                         continue;
106
107                 if (capability->sym.auth.algo == algo)
108                         return 1;
109         }
110         return 0;
111 }
112
113 void
114 qat_sym_session_clear(struct rte_cryptodev *dev,
115                 struct rte_cryptodev_sym_session *sess)
116 {
117         uint8_t index = dev->driver_id;
118         void *sess_priv = get_sym_session_private_data(sess, index);
119         struct qat_sym_session *s = (struct qat_sym_session *)sess_priv;
120
121         if (sess_priv) {
122                 if (s->bpi_ctx)
123                         bpi_cipher_ctx_free(s->bpi_ctx);
124                 memset(s, 0, qat_sym_session_get_private_size(dev));
125                 struct rte_mempool *sess_mp = rte_mempool_from_obj(sess_priv);
126
127                 set_sym_session_private_data(sess, index, NULL);
128                 rte_mempool_put(sess_mp, sess_priv);
129         }
130 }
131
132 static int
133 qat_get_cmd_id(const struct rte_crypto_sym_xform *xform)
134 {
135         /* Cipher Only */
136         if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER && xform->next == NULL)
137                 return ICP_QAT_FW_LA_CMD_CIPHER;
138
139         /* Authentication Only */
140         if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH && xform->next == NULL)
141                 return ICP_QAT_FW_LA_CMD_AUTH;
142
143         /* AEAD */
144         if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
145                 /* AES-GCM and AES-CCM works with different direction
146                  * GCM first encrypts and generate hash where AES-CCM
147                  * first generate hash and encrypts. Similar relation
148                  * applies to decryption.
149                  */
150                 if (xform->aead.op == RTE_CRYPTO_AEAD_OP_ENCRYPT)
151                         if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
152                                 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
153                         else
154                                 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
155                 else
156                         if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
157                                 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
158                         else
159                                 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
160         }
161
162         if (xform->next == NULL)
163                 return -1;
164
165         /* Cipher then Authenticate */
166         if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
167                         xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
168                 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
169
170         /* Authenticate then Cipher */
171         if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH &&
172                         xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
173                 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
174
175         return -1;
176 }
177
178 static struct rte_crypto_auth_xform *
179 qat_get_auth_xform(struct rte_crypto_sym_xform *xform)
180 {
181         do {
182                 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH)
183                         return &xform->auth;
184
185                 xform = xform->next;
186         } while (xform);
187
188         return NULL;
189 }
190
191 static struct rte_crypto_cipher_xform *
192 qat_get_cipher_xform(struct rte_crypto_sym_xform *xform)
193 {
194         do {
195                 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
196                         return &xform->cipher;
197
198                 xform = xform->next;
199         } while (xform);
200
201         return NULL;
202 }
203
/**
 * Configure the cipher half of a QAT session from a transform chain.
 *
 * Validates the key size for the requested algorithm, records the IV
 * offset/length, selects the QAT hardware cipher mode and direction,
 * and builds the cipher content descriptor.
 *
 * @param dev      cryptodev whose private data holds the capability table
 * @param xform    transform chain; the first CIPHER transform is used
 * @param session  session to populate
 * @return 0 on success, negative errno on failure; on failure any
 *         DOCSIS BPI OpenSSL context created here is freed again.
 */
int
qat_sym_session_configure_cipher(struct rte_cryptodev *dev,
		struct rte_crypto_sym_xform *xform,
		struct qat_sym_session *session)
{
	struct qat_sym_dev_private *internals = dev->data->dev_private;
	struct rte_crypto_cipher_xform *cipher_xform = NULL;
	int ret;

	/* Get cipher xform from crypto xform chain */
	cipher_xform = qat_get_cipher_xform(xform);

	session->cipher_iv.offset = cipher_xform->iv.offset;
	session->cipher_iv.length = cipher_xform->iv.length;

	/* Each case validates the key length (which also selects the QAT
	 * algorithm enum) and picks the hardware block-chaining mode.
	 */
	switch (cipher_xform->algo) {
	case RTE_CRYPTO_CIPHER_AES_CBC:
		if (qat_sym_validate_aes_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_AES_CTR:
		if (qat_sym_validate_aes_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		break;
	case RTE_CRYPTO_CIPHER_SNOW3G_UEA2:
		if (qat_sym_validate_snow3g_key(cipher_xform->key.length,
					&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid SNOW 3G cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
		break;
	case RTE_CRYPTO_CIPHER_NULL:
		/* NULL cipher: no key validation needed */
		session->qat_cipher_alg = ICP_QAT_HW_CIPHER_ALGO_NULL;
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		break;
	case RTE_CRYPTO_CIPHER_KASUMI_F8:
		if (qat_sym_validate_kasumi_key(cipher_xform->key.length,
					&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid KASUMI cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_F8_MODE;
		break;
	case RTE_CRYPTO_CIPHER_3DES_CBC:
		if (qat_sym_validate_3des_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid 3DES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_DES_CBC:
		if (qat_sym_validate_des_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid DES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_3DES_CTR:
		if (qat_sym_validate_3des_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid 3DES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		break;
	case RTE_CRYPTO_CIPHER_DES_DOCSISBPI:
		/* DOCSIS BPI needs an OpenSSL ECB context for runt-block
		 * processing; create it before validating the key so the
		 * common error path can clean it up.
		 */
		ret = bpi_cipher_ctx_init(
					cipher_xform->algo,
					cipher_xform->op,
					cipher_xform->key.data,
					cipher_xform->key.length,
					&session->bpi_ctx);
		if (ret != 0) {
			QAT_LOG(ERR, "failed to create DES BPI ctx");
			goto error_out;
		}
		if (qat_sym_validate_des_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid DES cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_AES_DOCSISBPI:
		ret = bpi_cipher_ctx_init(
					cipher_xform->algo,
					cipher_xform->op,
					cipher_xform->key.data,
					cipher_xform->key.length,
					&session->bpi_ctx);
		if (ret != 0) {
			QAT_LOG(ERR, "failed to create AES BPI ctx");
			goto error_out;
		}
		if (qat_sym_validate_aes_docsisbpi_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES DOCSISBPI key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
		break;
	case RTE_CRYPTO_CIPHER_ZUC_EEA3:
		/* ZUC support varies by device generation: check the
		 * capability table before accepting the algorithm.
		 */
		if (!qat_is_cipher_alg_supported(
			cipher_xform->algo, internals)) {
			QAT_LOG(ERR, "%s not supported on this device",
				rte_crypto_cipher_algorithm_strings
					[cipher_xform->algo]);
			ret = -ENOTSUP;
			goto error_out;
		}
		if (qat_sym_validate_zuc_key(cipher_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid ZUC cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
		break;
	case RTE_CRYPTO_CIPHER_AES_XTS:
		/* XTS keys are two concatenated AES keys, hence length/2 */
		if ((cipher_xform->key.length/2) == ICP_QAT_HW_AES_192_KEY_SZ) {
			QAT_LOG(ERR, "AES-XTS-192 not supported");
			ret = -EINVAL;
			goto error_out;
		}
		if (qat_sym_validate_aes_key((cipher_xform->key.length/2),
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES-XTS cipher key size");
			ret = -EINVAL;
			goto error_out;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_XTS_MODE;
		break;
	case RTE_CRYPTO_CIPHER_3DES_ECB:
	case RTE_CRYPTO_CIPHER_AES_ECB:
	case RTE_CRYPTO_CIPHER_AES_F8:
	case RTE_CRYPTO_CIPHER_ARC4:
		/* Known algorithms the PMD deliberately does not support */
		QAT_LOG(ERR, "Crypto QAT PMD: Unsupported Cipher alg %u",
				cipher_xform->algo);
		ret = -ENOTSUP;
		goto error_out;
	default:
		QAT_LOG(ERR, "Crypto: Undefined Cipher specified %u\n",
				cipher_xform->algo);
		ret = -EINVAL;
		goto error_out;
	}

	if (cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
		session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
	else
		session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;

	/* Build the cipher content descriptor from the validated key */
	if (qat_sym_session_aead_create_cd_cipher(session,
						cipher_xform->key.data,
						cipher_xform->key.length)) {
		ret = -EINVAL;
		goto error_out;
	}

	return 0;

error_out:
	/* Undo the BPI context allocation from the DOCSIS cases above */
	if (session->bpi_ctx) {
		bpi_cipher_ctx_free(session->bpi_ctx);
		session->bpi_ctx = NULL;
	}
	return ret;
}
392
393 int
394 qat_sym_session_configure(struct rte_cryptodev *dev,
395                 struct rte_crypto_sym_xform *xform,
396                 struct rte_cryptodev_sym_session *sess,
397                 struct rte_mempool *mempool)
398 {
399         void *sess_private_data;
400         int ret;
401
402         if (rte_mempool_get(mempool, &sess_private_data)) {
403                 CDEV_LOG_ERR(
404                         "Couldn't get object from session mempool");
405                 return -ENOMEM;
406         }
407
408         ret = qat_sym_session_set_parameters(dev, xform, sess_private_data);
409         if (ret != 0) {
410                 QAT_LOG(ERR,
411                     "Crypto QAT PMD: failed to configure session parameters");
412
413                 /* Return session to mempool */
414                 rte_mempool_put(mempool, sess_private_data);
415                 return ret;
416         }
417
418         set_sym_session_private_data(sess, dev->driver_id,
419                 sess_private_data);
420
421         return 0;
422 }
423
/**
 * Switch the session's firmware request into "extended protocol flags"
 * mode and program the per-algorithm hash/proto flags.
 *
 * Used for mixed wireless cipher+hash combinations where the legacy
 * single proto field cannot describe both algorithms.
 *
 * @param session    session whose prebuilt fw_req header is patched
 * @param hash_flag  bitmask OR-ed into the hash flags in LW 28
 *                   (may be 0 to only set the extended-flags mode)
 */
static void
qat_sym_session_set_ext_hash_flags(struct qat_sym_session *session,
		uint8_t hash_flag)
{
	struct icp_qat_fw_comn_req_hdr *header = &session->fw_req.comn_hdr;
	struct icp_qat_fw_cipher_auth_cd_ctrl_hdr *cd_ctrl =
			(struct icp_qat_fw_cipher_auth_cd_ctrl_hdr *)
			session->fw_req.cd_ctrl.content_desc_ctrl_lw;

	/* Set the Use Extended Protocol Flags bit in LW 1 */
	QAT_FIELD_SET(header->comn_req_flags,
			QAT_COMN_EXT_FLAGS_USED,
			QAT_COMN_EXT_FLAGS_BITPOS,
			QAT_COMN_EXT_FLAGS_MASK);

	/* Set Hash Flags in LW 28 */
	cd_ctrl->hash_flags |= hash_flag;

	/* Set proto flags in LW 1: the legacy proto field carries the
	 * SNOW 3G selection, while ZUC uses its own dedicated flag.
	 */
	switch (session->qat_cipher_alg) {
	case ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_SNOW_3G_PROTO);
		ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
				header->serv_specif_flags, 0);
		break;
	case ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_NO_PROTO);
		ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
				header->serv_specif_flags,
				ICP_QAT_FW_LA_ZUC_3G_PROTO);
		break;
	default:
		/* Neither wireless cipher: clear both proto indicators */
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_NO_PROTO);
		ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(
				header->serv_specif_flags, 0);
		break;
	}
}
465
/**
 * Detect "mixed" wireless hash+cipher combinations (e.g. ZUC auth with a
 * non-ZUC cipher) and, when found, raise the session's minimum device
 * generation and enable the extended hash flags needed to describe them.
 *
 * min_dev_gen is QAT_GEN2 when the device advertises the mixed-crypto
 * capability, otherwise QAT_GEN3. Sessions that don't match any of the
 * mixed patterns are left untouched.
 */
static void
qat_sym_session_handle_mixed(const struct rte_cryptodev *dev,
		struct qat_sym_session *session)
{
	const struct qat_sym_dev_private *qat_private = dev->data->dev_private;
	enum qat_device_gen min_dev_gen = (qat_private->internal_capabilities &
			QAT_SYM_CAP_MIXED_CRYPTO) ? QAT_GEN2 : QAT_GEN3;

	/* ZUC hash combined with a non-ZUC cipher */
	if (session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3 &&
			session->qat_cipher_alg !=
			ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
		session->min_qat_dev_gen = min_dev_gen;
		qat_sym_session_set_ext_hash_flags(session,
			1 << ICP_QAT_FW_AUTH_HDR_FLAG_ZUC_EIA3_BITPOS);
	/* SNOW 3G hash combined with a non-SNOW 3G cipher */
	} else if (session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2 &&
			session->qat_cipher_alg !=
			ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
		session->min_qat_dev_gen = min_dev_gen;
		qat_sym_session_set_ext_hash_flags(session,
			1 << ICP_QAT_FW_AUTH_HDR_FLAG_SNOW3G_UIA2_BITPOS);
	/* AES-CMAC or NULL hash combined with a wireless cipher: extended
	 * flags mode is required but no extra hash flag bit is set.
	 */
	} else if ((session->aes_cmac ||
			session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_NULL) &&
			(session->qat_cipher_alg ==
			ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2 ||
			session->qat_cipher_alg ==
			ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)) {
		session->min_qat_dev_gen = min_dev_gen;
		qat_sym_session_set_ext_hash_flags(session, 0);
	}
}
496
497 int
498 qat_sym_session_set_parameters(struct rte_cryptodev *dev,
499                 struct rte_crypto_sym_xform *xform, void *session_private)
500 {
501         struct qat_sym_session *session = session_private;
502         int ret;
503         int qat_cmd_id;
504
505         /* Set context descriptor physical address */
506         session->cd_paddr = rte_mempool_virt2iova(session) +
507                         offsetof(struct qat_sym_session, cd);
508
509         session->min_qat_dev_gen = QAT_GEN1;
510
511         /* Get requested QAT command id */
512         qat_cmd_id = qat_get_cmd_id(xform);
513         if (qat_cmd_id < 0 || qat_cmd_id >= ICP_QAT_FW_LA_CMD_DELIMITER) {
514                 QAT_LOG(ERR, "Unsupported xform chain requested");
515                 return -ENOTSUP;
516         }
517         session->qat_cmd = (enum icp_qat_fw_la_cmd_id)qat_cmd_id;
518         switch (session->qat_cmd) {
519         case ICP_QAT_FW_LA_CMD_CIPHER:
520                 ret = qat_sym_session_configure_cipher(dev, xform, session);
521                 if (ret < 0)
522                         return ret;
523                 break;
524         case ICP_QAT_FW_LA_CMD_AUTH:
525                 ret = qat_sym_session_configure_auth(dev, xform, session);
526                 if (ret < 0)
527                         return ret;
528                 break;
529         case ICP_QAT_FW_LA_CMD_CIPHER_HASH:
530                 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
531                         ret = qat_sym_session_configure_aead(dev, xform,
532                                         session);
533                         if (ret < 0)
534                                 return ret;
535                 } else {
536                         ret = qat_sym_session_configure_cipher(dev,
537                                         xform, session);
538                         if (ret < 0)
539                                 return ret;
540                         ret = qat_sym_session_configure_auth(dev,
541                                         xform, session);
542                         if (ret < 0)
543                                 return ret;
544                         /* Special handling of mixed hash+cipher algorithms */
545                         qat_sym_session_handle_mixed(dev, session);
546                 }
547                 break;
548         case ICP_QAT_FW_LA_CMD_HASH_CIPHER:
549                 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
550                         ret = qat_sym_session_configure_aead(dev, xform,
551                                         session);
552                         if (ret < 0)
553                                 return ret;
554                 } else {
555                         ret = qat_sym_session_configure_auth(dev,
556                                         xform, session);
557                         if (ret < 0)
558                                 return ret;
559                         ret = qat_sym_session_configure_cipher(dev,
560                                         xform, session);
561                         if (ret < 0)
562                                 return ret;
563                         /* Special handling of mixed hash+cipher algorithms */
564                         qat_sym_session_handle_mixed(dev, session);
565                 }
566                 break;
567         case ICP_QAT_FW_LA_CMD_TRNG_GET_RANDOM:
568         case ICP_QAT_FW_LA_CMD_TRNG_TEST:
569         case ICP_QAT_FW_LA_CMD_SSL3_KEY_DERIVE:
570         case ICP_QAT_FW_LA_CMD_TLS_V1_1_KEY_DERIVE:
571         case ICP_QAT_FW_LA_CMD_TLS_V1_2_KEY_DERIVE:
572         case ICP_QAT_FW_LA_CMD_MGF1:
573         case ICP_QAT_FW_LA_CMD_AUTH_PRE_COMP:
574         case ICP_QAT_FW_LA_CMD_CIPHER_PRE_COMP:
575         case ICP_QAT_FW_LA_CMD_DELIMITER:
576         QAT_LOG(ERR, "Unsupported Service %u",
577                 session->qat_cmd);
578                 return -ENOTSUP;
579         default:
580         QAT_LOG(ERR, "Unsupported Service %u",
581                 session->qat_cmd);
582                 return -ENOTSUP;
583         }
584
585         return 0;
586 }
587
/**
 * Convert an AES-GCM AEAD session to the faster single-pass mode when
 * the device supports it (GEN3 with a 12-byte IV). Otherwise the
 * session is left unchanged and 0 is returned.
 *
 * Single-pass mode reissues the request as a plain CIPHER command with
 * AEAD cipher config, carrying AAD/digest sizes in the cipher request
 * parameters instead of a separate hash setup.
 *
 * @return 0 on success (including the not-applicable case),
 *         -EINVAL if building the cipher content descriptor fails
 */
static int
qat_sym_session_handle_single_pass(struct qat_sym_dev_private *internals,
		struct qat_sym_session *session,
		struct rte_crypto_aead_xform *aead_xform)
{
	enum qat_device_gen qat_dev_gen = internals->qat_dev->qat_dev_gen;

	if (qat_dev_gen == QAT_GEN3 &&
			aead_xform->iv.length == QAT_AES_GCM_SPC_IV_SIZE) {
		/* Use faster Single-Pass GCM */
		struct icp_qat_fw_la_cipher_req_params *cipher_param =
				(void *) &session->fw_req.serv_specif_rqpars;

		session->is_single_pass = 1;
		session->min_qat_dev_gen = QAT_GEN3;
		session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER;
		session->qat_mode = ICP_QAT_HW_CIPHER_AEAD_MODE;
		session->cipher_iv.offset = aead_xform->iv.offset;
		session->cipher_iv.length = aead_xform->iv.length;
		if (qat_sym_session_aead_create_cd_cipher(session,
				aead_xform->key.data, aead_xform->key.length))
			return -EINVAL;
		session->aad_len = aead_xform->aad_length;
		session->digest_length = aead_xform->digest_length;
		/* Direction also selects generate vs verify of the tag */
		if (aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
			session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
			session->auth_op = ICP_QAT_HW_AUTH_GENERATE;
			ICP_QAT_FW_LA_RET_AUTH_SET(
				session->fw_req.comn_hdr.serv_specif_flags,
				ICP_QAT_FW_LA_RET_AUTH_RES);
		} else {
			session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
			session->auth_op = ICP_QAT_HW_AUTH_VERIFY;
			ICP_QAT_FW_LA_CMP_AUTH_SET(
				session->fw_req.comn_hdr.serv_specif_flags,
				ICP_QAT_FW_LA_CMP_AUTH_RES);
		}
		/* Single-pass proto flag replaces the GCM proto setting */
		ICP_QAT_FW_LA_SINGLE_PASS_PROTO_FLAG_SET(
				session->fw_req.comn_hdr.serv_specif_flags,
				ICP_QAT_FW_LA_SINGLE_PASS_PROTO);
		ICP_QAT_FW_LA_PROTO_SET(
				session->fw_req.comn_hdr.serv_specif_flags,
				ICP_QAT_FW_LA_NO_PROTO);
		ICP_QAT_FW_LA_GCM_IV_LEN_FLAG_SET(
				session->fw_req.comn_hdr.serv_specif_flags,
				ICP_QAT_FW_LA_GCM_IV_LEN_12_OCTETS);
		session->fw_req.comn_hdr.service_cmd_id =
				ICP_QAT_FW_LA_CMD_CIPHER;
		/* Rebuild cipher config for AEAD mode with the digest
		 * length packed into the config word
		 */
		session->cd.cipher.cipher_config.val =
				ICP_QAT_HW_CIPHER_CONFIG_BUILD(
					ICP_QAT_HW_CIPHER_AEAD_MODE,
					session->qat_cipher_alg,
					ICP_QAT_HW_CIPHER_NO_CONVERT,
					session->qat_dir);
		QAT_FIELD_SET(session->cd.cipher.cipher_config.val,
				aead_xform->digest_length,
				QAT_CIPHER_AEAD_HASH_CMP_LEN_BITPOS,
				QAT_CIPHER_AEAD_HASH_CMP_LEN_MASK);
		session->cd.cipher.cipher_config.reserved =
				ICP_QAT_HW_CIPHER_CONFIG_BUILD_UPPER(
					aead_xform->aad_length);
		cipher_param->spc_aad_sz = aead_xform->aad_length;
		cipher_param->spc_auth_res_sz = aead_xform->digest_length;
	}
	return 0;
}
654
655 int
656 qat_sym_session_configure_auth(struct rte_cryptodev *dev,
657                                 struct rte_crypto_sym_xform *xform,
658                                 struct qat_sym_session *session)
659 {
660         struct rte_crypto_auth_xform *auth_xform = qat_get_auth_xform(xform);
661         struct qat_sym_dev_private *internals = dev->data->dev_private;
662         const uint8_t *key_data = auth_xform->key.data;
663         uint8_t key_length = auth_xform->key.length;
664         session->aes_cmac = 0;
665
666         session->auth_iv.offset = auth_xform->iv.offset;
667         session->auth_iv.length = auth_xform->iv.length;
668
669         switch (auth_xform->algo) {
670         case RTE_CRYPTO_AUTH_SHA1_HMAC:
671                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA1;
672                 break;
673         case RTE_CRYPTO_AUTH_SHA224_HMAC:
674                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA224;
675                 break;
676         case RTE_CRYPTO_AUTH_SHA256_HMAC:
677                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA256;
678                 break;
679         case RTE_CRYPTO_AUTH_SHA384_HMAC:
680                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA384;
681                 break;
682         case RTE_CRYPTO_AUTH_SHA512_HMAC:
683                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA512;
684                 break;
685         case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
686                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
687                 break;
688         case RTE_CRYPTO_AUTH_AES_CMAC:
689                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
690                 session->aes_cmac = 1;
691                 break;
692         case RTE_CRYPTO_AUTH_AES_GMAC:
693                 if (qat_sym_validate_aes_key(auth_xform->key.length,
694                                 &session->qat_cipher_alg) != 0) {
695                         QAT_LOG(ERR, "Invalid AES key size");
696                         return -EINVAL;
697                 }
698                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
699                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
700                 if (session->auth_iv.length == 0)
701                         session->auth_iv.length = AES_GCM_J0_LEN;
702
703                 break;
704         case RTE_CRYPTO_AUTH_SNOW3G_UIA2:
705                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2;
706                 break;
707         case RTE_CRYPTO_AUTH_MD5_HMAC:
708                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_MD5;
709                 break;
710         case RTE_CRYPTO_AUTH_NULL:
711                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_NULL;
712                 break;
713         case RTE_CRYPTO_AUTH_KASUMI_F9:
714                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_KASUMI_F9;
715                 break;
716         case RTE_CRYPTO_AUTH_ZUC_EIA3:
717                 if (!qat_is_auth_alg_supported(auth_xform->algo, internals)) {
718                         QAT_LOG(ERR, "%s not supported on this device",
719                                 rte_crypto_auth_algorithm_strings
720                                 [auth_xform->algo]);
721                         return -ENOTSUP;
722                 }
723                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3;
724                 break;
725         case RTE_CRYPTO_AUTH_SHA1:
726         case RTE_CRYPTO_AUTH_SHA256:
727         case RTE_CRYPTO_AUTH_SHA512:
728         case RTE_CRYPTO_AUTH_SHA224:
729         case RTE_CRYPTO_AUTH_SHA384:
730         case RTE_CRYPTO_AUTH_MD5:
731         case RTE_CRYPTO_AUTH_AES_CBC_MAC:
732                 QAT_LOG(ERR, "Crypto: Unsupported hash alg %u",
733                                 auth_xform->algo);
734                 return -ENOTSUP;
735         default:
736                 QAT_LOG(ERR, "Crypto: Undefined Hash algo %u specified",
737                                 auth_xform->algo);
738                 return -EINVAL;
739         }
740
741         if (auth_xform->algo == RTE_CRYPTO_AUTH_AES_GMAC) {
742                 if (auth_xform->op == RTE_CRYPTO_AUTH_OP_GENERATE) {
743                         session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER_HASH;
744                         session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
745                         /*
746                          * It needs to create cipher desc content first,
747                          * then authentication
748                          */
749
750                         if (qat_sym_session_aead_create_cd_cipher(session,
751                                                 auth_xform->key.data,
752                                                 auth_xform->key.length))
753                                 return -EINVAL;
754
755                         if (qat_sym_session_aead_create_cd_auth(session,
756                                                 key_data,
757                                                 key_length,
758                                                 0,
759                                                 auth_xform->digest_length,
760                                                 auth_xform->op))
761                                 return -EINVAL;
762                 } else {
763                         session->qat_cmd = ICP_QAT_FW_LA_CMD_HASH_CIPHER;
764                         session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
765                         /*
766                          * It needs to create authentication desc content first,
767                          * then cipher
768                          */
769
770                         if (qat_sym_session_aead_create_cd_auth(session,
771                                         key_data,
772                                         key_length,
773                                         0,
774                                         auth_xform->digest_length,
775                                         auth_xform->op))
776                                 return -EINVAL;
777
778                         if (qat_sym_session_aead_create_cd_cipher(session,
779                                                 auth_xform->key.data,
780                                                 auth_xform->key.length))
781                                 return -EINVAL;
782                 }
783                 /* Restore to authentication only only */
784                 session->qat_cmd = ICP_QAT_FW_LA_CMD_AUTH;
785         } else {
786                 if (qat_sym_session_aead_create_cd_auth(session,
787                                 key_data,
788                                 key_length,
789                                 0,
790                                 auth_xform->digest_length,
791                                 auth_xform->op))
792                         return -EINVAL;
793         }
794
795         session->digest_length = auth_xform->digest_length;
796         return 0;
797 }
798
799 int
800 qat_sym_session_configure_aead(struct rte_cryptodev *dev,
801                                 struct rte_crypto_sym_xform *xform,
802                                 struct qat_sym_session *session)
803 {
804         struct rte_crypto_aead_xform *aead_xform = &xform->aead;
805         enum rte_crypto_auth_operation crypto_operation;
806
807         /*
808          * Store AEAD IV parameters as cipher IV,
809          * to avoid unnecessary memory usage
810          */
811         session->cipher_iv.offset = xform->aead.iv.offset;
812         session->cipher_iv.length = xform->aead.iv.length;
813
814         switch (aead_xform->algo) {
815         case RTE_CRYPTO_AEAD_AES_GCM:
816                 if (qat_sym_validate_aes_key(aead_xform->key.length,
817                                 &session->qat_cipher_alg) != 0) {
818                         QAT_LOG(ERR, "Invalid AES key size");
819                         return -EINVAL;
820                 }
821                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
822                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
823                 if (session->cipher_iv.length == 0)
824                         session->cipher_iv.length = AES_GCM_J0_LEN;
825
826                 break;
827         case RTE_CRYPTO_AEAD_AES_CCM:
828                 if (qat_sym_validate_aes_key(aead_xform->key.length,
829                                 &session->qat_cipher_alg) != 0) {
830                         QAT_LOG(ERR, "Invalid AES key size");
831                         return -EINVAL;
832                 }
833                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
834                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC;
835                 break;
836         default:
837                 QAT_LOG(ERR, "Crypto: Undefined AEAD specified %u\n",
838                                 aead_xform->algo);
839                 return -EINVAL;
840         }
841
842         session->is_single_pass = 0;
843         if (aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) {
844                 /* Use faster Single-Pass GCM if possible */
845                 int res = qat_sym_session_handle_single_pass(
846                                 dev->data->dev_private, session, aead_xform);
847                 if (res < 0)
848                         return res;
849                 if (session->is_single_pass)
850                         return 0;
851         }
852
853         if ((aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT &&
854                         aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) ||
855                         (aead_xform->op == RTE_CRYPTO_AEAD_OP_DECRYPT &&
856                         aead_xform->algo == RTE_CRYPTO_AEAD_AES_CCM)) {
857                 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
858                 /*
859                  * It needs to create cipher desc content first,
860                  * then authentication
861                  */
862                 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
863                         RTE_CRYPTO_AUTH_OP_GENERATE : RTE_CRYPTO_AUTH_OP_VERIFY;
864
865                 if (qat_sym_session_aead_create_cd_cipher(session,
866                                         aead_xform->key.data,
867                                         aead_xform->key.length))
868                         return -EINVAL;
869
870                 if (qat_sym_session_aead_create_cd_auth(session,
871                                         aead_xform->key.data,
872                                         aead_xform->key.length,
873                                         aead_xform->aad_length,
874                                         aead_xform->digest_length,
875                                         crypto_operation))
876                         return -EINVAL;
877         } else {
878                 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
879                 /*
880                  * It needs to create authentication desc content first,
881                  * then cipher
882                  */
883
884                 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
885                         RTE_CRYPTO_AUTH_OP_VERIFY : RTE_CRYPTO_AUTH_OP_GENERATE;
886
887                 if (qat_sym_session_aead_create_cd_auth(session,
888                                         aead_xform->key.data,
889                                         aead_xform->key.length,
890                                         aead_xform->aad_length,
891                                         aead_xform->digest_length,
892                                         crypto_operation))
893                         return -EINVAL;
894
895                 if (qat_sym_session_aead_create_cd_cipher(session,
896                                         aead_xform->key.data,
897                                         aead_xform->key.length))
898                         return -EINVAL;
899         }
900
901         session->digest_length = aead_xform->digest_length;
902         return 0;
903 }
904
905 unsigned int qat_sym_session_get_private_size(
906                 struct rte_cryptodev *dev __rte_unused)
907 {
908         return RTE_ALIGN_CEIL(sizeof(struct qat_sym_session), 8);
909 }
910
911 /* returns block size in bytes per cipher algo */
912 int qat_cipher_get_block_size(enum icp_qat_hw_cipher_algo qat_cipher_alg)
913 {
914         switch (qat_cipher_alg) {
915         case ICP_QAT_HW_CIPHER_ALGO_DES:
916                 return ICP_QAT_HW_DES_BLK_SZ;
917         case ICP_QAT_HW_CIPHER_ALGO_3DES:
918                 return ICP_QAT_HW_3DES_BLK_SZ;
919         case ICP_QAT_HW_CIPHER_ALGO_AES128:
920         case ICP_QAT_HW_CIPHER_ALGO_AES192:
921         case ICP_QAT_HW_CIPHER_ALGO_AES256:
922                 return ICP_QAT_HW_AES_BLK_SZ;
923         default:
924                 QAT_LOG(ERR, "invalid block cipher alg %u", qat_cipher_alg);
925                 return -EFAULT;
926         };
927         return -EFAULT;
928 }
929
930 /*
931  * Returns size in bytes per hash algo for state1 size field in cd_ctrl
932  * This is digest size rounded up to nearest quadword
933  */
934 static int qat_hash_get_state1_size(enum icp_qat_hw_auth_algo qat_hash_alg)
935 {
936         switch (qat_hash_alg) {
937         case ICP_QAT_HW_AUTH_ALGO_SHA1:
938                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA1_STATE1_SZ,
939                                                 QAT_HW_DEFAULT_ALIGNMENT);
940         case ICP_QAT_HW_AUTH_ALGO_SHA224:
941                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA224_STATE1_SZ,
942                                                 QAT_HW_DEFAULT_ALIGNMENT);
943         case ICP_QAT_HW_AUTH_ALGO_SHA256:
944                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA256_STATE1_SZ,
945                                                 QAT_HW_DEFAULT_ALIGNMENT);
946         case ICP_QAT_HW_AUTH_ALGO_SHA384:
947                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA384_STATE1_SZ,
948                                                 QAT_HW_DEFAULT_ALIGNMENT);
949         case ICP_QAT_HW_AUTH_ALGO_SHA512:
950                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
951                                                 QAT_HW_DEFAULT_ALIGNMENT);
952         case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
953                 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ,
954                                                 QAT_HW_DEFAULT_ALIGNMENT);
955         case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
956         case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
957                 return QAT_HW_ROUND_UP(ICP_QAT_HW_GALOIS_128_STATE1_SZ,
958                                                 QAT_HW_DEFAULT_ALIGNMENT);
959         case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
960                 return QAT_HW_ROUND_UP(ICP_QAT_HW_ZUC_3G_EIA3_STATE1_SZ,
961                                                 QAT_HW_DEFAULT_ALIGNMENT);
962         case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
963                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SNOW_3G_UIA2_STATE1_SZ,
964                                                 QAT_HW_DEFAULT_ALIGNMENT);
965         case ICP_QAT_HW_AUTH_ALGO_MD5:
966                 return QAT_HW_ROUND_UP(ICP_QAT_HW_MD5_STATE1_SZ,
967                                                 QAT_HW_DEFAULT_ALIGNMENT);
968         case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
969                 return QAT_HW_ROUND_UP(ICP_QAT_HW_KASUMI_F9_STATE1_SZ,
970                                                 QAT_HW_DEFAULT_ALIGNMENT);
971         case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
972                 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_CBC_MAC_STATE1_SZ,
973                                                 QAT_HW_DEFAULT_ALIGNMENT);
974         case ICP_QAT_HW_AUTH_ALGO_NULL:
975                 return QAT_HW_ROUND_UP(ICP_QAT_HW_NULL_STATE1_SZ,
976                                                 QAT_HW_DEFAULT_ALIGNMENT);
977         case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
978                 /* return maximum state1 size in this case */
979                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
980                                                 QAT_HW_DEFAULT_ALIGNMENT);
981         default:
982                 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
983                 return -EFAULT;
984         };
985         return -EFAULT;
986 }
987
988 /* returns digest size in bytes  per hash algo */
989 static int qat_hash_get_digest_size(enum icp_qat_hw_auth_algo qat_hash_alg)
990 {
991         switch (qat_hash_alg) {
992         case ICP_QAT_HW_AUTH_ALGO_SHA1:
993                 return ICP_QAT_HW_SHA1_STATE1_SZ;
994         case ICP_QAT_HW_AUTH_ALGO_SHA224:
995                 return ICP_QAT_HW_SHA224_STATE1_SZ;
996         case ICP_QAT_HW_AUTH_ALGO_SHA256:
997                 return ICP_QAT_HW_SHA256_STATE1_SZ;
998         case ICP_QAT_HW_AUTH_ALGO_SHA384:
999                 return ICP_QAT_HW_SHA384_STATE1_SZ;
1000         case ICP_QAT_HW_AUTH_ALGO_SHA512:
1001                 return ICP_QAT_HW_SHA512_STATE1_SZ;
1002         case ICP_QAT_HW_AUTH_ALGO_MD5:
1003                 return ICP_QAT_HW_MD5_STATE1_SZ;
1004         case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
1005                 return ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1006         case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
1007                 /* return maximum digest size in this case */
1008                 return ICP_QAT_HW_SHA512_STATE1_SZ;
1009         default:
1010                 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
1011                 return -EFAULT;
1012         };
1013         return -EFAULT;
1014 }
1015
1016 /* returns block size in byes per hash algo */
1017 static int qat_hash_get_block_size(enum icp_qat_hw_auth_algo qat_hash_alg)
1018 {
1019         switch (qat_hash_alg) {
1020         case ICP_QAT_HW_AUTH_ALGO_SHA1:
1021                 return SHA_CBLOCK;
1022         case ICP_QAT_HW_AUTH_ALGO_SHA224:
1023                 return SHA256_CBLOCK;
1024         case ICP_QAT_HW_AUTH_ALGO_SHA256:
1025                 return SHA256_CBLOCK;
1026         case ICP_QAT_HW_AUTH_ALGO_SHA384:
1027                 return SHA512_CBLOCK;
1028         case ICP_QAT_HW_AUTH_ALGO_SHA512:
1029                 return SHA512_CBLOCK;
1030         case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
1031                 return 16;
1032         case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
1033                 return ICP_QAT_HW_AES_BLK_SZ;
1034         case ICP_QAT_HW_AUTH_ALGO_MD5:
1035                 return MD5_CBLOCK;
1036         case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
1037                 /* return maximum block size in this case */
1038                 return SHA512_CBLOCK;
1039         default:
1040                 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
1041                 return -EFAULT;
1042         };
1043         return -EFAULT;
1044 }
1045
1046 static int partial_hash_sha1(uint8_t *data_in, uint8_t *data_out)
1047 {
1048         SHA_CTX ctx;
1049
1050         if (!SHA1_Init(&ctx))
1051                 return -EFAULT;
1052         SHA1_Transform(&ctx, data_in);
1053         rte_memcpy(data_out, &ctx, SHA_DIGEST_LENGTH);
1054         return 0;
1055 }
1056
1057 static int partial_hash_sha224(uint8_t *data_in, uint8_t *data_out)
1058 {
1059         SHA256_CTX ctx;
1060
1061         if (!SHA224_Init(&ctx))
1062                 return -EFAULT;
1063         SHA256_Transform(&ctx, data_in);
1064         rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
1065         return 0;
1066 }
1067
1068 static int partial_hash_sha256(uint8_t *data_in, uint8_t *data_out)
1069 {
1070         SHA256_CTX ctx;
1071
1072         if (!SHA256_Init(&ctx))
1073                 return -EFAULT;
1074         SHA256_Transform(&ctx, data_in);
1075         rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
1076         return 0;
1077 }
1078
1079 static int partial_hash_sha384(uint8_t *data_in, uint8_t *data_out)
1080 {
1081         SHA512_CTX ctx;
1082
1083         if (!SHA384_Init(&ctx))
1084                 return -EFAULT;
1085         SHA512_Transform(&ctx, data_in);
1086         rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
1087         return 0;
1088 }
1089
1090 static int partial_hash_sha512(uint8_t *data_in, uint8_t *data_out)
1091 {
1092         SHA512_CTX ctx;
1093
1094         if (!SHA512_Init(&ctx))
1095                 return -EFAULT;
1096         SHA512_Transform(&ctx, data_in);
1097         rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
1098         return 0;
1099 }
1100
1101 static int partial_hash_md5(uint8_t *data_in, uint8_t *data_out)
1102 {
1103         MD5_CTX ctx;
1104
1105         if (!MD5_Init(&ctx))
1106                 return -EFAULT;
1107         MD5_Transform(&ctx, data_in);
1108         rte_memcpy(data_out, &ctx, MD5_DIGEST_LENGTH);
1109
1110         return 0;
1111 }
1112
/*
 * Perform a single hash compression round of hash_alg over one input block
 * and store the intermediate state in data_out, converted to the byte order
 * the hardware consumes: 32-bit big-endian words for SHA-1/224/256, 64-bit
 * big-endian words for SHA-384/512, and the raw (unswapped) state for MD5.
 *
 * Returns 0 on success, -EFAULT on an unknown algorithm or OpenSSL failure.
 */
static int partial_hash_compute(enum icp_qat_hw_auth_algo hash_alg,
			uint8_t *data_in,
			uint8_t *data_out)
{
	int digest_size;
	/* VLA sized for the largest digest via the DELIMITER sentinel */
	uint8_t digest[qat_hash_get_digest_size(
			ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
	uint32_t *hash_state_out_be32;
	uint64_t *hash_state_out_be64;
	int i;

	digest_size = qat_hash_get_digest_size(hash_alg);
	if (digest_size <= 0)
		return -EFAULT;

	/* Same output buffer viewed as 32-bit or 64-bit words per algo */
	hash_state_out_be32 = (uint32_t *)data_out;
	hash_state_out_be64 = (uint64_t *)data_out;

	switch (hash_alg) {
	case ICP_QAT_HW_AUTH_ALGO_SHA1:
		if (partial_hash_sha1(data_in, digest))
			return -EFAULT;
		/* Swap each 32-bit state word to big endian */
		for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
			*hash_state_out_be32 =
				rte_bswap32(*(((uint32_t *)digest)+i));
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA224:
		if (partial_hash_sha224(data_in, digest))
			return -EFAULT;
		for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
			*hash_state_out_be32 =
				rte_bswap32(*(((uint32_t *)digest)+i));
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA256:
		if (partial_hash_sha256(data_in, digest))
			return -EFAULT;
		for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
			*hash_state_out_be32 =
				rte_bswap32(*(((uint32_t *)digest)+i));
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA384:
		if (partial_hash_sha384(data_in, digest))
			return -EFAULT;
		/* SHA-384/512 state words are 64-bit wide */
		for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
			*hash_state_out_be64 =
				rte_bswap64(*(((uint64_t *)digest)+i));
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA512:
		if (partial_hash_sha512(data_in, digest))
			return -EFAULT;
		for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
			*hash_state_out_be64 =
				rte_bswap64(*(((uint64_t *)digest)+i));
		break;
	case ICP_QAT_HW_AUTH_ALGO_MD5:
		/* MD5 state needs no byte swap; write straight to data_out */
		if (partial_hash_md5(data_in, data_out))
			return -EFAULT;
		break;
	default:
		QAT_LOG(ERR, "invalid hash alg %u", hash_alg);
		return -EFAULT;
	}

	return 0;
}
/* HMAC inner/outer pad bytes (RFC 2104) */
#define HMAC_IPAD_VALUE 0x36
#define HMAC_OPAD_VALUE 0x5c
/* Number of derived keys in the AES-XCBC precompute (K1, K2, K3) */
#define HASH_XCBC_PRECOMP_KEY_NUM 3

/* All-zero block; encrypted with the auth key to derive CMAC subkeys */
static const uint8_t AES_CMAC_SEED[ICP_QAT_HW_AES_128_KEY_SZ];
1183
1184 static void aes_cmac_key_derive(uint8_t *base, uint8_t *derived)
1185 {
1186         int i;
1187
1188         derived[0] = base[0] << 1;
1189         for (i = 1; i < ICP_QAT_HW_AES_BLK_SZ ; i++) {
1190                 derived[i] = base[i] << 1;
1191                 derived[i - 1] |= base[i] >> 7;
1192         }
1193
1194         if (base[0] & 0x80)
1195                 derived[ICP_QAT_HW_AES_BLK_SZ - 1] ^= QAT_AES_CMAC_CONST_RB;
1196 }
1197
1198 static int qat_sym_do_precomputes(enum icp_qat_hw_auth_algo hash_alg,
1199                                 const uint8_t *auth_key,
1200                                 uint16_t auth_keylen,
1201                                 uint8_t *p_state_buf,
1202                                 uint16_t *p_state_len,
1203                                 uint8_t aes_cmac)
1204 {
1205         int block_size;
1206         uint8_t ipad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1207         uint8_t opad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1208         int i;
1209
1210         if (hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC) {
1211
1212                 /* CMAC */
1213                 if (aes_cmac) {
1214                         AES_KEY enc_key;
1215                         uint8_t *in = NULL;
1216                         uint8_t k0[ICP_QAT_HW_AES_128_KEY_SZ];
1217                         uint8_t *k1, *k2;
1218
1219                         auth_keylen = ICP_QAT_HW_AES_128_KEY_SZ;
1220
1221                         in = rte_zmalloc("AES CMAC K1",
1222                                          ICP_QAT_HW_AES_128_KEY_SZ, 16);
1223
1224                         if (in == NULL) {
1225                                 QAT_LOG(ERR, "Failed to alloc memory");
1226                                 return -ENOMEM;
1227                         }
1228
1229                         rte_memcpy(in, AES_CMAC_SEED,
1230                                    ICP_QAT_HW_AES_128_KEY_SZ);
1231                         rte_memcpy(p_state_buf, auth_key, auth_keylen);
1232
1233                         if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1234                                 &enc_key) != 0) {
1235                                 rte_free(in);
1236                                 return -EFAULT;
1237                         }
1238
1239                         AES_encrypt(in, k0, &enc_key);
1240
1241                         k1 = p_state_buf + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1242                         k2 = k1 + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1243
1244                         aes_cmac_key_derive(k0, k1);
1245                         aes_cmac_key_derive(k1, k2);
1246
1247                         memset(k0, 0, ICP_QAT_HW_AES_128_KEY_SZ);
1248                         *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
1249                         rte_free(in);
1250                         return 0;
1251                 } else {
1252                         static uint8_t qat_aes_xcbc_key_seed[
1253                                         ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ] = {
1254                                 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1255                                 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1256                                 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1257                                 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1258                                 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1259                                 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1260                         };
1261
1262                         uint8_t *in = NULL;
1263                         uint8_t *out = p_state_buf;
1264                         int x;
1265                         AES_KEY enc_key;
1266
1267                         in = rte_zmalloc("working mem for key",
1268                                         ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ, 16);
1269                         if (in == NULL) {
1270                                 QAT_LOG(ERR, "Failed to alloc memory");
1271                                 return -ENOMEM;
1272                         }
1273
1274                         rte_memcpy(in, qat_aes_xcbc_key_seed,
1275                                         ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1276                         for (x = 0; x < HASH_XCBC_PRECOMP_KEY_NUM; x++) {
1277                                 if (AES_set_encrypt_key(auth_key,
1278                                                         auth_keylen << 3,
1279                                                         &enc_key) != 0) {
1280                                         rte_free(in -
1281                                           (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ));
1282                                         memset(out -
1283                                            (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ),
1284                                           0, ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1285                                         return -EFAULT;
1286                                 }
1287                                 AES_encrypt(in, out, &enc_key);
1288                                 in += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1289                                 out += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1290                         }
1291                         *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
1292                         rte_free(in - x*ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ);
1293                         return 0;
1294                 }
1295
1296         } else if ((hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_128) ||
1297                 (hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_64)) {
1298                 uint8_t *in = NULL;
1299                 uint8_t *out = p_state_buf;
1300                 AES_KEY enc_key;
1301
1302                 memset(p_state_buf, 0, ICP_QAT_HW_GALOIS_H_SZ +
1303                                 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1304                                 ICP_QAT_HW_GALOIS_E_CTR0_SZ);
1305                 in = rte_zmalloc("working mem for key",
1306                                 ICP_QAT_HW_GALOIS_H_SZ, 16);
1307                 if (in == NULL) {
1308                         QAT_LOG(ERR, "Failed to alloc memory");
1309                         return -ENOMEM;
1310                 }
1311
1312                 memset(in, 0, ICP_QAT_HW_GALOIS_H_SZ);
1313                 if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1314                         &enc_key) != 0) {
1315                         return -EFAULT;
1316                 }
1317                 AES_encrypt(in, out, &enc_key);
1318                 *p_state_len = ICP_QAT_HW_GALOIS_H_SZ +
1319                                 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1320                                 ICP_QAT_HW_GALOIS_E_CTR0_SZ;
1321                 rte_free(in);
1322                 return 0;
1323         }
1324
1325         block_size = qat_hash_get_block_size(hash_alg);
1326         if (block_size < 0)
1327                 return block_size;
1328         /* init ipad and opad from key and xor with fixed values */
1329         memset(ipad, 0, block_size);
1330         memset(opad, 0, block_size);
1331
1332         if (auth_keylen > (unsigned int)block_size) {
1333                 QAT_LOG(ERR, "invalid keylen %u", auth_keylen);
1334                 return -EFAULT;
1335         }
1336         rte_memcpy(ipad, auth_key, auth_keylen);
1337         rte_memcpy(opad, auth_key, auth_keylen);
1338
1339         for (i = 0; i < block_size; i++) {
1340                 uint8_t *ipad_ptr = ipad + i;
1341                 uint8_t *opad_ptr = opad + i;
1342                 *ipad_ptr ^= HMAC_IPAD_VALUE;
1343                 *opad_ptr ^= HMAC_OPAD_VALUE;
1344         }
1345
1346         /* do partial hash of ipad and copy to state1 */
1347         if (partial_hash_compute(hash_alg, ipad, p_state_buf)) {
1348                 memset(ipad, 0, block_size);
1349                 memset(opad, 0, block_size);
1350                 QAT_LOG(ERR, "ipad precompute failed");
1351                 return -EFAULT;
1352         }
1353
1354         /*
1355          * State len is a multiple of 8, so may be larger than the digest.
1356          * Put the partial hash of opad state_len bytes after state1
1357          */
1358         *p_state_len = qat_hash_get_state1_size(hash_alg);
1359         if (partial_hash_compute(hash_alg, opad, p_state_buf + *p_state_len)) {
1360                 memset(ipad, 0, block_size);
1361                 memset(opad, 0, block_size);
1362                 QAT_LOG(ERR, "opad precompute failed");
1363                 return -EFAULT;
1364         }
1365
1366         /*  don't leave data lying around */
1367         memset(ipad, 0, block_size);
1368         memset(opad, 0, block_size);
1369         return 0;
1370 }
1371
/*
 * Initialize the common firmware request header for a symmetric crypto
 * session: service type, flat-pointer/64-bit-address layout flags, and the
 * protocol flag matching proto_flags. State-update and digest-in-buffer
 * are disabled for all sessions set up here.
 */
static void
qat_sym_session_init_common_hdr(struct icp_qat_fw_comn_req_hdr *header,
		enum qat_sym_proto_flag proto_flags)
{
	header->hdr_flags =
		ICP_QAT_FW_COMN_HDR_FLAGS_BUILD(ICP_QAT_FW_COMN_REQ_FLAG_SET);
	header->service_type = ICP_QAT_FW_COMN_REQ_CPM_FW_LA;
	header->comn_req_flags =
		ICP_QAT_FW_COMN_FLAGS_BUILD(QAT_COMN_CD_FLD_TYPE_64BIT_ADR,
					QAT_COMN_PTR_TYPE_FLAT);
	ICP_QAT_FW_LA_PARTIAL_SET(header->serv_specif_flags,
				  ICP_QAT_FW_LA_PARTIAL_NONE);
	ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(header->serv_specif_flags,
					   ICP_QAT_FW_CIPH_IV_16BYTE_DATA);

	/* ZUC uses its own flag-setter macro, unlike the other protocols */
	switch (proto_flags)		{
	case QAT_CRYPTO_PROTO_FLAG_NONE:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_PROTO);
		break;
	case QAT_CRYPTO_PROTO_FLAG_CCM:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_CCM_PROTO);
		break;
	case QAT_CRYPTO_PROTO_FLAG_GCM:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_GCM_PROTO);
		break;
	case QAT_CRYPTO_PROTO_FLAG_SNOW3G:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_SNOW_3G_PROTO);
		break;
	case QAT_CRYPTO_PROTO_FLAG_ZUC:
		ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(header->serv_specif_flags,
			ICP_QAT_FW_LA_ZUC_3G_PROTO);
		break;
	}

	ICP_QAT_FW_LA_UPDATE_STATE_SET(header->serv_specif_flags,
					   ICP_QAT_FW_LA_NO_UPDATE_STATE);
	ICP_QAT_FW_LA_DIGEST_IN_BUFFER_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_DIGEST_IN_BUFFER);
}
1415
1416 /*
1417  *      Snow3G and ZUC should never use this function
1418  *      and set its protocol flag in both cipher and auth part of content
1419  *      descriptor building function
1420  */
1421 static enum qat_sym_proto_flag
1422 qat_get_crypto_proto_flag(uint16_t flags)
1423 {
1424         int proto = ICP_QAT_FW_LA_PROTO_GET(flags);
1425         enum qat_sym_proto_flag qat_proto_flag =
1426                         QAT_CRYPTO_PROTO_FLAG_NONE;
1427
1428         switch (proto) {
1429         case ICP_QAT_FW_LA_GCM_PROTO:
1430                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
1431                 break;
1432         case ICP_QAT_FW_LA_CCM_PROTO:
1433                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
1434                 break;
1435         }
1436
1437         return qat_proto_flag;
1438 }
1439
/*
 * Build the cipher half of the QAT content descriptor for a session.
 *
 * Writes an icp_qat_hw_cipher_algo_blk (cipher config word followed by
 * key material and any algorithm-specific padding) at cdesc->cd_cur_ptr
 * and programs the request template's cipher CD control header.
 *
 * @param cdesc        session under construction; qat_cmd, qat_mode,
 *                     qat_dir and qat_cipher_alg must already be set
 * @param cipherkey    raw cipher key bytes supplied by the application
 * @param cipherkeylen length of cipherkey in bytes
 * @return 0 on success, -EFAULT if qat_cmd is not a cipher command
 */
int qat_sym_session_aead_create_cd_cipher(struct qat_sym_session *cdesc,
                                                const uint8_t *cipherkey,
                                                uint32_t cipherkeylen)
{
        struct icp_qat_hw_cipher_algo_blk *cipher;
        struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
        struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
        struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
        /* cipher_cd_ctrl and hash_cd_ctrl are overlapping views of the
         * same cd_ctrl area; which fields are valid depends on qat_cmd.
         */
        void *ptr = &req_tmpl->cd_ctrl;
        struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
        struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
        enum icp_qat_hw_cipher_convert key_convert;
        enum qat_sym_proto_flag qat_proto_flag =
                QAT_CRYPTO_PROTO_FLAG_NONE;
        uint32_t total_key_size;
        uint16_t cipher_offset, cd_size;
        uint32_t wordIndex  = 0;
        uint32_t *temp_key = NULL;

        /* Program the firmware slice chain for this command. For
         * HASH_CIPHER the chain and cd_cur_ptr were already set up by
         * the auth CD builder, so nothing is done here.
         */
        if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER) {
                cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
                ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
                                        ICP_QAT_FW_SLICE_CIPHER);
                ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
                                        ICP_QAT_FW_SLICE_DRAM_WR);
                /* Cipher-only: no auth result to return or compare */
                ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
                                        ICP_QAT_FW_LA_NO_RET_AUTH_RES);
                ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
                                        ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
                cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
        } else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
                cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
                ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
                                        ICP_QAT_FW_SLICE_CIPHER);
                ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
                                        ICP_QAT_FW_SLICE_AUTH);
                ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
                                        ICP_QAT_FW_SLICE_AUTH);
                ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
                                        ICP_QAT_FW_SLICE_DRAM_WR);
                cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
        } else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
                QAT_LOG(ERR, "Invalid param, must be a cipher command.");
                return -EFAULT;
        }

        if (cdesc->qat_mode == ICP_QAT_HW_CIPHER_CTR_MODE) {
                /*
                 * CTR Streaming ciphers are a special case. Decrypt = encrypt
                 * Overriding default values previously set
                 */
                cdesc->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
                key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
        } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2
                || cdesc->qat_cipher_alg ==
                        ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)
                /* Snow3G/ZUC always require HW key conversion */
                key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
        else if (cdesc->qat_dir == ICP_QAT_HW_CIPHER_ENCRYPT)
                key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
        else
                key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;

        /* Per-algorithm key/state sizing. The firmware fields below are
         * expressed in 8-byte words, hence the >> 3 conversions.
         */
        if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
                /* Key block also holds the IV, so size both in */
                total_key_size = ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ +
                        ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
                cipher_cd_ctrl->cipher_state_sz =
                        ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
                qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;

        } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
                total_key_size = ICP_QAT_HW_KASUMI_F8_KEY_SZ;
                cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_KASUMI_BLK_SZ >> 3;
                cipher_cd_ctrl->cipher_padding_sz =
                                        (2 * ICP_QAT_HW_KASUMI_BLK_SZ) >> 3;
        } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES) {
                total_key_size = ICP_QAT_HW_3DES_KEY_SZ;
                cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_3DES_BLK_SZ >> 3;
                /* Preserve any proto already set in the header (e.g. by
                 * the auth CD builder for chained operations)
                 */
                qat_proto_flag =
                        qat_get_crypto_proto_flag(header->serv_specif_flags);
        } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_DES) {
                total_key_size = ICP_QAT_HW_DES_KEY_SZ;
                cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_DES_BLK_SZ >> 3;
                qat_proto_flag =
                        qat_get_crypto_proto_flag(header->serv_specif_flags);
        } else if (cdesc->qat_cipher_alg ==
                ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
                total_key_size = ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ +
                        ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
                cipher_cd_ctrl->cipher_state_sz =
                        ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
                qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
                /* ZUC is only supported from QAT GEN2 devices onwards */
                cdesc->min_qat_dev_gen = QAT_GEN2;
        } else {
                /* Default (AES and friends): key size as given */
                total_key_size = cipherkeylen;
                cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_AES_BLK_SZ >> 3;
                qat_proto_flag =
                        qat_get_crypto_proto_flag(header->serv_specif_flags);
        }
        cipher_cd_ctrl->cipher_key_sz = total_key_size >> 3;
        /* Offset of the cipher config within the CD, in 8-byte words */
        cipher_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
        cipher_cd_ctrl->cipher_cfg_offset = cipher_offset >> 3;

        header->service_cmd_id = cdesc->qat_cmd;
        qat_sym_session_init_common_hdr(header, qat_proto_flag);

        /* Write the cipher config word at the current CD position */
        cipher = (struct icp_qat_hw_cipher_algo_blk *)cdesc->cd_cur_ptr;
        cipher->cipher_config.val =
            ICP_QAT_HW_CIPHER_CONFIG_BUILD(cdesc->qat_mode,
                                        cdesc->qat_cipher_alg, key_convert,
                                        cdesc->qat_dir);

        if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
                /* KASUMI F8 needs the key twice: the plain key followed
                 * by a copy XOR-ed with the F8 key modifier
                 */
                temp_key = (uint32_t *)(cdesc->cd_cur_ptr +
                                        sizeof(struct icp_qat_hw_cipher_config)
                                        + cipherkeylen);
                memcpy(cipher->key, cipherkey, cipherkeylen);
                memcpy(temp_key, cipherkey, cipherkeylen);

                /* XOR Key with KASUMI F8 key modifier at 4 bytes level */
                for (wordIndex = 0; wordIndex < (cipherkeylen >> 2);
                                                                wordIndex++)
                        temp_key[wordIndex] ^= KASUMI_F8_KEY_MODIFIER_4_BYTES;

                cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
                                        cipherkeylen + cipherkeylen;
        } else {
                memcpy(cipher->key, cipherkey, cipherkeylen);
                cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
                                        cipherkeylen;
        }

        /* Pad the key area up to the size the HW expects. For 3DES the
         * shorter key options replicate K1 into the missing key parts.
         */
        if (total_key_size > cipherkeylen) {
                uint32_t padding_size =  total_key_size-cipherkeylen;
                if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
                        && (cipherkeylen == QAT_3DES_KEY_SZ_OPT2)) {
                        /* K3 not provided so use K1 = K3*/
                        memcpy(cdesc->cd_cur_ptr, cipherkey, padding_size);
                } else if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
                        && (cipherkeylen == QAT_3DES_KEY_SZ_OPT3)) {
                        /* K2 and K3 not provided so use K1 = K2 = K3*/
                        memcpy(cdesc->cd_cur_ptr, cipherkey,
                                cipherkeylen);
                        memcpy(cdesc->cd_cur_ptr+cipherkeylen,
                                cipherkey, cipherkeylen);
                } else
                        memset(cdesc->cd_cur_ptr, 0, padding_size);

                cdesc->cd_cur_ptr += padding_size;
        }
        /* Total CD size so far, rounded up to 8-byte words for the FW */
        cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
        cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;

        return 0;
}
1594
/*
 * Build the auth half of the QAT content descriptor for a session.
 *
 * Writes the hash setup block followed by the algorithm-specific
 * state1/state2 regions (precomputed IPAD/OPAD digests, GHASH key,
 * embedded cipher config for SNOW3G UIA2, etc.) at cdesc->cd_cur_ptr,
 * and programs the auth CD control header and the request template's
 * auth parameters.
 *
 * @param cdesc      session under construction; qat_cmd and
 *                   qat_hash_alg must already be set
 * @param authkey    raw auth key bytes supplied by the application
 * @param authkeylen length of authkey in bytes
 * @param aad_length AAD length for AEAD algorithms (GCM/CCM), else 0
 * @param digestsize digest length in bytes
 * @param operation  RTE_CRYPTO_AUTH_OP_GENERATE or _VERIFY
 * @return 0 on success, -EFAULT on invalid command/algorithm or
 *         precompute failure
 */
int qat_sym_session_aead_create_cd_auth(struct qat_sym_session *cdesc,
                                                const uint8_t *authkey,
                                                uint32_t authkeylen,
                                                uint32_t aad_length,
                                                uint32_t digestsize,
                                                unsigned int operation)
{
        struct icp_qat_hw_auth_setup *hash;
        struct icp_qat_hw_cipher_algo_blk *cipherconfig;
        struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
        struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
        struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
        /* cipher_cd_ctrl and hash_cd_ctrl are overlapping views of the
         * same cd_ctrl area; which fields are valid depends on qat_cmd.
         */
        void *ptr = &req_tmpl->cd_ctrl;
        struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
        struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
        struct icp_qat_fw_la_auth_req_params *auth_param =
                (struct icp_qat_fw_la_auth_req_params *)
                ((char *)&req_tmpl->serv_specif_rqpars +
                ICP_QAT_FW_HASH_REQUEST_PARAMETERS_OFFSET);
        uint16_t state1_size = 0, state2_size = 0;
        uint16_t hash_offset, cd_size;
        uint32_t *aad_len = NULL;
        uint32_t wordIndex  = 0;
        uint32_t *pTempKey;
        enum qat_sym_proto_flag qat_proto_flag =
                QAT_CRYPTO_PROTO_FLAG_NONE;

        /* Program the firmware slice chain for this command. For
         * CIPHER_HASH the chain and cd_cur_ptr were already set up by
         * the cipher CD builder, so nothing is done here.
         */
        if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_AUTH) {
                ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
                                        ICP_QAT_FW_SLICE_AUTH);
                ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
                                        ICP_QAT_FW_SLICE_DRAM_WR);
                cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
        } else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
                ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
                                ICP_QAT_FW_SLICE_AUTH);
                ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
                                ICP_QAT_FW_SLICE_CIPHER);
                ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
                                ICP_QAT_FW_SLICE_CIPHER);
                ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
                                ICP_QAT_FW_SLICE_DRAM_WR);
                cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
        } else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
                QAT_LOG(ERR, "Invalid param, must be a hash command.");
                return -EFAULT;
        }

        /* Verify: HW compares the digest instead of returning it.
         * Generate: HW appends the digest to the output.
         */
        if (operation == RTE_CRYPTO_AUTH_OP_VERIFY) {
                ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
                                ICP_QAT_FW_LA_NO_RET_AUTH_RES);
                ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
                                ICP_QAT_FW_LA_CMP_AUTH_RES);
                cdesc->auth_op = ICP_QAT_HW_AUTH_VERIFY;
        } else {
                ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
                                           ICP_QAT_FW_LA_RET_AUTH_RES);
                ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
                                           ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
                cdesc->auth_op = ICP_QAT_HW_AUTH_GENERATE;
        }

        /*
         * Setup the inner hash config
         */
        hash_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
        hash = (struct icp_qat_hw_auth_setup *)cdesc->cd_cur_ptr;
        hash->auth_config.reserved = 0;
        hash->auth_config.config =
                        ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE1,
                                cdesc->qat_hash_alg, digestsize);

        /* Non-HMAC-style algorithms take no block-size counter; the
         * HMAC-style ones get the hash block size in big-endian.
         */
        if (cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2
                || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_KASUMI_F9
                || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3
                || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC
                || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC
                || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_NULL
                        )
                hash->auth_counter.counter = 0;
        else {
                int block_size = qat_hash_get_block_size(cdesc->qat_hash_alg);

                if (block_size < 0)
                        return block_size;
                hash->auth_counter.counter = rte_bswap32(block_size);
        }

        cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_auth_setup);

        /*
         * cd_cur_ptr now points at the state1 information.
         */
        switch (cdesc->qat_hash_alg) {
        case ICP_QAT_HW_AUTH_ALGO_SHA1:
                /* HMAC: precompute writes ipad/opad hash states */
                if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA1, authkey,
                        authkeylen, cdesc->cd_cur_ptr, &state1_size,
                        cdesc->aes_cmac)) {
                        QAT_LOG(ERR, "(SHA)precompute failed");
                        return -EFAULT;
                }
                state2_size = RTE_ALIGN_CEIL(ICP_QAT_HW_SHA1_STATE2_SZ, 8);
                break;
        case ICP_QAT_HW_AUTH_ALGO_SHA224:
                if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA224, authkey,
                        authkeylen, cdesc->cd_cur_ptr, &state1_size,
                        cdesc->aes_cmac)) {
                        QAT_LOG(ERR, "(SHA)precompute failed");
                        return -EFAULT;
                }
                state2_size = ICP_QAT_HW_SHA224_STATE2_SZ;
                break;
        case ICP_QAT_HW_AUTH_ALGO_SHA256:
                if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA256, authkey,
                        authkeylen, cdesc->cd_cur_ptr,  &state1_size,
                        cdesc->aes_cmac)) {
                        QAT_LOG(ERR, "(SHA)precompute failed");
                        return -EFAULT;
                }
                state2_size = ICP_QAT_HW_SHA256_STATE2_SZ;
                break;
        case ICP_QAT_HW_AUTH_ALGO_SHA384:
                if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA384, authkey,
                        authkeylen, cdesc->cd_cur_ptr, &state1_size,
                        cdesc->aes_cmac)) {
                        QAT_LOG(ERR, "(SHA)precompute failed");
                        return -EFAULT;
                }
                state2_size = ICP_QAT_HW_SHA384_STATE2_SZ;
                break;
        case ICP_QAT_HW_AUTH_ALGO_SHA512:
                if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA512, authkey,
                        authkeylen, cdesc->cd_cur_ptr,  &state1_size,
                        cdesc->aes_cmac)) {
                        QAT_LOG(ERR, "(SHA)precompute failed");
                        return -EFAULT;
                }
                state2_size = ICP_QAT_HW_SHA512_STATE2_SZ;
                break;
        case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
                /* Covers both XCBC-MAC and CMAC (selected by aes_cmac) */
                state1_size = ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;

                if (cdesc->aes_cmac)
                        memset(cdesc->cd_cur_ptr, 0, state1_size);
                if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC,
                        authkey, authkeylen, cdesc->cd_cur_ptr + state1_size,
                        &state2_size, cdesc->aes_cmac)) {
                        cdesc->aes_cmac ? QAT_LOG(ERR,
                                                  "(CMAC)precompute failed")
                                        : QAT_LOG(ERR,
                                                  "(XCBC)precompute failed");
                        return -EFAULT;
                }
                break;
        case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
        case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
                /* GCM/GMAC: precompute derives the GHASH H key into state2 */
                qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
                state1_size = ICP_QAT_HW_GALOIS_128_STATE1_SZ;
                if (qat_sym_do_precomputes(cdesc->qat_hash_alg, authkey,
                        authkeylen, cdesc->cd_cur_ptr + state1_size,
                        &state2_size, cdesc->aes_cmac)) {
                        QAT_LOG(ERR, "(GCM)precompute failed");
                        return -EFAULT;
                }
                /*
                 * Write (the length of AAD) into bytes 16-19 of state2
                 * in big-endian format. This field is 8 bytes
                 */
                auth_param->u2.aad_sz =
                                RTE_ALIGN_CEIL(aad_length, 16);
                auth_param->hash_state_sz = (auth_param->u2.aad_sz) >> 3;

                aad_len = (uint32_t *)(cdesc->cd_cur_ptr +
                                        ICP_QAT_HW_GALOIS_128_STATE1_SZ +
                                        ICP_QAT_HW_GALOIS_H_SZ);
                *aad_len = rte_bswap32(aad_length);
                cdesc->aad_len = aad_length;
                break;
        case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
                /* UIA2 embeds a UEA2 cipher config + key after state2 */
                qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
                state1_size = qat_hash_get_state1_size(
                                ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2);
                state2_size = ICP_QAT_HW_SNOW_3G_UIA2_STATE2_SZ;
                memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);

                cipherconfig = (struct icp_qat_hw_cipher_algo_blk *)
                                (cdesc->cd_cur_ptr + state1_size + state2_size);
                cipherconfig->cipher_config.val =
                ICP_QAT_HW_CIPHER_CONFIG_BUILD(ICP_QAT_HW_CIPHER_ECB_MODE,
                        ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2,
                        ICP_QAT_HW_CIPHER_KEY_CONVERT,
                        ICP_QAT_HW_CIPHER_ENCRYPT);
                memcpy(cipherconfig->key, authkey, authkeylen);
                /* Zeroed IV area follows the key */
                memset(cipherconfig->key + authkeylen,
                                0, ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ);
                cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
                                authkeylen + ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
                auth_param->hash_state_sz = ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
                break;
        case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
                /* EIA3 uses auth MODE0, overriding the MODE1 config above */
                hash->auth_config.config =
                        ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE0,
                                cdesc->qat_hash_alg, digestsize);
                qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
                state1_size = qat_hash_get_state1_size(
                                ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3);
                state2_size = ICP_QAT_HW_ZUC_3G_EIA3_STATE2_SZ;
                memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size
                        + ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ);

                /* Key goes into state2; zeroed IV area follows */
                memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
                cdesc->cd_cur_ptr += state1_size + state2_size
                        + ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
                auth_param->hash_state_sz = ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
                /* ZUC is only supported from QAT GEN2 devices onwards */
                cdesc->min_qat_dev_gen = QAT_GEN2;

                break;
        case ICP_QAT_HW_AUTH_ALGO_MD5:
                if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_MD5, authkey,
                        authkeylen, cdesc->cd_cur_ptr, &state1_size,
                        cdesc->aes_cmac)) {
                        QAT_LOG(ERR, "(MD5)precompute failed");
                        return -EFAULT;
                }
                state2_size = ICP_QAT_HW_MD5_STATE2_SZ;
                break;
        case ICP_QAT_HW_AUTH_ALGO_NULL:
                state1_size = qat_hash_get_state1_size(
                                ICP_QAT_HW_AUTH_ALGO_NULL);
                state2_size = ICP_QAT_HW_NULL_STATE2_SZ;
                break;
        case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
                /* CCM: state2 holds the key and the encrypted counter block */
                qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
                state1_size = qat_hash_get_state1_size(
                                ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC);
                state2_size = ICP_QAT_HW_AES_CBC_MAC_KEY_SZ +
                                ICP_QAT_HW_AES_CCM_CBC_E_CTR0_SZ;

                /* Non-zero AAD must account for the CCM B0 block and the
                 * AAD length encoding bytes, then align for the HW.
                 */
                if (aad_length > 0) {
                        aad_length += ICP_QAT_HW_CCM_AAD_B0_LEN +
                        ICP_QAT_HW_CCM_AAD_LEN_INFO;
                        auth_param->u2.aad_sz =
                        RTE_ALIGN_CEIL(aad_length,
                        ICP_QAT_HW_CCM_AAD_ALIGNMENT);
                } else {
                        auth_param->u2.aad_sz = ICP_QAT_HW_CCM_AAD_B0_LEN;
                }
                cdesc->aad_len = aad_length;
                hash->auth_counter.counter = 0;

                hash_cd_ctrl->outer_prefix_sz = digestsize;
                auth_param->hash_state_sz = digestsize;

                memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
                break;
        case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
                state1_size = qat_hash_get_state1_size(
                                ICP_QAT_HW_AUTH_ALGO_KASUMI_F9);
                state2_size = ICP_QAT_HW_KASUMI_F9_STATE2_SZ;
                memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
                pTempKey = (uint32_t *)(cdesc->cd_cur_ptr + state1_size
                                                        + authkeylen);
                /*
                * The Inner Hash Initial State2 block must contain IK
                * (Initialisation Key), followed by IK XOR-ed with KM
                * (Key Modifier): IK||(IK^KM).
                */
                /* write the auth key */
                memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
                /* initialise temp key with auth key */
                memcpy(pTempKey, authkey, authkeylen);
                /* XOR Key with KASUMI F9 key modifier at 4 bytes level */
                for (wordIndex = 0; wordIndex < (authkeylen >> 2); wordIndex++)
                        pTempKey[wordIndex] ^= KASUMI_F9_KEY_MODIFIER_4_BYTES;
                break;
        default:
                QAT_LOG(ERR, "Invalid HASH alg %u", cdesc->qat_hash_alg);
                return -EFAULT;
        }

        /* Request template setup */
        qat_sym_session_init_common_hdr(header, qat_proto_flag);
        header->service_cmd_id = cdesc->qat_cmd;

        /* Auth CD config setup */
        hash_cd_ctrl->hash_cfg_offset = hash_offset >> 3;
        hash_cd_ctrl->hash_flags = ICP_QAT_FW_AUTH_HDR_FLAG_NO_NESTED;
        hash_cd_ctrl->inner_res_sz = digestsize;
        hash_cd_ctrl->final_sz = digestsize;
        hash_cd_ctrl->inner_state1_sz = state1_size;
        auth_param->auth_res_sz = digestsize;

        /* state2 follows state1 (rounded up to 8 bytes) in the CD;
         * offsets/sizes below are in 8-byte words.
         */
        hash_cd_ctrl->inner_state2_sz  = state2_size;
        hash_cd_ctrl->inner_state2_offset = hash_cd_ctrl->hash_cfg_offset +
                        ((sizeof(struct icp_qat_hw_auth_setup) +
                         RTE_ALIGN_CEIL(hash_cd_ctrl->inner_state1_sz, 8))
                                        >> 3);

        cdesc->cd_cur_ptr += state1_size + state2_size;
        cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;

        cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
        cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;

        return 0;
}
1901
1902 int qat_sym_validate_aes_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1903 {
1904         switch (key_len) {
1905         case ICP_QAT_HW_AES_128_KEY_SZ:
1906                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
1907                 break;
1908         case ICP_QAT_HW_AES_192_KEY_SZ:
1909                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES192;
1910                 break;
1911         case ICP_QAT_HW_AES_256_KEY_SZ:
1912                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
1913                 break;
1914         default:
1915                 return -EINVAL;
1916         }
1917         return 0;
1918 }
1919
1920 int qat_sym_validate_aes_docsisbpi_key(int key_len,
1921                 enum icp_qat_hw_cipher_algo *alg)
1922 {
1923         switch (key_len) {
1924         case ICP_QAT_HW_AES_128_KEY_SZ:
1925                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
1926                 break;
1927         case ICP_QAT_HW_AES_256_KEY_SZ:
1928                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
1929                 break;
1930         default:
1931                 return -EINVAL;
1932         }
1933         return 0;
1934 }
1935
1936 int qat_sym_validate_snow3g_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1937 {
1938         switch (key_len) {
1939         case ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ:
1940                 *alg = ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2;
1941                 break;
1942         default:
1943                 return -EINVAL;
1944         }
1945         return 0;
1946 }
1947
1948 int qat_sym_validate_kasumi_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1949 {
1950         switch (key_len) {
1951         case ICP_QAT_HW_KASUMI_KEY_SZ:
1952                 *alg = ICP_QAT_HW_CIPHER_ALGO_KASUMI;
1953                 break;
1954         default:
1955                 return -EINVAL;
1956         }
1957         return 0;
1958 }
1959
1960 int qat_sym_validate_des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1961 {
1962         switch (key_len) {
1963         case ICP_QAT_HW_DES_KEY_SZ:
1964                 *alg = ICP_QAT_HW_CIPHER_ALGO_DES;
1965                 break;
1966         default:
1967                 return -EINVAL;
1968         }
1969         return 0;
1970 }
1971
1972 int qat_sym_validate_3des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1973 {
1974         switch (key_len) {
1975         case QAT_3DES_KEY_SZ_OPT1:
1976         case QAT_3DES_KEY_SZ_OPT2:
1977         case QAT_3DES_KEY_SZ_OPT3:
1978                 *alg = ICP_QAT_HW_CIPHER_ALGO_3DES;
1979                 break;
1980         default:
1981                 return -EINVAL;
1982         }
1983         return 0;
1984 }
1985
1986 int qat_sym_validate_zuc_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1987 {
1988         switch (key_len) {
1989         case ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ:
1990                 *alg = ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3;
1991                 break;
1992         default:
1993                 return -EINVAL;
1994         }
1995         return 0;
1996 }