mempool: introduce helpers for populate and required size
[dpdk.git] / drivers / crypto / qat / qat_sym_session.c
1 /* SPDX-License-Identifier: (BSD-3-Clause OR GPL-2.0)
2  * Copyright(c) 2015-2019 Intel Corporation
3  */
4
5 #include <openssl/sha.h>        /* Needed to calculate pre-compute values */
6 #include <openssl/aes.h>        /* Needed to calculate pre-compute values */
7 #include <openssl/md5.h>        /* Needed to calculate pre-compute values */
8 #include <openssl/evp.h>        /* Needed for bpi runt block processing */
9
10 #include <rte_memcpy.h>
11 #include <rte_common.h>
12 #include <rte_spinlock.h>
13 #include <rte_byteorder.h>
14 #include <rte_log.h>
15 #include <rte_malloc.h>
16 #include <rte_crypto_sym.h>
17
18 #include "qat_logs.h"
19 #include "qat_sym_session.h"
20 #include "qat_sym_pmd.h"
21
22 /** Frees a context previously created
23  *  Depends on openssl libcrypto
24  */
25 static void
26 bpi_cipher_ctx_free(void *bpi_ctx)
27 {
28         if (bpi_ctx != NULL)
29                 EVP_CIPHER_CTX_free((EVP_CIPHER_CTX *)bpi_ctx);
30 }
31
32 /** Creates a context in either AES or DES in ECB mode
33  *  Depends on openssl libcrypto
34  */
35 static int
36 bpi_cipher_ctx_init(enum rte_crypto_cipher_algorithm cryptodev_algo,
37                 enum rte_crypto_cipher_operation direction __rte_unused,
38                 const uint8_t *key, void **ctx)
39 {
40         const EVP_CIPHER *algo = NULL;
41         int ret;
42         *ctx = EVP_CIPHER_CTX_new();
43
44         if (*ctx == NULL) {
45                 ret = -ENOMEM;
46                 goto ctx_init_err;
47         }
48
49         if (cryptodev_algo == RTE_CRYPTO_CIPHER_DES_DOCSISBPI)
50                 algo = EVP_des_ecb();
51         else
52                 algo = EVP_aes_128_ecb();
53
54         /* IV will be ECB encrypted whether direction is encrypt or decrypt*/
55         if (EVP_EncryptInit_ex(*ctx, algo, NULL, key, 0) != 1) {
56                 ret = -EINVAL;
57                 goto ctx_init_err;
58         }
59
60         return 0;
61
62 ctx_init_err:
63         if (*ctx != NULL)
64                 EVP_CIPHER_CTX_free(*ctx);
65         return ret;
66 }
67
68 static int
69 qat_is_cipher_alg_supported(enum rte_crypto_cipher_algorithm algo,
70                 struct qat_sym_dev_private *internals)
71 {
72         int i = 0;
73         const struct rte_cryptodev_capabilities *capability;
74
75         while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
76                         RTE_CRYPTO_OP_TYPE_UNDEFINED) {
77                 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
78                         continue;
79
80                 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_CIPHER)
81                         continue;
82
83                 if (capability->sym.cipher.algo == algo)
84                         return 1;
85         }
86         return 0;
87 }
88
89 static int
90 qat_is_auth_alg_supported(enum rte_crypto_auth_algorithm algo,
91                 struct qat_sym_dev_private *internals)
92 {
93         int i = 0;
94         const struct rte_cryptodev_capabilities *capability;
95
96         while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
97                         RTE_CRYPTO_OP_TYPE_UNDEFINED) {
98                 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
99                         continue;
100
101                 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_AUTH)
102                         continue;
103
104                 if (capability->sym.auth.algo == algo)
105                         return 1;
106         }
107         return 0;
108 }
109
110 void
111 qat_sym_session_clear(struct rte_cryptodev *dev,
112                 struct rte_cryptodev_sym_session *sess)
113 {
114         uint8_t index = dev->driver_id;
115         void *sess_priv = get_sym_session_private_data(sess, index);
116         struct qat_sym_session *s = (struct qat_sym_session *)sess_priv;
117
118         if (sess_priv) {
119                 if (s->bpi_ctx)
120                         bpi_cipher_ctx_free(s->bpi_ctx);
121                 memset(s, 0, qat_sym_session_get_private_size(dev));
122                 struct rte_mempool *sess_mp = rte_mempool_from_obj(sess_priv);
123
124                 set_sym_session_private_data(sess, index, NULL);
125                 rte_mempool_put(sess_mp, sess_priv);
126         }
127 }
128
129 static int
130 qat_get_cmd_id(const struct rte_crypto_sym_xform *xform)
131 {
132         /* Cipher Only */
133         if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER && xform->next == NULL)
134                 return ICP_QAT_FW_LA_CMD_CIPHER;
135
136         /* Authentication Only */
137         if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH && xform->next == NULL)
138                 return ICP_QAT_FW_LA_CMD_AUTH;
139
140         /* AEAD */
141         if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
142                 /* AES-GCM and AES-CCM works with different direction
143                  * GCM first encrypts and generate hash where AES-CCM
144                  * first generate hash and encrypts. Similar relation
145                  * applies to decryption.
146                  */
147                 if (xform->aead.op == RTE_CRYPTO_AEAD_OP_ENCRYPT)
148                         if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
149                                 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
150                         else
151                                 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
152                 else
153                         if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
154                                 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
155                         else
156                                 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
157         }
158
159         if (xform->next == NULL)
160                 return -1;
161
162         /* Cipher then Authenticate */
163         if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
164                         xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
165                 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
166
167         /* Authenticate then Cipher */
168         if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH &&
169                         xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
170                 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
171
172         return -1;
173 }
174
175 static struct rte_crypto_auth_xform *
176 qat_get_auth_xform(struct rte_crypto_sym_xform *xform)
177 {
178         do {
179                 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH)
180                         return &xform->auth;
181
182                 xform = xform->next;
183         } while (xform);
184
185         return NULL;
186 }
187
188 static struct rte_crypto_cipher_xform *
189 qat_get_cipher_xform(struct rte_crypto_sym_xform *xform)
190 {
191         do {
192                 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
193                         return &xform->cipher;
194
195                 xform = xform->next;
196         } while (xform);
197
198         return NULL;
199 }
200
201 int
202 qat_sym_session_configure_cipher(struct rte_cryptodev *dev,
203                 struct rte_crypto_sym_xform *xform,
204                 struct qat_sym_session *session)
205 {
206         struct qat_sym_dev_private *internals = dev->data->dev_private;
207         struct rte_crypto_cipher_xform *cipher_xform = NULL;
208         int ret;
209
210         /* Get cipher xform from crypto xform chain */
211         cipher_xform = qat_get_cipher_xform(xform);
212
213         session->cipher_iv.offset = cipher_xform->iv.offset;
214         session->cipher_iv.length = cipher_xform->iv.length;
215
216         switch (cipher_xform->algo) {
217         case RTE_CRYPTO_CIPHER_AES_CBC:
218                 if (qat_sym_validate_aes_key(cipher_xform->key.length,
219                                 &session->qat_cipher_alg) != 0) {
220                         QAT_LOG(ERR, "Invalid AES cipher key size");
221                         ret = -EINVAL;
222                         goto error_out;
223                 }
224                 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
225                 break;
226         case RTE_CRYPTO_CIPHER_AES_CTR:
227                 if (qat_sym_validate_aes_key(cipher_xform->key.length,
228                                 &session->qat_cipher_alg) != 0) {
229                         QAT_LOG(ERR, "Invalid AES cipher key size");
230                         ret = -EINVAL;
231                         goto error_out;
232                 }
233                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
234                 break;
235         case RTE_CRYPTO_CIPHER_SNOW3G_UEA2:
236                 if (qat_sym_validate_snow3g_key(cipher_xform->key.length,
237                                         &session->qat_cipher_alg) != 0) {
238                         QAT_LOG(ERR, "Invalid SNOW 3G cipher key size");
239                         ret = -EINVAL;
240                         goto error_out;
241                 }
242                 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
243                 break;
244         case RTE_CRYPTO_CIPHER_NULL:
245                 session->qat_cipher_alg = ICP_QAT_HW_CIPHER_ALGO_NULL;
246                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
247                 break;
248         case RTE_CRYPTO_CIPHER_KASUMI_F8:
249                 if (qat_sym_validate_kasumi_key(cipher_xform->key.length,
250                                         &session->qat_cipher_alg) != 0) {
251                         QAT_LOG(ERR, "Invalid KASUMI cipher key size");
252                         ret = -EINVAL;
253                         goto error_out;
254                 }
255                 session->qat_mode = ICP_QAT_HW_CIPHER_F8_MODE;
256                 break;
257         case RTE_CRYPTO_CIPHER_3DES_CBC:
258                 if (qat_sym_validate_3des_key(cipher_xform->key.length,
259                                 &session->qat_cipher_alg) != 0) {
260                         QAT_LOG(ERR, "Invalid 3DES cipher key size");
261                         ret = -EINVAL;
262                         goto error_out;
263                 }
264                 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
265                 break;
266         case RTE_CRYPTO_CIPHER_DES_CBC:
267                 if (qat_sym_validate_des_key(cipher_xform->key.length,
268                                 &session->qat_cipher_alg) != 0) {
269                         QAT_LOG(ERR, "Invalid DES cipher key size");
270                         ret = -EINVAL;
271                         goto error_out;
272                 }
273                 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
274                 break;
275         case RTE_CRYPTO_CIPHER_3DES_CTR:
276                 if (qat_sym_validate_3des_key(cipher_xform->key.length,
277                                 &session->qat_cipher_alg) != 0) {
278                         QAT_LOG(ERR, "Invalid 3DES cipher key size");
279                         ret = -EINVAL;
280                         goto error_out;
281                 }
282                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
283                 break;
284         case RTE_CRYPTO_CIPHER_DES_DOCSISBPI:
285                 ret = bpi_cipher_ctx_init(
286                                         cipher_xform->algo,
287                                         cipher_xform->op,
288                                         cipher_xform->key.data,
289                                         &session->bpi_ctx);
290                 if (ret != 0) {
291                         QAT_LOG(ERR, "failed to create DES BPI ctx");
292                         goto error_out;
293                 }
294                 if (qat_sym_validate_des_key(cipher_xform->key.length,
295                                 &session->qat_cipher_alg) != 0) {
296                         QAT_LOG(ERR, "Invalid DES cipher key size");
297                         ret = -EINVAL;
298                         goto error_out;
299                 }
300                 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
301                 break;
302         case RTE_CRYPTO_CIPHER_AES_DOCSISBPI:
303                 ret = bpi_cipher_ctx_init(
304                                         cipher_xform->algo,
305                                         cipher_xform->op,
306                                         cipher_xform->key.data,
307                                         &session->bpi_ctx);
308                 if (ret != 0) {
309                         QAT_LOG(ERR, "failed to create AES BPI ctx");
310                         goto error_out;
311                 }
312                 if (qat_sym_validate_aes_docsisbpi_key(cipher_xform->key.length,
313                                 &session->qat_cipher_alg) != 0) {
314                         QAT_LOG(ERR, "Invalid AES DOCSISBPI key size");
315                         ret = -EINVAL;
316                         goto error_out;
317                 }
318                 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
319                 break;
320         case RTE_CRYPTO_CIPHER_ZUC_EEA3:
321                 if (!qat_is_cipher_alg_supported(
322                         cipher_xform->algo, internals)) {
323                         QAT_LOG(ERR, "%s not supported on this device",
324                                 rte_crypto_cipher_algorithm_strings
325                                         [cipher_xform->algo]);
326                         ret = -ENOTSUP;
327                         goto error_out;
328                 }
329                 if (qat_sym_validate_zuc_key(cipher_xform->key.length,
330                                 &session->qat_cipher_alg) != 0) {
331                         QAT_LOG(ERR, "Invalid ZUC cipher key size");
332                         ret = -EINVAL;
333                         goto error_out;
334                 }
335                 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
336                 break;
337         case RTE_CRYPTO_CIPHER_AES_XTS:
338                 if ((cipher_xform->key.length/2) == ICP_QAT_HW_AES_192_KEY_SZ) {
339                         QAT_LOG(ERR, "AES-XTS-192 not supported");
340                         ret = -EINVAL;
341                         goto error_out;
342                 }
343                 if (qat_sym_validate_aes_key((cipher_xform->key.length/2),
344                                 &session->qat_cipher_alg) != 0) {
345                         QAT_LOG(ERR, "Invalid AES-XTS cipher key size");
346                         ret = -EINVAL;
347                         goto error_out;
348                 }
349                 session->qat_mode = ICP_QAT_HW_CIPHER_XTS_MODE;
350                 break;
351         case RTE_CRYPTO_CIPHER_3DES_ECB:
352         case RTE_CRYPTO_CIPHER_AES_ECB:
353         case RTE_CRYPTO_CIPHER_AES_F8:
354         case RTE_CRYPTO_CIPHER_ARC4:
355                 QAT_LOG(ERR, "Crypto QAT PMD: Unsupported Cipher alg %u",
356                                 cipher_xform->algo);
357                 ret = -ENOTSUP;
358                 goto error_out;
359         default:
360                 QAT_LOG(ERR, "Crypto: Undefined Cipher specified %u\n",
361                                 cipher_xform->algo);
362                 ret = -EINVAL;
363                 goto error_out;
364         }
365
366         if (cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
367                 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
368         else
369                 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
370
371         if (qat_sym_session_aead_create_cd_cipher(session,
372                                                 cipher_xform->key.data,
373                                                 cipher_xform->key.length)) {
374                 ret = -EINVAL;
375                 goto error_out;
376         }
377
378         return 0;
379
380 error_out:
381         if (session->bpi_ctx) {
382                 bpi_cipher_ctx_free(session->bpi_ctx);
383                 session->bpi_ctx = NULL;
384         }
385         return ret;
386 }
387
388 int
389 qat_sym_session_configure(struct rte_cryptodev *dev,
390                 struct rte_crypto_sym_xform *xform,
391                 struct rte_cryptodev_sym_session *sess,
392                 struct rte_mempool *mempool)
393 {
394         void *sess_private_data;
395         int ret;
396
397         if (rte_mempool_get(mempool, &sess_private_data)) {
398                 CDEV_LOG_ERR(
399                         "Couldn't get object from session mempool");
400                 return -ENOMEM;
401         }
402
403         ret = qat_sym_session_set_parameters(dev, xform, sess_private_data);
404         if (ret != 0) {
405                 QAT_LOG(ERR,
406                     "Crypto QAT PMD: failed to configure session parameters");
407
408                 /* Return session to mempool */
409                 rte_mempool_put(mempool, sess_private_data);
410                 return ret;
411         }
412
413         set_sym_session_private_data(sess, dev->driver_id,
414                 sess_private_data);
415
416         return 0;
417 }
418
419 int
420 qat_sym_session_set_parameters(struct rte_cryptodev *dev,
421                 struct rte_crypto_sym_xform *xform, void *session_private)
422 {
423         struct qat_sym_session *session = session_private;
424         int ret;
425         int qat_cmd_id;
426
427         /* Set context descriptor physical address */
428         session->cd_paddr = rte_mempool_virt2iova(session) +
429                         offsetof(struct qat_sym_session, cd);
430
431         session->min_qat_dev_gen = QAT_GEN1;
432
433         /* Get requested QAT command id */
434         qat_cmd_id = qat_get_cmd_id(xform);
435         if (qat_cmd_id < 0 || qat_cmd_id >= ICP_QAT_FW_LA_CMD_DELIMITER) {
436                 QAT_LOG(ERR, "Unsupported xform chain requested");
437                 return -ENOTSUP;
438         }
439         session->qat_cmd = (enum icp_qat_fw_la_cmd_id)qat_cmd_id;
440         switch (session->qat_cmd) {
441         case ICP_QAT_FW_LA_CMD_CIPHER:
442                 ret = qat_sym_session_configure_cipher(dev, xform, session);
443                 if (ret < 0)
444                         return ret;
445                 break;
446         case ICP_QAT_FW_LA_CMD_AUTH:
447                 ret = qat_sym_session_configure_auth(dev, xform, session);
448                 if (ret < 0)
449                         return ret;
450                 break;
451         case ICP_QAT_FW_LA_CMD_CIPHER_HASH:
452                 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
453                         ret = qat_sym_session_configure_aead(dev, xform,
454                                         session);
455                         if (ret < 0)
456                                 return ret;
457                 } else {
458                         ret = qat_sym_session_configure_cipher(dev,
459                                         xform, session);
460                         if (ret < 0)
461                                 return ret;
462                         ret = qat_sym_session_configure_auth(dev,
463                                         xform, session);
464                         if (ret < 0)
465                                 return ret;
466                 }
467                 break;
468         case ICP_QAT_FW_LA_CMD_HASH_CIPHER:
469                 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
470                         ret = qat_sym_session_configure_aead(dev, xform,
471                                         session);
472                         if (ret < 0)
473                                 return ret;
474                 } else {
475                         ret = qat_sym_session_configure_auth(dev,
476                                         xform, session);
477                         if (ret < 0)
478                                 return ret;
479                         ret = qat_sym_session_configure_cipher(dev,
480                                         xform, session);
481                         if (ret < 0)
482                                 return ret;
483                 }
484                 break;
485         case ICP_QAT_FW_LA_CMD_TRNG_GET_RANDOM:
486         case ICP_QAT_FW_LA_CMD_TRNG_TEST:
487         case ICP_QAT_FW_LA_CMD_SSL3_KEY_DERIVE:
488         case ICP_QAT_FW_LA_CMD_TLS_V1_1_KEY_DERIVE:
489         case ICP_QAT_FW_LA_CMD_TLS_V1_2_KEY_DERIVE:
490         case ICP_QAT_FW_LA_CMD_MGF1:
491         case ICP_QAT_FW_LA_CMD_AUTH_PRE_COMP:
492         case ICP_QAT_FW_LA_CMD_CIPHER_PRE_COMP:
493         case ICP_QAT_FW_LA_CMD_DELIMITER:
494         QAT_LOG(ERR, "Unsupported Service %u",
495                 session->qat_cmd);
496                 return -ENOTSUP;
497         default:
498         QAT_LOG(ERR, "Unsupported Service %u",
499                 session->qat_cmd);
500                 return -ENOTSUP;
501         }
502
503         return 0;
504 }
505
/** Switch an AES-GCM session to Single-Pass mode when the device allows it.
 *
 * On QAT GEN3 with a 12-octet GCM IV, AEAD can run as one cipher pass
 * instead of a chained cipher+hash, which is faster. If the conditions are
 * not met this function leaves the session untouched and returns 0 so the
 * caller falls back to the regular chained AEAD setup.
 *
 * @param internals   device private data (used for the device generation)
 * @param session     session to (possibly) convert to single-pass
 * @param aead_xform  the AEAD xform the session was configured from
 * @return 0 on success or when single-pass does not apply; -EINVAL if
 *         building the cipher content descriptor fails
 */
static int
qat_sym_session_handle_single_pass(struct qat_sym_dev_private *internals,
		struct qat_sym_session *session,
		struct rte_crypto_aead_xform *aead_xform)
{
	enum qat_device_gen qat_dev_gen = internals->qat_dev->qat_dev_gen;

	if (qat_dev_gen == QAT_GEN3 &&
			aead_xform->iv.length == QAT_AES_GCM_SPC_IV_SIZE) {
		/* Use faster Single-Pass GCM */
		struct icp_qat_fw_la_cipher_req_params *cipher_param =
				(void *) &session->fw_req.serv_specif_rqpars;

		session->is_single_pass = 1;
		session->min_qat_dev_gen = QAT_GEN3;
		/* Single-pass is issued as a plain CIPHER command in AEAD
		 * hardware mode; the digest is produced by the same pass.
		 */
		session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER;
		session->qat_mode = ICP_QAT_HW_CIPHER_AEAD_MODE;
		session->cipher_iv.offset = aead_xform->iv.offset;
		session->cipher_iv.length = aead_xform->iv.length;
		if (qat_sym_session_aead_create_cd_cipher(session,
				aead_xform->key.data, aead_xform->key.length))
			return -EINVAL;
		session->aad_len = aead_xform->aad_length;
		session->digest_length = aead_xform->digest_length;
		if (aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
			session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
			session->auth_op = ICP_QAT_HW_AUTH_GENERATE;
			/* Encrypt: ask firmware to return the auth tag */
			ICP_QAT_FW_LA_RET_AUTH_SET(
				session->fw_req.comn_hdr.serv_specif_flags,
				ICP_QAT_FW_LA_RET_AUTH_RES);
		} else {
			session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
			session->auth_op = ICP_QAT_HW_AUTH_VERIFY;
			/* Decrypt: ask firmware to compare the auth tag */
			ICP_QAT_FW_LA_CMP_AUTH_SET(
				session->fw_req.comn_hdr.serv_specif_flags,
				ICP_QAT_FW_LA_CMP_AUTH_RES);
		}
		ICP_QAT_FW_LA_SINGLE_PASS_PROTO_FLAG_SET(
				session->fw_req.comn_hdr.serv_specif_flags,
				ICP_QAT_FW_LA_SINGLE_PASS_PROTO);
		ICP_QAT_FW_LA_PROTO_SET(
				session->fw_req.comn_hdr.serv_specif_flags,
				ICP_QAT_FW_LA_NO_PROTO);
		ICP_QAT_FW_LA_GCM_IV_LEN_FLAG_SET(
				session->fw_req.comn_hdr.serv_specif_flags,
				ICP_QAT_FW_LA_GCM_IV_LEN_12_OCTETS);
		session->fw_req.comn_hdr.service_cmd_id =
				ICP_QAT_FW_LA_CMD_CIPHER;
		session->cd.cipher.cipher_config.val =
				ICP_QAT_HW_CIPHER_CONFIG_BUILD(
					ICP_QAT_HW_CIPHER_AEAD_MODE,
					session->qat_cipher_alg,
					ICP_QAT_HW_CIPHER_NO_CONVERT,
					session->qat_dir);
		/* Digest length lives in the cipher config word in AEAD mode */
		QAT_FIELD_SET(session->cd.cipher.cipher_config.val,
				aead_xform->digest_length,
				QAT_CIPHER_AEAD_HASH_CMP_LEN_BITPOS,
				QAT_CIPHER_AEAD_HASH_CMP_LEN_MASK);
		session->cd.cipher.cipher_config.reserved =
				ICP_QAT_HW_CIPHER_CONFIG_BUILD_UPPER(
					aead_xform->aad_length);
		cipher_param->spc_aad_sz = aead_xform->aad_length;
		cipher_param->spc_auth_res_sz = aead_xform->digest_length;
	}
	return 0;
}
572
/** Configure the authentication part of a QAT session from an xform chain.
 *
 * Maps the auth algorithm to a QAT hash algorithm, then builds the auth
 * content descriptor. AES-GMAC is special-cased: it is implemented as
 * GCM-with-no-data, so both cipher and auth content descriptors are built
 * (in an order that depends on the operation direction) before restoring
 * the command to auth-only.
 *
 * @param dev      cryptodev owning the session
 * @param xform    xform chain containing an auth xform
 * @param session  QAT session to populate
 * @return 0 on success, -EINVAL or -ENOTSUP on failure
 */
int
qat_sym_session_configure_auth(struct rte_cryptodev *dev,
				struct rte_crypto_sym_xform *xform,
				struct qat_sym_session *session)
{
	struct rte_crypto_auth_xform *auth_xform = qat_get_auth_xform(xform);
	struct qat_sym_dev_private *internals = dev->data->dev_private;
	const uint8_t *key_data = auth_xform->key.data;
	uint8_t key_length = auth_xform->key.length;
	session->aes_cmac = 0;

	switch (auth_xform->algo) {
	case RTE_CRYPTO_AUTH_SHA1_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA1;
		break;
	case RTE_CRYPTO_AUTH_SHA224_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA224;
		break;
	case RTE_CRYPTO_AUTH_SHA256_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA256;
		break;
	case RTE_CRYPTO_AUTH_SHA384_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA384;
		break;
	case RTE_CRYPTO_AUTH_SHA512_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA512;
		break;
	case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
		break;
	case RTE_CRYPTO_AUTH_AES_CMAC:
		/* CMAC shares the XCBC-MAC hardware algo; the aes_cmac flag
		 * distinguishes the two at precompute time.
		 */
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
		session->aes_cmac = 1;
		break;
	case RTE_CRYPTO_AUTH_AES_GMAC:
		/* GMAC uses the GCM engine, so the key is an AES cipher key */
		if (qat_sym_validate_aes_key(auth_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES key size");
			return -EINVAL;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;

		break;
	case RTE_CRYPTO_AUTH_SNOW3G_UIA2:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2;
		break;
	case RTE_CRYPTO_AUTH_MD5_HMAC:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_MD5;
		break;
	case RTE_CRYPTO_AUTH_NULL:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_NULL;
		break;
	case RTE_CRYPTO_AUTH_KASUMI_F9:
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_KASUMI_F9;
		break;
	case RTE_CRYPTO_AUTH_ZUC_EIA3:
		/* ZUC availability depends on the device generation */
		if (!qat_is_auth_alg_supported(auth_xform->algo, internals)) {
			QAT_LOG(ERR, "%s not supported on this device",
				rte_crypto_auth_algorithm_strings
				[auth_xform->algo]);
			return -ENOTSUP;
		}
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3;
		break;
	case RTE_CRYPTO_AUTH_SHA1:
	case RTE_CRYPTO_AUTH_SHA256:
	case RTE_CRYPTO_AUTH_SHA512:
	case RTE_CRYPTO_AUTH_SHA224:
	case RTE_CRYPTO_AUTH_SHA384:
	case RTE_CRYPTO_AUTH_MD5:
	case RTE_CRYPTO_AUTH_AES_CBC_MAC:
		/* Plain (non-HMAC) hashes are not offloaded by this PMD */
		QAT_LOG(ERR, "Crypto: Unsupported hash alg %u",
				auth_xform->algo);
		return -ENOTSUP;
	default:
		QAT_LOG(ERR, "Crypto: Undefined Hash algo %u specified",
				auth_xform->algo);
		return -EINVAL;
	}

	session->auth_iv.offset = auth_xform->iv.offset;
	session->auth_iv.length = auth_xform->iv.length;

	if (auth_xform->algo == RTE_CRYPTO_AUTH_AES_GMAC) {
		if (auth_xform->op == RTE_CRYPTO_AUTH_OP_GENERATE) {
			session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER_HASH;
			session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
			/*
			 * It needs to create cipher desc content first,
			 * then authentication
			 */

			if (qat_sym_session_aead_create_cd_cipher(session,
						auth_xform->key.data,
						auth_xform->key.length))
				return -EINVAL;

			if (qat_sym_session_aead_create_cd_auth(session,
						key_data,
						key_length,
						0,
						auth_xform->digest_length,
						auth_xform->op))
				return -EINVAL;
		} else {
			session->qat_cmd = ICP_QAT_FW_LA_CMD_HASH_CIPHER;
			session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
			/*
			 * It needs to create authentication desc content first,
			 * then cipher
			 */

			if (qat_sym_session_aead_create_cd_auth(session,
					key_data,
					key_length,
					0,
					auth_xform->digest_length,
					auth_xform->op))
				return -EINVAL;

			if (qat_sym_session_aead_create_cd_cipher(session,
						auth_xform->key.data,
						auth_xform->key.length))
				return -EINVAL;
		}
		/* Restore to authentication only */
		session->qat_cmd = ICP_QAT_FW_LA_CMD_AUTH;
	} else {
		if (qat_sym_session_aead_create_cd_auth(session,
				key_data,
				key_length,
				0,
				auth_xform->digest_length,
				auth_xform->op))
			return -EINVAL;
	}

	session->digest_length = auth_xform->digest_length;
	return 0;
}
714
/**
 * Configure a QAT session for an AEAD transform (AES-GCM or AES-CCM).
 *
 * @param dev      cryptodev whose private data holds the QAT device config
 * @param xform    AEAD crypto transform supplied by the application
 * @param session  QAT session to populate
 * @return 0 on success, -EINVAL on invalid key/algorithm or cd build failure,
 *         negative value propagated from the single-pass check
 */
int
qat_sym_session_configure_aead(struct rte_cryptodev *dev,
				struct rte_crypto_sym_xform *xform,
				struct qat_sym_session *session)
{
	struct rte_crypto_aead_xform *aead_xform = &xform->aead;
	enum rte_crypto_auth_operation crypto_operation;

	/*
	 * Store AEAD IV parameters as cipher IV,
	 * to avoid unnecessary memory usage
	 */
	session->cipher_iv.offset = xform->aead.iv.offset;
	session->cipher_iv.length = xform->aead.iv.length;

	/* Map the AEAD algorithm to QAT cipher mode + hash algorithm */
	switch (aead_xform->algo) {
	case RTE_CRYPTO_AEAD_AES_GCM:
		if (qat_sym_validate_aes_key(aead_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES key size");
			return -EINVAL;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
		break;
	case RTE_CRYPTO_AEAD_AES_CCM:
		if (qat_sym_validate_aes_key(aead_xform->key.length,
				&session->qat_cipher_alg) != 0) {
			QAT_LOG(ERR, "Invalid AES key size");
			return -EINVAL;
		}
		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC;
		break;
	default:
		QAT_LOG(ERR, "Crypto: Undefined AEAD specified %u\n",
				aead_xform->algo);
		return -EINVAL;
	}

	session->is_single_pass = 0;
	if (aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) {
		/* Use faster Single-Pass GCM if possible */
		int res = qat_sym_session_handle_single_pass(
				dev->data->dev_private, session, aead_xform);
		if (res < 0)
			return res;
		if (session->is_single_pass)
			return 0;
	}

	/*
	 * GCM-encrypt and CCM-decrypt build the cipher content descriptor
	 * first; the opposite combinations build the auth descriptor first.
	 */
	if ((aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT &&
			aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) ||
			(aead_xform->op == RTE_CRYPTO_AEAD_OP_DECRYPT &&
			aead_xform->algo == RTE_CRYPTO_AEAD_AES_CCM)) {
		session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
		/*
		 * It needs to create cipher desc content first,
		 * then authentication
		 */
		crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
			RTE_CRYPTO_AUTH_OP_GENERATE : RTE_CRYPTO_AUTH_OP_VERIFY;

		if (qat_sym_session_aead_create_cd_cipher(session,
					aead_xform->key.data,
					aead_xform->key.length))
			return -EINVAL;

		if (qat_sym_session_aead_create_cd_auth(session,
					aead_xform->key.data,
					aead_xform->key.length,
					aead_xform->aad_length,
					aead_xform->digest_length,
					crypto_operation))
			return -EINVAL;
	} else {
		session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
		/*
		 * It needs to create authentication desc content first,
		 * then cipher
		 */

		crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
			RTE_CRYPTO_AUTH_OP_VERIFY : RTE_CRYPTO_AUTH_OP_GENERATE;

		if (qat_sym_session_aead_create_cd_auth(session,
					aead_xform->key.data,
					aead_xform->key.length,
					aead_xform->aad_length,
					aead_xform->digest_length,
					crypto_operation))
			return -EINVAL;

		if (qat_sym_session_aead_create_cd_cipher(session,
					aead_xform->key.data,
					aead_xform->key.length))
			return -EINVAL;
	}

	session->digest_length = aead_xform->digest_length;
	return 0;
}
817
818 unsigned int qat_sym_session_get_private_size(
819                 struct rte_cryptodev *dev __rte_unused)
820 {
821         return RTE_ALIGN_CEIL(sizeof(struct qat_sym_session), 8);
822 }
823
824 /* returns block size in bytes per cipher algo */
825 int qat_cipher_get_block_size(enum icp_qat_hw_cipher_algo qat_cipher_alg)
826 {
827         switch (qat_cipher_alg) {
828         case ICP_QAT_HW_CIPHER_ALGO_DES:
829                 return ICP_QAT_HW_DES_BLK_SZ;
830         case ICP_QAT_HW_CIPHER_ALGO_3DES:
831                 return ICP_QAT_HW_3DES_BLK_SZ;
832         case ICP_QAT_HW_CIPHER_ALGO_AES128:
833         case ICP_QAT_HW_CIPHER_ALGO_AES192:
834         case ICP_QAT_HW_CIPHER_ALGO_AES256:
835                 return ICP_QAT_HW_AES_BLK_SZ;
836         default:
837                 QAT_LOG(ERR, "invalid block cipher alg %u", qat_cipher_alg);
838                 return -EFAULT;
839         };
840         return -EFAULT;
841 }
842
843 /*
844  * Returns size in bytes per hash algo for state1 size field in cd_ctrl
845  * This is digest size rounded up to nearest quadword
846  */
847 static int qat_hash_get_state1_size(enum icp_qat_hw_auth_algo qat_hash_alg)
848 {
849         switch (qat_hash_alg) {
850         case ICP_QAT_HW_AUTH_ALGO_SHA1:
851                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA1_STATE1_SZ,
852                                                 QAT_HW_DEFAULT_ALIGNMENT);
853         case ICP_QAT_HW_AUTH_ALGO_SHA224:
854                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA224_STATE1_SZ,
855                                                 QAT_HW_DEFAULT_ALIGNMENT);
856         case ICP_QAT_HW_AUTH_ALGO_SHA256:
857                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA256_STATE1_SZ,
858                                                 QAT_HW_DEFAULT_ALIGNMENT);
859         case ICP_QAT_HW_AUTH_ALGO_SHA384:
860                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA384_STATE1_SZ,
861                                                 QAT_HW_DEFAULT_ALIGNMENT);
862         case ICP_QAT_HW_AUTH_ALGO_SHA512:
863                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
864                                                 QAT_HW_DEFAULT_ALIGNMENT);
865         case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
866                 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ,
867                                                 QAT_HW_DEFAULT_ALIGNMENT);
868         case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
869         case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
870                 return QAT_HW_ROUND_UP(ICP_QAT_HW_GALOIS_128_STATE1_SZ,
871                                                 QAT_HW_DEFAULT_ALIGNMENT);
872         case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
873                 return QAT_HW_ROUND_UP(ICP_QAT_HW_ZUC_3G_EIA3_STATE1_SZ,
874                                                 QAT_HW_DEFAULT_ALIGNMENT);
875         case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
876                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SNOW_3G_UIA2_STATE1_SZ,
877                                                 QAT_HW_DEFAULT_ALIGNMENT);
878         case ICP_QAT_HW_AUTH_ALGO_MD5:
879                 return QAT_HW_ROUND_UP(ICP_QAT_HW_MD5_STATE1_SZ,
880                                                 QAT_HW_DEFAULT_ALIGNMENT);
881         case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
882                 return QAT_HW_ROUND_UP(ICP_QAT_HW_KASUMI_F9_STATE1_SZ,
883                                                 QAT_HW_DEFAULT_ALIGNMENT);
884         case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
885                 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_CBC_MAC_STATE1_SZ,
886                                                 QAT_HW_DEFAULT_ALIGNMENT);
887         case ICP_QAT_HW_AUTH_ALGO_NULL:
888                 return QAT_HW_ROUND_UP(ICP_QAT_HW_NULL_STATE1_SZ,
889                                                 QAT_HW_DEFAULT_ALIGNMENT);
890         case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
891                 /* return maximum state1 size in this case */
892                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
893                                                 QAT_HW_DEFAULT_ALIGNMENT);
894         default:
895                 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
896                 return -EFAULT;
897         };
898         return -EFAULT;
899 }
900
901 /* returns digest size in bytes  per hash algo */
902 static int qat_hash_get_digest_size(enum icp_qat_hw_auth_algo qat_hash_alg)
903 {
904         switch (qat_hash_alg) {
905         case ICP_QAT_HW_AUTH_ALGO_SHA1:
906                 return ICP_QAT_HW_SHA1_STATE1_SZ;
907         case ICP_QAT_HW_AUTH_ALGO_SHA224:
908                 return ICP_QAT_HW_SHA224_STATE1_SZ;
909         case ICP_QAT_HW_AUTH_ALGO_SHA256:
910                 return ICP_QAT_HW_SHA256_STATE1_SZ;
911         case ICP_QAT_HW_AUTH_ALGO_SHA384:
912                 return ICP_QAT_HW_SHA384_STATE1_SZ;
913         case ICP_QAT_HW_AUTH_ALGO_SHA512:
914                 return ICP_QAT_HW_SHA512_STATE1_SZ;
915         case ICP_QAT_HW_AUTH_ALGO_MD5:
916                 return ICP_QAT_HW_MD5_STATE1_SZ;
917         case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
918                 return ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
919         case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
920                 /* return maximum digest size in this case */
921                 return ICP_QAT_HW_SHA512_STATE1_SZ;
922         default:
923                 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
924                 return -EFAULT;
925         };
926         return -EFAULT;
927 }
928
929 /* returns block size in byes per hash algo */
930 static int qat_hash_get_block_size(enum icp_qat_hw_auth_algo qat_hash_alg)
931 {
932         switch (qat_hash_alg) {
933         case ICP_QAT_HW_AUTH_ALGO_SHA1:
934                 return SHA_CBLOCK;
935         case ICP_QAT_HW_AUTH_ALGO_SHA224:
936                 return SHA256_CBLOCK;
937         case ICP_QAT_HW_AUTH_ALGO_SHA256:
938                 return SHA256_CBLOCK;
939         case ICP_QAT_HW_AUTH_ALGO_SHA384:
940                 return SHA512_CBLOCK;
941         case ICP_QAT_HW_AUTH_ALGO_SHA512:
942                 return SHA512_CBLOCK;
943         case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
944                 return 16;
945         case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
946                 return ICP_QAT_HW_AES_BLK_SZ;
947         case ICP_QAT_HW_AUTH_ALGO_MD5:
948                 return MD5_CBLOCK;
949         case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
950                 /* return maximum block size in this case */
951                 return SHA512_CBLOCK;
952         default:
953                 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
954                 return -EFAULT;
955         };
956         return -EFAULT;
957 }
958
959 static int partial_hash_sha1(uint8_t *data_in, uint8_t *data_out)
960 {
961         SHA_CTX ctx;
962
963         if (!SHA1_Init(&ctx))
964                 return -EFAULT;
965         SHA1_Transform(&ctx, data_in);
966         rte_memcpy(data_out, &ctx, SHA_DIGEST_LENGTH);
967         return 0;
968 }
969
970 static int partial_hash_sha224(uint8_t *data_in, uint8_t *data_out)
971 {
972         SHA256_CTX ctx;
973
974         if (!SHA224_Init(&ctx))
975                 return -EFAULT;
976         SHA256_Transform(&ctx, data_in);
977         rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
978         return 0;
979 }
980
981 static int partial_hash_sha256(uint8_t *data_in, uint8_t *data_out)
982 {
983         SHA256_CTX ctx;
984
985         if (!SHA256_Init(&ctx))
986                 return -EFAULT;
987         SHA256_Transform(&ctx, data_in);
988         rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
989         return 0;
990 }
991
992 static int partial_hash_sha384(uint8_t *data_in, uint8_t *data_out)
993 {
994         SHA512_CTX ctx;
995
996         if (!SHA384_Init(&ctx))
997                 return -EFAULT;
998         SHA512_Transform(&ctx, data_in);
999         rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
1000         return 0;
1001 }
1002
1003 static int partial_hash_sha512(uint8_t *data_in, uint8_t *data_out)
1004 {
1005         SHA512_CTX ctx;
1006
1007         if (!SHA512_Init(&ctx))
1008                 return -EFAULT;
1009         SHA512_Transform(&ctx, data_in);
1010         rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
1011         return 0;
1012 }
1013
1014 static int partial_hash_md5(uint8_t *data_in, uint8_t *data_out)
1015 {
1016         MD5_CTX ctx;
1017
1018         if (!MD5_Init(&ctx))
1019                 return -EFAULT;
1020         MD5_Transform(&ctx, data_in);
1021         rte_memcpy(data_out, &ctx, MD5_DIGEST_LENGTH);
1022
1023         return 0;
1024 }
1025
1026 static int partial_hash_compute(enum icp_qat_hw_auth_algo hash_alg,
1027                         uint8_t *data_in,
1028                         uint8_t *data_out)
1029 {
1030         int digest_size;
1031         uint8_t digest[qat_hash_get_digest_size(
1032                         ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1033         uint32_t *hash_state_out_be32;
1034         uint64_t *hash_state_out_be64;
1035         int i;
1036
1037         digest_size = qat_hash_get_digest_size(hash_alg);
1038         if (digest_size <= 0)
1039                 return -EFAULT;
1040
1041         hash_state_out_be32 = (uint32_t *)data_out;
1042         hash_state_out_be64 = (uint64_t *)data_out;
1043
1044         switch (hash_alg) {
1045         case ICP_QAT_HW_AUTH_ALGO_SHA1:
1046                 if (partial_hash_sha1(data_in, digest))
1047                         return -EFAULT;
1048                 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
1049                         *hash_state_out_be32 =
1050                                 rte_bswap32(*(((uint32_t *)digest)+i));
1051                 break;
1052         case ICP_QAT_HW_AUTH_ALGO_SHA224:
1053                 if (partial_hash_sha224(data_in, digest))
1054                         return -EFAULT;
1055                 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
1056                         *hash_state_out_be32 =
1057                                 rte_bswap32(*(((uint32_t *)digest)+i));
1058                 break;
1059         case ICP_QAT_HW_AUTH_ALGO_SHA256:
1060                 if (partial_hash_sha256(data_in, digest))
1061                         return -EFAULT;
1062                 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
1063                         *hash_state_out_be32 =
1064                                 rte_bswap32(*(((uint32_t *)digest)+i));
1065                 break;
1066         case ICP_QAT_HW_AUTH_ALGO_SHA384:
1067                 if (partial_hash_sha384(data_in, digest))
1068                         return -EFAULT;
1069                 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
1070                         *hash_state_out_be64 =
1071                                 rte_bswap64(*(((uint64_t *)digest)+i));
1072                 break;
1073         case ICP_QAT_HW_AUTH_ALGO_SHA512:
1074                 if (partial_hash_sha512(data_in, digest))
1075                         return -EFAULT;
1076                 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
1077                         *hash_state_out_be64 =
1078                                 rte_bswap64(*(((uint64_t *)digest)+i));
1079                 break;
1080         case ICP_QAT_HW_AUTH_ALGO_MD5:
1081                 if (partial_hash_md5(data_in, data_out))
1082                         return -EFAULT;
1083                 break;
1084         default:
1085                 QAT_LOG(ERR, "invalid hash alg %u", hash_alg);
1086                 return -EFAULT;
1087         }
1088
1089         return 0;
1090 }
/* HMAC inner/outer pad bytes (RFC 2104) and XCBC precompute key count */
#define HMAC_IPAD_VALUE 0x36
#define HMAC_OPAD_VALUE 0x5c
#define HASH_XCBC_PRECOMP_KEY_NUM 3

/* All-zero 16-byte block (static storage is zero-initialized);
 * encrypted with the user key to derive the CMAC subkeys.
 */
static const uint8_t AES_CMAC_SEED[ICP_QAT_HW_AES_128_KEY_SZ];
1096
1097 static void aes_cmac_key_derive(uint8_t *base, uint8_t *derived)
1098 {
1099         int i;
1100
1101         derived[0] = base[0] << 1;
1102         for (i = 1; i < ICP_QAT_HW_AES_BLK_SZ ; i++) {
1103                 derived[i] = base[i] << 1;
1104                 derived[i - 1] |= base[i] >> 7;
1105         }
1106
1107         if (base[0] & 0x80)
1108                 derived[ICP_QAT_HW_AES_BLK_SZ - 1] ^= QAT_AES_CMAC_CONST_RB;
1109 }
1110
1111 static int qat_sym_do_precomputes(enum icp_qat_hw_auth_algo hash_alg,
1112                                 const uint8_t *auth_key,
1113                                 uint16_t auth_keylen,
1114                                 uint8_t *p_state_buf,
1115                                 uint16_t *p_state_len,
1116                                 uint8_t aes_cmac)
1117 {
1118         int block_size;
1119         uint8_t ipad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1120         uint8_t opad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1121         int i;
1122
1123         if (hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC) {
1124
1125                 /* CMAC */
1126                 if (aes_cmac) {
1127                         AES_KEY enc_key;
1128                         uint8_t *in = NULL;
1129                         uint8_t k0[ICP_QAT_HW_AES_128_KEY_SZ];
1130                         uint8_t *k1, *k2;
1131
1132                         auth_keylen = ICP_QAT_HW_AES_128_KEY_SZ;
1133
1134                         in = rte_zmalloc("AES CMAC K1",
1135                                          ICP_QAT_HW_AES_128_KEY_SZ, 16);
1136
1137                         if (in == NULL) {
1138                                 QAT_LOG(ERR, "Failed to alloc memory");
1139                                 return -ENOMEM;
1140                         }
1141
1142                         rte_memcpy(in, AES_CMAC_SEED,
1143                                    ICP_QAT_HW_AES_128_KEY_SZ);
1144                         rte_memcpy(p_state_buf, auth_key, auth_keylen);
1145
1146                         if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1147                                 &enc_key) != 0) {
1148                                 rte_free(in);
1149                                 return -EFAULT;
1150                         }
1151
1152                         AES_encrypt(in, k0, &enc_key);
1153
1154                         k1 = p_state_buf + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1155                         k2 = k1 + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1156
1157                         aes_cmac_key_derive(k0, k1);
1158                         aes_cmac_key_derive(k1, k2);
1159
1160                         memset(k0, 0, ICP_QAT_HW_AES_128_KEY_SZ);
1161                         *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
1162                         rte_free(in);
1163                         return 0;
1164                 } else {
1165                         static uint8_t qat_aes_xcbc_key_seed[
1166                                         ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ] = {
1167                                 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1168                                 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1169                                 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1170                                 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1171                                 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1172                                 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1173                         };
1174
1175                         uint8_t *in = NULL;
1176                         uint8_t *out = p_state_buf;
1177                         int x;
1178                         AES_KEY enc_key;
1179
1180                         in = rte_zmalloc("working mem for key",
1181                                         ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ, 16);
1182                         if (in == NULL) {
1183                                 QAT_LOG(ERR, "Failed to alloc memory");
1184                                 return -ENOMEM;
1185                         }
1186
1187                         rte_memcpy(in, qat_aes_xcbc_key_seed,
1188                                         ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1189                         for (x = 0; x < HASH_XCBC_PRECOMP_KEY_NUM; x++) {
1190                                 if (AES_set_encrypt_key(auth_key,
1191                                                         auth_keylen << 3,
1192                                                         &enc_key) != 0) {
1193                                         rte_free(in -
1194                                           (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ));
1195                                         memset(out -
1196                                            (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ),
1197                                           0, ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1198                                         return -EFAULT;
1199                                 }
1200                                 AES_encrypt(in, out, &enc_key);
1201                                 in += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1202                                 out += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1203                         }
1204                         *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
1205                         rte_free(in - x*ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ);
1206                         return 0;
1207                 }
1208
1209         } else if ((hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_128) ||
1210                 (hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_64)) {
1211                 uint8_t *in = NULL;
1212                 uint8_t *out = p_state_buf;
1213                 AES_KEY enc_key;
1214
1215                 memset(p_state_buf, 0, ICP_QAT_HW_GALOIS_H_SZ +
1216                                 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1217                                 ICP_QAT_HW_GALOIS_E_CTR0_SZ);
1218                 in = rte_zmalloc("working mem for key",
1219                                 ICP_QAT_HW_GALOIS_H_SZ, 16);
1220                 if (in == NULL) {
1221                         QAT_LOG(ERR, "Failed to alloc memory");
1222                         return -ENOMEM;
1223                 }
1224
1225                 memset(in, 0, ICP_QAT_HW_GALOIS_H_SZ);
1226                 if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1227                         &enc_key) != 0) {
1228                         return -EFAULT;
1229                 }
1230                 AES_encrypt(in, out, &enc_key);
1231                 *p_state_len = ICP_QAT_HW_GALOIS_H_SZ +
1232                                 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1233                                 ICP_QAT_HW_GALOIS_E_CTR0_SZ;
1234                 rte_free(in);
1235                 return 0;
1236         }
1237
1238         block_size = qat_hash_get_block_size(hash_alg);
1239         if (block_size < 0)
1240                 return block_size;
1241         /* init ipad and opad from key and xor with fixed values */
1242         memset(ipad, 0, block_size);
1243         memset(opad, 0, block_size);
1244
1245         if (auth_keylen > (unsigned int)block_size) {
1246                 QAT_LOG(ERR, "invalid keylen %u", auth_keylen);
1247                 return -EFAULT;
1248         }
1249         rte_memcpy(ipad, auth_key, auth_keylen);
1250         rte_memcpy(opad, auth_key, auth_keylen);
1251
1252         for (i = 0; i < block_size; i++) {
1253                 uint8_t *ipad_ptr = ipad + i;
1254                 uint8_t *opad_ptr = opad + i;
1255                 *ipad_ptr ^= HMAC_IPAD_VALUE;
1256                 *opad_ptr ^= HMAC_OPAD_VALUE;
1257         }
1258
1259         /* do partial hash of ipad and copy to state1 */
1260         if (partial_hash_compute(hash_alg, ipad, p_state_buf)) {
1261                 memset(ipad, 0, block_size);
1262                 memset(opad, 0, block_size);
1263                 QAT_LOG(ERR, "ipad precompute failed");
1264                 return -EFAULT;
1265         }
1266
1267         /*
1268          * State len is a multiple of 8, so may be larger than the digest.
1269          * Put the partial hash of opad state_len bytes after state1
1270          */
1271         *p_state_len = qat_hash_get_state1_size(hash_alg);
1272         if (partial_hash_compute(hash_alg, opad, p_state_buf + *p_state_len)) {
1273                 memset(ipad, 0, block_size);
1274                 memset(opad, 0, block_size);
1275                 QAT_LOG(ERR, "opad precompute failed");
1276                 return -EFAULT;
1277         }
1278
1279         /*  don't leave data lying around */
1280         memset(ipad, 0, block_size);
1281         memset(opad, 0, block_size);
1282         return 0;
1283 }
1284
/*
 * Populate the common LA request header: flat buffers, 64-bit CD address,
 * 16-byte IV field, no partial processing, no state update, digest not in
 * buffer, and the protocol flag matching the session's algorithm.
 */
static void
qat_sym_session_init_common_hdr(struct icp_qat_fw_comn_req_hdr *header,
		enum qat_sym_proto_flag proto_flags)
{
	header->hdr_flags =
		ICP_QAT_FW_COMN_HDR_FLAGS_BUILD(ICP_QAT_FW_COMN_REQ_FLAG_SET);
	header->service_type = ICP_QAT_FW_COMN_REQ_CPM_FW_LA;
	header->comn_req_flags =
		ICP_QAT_FW_COMN_FLAGS_BUILD(QAT_COMN_CD_FLD_TYPE_64BIT_ADR,
					QAT_COMN_PTR_TYPE_FLAT);
	ICP_QAT_FW_LA_PARTIAL_SET(header->serv_specif_flags,
				  ICP_QAT_FW_LA_PARTIAL_NONE);
	ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(header->serv_specif_flags,
					   ICP_QAT_FW_CIPH_IV_16BYTE_DATA);

	/* Set the protocol flag for the algorithm family in use; ZUC uses
	 * a dedicated flag-set macro rather than the generic PROTO field.
	 */
	switch (proto_flags)		{
	case QAT_CRYPTO_PROTO_FLAG_NONE:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_PROTO);
		break;
	case QAT_CRYPTO_PROTO_FLAG_CCM:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_CCM_PROTO);
		break;
	case QAT_CRYPTO_PROTO_FLAG_GCM:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_GCM_PROTO);
		break;
	case QAT_CRYPTO_PROTO_FLAG_SNOW3G:
		ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_SNOW_3G_PROTO);
		break;
	case QAT_CRYPTO_PROTO_FLAG_ZUC:
		ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(header->serv_specif_flags,
			ICP_QAT_FW_LA_ZUC_3G_PROTO);
		break;
	}

	ICP_QAT_FW_LA_UPDATE_STATE_SET(header->serv_specif_flags,
					   ICP_QAT_FW_LA_NO_UPDATE_STATE);
	ICP_QAT_FW_LA_DIGEST_IN_BUFFER_SET(header->serv_specif_flags,
					ICP_QAT_FW_LA_NO_DIGEST_IN_BUFFER);
}
1328
1329 /*
1330  *      Snow3G and ZUC should never use this function
1331  *      and set its protocol flag in both cipher and auth part of content
1332  *      descriptor building function
1333  */
1334 static enum qat_sym_proto_flag
1335 qat_get_crypto_proto_flag(uint16_t flags)
1336 {
1337         int proto = ICP_QAT_FW_LA_PROTO_GET(flags);
1338         enum qat_sym_proto_flag qat_proto_flag =
1339                         QAT_CRYPTO_PROTO_FLAG_NONE;
1340
1341         switch (proto) {
1342         case ICP_QAT_FW_LA_GCM_PROTO:
1343                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
1344                 break;
1345         case ICP_QAT_FW_LA_CCM_PROTO:
1346                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
1347                 break;
1348         }
1349
1350         return qat_proto_flag;
1351 }
1352
1353 int qat_sym_session_aead_create_cd_cipher(struct qat_sym_session *cdesc,
1354                                                 const uint8_t *cipherkey,
1355                                                 uint32_t cipherkeylen)
1356 {
1357         struct icp_qat_hw_cipher_algo_blk *cipher;
1358         struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
1359         struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
1360         struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
1361         void *ptr = &req_tmpl->cd_ctrl;
1362         struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
1363         struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
1364         enum icp_qat_hw_cipher_convert key_convert;
1365         enum qat_sym_proto_flag qat_proto_flag =
1366                 QAT_CRYPTO_PROTO_FLAG_NONE;
1367         uint32_t total_key_size;
1368         uint16_t cipher_offset, cd_size;
1369         uint32_t wordIndex  = 0;
1370         uint32_t *temp_key = NULL;
1371
1372         if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER) {
1373                 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1374                 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1375                                         ICP_QAT_FW_SLICE_CIPHER);
1376                 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1377                                         ICP_QAT_FW_SLICE_DRAM_WR);
1378                 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1379                                         ICP_QAT_FW_LA_NO_RET_AUTH_RES);
1380                 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1381                                         ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
1382                 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1383         } else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
1384                 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1385                 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1386                                         ICP_QAT_FW_SLICE_CIPHER);
1387                 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1388                                         ICP_QAT_FW_SLICE_AUTH);
1389                 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1390                                         ICP_QAT_FW_SLICE_AUTH);
1391                 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1392                                         ICP_QAT_FW_SLICE_DRAM_WR);
1393                 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1394         } else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
1395                 QAT_LOG(ERR, "Invalid param, must be a cipher command.");
1396                 return -EFAULT;
1397         }
1398
1399         if (cdesc->qat_mode == ICP_QAT_HW_CIPHER_CTR_MODE) {
1400                 /*
1401                  * CTR Streaming ciphers are a special case. Decrypt = encrypt
1402                  * Overriding default values previously set
1403                  */
1404                 cdesc->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
1405                 key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
1406         } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2
1407                 || cdesc->qat_cipher_alg ==
1408                         ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)
1409                 key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
1410         else if (cdesc->qat_dir == ICP_QAT_HW_CIPHER_ENCRYPT)
1411                 key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
1412         else
1413                 key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
1414
1415         if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
1416                 total_key_size = ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ +
1417                         ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
1418                 cipher_cd_ctrl->cipher_state_sz =
1419                         ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
1420                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
1421
1422         } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
1423                 total_key_size = ICP_QAT_HW_KASUMI_F8_KEY_SZ;
1424                 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_KASUMI_BLK_SZ >> 3;
1425                 cipher_cd_ctrl->cipher_padding_sz =
1426                                         (2 * ICP_QAT_HW_KASUMI_BLK_SZ) >> 3;
1427         } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES) {
1428                 total_key_size = ICP_QAT_HW_3DES_KEY_SZ;
1429                 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_3DES_BLK_SZ >> 3;
1430                 qat_proto_flag =
1431                         qat_get_crypto_proto_flag(header->serv_specif_flags);
1432         } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_DES) {
1433                 total_key_size = ICP_QAT_HW_DES_KEY_SZ;
1434                 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_DES_BLK_SZ >> 3;
1435                 qat_proto_flag =
1436                         qat_get_crypto_proto_flag(header->serv_specif_flags);
1437         } else if (cdesc->qat_cipher_alg ==
1438                 ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
1439                 total_key_size = ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ +
1440                         ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
1441                 cipher_cd_ctrl->cipher_state_sz =
1442                         ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
1443                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
1444                 cdesc->min_qat_dev_gen = QAT_GEN2;
1445         } else {
1446                 total_key_size = cipherkeylen;
1447                 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_AES_BLK_SZ >> 3;
1448                 qat_proto_flag =
1449                         qat_get_crypto_proto_flag(header->serv_specif_flags);
1450         }
1451         cipher_cd_ctrl->cipher_key_sz = total_key_size >> 3;
1452         cipher_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
1453         cipher_cd_ctrl->cipher_cfg_offset = cipher_offset >> 3;
1454
1455         header->service_cmd_id = cdesc->qat_cmd;
1456         qat_sym_session_init_common_hdr(header, qat_proto_flag);
1457
1458         cipher = (struct icp_qat_hw_cipher_algo_blk *)cdesc->cd_cur_ptr;
1459         cipher->cipher_config.val =
1460             ICP_QAT_HW_CIPHER_CONFIG_BUILD(cdesc->qat_mode,
1461                                         cdesc->qat_cipher_alg, key_convert,
1462                                         cdesc->qat_dir);
1463
1464         if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
1465                 temp_key = (uint32_t *)(cdesc->cd_cur_ptr +
1466                                         sizeof(struct icp_qat_hw_cipher_config)
1467                                         + cipherkeylen);
1468                 memcpy(cipher->key, cipherkey, cipherkeylen);
1469                 memcpy(temp_key, cipherkey, cipherkeylen);
1470
1471                 /* XOR Key with KASUMI F8 key modifier at 4 bytes level */
1472                 for (wordIndex = 0; wordIndex < (cipherkeylen >> 2);
1473                                                                 wordIndex++)
1474                         temp_key[wordIndex] ^= KASUMI_F8_KEY_MODIFIER_4_BYTES;
1475
1476                 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1477                                         cipherkeylen + cipherkeylen;
1478         } else {
1479                 memcpy(cipher->key, cipherkey, cipherkeylen);
1480                 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1481                                         cipherkeylen;
1482         }
1483
1484         if (total_key_size > cipherkeylen) {
1485                 uint32_t padding_size =  total_key_size-cipherkeylen;
1486                 if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
1487                         && (cipherkeylen == QAT_3DES_KEY_SZ_OPT2)) {
1488                         /* K3 not provided so use K1 = K3*/
1489                         memcpy(cdesc->cd_cur_ptr, cipherkey, padding_size);
1490                 } else if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
1491                         && (cipherkeylen == QAT_3DES_KEY_SZ_OPT3)) {
1492                         /* K2 and K3 not provided so use K1 = K2 = K3*/
1493                         memcpy(cdesc->cd_cur_ptr, cipherkey,
1494                                 cipherkeylen);
1495                         memcpy(cdesc->cd_cur_ptr+cipherkeylen,
1496                                 cipherkey, cipherkeylen);
1497                 } else
1498                         memset(cdesc->cd_cur_ptr, 0, padding_size);
1499
1500                 cdesc->cd_cur_ptr += padding_size;
1501         }
1502         cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
1503         cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;
1504
1505         return 0;
1506 }
1507
/**
 * Build the auth (hash) portion of a QAT session content descriptor and
 * fill the related fields of the firmware bulk request template.
 *
 * Chains the firmware slices for AUTH / HASH_CIPHER commands, sets the
 * generate-vs-verify flags, writes the inner hash setup block, then lays
 * down the algorithm-specific state1/state2 data (HMAC/CMAC/GCM
 * precomputes, key material for SNOW3G/ZUC/KASUMI/CBC-MAC) and finally
 * records the sizes/offsets in the auth CD control header.
 *
 * @param cdesc       session being built; cd_cur_ptr is advanced past all
 *                    data written here
 * @param authkey     authentication key from the application
 * @param authkeylen  length of authkey in bytes
 * @param aad_length  AAD length in bytes (used by GCM/CCM)
 * @param digestsize  requested digest length in bytes
 * @param operation   RTE_CRYPTO_AUTH_OP_GENERATE or _VERIFY
 * @return 0 on success, -EFAULT on invalid command/hash algorithm or on a
 *         failed precompute, or a negative error from
 *         qat_hash_get_block_size()
 */
int qat_sym_session_aead_create_cd_auth(struct qat_sym_session *cdesc,
						const uint8_t *authkey,
						uint32_t authkeylen,
						uint32_t aad_length,
						uint32_t digestsize,
						unsigned int operation)
{
	struct icp_qat_hw_auth_setup *hash;
	struct icp_qat_hw_cipher_algo_blk *cipherconfig;
	struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
	struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
	struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
	/* cd_ctrl is viewed as both cipher and auth control headers */
	void *ptr = &req_tmpl->cd_ctrl;
	struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
	struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
	struct icp_qat_fw_la_auth_req_params *auth_param =
		(struct icp_qat_fw_la_auth_req_params *)
		((char *)&req_tmpl->serv_specif_rqpars +
		ICP_QAT_FW_HASH_REQUEST_PARAMETERS_OFFSET);
	uint16_t state1_size = 0, state2_size = 0;
	uint16_t hash_offset, cd_size;
	uint32_t *aad_len = NULL;
	uint32_t wordIndex  = 0;
	uint32_t *pTempKey;
	enum qat_sym_proto_flag qat_proto_flag =
		QAT_CRYPTO_PROTO_FLAG_NONE;

	/*
	 * Chain the firmware slices for this command. For CIPHER_HASH the
	 * cipher setup has already initialised cd_cur_ptr, so nothing is
	 * done here; any other command is rejected.
	 */
	if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_AUTH) {
		ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
					ICP_QAT_FW_SLICE_DRAM_WR);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
		ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
				ICP_QAT_FW_SLICE_AUTH);
		ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
				ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
				ICP_QAT_FW_SLICE_CIPHER);
		ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
				ICP_QAT_FW_SLICE_DRAM_WR);
		cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
	} else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
		QAT_LOG(ERR, "Invalid param, must be a hash command.");
		return -EFAULT;
	}

	if (operation == RTE_CRYPTO_AUTH_OP_VERIFY) {
		/* verify: firmware compares the digest, does not return it */
		ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_NO_RET_AUTH_RES);
		ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_CMP_AUTH_RES);
		cdesc->auth_op = ICP_QAT_HW_AUTH_VERIFY;
	} else {
		/* generate: firmware returns the digest, no compare */
		ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
					   ICP_QAT_FW_LA_RET_AUTH_RES);
		ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
					   ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
		cdesc->auth_op = ICP_QAT_HW_AUTH_GENERATE;
	}

	/*
	 * Setup the inner hash config
	 */
	hash_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
	hash = (struct icp_qat_hw_auth_setup *)cdesc->cd_cur_ptr;
	hash->auth_config.reserved = 0;
	hash->auth_config.config =
			ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE1,
				cdesc->qat_hash_alg, digestsize);

	/* Algorithms with no block-size-based counter get 0 */
	if (cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_KASUMI_F9
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC
		|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_NULL
			)
		hash->auth_counter.counter = 0;
	else {
		int block_size = qat_hash_get_block_size(cdesc->qat_hash_alg);

		if (block_size < 0)
			return block_size;
		/* counter is stored big-endian for the firmware */
		hash->auth_counter.counter = rte_bswap32(block_size);
	}

	cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_auth_setup);

	/*
	 * cd_cur_ptr now points at the state1 information.
	 */
	switch (cdesc->qat_hash_alg) {
	case ICP_QAT_HW_AUTH_ALGO_SHA1:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA1, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = RTE_ALIGN_CEIL(ICP_QAT_HW_SHA1_STATE2_SZ, 8);
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA224:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA224, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_SHA224_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA256:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA256, authkey,
			authkeylen, cdesc->cd_cur_ptr,  &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_SHA256_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA384:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA384, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_SHA384_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA512:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA512, authkey,
			authkeylen, cdesc->cd_cur_ptr,  &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(SHA)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_SHA512_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
		/* Same hardware algo ID covers both XCBC-MAC and CMAC;
		 * aes_cmac selects the CMAC precompute path.
		 */
		state1_size = ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;

		if (cdesc->aes_cmac)
			memset(cdesc->cd_cur_ptr, 0, state1_size);
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC,
			authkey, authkeylen, cdesc->cd_cur_ptr + state1_size,
			&state2_size, cdesc->aes_cmac)) {
			cdesc->aes_cmac ? QAT_LOG(ERR,
						  "(CMAC)precompute failed")
					: QAT_LOG(ERR,
						  "(XCBC)precompute failed");
			return -EFAULT;
		}
		break;
	case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
	case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
		state1_size = ICP_QAT_HW_GALOIS_128_STATE1_SZ;
		if (qat_sym_do_precomputes(cdesc->qat_hash_alg, authkey,
			authkeylen, cdesc->cd_cur_ptr + state1_size,
			&state2_size, cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(GCM)precompute failed");
			return -EFAULT;
		}
		/*
		 * Write (the length of AAD) into bytes 16-19 of state2
		 * in big-endian format. This field is 8 bytes
		 */
		auth_param->u2.aad_sz =
				RTE_ALIGN_CEIL(aad_length, 16);
		auth_param->hash_state_sz = (auth_param->u2.aad_sz) >> 3;

		aad_len = (uint32_t *)(cdesc->cd_cur_ptr +
					ICP_QAT_HW_GALOIS_128_STATE1_SZ +
					ICP_QAT_HW_GALOIS_H_SZ);
		*aad_len = rte_bswap32(aad_length);
		cdesc->aad_len = aad_length;
		break;
	case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2);
		state2_size = ICP_QAT_HW_SNOW_3G_UIA2_STATE2_SZ;
		memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);

		/* Append a cipher config + key after state2; the auth key
		 * doubles as the cipher key here, followed by a zeroed IV
		 * area.
		 */
		cipherconfig = (struct icp_qat_hw_cipher_algo_blk *)
				(cdesc->cd_cur_ptr + state1_size + state2_size);
		cipherconfig->cipher_config.val =
		ICP_QAT_HW_CIPHER_CONFIG_BUILD(ICP_QAT_HW_CIPHER_ECB_MODE,
			ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2,
			ICP_QAT_HW_CIPHER_KEY_CONVERT,
			ICP_QAT_HW_CIPHER_ENCRYPT);
		memcpy(cipherconfig->key, authkey, authkeylen);
		memset(cipherconfig->key + authkeylen,
				0, ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ);
		cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
				authkeylen + ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
		auth_param->hash_state_sz = ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
		break;
	case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
		/* ZUC EIA3 uses MODE0, overriding the MODE1 config above */
		hash->auth_config.config =
			ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE0,
				cdesc->qat_hash_alg, digestsize);
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3);
		state2_size = ICP_QAT_HW_ZUC_3G_EIA3_STATE2_SZ;
		memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size
			+ ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ);

		memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
		cdesc->cd_cur_ptr += state1_size + state2_size
			+ ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
		auth_param->hash_state_sz = ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
		/* ZUC is only supported from QAT generation 2 onwards */
		cdesc->min_qat_dev_gen = QAT_GEN2;

		break;
	case ICP_QAT_HW_AUTH_ALGO_MD5:
		if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_MD5, authkey,
			authkeylen, cdesc->cd_cur_ptr, &state1_size,
			cdesc->aes_cmac)) {
			QAT_LOG(ERR, "(MD5)precompute failed");
			return -EFAULT;
		}
		state2_size = ICP_QAT_HW_MD5_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_NULL:
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_NULL);
		state2_size = ICP_QAT_HW_NULL_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
		/* AES-CBC-MAC is the auth half of CCM */
		qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC);
		state2_size = ICP_QAT_HW_AES_CBC_MAC_KEY_SZ +
				ICP_QAT_HW_AES_CCM_CBC_E_CTR0_SZ;

		/* Account for the CCM B0 block and AAD length info in the
		 * AAD size passed to the firmware.
		 */
		if (aad_length > 0) {
			aad_length += ICP_QAT_HW_CCM_AAD_B0_LEN +
			ICP_QAT_HW_CCM_AAD_LEN_INFO;
			auth_param->u2.aad_sz =
			RTE_ALIGN_CEIL(aad_length,
			ICP_QAT_HW_CCM_AAD_ALIGNMENT);
		} else {
			auth_param->u2.aad_sz = ICP_QAT_HW_CCM_AAD_B0_LEN;
		}
		cdesc->aad_len = aad_length;
		hash->auth_counter.counter = 0;

		hash_cd_ctrl->outer_prefix_sz = digestsize;
		auth_param->hash_state_sz = digestsize;

		memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
		break;
	case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
		state1_size = qat_hash_get_state1_size(
				ICP_QAT_HW_AUTH_ALGO_KASUMI_F9);
		state2_size = ICP_QAT_HW_KASUMI_F9_STATE2_SZ;
		memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
		pTempKey = (uint32_t *)(cdesc->cd_cur_ptr + state1_size
							+ authkeylen);
		/*
		* The Inner Hash Initial State2 block must contain IK
		* (Initialisation Key), followed by IK XOR-ed with KM
		* (Key Modifier): IK||(IK^KM).
		*/
		/* write the auth key */
		memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
		/* initialise temp key with auth key */
		memcpy(pTempKey, authkey, authkeylen);
		/* XOR Key with KASUMI F9 key modifier at 4 bytes level */
		for (wordIndex = 0; wordIndex < (authkeylen >> 2); wordIndex++)
			pTempKey[wordIndex] ^= KASUMI_F9_KEY_MODIFIER_4_BYTES;
		break;
	default:
		QAT_LOG(ERR, "Invalid HASH alg %u", cdesc->qat_hash_alg);
		return -EFAULT;
	}

	/* Request template setup */
	qat_sym_session_init_common_hdr(header, qat_proto_flag);
	header->service_cmd_id = cdesc->qat_cmd;

	/* Auth CD config setup */
	hash_cd_ctrl->hash_cfg_offset = hash_offset >> 3;
	hash_cd_ctrl->hash_flags = ICP_QAT_FW_AUTH_HDR_FLAG_NO_NESTED;
	hash_cd_ctrl->inner_res_sz = digestsize;
	hash_cd_ctrl->final_sz = digestsize;
	hash_cd_ctrl->inner_state1_sz = state1_size;
	auth_param->auth_res_sz = digestsize;

	/* offsets/sizes in the CD control header are in 8-byte quadwords */
	hash_cd_ctrl->inner_state2_sz  = state2_size;
	hash_cd_ctrl->inner_state2_offset = hash_cd_ctrl->hash_cfg_offset +
			((sizeof(struct icp_qat_hw_auth_setup) +
			 RTE_ALIGN_CEIL(hash_cd_ctrl->inner_state1_sz, 8))
					>> 3);

	cdesc->cd_cur_ptr += state1_size + state2_size;
	cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;

	cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
	cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;

	return 0;
}
1814
1815 int qat_sym_validate_aes_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1816 {
1817         switch (key_len) {
1818         case ICP_QAT_HW_AES_128_KEY_SZ:
1819                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
1820                 break;
1821         case ICP_QAT_HW_AES_192_KEY_SZ:
1822                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES192;
1823                 break;
1824         case ICP_QAT_HW_AES_256_KEY_SZ:
1825                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
1826                 break;
1827         default:
1828                 return -EINVAL;
1829         }
1830         return 0;
1831 }
1832
1833 int qat_sym_validate_aes_docsisbpi_key(int key_len,
1834                 enum icp_qat_hw_cipher_algo *alg)
1835 {
1836         switch (key_len) {
1837         case ICP_QAT_HW_AES_128_KEY_SZ:
1838                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
1839                 break;
1840         default:
1841                 return -EINVAL;
1842         }
1843         return 0;
1844 }
1845
1846 int qat_sym_validate_snow3g_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1847 {
1848         switch (key_len) {
1849         case ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ:
1850                 *alg = ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2;
1851                 break;
1852         default:
1853                 return -EINVAL;
1854         }
1855         return 0;
1856 }
1857
1858 int qat_sym_validate_kasumi_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1859 {
1860         switch (key_len) {
1861         case ICP_QAT_HW_KASUMI_KEY_SZ:
1862                 *alg = ICP_QAT_HW_CIPHER_ALGO_KASUMI;
1863                 break;
1864         default:
1865                 return -EINVAL;
1866         }
1867         return 0;
1868 }
1869
1870 int qat_sym_validate_des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1871 {
1872         switch (key_len) {
1873         case ICP_QAT_HW_DES_KEY_SZ:
1874                 *alg = ICP_QAT_HW_CIPHER_ALGO_DES;
1875                 break;
1876         default:
1877                 return -EINVAL;
1878         }
1879         return 0;
1880 }
1881
1882 int qat_sym_validate_3des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1883 {
1884         switch (key_len) {
1885         case QAT_3DES_KEY_SZ_OPT1:
1886         case QAT_3DES_KEY_SZ_OPT2:
1887         case QAT_3DES_KEY_SZ_OPT3:
1888                 *alg = ICP_QAT_HW_CIPHER_ALGO_3DES;
1889                 break;
1890         default:
1891                 return -EINVAL;
1892         }
1893         return 0;
1894 }
1895
1896 int qat_sym_validate_zuc_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1897 {
1898         switch (key_len) {
1899         case ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ:
1900                 *alg = ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3;
1901                 break;
1902         default:
1903                 return -EINVAL;
1904         }
1905         return 0;
1906 }