drivers/crypto/qat/qat_sym_session.c
1 /* SPDX-License-Identifier: (BSD-3-Clause OR GPL-2.0)
2  * Copyright(c) 2015-2019 Intel Corporation
3  */
4
5 #include <openssl/sha.h>        /* Needed to calculate pre-compute values */
6 #include <openssl/aes.h>        /* Needed to calculate pre-compute values */
7 #include <openssl/md5.h>        /* Needed to calculate pre-compute values */
8 #include <openssl/evp.h>        /* Needed for bpi runt block processing */
9
10 #include <rte_memcpy.h>
11 #include <rte_common.h>
12 #include <rte_spinlock.h>
13 #include <rte_byteorder.h>
14 #include <rte_log.h>
15 #include <rte_malloc.h>
16 #include <rte_crypto_sym.h>
17
18 #include "qat_logs.h"
19 #include "qat_sym_session.h"
20 #include "qat_sym_pmd.h"
21
22 /** Frees a context previously created
23  *  Depends on openssl libcrypto
24  */
25 static void
26 bpi_cipher_ctx_free(void *bpi_ctx)
27 {
28         if (bpi_ctx != NULL)
29                 EVP_CIPHER_CTX_free((EVP_CIPHER_CTX *)bpi_ctx);
30 }
31
32 /** Creates an OpenSSL cipher context for AES-128 or DES in ECB mode
33  *  Depends on openssl libcrypto
34  */
35 static int
36 bpi_cipher_ctx_init(enum rte_crypto_cipher_algorithm cryptodev_algo,
37                 enum rte_crypto_cipher_operation direction __rte_unused,
38                 const uint8_t *key, void **ctx)
39 {
40         const EVP_CIPHER *algo = NULL;
41         int ret;
42         *ctx = EVP_CIPHER_CTX_new();
43
44         if (*ctx == NULL) {
45                 ret = -ENOMEM;
46                 goto ctx_init_err;
47         }
48
49         if (cryptodev_algo == RTE_CRYPTO_CIPHER_DES_DOCSISBPI)
50                 algo = EVP_des_ecb();
51         else
52                 algo = EVP_aes_128_ecb();
53
54         /* IV will be ECB encrypted whether direction is encrypt or decrypt*/
55         if (EVP_EncryptInit_ex(*ctx, algo, NULL, key, 0) != 1) {
56                 ret = -EINVAL;
57                 goto ctx_init_err;
58         }
59
60         return 0;
61
62 ctx_init_err:
63         if (*ctx != NULL)
64                 EVP_CIPHER_CTX_free(*ctx);
65         return ret;
66 }
67
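/*
 * Walk the device capability table (terminated by an entry with op ==
 * RTE_CRYPTO_OP_TYPE_UNDEFINED) and report whether the given symmetric
 * cipher algorithm is supported by this device.
 */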
68 static int
69 qat_is_cipher_alg_supported(enum rte_crypto_cipher_algorithm algo,
70                 struct qat_sym_dev_private *internals)
71 {
72         int i = 0;
73         const struct rte_cryptodev_capabilities *capability;
74
75         while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
76                         RTE_CRYPTO_OP_TYPE_UNDEFINED) {
77                 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
78                         continue;
79
80                 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_CIPHER)
81                         continue;
82
83                 if (capability->sym.cipher.algo == algo)
84                         return 1;
85         }
86         return 0;
87 }
88
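/* Same capability-table walk as above, but for authentication algorithms. */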
89 static int
90 qat_is_auth_alg_supported(enum rte_crypto_auth_algorithm algo,
91                 struct qat_sym_dev_private *internals)
92 {
93         int i = 0;
94         const struct rte_cryptodev_capabilities *capability;
95
96         while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
97                         RTE_CRYPTO_OP_TYPE_UNDEFINED) {
98                 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
99                         continue;
100
101                 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_AUTH)
102                         continue;
103
104                 if (capability->sym.auth.algo == algo)
105                         return 1;
106         }
107         return 0;
108 }
109
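/*
 * Release the driver-private session data: free the DOCSIS BPI OpenSSL
 * context if one was created, zero the private area and return it to its
 * mempool.
 */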
110 void
111 qat_sym_session_clear(struct rte_cryptodev *dev,
112                 struct rte_cryptodev_sym_session *sess)
113 {
114         uint8_t index = dev->driver_id;
115         void *sess_priv = get_sym_session_private_data(sess, index);
116         struct qat_sym_session *s = (struct qat_sym_session *)sess_priv;
117
118         if (sess_priv) {
119                 if (s->bpi_ctx)
120                         bpi_cipher_ctx_free(s->bpi_ctx);
121                 memset(s, 0, qat_sym_session_get_private_size(dev));
122                 struct rte_mempool *sess_mp = rte_mempool_from_obj(sess_priv);
123
124                 set_sym_session_private_data(sess, index, NULL);
125                 rte_mempool_put(sess_mp, sess_priv);
126         }
127 }
128
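/*
 * Map an xform chain onto a QAT firmware LA command id: cipher only,
 * auth only, AEAD (slice order depends on algorithm and direction) or a
 * chained cipher+hash / hash+cipher command. For example, a CIPHER xform
 * chained to an AUTH xform maps to ICP_QAT_FW_LA_CMD_CIPHER_HASH.
 * Returns -1 for unsupported chains.
 */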
129 static int
130 qat_get_cmd_id(const struct rte_crypto_sym_xform *xform)
131 {
132         /* Cipher Only */
133         if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER && xform->next == NULL)
134                 return ICP_QAT_FW_LA_CMD_CIPHER;
135
136         /* Authentication Only */
137         if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH && xform->next == NULL)
138                 return ICP_QAT_FW_LA_CMD_AUTH;
139
140         /* AEAD */
141         if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
142                 /* AES-GCM and AES-CCM work in different orders:
143                  * GCM first encrypts and then generates the hash, whereas
144                  * AES-CCM first generates the hash and then encrypts.
145                  * The same relation applies to decryption.
146                  */
147                 if (xform->aead.op == RTE_CRYPTO_AEAD_OP_ENCRYPT)
148                         if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
149                                 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
150                         else
151                                 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
152                 else
153                         if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
154                                 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
155                         else
156                                 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
157         }
158
159         if (xform->next == NULL)
160                 return -1;
161
162         /* Cipher then Authenticate */
163         if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
164                         xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
165                 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
166
167         /* Authenticate then Cipher */
168         if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH &&
169                         xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
170                 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
171
172         return -1;
173 }
174
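/*
 * The two helpers below return the first auth or cipher xform found in
 * the chain, or NULL if the chain does not contain one.
 */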
175 static struct rte_crypto_auth_xform *
176 qat_get_auth_xform(struct rte_crypto_sym_xform *xform)
177 {
178         do {
179                 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH)
180                         return &xform->auth;
181
182                 xform = xform->next;
183         } while (xform);
184
185         return NULL;
186 }
187
188 static struct rte_crypto_cipher_xform *
189 qat_get_cipher_xform(struct rte_crypto_sym_xform *xform)
190 {
191         do {
192                 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
193                         return &xform->cipher;
194
195                 xform = xform->next;
196         } while (xform);
197
198         return NULL;
199 }
200
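/*
 * Parse the cipher xform: validate the key size, select the QAT cipher
 * algorithm and mode, record the IV offset/length and direction, and
 * build the cipher part of the content descriptor. The DOCSIS BPI
 * algorithms additionally create an OpenSSL ECB context used for
 * runt-block processing.
 */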
201 int
202 qat_sym_session_configure_cipher(struct rte_cryptodev *dev,
203                 struct rte_crypto_sym_xform *xform,
204                 struct qat_sym_session *session)
205 {
206         struct qat_sym_dev_private *internals = dev->data->dev_private;
207         struct rte_crypto_cipher_xform *cipher_xform = NULL;
208         int ret;
209
210         /* Get cipher xform from crypto xform chain */
211         cipher_xform = qat_get_cipher_xform(xform);
212
213         session->cipher_iv.offset = cipher_xform->iv.offset;
214         session->cipher_iv.length = cipher_xform->iv.length;
215
216         switch (cipher_xform->algo) {
217         case RTE_CRYPTO_CIPHER_AES_CBC:
218                 if (qat_sym_validate_aes_key(cipher_xform->key.length,
219                                 &session->qat_cipher_alg) != 0) {
220                         QAT_LOG(ERR, "Invalid AES cipher key size");
221                         ret = -EINVAL;
222                         goto error_out;
223                 }
224                 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
225                 break;
226         case RTE_CRYPTO_CIPHER_AES_CTR:
227                 if (qat_sym_validate_aes_key(cipher_xform->key.length,
228                                 &session->qat_cipher_alg) != 0) {
229                         QAT_LOG(ERR, "Invalid AES cipher key size");
230                         ret = -EINVAL;
231                         goto error_out;
232                 }
233                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
234                 break;
235         case RTE_CRYPTO_CIPHER_SNOW3G_UEA2:
236                 if (qat_sym_validate_snow3g_key(cipher_xform->key.length,
237                                         &session->qat_cipher_alg) != 0) {
238                         QAT_LOG(ERR, "Invalid SNOW 3G cipher key size");
239                         ret = -EINVAL;
240                         goto error_out;
241                 }
242                 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
243                 break;
244         case RTE_CRYPTO_CIPHER_NULL:
245                 session->qat_cipher_alg = ICP_QAT_HW_CIPHER_ALGO_NULL;
246                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
247                 break;
248         case RTE_CRYPTO_CIPHER_KASUMI_F8:
249                 if (qat_sym_validate_kasumi_key(cipher_xform->key.length,
250                                         &session->qat_cipher_alg) != 0) {
251                         QAT_LOG(ERR, "Invalid KASUMI cipher key size");
252                         ret = -EINVAL;
253                         goto error_out;
254                 }
255                 session->qat_mode = ICP_QAT_HW_CIPHER_F8_MODE;
256                 break;
257         case RTE_CRYPTO_CIPHER_3DES_CBC:
258                 if (qat_sym_validate_3des_key(cipher_xform->key.length,
259                                 &session->qat_cipher_alg) != 0) {
260                         QAT_LOG(ERR, "Invalid 3DES cipher key size");
261                         ret = -EINVAL;
262                         goto error_out;
263                 }
264                 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
265                 break;
266         case RTE_CRYPTO_CIPHER_DES_CBC:
267                 if (qat_sym_validate_des_key(cipher_xform->key.length,
268                                 &session->qat_cipher_alg) != 0) {
269                         QAT_LOG(ERR, "Invalid DES cipher key size");
270                         ret = -EINVAL;
271                         goto error_out;
272                 }
273                 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
274                 break;
275         case RTE_CRYPTO_CIPHER_3DES_CTR:
276                 if (qat_sym_validate_3des_key(cipher_xform->key.length,
277                                 &session->qat_cipher_alg) != 0) {
278                         QAT_LOG(ERR, "Invalid 3DES cipher key size");
279                         ret = -EINVAL;
280                         goto error_out;
281                 }
282                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
283                 break;
284         case RTE_CRYPTO_CIPHER_DES_DOCSISBPI:
285                 ret = bpi_cipher_ctx_init(
286                                         cipher_xform->algo,
287                                         cipher_xform->op,
288                                         cipher_xform->key.data,
289                                         &session->bpi_ctx);
290                 if (ret != 0) {
291                         QAT_LOG(ERR, "failed to create DES BPI ctx");
292                         goto error_out;
293                 }
294                 if (qat_sym_validate_des_key(cipher_xform->key.length,
295                                 &session->qat_cipher_alg) != 0) {
296                         QAT_LOG(ERR, "Invalid DES cipher key size");
297                         ret = -EINVAL;
298                         goto error_out;
299                 }
300                 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
301                 break;
302         case RTE_CRYPTO_CIPHER_AES_DOCSISBPI:
303                 ret = bpi_cipher_ctx_init(
304                                         cipher_xform->algo,
305                                         cipher_xform->op,
306                                         cipher_xform->key.data,
307                                         &session->bpi_ctx);
308                 if (ret != 0) {
309                         QAT_LOG(ERR, "failed to create AES BPI ctx");
310                         goto error_out;
311                 }
312                 if (qat_sym_validate_aes_docsisbpi_key(cipher_xform->key.length,
313                                 &session->qat_cipher_alg) != 0) {
314                         QAT_LOG(ERR, "Invalid AES DOCSISBPI key size");
315                         ret = -EINVAL;
316                         goto error_out;
317                 }
318                 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
319                 break;
320         case RTE_CRYPTO_CIPHER_ZUC_EEA3:
321                 if (!qat_is_cipher_alg_supported(
322                         cipher_xform->algo, internals)) {
323                         QAT_LOG(ERR, "%s not supported on this device",
324                                 rte_crypto_cipher_algorithm_strings
325                                         [cipher_xform->algo]);
326                         ret = -ENOTSUP;
327                         goto error_out;
328                 }
329                 if (qat_sym_validate_zuc_key(cipher_xform->key.length,
330                                 &session->qat_cipher_alg) != 0) {
331                         QAT_LOG(ERR, "Invalid ZUC cipher key size");
332                         ret = -EINVAL;
333                         goto error_out;
334                 }
335                 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
336                 break;
337         case RTE_CRYPTO_CIPHER_AES_XTS:
338                 if ((cipher_xform->key.length/2) == ICP_QAT_HW_AES_192_KEY_SZ) {
339                         QAT_LOG(ERR, "AES-XTS-192 not supported");
340                         ret = -EINVAL;
341                         goto error_out;
342                 }
343                 if (qat_sym_validate_aes_key((cipher_xform->key.length/2),
344                                 &session->qat_cipher_alg) != 0) {
345                         QAT_LOG(ERR, "Invalid AES-XTS cipher key size");
346                         ret = -EINVAL;
347                         goto error_out;
348                 }
349                 session->qat_mode = ICP_QAT_HW_CIPHER_XTS_MODE;
350                 break;
351         case RTE_CRYPTO_CIPHER_3DES_ECB:
352         case RTE_CRYPTO_CIPHER_AES_ECB:
353         case RTE_CRYPTO_CIPHER_AES_F8:
354         case RTE_CRYPTO_CIPHER_ARC4:
355                 QAT_LOG(ERR, "Crypto QAT PMD: Unsupported Cipher alg %u",
356                                 cipher_xform->algo);
357                 ret = -ENOTSUP;
358                 goto error_out;
359         default:
360                 QAT_LOG(ERR, "Crypto: Undefined Cipher specified %u\n",
361                                 cipher_xform->algo);
362                 ret = -EINVAL;
363                 goto error_out;
364         }
365
366         if (cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
367                 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
368         else
369                 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
370
371         if (qat_sym_session_aead_create_cd_cipher(session,
372                                                 cipher_xform->key.data,
373                                                 cipher_xform->key.length)) {
374                 ret = -EINVAL;
375                 goto error_out;
376         }
377
378         return 0;
379
380 error_out:
381         if (session->bpi_ctx) {
382                 bpi_cipher_ctx_free(session->bpi_ctx);
383                 session->bpi_ctx = NULL;
384         }
385         return ret;
386 }
387
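/*
 * cryptodev sym_session_configure entry point: take a private-data object
 * from the session mempool, fill it from the xform chain and attach it to
 * the generic session. A typical caller reaches this through the generic
 * API, roughly (sketch only, not taken from this file):
 *
 *   sess = rte_cryptodev_sym_session_create(sess_mp);
 *   rte_cryptodev_sym_session_init(dev_id, sess, &xform, priv_mp);
 */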
388 int
389 qat_sym_session_configure(struct rte_cryptodev *dev,
390                 struct rte_crypto_sym_xform *xform,
391                 struct rte_cryptodev_sym_session *sess,
392                 struct rte_mempool *mempool)
393 {
394         void *sess_private_data;
395         int ret;
396
397         if (rte_mempool_get(mempool, &sess_private_data)) {
398                 CDEV_LOG_ERR(
399                         "Couldn't get object from session mempool");
400                 return -ENOMEM;
401         }
402
403         ret = qat_sym_session_set_parameters(dev, xform, sess_private_data);
404         if (ret != 0) {
405                 QAT_LOG(ERR,
406                     "Crypto QAT PMD: failed to configure session parameters");
407
408                 /* Return session to mempool */
409                 rte_mempool_put(mempool, sess_private_data);
410                 return ret;
411         }
412
413         set_sym_session_private_data(sess, dev->driver_id,
414                 sess_private_data);
415
416         return 0;
417 }
418
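/*
 * Translate the xform chain into session state: record the physical
 * address of the content descriptor, pick the firmware command id and
 * build the cipher/auth/AEAD descriptor parts in the order that command
 * requires.
 */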
419 int
420 qat_sym_session_set_parameters(struct rte_cryptodev *dev,
421                 struct rte_crypto_sym_xform *xform, void *session_private)
422 {
423         struct qat_sym_session *session = session_private;
424         int ret;
425         int qat_cmd_id;
426
427         /* Set context descriptor physical address */
428         session->cd_paddr = rte_mempool_virt2iova(session) +
429                         offsetof(struct qat_sym_session, cd);
430
431         session->min_qat_dev_gen = QAT_GEN1;
432
433         /* Get requested QAT command id */
434         qat_cmd_id = qat_get_cmd_id(xform);
435         if (qat_cmd_id < 0 || qat_cmd_id >= ICP_QAT_FW_LA_CMD_DELIMITER) {
436                 QAT_LOG(ERR, "Unsupported xform chain requested");
437                 return -ENOTSUP;
438         }
439         session->qat_cmd = (enum icp_qat_fw_la_cmd_id)qat_cmd_id;
440         switch (session->qat_cmd) {
441         case ICP_QAT_FW_LA_CMD_CIPHER:
442                 ret = qat_sym_session_configure_cipher(dev, xform, session);
443                 if (ret < 0)
444                         return ret;
445                 break;
446         case ICP_QAT_FW_LA_CMD_AUTH:
447                 ret = qat_sym_session_configure_auth(dev, xform, session);
448                 if (ret < 0)
449                         return ret;
450                 break;
451         case ICP_QAT_FW_LA_CMD_CIPHER_HASH:
452                 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
453                         ret = qat_sym_session_configure_aead(xform,
454                                         session);
455                         if (ret < 0)
456                                 return ret;
457                 } else {
458                         ret = qat_sym_session_configure_cipher(dev,
459                                         xform, session);
460                         if (ret < 0)
461                                 return ret;
462                         ret = qat_sym_session_configure_auth(dev,
463                                         xform, session);
464                         if (ret < 0)
465                                 return ret;
466                 }
467                 break;
468         case ICP_QAT_FW_LA_CMD_HASH_CIPHER:
469                 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
470                         ret = qat_sym_session_configure_aead(xform,
471                                         session);
472                         if (ret < 0)
473                                 return ret;
474                 } else {
475                         ret = qat_sym_session_configure_auth(dev,
476                                         xform, session);
477                         if (ret < 0)
478                                 return ret;
479                         ret = qat_sym_session_configure_cipher(dev,
480                                         xform, session);
481                         if (ret < 0)
482                                 return ret;
483                 }
484                 break;
485         case ICP_QAT_FW_LA_CMD_TRNG_GET_RANDOM:
486         case ICP_QAT_FW_LA_CMD_TRNG_TEST:
487         case ICP_QAT_FW_LA_CMD_SSL3_KEY_DERIVE:
488         case ICP_QAT_FW_LA_CMD_TLS_V1_1_KEY_DERIVE:
489         case ICP_QAT_FW_LA_CMD_TLS_V1_2_KEY_DERIVE:
490         case ICP_QAT_FW_LA_CMD_MGF1:
491         case ICP_QAT_FW_LA_CMD_AUTH_PRE_COMP:
492         case ICP_QAT_FW_LA_CMD_CIPHER_PRE_COMP:
493         case ICP_QAT_FW_LA_CMD_DELIMITER:
494                 QAT_LOG(ERR, "Unsupported Service %u",
495                                 session->qat_cmd);
496                 return -ENOTSUP;
497         default:
498                 QAT_LOG(ERR, "Unsupported Service %u",
499                                 session->qat_cmd);
500                 return -ENOTSUP;
501         }
502
503         return 0;
504 }
505
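/*
 * Configure the authentication part of a session. AES-GMAC is special:
 * the cipher and auth content descriptors are both built from the auth
 * key and the command is then restored to authentication only. All other
 * algorithms only build the auth content descriptor.
 */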
506 int
507 qat_sym_session_configure_auth(struct rte_cryptodev *dev,
508                                 struct rte_crypto_sym_xform *xform,
509                                 struct qat_sym_session *session)
510 {
511         struct rte_crypto_auth_xform *auth_xform = qat_get_auth_xform(xform);
512         struct qat_sym_dev_private *internals = dev->data->dev_private;
513         const uint8_t *key_data = auth_xform->key.data;
514         uint8_t key_length = auth_xform->key.length;
515         session->aes_cmac = 0;
516
517         switch (auth_xform->algo) {
518         case RTE_CRYPTO_AUTH_SHA1_HMAC:
519                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA1;
520                 break;
521         case RTE_CRYPTO_AUTH_SHA224_HMAC:
522                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA224;
523                 break;
524         case RTE_CRYPTO_AUTH_SHA256_HMAC:
525                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA256;
526                 break;
527         case RTE_CRYPTO_AUTH_SHA384_HMAC:
528                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA384;
529                 break;
530         case RTE_CRYPTO_AUTH_SHA512_HMAC:
531                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA512;
532                 break;
533         case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
534                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
535                 break;
536         case RTE_CRYPTO_AUTH_AES_CMAC:
537                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
538                 session->aes_cmac = 1;
539                 break;
540         case RTE_CRYPTO_AUTH_AES_GMAC:
541                 if (qat_sym_validate_aes_key(auth_xform->key.length,
542                                 &session->qat_cipher_alg) != 0) {
543                         QAT_LOG(ERR, "Invalid AES key size");
544                         return -EINVAL;
545                 }
546                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
547                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
548
549                 break;
550         case RTE_CRYPTO_AUTH_SNOW3G_UIA2:
551                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2;
552                 break;
553         case RTE_CRYPTO_AUTH_MD5_HMAC:
554                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_MD5;
555                 break;
556         case RTE_CRYPTO_AUTH_NULL:
557                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_NULL;
558                 break;
559         case RTE_CRYPTO_AUTH_KASUMI_F9:
560                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_KASUMI_F9;
561                 break;
562         case RTE_CRYPTO_AUTH_ZUC_EIA3:
563                 if (!qat_is_auth_alg_supported(auth_xform->algo, internals)) {
564                         QAT_LOG(ERR, "%s not supported on this device",
565                                 rte_crypto_auth_algorithm_strings
566                                 [auth_xform->algo]);
567                         return -ENOTSUP;
568                 }
569                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3;
570                 break;
571         case RTE_CRYPTO_AUTH_SHA1:
572         case RTE_CRYPTO_AUTH_SHA256:
573         case RTE_CRYPTO_AUTH_SHA512:
574         case RTE_CRYPTO_AUTH_SHA224:
575         case RTE_CRYPTO_AUTH_SHA384:
576         case RTE_CRYPTO_AUTH_MD5:
577         case RTE_CRYPTO_AUTH_AES_CBC_MAC:
578                 QAT_LOG(ERR, "Crypto: Unsupported hash alg %u",
579                                 auth_xform->algo);
580                 return -ENOTSUP;
581         default:
582                 QAT_LOG(ERR, "Crypto: Undefined Hash algo %u specified",
583                                 auth_xform->algo);
584                 return -EINVAL;
585         }
586
587         session->auth_iv.offset = auth_xform->iv.offset;
588         session->auth_iv.length = auth_xform->iv.length;
589
590         if (auth_xform->algo == RTE_CRYPTO_AUTH_AES_GMAC) {
591                 if (auth_xform->op == RTE_CRYPTO_AUTH_OP_GENERATE) {
592                         session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER_HASH;
593                         session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
594                         /*
595                          * The cipher descriptor content must be created
596                          * first, then the authentication descriptor.
597                          */
598
599                         if (qat_sym_session_aead_create_cd_cipher(session,
600                                                 auth_xform->key.data,
601                                                 auth_xform->key.length))
602                                 return -EINVAL;
603
604                         if (qat_sym_session_aead_create_cd_auth(session,
605                                                 key_data,
606                                                 key_length,
607                                                 0,
608                                                 auth_xform->digest_length,
609                                                 auth_xform->op))
610                                 return -EINVAL;
611                 } else {
612                         session->qat_cmd = ICP_QAT_FW_LA_CMD_HASH_CIPHER;
613                         session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
614                         /*
615                          * The authentication descriptor content must be
616                          * created first, then the cipher descriptor.
617                          */
618
619                         if (qat_sym_session_aead_create_cd_auth(session,
620                                         key_data,
621                                         key_length,
622                                         0,
623                                         auth_xform->digest_length,
624                                         auth_xform->op))
625                                 return -EINVAL;
626
627                         if (qat_sym_session_aead_create_cd_cipher(session,
628                                                 auth_xform->key.data,
629                                                 auth_xform->key.length))
630                                 return -EINVAL;
631                 }
632                 /* Restore the command to authentication only */
633                 session->qat_cmd = ICP_QAT_FW_LA_CMD_AUTH;
634         } else {
635                 if (qat_sym_session_aead_create_cd_auth(session,
636                                 key_data,
637                                 key_length,
638                                 0,
639                                 auth_xform->digest_length,
640                                 auth_xform->op))
641                         return -EINVAL;
642         }
643
644         session->digest_length = auth_xform->digest_length;
645         return 0;
646 }
647
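/*
 * Configure an AEAD (AES-GCM or AES-CCM) session. The cipher and auth
 * parts of the content descriptor are built in the order the operation
 * requires: cipher first for GCM encryption and CCM decryption, auth
 * first for the opposite cases.
 */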
648 int
649 qat_sym_session_configure_aead(struct rte_crypto_sym_xform *xform,
650                                 struct qat_sym_session *session)
651 {
652         struct rte_crypto_aead_xform *aead_xform = &xform->aead;
653         enum rte_crypto_auth_operation crypto_operation;
654
655         /*
656          * Store AEAD IV parameters as cipher IV,
657          * to avoid unnecessary memory usage
658          */
659         session->cipher_iv.offset = xform->aead.iv.offset;
660         session->cipher_iv.length = xform->aead.iv.length;
661
662         switch (aead_xform->algo) {
663         case RTE_CRYPTO_AEAD_AES_GCM:
664                 if (qat_sym_validate_aes_key(aead_xform->key.length,
665                                 &session->qat_cipher_alg) != 0) {
666                         QAT_LOG(ERR, "Invalid AES key size");
667                         return -EINVAL;
668                 }
669                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
670                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
671                 break;
672         case RTE_CRYPTO_AEAD_AES_CCM:
673                 if (qat_sym_validate_aes_key(aead_xform->key.length,
674                                 &session->qat_cipher_alg) != 0) {
675                         QAT_LOG(ERR, "Invalid AES key size");
676                         return -EINVAL;
677                 }
678                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
679                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC;
680                 break;
681         default:
682                 QAT_LOG(ERR, "Crypto: Undefined AEAD specified %u\n",
683                                 aead_xform->algo);
684                 return -EINVAL;
685         }
686
687         if ((aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT &&
688                         aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) ||
689                         (aead_xform->op == RTE_CRYPTO_AEAD_OP_DECRYPT &&
690                         aead_xform->algo == RTE_CRYPTO_AEAD_AES_CCM)) {
691                 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
692                 /*
693                  * The cipher descriptor content must be created first,
694                  * then the authentication descriptor.
695                  */
696                 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
697                         RTE_CRYPTO_AUTH_OP_GENERATE : RTE_CRYPTO_AUTH_OP_VERIFY;
698
699                 if (qat_sym_session_aead_create_cd_cipher(session,
700                                         aead_xform->key.data,
701                                         aead_xform->key.length))
702                         return -EINVAL;
703
704                 if (qat_sym_session_aead_create_cd_auth(session,
705                                         aead_xform->key.data,
706                                         aead_xform->key.length,
707                                         aead_xform->aad_length,
708                                         aead_xform->digest_length,
709                                         crypto_operation))
710                         return -EINVAL;
711         } else {
712                 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
713                 /*
714                  * The authentication descriptor content must be created
715                  * first, then the cipher descriptor.
716                  */
717
718                 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
719                         RTE_CRYPTO_AUTH_OP_VERIFY : RTE_CRYPTO_AUTH_OP_GENERATE;
720
721                 if (qat_sym_session_aead_create_cd_auth(session,
722                                         aead_xform->key.data,
723                                         aead_xform->key.length,
724                                         aead_xform->aad_length,
725                                         aead_xform->digest_length,
726                                         crypto_operation))
727                         return -EINVAL;
728
729                 if (qat_sym_session_aead_create_cd_cipher(session,
730                                         aead_xform->key.data,
731                                         aead_xform->key.length))
732                         return -EINVAL;
733         }
734
735         session->digest_length = aead_xform->digest_length;
736         return 0;
737 }
738
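/* Size of the driver-private session data, rounded up to an 8-byte multiple. */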
739 unsigned int qat_sym_session_get_private_size(
740                 struct rte_cryptodev *dev __rte_unused)
741 {
742         return RTE_ALIGN_CEIL(sizeof(struct qat_sym_session), 8);
743 }
744
745 /* returns block size in bytes per cipher algo */
746 int qat_cipher_get_block_size(enum icp_qat_hw_cipher_algo qat_cipher_alg)
747 {
748         switch (qat_cipher_alg) {
749         case ICP_QAT_HW_CIPHER_ALGO_DES:
750                 return ICP_QAT_HW_DES_BLK_SZ;
751         case ICP_QAT_HW_CIPHER_ALGO_3DES:
752                 return ICP_QAT_HW_3DES_BLK_SZ;
753         case ICP_QAT_HW_CIPHER_ALGO_AES128:
754         case ICP_QAT_HW_CIPHER_ALGO_AES192:
755         case ICP_QAT_HW_CIPHER_ALGO_AES256:
756                 return ICP_QAT_HW_AES_BLK_SZ;
757         default:
758                 QAT_LOG(ERR, "invalid block cipher alg %u", qat_cipher_alg);
759                 return -EFAULT;
760         };
761         return -EFAULT;
762 }
763
764 /*
765  * Returns size in bytes per hash algo for state1 size field in cd_ctrl
766  * This is digest size rounded up to nearest quadword
767  */
768 static int qat_hash_get_state1_size(enum icp_qat_hw_auth_algo qat_hash_alg)
769 {
770         switch (qat_hash_alg) {
771         case ICP_QAT_HW_AUTH_ALGO_SHA1:
772                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA1_STATE1_SZ,
773                                                 QAT_HW_DEFAULT_ALIGNMENT);
774         case ICP_QAT_HW_AUTH_ALGO_SHA224:
775                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA224_STATE1_SZ,
776                                                 QAT_HW_DEFAULT_ALIGNMENT);
777         case ICP_QAT_HW_AUTH_ALGO_SHA256:
778                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA256_STATE1_SZ,
779                                                 QAT_HW_DEFAULT_ALIGNMENT);
780         case ICP_QAT_HW_AUTH_ALGO_SHA384:
781                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA384_STATE1_SZ,
782                                                 QAT_HW_DEFAULT_ALIGNMENT);
783         case ICP_QAT_HW_AUTH_ALGO_SHA512:
784                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
785                                                 QAT_HW_DEFAULT_ALIGNMENT);
786         case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
787                 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ,
788                                                 QAT_HW_DEFAULT_ALIGNMENT);
789         case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
790         case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
791                 return QAT_HW_ROUND_UP(ICP_QAT_HW_GALOIS_128_STATE1_SZ,
792                                                 QAT_HW_DEFAULT_ALIGNMENT);
793         case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
794                 return QAT_HW_ROUND_UP(ICP_QAT_HW_ZUC_3G_EIA3_STATE1_SZ,
795                                                 QAT_HW_DEFAULT_ALIGNMENT);
796         case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
797                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SNOW_3G_UIA2_STATE1_SZ,
798                                                 QAT_HW_DEFAULT_ALIGNMENT);
799         case ICP_QAT_HW_AUTH_ALGO_MD5:
800                 return QAT_HW_ROUND_UP(ICP_QAT_HW_MD5_STATE1_SZ,
801                                                 QAT_HW_DEFAULT_ALIGNMENT);
802         case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
803                 return QAT_HW_ROUND_UP(ICP_QAT_HW_KASUMI_F9_STATE1_SZ,
804                                                 QAT_HW_DEFAULT_ALIGNMENT);
805         case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
806                 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_CBC_MAC_STATE1_SZ,
807                                                 QAT_HW_DEFAULT_ALIGNMENT);
808         case ICP_QAT_HW_AUTH_ALGO_NULL:
809                 return QAT_HW_ROUND_UP(ICP_QAT_HW_NULL_STATE1_SZ,
810                                                 QAT_HW_DEFAULT_ALIGNMENT);
811         case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
812                 /* return maximum state1 size in this case */
813                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
814                                                 QAT_HW_DEFAULT_ALIGNMENT);
815         default:
816                 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
817                 return -EFAULT;
818         };
819         return -EFAULT;
820 }
821
822 /* returns digest size in bytes  per hash algo */
823 static int qat_hash_get_digest_size(enum icp_qat_hw_auth_algo qat_hash_alg)
824 {
825         switch (qat_hash_alg) {
826         case ICP_QAT_HW_AUTH_ALGO_SHA1:
827                 return ICP_QAT_HW_SHA1_STATE1_SZ;
828         case ICP_QAT_HW_AUTH_ALGO_SHA224:
829                 return ICP_QAT_HW_SHA224_STATE1_SZ;
830         case ICP_QAT_HW_AUTH_ALGO_SHA256:
831                 return ICP_QAT_HW_SHA256_STATE1_SZ;
832         case ICP_QAT_HW_AUTH_ALGO_SHA384:
833                 return ICP_QAT_HW_SHA384_STATE1_SZ;
834         case ICP_QAT_HW_AUTH_ALGO_SHA512:
835                 return ICP_QAT_HW_SHA512_STATE1_SZ;
836         case ICP_QAT_HW_AUTH_ALGO_MD5:
837                 return ICP_QAT_HW_MD5_STATE1_SZ;
838         case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
839                 return ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
840         case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
841                 /* return maximum digest size in this case */
842                 return ICP_QAT_HW_SHA512_STATE1_SZ;
843         default:
844                 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
845                 return -EFAULT;
846         };
847         return -EFAULT;
848 }
849
850 /* returns block size in bytes per hash algo */
851 static int qat_hash_get_block_size(enum icp_qat_hw_auth_algo qat_hash_alg)
852 {
853         switch (qat_hash_alg) {
854         case ICP_QAT_HW_AUTH_ALGO_SHA1:
855                 return SHA_CBLOCK;
856         case ICP_QAT_HW_AUTH_ALGO_SHA224:
857                 return SHA256_CBLOCK;
858         case ICP_QAT_HW_AUTH_ALGO_SHA256:
859                 return SHA256_CBLOCK;
860         case ICP_QAT_HW_AUTH_ALGO_SHA384:
861                 return SHA512_CBLOCK;
862         case ICP_QAT_HW_AUTH_ALGO_SHA512:
863                 return SHA512_CBLOCK;
864         case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
865                 return 16;
866         case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
867                 return ICP_QAT_HW_AES_BLK_SZ;
868         case ICP_QAT_HW_AUTH_ALGO_MD5:
869                 return MD5_CBLOCK;
870         case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
871                 /* return maximum block size in this case */
872                 return SHA512_CBLOCK;
873         default:
874                 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
875                 return -EFAULT;
876         };
877         return -EFAULT;
878 }
879
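/*
 * The partial_hash_* helpers below run a single compression-function
 * round over one block of input and copy out the raw internal hash
 * state. They are used to precompute the HMAC ipad/opad states stored
 * in the content descriptor.
 */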
880 static int partial_hash_sha1(uint8_t *data_in, uint8_t *data_out)
881 {
882         SHA_CTX ctx;
883
884         if (!SHA1_Init(&ctx))
885                 return -EFAULT;
886         SHA1_Transform(&ctx, data_in);
887         rte_memcpy(data_out, &ctx, SHA_DIGEST_LENGTH);
888         return 0;
889 }
890
891 static int partial_hash_sha224(uint8_t *data_in, uint8_t *data_out)
892 {
893         SHA256_CTX ctx;
894
895         if (!SHA224_Init(&ctx))
896                 return -EFAULT;
897         SHA256_Transform(&ctx, data_in);
898         rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
899         return 0;
900 }
901
902 static int partial_hash_sha256(uint8_t *data_in, uint8_t *data_out)
903 {
904         SHA256_CTX ctx;
905
906         if (!SHA256_Init(&ctx))
907                 return -EFAULT;
908         SHA256_Transform(&ctx, data_in);
909         rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
910         return 0;
911 }
912
913 static int partial_hash_sha384(uint8_t *data_in, uint8_t *data_out)
914 {
915         SHA512_CTX ctx;
916
917         if (!SHA384_Init(&ctx))
918                 return -EFAULT;
919         SHA512_Transform(&ctx, data_in);
920         rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
921         return 0;
922 }
923
924 static int partial_hash_sha512(uint8_t *data_in, uint8_t *data_out)
925 {
926         SHA512_CTX ctx;
927
928         if (!SHA512_Init(&ctx))
929                 return -EFAULT;
930         SHA512_Transform(&ctx, data_in);
931         rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
932         return 0;
933 }
934
935 static int partial_hash_md5(uint8_t *data_in, uint8_t *data_out)
936 {
937         MD5_CTX ctx;
938
939         if (!MD5_Init(&ctx))
940                 return -EFAULT;
941         MD5_Transform(&ctx, data_in);
942         rte_memcpy(data_out, &ctx, MD5_DIGEST_LENGTH);
943
944         return 0;
945 }
946
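/*
 * Compute a single-block partial hash and store the state in the layout
 * QAT expects: the SHA state words are byte-swapped into big-endian order
 * (32-bit words for SHA-1/224/256, 64-bit words for SHA-384/512), while
 * the MD5 state is copied as-is.
 */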
947 static int partial_hash_compute(enum icp_qat_hw_auth_algo hash_alg,
948                         uint8_t *data_in,
949                         uint8_t *data_out)
950 {
951         int digest_size;
952         uint8_t digest[qat_hash_get_digest_size(
953                         ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
954         uint32_t *hash_state_out_be32;
955         uint64_t *hash_state_out_be64;
956         int i;
957
958         digest_size = qat_hash_get_digest_size(hash_alg);
959         if (digest_size <= 0)
960                 return -EFAULT;
961
962         hash_state_out_be32 = (uint32_t *)data_out;
963         hash_state_out_be64 = (uint64_t *)data_out;
964
965         switch (hash_alg) {
966         case ICP_QAT_HW_AUTH_ALGO_SHA1:
967                 if (partial_hash_sha1(data_in, digest))
968                         return -EFAULT;
969                 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
970                         *hash_state_out_be32 =
971                                 rte_bswap32(*(((uint32_t *)digest)+i));
972                 break;
973         case ICP_QAT_HW_AUTH_ALGO_SHA224:
974                 if (partial_hash_sha224(data_in, digest))
975                         return -EFAULT;
976                 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
977                         *hash_state_out_be32 =
978                                 rte_bswap32(*(((uint32_t *)digest)+i));
979                 break;
980         case ICP_QAT_HW_AUTH_ALGO_SHA256:
981                 if (partial_hash_sha256(data_in, digest))
982                         return -EFAULT;
983                 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
984                         *hash_state_out_be32 =
985                                 rte_bswap32(*(((uint32_t *)digest)+i));
986                 break;
987         case ICP_QAT_HW_AUTH_ALGO_SHA384:
988                 if (partial_hash_sha384(data_in, digest))
989                         return -EFAULT;
990                 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
991                         *hash_state_out_be64 =
992                                 rte_bswap64(*(((uint64_t *)digest)+i));
993                 break;
994         case ICP_QAT_HW_AUTH_ALGO_SHA512:
995                 if (partial_hash_sha512(data_in, digest))
996                         return -EFAULT;
997                 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
998                         *hash_state_out_be64 =
999                                 rte_bswap64(*(((uint64_t *)digest)+i));
1000                 break;
1001         case ICP_QAT_HW_AUTH_ALGO_MD5:
1002                 if (partial_hash_md5(data_in, data_out))
1003                         return -EFAULT;
1004                 break;
1005         default:
1006                 QAT_LOG(ERR, "invalid hash alg %u", hash_alg);
1007                 return -EFAULT;
1008         }
1009
1010         return 0;
1011 }
1012 #define HMAC_IPAD_VALUE 0x36
1013 #define HMAC_OPAD_VALUE 0x5c
1014 #define HASH_XCBC_PRECOMP_KEY_NUM 3
1015
1016 static const uint8_t AES_CMAC_SEED[ICP_QAT_HW_AES_128_KEY_SZ];
1017
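/*
 * AES-CMAC subkey derivation (as in NIST SP 800-38B): left-shift the
 * 128-bit block by one bit and, if the original MSB was set, XOR the
 * constant Rb into the last byte.
 */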
1018 static void aes_cmac_key_derive(uint8_t *base, uint8_t *derived)
1019 {
1020         int i;
1021
1022         derived[0] = base[0] << 1;
1023         for (i = 1; i < ICP_QAT_HW_AES_BLK_SZ ; i++) {
1024                 derived[i] = base[i] << 1;
1025                 derived[i - 1] |= base[i] >> 7;
1026         }
1027
1028         if (base[0] & 0x80)
1029                 derived[ICP_QAT_HW_AES_BLK_SZ - 1] ^= QAT_AES_CMAC_CONST_RB;
1030 }
1031
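/*
 * Fill the authentication pre-compute area of the content descriptor:
 * AES-XCBC/CMAC derive their subkeys, GCM/GMAC compute the GHASH key H
 * by encrypting an all-zero block, and the HMAC algorithms store the
 * partial hashes of the ipad- and opad-xored key.
 */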
1032 static int qat_sym_do_precomputes(enum icp_qat_hw_auth_algo hash_alg,
1033                                 const uint8_t *auth_key,
1034                                 uint16_t auth_keylen,
1035                                 uint8_t *p_state_buf,
1036                                 uint16_t *p_state_len,
1037                                 uint8_t aes_cmac)
1038 {
1039         int block_size;
1040         uint8_t ipad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1041         uint8_t opad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1042         int i;
1043
1044         if (hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC) {
1045
1046                 /* CMAC */
1047                 if (aes_cmac) {
1048                         AES_KEY enc_key;
1049                         uint8_t *in = NULL;
1050                         uint8_t k0[ICP_QAT_HW_AES_128_KEY_SZ];
1051                         uint8_t *k1, *k2;
1052
1053                         auth_keylen = ICP_QAT_HW_AES_128_KEY_SZ;
1054
1055                         in = rte_zmalloc("AES CMAC K1",
1056                                          ICP_QAT_HW_AES_128_KEY_SZ, 16);
1057
1058                         if (in == NULL) {
1059                                 QAT_LOG(ERR, "Failed to alloc memory");
1060                                 return -ENOMEM;
1061                         }
1062
1063                         rte_memcpy(in, AES_CMAC_SEED,
1064                                    ICP_QAT_HW_AES_128_KEY_SZ);
1065                         rte_memcpy(p_state_buf, auth_key, auth_keylen);
1066
1067                         if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1068                                 &enc_key) != 0) {
1069                                 rte_free(in);
1070                                 return -EFAULT;
1071                         }
1072
1073                         AES_encrypt(in, k0, &enc_key);
1074
1075                         k1 = p_state_buf + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1076                         k2 = k1 + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1077
1078                         aes_cmac_key_derive(k0, k1);
1079                         aes_cmac_key_derive(k1, k2);
1080
1081                         memset(k0, 0, ICP_QAT_HW_AES_128_KEY_SZ);
1082                         *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
1083                         rte_free(in);
1084                         return 0;
1085                 } else {
1086                         static uint8_t qat_aes_xcbc_key_seed[
1087                                         ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ] = {
1088                                 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1089                                 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1090                                 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1091                                 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1092                                 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1093                                 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1094                         };
1095
1096                         uint8_t *in = NULL;
1097                         uint8_t *out = p_state_buf;
1098                         int x;
1099                         AES_KEY enc_key;
1100
1101                         in = rte_zmalloc("working mem for key",
1102                                         ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ, 16);
1103                         if (in == NULL) {
1104                                 QAT_LOG(ERR, "Failed to alloc memory");
1105                                 return -ENOMEM;
1106                         }
1107
1108                         rte_memcpy(in, qat_aes_xcbc_key_seed,
1109                                         ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1110                         for (x = 0; x < HASH_XCBC_PRECOMP_KEY_NUM; x++) {
1111                                 if (AES_set_encrypt_key(auth_key,
1112                                                         auth_keylen << 3,
1113                                                         &enc_key) != 0) {
1114                                         rte_free(in -
1115                                           (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ));
1116                                         memset(out -
1117                                            (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ),
1118                                           0, ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1119                                         return -EFAULT;
1120                                 }
1121                                 AES_encrypt(in, out, &enc_key);
1122                                 in += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1123                                 out += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1124                         }
1125                         *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
1126                         rte_free(in - x*ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ);
1127                         return 0;
1128                 }
1129
1130         } else if ((hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_128) ||
1131                 (hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_64)) {
1132                 uint8_t *in = NULL;
1133                 uint8_t *out = p_state_buf;
1134                 AES_KEY enc_key;
1135
1136                 memset(p_state_buf, 0, ICP_QAT_HW_GALOIS_H_SZ +
1137                                 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1138                                 ICP_QAT_HW_GALOIS_E_CTR0_SZ);
1139                 in = rte_zmalloc("working mem for key",
1140                                 ICP_QAT_HW_GALOIS_H_SZ, 16);
1141                 if (in == NULL) {
1142                         QAT_LOG(ERR, "Failed to alloc memory");
1143                         return -ENOMEM;
1144                 }
1145
1146                 memset(in, 0, ICP_QAT_HW_GALOIS_H_SZ);
1147                 if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1148                         &enc_key) != 0) {
                             rte_free(in);  /* free the working buffer on failure */
1149                         return -EFAULT;
1150                 }
1151                 AES_encrypt(in, out, &enc_key);
1152                 *p_state_len = ICP_QAT_HW_GALOIS_H_SZ +
1153                                 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1154                                 ICP_QAT_HW_GALOIS_E_CTR0_SZ;
1155                 rte_free(in);
1156                 return 0;
1157         }
1158
1159         block_size = qat_hash_get_block_size(hash_alg);
1160         if (block_size < 0)
1161                 return block_size;
1162         /* init ipad and opad from key and xor with fixed values */
1163         memset(ipad, 0, block_size);
1164         memset(opad, 0, block_size);
1165
1166         if (auth_keylen > (unsigned int)block_size) {
1167                 QAT_LOG(ERR, "invalid keylen %u", auth_keylen);
1168                 return -EFAULT;
1169         }
1170         rte_memcpy(ipad, auth_key, auth_keylen);
1171         rte_memcpy(opad, auth_key, auth_keylen);
1172
1173         for (i = 0; i < block_size; i++) {
1174                 uint8_t *ipad_ptr = ipad + i;
1175                 uint8_t *opad_ptr = opad + i;
1176                 *ipad_ptr ^= HMAC_IPAD_VALUE;
1177                 *opad_ptr ^= HMAC_OPAD_VALUE;
1178         }
1179
1180         /* do partial hash of ipad and copy to state1 */
1181         if (partial_hash_compute(hash_alg, ipad, p_state_buf)) {
1182                 memset(ipad, 0, block_size);
1183                 memset(opad, 0, block_size);
1184                 QAT_LOG(ERR, "ipad precompute failed");
1185                 return -EFAULT;
1186         }
1187
1188         /*
1189          * State1 length is a multiple of 8, so it may be larger than the digest.
1190          * Place the partial hash of opad state_len bytes after state1.
1191          */
1192         *p_state_len = qat_hash_get_state1_size(hash_alg);
1193         if (partial_hash_compute(hash_alg, opad, p_state_buf + *p_state_len)) {
1194                 memset(ipad, 0, block_size);
1195                 memset(opad, 0, block_size);
1196                 QAT_LOG(ERR, "opad precompute failed");
1197                 return -EFAULT;
1198         }
1199
1200         /*  don't leave data lying around */
1201         memset(ipad, 0, block_size);
1202         memset(opad, 0, block_size);
1203         return 0;
1204 }
1205
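/*
 * Initialise the common LA request header: service type, flat pointer
 * type, 16-byte IV field, no partial processing, and the protocol flag
 * (CCM/GCM/SNOW 3G/ZUC) required by the session.
 */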
1206 static void
1207 qat_sym_session_init_common_hdr(struct icp_qat_fw_comn_req_hdr *header,
1208                 enum qat_sym_proto_flag proto_flags)
1209 {
1210         header->hdr_flags =
1211                 ICP_QAT_FW_COMN_HDR_FLAGS_BUILD(ICP_QAT_FW_COMN_REQ_FLAG_SET);
1212         header->service_type = ICP_QAT_FW_COMN_REQ_CPM_FW_LA;
1213         header->comn_req_flags =
1214                 ICP_QAT_FW_COMN_FLAGS_BUILD(QAT_COMN_CD_FLD_TYPE_64BIT_ADR,
1215                                         QAT_COMN_PTR_TYPE_FLAT);
1216         ICP_QAT_FW_LA_PARTIAL_SET(header->serv_specif_flags,
1217                                   ICP_QAT_FW_LA_PARTIAL_NONE);
1218         ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(header->serv_specif_flags,
1219                                            ICP_QAT_FW_CIPH_IV_16BYTE_DATA);
1220
1221         switch (proto_flags) {
1222         case QAT_CRYPTO_PROTO_FLAG_NONE:
1223                 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1224                                         ICP_QAT_FW_LA_NO_PROTO);
1225                 break;
1226         case QAT_CRYPTO_PROTO_FLAG_CCM:
1227                 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1228                                         ICP_QAT_FW_LA_CCM_PROTO);
1229                 break;
1230         case QAT_CRYPTO_PROTO_FLAG_GCM:
1231                 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1232                                         ICP_QAT_FW_LA_GCM_PROTO);
1233                 break;
1234         case QAT_CRYPTO_PROTO_FLAG_SNOW3G:
1235                 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1236                                         ICP_QAT_FW_LA_SNOW_3G_PROTO);
1237                 break;
1238         case QAT_CRYPTO_PROTO_FLAG_ZUC:
1239                 ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(header->serv_specif_flags,
1240                         ICP_QAT_FW_LA_ZUC_3G_PROTO);
1241                 break;
1242         }
1243
1244         ICP_QAT_FW_LA_UPDATE_STATE_SET(header->serv_specif_flags,
1245                                            ICP_QAT_FW_LA_NO_UPDATE_STATE);
1246         ICP_QAT_FW_LA_DIGEST_IN_BUFFER_SET(header->serv_specif_flags,
1247                                         ICP_QAT_FW_LA_NO_DIGEST_IN_BUFFER);
1248 }
1249
1250 /*
1251  *      SNOW 3G and ZUC must not use this function; they set their
1252  *      protocol flag directly in both the cipher and the auth parts
1253  *      of the content descriptor building functions.
1254  */
1255 static enum qat_sym_proto_flag
1256 qat_get_crypto_proto_flag(uint16_t flags)
1257 {
1258         int proto = ICP_QAT_FW_LA_PROTO_GET(flags);
1259         enum qat_sym_proto_flag qat_proto_flag =
1260                         QAT_CRYPTO_PROTO_FLAG_NONE;
1261
1262         switch (proto) {
1263         case ICP_QAT_FW_LA_GCM_PROTO:
1264                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
1265                 break;
1266         case ICP_QAT_FW_LA_CCM_PROTO:
1267                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
1268                 break;
1269         }
1270
1271         return qat_proto_flag;
1272 }
1273
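/*
 * Build the cipher section of the content descriptor: program the slice
 * chain for the firmware command and decide whether the hardware must
 * convert the key (always for SNOW 3G and ZUC, and for the decrypt
 * direction otherwise; CTR-mode ciphers always run as encrypt with no
 * conversion).
 */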
1274 int qat_sym_session_aead_create_cd_cipher(struct qat_sym_session *cdesc,
1275                                                 const uint8_t *cipherkey,
1276                                                 uint32_t cipherkeylen)
1277 {
1278         struct icp_qat_hw_cipher_algo_blk *cipher;
1279         struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
1280         struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
1281         struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
1282         void *ptr = &req_tmpl->cd_ctrl;
1283         struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
1284         struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
1285         enum icp_qat_hw_cipher_convert key_convert;
1286         enum qat_sym_proto_flag qat_proto_flag =
1287                 QAT_CRYPTO_PROTO_FLAG_NONE;
1288         uint32_t total_key_size;
1289         uint16_t cipher_offset, cd_size;
1290         uint32_t wordIndex  = 0;
1291         uint32_t *temp_key = NULL;
1292
1293         if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER) {
1294                 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1295                 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1296                                         ICP_QAT_FW_SLICE_CIPHER);
1297                 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1298                                         ICP_QAT_FW_SLICE_DRAM_WR);
1299                 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1300                                         ICP_QAT_FW_LA_NO_RET_AUTH_RES);
1301                 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1302                                         ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
1303                 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1304         } else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
1305                 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1306                 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1307                                         ICP_QAT_FW_SLICE_CIPHER);
1308                 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1309                                         ICP_QAT_FW_SLICE_AUTH);
1310                 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1311                                         ICP_QAT_FW_SLICE_AUTH);
1312                 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1313                                         ICP_QAT_FW_SLICE_DRAM_WR);
1314                 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1315         } else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
1316                 QAT_LOG(ERR, "Invalid param, must be a cipher command.");
1317                 return -EFAULT;
1318         }
1319
1320         if (cdesc->qat_mode == ICP_QAT_HW_CIPHER_CTR_MODE) {
1321                 /*
1322                  * CTR-mode streaming ciphers are a special case: decrypt = encrypt,
1323                  * so override the direction and key-convert defaults set previously.
1324                  */
1325                 cdesc->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
1326                 key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
1327         } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2
1328                 || cdesc->qat_cipher_alg ==
1329                         ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)
1330                 key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
1331         else if (cdesc->qat_dir == ICP_QAT_HW_CIPHER_ENCRYPT)
1332                 key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
1333         else
1334                 key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
1335
1336         if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
1337                 total_key_size = ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ +
1338                         ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
1339                 cipher_cd_ctrl->cipher_state_sz =
1340                         ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
1341                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
1342
1343         } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
1344                 total_key_size = ICP_QAT_HW_KASUMI_F8_KEY_SZ;
1345                 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_KASUMI_BLK_SZ >> 3;
1346                 cipher_cd_ctrl->cipher_padding_sz =
1347                                         (2 * ICP_QAT_HW_KASUMI_BLK_SZ) >> 3;
1348         } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES) {
1349                 total_key_size = ICP_QAT_HW_3DES_KEY_SZ;
1350                 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_3DES_BLK_SZ >> 3;
1351                 qat_proto_flag =
1352                         qat_get_crypto_proto_flag(header->serv_specif_flags);
1353         } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_DES) {
1354                 total_key_size = ICP_QAT_HW_DES_KEY_SZ;
1355                 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_DES_BLK_SZ >> 3;
1356                 qat_proto_flag =
1357                         qat_get_crypto_proto_flag(header->serv_specif_flags);
1358         } else if (cdesc->qat_cipher_alg ==
1359                 ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
1360                 total_key_size = ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ +
1361                         ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
1362                 cipher_cd_ctrl->cipher_state_sz =
1363                         ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
1364                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
1365                 cdesc->min_qat_dev_gen = QAT_GEN2;
1366         } else {
1367                 total_key_size = cipherkeylen;
1368                 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_AES_BLK_SZ >> 3;
1369                 qat_proto_flag =
1370                         qat_get_crypto_proto_flag(header->serv_specif_flags);
1371         }
1372         cipher_cd_ctrl->cipher_key_sz = total_key_size >> 3;
1373         cipher_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
1374         cipher_cd_ctrl->cipher_cfg_offset = cipher_offset >> 3;
1375
1376         header->service_cmd_id = cdesc->qat_cmd;
1377         qat_sym_session_init_common_hdr(header, qat_proto_flag);
1378
1379         cipher = (struct icp_qat_hw_cipher_algo_blk *)cdesc->cd_cur_ptr;
1380         cipher->cipher_config.val =
1381             ICP_QAT_HW_CIPHER_CONFIG_BUILD(cdesc->qat_mode,
1382                                         cdesc->qat_cipher_alg, key_convert,
1383                                         cdesc->qat_dir);
1384
1385         if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
1386                 temp_key = (uint32_t *)(cdesc->cd_cur_ptr +
1387                                         sizeof(struct icp_qat_hw_cipher_config)
1388                                         + cipherkeylen);
1389                 memcpy(cipher->key, cipherkey, cipherkeylen);
1390                 memcpy(temp_key, cipherkey, cipherkeylen);
1391
1392                 /* XOR the key with the KASUMI F8 key modifier, word by word */
1393                 for (wordIndex = 0; wordIndex < (cipherkeylen >> 2);
1394                                                                 wordIndex++)
1395                         temp_key[wordIndex] ^= KASUMI_F8_KEY_MODIFIER_4_BYTES;
1396
1397                 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1398                                         cipherkeylen + cipherkeylen;
1399         } else {
1400                 memcpy(cipher->key, cipherkey, cipherkeylen);
1401                 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1402                                         cipherkeylen;
1403         }
1404
1405         if (total_key_size > cipherkeylen) {
1406                 uint32_t padding_size = total_key_size - cipherkeylen;
1407                 if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
1408                         && (cipherkeylen == QAT_3DES_KEY_SZ_OPT2)) {
1409                         /* K3 not provided, so reuse K1 as K3 */
1410                         memcpy(cdesc->cd_cur_ptr, cipherkey, padding_size);
1411                 } else if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
1412                         && (cipherkeylen == QAT_3DES_KEY_SZ_OPT3)) {
1413                         /* K2 and K3 not provided, so reuse K1 for both */
1414                         memcpy(cdesc->cd_cur_ptr, cipherkey,
1415                                 cipherkeylen);
1416                         memcpy(cdesc->cd_cur_ptr+cipherkeylen,
1417                                 cipherkey, cipherkeylen);
1418                 } else
1419                         memset(cdesc->cd_cur_ptr, 0, padding_size);
1420
1421                 cdesc->cd_cur_ptr += padding_size;
1422         }
1423         cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
1424         cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;
1425
1426         return 0;
1427 }
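/*
 * Editor's note (derived from the function above): on return, the cipher part
 * of the content descriptor at cdesc->cd is laid out as
 *
 *     [icp_qat_hw_cipher_config][cipher key][key padding, if any]
 *
 * For KASUMI F8 a second key copy XOR-ed with the F8 key modifier follows the
 * key; for short 3DES keys the missing K2/K3 are cloned from K1. cd_cur_ptr
 * is left just past this block, so for chained cipher+hash sessions
 * qat_sym_session_aead_create_cd_auth() appends the hash setup there.
 */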
1428
1429 int qat_sym_session_aead_create_cd_auth(struct qat_sym_session *cdesc,
1430                                                 const uint8_t *authkey,
1431                                                 uint32_t authkeylen,
1432                                                 uint32_t aad_length,
1433                                                 uint32_t digestsize,
1434                                                 unsigned int operation)
1435 {
1436         struct icp_qat_hw_auth_setup *hash;
1437         struct icp_qat_hw_cipher_algo_blk *cipherconfig;
1438         struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
1439         struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
1440         struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
1441         void *ptr = &req_tmpl->cd_ctrl;
1442         struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
1443         struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
1444         struct icp_qat_fw_la_auth_req_params *auth_param =
1445                 (struct icp_qat_fw_la_auth_req_params *)
1446                 ((char *)&req_tmpl->serv_specif_rqpars +
1447                 sizeof(struct icp_qat_fw_la_cipher_req_params));
1448         uint16_t state1_size = 0, state2_size = 0;
1449         uint16_t hash_offset, cd_size;
1450         uint32_t *aad_len = NULL;
1451         uint32_t wordIndex  = 0;
1452         uint32_t *pTempKey;
1453         enum qat_sym_proto_flag qat_proto_flag =
1454                 QAT_CRYPTO_PROTO_FLAG_NONE;
1455
1456         if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_AUTH) {
1457                 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1458                                         ICP_QAT_FW_SLICE_AUTH);
1459                 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1460                                         ICP_QAT_FW_SLICE_DRAM_WR);
1461                 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1462         } else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
1463                 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1464                                 ICP_QAT_FW_SLICE_AUTH);
1465                 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1466                                 ICP_QAT_FW_SLICE_CIPHER);
1467                 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1468                                 ICP_QAT_FW_SLICE_CIPHER);
1469                 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1470                                 ICP_QAT_FW_SLICE_DRAM_WR);
1471                 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1472         } else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
1473                 QAT_LOG(ERR, "Invalid param, must be a hash command.");
1474                 return -EFAULT;
1475         }
1476
1477         if (operation == RTE_CRYPTO_AUTH_OP_VERIFY) {
1478                 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1479                                 ICP_QAT_FW_LA_NO_RET_AUTH_RES);
1480                 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1481                                 ICP_QAT_FW_LA_CMP_AUTH_RES);
1482                 cdesc->auth_op = ICP_QAT_HW_AUTH_VERIFY;
1483         } else {
1484                 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1485                                            ICP_QAT_FW_LA_RET_AUTH_RES);
1486                 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1487                                            ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
1488                 cdesc->auth_op = ICP_QAT_HW_AUTH_GENERATE;
1489         }
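        /*
         * Editor's note: with the RET/CMP flags set above, VERIFY asks the
         * firmware to compare the digest and not return it, while GENERATE
         * asks it to return the computed digest and skip the comparison.
         */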
1490
1491         /*
1492          * Set up the inner hash config
1493          */
1494         hash_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
1495         hash = (struct icp_qat_hw_auth_setup *)cdesc->cd_cur_ptr;
1496         hash->auth_config.reserved = 0;
1497         hash->auth_config.config =
1498                         ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE1,
1499                                 cdesc->qat_hash_alg, digestsize);
1500
1501         if (cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2
1502                 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_KASUMI_F9
1503                 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3
1504                 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC
1505                 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC
1506                 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_NULL
1507                         )
1508                 hash->auth_counter.counter = 0;
1509         else {
1510                 int block_size = qat_hash_get_block_size(cdesc->qat_hash_alg);
1511
1512                 if (block_size < 0)
1513                         return block_size;
1514                 hash->auth_counter.counter = rte_bswap32(block_size);
1515         }
1516
1517         cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_auth_setup);
1518
1519         /*
1520          * cd_cur_ptr now points at the state1 information.
1521          */
1522         switch (cdesc->qat_hash_alg) {
1523         case ICP_QAT_HW_AUTH_ALGO_SHA1:
1524                 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA1, authkey,
1525                         authkeylen, cdesc->cd_cur_ptr, &state1_size,
1526                         cdesc->aes_cmac)) {
1527                         QAT_LOG(ERR, "(SHA)precompute failed");
1528                         return -EFAULT;
1529                 }
1530                 state2_size = RTE_ALIGN_CEIL(ICP_QAT_HW_SHA1_STATE2_SZ, 8);
1531                 break;
1532         case ICP_QAT_HW_AUTH_ALGO_SHA224:
1533                 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA224, authkey,
1534                         authkeylen, cdesc->cd_cur_ptr, &state1_size,
1535                         cdesc->aes_cmac)) {
1536                         QAT_LOG(ERR, "(SHA)precompute failed");
1537                         return -EFAULT;
1538                 }
1539                 state2_size = ICP_QAT_HW_SHA224_STATE2_SZ;
1540                 break;
1541         case ICP_QAT_HW_AUTH_ALGO_SHA256:
1542                 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA256, authkey,
1543                         authkeylen, cdesc->cd_cur_ptr,  &state1_size,
1544                         cdesc->aes_cmac)) {
1545                         QAT_LOG(ERR, "(SHA)precompute failed");
1546                         return -EFAULT;
1547                 }
1548                 state2_size = ICP_QAT_HW_SHA256_STATE2_SZ;
1549                 break;
1550         case ICP_QAT_HW_AUTH_ALGO_SHA384:
1551                 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA384, authkey,
1552                         authkeylen, cdesc->cd_cur_ptr, &state1_size,
1553                         cdesc->aes_cmac)) {
1554                         QAT_LOG(ERR, "(SHA)precompute failed");
1555                         return -EFAULT;
1556                 }
1557                 state2_size = ICP_QAT_HW_SHA384_STATE2_SZ;
1558                 break;
1559         case ICP_QAT_HW_AUTH_ALGO_SHA512:
1560                 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA512, authkey,
1561                         authkeylen, cdesc->cd_cur_ptr,  &state1_size,
1562                         cdesc->aes_cmac)) {
1563                         QAT_LOG(ERR, "(SHA)precompute failed");
1564                         return -EFAULT;
1565                 }
1566                 state2_size = ICP_QAT_HW_SHA512_STATE2_SZ;
1567                 break;
1568         case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
1569                 state1_size = ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1570
1571                 if (cdesc->aes_cmac)
1572                         memset(cdesc->cd_cur_ptr, 0, state1_size);
1573                 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC,
1574                         authkey, authkeylen, cdesc->cd_cur_ptr + state1_size,
1575                         &state2_size, cdesc->aes_cmac)) {
1576                         cdesc->aes_cmac ? QAT_LOG(ERR,
1577                                                   "(CMAC)precompute failed")
1578                                         : QAT_LOG(ERR,
1579                                                   "(XCBC)precompute failed");
1580                         return -EFAULT;
1581                 }
1582                 break;
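        /*
         * Editor's note for the GCM/GMAC cases below: state2 begins with the
         * precomputed hash key H (written by qat_sym_do_precomputes()),
         * followed by the big-endian AAD length stored at offset
         * ICP_QAT_HW_GALOIS_H_SZ within state2.
         */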
1583         case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
1584         case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
1585                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
1586                 state1_size = ICP_QAT_HW_GALOIS_128_STATE1_SZ;
1587                 if (qat_sym_do_precomputes(cdesc->qat_hash_alg, authkey,
1588                         authkeylen, cdesc->cd_cur_ptr + state1_size,
1589                         &state2_size, cdesc->aes_cmac)) {
1590                         QAT_LOG(ERR, "(GCM)precompute failed");
1591                         return -EFAULT;
1592                 }
1593                 /*
1594                  * Write the AAD length into bytes 16-19 of state2 in
1595                  * big-endian format; the field itself is 8 bytes wide.
1596                  */
1597                 auth_param->u2.aad_sz =
1598                                 RTE_ALIGN_CEIL(aad_length, 16);
1599                 auth_param->hash_state_sz = (auth_param->u2.aad_sz) >> 3;
1600
1601                 aad_len = (uint32_t *)(cdesc->cd_cur_ptr +
1602                                         ICP_QAT_HW_GALOIS_128_STATE1_SZ +
1603                                         ICP_QAT_HW_GALOIS_H_SZ);
1604                 *aad_len = rte_bswap32(aad_length);
1605                 cdesc->aad_len = aad_length;
1606                 break;
1607         case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
1608                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
1609                 state1_size = qat_hash_get_state1_size(
1610                                 ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2);
1611                 state2_size = ICP_QAT_HW_SNOW_3G_UIA2_STATE2_SZ;
1612                 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
1613
1614                 cipherconfig = (struct icp_qat_hw_cipher_algo_blk *)
1615                                 (cdesc->cd_cur_ptr + state1_size + state2_size);
1616                 cipherconfig->cipher_config.val =
1617                 ICP_QAT_HW_CIPHER_CONFIG_BUILD(ICP_QAT_HW_CIPHER_ECB_MODE,
1618                         ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2,
1619                         ICP_QAT_HW_CIPHER_KEY_CONVERT,
1620                         ICP_QAT_HW_CIPHER_ENCRYPT);
1621                 memcpy(cipherconfig->key, authkey, authkeylen);
1622                 memset(cipherconfig->key + authkeylen,
1623                                 0, ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ);
1624                 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1625                                 authkeylen + ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
1626                 auth_param->hash_state_sz = ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
1627                 break;
1628         case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
1629                 hash->auth_config.config =
1630                         ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE0,
1631                                 cdesc->qat_hash_alg, digestsize);
1632                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
1633                 state1_size = qat_hash_get_state1_size(
1634                                 ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3);
1635                 state2_size = ICP_QAT_HW_ZUC_3G_EIA3_STATE2_SZ;
1636                 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size
1637                         + ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ);
1638
1639                 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1640                 cdesc->cd_cur_ptr += state1_size + state2_size
1641                         + ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
1642                 auth_param->hash_state_sz = ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
1643                 cdesc->min_qat_dev_gen = QAT_GEN2;
1644
1645                 break;
1646         case ICP_QAT_HW_AUTH_ALGO_MD5:
1647                 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_MD5, authkey,
1648                         authkeylen, cdesc->cd_cur_ptr, &state1_size,
1649                         cdesc->aes_cmac)) {
1650                         QAT_LOG(ERR, "(MD5)precompute failed");
1651                         return -EFAULT;
1652                 }
1653                 state2_size = ICP_QAT_HW_MD5_STATE2_SZ;
1654                 break;
1655         case ICP_QAT_HW_AUTH_ALGO_NULL:
1656                 state1_size = qat_hash_get_state1_size(
1657                                 ICP_QAT_HW_AUTH_ALGO_NULL);
1658                 state2_size = ICP_QAT_HW_NULL_STATE2_SZ;
1659                 break;
1660         case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
1661                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
1662                 state1_size = qat_hash_get_state1_size(
1663                                 ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC);
1664                 state2_size = ICP_QAT_HW_AES_CBC_MAC_KEY_SZ +
1665                                 ICP_QAT_HW_AES_CCM_CBC_E_CTR0_SZ;
1666
1667                 if (aad_length > 0) {
1668                         aad_length += ICP_QAT_HW_CCM_AAD_B0_LEN +
1669                         ICP_QAT_HW_CCM_AAD_LEN_INFO;
1670                         auth_param->u2.aad_sz =
1671                         RTE_ALIGN_CEIL(aad_length,
1672                         ICP_QAT_HW_CCM_AAD_ALIGNMENT);
1673                 } else {
1674                         auth_param->u2.aad_sz = ICP_QAT_HW_CCM_AAD_B0_LEN;
1675                 }
1676                 cdesc->aad_len = aad_length;
1677                 hash->auth_counter.counter = 0;
1678
1679                 hash_cd_ctrl->outer_prefix_sz = digestsize;
1680                 auth_param->hash_state_sz = digestsize;
1681
1682                 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1683                 break;
1684         case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
1685                 state1_size = qat_hash_get_state1_size(
1686                                 ICP_QAT_HW_AUTH_ALGO_KASUMI_F9);
1687                 state2_size = ICP_QAT_HW_KASUMI_F9_STATE2_SZ;
1688                 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
1689                 pTempKey = (uint32_t *)(cdesc->cd_cur_ptr + state1_size
1690                                                         + authkeylen);
1691                 /*
1692                  * The Inner Hash Initial State2 block must contain IK
1693                  * (Initialisation Key), followed by IK XOR-ed with KM
1694                  * (Key Modifier): IK||(IK^KM).
1695                  */
1696                 /* write the auth key */
1697                 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1698                 /* initialise temp key with auth key */
1699                 memcpy(pTempKey, authkey, authkeylen);
1700                 /* XOR Key with KASUMI F9 key modifier at 4 bytes level */
1701                 /* XOR the key with the KASUMI F9 key modifier, word by word */
1702                         pTempKey[wordIndex] ^= KASUMI_F9_KEY_MODIFIER_4_BYTES;
1703                 break;
1704         default:
1705                 QAT_LOG(ERR, "Invalid HASH alg %u", cdesc->qat_hash_alg);
1706                 return -EFAULT;
1707         }
1708
1709         /* Request template setup */
1710         qat_sym_session_init_common_hdr(header, qat_proto_flag);
1711         header->service_cmd_id = cdesc->qat_cmd;
1712
1713         /* Auth CD config setup */
1714         hash_cd_ctrl->hash_cfg_offset = hash_offset >> 3;
1715         hash_cd_ctrl->hash_flags = ICP_QAT_FW_AUTH_HDR_FLAG_NO_NESTED;
1716         hash_cd_ctrl->inner_res_sz = digestsize;
1717         hash_cd_ctrl->final_sz = digestsize;
1718         hash_cd_ctrl->inner_state1_sz = state1_size;
1719         auth_param->auth_res_sz = digestsize;
1720
1721         hash_cd_ctrl->inner_state2_sz  = state2_size;
1722         hash_cd_ctrl->inner_state2_offset = hash_cd_ctrl->hash_cfg_offset +
1723                         ((sizeof(struct icp_qat_hw_auth_setup) +
1724                          RTE_ALIGN_CEIL(hash_cd_ctrl->inner_state1_sz, 8))
1725                                         >> 3);
1726
1727         cdesc->cd_cur_ptr += state1_size + state2_size;
1728         cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
1729
1730         cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1731         cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;
1732
1733         return 0;
1734 }
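/*
 * Editor's illustrative sketch (not upstream code): for an
 * ICP_QAT_FW_LA_CMD_CIPHER_HASH session the two builders above are called in
 * this order, so the hash setup is appended directly after the cipher block
 * at cd_cur_ptr. The wrapper and its parameter choices are hypothetical.
 */
static int __rte_unused
example_build_cipher_hash_cd(struct qat_sym_session *s,
                const uint8_t *ckey, uint32_t ckeylen,
                const uint8_t *akey, uint32_t akeylen,
                uint32_t digestsize)
{
        int ret;

        /* Cipher block first: leaves cd_cur_ptr past the config + key. */
        ret = qat_sym_session_aead_create_cd_cipher(s, ckey, ckeylen);
        if (ret != 0)
                return ret;

        /* Hash block second: no AAD in this example, digest generation. */
        return qat_sym_session_aead_create_cd_auth(s, akey, akeylen, 0,
                        digestsize, RTE_CRYPTO_AUTH_OP_GENERATE);
}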
1735
1736 int qat_sym_validate_aes_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1737 {
1738         switch (key_len) {
1739         case ICP_QAT_HW_AES_128_KEY_SZ:
1740                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
1741                 break;
1742         case ICP_QAT_HW_AES_192_KEY_SZ:
1743                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES192;
1744                 break;
1745         case ICP_QAT_HW_AES_256_KEY_SZ:
1746                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
1747                 break;
1748         default:
1749                 return -EINVAL;
1750         }
1751         return 0;
1752 }
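/*
 * Editor's illustrative usage sketch (hypothetical helper, not upstream
 * code): the validate helpers in this group map a raw key length onto the
 * matching ICP_QAT_HW_CIPHER_ALGO_* value before the content descriptor is
 * built. QAT_LOG and struct rte_crypto_cipher_xform come from this file's
 * existing includes.
 */
static int __rte_unused
example_pick_aes_alg(const struct rte_crypto_cipher_xform *cipher,
                enum icp_qat_hw_cipher_algo *alg)
{
        /* 16/24/32 byte keys select AES-128/192/256 respectively */
        if (qat_sym_validate_aes_key(cipher->key.length, alg) != 0) {
                QAT_LOG(ERR, "Unsupported AES key length %u",
                                cipher->key.length);
                return -EINVAL;
        }
        return 0;
}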
1753
1754 int qat_sym_validate_aes_docsisbpi_key(int key_len,
1755                 enum icp_qat_hw_cipher_algo *alg)
1756 {
1757         switch (key_len) {
1758         case ICP_QAT_HW_AES_128_KEY_SZ:
1759                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
1760                 break;
1761         default:
1762                 return -EINVAL;
1763         }
1764         return 0;
1765 }
1766
1767 int qat_sym_validate_snow3g_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1768 {
1769         switch (key_len) {
1770         case ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ:
1771                 *alg = ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2;
1772                 break;
1773         default:
1774                 return -EINVAL;
1775         }
1776         return 0;
1777 }
1778
1779 int qat_sym_validate_kasumi_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1780 {
1781         switch (key_len) {
1782         case ICP_QAT_HW_KASUMI_KEY_SZ:
1783                 *alg = ICP_QAT_HW_CIPHER_ALGO_KASUMI;
1784                 break;
1785         default:
1786                 return -EINVAL;
1787         }
1788         return 0;
1789 }
1790
1791 int qat_sym_validate_des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1792 {
1793         switch (key_len) {
1794         case ICP_QAT_HW_DES_KEY_SZ:
1795                 *alg = ICP_QAT_HW_CIPHER_ALGO_DES;
1796                 break;
1797         default:
1798                 return -EINVAL;
1799         }
1800         return 0;
1801 }
1802
1803 int qat_sym_validate_3des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1804 {
1805         switch (key_len) {
1806         case QAT_3DES_KEY_SZ_OPT1:
1807         case QAT_3DES_KEY_SZ_OPT2:
1808         case QAT_3DES_KEY_SZ_OPT3:
1809                 *alg = ICP_QAT_HW_CIPHER_ALGO_3DES;
1810                 break;
1811         default:
1812                 return -EINVAL;
1813         }
1814         return 0;
1815 }
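/*
 * Editor's note (derived from the key-padding logic in
 * qat_sym_session_aead_create_cd_cipher() above): QAT_3DES_KEY_SZ_OPT1/2/3
 * correspond to 24-, 16- and 8-byte keys, i.e. three distinct keys,
 * K3 = K1, and K1 = K2 = K3 respectively; the shorter variants are expanded
 * to the full 24 bytes when the content descriptor is built.
 */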
1816
1817 int qat_sym_validate_zuc_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1818 {
1819         switch (key_len) {
1820         case ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ:
1821                 *alg = ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3;
1822                 break;
1823         default:
1824                 return -EINVAL;
1825         }
1826         return 0;
1827 }