drivers/crypto/qat/qat_sym_session.c
1 /* SPDX-License-Identifier: (BSD-3-Clause OR GPL-2.0)
2  * Copyright(c) 2015-2019 Intel Corporation
3  */
4
5 #include <openssl/sha.h>        /* Needed to calculate pre-compute values */
6 #include <openssl/aes.h>        /* Needed to calculate pre-compute values */
7 #include <openssl/md5.h>        /* Needed to calculate pre-compute values */
8 #include <openssl/evp.h>        /* Needed for bpi runt block processing */
9
10 #include <rte_memcpy.h>
11 #include <rte_common.h>
12 #include <rte_spinlock.h>
13 #include <rte_byteorder.h>
14 #include <rte_log.h>
15 #include <rte_malloc.h>
16 #include <rte_crypto_sym.h>
17
18 #include "qat_logs.h"
19 #include "qat_sym_session.h"
20 #include "qat_sym_pmd.h"
21
22 /** Frees a cipher context previously created by bpi_cipher_ctx_init().
23  *  Depends on OpenSSL libcrypto.
24  */
25 static void
26 bpi_cipher_ctx_free(void *bpi_ctx)
27 {
28         if (bpi_ctx != NULL)
29                 EVP_CIPHER_CTX_free((EVP_CIPHER_CTX *)bpi_ctx);
30 }
31
32 /** Creates an AES or DES cipher context in ECB mode.
33  *  Depends on OpenSSL libcrypto.
34  */
35 static int
36 bpi_cipher_ctx_init(enum rte_crypto_cipher_algorithm cryptodev_algo,
37                 enum rte_crypto_cipher_operation direction __rte_unused,
38                 uint8_t *key, void **ctx)
39 {
40         const EVP_CIPHER *algo = NULL;
41         int ret;
42         *ctx = EVP_CIPHER_CTX_new();
43
44         if (*ctx == NULL) {
45                 ret = -ENOMEM;
46                 goto ctx_init_err;
47         }
48
49         if (cryptodev_algo == RTE_CRYPTO_CIPHER_DES_DOCSISBPI)
50                 algo = EVP_des_ecb();
51         else
52                 algo = EVP_aes_128_ecb();
53
54         /* IV will be ECB encrypted whether direction is encrypt or decrypt */
55         if (EVP_EncryptInit_ex(*ctx, algo, NULL, key, 0) != 1) {
56                 ret = -EINVAL;
57                 goto ctx_init_err;
58         }
59
60         return 0;
61
62 ctx_init_err:
63         if (*ctx != NULL)
64                 EVP_CIPHER_CTX_free(*ctx);
65         return ret;
66 }
67
68 static int
69 qat_is_cipher_alg_supported(enum rte_crypto_cipher_algorithm algo,
70                 struct qat_sym_dev_private *internals)
71 {
72         int i = 0;
73         const struct rte_cryptodev_capabilities *capability;
74
75         while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
76                         RTE_CRYPTO_OP_TYPE_UNDEFINED) {
77                 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
78                         continue;
79
80                 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_CIPHER)
81                         continue;
82
83                 if (capability->sym.cipher.algo == algo)
84                         return 1;
85         }
86         return 0;
87 }
88
89 static int
90 qat_is_auth_alg_supported(enum rte_crypto_auth_algorithm algo,
91                 struct qat_sym_dev_private *internals)
92 {
93         int i = 0;
94         const struct rte_cryptodev_capabilities *capability;
95
96         while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
97                         RTE_CRYPTO_OP_TYPE_UNDEFINED) {
98                 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
99                         continue;
100
101                 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_AUTH)
102                         continue;
103
104                 if (capability->sym.auth.algo == algo)
105                         return 1;
106         }
107         return 0;
108 }
109
110 void
111 qat_sym_session_clear(struct rte_cryptodev *dev,
112                 struct rte_cryptodev_sym_session *sess)
113 {
114         uint8_t index = dev->driver_id;
115         void *sess_priv = get_sym_session_private_data(sess, index);
116         struct qat_sym_session *s = (struct qat_sym_session *)sess_priv;
117
118         if (sess_priv) {
119                 if (s->bpi_ctx)
120                         bpi_cipher_ctx_free(s->bpi_ctx);
121                 memset(s, 0, qat_sym_session_get_private_size(dev));
122                 struct rte_mempool *sess_mp = rte_mempool_from_obj(sess_priv);
123
124                 set_sym_session_private_data(sess, index, NULL);
125                 rte_mempool_put(sess_mp, sess_priv);
126         }
127 }
128
129 static int
130 qat_get_cmd_id(const struct rte_crypto_sym_xform *xform)
131 {
132         /* Cipher Only */
133         if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER && xform->next == NULL)
134                 return ICP_QAT_FW_LA_CMD_CIPHER;
135
136         /* Authentication Only */
137         if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH && xform->next == NULL)
138                 return ICP_QAT_FW_LA_CMD_AUTH;
139
140         /* AEAD */
141         if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
142                 /* AES-GCM and AES-CCM work in opposite orders:
143                  * GCM encrypts first and then generates the hash, whereas
144                  * CCM generates the hash first and then encrypts. The same
145                  * relation applies to decryption.
146                  */
147                 if (xform->aead.op == RTE_CRYPTO_AEAD_OP_ENCRYPT)
148                         if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
149                                 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
150                         else
151                                 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
152                 else
153                         if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
154                                 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
155                         else
156                                 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
157         }
158
159         if (xform->next == NULL)
160                 return -1;
161
162         /* Cipher then Authenticate */
163         if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
164                         xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
165                 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
166
167         /* Authenticate then Cipher */
168         if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH &&
169                         xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
170                 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
171
172         return -1;
173 }
174
175 static struct rte_crypto_auth_xform *
176 qat_get_auth_xform(struct rte_crypto_sym_xform *xform)
177 {
178         do {
179                 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH)
180                         return &xform->auth;
181
182                 xform = xform->next;
183         } while (xform);
184
185         return NULL;
186 }
187
188 static struct rte_crypto_cipher_xform *
189 qat_get_cipher_xform(struct rte_crypto_sym_xform *xform)
190 {
191         do {
192                 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
193                         return &xform->cipher;
194
195                 xform = xform->next;
196         } while (xform);
197
198         return NULL;
199 }
200
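/*
 * Parse the cipher xform from the chain, validate the key length for the
 * selected algorithm, record the IV offset/length and QAT cipher mode, set
 * up the OpenSSL ECB context used for DOCSIS BPI runt-block processing where
 * needed, and finally build the cipher part of the content descriptor.
 */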
201 int
202 qat_sym_session_configure_cipher(struct rte_cryptodev *dev,
203                 struct rte_crypto_sym_xform *xform,
204                 struct qat_sym_session *session)
205 {
206         struct qat_sym_dev_private *internals = dev->data->dev_private;
207         struct rte_crypto_cipher_xform *cipher_xform = NULL;
208         int ret;
209
210         /* Get cipher xform from crypto xform chain */
211         cipher_xform = qat_get_cipher_xform(xform);
212
213         session->cipher_iv.offset = cipher_xform->iv.offset;
214         session->cipher_iv.length = cipher_xform->iv.length;
215
216         switch (cipher_xform->algo) {
217         case RTE_CRYPTO_CIPHER_AES_CBC:
218                 if (qat_sym_validate_aes_key(cipher_xform->key.length,
219                                 &session->qat_cipher_alg) != 0) {
220                         QAT_LOG(ERR, "Invalid AES cipher key size");
221                         ret = -EINVAL;
222                         goto error_out;
223                 }
224                 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
225                 break;
226         case RTE_CRYPTO_CIPHER_AES_CTR:
227                 if (qat_sym_validate_aes_key(cipher_xform->key.length,
228                                 &session->qat_cipher_alg) != 0) {
229                         QAT_LOG(ERR, "Invalid AES cipher key size");
230                         ret = -EINVAL;
231                         goto error_out;
232                 }
233                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
234                 break;
235         case RTE_CRYPTO_CIPHER_SNOW3G_UEA2:
236                 if (qat_sym_validate_snow3g_key(cipher_xform->key.length,
237                                         &session->qat_cipher_alg) != 0) {
238                         QAT_LOG(ERR, "Invalid SNOW 3G cipher key size");
239                         ret = -EINVAL;
240                         goto error_out;
241                 }
242                 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
243                 break;
244         case RTE_CRYPTO_CIPHER_NULL:
245                 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
246                 break;
247         case RTE_CRYPTO_CIPHER_KASUMI_F8:
248                 if (qat_sym_validate_kasumi_key(cipher_xform->key.length,
249                                         &session->qat_cipher_alg) != 0) {
250                         QAT_LOG(ERR, "Invalid KASUMI cipher key size");
251                         ret = -EINVAL;
252                         goto error_out;
253                 }
254                 session->qat_mode = ICP_QAT_HW_CIPHER_F8_MODE;
255                 break;
256         case RTE_CRYPTO_CIPHER_3DES_CBC:
257                 if (qat_sym_validate_3des_key(cipher_xform->key.length,
258                                 &session->qat_cipher_alg) != 0) {
259                         QAT_LOG(ERR, "Invalid 3DES cipher key size");
260                         ret = -EINVAL;
261                         goto error_out;
262                 }
263                 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
264                 break;
265         case RTE_CRYPTO_CIPHER_DES_CBC:
266                 if (qat_sym_validate_des_key(cipher_xform->key.length,
267                                 &session->qat_cipher_alg) != 0) {
268                         QAT_LOG(ERR, "Invalid DES cipher key size");
269                         ret = -EINVAL;
270                         goto error_out;
271                 }
272                 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
273                 break;
274         case RTE_CRYPTO_CIPHER_3DES_CTR:
275                 if (qat_sym_validate_3des_key(cipher_xform->key.length,
276                                 &session->qat_cipher_alg) != 0) {
277                         QAT_LOG(ERR, "Invalid 3DES cipher key size");
278                         ret = -EINVAL;
279                         goto error_out;
280                 }
281                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
282                 break;
283         case RTE_CRYPTO_CIPHER_DES_DOCSISBPI:
284                 ret = bpi_cipher_ctx_init(
285                                         cipher_xform->algo,
286                                         cipher_xform->op,
287                                         cipher_xform->key.data,
288                                         &session->bpi_ctx);
289                 if (ret != 0) {
290                         QAT_LOG(ERR, "failed to create DES BPI ctx");
291                         goto error_out;
292                 }
293                 if (qat_sym_validate_des_key(cipher_xform->key.length,
294                                 &session->qat_cipher_alg) != 0) {
295                         QAT_LOG(ERR, "Invalid DES cipher key size");
296                         ret = -EINVAL;
297                         goto error_out;
298                 }
299                 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
300                 break;
301         case RTE_CRYPTO_CIPHER_AES_DOCSISBPI:
302                 ret = bpi_cipher_ctx_init(
303                                         cipher_xform->algo,
304                                         cipher_xform->op,
305                                         cipher_xform->key.data,
306                                         &session->bpi_ctx);
307                 if (ret != 0) {
308                         QAT_LOG(ERR, "failed to create AES BPI ctx");
309                         goto error_out;
310                 }
311                 if (qat_sym_validate_aes_docsisbpi_key(cipher_xform->key.length,
312                                 &session->qat_cipher_alg) != 0) {
313                         QAT_LOG(ERR, "Invalid AES DOCSISBPI key size");
314                         ret = -EINVAL;
315                         goto error_out;
316                 }
317                 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
318                 break;
319         case RTE_CRYPTO_CIPHER_ZUC_EEA3:
320                 if (!qat_is_cipher_alg_supported(
321                         cipher_xform->algo, internals)) {
322                         QAT_LOG(ERR, "%s not supported on this device",
323                                 rte_crypto_cipher_algorithm_strings
324                                         [cipher_xform->algo]);
325                         ret = -ENOTSUP;
326                         goto error_out;
327                 }
328                 if (qat_sym_validate_zuc_key(cipher_xform->key.length,
329                                 &session->qat_cipher_alg) != 0) {
330                         QAT_LOG(ERR, "Invalid ZUC cipher key size");
331                         ret = -EINVAL;
332                         goto error_out;
333                 }
334                 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
335                 break;
336         case RTE_CRYPTO_CIPHER_AES_XTS:
337                 if ((cipher_xform->key.length/2) == ICP_QAT_HW_AES_192_KEY_SZ) {
338                         QAT_LOG(ERR, "AES-XTS-192 not supported");
339                         ret = -EINVAL;
340                         goto error_out;
341                 }
342                 if (qat_sym_validate_aes_key((cipher_xform->key.length/2),
343                                 &session->qat_cipher_alg) != 0) {
344                         QAT_LOG(ERR, "Invalid AES-XTS cipher key size");
345                         ret = -EINVAL;
346                         goto error_out;
347                 }
348                 session->qat_mode = ICP_QAT_HW_CIPHER_XTS_MODE;
349                 break;
350         case RTE_CRYPTO_CIPHER_3DES_ECB:
351         case RTE_CRYPTO_CIPHER_AES_ECB:
352         case RTE_CRYPTO_CIPHER_AES_F8:
353         case RTE_CRYPTO_CIPHER_ARC4:
354                 QAT_LOG(ERR, "Crypto QAT PMD: Unsupported Cipher alg %u",
355                                 cipher_xform->algo);
356                 ret = -ENOTSUP;
357                 goto error_out;
358         default:
359                 QAT_LOG(ERR, "Crypto: Undefined Cipher specified %u\n",
360                                 cipher_xform->algo);
361                 ret = -EINVAL;
362                 goto error_out;
363         }
364
365         if (cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
366                 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
367         else
368                 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
369
370         if (qat_sym_session_aead_create_cd_cipher(session,
371                                                 cipher_xform->key.data,
372                                                 cipher_xform->key.length)) {
373                 ret = -EINVAL;
374                 goto error_out;
375         }
376
377         return 0;
378
379 error_out:
380         if (session->bpi_ctx) {
381                 bpi_cipher_ctx_free(session->bpi_ctx);
382                 session->bpi_ctx = NULL;
383         }
384         return ret;
385 }
386
387 int
388 qat_sym_session_configure(struct rte_cryptodev *dev,
389                 struct rte_crypto_sym_xform *xform,
390                 struct rte_cryptodev_sym_session *sess,
391                 struct rte_mempool *mempool)
392 {
393         void *sess_private_data;
394         int ret;
395
396         if (rte_mempool_get(mempool, &sess_private_data)) {
397                 CDEV_LOG_ERR(
398                         "Couldn't get object from session mempool");
399                 return -ENOMEM;
400         }
401
402         ret = qat_sym_session_set_parameters(dev, xform, sess_private_data);
403         if (ret != 0) {
404                 QAT_LOG(ERR,
405                     "Crypto QAT PMD: failed to configure session parameters");
406
407                 /* Return session to mempool */
408                 rte_mempool_put(mempool, sess_private_data);
409                 return ret;
410         }
411
412         set_sym_session_private_data(sess, dev->driver_id,
413                 sess_private_data);
414
415         return 0;
416 }
417
418 int
419 qat_sym_session_set_parameters(struct rte_cryptodev *dev,
420                 struct rte_crypto_sym_xform *xform, void *session_private)
421 {
422         struct qat_sym_session *session = session_private;
423         int ret;
424         int qat_cmd_id;
425
426         /* Set context descriptor physical address */
427         session->cd_paddr = rte_mempool_virt2iova(session) +
428                         offsetof(struct qat_sym_session, cd);
429
430         session->min_qat_dev_gen = QAT_GEN1;
431
432         /* Get requested QAT command id */
433         qat_cmd_id = qat_get_cmd_id(xform);
434         if (qat_cmd_id < 0 || qat_cmd_id >= ICP_QAT_FW_LA_CMD_DELIMITER) {
435                 QAT_LOG(ERR, "Unsupported xform chain requested");
436                 return -ENOTSUP;
437         }
438         session->qat_cmd = (enum icp_qat_fw_la_cmd_id)qat_cmd_id;
439         switch (session->qat_cmd) {
440         case ICP_QAT_FW_LA_CMD_CIPHER:
441                 ret = qat_sym_session_configure_cipher(dev, xform, session);
442                 if (ret < 0)
443                         return ret;
444                 break;
445         case ICP_QAT_FW_LA_CMD_AUTH:
446                 ret = qat_sym_session_configure_auth(dev, xform, session);
447                 if (ret < 0)
448                         return ret;
449                 break;
450         case ICP_QAT_FW_LA_CMD_CIPHER_HASH:
451                 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
452                         ret = qat_sym_session_configure_aead(xform,
453                                         session);
454                         if (ret < 0)
455                                 return ret;
456                 } else {
457                         ret = qat_sym_session_configure_cipher(dev,
458                                         xform, session);
459                         if (ret < 0)
460                                 return ret;
461                         ret = qat_sym_session_configure_auth(dev,
462                                         xform, session);
463                         if (ret < 0)
464                                 return ret;
465                 }
466                 break;
467         case ICP_QAT_FW_LA_CMD_HASH_CIPHER:
468                 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
469                         ret = qat_sym_session_configure_aead(xform,
470                                         session);
471                         if (ret < 0)
472                                 return ret;
473                 } else {
474                         ret = qat_sym_session_configure_auth(dev,
475                                         xform, session);
476                         if (ret < 0)
477                                 return ret;
478                         ret = qat_sym_session_configure_cipher(dev,
479                                         xform, session);
480                         if (ret < 0)
481                                 return ret;
482                 }
483                 break;
484         case ICP_QAT_FW_LA_CMD_TRNG_GET_RANDOM:
485         case ICP_QAT_FW_LA_CMD_TRNG_TEST:
486         case ICP_QAT_FW_LA_CMD_SSL3_KEY_DERIVE:
487         case ICP_QAT_FW_LA_CMD_TLS_V1_1_KEY_DERIVE:
488         case ICP_QAT_FW_LA_CMD_TLS_V1_2_KEY_DERIVE:
489         case ICP_QAT_FW_LA_CMD_MGF1:
490         case ICP_QAT_FW_LA_CMD_AUTH_PRE_COMP:
491         case ICP_QAT_FW_LA_CMD_CIPHER_PRE_COMP:
492         case ICP_QAT_FW_LA_CMD_DELIMITER:
493                 QAT_LOG(ERR, "Unsupported Service %u",
494                                 session->qat_cmd);
495                 return -ENOTSUP;
496         default:
497                 QAT_LOG(ERR, "Unsupported Service %u",
498                                 session->qat_cmd);
499                 return -ENOTSUP;
500         }
501
502         return 0;
503 }
504
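/*
 * Map the auth xform onto a QAT hash algorithm and build the auth part of
 * the content descriptor. AES-GMAC is a special case: it is set up as a
 * GCM cipher-plus-hash descriptor keyed with the auth key, after which the
 * session command is restored to authentication only.
 */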
505 int
506 qat_sym_session_configure_auth(struct rte_cryptodev *dev,
507                                 struct rte_crypto_sym_xform *xform,
508                                 struct qat_sym_session *session)
509 {
510         struct rte_crypto_auth_xform *auth_xform = qat_get_auth_xform(xform);
511         struct qat_sym_dev_private *internals = dev->data->dev_private;
512         uint8_t *key_data = auth_xform->key.data;
513         uint8_t key_length = auth_xform->key.length;
514         session->aes_cmac = 0;
515
516         switch (auth_xform->algo) {
517         case RTE_CRYPTO_AUTH_SHA1_HMAC:
518                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA1;
519                 break;
520         case RTE_CRYPTO_AUTH_SHA224_HMAC:
521                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA224;
522                 break;
523         case RTE_CRYPTO_AUTH_SHA256_HMAC:
524                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA256;
525                 break;
526         case RTE_CRYPTO_AUTH_SHA384_HMAC:
527                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA384;
528                 break;
529         case RTE_CRYPTO_AUTH_SHA512_HMAC:
530                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA512;
531                 break;
532         case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
533                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
534                 break;
535         case RTE_CRYPTO_AUTH_AES_CMAC:
536                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
537                 session->aes_cmac = 1;
538                 break;
539         case RTE_CRYPTO_AUTH_AES_GMAC:
540                 if (qat_sym_validate_aes_key(auth_xform->key.length,
541                                 &session->qat_cipher_alg) != 0) {
542                         QAT_LOG(ERR, "Invalid AES key size");
543                         return -EINVAL;
544                 }
545                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
546                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
547
548                 break;
549         case RTE_CRYPTO_AUTH_SNOW3G_UIA2:
550                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2;
551                 break;
552         case RTE_CRYPTO_AUTH_MD5_HMAC:
553                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_MD5;
554                 break;
555         case RTE_CRYPTO_AUTH_NULL:
556                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_NULL;
557                 break;
558         case RTE_CRYPTO_AUTH_KASUMI_F9:
559                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_KASUMI_F9;
560                 break;
561         case RTE_CRYPTO_AUTH_ZUC_EIA3:
562                 if (!qat_is_auth_alg_supported(auth_xform->algo, internals)) {
563                         QAT_LOG(ERR, "%s not supported on this device",
564                                 rte_crypto_auth_algorithm_strings
565                                 [auth_xform->algo]);
566                         return -ENOTSUP;
567                 }
568                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3;
569                 break;
570         case RTE_CRYPTO_AUTH_SHA1:
571         case RTE_CRYPTO_AUTH_SHA256:
572         case RTE_CRYPTO_AUTH_SHA512:
573         case RTE_CRYPTO_AUTH_SHA224:
574         case RTE_CRYPTO_AUTH_SHA384:
575         case RTE_CRYPTO_AUTH_MD5:
576         case RTE_CRYPTO_AUTH_AES_CBC_MAC:
577                 QAT_LOG(ERR, "Crypto: Unsupported hash alg %u",
578                                 auth_xform->algo);
579                 return -ENOTSUP;
580         default:
581                 QAT_LOG(ERR, "Crypto: Undefined Hash algo %u specified",
582                                 auth_xform->algo);
583                 return -EINVAL;
584         }
585
586         session->auth_iv.offset = auth_xform->iv.offset;
587         session->auth_iv.length = auth_xform->iv.length;
588
589         if (auth_xform->algo == RTE_CRYPTO_AUTH_AES_GMAC) {
590                 if (auth_xform->op == RTE_CRYPTO_AUTH_OP_GENERATE) {
591                         session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER_HASH;
592                         session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
593                         /*
594                          * The cipher descriptor content must be created
595                          * first, then the authentication content.
596                          */
597
598                         if (qat_sym_session_aead_create_cd_cipher(session,
599                                                 auth_xform->key.data,
600                                                 auth_xform->key.length))
601                                 return -EINVAL;
602
603                         if (qat_sym_session_aead_create_cd_auth(session,
604                                                 key_data,
605                                                 key_length,
606                                                 0,
607                                                 auth_xform->digest_length,
608                                                 auth_xform->op))
609                                 return -EINVAL;
610                 } else {
611                         session->qat_cmd = ICP_QAT_FW_LA_CMD_HASH_CIPHER;
612                         session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
613                         /*
614                          * The authentication descriptor content must be
615                          * created first, then the cipher content.
616                          */
617
618                         if (qat_sym_session_aead_create_cd_auth(session,
619                                         key_data,
620                                         key_length,
621                                         0,
622                                         auth_xform->digest_length,
623                                         auth_xform->op))
624                                 return -EINVAL;
625
626                         if (qat_sym_session_aead_create_cd_cipher(session,
627                                                 auth_xform->key.data,
628                                                 auth_xform->key.length))
629                                 return -EINVAL;
630                 }
631                 /* Restore to authentication only */
632                 session->qat_cmd = ICP_QAT_FW_LA_CMD_AUTH;
633         } else {
634                 if (qat_sym_session_aead_create_cd_auth(session,
635                                 key_data,
636                                 key_length,
637                                 0,
638                                 auth_xform->digest_length,
639                                 auth_xform->op))
640                         return -EINVAL;
641         }
642
643         session->digest_length = auth_xform->digest_length;
644         return 0;
645 }
646
647 int
648 qat_sym_session_configure_aead(struct rte_crypto_sym_xform *xform,
649                                 struct qat_sym_session *session)
650 {
651         struct rte_crypto_aead_xform *aead_xform = &xform->aead;
652         enum rte_crypto_auth_operation crypto_operation;
653
654         /*
655          * Store AEAD IV parameters as cipher IV,
656          * to avoid unnecessary memory usage
657          */
658         session->cipher_iv.offset = xform->aead.iv.offset;
659         session->cipher_iv.length = xform->aead.iv.length;
660
661         switch (aead_xform->algo) {
662         case RTE_CRYPTO_AEAD_AES_GCM:
663                 if (qat_sym_validate_aes_key(aead_xform->key.length,
664                                 &session->qat_cipher_alg) != 0) {
665                         QAT_LOG(ERR, "Invalid AES key size");
666                         return -EINVAL;
667                 }
668                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
669                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
670                 break;
671         case RTE_CRYPTO_AEAD_AES_CCM:
672                 if (qat_sym_validate_aes_key(aead_xform->key.length,
673                                 &session->qat_cipher_alg) != 0) {
674                         QAT_LOG(ERR, "Invalid AES key size");
675                         return -EINVAL;
676                 }
677                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
678                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC;
679                 break;
680         default:
681                 QAT_LOG(ERR, "Crypto: Undefined AEAD specified %u\n",
682                                 aead_xform->algo);
683                 return -EINVAL;
684         }
685
686         if ((aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT &&
687                         aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) ||
688                         (aead_xform->op == RTE_CRYPTO_AEAD_OP_DECRYPT &&
689                         aead_xform->algo == RTE_CRYPTO_AEAD_AES_CCM)) {
690                 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
691                 /*
692                  * The cipher descriptor content must be created first,
693                  * then the authentication content.
694                  */
695                 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
696                         RTE_CRYPTO_AUTH_OP_GENERATE : RTE_CRYPTO_AUTH_OP_VERIFY;
697
698                 if (qat_sym_session_aead_create_cd_cipher(session,
699                                         aead_xform->key.data,
700                                         aead_xform->key.length))
701                         return -EINVAL;
702
703                 if (qat_sym_session_aead_create_cd_auth(session,
704                                         aead_xform->key.data,
705                                         aead_xform->key.length,
706                                         aead_xform->aad_length,
707                                         aead_xform->digest_length,
708                                         crypto_operation))
709                         return -EINVAL;
710         } else {
711                 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
712                 /*
713                  * The authentication descriptor content must be created
714                  * first, then the cipher content.
715                  */
716
717                 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
718                         RTE_CRYPTO_AUTH_OP_VERIFY : RTE_CRYPTO_AUTH_OP_GENERATE;
719
720                 if (qat_sym_session_aead_create_cd_auth(session,
721                                         aead_xform->key.data,
722                                         aead_xform->key.length,
723                                         aead_xform->aad_length,
724                                         aead_xform->digest_length,
725                                         crypto_operation))
726                         return -EINVAL;
727
728                 if (qat_sym_session_aead_create_cd_cipher(session,
729                                         aead_xform->key.data,
730                                         aead_xform->key.length))
731                         return -EINVAL;
732         }
733
734         session->digest_length = aead_xform->digest_length;
735         return 0;
736 }
737
738 unsigned int qat_sym_session_get_private_size(
739                 struct rte_cryptodev *dev __rte_unused)
740 {
741         return RTE_ALIGN_CEIL(sizeof(struct qat_sym_session), 8);
742 }
743
744 /* returns block size in bytes per cipher algo */
745 int qat_cipher_get_block_size(enum icp_qat_hw_cipher_algo qat_cipher_alg)
746 {
747         switch (qat_cipher_alg) {
748         case ICP_QAT_HW_CIPHER_ALGO_DES:
749                 return ICP_QAT_HW_DES_BLK_SZ;
750         case ICP_QAT_HW_CIPHER_ALGO_3DES:
751                 return ICP_QAT_HW_3DES_BLK_SZ;
752         case ICP_QAT_HW_CIPHER_ALGO_AES128:
753         case ICP_QAT_HW_CIPHER_ALGO_AES192:
754         case ICP_QAT_HW_CIPHER_ALGO_AES256:
755                 return ICP_QAT_HW_AES_BLK_SZ;
756         default:
757                 QAT_LOG(ERR, "invalid block cipher alg %u", qat_cipher_alg);
758                 return -EFAULT;
759         };
760         return -EFAULT;
761 }
762
763 /*
764  * Returns size in bytes per hash algo for state1 size field in cd_ctrl
765  * This is digest size rounded up to nearest quadword
766  */
767 static int qat_hash_get_state1_size(enum icp_qat_hw_auth_algo qat_hash_alg)
768 {
769         switch (qat_hash_alg) {
770         case ICP_QAT_HW_AUTH_ALGO_SHA1:
771                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA1_STATE1_SZ,
772                                                 QAT_HW_DEFAULT_ALIGNMENT);
773         case ICP_QAT_HW_AUTH_ALGO_SHA224:
774                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA224_STATE1_SZ,
775                                                 QAT_HW_DEFAULT_ALIGNMENT);
776         case ICP_QAT_HW_AUTH_ALGO_SHA256:
777                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA256_STATE1_SZ,
778                                                 QAT_HW_DEFAULT_ALIGNMENT);
779         case ICP_QAT_HW_AUTH_ALGO_SHA384:
780                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA384_STATE1_SZ,
781                                                 QAT_HW_DEFAULT_ALIGNMENT);
782         case ICP_QAT_HW_AUTH_ALGO_SHA512:
783                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
784                                                 QAT_HW_DEFAULT_ALIGNMENT);
785         case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
786                 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ,
787                                                 QAT_HW_DEFAULT_ALIGNMENT);
788         case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
789         case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
790                 return QAT_HW_ROUND_UP(ICP_QAT_HW_GALOIS_128_STATE1_SZ,
791                                                 QAT_HW_DEFAULT_ALIGNMENT);
792         case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
793                 return QAT_HW_ROUND_UP(ICP_QAT_HW_ZUC_3G_EIA3_STATE1_SZ,
794                                                 QAT_HW_DEFAULT_ALIGNMENT);
795         case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
796                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SNOW_3G_UIA2_STATE1_SZ,
797                                                 QAT_HW_DEFAULT_ALIGNMENT);
798         case ICP_QAT_HW_AUTH_ALGO_MD5:
799                 return QAT_HW_ROUND_UP(ICP_QAT_HW_MD5_STATE1_SZ,
800                                                 QAT_HW_DEFAULT_ALIGNMENT);
801         case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
802                 return QAT_HW_ROUND_UP(ICP_QAT_HW_KASUMI_F9_STATE1_SZ,
803                                                 QAT_HW_DEFAULT_ALIGNMENT);
804         case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
805                 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_CBC_MAC_STATE1_SZ,
806                                                 QAT_HW_DEFAULT_ALIGNMENT);
807         case ICP_QAT_HW_AUTH_ALGO_NULL:
808                 return QAT_HW_ROUND_UP(ICP_QAT_HW_NULL_STATE1_SZ,
809                                                 QAT_HW_DEFAULT_ALIGNMENT);
810         case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
811                 /* return maximum state1 size in this case */
812                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
813                                                 QAT_HW_DEFAULT_ALIGNMENT);
814         default:
815                 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
816                 return -EFAULT;
817         };
818         return -EFAULT;
819 }
820
821 /* returns digest size in bytes per hash algo */
822 static int qat_hash_get_digest_size(enum icp_qat_hw_auth_algo qat_hash_alg)
823 {
824         switch (qat_hash_alg) {
825         case ICP_QAT_HW_AUTH_ALGO_SHA1:
826                 return ICP_QAT_HW_SHA1_STATE1_SZ;
827         case ICP_QAT_HW_AUTH_ALGO_SHA224:
828                 return ICP_QAT_HW_SHA224_STATE1_SZ;
829         case ICP_QAT_HW_AUTH_ALGO_SHA256:
830                 return ICP_QAT_HW_SHA256_STATE1_SZ;
831         case ICP_QAT_HW_AUTH_ALGO_SHA384:
832                 return ICP_QAT_HW_SHA384_STATE1_SZ;
833         case ICP_QAT_HW_AUTH_ALGO_SHA512:
834                 return ICP_QAT_HW_SHA512_STATE1_SZ;
835         case ICP_QAT_HW_AUTH_ALGO_MD5:
836                 return ICP_QAT_HW_MD5_STATE1_SZ;
837         case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
838                 return ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
839         case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
840                 /* return maximum digest size in this case */
841                 return ICP_QAT_HW_SHA512_STATE1_SZ;
842         default:
843                 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
844                 return -EFAULT;
845         };
846         return -EFAULT;
847 }
848
849 /* returns block size in bytes per hash algo */
850 static int qat_hash_get_block_size(enum icp_qat_hw_auth_algo qat_hash_alg)
851 {
852         switch (qat_hash_alg) {
853         case ICP_QAT_HW_AUTH_ALGO_SHA1:
854                 return SHA_CBLOCK;
855         case ICP_QAT_HW_AUTH_ALGO_SHA224:
856                 return SHA256_CBLOCK;
857         case ICP_QAT_HW_AUTH_ALGO_SHA256:
858                 return SHA256_CBLOCK;
859         case ICP_QAT_HW_AUTH_ALGO_SHA384:
860                 return SHA512_CBLOCK;
861         case ICP_QAT_HW_AUTH_ALGO_SHA512:
862                 return SHA512_CBLOCK;
863         case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
864                 return 16;
865         case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
866                 return ICP_QAT_HW_AES_BLK_SZ;
867         case ICP_QAT_HW_AUTH_ALGO_MD5:
868                 return MD5_CBLOCK;
869         case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
870                 /* return maximum block size in this case */
871                 return SHA512_CBLOCK;
872         default:
873                 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
874                 return -EFAULT;
875         };
876         return -EFAULT;
877 }
878
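/*
 * The partial_hash_* helpers below run a single compression-function round
 * (e.g. SHA1_Transform) over one block of input and copy out the resulting
 * internal hash state. partial_hash_compute() then converts that state to
 * the big-endian layout expected by the QAT hardware (MD5 state is copied
 * as-is).
 */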
879 static int partial_hash_sha1(uint8_t *data_in, uint8_t *data_out)
880 {
881         SHA_CTX ctx;
882
883         if (!SHA1_Init(&ctx))
884                 return -EFAULT;
885         SHA1_Transform(&ctx, data_in);
886         rte_memcpy(data_out, &ctx, SHA_DIGEST_LENGTH);
887         return 0;
888 }
889
890 static int partial_hash_sha224(uint8_t *data_in, uint8_t *data_out)
891 {
892         SHA256_CTX ctx;
893
894         if (!SHA224_Init(&ctx))
895                 return -EFAULT;
896         SHA256_Transform(&ctx, data_in);
897         rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
898         return 0;
899 }
900
901 static int partial_hash_sha256(uint8_t *data_in, uint8_t *data_out)
902 {
903         SHA256_CTX ctx;
904
905         if (!SHA256_Init(&ctx))
906                 return -EFAULT;
907         SHA256_Transform(&ctx, data_in);
908         rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
909         return 0;
910 }
911
912 static int partial_hash_sha384(uint8_t *data_in, uint8_t *data_out)
913 {
914         SHA512_CTX ctx;
915
916         if (!SHA384_Init(&ctx))
917                 return -EFAULT;
918         SHA512_Transform(&ctx, data_in);
919         rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
920         return 0;
921 }
922
923 static int partial_hash_sha512(uint8_t *data_in, uint8_t *data_out)
924 {
925         SHA512_CTX ctx;
926
927         if (!SHA512_Init(&ctx))
928                 return -EFAULT;
929         SHA512_Transform(&ctx, data_in);
930         rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
931         return 0;
932 }
933
934 static int partial_hash_md5(uint8_t *data_in, uint8_t *data_out)
935 {
936         MD5_CTX ctx;
937
938         if (!MD5_Init(&ctx))
939                 return -EFAULT;
940         MD5_Transform(&ctx, data_in);
941         rte_memcpy(data_out, &ctx, MD5_DIGEST_LENGTH);
942
943         return 0;
944 }
945
946 static int partial_hash_compute(enum icp_qat_hw_auth_algo hash_alg,
947                         uint8_t *data_in,
948                         uint8_t *data_out)
949 {
950         int digest_size;
951         uint8_t digest[qat_hash_get_digest_size(
952                         ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
953         uint32_t *hash_state_out_be32;
954         uint64_t *hash_state_out_be64;
955         int i;
956
957         digest_size = qat_hash_get_digest_size(hash_alg);
958         if (digest_size <= 0)
959                 return -EFAULT;
960
961         hash_state_out_be32 = (uint32_t *)data_out;
962         hash_state_out_be64 = (uint64_t *)data_out;
963
964         switch (hash_alg) {
965         case ICP_QAT_HW_AUTH_ALGO_SHA1:
966                 if (partial_hash_sha1(data_in, digest))
967                         return -EFAULT;
968                 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
969                         *hash_state_out_be32 =
970                                 rte_bswap32(*(((uint32_t *)digest)+i));
971                 break;
972         case ICP_QAT_HW_AUTH_ALGO_SHA224:
973                 if (partial_hash_sha224(data_in, digest))
974                         return -EFAULT;
975                 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
976                         *hash_state_out_be32 =
977                                 rte_bswap32(*(((uint32_t *)digest)+i));
978                 break;
979         case ICP_QAT_HW_AUTH_ALGO_SHA256:
980                 if (partial_hash_sha256(data_in, digest))
981                         return -EFAULT;
982                 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
983                         *hash_state_out_be32 =
984                                 rte_bswap32(*(((uint32_t *)digest)+i));
985                 break;
986         case ICP_QAT_HW_AUTH_ALGO_SHA384:
987                 if (partial_hash_sha384(data_in, digest))
988                         return -EFAULT;
989                 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
990                         *hash_state_out_be64 =
991                                 rte_bswap64(*(((uint64_t *)digest)+i));
992                 break;
993         case ICP_QAT_HW_AUTH_ALGO_SHA512:
994                 if (partial_hash_sha512(data_in, digest))
995                         return -EFAULT;
996                 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
997                         *hash_state_out_be64 =
998                                 rte_bswap64(*(((uint64_t *)digest)+i));
999                 break;
1000         case ICP_QAT_HW_AUTH_ALGO_MD5:
1001                 if (partial_hash_md5(data_in, data_out))
1002                         return -EFAULT;
1003                 break;
1004         default:
1005                 QAT_LOG(ERR, "invalid hash alg %u", hash_alg);
1006                 return -EFAULT;
1007         }
1008
1009         return 0;
1010 }
1011 #define HMAC_IPAD_VALUE 0x36
1012 #define HMAC_OPAD_VALUE 0x5c
1013 #define HASH_XCBC_PRECOMP_KEY_NUM 3
1014
1015 static const uint8_t AES_CMAC_SEED[ICP_QAT_HW_AES_128_KEY_SZ];
1016
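/*
 * AES-CMAC subkey derivation (NIST SP 800-38B / RFC 4493): left-shift the
 * 128-bit input block by one bit and, if its most significant bit was set,
 * XOR the last byte with the Rb constant.
 */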
1017 static void aes_cmac_key_derive(uint8_t *base, uint8_t *derived)
1018 {
1019         int i;
1020
1021         derived[0] = base[0] << 1;
1022         for (i = 1; i < ICP_QAT_HW_AES_BLK_SZ ; i++) {
1023                 derived[i] = base[i] << 1;
1024                 derived[i - 1] |= base[i] >> 7;
1025         }
1026
1027         if (base[0] & 0x80)
1028                 derived[ICP_QAT_HW_AES_BLK_SZ - 1] ^= QAT_AES_CMAC_CONST_RB;
1029 }
1030
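/*
 * Precompute the hash state material that the hardware needs in the content
 * descriptor: the derived subkeys for AES-CMAC/XCBC-MAC, the GHASH key H for
 * GCM/GMAC, or, for the HMAC algorithms, the partial hashes of
 * (key XOR ipad) and (key XOR opad) as defined in RFC 2104.
 */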
1031 static int qat_sym_do_precomputes(enum icp_qat_hw_auth_algo hash_alg,
1032                                 const uint8_t *auth_key,
1033                                 uint16_t auth_keylen,
1034                                 uint8_t *p_state_buf,
1035                                 uint16_t *p_state_len,
1036                                 uint8_t aes_cmac)
1037 {
1038         int block_size;
1039         uint8_t ipad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1040         uint8_t opad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1041         int i;
1042
1043         if (hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC) {
1044
1045                 /* CMAC */
1046                 if (aes_cmac) {
1047                         AES_KEY enc_key;
1048                         uint8_t *in = NULL;
1049                         uint8_t k0[ICP_QAT_HW_AES_128_KEY_SZ];
1050                         uint8_t *k1, *k2;
1051
1052                         auth_keylen = ICP_QAT_HW_AES_128_KEY_SZ;
1053
1054                         in = rte_zmalloc("AES CMAC K1",
1055                                          ICP_QAT_HW_AES_128_KEY_SZ, 16);
1056
1057                         if (in == NULL) {
1058                                 QAT_LOG(ERR, "Failed to alloc memory");
1059                                 return -ENOMEM;
1060                         }
1061
1062                         rte_memcpy(in, AES_CMAC_SEED,
1063                                    ICP_QAT_HW_AES_128_KEY_SZ);
1064                         rte_memcpy(p_state_buf, auth_key, auth_keylen);
1065
1066                         if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1067                                 &enc_key) != 0) {
1068                                 rte_free(in);
1069                                 return -EFAULT;
1070                         }
1071
1072                         AES_encrypt(in, k0, &enc_key);
1073
1074                         k1 = p_state_buf + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1075                         k2 = k1 + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1076
1077                         aes_cmac_key_derive(k0, k1);
1078                         aes_cmac_key_derive(k1, k2);
1079
1080                         memset(k0, 0, ICP_QAT_HW_AES_128_KEY_SZ);
1081                         *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
1082                         rte_free(in);
1083                         return 0;
1084                 } else {
1085                         static uint8_t qat_aes_xcbc_key_seed[
1086                                         ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ] = {
1087                                 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1088                                 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1089                                 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1090                                 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1091                                 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1092                                 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1093                         };
1094
1095                         uint8_t *in = NULL;
1096                         uint8_t *out = p_state_buf;
1097                         int x;
1098                         AES_KEY enc_key;
1099
1100                         in = rte_zmalloc("working mem for key",
1101                                         ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ, 16);
1102                         if (in == NULL) {
1103                                 QAT_LOG(ERR, "Failed to alloc memory");
1104                                 return -ENOMEM;
1105                         }
1106
1107                         rte_memcpy(in, qat_aes_xcbc_key_seed,
1108                                         ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1109                         for (x = 0; x < HASH_XCBC_PRECOMP_KEY_NUM; x++) {
1110                                 if (AES_set_encrypt_key(auth_key,
1111                                                         auth_keylen << 3,
1112                                                         &enc_key) != 0) {
1113                                         rte_free(in -
1114                                           (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ));
1115                                         memset(out -
1116                                            (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ),
1117                                           0, ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1118                                         return -EFAULT;
1119                                 }
1120                                 AES_encrypt(in, out, &enc_key);
1121                                 in += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1122                                 out += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1123                         }
1124                         *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
1125                         rte_free(in - x*ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ);
1126                         return 0;
1127                 }
1128
1129         } else if ((hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_128) ||
1130                 (hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_64)) {
1131                 uint8_t *in = NULL;
1132                 uint8_t *out = p_state_buf;
1133                 AES_KEY enc_key;
1134
1135                 memset(p_state_buf, 0, ICP_QAT_HW_GALOIS_H_SZ +
1136                                 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1137                                 ICP_QAT_HW_GALOIS_E_CTR0_SZ);
1138                 in = rte_zmalloc("working mem for key",
1139                                 ICP_QAT_HW_GALOIS_H_SZ, 16);
1140                 if (in == NULL) {
1141                         QAT_LOG(ERR, "Failed to alloc memory");
1142                         return -ENOMEM;
1143                 }
1144
1145                 memset(in, 0, ICP_QAT_HW_GALOIS_H_SZ);
1146                 if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1147                         &enc_key) != 0) {
                             rte_free(in); /* free the work buffer on error */
1148                         return -EFAULT;
1149                 }
1150                 AES_encrypt(in, out, &enc_key);
1151                 *p_state_len = ICP_QAT_HW_GALOIS_H_SZ +
1152                                 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1153                                 ICP_QAT_HW_GALOIS_E_CTR0_SZ;
1154                 rte_free(in);
1155                 return 0;
1156         }
1157
1158         block_size = qat_hash_get_block_size(hash_alg);
1159         if (block_size < 0)
1160                 return block_size;
1161         /* init ipad and opad from key and xor with fixed values */
1162         memset(ipad, 0, block_size);
1163         memset(opad, 0, block_size);
1164
1165         if (auth_keylen > (unsigned int)block_size) {
1166                 QAT_LOG(ERR, "invalid keylen %u", auth_keylen);
1167                 return -EFAULT;
1168         }
1169         rte_memcpy(ipad, auth_key, auth_keylen);
1170         rte_memcpy(opad, auth_key, auth_keylen);
1171
1172         for (i = 0; i < block_size; i++) {
1173                 uint8_t *ipad_ptr = ipad + i;
1174                 uint8_t *opad_ptr = opad + i;
1175                 *ipad_ptr ^= HMAC_IPAD_VALUE;
1176                 *opad_ptr ^= HMAC_OPAD_VALUE;
1177         }
1178
1179         /* do partial hash of ipad and copy to state1 */
1180         if (partial_hash_compute(hash_alg, ipad, p_state_buf)) {
1181                 memset(ipad, 0, block_size);
1182                 memset(opad, 0, block_size);
1183                 QAT_LOG(ERR, "ipad precompute failed");
1184                 return -EFAULT;
1185         }
1186
1187         /*
1188          * State len is a multiple of 8, so may be larger than the digest.
1189          * Put the partial hash of opad state_len bytes after state1
1190          */
1191         *p_state_len = qat_hash_get_state1_size(hash_alg);
1192         if (partial_hash_compute(hash_alg, opad, p_state_buf + *p_state_len)) {
1193                 memset(ipad, 0, block_size);
1194                 memset(opad, 0, block_size);
1195                 QAT_LOG(ERR, "opad precompute failed");
1196                 return -EFAULT;
1197         }
1198
1199         /*  don't leave data lying around */
1200         memset(ipad, 0, block_size);
1201         memset(opad, 0, block_size);
1202         return 0;
1203 }
1204
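/*
 * Fill the common LA request header used as the template for every request
 * on this session: flat buffers, 64-bit content descriptor addressing,
 * 16-byte IV field, no partial-packet processing, and the protocol flag
 * (CCM/GCM/SNOW 3G/ZUC) selected from proto_flags.
 */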
1205 static void
1206 qat_sym_session_init_common_hdr(struct icp_qat_fw_comn_req_hdr *header,
1207                 enum qat_sym_proto_flag proto_flags)
1208 {
1209         header->hdr_flags =
1210                 ICP_QAT_FW_COMN_HDR_FLAGS_BUILD(ICP_QAT_FW_COMN_REQ_FLAG_SET);
1211         header->service_type = ICP_QAT_FW_COMN_REQ_CPM_FW_LA;
1212         header->comn_req_flags =
1213                 ICP_QAT_FW_COMN_FLAGS_BUILD(QAT_COMN_CD_FLD_TYPE_64BIT_ADR,
1214                                         QAT_COMN_PTR_TYPE_FLAT);
1215         ICP_QAT_FW_LA_PARTIAL_SET(header->serv_specif_flags,
1216                                   ICP_QAT_FW_LA_PARTIAL_NONE);
1217         ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(header->serv_specif_flags,
1218                                            ICP_QAT_FW_CIPH_IV_16BYTE_DATA);
1219
1220         switch (proto_flags) {
1221         case QAT_CRYPTO_PROTO_FLAG_NONE:
1222                 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1223                                         ICP_QAT_FW_LA_NO_PROTO);
1224                 break;
1225         case QAT_CRYPTO_PROTO_FLAG_CCM:
1226                 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1227                                         ICP_QAT_FW_LA_CCM_PROTO);
1228                 break;
1229         case QAT_CRYPTO_PROTO_FLAG_GCM:
1230                 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1231                                         ICP_QAT_FW_LA_GCM_PROTO);
1232                 break;
1233         case QAT_CRYPTO_PROTO_FLAG_SNOW3G:
1234                 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1235                                         ICP_QAT_FW_LA_SNOW_3G_PROTO);
1236                 break;
1237         case QAT_CRYPTO_PROTO_FLAG_ZUC:
1238                 ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(header->serv_specif_flags,
1239                         ICP_QAT_FW_LA_ZUC_3G_PROTO);
1240                 break;
1241         }
1242
1243         ICP_QAT_FW_LA_UPDATE_STATE_SET(header->serv_specif_flags,
1244                                            ICP_QAT_FW_LA_NO_UPDATE_STATE);
1245         ICP_QAT_FW_LA_DIGEST_IN_BUFFER_SET(header->serv_specif_flags,
1246                                         ICP_QAT_FW_LA_NO_DIGEST_IN_BUFFER);
1247 }
1248
1249 /*
1250  *      Snow3G and ZUC should never use this function; they set their
1251  *      protocol flag directly in the cipher and auth parts of the
1252  *      content descriptor building functions.
1253  */
1254 static enum qat_sym_proto_flag
1255 qat_get_crypto_proto_flag(uint16_t flags)
1256 {
1257         int proto = ICP_QAT_FW_LA_PROTO_GET(flags);
1258         enum qat_sym_proto_flag qat_proto_flag =
1259                         QAT_CRYPTO_PROTO_FLAG_NONE;
1260
1261         switch (proto) {
1262         case ICP_QAT_FW_LA_GCM_PROTO:
1263                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
1264                 break;
1265         case ICP_QAT_FW_LA_CCM_PROTO:
1266                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
1267                 break;
1268         }
1269
1270         return qat_proto_flag;
1271 }
1272
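/*
 * Build the cipher section of the content descriptor: set the slice chain
 * order in the request template, decide whether the hardware must convert
 * (reverse-expand) the cipher key based on mode and direction, and copy the
 * key material and mode parameters into the descriptor.
 */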
1273 int qat_sym_session_aead_create_cd_cipher(struct qat_sym_session *cdesc,
1274                                                 uint8_t *cipherkey,
1275                                                 uint32_t cipherkeylen)
1276 {
1277         struct icp_qat_hw_cipher_algo_blk *cipher;
1278         struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
1279         struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
1280         struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
1281         void *ptr = &req_tmpl->cd_ctrl;
1282         struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
1283         struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
1284         enum icp_qat_hw_cipher_convert key_convert;
1285         enum qat_sym_proto_flag qat_proto_flag =
1286                 QAT_CRYPTO_PROTO_FLAG_NONE;
1287         uint32_t total_key_size;
1288         uint16_t cipher_offset, cd_size;
1289         uint32_t wordIndex  = 0;
1290         uint32_t *temp_key = NULL;
1291
1292         if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER) {
1293                 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1294                 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1295                                         ICP_QAT_FW_SLICE_CIPHER);
1296                 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1297                                         ICP_QAT_FW_SLICE_DRAM_WR);
1298                 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1299                                         ICP_QAT_FW_LA_NO_RET_AUTH_RES);
1300                 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1301                                         ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
1302                 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1303         } else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
1304                 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1305                 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1306                                         ICP_QAT_FW_SLICE_CIPHER);
1307                 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1308                                         ICP_QAT_FW_SLICE_AUTH);
1309                 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1310                                         ICP_QAT_FW_SLICE_AUTH);
1311                 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1312                                         ICP_QAT_FW_SLICE_DRAM_WR);
1313                 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1314         } else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
1315                 QAT_LOG(ERR, "Invalid param, must be a cipher command.");
1316                 return -EFAULT;
1317         }
1318
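        /*
         * Editorial note (assumption, not stated in the original sources):
         * key_convert selects whether the hardware transforms the supplied
         * key before use (e.g. derives a decrypt key schedule), so its value
         * depends on the cipher mode, algorithm and direction chosen below.
         */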
1319         if (cdesc->qat_mode == ICP_QAT_HW_CIPHER_CTR_MODE) {
1320                 /*
1321                  * CTR streaming ciphers are a special case: decrypt == encrypt.
1322                  * Override the default values set previously.
1323                  */
1324                 cdesc->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
1325                 key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
1326         } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2
1327                 || cdesc->qat_cipher_alg ==
1328                         ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)
1329                 key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
1330         else if (cdesc->qat_dir == ICP_QAT_HW_CIPHER_ENCRYPT)
1331                 key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
1332         else
1333                 key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
1334
1335         if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
1336                 total_key_size = ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ +
1337                         ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
1338                 cipher_cd_ctrl->cipher_state_sz =
1339                         ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
1340                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
1341
1342         } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
1343                 total_key_size = ICP_QAT_HW_KASUMI_F8_KEY_SZ;
1344                 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_KASUMI_BLK_SZ >> 3;
1345                 cipher_cd_ctrl->cipher_padding_sz =
1346                                         (2 * ICP_QAT_HW_KASUMI_BLK_SZ) >> 3;
1347         } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES) {
1348                 total_key_size = ICP_QAT_HW_3DES_KEY_SZ;
1349                 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_3DES_BLK_SZ >> 3;
1350                 qat_proto_flag =
1351                         qat_get_crypto_proto_flag(header->serv_specif_flags);
1352         } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_DES) {
1353                 total_key_size = ICP_QAT_HW_DES_KEY_SZ;
1354                 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_DES_BLK_SZ >> 3;
1355                 qat_proto_flag =
1356                         qat_get_crypto_proto_flag(header->serv_specif_flags);
1357         } else if (cdesc->qat_cipher_alg ==
1358                 ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
1359                 total_key_size = ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ +
1360                         ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
1361                 cipher_cd_ctrl->cipher_state_sz =
1362                         ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
1363                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
1364                 cdesc->min_qat_dev_gen = QAT_GEN2;
1365         } else {
1366                 total_key_size = cipherkeylen;
1367                 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_AES_BLK_SZ >> 3;
1368                 qat_proto_flag =
1369                         qat_get_crypto_proto_flag(header->serv_specif_flags);
1370         }
1371         cipher_cd_ctrl->cipher_key_sz = total_key_size >> 3;
1372         cipher_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
1373         cipher_cd_ctrl->cipher_cfg_offset = cipher_offset >> 3;
1374
1375         header->service_cmd_id = cdesc->qat_cmd;
1376         qat_sym_session_init_common_hdr(header, qat_proto_flag);
1377
1378         cipher = (struct icp_qat_hw_cipher_algo_blk *)cdesc->cd_cur_ptr;
1379         cipher->cipher_config.val =
1380             ICP_QAT_HW_CIPHER_CONFIG_BUILD(cdesc->qat_mode,
1381                                         cdesc->qat_cipher_alg, key_convert,
1382                                         cdesc->qat_dir);
1383
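        /*
         * KASUMI F8 keeps two copies of the key in the content descriptor:
         * the key itself followed by the key XORed word-by-word with
         * KASUMI_F8_KEY_MODIFIER_4_BYTES, so cd_cur_ptr advances by twice
         * the key length below.
         */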
1384         if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
1385                 temp_key = (uint32_t *)(cdesc->cd_cur_ptr +
1386                                         sizeof(struct icp_qat_hw_cipher_config)
1387                                         + cipherkeylen);
1388                 memcpy(cipher->key, cipherkey, cipherkeylen);
1389                 memcpy(temp_key, cipherkey, cipherkeylen);
1390
1391                 /* XOR the key with the KASUMI F8 key modifier, 4 bytes at a time */
1392                 for (wordIndex = 0; wordIndex < (cipherkeylen >> 2);
1393                                                                 wordIndex++)
1394                         temp_key[wordIndex] ^= KASUMI_F8_KEY_MODIFIER_4_BYTES;
1395
1396                 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1397                                         cipherkeylen + cipherkeylen;
1398         } else {
1399                 memcpy(cipher->key, cipherkey, cipherkeylen);
1400                 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1401                                         cipherkeylen;
1402         }
1403
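        /*
         * Pad the key field up to the size the hardware expects. The shorter
         * 2-key and 1-key 3DES variants are expanded by repeating K1; all
         * other algorithms are padded with zeroes.
         */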
1404         if (total_key_size > cipherkeylen) {
1405                 uint32_t padding_size = total_key_size - cipherkeylen;
1406                 if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
1407                         && (cipherkeylen == QAT_3DES_KEY_SZ_OPT2)) {
1408                         /* K3 not provided, so reuse K1 as K3 (K3 = K1) */
1409                         memcpy(cdesc->cd_cur_ptr, cipherkey, padding_size);
1410                 } else if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
1411                         && (cipherkeylen == QAT_3DES_KEY_SZ_OPT3)) {
1412                         /* K2 and K3 not provided, so reuse K1 (K1 = K2 = K3) */
1413                         memcpy(cdesc->cd_cur_ptr, cipherkey,
1414                                 cipherkeylen);
1415                         memcpy(cdesc->cd_cur_ptr+cipherkeylen,
1416                                 cipherkey, cipherkeylen);
1417                 } else
1418                         memset(cdesc->cd_cur_ptr, 0, padding_size);
1419
1420                 cdesc->cd_cur_ptr += padding_size;
1421         }
1422         cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
1423         cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;
1424
1425         return 0;
1426 }
1427
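/*
 * Builds the auth (hash) half of the content descriptor: the request header,
 * the hash config word, the algorithm-specific state1/state2 blocks
 * (pre-computed where required) and the auth CD control fields. Sizes and
 * offsets written to the CD control header are expressed in 8-byte words.
 */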
1428 int qat_sym_session_aead_create_cd_auth(struct qat_sym_session *cdesc,
1429                                                 uint8_t *authkey,
1430                                                 uint32_t authkeylen,
1431                                                 uint32_t aad_length,
1432                                                 uint32_t digestsize,
1433                                                 unsigned int operation)
1434 {
1435         struct icp_qat_hw_auth_setup *hash;
1436         struct icp_qat_hw_cipher_algo_blk *cipherconfig;
1437         struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
1438         struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
1439         struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
1440         void *ptr = &req_tmpl->cd_ctrl;
1441         struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
1442         struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
1443         struct icp_qat_fw_la_auth_req_params *auth_param =
1444                 (struct icp_qat_fw_la_auth_req_params *)
1445                 ((char *)&req_tmpl->serv_specif_rqpars +
1446                 sizeof(struct icp_qat_fw_la_cipher_req_params));
1447         uint16_t state1_size = 0, state2_size = 0;
1448         uint16_t hash_offset, cd_size;
1449         uint32_t *aad_len = NULL;
1450         uint32_t wordIndex  = 0;
1451         uint32_t *pTempKey;
1452         enum qat_sym_proto_flag qat_proto_flag =
1453                 QAT_CRYPTO_PROTO_FLAG_NONE;
1454
1455         if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_AUTH) {
1456                 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1457                                         ICP_QAT_FW_SLICE_AUTH);
1458                 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1459                                         ICP_QAT_FW_SLICE_DRAM_WR);
1460                 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1461         } else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
1462                 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1463                                 ICP_QAT_FW_SLICE_AUTH);
1464                 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1465                                 ICP_QAT_FW_SLICE_CIPHER);
1466                 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1467                                 ICP_QAT_FW_SLICE_CIPHER);
1468                 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1469                                 ICP_QAT_FW_SLICE_DRAM_WR);
1470                 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1471         } else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
1472                 QAT_LOG(ERR, "Invalid param, must be a hash command.");
1473                 return -EFAULT;
1474         }
1475
1476         if (operation == RTE_CRYPTO_AUTH_OP_VERIFY) {
1477                 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1478                                 ICP_QAT_FW_LA_NO_RET_AUTH_RES);
1479                 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1480                                 ICP_QAT_FW_LA_CMP_AUTH_RES);
1481                 cdesc->auth_op = ICP_QAT_HW_AUTH_VERIFY;
1482         } else {
1483                 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1484                                            ICP_QAT_FW_LA_RET_AUTH_RES);
1485                 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1486                                            ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
1487                 cdesc->auth_op = ICP_QAT_HW_AUTH_GENERATE;
1488         }
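        /*
         * For verify, the hardware compares the digest it computes against
         * the one supplied in the request (CMP_AUTH_RES); for generate, it
         * returns the computed digest instead (RET_AUTH_RES).
         */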
1489
1490         /*
1491          * Set up the inner hash config
1492          */
1493         hash_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
1494         hash = (struct icp_qat_hw_auth_setup *)cdesc->cd_cur_ptr;
1495         hash->auth_config.reserved = 0;
1496         hash->auth_config.config =
1497                         ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE1,
1498                                 cdesc->qat_hash_alg, digestsize);
1499
1500         if (cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2
1501                 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_KASUMI_F9
1502                 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3
1503                 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC
1504                 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC
1505                 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_NULL
1506                         )
1507                 hash->auth_counter.counter = 0;
1508         else {
1509                 int block_size = qat_hash_get_block_size(cdesc->qat_hash_alg);
1510
1511                 if (block_size < 0)
1512                         return block_size;
1513                 hash->auth_counter.counter = rte_bswap32(block_size);
1514         }
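        /*
         * The algorithms matched in the first branch above keep a zero
         * counter; the remaining algorithms seed it with their block size,
         * byte-swapped via rte_bswap32.
         */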
1515
1516         cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_auth_setup);
1517
1518         /*
1519          * cd_cur_ptr now points at the state1 information.
1520          */
1521         switch (cdesc->qat_hash_alg) {
1522         case ICP_QAT_HW_AUTH_ALGO_SHA1:
1523                 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA1, authkey,
1524                         authkeylen, cdesc->cd_cur_ptr, &state1_size,
1525                         cdesc->aes_cmac)) {
1526                         QAT_LOG(ERR, "(SHA)precompute failed");
1527                         return -EFAULT;
1528                 }
1529                 state2_size = RTE_ALIGN_CEIL(ICP_QAT_HW_SHA1_STATE2_SZ, 8);
1530                 break;
1531         case ICP_QAT_HW_AUTH_ALGO_SHA224:
1532                 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA224, authkey,
1533                         authkeylen, cdesc->cd_cur_ptr, &state1_size,
1534                         cdesc->aes_cmac)) {
1535                         QAT_LOG(ERR, "(SHA)precompute failed");
1536                         return -EFAULT;
1537                 }
1538                 state2_size = ICP_QAT_HW_SHA224_STATE2_SZ;
1539                 break;
1540         case ICP_QAT_HW_AUTH_ALGO_SHA256:
1541                 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA256, authkey,
1542                         authkeylen, cdesc->cd_cur_ptr,  &state1_size,
1543                         cdesc->aes_cmac)) {
1544                         QAT_LOG(ERR, "(SHA)precompute failed");
1545                         return -EFAULT;
1546                 }
1547                 state2_size = ICP_QAT_HW_SHA256_STATE2_SZ;
1548                 break;
1549         case ICP_QAT_HW_AUTH_ALGO_SHA384:
1550                 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA384, authkey,
1551                         authkeylen, cdesc->cd_cur_ptr, &state1_size,
1552                         cdesc->aes_cmac)) {
1553                         QAT_LOG(ERR, "(SHA)precompute failed");
1554                         return -EFAULT;
1555                 }
1556                 state2_size = ICP_QAT_HW_SHA384_STATE2_SZ;
1557                 break;
1558         case ICP_QAT_HW_AUTH_ALGO_SHA512:
1559                 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA512, authkey,
1560                         authkeylen, cdesc->cd_cur_ptr,  &state1_size,
1561                         cdesc->aes_cmac)) {
1562                         QAT_LOG(ERR, "(SHA)precompute failed");
1563                         return -EFAULT;
1564                 }
1565                 state2_size = ICP_QAT_HW_SHA512_STATE2_SZ;
1566                 break;
1567         case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
1568                 state1_size = ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1569
1570                 if (cdesc->aes_cmac)
1571                         memset(cdesc->cd_cur_ptr, 0, state1_size);
1572                 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC,
1573                         authkey, authkeylen, cdesc->cd_cur_ptr + state1_size,
1574                         &state2_size, cdesc->aes_cmac)) {
1575                         cdesc->aes_cmac ? QAT_LOG(ERR,
1576                                                   "(CMAC)precompute failed")
1577                                         : QAT_LOG(ERR,
1578                                                   "(XCBC)precompute failed");
1579                         return -EFAULT;
1580                 }
1581                 break;
1582         case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
1583         case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
1584                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
1585                 state1_size = ICP_QAT_HW_GALOIS_128_STATE1_SZ;
1586                 if (qat_sym_do_precomputes(cdesc->qat_hash_alg, authkey,
1587                         authkeylen, cdesc->cd_cur_ptr + state1_size,
1588                         &state2_size, cdesc->aes_cmac)) {
1589                         QAT_LOG(ERR, "(GCM)precompute failed");
1590                         return -EFAULT;
1591                 }
1592                 /*
1593                  * Write the AAD length into bytes 16-19 of state2 in
1594                  * big-endian format; this field is 8 bytes wide.
1595                  */
1596                 auth_param->u2.aad_sz =
1597                                 RTE_ALIGN_CEIL(aad_length, 16);
1598                 auth_param->hash_state_sz = (auth_param->u2.aad_sz) >> 3;
1599
1600                 aad_len = (uint32_t *)(cdesc->cd_cur_ptr +
1601                                         ICP_QAT_HW_GALOIS_128_STATE1_SZ +
1602                                         ICP_QAT_HW_GALOIS_H_SZ);
1603                 *aad_len = rte_bswap32(aad_length);
1604                 cdesc->aad_len = aad_length;
1605                 break;
1606         case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
1607                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
1608                 state1_size = qat_hash_get_state1_size(
1609                                 ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2);
1610                 state2_size = ICP_QAT_HW_SNOW_3G_UIA2_STATE2_SZ;
1611                 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
1612
1613                 cipherconfig = (struct icp_qat_hw_cipher_algo_blk *)
1614                                 (cdesc->cd_cur_ptr + state1_size + state2_size);
1615                 cipherconfig->cipher_config.val =
1616                 ICP_QAT_HW_CIPHER_CONFIG_BUILD(ICP_QAT_HW_CIPHER_ECB_MODE,
1617                         ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2,
1618                         ICP_QAT_HW_CIPHER_KEY_CONVERT,
1619                         ICP_QAT_HW_CIPHER_ENCRYPT);
1620                 memcpy(cipherconfig->key, authkey, authkeylen);
1621                 memset(cipherconfig->key + authkeylen,
1622                                 0, ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ);
1623                 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1624                                 authkeylen + ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
1625                 auth_param->hash_state_sz = ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
1626                 break;
1627         case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
1628                 hash->auth_config.config =
1629                         ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE0,
1630                                 cdesc->qat_hash_alg, digestsize);
1631                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
1632                 state1_size = qat_hash_get_state1_size(
1633                                 ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3);
1634                 state2_size = ICP_QAT_HW_ZUC_3G_EIA3_STATE2_SZ;
1635                 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size
1636                         + ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ);
1637
1638                 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1639                 cdesc->cd_cur_ptr += state1_size + state2_size
1640                         + ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
1641                 auth_param->hash_state_sz = ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
1642                 cdesc->min_qat_dev_gen = QAT_GEN2;
1643
1644                 break;
1645         case ICP_QAT_HW_AUTH_ALGO_MD5:
1646                 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_MD5, authkey,
1647                         authkeylen, cdesc->cd_cur_ptr, &state1_size,
1648                         cdesc->aes_cmac)) {
1649                         QAT_LOG(ERR, "(MD5)precompute failed");
1650                         return -EFAULT;
1651                 }
1652                 state2_size = ICP_QAT_HW_MD5_STATE2_SZ;
1653                 break;
1654         case ICP_QAT_HW_AUTH_ALGO_NULL:
1655                 state1_size = qat_hash_get_state1_size(
1656                                 ICP_QAT_HW_AUTH_ALGO_NULL);
1657                 state2_size = ICP_QAT_HW_NULL_STATE2_SZ;
1658                 break;
1659         case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
1660                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
1661                 state1_size = qat_hash_get_state1_size(
1662                                 ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC);
1663                 state2_size = ICP_QAT_HW_AES_CBC_MAC_KEY_SZ +
1664                                 ICP_QAT_HW_AES_CCM_CBC_E_CTR0_SZ;
1665
1666                 if (aad_length > 0) {
1667                         aad_length += ICP_QAT_HW_CCM_AAD_B0_LEN +
1668                         ICP_QAT_HW_CCM_AAD_LEN_INFO;
1669                         auth_param->u2.aad_sz =
1670                         RTE_ALIGN_CEIL(aad_length,
1671                         ICP_QAT_HW_CCM_AAD_ALIGNMENT);
1672                 } else {
1673                         auth_param->u2.aad_sz = ICP_QAT_HW_CCM_AAD_B0_LEN;
1674                 }
1675                 cdesc->aad_len = aad_length;
1676                 hash->auth_counter.counter = 0;
1677
1678                 hash_cd_ctrl->outer_prefix_sz = digestsize;
1679                 auth_param->hash_state_sz = digestsize;
1680
1681                 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1682                 break;
1683         case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
1684                 state1_size = qat_hash_get_state1_size(
1685                                 ICP_QAT_HW_AUTH_ALGO_KASUMI_F9);
1686                 state2_size = ICP_QAT_HW_KASUMI_F9_STATE2_SZ;
1687                 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
1688                 pTempKey = (uint32_t *)(cdesc->cd_cur_ptr + state1_size
1689                                                         + authkeylen);
1690                 /*
1691                 * The Inner Hash Initial State2 block must contain IK
1692                 * (Initialisation Key), followed by IK XOR-ed with KM
1693                 * (Key Modifier): IK||(IK^KM).
1694                 */
1695                 /* write the auth key */
1696                 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1697                 /* initialise temp key with auth key */
1698                 memcpy(pTempKey, authkey, authkeylen);
1699                 /* XOR the key with the KASUMI F9 key modifier, 4 bytes at a time */
1700                 for (wordIndex = 0; wordIndex < (authkeylen >> 2); wordIndex++)
1701                         pTempKey[wordIndex] ^= KASUMI_F9_KEY_MODIFIER_4_BYTES;
1702                 break;
1703         default:
1704                 QAT_LOG(ERR, "Invalid HASH alg %u", cdesc->qat_hash_alg);
1705                 return -EFAULT;
1706         }
1707
1708         /* Request template setup */
1709         qat_sym_session_init_common_hdr(header, qat_proto_flag);
1710         header->service_cmd_id = cdesc->qat_cmd;
1711
1712         /* Auth CD config setup */
1713         hash_cd_ctrl->hash_cfg_offset = hash_offset >> 3;
1714         hash_cd_ctrl->hash_flags = ICP_QAT_FW_AUTH_HDR_FLAG_NO_NESTED;
1715         hash_cd_ctrl->inner_res_sz = digestsize;
1716         hash_cd_ctrl->final_sz = digestsize;
1717         hash_cd_ctrl->inner_state1_sz = state1_size;
1718         auth_param->auth_res_sz = digestsize;
1719
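        /*
         * CD control sizes and offsets are expressed in 8-byte words: state2
         * starts after the hash setup block plus the 8-byte aligned state1
         * region, hence the alignment and the >> 3 shifts below.
         */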
1720         hash_cd_ctrl->inner_state2_sz  = state2_size;
1721         hash_cd_ctrl->inner_state2_offset = hash_cd_ctrl->hash_cfg_offset +
1722                         ((sizeof(struct icp_qat_hw_auth_setup) +
1723                          RTE_ALIGN_CEIL(hash_cd_ctrl->inner_state1_sz, 8))
1724                                         >> 3);
1725
1726         cdesc->cd_cur_ptr += state1_size + state2_size;
1727         cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
1728
1729         cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1730         cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;
1731
1732         return 0;
1733 }
1734
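/*
 * The validators below map a caller-supplied key length onto the matching
 * QAT hardware cipher algorithm, returning -EINVAL for unsupported sizes.
 * Illustrative (hypothetical) caller-side usage, sketched here purely for
 * documentation:
 *
 *	enum icp_qat_hw_cipher_algo alg;
 *
 *	if (qat_sym_validate_aes_key(cipher_xform->key.length, &alg) != 0) {
 *		QAT_LOG(ERR, "Invalid AES cipher key size");
 *		return -EINVAL;
 *	}
 *	session->qat_cipher_alg = alg;
 */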
1735 int qat_sym_validate_aes_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1736 {
1737         switch (key_len) {
1738         case ICP_QAT_HW_AES_128_KEY_SZ:
1739                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
1740                 break;
1741         case ICP_QAT_HW_AES_192_KEY_SZ:
1742                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES192;
1743                 break;
1744         case ICP_QAT_HW_AES_256_KEY_SZ:
1745                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
1746                 break;
1747         default:
1748                 return -EINVAL;
1749         }
1750         return 0;
1751 }
1752
1753 int qat_sym_validate_aes_docsisbpi_key(int key_len,
1754                 enum icp_qat_hw_cipher_algo *alg)
1755 {
1756         switch (key_len) {
1757         case ICP_QAT_HW_AES_128_KEY_SZ:
1758                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
1759                 break;
1760         default:
1761                 return -EINVAL;
1762         }
1763         return 0;
1764 }
1765
1766 int qat_sym_validate_snow3g_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1767 {
1768         switch (key_len) {
1769         case ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ:
1770                 *alg = ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2;
1771                 break;
1772         default:
1773                 return -EINVAL;
1774         }
1775         return 0;
1776 }
1777
1778 int qat_sym_validate_kasumi_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1779 {
1780         switch (key_len) {
1781         case ICP_QAT_HW_KASUMI_KEY_SZ:
1782                 *alg = ICP_QAT_HW_CIPHER_ALGO_KASUMI;
1783                 break;
1784         default:
1785                 return -EINVAL;
1786         }
1787         return 0;
1788 }
1789
1790 int qat_sym_validate_des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1791 {
1792         switch (key_len) {
1793         case ICP_QAT_HW_DES_KEY_SZ:
1794                 *alg = ICP_QAT_HW_CIPHER_ALGO_DES;
1795                 break;
1796         default:
1797                 return -EINVAL;
1798         }
1799         return 0;
1800 }
1801
1802 int qat_sym_validate_3des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1803 {
1804         switch (key_len) {
1805         case QAT_3DES_KEY_SZ_OPT1:
1806         case QAT_3DES_KEY_SZ_OPT2:
1807         case QAT_3DES_KEY_SZ_OPT3:
1808                 *alg = ICP_QAT_HW_CIPHER_ALGO_3DES;
1809                 break;
1810         default:
1811                 return -EINVAL;
1812         }
1813         return 0;
1814 }
1815
1816 int qat_sym_validate_zuc_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1817 {
1818         switch (key_len) {
1819         case ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ:
1820                 *alg = ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3;
1821                 break;
1822         default:
1823                 return -EINVAL;
1824         }
1825         return 0;
1826 }