crypto/qat: add aes-sha224-hmac capability
drivers/crypto/qat/qat_crypto.c
/*-
 *   BSD LICENSE
 *
 *   Copyright(c) 2015-2016 Intel Corporation. All rights reserved.
 *   All rights reserved.
 *
 *   Redistribution and use in source and binary forms, with or without
 *   modification, are permitted provided that the following conditions
 *   are met:
 *
 *       * Redistributions of source code must retain the above copyright
 *         notice, this list of conditions and the following disclaimer.
 *       * Redistributions in binary form must reproduce the above copyright
 *         notice, this list of conditions and the following disclaimer in
 *         the documentation and/or other materials provided with the
 *         distribution.
 *       * Neither the name of Intel Corporation nor the names of its
 *         contributors may be used to endorse or promote products derived
 *         from this software without specific prior written permission.
 *
 *   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 *   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 *   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 *   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 *   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 *   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 *   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 *   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 *   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include <stdio.h>
#include <stdlib.h>
#include <strings.h>
#include <string.h>
#include <inttypes.h>
#include <errno.h>
#include <sys/queue.h>
#include <stdarg.h>

#include <rte_common.h>
#include <rte_log.h>
#include <rte_debug.h>
#include <rte_memory.h>
#include <rte_memzone.h>
#include <rte_tailq.h>
#include <rte_ether.h>
#include <rte_malloc.h>
#include <rte_launch.h>
#include <rte_eal.h>
#include <rte_per_lcore.h>
#include <rte_lcore.h>
#include <rte_atomic.h>
#include <rte_branch_prediction.h>
#include <rte_mempool.h>
#include <rte_mbuf.h>
#include <rte_string_fns.h>
#include <rte_spinlock.h>
#include <rte_hexdump.h>

#include "qat_logs.h"
#include "qat_algs.h"
#include "qat_crypto.h"
#include "adf_transport_access_macros.h"

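/* Bits per byte: SNOW 3G offsets and lengths are passed in bits */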
#define BYTE_LENGTH    8

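/*
 * Capability table advertised through the dev_info_get op. All sizes
 * are in bytes; increment gives the step between supported sizes
 * (0 when only the single min == max size is supported).
 */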
static const struct rte_cryptodev_capabilities qat_pmd_capabilities[] = {
        {       /* SHA1 HMAC */
                .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
                {.sym = {
                        .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
                        {.auth = {
                                .algo = RTE_CRYPTO_AUTH_SHA1_HMAC,
                                .block_size = 64,
                                .key_size = {
                                        .min = 64,
                                        .max = 64,
                                        .increment = 0
                                },
                                .digest_size = {
                                        .min = 20,
                                        .max = 20,
                                        .increment = 0
                                },
                                .aad_size = { 0 }
                        }, }
                }, }
        },
        {       /* SHA224 HMAC */
                .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
                {.sym = {
                        .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
                        {.auth = {
                                .algo = RTE_CRYPTO_AUTH_SHA224_HMAC,
                                .block_size = 64,
                                .key_size = {
                                        .min = 64,
                                        .max = 64,
                                        .increment = 0
                                },
                                .digest_size = {
                                        .min = 28,
                                        .max = 28,
                                        .increment = 0
                                },
                                .aad_size = { 0 }
                        }, }
                }, }
        },
        {       /* SHA256 HMAC */
                .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
                {.sym = {
                        .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
                        {.auth = {
                                .algo = RTE_CRYPTO_AUTH_SHA256_HMAC,
                                .block_size = 64,
                                .key_size = {
                                        .min = 64,
                                        .max = 64,
                                        .increment = 0
                                },
                                .digest_size = {
                                        .min = 32,
                                        .max = 32,
                                        .increment = 0
                                },
                                .aad_size = { 0 }
                        }, }
                }, }
        },
        {       /* SHA512 HMAC */
                .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
                {.sym = {
                        .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
                        {.auth = {
                                .algo = RTE_CRYPTO_AUTH_SHA512_HMAC,
                                .block_size = 128,
                                .key_size = {
                                        .min = 128,
                                        .max = 128,
                                        .increment = 0
                                },
                                .digest_size = {
                                        .min = 64,
                                        .max = 64,
                                        .increment = 0
                                },
                                .aad_size = { 0 }
                        }, }
                }, }
        },
        {       /* MD5 HMAC */
                .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
                {.sym = {
                        .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
                        {.auth = {
                                .algo = RTE_CRYPTO_AUTH_MD5_HMAC,
                                .block_size = 64,
                                .key_size = {
                                        .min = 8,
                                        .max = 64,
                                        .increment = 8
                                },
                                .digest_size = {
                                        .min = 16,
                                        .max = 16,
                                        .increment = 0
                                },
                                .aad_size = { 0 }
                        }, }
                }, }
        },
        {       /* AES XCBC MAC */
                .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
                {.sym = {
                        .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
                        {.auth = {
                                .algo = RTE_CRYPTO_AUTH_AES_XCBC_MAC,
                                .block_size = 16,
                                .key_size = {
                                        .min = 16,
                                        .max = 16,
                                        .increment = 0
                                },
                                .digest_size = {
                                        .min = 16,
                                        .max = 16,
                                        .increment = 0
                                },
                                .aad_size = { 0 }
                        }, }
                }, }
        },
        {       /* AES GCM (AUTH) */
                .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
                {.sym = {
                        .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
                        {.auth = {
                                .algo = RTE_CRYPTO_AUTH_AES_GCM,
                                .block_size = 16,
                                .key_size = {
                                        .min = 16,
                                        .max = 32,
                                        .increment = 8
                                },
                                .digest_size = {
                                        .min = 8,
                                        .max = 16,
                                        .increment = 4
                                },
                                .aad_size = {
                                        .min = 8,
                                        .max = 12,
                                        .increment = 4
                                }
                        }, }
                }, }
        },
        {       /* SNOW3G (UIA2) */
                .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
                {.sym = {
                        .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
                        {.auth = {
                                .algo = RTE_CRYPTO_AUTH_SNOW3G_UIA2,
                                .block_size = 16,
                                .key_size = {
                                        .min = 16,
                                        .max = 16,
                                        .increment = 0
                                },
                                .digest_size = {
                                        .min = 4,
                                        .max = 4,
                                        .increment = 0
                                },
                                .aad_size = {
                                        .min = 16,
                                        .max = 16,
                                        .increment = 0
                                }
                        }, }
                }, }
        },
        {       /* AES GCM (CIPHER) */
                .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
                {.sym = {
                        .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
                        {.cipher = {
                                .algo = RTE_CRYPTO_CIPHER_AES_GCM,
                                .block_size = 16,
                                .key_size = {
                                        .min = 16,
                                        .max = 32,
                                        .increment = 8
                                },
                                .iv_size = {
                                        .min = 16,
                                        .max = 16,
                                        .increment = 0
                                }
                        }, }
                }, }
        },
        {       /* AES CBC */
                .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
                {.sym = {
                        .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
                        {.cipher = {
                                .algo = RTE_CRYPTO_CIPHER_AES_CBC,
                                .block_size = 16,
                                .key_size = {
                                        .min = 16,
                                        .max = 32,
                                        .increment = 8
                                },
                                .iv_size = {
                                        .min = 16,
                                        .max = 16,
                                        .increment = 0
                                }
                        }, }
                }, }
        },
        {       /* SNOW3G (UEA2) */
                .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
                {.sym = {
                        .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
                        {.cipher = {
                                .algo = RTE_CRYPTO_CIPHER_SNOW3G_UEA2,
                                .block_size = 16,
                                .key_size = {
                                        .min = 16,
                                        .max = 16,
                                        .increment = 0
                                },
                                .iv_size = {
                                        .min = 16,
                                        .max = 16,
                                        .increment = 0
                                }
                        }, }
                }, }
        },
        {       /* AES CTR */
                .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
                {.sym = {
                        .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
                        {.cipher = {
                                .algo = RTE_CRYPTO_CIPHER_AES_CTR,
                                .block_size = 16,
                                .key_size = {
                                        .min = 16,
                                        .max = 32,
                                        .increment = 8
                                },
                                .iv_size = {
                                        .min = 16,
                                        .max = 16,
                                        .increment = 0
                                }
                        }, }
                }, }
        },
        RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
};
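
/*
 * Applications reach this table via rte_cryptodev_info_get(). A minimal
 * sketch of the consumer side (illustrative only, not part of this file):
 *
 *      struct rte_cryptodev_info info;
 *
 *      rte_cryptodev_info_get(dev_id, &info);
 *      // info.capabilities now points at qat_pmd_capabilities; scan it
 *      // until the RTE_CRYPTO_OP_TYPE_UNDEFINED terminator entry.
 */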

static inline uint32_t
adf_modulo(uint32_t data, uint32_t shift);

static inline int
qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg);

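/*
 * Clear a session: zero the private data but preserve the physical
 * address of the content descriptor, which was computed when the
 * mempool object was initialised and must survive session reuse.
 */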
void qat_crypto_sym_clear_session(struct rte_cryptodev *dev,
                void *session)
{
        struct qat_session *sess = session;
        phys_addr_t cd_paddr;

        PMD_INIT_FUNC_TRACE();
        if (session) {
                cd_paddr = sess->cd_paddr;
                memset(sess, 0, qat_crypto_sym_get_session_private_size(dev));
                sess->cd_paddr = cd_paddr;
        } else
                PMD_DRV_LOG(ERR, "NULL session");
}

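/*
 * Map an xform chain onto a QAT firmware Lookaside command id: a single
 * cipher or auth xform gives CIPHER or AUTH, a two-element chain gives
 * CIPHER_HASH or HASH_CIPHER depending on order, anything else is
 * rejected with -1.
 */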
static int
qat_get_cmd_id(const struct rte_crypto_sym_xform *xform)
{
        /* Cipher Only */
        if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER && xform->next == NULL)
                return ICP_QAT_FW_LA_CMD_CIPHER;

        /* Authentication Only */
        if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH && xform->next == NULL)
                return ICP_QAT_FW_LA_CMD_AUTH;

        if (xform->next == NULL)
                return -1;

        /* Cipher then Authenticate */
        if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
                        xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
                return ICP_QAT_FW_LA_CMD_CIPHER_HASH;

        /* Authenticate then Cipher */
        if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH &&
                        xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
                return ICP_QAT_FW_LA_CMD_HASH_CIPHER;

        return -1;
}

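/*
 * Walk the xform chain and return the first auth or cipher transform
 * found, or NULL if the chain contains none of that type.
 */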
static struct rte_crypto_auth_xform *
qat_get_auth_xform(struct rte_crypto_sym_xform *xform)
{
        do {
                if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH)
                        return &xform->auth;

                xform = xform->next;
        } while (xform);

        return NULL;
}

static struct rte_crypto_cipher_xform *
qat_get_cipher_xform(struct rte_crypto_sym_xform *xform)
{
        do {
                if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
                        return &xform->cipher;

                xform = xform->next;
        } while (xform);

        return NULL;
}
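
/*
 * Build the cipher half of a session: select the QAT algorithm and mode
 * from the xform, record the direction, and write the cipher content
 * descriptor. On any error the session is returned to its mempool and
 * NULL is returned.
 */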
void *
qat_crypto_sym_configure_session_cipher(struct rte_cryptodev *dev,
                struct rte_crypto_sym_xform *xform, void *session_private)
{
        struct qat_pmd_private *internals = dev->data->dev_private;

        struct qat_session *session = session_private;

        struct rte_crypto_cipher_xform *cipher_xform = NULL;

        /* Get cipher xform from crypto xform chain */
        cipher_xform = qat_get_cipher_xform(xform);

        switch (cipher_xform->algo) {
        case RTE_CRYPTO_CIPHER_AES_CBC:
                if (qat_alg_validate_aes_key(cipher_xform->key.length,
                                &session->qat_cipher_alg) != 0) {
                        PMD_DRV_LOG(ERR, "Invalid AES cipher key size");
                        goto error_out;
                }
                session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
                break;
        case RTE_CRYPTO_CIPHER_AES_GCM:
                if (qat_alg_validate_aes_key(cipher_xform->key.length,
                                &session->qat_cipher_alg) != 0) {
                        PMD_DRV_LOG(ERR, "Invalid AES cipher key size");
                        goto error_out;
                }
                session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
                break;
        case RTE_CRYPTO_CIPHER_AES_CTR:
                if (qat_alg_validate_aes_key(cipher_xform->key.length,
                                &session->qat_cipher_alg) != 0) {
                        PMD_DRV_LOG(ERR, "Invalid AES cipher key size");
                        goto error_out;
                }
                session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
                break;
        case RTE_CRYPTO_CIPHER_SNOW3G_UEA2:
                if (qat_alg_validate_snow3g_key(cipher_xform->key.length,
                                        &session->qat_cipher_alg) != 0) {
                        PMD_DRV_LOG(ERR, "Invalid SNOW3G cipher key size");
                        goto error_out;
                }
                session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
                break;
        case RTE_CRYPTO_CIPHER_NULL:
        case RTE_CRYPTO_CIPHER_3DES_ECB:
        case RTE_CRYPTO_CIPHER_3DES_CBC:
        case RTE_CRYPTO_CIPHER_AES_ECB:
        case RTE_CRYPTO_CIPHER_AES_CCM:
        case RTE_CRYPTO_CIPHER_KASUMI_F8:
                PMD_DRV_LOG(ERR, "Crypto: Unsupported Cipher alg %u",
                                cipher_xform->algo);
                goto error_out;
        default:
                PMD_DRV_LOG(ERR, "Crypto: Undefined Cipher specified %u\n",
                                cipher_xform->algo);
                goto error_out;
        }

        if (cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
                session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
        else
                session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;

        if (qat_alg_aead_session_create_content_desc_cipher(session,
                                                cipher_xform->key.data,
                                                cipher_xform->key.length))
                goto error_out;

        return session;

error_out:
        rte_mempool_put(internals->sess_mp, session);
        return NULL;
}

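/*
 * Top-level session configuration: derive the firmware command id from
 * the xform chain, then configure the cipher and/or auth halves in the
 * order the chain requests.
 */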
void *
qat_crypto_sym_configure_session(struct rte_cryptodev *dev,
                struct rte_crypto_sym_xform *xform, void *session_private)
{
        struct qat_pmd_private *internals = dev->data->dev_private;

        struct qat_session *session = session_private;

        int qat_cmd_id;

        PMD_INIT_FUNC_TRACE();

        /* Get requested QAT command id */
        qat_cmd_id = qat_get_cmd_id(xform);
        if (qat_cmd_id < 0 || qat_cmd_id >= ICP_QAT_FW_LA_CMD_DELIMITER) {
                PMD_DRV_LOG(ERR, "Unsupported xform chain requested");
                goto error_out;
        }
        session->qat_cmd = (enum icp_qat_fw_la_cmd_id)qat_cmd_id;
        switch (session->qat_cmd) {
        case ICP_QAT_FW_LA_CMD_CIPHER:
                session = qat_crypto_sym_configure_session_cipher(dev,
                                xform, session);
                break;
        case ICP_QAT_FW_LA_CMD_AUTH:
                session = qat_crypto_sym_configure_session_auth(dev,
                                xform, session);
                break;
        case ICP_QAT_FW_LA_CMD_CIPHER_HASH:
                session = qat_crypto_sym_configure_session_cipher(dev,
                                xform, session);
                session = qat_crypto_sym_configure_session_auth(dev,
                                xform, session);
                break;
        case ICP_QAT_FW_LA_CMD_HASH_CIPHER:
                session = qat_crypto_sym_configure_session_auth(dev,
                                xform, session);
                session = qat_crypto_sym_configure_session_cipher(dev,
                                xform, session);
                break;
        case ICP_QAT_FW_LA_CMD_TRNG_GET_RANDOM:
        case ICP_QAT_FW_LA_CMD_TRNG_TEST:
        case ICP_QAT_FW_LA_CMD_SSL3_KEY_DERIVE:
        case ICP_QAT_FW_LA_CMD_TLS_V1_1_KEY_DERIVE:
        case ICP_QAT_FW_LA_CMD_TLS_V1_2_KEY_DERIVE:
        case ICP_QAT_FW_LA_CMD_MGF1:
        case ICP_QAT_FW_LA_CMD_AUTH_PRE_COMP:
        case ICP_QAT_FW_LA_CMD_CIPHER_PRE_COMP:
        case ICP_QAT_FW_LA_CMD_DELIMITER:
                PMD_DRV_LOG(ERR, "Unsupported Service %u",
                                session->qat_cmd);
                goto error_out;
        default:
                PMD_DRV_LOG(ERR, "Unsupported Service %u",
                                session->qat_cmd);
                goto error_out;
        }
        return session;

error_out:
        rte_mempool_put(internals->sess_mp, session);
        return NULL;
}

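/*
 * Build the auth half of a session. For the Galois (GCM) hashes the
 * auth content descriptor is keyed with the cipher key; every other
 * algorithm uses the auth xform's own key.
 */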
struct qat_session *
qat_crypto_sym_configure_session_auth(struct rte_cryptodev *dev,
                                struct rte_crypto_sym_xform *xform,
                                struct qat_session *session_private)
{
        struct qat_pmd_private *internals = dev->data->dev_private;
        struct qat_session *session = session_private;
        struct rte_crypto_auth_xform *auth_xform = NULL;
        struct rte_crypto_cipher_xform *cipher_xform = NULL;

        auth_xform = qat_get_auth_xform(xform);

        switch (auth_xform->algo) {
        case RTE_CRYPTO_AUTH_SHA1_HMAC:
                session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA1;
                break;
        case RTE_CRYPTO_AUTH_SHA256_HMAC:
                session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA256;
                break;
        case RTE_CRYPTO_AUTH_SHA512_HMAC:
                session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA512;
                break;
        case RTE_CRYPTO_AUTH_SHA224_HMAC:
                session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA224;
                break;
        case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
                session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
                break;
        case RTE_CRYPTO_AUTH_AES_GCM:
                session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
                break;
        case RTE_CRYPTO_AUTH_SNOW3G_UIA2:
                session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2;
                break;
        case RTE_CRYPTO_AUTH_MD5_HMAC:
                session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_MD5;
                break;
        case RTE_CRYPTO_AUTH_NULL:
        case RTE_CRYPTO_AUTH_SHA1:
        case RTE_CRYPTO_AUTH_SHA256:
        case RTE_CRYPTO_AUTH_SHA512:
        case RTE_CRYPTO_AUTH_SHA224:
        case RTE_CRYPTO_AUTH_SHA384:
        case RTE_CRYPTO_AUTH_SHA384_HMAC:
        case RTE_CRYPTO_AUTH_MD5:
        case RTE_CRYPTO_AUTH_AES_CCM:
        case RTE_CRYPTO_AUTH_AES_GMAC:
        case RTE_CRYPTO_AUTH_KASUMI_F9:
        case RTE_CRYPTO_AUTH_AES_CMAC:
        case RTE_CRYPTO_AUTH_AES_CBC_MAC:
        case RTE_CRYPTO_AUTH_ZUC_EIA3:
                PMD_DRV_LOG(ERR, "Crypto: Unsupported hash alg %u",
                                auth_xform->algo);
                goto error_out;
        default:
                PMD_DRV_LOG(ERR, "Crypto: Undefined Hash algo %u specified",
                                auth_xform->algo);
                goto error_out;
        }
        cipher_xform = qat_get_cipher_xform(xform);

        if ((session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_128) ||
                        (session->qat_hash_alg ==
                                ICP_QAT_HW_AUTH_ALGO_GALOIS_64)) {
                if (qat_alg_aead_session_create_content_desc_auth(session,
                                cipher_xform->key.data,
                                cipher_xform->key.length,
                                auth_xform->add_auth_data_length,
                                auth_xform->digest_length,
                                auth_xform->op))
                        goto error_out;
        } else {
                if (qat_alg_aead_session_create_content_desc_auth(session,
                                auth_xform->key.data,
                                auth_xform->key.length,
                                auth_xform->add_auth_data_length,
                                auth_xform->digest_length,
                                auth_xform->op))
                        goto error_out;
        }
        return session;

error_out:
        if (internals->sess_mp != NULL)
                rte_mempool_put(internals->sess_mp, session);
        return NULL;
}

unsigned qat_crypto_sym_get_session_private_size(
                struct rte_cryptodev *dev __rte_unused)
{
        return RTE_ALIGN_CEIL(sizeof(struct qat_session), 8);
}

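/*
 * Enqueue a burst of crypto ops on the TX ring. Ring space is claimed
 * up front by atomically adding to the in-flight counter (backing off
 * by any overflow), descriptors are written in place, and the hardware
 * tail CSR is kicked once at the end of the burst.
 */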
uint16_t
qat_pmd_enqueue_op_burst(void *qp, struct rte_crypto_op **ops,
                uint16_t nb_ops)
{
        register struct qat_queue *queue;
        struct qat_qp *tmp_qp = (struct qat_qp *)qp;
        register uint32_t nb_ops_sent = 0;
        register struct rte_crypto_op **cur_op = ops;
        register int ret;
        uint16_t nb_ops_possible = nb_ops;
        register uint8_t *base_addr;
        register uint32_t tail;
        int overflow;

        if (unlikely(nb_ops == 0))
                return 0;

        /* read params used a lot in main loop into registers */
        queue = &(tmp_qp->tx_q);
        base_addr = (uint8_t *)queue->base_addr;
        tail = queue->tail;

        /* Find how many can actually fit on the ring */
        overflow = rte_atomic16_add_return(&tmp_qp->inflights16, nb_ops)
                                - queue->max_inflights;
        if (overflow > 0) {
                rte_atomic16_sub(&tmp_qp->inflights16, overflow);
                nb_ops_possible = nb_ops - overflow;
                if (nb_ops_possible == 0)
                        return 0;
        }

        while (nb_ops_sent != nb_ops_possible) {
                ret = qat_write_hw_desc_entry(*cur_op, base_addr + tail);
                if (ret != 0) {
                        tmp_qp->stats.enqueue_err_count++;
                        if (nb_ops_sent == 0)
                                return 0;
                        goto kick_tail;
                }

                tail = adf_modulo(tail + queue->msg_size, queue->modulo);
                nb_ops_sent++;
                cur_op++;
        }
kick_tail:
        WRITE_CSR_RING_TAIL(tmp_qp->mmap_bar_addr, queue->hw_bundle_number,
                        queue->hw_queue_number, tail);
        queue->tail = tail;
        tmp_qp->stats.enqueued_count += nb_ops_sent;
        return nb_ops_sent;
}

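/*
 * Dequeue completed responses from the RX ring: translate each firmware
 * status into an rte_crypto_op status, recover the op pointer from the
 * opaque field, re-arm the slot with the empty signature, then update
 * the head CSR and in-flight count once per burst.
 */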
uint16_t
qat_pmd_dequeue_op_burst(void *qp, struct rte_crypto_op **ops,
                uint16_t nb_ops)
{
        struct qat_queue *queue;
        struct qat_qp *tmp_qp = (struct qat_qp *)qp;
        uint32_t msg_counter = 0;
        struct rte_crypto_op *rx_op;
        struct icp_qat_fw_comn_resp *resp_msg;

        queue = &(tmp_qp->rx_q);
        resp_msg = (struct icp_qat_fw_comn_resp *)
                        ((uint8_t *)queue->base_addr + queue->head);

        while (*(uint32_t *)resp_msg != ADF_RING_EMPTY_SIG &&
                        msg_counter != nb_ops) {
                rx_op = (struct rte_crypto_op *)(uintptr_t)
                                (resp_msg->opaque_data);

#ifdef RTE_LIBRTE_PMD_QAT_DEBUG_RX
                rte_hexdump(stdout, "qat_response:", (uint8_t *)resp_msg,
                                sizeof(struct icp_qat_fw_comn_resp));
#endif
                if (ICP_QAT_FW_COMN_STATUS_FLAG_OK !=
                                ICP_QAT_FW_COMN_RESP_CRYPTO_STAT_GET(
                                        resp_msg->comn_hdr.comn_status)) {
                        rx_op->status = RTE_CRYPTO_OP_STATUS_AUTH_FAILED;
                } else {
                        rx_op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
                }
                *(uint32_t *)resp_msg = ADF_RING_EMPTY_SIG;
                queue->head = adf_modulo(queue->head +
                                queue->msg_size,
                                ADF_RING_SIZE_MODULO(queue->queue_size));
                resp_msg = (struct icp_qat_fw_comn_resp *)
                                        ((uint8_t *)queue->base_addr +
                                                        queue->head);
                *ops = rx_op;
                ops++;
                msg_counter++;
        }
        if (msg_counter > 0) {
                WRITE_CSR_RING_HEAD(tmp_qp->mmap_bar_addr,
                                        queue->hw_bundle_number,
                                        queue->hw_queue_number, queue->head);
                rte_atomic16_sub(&tmp_qp->inflights16, msg_counter);
                tmp_qp->stats.dequeued_count += msg_counter;
        }
        return msg_counter;
}

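/*
 * Translate one rte_crypto_op into a firmware request message, starting
 * from the template built at session setup (copied with rte_mov128) and
 * patching in per-op addresses, lengths, IV, digest and AAD. SNOW 3G
 * lengths and offsets arrive in bits and are converted to bytes here.
 */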
static inline int
qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg)
{
        struct qat_session *ctx;
        struct icp_qat_fw_la_cipher_req_params *cipher_param;
        struct icp_qat_fw_la_auth_req_params *auth_param;
        register struct icp_qat_fw_la_bulk_req *qat_req;

#ifdef RTE_LIBRTE_PMD_QAT_DEBUG_TX
        if (unlikely(op->type != RTE_CRYPTO_OP_TYPE_SYMMETRIC)) {
                PMD_DRV_LOG(ERR, "QAT PMD only supports symmetric crypto "
                                "operation requests, op (%p) is not a "
                                "symmetric operation.", op);
                return -EINVAL;
        }
#endif
        if (unlikely(op->sym->sess_type == RTE_CRYPTO_SYM_OP_SESSIONLESS)) {
                PMD_DRV_LOG(ERR, "QAT PMD only supports session oriented"
                                " requests, op (%p) is sessionless.", op);
                return -EINVAL;
        }

        if (unlikely(op->sym->session->dev_type != RTE_CRYPTODEV_QAT_SYM_PMD)) {
                PMD_DRV_LOG(ERR, "Session was not created for this device");
                return -EINVAL;
        }

        ctx = (struct qat_session *)op->sym->session->_private;
        qat_req = (struct icp_qat_fw_la_bulk_req *)out_msg;
        rte_mov128((uint8_t *)qat_req, (const uint8_t *)&(ctx->fw_req));
        qat_req->comn_mid.opaque_data = (uint64_t)(uintptr_t)op;

        qat_req->comn_mid.dst_length =
                qat_req->comn_mid.src_length =
                                rte_pktmbuf_data_len(op->sym->m_src);

        qat_req->comn_mid.dest_data_addr =
                qat_req->comn_mid.src_data_addr =
                            rte_pktmbuf_mtophys(op->sym->m_src);

        if (unlikely(op->sym->m_dst != NULL)) {
                qat_req->comn_mid.dest_data_addr =
                                rte_pktmbuf_mtophys(op->sym->m_dst);
                qat_req->comn_mid.dst_length =
                                rte_pktmbuf_data_len(op->sym->m_dst);
        }

        cipher_param = (void *)&qat_req->serv_specif_rqpars;
        auth_param = (void *)((uint8_t *)cipher_param + sizeof(*cipher_param));

        cipher_param->cipher_length = op->sym->cipher.data.length;
        cipher_param->cipher_offset = op->sym->cipher.data.offset;
        if (ctx->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
                if (unlikely((cipher_param->cipher_length % BYTE_LENGTH != 0) ||
                                (cipher_param->cipher_offset
                                        % BYTE_LENGTH != 0))) {
                        PMD_DRV_LOG(ERR, " For Snow3g, QAT PMD only "
                                "supports byte aligned values");
                        op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
                        return -EINVAL;
                }
                cipher_param->cipher_length >>= 3;
                cipher_param->cipher_offset >>= 3;
        }

        if (op->sym->cipher.iv.length && (op->sym->cipher.iv.length <=
                        sizeof(cipher_param->u.cipher_IV_array))) {
                rte_memcpy(cipher_param->u.cipher_IV_array,
                                op->sym->cipher.iv.data,
                                op->sym->cipher.iv.length);
        } else {
                ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(
                                qat_req->comn_hdr.serv_specif_flags,
                                ICP_QAT_FW_CIPH_IV_64BIT_PTR);
                cipher_param->u.s.cipher_IV_ptr = op->sym->cipher.iv.phys_addr;
        }
        if (op->sym->auth.digest.phys_addr) {
                ICP_QAT_FW_LA_DIGEST_IN_BUFFER_SET(
                                qat_req->comn_hdr.serv_specif_flags,
                                ICP_QAT_FW_LA_NO_DIGEST_IN_BUFFER);
                auth_param->auth_res_addr = op->sym->auth.digest.phys_addr;
        }
        auth_param->auth_off = op->sym->auth.data.offset;
        auth_param->auth_len = op->sym->auth.data.length;
        if (ctx->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2) {
                if (unlikely((auth_param->auth_off % BYTE_LENGTH != 0) ||
                                (auth_param->auth_len % BYTE_LENGTH != 0))) {
                        PMD_DRV_LOG(ERR, " For Snow3g, QAT PMD only "
                                "supports byte aligned values");
                        op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
                        return -EINVAL;
                }
                auth_param->auth_off >>= 3;
                auth_param->auth_len >>= 3;
        }
        auth_param->u1.aad_adr = op->sym->auth.aad.phys_addr;

        if (ctx->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_128 ||
                        ctx->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_64) {
                if (op->sym->cipher.iv.length == 12) {
                        /*
                         * For GCM a 12 byte IV is allowed,
                         * but we need to inform the f/w
                         */
                        ICP_QAT_FW_LA_GCM_IV_LEN_FLAG_SET(
                                qat_req->comn_hdr.serv_specif_flags,
                                ICP_QAT_FW_LA_GCM_IV_LEN_12_OCTETS);
                }
        }

#ifdef RTE_LIBRTE_PMD_QAT_DEBUG_TX
        rte_hexdump(stdout, "qat_req:", qat_req,
                        sizeof(struct icp_qat_fw_la_bulk_req));
        rte_hexdump(stdout, "src_data:",
                        rte_pktmbuf_mtod(op->sym->m_src, uint8_t*),
                        rte_pktmbuf_data_len(op->sym->m_src));
        rte_hexdump(stdout, "iv:", op->sym->cipher.iv.data,
                        op->sym->cipher.iv.length);
        rte_hexdump(stdout, "digest:", op->sym->auth.digest.data,
                        op->sym->auth.digest.length);
        rte_hexdump(stdout, "aad:", op->sym->auth.aad.data,
                        op->sym->auth.aad.length);
#endif
        return 0;
}

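/*
 * Cheap modulo for power-of-two ring sizes: 'shift' is log2 of the ring
 * size, so this returns data % (1 << shift) without a divide.
 */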
static inline uint32_t adf_modulo(uint32_t data, uint32_t shift)
{
        uint32_t div = data >> shift;
        uint32_t mult = div << shift;

        return data - mult;
}

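/*
 * Mempool object-init callback: pre-compute the physical address of the
 * content descriptor inside each session so it never has to be derived
 * on the data path.
 */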
void qat_crypto_sym_session_init(struct rte_mempool *mp, void *sym_sess)
{
        struct rte_cryptodev_sym_session *sess = sym_sess;
        struct qat_session *s = (void *)sess->_private;

        PMD_INIT_FUNC_TRACE();
        s->cd_paddr = rte_mempool_virt2phy(mp, sess) +
                offsetof(struct qat_session, cd) +
                offsetof(struct rte_cryptodev_sym_session, _private);
}

int qat_dev_config(__rte_unused struct rte_cryptodev *dev)
{
        PMD_INIT_FUNC_TRACE();
        return -ENOTSUP;
}

int qat_dev_start(__rte_unused struct rte_cryptodev *dev)
{
        PMD_INIT_FUNC_TRACE();
        return 0;
}

void qat_dev_stop(__rte_unused struct rte_cryptodev *dev)
{
        PMD_INIT_FUNC_TRACE();
}

int qat_dev_close(struct rte_cryptodev *dev)
{
        int i, ret;

        PMD_INIT_FUNC_TRACE();

        for (i = 0; i < dev->data->nb_queue_pairs; i++) {
                ret = qat_crypto_sym_qp_release(dev, i);
                if (ret < 0)
                        return ret;
        }

        return 0;
}

void qat_dev_info_get(struct rte_cryptodev *dev,
                                struct rte_cryptodev_info *info)
{
        struct qat_pmd_private *internals = dev->data->dev_private;

        PMD_INIT_FUNC_TRACE();
        if (info != NULL) {
                info->max_nb_queue_pairs =
                                ADF_NUM_SYM_QPS_PER_BUNDLE *
                                ADF_NUM_BUNDLES_PER_DEV;
                info->feature_flags = dev->feature_flags;
                info->capabilities = qat_pmd_capabilities;
                info->sym.max_nb_sessions = internals->max_nb_sessions;
                info->dev_type = RTE_CRYPTODEV_QAT_SYM_PMD;
        }
}

void qat_crypto_sym_stats_get(struct rte_cryptodev *dev,
                struct rte_cryptodev_stats *stats)
{
        int i;
        struct qat_qp **qp = (struct qat_qp **)(dev->data->queue_pairs);

        PMD_INIT_FUNC_TRACE();
        if (stats == NULL) {
                PMD_DRV_LOG(ERR, "invalid stats ptr NULL");
                return;
        }
        for (i = 0; i < dev->data->nb_queue_pairs; i++) {
                if (qp[i] == NULL) {
                        PMD_DRV_LOG(DEBUG, "Uninitialised queue pair");
                        continue;
                }

                stats->enqueued_count += qp[i]->stats.enqueued_count;
                stats->dequeued_count += qp[i]->stats.dequeued_count;
                stats->enqueue_err_count += qp[i]->stats.enqueue_err_count;
                stats->dequeue_err_count += qp[i]->stats.dequeue_err_count;
        }
}

void qat_crypto_sym_stats_reset(struct rte_cryptodev *dev)
{
        int i;
        struct qat_qp **qp = (struct qat_qp **)(dev->data->queue_pairs);

        PMD_INIT_FUNC_TRACE();
        for (i = 0; i < dev->data->nb_queue_pairs; i++)
                memset(&(qp[i]->stats), 0, sizeof(qp[i]->stats));
        PMD_DRV_LOG(DEBUG, "QAT crypto: stats cleared");
}