cryptodev: change device configuration API
[dpdk.git] / drivers / crypto / qat / qat_crypto.c
1 /*-
2  *   BSD LICENSE
3  *
4  *   Copyright(c) 2015-2016 Intel Corporation. All rights reserved.
5  *   All rights reserved.
6  *
7  *   Redistribution and use in source and binary forms, with or without
8  *   modification, are permitted provided that the following conditions
9  *   are met:
10  *
11  *       * Redistributions of source code must retain the above copyright
12  *         notice, this list of conditions and the following disclaimer.
13  *       * Redistributions in binary form must reproduce the above copyright
14  *         notice, this list of conditions and the following disclaimer in
15  *         the documentation and/or other materials provided with the
16  *         distribution.
17  *       * Neither the name of Intel Corporation nor the names of its
18  *         contributors may be used to endorse or promote products derived
19  *         from this software without specific prior written permission.
20  *
21  *   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
22  *   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
23  *   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
24  *   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
25  *   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
26  *   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
27  *   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
28  *   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
29  *   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
30  *   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
31  *   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
32  */
33
34 #include <stdio.h>
35 #include <stdlib.h>
36 #include <strings.h>
37 #include <string.h>
38 #include <inttypes.h>
39 #include <errno.h>
40 #include <sys/queue.h>
41 #include <stdarg.h>
42
43 #include <rte_common.h>
44 #include <rte_log.h>
45 #include <rte_debug.h>
46 #include <rte_memory.h>
47 #include <rte_memzone.h>
48 #include <rte_tailq.h>
49 #include <rte_ether.h>
50 #include <rte_malloc.h>
51 #include <rte_launch.h>
52 #include <rte_eal.h>
53 #include <rte_per_lcore.h>
54 #include <rte_lcore.h>
55 #include <rte_atomic.h>
56 #include <rte_branch_prediction.h>
57 #include <rte_mempool.h>
58 #include <rte_mbuf.h>
59 #include <rte_string_fns.h>
60 #include <rte_spinlock.h>
61 #include <rte_hexdump.h>
62
63 #include "qat_logs.h"
64 #include "qat_algs.h"
65 #include "qat_crypto.h"
66 #include "adf_transport_access_macros.h"
67
68 #define BYTE_LENGTH    8
69
/*
 * Capability table advertised by the QAT PMD through the cryptodev
 * info API.  Each entry describes one supported symmetric transform
 * (auth or cipher) together with the key/digest/IV/AAD size ranges
 * the hardware accepts.  The list is terminated by the
 * RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST() sentinel.
 */
static const struct rte_cryptodev_capabilities qat_pmd_capabilities[] = {
	{	/* SHA1 HMAC */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
			{.auth = {
				.algo = RTE_CRYPTO_AUTH_SHA1_HMAC,
				.block_size = 64,
				.key_size = {
					.min = 64,
					.max = 64,
					.increment = 0
				},
				.digest_size = {
					.min = 20,
					.max = 20,
					.increment = 0
				},
				.aad_size = { 0 }
			}, }
		}, }
	},
	{	/* SHA224 HMAC */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
			{.auth = {
				.algo = RTE_CRYPTO_AUTH_SHA224_HMAC,
				.block_size = 64,
				.key_size = {
					.min = 64,
					.max = 64,
					.increment = 0
				},
				.digest_size = {
					.min = 28,
					.max = 28,
					.increment = 0
				},
				.aad_size = { 0 }
			}, }
		}, }
	},
	{	/* SHA256 HMAC */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
			{.auth = {
				.algo = RTE_CRYPTO_AUTH_SHA256_HMAC,
				.block_size = 64,
				.key_size = {
					.min = 64,
					.max = 64,
					.increment = 0
				},
				.digest_size = {
					.min = 32,
					.max = 32,
					.increment = 0
				},
				.aad_size = { 0 }
			}, }
		}, }
	},
	{	/* SHA384 HMAC */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
			{.auth = {
				.algo = RTE_CRYPTO_AUTH_SHA384_HMAC,
				.block_size = 64,
				.key_size = {
					.min = 128,
					.max = 128,
					.increment = 0
				},
				.digest_size = {
					.min = 48,
					.max = 48,
					.increment = 0
				},
				.aad_size = { 0 }
			}, }
		}, }
	},
	{	/* SHA512 HMAC */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
			{.auth = {
				.algo = RTE_CRYPTO_AUTH_SHA512_HMAC,
				.block_size = 128,
				.key_size = {
					.min = 128,
					.max = 128,
					.increment = 0
				},
				.digest_size = {
					.min = 64,
					.max = 64,
					.increment = 0
				},
				.aad_size = { 0 }
			}, }
		}, }
	},
	{	/* MD5 HMAC */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
			{.auth = {
				.algo = RTE_CRYPTO_AUTH_MD5_HMAC,
				.block_size = 64,
				.key_size = {
					.min = 8,
					.max = 64,
					.increment = 8
				},
				.digest_size = {
					.min = 16,
					.max = 16,
					.increment = 0
				},
				.aad_size = { 0 }
			}, }
		}, }
	},
	{	/* AES XCBC MAC */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
			{.auth = {
				.algo = RTE_CRYPTO_AUTH_AES_XCBC_MAC,
				.block_size = 16,
				.key_size = {
					.min = 16,
					.max = 16,
					.increment = 0
				},
				.digest_size = {
					.min = 16,
					.max = 16,
					.increment = 0
				},
				.aad_size = { 0 }
			}, }
		}, }
	},
	{	/* AES GCM (AUTH) */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
			{.auth = {
				.algo = RTE_CRYPTO_AUTH_AES_GCM,
				.block_size = 16,
				.key_size = {
					.min = 16,
					.max = 32,
					.increment = 8
				},
				.digest_size = {
					.min = 8,
					.max = 16,
					.increment = 4
				},
				.aad_size = {
					.min = 8,
					.max = 12,
					.increment = 4
				}
			}, }
		}, }
	},
	{	/* AES GMAC (AUTH) */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
			{.auth = {
				.algo = RTE_CRYPTO_AUTH_AES_GMAC,
				.block_size = 16,
				.key_size = {
					.min = 16,
					.max = 32,
					.increment = 8
				},
				.digest_size = {
					.min = 8,
					.max = 16,
					.increment = 4
				},
				.aad_size = {
					.min = 1,
					.max = 65535,
					.increment = 1
				}
			}, }
		}, }
	},
	{	/* SNOW 3G (UIA2) */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
			{.auth = {
				.algo = RTE_CRYPTO_AUTH_SNOW3G_UIA2,
				.block_size = 16,
				.key_size = {
					.min = 16,
					.max = 16,
					.increment = 0
				},
				.digest_size = {
					.min = 4,
					.max = 4,
					.increment = 0
				},
				.aad_size = {
					.min = 16,
					.max = 16,
					.increment = 0
				}
			}, }
		}, }
	},
	{	/* AES GCM (CIPHER) */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
			{.cipher = {
				.algo = RTE_CRYPTO_CIPHER_AES_GCM,
				.block_size = 16,
				.key_size = {
					.min = 16,
					.max = 32,
					.increment = 8
				},
				.iv_size = {
					.min = 12,
					.max = 12,
					.increment = 0
				}
			}, }
		}, }
	},
	{	/* AES CBC */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
			{.cipher = {
				.algo = RTE_CRYPTO_CIPHER_AES_CBC,
				.block_size = 16,
				.key_size = {
					.min = 16,
					.max = 32,
					.increment = 8
				},
				.iv_size = {
					.min = 16,
					.max = 16,
					.increment = 0
				}
			}, }
		}, }
	},
	{	/* SNOW 3G (UEA2) */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
			{.cipher = {
				.algo = RTE_CRYPTO_CIPHER_SNOW3G_UEA2,
				.block_size = 16,
				.key_size = {
					.min = 16,
					.max = 16,
					.increment = 0
				},
				.iv_size = {
					.min = 16,
					.max = 16,
					.increment = 0
				}
			}, }
		}, }
	},
	{	/* AES CTR */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
			{.cipher = {
				.algo = RTE_CRYPTO_CIPHER_AES_CTR,
				.block_size = 16,
				.key_size = {
					.min = 16,
					.max = 32,
					.increment = 8
				},
				.iv_size = {
					.min = 16,
					.max = 16,
					.increment = 0
				}
			}, }
		}, }
	},
	{	/* NULL (AUTH) */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
			{.auth = {
				.algo = RTE_CRYPTO_AUTH_NULL,
				.block_size = 1,
				.key_size = {
					.min = 0,
					.max = 0,
					.increment = 0
				},
				.digest_size = {
					.min = 0,
					.max = 0,
					.increment = 0
				},
				.aad_size = { 0 }
			}, },
		}, },
	},
	{	/* NULL (CIPHER) */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
			{.cipher = {
				.algo = RTE_CRYPTO_CIPHER_NULL,
				.block_size = 1,
				.key_size = {
					.min = 0,
					.max = 0,
					.increment = 0
				},
				.iv_size = {
					.min = 0,
					.max = 0,
					.increment = 0
				}
			}, },
		}, }
	},
	{	/* KASUMI (F8) */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
			{.cipher = {
				.algo = RTE_CRYPTO_CIPHER_KASUMI_F8,
				.block_size = 8,
				.key_size = {
					.min = 16,
					.max = 16,
					.increment = 0
				},
				.iv_size = {
					.min = 8,
					.max = 8,
					.increment = 0
				}
			}, }
		}, }
	},
	{	/* KASUMI (F9) */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
			{.auth = {
				.algo = RTE_CRYPTO_AUTH_KASUMI_F9,
				.block_size = 8,
				.key_size = {
					.min = 16,
					.max = 16,
					.increment = 0
				},
				.digest_size = {
					.min = 4,
					.max = 4,
					.increment = 0
				},
				.aad_size = {
					.min = 8,
					.max = 8,
					.increment = 0
				}
			}, }
		}, }
	},
	{	/* 3DES CBC */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
			{.cipher = {
				.algo = RTE_CRYPTO_CIPHER_3DES_CBC,
				.block_size = 8,
				.key_size = {
					.min = 16,
					.max = 24,
					.increment = 8
				},
				.iv_size = {
					.min = 8,
					.max = 8,
					.increment = 0
				}
			}, }
		}, }
	},
	{	/* 3DES CTR */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
			{.cipher = {
				.algo = RTE_CRYPTO_CIPHER_3DES_CTR,
				.block_size = 8,
				.key_size = {
					.min = 16,
					.max = 24,
					.increment = 8
				},
				.iv_size = {
					.min = 8,
					.max = 8,
					.increment = 0
				}
			}, }
		}, }
	},
	{	/* DES CBC */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
			{.cipher = {
				.algo = RTE_CRYPTO_CIPHER_DES_CBC,
				.block_size = 8,
				.key_size = {
					.min = 8,
					.max = 8,
					.increment = 0
				},
				.iv_size = {
					.min = 8,
					.max = 8,
					.increment = 0
				}
			}, }
		}, }
	},
	RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
};
521
/* Forward declarations for static helpers defined later in this file. */
static inline uint32_t
adf_modulo(uint32_t data, uint32_t shift);

static inline int
qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg,
		struct qat_crypto_op_cookie *qat_op_cookie);
528
529 void qat_crypto_sym_clear_session(struct rte_cryptodev *dev,
530                 void *session)
531 {
532         struct qat_session *sess = session;
533         phys_addr_t cd_paddr;
534
535         PMD_INIT_FUNC_TRACE();
536         if (session) {
537                 cd_paddr = sess->cd_paddr;
538                 memset(sess, 0, qat_crypto_sym_get_session_private_size(dev));
539                 sess->cd_paddr = cd_paddr;
540         } else
541                 PMD_DRV_LOG(ERR, "NULL session");
542 }
543
544 static int
545 qat_get_cmd_id(const struct rte_crypto_sym_xform *xform)
546 {
547         /* Cipher Only */
548         if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER && xform->next == NULL)
549                 return ICP_QAT_FW_LA_CMD_CIPHER;
550
551         /* Authentication Only */
552         if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH && xform->next == NULL)
553                 return ICP_QAT_FW_LA_CMD_AUTH;
554
555         if (xform->next == NULL)
556                 return -1;
557
558         /* Cipher then Authenticate */
559         if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
560                         xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
561                 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
562
563         /* Authenticate then Cipher */
564         if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH &&
565                         xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
566                 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
567
568         return -1;
569 }
570
571 static struct rte_crypto_auth_xform *
572 qat_get_auth_xform(struct rte_crypto_sym_xform *xform)
573 {
574         do {
575                 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH)
576                         return &xform->auth;
577
578                 xform = xform->next;
579         } while (xform);
580
581         return NULL;
582 }
583
584 static struct rte_crypto_cipher_xform *
585 qat_get_cipher_xform(struct rte_crypto_sym_xform *xform)
586 {
587         do {
588                 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
589                         return &xform->cipher;
590
591                 xform = xform->next;
592         } while (xform);
593
594         return NULL;
595 }
596 void *
597 qat_crypto_sym_configure_session_cipher(struct rte_cryptodev *dev,
598                 struct rte_crypto_sym_xform *xform, void *session_private)
599 {
600         struct qat_pmd_private *internals = dev->data->dev_private;
601
602         struct qat_session *session = session_private;
603
604         struct rte_crypto_cipher_xform *cipher_xform = NULL;
605
606         /* Get cipher xform from crypto xform chain */
607         cipher_xform = qat_get_cipher_xform(xform);
608
609         switch (cipher_xform->algo) {
610         case RTE_CRYPTO_CIPHER_AES_CBC:
611                 if (qat_alg_validate_aes_key(cipher_xform->key.length,
612                                 &session->qat_cipher_alg) != 0) {
613                         PMD_DRV_LOG(ERR, "Invalid AES cipher key size");
614                         goto error_out;
615                 }
616                 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
617                 break;
618         case RTE_CRYPTO_CIPHER_AES_GCM:
619                 if (qat_alg_validate_aes_key(cipher_xform->key.length,
620                                 &session->qat_cipher_alg) != 0) {
621                         PMD_DRV_LOG(ERR, "Invalid AES cipher key size");
622                         goto error_out;
623                 }
624                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
625                 break;
626         case RTE_CRYPTO_CIPHER_AES_CTR:
627                 if (qat_alg_validate_aes_key(cipher_xform->key.length,
628                                 &session->qat_cipher_alg) != 0) {
629                         PMD_DRV_LOG(ERR, "Invalid AES cipher key size");
630                         goto error_out;
631                 }
632                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
633                 break;
634         case RTE_CRYPTO_CIPHER_SNOW3G_UEA2:
635                 if (qat_alg_validate_snow3g_key(cipher_xform->key.length,
636                                         &session->qat_cipher_alg) != 0) {
637                         PMD_DRV_LOG(ERR, "Invalid SNOW 3G cipher key size");
638                         goto error_out;
639                 }
640                 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
641                 break;
642         case RTE_CRYPTO_CIPHER_NULL:
643                 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
644                 break;
645         case RTE_CRYPTO_CIPHER_KASUMI_F8:
646                 if (qat_alg_validate_kasumi_key(cipher_xform->key.length,
647                                         &session->qat_cipher_alg) != 0) {
648                         PMD_DRV_LOG(ERR, "Invalid KASUMI cipher key size");
649                         goto error_out;
650                 }
651                 session->qat_mode = ICP_QAT_HW_CIPHER_F8_MODE;
652                 break;
653         case RTE_CRYPTO_CIPHER_3DES_CBC:
654                 if (qat_alg_validate_3des_key(cipher_xform->key.length,
655                                 &session->qat_cipher_alg) != 0) {
656                         PMD_DRV_LOG(ERR, "Invalid 3DES cipher key size");
657                         goto error_out;
658                 }
659                 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
660                 break;
661         case RTE_CRYPTO_CIPHER_DES_CBC:
662                 if (qat_alg_validate_des_key(cipher_xform->key.length,
663                                 &session->qat_cipher_alg) != 0) {
664                         PMD_DRV_LOG(ERR, "Invalid DES cipher key size");
665                         goto error_out;
666                 }
667                 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
668                 break;
669         case RTE_CRYPTO_CIPHER_3DES_CTR:
670                 if (qat_alg_validate_3des_key(cipher_xform->key.length,
671                                 &session->qat_cipher_alg) != 0) {
672                         PMD_DRV_LOG(ERR, "Invalid 3DES cipher key size");
673                         goto error_out;
674                 }
675                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
676                 break;
677         case RTE_CRYPTO_CIPHER_3DES_ECB:
678         case RTE_CRYPTO_CIPHER_AES_ECB:
679         case RTE_CRYPTO_CIPHER_AES_CCM:
680         case RTE_CRYPTO_CIPHER_AES_F8:
681         case RTE_CRYPTO_CIPHER_AES_XTS:
682         case RTE_CRYPTO_CIPHER_ARC4:
683         case RTE_CRYPTO_CIPHER_ZUC_EEA3:
684                 PMD_DRV_LOG(ERR, "Crypto QAT PMD: Unsupported Cipher alg %u",
685                                 cipher_xform->algo);
686                 goto error_out;
687         default:
688                 PMD_DRV_LOG(ERR, "Crypto: Undefined Cipher specified %u\n",
689                                 cipher_xform->algo);
690                 goto error_out;
691         }
692
693         if (cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
694                 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
695         else
696                 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
697
698         if (qat_alg_aead_session_create_content_desc_cipher(session,
699                                                 cipher_xform->key.data,
700                                                 cipher_xform->key.length))
701                 goto error_out;
702
703         return session;
704
705 error_out:
706         rte_mempool_put(internals->sess_mp, session);
707         return NULL;
708 }
709
710
711 void *
712 qat_crypto_sym_configure_session(struct rte_cryptodev *dev,
713                 struct rte_crypto_sym_xform *xform, void *session_private)
714 {
715         struct qat_pmd_private *internals = dev->data->dev_private;
716
717         struct qat_session *session = session_private;
718
719         int qat_cmd_id;
720
721         PMD_INIT_FUNC_TRACE();
722
723         /* Get requested QAT command id */
724         qat_cmd_id = qat_get_cmd_id(xform);
725         if (qat_cmd_id < 0 || qat_cmd_id >= ICP_QAT_FW_LA_CMD_DELIMITER) {
726                 PMD_DRV_LOG(ERR, "Unsupported xform chain requested");
727                 goto error_out;
728         }
729         session->qat_cmd = (enum icp_qat_fw_la_cmd_id)qat_cmd_id;
730         switch (session->qat_cmd) {
731         case ICP_QAT_FW_LA_CMD_CIPHER:
732         session = qat_crypto_sym_configure_session_cipher(dev, xform, session);
733                 break;
734         case ICP_QAT_FW_LA_CMD_AUTH:
735         session = qat_crypto_sym_configure_session_auth(dev, xform, session);
736                 break;
737         case ICP_QAT_FW_LA_CMD_CIPHER_HASH:
738         session = qat_crypto_sym_configure_session_cipher(dev, xform, session);
739         session = qat_crypto_sym_configure_session_auth(dev, xform, session);
740                 break;
741         case ICP_QAT_FW_LA_CMD_HASH_CIPHER:
742         session = qat_crypto_sym_configure_session_auth(dev, xform, session);
743         session = qat_crypto_sym_configure_session_cipher(dev, xform, session);
744                 break;
745         case ICP_QAT_FW_LA_CMD_TRNG_GET_RANDOM:
746         case ICP_QAT_FW_LA_CMD_TRNG_TEST:
747         case ICP_QAT_FW_LA_CMD_SSL3_KEY_DERIVE:
748         case ICP_QAT_FW_LA_CMD_TLS_V1_1_KEY_DERIVE:
749         case ICP_QAT_FW_LA_CMD_TLS_V1_2_KEY_DERIVE:
750         case ICP_QAT_FW_LA_CMD_MGF1:
751         case ICP_QAT_FW_LA_CMD_AUTH_PRE_COMP:
752         case ICP_QAT_FW_LA_CMD_CIPHER_PRE_COMP:
753         case ICP_QAT_FW_LA_CMD_DELIMITER:
754         PMD_DRV_LOG(ERR, "Unsupported Service %u",
755                 session->qat_cmd);
756                 goto error_out;
757         default:
758         PMD_DRV_LOG(ERR, "Unsupported Service %u",
759                 session->qat_cmd);
760                 goto error_out;
761         }
762         return session;
763
764 error_out:
765         rte_mempool_put(internals->sess_mp, session);
766         return NULL;
767 }
768
769 struct qat_session *
770 qat_crypto_sym_configure_session_auth(struct rte_cryptodev *dev,
771                                 struct rte_crypto_sym_xform *xform,
772                                 struct qat_session *session_private)
773 {
774
775         struct qat_pmd_private *internals = dev->data->dev_private;
776         struct qat_session *session = session_private;
777         struct rte_crypto_auth_xform *auth_xform = NULL;
778         struct rte_crypto_cipher_xform *cipher_xform = NULL;
779         auth_xform = qat_get_auth_xform(xform);
780
781         switch (auth_xform->algo) {
782         case RTE_CRYPTO_AUTH_SHA1_HMAC:
783                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA1;
784                 break;
785         case RTE_CRYPTO_AUTH_SHA224_HMAC:
786                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA224;
787                 break;
788         case RTE_CRYPTO_AUTH_SHA256_HMAC:
789                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA256;
790                 break;
791         case RTE_CRYPTO_AUTH_SHA384_HMAC:
792                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA384;
793                 break;
794         case RTE_CRYPTO_AUTH_SHA512_HMAC:
795                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA512;
796                 break;
797         case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
798                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
799                 break;
800         case RTE_CRYPTO_AUTH_AES_GCM:
801                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
802                 break;
803         case RTE_CRYPTO_AUTH_AES_GMAC:
804                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
805                 break;
806         case RTE_CRYPTO_AUTH_SNOW3G_UIA2:
807                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2;
808                 break;
809         case RTE_CRYPTO_AUTH_MD5_HMAC:
810                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_MD5;
811                 break;
812         case RTE_CRYPTO_AUTH_NULL:
813                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_NULL;
814                 break;
815         case RTE_CRYPTO_AUTH_KASUMI_F9:
816                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_KASUMI_F9;
817                 break;
818         case RTE_CRYPTO_AUTH_SHA1:
819         case RTE_CRYPTO_AUTH_SHA256:
820         case RTE_CRYPTO_AUTH_SHA512:
821         case RTE_CRYPTO_AUTH_SHA224:
822         case RTE_CRYPTO_AUTH_SHA384:
823         case RTE_CRYPTO_AUTH_MD5:
824         case RTE_CRYPTO_AUTH_AES_CCM:
825         case RTE_CRYPTO_AUTH_AES_CMAC:
826         case RTE_CRYPTO_AUTH_AES_CBC_MAC:
827         case RTE_CRYPTO_AUTH_ZUC_EIA3:
828                 PMD_DRV_LOG(ERR, "Crypto: Unsupported hash alg %u",
829                                 auth_xform->algo);
830                 goto error_out;
831         default:
832                 PMD_DRV_LOG(ERR, "Crypto: Undefined Hash algo %u specified",
833                                 auth_xform->algo);
834                 goto error_out;
835         }
836         cipher_xform = qat_get_cipher_xform(xform);
837
838         if ((session->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_128) ||
839                         (session->qat_hash_alg ==
840                                 ICP_QAT_HW_AUTH_ALGO_GALOIS_64))  {
841                 if (qat_alg_aead_session_create_content_desc_auth(session,
842                                 cipher_xform->key.data,
843                                 cipher_xform->key.length,
844                                 auth_xform->add_auth_data_length,
845                                 auth_xform->digest_length,
846                                 auth_xform->op))
847                         goto error_out;
848         } else {
849                 if (qat_alg_aead_session_create_content_desc_auth(session,
850                                 auth_xform->key.data,
851                                 auth_xform->key.length,
852                                 auth_xform->add_auth_data_length,
853                                 auth_xform->digest_length,
854                                 auth_xform->op))
855                         goto error_out;
856         }
857         return session;
858
859 error_out:
860         if (internals->sess_mp != NULL)
861                 rte_mempool_put(internals->sess_mp, session);
862         return NULL;
863 }
864
/*
 * Report the per-session private data size the cryptodev framework must
 * reserve for this PMD: the QAT session struct rounded up to an 8-byte
 * boundary. Device-independent, hence the unused parameter.
 */
unsigned qat_crypto_sym_get_session_private_size(
		struct rte_cryptodev *dev __rte_unused)
{
	return RTE_ALIGN_CEIL(sizeof(struct qat_session), 8);
}
870
/*
 * Enqueue a burst of crypto ops onto the QAT TX hardware ring.
 *
 * Reserves ring slots up front with an atomic add on the in-flight
 * counter, trims the burst if the ring would overflow, writes one
 * firmware descriptor per op, then rings the doorbell (tail CSR) once
 * for the whole burst.
 *
 * Returns the number of ops actually enqueued (may be fewer than
 * nb_ops on ring overflow or descriptor-build failure).
 */
uint16_t
qat_pmd_enqueue_op_burst(void *qp, struct rte_crypto_op **ops,
		uint16_t nb_ops)
{
	register struct qat_queue *queue;
	struct qat_qp *tmp_qp = (struct qat_qp *)qp;
	register uint32_t nb_ops_sent = 0;
	register struct rte_crypto_op **cur_op = ops;
	register int ret;
	uint16_t nb_ops_possible = nb_ops;
	register uint8_t *base_addr;
	register uint32_t tail;
	int overflow;

	if (unlikely(nb_ops == 0))
		return 0;

	/* read params used a lot in main loop into registers */
	queue = &(tmp_qp->tx_q);
	base_addr = (uint8_t *)queue->base_addr;
	tail = queue->tail;

	/* Find how many can actually fit on the ring */
	overflow = rte_atomic16_add_return(&tmp_qp->inflights16, nb_ops)
				- queue->max_inflights;
	if (overflow > 0) {
		/* Give back the slots we could not claim. */
		rte_atomic16_sub(&tmp_qp->inflights16, overflow);
		nb_ops_possible = nb_ops - overflow;
		if (nb_ops_possible == 0)
			return 0;
	}

	while (nb_ops_sent != nb_ops_possible) {
		ret = qat_write_hw_desc_entry(*cur_op, base_addr + tail,
				tmp_qp->op_cookies[tail / queue->msg_size]);
		if (ret != 0) {
			tmp_qp->stats.enqueue_err_count++;
			/*
			 * This message cannot be enqueued: release the
			 * in-flight slots reserved for the ops not sent,
			 * then kick the tail for those already written.
			 */
			rte_atomic16_sub(&tmp_qp->inflights16,
					nb_ops_possible - nb_ops_sent);
			if (nb_ops_sent == 0)
				return 0;
			goto kick_tail;
		}

		/* Advance tail with wrap-around (modulo is a power of 2). */
		tail = adf_modulo(tail + queue->msg_size, queue->modulo);
		nb_ops_sent++;
		cur_op++;
	}
kick_tail:
	/* Single doorbell write for the whole burst. */
	WRITE_CSR_RING_TAIL(tmp_qp->mmap_bar_addr, queue->hw_bundle_number,
			queue->hw_queue_number, tail);
	queue->tail = tail;
	tmp_qp->stats.enqueued_count += nb_ops_sent;
	return nb_ops_sent;
}
930
/*
 * Dequeue up to nb_ops completed responses from the QAT RX ring.
 *
 * Walks response descriptors until an empty-ring signature or nb_ops is
 * reached. Each consumed slot is re-stamped with the empty signature so
 * the next poll stops correctly; the head CSR is written once at the
 * end and the in-flight counter decremented by the number dequeued.
 *
 * Returns the number of ops placed into the ops[] array.
 */
uint16_t
qat_pmd_dequeue_op_burst(void *qp, struct rte_crypto_op **ops,
		uint16_t nb_ops)
{
	struct qat_queue *queue;
	struct qat_qp *tmp_qp = (struct qat_qp *)qp;
	uint32_t msg_counter = 0;
	struct rte_crypto_op *rx_op;
	struct icp_qat_fw_comn_resp *resp_msg;

	queue = &(tmp_qp->rx_q);
	resp_msg = (struct icp_qat_fw_comn_resp *)
			((uint8_t *)queue->base_addr + queue->head);

	while (*(uint32_t *)resp_msg != ADF_RING_EMPTY_SIG &&
			msg_counter != nb_ops) {
		/* Recover the originating op from the opaque field. */
		rx_op = (struct rte_crypto_op *)(uintptr_t)
				(resp_msg->opaque_data);

#ifdef RTE_LIBRTE_PMD_QAT_DEBUG_RX
		rte_hexdump(stdout, "qat_response:", (uint8_t *)resp_msg,
			sizeof(struct icp_qat_fw_comn_resp));
#endif
		if (ICP_QAT_FW_COMN_STATUS_FLAG_OK !=
				ICP_QAT_FW_COMN_RESP_CRYPTO_STAT_GET(
					resp_msg->comn_hdr.comn_status)) {
			rx_op->status = RTE_CRYPTO_OP_STATUS_AUTH_FAILED;
		} else {
			rx_op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
		}
		/* Mark the slot consumed for the next poll. */
		*(uint32_t *)resp_msg = ADF_RING_EMPTY_SIG;
		queue->head = adf_modulo(queue->head +
				queue->msg_size,
				ADF_RING_SIZE_MODULO(queue->queue_size));
		resp_msg = (struct icp_qat_fw_comn_resp *)
					((uint8_t *)queue->base_addr +
							queue->head);
		*ops = rx_op;
		ops++;
		msg_counter++;
	}
	if (msg_counter > 0) {
		/* Publish the new head and release in-flight slots. */
		WRITE_CSR_RING_HEAD(tmp_qp->mmap_bar_addr,
					queue->hw_bundle_number,
					queue->hw_queue_number, queue->head);
		rte_atomic16_sub(&tmp_qp->inflights16, msg_counter);
		tmp_qp->stats.dequeued_count += msg_counter;
	}
	return msg_counter;
}
981
/*
 * Fill a QAT scatter-gather buffer list describing data_len bytes of a
 * (possibly chained) mbuf, starting at physical address buff_start.
 * buff_start may lie before the first segment's data start (e.g. after
 * 64-byte alignment of the DMA address), so the first entry's length is
 * extended to cover that lead-in.
 *
 * Returns 0 on success, -EINVAL if more than QAT_SGL_MAX_NUMBER entries
 * would be needed.
 *
 * NOTE(review): if the chain holds fewer than data_len bytes, the list
 * is silently shorter than requested — confirm callers guarantee the
 * chain covers data_len.
 */
static inline int
qat_sgl_fill_array(struct rte_mbuf *buf, uint64_t buff_start,
		struct qat_alg_buf_list *list, uint32_t data_len)
{
	int nr = 1;

	/* Bytes covered by entry 0: alignment lead-in plus first segment. */
	uint32_t buf_len = rte_pktmbuf_mtophys(buf) -
			buff_start + rte_pktmbuf_data_len(buf);

	list->bufers[0].addr = buff_start;
	list->bufers[0].resrvd = 0;
	list->bufers[0].len = buf_len;

	if (data_len <= buf_len) {
		/* Single entry suffices; trim it to exactly data_len. */
		list->num_bufs = nr;
		list->bufers[0].len = data_len;
		return 0;
	}

	buf = buf->next;
	while (buf) {
		if (unlikely(nr == QAT_SGL_MAX_NUMBER)) {
			PMD_DRV_LOG(ERR, "QAT PMD exceeded size of QAT SGL"
					" entry(%u)",
					QAT_SGL_MAX_NUMBER);
			return -EINVAL;
		}

		list->bufers[nr].len = rte_pktmbuf_data_len(buf);
		list->bufers[nr].resrvd = 0;
		list->bufers[nr].addr = rte_pktmbuf_mtophys(buf);

		buf_len += list->bufers[nr].len;
		buf = buf->next;

		/* Last segment needed: trim it and stop the walk. */
		if (buf_len > data_len) {
			list->bufers[nr].len -=
				buf_len - data_len;
			buf = NULL;
		}
		++nr;
	}
	list->num_bufs = nr;

	return 0;
}
1028
1029 static inline int
1030 qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg,
1031                 struct qat_crypto_op_cookie *qat_op_cookie)
1032 {
1033         int ret = 0;
1034         struct qat_session *ctx;
1035         struct icp_qat_fw_la_cipher_req_params *cipher_param;
1036         struct icp_qat_fw_la_auth_req_params *auth_param;
1037         register struct icp_qat_fw_la_bulk_req *qat_req;
1038         uint8_t do_auth = 0, do_cipher = 0;
1039         uint32_t cipher_len = 0, cipher_ofs = 0;
1040         uint32_t auth_len = 0, auth_ofs = 0;
1041         uint32_t min_ofs = 0;
1042         uint64_t src_buf_start = 0, dst_buf_start = 0;
1043         uint8_t do_sgl = 0;
1044
1045
1046 #ifdef RTE_LIBRTE_PMD_QAT_DEBUG_TX
1047         if (unlikely(op->type != RTE_CRYPTO_OP_TYPE_SYMMETRIC)) {
1048                 PMD_DRV_LOG(ERR, "QAT PMD only supports symmetric crypto "
1049                                 "operation requests, op (%p) is not a "
1050                                 "symmetric operation.", op);
1051                 return -EINVAL;
1052         }
1053 #endif
1054         if (unlikely(op->sym->sess_type == RTE_CRYPTO_SYM_OP_SESSIONLESS)) {
1055                 PMD_DRV_LOG(ERR, "QAT PMD only supports session oriented"
1056                                 " requests, op (%p) is sessionless.", op);
1057                 return -EINVAL;
1058         }
1059
1060         if (unlikely(op->sym->session->dev_type != RTE_CRYPTODEV_QAT_SYM_PMD)) {
1061                 PMD_DRV_LOG(ERR, "Session was not created for this device");
1062                 return -EINVAL;
1063         }
1064
1065         ctx = (struct qat_session *)op->sym->session->_private;
1066         qat_req = (struct icp_qat_fw_la_bulk_req *)out_msg;
1067         rte_mov128((uint8_t *)qat_req, (const uint8_t *)&(ctx->fw_req));
1068         qat_req->comn_mid.opaque_data = (uint64_t)(uintptr_t)op;
1069         cipher_param = (void *)&qat_req->serv_specif_rqpars;
1070         auth_param = (void *)((uint8_t *)cipher_param + sizeof(*cipher_param));
1071
1072         if (ctx->qat_cmd == ICP_QAT_FW_LA_CMD_HASH_CIPHER ||
1073                 ctx->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
1074                 do_auth = 1;
1075                 do_cipher = 1;
1076         } else if (ctx->qat_cmd == ICP_QAT_FW_LA_CMD_AUTH) {
1077                 do_auth = 1;
1078                 do_cipher = 0;
1079         } else if (ctx->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER) {
1080                 do_auth = 0;
1081                 do_cipher = 1;
1082         }
1083
1084         if (do_cipher) {
1085
1086                 if (ctx->qat_cipher_alg ==
1087                                          ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2 ||
1088                         ctx->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
1089
1090                         if (unlikely(
1091                                 (cipher_param->cipher_length % BYTE_LENGTH != 0)
1092                                  || (cipher_param->cipher_offset
1093                                                         % BYTE_LENGTH != 0))) {
1094                                 PMD_DRV_LOG(ERR,
1095                   "SNOW3G/KASUMI in QAT PMD only supports byte aligned values");
1096                                 op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
1097                                 return -EINVAL;
1098                         }
1099                         cipher_len = op->sym->cipher.data.length >> 3;
1100                         cipher_ofs = op->sym->cipher.data.offset >> 3;
1101
1102                 } else {
1103                         cipher_len = op->sym->cipher.data.length;
1104                         cipher_ofs = op->sym->cipher.data.offset;
1105                 }
1106
1107                 /* copy IV into request if it fits */
1108                 if (op->sym->cipher.iv.length && (op->sym->cipher.iv.length <=
1109                                 sizeof(cipher_param->u.cipher_IV_array))) {
1110                         rte_memcpy(cipher_param->u.cipher_IV_array,
1111                                         op->sym->cipher.iv.data,
1112                                         op->sym->cipher.iv.length);
1113                 } else {
1114                         ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(
1115                                         qat_req->comn_hdr.serv_specif_flags,
1116                                         ICP_QAT_FW_CIPH_IV_64BIT_PTR);
1117                         cipher_param->u.s.cipher_IV_ptr =
1118                                         op->sym->cipher.iv.phys_addr;
1119                 }
1120                 min_ofs = cipher_ofs;
1121         }
1122
1123         if (do_auth) {
1124
1125                 if (ctx->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2 ||
1126                         ctx->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_KASUMI_F9) {
1127                         if (unlikely((auth_param->auth_off % BYTE_LENGTH != 0)
1128                                 || (auth_param->auth_len % BYTE_LENGTH != 0))) {
1129                                 PMD_DRV_LOG(ERR,
1130                 "For SNOW3G/KASUMI, QAT PMD only supports byte aligned values");
1131                                 op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
1132                                 return -EINVAL;
1133                         }
1134                         auth_ofs = op->sym->auth.data.offset >> 3;
1135                         auth_len = op->sym->auth.data.length >> 3;
1136
1137                         if (ctx->qat_hash_alg ==
1138                                         ICP_QAT_HW_AUTH_ALGO_KASUMI_F9) {
1139                                 if (do_cipher) {
1140                                         auth_len = auth_len + auth_ofs + 1 -
1141                                                 ICP_QAT_HW_KASUMI_BLK_SZ;
1142                                         auth_ofs = ICP_QAT_HW_KASUMI_BLK_SZ;
1143                                 } else {
1144                                         auth_len = auth_len + auth_ofs + 1;
1145                                         auth_ofs = 0;
1146                                 }
1147                         }
1148
1149                 } else {
1150                         auth_ofs = op->sym->auth.data.offset;
1151                         auth_len = op->sym->auth.data.length;
1152                 }
1153                 min_ofs = auth_ofs;
1154
1155                 auth_param->auth_res_addr = op->sym->auth.digest.phys_addr;
1156
1157                 auth_param->u1.aad_adr = op->sym->auth.aad.phys_addr;
1158
1159         }
1160
1161         if (op->sym->m_src->next || (op->sym->m_dst && op->sym->m_dst->next))
1162                 do_sgl = 1;
1163
1164         /* adjust for chain case */
1165         if (do_cipher && do_auth)
1166                 min_ofs = cipher_ofs < auth_ofs ? cipher_ofs : auth_ofs;
1167
1168         if (unlikely(min_ofs >= rte_pktmbuf_data_len(op->sym->m_src) && do_sgl))
1169                 min_ofs = 0;
1170
1171         if (unlikely(op->sym->m_dst != NULL)) {
1172                 /* Out-of-place operation (OOP)
1173                  * Don't align DMA start. DMA the minimum data-set
1174                  * so as not to overwrite data in dest buffer
1175                  */
1176                 src_buf_start =
1177                         rte_pktmbuf_mtophys_offset(op->sym->m_src, min_ofs);
1178                 dst_buf_start =
1179                         rte_pktmbuf_mtophys_offset(op->sym->m_dst, min_ofs);
1180
1181         } else {
1182                 /* In-place operation
1183                  * Start DMA at nearest aligned address below min_ofs
1184                  */
1185                 src_buf_start =
1186                         rte_pktmbuf_mtophys_offset(op->sym->m_src, min_ofs)
1187                                                 & QAT_64_BTYE_ALIGN_MASK;
1188
1189                 if (unlikely((rte_pktmbuf_mtophys(op->sym->m_src) -
1190                                         rte_pktmbuf_headroom(op->sym->m_src))
1191                                                         > src_buf_start)) {
1192                         /* alignment has pushed addr ahead of start of mbuf
1193                          * so revert and take the performance hit
1194                          */
1195                         src_buf_start =
1196                                 rte_pktmbuf_mtophys_offset(op->sym->m_src,
1197                                                                 min_ofs);
1198                 }
1199                 dst_buf_start = src_buf_start;
1200         }
1201
1202         if (do_cipher) {
1203                 cipher_param->cipher_offset =
1204                                 (uint32_t)rte_pktmbuf_mtophys_offset(
1205                                 op->sym->m_src, cipher_ofs) - src_buf_start;
1206                 cipher_param->cipher_length = cipher_len;
1207         } else {
1208                 cipher_param->cipher_offset = 0;
1209                 cipher_param->cipher_length = 0;
1210         }
1211         if (do_auth) {
1212                 auth_param->auth_off = (uint32_t)rte_pktmbuf_mtophys_offset(
1213                                 op->sym->m_src, auth_ofs) - src_buf_start;
1214                 auth_param->auth_len = auth_len;
1215         } else {
1216                 auth_param->auth_off = 0;
1217                 auth_param->auth_len = 0;
1218         }
1219         qat_req->comn_mid.dst_length =
1220                 qat_req->comn_mid.src_length =
1221                 (cipher_param->cipher_offset + cipher_param->cipher_length)
1222                 > (auth_param->auth_off + auth_param->auth_len) ?
1223                 (cipher_param->cipher_offset + cipher_param->cipher_length)
1224                 : (auth_param->auth_off + auth_param->auth_len);
1225
1226         if (do_sgl) {
1227
1228                 ICP_QAT_FW_COMN_PTR_TYPE_SET(qat_req->comn_hdr.comn_req_flags,
1229                                 QAT_COMN_PTR_TYPE_SGL);
1230                 ret = qat_sgl_fill_array(op->sym->m_src, src_buf_start,
1231                                 &qat_op_cookie->qat_sgl_list_src,
1232                                 qat_req->comn_mid.src_length);
1233                 if (ret) {
1234                         PMD_DRV_LOG(ERR, "QAT PMD Cannot fill sgl array");
1235                         return ret;
1236                 }
1237
1238                 if (likely(op->sym->m_dst == NULL))
1239                         qat_req->comn_mid.dest_data_addr =
1240                                 qat_req->comn_mid.src_data_addr =
1241                                 qat_op_cookie->qat_sgl_src_phys_addr;
1242                 else {
1243                         ret = qat_sgl_fill_array(op->sym->m_dst,
1244                                         dst_buf_start,
1245                                         &qat_op_cookie->qat_sgl_list_dst,
1246                                                 qat_req->comn_mid.dst_length);
1247
1248                         if (ret) {
1249                                 PMD_DRV_LOG(ERR, "QAT PMD Cannot "
1250                                                 "fill sgl array");
1251                                 return ret;
1252                         }
1253
1254                         qat_req->comn_mid.src_data_addr =
1255                                 qat_op_cookie->qat_sgl_src_phys_addr;
1256                         qat_req->comn_mid.dest_data_addr =
1257                                         qat_op_cookie->qat_sgl_dst_phys_addr;
1258                 }
1259         } else {
1260                 qat_req->comn_mid.src_data_addr = src_buf_start;
1261                 qat_req->comn_mid.dest_data_addr = dst_buf_start;
1262         }
1263
1264         if (ctx->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_128 ||
1265                         ctx->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_64) {
1266                 if (op->sym->cipher.iv.length == 12) {
1267                         /*
1268                          * For GCM a 12 bit IV is allowed,
1269                          * but we need to inform the f/w
1270                          */
1271                         ICP_QAT_FW_LA_GCM_IV_LEN_FLAG_SET(
1272                                 qat_req->comn_hdr.serv_specif_flags,
1273                                 ICP_QAT_FW_LA_GCM_IV_LEN_12_OCTETS);
1274                 }
1275                 if (op->sym->cipher.data.length == 0) {
1276                         /*
1277                          * GMAC
1278                          */
1279                         qat_req->comn_mid.dest_data_addr =
1280                                 qat_req->comn_mid.src_data_addr =
1281                                                 op->sym->auth.aad.phys_addr;
1282                         qat_req->comn_mid.dst_length =
1283                                 qat_req->comn_mid.src_length =
1284                                         rte_pktmbuf_data_len(op->sym->m_src);
1285                         cipher_param->cipher_length = 0;
1286                         cipher_param->cipher_offset = 0;
1287                         auth_param->u1.aad_adr = 0;
1288                         auth_param->auth_len = op->sym->auth.aad.length;
1289                         auth_param->auth_off = op->sym->auth.data.offset;
1290                         auth_param->u2.aad_sz = 0;
1291                 }
1292         }
1293
1294 #ifdef RTE_LIBRTE_PMD_QAT_DEBUG_TX
1295         rte_hexdump(stdout, "qat_req:", qat_req,
1296                         sizeof(struct icp_qat_fw_la_bulk_req));
1297         rte_hexdump(stdout, "src_data:",
1298                         rte_pktmbuf_mtod(op->sym->m_src, uint8_t*),
1299                         rte_pktmbuf_data_len(op->sym->m_src));
1300         rte_hexdump(stdout, "iv:", op->sym->cipher.iv.data,
1301                         op->sym->cipher.iv.length);
1302         rte_hexdump(stdout, "digest:", op->sym->auth.digest.data,
1303                         op->sym->auth.digest.length);
1304         rte_hexdump(stdout, "aad:", op->sym->auth.aad.data,
1305                         op->sym->auth.aad.length);
1306 #endif
1307         return 0;
1308 }
1309
/*
 * Fast modulo for power-of-two ring sizes: returns data mod 2^shift,
 * i.e. the low 'shift' bits of data.
 */
static inline uint32_t adf_modulo(uint32_t data, uint32_t shift)
{
	uint32_t mask = (UINT32_C(1) << shift) - 1;

	return data & mask;
}
1317
/*
 * Mempool object-init callback for symmetric sessions: pre-computes the
 * physical address of the content descriptor ('cd') that lives inside
 * the session's private area, so the fast path never has to translate
 * it per op.
 */
void qat_crypto_sym_session_init(struct rte_mempool *mp, void *sym_sess)
{
	struct rte_cryptodev_sym_session *sess = sym_sess;
	struct qat_session *s = (void *)sess->_private;

	PMD_INIT_FUNC_TRACE();
	/* phys(sess) + offset of _private area + offset of cd within it */
	s->cd_paddr = rte_mempool_virt2phy(mp, sess) +
		offsetof(struct qat_session, cd) +
		offsetof(struct rte_cryptodev_sym_session, _private);
}
1328
/* Device configure op: QAT needs no device-level configuration, so this
 * is a no-op that always reports success.
 */
int qat_dev_config(__rte_unused struct rte_cryptodev *dev,
		__rte_unused struct rte_cryptodev_config *config)
{
	PMD_INIT_FUNC_TRACE();
	return 0;
}
1335
/* Device start op: nothing to do for QAT; always succeeds. */
int qat_dev_start(__rte_unused struct rte_cryptodev *dev)
{
	PMD_INIT_FUNC_TRACE();
	return 0;
}
1341
/* Device stop op: nothing to do for QAT. */
void qat_dev_stop(__rte_unused struct rte_cryptodev *dev)
{
	PMD_INIT_FUNC_TRACE();
}
1346
1347 int qat_dev_close(struct rte_cryptodev *dev)
1348 {
1349         int i, ret;
1350
1351         PMD_INIT_FUNC_TRACE();
1352
1353         for (i = 0; i < dev->data->nb_queue_pairs; i++) {
1354                 ret = qat_crypto_sym_qp_release(dev, i);
1355                 if (ret < 0)
1356                         return ret;
1357         }
1358
1359         return 0;
1360 }
1361
1362 void qat_dev_info_get(__rte_unused struct rte_cryptodev *dev,
1363                                 struct rte_cryptodev_info *info)
1364 {
1365         struct qat_pmd_private *internals = dev->data->dev_private;
1366
1367         PMD_INIT_FUNC_TRACE();
1368         if (info != NULL) {
1369                 info->max_nb_queue_pairs =
1370                                 ADF_NUM_SYM_QPS_PER_BUNDLE *
1371                                 ADF_NUM_BUNDLES_PER_DEV;
1372                 info->feature_flags = dev->feature_flags;
1373                 info->capabilities = qat_pmd_capabilities;
1374                 info->sym.max_nb_sessions = internals->max_nb_sessions;
1375                 info->dev_type = RTE_CRYPTODEV_QAT_SYM_PMD;
1376         }
1377 }
1378
1379 void qat_crypto_sym_stats_get(struct rte_cryptodev *dev,
1380                 struct rte_cryptodev_stats *stats)
1381 {
1382         int i;
1383         struct qat_qp **qp = (struct qat_qp **)(dev->data->queue_pairs);
1384
1385         PMD_INIT_FUNC_TRACE();
1386         if (stats == NULL) {
1387                 PMD_DRV_LOG(ERR, "invalid stats ptr NULL");
1388                 return;
1389         }
1390         for (i = 0; i < dev->data->nb_queue_pairs; i++) {
1391                 if (qp[i] == NULL) {
1392                         PMD_DRV_LOG(DEBUG, "Uninitialised queue pair");
1393                         continue;
1394                 }
1395
1396                 stats->enqueued_count += qp[i]->stats.enqueued_count;
1397                 stats->dequeued_count += qp[i]->stats.enqueued_count;
1398                 stats->enqueue_err_count += qp[i]->stats.enqueue_err_count;
1399                 stats->dequeue_err_count += qp[i]->stats.enqueue_err_count;
1400         }
1401 }
1402
1403 void qat_crypto_sym_stats_reset(struct rte_cryptodev *dev)
1404 {
1405         int i;
1406         struct qat_qp **qp = (struct qat_qp **)(dev->data->queue_pairs);
1407
1408         PMD_INIT_FUNC_TRACE();
1409         for (i = 0; i < dev->data->nb_queue_pairs; i++)
1410                 memset(&(qp[i]->stats), 0, sizeof(qp[i]->stats));
1411         PMD_DRV_LOG(DEBUG, "QAT crypto: stats cleared");
1412 }