drivers/crypto/qat/qat_asym.c
1 /* SPDX-License-Identifier: BSD-3-Clause
2  * Copyright(c) 2019 - 2022 Intel Corporation
3  */
4
5 #include <stdarg.h>
6
7 #include <cryptodev_pmd.h>
8
9 #include "qat_device.h"
10 #include "qat_logs.h"
11
12 #include "qat_asym.h"
13 #include "icp_qat_fw_pke.h"
14 #include "icp_qat_fw.h"
15 #include "qat_pke.h"
16 #include "qat_ec.h"
17
18 uint8_t qat_asym_driver_id;
19
20 struct qat_crypto_gen_dev_ops qat_asym_gen_dev_ops[QAT_N_GENS];
21
22 /* An rte_driver is needed to register both the device and the driver
23  * with cryptodev.
24  * The QAT PCI device's own rte_driver cannot be used, since its name
25  * represents the whole PCI device with all of its services. This structure
26  * simply holds a name for the crypto part of the PCI device.
27  */
28 static const char qat_asym_drv_name[] = RTE_STR(CRYPTODEV_NAME_QAT_ASYM_PMD);
29 static const struct rte_driver cryptodev_qat_asym_driver = {
30         .name = qat_asym_drv_name,
31         .alias = qat_asym_drv_name
32 };
33
34 /*
35  * Macros with the _F suffix rely on the following predefined identifiers:
36  * - cookie->input_buffer
37  * - qat_alg_bytesize
38  */
39 #if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
40 #define HEXDUMP(name, where, size) QAT_DP_HEXDUMP_LOG(DEBUG, name, \
41                         where, size)
42 #define HEXDUMP_OFF(name, where, size, idx) QAT_DP_HEXDUMP_LOG(DEBUG, name, \
43                         &where[idx * size], size)
44
45 #define HEXDUMP_OFF_F(name, idx) QAT_DP_HEXDUMP_LOG(DEBUG, name, \
46                         &cookie->input_buffer[idx * qat_alg_bytesize], \
47                         qat_alg_bytesize)
48 #else
49 #define HEXDUMP(name, where, size)
50 #define HEXDUMP_OFF(name, where, size, idx)
51 #define HEXDUMP_OFF_F(name, idx)
52 #endif
53
54 #define CHECK_IF_NOT_EMPTY(param, name, pname, status) \
55         do { \
56                 if (param.length == 0) {        \
57                         QAT_LOG(ERR,                    \
58                                 "Invalid " name \
59                                 " input parameter, zero length " pname  \
60                         );      \
61                         status = -EINVAL;       \
62                 } else if (check_zero(param)) { \
63                         QAT_LOG(ERR,    \
64                                 "Invalid " name " input parameter, empty " \
65                                 pname ", length = %d", \
66                                 (int)param.length \
67                         ); \
68                         status = -EINVAL;       \
69                 } \
70         } while (0)
71
72 #define SET_PKE_LN(where, what, how, idx) \
73         rte_memcpy(where[idx] + how - \
74                 what.length, \
75                 what.data, \
76                 what.length)
77
78 #define SET_PKE_LN_9A(where, what, how, idx) \
79                 rte_memcpy(&where[idx * RTE_ALIGN_CEIL(how, 8)] + \
80                         RTE_ALIGN_CEIL(how, 8) - \
81                         what.length, \
82                         what.data, \
83                         what.length)
84
85 #define SET_PKE_LN_EC(where, what, how, idx) \
86                 rte_memcpy(where[idx] + \
87                         RTE_ALIGN_CEIL(how, 8) - \
88                         how, \
89                         what.data, \
90                         how)
91
92 #define SET_PKE_LN_9A_F(what, idx) \
93                 rte_memcpy(&cookie->input_buffer[idx * qat_alg_bytesize] + \
94                         qat_alg_bytesize - what.length, \
95                         what.data, what.length)
96
97 #define SET_PKE_LN_EC_F(what, how, idx) \
98                 rte_memcpy(&cookie->input_buffer[idx * \
99                         RTE_ALIGN_CEIL(how, 8)] + \
100                         RTE_ALIGN_CEIL(how, 8) - how, \
101                         what.data, how)
102
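/*
 * All SET_PKE_LN* helpers right-align a big-endian operand inside a
 * fixed-size PKE input line, so the operand ends up left-padded with the
 * zero bytes already present in the cookie (the arrays are scrubbed by
 * cleanup() after every completed operation).  Illustrative example: with
 * alg_bytesize = 64 (a 512-bit function) and a 3-byte exponent
 * { 0x01, 0x00, 0x01 }, the bytes land at offsets 61..63 of the line and
 * offsets 0..60 stay zero.  The _9A and _F variants index a flat
 * input_buffer with a stride rounded up to a multiple of 8 bytes, and the
 * _EC variants copy exactly 'how' bytes, right-aligned within that
 * 8-byte-aligned width.
 */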
103 static void
104 request_init(struct icp_qat_fw_pke_request *qat_req)
105 {
106         memset(qat_req, 0, sizeof(*qat_req));
107         qat_req->pke_hdr.service_type = ICP_QAT_FW_COMN_REQ_CPM_FW_PKE;
108         qat_req->pke_hdr.hdr_flags =
109                         ICP_QAT_FW_COMN_HDR_FLAGS_BUILD
110                         (ICP_QAT_FW_COMN_REQ_FLAG_SET);
111 }
112
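/*
 * The cleanup helpers below scrub the per-operation cookie buffers once a
 * result has been collected, so key material and intermediate values from
 * one operation are not left behind for the next operation that reuses
 * the same cookie.
 */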
113 static void
114 cleanup_arrays(struct qat_asym_op_cookie *cookie,
115                 int in_count, int out_count, int alg_size)
116 {
117         int i;
118
119         for (i = 0; i < in_count; i++)
120                 memset(cookie->input_array[i], 0x0, alg_size);
121         for (i = 0; i < out_count; i++)
122                 memset(cookie->output_array[i], 0x0, alg_size);
123 }
124
125 static void
126 cleanup_crt(struct qat_asym_op_cookie *cookie,
127                 int alg_size)
128 {
129         int i;
130
131         memset(cookie->input_array[0], 0x0, alg_size);
132         for (i = 1; i < QAT_ASYM_RSA_QT_NUM_IN_PARAMS; i++)
133                 memset(cookie->input_array[i], 0x0, alg_size / 2);
134         for (i = 0; i < QAT_ASYM_RSA_NUM_OUT_PARAMS; i++)
135                 memset(cookie->output_array[i], 0x0, alg_size);
136 }
137
138 static void
139 cleanup(struct qat_asym_op_cookie *cookie,
140                 struct rte_crypto_asym_xform *xform, int alg_size)
141 {
142         if (xform->xform_type == RTE_CRYPTO_ASYM_XFORM_MODEX)
143                 cleanup_arrays(cookie, QAT_ASYM_MODEXP_NUM_IN_PARAMS,
144                                 QAT_ASYM_MODEXP_NUM_OUT_PARAMS, alg_size);
145         else if (xform->xform_type == RTE_CRYPTO_ASYM_XFORM_MODINV)
146                 cleanup_arrays(cookie, QAT_ASYM_MODINV_NUM_IN_PARAMS,
147                                 QAT_ASYM_MODINV_NUM_OUT_PARAMS, alg_size);
148         else if (xform->xform_type == RTE_CRYPTO_ASYM_XFORM_RSA) {
149                 if (xform->rsa.key_type == RTE_RSA_KEY_TYPE_QT)
150                         cleanup_crt(cookie, alg_size);
151                 else {
152                         cleanup_arrays(cookie, QAT_ASYM_RSA_NUM_IN_PARAMS,
153                                 QAT_ASYM_RSA_NUM_OUT_PARAMS, alg_size);
154                 }
155         }
156 }
157
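/*
 * Returns 1 when the big-endian parameter consists of zero bytes only
 * (i.e. the operand is numerically zero), 0 otherwise.  The trailing
 * eight bytes are tested as a single 64-bit word, any remaining leading
 * bytes one by one.
 */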
158 static int
159 check_zero(rte_crypto_param n)
160 {
161         int i, len = n.length;
162
163         if (len < 8) {
164                 for (i = len - 1; i >= 0; i--) {
165                         if (n.data[i] != 0x0)
166                                 return 0;
167                 }
168         } else if (len == 8 && *(uint64_t *)&n.data[len - 8] == 0) {
169                 return 1;
170         } else if (*(uint64_t *)&n.data[len - 8] == 0) {
171                 for (i = len - 9; i >= 0; i--) {
172                         if (n.data[i] != 0x0)
173                                 return 0;
174                 }
175         } else
176                 return 0;
177
178         return 1;
179 }
180
181 static struct qat_asym_function
182 get_asym_function(struct rte_crypto_asym_xform *xform)
183 {
184         struct qat_asym_function qat_function;
185
186         switch (xform->xform_type) {
187         case RTE_CRYPTO_ASYM_XFORM_MODEX:
188                 qat_function = get_modexp_function(xform);
189                 break;
190         case RTE_CRYPTO_ASYM_XFORM_MODINV:
191                 qat_function = get_modinv_function(xform);
192                 break;
193         default:
194                 qat_function.func_id = 0;
195                 break;
196         }
197
198         return qat_function;
199 }
200
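/*
 * Fills the PKE input lines for a modular exponentiation request:
 * line 0 = base, line 1 = exponent, line 2 = modulus, each right-aligned
 * to the byte size of the firmware function selected for the operand
 * sizes.  Illustrative application-side view of the fields consumed here
 * (not part of this driver; field names as in rte_crypto_asym.h):
 *
 *     xform.xform_type = RTE_CRYPTO_ASYM_XFORM_MODEX;
 *     xform.modex.modulus.data  = mod_be;  xform.modex.modulus.length  = mod_len;
 *     xform.modex.exponent.data = exp_be;  xform.modex.exponent.length = exp_len;
 *
 * with op->modex.base and op->modex.result carrying the per-operation
 * data, all byte strings in big-endian order.
 */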
201 static int
202 modexp_set_input(struct rte_crypto_asym_op *asym_op,
203                 struct icp_qat_fw_pke_request *qat_req,
204                 struct qat_asym_op_cookie *cookie,
205                 struct rte_crypto_asym_xform *xform)
206 {
207         struct qat_asym_function qat_function;
208         uint32_t alg_bytesize, func_id;
209         int status = 0;
210
211         CHECK_IF_NOT_EMPTY(xform->modex.modulus, "mod exp",
212                         "modulus", status);
213         CHECK_IF_NOT_EMPTY(xform->modex.exponent, "mod exp",
214                                 "exponent", status);
215         if (status)
216                 return status;
217
218         qat_function = get_asym_function(xform);
219         func_id = qat_function.func_id;
220         if (qat_function.func_id == 0) {
221                 QAT_LOG(ERR, "Cannot obtain functionality id");
222                 return -EINVAL;
223         }
224         alg_bytesize = qat_function.bytesize;
225
226         SET_PKE_LN(cookie->input_array, asym_op->modex.base,
227                         alg_bytesize, 0);
228         SET_PKE_LN(cookie->input_array, xform->modex.exponent,
229                         alg_bytesize, 1);
230         SET_PKE_LN(cookie->input_array, xform->modex.modulus,
231                         alg_bytesize, 2);
232
233         cookie->alg_bytesize = alg_bytesize;
234         qat_req->pke_hdr.cd_pars.func_id = func_id;
235         qat_req->input_param_count = QAT_ASYM_MODEXP_NUM_IN_PARAMS;
236         qat_req->output_param_count = QAT_ASYM_MODEXP_NUM_OUT_PARAMS;
237
238         HEXDUMP("ModExp base", cookie->input_array[0], alg_bytesize);
239         HEXDUMP("ModExp exponent", cookie->input_array[1], alg_bytesize);
240         HEXDUMP("ModExp modulus", cookie->input_array[2], alg_bytesize);
241
242         return status;
243 }
244
245 static uint8_t
246 modexp_collect(struct rte_crypto_asym_op *asym_op,
247                 struct qat_asym_op_cookie *cookie,
248                 struct rte_crypto_asym_xform *xform)
249 {
250         rte_crypto_param n = xform->modex.modulus;
251         uint32_t alg_bytesize = cookie->alg_bytesize;
252         uint8_t *modexp_result = asym_op->modex.result.data;
253
254         rte_memcpy(modexp_result,
255                 cookie->output_array[0] + alg_bytesize
256                 - n.length, n.length);
257         HEXDUMP("ModExp result", cookie->output_array[0],
258                         alg_bytesize);
259         return RTE_CRYPTO_OP_STATUS_SUCCESS;
260 }
261
262 static int
263 modinv_set_input(struct rte_crypto_asym_op *asym_op,
264                 struct icp_qat_fw_pke_request *qat_req,
265                 struct qat_asym_op_cookie *cookie,
266                 struct rte_crypto_asym_xform *xform)
267 {
268         struct qat_asym_function qat_function;
269         uint32_t alg_bytesize, func_id;
270         int status = 0;
271
272         CHECK_IF_NOT_EMPTY(xform->modex.modulus, "mod inv",
273                         "modulus", status);
274         if (status)
275                 return status;
276
277         qat_function = get_asym_function(xform);
278         func_id = qat_function.func_id;
279         if (func_id == 0) {
280                 QAT_LOG(ERR, "Cannot obtain functionality id");
281                 return -EINVAL;
282         }
283         alg_bytesize = qat_function.bytesize;
284
285         SET_PKE_LN(cookie->input_array, asym_op->modinv.base,
286                         alg_bytesize, 0);
287         SET_PKE_LN(cookie->input_array, xform->modinv.modulus,
288                         alg_bytesize, 1);
289
290         cookie->alg_bytesize = alg_bytesize;
291         qat_req->pke_hdr.cd_pars.func_id = func_id;
292         qat_req->input_param_count =
293                         QAT_ASYM_MODINV_NUM_IN_PARAMS;
294         qat_req->output_param_count =
295                         QAT_ASYM_MODINV_NUM_OUT_PARAMS;
296
297         HEXDUMP("ModInv base", cookie->input_array[0], alg_bytesize);
298         HEXDUMP("ModInv modulus", cookie->input_array[1], alg_bytesize);
299
300         return 0;
301 }
302
303 static uint8_t
304 modinv_collect(struct rte_crypto_asym_op *asym_op,
305                 struct qat_asym_op_cookie *cookie,
306                 struct rte_crypto_asym_xform *xform)
307 {
308         rte_crypto_param n = xform->modinv.modulus;
309         uint8_t *modinv_result = asym_op->modinv.result.data;
310         uint32_t alg_bytesize = cookie->alg_bytesize;
311
312         rte_memcpy(modinv_result + (asym_op->modinv.result.length
313                 - n.length),
314                 cookie->output_array[0] + alg_bytesize
315                 - n.length, n.length);
316         HEXDUMP("ModInv result", cookie->output_array[0],
317                         alg_bytesize);
318         return RTE_CRYPTO_OP_STATUS_SUCCESS;
319 }
320
321 static int
322 rsa_set_pub_input(struct rte_crypto_asym_op *asym_op,
323                 struct icp_qat_fw_pke_request *qat_req,
324                 struct qat_asym_op_cookie *cookie,
325                 struct rte_crypto_asym_xform *xform)
326 {
327         struct qat_asym_function qat_function;
328         uint32_t alg_bytesize, func_id;
329         int status = 0;
330
331         qat_function = get_rsa_enc_function(xform);
332         func_id = qat_function.func_id;
333         if (func_id == 0) {
334                 QAT_LOG(ERR, "Cannot obtain functionality id");
335                 return -EINVAL;
336         }
337         alg_bytesize = qat_function.bytesize;
338
339         if (asym_op->rsa.op_type == RTE_CRYPTO_ASYM_OP_ENCRYPT) {
340                 switch (asym_op->rsa.pad) {
341                 case RTE_CRYPTO_RSA_PADDING_NONE:
342                         SET_PKE_LN(cookie->input_array, asym_op->rsa.message,
343                                         alg_bytesize, 0);
344                         break;
345                 default:
346                         QAT_LOG(ERR,
347                                 "Invalid RSA padding (Encryption)"
348                                 );
349                         return -EINVAL;
350                 }
351                 HEXDUMP("RSA Message", cookie->input_array[0], alg_bytesize);
352         } else {
353                 switch (asym_op->rsa.pad) {
354                 case RTE_CRYPTO_RSA_PADDING_NONE:
355                         SET_PKE_LN(cookie->input_array, asym_op->rsa.sign,
356                                         alg_bytesize, 0);
357                         break;
358                 default:
359                         QAT_LOG(ERR,
360                                 "Invalid RSA padding (Verify)");
361                         return -EINVAL;
362                 }
363                 HEXDUMP("RSA Signature", cookie->input_array[0],
364                                 alg_bytesize);
365         }
366
367         SET_PKE_LN(cookie->input_array, xform->rsa.e,
368                         alg_bytesize, 1);
369         SET_PKE_LN(cookie->input_array, xform->rsa.n,
370                         alg_bytesize, 2);
371
372         cookie->alg_bytesize = alg_bytesize;
373         qat_req->pke_hdr.cd_pars.func_id = func_id;
374
375         HEXDUMP("RSA Public Key", cookie->input_array[1], alg_bytesize);
376         HEXDUMP("RSA Modulus", cookie->input_array[2], alg_bytesize);
377
378         return status;
379 }
380
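/*
 * Private-key RSA input layout.  For RTE_RSA_KEY_TYPE_QT (CRT form) the
 * request uses QAT_ASYM_RSA_QT_NUM_IN_PARAMS lines: line 0 carries the
 * full-size ciphertext (decrypt) or message (sign), and lines 1..5 carry
 * p, q, dP, dQ and qInv at half the modulus size.  For
 * RTE_RSA_KEY_TYPE_EXP, line 0 carries the ciphertext/message and
 * lines 1..2 carry d and n at full size.
 */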
381 static int
382 rsa_set_priv_input(struct rte_crypto_asym_op *asym_op,
383                 struct icp_qat_fw_pke_request *qat_req,
384                 struct qat_asym_op_cookie *cookie,
385                 struct rte_crypto_asym_xform *xform)
386 {
387         struct qat_asym_function qat_function;
388         uint32_t alg_bytesize, func_id;
389         int status = 0;
390
391         if (xform->rsa.key_type == RTE_RSA_KEY_TYPE_QT) {
392                 qat_function = get_rsa_crt_function(xform);
393                 func_id = qat_function.func_id;
394                 if (func_id == 0) {
395                         QAT_LOG(ERR, "Cannot obtain functionality id");
396                         return -EINVAL;
397                 }
398                 alg_bytesize = qat_function.bytesize;
399                 qat_req->input_param_count =
400                                 QAT_ASYM_RSA_QT_NUM_IN_PARAMS;
401
402                 SET_PKE_LN(cookie->input_array, xform->rsa.qt.p,
403                         (alg_bytesize >> 1), 1);
404                 SET_PKE_LN(cookie->input_array, xform->rsa.qt.q,
405                         (alg_bytesize >> 1), 2);
406                 SET_PKE_LN(cookie->input_array, xform->rsa.qt.dP,
407                         (alg_bytesize >> 1), 3);
408                 SET_PKE_LN(cookie->input_array, xform->rsa.qt.dQ,
409                         (alg_bytesize >> 1), 4);
410                 SET_PKE_LN(cookie->input_array, xform->rsa.qt.qInv,
411                         (alg_bytesize >> 1), 5);
412
413                 HEXDUMP("RSA p", cookie->input_array[1],
414                                 alg_bytesize);
415                 HEXDUMP("RSA q", cookie->input_array[2],
416                                 alg_bytesize);
417                 HEXDUMP("RSA dP", cookie->input_array[3],
418                                 alg_bytesize);
419                 HEXDUMP("RSA dQ", cookie->input_array[4],
420                                 alg_bytesize);
421                 HEXDUMP("RSA qInv", cookie->input_array[5],
422                                 alg_bytesize);
423         } else if (xform->rsa.key_type ==
424                         RTE_RSA_KEY_TYPE_EXP) {
425                 qat_function = get_rsa_dec_function(xform);
426                 func_id = qat_function.func_id;
427                 if (func_id == 0) {
428                         QAT_LOG(ERR, "Cannot obtain functionality id");
429                         return -EINVAL;
430                 }
431                 alg_bytesize = qat_function.bytesize;
432
433                 SET_PKE_LN(cookie->input_array, xform->rsa.d,
434                         alg_bytesize, 1);
435                 SET_PKE_LN(cookie->input_array, xform->rsa.n,
436                         alg_bytesize, 2);
437
438                 HEXDUMP("RSA d", cookie->input_array[1],
439                                 alg_bytesize);
440                 HEXDUMP("RSA n", cookie->input_array[2],
441                                 alg_bytesize);
442         } else {
443                 QAT_LOG(ERR, "Invalid RSA key type");
444                 return -EINVAL;
445         }
446
447         if (asym_op->rsa.op_type ==
448                         RTE_CRYPTO_ASYM_OP_DECRYPT) {
449                 switch (asym_op->rsa.pad) {
450                 case RTE_CRYPTO_RSA_PADDING_NONE:
451                         SET_PKE_LN(cookie->input_array, asym_op->rsa.cipher,
452                                 alg_bytesize, 0);
453                         HEXDUMP("RSA ciphertext", cookie->input_array[0],
454                                 alg_bytesize);
455                         break;
456                 default:
457                         QAT_LOG(ERR,
458                                 "Invalid padding of RSA (Decrypt)");
459                         return -(EINVAL);
460                 }
461
462         } else if (asym_op->rsa.op_type ==
463                         RTE_CRYPTO_ASYM_OP_SIGN) {
464                 switch (asym_op->rsa.pad) {
465                 case RTE_CRYPTO_RSA_PADDING_NONE:
466                         SET_PKE_LN(cookie->input_array, asym_op->rsa.message,
467                                 alg_bytesize, 0);
468                         HEXDUMP("RSA text to be signed", cookie->input_array[0],
469                                 alg_bytesize);
470                         break;
471                 default:
472                         QAT_LOG(ERR,
473                                 "Invalid padding of RSA (Signature)");
474                         return -(EINVAL);
475                 }
476         }
477
478         cookie->alg_bytesize = alg_bytesize;
479         qat_req->pke_hdr.cd_pars.func_id = func_id;
480         return status;
481 }
482
483 static int
484 rsa_set_input(struct rte_crypto_asym_op *asym_op,
485                 struct icp_qat_fw_pke_request *qat_req,
486                 struct qat_asym_op_cookie *cookie,
487                 struct rte_crypto_asym_xform *xform)
488 {
489         qat_req->input_param_count =
490                         QAT_ASYM_RSA_NUM_IN_PARAMS;
491         qat_req->output_param_count =
492                         QAT_ASYM_RSA_NUM_OUT_PARAMS;
493
494         if (asym_op->rsa.op_type == RTE_CRYPTO_ASYM_OP_ENCRYPT ||
495                         asym_op->rsa.op_type ==
496                                 RTE_CRYPTO_ASYM_OP_VERIFY) {
497                 return rsa_set_pub_input(asym_op, qat_req, cookie, xform);
498         } else {
499                 return rsa_set_priv_input(asym_op, qat_req, cookie, xform);
500         }
501 }
502
503 static uint8_t
504 rsa_collect(struct rte_crypto_asym_op *asym_op,
505                 struct qat_asym_op_cookie *cookie)
506 {
507         uint32_t alg_bytesize = cookie->alg_bytesize;
508
509         if (asym_op->rsa.op_type == RTE_CRYPTO_ASYM_OP_ENCRYPT ||
510                 asym_op->rsa.op_type == RTE_CRYPTO_ASYM_OP_VERIFY) {
511
512                 if (asym_op->rsa.op_type ==
513                                 RTE_CRYPTO_ASYM_OP_ENCRYPT) {
514                         uint8_t *rsa_result = asym_op->rsa.cipher.data;
515
516                         rte_memcpy(rsa_result,
517                                         cookie->output_array[0],
518                                         alg_bytesize);
519                         HEXDUMP("RSA Encrypted data", cookie->output_array[0],
520                                 alg_bytesize);
521                 } else {
522                         uint8_t *rsa_result = asym_op->rsa.cipher.data;
523
524                         switch (asym_op->rsa.pad) {
525                         case RTE_CRYPTO_RSA_PADDING_NONE:
526                                 rte_memcpy(rsa_result,
527                                                 cookie->output_array[0],
528                                                 alg_bytesize);
529                                 HEXDUMP("RSA signature",
530                                         cookie->output_array[0],
531                                         alg_bytesize);
532                                 break;
533                         default:
534                                 QAT_LOG(ERR, "Padding not supported");
535                                 return RTE_CRYPTO_OP_STATUS_ERROR;
536                         }
537                 }
538         } else {
539                 if (asym_op->rsa.op_type == RTE_CRYPTO_ASYM_OP_DECRYPT) {
540                         uint8_t *rsa_result = asym_op->rsa.message.data;
541
542                         switch (asym_op->rsa.pad) {
543                         case RTE_CRYPTO_RSA_PADDING_NONE:
544                                 rte_memcpy(rsa_result,
545                                         cookie->output_array[0],
546                                         alg_bytesize);
547                                 HEXDUMP("RSA Decrypted Message",
548                                         cookie->output_array[0],
549                                         alg_bytesize);
550                                 break;
551                         default:
552                                 QAT_LOG(ERR, "Padding not supported");
553                                 return RTE_CRYPTO_OP_STATUS_ERROR;
554                         }
555                 } else {
556                         uint8_t *rsa_result = asym_op->rsa.sign.data;
557
558                         rte_memcpy(rsa_result,
559                                         cookie->output_array[0],
560                                         alg_bytesize);
561                         HEXDUMP("RSA Signature", cookie->output_array[0],
562                                 alg_bytesize);
563                 }
564         }
565         return RTE_CRYPTO_OP_STATUS_SUCCESS;
566 }
567
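/*
 * ECDSA input layout.  Per-operation values (the private key, the message
 * digest, the scalar k, or r/s plus the public-key point for verify) are
 * packed with the *_9A_F helpers at an 8-byte-aligned stride into the flat
 * input_buffer, while the curve constants (a, b, p, n and the base point
 * x/y) come from the built-in curve table selected by pick_curve().  Sign
 * and verify use the different line orderings laid out below.
 */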
568 static int
569 ecdsa_set_input(struct rte_crypto_asym_op *asym_op,
570                 struct icp_qat_fw_pke_request *qat_req,
571                 struct qat_asym_op_cookie *cookie,
572                 struct rte_crypto_asym_xform *xform)
573 {
574         struct qat_asym_function qat_function;
575         uint32_t alg_bytesize, qat_alg_bytesize, func_id;
576         int curve_id;
577
578         curve_id = pick_curve(xform);
579         if (curve_id < 0) {
580                 QAT_LOG(ERR, "Incorrect elliptic curve");
581                 return -EINVAL;
582         }
583
584         switch (asym_op->ecdsa.op_type) {
585         case RTE_CRYPTO_ASYM_OP_SIGN:
586                 qat_function = get_ecdsa_function(xform);
587                 func_id = qat_function.func_id;
588                 if (func_id == 0) {
589                         QAT_LOG(ERR, "Cannot obtain functionality id");
590                         return -EINVAL;
591                 }
592                 alg_bytesize = qat_function.bytesize;
593                 qat_alg_bytesize = RTE_ALIGN_CEIL(alg_bytesize, 8);
594
595                 SET_PKE_LN_9A_F(asym_op->ecdsa.pkey, 0);
596                 SET_PKE_LN_9A_F(asym_op->ecdsa.message, 1);
597                 SET_PKE_LN_9A_F(asym_op->ecdsa.k, 2);
598                 SET_PKE_LN_EC_F(curve[curve_id].b, alg_bytesize, 3);
599                 SET_PKE_LN_EC_F(curve[curve_id].a, alg_bytesize, 4);
600                 SET_PKE_LN_EC_F(curve[curve_id].p, alg_bytesize, 5);
601                 SET_PKE_LN_EC_F(curve[curve_id].n, alg_bytesize, 6);
602                 SET_PKE_LN_EC_F(curve[curve_id].y, alg_bytesize, 7);
603                 SET_PKE_LN_EC_F(curve[curve_id].x, alg_bytesize, 8);
604
605                 cookie->alg_bytesize = alg_bytesize;
606                 qat_req->pke_hdr.cd_pars.func_id = func_id;
607                 qat_req->input_param_count =
608                                 QAT_ASYM_ECDSA_RS_SIGN_IN_PARAMS;
609                 qat_req->output_param_count =
610                                 QAT_ASYM_ECDSA_RS_SIGN_OUT_PARAMS;
611
612                 HEXDUMP_OFF_F("ECDSA d", 0);
613                 HEXDUMP_OFF_F("ECDSA e", 1);
614                 HEXDUMP_OFF_F("ECDSA k", 2);
615                 HEXDUMP_OFF_F("ECDSA b", 3);
616                 HEXDUMP_OFF_F("ECDSA a", 4);
617                 HEXDUMP_OFF_F("ECDSA n", 5);
618                 HEXDUMP_OFF_F("ECDSA y", 6);
619                 HEXDUMP_OFF_F("ECDSA x", 7);
620                 break;
621         case RTE_CRYPTO_ASYM_OP_VERIFY:
622                 qat_function = get_ecdsa_verify_function(xform);
623                 func_id = qat_function.func_id;
624                 if (func_id == 0) {
625                         QAT_LOG(ERR, "Cannot obtain functionality id");
626                         return -EINVAL;
627                 }
628                 alg_bytesize = qat_function.bytesize;
629                 qat_alg_bytesize = RTE_ALIGN_CEIL(alg_bytesize, 8);
630
631                 SET_PKE_LN_9A_F(asym_op->ecdsa.message, 10);
632                 SET_PKE_LN_9A_F(asym_op->ecdsa.s, 9);
633                 SET_PKE_LN_9A_F(asym_op->ecdsa.r, 8);
634                 SET_PKE_LN_EC_F(curve[curve_id].n, alg_bytesize, 7);
635                 SET_PKE_LN_EC_F(curve[curve_id].x, alg_bytesize, 6);
636                 SET_PKE_LN_EC_F(curve[curve_id].y, alg_bytesize, 5);
637                 SET_PKE_LN_9A_F(asym_op->ecdsa.q.x, 4);
638                 SET_PKE_LN_9A_F(asym_op->ecdsa.q.y, 3);
639                 SET_PKE_LN_EC_F(curve[curve_id].a, alg_bytesize, 2);
640                 SET_PKE_LN_EC_F(curve[curve_id].b, alg_bytesize, 1);
641                 SET_PKE_LN_EC_F(curve[curve_id].p, alg_bytesize, 0);
642
643                 cookie->alg_bytesize = alg_bytesize;
644                 qat_req->pke_hdr.cd_pars.func_id = func_id;
645                 qat_req->input_param_count =
646                                 QAT_ASYM_ECDSA_RS_VERIFY_IN_PARAMS;
647                 qat_req->output_param_count =
648                                 QAT_ASYM_ECDSA_RS_VERIFY_OUT_PARAMS;
649
650                 HEXDUMP_OFF_F("p", 0);
651                 HEXDUMP_OFF_F("b", 1);
652                 HEXDUMP_OFF_F("a", 2);
653                 HEXDUMP_OFF_F("y", 3);
654                 HEXDUMP_OFF_F("x", 4);
655                 HEXDUMP_OFF_F("yG", 5);
656                 HEXDUMP_OFF_F("xG", 6);
657                 HEXDUMP_OFF_F("n", 7);
658                 HEXDUMP_OFF_F("r", 8);
659                 HEXDUMP_OFF_F("s", 9);
660                 HEXDUMP_OFF_F("e", 10);
661                 break;
662         default:
663                 return -1;
664         }
665
666         return 0;
667 }
668
669 static uint8_t
670 ecdsa_collect(struct rte_crypto_asym_op *asym_op,
671                 struct qat_asym_op_cookie *cookie)
672 {
673         uint32_t alg_bytesize = cookie->alg_bytesize;
674         uint32_t qat_alg_bytesize = RTE_ALIGN_CEIL(cookie->alg_bytesize, 8);
675         uint32_t ltrim = qat_alg_bytesize - alg_bytesize;
676
677         if (asym_op->ecdsa.op_type == RTE_CRYPTO_ASYM_OP_SIGN) {
678                 uint8_t *r = asym_op->ecdsa.r.data;
679                 uint8_t *s = asym_op->ecdsa.s.data;
680
681                 asym_op->ecdsa.r.length = alg_bytesize;
682                 asym_op->ecdsa.s.length = alg_bytesize;
683                 rte_memcpy(r, &cookie->output_array[0][ltrim], alg_bytesize);
684                 rte_memcpy(s, &cookie->output_array[1][ltrim], alg_bytesize);
685
686                 HEXDUMP("R", cookie->output_array[0],
687                         alg_bytesize);
688                 HEXDUMP("S", cookie->output_array[1],
689                         alg_bytesize);
690         }
691         return RTE_CRYPTO_OP_STATUS_SUCCESS;
692 }
693
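/*
 * EC point multiplication input layout: line 0 = scalar k, lines 1..2 =
 * the input point (x, y), lines 3..6 = the curve constants a, b, p and the
 * cofactor h from the curve table.  The resulting point comes back in
 * output lines 0 and 1 and is copied out by ecpm_collect().
 */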
694 static int
695 ecpm_set_input(struct rte_crypto_asym_op *asym_op,
696                 struct icp_qat_fw_pke_request *qat_req,
697                 struct qat_asym_op_cookie *cookie,
698                 struct rte_crypto_asym_xform *xform)
699 {
700         struct qat_asym_function qat_function;
701         uint32_t alg_bytesize, __rte_unused qat_alg_bytesize, func_id;
702         int curve_id;
703
704         curve_id = pick_curve(xform);
705         if (curve_id < 0) {
706                 QAT_LOG(ERR, "Incorrect elliptic curve");
707                 return -EINVAL;
708         }
709
710         qat_function = get_ecpm_function(xform);
711         func_id = qat_function.func_id;
712         if (func_id == 0) {
713                 QAT_LOG(ERR, "Cannot obtain functionality id");
714                 return -EINVAL;
715         }
716         alg_bytesize = qat_function.bytesize;
717         qat_alg_bytesize = RTE_ALIGN_CEIL(alg_bytesize, 8);
718
719         SET_PKE_LN_EC(cookie->input_array, asym_op->ecpm.scalar,
720                         asym_op->ecpm.scalar.length, 0);
721         SET_PKE_LN_EC(cookie->input_array, asym_op->ecpm.p.x,
722                          asym_op->ecpm.p.x.length, 1);
723         SET_PKE_LN_EC(cookie->input_array, asym_op->ecpm.p.y,
724                         asym_op->ecpm.p.y.length, 2);
725         SET_PKE_LN_EC(cookie->input_array, curve[curve_id].a,
726                         alg_bytesize, 3);
727         SET_PKE_LN_EC(cookie->input_array, curve[curve_id].b,
728                         alg_bytesize, 4);
729         SET_PKE_LN_EC(cookie->input_array, curve[curve_id].p,
730                         alg_bytesize, 5);
731         SET_PKE_LN_EC(cookie->input_array, curve[curve_id].h,
732                         alg_bytesize, 6);
733
734         cookie->alg_bytesize = alg_bytesize;
735         qat_req->pke_hdr.cd_pars.func_id = func_id;
736         qat_req->input_param_count =
737                         QAT_ASYM_ECPM_IN_PARAMS;
738         qat_req->output_param_count =
739                         QAT_ASYM_ECPM_OUT_PARAMS;
740
741         HEXDUMP("k", cookie->input_array[0], qat_alg_bytesize);
742         HEXDUMP("xG", cookie->input_array[1], qat_alg_bytesize);
743         HEXDUMP("yG", cookie->input_array[2], qat_alg_bytesize);
744         HEXDUMP("a", cookie->input_array[3], qat_alg_bytesize);
745         HEXDUMP("b", cookie->input_array[4], qat_alg_bytesize);
746         HEXDUMP("q", cookie->input_array[5], qat_alg_bytesize);
747         HEXDUMP("h", cookie->input_array[6], qat_alg_bytesize);
748
749         return 0;
750 }
751
752 static uint8_t
753 ecpm_collect(struct rte_crypto_asym_op *asym_op,
754                 struct qat_asym_op_cookie *cookie)
755 {
756         uint8_t *x = asym_op->ecpm.r.x.data;
757         uint8_t *y = asym_op->ecpm.r.y.data;
758         uint32_t alg_bytesize = cookie->alg_bytesize;
759         uint32_t qat_alg_bytesize = RTE_ALIGN_CEIL(cookie->alg_bytesize, 8);
760         uint32_t ltrim = qat_alg_bytesize - alg_bytesize;
761
762         asym_op->ecpm.r.x.length = alg_bytesize;
763         asym_op->ecpm.r.y.length = alg_bytesize;
764         rte_memcpy(x, &cookie->output_array[0][ltrim], alg_bytesize);
765         rte_memcpy(y, &cookie->output_array[1][ltrim], alg_bytesize);
766
767         HEXDUMP("rX", cookie->output_array[0],
768                 alg_bytesize);
769         HEXDUMP("rY", cookie->output_array[1],
770                 alg_bytesize);
771         return RTE_CRYPTO_OP_STATUS_SUCCESS;
772 }
773
774 static int
775 asym_set_input(struct rte_crypto_asym_op *asym_op,
776                 struct icp_qat_fw_pke_request *qat_req,
777                 struct qat_asym_op_cookie *cookie,
778                 struct rte_crypto_asym_xform *xform)
779 {
780         switch (xform->xform_type) {
781         case RTE_CRYPTO_ASYM_XFORM_MODEX:
782                 return modexp_set_input(asym_op, qat_req,
783                                 cookie, xform);
784         case RTE_CRYPTO_ASYM_XFORM_MODINV:
785                 return modinv_set_input(asym_op, qat_req,
786                                 cookie, xform);
787         case RTE_CRYPTO_ASYM_XFORM_RSA:
788                 return rsa_set_input(asym_op, qat_req,
789                                 cookie, xform);
790         case RTE_CRYPTO_ASYM_XFORM_ECDSA:
791                 return ecdsa_set_input(asym_op, qat_req,
792                                 cookie, xform);
793         case RTE_CRYPTO_ASYM_XFORM_ECPM:
794                 return ecpm_set_input(asym_op, qat_req,
795                                 cookie, xform);
796         default:
797                 QAT_LOG(ERR, "Invalid/unsupported asymmetric crypto xform");
798                 return -EINVAL;
799         }
800         return 1;
801 }
802
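/*
 * Enqueue-side translation of one rte_crypto_op into a PKE request:
 * initialise the request header, let asym_set_input() populate the
 * cookie's input lines according to the xform, and point the request at
 * the cookie's IOVA pointer tables.  On failure the request is downgraded
 * to a NULL request with no input/output parameters, so the firmware
 * still completes it and the error is reported on dequeue.
 */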
803 static int
804 qat_asym_build_request(void *in_op, uint8_t *out_msg, void *op_cookie,
805                         __rte_unused uint64_t *opaque,
806                         __rte_unused enum qat_device_gen qat_dev_gen)
807 {
808         struct rte_crypto_op *op = (struct rte_crypto_op *)in_op;
809         struct rte_crypto_asym_op *asym_op = op->asym;
810         struct icp_qat_fw_pke_request *qat_req =
811                         (struct icp_qat_fw_pke_request *)out_msg;
812         struct qat_asym_op_cookie *cookie =
813                         (struct qat_asym_op_cookie *)op_cookie;
814         struct rte_crypto_asym_xform *xform;
815         struct qat_asym_session *qat_session = (struct qat_asym_session *)
816                         op->asym->session->sess_private_data;
817         int err = 0;
818
819         if (unlikely(qat_session == NULL)) {
820                 QAT_DP_LOG(ERR, "Session was not created for this device");
                err = -EINVAL;
821                 goto error;
822         }
823
824         op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
825         switch (op->sess_type) {
826         case RTE_CRYPTO_OP_WITH_SESSION:
827                 request_init(qat_req);
828                 xform = &qat_session->xform;
829                 break;
830         case RTE_CRYPTO_OP_SESSIONLESS:
831                 request_init(qat_req);
832                 xform = op->asym->xform;
833                 break;
834         default:
835                 QAT_DP_LOG(ERR, "Invalid session/xform settings");
836                 op->status = RTE_CRYPTO_OP_STATUS_INVALID_SESSION;
837                 goto error;
838         }
839         err = asym_set_input(asym_op, qat_req, cookie,
840                         xform);
841         if (err) {
842                 op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
843                 goto error;
844         }
845
846         qat_req->pke_mid.opaque = (uint64_t)(uintptr_t)op;
847         qat_req->pke_mid.src_data_addr = cookie->input_addr;
848         qat_req->pke_mid.dest_data_addr = cookie->output_addr;
849
850         HEXDUMP("qat_req:", qat_req, sizeof(struct icp_qat_fw_pke_request));
851
852         return 0;
853 error:
854         qat_req->pke_mid.opaque = (uint64_t)(uintptr_t)op;
855         HEXDUMP("qat_req:", qat_req, sizeof(struct icp_qat_fw_pke_request));
856         qat_req->output_param_count = 0;
857         qat_req->input_param_count = 0;
858         qat_req->pke_hdr.service_type = ICP_QAT_FW_COMN_REQ_NULL;
859         cookie->error |= err;
860
861         return 0;
862 }
863
864 static uint8_t
865 qat_asym_collect_response(struct rte_crypto_op *op,
866                 struct qat_asym_op_cookie *cookie,
867                 struct rte_crypto_asym_xform *xform)
868 {
869         struct rte_crypto_asym_op *asym_op = op->asym;
870
871         switch (xform->xform_type) {
872         case RTE_CRYPTO_ASYM_XFORM_MODEX:
873                 return modexp_collect(asym_op, cookie, xform);
874         case RTE_CRYPTO_ASYM_XFORM_MODINV:
875                 return modinv_collect(asym_op, cookie, xform);
876         case RTE_CRYPTO_ASYM_XFORM_RSA:
877                 return rsa_collect(asym_op, cookie);
878         case RTE_CRYPTO_ASYM_XFORM_ECDSA:
879                 return ecdsa_collect(asym_op, cookie);
880         case RTE_CRYPTO_ASYM_XFORM_ECPM:
881                 return ecpm_collect(asym_op, cookie);
882         default:
883                 QAT_LOG(ERR, "Unsupported xform type");
884                 return  RTE_CRYPTO_OP_STATUS_ERROR;
885         }
886 }
887
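/*
 * Dequeue-side handling of one PKE response: a non-zero cookie error or
 * an error flagged in the PKE/common response status marks the op as
 * failed; otherwise the result is copied back into the op by
 * qat_asym_collect_response() and the cookie buffers are scrubbed by
 * cleanup() so no key material lingers in the pool.
 */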
888 static int
889 qat_asym_process_response(void **out_op, uint8_t *resp,
890                 void *op_cookie, __rte_unused uint64_t *dequeue_err_count)
891 {
892         struct icp_qat_fw_pke_resp *resp_msg =
893                         (struct icp_qat_fw_pke_resp *)resp;
894         struct rte_crypto_op *op = (struct rte_crypto_op *)(uintptr_t)
895                         (resp_msg->opaque);
896         struct qat_asym_op_cookie *cookie = op_cookie;
897         struct rte_crypto_asym_xform *xform;
898         struct qat_asym_session *qat_session = (struct qat_asym_session *)
899                         op->asym->session->sess_private_data;
900
901         if (cookie->error) {
902                 cookie->error = 0;
903                 if (op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED)
904                         op->status = RTE_CRYPTO_OP_STATUS_ERROR;
905                 QAT_DP_LOG(ERR, "Cookie status returned error");
906         } else {
907                 if (ICP_QAT_FW_PKE_RESP_PKE_STAT_GET(
908                         resp_msg->pke_resp_hdr.resp_status.pke_resp_flags)) {
909                         if (op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED)
910                                 op->status = RTE_CRYPTO_OP_STATUS_ERROR;
911                         QAT_DP_LOG(ERR, "Asymmetric response status"
912                                         " returned error");
913                 }
914                 if (resp_msg->pke_resp_hdr.resp_status.comn_err_code) {
915                         if (op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED)
916                                 op->status = RTE_CRYPTO_OP_STATUS_ERROR;
917                         QAT_DP_LOG(ERR, "Asymmetric common status"
918                                         " returned error");
919                 }
920         }
921
922         switch (op->sess_type) {
923         case RTE_CRYPTO_OP_WITH_SESSION:
924                 xform = &qat_session->xform;
925                 break;
926         case RTE_CRYPTO_OP_SESSIONLESS:
927                 xform = op->asym->xform;
928                 break;
929         default:
930                 QAT_DP_LOG(ERR,
931                         "Invalid session/xform settings in response ring!");
932                 op->status = RTE_CRYPTO_OP_STATUS_INVALID_SESSION;
933         }
934
935         if (op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED) {
936                 op->status = qat_asym_collect_response(op,
937                                         cookie, xform);
938                 cleanup(cookie, xform, cookie->alg_bytesize);
939         }
940
941         *out_op = op;
942         HEXDUMP("resp_msg:", resp_msg, sizeof(struct icp_qat_fw_pke_resp));
943
944         return 1;
945 }
946
947 static int
948 session_set_modexp(struct qat_asym_session *qat_session,
949                         struct rte_crypto_asym_xform *xform)
950 {
951         uint8_t *modulus = xform->modex.modulus.data;
952         uint8_t *exponent = xform->modex.exponent.data;
953
954         qat_session->xform.modex.modulus.data =
955                 rte_malloc(NULL, xform->modex.modulus.length, 0);
956         if (qat_session->xform.modex.modulus.data == NULL)
957                 return -ENOMEM;
958         qat_session->xform.modex.modulus.length = xform->modex.modulus.length;
959         qat_session->xform.modex.exponent.data = rte_malloc(NULL,
960                                 xform->modex.exponent.length, 0);
961         if (qat_session->xform.modex.exponent.data == NULL) {
962                 rte_free(qat_session->xform.modex.modulus.data);
963                 return -ENOMEM;
964         }
965         qat_session->xform.modex.exponent.length = xform->modex.exponent.length;
966
967         rte_memcpy(qat_session->xform.modex.modulus.data, modulus,
968                         xform->modex.modulus.length);
969         rte_memcpy(qat_session->xform.modex.exponent.data, exponent,
970                         xform->modex.exponent.length);
971
972         return 0;
973 }
974
975 static int
976 session_set_modinv(struct qat_asym_session *qat_session,
977                         struct rte_crypto_asym_xform *xform)
978 {
979         uint8_t *modulus = xform->modinv.modulus.data;
980
981         qat_session->xform.modinv.modulus.data =
982                 rte_malloc(NULL, xform->modinv.modulus.length, 0);
983         if (qat_session->xform.modinv.modulus.data == NULL)
984                 return -ENOMEM;
985         qat_session->xform.modinv.modulus.length = xform->modinv.modulus.length;
986
987         rte_memcpy(qat_session->xform.modinv.modulus.data, modulus,
988                         xform->modinv.modulus.length);
989
990         return 0;
991 }
992
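/*
 * Deep-copies the RSA key material into session-owned buffers so the
 * xform supplied by the application does not have to remain valid for
 * the session's lifetime.  Everything allocated here is zeroed and freed
 * again in session_clear_rsa().
 */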
993 static int
994 session_set_rsa(struct qat_asym_session *qat_session,
995                         struct rte_crypto_asym_xform *xform)
996 {
997         uint8_t *n = xform->rsa.n.data;
998         uint8_t *e = xform->rsa.e.data;
999         int ret = 0;
1000
1001         qat_session->xform.rsa.key_type = xform->rsa.key_type;
1002
1003         qat_session->xform.rsa.n.data =
1004                 rte_malloc(NULL, xform->rsa.n.length, 0);
1005         if (qat_session->xform.rsa.n.data == NULL)
1006                 return -ENOMEM;
1007         qat_session->xform.rsa.n.length =
1008                 xform->rsa.n.length;
1009
1010         qat_session->xform.rsa.e.data =
1011                 rte_malloc(NULL, xform->rsa.e.length, 0);
1012         if (qat_session->xform.rsa.e.data == NULL) {
1013                 ret = -ENOMEM;
1014                 goto err;
1015         }
1016         qat_session->xform.rsa.e.length =
1017                 xform->rsa.e.length;
1018
1019         if (xform->rsa.key_type == RTE_RSA_KEY_TYPE_QT) {
1020                 uint8_t *p = xform->rsa.qt.p.data;
1021                 uint8_t *q = xform->rsa.qt.q.data;
1022                 uint8_t *dP = xform->rsa.qt.dP.data;
1023                 uint8_t *dQ = xform->rsa.qt.dQ.data;
1024                 uint8_t *qInv = xform->rsa.qt.qInv.data;
1025
1026                 qat_session->xform.rsa.qt.p.data =
1027                         rte_malloc(NULL, xform->rsa.qt.p.length, 0);
1028                 if (qat_session->xform.rsa.qt.p.data == NULL) {
1029                         ret = -ENOMEM;
1030                         goto err;
1031                 }
1032                 qat_session->xform.rsa.qt.p.length =
1033                         xform->rsa.qt.p.length;
1034
1035                 qat_session->xform.rsa.qt.q.data =
1036                         rte_malloc(NULL, xform->rsa.qt.q.length, 0);
1037                 if (qat_session->xform.rsa.qt.q.data == NULL) {
1038                         ret = -ENOMEM;
1039                         goto err;
1040                 }
1041                 qat_session->xform.rsa.qt.q.length =
1042                         xform->rsa.qt.q.length;
1043
1044                 qat_session->xform.rsa.qt.dP.data =
1045                         rte_malloc(NULL, xform->rsa.qt.dP.length, 0);
1046                 if (qat_session->xform.rsa.qt.dP.data == NULL) {
1047                         ret = -ENOMEM;
1048                         goto err;
1049                 }
1050                 qat_session->xform.rsa.qt.dP.length =
1051                         xform->rsa.qt.dP.length;
1052
1053                 qat_session->xform.rsa.qt.dQ.data =
1054                         rte_malloc(NULL, xform->rsa.qt.dQ.length, 0);
1055                 if (qat_session->xform.rsa.qt.dQ.data == NULL) {
1056                         ret = -ENOMEM;
1057                         goto err;
1058                 }
1059                 qat_session->xform.rsa.qt.dQ.length =
1060                         xform->rsa.qt.dQ.length;
1061
1062                 qat_session->xform.rsa.qt.qInv.data =
1063                         rte_malloc(NULL, xform->rsa.qt.qInv.length, 0);
1064                 if (qat_session->xform.rsa.qt.qInv.data == NULL) {
1065                         ret = -ENOMEM;
1066                         goto err;
1067                 }
1068                 qat_session->xform.rsa.qt.qInv.length =
1069                         xform->rsa.qt.qInv.length;
1070
1071                 rte_memcpy(qat_session->xform.rsa.qt.p.data, p,
1072                                 xform->rsa.qt.p.length);
1073                 rte_memcpy(qat_session->xform.rsa.qt.q.data, q,
1074                                 xform->rsa.qt.q.length);
1075                 rte_memcpy(qat_session->xform.rsa.qt.dP.data, dP,
1076                                 xform->rsa.qt.dP.length);
1077                 rte_memcpy(qat_session->xform.rsa.qt.dQ.data, dQ,
1078                                 xform->rsa.qt.dQ.length);
1079                 rte_memcpy(qat_session->xform.rsa.qt.qInv.data, qInv,
1080                                 xform->rsa.qt.qInv.length);
1081
1082         } else {
1083                 uint8_t *d = xform->rsa.d.data;
1084
1085                 qat_session->xform.rsa.d.data =
1086                         rte_malloc(NULL, xform->rsa.d.length, 0);
1087                 if (qat_session->xform.rsa.d.data == NULL) {
1088                         ret = -ENOMEM;
1089                         goto err;
1090                 }
1091                 qat_session->xform.rsa.d.length =
1092                         xform->rsa.d.length;
1093                 rte_memcpy(qat_session->xform.rsa.d.data, d,
1094                         xform->rsa.d.length);
1095         }
1096
1097         rte_memcpy(qat_session->xform.rsa.n.data, n,
1098                 xform->rsa.n.length);
1099         rte_memcpy(qat_session->xform.rsa.e.data, e,
1100                 xform->rsa.e.length);
1101
1102         return 0;
1103
1104 err:
1105         rte_free(qat_session->xform.rsa.n.data);
1106         rte_free(qat_session->xform.rsa.e.data);
1107         rte_free(qat_session->xform.rsa.d.data);
1108         rte_free(qat_session->xform.rsa.qt.p.data);
1109         rte_free(qat_session->xform.rsa.qt.q.data);
1110         rte_free(qat_session->xform.rsa.qt.dP.data);
1111         rte_free(qat_session->xform.rsa.qt.dQ.data);
1112         rte_free(qat_session->xform.rsa.qt.qInv.data);
1113         return ret;
1114 }
1115
1116 static void
1117 session_set_ecdsa(struct qat_asym_session *qat_session,
1118                         struct rte_crypto_asym_xform *xform)
1119 {
1120         qat_session->xform.ec.curve_id = xform->ec.curve_id;
1121 }
1122
1123 int
1124 qat_asym_session_configure(struct rte_cryptodev *dev __rte_unused,
1125                 struct rte_crypto_asym_xform *xform,
1126                 struct rte_cryptodev_asym_session *session)
1127 {
1128         struct qat_asym_session *qat_session;
1129         int ret = 0;
1130
1131         qat_session = (struct qat_asym_session *) session->sess_private_data;
1132         memset(qat_session, 0, sizeof(*qat_session));
1133
1134         qat_session->xform.xform_type = xform->xform_type;
1135         switch (xform->xform_type) {
1136         case RTE_CRYPTO_ASYM_XFORM_MODEX:
1137                 ret = session_set_modexp(qat_session, xform);
1138                 break;
1139         case RTE_CRYPTO_ASYM_XFORM_MODINV:
1140                 ret = session_set_modinv(qat_session, xform);
1141                 break;
1142         case RTE_CRYPTO_ASYM_XFORM_RSA:
1143                 ret = session_set_rsa(qat_session, xform);
1144                 break;
1145         case RTE_CRYPTO_ASYM_XFORM_ECDSA:
1146         case RTE_CRYPTO_ASYM_XFORM_ECPM:
1147                 session_set_ecdsa(qat_session, xform);
1148                 break;
1149         default:
1150                 ret = -ENOTSUP;
1151         }
1152
1153         if (ret) {
1154                 QAT_LOG(ERR, "Unsupported xform type");
1155                 return ret;
1156         }
1157
1158         return 0;
1159 }
1160
1161 unsigned int
1162 qat_asym_session_get_private_size(struct rte_cryptodev *dev __rte_unused)
1163 {
1164         return RTE_ALIGN_CEIL(sizeof(struct qat_asym_session), 8);
1165 }
1166
1167 static void
1168 session_clear_modexp(struct rte_crypto_modex_xform *modex)
1169 {
1170         memset(modex->modulus.data, 0, modex->modulus.length);
1171         rte_free(modex->modulus.data);
1172         memset(modex->exponent.data, 0, modex->exponent.length);
1173         rte_free(modex->exponent.data);
1174 }
1175
1176 static void
1177 session_clear_modinv(struct rte_crypto_modinv_xform *modinv)
1178 {
1179         memset(modinv->modulus.data, 0, modinv->modulus.length);
1180         rte_free(modinv->modulus.data);
1181 }
1182
1183 static void
1184 session_clear_rsa(struct rte_crypto_rsa_xform *rsa)
1185 {
1187         memset(rsa->n.data, 0, rsa->n.length);
1188         rte_free(rsa->n.data);
1189         memset(rsa->e.data, 0, rsa->e.length);
1190         rte_free(rsa->e.data);
1191         if (rsa->key_type == RTE_RSA_KEY_TYPE_EXP) {
1192                 memset(rsa->d.data, 0, rsa->d.length);
1193                 rte_free(rsa->d.data);
1194         } else {
1195                 memset(rsa->qt.p.data, 0, rsa->qt.p.length);
1196                 rte_free(rsa->qt.p.data);
1197                 memset(rsa->qt.q.data, 0, rsa->qt.q.length);
1198                 rte_free(rsa->qt.q.data);
1199                 memset(rsa->qt.dP.data, 0, rsa->qt.dP.length);
1200                 rte_free(rsa->qt.dP.data);
1201                 memset(rsa->qt.dQ.data, 0, rsa->qt.dQ.length);
1202                 rte_free(rsa->qt.dQ.data);
1203                 memset(rsa->qt.qInv.data, 0, rsa->qt.qInv.length);
1204                 rte_free(rsa->qt.qInv.data);
1205         }
1206 }
1207
1208 static void
1209 session_clear_xform(struct qat_asym_session *qat_session)
1210 {
1211         switch (qat_session->xform.xform_type) {
1212         case RTE_CRYPTO_ASYM_XFORM_MODEX:
1213                 session_clear_modexp(&qat_session->xform.modex);
1214                 break;
1215         case RTE_CRYPTO_ASYM_XFORM_MODINV:
1216                 session_clear_modinv(&qat_session->xform.modinv);
1217                 break;
1218         case RTE_CRYPTO_ASYM_XFORM_RSA:
1219                 session_clear_rsa(&qat_session->xform.rsa);
1220                 break;
1221         default:
1222                 break;
1223         }
1224 }
1225
1226 void
1227 qat_asym_session_clear(struct rte_cryptodev *dev,
1228                 struct rte_cryptodev_asym_session *session)
1229 {
1230         void *sess_priv = session->sess_private_data;
1231         struct qat_asym_session *qat_session =
1232                 (struct qat_asym_session *)sess_priv;
1233
1234         if (sess_priv) {
1235                 session_clear_xform(qat_session);
1236                 memset(qat_session, 0, qat_asym_session_get_private_size(dev));
1237         }
1238 }
1239
1240 static uint16_t
1241 qat_asym_crypto_enqueue_op_burst(void *qp, struct rte_crypto_op **ops,
1242                 uint16_t nb_ops)
1243 {
1244         return qat_enqueue_op_burst(qp, qat_asym_build_request, (void **)ops,
1245                         nb_ops);
1246 }
1247
1248 static uint16_t
1249 qat_asym_crypto_dequeue_op_burst(void *qp, struct rte_crypto_op **ops,
1250                 uint16_t nb_ops)
1251 {
1252         return qat_dequeue_op_burst(qp, (void **)ops, qat_asym_process_response,
1253                                 nb_ops);
1254 }
1255
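/*
 * Precomputes the IOVA layout of one op cookie: input_addr/output_addr
 * point at the tables of per-line physical addresses, and each table
 * entry i points at the flat input_array[i]/output_array[i] buffer inside
 * the same cookie.  The firmware thus always sees a fixed scatter list
 * and the data path only has to copy operands into the arrays.
 */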
1256 void
1257 qat_asym_init_op_cookie(void *op_cookie)
1258 {
1259         int j;
1260         struct qat_asym_op_cookie *cookie = op_cookie;
1261
1262         cookie->input_addr = rte_mempool_virt2iova(cookie) +
1263                         offsetof(struct qat_asym_op_cookie,
1264                                         input_params_ptrs);
1265
1266         cookie->output_addr = rte_mempool_virt2iova(cookie) +
1267                         offsetof(struct qat_asym_op_cookie,
1268                                         output_params_ptrs);
1269
1270         for (j = 0; j < 8; j++) {
1271                 cookie->input_params_ptrs[j] =
1272                                 rte_mempool_virt2iova(cookie) +
1273                                 offsetof(struct qat_asym_op_cookie,
1274                                                 input_array[j]);
1275                 cookie->output_params_ptrs[j] =
1276                                 rte_mempool_virt2iova(cookie) +
1277                                 offsetof(struct qat_asym_op_cookie,
1278                                                 output_array[j]);
1279         }
1280 }
1281
1282 int
1283 qat_asym_dev_create(struct qat_pci_device *qat_pci_dev,
1284                 struct qat_dev_cmd_param *qat_dev_cmd_param)
1285 {
1286         struct qat_cryptodev_private *internals;
1287         struct rte_cryptodev *cryptodev;
1288         struct qat_device_info *qat_dev_instance =
1289                 &qat_pci_devs[qat_pci_dev->qat_dev_id];
1290         struct rte_cryptodev_pmd_init_params init_params = {
1291                 .name = "",
1292                 .socket_id = qat_dev_instance->pci_dev->device.numa_node,
1293                 .private_data_size = sizeof(struct qat_cryptodev_private)
1294         };
1295         struct qat_capabilities_info capa_info;
1296         const struct rte_cryptodev_capabilities *capabilities;
1297         const struct qat_crypto_gen_dev_ops *gen_dev_ops =
1298                 &qat_asym_gen_dev_ops[qat_pci_dev->qat_dev_gen];
1299         char name[RTE_CRYPTODEV_NAME_MAX_LEN];
1300         char capa_memz_name[RTE_CRYPTODEV_NAME_MAX_LEN];
1301         uint64_t capa_size;
1302         int i = 0;
1303
1304         snprintf(name, RTE_CRYPTODEV_NAME_MAX_LEN, "%s_%s",
1305                         qat_pci_dev->name, "asym");
1306         QAT_LOG(DEBUG, "Creating QAT ASYM device %s", name);
1307
1308         if (gen_dev_ops->cryptodev_ops == NULL) {
1309                 QAT_LOG(ERR, "Device %s does not support asymmetric crypto",
1310                                 name);
1311                 return -(EFAULT);
1312         }
1313
1314         if (rte_eal_process_type() == RTE_PROC_PRIMARY) {
1315                 qat_pci_dev->qat_asym_driver_id =
1316                                 qat_asym_driver_id;
1317         } else if (rte_eal_process_type() == RTE_PROC_SECONDARY) {
1318                 if (qat_pci_dev->qat_asym_driver_id !=
1319                                 qat_asym_driver_id) {
1320                         QAT_LOG(ERR,
1321                                 "Device %s has a different driver id than the corresponding device in the primary process",
1322                                 name);
1323                         return -(EFAULT);
1324                 }
1325         }
1326
1327         /* Populate subset device to use in cryptodev device creation */
1328         qat_dev_instance->asym_rte_dev.driver = &cryptodev_qat_asym_driver;
1329         qat_dev_instance->asym_rte_dev.numa_node =
1330                         qat_dev_instance->pci_dev->device.numa_node;
1331         qat_dev_instance->asym_rte_dev.devargs = NULL;
1332
1333         cryptodev = rte_cryptodev_pmd_create(name,
1334                         &(qat_dev_instance->asym_rte_dev), &init_params);
1335
1336         if (cryptodev == NULL)
1337                 return -ENODEV;
1338
1339         qat_dev_instance->asym_rte_dev.name = cryptodev->data->name;
1340         cryptodev->driver_id = qat_asym_driver_id;
1341         cryptodev->dev_ops = gen_dev_ops->cryptodev_ops;
1342
1343         cryptodev->enqueue_burst = qat_asym_crypto_enqueue_op_burst;
1344         cryptodev->dequeue_burst = qat_asym_crypto_dequeue_op_burst;
1345
1346         cryptodev->feature_flags = gen_dev_ops->get_feature_flags(qat_pci_dev);
1347
1348         if (rte_eal_process_type() != RTE_PROC_PRIMARY)
1349                 return 0;
1350
1351         snprintf(capa_memz_name, RTE_CRYPTODEV_NAME_MAX_LEN,
1352                         "QAT_ASYM_CAPA_GEN_%d",
1353                         qat_pci_dev->qat_dev_gen);
1354
1355         internals = cryptodev->data->dev_private;
1356         internals->qat_dev = qat_pci_dev;
1357         internals->dev_id = cryptodev->data->dev_id;
1358
1359         capa_info = gen_dev_ops->get_capabilities(qat_pci_dev);
1360         capabilities = capa_info.data;
1361         capa_size = capa_info.size;
1362
1363         internals->capa_mz = rte_memzone_lookup(capa_memz_name);
1364         if (internals->capa_mz == NULL) {
1365                 internals->capa_mz = rte_memzone_reserve(capa_memz_name,
1366                                 capa_size, rte_socket_id(), 0);
1367                 if (internals->capa_mz == NULL) {
1368                         QAT_LOG(DEBUG,
1369                                 "Error allocating memzone for capabilities, "
1370                                 "destroying PMD for %s",
1371                                 name);
1372                         rte_cryptodev_pmd_destroy(cryptodev);
1373                         memset(&qat_dev_instance->asym_rte_dev, 0,
1374                                 sizeof(qat_dev_instance->asym_rte_dev));
1375                         return -EFAULT;
1376                 }
1377         }
1378
1379         memcpy(internals->capa_mz->addr, capabilities, capa_size);
1380         internals->qat_dev_capabilities = internals->capa_mz->addr;
1381
1382         while (1) {
1383                 if (qat_dev_cmd_param[i].name == NULL)
1384                         break;
1385                 if (!strcmp(qat_dev_cmd_param[i].name, ASYM_ENQ_THRESHOLD_NAME))
1386                         internals->min_enq_burst_threshold =
1387                                         qat_dev_cmd_param[i].val;
1388                 i++;
1389         }
1390
1391         qat_pci_dev->asym_dev = internals;
1392         internals->service_type = QAT_SERVICE_ASYMMETRIC;
1393         QAT_LOG(DEBUG, "Created QAT ASYM device %s as cryptodev instance %d",
1394                         cryptodev->data->name, internals->dev_id);
1395         return 0;
1396 }
1397
1398 int
1399 qat_asym_dev_destroy(struct qat_pci_device *qat_pci_dev)
1400 {
1401         struct rte_cryptodev *cryptodev;
1402
1403         if (qat_pci_dev == NULL)
1404                 return -ENODEV;
1405         if (qat_pci_dev->asym_dev == NULL)
1406                 return 0;
1407         if (rte_eal_process_type() == RTE_PROC_PRIMARY)
1408                 rte_memzone_free(qat_pci_dev->asym_dev->capa_mz);
1409
1410         /* free crypto device */
1411         cryptodev = rte_cryptodev_pmd_get_dev(
1412                         qat_pci_dev->asym_dev->dev_id);
1413         rte_cryptodev_pmd_destroy(cryptodev);
1414         qat_pci_devs[qat_pci_dev->qat_dev_id].asym_rte_dev.name = NULL;
1415         qat_pci_dev->asym_dev = NULL;
1416
1417         return 0;
1418 }
1419
1420 static struct cryptodev_driver qat_crypto_drv;
1421 RTE_PMD_REGISTER_CRYPTO_DRIVER(qat_crypto_drv,
1422                 cryptodev_qat_asym_driver,
1423                 qat_asym_driver_id);