1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2019 - 2022 Intel Corporation
7 #include <cryptodev_pmd.h>
9 #include "qat_device.h"
13 #include "icp_qat_fw_pke.h"
14 #include "icp_qat_fw.h"
18 uint8_t qat_asym_driver_id;
20 struct qat_crypto_gen_dev_ops qat_asym_gen_dev_ops[QAT_N_GENS];
22 /* An rte_driver is needed in the registration of both the device and the driver
24 * The actual qat pci's rte_driver can't be used as its name represents
25 * the whole pci device with all services. Think of this as a holder for a name
26 * for the crypto part of the pci device.
28 static const char qat_asym_drv_name[] = RTE_STR(CRYPTODEV_NAME_QAT_ASYM_PMD);
29 static const struct rte_driver cryptodev_qat_asym_driver = {
30 .name = qat_asym_drv_name,
31 .alias = qat_asym_drv_name
36 * Macros with suffix _F are used with some of predefined identifiers:
36 * - cookie->input_buffer
37 * - qat_func_alignsize
39 #if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
40 #define HEXDUMP(name, where, size) QAT_DP_HEXDUMP_LOG(DEBUG, name, \
42 #define HEXDUMP_OFF(name, where, size, idx) QAT_DP_HEXDUMP_LOG(DEBUG, name, \
43 &where[idx * size], size)
45 #define HEXDUMP_OFF_F(name, idx) QAT_DP_HEXDUMP_LOG(DEBUG, name, \
46 &cookie->input_buffer[idx * qat_func_alignsize], \
49 #define HEXDUMP(name, where, size)
50 #define HEXDUMP_OFF(name, where, size, idx)
51 #define HEXDUMP_OFF_F(name, idx)
54 #define CHECK_IF_NOT_EMPTY(param, name, pname, status) \
56 if (param.length == 0) { \
59 " input parameter, zero length " pname \
62 } else if (check_zero(param)) { \
64 "Invalid " name " input parameter, empty " \
65 pname ", length = %d", \
72 #define SET_PKE_LN(what, how, idx) \
73 rte_memcpy(cookie->input_array[idx] + how - \
78 #define SET_PKE_LN_EC(curve, p, idx) \
79 rte_memcpy(cookie->input_array[idx] + \
80 qat_func_alignsize - curve.bytesize, \
81 curve.p.data, curve.bytesize)
83 #define SET_PKE_9A_IN(what, idx) \
84 rte_memcpy(&cookie->input_buffer[idx * \
85 qat_func_alignsize] + \
86 qat_func_alignsize - what.length, \
87 what.data, what.length)
89 #define SET_PKE_9A_EC(curve, p, idx) \
90 rte_memcpy(&cookie->input_buffer[idx * \
91 qat_func_alignsize] + \
92 qat_func_alignsize - curve.bytesize, \
93 curve.p.data, curve.bytesize)
96 request_init(struct icp_qat_fw_pke_request *qat_req)
98 memset(qat_req, 0, sizeof(*qat_req));
99 qat_req->pke_hdr.service_type = ICP_QAT_FW_COMN_REQ_CPM_FW_PKE;
100 qat_req->pke_hdr.hdr_flags =
101 ICP_QAT_FW_COMN_HDR_FLAGS_BUILD
102 (ICP_QAT_FW_COMN_REQ_FLAG_SET);
106 cleanup_arrays(struct qat_asym_op_cookie *cookie,
107 int in_count, int out_count, int alg_size)
111 for (i = 0; i < in_count; i++)
112 memset(cookie->input_array[i], 0x0, alg_size);
113 for (i = 0; i < out_count; i++)
114 memset(cookie->output_array[i], 0x0, alg_size);
118 cleanup_crt(struct qat_asym_op_cookie *cookie,
123 memset(cookie->input_array[0], 0x0, alg_size);
124 for (i = 1; i < QAT_ASYM_RSA_QT_NUM_IN_PARAMS; i++)
125 memset(cookie->input_array[i], 0x0, alg_size / 2);
126 for (i = 0; i < QAT_ASYM_RSA_NUM_OUT_PARAMS; i++)
127 memset(cookie->output_array[i], 0x0, alg_size);
/* Dispatch per-xform scrubbing of the cookie's input/output buffers
 * after an operation completes, so no key material lingers in DMA-able
 * memory.
 */
cleanup(struct qat_asym_op_cookie *cookie,
        struct rte_crypto_asym_xform *xform, int alg_size)
    if (xform->xform_type == RTE_CRYPTO_ASYM_XFORM_MODEX)
        cleanup_arrays(cookie, QAT_ASYM_MODEXP_NUM_IN_PARAMS,
                QAT_ASYM_MODEXP_NUM_OUT_PARAMS, alg_size);
    else if (xform->xform_type == RTE_CRYPTO_ASYM_XFORM_MODINV)
        cleanup_arrays(cookie, QAT_ASYM_MODINV_NUM_IN_PARAMS,
                QAT_ASYM_MODINV_NUM_OUT_PARAMS, alg_size);
    else if (xform->xform_type == RTE_CRYPTO_ASYM_XFORM_RSA) {
        /* CRT keys use half-size buffers for the five key components. */
        if (xform->rsa.key_type == RTE_RSA_KEY_TYPE_QT)
            cleanup_crt(cookie, alg_size);
        /* NOTE(review): non-QT RSA falls through to full-size scrub. */
            cleanup_arrays(cookie, QAT_ASYM_RSA_NUM_IN_PARAMS,
                QAT_ASYM_RSA_NUM_OUT_PARAMS, alg_size);
151 check_zero(rte_crypto_param n)
153 int i, len = n.length;
156 for (i = len - 1; i >= 0; i--) {
157 if (n.data[i] != 0x0)
160 } else if (len == 8 && *(uint64_t *)&n.data[len - 8] == 0) {
162 } else if (*(uint64_t *)&n.data[len - 8] == 0) {
163 for (i = len - 9; i >= 0; i--) {
164 if (n.data[i] != 0x0)
173 static struct qat_asym_function
174 get_asym_function(struct rte_crypto_asym_xform *xform)
176 struct qat_asym_function qat_function;
178 switch (xform->xform_type) {
179 case RTE_CRYPTO_ASYM_XFORM_MODEX:
180 qat_function = get_modexp_function(xform);
182 case RTE_CRYPTO_ASYM_XFORM_MODINV:
183 qat_function = get_modinv_function(xform);
186 qat_function.func_id = 0;
/* Prepare a modular-exponentiation PKE request: validate the modulus and
 * exponent, pick a firmware function sized to the largest operand, then
 * right-align base/exponent/modulus into the cookie's input buffers.
 */
modexp_set_input(struct rte_crypto_asym_op *asym_op,
        struct icp_qat_fw_pke_request *qat_req,
        struct qat_asym_op_cookie *cookie,
        struct rte_crypto_asym_xform *xform)
    struct qat_asym_function qat_function;
    uint32_t alg_bytesize, func_id, in_bytesize;

    /* Reject zero-length or all-zero modulus/exponent up front. */
    CHECK_IF_NOT_EMPTY(xform->modex.modulus, "mod exp",
    CHECK_IF_NOT_EMPTY(xform->modex.exponent, "mod exp",

    /* Firmware function is chosen by the widest of the three operands. */
    if (asym_op->modex.base.length > xform->modex.exponent.length &&
        asym_op->modex.base.length > xform->modex.modulus.length) {
        in_bytesize = asym_op->modex.base.length;
    } else if (xform->modex.exponent.length > xform->modex.modulus.length)
        in_bytesize = xform->modex.exponent.length;
        in_bytesize = xform->modex.modulus.length;

    qat_function = get_modexp_function2(in_bytesize);
    func_id = qat_function.func_id;
    if (qat_function.func_id == 0) {
        QAT_LOG(ERR, "Cannot obtain functionality id");
    alg_bytesize = qat_function.bytesize;

    /* Right-align each operand into its input slot (big-endian layout). */
    SET_PKE_LN(asym_op->modex.base, alg_bytesize, 0);
    SET_PKE_LN(xform->modex.exponent, alg_bytesize, 1);
    SET_PKE_LN(xform->modex.modulus, alg_bytesize, 2);

    cookie->alg_bytesize = alg_bytesize;
    qat_req->pke_hdr.cd_pars.func_id = func_id;
    qat_req->input_param_count = QAT_ASYM_MODEXP_NUM_IN_PARAMS;
    qat_req->output_param_count = QAT_ASYM_MODEXP_NUM_OUT_PARAMS;

    HEXDUMP("ModExp base", cookie->input_array[0], alg_bytesize);
    HEXDUMP("ModExp exponent", cookie->input_array[1], alg_bytesize);
    HEXDUMP("ModExp modulus", cookie->input_array[2], alg_bytesize);
243 modexp_collect(struct rte_crypto_asym_op *asym_op,
244 struct qat_asym_op_cookie *cookie,
245 struct rte_crypto_asym_xform *xform)
247 rte_crypto_param n = xform->modex.modulus;
248 uint32_t alg_bytesize = cookie->alg_bytesize;
249 uint8_t *modexp_result = asym_op->modex.result.data;
251 rte_memcpy(modexp_result,
252 cookie->output_array[0] + alg_bytesize
253 - n.length, n.length);
254 HEXDUMP("ModExp result", cookie->output_array[0],
256 return RTE_CRYPTO_OP_STATUS_SUCCESS;
/* Prepare a modular-inverse PKE request: validate the modulus, select the
 * firmware function for this xform, then right-align base and modulus into
 * the cookie's input buffers.
 */
modinv_set_input(struct rte_crypto_asym_op *asym_op,
        struct icp_qat_fw_pke_request *qat_req,
        struct qat_asym_op_cookie *cookie,
        struct rte_crypto_asym_xform *xform)
    struct qat_asym_function qat_function;
    uint32_t alg_bytesize, func_id;

    /* NOTE(review): this reads xform->modex.modulus in the mod-inv path.
     * It lands on the right bytes only because modex and modinv share a
     * union and both have `modulus` first — should be
     * xform->modinv.modulus; confirm and fix upstream.
     */
    CHECK_IF_NOT_EMPTY(xform->modex.modulus, "mod inv",

    qat_function = get_asym_function(xform);
    func_id = qat_function.func_id;
        QAT_LOG(ERR, "Cannot obtain functionality id");
    alg_bytesize = qat_function.bytesize;

    /* Right-align operands into their input slots (big-endian layout). */
    SET_PKE_LN(asym_op->modinv.base, alg_bytesize, 0);
    SET_PKE_LN(xform->modinv.modulus, alg_bytesize, 1);

    cookie->alg_bytesize = alg_bytesize;
    qat_req->pke_hdr.cd_pars.func_id = func_id;
    qat_req->input_param_count =
            QAT_ASYM_MODINV_NUM_IN_PARAMS;
    qat_req->output_param_count =
            QAT_ASYM_MODINV_NUM_OUT_PARAMS;

    HEXDUMP("ModInv base", cookie->input_array[0], alg_bytesize);
    HEXDUMP("ModInv modulus", cookie->input_array[1], alg_bytesize);
299 modinv_collect(struct rte_crypto_asym_op *asym_op,
300 struct qat_asym_op_cookie *cookie,
301 struct rte_crypto_asym_xform *xform)
303 rte_crypto_param n = xform->modinv.modulus;
304 uint8_t *modinv_result = asym_op->modinv.result.data;
305 uint32_t alg_bytesize = cookie->alg_bytesize;
307 rte_memcpy(modinv_result + (asym_op->modinv.result.length
309 cookie->output_array[0] + alg_bytesize
310 - n.length, n.length);
311 HEXDUMP("ModInv result", cookie->output_array[0],
313 return RTE_CRYPTO_OP_STATUS_SUCCESS;
/* Prepare an RSA public-key PKE request (encrypt or verify): pick the
 * firmware function, right-align the message/signature plus (e, n) into
 * the cookie's input buffers. Only PADDING_NONE is supported here.
 */
rsa_set_pub_input(struct rte_crypto_asym_op *asym_op,
        struct icp_qat_fw_pke_request *qat_req,
        struct qat_asym_op_cookie *cookie,
        struct rte_crypto_asym_xform *xform)
    struct qat_asym_function qat_function;
    uint32_t alg_bytesize, func_id;

    qat_function = get_rsa_enc_function(xform);
    func_id = qat_function.func_id;
        QAT_LOG(ERR, "Cannot obtain functionality id");
    alg_bytesize = qat_function.bytesize;

    if (asym_op->rsa.op_type == RTE_CRYPTO_ASYM_OP_ENCRYPT) {
        /* Encrypt: operand 0 is the plaintext message. */
        switch (asym_op->rsa.padding.type) {
        case RTE_CRYPTO_RSA_PADDING_NONE:
            SET_PKE_LN(asym_op->rsa.message, alg_bytesize, 0);
            "Invalid RSA padding (Encryption)"
        HEXDUMP("RSA Message", cookie->input_array[0], alg_bytesize);
        /* Verify: operand 0 is the signature to be exponentiated. */
        switch (asym_op->rsa.padding.type) {
        case RTE_CRYPTO_RSA_PADDING_NONE:
            SET_PKE_LN(asym_op->rsa.sign, alg_bytesize, 0);
            "Invalid RSA padding (Verify)");
        HEXDUMP("RSA Signature", cookie->input_array[0],

    /* Public exponent and modulus occupy slots 1 and 2. */
    SET_PKE_LN(xform->rsa.e, alg_bytesize, 1);
    SET_PKE_LN(xform->rsa.n, alg_bytesize, 2);

    cookie->alg_bytesize = alg_bytesize;
    qat_req->pke_hdr.cd_pars.func_id = func_id;

    HEXDUMP("RSA Public Key", cookie->input_array[1], alg_bytesize);
    HEXDUMP("RSA Modulus", cookie->input_array[2], alg_bytesize);
/* Prepare an RSA private-key PKE request (decrypt or sign). Supports the
 * CRT (quintuple) key form — five half-size components in slots 1..5 —
 * and the plain exponent form (d, n) in slots 1..2. Operand 0 is the
 * ciphertext (decrypt) or message (sign); only PADDING_NONE is handled.
 */
rsa_set_priv_input(struct rte_crypto_asym_op *asym_op,
        struct icp_qat_fw_pke_request *qat_req,
        struct qat_asym_op_cookie *cookie,
        struct rte_crypto_asym_xform *xform)
    struct qat_asym_function qat_function;
    uint32_t alg_bytesize, func_id;

    if (xform->rsa.key_type == RTE_RSA_KEY_TYPE_QT) {
        qat_function = get_rsa_crt_function(xform);
        func_id = qat_function.func_id;
            QAT_LOG(ERR, "Cannot obtain functionality id");
        alg_bytesize = qat_function.bytesize;
        qat_req->input_param_count =
                QAT_ASYM_RSA_QT_NUM_IN_PARAMS;

        /* CRT components are half the modulus size each. */
        SET_PKE_LN(xform->rsa.qt.p, (alg_bytesize >> 1), 1);
        SET_PKE_LN(xform->rsa.qt.q, (alg_bytesize >> 1), 2);
        SET_PKE_LN(xform->rsa.qt.dP, (alg_bytesize >> 1), 3);
        SET_PKE_LN(xform->rsa.qt.dQ, (alg_bytesize >> 1), 4);
        SET_PKE_LN(xform->rsa.qt.qInv, (alg_bytesize >> 1), 5);

        HEXDUMP("RSA p", cookie->input_array[1],
        HEXDUMP("RSA q", cookie->input_array[2],
        HEXDUMP("RSA dP", cookie->input_array[3],
        HEXDUMP("RSA dQ", cookie->input_array[4],
        HEXDUMP("RSA qInv", cookie->input_array[5],
    } else if (xform->rsa.key_type ==
            RTE_RSA_KEY_TYPE_EXP) {
        qat_function = get_rsa_dec_function(xform);
        func_id = qat_function.func_id;
            QAT_LOG(ERR, "Cannot obtain functionality id");
        alg_bytesize = qat_function.bytesize;

        /* Plain form: private exponent d and modulus n. */
        SET_PKE_LN(xform->rsa.d, alg_bytesize, 1);
        SET_PKE_LN(xform->rsa.n, alg_bytesize, 2);

        HEXDUMP("RSA d", cookie->input_array[1],
        HEXDUMP("RSA n", cookie->input_array[2],
        QAT_LOG(ERR, "Invalid RSA key type");

    if (asym_op->rsa.op_type ==
            RTE_CRYPTO_ASYM_OP_DECRYPT) {
        switch (asym_op->rsa.padding.type) {
        case RTE_CRYPTO_RSA_PADDING_NONE:
            SET_PKE_LN(asym_op->rsa.cipher, alg_bytesize, 0);
            HEXDUMP("RSA ciphertext", cookie->input_array[0],
            "Invalid padding of RSA (Decrypt)");
    } else if (asym_op->rsa.op_type ==
            RTE_CRYPTO_ASYM_OP_SIGN) {
        switch (asym_op->rsa.padding.type) {
        case RTE_CRYPTO_RSA_PADDING_NONE:
            SET_PKE_LN(asym_op->rsa.message, alg_bytesize, 0);
            HEXDUMP("RSA text to be signed", cookie->input_array[0],
            "Invalid padding of RSA (Signature)");

    cookie->alg_bytesize = alg_bytesize;
    qat_req->pke_hdr.cd_pars.func_id = func_id;
466 rsa_set_input(struct rte_crypto_asym_op *asym_op,
467 struct icp_qat_fw_pke_request *qat_req,
468 struct qat_asym_op_cookie *cookie,
469 struct rte_crypto_asym_xform *xform)
471 qat_req->input_param_count =
472 QAT_ASYM_RSA_NUM_IN_PARAMS;
473 qat_req->output_param_count =
474 QAT_ASYM_RSA_NUM_OUT_PARAMS;
476 if (asym_op->rsa.op_type == RTE_CRYPTO_ASYM_OP_ENCRYPT ||
477 asym_op->rsa.op_type ==
478 RTE_CRYPTO_ASYM_OP_VERIFY) {
479 return rsa_set_pub_input(asym_op, qat_req, cookie, xform);
481 return rsa_set_priv_input(asym_op, qat_req, cookie, xform);
/* Copy a completed RSA result from the cookie back into the op:
 * encrypt -> rsa.cipher, verify -> rsa.cipher (exponentiated signature),
 * decrypt -> rsa.message, sign -> rsa.sign. Only PADDING_NONE paths are
 * handled; other paddings are rejected.
 */
rsa_collect(struct rte_crypto_asym_op *asym_op,
        struct qat_asym_op_cookie *cookie)
    uint32_t alg_bytesize = cookie->alg_bytesize;

    if (asym_op->rsa.op_type == RTE_CRYPTO_ASYM_OP_ENCRYPT ||
            asym_op->rsa.op_type == RTE_CRYPTO_ASYM_OP_VERIFY) {

        if (asym_op->rsa.op_type ==
                RTE_CRYPTO_ASYM_OP_ENCRYPT) {
            uint8_t *rsa_result = asym_op->rsa.cipher.data;

            rte_memcpy(rsa_result,
                    cookie->output_array[0],
            HEXDUMP("RSA Encrypted data", cookie->output_array[0],
            /* Verify path: recovered signature payload. */
            uint8_t *rsa_result = asym_op->rsa.cipher.data;

            switch (asym_op->rsa.padding.type) {
            case RTE_CRYPTO_RSA_PADDING_NONE:
                rte_memcpy(rsa_result,
                    cookie->output_array[0],
                HEXDUMP("RSA signature",
                    cookie->output_array[0],
                QAT_LOG(ERR, "Padding not supported");
                return RTE_CRYPTO_OP_STATUS_ERROR;

        if (asym_op->rsa.op_type == RTE_CRYPTO_ASYM_OP_DECRYPT) {
            uint8_t *rsa_result = asym_op->rsa.message.data;

            switch (asym_op->rsa.padding.type) {
            case RTE_CRYPTO_RSA_PADDING_NONE:
                rte_memcpy(rsa_result,
                    cookie->output_array[0],
                HEXDUMP("RSA Decrypted Message",
                    cookie->output_array[0],
                QAT_LOG(ERR, "Padding not supported");
                return RTE_CRYPTO_OP_STATUS_ERROR;
            /* Sign path: produced signature. */
            uint8_t *rsa_result = asym_op->rsa.sign.data;

            rte_memcpy(rsa_result,
                    cookie->output_array[0],
            HEXDUMP("RSA Signature", cookie->output_array[0],
    return RTE_CRYPTO_OP_STATUS_SUCCESS;
/* Prepare an ECDSA PKE request. All operands go into one flat
 * input_buffer, each right-aligned in a qat_func_alignsize-wide slot
 * (SET_PKE_9A_* macros); curve constants come from the curve[] table
 * keyed by pick_curve(). Sign and verify use different firmware
 * functions and different slot orderings.
 */
ecdsa_set_input(struct rte_crypto_asym_op *asym_op,
        struct icp_qat_fw_pke_request *qat_req,
        struct qat_asym_op_cookie *cookie,
        struct rte_crypto_asym_xform *xform)
    struct qat_asym_function qat_function;
    uint32_t qat_func_alignsize, func_id;

    curve_id = pick_curve(xform);
        QAT_LOG(DEBUG, "Incorrect elliptic curve");

    switch (asym_op->ecdsa.op_type) {
    case RTE_CRYPTO_ASYM_OP_SIGN:
        qat_function = get_ecdsa_function(xform);
        func_id = qat_function.func_id;
            QAT_LOG(ERR, "Cannot obtain functionality id");
        /* Firmware wants 8-byte-aligned operand slots. */
            RTE_ALIGN_CEIL(qat_function.bytesize, 8);

        /* Sign inputs: private key, digest, per-sign secret k,
         * then curve constants b, a, p, n and base point (y, x).
         */
        SET_PKE_9A_IN(asym_op->ecdsa.pkey, 0);
        SET_PKE_9A_IN(asym_op->ecdsa.message, 1);
        SET_PKE_9A_IN(asym_op->ecdsa.k, 2);
        SET_PKE_9A_EC(curve[curve_id], b, 3);
        SET_PKE_9A_EC(curve[curve_id], a, 4);
        SET_PKE_9A_EC(curve[curve_id], p, 5);
        SET_PKE_9A_EC(curve[curve_id], n, 6);
        SET_PKE_9A_EC(curve[curve_id], y, 7);
        SET_PKE_9A_EC(curve[curve_id], x, 8);

        cookie->alg_bytesize = curve[curve_id].bytesize;
        cookie->qat_func_alignsize = qat_func_alignsize;
        qat_req->pke_hdr.cd_pars.func_id = func_id;
        qat_req->input_param_count =
                QAT_ASYM_ECDSA_RS_SIGN_IN_PARAMS;
        qat_req->output_param_count =
                QAT_ASYM_ECDSA_RS_SIGN_OUT_PARAMS;

        HEXDUMP_OFF_F("ECDSA d", 0);
        HEXDUMP_OFF_F("ECDSA e", 1);
        HEXDUMP_OFF_F("ECDSA k", 2);
        HEXDUMP_OFF_F("ECDSA b", 3);
        HEXDUMP_OFF_F("ECDSA a", 4);
        HEXDUMP_OFF_F("ECDSA n", 5);
        HEXDUMP_OFF_F("ECDSA y", 6);
        HEXDUMP_OFF_F("ECDSA x", 7);
    case RTE_CRYPTO_ASYM_OP_VERIFY:
        qat_function = get_ecdsa_verify_function(xform);
        func_id = qat_function.func_id;
            QAT_LOG(ERR, "Cannot obtain functionality id");
        qat_func_alignsize = RTE_ALIGN_CEIL(qat_function.bytesize, 8);

        /* Verify inputs, highest slot first: digest, signature (s, r),
         * order n, base point (x, y), public key Q, curve a, b, p.
         */
        SET_PKE_9A_IN(asym_op->ecdsa.message, 10);
        SET_PKE_9A_IN(asym_op->ecdsa.s, 9);
        SET_PKE_9A_IN(asym_op->ecdsa.r, 8);
        SET_PKE_9A_EC(curve[curve_id], n, 7);
        SET_PKE_9A_EC(curve[curve_id], x, 6);
        SET_PKE_9A_EC(curve[curve_id], y, 5);
        SET_PKE_9A_IN(asym_op->ecdsa.q.x, 4);
        SET_PKE_9A_IN(asym_op->ecdsa.q.y, 3);
        SET_PKE_9A_EC(curve[curve_id], a, 2);
        SET_PKE_9A_EC(curve[curve_id], b, 1);
        SET_PKE_9A_EC(curve[curve_id], p, 0);

        cookie->alg_bytesize = curve[curve_id].bytesize;
        cookie->qat_func_alignsize = qat_func_alignsize;
        qat_req->pke_hdr.cd_pars.func_id = func_id;
        qat_req->input_param_count =
                QAT_ASYM_ECDSA_RS_VERIFY_IN_PARAMS;
        qat_req->output_param_count =
                QAT_ASYM_ECDSA_RS_VERIFY_OUT_PARAMS;

        HEXDUMP_OFF_F("p", 0);
        HEXDUMP_OFF_F("b", 1);
        HEXDUMP_OFF_F("a", 2);
        HEXDUMP_OFF_F("y", 3);
        HEXDUMP_OFF_F("x", 4);
        HEXDUMP_OFF_F("yG", 5);
        HEXDUMP_OFF_F("xG", 6);
        HEXDUMP_OFF_F("n", 7);
        HEXDUMP_OFF_F("r", 8);
        HEXDUMP_OFF_F("s", 9);
        HEXDUMP_OFF_F("e", 10);
653 ecdsa_collect(struct rte_crypto_asym_op *asym_op,
654 struct qat_asym_op_cookie *cookie)
656 uint32_t alg_bytesize = cookie->alg_bytesize;
657 uint32_t qat_func_alignsize = cookie->qat_func_alignsize;
658 uint32_t ltrim = qat_func_alignsize - alg_bytesize;
660 if (asym_op->rsa.op_type == RTE_CRYPTO_ASYM_OP_SIGN) {
661 uint8_t *r = asym_op->ecdsa.r.data;
662 uint8_t *s = asym_op->ecdsa.s.data;
664 asym_op->ecdsa.r.length = alg_bytesize;
665 asym_op->ecdsa.s.length = alg_bytesize;
666 rte_memcpy(r, &cookie->output_array[0][ltrim], alg_bytesize);
667 rte_memcpy(s, &cookie->output_array[1][ltrim], alg_bytesize);
669 HEXDUMP("R", cookie->output_array[0],
671 HEXDUMP("S", cookie->output_array[1],
674 return RTE_CRYPTO_OP_STATUS_SUCCESS;
/* Prepare an EC point-multiplication PKE request: scalar and point
 * coordinates from the op, curve constants (a, b, p, h) from the
 * curve[] table, each right-aligned in a qat_func_alignsize-wide input
 * slot.
 */
ecpm_set_input(struct rte_crypto_asym_op *asym_op,
        struct icp_qat_fw_pke_request *qat_req,
        struct qat_asym_op_cookie *cookie,
        struct rte_crypto_asym_xform *xform)
    struct qat_asym_function qat_function;
    uint32_t qat_func_alignsize, func_id;

    curve_id = pick_curve(xform);
        QAT_LOG(DEBUG, "Incorrect elliptic curve");

    qat_function = get_ecpm_function(xform);
    func_id = qat_function.func_id;
        QAT_LOG(ERR, "Cannot obtain functionality id");
    /* Firmware wants 8-byte-aligned operand slots. */
    qat_func_alignsize = RTE_ALIGN_CEIL(qat_function.bytesize, 8);

    SET_PKE_LN(asym_op->ecpm.scalar, qat_func_alignsize, 0);
    SET_PKE_LN(asym_op->ecpm.p.x, qat_func_alignsize, 1);
    SET_PKE_LN(asym_op->ecpm.p.y, qat_func_alignsize, 2);
    SET_PKE_LN_EC(curve[curve_id], a, 3);
    SET_PKE_LN_EC(curve[curve_id], b, 4);
    SET_PKE_LN_EC(curve[curve_id], p, 5);
    SET_PKE_LN_EC(curve[curve_id], h, 6);

    cookie->alg_bytesize = curve[curve_id].bytesize;
    cookie->qat_func_alignsize = qat_func_alignsize;
    qat_req->pke_hdr.cd_pars.func_id = func_id;
    qat_req->input_param_count =
            QAT_ASYM_ECPM_IN_PARAMS;
    qat_req->output_param_count =
            QAT_ASYM_ECPM_OUT_PARAMS;

    HEXDUMP("k", cookie->input_array[0], qat_func_alignsize);
    HEXDUMP("xG", cookie->input_array[1], qat_func_alignsize);
    HEXDUMP("yG", cookie->input_array[2], qat_func_alignsize);
    HEXDUMP("a", cookie->input_array[3], qat_func_alignsize);
    HEXDUMP("b", cookie->input_array[4], qat_func_alignsize);
    HEXDUMP("q", cookie->input_array[5], qat_func_alignsize);
    HEXDUMP("h", cookie->input_array[6], qat_func_alignsize);
729 ecpm_collect(struct rte_crypto_asym_op *asym_op,
730 struct qat_asym_op_cookie *cookie)
732 uint8_t *x = asym_op->ecpm.r.x.data;
733 uint8_t *y = asym_op->ecpm.r.y.data;
734 uint32_t alg_bytesize = cookie->alg_bytesize;
735 uint32_t qat_func_alignsize = cookie->qat_func_alignsize;
736 uint32_t ltrim = qat_func_alignsize - alg_bytesize;
738 asym_op->ecpm.r.x.length = alg_bytesize;
739 asym_op->ecpm.r.y.length = alg_bytesize;
740 rte_memcpy(x, &cookie->output_array[0][ltrim], alg_bytesize);
741 rte_memcpy(y, &cookie->output_array[1][ltrim], alg_bytesize);
743 HEXDUMP("rX", cookie->output_array[0],
745 HEXDUMP("rY", cookie->output_array[1],
747 return RTE_CRYPTO_OP_STATUS_SUCCESS;
751 asym_set_input(struct rte_crypto_asym_op *asym_op,
752 struct icp_qat_fw_pke_request *qat_req,
753 struct qat_asym_op_cookie *cookie,
754 struct rte_crypto_asym_xform *xform)
756 switch (xform->xform_type) {
757 case RTE_CRYPTO_ASYM_XFORM_MODEX:
758 return modexp_set_input(asym_op, qat_req,
760 case RTE_CRYPTO_ASYM_XFORM_MODINV:
761 return modinv_set_input(asym_op, qat_req,
763 case RTE_CRYPTO_ASYM_XFORM_RSA:
764 return rsa_set_input(asym_op, qat_req,
766 case RTE_CRYPTO_ASYM_XFORM_ECDSA:
767 return ecdsa_set_input(asym_op, qat_req,
769 case RTE_CRYPTO_ASYM_XFORM_ECPM:
770 return ecpm_set_input(asym_op, qat_req,
773 QAT_LOG(ERR, "Invalid/unsupported asymmetric crypto xform");
/* Ring-enqueue callback: translate one rte_crypto_op into a PKE firmware
 * request in @out_msg. On any preparation error the request is turned
 * into a NULL request (no parameters, NULL service type) so the slot
 * still flows through the ring, and the error is latched in the cookie.
 */
qat_asym_build_request(void *in_op, uint8_t *out_msg, void *op_cookie,
        __rte_unused uint64_t *opaque,
        __rte_unused enum qat_device_gen qat_dev_gen)
    struct rte_crypto_op *op = (struct rte_crypto_op *)in_op;
    struct rte_crypto_asym_op *asym_op = op->asym;
    struct icp_qat_fw_pke_request *qat_req =
            (struct icp_qat_fw_pke_request *)out_msg;
    struct qat_asym_op_cookie *cookie =
            (struct qat_asym_op_cookie *)op_cookie;
    struct rte_crypto_asym_xform *xform;
    struct qat_asym_session *qat_session = (struct qat_asym_session *)
            op->asym->session->sess_private_data;

    if (unlikely(qat_session == NULL)) {
        QAT_DP_LOG(ERR, "Session was not created for this device");

    op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
    switch (op->sess_type) {
    case RTE_CRYPTO_OP_WITH_SESSION:
        request_init(qat_req);
        xform = &qat_session->xform;
    case RTE_CRYPTO_OP_SESSIONLESS:
        request_init(qat_req);
        xform = op->asym->xform;
        QAT_DP_LOG(ERR, "Invalid session/xform settings");
        op->status = RTE_CRYPTO_OP_STATUS_INVALID_SESSION;
    err = asym_set_input(asym_op, qat_req, cookie,
        op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;

    /* opaque carries the op pointer back through the response ring. */
    qat_req->pke_mid.opaque = (uint64_t)(uintptr_t)op;
    qat_req->pke_mid.src_data_addr = cookie->input_addr;
    qat_req->pke_mid.dest_data_addr = cookie->output_addr;

    HEXDUMP("qat_req:", qat_req, sizeof(struct icp_qat_fw_pke_request));

    /* Error path: emit a NULL request so the ring slot is consumed. */
    qat_req->pke_mid.opaque = (uint64_t)(uintptr_t)op;
    HEXDUMP("qat_req:", qat_req, sizeof(struct icp_qat_fw_pke_request));
    qat_req->output_param_count = 0;
    qat_req->input_param_count = 0;
    qat_req->pke_hdr.service_type = ICP_QAT_FW_COMN_REQ_NULL;
    cookie->error |= err;
841 qat_asym_collect_response(struct rte_crypto_op *op,
842 struct qat_asym_op_cookie *cookie,
843 struct rte_crypto_asym_xform *xform)
845 struct rte_crypto_asym_op *asym_op = op->asym;
847 switch (xform->xform_type) {
848 case RTE_CRYPTO_ASYM_XFORM_MODEX:
849 return modexp_collect(asym_op, cookie, xform);
850 case RTE_CRYPTO_ASYM_XFORM_MODINV:
851 return modinv_collect(asym_op, cookie, xform);
852 case RTE_CRYPTO_ASYM_XFORM_RSA:
853 return rsa_collect(asym_op, cookie);
854 case RTE_CRYPTO_ASYM_XFORM_ECDSA:
855 return ecdsa_collect(asym_op, cookie);
856 case RTE_CRYPTO_ASYM_XFORM_ECPM:
857 return ecpm_collect(asym_op, cookie);
859 QAT_LOG(ERR, "Not supported xform type");
860 return RTE_CRYPTO_OP_STATUS_ERROR;
/* Ring-dequeue callback: inspect a PKE firmware response, map firmware
 * and cookie error flags onto op->status, and — if the op is still
 * NOT_PROCESSED — collect the result and scrub the cookie buffers.
 */
qat_asym_process_response(void **out_op, uint8_t *resp,
        void *op_cookie, __rte_unused uint64_t *dequeue_err_count)
    struct icp_qat_fw_pke_resp *resp_msg =
            (struct icp_qat_fw_pke_resp *)resp;
    struct rte_crypto_op *op = (struct rte_crypto_op *)(uintptr_t)
    struct qat_asym_op_cookie *cookie = op_cookie;
    struct rte_crypto_asym_xform *xform;
    struct qat_asym_session *qat_session = (struct qat_asym_session *)
            op->asym->session->sess_private_data;

    /* Error latched at build time (see qat_asym_build_request). */
    if (op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED)
        op->status = RTE_CRYPTO_OP_STATUS_ERROR;
    QAT_DP_LOG(DEBUG, "Cookie status returned error");

    /* Firmware-reported PKE status. */
    if (ICP_QAT_FW_PKE_RESP_PKE_STAT_GET(
        resp_msg->pke_resp_hdr.resp_status.pke_resp_flags)) {
        if (op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED)
            op->status = RTE_CRYPTO_OP_STATUS_ERROR;
        QAT_DP_LOG(DEBUG, "Asymmetric response status"
    if (resp_msg->pke_resp_hdr.resp_status.comn_err_code) {
        if (op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED)
            op->status = RTE_CRYPTO_OP_STATUS_ERROR;
        QAT_DP_LOG(ERR, "Asymmetric common status"

    switch (op->sess_type) {
    case RTE_CRYPTO_OP_WITH_SESSION:
        xform = &qat_session->xform;
    case RTE_CRYPTO_OP_SESSIONLESS:
        xform = op->asym->xform;
        "Invalid session/xform settings in response ring!");
        op->status = RTE_CRYPTO_OP_STATUS_INVALID_SESSION;
    if (op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED) {
        op->status = qat_asym_collect_response(op,
        /* Scrub DMA buffers so key material does not linger. */
        cleanup(cookie, xform, cookie->alg_bytesize);

    HEXDUMP("resp_msg:", resp_msg, sizeof(struct icp_qat_fw_pke_resp));
924 session_set_modexp(struct qat_asym_session *qat_session,
925 struct rte_crypto_asym_xform *xform)
927 uint8_t *modulus = xform->modex.modulus.data;
928 uint8_t *exponent = xform->modex.exponent.data;
930 qat_session->xform.modex.modulus.data =
931 rte_malloc(NULL, xform->modex.modulus.length, 0);
932 if (qat_session->xform.modex.modulus.data == NULL)
934 qat_session->xform.modex.modulus.length = xform->modex.modulus.length;
935 qat_session->xform.modex.exponent.data = rte_malloc(NULL,
936 xform->modex.exponent.length, 0);
937 if (qat_session->xform.modex.exponent.data == NULL) {
938 rte_free(qat_session->xform.modex.exponent.data);
941 qat_session->xform.modex.exponent.length = xform->modex.exponent.length;
943 rte_memcpy(qat_session->xform.modex.modulus.data, modulus,
944 xform->modex.modulus.length);
945 rte_memcpy(qat_session->xform.modex.exponent.data, exponent,
946 xform->modex.exponent.length);
952 session_set_modinv(struct qat_asym_session *qat_session,
953 struct rte_crypto_asym_xform *xform)
955 uint8_t *modulus = xform->modinv.modulus.data;
957 qat_session->xform.modinv.modulus.data =
958 rte_malloc(NULL, xform->modinv.modulus.length, 0);
959 if (qat_session->xform.modinv.modulus.data == NULL)
961 qat_session->xform.modinv.modulus.length = xform->modinv.modulus.length;
963 rte_memcpy(qat_session->xform.modinv.modulus.data, modulus,
964 xform->modinv.modulus.length);
/* Deep-copy the RSA key material into the session: always (n, e), plus
 * either the CRT quintuple (p, q, dP, dQ, qInv) or the private exponent
 * d depending on key_type. Allocation failures unwind through the
 * shared error path at the bottom; rte_free(NULL) is a no-op and the
 * session was zeroed by the caller, so freeing not-yet-allocated fields
 * is safe.
 */
session_set_rsa(struct qat_asym_session *qat_session,
        struct rte_crypto_asym_xform *xform)
    uint8_t *n = xform->rsa.n.data;
    uint8_t *e = xform->rsa.e.data;

    qat_session->xform.rsa.key_type = xform->rsa.key_type;

    qat_session->xform.rsa.n.data =
        rte_malloc(NULL, xform->rsa.n.length, 0);
    if (qat_session->xform.rsa.n.data == NULL)
    qat_session->xform.rsa.n.length =

    qat_session->xform.rsa.e.data =
        rte_malloc(NULL, xform->rsa.e.length, 0);
    if (qat_session->xform.rsa.e.data == NULL) {
    qat_session->xform.rsa.e.length =

    if (xform->rsa.key_type == RTE_RSA_KEY_TYPE_QT) {
        /* CRT form: copy the five key components. */
        uint8_t *p = xform->rsa.qt.p.data;
        uint8_t *q = xform->rsa.qt.q.data;
        uint8_t *dP = xform->rsa.qt.dP.data;
        uint8_t *dQ = xform->rsa.qt.dQ.data;
        uint8_t *qInv = xform->rsa.qt.qInv.data;

        qat_session->xform.rsa.qt.p.data =
            rte_malloc(NULL, xform->rsa.qt.p.length, 0);
        if (qat_session->xform.rsa.qt.p.data == NULL) {
        qat_session->xform.rsa.qt.p.length =
            xform->rsa.qt.p.length;

        qat_session->xform.rsa.qt.q.data =
            rte_malloc(NULL, xform->rsa.qt.q.length, 0);
        if (qat_session->xform.rsa.qt.q.data == NULL) {
        qat_session->xform.rsa.qt.q.length =
            xform->rsa.qt.q.length;

        qat_session->xform.rsa.qt.dP.data =
            rte_malloc(NULL, xform->rsa.qt.dP.length, 0);
        if (qat_session->xform.rsa.qt.dP.data == NULL) {
        qat_session->xform.rsa.qt.dP.length =
            xform->rsa.qt.dP.length;

        qat_session->xform.rsa.qt.dQ.data =
            rte_malloc(NULL, xform->rsa.qt.dQ.length, 0);
        if (qat_session->xform.rsa.qt.dQ.data == NULL) {
        qat_session->xform.rsa.qt.dQ.length =
            xform->rsa.qt.dQ.length;

        qat_session->xform.rsa.qt.qInv.data =
            rte_malloc(NULL, xform->rsa.qt.qInv.length, 0);
        if (qat_session->xform.rsa.qt.qInv.data == NULL) {
        qat_session->xform.rsa.qt.qInv.length =
            xform->rsa.qt.qInv.length;

        rte_memcpy(qat_session->xform.rsa.qt.p.data, p,
                xform->rsa.qt.p.length);
        rte_memcpy(qat_session->xform.rsa.qt.q.data, q,
                xform->rsa.qt.q.length);
        rte_memcpy(qat_session->xform.rsa.qt.dP.data, dP,
                xform->rsa.qt.dP.length);
        rte_memcpy(qat_session->xform.rsa.qt.dQ.data, dQ,
                xform->rsa.qt.dQ.length);
        rte_memcpy(qat_session->xform.rsa.qt.qInv.data, qInv,
                xform->rsa.qt.qInv.length);
        /* Exponent form: copy the private exponent d. */
        uint8_t *d = xform->rsa.d.data;

        qat_session->xform.rsa.d.data =
            rte_malloc(NULL, xform->rsa.d.length, 0);
        if (qat_session->xform.rsa.d.data == NULL) {
        qat_session->xform.rsa.d.length =
            xform->rsa.d.length;
        rte_memcpy(qat_session->xform.rsa.d.data, d,
                xform->rsa.d.length);

    rte_memcpy(qat_session->xform.rsa.n.data, n,
            xform->rsa.n.length);
    rte_memcpy(qat_session->xform.rsa.e.data, e,
            xform->rsa.e.length);

    /* Shared error path: release everything allocated so far. */
    rte_free(qat_session->xform.rsa.n.data);
    rte_free(qat_session->xform.rsa.e.data);
    rte_free(qat_session->xform.rsa.d.data);
    rte_free(qat_session->xform.rsa.qt.p.data);
    rte_free(qat_session->xform.rsa.qt.q.data);
    rte_free(qat_session->xform.rsa.qt.dP.data);
    rte_free(qat_session->xform.rsa.qt.dQ.data);
    rte_free(qat_session->xform.rsa.qt.qInv.data);
/* EC sessions only record the curve id; point and scalar parameters
 * arrive with each individual operation.
 */
session_set_ecdsa(struct qat_asym_session *qat_session,
        struct rte_crypto_asym_xform *xform)
    qat_session->xform.ec.curve_id = xform->ec.curve_id;
/* cryptodev op: initialize the session private data for the given xform.
 * The session is zeroed first so the per-algorithm setters may rely on
 * NULL pointers in their error paths.
 */
qat_asym_session_configure(struct rte_cryptodev *dev __rte_unused,
        struct rte_crypto_asym_xform *xform,
        struct rte_cryptodev_asym_session *session)
    struct qat_asym_session *qat_session;

    qat_session = (struct qat_asym_session *) session->sess_private_data;
    memset(qat_session, 0, sizeof(*qat_session));

    qat_session->xform.xform_type = xform->xform_type;
    switch (xform->xform_type) {
    case RTE_CRYPTO_ASYM_XFORM_MODEX:
        ret = session_set_modexp(qat_session, xform);
    case RTE_CRYPTO_ASYM_XFORM_MODINV:
        ret = session_set_modinv(qat_session, xform);
    case RTE_CRYPTO_ASYM_XFORM_RSA:
        ret = session_set_rsa(qat_session, xform);
    case RTE_CRYPTO_ASYM_XFORM_ECDSA:
    case RTE_CRYPTO_ASYM_XFORM_ECPM:
        /* EC setup cannot fail; it only records the curve id. */
        session_set_ecdsa(qat_session, xform);
        QAT_LOG(ERR, "Unsupported xform type");
/* cryptodev op: size of the session private data, rounded up to an
 * 8-byte multiple.
 */
qat_asym_session_get_private_size(struct rte_cryptodev *dev __rte_unused)
    return RTE_ALIGN_CEIL(sizeof(struct qat_asym_session), 8);
1144 session_clear_modexp(struct rte_crypto_modex_xform *modex)
1146 memset(modex->modulus.data, 0, modex->modulus.length);
1147 rte_free(modex->modulus.data);
1148 memset(modex->exponent.data, 0, modex->exponent.length);
1149 rte_free(modex->exponent.data);
1153 session_clear_modinv(struct rte_crypto_modinv_xform *modinv)
1155 memset(modinv->modulus.data, 0, modinv->modulus.length);
1156 rte_free(modinv->modulus.data);
1160 session_clear_rsa(struct rte_crypto_rsa_xform *rsa)
1162 memset(rsa->n.data, 0, rsa->n.length);
1163 rte_free(rsa->n.data);
1164 memset(rsa->e.data, 0, rsa->e.length);
1165 rte_free(rsa->e.data);
1166 if (rsa->key_type == RTE_RSA_KEY_TYPE_EXP) {
1167 memset(rsa->d.data, 0, rsa->d.length);
1168 rte_free(rsa->d.data);
1170 memset(rsa->qt.p.data, 0, rsa->qt.p.length);
1171 rte_free(rsa->qt.p.data);
1172 memset(rsa->qt.q.data, 0, rsa->qt.q.length);
1173 rte_free(rsa->qt.q.data);
1174 memset(rsa->qt.dP.data, 0, rsa->qt.dP.length);
1175 rte_free(rsa->qt.dP.data);
1176 memset(rsa->qt.dQ.data, 0, rsa->qt.dQ.length);
1177 rte_free(rsa->qt.dQ.data);
1178 memset(rsa->qt.qInv.data, 0, rsa->qt.qInv.length);
1179 rte_free(rsa->qt.qInv.data);
1184 session_clear_xform(struct qat_asym_session *qat_session)
1186 switch (qat_session->xform.xform_type) {
1187 case RTE_CRYPTO_ASYM_XFORM_MODEX:
1188 session_clear_modexp(&qat_session->xform.modex);
1190 case RTE_CRYPTO_ASYM_XFORM_MODINV:
1191 session_clear_modinv(&qat_session->xform.modinv);
1193 case RTE_CRYPTO_ASYM_XFORM_RSA:
1194 session_clear_rsa(&qat_session->xform.rsa);
/* cryptodev op: release the session's key material and zero the entire
 * private data area so nothing sensitive remains.
 */
qat_asym_session_clear(struct rte_cryptodev *dev,
        struct rte_cryptodev_asym_session *session)
    void *sess_priv = session->sess_private_data;
    struct qat_asym_session *qat_session =
        (struct qat_asym_session *)sess_priv;

    session_clear_xform(qat_session);
    /* Zero the whole private area, including padding. */
    memset(qat_session, 0, qat_asym_session_get_private_size(dev));
/* Enqueue a burst of asymmetric ops: thin wrapper handing
 * qat_asym_build_request to the generic QAT ring enqueue.
 */
qat_asym_crypto_enqueue_op_burst(void *qp, struct rte_crypto_op **ops,
    return qat_enqueue_op_burst(qp, qat_asym_build_request, (void **)ops,
/* Dequeue a burst of asymmetric ops: thin wrapper handing
 * qat_asym_process_response to the generic QAT ring dequeue.
 */
qat_asym_crypto_dequeue_op_burst(void *qp, struct rte_crypto_op **ops,
    return qat_dequeue_op_burst(qp, (void **)ops, qat_asym_process_response,
/* Per-object mempool init callback for asym op cookies.
 * Pre-computes the IOVA (physical) addresses cached in each cookie:
 * the address of the input/output parameter-pointer tables, and the
 * address of each of the 8 entries those tables reference.  Doing
 * this once at pool creation keeps virt-to-IOVA translation off the
 * data path.
 */
1232 qat_asym_init_op_cookie(void *op_cookie)
1235 struct qat_asym_op_cookie *cookie = op_cookie;
/* IOVA of the cookie's own pointer tables. */
1237 cookie->input_addr = rte_mempool_virt2iova(cookie) +
1238 offsetof(struct qat_asym_op_cookie,
1241 cookie->output_addr = rte_mempool_virt2iova(cookie) +
1242 offsetof(struct qat_asym_op_cookie,
1243 output_params_ptrs);
/* IOVA of each of the 8 parameter slots inside the cookie. */
1245 for (j = 0; j < 8; j++) {
1246 cookie->input_params_ptrs[j] =
1247 rte_mempool_virt2iova(cookie) +
1248 offsetof(struct qat_asym_op_cookie,
1250 cookie->output_params_ptrs[j] =
1251 rte_mempool_virt2iova(cookie) +
1252 offsetof(struct qat_asym_op_cookie,
/* Create the asymmetric-crypto cryptodev instance for a QAT PCI device.
 *
 * Flow: validate that this QAT generation provides asym ops; register
 * (primary) or cross-check (secondary) the driver id; build a subset
 * rte_device representing only the asym service of the PCI device;
 * create the cryptodev and wire up ops, burst functions and feature
 * flags; publish the per-generation capability table in a named
 * memzone so secondary processes can look it up; finally parse the
 * NULL-terminated qat_dev_cmd_param list for the enqueue threshold.
 */
1258 qat_asym_dev_create(struct qat_pci_device *qat_pci_dev,
1259 struct qat_dev_cmd_param *qat_dev_cmd_param)
1261 struct qat_cryptodev_private *internals;
1262 struct rte_cryptodev *cryptodev;
1263 struct qat_device_info *qat_dev_instance =
1264 &qat_pci_devs[qat_pci_dev->qat_dev_id];
1265 struct rte_cryptodev_pmd_init_params init_params = {
1267 .socket_id = qat_dev_instance->pci_dev->device.numa_node,
1268 .private_data_size = sizeof(struct qat_cryptodev_private)
1270 struct qat_capabilities_info capa_info;
1271 const struct rte_cryptodev_capabilities *capabilities;
1272 const struct qat_crypto_gen_dev_ops *gen_dev_ops =
1273 &qat_asym_gen_dev_ops[qat_pci_dev->qat_dev_gen];
1274 char name[RTE_CRYPTODEV_NAME_MAX_LEN];
1275 char capa_memz_name[RTE_CRYPTODEV_NAME_MAX_LEN];
/* Device name is "<pci device name>_asym". */
1279 snprintf(name, RTE_CRYPTODEV_NAME_MAX_LEN, "%s_%s",
1280 qat_pci_dev->name, "asym");
/* No embedded "\n": matches the "Created QAT ASYM device" log below. */
1281 QAT_LOG(DEBUG, "Creating QAT ASYM device %s", name);
/* This generation may not implement the asym service at all. */
1283 if (gen_dev_ops->cryptodev_ops == NULL) {
1284 QAT_LOG(ERR, "Device %s does not support asymmetric crypto",
/* Primary publishes the driver id; secondary must see the same one. */
1289 if (rte_eal_process_type() == RTE_PROC_PRIMARY) {
1290 qat_pci_dev->qat_asym_driver_id =
1292 } else if (rte_eal_process_type() == RTE_PROC_SECONDARY) {
1293 if (qat_pci_dev->qat_asym_driver_id !=
1294 qat_asym_driver_id) {
1296 "Device %s have different driver id than corresponding device in primary process",
1302 /* Populate subset device to use in cryptodev device creation */
1303 qat_dev_instance->asym_rte_dev.driver = &cryptodev_qat_asym_driver;
1304 qat_dev_instance->asym_rte_dev.numa_node =
1305 qat_dev_instance->pci_dev->device.numa_node;
1306 qat_dev_instance->asym_rte_dev.devargs = NULL;
1308 cryptodev = rte_cryptodev_pmd_create(name,
1309 &(qat_dev_instance->asym_rte_dev), &init_params);
1311 if (cryptodev == NULL)
1314 qat_dev_instance->asym_rte_dev.name = cryptodev->data->name;
1315 cryptodev->driver_id = qat_asym_driver_id;
1316 cryptodev->dev_ops = gen_dev_ops->cryptodev_ops;
1318 cryptodev->enqueue_burst = qat_asym_crypto_enqueue_op_burst;
1319 cryptodev->dequeue_burst = qat_asym_crypto_dequeue_op_burst;
1321 cryptodev->feature_flags = gen_dev_ops->get_feature_flags(qat_pci_dev);
/* Capability publication below is primary-process work only. */
1323 if (rte_eal_process_type() != RTE_PROC_PRIMARY)
/* Per-generation memzone name, shared across processes. */
1326 snprintf(capa_memz_name, RTE_CRYPTODEV_NAME_MAX_LEN,
1327 "QAT_ASYM_CAPA_GEN_%d",
1328 qat_pci_dev->qat_dev_gen);
1330 internals = cryptodev->data->dev_private;
1331 internals->qat_dev = qat_pci_dev;
1332 internals->dev_id = cryptodev->data->dev_id;
1334 capa_info = gen_dev_ops->get_capabilities(qat_pci_dev);
1335 capabilities = capa_info.data;
1336 capa_size = capa_info.size;
/* Reuse an existing capability memzone, else reserve a fresh one;
 * on allocation failure undo the cryptodev creation.
 */
1338 internals->capa_mz = rte_memzone_lookup(capa_memz_name);
1339 if (internals->capa_mz == NULL) {
1340 internals->capa_mz = rte_memzone_reserve(capa_memz_name,
1341 capa_size, rte_socket_id(), 0);
1342 if (internals->capa_mz == NULL) {
1344 "Error allocating memzone for capabilities, "
1345 "destroying PMD for %s",
1347 rte_cryptodev_pmd_destroy(cryptodev);
1348 memset(&qat_dev_instance->asym_rte_dev, 0,
1349 sizeof(qat_dev_instance->asym_rte_dev));
1354 memcpy(internals->capa_mz->addr, capabilities, capa_size);
1355 internals->qat_dev_capabilities = internals->capa_mz->addr;
/* Walk the NULL-name-terminated command parameter list. */
1358 if (qat_dev_cmd_param[i].name == NULL)
1360 if (!strcmp(qat_dev_cmd_param[i].name, ASYM_ENQ_THRESHOLD_NAME))
1361 internals->min_enq_burst_threshold =
1362 qat_dev_cmd_param[i].val;
1366 qat_pci_dev->asym_dev = internals;
1367 internals->service_type = QAT_SERVICE_ASYMMETRIC;
1368 QAT_LOG(DEBUG, "Created QAT ASYM device %s as cryptodev instance %d",
1369 cryptodev->data->name, internals->dev_id);
/* Tear down the asym cryptodev of a QAT PCI device.
 * Frees the shared capability memzone (primary process only),
 * destroys the cryptodev instance, and unlinks it from the PCI
 * device bookkeeping so a later create starts clean.
 */
1374 qat_asym_dev_destroy(struct qat_pci_device *qat_pci_dev)
1376 struct rte_cryptodev *cryptodev;
/* Nothing to do if the device or its asym service was never created. */
1378 if (qat_pci_dev == NULL)
1380 if (qat_pci_dev->asym_dev == NULL)
/* Only the primary process owns (and may free) the capability memzone. */
1382 if (rte_eal_process_type() == RTE_PROC_PRIMARY)
1383 rte_memzone_free(qat_pci_dev->asym_dev->capa_mz)
1385 /* free crypto device */
1386 cryptodev = rte_cryptodev_pmd_get_dev(
1387 qat_pci_dev->asym_dev->dev_id);
1388 rte_cryptodev_pmd_destroy(cryptodev);
1389 qat_pci_devs[qat_pci_dev->qat_dev_id].asym_rte_dev.name = NULL;
1390 qat_pci_dev->asym_dev = NULL;
/* Register the asym crypto driver with the cryptodev framework;
 * the framework assigns qat_asym_driver_id at registration time.
 */
1395 static struct cryptodev_driver qat_crypto_drv;
1396 RTE_PMD_REGISTER_CRYPTO_DRIVER(qat_crypto_drv,
1397 cryptodev_qat_asym_driver,
1398 qat_asym_driver_id);