1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2019 - 2022 Intel Corporation
7 #include <cryptodev_pmd.h>
9 #include "qat_device.h"
13 #include "icp_qat_fw_pke.h"
14 #include "icp_qat_fw.h"
/* Driver id for the asym PMD, assigned at driver registration. */
18 uint8_t qat_asym_driver_id;
/* Per-QAT-generation op tables; populated by the gen-specific units. */
20 struct qat_crypto_gen_dev_ops qat_asym_gen_dev_ops[QAT_N_GENS];
22 /* An rte_driver is needed in the registration of both the device and the driver
24  * The actual qat pci's rte_driver can't be used as its name represents
25  * the whole pci device with all services. Think of this as a holder for a name
26  * for the crypto part of the pci device.
28 static const char qat_asym_drv_name[] = RTE_STR(CRYPTODEV_NAME_QAT_ASYM_PMD);
29 static const struct rte_driver cryptodev_qat_asym_driver = {
30 .name = qat_asym_drv_name,
31 .alias = qat_asym_drv_name
35 * Macros with suffix _F are used with some of predefinded identifiers:
36 * - cookie->input_buffer
/* Hexdump helpers compile to nothing unless data-path debug logging is on. */
39 #if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
40 #define HEXDUMP(name, where, size) QAT_DP_HEXDUMP_LOG(DEBUG, name, \
42 #define HEXDUMP_OFF(name, where, size, idx) QAT_DP_HEXDUMP_LOG(DEBUG, name, \
43 &where[idx * size], size)
45 #define HEXDUMP_OFF_F(name, idx) QAT_DP_HEXDUMP_LOG(DEBUG, name, \
46 &cookie->input_buffer[idx * qat_alg_bytesize], \
49 #define HEXDUMP(name, where, size)
50 #define HEXDUMP_OFF(name, where, size, idx)
51 #define HEXDUMP_OFF_F(name, idx)
/* Validate an rte_crypto_param: rejects zero length and all-zero data,
 * logging the parameter name and setting the given op status. */
54 #define CHECK_IF_NOT_EMPTY(param, name, pname, status) \
56 if (param.length == 0) { \
59 " input parameter, zero length " pname \
62 } else if (check_zero(param)) { \
64 "Invalid " name " input parameter, empty " \
65 pname ", length = %d", \
72 #define SET_PKE_LN(where, what, how, idx) \
73 rte_memcpy(where[idx] + how - \
78 #define SET_PKE_LN_9A(where, what, how, idx) \
79 rte_memcpy(&where[idx * RTE_ALIGN_CEIL(how, 8)] + \
80 RTE_ALIGN_CEIL(how, 8) - \
85 #define SET_PKE_LN_EC(where, what, how, idx) \
86 rte_memcpy(where[idx] + \
87 RTE_ALIGN_CEIL(how, 8) - \
92 #define SET_PKE_LN_9A_F(what, idx) \
93 rte_memcpy(&cookie->input_buffer[idx * qat_alg_bytesize] + \
94 qat_alg_bytesize - what.length, \
95 what.data, what.length)
97 #define SET_PKE_LN_EC_F(what, how, idx) \
98 rte_memcpy(&cookie->input_buffer[idx * \
99 RTE_ALIGN_CEIL(how, 8)] + \
100 RTE_ALIGN_CEIL(how, 8) - how, \
/* Zero the firmware PKE request and set the common header fields
 * (service type + default header flags). */
104 request_init(struct icp_qat_fw_pke_request *qat_req)
106 memset(qat_req, 0, sizeof(*qat_req));
107 qat_req->pke_hdr.service_type = ICP_QAT_FW_COMN_REQ_CPM_FW_PKE;
108 qat_req->pke_hdr.hdr_flags =
109 ICP_QAT_FW_COMN_HDR_FLAGS_BUILD
110 (ICP_QAT_FW_COMN_REQ_FLAG_SET);
/* Zeroize in_count input lines and out_count output lines of the op
 * cookie, alg_size bytes each (scrubs operands after completion). */
114 cleanup_arrays(struct qat_asym_op_cookie *cookie,
115 int in_count, int out_count, int alg_size)
119 for (i = 0; i < in_count; i++)
120 memset(cookie->input_array[i], 0x0, alg_size);
121 for (i = 0; i < out_count; i++)
122 memset(cookie->output_array[i], 0x0, alg_size);
/* RSA-CRT variant of the cookie scrub: input line 0 is full-size, the
 * CRT inputs (p, q, dP, dQ, qInv) are half-size; outputs are full-size. */
126 cleanup_crt(struct qat_asym_op_cookie *cookie,
131 memset(cookie->input_array[0], 0x0, alg_size);
132 for (i = 1; i < QAT_ASYM_RSA_QT_NUM_IN_PARAMS; i++)
133 memset(cookie->input_array[i], 0x0, alg_size / 2);
134 for (i = 0; i < QAT_ASYM_RSA_NUM_OUT_PARAMS; i++)
135 memset(cookie->output_array[i], 0x0, alg_size);
/* Dispatch per-xform zeroization of the op cookie after a response has
 * been collected. */
139 cleanup(struct qat_asym_op_cookie *cookie,
140 struct rte_crypto_asym_xform *xform, int alg_size)
142 if (xform->xform_type == RTE_CRYPTO_ASYM_XFORM_MODEX)
143 cleanup_arrays(cookie, QAT_ASYM_MODEXP_NUM_IN_PARAMS,
144 QAT_ASYM_MODEXP_NUM_OUT_PARAMS, alg_size);
145 else if (xform->xform_type == RTE_CRYPTO_ASYM_XFORM_MODINV)
146 cleanup_arrays(cookie, QAT_ASYM_MODINV_NUM_IN_PARAMS,
147 QAT_ASYM_MODINV_NUM_OUT_PARAMS, alg_size);
148 else if (xform->xform_type == RTE_CRYPTO_ASYM_XFORM_RSA) {
149 if (xform->rsa.key_type == RTE_RSA_KEY_TYPE_QT)
150 cleanup_crt(cookie, alg_size);
/* non-CRT RSA keys use the generic full-size array scrub */
152 cleanup_arrays(cookie, QAT_ASYM_RSA_NUM_IN_PARAMS,
153 QAT_ASYM_RSA_NUM_OUT_PARAMS, alg_size);
/* Return non-zero when the big-number parameter n consists only of zero
 * bytes. NOTE(review): the branching splits on 8-byte word reads for
 * longer inputs; several lines of this function are not visible in this
 * listing - verify branch boundaries against the full file. */
159 check_zero(rte_crypto_param n)
161 int i, len = n.length;
164 for (i = len - 1; i >= 0; i--) {
165 if (n.data[i] != 0x0)
168 } else if (len == 8 && *(uint64_t *)&n.data[len - 8] == 0) {
170 } else if (*(uint64_t *)&n.data[len - 8] == 0) {
171 for (i = len - 9; i >= 0; i--) {
172 if (n.data[i] != 0x0)
/* Map the xform type to the QAT PKE {func_id, bytesize} descriptor.
 * Unsupported types yield func_id == 0, which callers treat as error. */
181 static struct qat_asym_function
182 get_asym_function(struct rte_crypto_asym_xform *xform)
184 struct qat_asym_function qat_function;
186 switch (xform->xform_type) {
187 case RTE_CRYPTO_ASYM_XFORM_MODEX:
188 qat_function = get_modexp_function(xform);
190 case RTE_CRYPTO_ASYM_XFORM_MODINV:
191 qat_function = get_modinv_function(xform);
194 qat_function.func_id = 0;
/* Build a modular-exponentiation request: validate parameters, resolve
 * the firmware function id, and copy base/exponent/modulus into input
 * lines 0/1/2 of the cookie (right-aligned, left zero-padded). */
202 modexp_set_input(struct rte_crypto_asym_op *asym_op,
203 struct icp_qat_fw_pke_request *qat_req,
204 struct qat_asym_op_cookie *cookie,
205 struct rte_crypto_asym_xform *xform)
207 struct qat_asym_function qat_function;
208 uint32_t alg_bytesize, func_id;
/* Reject zero-length or all-zero modulus/exponent up front. */
211 CHECK_IF_NOT_EMPTY(xform->modex.modulus, "mod exp",
213 CHECK_IF_NOT_EMPTY(xform->modex.exponent, "mod exp",
218 qat_function = get_asym_function(xform);
219 func_id = qat_function.func_id;
220 if (qat_function.func_id == 0) {
221 QAT_LOG(ERR, "Cannot obtain functionality id");
224 alg_bytesize = qat_function.bytesize;
226 SET_PKE_LN(cookie->input_array, asym_op->modex.base,
228 SET_PKE_LN(cookie->input_array, xform->modex.exponent,
230 SET_PKE_LN(cookie->input_array, xform->modex.modulus,
233 cookie->alg_bytesize = alg_bytesize;
234 qat_req->pke_hdr.cd_pars.func_id = func_id;
235 qat_req->input_param_count = QAT_ASYM_MODEXP_NUM_IN_PARAMS;
236 qat_req->output_param_count = QAT_ASYM_MODEXP_NUM_OUT_PARAMS;
238 HEXDUMP("ModExp base", cookie->input_array[0], alg_bytesize);
239 HEXDUMP("ModExp exponent", cookie->input_array[1], alg_bytesize);
240 HEXDUMP("ModExp modulus", cookie->input_array[2], alg_bytesize);
/* Copy the mod-exp result from the cookie back into the op, skipping
 * the firmware's leading padding so only modulus-length bytes return. */
246 modexp_collect(struct rte_crypto_asym_op *asym_op,
247 struct qat_asym_op_cookie *cookie,
248 struct rte_crypto_asym_xform *xform)
250 rte_crypto_param n = xform->modex.modulus;
251 uint32_t alg_bytesize = cookie->alg_bytesize;
252 uint8_t *modexp_result = asym_op->modex.result.data;
254 rte_memcpy(modexp_result,
255 cookie->output_array[0] + alg_bytesize
256 - n.length, n.length);
257 HEXDUMP("ModExp result", cookie->output_array[0],
259 return RTE_CRYPTO_OP_STATUS_SUCCESS;
/* Build a modular-inverse request: input lines are base and modulus. */
263 modinv_set_input(struct rte_crypto_asym_op *asym_op,
264 struct icp_qat_fw_pke_request *qat_req,
265 struct qat_asym_op_cookie *cookie,
266 struct rte_crypto_asym_xform *xform)
268 struct qat_asym_function qat_function;
269 uint32_t alg_bytesize, func_id;
/* NOTE(review): reads xform->modex.modulus in the mod-inv path. If
 * modex/modinv alias in the xform union this is harmless, but
 * xform->modinv.modulus would be the clearer field - confirm. */
272 CHECK_IF_NOT_EMPTY(xform->modex.modulus, "mod inv",
277 qat_function = get_asym_function(xform);
278 func_id = qat_function.func_id;
280 QAT_LOG(ERR, "Cannot obtain functionality id");
283 alg_bytesize = qat_function.bytesize;
285 SET_PKE_LN(cookie->input_array, asym_op->modinv.base,
287 SET_PKE_LN(cookie->input_array, xform->modinv.modulus,
290 cookie->alg_bytesize = alg_bytesize;
291 qat_req->pke_hdr.cd_pars.func_id = func_id;
292 qat_req->input_param_count =
293 QAT_ASYM_MODINV_NUM_IN_PARAMS;
294 qat_req->output_param_count =
295 QAT_ASYM_MODINV_NUM_OUT_PARAMS;
297 HEXDUMP("ModInv base", cookie->input_array[0], alg_bytesize);
298 HEXDUMP("ModInv modulus", cookie->input_array[1], alg_bytesize);
/* Copy the mod-inv result into the op buffer, right-aligned at the
 * caller-provided result length, trimming firmware padding. */
304 modinv_collect(struct rte_crypto_asym_op *asym_op,
305 struct qat_asym_op_cookie *cookie,
306 struct rte_crypto_asym_xform *xform)
308 rte_crypto_param n = xform->modinv.modulus;
309 uint8_t *modinv_result = asym_op->modinv.result.data;
310 uint32_t alg_bytesize = cookie->alg_bytesize;
312 rte_memcpy(modinv_result + (asym_op->modinv.result.length
314 cookie->output_array[0] + alg_bytesize
315 - n.length, n.length);
316 HEXDUMP("ModInv result", cookie->output_array[0],
318 return RTE_CRYPTO_OP_STATUS_SUCCESS;
/* Build an RSA public-key request (encrypt or verify). Input line 0 is
 * the message (encrypt) or signature (verify); lines 1/2 are e and n.
 * Only PADDING_NONE is accepted here. */
322 rsa_set_pub_input(struct rte_crypto_asym_op *asym_op,
323 struct icp_qat_fw_pke_request *qat_req,
324 struct qat_asym_op_cookie *cookie,
325 struct rte_crypto_asym_xform *xform)
327 struct qat_asym_function qat_function;
328 uint32_t alg_bytesize, func_id;
331 qat_function = get_rsa_enc_function(xform);
332 func_id = qat_function.func_id;
334 QAT_LOG(ERR, "Cannot obtain functionality id");
337 alg_bytesize = qat_function.bytesize;
339 if (asym_op->rsa.op_type == RTE_CRYPTO_ASYM_OP_ENCRYPT) {
340 switch (asym_op->rsa.pad) {
341 case RTE_CRYPTO_RSA_PADDING_NONE:
342 SET_PKE_LN(cookie->input_array, asym_op->rsa.message,
347 "Invalid RSA padding (Encryption)"
351 HEXDUMP("RSA Message", cookie->input_array[0], alg_bytesize);
353 switch (asym_op->rsa.pad) {
354 case RTE_CRYPTO_RSA_PADDING_NONE:
355 SET_PKE_LN(cookie->input_array, asym_op->rsa.sign,
360 "Invalid RSA padding (Verify)");
363 HEXDUMP("RSA Signature", cookie->input_array[0],
367 SET_PKE_LN(cookie->input_array, xform->rsa.e,
369 SET_PKE_LN(cookie->input_array, xform->rsa.n,
372 cookie->alg_bytesize = alg_bytesize;
373 qat_req->pke_hdr.cd_pars.func_id = func_id;
375 HEXDUMP("RSA Public Key", cookie->input_array[1], alg_bytesize);
376 HEXDUMP("RSA Modulus", cookie->input_array[2], alg_bytesize);
/* Build an RSA private-key request (decrypt or sign). QT keys load the
 * five half-size CRT parameters into lines 1..5; EXP keys load d and n.
 * Line 0 receives the ciphertext (decrypt) or message (sign); only
 * PADDING_NONE is accepted. */
382 rsa_set_priv_input(struct rte_crypto_asym_op *asym_op,
383 struct icp_qat_fw_pke_request *qat_req,
384 struct qat_asym_op_cookie *cookie,
385 struct rte_crypto_asym_xform *xform)
387 struct qat_asym_function qat_function;
388 uint32_t alg_bytesize, func_id;
/* CRT form: p, q, dP, dQ, qInv each occupy alg_bytesize/2 bytes. */
391 if (xform->rsa.key_type == RTE_RSA_KEY_TYPE_QT) {
392 qat_function = get_rsa_crt_function(xform);
393 func_id = qat_function.func_id;
395 QAT_LOG(ERR, "Cannot obtain functionality id");
398 alg_bytesize = qat_function.bytesize;
399 qat_req->input_param_count =
400 QAT_ASYM_RSA_QT_NUM_IN_PARAMS;
402 SET_PKE_LN(cookie->input_array, xform->rsa.qt.p,
403 (alg_bytesize >> 1), 1);
404 SET_PKE_LN(cookie->input_array, xform->rsa.qt.q,
405 (alg_bytesize >> 1), 2);
406 SET_PKE_LN(cookie->input_array, xform->rsa.qt.dP,
407 (alg_bytesize >> 1), 3);
408 SET_PKE_LN(cookie->input_array, xform->rsa.qt.dQ,
409 (alg_bytesize >> 1), 4);
410 SET_PKE_LN(cookie->input_array, xform->rsa.qt.qInv,
411 (alg_bytesize >> 1), 5);
413 HEXDUMP("RSA p", cookie->input_array[1],
415 HEXDUMP("RSA q", cookie->input_array[2],
417 HEXDUMP("RSA dP", cookie->input_array[3],
419 HEXDUMP("RSA dQ", cookie->input_array[4],
421 HEXDUMP("RSA qInv", cookie->input_array[5],
423 } else if (xform->rsa.key_type ==
424 RTE_RSA_KEY_TYPE_EXP) {
425 qat_function = get_rsa_dec_function(xform);
426 func_id = qat_function.func_id;
428 QAT_LOG(ERR, "Cannot obtain functionality id");
431 alg_bytesize = qat_function.bytesize;
433 SET_PKE_LN(cookie->input_array, xform->rsa.d,
435 SET_PKE_LN(cookie->input_array, xform->rsa.n,
438 HEXDUMP("RSA d", cookie->input_array[1],
440 HEXDUMP("RSA n", cookie->input_array[2],
443 QAT_LOG(ERR, "Invalid RSA key type");
/* Operand to transform goes into input line 0 for both op types. */
447 if (asym_op->rsa.op_type ==
448 RTE_CRYPTO_ASYM_OP_DECRYPT) {
449 switch (asym_op->rsa.pad) {
450 case RTE_CRYPTO_RSA_PADDING_NONE:
451 SET_PKE_LN(cookie->input_array, asym_op->rsa.cipher,
453 HEXDUMP("RSA ciphertext", cookie->input_array[0],
458 "Invalid padding of RSA (Decrypt)");
462 } else if (asym_op->rsa.op_type ==
463 RTE_CRYPTO_ASYM_OP_SIGN) {
464 switch (asym_op->rsa.pad) {
465 case RTE_CRYPTO_RSA_PADDING_NONE:
466 SET_PKE_LN(cookie->input_array, asym_op->rsa.message,
468 HEXDUMP("RSA text to be signed", cookie->input_array[0],
473 "Invalid padding of RSA (Signature)");
478 cookie->alg_bytesize = alg_bytesize;
479 qat_req->pke_hdr.cd_pars.func_id = func_id;
/* Route an RSA op to the public-key path (encrypt/verify) or the
 * private-key path (decrypt/sign), after setting the default
 * input/output parameter counts. */
484 rsa_set_input(struct rte_crypto_asym_op *asym_op,
485 struct icp_qat_fw_pke_request *qat_req,
486 struct qat_asym_op_cookie *cookie,
487 struct rte_crypto_asym_xform *xform)
489 qat_req->input_param_count =
490 QAT_ASYM_RSA_NUM_IN_PARAMS;
491 qat_req->output_param_count =
492 QAT_ASYM_RSA_NUM_OUT_PARAMS;
494 if (asym_op->rsa.op_type == RTE_CRYPTO_ASYM_OP_ENCRYPT ||
495 asym_op->rsa.op_type ==
496 RTE_CRYPTO_ASYM_OP_VERIFY) {
497 return rsa_set_pub_input(asym_op, qat_req, cookie, xform);
499 return rsa_set_priv_input(asym_op, qat_req, cookie, xform);
/* Copy the RSA firmware output back into the op buffer appropriate for
 * the op type (cipher for encrypt/verify, message for decrypt, sign for
 * sign). Non-NONE padding is rejected where a padding switch exists. */
504 rsa_collect(struct rte_crypto_asym_op *asym_op,
505 struct qat_asym_op_cookie *cookie)
507 uint32_t alg_bytesize = cookie->alg_bytesize;
509 if (asym_op->rsa.op_type == RTE_CRYPTO_ASYM_OP_ENCRYPT ||
510 asym_op->rsa.op_type == RTE_CRYPTO_ASYM_OP_VERIFY) {
512 if (asym_op->rsa.op_type ==
513 RTE_CRYPTO_ASYM_OP_ENCRYPT) {
514 uint8_t *rsa_result = asym_op->rsa.cipher.data;
516 rte_memcpy(rsa_result,
517 cookie->output_array[0],
519 HEXDUMP("RSA Encrypted data", cookie->output_array[0],
522 uint8_t *rsa_result = asym_op->rsa.cipher.data;
524 switch (asym_op->rsa.pad) {
525 case RTE_CRYPTO_RSA_PADDING_NONE:
526 rte_memcpy(rsa_result,
527 cookie->output_array[0],
529 HEXDUMP("RSA signature",
530 cookie->output_array[0],
534 QAT_LOG(ERR, "Padding not supported");
535 return RTE_CRYPTO_OP_STATUS_ERROR;
539 if (asym_op->rsa.op_type == RTE_CRYPTO_ASYM_OP_DECRYPT) {
540 uint8_t *rsa_result = asym_op->rsa.message.data;
542 switch (asym_op->rsa.pad) {
543 case RTE_CRYPTO_RSA_PADDING_NONE:
544 rte_memcpy(rsa_result,
545 cookie->output_array[0],
547 HEXDUMP("RSA Decrypted Message",
548 cookie->output_array[0],
552 QAT_LOG(ERR, "Padding not supported");
553 return RTE_CRYPTO_OP_STATUS_ERROR;
556 uint8_t *rsa_result = asym_op->rsa.sign.data;
558 rte_memcpy(rsa_result,
559 cookie->output_array[0],
561 HEXDUMP("RSA Signature", cookie->output_array[0],
565 return RTE_CRYPTO_OP_STATUS_SUCCESS;
/* Build an ECDSA sign or verify request. Operands and curve constants
 * are packed into cookie->input_buffer at fixed per-op slot indices
 * (SIGN and VERIFY use different layouts and parameter counts); each
 * slot is 8-byte aligned (qat_alg_bytesize). */
569 ecdsa_set_input(struct rte_crypto_asym_op *asym_op,
570 struct icp_qat_fw_pke_request *qat_req,
571 struct qat_asym_op_cookie *cookie,
572 struct rte_crypto_asym_xform *xform)
574 struct qat_asym_function qat_function;
575 uint32_t alg_bytesize, qat_alg_bytesize, func_id;
578 curve_id = pick_curve(xform);
580 QAT_LOG(ERR, "Incorrect elliptic curve");
584 switch (asym_op->ecdsa.op_type) {
585 case RTE_CRYPTO_ASYM_OP_SIGN:
586 qat_function = get_ecdsa_function(xform);
587 func_id = qat_function.func_id;
589 QAT_LOG(ERR, "Cannot obtain functionality id");
592 alg_bytesize = qat_function.bytesize;
593 qat_alg_bytesize = RTE_ALIGN_CEIL(alg_bytesize, 8);
595 SET_PKE_LN_9A_F(asym_op->ecdsa.pkey, 0);
596 SET_PKE_LN_9A_F(asym_op->ecdsa.message, 1);
597 SET_PKE_LN_9A_F(asym_op->ecdsa.k, 2);
598 SET_PKE_LN_EC_F(curve[curve_id].b, alg_bytesize, 3);
599 SET_PKE_LN_EC_F(curve[curve_id].a, alg_bytesize, 4);
600 SET_PKE_LN_EC_F(curve[curve_id].p, alg_bytesize, 5);
601 SET_PKE_LN_EC_F(curve[curve_id].n, alg_bytesize, 6);
602 SET_PKE_LN_EC_F(curve[curve_id].y, alg_bytesize, 7);
603 SET_PKE_LN_EC_F(curve[curve_id].x, alg_bytesize, 8);
605 cookie->alg_bytesize = alg_bytesize;
606 qat_req->pke_hdr.cd_pars.func_id = func_id;
607 qat_req->input_param_count =
608 QAT_ASYM_ECDSA_RS_SIGN_IN_PARAMS;
609 qat_req->output_param_count =
610 QAT_ASYM_ECDSA_RS_SIGN_OUT_PARAMS;
612 HEXDUMP_OFF_F("ECDSA d", 0);
613 HEXDUMP_OFF_F("ECDSA e", 1);
614 HEXDUMP_OFF_F("ECDSA k", 2);
615 HEXDUMP_OFF_F("ECDSA b", 3);
616 HEXDUMP_OFF_F("ECDSA a", 4);
617 HEXDUMP_OFF_F("ECDSA n", 5);
618 HEXDUMP_OFF_F("ECDSA y", 6);
619 HEXDUMP_OFF_F("ECDSA x", 7);
/* Verify layout: slots are filled in descending index order. */
621 case RTE_CRYPTO_ASYM_OP_VERIFY:
622 qat_function = get_ecdsa_verify_function(xform);
623 func_id = qat_function.func_id;
625 QAT_LOG(ERR, "Cannot obtain functionality id");
628 alg_bytesize = qat_function.bytesize;
629 qat_alg_bytesize = RTE_ALIGN_CEIL(alg_bytesize, 8);
631 SET_PKE_LN_9A_F(asym_op->ecdsa.message, 10);
632 SET_PKE_LN_9A_F(asym_op->ecdsa.s, 9);
633 SET_PKE_LN_9A_F(asym_op->ecdsa.r, 8);
634 SET_PKE_LN_EC_F(curve[curve_id].n, alg_bytesize, 7);
635 SET_PKE_LN_EC_F(curve[curve_id].x, alg_bytesize, 6);
636 SET_PKE_LN_EC_F(curve[curve_id].y, alg_bytesize, 5);
637 SET_PKE_LN_9A_F(asym_op->ecdsa.q.x, 4);
638 SET_PKE_LN_9A_F(asym_op->ecdsa.q.y, 3);
639 SET_PKE_LN_EC_F(curve[curve_id].a, alg_bytesize, 2);
640 SET_PKE_LN_EC_F(curve[curve_id].b, alg_bytesize, 1);
641 SET_PKE_LN_EC_F(curve[curve_id].p, alg_bytesize, 0);
643 cookie->alg_bytesize = alg_bytesize;
644 qat_req->pke_hdr.cd_pars.func_id = func_id;
645 qat_req->input_param_count =
646 QAT_ASYM_ECDSA_RS_VERIFY_IN_PARAMS;
647 qat_req->output_param_count =
648 QAT_ASYM_ECDSA_RS_VERIFY_OUT_PARAMS;
650 HEXDUMP_OFF_F("p", 0);
651 HEXDUMP_OFF_F("b", 1);
652 HEXDUMP_OFF_F("a", 2);
653 HEXDUMP_OFF_F("y", 3);
654 HEXDUMP_OFF_F("x", 4);
655 HEXDUMP_OFF_F("yG", 5);
656 HEXDUMP_OFF_F("xG", 6);
657 HEXDUMP_OFF_F("n", 7);
658 HEXDUMP_OFF_F("r", 8);
659 HEXDUMP_OFF_F("s", 9);
660 HEXDUMP_OFF_F("e", 10);
670 ecdsa_collect(struct rte_crypto_asym_op *asym_op,
671 struct qat_asym_op_cookie *cookie)
673 uint32_t alg_bytesize = cookie->alg_bytesize;
674 uint32_t qat_alg_bytesize = RTE_ALIGN_CEIL(cookie->alg_bytesize, 8);
675 uint32_t ltrim = qat_alg_bytesize - alg_bytesize;
677 if (asym_op->rsa.op_type == RTE_CRYPTO_ASYM_OP_SIGN) {
678 uint8_t *r = asym_op->ecdsa.r.data;
679 uint8_t *s = asym_op->ecdsa.s.data;
681 asym_op->ecdsa.r.length = alg_bytesize;
682 asym_op->ecdsa.s.length = alg_bytesize;
683 rte_memcpy(r, &cookie->output_array[0][ltrim], alg_bytesize);
684 rte_memcpy(s, &cookie->output_array[1][ltrim], alg_bytesize);
686 HEXDUMP("R", cookie->output_array[0],
688 HEXDUMP("S", cookie->output_array[1],
691 return RTE_CRYPTO_OP_STATUS_SUCCESS;
/* Build an EC point-multiplication request: scalar k and point (x, y)
 * go to lines 0..2, curve constants a, b, p, h to lines 3..6. */
695 ecpm_set_input(struct rte_crypto_asym_op *asym_op,
696 struct icp_qat_fw_pke_request *qat_req,
697 struct qat_asym_op_cookie *cookie,
698 struct rte_crypto_asym_xform *xform)
700 struct qat_asym_function qat_function;
701 uint32_t alg_bytesize, __rte_unused qat_alg_bytesize, func_id;
704 curve_id = pick_curve(xform);
706 QAT_LOG(ERR, "Incorrect elliptic curve");
710 qat_function = get_ecpm_function(xform);
711 func_id = qat_function.func_id;
713 QAT_LOG(ERR, "Cannot obtain functionality id");
716 alg_bytesize = qat_function.bytesize;
717 qat_alg_bytesize = RTE_ALIGN_CEIL(alg_bytesize, 8);
719 SET_PKE_LN_EC(cookie->input_array, asym_op->ecpm.scalar,
720 asym_op->ecpm.scalar.length, 0);
721 SET_PKE_LN_EC(cookie->input_array, asym_op->ecpm.p.x,
722 asym_op->ecpm.p.x.length, 1);
723 SET_PKE_LN_EC(cookie->input_array, asym_op->ecpm.p.y,
724 asym_op->ecpm.p.y.length, 2);
725 SET_PKE_LN_EC(cookie->input_array, curve[curve_id].a,
727 SET_PKE_LN_EC(cookie->input_array, curve[curve_id].b,
729 SET_PKE_LN_EC(cookie->input_array, curve[curve_id].p,
731 SET_PKE_LN_EC(cookie->input_array, curve[curve_id].h,
734 cookie->alg_bytesize = alg_bytesize;
735 qat_req->pke_hdr.cd_pars.func_id = func_id;
736 qat_req->input_param_count =
737 QAT_ASYM_ECPM_IN_PARAMS;
738 qat_req->output_param_count =
739 QAT_ASYM_ECPM_OUT_PARAMS;
741 HEXDUMP("k", cookie->input_array[0], qat_alg_bytesize);
742 HEXDUMP("xG", cookie->input_array[1], qat_alg_bytesize);
743 HEXDUMP("yG", cookie->input_array[2], qat_alg_bytesize);
744 HEXDUMP("a", cookie->input_array[3], qat_alg_bytesize);
745 HEXDUMP("b", cookie->input_array[4], qat_alg_bytesize);
746 HEXDUMP("q", cookie->input_array[5], qat_alg_bytesize);
747 HEXDUMP("h", cookie->input_array[6], qat_alg_bytesize);
/* Copy the resulting EC point (x, y) into the op, skipping the ltrim
 * leading pad bytes of each 8-byte-aligned firmware output line. */
753 ecpm_collect(struct rte_crypto_asym_op *asym_op,
754 struct qat_asym_op_cookie *cookie)
756 uint8_t *x = asym_op->ecpm.r.x.data;
757 uint8_t *y = asym_op->ecpm.r.y.data;
758 uint32_t alg_bytesize = cookie->alg_bytesize;
759 uint32_t qat_alg_bytesize = RTE_ALIGN_CEIL(cookie->alg_bytesize, 8);
760 uint32_t ltrim = qat_alg_bytesize - alg_bytesize;
762 asym_op->ecpm.r.x.length = alg_bytesize;
763 asym_op->ecpm.r.y.length = alg_bytesize;
764 rte_memcpy(x, &cookie->output_array[0][ltrim], alg_bytesize);
765 rte_memcpy(y, &cookie->output_array[1][ltrim], alg_bytesize);
767 HEXDUMP("rX", cookie->output_array[0],
769 HEXDUMP("rY", cookie->output_array[1],
771 return RTE_CRYPTO_OP_STATUS_SUCCESS;
/* Dispatch request building to the per-algorithm set_input helper. */
775 asym_set_input(struct rte_crypto_asym_op *asym_op,
776 struct icp_qat_fw_pke_request *qat_req,
777 struct qat_asym_op_cookie *cookie,
778 struct rte_crypto_asym_xform *xform)
780 switch (xform->xform_type) {
781 case RTE_CRYPTO_ASYM_XFORM_MODEX:
782 return modexp_set_input(asym_op, qat_req,
784 case RTE_CRYPTO_ASYM_XFORM_MODINV:
785 return modinv_set_input(asym_op, qat_req,
787 case RTE_CRYPTO_ASYM_XFORM_RSA:
788 return rsa_set_input(asym_op, qat_req,
790 case RTE_CRYPTO_ASYM_XFORM_ECDSA:
791 return ecdsa_set_input(asym_op, qat_req,
793 case RTE_CRYPTO_ASYM_XFORM_ECPM:
794 return ecpm_set_input(asym_op, qat_req,
797 QAT_LOG(ERR, "Invalid/unsupported asymmetric crypto xform");
/* Enqueue-side callback: translate an rte_crypto_op into a firmware PKE
 * request in out_msg, using the per-descriptor op cookie for operand
 * storage. Supports both session and sessionless ops. */
804 qat_asym_build_request(void *in_op, uint8_t *out_msg, void *op_cookie,
805 __rte_unused uint64_t *opaque,
806 __rte_unused enum qat_device_gen qat_dev_gen)
808 struct rte_crypto_op *op = (struct rte_crypto_op *)in_op;
809 struct rte_crypto_asym_op *asym_op = op->asym;
810 struct icp_qat_fw_pke_request *qat_req =
811 (struct icp_qat_fw_pke_request *)out_msg;
812 struct qat_asym_op_cookie *cookie =
813 (struct qat_asym_op_cookie *)op_cookie;
814 struct rte_crypto_asym_xform *xform;
815 struct qat_asym_session *qat_session = (struct qat_asym_session *)
816 op->asym->session->sess_private_data;
819 if (unlikely(qat_session == NULL)) {
820 QAT_DP_LOG(ERR, "Session was not created for this device");
824 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
825 switch (op->sess_type) {
826 case RTE_CRYPTO_OP_WITH_SESSION:
827 request_init(qat_req);
828 xform = &qat_session->xform;
830 case RTE_CRYPTO_OP_SESSIONLESS:
831 request_init(qat_req);
832 xform = op->asym->xform;
835 QAT_DP_LOG(ERR, "Invalid session/xform settings");
836 op->status = RTE_CRYPTO_OP_STATUS_INVALID_SESSION;
839 err = asym_set_input(asym_op, qat_req, cookie,
842 op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
846 qat_req->pke_mid.opaque = (uint64_t)(uintptr_t)op;
847 qat_req->pke_mid.src_data_addr = cookie->input_addr;
848 qat_req->pke_mid.dest_data_addr = cookie->output_addr;
850 HEXDUMP("qat_req:", qat_req, sizeof(struct icp_qat_fw_pke_request));
/* Error path: still emit a NULL request so the op flows through the
 * ring and is returned to the caller with its error status. */
854 qat_req->pke_mid.opaque = (uint64_t)(uintptr_t)op;
855 HEXDUMP("qat_req:", qat_req, sizeof(struct icp_qat_fw_pke_request));
856 qat_req->output_param_count = 0;
857 qat_req->input_param_count = 0;
858 qat_req->pke_hdr.service_type = ICP_QAT_FW_COMN_REQ_NULL;
859 cookie->error |= err;
/* Dispatch result collection to the per-algorithm collect helper;
 * returns the rte_crypto op status. */
865 qat_asym_collect_response(struct rte_crypto_op *op,
866 struct qat_asym_op_cookie *cookie,
867 struct rte_crypto_asym_xform *xform)
869 struct rte_crypto_asym_op *asym_op = op->asym;
871 switch (xform->xform_type) {
872 case RTE_CRYPTO_ASYM_XFORM_MODEX:
873 return modexp_collect(asym_op, cookie, xform);
874 case RTE_CRYPTO_ASYM_XFORM_MODINV:
875 return modinv_collect(asym_op, cookie, xform);
876 case RTE_CRYPTO_ASYM_XFORM_RSA:
877 return rsa_collect(asym_op, cookie);
878 case RTE_CRYPTO_ASYM_XFORM_ECDSA:
879 return ecdsa_collect(asym_op, cookie);
880 case RTE_CRYPTO_ASYM_XFORM_ECPM:
881 return ecpm_collect(asym_op, cookie);
883 QAT_LOG(ERR, "Not supported xform type");
884 return RTE_CRYPTO_OP_STATUS_ERROR;
/* Dequeue-side callback: inspect the firmware response status flags,
 * collect results into the op on success, and scrub the cookie. The op
 * status is only downgraded if still NOT_PROCESSED, so the first error
 * detected wins. */
889 qat_asym_process_response(void **out_op, uint8_t *resp,
890 void *op_cookie, __rte_unused uint64_t *dequeue_err_count)
892 struct icp_qat_fw_pke_resp *resp_msg =
893 (struct icp_qat_fw_pke_resp *)resp;
894 struct rte_crypto_op *op = (struct rte_crypto_op *)(uintptr_t)
896 struct qat_asym_op_cookie *cookie = op_cookie;
897 struct rte_crypto_asym_xform *xform;
898 struct qat_asym_session *qat_session = (struct qat_asym_session *)
899 op->asym->session->sess_private_data;
903 if (op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED)
904 op->status = RTE_CRYPTO_OP_STATUS_ERROR;
905 QAT_DP_LOG(ERR, "Cookie status returned error");
907 if (ICP_QAT_FW_PKE_RESP_PKE_STAT_GET(
908 resp_msg->pke_resp_hdr.resp_status.pke_resp_flags)) {
909 if (op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED)
910 op->status = RTE_CRYPTO_OP_STATUS_ERROR;
911 QAT_DP_LOG(ERR, "Asymmetric response status"
914 if (resp_msg->pke_resp_hdr.resp_status.comn_err_code) {
915 if (op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED)
916 op->status = RTE_CRYPTO_OP_STATUS_ERROR;
917 QAT_DP_LOG(ERR, "Asymmetric common status"
922 switch (op->sess_type) {
923 case RTE_CRYPTO_OP_WITH_SESSION:
924 xform = &qat_session->xform;
926 case RTE_CRYPTO_OP_SESSIONLESS:
927 xform = op->asym->xform;
931 "Invalid session/xform settings in response ring!");
932 op->status = RTE_CRYPTO_OP_STATUS_INVALID_SESSION;
935 if (op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED) {
936 op->status = qat_asym_collect_response(op,
938 cleanup(cookie, xform, cookie->alg_bytesize);
942 HEXDUMP("resp_msg:", resp_msg, sizeof(struct icp_qat_fw_pke_resp));
948 session_set_modexp(struct qat_asym_session *qat_session,
949 struct rte_crypto_asym_xform *xform)
951 uint8_t *modulus = xform->modex.modulus.data;
952 uint8_t *exponent = xform->modex.exponent.data;
954 qat_session->xform.modex.modulus.data =
955 rte_malloc(NULL, xform->modex.modulus.length, 0);
956 if (qat_session->xform.modex.modulus.data == NULL)
958 qat_session->xform.modex.modulus.length = xform->modex.modulus.length;
959 qat_session->xform.modex.exponent.data = rte_malloc(NULL,
960 xform->modex.exponent.length, 0);
961 if (qat_session->xform.modex.exponent.data == NULL) {
962 rte_free(qat_session->xform.modex.exponent.data);
965 qat_session->xform.modex.exponent.length = xform->modex.exponent.length;
967 rte_memcpy(qat_session->xform.modex.modulus.data, modulus,
968 xform->modex.modulus.length);
969 rte_memcpy(qat_session->xform.modex.exponent.data, exponent,
970 xform->modex.exponent.length);
/* Deep-copy the mod-inv modulus into the session (outlives the caller's
 * xform). Returns -ENOMEM on allocation failure. */
976 session_set_modinv(struct qat_asym_session *qat_session,
977 struct rte_crypto_asym_xform *xform)
979 uint8_t *modulus = xform->modinv.modulus.data;
981 qat_session->xform.modinv.modulus.data =
982 rte_malloc(NULL, xform->modinv.modulus.length, 0);
983 if (qat_session->xform.modinv.modulus.data == NULL)
985 qat_session->xform.modinv.modulus.length = xform->modinv.modulus.length;
987 rte_memcpy(qat_session->xform.modinv.modulus.data, modulus,
988 xform->modinv.modulus.length);
/* Deep-copy the RSA key material (n, e, then either the five CRT
 * parameters or d) into the session. On any allocation failure the
 * error path frees everything allocated so far (rte_free(NULL) is a
 * no-op for the members not yet set). */
994 session_set_rsa(struct qat_asym_session *qat_session,
995 struct rte_crypto_asym_xform *xform)
997 uint8_t *n = xform->rsa.n.data;
998 uint8_t *e = xform->rsa.e.data;
1001 qat_session->xform.rsa.key_type = xform->rsa.key_type;
1003 qat_session->xform.rsa.n.data =
1004 rte_malloc(NULL, xform->rsa.n.length, 0);
1005 if (qat_session->xform.rsa.n.data == NULL)
1007 qat_session->xform.rsa.n.length =
1008 xform->rsa.n.length;
1010 qat_session->xform.rsa.e.data =
1011 rte_malloc(NULL, xform->rsa.e.length, 0);
1012 if (qat_session->xform.rsa.e.data == NULL) {
1016 qat_session->xform.rsa.e.length =
1017 xform->rsa.e.length;
/* CRT (quintuple) private key: copy p, q, dP, dQ, qInv. */
1019 if (xform->rsa.key_type == RTE_RSA_KEY_TYPE_QT) {
1020 uint8_t *p = xform->rsa.qt.p.data;
1021 uint8_t *q = xform->rsa.qt.q.data;
1022 uint8_t *dP = xform->rsa.qt.dP.data;
1023 uint8_t *dQ = xform->rsa.qt.dQ.data;
1024 uint8_t *qInv = xform->rsa.qt.qInv.data;
1026 qat_session->xform.rsa.qt.p.data =
1027 rte_malloc(NULL, xform->rsa.qt.p.length, 0);
1028 if (qat_session->xform.rsa.qt.p.data == NULL) {
1032 qat_session->xform.rsa.qt.p.length =
1033 xform->rsa.qt.p.length;
1035 qat_session->xform.rsa.qt.q.data =
1036 rte_malloc(NULL, xform->rsa.qt.q.length, 0);
1037 if (qat_session->xform.rsa.qt.q.data == NULL) {
1041 qat_session->xform.rsa.qt.q.length =
1042 xform->rsa.qt.q.length;
1044 qat_session->xform.rsa.qt.dP.data =
1045 rte_malloc(NULL, xform->rsa.qt.dP.length, 0);
1046 if (qat_session->xform.rsa.qt.dP.data == NULL) {
1050 qat_session->xform.rsa.qt.dP.length =
1051 xform->rsa.qt.dP.length;
1053 qat_session->xform.rsa.qt.dQ.data =
1054 rte_malloc(NULL, xform->rsa.qt.dQ.length, 0);
1055 if (qat_session->xform.rsa.qt.dQ.data == NULL) {
1059 qat_session->xform.rsa.qt.dQ.length =
1060 xform->rsa.qt.dQ.length;
1062 qat_session->xform.rsa.qt.qInv.data =
1063 rte_malloc(NULL, xform->rsa.qt.qInv.length, 0);
1064 if (qat_session->xform.rsa.qt.qInv.data == NULL) {
1068 qat_session->xform.rsa.qt.qInv.length =
1069 xform->rsa.qt.qInv.length;
1071 rte_memcpy(qat_session->xform.rsa.qt.p.data, p,
1072 xform->rsa.qt.p.length);
1073 rte_memcpy(qat_session->xform.rsa.qt.q.data, q,
1074 xform->rsa.qt.q.length);
1075 rte_memcpy(qat_session->xform.rsa.qt.dP.data, dP,
1076 xform->rsa.qt.dP.length);
1077 rte_memcpy(qat_session->xform.rsa.qt.dQ.data, dQ,
1078 xform->rsa.qt.dQ.length);
1079 rte_memcpy(qat_session->xform.rsa.qt.qInv.data, qInv,
1080 xform->rsa.qt.qInv.length);
/* Plain-exponent private key: copy d only. */
1083 uint8_t *d = xform->rsa.d.data;
1085 qat_session->xform.rsa.d.data =
1086 rte_malloc(NULL, xform->rsa.d.length, 0);
1087 if (qat_session->xform.rsa.d.data == NULL) {
1091 qat_session->xform.rsa.d.length =
1092 xform->rsa.d.length;
1093 rte_memcpy(qat_session->xform.rsa.d.data, d,
1094 xform->rsa.d.length);
1097 rte_memcpy(qat_session->xform.rsa.n.data, n,
1098 xform->rsa.n.length);
1099 rte_memcpy(qat_session->xform.rsa.e.data, e,
1100 xform->rsa.e.length);
/* Shared error path: free every possibly-allocated member. */
1105 rte_free(qat_session->xform.rsa.n.data);
1106 rte_free(qat_session->xform.rsa.e.data);
1107 rte_free(qat_session->xform.rsa.d.data);
1108 rte_free(qat_session->xform.rsa.qt.p.data);
1109 rte_free(qat_session->xform.rsa.qt.q.data);
1110 rte_free(qat_session->xform.rsa.qt.dP.data);
1111 rte_free(qat_session->xform.rsa.qt.dQ.data);
1112 rte_free(qat_session->xform.rsa.qt.qInv.data);
/* EC sessions only need the curve id; op data arrives per-op. */
1117 session_set_ecdsa(struct qat_asym_session *qat_session,
1118 struct rte_crypto_asym_xform *xform)
1120 qat_session->xform.ec.curve_id = xform->ec.curve_id;
/* Session-configure op: zero the private session area and deep-copy the
 * xform via the per-algorithm session_set_* helper. */
1124 qat_asym_session_configure(struct rte_cryptodev *dev __rte_unused,
1125 struct rte_crypto_asym_xform *xform,
1126 struct rte_cryptodev_asym_session *session)
1128 struct qat_asym_session *qat_session;
1131 qat_session = (struct qat_asym_session *) session->sess_private_data;
1132 memset(qat_session, 0, sizeof(*qat_session));
1134 qat_session->xform.xform_type = xform->xform_type;
1135 switch (xform->xform_type) {
1136 case RTE_CRYPTO_ASYM_XFORM_MODEX:
1137 ret = session_set_modexp(qat_session, xform);
1139 case RTE_CRYPTO_ASYM_XFORM_MODINV:
1140 ret = session_set_modinv(qat_session, xform);
1142 case RTE_CRYPTO_ASYM_XFORM_RSA:
1143 ret = session_set_rsa(qat_session, xform);
1145 case RTE_CRYPTO_ASYM_XFORM_ECDSA:
1146 case RTE_CRYPTO_ASYM_XFORM_ECPM:
1147 session_set_ecdsa(qat_session, xform);
1154 QAT_LOG(ERR, "Unsupported xform type");
/* Size of the per-session private area, rounded up to 8 bytes. */
1162 qat_asym_session_get_private_size(struct rte_cryptodev *dev __rte_unused)
1164 return RTE_ALIGN_CEIL(sizeof(struct qat_asym_session), 8);
/* Zeroize and free the session's mod-exp parameter copies.
 * NOTE(review): a plain memset before rte_free can be elided by the
 * compiler; consider an explicit-zeroing primitive for key material. */
1168 session_clear_modexp(struct rte_crypto_modex_xform *modex)
1170 memset(modex->modulus.data, 0, modex->modulus.length);
1171 rte_free(modex->modulus.data);
1172 memset(modex->exponent.data, 0, modex->exponent.length);
1173 rte_free(modex->exponent.data);
/* Zeroize and free the session's mod-inv modulus copy. */
1177 session_clear_modinv(struct rte_crypto_modinv_xform *modinv)
1179 memset(modinv->modulus.data, 0, modinv->modulus.length);
1180 rte_free(modinv->modulus.data);
/* Zeroize and free the session's RSA key copies: n and e always, then
 * d (EXP keys) or the five CRT parameters (QT keys). */
1184 session_clear_rsa(struct rte_crypto_rsa_xform *rsa)
1187 memset(rsa->n.data, 0, rsa->n.length);
1188 rte_free(rsa->n.data);
1189 memset(rsa->e.data, 0, rsa->e.length);
1190 rte_free(rsa->e.data);
1191 if (rsa->key_type == RTE_RSA_KEY_TYPE_EXP) {
1192 memset(rsa->d.data, 0, rsa->d.length);
1193 rte_free(rsa->d.data);
1195 memset(rsa->qt.p.data, 0, rsa->qt.p.length);
1196 rte_free(rsa->qt.p.data);
1197 memset(rsa->qt.q.data, 0, rsa->qt.q.length);
1198 rte_free(rsa->qt.q.data);
1199 memset(rsa->qt.dP.data, 0, rsa->qt.dP.length);
1200 rte_free(rsa->qt.dP.data);
1201 memset(rsa->qt.dQ.data, 0, rsa->qt.dQ.length);
1202 rte_free(rsa->qt.dQ.data);
1203 memset(rsa->qt.qInv.data, 0, rsa->qt.qInv.length);
1204 rte_free(rsa->qt.qInv.data);
/* Dispatch per-algorithm cleanup of deep-copied session key material.
 * EC xforms hold no allocations, so no case is needed for them. */
1209 session_clear_xform(struct qat_asym_session *qat_session)
1211 switch (qat_session->xform.xform_type) {
1212 case RTE_CRYPTO_ASYM_XFORM_MODEX:
1213 session_clear_modexp(&qat_session->xform.modex);
1215 case RTE_CRYPTO_ASYM_XFORM_MODINV:
1216 session_clear_modinv(&qat_session->xform.modinv);
1218 case RTE_CRYPTO_ASYM_XFORM_RSA:
1219 session_clear_rsa(&qat_session->xform.rsa);
/* Session-clear op: free deep-copied key material, then scrub the whole
 * private session area. */
1227 qat_asym_session_clear(struct rte_cryptodev *dev,
1228 struct rte_cryptodev_asym_session *session)
1230 void *sess_priv = session->sess_private_data;
1231 struct qat_asym_session *qat_session =
1232 (struct qat_asym_session *)sess_priv;
1235 session_clear_xform(qat_session);
1236 memset(qat_session, 0, qat_asym_session_get_private_size(dev));
/* Thin burst wrappers binding the generic QAT queue-pair enqueue and
 * dequeue paths to the asym build/process callbacks above. */
1241 qat_asym_crypto_enqueue_op_burst(void *qp, struct rte_crypto_op **ops,
1244 return qat_enqueue_op_burst(qp, qat_asym_build_request, (void **)ops,
1249 qat_asym_crypto_dequeue_op_burst(void *qp, struct rte_crypto_op **ops,
1252 return qat_dequeue_op_burst(qp, (void **)ops, qat_asym_process_response,
1257 qat_asym_init_op_cookie(void *op_cookie)
1260 struct qat_asym_op_cookie *cookie = op_cookie;
1262 cookie->input_addr = rte_mempool_virt2iova(cookie) +
1263 offsetof(struct qat_asym_op_cookie,
1266 cookie->output_addr = rte_mempool_virt2iova(cookie) +
1267 offsetof(struct qat_asym_op_cookie,
1268 output_params_ptrs);
1270 for (j = 0; j < 8; j++) {
1271 cookie->input_params_ptrs[j] =
1272 rte_mempool_virt2iova(cookie) +
1273 offsetof(struct qat_asym_op_cookie,
1275 cookie->output_params_ptrs[j] =
1276 rte_mempool_virt2iova(cookie) +
1277 offsetof(struct qat_asym_op_cookie,
1283 qat_asym_dev_create(struct qat_pci_device *qat_pci_dev,
1284 struct qat_dev_cmd_param *qat_dev_cmd_param)
1286 struct qat_cryptodev_private *internals;
1287 struct rte_cryptodev *cryptodev;
1288 struct qat_device_info *qat_dev_instance =
1289 &qat_pci_devs[qat_pci_dev->qat_dev_id];
1290 struct rte_cryptodev_pmd_init_params init_params = {
1292 .socket_id = qat_dev_instance->pci_dev->device.numa_node,
1293 .private_data_size = sizeof(struct qat_cryptodev_private)
1295 struct qat_capabilities_info capa_info;
1296 const struct rte_cryptodev_capabilities *capabilities;
1297 const struct qat_crypto_gen_dev_ops *gen_dev_ops =
1298 &qat_asym_gen_dev_ops[qat_pci_dev->qat_dev_gen];
1299 char name[RTE_CRYPTODEV_NAME_MAX_LEN];
1300 char capa_memz_name[RTE_CRYPTODEV_NAME_MAX_LEN];
1304 snprintf(name, RTE_CRYPTODEV_NAME_MAX_LEN, "%s_%s",
1305 qat_pci_dev->name, "asym");
1306 QAT_LOG(DEBUG, "Creating QAT ASYM device %s\n", name);
1308 if (gen_dev_ops->cryptodev_ops == NULL) {
1309 QAT_LOG(ERR, "Device %s does not support asymmetric crypto",
1314 if (rte_eal_process_type() == RTE_PROC_PRIMARY) {
1315 qat_pci_dev->qat_asym_driver_id =
1317 } else if (rte_eal_process_type() == RTE_PROC_SECONDARY) {
1318 if (qat_pci_dev->qat_asym_driver_id !=
1319 qat_asym_driver_id) {
1321 "Device %s have different driver id than corresponding device in primary process",
1327 /* Populate subset device to use in cryptodev device creation */
1328 qat_dev_instance->asym_rte_dev.driver = &cryptodev_qat_asym_driver;
1329 qat_dev_instance->asym_rte_dev.numa_node =
1330 qat_dev_instance->pci_dev->device.numa_node;
1331 qat_dev_instance->asym_rte_dev.devargs = NULL;
1333 cryptodev = rte_cryptodev_pmd_create(name,
1334 &(qat_dev_instance->asym_rte_dev), &init_params);
1336 if (cryptodev == NULL)
1339 qat_dev_instance->asym_rte_dev.name = cryptodev->data->name;
1340 cryptodev->driver_id = qat_asym_driver_id;
1341 cryptodev->dev_ops = gen_dev_ops->cryptodev_ops;
1343 cryptodev->enqueue_burst = qat_asym_crypto_enqueue_op_burst;
1344 cryptodev->dequeue_burst = qat_asym_crypto_dequeue_op_burst;
1346 cryptodev->feature_flags = gen_dev_ops->get_feature_flags(qat_pci_dev);
1348 if (rte_eal_process_type() != RTE_PROC_PRIMARY)
1351 snprintf(capa_memz_name, RTE_CRYPTODEV_NAME_MAX_LEN,
1352 "QAT_ASYM_CAPA_GEN_%d",
1353 qat_pci_dev->qat_dev_gen);
1355 internals = cryptodev->data->dev_private;
1356 internals->qat_dev = qat_pci_dev;
1357 internals->dev_id = cryptodev->data->dev_id;
1359 capa_info = gen_dev_ops->get_capabilities(qat_pci_dev);
1360 capabilities = capa_info.data;
1361 capa_size = capa_info.size;
1363 internals->capa_mz = rte_memzone_lookup(capa_memz_name);
1364 if (internals->capa_mz == NULL) {
1365 internals->capa_mz = rte_memzone_reserve(capa_memz_name,
1366 capa_size, rte_socket_id(), 0);
1367 if (internals->capa_mz == NULL) {
1369 "Error allocating memzone for capabilities, "
1370 "destroying PMD for %s",
1372 rte_cryptodev_pmd_destroy(cryptodev);
1373 memset(&qat_dev_instance->asym_rte_dev, 0,
1374 sizeof(qat_dev_instance->asym_rte_dev));
1379 memcpy(internals->capa_mz->addr, capabilities, capa_size);
1380 internals->qat_dev_capabilities = internals->capa_mz->addr;
1383 if (qat_dev_cmd_param[i].name == NULL)
1385 if (!strcmp(qat_dev_cmd_param[i].name, ASYM_ENQ_THRESHOLD_NAME))
1386 internals->min_enq_burst_threshold =
1387 qat_dev_cmd_param[i].val;
1391 qat_pci_dev->asym_dev = internals;
1392 internals->service_type = QAT_SERVICE_ASYMMETRIC;
1393 QAT_LOG(DEBUG, "Created QAT ASYM device %s as cryptodev instance %d",
1394 cryptodev->data->name, internals->dev_id);
1399 qat_asym_dev_destroy(struct qat_pci_device *qat_pci_dev)
1401 struct rte_cryptodev *cryptodev;
1403 if (qat_pci_dev == NULL)
1405 if (qat_pci_dev->asym_dev == NULL)
1407 if (rte_eal_process_type() == RTE_PROC_PRIMARY)
1408 rte_memzone_free(qat_pci_dev->asym_dev->capa_mz);
1410 /* free crypto device */
1411 cryptodev = rte_cryptodev_pmd_get_dev(
1412 qat_pci_dev->asym_dev->dev_id);
1413 rte_cryptodev_pmd_destroy(cryptodev);
1414 qat_pci_devs[qat_pci_dev->qat_dev_id].asym_rte_dev.name = NULL;
1415 qat_pci_dev->asym_dev = NULL;
/* Register the asym service with the cryptodev framework under the
 * holder rte_driver defined above; presumably the framework assigns
 * the id written into qat_asym_driver_id — verify against the
 * RTE_PMD_REGISTER_CRYPTO_DRIVER macro definition.
 */
static struct cryptodev_driver qat_crypto_drv;
RTE_PMD_REGISTER_CRYPTO_DRIVER(qat_crypto_drv,
		cryptodev_qat_asym_driver,
		qat_asym_driver_id);