1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2019 - 2022 Intel Corporation
7 #include <cryptodev_pmd.h>
9 #include "qat_device.h"
13 #include "icp_qat_fw_pke.h"
14 #include "icp_qat_fw.h"
18 uint8_t qat_asym_driver_id;
20 struct qat_crypto_gen_dev_ops qat_asym_gen_dev_ops[QAT_N_GENS];
22 /* An rte_driver is needed in the registration of both the device and the driver
24 * The actual qat pci's rte_driver can't be used as its name represents
25 * the whole pci device with all services. Think of this as a holder for a name
26 * for the crypto part of the pci device.
28 static const char qat_asym_drv_name[] = RTE_STR(CRYPTODEV_NAME_QAT_ASYM_PMD);
29 static const struct rte_driver cryptodev_qat_asym_driver = {
30 .name = qat_asym_drv_name,
31 .alias = qat_asym_drv_name
35 * Macros with suffix _F are used with some of predefinded identifiers:
36 * - cookie->input_buffer
39 #if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
40 #define HEXDUMP(name, where, size) QAT_DP_HEXDUMP_LOG(DEBUG, name, \
42 #define HEXDUMP_OFF(name, where, size, idx) QAT_DP_HEXDUMP_LOG(DEBUG, name, \
43 &where[idx * size], size)
45 #define HEXDUMP_OFF_F(name, idx) QAT_DP_HEXDUMP_LOG(DEBUG, name, \
46 &cookie->input_buffer[idx * qat_alg_bytesize], \
49 #define HEXDUMP(name, where, size)
50 #define HEXDUMP_OFF(name, where, size, idx)
51 #define HEXDUMP_OFF_F(name, idx)
54 #define CHECK_IF_NOT_EMPTY(param, name, pname, status) \
56 if (param.length == 0) { \
59 " input parameter, zero length " pname \
62 } else if (check_zero(param)) { \
64 "Invalid " name " input parameter, empty " \
65 pname ", length = %d", \
72 #define SET_PKE_LN(where, what, how, idx) \
73 rte_memcpy(where[idx] + how - \
78 #define SET_PKE_LN_9A(where, what, how, idx) \
79 rte_memcpy(&where[idx * RTE_ALIGN_CEIL(how, 8)] + \
80 RTE_ALIGN_CEIL(how, 8) - \
85 #define SET_PKE_LN_EC(where, what, how, idx) \
86 rte_memcpy(where[idx] + \
87 RTE_ALIGN_CEIL(how, 8) - \
92 #define SET_PKE_LN_9A_F(what, idx) \
93 rte_memcpy(&cookie->input_buffer[idx * qat_alg_bytesize] + \
94 qat_alg_bytesize - what.length, \
95 what.data, what.length)
97 #define SET_PKE_LN_EC_F(what, how, idx) \
98 rte_memcpy(&cookie->input_buffer[idx * \
99 RTE_ALIGN_CEIL(how, 8)] + \
100 RTE_ALIGN_CEIL(how, 8) - how, \
/* Zero the firmware PKE request and set the common header: service type
 * PKE plus the default header flags.
 */
104 request_init(struct icp_qat_fw_pke_request *qat_req)
106 memset(qat_req, 0, sizeof(*qat_req));
107 qat_req->pke_hdr.service_type = ICP_QAT_FW_COMN_REQ_CPM_FW_PKE;
108 qat_req->pke_hdr.hdr_flags =
109 ICP_QAT_FW_COMN_HDR_FLAGS_BUILD
110 (ICP_QAT_FW_COMN_REQ_FLAG_SET);
/* Zero the first in_count input buffers and out_count output buffers of
 * the op cookie, alg_size bytes each (scrubs key/intermediate material
 * after an operation).
 */
114 cleanup_arrays(struct qat_asym_op_cookie *cookie,
115 int in_count, int out_count, int alg_size)
119 for (i = 0; i < in_count; i++)
120 memset(cookie->input_array[i], 0x0, alg_size);
121 for (i = 0; i < out_count; i++)
122 memset(cookie->output_array[i], 0x0, alg_size);
/* Scrub the cookie buffers used by an RSA-CRT (quintuple key) operation:
 * input[0] is full alg_size, the remaining CRT inputs (p, q, dP, dQ, qInv)
 * are half-size; all outputs are full alg_size.
 */
126 cleanup_crt(struct qat_asym_op_cookie *cookie,
131 memset(cookie->input_array[0], 0x0, alg_size);
132 for (i = 1; i < QAT_ASYM_RSA_QT_NUM_IN_PARAMS; i++)
133 memset(cookie->input_array[i], 0x0, alg_size / 2);
134 for (i = 0; i < QAT_ASYM_RSA_NUM_OUT_PARAMS; i++)
135 memset(cookie->output_array[i], 0x0, alg_size);
/* Dispatch cookie scrubbing by xform type; RSA with a quintuple (QT)
 * key uses the CRT-specific layout, everything else uses the flat
 * per-parameter arrays.
 */
139 cleanup(struct qat_asym_op_cookie *cookie,
140 struct rte_crypto_asym_xform *xform, int alg_size)
142 if (xform->xform_type == RTE_CRYPTO_ASYM_XFORM_MODEX)
143 cleanup_arrays(cookie, QAT_ASYM_MODEXP_NUM_IN_PARAMS,
144 QAT_ASYM_MODEXP_NUM_OUT_PARAMS, alg_size);
145 else if (xform->xform_type == RTE_CRYPTO_ASYM_XFORM_MODINV)
146 cleanup_arrays(cookie, QAT_ASYM_MODINV_NUM_IN_PARAMS,
147 QAT_ASYM_MODINV_NUM_OUT_PARAMS, alg_size);
148 else if (xform->xform_type == RTE_CRYPTO_ASYM_XFORM_RSA) {
149 if (xform->rsa.key_type == RTE_RSA_KEY_TYPE_QT)
150 cleanup_crt(cookie, alg_size);
152 cleanup_arrays(cookie, QAT_ASYM_RSA_NUM_IN_PARAMS,
153 QAT_ASYM_RSA_NUM_OUT_PARAMS, alg_size);
/* Return whether the big-number parameter is entirely zero. Small/odd
 * lengths are scanned byte-by-byte from the most significant end; the
 * 8-byte-aligned fast paths read the trailing word as a uint64_t.
 * NOTE(review): the *(uint64_t *) casts assume n.data is suitably
 * aligned and rely on type-punning — confirm buffers come from aligned
 * allocations on the targets this builds for.
 */
159 check_zero(rte_crypto_param n)
161 int i, len = n.length;
164 for (i = len - 1; i >= 0; i--) {
165 if (n.data[i] != 0x0)
168 } else if (len == 8 && *(uint64_t *)&n.data[len - 8] == 0) {
170 } else if (*(uint64_t *)&n.data[len - 8] == 0) {
171 for (i = len - 9; i >= 0; i--) {
172 if (n.data[i] != 0x0)
/* Map an xform type to the firmware functionality descriptor (func_id +
 * operand byte size). Unsupported types yield func_id == 0, which callers
 * treat as "cannot obtain functionality id".
 */
181 static struct qat_asym_function
182 get_asym_function(struct rte_crypto_asym_xform *xform)
184 struct qat_asym_function qat_function;
186 switch (xform->xform_type) {
187 case RTE_CRYPTO_ASYM_XFORM_MODEX:
188 qat_function = get_modexp_function(xform);
190 case RTE_CRYPTO_ASYM_XFORM_MODINV:
191 qat_function = get_modinv_function(xform);
194 qat_function.func_id = 0;
/* Populate the PKE request and cookie input buffers for a modular
 * exponentiation. The functionality id is chosen from the largest of
 * base/exponent/modulus lengths; base, exponent and modulus are copied
 * right-aligned into input lines 0..2.
 */
202 modexp_set_input(struct rte_crypto_asym_op *asym_op,
203 struct icp_qat_fw_pke_request *qat_req,
204 struct qat_asym_op_cookie *cookie,
205 struct rte_crypto_asym_xform *xform)
207 struct qat_asym_function qat_function;
208 uint32_t alg_bytesize, func_id, in_bytesize;
/* Reject zero-length or all-zero modulus/exponent up front. */
211 CHECK_IF_NOT_EMPTY(xform->modex.modulus, "mod exp",
213 CHECK_IF_NOT_EMPTY(xform->modex.exponent, "mod exp",
/* in_bytesize = max(base, exponent, modulus) length. */
218 if (asym_op->modex.base.length > xform->modex.exponent.length &&
219 asym_op->modex.base.length > xform->modex.modulus.length) {
220 in_bytesize = asym_op->modex.base.length;
221 } else if (xform->modex.exponent.length > xform->modex.modulus.length)
222 in_bytesize = xform->modex.exponent.length;
224 in_bytesize = xform->modex.modulus.length;
226 qat_function = get_modexp_function2(in_bytesize);
227 func_id = qat_function.func_id;
228 if (qat_function.func_id == 0) {
229 QAT_LOG(ERR, "Cannot obtain functionality id");
232 alg_bytesize = qat_function.bytesize;
234 SET_PKE_LN(cookie->input_array, asym_op->modex.base,
236 SET_PKE_LN(cookie->input_array, xform->modex.exponent,
238 SET_PKE_LN(cookie->input_array, xform->modex.modulus,
241 cookie->alg_bytesize = alg_bytesize;
242 qat_req->pke_hdr.cd_pars.func_id = func_id;
243 qat_req->input_param_count = QAT_ASYM_MODEXP_NUM_IN_PARAMS;
244 qat_req->output_param_count = QAT_ASYM_MODEXP_NUM_OUT_PARAMS;
246 HEXDUMP("ModExp base", cookie->input_array[0], alg_bytesize);
247 HEXDUMP("ModExp exponent", cookie->input_array[1], alg_bytesize);
248 HEXDUMP("ModExp modulus", cookie->input_array[2], alg_bytesize);
/* Copy the mod-exp result out of the cookie: the device writes it
 * right-aligned in alg_bytesize, so the last modulus-length bytes are
 * copied into the caller's result buffer.
 */
254 modexp_collect(struct rte_crypto_asym_op *asym_op,
255 struct qat_asym_op_cookie *cookie,
256 struct rte_crypto_asym_xform *xform)
258 rte_crypto_param n = xform->modex.modulus;
259 uint32_t alg_bytesize = cookie->alg_bytesize;
260 uint8_t *modexp_result = asym_op->modex.result.data;
262 rte_memcpy(modexp_result,
263 cookie->output_array[0] + alg_bytesize
264 - n.length, n.length);
265 HEXDUMP("ModExp result", cookie->output_array[0],
267 return RTE_CRYPTO_OP_STATUS_SUCCESS;
/* Populate the PKE request and cookie input buffers for a modular
 * inverse: base in line 0, modulus in line 1.
 */
271 modinv_set_input(struct rte_crypto_asym_op *asym_op,
272 struct icp_qat_fw_pke_request *qat_req,
273 struct qat_asym_op_cookie *cookie,
274 struct rte_crypto_asym_xform *xform)
276 struct qat_asym_function qat_function;
277 uint32_t alg_bytesize, func_id;
/* NOTE(review): this validates xform->modex.modulus in the mod-inv
 * path. modex and modinv share a union so the offset coincides, but
 * it should presumably read xform->modinv.modulus — confirm and fix
 * the field name for clarity.
 */
280 CHECK_IF_NOT_EMPTY(xform->modex.modulus, "mod inv",
285 qat_function = get_asym_function(xform);
286 func_id = qat_function.func_id;
288 QAT_LOG(ERR, "Cannot obtain functionality id");
291 alg_bytesize = qat_function.bytesize;
293 SET_PKE_LN(cookie->input_array, asym_op->modinv.base,
295 SET_PKE_LN(cookie->input_array, xform->modinv.modulus,
298 cookie->alg_bytesize = alg_bytesize;
299 qat_req->pke_hdr.cd_pars.func_id = func_id;
300 qat_req->input_param_count =
301 QAT_ASYM_MODINV_NUM_IN_PARAMS;
302 qat_req->output_param_count =
303 QAT_ASYM_MODINV_NUM_OUT_PARAMS;
305 HEXDUMP("ModInv base", cookie->input_array[0], alg_bytesize);
306 HEXDUMP("ModInv modulus", cookie->input_array[1], alg_bytesize);
/* Copy the mod-inv result: the device output is right-aligned in
 * alg_bytesize; the last modulus-length bytes land at the tail of the
 * caller's result buffer (offset by result.length - n.length).
 */
312 modinv_collect(struct rte_crypto_asym_op *asym_op,
313 struct qat_asym_op_cookie *cookie,
314 struct rte_crypto_asym_xform *xform)
316 rte_crypto_param n = xform->modinv.modulus;
317 uint8_t *modinv_result = asym_op->modinv.result.data;
318 uint32_t alg_bytesize = cookie->alg_bytesize;
320 rte_memcpy(modinv_result + (asym_op->modinv.result.length
322 cookie->output_array[0] + alg_bytesize
323 - n.length, n.length);
324 HEXDUMP("ModInv result", cookie->output_array[0],
326 return RTE_CRYPTO_OP_STATUS_SUCCESS;
/* Prepare an RSA public-key operation (encrypt or verify). Input line 0
 * carries the message (encrypt) or signature (verify); lines 1 and 2
 * carry the public exponent e and modulus n. Only PADDING_NONE is
 * accepted here.
 */
330 rsa_set_pub_input(struct rte_crypto_asym_op *asym_op,
331 struct icp_qat_fw_pke_request *qat_req,
332 struct qat_asym_op_cookie *cookie,
333 struct rte_crypto_asym_xform *xform)
335 struct qat_asym_function qat_function;
336 uint32_t alg_bytesize, func_id;
339 qat_function = get_rsa_enc_function(xform);
340 func_id = qat_function.func_id;
342 QAT_LOG(ERR, "Cannot obtain functionality id");
345 alg_bytesize = qat_function.bytesize;
347 if (asym_op->rsa.op_type == RTE_CRYPTO_ASYM_OP_ENCRYPT) {
348 switch (asym_op->rsa.pad) {
349 case RTE_CRYPTO_RSA_PADDING_NONE:
350 SET_PKE_LN(cookie->input_array, asym_op->rsa.message,
355 "Invalid RSA padding (Encryption)"
359 HEXDUMP("RSA Message", cookie->input_array[0], alg_bytesize);
361 switch (asym_op->rsa.pad) {
362 case RTE_CRYPTO_RSA_PADDING_NONE:
363 SET_PKE_LN(cookie->input_array, asym_op->rsa.sign,
368 "Invalid RSA padding (Verify)");
371 HEXDUMP("RSA Signature", cookie->input_array[0],
375 SET_PKE_LN(cookie->input_array, xform->rsa.e,
377 SET_PKE_LN(cookie->input_array, xform->rsa.n,
380 cookie->alg_bytesize = alg_bytesize;
381 qat_req->pke_hdr.cd_pars.func_id = func_id;
383 HEXDUMP("RSA Public Key", cookie->input_array[1], alg_bytesize);
384 HEXDUMP("RSA Modulus", cookie->input_array[2], alg_bytesize);
/* Prepare an RSA private-key operation (decrypt or sign). For a QT
 * (quintuple/CRT) key, lines 1..5 take p, q, dP, dQ, qInv at half the
 * modulus size; for an EXP key, lines 1..2 take d and n. Line 0 then
 * receives the ciphertext (decrypt) or message (sign); only
 * PADDING_NONE is accepted.
 */
390 rsa_set_priv_input(struct rte_crypto_asym_op *asym_op,
391 struct icp_qat_fw_pke_request *qat_req,
392 struct qat_asym_op_cookie *cookie,
393 struct rte_crypto_asym_xform *xform)
395 struct qat_asym_function qat_function;
396 uint32_t alg_bytesize, func_id;
399 if (xform->rsa.key_type == RTE_RSA_KEY_TYPE_QT) {
400 qat_function = get_rsa_crt_function(xform);
401 func_id = qat_function.func_id;
403 QAT_LOG(ERR, "Cannot obtain functionality id");
406 alg_bytesize = qat_function.bytesize;
407 qat_req->input_param_count =
408 QAT_ASYM_RSA_QT_NUM_IN_PARAMS;
/* CRT components are half the modulus size (alg_bytesize >> 1). */
410 SET_PKE_LN(cookie->input_array, xform->rsa.qt.p,
411 (alg_bytesize >> 1), 1);
412 SET_PKE_LN(cookie->input_array, xform->rsa.qt.q,
413 (alg_bytesize >> 1), 2);
414 SET_PKE_LN(cookie->input_array, xform->rsa.qt.dP,
415 (alg_bytesize >> 1), 3);
416 SET_PKE_LN(cookie->input_array, xform->rsa.qt.dQ,
417 (alg_bytesize >> 1), 4);
418 SET_PKE_LN(cookie->input_array, xform->rsa.qt.qInv,
419 (alg_bytesize >> 1), 5);
421 HEXDUMP("RSA p", cookie->input_array[1],
423 HEXDUMP("RSA q", cookie->input_array[2],
425 HEXDUMP("RSA dP", cookie->input_array[3],
427 HEXDUMP("RSA dQ", cookie->input_array[4],
429 HEXDUMP("RSA qInv", cookie->input_array[5],
431 } else if (xform->rsa.key_type ==
432 RTE_RSA_KEY_TYPE_EXP) {
433 qat_function = get_rsa_dec_function(xform);
434 func_id = qat_function.func_id;
436 QAT_LOG(ERR, "Cannot obtain functionality id");
439 alg_bytesize = qat_function.bytesize;
441 SET_PKE_LN(cookie->input_array, xform->rsa.d,
443 SET_PKE_LN(cookie->input_array, xform->rsa.n,
446 HEXDUMP("RSA d", cookie->input_array[1],
448 HEXDUMP("RSA n", cookie->input_array[2],
451 QAT_LOG(ERR, "Invalid RSA key type");
455 if (asym_op->rsa.op_type ==
456 RTE_CRYPTO_ASYM_OP_DECRYPT) {
457 switch (asym_op->rsa.pad) {
458 case RTE_CRYPTO_RSA_PADDING_NONE:
459 SET_PKE_LN(cookie->input_array, asym_op->rsa.cipher,
461 HEXDUMP("RSA ciphertext", cookie->input_array[0],
466 "Invalid padding of RSA (Decrypt)");
470 } else if (asym_op->rsa.op_type ==
471 RTE_CRYPTO_ASYM_OP_SIGN) {
472 switch (asym_op->rsa.pad) {
473 case RTE_CRYPTO_RSA_PADDING_NONE:
474 SET_PKE_LN(cookie->input_array, asym_op->rsa.message,
476 HEXDUMP("RSA text to be signed", cookie->input_array[0],
481 "Invalid padding of RSA (Signature)");
486 cookie->alg_bytesize = alg_bytesize;
487 qat_req->pke_hdr.cd_pars.func_id = func_id;
/* RSA front-end: set the default in/out parameter counts, then route
 * encrypt/verify to the public-key path and decrypt/sign to the
 * private-key path (which may override input_param_count for QT keys).
 */
492 rsa_set_input(struct rte_crypto_asym_op *asym_op,
493 struct icp_qat_fw_pke_request *qat_req,
494 struct qat_asym_op_cookie *cookie,
495 struct rte_crypto_asym_xform *xform)
497 qat_req->input_param_count =
498 QAT_ASYM_RSA_NUM_IN_PARAMS;
499 qat_req->output_param_count =
500 QAT_ASYM_RSA_NUM_OUT_PARAMS;
502 if (asym_op->rsa.op_type == RTE_CRYPTO_ASYM_OP_ENCRYPT ||
503 asym_op->rsa.op_type ==
504 RTE_CRYPTO_ASYM_OP_VERIFY) {
505 return rsa_set_pub_input(asym_op, qat_req, cookie, xform);
507 return rsa_set_priv_input(asym_op, qat_req, cookie, xform);
/* Copy the RSA device output back into the op. Encrypt → rsa.cipher,
 * verify/sign-recover → rsa.cipher (padding-checked), decrypt →
 * rsa.message, sign → rsa.sign. Only PADDING_NONE is handled; other
 * paddings return ERROR.
 */
512 rsa_collect(struct rte_crypto_asym_op *asym_op,
513 struct qat_asym_op_cookie *cookie)
515 uint32_t alg_bytesize = cookie->alg_bytesize;
517 if (asym_op->rsa.op_type == RTE_CRYPTO_ASYM_OP_ENCRYPT ||
518 asym_op->rsa.op_type == RTE_CRYPTO_ASYM_OP_VERIFY) {
520 if (asym_op->rsa.op_type ==
521 RTE_CRYPTO_ASYM_OP_ENCRYPT) {
522 uint8_t *rsa_result = asym_op->rsa.cipher.data;
524 rte_memcpy(rsa_result,
525 cookie->output_array[0],
527 HEXDUMP("RSA Encrypted data", cookie->output_array[0],
530 uint8_t *rsa_result = asym_op->rsa.cipher.data;
532 switch (asym_op->rsa.pad) {
533 case RTE_CRYPTO_RSA_PADDING_NONE:
534 rte_memcpy(rsa_result,
535 cookie->output_array[0],
537 HEXDUMP("RSA signature",
538 cookie->output_array[0],
542 QAT_LOG(ERR, "Padding not supported");
543 return RTE_CRYPTO_OP_STATUS_ERROR;
547 if (asym_op->rsa.op_type == RTE_CRYPTO_ASYM_OP_DECRYPT) {
548 uint8_t *rsa_result = asym_op->rsa.message.data;
550 switch (asym_op->rsa.pad) {
551 case RTE_CRYPTO_RSA_PADDING_NONE:
552 rte_memcpy(rsa_result,
553 cookie->output_array[0],
555 HEXDUMP("RSA Decrypted Message",
556 cookie->output_array[0],
560 QAT_LOG(ERR, "Padding not supported");
561 return RTE_CRYPTO_OP_STATUS_ERROR;
564 uint8_t *rsa_result = asym_op->rsa.sign.data;
566 rte_memcpy(rsa_result,
567 cookie->output_array[0],
569 HEXDUMP("RSA Signature", cookie->output_array[0],
573 return RTE_CRYPTO_OP_STATUS_SUCCESS;
/* Prepare an ECDSA operation. Operands are laid out in one flat
 * cookie->input_buffer: user-supplied values are copied right-aligned
 * into 8-byte-aligned slots (SET_PKE_LN_9A_F) and curve constants from
 * the curve[] table into theirs (SET_PKE_LN_EC_F). Sign and verify use
 * different slot orders and firmware functionality ids.
 */
577 ecdsa_set_input(struct rte_crypto_asym_op *asym_op,
578 struct icp_qat_fw_pke_request *qat_req,
579 struct qat_asym_op_cookie *cookie,
580 struct rte_crypto_asym_xform *xform)
582 struct qat_asym_function qat_function;
583 uint32_t alg_bytesize, qat_alg_bytesize, func_id;
586 curve_id = pick_curve(xform);
588 QAT_LOG(ERR, "Incorrect elliptic curve");
592 switch (asym_op->ecdsa.op_type) {
593 case RTE_CRYPTO_ASYM_OP_SIGN:
594 qat_function = get_ecdsa_function(xform);
595 func_id = qat_function.func_id;
597 QAT_LOG(ERR, "Cannot obtain functionality id");
600 alg_bytesize = qat_function.bytesize;
601 qat_alg_bytesize = RTE_ALIGN_CEIL(alg_bytesize, 8);
/* Sign inputs: pkey, digest, per-message k, then curve b/a/p/n/y/x. */
603 SET_PKE_LN_9A_F(asym_op->ecdsa.pkey, 0);
604 SET_PKE_LN_9A_F(asym_op->ecdsa.message, 1);
605 SET_PKE_LN_9A_F(asym_op->ecdsa.k, 2);
606 SET_PKE_LN_EC_F(curve[curve_id].b, alg_bytesize, 3);
607 SET_PKE_LN_EC_F(curve[curve_id].a, alg_bytesize, 4);
608 SET_PKE_LN_EC_F(curve[curve_id].p, alg_bytesize, 5);
609 SET_PKE_LN_EC_F(curve[curve_id].n, alg_bytesize, 6);
610 SET_PKE_LN_EC_F(curve[curve_id].y, alg_bytesize, 7);
611 SET_PKE_LN_EC_F(curve[curve_id].x, alg_bytesize, 8);
613 cookie->alg_bytesize = alg_bytesize;
614 qat_req->pke_hdr.cd_pars.func_id = func_id;
615 qat_req->input_param_count =
616 QAT_ASYM_ECDSA_RS_SIGN_IN_PARAMS;
617 qat_req->output_param_count =
618 QAT_ASYM_ECDSA_RS_SIGN_OUT_PARAMS;
620 HEXDUMP_OFF_F("ECDSA d", 0);
621 HEXDUMP_OFF_F("ECDSA e", 1);
622 HEXDUMP_OFF_F("ECDSA k", 2);
623 HEXDUMP_OFF_F("ECDSA b", 3);
624 HEXDUMP_OFF_F("ECDSA a", 4);
625 HEXDUMP_OFF_F("ECDSA n", 5);
626 HEXDUMP_OFF_F("ECDSA y", 6);
627 HEXDUMP_OFF_F("ECDSA x", 7);
629 case RTE_CRYPTO_ASYM_OP_VERIFY:
630 qat_function = get_ecdsa_verify_function(xform);
631 func_id = qat_function.func_id;
633 QAT_LOG(ERR, "Cannot obtain functionality id");
636 alg_bytesize = qat_function.bytesize;
637 qat_alg_bytesize = RTE_ALIGN_CEIL(alg_bytesize, 8);
/* Verify inputs, highest slot first: digest, s, r, curve n/x/y,
 * public key Q(x,y), curve a/b/p.
 */
639 SET_PKE_LN_9A_F(asym_op->ecdsa.message, 10);
640 SET_PKE_LN_9A_F(asym_op->ecdsa.s, 9);
641 SET_PKE_LN_9A_F(asym_op->ecdsa.r, 8);
642 SET_PKE_LN_EC_F(curve[curve_id].n, alg_bytesize, 7);
643 SET_PKE_LN_EC_F(curve[curve_id].x, alg_bytesize, 6);
644 SET_PKE_LN_EC_F(curve[curve_id].y, alg_bytesize, 5);
645 SET_PKE_LN_9A_F(asym_op->ecdsa.q.x, 4);
646 SET_PKE_LN_9A_F(asym_op->ecdsa.q.y, 3);
647 SET_PKE_LN_EC_F(curve[curve_id].a, alg_bytesize, 2);
648 SET_PKE_LN_EC_F(curve[curve_id].b, alg_bytesize, 1);
649 SET_PKE_LN_EC_F(curve[curve_id].p, alg_bytesize, 0);
651 cookie->alg_bytesize = alg_bytesize;
652 qat_req->pke_hdr.cd_pars.func_id = func_id;
653 qat_req->input_param_count =
654 QAT_ASYM_ECDSA_RS_VERIFY_IN_PARAMS;
655 qat_req->output_param_count =
656 QAT_ASYM_ECDSA_RS_VERIFY_OUT_PARAMS;
658 HEXDUMP_OFF_F("p", 0);
659 HEXDUMP_OFF_F("b", 1);
660 HEXDUMP_OFF_F("a", 2);
661 HEXDUMP_OFF_F("y", 3);
662 HEXDUMP_OFF_F("x", 4);
663 HEXDUMP_OFF_F("yG", 5);
664 HEXDUMP_OFF_F("xG", 6);
665 HEXDUMP_OFF_F("n", 7);
666 HEXDUMP_OFF_F("r", 8);
667 HEXDUMP_OFF_F("s", 9);
668 HEXDUMP_OFF_F("e", 10);
/* Copy the ECDSA (r, s) signature out of the cookie for a SIGN op,
 * trimming the 8-byte-alignment padding (ltrim) from the front of each
 * device output word.
 * NOTE(review): this tests asym_op->rsa.op_type in an ECDSA path — the
 * rsa/ecdsa members share a union so the read may coincide, but it
 * should presumably be asym_op->ecdsa.op_type; verify.
 */
678 ecdsa_collect(struct rte_crypto_asym_op *asym_op,
679 struct qat_asym_op_cookie *cookie)
681 uint32_t alg_bytesize = cookie->alg_bytesize;
682 uint32_t qat_alg_bytesize = RTE_ALIGN_CEIL(cookie->alg_bytesize, 8);
683 uint32_t ltrim = qat_alg_bytesize - alg_bytesize;
685 if (asym_op->rsa.op_type == RTE_CRYPTO_ASYM_OP_SIGN) {
686 uint8_t *r = asym_op->ecdsa.r.data;
687 uint8_t *s = asym_op->ecdsa.s.data;
689 asym_op->ecdsa.r.length = alg_bytesize;
690 asym_op->ecdsa.s.length = alg_bytesize;
691 rte_memcpy(r, &cookie->output_array[0][ltrim], alg_bytesize);
692 rte_memcpy(s, &cookie->output_array[1][ltrim], alg_bytesize);
694 HEXDUMP("R", cookie->output_array[0],
696 HEXDUMP("S", cookie->output_array[1],
699 return RTE_CRYPTO_OP_STATUS_SUCCESS;
/* Prepare an EC point multiplication: scalar k and point (x, y) from
 * the op in lines 0..2, curve constants a/b/p/h in lines 3..6, each
 * right-aligned within an 8-byte-aligned width.
 */
703 ecpm_set_input(struct rte_crypto_asym_op *asym_op,
704 struct icp_qat_fw_pke_request *qat_req,
705 struct qat_asym_op_cookie *cookie,
706 struct rte_crypto_asym_xform *xform)
708 struct qat_asym_function qat_function;
709 uint32_t alg_bytesize, __rte_unused qat_alg_bytesize, func_id;
712 curve_id = pick_curve(xform);
714 QAT_LOG(ERR, "Incorrect elliptic curve");
718 qat_function = get_ecpm_function(xform);
719 func_id = qat_function.func_id;
721 QAT_LOG(ERR, "Cannot obtain functionality id");
724 alg_bytesize = qat_function.bytesize;
725 qat_alg_bytesize = RTE_ALIGN_CEIL(alg_bytesize, 8);
727 SET_PKE_LN_EC(cookie->input_array, asym_op->ecpm.scalar,
728 asym_op->ecpm.scalar.length, 0);
729 SET_PKE_LN_EC(cookie->input_array, asym_op->ecpm.p.x,
730 asym_op->ecpm.p.x.length, 1);
731 SET_PKE_LN_EC(cookie->input_array, asym_op->ecpm.p.y,
732 asym_op->ecpm.p.y.length, 2);
733 SET_PKE_LN_EC(cookie->input_array, curve[curve_id].a,
735 SET_PKE_LN_EC(cookie->input_array, curve[curve_id].b,
737 SET_PKE_LN_EC(cookie->input_array, curve[curve_id].p,
739 SET_PKE_LN_EC(cookie->input_array, curve[curve_id].h,
742 cookie->alg_bytesize = alg_bytesize;
743 qat_req->pke_hdr.cd_pars.func_id = func_id;
744 qat_req->input_param_count =
745 QAT_ASYM_ECPM_IN_PARAMS;
746 qat_req->output_param_count =
747 QAT_ASYM_ECPM_OUT_PARAMS;
749 HEXDUMP("k", cookie->input_array[0], qat_alg_bytesize);
750 HEXDUMP("xG", cookie->input_array[1], qat_alg_bytesize);
751 HEXDUMP("yG", cookie->input_array[2], qat_alg_bytesize);
752 HEXDUMP("a", cookie->input_array[3], qat_alg_bytesize);
753 HEXDUMP("b", cookie->input_array[4], qat_alg_bytesize);
754 HEXDUMP("q", cookie->input_array[5], qat_alg_bytesize);
755 HEXDUMP("h", cookie->input_array[6], qat_alg_bytesize);
/* Copy the resulting EC point (x, y) out of the cookie, stripping the
 * alignment padding (ltrim) and setting the result lengths to the
 * curve operand size.
 */
761 ecpm_collect(struct rte_crypto_asym_op *asym_op,
762 struct qat_asym_op_cookie *cookie)
764 uint8_t *x = asym_op->ecpm.r.x.data;
765 uint8_t *y = asym_op->ecpm.r.y.data;
766 uint32_t alg_bytesize = cookie->alg_bytesize;
767 uint32_t qat_alg_bytesize = RTE_ALIGN_CEIL(cookie->alg_bytesize, 8);
768 uint32_t ltrim = qat_alg_bytesize - alg_bytesize;
770 asym_op->ecpm.r.x.length = alg_bytesize;
771 asym_op->ecpm.r.y.length = alg_bytesize;
772 rte_memcpy(x, &cookie->output_array[0][ltrim], alg_bytesize);
773 rte_memcpy(y, &cookie->output_array[1][ltrim], alg_bytesize);
775 HEXDUMP("rX", cookie->output_array[0],
777 HEXDUMP("rY", cookie->output_array[1],
779 return RTE_CRYPTO_OP_STATUS_SUCCESS;
/* Request-builder dispatch: route the op to the per-algorithm
 * *_set_input helper based on the xform type; unsupported types are
 * logged as an error.
 */
783 asym_set_input(struct rte_crypto_asym_op *asym_op,
784 struct icp_qat_fw_pke_request *qat_req,
785 struct qat_asym_op_cookie *cookie,
786 struct rte_crypto_asym_xform *xform)
788 switch (xform->xform_type) {
789 case RTE_CRYPTO_ASYM_XFORM_MODEX:
790 return modexp_set_input(asym_op, qat_req,
792 case RTE_CRYPTO_ASYM_XFORM_MODINV:
793 return modinv_set_input(asym_op, qat_req,
795 case RTE_CRYPTO_ASYM_XFORM_RSA:
796 return rsa_set_input(asym_op, qat_req,
798 case RTE_CRYPTO_ASYM_XFORM_ECDSA:
799 return ecdsa_set_input(asym_op, qat_req,
801 case RTE_CRYPTO_ASYM_XFORM_ECPM:
802 return ecpm_set_input(asym_op, qat_req,
805 QAT_LOG(ERR, "Invalid/unsupported asymmetric crypto xform");
/* Enqueue-path request builder: translate one rte_crypto_op into a
 * firmware PKE request in out_msg. Picks the xform from the session or
 * the sessionless op, fills the operand buffers via asym_set_input(),
 * then wires the op pointer and the cookie's input/output IOVAs into
 * the request. On input error the request is neutralized (param counts
 * zeroed, service type set to NULL) and the error is recorded in the
 * cookie so the dequeue side can report it.
 */
812 qat_asym_build_request(void *in_op, uint8_t *out_msg, void *op_cookie,
813 __rte_unused uint64_t *opaque,
814 __rte_unused enum qat_device_gen qat_dev_gen)
816 struct rte_crypto_op *op = (struct rte_crypto_op *)in_op;
817 struct rte_crypto_asym_op *asym_op = op->asym;
818 struct icp_qat_fw_pke_request *qat_req =
819 (struct icp_qat_fw_pke_request *)out_msg;
820 struct qat_asym_op_cookie *cookie =
821 (struct qat_asym_op_cookie *)op_cookie;
822 struct rte_crypto_asym_xform *xform;
823 struct qat_asym_session *qat_session = (struct qat_asym_session *)
824 op->asym->session->sess_private_data;
827 if (unlikely(qat_session == NULL)) {
828 QAT_DP_LOG(ERR, "Session was not created for this device");
832 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
833 switch (op->sess_type) {
834 case RTE_CRYPTO_OP_WITH_SESSION:
835 request_init(qat_req);
836 xform = &qat_session->xform;
838 case RTE_CRYPTO_OP_SESSIONLESS:
839 request_init(qat_req);
840 xform = op->asym->xform;
843 QAT_DP_LOG(ERR, "Invalid session/xform settings");
844 op->status = RTE_CRYPTO_OP_STATUS_INVALID_SESSION;
847 err = asym_set_input(asym_op, qat_req, cookie,
850 op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
854 qat_req->pke_mid.opaque = (uint64_t)(uintptr_t)op;
855 qat_req->pke_mid.src_data_addr = cookie->input_addr;
856 qat_req->pke_mid.dest_data_addr = cookie->output_addr;
858 HEXDUMP("qat_req:", qat_req, sizeof(struct icp_qat_fw_pke_request));
/* Error path: keep the op pointer for dequeue, but make the request a
 * no-op for the firmware and latch the error into the cookie.
 */
862 qat_req->pke_mid.opaque = (uint64_t)(uintptr_t)op;
863 HEXDUMP("qat_req:", qat_req, sizeof(struct icp_qat_fw_pke_request));
864 qat_req->output_param_count = 0;
865 qat_req->input_param_count = 0;
866 qat_req->pke_hdr.service_type = ICP_QAT_FW_COMN_REQ_NULL;
867 cookie->error |= err;
/* Dequeue-path dispatch: copy device output back into the op via the
 * per-algorithm *_collect helper; unsupported xform types return ERROR.
 */
873 qat_asym_collect_response(struct rte_crypto_op *op,
874 struct qat_asym_op_cookie *cookie,
875 struct rte_crypto_asym_xform *xform)
877 struct rte_crypto_asym_op *asym_op = op->asym;
879 switch (xform->xform_type) {
880 case RTE_CRYPTO_ASYM_XFORM_MODEX:
881 return modexp_collect(asym_op, cookie, xform);
882 case RTE_CRYPTO_ASYM_XFORM_MODINV:
883 return modinv_collect(asym_op, cookie, xform);
884 case RTE_CRYPTO_ASYM_XFORM_RSA:
885 return rsa_collect(asym_op, cookie);
886 case RTE_CRYPTO_ASYM_XFORM_ECDSA:
887 return ecdsa_collect(asym_op, cookie);
888 case RTE_CRYPTO_ASYM_XFORM_ECPM:
889 return ecpm_collect(asym_op, cookie);
891 QAT_LOG(ERR, "Not supported xform type");
892 return RTE_CRYPTO_OP_STATUS_ERROR;
/* Dequeue-path response handler: recover the op from the response
 * opaque field, map firmware PKE/common error flags onto op->status
 * (only downgrading from NOT_PROCESSED), then collect results and
 * scrub the cookie buffers.
 */
897 qat_asym_process_response(void **out_op, uint8_t *resp,
898 void *op_cookie, __rte_unused uint64_t *dequeue_err_count)
900 struct icp_qat_fw_pke_resp *resp_msg =
901 (struct icp_qat_fw_pke_resp *)resp;
902 struct rte_crypto_op *op = (struct rte_crypto_op *)(uintptr_t)
904 struct qat_asym_op_cookie *cookie = op_cookie;
905 struct rte_crypto_asym_xform *xform;
906 struct qat_asym_session *qat_session = (struct qat_asym_session *)
907 op->asym->session->sess_private_data;
/* Error latched by the build side (cookie->error). */
911 if (op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED)
912 op->status = RTE_CRYPTO_OP_STATUS_ERROR;
913 QAT_DP_LOG(ERR, "Cookie status returned error");
915 if (ICP_QAT_FW_PKE_RESP_PKE_STAT_GET(
916 resp_msg->pke_resp_hdr.resp_status.pke_resp_flags)) {
917 if (op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED)
918 op->status = RTE_CRYPTO_OP_STATUS_ERROR;
919 QAT_DP_LOG(ERR, "Asymmetric response status"
922 if (resp_msg->pke_resp_hdr.resp_status.comn_err_code) {
923 if (op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED)
924 op->status = RTE_CRYPTO_OP_STATUS_ERROR;
925 QAT_DP_LOG(ERR, "Asymmetric common status"
930 switch (op->sess_type) {
931 case RTE_CRYPTO_OP_WITH_SESSION:
932 xform = &qat_session->xform;
934 case RTE_CRYPTO_OP_SESSIONLESS:
935 xform = op->asym->xform;
939 "Invalid session/xform settings in response ring!");
940 op->status = RTE_CRYPTO_OP_STATUS_INVALID_SESSION;
/* Only collect results if no error was flagged above. */
943 if (op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED) {
944 op->status = qat_asym_collect_response(op,
946 cleanup(cookie, xform, cookie->alg_bytesize);
950 HEXDUMP("resp_msg:", resp_msg, sizeof(struct icp_qat_fw_pke_resp));
956 session_set_modexp(struct qat_asym_session *qat_session,
957 struct rte_crypto_asym_xform *xform)
959 uint8_t *modulus = xform->modex.modulus.data;
960 uint8_t *exponent = xform->modex.exponent.data;
962 qat_session->xform.modex.modulus.data =
963 rte_malloc(NULL, xform->modex.modulus.length, 0);
964 if (qat_session->xform.modex.modulus.data == NULL)
966 qat_session->xform.modex.modulus.length = xform->modex.modulus.length;
967 qat_session->xform.modex.exponent.data = rte_malloc(NULL,
968 xform->modex.exponent.length, 0);
969 if (qat_session->xform.modex.exponent.data == NULL) {
970 rte_free(qat_session->xform.modex.exponent.data);
973 qat_session->xform.modex.exponent.length = xform->modex.exponent.length;
975 rte_memcpy(qat_session->xform.modex.modulus.data, modulus,
976 xform->modex.modulus.length);
977 rte_memcpy(qat_session->xform.modex.exponent.data, exponent,
978 xform->modex.exponent.length);
/* Deep-copy the mod-inv modulus from the user xform into the session's
 * private xform so the session owns its own buffer.
 */
984 session_set_modinv(struct qat_asym_session *qat_session,
985 struct rte_crypto_asym_xform *xform)
987 uint8_t *modulus = xform->modinv.modulus.data;
989 qat_session->xform.modinv.modulus.data =
990 rte_malloc(NULL, xform->modinv.modulus.length, 0);
991 if (qat_session->xform.modinv.modulus.data == NULL)
993 qat_session->xform.modinv.modulus.length = xform->modinv.modulus.length;
995 rte_memcpy(qat_session->xform.modinv.modulus.data, modulus,
996 xform->modinv.modulus.length);
/* Deep-copy the RSA key material (n, e, then either the QT quintuple
 * p/q/dP/dQ/qInv or the private exponent d) from the user xform into
 * the session's private xform. The trailing rte_free() block is the
 * shared error-unwind path that releases whatever was allocated before
 * a failure.
 */
1002 session_set_rsa(struct qat_asym_session *qat_session,
1003 struct rte_crypto_asym_xform *xform)
1005 uint8_t *n = xform->rsa.n.data;
1006 uint8_t *e = xform->rsa.e.data;
1009 qat_session->xform.rsa.key_type = xform->rsa.key_type;
1011 qat_session->xform.rsa.n.data =
1012 rte_malloc(NULL, xform->rsa.n.length, 0);
1013 if (qat_session->xform.rsa.n.data == NULL)
1015 qat_session->xform.rsa.n.length =
1016 xform->rsa.n.length;
1018 qat_session->xform.rsa.e.data =
1019 rte_malloc(NULL, xform->rsa.e.length, 0);
1020 if (qat_session->xform.rsa.e.data == NULL) {
1024 qat_session->xform.rsa.e.length =
1025 xform->rsa.e.length;
/* QT (CRT) key: copy the five CRT components. */
1027 if (xform->rsa.key_type == RTE_RSA_KEY_TYPE_QT) {
1028 uint8_t *p = xform->rsa.qt.p.data;
1029 uint8_t *q = xform->rsa.qt.q.data;
1030 uint8_t *dP = xform->rsa.qt.dP.data;
1031 uint8_t *dQ = xform->rsa.qt.dQ.data;
1032 uint8_t *qInv = xform->rsa.qt.qInv.data;
1034 qat_session->xform.rsa.qt.p.data =
1035 rte_malloc(NULL, xform->rsa.qt.p.length, 0);
1036 if (qat_session->xform.rsa.qt.p.data == NULL) {
1040 qat_session->xform.rsa.qt.p.length =
1041 xform->rsa.qt.p.length;
1043 qat_session->xform.rsa.qt.q.data =
1044 rte_malloc(NULL, xform->rsa.qt.q.length, 0);
1045 if (qat_session->xform.rsa.qt.q.data == NULL) {
1049 qat_session->xform.rsa.qt.q.length =
1050 xform->rsa.qt.q.length;
1052 qat_session->xform.rsa.qt.dP.data =
1053 rte_malloc(NULL, xform->rsa.qt.dP.length, 0);
1054 if (qat_session->xform.rsa.qt.dP.data == NULL) {
1058 qat_session->xform.rsa.qt.dP.length =
1059 xform->rsa.qt.dP.length;
1061 qat_session->xform.rsa.qt.dQ.data =
1062 rte_malloc(NULL, xform->rsa.qt.dQ.length, 0);
1063 if (qat_session->xform.rsa.qt.dQ.data == NULL) {
1067 qat_session->xform.rsa.qt.dQ.length =
1068 xform->rsa.qt.dQ.length;
1070 qat_session->xform.rsa.qt.qInv.data =
1071 rte_malloc(NULL, xform->rsa.qt.qInv.length, 0);
1072 if (qat_session->xform.rsa.qt.qInv.data == NULL) {
1076 qat_session->xform.rsa.qt.qInv.length =
1077 xform->rsa.qt.qInv.length;
1079 rte_memcpy(qat_session->xform.rsa.qt.p.data, p,
1080 xform->rsa.qt.p.length);
1081 rte_memcpy(qat_session->xform.rsa.qt.q.data, q,
1082 xform->rsa.qt.q.length);
1083 rte_memcpy(qat_session->xform.rsa.qt.dP.data, dP,
1084 xform->rsa.qt.dP.length);
1085 rte_memcpy(qat_session->xform.rsa.qt.dQ.data, dQ,
1086 xform->rsa.qt.dQ.length);
1087 rte_memcpy(qat_session->xform.rsa.qt.qInv.data, qInv,
1088 xform->rsa.qt.qInv.length);
/* Non-QT key: copy the private exponent d. */
1091 uint8_t *d = xform->rsa.d.data;
1093 qat_session->xform.rsa.d.data =
1094 rte_malloc(NULL, xform->rsa.d.length, 0);
1095 if (qat_session->xform.rsa.d.data == NULL) {
1099 qat_session->xform.rsa.d.length =
1100 xform->rsa.d.length;
1101 rte_memcpy(qat_session->xform.rsa.d.data, d,
1102 xform->rsa.d.length);
1105 rte_memcpy(qat_session->xform.rsa.n.data, n,
1106 xform->rsa.n.length);
1107 rte_memcpy(qat_session->xform.rsa.e.data, e,
1108 xform->rsa.e.length);
/* Error-unwind: release everything allocated above. rte_free(NULL) is
 * a no-op, so unconditionally freeing all members is safe.
 */
1113 rte_free(qat_session->xform.rsa.n.data);
1114 rte_free(qat_session->xform.rsa.e.data);
1115 rte_free(qat_session->xform.rsa.d.data);
1116 rte_free(qat_session->xform.rsa.qt.p.data);
1117 rte_free(qat_session->xform.rsa.qt.q.data);
1118 rte_free(qat_session->xform.rsa.qt.dP.data);
1119 rte_free(qat_session->xform.rsa.qt.dQ.data);
1120 rte_free(qat_session->xform.rsa.qt.qInv.data);
/* EC sessions only need the curve id recorded; operands arrive
 * per-operation.
 */
1125 session_set_ecdsa(struct qat_asym_session *qat_session,
1126 struct rte_crypto_asym_xform *xform)
1128 qat_session->xform.ec.curve_id = xform->ec.curve_id;
/* cryptodev op: initialize the private asym session from the xform.
 * MODEX/MODINV/RSA deep-copy their parameters via the session_set_*
 * helpers; ECDSA/ECPM only record the curve id.
 */
1132 qat_asym_session_configure(struct rte_cryptodev *dev __rte_unused,
1133 struct rte_crypto_asym_xform *xform,
1134 struct rte_cryptodev_asym_session *session)
1136 struct qat_asym_session *qat_session;
1139 qat_session = (struct qat_asym_session *) session->sess_private_data;
1140 memset(qat_session, 0, sizeof(*qat_session));
1142 qat_session->xform.xform_type = xform->xform_type;
1143 switch (xform->xform_type) {
1144 case RTE_CRYPTO_ASYM_XFORM_MODEX:
1145 ret = session_set_modexp(qat_session, xform);
1147 case RTE_CRYPTO_ASYM_XFORM_MODINV:
1148 ret = session_set_modinv(qat_session, xform);
1150 case RTE_CRYPTO_ASYM_XFORM_RSA:
1151 ret = session_set_rsa(qat_session, xform);
1153 case RTE_CRYPTO_ASYM_XFORM_ECDSA:
1154 case RTE_CRYPTO_ASYM_XFORM_ECPM:
1155 session_set_ecdsa(qat_session, xform);
1162 QAT_LOG(ERR, "Unsupported xform type");
/* cryptodev op: private session size, rounded up to 8 bytes. */
1170 qat_asym_session_get_private_size(struct rte_cryptodev *dev __rte_unused)
1172 return RTE_ALIGN_CEIL(sizeof(struct qat_asym_session), 8);
/* Zeroize and free the session-owned mod-exp buffers.
 * NOTE(review): a plain memset immediately before rte_free may be
 * elided by the compiler; for key material consider an explicit-zero
 * primitive — verify project policy.
 */
1176 session_clear_modexp(struct rte_crypto_modex_xform *modex)
1178 memset(modex->modulus.data, 0, modex->modulus.length);
1179 rte_free(modex->modulus.data);
1180 memset(modex->exponent.data, 0, modex->exponent.length);
1181 rte_free(modex->exponent.data);
/* Zeroize and free the session-owned mod-inv modulus buffer. */
1185 session_clear_modinv(struct rte_crypto_modinv_xform *modinv)
1187 memset(modinv->modulus.data, 0, modinv->modulus.length);
1188 rte_free(modinv->modulus.data);
/* Zeroize and free all session-owned RSA key buffers: n and e always,
 * then d for EXP keys or the CRT quintuple otherwise.
 */
1192 session_clear_rsa(struct rte_crypto_rsa_xform *rsa)
1194 memset(rsa->n.data, 0, rsa->n.length);
1195 rte_free(rsa->n.data);
1196 memset(rsa->e.data, 0, rsa->e.length);
1197 rte_free(rsa->e.data);
1198 if (rsa->key_type == RTE_RSA_KEY_TYPE_EXP) {
1199 memset(rsa->d.data, 0, rsa->d.length);
1200 rte_free(rsa->d.data);
1202 memset(rsa->qt.p.data, 0, rsa->qt.p.length);
1203 rte_free(rsa->qt.p.data);
1204 memset(rsa->qt.q.data, 0, rsa->qt.q.length);
1205 rte_free(rsa->qt.q.data);
1206 memset(rsa->qt.dP.data, 0, rsa->qt.dP.length);
1207 rte_free(rsa->qt.dP.data);
1208 memset(rsa->qt.dQ.data, 0, rsa->qt.dQ.length);
1209 rte_free(rsa->qt.dQ.data);
1210 memset(rsa->qt.qInv.data, 0, rsa->qt.qInv.length);
1211 rte_free(rsa->qt.qInv.data);
/* Dispatch session teardown by xform type; EC sessions hold no
 * allocated key material and need no per-type clearing.
 */
1216 session_clear_xform(struct qat_asym_session *qat_session)
1218 switch (qat_session->xform.xform_type) {
1219 case RTE_CRYPTO_ASYM_XFORM_MODEX:
1220 session_clear_modexp(&qat_session->xform.modex);
1222 case RTE_CRYPTO_ASYM_XFORM_MODINV:
1223 session_clear_modinv(&qat_session->xform.modinv);
1225 case RTE_CRYPTO_ASYM_XFORM_RSA:
1226 session_clear_rsa(&qat_session->xform.rsa);
/* cryptodev op: free per-type key buffers, then wipe the whole private
 * session area.
 */
1234 qat_asym_session_clear(struct rte_cryptodev *dev,
1235 struct rte_cryptodev_asym_session *session)
1237 void *sess_priv = session->sess_private_data;
1238 struct qat_asym_session *qat_session =
1239 (struct qat_asym_session *)sess_priv;
1242 session_clear_xform(qat_session);
1243 memset(qat_session, 0, qat_asym_session_get_private_size(dev));
1248 qat_asym_crypto_enqueue_op_burst(void *qp, struct rte_crypto_op **ops,
1251 return qat_enqueue_op_burst(qp, qat_asym_build_request, (void **)ops,
1256 qat_asym_crypto_dequeue_op_burst(void *qp, struct rte_crypto_op **ops,
1259 return qat_dequeue_op_burst(qp, (void **)ops, qat_asym_process_response,
1264 qat_asym_init_op_cookie(void *op_cookie)
1267 struct qat_asym_op_cookie *cookie = op_cookie;
1269 cookie->input_addr = rte_mempool_virt2iova(cookie) +
1270 offsetof(struct qat_asym_op_cookie,
1273 cookie->output_addr = rte_mempool_virt2iova(cookie) +
1274 offsetof(struct qat_asym_op_cookie,
1275 output_params_ptrs);
1277 for (j = 0; j < 8; j++) {
1278 cookie->input_params_ptrs[j] =
1279 rte_mempool_virt2iova(cookie) +
1280 offsetof(struct qat_asym_op_cookie,
1282 cookie->output_params_ptrs[j] =
1283 rte_mempool_virt2iova(cookie) +
1284 offsetof(struct qat_asym_op_cookie,
/*
 * Create and register the asymmetric-crypto cryptodev instance fronting one
 * QAT PCI device: picks the generation-specific ops table, creates the
 * cryptodev, wires burst entry points, and (primary process only) publishes
 * capabilities through a named memzone that secondaries can look up.
 */
qat_asym_dev_create(struct qat_pci_device *qat_pci_dev,
		struct qat_dev_cmd_param *qat_dev_cmd_param)
	struct qat_cryptodev_private *internals;
	struct rte_cryptodev *cryptodev;
	struct qat_device_info *qat_dev_instance =
		&qat_pci_devs[qat_pci_dev->qat_dev_id];
	/* Allocate the cryptodev on the PCI device's NUMA node. */
	struct rte_cryptodev_pmd_init_params init_params = {
		.socket_id = qat_dev_instance->pci_dev->device.numa_node,
		.private_data_size = sizeof(struct qat_cryptodev_private)
	struct qat_capabilities_info capa_info;
	const struct rte_cryptodev_capabilities *capabilities;
	/* Ops/capability callbacks specific to this QAT hardware generation. */
	const struct qat_crypto_gen_dev_ops *gen_dev_ops =
		&qat_asym_gen_dev_ops[qat_pci_dev->qat_dev_gen];
	char name[RTE_CRYPTODEV_NAME_MAX_LEN];
	char capa_memz_name[RTE_CRYPTODEV_NAME_MAX_LEN];

	/* Cryptodev name is "<qat pci dev name>_asym". */
	snprintf(name, RTE_CRYPTODEV_NAME_MAX_LEN, "%s_%s",
		qat_pci_dev->name, "asym");
	QAT_LOG(DEBUG, "Creating QAT ASYM device %s\n", name);

	/* A NULL ops table means this generation has no asym support. */
	if (gen_dev_ops->cryptodev_ops == NULL) {
		QAT_LOG(ERR, "Device %s does not support asymmetric crypto",

	/* Primary process records the driver id; a secondary must see the
	 * same id or its view of the device is inconsistent.
	 */
	if (rte_eal_process_type() == RTE_PROC_PRIMARY) {
		qat_pci_dev->qat_asym_driver_id =
	} else if (rte_eal_process_type() == RTE_PROC_SECONDARY) {
		if (qat_pci_dev->qat_asym_driver_id !=
			qat_asym_driver_id) {
			"Device %s have different driver id than corresponding device in primary process",

	/* Populate subset device to use in cryptodev device creation */
	qat_dev_instance->asym_rte_dev.driver = &cryptodev_qat_asym_driver;
	qat_dev_instance->asym_rte_dev.numa_node =
		qat_dev_instance->pci_dev->device.numa_node;
	qat_dev_instance->asym_rte_dev.devargs = NULL;

	cryptodev = rte_cryptodev_pmd_create(name,
		&(qat_dev_instance->asym_rte_dev), &init_params);

	if (cryptodev == NULL)

	/* Wire up device ops and the asym burst entry points. */
	qat_dev_instance->asym_rte_dev.name = cryptodev->data->name;
	cryptodev->driver_id = qat_asym_driver_id;
	cryptodev->dev_ops = gen_dev_ops->cryptodev_ops;

	cryptodev->enqueue_burst = qat_asym_crypto_enqueue_op_burst;
	cryptodev->dequeue_burst = qat_asym_crypto_dequeue_op_burst;

	cryptodev->feature_flags = gen_dev_ops->get_feature_flags(qat_pci_dev);

	/* Capability memzone setup below is primary-process-only work. */
	if (rte_eal_process_type() != RTE_PROC_PRIMARY)

	/* Memzone name encodes the generation so lookups match exactly. */
	snprintf(capa_memz_name, RTE_CRYPTODEV_NAME_MAX_LEN,
		"QAT_ASYM_CAPA_GEN_%d",
		qat_pci_dev->qat_dev_gen);

	internals = cryptodev->data->dev_private;
	internals->qat_dev = qat_pci_dev;
	internals->dev_id = cryptodev->data->dev_id;

	capa_info = gen_dev_ops->get_capabilities(qat_pci_dev);
	capabilities = capa_info.data;
	capa_size = capa_info.size;

	/* Reuse an existing capability memzone or reserve a fresh one. */
	internals->capa_mz = rte_memzone_lookup(capa_memz_name);
	if (internals->capa_mz == NULL) {
		internals->capa_mz = rte_memzone_reserve(capa_memz_name,
			capa_size, rte_socket_id(), 0);
		if (internals->capa_mz == NULL) {
			/* Allocation failed: undo the cryptodev creation and
			 * clear the subset device so no stale state remains.
			 */
			"Error allocating memzone for capabilities, "
			"destroying PMD for %s",
			rte_cryptodev_pmd_destroy(cryptodev);
			memset(&qat_dev_instance->asym_rte_dev, 0,
				sizeof(qat_dev_instance->asym_rte_dev));

	memcpy(internals->capa_mz->addr, capabilities, capa_size);
	internals->qat_dev_capabilities = internals->capa_mz->addr;

	/* Pick up the optional asym enqueue-threshold devarg, if present. */
	if (qat_dev_cmd_param[i].name == NULL)
	if (!strcmp(qat_dev_cmd_param[i].name, ASYM_ENQ_THRESHOLD_NAME))
		internals->min_enq_burst_threshold =
			qat_dev_cmd_param[i].val;

	qat_pci_dev->asym_dev = internals;
	internals->service_type = QAT_SERVICE_ASYMMETRIC;
	QAT_LOG(DEBUG, "Created QAT ASYM device %s as cryptodev instance %d",
		cryptodev->data->name, internals->dev_id);
/*
 * Tear down the asymmetric cryptodev instance of one QAT PCI device:
 * release the capability memzone (primary process only), destroy the
 * cryptodev and unlink the asym service from the PCI device.
 */
qat_asym_dev_destroy(struct qat_pci_device *qat_pci_dev)
	struct rte_cryptodev *cryptodev;

	/* Nothing to tear down if the device or its asym service is absent. */
	if (qat_pci_dev == NULL)
	if (qat_pci_dev->asym_dev == NULL)
	/* The capability memzone is owned by the primary process. */
	if (rte_eal_process_type() == RTE_PROC_PRIMARY)
		rte_memzone_free(qat_pci_dev->asym_dev->capa_mz);

	/* free crypto device */
	cryptodev = rte_cryptodev_pmd_get_dev(
		qat_pci_dev->asym_dev->dev_id);
	rte_cryptodev_pmd_destroy(cryptodev);
	/* Clear back-references so a later create starts from clean state. */
	qat_pci_devs[qat_pci_dev->qat_dev_id].asym_rte_dev.name = NULL;
	qat_pci_dev->asym_dev = NULL;
/* Register this PMD with the cryptodev framework: allocates a unique driver
 * id into qat_asym_driver_id, which sessions use to locate their PMD.
 */
static struct cryptodev_driver qat_crypto_drv;
RTE_PMD_REGISTER_CRYPTO_DRIVER(qat_crypto_drv,
		cryptodev_qat_asym_driver,
		qat_asym_driver_id);