1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2019 - 2022 Intel Corporation
7 #include <cryptodev_pmd.h>
9 #include "qat_device.h"
13 #include "icp_qat_fw_pke.h"
14 #include "icp_qat_fw.h"
/* Driver id assigned at registration; used when creating cryptodevs. */
18 uint8_t qat_asym_driver_id;

/* Per-generation dispatch table, filled in by the gen-specific files. */
20 struct qat_crypto_gen_dev_ops qat_asym_gen_dev_ops[QAT_N_GENS];

22 /* An rte_driver is needed in the registration of both the device and the driver
24 * The actual qat pci's rte_driver can't be used as its name represents
25 * the whole pci device with all services. Think of this as a holder for a name
26 * for the crypto part of the pci device.
28 static const char qat_asym_drv_name[] = RTE_STR(CRYPTODEV_NAME_QAT_ASYM_PMD);
29 static const struct rte_driver cryptodev_qat_asym_driver = {
30 .name = qat_asym_drv_name,
31 .alias = qat_asym_drv_name
35 * Macros with suffix _F are used with some of predefined identifiers:
36 * - cookie->input_buffer
37 * - qat_func_alignsize
/* Hex dumps are compiled in only at DEBUG datapath log level; otherwise
 * the macros expand to nothing so the hot path carries no cost.
 */
39 #if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
40 #define HEXDUMP(name, where, size) QAT_DP_HEXDUMP_LOG(DEBUG, name, \
/* Dump the idx-th fixed-size slot of a flat buffer. */
42 #define HEXDUMP_OFF(name, where, size, idx) QAT_DP_HEXDUMP_LOG(DEBUG, name, \
43 &where[idx * size], size)
/* _F variant: slot idx of cookie->input_buffer, qat_func_alignsize wide. */
45 #define HEXDUMP_OFF_F(name, idx) QAT_DP_HEXDUMP_LOG(DEBUG, name, \
46 &cookie->input_buffer[idx * qat_func_alignsize], \
49 #define HEXDUMP(name, where, size)
50 #define HEXDUMP_OFF(name, where, size, idx)
51 #define HEXDUMP_OFF_F(name, idx)
/* Validate an rte_crypto_param: reject zero length or an all-zero value,
 * logging `name`/`pname` and setting `status` (expansion partly elided
 * in this listing).
 */
54 #define CHECK_IF_NOT_EMPTY(param, name, pname, status) \
56 if (param.length == 0) { \
59 " input parameter, zero length " pname \
62 } else if (check_zero(param)) { \
64 "Invalid " name " input parameter, empty " \
65 pname ", length = %d", \
/* Right-align `what` (big-endian number) into input_array[idx], padded
 * to `how` bytes.
 */
72 #define SET_PKE_LN(what, how, idx) \
73 rte_memcpy(cookie->input_array[idx] + how - \
/* Same, for a curve parameter `p` of `curve`, padded to qat_func_alignsize. */
78 #define SET_PKE_LN_EC(curve, p, idx) \
79 rte_memcpy(cookie->input_array[idx] + \
80 qat_func_alignsize - curve.bytesize, \
81 curve.p.data, curve.bytesize)
/* 9A layout: right-align `what` into slot idx of the flat input_buffer. */
83 #define SET_PKE_9A_IN(what, idx) \
84 rte_memcpy(&cookie->input_buffer[idx * \
85 qat_func_alignsize] + \
86 qat_func_alignsize - what.length, \
87 what.data, what.length)
/* 9A layout variant for a curve parameter. */
89 #define SET_PKE_9A_EC(curve, p, idx) \
90 rte_memcpy(&cookie->input_buffer[idx * \
91 qat_func_alignsize] + \
92 qat_func_alignsize - curve.bytesize, \
93 curve.p.data, curve.bytesize)
/* Zero a PKE firmware request and set the common header: PKE service
 * type plus the standard "valid" header flags.
 */
96 request_init(struct icp_qat_fw_pke_request *qat_req)
98 memset(qat_req, 0, sizeof(*qat_req));
99 qat_req->pke_hdr.service_type = ICP_QAT_FW_COMN_REQ_CPM_FW_PKE;
100 qat_req->pke_hdr.hdr_flags =
101 ICP_QAT_FW_COMN_HDR_FLAGS_BUILD
102 (ICP_QAT_FW_COMN_REQ_FLAG_SET);
/* Scrub the first in_count input and out_count output cookie buffers
 * (alg_size bytes each) so key material does not linger after an op.
 */
106 cleanup_arrays(struct qat_asym_op_cookie *cookie,
107 int in_count, int out_count, int alg_size)
111 for (i = 0; i < in_count; i++)
112 memset(cookie->input_array[i], 0x0, alg_size);
113 for (i = 0; i < out_count; i++)
114 memset(cookie->output_array[i], 0x0, alg_size);
/* Scrub cookie buffers after an RSA-CRT op: input[0] (the full-size
 * cipher/message) is alg_size bytes, the remaining CRT inputs
 * (p, q, dP, dQ, qInv) are half-size; outputs are full-size.
 */
118 cleanup_crt(struct qat_asym_op_cookie *cookie,
123 memset(cookie->input_array[0], 0x0, alg_size);
124 for (i = 1; i < QAT_ASYM_RSA_QT_NUM_IN_PARAMS; i++)
125 memset(cookie->input_array[i], 0x0, alg_size / 2);
126 for (i = 0; i < QAT_ASYM_RSA_NUM_OUT_PARAMS; i++)
127 memset(cookie->output_array[i], 0x0, alg_size);
/* Dispatch per-xform scrubbing of the op cookie. Unknown xform types
 * fall through to a conservative full wipe of QAT_ASYM_MAX_PARAMS
 * buffers at QAT_PKE_MAX_LN_SIZE each (tail of the else elided in
 * this listing).
 */
131 cleanup(struct qat_asym_op_cookie *cookie,
132 struct rte_crypto_asym_xform *xform, int alg_size)
134 if (xform->xform_type == RTE_CRYPTO_ASYM_XFORM_MODEX)
135 cleanup_arrays(cookie, QAT_ASYM_MODEXP_NUM_IN_PARAMS,
136 QAT_ASYM_MODEXP_NUM_OUT_PARAMS, alg_size);
137 else if (xform->xform_type == RTE_CRYPTO_ASYM_XFORM_MODINV)
138 cleanup_arrays(cookie, QAT_ASYM_MODINV_NUM_IN_PARAMS,
139 QAT_ASYM_MODINV_NUM_OUT_PARAMS, alg_size);
140 else if (xform->xform_type == RTE_CRYPTO_ASYM_XFORM_RSA) {
/* CRT keys use half-size buffers for most inputs. */
141 if (xform->rsa.key_type == RTE_RSA_KEY_TYPE_QT)
142 cleanup_crt(cookie, alg_size);
144 cleanup_arrays(cookie, QAT_ASYM_RSA_NUM_IN_PARAMS,
145 QAT_ASYM_RSA_NUM_OUT_PARAMS, alg_size);
148 cleanup_arrays(cookie, QAT_ASYM_MAX_PARAMS,
150 QAT_PKE_MAX_LN_SIZE);
/* Return non-zero when the big-endian number in `n` is all zeroes
 * (several branches of the length-based fast path are elided in this
 * listing). Short buffers are scanned byte-wise; longer ones first
 * test the trailing 8 bytes as a uint64_t.
 * NOTE(review): the *(uint64_t *)&n.data[len - 8] load assumes the
 * buffer is suitably aligned and violates strict aliasing — confirm
 * callers guarantee alignment, or switch to memcpy.
 */
155 check_zero(rte_crypto_param n)
157 int i, len = n.length;
160 for (i = len - 1; i >= 0; i--) {
161 if (n.data[i] != 0x0)
164 } else if (len == 8 && *(uint64_t *)&n.data[len - 8] == 0) {
166 } else if (*(uint64_t *)&n.data[len - 8] == 0) {
167 for (i = len - 9; i >= 0; i--) {
168 if (n.data[i] != 0x0)
/* Map an xform to the firmware functionality descriptor (func_id +
 * operand byte size). Unsupported xform types yield func_id == 0,
 * which callers treat as an error.
 */
177 static struct qat_asym_function
178 get_asym_function(struct rte_crypto_asym_xform *xform)
180 struct qat_asym_function qat_function;
182 switch (xform->xform_type) {
183 case RTE_CRYPTO_ASYM_XFORM_MODEX:
184 qat_function = get_modexp_function(xform);
186 case RTE_CRYPTO_ASYM_XFORM_MODINV:
187 qat_function = get_modinv_function(xform);
190 qat_function.func_id = 0;
/* Prepare a modular-exponentiation request: validate modulus and
 * exponent, size the operands by the largest of base/exponent/modulus,
 * pick the firmware function for that size, and right-align the three
 * operands into the cookie's input arrays.
 */
198 modexp_set_input(struct rte_crypto_asym_op *asym_op,
199 struct icp_qat_fw_pke_request *qat_req,
200 struct qat_asym_op_cookie *cookie,
201 struct rte_crypto_asym_xform *xform)
203 struct qat_asym_function qat_function;
204 uint32_t alg_bytesize, func_id, in_bytesize;
207 CHECK_IF_NOT_EMPTY(xform->modex.modulus, "mod exp",
209 CHECK_IF_NOT_EMPTY(xform->modex.exponent, "mod exp",
/* Operand width is the maximum of the three input lengths. */
214 if (asym_op->modex.base.length > xform->modex.exponent.length &&
215 asym_op->modex.base.length > xform->modex.modulus.length) {
216 in_bytesize = asym_op->modex.base.length;
217 } else if (xform->modex.exponent.length > xform->modex.modulus.length)
218 in_bytesize = xform->modex.exponent.length;
220 in_bytesize = xform->modex.modulus.length;
222 qat_function = get_modexp_function2(in_bytesize);
223 func_id = qat_function.func_id;
224 if (qat_function.func_id == 0) {
225 QAT_LOG(ERR, "Cannot obtain functionality id");
228 alg_bytesize = qat_function.bytesize;
/* Firmware operand order: base, exponent, modulus. */
230 SET_PKE_LN(asym_op->modex.base, alg_bytesize, 0);
231 SET_PKE_LN(xform->modex.exponent, alg_bytesize, 1);
232 SET_PKE_LN(xform->modex.modulus, alg_bytesize, 2);
234 cookie->alg_bytesize = alg_bytesize;
235 qat_req->pke_hdr.cd_pars.func_id = func_id;
236 qat_req->input_param_count = QAT_ASYM_MODEXP_NUM_IN_PARAMS;
237 qat_req->output_param_count = QAT_ASYM_MODEXP_NUM_OUT_PARAMS;
239 HEXDUMP("ModExp base", cookie->input_array[0], alg_bytesize);
240 HEXDUMP("ModExp exponent", cookie->input_array[1], alg_bytesize);
241 HEXDUMP("ModExp modulus", cookie->input_array[2], alg_bytesize);
/* Copy the mod exp result out of the cookie into the op, trimming the
 * firmware's left zero-padding so only modulus-length bytes are
 * returned.
 */
247 modexp_collect(struct rte_crypto_asym_op *asym_op,
248 struct qat_asym_op_cookie *cookie,
249 struct rte_crypto_asym_xform *xform)
251 rte_crypto_param n = xform->modex.modulus;
252 uint32_t alg_bytesize = cookie->alg_bytesize;
253 uint8_t *modexp_result = asym_op->modex.result.data;
/* Sanity: the modulus can never exceed the operand size chosen at
 * request-build time.
 */
255 if (n.length > alg_bytesize) {
256 QAT_LOG(ERR, "Incorrect length of modexp modulus");
257 return RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
259 rte_memcpy(modexp_result,
260 cookie->output_array[0] + alg_bytesize
261 - n.length, n.length);
262 HEXDUMP("ModExp result", cookie->output_array[0],
264 return RTE_CRYPTO_OP_STATUS_SUCCESS;
/* Prepare a modular-inverse request: validate the modulus, resolve the
 * firmware function from the xform, and load base + modulus into the
 * cookie input arrays (func_id == 0 check elided in this listing).
 */
268 modinv_set_input(struct rte_crypto_asym_op *asym_op,
269 struct icp_qat_fw_pke_request *qat_req,
270 struct qat_asym_op_cookie *cookie,
271 struct rte_crypto_asym_xform *xform)
273 struct qat_asym_function qat_function;
274 uint32_t alg_bytesize, func_id;
277 CHECK_IF_NOT_EMPTY(xform->modex.modulus, "mod inv",
282 qat_function = get_asym_function(xform);
283 func_id = qat_function.func_id;
285 QAT_LOG(ERR, "Cannot obtain functionality id");
288 alg_bytesize = qat_function.bytesize;
290 SET_PKE_LN(asym_op->modinv.base, alg_bytesize, 0);
291 SET_PKE_LN(xform->modinv.modulus, alg_bytesize, 1);
293 cookie->alg_bytesize = alg_bytesize;
294 qat_req->pke_hdr.cd_pars.func_id = func_id;
295 qat_req->input_param_count =
296 QAT_ASYM_MODINV_NUM_IN_PARAMS;
297 qat_req->output_param_count =
298 QAT_ASYM_MODINV_NUM_OUT_PARAMS;
300 HEXDUMP("ModInv base", cookie->input_array[0], alg_bytesize);
301 HEXDUMP("ModInv modulus", cookie->input_array[1], alg_bytesize);
/* Copy the mod inverse result into the op, right-aligned within the
 * caller's result buffer and left-trimmed of firmware zero-padding.
 */
307 modinv_collect(struct rte_crypto_asym_op *asym_op,
308 struct qat_asym_op_cookie *cookie,
309 struct rte_crypto_asym_xform *xform)
311 rte_crypto_param n = xform->modinv.modulus;
312 uint8_t *modinv_result = asym_op->modinv.result.data;
313 uint32_t alg_bytesize = cookie->alg_bytesize;
315 if (n.length > alg_bytesize) {
316 QAT_LOG(ERR, "Incorrect length of modinv modulus");
317 return RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
/* Destination offset (result.length - n.length) partly elided here. */
319 rte_memcpy(modinv_result + (asym_op->modinv.result.length
321 cookie->output_array[0] + alg_bytesize
322 - n.length, n.length);
323 HEXDUMP("ModInv result", cookie->output_array[0],
325 return RTE_CRYPTO_OP_STATUS_SUCCESS;
/* Build the public-key RSA request (encrypt or verify): input 0 is the
 * message (encrypt) or signature (verify), inputs 1 and 2 are the
 * public exponent e and modulus n. Only PADDING_NONE is accepted;
 * other padding types log an error (error returns elided in listing).
 */
329 rsa_set_pub_input(struct rte_crypto_asym_op *asym_op,
330 struct icp_qat_fw_pke_request *qat_req,
331 struct qat_asym_op_cookie *cookie,
332 struct rte_crypto_asym_xform *xform)
334 struct qat_asym_function qat_function;
335 uint32_t alg_bytesize, func_id;
338 qat_function = get_rsa_enc_function(xform);
339 func_id = qat_function.func_id;
341 QAT_LOG(ERR, "Cannot obtain functionality id");
344 alg_bytesize = qat_function.bytesize;
346 if (asym_op->rsa.op_type == RTE_CRYPTO_ASYM_OP_ENCRYPT) {
347 switch (asym_op->rsa.padding.type) {
348 case RTE_CRYPTO_RSA_PADDING_NONE:
349 SET_PKE_LN(asym_op->rsa.message, alg_bytesize, 0);
353 "Invalid RSA padding (Encryption)"
357 HEXDUMP("RSA Message", cookie->input_array[0], alg_bytesize);
/* Verify path: operand 0 carries the signature instead. */
359 switch (asym_op->rsa.padding.type) {
360 case RTE_CRYPTO_RSA_PADDING_NONE:
361 SET_PKE_LN(asym_op->rsa.sign, alg_bytesize, 0);
365 "Invalid RSA padding (Verify)");
368 HEXDUMP("RSA Signature", cookie->input_array[0],
372 SET_PKE_LN(xform->rsa.e, alg_bytesize, 1);
373 SET_PKE_LN(xform->rsa.n, alg_bytesize, 2);
375 cookie->alg_bytesize = alg_bytesize;
376 qat_req->pke_hdr.cd_pars.func_id = func_id;
378 HEXDUMP("RSA Public Key", cookie->input_array[1], alg_bytesize);
379 HEXDUMP("RSA Modulus", cookie->input_array[2], alg_bytesize);
/* Build the private-key RSA request (decrypt or sign). QT (CRT) keys
 * load p/q/dP/dQ/qInv at half the operand width into inputs 1..5;
 * EXP keys load d and n into inputs 1..2. Input 0 then receives the
 * ciphertext (decrypt) or message (sign); only PADDING_NONE is
 * supported (error returns elided in this listing).
 */
385 rsa_set_priv_input(struct rte_crypto_asym_op *asym_op,
386 struct icp_qat_fw_pke_request *qat_req,
387 struct qat_asym_op_cookie *cookie,
388 struct rte_crypto_asym_xform *xform)
390 struct qat_asym_function qat_function;
391 uint32_t alg_bytesize, func_id;
394 if (xform->rsa.key_type == RTE_RSA_KEY_TYPE_QT) {
395 qat_function = get_rsa_crt_function(xform);
396 func_id = qat_function.func_id;
398 QAT_LOG(ERR, "Cannot obtain functionality id");
401 alg_bytesize = qat_function.bytesize;
402 qat_req->input_param_count =
403 QAT_ASYM_RSA_QT_NUM_IN_PARAMS;
/* CRT components are half the modulus width. */
405 SET_PKE_LN(xform->rsa.qt.p, (alg_bytesize >> 1), 1);
406 SET_PKE_LN(xform->rsa.qt.q, (alg_bytesize >> 1), 2);
407 SET_PKE_LN(xform->rsa.qt.dP, (alg_bytesize >> 1), 3);
408 SET_PKE_LN(xform->rsa.qt.dQ, (alg_bytesize >> 1), 4);
409 SET_PKE_LN(xform->rsa.qt.qInv, (alg_bytesize >> 1), 5);
411 HEXDUMP("RSA p", cookie->input_array[1],
413 HEXDUMP("RSA q", cookie->input_array[2],
415 HEXDUMP("RSA dP", cookie->input_array[3],
417 HEXDUMP("RSA dQ", cookie->input_array[4],
419 HEXDUMP("RSA qInv", cookie->input_array[5],
421 } else if (xform->rsa.key_type ==
422 RTE_RSA_KEY_TYPE_EXP) {
423 qat_function = get_rsa_dec_function(xform);
424 func_id = qat_function.func_id;
426 QAT_LOG(ERR, "Cannot obtain functionality id");
429 alg_bytesize = qat_function.bytesize;
431 SET_PKE_LN(xform->rsa.d, alg_bytesize, 1);
432 SET_PKE_LN(xform->rsa.n, alg_bytesize, 2);
434 HEXDUMP("RSA d", cookie->input_array[1],
436 HEXDUMP("RSA n", cookie->input_array[2],
439 QAT_LOG(ERR, "Invalid RSA key type");
443 if (asym_op->rsa.op_type ==
444 RTE_CRYPTO_ASYM_OP_DECRYPT) {
445 switch (asym_op->rsa.padding.type) {
446 case RTE_CRYPTO_RSA_PADDING_NONE:
447 SET_PKE_LN(asym_op->rsa.cipher, alg_bytesize, 0);
448 HEXDUMP("RSA ciphertext", cookie->input_array[0],
453 "Invalid padding of RSA (Decrypt)");
457 } else if (asym_op->rsa.op_type ==
458 RTE_CRYPTO_ASYM_OP_SIGN) {
459 switch (asym_op->rsa.padding.type) {
460 case RTE_CRYPTO_RSA_PADDING_NONE:
461 SET_PKE_LN(asym_op->rsa.message, alg_bytesize, 0);
462 HEXDUMP("RSA text to be signed", cookie->input_array[0],
467 "Invalid padding of RSA (Signature)");
472 cookie->alg_bytesize = alg_bytesize;
473 qat_req->pke_hdr.cd_pars.func_id = func_id;
/* Route an RSA op to the public-key path (encrypt/verify) or the
 * private-key path (decrypt/sign), after setting the common in/out
 * parameter counts.
 */
478 rsa_set_input(struct rte_crypto_asym_op *asym_op,
479 struct icp_qat_fw_pke_request *qat_req,
480 struct qat_asym_op_cookie *cookie,
481 struct rte_crypto_asym_xform *xform)
483 qat_req->input_param_count =
484 QAT_ASYM_RSA_NUM_IN_PARAMS;
485 qat_req->output_param_count =
486 QAT_ASYM_RSA_NUM_OUT_PARAMS;
488 if (asym_op->rsa.op_type == RTE_CRYPTO_ASYM_OP_ENCRYPT ||
489 asym_op->rsa.op_type ==
490 RTE_CRYPTO_ASYM_OP_VERIFY) {
491 return rsa_set_pub_input(asym_op, qat_req, cookie, xform);
493 return rsa_set_priv_input(asym_op, qat_req, cookie, xform);
/* Copy RSA firmware output back into the op, by op type:
 * ENCRYPT -> rsa.cipher, VERIFY -> recovered signature (into
 * rsa.cipher), DECRYPT -> rsa.message, SIGN -> rsa.sign. Padding
 * other than NONE is rejected where checked.
 */
498 rsa_collect(struct rte_crypto_asym_op *asym_op,
499 struct qat_asym_op_cookie *cookie)
501 uint32_t alg_bytesize = cookie->alg_bytesize;
503 if (asym_op->rsa.op_type == RTE_CRYPTO_ASYM_OP_ENCRYPT ||
504 asym_op->rsa.op_type == RTE_CRYPTO_ASYM_OP_VERIFY) {
506 if (asym_op->rsa.op_type ==
507 RTE_CRYPTO_ASYM_OP_ENCRYPT) {
508 uint8_t *rsa_result = asym_op->rsa.cipher.data;
510 rte_memcpy(rsa_result,
511 cookie->output_array[0],
513 HEXDUMP("RSA Encrypted data", cookie->output_array[0],
/* Verify path: result is compared/returned via rsa.cipher. */
516 uint8_t *rsa_result = asym_op->rsa.cipher.data;
518 switch (asym_op->rsa.padding.type) {
519 case RTE_CRYPTO_RSA_PADDING_NONE:
520 rte_memcpy(rsa_result,
521 cookie->output_array[0],
523 HEXDUMP("RSA signature",
524 cookie->output_array[0],
528 QAT_LOG(ERR, "Padding not supported");
529 return RTE_CRYPTO_OP_STATUS_ERROR;
533 if (asym_op->rsa.op_type == RTE_CRYPTO_ASYM_OP_DECRYPT) {
534 uint8_t *rsa_result = asym_op->rsa.message.data;
536 switch (asym_op->rsa.padding.type) {
537 case RTE_CRYPTO_RSA_PADDING_NONE:
538 rte_memcpy(rsa_result,
539 cookie->output_array[0],
541 HEXDUMP("RSA Decrypted Message",
542 cookie->output_array[0],
546 QAT_LOG(ERR, "Padding not supported");
547 return RTE_CRYPTO_OP_STATUS_ERROR;
550 uint8_t *rsa_result = asym_op->rsa.sign.data;
552 rte_memcpy(rsa_result,
553 cookie->output_array[0],
555 HEXDUMP("RSA Signature", cookie->output_array[0],
559 return RTE_CRYPTO_OP_STATUS_SUCCESS;
/* Build an ECDSA request. The curve is selected via pick_curve();
 * operands are loaded into the flat 9A input buffer, each slot
 * right-aligned to qat_func_alignsize (firmware size rounded up to 8).
 * SIGN loads pkey/message/k plus curve b,a,p,n and generator y,x;
 * VERIFY loads message/s/r, curve n, generator x,y, public point
 * q.x/q.y and curve a,b,p in the firmware's expected slot order.
 */
563 ecdsa_set_input(struct rte_crypto_asym_op *asym_op,
564 struct icp_qat_fw_pke_request *qat_req,
565 struct qat_asym_op_cookie *cookie,
566 struct rte_crypto_asym_xform *xform)
568 struct qat_asym_function qat_function;
569 uint32_t qat_func_alignsize, func_id;
572 curve_id = pick_curve(xform);
574 QAT_LOG(DEBUG, "Incorrect elliptic curve");
578 switch (asym_op->ecdsa.op_type) {
579 case RTE_CRYPTO_ASYM_OP_SIGN:
580 qat_function = get_ecdsa_function(xform);
581 func_id = qat_function.func_id;
583 QAT_LOG(ERR, "Cannot obtain functionality id");
/* Slots are padded to an 8-byte-aligned firmware operand size. */
587 RTE_ALIGN_CEIL(qat_function.bytesize, 8);
589 SET_PKE_9A_IN(asym_op->ecdsa.pkey, 0);
590 SET_PKE_9A_IN(asym_op->ecdsa.message, 1);
591 SET_PKE_9A_IN(asym_op->ecdsa.k, 2);
592 SET_PKE_9A_EC(curve[curve_id], b, 3);
593 SET_PKE_9A_EC(curve[curve_id], a, 4);
594 SET_PKE_9A_EC(curve[curve_id], p, 5);
595 SET_PKE_9A_EC(curve[curve_id], n, 6);
596 SET_PKE_9A_EC(curve[curve_id], y, 7);
597 SET_PKE_9A_EC(curve[curve_id], x, 8);
599 cookie->alg_bytesize = curve[curve_id].bytesize;
600 cookie->qat_func_alignsize = qat_func_alignsize;
601 qat_req->pke_hdr.cd_pars.func_id = func_id;
602 qat_req->input_param_count =
603 QAT_ASYM_ECDSA_RS_SIGN_IN_PARAMS;
604 qat_req->output_param_count =
605 QAT_ASYM_ECDSA_RS_SIGN_OUT_PARAMS;
607 HEXDUMP_OFF_F("ECDSA d", 0);
608 HEXDUMP_OFF_F("ECDSA e", 1);
609 HEXDUMP_OFF_F("ECDSA k", 2);
610 HEXDUMP_OFF_F("ECDSA b", 3);
611 HEXDUMP_OFF_F("ECDSA a", 4);
612 HEXDUMP_OFF_F("ECDSA n", 5);
613 HEXDUMP_OFF_F("ECDSA y", 6);
614 HEXDUMP_OFF_F("ECDSA x", 7);
616 case RTE_CRYPTO_ASYM_OP_VERIFY:
617 qat_function = get_ecdsa_verify_function(xform);
618 func_id = qat_function.func_id;
620 QAT_LOG(ERR, "Cannot obtain functionality id");
623 qat_func_alignsize = RTE_ALIGN_CEIL(qat_function.bytesize, 8);
625 SET_PKE_9A_IN(asym_op->ecdsa.message, 10);
626 SET_PKE_9A_IN(asym_op->ecdsa.s, 9);
627 SET_PKE_9A_IN(asym_op->ecdsa.r, 8);
628 SET_PKE_9A_EC(curve[curve_id], n, 7);
629 SET_PKE_9A_EC(curve[curve_id], x, 6);
630 SET_PKE_9A_EC(curve[curve_id], y, 5);
631 SET_PKE_9A_IN(asym_op->ecdsa.q.x, 4);
632 SET_PKE_9A_IN(asym_op->ecdsa.q.y, 3);
633 SET_PKE_9A_EC(curve[curve_id], a, 2);
634 SET_PKE_9A_EC(curve[curve_id], b, 1);
635 SET_PKE_9A_EC(curve[curve_id], p, 0);
637 cookie->alg_bytesize = curve[curve_id].bytesize;
638 cookie->qat_func_alignsize = qat_func_alignsize;
639 qat_req->pke_hdr.cd_pars.func_id = func_id;
640 qat_req->input_param_count =
641 QAT_ASYM_ECDSA_RS_VERIFY_IN_PARAMS;
642 qat_req->output_param_count =
643 QAT_ASYM_ECDSA_RS_VERIFY_OUT_PARAMS;
645 HEXDUMP_OFF_F("p", 0);
646 HEXDUMP_OFF_F("b", 1);
647 HEXDUMP_OFF_F("a", 2);
648 HEXDUMP_OFF_F("y", 3);
649 HEXDUMP_OFF_F("x", 4);
650 HEXDUMP_OFF_F("yG", 5);
651 HEXDUMP_OFF_F("xG", 6);
652 HEXDUMP_OFF_F("n", 7);
653 HEXDUMP_OFF_F("r", 8);
654 HEXDUMP_OFF_F("s", 9);
655 HEXDUMP_OFF_F("e", 10);
665 ecdsa_collect(struct rte_crypto_asym_op *asym_op,
666 struct qat_asym_op_cookie *cookie)
668 uint32_t alg_bytesize = cookie->alg_bytesize;
669 uint32_t qat_func_alignsize = cookie->qat_func_alignsize;
670 uint32_t ltrim = qat_func_alignsize - alg_bytesize;
672 if (asym_op->rsa.op_type == RTE_CRYPTO_ASYM_OP_SIGN) {
673 uint8_t *r = asym_op->ecdsa.r.data;
674 uint8_t *s = asym_op->ecdsa.s.data;
676 asym_op->ecdsa.r.length = alg_bytesize;
677 asym_op->ecdsa.s.length = alg_bytesize;
678 rte_memcpy(r, &cookie->output_array[0][ltrim], alg_bytesize);
679 rte_memcpy(s, &cookie->output_array[1][ltrim], alg_bytesize);
681 HEXDUMP("R", cookie->output_array[0],
683 HEXDUMP("S", cookie->output_array[1],
686 return RTE_CRYPTO_OP_STATUS_SUCCESS;
/* Build an EC point-multiplication request: scalar k, point (x, y)
 * and curve parameters a, b, p, h, each right-aligned to the 8-byte-
 * rounded firmware operand size.
 */
690 ecpm_set_input(struct rte_crypto_asym_op *asym_op,
691 struct icp_qat_fw_pke_request *qat_req,
692 struct qat_asym_op_cookie *cookie,
693 struct rte_crypto_asym_xform *xform)
695 struct qat_asym_function qat_function;
696 uint32_t qat_func_alignsize, func_id;
699 curve_id = pick_curve(xform);
701 QAT_LOG(DEBUG, "Incorrect elliptic curve");
705 qat_function = get_ecpm_function(xform);
706 func_id = qat_function.func_id;
708 QAT_LOG(ERR, "Cannot obtain functionality id");
711 qat_func_alignsize = RTE_ALIGN_CEIL(qat_function.bytesize, 8);
713 SET_PKE_LN(asym_op->ecpm.scalar, qat_func_alignsize, 0);
714 SET_PKE_LN(asym_op->ecpm.p.x, qat_func_alignsize, 1);
715 SET_PKE_LN(asym_op->ecpm.p.y, qat_func_alignsize, 2);
716 SET_PKE_LN_EC(curve[curve_id], a, 3);
717 SET_PKE_LN_EC(curve[curve_id], b, 4);
718 SET_PKE_LN_EC(curve[curve_id], p, 5);
719 SET_PKE_LN_EC(curve[curve_id], h, 6);
721 cookie->alg_bytesize = curve[curve_id].bytesize;
722 cookie->qat_func_alignsize = qat_func_alignsize;
723 qat_req->pke_hdr.cd_pars.func_id = func_id;
724 qat_req->input_param_count =
725 QAT_ASYM_ECPM_IN_PARAMS;
726 qat_req->output_param_count =
727 QAT_ASYM_ECPM_OUT_PARAMS;
729 HEXDUMP("k", cookie->input_array[0], qat_func_alignsize);
730 HEXDUMP("xG", cookie->input_array[1], qat_func_alignsize);
731 HEXDUMP("yG", cookie->input_array[2], qat_func_alignsize);
732 HEXDUMP("a", cookie->input_array[3], qat_func_alignsize);
733 HEXDUMP("b", cookie->input_array[4], qat_func_alignsize);
734 HEXDUMP("q", cookie->input_array[5], qat_func_alignsize);
735 HEXDUMP("h", cookie->input_array[6], qat_func_alignsize);
/* Copy the resulting EC point out of the cookie: outputs 0 and 1 hold
 * x and y, left-padded to qat_func_alignsize; trim the pad and set
 * the result lengths to the curve byte size.
 */
741 ecpm_collect(struct rte_crypto_asym_op *asym_op,
742 struct qat_asym_op_cookie *cookie)
744 uint8_t *x = asym_op->ecpm.r.x.data;
745 uint8_t *y = asym_op->ecpm.r.y.data;
746 uint32_t alg_bytesize = cookie->alg_bytesize;
747 uint32_t qat_func_alignsize = cookie->qat_func_alignsize;
748 uint32_t ltrim = qat_func_alignsize - alg_bytesize;
750 asym_op->ecpm.r.x.length = alg_bytesize;
751 asym_op->ecpm.r.y.length = alg_bytesize;
752 rte_memcpy(x, &cookie->output_array[0][ltrim], alg_bytesize);
753 rte_memcpy(y, &cookie->output_array[1][ltrim], alg_bytesize);
755 HEXDUMP("rX", cookie->output_array[0],
757 HEXDUMP("rY", cookie->output_array[1],
759 return RTE_CRYPTO_OP_STATUS_SUCCESS;
/* Dispatch request building to the per-algorithm set_input helper;
 * unsupported xform types log an error (error return elided).
 */
763 asym_set_input(struct rte_crypto_asym_op *asym_op,
764 struct icp_qat_fw_pke_request *qat_req,
765 struct qat_asym_op_cookie *cookie,
766 struct rte_crypto_asym_xform *xform)
768 switch (xform->xform_type) {
769 case RTE_CRYPTO_ASYM_XFORM_MODEX:
770 return modexp_set_input(asym_op, qat_req,
772 case RTE_CRYPTO_ASYM_XFORM_MODINV:
773 return modinv_set_input(asym_op, qat_req,
775 case RTE_CRYPTO_ASYM_XFORM_RSA:
776 return rsa_set_input(asym_op, qat_req,
778 case RTE_CRYPTO_ASYM_XFORM_ECDSA:
779 return ecdsa_set_input(asym_op, qat_req,
781 case RTE_CRYPTO_ASYM_XFORM_ECPM:
782 return ecpm_set_input(asym_op, qat_req,
785 QAT_LOG(ERR, "Invalid/unsupported asymmetric crypto xform");
/* Enqueue-side callback: translate an rte_crypto_op into a PKE
 * firmware request in out_msg. On input-validation failure the
 * request is converted into a no-op (NULL service type, zero param
 * counts) so the firmware still produces a response, and the error
 * is stashed in cookie->error for the dequeue side.
 */
792 qat_asym_build_request(void *in_op, uint8_t *out_msg, void *op_cookie,
793 __rte_unused uint64_t *opaque,
794 __rte_unused enum qat_device_gen qat_dev_gen)
796 struct rte_crypto_op *op = (struct rte_crypto_op *)in_op;
797 struct rte_crypto_asym_op *asym_op = op->asym;
798 struct icp_qat_fw_pke_request *qat_req =
799 (struct icp_qat_fw_pke_request *)out_msg;
800 struct qat_asym_op_cookie *cookie =
801 (struct qat_asym_op_cookie *)op_cookie;
802 struct rte_crypto_asym_xform *xform;
803 struct qat_asym_session *qat_session = (struct qat_asym_session *)
804 op->asym->session->sess_private_data;
807 if (unlikely(qat_session == NULL)) {
808 QAT_DP_LOG(ERR, "Session was not created for this device");
812 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
/* Pick the xform from the session or directly from the op. */
813 switch (op->sess_type) {
814 case RTE_CRYPTO_OP_WITH_SESSION:
815 request_init(qat_req);
816 xform = &qat_session->xform;
818 case RTE_CRYPTO_OP_SESSIONLESS:
819 request_init(qat_req);
820 xform = op->asym->xform;
823 QAT_DP_LOG(ERR, "Invalid session/xform settings");
824 op->status = RTE_CRYPTO_OP_STATUS_INVALID_SESSION;
827 err = asym_set_input(asym_op, qat_req, cookie,
830 op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
/* Success path: wire op pointer and cookie DMA addresses. */
834 qat_req->pke_mid.opaque = (uint64_t)(uintptr_t)op;
835 qat_req->pke_mid.src_data_addr = cookie->input_addr;
836 qat_req->pke_mid.dest_data_addr = cookie->output_addr;
838 HEXDUMP("qat_req:", qat_req, sizeof(struct icp_qat_fw_pke_request));
/* Error path: send a NULL request so a response is still returned. */
842 qat_req->pke_mid.opaque = (uint64_t)(uintptr_t)op;
843 HEXDUMP("qat_req:", qat_req, sizeof(struct icp_qat_fw_pke_request));
844 qat_req->output_param_count = 0;
845 qat_req->input_param_count = 0;
846 qat_req->pke_hdr.service_type = ICP_QAT_FW_COMN_REQ_NULL;
847 cookie->error |= err;
/* Dispatch result collection to the per-algorithm collect helper;
 * returns an RTE_CRYPTO_OP_STATUS_* value.
 */
853 qat_asym_collect_response(struct rte_crypto_op *op,
854 struct qat_asym_op_cookie *cookie,
855 struct rte_crypto_asym_xform *xform)
857 struct rte_crypto_asym_op *asym_op = op->asym;
859 switch (xform->xform_type) {
860 case RTE_CRYPTO_ASYM_XFORM_MODEX:
861 return modexp_collect(asym_op, cookie, xform);
862 case RTE_CRYPTO_ASYM_XFORM_MODINV:
863 return modinv_collect(asym_op, cookie, xform);
864 case RTE_CRYPTO_ASYM_XFORM_RSA:
865 return rsa_collect(asym_op, cookie);
866 case RTE_CRYPTO_ASYM_XFORM_ECDSA:
867 return ecdsa_collect(asym_op, cookie);
868 case RTE_CRYPTO_ASYM_XFORM_ECPM:
869 return ecpm_collect(asym_op, cookie);
871 QAT_LOG(ERR, "Not supported xform type");
872 return RTE_CRYPTO_OP_STATUS_ERROR;
/* Dequeue-side callback: inspect the PKE response, map firmware/
 * cookie errors onto op->status, collect results when the op is still
 * NOT_PROCESSED, then scrub the cookie buffers.
 */
877 qat_asym_process_response(void **out_op, uint8_t *resp,
878 void *op_cookie, __rte_unused uint64_t *dequeue_err_count)
880 struct icp_qat_fw_pke_resp *resp_msg =
881 (struct icp_qat_fw_pke_resp *)resp;
882 struct rte_crypto_op *op = (struct rte_crypto_op *)(uintptr_t)
884 struct qat_asym_op_cookie *cookie = op_cookie;
885 struct rte_crypto_asym_xform *xform;
886 struct qat_asym_session *qat_session = (struct qat_asym_session *)
887 op->asym->session->sess_private_data;
/* Error recorded at build time (cookie->error check elided above). */
891 if (op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED)
892 op->status = RTE_CRYPTO_OP_STATUS_ERROR;
893 QAT_DP_LOG(DEBUG, "Cookie status returned error");
895 if (ICP_QAT_FW_PKE_RESP_PKE_STAT_GET(
896 resp_msg->pke_resp_hdr.resp_status.pke_resp_flags)) {
897 if (op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED)
898 op->status = RTE_CRYPTO_OP_STATUS_ERROR;
899 QAT_DP_LOG(DEBUG, "Asymmetric response status"
902 if (resp_msg->pke_resp_hdr.resp_status.comn_err_code) {
903 if (op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED)
904 op->status = RTE_CRYPTO_OP_STATUS_ERROR;
905 QAT_DP_LOG(ERR, "Asymmetric common status"
/* Resolve the xform exactly as the build side did. */
910 switch (op->sess_type) {
911 case RTE_CRYPTO_OP_WITH_SESSION:
912 xform = &qat_session->xform;
914 case RTE_CRYPTO_OP_SESSIONLESS:
915 xform = op->asym->xform;
919 "Invalid session/xform settings in response ring!");
920 op->status = RTE_CRYPTO_OP_STATUS_INVALID_SESSION;
923 if (op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED) {
924 op->status = qat_asym_collect_response(op,
/* Always wipe sensitive operand buffers once the op completes. */
926 cleanup(cookie, xform, cookie->alg_bytesize);
930 HEXDUMP("resp_msg:", resp_msg, sizeof(struct icp_qat_fw_pke_resp));
936 session_set_modexp(struct qat_asym_session *qat_session,
937 struct rte_crypto_asym_xform *xform)
939 uint8_t *modulus = xform->modex.modulus.data;
940 uint8_t *exponent = xform->modex.exponent.data;
942 qat_session->xform.modex.modulus.data =
943 rte_malloc(NULL, xform->modex.modulus.length, 0);
944 if (qat_session->xform.modex.modulus.data == NULL)
946 qat_session->xform.modex.modulus.length = xform->modex.modulus.length;
947 qat_session->xform.modex.exponent.data = rte_malloc(NULL,
948 xform->modex.exponent.length, 0);
949 if (qat_session->xform.modex.exponent.data == NULL) {
950 rte_free(qat_session->xform.modex.exponent.data);
953 qat_session->xform.modex.exponent.length = xform->modex.exponent.length;
955 rte_memcpy(qat_session->xform.modex.modulus.data, modulus,
956 xform->modex.modulus.length);
957 rte_memcpy(qat_session->xform.modex.exponent.data, exponent,
958 xform->modex.exponent.length);
/* Deep-copy the mod inverse modulus into a session-owned buffer;
 * returns -ENOMEM on allocation failure (return lines elided).
 */
964 session_set_modinv(struct qat_asym_session *qat_session,
965 struct rte_crypto_asym_xform *xform)
967 uint8_t *modulus = xform->modinv.modulus.data;
969 qat_session->xform.modinv.modulus.data =
970 rte_malloc(NULL, xform->modinv.modulus.length, 0);
971 if (qat_session->xform.modinv.modulus.data == NULL)
973 qat_session->xform.modinv.modulus.length = xform->modinv.modulus.length;
975 rte_memcpy(qat_session->xform.modinv.modulus.data, modulus,
976 xform->modinv.modulus.length)
/* Deep-copy the RSA key material (n, e, then either the CRT quintuple
 * p/q/dP/dQ/qInv or the private exponent d) into session-owned
 * buffers. On any allocation failure control reaches the shared
 * cleanup at the bottom which frees everything allocated so far
 * (the goto statements are elided in this listing); rte_free(NULL)
 * is a no-op, so freeing never-allocated members is safe.
 */
982 session_set_rsa(struct qat_asym_session *qat_session,
983 struct rte_crypto_asym_xform *xform)
985 uint8_t *n = xform->rsa.n.data;
986 uint8_t *e = xform->rsa.e.data;
989 qat_session->xform.rsa.key_type = xform->rsa.key_type;
991 qat_session->xform.rsa.n.data =
992 rte_malloc(NULL, xform->rsa.n.length, 0);
993 if (qat_session->xform.rsa.n.data == NULL)
995 qat_session->xform.rsa.n.length =
998 qat_session->xform.rsa.e.data =
999 rte_malloc(NULL, xform->rsa.e.length, 0);
1000 if (qat_session->xform.rsa.e.data == NULL) {
1004 qat_session->xform.rsa.e.length =
1005 xform->rsa.e.length;
1007 if (xform->rsa.key_type == RTE_RSA_KEY_TYPE_QT) {
1008 uint8_t *p = xform->rsa.qt.p.data;
1009 uint8_t *q = xform->rsa.qt.q.data;
1010 uint8_t *dP = xform->rsa.qt.dP.data;
1011 uint8_t *dQ = xform->rsa.qt.dQ.data;
1012 uint8_t *qInv = xform->rsa.qt.qInv.data;
1014 qat_session->xform.rsa.qt.p.data =
1015 rte_malloc(NULL, xform->rsa.qt.p.length, 0);
1016 if (qat_session->xform.rsa.qt.p.data == NULL) {
1020 qat_session->xform.rsa.qt.p.length =
1021 xform->rsa.qt.p.length;
1023 qat_session->xform.rsa.qt.q.data =
1024 rte_malloc(NULL, xform->rsa.qt.q.length, 0);
1025 if (qat_session->xform.rsa.qt.q.data == NULL) {
1029 qat_session->xform.rsa.qt.q.length =
1030 xform->rsa.qt.q.length;
1032 qat_session->xform.rsa.qt.dP.data =
1033 rte_malloc(NULL, xform->rsa.qt.dP.length, 0);
1034 if (qat_session->xform.rsa.qt.dP.data == NULL) {
1038 qat_session->xform.rsa.qt.dP.length =
1039 xform->rsa.qt.dP.length;
1041 qat_session->xform.rsa.qt.dQ.data =
1042 rte_malloc(NULL, xform->rsa.qt.dQ.length, 0);
1043 if (qat_session->xform.rsa.qt.dQ.data == NULL) {
1047 qat_session->xform.rsa.qt.dQ.length =
1048 xform->rsa.qt.dQ.length;
1050 qat_session->xform.rsa.qt.qInv.data =
1051 rte_malloc(NULL, xform->rsa.qt.qInv.length, 0);
1052 if (qat_session->xform.rsa.qt.qInv.data == NULL) {
1056 qat_session->xform.rsa.qt.qInv.length =
1057 xform->rsa.qt.qInv.length;
1059 rte_memcpy(qat_session->xform.rsa.qt.p.data, p,
1060 xform->rsa.qt.p.length);
1061 rte_memcpy(qat_session->xform.rsa.qt.q.data, q,
1062 xform->rsa.qt.q.length);
1063 rte_memcpy(qat_session->xform.rsa.qt.dP.data, dP,
1064 xform->rsa.qt.dP.length);
1065 rte_memcpy(qat_session->xform.rsa.qt.dQ.data, dQ,
1066 xform->rsa.qt.dQ.length);
1067 rte_memcpy(qat_session->xform.rsa.qt.qInv.data, qInv,
1068 xform->rsa.qt.qInv.length);
/* Non-QT key: copy the private exponent d instead. */
1071 uint8_t *d = xform->rsa.d.data;
1073 qat_session->xform.rsa.d.data =
1074 rte_malloc(NULL, xform->rsa.d.length, 0);
1075 if (qat_session->xform.rsa.d.data == NULL) {
1079 qat_session->xform.rsa.d.length =
1080 xform->rsa.d.length;
1081 rte_memcpy(qat_session->xform.rsa.d.data, d,
1082 xform->rsa.d.length);
1085 rte_memcpy(qat_session->xform.rsa.n.data, n,
1086 xform->rsa.n.length);
1087 rte_memcpy(qat_session->xform.rsa.e.data, e,
1088 xform->rsa.e.length);
/* Shared error cleanup: free whatever was allocated (NULLs are ok). */
1093 rte_free(qat_session->xform.rsa.n.data);
1094 rte_free(qat_session->xform.rsa.e.data);
1095 rte_free(qat_session->xform.rsa.d.data);
1096 rte_free(qat_session->xform.rsa.qt.p.data);
1097 rte_free(qat_session->xform.rsa.qt.q.data);
1098 rte_free(qat_session->xform.rsa.qt.dP.data);
1099 rte_free(qat_session->xform.rsa.qt.dQ.data);
1100 rte_free(qat_session->xform.rsa.qt.qInv.data);
/* EC sessions only need the curve id; all other parameters come from
 * the static curve table at request-build time.
 */
1105 session_set_ecdsa(struct qat_asym_session *qat_session,
1106 struct rte_crypto_asym_xform *xform)
1108 qat_session->xform.ec.curve_id = xform->ec.curve_id;
/* Session-configure op: zero the private session area, record the
 * xform type and deep-copy the per-algorithm key material.
 */
1112 qat_asym_session_configure(struct rte_cryptodev *dev __rte_unused,
1113 struct rte_crypto_asym_xform *xform,
1114 struct rte_cryptodev_asym_session *session)
1116 struct qat_asym_session *qat_session;
1119 qat_session = (struct qat_asym_session *) session->sess_private_data;
1120 memset(qat_session, 0, sizeof(*qat_session));
1122 qat_session->xform.xform_type = xform->xform_type;
1123 switch (xform->xform_type) {
1124 case RTE_CRYPTO_ASYM_XFORM_MODEX:
1125 ret = session_set_modexp(qat_session, xform);
1127 case RTE_CRYPTO_ASYM_XFORM_MODINV:
1128 ret = session_set_modinv(qat_session, xform);
1130 case RTE_CRYPTO_ASYM_XFORM_RSA:
1131 ret = session_set_rsa(qat_session, xform);
1133 case RTE_CRYPTO_ASYM_XFORM_ECDSA:
1134 case RTE_CRYPTO_ASYM_XFORM_ECPM:
/* EC types cannot fail: only the curve id is stored. */
1135 session_set_ecdsa(qat_session, xform);
1142 QAT_LOG(ERR, "Unsupported xform type");
/* Private session size, rounded up to an 8-byte multiple. */
1150 qat_asym_session_get_private_size(struct rte_cryptodev *dev __rte_unused)
1152 return RTE_ALIGN_CEIL(sizeof(struct qat_asym_session), 8);
/* Zeroize and free the session's mod exp key buffers. */
1156 session_clear_modexp(struct rte_crypto_modex_xform *modex)
1158 memset(modex->modulus.data, 0, modex->modulus.length);
1159 rte_free(modex->modulus.data);
1160 memset(modex->exponent.data, 0, modex->exponent.length);
1161 rte_free(modex->exponent.data);
/* Zeroize and free the session's mod inverse modulus buffer. */
1165 session_clear_modinv(struct rte_crypto_modinv_xform *modinv)
1167 memset(modinv->modulus.data, 0, modinv->modulus.length);
1168 rte_free(modinv->modulus.data);
/* Zeroize and free all RSA key buffers held by the session: n and e
 * always, then d for EXP keys or the CRT quintuple otherwise.
 */
1172 session_clear_rsa(struct rte_crypto_rsa_xform *rsa)
1174 memset(rsa->n.data, 0, rsa->n.length);
1175 rte_free(rsa->n.data);
1176 memset(rsa->e.data, 0, rsa->e.length);
1177 rte_free(rsa->e.data);
1178 if (rsa->key_type == RTE_RSA_KEY_TYPE_EXP) {
1179 memset(rsa->d.data, 0, rsa->d.length);
1180 rte_free(rsa->d.data);
1182 memset(rsa->qt.p.data, 0, rsa->qt.p.length);
1183 rte_free(rsa->qt.p.data);
1184 memset(rsa->qt.q.data, 0, rsa->qt.q.length);
1185 rte_free(rsa->qt.q.data);
1186 memset(rsa->qt.dP.data, 0, rsa->qt.dP.length);
1187 rte_free(rsa->qt.dP.data);
1188 memset(rsa->qt.dQ.data, 0, rsa->qt.dQ.length);
1189 rte_free(rsa->qt.dQ.data);
1190 memset(rsa->qt.qInv.data, 0, rsa->qt.qInv.length);
1191 rte_free(rsa->qt.qInv.data);
/* Route session teardown to the per-algorithm clear helper; EC types
 * hold no allocated buffers and need no clearing.
 */
1196 session_clear_xform(struct qat_asym_session *qat_session)
1198 switch (qat_session->xform.xform_type) {
1199 case RTE_CRYPTO_ASYM_XFORM_MODEX:
1200 session_clear_modexp(&qat_session->xform.modex);
1202 case RTE_CRYPTO_ASYM_XFORM_MODINV:
1203 session_clear_modinv(&qat_session->xform.modinv);
1205 case RTE_CRYPTO_ASYM_XFORM_RSA:
1206 session_clear_rsa(&qat_session->xform.rsa);
/* Session-clear op: free/zeroize per-algorithm key buffers, then wipe
 * the whole private session area.
 */
1214 qat_asym_session_clear(struct rte_cryptodev *dev,
1215 struct rte_cryptodev_asym_session *session)
1217 void *sess_priv = session->sess_private_data;
1218 struct qat_asym_session *qat_session =
1219 (struct qat_asym_session *)sess_priv;
1222 session_clear_xform(qat_session);
1223 memset(qat_session, 0, qat_asym_session_get_private_size(dev));
/*
 * Cryptodev enqueue callback: thin wrapper delegating to the common QAT
 * queue-pair enqueue path with the asym request builder plugged in.
 * Returns the number of operations actually enqueued (<= nb_ops).
 */
uint16_t
qat_asym_crypto_enqueue_op_burst(void *qp, struct rte_crypto_op **ops,
		uint16_t nb_ops)
{
	return qat_enqueue_op_burst(qp, qat_asym_build_request, (void **)ops,
			nb_ops);
}
/*
 * Cryptodev dequeue callback: thin wrapper delegating to the common QAT
 * queue-pair dequeue path with the asym response handler plugged in.
 * Returns the number of operations actually dequeued (<= nb_ops).
 */
uint16_t
qat_asym_crypto_dequeue_op_burst(void *qp, struct rte_crypto_op **ops,
		uint16_t nb_ops)
{
	return qat_dequeue_op_burst(qp, (void **)ops, qat_asym_process_response,
			nb_ops);
}
1244 qat_asym_init_op_cookie(void *op_cookie)
1247 struct qat_asym_op_cookie *cookie = op_cookie;
1249 cookie->input_addr = rte_mempool_virt2iova(cookie) +
1250 offsetof(struct qat_asym_op_cookie,
1253 cookie->output_addr = rte_mempool_virt2iova(cookie) +
1254 offsetof(struct qat_asym_op_cookie,
1255 output_params_ptrs);
1257 for (j = 0; j < 8; j++) {
1258 cookie->input_params_ptrs[j] =
1259 rte_mempool_virt2iova(cookie) +
1260 offsetof(struct qat_asym_op_cookie,
1262 cookie->output_params_ptrs[j] =
1263 rte_mempool_virt2iova(cookie) +
1264 offsetof(struct qat_asym_op_cookie,
1270 qat_asym_dev_create(struct qat_pci_device *qat_pci_dev,
1271 struct qat_dev_cmd_param *qat_dev_cmd_param)
1273 struct qat_cryptodev_private *internals;
1274 struct rte_cryptodev *cryptodev;
1275 struct qat_device_info *qat_dev_instance =
1276 &qat_pci_devs[qat_pci_dev->qat_dev_id];
1277 struct rte_cryptodev_pmd_init_params init_params = {
1279 .socket_id = qat_dev_instance->pci_dev->device.numa_node,
1280 .private_data_size = sizeof(struct qat_cryptodev_private)
1282 struct qat_capabilities_info capa_info;
1283 const struct rte_cryptodev_capabilities *capabilities;
1284 const struct qat_crypto_gen_dev_ops *gen_dev_ops =
1285 &qat_asym_gen_dev_ops[qat_pci_dev->qat_dev_gen];
1286 char name[RTE_CRYPTODEV_NAME_MAX_LEN];
1287 char capa_memz_name[RTE_CRYPTODEV_NAME_MAX_LEN];
1291 snprintf(name, RTE_CRYPTODEV_NAME_MAX_LEN, "%s_%s",
1292 qat_pci_dev->name, "asym");
1293 QAT_LOG(DEBUG, "Creating QAT ASYM device %s\n", name);
1295 if (gen_dev_ops->cryptodev_ops == NULL) {
1296 QAT_LOG(ERR, "Device %s does not support asymmetric crypto",
1301 if (rte_eal_process_type() == RTE_PROC_PRIMARY) {
1302 qat_pci_dev->qat_asym_driver_id =
1304 } else if (rte_eal_process_type() == RTE_PROC_SECONDARY) {
1305 if (qat_pci_dev->qat_asym_driver_id !=
1306 qat_asym_driver_id) {
1308 "Device %s have different driver id than corresponding device in primary process",
1314 /* Populate subset device to use in cryptodev device creation */
1315 qat_dev_instance->asym_rte_dev.driver = &cryptodev_qat_asym_driver;
1316 qat_dev_instance->asym_rte_dev.numa_node =
1317 qat_dev_instance->pci_dev->device.numa_node;
1318 qat_dev_instance->asym_rte_dev.devargs = NULL;
1320 cryptodev = rte_cryptodev_pmd_create(name,
1321 &(qat_dev_instance->asym_rte_dev), &init_params);
1323 if (cryptodev == NULL)
1326 qat_dev_instance->asym_rte_dev.name = cryptodev->data->name;
1327 cryptodev->driver_id = qat_asym_driver_id;
1328 cryptodev->dev_ops = gen_dev_ops->cryptodev_ops;
1330 cryptodev->enqueue_burst = qat_asym_crypto_enqueue_op_burst;
1331 cryptodev->dequeue_burst = qat_asym_crypto_dequeue_op_burst;
1333 cryptodev->feature_flags = gen_dev_ops->get_feature_flags(qat_pci_dev);
1335 if (rte_eal_process_type() != RTE_PROC_PRIMARY)
1338 snprintf(capa_memz_name, RTE_CRYPTODEV_NAME_MAX_LEN,
1339 "QAT_ASYM_CAPA_GEN_%d",
1340 qat_pci_dev->qat_dev_gen);
1342 internals = cryptodev->data->dev_private;
1343 internals->qat_dev = qat_pci_dev;
1344 internals->dev_id = cryptodev->data->dev_id;
1346 capa_info = gen_dev_ops->get_capabilities(qat_pci_dev);
1347 capabilities = capa_info.data;
1348 capa_size = capa_info.size;
1350 internals->capa_mz = rte_memzone_lookup(capa_memz_name);
1351 if (internals->capa_mz == NULL) {
1352 internals->capa_mz = rte_memzone_reserve(capa_memz_name,
1353 capa_size, rte_socket_id(), 0);
1354 if (internals->capa_mz == NULL) {
1356 "Error allocating memzone for capabilities, "
1357 "destroying PMD for %s",
1359 rte_cryptodev_pmd_destroy(cryptodev);
1360 memset(&qat_dev_instance->asym_rte_dev, 0,
1361 sizeof(qat_dev_instance->asym_rte_dev));
1366 memcpy(internals->capa_mz->addr, capabilities, capa_size);
1367 internals->qat_dev_capabilities = internals->capa_mz->addr;
1370 if (qat_dev_cmd_param[i].name == NULL)
1372 if (!strcmp(qat_dev_cmd_param[i].name, ASYM_ENQ_THRESHOLD_NAME))
1373 internals->min_enq_burst_threshold =
1374 qat_dev_cmd_param[i].val;
1378 qat_pci_dev->asym_dev = internals;
1379 internals->service_type = QAT_SERVICE_ASYMMETRIC;
1380 QAT_LOG(DEBUG, "Created QAT ASYM device %s as cryptodev instance %d",
1381 cryptodev->data->name, internals->dev_id);
/*
 * Tear down the asym cryptodev created by qat_asym_dev_create().
 * Tolerates a NULL device (-ENODEV) and a device on which asym was
 * never created (returns 0).
 */
int
qat_asym_dev_destroy(struct qat_pci_device *qat_pci_dev)
{
	struct rte_cryptodev *cryptodev;

	if (qat_pci_dev == NULL)
		return -ENODEV;
	if (qat_pci_dev->asym_dev == NULL)
		return 0;
	/* The capability memzone is owned by the primary process only. */
	if (rte_eal_process_type() == RTE_PROC_PRIMARY)
		rte_memzone_free(qat_pci_dev->asym_dev->capa_mz);

	/* free crypto device */
	cryptodev = rte_cryptodev_pmd_get_dev(
			qat_pci_dev->asym_dev->dev_id);
	rte_cryptodev_pmd_destroy(cryptodev);
	/* Clear shared state so a later re-create starts clean. */
	qat_pci_devs[qat_pci_dev->qat_dev_id].asym_rte_dev.name = NULL;
	qat_pci_dev->asym_dev = NULL;

	return 0;
}
/* Holder object linking this PMD into the cryptodev driver framework. */
static struct cryptodev_driver qat_crypto_drv;
/* Register the asym driver so the framework assigns qat_asym_driver_id. */
RTE_PMD_REGISTER_CRYPTO_DRIVER(qat_crypto_drv,
		cryptodev_qat_asym_driver,
		qat_asym_driver_id);