/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2019 Intel Corporation
 */

#include <stdarg.h>

#include "qat_asym.h"
#include "icp_qat_fw_pke.h"
#include "icp_qat_fw.h"
#include "qat_pke_functionality_arrays.h"

#define qat_asym_sz_2param(arg) (arg, sizeof(arg)/sizeof(*arg))

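/* Look up the PKE functionality table: find the first entry whose
 * operand size fits *size, round *size up to that entry's size and
 * return its firmware function id through *func_id.
 * Returns 0 on a match, -1 if no entry is large enough.
 */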
static int qat_asym_get_sz_and_func_id(const uint32_t arr[][2],
		size_t arr_sz, size_t *size, uint32_t *func_id)
{
	size_t i;

	for (i = 0; i < arr_sz; i++) {
		if (*size <= arr[i][0]) {
			*size = arr[i][0];
			*func_id = arr[i][1];
			return 0;
		}
	}
	return -1;
}

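/* Zero a firmware request and mark it as a PKE (public key engine)
 * service request with the common header flags set.
 */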
static inline void qat_fill_req_tmpl(struct icp_qat_fw_pke_request *qat_req)
{
	memset(qat_req, 0, sizeof(*qat_req));
	qat_req->pke_hdr.service_type = ICP_QAT_FW_COMN_REQ_CPM_FW_PKE;

	qat_req->pke_hdr.hdr_flags =
			ICP_QAT_FW_COMN_HDR_FLAGS_BUILD
			(ICP_QAT_FW_COMN_REQ_FLAG_SET);
}

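/* Pre-build the request template stored in the session private data. */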
static inline void qat_asym_build_req_tmpl(void *sess_private_data)
{
	struct icp_qat_fw_pke_request *qat_req;
	struct qat_asym_session *session = sess_private_data;

	qat_req = &session->req_tmpl;
	qat_fill_req_tmpl(qat_req);
}

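/* Return the largest of n variadic size_t arguments. */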
static size_t max_of(int n, ...)
{
	va_list args;
	size_t len = 0, num;
	int i;

	va_start(args, n);
	len = va_arg(args, size_t);

	for (i = 0; i < n - 1; i++) {
		num = va_arg(args, size_t);
		if (num > len)
			len = num;
	}
	va_end(args);

	return len;
}

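/* Scrub the per-operation input/output parameter buffers so operand
 * data does not linger in the cookie after the operation completes.
 */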
static void qat_clear_arrays(struct qat_asym_op_cookie *cookie,
		int in_count, int out_count, int in_size, int out_size)
{
	int i;

	for (i = 0; i < in_count; i++)
		memset(cookie->input_array[i], 0x0, in_size);
	for (i = 0; i < out_count; i++)
		memset(cookie->output_array[i], 0x0, out_size);
}

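/* Scrub only the parameter buffers used by the given algorithm. */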
static void qat_clear_arrays_by_alg(struct qat_asym_op_cookie *cookie,
		enum rte_crypto_asym_xform_type alg, int in_size, int out_size)
{
	if (alg == RTE_CRYPTO_ASYM_XFORM_MODEX)
		qat_clear_arrays(cookie, QAT_ASYM_MODEXP_NUM_IN_PARAMS,
				QAT_ASYM_MODEXP_NUM_OUT_PARAMS, in_size,
				out_size);
	else if (alg == RTE_CRYPTO_ASYM_XFORM_MODINV)
		qat_clear_arrays(cookie, QAT_ASYM_MODINV_NUM_IN_PARAMS,
				QAT_ASYM_MODINV_NUM_OUT_PARAMS, in_size,
				out_size);
}

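/* Reject an all-zero operand (typically a zeroed modulus). Operands
 * shorter than 8 bytes are scanned byte by byte; longer ones are first
 * probed through their trailing 64-bit word as a fast path.
 */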
static int qat_asym_check_nonzero(rte_crypto_param n)
{
	if (n.length < 8) {
		/* Not a case for any cryptographic function except for DH
		 * generator, which very often can be of one byte length
		 */
		size_t i;

		if (n.data[n.length - 1] == 0x0) {
			for (i = 0; i < n.length - 1; i++)
				if (n.data[i] != 0x0)
					break;
			if (i == n.length - 1)
				return -(EINVAL);
		}
	} else if (*(uint64_t *)&n.data[
				n.length - 8] == 0) {
		/* Very likely it is zeroed modulus */
		size_t i;

		for (i = 0; i < n.length - 8; i++)
			if (n.data[i] != 0x0)
				break;
		if (i == n.length - 8)
			return -(EINVAL);
	}

	return 0;
}

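/* Fill the request and the cookie input arrays for the given xform:
 * left-pad operands into the fixed-size firmware buffers, then set the
 * matching firmware function id and the input/output parameter counts.
 */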
static int
qat_asym_fill_arrays(struct rte_crypto_asym_op *asym_op,
		struct icp_qat_fw_pke_request *qat_req,
		struct qat_asym_op_cookie *cookie,
		struct rte_crypto_asym_xform *xform)
{
	int err = 0;
	size_t alg_size;
	size_t alg_size_in_bytes;
	uint32_t func_id = 0;

	if (xform->xform_type == RTE_CRYPTO_ASYM_XFORM_MODEX) {
		err = qat_asym_check_nonzero(xform->modex.modulus);
		if (err) {
			QAT_LOG(ERR, "Empty modulus in modular exponentiation,"
					" aborting this operation");
			return err;
		}

		alg_size_in_bytes = max_of(3, asym_op->modex.base.length,
				xform->modex.exponent.length,
				xform->modex.modulus.length);
		alg_size = alg_size_in_bytes << 3;

		if (qat_asym_get_sz_and_func_id(MOD_EXP_SIZE,
				sizeof(MOD_EXP_SIZE)/sizeof(*MOD_EXP_SIZE),
				&alg_size, &func_id)) {
			return -(EINVAL);
		}

		alg_size_in_bytes = alg_size >> 3;
		rte_memcpy(cookie->input_array[0] + alg_size_in_bytes -
				asym_op->modex.base.length,
				asym_op->modex.base.data,
				asym_op->modex.base.length);
		rte_memcpy(cookie->input_array[1] + alg_size_in_bytes -
				xform->modex.exponent.length,
				xform->modex.exponent.data,
				xform->modex.exponent.length);
		rte_memcpy(cookie->input_array[2] + alg_size_in_bytes -
				xform->modex.modulus.length,
				xform->modex.modulus.data,
				xform->modex.modulus.length);
		cookie->alg_size = alg_size;
		qat_req->pke_hdr.cd_pars.func_id = func_id;
		qat_req->input_param_count = QAT_ASYM_MODEXP_NUM_IN_PARAMS;
		qat_req->output_param_count = QAT_ASYM_MODEXP_NUM_OUT_PARAMS;
#if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
		QAT_DP_HEXDUMP_LOG(DEBUG, "ModExp base",
				cookie->input_array[0],
				alg_size_in_bytes);
		QAT_DP_HEXDUMP_LOG(DEBUG, "ModExp exponent",
				cookie->input_array[1],
				alg_size_in_bytes);
		QAT_DP_HEXDUMP_LOG(DEBUG, "ModExp modulus",
				cookie->input_array[2],
				alg_size_in_bytes);
#endif
	} else if (xform->xform_type == RTE_CRYPTO_ASYM_XFORM_MODINV) {
		err = qat_asym_check_nonzero(xform->modinv.modulus);
		if (err) {
			QAT_LOG(ERR, "Empty modulus in modular multiplicative"
					" inverse, aborting this operation");
			return err;
		}

		alg_size_in_bytes = max_of(2, asym_op->modinv.base.length,
				xform->modinv.modulus.length);
		alg_size = alg_size_in_bytes << 3;

		if (xform->modinv.modulus.data[
				xform->modinv.modulus.length - 1] & 0x01) {
			if (qat_asym_get_sz_and_func_id(MOD_INV_IDS_ODD,
					sizeof(MOD_INV_IDS_ODD)/
					sizeof(*MOD_INV_IDS_ODD),
					&alg_size, &func_id)) {
				return -(EINVAL);
			}
		} else {
			if (qat_asym_get_sz_and_func_id(MOD_INV_IDS_EVEN,
					sizeof(MOD_INV_IDS_EVEN)/
					sizeof(*MOD_INV_IDS_EVEN),
					&alg_size, &func_id)) {
				return -(EINVAL);
			}
		}

		alg_size_in_bytes = alg_size >> 3;
		rte_memcpy(cookie->input_array[0] + alg_size_in_bytes -
				asym_op->modinv.base.length,
				asym_op->modinv.base.data,
				asym_op->modinv.base.length);
		rte_memcpy(cookie->input_array[1] + alg_size_in_bytes -
				xform->modinv.modulus.length,
				xform->modinv.modulus.data,
				xform->modinv.modulus.length);
		cookie->alg_size = alg_size;
		qat_req->pke_hdr.cd_pars.func_id = func_id;
		qat_req->input_param_count =
				QAT_ASYM_MODINV_NUM_IN_PARAMS;
		qat_req->output_param_count =
				QAT_ASYM_MODINV_NUM_OUT_PARAMS;
#if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
		QAT_DP_HEXDUMP_LOG(DEBUG, "ModInv base",
				cookie->input_array[0],
				alg_size_in_bytes);
		QAT_DP_HEXDUMP_LOG(DEBUG, "ModInv modulus",
				cookie->input_array[1],
				alg_size_in_bytes);
#endif
	} else if (xform->xform_type == RTE_CRYPTO_ASYM_XFORM_RSA) {
		err = qat_asym_check_nonzero(xform->rsa.n);
		if (err) {
			QAT_LOG(ERR, "Empty modulus in RSA,"
					" aborting this operation");
			return err;
		}

		alg_size_in_bytes = xform->rsa.n.length;
		alg_size = alg_size_in_bytes << 3;

		qat_req->input_param_count =
				QAT_ASYM_RSA_NUM_IN_PARAMS;
		qat_req->output_param_count =
				QAT_ASYM_RSA_NUM_OUT_PARAMS;

		if (asym_op->rsa.op_type == RTE_CRYPTO_ASYM_OP_ENCRYPT ||
				asym_op->rsa.op_type ==
						RTE_CRYPTO_ASYM_OP_VERIFY) {

			if (qat_asym_get_sz_and_func_id(RSA_ENC_IDS,
					sizeof(RSA_ENC_IDS)/
					sizeof(*RSA_ENC_IDS),
					&alg_size, &func_id)) {
				QAT_LOG(ERR,
					"Unsupported RSA parameter size (key)");
				return -(EINVAL);
			}
			alg_size_in_bytes = alg_size >> 3;
			if (asym_op->rsa.op_type == RTE_CRYPTO_ASYM_OP_ENCRYPT) {
				switch (asym_op->rsa.pad) {
				case RTE_CRYPTO_RSA_PADDING_NONE:
					rte_memcpy(cookie->input_array[0] +
							alg_size_in_bytes -
							asym_op->rsa.message.length,
							asym_op->rsa.message.data,
							asym_op->rsa.message.length);
					break;
				default:
					QAT_LOG(ERR,
						"Invalid RSA padding (Encryption)");
					return -(EINVAL);
				}
#if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
				QAT_DP_HEXDUMP_LOG(DEBUG, "RSA Message",
						cookie->input_array[0],
						alg_size_in_bytes);
#endif
			} else {
				switch (asym_op->rsa.pad) {
				case RTE_CRYPTO_RSA_PADDING_NONE:
					rte_memcpy(cookie->input_array[0],
							asym_op->rsa.sign.data,
							alg_size_in_bytes);
					break;
				default:
					QAT_LOG(ERR,
						"Invalid RSA padding (Verify)");
					return -(EINVAL);
				}
#if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
				QAT_DP_HEXDUMP_LOG(DEBUG, "RSA Signature",
						cookie->input_array[0],
						alg_size_in_bytes);
#endif
			}
			rte_memcpy(cookie->input_array[1] +
					alg_size_in_bytes -
					xform->rsa.e.length,
					xform->rsa.e.data,
					xform->rsa.e.length);
			rte_memcpy(cookie->input_array[2] +
					alg_size_in_bytes -
					xform->rsa.n.length,
					xform->rsa.n.data,
					xform->rsa.n.length);

			cookie->alg_size = alg_size;
			qat_req->pke_hdr.cd_pars.func_id = func_id;

#if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
			QAT_DP_HEXDUMP_LOG(DEBUG, "RSA Public Key",
					cookie->input_array[1], alg_size_in_bytes);
			QAT_DP_HEXDUMP_LOG(DEBUG, "RSA Modulus",
					cookie->input_array[2], alg_size_in_bytes);
#endif
		} else {
			if (asym_op->rsa.op_type ==
					RTE_CRYPTO_ASYM_OP_DECRYPT) {
				switch (asym_op->rsa.pad) {
				case RTE_CRYPTO_RSA_PADDING_NONE:
					rte_memcpy(cookie->input_array[0]
							+ alg_size_in_bytes -
							asym_op->rsa.cipher.length,
							asym_op->rsa.cipher.data,
							asym_op->rsa.cipher.length);
					break;
				default:
					QAT_LOG(ERR,
						"Invalid RSA padding (Decrypt)");
					return -(EINVAL);
				}

			} else if (asym_op->rsa.op_type ==
					RTE_CRYPTO_ASYM_OP_SIGN) {
				switch (asym_op->rsa.pad) {
				case RTE_CRYPTO_RSA_PADDING_NONE:
					rte_memcpy(cookie->input_array[0]
							+ alg_size_in_bytes -
							asym_op->rsa.message.length,
							asym_op->rsa.message.data,
							asym_op->rsa.message.length);
					break;
				default:
					QAT_LOG(ERR,
						"Invalid RSA padding (Sign)");
					return -(EINVAL);
				}
			}
			if (xform->rsa.key_type == RTE_RSA_KET_TYPE_QT) {

				qat_req->input_param_count =
						QAT_ASYM_RSA_QT_NUM_IN_PARAMS;
				if (qat_asym_get_sz_and_func_id(RSA_DEC_CRT_IDS,
						sizeof(RSA_DEC_CRT_IDS)/
						sizeof(*RSA_DEC_CRT_IDS),
						&alg_size, &func_id)) {
					return -(EINVAL);
				}
				alg_size_in_bytes = alg_size >> 3;

				rte_memcpy(cookie->input_array[1] +
						(alg_size_in_bytes >> 1) -
						xform->rsa.qt.p.length,
						xform->rsa.qt.p.data,
						xform->rsa.qt.p.length);
				rte_memcpy(cookie->input_array[2] +
						(alg_size_in_bytes >> 1) -
						xform->rsa.qt.q.length,
						xform->rsa.qt.q.data,
						xform->rsa.qt.q.length);
				rte_memcpy(cookie->input_array[3] +
						(alg_size_in_bytes >> 1) -
						xform->rsa.qt.dP.length,
						xform->rsa.qt.dP.data,
						xform->rsa.qt.dP.length);
				rte_memcpy(cookie->input_array[4] +
						(alg_size_in_bytes >> 1) -
						xform->rsa.qt.dQ.length,
						xform->rsa.qt.dQ.data,
						xform->rsa.qt.dQ.length);
				rte_memcpy(cookie->input_array[5] +
						(alg_size_in_bytes >> 1) -
						xform->rsa.qt.qInv.length,
						xform->rsa.qt.qInv.data,
						xform->rsa.qt.qInv.length);
				cookie->alg_size = alg_size;
				qat_req->pke_hdr.cd_pars.func_id = func_id;

#if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
				QAT_DP_HEXDUMP_LOG(DEBUG, "C",
						cookie->input_array[0],
						alg_size_in_bytes);
				QAT_DP_HEXDUMP_LOG(DEBUG, "p",
						cookie->input_array[1],
						alg_size_in_bytes);
				QAT_DP_HEXDUMP_LOG(DEBUG, "q",
						cookie->input_array[2],
						alg_size_in_bytes);
				QAT_DP_HEXDUMP_LOG(DEBUG,
						"dP", cookie->input_array[3],
						alg_size_in_bytes);
				QAT_DP_HEXDUMP_LOG(DEBUG,
						"dQ", cookie->input_array[4],
						alg_size_in_bytes);
				QAT_DP_HEXDUMP_LOG(DEBUG,
						"qInv", cookie->input_array[5],
						alg_size_in_bytes);
#endif
			} else if (xform->rsa.key_type ==
					RTE_RSA_KEY_TYPE_EXP) {
				if (qat_asym_get_sz_and_func_id(
						RSA_DEC_IDS,
						sizeof(RSA_DEC_IDS)/
						sizeof(*RSA_DEC_IDS),
						&alg_size, &func_id)) {
					return -(EINVAL);
				}
				alg_size_in_bytes = alg_size >> 3;
				rte_memcpy(cookie->input_array[1] +
						alg_size_in_bytes -
						xform->rsa.d.length,
						xform->rsa.d.data,
						xform->rsa.d.length);
				rte_memcpy(cookie->input_array[2] +
						alg_size_in_bytes -
						xform->rsa.n.length,
						xform->rsa.n.data,
						xform->rsa.n.length);
#if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
				QAT_DP_HEXDUMP_LOG(DEBUG, "RSA ciphertext",
						cookie->input_array[0],
						alg_size_in_bytes);
				QAT_DP_HEXDUMP_LOG(DEBUG, "RSA d",
						cookie->input_array[1],
						alg_size_in_bytes);
				QAT_DP_HEXDUMP_LOG(DEBUG, "RSA n",
						cookie->input_array[2],
						alg_size_in_bytes);
#endif

				cookie->alg_size = alg_size;
				qat_req->pke_hdr.cd_pars.func_id = func_id;
			} else {
				QAT_LOG(ERR, "Invalid RSA key type");
				return -(EINVAL);
			}
		}
	} else {
		QAT_LOG(ERR, "Invalid asymmetric crypto xform");
		return -(EINVAL);
	}

	return 0;
}

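/* Build a PKE firmware request for a single crypto op: start from the
 * session request template (or a freshly built one for sessionless ops),
 * fill the operand arrays and point the request at the cookie's
 * input/output buffers. On failure the request is downgraded to a NULL
 * service request and the error is latched in the cookie so the
 * response handler can report it.
 */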
int
qat_asym_build_request(void *in_op,
			uint8_t *out_msg,
			void *op_cookie,
			__rte_unused enum qat_device_gen qat_dev_gen)
{
	struct qat_asym_session *ctx;
	struct rte_crypto_op *op = (struct rte_crypto_op *)in_op;
	struct rte_crypto_asym_op *asym_op = op->asym;
	struct icp_qat_fw_pke_request *qat_req =
			(struct icp_qat_fw_pke_request *)out_msg;
	struct qat_asym_op_cookie *cookie =
			(struct qat_asym_op_cookie *)op_cookie;
	int err = 0;

	op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
	if (op->sess_type == RTE_CRYPTO_OP_WITH_SESSION) {
		ctx = (struct qat_asym_session *)
				get_asym_session_private_data(
				op->asym->session, qat_asym_driver_id);
		if (unlikely(ctx == NULL)) {
			QAT_LOG(ERR, "Session has not been created for this device");
			goto error;
		}
		rte_mov64((uint8_t *)qat_req, (const uint8_t *)&(ctx->req_tmpl));
		err = qat_asym_fill_arrays(asym_op, qat_req, cookie, ctx->xform);
		if (err) {
			op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
			goto error;
		}
	} else if (op->sess_type == RTE_CRYPTO_OP_SESSIONLESS) {
		qat_fill_req_tmpl(qat_req);
		err = qat_asym_fill_arrays(asym_op, qat_req, cookie,
				op->asym->xform);
		if (err) {
			op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
			goto error;
		}
	} else {
		QAT_DP_LOG(ERR, "Invalid session/xform settings");
		op->status = RTE_CRYPTO_OP_STATUS_INVALID_SESSION;
		goto error;
	}

	qat_req->pke_mid.opaque = (uint64_t)(uintptr_t)op;
	qat_req->pke_mid.src_data_addr = cookie->input_addr;
	qat_req->pke_mid.dest_data_addr = cookie->output_addr;

#if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
	QAT_DP_HEXDUMP_LOG(DEBUG, "qat_req:", qat_req,
			sizeof(struct icp_qat_fw_pke_request));
#endif

	return 0;
error:
	qat_req->pke_mid.opaque = (uint64_t)(uintptr_t)op;

#if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
	QAT_DP_HEXDUMP_LOG(DEBUG, "qat_req:", qat_req,
			sizeof(struct icp_qat_fw_pke_request));
#endif
	qat_req->output_param_count = 0;
	qat_req->input_param_count = 0;
	qat_req->pke_hdr.service_type = ICP_QAT_FW_COMN_REQ_NULL;
	cookie->error |= err;

	return 0;
}

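/* Copy the device output into the op result buffers, update the op
 * status and scrub the cookie parameter buffers.
 */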
static void qat_asym_collect_response(struct rte_crypto_op *rx_op,
		struct qat_asym_op_cookie *cookie,
		struct rte_crypto_asym_xform *xform)
{
	size_t alg_size, alg_size_in_bytes = 0;
	struct rte_crypto_asym_op *asym_op = rx_op->asym;

	if (xform->xform_type == RTE_CRYPTO_ASYM_XFORM_MODEX) {
		rte_crypto_param n = xform->modex.modulus;

		alg_size = cookie->alg_size;
		alg_size_in_bytes = alg_size >> 3;
		uint8_t *modexp_result = asym_op->modex.result.data;

		if (rx_op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED) {
			rte_memcpy(modexp_result +
					(asym_op->modex.result.length -
						n.length),
					cookie->output_array[0] + alg_size_in_bytes
					- n.length, n.length);
			rx_op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
#if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
			QAT_DP_HEXDUMP_LOG(DEBUG, "ModExp result",
					cookie->output_array[0],
					alg_size_in_bytes);
#endif
		}
	} else if (xform->xform_type == RTE_CRYPTO_ASYM_XFORM_MODINV) {
		rte_crypto_param n = xform->modinv.modulus;

		alg_size = cookie->alg_size;
		alg_size_in_bytes = alg_size >> 3;
		uint8_t *modinv_result = asym_op->modinv.result.data;

		if (rx_op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED) {
			rte_memcpy(modinv_result + (asym_op->modinv.result.length
					- n.length),
					cookie->output_array[0] + alg_size_in_bytes
					- n.length, n.length);
			rx_op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
#if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
			QAT_DP_HEXDUMP_LOG(DEBUG, "ModInv result",
					cookie->output_array[0],
					alg_size_in_bytes);
#endif
		}
	} else if (xform->xform_type == RTE_CRYPTO_ASYM_XFORM_RSA) {

		alg_size = cookie->alg_size;
		alg_size_in_bytes = alg_size >> 3;
		if (asym_op->rsa.op_type == RTE_CRYPTO_ASYM_OP_ENCRYPT ||
				asym_op->rsa.op_type ==
					RTE_CRYPTO_ASYM_OP_VERIFY) {
			if (asym_op->rsa.op_type ==
					RTE_CRYPTO_ASYM_OP_ENCRYPT) {
				uint8_t *rsa_result = asym_op->rsa.cipher.data;

				rte_memcpy(rsa_result,
						cookie->output_array[0],
						alg_size_in_bytes);
				rx_op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
#if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
				QAT_DP_HEXDUMP_LOG(DEBUG, "RSA Encrypted data",
						cookie->output_array[0],
						alg_size_in_bytes);
#endif
			} else if (asym_op->rsa.op_type ==
					RTE_CRYPTO_ASYM_OP_VERIFY) {
				uint8_t *rsa_result = asym_op->rsa.cipher.data;

				switch (asym_op->rsa.pad) {
				case RTE_CRYPTO_RSA_PADDING_NONE:
					rte_memcpy(rsa_result,
							cookie->output_array[0],
							alg_size_in_bytes);
					rx_op->status =
						RTE_CRYPTO_OP_STATUS_SUCCESS;
#if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
					QAT_DP_HEXDUMP_LOG(DEBUG, "RSA Signature",
							cookie->output_array[0],
							alg_size_in_bytes);
#endif
					break;
				default:
					QAT_LOG(ERR, "Padding not supported");
					rx_op->status =
						RTE_CRYPTO_OP_STATUS_ERROR;
					break;
				}
			}
		} else {
			if (asym_op->rsa.op_type ==
					RTE_CRYPTO_ASYM_OP_DECRYPT) {
				uint8_t *rsa_result = asym_op->rsa.message.data;

				switch (asym_op->rsa.pad) {
				case RTE_CRYPTO_RSA_PADDING_NONE:
					rte_memcpy(rsa_result,
							cookie->output_array[0],
							alg_size_in_bytes);
					rx_op->status =
						RTE_CRYPTO_OP_STATUS_SUCCESS;
					break;
				default:
					QAT_LOG(ERR, "Padding not supported");
					rx_op->status =
						RTE_CRYPTO_OP_STATUS_ERROR;
					break;
				}
#if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
				QAT_DP_HEXDUMP_LOG(DEBUG, "RSA Decrypted Message",
						rsa_result, alg_size_in_bytes);
#endif
			} else if (asym_op->rsa.op_type == RTE_CRYPTO_ASYM_OP_SIGN) {
				uint8_t *rsa_result = asym_op->rsa.sign.data;

				rte_memcpy(rsa_result,
						cookie->output_array[0],
						alg_size_in_bytes);
				rx_op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
#if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
				QAT_DP_HEXDUMP_LOG(DEBUG, "RSA Signature",
						cookie->output_array[0],
						alg_size_in_bytes);
#endif
			}
		}
	}
	qat_clear_arrays_by_alg(cookie, xform->xform_type, alg_size_in_bytes,
			alg_size_in_bytes);
}

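/* Handle one PKE response: map it back to the originating op through
 * the opaque field, translate cookie and firmware status into the op
 * status, then collect the results.
 */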
void
qat_asym_process_response(void **op, uint8_t *resp,
		void *op_cookie)
{
	struct qat_asym_session *ctx;
	struct icp_qat_fw_pke_resp *resp_msg =
			(struct icp_qat_fw_pke_resp *)resp;
	struct rte_crypto_op *rx_op = (struct rte_crypto_op *)(uintptr_t)
			(resp_msg->opaque);
	struct qat_asym_op_cookie *cookie = op_cookie;

	if (cookie->error) {
		cookie->error = 0;
		if (rx_op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED)
			rx_op->status = RTE_CRYPTO_OP_STATUS_ERROR;
		QAT_DP_LOG(ERR, "Cookie status returned error");
	} else {
		if (ICP_QAT_FW_PKE_RESP_PKE_STAT_GET(
			resp_msg->pke_resp_hdr.resp_status.pke_resp_flags)) {
			if (rx_op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED)
				rx_op->status = RTE_CRYPTO_OP_STATUS_ERROR;
			QAT_DP_LOG(ERR, "Asymmetric response status"
					" returned error");
		}
		if (resp_msg->pke_resp_hdr.resp_status.comn_err_code) {
			if (rx_op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED)
				rx_op->status = RTE_CRYPTO_OP_STATUS_ERROR;
			QAT_DP_LOG(ERR, "Asymmetric common status"
					" returned error");
		}
	}

	if (rx_op->sess_type == RTE_CRYPTO_OP_WITH_SESSION) {
		ctx = (struct qat_asym_session *)get_asym_session_private_data(
				rx_op->asym->session, qat_asym_driver_id);
		qat_asym_collect_response(rx_op, cookie, ctx->xform);
	} else if (rx_op->sess_type == RTE_CRYPTO_OP_SESSIONLESS) {
		qat_asym_collect_response(rx_op, cookie, rx_op->asym->xform);
	}
	*op = rx_op;

#if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
	QAT_DP_HEXDUMP_LOG(DEBUG, "resp_msg:", resp_msg,
			sizeof(struct icp_qat_fw_pke_resp));
#endif
}

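/* Validate the xform, allocate session private data from the mempool
 * and pre-build the firmware request template for the session.
 */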
int
qat_asym_session_configure(struct rte_cryptodev *dev,
		struct rte_crypto_asym_xform *xform,
		struct rte_cryptodev_asym_session *sess,
		struct rte_mempool *mempool)
{
	int err = 0;
	void *sess_private_data;
	struct qat_asym_session *session;

	if (rte_mempool_get(mempool, &sess_private_data)) {
		QAT_LOG(ERR,
				"Couldn't get object from session mempool");
		return -ENOMEM;
	}

	session = sess_private_data;
	if (xform->xform_type == RTE_CRYPTO_ASYM_XFORM_MODEX) {
		if (xform->modex.exponent.length == 0 ||
				xform->modex.modulus.length == 0) {
			QAT_LOG(ERR, "Invalid mod exp input parameter");
			err = -EINVAL;
			goto error;
		}
	} else if (xform->xform_type == RTE_CRYPTO_ASYM_XFORM_MODINV) {
		if (xform->modinv.modulus.length == 0) {
			QAT_LOG(ERR, "Invalid mod inv input parameter");
			err = -EINVAL;
			goto error;
		}
	} else if (xform->xform_type == RTE_CRYPTO_ASYM_XFORM_RSA) {
		if (xform->rsa.n.length == 0) {
			QAT_LOG(ERR, "Invalid rsa input parameter");
			err = -EINVAL;
			goto error;
		}
	} else if (xform->xform_type >= RTE_CRYPTO_ASYM_XFORM_TYPE_LIST_END
			|| xform->xform_type <= RTE_CRYPTO_ASYM_XFORM_NONE) {
		QAT_LOG(ERR, "Invalid asymmetric crypto xform");
		err = -EINVAL;
		goto error;
	} else {
		QAT_LOG(ERR, "Asymmetric crypto xform not implemented");
		err = -EINVAL;
		goto error;
	}

	session->xform = xform;
	qat_asym_build_req_tmpl(sess_private_data);
	set_asym_session_private_data(sess, dev->driver_id,
			sess_private_data);

	return 0;
error:
	rte_mempool_put(mempool, sess_private_data);
	return err;
}

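/* Size of the session private data, padded to an 8-byte multiple. */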
unsigned int qat_asym_session_get_private_size(
		struct rte_cryptodev *dev __rte_unused)
{
	return RTE_ALIGN_CEIL(sizeof(struct qat_asym_session), 8);
}

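/* Zero the session private data and return it to its mempool. */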
void
qat_asym_session_clear(struct rte_cryptodev *dev,
		struct rte_cryptodev_asym_session *sess)
{
	uint8_t index = dev->driver_id;
	void *sess_priv = get_asym_session_private_data(sess, index);
	struct qat_asym_session *s = (struct qat_asym_session *)sess_priv;

	if (sess_priv) {
		memset(s, 0, qat_asym_session_get_private_size(dev));
		struct rte_mempool *sess_mp = rte_mempool_from_obj(sess_priv);

		set_asym_session_private_data(sess, index, NULL);
		rte_mempool_put(sess_mp, sess_priv);
	}
}