1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2019 Intel Corporation
8 #include "icp_qat_fw_pke.h"
9 #include "icp_qat_fw.h"
10 #include "qat_pke_functionality_arrays.h"
/* Helper intended to expand an array into the (array, element-count) pair
 * taken by qat_asym_get_sz_and_func_id().
 * NOTE(review): the outer parentheses make this a single comma expression,
 * not two arguments, when used as one macro argument — verify call sites
 * (callers visible in this file spell out sizeof(x)/sizeof(*x) manually).
 */
12 #define qat_asym_sz_2param(arg) (arg, sizeof(arg)/sizeof(*arg))
/* Scan a {bit-size, firmware-function-id} table (arr, arr_sz rows) for the
 * first row whose bit-size is >= *size, i.e. the smallest supported operand
 * size the request fits in.
 * NOTE(review): remainder of the loop body is not visible in this view —
 * presumably it rounds *size up to arr[i][0], stores arr[i][1] through
 * func_id and returns 0, with non-zero on no match; confirm against the
 * full source.
 */
14 static int qat_asym_get_sz_and_func_id(const uint32_t arr[][2],
15 size_t arr_sz, size_t *size, uint32_t *func_id)
19 for (i = 0; i < arr_sz; i++) {
20 if (*size <= arr[i][0]) {
/* Initialise a PKE request template: zero the whole request, then set the
 * service type to PKE and build the common header flags.  Used both for
 * per-session templates and for sessionless requests.
 */
29 static inline void qat_fill_req_tmpl(struct icp_qat_fw_pke_request *qat_req)
31 memset(qat_req, 0, sizeof(*qat_req));
32 qat_req->pke_hdr.service_type = ICP_QAT_FW_COMN_REQ_CPM_FW_PKE;
34 qat_req->pke_hdr.hdr_flags =
35 ICP_QAT_FW_COMN_HDR_FLAGS_BUILD
36 (ICP_QAT_FW_COMN_REQ_FLAG_SET);
/* Build the cached request template inside a session's private data so the
 * enqueue path can copy it instead of re-initialising every request.
 */
39 static inline void qat_asym_build_req_tmpl(void *sess_private_data)
41 struct icp_qat_fw_pke_request *qat_req;
42 struct qat_asym_session *session = sess_private_data;
44 qat_req = &session->req_tmpl;
45 qat_fill_req_tmpl(qat_req);
/* Variadic maximum of n size_t values; the first argument is consumed
 * before the loop, then the remaining n-1 are compared against it.
 * NOTE(review): the accumulator update and va_end are outside this view.
 * Callers must pass exactly n size_t-typed variadic arguments.
 */
48 static size_t max_of(int n, ...)
55 len = va_arg(args, size_t);
57 for (i = 0; i < n - 1; i++) {
58 num = va_arg(args, size_t);
/* Zero the first in_count input buffers and out_count output buffers of the
 * op cookie, alg_size bytes each — scrubs operand data (which may include
 * key material) after the operation completes.
 */
67 static void qat_clear_arrays(struct qat_asym_op_cookie *cookie,
68 int in_count, int out_count, int alg_size)
72 for (i = 0; i < in_count; i++)
73 memset(cookie->input_array[i], 0x0, alg_size);
74 for (i = 0; i < out_count; i++)
75 memset(cookie->output_array[i], 0x0, alg_size);
/* Scrub cookie buffers for an RSA-CRT operation.  Input 0 is the
 * full-width ciphertext (alg_size bytes); inputs 1..N-1 are the CRT
 * parameters (p, q, dP, dQ, qInv), each half the modulus width, hence
 * alg_size / 2.  Outputs are full width.
 */
78 static void qat_clear_arrays_crt(struct qat_asym_op_cookie *cookie,
83 memset(cookie->input_array[0], 0x0, alg_size);
84 for (i = 1; i < QAT_ASYM_RSA_QT_NUM_IN_PARAMS; i++)
85 memset(cookie->input_array[i], 0x0, alg_size / 2);
86 for (i = 0; i < QAT_ASYM_RSA_NUM_OUT_PARAMS; i++)
87 memset(cookie->output_array[i], 0x0, alg_size);
/* Dispatch to the right scrub routine for the xform type: modexp and
 * modinv use the plain in/out counts; RSA with a quintuple (CRT) key uses
 * the half-width CRT scrub, otherwise the plain RSA counts.
 * (RTE_RSA_KET_TYPE_QT is the historical spelling of this enum in the
 * DPDK API of this era — not a local typo.)
 */
90 static void qat_clear_arrays_by_alg(struct qat_asym_op_cookie *cookie,
91 struct rte_crypto_asym_xform *xform, int alg_size)
93 if (xform->xform_type == RTE_CRYPTO_ASYM_XFORM_MODEX)
94 qat_clear_arrays(cookie, QAT_ASYM_MODEXP_NUM_IN_PARAMS,
95 QAT_ASYM_MODEXP_NUM_OUT_PARAMS, alg_size);
96 else if (xform->xform_type == RTE_CRYPTO_ASYM_XFORM_MODINV)
97 qat_clear_arrays(cookie, QAT_ASYM_MODINV_NUM_IN_PARAMS,
98 QAT_ASYM_MODINV_NUM_OUT_PARAMS, alg_size);
99 else if (xform->xform_type == RTE_CRYPTO_ASYM_XFORM_RSA) {
100 if (xform->rsa.key_type == RTE_RSA_KET_TYPE_QT)
101 qat_clear_arrays_crt(cookie, alg_size);
103 qat_clear_arrays(cookie, QAT_ASYM_RSA_NUM_IN_PARAMS,
104 QAT_ASYM_RSA_NUM_OUT_PARAMS, alg_size);
/* Reject an all-zero big-number parameter (big-endian byte buffer).
 * If the least-significant byte is zero, scan the rest byte-by-byte;
 * otherwise take a fast path that checks the last 8 bytes as one word and,
 * if zero, scans the remaining prefix.
 * NOTE(review): the 8-byte fast path indexes n.data[n.length - 8] and so
 * assumes n.length >= 8 — a short-length guard is presumably in the lines
 * not visible here; the cast to uint64_t* is also an unaligned /
 * aliasing-unsafe read on strict platforms — confirm against full source.
 */
109 static int qat_asym_check_nonzero(rte_crypto_param n)
112 /* Not a case for any cryptographic function except for DH
113 * generator which very often can be of one byte length
117 if (n.data[n.length - 1] == 0x0) {
118 for (i = 0; i < n.length - 1; i++)
119 if (n.data[i] != 0x0)
121 if (i == n.length - 1)
124 } else if (*(uint64_t *)&n.data[
125 n.length - 8] == 0) {
126 /* Very likely it is zeroed modulus */
129 for (i = 0; i < n.length - 8; i++)
130 if (n.data[i] != 0x0)
132 if (i == n.length - 8)
/* Populate the cookie's firmware input buffers and the PKE request header
 * for one asymmetric operation, based on the xform type:
 *   - MODEX:  inputs are base, exponent, modulus (right-aligned /
 *             zero-padded to the rounded-up operand width);
 *   - MODINV: inputs are base and modulus; odd vs even modulus selects a
 *             different firmware function-id table;
 *   - RSA:    encrypt/verify use (msg-or-sig, e, n); decrypt/sign use the
 *             CRT quintuple (c, p, q, dP, dQ, qInv) or (c, d, n).
 * Operand width in bits (alg_size) is rounded up to a supported size via
 * qat_asym_get_sz_and_func_id(), which also yields the firmware func_id.
 * All operands are copied right-aligned: destination offset is
 * buffer_width - operand_length, so shorter operands are left-padded with
 * the zeroes already in the (pre-zeroed) cookie buffers.
 * NOTE(review): error-return and closing-brace lines of each branch are
 * not visible in this view; only NONE padding is accepted for RSA.
 */
140 qat_asym_fill_arrays(struct rte_crypto_asym_op *asym_op,
141 struct icp_qat_fw_pke_request *qat_req,
142 struct qat_asym_op_cookie *cookie,
143 struct rte_crypto_asym_xform *xform)
147 size_t alg_size_in_bytes;
148 uint32_t func_id = 0;
/* ---- Modular exponentiation ---- */
150 if (xform->xform_type == RTE_CRYPTO_ASYM_XFORM_MODEX) {
151 err = qat_asym_check_nonzero(xform->modex.modulus);
153 QAT_LOG(ERR, "Empty modulus in modular exponentiation,"
154 " aborting this operation");
/* Width is the widest of base/exponent/modulus, in bits (<< 3). */
158 alg_size_in_bytes = max_of(3, asym_op->modex.base.length,
159 xform->modex.exponent.length,
160 xform->modex.modulus.length);
161 alg_size = alg_size_in_bytes << 3;
163 if (qat_asym_get_sz_and_func_id(MOD_EXP_SIZE,
164 sizeof(MOD_EXP_SIZE)/sizeof(*MOD_EXP_SIZE),
165 &alg_size, &func_id)) {
/* alg_size was rounded up; recompute byte width and right-align inputs. */
169 alg_size_in_bytes = alg_size >> 3;
170 rte_memcpy(cookie->input_array[0] + alg_size_in_bytes -
171 asym_op->modex.base.length
172 , asym_op->modex.base.data,
173 asym_op->modex.base.length);
174 rte_memcpy(cookie->input_array[1] + alg_size_in_bytes -
175 xform->modex.exponent.length
176 , xform->modex.exponent.data,
177 xform->modex.exponent.length);
178 rte_memcpy(cookie->input_array[2] + alg_size_in_bytes -
179 xform->modex.modulus.length,
180 xform->modex.modulus.data,
181 xform->modex.modulus.length);
182 cookie->alg_size = alg_size;
183 qat_req->pke_hdr.cd_pars.func_id = func_id;
184 qat_req->input_param_count = QAT_ASYM_MODEXP_NUM_IN_PARAMS;
185 qat_req->output_param_count = QAT_ASYM_MODEXP_NUM_OUT_PARAMS;
186 #if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
187 QAT_DP_HEXDUMP_LOG(DEBUG, "ModExp base",
188 cookie->input_array[0],
190 QAT_DP_HEXDUMP_LOG(DEBUG, "ModExp exponent",
191 cookie->input_array[1],
193 QAT_DP_HEXDUMP_LOG(DEBUG, " ModExpmodulus",
194 cookie->input_array[2],
/* ---- Modular multiplicative inverse ---- */
197 } else if (xform->xform_type == RTE_CRYPTO_ASYM_XFORM_MODINV) {
198 err = qat_asym_check_nonzero(xform->modinv.modulus);
200 QAT_LOG(ERR, "Empty modulus in modular multiplicative"
201 " inverse, aborting this operation");
205 alg_size_in_bytes = max_of(2, asym_op->modinv.base.length,
206 xform->modinv.modulus.length,
207 alg_size = alg_size_in_bytes << 3;
/* Firmware has distinct function ids for odd vs even moduli;
 * parity is the low bit of the last (least-significant) byte. */
209 if (xform->modinv.modulus.data[
210 xform->modinv.modulus.length - 1] & 0x01) {
211 if (qat_asym_get_sz_and_func_id(MOD_INV_IDS_ODD,
212 sizeof(MOD_INV_IDS_ODD)/
213 sizeof(*MOD_INV_IDS_ODD),
214 &alg_size, &func_id)) {
218 if (qat_asym_get_sz_and_func_id(MOD_INV_IDS_EVEN,
219 sizeof(MOD_INV_IDS_EVEN)/
220 sizeof(*MOD_INV_IDS_EVEN),
221 &alg_size, &func_id)) {
226 alg_size_in_bytes = alg_size >> 3;
227 rte_memcpy(cookie->input_array[0] + alg_size_in_bytes -
228 asym_op->modinv.base.length
229 , asym_op->modinv.base.data,
230 asym_op->modinv.base.length);
231 rte_memcpy(cookie->input_array[1] + alg_size_in_bytes -
232 xform->modinv.modulus.length
233 , xform->modinv.modulus.data,
234 xform->modinv.modulus.length);
235 cookie->alg_size = alg_size;
236 qat_req->pke_hdr.cd_pars.func_id = func_id;
237 qat_req->input_param_count =
238 QAT_ASYM_MODINV_NUM_IN_PARAMS;
239 qat_req->output_param_count =
240 QAT_ASYM_MODINV_NUM_OUT_PARAMS;
241 #if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
242 QAT_DP_HEXDUMP_LOG(DEBUG, "ModInv base",
243 cookie->input_array[0],
245 QAT_DP_HEXDUMP_LOG(DEBUG, "ModInv modulus",
246 cookie->input_array[1],
/* ---- RSA ---- */
249 } else if (xform->xform_type == RTE_CRYPTO_ASYM_XFORM_RSA) {
250 err = qat_asym_check_nonzero(xform->rsa.n);
252 QAT_LOG(ERR, "Empty modulus in RSA"
253 " inverse, aborting this operation");
/* Operand width is fixed by the modulus length for RSA. */
257 alg_size_in_bytes = xform->rsa.n.length;
258 alg_size = alg_size_in_bytes << 3;
260 qat_req->input_param_count =
261 QAT_ASYM_RSA_NUM_IN_PARAMS;
262 qat_req->output_param_count =
263 QAT_ASYM_RSA_NUM_OUT_PARAMS;
/* Public-key path: encrypt and verify both use (input, e, n). */
265 if (asym_op->rsa.op_type == RTE_CRYPTO_ASYM_OP_ENCRYPT ||
266 asym_op->rsa.op_type ==
267 RTE_CRYPTO_ASYM_OP_VERIFY) {
269 if (qat_asym_get_sz_and_func_id(RSA_ENC_IDS,
271 sizeof(*RSA_ENC_IDS),
272 &alg_size, &func_id)) {
275 "Not supported RSA parameter size (key)");
278 alg_size_in_bytes = alg_size >> 3;
279 if (asym_op->rsa.op_type == RTE_CRYPTO_ASYM_OP_ENCRYPT) {
/* Only NONE padding is handled; other pad modes are rejected. */
280 switch (asym_op->rsa.pad) {
281 case RTE_CRYPTO_RSA_PADDING_NONE:
282 rte_memcpy(cookie->input_array[0] +
284 asym_op->rsa.message.length
285 , asym_op->rsa.message.data,
286 asym_op->rsa.message.length);
291 "Invalid RSA padding (Encryption)");
294 #if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
295 QAT_DP_HEXDUMP_LOG(DEBUG, "RSA Message",
296 cookie->input_array[0],
/* Verify path: signature goes into input 0 (copied from offset 0). */
300 switch (asym_op->rsa.pad) {
301 case RTE_CRYPTO_RSA_PADDING_NONE:
302 rte_memcpy(cookie->input_array[0],
303 asym_op->rsa.sign.data,
309 "Invalid RSA padding (Verify)");
313 #if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
314 QAT_DP_HEXDUMP_LOG(DEBUG, " RSA Signature",
315 cookie->input_array[0],
/* Inputs 1 and 2 are the public exponent e and modulus n. */
320 rte_memcpy(cookie->input_array[1] +
324 xform->rsa.e.length);
325 rte_memcpy(cookie->input_array[2] +
329 xform->rsa.n.length);
331 cookie->alg_size = alg_size;
332 qat_req->pke_hdr.cd_pars.func_id = func_id;
334 #if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
335 QAT_DP_HEXDUMP_LOG(DEBUG, "RSA Public Key",
336 cookie->input_array[1], alg_size_in_bytes);
337 QAT_DP_HEXDUMP_LOG(DEBUG, "RSA Modulus",
338 cookie->input_array[2], alg_size_in_bytes);
/* Private-key path: decrypt takes the ciphertext, sign the message. */
341 if (asym_op->rsa.op_type ==
342 RTE_CRYPTO_ASYM_OP_DECRYPT) {
343 switch (asym_op->rsa.pad) {
344 case RTE_CRYPTO_RSA_PADDING_NONE:
345 rte_memcpy(cookie->input_array[0]
346 + alg_size_in_bytes -
347 asym_op->rsa.cipher.length,
348 asym_op->rsa.cipher.data,
349 asym_op->rsa.cipher.length);
353 "Invalid padding of RSA (Decrypt)");
357 } else if (asym_op->rsa.op_type ==
358 RTE_CRYPTO_ASYM_OP_SIGN) {
359 switch (asym_op->rsa.pad) {
360 case RTE_CRYPTO_RSA_PADDING_NONE:
361 rte_memcpy(cookie->input_array[0]
362 + alg_size_in_bytes -
363 asym_op->rsa.message.length,
364 asym_op->rsa.message.data,
365 asym_op->rsa.message.length);
369 "Invalid padding of RSA (Signature)");
/* CRT (quintuple) private key: five half-width parameters in
 * inputs 1..5, each right-aligned within alg_size/2 bytes. */
373 if (xform->rsa.key_type == RTE_RSA_KET_TYPE_QT) {
375 qat_req->input_param_count =
376 QAT_ASYM_RSA_QT_NUM_IN_PARAMS;
377 if (qat_asym_get_sz_and_func_id(RSA_DEC_CRT_IDS,
378 sizeof(RSA_DEC_CRT_IDS)/
379 sizeof(*RSA_DEC_CRT_IDS),
380 &alg_size, &func_id)) {
383 alg_size_in_bytes = alg_size >> 3;
385 rte_memcpy(cookie->input_array[1] +
386 (alg_size_in_bytes >> 1) -
387 xform->rsa.qt.p.length
388 , xform->rsa.qt.p.data,
389 xform->rsa.qt.p.length);
390 rte_memcpy(cookie->input_array[2] +
391 (alg_size_in_bytes >> 1) -
392 xform->rsa.qt.q.length
393 , xform->rsa.qt.q.data,
394 xform->rsa.qt.q.length);
395 rte_memcpy(cookie->input_array[3] +
396 (alg_size_in_bytes >> 1) -
397 xform->rsa.qt.dP.length
398 , xform->rsa.qt.dP.data,
399 xform->rsa.qt.dP.length);
400 rte_memcpy(cookie->input_array[4] +
401 (alg_size_in_bytes >> 1) -
402 xform->rsa.qt.dQ.length
403 , xform->rsa.qt.dQ.data,
404 xform->rsa.qt.dQ.length);
405 rte_memcpy(cookie->input_array[5] +
406 (alg_size_in_bytes >> 1) -
407 xform->rsa.qt.qInv.length
408 , xform->rsa.qt.qInv.data,
409 xform->rsa.qt.qInv.length);
410 cookie->alg_size = alg_size;
411 qat_req->pke_hdr.cd_pars.func_id = func_id;
413 #if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
414 QAT_DP_HEXDUMP_LOG(DEBUG, "C",
415 cookie->input_array[0],
417 QAT_DP_HEXDUMP_LOG(DEBUG, "p",
418 cookie->input_array[1],
420 QAT_DP_HEXDUMP_LOG(DEBUG, "q",
421 cookie->input_array[2],
423 QAT_DP_HEXDUMP_LOG(DEBUG,
424 "dP", cookie->input_array[3],
426 QAT_DP_HEXDUMP_LOG(DEBUG,
427 "dQ", cookie->input_array[4],
429 QAT_DP_HEXDUMP_LOG(DEBUG,
430 "qInv", cookie->input_array[5],
/* Plain exponent private key: inputs 1 and 2 are d and n. */
433 } else if (xform->rsa.key_type ==
434 RTE_RSA_KEY_TYPE_EXP) {
435 if (qat_asym_get_sz_and_func_id(
438 sizeof(*RSA_DEC_IDS),
439 &alg_size, &func_id)) {
442 alg_size_in_bytes = alg_size >> 3;
443 rte_memcpy(cookie->input_array[1] +
447 xform->rsa.d.length);
448 rte_memcpy(cookie->input_array[2] +
452 xform->rsa.n.length);
453 #if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
454 QAT_DP_HEXDUMP_LOG(DEBUG, "RSA ciphertext",
455 cookie->input_array[0],
457 QAT_DP_HEXDUMP_LOG(DEBUG, "RSA d", cookie->input_array[1],
459 QAT_DP_HEXDUMP_LOG(DEBUG, "RSA n", cookie->input_array[2],
463 cookie->alg_size = alg_size;
464 qat_req->pke_hdr.cd_pars.func_id = func_id;
466 QAT_LOG(ERR, "Invalid RSA key type");
471 QAT_LOG(ERR, "Invalid asymmetric crypto xform");
/* Enqueue-side request builder: translate one rte_crypto_op into the PKE
 * firmware message at out_msg.
 * With a session: copy the 64-byte pre-built request template from the
 * session, then fill operand buffers from the session's xform.
 * Sessionless: build the template in place and use op->asym->xform.
 * On success the request's mid section is pointed at the cookie's
 * DMA-able input/output buffers and the op pointer is stashed as the
 * opaque field for the response path.
 * On fill error, the request is neutralised (param counts zeroed,
 * service type set to NULL) and the error recorded in cookie->error so the
 * response handler can surface it.
 * NOTE(review): return statements and some branch closings are outside
 * this view.
 */
478 qat_asym_build_request(void *in_op,
481 __rte_unused enum qat_device_gen qat_dev_gen)
483 struct qat_asym_session *ctx;
484 struct rte_crypto_op *op = (struct rte_crypto_op *)in_op;
485 struct rte_crypto_asym_op *asym_op = op->asym;
486 struct icp_qat_fw_pke_request *qat_req =
487 (struct icp_qat_fw_pke_request *)out_msg;
488 struct qat_asym_op_cookie *cookie =
489 (struct qat_asym_op_cookie *)op_cookie;
492 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
493 if (op->sess_type == RTE_CRYPTO_OP_WITH_SESSION) {
494 ctx = (struct qat_asym_session *)
495 get_asym_session_private_data(
496 op->asym->session, qat_asym_driver_id);
497 if (unlikely(ctx == NULL)) {
498 QAT_LOG(ERR, "Session has not been created for this device");
/* 64-byte template copy is cheaper than re-initialising the request. */
501 rte_mov64((uint8_t *)qat_req, (const uint8_t *)&(ctx->req_tmpl));
502 err = qat_asym_fill_arrays(asym_op, qat_req, cookie, ctx->xform);
504 op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
507 } else if (op->sess_type == RTE_CRYPTO_OP_SESSIONLESS) {
508 qat_fill_req_tmpl(qat_req);
509 err = qat_asym_fill_arrays(asym_op, qat_req, cookie,
512 op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
516 QAT_DP_LOG(ERR, "Invalid session/xform settings");
517 op->status = RTE_CRYPTO_OP_STATUS_INVALID_SESSION;
521 qat_req->pke_mid.opaque = (uint64_t)(uintptr_t)op;
522 qat_req->pke_mid.src_data_addr = cookie->input_addr;
523 qat_req->pke_mid.dest_data_addr = cookie->output_addr;
525 #if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
526 QAT_DP_HEXDUMP_LOG(DEBUG, "qat_req:", qat_req,
527 sizeof(struct icp_qat_fw_pke_request));
/* Error path: keep opaque so the response still maps back to the op,
 * but turn the request into a NULL-service no-op for the firmware. */
533 qat_req->pke_mid.opaque = (uint64_t)(uintptr_t)op;
535 #if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
536 QAT_DP_HEXDUMP_LOG(DEBUG, "qat_req:", qat_req,
537 sizeof(struct icp_qat_fw_pke_request));
540 qat_req->output_param_count = 0;
541 qat_req->input_param_count = 0;
542 qat_req->pke_hdr.service_type = ICP_QAT_FW_COMN_REQ_NULL;
543 cookie->error |= err;
/* Dequeue-side result collection: copy the firmware output buffer back
 * into the user's result field for the given xform type, set the op
 * status to SUCCESS where the copy happens, then scrub the cookie buffers.
 * Results come back right-aligned in alg_size_in_bytes-wide buffers, so
 * modexp/modinv copy the trailing n.length bytes into the tail of the
 * user's result buffer.  RSA results are copied from offset 0.
 * NOTE(review): several copy-length arguments and closing braces fall in
 * lines not visible here; only NONE padding is handled for RSA
 * verify/decrypt.
 */
548 static void qat_asym_collect_response(struct rte_crypto_op *rx_op,
549 struct qat_asym_op_cookie *cookie,
550 struct rte_crypto_asym_xform *xform)
552 size_t alg_size, alg_size_in_bytes = 0;
553 struct rte_crypto_asym_op *asym_op = rx_op->asym;
555 if (xform->xform_type == RTE_CRYPTO_ASYM_XFORM_MODEX) {
556 rte_crypto_param n = xform->modex.modulus;
558 alg_size = cookie->alg_size;
559 alg_size_in_bytes = alg_size >> 3;
560 uint8_t *modexp_result = asym_op->modex.result.data;
/* Only fill the result if nothing upstream already flagged an error. */
562 if (rx_op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED) {
563 rte_memcpy(modexp_result +
564 (asym_op->modex.result.length -
566 cookie->output_array[0] + alg_size_in_bytes
569 rx_op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
570 #if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
571 QAT_DP_HEXDUMP_LOG(DEBUG, "ModExp result",
572 cookie->output_array[0],
577 } else if (xform->xform_type == RTE_CRYPTO_ASYM_XFORM_MODINV) {
578 rte_crypto_param n = xform->modinv.modulus;
580 alg_size = cookie->alg_size;
581 alg_size_in_bytes = alg_size >> 3;
582 uint8_t *modinv_result = asym_op->modinv.result.data;
584 if (rx_op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED) {
585 rte_memcpy(modinv_result + (asym_op->modinv.result.length
587 cookie->output_array[0] + alg_size_in_bytes
588 - n.length, n.length);
589 rx_op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
590 #if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
591 QAT_DP_HEXDUMP_LOG(DEBUG, "ModInv result",
592 cookie->output_array[0],
596 } else if (xform->xform_type == RTE_CRYPTO_ASYM_XFORM_RSA) {
598 alg_size = cookie->alg_size;
599 alg_size_in_bytes = alg_size >> 3;
600 if (asym_op->rsa.op_type == RTE_CRYPTO_ASYM_OP_ENCRYPT ||
601 asym_op->rsa.op_type ==
602 RTE_CRYPTO_ASYM_OP_VERIFY) {
603 if (asym_op->rsa.op_type ==
604 RTE_CRYPTO_ASYM_OP_ENCRYPT) {
605 uint8_t *rsa_result = asym_op->rsa.cipher.data;
607 rte_memcpy(rsa_result,
608 cookie->output_array[0],
610 rx_op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
611 #if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
612 QAT_DP_HEXDUMP_LOG(DEBUG, "RSA Encrypted data",
613 cookie->output_array[0],
/* Verify: the recovered value is written back into rsa.cipher —
 * NOTE(review): comparison against the expected message presumably
 * happens elsewhere; confirm against full source. */
616 } else if (asym_op->rsa.op_type ==
617 RTE_CRYPTO_ASYM_OP_VERIFY) {
618 uint8_t *rsa_result = asym_op->rsa.cipher.data;
620 switch (asym_op->rsa.pad) {
621 case RTE_CRYPTO_RSA_PADDING_NONE:
622 rte_memcpy(rsa_result,
623 cookie->output_array[0],
626 RTE_CRYPTO_OP_STATUS_SUCCESS;
627 #if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
628 QAT_DP_HEXDUMP_LOG(DEBUG, "RSA Signature",
629 cookie->output_array[0],
634 QAT_LOG(ERR, "Padding not supported");
636 RTE_CRYPTO_OP_STATUS_ERROR;
641 if (asym_op->rsa.op_type ==
642 RTE_CRYPTO_ASYM_OP_DECRYPT) {
643 uint8_t *rsa_result = asym_op->rsa.message.data;
645 switch (asym_op->rsa.pad) {
646 case RTE_CRYPTO_RSA_PADDING_NONE:
647 rte_memcpy(rsa_result,
648 cookie->output_array[0],
651 RTE_CRYPTO_OP_STATUS_SUCCESS;
654 QAT_LOG(ERR, "Padding not supported");
656 RTE_CRYPTO_OP_STATUS_ERROR;
659 #if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
660 QAT_DP_HEXDUMP_LOG(DEBUG, "RSA Decrypted Message",
661 rsa_result, alg_size_in_bytes);
663 } else if (asym_op->rsa.op_type == RTE_CRYPTO_ASYM_OP_SIGN) {
664 uint8_t *rsa_result = asym_op->rsa.sign.data;
666 rte_memcpy(rsa_result,
667 cookie->output_array[0],
669 rx_op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
670 #if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
671 QAT_DP_HEXDUMP_LOG(DEBUG, "RSA Signature",
672 cookie->output_array[0],
/* Always scrub operand/result buffers — they may hold key material. */
678 qat_clear_arrays_by_alg(cookie, xform, alg_size_in_bytes);
/* Response-ring handler for one PKE reply: recover the rte_crypto_op from
 * the opaque field, map cookie/firmware error indications onto the op
 * status (first error wins — status is only changed while still
 * NOT_PROCESSED), then collect results using the session's xform or, for
 * sessionless ops, the xform attached to the op itself.
 */
682 qat_asym_process_response(void **op, uint8_t *resp,
685 struct qat_asym_session *ctx;
686 struct icp_qat_fw_pke_resp *resp_msg =
687 (struct icp_qat_fw_pke_resp *)resp;
688 struct rte_crypto_op *rx_op = (struct rte_crypto_op *)(uintptr_t)
690 struct qat_asym_op_cookie *cookie = op_cookie;
/* Error recorded at build time (see qat_asym_build_request). */
694 if (rx_op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED)
695 rx_op->status = RTE_CRYPTO_OP_STATUS_ERROR;
696 QAT_DP_LOG(ERR, "Cookie status returned error");
698 if (ICP_QAT_FW_PKE_RESP_PKE_STAT_GET(
699 resp_msg->pke_resp_hdr.resp_status.pke_resp_flags)) {
700 if (rx_op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED)
701 rx_op->status = RTE_CRYPTO_OP_STATUS_ERROR;
702 QAT_DP_LOG(ERR, "Asymmetric response status"
705 if (resp_msg->pke_resp_hdr.resp_status.comn_err_code) {
706 if (rx_op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED)
707 rx_op->status = RTE_CRYPTO_OP_STATUS_ERROR;
708 QAT_DP_LOG(ERR, "Asymmetric common status"
713 if (rx_op->sess_type == RTE_CRYPTO_OP_WITH_SESSION) {
714 ctx = (struct qat_asym_session *)get_asym_session_private_data(
715 rx_op->asym->session, qat_asym_driver_id);
716 qat_asym_collect_response(rx_op, cookie, ctx->xform);
717 } else if (rx_op->sess_type == RTE_CRYPTO_OP_SESSIONLESS) {
718 qat_asym_collect_response(rx_op, cookie, rx_op->asym->xform);
722 #if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
723 QAT_DP_HEXDUMP_LOG(DEBUG, "resp_msg:", resp_msg,
724 sizeof(struct icp_qat_fw_pke_resp));
/* Cryptodev op: configure an asymmetric session.  Allocates private data
 * from the mempool, validates per-xform mandatory lengths (modexp needs a
 * non-empty exponent and modulus, modinv a modulus, RSA a modulus n),
 * builds the cached request template, and attaches the private data to
 * the generic session handle.  On any validation error the private data
 * is returned to the mempool (error label at the end).
 * NOTE(review): session->xform stores the caller's xform POINTER, not a
 * copy — the xform must outlive the session; confirm this contract with
 * the API callers.
 */
729 qat_asym_session_configure(struct rte_cryptodev *dev,
730 struct rte_crypto_asym_xform *xform,
731 struct rte_cryptodev_asym_session *sess,
732 struct rte_mempool *mempool)
735 void *sess_private_data;
736 struct qat_asym_session *session;
738 if (rte_mempool_get(mempool, &sess_private_data)) {
740 "Couldn't get object from session mempool");
744 session = sess_private_data;
745 if (xform->xform_type == RTE_CRYPTO_ASYM_XFORM_MODEX) {
746 if (xform->modex.exponent.length == 0 ||
747 xform->modex.modulus.length == 0) {
748 QAT_LOG(ERR, "Invalid mod exp input parameter");
752 } else if (xform->xform_type == RTE_CRYPTO_ASYM_XFORM_MODINV) {
753 if (xform->modinv.modulus.length == 0) {
754 QAT_LOG(ERR, "Invalid mod inv input parameter");
758 } else if (xform->xform_type == RTE_CRYPTO_ASYM_XFORM_RSA) {
759 if (xform->rsa.n.length == 0) {
760 QAT_LOG(ERR, "Invalid rsa input parameter");
764 } else if (xform->xform_type >= RTE_CRYPTO_ASYM_XFORM_TYPE_LIST_END
765 || xform->xform_type <= RTE_CRYPTO_ASYM_XFORM_NONE) {
766 QAT_LOG(ERR, "Invalid asymmetric crypto xform");
770 QAT_LOG(ERR, "Asymmetric crypto xform not implemented");
775 session->xform = xform;
776 qat_asym_build_req_tmpl(sess_private_data);
777 set_asym_session_private_data(sess, dev->driver_id,
/* Error path: release the unused private-data object. */
782 rte_mempool_put(mempool, sess_private_data);
/* Cryptodev op: report the per-session private data size, rounded up to
 * an 8-byte multiple for alignment.
 */
786 unsigned int qat_asym_session_get_private_size(
787 struct rte_cryptodev *dev __rte_unused)
789 return RTE_ALIGN_CEIL(sizeof(struct qat_asym_session), 8);
/* Cryptodev op: tear down a session's private data — zero it (it can hold
 * key-related state) before detaching it from the session handle and
 * returning it to its originating mempool.
 * NOTE(review): the plain memset may be optimised away if the compiler can
 * prove the object is dead; a secure-wipe primitive would be stronger.
 */
793 qat_asym_session_clear(struct rte_cryptodev *dev,
794 struct rte_cryptodev_asym_session *sess)
796 uint8_t index = dev->driver_id;
797 void *sess_priv = get_asym_session_private_data(sess, index);
798 struct qat_asym_session *s = (struct qat_asym_session *)sess_priv;
801 memset(s, 0, qat_asym_session_get_private_size(dev));
802 struct rte_mempool *sess_mp = rte_mempool_from_obj(sess_priv);
804 set_asym_session_private_data(sess, index, NULL);
805 rte_mempool_put(sess_mp, sess_priv);