/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(C) 2021 Marvell.
 */

#ifndef _CNXK_AE_H_
#define _CNXK_AE_H_

#include <rte_common.h>
#include <rte_crypto_asym.h>
#include <rte_malloc.h>

#include "roc_api.h"
#include "cnxk_cryptodev_ops.h"

struct cnxk_ae_sess {
	enum rte_crypto_asym_xform_type xfrm_type;
	union {
		struct rte_crypto_rsa_xform rsa_ctx;
		struct rte_crypto_modex_xform mod_ctx;
		struct roc_ae_ec_ctx ec_ctx;
	};
	uint64_t *cnxk_fpm_iova;
	struct roc_ae_ec_group **ec_grp;
};

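/*
 * Strip leading zero bytes from a mod-exp parameter so that only the
 * significant bytes are programmed into hardware.
 */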
static __rte_always_inline void
cnxk_ae_modex_param_normalize(uint8_t **data, size_t *len)
{
	size_t i;

	/* Strip leading NUL bytes */
	for (i = 0; i < *len; i++) {
		if ((*data)[i] != 0)
			break;
	}

	*data += i;
	*len -= i;
}

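/*
 * Copy the mod-exp modulus and exponent from the xform into a single
 * driver-owned buffer referenced by the session.
 */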
static __rte_always_inline int
cnxk_ae_fill_modex_params(struct cnxk_ae_sess *sess,
			  struct rte_crypto_asym_xform *xform)
{
	struct rte_crypto_modex_xform *ctx = &sess->mod_ctx;
	size_t exp_len = xform->modex.exponent.length;
	size_t mod_len = xform->modex.modulus.length;
	uint8_t *exp = xform->modex.exponent.data;
	uint8_t *mod = xform->modex.modulus.data;

	cnxk_ae_modex_param_normalize(&mod, &mod_len);
	cnxk_ae_modex_param_normalize(&exp, &exp_len);

	if (unlikely(exp_len == 0 || mod_len == 0))
		return -EINVAL;

	if (unlikely(exp_len > mod_len))
		return -ENOTSUP;

	/* Allocate buffer to hold modexp params */
	ctx->modulus.data = rte_malloc(NULL, mod_len + exp_len, 0);
	if (ctx->modulus.data == NULL)
		return -ENOMEM;

	/* Set up modexp modulus and exponent */
	memcpy(ctx->modulus.data, mod, mod_len);
	ctx->exponent.data = ctx->modulus.data + mod_len;
	memcpy(ctx->exponent.data, exp, exp_len);

	ctx->modulus.length = mod_len;
	ctx->exponent.length = exp_len;

	return 0;
}

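/*
 * Copy the RSA public key (n, e) and, when present, the CRT quintuple
 * (q, dQ, p, dP, qInv) from the xform into one driver-owned buffer
 * referenced by the session.
 */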
static __rte_always_inline int
cnxk_ae_fill_rsa_params(struct cnxk_ae_sess *sess,
			struct rte_crypto_asym_xform *xform)
{
	struct rte_crypto_rsa_priv_key_qt qt = xform->rsa.qt;
	struct rte_crypto_rsa_xform *xfrm_rsa = &xform->rsa;
	struct rte_crypto_rsa_xform *rsa = &sess->rsa_ctx;
	size_t mod_len = xfrm_rsa->n.length;
	size_t exp_len = xfrm_rsa->e.length;
	size_t len = (mod_len / 2);
	uint64_t total_size;

	if (qt.p.length != 0 && qt.p.data == NULL)
		return -EINVAL;

	/* Make sure key length used is not more than mod_len/2 */
	if (qt.p.data != NULL)
		len = RTE_MIN(len, qt.p.length);

	/* Total size required for RSA key params (n, e, (q, dQ, p, dP, qInv)) */
	total_size = mod_len + exp_len + 5 * len;

	/* Allocate buffer to hold all RSA keys */
	rsa->n.data = rte_malloc(NULL, total_size, 0);
	if (rsa->n.data == NULL)
		return -ENOMEM;

	/* Set up RSA modulus and public key exponent */
	memcpy(rsa->n.data, xfrm_rsa->n.data, mod_len);
	rsa->e.data = rsa->n.data + mod_len;
	memcpy(rsa->e.data, xfrm_rsa->e.data, exp_len);

	/* Private key in quintuple format */
	if (len != 0) {
		rsa->qt.q.data = rsa->e.data + exp_len;
		memcpy(rsa->qt.q.data, qt.q.data, qt.q.length);
		rsa->qt.dQ.data = rsa->qt.q.data + qt.q.length;
		memcpy(rsa->qt.dQ.data, qt.dQ.data, qt.dQ.length);
		rsa->qt.p.data = rsa->qt.dQ.data + qt.dQ.length;
		if (qt.p.data != NULL)
			memcpy(rsa->qt.p.data, qt.p.data, qt.p.length);
		rsa->qt.dP.data = rsa->qt.p.data + qt.p.length;
		memcpy(rsa->qt.dP.data, qt.dP.data, qt.dP.length);
		rsa->qt.qInv.data = rsa->qt.dP.data + qt.dP.length;
		memcpy(rsa->qt.qInv.data, qt.qInv.data, qt.qInv.length);

		rsa->qt.q.length = qt.q.length;
		rsa->qt.dQ.length = qt.dQ.length;
		rsa->qt.p.length = qt.p.length;
		rsa->qt.dP.length = qt.dP.length;
		rsa->qt.qInv.length = qt.qInv.length;
	}
	rsa->n.length = mod_len;
	rsa->e.length = exp_len;

	return 0;
}

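/* Map the API curve ID to the ROC curve ID used by hardware. */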
static __rte_always_inline int
cnxk_ae_fill_ec_params(struct cnxk_ae_sess *sess,
		       struct rte_crypto_asym_xform *xform)
{
	struct roc_ae_ec_ctx *ec = &sess->ec_ctx;

	switch (xform->ec.curve_id) {
	case RTE_CRYPTO_EC_GROUP_SECP192R1:
		ec->curveid = ROC_AE_EC_ID_P192;
		break;
	case RTE_CRYPTO_EC_GROUP_SECP224R1:
		ec->curveid = ROC_AE_EC_ID_P224;
		break;
	case RTE_CRYPTO_EC_GROUP_SECP256R1:
		ec->curveid = ROC_AE_EC_ID_P256;
		break;
	case RTE_CRYPTO_EC_GROUP_SECP384R1:
		ec->curveid = ROC_AE_EC_ID_P384;
		break;
	case RTE_CRYPTO_EC_GROUP_SECP521R1:
		ec->curveid = ROC_AE_EC_ID_P521;
		break;
	default:
		/* Only NIST curves (FIPS 186-4) are supported */
		return -EINVAL;
	}

	return 0;
}

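/* Populate the session context for the given asymmetric xform type. */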
static __rte_always_inline int
cnxk_ae_fill_session_parameters(struct cnxk_ae_sess *sess,
				struct rte_crypto_asym_xform *xform)
{
	int ret;

	sess->xfrm_type = xform->xform_type;

	switch (xform->xform_type) {
	case RTE_CRYPTO_ASYM_XFORM_RSA:
		ret = cnxk_ae_fill_rsa_params(sess, xform);
		break;
	case RTE_CRYPTO_ASYM_XFORM_MODEX:
		ret = cnxk_ae_fill_modex_params(sess, xform);
		break;
	case RTE_CRYPTO_ASYM_XFORM_ECDSA:
		/* Fall through */
	case RTE_CRYPTO_ASYM_XFORM_ECPM:
		ret = cnxk_ae_fill_ec_params(sess, xform);
		break;
	default:
		return -ENOTSUP;
	}

	return ret;
}

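/* Release any key buffers allocated while filling the session. */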
static __rte_always_inline void
cnxk_ae_free_session_parameters(struct cnxk_ae_sess *sess)
{
	struct rte_crypto_modex_xform *mod;
	struct rte_crypto_rsa_xform *rsa;

	switch (sess->xfrm_type) {
	case RTE_CRYPTO_ASYM_XFORM_RSA:
		rsa = &sess->rsa_ctx;
		if (rsa->n.data)
			rte_free(rsa->n.data);
		break;
	case RTE_CRYPTO_ASYM_XFORM_MODEX:
		mod = &sess->mod_ctx;
		if (mod->modulus.data)
			rte_free(mod->modulus.data);
		break;
	case RTE_CRYPTO_ASYM_XFORM_ECDSA:
		/* Fall through */
	case RTE_CRYPTO_ASYM_XFORM_ECPM:
	default:
		break;
	}
}

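/*
 * Build the mod-exp CPT instruction. The input (DPTR) layout is
 * modulus | exponent | base; the result is written at RPTR.
 */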
static __rte_always_inline int
cnxk_ae_modex_prep(struct rte_crypto_op *op, struct roc_ae_buf_ptr *meta_buf,
		   struct rte_crypto_modex_xform *mod, struct cpt_inst_s *inst)
{
	uint32_t exp_len = mod->exponent.length;
	uint32_t mod_len = mod->modulus.length;
	struct rte_crypto_mod_op_param mod_op;
	uint64_t total_key_len;
	union cpt_inst_w4 w4;
	uint32_t base_len;
	uint32_t dlen;
	uint8_t *dptr;

	mod_op = op->asym->modex;

	base_len = mod_op.base.length;
	if (unlikely(base_len > mod_len)) {
		op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
		return -ENOTSUP;
	}

	total_key_len = mod_len + exp_len;

	/* Input buffer */
	dptr = meta_buf->vaddr;
	inst->dptr = (uintptr_t)dptr;
	memcpy(dptr, mod->modulus.data, total_key_len);
	dptr += total_key_len;
	memcpy(dptr, mod_op.base.data, base_len);
	dptr += base_len;
	dlen = total_key_len + base_len;

	/* Setup opcodes */
	w4.s.opcode_major = ROC_AE_MAJOR_OP_MODEX;
	w4.s.opcode_minor = ROC_AE_MINOR_OP_MODEX;

	w4.s.param1 = mod_len;
	w4.s.param2 = exp_len;
	w4.s.dlen = dlen;

	inst->w4.u64 = w4.u64;
	inst->rptr = (uintptr_t)dptr;

	return 0;
}

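/*
 * Build an RSA public-key CPT instruction (encrypt/verify). The input
 * (DPTR) layout is n | e | input data; the result is written at RPTR.
 */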
static __rte_always_inline void
cnxk_ae_rsa_prep(struct rte_crypto_op *op, struct roc_ae_buf_ptr *meta_buf,
		 struct rte_crypto_rsa_xform *rsa,
		 rte_crypto_param *crypto_param, struct cpt_inst_s *inst)
{
	struct rte_crypto_rsa_op_param rsa_op;
	uint32_t mod_len = rsa->n.length;
	uint32_t exp_len = rsa->e.length;
	uint64_t total_key_len;
	union cpt_inst_w4 w4;
	uint32_t in_size;
	uint32_t dlen;
	uint8_t *dptr;

	rsa_op = op->asym->rsa;
	total_key_len = mod_len + exp_len;

	/* Input buffer */
	dptr = meta_buf->vaddr;
	inst->dptr = (uintptr_t)dptr;
	memcpy(dptr, rsa->n.data, total_key_len);
	dptr += total_key_len;

	in_size = crypto_param->length;
	memcpy(dptr, crypto_param->data, in_size);

	dptr += in_size;
	dlen = total_key_len + in_size;

	if (rsa_op.pad == RTE_CRYPTO_RSA_PADDING_NONE) {
		/* Use mod_exp operation for no_padding type */
		w4.s.opcode_minor = ROC_AE_MINOR_OP_MODEX;
		w4.s.param2 = exp_len;
	} else {
		if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_ENCRYPT) {
			w4.s.opcode_minor = ROC_AE_MINOR_OP_PKCS_ENC;
			/* Public key encrypt, use BT2 */
			w4.s.param2 = ROC_AE_CPT_BLOCK_TYPE2 |
				      ((uint16_t)(exp_len) << 1);
		} else if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_VERIFY) {
			w4.s.opcode_minor = ROC_AE_MINOR_OP_PKCS_DEC;
			/* Public key decrypt, use BT1 */
			w4.s.param2 = ROC_AE_CPT_BLOCK_TYPE1;
		}
	}

	w4.s.opcode_major = ROC_AE_MAJOR_OP_MODEX;

	w4.s.param1 = mod_len;
	w4.s.dlen = dlen;

	inst->w4.u64 = w4.u64;
	inst->rptr = (uintptr_t)dptr;
}

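/*
 * Build an RSA private-key CPT instruction using the CRT quintuple
 * (sign/decrypt). The input (DPTR) layout is q | dQ | p | dP | qInv |
 * input data; the result is written at RPTR.
 */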
static __rte_always_inline void
cnxk_ae_rsa_crt_prep(struct rte_crypto_op *op, struct roc_ae_buf_ptr *meta_buf,
		     struct rte_crypto_rsa_xform *rsa,
		     rte_crypto_param *crypto_param, struct cpt_inst_s *inst)
{
	uint32_t qInv_len = rsa->qt.qInv.length;
	struct rte_crypto_rsa_op_param rsa_op;
	uint32_t dP_len = rsa->qt.dP.length;
	uint32_t dQ_len = rsa->qt.dQ.length;
	uint32_t p_len = rsa->qt.p.length;
	uint32_t q_len = rsa->qt.q.length;
	uint32_t mod_len = rsa->n.length;
	uint64_t total_key_len;
	union cpt_inst_w4 w4;
	uint32_t in_size;
	uint32_t dlen;
	uint8_t *dptr;

	rsa_op = op->asym->rsa;
	total_key_len = p_len + q_len + dP_len + dQ_len + qInv_len;

	/* Input buffer */
	dptr = meta_buf->vaddr;
	inst->dptr = (uintptr_t)dptr;
	memcpy(dptr, rsa->qt.q.data, total_key_len);
	dptr += total_key_len;

	in_size = crypto_param->length;
	memcpy(dptr, crypto_param->data, in_size);

	dptr += in_size;
	dlen = total_key_len + in_size;

	if (rsa_op.pad == RTE_CRYPTO_RSA_PADDING_NONE) {
		/* Use mod_exp operation for no_padding type */
		w4.s.opcode_minor = ROC_AE_MINOR_OP_MODEX_CRT;
	} else {
		if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_SIGN) {
			w4.s.opcode_minor = ROC_AE_MINOR_OP_PKCS_ENC_CRT;
			/* Private encrypt, use BT1 */
			w4.s.param2 = ROC_AE_CPT_BLOCK_TYPE1;
		} else if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_DECRYPT) {
			w4.s.opcode_minor = ROC_AE_MINOR_OP_PKCS_DEC_CRT;
			/* Private decrypt, use BT2 */
			w4.s.param2 = ROC_AE_CPT_BLOCK_TYPE2;
		}
	}

	w4.s.opcode_major = ROC_AE_MAJOR_OP_MODEX;

	w4.s.param1 = mod_len;
	w4.s.dlen = dlen;

	inst->w4.u64 = w4.u64;
	inst->rptr = (uintptr_t)dptr;
}

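/* Dispatch an RSA operation to the public-key or CRT prep routine. */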
static __rte_always_inline int __rte_hot
cnxk_ae_enqueue_rsa_op(struct rte_crypto_op *op,
		       struct roc_ae_buf_ptr *meta_buf,
		       struct cnxk_ae_sess *sess, struct cpt_inst_s *inst)
{
	struct rte_crypto_rsa_op_param *rsa = &op->asym->rsa;

	switch (rsa->op_type) {
	case RTE_CRYPTO_ASYM_OP_VERIFY:
		cnxk_ae_rsa_prep(op, meta_buf, &sess->rsa_ctx, &rsa->sign,
				 inst);
		break;
	case RTE_CRYPTO_ASYM_OP_ENCRYPT:
		cnxk_ae_rsa_prep(op, meta_buf, &sess->rsa_ctx, &rsa->message,
				 inst);
		break;
	case RTE_CRYPTO_ASYM_OP_SIGN:
		cnxk_ae_rsa_crt_prep(op, meta_buf, &sess->rsa_ctx,
				     &rsa->message, inst);
		break;
	case RTE_CRYPTO_ASYM_OP_DECRYPT:
		cnxk_ae_rsa_crt_prep(op, meta_buf, &sess->rsa_ctx,
				     &rsa->cipher, inst);
		break;
	default:
		op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
		return -EINVAL;
	}

	return 0;
}

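/*
 * Build an ECDSA sign CPT instruction. The input (DPTR) layout is
 * FPM table address | k | prime | order | private key | message |
 * const a | const b, each field zero-padded to an 8-byte multiple.
 */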
static __rte_always_inline void
cnxk_ae_ecdsa_sign_prep(struct rte_crypto_ecdsa_op_param *ecdsa,
			struct roc_ae_buf_ptr *meta_buf,
			uint64_t fpm_table_iova, struct roc_ae_ec_group *ec_grp,
			uint8_t curveid, struct cpt_inst_s *inst)
{
	uint16_t message_len = ecdsa->message.length;
	uint16_t pkey_len = ecdsa->pkey.length;
	uint16_t p_align, k_align, m_align;
	uint16_t k_len = ecdsa->k.length;
	uint16_t order_len, prime_len;
	uint16_t o_offset, pk_offset;
	union cpt_inst_w4 w4;
	uint16_t dlen;
	uint8_t *dptr;

	prime_len = ec_grp->prime.length;
	order_len = ec_grp->order.length;

	/* Truncate input length to curve prime length */
	if (message_len > prime_len)
		message_len = prime_len;
	m_align = RTE_ALIGN_CEIL(message_len, 8);

	p_align = RTE_ALIGN_CEIL(prime_len, 8);
	k_align = RTE_ALIGN_CEIL(k_len, 8);

	/* Set write offset for order and private key */
	o_offset = prime_len - order_len;
	pk_offset = prime_len - pkey_len;

	/* Input buffer */
	dptr = meta_buf->vaddr;
	inst->dptr = (uintptr_t)dptr;

	/*
	 * Set dlen = sum(sizeof(fpm address), ROUNDUP8(scalar len, input len),
	 * ROUNDUP8(prime len, order len, priv key len, consta len,
	 * constb len)). Order and private key cannot exceed the prime length,
	 * so the last term is 5 * p_align.
	 */
	dlen = sizeof(fpm_table_iova) + k_align + m_align + p_align * 5;

	memset(dptr, 0, dlen);

	*(uint64_t *)dptr = fpm_table_iova;
	dptr += sizeof(fpm_table_iova);

	memcpy(dptr, ecdsa->k.data, k_len);
	dptr += k_align;

	memcpy(dptr, ec_grp->prime.data, prime_len);
	dptr += p_align;

	memcpy(dptr + o_offset, ec_grp->order.data, order_len);
	dptr += p_align;

	memcpy(dptr + pk_offset, ecdsa->pkey.data, pkey_len);
	dptr += p_align;

	memcpy(dptr, ecdsa->message.data, message_len);
	dptr += m_align;

	memcpy(dptr, ec_grp->consta.data, prime_len);
	dptr += p_align;

	memcpy(dptr, ec_grp->constb.data, prime_len);
	dptr += p_align;

	/* Setup opcodes */
	w4.s.opcode_major = ROC_AE_MAJOR_OP_ECDSA;
	w4.s.opcode_minor = ROC_AE_MINOR_OP_ECDSA_SIGN;

	w4.s.param1 = curveid | (message_len << 8);
	w4.s.param2 = (pkey_len << 8) | k_len;
	w4.s.dlen = dlen;

	inst->w4.u64 = w4.u64;
	inst->rptr = (uintptr_t)dptr;
}

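/*
 * Build an ECDSA verify CPT instruction. The input (DPTR) layout is
 * FPM table address | r | s | message | order | prime | Qx | Qy |
 * const a | const b, each field zero-padded to an 8-byte multiple.
 */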
static __rte_always_inline void
cnxk_ae_ecdsa_verify_prep(struct rte_crypto_ecdsa_op_param *ecdsa,
			  struct roc_ae_buf_ptr *meta_buf,
			  uint64_t fpm_table_iova,
			  struct roc_ae_ec_group *ec_grp, uint8_t curveid,
			  struct cpt_inst_s *inst)
{
	uint32_t message_len = ecdsa->message.length;
	uint16_t o_offset, r_offset, s_offset;
	uint16_t qx_len = ecdsa->q.x.length;
	uint16_t qy_len = ecdsa->q.y.length;
	uint16_t r_len = ecdsa->r.length;
	uint16_t s_len = ecdsa->s.length;
	uint16_t order_len, prime_len;
	uint16_t qx_offset, qy_offset;
	uint16_t p_align, m_align;
	union cpt_inst_w4 w4;
	uint16_t dlen;
	uint8_t *dptr;

	prime_len = ec_grp->prime.length;
	order_len = ec_grp->order.length;

	/* Truncate input length to curve prime length */
	if (message_len > prime_len)
		message_len = prime_len;

	m_align = RTE_ALIGN_CEIL(message_len, 8);
	p_align = RTE_ALIGN_CEIL(prime_len, 8);

	/* Set write offset for sign, order and public key coordinates */
	o_offset = prime_len - order_len;
	qx_offset = prime_len - qx_len;
	qy_offset = prime_len - qy_len;
	r_offset = prime_len - r_len;
	s_offset = prime_len - s_len;

	/* Input buffer */
	dptr = meta_buf->vaddr;
	inst->dptr = (uintptr_t)dptr;

	/*
	 * Set dlen = sum(sizeof(fpm address), ROUNDUP8(message len),
	 * ROUNDUP8(sign len(r and s), public key len(x and y coordinates),
	 * prime len, order len, consta len, constb len)).
	 * Sign, public key and order cannot exceed the prime length, so the
	 * last term is 8 * p_align.
	 */
	dlen = sizeof(fpm_table_iova) + m_align + (8 * p_align);

	memset(dptr, 0, dlen);

	*(uint64_t *)dptr = fpm_table_iova;
	dptr += sizeof(fpm_table_iova);

	memcpy(dptr + r_offset, ecdsa->r.data, r_len);
	dptr += p_align;

	memcpy(dptr + s_offset, ecdsa->s.data, s_len);
	dptr += p_align;

	memcpy(dptr, ecdsa->message.data, message_len);
	dptr += m_align;

	memcpy(dptr + o_offset, ec_grp->order.data, order_len);
	dptr += p_align;

	memcpy(dptr, ec_grp->prime.data, prime_len);
	dptr += p_align;

	memcpy(dptr + qx_offset, ecdsa->q.x.data, qx_len);
	dptr += p_align;

	memcpy(dptr + qy_offset, ecdsa->q.y.data, qy_len);
	dptr += p_align;

	memcpy(dptr, ec_grp->consta.data, prime_len);
	dptr += p_align;

	memcpy(dptr, ec_grp->constb.data, prime_len);
	dptr += p_align;

	/* Setup opcodes */
	w4.s.opcode_major = ROC_AE_MAJOR_OP_ECDSA;
	w4.s.opcode_minor = ROC_AE_MINOR_OP_ECDSA_VERIFY;

	w4.s.param1 = curveid | (message_len << 8);
	w4.s.param2 = 0;
	w4.s.dlen = dlen;

	inst->w4.u64 = w4.u64;
	inst->rptr = (uintptr_t)dptr;
}

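/* Dispatch an ECDSA operation to the sign or verify prep routine. */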
static __rte_always_inline int __rte_hot
cnxk_ae_enqueue_ecdsa_op(struct rte_crypto_op *op,
			 struct roc_ae_buf_ptr *meta_buf,
			 struct cnxk_ae_sess *sess, uint64_t *fpm_iova,
			 struct roc_ae_ec_group **ec_grp,
			 struct cpt_inst_s *inst)
{
	struct rte_crypto_ecdsa_op_param *ecdsa = &op->asym->ecdsa;
	uint8_t curveid = sess->ec_ctx.curveid;

	if (ecdsa->op_type == RTE_CRYPTO_ASYM_OP_SIGN)
		cnxk_ae_ecdsa_sign_prep(ecdsa, meta_buf, fpm_iova[curveid],
					ec_grp[curveid], curveid, inst);
	else if (ecdsa->op_type == RTE_CRYPTO_ASYM_OP_VERIFY)
		cnxk_ae_ecdsa_verify_prep(ecdsa, meta_buf, fpm_iova[curveid],
					  ec_grp[curveid], curveid, inst);
	else {
		op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
		return -EINVAL;
	}

	return 0;
}

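/*
 * Build an EC point multiplication CPT instruction. The input (DPTR)
 * layout is Px | Py | scalar | prime | const a | const b, each field
 * zero-padded to an 8-byte multiple.
 */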
static __rte_always_inline int
cnxk_ae_ecpm_prep(struct rte_crypto_ecpm_op_param *ecpm,
		  struct roc_ae_buf_ptr *meta_buf,
		  struct roc_ae_ec_group *ec_grp, uint8_t curveid,
		  struct cpt_inst_s *inst)
{
	uint16_t x1_len = ecpm->p.x.length;
	uint16_t y1_len = ecpm->p.y.length;
	uint16_t scalar_align, p_align;
	uint16_t x1_offset, y1_offset;
	uint16_t dlen, prime_len;
	union cpt_inst_w4 w4;
	uint8_t *dptr;

	prime_len = ec_grp->prime.length;

	/* Input buffer */
	dptr = meta_buf->vaddr;
	inst->dptr = (uintptr_t)dptr;

	p_align = RTE_ALIGN_CEIL(prime_len, 8);
	scalar_align = RTE_ALIGN_CEIL(ecpm->scalar.length, 8);

	/*
	 * Set dlen = sum(ROUNDUP8(input point (x and y coordinates), prime,
	 * consta, constb), ROUNDUP8(scalar len)).
	 * Point coordinates cannot exceed the prime length, so the first
	 * term is 5 * p_align.
	 */
	dlen = 5 * p_align + scalar_align;

	x1_offset = prime_len - x1_len;
	y1_offset = prime_len - y1_len;

	memset(dptr, 0, dlen);

	/* Copy input point, scalar, prime */
	memcpy(dptr + x1_offset, ecpm->p.x.data, x1_len);
	dptr += p_align;
	memcpy(dptr + y1_offset, ecpm->p.y.data, y1_len);
	dptr += p_align;
	memcpy(dptr, ecpm->scalar.data, ecpm->scalar.length);
	dptr += scalar_align;
	memcpy(dptr, ec_grp->prime.data, ec_grp->prime.length);
	dptr += p_align;
	memcpy(dptr, ec_grp->consta.data, ec_grp->consta.length);
	dptr += p_align;
	memcpy(dptr, ec_grp->constb.data, ec_grp->constb.length);
	dptr += p_align;

	/* Setup opcodes */
	w4.s.opcode_major = ROC_AE_MAJOR_OP_ECC;
	w4.s.opcode_minor = ROC_AE_MINOR_OP_ECC_UMP;

	w4.s.param1 = curveid;
	w4.s.param2 = ecpm->scalar.length;
	w4.s.dlen = dlen;

	inst->w4.u64 = w4.u64;
	inst->rptr = (uintptr_t)dptr;

	return 0;
}

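/*
 * Copy the RSA result from the completed instruction back into the
 * crypto op. For padded operations the output appears to start with a
 * 2-byte length field, which is what the copies below assume.
 */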
static __rte_always_inline void
cnxk_ae_dequeue_rsa_op(struct rte_crypto_op *cop, uint8_t *rptr,
		       struct rte_crypto_rsa_xform *rsa_ctx)
{
	struct rte_crypto_rsa_op_param *rsa = &cop->asym->rsa;

	switch (rsa->op_type) {
	case RTE_CRYPTO_ASYM_OP_ENCRYPT:
		rsa->cipher.length = rsa_ctx->n.length;
		memcpy(rsa->cipher.data, rptr, rsa->cipher.length);
		break;
	case RTE_CRYPTO_ASYM_OP_DECRYPT:
		if (rsa->pad == RTE_CRYPTO_RSA_PADDING_NONE) {
			rsa->message.length = rsa_ctx->n.length;
			memcpy(rsa->message.data, rptr, rsa->message.length);
		} else {
			/* Get length of decrypted output */
			rsa->message.length =
				rte_cpu_to_be_16(*((uint16_t *)rptr));
			/*
			 * Offset output data pointer by length field
			 * (2 bytes) and copy decrypted data.
			 */
			memcpy(rsa->message.data, rptr + 2,
			       rsa->message.length);
		}
		break;
	case RTE_CRYPTO_ASYM_OP_SIGN:
		rsa->sign.length = rsa_ctx->n.length;
		memcpy(rsa->sign.data, rptr, rsa->sign.length);
		break;
	case RTE_CRYPTO_ASYM_OP_VERIFY:
		if (rsa->pad == RTE_CRYPTO_RSA_PADDING_NONE) {
			rsa->sign.length = rsa_ctx->n.length;
			memcpy(rsa->sign.data, rptr, rsa->sign.length);
		} else {
			/* Get length of signed output */
			rsa->sign.length =
				rte_cpu_to_be_16(*((uint16_t *)rptr));
			/*
			 * Offset output data pointer by length field
			 * (2 bytes) and copy signed data.
			 */
			memcpy(rsa->sign.data, rptr + 2, rsa->sign.length);
		}
		if (memcmp(rsa->sign.data, rsa->message.data,
			   rsa->message.length)) {
			cop->status = RTE_CRYPTO_OP_STATUS_ERROR;
		}
		break;
	default:
		cop->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
		break;
	}
}

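/*
 * Copy the generated signature components (r, s) back into the crypto
 * op; verify operations produce no output data.
 */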
static __rte_always_inline void
cnxk_ae_dequeue_ecdsa_op(struct rte_crypto_ecdsa_op_param *ecdsa, uint8_t *rptr,
			 struct roc_ae_ec_ctx *ec,
			 struct roc_ae_ec_group **ec_grp)
{
	int prime_len = ec_grp[ec->curveid]->prime.length;

	if (ecdsa->op_type == RTE_CRYPTO_ASYM_OP_VERIFY)
		return;

	/* Separate out sign r and s components */
	memcpy(ecdsa->r.data, rptr, prime_len);
	memcpy(ecdsa->s.data, rptr + RTE_ALIGN_CEIL(prime_len, 8), prime_len);
	ecdsa->r.length = prime_len;
	ecdsa->s.length = prime_len;
}

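/* Copy the resulting EC point (x, y) back into the crypto op. */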
static __rte_always_inline void
cnxk_ae_dequeue_ecpm_op(struct rte_crypto_ecpm_op_param *ecpm, uint8_t *rptr,
			struct roc_ae_ec_ctx *ec,
			struct roc_ae_ec_group **ec_grp)
{
	int prime_len = ec_grp[ec->curveid]->prime.length;

	memcpy(ecpm->r.x.data, rptr, prime_len);
	memcpy(ecpm->r.y.data, rptr + RTE_ALIGN_CEIL(prime_len, 8), prime_len);
	ecpm->r.x.length = prime_len;
	ecpm->r.y.length = prime_len;
}

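/*
 * Get a meta buffer from the queue pair's pool and attach it to the
 * in-flight request so it can be released at dequeue time.
 */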
static __rte_always_inline void *
cnxk_ae_alloc_meta(struct roc_ae_buf_ptr *buf,
		   struct rte_mempool *cpt_meta_pool,
		   struct cpt_inflight_req *infl_req)
{
	uint8_t *mdata;

	if (unlikely(rte_mempool_get(cpt_meta_pool, (void **)&mdata) < 0))
		return NULL;

	buf->vaddr = mdata;

	infl_req->mdata = mdata;
	infl_req->op_flags |= CPT_OP_FLAGS_METABUF;

	return mdata;
}

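/*
 * Build the CPT instruction for one asymmetric crypto op. A rough usage
 * sketch (the actual call sites are the cnxk cryptodev enqueue/dequeue
 * paths):
 *
 *	cnxk_ae_fill_session_parameters(sess, xform);
 *	cnxk_ae_enqueue(qp, op, infl_req, &inst, sess);
 *	... after the hardware completes ...
 *	cnxk_ae_post_process(op, sess, rptr);
 */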
static __rte_always_inline int32_t __rte_hot
cnxk_ae_enqueue(struct cnxk_cpt_qp *qp, struct rte_crypto_op *op,
		struct cpt_inflight_req *infl_req, struct cpt_inst_s *inst,
		struct cnxk_ae_sess *sess)
{
	struct cpt_qp_meta_info *minfo = &qp->meta_info;
	struct rte_crypto_asym_op *asym_op = op->asym;
	struct roc_ae_buf_ptr meta_buf;
	void *mdata;
	int ret;

	mdata = cnxk_ae_alloc_meta(&meta_buf, minfo->pool, infl_req);
	if (mdata == NULL)
		return -ENOMEM;

	/* Reserve 8B for RPTR */
	meta_buf.vaddr = PLT_PTR_ADD(mdata, sizeof(uint64_t));

	switch (sess->xfrm_type) {
	case RTE_CRYPTO_ASYM_XFORM_MODEX:
		ret = cnxk_ae_modex_prep(op, &meta_buf, &sess->mod_ctx, inst);
		if (unlikely(ret))
			goto req_fail;
		break;
	case RTE_CRYPTO_ASYM_XFORM_RSA:
		ret = cnxk_ae_enqueue_rsa_op(op, &meta_buf, sess, inst);
		if (unlikely(ret))
			goto req_fail;
		break;
	case RTE_CRYPTO_ASYM_XFORM_ECDSA:
		ret = cnxk_ae_enqueue_ecdsa_op(op, &meta_buf, sess,
					       sess->cnxk_fpm_iova,
					       sess->ec_grp, inst);
		if (unlikely(ret))
			goto req_fail;
		break;
	case RTE_CRYPTO_ASYM_XFORM_ECPM:
		ret = cnxk_ae_ecpm_prep(&asym_op->ecpm, &meta_buf,
					sess->ec_grp[sess->ec_ctx.curveid],
					sess->ec_ctx.curveid, inst);
		if (unlikely(ret))
			goto req_fail;
		break;
	default:
		op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
		ret = -EINVAL;
		goto req_fail;
	}

	return 0;

req_fail:
	rte_mempool_put(minfo->pool, infl_req->mdata);
	return ret;
}

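/*
 * Copy results from the completed instruction back into the crypto op
 * based on the session xform type.
 */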
static __rte_always_inline void
cnxk_ae_post_process(struct rte_crypto_op *cop, struct cnxk_ae_sess *sess,
		     uint8_t *rptr)
{
	struct rte_crypto_asym_op *op = cop->asym;

	switch (sess->xfrm_type) {
	case RTE_CRYPTO_ASYM_XFORM_RSA:
		cnxk_ae_dequeue_rsa_op(cop, rptr, &sess->rsa_ctx);
		break;
	case RTE_CRYPTO_ASYM_XFORM_MODEX:
		op->modex.result.length = sess->mod_ctx.modulus.length;
		memcpy(op->modex.result.data, rptr, op->modex.result.length);
		break;
	case RTE_CRYPTO_ASYM_XFORM_ECDSA:
		cnxk_ae_dequeue_ecdsa_op(&op->ecdsa, rptr, &sess->ec_ctx,
					 sess->ec_grp);
		break;
	case RTE_CRYPTO_ASYM_XFORM_ECPM:
		cnxk_ae_dequeue_ecpm_op(&op->ecpm, rptr, &sess->ec_ctx,
					sess->ec_grp);
		break;
	default:
		cop->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
		break;
	}
}

#endif /* _CNXK_AE_H_ */