1 /* SPDX-License-Identifier: BSD-3-Clause
2  * Copyright (C) 2019 Marvell International Ltd.
 */
5 #ifndef _CPT_UCODE_ASYM_H_
6 #define _CPT_UCODE_ASYM_H_
8 #include <rte_common.h>
9 #include <rte_crypto_asym.h>
10 #include <rte_malloc.h>
12 #include "cpt_common.h"
13 #include "cpt_hw_types.h"
14 #include "cpt_mcode_defines.h"
16 static __rte_always_inline void
17 cpt_modex_param_normalize(uint8_t **data, size_t *len)
21 /* Strip leading NUL bytes */
23 for (i = 0; i < *len; i++) {
32 static __rte_always_inline int
33 cpt_fill_modex_params(struct cpt_asym_sess_misc *sess,
34 struct rte_crypto_asym_xform *xform)
36 struct rte_crypto_modex_xform *ctx = &sess->mod_ctx;
37 size_t exp_len = xform->modex.exponent.length;
38 size_t mod_len = xform->modex.modulus.length;
39 uint8_t *exp = xform->modex.exponent.data;
40 uint8_t *mod = xform->modex.modulus.data;
42 cpt_modex_param_normalize(&mod, &mod_len);
43 cpt_modex_param_normalize(&exp, &exp_len);
45 if (unlikely(exp_len == 0 || mod_len == 0))
48 if (unlikely(exp_len > mod_len)) {
49 CPT_LOG_DP_ERR("Exponent length greater than modulus length is not supported");
53 /* Allocate buffer to hold modexp params */
54 ctx->modulus.data = rte_malloc(NULL, mod_len + exp_len, 0);
55 if (ctx->modulus.data == NULL) {
56 CPT_LOG_DP_ERR("Could not allocate buffer for modex params");
60 /* Set up modexp prime modulus and private exponent */
62 memcpy(ctx->modulus.data, mod, mod_len);
63 ctx->exponent.data = ctx->modulus.data + mod_len;
64 memcpy(ctx->exponent.data, exp, exp_len);
66 ctx->modulus.length = mod_len;
67 ctx->exponent.length = exp_len;
72 static __rte_always_inline int
73 cpt_fill_rsa_params(struct cpt_asym_sess_misc *sess,
74 struct rte_crypto_asym_xform *xform)
76 struct rte_crypto_rsa_priv_key_qt qt = xform->rsa.qt;
77 struct rte_crypto_rsa_xform *xfrm_rsa = &xform->rsa;
78 struct rte_crypto_rsa_xform *rsa = &sess->rsa_ctx;
79 size_t mod_len = xfrm_rsa->n.length;
80 size_t exp_len = xfrm_rsa->e.length;
84 /* Make sure key length used is not more than mod_len/2 */
85 if (qt.p.data != NULL)
86 len = (((mod_len / 2) < qt.p.length) ? len : qt.p.length);
88 /* Total size required for RSA key params(n,e,(q,dQ,p,dP,qInv)) */
89 total_size = mod_len + exp_len + 5 * len;
91 /* Allocate buffer to hold all RSA keys */
92 rsa->n.data = rte_malloc(NULL, total_size, 0);
93 if (rsa->n.data == NULL) {
94 CPT_LOG_DP_ERR("Could not allocate buffer for RSA keys");
98 /* Set up RSA prime modulus and public key exponent */
99 memcpy(rsa->n.data, xfrm_rsa->n.data, mod_len);
100 rsa->e.data = rsa->n.data + mod_len;
101 memcpy(rsa->e.data, xfrm_rsa->e.data, exp_len);
103 /* Private key in quintuple format */
105 rsa->qt.q.data = rsa->e.data + exp_len;
106 memcpy(rsa->qt.q.data, qt.q.data, qt.q.length);
107 rsa->qt.dQ.data = rsa->qt.q.data + qt.q.length;
108 memcpy(rsa->qt.dQ.data, qt.dQ.data, qt.dQ.length);
109 rsa->qt.p.data = rsa->qt.dQ.data + qt.dQ.length;
110 memcpy(rsa->qt.p.data, qt.p.data, qt.p.length);
111 rsa->qt.dP.data = rsa->qt.p.data + qt.p.length;
112 memcpy(rsa->qt.dP.data, qt.dP.data, qt.dP.length);
113 rsa->qt.qInv.data = rsa->qt.dP.data + qt.dP.length;
114 memcpy(rsa->qt.qInv.data, qt.qInv.data, qt.qInv.length);
116 rsa->qt.q.length = qt.q.length;
117 rsa->qt.dQ.length = qt.dQ.length;
118 rsa->qt.p.length = qt.p.length;
119 rsa->qt.dP.length = qt.dP.length;
120 rsa->qt.qInv.length = qt.qInv.length;
122 rsa->n.length = mod_len;
123 rsa->e.length = exp_len;
128 static __rte_always_inline int
129 cpt_fill_ec_params(struct cpt_asym_sess_misc *sess,
130 struct rte_crypto_asym_xform *xform)
132 struct cpt_asym_ec_ctx *ec = &sess->ec_ctx;
134 switch (xform->ec.curve_id) {
135 case RTE_CRYPTO_EC_GROUP_SECP192R1:
136 ec->curveid = CPT_EC_ID_P192;
138 case RTE_CRYPTO_EC_GROUP_SECP224R1:
139 ec->curveid = CPT_EC_ID_P224;
141 case RTE_CRYPTO_EC_GROUP_SECP256R1:
142 ec->curveid = CPT_EC_ID_P256;
144 case RTE_CRYPTO_EC_GROUP_SECP384R1:
145 ec->curveid = CPT_EC_ID_P384;
147 case RTE_CRYPTO_EC_GROUP_SECP521R1:
148 ec->curveid = CPT_EC_ID_P521;
151 /* Only NIST curves (FIPS 186-4) are supported */
152 CPT_LOG_DP_ERR("Unsupported curve");
159 static __rte_always_inline int
160 cpt_fill_asym_session_parameters(struct cpt_asym_sess_misc *sess,
161 struct rte_crypto_asym_xform *xform)
165 sess->xfrm_type = xform->xform_type;
167 switch (xform->xform_type) {
168 case RTE_CRYPTO_ASYM_XFORM_RSA:
169 ret = cpt_fill_rsa_params(sess, xform);
171 case RTE_CRYPTO_ASYM_XFORM_MODEX:
172 ret = cpt_fill_modex_params(sess, xform);
174 case RTE_CRYPTO_ASYM_XFORM_ECDSA:
176 case RTE_CRYPTO_ASYM_XFORM_ECPM:
177 ret = cpt_fill_ec_params(sess, xform);
180 CPT_LOG_DP_ERR("Unsupported transform type");
186 static __rte_always_inline void
187 cpt_free_asym_session_parameters(struct cpt_asym_sess_misc *sess)
189 struct rte_crypto_modex_xform *mod;
190 struct rte_crypto_rsa_xform *rsa;
192 switch (sess->xfrm_type) {
193 case RTE_CRYPTO_ASYM_XFORM_RSA:
194 rsa = &sess->rsa_ctx;
196 rte_free(rsa->n.data);
198 case RTE_CRYPTO_ASYM_XFORM_MODEX:
199 mod = &sess->mod_ctx;
200 if (mod->modulus.data)
201 rte_free(mod->modulus.data);
203 case RTE_CRYPTO_ASYM_XFORM_ECDSA:
205 case RTE_CRYPTO_ASYM_XFORM_ECPM:
208 CPT_LOG_DP_ERR("Invalid transform type");
213 static __rte_always_inline void
214 cpt_fill_req_comp_addr(struct cpt_request_info *req, buf_ptr_t addr)
216 void *completion_addr = RTE_PTR_ALIGN(addr.vaddr, 16);
218 /* Pointer to cpt_res_s, updated by CPT */
219 req->completion_addr = (volatile uint64_t *)completion_addr;
220 req->comp_baddr = addr.dma_addr +
221 RTE_PTR_DIFF(completion_addr, addr.vaddr);
222 *(req->completion_addr) = COMPLETION_CODE_INIT;
225 static __rte_always_inline int
226 cpt_modex_prep(struct asym_op_params *modex_params,
227 struct rte_crypto_modex_xform *mod)
229 struct cpt_request_info *req = modex_params->req;
230 phys_addr_t mphys = modex_params->meta_buf;
231 uint32_t exp_len = mod->exponent.length;
232 uint32_t mod_len = mod->modulus.length;
233 struct rte_crypto_mod_op_param mod_op;
234 struct rte_crypto_op **op;
235 vq_cmd_word0_t vq_cmd_w0;
236 uint64_t total_key_len;
242 /* Extracting modex op form params->req->op[1]->asym->modex */
243 op = RTE_PTR_ADD(req->op, sizeof(uintptr_t));
244 mod_op = ((struct rte_crypto_op *)*op)->asym->modex;
246 base_len = mod_op.base.length;
247 if (unlikely(base_len > mod_len)) {
248 CPT_LOG_DP_ERR("Base length greater than modulus length is not supported");
249 (*op)->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
253 total_key_len = mod_len + exp_len;
256 dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info));
257 memcpy(dptr, mod->modulus.data, total_key_len);
258 dptr += total_key_len;
259 memcpy(dptr, mod_op.base.data, base_len);
261 dlen = total_key_len + base_len;
267 vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_MODEX;
268 vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_MODEX;
271 vq_cmd_w0.s.param1 = mod_len;
272 vq_cmd_w0.s.param2 = exp_len;
273 vq_cmd_w0.s.dlen = dlen;
275 /* Filling cpt_request_info structure */
276 req->ist.ei0 = vq_cmd_w0.u64;
277 req->ist.ei1 = mphys;
278 req->ist.ei2 = mphys + dlen;
280 /* Result pointer to store result data */
283 /* alternate_caddr to write completion status of the microcode */
284 req->alternate_caddr = (uint64_t *)(dptr + rlen);
285 *req->alternate_caddr = ~((uint64_t)COMPLETION_CODE_INIT);
287 /* Preparing completion addr, +1 for completion code */
288 caddr.vaddr = dptr + rlen + 1;
289 caddr.dma_addr = mphys + dlen + rlen + 1;
291 cpt_fill_req_comp_addr(req, caddr);
295 static __rte_always_inline void
296 cpt_rsa_prep(struct asym_op_params *rsa_params,
297 struct rte_crypto_rsa_xform *rsa,
298 rte_crypto_param *crypto_param)
300 struct cpt_request_info *req = rsa_params->req;
301 phys_addr_t mphys = rsa_params->meta_buf;
302 struct rte_crypto_rsa_op_param rsa_op;
303 uint32_t mod_len = rsa->n.length;
304 uint32_t exp_len = rsa->e.length;
305 struct rte_crypto_op **op;
306 vq_cmd_word0_t vq_cmd_w0;
307 uint64_t total_key_len;
313 /* Extracting rsa op form params->req->op[1]->asym->rsa */
314 op = RTE_PTR_ADD(req->op, sizeof(uintptr_t));
315 rsa_op = ((struct rte_crypto_op *)*op)->asym->rsa;
316 total_key_len = mod_len + exp_len;
319 dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info));
320 memcpy(dptr, rsa->n.data, total_key_len);
321 dptr += total_key_len;
323 in_size = crypto_param->length;
324 memcpy(dptr, crypto_param->data, in_size);
327 dlen = total_key_len + in_size;
332 if (rsa_op.pad == RTE_CRYPTO_RSA_PADDING_NONE) {
333 /* Use mod_exp operation for no_padding type */
334 vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_MODEX;
335 vq_cmd_w0.s.param2 = exp_len;
337 if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_ENCRYPT) {
338 vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_PKCS_ENC;
339 /* Public key encrypt, use BT2*/
340 vq_cmd_w0.s.param2 = CPT_BLOCK_TYPE2 |
341 ((uint16_t)(exp_len) << 1);
342 } else if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_VERIFY) {
343 vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_PKCS_DEC;
344 /* Public key decrypt, use BT1 */
345 vq_cmd_w0.s.param2 = CPT_BLOCK_TYPE1;
346 /* + 2 for decrypted len */
351 vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_MODEX;
354 vq_cmd_w0.s.param1 = mod_len;
355 vq_cmd_w0.s.dlen = dlen;
357 /* Filling cpt_request_info structure */
358 req->ist.ei0 = vq_cmd_w0.u64;
359 req->ist.ei1 = mphys;
360 req->ist.ei2 = mphys + dlen;
362 /* Result pointer to store result data */
365 /* alternate_caddr to write completion status of the microcode */
366 req->alternate_caddr = (uint64_t *)(dptr + rlen);
367 *req->alternate_caddr = ~((uint64_t)COMPLETION_CODE_INIT);
369 /* Preparing completion addr, +1 for completion code */
370 caddr.vaddr = dptr + rlen + 1;
371 caddr.dma_addr = mphys + dlen + rlen + 1;
373 cpt_fill_req_comp_addr(req, caddr);
376 static __rte_always_inline void
377 cpt_rsa_crt_prep(struct asym_op_params *rsa_params,
378 struct rte_crypto_rsa_xform *rsa,
379 rte_crypto_param *crypto_param)
381 struct cpt_request_info *req = rsa_params->req;
382 phys_addr_t mphys = rsa_params->meta_buf;
383 uint32_t qInv_len = rsa->qt.qInv.length;
384 struct rte_crypto_rsa_op_param rsa_op;
385 uint32_t dP_len = rsa->qt.dP.length;
386 uint32_t dQ_len = rsa->qt.dQ.length;
387 uint32_t p_len = rsa->qt.p.length;
388 uint32_t q_len = rsa->qt.q.length;
389 uint32_t mod_len = rsa->n.length;
390 struct rte_crypto_op **op;
391 vq_cmd_word0_t vq_cmd_w0;
392 uint64_t total_key_len;
398 /* Extracting rsa op form params->req->op[1]->asym->rsa */
399 op = RTE_PTR_ADD(req->op, sizeof(uintptr_t));
400 rsa_op = ((struct rte_crypto_op *)*op)->asym->rsa;
401 total_key_len = p_len + q_len + dP_len + dQ_len + qInv_len;
404 dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info));
405 memcpy(dptr, rsa->qt.q.data, total_key_len);
406 dptr += total_key_len;
408 in_size = crypto_param->length;
409 memcpy(dptr, crypto_param->data, in_size);
412 dlen = total_key_len + in_size;
417 if (rsa_op.pad == RTE_CRYPTO_RSA_PADDING_NONE) {
418 /*Use mod_exp operation for no_padding type */
419 vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_MODEX_CRT;
421 if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_SIGN) {
422 vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_PKCS_ENC_CRT;
423 /* Private encrypt, use BT1 */
424 vq_cmd_w0.s.param2 = CPT_BLOCK_TYPE1;
425 } else if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_DECRYPT) {
426 vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_PKCS_DEC_CRT;
427 /* Private decrypt, use BT2 */
428 vq_cmd_w0.s.param2 = CPT_BLOCK_TYPE2;
429 /* + 2 for decrypted len */
434 vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_MODEX;
437 vq_cmd_w0.s.param1 = mod_len;
438 vq_cmd_w0.s.dlen = dlen;
440 /* Filling cpt_request_info structure */
441 req->ist.ei0 = vq_cmd_w0.u64;
442 req->ist.ei1 = mphys;
443 req->ist.ei2 = mphys + dlen;
445 /* Result pointer to store result data */
448 /* alternate_caddr to write completion status of the microcode */
449 req->alternate_caddr = (uint64_t *)(dptr + rlen);
450 *req->alternate_caddr = ~((uint64_t)COMPLETION_CODE_INIT);
452 /* Preparing completion addr, +1 for completion code */
453 caddr.vaddr = dptr + rlen + 1;
454 caddr.dma_addr = mphys + dlen + rlen + 1;
456 cpt_fill_req_comp_addr(req, caddr);
459 static __rte_always_inline int __rte_hot
460 cpt_enqueue_rsa_op(struct rte_crypto_op *op,
461 struct asym_op_params *params,
462 struct cpt_asym_sess_misc *sess)
464 struct rte_crypto_rsa_op_param *rsa = &op->asym->rsa;
466 switch (rsa->op_type) {
467 case RTE_CRYPTO_ASYM_OP_VERIFY:
468 cpt_rsa_prep(params, &sess->rsa_ctx, &rsa->sign);
470 case RTE_CRYPTO_ASYM_OP_ENCRYPT:
471 cpt_rsa_prep(params, &sess->rsa_ctx, &rsa->message);
473 case RTE_CRYPTO_ASYM_OP_SIGN:
474 cpt_rsa_crt_prep(params, &sess->rsa_ctx, &rsa->message);
476 case RTE_CRYPTO_ASYM_OP_DECRYPT:
477 cpt_rsa_crt_prep(params, &sess->rsa_ctx, &rsa->cipher);
480 op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
486 static const struct cpt_ec_group ec_grp[CPT_EC_ID_PMAX] = {
490 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
491 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE,
492 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF
498 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
499 0xFF, 0xFF, 0xFF, 0xFF, 0x99, 0xDE, 0xF8, 0x36,
500 0x14, 0x6B, 0xC9, 0xB1, 0xB4, 0xD2, 0x28, 0x31
508 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
509 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
510 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
511 0x00, 0x00, 0x00, 0x01
517 0XFF, 0XFF, 0XFF, 0XFF, 0XFF, 0XFF, 0XFF, 0XFF,
518 0XFF, 0XFF, 0XFF, 0XFF, 0XFF, 0XFF, 0X16, 0XA2,
519 0XE0, 0XB8, 0XF0, 0X3E, 0X13, 0XDD, 0X29, 0X45,
520 0X5C, 0X5C, 0X2A, 0X3D
528 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01,
529 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
530 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF,
531 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF
537 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00,
538 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
539 0xBC, 0xE6, 0xFA, 0xAD, 0xA7, 0x17, 0x9E, 0x84,
540 0xF3, 0xB9, 0xCA, 0xC2, 0xFC, 0x63, 0x25, 0x51
548 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
549 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
550 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
551 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE,
552 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00,
553 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF
559 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
560 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
561 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
562 0xC7, 0x63, 0x4D, 0x81, 0xF4, 0x37, 0x2D, 0xDF,
563 0x58, 0x1A, 0x0D, 0xB2, 0x48, 0xB0, 0xA7, 0x7A,
564 0xEC, 0xEC, 0x19, 0x6A, 0xCC, 0xC5, 0x29, 0x73
572 0x01, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
573 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
574 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
575 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
576 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
577 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
578 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
579 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
586 0x01, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
587 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
588 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
589 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
590 0xFF, 0xFA, 0x51, 0x86, 0x87, 0x83, 0xBF, 0x2F,
591 0x96, 0x6B, 0x7F, 0xCC, 0x01, 0x48, 0xF7, 0x09,
592 0xA5, 0xD0, 0x3B, 0xB5, 0xC9, 0xB8, 0x89, 0x9C,
593 0x47, 0xAE, 0xBB, 0x6F, 0xB7, 0x1E, 0x91, 0x38,
601 static __rte_always_inline void
602 cpt_ecdsa_sign_prep(struct rte_crypto_ecdsa_op_param *ecdsa,
603 struct asym_op_params *ecdsa_params,
604 uint64_t fpm_table_iova,
607 struct cpt_request_info *req = ecdsa_params->req;
608 uint16_t message_len = ecdsa->message.length;
609 phys_addr_t mphys = ecdsa_params->meta_buf;
610 uint16_t pkey_len = ecdsa->pkey.length;
611 uint16_t p_align, k_align, m_align;
612 uint16_t k_len = ecdsa->k.length;
613 uint16_t order_len, prime_len;
614 uint16_t o_offset, pk_offset;
615 vq_cmd_word0_t vq_cmd_w0;
620 prime_len = ec_grp[curveid].prime.length;
621 order_len = ec_grp[curveid].order.length;
623 /* Truncate input length to curve prime length */
624 if (message_len > prime_len)
625 message_len = prime_len;
626 m_align = RTE_ALIGN_CEIL(message_len, 8);
628 p_align = RTE_ALIGN_CEIL(prime_len, 8);
629 k_align = RTE_ALIGN_CEIL(k_len, 8);
631 /* Set write offset for order and private key */
632 o_offset = prime_len - order_len;
633 pk_offset = prime_len - pkey_len;
636 dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info));
639 * Set dlen = sum(sizeof(fpm address), ROUNDUP8(scalar len, input len),
640 * ROUNDUP8(priv key len, prime len, order len)).
641 * Please note, private key, order cannot exceed prime
642 * length i.e 3 * p_align.
644 dlen = sizeof(fpm_table_iova) + k_align + m_align + p_align * 3;
646 memset(dptr, 0, dlen);
648 *(uint64_t *)dptr = fpm_table_iova;
649 dptr += sizeof(fpm_table_iova);
651 memcpy(dptr, ecdsa->k.data, k_len);
654 memcpy(dptr, ec_grp[curveid].prime.data, prime_len);
657 memcpy(dptr + o_offset, ec_grp[curveid].order.data, order_len);
660 memcpy(dptr + pk_offset, ecdsa->pkey.data, pkey_len);
663 memcpy(dptr, ecdsa->message.data, message_len);
666 /* 2 * prime length (for sign r and s ) */
670 vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_ECDSA;
671 vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_ECDSA_SIGN;
674 vq_cmd_w0.s.param1 = curveid | (message_len << 8);
675 vq_cmd_w0.s.param2 = k_len;
676 vq_cmd_w0.s.dlen = dlen;
678 /* Filling cpt_request_info structure */
679 req->ist.ei0 = vq_cmd_w0.u64;
680 req->ist.ei1 = mphys;
681 req->ist.ei2 = mphys + dlen;
683 /* Result pointer to store result data */
686 /* alternate_caddr to write completion status of the microcode */
687 req->alternate_caddr = (uint64_t *)(dptr + rlen);
688 *req->alternate_caddr = ~((uint64_t)COMPLETION_CODE_INIT);
690 /* Preparing completion addr, +1 for completion code */
691 caddr.vaddr = dptr + rlen + 1;
692 caddr.dma_addr = mphys + dlen + rlen + 1;
694 cpt_fill_req_comp_addr(req, caddr);
697 static __rte_always_inline void
698 cpt_ecdsa_verify_prep(struct rte_crypto_ecdsa_op_param *ecdsa,
699 struct asym_op_params *ecdsa_params,
700 uint64_t fpm_table_iova,
703 struct cpt_request_info *req = ecdsa_params->req;
704 uint32_t message_len = ecdsa->message.length;
705 phys_addr_t mphys = ecdsa_params->meta_buf;
706 uint16_t o_offset, r_offset, s_offset;
707 uint16_t qx_len = ecdsa->q.x.length;
708 uint16_t qy_len = ecdsa->q.y.length;
709 uint16_t r_len = ecdsa->r.length;
710 uint16_t s_len = ecdsa->s.length;
711 uint16_t order_len, prime_len;
712 uint16_t qx_offset, qy_offset;
713 uint16_t p_align, m_align;
714 vq_cmd_word0_t vq_cmd_w0;
719 prime_len = ec_grp[curveid].prime.length;
720 order_len = ec_grp[curveid].order.length;
722 /* Truncate input length to curve prime length */
723 if (message_len > prime_len)
724 message_len = prime_len;
726 m_align = RTE_ALIGN_CEIL(message_len, 8);
727 p_align = RTE_ALIGN_CEIL(prime_len, 8);
729 /* Set write offset for sign, order and public key coordinates */
730 o_offset = prime_len - order_len;
731 qx_offset = prime_len - qx_len;
732 qy_offset = prime_len - qy_len;
733 r_offset = prime_len - r_len;
734 s_offset = prime_len - s_len;
737 dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info));
740 * Set dlen = sum(sizeof(fpm address), ROUNDUP8(message len),
741 * ROUNDUP8(sign len(r and s), public key len(x and y coordinates),
742 * prime len, order len)).
743 * Please note sign, public key and order can not excede prime length
746 dlen = sizeof(fpm_table_iova) + m_align + (6 * p_align);
748 memset(dptr, 0, dlen);
750 *(uint64_t *)dptr = fpm_table_iova;
751 dptr += sizeof(fpm_table_iova);
753 memcpy(dptr + r_offset, ecdsa->r.data, r_len);
756 memcpy(dptr + s_offset, ecdsa->s.data, s_len);
759 memcpy(dptr, ecdsa->message.data, message_len);
762 memcpy(dptr + o_offset, ec_grp[curveid].order.data, order_len);
765 memcpy(dptr, ec_grp[curveid].prime.data, prime_len);
768 memcpy(dptr + qx_offset, ecdsa->q.x.data, qx_len);
771 memcpy(dptr + qy_offset, ecdsa->q.y.data, qy_len);
775 vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_ECDSA;
776 vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_ECDSA_VERIFY;
779 vq_cmd_w0.s.param1 = curveid | (message_len << 8);
780 vq_cmd_w0.s.param2 = 0;
781 vq_cmd_w0.s.dlen = dlen;
783 /* Filling cpt_request_info structure */
784 req->ist.ei0 = vq_cmd_w0.u64;
785 req->ist.ei1 = mphys;
786 req->ist.ei2 = mphys + dlen;
788 /* Result pointer to store result data */
791 /* alternate_caddr to write completion status of the microcode */
792 req->alternate_caddr = (uint64_t *)dptr;
793 *req->alternate_caddr = ~((uint64_t)COMPLETION_CODE_INIT);
795 /* Preparing completion addr, +1 for completion code */
796 caddr.vaddr = dptr + 1;
797 caddr.dma_addr = mphys + dlen + 1;
799 cpt_fill_req_comp_addr(req, caddr);
802 static __rte_always_inline int __rte_hot
803 cpt_enqueue_ecdsa_op(struct rte_crypto_op *op,
804 struct asym_op_params *params,
805 struct cpt_asym_sess_misc *sess,
808 struct rte_crypto_ecdsa_op_param *ecdsa = &op->asym->ecdsa;
809 uint8_t curveid = sess->ec_ctx.curveid;
811 if (ecdsa->op_type == RTE_CRYPTO_ASYM_OP_SIGN)
812 cpt_ecdsa_sign_prep(ecdsa, params, fpm_iova[curveid], curveid);
813 else if (ecdsa->op_type == RTE_CRYPTO_ASYM_OP_VERIFY)
814 cpt_ecdsa_verify_prep(ecdsa, params, fpm_iova[curveid],
817 op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
823 static __rte_always_inline int
824 cpt_ecpm_prep(struct rte_crypto_ecpm_op_param *ecpm,
825 struct asym_op_params *asym_params,
828 struct cpt_request_info *req = asym_params->req;
829 phys_addr_t mphys = asym_params->meta_buf;
830 uint16_t x1_len = ecpm->p.x.length;
831 uint16_t y1_len = ecpm->p.y.length;
832 uint16_t scalar_align, p_align;
833 uint16_t dlen, rlen, prime_len;
834 uint16_t x1_offset, y1_offset;
835 vq_cmd_word0_t vq_cmd_w0;
839 prime_len = ec_grp[curveid].prime.length;
842 dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info));
844 p_align = RTE_ALIGN_CEIL(prime_len, 8);
845 scalar_align = RTE_ALIGN_CEIL(ecpm->scalar.length, 8);
848 * Set dlen = sum(ROUNDUP8(input point(x and y coordinates), prime,
850 * Please note point length is equivalent to prime of the curve
852 dlen = 3 * p_align + scalar_align;
854 x1_offset = prime_len - x1_len;
855 y1_offset = prime_len - y1_len;
857 memset(dptr, 0, dlen);
859 /* Copy input point, scalar, prime */
860 memcpy(dptr + x1_offset, ecpm->p.x.data, x1_len);
862 memcpy(dptr + y1_offset, ecpm->p.y.data, y1_len);
864 memcpy(dptr, ecpm->scalar.data, ecpm->scalar.length);
865 dptr += scalar_align;
866 memcpy(dptr, ec_grp[curveid].prime.data, ec_grp[curveid].prime.length);
870 vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_ECC;
871 vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_ECC_UMP;
874 vq_cmd_w0.s.param1 = curveid;
875 vq_cmd_w0.s.param2 = ecpm->scalar.length;
876 vq_cmd_w0.s.dlen = dlen;
878 /* Filling cpt_request_info structure */
879 req->ist.ei0 = vq_cmd_w0.u64;
880 req->ist.ei1 = mphys;
881 req->ist.ei2 = mphys + dlen;
883 /* Result buffer will store output point where length of
884 * each coordinate will be of prime length, thus set
885 * rlen to twice of prime length.
890 /* alternate_caddr to write completion status by the microcode */
891 req->alternate_caddr = (uint64_t *)(dptr + rlen);
892 *req->alternate_caddr = ~((uint64_t)COMPLETION_CODE_INIT);
894 /* Preparing completion addr, +1 for completion code */
895 caddr.vaddr = dptr + rlen + 1;
896 caddr.dma_addr = mphys + dlen + rlen + 1;
898 cpt_fill_req_comp_addr(req, caddr);
901 #endif /* _CPT_UCODE_ASYM_H_ */