1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright (C) 2019 Marvell International Ltd.
5 #ifndef _CPT_UCODE_ASYM_H_
6 #define _CPT_UCODE_ASYM_H_
8 #include <rte_common.h>
9 #include <rte_crypto_asym.h>
10 #include <rte_malloc.h>
12 #include "cpt_common.h"
13 #include "cpt_hw_types.h"
14 #include "cpt_mcode_defines.h"
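/*
 * Asymmetric-crypto microcode helpers for the Marvell CPT engine:
 * session parameter setup/teardown and per-request input buffer and
 * completion address preparation for RSA, modular exponentiation,
 * ECDSA and EC point multiplication.
 */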
16 static __rte_always_inline void
17 cpt_modex_param_normalize(uint8_t **data, size_t *len)
21 /* Strip leading NUL bytes */
23 for (i = 0; i < *len; i++) {
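/*
 * The (elided) loop body stops at the first non-zero byte; *data is then
 * advanced past the stripped bytes and *len reduced accordingly, e.g. a
 * 4-byte operand {0x00, 0x00, 0x12, 0x34} normalizes to the 2-byte
 * operand {0x12, 0x34}.
 */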
32 static __rte_always_inline int
33 cpt_fill_modex_params(struct cpt_asym_sess_misc *sess,
34 struct rte_crypto_asym_xform *xform)
36 struct rte_crypto_modex_xform *ctx = &sess->mod_ctx;
37 size_t exp_len = xform->modex.exponent.length;
38 size_t mod_len = xform->modex.modulus.length;
39 uint8_t *exp = xform->modex.exponent.data;
40 uint8_t *mod = xform->modex.modulus.data;
42 cpt_modex_param_normalize(&mod, &mod_len);
43 cpt_modex_param_normalize(&exp, &exp_len);
45 if (unlikely(exp_len == 0 || mod_len == 0))
48 if (unlikely(exp_len > mod_len)) {
49 CPT_LOG_DP_ERR("Exponent length greater than modulus length is not supported");
53 /* Allocate buffer to hold modexp params */
54 ctx->modulus.data = rte_malloc(NULL, mod_len + exp_len, 0);
55 if (ctx->modulus.data == NULL) {
56 CPT_LOG_DP_ERR("Could not allocate buffer for modex params");
60 /* Set up modexp prime modulus and private exponent */
62 memcpy(ctx->modulus.data, mod, mod_len);
63 ctx->exponent.data = ctx->modulus.data + mod_len;
64 memcpy(ctx->exponent.data, exp, exp_len);
66 ctx->modulus.length = mod_len;
67 ctx->exponent.length = exp_len;
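/*
 * Note: modulus and exponent share a single rte_malloc() region;
 * ctx->exponent.data points mod_len bytes into ctx->modulus.data, so
 * freeing ctx->modulus.data (see cpt_free_asym_session_parameters)
 * releases both.
 */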
72 static __rte_always_inline int
73 cpt_fill_rsa_params(struct cpt_asym_sess_misc *sess,
74 struct rte_crypto_asym_xform *xform)
76 struct rte_crypto_rsa_priv_key_qt qt = xform->rsa.qt;
77 struct rte_crypto_rsa_xform *xfrm_rsa = &xform->rsa;
78 struct rte_crypto_rsa_xform *rsa = &sess->rsa_ctx;
79 size_t mod_len = xfrm_rsa->n.length;
80 size_t exp_len = xfrm_rsa->e.length;
84 /* Include the CRT quintuple only when the prime length does not exceed mod_len/2 */
85 if (qt.p.data != NULL)
86 len = (((mod_len / 2) < qt.p.length) ? len : qt.p.length);
88 /* Total size required for the RSA key params (n, e, (q, dQ, p, dP, qInv)) */
89 total_size = mod_len + exp_len + 5 * len;
91 /* Allocate buffer to hold all RSA keys */
92 rsa->n.data = rte_malloc(NULL, total_size, 0);
93 if (rsa->n.data == NULL) {
94 CPT_LOG_DP_ERR("Could not allocate buffer for RSA keys");
98 /* Set up RSA prime modulus and public key exponent */
99 memcpy(rsa->n.data, xfrm_rsa->n.data, mod_len);
100 rsa->e.data = rsa->n.data + mod_len;
101 memcpy(rsa->e.data, xfrm_rsa->e.data, exp_len);
103 /* Private key in quintuple format */
105 rsa->qt.q.data = rsa->e.data + exp_len;
106 memcpy(rsa->qt.q.data, qt.q.data, qt.q.length);
107 rsa->qt.dQ.data = rsa->qt.q.data + qt.q.length;
108 memcpy(rsa->qt.dQ.data, qt.dQ.data, qt.dQ.length);
109 rsa->qt.p.data = rsa->qt.dQ.data + qt.dQ.length;
110 memcpy(rsa->qt.p.data, qt.p.data, qt.p.length);
111 rsa->qt.dP.data = rsa->qt.p.data + qt.p.length;
112 memcpy(rsa->qt.dP.data, qt.dP.data, qt.dP.length);
113 rsa->qt.qInv.data = rsa->qt.dP.data + qt.dP.length;
114 memcpy(rsa->qt.qInv.data, qt.qInv.data, qt.qInv.length);
116 rsa->qt.q.length = qt.q.length;
117 rsa->qt.dQ.length = qt.dQ.length;
118 rsa->qt.p.length = qt.p.length;
119 rsa->qt.dP.length = qt.dP.length;
120 rsa->qt.qInv.length = qt.qInv.length;
122 rsa->n.length = mod_len;
123 rsa->e.length = exp_len;
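/*
 * Note: all RSA key material lives in the single rte_malloc() region
 * anchored at rsa->n.data, laid out back to back as
 * [ n | e | q | dQ | p | dP | qInv ]; freeing rsa->n.data releases the
 * whole key.
 */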
128 static __rte_always_inline int
129 cpt_fill_ec_params(struct cpt_asym_sess_misc *sess,
130 struct rte_crypto_asym_xform *xform)
132 struct cpt_asym_ec_ctx *ec = &sess->ec_ctx;
134 switch (xform->ec.curve_id) {
135 case RTE_CRYPTO_EC_GROUP_SECP192R1:
136 ec->curveid = CPT_EC_ID_P192;
138 case RTE_CRYPTO_EC_GROUP_SECP224R1:
139 ec->curveid = CPT_EC_ID_P224;
141 case RTE_CRYPTO_EC_GROUP_SECP256R1:
142 ec->curveid = CPT_EC_ID_P256;
144 case RTE_CRYPTO_EC_GROUP_SECP384R1:
145 ec->curveid = CPT_EC_ID_P384;
147 case RTE_CRYPTO_EC_GROUP_SECP521R1:
148 ec->curveid = CPT_EC_ID_P521;
151 /* Only NIST curves (FIPS 186-4) are supported */
152 CPT_LOG_DP_ERR("Unsupported curve");
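/*
 * The EC session context only records the CPT curve id; the actual curve
 * parameters are looked up in the ec_grp[] table when each request is
 * prepared.
 */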
159 static __rte_always_inline int
160 cpt_fill_asym_session_parameters(struct cpt_asym_sess_misc *sess,
161 struct rte_crypto_asym_xform *xform)
165 sess->xfrm_type = xform->xform_type;
167 switch (xform->xform_type) {
168 case RTE_CRYPTO_ASYM_XFORM_RSA:
169 ret = cpt_fill_rsa_params(sess, xform);
171 case RTE_CRYPTO_ASYM_XFORM_MODEX:
172 ret = cpt_fill_modex_params(sess, xform);
174 case RTE_CRYPTO_ASYM_XFORM_ECDSA:
176 case RTE_CRYPTO_ASYM_XFORM_ECPM:
177 ret = cpt_fill_ec_params(sess, xform);
180 CPT_LOG_DP_ERR("Unsupported transform type");
186 static __rte_always_inline void
187 cpt_free_asym_session_parameters(struct cpt_asym_sess_misc *sess)
189 struct rte_crypto_modex_xform *mod;
190 struct rte_crypto_rsa_xform *rsa;
192 switch (sess->xfrm_type) {
193 case RTE_CRYPTO_ASYM_XFORM_RSA:
194 rsa = &sess->rsa_ctx;
196 rte_free(rsa->n.data);
198 case RTE_CRYPTO_ASYM_XFORM_MODEX:
199 mod = &sess->mod_ctx;
200 if (mod->modulus.data)
201 rte_free(mod->modulus.data);
203 case RTE_CRYPTO_ASYM_XFORM_ECDSA:
205 case RTE_CRYPTO_ASYM_XFORM_ECPM:
208 CPT_LOG_DP_ERR("Invalid transform type");
213 static __rte_always_inline void
214 cpt_fill_req_comp_addr(struct cpt_request_info *req, buf_ptr_t addr)
216 void *completion_addr = RTE_PTR_ALIGN(addr.vaddr, 16);
218 /* Pointer to cpt_res_s, updated by CPT */
219 req->completion_addr = (volatile uint64_t *)completion_addr;
220 req->comp_baddr = addr.dma_addr +
221 RTE_PTR_DIFF(completion_addr, addr.vaddr);
222 *(req->completion_addr) = COMPLETION_CODE_INIT;
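/*
 * The completion word is placed at a 16-byte aligned offset within the
 * caller-provided buffer, with comp_baddr holding the matching DMA
 * address. Seeding it with COMPLETION_CODE_INIT allows completion to be
 * detected once the CPT engine overwrites it with the final status.
 */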
225 static __rte_always_inline int
226 cpt_modex_prep(struct asym_op_params *modex_params,
227 struct rte_crypto_modex_xform *mod)
229 struct cpt_request_info *req = modex_params->req;
230 phys_addr_t mphys = modex_params->meta_buf;
231 uint32_t exp_len = mod->exponent.length;
232 uint32_t mod_len = mod->modulus.length;
233 struct rte_crypto_mod_op_param mod_op;
234 struct rte_crypto_op **op;
235 vq_cmd_word0_t vq_cmd_w0;
236 uint64_t total_key_len;
242 /* Extracting modex op from params->req->op[1]->asym->modex */
243 op = RTE_PTR_ADD(req->op, sizeof(uintptr_t));
244 mod_op = ((struct rte_crypto_op *)*op)->asym->modex;
246 base_len = mod_op.base.length;
247 if (unlikely(base_len > mod_len)) {
248 CPT_LOG_DP_ERR("Base length greater than modulus length is not supported");
249 (*op)->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
253 total_key_len = mod_len + exp_len;
256 dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info));
257 memcpy(dptr, mod->modulus.data, total_key_len);
258 dptr += total_key_len;
259 memcpy(dptr, mod_op.base.data, base_len);
261 dlen = total_key_len + base_len;
267 vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_MODEX;
268 vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_MODEX;
271 vq_cmd_w0.s.param1 = mod_len;
272 vq_cmd_w0.s.param2 = exp_len;
273 vq_cmd_w0.s.dlen = dlen;
275 /* Filling cpt_request_info structure */
276 req->ist.ei0 = vq_cmd_w0.u64;
277 req->ist.ei1 = mphys;
278 req->ist.ei2 = mphys + dlen;
280 /* Result pointer to store result data */
283 /* alternate_caddr to write completion status of the microcode */
284 req->alternate_caddr = (uint64_t *)(dptr + rlen);
285 *req->alternate_caddr = ~((uint64_t)COMPLETION_CODE_INIT);
287 /* Preparing completion addr, +1 for completion code */
288 caddr.vaddr = dptr + rlen + 1;
289 caddr.dma_addr = mphys + dlen + rlen + 1;
291 cpt_fill_req_comp_addr(req, caddr);
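/*
 * Modexp DPTR layout prepared above (the single memcpy of total_key_len
 * bytes relies on modulus and exponent being contiguous, as arranged by
 * cpt_fill_modex_params):
 *
 *   [ modulus (mod_len) | exponent (exp_len) | base (base_len) ]
 *
 * param1 = modulus length, param2 = exponent length, dlen covers all
 * three fields; the result buffer and the two completion words follow
 * the input.
 */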
295 static __rte_always_inline void
296 cpt_rsa_prep(struct asym_op_params *rsa_params,
297 struct rte_crypto_rsa_xform *rsa,
298 rte_crypto_param *crypto_param)
300 struct cpt_request_info *req = rsa_params->req;
301 phys_addr_t mphys = rsa_params->meta_buf;
302 struct rte_crypto_rsa_op_param rsa_op;
303 uint32_t mod_len = rsa->n.length;
304 uint32_t exp_len = rsa->e.length;
305 struct rte_crypto_op **op;
306 vq_cmd_word0_t vq_cmd_w0;
307 uint64_t total_key_len;
313 /* Extracting rsa op from params->req->op[1]->asym->rsa */
314 op = RTE_PTR_ADD(req->op, sizeof(uintptr_t));
315 rsa_op = ((struct rte_crypto_op *)*op)->asym->rsa;
316 total_key_len = mod_len + exp_len;
319 dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info));
320 memcpy(dptr, rsa->n.data, total_key_len);
321 dptr += total_key_len;
323 in_size = crypto_param->length;
324 memcpy(dptr, crypto_param->data, in_size);
327 dlen = total_key_len + in_size;
332 if (rsa_op.pad == RTE_CRYPTO_RSA_PADDING_NONE) {
333 /* Use mod_exp operation for no_padding type */
334 vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_MODEX;
335 vq_cmd_w0.s.param2 = exp_len;
337 if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_ENCRYPT) {
338 vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_PKCS_ENC;
339 /* Public key encrypt, use BT2 */
340 vq_cmd_w0.s.param2 = CPT_BLOCK_TYPE2 |
341 ((uint16_t)(exp_len) << 1);
342 } else if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_VERIFY) {
343 vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_PKCS_DEC;
344 /* Public key decrypt, use BT1 */
345 vq_cmd_w0.s.param2 = CPT_BLOCK_TYPE1;
346 /* + 2 for decrypted len */
351 vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_MODEX;
354 vq_cmd_w0.s.param1 = mod_len;
355 vq_cmd_w0.s.dlen = dlen;
357 /* Filling cpt_request_info structure */
358 req->ist.ei0 = vq_cmd_w0.u64;
359 req->ist.ei1 = mphys;
360 req->ist.ei2 = mphys + dlen;
362 /* Result pointer to store result data */
365 /* alternate_caddr to write completion status of the microcode */
366 req->alternate_caddr = (uint64_t *)(dptr + rlen);
367 *req->alternate_caddr = ~((uint64_t)COMPLETION_CODE_INIT);
369 /* Preparing completion addr, +1 for completion code */
370 caddr.vaddr = dptr + rlen + 1;
371 caddr.dma_addr = mphys + dlen + rlen + 1;
373 cpt_fill_req_comp_addr(req, caddr);
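/*
 * RSA public-key DPTR layout prepared above (n and e are contiguous, as
 * arranged by cpt_fill_rsa_params):
 *
 *   [ n (mod_len) | e (exp_len) | input (in_size) ]
 *
 * Minor opcode selection: plain MODEX when no padding is requested,
 * otherwise PKCS encrypt with block type 2 for ENCRYPT and PKCS decrypt
 * with block type 1 for VERIFY.
 */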
376 static __rte_always_inline void
377 cpt_rsa_crt_prep(struct asym_op_params *rsa_params,
378 struct rte_crypto_rsa_xform *rsa,
379 rte_crypto_param *crypto_param)
381 struct cpt_request_info *req = rsa_params->req;
382 phys_addr_t mphys = rsa_params->meta_buf;
383 uint32_t qInv_len = rsa->qt.qInv.length;
384 struct rte_crypto_rsa_op_param rsa_op;
385 uint32_t dP_len = rsa->qt.dP.length;
386 uint32_t dQ_len = rsa->qt.dQ.length;
387 uint32_t p_len = rsa->qt.p.length;
388 uint32_t q_len = rsa->qt.q.length;
389 uint32_t mod_len = rsa->n.length;
390 struct rte_crypto_op **op;
391 vq_cmd_word0_t vq_cmd_w0;
392 uint64_t total_key_len;
398 /* Extracting rsa op from params->req->op[1]->asym->rsa */
399 op = RTE_PTR_ADD(req->op, sizeof(uintptr_t));
400 rsa_op = ((struct rte_crypto_op *)*op)->asym->rsa;
401 total_key_len = p_len + q_len + dP_len + dQ_len + qInv_len;
404 dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info));
405 memcpy(dptr, rsa->qt.q.data, total_key_len);
406 dptr += total_key_len;
408 in_size = crypto_param->length;
409 memcpy(dptr, crypto_param->data, in_size);
412 dlen = total_key_len + in_size;
417 if (rsa_op.pad == RTE_CRYPTO_RSA_PADDING_NONE) {
418 /* Use mod_exp operation for no_padding type */
419 vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_MODEX_CRT;
421 if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_SIGN) {
422 vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_PKCS_ENC_CRT;
423 /* Private encrypt, use BT1 */
424 vq_cmd_w0.s.param2 = CPT_BLOCK_TYPE1;
425 } else if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_DECRYPT) {
426 vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_PKCS_DEC_CRT;
427 /* Private decrypt, use BT2 */
428 vq_cmd_w0.s.param2 = CPT_BLOCK_TYPE2;
429 /* + 2 for decrypted len */
434 vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_MODEX;
437 vq_cmd_w0.s.param1 = mod_len;
438 vq_cmd_w0.s.dlen = dlen;
440 /* Filling cpt_request_info structure */
441 req->ist.ei0 = vq_cmd_w0.u64;
442 req->ist.ei1 = mphys;
443 req->ist.ei2 = mphys + dlen;
445 /* Result pointer to store result data */
448 /* alternate_caddr to write completion status of the microcode */
449 req->alternate_caddr = (uint64_t *)(dptr + rlen);
450 *req->alternate_caddr = ~((uint64_t)COMPLETION_CODE_INIT);
452 /* Preparing completion addr, +1 for completion code */
453 caddr.vaddr = dptr + rlen + 1;
454 caddr.dma_addr = mphys + dlen + rlen + 1;
456 cpt_fill_req_comp_addr(req, caddr);
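/*
 * RSA CRT (private-key) DPTR layout prepared above; the quintuple is
 * copied as one contiguous block starting at rsa->qt.q.data, exactly as
 * cpt_fill_rsa_params stored it:
 *
 *   [ q | dQ | p | dP | qInv | input (in_size) ]
 *
 * MODEX_CRT is used when no padding is requested; otherwise SIGN takes
 * PKCS encrypt with block type 1 and DECRYPT takes PKCS decrypt with
 * block type 2.
 */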
459 static __rte_always_inline int __rte_hot
460 cpt_enqueue_rsa_op(struct rte_crypto_op *op,
461 struct asym_op_params *params,
462 struct cpt_asym_sess_misc *sess)
464 struct rte_crypto_rsa_op_param *rsa = &op->asym->rsa;
466 switch (rsa->op_type) {
467 case RTE_CRYPTO_ASYM_OP_VERIFY:
468 cpt_rsa_prep(params, &sess->rsa_ctx, &rsa->sign);
470 case RTE_CRYPTO_ASYM_OP_ENCRYPT:
471 cpt_rsa_prep(params, &sess->rsa_ctx, &rsa->message);
473 case RTE_CRYPTO_ASYM_OP_SIGN:
474 cpt_rsa_crt_prep(params, &sess->rsa_ctx, &rsa->message);
476 case RTE_CRYPTO_ASYM_OP_DECRYPT:
477 cpt_rsa_crt_prep(params, &sess->rsa_ctx, &rsa->cipher);
480 op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
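/*
 * Public-key operations (ENCRYPT, VERIFY) are prepared by cpt_rsa_prep;
 * private-key operations (SIGN, DECRYPT) use the CRT form through
 * cpt_rsa_crt_prep. Any other op type is rejected as invalid arguments.
 */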
486 static const struct cpt_ec_group ec_grp[CPT_EC_ID_PMAX] = {
489 .data = {0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
490 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
491 0xFF, 0xFF, 0xFF, 0xFE, 0xFF, 0xFF,
492 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF},
495 .order = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
496 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
497 0x99, 0xDE, 0xF8, 0x36, 0x14, 0x6B,
498 0xC9, 0xB1, 0xB4, 0xD2, 0x28, 0x31},
500 .consta = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
501 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
502 0xFF, 0xFF, 0xFF, 0xFE, 0xFF, 0xFF,
503 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFC},
505 .constb = {.data = {0x64, 0x21, 0x05, 0x19, 0xE5, 0x9C,
506 0x80, 0xE7, 0x0F, 0xA7, 0xE9, 0xAB,
507 0x72, 0x24, 0x30, 0x49, 0xFE, 0xB8,
508 0xDE, 0xEC, 0xC1, 0x46, 0xB9, 0xB1},
512 .prime = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
513 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
514 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00,
515 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01},
517 .order = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
518 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
519 0x16, 0xA2, 0xE0, 0xB8, 0xF0, 0x3E, 0x13,
520 0xDD, 0x29, 0x45, 0x5C, 0x5C, 0x2A, 0x3D},
522 .consta = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
523 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
524 0xFF, 0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
525 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE},
527 .constb = {.data = {0xB4, 0x05, 0x0A, 0x85, 0x0C, 0x04, 0xB3,
528 0xAB, 0xF5, 0x41, 0x32, 0x56, 0x50, 0x44,
529 0xB0, 0xB7, 0xD7, 0xBF, 0xD8, 0xBA, 0x27,
530 0x0B, 0x39, 0x43, 0x23, 0x55, 0xFF, 0xB4},
534 .prime = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00,
535 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
536 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF,
537 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
538 0xFF, 0xFF, 0xFF, 0xFF},
540 .order = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00,
541 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
542 0xFF, 0xFF, 0xBC, 0xE6, 0xFA, 0xAD, 0xA7,
543 0x17, 0x9E, 0x84, 0xF3, 0xB9, 0xCA, 0xC2,
544 0xFC, 0x63, 0x25, 0x51},
546 .consta = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00,
547 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
548 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF,
549 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
550 0xFF, 0xFF, 0xFF, 0xFC},
552 .constb = {.data = {0x5A, 0xC6, 0x35, 0xD8, 0xAA, 0x3A, 0x93,
553 0xE7, 0xB3, 0xEB, 0xBD, 0x55, 0x76, 0x98,
554 0x86, 0xBC, 0x65, 0x1D, 0x06, 0xB0, 0xCC,
555 0x53, 0xB0, 0xF6, 0x3B, 0xCE, 0x3C, 0x3E,
556 0x27, 0xD2, 0x60, 0x4B},
560 .prime = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
561 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
562 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
563 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
564 0xFF, 0xFF, 0xFF, 0xFE, 0xFF, 0xFF, 0xFF,
565 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
566 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF},
568 .order = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
569 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
570 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
571 0xFF, 0xFF, 0xFF, 0xC7, 0x63, 0x4D, 0x81,
572 0xF4, 0x37, 0x2D, 0xDF, 0x58, 0x1A, 0x0D,
573 0xB2, 0x48, 0xB0, 0xA7, 0x7A, 0xEC, 0xEC,
574 0x19, 0x6A, 0xCC, 0xC5, 0x29, 0x73},
576 .consta = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
577 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
578 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
579 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
580 0xFF, 0xFF, 0xFF, 0xFE, 0xFF, 0xFF, 0xFF,
581 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
582 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFC},
584 .constb = {.data = {0xB3, 0x31, 0x2F, 0xA7, 0xE2, 0x3E, 0xE7,
585 0xE4, 0x98, 0x8E, 0x05, 0x6B, 0xE3, 0xF8,
586 0x2D, 0x19, 0x18, 0x1D, 0x9C, 0x6E, 0xFE,
587 0x81, 0x41, 0x12, 0x03, 0x14, 0x08, 0x8F,
588 0x50, 0x13, 0x87, 0x5A, 0xC6, 0x56, 0x39,
589 0x8D, 0x8A, 0x2E, 0xD1, 0x9D, 0x2A, 0x85,
590 0xC8, 0xED, 0xD3, 0xEC, 0x2A, 0xEF},
593 {.prime = {.data = {0x01, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
594 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
595 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
596 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
597 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
598 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
599 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
600 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
603 .order = {.data = {0x01, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
604 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
605 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
606 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
607 0xFF, 0xFA, 0x51, 0x86, 0x87, 0x83, 0xBF, 0x2F,
608 0x96, 0x6B, 0x7F, 0xCC, 0x01, 0x48, 0xF7, 0x09,
609 0xA5, 0xD0, 0x3B, 0xB5, 0xC9, 0xB8, 0x89, 0x9C,
610 0x47, 0xAE, 0xBB, 0x6F, 0xB7, 0x1E, 0x91, 0x38,
613 .consta = {.data = {0x01, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
614 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
615 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
616 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
617 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
618 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
619 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
620 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
623 .constb = {.data = {0x00, 0x51, 0x95, 0x3E, 0xB9, 0x61, 0x8E, 0x1C,
624 0x9A, 0x1F, 0x92, 0x9A, 0x21, 0xA0, 0xB6, 0x85,
625 0x40, 0xEE, 0xA2, 0xDA, 0x72, 0x5B, 0x99, 0xB3,
626 0x15, 0xF3, 0xB8, 0xB4, 0x89, 0x91, 0x8E, 0xF1,
627 0x09, 0xE1, 0x56, 0x19, 0x39, 0x51, 0xEC, 0x7E,
628 0x93, 0x7B, 0x16, 0x52, 0xC0, 0xBD, 0x3B, 0xB1,
629 0xBF, 0x07, 0x35, 0x73, 0xDF, 0x88, 0x3D, 0x2C,
630 0x34, 0xF1, 0xEF, 0x45, 0x1F, 0xD4, 0x6B, 0x50,
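/*
 * The ec_grp[] table above holds, per supported curve (P-192 through
 * P-521), the prime, the group order and the curve constants a and b as
 * big-endian byte strings; the prep routines below use
 * ec_grp[curveid].prime.length and order.length to size and right-align
 * the per-request parameters.
 */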
634 static __rte_always_inline void
635 cpt_ecdsa_sign_prep(struct rte_crypto_ecdsa_op_param *ecdsa,
636 struct asym_op_params *ecdsa_params,
637 uint64_t fpm_table_iova,
640 struct cpt_request_info *req = ecdsa_params->req;
641 uint16_t message_len = ecdsa->message.length;
642 phys_addr_t mphys = ecdsa_params->meta_buf;
643 uint16_t pkey_len = ecdsa->pkey.length;
644 uint16_t p_align, k_align, m_align;
645 uint16_t k_len = ecdsa->k.length;
646 uint16_t order_len, prime_len;
647 uint16_t o_offset, pk_offset;
648 vq_cmd_word0_t vq_cmd_w0;
653 prime_len = ec_grp[curveid].prime.length;
654 order_len = ec_grp[curveid].order.length;
656 /* Truncate input length to curve prime length */
657 if (message_len > prime_len)
658 message_len = prime_len;
659 m_align = RTE_ALIGN_CEIL(message_len, 8);
661 p_align = RTE_ALIGN_CEIL(prime_len, 8);
662 k_align = RTE_ALIGN_CEIL(k_len, 8);
664 /* Set write offset for order and private key */
665 o_offset = prime_len - order_len;
666 pk_offset = prime_len - pkey_len;
669 dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info));
672 * Set dlen = sum(sizeof(fpm address), ROUNDUP8(scalar len, input len),
673 * ROUNDUP8(priv key len, prime len, order len, consta len, constb len)).
674 * Please note, private key, order and the curve constants cannot exceed
675 * prime length, i.e. 5 * p_align.
677 dlen = sizeof(fpm_table_iova) + k_align + m_align + p_align * 5;
679 memset(dptr, 0, dlen);
681 *(uint64_t *)dptr = fpm_table_iova;
682 dptr += sizeof(fpm_table_iova);
684 memcpy(dptr, ecdsa->k.data, k_len);
687 memcpy(dptr, ec_grp[curveid].prime.data, prime_len);
690 memcpy(dptr + o_offset, ec_grp[curveid].order.data, order_len);
693 memcpy(dptr + pk_offset, ecdsa->pkey.data, pkey_len);
696 memcpy(dptr, ecdsa->message.data, message_len);
699 memcpy(dptr, ec_grp[curveid].consta.data, prime_len);
702 memcpy(dptr, ec_grp[curveid].constb.data, prime_len);
705 /* 2 * prime length (for sign r and s) */
709 vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_ECDSA;
710 vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_ECDSA_SIGN;
713 vq_cmd_w0.s.param1 = curveid | (message_len << 8);
714 vq_cmd_w0.s.param2 = (pkey_len << 8) | k_len;
715 vq_cmd_w0.s.dlen = dlen;
717 /* Filling cpt_request_info structure */
718 req->ist.ei0 = vq_cmd_w0.u64;
719 req->ist.ei1 = mphys;
720 req->ist.ei2 = mphys + dlen;
722 /* Result pointer to store result data */
725 /* alternate_caddr to write completion status of the microcode */
726 req->alternate_caddr = (uint64_t *)(dptr + rlen);
727 *req->alternate_caddr = ~((uint64_t)COMPLETION_CODE_INIT);
729 /* Preparing completion addr, +1 for completion code */
730 caddr.vaddr = dptr + rlen + 1;
731 caddr.dma_addr = mphys + dlen + rlen + 1;
733 cpt_fill_req_comp_addr(req, caddr);
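/*
 * ECDSA sign DPTR layout prepared above (the buffer is zeroed first;
 * order and private key are right-aligned within prime-length slots):
 *
 *   [ fpm table IOVA | k | prime | order | pkey | message | a | b ]
 *
 * param1 carries the curve id and the (possibly truncated) message
 * length, param2 the private key and scalar k lengths; the r and s
 * signature components are returned in the result area after the input.
 */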
736 static __rte_always_inline void
737 cpt_ecdsa_verify_prep(struct rte_crypto_ecdsa_op_param *ecdsa,
738 struct asym_op_params *ecdsa_params,
739 uint64_t fpm_table_iova,
742 struct cpt_request_info *req = ecdsa_params->req;
743 uint32_t message_len = ecdsa->message.length;
744 phys_addr_t mphys = ecdsa_params->meta_buf;
745 uint16_t o_offset, r_offset, s_offset;
746 uint16_t qx_len = ecdsa->q.x.length;
747 uint16_t qy_len = ecdsa->q.y.length;
748 uint16_t r_len = ecdsa->r.length;
749 uint16_t s_len = ecdsa->s.length;
750 uint16_t order_len, prime_len;
751 uint16_t qx_offset, qy_offset;
752 uint16_t p_align, m_align;
753 vq_cmd_word0_t vq_cmd_w0;
758 prime_len = ec_grp[curveid].prime.length;
759 order_len = ec_grp[curveid].order.length;
761 /* Truncate input length to curve prime length */
762 if (message_len > prime_len)
763 message_len = prime_len;
765 m_align = RTE_ALIGN_CEIL(message_len, 8);
766 p_align = RTE_ALIGN_CEIL(prime_len, 8);
768 /* Set write offset for sign, order and public key coordinates */
769 o_offset = prime_len - order_len;
770 qx_offset = prime_len - qx_len;
771 qy_offset = prime_len - qy_len;
772 r_offset = prime_len - r_len;
773 s_offset = prime_len - s_len;
776 dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info));
779 * Set dlen = sum(sizeof(fpm address), ROUNDUP8(message len),
780 * ROUNDUP8(sign len (r and s), public key len (x and y coordinates),
781 * prime len, order len, consta len, constb len)).
782 * Please note sign, public key, order and curve constants cannot exceed prime length
785 dlen = sizeof(fpm_table_iova) + m_align + (8 * p_align);
787 memset(dptr, 0, dlen);
789 *(uint64_t *)dptr = fpm_table_iova;
790 dptr += sizeof(fpm_table_iova);
792 memcpy(dptr + r_offset, ecdsa->r.data, r_len);
795 memcpy(dptr + s_offset, ecdsa->s.data, s_len);
798 memcpy(dptr, ecdsa->message.data, message_len);
801 memcpy(dptr + o_offset, ec_grp[curveid].order.data, order_len);
804 memcpy(dptr, ec_grp[curveid].prime.data, prime_len);
807 memcpy(dptr + qx_offset, ecdsa->q.x.data, qx_len);
810 memcpy(dptr + qy_offset, ecdsa->q.y.data, qy_len);
813 memcpy(dptr, ec_grp[curveid].consta.data, prime_len);
816 memcpy(dptr, ec_grp[curveid].constb.data, prime_len);
820 vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_ECDSA;
821 vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_ECDSA_VERIFY;
824 vq_cmd_w0.s.param1 = curveid | (message_len << 8);
825 vq_cmd_w0.s.param2 = 0;
826 vq_cmd_w0.s.dlen = dlen;
828 /* Filling cpt_request_info structure */
829 req->ist.ei0 = vq_cmd_w0.u64;
830 req->ist.ei1 = mphys;
831 req->ist.ei2 = mphys + dlen;
833 /* Result pointer to store result data */
836 /* alternate_caddr to write completion status of the microcode */
837 req->alternate_caddr = (uint64_t *)dptr;
838 *req->alternate_caddr = ~((uint64_t)COMPLETION_CODE_INIT);
840 /* Preparing completion addr, +1 for completion code */
841 caddr.vaddr = dptr + 1;
842 caddr.dma_addr = mphys + dlen + 1;
844 cpt_fill_req_comp_addr(req, caddr);
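/*
 * ECDSA verify DPTR layout prepared above (sign components, order and
 * public key coordinates are right-aligned within prime-length slots):
 *
 *   [ fpm table IOVA | r | s | message | order | prime | qx | qy | a | b ]
 *
 * Verification returns no result data, so the alternate completion word
 * is placed directly after the input.
 */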
847 static __rte_always_inline int __rte_hot
848 cpt_enqueue_ecdsa_op(struct rte_crypto_op *op,
849 struct asym_op_params *params,
850 struct cpt_asym_sess_misc *sess,
853 struct rte_crypto_ecdsa_op_param *ecdsa = &op->asym->ecdsa;
854 uint8_t curveid = sess->ec_ctx.curveid;
856 if (ecdsa->op_type == RTE_CRYPTO_ASYM_OP_SIGN)
857 cpt_ecdsa_sign_prep(ecdsa, params, fpm_iova[curveid], curveid);
858 else if (ecdsa->op_type == RTE_CRYPTO_ASYM_OP_VERIFY)
859 cpt_ecdsa_verify_prep(ecdsa, params, fpm_iova[curveid],
862 op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
868 static __rte_always_inline int
869 cpt_ecpm_prep(struct rte_crypto_ecpm_op_param *ecpm,
870 struct asym_op_params *asym_params,
873 struct cpt_request_info *req = asym_params->req;
874 phys_addr_t mphys = asym_params->meta_buf;
875 uint16_t x1_len = ecpm->p.x.length;
876 uint16_t y1_len = ecpm->p.y.length;
877 uint16_t scalar_align, p_align;
878 uint16_t dlen, rlen, prime_len;
879 uint16_t x1_offset, y1_offset;
880 vq_cmd_word0_t vq_cmd_w0;
884 prime_len = ec_grp[curveid].prime.length;
887 dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info));
889 p_align = RTE_ALIGN_CEIL(prime_len, 8);
890 scalar_align = RTE_ALIGN_CEIL(ecpm->scalar.length, 8);
893 * Set dlen = sum(ROUNDUP8(input point (x and y coordinates), prime,
895 * Please note point length is equivalent to the prime length of the curve
897 dlen = 5 * p_align + scalar_align;
899 x1_offset = prime_len - x1_len;
900 y1_offset = prime_len - y1_len;
902 memset(dptr, 0, dlen);
904 /* Copy input point, scalar, prime */
905 memcpy(dptr + x1_offset, ecpm->p.x.data, x1_len);
907 memcpy(dptr + y1_offset, ecpm->p.y.data, y1_len);
909 memcpy(dptr, ecpm->scalar.data, ecpm->scalar.length);
910 dptr += scalar_align;
911 memcpy(dptr, ec_grp[curveid].prime.data, ec_grp[curveid].prime.length);
914 memcpy(dptr, ec_grp[curveid].consta.data,
915 ec_grp[curveid].consta.length);
918 memcpy(dptr, ec_grp[curveid].constb.data,
919 ec_grp[curveid].constb.length);
923 vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_ECC;
924 vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_ECC_UMP;
927 vq_cmd_w0.s.param1 = curveid;
928 vq_cmd_w0.s.param2 = ecpm->scalar.length;
929 vq_cmd_w0.s.dlen = dlen;
931 /* Filling cpt_request_info structure */
932 req->ist.ei0 = vq_cmd_w0.u64;
933 req->ist.ei1 = mphys;
934 req->ist.ei2 = mphys + dlen;
936 /* Result buffer will store output point where length of
937 * each coordinate will be of prime length, thus set
938 * rlen to twice of prime length.
943 /* alternate_caddr to write completion status by the microcode */
944 req->alternate_caddr = (uint64_t *)(dptr + rlen);
945 *req->alternate_caddr = ~((uint64_t)COMPLETION_CODE_INIT);
947 /* Preparing completion addr, +1 for completion code */
948 caddr.vaddr = dptr + rlen + 1;
949 caddr.dma_addr = mphys + dlen + rlen + 1;
951 cpt_fill_req_comp_addr(req, caddr);
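/*
 * EC point multiplication DPTR layout prepared above (coordinates are
 * right-aligned within prime-length slots):
 *
 *   [ x | y | scalar | prime | a | b ]
 *
 * param1 carries the curve id and param2 the scalar length; the result
 * is the output point, two prime-length coordinates written after the
 * input.
 */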
954 #endif /* _CPT_UCODE_ASYM_H_ */