/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright (C) 2019 Marvell International Ltd.
 */
5 #ifndef _CPT_UCODE_ASYM_H_
6 #define _CPT_UCODE_ASYM_H_
8 #include <rte_common.h>
9 #include <rte_crypto_asym.h>
10 #include <rte_malloc.h>
12 #include "cpt_common.h"
13 #include "cpt_hw_types.h"
14 #include "cpt_mcode_defines.h"
16 static __rte_always_inline void
17 cpt_modex_param_normalize(uint8_t **data, size_t *len)
21 /* Strip leading NUL bytes */
23 for (i = 0; i < *len; i++) {
32 static __rte_always_inline int
33 cpt_fill_modex_params(struct cpt_asym_sess_misc *sess,
34 struct rte_crypto_asym_xform *xform)
36 struct rte_crypto_modex_xform *ctx = &sess->mod_ctx;
37 size_t exp_len = xform->modex.exponent.length;
38 size_t mod_len = xform->modex.modulus.length;
39 uint8_t *exp = xform->modex.exponent.data;
40 uint8_t *mod = xform->modex.modulus.data;
42 cpt_modex_param_normalize(&mod, &mod_len);
43 cpt_modex_param_normalize(&exp, &exp_len);
45 if (unlikely(exp_len == 0 || mod_len == 0))
48 if (unlikely(exp_len > mod_len)) {
49 CPT_LOG_DP_ERR("Exponent length greater than modulus length is not supported");
53 /* Allocate buffer to hold modexp params */
54 ctx->modulus.data = rte_malloc(NULL, mod_len + exp_len, 0);
55 if (ctx->modulus.data == NULL) {
56 CPT_LOG_DP_ERR("Could not allocate buffer for modex params");
60 /* Set up modexp prime modulus and private exponent */
62 memcpy(ctx->modulus.data, mod, mod_len);
63 ctx->exponent.data = ctx->modulus.data + mod_len;
64 memcpy(ctx->exponent.data, exp, exp_len);
66 ctx->modulus.length = mod_len;
67 ctx->exponent.length = exp_len;
72 static __rte_always_inline int
73 cpt_fill_rsa_params(struct cpt_asym_sess_misc *sess,
74 struct rte_crypto_asym_xform *xform)
76 struct rte_crypto_rsa_priv_key_qt qt = xform->rsa.qt;
77 struct rte_crypto_rsa_xform *xfrm_rsa = &xform->rsa;
78 struct rte_crypto_rsa_xform *rsa = &sess->rsa_ctx;
79 size_t mod_len = xfrm_rsa->n.length;
80 size_t exp_len = xfrm_rsa->e.length;
84 /* Make sure key length used is not more than mod_len/2 */
85 if (qt.p.data != NULL)
86 len = (((mod_len / 2) < qt.p.length) ? len : qt.p.length);
88 /* Total size required for RSA key params(n,e,(q,dQ,p,dP,qInv)) */
89 total_size = mod_len + exp_len + 5 * len;
91 /* Allocate buffer to hold all RSA keys */
92 rsa->n.data = rte_malloc(NULL, total_size, 0);
93 if (rsa->n.data == NULL) {
94 CPT_LOG_DP_ERR("Could not allocate buffer for RSA keys");
98 /* Set up RSA prime modulus and public key exponent */
99 memcpy(rsa->n.data, xfrm_rsa->n.data, mod_len);
100 rsa->e.data = rsa->n.data + mod_len;
101 memcpy(rsa->e.data, xfrm_rsa->e.data, exp_len);
103 /* Private key in quintuple format */
105 rsa->qt.q.data = rsa->e.data + exp_len;
106 memcpy(rsa->qt.q.data, qt.q.data, qt.q.length);
107 rsa->qt.dQ.data = rsa->qt.q.data + qt.q.length;
108 memcpy(rsa->qt.dQ.data, qt.dQ.data, qt.dQ.length);
109 rsa->qt.p.data = rsa->qt.dQ.data + qt.dQ.length;
110 memcpy(rsa->qt.p.data, qt.p.data, qt.p.length);
111 rsa->qt.dP.data = rsa->qt.p.data + qt.p.length;
112 memcpy(rsa->qt.dP.data, qt.dP.data, qt.dP.length);
113 rsa->qt.qInv.data = rsa->qt.dP.data + qt.dP.length;
114 memcpy(rsa->qt.qInv.data, qt.qInv.data, qt.qInv.length);
116 rsa->qt.q.length = qt.q.length;
117 rsa->qt.dQ.length = qt.dQ.length;
118 rsa->qt.p.length = qt.p.length;
119 rsa->qt.dP.length = qt.dP.length;
120 rsa->qt.qInv.length = qt.qInv.length;
122 rsa->n.length = mod_len;
123 rsa->e.length = exp_len;
128 static __rte_always_inline int
129 cpt_fill_ec_params(struct cpt_asym_sess_misc *sess,
130 struct rte_crypto_asym_xform *xform)
132 struct cpt_asym_ec_ctx *ec = &sess->ec_ctx;
134 switch (xform->ec.curve_id) {
135 case RTE_CRYPTO_EC_GROUP_SECP192R1:
136 ec->curveid = CPT_EC_ID_P192;
138 case RTE_CRYPTO_EC_GROUP_SECP224R1:
139 ec->curveid = CPT_EC_ID_P224;
141 case RTE_CRYPTO_EC_GROUP_SECP256R1:
142 ec->curveid = CPT_EC_ID_P256;
144 case RTE_CRYPTO_EC_GROUP_SECP384R1:
145 ec->curveid = CPT_EC_ID_P384;
147 case RTE_CRYPTO_EC_GROUP_SECP521R1:
148 ec->curveid = CPT_EC_ID_P521;
151 /* Only NIST curves (FIPS 186-4) are supported */
152 CPT_LOG_DP_ERR("Unsupported curve");
159 static __rte_always_inline int
160 cpt_fill_asym_session_parameters(struct cpt_asym_sess_misc *sess,
161 struct rte_crypto_asym_xform *xform)
165 sess->xfrm_type = xform->xform_type;
167 switch (xform->xform_type) {
168 case RTE_CRYPTO_ASYM_XFORM_RSA:
169 ret = cpt_fill_rsa_params(sess, xform);
171 case RTE_CRYPTO_ASYM_XFORM_MODEX:
172 ret = cpt_fill_modex_params(sess, xform);
174 case RTE_CRYPTO_ASYM_XFORM_ECDSA:
176 case RTE_CRYPTO_ASYM_XFORM_ECPM:
177 ret = cpt_fill_ec_params(sess, xform);
180 CPT_LOG_DP_ERR("Unsupported transform type");
186 static __rte_always_inline void
187 cpt_free_asym_session_parameters(struct cpt_asym_sess_misc *sess)
189 struct rte_crypto_modex_xform *mod;
190 struct rte_crypto_rsa_xform *rsa;
192 switch (sess->xfrm_type) {
193 case RTE_CRYPTO_ASYM_XFORM_RSA:
194 rsa = &sess->rsa_ctx;
195 rte_free(rsa->n.data);
197 case RTE_CRYPTO_ASYM_XFORM_MODEX:
198 mod = &sess->mod_ctx;
199 rte_free(mod->modulus.data);
201 case RTE_CRYPTO_ASYM_XFORM_ECDSA:
203 case RTE_CRYPTO_ASYM_XFORM_ECPM:
206 CPT_LOG_DP_ERR("Invalid transform type");
211 static __rte_always_inline void
212 cpt_fill_req_comp_addr(struct cpt_request_info *req, buf_ptr_t addr)
214 void *completion_addr = RTE_PTR_ALIGN(addr.vaddr, 16);
216 /* Pointer to cpt_res_s, updated by CPT */
217 req->completion_addr = (volatile uint64_t *)completion_addr;
218 req->comp_baddr = addr.dma_addr +
219 RTE_PTR_DIFF(completion_addr, addr.vaddr);
220 *(req->completion_addr) = COMPLETION_CODE_INIT;
223 static __rte_always_inline int
224 cpt_modex_prep(struct asym_op_params *modex_params,
225 struct rte_crypto_modex_xform *mod)
227 struct cpt_request_info *req = modex_params->req;
228 phys_addr_t mphys = modex_params->meta_buf;
229 uint32_t exp_len = mod->exponent.length;
230 uint32_t mod_len = mod->modulus.length;
231 struct rte_crypto_mod_op_param mod_op;
232 struct rte_crypto_op **op;
233 vq_cmd_word0_t vq_cmd_w0;
234 uint64_t total_key_len;
240 /* Extracting modex op form params->req->op[1]->asym->modex */
241 op = RTE_PTR_ADD(req->op, sizeof(uintptr_t));
242 mod_op = ((struct rte_crypto_op *)*op)->asym->modex;
244 base_len = mod_op.base.length;
245 if (unlikely(base_len > mod_len)) {
246 CPT_LOG_DP_ERR("Base length greater than modulus length is not supported");
247 (*op)->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
251 total_key_len = mod_len + exp_len;
254 dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info));
255 memcpy(dptr, mod->modulus.data, total_key_len);
256 dptr += total_key_len;
257 memcpy(dptr, mod_op.base.data, base_len);
259 dlen = total_key_len + base_len;
265 vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_MODEX;
266 vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_MODEX;
269 vq_cmd_w0.s.param1 = mod_len;
270 vq_cmd_w0.s.param2 = exp_len;
271 vq_cmd_w0.s.dlen = dlen;
273 /* Filling cpt_request_info structure */
274 req->ist.ei0 = vq_cmd_w0.u64;
275 req->ist.ei1 = mphys;
276 req->ist.ei2 = mphys + dlen;
278 /* Result pointer to store result data */
281 /* alternate_caddr to write completion status of the microcode */
282 req->alternate_caddr = (uint64_t *)(dptr + rlen);
283 *req->alternate_caddr = ~((uint64_t)COMPLETION_CODE_INIT);
285 /* Preparing completion addr, +1 for completion code */
286 caddr.vaddr = dptr + rlen + 1;
287 caddr.dma_addr = mphys + dlen + rlen + 1;
289 cpt_fill_req_comp_addr(req, caddr);
293 static __rte_always_inline void
294 cpt_rsa_prep(struct asym_op_params *rsa_params,
295 struct rte_crypto_rsa_xform *rsa,
296 rte_crypto_param *crypto_param)
298 struct cpt_request_info *req = rsa_params->req;
299 phys_addr_t mphys = rsa_params->meta_buf;
300 struct rte_crypto_rsa_op_param rsa_op;
301 uint32_t mod_len = rsa->n.length;
302 uint32_t exp_len = rsa->e.length;
303 struct rte_crypto_op **op;
304 vq_cmd_word0_t vq_cmd_w0;
305 uint64_t total_key_len;
311 /* Extracting rsa op form params->req->op[1]->asym->rsa */
312 op = RTE_PTR_ADD(req->op, sizeof(uintptr_t));
313 rsa_op = ((struct rte_crypto_op *)*op)->asym->rsa;
314 total_key_len = mod_len + exp_len;
317 dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info));
318 memcpy(dptr, rsa->n.data, total_key_len);
319 dptr += total_key_len;
321 in_size = crypto_param->length;
322 memcpy(dptr, crypto_param->data, in_size);
325 dlen = total_key_len + in_size;
330 if (rsa_op.padding.type == RTE_CRYPTO_RSA_PADDING_NONE) {
331 /* Use mod_exp operation for no_padding type */
332 vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_MODEX;
333 vq_cmd_w0.s.param2 = exp_len;
335 if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_ENCRYPT) {
336 vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_PKCS_ENC;
337 /* Public key encrypt, use BT2*/
338 vq_cmd_w0.s.param2 = CPT_BLOCK_TYPE2 |
339 ((uint16_t)(exp_len) << 1);
340 } else if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_VERIFY) {
341 vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_PKCS_DEC;
342 /* Public key decrypt, use BT1 */
343 vq_cmd_w0.s.param2 = CPT_BLOCK_TYPE1;
344 /* + 2 for decrypted len */
349 vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_MODEX;
352 vq_cmd_w0.s.param1 = mod_len;
353 vq_cmd_w0.s.dlen = dlen;
355 /* Filling cpt_request_info structure */
356 req->ist.ei0 = vq_cmd_w0.u64;
357 req->ist.ei1 = mphys;
358 req->ist.ei2 = mphys + dlen;
360 /* Result pointer to store result data */
363 /* alternate_caddr to write completion status of the microcode */
364 req->alternate_caddr = (uint64_t *)(dptr + rlen);
365 *req->alternate_caddr = ~((uint64_t)COMPLETION_CODE_INIT);
367 /* Preparing completion addr, +1 for completion code */
368 caddr.vaddr = dptr + rlen + 1;
369 caddr.dma_addr = mphys + dlen + rlen + 1;
371 cpt_fill_req_comp_addr(req, caddr);
374 static __rte_always_inline void
375 cpt_rsa_crt_prep(struct asym_op_params *rsa_params,
376 struct rte_crypto_rsa_xform *rsa,
377 rte_crypto_param *crypto_param)
379 struct cpt_request_info *req = rsa_params->req;
380 phys_addr_t mphys = rsa_params->meta_buf;
381 uint32_t qInv_len = rsa->qt.qInv.length;
382 struct rte_crypto_rsa_op_param rsa_op;
383 uint32_t dP_len = rsa->qt.dP.length;
384 uint32_t dQ_len = rsa->qt.dQ.length;
385 uint32_t p_len = rsa->qt.p.length;
386 uint32_t q_len = rsa->qt.q.length;
387 uint32_t mod_len = rsa->n.length;
388 struct rte_crypto_op **op;
389 vq_cmd_word0_t vq_cmd_w0;
390 uint64_t total_key_len;
396 /* Extracting rsa op form params->req->op[1]->asym->rsa */
397 op = RTE_PTR_ADD(req->op, sizeof(uintptr_t));
398 rsa_op = ((struct rte_crypto_op *)*op)->asym->rsa;
399 total_key_len = p_len + q_len + dP_len + dQ_len + qInv_len;
402 dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info));
403 memcpy(dptr, rsa->qt.q.data, total_key_len);
404 dptr += total_key_len;
406 in_size = crypto_param->length;
407 memcpy(dptr, crypto_param->data, in_size);
410 dlen = total_key_len + in_size;
415 if (rsa_op.padding.type == RTE_CRYPTO_RSA_PADDING_NONE) {
416 /*Use mod_exp operation for no_padding type */
417 vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_MODEX_CRT;
419 if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_SIGN) {
420 vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_PKCS_ENC_CRT;
421 /* Private encrypt, use BT1 */
422 vq_cmd_w0.s.param2 = CPT_BLOCK_TYPE1;
423 } else if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_DECRYPT) {
424 vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_PKCS_DEC_CRT;
425 /* Private decrypt, use BT2 */
426 vq_cmd_w0.s.param2 = CPT_BLOCK_TYPE2;
427 /* + 2 for decrypted len */
432 vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_MODEX;
435 vq_cmd_w0.s.param1 = mod_len;
436 vq_cmd_w0.s.dlen = dlen;
438 /* Filling cpt_request_info structure */
439 req->ist.ei0 = vq_cmd_w0.u64;
440 req->ist.ei1 = mphys;
441 req->ist.ei2 = mphys + dlen;
443 /* Result pointer to store result data */
446 /* alternate_caddr to write completion status of the microcode */
447 req->alternate_caddr = (uint64_t *)(dptr + rlen);
448 *req->alternate_caddr = ~((uint64_t)COMPLETION_CODE_INIT);
450 /* Preparing completion addr, +1 for completion code */
451 caddr.vaddr = dptr + rlen + 1;
452 caddr.dma_addr = mphys + dlen + rlen + 1;
454 cpt_fill_req_comp_addr(req, caddr);
457 static __rte_always_inline int __rte_hot
458 cpt_enqueue_rsa_op(struct rte_crypto_op *op,
459 struct asym_op_params *params,
460 struct cpt_asym_sess_misc *sess)
462 struct rte_crypto_rsa_op_param *rsa = &op->asym->rsa;
464 switch (rsa->op_type) {
465 case RTE_CRYPTO_ASYM_OP_VERIFY:
466 cpt_rsa_prep(params, &sess->rsa_ctx, &rsa->sign);
468 case RTE_CRYPTO_ASYM_OP_ENCRYPT:
469 cpt_rsa_prep(params, &sess->rsa_ctx, &rsa->message);
471 case RTE_CRYPTO_ASYM_OP_SIGN:
472 cpt_rsa_crt_prep(params, &sess->rsa_ctx, &rsa->message);
474 case RTE_CRYPTO_ASYM_OP_DECRYPT:
475 cpt_rsa_crt_prep(params, &sess->rsa_ctx, &rsa->cipher);
478 op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
484 static const struct cpt_ec_group ec_grp[CPT_EC_ID_PMAX] = {
487 .data = {0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
488 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
489 0xFF, 0xFF, 0xFF, 0xFE, 0xFF, 0xFF,
490 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF},
493 .order = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
494 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
495 0x99, 0xDE, 0xF8, 0x36, 0x14, 0x6B,
496 0xC9, 0xB1, 0xB4, 0xD2, 0x28, 0x31},
498 .consta = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
499 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
500 0xFF, 0xFF, 0xFF, 0xFE, 0xFF, 0xFF,
501 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFC},
503 .constb = {.data = {0x64, 0x21, 0x05, 0x19, 0xE5, 0x9C,
504 0x80, 0xE7, 0x0F, 0xA7, 0xE9, 0xAB,
505 0x72, 0x24, 0x30, 0x49, 0xFE, 0xB8,
506 0xDE, 0xEC, 0xC1, 0x46, 0xB9, 0xB1},
510 .prime = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
511 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
512 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00,
513 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01},
515 .order = {.data = {0XFF, 0XFF, 0XFF, 0XFF, 0XFF, 0XFF, 0XFF,
516 0XFF, 0XFF, 0XFF, 0XFF, 0XFF, 0XFF, 0XFF,
517 0X16, 0XA2, 0XE0, 0XB8, 0XF0, 0X3E, 0X13,
518 0XDD, 0X29, 0X45, 0X5C, 0X5C, 0X2A, 0X3D},
520 .consta = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
521 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
522 0xFF, 0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
523 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE},
525 .constb = {.data = {0xB4, 0x05, 0x0A, 0x85, 0x0C, 0x04, 0xB3,
526 0xAB, 0xF5, 0x41, 0x32, 0x56, 0x50, 0x44,
527 0xB0, 0xB7, 0xD7, 0xBF, 0xD8, 0xBA, 0x27,
528 0x0B, 0x39, 0x43, 0x23, 0x55, 0xFF, 0xB4},
532 .prime = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00,
533 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
534 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF,
535 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
536 0xFF, 0xFF, 0xFF, 0xFF},
538 .order = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00,
539 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
540 0xFF, 0xFF, 0xBC, 0xE6, 0xFA, 0xAD, 0xA7,
541 0x17, 0x9E, 0x84, 0xF3, 0xB9, 0xCA, 0xC2,
542 0xFC, 0x63, 0x25, 0x51},
544 .consta = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00,
545 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
546 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF,
547 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
548 0xFF, 0xFF, 0xFF, 0xFC},
550 .constb = {.data = {0x5A, 0xC6, 0x35, 0xD8, 0xAA, 0x3A, 0x93,
551 0xE7, 0xB3, 0xEB, 0xBD, 0x55, 0x76, 0x98,
552 0x86, 0xBC, 0x65, 0x1D, 0x06, 0xB0, 0xCC,
553 0x53, 0xB0, 0xF6, 0x3B, 0xCE, 0x3C, 0x3E,
554 0x27, 0xD2, 0x60, 0x4B},
558 .prime = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
559 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
560 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
561 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
562 0xFF, 0xFF, 0xFF, 0xFE, 0xFF, 0xFF, 0xFF,
563 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
564 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF},
566 .order = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
567 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
568 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
569 0xFF, 0xFF, 0xFF, 0xC7, 0x63, 0x4D, 0x81,
570 0xF4, 0x37, 0x2D, 0xDF, 0x58, 0x1A, 0x0D,
571 0xB2, 0x48, 0xB0, 0xA7, 0x7A, 0xEC, 0xEC,
572 0x19, 0x6A, 0xCC, 0xC5, 0x29, 0x73},
574 .consta = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
575 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
576 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
577 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
578 0xFF, 0xFF, 0xFF, 0xFE, 0xFF, 0xFF, 0xFF,
579 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
580 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFC},
582 .constb = {.data = {0xB3, 0x31, 0x2F, 0xA7, 0xE2, 0x3E, 0xE7,
583 0xE4, 0x98, 0x8E, 0x05, 0x6B, 0xE3, 0xF8,
584 0x2D, 0x19, 0x18, 0x1D, 0x9C, 0x6E, 0xFE,
585 0x81, 0x41, 0x12, 0x03, 0x14, 0x08, 0x8F,
586 0x50, 0x13, 0x87, 0x5A, 0xC6, 0x56, 0x39,
587 0x8D, 0x8A, 0x2E, 0xD1, 0x9D, 0x2A, 0x85,
588 0xC8, 0xED, 0xD3, 0xEC, 0x2A, 0xEF},
591 {.prime = {.data = {0x01, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
592 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
593 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
594 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
595 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
596 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
597 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
598 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
601 .order = {.data = {0x01, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
602 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
603 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
604 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
605 0xFF, 0xFA, 0x51, 0x86, 0x87, 0x83, 0xBF, 0x2F,
606 0x96, 0x6B, 0x7F, 0xCC, 0x01, 0x48, 0xF7, 0x09,
607 0xA5, 0xD0, 0x3B, 0xB5, 0xC9, 0xB8, 0x89, 0x9C,
608 0x47, 0xAE, 0xBB, 0x6F, 0xB7, 0x1E, 0x91, 0x38,
611 .consta = {.data = {0x01, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
612 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
613 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
614 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
615 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
616 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
617 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
618 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
621 .constb = {.data = {0x00, 0x51, 0x95, 0x3E, 0xB9, 0x61, 0x8E, 0x1C,
622 0x9A, 0x1F, 0x92, 0x9A, 0x21, 0xA0, 0xB6, 0x85,
623 0x40, 0xEE, 0xA2, 0xDA, 0x72, 0x5B, 0x99, 0xB3,
624 0x15, 0xF3, 0xB8, 0xB4, 0x89, 0x91, 0x8E, 0xF1,
625 0x09, 0xE1, 0x56, 0x19, 0x39, 0x51, 0xEC, 0x7E,
626 0x93, 0x7B, 0x16, 0x52, 0xC0, 0xBD, 0x3B, 0xB1,
627 0xBF, 0x07, 0x35, 0x73, 0xDF, 0x88, 0x3D, 0x2C,
628 0x34, 0xF1, 0xEF, 0x45, 0x1F, 0xD4, 0x6B, 0x50,
632 static __rte_always_inline void
633 cpt_ecdsa_sign_prep(struct rte_crypto_ecdsa_op_param *ecdsa,
634 struct asym_op_params *ecdsa_params,
635 uint64_t fpm_table_iova,
638 struct cpt_request_info *req = ecdsa_params->req;
639 uint16_t message_len = ecdsa->message.length;
640 phys_addr_t mphys = ecdsa_params->meta_buf;
641 uint16_t pkey_len = ecdsa->pkey.length;
642 uint16_t p_align, k_align, m_align;
643 uint16_t k_len = ecdsa->k.length;
644 uint16_t order_len, prime_len;
645 uint16_t o_offset, pk_offset;
646 vq_cmd_word0_t vq_cmd_w0;
651 prime_len = ec_grp[curveid].prime.length;
652 order_len = ec_grp[curveid].order.length;
654 /* Truncate input length to curve prime length */
655 if (message_len > prime_len)
656 message_len = prime_len;
657 m_align = RTE_ALIGN_CEIL(message_len, 8);
659 p_align = RTE_ALIGN_CEIL(prime_len, 8);
660 k_align = RTE_ALIGN_CEIL(k_len, 8);
662 /* Set write offset for order and private key */
663 o_offset = prime_len - order_len;
664 pk_offset = prime_len - pkey_len;
667 dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info));
670 * Set dlen = sum(sizeof(fpm address), ROUNDUP8(scalar len, input len),
671 * ROUNDUP8(priv key len, prime len, order len)).
672 * Please note, private key, order cannot exceed prime
673 * length i.e 3 * p_align.
675 dlen = sizeof(fpm_table_iova) + k_align + m_align + p_align * 5;
677 memset(dptr, 0, dlen);
679 *(uint64_t *)dptr = fpm_table_iova;
680 dptr += sizeof(fpm_table_iova);
682 memcpy(dptr, ecdsa->k.data, k_len);
685 memcpy(dptr, ec_grp[curveid].prime.data, prime_len);
688 memcpy(dptr + o_offset, ec_grp[curveid].order.data, order_len);
691 memcpy(dptr + pk_offset, ecdsa->pkey.data, pkey_len);
694 memcpy(dptr, ecdsa->message.data, message_len);
697 memcpy(dptr, ec_grp[curveid].consta.data, prime_len);
700 memcpy(dptr, ec_grp[curveid].constb.data, prime_len);
703 /* 2 * prime length (for sign r and s ) */
707 vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_ECDSA;
708 vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_ECDSA_SIGN;
711 vq_cmd_w0.s.param1 = curveid | (message_len << 8);
712 vq_cmd_w0.s.param2 = (pkey_len << 8) | k_len;
713 vq_cmd_w0.s.dlen = dlen;
715 /* Filling cpt_request_info structure */
716 req->ist.ei0 = vq_cmd_w0.u64;
717 req->ist.ei1 = mphys;
718 req->ist.ei2 = mphys + dlen;
720 /* Result pointer to store result data */
723 /* alternate_caddr to write completion status of the microcode */
724 req->alternate_caddr = (uint64_t *)(dptr + rlen);
725 *req->alternate_caddr = ~((uint64_t)COMPLETION_CODE_INIT);
727 /* Preparing completion addr, +1 for completion code */
728 caddr.vaddr = dptr + rlen + 1;
729 caddr.dma_addr = mphys + dlen + rlen + 1;
731 cpt_fill_req_comp_addr(req, caddr);
734 static __rte_always_inline void
735 cpt_ecdsa_verify_prep(struct rte_crypto_ecdsa_op_param *ecdsa,
736 struct asym_op_params *ecdsa_params,
737 uint64_t fpm_table_iova,
740 struct cpt_request_info *req = ecdsa_params->req;
741 uint32_t message_len = ecdsa->message.length;
742 phys_addr_t mphys = ecdsa_params->meta_buf;
743 uint16_t o_offset, r_offset, s_offset;
744 uint16_t qx_len = ecdsa->q.x.length;
745 uint16_t qy_len = ecdsa->q.y.length;
746 uint16_t r_len = ecdsa->r.length;
747 uint16_t s_len = ecdsa->s.length;
748 uint16_t order_len, prime_len;
749 uint16_t qx_offset, qy_offset;
750 uint16_t p_align, m_align;
751 vq_cmd_word0_t vq_cmd_w0;
756 prime_len = ec_grp[curveid].prime.length;
757 order_len = ec_grp[curveid].order.length;
759 /* Truncate input length to curve prime length */
760 if (message_len > prime_len)
761 message_len = prime_len;
763 m_align = RTE_ALIGN_CEIL(message_len, 8);
764 p_align = RTE_ALIGN_CEIL(prime_len, 8);
766 /* Set write offset for sign, order and public key coordinates */
767 o_offset = prime_len - order_len;
768 qx_offset = prime_len - qx_len;
769 qy_offset = prime_len - qy_len;
770 r_offset = prime_len - r_len;
771 s_offset = prime_len - s_len;
774 dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info));
777 * Set dlen = sum(sizeof(fpm address), ROUNDUP8(message len),
778 * ROUNDUP8(sign len(r and s), public key len(x and y coordinates),
779 * prime len, order len)).
780 * Please note sign, public key and order can not exceed prime length
783 dlen = sizeof(fpm_table_iova) + m_align + (8 * p_align);
785 memset(dptr, 0, dlen);
787 *(uint64_t *)dptr = fpm_table_iova;
788 dptr += sizeof(fpm_table_iova);
790 memcpy(dptr + r_offset, ecdsa->r.data, r_len);
793 memcpy(dptr + s_offset, ecdsa->s.data, s_len);
796 memcpy(dptr, ecdsa->message.data, message_len);
799 memcpy(dptr + o_offset, ec_grp[curveid].order.data, order_len);
802 memcpy(dptr, ec_grp[curveid].prime.data, prime_len);
805 memcpy(dptr + qx_offset, ecdsa->q.x.data, qx_len);
808 memcpy(dptr + qy_offset, ecdsa->q.y.data, qy_len);
811 memcpy(dptr, ec_grp[curveid].consta.data, prime_len);
814 memcpy(dptr, ec_grp[curveid].constb.data, prime_len);
818 vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_ECDSA;
819 vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_ECDSA_VERIFY;
822 vq_cmd_w0.s.param1 = curveid | (message_len << 8);
823 vq_cmd_w0.s.param2 = 0;
824 vq_cmd_w0.s.dlen = dlen;
826 /* Filling cpt_request_info structure */
827 req->ist.ei0 = vq_cmd_w0.u64;
828 req->ist.ei1 = mphys;
829 req->ist.ei2 = mphys + dlen;
831 /* Result pointer to store result data */
834 /* alternate_caddr to write completion status of the microcode */
835 req->alternate_caddr = (uint64_t *)dptr;
836 *req->alternate_caddr = ~((uint64_t)COMPLETION_CODE_INIT);
838 /* Preparing completion addr, +1 for completion code */
839 caddr.vaddr = dptr + 1;
840 caddr.dma_addr = mphys + dlen + 1;
842 cpt_fill_req_comp_addr(req, caddr);
845 static __rte_always_inline int __rte_hot
846 cpt_enqueue_ecdsa_op(struct rte_crypto_op *op,
847 struct asym_op_params *params,
848 struct cpt_asym_sess_misc *sess,
851 struct rte_crypto_ecdsa_op_param *ecdsa = &op->asym->ecdsa;
852 uint8_t curveid = sess->ec_ctx.curveid;
854 if (ecdsa->op_type == RTE_CRYPTO_ASYM_OP_SIGN)
855 cpt_ecdsa_sign_prep(ecdsa, params, fpm_iova[curveid], curveid);
856 else if (ecdsa->op_type == RTE_CRYPTO_ASYM_OP_VERIFY)
857 cpt_ecdsa_verify_prep(ecdsa, params, fpm_iova[curveid],
860 op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
866 static __rte_always_inline int
867 cpt_ecpm_prep(struct rte_crypto_ecpm_op_param *ecpm,
868 struct asym_op_params *asym_params,
871 struct cpt_request_info *req = asym_params->req;
872 phys_addr_t mphys = asym_params->meta_buf;
873 uint16_t x1_len = ecpm->p.x.length;
874 uint16_t y1_len = ecpm->p.y.length;
875 uint16_t scalar_align, p_align;
876 uint16_t dlen, rlen, prime_len;
877 uint16_t x1_offset, y1_offset;
878 vq_cmd_word0_t vq_cmd_w0;
882 prime_len = ec_grp[curveid].prime.length;
885 dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info));
887 p_align = RTE_ALIGN_CEIL(prime_len, 8);
888 scalar_align = RTE_ALIGN_CEIL(ecpm->scalar.length, 8);
891 * Set dlen = sum(ROUNDUP8(input point(x and y coordinates), prime,
893 * Please note point length is equivalent to prime of the curve
895 dlen = 5 * p_align + scalar_align;
897 x1_offset = prime_len - x1_len;
898 y1_offset = prime_len - y1_len;
900 memset(dptr, 0, dlen);
902 /* Copy input point, scalar, prime */
903 memcpy(dptr + x1_offset, ecpm->p.x.data, x1_len);
905 memcpy(dptr + y1_offset, ecpm->p.y.data, y1_len);
907 memcpy(dptr, ecpm->scalar.data, ecpm->scalar.length);
908 dptr += scalar_align;
909 memcpy(dptr, ec_grp[curveid].prime.data, ec_grp[curveid].prime.length);
912 memcpy(dptr, ec_grp[curveid].consta.data,
913 ec_grp[curveid].consta.length);
916 memcpy(dptr, ec_grp[curveid].constb.data,
917 ec_grp[curveid].constb.length);
921 vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_ECC;
922 vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_ECC_UMP;
925 vq_cmd_w0.s.param1 = curveid;
926 vq_cmd_w0.s.param2 = ecpm->scalar.length;
927 vq_cmd_w0.s.dlen = dlen;
929 /* Filling cpt_request_info structure */
930 req->ist.ei0 = vq_cmd_w0.u64;
931 req->ist.ei1 = mphys;
932 req->ist.ei2 = mphys + dlen;
934 /* Result buffer will store output point where length of
935 * each coordinate will be of prime length, thus set
936 * rlen to twice of prime length.
941 /* alternate_caddr to write completion status by the microcode */
942 req->alternate_caddr = (uint64_t *)(dptr + rlen);
943 *req->alternate_caddr = ~((uint64_t)COMPLETION_CODE_INIT);
945 /* Preparing completion addr, +1 for completion code */
946 caddr.vaddr = dptr + rlen + 1;
947 caddr.dma_addr = mphys + dlen + rlen + 1;
949 cpt_fill_req_comp_addr(req, caddr);
952 #endif /* _CPT_UCODE_ASYM_H_ */