break;
}
}
+
+static __rte_always_inline int
+cnxk_ae_modex_prep(struct rte_crypto_op *op, struct roc_ae_buf_ptr *meta_buf,
+ struct rte_crypto_modex_xform *mod, struct cpt_inst_s *inst)
+{
+ uint32_t exp_len = mod->exponent.length;
+ uint32_t mod_len = mod->modulus.length;
+ struct rte_crypto_mod_op_param mod_op;
+ uint64_t total_key_len;
+ union cpt_inst_w4 w4;
+ uint32_t base_len;
+ uint32_t dlen;
+ uint8_t *dptr;
+
+ mod_op = op->asym->modex;
+
+ base_len = mod_op.base.length;
+ if (unlikely(base_len > mod_len)) {
+ op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
+ return -ENOTSUP;
+ }
+
+ total_key_len = mod_len + exp_len;
+
+ /* Input buffer */
+ dptr = meta_buf->vaddr;
+ inst->dptr = (uintptr_t)dptr;
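+ /*
+ * Session setup stores the modulus and exponent contiguously, so a
+ * single copy of total_key_len bytes covers both.
+ */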
+ memcpy(dptr, mod->modulus.data, total_key_len);
+ dptr += total_key_len;
+ memcpy(dptr, mod_op.base.data, base_len);
+ dptr += base_len;
+ dlen = total_key_len + base_len;
+
+ /* Setup opcodes */
+ w4.s.opcode_major = ROC_AE_MAJOR_OP_MODEX;
+ w4.s.opcode_minor = ROC_AE_MINOR_OP_MODEX;
+
+ w4.s.param1 = mod_len;
+ w4.s.param2 = exp_len;
+ w4.s.dlen = dlen;
+
+ inst->w4.u64 = w4.u64;
+ inst->rptr = (uintptr_t)dptr;
+
+ return 0;
+}
+
+static __rte_always_inline void
+cnxk_ae_rsa_prep(struct rte_crypto_op *op, struct roc_ae_buf_ptr *meta_buf,
+ struct rte_crypto_rsa_xform *rsa,
+ rte_crypto_param *crypto_param, struct cpt_inst_s *inst)
+{
+ struct rte_crypto_rsa_op_param rsa_op;
+ uint32_t mod_len = rsa->n.length;
+ uint32_t exp_len = rsa->e.length;
+ uint64_t total_key_len;
+ union cpt_inst_w4 w4;
+ uint32_t in_size;
+ uint32_t dlen;
+ uint8_t *dptr;
+
+ rsa_op = op->asym->rsa;
+ total_key_len = mod_len + exp_len;
+
+ /* Input buffer */
+ dptr = meta_buf->vaddr;
+ inst->dptr = (uintptr_t)dptr;
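+ /* n and e are stored contiguously, so one copy covers the full key */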
+ memcpy(dptr, rsa->n.data, total_key_len);
+ dptr += total_key_len;
+
+ in_size = crypto_param->length;
+ memcpy(dptr, crypto_param->data, in_size);
+
+ dptr += in_size;
+ dlen = total_key_len + in_size;
+
+ if (rsa_op.pad == RTE_CRYPTO_RSA_PADDING_NONE) {
+ /* Use mod_exp operation for no_padding type */
+ w4.s.opcode_minor = ROC_AE_MINOR_OP_MODEX;
+ w4.s.param2 = exp_len;
+ } else {
+ if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_ENCRYPT) {
+ w4.s.opcode_minor = ROC_AE_MINOR_OP_PKCS_ENC;
+ /* Public key encrypt, use BT2 */
+ w4.s.param2 = ROC_AE_CPT_BLOCK_TYPE2 |
+ ((uint16_t)(exp_len) << 1);
+ } else if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_VERIFY) {
+ w4.s.opcode_minor = ROC_AE_MINOR_OP_PKCS_DEC;
+ /* Public key decrypt, use BT1 */
+ w4.s.param2 = ROC_AE_CPT_BLOCK_TYPE1;
+ }
+ }
+
+ w4.s.opcode_major = ROC_AE_MAJOR_OP_MODEX;
+
+ w4.s.param1 = mod_len;
+ w4.s.dlen = dlen;
+
+ inst->w4.u64 = w4.u64;
+ inst->rptr = (uintptr_t)dptr;
+}
+
+static __rte_always_inline void
+cnxk_ae_rsa_crt_prep(struct rte_crypto_op *op, struct roc_ae_buf_ptr *meta_buf,
+ struct rte_crypto_rsa_xform *rsa,
+ rte_crypto_param *crypto_param, struct cpt_inst_s *inst)
+{
+ uint32_t qInv_len = rsa->qt.qInv.length;
+ struct rte_crypto_rsa_op_param rsa_op;
+ uint32_t dP_len = rsa->qt.dP.length;
+ uint32_t dQ_len = rsa->qt.dQ.length;
+ uint32_t p_len = rsa->qt.p.length;
+ uint32_t q_len = rsa->qt.q.length;
+ uint32_t mod_len = rsa->n.length;
+ uint64_t total_key_len;
+ union cpt_inst_w4 w4;
+ uint32_t in_size;
+ uint32_t dlen;
+ uint8_t *dptr;
+
+ rsa_op = op->asym->rsa;
+ total_key_len = p_len + q_len + dP_len + dQ_len + qInv_len;
+
+ /* Input buffer */
+ dptr = meta_buf->vaddr;
+ inst->dptr = (uintptr_t)dptr;
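+ /*
+ * The five CRT key components are stored contiguously starting at q,
+ * in the order expected by the microcode, so one copy suffices.
+ */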
+ memcpy(dptr, rsa->qt.q.data, total_key_len);
+ dptr += total_key_len;
+
+ in_size = crypto_param->length;
+ memcpy(dptr, crypto_param->data, in_size);
+
+ dptr += in_size;
+ dlen = total_key_len + in_size;
+
+ if (rsa_op.pad == RTE_CRYPTO_RSA_PADDING_NONE) {
+ /* Use mod_exp operation for no_padding type */
+ w4.s.opcode_minor = ROC_AE_MINOR_OP_MODEX_CRT;
+ } else {
+ if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_SIGN) {
+ w4.s.opcode_minor = ROC_AE_MINOR_OP_PKCS_ENC_CRT;
+ /* Private encrypt, use BT1 */
+ w4.s.param2 = ROC_AE_CPT_BLOCK_TYPE1;
+ } else if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_DECRYPT) {
+ w4.s.opcode_minor = ROC_AE_MINOR_OP_PKCS_DEC_CRT;
+ /* Private decrypt, use BT2 */
+ w4.s.param2 = ROC_AE_CPT_BLOCK_TYPE2;
+ }
+ }
+
+ w4.s.opcode_major = ROC_AE_MAJOR_OP_MODEX;
+
+ w4.s.param1 = mod_len;
+ w4.s.dlen = dlen;
+
+ inst->w4.u64 = w4.u64;
+ inst->rptr = (uintptr_t)dptr;
+}
+
+static __rte_always_inline int __rte_hot
+cnxk_ae_enqueue_rsa_op(struct rte_crypto_op *op,
+ struct roc_ae_buf_ptr *meta_buf,
+ struct cnxk_ae_sess *sess, struct cpt_inst_s *inst)
+{
+ struct rte_crypto_rsa_op_param *rsa = &op->asym->rsa;
+
+ switch (rsa->op_type) {
+ case RTE_CRYPTO_ASYM_OP_VERIFY:
+ cnxk_ae_rsa_prep(op, meta_buf, &sess->rsa_ctx, &rsa->sign,
+ inst);
+ break;
+ case RTE_CRYPTO_ASYM_OP_ENCRYPT:
+ cnxk_ae_rsa_prep(op, meta_buf, &sess->rsa_ctx, &rsa->message,
+ inst);
+ break;
+ case RTE_CRYPTO_ASYM_OP_SIGN:
+ cnxk_ae_rsa_crt_prep(op, meta_buf, &sess->rsa_ctx,
+ &rsa->message, inst);
+ break;
+ case RTE_CRYPTO_ASYM_OP_DECRYPT:
+ cnxk_ae_rsa_crt_prep(op, meta_buf, &sess->rsa_ctx, &rsa->cipher,
+ inst);
+ break;
+ default:
+ op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
+ return -EINVAL;
+ }
+ return 0;
+}
+
+static __rte_always_inline void
+cnxk_ae_ecdsa_sign_prep(struct rte_crypto_ecdsa_op_param *ecdsa,
+ struct roc_ae_buf_ptr *meta_buf,
+ uint64_t fpm_table_iova, struct roc_ae_ec_group *ec_grp,
+ uint8_t curveid, struct cpt_inst_s *inst)
+{
+ uint16_t message_len = ecdsa->message.length;
+ uint16_t pkey_len = ecdsa->pkey.length;
+ uint16_t p_align, k_align, m_align;
+ uint16_t k_len = ecdsa->k.length;
+ uint16_t order_len, prime_len;
+ uint16_t o_offset, pk_offset;
+ union cpt_inst_w4 w4;
+ uint16_t dlen;
+ uint8_t *dptr;
+
+ prime_len = ec_grp->prime.length;
+ order_len = ec_grp->order.length;
+
+ /* Truncate input length to curve prime length */
+ if (message_len > prime_len)
+ message_len = prime_len;
+ m_align = RTE_ALIGN_CEIL(message_len, 8);
+
+ p_align = RTE_ALIGN_CEIL(prime_len, 8);
+ k_align = RTE_ALIGN_CEIL(k_len, 8);
+
+ /* Set write offset for order and private key */
+ o_offset = prime_len - order_len;
+ pk_offset = prime_len - pkey_len;
+
+ /* Input buffer */
+ dptr = meta_buf->vaddr;
+ inst->dptr = (uintptr_t)dptr;
+
+ /*
+ * Set dlen = sizeof(fpm address) + ROUNDUP8(scalar len) +
+ * ROUNDUP8(input len) + 3 * ROUNDUP8(prime len).
+ * The private key and order cannot exceed the prime length, so
+ * prime, order and private key each fit in a p_align-sized slot.
+ */
+ dlen = sizeof(fpm_table_iova) + k_align + m_align + p_align * 3;
+
+ memset(dptr, 0, dlen);
+
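+ /* Buffer layout: FPM table address | k | prime | order | pkey | message */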
+ *(uint64_t *)dptr = fpm_table_iova;
+ dptr += sizeof(fpm_table_iova);
+
+ memcpy(dptr, ecdsa->k.data, k_len);
+ dptr += k_align;
+
+ memcpy(dptr, ec_grp->prime.data, prime_len);
+ dptr += p_align;
+
+ memcpy(dptr + o_offset, ec_grp->order.data, order_len);
+ dptr += p_align;
+
+ memcpy(dptr + pk_offset, ecdsa->pkey.data, pkey_len);
+ dptr += p_align;
+
+ memcpy(dptr, ecdsa->message.data, message_len);
+ dptr += m_align;
+
+ /* Setup opcodes */
+ w4.s.opcode_major = ROC_AE_MAJOR_OP_ECDSA;
+ w4.s.opcode_minor = ROC_AE_MINOR_OP_ECDSA_SIGN;
+
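+ /* param1: curve id in the low byte, message length in the high byte */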
+ w4.s.param1 = curveid | (message_len << 8);
+ w4.s.param2 = k_len;
+ w4.s.dlen = dlen;
+
+ inst->w4.u64 = w4.u64;
+ inst->rptr = (uintptr_t)dptr;
+}
+
+static __rte_always_inline void
+cnxk_ae_ecdsa_verify_prep(struct rte_crypto_ecdsa_op_param *ecdsa,
+ struct roc_ae_buf_ptr *meta_buf,
+ uint64_t fpm_table_iova,
+ struct roc_ae_ec_group *ec_grp, uint8_t curveid,
+ struct cpt_inst_s *inst)
+{
+ uint32_t message_len = ecdsa->message.length;
+ uint16_t o_offset, r_offset, s_offset;
+ uint16_t qx_len = ecdsa->q.x.length;
+ uint16_t qy_len = ecdsa->q.y.length;
+ uint16_t r_len = ecdsa->r.length;
+ uint16_t s_len = ecdsa->s.length;
+ uint16_t order_len, prime_len;
+ uint16_t qx_offset, qy_offset;
+ uint16_t p_align, m_align;
+ union cpt_inst_w4 w4;
+ uint16_t dlen;
+ uint8_t *dptr;
+
+ prime_len = ec_grp->prime.length;
+ order_len = ec_grp->order.length;
+
+ /* Truncate input length to curve prime length */
+ if (message_len > prime_len)
+ message_len = prime_len;
+
+ m_align = RTE_ALIGN_CEIL(message_len, 8);
+ p_align = RTE_ALIGN_CEIL(prime_len, 8);
+
+ /* Set write offset for sign, order and public key coordinates */
+ o_offset = prime_len - order_len;
+ qx_offset = prime_len - qx_len;
+ qy_offset = prime_len - qy_len;
+ r_offset = prime_len - r_len;
+ s_offset = prime_len - s_len;
+
+ /* Input buffer */
+ dptr = meta_buf->vaddr;
+ inst->dptr = (uintptr_t)dptr;
+
+ /*
+ * Set dlen = sizeof(fpm address) + ROUNDUP8(message len) +
+ * 6 * ROUNDUP8(prime len).
+ * The signature (r, s), public key (x, y) and order cannot exceed
+ * the prime length, so r, s, order, prime, Qx and Qy each fit in a
+ * p_align-sized slot.
+ */
+ dlen = sizeof(fpm_table_iova) + m_align + (6 * p_align);
+
+ memset(dptr, 0, dlen);
+
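+ /* Buffer layout: FPM table address | r | s | message | order | prime | Qx | Qy */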
+ *(uint64_t *)dptr = fpm_table_iova;
+ dptr += sizeof(fpm_table_iova);
+
+ memcpy(dptr + r_offset, ecdsa->r.data, r_len);
+ dptr += p_align;
+
+ memcpy(dptr + s_offset, ecdsa->s.data, s_len);
+ dptr += p_align;
+
+ memcpy(dptr, ecdsa->message.data, message_len);
+ dptr += m_align;
+
+ memcpy(dptr + o_offset, ec_grp->order.data, order_len);
+ dptr += p_align;
+
+ memcpy(dptr, ec_grp->prime.data, prime_len);
+ dptr += p_align;
+
+ memcpy(dptr + qx_offset, ecdsa->q.x.data, qx_len);
+ dptr += p_align;
+
+ memcpy(dptr + qy_offset, ecdsa->q.y.data, qy_len);
+ dptr += p_align;
+
+ /* Setup opcodes */
+ w4.s.opcode_major = ROC_AE_MAJOR_OP_ECDSA;
+ w4.s.opcode_minor = ROC_AE_MINOR_OP_ECDSA_VERIFY;
+
+ w4.s.param1 = curveid | (message_len << 8);
+ w4.s.param2 = 0;
+ w4.s.dlen = dlen;
+
+ inst->w4.u64 = w4.u64;
+ inst->rptr = (uintptr_t)dptr;
+}
+
+static __rte_always_inline int __rte_hot
+cnxk_ae_enqueue_ecdsa_op(struct rte_crypto_op *op,
+ struct roc_ae_buf_ptr *meta_buf,
+ struct cnxk_ae_sess *sess, uint64_t *fpm_iova,
+ struct roc_ae_ec_group **ec_grp,
+ struct cpt_inst_s *inst)
+{
+ struct rte_crypto_ecdsa_op_param *ecdsa = &op->asym->ecdsa;
+ uint8_t curveid = sess->ec_ctx.curveid;
+
+ if (ecdsa->op_type == RTE_CRYPTO_ASYM_OP_SIGN)
+ cnxk_ae_ecdsa_sign_prep(ecdsa, meta_buf, fpm_iova[curveid],
+ ec_grp[curveid], curveid, inst);
+ else if (ecdsa->op_type == RTE_CRYPTO_ASYM_OP_VERIFY)
+ cnxk_ae_ecdsa_verify_prep(ecdsa, meta_buf, fpm_iova[curveid],
+ ec_grp[curveid], curveid, inst);
+ else {
+ op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
+ return -EINVAL;
+ }
+ return 0;
+}
+
+static __rte_always_inline int
+cnxk_ae_ecpm_prep(struct rte_crypto_ecpm_op_param *ecpm,
+ struct roc_ae_buf_ptr *meta_buf,
+ struct roc_ae_ec_group *ec_grp, uint8_t curveid,
+ struct cpt_inst_s *inst)
+{
+ uint16_t x1_len = ecpm->p.x.length;
+ uint16_t y1_len = ecpm->p.y.length;
+ uint16_t scalar_align, p_align;
+ uint16_t x1_offset, y1_offset;
+ uint16_t dlen, prime_len;
+ union cpt_inst_w4 w4;
+ uint8_t *dptr;
+
+ prime_len = ec_grp->prime.length;
+
+ /* Input buffer */
+ dptr = meta_buf->vaddr;
+ inst->dptr = (uintptr_t)dptr;
+
+ p_align = RTE_ALIGN_CEIL(prime_len, 8);
+ scalar_align = RTE_ALIGN_CEIL(ecpm->scalar.length, 8);
+
+ /*
+ * Set dlen = 3 * ROUNDUP8(prime len) + ROUNDUP8(scalar len).
+ * The point coordinates cannot exceed the prime length, so the
+ * input point (x, y) and the prime each fit in a p_align-sized slot.
+ */
+ dlen = 3 * p_align + scalar_align;
+
+ x1_offset = prime_len - x1_len;
+ y1_offset = prime_len - y1_len;
+
+ memset(dptr, 0, dlen);
+
+ /* Copy input point, scalar, prime */
+ memcpy(dptr + x1_offset, ecpm->p.x.data, x1_len);
+ dptr += p_align;
+ memcpy(dptr + y1_offset, ecpm->p.y.data, y1_len);
+ dptr += p_align;
+ memcpy(dptr, ecpm->scalar.data, ecpm->scalar.length);
+ dptr += scalar_align;
+ memcpy(dptr, ec_grp->prime.data, ec_grp->prime.length);
+ dptr += p_align;
+
+ /* Setup opcodes */
+ w4.s.opcode_major = ROC_AE_MAJOR_OP_ECC;
+ w4.s.opcode_minor = ROC_AE_MINOR_OP_ECC_UMP;
+
+ w4.s.param1 = curveid;
+ w4.s.param2 = ecpm->scalar.length;
+ w4.s.dlen = dlen;
+
+ inst->w4.u64 = w4.u64;
+ inst->rptr = (uintptr_t)dptr;
+
+ return 0;
+}
+
+static __rte_always_inline void
+cnxk_ae_dequeue_rsa_op(struct rte_crypto_op *cop, uint8_t *rptr,
+ struct rte_crypto_rsa_xform *rsa_ctx)
+{
+ struct rte_crypto_rsa_op_param *rsa = &cop->asym->rsa;
+
+ switch (rsa->op_type) {
+ case RTE_CRYPTO_ASYM_OP_ENCRYPT:
+ rsa->cipher.length = rsa_ctx->n.length;
+ memcpy(rsa->cipher.data, rptr, rsa->cipher.length);
+ break;
+ case RTE_CRYPTO_ASYM_OP_DECRYPT:
+ if (rsa->pad == RTE_CRYPTO_RSA_PADDING_NONE) {
+ rsa->message.length = rsa_ctx->n.length;
+ memcpy(rsa->message.data, rptr, rsa->message.length);
+ } else {
+ /* Get length of decrypted output */
+ rsa->message.length =
+ rte_cpu_to_be_16(*((uint16_t *)rptr));
+ /*
+ * Offset output data pointer by length field
+ * (2 bytes) and copy decrypted data.
+ */
+ memcpy(rsa->message.data, rptr + 2,
+ rsa->message.length);
+ }
+ break;
+ case RTE_CRYPTO_ASYM_OP_SIGN:
+ rsa->sign.length = rsa_ctx->n.length;
+ memcpy(rsa->sign.data, rptr, rsa->sign.length);
+ break;
+ case RTE_CRYPTO_ASYM_OP_VERIFY:
+ if (rsa->pad == RTE_CRYPTO_RSA_PADDING_NONE) {
+ rsa->sign.length = rsa_ctx->n.length;
+ memcpy(rsa->sign.data, rptr, rsa->sign.length);
+ } else {
+ /* Get length of signed output */
+ rsa->sign.length =
+ rte_cpu_to_be_16(*((uint16_t *)rptr));
+ /*
+ * Offset output data pointer by length field
+ * (2 bytes) and copy signed data.
+ */
+ memcpy(rsa->sign.data, rptr + 2, rsa->sign.length);
+ }
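+ /* Verification: compare the recovered data against the message */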
+ if (memcmp(rsa->sign.data, rsa->message.data,
+ rsa->message.length)) {
+ cop->status = RTE_CRYPTO_OP_STATUS_ERROR;
+ }
+ break;
+ default:
+ cop->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
+ break;
+ }
+}
+
+static __rte_always_inline void
+cnxk_ae_dequeue_ecdsa_op(struct rte_crypto_ecdsa_op_param *ecdsa, uint8_t *rptr,
+ struct roc_ae_ec_ctx *ec,
+ struct roc_ae_ec_group **ec_grp)
+{
+ int prime_len = ec_grp[ec->curveid]->prime.length;
+
+ if (ecdsa->op_type == RTE_CRYPTO_ASYM_OP_VERIFY)
+ return;
+
+ /* Separate out sign r and s components, each in an 8-byte aligned slot */
+ memcpy(ecdsa->r.data, rptr, prime_len);
+ memcpy(ecdsa->s.data, rptr + RTE_ALIGN_CEIL(prime_len, 8), prime_len);
+ ecdsa->r.length = prime_len;
+ ecdsa->s.length = prime_len;
+}
+
+static __rte_always_inline void
+cnxk_ae_dequeue_ecpm_op(struct rte_crypto_ecpm_op_param *ecpm, uint8_t *rptr,
+ struct roc_ae_ec_ctx *ec,
+ struct roc_ae_ec_group **ec_grp)
+{
+ int prime_len = ec_grp[ec->curveid]->prime.length;
+
+ memcpy(ecpm->r.x.data, rptr, prime_len);
+ memcpy(ecpm->r.y.data, rptr + RTE_ALIGN_CEIL(prime_len, 8), prime_len);
+ ecpm->r.x.length = prime_len;
+ ecpm->r.y.length = prime_len;
+}
+
+static __rte_always_inline void *
+cnxk_ae_alloc_meta(struct roc_ae_buf_ptr *buf,
+ struct rte_mempool *cpt_meta_pool,
+ struct cpt_inflight_req *infl_req)
+{
+ uint8_t *mdata;
+
+ if (unlikely(rte_mempool_get(cpt_meta_pool, (void **)&mdata) < 0))
+ return NULL;
+
+ buf->vaddr = mdata;
+
+ infl_req->mdata = mdata;
+ infl_req->op_flags |= CPT_OP_FLAGS_METABUF;
+
+ return mdata;
+}
+
+static __rte_always_inline int32_t __rte_hot
+cnxk_ae_enqueue(struct cnxk_cpt_qp *qp, struct rte_crypto_op *op,
+ struct cpt_inflight_req *infl_req, struct cpt_inst_s *inst,
+ struct cnxk_ae_sess *sess)
+{
+ struct cpt_qp_meta_info *minfo = &qp->meta_info;
+ struct rte_crypto_asym_op *asym_op = op->asym;
+ struct roc_ae_buf_ptr meta_buf;
+ uint64_t *mop;
+ void *mdata;
+ int ret;
+
+ mdata = cnxk_ae_alloc_meta(&meta_buf, minfo->pool, infl_req);
+ if (mdata == NULL)
+ return -ENOMEM;
+
+ /* Reserve 8B for RPTR */
+ meta_buf.vaddr = PLT_PTR_ADD(mdata, sizeof(uint64_t));
+
+ switch (sess->xfrm_type) {
+ case RTE_CRYPTO_ASYM_XFORM_MODEX:
+ ret = cnxk_ae_modex_prep(op, &meta_buf, &sess->mod_ctx, inst);
+ if (unlikely(ret))
+ goto req_fail;
+ break;
+ case RTE_CRYPTO_ASYM_XFORM_RSA:
+ ret = cnxk_ae_enqueue_rsa_op(op, &meta_buf, sess, inst);
+ if (unlikely(ret))
+ goto req_fail;
+ break;
+ case RTE_CRYPTO_ASYM_XFORM_ECDSA:
+ ret = cnxk_ae_enqueue_ecdsa_op(op, &meta_buf, sess,
+ sess->cnxk_fpm_iova,
+ sess->ec_grp, inst);
+ if (unlikely(ret))
+ goto req_fail;
+ break;
+ case RTE_CRYPTO_ASYM_XFORM_ECPM:
+ ret = cnxk_ae_ecpm_prep(&asym_op->ecpm, &meta_buf,
+ sess->ec_grp[sess->ec_ctx.curveid],
+ sess->ec_ctx.curveid, inst);
+ if (unlikely(ret))
+ goto req_fail;
+ break;
+ default:
+ op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
+ ret = -EINVAL;
+ goto req_fail;
+ }
+
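+ /*
+ * Store RPTR in the 8 bytes reserved at the start of the meta buffer;
+ * the dequeue path reads it back to locate the hardware result.
+ */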
+ mop = mdata;
+ mop[0] = inst->rptr;
+ return 0;
+
+req_fail:
+ rte_mempool_put(minfo->pool, infl_req->mdata);
+ return ret;
+}
+
+static __rte_always_inline void
+cnxk_ae_post_process(struct rte_crypto_op *cop, struct cnxk_ae_sess *sess,
+ uint8_t *rptr)
+{
+ struct rte_crypto_asym_op *op = cop->asym;
+
+ switch (sess->xfrm_type) {
+ case RTE_CRYPTO_ASYM_XFORM_RSA:
+ cnxk_ae_dequeue_rsa_op(cop, rptr, &sess->rsa_ctx);
+ break;
+ case RTE_CRYPTO_ASYM_XFORM_MODEX:
+ op->modex.result.length = sess->mod_ctx.modulus.length;
+ memcpy(op->modex.result.data, rptr, op->modex.result.length);
+ break;
+ case RTE_CRYPTO_ASYM_XFORM_ECDSA:
+ cnxk_ae_dequeue_ecdsa_op(&op->ecdsa, rptr, &sess->ec_ctx,
+ sess->ec_grp);
+ break;
+ case RTE_CRYPTO_ASYM_XFORM_ECPM:
+ cnxk_ae_dequeue_ecpm_op(&op->ecpm, rptr, &sess->ec_ctx,
+ sess->ec_grp);
+ break;
+ default:
+ cop->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
+ break;
+ }
+}
#endif /* _CNXK_AE_H_ */