X-Git-Url: http://git.droids-corp.org/?a=blobdiff_plain;f=drivers%2Fcommon%2Fcpt%2Fcpt_ucode_asym.h;h=00e01b5e9c5a9eb2a8e66479fa2062980cc73db8;hb=a5d668e6376db327fda591067d929c314466bd8f;hp=e0311f1387e3b2b4461f49d5ee44fbbbad6c43ff;hpb=33bcaae5f85ad805ee287bee42013e61a1cff6fa;p=dpdk.git

diff --git a/drivers/common/cpt/cpt_ucode_asym.h b/drivers/common/cpt/cpt_ucode_asym.h
index e0311f1387..00e01b5e9c 100644
--- a/drivers/common/cpt/cpt_ucode_asym.h
+++ b/drivers/common/cpt/cpt_ucode_asym.h
@@ -9,6 +9,8 @@
 #include <rte_crypto_asym.h>
 #include <rte_malloc.h>
 
+#include "cpt_common.h"
+#include "cpt_hw_types.h"
 #include "cpt_mcode_defines.h"
 
 static __rte_always_inline void
@@ -168,4 +170,284 @@ cpt_free_asym_session_parameters(struct cpt_asym_sess_misc *sess)
 	}
 }
 
+static __rte_always_inline void
+cpt_fill_req_comp_addr(struct cpt_request_info *req, buf_ptr_t addr)
+{
+	void *completion_addr = RTE_PTR_ALIGN(addr.vaddr, 16);
+
+	/* Pointer to cpt_res_s, updated by CPT */
+	req->completion_addr = (volatile uint64_t *)completion_addr;
+	req->comp_baddr = addr.dma_addr +
+			  RTE_PTR_DIFF(completion_addr, addr.vaddr);
+	*(req->completion_addr) = COMPLETION_CODE_INIT;
+}
+
+static __rte_always_inline int
+cpt_modex_prep(struct asym_op_params *modex_params,
+	       struct rte_crypto_modex_xform *mod)
+{
+	struct cpt_request_info *req = modex_params->req;
+	phys_addr_t mphys = modex_params->meta_buf;
+	uint32_t exp_len = mod->exponent.length;
+	uint32_t mod_len = mod->modulus.length;
+	struct rte_crypto_mod_op_param mod_op;
+	struct rte_crypto_op **op;
+	vq_cmd_word0_t vq_cmd_w0;
+	uint64_t total_key_len;
+	opcode_info_t opcode;
+	uint32_t dlen, rlen;
+	uint32_t base_len;
+	buf_ptr_t caddr;
+	uint8_t *dptr;
+
+	/* Extracting modex op from params->req->op[1]->asym->modex */
+	op = RTE_PTR_ADD(req->op, sizeof(uintptr_t));
+	mod_op = ((struct rte_crypto_op *)*op)->asym->modex;
+
+	base_len = mod_op.base.length;
+	if (unlikely(base_len > mod_len)) {
+		CPT_LOG_DP_ERR("Base length greater than modulus length is not supported");
+		(*op)->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
+		return -ENOTSUP;
+	}
+
+	total_key_len = mod_len + exp_len;
+
+	/* Input buffer */
+	dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info));
+	memcpy(dptr, mod->modulus.data, total_key_len);
+	dptr += total_key_len;
+	memcpy(dptr, mod_op.base.data, base_len);
+	dptr += base_len;
+	dlen = total_key_len + base_len;
+
+	/* Result buffer */
+	rlen = mod_len;
+
+	/* Setup opcodes */
+	opcode.s.major = CPT_MAJOR_OP_MODEX;
+	opcode.s.minor = CPT_MINOR_OP_MODEX;
+	vq_cmd_w0.s.opcode = opcode.flags;
+
+	/* GP op header */
+	vq_cmd_w0.s.param1 = mod_len;
+	vq_cmd_w0.s.param2 = exp_len;
+	vq_cmd_w0.s.dlen = dlen;
+
+	/* Filling cpt_request_info structure */
+	req->ist.ei0 = vq_cmd_w0.u64;
+	req->ist.ei1 = mphys;
+	req->ist.ei2 = mphys + dlen;
+
+	/* Result pointer to store result data */
+	req->rptr = dptr;
+
+	/* alternate_caddr to write completion status of the microcode */
+	req->alternate_caddr = (uint64_t *)(dptr + rlen);
+	*req->alternate_caddr = ~((uint64_t)COMPLETION_CODE_INIT);
+
+	/* Preparing completion addr, +1 for completion code */
+	caddr.vaddr = dptr + rlen + 1;
+	caddr.dma_addr = mphys + dlen + rlen + 1;
+
+	cpt_fill_req_comp_addr(req, caddr);
+	return 0;
+}
+
+static __rte_always_inline void
+cpt_rsa_prep(struct asym_op_params *rsa_params,
+	     struct rte_crypto_rsa_xform *rsa,
+	     rte_crypto_param *crypto_param)
+{
+	struct cpt_request_info *req = rsa_params->req;
+	phys_addr_t mphys = rsa_params->meta_buf;
+	struct rte_crypto_rsa_op_param rsa_op;
+	uint32_t mod_len = rsa->n.length;
+	uint32_t exp_len = rsa->e.length;
+	struct rte_crypto_op **op;
+	vq_cmd_word0_t vq_cmd_w0;
+	uint64_t total_key_len;
+	opcode_info_t opcode;
+	uint32_t dlen, rlen;
+	uint32_t in_size;
+	buf_ptr_t caddr;
+	uint8_t *dptr;
+
+	/* Extracting rsa op from params->req->op[1]->asym->rsa */
+	op = RTE_PTR_ADD(req->op, sizeof(uintptr_t));
+	rsa_op = ((struct rte_crypto_op *)*op)->asym->rsa;
+	total_key_len = mod_len + exp_len;
+
+	/* Input buffer */
+	dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info));
+	memcpy(dptr, rsa->n.data, total_key_len);
+	dptr += total_key_len;
+
+	in_size = crypto_param->length;
+	memcpy(dptr, crypto_param->data, in_size);
+
+	dptr += in_size;
+	dlen = total_key_len + in_size;
+
+	/* Result buffer */
+	rlen = mod_len;
+
+	if (rsa_op.pad == RTE_CRYPTO_RSA_PADDING_NONE) {
+		/* Use mod_exp operation for no_padding type */
+		opcode.s.minor = CPT_MINOR_OP_MODEX;
+		vq_cmd_w0.s.param2 = exp_len;
+	} else {
+		if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_ENCRYPT) {
+			opcode.s.minor = CPT_MINOR_OP_PKCS_ENC;
+			/* Public key encrypt, use BT2 */
+			vq_cmd_w0.s.param2 = CPT_BLOCK_TYPE2 |
+					     ((uint16_t)(exp_len) << 1);
+		} else if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_VERIFY) {
+			opcode.s.minor = CPT_MINOR_OP_PKCS_DEC;
+			/* Public key decrypt, use BT1 */
+			vq_cmd_w0.s.param2 = CPT_BLOCK_TYPE1;
+			/* + 2 for decrypted len */
+			rlen += 2;
+		}
+	}
+
+	/* Setup opcodes */
+	opcode.s.major = CPT_MAJOR_OP_MODEX;
+	vq_cmd_w0.s.opcode = opcode.flags;
+
+	/* GP op header */
+	vq_cmd_w0.s.param1 = mod_len;
+	vq_cmd_w0.s.dlen = dlen;
+
+	/* Filling cpt_request_info structure */
+	req->ist.ei0 = vq_cmd_w0.u64;
+	req->ist.ei1 = mphys;
+	req->ist.ei2 = mphys + dlen;
+
+	/* Result pointer to store result data */
+	req->rptr = dptr;
+
+	/* alternate_caddr to write completion status of the microcode */
+	req->alternate_caddr = (uint64_t *)(dptr + rlen);
+	*req->alternate_caddr = ~((uint64_t)COMPLETION_CODE_INIT);
+
+	/* Preparing completion addr, +1 for completion code */
+	caddr.vaddr = dptr + rlen + 1;
+	caddr.dma_addr = mphys + dlen + rlen + 1;
+
+	cpt_fill_req_comp_addr(req, caddr);
+}
+
+static __rte_always_inline void
+cpt_rsa_crt_prep(struct asym_op_params *rsa_params,
+		 struct rte_crypto_rsa_xform *rsa,
+		 rte_crypto_param *crypto_param)
+{
+	struct cpt_request_info *req = rsa_params->req;
+	phys_addr_t mphys = rsa_params->meta_buf;
+	uint32_t qInv_len = rsa->qt.qInv.length;
+	struct rte_crypto_rsa_op_param rsa_op;
+	uint32_t dP_len = rsa->qt.dP.length;
+	uint32_t dQ_len = rsa->qt.dQ.length;
+	uint32_t p_len = rsa->qt.p.length;
+	uint32_t q_len = rsa->qt.q.length;
+	uint32_t mod_len = rsa->n.length;
+	struct rte_crypto_op **op;
+	vq_cmd_word0_t vq_cmd_w0;
+	uint64_t total_key_len;
+	opcode_info_t opcode;
+	uint32_t dlen, rlen;
+	uint32_t in_size;
+	buf_ptr_t caddr;
+	uint8_t *dptr;
+
+	/* Extracting rsa op from params->req->op[1]->asym->rsa */
+	op = RTE_PTR_ADD(req->op, sizeof(uintptr_t));
+	rsa_op = ((struct rte_crypto_op *)*op)->asym->rsa;
+	total_key_len = p_len + q_len + dP_len + dQ_len + qInv_len;
+
+	/* Input buffer */
+	dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info));
+	memcpy(dptr, rsa->qt.q.data, total_key_len);
+	dptr += total_key_len;
+
+	in_size = crypto_param->length;
+	memcpy(dptr, crypto_param->data, in_size);
+
+	dptr += in_size;
+	dlen = total_key_len + in_size;
+
+	/* Result buffer */
+	rlen = mod_len;
+
+	if (rsa_op.pad == RTE_CRYPTO_RSA_PADDING_NONE) {
+		/* Use mod_exp operation for no_padding type */
+		opcode.s.minor = CPT_MINOR_OP_MODEX_CRT;
+	} else {
+		if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_SIGN) {
+			opcode.s.minor = CPT_MINOR_OP_PKCS_ENC_CRT;
+			/* Private encrypt, use BT1 */
+			vq_cmd_w0.s.param2 = CPT_BLOCK_TYPE1;
+		} else if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_DECRYPT) {
+			opcode.s.minor = CPT_MINOR_OP_PKCS_DEC_CRT;
+			/* Private decrypt, use BT2 */
+			vq_cmd_w0.s.param2 = CPT_BLOCK_TYPE2;
+			/* + 2 for decrypted len */
+			rlen += 2;
+		}
+	}
+
+	/* Setup opcodes */
+	opcode.s.major = CPT_MAJOR_OP_MODEX;
+	vq_cmd_w0.s.opcode = opcode.flags;
+
+	/* GP op header */
+	vq_cmd_w0.s.param1 = mod_len;
+	vq_cmd_w0.s.dlen = dlen;
+
+	/* Filling cpt_request_info structure */
+	req->ist.ei0 = vq_cmd_w0.u64;
+	req->ist.ei1 = mphys;
+	req->ist.ei2 = mphys + dlen;
+
+	/* Result pointer to store result data */
+	req->rptr = dptr;
+
+	/* alternate_caddr to write completion status of the microcode */
+	req->alternate_caddr = (uint64_t *)(dptr + rlen);
+	*req->alternate_caddr = ~((uint64_t)COMPLETION_CODE_INIT);
+
+	/* Preparing completion addr, +1 for completion code */
+	caddr.vaddr = dptr + rlen + 1;
+	caddr.dma_addr = mphys + dlen + rlen + 1;
+
+	cpt_fill_req_comp_addr(req, caddr);
+}
+
+static __rte_always_inline int __hot
+cpt_enqueue_rsa_op(struct rte_crypto_op *op,
+		   struct asym_op_params *params,
+		   struct cpt_asym_sess_misc *sess)
+{
+	struct rte_crypto_rsa_op_param *rsa = &op->asym->rsa;
+
+	switch (rsa->op_type) {
+	case RTE_CRYPTO_ASYM_OP_VERIFY:
+		cpt_rsa_prep(params, &sess->rsa_ctx, &rsa->sign);
+		break;
+	case RTE_CRYPTO_ASYM_OP_ENCRYPT:
+		cpt_rsa_prep(params, &sess->rsa_ctx, &rsa->message);
+		break;
+	case RTE_CRYPTO_ASYM_OP_SIGN:
+		cpt_rsa_crt_prep(params, &sess->rsa_ctx, &rsa->message);
+		break;
+	case RTE_CRYPTO_ASYM_OP_DECRYPT:
+		cpt_rsa_crt_prep(params, &sess->rsa_ctx, &rsa->cipher);
+		break;
+	default:
+		op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
+		return -EINVAL;
+	}
+	return 0;
+}
 #endif /* _CPT_UCODE_ASYM_H_ */
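
All three prep routines added by this patch share one metadata-buffer layout:
struct cpt_request_info sits at the base, the microcode input (dlen bytes of
key material followed by the operand) comes immediately after it, the result
region (rlen bytes) follows, then one byte for the alternate completion code
and, after alignment up to 16 bytes, the completion word written by the CPT
hardware. The sketch below restates that layout as a sizing calculation for
the mod_exp case. It is illustrative only: the helper name modex_meta_len is
hypothetical and not part of the patch, and a real caller must also provide
the buffer's IOVA in asym_op_params::meta_buf.

/* Hypothetical sizing helper (illustration, not in the patch): how much
 * metadata memory cpt_modex_prep() touches for one mod_exp operation,
 * following the layout the functions above assume.
 */
static inline size_t
modex_meta_len(const struct rte_crypto_modex_xform *mod, uint32_t base_len)
{
	/* dlen = modulus + exponent (total_key_len) + base operand */
	uint32_t dlen = mod->modulus.length + mod->exponent.length + base_len;
	/* result is modulus-sized */
	uint32_t rlen = mod->modulus.length;

	return sizeof(struct cpt_request_info) + /* request header          */
	       dlen +                            /* microcode input         */
	       rlen +                            /* result buffer           */
	       1 +                               /* alternate completion    */
	       15 + 16;                          /* worst-case padding plus */
	                                         /* 16B-aligned completion  */
}

This mirrors the pointer arithmetic in cpt_modex_prep(): dptr starts right
after the request structure, dlen and rlen advance it, the alternate
completion code takes one byte at dptr + rlen, and cpt_fill_req_comp_addr()
aligns the remaining tail to 16 bytes before initializing the completion
word that the hardware later overwrites.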