diff --git a/drivers/common/cpt/cpt_ucode_asym.h b/drivers/common/cpt/cpt_ucode_asym.h
index a67efb361c..50c6f58d3a 100644
--- a/drivers/common/cpt/cpt_ucode_asym.h
+++ b/drivers/common/cpt/cpt_ucode_asym.h
@@ -234,7 +234,6 @@ cpt_modex_prep(struct asym_op_params *modex_params,
 	struct rte_crypto_op **op;
 	vq_cmd_word0_t vq_cmd_w0;
 	uint64_t total_key_len;
-	opcode_info_t opcode;
 	uint32_t dlen, rlen;
 	uint32_t base_len;
 	buf_ptr_t caddr;
@@ -265,9 +264,8 @@ cpt_modex_prep(struct asym_op_params *modex_params,
 	rlen = mod_len;
 
 	/* Setup opcodes */
-	opcode.s.major = CPT_MAJOR_OP_MODEX;
-	opcode.s.minor = CPT_MINOR_OP_MODEX;
-	vq_cmd_w0.s.opcode = opcode.flags;
+	vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_MODEX;
+	vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_MODEX;
 
 	/* GP op header */
 	vq_cmd_w0.s.param1 = mod_len;
@@ -307,7 +305,6 @@ cpt_rsa_prep(struct asym_op_params *rsa_params,
 	struct rte_crypto_op **op;
 	vq_cmd_word0_t vq_cmd_w0;
 	uint64_t total_key_len;
-	opcode_info_t opcode;
 	uint32_t dlen, rlen;
 	uint32_t in_size;
 	buf_ptr_t caddr;
@@ -334,16 +331,16 @@ cpt_rsa_prep(struct asym_op_params *rsa_params,
 
 	if (rsa_op.pad == RTE_CRYPTO_RSA_PADDING_NONE) {
 		/* Use mod_exp operation for no_padding type */
-		opcode.s.minor = CPT_MINOR_OP_MODEX;
+		vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_MODEX;
 		vq_cmd_w0.s.param2 = exp_len;
 	} else {
 		if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_ENCRYPT) {
-			opcode.s.minor = CPT_MINOR_OP_PKCS_ENC;
+			vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_PKCS_ENC;
 			/* Public key encrypt, use BT2*/
 			vq_cmd_w0.s.param2 = CPT_BLOCK_TYPE2 |
 					((uint16_t)(exp_len) << 1);
 		} else if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_VERIFY) {
-			opcode.s.minor = CPT_MINOR_OP_PKCS_DEC;
+			vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_PKCS_DEC;
 			/* Public key decrypt, use BT1 */
 			vq_cmd_w0.s.param2 = CPT_BLOCK_TYPE1;
 			/* + 2 for decrypted len */
@@ -351,9 +348,7 @@ cpt_rsa_prep(struct asym_op_params *rsa_params,
 		}
 	}
 
-	/* Setup opcodes */
-	opcode.s.major = CPT_MAJOR_OP_MODEX;
-	vq_cmd_w0.s.opcode = opcode.flags;
+	vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_MODEX;
 
 	/* GP op header */
 	vq_cmd_w0.s.param1 = mod_len;
@@ -395,7 +390,6 @@ cpt_rsa_crt_prep(struct asym_op_params *rsa_params,
 	struct rte_crypto_op **op;
 	vq_cmd_word0_t vq_cmd_w0;
 	uint64_t total_key_len;
-	opcode_info_t opcode;
 	uint32_t dlen, rlen;
 	uint32_t in_size;
 	buf_ptr_t caddr;
@@ -422,14 +416,14 @@ cpt_rsa_crt_prep(struct asym_op_params *rsa_params,
 
 	if (rsa_op.pad == RTE_CRYPTO_RSA_PADDING_NONE) {
 		/*Use mod_exp operation for no_padding type */
-		opcode.s.minor = CPT_MINOR_OP_MODEX_CRT;
+		vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_MODEX_CRT;
 	} else {
 		if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_SIGN) {
-			opcode.s.minor = CPT_MINOR_OP_PKCS_ENC_CRT;
+			vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_PKCS_ENC_CRT;
 			/* Private encrypt, use BT1 */
 			vq_cmd_w0.s.param2 = CPT_BLOCK_TYPE1;
 		} else if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_DECRYPT) {
-			opcode.s.minor = CPT_MINOR_OP_PKCS_DEC_CRT;
+			vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_PKCS_DEC_CRT;
 			/* Private decrypt, use BT2 */
 			vq_cmd_w0.s.param2 = CPT_BLOCK_TYPE2;
 			/* + 2 for decrypted len */
@@ -437,9 +431,7 @@ cpt_rsa_crt_prep(struct asym_op_params *rsa_params,
 		}
 	}
 
-	/* Setup opcodes */
-	opcode.s.major = CPT_MAJOR_OP_MODEX;
-	vq_cmd_w0.s.opcode = opcode.flags;
+	vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_MODEX;
 
 	/* GP op header */
 	vq_cmd_w0.s.param1 = mod_len;
@@ -464,7 +456,7 @@ cpt_rsa_crt_prep(struct asym_op_params *rsa_params,
 	cpt_fill_req_comp_addr(req, caddr);
 }
 
-static __rte_always_inline int __hot
+static __rte_always_inline int __rte_hot
 cpt_enqueue_rsa_op(struct rte_crypto_op *op,
 	       struct asym_op_params *params,
 	       struct cpt_asym_sess_misc *sess)
@@ -621,7 +613,6 @@ cpt_ecdsa_sign_prep(struct rte_crypto_ecdsa_op_param *ecdsa,
 	uint16_t order_len, prime_len;
 	uint16_t o_offset, pk_offset;
 	vq_cmd_word0_t vq_cmd_w0;
-	opcode_info_t opcode;
 	uint16_t rlen, dlen;
 	buf_ptr_t caddr;
 	uint8_t *dptr;
@@ -632,10 +623,10 @@ cpt_ecdsa_sign_prep(struct rte_crypto_ecdsa_op_param *ecdsa,
 	/* Truncate input length to curve prime length */
 	if (message_len > prime_len)
 		message_len = prime_len;
-	m_align = ROUNDUP8(message_len);
+	m_align = RTE_ALIGN_CEIL(message_len, 8);
 
-	p_align = ROUNDUP8(prime_len);
-	k_align = ROUNDUP8(k_len);
+	p_align = RTE_ALIGN_CEIL(prime_len, 8);
+	k_align = RTE_ALIGN_CEIL(k_len, 8);
 
 	/* Set write offset for order and private key */
 	o_offset = prime_len - order_len;
@@ -676,9 +667,8 @@ cpt_ecdsa_sign_prep(struct rte_crypto_ecdsa_op_param *ecdsa,
 	rlen = 2 * p_align;
 
 	/* Setup opcodes */
-	opcode.s.major = CPT_MAJOR_OP_ECDSA;
-	opcode.s.minor = CPT_MINOR_OP_ECDSA_SIGN;
-	vq_cmd_w0.s.opcode = opcode.flags;
+	vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_ECDSA;
+	vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_ECDSA_SIGN;
 
 	/* GP op header */
 	vq_cmd_w0.s.param1 = curveid | (message_len << 8);
@@ -722,7 +712,6 @@ cpt_ecdsa_verify_prep(struct rte_crypto_ecdsa_op_param *ecdsa,
 	uint16_t qx_offset, qy_offset;
 	uint16_t p_align, m_align;
 	vq_cmd_word0_t vq_cmd_w0;
-	opcode_info_t opcode;
 	buf_ptr_t caddr;
 	uint16_t dlen;
 	uint8_t *dptr;
@@ -734,8 +723,8 @@ cpt_ecdsa_verify_prep(struct rte_crypto_ecdsa_op_param *ecdsa,
 	if (message_len > prime_len)
 		message_len = prime_len;
 
-	m_align = ROUNDUP8(message_len);
-	p_align = ROUNDUP8(prime_len);
+	m_align = RTE_ALIGN_CEIL(message_len, 8);
+	p_align = RTE_ALIGN_CEIL(prime_len, 8);
 
 	/* Set write offset for sign, order and public key coordinates */
 	o_offset = prime_len - order_len;
@@ -783,9 +772,8 @@ cpt_ecdsa_verify_prep(struct rte_crypto_ecdsa_op_param *ecdsa,
 	dptr += p_align;
 
 	/* Setup opcodes */
-	opcode.s.major = CPT_MAJOR_OP_ECDSA;
-	opcode.s.minor = CPT_MINOR_OP_ECDSA_VERIFY;
-	vq_cmd_w0.s.opcode = opcode.flags;
+	vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_ECDSA;
+	vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_ECDSA_VERIFY;
 
 	/* GP op header */
 	vq_cmd_w0.s.param1 = curveid | (message_len << 8);
@@ -811,7 +799,7 @@ cpt_ecdsa_verify_prep(struct rte_crypto_ecdsa_op_param *ecdsa,
 	cpt_fill_req_comp_addr(req, caddr);
 }
 
-static __rte_always_inline int __hot
+static __rte_always_inline int __rte_hot
 cpt_enqueue_ecdsa_op(struct rte_crypto_op *op,
 		     struct asym_op_params *params,
 		     struct cpt_asym_sess_misc *sess,
@@ -845,7 +833,6 @@ cpt_ecpm_prep(struct rte_crypto_ecpm_op_param *ecpm,
 	uint16_t dlen, rlen, prime_len;
 	uint16_t x1_offset, y1_offset;
 	vq_cmd_word0_t vq_cmd_w0;
-	opcode_info_t opcode;
 	buf_ptr_t caddr;
 	uint8_t *dptr;
 
@@ -854,8 +841,8 @@ cpt_ecpm_prep(struct rte_crypto_ecpm_op_param *ecpm,
 	/* Input buffer */
 	dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info));
 
-	p_align = ROUNDUP8(prime_len);
-	scalar_align = ROUNDUP8(ecpm->scalar.length);
+	p_align = RTE_ALIGN_CEIL(prime_len, 8);
+	scalar_align = RTE_ALIGN_CEIL(ecpm->scalar.length, 8);
 
 	/*
 	 * Set dlen = sum(ROUNDUP8(input point(x and y coordinates), prime,
@@ -880,11 +867,10 @@ cpt_ecpm_prep(struct rte_crypto_ecpm_op_param *ecpm,
 	dptr += p_align;
 
 	/* Setup opcodes */
-	opcode.s.major = CPT_MAJOR_OP_ECC;
-	opcode.s.minor = CPT_MINOR_OP_ECC_UMP;
+	vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_ECC;
+	vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_ECC_UMP;
 
 	/* GP op header */
-	vq_cmd_w0.s.opcode = opcode.flags;
 	vq_cmd_w0.s.param1 = curveid;
 	vq_cmd_w0.s.param2 = ecpm->scalar.length;
 	vq_cmd_w0.s.dlen = dlen;
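
Two patterns repeat throughout this patch. The first drops the local opcode_info_t staging variable: the major/minor opcode numbers are now written directly into the command word, which implies vq_cmd_word0_t was reworked to expose the opcode as a named sub-struct rather than a single packed field. A minimal, self-contained sketch of that idea follows; the constants, field widths, and byte order here are illustrative assumptions, not the verbatim CPT definitions.

#include <stdint.h>
#include <inttypes.h>
#include <stdio.h>

/* Hypothetical stand-ins for the real CPT opcode values. */
#define CPT_MAJOR_OP_MODEX 0x03
#define CPT_MINOR_OP_MODEX 0x01

typedef union {
	uint64_t u64;
	struct {
		struct {
			uint8_t minor;	/* byte order is an assumption */
			uint8_t major;
		} opcode;	/* addressable pair, no staging copy needed */
		uint16_t param1;
		uint16_t param2;
		uint16_t dlen;
	} s;
} vq_cmd_word0_sketch_t;

int main(void)
{
	vq_cmd_word0_sketch_t w0 = { .u64 = 0 };

	/* New idiom from the patch: write major/minor in place. */
	w0.s.opcode.major = CPT_MAJOR_OP_MODEX;
	w0.s.opcode.minor = CPT_MINOR_OP_MODEX;
	w0.s.param1 = 32;	/* e.g. mod_len */

	printf("word0 = 0x%016" PRIx64 "\n", w0.u64);
	return 0;
}

Writing the sub-fields in place removes a copy per request and keeps the opcode packing in exactly one spot (the union definition) instead of in every *_prep() routine.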
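
The second pattern replaces the driver-local ROUNDUP8() with the generic EAL macro RTE_ALIGN_CEIL(..., 8). For these buffer lengths the two are equivalent: both round up to the next multiple of 8. Below is a self-contained sketch checking that equivalence; the macros are local stand-ins so it compiles without DPDK headers, with RTE_ALIGN_CEIL mirroring the rte_common.h behavior for power-of-two alignments.

#include <stdio.h>
#include <stdint.h>
#include <inttypes.h>

/* Local stand-ins, not the verbatim DPDK definitions. */
#define ROUNDUP8(val)              (((val) + 7) & ~7u)
#define RTE_ALIGN_CEIL(val, align) \
	(((val) + ((align) - 1)) & ~(uint32_t)((align) - 1))

int main(void)
{
	uint32_t len;

	for (len = 0; len <= 64; len++) {
		if (ROUNDUP8(len) != RTE_ALIGN_CEIL(len, 8)) {
			printf("mismatch at %" PRIu32 "\n", len);
			return 1;
		}
	}
	printf("ROUNDUP8(x) == RTE_ALIGN_CEIL(x, 8) for 0..64\n");
	return 0;
}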
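
Finally, __hot becomes __rte_hot on the enqueue fast-path functions, tracking DPDK's move to namespaced attribute macros; on GCC and Clang both spellings resolve to the compiler's hot-function attribute. A stand-in sketch of the definition and its use, assuming the usual expansion:

/* Stand-in mirroring the expected rte_common.h expansion on GCC/Clang:
 * marks the function as fast-path so the compiler optimizes and places
 * it more aggressively. */
#define __rte_hot __attribute__((hot))

static inline int __rte_hot
fast_path_enqueue(int nb_ops)
{
	return nb_ops;	/* placeholder body for illustration */
}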