vq_cmd_word0_t vq_cmd_w0;
void *c_vaddr, *m_vaddr;
uint64_t c_dma, m_dma;
- opcode_info_t opcode;
ctx = params->ctx_buf.vaddr;
meta_p = &params->meta_buf;
data_len = AUTH_DLEN(d_lens);
/*GP op header */
- vq_cmd_w0.u64 = 0;
+ vq_cmd_w0.s.opcode.minor = 0;
vq_cmd_w0.s.param2 = ((uint16_t)hash_type << 8);
if (ctx->hmac) {
- opcode.s.major = CPT_MAJOR_OP_HMAC | CPT_DMA_MODE;
+ vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_HMAC | CPT_DMA_MODE;
vq_cmd_w0.s.param1 = key_len;
vq_cmd_w0.s.dlen = data_len + ROUNDUP8(key_len);
} else {
- opcode.s.major = CPT_MAJOR_OP_HASH | CPT_DMA_MODE;
+ vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_HASH | CPT_DMA_MODE;
vq_cmd_w0.s.param1 = 0;
vq_cmd_w0.s.dlen = data_len;
}
- opcode.s.minor = 0;
-
/* Null auth only case enters the if */
if (unlikely(!hash_type && !ctx->enc_cipher)) {
- opcode.s.major = CPT_MAJOR_OP_MISC;
+ vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_MISC;
/* Minor op is passthrough */
- opcode.s.minor = 0x03;
+ vq_cmd_w0.s.opcode.minor = 0x03;
/* Send out completion code only */
vq_cmd_w0.s.param2 = 0x1;
}
- vq_cmd_w0.s.opcode = opcode.flags;
-
/* DPTR has SG list */
in_buffer = m_vaddr;
dptr_dma = m_dma;
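The rewrite repeated throughout this patch only works if vq_cmd_word0_t itself now carries the major/minor opcode bytes that the removed opcode_info_t temporary used to hold. A minimal sketch of the assumed layout, inferred purely from the accesses in this diff rather than from the actual header (which may, for instance, order the fields differently on big-endian targets):

typedef union {
	uint64_t u64;
	struct {
		struct {
			uint8_t major;	/* written as vq_cmd_w0.s.opcode.major */
			uint8_t minor;	/* written as vq_cmd_w0.s.opcode.minor */
		} opcode;
		uint16_t param1;	/* op-specific parameter, e.g. lengths */
		uint16_t param2;	/* op-specific parameter */
		uint16_t dlen;		/* input data length */
	} s;
} vq_cmd_word0_t;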
vq_cmd_word0_t vq_cmd_w0;
void *c_vaddr;
uint64_t c_dma;
- opcode_info_t opcode;
meta_p = &fc_params->meta_buf;
m_vaddr = meta_p->vaddr;
}
/* Encryption */
- opcode.s.major = CPT_MAJOR_OP_FC;
- opcode.s.minor = 0;
+ vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_FC;
+ vq_cmd_w0.s.opcode.minor = 0;
if (hash_type == GMAC_TYPE) {
encr_offset = 0;
}
/* GP op header */
- vq_cmd_w0.u64 = 0;
vq_cmd_w0.s.param1 = encr_data_len;
vq_cmd_w0.s.param2 = auth_data_len;
/*
vq_cmd_w0.s.dlen = inputlen + OFF_CTRL_LEN;
- vq_cmd_w0.s.opcode = opcode.flags;
-
if (likely(iv_len)) {
uint64_t *dest = (uint64_t *)((uint8_t *)offset_vaddr
+ OFF_CTRL_LEN);
m_vaddr = (uint8_t *)m_vaddr + size;
m_dma += size;
- opcode.s.major |= CPT_DMA_MODE;
-
- vq_cmd_w0.s.opcode = opcode.flags;
+ vq_cmd_w0.s.opcode.major |= CPT_DMA_MODE;
if (likely(iv_len)) {
uint64_t *dest = (uint64_t *)((uint8_t *)offset_vaddr
uint32_t passthrough_len = 0;
void *m_vaddr, *offset_vaddr;
uint64_t m_dma, offset_dma;
- opcode_info_t opcode;
vq_cmd_word0_t vq_cmd_w0;
void *c_vaddr;
uint64_t c_dma;
m_dma += size;
/* Decryption */
- opcode.s.major = CPT_MAJOR_OP_FC;
- opcode.s.minor = 1;
+ vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_FC;
+ vq_cmd_w0.s.opcode.minor = 1;
if (hash_type == GMAC_TYPE) {
encr_offset = 0;
outputlen = enc_dlen;
}
- vq_cmd_w0.u64 = 0;
vq_cmd_w0.s.param1 = encr_data_len;
vq_cmd_w0.s.param2 = auth_data_len;
vq_cmd_w0.s.dlen = inputlen + OFF_CTRL_LEN;
- vq_cmd_w0.s.opcode = opcode.flags;
-
if (likely(iv_len)) {
uint64_t *dest = (uint64_t *)((uint8_t *)offset_vaddr +
OFF_CTRL_LEN);
m_vaddr = (uint8_t *)m_vaddr + size;
m_dma += size;
- opcode.s.major |= CPT_DMA_MODE;
-
- vq_cmd_w0.s.opcode = opcode.flags;
+ vq_cmd_w0.s.opcode.major |= CPT_DMA_MODE;
if (likely(iv_len)) {
uint64_t *dest = (uint64_t *)((uint8_t *)offset_vaddr +
uint64_t *offset_vaddr, offset_dma;
uint32_t *iv_s, iv[4];
vq_cmd_word0_t vq_cmd_w0;
- opcode_info_t opcode;
buf_p = &params->meta_buf;
m_vaddr = buf_p->vaddr;
m_vaddr = (uint8_t *)m_vaddr + size;
m_dma += size;
- opcode.s.major = CPT_MAJOR_OP_ZUC_SNOW3G;
+ vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_ZUC_SNOW3G;
/* indicates CPTR ctx, operation type, KEY & IV mode from DPTR */
- opcode.s.minor = ((1 << 7) | (snow3g << 5) | (0 << 4) |
+ vq_cmd_w0.s.opcode.minor = ((1 << 7) | (snow3g << 5) | (0 << 4) |
(0 << 3) | (flags & 0x7));
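(For illustration, with the hypothetical values snow3g = 1 and flags = 0x1, the minor opcode above evaluates to (1 << 7) | (1 << 5) | (0 << 4) | (0 << 3) | (0x1 & 0x7) = 0x80 | 0x20 | 0x01 = 0xa1.)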
if (flags == 0x1) {
/*
* GP op header, lengths are expected in bits.
*/
- vq_cmd_w0.u64 = 0;
vq_cmd_w0.s.param1 = encr_data_len;
vq_cmd_w0.s.param2 = auth_data_len;
vq_cmd_w0.s.dlen = inputlen + OFF_CTRL_LEN;
- vq_cmd_w0.s.opcode = opcode.flags;
-
if (likely(iv_len)) {
uint32_t *iv_d = (uint32_t *)((uint8_t *)offset_vaddr
+ OFF_CTRL_LEN);
m_vaddr = (uint8_t *)m_vaddr + OFF_CTRL_LEN + iv_len;
m_dma += OFF_CTRL_LEN + iv_len;
- opcode.s.major |= CPT_DMA_MODE;
-
- vq_cmd_w0.s.opcode = opcode.flags;
+ vq_cmd_w0.s.opcode.major |= CPT_DMA_MODE;
/* DPTR has SG list */
in_buffer = m_vaddr;
uint64_t *offset_vaddr, offset_dma;
uint32_t *iv_s, iv[4], j;
vq_cmd_word0_t vq_cmd_w0;
- opcode_info_t opcode;
buf_p = &params->meta_buf;
m_vaddr = buf_p->vaddr;
m_vaddr = (uint8_t *)m_vaddr + size;
m_dma += size;
- opcode.s.major = CPT_MAJOR_OP_ZUC_SNOW3G;
+ vq_cmd_w0.u64 = 0;
+ vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_ZUC_SNOW3G;
/* indicates CPTR ctx, operation type, KEY & IV mode from DPTR */
- opcode.s.minor = ((1 << 7) | (snow3g << 5) | (0 << 4) |
+ vq_cmd_w0.s.opcode.minor = ((1 << 7) | (snow3g << 5) | (0 << 4) |
(0 << 3) | (flags & 0x7));
/* consider iv len */
/*
* GP op header, lengths are expected in bits.
*/
- vq_cmd_w0.u64 = 0;
vq_cmd_w0.s.param1 = encr_data_len;
/*
vq_cmd_w0.s.dlen = inputlen + OFF_CTRL_LEN;
- vq_cmd_w0.s.opcode = opcode.flags;
-
if (likely(iv_len)) {
uint32_t *iv_d = (uint32_t *)((uint8_t *)offset_vaddr
+ OFF_CTRL_LEN);
m_vaddr = (uint8_t *)m_vaddr + OFF_CTRL_LEN + iv_len;
m_dma += OFF_CTRL_LEN + iv_len;
- opcode.s.major |= CPT_DMA_MODE;
-
- vq_cmd_w0.s.opcode = opcode.flags;
+ vq_cmd_w0.s.opcode.major |= CPT_DMA_MODE;
/* DPTR has SG list */
in_buffer = m_vaddr;
uint64_t m_dma, c_dma;
uint64_t *offset_vaddr, offset_dma;
vq_cmd_word0_t vq_cmd_w0;
- opcode_info_t opcode;
uint8_t *in_buffer;
uint32_t g_size_bytes, s_size_bytes;
uint64_t dptr_dma, rptr_dma;
m_vaddr = (uint8_t *)m_vaddr + size;
m_dma += size;
- opcode.s.major = CPT_MAJOR_OP_KASUMI | CPT_DMA_MODE;
+ vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_KASUMI | CPT_DMA_MODE;
/* indicates ECB/CBC, direction, ctx from cptr, iv from dptr */
- opcode.s.minor = ((1 << 6) | (cpt_ctx->k_ecb << 5) |
+ vq_cmd_w0.s.opcode.minor = ((1 << 6) | (cpt_ctx->k_ecb << 5) |
(dir << 4) | (0 << 3) | (flags & 0x7));
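(As a worked example with the hypothetical values cpt_ctx->k_ecb = 0, dir = 1 and flags = 0x0, the minor opcode above is (1 << 6) | (0 << 5) | (1 << 4) | (0 << 3) | 0x0 = 0x40 | 0x10 = 0x50.)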
/*
* GP op header, lengths are expected in bits.
*/
- vq_cmd_w0.u64 = 0;
vq_cmd_w0.s.param1 = encr_data_len;
vq_cmd_w0.s.param2 = auth_data_len;
- vq_cmd_w0.s.opcode = opcode.flags;
/* consider iv len */
if (flags == 0x0) {
uint64_t m_dma, c_dma;
uint64_t *offset_vaddr, offset_dma;
vq_cmd_word0_t vq_cmd_w0;
- opcode_info_t opcode;
uint8_t *in_buffer;
uint32_t g_size_bytes, s_size_bytes;
uint64_t dptr_dma, rptr_dma;
m_vaddr = (uint8_t *)m_vaddr + size;
m_dma += size;
- opcode.s.major = CPT_MAJOR_OP_KASUMI | CPT_DMA_MODE;
+ vq_cmd_w0.u64 = 0;
+ vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_KASUMI | CPT_DMA_MODE;
/* indicates ECB/CBC, direction, ctx from cptr, iv from dptr */
- opcode.s.minor = ((1 << 6) | (cpt_ctx->k_ecb << 5) |
+ vq_cmd_w0.s.opcode.minor = ((1 << 6) | (cpt_ctx->k_ecb << 5) |
(dir << 4) | (0 << 3) | (flags & 0x7));
/*
* GP op header, lengths are expected in bits.
*/
- vq_cmd_w0.u64 = 0;
vq_cmd_w0.s.param1 = encr_data_len;
- vq_cmd_w0.s.opcode = opcode.flags;
/* consider iv len */
encr_offset += iv_len;
struct rte_crypto_op **op;
vq_cmd_word0_t vq_cmd_w0;
uint64_t total_key_len;
- opcode_info_t opcode;
uint32_t dlen, rlen;
uint32_t base_len;
buf_ptr_t caddr;
rlen = mod_len;
/* Setup opcodes */
- opcode.s.major = CPT_MAJOR_OP_MODEX;
- opcode.s.minor = CPT_MINOR_OP_MODEX;
- vq_cmd_w0.s.opcode = opcode.flags;
+ vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_MODEX;
+ vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_MODEX;
/* GP op header */
vq_cmd_w0.s.param1 = mod_len;
struct rte_crypto_op **op;
vq_cmd_word0_t vq_cmd_w0;
uint64_t total_key_len;
- opcode_info_t opcode;
uint32_t dlen, rlen;
uint32_t in_size;
buf_ptr_t caddr;
if (rsa_op.pad == RTE_CRYPTO_RSA_PADDING_NONE) {
/* Use mod_exp operation for no_padding type */
- opcode.s.minor = CPT_MINOR_OP_MODEX;
+ vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_MODEX;
vq_cmd_w0.s.param2 = exp_len;
} else {
if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_ENCRYPT) {
- opcode.s.minor = CPT_MINOR_OP_PKCS_ENC;
+ vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_PKCS_ENC;
/* Public key encrypt, use BT2*/
vq_cmd_w0.s.param2 = CPT_BLOCK_TYPE2 |
((uint16_t)(exp_len) << 1);
} else if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_VERIFY) {
- opcode.s.minor = CPT_MINOR_OP_PKCS_DEC;
+ vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_PKCS_DEC;
/* Public key decrypt, use BT1 */
vq_cmd_w0.s.param2 = CPT_BLOCK_TYPE1;
/* + 2 for decrypted len */
}
}
- /* Setup opcodes */
- opcode.s.major = CPT_MAJOR_OP_MODEX;
- vq_cmd_w0.s.opcode = opcode.flags;
+ vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_MODEX;
/* GP op header */
vq_cmd_w0.s.param1 = mod_len;
struct rte_crypto_op **op;
vq_cmd_word0_t vq_cmd_w0;
uint64_t total_key_len;
- opcode_info_t opcode;
uint32_t dlen, rlen;
uint32_t in_size;
buf_ptr_t caddr;
if (rsa_op.pad == RTE_CRYPTO_RSA_PADDING_NONE) {
/*Use mod_exp operation for no_padding type */
- opcode.s.minor = CPT_MINOR_OP_MODEX_CRT;
+ vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_MODEX_CRT;
} else {
if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_SIGN) {
- opcode.s.minor = CPT_MINOR_OP_PKCS_ENC_CRT;
+ vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_PKCS_ENC_CRT;
/* Private encrypt, use BT1 */
vq_cmd_w0.s.param2 = CPT_BLOCK_TYPE1;
} else if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_DECRYPT) {
- opcode.s.minor = CPT_MINOR_OP_PKCS_DEC_CRT;
+ vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_PKCS_DEC_CRT;
/* Private decrypt, use BT2 */
vq_cmd_w0.s.param2 = CPT_BLOCK_TYPE2;
/* + 2 for decrypted len */
}
}
- /* Setup opcodes */
- opcode.s.major = CPT_MAJOR_OP_MODEX;
- vq_cmd_w0.s.opcode = opcode.flags;
+ vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_MODEX;
/* GP op header */
vq_cmd_w0.s.param1 = mod_len;
uint16_t order_len, prime_len;
uint16_t o_offset, pk_offset;
vq_cmd_word0_t vq_cmd_w0;
- opcode_info_t opcode;
uint16_t rlen, dlen;
buf_ptr_t caddr;
uint8_t *dptr;
rlen = 2 * p_align;
/* Setup opcodes */
- opcode.s.major = CPT_MAJOR_OP_ECDSA;
- opcode.s.minor = CPT_MINOR_OP_ECDSA_SIGN;
- vq_cmd_w0.s.opcode = opcode.flags;
+ vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_ECDSA;
+ vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_ECDSA_SIGN;
/* GP op header */
vq_cmd_w0.s.param1 = curveid | (message_len << 8);
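(A worked example of this packing, with hypothetical values curveid = 2 and message_len = 32: param1 = 2 | (32 << 8) = 0x2002, i.e. the curve id occupies the low byte of param1 and the message length the bits above it.)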
uint16_t qx_offset, qy_offset;
uint16_t p_align, m_align;
vq_cmd_word0_t vq_cmd_w0;
- opcode_info_t opcode;
buf_ptr_t caddr;
uint16_t dlen;
uint8_t *dptr;
dptr += p_align;
/* Setup opcodes */
- opcode.s.major = CPT_MAJOR_OP_ECDSA;
- opcode.s.minor = CPT_MINOR_OP_ECDSA_VERIFY;
- vq_cmd_w0.s.opcode = opcode.flags;
+ vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_ECDSA;
+ vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_ECDSA_VERIFY;
/* GP op header */
vq_cmd_w0.s.param1 = curveid | (message_len << 8);
uint16_t dlen, rlen, prime_len;
uint16_t x1_offset, y1_offset;
vq_cmd_word0_t vq_cmd_w0;
- opcode_info_t opcode;
buf_ptr_t caddr;
uint8_t *dptr;
dptr += p_align;
/* Setup opcodes */
- opcode.s.major = CPT_MAJOR_OP_ECC;
- opcode.s.minor = CPT_MINOR_OP_ECC_UMP;
+ vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_ECC;
+ vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_ECC_UMP;
/* GP op header */
- vq_cmd_w0.s.opcode = opcode.flags;
vq_cmd_w0.s.param1 = curveid;
vq_cmd_w0.s.param2 = ecpm->scalar.length;
vq_cmd_w0.s.dlen = dlen;