X-Git-Url: http://git.droids-corp.org/?a=blobdiff_plain;f=drivers%2Fcommon%2Fcpt%2Fcpt_ucode.h;h=763355fb1f03d6fa60b9cc6ab7b149faf04313f1;hb=f0f5d844d138;hp=34ccd08a40a802296245ae68407d0b89f73bb8e6;hpb=caeba5062c39abe79293a05fafc045a9fdb70e51;p=dpdk.git

diff --git a/drivers/common/cpt/cpt_ucode.h b/drivers/common/cpt/cpt_ucode.h
index 34ccd08a40..763355fb1f 100644
--- a/drivers/common/cpt/cpt_ucode.h
+++ b/drivers/common/cpt/cpt_ucode.h
@@ -77,6 +77,9 @@ cpt_fc_ciph_set_type(cipher_type_t type, struct cpt_ctx *ctx, uint16_t key_len)
 			return -1;
 		fc_type = FC_GEN;
 		break;
+	case CHACHA20:
+		fc_type = FC_GEN;
+		break;
 	case AES_XTS:
 		key_len = key_len / 2;
 		if (unlikely(key_len == CPT_BYTE_24)) {
@@ -229,6 +232,7 @@ cpt_fc_ciph_set_key(void *ctx, cipher_type_t type, const uint8_t *key,
 	case AES_ECB:
 	case AES_CFB:
 	case AES_CTR:
+	case CHACHA20:
 		cpt_fc_ciph_set_key_set_aes_key_type(fctx, key_len);
 		break;
 	case AES_GCM:
@@ -682,9 +686,6 @@ cpt_enc_hmac_prep(uint32_t flags,
 	m_vaddr = (uint8_t *)m_vaddr + size;
 	m_dma += size;
 
-	if (hash_type == GMAC_TYPE)
-		encr_data_len = 0;
-
 	if (unlikely(!(flags & VALID_IV_BUF))) {
 		iv_len = 0;
 		iv_offset = ENCR_IV_OFFSET(d_offs);
@@ -716,6 +717,11 @@ cpt_enc_hmac_prep(uint32_t flags,
 	opcode.s.major = CPT_MAJOR_OP_FC;
 	opcode.s.minor = 0;
 
+	if (hash_type == GMAC_TYPE) {
+		encr_offset = 0;
+		encr_data_len = 0;
+	}
+
 	auth_dlen = auth_offset + auth_data_len;
 	enc_dlen = encr_data_len + encr_offset;
 	if (unlikely(encr_data_len & 0xf)) {
@@ -726,11 +732,6 @@ cpt_enc_hmac_prep(uint32_t flags,
 		enc_dlen = ROUNDUP16(encr_data_len) + encr_offset;
 	}
 
-	if (unlikely(hash_type == GMAC_TYPE)) {
-		encr_offset = auth_dlen;
-		enc_dlen = 0;
-	}
-
 	if (unlikely(auth_dlen > enc_dlen)) {
 		inputlen = auth_dlen;
 		outputlen = auth_dlen + mac_len;
@@ -1033,9 +1034,6 @@ cpt_dec_hmac_prep(uint32_t flags,
 	hash_type = cpt_ctx->hash_type;
 	mac_len = cpt_ctx->mac_len;
 
-	if (hash_type == GMAC_TYPE)
-		encr_data_len = 0;
-
 	if (unlikely(!(flags & VALID_IV_BUF))) {
 		iv_len = 0;
 		iv_offset = ENCR_IV_OFFSET(d_offs);
@@ -1092,6 +1090,11 @@ cpt_dec_hmac_prep(uint32_t flags,
 	opcode.s.major = CPT_MAJOR_OP_FC;
 	opcode.s.minor = 1;
 
+	if (hash_type == GMAC_TYPE) {
+		encr_offset = 0;
+		encr_data_len = 0;
+	}
+
 	enc_dlen = encr_offset + encr_data_len;
 	auth_dlen = auth_offset + auth_data_len;
 
@@ -1103,9 +1106,6 @@ cpt_dec_hmac_prep(uint32_t flags,
 		outputlen = enc_dlen;
 	}
 
-	if (hash_type == GMAC_TYPE)
-		encr_offset = inputlen;
-
 	vq_cmd_w0.u64 = 0;
 	vq_cmd_w0.s.param1 = encr_data_len;
 	vq_cmd_w0.s.param2 = auth_data_len;
@@ -2543,16 +2543,14 @@ fill_sess_aead(struct rte_crypto_sym_xform *xform,
 	aead_form = &xform->aead;
 	void *ctx = SESS_PRIV(sess);
 
-	if (aead_form->op == RTE_CRYPTO_AEAD_OP_ENCRYPT &&
-	    aead_form->algo == RTE_CRYPTO_AEAD_AES_GCM) {
+	if (aead_form->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
 		sess->cpt_op |= CPT_OP_CIPHER_ENCRYPT;
 		sess->cpt_op |= CPT_OP_AUTH_GENERATE;
-	} else if (aead_form->op == RTE_CRYPTO_AEAD_OP_DECRYPT &&
-		   aead_form->algo == RTE_CRYPTO_AEAD_AES_GCM) {
+	} else if (aead_form->op == RTE_CRYPTO_AEAD_OP_DECRYPT) {
 		sess->cpt_op |= CPT_OP_CIPHER_DECRYPT;
 		sess->cpt_op |= CPT_OP_AUTH_VERIFY;
 	} else {
-		CPT_LOG_DP_ERR("Unknown cipher operation\n");
+		CPT_LOG_DP_ERR("Unknown aead operation\n");
 		return -1;
 	}
 	switch (aead_form->algo) {
@@ -2565,6 +2563,12 @@ fill_sess_aead(struct rte_crypto_sym_xform *xform,
 		CPT_LOG_DP_ERR("Crypto: Unsupported cipher algo %u",
 			       aead_form->algo);
 		return -1;
+	case RTE_CRYPTO_AEAD_CHACHA20_POLY1305:
+		enc_type = CHACHA20;
+		auth_type = POLY1305;
+		cipher_key_len = 32;
+		sess->chacha_poly = 1;
+		break;
 	default:
 		CPT_LOG_DP_ERR("Crypto: Undefined cipher algo %u specified",
 			       aead_form->algo);
 		return -1;
@@ -2858,7 +2862,7 @@ alloc_op_meta(struct rte_mbuf *m_src,
 		tailroom = rte_pktmbuf_tailroom(m_src);
 		if (likely(tailroom > len + 8)) {
 			mdata = (uint8_t *)m_src->buf_addr + m_src->buf_len;
-			mphys = m_src->buf_physaddr + m_src->buf_len;
+			mphys = m_src->buf_iova + m_src->buf_len;
 			mdata -= len;
 			mphys -= len;
 			buf->vaddr = mdata;
@@ -2914,7 +2918,7 @@ prepare_iov_from_pkt(struct rte_mbuf *pkt,
 
 	if (!start_offset) {
 		seg_data = rte_pktmbuf_mtod(pkt, void *);
-		seg_phys = rte_pktmbuf_mtophys(pkt);
+		seg_phys = rte_pktmbuf_iova(pkt);
 		seg_size = pkt->data_len;
 	} else {
 		while (start_offset >= pkt->data_len) {
@@ -2923,7 +2927,7 @@ prepare_iov_from_pkt(struct rte_mbuf *pkt,
 		}
 
 		seg_data = rte_pktmbuf_mtod_offset(pkt, void *, start_offset);
-		seg_phys = rte_pktmbuf_mtophys_offset(pkt, start_offset);
+		seg_phys = rte_pktmbuf_iova_offset(pkt, start_offset);
 		seg_size = pkt->data_len - start_offset;
 		if (!seg_size)
 			return 1;
@@ -2938,7 +2942,7 @@ prepare_iov_from_pkt(struct rte_mbuf *pkt,
 
 	while (unlikely(pkt != NULL)) {
 		seg_data = rte_pktmbuf_mtod(pkt, void *);
-		seg_phys = rte_pktmbuf_mtophys(pkt);
+		seg_phys = rte_pktmbuf_iova(pkt);
 		seg_size = pkt->data_len;
 		if (!seg_size)
 			break;
@@ -2968,7 +2972,7 @@ prepare_iov_from_pkt_inplace(struct rte_mbuf *pkt,
 	iov_ptr_t *iovec;
 
 	seg_data = rte_pktmbuf_mtod(pkt, void *);
-	seg_phys = rte_pktmbuf_mtophys(pkt);
+	seg_phys = rte_pktmbuf_iova(pkt);
 	seg_size = pkt->data_len;
 
 	/* first seg */
@@ -2997,7 +3001,7 @@ prepare_iov_from_pkt_inplace(struct rte_mbuf *pkt,
 
 	while (unlikely(pkt != NULL)) {
 		seg_data = rte_pktmbuf_mtod(pkt, void *);
-		seg_phys = rte_pktmbuf_mtophys(pkt);
+		seg_phys = rte_pktmbuf_iova(pkt);
 		seg_size = pkt->data_len;
 
 		if (!seg_size)
@@ -3067,7 +3071,7 @@ fill_fc_params(struct rte_crypto_op *cop,
 	m_src = sym_op->m_src;
 	m_dst = sym_op->m_dst;
 
-	if (sess_misc->aes_gcm) {
+	if (sess_misc->aes_gcm || sess_misc->chacha_poly) {
 		uint8_t *salt;
 		uint8_t *aad_data;
 		uint16_t aad_len;
@@ -3459,7 +3463,7 @@ fill_digest_params(struct rte_crypto_op *cop,
 			params.mac_buf.vaddr =
 				rte_pktmbuf_mtod_offset(m_dst, void *, off);
 			params.mac_buf.dma_addr =
-				rte_pktmbuf_mtophys_offset(m_dst, off);
+				rte_pktmbuf_iova_offset(m_dst, off);
 			params.mac_buf.size = mac_len;
 		}
 	} else {
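For context, a minimal sketch (not part of the patch) of the rte_cryptodev AEAD transform an application could submit to reach the new RTE_CRYPTO_AEAD_CHACHA20_POLY1305 case in fill_sess_aead() above. The key bytes, IV offset, and AAD length below are illustrative assumptions, not values taken from the patch; only the 32-byte key length, 12-byte nonce, and 16-byte tag follow from the algorithm itself.

/* Illustrative only: a transform that would exercise the new
 * CHACHA20/POLY1305 session path added by this patch. */
#include <rte_crypto.h>

static const uint8_t cc20_key[32];	/* placeholder 256-bit key */

struct rte_crypto_sym_xform cc20p1305_xform = {
	.next = NULL,
	.type = RTE_CRYPTO_SYM_XFORM_AEAD,
	.aead = {
		/* ENCRYPT maps to CPT_OP_CIPHER_ENCRYPT | CPT_OP_AUTH_GENERATE
		 * in fill_sess_aead() */
		.op = RTE_CRYPTO_AEAD_OP_ENCRYPT,
		.algo = RTE_CRYPTO_AEAD_CHACHA20_POLY1305,
		/* 32 bytes, matching cipher_key_len in the patch */
		.key = { .data = cc20_key, .length = sizeof(cc20_key) },
		/* 12-byte nonce carried in the op's private area;
		 * the offset here is an app-chosen assumption */
		.iv = { .offset = sizeof(struct rte_crypto_op) +
				  sizeof(struct rte_crypto_sym_op),
			.length = 12 },
		.digest_length = 16,	/* Poly1305 tag */
		.aad_length = 0,	/* assumption for this example */
	},
};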