From: Konstantin Ananyev
Date: Tue, 2 Apr 2019 08:34:43 +0000 (+0100)
Subject: ipsec: de-duplicate crypto op prepare
X-Git-Url: http://git.droids-corp.org/?a=commitdiff_plain;h=3affe1bbcc3e500fda8f6f91016c8b152cdf0d49;p=dpdk.git

ipsec: de-duplicate crypto op prepare

For sym_crypto_op prepare, move the common code into separate helper functions.

Signed-off-by: Konstantin Ananyev
Acked-by: Akhil Goyal
---

diff --git a/lib/librte_ipsec/esp_inb.c b/lib/librte_ipsec/esp_inb.c
index 138ed04504..4e0e12a855 100644
--- a/lib/librte_ipsec/esp_inb.c
+++ b/lib/librte_ipsec/esp_inb.c
@@ -18,6 +18,40 @@ typedef uint16_t (*esp_inb_process_t)(const struct rte_ipsec_sa *sa,
 	struct rte_mbuf *mb[], uint32_t sqn[], uint32_t dr[], uint16_t num);

+/*
+ * helper function to fill crypto_sym op for cipher+auth algorithms.
+ * used by inb_cop_prepare(), see below.
+ */
+static inline void
+sop_ciph_auth_prepare(struct rte_crypto_sym_op *sop,
+	const struct rte_ipsec_sa *sa, const union sym_op_data *icv,
+	uint32_t pofs, uint32_t plen)
+{
+	sop->cipher.data.offset = pofs + sa->ctp.cipher.offset;
+	sop->cipher.data.length = plen - sa->ctp.cipher.length;
+	sop->auth.data.offset = pofs + sa->ctp.auth.offset;
+	sop->auth.data.length = plen - sa->ctp.auth.length;
+	sop->auth.digest.data = icv->va;
+	sop->auth.digest.phys_addr = icv->pa;
+}
+
+/*
+ * helper function to fill crypto_sym op for aead algorithms
+ * used by inb_cop_prepare(), see below.
+ */
+static inline void
+sop_aead_prepare(struct rte_crypto_sym_op *sop,
+	const struct rte_ipsec_sa *sa, const union sym_op_data *icv,
+	uint32_t pofs, uint32_t plen)
+{
+	sop->aead.data.offset = pofs + sa->ctp.cipher.offset;
+	sop->aead.data.length = plen - sa->ctp.cipher.length;
+	sop->aead.digest.data = icv->va;
+	sop->aead.digest.phys_addr = icv->pa;
+	sop->aead.aad.data = icv->va + sa->icv_len;
+	sop->aead.aad.phys_addr = icv->pa + sa->icv_len;
+}
+
 /*
  * setup crypto op and crypto sym op for ESP inbound packet.
  */
@@ -33,63 +67,39 @@ inb_cop_prepare(struct rte_crypto_op *cop,
 	uint32_t algo;

 	algo = sa->algo_type;
+	ivp = rte_pktmbuf_mtod_offset(mb, uint64_t *,
+		pofs + sizeof(struct esp_hdr));

 	/* fill sym op fields */
 	sop = cop->sym;

 	switch (algo) {
 	case ALGO_TYPE_AES_GCM:
-		sop->aead.data.offset = pofs + sa->ctp.cipher.offset;
-		sop->aead.data.length = plen - sa->ctp.cipher.length;
-		sop->aead.digest.data = icv->va;
-		sop->aead.digest.phys_addr = icv->pa;
-		sop->aead.aad.data = icv->va + sa->icv_len;
-		sop->aead.aad.phys_addr = icv->pa + sa->icv_len;
+		sop_aead_prepare(sop, sa, icv, pofs, plen);

 		/* fill AAD IV (located inside crypto op) */
 		gcm = rte_crypto_op_ctod_offset(cop, struct aead_gcm_iv *,
 			sa->iv_ofs);
-		ivp = rte_pktmbuf_mtod_offset(mb, uint64_t *,
-			pofs + sizeof(struct esp_hdr));
 		aead_gcm_iv_fill(gcm, ivp[0], sa->salt);
 		break;
 	case ALGO_TYPE_AES_CBC:
 	case ALGO_TYPE_3DES_CBC:
-		sop->cipher.data.offset = pofs + sa->ctp.cipher.offset;
-		sop->cipher.data.length = plen - sa->ctp.cipher.length;
-		sop->auth.data.offset = pofs + sa->ctp.auth.offset;
-		sop->auth.data.length = plen - sa->ctp.auth.length;
-		sop->auth.digest.data = icv->va;
-		sop->auth.digest.phys_addr = icv->pa;
+		sop_ciph_auth_prepare(sop, sa, icv, pofs, plen);

 		/* copy iv from the input packet to the cop */
 		ivc = rte_crypto_op_ctod_offset(cop, uint64_t *, sa->iv_ofs);
-		ivp = rte_pktmbuf_mtod_offset(mb, uint64_t *,
-			pofs + sizeof(struct esp_hdr));
 		copy_iv(ivc, ivp, sa->iv_len);
 		break;
 	case ALGO_TYPE_AES_CTR:
-		sop->cipher.data.offset = pofs + sa->ctp.cipher.offset;
-		sop->cipher.data.length = plen - sa->ctp.cipher.length;
-		sop->auth.data.offset = pofs + sa->ctp.auth.offset;
-		sop->auth.data.length = plen - sa->ctp.auth.length;
-		sop->auth.digest.data = icv->va;
-		sop->auth.digest.phys_addr = icv->pa;
+		sop_ciph_auth_prepare(sop, sa, icv, pofs, plen);

-		/* copy iv from the input packet to the cop */
+		/* fill CTR block (located inside crypto op) */
 		ctr = rte_crypto_op_ctod_offset(cop, struct aesctr_cnt_blk *,
 			sa->iv_ofs);
-		ivp = rte_pktmbuf_mtod_offset(mb, uint64_t *,
-			pofs + sizeof(struct esp_hdr));
 		aes_ctr_cnt_blk_fill(ctr, ivp[0], sa->salt);
 		break;
 	case ALGO_TYPE_NULL:
-		sop->cipher.data.offset = pofs + sa->ctp.cipher.offset;
-		sop->cipher.data.length = plen - sa->ctp.cipher.length;
-		sop->auth.data.offset = pofs + sa->ctp.auth.offset;
-		sop->auth.data.length = plen - sa->ctp.auth.length;
-		sop->auth.digest.data = icv->va;
-		sop->auth.digest.phys_addr = icv->pa;
+		sop_ciph_auth_prepare(sop, sa, icv, pofs, plen);
 		break;
 	}
 }
diff --git a/lib/librte_ipsec/esp_outb.c b/lib/librte_ipsec/esp_outb.c
index 09bfb86587..c798bc4c49 100644
--- a/lib/librte_ipsec/esp_outb.c
+++ b/lib/librte_ipsec/esp_outb.c
@@ -15,6 +15,41 @@
 #include "misc.h"
 #include "pad.h"

+
+/*
+ * helper function to fill crypto_sym op for cipher+auth algorithms.
+ * used by outb_cop_prepare(), see below.
+ */
+static inline void
+sop_ciph_auth_prepare(struct rte_crypto_sym_op *sop,
+	const struct rte_ipsec_sa *sa, const union sym_op_data *icv,
+	uint32_t pofs, uint32_t plen)
+{
+	sop->cipher.data.offset = sa->ctp.cipher.offset + pofs;
+	sop->cipher.data.length = sa->ctp.cipher.length + plen;
+	sop->auth.data.offset = sa->ctp.auth.offset + pofs;
+	sop->auth.data.length = sa->ctp.auth.length + plen;
+	sop->auth.digest.data = icv->va;
+	sop->auth.digest.phys_addr = icv->pa;
+}
+
+/*
+ * helper function to fill crypto_sym op for aead algorithms
+ * used by outb_cop_prepare(), see below.
+ */
+static inline void
+sop_aead_prepare(struct rte_crypto_sym_op *sop,
+	const struct rte_ipsec_sa *sa, const union sym_op_data *icv,
+	uint32_t pofs, uint32_t plen)
+{
+	sop->aead.data.offset = sa->ctp.cipher.offset + pofs;
+	sop->aead.data.length = sa->ctp.cipher.length + plen;
+	sop->aead.digest.data = icv->va;
+	sop->aead.digest.phys_addr = icv->pa;
+	sop->aead.aad.data = icv->va + sa->icv_len;
+	sop->aead.aad.phys_addr = icv->pa + sa->icv_len;
+}
+
 /*
  * setup crypto op and crypto sym op for ESP outbound packet.
  */
@@ -40,21 +75,11 @@ outb_cop_prepare(struct rte_crypto_op *cop,
 		/* Cipher-Auth (3DES-CBC *) case */
 	case ALGO_TYPE_NULL:
 		/* NULL case */
-		sop->cipher.data.offset = sa->ctp.cipher.offset + hlen;
-		sop->cipher.data.length = sa->ctp.cipher.length + plen;
-		sop->auth.data.offset = sa->ctp.auth.offset + hlen;
-		sop->auth.data.length = sa->ctp.auth.length + plen;
-		sop->auth.digest.data = icv->va;
-		sop->auth.digest.phys_addr = icv->pa;
+		sop_ciph_auth_prepare(sop, sa, icv, hlen, plen);
 		break;
 	case ALGO_TYPE_AES_GCM:
 		/* AEAD (AES_GCM) case */
-		sop->aead.data.offset = sa->ctp.cipher.offset + hlen;
-		sop->aead.data.length = sa->ctp.cipher.length + plen;
-		sop->aead.digest.data = icv->va;
-		sop->aead.digest.phys_addr = icv->pa;
-		sop->aead.aad.data = icv->va + sa->icv_len;
-		sop->aead.aad.phys_addr = icv->pa + sa->icv_len;
+		sop_aead_prepare(sop, sa, icv, hlen, plen);

 		/* fill AAD IV (located inside crypto op) */
 		gcm = rte_crypto_op_ctod_offset(cop, struct aead_gcm_iv *,
@@ -63,13 +88,9 @@ outb_cop_prepare(struct rte_crypto_op *cop,
 		break;
 	case ALGO_TYPE_AES_CTR:
 		/* Cipher-Auth (AES-CTR *) case */
-		sop->cipher.data.offset = sa->ctp.cipher.offset + hlen;
-		sop->cipher.data.length = sa->ctp.cipher.length + plen;
-		sop->auth.data.offset = sa->ctp.auth.offset + hlen;
-		sop->auth.data.length = sa->ctp.auth.length + plen;
-		sop->auth.digest.data = icv->va;
-		sop->auth.digest.phys_addr = icv->pa;
+		sop_ciph_auth_prepare(sop, sa, icv, hlen, plen);
+
+		/* fill CTR block (located inside crypto op) */
 		ctr = rte_crypto_op_ctod_offset(cop, struct aesctr_cnt_blk *,
 			sa->iv_ofs);
 		aes_ctr_cnt_blk_fill(ctr, ivp[0], sa->salt);
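
For context on where these prepare routines sit in the overall flow: the sop_*_prepare() helpers above are internal to librte_ipsec and are reached through the public rte_ipsec_pkt_crypto_prepare() call on the lookaside-crypto path. The sketch below is a minimal, hypothetical caller-side illustration of that path, not part of this patch; the wrapper names (ipsec_submit_burst/ipsec_complete_burst), BURST_SZ, dev_id/qp_id, and the omitted error handling and crypto-op mempool management are assumptions made for illustration only.

#include <rte_common.h>
#include <rte_mbuf.h>
#include <rte_crypto.h>
#include <rte_cryptodev.h>
#include <rte_ipsec.h>

#define BURST_SZ	32

/* submit side: fill crypto ops for one SA and hand them to the crypto PMD.
 * rte_ipsec_pkt_crypto_prepare() is where inb_cop_prepare()/outb_cop_prepare()
 * (and thus the sop_*_prepare() helpers above) run internally for
 * lookaside crypto sessions.
 */
static uint16_t
ipsec_submit_burst(const struct rte_ipsec_session *ss, uint8_t dev_id,
	uint16_t qp_id, struct rte_mbuf *mb[], struct rte_crypto_op *cop[],
	uint16_t num)
{
	uint16_t k;

	k = rte_ipsec_pkt_crypto_prepare(ss, mb, cop, num);

	/* real code would check k < num, drop/count packets that failed
	 * preparation, and retry ops not accepted by the device */
	return rte_cryptodev_enqueue_burst(dev_id, qp_id, cop, k);
}

/* completion side: collect finished ops and finalize ESP processing
 * (single-session case; a multi-SA path would group the ops first).
 */
static uint16_t
ipsec_complete_burst(const struct rte_ipsec_session *ss, uint8_t dev_id,
	uint16_t qp_id, struct rte_mbuf *mb[], uint16_t num)
{
	uint16_t i, n;
	struct rte_crypto_op *cop[BURST_SZ];

	n = rte_cryptodev_dequeue_burst(dev_id, qp_id, cop,
		RTE_MIN(num, BURST_SZ));

	/* recover the mbuf attached to each completed op; the ops themselves
	 * would normally be returned to their mempool after this loop */
	for (i = 0; i != n; i++)
		mb[i] = cop[i]->sym->m_src;

	return rte_ipsec_pkt_process(ss, mb, n);
}

Note also, from the diff itself, that the inbound and outbound helpers stay separate on purpose: the inbound versions subtract sa->ctp.*.length from the payload length while the outbound versions add it, so the duplication is folded per direction rather than shared across esp_inb.c and esp_outb.c.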