From: Ruifeng Wang
Date: Thu, 14 Jan 2021 06:59:22 +0000 (+0000)
Subject: lpm: fix vector IPv4 lookup
X-Git-Url: http://git.droids-corp.org/?a=commitdiff_plain;h=5702b7bf1ce509d2f40d732ba4e2af5b1269e0d8;p=dpdk.git

lpm: fix vector IPv4 lookup

rte_lpm_lookupx4 could return a wrong next hop when more than 256 tbl8
groups are created. This is caused by an incorrect type cast of the tbl8
group index stored in the tbl24 entry: the cast to uint8_t truncates the
group index, so the wrong tbl8 group is searched.

Fix the issue by applying the proper mask to the tbl24 entry to extract
the tbl8 group index.

Fixes: dc81ebbacaeb ("lpm: extend IPv4 next hop field")
Fixes: cbc2f1dccfba ("lpm/arm: support NEON")
Fixes: d2cc7959342b ("lpm: add AltiVec for ppc64")
Cc: stable@dpdk.org

Signed-off-by: Ruifeng Wang
Tested-by: David Christensen
Acked-by: Vladimir Medvedkin
---

diff --git a/lib/librte_lpm/rte_lpm_altivec.h b/lib/librte_lpm/rte_lpm_altivec.h
index 228c41b38e..4fbc1b595d 100644
--- a/lib/librte_lpm/rte_lpm_altivec.h
+++ b/lib/librte_lpm/rte_lpm_altivec.h
@@ -88,28 +88,28 @@ rte_lpm_lookupx4(const struct rte_lpm *lpm, xmm_t ip, uint32_t hop[4],
 	if (unlikely((pt & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
 			RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
 		i8.u32[0] = i8.u32[0] +
-			(uint8_t)tbl[0] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+			(tbl[0] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
 		ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[0]];
 		tbl[0] = *ptbl;
 	}
 	if (unlikely((pt >> 32 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
 			RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
 		i8.u32[1] = i8.u32[1] +
-			(uint8_t)tbl[1] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+			(tbl[1] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
 		ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[1]];
 		tbl[1] = *ptbl;
 	}
 	if (unlikely((pt2 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
 			RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
 		i8.u32[2] = i8.u32[2] +
-			(uint8_t)tbl[2] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+			(tbl[2] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
 		ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[2]];
 		tbl[2] = *ptbl;
 	}
 	if (unlikely((pt2 >> 32 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
 			RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
 		i8.u32[3] = i8.u32[3] +
-			(uint8_t)tbl[3] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+			(tbl[3] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
 		ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[3]];
 		tbl[3] = *ptbl;
 	}
diff --git a/lib/librte_lpm/rte_lpm_neon.h b/lib/librte_lpm/rte_lpm_neon.h
index 6c131d3125..4642a866f1 100644
--- a/lib/librte_lpm/rte_lpm_neon.h
+++ b/lib/librte_lpm/rte_lpm_neon.h
@@ -81,28 +81,28 @@ rte_lpm_lookupx4(const struct rte_lpm *lpm, xmm_t ip, uint32_t hop[4],
 	if (unlikely((pt & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
 			RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
 		i8.u32[0] = i8.u32[0] +
-			(uint8_t)tbl[0] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+			(tbl[0] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
 		ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[0]];
 		tbl[0] = *ptbl;
 	}
 	if (unlikely((pt >> 32 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
 			RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
 		i8.u32[1] = i8.u32[1] +
-			(uint8_t)tbl[1] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+			(tbl[1] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
 		ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[1]];
 		tbl[1] = *ptbl;
 	}
 	if (unlikely((pt2 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
 			RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
 		i8.u32[2] = i8.u32[2] +
-			(uint8_t)tbl[2] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+			(tbl[2] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
 		ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[2]];
 		tbl[2] = *ptbl;
 	}
 	if (unlikely((pt2 >> 32 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
 			RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
 		i8.u32[3] = i8.u32[3] +
-			(uint8_t)tbl[3] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+			(tbl[3] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
 		ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[3]];
 		tbl[3] = *ptbl;
 	}
diff --git a/lib/librte_lpm/rte_lpm_sse.h b/lib/librte_lpm/rte_lpm_sse.h
index 44770b6ff8..eaa863c522 100644
--- a/lib/librte_lpm/rte_lpm_sse.h
+++ b/lib/librte_lpm/rte_lpm_sse.h
@@ -82,28 +82,28 @@ rte_lpm_lookupx4(const struct rte_lpm *lpm, xmm_t ip, uint32_t hop[4],
 	if (unlikely((pt & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
 			RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
 		i8.u32[0] = i8.u32[0] +
-			(uint8_t)tbl[0] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+			(tbl[0] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
 		ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[0]];
 		tbl[0] = *ptbl;
 	}
 	if (unlikely((pt >> 32 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
 			RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
 		i8.u32[1] = i8.u32[1] +
-			(uint8_t)tbl[1] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+			(tbl[1] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
 		ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[1]];
 		tbl[1] = *ptbl;
 	}
 	if (unlikely((pt2 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
 			RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
 		i8.u32[2] = i8.u32[2] +
-			(uint8_t)tbl[2] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+			(tbl[2] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
 		ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[2]];
 		tbl[2] = *ptbl;
 	}
 	if (unlikely((pt2 >> 32 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
 			RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
 		i8.u32[3] = i8.u32[3] +
-			(uint8_t)tbl[3] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+			(tbl[3] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
 		ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[3]];
 		tbl[3] = *ptbl;
 	}
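
Note on the failure mode: a tbl24 entry packs the tbl8 group index into
its low 24 bits, so a cast to uint8_t silently drops everything above
group 255. The standalone sketch below (hypothetical constants that only
mirror the DPDK entry layout; it is not library code) reproduces the
truncation and shows the mask-based extraction used in this patch:

	#include <stdint.h>
	#include <stdio.h>

	/* Hypothetical constants mirroring the DPDK layout: the low 24 bits
	 * of a tbl24 entry hold the tbl8 group index, the top byte holds
	 * flag bits; each tbl8 group has 256 entries. */
	#define TBL8_GROUP_NUM_ENTRIES	256
	#define TBL8_GROUP_INDEX_MASK	0x00FFFFFFu

	int main(void)
	{
		/* An extended tbl24 entry pointing at group 300 (> 255),
		 * with flag bits set in the upper byte. */
		uint32_t entry = 0x03000000u | 300;

		/* Buggy extraction: the uint8_t cast keeps only the low
		 * 8 bits, so group 300 becomes 300 % 256 = 44. */
		uint32_t bad = (uint8_t)entry * TBL8_GROUP_NUM_ENTRIES;

		/* Fixed extraction: mask off the flag bits and keep the
		 * full 24-bit group index. */
		uint32_t good = (entry & TBL8_GROUP_INDEX_MASK) *
				TBL8_GROUP_NUM_ENTRIES;

		printf("buggy offset %u -> group %u\n", bad, bad / 256);
		printf("fixed offset %u -> group %u\n", good, good / 256);
		return 0;
	}

With more than 256 tbl8 groups allocated, the truncated offset indexes
into an unrelated group, which is exactly the wrong-next-hop symptom
described above.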