git.droids-corp.org - dpdk.git/commitdiff
lpm: fix vector IPv4 lookup
author     Ruifeng Wang <ruifeng.wang@arm.com>
           Thu, 14 Jan 2021 06:59:22 +0000 (06:59 +0000)
committer  David Marchand <david.marchand@redhat.com>
           Thu, 14 Jan 2021 13:19:57 +0000 (14:19 +0100)
rte_lpm_lookupx4 could return the wrong next hop when more than 256 tbl8
groups are created. This is caused by an incorrect type cast of the tbl8
group index stored in the tbl24 entry. The cast truncates the group
index, so the wrong tbl8 group is searched.

The issue is fixed by applying the proper mask to the tbl24 entry to
extract the tbl8 group index.
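
A minimal standalone sketch of the truncation (illustrative only, not
DPDK code; the constant names are made up, while the 24-bit index field
and 256-entry tbl8 groups are taken from the description above):

#include <stdint.h>
#include <stdio.h>

#define TBL8_GROUP_NUM_ENTRIES	256u        /* entries per tbl8 group */
#define GROUP_IDX_MASK		0x00FFFFFFu /* low 24 bits of a tbl24 entry */

int main(void)
{
	/* Suppose the tbl24 entry carries tbl8 group index 300 (> 255). */
	uint32_t entry = 300;

	/* Buggy: the uint8_t cast keeps only the low 8 bits (300 % 256 = 44). */
	uint32_t bad = (uint8_t)entry * TBL8_GROUP_NUM_ENTRIES;

	/* Fixed: the mask keeps the full 24-bit index and drops the flag bits. */
	uint32_t good = (entry & GROUP_IDX_MASK) * TBL8_GROUP_NUM_ENTRIES;

	printf("bad=%u good=%u\n", bad, good);	/* bad=11264 good=76800 */
	return 0;
}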

Fixes: dc81ebbacaeb ("lpm: extend IPv4 next hop field")
Fixes: cbc2f1dccfba ("lpm/arm: support NEON")
Fixes: d2cc7959342b ("lpm: add AltiVec for ppc64")
Cc: stable@dpdk.org
Signed-off-by: Ruifeng Wang <ruifeng.wang@arm.com>
Tested-by: David Christensen <drc@linux.vnet.ibm.com>
Acked-by: Vladimir Medvedkin <vladimir.medvedkin@intel.com>
lib/librte_lpm/rte_lpm_altivec.h
lib/librte_lpm/rte_lpm_neon.h
lib/librte_lpm/rte_lpm_sse.h

diff --git a/lib/librte_lpm/rte_lpm_altivec.h b/lib/librte_lpm/rte_lpm_altivec.h
index 228c41b38ee6702e121c54d39d006b49588b6958..4fbc1b595d1dd24f55a6d6e7ff792a2a2c7f7def 100644
@@ -88,28 +88,28 @@ rte_lpm_lookupx4(const struct rte_lpm *lpm, xmm_t ip, uint32_t hop[4],
        if (unlikely((pt & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
                        RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
                i8.u32[0] = i8.u32[0] +
-                       (uint8_t)tbl[0] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+                       (tbl[0] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
                ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[0]];
                tbl[0] = *ptbl;
        }
        if (unlikely((pt >> 32 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
                        RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
                i8.u32[1] = i8.u32[1] +
-                       (uint8_t)tbl[1] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+                       (tbl[1] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
                ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[1]];
                tbl[1] = *ptbl;
        }
        if (unlikely((pt2 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
                        RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
                i8.u32[2] = i8.u32[2] +
-                       (uint8_t)tbl[2] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+                       (tbl[2] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
                ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[2]];
                tbl[2] = *ptbl;
        }
        if (unlikely((pt2 >> 32 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
                        RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
                i8.u32[3] = i8.u32[3] +
-                       (uint8_t)tbl[3] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+                       (tbl[3] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
                ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[3]];
                tbl[3] = *ptbl;
        }
diff --git a/lib/librte_lpm/rte_lpm_neon.h b/lib/librte_lpm/rte_lpm_neon.h
index 6c131d3125c2a1a8762739fd8fd288e8250df444..4642a866f117c5ed63be4880acd0712d59dcd26b 100644
@@ -81,28 +81,28 @@ rte_lpm_lookupx4(const struct rte_lpm *lpm, xmm_t ip, uint32_t hop[4],
        if (unlikely((pt & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
                        RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
                i8.u32[0] = i8.u32[0] +
-                       (uint8_t)tbl[0] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+                       (tbl[0] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
                ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[0]];
                tbl[0] = *ptbl;
        }
        if (unlikely((pt >> 32 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
                        RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
                i8.u32[1] = i8.u32[1] +
-                       (uint8_t)tbl[1] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+                       (tbl[1] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
                ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[1]];
                tbl[1] = *ptbl;
        }
        if (unlikely((pt2 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
                        RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
                i8.u32[2] = i8.u32[2] +
-                       (uint8_t)tbl[2] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+                       (tbl[2] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
                ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[2]];
                tbl[2] = *ptbl;
        }
        if (unlikely((pt2 >> 32 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
                        RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
                i8.u32[3] = i8.u32[3] +
-                       (uint8_t)tbl[3] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+                       (tbl[3] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
                ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[3]];
                tbl[3] = *ptbl;
        }
diff --git a/lib/librte_lpm/rte_lpm_sse.h b/lib/librte_lpm/rte_lpm_sse.h
index 44770b6ff876ba1ec3888f6621aae402423f8ce0..eaa863c5223fc4fbe5dc0533bbf4e01644dbffcb 100644
@@ -82,28 +82,28 @@ rte_lpm_lookupx4(const struct rte_lpm *lpm, xmm_t ip, uint32_t hop[4],
        if (unlikely((pt & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
                        RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
                i8.u32[0] = i8.u32[0] +
-                       (uint8_t)tbl[0] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+                       (tbl[0] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
                ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[0]];
                tbl[0] = *ptbl;
        }
        if (unlikely((pt >> 32 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
                        RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
                i8.u32[1] = i8.u32[1] +
-                       (uint8_t)tbl[1] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+                       (tbl[1] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
                ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[1]];
                tbl[1] = *ptbl;
        }
        if (unlikely((pt2 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
                        RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
                i8.u32[2] = i8.u32[2] +
-                       (uint8_t)tbl[2] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+                       (tbl[2] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
                ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[2]];
                tbl[2] = *ptbl;
        }
        if (unlikely((pt2 >> 32 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
                        RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
                i8.u32[3] = i8.u32[3] +
-                       (uint8_t)tbl[3] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+                       (tbl[3] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
                ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[3]];
                tbl[3] = *ptbl;
        }
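
For context, a sketch of the table-entry layout that the new mask
assumes (paraphrased from DPDK's rte_lpm.h, little-endian variant;
field names and widths may differ between DPDK versions):

/* The low 24 bits hold either the next hop or, when valid_group is
 * set, the tbl8 group index; the flag bits sit in the top 8 bits.
 */
struct rte_lpm_tbl_entry {
	uint32_t next_hop    :24; /* next hop, or tbl8 group index */
	uint32_t valid       :1;  /* entry is valid */
	uint32_t valid_group :1;  /* next_hop is a tbl8 group index */
	uint32_t depth       :6;  /* prefix depth of the rule */
};

Since dc81ebbacaeb widened this field (from 8 bits in the original
layout), the old (uint8_t) cast silently dropped the upper 16 bits of
the group index; masking the raw 32-bit entry with 0x00FFFFFF keeps the
full 24-bit index while stripping the valid/valid_group/depth flags.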