{ \
uint64_t pkt00_mask, pkt01_mask; \
struct rte_mbuf *mbuf00, *mbuf01; \
+ uint32_t key_offset = t->key_offset; \
\
pkt00_index = __builtin_ctzll(pkts_mask); \
pkt00_mask = 1LLU << pkt00_index; \
pkts_mask &= ~pkt01_mask; \
mbuf01 = pkts[pkt01_index]; \
\
- rte_prefetch0(RTE_MBUF_METADATA_UINT8_PTR(mbuf00, 0)); \
- rte_prefetch0(RTE_MBUF_METADATA_UINT8_PTR(mbuf01, 0)); \
+ rte_prefetch0(RTE_MBUF_METADATA_UINT8_PTR(mbuf00, key_offset));\
+ rte_prefetch0(RTE_MBUF_METADATA_UINT8_PTR(mbuf01, key_offset));\
}
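
(Background for the prefetch change above: in the packet framework, a table's lookup key lives at key_offset bytes into the mbuf metadata area, so a prefetch issued at offset 0 can warm a different cache line than the one the key comparison will actually read. The standalone sketch below shows that arithmetic; it is plain C with no DPDK dependencies, and the 64-byte line size and the sample offsets are illustrative assumptions, not values taken from this patch.)

#include <stdint.h>
#include <stdio.h>

#define CACHE_LINE_SIZE 64u   /* assumed line size; typical for x86 targets */

/* 1 when a prefetch at byte 0 of the metadata area lands on a different
 * cache line than the key stored at key_offset. */
static int prefetch_at_zero_misses_key(uint32_t key_offset)
{
	return (key_offset / CACHE_LINE_SIZE) != 0;
}

int main(void)
{
	uint32_t sample_offsets[] = { 32, 64, 128, 192 };   /* illustrative only */
	unsigned i;

	for (i = 0; i < sizeof(sample_offsets) / sizeof(sample_offsets[0]); i++)
		printf("key_offset=%3u: prefetch at 0 %s the key's cache line\n",
			sample_offsets[i],
			prefetch_at_zero_misses_key(sample_offsets[i]) ?
				"misses" : "covers");
	return 0;
}
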
#define lookup2_stage0_with_odd_support(t, g, pkts, pkts_mask, pkt00_index, \
{ \
uint64_t pkt00_mask, pkt01_mask; \
struct rte_mbuf *mbuf00, *mbuf01; \
+ uint32_t key_offset = t->key_offset; \
\
pkt00_index = __builtin_ctzll(pkts_mask); \
pkt00_mask = 1LLU << pkt00_index; \
pkts_mask &= ~pkt01_mask; \
mbuf01 = pkts[pkt01_index]; \
\
- rte_prefetch0(RTE_MBUF_METADATA_UINT8_PTR(mbuf00, 0)); \
- rte_prefetch0(RTE_MBUF_METADATA_UINT8_PTR(mbuf01, 0)); \
+ rte_prefetch0(RTE_MBUF_METADATA_UINT8_PTR(mbuf00, key_offset));\
+ rte_prefetch0(RTE_MBUF_METADATA_UINT8_PTR(mbuf01, key_offset));\
}
#define lookup2_stage1(t, g, pkts, pkt10_index, pkt11_index) \
pos = 3; \
}
-#define lookup1_stage0(pkt0_index, mbuf0, pkts, pkts_mask) \
+#define lookup1_stage0(pkt0_index, mbuf0, pkts, pkts_mask, f) \
{ \
uint64_t pkt_mask; \
+ uint32_t key_offset = f->key_offset;\
\
pkt0_index = __builtin_ctzll(pkts_mask); \
pkt_mask = 1LLU << pkt0_index; \
pkts_mask &= ~pkt_mask; \
\
mbuf0 = pkts[pkt0_index]; \
- rte_prefetch0(RTE_MBUF_METADATA_UINT8_PTR(mbuf0, 0)); \
+ rte_prefetch0(RTE_MBUF_METADATA_UINT8_PTR(mbuf0, key_offset));\
}
#define lookup1_stage1(mbuf1, bucket1, f) \
}
#define lookup2_stage0(pkt00_index, pkt01_index, mbuf00, mbuf01,\
- pkts, pkts_mask) \
+ pkts, pkts_mask, f) \
{ \
uint64_t pkt00_mask, pkt01_mask; \
+ uint32_t key_offset = f->key_offset; \
\
pkt00_index = __builtin_ctzll(pkts_mask); \
pkt00_mask = 1LLU << pkt00_index; \
pkts_mask &= ~pkt00_mask; \
\
mbuf00 = pkts[pkt00_index]; \
- rte_prefetch0(RTE_MBUF_METADATA_UINT8_PTR(mbuf00, 0)); \
+ rte_prefetch0(RTE_MBUF_METADATA_UINT8_PTR(mbuf00, key_offset));\
\
pkt01_index = __builtin_ctzll(pkts_mask); \
pkt01_mask = 1LLU << pkt01_index; \
pkts_mask &= ~pkt01_mask; \
\
mbuf01 = pkts[pkt01_index]; \
- rte_prefetch0(RTE_MBUF_METADATA_UINT8_PTR(mbuf01, 0)); \
+ rte_prefetch0(RTE_MBUF_METADATA_UINT8_PTR(mbuf01, key_offset));\
}
#define lookup2_stage0_with_odd_support(pkt00_index, pkt01_index,\
- mbuf00, mbuf01, pkts, pkts_mask) \
+ mbuf00, mbuf01, pkts, pkts_mask, f) \
{ \
uint64_t pkt00_mask, pkt01_mask; \
+ uint32_t key_offset = f->key_offset; \
\
pkt00_index = __builtin_ctzll(pkts_mask); \
pkt00_mask = 1LLU << pkt00_index; \
pkts_mask &= ~pkt00_mask; \
\
mbuf00 = pkts[pkt00_index]; \
- rte_prefetch0(RTE_MBUF_METADATA_UINT8_PTR(mbuf00, 0)); \
+ rte_prefetch0(RTE_MBUF_METADATA_UINT8_PTR(mbuf00, key_offset)); \
\
pkt01_index = __builtin_ctzll(pkts_mask); \
if (pkts_mask == 0) \
pkts_mask &= ~pkt01_mask; \
\
mbuf01 = pkts[pkt01_index]; \
- rte_prefetch0(RTE_MBUF_METADATA_UINT8_PTR(mbuf01, 0)); \
+ rte_prefetch0(RTE_MBUF_METADATA_UINT8_PTR(mbuf01, key_offset)); \
}
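
(The if (pkts_mask == 0) guard in the hunk above handles the "odd support" case; its body falls outside the shown context lines. Based on the macro's name and the visible guard, the intent is that when no second packet remains, the first packet's index is reused so the two-packet stage 1 can still run. A minimal, self-contained model of that selection logic follows; the function and variable names are placeholders of mine, not the DPDK macro itself.)

#include <stdint.h>
#include <stdio.h>

/* Pick two packet indexes from pkts_mask; if only one packet is left,
 * reuse its index for the second slot (the "odd support" behavior). */
static void pick_pair_with_odd_support(uint64_t *pkts_mask,
				       uint32_t *pkt00_index,
				       uint32_t *pkt01_index)
{
	*pkt00_index = (uint32_t)__builtin_ctzll(*pkts_mask);
	*pkts_mask &= ~(1LLU << *pkt00_index);

	if (*pkts_mask == 0)
		*pkt01_index = *pkt00_index;   /* odd packet count: reuse slot 0 */
	else
		*pkt01_index = (uint32_t)__builtin_ctzll(*pkts_mask);

	*pkts_mask &= ~(1LLU << *pkt01_index);
}

int main(void)
{
	uint64_t mask = 0x15;   /* packets 0, 2, 4 pending: an odd count */
	uint32_t i0, i1;

	pick_pair_with_odd_support(&mask, &i0, &i1);
	printf("pair: %u %u, remaining mask 0x%llx\n", i0, i1,
		(unsigned long long)mask);          /* pair: 0 2, mask 0x10 */

	pick_pair_with_odd_support(&mask, &i0, &i1);
	printf("pair: %u %u, remaining mask 0x%llx\n", i0, i1,
		(unsigned long long)mask);          /* pair: 4 4, mask 0x0 */
	return 0;
}
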
#define lookup2_stage1(mbuf10, mbuf11, bucket10, bucket11, f) \
struct rte_mbuf *mbuf;
uint32_t pkt_index;
- lookup1_stage0(pkt_index, mbuf, pkts, pkts_mask);
+ lookup1_stage0(pkt_index, mbuf, pkts, pkts_mask, f);
lookup1_stage1(mbuf, bucket, f);
lookup1_stage2_lru(pkt_index, mbuf, bucket,
pkts_mask_out, entries, f);
*/
/* Pipeline stage 0 */
lookup2_stage0(pkt00_index, pkt01_index, mbuf00, mbuf01, pkts,
- pkts_mask);
+ pkts_mask, f);
/* Pipeline feed */
mbuf10 = mbuf00;
/* Pipeline stage 0 */
lookup2_stage0(pkt00_index, pkt01_index, mbuf00, mbuf01, pkts,
- pkts_mask);
+ pkts_mask, f);
/* Pipeline stage 1 */
lookup2_stage1(mbuf10, mbuf11, bucket10, bucket11, f);
/* Pipeline stage 0 */
lookup2_stage0_with_odd_support(pkt00_index, pkt01_index,
- mbuf00, mbuf01, pkts, pkts_mask);
+ mbuf00, mbuf01, pkts, pkts_mask, f);
/* Pipeline stage 1 */
lookup2_stage1(mbuf10, mbuf11, bucket10, bucket11, f);
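
(For orientation: the calls above form the framework's two-stage software pipeline, where stage 0 selects the next two packets and prefetches their keys while stage 1 works on the pair prefetched during the previous iteration; the extra f argument simply carries key_offset into stage 0. Below is a generic, self-contained illustration of that prefetch-ahead pattern in plain C; the struct layout, key size, and lookup body are illustrative placeholders, not DPDK code.)

#include <stdint.h>
#include <string.h>

#define KEY_SIZE 16

struct pkt_meta {
	uint8_t pad[128];        /* placeholder metadata preceding the key */
	uint8_t key[KEY_SIZE];   /* field the lookup reads; "key_offset" is 128 here */
};

/* Placeholder stage 1: compare a previously prefetched key. */
static unsigned match_key(const struct pkt_meta *m, const uint8_t *ref)
{
	return memcmp(m->key, ref, KEY_SIZE) == 0;
}

/* Two packets per iteration: while stage 1 matches pair N, stage 0 has
 * already issued prefetches for pair N+1, hiding the memory latency. */
static unsigned lookup_burst(struct pkt_meta *pkts[], unsigned n,
		const uint8_t *ref)
{
	unsigned hits = 0, i = 0;

	if (n >= 2) {
		/* Pipeline fill: prefetch the first pair's keys. */
		__builtin_prefetch(pkts[0]->key);
		__builtin_prefetch(pkts[1]->key);

		for (; i + 3 < n; i += 2) {
			/* Stage 0: prefetch the keys of the next pair. */
			__builtin_prefetch(pkts[i + 2]->key);
			__builtin_prefetch(pkts[i + 3]->key);
			/* Stage 1: consume the pair prefetched last iteration. */
			hits += match_key(pkts[i], ref);
			hits += match_key(pkts[i + 1], ref);
		}
	}

	for (; i < n; i++)   /* pipeline flush: leftover packets */
		hits += match_key(pkts[i], ref);

	return hits;
}

int main(void)
{
	static struct pkt_meta storage[8];
	struct pkt_meta *pkts[8];
	uint8_t ref[KEY_SIZE] = { 0xab };
	unsigned i;

	for (i = 0; i < 8; i++) {
		pkts[i] = &storage[i];
		if ((i & 1) == 0)
			pkts[i]->key[0] = 0xab;  /* every other packet matches */
	}
	return lookup_burst(pkts, 8, ref) == 4 ? 0 : 1;
}
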
struct rte_mbuf *mbuf;
uint32_t pkt_index;
- lookup1_stage0(pkt_index, mbuf, pkts, pkts_mask);
+ lookup1_stage0(pkt_index, mbuf, pkts, pkts_mask, f);
lookup1_stage1_dosig(mbuf, bucket, f);
lookup1_stage2_lru(pkt_index, mbuf, bucket,
pkts_mask_out, entries, f);
*/
/* Pipeline stage 0 */
lookup2_stage0(pkt00_index, pkt01_index, mbuf00, mbuf01, pkts,
- pkts_mask);
+ pkts_mask, f);
/* Pipeline feed */
mbuf10 = mbuf00;
/* Pipeline stage 0 */
lookup2_stage0(pkt00_index, pkt01_index, mbuf00, mbuf01, pkts,
- pkts_mask);
+ pkts_mask, f);
/* Pipeline stage 1 */
lookup2_stage1_dosig(mbuf10, mbuf11, bucket10, bucket11, f);
/* Pipeline stage 0 */
lookup2_stage0_with_odd_support(pkt00_index, pkt01_index,
- mbuf00, mbuf01, pkts, pkts_mask);
+ mbuf00, mbuf01, pkts, pkts_mask, f);
/* Pipeline stage 1 */
lookup2_stage1_dosig(mbuf10, mbuf11, bucket10, bucket11, f);
struct rte_mbuf *mbuf;
uint32_t pkt_index;
- lookup1_stage0(pkt_index, mbuf, pkts, pkts_mask);
+ lookup1_stage0(pkt_index, mbuf, pkts, pkts_mask, f);
lookup1_stage1(mbuf, bucket, f);
lookup1_stage2_ext(pkt_index, mbuf, bucket,
pkts_mask_out, entries, buckets_mask,
*/
/* Pipeline stage 0 */
lookup2_stage0(pkt00_index, pkt01_index, mbuf00, mbuf01, pkts,
- pkts_mask);
+ pkts_mask, f);
/* Pipeline feed */
mbuf10 = mbuf00;
/* Pipeline stage 0 */
lookup2_stage0(pkt00_index, pkt01_index, mbuf00, mbuf01, pkts,
- pkts_mask);
+ pkts_mask, f);
/* Pipeline stage 1 */
lookup2_stage1(mbuf10, mbuf11, bucket10, bucket11, f);
/* Pipeline stage 0 */
lookup2_stage0_with_odd_support(pkt00_index, pkt01_index,
- mbuf00, mbuf01, pkts, pkts_mask);
+ mbuf00, mbuf01, pkts, pkts_mask, f);
/* Pipeline stage 1 */
lookup2_stage1(mbuf10, mbuf11, bucket10, bucket11, f);
struct rte_mbuf *mbuf;
uint32_t pkt_index;
- lookup1_stage0(pkt_index, mbuf, pkts, pkts_mask);
+ lookup1_stage0(pkt_index, mbuf, pkts, pkts_mask, f);
lookup1_stage1_dosig(mbuf, bucket, f);
lookup1_stage2_ext(pkt_index, mbuf, bucket,
pkts_mask_out, entries, buckets_mask,
*/
/* Pipeline stage 0 */
lookup2_stage0(pkt00_index, pkt01_index, mbuf00, mbuf01, pkts,
- pkts_mask);
+ pkts_mask, f);
/* Pipeline feed */
mbuf10 = mbuf00;
/* Pipeline stage 0 */
lookup2_stage0(pkt00_index, pkt01_index, mbuf00, mbuf01, pkts,
- pkts_mask);
+ pkts_mask, f);
/* Pipeline stage 1 */
lookup2_stage1_dosig(mbuf10, mbuf11, bucket10, bucket11, f);
/* Pipeline stage 0 */
lookup2_stage0_with_odd_support(pkt00_index, pkt01_index,
- mbuf00, mbuf01, pkts, pkts_mask);
+ mbuf00, mbuf01, pkts, pkts_mask, f);
/* Pipeline stage 1 */
lookup2_stage1_dosig(mbuf10, mbuf11, bucket10, bucket11, f);
pos = 3; \
}
-#define lookup1_stage0(pkt0_index, mbuf0, pkts, pkts_mask) \
+#define lookup1_stage0(pkt0_index, mbuf0, pkts, pkts_mask, f) \
{ \
uint64_t pkt_mask; \
+ uint32_t key_offset = f->key_offset; \
\
pkt0_index = __builtin_ctzll(pkts_mask); \
pkt_mask = 1LLU << pkt0_index; \
pkts_mask &= ~pkt_mask; \
\
mbuf0 = pkts[pkt0_index]; \
- rte_prefetch0(RTE_MBUF_METADATA_UINT8_PTR(mbuf0, 0)); \
+ rte_prefetch0(RTE_MBUF_METADATA_UINT8_PTR(mbuf0, key_offset));\
}
#define lookup1_stage1(mbuf1, bucket1, f) \
}
#define lookup2_stage0(pkt00_index, pkt01_index, mbuf00, mbuf01,\
- pkts, pkts_mask) \
+ pkts, pkts_mask, f) \
{ \
uint64_t pkt00_mask, pkt01_mask; \
+ uint32_t key_offset = f->key_offset; \
\
pkt00_index = __builtin_ctzll(pkts_mask); \
pkt00_mask = 1LLU << pkt00_index; \
pkts_mask &= ~pkt00_mask; \
\
mbuf00 = pkts[pkt00_index]; \
- rte_prefetch0(RTE_MBUF_METADATA_UINT8_PTR(mbuf00, 0)); \
+ rte_prefetch0(RTE_MBUF_METADATA_UINT8_PTR(mbuf00, key_offset));\
\
pkt01_index = __builtin_ctzll(pkts_mask); \
pkt01_mask = 1LLU << pkt01_index; \
pkts_mask &= ~pkt01_mask; \
\
mbuf01 = pkts[pkt01_index]; \
- rte_prefetch0(RTE_MBUF_METADATA_UINT8_PTR(mbuf01, 0)); \
+ rte_prefetch0(RTE_MBUF_METADATA_UINT8_PTR(mbuf01, key_offset));\
}
#define lookup2_stage0_with_odd_support(pkt00_index, pkt01_index,\
- mbuf00, mbuf01, pkts, pkts_mask) \
+ mbuf00, mbuf01, pkts, pkts_mask, f) \
{ \
uint64_t pkt00_mask, pkt01_mask; \
+ uint32_t key_offset = f->key_offset; \
\
pkt00_index = __builtin_ctzll(pkts_mask); \
pkt00_mask = 1LLU << pkt00_index; \
pkts_mask &= ~pkt00_mask; \
\
mbuf00 = pkts[pkt00_index]; \
- rte_prefetch0(RTE_MBUF_METADATA_UINT8_PTR(mbuf00, 0)); \
+ rte_prefetch0(RTE_MBUF_METADATA_UINT8_PTR(mbuf00, key_offset)); \
\
pkt01_index = __builtin_ctzll(pkts_mask); \
if (pkts_mask == 0) \
pkts_mask &= ~pkt01_mask; \
\
mbuf01 = pkts[pkt01_index]; \
- rte_prefetch0(RTE_MBUF_METADATA_UINT8_PTR(mbuf01, 0)); \
+ rte_prefetch0(RTE_MBUF_METADATA_UINT8_PTR(mbuf01, key_offset)); \
}
#define lookup2_stage1(mbuf10, mbuf11, bucket10, bucket11, f) \
struct rte_mbuf *mbuf;
uint32_t pkt_index;
- lookup1_stage0(pkt_index, mbuf, pkts, pkts_mask);
+ lookup1_stage0(pkt_index, mbuf, pkts, pkts_mask, f);
lookup1_stage1(mbuf, bucket, f);
lookup1_stage2_lru(pkt_index, mbuf, bucket,
pkts_mask_out, entries, f);
*/
/* Pipeline stage 0 */
lookup2_stage0(pkt00_index, pkt01_index, mbuf00, mbuf01, pkts,
- pkts_mask);
+ pkts_mask, f);
/* Pipeline feed */
mbuf10 = mbuf00;
/* Pipeline stage 0 */
lookup2_stage0(pkt00_index, pkt01_index, mbuf00, mbuf01, pkts,
- pkts_mask);
+ pkts_mask, f);
/* Pipeline stage 1 */
lookup2_stage1(mbuf10, mbuf11, bucket10, bucket11, f);
/* Pipeline stage 0 */
lookup2_stage0_with_odd_support(pkt00_index, pkt01_index,
- mbuf00, mbuf01, pkts, pkts_mask);
+ mbuf00, mbuf01, pkts, pkts_mask, f);
/* Pipeline stage 1 */
lookup2_stage1(mbuf10, mbuf11, bucket10, bucket11, f);
struct rte_mbuf *mbuf;
uint32_t pkt_index;
- lookup1_stage0(pkt_index, mbuf, pkts, pkts_mask);
+ lookup1_stage0(pkt_index, mbuf, pkts, pkts_mask, f);
lookup1_stage1(mbuf, bucket, f);
lookup1_stage2_ext(pkt_index, mbuf, bucket,
pkts_mask_out, entries, buckets_mask, buckets,
*/
/* Pipeline stage 0 */
lookup2_stage0(pkt00_index, pkt01_index, mbuf00, mbuf01, pkts,
- pkts_mask);
+ pkts_mask, f);
/* Pipeline feed */
mbuf10 = mbuf00;
/* Pipeline stage 0 */
lookup2_stage0(pkt00_index, pkt01_index, mbuf00, mbuf01, pkts,
- pkts_mask);
+ pkts_mask, f);
/* Pipeline stage 1 */
lookup2_stage1(mbuf10, mbuf11, bucket10, bucket11, f);
/* Pipeline stage 0 */
lookup2_stage0_with_odd_support(pkt00_index, pkt01_index,
- mbuf00, mbuf01, pkts, pkts_mask);
+ mbuf00, mbuf01, pkts, pkts_mask, f);
/* Pipeline stage 1 */
lookup2_stage1(mbuf10, mbuf11, bucket10, bucket11, f);
pos = 3; \
}
-#define lookup1_stage0(pkt0_index, mbuf0, pkts, pkts_mask) \
+#define lookup1_stage0(pkt0_index, mbuf0, pkts, pkts_mask, f) \
{ \
uint64_t pkt_mask; \
+ uint32_t key_offset = f->key_offset;\
\
pkt0_index = __builtin_ctzll(pkts_mask); \
pkt_mask = 1LLU << pkt0_index; \
pkts_mask &= ~pkt_mask; \
\
mbuf0 = pkts[pkt0_index]; \
- rte_prefetch0(RTE_MBUF_METADATA_UINT8_PTR(mbuf0, 0)); \
+ rte_prefetch0(RTE_MBUF_METADATA_UINT8_PTR(mbuf0, key_offset)); \
}
#define lookup1_stage1(mbuf1, bucket1, f) \
}
#define lookup2_stage0(pkt00_index, pkt01_index, mbuf00, mbuf01,\
- pkts, pkts_mask) \
+ pkts, pkts_mask, f) \
{ \
uint64_t pkt00_mask, pkt01_mask; \
+ uint32_t key_offset = f->key_offset; \
\
pkt00_index = __builtin_ctzll(pkts_mask); \
pkt00_mask = 1LLU << pkt00_index; \
pkts_mask &= ~pkt00_mask; \
\
mbuf00 = pkts[pkt00_index]; \
- rte_prefetch0(RTE_MBUF_METADATA_UINT8_PTR(mbuf00, 0)); \
+ rte_prefetch0(RTE_MBUF_METADATA_UINT8_PTR(mbuf00, key_offset));\
\
pkt01_index = __builtin_ctzll(pkts_mask); \
pkt01_mask = 1LLU << pkt01_index; \
pkts_mask &= ~pkt01_mask; \
\
mbuf01 = pkts[pkt01_index]; \
- rte_prefetch0(RTE_MBUF_METADATA_UINT8_PTR(mbuf01, 0)); \
+ rte_prefetch0(RTE_MBUF_METADATA_UINT8_PTR(mbuf01, key_offset));\
}
#define lookup2_stage0_with_odd_support(pkt00_index, pkt01_index,\
- mbuf00, mbuf01, pkts, pkts_mask) \
+ mbuf00, mbuf01, pkts, pkts_mask, f) \
{ \
uint64_t pkt00_mask, pkt01_mask; \
+ uint32_t key_offset = f->key_offset; \
\
pkt00_index = __builtin_ctzll(pkts_mask); \
pkt00_mask = 1LLU << pkt00_index; \
pkts_mask &= ~pkt00_mask; \
\
mbuf00 = pkts[pkt00_index]; \
- rte_prefetch0(RTE_MBUF_METADATA_UINT8_PTR(mbuf00, 0)); \
+ rte_prefetch0(RTE_MBUF_METADATA_UINT8_PTR(mbuf00, key_offset));\
\
pkt01_index = __builtin_ctzll(pkts_mask); \
if (pkts_mask == 0) \
pkts_mask &= ~pkt01_mask; \
\
mbuf01 = pkts[pkt01_index]; \
- rte_prefetch0(RTE_MBUF_METADATA_UINT8_PTR(mbuf01, 0)); \
+ rte_prefetch0(RTE_MBUF_METADATA_UINT8_PTR(mbuf01, key_offset));\
}
#define lookup2_stage1(mbuf10, mbuf11, bucket10, bucket11, f) \
struct rte_mbuf *mbuf;
uint32_t pkt_index;
- lookup1_stage0(pkt_index, mbuf, pkts, pkts_mask);
+ lookup1_stage0(pkt_index, mbuf, pkts, pkts_mask, f);
lookup1_stage1(mbuf, bucket, f);
lookup1_stage2_lru(pkt_index, mbuf, bucket,
pkts_mask_out, entries, f);
*/
/* Pipeline stage 0 */
lookup2_stage0(pkt00_index, pkt01_index, mbuf00, mbuf01, pkts,
- pkts_mask);
+ pkts_mask, f);
/* Pipeline feed */
mbuf10 = mbuf00;
/* Pipeline stage 0 */
lookup2_stage0(pkt00_index, pkt01_index, mbuf00, mbuf01, pkts,
- pkts_mask);
+ pkts_mask, f);
/* Pipeline stage 1 */
lookup2_stage1(mbuf10, mbuf11, bucket10, bucket11, f);
/* Pipeline stage 0 */
lookup2_stage0_with_odd_support(pkt00_index, pkt01_index,
- mbuf00, mbuf01, pkts, pkts_mask);
+ mbuf00, mbuf01, pkts, pkts_mask, f);
/* Pipeline stage 1 */
lookup2_stage1(mbuf10, mbuf11, bucket10, bucket11, f);
struct rte_mbuf *mbuf;
uint32_t pkt_index;
- lookup1_stage0(pkt_index, mbuf, pkts, pkts_mask);
+ lookup1_stage0(pkt_index, mbuf, pkts, pkts_mask, f);
lookup1_stage1_dosig(mbuf, bucket, f);
lookup1_stage2_lru(pkt_index, mbuf, bucket,
pkts_mask_out, entries, f);
*/
/* Pipeline stage 0 */
lookup2_stage0(pkt00_index, pkt01_index, mbuf00, mbuf01, pkts,
- pkts_mask);
+ pkts_mask, f);
/* Pipeline feed */
mbuf10 = mbuf00;
/* Pipeline stage 0 */
lookup2_stage0(pkt00_index, pkt01_index, mbuf00, mbuf01, pkts,
- pkts_mask);
+ pkts_mask, f);
/* Pipeline stage 1 */
lookup2_stage1_dosig(mbuf10, mbuf11, bucket10, bucket11, f);
/* Pipeline stage 0 */
lookup2_stage0_with_odd_support(pkt00_index, pkt01_index,
- mbuf00, mbuf01, pkts, pkts_mask);
+ mbuf00, mbuf01, pkts, pkts_mask, f);
/* Pipeline stage 1 */
lookup2_stage1_dosig(mbuf10, mbuf11, bucket10, bucket11, f);
struct rte_mbuf *mbuf;
uint32_t pkt_index;
- lookup1_stage0(pkt_index, mbuf, pkts, pkts_mask);
+ lookup1_stage0(pkt_index, mbuf, pkts, pkts_mask, f);
lookup1_stage1(mbuf, bucket, f);
lookup1_stage2_ext(pkt_index, mbuf, bucket,
pkts_mask_out, entries, buckets_mask, buckets,
*/
/* Pipeline stage 0 */
lookup2_stage0(pkt00_index, pkt01_index, mbuf00, mbuf01, pkts,
- pkts_mask);
+ pkts_mask, f);
/* Pipeline feed */
mbuf10 = mbuf00;
/* Pipeline stage 0 */
lookup2_stage0(pkt00_index, pkt01_index, mbuf00, mbuf01, pkts,
- pkts_mask);
+ pkts_mask, f);
/* Pipeline stage 1 */
lookup2_stage1(mbuf10, mbuf11, bucket10, bucket11, f);
/* Pipeline stage 0 */
lookup2_stage0_with_odd_support(pkt00_index, pkt01_index,
- mbuf00, mbuf01, pkts, pkts_mask);
+ mbuf00, mbuf01, pkts, pkts_mask, f);
/* Pipeline stage 1 */
lookup2_stage1(mbuf10, mbuf11, bucket10, bucket11, f);
struct rte_mbuf *mbuf;
uint32_t pkt_index;
- lookup1_stage0(pkt_index, mbuf, pkts, pkts_mask);
+ lookup1_stage0(pkt_index, mbuf, pkts, pkts_mask, f);
lookup1_stage1_dosig(mbuf, bucket, f);
lookup1_stage2_ext(pkt_index, mbuf, bucket,
pkts_mask_out, entries, buckets_mask,
*/
/* Pipeline stage 0 */
lookup2_stage0(pkt00_index, pkt01_index, mbuf00, mbuf01, pkts,
- pkts_mask);
+ pkts_mask, f);
/* Pipeline feed */
mbuf10 = mbuf00;
/* Pipeline stage 0 */
lookup2_stage0(pkt00_index, pkt01_index, mbuf00, mbuf01, pkts,
- pkts_mask);
+ pkts_mask, f);
/* Pipeline stage 1 */
lookup2_stage1_dosig(mbuf10, mbuf11, bucket10, bucket11, f);
/* Pipeline stage 0 */
lookup2_stage0_with_odd_support(pkt00_index, pkt01_index,
- mbuf00, mbuf01, pkts, pkts_mask);
+ mbuf00, mbuf01, pkts, pkts_mask, f);
/* Pipeline stage 1 */
lookup2_stage1_dosig(mbuf10, mbuf11, bucket10, bucket11, f);
{ \
uint64_t pkt00_mask, pkt01_mask; \
struct rte_mbuf *mbuf00, *mbuf01; \
+ uint32_t key_offset = t->key_offset; \
\
pkt00_index = __builtin_ctzll(pkts_mask); \
pkt00_mask = 1LLU << pkt00_index; \
pkts_mask &= ~pkt01_mask; \
mbuf01 = pkts[pkt01_index]; \
\
- rte_prefetch0(RTE_MBUF_METADATA_UINT8_PTR(mbuf00, 0)); \
- rte_prefetch0(RTE_MBUF_METADATA_UINT8_PTR(mbuf01, 0)); \
+ rte_prefetch0(RTE_MBUF_METADATA_UINT8_PTR(mbuf00, key_offset));\
+ rte_prefetch0(RTE_MBUF_METADATA_UINT8_PTR(mbuf01, key_offset));\
}
#define lookup2_stage0_with_odd_support(t, g, pkts, pkts_mask, pkt00_index, \
{ \
uint64_t pkt00_mask, pkt01_mask; \
struct rte_mbuf *mbuf00, *mbuf01; \
+ uint32_t key_offset = t->key_offset; \
\
pkt00_index = __builtin_ctzll(pkts_mask); \
pkt00_mask = 1LLU << pkt00_index; \
pkts_mask &= ~pkt01_mask; \
mbuf01 = pkts[pkt01_index]; \
\
- rte_prefetch0(RTE_MBUF_METADATA_UINT8_PTR(mbuf00, 0)); \
- rte_prefetch0(RTE_MBUF_METADATA_UINT8_PTR(mbuf01, 0)); \
+ rte_prefetch0(RTE_MBUF_METADATA_UINT8_PTR(mbuf00, key_offset));\
+ rte_prefetch0(RTE_MBUF_METADATA_UINT8_PTR(mbuf01, key_offset));\
}
#define lookup2_stage1(t, g, pkts, pkt10_index, pkt11_index) \