1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright (C) 2020 Marvell.
7 #include <rte_ethdev.h>
10 #include <rte_graph_worker.h>
12 #include "pkt_cls_priv.h"
13 #include "node_private.h"
15 /* Speculative next-node table indexed by the mbuf's combined L2|L3
 * ptype (low 8 bits). A default entry of '0' maps to "pkt_drop", so any
 * ptype not listed below is dropped. IPv4 variants, with or without the
 * RTE_PTYPE_L2_ETHER bit, all steer to the ip4_lookup node.
 */
16 static const uint8_t p_nxt[256] __rte_cache_aligned = {
17 [RTE_PTYPE_L3_IPV4] = PKT_CLS_NEXT_IP4_LOOKUP,
19 [RTE_PTYPE_L3_IPV4_EXT] = PKT_CLS_NEXT_IP4_LOOKUP,
21 [RTE_PTYPE_L3_IPV4_EXT_UNKNOWN] = PKT_CLS_NEXT_IP4_LOOKUP,
23 [RTE_PTYPE_L3_IPV4 | RTE_PTYPE_L2_ETHER] =
24 PKT_CLS_NEXT_IP4_LOOKUP,
26 [RTE_PTYPE_L3_IPV4_EXT | RTE_PTYPE_L2_ETHER] =
27 PKT_CLS_NEXT_IP4_LOOKUP,
29 [RTE_PTYPE_L3_IPV4_EXT_UNKNOWN | RTE_PTYPE_L2_ETHER] =
30 PKT_CLS_NEXT_IP4_LOOKUP,
/*
 * Process function of the packet-classification node: walk a burst of
 * nb_objs mbufs, pick a next edge per packet from p_nxt[] based on the
 * mbuf's masked L2|L3 packet_type, and enqueue accordingly.
 *
 * Strategy: speculate that the whole burst shares the l2l3 type cached
 * in the node context from the previous burst. Matching packets are
 * accumulated into one pre-fetched next-node stream; mismatches are
 * enqueued one at a time via rte_node_enqueue_x1(). If the speculation
 * holds for the entire burst ("home run"), the stream is moved wholesale
 * with rte_node_next_stream_move().
 *
 * NOTE(review): this chunk is elided — the return-type line, several
 * interior statements (mbuf0..mbuf3 loads, enqueue edge arguments,
 * brace closures, final return) are not visible here; the annotations
 * below describe only what the visible lines establish.
 */
34 pkt_cls_node_process(struct rte_graph *graph, struct rte_node *node,
35 void **objs, uint16_t nb_objs)
37 struct rte_mbuf *mbuf0, *mbuf1, *mbuf2, *mbuf3, **pkts;
38 uint8_t l0, l1, l2, l3, last_type;
39 uint16_t next_index, n_left_from;
40 uint16_t held = 0, last_spec = 0;
41 struct pkt_cls_node_ctx *ctx;
42 void **to_next, **from;
45 pkts = (struct rte_mbuf **)objs;
47 n_left_from = nb_objs;
/* Prefetch the object array itself, one cache line at a time */
49 for (i = OBJS_PER_CLINE; i < RTE_GRAPH_BURST_SIZE; i += OBJS_PER_CLINE)
50 rte_prefetch0(&objs[i]);
/* Warm up the first four mbuf headers before the main loop */
52 #if RTE_GRAPH_BURST_SIZE > 64
53 for (i = 0; i < 4 && i < n_left_from; i++)
54 rte_prefetch0(pkts[i]);
/* Speculate the next node from the l2l3 type seen in the last burst */
57 ctx = (struct pkt_cls_node_ctx *)node->ctx;
58 last_type = ctx->l2l3_type;
59 next_index = p_nxt[last_type];
61 /* Get stream for the speculated next node */
62 to_next = rte_node_next_stream_get(graph, node,
/* Main loop: handle packets four at a time */
64 while (n_left_from >= 4) {
65 #if RTE_GRAPH_BURST_SIZE > 64
66 if (likely(n_left_from > 7)) {
67 rte_prefetch0(pkts[4]);
68 rte_prefetch0(pkts[5]);
69 rte_prefetch0(pkts[6]);
70 rte_prefetch0(pkts[7]);
/* Reduce each packet_type to its L2|L3 bits — the p_nxt[] index */
81 l0 = mbuf0->packet_type &
82 (RTE_PTYPE_L2_MASK | RTE_PTYPE_L3_MASK);
83 l1 = mbuf1->packet_type &
84 (RTE_PTYPE_L2_MASK | RTE_PTYPE_L3_MASK);
85 l2 = mbuf2->packet_type &
86 (RTE_PTYPE_L2_MASK | RTE_PTYPE_L3_MASK);
87 l3 = mbuf3->packet_type &
88 (RTE_PTYPE_L2_MASK | RTE_PTYPE_L3_MASK);
90 /* Check if they are destined to same
91 * next node based on l2l3 packet type.
 * fix_spec is non-zero iff any of the four differs from the
 * speculated type.
93 uint8_t fix_spec = (last_type ^ l0) | (last_type ^ l1) |
94 (last_type ^ l2) | (last_type ^ l3);
96 if (unlikely(fix_spec)) {
97 /* Copy things successfully speculated till now */
98 rte_memcpy(to_next, from,
99 last_spec * sizeof(from[0]));
101 to_next += last_spec;
/* Per-packet slow path: keep the packet in the speculated
 * stream when its next edge matches, otherwise enqueue it
 * individually to its own edge.
 */
106 if (p_nxt[l0] == next_index) {
107 to_next[0] = from[0];
111 rte_node_enqueue_x1(graph, node,
116 if (p_nxt[l1] == next_index) {
117 to_next[0] = from[1];
121 rte_node_enqueue_x1(graph, node,
126 if (p_nxt[l2] == next_index) {
127 to_next[0] = from[2];
131 rte_node_enqueue_x1(graph, node,
136 if (p_nxt[l3] == next_index) {
137 to_next[0] = from[3];
141 rte_node_enqueue_x1(graph, node,
145 /* Update speculated ptype */
146 if ((last_type != l3) && (l2 == l3) &&
147 (next_index != p_nxt[l3])) {
148 /* Put the current stream for
 * the old speculation back, then re-speculate on l3 since the
 * trailing packets agree on it.
 */
151 rte_node_next_stream_put(graph, node,
156 /* Get next stream for new ltype */
157 next_index = p_nxt[l3];
159 to_next = rte_node_next_stream_get(graph, node,
162 } else if (next_index == p_nxt[l3]) {
/* Tail loop: remaining (< 4) packets one at a time */
172 while (n_left_from > 0) {
178 l0 = mbuf0->packet_type &
179 (RTE_PTYPE_L2_MASK | RTE_PTYPE_L3_MASK);
180 if (unlikely((l0 != last_type) &&
181 (p_nxt[l0] != next_index))) {
182 /* Copy things successfully speculated till now */
183 rte_memcpy(to_next, from,
184 last_spec * sizeof(from[0]));
186 to_next += last_spec;
190 rte_node_enqueue_x1(graph, node,
198 /* !!! Home run !!! */
/* Entire burst matched the speculation: hand the whole stream over */
199 if (likely(last_spec == nb_objs)) {
200 rte_node_next_stream_move(graph, node, next_index);
205 /* Copy things successfully speculated till now */
206 rte_memcpy(to_next, from, last_spec * sizeof(from[0]));
207 rte_node_next_stream_put(graph, node, next_index, held);
/* Remember the last l2l3 type to seed next burst's speculation */
209 ctx->l2l3_type = last_type;
213 /* Packet Classification Node */
/* Graph node registration: edges are indexed by the PKT_CLS_NEXT_*
 * enum, so p_nxt[] values map directly onto these edge names.
 * NOTE(review): the .name field and the .next_nodes initializer's
 * opening/closing lines are not visible in this chunk.
 */
214 struct rte_node_register pkt_cls_node = {
215 .process = pkt_cls_node_process,
218 .nb_edges = PKT_CLS_NEXT_MAX,
220 /* Pkt drop node starts at '0' */
221 [PKT_CLS_NEXT_PKT_DROP] = "pkt_drop",
222 [PKT_CLS_NEXT_IP4_LOOKUP] = "ip4_lookup",
/* Register the node with the graph library at init time */
225 RTE_NODE_REGISTER(pkt_cls_node);