/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(C) 2019 Marvell International Ltd.
 */
5 #include <rte_common.h>
6 #include <rte_memzone.h>
8 #include "otx2_ethdev.h"
/* NIX_RX_PARSE_S's ERRCODE + ERRLEV (12 bits) */
#define ERRCODE_ERRLEN_WIDTH	12
/* One 32-bit ol_flags entry per possible ERRLEV+ERRCODE value */
#define ERR_ARRAY_SZ		((BIT(ERRCODE_ERRLEN_WIDTH)) *\
				sizeof(uint32_t))

/* Total lookup memzone size: ptype tables followed by the error table */
#define LOOKUP_ARRAY_SZ		(PTYPE_ARRAY_SZ + ERR_ARRAY_SZ)
18 otx2_nix_supported_ptypes_get(struct rte_eth_dev *eth_dev)
20 struct otx2_eth_dev *dev = otx2_eth_pmd_priv(eth_dev);
22 static const uint32_t ptypes[] = {
23 RTE_PTYPE_L2_ETHER_QINQ, /* LB */
24 RTE_PTYPE_L2_ETHER_VLAN, /* LB */
25 RTE_PTYPE_L2_ETHER_TIMESYNC, /* LB */
26 RTE_PTYPE_L2_ETHER_ARP, /* LC */
27 RTE_PTYPE_L2_ETHER_NSH, /* LC */
28 RTE_PTYPE_L2_ETHER_FCOE, /* LC */
29 RTE_PTYPE_L2_ETHER_MPLS, /* LC */
30 RTE_PTYPE_L3_IPV4, /* LC */
31 RTE_PTYPE_L3_IPV4_EXT, /* LC */
32 RTE_PTYPE_L3_IPV6, /* LC */
33 RTE_PTYPE_L3_IPV6_EXT, /* LC */
34 RTE_PTYPE_L4_TCP, /* LD */
35 RTE_PTYPE_L4_UDP, /* LD */
36 RTE_PTYPE_L4_SCTP, /* LD */
37 RTE_PTYPE_L4_ICMP, /* LD */
38 RTE_PTYPE_L4_IGMP, /* LD */
39 RTE_PTYPE_TUNNEL_GRE, /* LD */
40 RTE_PTYPE_TUNNEL_ESP, /* LD */
41 RTE_PTYPE_TUNNEL_NVGRE, /* LD */
42 RTE_PTYPE_TUNNEL_VXLAN, /* LE */
43 RTE_PTYPE_TUNNEL_GENEVE, /* LE */
44 RTE_PTYPE_TUNNEL_GTPC, /* LE */
45 RTE_PTYPE_TUNNEL_GTPU, /* LE */
46 RTE_PTYPE_TUNNEL_VXLAN_GPE, /* LE */
47 RTE_PTYPE_TUNNEL_MPLS_IN_GRE, /* LE */
48 RTE_PTYPE_TUNNEL_MPLS_IN_UDP, /* LE */
49 RTE_PTYPE_INNER_L2_ETHER,/* LF */
50 RTE_PTYPE_INNER_L3_IPV4, /* LG */
51 RTE_PTYPE_INNER_L3_IPV6, /* LG */
52 RTE_PTYPE_INNER_L4_TCP, /* LH */
53 RTE_PTYPE_INNER_L4_UDP, /* LH */
54 RTE_PTYPE_INNER_L4_SCTP, /* LH */
55 RTE_PTYPE_INNER_L4_ICMP, /* LH */
59 if (dev->rx_offload_flags & NIX_RX_OFFLOAD_PTYPE_F)
/*
 * +------------------ +------------------ +
 * |  | IL4 | IL3| IL2 | TU | L4 | L3 | L2 |
 * +-------------------+-------------------+
 *
 * +-------------------+------------------ +
 * |  | LH | LG  | LF  | LE | LD | LC | LB |
 * +-------------------+-------------------+
 *
 * ptype       [LE - LD - LC - LB]  = TU  - L4  - L3  - L2
 * ptype_tunnel[LH - LG - LF]       = IL4 - IL3 - IL2 - TU
 */
79 nix_create_non_tunnel_ptype_array(uint16_t *ptype)
81 uint8_t lb, lc, ld, le;
84 for (idx = 0; idx < PTYPE_NON_TUNNEL_ARRAY_SZ; idx++) {
86 lc = (idx & 0xF0) >> 4;
87 ld = (idx & 0xF00) >> 8;
88 le = (idx & 0xF000) >> 12;
89 val = RTE_PTYPE_UNKNOWN;
93 val |= RTE_PTYPE_L2_ETHER_QINQ;
96 val |= RTE_PTYPE_L2_ETHER_VLAN;
102 val |= RTE_PTYPE_L2_ETHER_ARP;
105 val |= RTE_PTYPE_L2_ETHER_NSH;
108 val |= RTE_PTYPE_L2_ETHER_FCOE;
111 val |= RTE_PTYPE_L2_ETHER_MPLS;
114 val |= RTE_PTYPE_L3_IPV4;
116 case NPC_LT_LC_IP_OPT:
117 val |= RTE_PTYPE_L3_IPV4_EXT;
120 val |= RTE_PTYPE_L3_IPV6;
122 case NPC_LT_LC_IP6_EXT:
123 val |= RTE_PTYPE_L3_IPV6_EXT;
126 val |= RTE_PTYPE_L2_ETHER_TIMESYNC;
132 val |= RTE_PTYPE_L4_TCP;
135 val |= RTE_PTYPE_L4_UDP;
138 val |= RTE_PTYPE_L4_SCTP;
141 val |= RTE_PTYPE_L4_ICMP;
144 val |= RTE_PTYPE_L4_IGMP;
147 val |= RTE_PTYPE_TUNNEL_GRE;
149 case NPC_LT_LD_NVGRE:
150 val |= RTE_PTYPE_TUNNEL_NVGRE;
153 val |= RTE_PTYPE_TUNNEL_ESP;
158 case NPC_LT_LE_VXLAN:
159 val |= RTE_PTYPE_TUNNEL_VXLAN;
161 case NPC_LT_LE_VXLANGPE:
162 val |= RTE_PTYPE_TUNNEL_VXLAN_GPE;
164 case NPC_LT_LE_GENEVE:
165 val |= RTE_PTYPE_TUNNEL_GENEVE;
168 val |= RTE_PTYPE_TUNNEL_GTPC;
171 val |= RTE_PTYPE_TUNNEL_GTPU;
173 case NPC_LT_LE_TU_MPLS_IN_GRE:
174 val |= RTE_PTYPE_TUNNEL_MPLS_IN_GRE;
176 case NPC_LT_LE_TU_MPLS_IN_UDP:
177 val |= RTE_PTYPE_TUNNEL_MPLS_IN_UDP;
184 #define TU_SHIFT(x) ((x) >> PTYPE_WIDTH)
186 nix_create_tunnel_ptype_array(uint16_t *ptype)
191 /* Skip non tunnel ptype array memory */
192 ptype = ptype + PTYPE_NON_TUNNEL_ARRAY_SZ;
194 for (idx = 0; idx < PTYPE_TUNNEL_ARRAY_SZ; idx++) {
196 lf = (idx & 0xF0) >> 4;
197 lg = (idx & 0xF00) >> 8;
198 val = RTE_PTYPE_UNKNOWN;
201 case NPC_LT_LF_TU_ETHER:
202 val |= TU_SHIFT(RTE_PTYPE_INNER_L2_ETHER);
206 case NPC_LT_LG_TU_IP:
207 val |= TU_SHIFT(RTE_PTYPE_INNER_L3_IPV4);
209 case NPC_LT_LG_TU_IP6:
210 val |= TU_SHIFT(RTE_PTYPE_INNER_L3_IPV6);
214 case NPC_LT_LH_TU_TCP:
215 val |= TU_SHIFT(RTE_PTYPE_INNER_L4_TCP);
217 case NPC_LT_LH_TU_UDP:
218 val |= TU_SHIFT(RTE_PTYPE_INNER_L4_UDP);
220 case NPC_LT_LH_TU_SCTP:
221 val |= TU_SHIFT(RTE_PTYPE_INNER_L4_SCTP);
223 case NPC_LT_LH_TU_ICMP:
224 val |= TU_SHIFT(RTE_PTYPE_INNER_L4_ICMP);
233 nix_create_rx_ol_flags_array(void *mem)
235 uint16_t idx, errcode, errlev;
236 uint32_t val, *ol_flags;
238 /* Skip ptype array memory */
239 ol_flags = (uint32_t *)((uint8_t *)mem + PTYPE_ARRAY_SZ);
241 for (idx = 0; idx < BIT(ERRCODE_ERRLEN_WIDTH); idx++) {
243 errcode = (idx & 0xff0) >> 4;
245 val = PKT_RX_IP_CKSUM_UNKNOWN;
246 val |= PKT_RX_L4_CKSUM_UNKNOWN;
247 val |= PKT_RX_OUTER_L4_CKSUM_UNKNOWN;
251 /* Mark all errors as BAD checksum errors */
253 val |= PKT_RX_IP_CKSUM_BAD;
254 val |= PKT_RX_L4_CKSUM_BAD;
256 val |= PKT_RX_IP_CKSUM_GOOD;
257 val |= PKT_RX_L4_CKSUM_GOOD;
261 if (errcode == NPC_EC_OIP4_CSUM ||
262 errcode == NPC_EC_IP_FRAG_OFFSET_1) {
263 val |= PKT_RX_IP_CKSUM_BAD;
264 val |= PKT_RX_EIP_CKSUM_BAD;
266 val |= PKT_RX_IP_CKSUM_GOOD;
270 if (errcode == NPC_EC_IIP4_CSUM)
271 val |= PKT_RX_IP_CKSUM_BAD;
273 val |= PKT_RX_IP_CKSUM_GOOD;
276 if (errcode == NIX_RX_PERRCODE_OL4_CHK) {
277 val |= PKT_RX_OUTER_L4_CKSUM_BAD;
278 val |= PKT_RX_L4_CKSUM_BAD;
279 } else if (errcode == NIX_RX_PERRCODE_IL4_CHK) {
280 val |= PKT_RX_L4_CKSUM_BAD;
282 val |= PKT_RX_IP_CKSUM_GOOD;
283 val |= PKT_RX_L4_CKSUM_GOOD;
/*
 * Return (or lazily create) the process-shared fast-path lookup
 * memzone that holds the ptype tables and the errcode->ol_flags table.
 *
 * NOTE(review): this chunk is truncated by the extraction — the return
 * type, braces, the `mem` declaration, the NULL checks and the return
 * statements are missing around the visible lines below (the function
 * also continues past the end of this chunk).  Recover the framing
 * from the original source; only comments are added here.
 */
293 otx2_nix_fastpath_lookup_mem_get(void)
295 const char name[] = "otx2_nix_fastpath_lookup_mem";
296 const struct rte_memzone *mz;
/* Reuse the memzone if another port in this process created it */
299 mz = rte_memzone_lookup(name);
303 /* Request for the first time */
304 mz = rte_memzone_reserve_aligned(name, LOOKUP_ARRAY_SZ,
305 SOCKET_ID_ANY, 0, OTX2_ALIGN)
/* Populate the freshly reserved zone: ptype tables, then error table */
308 /* Form the ptype array lookup memory */
309 nix_create_non_tunnel_ptype_array(mem);
310 nix_create_tunnel_ptype_array(mem);
311 /* Form the rx ol_flags based on errcode */
312 nix_create_rx_ol_flags_array(mem);