1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(C) 2019 Marvell International Ltd.
5 #include <rte_common.h>
6 #include <rte_memzone.h>
8 #include "otx2_ethdev.h"
10 /* NIX_RX_PARSE_S's ERRCODE + ERRLEV (12 bits) */
11 #define ERRCODE_ERRLEN_WIDTH 12
12 #define ERR_ARRAY_SZ ((BIT(ERRCODE_ERRLEN_WIDTH)) *\
15 #define LOOKUP_ARRAY_SZ (PTYPE_ARRAY_SZ + ERR_ARRAY_SZ)
/*
 * Report the packet types this PMD can classify in hardware.
 *
 * The static table below lists one RTE_PTYPE_* per NPC layer type the
 * parser can recognize; the trailing comment on each entry names the
 * NPC layer (LB..LH) that produces it.
 *
 * NOTE(review): this span appears truncated — the function's return
 * type line, opening brace, the customary RTE_PTYPE_UNKNOWN array
 * terminator and the return statements are not visible here. Confirm
 * against the full source before relying on the exact contract;
 * presumably the ptypes array is returned only when the PTYPE Rx
 * offload flag is set, and NULL otherwise.
 */
18 otx2_nix_supported_ptypes_get(struct rte_eth_dev *eth_dev)
20 struct otx2_eth_dev *dev = otx2_eth_pmd_priv(eth_dev);
22 static const uint32_t ptypes[] = {
23 RTE_PTYPE_L2_ETHER_QINQ, /* LB */
24 RTE_PTYPE_L2_ETHER_VLAN, /* LB */
25 RTE_PTYPE_L2_ETHER_TIMESYNC, /* LB */
26 RTE_PTYPE_L2_ETHER_ARP, /* LC */
27 RTE_PTYPE_L2_ETHER_NSH, /* LC */
28 RTE_PTYPE_L2_ETHER_FCOE, /* LC */
29 RTE_PTYPE_L2_ETHER_MPLS, /* LC */
30 RTE_PTYPE_L3_IPV4, /* LC */
31 RTE_PTYPE_L3_IPV4_EXT, /* LC */
32 RTE_PTYPE_L3_IPV6, /* LC */
33 RTE_PTYPE_L3_IPV6_EXT, /* LC */
34 RTE_PTYPE_L4_TCP, /* LD */
35 RTE_PTYPE_L4_UDP, /* LD */
36 RTE_PTYPE_L4_SCTP, /* LD */
37 RTE_PTYPE_L4_ICMP, /* LD */
38 RTE_PTYPE_L4_IGMP, /* LD */
39 RTE_PTYPE_TUNNEL_GRE, /* LD */
40 RTE_PTYPE_TUNNEL_ESP, /* LD */
41 RTE_PTYPE_TUNNEL_NVGRE, /* LD */
42 RTE_PTYPE_TUNNEL_VXLAN, /* LE */
43 RTE_PTYPE_TUNNEL_GENEVE, /* LE */
44 RTE_PTYPE_TUNNEL_GTPC, /* LE */
45 RTE_PTYPE_TUNNEL_GTPU, /* LE */
46 RTE_PTYPE_TUNNEL_VXLAN_GPE, /* LE */
47 RTE_PTYPE_TUNNEL_MPLS_IN_GRE, /* LE */
48 RTE_PTYPE_TUNNEL_MPLS_IN_UDP, /* LE */
49 RTE_PTYPE_INNER_L2_ETHER,/* LF */
50 RTE_PTYPE_INNER_L3_IPV4, /* LG */
51 RTE_PTYPE_INNER_L3_IPV6, /* LG */
52 RTE_PTYPE_INNER_L4_TCP, /* LH */
53 RTE_PTYPE_INNER_L4_UDP, /* LH */
54 RTE_PTYPE_INNER_L4_SCTP, /* LH */
55 RTE_PTYPE_INNER_L4_ICMP, /* LH */
/* Advertise the table only when the ptype Rx offload fast path is on */
59 if (dev->rx_offload_flags & NIX_RX_OFFLOAD_PTYPE_F)
66 * +-------------------+-------------------+
67 * | | IL4 | IL3| IL2 | TU | L4 | L3 | L2 |
68 * +-------------------+-------------------+
70 * +-------------------+-------------------+
71 * | | LH | LG | LF | LE | LD | LC | LB |
72 * +-------------------+-------------------+
74 * ptype [LE - LD - LC - LB] = TU - L4 - L3 - L2
75 * ptype_tunnel[LH - LG - LF] = IL4 - IL3 - IL2 - TU
/*
 * Populate the non-tunnel half of the ptype lookup array.
 *
 * Each index packs four NPC layer-type nibbles — lc at bits [7:4],
 * ld at bits [11:8], le at bits [15:12] (and, presumably, lb at
 * bits [3:0]; the lb extraction line is not visible in this view —
 * confirm against the full source). For every possible combination
 * the matching RTE_PTYPE_* bits are OR-ed into val, starting from
 * RTE_PTYPE_UNKNOWN.
 *
 * NOTE(review): this span appears truncated — the `static void`
 * storage/return line, the switch statement headers, the `break`s,
 * the store of val into ptype[idx] and the closing braces are not
 * visible here. The case-label groupings below are inferred from the
 * per-layer ordering (LB, LC, LD, LE) — verify against the full file.
 */
79 nix_create_non_tunnel_ptype_array(uint16_t *ptype)
81 uint8_t lb, lc, ld, le;
/* One entry per possible (le, ld, lc, lb) nibble combination */
85 for (idx = 0; idx < PTYPE_NON_TUNNEL_ARRAY_SZ; idx++) {
87 lc = (idx & 0xF0) >> 4;
88 ld = (idx & 0xF00) >> 8;
89 le = (idx & 0xF000) >> 12;
90 val = RTE_PTYPE_UNKNOWN;
/* LB layer: VLAN tagging variants */
94 val |= RTE_PTYPE_L2_ETHER_QINQ;
97 val |= RTE_PTYPE_L2_ETHER_VLAN;
/* LC layer: L2 ethertypes and outer L3 */
103 val |= RTE_PTYPE_L2_ETHER_ARP;
106 val |= RTE_PTYPE_L2_ETHER_NSH;
109 val |= RTE_PTYPE_L2_ETHER_FCOE;
112 val |= RTE_PTYPE_L2_ETHER_MPLS;
115 val |= RTE_PTYPE_L3_IPV4;
117 case NPC_LT_LC_IP_OPT:
118 val |= RTE_PTYPE_L3_IPV4_EXT;
121 val |= RTE_PTYPE_L3_IPV6;
123 case NPC_LT_LC_IP6_EXT:
124 val |= RTE_PTYPE_L3_IPV6_EXT;
127 val |= RTE_PTYPE_L2_ETHER_TIMESYNC;
/* LD layer: outer L4 and first-level tunnels */
133 val |= RTE_PTYPE_L4_TCP;
136 val |= RTE_PTYPE_L4_UDP;
139 val |= RTE_PTYPE_L4_SCTP;
142 val |= RTE_PTYPE_L4_ICMP;
145 val |= RTE_PTYPE_L4_IGMP;
148 val |= RTE_PTYPE_TUNNEL_GRE;
150 case NPC_LT_LD_NVGRE:
151 val |= RTE_PTYPE_TUNNEL_NVGRE;
154 val |= RTE_PTYPE_TUNNEL_ESP;
/* LE layer: UDP-based and MPLS tunnels */
159 case NPC_LT_LE_VXLAN:
160 val |= RTE_PTYPE_TUNNEL_VXLAN;
162 case NPC_LT_LE_VXLANGPE:
163 val |= RTE_PTYPE_TUNNEL_VXLAN_GPE;
165 case NPC_LT_LE_GENEVE:
166 val |= RTE_PTYPE_TUNNEL_GENEVE;
169 val |= RTE_PTYPE_TUNNEL_GTPC;
172 val |= RTE_PTYPE_TUNNEL_GTPU;
174 case NPC_LT_LE_TU_MPLS_IN_GRE:
175 val |= RTE_PTYPE_TUNNEL_MPLS_IN_GRE;
177 case NPC_LT_LE_TU_MPLS_IN_UDP:
178 val |= RTE_PTYPE_TUNNEL_MPLS_IN_UDP;
/*
 * Shift an inner RTE_PTYPE_INNER_* value down so it fits in the
 * narrower per-entry slot of the tunnel half of the lookup table
 * (the fast path presumably shifts it back up on lookup — confirm
 * against the Rx datapath code).
 */
185 #define TU_SHIFT(x) ((x) >> PTYPE_NON_TUNNEL_WIDTH)
/*
 * Populate the tunnel (inner-header) half of the ptype lookup array.
 *
 * Indices pack the inner NPC layer-type nibbles — lg at bits [7:4],
 * lh at bits [11:8] (and presumably lf at bits [3:0]; that extraction
 * line is not visible in this view — confirm against the full source).
 *
 * NOTE(review): this span appears truncated — the `static void` line,
 * variable declarations, switch headers, `break`s, the store into
 * ptype[idx] and the closing braces are not visible here.
 */
187 nix_create_tunnel_ptype_array(uint16_t *ptype)
193 /* Skip non tunnel ptype array memory */
194 ptype = ptype + PTYPE_NON_TUNNEL_ARRAY_SZ;
/* One entry per possible (lh, lg, lf) nibble combination */
196 for (idx = 0; idx < PTYPE_TUNNEL_ARRAY_SZ; idx++) {
198 lg = (idx & 0xF0) >> 4;
199 lh = (idx & 0xF00) >> 8;
200 val = RTE_PTYPE_UNKNOWN;
/* LF layer: inner Ethernet */
203 case NPC_LT_LF_TU_ETHER:
204 val |= TU_SHIFT(RTE_PTYPE_INNER_L2_ETHER);
/* LG layer: inner L3 */
208 case NPC_LT_LG_TU_IP:
209 val |= TU_SHIFT(RTE_PTYPE_INNER_L3_IPV4);
211 case NPC_LT_LG_TU_IP6:
212 val |= TU_SHIFT(RTE_PTYPE_INNER_L3_IPV6);
/* LH layer: inner L4 */
216 case NPC_LT_LH_TU_TCP:
217 val |= TU_SHIFT(RTE_PTYPE_INNER_L4_TCP);
219 case NPC_LT_LH_TU_UDP:
220 val |= TU_SHIFT(RTE_PTYPE_INNER_L4_UDP);
222 case NPC_LT_LH_TU_SCTP:
223 val |= TU_SHIFT(RTE_PTYPE_INNER_L4_SCTP);
225 case NPC_LT_LH_TU_ICMP:
226 val |= TU_SHIFT(RTE_PTYPE_INNER_L4_ICMP);
/*
 * Build the Rx ol_flags lookup array, indexed by the 12-bit
 * ERRLEV+ERRCODE field of NIX_RX_PARSE_S, placed in the lookup
 * memzone just after the ptype arrays.
 *
 * errcode is taken from bits [11:4] of the index; errlev is
 * presumably bits [3:0] (that extraction line is not visible in this
 * view — confirm against the full source). For each combination the
 * appropriate PKT_RX_*_CKSUM_{GOOD,BAD,UNKNOWN} bits are chosen.
 *
 * NOTE(review): this span appears truncated — the `static void` line,
 * the switch over errlev with its NPC_ERRLEV_* case labels, the
 * `else` keywords pairing the GOOD branches with the BAD ones, the
 * store into ol_flags[idx] and the closing braces are not visible
 * here.
 */
235 nix_create_rx_ol_flags_array(void *mem)
237 uint16_t idx, errcode, errlev;
238 uint32_t val, *ol_flags;
240 /* Skip ptype array memory */
241 ol_flags = (uint32_t *)((uint8_t *)mem + PTYPE_ARRAY_SZ);
/* One entry per possible (errlev, errcode) combination */
243 for (idx = 0; idx < BIT(ERRCODE_ERRLEN_WIDTH); idx++) {
245 errcode = (idx & 0xff0) >> 4;
/* Start from "checksum state unknown" for all three layers */
247 val = PKT_RX_IP_CKSUM_UNKNOWN;
248 val |= PKT_RX_L4_CKSUM_UNKNOWN;
249 val |= PKT_RX_OUTER_L4_CKSUM_UNKNOWN;
253 /* Mark all errors as BAD checksum errors */
255 val |= PKT_RX_IP_CKSUM_BAD;
256 val |= PKT_RX_L4_CKSUM_BAD;
/* No error at this level: both checksums verified good */
258 val |= PKT_RX_IP_CKSUM_GOOD;
259 val |= PKT_RX_L4_CKSUM_GOOD;
/* Outer IPv4 checksum / fragment errors */
263 if (errcode == NPC_EC_OIP4_CSUM ||
264 errcode == NPC_EC_IP_FRAG_OFFSET_1) {
265 val |= PKT_RX_IP_CKSUM_BAD;
266 val |= PKT_RX_EIP_CKSUM_BAD;
268 val |= PKT_RX_IP_CKSUM_GOOD;
/* Inner IPv4 checksum error */
272 if (errcode == NPC_EC_IIP4_CSUM)
273 val |= PKT_RX_IP_CKSUM_BAD;
275 val |= PKT_RX_IP_CKSUM_GOOD;
/* Outer vs inner L4 checksum errors reported by NIX */
278 if (errcode == NIX_RX_PERRCODE_OL4_CHK) {
279 val |= PKT_RX_OUTER_L4_CKSUM_BAD;
280 val |= PKT_RX_L4_CKSUM_BAD;
281 } else if (errcode == NIX_RX_PERRCODE_IL4_CHK) {
282 val |= PKT_RX_L4_CKSUM_BAD;
284 val |= PKT_RX_IP_CKSUM_GOOD;
285 val |= PKT_RX_L4_CKSUM_GOOD;
295 otx2_nix_fastpath_lookup_mem_get(void)
297 const char name[] = "otx2_nix_fastpath_lookup_mem";
298 const struct rte_memzone *mz;
301 mz = rte_memzone_lookup(name);
305 /* Request for the first time */
306 mz = rte_memzone_reserve_aligned(name, LOOKUP_ARRAY_SZ,
307 SOCKET_ID_ANY, 0, OTX2_ALIGN);
310 /* Form the ptype array lookup memory */
311 nix_create_non_tunnel_ptype_array(mem);
312 nix_create_tunnel_ptype_array(mem);
313 /* Form the rx ol_flags based on errcode */
314 nix_create_rx_ol_flags_array(mem);