1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(C) 2019 Marvell International Ltd.
5 #include <rte_common.h>
6 #include <rte_memzone.h>
8 #include "otx2_ethdev.h"
10 /* NIX_RX_PARSE_S's ERRCODE + ERRLEV (12 bits) */
11 #define ERRCODE_ERRLEN_WIDTH 12
/* Byte size of the error-code lookup region: one entry per possible
 * 12-bit ERRLEV+ERRCODE value. NOTE(review): the multiplier on the
 * continuation line (entry size) is not visible in this chunk — confirm
 * against the full file before relying on the exact size.
 */
12 #define ERR_ARRAY_SZ ((BIT(ERRCODE_ERRLEN_WIDTH)) *\
15 #define LOOKUP_ARRAY_SZ (PTYPE_ARRAY_SZ + ERR_ARRAY_SZ)
/* dev_ops callback: report the packet types this PMD can recognize.
 * The static table below lists one entry per supported RTE_PTYPE_*,
 * annotated with the NPC layer (LB..LH) that detects it. The list is
 * only returned when HW ptype parsing is enabled (see the
 * NIX_RX_OFFLOAD_PTYPE_F check at the bottom); the alternative branch
 * is not visible in this chunk.
 */
18 otx2_nix_supported_ptypes_get(struct rte_eth_dev *eth_dev)
20 struct otx2_eth_dev *dev = otx2_eth_pmd_priv(eth_dev);
22 static const uint32_t ptypes[] = {
23 RTE_PTYPE_L2_ETHER_QINQ, /* LB */
24 RTE_PTYPE_L2_ETHER_VLAN, /* LB */
25 RTE_PTYPE_L2_ETHER_TIMESYNC, /* LB */
26 RTE_PTYPE_L2_ETHER_ARP, /* LC */
27 RTE_PTYPE_L2_ETHER_NSH, /* LC */
28 RTE_PTYPE_L2_ETHER_FCOE, /* LC */
29 RTE_PTYPE_L2_ETHER_MPLS, /* LC */
30 RTE_PTYPE_L3_IPV4, /* LC */
31 RTE_PTYPE_L3_IPV4_EXT, /* LC */
32 RTE_PTYPE_L3_IPV6, /* LC */
33 RTE_PTYPE_L3_IPV6_EXT, /* LC */
34 RTE_PTYPE_L4_TCP, /* LD */
35 RTE_PTYPE_L4_UDP, /* LD */
36 RTE_PTYPE_L4_SCTP, /* LD */
37 RTE_PTYPE_L4_ICMP, /* LD */
38 RTE_PTYPE_L4_IGMP, /* LD */
39 RTE_PTYPE_TUNNEL_GRE, /* LD */
40 RTE_PTYPE_TUNNEL_ESP, /* LD */
41 RTE_PTYPE_TUNNEL_NVGRE, /* LD */
42 RTE_PTYPE_TUNNEL_VXLAN, /* LE */
43 RTE_PTYPE_TUNNEL_GENEVE, /* LE */
44 RTE_PTYPE_TUNNEL_GTPC, /* LE */
45 RTE_PTYPE_TUNNEL_GTPU, /* LE */
46 RTE_PTYPE_TUNNEL_VXLAN_GPE, /* LE */
47 RTE_PTYPE_TUNNEL_MPLS_IN_GRE, /* LE */
48 RTE_PTYPE_TUNNEL_MPLS_IN_UDP, /* LE */
49 RTE_PTYPE_INNER_L2_ETHER,/* LF */
50 RTE_PTYPE_INNER_L3_IPV4, /* LG */
51 RTE_PTYPE_INNER_L3_IPV6, /* LG */
52 RTE_PTYPE_INNER_L4_TCP, /* LH */
53 RTE_PTYPE_INNER_L4_UDP, /* LH */
54 RTE_PTYPE_INNER_L4_SCTP, /* LH */
55 RTE_PTYPE_INNER_L4_ICMP, /* LH */
/* Only advertise the table when the rx path actually fills in ptypes.
 * NOTE(review): the return statements for both branches fall outside
 * this chunk — verify the taken/not-taken return values in the full file.
 */
59 if (dev->rx_offload_flags & NIX_RX_OFFLOAD_PTYPE_F)
66 * +-------------------+-------------------+
67 * |  | IL4 | IL3 | IL2 | TU | L4 | L3 | L2 |
68 * +-------------------+-------------------+
70 * +-------------------+-------------------+
71 * |  | LH  | LG  | LF  | LE | LD | LC | LB |
72 * +-------------------+-------------------+
74 * ptype       [LE - LD - LC - LB] = TU  - L4  - L3  - L2
75 * ptype_tunnel[LH - LG - LF]      = IL4 - IL3 - IL2 - TU
/* Populate the non-tunnel (outer headers) ptype lookup table.
 * Each table index packs one 4-bit NPC layer-type code per layer:
 * bits [3:0]=LB, [7:4]=LC, [11:8]=LD, [15:12]=LE. For every possible
 * index the corresponding RTE_PTYPE_* bits are OR-ed into 'val'
 * starting from RTE_PTYPE_UNKNOWN (0).
 * NOTE(review): the switch statements, break lines and the final store
 * of 'val' into ptype[idx] are not visible in this chunk — the case
 * labels below belong to per-layer switches on lb/lc/ld/le; confirm
 * fall-through behavior against the full file.
 */
79 nix_create_non_tunnel_ptype_array(uint16_t *ptype)
81 uint8_t lb, lc, ld, le;
85 for (idx = 0; idx < PTYPE_NON_TUNNEL_ARRAY_SZ; idx++) {
/* Extract one 4-bit layer-type code per NPC layer from the index. */
87 lc = (idx & 0xF0) >> 4;
88 ld = (idx & 0xF00) >> 8;
89 le = (idx & 0xF000) >> 12;
90 val = RTE_PTYPE_UNKNOWN;
/* LB layer: VLAN tagging variants. */
93 case NPC_LT_LB_STAG_QINQ:
94 val |= RTE_PTYPE_L2_ETHER_QINQ;
97 val |= RTE_PTYPE_L2_ETHER_VLAN;
/* LC layer: L2 specials and outer L3. */
103 val |= RTE_PTYPE_L2_ETHER_ARP;
106 val |= RTE_PTYPE_L2_ETHER_NSH;
109 val |= RTE_PTYPE_L2_ETHER_FCOE;
112 val |= RTE_PTYPE_L2_ETHER_MPLS;
115 val |= RTE_PTYPE_L3_IPV4;
117 case NPC_LT_LC_IP_OPT:
118 val |= RTE_PTYPE_L3_IPV4_EXT;
121 val |= RTE_PTYPE_L3_IPV6;
123 case NPC_LT_LC_IP6_EXT:
124 val |= RTE_PTYPE_L3_IPV6_EXT;
127 val |= RTE_PTYPE_L2_ETHER_TIMESYNC;
/* LD layer: outer L4 and L4-carried tunnels. */
133 val |= RTE_PTYPE_L4_TCP;
136 val |= RTE_PTYPE_L4_UDP;
139 val |= RTE_PTYPE_L4_SCTP;
142 case NPC_LT_LD_ICMP6:
143 val |= RTE_PTYPE_L4_ICMP;
146 val |= RTE_PTYPE_L4_IGMP;
149 val |= RTE_PTYPE_TUNNEL_GRE;
151 case NPC_LT_LD_NVGRE:
152 val |= RTE_PTYPE_TUNNEL_NVGRE;
155 val |= RTE_PTYPE_TUNNEL_ESP;
/* LE layer: UDP-based tunnels. */
160 case NPC_LT_LE_VXLAN:
161 val |= RTE_PTYPE_TUNNEL_VXLAN;
163 case NPC_LT_LE_VXLANGPE:
164 val |= RTE_PTYPE_TUNNEL_VXLAN_GPE;
166 case NPC_LT_LE_GENEVE:
167 val |= RTE_PTYPE_TUNNEL_GENEVE;
170 val |= RTE_PTYPE_TUNNEL_GTPC;
173 val |= RTE_PTYPE_TUNNEL_GTPU;
175 case NPC_LT_LE_TU_MPLS_IN_GRE:
176 val |= RTE_PTYPE_TUNNEL_MPLS_IN_GRE;
178 case NPC_LT_LE_TU_MPLS_IN_UDP:
179 val |= RTE_PTYPE_TUNNEL_MPLS_IN_UDP;
/* Inner-header RTE_PTYPE_INNER_* bits sit above the non-tunnel bits;
 * shift them down so they fit in the 16-bit tunnel table entry. */
186 #define TU_SHIFT(x) ((x) >> PTYPE_NON_TUNNEL_WIDTH)
/* Populate the tunnel (inner headers) ptype lookup table, which lives
 * directly after the non-tunnel table in the same memory region.
 * Index bit layout: bits [3:0]=LF, [7:4]=LG, [11:8]=LH (the LF
 * extraction line is outside this chunk — confirm in the full file).
 * NOTE(review): switch headers, breaks and the ptype[idx] store are
 * also not visible here.
 */
188 nix_create_tunnel_ptype_array(uint16_t *ptype)
194 /* Skip non tunnel ptype array memory */
195 ptype = ptype + PTYPE_NON_TUNNEL_ARRAY_SZ;
197 for (idx = 0; idx < PTYPE_TUNNEL_ARRAY_SZ; idx++) {
199 lg = (idx & 0xF0) >> 4;
200 lh = (idx & 0xF00) >> 8;
201 val = RTE_PTYPE_UNKNOWN;
/* LF layer: inner Ethernet. */
204 case NPC_LT_LF_TU_ETHER:
205 val |= TU_SHIFT(RTE_PTYPE_INNER_L2_ETHER);
/* LG layer: inner L3. */
209 case NPC_LT_LG_TU_IP:
210 val |= TU_SHIFT(RTE_PTYPE_INNER_L3_IPV4);
212 case NPC_LT_LG_TU_IP6:
213 val |= TU_SHIFT(RTE_PTYPE_INNER_L3_IPV6);
/* LH layer: inner L4. */
217 case NPC_LT_LH_TU_TCP:
218 val |= TU_SHIFT(RTE_PTYPE_INNER_L4_TCP);
220 case NPC_LT_LH_TU_UDP:
221 val |= TU_SHIFT(RTE_PTYPE_INNER_L4_UDP);
223 case NPC_LT_LH_TU_SCTP:
224 val |= TU_SHIFT(RTE_PTYPE_INNER_L4_SCTP);
226 case NPC_LT_LH_TU_ICMP:
227 case NPC_LT_LH_TU_ICMP6:
228 val |= TU_SHIFT(RTE_PTYPE_INNER_L4_ICMP);
/* Build the rx ol_flags (checksum status) lookup table, indexed by the
 * 12-bit ERRLEV+ERRCODE from NIX_RX_PARSE_S. The table starts right
 * after the two ptype tables in the shared lookup memory.
 * NOTE(review): the errlev extraction (presumably errlev = idx & 0xf),
 * the switch on errlev and the final ol_flags[idx] store are not
 * visible in this chunk; the branches below appear to correspond to
 * the different NPC error levels — confirm against the full file.
 */
237 nix_create_rx_ol_flags_array(void *mem)
239 uint16_t idx, errcode, errlev;
240 uint32_t val, *ol_flags;
242 /* Skip ptype array memory */
243 ol_flags = (uint32_t *)((uint8_t *)mem + PTYPE_ARRAY_SZ);
245 for (idx = 0; idx < BIT(ERRCODE_ERRLEN_WIDTH); idx++) {
/* Error code occupies bits [11:4] of the table index. */
247 errcode = (idx & 0xff0) >> 4;
/* Start from "checksum status unknown" for all three fields. */
249 val = PKT_RX_IP_CKSUM_UNKNOWN;
250 val |= PKT_RX_L4_CKSUM_UNKNOWN;
251 val |= PKT_RX_OUTER_L4_CKSUM_UNKNOWN;
255 /* Mark all errors as BAD checksum errors */
257 val |= PKT_RX_IP_CKSUM_BAD;
258 val |= PKT_RX_L4_CKSUM_BAD;
/* No error at this level: both checksums verified good. */
260 val |= PKT_RX_IP_CKSUM_GOOD;
261 val |= PKT_RX_L4_CKSUM_GOOD;
/* Outer-IP error level: flag outer/inner IP checksum problems. */
265 if (errcode == NPC_EC_OIP4_CSUM ||
266 errcode == NPC_EC_IP_FRAG_OFFSET_1) {
267 val |= PKT_RX_IP_CKSUM_BAD;
268 val |= PKT_RX_EIP_CKSUM_BAD;
270 val |= PKT_RX_IP_CKSUM_GOOD;
/* Inner-IP error level: only the inner IPv4 checksum can be bad. */
274 if (errcode == NPC_EC_IIP4_CSUM)
275 val |= PKT_RX_IP_CKSUM_BAD;
277 val |= PKT_RX_IP_CKSUM_GOOD;
/* L4 error level: IP already passed; check outer/inner L4 codes. */
280 val |= PKT_RX_IP_CKSUM_GOOD;
281 if (errcode == NIX_RX_PERRCODE_OL4_CHK) {
282 val |= PKT_RX_OUTER_L4_CKSUM_BAD;
283 val |= PKT_RX_L4_CKSUM_BAD;
284 } else if (errcode == NIX_RX_PERRCODE_IL4_CHK) {
285 val |= PKT_RX_L4_CKSUM_BAD;
287 val |= PKT_RX_L4_CKSUM_GOOD;
/* Return the shared fastpath lookup memory (ptype tables + ol_flags
 * table), creating and populating it on first use. The memzone name
 * makes the region process-wide: a later caller gets the already
 * initialized zone from rte_memzone_lookup().
 * NOTE(review): the 'mem' variable, the NULL checks on both memzone
 * calls and the return statements are outside this chunk — confirm the
 * error path in the full file.
 */
297 otx2_nix_fastpath_lookup_mem_get(void)
299 const char name[] = "otx2_nix_fastpath_lookup_mem";
300 const struct rte_memzone *mz;
/* Fast path: zone already reserved and filled by an earlier call. */
303 mz = rte_memzone_lookup(name);
307 /* Request for the first time */
308 mz = rte_memzone_reserve_aligned(name, LOOKUP_ARRAY_SZ,
309 SOCKET_ID_ANY, 0, OTX2_ALIGN);
312 /* Form the ptype array lookup memory */
313 nix_create_non_tunnel_ptype_array(mem);
314 nix_create_tunnel_ptype_array(mem);
315 /* Form the rx ol_flags based on errcode */
316 nix_create_rx_ol_flags_array(mem);