1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(C) 2019 Marvell International Ltd.
5 #include <rte_common.h>
6 #include <rte_memzone.h>
8 #include "otx2_ethdev.h"
10 /* NIX_RX_PARSE_S's ERRCODE + ERRLEV (12 bits) */
11 #define ERRCODE_ERRLEN_WIDTH 12
12 #define ERR_ARRAY_SZ ((BIT(ERRCODE_ERRLEN_WIDTH)) *\
15 #define LOOKUP_ARRAY_SZ (PTYPE_ARRAY_SZ + ERR_ARRAY_SZ)
18 otx2_nix_supported_ptypes_get(struct rte_eth_dev *eth_dev)
20 struct otx2_eth_dev *dev = otx2_eth_pmd_priv(eth_dev);
22 static const uint32_t ptypes[] = {
23 RTE_PTYPE_L2_ETHER_QINQ, /* LB */
24 RTE_PTYPE_L2_ETHER_VLAN, /* LB */
25 RTE_PTYPE_L2_ETHER_TIMESYNC, /* LB */
26 RTE_PTYPE_L2_ETHER_ARP, /* LC */
27 RTE_PTYPE_L2_ETHER_NSH, /* LC */
28 RTE_PTYPE_L2_ETHER_FCOE, /* LC */
29 RTE_PTYPE_L2_ETHER_MPLS, /* LC */
30 RTE_PTYPE_L3_IPV4, /* LC */
31 RTE_PTYPE_L3_IPV4_EXT, /* LC */
32 RTE_PTYPE_L3_IPV6, /* LC */
33 RTE_PTYPE_L3_IPV6_EXT, /* LC */
34 RTE_PTYPE_L4_TCP, /* LD */
35 RTE_PTYPE_L4_UDP, /* LD */
36 RTE_PTYPE_L4_SCTP, /* LD */
37 RTE_PTYPE_L4_ICMP, /* LD */
38 RTE_PTYPE_L4_IGMP, /* LD */
39 RTE_PTYPE_TUNNEL_GRE, /* LD */
40 RTE_PTYPE_TUNNEL_ESP, /* LD */
41 RTE_PTYPE_TUNNEL_NVGRE, /* LD */
42 RTE_PTYPE_TUNNEL_VXLAN, /* LE */
43 RTE_PTYPE_TUNNEL_GENEVE, /* LE */
44 RTE_PTYPE_TUNNEL_GTPC, /* LE */
45 RTE_PTYPE_TUNNEL_GTPU, /* LE */
46 RTE_PTYPE_TUNNEL_VXLAN_GPE, /* LE */
47 RTE_PTYPE_TUNNEL_MPLS_IN_GRE, /* LE */
48 RTE_PTYPE_TUNNEL_MPLS_IN_UDP, /* LE */
49 RTE_PTYPE_INNER_L2_ETHER,/* LF */
50 RTE_PTYPE_INNER_L3_IPV4, /* LG */
51 RTE_PTYPE_INNER_L3_IPV6, /* LG */
52 RTE_PTYPE_INNER_L4_TCP, /* LH */
53 RTE_PTYPE_INNER_L4_UDP, /* LH */
54 RTE_PTYPE_INNER_L4_SCTP, /* LH */
55 RTE_PTYPE_INNER_L4_ICMP, /* LH */
58 if (dev->rx_offload_flags & NIX_RX_OFFLOAD_PTYPE_F)
/*
 * Layout of the 16-bit ptype lookup index, one NPC layer-type nibble
 * per parser layer:
 *
 * +-------------------+-------------------+
 * |  | IL4 | IL3| IL2 | TU | L4 | L3 | L2 |
 * +-------------------+-------------------+
 *
 * +-------------------+-------------------+
 * |  | LH | LG  | LF  | LE | LD | LC | LB |
 * +-------------------+-------------------+
 *
 * ptype       [LE - LD - LC - LB] = TU  - L4  - L3  - L2
 * ptype_tunnel[LH - LG - LF]      = IL4 - IL3 - IL2 - TU
 */
78 nix_create_non_tunnel_ptype_array(uint16_t *ptype)
80 uint8_t lb, lc, ld, le;
83 for (idx = 0; idx < PTYPE_NON_TUNNEL_ARRAY_SZ; idx++) {
85 lc = (idx & 0xF0) >> 4;
86 ld = (idx & 0xF00) >> 8;
87 le = (idx & 0xF000) >> 12;
88 val = RTE_PTYPE_UNKNOWN;
92 val |= RTE_PTYPE_L2_ETHER_QINQ;
95 val |= RTE_PTYPE_L2_ETHER_VLAN;
101 val |= RTE_PTYPE_L2_ETHER_ARP;
104 val |= RTE_PTYPE_L2_ETHER_NSH;
107 val |= RTE_PTYPE_L2_ETHER_FCOE;
110 val |= RTE_PTYPE_L2_ETHER_MPLS;
113 val |= RTE_PTYPE_L3_IPV4;
115 case NPC_LT_LC_IP_OPT:
116 val |= RTE_PTYPE_L3_IPV4_EXT;
119 val |= RTE_PTYPE_L3_IPV6;
121 case NPC_LT_LC_IP6_EXT:
122 val |= RTE_PTYPE_L3_IPV6_EXT;
125 val |= RTE_PTYPE_L2_ETHER_TIMESYNC;
131 val |= RTE_PTYPE_L4_TCP;
134 val |= RTE_PTYPE_L4_UDP;
137 val |= RTE_PTYPE_L4_SCTP;
140 val |= RTE_PTYPE_L4_ICMP;
143 val |= RTE_PTYPE_L4_IGMP;
146 val |= RTE_PTYPE_TUNNEL_GRE;
148 case NPC_LT_LD_NVGRE:
149 val |= RTE_PTYPE_TUNNEL_NVGRE;
152 val |= RTE_PTYPE_TUNNEL_ESP;
157 case NPC_LT_LE_VXLAN:
158 val |= RTE_PTYPE_TUNNEL_VXLAN;
160 case NPC_LT_LE_VXLANGPE:
161 val |= RTE_PTYPE_TUNNEL_VXLAN_GPE;
163 case NPC_LT_LE_GENEVE:
164 val |= RTE_PTYPE_TUNNEL_GENEVE;
167 val |= RTE_PTYPE_TUNNEL_GTPC;
170 val |= RTE_PTYPE_TUNNEL_GTPU;
172 case NPC_LT_LE_TU_MPLS_IN_GRE:
173 val |= RTE_PTYPE_TUNNEL_MPLS_IN_GRE;
175 case NPC_LT_LE_TU_MPLS_IN_UDP:
176 val |= RTE_PTYPE_TUNNEL_MPLS_IN_UDP;
183 #define TU_SHIFT(x) ((x) >> PTYPE_WIDTH)
185 nix_create_tunnel_ptype_array(uint16_t *ptype)
190 /* Skip non tunnel ptype array memory */
191 ptype = ptype + PTYPE_NON_TUNNEL_ARRAY_SZ;
193 for (idx = 0; idx < PTYPE_TUNNEL_ARRAY_SZ; idx++) {
195 lf = (idx & 0xF0) >> 4;
196 lg = (idx & 0xF00) >> 8;
197 val = RTE_PTYPE_UNKNOWN;
200 case NPC_LT_LF_TU_ETHER:
201 val |= TU_SHIFT(RTE_PTYPE_INNER_L2_ETHER);
205 case NPC_LT_LG_TU_IP:
206 val |= TU_SHIFT(RTE_PTYPE_INNER_L3_IPV4);
208 case NPC_LT_LG_TU_IP6:
209 val |= TU_SHIFT(RTE_PTYPE_INNER_L3_IPV6);
213 case NPC_LT_LH_TU_TCP:
214 val |= TU_SHIFT(RTE_PTYPE_INNER_L4_TCP);
216 case NPC_LT_LH_TU_UDP:
217 val |= TU_SHIFT(RTE_PTYPE_INNER_L4_UDP);
219 case NPC_LT_LH_TU_SCTP:
220 val |= TU_SHIFT(RTE_PTYPE_INNER_L4_SCTP);
222 case NPC_LT_LH_TU_ICMP:
223 val |= TU_SHIFT(RTE_PTYPE_INNER_L4_ICMP);
232 nix_create_rx_ol_flags_array(void *mem)
234 uint16_t idx, errcode, errlev;
235 uint32_t val, *ol_flags;
237 /* Skip ptype array memory */
238 ol_flags = (uint32_t *)((uint8_t *)mem + PTYPE_ARRAY_SZ);
240 for (idx = 0; idx < BIT(ERRCODE_ERRLEN_WIDTH); idx++) {
242 errcode = (idx & 0xff0) >> 4;
244 val = PKT_RX_IP_CKSUM_UNKNOWN;
245 val |= PKT_RX_L4_CKSUM_UNKNOWN;
246 val |= PKT_RX_OUTER_L4_CKSUM_UNKNOWN;
250 /* Mark all errors as BAD checksum errors */
252 val |= PKT_RX_IP_CKSUM_BAD;
253 val |= PKT_RX_L4_CKSUM_BAD;
255 val |= PKT_RX_IP_CKSUM_GOOD;
256 val |= PKT_RX_L4_CKSUM_GOOD;
260 if (errcode == NPC_EC_OIP4_CSUM ||
261 errcode == NPC_EC_IP_FRAG_OFFSET_1) {
262 val |= PKT_RX_IP_CKSUM_BAD;
263 val |= PKT_RX_EIP_CKSUM_BAD;
265 val |= PKT_RX_IP_CKSUM_GOOD;
269 if (errcode == NPC_EC_IIP4_CSUM)
270 val |= PKT_RX_IP_CKSUM_BAD;
272 val |= PKT_RX_IP_CKSUM_GOOD;
275 if (errcode == NIX_RX_PERRCODE_OL4_CHK) {
276 val |= PKT_RX_OUTER_L4_CKSUM_BAD;
277 val |= PKT_RX_L4_CKSUM_BAD;
278 } else if (errcode == NIX_RX_PERRCODE_IL4_CHK) {
279 val |= PKT_RX_L4_CKSUM_BAD;
281 val |= PKT_RX_IP_CKSUM_GOOD;
282 val |= PKT_RX_L4_CKSUM_GOOD;
292 otx2_nix_fastpath_lookup_mem_get(void)
294 const char name[] = "otx2_nix_fastpath_lookup_mem";
295 const struct rte_memzone *mz;
298 mz = rte_memzone_lookup(name);
302 /* Request for the first time */
303 mz = rte_memzone_reserve_aligned(name, LOOKUP_ARRAY_SZ,
304 SOCKET_ID_ANY, 0, OTX2_ALIGN);
307 /* Form the ptype array lookup memory */
308 nix_create_non_tunnel_ptype_array(mem);
309 nix_create_tunnel_ptype_array(mem);
310 /* Form the rx ol_flags based on errcode */
311 nix_create_rx_ol_flags_array(mem);