2 * Copyright (c) 2016 QLogic Corporation.
6 * See LICENSE.qede_pmd for copyright and licensing details.
9 #ifndef __ECORE_HSI_COMMON__
10 #define __ECORE_HSI_COMMON__
11 /********************************/
12 /* Add include to common target */
13 /********************************/
14 #include "common_hsi.h"
17 * opcodes for the event ring
/* Event-ring opcodes common to all protocols.
 * NOTE(review): gapped listing — original lines 21, 23, 25 and 28
 * (enumerators between the visible ones) and the closing "};" are elided.
 * Elided enumerators shift the values of every later entry, so this enum
 * must be restored from the canonical HSI header before use.
 */
19 enum common_event_opcode {
20 COMMON_EVENT_PF_START,
22 COMMON_EVENT_VF_START,
24 COMMON_EVENT_VF_PF_CHANNEL,
26 COMMON_EVENT_PF_UPDATE,
27 COMMON_EVENT_MALICIOUS_VF,
29 MAX_COMMON_EVENT_OPCODE
33 * Common Ramrod Command IDs
/* Common ramrod command IDs.
 * NOTE(review): gapped listing — original line 36 (presumably a leading
 * placeholder enumerator) and the closing "};" are elided; an elided
 * leading enumerator shifts the value of every visible entry.
 */
35 enum common_ramrod_cmd_id {
37 COMMON_RAMROD_PF_START /* PF Function Start Ramrod */,
38 COMMON_RAMROD_PF_STOP /* PF Function Stop Ramrod */,
39 COMMON_RAMROD_VF_START /* VF Function Start */,
40 COMMON_RAMROD_VF_STOP /* VF Function Stop Ramrod */,
41 COMMON_RAMROD_PF_UPDATE /* PF update Ramrod */,
42 COMMON_RAMROD_EMPTY /* Empty Ramrod */,
43 MAX_COMMON_RAMROD_CMD_ID
47 * The core storm context for the Ystorm
49 struct ystorm_core_conn_st_ctx {
54 * The core storm context for the Pstorm
56 struct pstorm_core_conn_st_ctx {
61 * Core Slowpath Connection storm context of Xstorm
/* Core slow-path connection storm context of Xstorm: SPQ and
 * consolidation ring base addresses and consumers, padded to a fixed
 * context size.
 * NOTE(review): gapped listing — original line 67 (continuation of the
 * consolid_base_addr declaration, its terminating ";") and the closing
 * "};" are elided.
 */
63 struct xstorm_core_conn_st_ctx {
64 __le32 spq_base_lo /* SPQ Ring Base Address low dword */;
65 __le32 spq_base_hi /* SPQ Ring Base Address high dword */;
66 struct regpair consolid_base_addr /* Consolidation Ring Base Address */
68 __le16 spq_cons /* SPQ Ring Consumer */;
69 __le16 consolid_cons /* Consolidation Ring Consumer */;
70 __le32 reserved0[55] /* Pad to 15 cycles */;
73 struct xstorm_core_conn_ag_ctx {
74 u8 reserved0 /* cdu_validation */;
75 u8 core_state /* state */;
77 #define XSTORM_CORE_CONN_AG_CTX_EXIST_IN_QM0_MASK 0x1
78 #define XSTORM_CORE_CONN_AG_CTX_EXIST_IN_QM0_SHIFT 0
79 #define XSTORM_CORE_CONN_AG_CTX_RESERVED1_MASK 0x1
80 #define XSTORM_CORE_CONN_AG_CTX_RESERVED1_SHIFT 1
81 #define XSTORM_CORE_CONN_AG_CTX_RESERVED2_MASK 0x1
82 #define XSTORM_CORE_CONN_AG_CTX_RESERVED2_SHIFT 2
83 #define XSTORM_CORE_CONN_AG_CTX_EXIST_IN_QM3_MASK 0x1
84 #define XSTORM_CORE_CONN_AG_CTX_EXIST_IN_QM3_SHIFT 3
85 #define XSTORM_CORE_CONN_AG_CTX_RESERVED3_MASK 0x1
86 #define XSTORM_CORE_CONN_AG_CTX_RESERVED3_SHIFT 4
87 #define XSTORM_CORE_CONN_AG_CTX_RESERVED4_MASK 0x1
88 #define XSTORM_CORE_CONN_AG_CTX_RESERVED4_SHIFT 5
89 #define XSTORM_CORE_CONN_AG_CTX_RESERVED5_MASK 0x1
90 #define XSTORM_CORE_CONN_AG_CTX_RESERVED5_SHIFT 6
91 #define XSTORM_CORE_CONN_AG_CTX_RESERVED6_MASK 0x1
92 #define XSTORM_CORE_CONN_AG_CTX_RESERVED6_SHIFT 7
94 #define XSTORM_CORE_CONN_AG_CTX_RESERVED7_MASK 0x1
95 #define XSTORM_CORE_CONN_AG_CTX_RESERVED7_SHIFT 0
96 #define XSTORM_CORE_CONN_AG_CTX_RESERVED8_MASK 0x1
97 #define XSTORM_CORE_CONN_AG_CTX_RESERVED8_SHIFT 1
98 #define XSTORM_CORE_CONN_AG_CTX_RESERVED9_MASK 0x1
99 #define XSTORM_CORE_CONN_AG_CTX_RESERVED9_SHIFT 2
100 #define XSTORM_CORE_CONN_AG_CTX_BIT11_MASK 0x1
101 #define XSTORM_CORE_CONN_AG_CTX_BIT11_SHIFT 3
102 #define XSTORM_CORE_CONN_AG_CTX_BIT12_MASK 0x1
103 #define XSTORM_CORE_CONN_AG_CTX_BIT12_SHIFT 4
104 #define XSTORM_CORE_CONN_AG_CTX_BIT13_MASK 0x1
105 #define XSTORM_CORE_CONN_AG_CTX_BIT13_SHIFT 5
106 #define XSTORM_CORE_CONN_AG_CTX_TX_RULE_ACTIVE_MASK 0x1
107 #define XSTORM_CORE_CONN_AG_CTX_TX_RULE_ACTIVE_SHIFT 6
108 #define XSTORM_CORE_CONN_AG_CTX_DQ_CF_ACTIVE_MASK 0x1
109 #define XSTORM_CORE_CONN_AG_CTX_DQ_CF_ACTIVE_SHIFT 7
111 #define XSTORM_CORE_CONN_AG_CTX_CF0_MASK 0x3
112 #define XSTORM_CORE_CONN_AG_CTX_CF0_SHIFT 0
113 #define XSTORM_CORE_CONN_AG_CTX_CF1_MASK 0x3
114 #define XSTORM_CORE_CONN_AG_CTX_CF1_SHIFT 2
115 #define XSTORM_CORE_CONN_AG_CTX_CF2_MASK 0x3
116 #define XSTORM_CORE_CONN_AG_CTX_CF2_SHIFT 4
117 #define XSTORM_CORE_CONN_AG_CTX_CF3_MASK 0x3
118 #define XSTORM_CORE_CONN_AG_CTX_CF3_SHIFT 6
120 #define XSTORM_CORE_CONN_AG_CTX_CF4_MASK 0x3
121 #define XSTORM_CORE_CONN_AG_CTX_CF4_SHIFT 0
122 #define XSTORM_CORE_CONN_AG_CTX_CF5_MASK 0x3
123 #define XSTORM_CORE_CONN_AG_CTX_CF5_SHIFT 2
124 #define XSTORM_CORE_CONN_AG_CTX_CF6_MASK 0x3
125 #define XSTORM_CORE_CONN_AG_CTX_CF6_SHIFT 4
126 #define XSTORM_CORE_CONN_AG_CTX_CF7_MASK 0x3
127 #define XSTORM_CORE_CONN_AG_CTX_CF7_SHIFT 6
129 #define XSTORM_CORE_CONN_AG_CTX_CF8_MASK 0x3
130 #define XSTORM_CORE_CONN_AG_CTX_CF8_SHIFT 0
131 #define XSTORM_CORE_CONN_AG_CTX_CF9_MASK 0x3
132 #define XSTORM_CORE_CONN_AG_CTX_CF9_SHIFT 2
133 #define XSTORM_CORE_CONN_AG_CTX_CF10_MASK 0x3
134 #define XSTORM_CORE_CONN_AG_CTX_CF10_SHIFT 4
135 #define XSTORM_CORE_CONN_AG_CTX_CF11_MASK 0x3
136 #define XSTORM_CORE_CONN_AG_CTX_CF11_SHIFT 6
138 #define XSTORM_CORE_CONN_AG_CTX_CF12_MASK 0x3
139 #define XSTORM_CORE_CONN_AG_CTX_CF12_SHIFT 0
140 #define XSTORM_CORE_CONN_AG_CTX_CF13_MASK 0x3
141 #define XSTORM_CORE_CONN_AG_CTX_CF13_SHIFT 2
142 #define XSTORM_CORE_CONN_AG_CTX_CF14_MASK 0x3
143 #define XSTORM_CORE_CONN_AG_CTX_CF14_SHIFT 4
144 #define XSTORM_CORE_CONN_AG_CTX_CF15_MASK 0x3
145 #define XSTORM_CORE_CONN_AG_CTX_CF15_SHIFT 6
147 #define XSTORM_CORE_CONN_AG_CTX_CONSOLID_PROD_CF_MASK 0x3
148 #define XSTORM_CORE_CONN_AG_CTX_CONSOLID_PROD_CF_SHIFT 0
149 #define XSTORM_CORE_CONN_AG_CTX_CF17_MASK 0x3
150 #define XSTORM_CORE_CONN_AG_CTX_CF17_SHIFT 2
151 #define XSTORM_CORE_CONN_AG_CTX_DQ_CF_MASK 0x3
152 #define XSTORM_CORE_CONN_AG_CTX_DQ_CF_SHIFT 4
153 #define XSTORM_CORE_CONN_AG_CTX_TERMINATE_CF_MASK 0x3
154 #define XSTORM_CORE_CONN_AG_CTX_TERMINATE_CF_SHIFT 6
156 #define XSTORM_CORE_CONN_AG_CTX_FLUSH_Q0_MASK 0x3
157 #define XSTORM_CORE_CONN_AG_CTX_FLUSH_Q0_SHIFT 0
158 #define XSTORM_CORE_CONN_AG_CTX_RESERVED10_MASK 0x3
159 #define XSTORM_CORE_CONN_AG_CTX_RESERVED10_SHIFT 2
160 #define XSTORM_CORE_CONN_AG_CTX_SLOW_PATH_MASK 0x3
161 #define XSTORM_CORE_CONN_AG_CTX_SLOW_PATH_SHIFT 4
162 #define XSTORM_CORE_CONN_AG_CTX_CF0EN_MASK 0x1
163 #define XSTORM_CORE_CONN_AG_CTX_CF0EN_SHIFT 6
164 #define XSTORM_CORE_CONN_AG_CTX_CF1EN_MASK 0x1
165 #define XSTORM_CORE_CONN_AG_CTX_CF1EN_SHIFT 7
167 #define XSTORM_CORE_CONN_AG_CTX_CF2EN_MASK 0x1
168 #define XSTORM_CORE_CONN_AG_CTX_CF2EN_SHIFT 0
169 #define XSTORM_CORE_CONN_AG_CTX_CF3EN_MASK 0x1
170 #define XSTORM_CORE_CONN_AG_CTX_CF3EN_SHIFT 1
171 #define XSTORM_CORE_CONN_AG_CTX_CF4EN_MASK 0x1
172 #define XSTORM_CORE_CONN_AG_CTX_CF4EN_SHIFT 2
173 #define XSTORM_CORE_CONN_AG_CTX_CF5EN_MASK 0x1
174 #define XSTORM_CORE_CONN_AG_CTX_CF5EN_SHIFT 3
175 #define XSTORM_CORE_CONN_AG_CTX_CF6EN_MASK 0x1
176 #define XSTORM_CORE_CONN_AG_CTX_CF6EN_SHIFT 4
177 #define XSTORM_CORE_CONN_AG_CTX_CF7EN_MASK 0x1
178 #define XSTORM_CORE_CONN_AG_CTX_CF7EN_SHIFT 5
179 #define XSTORM_CORE_CONN_AG_CTX_CF8EN_MASK 0x1
180 #define XSTORM_CORE_CONN_AG_CTX_CF8EN_SHIFT 6
181 #define XSTORM_CORE_CONN_AG_CTX_CF9EN_MASK 0x1
182 #define XSTORM_CORE_CONN_AG_CTX_CF9EN_SHIFT 7
184 #define XSTORM_CORE_CONN_AG_CTX_CF10EN_MASK 0x1
185 #define XSTORM_CORE_CONN_AG_CTX_CF10EN_SHIFT 0
186 #define XSTORM_CORE_CONN_AG_CTX_CF11EN_MASK 0x1
187 #define XSTORM_CORE_CONN_AG_CTX_CF11EN_SHIFT 1
188 #define XSTORM_CORE_CONN_AG_CTX_CF12EN_MASK 0x1
189 #define XSTORM_CORE_CONN_AG_CTX_CF12EN_SHIFT 2
190 #define XSTORM_CORE_CONN_AG_CTX_CF13EN_MASK 0x1
191 #define XSTORM_CORE_CONN_AG_CTX_CF13EN_SHIFT 3
192 #define XSTORM_CORE_CONN_AG_CTX_CF14EN_MASK 0x1
193 #define XSTORM_CORE_CONN_AG_CTX_CF14EN_SHIFT 4
194 #define XSTORM_CORE_CONN_AG_CTX_CF15EN_MASK 0x1
195 #define XSTORM_CORE_CONN_AG_CTX_CF15EN_SHIFT 5
196 #define XSTORM_CORE_CONN_AG_CTX_CONSOLID_PROD_CF_EN_MASK 0x1
197 #define XSTORM_CORE_CONN_AG_CTX_CONSOLID_PROD_CF_EN_SHIFT 6
198 #define XSTORM_CORE_CONN_AG_CTX_CF17EN_MASK 0x1
199 #define XSTORM_CORE_CONN_AG_CTX_CF17EN_SHIFT 7
201 #define XSTORM_CORE_CONN_AG_CTX_DQ_CF_EN_MASK 0x1
202 #define XSTORM_CORE_CONN_AG_CTX_DQ_CF_EN_SHIFT 0
203 #define XSTORM_CORE_CONN_AG_CTX_TERMINATE_CF_EN_MASK 0x1
204 #define XSTORM_CORE_CONN_AG_CTX_TERMINATE_CF_EN_SHIFT 1
205 #define XSTORM_CORE_CONN_AG_CTX_FLUSH_Q0_EN_MASK 0x1
206 #define XSTORM_CORE_CONN_AG_CTX_FLUSH_Q0_EN_SHIFT 2
207 #define XSTORM_CORE_CONN_AG_CTX_RESERVED11_MASK 0x1
208 #define XSTORM_CORE_CONN_AG_CTX_RESERVED11_SHIFT 3
209 #define XSTORM_CORE_CONN_AG_CTX_SLOW_PATH_EN_MASK 0x1
210 #define XSTORM_CORE_CONN_AG_CTX_SLOW_PATH_EN_SHIFT 4
211 #define XSTORM_CORE_CONN_AG_CTX_CF23EN_MASK 0x1
212 #define XSTORM_CORE_CONN_AG_CTX_CF23EN_SHIFT 5
213 #define XSTORM_CORE_CONN_AG_CTX_RESERVED12_MASK 0x1
214 #define XSTORM_CORE_CONN_AG_CTX_RESERVED12_SHIFT 6
215 #define XSTORM_CORE_CONN_AG_CTX_RESERVED13_MASK 0x1
216 #define XSTORM_CORE_CONN_AG_CTX_RESERVED13_SHIFT 7
218 #define XSTORM_CORE_CONN_AG_CTX_RESERVED14_MASK 0x1
219 #define XSTORM_CORE_CONN_AG_CTX_RESERVED14_SHIFT 0
220 #define XSTORM_CORE_CONN_AG_CTX_RESERVED15_MASK 0x1
221 #define XSTORM_CORE_CONN_AG_CTX_RESERVED15_SHIFT 1
222 #define XSTORM_CORE_CONN_AG_CTX_TX_DEC_RULE_EN_MASK 0x1
223 #define XSTORM_CORE_CONN_AG_CTX_TX_DEC_RULE_EN_SHIFT 2
224 #define XSTORM_CORE_CONN_AG_CTX_RULE5EN_MASK 0x1
225 #define XSTORM_CORE_CONN_AG_CTX_RULE5EN_SHIFT 3
226 #define XSTORM_CORE_CONN_AG_CTX_RULE6EN_MASK 0x1
227 #define XSTORM_CORE_CONN_AG_CTX_RULE6EN_SHIFT 4
228 #define XSTORM_CORE_CONN_AG_CTX_RULE7EN_MASK 0x1
229 #define XSTORM_CORE_CONN_AG_CTX_RULE7EN_SHIFT 5
230 #define XSTORM_CORE_CONN_AG_CTX_A0_RESERVED1_MASK 0x1
231 #define XSTORM_CORE_CONN_AG_CTX_A0_RESERVED1_SHIFT 6
232 #define XSTORM_CORE_CONN_AG_CTX_RULE9EN_MASK 0x1
233 #define XSTORM_CORE_CONN_AG_CTX_RULE9EN_SHIFT 7
235 #define XSTORM_CORE_CONN_AG_CTX_RULE10EN_MASK 0x1
236 #define XSTORM_CORE_CONN_AG_CTX_RULE10EN_SHIFT 0
237 #define XSTORM_CORE_CONN_AG_CTX_RULE11EN_MASK 0x1
238 #define XSTORM_CORE_CONN_AG_CTX_RULE11EN_SHIFT 1
239 #define XSTORM_CORE_CONN_AG_CTX_A0_RESERVED2_MASK 0x1
240 #define XSTORM_CORE_CONN_AG_CTX_A0_RESERVED2_SHIFT 2
241 #define XSTORM_CORE_CONN_AG_CTX_A0_RESERVED3_MASK 0x1
242 #define XSTORM_CORE_CONN_AG_CTX_A0_RESERVED3_SHIFT 3
243 #define XSTORM_CORE_CONN_AG_CTX_RULE14EN_MASK 0x1
244 #define XSTORM_CORE_CONN_AG_CTX_RULE14EN_SHIFT 4
245 #define XSTORM_CORE_CONN_AG_CTX_RULE15EN_MASK 0x1
246 #define XSTORM_CORE_CONN_AG_CTX_RULE15EN_SHIFT 5
247 #define XSTORM_CORE_CONN_AG_CTX_RULE16EN_MASK 0x1
248 #define XSTORM_CORE_CONN_AG_CTX_RULE16EN_SHIFT 6
249 #define XSTORM_CORE_CONN_AG_CTX_RULE17EN_MASK 0x1
250 #define XSTORM_CORE_CONN_AG_CTX_RULE17EN_SHIFT 7
252 #define XSTORM_CORE_CONN_AG_CTX_RULE18EN_MASK 0x1
253 #define XSTORM_CORE_CONN_AG_CTX_RULE18EN_SHIFT 0
254 #define XSTORM_CORE_CONN_AG_CTX_RULE19EN_MASK 0x1
255 #define XSTORM_CORE_CONN_AG_CTX_RULE19EN_SHIFT 1
256 #define XSTORM_CORE_CONN_AG_CTX_A0_RESERVED4_MASK 0x1
257 #define XSTORM_CORE_CONN_AG_CTX_A0_RESERVED4_SHIFT 2
258 #define XSTORM_CORE_CONN_AG_CTX_A0_RESERVED5_MASK 0x1
259 #define XSTORM_CORE_CONN_AG_CTX_A0_RESERVED5_SHIFT 3
260 #define XSTORM_CORE_CONN_AG_CTX_A0_RESERVED6_MASK 0x1
261 #define XSTORM_CORE_CONN_AG_CTX_A0_RESERVED6_SHIFT 4
262 #define XSTORM_CORE_CONN_AG_CTX_A0_RESERVED7_MASK 0x1
263 #define XSTORM_CORE_CONN_AG_CTX_A0_RESERVED7_SHIFT 5
264 #define XSTORM_CORE_CONN_AG_CTX_A0_RESERVED8_MASK 0x1
265 #define XSTORM_CORE_CONN_AG_CTX_A0_RESERVED8_SHIFT 6
266 #define XSTORM_CORE_CONN_AG_CTX_A0_RESERVED9_MASK 0x1
267 #define XSTORM_CORE_CONN_AG_CTX_A0_RESERVED9_SHIFT 7
269 #define XSTORM_CORE_CONN_AG_CTX_BIT16_MASK 0x1
270 #define XSTORM_CORE_CONN_AG_CTX_BIT16_SHIFT 0
271 #define XSTORM_CORE_CONN_AG_CTX_BIT17_MASK 0x1
272 #define XSTORM_CORE_CONN_AG_CTX_BIT17_SHIFT 1
273 #define XSTORM_CORE_CONN_AG_CTX_BIT18_MASK 0x1
274 #define XSTORM_CORE_CONN_AG_CTX_BIT18_SHIFT 2
275 #define XSTORM_CORE_CONN_AG_CTX_BIT19_MASK 0x1
276 #define XSTORM_CORE_CONN_AG_CTX_BIT19_SHIFT 3
277 #define XSTORM_CORE_CONN_AG_CTX_BIT20_MASK 0x1
278 #define XSTORM_CORE_CONN_AG_CTX_BIT20_SHIFT 4
279 #define XSTORM_CORE_CONN_AG_CTX_BIT21_MASK 0x1
280 #define XSTORM_CORE_CONN_AG_CTX_BIT21_SHIFT 5
281 #define XSTORM_CORE_CONN_AG_CTX_CF23_MASK 0x3
282 #define XSTORM_CORE_CONN_AG_CTX_CF23_SHIFT 6
283 u8 byte2 /* byte2 */;
284 __le16 physical_q0 /* physical_q0 */;
285 __le16 consolid_prod /* physical_q1 */;
286 __le16 reserved16 /* physical_q2 */;
287 __le16 tx_bd_cons /* word3 */;
288 __le16 tx_bd_or_spq_prod /* word4 */;
289 __le16 word5 /* word5 */;
290 __le16 conn_dpi /* conn_dpi */;
291 u8 byte3 /* byte3 */;
292 u8 byte4 /* byte4 */;
293 u8 byte5 /* byte5 */;
294 u8 byte6 /* byte6 */;
295 __le32 reg0 /* reg0 */;
296 __le32 reg1 /* reg1 */;
297 __le32 reg2 /* reg2 */;
298 __le32 reg3 /* reg3 */;
299 __le32 reg4 /* reg4 */;
300 __le32 reg5 /* cf_array0 */;
301 __le32 reg6 /* cf_array1 */;
302 __le16 word7 /* word7 */;
303 __le16 word8 /* word8 */;
304 __le16 word9 /* word9 */;
305 __le16 word10 /* word10 */;
306 __le32 reg7 /* reg7 */;
307 __le32 reg8 /* reg8 */;
308 __le32 reg9 /* reg9 */;
309 u8 byte7 /* byte7 */;
310 u8 byte8 /* byte8 */;
311 u8 byte9 /* byte9 */;
312 u8 byte10 /* byte10 */;
313 u8 byte11 /* byte11 */;
314 u8 byte12 /* byte12 */;
315 u8 byte13 /* byte13 */;
316 u8 byte14 /* byte14 */;
317 u8 byte15 /* byte15 */;
318 u8 byte16 /* byte16 */;
319 __le16 word11 /* word11 */;
320 __le32 reg10 /* reg10 */;
321 __le32 reg11 /* reg11 */;
322 __le32 reg12 /* reg12 */;
323 __le32 reg13 /* reg13 */;
324 __le32 reg14 /* reg14 */;
325 __le32 reg15 /* reg15 */;
326 __le32 reg16 /* reg16 */;
327 __le32 reg17 /* reg17 */;
328 __le32 reg18 /* reg18 */;
329 __le32 reg19 /* reg19 */;
330 __le16 word12 /* word12 */;
331 __le16 word13 /* word13 */;
332 __le16 word14 /* word14 */;
333 __le16 word15 /* word15 */;
336 struct tstorm_core_conn_ag_ctx {
337 u8 byte0 /* cdu_validation */;
338 u8 byte1 /* state */;
340 #define TSTORM_CORE_CONN_AG_CTX_BIT0_MASK 0x1
341 #define TSTORM_CORE_CONN_AG_CTX_BIT0_SHIFT 0
342 #define TSTORM_CORE_CONN_AG_CTX_BIT1_MASK 0x1
343 #define TSTORM_CORE_CONN_AG_CTX_BIT1_SHIFT 1
344 #define TSTORM_CORE_CONN_AG_CTX_BIT2_MASK 0x1
345 #define TSTORM_CORE_CONN_AG_CTX_BIT2_SHIFT 2
346 #define TSTORM_CORE_CONN_AG_CTX_BIT3_MASK 0x1
347 #define TSTORM_CORE_CONN_AG_CTX_BIT3_SHIFT 3
348 #define TSTORM_CORE_CONN_AG_CTX_BIT4_MASK 0x1
349 #define TSTORM_CORE_CONN_AG_CTX_BIT4_SHIFT 4
350 #define TSTORM_CORE_CONN_AG_CTX_BIT5_MASK 0x1
351 #define TSTORM_CORE_CONN_AG_CTX_BIT5_SHIFT 5
352 #define TSTORM_CORE_CONN_AG_CTX_CF0_MASK 0x3
353 #define TSTORM_CORE_CONN_AG_CTX_CF0_SHIFT 6
355 #define TSTORM_CORE_CONN_AG_CTX_CF1_MASK 0x3
356 #define TSTORM_CORE_CONN_AG_CTX_CF1_SHIFT 0
357 #define TSTORM_CORE_CONN_AG_CTX_CF2_MASK 0x3
358 #define TSTORM_CORE_CONN_AG_CTX_CF2_SHIFT 2
359 #define TSTORM_CORE_CONN_AG_CTX_CF3_MASK 0x3
360 #define TSTORM_CORE_CONN_AG_CTX_CF3_SHIFT 4
361 #define TSTORM_CORE_CONN_AG_CTX_CF4_MASK 0x3
362 #define TSTORM_CORE_CONN_AG_CTX_CF4_SHIFT 6
364 #define TSTORM_CORE_CONN_AG_CTX_CF5_MASK 0x3
365 #define TSTORM_CORE_CONN_AG_CTX_CF5_SHIFT 0
366 #define TSTORM_CORE_CONN_AG_CTX_CF6_MASK 0x3
367 #define TSTORM_CORE_CONN_AG_CTX_CF6_SHIFT 2
368 #define TSTORM_CORE_CONN_AG_CTX_CF7_MASK 0x3
369 #define TSTORM_CORE_CONN_AG_CTX_CF7_SHIFT 4
370 #define TSTORM_CORE_CONN_AG_CTX_CF8_MASK 0x3
371 #define TSTORM_CORE_CONN_AG_CTX_CF8_SHIFT 6
373 #define TSTORM_CORE_CONN_AG_CTX_CF9_MASK 0x3
374 #define TSTORM_CORE_CONN_AG_CTX_CF9_SHIFT 0
375 #define TSTORM_CORE_CONN_AG_CTX_CF10_MASK 0x3
376 #define TSTORM_CORE_CONN_AG_CTX_CF10_SHIFT 2
377 #define TSTORM_CORE_CONN_AG_CTX_CF0EN_MASK 0x1
378 #define TSTORM_CORE_CONN_AG_CTX_CF0EN_SHIFT 4
379 #define TSTORM_CORE_CONN_AG_CTX_CF1EN_MASK 0x1
380 #define TSTORM_CORE_CONN_AG_CTX_CF1EN_SHIFT 5
381 #define TSTORM_CORE_CONN_AG_CTX_CF2EN_MASK 0x1
382 #define TSTORM_CORE_CONN_AG_CTX_CF2EN_SHIFT 6
383 #define TSTORM_CORE_CONN_AG_CTX_CF3EN_MASK 0x1
384 #define TSTORM_CORE_CONN_AG_CTX_CF3EN_SHIFT 7
386 #define TSTORM_CORE_CONN_AG_CTX_CF4EN_MASK 0x1
387 #define TSTORM_CORE_CONN_AG_CTX_CF4EN_SHIFT 0
388 #define TSTORM_CORE_CONN_AG_CTX_CF5EN_MASK 0x1
389 #define TSTORM_CORE_CONN_AG_CTX_CF5EN_SHIFT 1
390 #define TSTORM_CORE_CONN_AG_CTX_CF6EN_MASK 0x1
391 #define TSTORM_CORE_CONN_AG_CTX_CF6EN_SHIFT 2
392 #define TSTORM_CORE_CONN_AG_CTX_CF7EN_MASK 0x1
393 #define TSTORM_CORE_CONN_AG_CTX_CF7EN_SHIFT 3
394 #define TSTORM_CORE_CONN_AG_CTX_CF8EN_MASK 0x1
395 #define TSTORM_CORE_CONN_AG_CTX_CF8EN_SHIFT 4
396 #define TSTORM_CORE_CONN_AG_CTX_CF9EN_MASK 0x1
397 #define TSTORM_CORE_CONN_AG_CTX_CF9EN_SHIFT 5
398 #define TSTORM_CORE_CONN_AG_CTX_CF10EN_MASK 0x1
399 #define TSTORM_CORE_CONN_AG_CTX_CF10EN_SHIFT 6
400 #define TSTORM_CORE_CONN_AG_CTX_RULE0EN_MASK 0x1
401 #define TSTORM_CORE_CONN_AG_CTX_RULE0EN_SHIFT 7
403 #define TSTORM_CORE_CONN_AG_CTX_RULE1EN_MASK 0x1
404 #define TSTORM_CORE_CONN_AG_CTX_RULE1EN_SHIFT 0
405 #define TSTORM_CORE_CONN_AG_CTX_RULE2EN_MASK 0x1
406 #define TSTORM_CORE_CONN_AG_CTX_RULE2EN_SHIFT 1
407 #define TSTORM_CORE_CONN_AG_CTX_RULE3EN_MASK 0x1
408 #define TSTORM_CORE_CONN_AG_CTX_RULE3EN_SHIFT 2
409 #define TSTORM_CORE_CONN_AG_CTX_RULE4EN_MASK 0x1
410 #define TSTORM_CORE_CONN_AG_CTX_RULE4EN_SHIFT 3
411 #define TSTORM_CORE_CONN_AG_CTX_RULE5EN_MASK 0x1
412 #define TSTORM_CORE_CONN_AG_CTX_RULE5EN_SHIFT 4
413 #define TSTORM_CORE_CONN_AG_CTX_RULE6EN_MASK 0x1
414 #define TSTORM_CORE_CONN_AG_CTX_RULE6EN_SHIFT 5
415 #define TSTORM_CORE_CONN_AG_CTX_RULE7EN_MASK 0x1
416 #define TSTORM_CORE_CONN_AG_CTX_RULE7EN_SHIFT 6
417 #define TSTORM_CORE_CONN_AG_CTX_RULE8EN_MASK 0x1
418 #define TSTORM_CORE_CONN_AG_CTX_RULE8EN_SHIFT 7
419 __le32 reg0 /* reg0 */;
420 __le32 reg1 /* reg1 */;
421 __le32 reg2 /* reg2 */;
422 __le32 reg3 /* reg3 */;
423 __le32 reg4 /* reg4 */;
424 __le32 reg5 /* reg5 */;
425 __le32 reg6 /* reg6 */;
426 __le32 reg7 /* reg7 */;
427 __le32 reg8 /* reg8 */;
428 u8 byte2 /* byte2 */;
429 u8 byte3 /* byte3 */;
430 __le16 word0 /* word0 */;
431 u8 byte4 /* byte4 */;
432 u8 byte5 /* byte5 */;
433 __le16 word1 /* word1 */;
434 __le16 word2 /* conn_dpi */;
435 __le16 word3 /* word3 */;
436 __le32 reg9 /* reg9 */;
437 __le32 reg10 /* reg10 */;
440 struct ustorm_core_conn_ag_ctx {
441 u8 reserved /* cdu_validation */;
442 u8 byte1 /* state */;
444 #define USTORM_CORE_CONN_AG_CTX_BIT0_MASK 0x1
445 #define USTORM_CORE_CONN_AG_CTX_BIT0_SHIFT 0
446 #define USTORM_CORE_CONN_AG_CTX_BIT1_MASK 0x1
447 #define USTORM_CORE_CONN_AG_CTX_BIT1_SHIFT 1
448 #define USTORM_CORE_CONN_AG_CTX_CF0_MASK 0x3
449 #define USTORM_CORE_CONN_AG_CTX_CF0_SHIFT 2
450 #define USTORM_CORE_CONN_AG_CTX_CF1_MASK 0x3
451 #define USTORM_CORE_CONN_AG_CTX_CF1_SHIFT 4
452 #define USTORM_CORE_CONN_AG_CTX_CF2_MASK 0x3
453 #define USTORM_CORE_CONN_AG_CTX_CF2_SHIFT 6
455 #define USTORM_CORE_CONN_AG_CTX_CF3_MASK 0x3
456 #define USTORM_CORE_CONN_AG_CTX_CF3_SHIFT 0
457 #define USTORM_CORE_CONN_AG_CTX_CF4_MASK 0x3
458 #define USTORM_CORE_CONN_AG_CTX_CF4_SHIFT 2
459 #define USTORM_CORE_CONN_AG_CTX_CF5_MASK 0x3
460 #define USTORM_CORE_CONN_AG_CTX_CF5_SHIFT 4
461 #define USTORM_CORE_CONN_AG_CTX_CF6_MASK 0x3
462 #define USTORM_CORE_CONN_AG_CTX_CF6_SHIFT 6
464 #define USTORM_CORE_CONN_AG_CTX_CF0EN_MASK 0x1
465 #define USTORM_CORE_CONN_AG_CTX_CF0EN_SHIFT 0
466 #define USTORM_CORE_CONN_AG_CTX_CF1EN_MASK 0x1
467 #define USTORM_CORE_CONN_AG_CTX_CF1EN_SHIFT 1
468 #define USTORM_CORE_CONN_AG_CTX_CF2EN_MASK 0x1
469 #define USTORM_CORE_CONN_AG_CTX_CF2EN_SHIFT 2
470 #define USTORM_CORE_CONN_AG_CTX_CF3EN_MASK 0x1
471 #define USTORM_CORE_CONN_AG_CTX_CF3EN_SHIFT 3
472 #define USTORM_CORE_CONN_AG_CTX_CF4EN_MASK 0x1
473 #define USTORM_CORE_CONN_AG_CTX_CF4EN_SHIFT 4
474 #define USTORM_CORE_CONN_AG_CTX_CF5EN_MASK 0x1
475 #define USTORM_CORE_CONN_AG_CTX_CF5EN_SHIFT 5
476 #define USTORM_CORE_CONN_AG_CTX_CF6EN_MASK 0x1
477 #define USTORM_CORE_CONN_AG_CTX_CF6EN_SHIFT 6
478 #define USTORM_CORE_CONN_AG_CTX_RULE0EN_MASK 0x1
479 #define USTORM_CORE_CONN_AG_CTX_RULE0EN_SHIFT 7
481 #define USTORM_CORE_CONN_AG_CTX_RULE1EN_MASK 0x1
482 #define USTORM_CORE_CONN_AG_CTX_RULE1EN_SHIFT 0
483 #define USTORM_CORE_CONN_AG_CTX_RULE2EN_MASK 0x1
484 #define USTORM_CORE_CONN_AG_CTX_RULE2EN_SHIFT 1
485 #define USTORM_CORE_CONN_AG_CTX_RULE3EN_MASK 0x1
486 #define USTORM_CORE_CONN_AG_CTX_RULE3EN_SHIFT 2
487 #define USTORM_CORE_CONN_AG_CTX_RULE4EN_MASK 0x1
488 #define USTORM_CORE_CONN_AG_CTX_RULE4EN_SHIFT 3
489 #define USTORM_CORE_CONN_AG_CTX_RULE5EN_MASK 0x1
490 #define USTORM_CORE_CONN_AG_CTX_RULE5EN_SHIFT 4
491 #define USTORM_CORE_CONN_AG_CTX_RULE6EN_MASK 0x1
492 #define USTORM_CORE_CONN_AG_CTX_RULE6EN_SHIFT 5
493 #define USTORM_CORE_CONN_AG_CTX_RULE7EN_MASK 0x1
494 #define USTORM_CORE_CONN_AG_CTX_RULE7EN_SHIFT 6
495 #define USTORM_CORE_CONN_AG_CTX_RULE8EN_MASK 0x1
496 #define USTORM_CORE_CONN_AG_CTX_RULE8EN_SHIFT 7
497 u8 byte2 /* byte2 */;
498 u8 byte3 /* byte3 */;
499 __le16 word0 /* conn_dpi */;
500 __le16 word1 /* word1 */;
501 __le32 rx_producers /* reg0 */;
502 __le32 reg1 /* reg1 */;
503 __le32 reg2 /* reg2 */;
504 __le32 reg3 /* reg3 */;
505 __le16 word2 /* word2 */;
506 __le16 word3 /* word3 */;
510 * The core storm context for the Mstorm
512 struct mstorm_core_conn_st_ctx {
517 * The core storm context for the Ustorm
519 struct ustorm_core_conn_st_ctx {
524 * core connection context
/* Full core (light-L2) connection context: the per-storm state contexts
 * and aggregative contexts in the order the context-distribution unit
 * expects, with explicit padding between sections.
 * NOTE(review): the closing "};" is elided in this listing.
 */
526 struct core_conn_context {
527 struct ystorm_core_conn_st_ctx ystorm_st_context
528 /* ystorm storm context */;
529 struct regpair ystorm_st_padding[2] /* padding */;
530 struct pstorm_core_conn_st_ctx pstorm_st_context
531 /* pstorm storm context */;
532 struct regpair pstorm_st_padding[2] /* padding */;
533 struct xstorm_core_conn_st_ctx xstorm_st_context
534 /* xstorm storm context */;
535 struct xstorm_core_conn_ag_ctx xstorm_ag_context
536 /* xstorm aggregative context */;
537 struct tstorm_core_conn_ag_ctx tstorm_ag_context
538 /* tstorm aggregative context */;
539 struct ustorm_core_conn_ag_ctx ustorm_ag_context
540 /* ustorm aggregative context */;
541 struct mstorm_core_conn_st_ctx mstorm_st_context
542 /* mstorm storm context */;
543 struct ustorm_core_conn_st_ctx ustorm_st_context
544 /* ustorm storm context */;
545 struct regpair ustorm_st_padding[2] /* padding */;
549 * How ll2 should deal with packet upon errors
/* Per-error-type policy for how LL2 handles a received packet on error.
 * NOTE(review): the closing "};" is elided in this listing.
 */
551 enum core_error_handle {
552 LL2_DROP_PACKET /* If error occurs drop packet */,
553 LL2_DO_NOTHING /* If error occurs do nothing */,
554 LL2_ASSERT /* If error occurs assert */,
555 MAX_CORE_ERROR_HANDLE
559 * opcodes for the event ring
/* Event-ring opcodes for core (light-L2) queue ramrod completions.
 * NOTE(review): the closing "};" is elided in this listing.
 */
561 enum core_event_opcode {
562 CORE_EVENT_TX_QUEUE_START,
563 CORE_EVENT_TX_QUEUE_STOP,
564 CORE_EVENT_RX_QUEUE_START,
565 CORE_EVENT_RX_QUEUE_STOP,
566 MAX_CORE_EVENT_OPCODE
570 * The L4 pseudo checksum mode for Core
/* L4 pseudo-header checksum calculation mode for core TX offload.
 * NOTE(review): gapped listing — original line 574 (the comment for
 * CORE_L4_PSEUDO_CSUM_CORRECT_LENGTH) and the closing "};" are elided.
 */
572 enum core_l4_pseudo_checksum_mode {
573 CORE_L4_PSEUDO_CSUM_CORRECT_LENGTH
575 CORE_L4_PSEUDO_CSUM_ZERO_LENGTH
576 /* Pseudo Checksum on packet is calculated with zero length. */,
577 MAX_CORE_L4_PSEUDO_CHECKSUM_MODE
581 * Light-L2 RX Producers in Tstorm RAM
/* Per-port GSI error counters kept in Tstorm RAM for light-L2.
 * NOTE(review): the closing "};" is elided in this listing.
 */
583 struct core_ll2_port_stats {
584 struct regpair gsi_invalid_hdr;
585 struct regpair gsi_invalid_pkt_length;
586 struct regpair gsi_unsupported_pkt_typ;
587 struct regpair gsi_crcchksm_error;
591 * Ethernet TX Per Queue Stats
/* Light-L2 per-queue TX statistics (Pstorm): bytes and packets sent
 * without errors, broken down by ucast/mcast/bcast.
 * NOTE(review): the closing "};" is elided in this listing.
 */
593 struct core_ll2_pstorm_per_queue_stat {
594 struct regpair sent_ucast_bytes
595 /* number of total bytes sent without errors */;
596 struct regpair sent_mcast_bytes
597 /* number of total bytes sent without errors */;
598 struct regpair sent_bcast_bytes
599 /* number of total bytes sent without errors */;
600 struct regpair sent_ucast_pkts
601 /* number of total packets sent without errors */;
602 struct regpair sent_mcast_pkts
603 /* number of total packets sent without errors */;
604 struct regpair sent_bcast_pkts
605 /* number of total packets sent without errors */;
609 * Light-L2 RX Producers in Tstorm RAM
/* Light-L2 RX producer pair written to Tstorm RAM by the host.
 * NOTE(review): the closing "};" is elided in this listing.
 */
611 struct core_ll2_rx_prod {
612 __le16 bd_prod /* BD Producer */;
613 __le16 cqe_prod /* CQE Producer */;
/* Light-L2 per-queue RX discard counters (Tstorm).
 * NOTE(review): the closing "};" is elided in this listing.
 */
617 struct core_ll2_tstorm_per_queue_stat {
618 struct regpair packet_too_big_discard
619 /* Number of packets discarded because they are bigger than MTU */;
620 struct regpair no_buff_discard
621 /* Number of packets discarded due to lack of host buffers */;
/* Light-L2 per-queue RX statistics (Ustorm): received bytes and packets
 * broken down by ucast/mcast/bcast.
 * NOTE(review): the closing "};" is elided in this listing.
 */
624 struct core_ll2_ustorm_per_queue_stat {
625 struct regpair rcv_ucast_bytes;
626 struct regpair rcv_mcast_bytes;
627 struct regpair rcv_bcast_bytes;
628 struct regpair rcv_ucast_pkts;
629 struct regpair rcv_mcast_pkts;
630 struct regpair rcv_bcast_pkts;
634 * Core Ramrod Command IDs (light L2)
/* Core (light-L2) ramrod command IDs.
 * NOTE(review): gapped listing — original line 637 (presumably a leading
 * placeholder enumerator) and the closing "};" are elided; an elided
 * leading enumerator shifts the value of every visible entry.
 */
636 enum core_ramrod_cmd_id {
638 CORE_RAMROD_RX_QUEUE_START /* RX Queue Start Ramrod */,
639 CORE_RAMROD_TX_QUEUE_START /* TX Queue Start Ramrod */,
640 CORE_RAMROD_RX_QUEUE_STOP /* RX Queue Stop Ramrod */,
641 CORE_RAMROD_TX_QUEUE_STOP /* TX Queue Stop Ramrod */,
642 MAX_CORE_RAMROD_CMD_ID
646 * Specifies how ll2 should deal with packets errors: packet_too_big and no_buff
/* Packed error-handling policy for LL2 RX: two 2-bit core_error_handle
 * fields (packet_too_big, no_buff) plus 4 reserved bits.
 * NOTE(review): gapped listing — original line 649 (presumably the u8
 * storage member the masks/shifts apply to) and the closing "};" are
 * elided.
 */
648 struct core_rx_action_on_error {
650 #define CORE_RX_ACTION_ON_ERROR_PACKET_TOO_BIG_MASK 0x3
651 #define CORE_RX_ACTION_ON_ERROR_PACKET_TOO_BIG_SHIFT 0
652 #define CORE_RX_ACTION_ON_ERROR_NO_BUFF_MASK 0x3
653 #define CORE_RX_ACTION_ON_ERROR_NO_BUFF_SHIFT 2
654 #define CORE_RX_ACTION_ON_ERROR_RESERVED_MASK 0xF
655 #define CORE_RX_ACTION_ON_ERROR_RESERVED_SHIFT 4
659 * Core RX BD for Light L2
667 * Core RX CM offload BD for Light L2
669 struct core_rx_bd_with_buff_len {
676 * Core RX CM offload BD for Light L2
/* Union of the two core RX BD layouts (fixed buffer size vs. explicit
 * buffer length).
 * NOTE(review): the closing "};" is elided in this listing.
 */
678 union core_rx_bd_union {
679 struct core_rx_bd rx_bd /* Core Rx Bd static buffer size */;
680 struct core_rx_bd_with_buff_len rx_bd_with_len
681 /* Core Rx Bd with dynamic buffer length */;
685 * Opaque Data for Light L2 RX CQE .
/* Opaque data echoed back to the driver in light-L2 RX CQEs.
 * NOTE(review): the closing "};" is elided in this listing.
 */
687 struct core_rx_cqe_opaque_data {
688 __le32 data[2] /* Opaque CQE Data */;
692 * Core RX CQE Type for Light L2
/* Light-L2 RX CQE types.
 * NOTE(review): gapped listing — the MAX_CORE_RX_CQE_TYPE terminator
 * (original line 699) and the closing "};" are elided.
 */
694 enum core_rx_cqe_type {
695 CORE_RX_CQE_ILLIGAL_TYPE /* Bad RX Cqe type */,
696 CORE_RX_CQE_TYPE_REGULAR /* Regular Core RX CQE */,
697 CORE_RX_CQE_TYPE_GSI_OFFLOAD /* Fp Gsi offload RX CQE */,
698 CORE_RX_CQE_TYPE_SLOW_PATH /* Slow path Core RX CQE */,
703 * Core RX CQE for Light L2 .
/* Light-L2 fast-path RX CQE: type, parser flags, packet length, VLAN and
 * opaque data.
 * NOTE(review): gapped listing — original line 707 (the field the
 * buffer-offset comment on line 708 documents) and the struct tail
 * (reserved fields and the closing "};") are elided.
 */
705 struct core_rx_fast_path_cqe {
706 u8 type /* CQE type */;
708 /* Offset (in bytes) of the packet from start of the buffer */;
709 struct parsing_and_err_flags parse_flags
710 /* Parsing and error flags from the parser */;
711 __le16 packet_length /* Total packet length (from the parser) */;
712 __le16 vlan /* 802.1q VLAN tag */;
713 struct core_rx_cqe_opaque_data opaque_data /* Opaque Data */;
718 * Core Rx CM offload CQE .
/* Light-L2 GSI-offload RX CQE: parser results plus source MAC and
 * destination GID of the received RoCE packet.
 * NOTE(review): gapped listing — original line 729 (a field between the
 * source-MAC words and gid_dst) and the struct tail (closing "};") are
 * elided.
 */
720 struct core_rx_gsi_offload_cqe {
721 u8 type /* CQE type */;
722 u8 data_length_error /* set if gsi data is bigger than buff */;
723 struct parsing_and_err_flags parse_flags
724 /* Parsing and error flags from the parser */;
725 __le16 data_length /* Total packet length (from the parser) */;
726 __le16 vlan /* 802.1q VLAN tag */;
727 __le32 src_mac_addrhi /* hi 4 bytes source mac address */;
728 __le16 src_mac_addrlo /* lo 2 bytes of source mac address */;
730 __le32 gid_dst[4] /* Gid destination address */;
734 * Core RX CQE for Light L2 .
/* Light-L2 slow-path RX CQE.
 * NOTE(review): gapped listing — the struct tail (original lines 738+,
 * presumably reserved fields, and the closing "};") is elided.
 */
736 struct core_rx_slow_path_cqe {
737 u8 type /* CQE type */;
744 * Core RX CM offload BD for Light L2
/* Union of the light-L2 RX CQE layouts, discriminated by the leading
 * type byte (see enum core_rx_cqe_type).
 * NOTE(review): the closing "};" is elided in this listing.
 */
746 union core_rx_cqe_union {
747 struct core_rx_fast_path_cqe rx_cqe_fp /* Fast path CQE */;
748 struct core_rx_gsi_offload_cqe rx_cqe_gsi /* GSI offload CQE */;
749 struct core_rx_slow_path_cqe rx_cqe_sp /* Slow path CQE */;
753 * Ramrod data for rx queue start ramrod
/* Ramrod data for the light-L2 RX queue start command: ring addresses,
 * queue identity, completion mode and error policy.
 * NOTE(review): gapped listing — original line 772 (the field the
 * GSI-offload comment on line 773 documents) and the struct tail
 * (closing "};") are elided.
 */
755 struct core_rx_start_ramrod_data {
756 struct regpair bd_base /* bd address of the first bd page */;
757 struct regpair cqe_pbl_addr /* Base address on host of CQE PBL */;
758 __le16 mtu /* Maximum transmission unit */;
759 __le16 sb_id /* Status block ID */;
760 u8 sb_index /* index of the protocol index */;
761 u8 complete_cqe_flg /* post completion to the CQE ring if set */;
762 u8 complete_event_flg /* post completion to the event ring if set */;
763 u8 drop_ttl0_flg /* drop packet with ttl0 if set */;
764 __le16 num_of_pbl_pages /* Num of pages in CQE PBL */;
765 u8 inner_vlan_removal_en
766 /* if set, 802.1q tags will be removed and copied to CQE */;
767 u8 queue_id /* Light L2 RX Queue ID */;
768 u8 main_func_queue /* Is this the main queue for the PF */;
769 u8 mf_si_bcast_accept_all;
770 u8 mf_si_mcast_accept_all;
771 struct core_rx_action_on_error action_on_error;
773 /* set when in GSI offload mode on ROCE connection */;
778 * Ramrod data for rx queue stop ramrod
/* Ramrod data for the light-L2 RX queue stop command.
 * NOTE(review): gapped listing — the struct tail (original lines 784+,
 * presumably reserved fields, and the closing "};") is elided.
 */
780 struct core_rx_stop_ramrod_data {
781 u8 complete_cqe_flg /* post completion to the CQE ring if set */;
782 u8 complete_event_flg /* post completion to the event ring if set */;
783 u8 queue_id /* Light L2 RX Queue ID */;
789 * Flags for Core TX BD
/* Packed per-BD TX flags for light-L2: VLAN handling, start-of-packet
 * marker and checksum-offload controls, one bit each.
 * NOTE(review): gapped listing — original line 792 (presumably the u8
 * storage member the masks/shifts apply to) and the closing "};" are
 * elided.
 */
791 struct core_tx_bd_flags {
793 #define CORE_TX_BD_FLAGS_FORCE_VLAN_MODE_MASK 0x1
794 #define CORE_TX_BD_FLAGS_FORCE_VLAN_MODE_SHIFT 0
795 #define CORE_TX_BD_FLAGS_VLAN_INSERTION_MASK 0x1
796 #define CORE_TX_BD_FLAGS_VLAN_INSERTION_SHIFT 1
797 #define CORE_TX_BD_FLAGS_START_BD_MASK 0x1
798 #define CORE_TX_BD_FLAGS_START_BD_SHIFT 2
799 #define CORE_TX_BD_FLAGS_IP_CSUM_MASK 0x1
800 #define CORE_TX_BD_FLAGS_IP_CSUM_SHIFT 3
801 #define CORE_TX_BD_FLAGS_L4_CSUM_MASK 0x1
802 #define CORE_TX_BD_FLAGS_L4_CSUM_SHIFT 4
803 #define CORE_TX_BD_FLAGS_IPV6_EXT_MASK 0x1
804 #define CORE_TX_BD_FLAGS_IPV6_EXT_SHIFT 5
805 #define CORE_TX_BD_FLAGS_L4_PROTOCOL_MASK 0x1
806 #define CORE_TX_BD_FLAGS_L4_PROTOCOL_SHIFT 6
807 #define CORE_TX_BD_FLAGS_L4_PSEUDO_CSUM_MODE_MASK 0x1
808 #define CORE_TX_BD_FLAGS_L4_PSEUDO_CSUM_MODE_SHIFT 7
812 * Core TX BD for Light L2
815 struct regpair addr /* Buffer Address */;
816 __le16 nbytes /* Number of Bytes in Buffer */;
817 __le16 vlan /* VLAN to insert to packet (if insertion flag set) */;
818 u8 nbds /* Number of BDs that make up one packet */;
819 struct core_tx_bd_flags bd_flags /* BD Flags */;
820 __le16 l4_hdr_offset_w;
824 * Light L2 TX Destination
827 CORE_TX_DEST_NW /* Light L2 TX Destination to the Network */,
828 CORE_TX_DEST_LB /* Light L2 TX Destination to the Loopback */,
833 * Ramrod data for rx queue start ramrod
/* Ramrod data for the light-L2 TX queue start command: PBL location,
 * queue identity, statistics and QM configuration.
 * NOTE(review): gapped listing — original line 846 (the field the
 * GSI-offload comment on line 847 documents) and the struct tail
 * (closing "};") are elided.
 */
835 struct core_tx_start_ramrod_data {
836 struct regpair pbl_base_addr /* Address of the pbl page */;
837 __le16 mtu /* Maximum transmission unit */;
838 __le16 sb_id /* Status block ID */;
839 u8 sb_index /* Status block protocol index */;
840 u8 tx_dest /* TX Destination (either Network or LB) */;
841 u8 stats_en /* Statistics Enable */;
842 u8 stats_id /* Statistics Counter ID */;
843 __le16 pbl_size /* Number of BD pages pointed by PBL */;
844 __le16 qm_pq_id /* QM PQ ID */;
845 u8 conn_type /* connection type that loaded ll2 */;
847 /* set when in GSI offload mode on ROCE connection */;
852 * Ramrod data for tx queue stop ramrod
854 struct core_tx_stop_ramrod_data {
/* Ethernet per-queue RX discard and TPA (aggregation) statistics
 * (Mstorm).
 * NOTE(review): the closing "};" is elided in this listing.
 */
858 struct eth_mstorm_per_queue_stat {
859 struct regpair ttl0_discard;
860 struct regpair packet_too_big_discard;
861 struct regpair no_buff_discard;
862 struct regpair not_active_discard;
863 struct regpair tpa_coalesced_pkts;
864 struct regpair tpa_coalesced_events;
865 struct regpair tpa_aborts_num;
866 struct regpair tpa_coalesced_bytes;
870 * Ethernet TX Per Queue Stats
/* Ethernet per-queue TX statistics (Pstorm): sent bytes/packets by cast
 * type plus error drops.
 * NOTE(review): the closing "};" is elided in this listing.
 */
872 struct eth_pstorm_per_queue_stat {
873 struct regpair sent_ucast_bytes
874 /* number of total bytes sent without errors */;
875 struct regpair sent_mcast_bytes
876 /* number of total bytes sent without errors */;
877 struct regpair sent_bcast_bytes
878 /* number of total bytes sent without errors */;
879 struct regpair sent_ucast_pkts
880 /* number of total packets sent without errors */;
881 struct regpair sent_mcast_pkts
882 /* number of total packets sent without errors */;
883 struct regpair sent_bcast_pkts
884 /* number of total packets sent without errors */;
885 struct regpair error_drop_pkts
886 /* number of total packets dropped due to errors */;
890 * ETH Rx producers data
/* ETH RX rate-limit configuration.
 * NOTE(review): gapped listing — original lines 893-894 (the members the
 * constant-term comment on line 895 documents) and the struct tail
 * (closing "};") are elided; only the add/subtract selector is visible.
 */
892 struct eth_rx_rate_limit {
895 /* Constant term to add (or subtract from number of cycles) */;
896 u8 add_sub_cnst /* Add (1) or subtract (0) constant term */;
/* Ethernet per-queue RX statistics (Ustorm): received bytes and packets
 * broken down by ucast/mcast/bcast.
 * NOTE(review): the closing "};" is elided in this listing.
 */
901 struct eth_ustorm_per_queue_stat {
902 struct regpair rcv_ucast_bytes;
903 struct regpair rcv_mcast_bytes;
904 struct regpair rcv_bcast_bytes;
905 struct regpair rcv_ucast_pkts;
906 struct regpair rcv_mcast_pkts;
907 struct regpair rcv_bcast_pkts;
911 * Event Ring Next Page Address
/* Event-ring "next page" element: address of the next page plus padding
 * to the event-ring element size.
 * NOTE(review): the closing "};" is elided in this listing.
 */
913 struct event_ring_next_addr {
914 struct regpair addr /* Next Page Address */;
915 __le32 reserved[2] /* Reserved */;
/* Union of the two event-ring element layouts: a regular entry or a
 * next-page pointer.
 * NOTE(review): gapped listing — the terminating ";" of the next_addr
 * member and the closing "};" are elided.
 */
921 union event_ring_element {
922 struct event_ring_entry entry /* Event Ring Entry */;
923 struct event_ring_next_addr next_addr /* Event Ring Next Page Address */
930 enum fw_flow_ctrl_mode {
933 MAX_FW_FLOW_CTRL_MODE
940 INTEG_PHASE_BB_A0_LATEST = 3 /* BB A0 latest integration phase */,
941 INTEG_PHASE_BB_B0_NO_MCP = 10 /* BB B0 without MCP */,
942 INTEG_PHASE_BB_B0_WITH_MCP = 11 /* BB B0 with MCP */,
947 * Malicious VF error ID
949 enum malicious_vf_error_id {
950 MALICIOUS_VF_NO_ERROR /* Zero placeholder value */,
951 VF_PF_CHANNEL_NOT_READY
952 /* Writing to VF/PF channel when it is not ready */,
953 VF_ZONE_MSG_NOT_VALID /* VF channel message is not valid */,
954 VF_ZONE_FUNC_NOT_ENABLED /* Parent PF of VF channel is not active */,
956 /* TX packet is shorter then reported on BDs or from minimal size */
958 ETH_ILLEGAL_VLAN_MODE
959 /* Tx packet with marked as insert VLAN when its illegal */,
960 ETH_MTU_VIOLATION /* TX packet is greater then MTU */,
961 ETH_ILLEGAL_INBAND_TAGS /* TX packet has illegal inband tags marked */,
962 ETH_VLAN_INSERT_AND_INBAND_VLAN /* Vlan cant be added to inband tag */,
963 ETH_ILLEGAL_NBDS /* indicated number of BDs for the packet is illegal */
965 ETH_FIRST_BD_WO_SOP /* 1st BD must have start_bd flag set */,
967 /* There are not enough BDs for transmission of even one packet */,
968 ETH_ILLEGAL_LSO_HDR_NBDS /* Header NBDs value is illegal */,
969 ETH_ILLEGAL_LSO_MSS /* LSO MSS value is more than allowed */,
971 /* empty BD (which not contains control flags) is illegal */,
972 ETH_ILLEGAL_LSO_HDR_LEN /* LSO header size is above the limit */,
973 ETH_INSUFFICIENT_PAYLOAD
975 ETH_EDPM_OUT_OF_SYNC /* Valid BDs on local ring after EDPM L2 sync */,
976 ETH_TUNN_IPV6_EXT_NBD_ERR
977 /* Tunneled packet with IPv6+Ext without a proper number of BDs */,
978 MAX_MALICIOUS_VF_ERROR_ID
982 * Mstorm non-triggering VF zone
984 struct mstorm_non_trigger_vf_zone {
985 struct eth_mstorm_per_queue_stat eth_queue_stat
986 /* VF statistic bucket */;
992 struct mstorm_vf_zone {
993 struct mstorm_non_trigger_vf_zone non_trigger
994 /* non-interrupt-triggering zone */;
/* Function personality — which protocol stack(s) a PF/VF runs.
 * Carried in the "personality" field of pf_start_ramrod_data and
 * vf_start_ramrod_data.
 */
1000 enum personality_type {
1001 BAD_PERSONALITY_TYP,
1002 PERSONALITY_ISCSI /* iSCSI and LL2 */,
1003 PERSONALITY_FCOE /* Fcoe and LL2 */,
1004 PERSONALITY_RDMA_AND_ETH /* Roce or Iwarp, Eth and LL2 */,
1005 PERSONALITY_RDMA /* Roce and LL2 */,
1006 PERSONALITY_CORE /* CORE(LL2) */,
1007 PERSONALITY_ETH /* Ethernet */,
1008 PERSONALITY_TOE /* Toe and LL2 */,
1009 MAX_PERSONALITY_TYPE
1013 * tunnel configuration
1015 struct pf_start_tunnel_config {
1016 u8 set_vxlan_udp_port_flg /* Set VXLAN tunnel UDP destination port. */;
1017 u8 set_geneve_udp_port_flg /* Set GENEVE tunnel UDP destination port. */
1019 u8 tx_enable_vxlan /* If set, enable VXLAN tunnel in TX path. */;
1020 u8 tx_enable_l2geneve /* If set, enable l2 GENEVE tunnel in TX path. */
1022 u8 tx_enable_ipgeneve /* If set, enable IP GENEVE tunnel in TX path. */
1024 u8 tx_enable_l2gre /* If set, enable l2 GRE tunnel in TX path. */;
1025 u8 tx_enable_ipgre /* If set, enable IP GRE tunnel in TX path. */;
1026 u8 tunnel_clss_vxlan /* Classification scheme for VXLAN tunnel. */;
1027 u8 tunnel_clss_l2geneve
1028 /* Classification scheme for l2 GENEVE tunnel. */;
1029 u8 tunnel_clss_ipgeneve
1030 /* Classification scheme for ip GENEVE tunnel. */;
1031 u8 tunnel_clss_l2gre /* Classification scheme for l2 GRE tunnel. */;
1032 u8 tunnel_clss_ipgre /* Classification scheme for ip GRE tunnel. */;
1033 __le16 vxlan_udp_port /* VXLAN tunnel UDP destination port. */;
1034 __le16 geneve_udp_port /* GENEVE tunnel UDP destination port. */;
1038 * Ramrod data for PF start ramrod
1040 struct pf_start_ramrod_data {
1041 struct regpair event_ring_pbl_addr /* Address of event ring PBL */;
1042 struct regpair consolid_q_pbl_addr
1043 /* PBL address of consolidation queue */;
1044 struct pf_start_tunnel_config tunnel_config /* tunnel configuration. */
1046 __le16 event_ring_sb_id /* Status block ID */;
1049 u8 num_vfs /* Amount of vfs owned by PF */;
1050 u8 event_ring_num_pages /* Number of PBL pages in event ring */;
1051 u8 event_ring_sb_index /* Status block index */;
1052 u8 path_id /* HW path ID (engine ID) */;
1053 u8 warning_as_error /* In FW asserts, treat warning as error */;
1055 /* If not set - throw a warning for each ramrod (for debug) */;
1056 u8 personality /* define what type of personality is new PF */;
1057 __le16 log_type_mask;
1058 u8 mf_mode /* Multi function mode */;
1059 u8 integ_phase /* Integration phase */;
1060 u8 allow_npar_tx_switching;
1061 u8 inner_to_outer_pri_map[8];
1063 /* If inner_to_outer_pri_map is initialize then set pri_map_valid */
1070 * Data for port update ramrod
/* Per-protocol DCB parameters; one instance per protocol is embedded in
 * pf_update_ramrod_data (eth/fcoe/iscsi/roce/iwarp fields below).
 */
1072 struct protocol_dcb_data {
1073 u8 dcb_enable_flag /* dcbEnable flag value */;
1074 u8 dcb_priority /* dcbPri flag value */;
1075 u8 dcb_tc /* dcb TC (traffic class) value */;
1080 * tunnel configuration
1082 struct pf_update_tunnel_config {
1083 u8 update_rx_pf_clss;
1084 u8 update_tx_pf_clss;
1085 u8 set_vxlan_udp_port_flg
1086 /* Update VXLAN tunnel UDP destination port. */;
1087 u8 set_geneve_udp_port_flg
1088 /* Update GENEVE tunnel UDP destination port. */;
1089 u8 tx_enable_vxlan /* If set, enable VXLAN tunnel in TX path. */;
1090 u8 tx_enable_l2geneve /* If set, enable l2 GENEVE tunnel in TX path. */
1092 u8 tx_enable_ipgeneve /* If set, enable IP GENEVE tunnel in TX path. */
1094 u8 tx_enable_l2gre /* If set, enable l2 GRE tunnel in TX path. */;
1095 u8 tx_enable_ipgre /* If set, enable IP GRE tunnel in TX path. */;
1096 u8 tunnel_clss_vxlan /* Classification scheme for VXLAN tunnel. */;
1097 u8 tunnel_clss_l2geneve
1098 /* Classification scheme for l2 GENEVE tunnel. */;
1099 u8 tunnel_clss_ipgeneve
1100 /* Classification scheme for ip GENEVE tunnel. */;
1101 u8 tunnel_clss_l2gre /* Classification scheme for l2 GRE tunnel. */;
1102 u8 tunnel_clss_ipgre /* Classification scheme for ip GRE tunnel. */;
1103 __le16 vxlan_udp_port /* VXLAN tunnel UDP destination port. */;
1104 __le16 geneve_udp_port /* GENEVE tunnel UDP destination port. */;
1109 * Data for port update ramrod
1111 struct pf_update_ramrod_data {
1113 u8 update_eth_dcb_data_flag /* Update Eth DCB data indication */;
1114 u8 update_fcoe_dcb_data_flag /* Update FCOE DCB data indication */;
1115 u8 update_iscsi_dcb_data_flag /* Update iSCSI DCB data indication */;
1116 u8 update_roce_dcb_data_flag /* Update ROCE DCB data indication */;
1117 u8 update_iwarp_dcb_data_flag /* Update IWARP DCB data indication */;
1118 u8 update_mf_vlan_flag /* Update MF outer vlan Id */;
1120 struct protocol_dcb_data eth_dcb_data /* core eth related fields */;
1121 struct protocol_dcb_data fcoe_dcb_data /* core fcoe related fields */;
1122 struct protocol_dcb_data iscsi_dcb_data /* core iscsi related fields */
1124 struct protocol_dcb_data roce_dcb_data /* core roce related fields */;
1125 struct protocol_dcb_data iwarp_dcb_data /* core iwarp related fields */
1127 __le16 mf_vlan /* new outer vlan id value */;
1129 struct pf_update_tunnel_config tunnel_config /* tunnel configuration. */
1137 ENGX2_PORTX1 /* 2 engines x 1 port */,
1138 ENGX2_PORTX2 /* 2 engines x 2 ports */,
1139 ENGX1_PORTX1 /* 1 engine x 1 port */,
1140 ENGX1_PORTX2 /* 1 engine x 2 ports */,
1141 ENGX1_PORTX4 /* 1 engine x 4 ports */,
/* RDMA TX counters; embedded in pstorm_non_trigger_vf_zone as rdma_stats. */
1148 struct rdma_sent_stats {
1149 struct regpair sent_bytes /* number of total RDMA bytes sent */;
1150 struct regpair sent_pkts /* number of total RDMA packets sent */;
1154 * Pstorm non-triggering VF zone
1156 struct pstorm_non_trigger_vf_zone {
1157 struct eth_pstorm_per_queue_stat eth_queue_stat
1158 /* VF statistic bucket */;
1159 struct rdma_sent_stats rdma_stats /* RoCE sent statistics */;
1165 struct pstorm_vf_zone {
1166 struct pstorm_non_trigger_vf_zone non_trigger
1167 /* non-interrupt-triggering zone */;
1168 struct regpair reserved[7] /* vf_zone size mus be power of 2 */;
1172 * Ramrod Header of SPQE
/* Header of a slowpath element (SPQE) — see struct slow_path_element,
 * which pairs this header with a host pointer to the ramrod data.
 */
1174 struct ramrod_header {
1175 __le32 cid /* Slowpath Connection CID */;
1176 u8 cmd_id /* Ramrod Cmd (Per Protocol Type) */;
1177 u8 protocol_id /* Ramrod Protocol ID */;
1178 __le16 echo /* Ramrod echo */;
/* RDMA RX counters; embedded in tstorm_non_trigger_vf_zone as rdma_stats. */
1184 struct rdma_rcv_stats {
1185 struct regpair rcv_bytes /* number of total RDMA bytes received */;
1186 struct regpair rcv_pkts /* number of total RDMA packets received */;
1190 * Slowpath Element (SPQE)
/* Slowpath queue element: ramrod header plus host address of the
 * ramrod-specific data buffer.
 */
1192 struct slow_path_element {
1193 struct ramrod_header hdr /* Ramrod Header */;
1194 struct regpair data_ptr /* Pointer to the Ramrod Data on the Host */;
1198 * Tstorm non-triggering VF zone
1200 struct tstorm_non_trigger_vf_zone {
1201 struct rdma_rcv_stats rdma_stats /* RoCE received statistics */;
1204 struct tstorm_per_port_stat {
1205 struct regpair trunc_error_discard
1206 /* packet is dropped because it was truncated in NIG */;
1207 struct regpair mac_error_discard
1208 /* packet is dropped because of Ethernet FCS error */;
1209 struct regpair mftag_filter_discard
1210 /* packet is dropped because classification was unsuccessful */;
1211 struct regpair eth_mac_filter_discard;
1212 struct regpair ll2_mac_filter_discard;
1213 struct regpair ll2_conn_disabled_discard;
1214 struct regpair iscsi_irregular_pkt
1215 /* packet is an ISCSI irregular packet */;
1216 struct regpair fcoe_irregular_pkt
1217 /* packet is an FCOE irregular packet */;
1218 struct regpair roce_irregular_pkt
1219 /* packet is an ROCE irregular packet */;
1220 struct regpair eth_irregular_pkt /* packet is an ETH irregular packet */
1222 struct regpair toe_irregular_pkt /* packet is an TOE irregular packet */
1224 struct regpair preroce_irregular_pkt
1225 /* packet is an PREROCE irregular packet */;
1231 struct tstorm_vf_zone {
1232 struct tstorm_non_trigger_vf_zone non_trigger
1233 /* non-interrupt-triggering zone */;
1237 * Tunnel classification scheme
1240 TUNNEL_CLSS_MAC_VLAN =
1242 /* Use MAC & VLAN from first L2 header for vport classification. */
1246 TUNNEL_CLSS_INNER_MAC_VLAN
1247 /* Use MAC and VLAN from last L2 header for vport classification */
1249 TUNNEL_CLSS_INNER_MAC_VNI
1255 * Ustorm non-triggering VF zone
1257 struct ustorm_non_trigger_vf_zone {
1258 struct eth_ustorm_per_queue_stat eth_queue_stat
1259 /* VF statistic bucket */;
1260 struct regpair vf_pf_msg_addr /* VF-PF message address */;
1264 * Ustorm triggering VF zone
1266 struct ustorm_trigger_vf_zone {
1267 u8 vf_pf_msg_valid /* VF-PF message valid flag */;
1274 struct ustorm_vf_zone {
1275 struct ustorm_non_trigger_vf_zone non_trigger
1276 /* non-interrupt-triggering zone */;
1277 struct ustorm_trigger_vf_zone trigger /* interrupt triggering zone */;
1281 * VF-PF channel data
1283 struct vf_pf_channel_data {
1291 * Ramrod data for VF start ramrod
1293 struct vf_start_ramrod_data {
1294 u8 vf_id /* VF ID */;
1296 __le16 opaque_fid /* VF opaque FID */;
1297 u8 personality /* define what type of personality is new VF */;
1302 * Ramrod data for VF start ramrod
1304 struct vf_stop_ramrod_data {
1305 u8 vf_id /* VF ID */;
1312 * Attentions status block
1314 struct atten_status_block {
1318 __le16 sb_index /* status block running index */;
1323 * Igu cleanup bit values to distinguish between clean or producer consumer
/* Value of the IGU command-type bit (see IGU_CLEANUP_COMMAND_TYPE in
 * struct igu_cleanup): distinguishes NOP/cleanup from set commands.
 */
1325 enum command_type_bit {
1326 IGU_COMMAND_TYPE_NOP = 0,
1327 IGU_COMMAND_TYPE_SET = 1,
1328 MAX_COMMAND_TYPE_BIT
1336 #define DMAE_CMD_SRC_MASK 0x1
1337 #define DMAE_CMD_SRC_SHIFT 0
1338 #define DMAE_CMD_DST_MASK 0x3
1339 #define DMAE_CMD_DST_SHIFT 1
1340 #define DMAE_CMD_C_DST_MASK 0x1
1341 #define DMAE_CMD_C_DST_SHIFT 3
1342 #define DMAE_CMD_CRC_RESET_MASK 0x1
1343 #define DMAE_CMD_CRC_RESET_SHIFT 4
1344 #define DMAE_CMD_SRC_ADDR_RESET_MASK 0x1
1345 #define DMAE_CMD_SRC_ADDR_RESET_SHIFT 5
1346 #define DMAE_CMD_DST_ADDR_RESET_MASK 0x1
1347 #define DMAE_CMD_DST_ADDR_RESET_SHIFT 6
1348 #define DMAE_CMD_COMP_FUNC_MASK 0x1
1349 #define DMAE_CMD_COMP_FUNC_SHIFT 7
1350 #define DMAE_CMD_COMP_WORD_EN_MASK 0x1
1351 #define DMAE_CMD_COMP_WORD_EN_SHIFT 8
1352 #define DMAE_CMD_COMP_CRC_EN_MASK 0x1
1353 #define DMAE_CMD_COMP_CRC_EN_SHIFT 9
1354 #define DMAE_CMD_COMP_CRC_OFFSET_MASK 0x7
1355 #define DMAE_CMD_COMP_CRC_OFFSET_SHIFT 10
1356 #define DMAE_CMD_RESERVED1_MASK 0x1
1357 #define DMAE_CMD_RESERVED1_SHIFT 13
1358 #define DMAE_CMD_ENDIANITY_MODE_MASK 0x3
1359 #define DMAE_CMD_ENDIANITY_MODE_SHIFT 14
1360 #define DMAE_CMD_ERR_HANDLING_MASK 0x3
1361 #define DMAE_CMD_ERR_HANDLING_SHIFT 16
1362 #define DMAE_CMD_PORT_ID_MASK 0x3
1363 #define DMAE_CMD_PORT_ID_SHIFT 18
1364 #define DMAE_CMD_SRC_PF_ID_MASK 0xF
1365 #define DMAE_CMD_SRC_PF_ID_SHIFT 20
1366 #define DMAE_CMD_DST_PF_ID_MASK 0xF
1367 #define DMAE_CMD_DST_PF_ID_SHIFT 24
1368 #define DMAE_CMD_SRC_VF_ID_VALID_MASK 0x1
1369 #define DMAE_CMD_SRC_VF_ID_VALID_SHIFT 28
1370 #define DMAE_CMD_DST_VF_ID_VALID_MASK 0x1
1371 #define DMAE_CMD_DST_VF_ID_VALID_SHIFT 29
1372 #define DMAE_CMD_RESERVED2_MASK 0x3
1373 #define DMAE_CMD_RESERVED2_SHIFT 30
1375 /* PCIe source address low in bytes or GRC source address in DW */;
1379 __le16 length /* Length in DW */;
1381 #define DMAE_CMD_SRC_VF_ID_MASK 0xFF
1382 #define DMAE_CMD_SRC_VF_ID_SHIFT 0
1383 #define DMAE_CMD_DST_VF_ID_MASK 0xFF
1384 #define DMAE_CMD_DST_VF_ID_SHIFT 8
1385 __le32 comp_addr_lo /* PCIe completion address low or grc address */;
1386 __le32 comp_addr_hi;
1387 __le32 comp_val /* Value to write to completion address */;
1388 __le32 crc32 /* crc16 result */;
1389 __le32 crc_32_c /* crc32_c result */;
1390 __le16 crc16 /* crc16 result */;
1391 __le16 crc16_c /* crc16_c result */;
1392 __le16 crc10 /* crc_t10 result */;
1394 __le16 xsum16 /* checksum16 result */;
1395 __le16 xsum8 /* checksum8 result */;
1398 struct storm_ram_section {
1400 /* The offset of the section in the RAM (in 64 bit units) */;
1401 __le16 size /* The size of the section (in 64 bit units) */;
1405 * IGU cleanup command
/* IGU cleanup command. Bit layout of sb_id_and_flags (from the masks
 * and shifts below): [26:0] reserved, [27] cleanup_set,
 * [30:28] cleanup_type, [31] command_type (enum command_type_bit).
 */
1407 struct igu_cleanup {
1408 __le32 sb_id_and_flags;
1409 #define IGU_CLEANUP_RESERVED0_MASK 0x7FFFFFF
1410 #define IGU_CLEANUP_RESERVED0_SHIFT 0
1411 #define IGU_CLEANUP_CLEANUP_SET_MASK 0x1
1412 #define IGU_CLEANUP_CLEANUP_SET_SHIFT 27
1413 #define IGU_CLEANUP_CLEANUP_TYPE_MASK 0x7
1414 #define IGU_CLEANUP_CLEANUP_TYPE_SHIFT 28
1415 #define IGU_CLEANUP_COMMAND_TYPE_MASK 0x1
1416 #define IGU_CLEANUP_COMMAND_TYPE_SHIFT 31
1421 * IGU firmware driver command
1424 struct igu_prod_cons_update prod_cons_update;
1425 struct igu_cleanup cleanup;
1429 * IGU firmware driver command
1431 struct igu_command_reg_ctrl {
1433 __le16 igu_command_reg_ctrl_fields;
1434 #define IGU_COMMAND_REG_CTRL_PXP_BAR_ADDR_MASK 0xFFF
1435 #define IGU_COMMAND_REG_CTRL_PXP_BAR_ADDR_SHIFT 0
1436 #define IGU_COMMAND_REG_CTRL_RESERVED_MASK 0x7
1437 #define IGU_COMMAND_REG_CTRL_RESERVED_SHIFT 12
1438 #define IGU_COMMAND_REG_CTRL_COMMAND_TYPE_MASK 0x1
1439 #define IGU_COMMAND_REG_CTRL_COMMAND_TYPE_SHIFT 15
1443 * IGU mapping line structure
/* One IGU mapping line. Bit layout of igu_mapping_line_fields (from the
 * masks and shifts below): [0] valid, [8:1] vector_number,
 * [16:9] function_number, [17] pf_valid, [23:18] ips_group,
 * [31:24] reserved.
 */
1445 struct igu_mapping_line {
1446 __le32 igu_mapping_line_fields;
1447 #define IGU_MAPPING_LINE_VALID_MASK 0x1
1448 #define IGU_MAPPING_LINE_VALID_SHIFT 0
1449 #define IGU_MAPPING_LINE_VECTOR_NUMBER_MASK 0xFF
1450 #define IGU_MAPPING_LINE_VECTOR_NUMBER_SHIFT 1
1451 #define IGU_MAPPING_LINE_FUNCTION_NUMBER_MASK 0xFF
1452 #define IGU_MAPPING_LINE_FUNCTION_NUMBER_SHIFT 9
1453 #define IGU_MAPPING_LINE_PF_VALID_MASK 0x1
1454 #define IGU_MAPPING_LINE_PF_VALID_SHIFT 17
1455 #define IGU_MAPPING_LINE_IPS_GROUP_MASK 0x3F
1456 #define IGU_MAPPING_LINE_IPS_GROUP_SHIFT 18
1457 #define IGU_MAPPING_LINE_RESERVED_MASK 0xFF
1458 #define IGU_MAPPING_LINE_RESERVED_SHIFT 24
1462 * IGU MSIX line structure
1464 struct igu_msix_vector {
1465 struct regpair address;
1467 __le32 msix_vector_fields;
1468 #define IGU_MSIX_VECTOR_MASK_BIT_MASK 0x1
1469 #define IGU_MSIX_VECTOR_MASK_BIT_SHIFT 0
1470 #define IGU_MSIX_VECTOR_RESERVED0_MASK 0x7FFF
1471 #define IGU_MSIX_VECTOR_RESERVED0_SHIFT 1
1472 #define IGU_MSIX_VECTOR_STEERING_TAG_MASK 0xFF
1473 #define IGU_MSIX_VECTOR_STEERING_TAG_SHIFT 16
1474 #define IGU_MSIX_VECTOR_RESERVED1_MASK 0xFF
1475 #define IGU_MSIX_VECTOR_RESERVED1_SHIFT 24
1478 struct mstorm_core_conn_ag_ctx {
1479 u8 byte0 /* cdu_validation */;
1480 u8 byte1 /* state */;
1482 #define MSTORM_CORE_CONN_AG_CTX_BIT0_MASK 0x1
1483 #define MSTORM_CORE_CONN_AG_CTX_BIT0_SHIFT 0
1484 #define MSTORM_CORE_CONN_AG_CTX_BIT1_MASK 0x1
1485 #define MSTORM_CORE_CONN_AG_CTX_BIT1_SHIFT 1
1486 #define MSTORM_CORE_CONN_AG_CTX_CF0_MASK 0x3
1487 #define MSTORM_CORE_CONN_AG_CTX_CF0_SHIFT 2
1488 #define MSTORM_CORE_CONN_AG_CTX_CF1_MASK 0x3
1489 #define MSTORM_CORE_CONN_AG_CTX_CF1_SHIFT 4
1490 #define MSTORM_CORE_CONN_AG_CTX_CF2_MASK 0x3
1491 #define MSTORM_CORE_CONN_AG_CTX_CF2_SHIFT 6
1493 #define MSTORM_CORE_CONN_AG_CTX_CF0EN_MASK 0x1
1494 #define MSTORM_CORE_CONN_AG_CTX_CF0EN_SHIFT 0
1495 #define MSTORM_CORE_CONN_AG_CTX_CF1EN_MASK 0x1
1496 #define MSTORM_CORE_CONN_AG_CTX_CF1EN_SHIFT 1
1497 #define MSTORM_CORE_CONN_AG_CTX_CF2EN_MASK 0x1
1498 #define MSTORM_CORE_CONN_AG_CTX_CF2EN_SHIFT 2
1499 #define MSTORM_CORE_CONN_AG_CTX_RULE0EN_MASK 0x1
1500 #define MSTORM_CORE_CONN_AG_CTX_RULE0EN_SHIFT 3
1501 #define MSTORM_CORE_CONN_AG_CTX_RULE1EN_MASK 0x1
1502 #define MSTORM_CORE_CONN_AG_CTX_RULE1EN_SHIFT 4
1503 #define MSTORM_CORE_CONN_AG_CTX_RULE2EN_MASK 0x1
1504 #define MSTORM_CORE_CONN_AG_CTX_RULE2EN_SHIFT 5
1505 #define MSTORM_CORE_CONN_AG_CTX_RULE3EN_MASK 0x1
1506 #define MSTORM_CORE_CONN_AG_CTX_RULE3EN_SHIFT 6
1507 #define MSTORM_CORE_CONN_AG_CTX_RULE4EN_MASK 0x1
1508 #define MSTORM_CORE_CONN_AG_CTX_RULE4EN_SHIFT 7
1509 __le16 word0 /* word0 */;
1510 __le16 word1 /* word1 */;
1511 __le32 reg0 /* reg0 */;
1512 __le32 reg1 /* reg1 */;
1516 * per encapsulation type enabling flags
1518 struct prs_reg_encapsulation_type_en {
1520 #define PRS_REG_ENCAPSULATION_TYPE_EN_ETH_OVER_GRE_ENABLE_MASK 0x1
1521 #define PRS_REG_ENCAPSULATION_TYPE_EN_ETH_OVER_GRE_ENABLE_SHIFT 0
1522 #define PRS_REG_ENCAPSULATION_TYPE_EN_IP_OVER_GRE_ENABLE_MASK 0x1
1523 #define PRS_REG_ENCAPSULATION_TYPE_EN_IP_OVER_GRE_ENABLE_SHIFT 1
1524 #define PRS_REG_ENCAPSULATION_TYPE_EN_VXLAN_ENABLE_MASK 0x1
1525 #define PRS_REG_ENCAPSULATION_TYPE_EN_VXLAN_ENABLE_SHIFT 2
1526 #define PRS_REG_ENCAPSULATION_TYPE_EN_T_TAG_ENABLE_MASK 0x1
1527 #define PRS_REG_ENCAPSULATION_TYPE_EN_T_TAG_ENABLE_SHIFT 3
1528 #define PRS_REG_ENCAPSULATION_TYPE_EN_ETH_OVER_GENEVE_ENABLE_MASK 0x1
1529 #define PRS_REG_ENCAPSULATION_TYPE_EN_ETH_OVER_GENEVE_ENABLE_SHIFT 4
1530 #define PRS_REG_ENCAPSULATION_TYPE_EN_IP_OVER_GENEVE_ENABLE_MASK 0x1
1531 #define PRS_REG_ENCAPSULATION_TYPE_EN_IP_OVER_GENEVE_ENABLE_SHIFT 5
1532 #define PRS_REG_ENCAPSULATION_TYPE_EN_RESERVED_MASK 0x3
1533 #define PRS_REG_ENCAPSULATION_TYPE_EN_RESERVED_SHIFT 6
1536 enum pxp_tph_st_hint {
1537 TPH_ST_HINT_BIDIR /* Read/Write access by Host and Device */,
1538 TPH_ST_HINT_REQUESTER /* Read/Write access by Device */,
1540 /* Device Write and Host Read, or Host Write and Device Read */,
1541 TPH_ST_HINT_TARGET_PRIO,
1546 * QM hardware structure of enable bypass credit mask
1548 struct qm_rf_bypass_mask {
1550 #define QM_RF_BYPASS_MASK_LINEVOQ_MASK 0x1
1551 #define QM_RF_BYPASS_MASK_LINEVOQ_SHIFT 0
1552 #define QM_RF_BYPASS_MASK_RESERVED0_MASK 0x1
1553 #define QM_RF_BYPASS_MASK_RESERVED0_SHIFT 1
1554 #define QM_RF_BYPASS_MASK_PFWFQ_MASK 0x1
1555 #define QM_RF_BYPASS_MASK_PFWFQ_SHIFT 2
1556 #define QM_RF_BYPASS_MASK_VPWFQ_MASK 0x1
1557 #define QM_RF_BYPASS_MASK_VPWFQ_SHIFT 3
1558 #define QM_RF_BYPASS_MASK_PFRL_MASK 0x1
1559 #define QM_RF_BYPASS_MASK_PFRL_SHIFT 4
1560 #define QM_RF_BYPASS_MASK_VPQCNRL_MASK 0x1
1561 #define QM_RF_BYPASS_MASK_VPQCNRL_SHIFT 5
1562 #define QM_RF_BYPASS_MASK_FWPAUSE_MASK 0x1
1563 #define QM_RF_BYPASS_MASK_FWPAUSE_SHIFT 6
1564 #define QM_RF_BYPASS_MASK_RESERVED1_MASK 0x1
1565 #define QM_RF_BYPASS_MASK_RESERVED1_SHIFT 7
1569 * QM hardware structure of opportunistic credit mask
1571 struct qm_rf_opportunistic_mask {
1573 #define QM_RF_OPPORTUNISTIC_MASK_LINEVOQ_MASK 0x1
1574 #define QM_RF_OPPORTUNISTIC_MASK_LINEVOQ_SHIFT 0
1575 #define QM_RF_OPPORTUNISTIC_MASK_BYTEVOQ_MASK 0x1
1576 #define QM_RF_OPPORTUNISTIC_MASK_BYTEVOQ_SHIFT 1
1577 #define QM_RF_OPPORTUNISTIC_MASK_PFWFQ_MASK 0x1
1578 #define QM_RF_OPPORTUNISTIC_MASK_PFWFQ_SHIFT 2
1579 #define QM_RF_OPPORTUNISTIC_MASK_VPWFQ_MASK 0x1
1580 #define QM_RF_OPPORTUNISTIC_MASK_VPWFQ_SHIFT 3
1581 #define QM_RF_OPPORTUNISTIC_MASK_PFRL_MASK 0x1
1582 #define QM_RF_OPPORTUNISTIC_MASK_PFRL_SHIFT 4
1583 #define QM_RF_OPPORTUNISTIC_MASK_VPQCNRL_MASK 0x1
1584 #define QM_RF_OPPORTUNISTIC_MASK_VPQCNRL_SHIFT 5
1585 #define QM_RF_OPPORTUNISTIC_MASK_FWPAUSE_MASK 0x1
1586 #define QM_RF_OPPORTUNISTIC_MASK_FWPAUSE_SHIFT 6
1587 #define QM_RF_OPPORTUNISTIC_MASK_RESERVED0_MASK 0x1
1588 #define QM_RF_OPPORTUNISTIC_MASK_RESERVED0_SHIFT 7
1589 #define QM_RF_OPPORTUNISTIC_MASK_QUEUEEMPTY_MASK 0x1
1590 #define QM_RF_OPPORTUNISTIC_MASK_QUEUEEMPTY_SHIFT 8
1591 #define QM_RF_OPPORTUNISTIC_MASK_RESERVED1_MASK 0x7F
1592 #define QM_RF_OPPORTUNISTIC_MASK_RESERVED1_SHIFT 9
1596 * QM hardware structure of QM map memory
1598 struct qm_rf_pq_map {
1600 #define QM_RF_PQ_MAP_PQ_VALID_MASK 0x1
1601 #define QM_RF_PQ_MAP_PQ_VALID_SHIFT 0
1602 #define QM_RF_PQ_MAP_RL_ID_MASK 0xFF
1603 #define QM_RF_PQ_MAP_RL_ID_SHIFT 1
1604 #define QM_RF_PQ_MAP_VP_PQ_ID_MASK 0x1FF
1605 #define QM_RF_PQ_MAP_VP_PQ_ID_SHIFT 9
1606 #define QM_RF_PQ_MAP_VOQ_MASK 0x1F
1607 #define QM_RF_PQ_MAP_VOQ_SHIFT 18
1608 #define QM_RF_PQ_MAP_WRR_WEIGHT_GROUP_MASK 0x3
1609 #define QM_RF_PQ_MAP_WRR_WEIGHT_GROUP_SHIFT 23
1610 #define QM_RF_PQ_MAP_RL_VALID_MASK 0x1
1611 #define QM_RF_PQ_MAP_RL_VALID_SHIFT 25
1612 #define QM_RF_PQ_MAP_RESERVED_MASK 0x3F
1613 #define QM_RF_PQ_MAP_RESERVED_SHIFT 26
1617 * Completion params for aggregated interrupt completion
1619 struct sdm_agg_int_comp_params {
1621 #define SDM_AGG_INT_COMP_PARAMS_AGG_INT_INDEX_MASK 0x3F
1622 #define SDM_AGG_INT_COMP_PARAMS_AGG_INT_INDEX_SHIFT 0
1623 #define SDM_AGG_INT_COMP_PARAMS_AGG_VECTOR_ENABLE_MASK 0x1
1624 #define SDM_AGG_INT_COMP_PARAMS_AGG_VECTOR_ENABLE_SHIFT 6
1625 #define SDM_AGG_INT_COMP_PARAMS_AGG_VECTOR_BIT_MASK 0x1FF
1626 #define SDM_AGG_INT_COMP_PARAMS_AGG_VECTOR_BIT_SHIFT 7
1630 * SDM operation gen command (generate aggregative interrupt)
1634 #define SDM_OP_GEN_COMP_PARAM_MASK 0xFFFF
1635 #define SDM_OP_GEN_COMP_PARAM_SHIFT 0
1636 #define SDM_OP_GEN_COMP_TYPE_MASK 0xF
1637 #define SDM_OP_GEN_COMP_TYPE_SHIFT 16
1638 #define SDM_OP_GEN_RESERVED_MASK 0xFFF
1639 #define SDM_OP_GEN_RESERVED_SHIFT 20
1642 struct ystorm_core_conn_ag_ctx {
1643 u8 byte0 /* cdu_validation */;
1644 u8 byte1 /* state */;
1646 #define YSTORM_CORE_CONN_AG_CTX_BIT0_MASK 0x1
1647 #define YSTORM_CORE_CONN_AG_CTX_BIT0_SHIFT 0
1648 #define YSTORM_CORE_CONN_AG_CTX_BIT1_MASK 0x1
1649 #define YSTORM_CORE_CONN_AG_CTX_BIT1_SHIFT 1
1650 #define YSTORM_CORE_CONN_AG_CTX_CF0_MASK 0x3
1651 #define YSTORM_CORE_CONN_AG_CTX_CF0_SHIFT 2
1652 #define YSTORM_CORE_CONN_AG_CTX_CF1_MASK 0x3
1653 #define YSTORM_CORE_CONN_AG_CTX_CF1_SHIFT 4
1654 #define YSTORM_CORE_CONN_AG_CTX_CF2_MASK 0x3
1655 #define YSTORM_CORE_CONN_AG_CTX_CF2_SHIFT 6
1657 #define YSTORM_CORE_CONN_AG_CTX_CF0EN_MASK 0x1
1658 #define YSTORM_CORE_CONN_AG_CTX_CF0EN_SHIFT 0
1659 #define YSTORM_CORE_CONN_AG_CTX_CF1EN_MASK 0x1
1660 #define YSTORM_CORE_CONN_AG_CTX_CF1EN_SHIFT 1
1661 #define YSTORM_CORE_CONN_AG_CTX_CF2EN_MASK 0x1
1662 #define YSTORM_CORE_CONN_AG_CTX_CF2EN_SHIFT 2
1663 #define YSTORM_CORE_CONN_AG_CTX_RULE0EN_MASK 0x1
1664 #define YSTORM_CORE_CONN_AG_CTX_RULE0EN_SHIFT 3
1665 #define YSTORM_CORE_CONN_AG_CTX_RULE1EN_MASK 0x1
1666 #define YSTORM_CORE_CONN_AG_CTX_RULE1EN_SHIFT 4
1667 #define YSTORM_CORE_CONN_AG_CTX_RULE2EN_MASK 0x1
1668 #define YSTORM_CORE_CONN_AG_CTX_RULE2EN_SHIFT 5
1669 #define YSTORM_CORE_CONN_AG_CTX_RULE3EN_MASK 0x1
1670 #define YSTORM_CORE_CONN_AG_CTX_RULE3EN_SHIFT 6
1671 #define YSTORM_CORE_CONN_AG_CTX_RULE4EN_MASK 0x1
1672 #define YSTORM_CORE_CONN_AG_CTX_RULE4EN_SHIFT 7
1673 u8 byte2 /* byte2 */;
1674 u8 byte3 /* byte3 */;
1675 __le16 word0 /* word0 */;
1676 __le32 reg0 /* reg0 */;
1677 __le32 reg1 /* reg1 */;
1678 __le16 word1 /* word1 */;
1679 __le16 word2 /* word2 */;
1680 __le16 word3 /* word3 */;
1681 __le16 word4 /* word4 */;
1682 __le32 reg2 /* reg2 */;
1683 __le32 reg3 /* reg3 */;
1686 #endif /* __ECORE_HSI_COMMON__ */