1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(C) 2021 Marvell.
/* Hard-coded per-level bandwidth-profile counts (see roc_nix_bpf_count_get:
 * no mbox query exists yet, so counts assume 3 RPM blocks x 4 LMACs).
 */
8 #define NIX_MAX_BPF_COUNT_LEAF_LAYER 64
9 #define NIX_MAX_BPF_COUNT_MID_LAYER 8
10 #define NIX_MAX_BPF_COUNT_TOP_LAYER 1
/* Pre-color table entry counts; each entry is a 2-bit color packed into
 * 64-bit conversion registers (DSCP needs two registers: LO/HI halves).
 */
12 #define NIX_BPF_PRECOLOR_GEN_TABLE_SIZE 16
13 #define NIX_BPF_PRECOLOR_VLAN_TABLE_SIZE 16
14 #define NIX_BPF_PRECOLOR_DSCP_TABLE_SIZE 64
/* All supported level flags ORed together; used to sanitize lvl_mask args. */
16 #define NIX_BPF_LEVEL_F_MASK \
17 (ROC_NIX_BPF_LEVEL_F_LEAF | ROC_NIX_BPF_LEVEL_F_MID | \
18 ROC_NIX_BPF_LEVEL_F_TOP)
/* Read / clear one NIX LF RX statistics register. Both macros rely on a
 * local variable named 'nix' being in scope at the expansion site.
 */
20 #define NIX_RD_STATS(val) plt_read64(nix->base + NIX_LF_RX_STATX(val))
21 #define NIX_RST_STATS(val) plt_write64(0, nix->base + NIX_LF_RX_STATX(val))
/* Map software level index (0=leaf, 1=mid, 2=top, as produced by
 * roc_nix_bpf_level_to_idx) to the hardware band-profile layer id.
 */
23 static uint8_t sw_to_hw_lvl_map[] = {NIX_RX_BAND_PROF_LAYER_LEAF,
24 NIX_RX_BAND_PROF_LAYER_MIDDLE,
25 NIX_RX_BAND_PROF_LAYER_TOP};
/* Return the mbox handle of the device backing this roc_nix handle. */
27 static inline struct mbox *
28 get_mbox(struct roc_nix *roc_nix)
30 struct nix *nix = roc_nix_to_nix_priv(roc_nix);
31 struct dev *dev = &nix->dev;
/* Encode a meter rate 'value' into the NIX HW (exponent, mantissa, div_exp)
 * triple.  Values outside [NIX_BPF_RATE_MIN, NIX_BPF_RATE_MAX] are rejected
 * up front.  Small rates (<= NIX_BPF_RATE(0,0,0)) are encoded via div_exp,
 * larger ones via exponent; both searches start from the maximum encoding
 * and walk down until the encoded rate drops to <= 'value'.
 * Returns the actual rate the HW will apply, per NIX_BPF_RATE().
 */
36 static inline uint64_t
37 meter_rate_to_nix(uint64_t value, uint64_t *exponent_p, uint64_t *mantissa_p,
40 uint64_t div_exp, exponent, mantissa;
43 if (value < NIX_BPF_RATE_MIN || value > NIX_BPF_RATE_MAX)
46 if (value <= NIX_BPF_RATE(0, 0, 0)) {
47 /* Calculate rate div_exp and mantissa using
48 * the following formula:
50 * value = (2E6 * (256 + mantissa)
51 * / ((1 << div_exp) * 256))
55 mantissa = NIX_BPF_MAX_RATE_MANTISSA;
57 while (value < (NIX_BPF_RATE_CONST / (1 << div_exp)))
60 while (value < ((NIX_BPF_RATE_CONST * (256 + mantissa)) /
61 ((1 << div_exp) * 256)))
64 /* Calculate rate exponent and mantissa using
65 * the following formula:
67 * value = (2E6 * ((256 + mantissa) << exponent)) / 256
71 exponent = NIX_BPF_MAX_RATE_EXPONENT;
72 mantissa = NIX_BPF_MAX_RATE_MANTISSA;
74 while (value < (NIX_BPF_RATE_CONST * (1 << exponent)))
78 ((NIX_BPF_RATE_CONST * ((256 + mantissa) << exponent)) /
/* Final sanity check: the search must not have produced an encoding
 * outside the HW field widths.
 */
83 if (div_exp > NIX_BPF_MAX_RATE_DIV_EXP ||
84 exponent > NIX_BPF_MAX_RATE_EXPONENT ||
85 mantissa > NIX_BPF_MAX_RATE_MANTISSA)
91 *exponent_p = exponent;
93 *mantissa_p = mantissa;
95 /* Calculate real rate value */
96 return NIX_BPF_RATE(exponent, mantissa, div_exp);
/* Encode a meter burst 'value' into the NIX HW (exponent, mantissa) pair.
 * Values outside [NIX_BPF_BURST_MIN, NIX_BPF_BURST_MAX] are rejected.
 * Starts at the maximum encoding and walks exponent, then mantissa, down
 * until the encoded burst is <= 'value'.
 * Returns the actual burst the HW will apply, per NIX_BPF_BURST().
 */
99 static inline uint64_t
100 meter_burst_to_nix(uint64_t value, uint64_t *exponent_p, uint64_t *mantissa_p)
102 uint64_t exponent, mantissa;
104 if (value < NIX_BPF_BURST_MIN || value > NIX_BPF_BURST_MAX)
107 /* Calculate burst exponent and mantissa using
108 * the following formula:
110 * value = (((256 + mantissa) << (exponent + 1)
114 exponent = NIX_BPF_MAX_BURST_EXPONENT;
115 mantissa = NIX_BPF_MAX_BURST_MANTISSA;
117 while (value < (1ull << (exponent + 1)))
120 while (value < ((256 + mantissa) << (exponent + 1)) / 256)
/* Guard against encodings exceeding the HW field widths. */
123 if (exponent > NIX_BPF_MAX_BURST_EXPONENT ||
124 mantissa > NIX_BPF_MAX_BURST_MANTISSA)
128 *exponent_p = exponent;
130 *mantissa_p = mantissa;
132 return NIX_BPF_BURST(exponent, mantissa);
/* Dump all fields of one HW band-profile context (nix_band_prof_s),
 * word by word (W0..W15), to the plt_dump() log sink.
 * Fix: the "peir_matissa" label was a typo for "peir_mantissa"
 * (compare the cir_mantissa/pebs_mantissa labels above it).
 * NOTE(review): 'l_sellect' is the field's actual (misspelled) name in
 * the HW structure definition, not a typo introduced here.
 */
136 nix_lf_bpf_dump(__io struct nix_band_prof_s *bpf)
138 plt_dump("W0: cir_mantissa \t\t\t%d\nW0: pebs_mantissa \t\t\t0x%03x",
139 bpf->cir_mantissa, bpf->pebs_mantissa);
140 plt_dump("W0: peir_mantissa \t\t\t\t%d\nW0: cbs_exponent \t\t\t%d",
141 bpf->peir_mantissa, bpf->cbs_exponent);
142 plt_dump("W0: cir_exponent \t\t\t%d\nW0: pebs_exponent \t\t\t%d",
143 bpf->cir_exponent, bpf->pebs_exponent);
144 plt_dump("W0: peir_exponent \t\t\t%d\n", bpf->peir_exponent);
145 plt_dump("W0: tnl_ena \t\t\t%d\n", bpf->tnl_ena);
146 plt_dump("W0: icolor \t\t\t%d\n", bpf->icolor);
147 plt_dump("W0: pc_mode \t\t\t%d\n", bpf->pc_mode);
148 plt_dump("W1: hl_en \t\t%d\nW1: band_prof_id \t\t%d", bpf->hl_en,
150 plt_dump("W1: meter_algo \t\t%d\nW1: rc_action \t\t%d", bpf->meter_algo,
152 plt_dump("W1: yc_action \t\t\t%d\nW1: gc_action \t\t\t%d",
153 bpf->yc_action, bpf->gc_action);
154 plt_dump("W1: adjust_mantissa\t\t\t%d\nW1: adjust_exponent \t\t\t%d",
155 bpf->adjust_mantissa, bpf->adjust_exponent);
156 plt_dump("W1: rdiv \t\t\t%d\n", bpf->rdiv);
157 plt_dump("W1: l_select \t\t%d\nW2: lmode \t\t%d", bpf->l_sellect,
159 plt_dump("W1: cbs_mantissa \t\t\t%d\n", bpf->cbs_mantissa);
160 plt_dump("W2: tsa \t\t\t0x%" PRIx64 "\n", (uint64_t)bpf->ts);
161 plt_dump("W3: c_accum \t\t%d\nW3: pe_accum \t\t%d", bpf->c_accum,
163 plt_dump("W4: green_pkt_pass \t\t\t0x%" PRIx64 "",
164 (uint64_t)bpf->green_pkt_pass);
165 plt_dump("W5: yellow_pkt_pass \t\t\t0x%" PRIx64 "",
166 (uint64_t)bpf->yellow_pkt_pass);
167 plt_dump("W6: red_pkt_pass \t\t\t0x%" PRIx64 "",
168 (uint64_t)bpf->red_pkt_pass);
169 plt_dump("W7: green_octs_pass \t\t\t0x%" PRIx64 "",
170 (uint64_t)bpf->green_octs_pass);
171 plt_dump("W8: yellow_octs_pass \t\t\t0x%" PRIx64 "",
172 (uint64_t)bpf->yellow_octs_pass);
173 plt_dump("W9: red_octs_pass \t\t\t0x%" PRIx64 "",
174 (uint64_t)bpf->red_octs_pass);
175 plt_dump("W10: green_pkt_drop \t\t\t0x%" PRIx64 "",
176 (uint64_t)bpf->green_pkt_drop);
177 plt_dump("W11: yellow_pkt_drop \t\t\t0x%" PRIx64 "",
178 (uint64_t)bpf->yellow_pkt_drop);
179 plt_dump("W12: red_pkt_drop \t\t\t0x%" PRIx64 "",
180 (uint64_t)bpf->red_pkt_drop);
181 plt_dump("W13: green_octs_drop \t\t\t0x%" PRIx64 "",
182 (uint64_t)bpf->green_octs_drop);
183 plt_dump("W14: yellow_octs_drop \t\t\t0x%" PRIx64 "",
184 (uint64_t)bpf->yellow_octs_drop);
185 plt_dump("W15: red_octs_drop \t\t\t0x%" PRIx64 "",
186 (uint64_t)bpf->red_octs_drop);
/* Write one packed pre-color conversion word to the LF register at
 * offset 'off'.  The actual plt_write64() is commented out (see FIXME:
 * writing the register currently triggers a kernel dump), so today this
 * only computes the target address.
 */
190 nix_precolor_conv_table_write(struct roc_nix *roc_nix, uint64_t val,
193 struct nix *nix = roc_nix_to_nix_priv(roc_nix);
196 addr = PLT_PTR_ADD(nix->base, off);
197 /* FIXME: Currently writing to this register throwing kernel dump.
198 * plt_write64(val, addr);
/* Program the VLAN PCP -> color conversion table: pack tbl->count 2-bit
 * colors into one 64-bit word and write it to the inner (VLAN1) or outer
 * (VLAN0) conversion register depending on tbl->mode.
 * Callers use the return value as the profile's tnl_ena flag
 * (see roc_nix_bpf_pre_color_tbl_setup).
 */
205 nix_precolor_vlan_table_update(struct roc_nix *roc_nix,
206 struct roc_nix_bpf_precolor *tbl)
212 for (i = 0; i < tbl->count; i++)
213 val |= (((uint64_t)tbl->color[i]) << (2 * i));
215 if (tbl->mode == ROC_NIX_BPF_PC_MODE_VLAN_INNER) {
216 off = NIX_LF_RX_VLAN1_COLOR_CONV;
219 off = NIX_LF_RX_VLAN0_COLOR_CONV;
223 nix_precolor_conv_table_write(roc_nix, val, off);
/* Program the inner-IP DSCP -> color conversion table.  The 64 2-bit
 * entries are split across two 64-bit registers: the first half of
 * tbl->color[] goes into the LO word, the second half into HI.
 * Callers use the return value as the profile's tnl_ena flag.
 */
228 nix_precolor_inner_dscp_table_update(struct roc_nix *roc_nix,
229 struct roc_nix_bpf_precolor *tbl)
231 uint64_t val_lo = 0, val_hi = 0, i, j;
233 for (i = 0, j = 0; i < (tbl->count / 2); i++, j++)
234 val_lo |= (((uint64_t)tbl->color[i]) << (2 * j));
/* 'i' carries over: continue through the second half with a fresh bit
 * position 'j' for the HI word.
 */
236 for (j = 0; i < tbl->count; i++, j++)
237 val_hi |= (((uint64_t)tbl->color[i]) << (2 * j));
239 nix_precolor_conv_table_write(roc_nix, val_lo,
240 NIX_LF_RX_IIP_COLOR_CONV_LO);
241 nix_precolor_conv_table_write(roc_nix, val_hi,
242 NIX_LF_RX_IIP_COLOR_CONV_HI);
/* Program the outer-IP DSCP -> color conversion table.  Same packing as
 * the inner-DSCP variant, but targets the OIP LO/HI registers.
 * Callers use the return value as the profile's tnl_ena flag.
 */
248 nix_precolor_outer_dscp_table_update(struct roc_nix *roc_nix,
249 struct roc_nix_bpf_precolor *tbl)
251 uint64_t val_lo = 0, val_hi = 0, i, j;
253 for (i = 0, j = 0; i < (tbl->count / 2); i++, j++)
254 val_lo |= (((uint64_t)tbl->color[i]) << (2 * j));
/* 'i' carries over from the first loop; 'j' restarts for the HI word. */
256 for (j = 0; i < tbl->count; i++, j++)
257 val_hi |= (((uint64_t)tbl->color[i]) << (2 * j));
259 nix_precolor_conv_table_write(roc_nix, val_lo,
260 NIX_LF_RX_OIP_COLOR_CONV_LO);
261 nix_precolor_conv_table_write(roc_nix, val_hi,
262 NIX_LF_RX_OIP_COLOR_CONV_HI);
/* Program the generic header -> color conversion table: pack tbl->count
 * 2-bit colors into one word and write it to GEN_COLOR_CONVX(1) for
 * inner mode, GEN_COLOR_CONVX(0) otherwise.
 * Callers use the return value as the profile's tnl_ena flag.
 */
268 nix_precolor_gen_table_update(struct roc_nix *roc_nix,
269 struct roc_nix_bpf_precolor *tbl)
275 for (i = 0; i < tbl->count; i++)
276 val |= (((uint64_t)tbl->color[i]) << (2 * i));
278 if (tbl->mode == ROC_NIX_BPF_PC_MODE_GEN_INNER) {
279 off = NIX_LF_RX_GEN_COLOR_CONVX(1);
282 off = NIX_LF_RX_GEN_COLOR_CONVX(0);
286 nix_precolor_conv_table_write(roc_nix, val, off);
/* Convert a single level flag to its software level index (leaf < mid <
 * top); returns ROC_NIX_BPF_LEVEL_IDX_INVALID when no known flag is set.
 * Only the first matching flag counts, so pass exactly one flag.
 */
291 roc_nix_bpf_level_to_idx(enum roc_nix_bpf_level_flag level_f)
295 if (level_f & ROC_NIX_BPF_LEVEL_F_LEAF)
297 else if (level_f & ROC_NIX_BPF_LEVEL_F_MID)
299 else if (level_f & ROC_NIX_BPF_LEVEL_F_TOP)
302 idx = ROC_NIX_BPF_LEVEL_IDX_INVALID;
/* Convert a single stats flag to its index in the stats[] output array;
 * returns ROC_NIX_BPF_STATS_MAX when no known flag is set.  As with
 * level_to_idx, only the first matching flag counts.
 */
307 roc_nix_bpf_stats_to_idx(enum roc_nix_bpf_stats level_f)
311 if (level_f & ROC_NIX_BPF_GREEN_PKT_F_PASS)
313 else if (level_f & ROC_NIX_BPF_GREEN_OCTS_F_PASS)
315 else if (level_f & ROC_NIX_BPF_GREEN_PKT_F_DROP)
317 else if (level_f & ROC_NIX_BPF_GREEN_OCTS_F_DROP)
319 else if (level_f & ROC_NIX_BPF_YELLOW_PKT_F_PASS)
321 else if (level_f & ROC_NIX_BPF_YELLOW_OCTS_F_PASS)
323 else if (level_f & ROC_NIX_BPF_YELLOW_PKT_F_DROP)
325 else if (level_f & ROC_NIX_BPF_YELLOW_OCTS_F_DROP)
327 else if (level_f & ROC_NIX_BPF_RED_PKT_F_PASS)
329 else if (level_f & ROC_NIX_BPF_RED_OCTS_F_PASS)
331 else if (level_f & ROC_NIX_BPF_RED_PKT_F_DROP)
333 else if (level_f & ROC_NIX_BPF_RED_OCTS_F_DROP)
336 idx = ROC_NIX_BPF_STATS_MAX;
/* Report how many bandwidth profiles exist per requested level, writing
 * into count[] at the index corresponding to each level flag in lvl_mask.
 * Not supported on cn9k.  Values are the hard-coded NIX_MAX_BPF_COUNT_*
 * limits (no mbox query available yet; see comment below).
 */
341 roc_nix_bpf_count_get(struct roc_nix *roc_nix, uint8_t lvl_mask,
342 uint16_t count[ROC_NIX_BPF_LEVEL_MAX])
344 uint8_t mask = lvl_mask & NIX_BPF_LEVEL_F_MASK;
345 uint8_t leaf_idx, mid_idx, top_idx;
347 PLT_SET_USED(roc_nix);
349 if (roc_model_is_cn9k())
350 return NIX_ERR_HW_NOTSUP;
353 return NIX_ERR_PARAM;
355 /* Currently No MBOX interface is available to get number
356 * of bandwidth profiles. So numbers per level are hard coded,
357 * considering 3 RPM blocks and each block has 4 LMAC's.
358 * So total 12 physical interfaces are in system. Each interface
359 * supports following bandwidth profiles.
362 leaf_idx = roc_nix_bpf_level_to_idx(mask & ROC_NIX_BPF_LEVEL_F_LEAF);
363 mid_idx = roc_nix_bpf_level_to_idx(mask & ROC_NIX_BPF_LEVEL_F_MID);
364 top_idx = roc_nix_bpf_level_to_idx(mask & ROC_NIX_BPF_LEVEL_F_TOP);
366 if (leaf_idx != ROC_NIX_BPF_LEVEL_IDX_INVALID)
367 count[leaf_idx] = NIX_MAX_BPF_COUNT_LEAF_LAYER;
369 if (mid_idx != ROC_NIX_BPF_LEVEL_IDX_INVALID)
370 count[mid_idx] = NIX_MAX_BPF_COUNT_MID_LAYER;
372 if (top_idx != ROC_NIX_BPF_LEVEL_IDX_INVALID)
373 count[top_idx] = NIX_MAX_BPF_COUNT_TOP_LAYER;
/* Allocate bandwidth profiles via mbox for each level selected in
 * lvl_mask, requesting per_lvl_cnt[] profiles per level.  On success the
 * allocated profile ids and counts are copied back into profs[] (indexed
 * by software level).  Not supported on cn9k.  Requested counts are
 * range-checked against the NIX_MAX_BPF_COUNT_* limits.
 */
379 roc_nix_bpf_alloc(struct roc_nix *roc_nix, uint8_t lvl_mask,
380 uint16_t per_lvl_cnt[ROC_NIX_BPF_LEVEL_MAX],
381 struct roc_nix_bpf_objs *profs)
383 uint8_t mask = lvl_mask & NIX_BPF_LEVEL_F_MASK;
384 struct mbox *mbox = get_mbox(roc_nix);
385 struct nix_bandprof_alloc_req *req;
386 struct nix_bandprof_alloc_rsp *rsp;
387 uint8_t leaf_idx, mid_idx, top_idx;
390 if (roc_model_is_cn9k())
391 return NIX_ERR_HW_NOTSUP;
394 return NIX_ERR_PARAM;
396 leaf_idx = roc_nix_bpf_level_to_idx(mask & ROC_NIX_BPF_LEVEL_F_LEAF);
397 mid_idx = roc_nix_bpf_level_to_idx(mask & ROC_NIX_BPF_LEVEL_F_MID);
398 top_idx = roc_nix_bpf_level_to_idx(mask & ROC_NIX_BPF_LEVEL_F_TOP);
400 if ((leaf_idx != ROC_NIX_BPF_LEVEL_IDX_INVALID) &&
401 (per_lvl_cnt[leaf_idx] > NIX_MAX_BPF_COUNT_LEAF_LAYER))
402 return NIX_ERR_INVALID_RANGE;
404 if ((mid_idx != ROC_NIX_BPF_LEVEL_IDX_INVALID) &&
405 (per_lvl_cnt[mid_idx] > NIX_MAX_BPF_COUNT_MID_LAYER))
406 return NIX_ERR_INVALID_RANGE;
408 if ((top_idx != ROC_NIX_BPF_LEVEL_IDX_INVALID) &&
409 (per_lvl_cnt[top_idx] > NIX_MAX_BPF_COUNT_TOP_LAYER))
410 return NIX_ERR_INVALID_RANGE;
412 req = mbox_alloc_msg_nix_bandprof_alloc(mbox);
/* Fill requested counts per HW layer (translated via sw_to_hw_lvl_map). */
416 if (leaf_idx != ROC_NIX_BPF_LEVEL_IDX_INVALID) {
417 req->prof_count[sw_to_hw_lvl_map[leaf_idx]] =
418 per_lvl_cnt[leaf_idx];
421 if (mid_idx != ROC_NIX_BPF_LEVEL_IDX_INVALID) {
422 req->prof_count[sw_to_hw_lvl_map[mid_idx]] =
423 per_lvl_cnt[mid_idx];
426 if (top_idx != ROC_NIX_BPF_LEVEL_IDX_INVALID) {
427 req->prof_count[sw_to_hw_lvl_map[top_idx]] =
428 per_lvl_cnt[top_idx];
431 rc = mbox_process_msg(mbox, (void *)&rsp);
/* Copy back what the kernel actually granted: per-level count plus the
 * individual profile ids.
 */
435 if (leaf_idx != ROC_NIX_BPF_LEVEL_IDX_INVALID) {
436 profs[leaf_idx].level = leaf_idx;
437 profs[leaf_idx].count =
438 rsp->prof_count[sw_to_hw_lvl_map[leaf_idx]];
439 for (i = 0; i < profs[leaf_idx].count; i++) {
440 profs[leaf_idx].ids[i] =
441 rsp->prof_idx[sw_to_hw_lvl_map[leaf_idx]][i];
445 if (mid_idx != ROC_NIX_BPF_LEVEL_IDX_INVALID) {
446 profs[mid_idx].level = mid_idx;
447 profs[mid_idx].count =
448 rsp->prof_count[sw_to_hw_lvl_map[mid_idx]];
449 for (i = 0; i < profs[mid_idx].count; i++) {
450 profs[mid_idx].ids[i] =
451 rsp->prof_idx[sw_to_hw_lvl_map[mid_idx]][i];
455 if (top_idx != ROC_NIX_BPF_LEVEL_IDX_INVALID) {
456 profs[top_idx].level = top_idx;
457 profs[top_idx].count =
458 rsp->prof_count[sw_to_hw_lvl_map[top_idx]];
459 for (i = 0; i < profs[top_idx].count; i++) {
460 profs[top_idx].ids[i] =
461 rsp->prof_idx[sw_to_hw_lvl_map[top_idx]][i];
/* Free the bandwidth profiles listed in profs[0..num_prof-1] via mbox.
 * Each entry's software level is translated to the HW layer and its
 * profile ids copied into the free request.
 * NOTE(review): the bound check rejects num_prof ==
 * NIX_RX_BAND_PROF_LAYER_MAX as well ('>=') -- confirm whether exactly
 * LAYER_MAX entries should be permitted ('>').
 */
470 roc_nix_bpf_free(struct roc_nix *roc_nix, struct roc_nix_bpf_objs *profs,
473 struct mbox *mbox = get_mbox(roc_nix);
474 struct nix_bandprof_free_req *req;
478 if (num_prof >= NIX_RX_BAND_PROF_LAYER_MAX)
479 return NIX_ERR_INVALID_RANGE;
481 req = mbox_alloc_msg_nix_bandprof_free(mbox);
485 for (i = 0; i < num_prof; i++) {
486 level = sw_to_hw_lvl_map[profs[i].level];
487 req->prof_count[level] = profs[i].count;
488 for (j = 0; j < profs[i].count; j++)
489 req->prof_idx[level][j] = profs[i].ids[j];
492 return mbox_process(mbox);
/* Free every bandwidth profile owned by this LF in one mbox call
 * (free_all flag).
 */
496 roc_nix_bpf_free_all(struct roc_nix *roc_nix)
498 struct mbox *mbox = get_mbox(roc_nix);
499 struct nix_bandprof_free_req *req;
501 req = mbox_alloc_msg_nix_bandprof_free(mbox);
505 req->free_all = true;
506 return mbox_process(mbox);
/* Configure bandwidth profile 'id' at the level given by lvl_flag with
 * the metering parameters in 'cfg', via an AQ context write.  Supports
 * RFC 2697 (srTCM), RFC 2698 (trTCM) and RFC 4115 algorithms.  Each
 * touched context field has its prof_mask bits set (all-ones) so only
 * those fields are written.  Not supported on cn9k.
 * NOTE(review): the return values of meter_rate_to_nix()/
 * meter_burst_to_nix() are ignored here, so out-of-range rates/bursts
 * silently program whatever exponent/mantissa the outputs hold --
 * confirm whether the conversion errors should be propagated.
 */
510 roc_nix_bpf_config(struct roc_nix *roc_nix, uint16_t id,
511 enum roc_nix_bpf_level_flag lvl_flag,
512 struct roc_nix_bpf_cfg *cfg)
514 uint64_t exponent_p = 0, mantissa_p = 0, div_exp_p = 0;
515 struct mbox *mbox = get_mbox(roc_nix);
516 struct nix_cn10k_aq_enq_req *aq;
519 if (roc_model_is_cn9k())
520 return NIX_ERR_HW_NOTSUP;
523 return NIX_ERR_PARAM;
525 level_idx = roc_nix_bpf_level_to_idx(lvl_flag);
526 if (level_idx == ROC_NIX_BPF_LEVEL_IDX_INVALID)
527 return NIX_ERR_PARAM;
529 aq = mbox_alloc_msg_nix_cn10k_aq_enq(mbox);
/* qidx encodes (HW layer << 14) | profile id for band-profile contexts. */
532 aq->qidx = (sw_to_hw_lvl_map[level_idx] << 14) | id;
533 aq->ctype = NIX_AQ_CTYPE_BAND_PROF;
534 aq->op = NIX_AQ_INSTOP_WRITE;
536 aq->prof.adjust_exponent = NIX_BPF_DEFAULT_ADJUST_EXPONENT;
537 aq->prof.adjust_mantissa = NIX_BPF_DEFAULT_ADJUST_MANTISSA;
/* Byte-length mode halves the adjust mantissa. */
538 if (cfg->lmode == ROC_NIX_BPF_LMODE_BYTE)
539 aq->prof.adjust_mantissa = NIX_BPF_DEFAULT_ADJUST_MANTISSA / 2;
541 aq->prof_mask.adjust_exponent = ~(aq->prof_mask.adjust_exponent);
542 aq->prof_mask.adjust_mantissa = ~(aq->prof_mask.adjust_mantissa);
/* RFC 2697 srTCM: single rate (CIR) with committed/excess bursts. */
545 case ROC_NIX_BPF_ALGO_2697:
546 meter_rate_to_nix(cfg->algo2697.cir, &exponent_p, &mantissa_p,
548 aq->prof.cir_mantissa = mantissa_p;
549 aq->prof.cir_exponent = exponent_p;
551 meter_burst_to_nix(cfg->algo2697.cbs, &exponent_p, &mantissa_p);
552 aq->prof.cbs_mantissa = mantissa_p;
553 aq->prof.cbs_exponent = exponent_p;
555 meter_burst_to_nix(cfg->algo2697.ebs, &exponent_p, &mantissa_p);
556 aq->prof.pebs_mantissa = mantissa_p;
557 aq->prof.pebs_exponent = exponent_p;
559 aq->prof_mask.cir_mantissa = ~(aq->prof_mask.cir_mantissa);
560 aq->prof_mask.cbs_mantissa = ~(aq->prof_mask.cbs_mantissa);
561 aq->prof_mask.pebs_mantissa = ~(aq->prof_mask.pebs_mantissa);
562 aq->prof_mask.cir_exponent = ~(aq->prof_mask.cir_exponent);
563 aq->prof_mask.cbs_exponent = ~(aq->prof_mask.cbs_exponent);
564 aq->prof_mask.pebs_exponent = ~(aq->prof_mask.pebs_exponent);
/* RFC 2698 trTCM: committed + peak rates and bursts. */
567 case ROC_NIX_BPF_ALGO_2698:
568 meter_rate_to_nix(cfg->algo2698.cir, &exponent_p, &mantissa_p,
570 aq->prof.cir_mantissa = mantissa_p;
571 aq->prof.cir_exponent = exponent_p;
573 meter_rate_to_nix(cfg->algo2698.pir, &exponent_p, &mantissa_p,
575 aq->prof.peir_mantissa = mantissa_p;
576 aq->prof.peir_exponent = exponent_p;
578 meter_burst_to_nix(cfg->algo2698.cbs, &exponent_p, &mantissa_p);
579 aq->prof.cbs_mantissa = mantissa_p;
580 aq->prof.cbs_exponent = exponent_p;
582 meter_burst_to_nix(cfg->algo2698.pbs, &exponent_p, &mantissa_p);
583 aq->prof.pebs_mantissa = mantissa_p;
584 aq->prof.pebs_exponent = exponent_p;
586 aq->prof_mask.cir_mantissa = ~(aq->prof_mask.cir_mantissa);
587 aq->prof_mask.peir_mantissa = ~(aq->prof_mask.peir_mantissa);
588 aq->prof_mask.cbs_mantissa = ~(aq->prof_mask.cbs_mantissa);
589 aq->prof_mask.pebs_mantissa = ~(aq->prof_mask.pebs_mantissa);
590 aq->prof_mask.cir_exponent = ~(aq->prof_mask.cir_exponent);
591 aq->prof_mask.peir_exponent = ~(aq->prof_mask.peir_exponent);
592 aq->prof_mask.cbs_exponent = ~(aq->prof_mask.cbs_exponent);
593 aq->prof_mask.pebs_exponent = ~(aq->prof_mask.pebs_exponent);
/* RFC 4115: committed + excess rates and bursts. */
596 case ROC_NIX_BPF_ALGO_4115:
597 meter_rate_to_nix(cfg->algo4115.cir, &exponent_p, &mantissa_p,
599 aq->prof.cir_mantissa = mantissa_p;
600 aq->prof.cir_exponent = exponent_p;
602 meter_rate_to_nix(cfg->algo4115.eir, &exponent_p, &mantissa_p,
604 aq->prof.peir_mantissa = mantissa_p;
605 aq->prof.peir_exponent = exponent_p;
607 meter_burst_to_nix(cfg->algo4115.cbs, &exponent_p, &mantissa_p);
608 aq->prof.cbs_mantissa = mantissa_p;
609 aq->prof.cbs_exponent = exponent_p;
611 meter_burst_to_nix(cfg->algo4115.ebs, &exponent_p, &mantissa_p);
612 aq->prof.pebs_mantissa = mantissa_p;
613 aq->prof.pebs_exponent = exponent_p;
615 aq->prof_mask.cir_mantissa = ~(aq->prof_mask.cir_mantissa);
616 aq->prof_mask.peir_mantissa = ~(aq->prof_mask.peir_mantissa);
617 aq->prof_mask.cbs_mantissa = ~(aq->prof_mask.cbs_mantissa);
618 aq->prof_mask.pebs_mantissa = ~(aq->prof_mask.pebs_mantissa);
620 aq->prof_mask.cir_exponent = ~(aq->prof_mask.cir_exponent);
621 aq->prof_mask.peir_exponent = ~(aq->prof_mask.peir_exponent);
622 aq->prof_mask.cbs_exponent = ~(aq->prof_mask.cbs_exponent);
623 aq->prof_mask.pebs_exponent = ~(aq->prof_mask.pebs_exponent);
627 return NIX_ERR_PARAM;
/* Common, algorithm-independent profile fields. */
630 aq->prof.lmode = cfg->lmode;
631 aq->prof.icolor = cfg->icolor;
632 aq->prof.pc_mode = cfg->pc_mode;
633 aq->prof.tnl_ena = cfg->tnl_ena;
634 aq->prof.gc_action = cfg->action[ROC_NIX_BPF_COLOR_GREEN];
635 aq->prof.yc_action = cfg->action[ROC_NIX_BPF_COLOR_YELLOW];
636 aq->prof.rc_action = cfg->action[ROC_NIX_BPF_COLOR_RED];
638 aq->prof_mask.lmode = ~(aq->prof_mask.lmode);
639 aq->prof_mask.icolor = ~(aq->prof_mask.icolor);
640 aq->prof_mask.pc_mode = ~(aq->prof_mask.pc_mode);
641 aq->prof_mask.tnl_ena = ~(aq->prof_mask.tnl_ena);
642 aq->prof_mask.gc_action = ~(aq->prof_mask.gc_action);
643 aq->prof_mask.yc_action = ~(aq->prof_mask.yc_action);
644 aq->prof_mask.rc_action = ~(aq->prof_mask.rc_action);
646 return mbox_process(mbox);
/* Attach (enable=true) or detach bandwidth profile 'id' to/from the RX
 * queue 'rq' by writing the RQ context's policer_ena and band_prof_id
 * fields.  Validates rq->qid against the LF's queue count.  Not
 * supported on cn9k.
 */
650 roc_nix_bpf_ena_dis(struct roc_nix *roc_nix, uint16_t id, struct roc_nix_rq *rq,
653 struct nix *nix = roc_nix_to_nix_priv(roc_nix);
654 struct mbox *mbox = get_mbox(roc_nix);
655 struct nix_cn10k_aq_enq_req *aq;
658 if (roc_model_is_cn9k())
659 return NIX_ERR_HW_NOTSUP;
661 if (rq->qid >= nix->nb_rx_queues)
662 return NIX_ERR_QUEUE_INVALID_RANGE;
664 aq = mbox_alloc_msg_nix_cn10k_aq_enq(mbox);
668 aq->ctype = NIX_AQ_CTYPE_RQ;
669 aq->op = NIX_AQ_INSTOP_WRITE;
671 aq->rq.policer_ena = enable;
672 aq->rq_mask.policer_ena = ~(aq->rq_mask.policer_ena);
674 aq->rq.band_prof_id = id;
675 aq->rq_mask.band_prof_id = ~(aq->rq_mask.band_prof_id);
678 rc = mbox_process(mbox);
/* Read bandwidth profile 'id' at the given level via an AQ context read
 * and print its contents through nix_lf_bpf_dump().  Debug helper; not
 * supported on cn9k.
 */
689 roc_nix_bpf_dump(struct roc_nix *roc_nix, uint16_t id,
690 enum roc_nix_bpf_level_flag lvl_flag)
692 struct mbox *mbox = get_mbox(roc_nix)
693 struct nix_cn10k_aq_enq_rsp *rsp;
694 struct nix_cn10k_aq_enq_req *aq;
698 if (roc_model_is_cn9k())
699 return NIX_ERR_HW_NOTSUP;
701 level_idx = roc_nix_bpf_level_to_idx(lvl_flag);
702 if (level_idx == ROC_NIX_BPF_LEVEL_IDX_INVALID)
703 return NIX_ERR_PARAM;
705 aq = mbox_alloc_msg_nix_cn10k_aq_enq(mbox);
708 aq->qidx = (sw_to_hw_lvl_map[level_idx] << 14 | id);
709 aq->ctype = NIX_AQ_CTYPE_BAND_PROF;
710 aq->op = NIX_AQ_INSTOP_READ;
711 rc = mbox_process_msg(mbox, (void *)&rsp);
713 plt_dump("============= band prof id =%d ===============", id);
714 nix_lf_bpf_dump(&rsp->prof);
/* Program a pre-color table (VLAN / inner DSCP / outer DSCP / generic)
 * and then update bandwidth profile 'id' so its pc_mode and tnl_ena
 * match the table just written.  Table size is validated against the
 * fixed per-mode size.  Not supported on cn9k.
 */
721 roc_nix_bpf_pre_color_tbl_setup(struct roc_nix *roc_nix, uint16_t id,
722 enum roc_nix_bpf_level_flag lvl_flag,
723 struct roc_nix_bpf_precolor *tbl)
725 struct mbox *mbox = get_mbox(roc_nix);
726 struct nix_cn10k_aq_enq_req *aq;
727 uint8_t pc_mode, tn_ena;
731 if (!tbl || !tbl->count)
732 return NIX_ERR_PARAM;
734 if (roc_model_is_cn9k())
735 return NIX_ERR_HW_NOTSUP;
737 level_idx = roc_nix_bpf_level_to_idx(lvl_flag);
738 if (level_idx == ROC_NIX_BPF_LEVEL_IDX_INVALID)
739 return NIX_ERR_PARAM;
/* Dispatch on table mode; each branch validates the table size, writes
 * the conversion registers, and records pc_mode/tn_ena for the profile.
 */
742 case ROC_NIX_BPF_PC_MODE_VLAN_INNER:
743 case ROC_NIX_BPF_PC_MODE_VLAN_OUTER:
744 if (tbl->count != NIX_BPF_PRECOLOR_VLAN_TABLE_SIZE) {
745 plt_err("Table size must be %d",
746 NIX_BPF_PRECOLOR_VLAN_TABLE_SIZE);
750 tn_ena = nix_precolor_vlan_table_update(roc_nix, tbl);
751 pc_mode = NIX_RX_BAND_PROF_PC_MODE_VLAN;
753 case ROC_NIX_BPF_PC_MODE_DSCP_INNER:
754 if (tbl->count != NIX_BPF_PRECOLOR_DSCP_TABLE_SIZE) {
755 plt_err("Table size must be %d",
756 NIX_BPF_PRECOLOR_DSCP_TABLE_SIZE);
760 tn_ena = nix_precolor_inner_dscp_table_update(roc_nix, tbl);
761 pc_mode = NIX_RX_BAND_PROF_PC_MODE_DSCP;
763 case ROC_NIX_BPF_PC_MODE_DSCP_OUTER:
764 if (tbl->count != NIX_BPF_PRECOLOR_DSCP_TABLE_SIZE) {
765 plt_err("Table size must be %d",
766 NIX_BPF_PRECOLOR_DSCP_TABLE_SIZE);
770 tn_ena = nix_precolor_outer_dscp_table_update(roc_nix, tbl);
771 pc_mode = NIX_RX_BAND_PROF_PC_MODE_DSCP;
773 case ROC_NIX_BPF_PC_MODE_GEN_INNER:
774 case ROC_NIX_BPF_PC_MODE_GEN_OUTER:
775 if (tbl->count != NIX_BPF_PRECOLOR_GEN_TABLE_SIZE) {
776 plt_err("Table size must be %d",
777 NIX_BPF_PRECOLOR_GEN_TABLE_SIZE);
782 tn_ena = nix_precolor_gen_table_update(roc_nix, tbl);
783 pc_mode = NIX_RX_BAND_PROF_PC_MODE_GEN;
790 /* Update corresponding bandwidth profile too */
791 aq = mbox_alloc_msg_nix_cn10k_aq_enq(mbox);
794 aq->qidx = (sw_to_hw_lvl_map[level_idx] << 14) | id;
795 aq->ctype = NIX_AQ_CTYPE_BAND_PROF;
796 aq->op = NIX_AQ_INSTOP_WRITE;
797 aq->prof.pc_mode = pc_mode;
798 aq->prof.tnl_ena = tn_ena;
799 aq->prof_mask.pc_mode = ~(aq->prof_mask.pc_mode);
800 aq->prof_mask.tnl_ena = ~(aq->prof_mask.tnl_ena);
802 return mbox_process(mbox);
/* Chain profile 'src_id' at the given level to 'dst_id' at the next
 * level (hierarchical metering): sets hl_en and band_prof_id in the
 * source profile's context.  Passing dst_id == ROC_NIX_BPF_ID_INVALID
 * breaks the chain (hl_en cleared).  Not supported on cn9k.
 */
809 roc_nix_bpf_connect(struct roc_nix *roc_nix,
810 enum roc_nix_bpf_level_flag lvl_flag, uint16_t src_id,
813 struct mbox *mbox = get_mbox(roc_nix);
814 struct nix_cn10k_aq_enq_req *aq;
817 if (roc_model_is_cn9k())
818 return NIX_ERR_HW_NOTSUP;
820 level_idx = roc_nix_bpf_level_to_idx(lvl_flag);
821 if (level_idx == ROC_NIX_BPF_LEVEL_IDX_INVALID)
822 return NIX_ERR_PARAM;
824 aq = mbox_alloc_msg_nix_cn10k_aq_enq(mbox);
827 aq->qidx = (sw_to_hw_lvl_map[level_idx] << 14) | src_id;
828 aq->ctype = NIX_AQ_CTYPE_BAND_PROF;
829 aq->op = NIX_AQ_INSTOP_WRITE;
831 if (dst_id == ROC_NIX_BPF_ID_INVALID) {
832 aq->prof.hl_en = false;
833 aq->prof_mask.hl_en = ~(aq->prof_mask.hl_en);
835 aq->prof.hl_en = true;
836 aq->prof.band_prof_id = dst_id;
837 aq->prof_mask.hl_en = ~(aq->prof_mask.hl_en);
838 aq->prof_mask.band_prof_id = ~(aq->prof_mask.band_prof_id);
841 return mbox_process(mbox);
/* Read per-color pass/drop packet and octet counters of bandwidth
 * profile 'id' at the given level, via an AQ context read.  Only the
 * counters whose flag is set in 'mask' are written into stats[], at the
 * index returned by roc_nix_bpf_stats_to_idx().  Not supported on cn9k.
 */
845 roc_nix_bpf_stats_read(struct roc_nix *roc_nix, uint16_t id, uint64_t mask,
846 enum roc_nix_bpf_level_flag lvl_flag,
847 uint64_t stats[ROC_NIX_BPF_STATS_MAX])
849 uint8_t yellow_pkt_pass, yellow_octs_pass, yellow_pkt_drop;
850 uint8_t green_octs_drop, yellow_octs_drop, red_octs_drop;
851 uint8_t green_pkt_pass, green_octs_pass, green_pkt_drop;
852 uint8_t red_pkt_pass, red_octs_pass, red_pkt_drop;
853 struct mbox *mbox = get_mbox(roc_nix);
854 struct nix_cn10k_aq_enq_rsp *rsp;
855 struct nix_cn10k_aq_enq_req *aq;
859 if (roc_model_is_cn9k())
860 return NIX_ERR_HW_NOTSUP;
862 level_idx = roc_nix_bpf_level_to_idx(lvl_flag);
863 if (level_idx == ROC_NIX_BPF_LEVEL_IDX_INVALID)
864 return NIX_ERR_PARAM;
866 aq = mbox_alloc_msg_nix_cn10k_aq_enq(mbox);
869 aq->qidx = (sw_to_hw_lvl_map[level_idx] << 14 | id);
870 aq->ctype = NIX_AQ_CTYPE_BAND_PROF;
871 aq->op = NIX_AQ_INSTOP_READ;
872 rc = mbox_process_msg(mbox, (void *)&rsp);
/* Resolve the destination index for each requested counter; unrequested
 * counters resolve to ROC_NIX_BPF_STATS_MAX and are skipped below.
 */
877 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_GREEN_PKT_F_PASS);
879 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_GREEN_OCTS_F_PASS);
881 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_GREEN_PKT_F_DROP);
883 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_GREEN_OCTS_F_DROP);
885 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_YELLOW_PKT_F_PASS);
887 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_YELLOW_OCTS_F_PASS);
889 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_YELLOW_PKT_F_DROP);
891 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_YELLOW_OCTS_F_DROP);
893 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_RED_PKT_F_PASS);
895 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_RED_OCTS_F_PASS);
897 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_RED_PKT_F_DROP);
899 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_RED_OCTS_F_DROP);
901 if (green_pkt_pass != ROC_NIX_BPF_STATS_MAX)
902 stats[green_pkt_pass] = rsp->prof.green_pkt_pass;
904 if (green_octs_pass != ROC_NIX_BPF_STATS_MAX)
905 stats[green_octs_pass] = rsp->prof.green_octs_pass;
907 if (green_pkt_drop != ROC_NIX_BPF_STATS_MAX)
908 stats[green_pkt_drop] = rsp->prof.green_pkt_drop;
910 if (green_octs_drop != ROC_NIX_BPF_STATS_MAX)
/* BUGFIX: previously read rsp->prof.green_octs_pass (copy/paste);
 * the drop slot must report green_octs_drop, matching every other
 * color/counter pair in this function.
 */
911 stats[green_octs_drop] = rsp->prof.green_octs_drop;
913 if (yellow_pkt_pass != ROC_NIX_BPF_STATS_MAX)
914 stats[yellow_pkt_pass] = rsp->prof.yellow_pkt_pass;
916 if (yellow_octs_pass != ROC_NIX_BPF_STATS_MAX)
917 stats[yellow_octs_pass] = rsp->prof.yellow_octs_pass;
919 if (yellow_pkt_drop != ROC_NIX_BPF_STATS_MAX)
920 stats[yellow_pkt_drop] = rsp->prof.yellow_pkt_drop;
922 if (yellow_octs_drop != ROC_NIX_BPF_STATS_MAX)
923 stats[yellow_octs_drop] = rsp->prof.yellow_octs_drop;
925 if (red_pkt_pass != ROC_NIX_BPF_STATS_MAX)
926 stats[red_pkt_pass] = rsp->prof.red_pkt_pass;
928 if (red_octs_pass != ROC_NIX_BPF_STATS_MAX)
929 stats[red_octs_pass] = rsp->prof.red_octs_pass;
931 if (red_pkt_drop != ROC_NIX_BPF_STATS_MAX)
932 stats[red_pkt_drop] = rsp->prof.red_pkt_drop;
934 if (red_octs_drop != ROC_NIX_BPF_STATS_MAX)
935 stats[red_octs_drop] = rsp->prof.red_octs_drop;
/* Reset (zero) the per-color pass/drop counters selected by 'mask' in
 * bandwidth profile 'id' at the given level, via an AQ context write
 * with the corresponding prof_mask bits set.  Not supported on cn9k.
 */
941 roc_nix_bpf_stats_reset(struct roc_nix *roc_nix, uint16_t id, uint64_t mask,
942 enum roc_nix_bpf_level_flag lvl_flag)
944 struct mbox *mbox = get_mbox(roc_nix);
945 struct nix_cn10k_aq_enq_req *aq;
948 if (roc_model_is_cn9k())
949 return NIX_ERR_HW_NOTSUP;
951 level_idx = roc_nix_bpf_level_to_idx(lvl_flag);
952 if (level_idx == ROC_NIX_BPF_LEVEL_IDX_INVALID)
953 return NIX_ERR_PARAM;
955 aq = mbox_alloc_msg_nix_cn10k_aq_enq(mbox);
958 aq->qidx = (sw_to_hw_lvl_map[level_idx] << 14 | id);
959 aq->ctype = NIX_AQ_CTYPE_BAND_PROF;
960 aq->op = NIX_AQ_INSTOP_WRITE;
962 if (mask & ROC_NIX_BPF_GREEN_PKT_F_PASS) {
963 aq->prof.green_pkt_pass = 0;
964 aq->prof_mask.green_pkt_pass = ~(aq->prof_mask.green_pkt_pass);
966 if (mask & ROC_NIX_BPF_GREEN_OCTS_F_PASS) {
967 aq->prof.green_octs_pass = 0;
968 aq->prof_mask.green_octs_pass =
969 ~(aq->prof_mask.green_octs_pass);
971 if (mask & ROC_NIX_BPF_GREEN_PKT_F_DROP) {
972 aq->prof.green_pkt_drop = 0;
973 aq->prof_mask.green_pkt_drop = ~(aq->prof_mask.green_pkt_drop);
975 if (mask & ROC_NIX_BPF_GREEN_OCTS_F_DROP) {
976 aq->prof.green_octs_drop = 0;
977 aq->prof_mask.green_octs_drop =
978 ~(aq->prof_mask.green_octs_drop);
980 if (mask & ROC_NIX_BPF_YELLOW_PKT_F_PASS) {
981 aq->prof.yellow_pkt_pass = 0;
982 aq->prof_mask.yellow_pkt_pass =
983 ~(aq->prof_mask.yellow_pkt_pass);
985 if (mask & ROC_NIX_BPF_YELLOW_OCTS_F_PASS) {
986 aq->prof.yellow_octs_pass = 0;
987 aq->prof_mask.yellow_octs_pass =
988 ~(aq->prof_mask.yellow_octs_pass);
990 if (mask & ROC_NIX_BPF_YELLOW_PKT_F_DROP) {
991 aq->prof.yellow_pkt_drop = 0;
992 aq->prof_mask.yellow_pkt_drop =
993 ~(aq->prof_mask.yellow_pkt_drop);
995 if (mask & ROC_NIX_BPF_YELLOW_OCTS_F_DROP) {
996 aq->prof.yellow_octs_drop = 0;
997 aq->prof_mask.yellow_octs_drop =
998 ~(aq->prof_mask.yellow_octs_drop);
1000 if (mask & ROC_NIX_BPF_RED_PKT_F_PASS) {
1001 aq->prof.red_pkt_pass = 0;
1002 aq->prof_mask.red_pkt_pass = ~(aq->prof_mask.red_pkt_pass);
1004 if (mask & ROC_NIX_BPF_RED_OCTS_F_PASS) {
1005 aq->prof.red_octs_pass = 0;
1006 aq->prof_mask.red_octs_pass = ~(aq->prof_mask.red_octs_pass);
1008 if (mask & ROC_NIX_BPF_RED_PKT_F_DROP) {
1009 aq->prof.red_pkt_drop = 0;
1010 aq->prof_mask.red_pkt_drop = ~(aq->prof_mask.red_pkt_drop);
1012 if (mask & ROC_NIX_BPF_RED_OCTS_F_DROP) {
1013 aq->prof.red_octs_drop = 0;
1014 aq->prof_mask.red_octs_drop = ~(aq->prof_mask.red_octs_drop);
1017 return mbox_process(mbox);
/* Read LF-level (aggregate) policer statistics from the NIX LF RX stat
 * registers for each counter requested in 'mask', storing into stats[]
 * at the index from roc_nix_bpf_stats_to_idx().
 * NOTE(review): the color/counter -> NIX_STAT_LF_RX_* register mapping
 * below looks offset relative to the names (e.g. green_pkt_pass reads
 * RX_GC_OCTS_PASSED, green_octs_pass reads RX_YC_PKTS_PASSED).  This
 * may be intentional to match the HW stat-register ordering, but it
 * should be verified against the hardware reference manual.
 */
1021 roc_nix_bpf_lf_stats_read(struct roc_nix *roc_nix, uint64_t mask,
1022 uint64_t stats[ROC_NIX_BPF_STATS_MAX])
1024 uint8_t yellow_pkt_pass, yellow_octs_pass, yellow_pkt_drop;
1025 uint8_t green_octs_drop, yellow_octs_drop, red_octs_drop;
1026 uint8_t green_pkt_pass, green_octs_pass, green_pkt_drop;
1027 uint8_t red_pkt_pass, red_octs_pass, red_pkt_drop;
1028 struct nix *nix = roc_nix_to_nix_priv(roc_nix);
/* Resolve destination indices; unrequested counters resolve to
 * ROC_NIX_BPF_STATS_MAX and are skipped below.
 */
1031 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_GREEN_PKT_F_PASS);
1033 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_GREEN_OCTS_F_PASS);
1035 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_GREEN_PKT_F_DROP);
1037 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_GREEN_OCTS_F_DROP);
1039 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_YELLOW_PKT_F_PASS);
1041 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_YELLOW_OCTS_F_PASS);
1043 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_YELLOW_PKT_F_DROP);
1045 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_YELLOW_OCTS_F_DROP);
1047 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_RED_PKT_F_PASS);
1049 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_RED_OCTS_F_PASS);
1051 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_RED_PKT_F_DROP);
1053 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_RED_OCTS_F_DROP);
1055 if (green_pkt_pass != ROC_NIX_BPF_STATS_MAX) {
1056 stats[green_pkt_pass] =
1057 NIX_RD_STATS(NIX_STAT_LF_RX_RX_GC_OCTS_PASSED);
1060 if (green_octs_pass != ROC_NIX_BPF_STATS_MAX) {
1061 stats[green_octs_pass] =
1062 NIX_RD_STATS(NIX_STAT_LF_RX_RX_YC_PKTS_PASSED);
1065 if (green_pkt_drop != ROC_NIX_BPF_STATS_MAX) {
1066 stats[green_pkt_drop] =
1067 NIX_RD_STATS(NIX_STAT_LF_RX_RX_GC_OCTS_DROP);
1070 if (green_octs_drop != ROC_NIX_BPF_STATS_MAX) {
1071 stats[green_octs_drop] =
1072 NIX_RD_STATS(NIX_STAT_LF_RX_RX_YC_PKTS_DROP);
1075 if (yellow_pkt_pass != ROC_NIX_BPF_STATS_MAX) {
1076 stats[yellow_pkt_pass] =
1077 NIX_RD_STATS(NIX_STAT_LF_RX_RX_GC_PKTS_PASSED);
1080 if (yellow_octs_pass != ROC_NIX_BPF_STATS_MAX) {
1081 stats[yellow_octs_pass] =
1082 NIX_RD_STATS(NIX_STAT_LF_RX_RX_RC_OCTS_PASSED);
1085 if (yellow_pkt_drop != ROC_NIX_BPF_STATS_MAX) {
1086 stats[yellow_pkt_drop] =
1087 NIX_RD_STATS(NIX_STAT_LF_RX_RX_GC_PKTS_DROP);
1090 if (yellow_octs_drop != ROC_NIX_BPF_STATS_MAX) {
1091 stats[yellow_octs_drop] =
1092 NIX_RD_STATS(NIX_STAT_LF_RX_RX_RC_OCTS_DROP);
1095 if (red_pkt_pass != ROC_NIX_BPF_STATS_MAX) {
1096 stats[red_pkt_pass] =
1097 NIX_RD_STATS(NIX_STAT_LF_RX_RX_YC_OCTS_PASSED);
1100 if (red_octs_pass != ROC_NIX_BPF_STATS_MAX) {
1101 stats[red_octs_pass] =
1102 NIX_RD_STATS(NIX_STAT_LF_RX_RX_RC_PKTS_PASSED);
1105 if (red_pkt_drop != ROC_NIX_BPF_STATS_MAX) {
1106 stats[red_pkt_drop] =
1107 NIX_RD_STATS(NIX_STAT_LF_RX_RX_YC_OCTS_DROP);
1110 if (red_octs_drop != ROC_NIX_BPF_STATS_MAX) {
1111 stats[red_octs_drop] =
1112 NIX_RD_STATS(NIX_STAT_LF_RX_RX_RC_PKTS_DROP);
/* Clear the LF-level policer statistics registers selected by 'mask'.
 * NOTE(review): unlike roc_nix_bpf_lf_stats_read(), which indexes the
 * stat registers with NIX_STAT_LF_RX_* values, this function passes the
 * ROC_NIX_BPF_*_F_* flag values straight into NIX_RST_STATS() (i.e. as
 * NIX_LF_RX_STATX register indices).  Verify that each flag's numeric
 * value maps to the intended stat register; otherwise the wrong
 * registers are cleared.
 */
1119 roc_nix_bpf_lf_stats_reset(struct roc_nix *roc_nix, uint64_t mask)
1121 struct nix *nix = roc_nix_to_nix_priv(roc_nix);
1123 if (mask & ROC_NIX_BPF_GREEN_PKT_F_PASS)
1124 NIX_RST_STATS(ROC_NIX_BPF_GREEN_PKT_F_PASS);
1125 if (mask & ROC_NIX_BPF_GREEN_OCTS_F_PASS)
1126 NIX_RST_STATS(ROC_NIX_BPF_GREEN_OCTS_F_PASS);
1127 if (mask & ROC_NIX_BPF_GREEN_PKT_F_DROP)
1128 NIX_RST_STATS(ROC_NIX_BPF_GREEN_PKT_F_DROP);
1129 if (mask & ROC_NIX_BPF_GREEN_OCTS_F_DROP)
1130 NIX_RST_STATS(ROC_NIX_BPF_GREEN_OCTS_F_DROP);
1131 if (mask & ROC_NIX_BPF_YELLOW_PKT_F_PASS)
1132 NIX_RST_STATS(ROC_NIX_BPF_YELLOW_PKT_F_PASS);
1133 if (mask & ROC_NIX_BPF_YELLOW_OCTS_F_PASS)
1134 NIX_RST_STATS(ROC_NIX_BPF_YELLOW_OCTS_F_PASS);
1135 if (mask & ROC_NIX_BPF_YELLOW_PKT_F_DROP)
1136 NIX_RST_STATS(ROC_NIX_BPF_YELLOW_PKT_F_DROP);
1137 if (mask & ROC_NIX_BPF_YELLOW_OCTS_F_DROP)
1138 NIX_RST_STATS(ROC_NIX_BPF_YELLOW_OCTS_F_DROP);
1139 if (mask & ROC_NIX_BPF_RED_PKT_F_PASS)
1140 NIX_RST_STATS(ROC_NIX_BPF_RED_PKT_F_PASS);
1141 if (mask & ROC_NIX_BPF_RED_OCTS_F_PASS)
1142 NIX_RST_STATS(ROC_NIX_BPF_RED_OCTS_F_PASS);
1143 if (mask & ROC_NIX_BPF_RED_PKT_F_DROP)
1144 NIX_RST_STATS(ROC_NIX_BPF_RED_PKT_F_DROP);
1145 if (mask & ROC_NIX_BPF_RED_OCTS_F_DROP)
1146 NIX_RST_STATS(ROC_NIX_BPF_RED_OCTS_F_DROP);