1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(C) 2021 Marvell.
/* Per-HW-layer bandwidth profile capacity (number of profiles per layer). */
8 #define NIX_MAX_BPF_COUNT_LEAF_LAYER 64
9 #define NIX_MAX_BPF_COUNT_MID_LAYER 8
10 #define NIX_MAX_BPF_COUNT_TOP_LAYER 1
/* Entry counts of the pre-color lookup tables (2 bits of color per entry). */
12 #define NIX_BPF_PRECOLOR_GEN_TABLE_SIZE 16
13 #define NIX_BPF_PRECOLOR_VLAN_TABLE_SIZE 16
14 #define NIX_BPF_PRECOLOR_DSCP_TABLE_SIZE 64
/* All valid level selection flags (leaf | mid | top). */
16 #define NIX_BPF_LEVEL_F_MASK \
17 (ROC_NIX_BPF_LEVEL_F_LEAF | ROC_NIX_BPF_LEVEL_F_MID | \
18 ROC_NIX_BPF_LEVEL_F_TOP)
/* Read / clear one NIX LF RX stat register; both expect a local
 * 'struct nix *nix' in scope at the expansion site.
 */
20 #define NIX_RD_STATS(val) plt_read64(nix->base + NIX_LF_RX_STATX(val))
21 #define NIX_RST_STATS(val) plt_write64(0, nix->base + NIX_LF_RX_STATX(val))
/* Map SW level index (0=leaf, 1=mid, 2=top) to the HW band-profile layer id. */
23 static uint8_t sw_to_hw_lvl_map[] = {NIX_RX_BAND_PROF_LAYER_LEAF,
24 NIX_RX_BAND_PROF_LAYER_MIDDLE,
25 NIX_RX_BAND_PROF_LAYER_TOP};
/* Resolve the mailbox handle of the device backing @roc_nix.
 * NOTE(review): listing is truncated here — the return statement is not
 * visible; presumably returns dev->mbox.
 */
27 static inline struct mbox *
28 get_mbox(struct roc_nix *roc_nix)
30 struct nix *nix = roc_nix_to_nix_priv(roc_nix);
31 struct dev *dev = &nix->dev;
/* Convert a meter rate (bytes or packets per second, depending on lmode)
 * into the NIX HW (exponent, mantissa, div_exp) encoding.
 *
 * On success the chosen exponent/mantissa/div_exp are stored through the
 * output pointers and the actual (quantized) rate realized by the HW is
 * returned.  NOTE(review): the listing is truncated — the out-of-range
 * return paths, div_exp initialization and loop decrement statements are
 * not visible here.
 */
36 static inline uint64_t
37 meter_rate_to_nix(uint64_t value, uint64_t *exponent_p, uint64_t *mantissa_p,
38 uint64_t *div_exp_p, uint32_t timeunit_p)
40 uint64_t div_exp, exponent, mantissa;
41 uint32_t time_us = timeunit_p;
/* Reject rates outside the representable HW range. */
44 if (value < NIX_BPF_RATE_MIN || value > NIX_BPF_RATE_MAX)
/* Low rates: keep exponent at its base and search div_exp/mantissa. */
47 if (value <= NIX_BPF_RATE(time_us, 0, 0, 0)) {
48 /* Calculate rate div_exp and mantissa using
49 * the following formula:
51 * value = (2E6 * (256 + mantissa)
52 * / ((1 << div_exp) * 256))
56 mantissa = NIX_BPF_MAX_RATE_MANTISSA;
/* Walk div_exp down until the divisor no longer overshoots. */
58 while (value < (NIX_BPF_RATE_CONST / (1 << div_exp)))
/* Then shrink the mantissa to the largest value still <= target. */
61 while (value < ((NIX_BPF_RATE_CONST * (256 + mantissa)) /
62 ((1 << div_exp) * 256)))
/* High rates: div_exp stays 0; search exponent/mantissa instead. */
65 /* Calculate rate exponent and mantissa using
66 * the following formula:
68 * value = (2E6 * ((256 + mantissa) << exponent)) / 256
72 exponent = NIX_BPF_MAX_RATE_EXPONENT;
73 mantissa = NIX_BPF_MAX_RATE_MANTISSA;
75 while (value < (NIX_BPF_RATE_CONST * (1 << exponent)))
79 ((NIX_BPF_RATE_CONST * ((256 + mantissa) << exponent)) /
/* Sanity-check the search results against HW field widths. */
84 if (div_exp > NIX_BPF_MAX_RATE_DIV_EXP ||
85 exponent > NIX_BPF_MAX_RATE_EXPONENT ||
86 mantissa > NIX_BPF_MAX_RATE_MANTISSA)
92 *exponent_p = exponent;
94 *mantissa_p = mantissa;
96 /* Calculate real rate value */
97 return NIX_BPF_RATE(time_us, exponent, mantissa, div_exp);
/* Convert a burst size into the NIX HW (exponent, mantissa) encoding and
 * return the actual (quantized) burst the HW will use.
 * NOTE(review): listing truncated — the out-of-range return path and the
 * loop decrement statements are not visible.
 */
100 static inline uint64_t
101 meter_burst_to_nix(uint64_t value, uint64_t *exponent_p, uint64_t *mantissa_p)
103 uint64_t exponent, mantissa;
105 if (value < NIX_BPF_BURST_MIN || value > NIX_BPF_BURST_MAX)
108 /* Calculate burst exponent and mantissa using
109 * the following formula:
111 * value = (((256 + mantissa) << (exponent + 1)
/* Start from the maxima and walk down to the largest encodable value
 * that does not exceed the requested burst.
 */
115 exponent = NIX_BPF_MAX_BURST_EXPONENT;
116 mantissa = NIX_BPF_MAX_BURST_MANTISSA;
118 while (value < (1ull << (exponent + 1)))
121 while (value < ((256 + mantissa) << (exponent + 1)) / 256)
124 if (exponent > NIX_BPF_MAX_BURST_EXPONENT ||
125 mantissa > NIX_BPF_MAX_BURST_MANTISSA)
129 *exponent_p = exponent;
131 *mantissa_p = mantissa;
133 return NIX_BPF_BURST(exponent, mantissa);
/* Pretty-print every field of a HW band-profile context (debug aid).
 * Word-by-word (W0..W15) dump of struct nix_band_prof_s.
 * NOTE(review): listing truncated — some plt_dump() argument lines
 * (e.g. the second argument of the W1 hl_en/band_prof_id print) are
 * not visible.
 */
137 nix_lf_bpf_dump(__io struct nix_band_prof_s *bpf)
139 plt_dump("W0: cir_mantissa \t\t\t%d\nW0: pebs_mantissa \t\t\t0x%03x",
140 bpf->cir_mantissa, bpf->pebs_mantissa);
141 plt_dump("W0: peir_mantissa \t\t\t\t%d\nW0: cbs_exponent \t\t\t%d",
142 bpf->peir_mantissa, bpf->cbs_exponent);
143 plt_dump("W0: cir_exponent \t\t\t%d\nW0: pebs_exponent \t\t\t%d",
144 bpf->cir_exponent, bpf->pebs_exponent);
145 plt_dump("W0: peir_exponent \t\t\t%d\n", bpf->peir_exponent);
146 plt_dump("W0: tnl_ena \t\t\t%d\n", bpf->tnl_ena);
147 plt_dump("W0: icolor \t\t\t%d\n", bpf->icolor);
148 plt_dump("W0: pc_mode \t\t\t%d\n", bpf->pc_mode);
149 plt_dump("W1: hl_en \t\t%d\nW1: band_prof_id \t\t%d", bpf->hl_en,
151 plt_dump("W1: meter_algo \t\t%d\nW1: rc_action \t\t%d", bpf->meter_algo,
153 plt_dump("W1: yc_action \t\t\t%d\nW1: gc_action \t\t\t%d",
154 bpf->yc_action, bpf->gc_action);
155 plt_dump("W1: adjust_mantissa\t\t\t%d\nW1: adjust_exponent \t\t\t%d",
156 bpf->adjust_mantissa, bpf->adjust_exponent);
157 plt_dump("W1: rdiv \t\t\t%d\n", bpf->rdiv);
158 plt_dump("W1: l_select \t\t%d\nW2: lmode \t\t%d", bpf->l_sellect,
160 plt_dump("W1: cbs_mantissa \t\t\t%d\n", bpf->cbs_mantissa);
161 plt_dump("W2: tsa \t\t\t0x%" PRIx64 "\n", (uint64_t)bpf->ts);
162 plt_dump("W3: c_accum \t\t%d\nW3: pe_accum \t\t%d", bpf->c_accum,
/* W4..W15: per-color packet/octet pass and drop counters. */
164 plt_dump("W4: green_pkt_pass \t\t\t0x%" PRIx64 "",
165 (uint64_t)bpf->green_pkt_pass);
166 plt_dump("W5: yellow_pkt_pass \t\t\t0x%" PRIx64 "",
167 (uint64_t)bpf->yellow_pkt_pass);
168 plt_dump("W6: red_pkt_pass \t\t\t0x%" PRIx64 "",
169 (uint64_t)bpf->red_pkt_pass);
170 plt_dump("W7: green_octs_pass \t\t\t0x%" PRIx64 "",
171 (uint64_t)bpf->green_octs_pass);
172 plt_dump("W8: yellow_octs_pass \t\t\t0x%" PRIx64 "",
173 (uint64_t)bpf->yellow_octs_pass);
174 plt_dump("W9: red_octs_pass \t\t\t0x%" PRIx64 "",
175 (uint64_t)bpf->red_octs_pass);
176 plt_dump("W10: green_pkt_drop \t\t\t0x%" PRIx64 "",
177 (uint64_t)bpf->green_pkt_drop);
178 plt_dump("W11: yellow_pkt_drop \t\t\t0x%" PRIx64 "",
179 (uint64_t)bpf->yellow_pkt_drop);
180 plt_dump("W12: red_pkt_drop \t\t\t0x%" PRIx64 "",
181 (uint64_t)bpf->red_pkt_drop);
182 plt_dump("W13: green_octs_drop \t\t\t0x%" PRIx64 "",
183 (uint64_t)bpf->green_octs_drop);
184 plt_dump("W14: yellow_octs_drop \t\t\t0x%" PRIx64 "",
185 (uint64_t)bpf->yellow_octs_drop);
186 plt_dump("W15: red_octs_drop \t\t\t0x%" PRIx64 "",
187 (uint64_t)bpf->red_octs_drop);
/* Write one pre-color conversion table register at LF-base + @off.
 * The actual register write is intentionally commented out (see FIXME):
 * writing this register currently crashes the kernel, so this helper
 * only computes the target address today.
 */
191 nix_precolor_conv_table_write(struct roc_nix *roc_nix, uint64_t val,
194 struct nix *nix = roc_nix_to_nix_priv(roc_nix);
197 addr = PLT_PTR_ADD(nix->base, off);
198 /* FIXME: Currently writing to this register throwing kernel dump.
199 * plt_write64(val, addr);
/* Program the VLAN PCP -> color conversion table (2 bits per entry,
 * packed LSB-first).  Inner vs outer VLAN selects which register is
 * written.  NOTE(review): listing truncated — the return value (used as
 * tnl_ena by the caller) is not visible here.
 */
206 nix_precolor_vlan_table_update(struct roc_nix *roc_nix,
207 struct roc_nix_bpf_precolor *tbl)
/* Pack tbl->count 2-bit colors into one 64-bit register value. */
213 for (i = 0; i < tbl->count; i++)
214 val |= (((uint64_t)tbl->color[i]) << (2 * i));
216 if (tbl->mode == ROC_NIX_BPF_PC_MODE_VLAN_INNER) {
217 off = NIX_LF_RX_VLAN1_COLOR_CONV;
220 off = NIX_LF_RX_VLAN0_COLOR_CONV;
224 nix_precolor_conv_table_write(roc_nix, val, off);
/* Program the inner-IP DSCP -> color table.  64 DSCP entries at 2 bits
 * each need two 64-bit registers: the first half goes to _LO, the second
 * half to _HI.  Note the second loop reuses 'i' from the first so it
 * continues from tbl->count/2.  NOTE(review): return value (tnl_ena for
 * the caller) not visible in this truncated listing.
 */
229 nix_precolor_inner_dscp_table_update(struct roc_nix *roc_nix,
230 struct roc_nix_bpf_precolor *tbl)
232 uint64_t val_lo = 0, val_hi = 0, i, j;
234 for (i = 0, j = 0; i < (tbl->count / 2); i++, j++)
235 val_lo |= (((uint64_t)tbl->color[i]) << (2 * j));
237 for (j = 0; i < tbl->count; i++, j++)
238 val_hi |= (((uint64_t)tbl->color[i]) << (2 * j));
240 nix_precolor_conv_table_write(roc_nix, val_lo,
241 NIX_LF_RX_IIP_COLOR_CONV_LO);
242 nix_precolor_conv_table_write(roc_nix, val_hi,
243 NIX_LF_RX_IIP_COLOR_CONV_HI);
/* Program the outer-IP DSCP -> color table; identical packing scheme to
 * the inner-DSCP variant but targets the OIP register pair.
 * NOTE(review): return value (tnl_ena for the caller) not visible in
 * this truncated listing.
 */
249 nix_precolor_outer_dscp_table_update(struct roc_nix *roc_nix,
250 struct roc_nix_bpf_precolor *tbl)
252 uint64_t val_lo = 0, val_hi = 0, i, j;
254 for (i = 0, j = 0; i < (tbl->count / 2); i++, j++)
255 val_lo |= (((uint64_t)tbl->color[i]) << (2 * j));
257 for (j = 0; i < tbl->count; i++, j++)
258 val_hi |= (((uint64_t)tbl->color[i]) << (2 * j));
260 nix_precolor_conv_table_write(roc_nix, val_lo,
261 NIX_LF_RX_OIP_COLOR_CONV_LO);
262 nix_precolor_conv_table_write(roc_nix, val_hi,
263 NIX_LF_RX_OIP_COLOR_CONV_HI);
/* Program the generic pre-color table; inner mode selects conversion
 * register 1, otherwise register 0.  NOTE(review): return value (tnl_ena
 * for the caller) not visible in this truncated listing.
 */
269 nix_precolor_gen_table_update(struct roc_nix *roc_nix,
270 struct roc_nix_bpf_precolor *tbl)
276 for (i = 0; i < tbl->count; i++)
277 val |= (((uint64_t)tbl->color[i]) << (2 * i));
279 if (tbl->mode == ROC_NIX_BPF_PC_MODE_GEN_INNER) {
280 off = NIX_LF_RX_GEN_COLOR_CONVX(1);
283 off = NIX_LF_RX_GEN_COLOR_CONVX(0);
287 nix_precolor_conv_table_write(roc_nix, val, off);
/* Map a single level flag (leaf/mid/top) to its SW index; any other
 * value yields ROC_NIX_BPF_LEVEL_IDX_INVALID.  NOTE(review): the idx
 * assignments in each branch are not visible in this truncated listing.
 */
292 roc_nix_bpf_level_to_idx(enum roc_nix_bpf_level_flag level_f)
296 if (level_f & ROC_NIX_BPF_LEVEL_F_LEAF)
298 else if (level_f & ROC_NIX_BPF_LEVEL_F_MID)
300 else if (level_f & ROC_NIX_BPF_LEVEL_F_TOP)
303 idx = ROC_NIX_BPF_LEVEL_IDX_INVALID;
/* Map a single per-color stats flag to its index in the stats[] array;
 * any other value yields ROC_NIX_BPF_STATS_MAX (invalid).
 * NOTE(review): the per-branch idx assignments are not visible in this
 * truncated listing.
 */
308 roc_nix_bpf_stats_to_idx(enum roc_nix_bpf_stats level_f)
312 if (level_f & ROC_NIX_BPF_GREEN_PKT_F_PASS)
314 else if (level_f & ROC_NIX_BPF_GREEN_OCTS_F_PASS)
316 else if (level_f & ROC_NIX_BPF_GREEN_PKT_F_DROP)
318 else if (level_f & ROC_NIX_BPF_GREEN_OCTS_F_DROP)
320 else if (level_f & ROC_NIX_BPF_YELLOW_PKT_F_PASS)
322 else if (level_f & ROC_NIX_BPF_YELLOW_OCTS_F_PASS)
324 else if (level_f & ROC_NIX_BPF_YELLOW_PKT_F_DROP)
326 else if (level_f & ROC_NIX_BPF_YELLOW_OCTS_F_DROP)
328 else if (level_f & ROC_NIX_BPF_RED_PKT_F_PASS)
330 else if (level_f & ROC_NIX_BPF_RED_OCTS_F_PASS)
332 else if (level_f & ROC_NIX_BPF_RED_PKT_F_DROP)
334 else if (level_f & ROC_NIX_BPF_RED_OCTS_F_DROP)
337 idx = ROC_NIX_BPF_STATS_MAX;
/* Query the policer time unit from the AF via mailbox.  Not supported on
 * CN9K.  NOTE(review): mailbox alloc/rc error-handling lines are not
 * visible in this truncated listing.
 */
342 roc_nix_bpf_timeunit_get(struct roc_nix *roc_nix, uint32_t *time_unit)
344 struct nix_bandprof_get_hwinfo_rsp *rsp;
345 struct mbox *mbox = get_mbox(roc_nix);
349 if (roc_model_is_cn9k())
350 return NIX_ERR_HW_NOTSUP;
352 req = mbox_alloc_msg_nix_bandprof_get_hwinfo(mbox);
356 rc = mbox_process_msg(mbox, (void *)&rsp);
360 *time_unit = rsp->policer_timeunit;
/* Fetch from the AF how many bandwidth profiles exist per requested
 * level.  Flags outside NIX_BPF_LEVEL_F_MASK are silently ignored; for
 * each requested level, count[] is filled at the SW index of that level.
 */
367 roc_nix_bpf_count_get(struct roc_nix *roc_nix, uint8_t lvl_mask,
368 uint16_t count[ROC_NIX_BPF_LEVEL_MAX])
370 uint8_t mask = lvl_mask & NIX_BPF_LEVEL_F_MASK;
371 struct nix_bandprof_get_hwinfo_rsp *rsp;
372 struct mbox *mbox = get_mbox(roc_nix);
373 uint8_t leaf_idx, mid_idx, top_idx;
377 if (roc_model_is_cn9k())
378 return NIX_ERR_HW_NOTSUP;
/* Empty mask after filtering -> nothing valid was requested. */
381 return NIX_ERR_PARAM;
383 req = mbox_alloc_msg_nix_bandprof_get_hwinfo(mbox);
387 rc = mbox_process_msg(mbox, (void *)&rsp);
/* Each level maps to IDX_INVALID when its flag was not requested. */
391 leaf_idx = roc_nix_bpf_level_to_idx(mask & ROC_NIX_BPF_LEVEL_F_LEAF);
392 mid_idx = roc_nix_bpf_level_to_idx(mask & ROC_NIX_BPF_LEVEL_F_MID);
393 top_idx = roc_nix_bpf_level_to_idx(mask & ROC_NIX_BPF_LEVEL_F_TOP);
395 if (leaf_idx != ROC_NIX_BPF_LEVEL_IDX_INVALID)
396 count[leaf_idx] = rsp->prof_count[sw_to_hw_lvl_map[leaf_idx]];
398 if (mid_idx != ROC_NIX_BPF_LEVEL_IDX_INVALID)
399 count[mid_idx] = rsp->prof_count[sw_to_hw_lvl_map[mid_idx]];
401 if (top_idx != ROC_NIX_BPF_LEVEL_IDX_INVALID)
402 count[top_idx] = rsp->prof_count[sw_to_hw_lvl_map[top_idx]];
/* Allocate bandwidth profiles from the AF for each requested level.
 *
 * @lvl_mask selects levels (extra bits ignored); @per_lvl_cnt gives the
 * number of profiles wanted per level, bounded by the per-layer maxima;
 * @profs is filled with the allocated profile ids per level on success.
 */
409 roc_nix_bpf_alloc(struct roc_nix *roc_nix, uint8_t lvl_mask,
410 uint16_t per_lvl_cnt[ROC_NIX_BPF_LEVEL_MAX],
411 struct roc_nix_bpf_objs *profs)
413 uint8_t mask = lvl_mask & NIX_BPF_LEVEL_F_MASK;
414 struct mbox *mbox = get_mbox(roc_nix);
415 struct nix_bandprof_alloc_req *req;
416 struct nix_bandprof_alloc_rsp *rsp;
417 uint8_t leaf_idx, mid_idx, top_idx;
420 if (roc_model_is_cn9k())
421 return NIX_ERR_HW_NOTSUP;
/* Empty mask after filtering -> nothing valid was requested. */
424 return NIX_ERR_PARAM;
426 leaf_idx = roc_nix_bpf_level_to_idx(mask & ROC_NIX_BPF_LEVEL_F_LEAF);
427 mid_idx = roc_nix_bpf_level_to_idx(mask & ROC_NIX_BPF_LEVEL_F_MID);
428 top_idx = roc_nix_bpf_level_to_idx(mask & ROC_NIX_BPF_LEVEL_F_TOP);
/* Validate requested counts against the per-layer HW capacity. */
430 if ((leaf_idx != ROC_NIX_BPF_LEVEL_IDX_INVALID) &&
431 (per_lvl_cnt[leaf_idx] > NIX_MAX_BPF_COUNT_LEAF_LAYER))
432 return NIX_ERR_INVALID_RANGE;
434 if ((mid_idx != ROC_NIX_BPF_LEVEL_IDX_INVALID) &&
435 (per_lvl_cnt[mid_idx] > NIX_MAX_BPF_COUNT_MID_LAYER))
436 return NIX_ERR_INVALID_RANGE;
438 if ((top_idx != ROC_NIX_BPF_LEVEL_IDX_INVALID) &&
439 (per_lvl_cnt[top_idx] > NIX_MAX_BPF_COUNT_TOP_LAYER))
440 return NIX_ERR_INVALID_RANGE;
442 req = mbox_alloc_msg_nix_bandprof_alloc(mbox);
/* Translate SW level indices to HW layers in the request. */
446 if (leaf_idx != ROC_NIX_BPF_LEVEL_IDX_INVALID) {
447 req->prof_count[sw_to_hw_lvl_map[leaf_idx]] =
448 per_lvl_cnt[leaf_idx];
451 if (mid_idx != ROC_NIX_BPF_LEVEL_IDX_INVALID) {
452 req->prof_count[sw_to_hw_lvl_map[mid_idx]] =
453 per_lvl_cnt[mid_idx];
456 if (top_idx != ROC_NIX_BPF_LEVEL_IDX_INVALID) {
457 req->prof_count[sw_to_hw_lvl_map[top_idx]] =
458 per_lvl_cnt[top_idx];
461 rc = mbox_process_msg(mbox, (void *)&rsp);
/* Copy the granted counts and profile ids back per level. */
465 if (leaf_idx != ROC_NIX_BPF_LEVEL_IDX_INVALID) {
466 profs[leaf_idx].level = leaf_idx;
467 profs[leaf_idx].count =
468 rsp->prof_count[sw_to_hw_lvl_map[leaf_idx]];
469 for (i = 0; i < profs[leaf_idx].count; i++) {
470 profs[leaf_idx].ids[i] =
471 rsp->prof_idx[sw_to_hw_lvl_map[leaf_idx]][i];
475 if (mid_idx != ROC_NIX_BPF_LEVEL_IDX_INVALID) {
476 profs[mid_idx].level = mid_idx;
477 profs[mid_idx].count =
478 rsp->prof_count[sw_to_hw_lvl_map[mid_idx]];
479 for (i = 0; i < profs[mid_idx].count; i++) {
480 profs[mid_idx].ids[i] =
481 rsp->prof_idx[sw_to_hw_lvl_map[mid_idx]][i];
485 if (top_idx != ROC_NIX_BPF_LEVEL_IDX_INVALID) {
486 profs[top_idx].level = top_idx;
487 profs[top_idx].count =
488 rsp->prof_count[sw_to_hw_lvl_map[top_idx]];
489 for (i = 0; i < profs[top_idx].count; i++) {
490 profs[top_idx].ids[i] =
491 rsp->prof_idx[sw_to_hw_lvl_map[top_idx]][i];
/* Free previously allocated bandwidth profiles back to the AF.
 * NOTE(review): the bound uses '>=' so num_prof equal to
 * NIX_RX_BAND_PROF_LAYER_MAX is rejected — confirm whether '>' was
 * intended (one profs entry per layer would make MAX itself valid).
 */
500 roc_nix_bpf_free(struct roc_nix *roc_nix, struct roc_nix_bpf_objs *profs,
503 struct mbox *mbox = get_mbox(roc_nix);
504 struct nix_bandprof_free_req *req;
508 if (num_prof >= NIX_RX_BAND_PROF_LAYER_MAX)
509 return NIX_ERR_INVALID_RANGE;
511 req = mbox_alloc_msg_nix_bandprof_free(mbox);
/* Translate each SW-level object back to its HW layer and ids. */
515 for (i = 0; i < num_prof; i++) {
516 level = sw_to_hw_lvl_map[profs[i].level];
517 req->prof_count[level] = profs[i].count;
518 for (j = 0; j < profs[i].count; j++)
519 req->prof_idx[level][j] = profs[i].ids[j];
522 return mbox_process(mbox);
/* Ask the AF to free every bandwidth profile owned by this LF. */
526 roc_nix_bpf_free_all(struct roc_nix *roc_nix)
528 struct mbox *mbox = get_mbox(roc_nix);
529 struct nix_bandprof_free_req *req;
531 req = mbox_alloc_msg_nix_bandprof_free(mbox);
535 req->free_all = true;
536 return mbox_process(mbox);
/* Configure one bandwidth profile (@id at level @lvl_flag) with the
 * metering parameters in @cfg via an AQ WRITE on the BAND_PROF context.
 *
 * Supports RFC 2697 (srTCM), RFC 2698 (trTCM) and RFC 4115 algorithms.
 * For each field written, the corresponding prof_mask bit-field is set
 * to all-ones (~0 of a zeroed mask) so only those fields are updated.
 *
 * NOTE(review): return values of meter_rate_to_nix()/meter_burst_to_nix()
 * are not checked, so out-of-range cir/pir/cbs/ebs may silently program
 * stale exponent/mantissa values — verify intent.
 */
540 roc_nix_bpf_config(struct roc_nix *roc_nix, uint16_t id,
541 enum roc_nix_bpf_level_flag lvl_flag,
542 struct roc_nix_bpf_cfg *cfg)
544 uint64_t exponent_p = 0, mantissa_p = 0, div_exp_p = 0;
545 struct mbox *mbox = get_mbox(roc_nix);
546 struct nix_cn10k_aq_enq_req *aq;
547 uint32_t policer_timeunit;
551 if (roc_model_is_cn9k())
552 return NIX_ERR_HW_NOTSUP;
555 return NIX_ERR_PARAM;
/* Rate encoding depends on the AF-configured policer time unit. */
557 rc = roc_nix_bpf_timeunit_get(roc_nix, &policer_timeunit);
561 level_idx = roc_nix_bpf_level_to_idx(lvl_flag);
562 if (level_idx == ROC_NIX_BPF_LEVEL_IDX_INVALID)
563 return NIX_ERR_PARAM;
565 aq = mbox_alloc_msg_nix_cn10k_aq_enq(mbox);
/* qidx encodes the HW layer in bits [15:14] and the profile id below. */
568 aq->qidx = (sw_to_hw_lvl_map[level_idx] << 14) | id;
569 aq->ctype = NIX_AQ_CTYPE_BAND_PROF;
570 aq->op = NIX_AQ_INSTOP_WRITE;
/* Byte mode halves the default adjust mantissa. */
572 aq->prof.adjust_exponent = NIX_BPF_DEFAULT_ADJUST_EXPONENT;
573 aq->prof.adjust_mantissa = NIX_BPF_DEFAULT_ADJUST_MANTISSA;
574 if (cfg->lmode == ROC_NIX_BPF_LMODE_BYTE)
575 aq->prof.adjust_mantissa = NIX_BPF_DEFAULT_ADJUST_MANTISSA / 2;
577 aq->prof_mask.adjust_exponent = ~(aq->prof_mask.adjust_exponent);
578 aq->prof_mask.adjust_mantissa = ~(aq->prof_mask.adjust_mantissa);
/* srTCM: single rate (cir), committed + excess bursts (cbs/ebs). */
581 case ROC_NIX_BPF_ALGO_2697:
582 meter_rate_to_nix(cfg->algo2697.cir, &exponent_p, &mantissa_p,
583 &div_exp_p, policer_timeunit);
584 aq->prof.cir_mantissa = mantissa_p;
585 aq->prof.cir_exponent = exponent_p;
587 meter_burst_to_nix(cfg->algo2697.cbs, &exponent_p, &mantissa_p);
588 aq->prof.cbs_mantissa = mantissa_p;
589 aq->prof.cbs_exponent = exponent_p;
591 meter_burst_to_nix(cfg->algo2697.ebs, &exponent_p, &mantissa_p);
592 aq->prof.pebs_mantissa = mantissa_p;
593 aq->prof.pebs_exponent = exponent_p;
595 aq->prof_mask.cir_mantissa = ~(aq->prof_mask.cir_mantissa);
596 aq->prof_mask.cbs_mantissa = ~(aq->prof_mask.cbs_mantissa);
597 aq->prof_mask.pebs_mantissa = ~(aq->prof_mask.pebs_mantissa);
598 aq->prof_mask.cir_exponent = ~(aq->prof_mask.cir_exponent);
599 aq->prof_mask.cbs_exponent = ~(aq->prof_mask.cbs_exponent);
600 aq->prof_mask.pebs_exponent = ~(aq->prof_mask.pebs_exponent);
/* trTCM: two rates (cir/pir) and two bursts (cbs/pbs). */
603 case ROC_NIX_BPF_ALGO_2698:
604 meter_rate_to_nix(cfg->algo2698.cir, &exponent_p, &mantissa_p,
605 &div_exp_p, policer_timeunit);
606 aq->prof.cir_mantissa = mantissa_p;
607 aq->prof.cir_exponent = exponent_p;
609 meter_rate_to_nix(cfg->algo2698.pir, &exponent_p, &mantissa_p,
610 &div_exp_p, policer_timeunit);
611 aq->prof.peir_mantissa = mantissa_p;
612 aq->prof.peir_exponent = exponent_p;
614 meter_burst_to_nix(cfg->algo2698.cbs, &exponent_p, &mantissa_p);
615 aq->prof.cbs_mantissa = mantissa_p;
616 aq->prof.cbs_exponent = exponent_p;
618 meter_burst_to_nix(cfg->algo2698.pbs, &exponent_p, &mantissa_p);
619 aq->prof.pebs_mantissa = mantissa_p;
620 aq->prof.pebs_exponent = exponent_p;
622 aq->prof_mask.cir_mantissa = ~(aq->prof_mask.cir_mantissa);
623 aq->prof_mask.peir_mantissa = ~(aq->prof_mask.peir_mantissa);
624 aq->prof_mask.cbs_mantissa = ~(aq->prof_mask.cbs_mantissa);
625 aq->prof_mask.pebs_mantissa = ~(aq->prof_mask.pebs_mantissa);
626 aq->prof_mask.cir_exponent = ~(aq->prof_mask.cir_exponent);
627 aq->prof_mask.peir_exponent = ~(aq->prof_mask.peir_exponent);
628 aq->prof_mask.cbs_exponent = ~(aq->prof_mask.cbs_exponent);
629 aq->prof_mask.pebs_exponent = ~(aq->prof_mask.pebs_exponent);
/* RFC 4115: cir/eir rates with cbs/ebs bursts. */
632 case ROC_NIX_BPF_ALGO_4115:
633 meter_rate_to_nix(cfg->algo4115.cir, &exponent_p, &mantissa_p,
634 &div_exp_p, policer_timeunit);
635 aq->prof.cir_mantissa = mantissa_p;
636 aq->prof.cir_exponent = exponent_p;
638 meter_rate_to_nix(cfg->algo4115.eir, &exponent_p, &mantissa_p,
639 &div_exp_p, policer_timeunit);
640 aq->prof.peir_mantissa = mantissa_p;
641 aq->prof.peir_exponent = exponent_p;
643 meter_burst_to_nix(cfg->algo4115.cbs, &exponent_p, &mantissa_p);
644 aq->prof.cbs_mantissa = mantissa_p;
645 aq->prof.cbs_exponent = exponent_p;
647 meter_burst_to_nix(cfg->algo4115.ebs, &exponent_p, &mantissa_p);
648 aq->prof.pebs_mantissa = mantissa_p;
649 aq->prof.pebs_exponent = exponent_p;
651 aq->prof_mask.cir_mantissa = ~(aq->prof_mask.cir_mantissa);
652 aq->prof_mask.peir_mantissa = ~(aq->prof_mask.peir_mantissa);
653 aq->prof_mask.cbs_mantissa = ~(aq->prof_mask.cbs_mantissa);
654 aq->prof_mask.pebs_mantissa = ~(aq->prof_mask.pebs_mantissa);
656 aq->prof_mask.cir_exponent = ~(aq->prof_mask.cir_exponent);
657 aq->prof_mask.peir_exponent = ~(aq->prof_mask.peir_exponent);
658 aq->prof_mask.cbs_exponent = ~(aq->prof_mask.cbs_exponent);
659 aq->prof_mask.pebs_exponent = ~(aq->prof_mask.pebs_exponent);
/* Unknown algorithm. */
663 return NIX_ERR_PARAM;
/* Algorithm-independent fields: mode, input color, actions per color. */
666 aq->prof.lmode = cfg->lmode;
667 aq->prof.icolor = cfg->icolor;
668 aq->prof.pc_mode = cfg->pc_mode;
669 aq->prof.tnl_ena = cfg->tnl_ena;
670 aq->prof.gc_action = cfg->action[ROC_NIX_BPF_COLOR_GREEN];
671 aq->prof.yc_action = cfg->action[ROC_NIX_BPF_COLOR_YELLOW];
672 aq->prof.rc_action = cfg->action[ROC_NIX_BPF_COLOR_RED];
674 aq->prof_mask.lmode = ~(aq->prof_mask.lmode);
675 aq->prof_mask.icolor = ~(aq->prof_mask.icolor);
676 aq->prof_mask.pc_mode = ~(aq->prof_mask.pc_mode);
677 aq->prof_mask.tnl_ena = ~(aq->prof_mask.tnl_ena);
678 aq->prof_mask.gc_action = ~(aq->prof_mask.gc_action);
679 aq->prof_mask.yc_action = ~(aq->prof_mask.yc_action);
680 aq->prof_mask.rc_action = ~(aq->prof_mask.rc_action);
682 return mbox_process(mbox);
/* Attach (enable) or detach (disable) leaf bandwidth profile @id on RX
 * queue @rq via an AQ WRITE on the RQ context.
 */
686 roc_nix_bpf_ena_dis(struct roc_nix *roc_nix, uint16_t id, struct roc_nix_rq *rq,
689 struct nix *nix = roc_nix_to_nix_priv(roc_nix);
690 struct mbox *mbox = get_mbox(roc_nix);
691 struct nix_cn10k_aq_enq_req *aq;
694 if (roc_model_is_cn9k())
695 return NIX_ERR_HW_NOTSUP;
697 if (rq->qid >= nix->nb_rx_queues)
698 return NIX_ERR_QUEUE_INVALID_RANGE;
700 aq = mbox_alloc_msg_nix_cn10k_aq_enq(mbox);
704 aq->ctype = NIX_AQ_CTYPE_RQ;
705 aq->op = NIX_AQ_INSTOP_WRITE;
707 aq->rq.policer_ena = enable;
708 aq->rq_mask.policer_ena = ~(aq->rq_mask.policer_ena);
710 aq->rq.band_prof_id = id;
711 aq->rq_mask.band_prof_id = ~(aq->rq_mask.band_prof_id);
714 rc = mbox_process(mbox);
/* Read the band-profile context of @id at level @lvl_flag via AQ READ
 * and dump it.  NOTE(review): the rc check between mbox_process_msg()
 * and the dump is not visible in this truncated listing.
 */
725 roc_nix_bpf_dump(struct roc_nix *roc_nix, uint16_t id,
726 enum roc_nix_bpf_level_flag lvl_flag)
728 struct mbox *mbox = get_mbox(roc_nix);
729 struct nix_cn10k_aq_enq_rsp *rsp;
730 struct nix_cn10k_aq_enq_req *aq;
734 if (roc_model_is_cn9k())
735 return NIX_ERR_HW_NOTSUP;
737 level_idx = roc_nix_bpf_level_to_idx(lvl_flag);
738 if (level_idx == ROC_NIX_BPF_LEVEL_IDX_INVALID)
739 return NIX_ERR_PARAM;
741 aq = mbox_alloc_msg_nix_cn10k_aq_enq(mbox);
744 aq->qidx = (sw_to_hw_lvl_map[level_idx] << 14 | id);
745 aq->ctype = NIX_AQ_CTYPE_BAND_PROF;
746 aq->op = NIX_AQ_INSTOP_READ;
747 rc = mbox_process_msg(mbox, (void *)&rsp);
749 plt_dump("============= band prof id =%d ===============", id);
750 nix_lf_bpf_dump(&rsp->prof);
/* Program the pre-color table selected by tbl->mode, then update the
 * matching band profile (@id at @lvl_flag) with the resulting pc_mode
 * and tunnel-enable flag via AQ WRITE.
 *
 * Each mode requires an exact table size (VLAN/GEN: 16, DSCP: 64);
 * mismatches are rejected (error paths truncated from this listing).
 */
757 roc_nix_bpf_pre_color_tbl_setup(struct roc_nix *roc_nix, uint16_t id,
758 enum roc_nix_bpf_level_flag lvl_flag,
759 struct roc_nix_bpf_precolor *tbl)
761 struct mbox *mbox = get_mbox(roc_nix);
762 struct nix_cn10k_aq_enq_req *aq;
763 uint8_t pc_mode, tn_ena;
767 if (!tbl || !tbl->count)
768 return NIX_ERR_PARAM;
770 if (roc_model_is_cn9k())
771 return NIX_ERR_HW_NOTSUP;
773 level_idx = roc_nix_bpf_level_to_idx(lvl_flag);
774 if (level_idx == ROC_NIX_BPF_LEVEL_IDX_INVALID)
775 return NIX_ERR_PARAM;
/* Dispatch on pre-color mode; each branch programs its table and
 * records the pc_mode/tn_ena values for the profile update below.
 */
778 case ROC_NIX_BPF_PC_MODE_VLAN_INNER:
779 case ROC_NIX_BPF_PC_MODE_VLAN_OUTER:
780 if (tbl->count != NIX_BPF_PRECOLOR_VLAN_TABLE_SIZE) {
781 plt_err("Table size must be %d",
782 NIX_BPF_PRECOLOR_VLAN_TABLE_SIZE);
786 tn_ena = nix_precolor_vlan_table_update(roc_nix, tbl);
787 pc_mode = NIX_RX_BAND_PROF_PC_MODE_VLAN;
789 case ROC_NIX_BPF_PC_MODE_DSCP_INNER:
790 if (tbl->count != NIX_BPF_PRECOLOR_DSCP_TABLE_SIZE) {
791 plt_err("Table size must be %d",
792 NIX_BPF_PRECOLOR_DSCP_TABLE_SIZE);
796 tn_ena = nix_precolor_inner_dscp_table_update(roc_nix, tbl);
797 pc_mode = NIX_RX_BAND_PROF_PC_MODE_DSCP;
799 case ROC_NIX_BPF_PC_MODE_DSCP_OUTER:
800 if (tbl->count != NIX_BPF_PRECOLOR_DSCP_TABLE_SIZE) {
801 plt_err("Table size must be %d",
802 NIX_BPF_PRECOLOR_DSCP_TABLE_SIZE);
806 tn_ena = nix_precolor_outer_dscp_table_update(roc_nix, tbl);
807 pc_mode = NIX_RX_BAND_PROF_PC_MODE_DSCP;
809 case ROC_NIX_BPF_PC_MODE_GEN_INNER:
810 case ROC_NIX_BPF_PC_MODE_GEN_OUTER:
811 if (tbl->count != NIX_BPF_PRECOLOR_GEN_TABLE_SIZE) {
812 plt_err("Table size must be %d",
813 NIX_BPF_PRECOLOR_GEN_TABLE_SIZE);
818 tn_ena = nix_precolor_gen_table_update(roc_nix, tbl);
819 pc_mode = NIX_RX_BAND_PROF_PC_MODE_GEN;
826 /* Update corresponding bandwidth profile too */
827 aq = mbox_alloc_msg_nix_cn10k_aq_enq(mbox);
830 aq->qidx = (sw_to_hw_lvl_map[level_idx] << 14) | id;
831 aq->ctype = NIX_AQ_CTYPE_BAND_PROF;
832 aq->op = NIX_AQ_INSTOP_WRITE;
833 aq->prof.pc_mode = pc_mode;
834 aq->prof.tnl_ena = tn_ena;
835 aq->prof_mask.pc_mode = ~(aq->prof_mask.pc_mode);
836 aq->prof_mask.tnl_ena = ~(aq->prof_mask.tnl_ena);
838 return mbox_process(mbox);
/* Chain profile @src_id (at @lvl_flag) to higher-layer profile @dst_id.
 * Passing ROC_NIX_BPF_ID_INVALID as dst disconnects (clears hl_en);
 * otherwise hl_en is set and band_prof_id points at the parent.
 */
845 roc_nix_bpf_connect(struct roc_nix *roc_nix,
846 enum roc_nix_bpf_level_flag lvl_flag, uint16_t src_id,
849 struct mbox *mbox = get_mbox(roc_nix);
850 struct nix_cn10k_aq_enq_req *aq;
853 if (roc_model_is_cn9k())
854 return NIX_ERR_HW_NOTSUP;
856 level_idx = roc_nix_bpf_level_to_idx(lvl_flag);
857 if (level_idx == ROC_NIX_BPF_LEVEL_IDX_INVALID)
858 return NIX_ERR_PARAM;
860 aq = mbox_alloc_msg_nix_cn10k_aq_enq(mbox);
863 aq->qidx = (sw_to_hw_lvl_map[level_idx] << 14) | src_id;
864 aq->ctype = NIX_AQ_CTYPE_BAND_PROF;
865 aq->op = NIX_AQ_INSTOP_WRITE;
867 if (dst_id == ROC_NIX_BPF_ID_INVALID) {
868 aq->prof.hl_en = false;
869 aq->prof_mask.hl_en = ~(aq->prof_mask.hl_en);
871 aq->prof.hl_en = true;
872 aq->prof.band_prof_id = dst_id;
873 aq->prof_mask.hl_en = ~(aq->prof_mask.hl_en);
874 aq->prof_mask.band_prof_id = ~(aq->prof_mask.band_prof_id);
877 return mbox_process(mbox);
881 roc_nix_bpf_stats_read(struct roc_nix *roc_nix, uint16_t id, uint64_t mask,
882 enum roc_nix_bpf_level_flag lvl_flag,
883 uint64_t stats[ROC_NIX_BPF_STATS_MAX])
885 uint8_t yellow_pkt_pass, yellow_octs_pass, yellow_pkt_drop;
886 uint8_t green_octs_drop, yellow_octs_drop, red_octs_drop;
887 uint8_t green_pkt_pass, green_octs_pass, green_pkt_drop;
888 uint8_t red_pkt_pass, red_octs_pass, red_pkt_drop;
889 struct mbox *mbox = get_mbox(roc_nix);
890 struct nix_cn10k_aq_enq_rsp *rsp;
891 struct nix_cn10k_aq_enq_req *aq;
895 if (roc_model_is_cn9k())
896 return NIX_ERR_HW_NOTSUP;
898 level_idx = roc_nix_bpf_level_to_idx(lvl_flag);
899 if (level_idx == ROC_NIX_BPF_LEVEL_IDX_INVALID)
900 return NIX_ERR_PARAM;
902 aq = mbox_alloc_msg_nix_cn10k_aq_enq(mbox);
905 aq->qidx = (sw_to_hw_lvl_map[level_idx] << 14 | id);
906 aq->ctype = NIX_AQ_CTYPE_BAND_PROF;
907 aq->op = NIX_AQ_INSTOP_READ;
908 rc = mbox_process_msg(mbox, (void *)&rsp);
913 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_GREEN_PKT_F_PASS);
915 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_GREEN_OCTS_F_PASS);
917 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_GREEN_PKT_F_DROP);
919 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_GREEN_OCTS_F_DROP);
921 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_YELLOW_PKT_F_PASS);
923 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_YELLOW_OCTS_F_PASS);
925 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_YELLOW_PKT_F_DROP);
927 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_YELLOW_OCTS_F_DROP);
929 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_RED_PKT_F_PASS);
931 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_RED_OCTS_F_PASS);
933 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_RED_PKT_F_DROP);
935 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_RED_OCTS_F_DROP);
937 if (green_pkt_pass != ROC_NIX_BPF_STATS_MAX)
938 stats[green_pkt_pass] = rsp->prof.green_pkt_pass;
940 if (green_octs_pass != ROC_NIX_BPF_STATS_MAX)
941 stats[green_octs_pass] = rsp->prof.green_octs_pass;
943 if (green_pkt_drop != ROC_NIX_BPF_STATS_MAX)
944 stats[green_pkt_drop] = rsp->prof.green_pkt_drop;
946 if (green_octs_drop != ROC_NIX_BPF_STATS_MAX)
947 stats[green_octs_drop] = rsp->prof.green_octs_pass;
949 if (yellow_pkt_pass != ROC_NIX_BPF_STATS_MAX)
950 stats[yellow_pkt_pass] = rsp->prof.yellow_pkt_pass;
952 if (yellow_octs_pass != ROC_NIX_BPF_STATS_MAX)
953 stats[yellow_octs_pass] = rsp->prof.yellow_octs_pass;
955 if (yellow_pkt_drop != ROC_NIX_BPF_STATS_MAX)
956 stats[yellow_pkt_drop] = rsp->prof.yellow_pkt_drop;
958 if (yellow_octs_drop != ROC_NIX_BPF_STATS_MAX)
959 stats[yellow_octs_drop] = rsp->prof.yellow_octs_drop;
961 if (red_pkt_pass != ROC_NIX_BPF_STATS_MAX)
962 stats[red_pkt_pass] = rsp->prof.red_pkt_pass;
964 if (red_octs_pass != ROC_NIX_BPF_STATS_MAX)
965 stats[red_octs_pass] = rsp->prof.red_octs_pass;
967 if (red_pkt_drop != ROC_NIX_BPF_STATS_MAX)
968 stats[red_pkt_drop] = rsp->prof.red_pkt_drop;
970 if (red_octs_drop != ROC_NIX_BPF_STATS_MAX)
971 stats[red_octs_drop] = rsp->prof.red_octs_drop;
/* Zero the selected per-color counters of profile @id at @lvl_flag via
 * an AQ WRITE: each flag in @mask zeroes its counter field and sets the
 * corresponding all-ones write mask so only those fields are touched.
 */
977 roc_nix_bpf_stats_reset(struct roc_nix *roc_nix, uint16_t id, uint64_t mask,
978 enum roc_nix_bpf_level_flag lvl_flag)
980 struct mbox *mbox = get_mbox(roc_nix);
981 struct nix_cn10k_aq_enq_req *aq;
984 if (roc_model_is_cn9k())
985 return NIX_ERR_HW_NOTSUP;
987 level_idx = roc_nix_bpf_level_to_idx(lvl_flag);
988 if (level_idx == ROC_NIX_BPF_LEVEL_IDX_INVALID)
989 return NIX_ERR_PARAM;
991 aq = mbox_alloc_msg_nix_cn10k_aq_enq(mbox);
994 aq->qidx = (sw_to_hw_lvl_map[level_idx] << 14 | id);
995 aq->ctype = NIX_AQ_CTYPE_BAND_PROF;
996 aq->op = NIX_AQ_INSTOP_WRITE;
998 if (mask & ROC_NIX_BPF_GREEN_PKT_F_PASS) {
999 aq->prof.green_pkt_pass = 0;
1000 aq->prof_mask.green_pkt_pass = ~(aq->prof_mask.green_pkt_pass);
1002 if (mask & ROC_NIX_BPF_GREEN_OCTS_F_PASS) {
1003 aq->prof.green_octs_pass = 0;
1004 aq->prof_mask.green_octs_pass =
1005 ~(aq->prof_mask.green_octs_pass);
1007 if (mask & ROC_NIX_BPF_GREEN_PKT_F_DROP) {
1008 aq->prof.green_pkt_drop = 0;
1009 aq->prof_mask.green_pkt_drop = ~(aq->prof_mask.green_pkt_drop);
1011 if (mask & ROC_NIX_BPF_GREEN_OCTS_F_DROP) {
1012 aq->prof.green_octs_drop = 0;
1013 aq->prof_mask.green_octs_drop =
1014 ~(aq->prof_mask.green_octs_drop);
1016 if (mask & ROC_NIX_BPF_YELLOW_PKT_F_PASS) {
1017 aq->prof.yellow_pkt_pass = 0;
1018 aq->prof_mask.yellow_pkt_pass =
1019 ~(aq->prof_mask.yellow_pkt_pass);
1021 if (mask & ROC_NIX_BPF_YELLOW_OCTS_F_PASS) {
1022 aq->prof.yellow_octs_pass = 0;
1023 aq->prof_mask.yellow_octs_pass =
1024 ~(aq->prof_mask.yellow_octs_pass);
1026 if (mask & ROC_NIX_BPF_YELLOW_PKT_F_DROP) {
1027 aq->prof.yellow_pkt_drop = 0;
1028 aq->prof_mask.yellow_pkt_drop =
1029 ~(aq->prof_mask.yellow_pkt_drop);
1031 if (mask & ROC_NIX_BPF_YELLOW_OCTS_F_DROP) {
1032 aq->prof.yellow_octs_drop = 0;
1033 aq->prof_mask.yellow_octs_drop =
1034 ~(aq->prof_mask.yellow_octs_drop);
1036 if (mask & ROC_NIX_BPF_RED_PKT_F_PASS) {
1037 aq->prof.red_pkt_pass = 0;
1038 aq->prof_mask.red_pkt_pass = ~(aq->prof_mask.red_pkt_pass);
1040 if (mask & ROC_NIX_BPF_RED_OCTS_F_PASS) {
1041 aq->prof.red_octs_pass = 0;
1042 aq->prof_mask.red_octs_pass = ~(aq->prof_mask.red_octs_pass);
1044 if (mask & ROC_NIX_BPF_RED_PKT_F_DROP) {
1045 aq->prof.red_pkt_drop = 0;
1046 aq->prof_mask.red_pkt_drop = ~(aq->prof_mask.red_pkt_drop);
1048 if (mask & ROC_NIX_BPF_RED_OCTS_F_DROP) {
1049 aq->prof.red_octs_drop = 0;
1050 aq->prof_mask.red_octs_drop = ~(aq->prof_mask.red_octs_drop);
1053 return mbox_process(mbox);
/* Read LF-wide policer counters directly from the NIX LF RX stat
 * registers (no mailbox round trip), filling stats[] slots per @mask.
 *
 * NOTE(review): the color<->register pairing below looks inconsistent —
 * e.g. green_pkt_pass reads RX_GC_OCTS_PASSED and green_octs_pass reads
 * RX_YC_PKTS_PASSED, and the same pkt/octs and color cross-wiring
 * repeats for every slot. Verify against the NIX_STAT_LF_RX_* register
 * map; this is either a HW-enum quirk or a copy-paste scramble.
 */
1057 roc_nix_bpf_lf_stats_read(struct roc_nix *roc_nix, uint64_t mask,
1058 uint64_t stats[ROC_NIX_BPF_STATS_MAX])
1060 uint8_t yellow_pkt_pass, yellow_octs_pass, yellow_pkt_drop;
1061 uint8_t green_octs_drop, yellow_octs_drop, red_octs_drop;
1062 uint8_t green_pkt_pass, green_octs_pass, green_pkt_drop;
1063 uint8_t red_pkt_pass, red_octs_pass, red_pkt_drop;
1064 struct nix *nix = roc_nix_to_nix_priv(roc_nix);
/* Resolve each requested flag to its stats[] slot (assignment LHS
 * lines are truncated from this listing).
 */
1067 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_GREEN_PKT_F_PASS);
1069 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_GREEN_OCTS_F_PASS);
1071 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_GREEN_PKT_F_DROP);
1073 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_GREEN_OCTS_F_DROP);
1075 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_YELLOW_PKT_F_PASS);
1077 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_YELLOW_OCTS_F_PASS);
1079 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_YELLOW_PKT_F_DROP);
1081 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_YELLOW_OCTS_F_DROP);
1083 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_RED_PKT_F_PASS);
1085 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_RED_OCTS_F_PASS);
1087 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_RED_PKT_F_DROP);
1089 roc_nix_bpf_stats_to_idx(mask & ROC_NIX_BPF_RED_OCTS_F_DROP);
1091 if (green_pkt_pass != ROC_NIX_BPF_STATS_MAX) {
1092 stats[green_pkt_pass] =
1093 NIX_RD_STATS(NIX_STAT_LF_RX_RX_GC_OCTS_PASSED);
1096 if (green_octs_pass != ROC_NIX_BPF_STATS_MAX) {
1097 stats[green_octs_pass] =
1098 NIX_RD_STATS(NIX_STAT_LF_RX_RX_YC_PKTS_PASSED);
1101 if (green_pkt_drop != ROC_NIX_BPF_STATS_MAX) {
1102 stats[green_pkt_drop] =
1103 NIX_RD_STATS(NIX_STAT_LF_RX_RX_GC_OCTS_DROP);
1106 if (green_octs_drop != ROC_NIX_BPF_STATS_MAX) {
1107 stats[green_octs_drop] =
1108 NIX_RD_STATS(NIX_STAT_LF_RX_RX_YC_PKTS_DROP);
1111 if (yellow_pkt_pass != ROC_NIX_BPF_STATS_MAX) {
1112 stats[yellow_pkt_pass] =
1113 NIX_RD_STATS(NIX_STAT_LF_RX_RX_GC_PKTS_PASSED);
1116 if (yellow_octs_pass != ROC_NIX_BPF_STATS_MAX) {
1117 stats[yellow_octs_pass] =
1118 NIX_RD_STATS(NIX_STAT_LF_RX_RX_RC_OCTS_PASSED);
1121 if (yellow_pkt_drop != ROC_NIX_BPF_STATS_MAX) {
1122 stats[yellow_pkt_drop] =
1123 NIX_RD_STATS(NIX_STAT_LF_RX_RX_GC_PKTS_DROP);
1126 if (yellow_octs_drop != ROC_NIX_BPF_STATS_MAX) {
1127 stats[yellow_octs_drop] =
1128 NIX_RD_STATS(NIX_STAT_LF_RX_RX_RC_OCTS_DROP);
1131 if (red_pkt_pass != ROC_NIX_BPF_STATS_MAX) {
1132 stats[red_pkt_pass] =
1133 NIX_RD_STATS(NIX_STAT_LF_RX_RX_YC_OCTS_PASSED);
1136 if (red_octs_pass != ROC_NIX_BPF_STATS_MAX) {
1137 stats[red_octs_pass] =
1138 NIX_RD_STATS(NIX_STAT_LF_RX_RX_RC_PKTS_PASSED);
1141 if (red_pkt_drop != ROC_NIX_BPF_STATS_MAX) {
1142 stats[red_pkt_drop] =
1143 NIX_RD_STATS(NIX_STAT_LF_RX_RX_YC_OCTS_DROP);
1146 if (red_octs_drop != ROC_NIX_BPF_STATS_MAX) {
1147 stats[red_octs_drop] =
1148 NIX_RD_STATS(NIX_STAT_LF_RX_RX_RC_PKTS_DROP);
/* Clear LF-wide policer counters selected by @mask by writing zero to
 * the corresponding NIX LF RX stat registers.
 *
 * NOTE(review): NIX_RST_STATS() indexes NIX_LF_RX_STATX() with the
 * ROC_NIX_BPF_*_F_* bit-flag values themselves rather than the
 * NIX_STAT_LF_RX_* register indices used by roc_nix_bpf_lf_stats_read().
 * Confirm the flag encodings actually equal valid stat indices;
 * otherwise the wrong registers are cleared.
 */
1155 roc_nix_bpf_lf_stats_reset(struct roc_nix *roc_nix, uint64_t mask)
1157 struct nix *nix = roc_nix_to_nix_priv(roc_nix);
1159 if (mask & ROC_NIX_BPF_GREEN_PKT_F_PASS)
1160 NIX_RST_STATS(ROC_NIX_BPF_GREEN_PKT_F_PASS);
1161 if (mask & ROC_NIX_BPF_GREEN_OCTS_F_PASS)
1162 NIX_RST_STATS(ROC_NIX_BPF_GREEN_OCTS_F_PASS);
1163 if (mask & ROC_NIX_BPF_GREEN_PKT_F_DROP)
1164 NIX_RST_STATS(ROC_NIX_BPF_GREEN_PKT_F_DROP);
1165 if (mask & ROC_NIX_BPF_GREEN_OCTS_F_DROP)
1166 NIX_RST_STATS(ROC_NIX_BPF_GREEN_OCTS_F_DROP);
1167 if (mask & ROC_NIX_BPF_YELLOW_PKT_F_PASS)
1168 NIX_RST_STATS(ROC_NIX_BPF_YELLOW_PKT_F_PASS);
1169 if (mask & ROC_NIX_BPF_YELLOW_OCTS_F_PASS)
1170 NIX_RST_STATS(ROC_NIX_BPF_YELLOW_OCTS_F_PASS);
1171 if (mask & ROC_NIX_BPF_YELLOW_PKT_F_DROP)
1172 NIX_RST_STATS(ROC_NIX_BPF_YELLOW_PKT_F_DROP);
1173 if (mask & ROC_NIX_BPF_YELLOW_OCTS_F_DROP)
1174 NIX_RST_STATS(ROC_NIX_BPF_YELLOW_OCTS_F_DROP);
1175 if (mask & ROC_NIX_BPF_RED_PKT_F_PASS)
1176 NIX_RST_STATS(ROC_NIX_BPF_RED_PKT_F_PASS);
1177 if (mask & ROC_NIX_BPF_RED_OCTS_F_PASS)
1178 NIX_RST_STATS(ROC_NIX_BPF_RED_OCTS_F_PASS);
1179 if (mask & ROC_NIX_BPF_RED_PKT_F_DROP)
1180 NIX_RST_STATS(ROC_NIX_BPF_RED_PKT_F_DROP);
1181 if (mask & ROC_NIX_BPF_RED_OCTS_F_DROP)
1182 NIX_RST_STATS(ROC_NIX_BPF_RED_OCTS_F_DROP);