/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(C) 2021 Marvell.
 */

#include "roc_api.h"
#include "roc_priv.h"
static uint8_t
nix_tm_lvl2nix_tl1_root(uint32_t lvl)
{
	switch (lvl) {
	case ROC_TM_LVL_ROOT:
		return NIX_TXSCH_LVL_TL1;
	case ROC_TM_LVL_SCH1:
		return NIX_TXSCH_LVL_TL2;
	case ROC_TM_LVL_SCH2:
		return NIX_TXSCH_LVL_TL3;
	case ROC_TM_LVL_SCH3:
		return NIX_TXSCH_LVL_TL4;
	case ROC_TM_LVL_SCH4:
		return NIX_TXSCH_LVL_SMQ;
	default:
		return NIX_TXSCH_LVL_CNT;
	}
}
static uint8_t
nix_tm_lvl2nix_tl2_root(uint32_t lvl)
{
	switch (lvl) {
	case ROC_TM_LVL_ROOT:
		return NIX_TXSCH_LVL_TL2;
	case ROC_TM_LVL_SCH1:
		return NIX_TXSCH_LVL_TL3;
	case ROC_TM_LVL_SCH2:
		return NIX_TXSCH_LVL_TL4;
	case ROC_TM_LVL_SCH3:
		return NIX_TXSCH_LVL_SMQ;
	default:
		return NIX_TXSCH_LVL_CNT;
	}
}
uint8_t
nix_tm_lvl2nix(struct nix *nix, uint32_t lvl)
{
	if (nix_tm_have_tl1_access(nix))
		return nix_tm_lvl2nix_tl1_root(lvl);
	else
		return nix_tm_lvl2nix_tl2_root(lvl);
}
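
/* Note: with TL1 access (typically a PF), the user root maps to hardware
 * TL1; otherwise the tree is rooted at TL2 and every user level shifts
 * down by one hardware scheduler level.
 */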
struct nix_tm_shaper_profile *
nix_tm_shaper_profile_search(struct nix *nix, uint32_t id)
{
	struct nix_tm_shaper_profile *profile;

	TAILQ_FOREACH(profile, &nix->shaper_profile_list, shaper) {
		if (profile->id == id)
			return profile;
	}
	return NULL;
}
struct nix_tm_node *
nix_tm_node_search(struct nix *nix, uint32_t node_id, enum roc_nix_tm_tree tree)
{
	struct nix_tm_node_list *list;
	struct nix_tm_node *node;

	list = nix_tm_node_list(nix, tree);
	TAILQ_FOREACH(node, list, node) {
		if (node->id == node_id)
			return node;
	}
	return NULL;
}
uint64_t
nix_tm_shaper_rate_conv(uint64_t value, uint64_t *exponent_p,
			uint64_t *mantissa_p, uint64_t *div_exp_p)
{
	uint64_t div_exp, exponent, mantissa;

	/* Boundary checks */
	if (value < NIX_TM_MIN_SHAPER_RATE || value > NIX_TM_MAX_SHAPER_RATE)
		return 0;
	if (value <= NIX_TM_SHAPER_RATE(0, 0, 0)) {
		/* Calculate rate div_exp and mantissa using
		 * the following formula:
		 *
		 * value = (2E6 * (256 + mantissa)
		 *		/ ((1 << div_exp) * 256))
		 */
		div_exp = 0;
		exponent = 0;
		mantissa = NIX_TM_MAX_RATE_MANTISSA;

		while (value < (NIX_TM_SHAPER_RATE_CONST / (1 << div_exp)))
			div_exp += 1;

		while (value < ((NIX_TM_SHAPER_RATE_CONST * (256 + mantissa)) /
				((1 << div_exp) * 256)))
			mantissa -= 1;
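
		/* Worked example from the formula above: value = 1E6 (1 Mbps)
		 * settles at div_exp = 1, mantissa = 0, since
		 * (2E6 * 256) / ((1 << 1) * 256) = 1E6, an exact match.
		 */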
	} else {
		/* Calculate rate exponent and mantissa using
		 * the following formula:
		 *
		 * value = (2E6 * ((256 + mantissa) << exponent)) / 256
		 */
		div_exp = 0;
		exponent = NIX_TM_MAX_RATE_EXPONENT;
		mantissa = NIX_TM_MAX_RATE_MANTISSA;

		while (value < (NIX_TM_SHAPER_RATE_CONST * (1 << exponent)))
			exponent -= 1;

		while (value < ((NIX_TM_SHAPER_RATE_CONST *
				 ((256 + mantissa) << exponent)) /
				256))
			mantissa -= 1;
	}
	if (div_exp > NIX_TM_MAX_RATE_DIV_EXP ||
	    exponent > NIX_TM_MAX_RATE_EXPONENT ||
	    mantissa > NIX_TM_MAX_RATE_MANTISSA)
		return 0;

	if (div_exp_p)
		*div_exp_p = div_exp;
	if (exponent_p)
		*exponent_p = exponent;
	if (mantissa_p)
		*mantissa_p = mantissa;

	/* Calculate real rate value */
	return NIX_TM_SHAPER_RATE(exponent, mantissa, div_exp);
}
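
/* Worked example for the exponent path: value = 1E9 (1 Gbps) settles at
 * exponent = 8, mantissa = 244, div_exp = 0, since
 * (2E6 * ((256 + 244) << 8)) / 256 = 1E9, again an exact match. Rates with
 * no exact encoding are rounded down to the nearest representable rate by
 * the loops above.
 */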
uint64_t
nix_tm_shaper_burst_conv(uint64_t value, uint64_t *exponent_p,
			 uint64_t *mantissa_p)
{
	uint64_t exponent, mantissa;

	if (value < NIX_TM_MIN_SHAPER_BURST || value > NIX_TM_MAX_SHAPER_BURST)
		return 0;
	/* Calculate burst exponent and mantissa using
	 * the following formula:
	 *
	 * value = (((256 + mantissa) << (exponent + 1)) / 256)
	 */
	exponent = NIX_TM_MAX_BURST_EXPONENT;
	mantissa = NIX_TM_MAX_BURST_MANTISSA;

	while (value < (1ull << (exponent + 1)))
		exponent -= 1;

	while (value < ((256 + mantissa) << (exponent + 1)) / 256)
		mantissa -= 1;
	if (exponent > NIX_TM_MAX_BURST_EXPONENT ||
	    mantissa > NIX_TM_MAX_BURST_MANTISSA)
		return 0;

	if (exponent_p)
		*exponent_p = exponent;
	if (mantissa_p)
		*mantissa_p = mantissa;

	return NIX_TM_SHAPER_BURST(exponent, mantissa);
}
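
/* Worked example from the burst formula: value = 4096 (4 KB) settles at
 * exponent = 11, mantissa = 0, since ((256 + 0) << 12) / 256 = 4096.
 * As with rates, unrepresentable burst sizes are rounded down.
 */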
uint32_t
nix_tm_check_rr(struct nix *nix, uint32_t parent_id, enum roc_nix_tm_tree tree,
		uint32_t *rr_prio, uint32_t *max_prio)
{
	uint32_t node_cnt[NIX_TM_TLX_SP_PRIO_MAX];
	struct nix_tm_node_list *list;
	struct nix_tm_node *node;
	uint32_t rr_num = 0, i;
	uint32_t children = 0;
	uint32_t priority;
	memset(node_cnt, 0, sizeof(node_cnt));
	*rr_prio = 0xF;
	*max_prio = UINT32_MAX;
	list = nix_tm_node_list(nix, tree);
	TAILQ_FOREACH(node, list, node) {
		if (!node->parent)
			continue;

		if (!(node->parent->id == parent_id))
			continue;

		priority = node->priority;
		node_cnt[priority]++;
		children++;
	}
	for (i = 0; i < NIX_TM_TLX_SP_PRIO_MAX; i++) {
		if (!node_cnt[i])
			break;

		if (node_cnt[i] > rr_num) {
			*rr_prio = i;
			rr_num = node_cnt[i];
		}
	}

	/* RR group of single RR child is considered as SP */
	if (rr_num == 1) {
		*rr_prio = 0xF;
		rr_num = 0;
	}
	/* Max prio will be returned only when we have a non-zero prio
	 * or if a parent has a single child.
	 */
	if (i > 1 || (children == 1))
		*max_prio = i - 1;

	return rr_num;
}
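
/* Example: children at priorities {0, 1, 1, 1, 2} yield rr_prio = 1 and
 * rr_num = 3 (the three DWRR siblings), with *max_prio set to 2.
 */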
static uint16_t
nix_tm_max_prio(struct nix *nix, uint16_t hw_lvl)
{
	if (hw_lvl >= NIX_TXSCH_LVL_CNT)
		return 0;

	/* MDQ does not support SP */
	if (hw_lvl == NIX_TXSCH_LVL_MDQ)
		return 0;

	/* PF's TL1 with VFs enabled does not support SP */
	if (hw_lvl == NIX_TXSCH_LVL_TL1 && (!nix_tm_have_tl1_access(nix) ||
					    (nix->tm_flags & NIX_TM_TL1_NO_SP)))
		return 0;

	return NIX_TM_TLX_SP_PRIO_MAX - 1;
}
int
nix_tm_validate_prio(struct nix *nix, uint32_t lvl, uint32_t parent_id,
		     uint32_t priority, enum roc_nix_tm_tree tree)
{
	uint8_t priorities[NIX_TM_TLX_SP_PRIO_MAX];
	struct nix_tm_node_list *list;
	struct nix_tm_node *node;
	uint32_t rr_num = 0;
	uint32_t i;
	list = nix_tm_node_list(nix, tree);
	/* Validate priority against max */
	if (priority > nix_tm_max_prio(nix, nix_tm_lvl2nix(nix, lvl - 1)))
		return NIX_ERR_TM_PRIO_EXCEEDED;

	if (parent_id == ROC_NIX_TM_NODE_ID_INVALID)
		return 0;
	memset(priorities, 0, sizeof(priorities));
	priorities[priority] = 1;

	TAILQ_FOREACH(node, list, node) {
		if (!node->parent)
			continue;

		if (node->parent->id != parent_id)
			continue;

		priorities[node->priority]++;
	}
	for (i = 0; i < NIX_TM_TLX_SP_PRIO_MAX; i++)
		if (priorities[i] > 1)
			rr_num++;

	/* At most one RR group per parent */
	if (rr_num > 1)
		return NIX_ERR_TM_MULTIPLE_RR_GROUPS;
	/* Check for previous priority to avoid holes in priorities */
	if (priority && !priorities[priority - 1])
		return NIX_ERR_TM_PRIO_ORDER;

	return 0;
}
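
/* Example: attaching a child at priority 3 while no sibling exists at
 * priority 2 fails with NIX_ERR_TM_PRIO_ORDER, keeping the SP priority
 * space contiguous under every parent.
 */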
bool
nix_tm_child_res_valid(struct nix_tm_node_list *list,
		       struct nix_tm_node *parent)
{
	struct nix_tm_node *child;

	TAILQ_FOREACH(child, list, node) {
		if (child->parent != parent)
			continue;
		if (!(child->flags & NIX_TM_NODE_HWRES))
			return false;
	}
	return true;
}
uint8_t
nix_tm_sw_xoff_prep(struct nix_tm_node *node, bool enable,
		    volatile uint64_t *reg, volatile uint64_t *regval)
{
	uint32_t hw_lvl = node->hw_lvl;
	uint32_t schq = node->hw_id;
	uint8_t k = 0;

	plt_tm_dbg("sw xoff config node %s(%u) lvl %u id %u, enable %u (%p)",
		   nix_tm_hwlvl2str(hw_lvl), schq, node->lvl, node->id, enable,
		   node);
	regval[k] = enable;

	switch (hw_lvl) {
	case NIX_TXSCH_LVL_MDQ:
		reg[k] = NIX_AF_MDQX_SW_XOFF(schq);
		k++;
		break;
	case NIX_TXSCH_LVL_TL4:
		reg[k] = NIX_AF_TL4X_SW_XOFF(schq);
		k++;
		break;
	case NIX_TXSCH_LVL_TL3:
		reg[k] = NIX_AF_TL3X_SW_XOFF(schq);
		k++;
		break;
	case NIX_TXSCH_LVL_TL2:
		reg[k] = NIX_AF_TL2X_SW_XOFF(schq);
		k++;
		break;
	case NIX_TXSCH_LVL_TL1:
		reg[k] = NIX_AF_TL1X_SW_XOFF(schq);
		k++;
		break;
	default:
		break;
	}

	return k;
}
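
/* Callers collect the prepared (reg, regval) pairs into a NIX_AF mailbox
 * message and set num_regs from the returned count. A minimal sketch,
 * assuming the nix_txschq_config mbox layout used elsewhere in this driver:
 *
 *	req = mbox_alloc_msg_nix_txschq_cfg(mbox);
 *	req->lvl = node->hw_lvl;
 *	req->num_regs = nix_tm_sw_xoff_prep(node, true, req->reg, req->regval);
 *	rc = mbox_process(mbox);
 */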
/* Search for min rate in topology */
uint64_t
nix_tm_shaper_profile_rate_min(struct nix *nix)
{
	struct nix_tm_shaper_profile *profile;
	uint64_t rate_min = 1E9; /* 1 Gbps */

	TAILQ_FOREACH(profile, &nix->shaper_profile_list, shaper) {
		if (profile->peak.rate && profile->peak.rate < rate_min)
			rate_min = profile->peak.rate;

		if (profile->commit.rate && profile->commit.rate < rate_min)
			rate_min = profile->commit.rate;
	}

	return rate_min;
}
uint16_t
nix_tm_resource_avail(struct nix *nix, uint8_t hw_lvl, bool contig)
{
	uint32_t pos = 0, start_pos = 0;
	struct plt_bitmap *bmp;
	uint16_t count = 0;
	uint64_t slab = 0;

	bmp = contig ? nix->schq_contig_bmp[hw_lvl] : nix->schq_bmp[hw_lvl];
	plt_bitmap_scan_init(bmp);

	if (!plt_bitmap_scan(bmp, &pos, &slab))
		return count;

	/* Count bits set, stopping once the scan wraps to its start */
	start_pos = pos;
	do {
		count += __builtin_popcountll(slab);
		if (!plt_bitmap_scan(bmp, &pos, &slab))
			break;
	} while (pos != start_pos);

	return count;
}
uint16_t
nix_tm_resource_estimate(struct nix *nix, uint16_t *schq_contig, uint16_t *schq,
			 enum roc_nix_tm_tree tree)
{
	struct nix_tm_node_list *list;
	uint8_t contig_cnt, hw_lvl;
	struct nix_tm_node *parent;
	uint16_t cnt = 0, avail;

	list = nix_tm_node_list(nix, tree);
	/* Walk through parents from TL1..TL4 */
	for (hw_lvl = NIX_TXSCH_LVL_TL1; hw_lvl > 0; hw_lvl--) {
		TAILQ_FOREACH(parent, list, node) {
			if (hw_lvl != parent->hw_lvl)
				continue;

			/* Skip accounting for children whose
			 * parent does not indicate so.
			 */
			if (!parent->child_realloc)
				continue;

			/* Count children needed */
			schq[hw_lvl - 1] += parent->rr_num;
			if (parent->max_prio != UINT32_MAX) {
				contig_cnt = parent->max_prio + 1;
				schq_contig[hw_lvl - 1] += contig_cnt;
				/* When we have SP + DWRR at a parent,
				 * we will always have a spare schq at the RR
				 * prio location in the contiguous queues.
				 * Hence reduce the discontiguous count by 1.
				 */
				if (parent->max_prio > 0 && parent->rr_num)
					schq[hw_lvl - 1] -= 1;
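
				/* Example: a parent with max_prio = 2 and
				 * rr_num = 4 requests 3 contiguous schqs
				 * (prios 0..2) and only 4 - 1 = 3
				 * discontiguous ones, as one DWRR child
				 * rides on the spare contiguous slot.
				 */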
			}
		}
	}

	schq[nix->tm_root_lvl] = 1;
	if (!nix_tm_have_tl1_access(nix))
		schq[NIX_TXSCH_LVL_TL1] = 1;
	/* Now check for existing resources */
	for (hw_lvl = 0; hw_lvl < NIX_TXSCH_LVL_CNT; hw_lvl++) {
		avail = nix_tm_resource_avail(nix, hw_lvl, false);
		if (schq[hw_lvl] <= avail)
			schq[hw_lvl] = 0;
		else
			schq[hw_lvl] -= avail;

		/* For contiguous queues, realloc everything */
		avail = nix_tm_resource_avail(nix, hw_lvl, true);
		if (schq_contig[hw_lvl] <= avail)
			schq_contig[hw_lvl] = 0;

		cnt += schq[hw_lvl];
		cnt += schq_contig[hw_lvl];

		plt_tm_dbg("Estimate resources needed for %s: dis %u cont %u",
			   nix_tm_hwlvl2str(hw_lvl), schq[hw_lvl],
			   schq_contig[hw_lvl]);
	}

	return cnt;
}
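
/* Example: needing 5 discontiguous TL3 schqs when 2 are already free leaves
 * schq[TL3] = 3 to request. A contiguous need is either met entirely from
 * the existing contiguous pool or requested in full again, matching the
 * "realloc everything" note above (a partial contiguous block presumably
 * cannot be extended in place).
 */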
int
roc_nix_tm_node_lvl(struct roc_nix *roc_nix, uint32_t node_id)
{
	struct nix *nix = roc_nix_to_nix_priv(roc_nix);
	struct nix_tm_node *node;

	node = nix_tm_node_search(nix, node_id, ROC_NIX_TM_USER);
	if (!node)
		return NIX_ERR_TM_INVALID_NODE;

	return node->lvl;
}
struct roc_nix_tm_node *
roc_nix_tm_node_get(struct roc_nix *roc_nix, uint32_t node_id)
{
	struct nix *nix = roc_nix_to_nix_priv(roc_nix);
	struct nix_tm_node *node;

	node = nix_tm_node_search(nix, node_id, ROC_NIX_TM_USER);
	return (struct roc_nix_tm_node *)node;
}
struct roc_nix_tm_node *
roc_nix_tm_node_next(struct roc_nix *roc_nix, struct roc_nix_tm_node *__prev)
{
	struct nix_tm_node *prev = (struct nix_tm_node *)__prev;
	struct nix *nix = roc_nix_to_nix_priv(roc_nix);
	struct nix_tm_node_list *list;

	list = nix_tm_node_list(nix, ROC_NIX_TM_USER);

	/* HEAD of the list */
	if (!prev)
		return (struct roc_nix_tm_node *)TAILQ_FIRST(list);

	if (prev->tree != ROC_NIX_TM_USER)
		return NULL;

	return (struct roc_nix_tm_node *)TAILQ_NEXT(prev, node);
}
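
/* Usage sketch: pass NULL to start at the head, then feed back the previous
 * return value to walk the user tree; the same pattern applies to
 * roc_nix_tm_shaper_profile_next() below. process() is a placeholder.
 *
 *	struct roc_nix_tm_node *node = NULL;
 *
 *	while ((node = roc_nix_tm_node_next(roc_nix, node)) != NULL)
 *		process(node);
 */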
struct roc_nix_tm_shaper_profile *
roc_nix_tm_shaper_profile_get(struct roc_nix *roc_nix, uint32_t profile_id)
{
	struct nix *nix = roc_nix_to_nix_priv(roc_nix);
	struct nix_tm_shaper_profile *profile;

	profile = nix_tm_shaper_profile_search(nix, profile_id);
	return (struct roc_nix_tm_shaper_profile *)profile;
}
struct roc_nix_tm_shaper_profile *
roc_nix_tm_shaper_profile_next(struct roc_nix *roc_nix,
			       struct roc_nix_tm_shaper_profile *__prev)
{
	struct nix *nix = roc_nix_to_nix_priv(roc_nix);
	struct nix_tm_shaper_profile_list *list;
	struct nix_tm_shaper_profile *prev;

	prev = (struct nix_tm_shaper_profile *)__prev;
	list = &nix->shaper_profile_list;

	/* HEAD of the list */
	if (!prev)
		return (struct roc_nix_tm_shaper_profile *)TAILQ_FIRST(list);

	return (struct roc_nix_tm_shaper_profile *)TAILQ_NEXT(prev, shaper);
}
struct nix_tm_node *
nix_tm_node_alloc(void)
{
	struct nix_tm_node *node;

	node = plt_zmalloc(sizeof(struct nix_tm_node), 0);
	if (!node)
		return NULL;

	node->free_fn = plt_free;
	return node;
}
void
nix_tm_node_free(struct nix_tm_node *node)
{
	if (!node || node->free_fn == NULL)
		return;

	(node->free_fn)(node);
}
struct nix_tm_shaper_profile *
nix_tm_shaper_profile_alloc(void)
{
	struct nix_tm_shaper_profile *profile;

	profile = plt_zmalloc(sizeof(struct nix_tm_shaper_profile), 0);
	if (!profile)
		return NULL;

	profile->free_fn = plt_free;
	return profile;
}
void
nix_tm_shaper_profile_free(struct nix_tm_shaper_profile *profile)
{
	if (!profile || !profile->free_fn)
		return;

	(profile->free_fn)(profile);
}