1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(C) 2021 Marvell.
5 #ifndef _ROC_NIX_PRIV_H_
6 #define _ROC_NIX_PRIV_H_
/* CQ entry sizes in bytes and required CQ ring alignment */
9 #define NIX_CQ_ENTRY_SZ 128
10 #define NIX_CQ_ENTRY64_SZ 512
11 #define NIX_CQ_ALIGN ((uint16_t)512)
/* SQB (send queue buffer) pool sizing: max/default/min buffers per SQ,
 * spare list slots, and refill threshold (percent — TODO confirm).
 */
12 #define NIX_MAX_SQB ((uint16_t)512)
13 #define NIX_DEF_SQB ((uint16_t)16)
14 #define NIX_MIN_SQB ((uint16_t)8)
15 #define NIX_SQB_LIST_SPACE ((uint16_t)2)
16 #define NIX_SQB_LOWER_THRESH ((uint16_t)70)
18 /* Apply BP/DROP when CQ is 95% full */
19 #define NIX_CQ_THRESH_LEVEL (5 * 256 / 100)
/* Extra CQ headroom to work around a CQ-full errata — presumably
 * hardware-specific; verify against the errata list for the silicon.
 */
20 #define NIX_CQ_FULL_ERRATA_SKID (1024ull * 256)
/* RQ aura backpressure threshold: 95% of the aura count 'x' */
21 #define NIX_RQ_AURA_THRESH(x) (((x)*95) / 100)
23 /* IRQ triggered when NIX_LF_CINTX_CNT[QCOUNT] crosses this value */
24 #define CQ_CQE_THRESH_DEFAULT 0x1ULL
25 #define CQ_TIMER_THRESH_DEFAULT 0xAULL /* ~1usec i.e (0xA * 100nsec) */
26 #define CQ_TIMER_THRESH_MAX 255
/* Traffic manager (TM) limits and invalid-value sentinels */
34 #define NIX_TM_MAX_HW_TXSCHQ 1024
35 #define NIX_TM_HW_ID_INVALID UINT32_MAX
36 #define NIX_TM_CHAN_INVALID UINT16_MAX
/* Bits tested/set in nix->tm_flags (see nix_tm_have_tl1_access()) */
39 #define NIX_TM_HIERARCHY_ENA BIT_ULL(0)
40 #define NIX_TM_TL1_NO_SP BIT_ULL(1)
41 #define NIX_TM_TL1_ACCESS BIT_ULL(2)
44 /** Token bucket rate (bytes per second) */
47 /** Token bucket size (bytes), a.k.a. max burst size */
/* Linkage on the owning tree's node list (struct nix_tm_node_list) */
52 TAILQ_ENTRY(nix_tm_node) node;
/* Tree this node belongs to (default/rate-limit/user) */
55 enum roc_nix_tm_tree tree;
62 uint32_t shaper_profile_id;
/* Destructor invoked by nix_tm_node_free() — NOTE(review): presumably;
 * confirm against the free path.
 */
63 void (*free_fn)(void *node);
71 uint32_t parent_hw_id;
/* Bits for the node's flags word: HW resource attached / node enabled */
73 #define NIX_TM_NODE_HWRES BIT_ULL(0)
74 #define NIX_TM_NODE_ENABLED BIT_ULL(1)
75 /* Shaper algorithm for RED state @NIX_REDALG_E */
76 uint32_t red_algo : 2;
/* Packet-mode shaping selected, and whether it was explicitly set */
77 uint32_t pkt_mode : 1;
78 uint32_t pkt_mode_set : 1;
82 struct nix_tm_node *parent;
84 /* Non-leaf node sp count */
85 uint32_t n_sp_priorities;
/* Shaper profile: committed and peak token buckets plus list linkage */
92 struct nix_tm_shaper_profile {
93 TAILQ_ENTRY(nix_tm_shaper_profile) shaper;
94 struct nix_tm_tb commit;
95 struct nix_tm_tb peak;
/* Destructor for the profile object */
101 void (*free_fn)(void *profile);
/* List head types used by struct nix below */
106 TAILQ_HEAD(nix_tm_node_list, nix_tm_node);
107 TAILQ_HEAD(nix_tm_shaper_profile_list, nix_tm_shaper_profile);
/* RSS redirection tables, one per RSS group */
110 uint16_t reta[ROC_NIX_RSS_GRPS][ROC_NIX_RSS_RETA_MAX];
111 enum roc_nix_rss_reta_sz reta_sz;
112 struct plt_pci_device *pci_dev;
/* Backpressure IDs, one per channel */
113 uint16_t bpid[NIX_MAX_CHAN];
/* Queue/completion interrupt bookkeeping memory and counts */
114 struct nix_qint *qints_mem;
115 struct nix_qint *cints_mem;
116 uint8_t configured_qints;
117 uint8_t configured_cints;
118 struct roc_nix_sq **sqs;
119 uint16_t vwqe_interval;
120 uint16_t tx_chan_base;
121 uint16_t rx_chan_base;
122 uint16_t nb_rx_queues;
123 uint16_t nb_tx_queues;
/* LSO format indices programmed into hardware */
124 uint8_t lso_tsov6_idx;
125 uint8_t lso_tsov4_idx;
126 uint8_t lso_udp_tun_idx[ROC_NIX_LSO_TUN_MAX];
127 uint8_t lso_tun_idx[ROC_NIX_LSO_TUN_MAX];
139 /* Without FCS, with L2 overhead */
156 /* Traffic manager info */
158 /* Contiguous resources per lvl */
159 struct plt_bitmap *schq_contig_bmp[NIX_TXSCH_LVL_CNT];
160 /* Discontiguous resources per lvl */
161 struct plt_bitmap *schq_bmp[NIX_TXSCH_LVL_CNT];
164 struct nix_tm_shaper_profile_list shaper_profile_list;
/* One node list per TM tree kind; tm_tree selects the active one */
165 struct nix_tm_node_list trees[ROC_NIX_TM_TREE_MAX];
166 enum roc_nix_tm_tree tm_tree;
167 uint64_t tm_rate_min;
168 uint16_t tm_root_lvl;
170 uint16_t tm_link_cfg_lvl;
/* Reserved schq counts per level (contiguous / discontiguous) */
171 uint16_t contig_rsvd[NIX_TXSCH_LVL_CNT];
172 uint16_t discontig_rsvd[NIX_TXSCH_LVL_CNT];
/* CPT LF MSI-X offsets — assumes one entry per CPT LF; TODO confirm */
175 uint16_t cpt_msixoff[MAX_RVU_BLKLF_CNT];
180 uint32_t inb_spi_mask;
/* Inline IPsec outbound error reporting SSO PF_FUNC */
183 uint16_t outb_err_sso_pffunc;
184 struct roc_cpt_lf *cpt_lf_base;
186 /* Mode provided by driver */
189 } __plt_cache_aligned;
/* Internal error codes. Base is -2048 so they do not collide with
 * errno-style negative returns; later members auto-increment toward zero.
 */
191 enum nix_err_status {
192 NIX_ERR_PARAM = -2048,
194 NIX_ERR_INVALID_RANGE,
198 NIX_ERR_QUEUE_INVALID_RANGE,
199 NIX_ERR_AQ_READ_FAILED,
200 NIX_ERR_AQ_WRITE_FAILED,
201 NIX_ERR_TM_LEAF_NODE_GET,
202 NIX_ERR_TM_INVALID_LVL,
203 NIX_ERR_TM_INVALID_PRIO,
204 NIX_ERR_TM_INVALID_PARENT,
205 NIX_ERR_TM_NODE_EXISTS,
206 NIX_ERR_TM_INVALID_NODE,
207 NIX_ERR_TM_INVALID_SHAPER_PROFILE,
208 NIX_ERR_TM_PKT_MODE_MISMATCH,
209 NIX_ERR_TM_WEIGHT_EXCEED,
210 NIX_ERR_TM_CHILD_EXISTS,
211 NIX_ERR_TM_INVALID_PEAK_SZ,
212 NIX_ERR_TM_INVALID_PEAK_RATE,
213 NIX_ERR_TM_INVALID_COMMIT_SZ,
214 NIX_ERR_TM_INVALID_COMMIT_RATE,
215 NIX_ERR_TM_SHAPER_PROFILE_IN_USE,
216 NIX_ERR_TM_SHAPER_PROFILE_EXISTS,
217 NIX_ERR_TM_SHAPER_PKT_LEN_ADJUST,
218 NIX_ERR_TM_INVALID_TREE,
219 NIX_ERR_TM_PARENT_PRIO_UPDATE,
220 NIX_ERR_TM_PRIO_EXCEEDED,
221 NIX_ERR_TM_PRIO_ORDER,
222 NIX_ERR_TM_MULTIPLE_RR_GROUPS,
223 NIX_ERR_TM_SQ_UPDATE_FAIL,
/* Hardware queue-size encodings (entry counts per enumerator) */
228 nix_q_size_16, /* 16 entries */
229 nix_q_size_64, /* 64 entries */
236 nix_q_size_1M, /* Million entries */
/* Private 'struct nix' state lives inside roc_nix->reserved[]; these two
 * helpers convert between the public handle and the private state.
 */
240 static inline struct nix *
241 roc_nix_to_nix_priv(struct roc_nix *roc_nix)
243 return (struct nix *)&roc_nix->reserved[0];
/* Inverse of roc_nix_to_nix_priv(): recover the public handle by
 * subtracting the offset of the reserved[] area.
 */
246 static inline struct roc_nix *
247 nix_priv_to_roc_nix(struct nix *nix)
249 return (struct roc_nix *)((char *)nix -
250 offsetof(struct roc_nix, reserved));
/* IRQ setup/teardown for the NIX LF (implemented in the .c files) */
254 int nix_register_irqs(struct nix *nix);
255 void nix_unregister_irqs(struct nix *nix);
/* Mask covering every TM tree kind, for bulk free/reset paths */
258 #define NIX_TM_TREE_MASK_ALL \
259 (BIT(ROC_NIX_TM_DEFAULT) | BIT(ROC_NIX_TM_RLIMIT) | \
260 BIT(ROC_NIX_TM_USER))
263 * NIX_TM_DFLT_RR_WT * NIX_TM_RR_QUANTUM_MAX / ROC_NIX_TM_MAX_SCHED_WT
265 #define NIX_TM_DFLT_RR_WT 71
267 /* Default TL1 priority and Quantum from AF */
268 #define NIX_TM_TL1_DFLT_RR_QTM ((1 << 24) - 1)
269 #define NIX_TM_TL1_DFLT_RR_PRIO 1
/* Hardware encoding of a shaper burst value (exponent/mantissa form) */
271 struct nix_tm_shaper_data {
272 uint64_t burst_exponent;
273 uint64_t burst_mantissa;
/* Convert an abstract scheduler weight into the CN9K RR quantum register
 * encoding; on CN10K and later the weight is programmed directly.
 */
281 static inline uint64_t
282 nix_tm_weight_to_rr_quantum(uint64_t weight)
284 uint64_t max = NIX_CN9K_TM_RR_QUANTUM_MAX;
286 /* From CN10K onwards, we only configure RR weight */
287 if (!roc_model_is_cn9k())
/* CN9K path: clamp then scale weight into the quantum range */
290 weight &= (uint64_t)max;
291 return (weight * max) / ROC_NIX_CN9K_TM_RR_WEIGHT_MAX;
/* True when the AF granted this LF direct TL1 scheduler access */
295 nix_tm_have_tl1_access(struct nix *nix)
297 return !!(nix->tm_flags & NIX_TM_TL1_ACCESS);
/* Leaf level differs with TL1 access: QUEUE with it, SCH4 without */
301 nix_tm_is_leaf(struct nix *nix, int lvl)
303 if (nix_tm_have_tl1_access(nix))
304 return (lvl == ROC_TM_LVL_QUEUE);
305 return (lvl == ROC_TM_LVL_SCH4);
/* Node list for the requested TM tree kind */
308 static inline struct nix_tm_node_list *
309 nix_tm_node_list(struct nix *nix, enum roc_nix_tm_tree tree)
311 return &nix->trees[tree];
/* Human-readable name for a hardware txsch level (debug/log use) */
314 static inline const char *
315 nix_tm_hwlvl2str(uint32_t hw_lvl)
318 case NIX_TXSCH_LVL_MDQ:
320 case NIX_TXSCH_LVL_TL4:
322 case NIX_TXSCH_LVL_TL3:
324 case NIX_TXSCH_LVL_TL2:
326 case NIX_TXSCH_LVL_TL1:
/* Human-readable name for a TM tree kind (debug/log use) */
335 static inline const char *
336 nix_tm_tree2str(enum roc_nix_tm_tree tree)
338 if (tree == ROC_NIX_TM_DEFAULT)
339 return "Default Tree";
340 else if (tree == ROC_NIX_TM_RLIMIT)
341 return "Rate Limit Tree";
342 else if (tree == ROC_NIX_TM_USER)
/* TM configuration, tree construction and teardown (internal API).
 * Functions taking 'struct roc_nix *' are entry points from the public
 * layer; those taking 'struct nix *' operate on the private state.
 */
351 int nix_tm_conf_init(struct roc_nix *roc_nix);
352 void nix_tm_conf_fini(struct roc_nix *roc_nix);
353 int nix_tm_leaf_data_get(struct nix *nix, uint16_t sq, uint32_t *rr_quantum,
355 int nix_tm_sq_flush_pre(struct roc_nix_sq *sq);
356 int nix_tm_sq_flush_post(struct roc_nix_sq *sq);
357 int nix_tm_smq_xoff(struct nix *nix, struct nix_tm_node *node, bool enable);
358 int nix_tm_prepare_default_tree(struct roc_nix *roc_nix);
359 int nix_tm_node_add(struct roc_nix *roc_nix, struct nix_tm_node *node);
360 int nix_tm_node_delete(struct roc_nix *roc_nix, uint32_t node_id,
361 enum roc_nix_tm_tree tree, bool free);
362 int nix_tm_free_node_resource(struct nix *nix, struct nix_tm_node *node);
363 int nix_tm_free_resources(struct roc_nix *roc_nix, uint32_t tree_mask,
365 int nix_tm_clear_path_xoff(struct nix *nix, struct nix_tm_node *node);
366 void nix_tm_clear_shaper_profiles(struct nix *nix);
367 int nix_tm_alloc_txschq(struct nix *nix, enum roc_nix_tm_tree tree);
368 int nix_tm_assign_resources(struct nix *nix, enum roc_nix_tm_tree tree);
369 int nix_tm_release_resources(struct nix *nix, uint8_t hw_lvl, bool contig,
371 void nix_tm_copy_rsp_to_nix(struct nix *nix, struct nix_txsch_alloc_rsp *rsp);
373 int nix_tm_txsch_reg_config(struct nix *nix, enum roc_nix_tm_tree tree);
374 int nix_tm_update_parent_info(struct nix *nix, enum roc_nix_tm_tree tree);
375 int nix_tm_sq_sched_conf(struct nix *nix, struct nix_tm_node *node,
376 bool rr_quantum_only);
/* RQ context programming and backpressure configuration */
378 int nix_rq_cn9k_cfg(struct dev *dev, struct roc_nix_rq *rq, uint16_t qints,
380 int nix_rq_cfg(struct dev *dev, struct roc_nix_rq *rq, uint16_t qints, bool cfg,
382 int nix_rq_ena_dis(struct dev *dev, struct roc_nix_rq *rq, bool enable);
383 int nix_tm_bp_config_get(struct roc_nix *roc_nix, bool *is_enabled);
384 int nix_tm_bp_config_set(struct roc_nix *roc_nix, uint16_t sq, uint16_t tc,
386 void nix_rq_vwqe_flush(struct roc_nix_rq *rq, uint16_t vwqe_interval);
/* Level translation, priority validation and register-prep helpers */
391 uint16_t nix_tm_lvl2nix(struct nix *nix, uint32_t lvl);
392 uint16_t nix_tm_lvl2nix_tl1_root(uint32_t lvl);
393 uint16_t nix_tm_lvl2nix_tl2_root(uint32_t lvl);
394 uint16_t nix_tm_resource_avail(struct nix *nix, uint8_t hw_lvl, bool contig);
395 int nix_tm_validate_prio(struct nix *nix, uint32_t lvl, uint32_t parent_id,
396 uint32_t priority, enum roc_nix_tm_tree tree);
397 struct nix_tm_node *nix_tm_node_search(struct nix *nix, uint32_t node_id,
398 enum roc_nix_tm_tree tree);
399 struct nix_tm_shaper_profile *nix_tm_shaper_profile_search(struct nix *nix,
401 uint8_t nix_tm_sw_xoff_prep(struct nix_tm_node *node, bool enable,
402 volatile uint64_t *reg, volatile uint64_t *regval);
403 uint32_t nix_tm_check_rr(struct nix *nix, uint32_t parent_id,
404 enum roc_nix_tm_tree tree, uint32_t *rr_prio,
406 uint64_t nix_tm_shaper_profile_rate_min(struct nix *nix);
407 uint64_t nix_tm_shaper_rate_conv(uint64_t value, uint64_t *exponent_p,
408 uint64_t *mantissa_p, uint64_t *div_exp_p,
410 uint64_t nix_tm_shaper_burst_conv(uint64_t value, uint64_t *exponent_p,
411 uint64_t *mantissa_p);
412 bool nix_tm_child_res_valid(struct nix_tm_node_list *list,
413 struct nix_tm_node *parent);
414 uint16_t nix_tm_resource_estimate(struct nix *nix, uint16_t *schq_contig,
415 uint16_t *schq, enum roc_nix_tm_tree tree);
416 uint8_t nix_tm_tl1_default_prep(uint32_t schq, volatile uint64_t *reg,
417 volatile uint64_t *regval);
418 uint8_t nix_tm_topology_reg_prep(struct nix *nix, struct nix_tm_node *node,
419 volatile uint64_t *reg,
420 volatile uint64_t *regval,
421 volatile uint64_t *regval_mask);
422 uint8_t nix_tm_sched_reg_prep(struct nix *nix, struct nix_tm_node *node,
423 volatile uint64_t *reg,
424 volatile uint64_t *regval);
425 uint8_t nix_tm_shaper_reg_prep(struct nix_tm_node *node,
426 struct nix_tm_shaper_profile *profile,
427 volatile uint64_t *reg,
428 volatile uint64_t *regval);
/* Allocation/free pairs for TM node and shaper-profile objects */
429 struct nix_tm_node *nix_tm_node_alloc(void);
430 void nix_tm_node_free(struct nix_tm_node *node);
431 struct nix_tm_shaper_profile *nix_tm_shaper_profile_alloc(void);
432 void nix_tm_shaper_profile_free(struct nix_tm_shaper_profile *profile);
/* Register/context dump helpers (debug) */
434 uint64_t nix_get_blkaddr(struct dev *dev);
435 void nix_lf_rq_dump(__io struct nix_cn10k_rq_ctx_s *ctx);
436 int nix_lf_gen_reg_dump(uintptr_t nix_lf_base, uint64_t *data);
437 int nix_lf_stat_reg_dump(uintptr_t nix_lf_base, uint64_t *data,
438 uint8_t lf_tx_stats, uint8_t lf_rx_stats);
439 int nix_lf_int_reg_dump(uintptr_t nix_lf_base, uint64_t *data, uint16_t qints,
441 int nix_q_ctx_get(struct dev *dev, uint8_t ctype, uint16_t qid,
/* Telemetry node registration for RQ/CQ/SQ objects */
447 int nix_tel_node_add(struct roc_nix *roc_nix);
448 void nix_tel_node_del(struct roc_nix *roc_nix);
449 int nix_tel_node_add_rq(struct roc_nix_rq *rq);
450 int nix_tel_node_add_cq(struct roc_nix_cq *cq);
451 int nix_tel_node_add_sq(struct roc_nix_sq *sq);
453 #endif /* _ROC_NIX_PRIV_H_ */