1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2019-2021 Broadcom
9 #include <rte_common.h>
10 #include <rte_errno.h>
15 #include "tf_common.h"
17 #include "tf_em_common.h"
21 #include "tf_ext_flow_handle.h"
25 #define PTU_PTE_VALID 0x1UL
26 #define PTU_PTE_LAST 0x2UL
27 #define PTU_PTE_NEXT_TO_LAST 0x4UL
29 /* Number of pointers per page_size */
30 #define MAX_PAGE_PTRS(page_size) ((page_size) / sizeof(void *))
33 * Function to free a page table
36 * Pointer to the page table to free
39 tf_em_free_pg_tbl(struct hcapi_cfa_em_page_tbl *tp)
43 for (i = 0; i < tp->pg_count; i++) {
44 if (!tp->pg_va_tbl[i]) {
46 "No mapping for page: %d table: %016" PRIu64 "\n",
48 (uint64_t)(uintptr_t)tp);
52 tfp_free(tp->pg_va_tbl[i]);
53 tp->pg_va_tbl[i] = NULL;
57 tfp_free(tp->pg_va_tbl);
59 tfp_free(tp->pg_pa_tbl);
64 * Function to free an EM table
67 * Pointer to the EM table to free
70 tf_em_free_page_table(struct hcapi_cfa_em_table *tbl)
72 struct hcapi_cfa_em_page_tbl *tp;
75 for (i = 0; i < tbl->num_lvl; i++) {
78 "EEM: Freeing page table: size %u lvl %d cnt %u\n",
83 tf_em_free_pg_tbl(tp);
89 tbl->num_data_pages = 0;
93 * Allocation of page tables
96 * Pointer to a TruFlow handle
99 * Page count to allocate
106 * -ENOMEM - Out of memory
109 tf_em_alloc_pg_tbl(struct hcapi_cfa_em_page_tbl *tp,
114 struct tfp_calloc_parms parms;
116 parms.nitems = pg_count;
117 parms.size = sizeof(void *);
120 if (tfp_calloc(&parms) != 0)
123 tp->pg_va_tbl = parms.mem_va;
125 if (tfp_calloc(&parms) != 0) {
126 tfp_free(tp->pg_va_tbl);
130 tp->pg_pa_tbl = parms.mem_va;
133 tp->pg_size = pg_size;
135 for (i = 0; i < pg_count; i++) {
137 parms.size = pg_size;
138 parms.alignment = TF_EM_PAGE_ALIGNMENT;
140 if (tfp_calloc(&parms) != 0)
143 tp->pg_pa_tbl[i] = (uintptr_t)parms.mem_pa;
144 tp->pg_va_tbl[i] = parms.mem_va;
146 memset(tp->pg_va_tbl[i], 0, pg_size);
153 tf_em_free_pg_tbl(tp);
158 * Allocates EM page tables
161 * Table to allocate pages for
165 * -ENOMEM - Out of memory
168 tf_em_alloc_page_table(struct hcapi_cfa_em_table *tbl)
170 struct hcapi_cfa_em_page_tbl *tp;
175 for (i = 0; i < tbl->num_lvl; i++) {
176 tp = &tbl->pg_tbl[i];
178 rc = tf_em_alloc_pg_tbl(tp,
183 "Failed to allocate page table: lvl: %d, rc:%s\n",
189 for (j = 0; j < tp->pg_count; j++) {
191 "EEM: Allocated page table: size %u lvl %d cnt"
196 (void *)(uintptr_t)tp->pg_va_tbl[j],
197 (void *)(uintptr_t)tp->pg_pa_tbl[j]);
203 tf_em_free_page_table(tbl);
208 * Links EM page tables
211 * Pointer to page table
214 * Pointer to the next page table
217 * Flag controlling if the page table is last
220 tf_em_link_page_table(struct hcapi_cfa_em_page_tbl *tp,
221 struct hcapi_cfa_em_page_tbl *tp_next,
224 uint64_t *pg_pa = tp_next->pg_pa_tbl;
231 for (i = 0; i < tp->pg_count; i++) {
232 pg_va = tp->pg_va_tbl[i];
234 for (j = 0; j < MAX_PAGE_PTRS(tp->pg_size); j++) {
235 if (k == tp_next->pg_count - 2 && set_pte_last)
236 valid = PTU_PTE_NEXT_TO_LAST | PTU_PTE_VALID;
237 else if (k == tp_next->pg_count - 1 && set_pte_last)
238 valid = PTU_PTE_LAST | PTU_PTE_VALID;
240 valid = PTU_PTE_VALID;
242 pg_va[j] = tfp_cpu_to_le_64(pg_pa[k] | valid);
243 if (++k >= tp_next->pg_count)
250 * Setup a EM page table
253 * Pointer to EM page table
256 tf_em_setup_page_table(struct hcapi_cfa_em_table *tbl)
258 struct hcapi_cfa_em_page_tbl *tp_next;
259 struct hcapi_cfa_em_page_tbl *tp;
260 bool set_pte_last = 0;
263 for (i = 0; i < tbl->num_lvl - 1; i++) {
264 tp = &tbl->pg_tbl[i];
265 tp_next = &tbl->pg_tbl[i + 1];
266 if (i == tbl->num_lvl - 2)
268 tf_em_link_page_table(tp, tp_next, set_pte_last);
271 tbl->l0_addr = tbl->pg_tbl[TF_PT_LVL_0].pg_va_tbl[0];
272 tbl->l0_dma_addr = tbl->pg_tbl[TF_PT_LVL_0].pg_pa_tbl[0];
276 * Unregisters EM Ctx in Firmware
279 * Pointer to a TruFlow handle
282 * Pointer to a table scope control block
285 * Receive or transmit direction
288 tf_em_ctx_unreg(struct tf *tfp,
289 struct tf_tbl_scope_cb *tbl_scope_cb,
292 struct hcapi_cfa_em_ctx_mem_info *ctxp = &tbl_scope_cb->em_ctx_info[dir];
293 struct hcapi_cfa_em_table *tbl;
296 for (i = TF_KEY0_TABLE; i < TF_MAX_TABLE; i++) {
297 tbl = &ctxp->em_tables[i];
299 if (tbl->num_entries != 0 && tbl->entry_size != 0) {
300 tf_msg_em_mem_unrgtr(tfp, &tbl->ctx_id);
301 tf_em_free_page_table(tbl);
307 * Registers EM Ctx in Firmware
310 * Pointer to a TruFlow handle
313 * Pointer to a table scope control block
316 * Receive or transmit direction
320 * -ENOMEM - Out of Memory
323 tf_em_ctx_reg(struct tf *tfp,
324 struct tf_tbl_scope_cb *tbl_scope_cb,
327 struct hcapi_cfa_em_ctx_mem_info *ctxp = &tbl_scope_cb->em_ctx_info[dir];
328 struct hcapi_cfa_em_table *tbl;
332 for (i = TF_KEY0_TABLE; i < TF_MAX_TABLE; i++) {
333 tbl = &ctxp->em_tables[i];
335 if (tbl->num_entries && tbl->entry_size) {
336 rc = tf_em_size_table(tbl, TF_EM_PAGE_SIZE);
341 rc = tf_em_alloc_page_table(tbl);
345 tf_em_setup_page_table(tbl);
346 rc = tf_msg_em_mem_rgtr(tfp,
348 TF_EM_PAGE_SIZE_ENUM,
358 tf_em_ctx_unreg(tfp, tbl_scope_cb, dir);
363 tf_em_ext_alloc(struct tf *tfp,
364 struct tf_alloc_tbl_scope_parms *parms)
368 struct tf_tbl_scope_cb *tbl_scope_cb;
369 struct hcapi_cfa_em_table *em_tables;
370 struct tf_free_tbl_scope_parms free_parms;
371 struct tf_rm_allocate_parms aparms = { 0 };
372 struct tf_rm_free_parms fparms = { 0 };
373 struct tfp_calloc_parms cparms;
374 struct tf_session *tfs = NULL;
375 struct em_ext_db *ext_db = NULL;
376 void *ext_ptr = NULL;
380 rc = tf_session_get_session_internal(tfp, &tfs);
382 TFP_DRV_LOG(ERR, "Failed to get tf_session, rc:%s\n",
387 rc = tf_session_get_em_ext_db(tfp, &ext_ptr);
390 "Failed to get em_ext_db from session, rc:%s\n",
394 ext_db = (struct em_ext_db *)ext_ptr;
396 rc = tfp_get_pf(tfp, &pf);
399 "EEM: PF query error rc:%s\n",
404 /* Get Table Scope control block from the session pool */
405 aparms.rm_db = ext_db->eem_db[TF_DIR_RX];
406 aparms.subtype = TF_EM_TBL_TYPE_TBL_SCOPE;
407 aparms.index = (uint32_t *)&parms->tbl_scope_id;
408 rc = tf_rm_allocate(&aparms);
411 "Failed to allocate table scope\n");
415 /* Create tbl_scope, initialize and attach to the session */
417 cparms.size = sizeof(struct tf_tbl_scope_cb);
418 cparms.alignment = 0;
419 rc = tfp_calloc(&cparms);
423 "Failed to allocate session table scope, rc:%s\n",
428 tbl_scope_cb = cparms.mem_va;
429 tbl_scope_cb->tbl_scope_id = parms->tbl_scope_id;
430 tbl_scope_cb->pf = pf;
432 for (dir = 0; dir < TF_DIR_MAX; dir++) {
433 rc = tf_msg_em_qcaps(tfp,
435 &tbl_scope_cb->em_caps[dir]);
438 "EEM: Unable to query for EEM capability,"
446 * Validate and setup table sizes
448 if (tf_em_validate_num_entries(tbl_scope_cb, parms))
451 for (dir = 0; dir < TF_DIR_MAX; dir++) {
453 * Allocate tables and signal configuration to FW
455 rc = tf_em_ctx_reg(tfp, tbl_scope_cb, dir);
458 "EEM: Unable to register for EEM ctx,"
464 em_tables = tbl_scope_cb->em_ctx_info[dir].em_tables;
465 rc = tf_msg_em_cfg(tfp,
466 em_tables[TF_KEY0_TABLE].num_entries,
467 em_tables[TF_KEY0_TABLE].ctx_id,
468 em_tables[TF_KEY1_TABLE].ctx_id,
469 em_tables[TF_RECORD_TABLE].ctx_id,
470 em_tables[TF_EFC_TABLE].ctx_id,
471 parms->hw_flow_cache_flush_timer,
475 "TBL: Unable to configure EEM in firmware"
481 rc = tf_msg_em_op(tfp,
483 HWRM_TF_EXT_EM_OP_INPUT_OP_EXT_EM_ENABLE);
487 "EEM: Unable to enable EEM in firmware"
493 /* Allocate the pool of offsets of the external memory.
494 * Initially, this is a single fixed size pool for all external
495 * actions related to a single table scope.
497 rc = tf_create_tbl_pool_external(dir,
499 em_tables[TF_RECORD_TABLE].num_entries,
500 em_tables[TF_RECORD_TABLE].entry_size);
503 "%s TBL: Unable to allocate idx pools %s\n",
510 /* Insert into session tbl_scope list */
511 ll_insert(&ext_db->tbl_scope_ll, &tbl_scope_cb->ll_entry);
515 free_parms.tbl_scope_id = parms->tbl_scope_id;
516 /* Insert into session list prior to ext_free */
517 ll_insert(&ext_db->tbl_scope_ll, &tbl_scope_cb->ll_entry);
518 tf_em_ext_free(tfp, &free_parms);
522 tfp_free(tbl_scope_cb);
525 /* Free Table control block */
526 fparms.rm_db = ext_db->eem_db[TF_DIR_RX];
527 fparms.subtype = TF_EM_TBL_TYPE_TBL_SCOPE;
528 fparms.index = parms->tbl_scope_id;
529 rc = tf_rm_free(&fparms);
531 TFP_DRV_LOG(ERR, "Failed to free table scope\n");
/* NOTE(review): this listing is elided (original line numbers embedded,
 * several source lines missing) and the function body runs past the end of
 * the visible chunk, so the code is left byte-identical; comments only.
 *
 * Frees an external EEM table scope: looks up the control block by id,
 * returns the table-scope id to the RM, then per direction destroys the
 * external index pools, disables EEM in FW and unregisters/frees the EM
 * context, and finally removes the control block from the session list
 * and frees it.
 */
537 tf_em_ext_free(struct tf *tfp,
538 struct tf_free_tbl_scope_parms *parms)
542 struct tf_tbl_scope_cb *tbl_scope_cb;
543 struct tf_session *tfs;
544 struct em_ext_db *ext_db = NULL;
545 void *ext_ptr = NULL;
546 struct tf_rm_free_parms aparms = { 0 };
/* Session must be valid before touching any session-owned state. */
548 rc = tf_session_get_session_internal(tfp, &tfs);
550 TFP_DRV_LOG(ERR, "Failed to get tf_session, rc:%s\n",
555 rc = tf_session_get_em_ext_db(tfp, &ext_ptr);
558 "Failed to get em_ext_db from session, rc:%s\n",
562 ext_db = (struct em_ext_db *)ext_ptr;
/* Resolve the control block from the scope id supplied by the caller. */
564 tbl_scope_cb = tf_em_ext_common_tbl_scope_find(tfp, parms->tbl_scope_id);
565 if (tbl_scope_cb == NULL) {
566 TFP_DRV_LOG(ERR, "Table scope error\n");
/* Free Table control block */
571 aparms.rm_db = ext_db->eem_db[TF_DIR_RX];
572 aparms.subtype = TF_EM_TBL_TYPE_TBL_SCOPE;
573 aparms.index = parms->tbl_scope_id;
574 rc = tf_rm_free(&aparms);
577 "Failed to free table scope\n");
/* free table scope locks */
581 for (dir = 0; dir < TF_DIR_MAX; dir++) {
/* Free associated external pools
 */
584 tf_destroy_tbl_pool_external(dir,
/* Disable EEM in FW for this direction before unregistering memory. */
588 HWRM_TF_EXT_EM_OP_INPUT_OP_EXT_EM_DISABLE);
/* free table scope and all associated resources */
591 tf_em_ctx_unreg(tfp, tbl_scope_cb, dir);
/* remove from session list and free tbl_scope */
595 ll_delete(&ext_db->tbl_scope_ll, &tbl_scope_cb->ll_entry);
596 tfp_free(tbl_scope_cb);