1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2019-2020 Broadcom
12 #include "tf_session.h"
15 #include "tf_msg_common.h"
17 #include "hsi_struct_def_dpdk.h"
21 * Endian converts min and max values from the HW response to the query
 * structure. 'element' is token-pasted with _min/_max to select the
 * response fields; values arrive little-endian from firmware.
23 #define TF_HW_RESP_TO_QUERY(query, index, response, element) do { \
24 (query)->hw_query[index].min = \
25 tfp_le_to_cpu_16(response. element ## _min); \
26 (query)->hw_query[index].max = \
27 tfp_le_to_cpu_16(response. element ## _max); \
31 * Endian converts the number of entries from the alloc to the request.
 * Expands to a single expression (no do/while needed) writing the
 * num_<element> request field in little-endian.
33 #define TF_HW_ALLOC_TO_REQ(alloc, index, request, element) \
34 (request. num_ ## element = tfp_cpu_to_le_16((alloc)->hw_num[index]))
37 * Endian converts the start and stride value from the free to the request
 * (<element>_start / <element>_stride request fields, little-endian).
39 #define TF_HW_FREE_TO_REQ(hw_entry, index, request, element) do { \
40 request.element ## _start = \
41 tfp_cpu_to_le_16(hw_entry[index].start); \
42 request.element ## _stride = \
43 tfp_cpu_to_le_16(hw_entry[index].stride); \
47 * Endian converts the start and stride from the HW response to the
 * local hw_entry table (CPU byte order).
50 #define TF_HW_RESP_TO_ALLOC(hw_entry, index, response, element) do { \
51 hw_entry[index].start = \
52 tfp_le_to_cpu_16(response.element ## _start); \
53 hw_entry[index].stride = \
54 tfp_le_to_cpu_16(response.element ## _stride); \
58 * Endian converts min and max values from the SRAM response to the
 * query structure. SRAM mirror of TF_HW_RESP_TO_QUERY above; same
 * token-pasting convention for the _min/_max response fields.
61 #define TF_SRAM_RESP_TO_QUERY(query, index, response, element) do { \
62 (query)->sram_query[index].min = \
63 tfp_le_to_cpu_16(response.element ## _min); \
64 (query)->sram_query[index].max = \
65 tfp_le_to_cpu_16(response.element ## _max); \
69 * Endian converts the number of entries from the action (alloc) to
 * the num_<element> request field; single-expression macro.
72 #define TF_SRAM_ALLOC_TO_REQ(action, index, request, element) \
73 (request. num_ ## element = tfp_cpu_to_le_16((action)->sram_num[index]))
76 * Endian converts the start and stride value from the free to the request
 * (SRAM mirror of TF_HW_FREE_TO_REQ).
78 #define TF_SRAM_FREE_TO_REQ(sram_entry, index, request, element) do { \
79 request.element ## _start = \
80 tfp_cpu_to_le_16(sram_entry[index].start); \
81 request.element ## _stride = \
82 tfp_cpu_to_le_16(sram_entry[index].stride); \
86 * Endian converts the start and stride from the HW response to the
 * local sram_entry table (SRAM mirror of TF_HW_RESP_TO_ALLOC).
89 #define TF_SRAM_RESP_TO_ALLOC(sram_entry, index, response, element) do { \
90 sram_entry[index].start = \
91 tfp_le_to_cpu_16(response.element ## _start); \
92 sram_entry[index].stride = \
93 tfp_le_to_cpu_16(response.element ## _stride); \
97 * This is the MAX data we can transport across regular HWRM
 * (payload bytes carried inline in the message; see tf_msg_tcam_entry_set).
99 #define TF_PCI_BUF_SIZE_MAX 88
102 * If data bigger than TF_PCI_BUF_SIZE_MAX then use DMA method
 * Holds the virtual/physical addresses of the DMA-able buffer
 * (fields not visible here; see tf_msg_get_dma_buf for population).
104 struct tf_msg_dma_buf {
/*
 * Convert a TruFlow TCAM table type into the HWRM device data type
 * expected by firmware, written through hwrm_type.
 * NOTE(review): only L2_CTXT/PROF/WC types visibly map to a value;
 * VEB/SP/CT_RULE cases and the error/default path are not visible
 * here — presumably they return an error. Confirm in full source.
 */
110 tf_tcam_tbl_2_hwrm(enum tf_tcam_tbl_type tcam_type,
116 case TF_TCAM_TBL_TYPE_L2_CTXT_TCAM:
117 *hwrm_type = TF_DEV_DATA_TYPE_TF_L2_CTX_ENTRY;
119 case TF_TCAM_TBL_TYPE_PROF_TCAM:
120 *hwrm_type = TF_DEV_DATA_TYPE_TF_PROF_TCAM_ENTRY;
122 case TF_TCAM_TBL_TYPE_WC_TCAM:
123 *hwrm_type = TF_DEV_DATA_TYPE_TF_WC_ENTRY;
125 case TF_TCAM_TBL_TYPE_VEB_TCAM:
128 case TF_TCAM_TBL_TYPE_SP_TCAM:
131 case TF_TCAM_TBL_TYPE_CT_RULE_TCAM:
143 * Sends session open request to TF Firmware
 * Copies TF_SESSION_NAME_MAX bytes of ctrl_chan_name into the request
 * and returns the firmware-assigned session id through fw_session_id.
 * NOTE(review): ctrl_chan_name is assumed to be at least
 * TF_SESSION_NAME_MAX bytes — confirm at callers.
146 tf_msg_session_open(struct tf *tfp,
147 char *ctrl_chan_name,
148 uint8_t *fw_session_id)
151 struct hwrm_tf_session_open_input req = { 0 };
152 struct hwrm_tf_session_open_output resp = { 0 };
153 struct tfp_send_msg_parms parms = { 0 };
155 /* Populate the request */
156 memcpy(&req.session_name, ctrl_chan_name, TF_SESSION_NAME_MAX);
158 parms.tf_type = HWRM_TF_SESSION_OPEN;
159 parms.req_data = (uint32_t *)&req;
160 parms.req_size = sizeof(req);
161 parms.resp_data = (uint32_t *)&resp;
162 parms.resp_size = sizeof(resp);
163 parms.mailbox = TF_KONG_MB;
165 rc = tfp_send_msg_direct(tfp,
/* Return the fw session id from the response */
170 *fw_session_id = resp.fw_session_id;
176 * Sends session attach request to TF Firmware
 * NOTE(review): all parameters are __rte_unused, so this appears to be
 * an unimplemented stub (body not visible) — confirm in full source.
179 tf_msg_session_attach(struct tf *tfp __rte_unused,
180 char *ctrl_chan_name __rte_unused,
181 uint8_t tf_fw_session_id __rte_unused)
187 * Sends session close request to TF Firmware
 * Identifies the session to firmware via the fw_session_id stored in
 * the session's core_data.
190 tf_msg_session_close(struct tf *tfp)
193 struct hwrm_tf_session_close_input req = { 0 };
194 struct hwrm_tf_session_close_output resp = { 0 };
195 struct tf_session *tfs = (struct tf_session *)(tfp->session->core_data);
196 struct tfp_send_msg_parms parms = { 0 };
198 /* Populate the request */
200 tfp_cpu_to_le_32(tfs->session_id.internal.fw_session_id);
202 parms.tf_type = HWRM_TF_SESSION_CLOSE;
203 parms.req_data = (uint32_t *)&req;
204 parms.req_size = sizeof(req);
205 parms.resp_data = (uint32_t *)&resp;
206 parms.resp_size = sizeof(resp);
207 parms.mailbox = TF_KONG_MB;
209 rc = tfp_send_msg_direct(tfp,
215 * Sends session query config request to TF Firmware
218 tf_msg_session_qcfg(struct tf *tfp)
221 struct hwrm_tf_session_qcfg_input req = { 0 };
222 struct hwrm_tf_session_qcfg_output resp = { 0 };
223 struct tf_session *tfs = (struct tf_session *)(tfp->session->core_data);
224 struct tfp_send_msg_parms parms = { 0 };
226 /* Populate the request */
228 tfp_cpu_to_le_32(tfs->session_id.internal.fw_session_id);
230 parms.tf_type = HWRM_TF_SESSION_QCFG,
231 parms.req_data = (uint32_t *)&req;
232 parms.req_size = sizeof(req);
233 parms.resp_data = (uint32_t *)&resp;
234 parms.resp_size = sizeof(resp);
235 parms.mailbox = TF_KONG_MB;
237 rc = tfp_send_msg_direct(tfp,
243 * Sends session HW resource query capability request to TF Firmware
 * Zeroes *query, sends a tunneled QCAPS request for the given direction,
 * then unpacks the per-resource min/max values from the response into
 * query->hw_query via TF_HW_RESP_TO_QUERY.
246 tf_msg_session_hw_resc_qcaps(struct tf *tfp,
248 struct tf_rm_hw_query *query)
251 struct tfp_send_msg_parms parms = { 0 };
252 struct tf_session_hw_resc_qcaps_input req = { 0 };
253 struct tf_session_hw_resc_qcaps_output resp = { 0 };
254 struct tf_session *tfs = (struct tf_session *)(tfp->session->core_data);
/* Clear caller's result before filling it from the response */
256 memset(query, 0, sizeof(*query));
258 /* Populate the request */
260 tfp_cpu_to_le_32(tfs->session_id.internal.fw_session_id);
261 req.flags = tfp_cpu_to_le_16(dir);
266 HWRM_TFT_SESSION_HW_RESC_QCAPS,
270 rc = tfp_send_msg_tunneled(tfp, &parms);
274 /* Process the response */
275 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_L2_CTXT_TCAM, resp,
276 l2_ctx_tcam_entries);
277 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_PROF_FUNC, resp,
279 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_PROF_TCAM, resp,
281 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_EM_PROF_ID, resp,
283 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_EM_REC, resp,
285 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_WC_TCAM_PROF_ID, resp,
287 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_WC_TCAM, resp,
289 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_METER_PROF, resp,
291 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_METER_INST,
293 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_MIRROR, resp,
295 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_UPAR, resp,
297 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_SP_TCAM, resp,
299 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_L2_FUNC, resp,
301 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_FKB, resp,
303 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_TBL_SCOPE, resp,
305 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_EPOCH0, resp,
307 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_EPOCH1, resp,
309 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_METADATA, resp,
311 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_CT_STATE, resp,
313 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_RANGE_PROF, resp,
315 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_RANGE_ENTRY, resp,
317 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_LAG_ENTRY, resp,
/* Tunneled messages report status in tf_resp_code, not rc alone */
320 return tfp_le_to_cpu_32(parms.tf_resp_code);
324 * Sends session HW resource allocation request to TF Firmware
327 tf_msg_session_hw_resc_alloc(struct tf *tfp __rte_unused,
329 struct tf_rm_hw_alloc *hw_alloc __rte_unused,
330 struct tf_rm_entry *hw_entry __rte_unused)
333 struct tfp_send_msg_parms parms = { 0 };
334 struct tf_session_hw_resc_alloc_input req = { 0 };
335 struct tf_session_hw_resc_alloc_output resp = { 0 };
336 struct tf_session *tfs = (struct tf_session *)(tfp->session->core_data);
338 memset(hw_entry, 0, sizeof(*hw_entry));
340 /* Populate the request */
342 tfp_cpu_to_le_32(tfs->session_id.internal.fw_session_id);
343 req.flags = tfp_cpu_to_le_16(dir);
345 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_L2_CTXT_TCAM, req,
346 l2_ctx_tcam_entries);
347 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_PROF_FUNC, req,
349 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_PROF_TCAM, req,
351 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_EM_PROF_ID, req,
353 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_EM_REC, req,
355 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_WC_TCAM_PROF_ID, req,
357 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_WC_TCAM, req,
359 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_METER_PROF, req,
361 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_METER_INST, req,
363 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_MIRROR, req,
365 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_UPAR, req,
367 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_SP_TCAM, req,
369 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_L2_FUNC, req,
371 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_FKB, req,
373 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_TBL_SCOPE, req,
375 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_EPOCH0, req,
377 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_EPOCH1, req,
379 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_METADATA, req,
381 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_CT_STATE, req,
383 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_RANGE_PROF, req,
385 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_RANGE_ENTRY, req,
387 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_LAG_ENTRY, req,
393 HWRM_TFT_SESSION_HW_RESC_ALLOC,
397 rc = tfp_send_msg_tunneled(tfp, &parms);
401 /* Process the response */
402 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_L2_CTXT_TCAM, resp,
403 l2_ctx_tcam_entries);
404 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_PROF_FUNC, resp,
406 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_PROF_TCAM, resp,
408 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_EM_PROF_ID, resp,
410 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_EM_REC, resp,
412 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_WC_TCAM_PROF_ID, resp,
414 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_WC_TCAM, resp,
416 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_METER_PROF, resp,
418 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_METER_INST, resp,
420 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_MIRROR, resp,
422 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_UPAR, resp,
424 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_SP_TCAM, resp,
426 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_L2_FUNC, resp,
428 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_FKB, resp,
430 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_TBL_SCOPE, resp,
432 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_EPOCH0, resp,
434 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_EPOCH1, resp,
436 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_METADATA, resp,
438 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_CT_STATE, resp,
440 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_RANGE_PROF, resp,
442 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_RANGE_ENTRY, resp,
444 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_LAG_ENTRY, resp,
447 return tfp_le_to_cpu_32(parms.tf_resp_code);
451 * Sends session HW resource free request to TF Firmware
 * Builds the free request from the start/stride values in hw_entry and
 * sends it tunneled (no response payload expected).
454 tf_msg_session_hw_resc_free(struct tf *tfp,
456 struct tf_rm_entry *hw_entry)
459 struct tfp_send_msg_parms parms = { 0 };
460 struct tf_session_hw_resc_free_input req = { 0 };
461 struct tf_session *tfs = (struct tf_session *)(tfp->session->core_data);
/* NOTE(review): this memset zeroes hw_entry BEFORE the TF_HW_FREE_TO_REQ
 * macros below read hw_entry[...].start/.stride, so the request appears
 * to be populated with zeros; also sizeof(*hw_entry) covers only one
 * entry. Confirm intended ordering/size against the full source.
 */
463 memset(hw_entry, 0, sizeof(*hw_entry));
465 /* Populate the request */
467 tfp_cpu_to_le_32(tfs->session_id.internal.fw_session_id);
468 req.flags = tfp_cpu_to_le_16(dir);
470 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_L2_CTXT_TCAM, req,
471 l2_ctx_tcam_entries);
472 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_PROF_FUNC, req,
474 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_PROF_TCAM, req,
476 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_EM_PROF_ID, req,
478 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_EM_REC, req,
480 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_WC_TCAM_PROF_ID, req,
482 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_WC_TCAM, req,
484 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_METER_PROF, req,
486 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_METER_INST, req,
488 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_MIRROR, req,
490 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_UPAR, req,
492 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_SP_TCAM, req,
494 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_L2_FUNC, req,
496 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_FKB, req,
498 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_TBL_SCOPE, req,
500 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_EPOCH0, req,
502 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_EPOCH1, req,
504 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_METADATA, req,
506 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_CT_STATE, req,
508 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_RANGE_PROF, req,
510 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_RANGE_ENTRY, req,
512 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_LAG_ENTRY, req,
515 MSG_PREP_NO_RESP(parms,
518 HWRM_TFT_SESSION_HW_RESC_FREE,
521 rc = tfp_send_msg_tunneled(tfp, &parms);
525 return tfp_le_to_cpu_32(parms.tf_resp_code);
529 * Sends session HW resource flush request to TF Firmware
 * Same request layout as the free path (reuses
 * tf_session_hw_resc_free_input) but with the FLUSH tunneled opcode;
 * unlike free, hw_entry is not zeroed here.
532 tf_msg_session_hw_resc_flush(struct tf *tfp,
534 struct tf_rm_entry *hw_entry)
537 struct tfp_send_msg_parms parms = { 0 };
538 struct tf_session_hw_resc_free_input req = { 0 };
539 struct tf_session *tfs = (struct tf_session *)(tfp->session->core_data);
541 /* Populate the request */
543 tfp_cpu_to_le_32(tfs->session_id.internal.fw_session_id);
544 req.flags = tfp_cpu_to_le_16(dir);
546 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_L2_CTXT_TCAM, req,
547 l2_ctx_tcam_entries);
548 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_PROF_FUNC, req,
550 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_PROF_TCAM, req,
552 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_EM_PROF_ID, req,
554 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_EM_REC, req,
556 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_WC_TCAM_PROF_ID, req,
558 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_WC_TCAM, req,
560 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_METER_PROF, req,
562 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_METER_INST, req,
564 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_MIRROR, req,
566 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_UPAR, req,
568 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_SP_TCAM, req,
570 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_L2_FUNC, req,
572 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_FKB, req,
574 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_TBL_SCOPE, req,
576 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_EPOCH0, req,
578 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_EPOCH1, req,
580 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_METADATA, req,
582 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_CT_STATE, req,
584 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_RANGE_PROF, req,
586 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_RANGE_ENTRY, req,
588 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_LAG_ENTRY, req,
591 MSG_PREP_NO_RESP(parms,
594 HWRM_TFT_SESSION_HW_RESC_FLUSH,
597 rc = tfp_send_msg_tunneled(tfp, &parms);
601 return tfp_le_to_cpu_32(parms.tf_resp_code);
605 * Sends session SRAM resource query capability request to TF Firmware
608 tf_msg_session_sram_resc_qcaps(struct tf *tfp __rte_unused,
610 struct tf_rm_sram_query *query __rte_unused)
613 struct tfp_send_msg_parms parms = { 0 };
614 struct tf_session_sram_resc_qcaps_input req = { 0 };
615 struct tf_session_sram_resc_qcaps_output resp = { 0 };
616 struct tf_session *tfs = (struct tf_session *)(tfp->session->core_data);
618 /* Populate the request */
620 tfp_cpu_to_le_32(tfs->session_id.internal.fw_session_id);
621 req.flags = tfp_cpu_to_le_16(dir);
626 HWRM_TFT_SESSION_SRAM_RESC_QCAPS,
630 rc = tfp_send_msg_tunneled(tfp, &parms);
634 /* Process the response */
635 TF_SRAM_RESP_TO_QUERY(query, TF_RESC_TYPE_SRAM_FULL_ACTION, resp,
637 TF_SRAM_RESP_TO_QUERY(query, TF_RESC_TYPE_SRAM_MCG, resp,
639 TF_SRAM_RESP_TO_QUERY(query, TF_RESC_TYPE_SRAM_ENCAP_8B, resp,
641 TF_SRAM_RESP_TO_QUERY(query, TF_RESC_TYPE_SRAM_ENCAP_16B, resp,
643 TF_SRAM_RESP_TO_QUERY(query, TF_RESC_TYPE_SRAM_ENCAP_64B, resp,
645 TF_SRAM_RESP_TO_QUERY(query, TF_RESC_TYPE_SRAM_SP_SMAC, resp,
647 TF_SRAM_RESP_TO_QUERY(query, TF_RESC_TYPE_SRAM_SP_SMAC_IPV4, resp,
649 TF_SRAM_RESP_TO_QUERY(query, TF_RESC_TYPE_SRAM_SP_SMAC_IPV6, resp,
651 TF_SRAM_RESP_TO_QUERY(query, TF_RESC_TYPE_SRAM_COUNTER_64B, resp,
653 TF_SRAM_RESP_TO_QUERY(query, TF_RESC_TYPE_SRAM_NAT_SPORT, resp,
655 TF_SRAM_RESP_TO_QUERY(query, TF_RESC_TYPE_SRAM_NAT_DPORT, resp,
657 TF_SRAM_RESP_TO_QUERY(query, TF_RESC_TYPE_SRAM_NAT_S_IPV4, resp,
659 TF_SRAM_RESP_TO_QUERY(query, TF_RESC_TYPE_SRAM_NAT_D_IPV4, resp,
662 return tfp_le_to_cpu_32(parms.tf_resp_code);
666 * Sends session SRAM resource allocation request to TF Firmware
669 tf_msg_session_sram_resc_alloc(struct tf *tfp __rte_unused,
671 struct tf_rm_sram_alloc *sram_alloc __rte_unused,
672 struct tf_rm_entry *sram_entry __rte_unused)
675 struct tfp_send_msg_parms parms = { 0 };
676 struct tf_session_sram_resc_alloc_input req = { 0 };
677 struct tf_session_sram_resc_alloc_output resp;
678 struct tf_session *tfs = (struct tf_session *)(tfp->session->core_data);
680 memset(&resp, 0, sizeof(resp));
682 /* Populate the request */
684 tfp_cpu_to_le_32(tfs->session_id.internal.fw_session_id);
685 req.flags = tfp_cpu_to_le_16(dir);
687 TF_SRAM_ALLOC_TO_REQ(sram_alloc, TF_RESC_TYPE_SRAM_FULL_ACTION, req,
689 TF_SRAM_ALLOC_TO_REQ(sram_alloc, TF_RESC_TYPE_SRAM_MCG, req,
691 TF_SRAM_ALLOC_TO_REQ(sram_alloc, TF_RESC_TYPE_SRAM_ENCAP_8B, req,
693 TF_SRAM_ALLOC_TO_REQ(sram_alloc, TF_RESC_TYPE_SRAM_ENCAP_16B, req,
695 TF_SRAM_ALLOC_TO_REQ(sram_alloc, TF_RESC_TYPE_SRAM_ENCAP_64B, req,
697 TF_SRAM_ALLOC_TO_REQ(sram_alloc, TF_RESC_TYPE_SRAM_SP_SMAC, req,
699 TF_SRAM_ALLOC_TO_REQ(sram_alloc, TF_RESC_TYPE_SRAM_SP_SMAC_IPV4,
701 TF_SRAM_ALLOC_TO_REQ(sram_alloc, TF_RESC_TYPE_SRAM_SP_SMAC_IPV6,
703 TF_SRAM_ALLOC_TO_REQ(sram_alloc, TF_RESC_TYPE_SRAM_COUNTER_64B,
705 TF_SRAM_ALLOC_TO_REQ(sram_alloc, TF_RESC_TYPE_SRAM_NAT_SPORT, req,
707 TF_SRAM_ALLOC_TO_REQ(sram_alloc, TF_RESC_TYPE_SRAM_NAT_DPORT, req,
709 TF_SRAM_ALLOC_TO_REQ(sram_alloc, TF_RESC_TYPE_SRAM_NAT_S_IPV4, req,
711 TF_SRAM_ALLOC_TO_REQ(sram_alloc, TF_RESC_TYPE_SRAM_NAT_D_IPV4, req,
717 HWRM_TFT_SESSION_SRAM_RESC_ALLOC,
721 rc = tfp_send_msg_tunneled(tfp, &parms);
725 /* Process the response */
726 TF_SRAM_RESP_TO_ALLOC(sram_entry, TF_RESC_TYPE_SRAM_FULL_ACTION,
728 TF_SRAM_RESP_TO_ALLOC(sram_entry, TF_RESC_TYPE_SRAM_MCG, resp,
730 TF_SRAM_RESP_TO_ALLOC(sram_entry, TF_RESC_TYPE_SRAM_ENCAP_8B, resp,
732 TF_SRAM_RESP_TO_ALLOC(sram_entry, TF_RESC_TYPE_SRAM_ENCAP_16B, resp,
734 TF_SRAM_RESP_TO_ALLOC(sram_entry, TF_RESC_TYPE_SRAM_ENCAP_64B, resp,
736 TF_SRAM_RESP_TO_ALLOC(sram_entry, TF_RESC_TYPE_SRAM_SP_SMAC, resp,
738 TF_SRAM_RESP_TO_ALLOC(sram_entry, TF_RESC_TYPE_SRAM_SP_SMAC_IPV4,
740 TF_SRAM_RESP_TO_ALLOC(sram_entry, TF_RESC_TYPE_SRAM_SP_SMAC_IPV6,
742 TF_SRAM_RESP_TO_ALLOC(sram_entry, TF_RESC_TYPE_SRAM_COUNTER_64B, resp,
744 TF_SRAM_RESP_TO_ALLOC(sram_entry, TF_RESC_TYPE_SRAM_NAT_SPORT, resp,
746 TF_SRAM_RESP_TO_ALLOC(sram_entry, TF_RESC_TYPE_SRAM_NAT_DPORT, resp,
748 TF_SRAM_RESP_TO_ALLOC(sram_entry, TF_RESC_TYPE_SRAM_NAT_S_IPV4, resp,
750 TF_SRAM_RESP_TO_ALLOC(sram_entry, TF_RESC_TYPE_SRAM_NAT_D_IPV4, resp,
753 return tfp_le_to_cpu_32(parms.tf_resp_code);
757 * Sends session SRAM resource free request to TF Firmware
760 tf_msg_session_sram_resc_free(struct tf *tfp __rte_unused,
762 struct tf_rm_entry *sram_entry __rte_unused)
765 struct tfp_send_msg_parms parms = { 0 };
766 struct tf_session_sram_resc_free_input req = { 0 };
767 struct tf_session *tfs = (struct tf_session *)(tfp->session->core_data);
769 /* Populate the request */
771 tfp_cpu_to_le_32(tfs->session_id.internal.fw_session_id);
772 req.flags = tfp_cpu_to_le_16(dir);
774 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_FULL_ACTION, req,
776 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_MCG, req,
778 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_ENCAP_8B, req,
780 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_ENCAP_16B, req,
782 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_ENCAP_64B, req,
784 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_SP_SMAC, req,
786 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_SP_SMAC_IPV4, req,
788 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_SP_SMAC_IPV6, req,
790 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_COUNTER_64B, req,
792 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_NAT_SPORT, req,
794 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_NAT_DPORT, req,
796 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_NAT_S_IPV4, req,
798 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_NAT_D_IPV4, req,
801 MSG_PREP_NO_RESP(parms,
804 HWRM_TFT_SESSION_SRAM_RESC_FREE,
807 rc = tfp_send_msg_tunneled(tfp, &parms);
811 return tfp_le_to_cpu_32(parms.tf_resp_code);
815 * Sends session SRAM resource flush request to TF Firmware
 * Same request layout as the SRAM free path (reuses
 * tf_session_sram_resc_free_input) but with the FLUSH tunneled opcode.
818 tf_msg_session_sram_resc_flush(struct tf *tfp,
820 struct tf_rm_entry *sram_entry)
823 struct tfp_send_msg_parms parms = { 0 };
824 struct tf_session_sram_resc_free_input req = { 0 };
825 struct tf_session *tfs = (struct tf_session *)(tfp->session->core_data);
827 /* Populate the request */
829 tfp_cpu_to_le_32(tfs->session_id.internal.fw_session_id);
830 req.flags = tfp_cpu_to_le_16(dir);
832 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_FULL_ACTION, req,
834 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_MCG, req,
836 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_ENCAP_8B, req,
838 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_ENCAP_16B, req,
840 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_ENCAP_64B, req,
842 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_SP_SMAC, req,
844 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_SP_SMAC_IPV4, req,
846 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_SP_SMAC_IPV6, req,
848 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_COUNTER_64B, req,
850 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_NAT_SPORT, req,
852 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_NAT_DPORT, req,
854 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_NAT_S_IPV4, req,
856 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_NAT_D_IPV4, req,
859 MSG_PREP_NO_RESP(parms,
862 HWRM_TFT_SESSION_SRAM_RESC_FLUSH,
865 rc = tfp_send_msg_tunneled(tfp, &parms);
869 return tfp_le_to_cpu_32(parms.tf_resp_code);
873 * Sends EM mem register request to Firmware
 * Registers a host page table (page level/size + root DMA address)
 * with firmware and returns the assigned context id via ctx_id.
875 int tf_msg_em_mem_rgtr(struct tf *tfp,
882 struct hwrm_tf_ctxt_mem_rgtr_input req = { 0 };
883 struct hwrm_tf_ctxt_mem_rgtr_output resp = { 0 };
884 struct tfp_send_msg_parms parms = { 0 };
886 req.page_level = page_lvl;
887 req.page_size = page_size;
888 req.page_dir = tfp_cpu_to_le_64(dma_addr);
890 parms.tf_type = HWRM_TF_CTXT_MEM_RGTR;
891 parms.req_data = (uint32_t *)&req;
892 parms.req_size = sizeof(req);
893 parms.resp_data = (uint32_t *)&resp;
894 parms.resp_size = sizeof(resp);
895 parms.mailbox = TF_KONG_MB;
897 rc = tfp_send_msg_direct(tfp,
/* Return firmware's context id for this registration */
902 *ctx_id = tfp_le_to_cpu_16(resp.ctx_id);
908 * Sends EM mem unregister request to Firmware
 * Releases the context previously obtained from tf_msg_em_mem_rgtr;
 * ctx_id is read (not written) here.
910 int tf_msg_em_mem_unrgtr(struct tf *tfp,
914 struct hwrm_tf_ctxt_mem_unrgtr_input req = {0};
915 struct hwrm_tf_ctxt_mem_unrgtr_output resp = {0};
916 struct tfp_send_msg_parms parms = { 0 };
918 req.ctx_id = tfp_cpu_to_le_32(*ctx_id);
920 parms.tf_type = HWRM_TF_CTXT_MEM_UNRGTR;
921 parms.req_data = (uint32_t *)&req;
922 parms.req_size = sizeof(req);
923 parms.resp_data = (uint32_t *)&resp;
924 parms.resp_size = sizeof(resp);
925 parms.mailbox = TF_KONG_MB;
927 rc = tfp_send_msg_direct(tfp,
933 * Sends EM qcaps request to Firmware
 * Queries extended exact-match capabilities for the given direction and
 * copies the supported/size fields from the response into em_caps.
935 int tf_msg_em_qcaps(struct tf *tfp,
937 struct tf_em_caps *em_caps)
940 struct hwrm_tf_ext_em_qcaps_input req = {0};
941 struct hwrm_tf_ext_em_qcaps_output resp = { 0 };
943 struct tfp_send_msg_parms parms = { 0 };
/* Direction is encoded as a flag bit, not a separate field */
945 flags = (dir == TF_DIR_TX ? HWRM_TF_EXT_EM_QCAPS_INPUT_FLAGS_DIR_TX :
946 HWRM_TF_EXT_EM_QCAPS_INPUT_FLAGS_DIR_RX);
947 req.flags = tfp_cpu_to_le_32(flags);
949 parms.tf_type = HWRM_TF_EXT_EM_QCAPS;
950 parms.req_data = (uint32_t *)&req;
951 parms.req_size = sizeof(req);
952 parms.resp_data = (uint32_t *)&resp;
953 parms.resp_size = sizeof(resp);
954 parms.mailbox = TF_KONG_MB;
956 rc = tfp_send_msg_direct(tfp,
/* Unpack capabilities from the little-endian response */
961 em_caps->supported = tfp_le_to_cpu_32(resp.supported);
962 em_caps->max_entries_supported =
963 tfp_le_to_cpu_32(resp.max_entries_supported);
964 em_caps->key_entry_size = tfp_le_to_cpu_16(resp.key_entry_size);
965 em_caps->record_entry_size =
966 tfp_le_to_cpu_16(resp.record_entry_size);
967 em_caps->efc_entry_size = tfp_le_to_cpu_16(resp.efc_entry_size);
973 * Sends EM config request to Firmware
 * Configures the extended EM tables: entry count plus the context ids
 * (from tf_msg_em_mem_rgtr) backing the key0/key1/record/efc tables.
975 int tf_msg_em_cfg(struct tf *tfp,
976 uint32_t num_entries,
977 uint16_t key0_ctx_id,
978 uint16_t key1_ctx_id,
979 uint16_t record_ctx_id,
984 struct hwrm_tf_ext_em_cfg_input req = {0};
985 struct hwrm_tf_ext_em_cfg_output resp = {0};
987 struct tfp_send_msg_parms parms = { 0 };
989 flags = (dir == TF_DIR_TX ? HWRM_TF_EXT_EM_CFG_INPUT_FLAGS_DIR_TX :
990 HWRM_TF_EXT_EM_CFG_INPUT_FLAGS_DIR_RX);
/* NOTE(review): a QCAPS-prefixed flag is OR'd into a CFG request —
 * presumably the bit position matches the CFG preferred-offload flag;
 * confirm against the HWRM definitions in hsi_struct_def_dpdk.h.
 */
991 flags |= HWRM_TF_EXT_EM_QCAPS_INPUT_FLAGS_PREFERRED_OFFLOAD;
993 req.flags = tfp_cpu_to_le_32(flags);
994 req.num_entries = tfp_cpu_to_le_32(num_entries);
996 req.key0_ctx_id = tfp_cpu_to_le_16(key0_ctx_id);
997 req.key1_ctx_id = tfp_cpu_to_le_16(key1_ctx_id);
998 req.record_ctx_id = tfp_cpu_to_le_16(record_ctx_id);
999 req.efc_ctx_id = tfp_cpu_to_le_16(efc_ctx_id);
1001 parms.tf_type = HWRM_TF_EXT_EM_CFG;
1002 parms.req_data = (uint32_t *)&req;
1003 parms.req_size = sizeof(req);
1004 parms.resp_data = (uint32_t *)&resp;
1005 parms.resp_size = sizeof(resp);
1006 parms.mailbox = TF_KONG_MB;
1008 rc = tfp_send_msg_direct(tfp,
1014 * Sends EM operation request to Firmware
 * Issues an extended-EM operation (op code passed through as-is) for
 * the given direction.
1016 int tf_msg_em_op(struct tf *tfp,
1021 struct hwrm_tf_ext_em_op_input req = {0};
1022 struct hwrm_tf_ext_em_op_output resp = {0};
1024 struct tfp_send_msg_parms parms = { 0 };
/* NOTE(review): CFG-prefixed direction flags are used for an OP
 * request — presumably the bit positions match the OP flags; confirm
 * against the HWRM definitions in hsi_struct_def_dpdk.h.
 */
1026 flags = (dir == TF_DIR_TX ? HWRM_TF_EXT_EM_CFG_INPUT_FLAGS_DIR_TX :
1027 HWRM_TF_EXT_EM_CFG_INPUT_FLAGS_DIR_RX);
1028 req.flags = tfp_cpu_to_le_32(flags);
1029 req.op = tfp_cpu_to_le_16(op);
1031 parms.tf_type = HWRM_TF_EXT_EM_OP;
1032 parms.req_data = (uint32_t *)&req;
1033 parms.req_size = sizeof(req);
1034 parms.resp_data = (uint32_t *)&resp;
1035 parms.resp_size = sizeof(resp);
1036 parms.mailbox = TF_KONG_MB;
1038 rc = tfp_send_msg_direct(tfp,
/*
 * Sends a table-type SET request (tunneled) to TF Firmware: writes
 * 'size' bytes of entry data at 'index' of the given table type for
 * the given direction.
 */
1044 tf_msg_set_tbl_entry(struct tf *tfp,
1046 enum tf_tbl_type type,
1052 struct tfp_send_msg_parms parms = { 0 };
1053 struct tf_tbl_type_set_input req = { 0 };
1054 struct tf_session *tfs = (struct tf_session *)(tfp->session->core_data);
1056 /* Populate the request */
1058 tfp_cpu_to_le_32(tfs->session_id.internal.fw_session_id);
1059 req.flags = tfp_cpu_to_le_16(dir);
1060 req.type = tfp_cpu_to_le_32(type);
1061 req.size = tfp_cpu_to_le_16(size);
1062 req.index = tfp_cpu_to_le_32(index);
/* Copy caller's entry payload into the request body */
1064 tfp_memcpy(&req.data,
1068 MSG_PREP_NO_RESP(parms,
1071 HWRM_TFT_TBL_TYPE_SET,
1074 rc = tfp_send_msg_tunneled(tfp, &parms);
1078 return tfp_le_to_cpu_32(parms.tf_resp_code);
/*
 * Sends a table-type GET request (tunneled) to TF Firmware: reads the
 * entry at 'index' of the given table type/direction into the caller's
 * buffer, verifying the firmware returned at least 'size' bytes.
 */
1082 tf_msg_get_tbl_entry(struct tf *tfp,
1084 enum tf_tbl_type type,
1090 struct tfp_send_msg_parms parms = { 0 };
1091 struct tf_tbl_type_get_input req = { 0 };
1092 struct tf_tbl_type_get_output resp = { 0 };
1093 struct tf_session *tfs = (struct tf_session *)(tfp->session->core_data);
1095 /* Populate the request */
1097 tfp_cpu_to_le_32(tfs->session_id.internal.fw_session_id);
1098 req.flags = tfp_cpu_to_le_16(dir);
1099 req.type = tfp_cpu_to_le_32(type);
1100 req.index = tfp_cpu_to_le_32(index);
1105 HWRM_TFT_TBL_TYPE_GET,
1109 rc = tfp_send_msg_tunneled(tfp, &parms);
1113 /* Verify that we got enough buffer to return the requested data */
1114 if (resp.size < size)
1121 return tfp_le_to_cpu_32(parms.tf_resp_code);
/* Bytes consumed per TCAM slice; tfp is accepted for interface symmetry
 * but unused in this definition.
 */
1124 #define TF_BYTES_PER_SLICE(tfp) 12
/* Number of slices needed for 'bytes' of data, rounded up */
1125 #define NUM_SLICES(tfp, bytes) \
1126 (((bytes) + TF_BYTES_PER_SLICE(tfp) - 1) / TF_BYTES_PER_SLICE(tfp))
/*
 * Allocates a DMA-able buffer of 'size' bytes via tfp_calloc and
 * records its physical and virtual addresses in *buf. Caller owns the
 * buffer and frees it with tfp_free(buf->va_addr).
 */
1129 tf_msg_get_dma_buf(struct tf_msg_dma_buf *buf, int size)
1131 struct tfp_calloc_parms alloc_parms;
/* Allocate the DMA buffer for the TCAM entry transfer */
1134 alloc_parms.nitems = 1;
1135 alloc_parms.size = size;
1136 alloc_parms.alignment = 0;
1138 rc = tfp_calloc(&alloc_parms);
1142 "Failed to allocate tcam dma entry, rc:%d\n",
1147 buf->pa_addr = (uintptr_t)alloc_parms.mem_pa;
1148 buf->va_addr = alloc_parms.mem_va;
/*
 * Sends a TCAM entry SET to firmware. The payload is laid out as
 * key | mask | result; if it fits in TF_PCI_BUF_SIZE_MAX it travels
 * inline in the request, otherwise a DMA buffer is allocated and its
 * physical address is placed in dev_data instead.
 */
1154 tf_msg_tcam_entry_set(struct tf *tfp,
1155 struct tf_set_tcam_entry_parms *parms)
1158 struct tfp_send_msg_parms mparms = { 0 };
1159 struct hwrm_tf_tcam_set_input req = { 0 };
1160 struct hwrm_tf_tcam_set_output resp = { 0 };
1161 uint16_t key_bytes =
1162 TF_BITS2BYTES_WORD_ALIGN(parms->key_sz_in_bits);
1163 uint16_t result_bytes =
1164 TF_BITS2BYTES_WORD_ALIGN(parms->result_sz_in_bits);
1165 struct tf_msg_dma_buf buf = { 0 };
1166 uint8_t *data = NULL;
/* Map the TCAM table type onto the HWRM device data type */
1169 rc = tf_tcam_tbl_2_hwrm(parms->tcam_tbl_type, &req.type);
1173 req.idx = tfp_cpu_to_le_16(parms->idx);
1174 if (parms->dir == TF_DIR_TX)
1175 req.flags |= HWRM_TF_TCAM_SET_INPUT_FLAGS_DIR_TX;
1177 req.key_size = key_bytes;
1178 req.mask_offset = key_bytes;
1179 /* Result follows after key and mask, thus multiply by 2 */
1180 req.result_offset = 2 * key_bytes;
1181 req.result_size = result_bytes;
1182 data_size = 2 * req.key_size + req.result_size;
1184 if (data_size <= TF_PCI_BUF_SIZE_MAX) {
1185 /* use pci buffer */
1186 data = &req.dev_data[0];
1188 /* use dma buffer */
1189 req.flags |= HWRM_TF_TCAM_SET_INPUT_FLAGS_DMA;
1190 rc = tf_msg_get_dma_buf(&buf, data_size);
/* Firmware reads the payload via the physical address in dev_data;
 * 'data' is presumably pointed at buf.va_addr on a line not visible
 * here — confirm in full source.
 */
1194 memcpy(&req.dev_data[0], &buf.pa_addr, sizeof(buf.pa_addr));
1197 memcpy(&data[0], parms->key, key_bytes);
1198 memcpy(&data[key_bytes], parms->mask, key_bytes);
1199 memcpy(&data[req.result_offset], parms->result, result_bytes);
1201 mparms.tf_type = HWRM_TF_TCAM_SET;
1202 mparms.req_data = (uint32_t *)&req;
1203 mparms.req_size = sizeof(req);
1204 mparms.resp_data = (uint32_t *)&resp;
1205 mparms.resp_size = sizeof(resp);
1206 mparms.mailbox = TF_KONG_MB;
1208 rc = tfp_send_msg_direct(tfp,
/* Release the DMA buffer if the large-payload path allocated one */
1213 if (buf.va_addr != NULL)
1214 tfp_free(buf.va_addr);
1220 tf_msg_tcam_entry_free(struct tf *tfp,
1221 struct tf_free_tcam_entry_parms *in_parms)
1224 struct hwrm_tf_tcam_free_input req = { 0 };
1225 struct hwrm_tf_tcam_free_output resp = { 0 };
1226 struct tfp_send_msg_parms parms = { 0 };
1228 /* Populate the request */
1229 rc = tf_tcam_tbl_2_hwrm(in_parms->tcam_tbl_type, &req.type);
1234 req.idx_list[0] = tfp_cpu_to_le_16(in_parms->idx);
1235 if (in_parms->dir == TF_DIR_TX)
1236 req.flags |= HWRM_TF_TCAM_FREE_INPUT_FLAGS_DIR_TX;
1238 parms.tf_type = HWRM_TF_TCAM_FREE;
1239 parms.req_data = (uint32_t *)&req;
1240 parms.req_size = sizeof(req);
1241 parms.resp_data = (uint32_t *)&resp;
1242 parms.resp_size = sizeof(resp);
1243 parms.mailbox = TF_KONG_MB;
1245 rc = tfp_send_msg_direct(tfp,