/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2019-2020 Broadcom
 * All rights reserved.
 */
#include <inttypes.h>
#include <stdbool.h>
#include <stdlib.h>
#include <string.h>

#include "tf_msg_common.h"
#include "tf_msg.h"
#include "tf_session.h"
#include "tfp.h"
#include "hwrm_tf.h"
#include "hsi_struct_def_dpdk.h"
21 * Endian converts min and max values from the HW response to the query
23 #define TF_HW_RESP_TO_QUERY(query, index, response, element) do { \
24 (query)->hw_query[index].min = \
25 tfp_le_to_cpu_16(response. element ## _min); \
26 (query)->hw_query[index].max = \
27 tfp_le_to_cpu_16(response. element ## _max); \
31 * Endian converts the number of entries from the alloc to the request
33 #define TF_HW_ALLOC_TO_REQ(alloc, index, request, element) \
34 (request. num_ ## element = tfp_cpu_to_le_16((alloc)->hw_num[index]))
37 * Endian converts the start and stride value from the free to the request
39 #define TF_HW_FREE_TO_REQ(hw_entry, index, request, element) do { \
40 request.element ## _start = \
41 tfp_cpu_to_le_16(hw_entry[index].start); \
42 request.element ## _stride = \
43 tfp_cpu_to_le_16(hw_entry[index].stride); \
47 * Endian converts the start and stride from the HW response to the
50 #define TF_HW_RESP_TO_ALLOC(hw_entry, index, response, element) do { \
51 hw_entry[index].start = \
52 tfp_le_to_cpu_16(response.element ## _start); \
53 hw_entry[index].stride = \
54 tfp_le_to_cpu_16(response.element ## _stride); \
58 * Endian converts min and max values from the SRAM response to the
61 #define TF_SRAM_RESP_TO_QUERY(query, index, response, element) do { \
62 (query)->sram_query[index].min = \
63 tfp_le_to_cpu_16(response.element ## _min); \
64 (query)->sram_query[index].max = \
65 tfp_le_to_cpu_16(response.element ## _max); \
69 * Endian converts the number of entries from the action (alloc) to
72 #define TF_SRAM_ALLOC_TO_REQ(action, index, request, element) \
73 (request. num_ ## element = tfp_cpu_to_le_16((action)->sram_num[index]))
76 * Endian converts the start and stride value from the free to the request
78 #define TF_SRAM_FREE_TO_REQ(sram_entry, index, request, element) do { \
79 request.element ## _start = \
80 tfp_cpu_to_le_16(sram_entry[index].start); \
81 request.element ## _stride = \
82 tfp_cpu_to_le_16(sram_entry[index].stride); \
86 * Endian converts the start and stride from the HW response to the
89 #define TF_SRAM_RESP_TO_ALLOC(sram_entry, index, response, element) do { \
90 sram_entry[index].start = \
91 tfp_le_to_cpu_16(response.element ## _start); \
92 sram_entry[index].stride = \
93 tfp_le_to_cpu_16(response.element ## _stride); \
97 * This is the MAX data we can transport across regular HWRM
99 #define TF_PCI_BUF_SIZE_MAX 88
102 * If data bigger than TF_PCI_BUF_SIZE_MAX then use DMA method
104 struct tf_msg_dma_buf {
110 * Sends session open request to TF Firmware
113 tf_msg_session_open(struct tf *tfp,
114 char *ctrl_chan_name,
115 uint8_t *fw_session_id)
118 struct hwrm_tf_session_open_input req = { 0 };
119 struct hwrm_tf_session_open_output resp = { 0 };
120 struct tfp_send_msg_parms parms = { 0 };
122 /* Populate the request */
123 memcpy(&req.session_name, ctrl_chan_name, TF_SESSION_NAME_MAX);
125 parms.tf_type = HWRM_TF_SESSION_OPEN;
126 parms.req_data = (uint32_t *)&req;
127 parms.req_size = sizeof(req);
128 parms.resp_data = (uint32_t *)&resp;
129 parms.resp_size = sizeof(resp);
130 parms.mailbox = TF_KONG_MB;
132 rc = tfp_send_msg_direct(tfp,
137 *fw_session_id = resp.fw_session_id;
143 * Sends session attach request to TF Firmware
146 tf_msg_session_attach(struct tf *tfp __rte_unused,
147 char *ctrl_chan_name __rte_unused,
148 uint8_t tf_fw_session_id __rte_unused)
154 * Sends session close request to TF Firmware
157 tf_msg_session_close(struct tf *tfp)
160 struct hwrm_tf_session_close_input req = { 0 };
161 struct hwrm_tf_session_close_output resp = { 0 };
162 struct tf_session *tfs = (struct tf_session *)(tfp->session->core_data);
163 struct tfp_send_msg_parms parms = { 0 };
165 /* Populate the request */
167 tfp_cpu_to_le_32(tfs->session_id.internal.fw_session_id);
169 parms.tf_type = HWRM_TF_SESSION_CLOSE;
170 parms.req_data = (uint32_t *)&req;
171 parms.req_size = sizeof(req);
172 parms.resp_data = (uint32_t *)&resp;
173 parms.resp_size = sizeof(resp);
174 parms.mailbox = TF_KONG_MB;
176 rc = tfp_send_msg_direct(tfp,
182 * Sends session query config request to TF Firmware
185 tf_msg_session_qcfg(struct tf *tfp)
188 struct hwrm_tf_session_qcfg_input req = { 0 };
189 struct hwrm_tf_session_qcfg_output resp = { 0 };
190 struct tf_session *tfs = (struct tf_session *)(tfp->session->core_data);
191 struct tfp_send_msg_parms parms = { 0 };
193 /* Populate the request */
195 tfp_cpu_to_le_32(tfs->session_id.internal.fw_session_id);
197 parms.tf_type = HWRM_TF_SESSION_QCFG,
198 parms.req_data = (uint32_t *)&req;
199 parms.req_size = sizeof(req);
200 parms.resp_data = (uint32_t *)&resp;
201 parms.resp_size = sizeof(resp);
202 parms.mailbox = TF_KONG_MB;
204 rc = tfp_send_msg_direct(tfp,
210 * Sends session HW resource query capability request to TF Firmware
213 tf_msg_session_hw_resc_qcaps(struct tf *tfp,
215 struct tf_rm_hw_query *query)
218 struct tfp_send_msg_parms parms = { 0 };
219 struct tf_session_hw_resc_qcaps_input req = { 0 };
220 struct tf_session_hw_resc_qcaps_output resp = { 0 };
221 struct tf_session *tfs = (struct tf_session *)(tfp->session->core_data);
223 memset(query, 0, sizeof(*query));
225 /* Populate the request */
227 tfp_cpu_to_le_32(tfs->session_id.internal.fw_session_id);
228 req.flags = tfp_cpu_to_le_16(dir);
233 HWRM_TFT_SESSION_HW_RESC_QCAPS,
237 rc = tfp_send_msg_tunneled(tfp, &parms);
241 /* Process the response */
242 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_L2_CTXT_TCAM, resp,
243 l2_ctx_tcam_entries);
244 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_PROF_FUNC, resp,
246 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_PROF_TCAM, resp,
248 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_EM_PROF_ID, resp,
250 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_EM_REC, resp,
252 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_WC_TCAM_PROF_ID, resp,
254 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_WC_TCAM, resp,
256 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_METER_PROF, resp,
258 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_METER_INST,
260 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_MIRROR, resp,
262 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_UPAR, resp,
264 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_SP_TCAM, resp,
266 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_L2_FUNC, resp,
268 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_FKB, resp,
270 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_TBL_SCOPE, resp,
272 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_EPOCH0, resp,
274 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_EPOCH1, resp,
276 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_METADATA, resp,
278 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_CT_STATE, resp,
280 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_RANGE_PROF, resp,
282 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_RANGE_ENTRY, resp,
284 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_LAG_ENTRY, resp,
287 return tfp_le_to_cpu_32(parms.tf_resp_code);
291 * Sends session HW resource allocation request to TF Firmware
294 tf_msg_session_hw_resc_alloc(struct tf *tfp __rte_unused,
296 struct tf_rm_hw_alloc *hw_alloc __rte_unused,
297 struct tf_rm_entry *hw_entry __rte_unused)
300 struct tfp_send_msg_parms parms = { 0 };
301 struct tf_session_hw_resc_alloc_input req = { 0 };
302 struct tf_session_hw_resc_alloc_output resp = { 0 };
303 struct tf_session *tfs = (struct tf_session *)(tfp->session->core_data);
305 memset(hw_entry, 0, sizeof(*hw_entry));
307 /* Populate the request */
309 tfp_cpu_to_le_32(tfs->session_id.internal.fw_session_id);
310 req.flags = tfp_cpu_to_le_16(dir);
312 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_L2_CTXT_TCAM, req,
313 l2_ctx_tcam_entries);
314 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_PROF_FUNC, req,
316 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_PROF_TCAM, req,
318 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_EM_PROF_ID, req,
320 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_EM_REC, req,
322 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_WC_TCAM_PROF_ID, req,
324 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_WC_TCAM, req,
326 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_METER_PROF, req,
328 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_METER_INST, req,
330 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_MIRROR, req,
332 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_UPAR, req,
334 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_SP_TCAM, req,
336 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_L2_FUNC, req,
338 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_FKB, req,
340 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_TBL_SCOPE, req,
342 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_EPOCH0, req,
344 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_EPOCH1, req,
346 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_METADATA, req,
348 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_CT_STATE, req,
350 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_RANGE_PROF, req,
352 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_RANGE_ENTRY, req,
354 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_LAG_ENTRY, req,
360 HWRM_TFT_SESSION_HW_RESC_ALLOC,
364 rc = tfp_send_msg_tunneled(tfp, &parms);
368 /* Process the response */
369 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_L2_CTXT_TCAM, resp,
370 l2_ctx_tcam_entries);
371 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_PROF_FUNC, resp,
373 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_PROF_TCAM, resp,
375 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_EM_PROF_ID, resp,
377 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_EM_REC, resp,
379 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_WC_TCAM_PROF_ID, resp,
381 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_WC_TCAM, resp,
383 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_METER_PROF, resp,
385 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_METER_INST, resp,
387 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_MIRROR, resp,
389 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_UPAR, resp,
391 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_SP_TCAM, resp,
393 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_L2_FUNC, resp,
395 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_FKB, resp,
397 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_TBL_SCOPE, resp,
399 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_EPOCH0, resp,
401 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_EPOCH1, resp,
403 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_METADATA, resp,
405 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_CT_STATE, resp,
407 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_RANGE_PROF, resp,
409 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_RANGE_ENTRY, resp,
411 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_LAG_ENTRY, resp,
414 return tfp_le_to_cpu_32(parms.tf_resp_code);
418 * Sends session HW resource free request to TF Firmware
421 tf_msg_session_hw_resc_free(struct tf *tfp,
423 struct tf_rm_entry *hw_entry)
426 struct tfp_send_msg_parms parms = { 0 };
427 struct tf_session_hw_resc_free_input req = { 0 };
428 struct tf_session *tfs = (struct tf_session *)(tfp->session->core_data);
430 memset(hw_entry, 0, sizeof(*hw_entry));
432 /* Populate the request */
434 tfp_cpu_to_le_32(tfs->session_id.internal.fw_session_id);
435 req.flags = tfp_cpu_to_le_16(dir);
437 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_L2_CTXT_TCAM, req,
438 l2_ctx_tcam_entries);
439 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_PROF_FUNC, req,
441 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_PROF_TCAM, req,
443 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_EM_PROF_ID, req,
445 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_EM_REC, req,
447 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_WC_TCAM_PROF_ID, req,
449 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_WC_TCAM, req,
451 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_METER_PROF, req,
453 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_METER_INST, req,
455 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_MIRROR, req,
457 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_UPAR, req,
459 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_SP_TCAM, req,
461 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_L2_FUNC, req,
463 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_FKB, req,
465 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_TBL_SCOPE, req,
467 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_EPOCH0, req,
469 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_EPOCH1, req,
471 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_METADATA, req,
473 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_CT_STATE, req,
475 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_RANGE_PROF, req,
477 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_RANGE_ENTRY, req,
479 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_LAG_ENTRY, req,
482 MSG_PREP_NO_RESP(parms,
485 HWRM_TFT_SESSION_HW_RESC_FREE,
488 rc = tfp_send_msg_tunneled(tfp, &parms);
492 return tfp_le_to_cpu_32(parms.tf_resp_code);
496 * Sends session HW resource flush request to TF Firmware
499 tf_msg_session_hw_resc_flush(struct tf *tfp,
501 struct tf_rm_entry *hw_entry)
504 struct tfp_send_msg_parms parms = { 0 };
505 struct tf_session_hw_resc_free_input req = { 0 };
506 struct tf_session *tfs = (struct tf_session *)(tfp->session->core_data);
508 /* Populate the request */
510 tfp_cpu_to_le_32(tfs->session_id.internal.fw_session_id);
511 req.flags = tfp_cpu_to_le_16(dir);
513 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_L2_CTXT_TCAM, req,
514 l2_ctx_tcam_entries);
515 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_PROF_FUNC, req,
517 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_PROF_TCAM, req,
519 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_EM_PROF_ID, req,
521 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_EM_REC, req,
523 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_WC_TCAM_PROF_ID, req,
525 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_WC_TCAM, req,
527 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_METER_PROF, req,
529 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_METER_INST, req,
531 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_MIRROR, req,
533 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_UPAR, req,
535 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_SP_TCAM, req,
537 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_L2_FUNC, req,
539 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_FKB, req,
541 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_TBL_SCOPE, req,
543 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_EPOCH0, req,
545 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_EPOCH1, req,
547 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_METADATA, req,
549 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_CT_STATE, req,
551 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_RANGE_PROF, req,
553 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_RANGE_ENTRY, req,
555 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_LAG_ENTRY, req,
558 MSG_PREP_NO_RESP(parms,
561 HWRM_TFT_SESSION_HW_RESC_FLUSH,
564 rc = tfp_send_msg_tunneled(tfp, &parms);
568 return tfp_le_to_cpu_32(parms.tf_resp_code);
572 * Sends session SRAM resource query capability request to TF Firmware
575 tf_msg_session_sram_resc_qcaps(struct tf *tfp __rte_unused,
577 struct tf_rm_sram_query *query __rte_unused)
580 struct tfp_send_msg_parms parms = { 0 };
581 struct tf_session_sram_resc_qcaps_input req = { 0 };
582 struct tf_session_sram_resc_qcaps_output resp = { 0 };
583 struct tf_session *tfs = (struct tf_session *)(tfp->session->core_data);
585 /* Populate the request */
587 tfp_cpu_to_le_32(tfs->session_id.internal.fw_session_id);
588 req.flags = tfp_cpu_to_le_16(dir);
593 HWRM_TFT_SESSION_SRAM_RESC_QCAPS,
597 rc = tfp_send_msg_tunneled(tfp, &parms);
601 /* Process the response */
602 TF_SRAM_RESP_TO_QUERY(query, TF_RESC_TYPE_SRAM_FULL_ACTION, resp,
604 TF_SRAM_RESP_TO_QUERY(query, TF_RESC_TYPE_SRAM_MCG, resp,
606 TF_SRAM_RESP_TO_QUERY(query, TF_RESC_TYPE_SRAM_ENCAP_8B, resp,
608 TF_SRAM_RESP_TO_QUERY(query, TF_RESC_TYPE_SRAM_ENCAP_16B, resp,
610 TF_SRAM_RESP_TO_QUERY(query, TF_RESC_TYPE_SRAM_ENCAP_64B, resp,
612 TF_SRAM_RESP_TO_QUERY(query, TF_RESC_TYPE_SRAM_SP_SMAC, resp,
614 TF_SRAM_RESP_TO_QUERY(query, TF_RESC_TYPE_SRAM_SP_SMAC_IPV4, resp,
616 TF_SRAM_RESP_TO_QUERY(query, TF_RESC_TYPE_SRAM_SP_SMAC_IPV6, resp,
618 TF_SRAM_RESP_TO_QUERY(query, TF_RESC_TYPE_SRAM_COUNTER_64B, resp,
620 TF_SRAM_RESP_TO_QUERY(query, TF_RESC_TYPE_SRAM_NAT_SPORT, resp,
622 TF_SRAM_RESP_TO_QUERY(query, TF_RESC_TYPE_SRAM_NAT_DPORT, resp,
624 TF_SRAM_RESP_TO_QUERY(query, TF_RESC_TYPE_SRAM_NAT_S_IPV4, resp,
626 TF_SRAM_RESP_TO_QUERY(query, TF_RESC_TYPE_SRAM_NAT_D_IPV4, resp,
629 return tfp_le_to_cpu_32(parms.tf_resp_code);
633 * Sends session SRAM resource allocation request to TF Firmware
636 tf_msg_session_sram_resc_alloc(struct tf *tfp __rte_unused,
638 struct tf_rm_sram_alloc *sram_alloc __rte_unused,
639 struct tf_rm_entry *sram_entry __rte_unused)
642 struct tfp_send_msg_parms parms = { 0 };
643 struct tf_session_sram_resc_alloc_input req = { 0 };
644 struct tf_session_sram_resc_alloc_output resp;
645 struct tf_session *tfs = (struct tf_session *)(tfp->session->core_data);
647 memset(&resp, 0, sizeof(resp));
649 /* Populate the request */
651 tfp_cpu_to_le_32(tfs->session_id.internal.fw_session_id);
652 req.flags = tfp_cpu_to_le_16(dir);
654 TF_SRAM_ALLOC_TO_REQ(sram_alloc, TF_RESC_TYPE_SRAM_FULL_ACTION, req,
656 TF_SRAM_ALLOC_TO_REQ(sram_alloc, TF_RESC_TYPE_SRAM_MCG, req,
658 TF_SRAM_ALLOC_TO_REQ(sram_alloc, TF_RESC_TYPE_SRAM_ENCAP_8B, req,
660 TF_SRAM_ALLOC_TO_REQ(sram_alloc, TF_RESC_TYPE_SRAM_ENCAP_16B, req,
662 TF_SRAM_ALLOC_TO_REQ(sram_alloc, TF_RESC_TYPE_SRAM_ENCAP_64B, req,
664 TF_SRAM_ALLOC_TO_REQ(sram_alloc, TF_RESC_TYPE_SRAM_SP_SMAC, req,
666 TF_SRAM_ALLOC_TO_REQ(sram_alloc, TF_RESC_TYPE_SRAM_SP_SMAC_IPV4,
668 TF_SRAM_ALLOC_TO_REQ(sram_alloc, TF_RESC_TYPE_SRAM_SP_SMAC_IPV6,
670 TF_SRAM_ALLOC_TO_REQ(sram_alloc, TF_RESC_TYPE_SRAM_COUNTER_64B,
672 TF_SRAM_ALLOC_TO_REQ(sram_alloc, TF_RESC_TYPE_SRAM_NAT_SPORT, req,
674 TF_SRAM_ALLOC_TO_REQ(sram_alloc, TF_RESC_TYPE_SRAM_NAT_DPORT, req,
676 TF_SRAM_ALLOC_TO_REQ(sram_alloc, TF_RESC_TYPE_SRAM_NAT_S_IPV4, req,
678 TF_SRAM_ALLOC_TO_REQ(sram_alloc, TF_RESC_TYPE_SRAM_NAT_D_IPV4, req,
684 HWRM_TFT_SESSION_SRAM_RESC_ALLOC,
688 rc = tfp_send_msg_tunneled(tfp, &parms);
692 /* Process the response */
693 TF_SRAM_RESP_TO_ALLOC(sram_entry, TF_RESC_TYPE_SRAM_FULL_ACTION,
695 TF_SRAM_RESP_TO_ALLOC(sram_entry, TF_RESC_TYPE_SRAM_MCG, resp,
697 TF_SRAM_RESP_TO_ALLOC(sram_entry, TF_RESC_TYPE_SRAM_ENCAP_8B, resp,
699 TF_SRAM_RESP_TO_ALLOC(sram_entry, TF_RESC_TYPE_SRAM_ENCAP_16B, resp,
701 TF_SRAM_RESP_TO_ALLOC(sram_entry, TF_RESC_TYPE_SRAM_ENCAP_64B, resp,
703 TF_SRAM_RESP_TO_ALLOC(sram_entry, TF_RESC_TYPE_SRAM_SP_SMAC, resp,
705 TF_SRAM_RESP_TO_ALLOC(sram_entry, TF_RESC_TYPE_SRAM_SP_SMAC_IPV4,
707 TF_SRAM_RESP_TO_ALLOC(sram_entry, TF_RESC_TYPE_SRAM_SP_SMAC_IPV6,
709 TF_SRAM_RESP_TO_ALLOC(sram_entry, TF_RESC_TYPE_SRAM_COUNTER_64B, resp,
711 TF_SRAM_RESP_TO_ALLOC(sram_entry, TF_RESC_TYPE_SRAM_NAT_SPORT, resp,
713 TF_SRAM_RESP_TO_ALLOC(sram_entry, TF_RESC_TYPE_SRAM_NAT_DPORT, resp,
715 TF_SRAM_RESP_TO_ALLOC(sram_entry, TF_RESC_TYPE_SRAM_NAT_S_IPV4, resp,
717 TF_SRAM_RESP_TO_ALLOC(sram_entry, TF_RESC_TYPE_SRAM_NAT_D_IPV4, resp,
720 return tfp_le_to_cpu_32(parms.tf_resp_code);
724 * Sends session SRAM resource free request to TF Firmware
727 tf_msg_session_sram_resc_free(struct tf *tfp __rte_unused,
729 struct tf_rm_entry *sram_entry __rte_unused)
732 struct tfp_send_msg_parms parms = { 0 };
733 struct tf_session_sram_resc_free_input req = { 0 };
734 struct tf_session *tfs = (struct tf_session *)(tfp->session->core_data);
736 /* Populate the request */
738 tfp_cpu_to_le_32(tfs->session_id.internal.fw_session_id);
739 req.flags = tfp_cpu_to_le_16(dir);
741 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_FULL_ACTION, req,
743 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_MCG, req,
745 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_ENCAP_8B, req,
747 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_ENCAP_16B, req,
749 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_ENCAP_64B, req,
751 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_SP_SMAC, req,
753 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_SP_SMAC_IPV4, req,
755 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_SP_SMAC_IPV6, req,
757 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_COUNTER_64B, req,
759 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_NAT_SPORT, req,
761 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_NAT_DPORT, req,
763 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_NAT_S_IPV4, req,
765 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_NAT_D_IPV4, req,
768 MSG_PREP_NO_RESP(parms,
771 HWRM_TFT_SESSION_SRAM_RESC_FREE,
774 rc = tfp_send_msg_tunneled(tfp, &parms);
778 return tfp_le_to_cpu_32(parms.tf_resp_code);
782 * Sends session SRAM resource flush request to TF Firmware
785 tf_msg_session_sram_resc_flush(struct tf *tfp,
787 struct tf_rm_entry *sram_entry)
790 struct tfp_send_msg_parms parms = { 0 };
791 struct tf_session_sram_resc_free_input req = { 0 };
792 struct tf_session *tfs = (struct tf_session *)(tfp->session->core_data);
794 /* Populate the request */
796 tfp_cpu_to_le_32(tfs->session_id.internal.fw_session_id);
797 req.flags = tfp_cpu_to_le_16(dir);
799 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_FULL_ACTION, req,
801 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_MCG, req,
803 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_ENCAP_8B, req,
805 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_ENCAP_16B, req,
807 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_ENCAP_64B, req,
809 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_SP_SMAC, req,
811 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_SP_SMAC_IPV4, req,
813 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_SP_SMAC_IPV6, req,
815 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_COUNTER_64B, req,
817 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_NAT_SPORT, req,
819 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_NAT_DPORT, req,
821 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_NAT_S_IPV4, req,
823 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_NAT_D_IPV4, req,
826 MSG_PREP_NO_RESP(parms,
829 HWRM_TFT_SESSION_SRAM_RESC_FLUSH,
832 rc = tfp_send_msg_tunneled(tfp, &parms);
836 return tfp_le_to_cpu_32(parms.tf_resp_code);