1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2019-2020 Broadcom
12 #include "tf_session.h"
15 #include "tf_msg_common.h"
17 #include "hsi_struct_def_dpdk.h"
/* Marshalling helpers for the HWRM request/response structs.
 * Token pasting (##) builds the field names from the 'element' argument
 * (e.g. element ## _min, num_ ## element), and tfp_cpu_to_le_16 /
 * tfp_le_to_cpu_16 perform the endian conversion of the 16-bit fields.
 * NOTE(review): the do { ... } while (0) closers are elided in this view.
 */
21 * Endian converts min and max values from the HW response to the query
23 #define TF_HW_RESP_TO_QUERY(query, index, response, element) do { \
24 (query)->hw_query[index].min = \
25 tfp_le_to_cpu_16(response. element ## _min); \
26 (query)->hw_query[index].max = \
27 tfp_le_to_cpu_16(response. element ## _max); \
31 * Endian converts the number of entries from the alloc to the request
33 #define TF_HW_ALLOC_TO_REQ(alloc, index, request, element) \
34 (request. num_ ## element = tfp_cpu_to_le_16((alloc)->hw_num[index]))
37 * Endian converts the start and stride value from the free to the request
39 #define TF_HW_FREE_TO_REQ(hw_entry, index, request, element) do { \
40 request.element ## _start = \
41 tfp_cpu_to_le_16(hw_entry[index].start); \
42 request.element ## _stride = \
43 tfp_cpu_to_le_16(hw_entry[index].stride); \
47 * Endian converts the start and stride from the HW response to the
50 #define TF_HW_RESP_TO_ALLOC(hw_entry, index, response, element) do { \
51 hw_entry[index].start = \
52 tfp_le_to_cpu_16(response.element ## _start); \
53 hw_entry[index].stride = \
54 tfp_le_to_cpu_16(response.element ## _stride); \
58 * Endian converts min and max values from the SRAM response to the
61 #define TF_SRAM_RESP_TO_QUERY(query, index, response, element) do { \
62 (query)->sram_query[index].min = \
63 tfp_le_to_cpu_16(response.element ## _min); \
64 (query)->sram_query[index].max = \
65 tfp_le_to_cpu_16(response.element ## _max); \
69 * Endian converts the number of entries from the action (alloc) to
72 #define TF_SRAM_ALLOC_TO_REQ(action, index, request, element) \
73 (request. num_ ## element = tfp_cpu_to_le_16((action)->sram_num[index]))
76 * Endian converts the start and stride value from the free to the request
78 #define TF_SRAM_FREE_TO_REQ(sram_entry, index, request, element) do { \
79 request.element ## _start = \
80 tfp_cpu_to_le_16(sram_entry[index].start); \
81 request.element ## _stride = \
82 tfp_cpu_to_le_16(sram_entry[index].stride); \
86 * Endian converts the start and stride from the HW response to the
89 #define TF_SRAM_RESP_TO_ALLOC(sram_entry, index, response, element) do { \
90 sram_entry[index].start = \
91 tfp_le_to_cpu_16(response.element ## _start); \
92 sram_entry[index].stride = \
93 tfp_le_to_cpu_16(response.element ## _stride); \
97 * Sends session open request to TF Firmware
100 tf_msg_session_open(struct tf *tfp,
101 char *ctrl_chan_name,
102 uint8_t *fw_session_id)
105 struct hwrm_tf_session_open_input req = { 0 };
106 struct hwrm_tf_session_open_output resp = { 0 };
107 struct tfp_send_msg_parms parms = { 0 };
109 /* Populate the request */
/* Copy the control channel name in as the session name; assumes
 * ctrl_chan_name is at least TF_SESSION_NAME_MAX bytes — TODO confirm
 * against callers.
 */
110 memcpy(&req.session_name, ctrl_chan_name, TF_SESSION_NAME_MAX);
/* Describe the message for the direct (non-tunneled) send path,
 * using the Kong mailbox.
 */
112 parms.tf_type = HWRM_TF_SESSION_OPEN;
113 parms.req_data = (uint32_t *)&req;
114 parms.req_size = sizeof(req);
115 parms.resp_data = (uint32_t *)&resp;
116 parms.resp_size = sizeof(resp);
117 parms.mailbox = TF_KONG_MB;
/* Issue the request to firmware */
119 rc = tfp_send_msg_direct(tfp,
/* Hand the firmware-assigned session id back to the caller */
124 *fw_session_id = resp.fw_session_id;
130 * Sends session attach request to TF Firmware
/* All parameters are tagged __rte_unused — session attach is a stub
 * that is not implemented yet.
 */
133 tf_msg_session_attach(struct tf *tfp __rte_unused,
134 char *ctrl_chan_name __rte_unused,
135 uint8_t tf_fw_session_id __rte_unused)
141 * Sends session close request to TF Firmware
144 tf_msg_session_close(struct tf *tfp)
147 struct hwrm_tf_session_close_input req = { 0 };
148 struct hwrm_tf_session_close_output resp = { 0 };
/* NOTE(review): tfp->session and core_data are dereferenced without a
 * NULL check — presumably callers guarantee an open session; confirm.
 */
149 struct tf_session *tfs = (struct tf_session *)(tfp->session->core_data);
150 struct tfp_send_msg_parms parms = { 0 };
152 /* Populate the request */
/* The firmware session id is endian-converted for the wire */
154 tfp_cpu_to_le_32(tfs->session_id.internal.fw_session_id);
/* Direct (non-tunneled) HWRM message via the Kong mailbox */
156 parms.tf_type = HWRM_TF_SESSION_CLOSE;
157 parms.req_data = (uint32_t *)&req;
158 parms.req_size = sizeof(req);
159 parms.resp_data = (uint32_t *)&resp;
160 parms.resp_size = sizeof(resp);
161 parms.mailbox = TF_KONG_MB;
163 rc = tfp_send_msg_direct(tfp,
169 * Sends session query config request to TF Firmware
172 tf_msg_session_qcfg(struct tf *tfp)
175 struct hwrm_tf_session_qcfg_input req = { 0 };
176 struct hwrm_tf_session_qcfg_output resp = { 0 };
177 struct tf_session *tfs = (struct tf_session *)(tfp->session->core_data);
178 struct tfp_send_msg_parms parms = { 0 };
180 /* Populate the request */
182 tfp_cpu_to_le_32(tfs->session_id.internal.fw_session_id);
/* NOTE(review): the trailing comma below is the comma operator, not a
 * statement terminator — harmless here, but it should be ';' for
 * consistency with the sibling functions.
 */
184 parms.tf_type = HWRM_TF_SESSION_QCFG,
185 parms.req_data = (uint32_t *)&req;
186 parms.req_size = sizeof(req);
187 parms.resp_data = (uint32_t *)&resp;
188 parms.resp_size = sizeof(resp);
189 parms.mailbox = TF_KONG_MB;
191 rc = tfp_send_msg_direct(tfp,
197 * Sends session HW resource query capability request to TF Firmware
200 tf_msg_session_hw_resc_qcaps(struct tf *tfp,
202 struct tf_rm_hw_query *query)
205 struct tfp_send_msg_parms parms = { 0 };
206 struct tf_session_hw_resc_qcaps_input req = { 0 };
207 struct tf_session_hw_resc_qcaps_output resp = { 0 };
208 struct tf_session *tfs = (struct tf_session *)(tfp->session->core_data);
/* Clear the caller's query block before filling it from the response */
210 memset(query, 0, sizeof(*query));
212 /* Populate the request */
214 tfp_cpu_to_le_32(tfs->session_id.internal.fw_session_id);
/* flags carries the direction (rx/tx) of the query */
215 req.flags = tfp_cpu_to_le_16(dir);
220 HWRM_TFT_SESSION_HW_RESC_QCAPS,
/* This request goes over the tunneled (TFT) message path */
224 rc = tfp_send_msg_tunneled(tfp, &parms);
228 /* Process the response */
/* Unpack the per-resource min/max capability pairs into
 * query->hw_query[], endian-converting each 16-bit field.
 */
229 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_L2_CTXT_TCAM, resp,
230 l2_ctx_tcam_entries);
231 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_PROF_FUNC, resp,
233 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_PROF_TCAM, resp,
235 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_EM_PROF_ID, resp,
237 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_EM_REC, resp,
239 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_WC_TCAM_PROF_ID, resp,
241 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_WC_TCAM, resp,
243 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_METER_PROF, resp,
245 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_METER_INST,
247 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_MIRROR, resp,
249 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_UPAR, resp,
251 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_SP_TCAM, resp,
253 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_L2_FUNC, resp,
255 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_FKB, resp,
257 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_TBL_SCOPE, resp,
259 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_EPOCH0, resp,
261 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_EPOCH1, resp,
263 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_METADATA, resp,
265 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_CT_STATE, resp,
267 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_RANGE_PROF, resp,
269 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_RANGE_ENTRY, resp,
271 TF_HW_RESP_TO_QUERY(query, TF_RESC_TYPE_HW_LAG_ENTRY, resp,
/* Firmware status travels in the tunnel response code */
274 return tfp_le_to_cpu_32(parms.tf_resp_code);
278 * Sends session HW resource allocation request to TF Firmware
/* NOTE(review): the __rte_unused markers below look stale — tfp,
 * hw_alloc and hw_entry are all referenced in the body.
 */
281 tf_msg_session_hw_resc_alloc(struct tf *tfp __rte_unused,
283 struct tf_rm_hw_alloc *hw_alloc __rte_unused,
284 struct tf_rm_entry *hw_entry __rte_unused)
287 struct tfp_send_msg_parms parms = { 0 };
288 struct tf_session_hw_resc_alloc_input req = { 0 };
289 struct tf_session_hw_resc_alloc_output resp = { 0 };
290 struct tf_session *tfs = (struct tf_session *)(tfp->session->core_data);
/* NOTE(review): sizeof(*hw_entry) clears only the FIRST element of the
 * hw_entry array, yet the response below fills one entry per resource
 * type — confirm whether the whole array was meant to be zeroed.
 */
292 memset(hw_entry, 0, sizeof(*hw_entry));
294 /* Populate the request */
296 tfp_cpu_to_le_32(tfs->session_id.internal.fw_session_id);
297 req.flags = tfp_cpu_to_le_16(dir);
/* Marshal the requested entry count for every HW resource type */
299 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_L2_CTXT_TCAM, req,
300 l2_ctx_tcam_entries);
301 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_PROF_FUNC, req,
303 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_PROF_TCAM, req,
305 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_EM_PROF_ID, req,
307 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_EM_REC, req,
309 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_WC_TCAM_PROF_ID, req,
311 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_WC_TCAM, req,
313 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_METER_PROF, req,
315 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_METER_INST, req,
317 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_MIRROR, req,
319 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_UPAR, req,
321 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_SP_TCAM, req,
323 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_L2_FUNC, req,
325 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_FKB, req,
327 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_TBL_SCOPE, req,
329 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_EPOCH0, req,
331 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_EPOCH1, req,
333 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_METADATA, req,
335 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_CT_STATE, req,
337 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_RANGE_PROF, req,
339 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_RANGE_ENTRY, req,
341 TF_HW_ALLOC_TO_REQ(hw_alloc, TF_RESC_TYPE_HW_LAG_ENTRY, req,
347 HWRM_TFT_SESSION_HW_RESC_ALLOC,
/* Tunneled (TFT) send path */
351 rc = tfp_send_msg_tunneled(tfp, &parms);
355 /* Process the response */
/* Record the firmware-granted start/stride per resource type */
356 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_L2_CTXT_TCAM, resp,
357 l2_ctx_tcam_entries);
358 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_PROF_FUNC, resp,
360 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_PROF_TCAM, resp,
362 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_EM_PROF_ID, resp,
364 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_EM_REC, resp,
366 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_WC_TCAM_PROF_ID, resp,
368 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_WC_TCAM, resp,
370 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_METER_PROF, resp,
372 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_METER_INST, resp,
374 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_MIRROR, resp,
376 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_UPAR, resp,
378 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_SP_TCAM, resp,
380 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_L2_FUNC, resp,
382 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_FKB, resp,
384 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_TBL_SCOPE, resp,
386 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_EPOCH0, resp,
388 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_EPOCH1, resp,
390 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_METADATA, resp,
392 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_CT_STATE, resp,
394 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_RANGE_PROF, resp,
396 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_RANGE_ENTRY, resp,
398 TF_HW_RESP_TO_ALLOC(hw_entry, TF_RESC_TYPE_HW_LAG_ENTRY, resp,
401 return tfp_le_to_cpu_32(parms.tf_resp_code);
405 * Sends session HW resource free request to TF Firmware
408 tf_msg_session_hw_resc_free(struct tf *tfp,
410 struct tf_rm_entry *hw_entry)
413 struct tfp_send_msg_parms parms = { 0 };
414 struct tf_session_hw_resc_free_input req = { 0 };
415 struct tf_session *tfs = (struct tf_session *)(tfp->session->core_data);
/* NOTE(review): this zeroes (only the first element of) hw_entry BEFORE
 * the TF_HW_FREE_TO_REQ calls below read start/stride out of it, so the
 * first resource is always freed as start=0/stride=0 — verify intent.
 */
417 memset(hw_entry, 0, sizeof(*hw_entry));
419 /* Populate the request */
421 tfp_cpu_to_le_32(tfs->session_id.internal.fw_session_id);
422 req.flags = tfp_cpu_to_le_16(dir);
/* Marshal the held start/stride ranges into the free request */
424 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_L2_CTXT_TCAM, req,
425 l2_ctx_tcam_entries);
426 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_PROF_FUNC, req,
428 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_PROF_TCAM, req,
430 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_EM_PROF_ID, req,
432 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_EM_REC, req,
434 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_WC_TCAM_PROF_ID, req,
436 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_WC_TCAM, req,
438 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_METER_PROF, req,
440 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_METER_INST, req,
442 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_MIRROR, req,
444 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_UPAR, req,
446 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_SP_TCAM, req,
448 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_L2_FUNC, req,
450 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_FKB, req,
452 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_TBL_SCOPE, req,
454 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_EPOCH0, req,
456 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_EPOCH1, req,
458 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_METADATA, req,
460 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_CT_STATE, req,
462 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_RANGE_PROF, req,
464 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_RANGE_ENTRY, req,
466 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_LAG_ENTRY, req,
/* No response payload expected — only the tunnel status code */
469 MSG_PREP_NO_RESP(parms,
472 HWRM_TFT_SESSION_HW_RESC_FREE,
475 rc = tfp_send_msg_tunneled(tfp, &parms);
479 return tfp_le_to_cpu_32(parms.tf_resp_code);
483 * Sends session HW resource flush request to TF Firmware
486 tf_msg_session_hw_resc_flush(struct tf *tfp,
488 struct tf_rm_entry *hw_entry)
491 struct tfp_send_msg_parms parms = { 0 };
/* Flush reuses the free-request layout; only the tunnel opcode differs */
492 struct tf_session_hw_resc_free_input req = { 0 };
493 struct tf_session *tfs = (struct tf_session *)(tfp->session->core_data);
495 /* Populate the request */
497 tfp_cpu_to_le_32(tfs->session_id.internal.fw_session_id);
498 req.flags = tfp_cpu_to_le_16(dir);
/* Marshal the held start/stride ranges into the flush request */
500 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_L2_CTXT_TCAM, req,
501 l2_ctx_tcam_entries);
502 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_PROF_FUNC, req,
504 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_PROF_TCAM, req,
506 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_EM_PROF_ID, req,
508 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_EM_REC, req,
510 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_WC_TCAM_PROF_ID, req,
512 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_WC_TCAM, req,
514 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_METER_PROF, req,
516 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_METER_INST, req,
518 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_MIRROR, req,
520 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_UPAR, req,
522 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_SP_TCAM, req,
524 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_L2_FUNC, req,
526 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_FKB, req,
528 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_TBL_SCOPE, req,
530 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_EPOCH0, req,
532 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_EPOCH1, req,
534 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_METADATA, req,
536 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_CT_STATE, req,
538 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_RANGE_PROF, req,
540 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_RANGE_ENTRY, req,
542 TF_HW_FREE_TO_REQ(hw_entry, TF_RESC_TYPE_HW_LAG_ENTRY, req,
/* No response payload expected — only the tunnel status code */
545 MSG_PREP_NO_RESP(parms,
548 HWRM_TFT_SESSION_HW_RESC_FLUSH,
551 rc = tfp_send_msg_tunneled(tfp, &parms);
555 return tfp_le_to_cpu_32(parms.tf_resp_code);
559 * Sends session SRAM resource query capability request to TF Firmware
/* NOTE(review): __rte_unused markers look stale — tfp and query are
 * both referenced in the body.
 */
562 tf_msg_session_sram_resc_qcaps(struct tf *tfp __rte_unused,
564 struct tf_rm_sram_query *query __rte_unused)
567 struct tfp_send_msg_parms parms = { 0 };
568 struct tf_session_sram_resc_qcaps_input req = { 0 };
569 struct tf_session_sram_resc_qcaps_output resp = { 0 };
570 struct tf_session *tfs = (struct tf_session *)(tfp->session->core_data);
/* NOTE(review): unlike the HW qcaps path, query is not memset here
 * before being populated — confirm callers pre-clear it.
 */
572 /* Populate the request */
574 tfp_cpu_to_le_32(tfs->session_id.internal.fw_session_id);
575 req.flags = tfp_cpu_to_le_16(dir);
580 HWRM_TFT_SESSION_SRAM_RESC_QCAPS,
/* Tunneled (TFT) send path */
584 rc = tfp_send_msg_tunneled(tfp, &parms);
588 /* Process the response */
/* Unpack per-resource min/max capabilities into query->sram_query[] */
589 TF_SRAM_RESP_TO_QUERY(query, TF_RESC_TYPE_SRAM_FULL_ACTION, resp,
591 TF_SRAM_RESP_TO_QUERY(query, TF_RESC_TYPE_SRAM_MCG, resp,
593 TF_SRAM_RESP_TO_QUERY(query, TF_RESC_TYPE_SRAM_ENCAP_8B, resp,
595 TF_SRAM_RESP_TO_QUERY(query, TF_RESC_TYPE_SRAM_ENCAP_16B, resp,
597 TF_SRAM_RESP_TO_QUERY(query, TF_RESC_TYPE_SRAM_ENCAP_64B, resp,
599 TF_SRAM_RESP_TO_QUERY(query, TF_RESC_TYPE_SRAM_SP_SMAC, resp,
601 TF_SRAM_RESP_TO_QUERY(query, TF_RESC_TYPE_SRAM_SP_SMAC_IPV4, resp,
603 TF_SRAM_RESP_TO_QUERY(query, TF_RESC_TYPE_SRAM_SP_SMAC_IPV6, resp,
605 TF_SRAM_RESP_TO_QUERY(query, TF_RESC_TYPE_SRAM_COUNTER_64B, resp,
607 TF_SRAM_RESP_TO_QUERY(query, TF_RESC_TYPE_SRAM_NAT_SPORT, resp,
609 TF_SRAM_RESP_TO_QUERY(query, TF_RESC_TYPE_SRAM_NAT_DPORT, resp,
611 TF_SRAM_RESP_TO_QUERY(query, TF_RESC_TYPE_SRAM_NAT_S_IPV4, resp,
613 TF_SRAM_RESP_TO_QUERY(query, TF_RESC_TYPE_SRAM_NAT_D_IPV4, resp,
616 return tfp_le_to_cpu_32(parms.tf_resp_code);
620 * Sends session SRAM resource allocation request to TF Firmware
/* NOTE(review): __rte_unused markers look stale — tfp, sram_alloc and
 * sram_entry are all referenced in the body.
 */
623 tf_msg_session_sram_resc_alloc(struct tf *tfp __rte_unused,
625 struct tf_rm_sram_alloc *sram_alloc __rte_unused,
626 struct tf_rm_entry *sram_entry __rte_unused)
629 struct tfp_send_msg_parms parms = { 0 };
630 struct tf_session_sram_resc_alloc_input req = { 0 };
/* resp is cleared with an explicit memset below rather than an
 * initializer, unlike the sibling functions — cosmetic difference only.
 */
631 struct tf_session_sram_resc_alloc_output resp;
632 struct tf_session *tfs = (struct tf_session *)(tfp->session->core_data);
634 memset(&resp, 0, sizeof(resp));
636 /* Populate the request */
638 tfp_cpu_to_le_32(tfs->session_id.internal.fw_session_id);
639 req.flags = tfp_cpu_to_le_16(dir);
/* Marshal the requested entry count for every SRAM resource type */
641 TF_SRAM_ALLOC_TO_REQ(sram_alloc, TF_RESC_TYPE_SRAM_FULL_ACTION, req,
643 TF_SRAM_ALLOC_TO_REQ(sram_alloc, TF_RESC_TYPE_SRAM_MCG, req,
645 TF_SRAM_ALLOC_TO_REQ(sram_alloc, TF_RESC_TYPE_SRAM_ENCAP_8B, req,
647 TF_SRAM_ALLOC_TO_REQ(sram_alloc, TF_RESC_TYPE_SRAM_ENCAP_16B, req,
649 TF_SRAM_ALLOC_TO_REQ(sram_alloc, TF_RESC_TYPE_SRAM_ENCAP_64B, req,
651 TF_SRAM_ALLOC_TO_REQ(sram_alloc, TF_RESC_TYPE_SRAM_SP_SMAC, req,
653 TF_SRAM_ALLOC_TO_REQ(sram_alloc, TF_RESC_TYPE_SRAM_SP_SMAC_IPV4,
655 TF_SRAM_ALLOC_TO_REQ(sram_alloc, TF_RESC_TYPE_SRAM_SP_SMAC_IPV6,
657 TF_SRAM_ALLOC_TO_REQ(sram_alloc, TF_RESC_TYPE_SRAM_COUNTER_64B,
659 TF_SRAM_ALLOC_TO_REQ(sram_alloc, TF_RESC_TYPE_SRAM_NAT_SPORT, req,
661 TF_SRAM_ALLOC_TO_REQ(sram_alloc, TF_RESC_TYPE_SRAM_NAT_DPORT, req,
663 TF_SRAM_ALLOC_TO_REQ(sram_alloc, TF_RESC_TYPE_SRAM_NAT_S_IPV4, req,
665 TF_SRAM_ALLOC_TO_REQ(sram_alloc, TF_RESC_TYPE_SRAM_NAT_D_IPV4, req,
671 HWRM_TFT_SESSION_SRAM_RESC_ALLOC,
/* Tunneled (TFT) send path */
675 rc = tfp_send_msg_tunneled(tfp, &parms);
679 /* Process the response */
/* Record the firmware-granted start/stride per SRAM resource type */
680 TF_SRAM_RESP_TO_ALLOC(sram_entry, TF_RESC_TYPE_SRAM_FULL_ACTION,
682 TF_SRAM_RESP_TO_ALLOC(sram_entry, TF_RESC_TYPE_SRAM_MCG, resp,
684 TF_SRAM_RESP_TO_ALLOC(sram_entry, TF_RESC_TYPE_SRAM_ENCAP_8B, resp,
686 TF_SRAM_RESP_TO_ALLOC(sram_entry, TF_RESC_TYPE_SRAM_ENCAP_16B, resp,
688 TF_SRAM_RESP_TO_ALLOC(sram_entry, TF_RESC_TYPE_SRAM_ENCAP_64B, resp,
690 TF_SRAM_RESP_TO_ALLOC(sram_entry, TF_RESC_TYPE_SRAM_SP_SMAC, resp,
692 TF_SRAM_RESP_TO_ALLOC(sram_entry, TF_RESC_TYPE_SRAM_SP_SMAC_IPV4,
694 TF_SRAM_RESP_TO_ALLOC(sram_entry, TF_RESC_TYPE_SRAM_SP_SMAC_IPV6,
696 TF_SRAM_RESP_TO_ALLOC(sram_entry, TF_RESC_TYPE_SRAM_COUNTER_64B, resp,
698 TF_SRAM_RESP_TO_ALLOC(sram_entry, TF_RESC_TYPE_SRAM_NAT_SPORT, resp,
700 TF_SRAM_RESP_TO_ALLOC(sram_entry, TF_RESC_TYPE_SRAM_NAT_DPORT, resp,
702 TF_SRAM_RESP_TO_ALLOC(sram_entry, TF_RESC_TYPE_SRAM_NAT_S_IPV4, resp,
704 TF_SRAM_RESP_TO_ALLOC(sram_entry, TF_RESC_TYPE_SRAM_NAT_D_IPV4, resp,
707 return tfp_le_to_cpu_32(parms.tf_resp_code);
711 * Sends session SRAM resource free request to TF Firmware
/* NOTE(review): __rte_unused markers look stale — tfp and sram_entry
 * are both referenced in the body.
 */
714 tf_msg_session_sram_resc_free(struct tf *tfp __rte_unused,
716 struct tf_rm_entry *sram_entry __rte_unused)
719 struct tfp_send_msg_parms parms = { 0 };
720 struct tf_session_sram_resc_free_input req = { 0 };
721 struct tf_session *tfs = (struct tf_session *)(tfp->session->core_data);
723 /* Populate the request */
725 tfp_cpu_to_le_32(tfs->session_id.internal.fw_session_id);
726 req.flags = tfp_cpu_to_le_16(dir);
/* Marshal the held start/stride ranges into the free request */
728 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_FULL_ACTION, req,
730 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_MCG, req,
732 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_ENCAP_8B, req,
734 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_ENCAP_16B, req,
736 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_ENCAP_64B, req,
738 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_SP_SMAC, req,
740 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_SP_SMAC_IPV4, req,
742 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_SP_SMAC_IPV6, req,
744 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_COUNTER_64B, req,
746 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_NAT_SPORT, req,
748 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_NAT_DPORT, req,
750 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_NAT_S_IPV4, req,
752 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_NAT_D_IPV4, req,
/* No response payload expected — only the tunnel status code */
755 MSG_PREP_NO_RESP(parms,
758 HWRM_TFT_SESSION_SRAM_RESC_FREE,
761 rc = tfp_send_msg_tunneled(tfp, &parms);
765 return tfp_le_to_cpu_32(parms.tf_resp_code);
769 * Sends session SRAM resource flush request to TF Firmware
772 tf_msg_session_sram_resc_flush(struct tf *tfp,
774 struct tf_rm_entry *sram_entry)
777 struct tfp_send_msg_parms parms = { 0 };
/* Flush reuses the free-request layout; only the tunnel opcode differs */
778 struct tf_session_sram_resc_free_input req = { 0 };
779 struct tf_session *tfs = (struct tf_session *)(tfp->session->core_data);
781 /* Populate the request */
783 tfp_cpu_to_le_32(tfs->session_id.internal.fw_session_id);
784 req.flags = tfp_cpu_to_le_16(dir);
/* Marshal the held start/stride ranges into the flush request */
786 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_FULL_ACTION, req,
788 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_MCG, req,
790 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_ENCAP_8B, req,
792 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_ENCAP_16B, req,
794 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_ENCAP_64B, req,
796 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_SP_SMAC, req,
798 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_SP_SMAC_IPV4, req,
800 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_SP_SMAC_IPV6, req,
802 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_COUNTER_64B, req,
804 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_NAT_SPORT, req,
806 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_NAT_DPORT, req,
808 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_NAT_S_IPV4, req,
810 TF_SRAM_FREE_TO_REQ(sram_entry, TF_RESC_TYPE_SRAM_NAT_D_IPV4, req,
/* No response payload expected — only the tunnel status code */
813 MSG_PREP_NO_RESP(parms,
816 HWRM_TFT_SESSION_SRAM_RESC_FLUSH,
819 rc = tfp_send_msg_tunneled(tfp, &parms);
823 return tfp_le_to_cpu_32(parms.tf_resp_code);