int
eal_trace_init(void)
{
+ uint8_t i;
+
/* Trace memory should start with 8B aligned for natural alignment */
RTE_BUILD_BUG_ON((offsetof(struct __rte_trace_header, mem) % 8) != 0);
goto fail;
}
+ if (trace.args.nb_args)
+ trace.status = true;
+
if (!rte_trace_is_enabled())
return 0;
*/
trace_uuid_generate();
+ /* Apply buffer size configuration for trace output */
+ trace_bufsz_args_apply();
+
/* Generate CTF TDSL metadata */
if (trace_metadata_create() < 0)
goto fail;
if (trace_epoch_time_save() < 0)
goto fail;
+ /* Apply global configurations */
+ for (i = 0; i < trace.args.nb_args; i++)
+ trace_args_apply(trace.args.args[i]);
+
rte_trace_mode_set(trace.mode);
return 0;
return;
trace_mem_per_thread_free();
trace_metadata_destroy();
+ eal_trace_args_free();
}
/* NOTE(review): truncated fragment — the name, parameter list and body of
 * this bool-returning function (presumably rte_trace_is_enabled, given the
 * callers above) were lost when this chunk was extracted. Recover the full
 * definition from the upstream file before building. */
bool
}
/* NOTE(review): fragment of the per-thread trace memory allocator; the
 * enclosing function's signature is outside this chunk. The two lines below
 * retain unified-diff '-'/'+' markers from a mangled patch: the '-' line is
 * the pre-patch rte_malloc() call, the '+' line its replacement with the
 * non-tracing allocator (avoids recursively emitting trace events while
 * allocating trace memory). Resolve the markers before compiling. */
/* First attempt from huge page */
- header = rte_malloc(NULL, trace_mem_sz(trace->buff_len), 8);
+ header = eal_malloc_no_trace(NULL, trace_mem_sz(trace->buff_len), 8);
if (header) {
trace->lcore_meta[count].area = TRACE_AREA_HUGEPAGE;
goto found;
/* NOTE(review): fragment of the per-thread trace memory free loop; the
 * enclosing function's signature is outside this chunk. Each per-lcore
 * buffer is released with the allocator matching its recorded area:
 * huge-page areas via the non-tracing free (the '+' diff line replacing
 * rte_free), heap areas via libc free(). Resolve the '-'/'+' diff markers
 * before compiling. */
for (count = 0; count < trace->nb_trace_mem_list; count++) {
mem = trace->lcore_meta[count].mem;
if (trace->lcore_meta[count].area == TRACE_AREA_HUGEPAGE)
- rte_free(mem);
+ eal_free_no_trace(mem);
else if (trace->lcore_meta[count].area == TRACE_AREA_HEAP)
free(mem);
}