/* Per-lcore counter used while emitting CTF metadata.
 * NOTE(review): exact use inferred from the name only — confirm against
 * the CTF metadata generation code in the full file. */
static RTE_DEFINE_PER_LCORE(int, ctf_count);
/* Head of the singly-linked tail queue of registered trace points;
 * statically initialized to an empty list. */
static struct trace_point_head tp_list = STAILQ_HEAD_INITIALIZER(tp_list);
-static struct trace trace;
+static struct trace trace = { .args = STAILQ_HEAD_INITIALIZER(trace.args), };
struct trace *
trace_obj_get(void)
int
eal_trace_init(void)
{
+ struct trace_arg *arg;
+
/* Trace memory should start with 8B aligned for natural alignment */
RTE_BUILD_BUG_ON((offsetof(struct __rte_trace_header, mem) % 8) != 0);
goto fail;
}
+ if (!STAILQ_EMPTY(&trace.args))
+ trace.status = true;
+
if (!rte_trace_is_enabled())
return 0;
*/
trace_uuid_generate();
+ /* Apply buffer size configuration for trace output */
+ trace_bufsz_args_apply();
+
/* Generate CTF TDSL metadata */
if (trace_metadata_create() < 0)
goto fail;
if (trace_epoch_time_save() < 0)
goto fail;
+ /* Apply global configurations */
+ STAILQ_FOREACH(arg, &trace.args, next)
+ trace_args_apply(arg->val);
+
rte_trace_mode_set(trace.mode);
return 0;
return;
trace_mem_per_thread_free();
trace_metadata_destroy();
+ eal_trace_args_free();
}
bool
}
/* First attempt from huge page */
- header = rte_malloc(NULL, trace_mem_sz(trace->buff_len), 8);
+ header = eal_malloc_no_trace(NULL, trace_mem_sz(trace->buff_len), 8);
if (header) {
trace->lcore_meta[count].area = TRACE_AREA_HUGEPAGE;
goto found;
for (count = 0; count < trace->nb_trace_mem_list; count++) {
mem = trace->lcore_meta[count].mem;
if (trace->lcore_meta[count].area == TRACE_AREA_HUGEPAGE)
- rte_free(mem);
+ eal_free_no_trace(mem);
else if (trace->lcore_meta[count].area == TRACE_AREA_HEAP)
free(mem);
}