/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(C) 2020 Marvell International Ltd.
 */
5 #ifndef _RTE_TRACE_POINT_H_
6 #error do not include this file directly, use <rte_trace_point.h> instead
9 #ifndef _RTE_TRACE_POINT_PROVIDER_H_
10 #define _RTE_TRACE_POINT_PROVIDER_H_
12 #ifdef ALLOW_EXPERIMENTAL_API
14 #include <rte_branch_prediction.h>
15 #include <rte_cycles.h>
16 #include <rte_per_lcore.h>
17 #include <rte_string_fns.h>
/* Bit position of the tracepoint ID inside the 64-bit event header word
 * (see __rte_trace_point_emit_ev_header()).
 */
#define __RTE_TRACE_EVENT_HEADER_ID_SHIFT (48)

/* Layout of the 64-bit per-tracepoint handle value:
 *  [15:0]  event payload size in bytes (extracted in __rte_trace_mem_get)
 *  [31:16] tracepoint ID (copied into the event header)
 *  [62]    discard-on-buffer-full flag
 *  [63]    tracepoint enabled flag (checked on every emit)
 */
#define __RTE_TRACE_FIELD_SIZE_SHIFT 0
#define __RTE_TRACE_FIELD_SIZE_MASK (0xffffULL << __RTE_TRACE_FIELD_SIZE_SHIFT)
#define __RTE_TRACE_FIELD_ID_SHIFT (16)
#define __RTE_TRACE_FIELD_ID_MASK (0xffffULL << __RTE_TRACE_FIELD_ID_SHIFT)
#define __RTE_TRACE_FIELD_ENABLE_MASK (1ULL << 63)
#define __RTE_TRACE_FIELD_ENABLE_DISCARD (1ULL << 62)
29 struct __rte_trace_stream_header {
33 char thread_name[__RTE_TRACE_EMIT_STRING_LEN_MAX];
36 struct __rte_trace_header {
39 struct __rte_trace_stream_header stream_header;
43 RTE_DECLARE_PER_LCORE(void *, trace_mem);
45 static __rte_always_inline void *
46 __rte_trace_mem_get(uint64_t in)
48 struct __rte_trace_header *trace = RTE_PER_LCORE(trace_mem);
49 const uint16_t sz = in & __RTE_TRACE_FIELD_SIZE_MASK;
51 /* Trace memory is not initialized for this thread */
52 if (unlikely(trace == NULL)) {
53 __rte_trace_mem_per_thread_alloc();
54 trace = RTE_PER_LCORE(trace_mem);
55 if (unlikely(trace == NULL))
58 /* Check the wrap around case */
59 uint32_t offset = trace->offset;
60 if (unlikely((offset + sz) >= trace->len)) {
61 /* Disable the trace event if it in DISCARD mode */
62 if (unlikely(in & __RTE_TRACE_FIELD_ENABLE_DISCARD))
67 /* Align to event header size */
68 offset = RTE_ALIGN_CEIL(offset, __RTE_TRACE_EVENT_HEADER_SZ);
69 void *mem = RTE_PTR_ADD(&trace->mem[0], offset);
71 trace->offset = offset;
76 static __rte_always_inline void *
77 __rte_trace_point_emit_ev_header(void *mem, uint64_t in)
81 /* Event header [63:0] = id [63:48] | timestamp [47:0] */
82 val = rte_get_tsc_cycles() &
83 ~(0xffffULL << __RTE_TRACE_EVENT_HEADER_ID_SHIFT);
84 val |= ((in & __RTE_TRACE_FIELD_ID_MASK) <<
85 (__RTE_TRACE_EVENT_HEADER_ID_SHIFT - __RTE_TRACE_FIELD_ID_SHIFT));
87 *(uint64_t *)mem = val;
88 return RTE_PTR_ADD(mem, __RTE_TRACE_EVENT_HEADER_SZ);
/*
 * Common prologue for generated tracepoint emit functions. Declares 'mem',
 * returns early (bare 'return;') when the tracepoint is disabled or no
 * buffer space is available, otherwise writes the event header and leaves
 * 'mem' pointing at the payload area for the emit macros below. Must expand
 * inside a void function body.
 */
#define __rte_trace_point_emit_header_generic(t) \
void *mem; \
do { \
	const uint64_t val = __atomic_load_n(t, __ATOMIC_ACQUIRE); \
	if (likely(!(val & __RTE_TRACE_FIELD_ENABLE_MASK))) \
		return; \
	mem = __rte_trace_mem_get(val); \
	if (unlikely(mem == NULL)) \
		return; \
	mem = __rte_trace_point_emit_ev_header(mem, val); \
} while (0)
/*
 * Fast-path variant of the emit prologue: returns immediately unless
 * fast-path tracepoints are enabled, then falls through to the generic
 * prologue. Must expand inside a void function body.
 */
#define __rte_trace_point_emit_header_fp(t) \
	if (!__rte_trace_point_fp_is_enabled()) \
		return; \
	__rte_trace_point_emit_header_generic(t)
/* Copy one fixed-size field into the event payload at 'mem' (declared by the
 * emit-header prologue) and advance 'mem' past it.
 */
#define __rte_trace_point_emit(in, type) \
do { \
	memcpy(mem, &(in), sizeof(in)); \
	mem = RTE_PTR_ADD(mem, sizeof(in)); \
} while (0)
/*
 * Emit a NUL-terminated string as a fixed-size field of
 * __RTE_TRACE_EMIT_STRING_LEN_MAX bytes (truncated via rte_strscpy).
 * A NULL input aborts the emit with a bare 'return;' from the enclosing
 * void function.
 */
#define rte_trace_point_emit_string(in) \
do { \
	if (unlikely(in == NULL)) \
		return; \
	rte_strscpy(mem, in, __RTE_TRACE_EMIT_STRING_LEN_MAX); \
	mem = RTE_PTR_ADD(mem, __RTE_TRACE_EMIT_STRING_LEN_MAX); \
} while (0)
124 #define __rte_trace_point_emit_header_generic(t) RTE_SET_USED(t)
125 #define __rte_trace_point_emit_header_fp(t) RTE_SET_USED(t)
126 #define __rte_trace_point_emit(in, type) RTE_SET_USED(in)
127 #define rte_trace_point_emit_string(in) RTE_SET_USED(in)
131 #endif /* _RTE_TRACE_POINT_PROVIDER_H_ */