1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(C) 2020 Marvell International Ltd.
9 #include <rte_common.h>
10 #include <rte_debug.h>
12 #include <rte_errno.h>
13 #include <rte_string_fns.h>
15 #include "graph_private.h"
/* Global singly-linked tail queue of all registered nodes. */
17 static struct node_head node_list = STAILQ_HEAD_INITIALIZER(node_list);
/* Next node id to hand out; ids are assigned monotonically (see
 * node->id = node_id++ in __rte_node_register), so this also counts
 * the ids allocated so far. */
18 static rte_node_t node_id;
/* Validate a user-supplied node id against the ids allocated so far. */
20 #define NODE_ID_CHECK(id) ID_CHECK(id, node_id)
22 /* Private functions */
/* Accessor for the global node list head, for graph internals.
 * NOTE(review): the return-type line and body are missing from this
 * view of the file — presumably returns &node_list; confirm against
 * the full source. */
24 node_list_head_get(void)
/* Look up a node by exact name in the global list; the compare is
 * bounded to RTE_NODE_NAMESIZE to stay within the name buffers.
 * NOTE(review): the declaration/return lines are missing from this
 * view of the file. */
30 node_from_name(const char *name)
34 STAILQ_FOREACH(node, &node_list, next)
35 if (strncmp(node->name, name, RTE_NODE_NAMESIZE) == 0)
/* Report whether a node with this name is already registered.
 * NOTE(review): the body of the match branch (error reporting and both
 * return statements) is missing from this view of the file. */
42 node_has_duplicate_entry(const char *name)
46 /* Is duplicate name registered */
47 STAILQ_FOREACH(node, &node_list, next) {
48 if (strncmp(node->name, name, RTE_NODE_NAMESIZE) == 0) {
56 /* Public functions */
/*
 * Register a new node described by @reg and return its id.
 *
 * Under the graph spinlock: validates the registration (non-NULL @reg
 * and process callback, no duplicate name), allocates a struct node
 * with trailing storage for nb_edges edge-name strings, copies the
 * registration fields and edge names with bounded rte_strscpy(),
 * assigns the next free node id and appends the node to the global
 * list.  Returns RTE_NODE_ID_INVALID on failure.
 *
 * NOTE(review): the allocation of @node, the error labels and the
 * success return are missing from this view of the file.
 */
58 __rte_node_register(const struct rte_node_register *reg)
/* The ctx..nodes span must be exactly one minimum cacheline so the
 * per-node fast-path metadata stays cache friendly. */
64 /* Limit Node specific metadata to one cacheline on 64B CL machine */
65 RTE_BUILD_BUG_ON((offsetof(struct rte_node, nodes) -
66 offsetof(struct rte_node, ctx)) !=
67 RTE_CACHE_LINE_MIN_SIZE);
69 graph_spinlock_lock();
/* A node is unusable without a process() callback. */
72 if (reg == NULL || reg->process == NULL) {
77 /* Check for duplicate name */
78 if (node_has_duplicate_entry(reg->name))
/* Trailing storage: one RTE_NODE_NAMESIZE slot per edge name. */
81 sz = sizeof(struct node) + (reg->nb_edges * RTE_NODE_NAMESIZE);
88 /* Initialize the node */
/* rte_strscpy() < 0 means the name would not fit (truncation). */
89 if (rte_strscpy(node->name, reg->name, RTE_NODE_NAMESIZE) < 0) {
93 node->flags = reg->flags;
94 node->process = reg->process;
95 node->init = reg->init;
96 node->fini = reg->fini;
97 node->nb_edges = reg->nb_edges;
98 node->parent_id = reg->parent_id;
99 for (i = 0; i < reg->nb_edges; i++) {
100 if (rte_strscpy(node->next_nodes[i], reg->next_nodes[i],
101 RTE_NODE_NAMESIZE) < 0) {
/* Ids are handed out monotonically and never reused. */
107 node->id = node_id++;
109 /* Add the node at tail */
110 STAILQ_INSERT_TAIL(&node_list, node, next);
111 graph_spinlock_unlock();
/* Error path: drop the lock and report failure. */
117 graph_spinlock_unlock();
118 return RTE_NODE_ID_INVALID;
/* Compose the clone's name "<node->name>-<name>" into reg->name, each
 * piece bounded by the space remaining in RTE_NODE_NAMESIZE; the
 * RTE_MAX(..., 0) guard keeps the size argument non-negative once the
 * buffer is exhausted.
 * NOTE(review): the rc/sz error checks between the copies are missing
 * from this view of the file. */
122 clone_name(struct rte_node_register *reg, struct node *node, const char *name)
126 #define SZ RTE_NODE_NAMESIZE
127 rc = rte_strscpy(reg->name, node->name, SZ);
131 rc = rte_strscpy(reg->name + sz, "-", RTE_MAX((int16_t)(SZ - sz), 0));
135 sz = rte_strscpy(reg->name + sz, name, RTE_MAX((int16_t)(SZ - sz), 0));
/*
 * Create a new node sharing @node's callbacks and edges, named
 * "<node->name>-<name>".  Cloning a clone is rejected (a clone carries
 * its source's id in parent_id, see below), as is a duplicate name.
 * A temporary rte_node_register is heap-allocated, filled from @node
 * and handed to __rte_node_register(); its edge-name pointers alias
 * the source node's storage, so no strings are copied here.
 * Returns the new node id or RTE_NODE_ID_INVALID.
 *
 * NOTE(review): the calloc error handling, the free of @reg and the
 * exit labels are missing from this view of the file.
 */
146 node_clone(struct node *node, const char *name)
148 rte_node_t rc = RTE_NODE_ID_INVALID;
149 struct rte_node_register *reg;
152 /* Don't allow to clone a node from a cloned node */
153 if (node->parent_id != RTE_NODE_ID_INVALID) {
158 /* Check for duplicate name */
159 if (node_has_duplicate_entry(name))
/* Room for the register struct plus one char * per edge name. */
162 reg = calloc(1, sizeof(*reg) + (sizeof(char *) * node->nb_edges));
168 /* Clone the source node */
169 reg->flags = node->flags;
170 reg->process = node->process;
171 reg->init = node->init;
172 reg->fini = node->fini;
173 reg->nb_edges = node->nb_edges;
/* Record the source node as parent; this marks the result a clone. */
174 reg->parent_id = node->id;
176 for (i = 0; i < node->nb_edges; i++)
177 reg->next_nodes[i] = node->next_nodes[i];
179 /* Naming ceremony of the new node. name is node->name + "-" + name */
180 if (clone_name(reg, node, name))
183 rc = __rte_node_register(reg);
/* Public wrapper: locate the node with @id in the global list and
 * clone it with the new suffix @name; RTE_NODE_ID_INVALID when @id is
 * unknown.
 * NOTE(review): the id validation and the id-match test inside the
 * loop are missing from this view of the file. */
191 rte_node_clone(rte_node_t id, const char *name)
196 STAILQ_FOREACH(node, &node_list, next)
198 return node_clone(node, name);
201 return RTE_NODE_ID_INVALID;
/* Resolve a node name to its id; RTE_NODE_ID_INVALID when not found.
 * NOTE(review): the return-of-id line inside the loop is missing from
 * this view of the file. */
205 rte_node_from_name(const char *name)
209 STAILQ_FOREACH(node, &node_list, next)
210 if (strncmp(node->name, name, RTE_NODE_NAMESIZE) == 0)
213 return RTE_NODE_ID_INVALID;
/* Resolve a node id to its registered name.
 * NOTE(review): the id validation, the match test and both return
 * statements are missing from this view of the file. */
217 rte_node_id_to_name(rte_node_t id)
222 STAILQ_FOREACH(node, &node_list, next)
/* Return the number of edges of node @id, or RTE_EDGE_ID_INVALID when
 * no registered node matches.
 * NOTE(review): the id-match test inside the loop is missing from this
 * view of the file. */
231 rte_node_edge_count(rte_node_t id)
236 STAILQ_FOREACH(node, &node_list, next)
238 return node->nb_edges;
240 return RTE_EDGE_ID_INVALID;
/*
 * Replace/extend @node's edge names starting at index @from with
 * @nb_edges entries from @next_nodes.
 *
 * from == RTE_EDGE_ID_INVALID means append after the existing edges;
 * a @from beyond nb_edges is rejected so next_nodes[] never has holes.
 * The edge-name strings live in the node's trailing storage, so
 * growing the edge set realloc()s the node itself: the node is first
 * unlinked from the global list, reallocated, then relinked after
 * @prev (or at the head when it had no predecessor) because realloc
 * may move the object.
 *
 * NOTE(review): the realloc-success path that folds new_node back into
 * @node, the need_realloc branch structure and several error branches
 * are missing from this view of the file — confirm @node is updated to
 * new_node before the copy loop below, and confirm the return value.
 */
244 edge_update(struct node *node, struct node *prev, rte_edge_t from,
245 const char **next_nodes, rte_edge_t nb_edges)
247 rte_edge_t i, max_edges, count = 0;
248 struct node *new_node;
/* INVALID means "append after the current last edge". */
252 if (from == RTE_EDGE_ID_INVALID)
253 from = node->nb_edges;
255 /* Don't create hole in next_nodes[] list */
256 if (from > node->nb_edges) {
261 /* Remove me from list */
262 STAILQ_REMOVE(&node_list, node, node, next);
264 /* Allocate the storage space for new node if required */
265 max_edges = from + nb_edges;
266 need_realloc = max_edges > node->nb_edges;
268 sz = sizeof(struct node) + (max_edges * RTE_NODE_NAMESIZE);
/* realloc may move the node; the old pointer stays valid on failure. */
269 new_node = realloc(node, sz);
270 if (new_node == NULL) {
278 /* Update the new nodes name */
279 for (i = from; i < max_edges; i++, count++) {
280 if (rte_strscpy(node->next_nodes[i], next_nodes[count],
281 RTE_NODE_NAMESIZE) < 0) {
287 /* Update the linked list to point new node address in prev node */
289 STAILQ_INSERT_AFTER(&node_list, prev, node, next);
/* No predecessor: node was the first element of the list. */
291 STAILQ_INSERT_HEAD(&node_list, node, next);
294 node->nb_edges = max_edges;
/* Shrink node @id's edge count down to @size under the graph spinlock.
 * Growing is rejected (@size larger than the current edge count); only
 * nb_edges is lowered, the trailing name storage is left allocated.
 * Returns rc (RTE_EDGE_ID_INVALID on error).
 * NOTE(review): the id validation, error reporting in the reject
 * branch, the success assignment to rc and the final return are
 * missing from this view of the file. */
301 rte_node_edge_shrink(rte_node_t id, rte_edge_t size)
303 rte_edge_t rc = RTE_EDGE_ID_INVALID;
307 graph_spinlock_lock();
309 STAILQ_FOREACH(node, &node_list, next) {
310 if (node->id == id) {
/* Only shrinking is allowed here. */
311 if (node->nb_edges < size) {
315 node->nb_edges = size;
322 graph_spinlock_unlock();
/* Public wrapper around edge_update(): walk the list to find node @id
 * while tracking its predecessor @prev (needed so edge_update() can
 * relink the node after a realloc), all under the graph spinlock.
 * Returns the edge_update() result, RTE_EDGE_ID_INVALID when @id is
 * unknown.
 * NOTE(review): the id-match test, the prev-tracking assignment and
 * the final return are missing from this view of the file. */
327 rte_node_edge_update(rte_node_t id, rte_edge_t from, const char **next_nodes,
330 rte_edge_t rc = RTE_EDGE_ID_INVALID;
331 struct node *n, *prev;
334 graph_spinlock_lock();
337 STAILQ_FOREACH(n, &node_list, next) {
339 rc = edge_update(n, prev, from, next_nodes, nb_edges);
345 graph_spinlock_unlock();
/* Copy the node's edge-name pointers (not the strings themselves) into
 * the caller-provided array; the pointers alias the node's storage.
 * NOTE(review): the return statement is missing from this view of the
 * file — presumably returns the number of edges copied; confirm. */
351 node_copy_edges(struct node *node, char *next_nodes[])
355 for (i = 0; i < node->nb_edges; i++)
356 next_nodes[i] = node->next_nodes[i];
/* Two-mode query under the graph spinlock: with next_nodes == NULL,
 * return the byte size the caller must allocate (nb_edges pointers);
 * otherwise fill next_nodes[] via node_copy_edges().  rc stays
 * RTE_NODE_ID_INVALID when @id matches no node.
 * NOTE(review): the id validation, the else keyword pairing line 372
 * with line 375 and the final return are missing from this view of the
 * file. */
362 rte_node_edge_get(rte_node_t id, char *next_nodes[])
364 rte_node_t rc = RTE_NODE_ID_INVALID;
368 graph_spinlock_lock();
370 STAILQ_FOREACH(node, &node_list, next) {
371 if (node->id == id) {
/* Size query: caller is asking how big next_nodes[] must be. */
372 if (next_nodes == NULL)
373 rc = sizeof(char *) * node->nb_edges;
375 rc = node_copy_edges(node, next_nodes);
380 graph_spinlock_unlock();
/* Walk the node list and dump either every node (@all set) or only the
 * node matching @id to stream @f.  Shared backend for rte_node_dump()
 * and rte_node_list_dump().
 * NOTE(review): the per-node dump call, the all-branch and the id
 * validation are missing from this view of the file. */
386 node_scan_dump(FILE *f, rte_node_t id, bool all)
390 RTE_ASSERT(f != NULL);
393 STAILQ_FOREACH(node, &node_list, next) {
396 } else if (node->id == id) {
/* Dump the single node identified by @id to stream @f. */
406 rte_node_dump(FILE *f, rte_node_t id)
408 node_scan_dump(f, id, false);
/* Dump every registered node to stream @f; the id argument of the
 * scan is a don't-care (0) because the all flag is set. */
412 rte_node_list_dump(FILE *f)
414 node_scan_dump(f, 0, true);
418 rte_node_max_count(void)