/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2019 Intel Corporation
 */

#include <errno.h>
#include <string.h>

#include <rte_atomic.h>
#include <rte_debug.h>
#include <rte_errno.h>
#include <rte_launch.h>
#include <rte_lcore.h>
#include <rte_malloc.h>
#include <rte_random.h>
#include <rte_stack.h>

#include "test.h"

#define STACK_SIZE 4096
#define MAX_BULK 32
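/* Push all of obj_table onto the stack bulk_sz entries at a time, checking
 * the used and free counts at each step, then pop everything back into
 * popped_objs and verify LIFO ordering.
 */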
static int
test_stack_push_pop(struct rte_stack *s, void **obj_table, unsigned int bulk_sz)
{
	unsigned int i, ret;
	void **popped_objs;

	popped_objs = rte_calloc(NULL, STACK_SIZE, sizeof(void *), 0);
	if (popped_objs == NULL) {
		printf("[%s():%u] failed to calloc %zu bytes\n",
			__func__, __LINE__, STACK_SIZE * sizeof(void *));
		return -1;
	}

	for (i = 0; i < STACK_SIZE; i += bulk_sz) {
		ret = rte_stack_push(s, &obj_table[i], bulk_sz);
		if (ret != bulk_sz) {
			printf("[%s():%u] push returned: %d (expected %u)\n",
				__func__, __LINE__, ret, bulk_sz);
			rte_free(popped_objs);
			return -1;
		}

		if (rte_stack_count(s) != i + bulk_sz) {
			printf("[%s():%u] stack count: %u (expected %u)\n",
				__func__, __LINE__, rte_stack_count(s),
				i + bulk_sz);
			rte_free(popped_objs);
			return -1;
		}

		if (rte_stack_free_count(s) != STACK_SIZE - i - bulk_sz) {
			printf("[%s():%u] stack free count: %u (expected %u)\n",
				__func__, __LINE__, rte_stack_free_count(s),
				STACK_SIZE - i - bulk_sz);
			rte_free(popped_objs);
			return -1;
		}
	}

	for (i = 0; i < STACK_SIZE; i += bulk_sz) {
		ret = rte_stack_pop(s, &popped_objs[i], bulk_sz);
		if (ret != bulk_sz) {
			printf("[%s():%u] pop returned: %d (expected %u)\n",
				__func__, __LINE__, ret, bulk_sz);
			rte_free(popped_objs);
			return -1;
		}

		if (rte_stack_count(s) != STACK_SIZE - i - bulk_sz) {
			printf("[%s():%u] stack count: %u (expected %u)\n",
				__func__, __LINE__, rte_stack_count(s),
				STACK_SIZE - i - bulk_sz);
			rte_free(popped_objs);
			return -1;
		}

		if (rte_stack_free_count(s) != i + bulk_sz) {
			printf("[%s():%u] stack free count: %u (expected %u)\n",
				__func__, __LINE__, rte_stack_free_count(s),
				i + bulk_sz);
			rte_free(popped_objs);
			return -1;
		}
	}

	for (i = 0; i < STACK_SIZE; i++) {
		if (obj_table[i] != popped_objs[STACK_SIZE - i - 1]) {
			printf("[%s():%u] Incorrect value %p at index 0x%x\n",
				__func__, __LINE__,
				popped_objs[STACK_SIZE - i - 1], i);
			rte_free(popped_objs);
			return -1;
		}
	}

	rte_free(popped_objs);

	return 0;
}
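/* Single-threaded sanity test: create and look up a stack, verify its initial
 * counts, then exercise single, bulk, overflow, and underflow push/pop.
 */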
static int
test_stack_basic(uint32_t flags)
{
	struct rte_stack *s = NULL;
	void **obj_table = NULL;
	int i, ret = -1;

	obj_table = rte_calloc(NULL, STACK_SIZE, sizeof(void *), 0);
	if (obj_table == NULL) {
		printf("[%s():%u] failed to calloc %zu bytes\n",
			__func__, __LINE__, STACK_SIZE * sizeof(void *));
		return -1;
	}

	for (i = 0; i < STACK_SIZE; i++)
		obj_table[i] = (void *)(uintptr_t)i;

	s = rte_stack_create(__func__, STACK_SIZE, rte_socket_id(), flags);
	if (s == NULL) {
		printf("[%s():%u] failed to create a stack\n",
			__func__, __LINE__);
		goto fail_test;
	}

	if (rte_stack_lookup(__func__) != s) {
		printf("[%s():%u] failed to lookup a stack\n",
			__func__, __LINE__);
		goto fail_test;
	}

	if (rte_stack_count(s) != 0) {
		printf("[%s():%u] stack count: %u (expected 0)\n",
			__func__, __LINE__, rte_stack_count(s));
		goto fail_test;
	}

	if (rte_stack_free_count(s) != STACK_SIZE) {
		printf("[%s():%u] stack free count: %u (expected %u)\n",
			__func__, __LINE__, rte_stack_free_count(s), STACK_SIZE);
		goto fail_test;
	}

	ret = test_stack_push_pop(s, obj_table, 1);
	if (ret) {
		printf("[%s():%u] Single object push/pop failed\n",
			__func__, __LINE__);
		goto fail_test;
	}

	ret = test_stack_push_pop(s, obj_table, MAX_BULK);
	if (ret) {
		printf("[%s():%u] Bulk object push/pop failed\n",
			__func__, __LINE__);
		goto fail_test;
	}

	ret = rte_stack_push(s, obj_table, 2 * STACK_SIZE);
	if (ret != 0) {
		printf("[%s():%u] Excess objects push succeeded\n",
			__func__, __LINE__);
		goto fail_test;
	}

	ret = rte_stack_pop(s, obj_table, 1);
	if (ret != 0) {
		printf("[%s():%u] Empty stack pop succeeded\n",
			__func__, __LINE__);
		goto fail_test;
	}

	ret = 0;

fail_test:
	rte_stack_free(s);
	rte_free(obj_table);

	return ret;
}
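/* Creating a second stack with an already-registered name must fail. */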
static int
test_stack_name_reuse(uint32_t flags)
{
	struct rte_stack *s[2];

	s[0] = rte_stack_create("test", STACK_SIZE, rte_socket_id(), flags);
	if (s[0] == NULL) {
		printf("[%s():%u] Failed to create a stack\n",
			__func__, __LINE__);
		return -1;
	}

	s[1] = rte_stack_create("test", STACK_SIZE, rte_socket_id(), flags);
	if (s[1] != NULL) {
		printf("[%s():%u] Failed to detect re-used name\n",
			__func__, __LINE__);
		return -1;
	}

	rte_stack_free(s[0]);

	return 0;
}
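/* A name longer than RTE_STACK_NAMESIZE must be rejected with rte_errno set
 * to ENAMETOOLONG.
 */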
static int
test_stack_name_length(uint32_t flags)
{
	char name[RTE_STACK_NAMESIZE + 1];
	struct rte_stack *s;

	memset(name, 's', sizeof(name));
	name[RTE_STACK_NAMESIZE] = '\0';

	s = rte_stack_create(name, STACK_SIZE, rte_socket_id(), flags);
	if (s != NULL) {
		printf("[%s():%u] Failed to prevent long name\n",
			__func__, __LINE__);
		return -1;
	}

	if (rte_errno != ENAMETOOLONG) {
		printf("[%s():%u] rte_stack failed to set correct errno on failed create\n",
			__func__, __LINE__);
		return -1;
	}

	return 0;
}
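/* Looking up an unknown name must fail with ENOENT, and a NULL name must
 * fail with EINVAL.
 */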
static int
test_lookup_null(void)
{
	struct rte_stack *s = rte_stack_lookup("stack_not_found");

	if (s != NULL) {
		printf("[%s():%u] rte_stack found a non-existent stack\n",
			__func__, __LINE__);
		return -1;
	}

	if (rte_errno != ENOENT) {
		printf("[%s():%u] rte_stack failed to set correct errno on failed lookup\n",
			__func__, __LINE__);
		return -1;
	}

	s = rte_stack_lookup(NULL);

	if (s != NULL) {
		printf("[%s():%u] rte_stack found a non-existent stack\n",
			__func__, __LINE__);
		return -1;
	}

	if (rte_errno != EINVAL) {
		printf("[%s():%u] rte_stack failed to set correct errno on failed lookup\n",
			__func__, __LINE__);
		return -1;
	}

	return 0;
}
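/* Freeing a NULL stack must be a harmless no-op. */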
static int
test_free_null(void)
{
	/* Check whether the library properly handles a NULL pointer */
	rte_stack_free(NULL);

	return 0;
}
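/* Per-lcore worker: repeatedly reserve a random number of entries (bounded by
 * the shared element budget in *sz so the stack never overflows), push that
 * many pointers, then pop them back.
 */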
#define NUM_ITERS_PER_THREAD 100000

struct test_args {
	struct rte_stack *s;
	rte_atomic64_t *sz;
};

static int
stack_thread_push_pop(void *args)
{
	struct test_args *t = args;
	void **obj_table;
	int i;

	obj_table = rte_calloc(NULL, STACK_SIZE, sizeof(void *), 0);
	if (obj_table == NULL) {
		printf("[%s():%u] failed to calloc %zu bytes\n",
			__func__, __LINE__, STACK_SIZE * sizeof(void *));
		return -1;
	}

	for (i = 0; i < NUM_ITERS_PER_THREAD; i++) {
		unsigned int success, num;

		/* Reserve up to min(MAX_BULK, available slots) stack entries,
		 * then push and pop those stack entries.
		 */
		do {
			uint64_t sz = rte_atomic64_read(t->sz);
			volatile uint64_t *sz_addr;

			sz_addr = (volatile uint64_t *)t->sz;
			num = RTE_MIN(rte_rand() % MAX_BULK, STACK_SIZE - sz);
			success = rte_atomic64_cmpset(sz_addr, sz, sz + num);
		} while (success == 0);

		if (rte_stack_push(t->s, obj_table, num) != num) {
			printf("[%s():%u] Failed to push %u pointers\n",
				__func__, __LINE__, num);
			rte_free(obj_table);
			return -1;
		}

		if (rte_stack_pop(t->s, obj_table, num) != num) {
			printf("[%s():%u] Failed to pop %u pointers\n",
				__func__, __LINE__, num);
			rte_free(obj_table);
			return -1;
		}

		rte_atomic64_sub(t->sz, num);
	}

	rte_free(obj_table);

	return 0;
}
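/* Launch stack_thread_push_pop() on every available lcore, all operating on
 * the same stack, and wait for them to finish.
 */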
static int
test_stack_multithreaded(uint32_t flags)
{
	struct test_args *args;
	unsigned int lcore_id;
	struct rte_stack *s;
	rte_atomic64_t size;

	if (rte_lcore_count() < 2) {
		printf("Not enough cores for test_stack_multithreaded, expecting at least 2\n");
		return TEST_SKIPPED;
	}

	printf("[%s():%u] Running with %u lcores\n",
		__func__, __LINE__, rte_lcore_count());

	args = rte_malloc(NULL, sizeof(struct test_args) * RTE_MAX_LCORE, 0);
	if (args == NULL) {
		printf("[%s():%u] failed to malloc %zu bytes\n",
			__func__, __LINE__,
			sizeof(struct test_args) * RTE_MAX_LCORE);
		return -1;
	}

	s = rte_stack_create("test", STACK_SIZE, rte_socket_id(), flags);
	if (s == NULL) {
		printf("[%s():%u] Failed to create a stack\n",
			__func__, __LINE__);
		rte_free(args);
		return -1;
	}

	rte_atomic64_init(&size);

	RTE_LCORE_FOREACH_SLAVE(lcore_id) {
		args[lcore_id].s = s;
		args[lcore_id].sz = &size;

		if (rte_eal_remote_launch(stack_thread_push_pop,
				&args[lcore_id], lcore_id))
			rte_panic("Failed to launch lcore %d\n", lcore_id);
	}

	/* Run the worker on the calling lcore as well */
	lcore_id = rte_lcore_id();
	args[lcore_id].s = s;
	args[lcore_id].sz = &size;

	stack_thread_push_pop(&args[lcore_id]);

	rte_eal_mp_wait_lcore();

	rte_stack_free(s);
	rte_free(args);

	return 0;
}
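/* Run the whole suite with the given creation flags; the two registered
 * commands below cover the standard and lock-free (RTE_STACK_F_LF) variants.
 */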
static int
__test_stack(uint32_t flags)
{
	if (test_stack_basic(flags) < 0)
		return -1;

	if (test_lookup_null() < 0)
		return -1;

	if (test_free_null() < 0)
		return -1;

	if (test_stack_name_reuse(flags) < 0)
		return -1;

	if (test_stack_name_length(flags) < 0)
		return -1;

	if (test_stack_multithreaded(flags) < 0)
		return -1;

	return 0;
}

static int
test_stack(void)
{
	return __test_stack(0);
}

static int
test_lf_stack(void)
{
	return __test_stack(RTE_STACK_F_LF);
}

REGISTER_TEST_COMMAND(stack_autotest, test_stack);
REGISTER_TEST_COMMAND(stack_lf_autotest, test_lf_stack);