1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2015-2019 Vladimir Medvedkin <medvedkinv@gmail.com>
5 #include <rte_common.h>
8 #include <rte_random.h>
9 #include <rte_malloc.h>
13 #include <rte_thash.h>
/* Mask selecting the low reta_sz bits of a hash (the RETA-index bits). */
15 #define HASH_MSK(reta_sz) ((1 << reta_sz) - 1)
/*
 * Byte size of an IPv4 L4 tuple; RTE_THASH_V4_L4_LEN is presumably
 * expressed in 32-bit words (hence the * 4) -- confirm against
 * rte_thash.h.
 */
16 #define TUPLE_SZ (RTE_THASH_V4_L4_LEN * 4)
/*
 * IPv4 test vector: one tuple plus its two expected Toeplitz hashes.
 * Field order matches the v4_tbl initializers below: src_ip, dst_ip,
 * src_port, dst_port, hash_l3 (L3-only), hash_l3l4 (L3+L4).
 * (Member declarations are not visible in this chunk.)
 */
18 struct test_thash_v4 {
/*
 * IPv6 counterpart: 16-byte src/dst address arrays, ports, and the
 * two expected hashes (see the v6_tbl initializers below).
 */
27 struct test_thash_v6 {
36 /*From 82599 Datasheet 7.1.2.8.3 RSS Verification Suite*/
/* Each entry: {src_ip, dst_ip, src_port, dst_port, hash_l3, hash_l3l4} */
37 struct test_thash_v4 v4_tbl[] = {
38 {RTE_IPV4(161, 142, 100, 80), RTE_IPV4(66, 9, 149, 187),
39 1766, 2794, 0x323e8fc2, 0x51ccc178},
40 {RTE_IPV4(65, 69, 140, 83), RTE_IPV4(199, 92, 111, 2),
41 4739, 14230, 0xd718262a, 0xc626b0ea},
42 {RTE_IPV4(12, 22, 207, 184), RTE_IPV4(24, 19, 198, 95),
43 38024, 12898, 0xd2d0a5de, 0x5c2b394a},
44 {RTE_IPV4(209, 142, 163, 6), RTE_IPV4(38, 27, 205, 30),
45 2217, 48228, 0x82989176, 0xafc7327f},
46 {RTE_IPV4(202, 188, 127, 2), RTE_IPV4(153, 39, 163, 191),
47 1303, 44251, 0x5d1809c5, 0x10e828a2},
/* IPv6 entries of the same verification suite; each address is spelled
 * out in the comment above its byte array.
 */
50 struct test_thash_v6 v6_tbl[] = {
51 /*3ffe:2501:200:3::1*/
52 {{0x3f, 0xfe, 0x25, 0x01, 0x02, 0x00, 0x00, 0x03,
53 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,},
54 /*3ffe:2501:200:1fff::7*/
55 {0x3f, 0xfe, 0x25, 0x01, 0x02, 0x00, 0x1f, 0xff,
56 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x07,},
57 1766, 2794, 0x2cc18cd5, 0x40207d3d},
/*ff02::1*/
59 {{0xff, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
60 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,},
61 /*3ffe:501:8::260:97ff:fe40:efab*/
62 {0x3f, 0xfe, 0x05, 0x01, 0x00, 0x08, 0x00, 0x00,
63 0x02, 0x60, 0x97, 0xff, 0xfe, 0x40, 0xef, 0xab,},
64 4739, 14230, 0x0f0c461c, 0xdde51bbf},
65 /*fe80::200:f8ff:fe21:67cf*/
66 {{0xfe, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
67 0x02, 0x00, 0xf8, 0xff, 0xfe, 0x21, 0x67, 0xcf,},
68 /*3ffe:1900:4545:3:200:f8ff:fe21:67cf*/
69 {0x3f, 0xfe, 0x19, 0x00, 0x45, 0x45, 0x00, 0x03,
70 0x02, 0x00, 0xf8, 0xff, 0xfe, 0x21, 0x67, 0xcf,},
71 38024, 44251, 0x4b61e985, 0x02d1feef},
/*
 * Default 40-byte RSS key -- the well-known key from the 82599
 * datasheet verification suite; the expected hashes in v4_tbl/v6_tbl
 * were computed with it.
 */
74 uint8_t default_rss_key[] = {
75 0x6d, 0x5a, 0x56, 0xda, 0x25, 0x5b, 0x0e, 0xc2,
76 0x41, 0x67, 0x25, 0x3d, 0x43, 0xa3, 0x8f, 0xb0,
77 0xd0, 0xca, 0x2b, 0xcb, 0xae, 0x7b, 0x30, 0xb4,
78 0x77, 0xcb, 0x2d, 0xa3, 0x80, 0x30, 0xf2, 0x0c,
79 0x6a, 0x42, 0xb7, 0x3b, 0xbe, 0xac, 0x01, 0xfa,
/*
 * 200-byte key: the 40-byte default key repeated five times.  Used by
 * test_big_tuple_gfni() to hash a tuple larger than a regular RSS key.
 */
82 static const uint8_t big_rss_key[] = {
83 0x6d, 0x5a, 0x56, 0xda, 0x25, 0x5b, 0x0e, 0xc2,
84 0x41, 0x67, 0x25, 0x3d, 0x43, 0xa3, 0x8f, 0xb0,
85 0xd0, 0xca, 0x2b, 0xcb, 0xae, 0x7b, 0x30, 0xb4,
86 0x77, 0xcb, 0x2d, 0xa3, 0x80, 0x30, 0xf2, 0x0c,
87 0x6a, 0x42, 0xb7, 0x3b, 0xbe, 0xac, 0x01, 0xfa,
88 0x6d, 0x5a, 0x56, 0xda, 0x25, 0x5b, 0x0e, 0xc2,
89 0x41, 0x67, 0x25, 0x3d, 0x43, 0xa3, 0x8f, 0xb0,
90 0xd0, 0xca, 0x2b, 0xcb, 0xae, 0x7b, 0x30, 0xb4,
91 0x77, 0xcb, 0x2d, 0xa3, 0x80, 0x30, 0xf2, 0x0c,
92 0x6a, 0x42, 0xb7, 0x3b, 0xbe, 0xac, 0x01, 0xfa,
93 0x6d, 0x5a, 0x56, 0xda, 0x25, 0x5b, 0x0e, 0xc2,
94 0x41, 0x67, 0x25, 0x3d, 0x43, 0xa3, 0x8f, 0xb0,
95 0xd0, 0xca, 0x2b, 0xcb, 0xae, 0x7b, 0x30, 0xb4,
96 0x77, 0xcb, 0x2d, 0xa3, 0x80, 0x30, 0xf2, 0x0c,
97 0x6a, 0x42, 0xb7, 0x3b, 0xbe, 0xac, 0x01, 0xfa,
98 0x6d, 0x5a, 0x56, 0xda, 0x25, 0x5b, 0x0e, 0xc2,
99 0x41, 0x67, 0x25, 0x3d, 0x43, 0xa3, 0x8f, 0xb0,
100 0xd0, 0xca, 0x2b, 0xcb, 0xae, 0x7b, 0x30, 0xb4,
101 0x77, 0xcb, 0x2d, 0xa3, 0x80, 0x30, 0xf2, 0x0c,
102 0x6a, 0x42, 0xb7, 0x3b, 0xbe, 0xac, 0x01, 0xfa,
103 0x6d, 0x5a, 0x56, 0xda, 0x25, 0x5b, 0x0e, 0xc2,
104 0x41, 0x67, 0x25, 0x3d, 0x43, 0xa3, 0x8f, 0xb0,
105 0xd0, 0xca, 0x2b, 0xcb, 0xae, 0x7b, 0x30, 0xb4,
106 0x77, 0xcb, 0x2d, 0xa3, 0x80, 0x30, 0xf2, 0x0c,
107 0x6a, 0x42, 0xb7, 0x3b, 0xbe, 0xac, 0x01, 0xfa,
/*
 * Check rte_softrss()/rte_softrss_be() against the precomputed
 * vectors in v4_tbl/v6_tbl.  Every tuple is hashed four ways:
 * L3-only and L3+L4, each with the original key and with the
 * byte-swapped key produced by rte_convert_rss_key().
 */
111 test_toeplitz_hash_calc(void)
114 union rte_thash_tuple tuple;
115 uint32_t rss_l3, rss_l3l4;
116 uint8_t rss_key_be[RTE_DIM(default_rss_key)];
117 struct rte_ipv6_hdr ipv6_hdr;
/* Produce the byte-swapped ("be") variant of the default key. */
120 rte_convert_rss_key((uint32_t *)&default_rss_key,
121 (uint32_t *)rss_key_be, RTE_DIM(default_rss_key));
/* IPv4 vectors: fields stay in CPU byte order for rte_softrss(). */
124 for (i = 0; i < RTE_DIM(v4_tbl); i++) {
125 tuple.v4.src_addr = v4_tbl[i].src_ip;
126 tuple.v4.dst_addr = v4_tbl[i].dst_ip;
127 tuple.v4.sport = v4_tbl[i].src_port;
128 tuple.v4.dport = v4_tbl[i].dst_port;
129 /*Calculate hash with original key*/
130 rss_l3 = rte_softrss((uint32_t *)&tuple,
131 RTE_THASH_V4_L3_LEN, default_rss_key);
132 rss_l3l4 = rte_softrss((uint32_t *)&tuple,
133 RTE_THASH_V4_L4_LEN, default_rss_key);
134 if ((rss_l3 != v4_tbl[i].hash_l3) ||
135 (rss_l3l4 != v4_tbl[i].hash_l3l4))
137 /*Calculate hash with converted key*/
138 rss_l3 = rte_softrss_be((uint32_t *)&tuple,
139 RTE_THASH_V4_L3_LEN, rss_key_be);
140 rss_l3l4 = rte_softrss_be((uint32_t *)&tuple,
141 RTE_THASH_V4_L4_LEN, rss_key_be);
142 if ((rss_l3 != v4_tbl[i].hash_l3) ||
143 (rss_l3l4 != v4_tbl[i].hash_l3l4))
/* IPv6 vectors: fill an IPv6 header, then load it into the tuple. */
146 for (i = 0; i < RTE_DIM(v6_tbl); i++) {
148 for (j = 0; j < RTE_DIM(ipv6_hdr.src_addr); j++)
149 ipv6_hdr.src_addr[j] = v6_tbl[i].src_ip[j];
150 for (j = 0; j < RTE_DIM(ipv6_hdr.dst_addr); j++)
151 ipv6_hdr.dst_addr[j] = v6_tbl[i].dst_ip[j];
152 /*Load and convert ipv6 address into tuple*/
153 rte_thash_load_v6_addrs(&ipv6_hdr, &tuple);
154 tuple.v6.sport = v6_tbl[i].src_port;
155 tuple.v6.dport = v6_tbl[i].dst_port;
156 /*Calculate hash with original key*/
157 rss_l3 = rte_softrss((uint32_t *)&tuple,
158 RTE_THASH_V6_L3_LEN, default_rss_key);
159 rss_l3l4 = rte_softrss((uint32_t *)&tuple,
160 RTE_THASH_V6_L4_LEN, default_rss_key);
161 if ((rss_l3 != v6_tbl[i].hash_l3) ||
162 (rss_l3l4 != v6_tbl[i].hash_l3l4))
164 /*Calculate hash with converted key*/
165 rss_l3 = rte_softrss_be((uint32_t *)&tuple,
166 RTE_THASH_V6_L3_LEN, rss_key_be);
167 rss_l3l4 = rte_softrss_be((uint32_t *)&tuple,
168 RTE_THASH_V6_L4_LEN, rss_key_be);
169 if ((rss_l3 != v6_tbl[i].hash_l3) ||
170 (rss_l3l4 != v6_tbl[i].hash_l3l4))
/*
 * Same vectors as above, hashed with the GFNI implementation
 * (rte_thash_gfni).  GFNI consumes the tuple as a raw byte stream,
 * so values are stored in network byte order and lengths are given
 * in bytes (the *_LEN word counts * 4).  Skipped when the CPU lacks
 * GFNI support.
 */
177 test_toeplitz_hash_gfni(void)
180 union rte_thash_tuple tuple;
181 uint32_t rss_l3, rss_l3l4;
182 uint64_t rss_key_matrixes[RTE_DIM(default_rss_key)];
184 if (!rte_thash_gfni_supported())
187 /* Convert RSS key into matrixes */
188 rte_thash_complete_matrix(rss_key_matrixes, default_rss_key,
189 RTE_DIM(default_rss_key));
191 for (i = 0; i < RTE_DIM(v4_tbl); i++) {
192 tuple.v4.src_addr = rte_cpu_to_be_32(v4_tbl[i].src_ip);
193 tuple.v4.dst_addr = rte_cpu_to_be_32(v4_tbl[i].dst_ip);
/*
 * NOTE(review): sport is loaded from dst_port and dport from
 * src_port.  Presumably intentional: rte_ipv4_tuple declares
 * dport before sport, so this cross-assignment lays the bytes
 * out as src_port then dst_port in memory, as GFNI expects --
 * confirm against rte_thash.h before "fixing".
 */
194 tuple.v4.sport = rte_cpu_to_be_16(v4_tbl[i].dst_port);
195 tuple.v4.dport = rte_cpu_to_be_16(v4_tbl[i].src_port);
197 rss_l3 = rte_thash_gfni(rss_key_matrixes, (uint8_t *)&tuple,
198 RTE_THASH_V4_L3_LEN * 4);
199 rss_l3l4 = rte_thash_gfni(rss_key_matrixes, (uint8_t *)&tuple,
200 RTE_THASH_V4_L4_LEN * 4);
201 if ((rss_l3 != v4_tbl[i].hash_l3) ||
202 (rss_l3l4 != v4_tbl[i].hash_l3l4))
206 for (i = 0; i < RTE_DIM(v6_tbl); i++) {
207 for (j = 0; j < RTE_DIM(tuple.v6.src_addr); j++)
208 tuple.v6.src_addr[j] = v6_tbl[i].src_ip[j];
209 for (j = 0; j < RTE_DIM(tuple.v6.dst_addr); j++)
210 tuple.v6.dst_addr[j] = v6_tbl[i].dst_ip[j];
/* Same (presumably deliberate) cross-assignment as the IPv4 loop. */
211 tuple.v6.sport = rte_cpu_to_be_16(v6_tbl[i].dst_port);
212 tuple.v6.dport = rte_cpu_to_be_16(v6_tbl[i].src_port);
213 rss_l3 = rte_thash_gfni(rss_key_matrixes, (uint8_t *)&tuple,
214 RTE_THASH_V6_L3_LEN * 4);
215 rss_l3l4 = rte_thash_gfni(rss_key_matrixes, (uint8_t *)&tuple,
216 RTE_THASH_V6_L4_LEN * 4);
217 if ((rss_l3 != v6_tbl[i].hash_l3) ||
218 (rss_l3l4 != v6_tbl[i].hash_l3l4))
/*
 * Slots in the hash[] array of test_toeplitz_hash_rand_data():
 * scalar (rte_softrss) and GFNI results for the two random buffers.
 */
229 SCALAR_DATA_BUF_1_HASH_IDX = 0,
230 SCALAR_DATA_BUF_2_HASH_IDX,
231 GFNI_DATA_BUF_1_HASH_IDX,
232 GFNI_DATA_BUF_2_HASH_IDX,
/*
 * Cross-check the scalar and GFNI implementations on random buffers
 * over ITER iterations.  rte_softrss() expects CPU-byte-order words
 * while rte_thash_gfni() consumes raw bytes, hence the
 * rte_cpu_to_be_32() copies into scalar_data.  The two
 * implementations must agree.  Skipped without GFNI support.
 */
237 test_toeplitz_hash_rand_data(void)
239 uint32_t data[2][DATA_SZ];
240 uint32_t scalar_data[2][DATA_SZ];
241 uint32_t hash[HASH_IDXES] = { 0 };
242 uint64_t rss_key_matrixes[RTE_DIM(default_rss_key)];
245 if (!rte_thash_gfni_supported())
248 rte_thash_complete_matrix(rss_key_matrixes, default_rss_key,
249 RTE_DIM(default_rss_key));
251 for (i = 0; i < ITER; i++) {
/* Fresh random input each iteration, mirrored in both byte orders. */
252 for (j = 0; j < DATA_SZ; j++) {
253 data[0][j] = rte_rand();
254 data[1][j] = rte_rand();
255 scalar_data[0][j] = rte_cpu_to_be_32(data[0][j]);
256 scalar_data[1][j] = rte_cpu_to_be_32(data[1][j]);
259 hash[SCALAR_DATA_BUF_1_HASH_IDX] = rte_softrss(scalar_data[0],
260 DATA_SZ, default_rss_key);
261 hash[SCALAR_DATA_BUF_2_HASH_IDX] = rte_softrss(scalar_data[1],
262 DATA_SZ, default_rss_key);
263 hash[GFNI_DATA_BUF_1_HASH_IDX] = rte_thash_gfni(
264 rss_key_matrixes, (uint8_t *)data[0],
265 DATA_SZ * sizeof(uint32_t));
266 hash[GFNI_DATA_BUF_2_HASH_IDX] = rte_thash_gfni(
267 rss_key_matrixes, (uint8_t *)data[1],
268 DATA_SZ * sizeof(uint32_t));
/* Scalar and GFNI hashes must match for both buffers. */
270 if ((hash[SCALAR_DATA_BUF_1_HASH_IDX] !=
271 hash[GFNI_DATA_BUF_1_HASH_IDX]) ||
272 (hash[SCALAR_DATA_BUF_2_HASH_IDX] !=
273 hash[GFNI_DATA_BUF_2_HASH_IDX]))
/*
 * Hash a 16-word tuple with the 200-byte big_rss_key, comparing the
 * scalar and GFNI implementations.  The scalar copy (arr_softrss) is
 * byte-swapped to CPU order first, as in the tests above.  Skipped
 * without GFNI support.
 */
287 test_big_tuple_gfni(void)
290 uint32_t arr_softrss[16];
291 uint32_t hash_1, hash_2;
292 uint64_t rss_key_matrixes[RTE_DIM(big_rss_key)];
293 unsigned int i, size = RTE_DIM(arr) * sizeof(uint32_t);
295 if (!rte_thash_gfni_supported())
298 /* Convert RSS key into matrixes */
299 rte_thash_complete_matrix(rss_key_matrixes, big_rss_key,
300 RTE_DIM(big_rss_key));
302 for (i = 0; i < RTE_DIM(arr); i++) {
304 arr_softrss[i] = rte_be_to_cpu_32(arr[i]);
307 hash_1 = rte_softrss(arr_softrss, RTE_DIM(arr), big_rss_key);
308 hash_2 = rte_thash_gfni(rss_key_matrixes, (uint8_t *)arr, size);
310 if (hash_1 != hash_2)
/*
 * rte_thash_init_ctx() must reject invalid parameters: NULL name,
 * zero key length, and out-of-range reta_sz values (1 and 17 here).
 */
317 test_create_invalid(void)
319 struct rte_thash_ctx *ctx;
/* NULL context name */
323 ctx = rte_thash_init_ctx(NULL, key_len, reta_sz, NULL, 0);
324 RTE_TEST_ASSERT(ctx == NULL,
325 "Call succeeded with invalid parameters\n");
/* zero key length */
327 ctx = rte_thash_init_ctx("test", 0, reta_sz, NULL, 0);
328 RTE_TEST_ASSERT(ctx == NULL,
329 "Call succeeded with invalid parameters\n");
/* reta_sz too small (1) */
331 ctx = rte_thash_init_ctx(NULL, key_len, 1, NULL, 0);
332 RTE_TEST_ASSERT(ctx == NULL,
333 "Call succeeded with invalid parameters\n");
/* reta_sz too large (17) */
335 ctx = rte_thash_init_ctx(NULL, key_len, 17, NULL, 0);
336 RTE_TEST_ASSERT(ctx == NULL,
337 "Call succeeded with invalid parameters\n");
/*
 * Create and free a context 100 times under the same name to check
 * that rte_thash_free_ctx() fully releases it for re-creation.
 */
343 test_multiple_create(void)
345 struct rte_thash_ctx *ctx;
350 for (i = 0; i < 100; i++) {
351 ctx = rte_thash_init_ctx("test", key_len, reta_sz, NULL, 0);
352 RTE_TEST_ASSERT(ctx != NULL, "Can not create CTX\n");
354 rte_thash_free_ctx(ctx);
/* test_free_null(): rte_thash_free_ctx(NULL) must be a harmless no-op. */
363 struct rte_thash_ctx *ctx;
365 ctx = rte_thash_init_ctx("test", 40, 7, NULL, 0);
366 RTE_TEST_ASSERT(ctx != NULL, "Can not create CTX\n");
368 rte_thash_free_ctx(ctx);
369 rte_thash_free_ctx(NULL);
/*
 * Exercise rte_thash_add_helper() parameter validation, duplicate
 * names (-EEXIST) and overlapping key ranges on a 40-byte-key ctx.
 */
375 test_add_invalid_helper(void)
377 struct rte_thash_ctx *ctx;
378 const int key_len = 40;
382 ctx = rte_thash_init_ctx("test", key_len, reta_sz, NULL, 0);
383 RTE_TEST_ASSERT(ctx != NULL, "can not create thash ctx\n");
/* NULL ctx */
385 ret = rte_thash_add_helper(NULL, "test", reta_sz, 0);
386 RTE_TEST_ASSERT(ret == -EINVAL,
387 "Call succeeded with invalid parameters\n");
/* NULL helper name */
389 ret = rte_thash_add_helper(ctx, NULL, reta_sz, 0);
390 RTE_TEST_ASSERT(ret == -EINVAL,
391 "Call succeeded with invalid parameters\n");
/* length below reta_sz */
393 ret = rte_thash_add_helper(ctx, "test", reta_sz - 1, 0);
394 RTE_TEST_ASSERT(ret == -EINVAL,
395 "Call succeeded with invalid parameters\n");
/* offset past the end of the key (key_len bytes, offset in bits) */
397 ret = rte_thash_add_helper(ctx, "test", reta_sz, key_len * 8);
398 RTE_TEST_ASSERT(ret == -EINVAL,
399 "Call succeeded with invalid parameters\n");
401 ret = rte_thash_add_helper(ctx, "first_range", reta_sz, 0);
402 RTE_TEST_ASSERT(ret == 0, "Can not create helper\n");
/* same name again must be rejected */
404 ret = rte_thash_add_helper(ctx, "first_range", reta_sz, 0);
405 RTE_TEST_ASSERT(ret == -EEXIST,
406 "Call succeeded with duplicated name\n");
409 * Create second helper with offset 3 * reta_sz.
410 * Note first_range helper created range in key:
411 * [0, 32 + length{= reta_sz} - 1), i.e [0, 37).
412 * second range is [44, 81)
414 ret = rte_thash_add_helper(ctx, "second_range", reta_sz,
416 RTE_TEST_ASSERT(ret == 0, "Can not create helper\n");
419 * Try to create overlapping with first_ and second_ ranges,
422 ret = rte_thash_add_helper(ctx, "third_range", 2 * reta_sz, reta_sz);
423 RTE_TEST_ASSERT(ret == -EEXIST,
424 "Call succeeded with overlapping ranges\n");
426 rte_thash_free_ctx(ctx);
/* rte_thash_find_existing() must locate a ctx created under "test". */
432 test_find_existing(void)
434 struct rte_thash_ctx *ctx, *ret_ctx;
436 ctx = rte_thash_init_ctx("test", 40, 7, NULL, 0);
437 RTE_TEST_ASSERT(ctx != NULL, "can not create thash ctx\n");
439 ret_ctx = rte_thash_find_existing("test");
440 RTE_TEST_ASSERT(ret_ctx != NULL, "can not find existing ctx\n");
442 rte_thash_free_ctx(ctx);
/*
 * rte_thash_get_helper(): NULL ctx / NULL name must fail; a helper
 * added under "first_range" must then be retrievable by name.
 */
448 test_get_helper(void)
450 struct rte_thash_ctx *ctx;
451 struct rte_thash_subtuple_helper *h;
454 ctx = rte_thash_init_ctx("test", 40, 7, NULL, 0);
455 RTE_TEST_ASSERT(ctx != NULL, "Can not create thash ctx\n");
457 h = rte_thash_get_helper(NULL, "first_range");
458 RTE_TEST_ASSERT(h == NULL, "Call succeeded with invalid parameters\n");
460 h = rte_thash_get_helper(ctx, NULL);
461 RTE_TEST_ASSERT(h == NULL, "Call succeeded with invalid parameters\n");
463 ret = rte_thash_add_helper(ctx, "first_range", 8, 0);
464 RTE_TEST_ASSERT(ret == 0, "Can not create helper\n");
466 h = rte_thash_get_helper(ctx, "first_range");
467 RTE_TEST_ASSERT(h != NULL, "Can not find helper\n");
469 rte_thash_free_ctx(ctx);
/*
 * Period-overflow handling: without RTE_THASH_IGNORE_PERIOD_OVERFLOW
 * a requested range larger than the polynomial period must fail with
 * -ENOSPC; with the flag set it must succeed.
 */
475 test_period_overflow(void)
477 struct rte_thash_ctx *ctx;
478 int reta_sz = 7; /* reflects polynomial degree */
481 /* first create without RTE_THASH_IGNORE_PERIOD_OVERFLOW flag */
482 ctx = rte_thash_init_ctx("test", 40, reta_sz, NULL, 0);
483 RTE_TEST_ASSERT(ctx != NULL, "Can not create thash ctx\n");
485 /* requested range > (2^reta_sz) - 1 */
486 ret = rte_thash_add_helper(ctx, "test", (1 << reta_sz), 0);
487 RTE_TEST_ASSERT(ret == -ENOSPC,
488 "Call succeeded with invalid parameters\n");
490 /* requested range == len + 32 - 1, smaller than (2^reta_sz) - 1 */
491 ret = rte_thash_add_helper(ctx, "test", (1 << reta_sz) - 32, 0);
492 RTE_TEST_ASSERT(ret == 0, "Can not create helper\n");
494 rte_thash_free_ctx(ctx);
496 /* create with RTE_THASH_IGNORE_PERIOD_OVERFLOW flag */
497 ctx = rte_thash_init_ctx("test", 40, reta_sz, NULL,
498 RTE_THASH_IGNORE_PERIOD_OVERFLOW);
499 RTE_TEST_ASSERT(ctx != NULL, "Can not create thash ctx\n");
501 /* requested range > (2^reta_sz - 1) */
502 ret = rte_thash_add_helper(ctx, "test", (1 << reta_sz) + 10, 0);
503 RTE_TEST_ASSERT(ret == 0, "Can not create helper\n");
505 rte_thash_free_ctx(ctx);
/*
 * Predictable RSS with RTE_THASH_MINIMAL_SEQ: add a helper covering
 * the 16-bit source port of a v4 tuple, then use
 * rte_thash_get_complement() to find the port bits that steer the
 * hash's low reta_sz bits to a desired value.
 */
511 test_predictable_rss_min_seq(void)
513 struct rte_thash_ctx *ctx;
514 struct rte_thash_subtuple_helper *h;
515 const int key_len = 40;
517 uint8_t initial_key[key_len];
518 const uint8_t *new_key;
520 union rte_thash_tuple tuple;
521 uint32_t orig_hash, adj_hash, adj;
522 unsigned int desired_value = 27 & HASH_MSK(reta_sz);
523 uint16_t port_value = 22;
525 memset(initial_key, 0, key_len);
527 ctx = rte_thash_init_ctx("test", key_len, reta_sz, initial_key,
528 RTE_THASH_MINIMAL_SEQ);
529 RTE_TEST_ASSERT(ctx != NULL, "can not create thash ctx\n");
/* Helper spans the 16 bits of v4.sport (offset given in bits). */
531 ret = rte_thash_add_helper(ctx, "snat", sizeof(uint16_t) * 8,
532 offsetof(union rte_thash_tuple, v4.sport) * 8);
533 RTE_TEST_ASSERT(ret == 0, "can not add helper, ret %d\n", ret);
535 h = rte_thash_get_helper(ctx, "snat");
536 RTE_TEST_ASSERT(h != NULL, "can not find helper\n");
538 new_key = rte_thash_get_key(ctx);
539 tuple.v4.src_addr = RTE_IPV4(0, 0, 0, 0);
540 tuple.v4.dst_addr = RTE_IPV4(0, 0, 0, 0);
542 tuple.v4.sport = rte_cpu_to_be_16(port_value);
/*
 * sctp_tag presumably aliases the sport/dport pair (union in
 * rte_thash.h -- confirm); swap the whole 32-bit word to CPU order
 * because rte_softrss() works on host-order words.
 */
544 tuple.v4.sctp_tag = rte_be_to_cpu_32(tuple.v4.sctp_tag);
546 orig_hash = rte_softrss((uint32_t *)&tuple,
547 RTE_THASH_V4_L4_LEN, new_key);
/* Bits to xor into the subtuple to reach desired_value. */
548 adj = rte_thash_get_complement(h, orig_hash, desired_value);
/* Back to wire order, apply the adjustment, then CPU order again. */
550 tuple.v4.sctp_tag = rte_cpu_to_be_32(tuple.v4.sctp_tag);
551 tuple.v4.sport ^= rte_cpu_to_be_16(adj);
552 tuple.v4.sctp_tag = rte_be_to_cpu_32(tuple.v4.sctp_tag);
554 adj_hash = rte_softrss((uint32_t *)&tuple,
555 RTE_THASH_V4_L4_LEN, new_key);
556 RTE_TEST_ASSERT((adj_hash & HASH_MSK(reta_sz)) ==
557 desired_value, "bad desired value\n");
559 rte_thash_free_ctx(ctx);
565 * This test creates 7 subranges in the following order:
566 * range_one = [56, 95), len = 8, offset = 56
567 * range_two = [64, 103), len = 8, offset = 64
568 * range_three = [120, 159), len = 8, offset = 120
569 * range_four = [48, 87), len = 8, offset = 48
570 * range_five = [57, 95), len = 7, offset = 57
571 * range_six = [40, 111), len = 40, offset = 40
572 * range_seven = [0, 39), len = 8, offset = 0
/*
 * rng_arr entries: {name, len (bits), offset (bits), byte_idx}.
 * byte_idx looks like the tuple byte that gets xor-adjusted in
 * test_predictable_rss_multirange() -- confirm against struct range.
 */
581 struct range rng_arr[] = {
584 {"three", 8, 120, 15},
/*
 * Multi-range predictable RSS: create every helper from rng_arr in
 * one ctx, then for each range repeatedly generate a random 16-byte
 * tuple, compute its complement and verify the adjusted tuple hashes
 * to the desired low reta_sz bits.
 */
592 test_predictable_rss_multirange(void)
594 struct rte_thash_ctx *ctx;
595 struct rte_thash_subtuple_helper *h[RTE_DIM(rng_arr)];
596 const uint8_t *new_key;
597 const int key_len = 40;
599 unsigned int i, j, k;
601 uint32_t desired_value = rte_rand() & HASH_MSK(reta_sz);
602 uint8_t tuples[RTE_DIM(rng_arr)][16] = { {0} };
604 uint32_t hashes[RTE_DIM(rng_arr)];
605 uint32_t adj_hashes[RTE_DIM(rng_arr)];
608 ctx = rte_thash_init_ctx("test", key_len, reta_sz, NULL, 0);
609 RTE_TEST_ASSERT(ctx != NULL, "can not create thash ctx\n");
611 for (i = 0; i < RTE_DIM(rng_arr); i++) {
612 ret = rte_thash_add_helper(ctx, rng_arr[i].name,
613 rng_arr[i].len, rng_arr[i].offset);
614 RTE_TEST_ASSERT(ret == 0, "can not add helper\n");
616 h[i] = rte_thash_get_helper(ctx, rng_arr[i].name);
617 RTE_TEST_ASSERT(h[i] != NULL, "can not find helper\n");
619 new_key = rte_thash_get_key(ctx);
622 * calculate hashes, complements, then adjust keys with
623 * complements and recalculate hashes
625 for (i = 0; i < RTE_DIM(rng_arr); i++) {
626 for (k = 0; k < 100; k++) {
627 /* init with random keys */
628 ptr = (uint32_t *)&tuples[i][0];
629 for (j = 0; j < 4; j++)
631 /* convert keys from BE to CPU byte order */
632 for (j = 0; j < 4; j++)
633 ptr[j] = rte_be_to_cpu_32(ptr[j]);
635 hashes[i] = rte_softrss(ptr, 4, new_key);
636 adj = rte_thash_get_complement(h[i], hashes[i],
638 /* convert back to BE to adjust the value */
639 for (j = 0; j < 4; j++)
640 ptr[j] = rte_cpu_to_be_32(ptr[j]);
/* xor the complement into the byte holding the target subtuple */
642 tuples[i][rng_arr[i].byte_idx] ^= adj;
644 for (j = 0; j < 4; j++)
645 ptr[j] = rte_be_to_cpu_32(ptr[j]);
647 adj_hashes[i] = rte_softrss(ptr, 4, new_key);
648 RTE_TEST_ASSERT((adj_hashes[i] & HASH_MSK(reta_sz)) ==
650 "bad desired value for %d tuple\n", i);
654 rte_thash_free_ctx(ctx);
/*
 * rte_thash_adjust_tuple() collision callback: returns 0 when the
 * candidate tuple equals the caller-supplied one (userdata), used by
 * test_adjust_tuple() to force and then avoid a collision.
 */
660 cmp_tuple_eq(void *userdata, uint8_t *tuple)
662 return memcmp(userdata, tuple, TUPLE_SZ);
/*
 * rte_thash_adjust_tuple(): adjust a tuple in place so that the low
 * reta_sz bits of its hash equal desired_value, then verify with
 * rte_softrss().  Also checks the collision callback: with 1 attempt
 * a colliding tuple must yield -EEXIST, while with 2 attempts the
 * second (randomized) try must succeed.
 */
666 test_adjust_tuple(void)
668 struct rte_thash_ctx *ctx;
669 struct rte_thash_subtuple_helper *h;
670 const int key_len = 40;
671 const uint8_t *new_key;
672 uint8_t tuple[TUPLE_SZ];
673 uint32_t tmp_tuple[TUPLE_SZ / sizeof(uint32_t)];
674 uint32_t tuple_copy[TUPLE_SZ / sizeof(uint32_t)];
676 int reta_sz = CHAR_BIT;
678 unsigned int i, desired_value = rte_rand() & HASH_MSK(reta_sz);
680 memset(tuple, 0xab, TUPLE_SZ);
682 ctx = rte_thash_init_ctx("test", key_len, reta_sz, NULL, 0);
683 RTE_TEST_ASSERT(ctx != NULL, "can not create thash ctx\n");
686 * set offset to be in the middle of a byte
687 * set size of the subtuple to be 2 * reta_sz
688 * to have the room for random bits
690 ret = rte_thash_add_helper(ctx, "test", reta_sz * 2,
692 RTE_TEST_ASSERT(ret == 0, "can not add helper, ret %d\n", ret);
694 new_key = rte_thash_get_key(ctx);
696 h = rte_thash_get_helper(ctx, "test");
697 RTE_TEST_ASSERT(h != NULL, "can not find helper\n");
699 ret = rte_thash_adjust_tuple(ctx, h, tuple, TUPLE_SZ, desired_value,
701 RTE_TEST_ASSERT(ret == 0, "can not adjust tuple, ret %d\n", ret);
/* Recompute the hash in CPU byte order to verify the adjustment. */
703 for (i = 0; i < (TUPLE_SZ / 4); i++)
705 rte_be_to_cpu_32(*(uint32_t *)&tuple[i * 4]);
707 hash = rte_softrss(tmp_tuple, TUPLE_SZ / 4, new_key);
708 RTE_TEST_ASSERT((hash & HASH_MSK(reta_sz)) ==
709 desired_value, "bad desired value\n");
712 /* Pass previously calculated tuple to callback function */
713 memcpy(tuple_copy, tuple, TUPLE_SZ);
715 memset(tuple, 0xab, TUPLE_SZ);
716 ret = rte_thash_adjust_tuple(ctx, h, tuple, TUPLE_SZ, desired_value,
717 1, cmp_tuple_eq, tuple_copy);
718 RTE_TEST_ASSERT(ret == -EEXIST,
719 "adjust tuple didn't indicate collision\n");
722 * Make the function to generate random bits into subtuple
723 * after first adjustment attempt.
725 memset(tuple, 0xab, TUPLE_SZ);
726 ret = rte_thash_adjust_tuple(ctx, h, tuple, TUPLE_SZ, desired_value,
727 2, cmp_tuple_eq, tuple_copy);
728 RTE_TEST_ASSERT(ret == 0, "can not adjust tuple, ret %d\n", ret);
730 for (i = 0; i < (TUPLE_SZ / 4); i++)
732 rte_be_to_cpu_32(*(uint32_t *)&tuple[i * 4]);
734 hash = rte_softrss(tmp_tuple, TUPLE_SZ / 4, new_key);
735 RTE_TEST_ASSERT((hash & HASH_MSK(reta_sz)) ==
736 desired_value, "bad desired value\n");
738 rte_thash_free_ctx(ctx);
/*
 * Suite definition: all thash unit tests, registered with the DPDK
 * test framework under "thash autotest".
 */
743 static struct unit_test_suite thash_tests = {
744 .suite_name = "thash autotest",
748 TEST_CASE(test_toeplitz_hash_calc),
749 TEST_CASE(test_toeplitz_hash_gfni),
750 TEST_CASE(test_toeplitz_hash_rand_data),
751 TEST_CASE(test_big_tuple_gfni),
752 TEST_CASE(test_create_invalid),
753 TEST_CASE(test_multiple_create),
754 TEST_CASE(test_free_null),
755 TEST_CASE(test_add_invalid_helper),
756 TEST_CASE(test_find_existing),
757 TEST_CASE(test_get_helper),
758 TEST_CASE(test_period_overflow),
759 TEST_CASE(test_predictable_rss_min_seq),
760 TEST_CASE(test_predictable_rss_multirange),
761 TEST_CASE(test_adjust_tuple),
/* test_thash(): entry point invoked by the "thash_autotest" command. */
769 return unit_test_suite_runner(&thash_tests);
772 REGISTER_TEST_COMMAND(thash_autotest, test_thash);