/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2010-2015 Intel Corporation.
 */
#ifndef _RTE_JHASH_H
#define _RTE_JHASH_H

#include <stdint.h>
#include <limits.h>

#include <rte_config.h>
#include <rte_byteorder.h>
/* jhash.h: Jenkins hash support.
 *
 * Copyright (C) 2006 Bob Jenkins (bob_jenkins@burtleburtle.net)
 *
 * http://burtleburtle.net/bob/hash/
 *
 * These are the credits from Bob's sources:
 *
 * lookup3.c, by Bob Jenkins, May 2006, Public Domain.
 *
 * These are functions for producing 32-bit hashes for hash table lookup.
 * hashword(), hashlittle(), hashlittle2(), hashbig(), mix(), and final()
 * are externally useful functions. Routines to test the hash are included
 * if SELF_TEST is defined. You can use this free for any purpose. It's in
 * the public domain. It has no warranty.
 */
45 #define rot(x, k) (((x) << (k)) | ((x) >> (32-(k))))
47 /** @internal Internal function. NOTE: Arguments are modified. */
48 #define __rte_jhash_mix(a, b, c) do { \
49 a -= c; a ^= rot(c, 4); c += b; \
50 b -= a; b ^= rot(a, 6); a += c; \
51 c -= b; c ^= rot(b, 8); b += a; \
52 a -= c; a ^= rot(c, 16); c += b; \
53 b -= a; b ^= rot(a, 19); a += c; \
54 c -= b; c ^= rot(b, 4); b += a; \
57 #define __rte_jhash_final(a, b, c) do { \
58 c ^= b; c -= rot(b, 14); \
59 a ^= c; a -= rot(c, 11); \
60 b ^= a; b -= rot(a, 25); \
61 c ^= b; c -= rot(b, 16); \
62 a ^= c; a -= rot(c, 4); \
63 b ^= a; b -= rot(a, 14); \
64 c ^= b; c -= rot(b, 24); \
67 /** The golden ratio: an arbitrary value. */
68 #define RTE_JHASH_GOLDEN_RATIO 0xdeadbeef
70 #if RTE_BYTE_ORDER == RTE_LITTLE_ENDIAN
71 #define BIT_SHIFT(x, y, k) (((x) >> (k)) | ((uint64_t)(y) << (32-(k))))
73 #define BIT_SHIFT(x, y, k) (((uint64_t)(x) << (k)) | ((y) >> (32-(k))))
76 #define LOWER8b_MASK rte_le_to_cpu_32(0xff)
77 #define LOWER16b_MASK rte_le_to_cpu_32(0xffff)
78 #define LOWER24b_MASK rte_le_to_cpu_32(0xffffff)
81 __rte_jhash_2hashes(const void *key, uint32_t length, uint32_t *pc,
82 uint32_t *pb, unsigned check_align)
86 /* Set up the internal state */
87 a = b = c = RTE_JHASH_GOLDEN_RATIO + ((uint32_t)length) + *pc;
91 * Check key alignment. For x86 architecture, first case is always optimal
92 * If check_align is not set, first case will be used
94 #if defined(RTE_ARCH_X86)
95 const uint32_t *k = (const uint32_t *)key;
98 const uint32_t *k = (uint32_t *)((uintptr_t)key & (uintptr_t)~3);
99 const uint32_t s = ((uintptr_t)key & 3) * CHAR_BIT;
101 if (!check_align || s == 0) {
102 while (length > 12) {
107 __rte_jhash_mix(a, b, c);
115 c += k[2]; b += k[1]; a += k[0]; break;
117 c += k[2] & LOWER24b_MASK; b += k[1]; a += k[0]; break;
119 c += k[2] & LOWER16b_MASK; b += k[1]; a += k[0]; break;
121 c += k[2] & LOWER8b_MASK; b += k[1]; a += k[0]; break;
123 b += k[1]; a += k[0]; break;
125 b += k[1] & LOWER24b_MASK; a += k[0]; break;
127 b += k[1] & LOWER16b_MASK; a += k[0]; break;
129 b += k[1] & LOWER8b_MASK; a += k[0]; break;
133 a += k[0] & LOWER24b_MASK; break;
135 a += k[0] & LOWER16b_MASK; break;
137 a += k[0] & LOWER8b_MASK; break;
138 /* zero length strings require no mixing */
145 /* all but the last block: affect some 32 bits of (a, b, c) */
146 while (length > 12) {
147 a += BIT_SHIFT(k[0], k[1], s);
148 b += BIT_SHIFT(k[1], k[2], s);
149 c += BIT_SHIFT(k[2], k[3], s);
150 __rte_jhash_mix(a, b, c);
156 /* last block: affect all 32 bits of (c) */
159 a += BIT_SHIFT(k[0], k[1], s);
160 b += BIT_SHIFT(k[1], k[2], s);
161 c += BIT_SHIFT(k[2], k[3], s);
164 a += BIT_SHIFT(k[0], k[1], s);
165 b += BIT_SHIFT(k[1], k[2], s);
166 c += BIT_SHIFT(k[2], k[3], s) & LOWER24b_MASK;
169 a += BIT_SHIFT(k[0], k[1], s);
170 b += BIT_SHIFT(k[1], k[2], s);
171 c += BIT_SHIFT(k[2], k[3], s) & LOWER16b_MASK;
174 a += BIT_SHIFT(k[0], k[1], s);
175 b += BIT_SHIFT(k[1], k[2], s);
176 c += BIT_SHIFT(k[2], k[3], s) & LOWER8b_MASK;
179 a += BIT_SHIFT(k[0], k[1], s);
180 b += BIT_SHIFT(k[1], k[2], s);
183 a += BIT_SHIFT(k[0], k[1], s);
184 b += BIT_SHIFT(k[1], k[2], s) & LOWER24b_MASK;
187 a += BIT_SHIFT(k[0], k[1], s);
188 b += BIT_SHIFT(k[1], k[2], s) & LOWER16b_MASK;
191 a += BIT_SHIFT(k[0], k[1], s);
192 b += BIT_SHIFT(k[1], k[2], s) & LOWER8b_MASK;
195 a += BIT_SHIFT(k[0], k[1], s);
198 a += BIT_SHIFT(k[0], k[1], s) & LOWER24b_MASK;
201 a += BIT_SHIFT(k[0], k[1], s) & LOWER16b_MASK;
204 a += BIT_SHIFT(k[0], k[1], s) & LOWER8b_MASK;
206 /* zero length strings require no mixing */
214 __rte_jhash_final(a, b, c);
221 * Same as rte_jhash, but takes two seeds and return two uint32_ts.
222 * pc and pb must be non-null, and *pc and *pb must both be initialized
223 * with seeds. If you pass in (*pb)=0, the output (*pc) will be
224 * the same as the return value from rte_jhash.
227 * Key to calculate hash of.
229 * Length of key in bytes.
231 * IN: seed OUT: primary hash value.
233 * IN: second seed OUT: secondary hash value.
236 rte_jhash_2hashes(const void *key, uint32_t length, uint32_t *pc, uint32_t *pb)
238 __rte_jhash_2hashes(key, length, pc, pb, 1);
242 * Same as rte_jhash_32b, but takes two seeds and return two uint32_ts.
243 * pc and pb must be non-null, and *pc and *pb must both be initialized
244 * with seeds. If you pass in (*pb)=0, the output (*pc) will be
245 * the same as the return value from rte_jhash_32b.
248 * Key to calculate hash of.
250 * Length of key in units of 4 bytes.
252 * IN: seed OUT: primary hash value.
254 * IN: second seed OUT: secondary hash value.
257 rte_jhash_32b_2hashes(const uint32_t *k, uint32_t length, uint32_t *pc, uint32_t *pb)
259 __rte_jhash_2hashes((const void *) k, (length << 2), pc, pb, 0);
263 * The most generic version, hashes an arbitrary sequence
264 * of bytes. No alignment or length assumptions are made about
265 * the input key. For keys not aligned to four byte boundaries
266 * or a multiple of four bytes in length, the memory region
267 * just after may be read (but not used in the computation).
268 * This may cross a page boundary.
271 * Key to calculate hash of.
273 * Length of key in bytes.
275 * Initialising value of hash.
277 * Calculated hash value.
279 static inline uint32_t
280 rte_jhash(const void *key, uint32_t length, uint32_t initval)
282 uint32_t initval2 = 0;
284 rte_jhash_2hashes(key, length, &initval, &initval2);
290 * A special optimized version that handles 1 or more of uint32_ts.
291 * The length parameter here is the number of uint32_ts in the key.
294 * Key to calculate hash of.
296 * Length of key in units of 4 bytes.
298 * Initialising value of hash.
300 * Calculated hash value.
302 static inline uint32_t
303 rte_jhash_32b(const uint32_t *k, uint32_t length, uint32_t initval)
305 uint32_t initval2 = 0;
307 rte_jhash_32b_2hashes(k, length, &initval, &initval2);
312 static inline uint32_t
313 __rte_jhash_3words(uint32_t a, uint32_t b, uint32_t c, uint32_t initval)
315 a += RTE_JHASH_GOLDEN_RATIO + initval;
316 b += RTE_JHASH_GOLDEN_RATIO + initval;
317 c += RTE_JHASH_GOLDEN_RATIO + initval;
319 __rte_jhash_final(a, b, c);
325 * A special ultra-optimized versions that knows it is hashing exactly
329 * First word to calculate hash of.
331 * Second word to calculate hash of.
333 * Third word to calculate hash of.
335 * Initialising value of hash.
337 * Calculated hash value.
339 static inline uint32_t
340 rte_jhash_3words(uint32_t a, uint32_t b, uint32_t c, uint32_t initval)
342 return __rte_jhash_3words(a + 12, b + 12, c + 12, initval);
346 * A special ultra-optimized versions that knows it is hashing exactly
350 * First word to calculate hash of.
352 * Second word to calculate hash of.
354 * Initialising value of hash.
356 * Calculated hash value.
358 static inline uint32_t
359 rte_jhash_2words(uint32_t a, uint32_t b, uint32_t initval)
361 return __rte_jhash_3words(a + 8, b + 8, 8, initval);
365 * A special ultra-optimized versions that knows it is hashing exactly
369 * Word to calculate hash of.
371 * Initialising value of hash.
373 * Calculated hash value.
375 static inline uint32_t
376 rte_jhash_1word(uint32_t a, uint32_t initval)
378 return __rte_jhash_3words(a + 4, 4, 4, initval);
385 #endif /* _RTE_JHASH_H */