/*-
 *   BSD LICENSE
 *
 *   Copyright(c) 2010-2015 Intel Corporation. All rights reserved.
 *
 *   Redistribution and use in source and binary forms, with or without
 *   modification, are permitted provided that the following conditions
 *   are met:
 *
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in
 *       the documentation and/or other materials provided with the
 *       distribution.
 *     * Neither the name of Intel Corporation nor the names of its
 *       contributors may be used to endorse or promote products derived
 *       from this software without specific prior written permission.
 *
 *   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 *   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 *   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 *   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 *   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 *   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 *   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 *   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 *   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#ifndef _RTE_JHASH_H
#define _RTE_JHASH_H

#include <stdint.h>
#include <string.h>
#include <limits.h>

#include <rte_byteorder.h>

/* jhash.h: Jenkins hash support.
 *
 * Copyright (C) 2006 Bob Jenkins (bob_jenkins@burtleburtle.net)
 *
 * http://burtleburtle.net/bob/hash/
 *
 * These are the credits from Bob's sources:
 *
 * lookup3.c, by Bob Jenkins, May 2006, Public Domain.
 *
 * These are functions for producing 32-bit hashes for hash table lookup.
 * hashword(), hashlittle(), hashlittle2(), hashbig(), mix(), and final()
 * are externally useful functions. Routines to test the hash are included
 * if SELF_TEST is defined. You can use this free for any purpose. It's in
 * the public domain. It has no warranty.
 */
#define rot(x, k) (((x) << (k)) | ((x) >> (32-(k))))
/** @internal Internal macro. NOTE: Arguments are modified. */
#define __rte_jhash_mix(a, b, c) do { \
    a -= c; a ^= rot(c, 4); c += b; \
    b -= a; b ^= rot(a, 6); a += c; \
    c -= b; c ^= rot(b, 8); b += a; \
    a -= c; a ^= rot(c, 16); c += b; \
    b -= a; b ^= rot(a, 19); a += c; \
    c -= b; c ^= rot(b, 4); b += a; \
} while (0)

#define __rte_jhash_final(a, b, c) do { \
    c ^= b; c -= rot(b, 14); \
    a ^= c; a -= rot(c, 11); \
    b ^= a; b -= rot(a, 25); \
    c ^= b; c -= rot(b, 16); \
    a ^= c; a -= rot(c, 4); \
    b ^= a; b -= rot(a, 14); \
    c ^= b; c -= rot(b, 24); \
} while (0)

/** The golden ratio: an arbitrary value. */
#define RTE_JHASH_GOLDEN_RATIO 0xdeadbeef

#if RTE_BYTE_ORDER == RTE_LITTLE_ENDIAN
#define BIT_SHIFT(x, y, k) (((x) >> (k)) | ((uint64_t)(y) << (32-(k))))
#else
#define BIT_SHIFT(x, y, k) (((uint64_t)(x) << (k)) | ((y) >> (32-(k))))
#endif

#define LOWER8b_MASK rte_le_to_cpu_32(0xff)
#define LOWER16b_MASK rte_le_to_cpu_32(0xffff)
#define LOWER24b_MASK rte_le_to_cpu_32(0xffffff)
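
/*
 * How the unaligned-key helpers above are used: in __rte_jhash_2hashes()
 * below, k points to the aligned word containing the first key byte and s
 * is the key's misalignment in bits (0, 8, 16 or 24). BIT_SHIFT(k[i],
 * k[i + 1], s) reassembles one 32-bit word of the key from the two aligned
 * words that straddle it; on a little-endian CPU with s == 8, for example,
 * it contributes (k[i] >> 8) | (k[i + 1] << 24) to the 32-bit state. The
 * LOWERxb_MASK constants keep only the remaining 1, 2 or 3 key bytes of
 * the last, partly filled word.
 */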

static inline void
__rte_jhash_2hashes(const void *key, uint32_t length, uint32_t *pc,
        uint32_t *pb, unsigned check_align)
{
    uint32_t a, b, c;

    /* Set up the internal state */
    a = b = c = RTE_JHASH_GOLDEN_RATIO + ((uint32_t)length) + *pc;
    c += *pb;

    /*
     * Check key alignment. For the x86 architecture, the first case is
     * always optimal. If check_align is not set, the first case is used.
     */
#if defined(RTE_ARCH_X86_64) || defined(RTE_ARCH_I686) || defined(RTE_ARCH_X86_X32)
    const uint32_t *k = (const uint32_t *)key;
    const uint32_t s = 0;
#else
    const uint32_t *k = (const uint32_t *)((uintptr_t)key & (uintptr_t)~3);
    const uint32_t s = ((uintptr_t)key & 3) * CHAR_BIT;
#endif
    if (!check_align || s == 0) {
        while (length > 12) {
            a += k[0];
            b += k[1];
            c += k[2];
            __rte_jhash_mix(a, b, c);
            k += 3;
            length -= 12;
        }

        /* last block: handle the remaining 0-12 bytes */
        switch (length) {
        case 12: c += k[2]; b += k[1]; a += k[0]; break;
        case 11: c += k[2] & LOWER24b_MASK; b += k[1]; a += k[0]; break;
        case 10: c += k[2] & LOWER16b_MASK; b += k[1]; a += k[0]; break;
        case 9:  c += k[2] & LOWER8b_MASK; b += k[1]; a += k[0]; break;
        case 8:  b += k[1]; a += k[0]; break;
        case 7:  b += k[1] & LOWER24b_MASK; a += k[0]; break;
        case 6:  b += k[1] & LOWER16b_MASK; a += k[0]; break;
        case 5:  b += k[1] & LOWER8b_MASK; a += k[0]; break;
        case 4:  a += k[0]; break;
        case 3:  a += k[0] & LOWER24b_MASK; break;
        case 2:  a += k[0] & LOWER16b_MASK; break;
        case 1:  a += k[0] & LOWER8b_MASK; break;
        /* zero length strings require no mixing */
        case 0:  *pc = c; *pb = b; return;
        }
    } else {
        /* all but the last block: affect some 32 bits of (a, b, c) */
        while (length > 12) {
            a += BIT_SHIFT(k[0], k[1], s);
            b += BIT_SHIFT(k[1], k[2], s);
            c += BIT_SHIFT(k[2], k[3], s);
            __rte_jhash_mix(a, b, c);
            k += 3;
            length -= 12;
        }

        /* last block: affect all 32 bits of (c) */
        switch (length) {
        case 12:
            a += BIT_SHIFT(k[0], k[1], s);
            b += BIT_SHIFT(k[1], k[2], s);
            c += BIT_SHIFT(k[2], k[3], s); break;
        case 11:
            a += BIT_SHIFT(k[0], k[1], s);
            b += BIT_SHIFT(k[1], k[2], s);
            c += BIT_SHIFT(k[2], k[3], s) & LOWER24b_MASK; break;
        case 10:
            a += BIT_SHIFT(k[0], k[1], s);
            b += BIT_SHIFT(k[1], k[2], s);
            c += BIT_SHIFT(k[2], k[3], s) & LOWER16b_MASK; break;
        case 9:
            a += BIT_SHIFT(k[0], k[1], s);
            b += BIT_SHIFT(k[1], k[2], s);
            c += BIT_SHIFT(k[2], k[3], s) & LOWER8b_MASK; break;
        case 8:
            a += BIT_SHIFT(k[0], k[1], s);
            b += BIT_SHIFT(k[1], k[2], s); break;
        case 7:
            a += BIT_SHIFT(k[0], k[1], s);
            b += BIT_SHIFT(k[1], k[2], s) & LOWER24b_MASK; break;
        case 6:
            a += BIT_SHIFT(k[0], k[1], s);
            b += BIT_SHIFT(k[1], k[2], s) & LOWER16b_MASK; break;
        case 5:
            a += BIT_SHIFT(k[0], k[1], s);
            b += BIT_SHIFT(k[1], k[2], s) & LOWER8b_MASK; break;
        case 4:
            a += BIT_SHIFT(k[0], k[1], s); break;
        case 3:
            a += BIT_SHIFT(k[0], k[1], s) & LOWER24b_MASK; break;
        case 2:
            a += BIT_SHIFT(k[0], k[1], s) & LOWER16b_MASK; break;
        case 1:
            a += BIT_SHIFT(k[0], k[1], s) & LOWER8b_MASK; break;
        /* zero length strings require no mixing */
        case 0:  *pc = c; *pb = b; return;
        }
    }

    __rte_jhash_final(a, b, c);

    *pc = c;
    *pb = b;
}

/**
 * Same as rte_jhash, but takes two seeds and returns two uint32_ts.
 * pc and pb must be non-null, and *pc and *pb must both be initialized
 * with seeds. If you pass in (*pb)=0, the output (*pc) will be
 * the same as the return value from rte_jhash.
 *
 * @param key    Key to calculate hash of.
 * @param length Length of key in bytes.
 * @param pc     IN: seed OUT: primary hash value.
 * @param pb     IN: second seed OUT: secondary hash value.
 */
static inline void
rte_jhash_2hashes(const void *key, uint32_t length, uint32_t *pc, uint32_t *pb)
{
    __rte_jhash_2hashes(key, length, pc, pb, 1);
}
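
/*
 * Usage sketch (illustrative only): deriving two independent bucket
 * indexes from one key in a single pass, as a two-choice or cuckoo-style
 * hash table might. The helper name, the seeds and the power-of-two
 * num_buckets parameter are assumptions made for the example.
 */
static inline void
example_jhash_two_buckets(const void *key, uint32_t key_len,
        uint32_t num_buckets, uint32_t *idx1, uint32_t *idx2)
{
    uint32_t h1 = 0x12345678;   /* first seed */
    uint32_t h2 = 0x9abcdef0;   /* second seed */

    /* One pass over the key yields both hash values */
    rte_jhash_2hashes(key, key_len, &h1, &h2);

    /* num_buckets is assumed to be a power of two */
    *idx1 = h1 & (num_buckets - 1);
    *idx2 = h2 & (num_buckets - 1);
}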

/**
 * Same as rte_jhash_32b, but takes two seeds and returns two uint32_ts.
 * pc and pb must be non-null, and *pc and *pb must both be initialized
 * with seeds. If you pass in (*pb)=0, the output (*pc) will be
 * the same as the return value from rte_jhash_32b.
 *
 * @param k      Key to calculate hash of.
 * @param length Length of key in units of 4 bytes.
 * @param pc     IN: seed OUT: primary hash value.
 * @param pb     IN: second seed OUT: secondary hash value.
 */
static inline void
rte_jhash_32b_2hashes(const uint32_t *k, uint32_t length, uint32_t *pc,
        uint32_t *pb)
{
    __rte_jhash_2hashes((const void *) k, (length << 2), pc, pb, 0);
}
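
/*
 * Usage sketch (illustrative only): two hashes of a key that is already an
 * array of uint32_ts, e.g. an IPv6 address viewed as four 32-bit words.
 * The helper name and the zero seeds are assumptions made for the example;
 * note that the length argument is in 32-bit words, not bytes.
 */
static inline void
example_jhash_ipv6_twice(const uint32_t addr[4], uint32_t *h1, uint32_t *h2)
{
    *h1 = 0;    /* first seed */
    *h2 = 0;    /* second seed */

    rte_jhash_32b_2hashes(addr, 4, h1, h2);
}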

/**
 * The most generic version, hashes an arbitrary sequence
 * of bytes. No alignment or length assumptions are made about
 * the input key.
 *
 * @param key     Key to calculate hash of.
 * @param length  Length of key in bytes.
 * @param initval Initialising value of hash.
 * @return        Calculated hash value.
 */
static inline uint32_t
rte_jhash(const void *key, uint32_t length, uint32_t initval)
{
    uint32_t initval2 = 0;

    rte_jhash_2hashes(key, length, &initval, &initval2);
    return initval;
}
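
/*
 * Usage sketch (illustrative only): hashing an arbitrary byte buffer, here
 * a NUL-terminated string, with rte_jhash(). The helper name and the seed
 * value 0 are assumptions made for the example.
 */
static inline uint32_t
example_jhash_string(const char *name)
{
    /* Length is in bytes; no alignment requirement on the buffer */
    return rte_jhash(name, (uint32_t)strlen(name), 0);
}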

/**
 * A special optimized version that handles one or more uint32_ts.
 * The length parameter here is the number of uint32_ts in the key.
 *
 * @param k       Key to calculate hash of.
 * @param length  Length of key in units of 4 bytes.
 * @param initval Initialising value of hash.
 * @return        Calculated hash value.
 */
static inline uint32_t
rte_jhash_32b(const uint32_t *k, uint32_t length, uint32_t initval)
{
    uint32_t initval2 = 0;

    rte_jhash_32b_2hashes(k, length, &initval, &initval2);
    return initval;
}
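
/*
 * Usage sketch (illustrative only): rte_jhash_32b() for a key that is a
 * whole number of 32-bit words, e.g. source address, destination address
 * and packed ports of a flow. The helper name and 3-word layout are
 * assumptions made for the example.
 */
static inline uint32_t
example_jhash_3word_key(const uint32_t key[3], uint32_t seed)
{
    /* The length argument is given in uint32_ts (3), not bytes (12) */
    return rte_jhash_32b(key, 3, seed);
}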

static inline uint32_t
__rte_jhash_3words(uint32_t a, uint32_t b, uint32_t c, uint32_t initval)
{
    a += RTE_JHASH_GOLDEN_RATIO + initval;
    b += RTE_JHASH_GOLDEN_RATIO + initval;
    c += RTE_JHASH_GOLDEN_RATIO + initval;

    __rte_jhash_final(a, b, c);
    return c;
}

/**
 * A special ultra-optimized version that knows it is hashing exactly
 * 3 words.
 *
 * @param a       First word to calculate hash of.
 * @param b       Second word to calculate hash of.
 * @param c       Third word to calculate hash of.
 * @param initval Initialising value of hash.
 * @return        Calculated hash value.
 */
static inline uint32_t
rte_jhash_3words(uint32_t a, uint32_t b, uint32_t c, uint32_t initval)
{
    return __rte_jhash_3words(a + 12, b + 12, c + 12, initval);
}

/**
 * A special ultra-optimized version that knows it is hashing exactly
 * 2 words.
 *
 * @param a       First word to calculate hash of.
 * @param b       Second word to calculate hash of.
 * @param initval Initialising value of hash.
 * @return        Calculated hash value.
 */
static inline uint32_t
rte_jhash_2words(uint32_t a, uint32_t b, uint32_t initval)
{
    return __rte_jhash_3words(a + 8, b + 8, 8, initval);
}

/**
 * A special ultra-optimized version that knows it is hashing exactly
 * 1 word.
 *
 * @param a       Word to calculate hash of.
 * @param initval Initialising value of hash.
 * @return        Calculated hash value.
 */
static inline uint32_t
rte_jhash_1word(uint32_t a, uint32_t initval)
{
    return __rte_jhash_3words(a + 4, 4, 4, initval);
}
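
/*
 * Usage sketch for the fixed-size variants above (illustrative only): an
 * IPv4 flow hashed with rte_jhash_3words() from its addresses and packed
 * ports. The helper name, parameters and port packing are assumptions made
 * for the example; rte_jhash_2words() and rte_jhash_1word() are used the
 * same way with fewer words.
 */
static inline uint32_t
example_jhash_ipv4_flow(uint32_t src_ip, uint32_t dst_ip,
        uint16_t src_port, uint16_t dst_port, uint32_t seed)
{
    uint32_t ports = ((uint32_t)src_port << 16) | dst_port;

    return rte_jhash_3words(src_ip, dst_ip, ports, seed);
}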

#endif /* _RTE_JHASH_H */