/*-
 *   BSD LICENSE
 *
 *   Copyright(c) 2010-2015 Intel Corporation. All rights reserved.
 *   All rights reserved.
 *
 *   Redistribution and use in source and binary forms, with or without
 *   modification, are permitted provided that the following conditions
 *   are met:
 *
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in
 *       the documentation and/or other materials provided with the
 *       distribution.
 *     * Neither the name of Intel Corporation nor the names of its
 *       contributors may be used to endorse or promote products derived
 *       from this software without specific prior written permission.
 *
 *   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 *   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 *   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 *   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 *   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 *   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 *   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 *   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 *   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#include <rte_byteorder.h>
/* jhash.h: Jenkins hash support.
 *
 * Copyright (C) 2006 Bob Jenkins (bob_jenkins@burtleburtle.net)
 *
 * http://burtleburtle.net/bob/hash/
 *
 * These are the credits from Bob's sources:
 *
 * lookup3.c, by Bob Jenkins, May 2006, Public Domain.
 *
 * These are functions for producing 32-bit hashes for hash table lookup.
 * hashword(), hashlittle(), hashlittle2(), hashbig(), mix(), and final()
 * are externally useful functions. Routines to test the hash are included
 * if SELF_TEST is defined. You can use this free for any purpose. It's in
 * the public domain. It has no warranty.
 */
/* Rotate a 32-bit value left by k bits; k must satisfy 0 < k < 32,
 * otherwise the shift is undefined behavior. Only constants in that
 * range are used below. */
#define rot(x, k) (((x) << (k)) | ((x) >> (32-(k))))

/** @internal Internal function. NOTE: Arguments are modified.
 *
 * lookup3 mix(): reversibly scrambles the three running state words.
 * Wrapped in do { } while (0) so it behaves as a single statement
 * (safe inside un-braced if/else bodies).
 */
#define __rte_jhash_mix(a, b, c) do { \
	a -= c; a ^= rot(c, 4); c += b; \
	b -= a; b ^= rot(a, 6); a += c; \
	c -= b; c ^= rot(b, 8); b += a; \
	a -= c; a ^= rot(c, 16); c += b; \
	b -= a; b ^= rot(a, 19); a += c; \
	c -= b; c ^= rot(b, 4); b += a; \
} while (0)
/** @internal Internal function. NOTE: Arguments are modified.
 *
 * lookup3 final(): irreversibly avalanches the state so every input
 * bit affects every bit of c (the primary hash) and b (the secondary).
 * Wrapped in do { } while (0) so it behaves as a single statement.
 */
#define __rte_jhash_final(a, b, c) do { \
	c ^= b; c -= rot(b, 14); \
	a ^= c; a -= rot(c, 11); \
	b ^= a; b -= rot(a, 25); \
	c ^= b; c -= rot(b, 16); \
	a ^= c; a -= rot(c, 4);  \
	b ^= a; b -= rot(a, 14); \
	c ^= b; c -= rot(b, 24); \
} while (0)
/** The golden ratio: an arbitrary value. */
#define RTE_JHASH_GOLDEN_RATIO      0xdeadbeef

/* Extract one aligned 32-bit word from two adjacent aligned words x, y
 * when the key itself starts k bits past the aligned boundary. The
 * uint64_t cast widens before shifting so no bits are lost; callers
 * truncate the result back to 32 bits on assignment. */
#if RTE_BYTE_ORDER == RTE_LITTLE_ENDIAN
#define BIT_SHIFT(x, y, k) (((x) >> (k)) | ((uint64_t)(y) << (32-(k))))
#else
#define BIT_SHIFT(x, y, k) (((uint64_t)(x) << (k)) | ((y) >> (32-(k))))
#endif

/* Masks selecting the low 1, 2 or 3 bytes of a word in key (wire)
 * order, used for the tail of a key that is not a multiple of 4. */
#define LOWER8b_MASK rte_le_to_cpu_32(0xff)
#define LOWER16b_MASK rte_le_to_cpu_32(0xffff)
#define LOWER24b_MASK rte_le_to_cpu_32(0xffffff)
108 __rte_jhash_2hashes(const void *key, uint32_t length, uint32_t *pc,
109 uint32_t *pb, unsigned check_align)
113 /* Set up the internal state */
114 a = b = c = RTE_JHASH_GOLDEN_RATIO + ((uint32_t)length) + *pc;
118 * Check key alignment. For x86 architecture, first case is always optimal
119 * If check_align is not set, first case will be used
121 #if defined(RTE_ARCH_X86_64) || defined(RTE_ARCH_I686) || defined(RTE_ARCH_X86_X32)
122 const uint32_t *k = key;
123 const uint32_t s = 0;
125 const uint32_t *k = (uint32_t *)(uintptr_t)key & (uintptr_t)~3);
126 const uint32_t s = ((uintptr_t)key & 3) * CHAR_BIT;
128 if (!check_align || s == 0) {
129 while (length > 12) {
134 __rte_jhash_mix(a, b, c);
142 c += k[2]; b += k[1]; a += k[0]; break;
144 c += k[2] & LOWER24b_MASK; b += k[1]; a += k[0]; break;
146 c += k[2] & LOWER16b_MASK; b += k[1]; a += k[0]; break;
148 c += k[2] & LOWER8b_MASK; b += k[1]; a += k[0]; break;
150 b += k[1]; a += k[0]; break;
152 b += k[1] & LOWER24b_MASK; a += k[0]; break;
154 b += k[1] & LOWER16b_MASK; a += k[0]; break;
156 b += k[1] & LOWER8b_MASK; a += k[0]; break;
160 a += k[0] & LOWER24b_MASK; break;
162 a += k[0] & LOWER16b_MASK; break;
164 a += k[0] & LOWER8b_MASK; break;
165 /* zero length strings require no mixing */
172 /* all but the last block: affect some 32 bits of (a, b, c) */
173 while (length > 12) {
174 a += BIT_SHIFT(k[0], k[1], s);
175 b += BIT_SHIFT(k[1], k[2], s);
176 c += BIT_SHIFT(k[2], k[3], s);
177 __rte_jhash_mix(a, b, c);
183 /* last block: affect all 32 bits of (c) */
186 a += BIT_SHIFT(k[0], k[1], s);
187 b += BIT_SHIFT(k[1], k[2], s);
188 c += BIT_SHIFT(k[2], k[3], s);
191 a += BIT_SHIFT(k[0], k[1], s);
192 b += BIT_SHIFT(k[1], k[2], s);
193 c += BIT_SHIFT(k[2], k[3], s) & LOWER24b_MASK;
196 a += BIT_SHIFT(k[0], k[1], s);
197 b += BIT_SHIFT(k[1], k[2], s);
198 c += BIT_SHIFT(k[2], k[3], s) & LOWER16b_MASK;
201 a += BIT_SHIFT(k[0], k[1], s);
202 b += BIT_SHIFT(k[1], k[2], s);
203 c += BIT_SHIFT(k[2], k[3], s) & LOWER8b_MASK;
206 a += BIT_SHIFT(k[0], k[1], s);
207 b += BIT_SHIFT(k[1], k[2], s);
210 a += BIT_SHIFT(k[0], k[1], s);
211 b += BIT_SHIFT(k[1], k[2], s) & LOWER24b_MASK;
214 a += BIT_SHIFT(k[0], k[1], s);
215 b += BIT_SHIFT(k[1], k[2], s) & LOWER16b_MASK;
218 a += BIT_SHIFT(k[0], k[1], s);
219 b += BIT_SHIFT(k[1], k[2], s) & LOWER8b_MASK;
222 a += BIT_SHIFT(k[0], k[1], s);
225 a += BIT_SHIFT(k[0], k[1], s) & LOWER24b_MASK;
228 a += BIT_SHIFT(k[0], k[1], s) & LOWER16b_MASK;
231 a += BIT_SHIFT(k[0], k[1], s) & LOWER8b_MASK;
233 /* zero length strings require no mixing */
241 __rte_jhash_final(a, b, c);
/**
 * Calculate two hash values for two seeds.
 *
 * Same as rte_jhash, but takes two seeds and return two uint32_ts.
 * pc and pb must be non-null, and *pc and *pb must both be initialized
 * with seeds. If you pass in (*pb)=0, the output (*pc) will be
 * the same as the return value from rte_jhash.
 *
 * @param key
 *   Key to calculate hash of.
 * @param length
 *   Length of key in bytes.
 * @param pc
 *   IN: seed OUT: primary hash value.
 * @param pb
 *   IN: second seed OUT: secondary hash value.
 */
static inline void
rte_jhash_2hashes(const void *key, uint32_t length, uint32_t *pc, uint32_t *pb)
{
	/* check_align=1: take the safe unaligned path on non-x86 targets. */
	__rte_jhash_2hashes(key, length, pc, pb, 1);
}
/**
 * Calculate two hash values for two seeds.
 *
 * Same as rte_jhash_32b, but takes two seeds and return two uint32_ts.
 * pc and pb must be non-null, and *pc and *pb must both be initialized
 * with seeds. If you pass in (*pb)=0, the output (*pc) will be
 * the same as the return value from rte_jhash_32b.
 *
 * @param k
 *   Key to calculate hash of.
 * @param length
 *   Length of key in units of 4 bytes.
 * @param pc
 *   IN: seed OUT: primary hash value.
 * @param pb
 *   IN: second seed OUT: secondary hash value.
 */
static inline void
rte_jhash_32b_2hashes(const uint32_t *k, uint32_t length, uint32_t *pc, uint32_t *pb)
{
	/* Word count -> byte count; check_align=0 since k is word-aligned. */
	__rte_jhash_2hashes((const void *) k, (length << 2), pc, pb, 0);
}
/**
 * The most generic version, hashes an arbitrary sequence
 * of bytes. No alignment or length assumptions are made about
 * the input key.
 *
 * @param key
 *   Key to calculate hash of.
 * @param length
 *   Length of key in bytes.
 * @param initval
 *   Initialising value of hash.
 * @return
 *   Calculated hash value.
 */
static inline uint32_t
rte_jhash(const void *key, uint32_t length, uint32_t initval)
{
	uint32_t initval2 = 0;

	rte_jhash_2hashes(key, length, &initval, &initval2);

	/* The primary hash is written back through the first seed. */
	return initval;
}
/**
 * A special optimized version that handles 1 or more of uint32_ts.
 * The length parameter here is the number of uint32_ts in the key.
 *
 * @param k
 *   Key to calculate hash of.
 * @param length
 *   Length of key in units of 4 bytes.
 * @param initval
 *   Initialising value of hash.
 * @return
 *   Calculated hash value.
 */
static inline uint32_t
rte_jhash_32b(const uint32_t *k, uint32_t length, uint32_t initval)
{
	uint32_t initval2 = 0;

	rte_jhash_32b_2hashes(k, length, &initval, &initval2);

	/* The primary hash is written back through the first seed. */
	return initval;
}
/**
 * @deprecated Kept for API compatibility; use rte_jhash_32b() instead.
 */
static inline uint32_t
__attribute__ ((deprecated))
rte_jhash2(const uint32_t *k, uint32_t length, uint32_t initval)
{
	uint32_t initval2 = 0;

	rte_jhash_32b_2hashes(k, length, &initval, &initval2);

	return initval;
}
347 static inline uint32_t
348 __rte_jhash_3words(uint32_t a, uint32_t b, uint32_t c, uint32_t initval)
350 a += RTE_JHASH_GOLDEN_RATIO + initval;
351 b += RTE_JHASH_GOLDEN_RATIO + initval;
352 c += RTE_JHASH_GOLDEN_RATIO + initval;
354 __rte_jhash_final(a, b, c);
/**
 * A special ultra-optimized versions that knows it is hashing exactly
 * 3 words.
 *
 * @param a
 *   First word to calculate hash of.
 * @param b
 *   Second word to calculate hash of.
 * @param c
 *   Third word to calculate hash of.
 * @param initval
 *   Initialising value of hash.
 * @return
 *   Calculated hash value.
 */
static inline uint32_t
rte_jhash_3words(uint32_t a, uint32_t b, uint32_t c, uint32_t initval)
{
	/* +12 folds the 12-byte key length into the state (jhash convention). */
	return __rte_jhash_3words(a + 12, b + 12, c + 12, initval);
}
/**
 * A special ultra-optimized versions that knows it is hashing exactly
 * 2 words.
 *
 * @param a
 *   First word to calculate hash of.
 * @param b
 *   Second word to calculate hash of.
 * @param initval
 *   Initialising value of hash.
 * @return
 *   Calculated hash value.
 */
static inline uint32_t
rte_jhash_2words(uint32_t a, uint32_t b, uint32_t initval)
{
	/* +8 folds the 8-byte key length into the state (jhash convention). */
	return __rte_jhash_3words(a + 8, b + 8, 8, initval);
}
/**
 * A special ultra-optimized versions that knows it is hashing exactly
 * 1 word.
 *
 * @param a
 *   Word to calculate hash of.
 * @param initval
 *   Initialising value of hash.
 * @return
 *   Calculated hash value.
 */
static inline uint32_t
rte_jhash_1word(uint32_t a, uint32_t initval)
{
	/* +4 folds the 4-byte key length into the state (jhash convention). */
	return __rte_jhash_3words(a + 4, 4, 4, initval);
}
#endif /* _RTE_JHASH_H */