/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2010-2015 Intel Corporation.
 */
#include <stdint.h>
#include <limits.h>

#include <rte_byteorder.h>
/* jhash.h: Jenkins hash support.
 *
 * Copyright (C) 2006 Bob Jenkins (bob_jenkins@burtleburtle.net)
 *
 * http://burtleburtle.net/bob/hash/
 *
 * These are the credits from Bob's sources:
 *
 * lookup3.c, by Bob Jenkins, May 2006, Public Domain.
 *
 * These are functions for producing 32-bit hashes for hash table lookup.
 * hashword(), hashlittle(), hashlittle2(), hashbig(), mix(), and final()
 * are externally useful functions.  Routines to test the hash are included
 * if SELF_TEST is defined.  You can use this free for any purpose.  It's in
 * the public domain.  It has no warranty.
 */
/** Rotate a 32-bit value @p x left by @p k bits (valid for 0 < k < 32). */
#define rot(x, k) (((x) << (k)) | ((x) >> (32 - (k))))
/** @internal Internal function. NOTE: Arguments are modified. */
#define __rte_jhash_mix(a, b, c) do { \
	a -= c; a ^= rot(c, 4); c += b; \
	b -= a; b ^= rot(a, 6); a += c; \
	c -= b; c ^= rot(b, 8); b += a; \
	a -= c; a ^= rot(c, 16); c += b; \
	b -= a; b ^= rot(a, 19); a += c; \
	c -= b; c ^= rot(b, 4); b += a; \
} while (0)
/** @internal Final mixing of three 32-bit values (a, b, c) into c.
 *  NOTE: Arguments are modified.
 */
#define __rte_jhash_final(a, b, c) do { \
	c ^= b; c -= rot(b, 14); \
	a ^= c; a -= rot(c, 11); \
	b ^= a; b -= rot(a, 25); \
	c ^= b; c -= rot(b, 16); \
	a ^= c; a -= rot(c, 4); \
	b ^= a; b -= rot(a, 14); \
	c ^= b; c -= rot(b, 24); \
} while (0)
/** The golden ratio: an arbitrary value. */
#define RTE_JHASH_GOLDEN_RATIO 0xdeadbeef
/*
 * Extract one 32-bit word of the key that straddles the two aligned words
 * x and y, where k is the bit offset of the key from the aligned boundary.
 * The byte-order conditional keeps the result identical on both endians.
 */
#if RTE_BYTE_ORDER == RTE_LITTLE_ENDIAN
#define BIT_SHIFT(x, y, k) (((x) >> (k)) | ((uint64_t)(y) << (32-(k))))
#else
#define BIT_SHIFT(x, y, k) (((uint64_t)(x) << (k)) | ((y) >> (32-(k))))
#endif
/* Masks selecting the low 1, 2 or 3 bytes of a little-endian 32-bit word
 * (converted so the same bytes are kept on big-endian hosts).
 */
#define LOWER8b_MASK rte_le_to_cpu_32(0xff)
#define LOWER16b_MASK rte_le_to_cpu_32(0xffff)
#define LOWER24b_MASK rte_le_to_cpu_32(0xffffff)
75 __rte_jhash_2hashes(const void *key, uint32_t length, uint32_t *pc,
76 uint32_t *pb, unsigned check_align)
80 /* Set up the internal state */
81 a = b = c = RTE_JHASH_GOLDEN_RATIO + ((uint32_t)length) + *pc;
85 * Check key alignment. For x86 architecture, first case is always optimal
86 * If check_align is not set, first case will be used
88 #if defined(RTE_ARCH_X86_64) || defined(RTE_ARCH_I686) || defined(RTE_ARCH_X86_X32)
89 const uint32_t *k = (const uint32_t *)key;
92 const uint32_t *k = (uint32_t *)((uintptr_t)key & (uintptr_t)~3);
93 const uint32_t s = ((uintptr_t)key & 3) * CHAR_BIT;
95 if (!check_align || s == 0) {
101 __rte_jhash_mix(a, b, c);
109 c += k[2]; b += k[1]; a += k[0]; break;
111 c += k[2] & LOWER24b_MASK; b += k[1]; a += k[0]; break;
113 c += k[2] & LOWER16b_MASK; b += k[1]; a += k[0]; break;
115 c += k[2] & LOWER8b_MASK; b += k[1]; a += k[0]; break;
117 b += k[1]; a += k[0]; break;
119 b += k[1] & LOWER24b_MASK; a += k[0]; break;
121 b += k[1] & LOWER16b_MASK; a += k[0]; break;
123 b += k[1] & LOWER8b_MASK; a += k[0]; break;
127 a += k[0] & LOWER24b_MASK; break;
129 a += k[0] & LOWER16b_MASK; break;
131 a += k[0] & LOWER8b_MASK; break;
132 /* zero length strings require no mixing */
139 /* all but the last block: affect some 32 bits of (a, b, c) */
140 while (length > 12) {
141 a += BIT_SHIFT(k[0], k[1], s);
142 b += BIT_SHIFT(k[1], k[2], s);
143 c += BIT_SHIFT(k[2], k[3], s);
144 __rte_jhash_mix(a, b, c);
150 /* last block: affect all 32 bits of (c) */
153 a += BIT_SHIFT(k[0], k[1], s);
154 b += BIT_SHIFT(k[1], k[2], s);
155 c += BIT_SHIFT(k[2], k[3], s);
158 a += BIT_SHIFT(k[0], k[1], s);
159 b += BIT_SHIFT(k[1], k[2], s);
160 c += BIT_SHIFT(k[2], k[3], s) & LOWER24b_MASK;
163 a += BIT_SHIFT(k[0], k[1], s);
164 b += BIT_SHIFT(k[1], k[2], s);
165 c += BIT_SHIFT(k[2], k[3], s) & LOWER16b_MASK;
168 a += BIT_SHIFT(k[0], k[1], s);
169 b += BIT_SHIFT(k[1], k[2], s);
170 c += BIT_SHIFT(k[2], k[3], s) & LOWER8b_MASK;
173 a += BIT_SHIFT(k[0], k[1], s);
174 b += BIT_SHIFT(k[1], k[2], s);
177 a += BIT_SHIFT(k[0], k[1], s);
178 b += BIT_SHIFT(k[1], k[2], s) & LOWER24b_MASK;
181 a += BIT_SHIFT(k[0], k[1], s);
182 b += BIT_SHIFT(k[1], k[2], s) & LOWER16b_MASK;
185 a += BIT_SHIFT(k[0], k[1], s);
186 b += BIT_SHIFT(k[1], k[2], s) & LOWER8b_MASK;
189 a += BIT_SHIFT(k[0], k[1], s);
192 a += BIT_SHIFT(k[0], k[1], s) & LOWER24b_MASK;
195 a += BIT_SHIFT(k[0], k[1], s) & LOWER16b_MASK;
198 a += BIT_SHIFT(k[0], k[1], s) & LOWER8b_MASK;
200 /* zero length strings require no mixing */
208 __rte_jhash_final(a, b, c);
/**
 * Same as rte_jhash, but takes two seeds and return two uint32_ts.
 * pc and pb must be non-null, and *pc and *pb must both be initialized
 * with seeds. If you pass in (*pb)=0, the output (*pc) will be
 * the same as the return value from rte_jhash.
 *
 * @param key
 *   Key to calculate hash of.
 * @param length
 *   Length of key in bytes.
 * @param pc
 *   IN: seed OUT: primary hash value.
 * @param pb
 *   IN: second seed OUT: secondary hash value.
 */
static inline void
rte_jhash_2hashes(const void *key, uint32_t length, uint32_t *pc, uint32_t *pb)
{
	/* check_align=1: fall back to the unaligned path when needed */
	__rte_jhash_2hashes(key, length, pc, pb, 1);
}
/**
 * Same as rte_jhash_32b, but takes two seeds and return two uint32_ts.
 * pc and pb must be non-null, and *pc and *pb must both be initialized
 * with seeds. If you pass in (*pb)=0, the output (*pc) will be
 * the same as the return value from rte_jhash_32b.
 *
 * @param k
 *   Key to calculate hash of.
 * @param length
 *   Length of key in units of 4 bytes.
 * @param pc
 *   IN: seed OUT: primary hash value.
 * @param pb
 *   IN: second seed OUT: secondary hash value.
 */
static inline void
rte_jhash_32b_2hashes(const uint32_t *k, uint32_t length, uint32_t *pc, uint32_t *pb)
{
	/* Key is word-aligned by contract, so skip the alignment check. */
	__rte_jhash_2hashes((const void *) k, (length << 2), pc, pb, 0);
}
/**
 * The most generic version, hashes an arbitrary sequence
 * of bytes.  No alignment or length assumptions are made about
 * the input key.  For keys not aligned to four byte boundaries
 * or a multiple of four bytes in length, the memory region
 * just after may be read (but not used in the computation).
 * This may cross a page boundary.
 *
 * @param key
 *   Key to calculate hash of.
 * @param length
 *   Length of key in bytes.
 * @param initval
 *   Initialising value of hash.
 * @return
 *   Calculated hash value.
 */
static inline uint32_t
rte_jhash(const void *key, uint32_t length, uint32_t initval)
{
	uint32_t initval2 = 0;

	/* With the secondary seed at 0, *pc equals the classic jhash value. */
	rte_jhash_2hashes(key, length, &initval, &initval2);

	return initval;
}
/**
 * A special optimized version that handles 1 or more of uint32_ts.
 * The length parameter here is the number of uint32_ts in the key.
 *
 * @param k
 *   Key to calculate hash of.
 * @param length
 *   Length of key in units of 4 bytes.
 * @param initval
 *   Initialising value of hash.
 * @return
 *   Calculated hash value.
 */
static inline uint32_t
rte_jhash_32b(const uint32_t *k, uint32_t length, uint32_t initval)
{
	uint32_t initval2 = 0;

	/* With the secondary seed at 0, *pc equals the classic jhash value. */
	rte_jhash_32b_2hashes(k, length, &initval, &initval2);

	return initval;
}
306 static inline uint32_t
307 __rte_jhash_3words(uint32_t a, uint32_t b, uint32_t c, uint32_t initval)
309 a += RTE_JHASH_GOLDEN_RATIO + initval;
310 b += RTE_JHASH_GOLDEN_RATIO + initval;
311 c += RTE_JHASH_GOLDEN_RATIO + initval;
313 __rte_jhash_final(a, b, c);
/**
 * A special ultra-optimized versions that knows it is hashing exactly
 * 3 words.
 *
 * @param a
 *   First word to calculate hash of.
 * @param b
 *   Second word to calculate hash of.
 * @param c
 *   Third word to calculate hash of.
 * @param initval
 *   Initialising value of hash.
 * @return
 *   Calculated hash value.
 */
static inline uint32_t
rte_jhash_3words(uint32_t a, uint32_t b, uint32_t c, uint32_t initval)
{
	/* The +12 offsets fold the 12-byte length into the state, matching
	 * what rte_jhash would compute for a 3-word key.
	 */
	return __rte_jhash_3words(a + 12, b + 12, c + 12, initval);
}
/**
 * A special ultra-optimized versions that knows it is hashing exactly
 * 2 words.
 *
 * @param a
 *   First word to calculate hash of.
 * @param b
 *   Second word to calculate hash of.
 * @param initval
 *   Initialising value of hash.
 * @return
 *   Calculated hash value.
 */
static inline uint32_t
rte_jhash_2words(uint32_t a, uint32_t b, uint32_t initval)
{
	/* The +8 offsets fold the 8-byte length into the state. */
	return __rte_jhash_3words(a + 8, b + 8, 8, initval);
}
/**
 * A special ultra-optimized versions that knows it is hashing exactly
 * 1 word.
 *
 * @param a
 *   Word to calculate hash of.
 * @param initval
 *   Initialising value of hash.
 * @return
 *   Calculated hash value.
 */
static inline uint32_t
rte_jhash_1word(uint32_t a, uint32_t initval)
{
	/* The +4 offsets fold the 4-byte length into the state. */
	return __rte_jhash_3words(a + 4, 4, 4, initval);
}
379 #endif /* _RTE_JHASH_H */