1 // SPDX-FileCopyrightText: 2009-2012 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
3 // SPDX-License-Identifier: MIT
#include <stdint.h>
#include <stddef.h>

#if defined(__FreeBSD__)
#include <sys/endian.h>
#else
#include <endian.h>
#endif
/*
 * Example hash function.
 *
 * Source: http://burtleburtle.net/bob/c/lookup3.c
 * Originally Public Domain
 */
/*
 * rot() - rotate the 32-bit value @x left by @k bits.
 * @k must be in [1, 31]: a count of 0 or 32 would make one of the two
 * shifts a shift by 32, which is undefined behavior on a 32-bit type.
 * (All uses below pass constants between 4 and 25.)
 */
#define rot(x, k) (((x) << (k)) | ((x) >> (32 - (k))))

/*
 * mix() - mix 3 32-bit values reversibly (Bob Jenkins' lookup3).
 * Wrapped in do/while (0) so the macro behaves as a single statement
 * in every context (e.g. an unbraced if/else).
 */
#define mix(a, b, c) \
do { \
	a -= c; a ^= rot(c, 4); c += b; \
	b -= a; b ^= rot(a, 6); a += c; \
	c -= b; c ^= rot(b, 8); b += a; \
	a -= c; a ^= rot(c, 16); c += b; \
	b -= a; b ^= rot(a, 19); a += c; \
	c -= b; c ^= rot(b, 4); b += a; \
} while (0)

/*
 * final() - final mixing of 3 32-bit values (a, b, c) into c.
 * Pairs of (a, b, c) values differing in only a few bits will usually
 * produce values of c that look totally different.
 */
#define final(a, b, c) \
do { \
	c ^= b; c -= rot(b, 14); \
	a ^= c; a -= rot(c, 11); \
	b ^= a; b -= rot(a, 25); \
	c ^= b; c -= rot(b, 16); \
	a ^= c; a -= rot(c, 4); \
	b ^= a; b -= rot(a, 14); \
	c ^= b; c -= rot(b, 24); \
} while (0)

/*
 * Use the fast aligned 32-bit/16-bit read paths only on little-endian
 * machines. The byte-at-a-time fallback assembles words in little-endian
 * order, so hashlittle() returns the same value on any byte order.
 */
#if (BYTE_ORDER == LITTLE_ENDIAN)
#define HASH_LITTLE_ENDIAN 1
#else
#define HASH_LITTLE_ENDIAN 0
#endif

/*
 * hashlittle() -- hash a variable-length key into a 32-bit value
 * key     : the key (the unaligned variable-length array of bytes)
 * length  : the length of the key, counting by bytes
 * initval : can be any 4-byte value
 * Returns a 32-bit value. Every bit of the key affects every bit of
 * the return value. Two keys differing by one or two bits will have
 * totally different hash values.
 *
 * The best hash table sizes are powers of 2. There is no need to do
 * mod a prime (mod is sooo slow!). If you need less than 32 bits,
 * use a bitmask. For example, if you need only 10 bits, do
 *   h = (h & hashmask(10));
 * In which case, the hash table should have hashsize(10) elements.
 *
 * If you are hashing n strings (uint8_t **)k, do it like this:
 *   for (i = 0, h = 0; i < n; ++i) h = hashlittle(k[i], len[i], h);
 *
 * By Bob Jenkins, 2006. bob_jenkins@burtleburtle.net. You may use this
 * code any way you wish, private, educational, or commercial. It's free.
 *
 * Use for hash table lookup, or anything where one collision in 2^^32 is
 * acceptable. Do NOT use for cryptographic purposes.
 */
uint32_t hashlittle(const void *key, size_t length, uint32_t initval)
{
	uint32_t a, b, c;	/* internal state */
	union {
		const void *ptr;
		size_t i;
	} u;	/* used only to inspect the alignment of key */

	/* Set up the internal state */
	a = b = c = 0xdeadbeef + ((uint32_t) length) + initval;

	u.ptr = key;
	if (HASH_LITTLE_ENDIAN && ((u.i & 0x3) == 0)) {
		const uint32_t *k = (const uint32_t *) key;	/* read 32-bit chunks */

		/*------ all but last block: aligned reads and affect 32 bits of (a,b,c) */
		while (length > 12) {
			a += k[0];
			b += k[1];
			c += k[2];
			mix(a, b, c);
			length -= 12;
			k += 3;
		}

		/*----------------------------- handle the last (probably partial) block */
		/*
		 * "k[2]&0xffffff" actually reads beyond the end of the string, but
		 * then masks off the part it's not allowed to read. Because the
		 * string is aligned, the masked-off tail is in the same word as the
		 * rest of the string. Every machine with memory protection I've seen
		 * does it on word boundaries, so is OK with this. But VALGRIND will
		 * still catch it and complain. The masking trick does make the hash
		 * noticeably faster for short strings (like English words).
		 */
#ifndef VALGRIND
		switch (length) {
		case 12: c += k[2]; b += k[1]; a += k[0]; break;
		case 11: c += k[2] & 0xffffff; b += k[1]; a += k[0]; break;
		case 10: c += k[2] & 0xffff; b += k[1]; a += k[0]; break;
		case 9 : c += k[2] & 0xff; b += k[1]; a += k[0]; break;
		case 8 : b += k[1]; a += k[0]; break;
		case 7 : b += k[1] & 0xffffff; a += k[0]; break;
		case 6 : b += k[1] & 0xffff; a += k[0]; break;
		case 5 : b += k[1] & 0xff; a += k[0]; break;
		case 4 : a += k[0]; break;
		case 3 : a += k[0] & 0xffffff; break;
		case 2 : a += k[0] & 0xffff; break;
		case 1 : a += k[0] & 0xff; break;
		case 0 : return c;	/* zero length strings require no mixing */
		}
#else /* make valgrind happy */
		{
			const uint8_t *k8 = (const uint8_t *) k;

			switch (length) {
			case 12: c += k[2]; b += k[1]; a += k[0]; break;
			case 11: c += ((uint32_t) k8[10]) << 16; /* fall through */
			case 10: c += ((uint32_t) k8[9]) << 8; /* fall through */
			case 9 : c += k8[8]; /* fall through */
			case 8 : b += k[1]; a += k[0]; break;
			case 7 : b += ((uint32_t) k8[6]) << 16; /* fall through */
			case 6 : b += ((uint32_t) k8[5]) << 8; /* fall through */
			case 5 : b += k8[4]; /* fall through */
			case 4 : a += k[0]; break;
			case 3 : a += ((uint32_t) k8[2]) << 16; /* fall through */
			case 2 : a += ((uint32_t) k8[1]) << 8; /* fall through */
			case 1 : a += k8[0]; break;
			case 0 : return c;
			}
		}
#endif /* !valgrind */
	} else if (HASH_LITTLE_ENDIAN && ((u.i & 0x1) == 0)) {
		const uint16_t *k = (const uint16_t *) key;	/* read 16-bit chunks */
		const uint8_t *k8;

		/*--------------- all but last block: aligned reads and different mixing */
		while (length > 12) {
			a += k[0] + (((uint32_t) k[1]) << 16);
			b += k[2] + (((uint32_t) k[3]) << 16);
			c += k[4] + (((uint32_t) k[5]) << 16);
			mix(a, b, c);
			length -= 12;
			k += 6;
		}

		/*----------------------------- handle the last (probably partial) block */
		k8 = (const uint8_t *) k;
		switch (length) {
		case 12: c += k[4] + (((uint32_t) k[5]) << 16);
			 b += k[2] + (((uint32_t) k[3]) << 16);
			 a += k[0] + (((uint32_t) k[1]) << 16);
			 break;
		case 11: c += ((uint32_t) k8[10]) << 16; /* fall through */
		case 10: c += k[4];
			 b += k[2] + (((uint32_t) k[3]) << 16);
			 a += k[0] + (((uint32_t) k[1]) << 16);
			 break;
		case 9 : c += k8[8]; /* fall through */
		case 8 : b += k[2] + (((uint32_t) k[3]) << 16);
			 a += k[0] + (((uint32_t) k[1]) << 16);
			 break;
		case 7 : b += ((uint32_t) k8[6]) << 16; /* fall through */
		case 6 : b += k[2];
			 a += k[0] + (((uint32_t) k[1]) << 16);
			 break;
		case 5 : b += k8[4]; /* fall through */
		case 4 : a += k[0] + (((uint32_t) k[1]) << 16);
			 break;
		case 3 : a += ((uint32_t) k8[2]) << 16; /* fall through */
		case 2 : a += k[0];
			 break;
		case 1 : a += k8[0];
			 break;
		case 0 : return c;	/* zero length requires no mixing */
		}
	} else {	/* need to read the key one byte at a time */
		const uint8_t *k = (const uint8_t *) key;

		/*--------------- all but the last block: affect some 32 bits of (a, b, c) */
		while (length > 12) {
			a += k[0];
			a += ((uint32_t) k[1]) << 8;
			a += ((uint32_t) k[2]) << 16;
			a += ((uint32_t) k[3]) << 24;
			b += k[4];
			b += ((uint32_t) k[5]) << 8;
			b += ((uint32_t) k[6]) << 16;
			b += ((uint32_t) k[7]) << 24;
			c += k[8];
			c += ((uint32_t) k[9]) << 8;
			c += ((uint32_t) k[10]) << 16;
			c += ((uint32_t) k[11]) << 24;
			mix(a, b, c);
			length -= 12;
			k += 12;
		}

		/*-------------------------------- last block: affect all 32 bits of (c) */
		switch (length) {	/* all the case statements fall through */
		case 12: c += ((uint32_t) k[11]) << 24; /* fall through */
		case 11: c += ((uint32_t) k[10]) << 16; /* fall through */
		case 10: c += ((uint32_t) k[9]) << 8; /* fall through */
		case 9 : c += k[8]; /* fall through */
		case 8 : b += ((uint32_t) k[7]) << 24; /* fall through */
		case 7 : b += ((uint32_t) k[6]) << 16; /* fall through */
		case 6 : b += ((uint32_t) k[5]) << 8; /* fall through */
		case 5 : b += k[4]; /* fall through */
		case 4 : a += ((uint32_t) k[3]) << 24; /* fall through */
		case 3 : a += ((uint32_t) k[2]) << 16; /* fall through */
		case 2 : a += ((uint32_t) k[1]) << 8; /* fall through */
		case 1 : a += k[0];
			 break;
		case 0 : return c;
		}
	}

	final(a, b, c);
	return c;
}
/*
 * jhash - hash an arbitrary byte sequence into a 32-bit value.
 * @key: pointer to the bytes to hash (need not be aligned).
 * @length: number of bytes to hash.
 * @seed: initial value; distinct seeds give independent hash functions.
 *
 * Thin convenience wrapper around hashlittle(). Suitable for hash-table
 * bucketing; NOT suitable for cryptographic purposes.
 */
uint32_t jhash(const void *key, size_t length, uint32_t seed)
{
	return hashlittle(key, length, seed);
}
251 #endif /* _JHASH_H */