#ifndef ZAPHOD32_HASH_H /* include guard; macro name assumed, matched by the final #endif */
#define ZAPHOD32_HASH_H

#ifndef DEBUG_ZAPHOD32_HASH
#define DEBUG_ZAPHOD32_HASH 0
#endif

#if DEBUG_ZAPHOD32_HASH == 1
#include <stdio.h>
#define ZAPHOD32_WARN6(pat,v0,v1,v2,v3,v4,v5) printf(pat, v0, v1, v2, v3, v4, v5)
#define ZAPHOD32_WARN5(pat,v0,v1,v2,v3,v4) printf(pat, v0, v1, v2, v3, v4)
#define ZAPHOD32_WARN4(pat,v0,v1,v2,v3) printf(pat, v0, v1, v2, v3)
#define ZAPHOD32_WARN3(pat,v0,v1,v2) printf(pat, v0, v1, v2)
#define ZAPHOD32_WARN2(pat,v0,v1) printf(pat, v0, v1)
#define NOTE3(pat,v0,v1,v2) printf(pat, v0, v1, v2)
#elif DEBUG_ZAPHOD32_HASH == 2
#define ZAPHOD32_WARN6(pat,v0,v1,v2,v3,v4,v5)
#define ZAPHOD32_WARN5(pat,v0,v1,v2,v3,v4)
#define ZAPHOD32_WARN4(pat,v0,v1,v2,v3)
#define ZAPHOD32_WARN3(pat,v0,v1,v2)
#define ZAPHOD32_WARN2(pat,v0,v1)
#define NOTE3(pat,v0,v1,v2) printf(pat, v0, v1, v2)
#else
#define ZAPHOD32_WARN6(pat,v0,v1,v2,v3,v4,v5)
#define ZAPHOD32_WARN5(pat,v0,v1,v2,v3,v4)
#define ZAPHOD32_WARN4(pat,v0,v1,v2,v3)
#define ZAPHOD32_WARN3(pat,v0,v1,v2)
#define NOTE3(pat,v0,v1,v2)
#define ZAPHOD32_WARN2(pat,v0,v1)
#endif
/* Find best way to ROTL32/ROTR32 */
#ifndef ROTL32
#if defined(_MSC_VER)
#include <stdlib.h> /* Microsoft put _rotl declaration in here */
#define ROTL32(x,r) _rotl(x,r)
#define ROTR32(x,r) _rotr(x,r)
#else
/* gcc recognises this code and generates a rotate instruction for CPUs with one */
#define ROTL32(x,r) (((U32)(x) << (r)) | ((U32)(x) >> (32 - (r))))
#define ROTR32(x,r) (((U32)(x) << (32 - (r))) | ((U32)(x) >> (r)))
#endif
#endif
#ifndef PERL_SEEN_HV_FUNC_H
#if !defined(U64)
#include <stdint.h>
#define U64 uint64_t
#endif
#if !defined(U32)
#define U32 uint32_t
#endif
#if !defined(U8)
#define U8 unsigned char
#endif
#if !defined(U16)
#define U16 uint16_t
#endif
#ifndef STRLEN
#define STRLEN int
#endif
#endif
#ifndef ZAPHOD32_STATIC_INLINE
#ifdef PERL_STATIC_INLINE
#define ZAPHOD32_STATIC_INLINE PERL_STATIC_INLINE
#else
#define ZAPHOD32_STATIC_INLINE static inline
#endif
#endif
#ifndef STMT_START
#define STMT_START do
#define STMT_END while(0)
#endif
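/* With the fallback definitions above, STMT_START { ... } STMT_END expands to
 * do { ... } while(0), so the multi-statement macros below behave as a single
 * statement (safe inside an unbraced if/else). If STMT_START is already
 * defined (e.g. by Perl's headers) that definition is used instead. */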
#ifndef ZAPHOD32_ALLOW_UNALIGNED_AND_LITTLE_ENDIAN
/* ZAPHOD32_ALLOW_UNALIGNED_AND_LITTLE_ENDIAN only matters if nothing has defined
 * U8TO32_LE etc; when built with Perl these should be defined before this file
 * is included.
 */
#ifdef U32_ALIGNMENT_REQUIRED
#define ZAPHOD32_ALLOW_UNALIGNED_AND_LITTLE_ENDIAN 0
#else
#define ZAPHOD32_ALLOW_UNALIGNED_AND_LITTLE_ENDIAN 1
#endif
#endif
#ifndef U8TO32_LE
#if ZAPHOD32_ALLOW_UNALIGNED_AND_LITTLE_ENDIAN
#define U8TO32_LE(ptr) (*((const U32 *)(ptr)))
#else
#define U8TO32_LE(ptr) (\
(U32)(ptr)[3] << 24 | \
(U32)(ptr)[2] << 16 | \
(U32)(ptr)[1] << 8 | \
(U32)(ptr)[0] \
)
#endif
#endif
#ifndef U8TO16_LE
#if ZAPHOD32_ALLOW_UNALIGNED_AND_LITTLE_ENDIAN
#define U8TO16_LE(ptr) (*((const U16 *)(ptr)))
#else
#define U8TO16_LE(ptr) (\
(U16)(ptr)[1] << 8 | \
(U16)(ptr)[0] \
)
#endif
#endif
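/* Both loaders read the key little-endian: with the portable fallback, bytes
 * {0x01,0x02,0x03,0x04} load as 0x04030201 via U8TO32_LE. The direct-cast
 * variant gives the same result only on little-endian targets that tolerate
 * unaligned 32/16-bit loads, which is exactly what
 * ZAPHOD32_ALLOW_UNALIGNED_AND_LITTLE_ENDIAN asserts. */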
/* This is two Marsaglia xor-shift permutes, with a multiply by a prime
 * sandwiched in between. The end result of doing this twice with different
 * primes is a completely avalanched v. */
#define ZAPHOD32_SCRAMBLE32(v,prime) STMT_START { \
v ^= (v>>9); \
v ^= (v<<21); \
v ^= (v>>16); \
v *= prime; \
v ^= (v>>17); \
v ^= (v<<15); \
v ^= (v>>23); \
} STMT_END
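/* ZAPHOD32_SCRAMBLE32 is only used while seeding (see zaphod32_seed_state()
 * below); the per-key path uses ZAPHOD32_MIX and ZAPHOD32_FINALIZE. */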
#define ZAPHOD32_FINALIZE(v0,v1,v2) STMT_START { \
ZAPHOD32_WARN3("v0=%08x v1=%08x v2=%08x - ZAPHOD32 FINALIZE\n", \
(unsigned int)v0, (unsigned int)v1, (unsigned int)v2); \
v2 += v0; \
v1 -= v2; \
v1 = ROTL32(v1, 6); \
v2 ^= v1; \
v2 = ROTL32(v2, 28); \
v1 ^= v2; \
v0 += v1; \
v1 = ROTL32(v1, 24); \
v2 += v1; \
v2 = ROTL32(v2, 18) + v1; \
v0 ^= v2; \
v0 = ROTL32(v0, 20); \
v2 += v0; \
v1 ^= v2; \
v0 += v1; \
v0 = ROTL32(v0, 5); \
v2 += v0; \
v2 = ROTL32(v2, 22); \
v0 -= v1; \
v1 -= v2; \
v1 = ROTL32(v1, 17); \
} STMT_END
#define ZAPHOD32_MIX(v0,v1,v2,text) STMT_START { \
ZAPHOD32_WARN4("v0=%08x v1=%08x v2=%08x - ZAPHOD32 %s MIX\n", \
(unsigned int)v0,(unsigned int)v1,(unsigned int)v2, text ); \
v0 = ROTL32(v0,16) - v2; \
v1 = ROTR32(v1,13) ^ v2; \
v2 = ROTL32(v2,17) + v1; \
v0 = ROTR32(v0, 2) + v1; \
v1 = ROTR32(v1,17) - v0; \
v2 = ROTR32(v2, 7) ^ v0; \
} STMT_END
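/* ZAPHOD32_MIX is applied once per 8 bytes of key in the bulk loop of
 * zaphod32_hash_with_state(), and ZAPHOD32_FINALIZE performs the final
 * avalanche before the three state words are folded into the result. */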
ZAPHOD32_STATIC_INLINE
void zaphod32_seed_state (
const U8 *seed_ch,
U8 *state_ch
) {
const U32 *seed= (const U32 *)seed_ch;
U32 *state= (U32 *)state_ch;
/* hex expansion of pi, skipping first two digits. pi= 3.2[43f6...]*/
/* pi value in hex from here:
* http://turner.faculty.swau.edu/mathematics/materialslibrary/pi/pibases.html*/
/* Ensure that the three state vectors are nonzero regardless of the seed. */
/* The idea of these two steps is to ensure that the 0 state comes from a seed
* utterly unlike that of the value we replace it with.*/
state[0]= seed[0] ^ 0x43f6a888;
state[1]= seed[1] ^ 0x5a308d31;
state[2]= seed[2] ^ 0x3198a2e0;
if (!state[0]) state[0] = 1;
if (!state[1]) state[1] = 2;
if (!state[2]) state[2] = 4;
/* these are pseudo-randomly selected primes between 2**31 and 2**32
 * (I generated a big list and then randomly chose some from the list) */
ZAPHOD32_SCRAMBLE32(state[0],0x9fade23b);
ZAPHOD32_SCRAMBLE32(state[1],0xaa6f908d);
ZAPHOD32_SCRAMBLE32(state[2],0xcdf6b72d);
/* now that we have scrambled we do some mixing to avalanche the
 * state bits together */
ZAPHOD32_MIX(state[0],state[1],state[2],"ZAPHOD32 SEED-STATE A 1/4");
ZAPHOD32_MIX(state[0],state[1],state[2],"ZAPHOD32 SEED-STATE A 2/4");
ZAPHOD32_MIX(state[0],state[1],state[2],"ZAPHOD32 SEED-STATE A 3/4");
ZAPHOD32_MIX(state[0],state[1],state[2],"ZAPHOD32 SEED-STATE A 4/4");
/* and then scramble them again with different primes */
ZAPHOD32_SCRAMBLE32(state[0],0xc95d22a9);
ZAPHOD32_SCRAMBLE32(state[1],0x8497242b);
ZAPHOD32_SCRAMBLE32(state[2],0x9c5cc4e9);
/* and a thorough final mix */
ZAPHOD32_MIX(state[0],state[1],state[2],"ZAPHOD32 SEED-STATE B 1/5");
ZAPHOD32_MIX(state[0],state[1],state[2],"ZAPHOD32 SEED-STATE B 2/5");
ZAPHOD32_MIX(state[0],state[1],state[2],"ZAPHOD32 SEED-STATE B 3/5");
ZAPHOD32_MIX(state[0],state[1],state[2],"ZAPHOD32 SEED-STATE B 4/5");
ZAPHOD32_MIX(state[0],state[1],state[2],"ZAPHOD32 SEED-STATE B 5/5");
}
ZAPHOD32_STATIC_INLINE
U32 zaphod32_hash_with_state(
const U8 *state_ch,
const U8 *key,
const STRLEN key_len
) {
U32 *state= (U32 *)state_ch;
const U8 *end;
STRLEN len = key_len;
U32 v0= state[0];
U32 v1= state[1];
U32 v2= state[2] ^ (0xC41A7AB1 * (key_len + 1));
ZAPHOD32_WARN4("v0=%08x v1=%08x v2=%08x ln=%08x HASH START\n",
(unsigned int)state[0], (unsigned int)state[1],
(unsigned int)state[2], (unsigned int)key_len);
{
switch (len) {
default: goto zaphod32_read8;
case 12: v2 += (U32)key[11] << 24; /* FALLTHROUGH */
case 11: v2 += (U32)key[10] << 16; /* FALLTHROUGH */
case 10: v2 += (U32)U8TO16_LE(key+8);
v1 -= U8TO32_LE(key+4);
v0 += U8TO32_LE(key+0);
goto zaphod32_finalize;
case 9: v2 += (U32)key[8]; /* FALLTHROUGH */
case 8: v1 -= U8TO32_LE(key+4);
v0 += U8TO32_LE(key+0);
goto zaphod32_finalize;
case 7: v2 += (U32)key[6]; /* FALLTHROUGH */
case 6: v0 += (U32)U8TO16_LE(key+4);
v1 -= U8TO32_LE(key+0);
goto zaphod32_finalize;
case 5: v0 += (U32)key[4]; /* FALLTHROUGH */
case 4: v1 -= U8TO32_LE(key+0);
goto zaphod32_finalize;
case 3: v2 += (U32)key[2]; /* FALLTHROUGH */
case 2: v0 += (U32)U8TO16_LE(key);
break;
case 1: v0 += (U32)key[0];
break;
case 0: v2 ^= 0xFF;
break;
}
v0 -= v2;
v2 = ROTL32(v2, 8) ^ v0;
v0 = ROTR32(v0,16) + v2;
v2 += v0;
v0 += v0 >> 9;
v0 += v2;
v2 ^= v0;
v2 += v2 << 4;
v0 -= v2;
v2 = ROTR32(v2, 8) ^ v0;
v0 = ROTL32(v0,16) ^ v2;
v2 = ROTL32(v2,10) + v0;
v0 = ROTR32(v0,30) + v2;
v2 = ROTR32(v2,12);
return v0 ^ v2;
}
/* if (len >= 8) */ /* this block is only reached by a goto above, so this
                     * condition is commented out, but if the above block is
                     * removed it would be necessary to use this. */
{
zaphod32_read8:
len = key_len & 0x7;
end = key + key_len - len;
do {
v1 -= U8TO32_LE(key+0);
v0 += U8TO32_LE(key+4);
ZAPHOD32_MIX(v0,v1,v2,"MIX 2-WORDS A");
key += 8;
} while ( key < end );
}
if ( len >= 4 ) {
v1 -= U8TO32_LE(key);
key += 4;
}
v0 += (U32)(key_len) << 24;
switch (len & 0x3) {
case 3: v2 += (U32)key[2]; /* FALLTHROUGH */
case 2: v0 += (U32)U8TO16_LE(key);
break;
case 1: v0 += (U32)key[0];
break;
case 0: v2 ^= 0xFF;
break;
}
zaphod32_finalize:
ZAPHOD32_FINALIZE(v0,v1,v2);
ZAPHOD32_WARN4("v0=%08x v1=%08x v2=%08x hh=%08x - FINAL\n\n",
(unsigned int)v0, (unsigned int)v1, (unsigned int)v2,
(unsigned int)v0 ^ v1 ^ v2);
return v0 ^ v1 ^ v2;
}
ZAPHOD32_STATIC_INLINE U32 zaphod32_hash(
const U8 *seed_ch,
const U8 *key,
const STRLEN key_len
) {
U32 state[3];
zaphod32_seed_state(seed_ch,(U8*)state);
return zaphod32_hash_with_state((U8*)state,key,key_len);
}
#endif /* ZAPHOD32_HASH_H */
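
/* Illustrative usage sketch, not part of the original header: it shows how a
 * caller seeds the state and hashes a key, via either the one-shot
 * zaphod32_hash() or the reusable zaphod32_seed_state() +
 * zaphod32_hash_with_state() pair. The ZAPHOD32_DEMO_MAIN guard and the
 * 12-byte demo seed are assumptions made for this example only. */
#ifdef ZAPHOD32_DEMO_MAIN
#include <stdio.h>
#include <string.h>

int main(void) {
    /* zaphod32_seed_state() reads three 32-bit words (12 bytes) of seed */
    U8 seed[12];
    memset(seed, 0x2a, sizeof(seed));   /* arbitrary demo seed */

    const char *key = "hello, world";
    STRLEN len = (STRLEN)strlen(key);

    /* one-shot: seed and hash in a single call */
    U32 h1 = zaphod32_hash(seed, (const U8 *)key, len);

    /* two-step: seed once, then hash any number of keys with the same state */
    U32 state[3];
    zaphod32_seed_state(seed, (U8 *)state);
    U32 h2 = zaphod32_hash_with_state((const U8 *)state, (const U8 *)key, len);

    printf("zaphod32_hash            = %08x\n", (unsigned int)h1);
    printf("zaphod32_hash_with_state = %08x\n", (unsigned int)h2);
    return h1 == h2 ? 0 : 1;   /* both paths must agree */
}
#endif /* ZAPHOD32_DEMO_MAIN */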