line |
stmt |
bran |
cond |
sub |
pod |
time |
code |
1
|
|
|
|
|
|
|
#ifndef STADTX_HASH_H |
2
|
|
|
|
|
|
|
#define STADTX_HASH_H |
3
|
|
|
|
|
|
|
|
4
|
|
|
|
|
|
|
#ifndef DEBUG_STADTX_HASH |
5
|
|
|
|
|
|
|
#define DEBUG_STADTX_HASH 0 |
6
|
|
|
|
|
|
|
#endif |
7
|
|
|
|
|
|
|
|
8
|
|
|
|
|
|
|
#ifndef _ROTL_SIZED |
9
|
|
|
|
|
|
|
#define _ROTL_SIZED(x,r,s) ( ((x) << (r)) | ((x) >> ((s) - (r))) ) |
10
|
|
|
|
|
|
|
#endif |
11
|
|
|
|
|
|
|
#ifndef ROTL64 |
12
|
|
|
|
|
|
|
#define ROTL64(x,r) _ROTL_SIZED(x,r,64) |
13
|
|
|
|
|
|
|
#endif |
14
|
|
|
|
|
|
|
#ifndef _ROTR_SIZED |
15
|
|
|
|
|
|
|
#define _ROTR_SIZED(x,r,s) ( ((x) << ((s) - (r))) | ((x) >> (r)) ) |
16
|
|
|
|
|
|
|
#endif |
17
|
|
|
|
|
|
|
#ifndef ROTR64 |
18
|
|
|
|
|
|
|
#define ROTR64(x,r) _ROTR_SIZED(x,r,64) |
19
|
|
|
|
|
|
|
#endif |
20
|
|
|
|
|
|
|
|
21
|
|
|
|
|
|
|
#ifndef H_PERL |
22
|
|
|
|
|
|
|
|
23
|
|
|
|
|
|
|
#if !defined(U64) |
24
|
|
|
|
|
|
|
#include <stdint.h> |
25
|
|
|
|
|
|
|
#define U64 uint64_t |
26
|
|
|
|
|
|
|
#endif |
27
|
|
|
|
|
|
|
|
28
|
|
|
|
|
|
|
#if !defined(U32) |
29
|
|
|
|
|
|
|
#define U32 uint32_t |
30
|
|
|
|
|
|
|
#endif |
31
|
|
|
|
|
|
|
|
32
|
|
|
|
|
|
|
#if !defined(U8) |
33
|
|
|
|
|
|
|
#define U8 unsigned char |
34
|
|
|
|
|
|
|
#endif |
35
|
|
|
|
|
|
|
|
36
|
|
|
|
|
|
|
#if !defined(U16) |
37
|
|
|
|
|
|
|
#define U16 uint16_t |
38
|
|
|
|
|
|
|
#endif |
39
|
|
|
|
|
|
|
|
40
|
|
|
|
|
|
|
#ifndef STRLEN |
41
|
|
|
|
|
|
|
#define STRLEN int |
42
|
|
|
|
|
|
|
#endif |
43
|
|
|
|
|
|
|
#else |
44
|
|
|
|
|
|
|
#define U64 U64TYPE |
45
|
|
|
|
|
|
|
#endif |
46
|
|
|
|
|
|
|
|
47
|
|
|
|
|
|
|
#ifndef STADTX_STATIC_INLINE |
48
|
|
|
|
|
|
|
#ifdef PERL_STATIC_INLINE |
49
|
|
|
|
|
|
|
#define STADTX_STATIC_INLINE PERL_STATIC_INLINE |
50
|
|
|
|
|
|
|
#else |
51
|
|
|
|
|
|
|
#define STADTX_STATIC_INLINE static inline |
52
|
|
|
|
|
|
|
#endif |
53
|
|
|
|
|
|
|
#endif |
54
|
|
|
|
|
|
|
|
55
|
|
|
|
|
|
|
#ifndef STMT_START |
56
|
|
|
|
|
|
|
#define STMT_START do |
57
|
|
|
|
|
|
|
#define STMT_END while(0) |
58
|
|
|
|
|
|
|
#endif |
59
|
|
|
|
|
|
|
|
60
|
|
|
|
|
|
|
#ifndef STADTX_ALLOW_UNALIGNED_AND_LITTLE_ENDIAN |
61
|
|
|
|
|
|
|
#define STADTX_ALLOW_UNALIGNED_AND_LITTLE_ENDIAN 1 |
62
|
|
|
|
|
|
|
#endif |
63
|
|
|
|
|
|
|
|
64
|
|
|
|
|
|
|
#if STADTX_ALLOW_UNALIGNED_AND_LITTLE_ENDIAN |
65
|
|
|
|
|
|
|
#ifndef U8TO64_LE |
66
|
|
|
|
|
|
|
#define U8TO64_LE(ptr) (*((const U64 *)(ptr))) |
67
|
|
|
|
|
|
|
#endif |
68
|
|
|
|
|
|
|
#ifndef U8TO32_LE |
69
|
|
|
|
|
|
|
#define U8TO32_LE(ptr) (*((const U32 *)(ptr))) |
70
|
|
|
|
|
|
|
#endif |
71
|
|
|
|
|
|
|
#ifndef U8TO16_LE |
72
|
|
|
|
|
|
|
#define U8TO16_LE(ptr) (*((const U16 *)(ptr))) |
73
|
|
|
|
|
|
|
#endif |
74
|
|
|
|
|
|
|
#else |
75
|
|
|
|
|
|
|
#ifndef U8TO64_LE |
76
|
|
|
|
|
|
|
#define U8TO64_LE(ptr) (\ |
77
|
|
|
|
|
|
|
(U64)(ptr)[7] << 56 | \ |
78
|
|
|
|
|
|
|
(U64)(ptr)[6] << 48 | \ |
79
|
|
|
|
|
|
|
(U64)(ptr)[5] << 40 | \ |
80
|
|
|
|
|
|
|
(U64)(ptr)[4] << 32 | \ |
81
|
|
|
|
|
|
|
(U64)(ptr)[3] << 24 | \ |
82
|
|
|
|
|
|
|
(U64)(ptr)[2] << 16 | \ |
83
|
|
|
|
|
|
|
(U64)(ptr)[1] << 8 | \ |
84
|
|
|
|
|
|
|
(U64)(ptr)[0] \ |
85
|
|
|
|
|
|
|
) |
86
|
|
|
|
|
|
|
#endif |
87
|
|
|
|
|
|
|
#ifndef U8TO32_LE |
88
|
|
|
|
|
|
|
#define U8TO32_LE(ptr) (\ |
89
|
|
|
|
|
|
|
(U32)(ptr)[3] << 24 | \ |
90
|
|
|
|
|
|
|
(U32)(ptr)[2] << 16 | \ |
91
|
|
|
|
|
|
|
(U32)(ptr)[1] << 8 | \ |
92
|
|
|
|
|
|
|
(U32)(ptr)[0] \ |
93
|
|
|
|
|
|
|
) |
94
|
|
|
|
|
|
|
#endif |
95
|
|
|
|
|
|
|
#ifndef U8TO16_LE |
96
|
|
|
|
|
|
|
#define U8TO16_LE(ptr) (\ |
97
|
|
|
|
|
|
|
(U16)(ptr)[1] << 8 | \ |
98
|
|
|
|
|
|
|
(U16)(ptr)[0] \ |
99
|
|
|
|
|
|
|
) |
100
|
|
|
|
|
|
|
#endif |
101
|
|
|
|
|
|
|
#endif |
102
|
|
|
|
|
|
|
|
103
|
|
|
|
|
|
|
/* do a marsaglia xor-shift permutation followed by a |
104
|
|
|
|
|
|
|
* multiply by a prime (presumably large) and another |
105
|
|
|
|
|
|
|
* marsaglia xor-shift permutation. |
106
|
|
|
|
|
|
|
* One of these thoroughly changes the bits of the input. |
107
|
|
|
|
|
|
|
* Two of these with different primes passes the Strict Avalanche Criteria |
108
|
|
|
|
|
|
|
* in all the tests I did. |
109
|
|
|
|
|
|
|
* |
110
|
|
|
|
|
|
|
* Note that v cannot end up zero after a scramble64 unless it |
111
|
|
|
|
|
|
|
* was zero in the first place. |
112
|
|
|
|
|
|
|
*/ |
113
|
|
|
|
|
|
|
#define STADTX_SCRAMBLE64(v,prime) STMT_START { \ |
114
|
|
|
|
|
|
|
v ^= (v >> 13); \ |
115
|
|
|
|
|
|
|
v ^= (v << 35); \ |
116
|
|
|
|
|
|
|
v ^= (v >> 30); \ |
117
|
|
|
|
|
|
|
v *= prime; \ |
118
|
|
|
|
|
|
|
v ^= (v >> 19); \ |
119
|
|
|
|
|
|
|
v ^= (v << 15); \ |
120
|
|
|
|
|
|
|
v ^= (v >> 46); \ |
121
|
|
|
|
|
|
|
} STMT_END |
122
|
|
|
|
|
|
|
|
123
|
|
|
|
|
|
|
|
124
|
91
|
|
|
|
|
|
STADTX_STATIC_INLINE void stadtx_seed_state ( |
125
|
|
|
|
|
|
|
const U8 *seed_ch, |
126
|
|
|
|
|
|
|
U8 *state_ch |
127
|
|
|
|
|
|
|
) { |
128
|
91
|
|
|
|
|
|
U64 *seed= (U64 *)seed_ch; |
129
|
91
|
|
|
|
|
|
U64 *state= (U64 *)state_ch; |
130
|
|
|
|
|
|
|
/* first we apply two masks to each word of the seed, this means that |
131
|
|
|
|
|
|
|
* a) at least one of state[0] and state[2] is nonzero, |
132
|
|
|
|
|
|
|
* b) at least one of state[1] and state[3] is nonzero |
133
|
|
|
|
|
|
|
* c) that state[0] and state[2] are different |
134
|
|
|
|
|
|
|
* d) that state[1] and state[3] are different |
135
|
|
|
|
|
|
|
* e) that the replacement value for any zero's is a totally different from the seed value. |
136
|
|
|
|
|
|
|
* (iow, if seed[0] is 0x43f6a8885a308d31UL then state[0] becomes 0, which is the replaced |
137
|
|
|
|
|
|
|
* with 1, which is totally different.). */ |
138
|
|
|
|
|
|
|
/* hex expansion of pi, skipping first two digits. pi= 3.2[43f6...]*/ |
139
|
|
|
|
|
|
|
/* pi value in hex from here: |
140
|
|
|
|
|
|
|
* http://turner.faculty.swau.edu/mathematics/materialslibrary/pi/pibases.html*/ |
141
|
91
|
|
|
|
|
|
state[0]= seed[0] ^ 0x43f6a8885a308d31UL; |
142
|
91
|
|
|
|
|
|
state[1]= seed[1] ^ 0x3198a2e03707344aUL; |
143
|
91
|
|
|
|
|
|
state[2]= seed[0] ^ 0x4093822299f31d00UL; |
144
|
91
|
|
|
|
|
|
state[3]= seed[1] ^ 0x82efa98ec4e6c894UL; |
145
|
91
|
50
|
|
|
|
|
if (!state[0]) state[0]=1; |
146
|
91
|
50
|
|
|
|
|
if (!state[1]) state[1]=2; |
147
|
91
|
50
|
|
|
|
|
if (!state[2]) state[2]=4; |
148
|
91
|
50
|
|
|
|
|
if (!state[3]) state[3]=8; |
149
|
|
|
|
|
|
|
/* and now for good measure we double scramble all four - |
150
|
|
|
|
|
|
|
* a double scramble guarantees a complete avalanche of all the |
151
|
|
|
|
|
|
|
* bits in the seed - IOW, by the time we are hashing the |
152
|
|
|
|
|
|
|
* four state vectors should be completely different and utterly |
153
|
|
|
|
|
|
|
* uncognizable from the input seed bits */ |
154
|
91
|
|
|
|
|
|
STADTX_SCRAMBLE64(state[0],0x801178846e899d17UL); |
155
|
91
|
|
|
|
|
|
STADTX_SCRAMBLE64(state[0],0xdd51e5d1c9a5a151UL); |
156
|
91
|
|
|
|
|
|
STADTX_SCRAMBLE64(state[1],0x93a7d6c8c62e4835UL); |
157
|
91
|
|
|
|
|
|
STADTX_SCRAMBLE64(state[1],0x803340f36895c2b5UL); |
158
|
91
|
|
|
|
|
|
STADTX_SCRAMBLE64(state[2],0xbea9344eb7565eebUL); |
159
|
91
|
|
|
|
|
|
STADTX_SCRAMBLE64(state[2],0xcd95d1e509b995cdUL); |
160
|
91
|
|
|
|
|
|
STADTX_SCRAMBLE64(state[3],0x9999791977e30c13UL); |
161
|
91
|
|
|
|
|
|
STADTX_SCRAMBLE64(state[3],0xaab8b6b05abfc6cdUL); |
162
|
91
|
|
|
|
|
|
} |
163
|
|
|
|
|
|
|
|
164
|
|
|
|
|
|
|
#define STADTX_K0_U64 0xb89b0f8e1655514fUL |
165
|
|
|
|
|
|
|
#define STADTX_K1_U64 0x8c6f736011bd5127UL |
166
|
|
|
|
|
|
|
#define STADTX_K2_U64 0x8f29bd94edce7b39UL |
167
|
|
|
|
|
|
|
#define STADTX_K3_U64 0x9c1b8e1e9628323fUL |
168
|
|
|
|
|
|
|
|
169
|
|
|
|
|
|
|
#define STADTX_K2_U32 0x802910e3 |
170
|
|
|
|
|
|
|
#define STADTX_K3_U32 0x819b13af |
171
|
|
|
|
|
|
|
#define STADTX_K4_U32 0x91cb27e5 |
172
|
|
|
|
|
|
|
#define STADTX_K5_U32 0xc1a269c1 |
173
|
|
|
|
|
|
|
|
174
|
2257003
|
|
|
|
|
|
STADTX_STATIC_INLINE U64 stadtx_hash_with_state( |
175
|
|
|
|
|
|
|
const U8 *state_ch, |
176
|
|
|
|
|
|
|
const U8 *key, |
177
|
|
|
|
|
|
|
const STRLEN key_len |
178
|
|
|
|
|
|
|
) { |
179
|
2257003
|
|
|
|
|
|
U64 *state= (U64 *)state_ch; |
180
|
2257003
|
|
|
|
|
|
U64 len = key_len; |
181
|
2257003
|
|
|
|
|
|
U64 v0= state[0] ^ ((key_len+1) * STADTX_K0_U64); |
182
|
2257003
|
|
|
|
|
|
U64 v1= state[1] ^ ((key_len+2) * STADTX_K1_U64); |
183
|
2257003
|
100
|
|
|
|
|
if (len < 32) { |
184
|
2256572
|
|
|
|
|
|
switch(len >> 3) { |
185
|
|
|
|
|
|
|
case 3: |
186
|
0
|
|
|
|
|
|
v0 += U8TO64_LE(key) * STADTX_K3_U64; |
187
|
0
|
|
|
|
|
|
v0= ROTR64(v0, 17) ^ v1; |
188
|
0
|
|
|
|
|
|
v1= ROTR64(v1, 53) + v0; |
189
|
0
|
|
|
|
|
|
key += 8; |
190
|
|
|
|
|
|
|
case 2: |
191
|
0
|
|
|
|
|
|
v0 += U8TO64_LE(key) * STADTX_K3_U64; |
192
|
0
|
|
|
|
|
|
v0= ROTR64(v0, 17) ^ v1; |
193
|
0
|
|
|
|
|
|
v1= ROTR64(v1, 53) + v0; |
194
|
0
|
|
|
|
|
|
key += 8; |
195
|
|
|
|
|
|
|
case 1: |
196
|
141
|
|
|
|
|
|
v0 += U8TO64_LE(key) * STADTX_K3_U64; |
197
|
141
|
|
|
|
|
|
v0= ROTR64(v0, 17) ^ v1; |
198
|
141
|
|
|
|
|
|
v1= ROTR64(v1, 53) + v0; |
199
|
141
|
|
|
|
|
|
key += 8; |
200
|
|
|
|
|
|
|
case 0: |
201
|
2256572
|
|
|
|
|
|
default: break; |
202
|
|
|
|
|
|
|
} |
203
|
2256572
|
|
|
|
|
|
switch ( len & 0x7 ) { |
204
|
3
|
|
|
|
|
|
case 7: v0 += (U64)key[6] << 32; |
205
|
9
|
|
|
|
|
|
case 6: v1 += (U64)key[5] << 48; |
206
|
1800054
|
|
|
|
|
|
case 5: v0 += (U64)key[4] << 16; |
207
|
2205102
|
|
|
|
|
|
case 4: v1 += (U64)U8TO32_LE(key); |
208
|
2205102
|
|
|
|
|
|
break; |
209
|
40779
|
|
|
|
|
|
case 3: v0 += (U64)key[2] << 48; |
210
|
49048
|
|
|
|
|
|
case 2: v1 += (U64)U8TO16_LE(key); |
211
|
49048
|
|
|
|
|
|
break; |
212
|
2422
|
|
|
|
|
|
case 1: v0 += (U64)key[0]; |
213
|
2422
|
|
|
|
|
|
case 0: v1 = ROTL64(v1, 32) ^ 0xFF; |
214
|
2422
|
|
|
|
|
|
break; |
215
|
|
|
|
|
|
|
} |
216
|
2256572
|
|
|
|
|
|
v1 ^= v0; |
217
|
2256572
|
|
|
|
|
|
v0 = ROTR64(v0,33) + v1; |
218
|
2256572
|
|
|
|
|
|
v1 = ROTL64(v1,17) ^ v0; |
219
|
2256572
|
|
|
|
|
|
v0 = ROTL64(v0,43) + v1; |
220
|
2256572
|
|
|
|
|
|
v1 = ROTL64(v1,31) - v0; |
221
|
2256572
|
|
|
|
|
|
v0 = ROTL64(v0,13) ^ v1; |
222
|
2256572
|
|
|
|
|
|
v1 -= v0; |
223
|
2256572
|
|
|
|
|
|
v0 = ROTL64(v0,41) + v1; |
224
|
2256572
|
|
|
|
|
|
v1 = ROTL64(v1,37) ^ v0; |
225
|
2256572
|
|
|
|
|
|
v0 = ROTR64(v0,39) + v1; |
226
|
2256572
|
|
|
|
|
|
v1 = ROTR64(v1,15) + v0; |
227
|
2256572
|
|
|
|
|
|
v0 = ROTL64(v0,15) ^ v1; |
228
|
2256572
|
|
|
|
|
|
v1 = ROTR64(v1, 5); |
229
|
2256572
|
|
|
|
|
|
return v0 ^ v1; |
230
|
|
|
|
|
|
|
} else { |
231
|
431
|
|
|
|
|
|
U64 v2= state[2] ^ ((key_len+3) * STADTX_K2_U64); |
232
|
431
|
|
|
|
|
|
U64 v3= state[3] ^ ((key_len+4) * STADTX_K3_U64); |
233
|
|
|
|
|
|
|
|
234
|
|
|
|
|
|
|
do { |
235
|
597774
|
|
|
|
|
|
v0 += (U64)U8TO64_LE(key+ 0) * STADTX_K2_U32; v0= ROTL64(v0,57) ^ v3; |
236
|
597774
|
|
|
|
|
|
v1 += (U64)U8TO64_LE(key+ 8) * STADTX_K3_U32; v1= ROTL64(v1,63) ^ v2; |
237
|
597774
|
|
|
|
|
|
v2 += (U64)U8TO64_LE(key+16) * STADTX_K4_U32; v2= ROTR64(v2,47) + v0; |
238
|
597774
|
|
|
|
|
|
v3 += (U64)U8TO64_LE(key+24) * STADTX_K5_U32; v3= ROTR64(v3,11) - v1; |
239
|
597774
|
|
|
|
|
|
key += 32; |
240
|
597774
|
|
|
|
|
|
len -= 32; |
241
|
597774
|
100
|
|
|
|
|
} while ( len >= 32 ); |
242
|
|
|
|
|
|
|
|
243
|
431
|
|
|
|
|
|
switch ( len >> 3 ) { |
244
|
0
|
|
|
|
|
|
case 3: v0 += ((U64)U8TO64_LE(key) * STADTX_K2_U32); key += 8; v0= ROTL64(v0,57) ^ v3; |
245
|
102
|
|
|
|
|
|
case 2: v1 += ((U64)U8TO64_LE(key) * STADTX_K3_U32); key += 8; v1= ROTL64(v1,63) ^ v2; |
246
|
102
|
|
|
|
|
|
case 1: v2 += ((U64)U8TO64_LE(key) * STADTX_K4_U32); key += 8; v2= ROTR64(v2,47) + v0; |
247
|
431
|
|
|
|
|
|
case 0: v3 = ROTR64(v3,11) - v1; |
248
|
|
|
|
|
|
|
} |
249
|
431
|
|
|
|
|
|
v0 ^= (len+1) * STADTX_K3_U64; |
250
|
431
|
|
|
|
|
|
switch ( len & 0x7 ) { |
251
|
0
|
|
|
|
|
|
case 7: v1 += (U64)key[6]; |
252
|
0
|
|
|
|
|
|
case 6: v2 += (U64)U8TO16_LE(key+4); |
253
|
0
|
|
|
|
|
|
v3 += (U64)U8TO32_LE(key); |
254
|
0
|
|
|
|
|
|
break; |
255
|
0
|
|
|
|
|
|
case 5: v1 += (U64)key[4]; |
256
|
0
|
|
|
|
|
|
case 4: v2 += (U64)U8TO32_LE(key); |
257
|
0
|
|
|
|
|
|
break; |
258
|
0
|
|
|
|
|
|
case 3: v3 += (U64)key[2]; |
259
|
0
|
|
|
|
|
|
case 2: v1 += (U64)U8TO16_LE(key); |
260
|
0
|
|
|
|
|
|
break; |
261
|
0
|
|
|
|
|
|
case 1: v2 += (U64)key[0]; |
262
|
431
|
|
|
|
|
|
case 0: v3 = ROTL64(v3, 32) ^ 0xFF; |
263
|
431
|
|
|
|
|
|
break; |
264
|
|
|
|
|
|
|
} |
265
|
|
|
|
|
|
|
|
266
|
431
|
|
|
|
|
|
v1 -= v2; |
267
|
431
|
|
|
|
|
|
v0 = ROTR64(v0,19); |
268
|
431
|
|
|
|
|
|
v1 -= v0; |
269
|
431
|
|
|
|
|
|
v1 = ROTR64(v1,53); |
270
|
431
|
|
|
|
|
|
v3 ^= v1; |
271
|
431
|
|
|
|
|
|
v0 -= v3; |
272
|
431
|
|
|
|
|
|
v3 = ROTL64(v3,43); |
273
|
431
|
|
|
|
|
|
v0 += v3; |
274
|
431
|
|
|
|
|
|
v0 = ROTR64(v0, 3); |
275
|
431
|
|
|
|
|
|
v3 -= v0; |
276
|
431
|
|
|
|
|
|
v2 = ROTR64(v2,43) - v3; |
277
|
431
|
|
|
|
|
|
v2 = ROTL64(v2,55) ^ v0; |
278
|
431
|
|
|
|
|
|
v1 -= v2; |
279
|
431
|
|
|
|
|
|
v3 = ROTR64(v3, 7) - v2; |
280
|
431
|
|
|
|
|
|
v2 = ROTR64(v2,31); |
281
|
431
|
|
|
|
|
|
v3 += v2; |
282
|
431
|
|
|
|
|
|
v2 -= v1; |
283
|
431
|
|
|
|
|
|
v3 = ROTR64(v3,39); |
284
|
431
|
|
|
|
|
|
v2 ^= v3; |
285
|
431
|
|
|
|
|
|
v3 = ROTR64(v3,17) ^ v2; |
286
|
431
|
|
|
|
|
|
v1 += v3; |
287
|
431
|
|
|
|
|
|
v1 = ROTR64(v1, 9); |
288
|
431
|
|
|
|
|
|
v2 ^= v1; |
289
|
431
|
|
|
|
|
|
v2 = ROTL64(v2,24); |
290
|
431
|
|
|
|
|
|
v3 ^= v2; |
291
|
431
|
|
|
|
|
|
v3 = ROTR64(v3,59); |
292
|
431
|
|
|
|
|
|
v0 = ROTR64(v0, 1) - v1; |
293
|
|
|
|
|
|
|
|
294
|
431
|
|
|
|
|
|
return v0 ^ v1 ^ v2 ^ v3; |
295
|
|
|
|
|
|
|
} |
296
|
|
|
|
|
|
|
} |
297
|
|
|
|
|
|
|
|
298
|
|
|
|
|
|
|
STADTX_STATIC_INLINE U64 stadtx_hash( |
299
|
|
|
|
|
|
|
const U8 *seed_ch, |
300
|
|
|
|
|
|
|
const U8 *key, |
301
|
|
|
|
|
|
|
const STRLEN key_len |
302
|
|
|
|
|
|
|
) { |
303
|
|
|
|
|
|
|
U64 state[4]; |
304
|
|
|
|
|
|
|
stadtx_seed_state(seed_ch,(U8*)state); |
305
|
|
|
|
|
|
|
return stadtx_hash_with_state((U8*)state,key,key_len); |
306
|
|
|
|
|
|
|
} |
307
|
|
|
|
|
|
|
|
308
|
|
|
|
|
|
|
#endif |