/* SipHash configuration and endianness shims.
 * SIP_HASH_STREAMING selects the incremental (init/update/final) API
 * in addition to the one-shot sip_hash13(). */
#ifndef SIP_HASH_STREAMING
 #define SIP_HASH_STREAMING 1
/* MinGW does not provide BYTE_ORDER; all Windows targets are
 * little-endian, so define the macros accordingly. */
#if defined(__MINGW32__)
 #define __LITTLE_ENDIAN LITTLE_ENDIAN
 #define __BIG_ENDIAN BIG_ENDIAN
 #define BYTE_ORDER __LITTLE_ENDIAN
#elif !defined(BYTE_ORDER)
/* Fall back to the glibc-style double-underscore macros. */
#define LITTLE_ENDIAN __LITTLE_ENDIAN
#define BIG_ENDIAN __BIG_ENDIAN
/* Only strict little- or big-endian hosts are handled below. */
#if BYTE_ORDER == LITTLE_ENDIAN
#elif BYTE_ORDER == BIG_ENDIAN
 #error "Only strictly little or big endian supported"
/* Decide whether unaligned word loads are safe on this target.
 * Known-good architectures are whitelisted; everything else defaults
 * to 0 (byte-by-byte access). */
#ifndef UNALIGNED_WORD_ACCESS
# if defined(__i386) || defined(__i386__) || defined(_M_IX86) || \
    defined(__x86_64) || defined(__x86_64__) || defined(_M_AMD64) || \
    defined(__powerpc64__) || defined(__aarch64__) || \
# define UNALIGNED_WORD_ACCESS 1
#ifndef UNALIGNED_WORD_ACCESS
# define UNALIGNED_WORD_ACCESS 0
/* Little-endian load/store macros and in-place 64-bit helpers.
 * The fragments below belong to U8TO32_LE / U32TO8_LE / U8TO64_LE /
 * U64TO8_LE: byte-wise accessors that work regardless of host
 * endianness or alignment. */
 (((uint32_t)((p)[0]) ) | ((uint32_t)((p)[1]) << 8) | \
 ((uint32_t)((p)[2]) << 16) | ((uint32_t)((p)[3]) << 24)) \
#define U32TO8_LE(p, v) \
 (p)[0] = (uint8_t)((v) ); \
 (p)[1] = (uint8_t)((v) >> 8); \
 (p)[2] = (uint8_t)((v) >> 16); \
 (p)[3] = (uint8_t)((v) >> 24); \
 ((uint64_t)U8TO32_LE(p) | ((uint64_t)U8TO32_LE((p) + 4)) << 32 )
/* Store a 64-bit value as eight little-endian bytes. */
#define U64TO8_LE(p, v) \
 U32TO8_LE((p), (uint32_t)((v) )); \
 U32TO8_LE((p) + 4, (uint32_t)((v) >> 32)); \
 ((v) << (s)) | ((v) >> (64 - (s)))
/* In-place rotate/add/xor on a native uint64_t (a two-uint32 fallback
 * for hosts without a 64-bit type is defined further below). */
#define ROTL64_TO(v, s) ((v) = ROTL64((v), (s)))
#define ADD64_TO(v, s) ((v) += (s))
#define XOR64_TO(v, s) ((v) ^= (s))
#define XOR64_INT(v, x) ((v) ^= (x))
/* Fallback branch: no native 64-bit integer type, so uint64_t here is
 * a struct of two uint32_t halves — note the .lo/.hi member access
 * below. */
#define U8TO64_LE(p) u8to64_le(p)
/* Load eight little-endian bytes into the {hi, lo} pair. */
u8to64_le(
    const uint8_t *p)
 ret.lo = U8TO32_LE(p);
 ret.hi = U8TO32_LE(p + 4);
#define U64TO8_LE(p, v) u64to8_le(p, v)
/* Store the {hi, lo} pair as eight little-endian bytes. */
u64to8_le(uint8_t *p, uint64_t v)
 U32TO8_LE(p + 4, v.hi);
/* Rotate helpers for the split {hi, lo} representation: rotations of
 * 32 or more are decomposed into a sub-32 rotate plus a half swap. */
#define ROTL64_TO(v, s) ((s) > 32 ? rotl64_swap(rotl64_to(&(v), (s) - 32)) : \
 (s) == 32 ? rotl64_swap(&(v)) : rotl64_to(&(v), (s)))
/* Rotate *v left by s bits (0 < s < 32); returns v for chaining. */
static inline uint64_t *
rotl64_to(uint64_t *v,
    unsigned int s)
 uint32_t uhi = (v->hi << s) | (v->lo >> (32 - s));
 uint32_t ulo = (v->lo << s) | (v->hi >> (32 - s));
/* Swap the 32-bit halves of *v — equivalent to a rotate by 32. */
static inline uint64_t *
rotl64_swap(uint64_t *v)
/* 64-bit add on the {hi, lo} pair, with manual carry propagation. */
#define ADD64_TO(v, s) add64_to(&(v), (s))
static inline uint64_t *
add64_to(uint64_t *v,
    const uint64_t s)
 /* Carry into the high word when the 32-bit low add wrapped. */
 if (v->lo < s.lo) v->hi++;
/* 64-bit xor on the {hi, lo} pair. */
#define XOR64_TO(v, s) xor64_to(&(v), (s))
static inline uint64_t *
xor64_to(uint64_t *v,
    const uint64_t s)
/* XOR a small integer constant into the low half only. */
#define XOR64_INT(v, x) ((v).lo ^= (x))
/* SipHash initialization constants "somepseudorandomlygeneratedbytes",
 * stored as little-endian 64-bit words: each 8-byte chunk appears
 * byte-reversed in the string literal below. */
} sip_init_state_bin = {
    "uespemos""modnarod""arenegyl""setybdet"};
#define sip_init_state sip_init_state_bin.u64
#if SIP_HASH_STREAMING
/* Method table for the streaming implementation; the public
 * sip_hash_* wrappers dispatch through h->methods. */
struct sip_interface_st {
 void (*init)(
    sip_state *s,
    const uint8_t *key);
 void (*update)(
    sip_state *s,
    const uint8_t *data,
    size_t len);
 void (*
final)(
    sip_state *s, uint64_t *digest);
/* Forward declarations of the concrete streaming implementation. */
static void int_sip_init(
    sip_state *state,
    const uint8_t *key);
static void int_sip_update(
    sip_state *state,
    const uint8_t *data,
    size_t len);
static void int_sip_final(
    sip_state *state, uint64_t *digest);
/* The single method table wired into every sip_hash instance. */
static const sip_interface sip_methods = {
/* One SipRound: the ARX (add-rotate-xor) mixing step applied to the
 * four 64-bit state words; rotation constants 13/16/32/17/21/32 are
 * from the SipHash specification. */
#define SIP_COMPRESS(v0, v1, v2, v3) \
 ADD64_TO((v0), (v1)); \
 ADD64_TO((v2), (v3)); \
 ROTL64_TO((v1), 13); \
 ROTL64_TO((v3), 16); \
 XOR64_TO((v1), (v0)); \
 XOR64_TO((v3), (v2)); \
 ROTL64_TO((v0), 32); \
 ADD64_TO((v2), (v1)); \
 ADD64_TO((v0), (v3)); \
 ROTL64_TO((v1), 17); \
 ROTL64_TO((v3), 21); \
 XOR64_TO((v1), (v2)); \
 XOR64_TO((v3), (v0)); \
 ROTL64_TO((v2), 32); \
#if SIP_HASH_STREAMING
/* Debug helper: print the four state words v0..v3. The first printf
 * is the native-uint64_t form; the second prints the {hi, lo}
 * fallback representation as two hex halves. */
 for (v = 0; v < 4; v++) {
 printf(
    "v%d: %" PRIx64
    "\n", v, state->v[v]);
 printf(
    "v%d: %" PRIx32
    "%.8" PRIx32
    "\n", v, state->v[v].hi, state->v[v].lo);
/* Key the SipHash state: v[i] = key word XOR spec constant. */
int_sip_init(
    sip_state *state,
    const uint8_t key[16])
 /* k1 = last 8 key bytes (the k0 load is outside this excerpt). */
 k1 = U8TO64_LE(key +
    sizeof(uint64_t));
 state->v[0] = k0; XOR64_TO(state->v[0], sip_init_state[0]);
 state->v[1] = k1; XOR64_TO(state->v[1], sip_init_state[1]);
 state->v[2] = k0; XOR64_TO(state->v[2], sip_init_state[2]);
 state->v[3] = k1; XOR64_TO(state->v[3], sip_init_state[3]);
 /* int_sip_round body: apply the compression round n times. */
 for (i = 0; i < n; i++) {
 SIP_COMPRESS(state->v[0], state->v[1], state->v[2], state->v[3]);
/* Absorb one 64-bit message word: xor into v3, run c rounds, then xor
 * into v0 — the standard SipHash message injection. */
int_sip_update_block(
    sip_state *state, uint64_t m)
 XOR64_TO(state->v[3], m);
 int_sip_round(state, state->c);
 XOR64_TO(state->v[0], m);
/* If a partial word is buffered from a previous update, top it up to
 * 8 bytes from *pdata and absorb it before bulk processing. */
int_sip_pre_update(
    sip_state *state,
    const uint8_t **pdata,
    size_t *plen)
 if (!state->buflen)
    return;
 to_read =
    sizeof(uint64_t) - state->buflen;
 memcpy(state->buf + state->buflen, *pdata, to_read);
 m = U8TO64_LE(state->buf);
 int_sip_update_block(state, m);
/* After whole-word absorption, stash the trailing 0..7 bytes in
 * state->buf for the next update or for final padding. */
int_sip_post_update(
    sip_state *state,
    const uint8_t *data,
    size_t len)
 uint8_t r = len %
    sizeof(uint64_t);
 memcpy(state->buf, data + len - r, r);
/* Streaming update: absorb len bytes, 64 bits at a time.
 * msglen_byte accumulates the total length mod 256 for the final
 * padding byte. */
int_sip_update(
    sip_state *state,
    const uint8_t *data,
    size_t len)
 state->msglen_byte = state->msglen_byte + (len % 256);
 /* NOTE(review): casting data to uint64_t* assumes suitable
  * alignment — presumably guaranteed by callers; verify. */
 data64 = (uint64_t *) data;
 int_sip_pre_update(state, &data, &len);
 end = data64 + (len /
    sizeof(uint64_t));
/* Little-endian hosts read words directly; big-endian hosts go
 * through the byte-wise U8TO64_LE loader. */
#if BYTE_ORDER == LITTLE_ENDIAN
 while (data64 != end) {
 int_sip_update_block(state, *data64++);
#elif BYTE_ORDER == BIG_ENDIAN
 uint8_t *data8 = data;
 for (; data8 != (uint8_t *) end; data8 +=
    sizeof(uint64_t)) {
 m = U8TO64_LE(data8);
 int_sip_update_block(state, m);
 int_sip_post_update(state, data, len);
 /* int_sip_pad_final_block body: zero-fill the tail of the last word
  * and put the message length (mod 256) in the top byte, per spec. */
 for (i = state->buflen; i <
    sizeof(uint64_t); i++) {
 state->buf[i] = 0x00;
 state->buf[
    sizeof(uint64_t) - 1] = state->msglen_byte;
/* Finalize: absorb the padded last block, xor 0xff into v2, run d
 * finalization rounds, and fold v0^v1^v2^v3 into *digest. */
int_sip_final(
    sip_state *state, uint64_t *digest)
 int_sip_pad_final_block(state);
 m = U8TO64_LE(state->buf);
 int_sip_update_block(state, m);
 XOR64_INT(state->v[2], 0xff);
 int_sip_round(state, state->d);
 *digest = state->v[0];
 XOR64_TO(*digest, state->v[1]);
 XOR64_TO(*digest, state->v[2]);
 XOR64_TO(*digest, state->v[3]);
/* Create a sip_hash keyed with the 16-byte key and round counts
 * c (compression) and d (finalization); the allocation of h is
 * outside this excerpt. */
sip_hash_new(
    const uint8_t key[16],
    int c,
    int d)
 return sip_hash_init(h, key, c, d);
/* Initialize an existing sip_hash: reset buffering state, install the
 * streaming method table, and key the internal state. */
sip_hash_init(
    sip_hash *h,
    const uint8_t key[16],
    int c,
    int d)
 h->state->buflen = 0;
 h->state->msglen_byte = 0;
 h->methods = &sip_methods;
 h->methods->init(h->state, key);
/* Feed len bytes of msg into the hash via the method table. */
sip_hash_update(
    sip_hash *h,
    const uint8_t *msg,
    size_t len)
 h->methods->update(h->state, msg, len);
/* Finish hashing and return the 8-byte digest in a freshly malloc'd
 * buffer (*digest, little-endian); caller owns and must free it.
 * Returns 0 on allocation failure. */
sip_hash_final(
    sip_hash *h, uint8_t **digest,
    size_t* len)
 h->methods->final(h->state, &digest64);
 if (!(ret = (uint8_t *)malloc(
    sizeof(uint64_t))))
    return 0;
 U64TO8_LE(ret, digest64);
 *len =
    sizeof(uint64_t);
/* Finish hashing, writing the digest as a native uint64_t. */
sip_hash_final_integer(
    sip_hash *h, uint64_t *digest)
 h->methods->final(h->state, digest);
/* Convenience: update with data, then finalize into a malloc'd
 * digest. Returns 0 if either step fails. */
sip_hash_digest(
    sip_hash *h,
    const uint8_t *data,
    size_t data_len, uint8_t **digest,
    size_t *digest_len)
 if (!sip_hash_update(h, data, data_len))
    return 0;
 return sip_hash_final(h, digest, digest_len);
/* Convenience: update with data, then finalize into a uint64_t. */
sip_hash_digest_integer(
    sip_hash *h,
    const uint8_t *data,
    size_t data_len, uint64_t *digest)
 if (!sip_hash_update(h, data, data_len))
    return 0;
 return sip_hash_final_integer(h, digest);
 /* NOTE(review): the call below belongs to a separate debug dump
  * routine whose signature lies outside this excerpt. */
 int_sip_dump(h->state);
/* One-shot message injection for sip_hash13(): xor m into v3, one
 * compression round, then xor m into v0. */
#define SIP_ROUND(m, v0, v1, v2, v3) \
 XOR64_TO((v3), (m)); \
 SIP_COMPRESS(v0, v1, v2, v3); \
 XOR64_TO((v0), (m)); \
/* One-shot SipHash-1-3: one compression round per message word and
 * three finalization rounds (the trailing finalization xors/return
 * lie past the end of this excerpt). */
sip_hash13(
    const uint8_t key[16],
    const uint8_t *data,
    size_t len)
 uint64_t v0, v1, v2, v3;
 /* end marks the last whole 8-byte word boundary of the input. */
 const uint8_t *end = data + len - (len %
    sizeof(uint64_t));
 k1 = U8TO64_LE(key +
    sizeof(uint64_t));
 /* Key the state with the spec constants, as in int_sip_init(). */
 v0 = k0; XOR64_TO(v0, sip_init_state[0]);
 v1 = k1; XOR64_TO(v1, sip_init_state[1]);
 v2 = k0; XOR64_TO(v2, sip_init_state[2]);
 v3 = k1; XOR64_TO(v3, sip_init_state[3]);
/* Fast path: direct word loads when endianness and alignment allow;
 * otherwise byte-wise loads via U8TO64_LE. */
#if BYTE_ORDER == LITTLE_ENDIAN && UNALIGNED_WORD_ACCESS
 uint64_t *data64 = (uint64_t *)data;
 while (data64 != (uint64_t *) end) {
 SIP_ROUND(m, v0, v1, v2, v3);
 for (; data != end; data +=
    sizeof(uint64_t)) {
 SIP_ROUND(m, v0, v1, v2, v3);
 /* Last block: message length (mod 256) goes in the top byte; the
  * 0..7 trailing bytes are folded in below via OR_BYTE. */
 last = (uint64_t)len << 56;
#define OR_BYTE(n) (last |= ((uint64_t) end[n]) << ((n) * 8))
/* {hi, lo} fallback: route byte n into the proper 32-bit half. */
#define OR_BYTE(n) do { \
 last.hi |= ((uint32_t) end[n]) << ((n) >= 4 ? (n) * 8 - 32 : 0); \
 last.lo |= ((uint32_t) end[n]) << ((n) >= 4 ? 0 : (n) * 8); \
 switch (len %
    sizeof(uint64_t)) {
#if BYTE_ORDER == LITTLE_ENDIAN && UNALIGNED_WORD_ACCESS
 /* Word-sized shortcut for the low four bytes of the tail. */
 last |= (uint64_t) ((uint32_t *) end)[0];
 last.lo |= ((uint32_t *) end)[0];
 /* Absorb the final block, then run 3 finalization rounds. */
 SIP_ROUND(last, v0, v1, v2, v3);
 SIP_COMPRESS(v0, v1, v2, v3);
 SIP_COMPRESS(v0, v1, v2, v3);
 SIP_COMPRESS(v0, v1, v2, v3);