Rizin
unix-like reverse engineering framework and cli tools
xxhash.c
Go to the documentation of this file.
1 // SPDX-FileCopyrightText: 2012-2016, Yann Collet
2 // SPDX-License-Identifier: BSD-2-Clause
3 
4 /*
5 * xxHash - Fast Hash algorithm
6 * Copyright (C) 2012-2016, Yann Collet
7 *
8 * BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php)
9 *
10 * Redistribution and use in source and binary forms, with or without
11 * modification, are permitted provided that the following conditions are
12 * met:
13 *
14 * * Redistributions of source code must retain the above copyright
15 * notice, this list of conditions and the following disclaimer.
16 * * Redistributions in binary form must reproduce the above
17 * copyright notice, this list of conditions and the following disclaimer
18 * in the documentation and/or other materials provided with the
19 * distribution.
20 *
21 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
22 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
23 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
24 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
25 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
26 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
27 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
28 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
29 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
30 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
31 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
32 *
33 * You can contact the author at :
34 * - xxHash homepage: http://www.xxhash.com
35 * - xxHash source repository : https://github.com/Cyan4973/xxHash
36 */
37 
38 
39 /* *************************************
40 * Tuning parameters
41 ***************************************/
55 #ifndef XXH_FORCE_MEMORY_ACCESS /* can be defined externally, on command line for example */
56 # if defined(__GNUC__) && ( defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_6J__) \
57  || defined(__ARM_ARCH_6K__) || defined(__ARM_ARCH_6Z__) \
58  || defined(__ARM_ARCH_6ZK__) || defined(__ARM_ARCH_6T2__) )
59 # define XXH_FORCE_MEMORY_ACCESS 2
60 # elif (defined(__INTEL_COMPILER) && !defined(_WIN32)) || \
61  (defined(__GNUC__) && ( defined(__ARM_ARCH_7__) || defined(__ARM_ARCH_7A__) \
62  || defined(__ARM_ARCH_7R__) || defined(__ARM_ARCH_7M__) \
63  || defined(__ARM_ARCH_7S__) ))
64 # define XXH_FORCE_MEMORY_ACCESS 1
65 # endif
66 #endif
67 
73 #ifndef XXH_ACCEPT_NULL_INPUT_POINTER /* can be defined externally */
74 # define XXH_ACCEPT_NULL_INPUT_POINTER 0
75 #endif
76 
85 #ifndef XXH_FORCE_NATIVE_FORMAT /* can be defined externally */
86 # define XXH_FORCE_NATIVE_FORMAT 0
87 #endif
88 
96 #ifndef XXH_FORCE_ALIGN_CHECK /* can be defined externally */
97 # if defined(__i386) || defined(_M_IX86) || defined(__x86_64__) || defined(_M_X64)
98 # define XXH_FORCE_ALIGN_CHECK 0
99 # else
100 # define XXH_FORCE_ALIGN_CHECK 1
101 # endif
102 #endif
103 
104 
105 /* *************************************
106 * Includes & Memory related functions
107 ***************************************/
110 #include <stdlib.h>
/* Thin wrapper around the C library allocator: keeps all allocation in one
 * place so an embedder can redirect it without touching the hash code. */
static void* XXH_malloc(size_t s)
{
    return malloc(s);
}
/* Counterpart of XXH_malloc(): releases memory obtained from it. */
static void XXH_free(void* p)
{
    free(p);
}
114 #include <string.h>
/* Local memcpy indirection, mirroring XXH_malloc/XXH_free; returns dest
 * like the standard function. */
static void* XXH_memcpy(void* dest, const void* src, size_t size)
{
    return memcpy(dest, src, size);
}
116 
117 #include <assert.h> /* assert */
118 
119 #define XXH_STATIC_LINKING_ONLY
120 #include "xxhash.h"
121 
122 
123 /* *************************************
124 * Compiler Specific Options
125 ***************************************/
126 #ifdef _MSC_VER /* Visual Studio */
127 # pragma warning(disable : 4127) /* disable: C4127: conditional expression is constant */
128 # define FORCE_INLINE static __forceinline
129 #else
130 # if defined (__cplusplus) || defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L /* C99 */
131 # ifdef __GNUC__
132 # define FORCE_INLINE static inline __attribute__((always_inline))
133 # else
134 # define FORCE_INLINE static inline
135 # endif
136 # else
137 # define FORCE_INLINE static
138 # endif /* __STDC_VERSION__ */
139 #endif
140 
141 
142 /* *************************************
143 * Basic Types
144 ***************************************/
145 #ifndef MEM_MODULE
146 # if !defined (__VMS) \
147  && (defined (__cplusplus) \
148  || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) /* C99 */) )
149 # include <stdint.h>
150  typedef uint8_t BYTE;
151  typedef uint16_t U16;
152  typedef uint32_t U32;
153 # else
154  typedef unsigned char BYTE;
155  typedef unsigned short U16;
156  typedef unsigned int U32;
157 # endif
158 #endif
159 
160 #if (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==2))
161 
162 /* Force direct memory access. Only works on CPU which support unaligned memory access in hardware */
163 static U32 XXH_read32(const void* memPtr) { return *(const U32*) memPtr; }
164 
165 #elif (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==1))
166 
167 /* __pack instructions are safer, but compiler specific, hence potentially problematic for some compilers */
168 /* currently only defined for gcc and icc */
169 typedef union { U32 u32; } __attribute__((packed)) unalign;
170 static U32 XXH_read32(const void* ptr) { return ((const unalign*)ptr)->u32; }
171 
172 #else
173 
174 /* portable and safe solution. Generally efficient.
175  * see : http://stackoverflow.com/a/32095106/646947
176  */
177 static U32 XXH_read32(const void* memPtr)
178 {
179  U32 val;
180  memcpy(&val, memPtr, sizeof(val));
181  return val;
182 }
183 
184 #endif /* XXH_FORCE_DIRECT_MEMORY_ACCESS */
185 
186 
187 /* ****************************************
188 * Compiler-specific Functions and Macros
189 ******************************************/
190 #define XXH_GCC_VERSION (__GNUC__ * 100 + __GNUC_MINOR__)
191 
192 /* Note : although _rotl exists for minGW (GCC under windows), performance seems poor */
193 #if defined(_MSC_VER)
194 # define XXH_rotl32(x,r) _rotl(x,r)
195 # define XXH_rotl64(x,r) _rotl64(x,r)
196 #else
197 # define XXH_rotl32(x,r) ((x << r) | (x >> (32 - r)))
198 # define XXH_rotl64(x,r) ((x << r) | (x >> (64 - r)))
199 #endif
200 
201 #if defined(_MSC_VER) /* Visual Studio */
202 # define XXH_swap32 _byteswap_ulong
203 #elif XXH_GCC_VERSION >= 403
204 # define XXH_swap32 __builtin_bswap32
205 #else
206 static U32 XXH_swap32 (U32 x)
207 {
208  return ((x << 24) & 0xff000000 ) |
209  ((x << 8) & 0x00ff0000 ) |
210  ((x >> 8) & 0x0000ff00 ) |
211  ((x >> 24) & 0x000000ff );
212 }
213 #endif
214 
215 
216 /* *************************************
217 * Architecture Macros
218 ***************************************/
220 
221 /* XXH_CPU_LITTLE_ENDIAN can be defined externally, for example on the compiler command line */
222 #ifndef XXH_CPU_LITTLE_ENDIAN
223 static int XXH_isLittleEndian(void)
224 {
225  const union { U32 u; BYTE c[4]; } one = { 1 }; /* don't use static : performance detrimental */
226  return one.c[0];
227 }
228 # define XXH_CPU_LITTLE_ENDIAN XXH_isLittleEndian()
229 #endif
230 
231 
232 /* ***************************
233 * Memory reads
234 *****************************/
236 
238 {
239  if (align==XXH_unaligned)
240  return endian==XXH_littleEndian ? XXH_read32(ptr) : XXH_swap32(XXH_read32(ptr));
241  else
242  return endian==XXH_littleEndian ? *(const U32*)ptr : XXH_swap32(*(const U32*)ptr);
243 }
244 
/* Read a 32-bit value at ptr as little-endian, assuming no particular
 * alignment (delegates to the _align variant with XXH_unaligned). */
FORCE_INLINE U32 XXH_readLE32(const void* ptr, XXH_endianess endian)
{
 return XXH_readLE32_align(ptr, endian, XXH_unaligned);
}
249 
250 static U32 XXH_readBE32(const void* ptr)
251 {
253 }
254 
255 
256 /* *************************************
257 * Macros
258 ***************************************/
259 #define XXH_STATIC_ASSERT(c) { enum { XXH_sa = 1/(int)(!!(c)) }; } /* use after variable declarations */
261 
262 
263 /* *******************************************************************
264 * 32-bit hash functions
265 *********************************************************************/
/* The five 32-bit primes that define xxh32's mixing. They are part of the
 * algorithm specification — changing any of them changes every hash value. */
static const U32 PRIME32_1 = 2654435761U;
static const U32 PRIME32_2 = 2246822519U;
static const U32 PRIME32_3 = 3266489917U;
static const U32 PRIME32_4 = 668265263U;
static const U32 PRIME32_5 = 374761393U;
271 
272 static U32 XXH32_round(U32 seed, U32 input)
273 {
274  seed += input * PRIME32_2;
275  seed = XXH_rotl32(seed, 13);
276  seed *= PRIME32_1;
277  return seed;
278 }
279 
280 /* mix all bits */
282 {
283  h32 ^= h32 >> 15;
284  h32 *= PRIME32_2;
285  h32 ^= h32 >> 13;
286  h32 *= PRIME32_3;
287  h32 ^= h32 >> 16;
288  return(h32);
289 }
290 
291 #define XXH_get32bits(p) XXH_readLE32_align(p, endian, align)
292 
/*
 * Consume the 0-15 tail bytes remaining after the 16-byte main loop, then
 * apply the final avalanche. Only len&15 matters here; 'ptr' must point at
 * the unconsumed tail. Endianness/alignment are forwarded to the 32-bit
 * reads through XXH_get32bits(). The switch dispatches on the tail length
 * and deliberately falls through so each case performs exactly the right
 * number of 4-byte and 1-byte steps.
 */
static U32
XXH32_finalize(U32 h32, const void* ptr, size_t len,
 XXH_endianess endian, XXH_alignment align)

{
 const BYTE* p = (const BYTE*)ptr;
/* mix one tail byte into h32 */
#define PROCESS1 \
 h32 += (*p) * PRIME32_5; \
 p++; \
 h32 = XXH_rotl32(h32, 11) * PRIME32_1 ;

/* mix one 4-byte tail word into h32 */
#define PROCESS4 \
 h32 += XXH_get32bits(p) * PRIME32_3; \
 p+=4; \
 h32 = XXH_rotl32(h32, 17) * PRIME32_4 ;

 switch(len&15) /* or switch(bEnd - p) */
 {
 case 12: PROCESS4;
 /* fallthrough */
 case 8: PROCESS4;
 /* fallthrough */
 case 4: PROCESS4;
 return XXH32_avalanche(h32);

 case 13: PROCESS4;
 /* fallthrough */
 case 9: PROCESS4;
 /* fallthrough */
 case 5: PROCESS4;
 PROCESS1;
 return XXH32_avalanche(h32);

 case 14: PROCESS4;
 /* fallthrough */
 case 10: PROCESS4;
 /* fallthrough */
 case 6: PROCESS4;
 PROCESS1;
 PROCESS1;
 return XXH32_avalanche(h32);

 case 15: PROCESS4;
 /* fallthrough */
 case 11: PROCESS4;
 /* fallthrough */
 case 7: PROCESS4;
 /* fallthrough */
 case 3: PROCESS1;
 /* fallthrough */
 case 2: PROCESS1;
 /* fallthrough */
 case 1: PROCESS1;
 /* fallthrough */
 case 0: return XXH32_avalanche(h32);
 }
 assert(0);
 return h32; /* reaching this point is deemed impossible */
}
352 
353 
355 XXH32_endian_align(const void* input, size_t len, U32 seed,
356  XXH_endianess endian, XXH_alignment align)
357 {
358  const BYTE* p = (const BYTE*)input;
359  const BYTE* bEnd = p + len;
360  U32 h32;
361 
362 #if defined(XXH_ACCEPT_NULL_INPUT_POINTER) && (XXH_ACCEPT_NULL_INPUT_POINTER>=1)
363  if (p==NULL) {
364  len=0;
365  bEnd=p=(const BYTE*)(size_t)16;
366  }
367 #endif
368 
369  if (len>=16) {
370  const BYTE* const limit = bEnd - 15;
371  U32 v1 = seed + PRIME32_1 + PRIME32_2;
372  U32 v2 = seed + PRIME32_2;
373  U32 v3 = seed + 0;
374  U32 v4 = seed - PRIME32_1;
375 
376  do {
377  v1 = XXH32_round(v1, XXH_get32bits(p)); p+=4;
378  v2 = XXH32_round(v2, XXH_get32bits(p)); p+=4;
379  v3 = XXH32_round(v3, XXH_get32bits(p)); p+=4;
380  v4 = XXH32_round(v4, XXH_get32bits(p)); p+=4;
381  } while (p < limit);
382 
383  h32 = XXH_rotl32(v1, 1) + XXH_rotl32(v2, 7)
384  + XXH_rotl32(v3, 12) + XXH_rotl32(v4, 18);
385  } else {
386  h32 = seed + PRIME32_5;
387  }
388 
389  h32 += (U32)len;
390 
391  return XXH32_finalize(h32, p, len&15, endian, align);
392 }
393 
394 
395 XXH_PUBLIC_API unsigned int XXH32 (const void* input, size_t len, unsigned int seed)
396 {
397 #if 0
398  /* Simple version, good for code maintenance, but unfortunately slow for small inputs */
400  XXH32_reset(&state, seed);
402  return XXH32_digest(&state);
403 #else
405 
406  if (XXH_FORCE_ALIGN_CHECK) {
407  if ((((size_t)input) & 3) == 0) { /* Input is 4-bytes aligned, leverage the speed benefit */
408  if ((endian_detected==XXH_littleEndian) || XXH_FORCE_NATIVE_FORMAT)
410  else
412  } }
413 
414  if ((endian_detected==XXH_littleEndian) || XXH_FORCE_NATIVE_FORMAT)
416  else
418 #endif
419 }
420 
421 
422 
423 /*====== Hash streaming ======*/
424 
426 {
427  return (XXH32_state_t*)XXH_malloc(sizeof(XXH32_state_t));
428 }
430 {
431  XXH_free(statePtr);
432  return XXH_OK;
433 }
434 
436 {
437  memcpy(dstState, srcState, sizeof(*dstState));
438 }
439 
441 {
442  XXH32_state_t state; /* using a local state to memcpy() in order to avoid strict-aliasing warnings */
443  memset(&state, 0, sizeof(state));
444  state.v1 = seed + PRIME32_1 + PRIME32_2;
445  state.v2 = seed + PRIME32_2;
446  state.v3 = seed + 0;
447  state.v4 = seed - PRIME32_1;
448  /* do not write into reserved, planned to be removed in a future version */
449  memcpy(statePtr, &state, sizeof(state) - sizeof(state.reserved));
450  return XXH_OK;
451 }
452 
453 
456 {
457  const BYTE* p = (const BYTE*)input;
458  const BYTE* const bEnd = p + len;
459 
460  if (input==NULL)
461 #if defined(XXH_ACCEPT_NULL_INPUT_POINTER) && (XXH_ACCEPT_NULL_INPUT_POINTER>=1)
462  return XXH_OK;
463 #else
464  return XXH_ERROR;
465 #endif
466 
467  state->total_len_32 += (unsigned)len;
468  state->large_len |= (len>=16) | (state->total_len_32>=16);
469 
470  if (state->memsize + len < 16) { /* fill in tmp buffer */
471  XXH_memcpy((BYTE*)(state->mem32) + state->memsize, input, len);
472  state->memsize += (unsigned)len;
473  return XXH_OK;
474  }
475 
476  if (state->memsize) { /* some data left from previous update */
477  XXH_memcpy((BYTE*)(state->mem32) + state->memsize, input, 16-state->memsize);
478  { const U32* p32 = state->mem32;
479  state->v1 = XXH32_round(state->v1, XXH_readLE32(p32, endian)); p32++;
480  state->v2 = XXH32_round(state->v2, XXH_readLE32(p32, endian)); p32++;
481  state->v3 = XXH32_round(state->v3, XXH_readLE32(p32, endian)); p32++;
482  state->v4 = XXH32_round(state->v4, XXH_readLE32(p32, endian));
483  }
484  p += 16-state->memsize;
485  state->memsize = 0;
486  }
487 
488  if (p <= bEnd-16) {
489  const BYTE* const limit = bEnd - 16;
490  U32 v1 = state->v1;
491  U32 v2 = state->v2;
492  U32 v3 = state->v3;
493  U32 v4 = state->v4;
494 
495  do {
496  v1 = XXH32_round(v1, XXH_readLE32(p, endian)); p+=4;
497  v2 = XXH32_round(v2, XXH_readLE32(p, endian)); p+=4;
498  v3 = XXH32_round(v3, XXH_readLE32(p, endian)); p+=4;
499  v4 = XXH32_round(v4, XXH_readLE32(p, endian)); p+=4;
500  } while (p<=limit);
501 
502  state->v1 = v1;
503  state->v2 = v2;
504  state->v3 = v3;
505  state->v4 = v4;
506  }
507 
508  if (p < bEnd) {
509  XXH_memcpy(state->mem32, p, (size_t)(bEnd-p));
510  state->memsize = (unsigned)(bEnd-p);
511  }
512 
513  return XXH_OK;
514 }
515 
516 
518 {
520 
521  if ((endian_detected==XXH_littleEndian) || XXH_FORCE_NATIVE_FORMAT)
522  return XXH32_update_endian(state_in, input, len, XXH_littleEndian);
523  else
524  return XXH32_update_endian(state_in, input, len, XXH_bigEndian);
525 }
526 
527 
530 {
531  U32 h32;
532 
533  if (state->large_len) {
534  h32 = XXH_rotl32(state->v1, 1)
535  + XXH_rotl32(state->v2, 7)
536  + XXH_rotl32(state->v3, 12)
537  + XXH_rotl32(state->v4, 18);
538  } else {
539  h32 = state->v3 /* == seed */ + PRIME32_5;
540  }
541 
542  h32 += state->total_len_32;
543 
544  return XXH32_finalize(h32, state->mem32, state->memsize, endian, XXH_aligned);
545 }
546 
547 
548 XXH_PUBLIC_API unsigned int XXH32_digest (const XXH32_state_t* state_in)
549 {
551 
552  if ((endian_detected==XXH_littleEndian) || XXH_FORCE_NATIVE_FORMAT)
553  return XXH32_digest_endian(state_in, XXH_littleEndian);
554  else
555  return XXH32_digest_endian(state_in, XXH_bigEndian);
556 }
557 
558 
559 /*====== Canonical representation ======*/
560 
568 {
570  if (XXH_CPU_LITTLE_ENDIAN) hash = XXH_swap32(hash);
571  memcpy(dst, &hash, sizeof(*dst));
572 }
573 
575 {
576  return XXH_readBE32(src);
577 }
578 
579 
580 #ifndef XXH_NO_LONG_LONG
581 
582 /* *******************************************************************
583 * 64-bit hash functions
584 *********************************************************************/
585 
586 /*====== Memory access ======*/
587 
588 #ifndef MEM_MODULE
589 # define MEM_MODULE
590 # if !defined (__VMS) \
591  && (defined (__cplusplus) \
592  || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) /* C99 */) )
593 # include <stdint.h>
594  typedef uint64_t U64;
595 # else
596  /* if compiler doesn't support unsigned long long, replace by another 64-bit type */
597  typedef unsigned long long U64;
598 # endif
599 #endif
600 
601 
602 #if (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==2))
603 
604 /* Force direct memory access. Only works on CPU which support unaligned memory access in hardware */
605 static U64 XXH_read64(const void* memPtr) { return *(const U64*) memPtr; }
606 
607 #elif (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==1))
608 
609 /* __pack instructions are safer, but compiler specific, hence potentially problematic for some compilers */
610 /* currently only defined for gcc and icc */
611 typedef union { U32 u32; U64 u64; } __attribute__((packed)) unalign64;
612 static U64 XXH_read64(const void* ptr) { return ((const unalign64*)ptr)->u64; }
613 
614 #else
615 
616 /* portable and safe solution. Generally efficient.
617  * see : http://stackoverflow.com/a/32095106/646947
618  */
619 
620 static U64 XXH_read64(const void* memPtr)
621 {
622  U64 val;
623  memcpy(&val, memPtr, sizeof(val));
624  return val;
625 }
626 
627 #endif /* XXH_FORCE_DIRECT_MEMORY_ACCESS */
628 
629 #if defined(_MSC_VER) /* Visual Studio */
630 # define XXH_swap64 _byteswap_uint64
631 #elif XXH_GCC_VERSION >= 403
632 # define XXH_swap64 __builtin_bswap64
633 #else
634 static U64 XXH_swap64 (U64 x)
635 {
636  return ((x << 56) & 0xff00000000000000ULL) |
637  ((x << 40) & 0x00ff000000000000ULL) |
638  ((x << 24) & 0x0000ff0000000000ULL) |
639  ((x << 8) & 0x000000ff00000000ULL) |
640  ((x >> 8) & 0x00000000ff000000ULL) |
641  ((x >> 24) & 0x0000000000ff0000ULL) |
642  ((x >> 40) & 0x000000000000ff00ULL) |
643  ((x >> 56) & 0x00000000000000ffULL);
644 }
645 #endif
646 
648 {
649  if (align==XXH_unaligned)
650  return endian==XXH_littleEndian ? XXH_read64(ptr) : XXH_swap64(XXH_read64(ptr));
651  else
652  return endian==XXH_littleEndian ? *(const U64*)ptr : XXH_swap64(*(const U64*)ptr);
653 }
654 
/* Read a 64-bit value at ptr as little-endian, assuming no particular
 * alignment (delegates to the _align variant with XXH_unaligned). */
FORCE_INLINE U64 XXH_readLE64(const void* ptr, XXH_endianess endian)
{
 return XXH_readLE64_align(ptr, endian, XXH_unaligned);
}
659 
660 static U64 XXH_readBE64(const void* ptr)
661 {
663 }
664 
665 
666 /*====== xxh64 ======*/
667 
/* The five 64-bit primes that define xxh64's mixing. They are part of the
 * algorithm specification — changing any of them changes every hash value. */
static const U64 PRIME64_1 = 11400714785074694791ULL;
static const U64 PRIME64_2 = 14029467366897019727ULL;
static const U64 PRIME64_3 = 1609587929392839161ULL;
static const U64 PRIME64_4 = 9650029242287828579ULL;
static const U64 PRIME64_5 = 2870177450012600261ULL;
673 
675 {
676  acc += input * PRIME64_2;
677  acc = XXH_rotl64(acc, 31);
678  acc *= PRIME64_1;
679  return acc;
680 }
681 
683 {
684  val = XXH64_round(0, val);
685  acc ^= val;
686  acc = acc * PRIME64_1 + PRIME64_4;
687  return acc;
688 }
689 
691 {
692  h64 ^= h64 >> 33;
693  h64 *= PRIME64_2;
694  h64 ^= h64 >> 29;
695  h64 *= PRIME64_3;
696  h64 ^= h64 >> 32;
697  return h64;
698 }
699 
700 
701 #define XXH_get64bits(p) XXH_readLE64_align(p, endian, align)
702 
/*
 * Consume the 0-31 tail bytes remaining after the 32-byte main loop, then
 * apply the final avalanche. Only len&31 matters here; 'ptr' must point at
 * the unconsumed tail. Endianness/alignment are forwarded to the reads via
 * XXH_get32bits()/XXH_get64bits(). The switch dispatches on the tail length
 * and deliberately falls through so each case performs exactly the right
 * number of 8-, 4- and 1-byte steps.
 */
static U64
XXH64_finalize(U64 h64, const void* ptr, size_t len,
 XXH_endianess endian, XXH_alignment align)
{
 const BYTE* p = (const BYTE*)ptr;

/* mix one tail byte into h64 */
#define PROCESS1_64 \
 h64 ^= (*p) * PRIME64_5; \
 p++; \
 h64 = XXH_rotl64(h64, 11) * PRIME64_1;

/* mix one 4-byte tail word into h64 */
#define PROCESS4_64 \
 h64 ^= (U64)(XXH_get32bits(p)) * PRIME64_1; \
 p+=4; \
 h64 = XXH_rotl64(h64, 23) * PRIME64_2 + PRIME64_3;

/* mix one 8-byte tail word into h64 (through a full round) */
#define PROCESS8_64 { \
 U64 const k1 = XXH64_round(0, XXH_get64bits(p)); \
 p+=8; \
 h64 ^= k1; \
 h64 = XXH_rotl64(h64,27) * PRIME64_1 + PRIME64_4; \
}

 switch(len&31) {
 case 24: PROCESS8_64;
 /* fallthrough */
 case 16: PROCESS8_64;
 /* fallthrough */
 case 8: PROCESS8_64;
 return XXH64_avalanche(h64);

 case 28: PROCESS8_64;
 /* fallthrough */
 case 20: PROCESS8_64;
 /* fallthrough */
 case 12: PROCESS8_64;
 /* fallthrough */
 case 4: PROCESS4_64;
 return XXH64_avalanche(h64);

 case 25: PROCESS8_64;
 /* fallthrough */
 case 17: PROCESS8_64;
 /* fallthrough */
 case 9: PROCESS8_64;
 PROCESS1_64;
 return XXH64_avalanche(h64);

 case 29: PROCESS8_64;
 /* fallthrough */
 case 21: PROCESS8_64;
 /* fallthrough */
 case 13: PROCESS8_64;
 /* fallthrough */
 case 5: PROCESS4_64;
 PROCESS1_64;
 return XXH64_avalanche(h64);

 case 26: PROCESS8_64;
 /* fallthrough */
 case 18: PROCESS8_64;
 /* fallthrough */
 case 10: PROCESS8_64;
 PROCESS1_64;
 PROCESS1_64;
 return XXH64_avalanche(h64);

 case 30: PROCESS8_64;
 /* fallthrough */
 case 22: PROCESS8_64;
 /* fallthrough */
 case 14: PROCESS8_64;
 /* fallthrough */
 case 6: PROCESS4_64;
 PROCESS1_64;
 PROCESS1_64;
 return XXH64_avalanche(h64);

 case 27: PROCESS8_64;
 /* fallthrough */
 case 19: PROCESS8_64;
 /* fallthrough */
 case 11: PROCESS8_64;
 PROCESS1_64;
 PROCESS1_64;
 PROCESS1_64;
 return XXH64_avalanche(h64);

 case 31: PROCESS8_64;
 /* fallthrough */
 case 23: PROCESS8_64;
 /* fallthrough */
 case 15: PROCESS8_64;
 /* fallthrough */
 case 7: PROCESS4_64;
 /* fallthrough */
 case 3: PROCESS1_64;
 /* fallthrough */
 case 2: PROCESS1_64;
 /* fallthrough */
 case 1: PROCESS1_64;
 /* fallthrough */
 case 0: return XXH64_avalanche(h64);
 }

 /* impossible to reach */
 assert(0);
 return 0; /* unreachable, but some compilers complain without it */
}
812 
814 XXH64_endian_align(const void* input, size_t len, U64 seed,
815  XXH_endianess endian, XXH_alignment align)
816 {
817  const BYTE* p = (const BYTE*)input;
818  const BYTE* bEnd = p + len;
819  U64 h64;
820 
821 #if defined(XXH_ACCEPT_NULL_INPUT_POINTER) && (XXH_ACCEPT_NULL_INPUT_POINTER>=1)
822  if (p==NULL) {
823  len=0;
824  bEnd=p=(const BYTE*)(size_t)32;
825  }
826 #endif
827 
828  if (len>=32) {
829  const BYTE* const limit = bEnd - 32;
830  U64 v1 = seed + PRIME64_1 + PRIME64_2;
831  U64 v2 = seed + PRIME64_2;
832  U64 v3 = seed + 0;
833  U64 v4 = seed - PRIME64_1;
834 
835  do {
836  v1 = XXH64_round(v1, XXH_get64bits(p)); p+=8;
837  v2 = XXH64_round(v2, XXH_get64bits(p)); p+=8;
838  v3 = XXH64_round(v3, XXH_get64bits(p)); p+=8;
839  v4 = XXH64_round(v4, XXH_get64bits(p)); p+=8;
840  } while (p<=limit);
841 
842  h64 = XXH_rotl64(v1, 1) + XXH_rotl64(v2, 7) + XXH_rotl64(v3, 12) + XXH_rotl64(v4, 18);
843  h64 = XXH64_mergeRound(h64, v1);
844  h64 = XXH64_mergeRound(h64, v2);
845  h64 = XXH64_mergeRound(h64, v3);
846  h64 = XXH64_mergeRound(h64, v4);
847 
848  } else {
849  h64 = seed + PRIME64_5;
850  }
851 
852  h64 += (U64) len;
853 
854  return XXH64_finalize(h64, p, len, endian, align);
855 }
856 
857 
858 XXH_PUBLIC_API unsigned long long XXH64 (const void* input, size_t len, unsigned long long seed)
859 {
860 #if 0
861  /* Simple version, good for code maintenance, but unfortunately slow for small inputs */
863  XXH64_reset(&state, seed);
865  return XXH64_digest(&state);
866 #else
868 
869  if (XXH_FORCE_ALIGN_CHECK) {
870  if ((((size_t)input) & 7)==0) { /* Input is aligned, let's leverage the speed advantage */
871  if ((endian_detected==XXH_littleEndian) || XXH_FORCE_NATIVE_FORMAT)
873  else
875  } }
876 
877  if ((endian_detected==XXH_littleEndian) || XXH_FORCE_NATIVE_FORMAT)
879  else
881 #endif
882 }
883 
884 /*====== Hash Streaming ======*/
885 
887 {
888  return (XXH64_state_t*)XXH_malloc(sizeof(XXH64_state_t));
889 }
891 {
892  XXH_free(statePtr);
893  return XXH_OK;
894 }
895 
897 {
898  memcpy(dstState, srcState, sizeof(*dstState));
899 }
900 
901 XXH_PUBLIC_API XXH_errorcode XXH64_reset(XXH64_state_t* statePtr, unsigned long long seed)
902 {
903  XXH64_state_t state; /* using a local state to memcpy() in order to avoid strict-aliasing warnings */
904  memset(&state, 0, sizeof(state));
905  state.v1 = seed + PRIME64_1 + PRIME64_2;
906  state.v2 = seed + PRIME64_2;
907  state.v3 = seed + 0;
908  state.v4 = seed - PRIME64_1;
909  /* do not write into reserved, planned to be removed in a future version */
910  memcpy(statePtr, &state, sizeof(state) - sizeof(state.reserved));
911  return XXH_OK;
912 }
913 
916 {
917  const BYTE* p = (const BYTE*)input;
918  const BYTE* const bEnd = p + len;
919 
920  if (input==NULL)
921 #if defined(XXH_ACCEPT_NULL_INPUT_POINTER) && (XXH_ACCEPT_NULL_INPUT_POINTER>=1)
922  return XXH_OK;
923 #else
924  return XXH_ERROR;
925 #endif
926 
927  state->total_len += len;
928 
929  if (state->memsize + len < 32) { /* fill in tmp buffer */
930  XXH_memcpy(((BYTE*)state->mem64) + state->memsize, input, len);
931  state->memsize += (U32)len;
932  return XXH_OK;
933  }
934 
935  if (state->memsize) { /* tmp buffer is full */
936  XXH_memcpy(((BYTE*)state->mem64) + state->memsize, input, 32-state->memsize);
937  state->v1 = XXH64_round(state->v1, XXH_readLE64(state->mem64+0, endian));
938  state->v2 = XXH64_round(state->v2, XXH_readLE64(state->mem64+1, endian));
939  state->v3 = XXH64_round(state->v3, XXH_readLE64(state->mem64+2, endian));
940  state->v4 = XXH64_round(state->v4, XXH_readLE64(state->mem64+3, endian));
941  p += 32-state->memsize;
942  state->memsize = 0;
943  }
944 
945  if (p+32 <= bEnd) {
946  const BYTE* const limit = bEnd - 32;
947  U64 v1 = state->v1;
948  U64 v2 = state->v2;
949  U64 v3 = state->v3;
950  U64 v4 = state->v4;
951 
952  do {
953  v1 = XXH64_round(v1, XXH_readLE64(p, endian)); p+=8;
954  v2 = XXH64_round(v2, XXH_readLE64(p, endian)); p+=8;
955  v3 = XXH64_round(v3, XXH_readLE64(p, endian)); p+=8;
956  v4 = XXH64_round(v4, XXH_readLE64(p, endian)); p+=8;
957  } while (p<=limit);
958 
959  state->v1 = v1;
960  state->v2 = v2;
961  state->v3 = v3;
962  state->v4 = v4;
963  }
964 
965  if (p < bEnd) {
966  XXH_memcpy(state->mem64, p, (size_t)(bEnd-p));
967  state->memsize = (unsigned)(bEnd-p);
968  }
969 
970  return XXH_OK;
971 }
972 
974 {
976 
977  if ((endian_detected==XXH_littleEndian) || XXH_FORCE_NATIVE_FORMAT)
978  return XXH64_update_endian(state_in, input, len, XXH_littleEndian);
979  else
980  return XXH64_update_endian(state_in, input, len, XXH_bigEndian);
981 }
982 
984 {
985  U64 h64;
986 
987  if (state->total_len >= 32) {
988  U64 const v1 = state->v1;
989  U64 const v2 = state->v2;
990  U64 const v3 = state->v3;
991  U64 const v4 = state->v4;
992 
993  h64 = XXH_rotl64(v1, 1) + XXH_rotl64(v2, 7) + XXH_rotl64(v3, 12) + XXH_rotl64(v4, 18);
994  h64 = XXH64_mergeRound(h64, v1);
995  h64 = XXH64_mergeRound(h64, v2);
996  h64 = XXH64_mergeRound(h64, v3);
997  h64 = XXH64_mergeRound(h64, v4);
998  } else {
999  h64 = state->v3 /*seed*/ + PRIME64_5;
1000  }
1001 
1002  h64 += (U64) state->total_len;
1003 
1004  return XXH64_finalize(h64, state->mem64, (size_t)state->total_len, endian, XXH_aligned);
1005 }
1006 
1007 XXH_PUBLIC_API unsigned long long XXH64_digest (const XXH64_state_t* state_in)
1008 {
1010 
1011  if ((endian_detected==XXH_littleEndian) || XXH_FORCE_NATIVE_FORMAT)
1012  return XXH64_digest_endian(state_in, XXH_littleEndian);
1013  else
1014  return XXH64_digest_endian(state_in, XXH_bigEndian);
1015 }
1016 
1017 
1018 /*====== Canonical representation ======*/
1019 
1021 {
1022  XXH_STATIC_ASSERT(sizeof(XXH64_canonical_t) == sizeof(XXH64_hash_t));
1023  if (XXH_CPU_LITTLE_ENDIAN) hash = XXH_swap64(hash);
1024  memcpy(dst, &hash, sizeof(*dst));
1025 }
1026 
1028 {
1029  return XXH_readBE64(src);
1030 }
1031 
1032 #endif /* XXH_NO_LONG_LONG */
size_t len
Definition: 6502dis.c:15
lzma_index * src
Definition: index.h:567
ut16 val
Definition: armass64_const.h:6
#define NULL
Definition: cris-opc.c:27
RZ_API void Ht_() free(HtName_(Ht) *ht)
Definition: ht_inc.c:130
voidpf void uLong size
Definition: ioapi.h:138
@ v1
Definition: lanai.h:85
return memset(p, 0, total)
void * p
Definition: libc.cpp:67
memcpy(mem, inblock.get(), min(CONTAINING_RECORD(inblock.get(), MEMBLOCK, data) ->size, size))
#define __attribute__(x)
Definition: ansidecl.h:266
void * malloc(size_t size)
Definition: malloc.c:123
static void struct sockaddr socklen_t static fromlen static backlog static fork char char char static envp int struct rusage static rusage struct utsname static buf struct sembuf unsigned
Definition: sflib.h:97
XXH_PUBLIC_API unsigned int XXH32_digest(const XXH32_state_t *state_in)
Definition: xxhash.c:546
FORCE_INLINE U32 XXH32_digest_endian(const XXH32_state_t *state, XXH_endianess endian)
Definition: xxhash.c:527
FORCE_INLINE U32 XXH32_endian_align(const void *input, size_t len, U32 seed, XXH_endianess endian, XXH_alignment align)
Definition: xxhash.c:352
XXH_PUBLIC_API XXH32_hash_t XXH32_hashFromCanonical(const XXH32_canonical_t *src)
Definition: xxhash.c:572
XXH_endianess
Definition: xxhash.c:216
@ XXH_littleEndian
Definition: xxhash.c:216
@ XXH_bigEndian
Definition: xxhash.c:216
XXH_PUBLIC_API XXH32_state_t * XXH32_createState(void)
Definition: xxhash.c:422
unsigned long long U64
Definition: xxhash.c:595
FORCE_INLINE U64 XXH_readLE64(const void *ptr, XXH_endianess endian)
Definition: xxhash.c:653
XXH_PUBLIC_API unsigned int XXH32(const void *input, size_t len, unsigned int seed)
Definition: xxhash.c:392
XXH_PUBLIC_API XXH_errorcode XXH64_update(XXH64_state_t *state_in, const void *input, size_t len)
Definition: xxhash.c:971
FORCE_INLINE U64 XXH_readLE64_align(const void *ptr, XXH_endianess endian, XXH_alignment align)
Definition: xxhash.c:645
XXH_PUBLIC_API void XXH32_copyState(XXH32_state_t *dstState, const XXH32_state_t *srcState)
Definition: xxhash.c:432
unsigned char BYTE
Definition: xxhash.c:151
XXH_PUBLIC_API XXH_errorcode XXH32_reset(XXH32_state_t *statePtr, unsigned int seed)
Definition: xxhash.c:437
FORCE_INLINE U32 XXH_readLE32(const void *ptr, XXH_endianess endian)
Definition: xxhash.c:242
XXH_PUBLIC_API XXH_errorcode XXH64_reset(XXH64_state_t *statePtr, unsigned long long seed)
Definition: xxhash.c:898
FORCE_INLINE U64 XXH64_digest_endian(const XXH64_state_t *state, XXH_endianess endian)
Definition: xxhash.c:981
FORCE_INLINE XXH_errorcode XXH32_update_endian(XXH32_state_t *state, const void *input, size_t len, XXH_endianess endian)
Definition: xxhash.c:452
XXH_PUBLIC_API XXH64_state_t * XXH64_createState(void)
Definition: xxhash.c:883
FORCE_INLINE U32 XXH_readLE32_align(const void *ptr, XXH_endianess endian, XXH_alignment align)
Definition: xxhash.c:234
XXH_PUBLIC_API XXH_errorcode XXH32_freeState(XXH32_state_t *statePtr)
Definition: xxhash.c:426
XXH_PUBLIC_API unsigned long long XXH64(const void *input, size_t len, unsigned long long seed)
Definition: xxhash.c:855
unsigned int U32
Definition: xxhash.c:153
XXH_PUBLIC_API XXH64_hash_t XXH64_hashFromCanonical(const XXH64_canonical_t *src)
Definition: xxhash.c:1025
XXH_PUBLIC_API void XXH32_canonicalFromHash(XXH32_canonical_t *dst, XXH32_hash_t hash)
Definition: xxhash.c:565
XXH_alignment
Definition: xxhash.c:232
@ XXH_aligned
Definition: xxhash.c:232
@ XXH_unaligned
Definition: xxhash.c:232
unsigned short U16
Definition: xxhash.c:152
XXH_PUBLIC_API unsigned long long XXH64_digest(const XXH64_state_t *state_in)
Definition: xxhash.c:1005
XXH_PUBLIC_API XXH_errorcode XXH64_freeState(XXH64_state_t *statePtr)
Definition: xxhash.c:887
FORCE_INLINE XXH_errorcode XXH64_update_endian(XXH64_state_t *state, const void *input, size_t len, XXH_endianess endian)
Definition: xxhash.c:912
FORCE_INLINE U64 XXH64_endian_align(const void *input, size_t len, U64 seed, XXH_endianess endian, XXH_alignment align)
Definition: xxhash.c:811
XXH_PUBLIC_API void XXH64_canonicalFromHash(XXH64_canonical_t *dst, XXH64_hash_t hash)
Definition: xxhash.c:1018
XXH_PUBLIC_API unsigned XXH_versionNumber(void)
Definition: xxhash.c:257
XXH_PUBLIC_API XXH_errorcode XXH32_update(XXH32_state_t *state_in, const void *input, size_t len)
Definition: xxhash.c:515
XXH_PUBLIC_API void XXH64_copyState(XXH64_state_t *dstState, const XXH64_state_t *srcState)
Definition: xxhash.c:893
struct XXH32_state_s XXH32_state_t
Definition: xxhash.h:172
struct XXH64_state_s XXH64_state_t
Definition: xxhash.h:229
unsigned long long XXH64_hash_t
Definition: xxhash.h:219
XXH_errorcode
Definition: xxhash.h:79
@ XXH_ERROR
Definition: xxhash.h:79
@ XXH_OK
Definition: xxhash.h:79
#define XXH_PUBLIC_API
Definition: xxhash.h:110
unsigned int XXH32_hash_t
Definition: xxhash.h:162
#define XXH_VERSION_NUMBER
Definition: xxhash.h:155
unsigned long long U64
Definition: lz4.c:290
unsigned char BYTE
Definition: lz4.c:286
unsigned int U32
Definition: lz4.c:288
char * dst
Definition: lz4.h:724
char * dest
Definition: lz4.h:697
assert(limit<=UINT32_MAX/2)
static uint32_t const uint8_t uint32_t uint32_t limit
Definition: memcmplen.h:45
int x
Definition: mipsasm.c:20
static RzSocket * s
Definition: rtr.c:28
unsigned short uint16_t
Definition: sftypes.h:30
unsigned int uint32_t
Definition: sftypes.h:29
unsigned long uint64_t
Definition: sftypes.h:28
unsigned char uint8_t
Definition: sftypes.h:31
#define c(i)
Definition: sha256.c:43
Definition: dis.h:43
#define XXH_rotl64(x, r)
Definition: xxhash.c:198
#define PROCESS4
static U64 XXH_read64(const void *memPtr)
Definition: xxhash.c:620
static U64 XXH64_finalize(U64 h64, const void *ptr, size_t len, XXH_endianess endian, XXH_alignment align)
Definition: xxhash.c:704
static const U64 PRIME64_5
Definition: xxhash.c:672
static U64 XXH64_mergeRound(U64 acc, U64 val)
Definition: xxhash.c:682
#define XXH_STATIC_ASSERT(c)
Definition: xxhash.c:259
#define PROCESS4_64
static const U32 PRIME32_5
Definition: xxhash.c:270
#define XXH_CPU_LITTLE_ENDIAN
Definition: xxhash.c:228
static const U64 PRIME64_2
Definition: xxhash.c:669
static void * XXH_malloc(size_t s)
Definition: xxhash.c:111
static U32 XXH32_round(U32 seed, U32 input)
Definition: xxhash.c:272
static const U32 PRIME32_1
Definition: xxhash.c:266
static const U64 PRIME64_4
Definition: xxhash.c:671
static U64 XXH64_avalanche(U64 h64)
Definition: xxhash.c:690
static U64 XXH_swap64(U64 x)
Definition: xxhash.c:634
#define PROCESS8_64
#define XXH_rotl32(x, r)
Definition: xxhash.c:197
static void XXH_free(void *p)
Definition: xxhash.c:112
static U32 XXH_swap32(U32 x)
Definition: xxhash.c:206
static const U64 PRIME64_1
Definition: xxhash.c:668
static const U32 PRIME32_2
Definition: xxhash.c:267
static U64 XXH_readBE64(const void *ptr)
Definition: xxhash.c:660
static const U64 PRIME64_3
Definition: xxhash.c:670
static const U32 PRIME32_3
Definition: xxhash.c:268
#define PROCESS1_64
#define PROCESS1
#define FORCE_INLINE
Definition: xxhash.c:137
static int XXH_isLittleEndian(void)
Definition: xxhash.c:223
#define XXH_get64bits(p)
Definition: xxhash.c:701
static const U32 PRIME32_4
Definition: xxhash.c:269
static U32 XXH_read32(const void *memPtr)
Definition: xxhash.c:177
#define XXH_get32bits(p)
Definition: xxhash.c:291
static U64 XXH64_round(U64 acc, U64 input)
Definition: xxhash.c:674
static U32 XXH_readBE32(const void *ptr)
Definition: xxhash.c:250
#define XXH_FORCE_ALIGN_CHECK
Definition: xxhash.c:100
static U32 XXH32_avalanche(U32 h32)
Definition: xxhash.c:281
static void * XXH_memcpy(void *dest, const void *src, size_t size)
Definition: xxhash.c:115
static U32 XXH32_finalize(U32 h32, const void *ptr, size_t len, XXH_endianess endian, XXH_alignment align)
Definition: xxhash.c:294
#define XXH_FORCE_NATIVE_FORMAT
Definition: xxhash.c:86
static bool input(void *ud, zip_uint8_t *data, zip_uint64_t length)