79#if defined (__cplusplus)
102#if (defined(XXH_INLINE_ALL) || defined(XXH_PRIVATE_API)) \
103 && !defined(XXH_INLINE_ALL_31684351384)
105# define XXH_INLINE_ALL_31684351384
107# undef XXH_STATIC_LINKING_ONLY
108# define XXH_STATIC_LINKING_ONLY
110# undef XXH_PUBLIC_API
111# if defined(__GNUC__)
112# define XXH_PUBLIC_API static __inline __attribute__((unused))
113# elif defined (__cplusplus) || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) )
114# define XXH_PUBLIC_API static inline
115# elif defined(_MSC_VER)
116# define XXH_PUBLIC_API static __inline
119# define XXH_PUBLIC_API static
135# undef XXH_versionNumber
138# undef XXH32_createState
139# undef XXH32_freeState
143# undef XXH32_copyState
144# undef XXH32_canonicalFromHash
145# undef XXH32_hashFromCanonical
148# undef XXH64_createState
149# undef XXH64_freeState
153# undef XXH64_copyState
154# undef XXH64_canonicalFromHash
155# undef XXH64_hashFromCanonical
158# undef XXH3_64bits_withSecret
159# undef XXH3_64bits_withSeed
160# undef XXH3_64bits_withSecretandSeed
161# undef XXH3_createState
162# undef XXH3_freeState
163# undef XXH3_copyState
164# undef XXH3_64bits_reset
165# undef XXH3_64bits_reset_withSeed
166# undef XXH3_64bits_reset_withSecret
167# undef XXH3_64bits_update
168# undef XXH3_64bits_digest
169# undef XXH3_generateSecret
173# undef XXH3_128bits_withSeed
174# undef XXH3_128bits_withSecret
175# undef XXH3_128bits_reset
176# undef XXH3_128bits_reset_withSeed
177# undef XXH3_128bits_reset_withSecret
178# undef XXH3_128bits_reset_withSecretandSeed
179# undef XXH3_128bits_update
180# undef XXH3_128bits_digest
181# undef XXH128_isEqual
183# undef XXH128_canonicalFromHash
184# undef XXH128_hashFromCanonical
189# define XXH_NAMESPACE XXH_INLINE_
197# define XXH_IPREF(Id) XXH_NAMESPACE ## Id
198# define XXH_OK XXH_IPREF(XXH_OK)
199# define XXH_ERROR XXH_IPREF(XXH_ERROR)
200# define XXH_errorcode XXH_IPREF(XXH_errorcode)
201# define XXH32_canonical_t XXH_IPREF(XXH32_canonical_t)
202# define XXH64_canonical_t XXH_IPREF(XXH64_canonical_t)
203# define XXH128_canonical_t XXH_IPREF(XXH128_canonical_t)
204# define XXH32_state_s XXH_IPREF(XXH32_state_s)
205# define XXH32_state_t XXH_IPREF(XXH32_state_t)
206# define XXH64_state_s XXH_IPREF(XXH64_state_s)
207# define XXH64_state_t XXH_IPREF(XXH64_state_t)
208# define XXH3_state_s XXH_IPREF(XXH3_state_s)
209# define XXH3_state_t XXH_IPREF(XXH3_state_t)
210# define XXH128_hash_t XXH_IPREF(XXH128_hash_t)
212# undef XXHASH_H_5627135585666179
213# undef XXHASH_H_STATIC_13879238742
221#ifndef XXHASH_H_5627135585666179
222#define XXHASH_H_5627135585666179 1
231#if !defined(XXH_INLINE_ALL) && !defined(XXH_PRIVATE_API)
232# if defined(_WIN32) && defined(_MSC_VER) && (defined(XXH_IMPORT) || defined(XXH_EXPORT))
234# define XXH_PUBLIC_API __declspec(dllexport)
236# define XXH_PUBLIC_API __declspec(dllimport)
239# define XXH_PUBLIC_API
257# define XXH_NAMESPACE
262# define XXH_CAT(A,B) A##B
263# define XXH_NAME2(A,B) XXH_CAT(A,B)
264# define XXH_versionNumber XXH_NAME2(XXH_NAMESPACE, XXH_versionNumber)
266# define XXH32 XXH_NAME2(XXH_NAMESPACE, XXH32)
267# define XXH32_createState XXH_NAME2(XXH_NAMESPACE, XXH32_createState)
268# define XXH32_freeState XXH_NAME2(XXH_NAMESPACE, XXH32_freeState)
269# define XXH32_reset XXH_NAME2(XXH_NAMESPACE, XXH32_reset)
270# define XXH32_update XXH_NAME2(XXH_NAMESPACE, XXH32_update)
271# define XXH32_digest XXH_NAME2(XXH_NAMESPACE, XXH32_digest)
272# define XXH32_copyState XXH_NAME2(XXH_NAMESPACE, XXH32_copyState)
273# define XXH32_canonicalFromHash XXH_NAME2(XXH_NAMESPACE, XXH32_canonicalFromHash)
274# define XXH32_hashFromCanonical XXH_NAME2(XXH_NAMESPACE, XXH32_hashFromCanonical)
276# define XXH64 XXH_NAME2(XXH_NAMESPACE, XXH64)
277# define XXH64_createState XXH_NAME2(XXH_NAMESPACE, XXH64_createState)
278# define XXH64_freeState XXH_NAME2(XXH_NAMESPACE, XXH64_freeState)
279# define XXH64_reset XXH_NAME2(XXH_NAMESPACE, XXH64_reset)
280# define XXH64_update XXH_NAME2(XXH_NAMESPACE, XXH64_update)
281# define XXH64_digest XXH_NAME2(XXH_NAMESPACE, XXH64_digest)
282# define XXH64_copyState XXH_NAME2(XXH_NAMESPACE, XXH64_copyState)
283# define XXH64_canonicalFromHash XXH_NAME2(XXH_NAMESPACE, XXH64_canonicalFromHash)
284# define XXH64_hashFromCanonical XXH_NAME2(XXH_NAMESPACE, XXH64_hashFromCanonical)
286# define XXH3_64bits XXH_NAME2(XXH_NAMESPACE, XXH3_64bits)
287# define XXH3_64bits_withSecret XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_withSecret)
288# define XXH3_64bits_withSeed XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_withSeed)
289# define XXH3_64bits_withSecretandSeed XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_withSecretandSeed)
290# define XXH3_createState XXH_NAME2(XXH_NAMESPACE, XXH3_createState)
291# define XXH3_freeState XXH_NAME2(XXH_NAMESPACE, XXH3_freeState)
292# define XXH3_copyState XXH_NAME2(XXH_NAMESPACE, XXH3_copyState)
293# define XXH3_64bits_reset XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_reset)
294# define XXH3_64bits_reset_withSeed XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_reset_withSeed)
295# define XXH3_64bits_reset_withSecret XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_reset_withSecret)
296# define XXH3_64bits_reset_withSecretandSeed XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_reset_withSecretandSeed)
297# define XXH3_64bits_update XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_update)
298# define XXH3_64bits_digest XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_digest)
299# define XXH3_generateSecret XXH_NAME2(XXH_NAMESPACE, XXH3_generateSecret)
300# define XXH3_generateSecret_fromSeed XXH_NAME2(XXH_NAMESPACE, XXH3_generateSecret_fromSeed)
302# define XXH128 XXH_NAME2(XXH_NAMESPACE, XXH128)
303# define XXH3_128bits XXH_NAME2(XXH_NAMESPACE, XXH3_128bits)
304# define XXH3_128bits_withSeed XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_withSeed)
305# define XXH3_128bits_withSecret XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_withSecret)
306# define XXH3_128bits_withSecretandSeed XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_withSecretandSeed)
307# define XXH3_128bits_reset XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_reset)
308# define XXH3_128bits_reset_withSeed XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_reset_withSeed)
309# define XXH3_128bits_reset_withSecret XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_reset_withSecret)
310# define XXH3_128bits_reset_withSecretandSeed XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_reset_withSecretandSeed)
311# define XXH3_128bits_update XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_update)
312# define XXH3_128bits_digest XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_digest)
313# define XXH128_isEqual XXH_NAME2(XXH_NAMESPACE, XXH128_isEqual)
314# define XXH128_cmp XXH_NAME2(XXH_NAMESPACE, XXH128_cmp)
315# define XXH128_canonicalFromHash XXH_NAME2(XXH_NAMESPACE, XXH128_canonicalFromHash)
316# define XXH128_hashFromCanonical XXH_NAME2(XXH_NAMESPACE, XXH128_hashFromCanonical)
/* Library version: 0.8.1.
 * XXH_VERSION_NUMBER packs it decimally as MAJOR*10000 + MINOR*100 + RELEASE
 * (0.8.1 -> 801); this is the value returned by XXH_versionNumber(). */
323#define XXH_VERSION_MAJOR 0
324#define XXH_VERSION_MINOR 8
325#define XXH_VERSION_RELEASE 1
326#define XXH_VERSION_NUMBER (XXH_VERSION_MAJOR *100*100 + XXH_VERSION_MINOR *100 + XXH_VERSION_RELEASE)
349#if defined(XXH_DOXYGEN)
355typedef uint32_t XXH32_hash_t;
357#elif !defined (__VMS) \
358 && (defined (__cplusplus) \
359 || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) ) )
361 typedef uint32_t XXH32_hash_t;
365# if UINT_MAX == 0xFFFFFFFFUL
366 typedef unsigned int XXH32_hash_t;
368# if ULONG_MAX == 0xFFFFFFFFUL
369 typedef unsigned long XXH32_hash_t;
371# error "unsupported platform: need a 32-bit type"
603#ifdef __has_attribute
604# define XXH_HAS_ATTRIBUTE(x) __has_attribute(x)
606# define XXH_HAS_ATTRIBUTE(x) 0
610#if defined(__STDC_VERSION__) && (__STDC_VERSION__ > 201710L) && defined(__has_c_attribute)
611# define XXH_HAS_C_ATTRIBUTE(x) __has_c_attribute(x)
613# define XXH_HAS_C_ATTRIBUTE(x) 0
616#if defined(__cplusplus) && defined(__has_cpp_attribute)
617# define XXH_HAS_CPP_ATTRIBUTE(x) __has_cpp_attribute(x)
619# define XXH_HAS_CPP_ATTRIBUTE(x) 0
628#if XXH_HAS_C_ATTRIBUTE(x)
629# define XXH_FALLTHROUGH [[fallthrough]]
630#elif XXH_HAS_CPP_ATTRIBUTE(x)
631# define XXH_FALLTHROUGH [[fallthrough]]
632#elif XXH_HAS_ATTRIBUTE(__fallthrough__)
633# define XXH_FALLTHROUGH __attribute__ ((fallthrough))
635# define XXH_FALLTHROUGH
644#ifndef XXH_NO_LONG_LONG
648#if defined(XXH_DOXYGEN)
655#elif !defined (__VMS) \
656 && (defined (__cplusplus) \
657 || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) ) )
662# if defined(__LP64__) && ULONG_MAX == 0xFFFFFFFFFFFFFFFFULL
/* Minimum byte size accepted for a custom secret by the XXH3_*_withSecret
 * family; shorter secrets are rejected (XXH_ERROR) by the reset variants. */
801#define XXH3_SECRET_SIZE_MIN 136
947#if defined(XXH_STATIC_LINKING_ONLY) && !defined(XXHASH_H_STATIC_13879238742)
948#define XXHASH_H_STATIC_13879238742
975struct XXH32_state_s {
976 XXH32_hash_t total_len_32;
977 XXH32_hash_t large_len;
979 XXH32_hash_t mem32[4];
980 XXH32_hash_t memsize;
981 XXH32_hash_t reserved;
985#ifndef XXH_NO_LONG_LONG
999struct XXH64_state_s {
1003 XXH32_hash_t memsize;
1004 XXH32_hash_t reserved32;
1008#if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201112L)
1009# include <stdalign.h>
1010# define XXH_ALIGN(n) alignas(n)
1011#elif defined(__cplusplus) && (__cplusplus >= 201103L)
1013# define XXH_ALIGN(n) alignas(n)
1014#elif defined(__GNUC__)
1015# define XXH_ALIGN(n) __attribute__ ((aligned(n)))
1016#elif defined(_MSC_VER)
1017# define XXH_ALIGN(n) __declspec(align(n))
1019# define XXH_ALIGN(n)
1023#if !(defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201112L)) \
1024 && ! (defined(__cplusplus) && (__cplusplus >= 201103L)) \
1025 && defined(__GNUC__)
1026# define XXH_ALIGN_MEMBER(align, type) type XXH_ALIGN(align)
1028# define XXH_ALIGN_MEMBER(align, type) XXH_ALIGN(align) type
/* Size of the internal staging buffer inside XXH3_state_s used by the
 * streaming API to accumulate partial input between update() calls. */
1038#define XXH3_INTERNALBUFFER_SIZE 256
/* Size of the seed-derived secret stored inline in XXH3_state_s
 * (customSecret[] below); must be >= XXH3_SECRET_SIZE_MIN. */
1047#define XXH3_SECRET_DEFAULT_SIZE 192
1071struct XXH3_state_s {
1074 XXH_ALIGN_MEMBER(64,
unsigned char customSecret[XXH3_SECRET_DEFAULT_SIZE]);
1076 XXH_ALIGN_MEMBER(64,
unsigned char buffer[XXH3_INTERNALBUFFER_SIZE]);
1078 XXH32_hash_t bufferedSize;
1080 XXH32_hash_t useSeed;
1082 size_t nbStripesSoFar;
1086 size_t nbStripesPerBlock;
1094 const unsigned char* extSecret;
1100#undef XXH_ALIGN_MEMBER
/* Initializes a stack-allocated XXH3_state_t so that a subsequent reset is
 * well-defined: clearing ->seed is sufficient because the reset functions
 * rewrite every other field before it is read. */
1113#define XXH3_INITSTATE(XXH3_state_ptr) { (XXH3_state_ptr)->seed = 0; }
1153XXH_PUBLIC_API XXH_errorcode XXH3_generateSecret(
void* secretBuffer,
size_t secretSize,
const void* customSeed,
size_t customSeedSize);
1199XXH3_64bits_withSecretandSeed(
const void*
data,
size_t len,
1200 const void* secret,
size_t secretSize,
1204XXH3_128bits_withSecretandSeed(
const void*
data,
size_t len,
1205 const void* secret,
size_t secretSize,
1209XXH3_64bits_reset_withSecretandSeed(
XXH3_state_t* statePtr,
1210 const void* secret,
size_t secretSize,
1214XXH3_128bits_reset_withSecretandSeed(
XXH3_state_t* statePtr,
1215 const void* secret,
size_t secretSize,
1220#if defined(XXH_INLINE_ALL) || defined(XXH_PRIVATE_API)
1221# define XXH_IMPLEMENTATION
1254#if ( defined(XXH_INLINE_ALL) || defined(XXH_PRIVATE_API) \
1255 || defined(XXH_IMPLEMENTATION) ) && !defined(XXH_IMPLEM_13a8737387)
1256# define XXH_IMPLEM_13a8737387
1274# define XXH_NO_LONG_LONG
1275# undef XXH_NO_LONG_LONG
1326# define XXH_FORCE_MEMORY_ACCESS 0
1354# define XXH_FORCE_ALIGN_CHECK 0
1376# define XXH_NO_INLINE_HINTS 0
1393# define XXH3_INLINE_SECRET 0
1405# define XXH32_ENDJMP 0
1414# define XXH_OLD_NAMES
1415# undef XXH_OLD_NAMES
1421#ifndef XXH_FORCE_MEMORY_ACCESS
1423# if !defined(__clang__) && \
1425 (defined(__INTEL_COMPILER) && !defined(_WIN32)) || \
1427 defined(__GNUC__) && ( \
1428 (defined(__ARM_ARCH) && __ARM_ARCH >= 7) || \
1430 defined(__mips__) && \
1431 (__mips <= 5 || __mips_isa_rev < 6) && \
1432 (!defined(__mips16) || defined(__mips_mips16e2)) \
1437# define XXH_FORCE_MEMORY_ACCESS 1
1441#ifndef XXH_FORCE_ALIGN_CHECK
1442# if defined(__i386) || defined(__x86_64__) || defined(__aarch64__) \
1443 || defined(_M_IX86) || defined(_M_X64) || defined(_M_ARM64)
1444# define XXH_FORCE_ALIGN_CHECK 0
1446# define XXH_FORCE_ALIGN_CHECK 1
1450#ifndef XXH_NO_INLINE_HINTS
1451# if defined(__OPTIMIZE_SIZE__) \
1452 || defined(__NO_INLINE__)
1453# define XXH_NO_INLINE_HINTS 1
1455# define XXH_NO_INLINE_HINTS 0
1459#ifndef XXH3_INLINE_SECRET
1460# if (defined(__GNUC__) && !defined(__clang__) && __GNUC__ >= 12) \
1461 || !defined(XXH_INLINE_ALL)
1462# define XXH3_INLINE_SECRET 0
1464# define XXH3_INLINE_SECRET 1
1470# define XXH32_ENDJMP 0
1492static void* XXH_malloc(
size_t s) {
return malloc(
s); }
1498static void XXH_free(
void*
p) { free(
p); }
1506static void* XXH_memcpy(
void* dest,
const void* src,
size_t size)
1518# pragma warning(disable : 4127)
1521#if XXH_NO_INLINE_HINTS
1522# if defined(__GNUC__) || defined(__clang__)
1523# define XXH_FORCE_INLINE static __attribute__((unused))
1525# define XXH_FORCE_INLINE static
1527# define XXH_NO_INLINE static
1529#elif defined(__GNUC__) || defined(__clang__)
1530# define XXH_FORCE_INLINE static __inline__ __attribute__((always_inline, unused))
1531# define XXH_NO_INLINE static __attribute__((noinline))
1532#elif defined(_MSC_VER)
1533# define XXH_FORCE_INLINE static __forceinline
1534# define XXH_NO_INLINE static __declspec(noinline)
1535#elif defined (__cplusplus) \
1536 || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L))
1537# define XXH_FORCE_INLINE static inline
1538# define XXH_NO_INLINE static
1540# define XXH_FORCE_INLINE static
1541# define XXH_NO_INLINE static
1544#if XXH3_INLINE_SECRET
1545# define XXH3_WITH_SECRET_INLINE XXH_FORCE_INLINE
1547# define XXH3_WITH_SECRET_INLINE XXH_NO_INLINE
1562#ifndef XXH_DEBUGLEVEL
1564# define XXH_DEBUGLEVEL DEBUGLEVEL
1566# define XXH_DEBUGLEVEL 0
1570#if (XXH_DEBUGLEVEL>=1)
1572# define XXH_ASSERT(c) assert(c)
1574# define XXH_ASSERT(c) ((void)0)
1578#ifndef XXH_STATIC_ASSERT
1579# if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201112L)
1580# define XXH_STATIC_ASSERT_WITH_MESSAGE(c,m) do { _Static_assert((c),m); } while(0)
1581# elif defined(__cplusplus) && (__cplusplus >= 201103L)
1582# define XXH_STATIC_ASSERT_WITH_MESSAGE(c,m) do { static_assert((c),m); } while(0)
1584# define XXH_STATIC_ASSERT_WITH_MESSAGE(c,m) do { struct xxh_sa { char x[(c) ? 1 : -1]; }; } while(0)
1586# define XXH_STATIC_ASSERT(c) XXH_STATIC_ASSERT_WITH_MESSAGE((c),#c)
1605#if defined(__GNUC__) || defined(__clang__)
1606# define XXH_COMPILER_GUARD(var) __asm__ __volatile__("" : "+r" (var))
1608# define XXH_COMPILER_GUARD(var) ((void)0)
1614#if !defined (__VMS) \
1615 && (defined (__cplusplus) \
1616 || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) ) )
1618 typedef uint8_t xxh_u8;
1620 typedef unsigned char xxh_u8;
1622typedef XXH32_hash_t xxh_u32;
1682#if (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==3))
1687#elif (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==2))
1693static xxh_u32 XXH_read32(
const void* memPtr) {
return *(
const xxh_u32*) memPtr; }
1695#elif (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==1))
1704typedef union { xxh_u32 u32; }
__attribute__((packed)) unalign;
1706static xxh_u32 XXH_read32(
const void*
ptr)
1708 typedef union { xxh_u32 u32; }
__attribute__((packed)) xxh_unalign;
1709 return ((
const xxh_unalign*)
ptr)->u32;
1718static xxh_u32 XXH_read32(
const void* memPtr)
1721 XXH_memcpy(&
val, memPtr,
sizeof(
val));
1746#ifndef XXH_CPU_LITTLE_ENDIAN
1751# if defined(_WIN32) \
1752 || defined(__LITTLE_ENDIAN__) \
1753 || (defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__)
1754# define XXH_CPU_LITTLE_ENDIAN 1
1755# elif defined(__BIG_ENDIAN__) \
1756 || (defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__)
1757# define XXH_CPU_LITTLE_ENDIAN 0
1765static int XXH_isLittleEndian(
void)
1771 const union { xxh_u32
u; xxh_u8 c[4]; } one = { 1 };
1774# define XXH_CPU_LITTLE_ENDIAN XXH_isLittleEndian()
1784#define XXH_GCC_VERSION (__GNUC__ * 100 + __GNUC_MINOR__)
1787# define XXH_HAS_BUILTIN(x) __has_builtin(x)
1789# define XXH_HAS_BUILTIN(x) 0
/* Rotate-left selection, in preference order:
 *  1. clang's __builtin_rotateleft{32,64} (guaranteed rotate instruction),
 *  2. MSVC's _rotl/_rotl64 intrinsics,
 *  3. a portable shift/or fallback.
 * NOTE(review): the portable fallback is UB when r==0 or r>=bit width;
 * presumably all call sites use constant shifts in 1..31 / 1..63 — confirm
 * against the elided rounds. (#else/#endif guard lines are elided here.) */
1805#if !defined(NO_CLANG_BUILTIN) && XXH_HAS_BUILTIN(__builtin_rotateleft32) \
1806 && XXH_HAS_BUILTIN(__builtin_rotateleft64)
1807# define XXH_rotl32 __builtin_rotateleft32
1808# define XXH_rotl64 __builtin_rotateleft64
1810#elif defined(_MSC_VER)
1811# define XXH_rotl32(x,r) _rotl(x,r)
1812# define XXH_rotl64(x,r) _rotl64(x,r)
1814# define XXH_rotl32(x,r) (((x) << (r)) | ((x) >> (32 - (r))))
1815# define XXH_rotl64(x,r) (((x) << (r)) | ((x) >> (64 - (r))))
1826#if defined(_MSC_VER)
1827# define XXH_swap32 _byteswap_ulong
1828#elif XXH_GCC_VERSION >= 403
1829# define XXH_swap32 __builtin_bswap32
1831static xxh_u32 XXH_swap32 (xxh_u32 x)
1833 return ((x << 24) & 0xff000000 ) |
1834 ((x << 8) & 0x00ff0000 ) |
1835 ((x >> 8) & 0x0000ff00 ) |
1836 ((x >> 24) & 0x000000ff );
1859#if (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==3))
1861XXH_FORCE_INLINE xxh_u32 XXH_readLE32(
const void* memPtr)
1863 const xxh_u8* bytePtr = (
const xxh_u8 *)memPtr;
1865 | ((xxh_u32)bytePtr[1] << 8)
1866 | ((xxh_u32)bytePtr[2] << 16)
1867 | ((xxh_u32)bytePtr[3] << 24);
1870XXH_FORCE_INLINE xxh_u32 XXH_readBE32(
const void* memPtr)
1872 const xxh_u8* bytePtr = (
const xxh_u8 *)memPtr;
1874 | ((xxh_u32)bytePtr[2] << 8)
1875 | ((xxh_u32)bytePtr[1] << 16)
1876 | ((xxh_u32)bytePtr[0] << 24);
1880XXH_FORCE_INLINE xxh_u32 XXH_readLE32(
const void*
ptr)
1882 return XXH_CPU_LITTLE_ENDIAN ? XXH_read32(
ptr) : XXH_swap32(XXH_read32(
ptr));
1885static xxh_u32 XXH_readBE32(
const void*
ptr)
1887 return XXH_CPU_LITTLE_ENDIAN ? XXH_swap32(XXH_read32(
ptr)) : XXH_read32(
ptr);
1891XXH_FORCE_INLINE xxh_u32
1892XXH_readLE32_align(
const void*
ptr, XXH_alignment
align)
1894 if (
align==XXH_unaligned) {
1895 return XXH_readLE32(
ptr);
1897 return XXH_CPU_LITTLE_ENDIAN ? *(
const xxh_u32*)
ptr : XXH_swap32(*(const xxh_u32*)
ptr);
/* XXH32 mixing primes. These exact constants are part of the XXH32
 * specification: changing any of them changes every produced hash. */
1919#define XXH_PRIME32_1 0x9E3779B1U
1920#define XXH_PRIME32_2 0x85EBCA77U
1921#define XXH_PRIME32_3 0xC2B2AE3DU
1922#define XXH_PRIME32_4 0x27D4EB2FU
1923#define XXH_PRIME32_5 0x165667B1U
/* Legacy un-prefixed aliases for source compatibility with older xxHash
 * releases; gated upstream by XXH_OLD_NAMES (guard lines elided here). */
1926# define PRIME32_1 XXH_PRIME32_1
1927# define PRIME32_2 XXH_PRIME32_2
1928# define PRIME32_3 XXH_PRIME32_3
1929# define PRIME32_4 XXH_PRIME32_4
1930# define PRIME32_5 XXH_PRIME32_5
1944static xxh_u32 XXH32_round(xxh_u32 acc, xxh_u32 input)
1946 acc += input * XXH_PRIME32_2;
1947 acc = XXH_rotl32(acc, 13);
1948 acc *= XXH_PRIME32_1;
1949#if (defined(__SSE4_1__) || defined(__aarch64__)) && !defined(XXH_ENABLE_AUTOVECTORIZE)
1983 XXH_COMPILER_GUARD(acc);
1998static xxh_u32 XXH32_avalanche(xxh_u32 h32)
2001 h32 *= XXH_PRIME32_2;
2003 h32 *= XXH_PRIME32_3;
2008#define XXH_get32bits(p) XXH_readLE32_align(p, align)
2025XXH32_finalize(xxh_u32 h32,
const xxh_u8*
ptr,
size_t len, XXH_alignment
align)
2027#define XXH_PROCESS1 do { \
2028 h32 += (*ptr++) * XXH_PRIME32_5; \
2029 h32 = XXH_rotl32(h32, 11) * XXH_PRIME32_1; \
2032#define XXH_PROCESS4 do { \
2033 h32 += XXH_get32bits(ptr) * XXH_PRIME32_3; \
2035 h32 = XXH_rotl32(h32, 17) * XXH_PRIME32_4; \
2041 if (!XXH32_ENDJMP) {
2051 return XXH32_avalanche(h32);
2054 case 12: XXH_PROCESS4;
2056 case 8: XXH_PROCESS4;
2058 case 4: XXH_PROCESS4;
2059 return XXH32_avalanche(h32);
2061 case 13: XXH_PROCESS4;
2063 case 9: XXH_PROCESS4;
2065 case 5: XXH_PROCESS4;
2067 return XXH32_avalanche(h32);
2069 case 14: XXH_PROCESS4;
2071 case 10: XXH_PROCESS4;
2073 case 6: XXH_PROCESS4;
2076 return XXH32_avalanche(h32);
2078 case 15: XXH_PROCESS4;
2080 case 11: XXH_PROCESS4;
2082 case 7: XXH_PROCESS4;
2084 case 3: XXH_PROCESS1;
2086 case 2: XXH_PROCESS1;
2088 case 1: XXH_PROCESS1;
2090 case 0:
return XXH32_avalanche(h32);
2098# define PROCESS1 XXH_PROCESS1
2099# define PROCESS4 XXH_PROCESS4
2113XXH_FORCE_INLINE xxh_u32
2114XXH32_endian_align(
const xxh_u8* input,
size_t len, xxh_u32 seed, XXH_alignment
align)
2118 if (input==
NULL) XXH_ASSERT(
len == 0);
2121 const xxh_u8*
const bEnd = input +
len;
2122 const xxh_u8*
const limit = bEnd - 15;
2123 xxh_u32 v1 = seed + XXH_PRIME32_1 + XXH_PRIME32_2;
2124 xxh_u32 v2 = seed + XXH_PRIME32_2;
2125 xxh_u32 v3 = seed + 0;
2126 xxh_u32 v4 = seed - XXH_PRIME32_1;
2129 v1 = XXH32_round(v1, XXH_get32bits(input)); input += 4;
2130 v2 = XXH32_round(v2, XXH_get32bits(input)); input += 4;
2131 v3 = XXH32_round(v3, XXH_get32bits(input)); input += 4;
2132 v4 = XXH32_round(v4, XXH_get32bits(input)); input += 4;
2133 }
while (input < limit);
2135 h32 = XXH_rotl32(v1, 1) + XXH_rotl32(v2, 7)
2136 + XXH_rotl32(v3, 12) + XXH_rotl32(v4, 18);
2138 h32 = seed + XXH_PRIME32_5;
2141 h32 += (xxh_u32)
len;
2143 return XXH32_finalize(h32, input,
len&15,
align);
2156 if (XXH_FORCE_ALIGN_CHECK) {
2157 if ((((
size_t)input) & 3) == 0) {
2158 return XXH32_endian_align((
const xxh_u8*)input,
len, seed, XXH_aligned);
2161 return XXH32_endian_align((
const xxh_u8*)input,
len, seed, XXH_unaligned);
2185 XXH_memcpy(dstState, srcState,
sizeof(*dstState));
2193 state.v[0] = seed + XXH_PRIME32_1 + XXH_PRIME32_2;
2194 state.v[1] = seed + XXH_PRIME32_2;
2195 state.v[2] = seed + 0;
2196 state.v[3] = seed - XXH_PRIME32_1;
2208 XXH_ASSERT(
len == 0);
2212 {
const xxh_u8*
p = (
const xxh_u8*)input;
2213 const xxh_u8*
const bEnd =
p +
len;
2215 state->total_len_32 += (XXH32_hash_t)
len;
2216 state->large_len |= (XXH32_hash_t)((
len>=16) | (
state->total_len_32>=16));
2219 XXH_memcpy((xxh_u8*)(
state->mem32) +
state->memsize, input,
len);
2220 state->memsize += (XXH32_hash_t)
len;
2224 if (
state->memsize) {
2225 XXH_memcpy((xxh_u8*)(
state->mem32) +
state->memsize, input, 16-
state->memsize);
2226 {
const xxh_u32* p32 =
state->mem32;
2227 state->v[0] = XXH32_round(
state->v[0], XXH_readLE32(p32)); p32++;
2228 state->v[1] = XXH32_round(
state->v[1], XXH_readLE32(p32)); p32++;
2229 state->v[2] = XXH32_round(
state->v[2], XXH_readLE32(p32)); p32++;
2230 state->v[3] = XXH32_round(
state->v[3], XXH_readLE32(p32));
2237 const xxh_u8*
const limit = bEnd - 16;
2240 state->v[0] = XXH32_round(
state->v[0], XXH_readLE32(
p));
p+=4;
2241 state->v[1] = XXH32_round(
state->v[1], XXH_readLE32(
p));
p+=4;
2242 state->v[2] = XXH32_round(
state->v[2], XXH_readLE32(
p));
p+=4;
2243 state->v[3] = XXH32_round(
state->v[3], XXH_readLE32(
p));
p+=4;
2249 XXH_memcpy(
state->mem32,
p, (
size_t)(bEnd-
p));
2250 state->memsize = (unsigned)(bEnd-
p);
2263 if (
state->large_len) {
2264 h32 = XXH_rotl32(
state->v[0], 1)
2265 + XXH_rotl32(
state->v[1], 7)
2266 + XXH_rotl32(
state->v[2], 12)
2267 + XXH_rotl32(
state->v[3], 18);
2269 h32 =
state->v[2] + XXH_PRIME32_5;
2272 h32 +=
state->total_len_32;
2274 return XXH32_finalize(h32, (
const xxh_u8*)
state->mem32,
state->memsize, XXH_aligned);
2297 if (XXH_CPU_LITTLE_ENDIAN)
hash = XXH_swap32(
hash);
2298 XXH_memcpy(dst, &
hash,
sizeof(*dst));
2303 return XXH_readBE32(src);
2307#ifndef XXH_NO_LONG_LONG
2325#if (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==3))
2330#elif (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==2))
2333static xxh_u64 XXH_read64(
const void* memPtr)
2335 return *(
const xxh_u64*) memPtr;
2338#elif (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==1))
2347typedef union { xxh_u32 u32; xxh_u64 u64; }
__attribute__((packed)) unalign64;
2349static xxh_u64 XXH_read64(
const void*
ptr)
2351 typedef union { xxh_u32 u32; xxh_u64 u64; }
__attribute__((packed)) xxh_unalign64;
2352 return ((
const xxh_unalign64*)
ptr)->u64;
2361static xxh_u64 XXH_read64(
const void* memPtr)
2364 XXH_memcpy(&
val, memPtr,
sizeof(
val));
2370#if defined(_MSC_VER)
2371# define XXH_swap64 _byteswap_uint64
2372#elif XXH_GCC_VERSION >= 403
2373# define XXH_swap64 __builtin_bswap64
2375static xxh_u64 XXH_swap64(xxh_u64 x)
2377 return ((x << 56) & 0xff00000000000000ULL) |
2378 ((x << 40) & 0x00ff000000000000ULL) |
2379 ((x << 24) & 0x0000ff0000000000ULL) |
2380 ((x << 8) & 0x000000ff00000000ULL) |
2381 ((x >> 8) & 0x00000000ff000000ULL) |
2382 ((x >> 24) & 0x0000000000ff0000ULL) |
2383 ((x >> 40) & 0x000000000000ff00ULL) |
2384 ((x >> 56) & 0x00000000000000ffULL);
2390#if (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==3))
2392XXH_FORCE_INLINE xxh_u64 XXH_readLE64(
const void* memPtr)
2394 const xxh_u8* bytePtr = (
const xxh_u8 *)memPtr;
2396 | ((xxh_u64)bytePtr[1] << 8)
2397 | ((xxh_u64)bytePtr[2] << 16)
2398 | ((xxh_u64)bytePtr[3] << 24)
2399 | ((xxh_u64)bytePtr[4] << 32)
2400 | ((xxh_u64)bytePtr[5] << 40)
2401 | ((xxh_u64)bytePtr[6] << 48)
2402 | ((xxh_u64)bytePtr[7] << 56);
2405XXH_FORCE_INLINE xxh_u64 XXH_readBE64(
const void* memPtr)
2407 const xxh_u8* bytePtr = (
const xxh_u8 *)memPtr;
2409 | ((xxh_u64)bytePtr[6] << 8)
2410 | ((xxh_u64)bytePtr[5] << 16)
2411 | ((xxh_u64)bytePtr[4] << 24)
2412 | ((xxh_u64)bytePtr[3] << 32)
2413 | ((xxh_u64)bytePtr[2] << 40)
2414 | ((xxh_u64)bytePtr[1] << 48)
2415 | ((xxh_u64)bytePtr[0] << 56);
2419XXH_FORCE_INLINE xxh_u64 XXH_readLE64(
const void*
ptr)
2421 return XXH_CPU_LITTLE_ENDIAN ? XXH_read64(
ptr) : XXH_swap64(XXH_read64(
ptr));
2424static xxh_u64 XXH_readBE64(
const void*
ptr)
2426 return XXH_CPU_LITTLE_ENDIAN ? XXH_swap64(XXH_read64(
ptr)) : XXH_read64(
ptr);
2430XXH_FORCE_INLINE xxh_u64
2431XXH_readLE64_align(
const void*
ptr, XXH_alignment
align)
2433 if (
align==XXH_unaligned)
2434 return XXH_readLE64(
ptr);
2436 return XXH_CPU_LITTLE_ENDIAN ? *(
const xxh_u64*)
ptr : XXH_swap64(*(const xxh_u64*)
ptr);
/* XXH64 mixing primes. As with the 32-bit set, these exact values define
 * the XXH64 algorithm and must not be altered. */
2448#define XXH_PRIME64_1 0x9E3779B185EBCA87ULL
2449#define XXH_PRIME64_2 0xC2B2AE3D27D4EB4FULL
2450#define XXH_PRIME64_3 0x165667B19E3779F9ULL
2451#define XXH_PRIME64_4 0x85EBCA77C2B2AE63ULL
2452#define XXH_PRIME64_5 0x27D4EB2F165667C5ULL
/* Legacy un-prefixed aliases for source compatibility with older xxHash
 * releases; gated upstream by XXH_OLD_NAMES (guard lines elided here). */
2455# define PRIME64_1 XXH_PRIME64_1
2456# define PRIME64_2 XXH_PRIME64_2
2457# define PRIME64_3 XXH_PRIME64_3
2458# define PRIME64_4 XXH_PRIME64_4
2459# define PRIME64_5 XXH_PRIME64_5
2462static xxh_u64 XXH64_round(xxh_u64 acc, xxh_u64 input)
2464 acc += input * XXH_PRIME64_2;
2465 acc = XXH_rotl64(acc, 31);
2466 acc *= XXH_PRIME64_1;
2470static xxh_u64 XXH64_mergeRound(xxh_u64 acc, xxh_u64
val)
2472 val = XXH64_round(0,
val);
2474 acc = acc * XXH_PRIME64_1 + XXH_PRIME64_4;
2478static xxh_u64 XXH64_avalanche(xxh_u64 h64)
2481 h64 *= XXH_PRIME64_2;
2483 h64 *= XXH_PRIME64_3;
2489#define XXH_get64bits(p) XXH_readLE64_align(p, align)
2492XXH64_finalize(xxh_u64 h64,
const xxh_u8*
ptr,
size_t len, XXH_alignment
align)
2497 xxh_u64
const k1 = XXH64_round(0, XXH_get64bits(
ptr));
2500 h64 = XXH_rotl64(h64,27) * XXH_PRIME64_1 + XXH_PRIME64_4;
2504 h64 ^= (xxh_u64)(XXH_get32bits(
ptr)) * XXH_PRIME64_1;
2506 h64 = XXH_rotl64(h64, 23) * XXH_PRIME64_2 + XXH_PRIME64_3;
2510 h64 ^= (*
ptr++) * XXH_PRIME64_5;
2511 h64 = XXH_rotl64(h64, 11) * XXH_PRIME64_1;
2514 return XXH64_avalanche(h64);
2518# define PROCESS1_64 XXH_PROCESS1_64
2519# define PROCESS4_64 XXH_PROCESS4_64
2520# define PROCESS8_64 XXH_PROCESS8_64
2522# undef XXH_PROCESS1_64
2523# undef XXH_PROCESS4_64
2524# undef XXH_PROCESS8_64
2527XXH_FORCE_INLINE xxh_u64
2528XXH64_endian_align(
const xxh_u8* input,
size_t len, xxh_u64 seed, XXH_alignment
align)
2531 if (input==
NULL) XXH_ASSERT(
len == 0);
2534 const xxh_u8*
const bEnd = input +
len;
2535 const xxh_u8*
const limit = bEnd - 31;
2536 xxh_u64 v1 = seed + XXH_PRIME64_1 + XXH_PRIME64_2;
2537 xxh_u64 v2 = seed + XXH_PRIME64_2;
2538 xxh_u64 v3 = seed + 0;
2539 xxh_u64 v4 = seed - XXH_PRIME64_1;
2542 v1 = XXH64_round(v1, XXH_get64bits(input)); input+=8;
2543 v2 = XXH64_round(v2, XXH_get64bits(input)); input+=8;
2544 v3 = XXH64_round(v3, XXH_get64bits(input)); input+=8;
2545 v4 = XXH64_round(v4, XXH_get64bits(input)); input+=8;
2546 }
while (input<limit);
2548 h64 = XXH_rotl64(v1, 1) + XXH_rotl64(v2, 7) + XXH_rotl64(v3, 12) + XXH_rotl64(v4, 18);
2549 h64 = XXH64_mergeRound(h64, v1);
2550 h64 = XXH64_mergeRound(h64, v2);
2551 h64 = XXH64_mergeRound(h64, v3);
2552 h64 = XXH64_mergeRound(h64, v4);
2555 h64 = seed + XXH_PRIME64_5;
2558 h64 += (xxh_u64)
len;
2560 return XXH64_finalize(h64, input,
len,
align);
2574 if (XXH_FORCE_ALIGN_CHECK) {
2575 if ((((
size_t)input) & 7)==0) {
2576 return XXH64_endian_align((
const xxh_u8*)input,
len, seed, XXH_aligned);
2579 return XXH64_endian_align((
const xxh_u8*)input,
len, seed, XXH_unaligned);
2601 XXH_memcpy(dstState, srcState,
sizeof(*dstState));
2609 state.v[0] = seed + XXH_PRIME64_1 + XXH_PRIME64_2;
2610 state.v[1] = seed + XXH_PRIME64_2;
2611 state.v[2] = seed + 0;
2612 state.v[3] = seed - XXH_PRIME64_1;
2614 XXH_memcpy(statePtr, &
state,
sizeof(
state) -
sizeof(
state.reserved64));
2623 XXH_ASSERT(
len == 0);
2627 {
const xxh_u8*
p = (
const xxh_u8*)input;
2628 const xxh_u8*
const bEnd =
p +
len;
2633 XXH_memcpy(((xxh_u8*)
state->mem64) +
state->memsize, input,
len);
2638 if (
state->memsize) {
2639 XXH_memcpy(((xxh_u8*)
state->mem64) +
state->memsize, input, 32-
state->memsize);
2644 p += 32 -
state->memsize;
2649 const xxh_u8*
const limit = bEnd - 32;
2652 state->v[0] = XXH64_round(
state->v[0], XXH_readLE64(
p));
p+=8;
2653 state->v[1] = XXH64_round(
state->v[1], XXH_readLE64(
p));
p+=8;
2654 state->v[2] = XXH64_round(
state->v[2], XXH_readLE64(
p));
p+=8;
2655 state->v[3] = XXH64_round(
state->v[3], XXH_readLE64(
p));
p+=8;
2661 XXH_memcpy(
state->mem64,
p, (
size_t)(bEnd-
p));
2662 state->memsize = (unsigned)(bEnd-
p);
2675 if (
state->total_len >= 32) {
2676 h64 = XXH_rotl64(
state->v[0], 1) + XXH_rotl64(
state->v[1], 7) + XXH_rotl64(
state->v[2], 12) + XXH_rotl64(
state->v[3], 18);
2677 h64 = XXH64_mergeRound(h64,
state->v[0]);
2678 h64 = XXH64_mergeRound(h64,
state->v[1]);
2679 h64 = XXH64_mergeRound(h64,
state->v[2]);
2680 h64 = XXH64_mergeRound(h64,
state->v[3]);
2682 h64 =
state->v[2] + XXH_PRIME64_5;
2685 h64 += (xxh_u64)
state->total_len;
2687 return XXH64_finalize(h64, (
const xxh_u8*)
state->mem64, (
size_t)
state->total_len, XXH_aligned);
2697 if (XXH_CPU_LITTLE_ENDIAN)
hash = XXH_swap64(
hash);
2698 XXH_memcpy(dst, &
hash,
sizeof(*dst));
2704 return XXH_readBE64(src);
2722#if ((defined(sun) || defined(__sun)) && __cplusplus)
2723# define XXH_RESTRICT
2724#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
2725# define XXH_RESTRICT restrict
2728# define XXH_RESTRICT
2731#if (defined(__GNUC__) && (__GNUC__ >= 3)) \
2732 || (defined(__INTEL_COMPILER) && (__INTEL_COMPILER >= 800)) \
2733 || defined(__clang__)
2734# define XXH_likely(x) __builtin_expect(x, 1)
2735# define XXH_unlikely(x) __builtin_expect(x, 0)
2737# define XXH_likely(x) (x)
2738# define XXH_unlikely(x) (x)
2741#if defined(__GNUC__)
2742# if defined(__AVX2__)
2743# include <immintrin.h>
2744# elif defined(__SSE2__)
2745# include <emmintrin.h>
2746# elif defined(__ARM_NEON__) || defined(__ARM_NEON)
2747# define inline __inline__
2748# include <arm_neon.h>
2751#elif defined(_MSC_VER)
2824#if defined(__thumb__) && !defined(__thumb2__) && defined(__ARM_ARCH_ISA_ARM)
2825# warning "XXH3 is highly inefficient without ARM or Thumb-2."
2843# define XXH_VECTOR XXH_SCALAR
2853enum XXH_VECTOR_TYPE {
2875# define XXH_ACC_ALIGN 8
2880# define XXH_SCALAR 0
2883# define XXH_AVX512 3
2889# if defined(__AVX512F__)
2890# define XXH_VECTOR XXH_AVX512
2891# elif defined(__AVX2__)
2892# define XXH_VECTOR XXH_AVX2
2893# elif defined(__SSE2__) || defined(_M_AMD64) || defined(_M_X64) || (defined(_M_IX86_FP) && (_M_IX86_FP == 2))
2894# define XXH_VECTOR XXH_SSE2
2896 defined(__ARM_NEON__) || defined(__ARM_NEON) \
2897 || defined(_M_ARM64) || defined(_M_ARM_ARMV7VE) \
2899 defined(_WIN32) || defined(__LITTLE_ENDIAN__) \
2900 || (defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__) \
2902# define XXH_VECTOR XXH_NEON
2903# elif (defined(__PPC64__) && defined(__POWER8_VECTOR__)) \
2904 || (defined(__s390x__) && defined(__VEC__)) \
2905 && defined(__GNUC__)
2906# define XXH_VECTOR XXH_VSX
2908# define XXH_VECTOR XXH_SCALAR
2916#ifndef XXH_ACC_ALIGN
2917# if defined(XXH_X86DISPATCH)
2918# define XXH_ACC_ALIGN 64
2919# elif XXH_VECTOR == XXH_SCALAR
2920# define XXH_ACC_ALIGN 8
2921# elif XXH_VECTOR == XXH_SSE2
2922# define XXH_ACC_ALIGN 16
2923# elif XXH_VECTOR == XXH_AVX2
2924# define XXH_ACC_ALIGN 32
2925# elif XXH_VECTOR == XXH_NEON
2926# define XXH_ACC_ALIGN 16
2927# elif XXH_VECTOR == XXH_VSX
2928# define XXH_ACC_ALIGN 16
2929# elif XXH_VECTOR == XXH_AVX512
2930# define XXH_ACC_ALIGN 64
2934#if defined(XXH_X86DISPATCH) || XXH_VECTOR == XXH_SSE2 \
2935 || XXH_VECTOR == XXH_AVX2 || XXH_VECTOR == XXH_AVX512
2936# define XXH_SEC_ALIGN XXH_ACC_ALIGN
2938# define XXH_SEC_ALIGN 8
2962#if XXH_VECTOR == XXH_AVX2 \
2963 && defined(__GNUC__) && !defined(__clang__) \
2964 && defined(__OPTIMIZE__) && !defined(__OPTIMIZE_SIZE__)
2965# pragma GCC push_options
2966# pragma GCC optimize("-O2")
2970#if XXH_VECTOR == XXH_NEON
3051# if !defined(XXH_NO_VZIP_HACK) \
3052 && defined(__GNUC__) \
3053 && !defined(__aarch64__) && !defined(__arm64__) && !defined(_M_ARM64)
3054# define XXH_SPLIT_IN_PLACE(in, outLo, outHi) \
3059 __asm__("vzip.32 %e0, %f0" : "+w" (in)); \
3060 (outLo) = vget_low_u32 (vreinterpretq_u32_u64(in)); \
3061 (outHi) = vget_high_u32(vreinterpretq_u32_u64(in)); \
3064# define XXH_SPLIT_IN_PLACE(in, outLo, outHi) \
3066 (outLo) = vmovn_u64 (in); \
3067 (outHi) = vshrn_n_u64 ((in), 32); \
/* VSX / z/Vector support: pull in the right intrinsics header and declare
 * local vector typedefs (Apple's altivec.h needs __APPLE_ALTIVEC__ to avoid
 * the context-sensitive "vector" keyword). */
3080#if XXH_VECTOR == XXH_VSX
3081# if defined(__s390x__)
3082# include <s390intrin.h>
3092# if defined(__GNUC__) && !defined(__APPLE_ALTIVEC__)
3093# define __APPLE_ALTIVEC__
3095# include <altivec.h>
/* Vector element typedefs used by the VSX code path. */
3098typedef __vector
unsigned long long xxh_u64x2;
3099typedef __vector
unsigned char xxh_u8x16;
3100typedef __vector
unsigned xxh_u32x4;
/* Detect big-endian vector element order; -maltivec=be is discouraged. */
3103# if defined(__BIG_ENDIAN__) \
3104 || (defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__)
3105# define XXH_VSX_BE 1
3106# elif defined(__VEC_ELEMENT_REG_ORDER__) && __VEC_ELEMENT_REG_ORDER__ == __ORDER_BIG_ENDIAN__
3107# warning "-maltivec=be is not recommended. Please use native endianness."
3108# define XXH_VSX_BE 1
3110# define XXH_VSX_BE 0
/* POWER9 / clang-on-s390x provide a native vec_revb byteswap. */
3115# if defined(__POWER9_VECTOR__) || (defined(__clang__) && defined(__s390x__))
3116# define XXH_vec_revb vec_revb
3121XXH_FORCE_INLINE xxh_u64x2 XXH_vec_revb(xxh_u64x2
val)
3123 xxh_u8x16
const vByteSwap = { 0x07, 0x06, 0x05, 0x04, 0x03, 0x02, 0x01, 0x00,
3124 0x0F, 0x0E, 0x0D, 0x0C, 0x0B, 0x0A, 0x09, 0x08 };
3125 return vec_perm(
val,
val, vByteSwap);
3133XXH_FORCE_INLINE xxh_u64x2 XXH_vec_loadu(
const void *
ptr)
3136 XXH_memcpy(&
ret,
ptr,
sizeof(xxh_u64x2));
/* Pick the widening 32x32->64 odd/even multiplies: native on s390x,
 * clang builtins where available, otherwise inline asm fallbacks below. */
3149# if defined(__s390x__)
3151# define XXH_vec_mulo vec_mulo
3152# define XXH_vec_mule vec_mule
3153# elif defined(__clang__) && XXH_HAS_BUILTIN(__builtin_altivec_vmuleuw)
3155# define XXH_vec_mulo __builtin_altivec_vmulouw
3156# define XXH_vec_mule __builtin_altivec_vmuleuw
3160XXH_FORCE_INLINE xxh_u64x2 XXH_vec_mulo(xxh_u32x4
a, xxh_u32x4 b)
3163 __asm__(
"vmulouw %0, %1, %2" :
"=v" (
result) :
"v" (
a),
"v" (b));
3166XXH_FORCE_INLINE xxh_u64x2 XXH_vec_mule(xxh_u32x4
a, xxh_u32x4 b)
3169 __asm__(
"vmuleuw %0, %1, %2" :
"=v" (
result) :
"v" (
a),
"v" (b));
/* XXH_PREFETCH(ptr): best-effort data prefetch hint; compiles to a no-op
 * when disabled or when no intrinsic is available. */
3178#if defined(XXH_NO_PREFETCH)
3179# define XXH_PREFETCH(ptr) (void)(ptr)
3181# if defined(_MSC_VER) && (defined(_M_X64) || defined(_M_IX86))
3182# include <mmintrin.h>
3183# define XXH_PREFETCH(ptr) _mm_prefetch((const char*)(ptr), _MM_HINT_T0)
3184# elif defined(__GNUC__) && ( (__GNUC__ >= 4) || ( (__GNUC__ == 3) && (__GNUC_MINOR__ >= 1) ) )
3185# define XXH_PREFETCH(ptr) __builtin_prefetch((ptr), 0 , 3 )
3187# define XXH_PREFETCH(ptr) (void)(ptr)
3196#define XXH_SECRET_DEFAULT_SIZE 192
3198#if (XXH_SECRET_DEFAULT_SIZE < XXH3_SECRET_SIZE_MIN)
3199# error "default keyset is not large enough"
3203XXH_ALIGN(64) static const xxh_u8 XXH3_kSecret[XXH_SECRET_DEFAULT_SIZE] = {
3204 0xb8, 0xfe, 0x6c, 0x39, 0x23, 0xa4, 0x4b, 0xbe, 0x7c, 0x01, 0x81, 0x2c, 0xf7, 0x21, 0xad, 0x1c,
3205 0xde, 0xd4, 0x6d, 0xe9, 0x83, 0x90, 0x97, 0xdb, 0x72, 0x40, 0xa4, 0xa4, 0xb7, 0xb3, 0x67, 0x1f,
3206 0xcb, 0x79, 0xe6, 0x4e, 0xcc, 0xc0, 0xe5, 0x78, 0x82, 0x5a, 0xd0, 0x7d, 0xcc, 0xff, 0x72, 0x21,
3207 0xb8, 0x08, 0x46, 0x74, 0xf7, 0x43, 0x24, 0x8e, 0xe0, 0x35, 0x90, 0xe6, 0x81, 0x3a, 0x26, 0x4c,
3208 0x3c, 0x28, 0x52, 0xbb, 0x91, 0xc3, 0x00, 0xcb, 0x88, 0xd0, 0x65, 0x8b, 0x1b, 0x53, 0x2e, 0xa3,
3209 0x71, 0x64, 0x48, 0x97, 0xa2, 0x0d, 0xf9, 0x4e, 0x38, 0x19, 0xef, 0x46, 0xa9, 0xde, 0xac, 0xd8,
3210 0xa8, 0xfa, 0x76, 0x3f, 0xe3, 0x9c, 0x34, 0x3f, 0xf9, 0xdc, 0xbb, 0xc7, 0xc7, 0x0b, 0x4f, 0x1d,
3211 0x8a, 0x51, 0xe0, 0x4b, 0xcd, 0xb4, 0x59, 0x31, 0xc8, 0x9f, 0x7e, 0xc9, 0xd9, 0x78, 0x73, 0x64,
3212 0xea, 0xc5, 0xac, 0x83, 0x34, 0xd3, 0xeb, 0xc3, 0xc5, 0x81, 0xa0, 0xff, 0xfa, 0x13, 0x63, 0xeb,
3213 0x17, 0x0d, 0xdd, 0x51, 0xb7, 0xf0, 0xda, 0x49, 0xd3, 0x16, 0x55, 0x26, 0x29, 0xd4, 0x68, 0x9e,
3214 0x2b, 0x16, 0xbe, 0x58, 0x7d, 0x47, 0xa1, 0xfc, 0x8f, 0xf8, 0xb8, 0xd1, 0x7a, 0xd0, 0x31, 0xce,
3215 0x45, 0xcb, 0x3a, 0x8f, 0x95, 0x16, 0x04, 0x28, 0xaf, 0xd7, 0xfb, 0xca, 0xbb, 0x4b, 0x40, 0x7e,
3220# define kSecret XXH3_kSecret
/* 32x32 -> 64-bit multiply. Function form for compilers that optimize the
 * masked form well; __emulu intrinsic for 32-bit MSVC; downcast+upcast
 * macro otherwise. (The surrounding #if arm starts above this excerpt.) */
3240XXH_FORCE_INLINE xxh_u64
3241XXH_mult32to64(xxh_u64 x, xxh_u64 y)
3243 return (x & 0xFFFFFFFF) * (y & 0xFFFFFFFF);
3245#elif defined(_MSC_VER) && defined(_M_IX86)
3247# define XXH_mult32to64(x, y) __emulu((unsigned)(x), (unsigned)(y))
3256# define XXH_mult32to64(x, y) ((xxh_u64)(xxh_u32)(x) * (xxh_u64)(xxh_u32)(y))
/* Full 64x64 -> 128-bit multiply, returning low/high halves.
 * Backends: native __uint128_t, MSVC _umul128 (x64/IA64), MSVC __umulh
 * (ARM64), then a portable 4-partial-product scalar fallback. */
3269XXH_mult64to128(xxh_u64 lhs, xxh_u64 rhs)
3286#if defined(__GNUC__) && !defined(__wasm__) \
3287 && defined(__SIZEOF_INT128__) \
3288 || (defined(_INTEGRAL_MAX_BITS) && _INTEGRAL_MAX_BITS >= 128)
3290 __uint128_t
const product = (__uint128_t)lhs * (__uint128_t)rhs;
3292 r128.
low64 = (xxh_u64)(product);
3293 r128.
high64 = (xxh_u64)(product >> 64);
/* MSVC x64/IA64: _umul128 computes both halves in one instruction. */
3303#elif defined(_M_X64) || defined(_M_IA64)
3306# pragma intrinsic(_umul128)
3308 xxh_u64 product_high;
3309 xxh_u64
const product_low = _umul128(lhs, rhs, &product_high);
3311 r128.
low64 = product_low;
3312 r128.
high64 = product_high;
/* MSVC ARM64: low half is a plain multiply, high half via __umulh. */
3320#elif defined(_M_ARM64)
3323# pragma intrinsic(__umulh)
3326 r128.
low64 = lhs * rhs;
3327 r128.
high64 = __umulh(lhs, rhs);
/* Portable fallback: schoolbook multiplication from four 32x32 products. */
3375 xxh_u64
const lo_lo = XXH_mult32to64(lhs & 0xFFFFFFFF, rhs & 0xFFFFFFFF);
3376 xxh_u64
const hi_lo = XXH_mult32to64(lhs >> 32, rhs & 0xFFFFFFFF);
3377 xxh_u64
const lo_hi = XXH_mult32to64(lhs & 0xFFFFFFFF, rhs >> 32);
3378 xxh_u64
const hi_hi = XXH_mult32to64(lhs >> 32, rhs >> 32);
/* cross cannot overflow: max is (2^32-1)+(2^32-1)+(2^64-2^33+1) < 2^64 */
3381 xxh_u64
const cross = (lo_lo >> 32) + (hi_lo & 0xFFFFFFFF) + lo_hi;
3382 xxh_u64
const upper = (hi_lo >> 32) + (cross >> 32) + hi_hi;
3383 xxh_u64
const lower = (cross << 32) | (lo_lo & 0xFFFFFFFF);
/* XXH3_mul128_fold64: 128-bit multiply folded to 64 bits (low ^ high). */
3403XXH3_mul128_fold64(xxh_u64 lhs, xxh_u64 rhs)
3410XXH_FORCE_INLINE xxh_u64 XXH_xorshift64(xxh_u64 v64,
int shift)
3412 XXH_ASSERT(0 <= shift && shift < 64);
3413 return v64 ^ (v64 >> shift);
/* Tail of XXH3_avalanche (signature elided above): xorshift / multiply /
 * xorshift finalizer that scrambles all input bits into the output. */
3422 h64 = XXH_xorshift64(h64, 37);
3423 h64 *= 0x165667919E3779F9ULL;
3424 h64 = XXH_xorshift64(h64, 32);
/* Tail of XXH3_rrmxmx (signature elided): rotate-rotate-multiply-xorshift
 * finalizer, stronger than XXH3_avalanche, used by the 4-8 byte path. */
3436 h64 ^= XXH_rotl64(h64, 49) ^ XXH_rotl64(h64, 24);
3437 h64 *= 0x9FB21C651E98DF25ULL;
3438 h64 ^= (h64 >> 35) +
len ;
3439 h64 *= 0x9FB21C651E98DF25ULL;
3440 return XXH_xorshift64(h64, 28);
3478XXH3_len_1to3_64b(
const xxh_u8* input,
size_t len,
const xxh_u8* secret,
XXH64_hash_t seed)
3480 XXH_ASSERT(input !=
NULL);
3481 XXH_ASSERT(1 <=
len &&
len <= 3);
3482 XXH_ASSERT(secret !=
NULL);
3488 { xxh_u8
const c1 = input[0];
3489 xxh_u8
const c2 = input[
len >> 1];
3490 xxh_u8
const c3 = input[
len - 1];
3491 xxh_u32
const combined = ((xxh_u32)c1 << 16) | ((xxh_u32)c2 << 24)
3492 | ((xxh_u32)c3 << 0) | ((xxh_u32)
len << 8);
3493 xxh_u64
const bitflip = (XXH_readLE32(secret) ^ XXH_readLE32(secret+4)) + seed;
3494 xxh_u64
const keyed = (xxh_u64)combined ^ bitflip;
3495 return XXH64_avalanche(keyed);
3500XXH3_len_4to8_64b(
const xxh_u8* input,
size_t len,
const xxh_u8* secret,
XXH64_hash_t seed)
3502 XXH_ASSERT(input !=
NULL);
3503 XXH_ASSERT(secret !=
NULL);
3504 XXH_ASSERT(4 <=
len &&
len <= 8);
3505 seed ^= (xxh_u64)XXH_swap32((xxh_u32)seed) << 32;
3506 { xxh_u32
const input1 = XXH_readLE32(input);
3507 xxh_u32
const input2 = XXH_readLE32(input +
len - 4);
3508 xxh_u64
const bitflip = (XXH_readLE64(secret+8) ^ XXH_readLE64(secret+16)) - seed;
3509 xxh_u64
const input64 = input2 + (((xxh_u64)input1) << 32);
3510 xxh_u64
const keyed = input64 ^ bitflip;
3511 return XXH3_rrmxmx(keyed,
len);
3516XXH3_len_9to16_64b(
const xxh_u8* input,
size_t len,
const xxh_u8* secret,
XXH64_hash_t seed)
3518 XXH_ASSERT(input !=
NULL);
3519 XXH_ASSERT(secret !=
NULL);
3520 XXH_ASSERT(9 <=
len &&
len <= 16);
3521 { xxh_u64
const bitflip1 = (XXH_readLE64(secret+24) ^ XXH_readLE64(secret+32)) + seed;
3522 xxh_u64
const bitflip2 = (XXH_readLE64(secret+40) ^ XXH_readLE64(secret+48)) - seed;
3523 xxh_u64
const input_lo = XXH_readLE64(input) ^ bitflip1;
3524 xxh_u64
const input_hi = XXH_readLE64(input +
len - 8) ^ bitflip2;
3525 xxh_u64
const acc =
len
3526 + XXH_swap64(input_lo) + input_hi
3527 + XXH3_mul128_fold64(input_lo, input_hi);
3528 return XXH3_avalanche(acc);
3533XXH3_len_0to16_64b(
const xxh_u8* input,
size_t len,
const xxh_u8* secret,
XXH64_hash_t seed)
3535 XXH_ASSERT(
len <= 16);
3536 {
if (XXH_likely(
len > 8))
return XXH3_len_9to16_64b(input,
len, secret, seed);
3537 if (XXH_likely(
len >= 4))
return XXH3_len_4to8_64b(input,
len, secret, seed);
3538 if (
len)
return XXH3_len_1to3_64b(input,
len, secret, seed);
3539 return XXH64_avalanche(seed ^ (XXH_readLE64(secret+56) ^ XXH_readLE64(secret+64)));
3569XXH_FORCE_INLINE xxh_u64 XXH3_mix16B(
const xxh_u8* XXH_RESTRICT input,
3570 const xxh_u8* XXH_RESTRICT secret, xxh_u64 seed64)
3572#if defined(__GNUC__) && !defined(__clang__) \
3573 && defined(__i386__) && defined(__SSE2__) \
3574 && !defined(XXH_ENABLE_AUTOVECTORIZE)
3590 XXH_COMPILER_GUARD(seed64);
3592 { xxh_u64
const input_lo = XXH_readLE64(input);
3593 xxh_u64
const input_hi = XXH_readLE64(input+8);
3594 return XXH3_mul128_fold64(
3595 input_lo ^ (XXH_readLE64(secret) + seed64),
3596 input_hi ^ (XXH_readLE64(secret+8) - seed64)
3603XXH3_len_17to128_64b(
const xxh_u8* XXH_RESTRICT input,
size_t len,
3604 const xxh_u8* XXH_RESTRICT secret,
size_t secretSize,
3608 XXH_ASSERT(16 <
len &&
len <= 128);
3610 { xxh_u64 acc =
len * XXH_PRIME64_1;
3614 acc += XXH3_mix16B(input+48, secret+96, seed);
3615 acc += XXH3_mix16B(input+
len-64, secret+112, seed);
3617 acc += XXH3_mix16B(input+32, secret+64, seed);
3618 acc += XXH3_mix16B(input+
len-48, secret+80, seed);
3620 acc += XXH3_mix16B(input+16, secret+32, seed);
3621 acc += XXH3_mix16B(input+
len-32, secret+48, seed);
3623 acc += XXH3_mix16B(input+0, secret+0, seed);
3624 acc += XXH3_mix16B(input+
len-16, secret+16, seed);
3626 return XXH3_avalanche(acc);
3630#define XXH3_MIDSIZE_MAX 240
3633XXH3_len_129to240_64b(
const xxh_u8* XXH_RESTRICT input,
size_t len,
3634 const xxh_u8* XXH_RESTRICT secret,
size_t secretSize,
3638 XXH_ASSERT(128 <
len &&
len <= XXH3_MIDSIZE_MAX);
3640 #define XXH3_MIDSIZE_STARTOFFSET 3
3641 #define XXH3_MIDSIZE_LASTOFFSET 17
3643 { xxh_u64 acc =
len * XXH_PRIME64_1;
3644 int const nbRounds = (int)
len / 16;
3646 for (i=0; i<8; i++) {
3647 acc += XXH3_mix16B(input+(16*i), secret+(16*i), seed);
3649 acc = XXH3_avalanche(acc);
3650 XXH_ASSERT(nbRounds >= 8);
3651#if defined(__clang__) \
3652 && (defined(__ARM_NEON) || defined(__ARM_NEON__)) \
3653 && !defined(XXH_ENABLE_AUTOVECTORIZE)
3674 #pragma clang loop vectorize(disable)
3676 for (i=8 ; i < nbRounds; i++) {
3677 acc += XXH3_mix16B(input+(16*i), secret+(16*(i-8)) + XXH3_MIDSIZE_STARTOFFSET, seed);
3681 return XXH3_avalanche(acc);
3688#define XXH_STRIPE_LEN 64
3689#define XXH_SECRET_CONSUME_RATE 8
3690#define XXH_ACC_NB (XXH_STRIPE_LEN / sizeof(xxh_u64))
3693# define STRIPE_LEN XXH_STRIPE_LEN
3694# define ACC_NB XXH_ACC_NB
3697XXH_FORCE_INLINE
void XXH_writeLE64(
void* dst, xxh_u64 v64)
3699 if (!XXH_CPU_LITTLE_ENDIAN) v64 = XXH_swap64(v64);
3700 XXH_memcpy(dst, &v64,
sizeof(v64));
/* xxh_i64: a signed 64-bit type for seed arithmetic. int64_t when C99/C++
 * guarantees <stdint.h>; otherwise fall back to long long. */
#if !defined (__VMS) \
 && (defined (__cplusplus) \
 || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) ) )
    typedef int64_t xxh_i64;
#else
    /* the following type must have a width of 64-bit */
    typedef long long xxh_i64;
#endif
3740#if (XXH_VECTOR == XXH_AVX512) \
3741 || (defined(XXH_DISPATCH_AVX512) && XXH_DISPATCH_AVX512 != 0)
3743#ifndef XXH_TARGET_AVX512
3744# define XXH_TARGET_AVX512
3747XXH_FORCE_INLINE XXH_TARGET_AVX512
void
3748XXH3_accumulate_512_avx512(
void* XXH_RESTRICT acc,
3749 const void* XXH_RESTRICT input,
3750 const void* XXH_RESTRICT secret)
3752 __m512i*
const xacc = (__m512i *) acc;
3753 XXH_ASSERT((((
size_t)acc) & 63) == 0);
3754 XXH_STATIC_ASSERT(XXH_STRIPE_LEN ==
sizeof(__m512i));
3758 __m512i
const data_vec = _mm512_loadu_si512 (input);
3760 __m512i
const key_vec = _mm512_loadu_si512 (secret);
3762 __m512i
const data_key = _mm512_xor_si512 (data_vec, key_vec);
3764 __m512i
const data_key_lo = _mm512_shuffle_epi32 (data_key, (_MM_PERM_ENUM)_MM_SHUFFLE(0, 3, 0, 1));
3766 __m512i
const product = _mm512_mul_epu32 (data_key, data_key_lo);
3768 __m512i
const data_swap = _mm512_shuffle_epi32(data_vec, (_MM_PERM_ENUM)_MM_SHUFFLE(1, 0, 3, 2));
3769 __m512i
const sum = _mm512_add_epi64(*xacc, data_swap);
3771 *xacc = _mm512_add_epi64(product, sum);
3796XXH_FORCE_INLINE XXH_TARGET_AVX512
void
3797XXH3_scrambleAcc_avx512(
void* XXH_RESTRICT acc,
const void* XXH_RESTRICT secret)
3799 XXH_ASSERT((((
size_t)acc) & 63) == 0);
3800 XXH_STATIC_ASSERT(XXH_STRIPE_LEN ==
sizeof(__m512i));
3801 { __m512i*
const xacc = (__m512i*) acc;
3802 const __m512i prime32 = _mm512_set1_epi32((
int)XXH_PRIME32_1);
3805 __m512i
const acc_vec = *xacc;
3806 __m512i
const shifted = _mm512_srli_epi64 (acc_vec, 47);
3807 __m512i
const data_vec = _mm512_xor_si512 (acc_vec, shifted);
3809 __m512i
const key_vec = _mm512_loadu_si512 (secret);
3810 __m512i
const data_key = _mm512_xor_si512 (data_vec, key_vec);
3813 __m512i
const data_key_hi = _mm512_shuffle_epi32 (data_key, (_MM_PERM_ENUM)_MM_SHUFFLE(0, 3, 0, 1));
3814 __m512i
const prod_lo = _mm512_mul_epu32 (data_key, prime32);
3815 __m512i
const prod_hi = _mm512_mul_epu32 (data_key_hi, prime32);
3816 *xacc = _mm512_add_epi64(prod_lo, _mm512_slli_epi64(prod_hi, 32));
3820XXH_FORCE_INLINE XXH_TARGET_AVX512
void
3821XXH3_initCustomSecret_avx512(
void* XXH_RESTRICT customSecret, xxh_u64 seed64)
3823 XXH_STATIC_ASSERT((XXH_SECRET_DEFAULT_SIZE & 63) == 0);
3824 XXH_STATIC_ASSERT(XXH_SEC_ALIGN == 64);
3825 XXH_ASSERT(((
size_t)customSecret & 63) == 0);
3826 (
void)(&XXH_writeLE64);
3827 {
int const nbRounds = XXH_SECRET_DEFAULT_SIZE /
sizeof(__m512i);
3828 __m512i
const seed = _mm512_mask_set1_epi64(_mm512_set1_epi64((xxh_i64)seed64), 0xAA, (xxh_i64)(0
U - seed64));
3830 const __m512i*
const src = (
const __m512i*) ((
const void*) XXH3_kSecret);
3831 __m512i*
const dest = ( __m512i*) customSecret;
3833 XXH_ASSERT(((
size_t)src & 63) == 0);
3834 XXH_ASSERT(((
size_t)dest & 63) == 0);
3835 for (i=0; i < nbRounds; ++i) {
3841 } remote_const_void;
3842 remote_const_void.cp = src + i;
3843 dest[i] = _mm512_add_epi64(_mm512_stream_load_si512(remote_const_void.p), seed);
3849#if (XXH_VECTOR == XXH_AVX2) \
3850 || (defined(XXH_DISPATCH_AVX2) && XXH_DISPATCH_AVX2 != 0)
3852#ifndef XXH_TARGET_AVX2
3853# define XXH_TARGET_AVX2
3856XXH_FORCE_INLINE XXH_TARGET_AVX2
void
3857XXH3_accumulate_512_avx2(
void* XXH_RESTRICT acc,
3858 const void* XXH_RESTRICT input,
3859 const void* XXH_RESTRICT secret)
3861 XXH_ASSERT((((
size_t)acc) & 31) == 0);
3862 { __m256i*
const xacc = (__m256i *) acc;
3865 const __m256i*
const xinput = (
const __m256i *) input;
3868 const __m256i*
const xsecret = (
const __m256i *) secret;
3871 for (i=0; i < XXH_STRIPE_LEN/
sizeof(__m256i); i++) {
3873 __m256i
const data_vec = _mm256_loadu_si256 (xinput+i);
3875 __m256i
const key_vec = _mm256_loadu_si256 (xsecret+i);
3877 __m256i
const data_key = _mm256_xor_si256 (data_vec, key_vec);
3879 __m256i
const data_key_lo = _mm256_shuffle_epi32 (data_key, _MM_SHUFFLE(0, 3, 0, 1));
3881 __m256i
const product = _mm256_mul_epu32 (data_key, data_key_lo);
3883 __m256i
const data_swap = _mm256_shuffle_epi32(data_vec, _MM_SHUFFLE(1, 0, 3, 2));
3884 __m256i
const sum = _mm256_add_epi64(xacc[i], data_swap);
3886 xacc[i] = _mm256_add_epi64(product, sum);
3890XXH_FORCE_INLINE XXH_TARGET_AVX2
void
3891XXH3_scrambleAcc_avx2(
void* XXH_RESTRICT acc,
const void* XXH_RESTRICT secret)
3893 XXH_ASSERT((((
size_t)acc) & 31) == 0);
3894 { __m256i*
const xacc = (__m256i*) acc;
3897 const __m256i*
const xsecret = (
const __m256i *) secret;
3898 const __m256i prime32 = _mm256_set1_epi32((
int)XXH_PRIME32_1);
3901 for (i=0; i < XXH_STRIPE_LEN/
sizeof(__m256i); i++) {
3903 __m256i
const acc_vec = xacc[i];
3904 __m256i
const shifted = _mm256_srli_epi64 (acc_vec, 47);
3905 __m256i
const data_vec = _mm256_xor_si256 (acc_vec, shifted);
3907 __m256i
const key_vec = _mm256_loadu_si256 (xsecret+i);
3908 __m256i
const data_key = _mm256_xor_si256 (data_vec, key_vec);
3911 __m256i
const data_key_hi = _mm256_shuffle_epi32 (data_key, _MM_SHUFFLE(0, 3, 0, 1));
3912 __m256i
const prod_lo = _mm256_mul_epu32 (data_key, prime32);
3913 __m256i
const prod_hi = _mm256_mul_epu32 (data_key_hi, prime32);
3914 xacc[i] = _mm256_add_epi64(prod_lo, _mm256_slli_epi64(prod_hi, 32));
3919XXH_FORCE_INLINE XXH_TARGET_AVX2
void XXH3_initCustomSecret_avx2(
void* XXH_RESTRICT customSecret, xxh_u64 seed64)
3921 XXH_STATIC_ASSERT((XXH_SECRET_DEFAULT_SIZE & 31) == 0);
3922 XXH_STATIC_ASSERT((XXH_SECRET_DEFAULT_SIZE /
sizeof(__m256i)) == 6);
3923 XXH_STATIC_ASSERT(XXH_SEC_ALIGN <= 64);
3924 (
void)(&XXH_writeLE64);
3925 XXH_PREFETCH(customSecret);
3926 { __m256i
const seed = _mm256_set_epi64x((xxh_i64)(0
U - seed64), (xxh_i64)seed64, (xxh_i64)(0
U - seed64), (xxh_i64)seed64);
3928 const __m256i*
const src = (
const __m256i*) ((
const void*) XXH3_kSecret);
3929 __m256i* dest = ( __m256i*) customSecret;
3931# if defined(__GNUC__) || defined(__clang__)
3937 XXH_COMPILER_GUARD(dest);
3939 XXH_ASSERT(((
size_t)src & 31) == 0);
3940 XXH_ASSERT(((
size_t)dest & 31) == 0);
3943 dest[0] = _mm256_add_epi64(_mm256_stream_load_si256(src+0), seed);
3944 dest[1] = _mm256_add_epi64(_mm256_stream_load_si256(src+1), seed);
3945 dest[2] = _mm256_add_epi64(_mm256_stream_load_si256(src+2), seed);
3946 dest[3] = _mm256_add_epi64(_mm256_stream_load_si256(src+3), seed);
3947 dest[4] = _mm256_add_epi64(_mm256_stream_load_si256(src+4), seed);
3948 dest[5] = _mm256_add_epi64(_mm256_stream_load_si256(src+5), seed);
3955#if (XXH_VECTOR == XXH_SSE2) || defined(XXH_X86DISPATCH)
3957#ifndef XXH_TARGET_SSE2
3958# define XXH_TARGET_SSE2
3961XXH_FORCE_INLINE XXH_TARGET_SSE2
void
3962XXH3_accumulate_512_sse2(
void* XXH_RESTRICT acc,
3963 const void* XXH_RESTRICT input,
3964 const void* XXH_RESTRICT secret)
3967 XXH_ASSERT((((
size_t)acc) & 15) == 0);
3968 { __m128i*
const xacc = (__m128i *) acc;
3971 const __m128i*
const xinput = (
const __m128i *) input;
3974 const __m128i*
const xsecret = (
const __m128i *) secret;
3977 for (i=0; i < XXH_STRIPE_LEN/
sizeof(__m128i); i++) {
3979 __m128i
const data_vec = _mm_loadu_si128 (xinput+i);
3981 __m128i
const key_vec = _mm_loadu_si128 (xsecret+i);
3983 __m128i
const data_key = _mm_xor_si128 (data_vec, key_vec);
3985 __m128i
const data_key_lo = _mm_shuffle_epi32 (data_key, _MM_SHUFFLE(0, 3, 0, 1));
3987 __m128i
const product = _mm_mul_epu32 (data_key, data_key_lo);
3989 __m128i
const data_swap = _mm_shuffle_epi32(data_vec, _MM_SHUFFLE(1,0,3,2));
3990 __m128i
const sum = _mm_add_epi64(xacc[i], data_swap);
3992 xacc[i] = _mm_add_epi64(product, sum);
3996XXH_FORCE_INLINE XXH_TARGET_SSE2
void
3997XXH3_scrambleAcc_sse2(
void* XXH_RESTRICT acc,
const void* XXH_RESTRICT secret)
3999 XXH_ASSERT((((
size_t)acc) & 15) == 0);
4000 { __m128i*
const xacc = (__m128i*) acc;
4003 const __m128i*
const xsecret = (
const __m128i *) secret;
4004 const __m128i prime32 = _mm_set1_epi32((
int)XXH_PRIME32_1);
4007 for (i=0; i < XXH_STRIPE_LEN/
sizeof(__m128i); i++) {
4009 __m128i
const acc_vec = xacc[i];
4010 __m128i
const shifted = _mm_srli_epi64 (acc_vec, 47);
4011 __m128i
const data_vec = _mm_xor_si128 (acc_vec, shifted);
4013 __m128i
const key_vec = _mm_loadu_si128 (xsecret+i);
4014 __m128i
const data_key = _mm_xor_si128 (data_vec, key_vec);
4017 __m128i
const data_key_hi = _mm_shuffle_epi32 (data_key, _MM_SHUFFLE(0, 3, 0, 1));
4018 __m128i
const prod_lo = _mm_mul_epu32 (data_key, prime32);
4019 __m128i
const prod_hi = _mm_mul_epu32 (data_key_hi, prime32);
4020 xacc[i] = _mm_add_epi64(prod_lo, _mm_slli_epi64(prod_hi, 32));
4025XXH_FORCE_INLINE XXH_TARGET_SSE2
void XXH3_initCustomSecret_sse2(
void* XXH_RESTRICT customSecret, xxh_u64 seed64)
4027 XXH_STATIC_ASSERT((XXH_SECRET_DEFAULT_SIZE & 15) == 0);
4028 (
void)(&XXH_writeLE64);
4029 {
int const nbRounds = XXH_SECRET_DEFAULT_SIZE /
sizeof(__m128i);
4031# if defined(_MSC_VER) && defined(_M_IX86) && _MSC_VER < 1900
4033 XXH_ALIGN(16) const xxh_i64 seed64x2[2] = { (xxh_i64)seed64, (xxh_i64)(0
U - seed64) };
4034 __m128i
const seed = _mm_load_si128((__m128i
const*)seed64x2);
4036 __m128i
const seed = _mm_set_epi64x((xxh_i64)(0
U - seed64), (xxh_i64)seed64);
4040 const void*
const src16 = XXH3_kSecret;
4041 __m128i* dst16 = (__m128i*) customSecret;
4042# if defined(__GNUC__) || defined(__clang__)
4048 XXH_COMPILER_GUARD(dst16);
4050 XXH_ASSERT(((
size_t)src16 & 15) == 0);
4051 XXH_ASSERT(((
size_t)dst16 & 15) == 0);
4053 for (i=0; i < nbRounds; ++i) {
4054 dst16[i] = _mm_add_epi64(_mm_load_si128((
const __m128i *)src16+i), seed);
#if (XXH_VECTOR == XXH_NEON)

/*!
 * NEON accumulator step: four 128-bit quarters per stripe; uses
 * XXH_SPLIT_IN_PLACE to feed the 32x32->64 multiply-accumulate (vmlal).
 */
XXH_FORCE_INLINE void
XXH3_accumulate_512_neon( void* XXH_RESTRICT acc,
                    const void* XXH_RESTRICT input,
                    const void* XXH_RESTRICT secret)
{
    XXH_ASSERT((((size_t)acc) & 15) == 0);
    {
        uint64x2_t* const xacc = (uint64x2_t *) acc;
        /* byte pointers: a uint32x4_t* view causes bus errors on ARMv7 */
        uint8_t const* const xinput  = (const uint8_t *) input;
        uint8_t const* const xsecret = (const uint8_t *) secret;

        size_t i;
        for (i=0; i < XXH_STRIPE_LEN / sizeof(uint64x2_t); i++) {
            /* data_vec = xinput[i]; */
            uint8x16_t data_vec    = vld1q_u8(xinput  + (i * 16));
            /* key_vec  = xsecret[i]; */
            uint8x16_t key_vec     = vld1q_u8(xsecret + (i * 16));
            uint64x2_t data_key;
            uint32x2_t data_key_lo, data_key_hi;
            /* xacc[i] += swap(data_vec); */
            uint64x2_t const data64  = vreinterpretq_u64_u8(data_vec);
            uint64x2_t const swapped = vextq_u64(data64, data64, 1);
            xacc[i] = vaddq_u64 (xacc[i], swapped);
            /* data_key = data_vec ^ key_vec; */
            data_key = vreinterpretq_u64_u8(veorq_u8(data_vec, key_vec));
            /* split data_key into 32-bit halves; data_key itself becomes undefined */
            XXH_SPLIT_IN_PLACE(data_key, data_key_lo, data_key_hi);
            /* xacc[i] += (uint64x2_t) data_key_lo * (uint64x2_t) data_key_hi; */
            xacc[i] = vmlal_u32 (xacc[i], data_key_lo, data_key_hi);
        }
    }
}
/*!
 * NEON scramble step: xorshift-47, XOR with secret, then multiply by
 * XXH_PRIME32_1 via two widening multiply-accumulates.
 */
XXH_FORCE_INLINE void
XXH3_scrambleAcc_neon(void* XXH_RESTRICT acc, const void* XXH_RESTRICT secret)
{
    XXH_ASSERT((((size_t)acc) & 15) == 0);

    {   uint64x2_t* xacc       = (uint64x2_t*) acc;
        uint8_t const* xsecret = (uint8_t const*) secret;
        uint32x2_t prime       = vdup_n_u32 (XXH_PRIME32_1);

        size_t i;
        for (i=0; i < XXH_STRIPE_LEN/sizeof(uint64x2_t); i++) {
            /* xacc[i] ^= (xacc[i] >> 47); */
            uint64x2_t acc_vec  = xacc[i];
            uint64x2_t shifted  = vshrq_n_u64 (acc_vec, 47);
            uint64x2_t data_vec = veorq_u64   (acc_vec, shifted);

            /* xacc[i] ^= xsecret[i]; */
            uint8x16_t key_vec  = vld1q_u8 (xsecret + (i * 16));
            uint64x2_t data_key = veorq_u64 (data_vec, vreinterpretq_u64_u8(key_vec));

            /* xacc[i] *= XXH_PRIME32_1 */
            uint32x2_t data_key_lo, data_key_hi;
            XXH_SPLIT_IN_PLACE(data_key, data_key_lo, data_key_hi);
            {   /* high half contributes (hi * prime) << 32; low half via vmlal */
                uint64x2_t prod_hi = vmull_u32 (data_key_hi, prime);
                xacc[i] = vshlq_n_u64(prod_hi, 32);
                xacc[i] = vmlal_u32(xacc[i], data_key_lo, prime);
            }
    }   }
}
4154#if (XXH_VECTOR == XXH_VSX)
4156XXH_FORCE_INLINE
void
4157XXH3_accumulate_512_vsx(
void* XXH_RESTRICT acc,
4158 const void* XXH_RESTRICT input,
4159 const void* XXH_RESTRICT secret)
4162 unsigned int*
const xacc = (
unsigned int*) acc;
4163 xxh_u64x2
const*
const xinput = (xxh_u64x2
const*) input;
4164 xxh_u64x2
const*
const xsecret = (xxh_u64x2
const*) secret;
4165 xxh_u64x2
const v32 = { 32, 32 };
4167 for (i = 0; i < XXH_STRIPE_LEN /
sizeof(xxh_u64x2); i++) {
4169 xxh_u64x2
const data_vec = XXH_vec_loadu(xinput + i);
4171 xxh_u64x2
const key_vec = XXH_vec_loadu(xsecret + i);
4172 xxh_u64x2
const data_key = data_vec ^ key_vec;
4174 xxh_u32x4
const shuffled = (xxh_u32x4)vec_rl(data_key, v32);
4176 xxh_u64x2
const product = XXH_vec_mulo((xxh_u32x4)data_key, shuffled);
4178 xxh_u64x2 acc_vec = (xxh_u64x2)vec_xl(0, xacc + 4 * i);
4183 acc_vec += vec_permi(data_vec, data_vec, 2);
4185 acc_vec += vec_xxpermdi(data_vec, data_vec, 2);
4188 vec_xst((xxh_u32x4)acc_vec, 0, xacc + 4 * i);
4192XXH_FORCE_INLINE
void
4193XXH3_scrambleAcc_vsx(
void* XXH_RESTRICT acc,
const void* XXH_RESTRICT secret)
4195 XXH_ASSERT((((
size_t)acc) & 15) == 0);
4197 { xxh_u64x2*
const xacc = (xxh_u64x2*) acc;
4198 const xxh_u64x2*
const xsecret = (
const xxh_u64x2*) secret;
4200 xxh_u64x2
const v32 = { 32, 32 };
4201 xxh_u64x2
const v47 = { 47, 47 };
4202 xxh_u32x4
const prime = { XXH_PRIME32_1, XXH_PRIME32_1, XXH_PRIME32_1, XXH_PRIME32_1 };
4204 for (i = 0; i < XXH_STRIPE_LEN /
sizeof(xxh_u64x2); i++) {
4206 xxh_u64x2
const acc_vec = xacc[i];
4207 xxh_u64x2
const data_vec = acc_vec ^ (acc_vec >> v47);
4210 xxh_u64x2
const key_vec = XXH_vec_loadu(xsecret + i);
4211 xxh_u64x2
const data_key = data_vec ^ key_vec;
4215 xxh_u64x2
const prod_even = XXH_vec_mule((xxh_u32x4)data_key, prime);
4217 xxh_u64x2
const prod_odd = XXH_vec_mulo((xxh_u32x4)data_key, prime);
4218 xacc[i] = prod_odd + (prod_even << v32);
4226XXH_FORCE_INLINE
void
4227XXH3_accumulate_512_scalar(
void* XXH_RESTRICT acc,
4228 const void* XXH_RESTRICT input,
4229 const void* XXH_RESTRICT secret)
4231 xxh_u64*
const xacc = (xxh_u64*) acc;
4232 const xxh_u8*
const xinput = (
const xxh_u8*) input;
4233 const xxh_u8*
const xsecret = (
const xxh_u8*) secret;
4235 XXH_ASSERT(((
size_t)acc & (XXH_ACC_ALIGN-1)) == 0);
4236 for (i=0; i < XXH_ACC_NB; i++) {
4237 xxh_u64
const data_val = XXH_readLE64(xinput + 8*i);
4238 xxh_u64
const data_key = data_val ^ XXH_readLE64(xsecret + i*8);
4239 xacc[i ^ 1] += data_val;
4240 xacc[i] += XXH_mult32to64(data_key & 0xFFFFFFFF, data_key >> 32);
4244XXH_FORCE_INLINE
void
4245XXH3_scrambleAcc_scalar(
void* XXH_RESTRICT acc,
const void* XXH_RESTRICT secret)
4247 xxh_u64*
const xacc = (xxh_u64*) acc;
4248 const xxh_u8*
const xsecret = (
const xxh_u8*) secret;
4250 XXH_ASSERT((((
size_t)acc) & (XXH_ACC_ALIGN-1)) == 0);
4251 for (i=0; i < XXH_ACC_NB; i++) {
4252 xxh_u64
const key64 = XXH_readLE64(xsecret + 8*i);
4253 xxh_u64 acc64 = xacc[i];
4254 acc64 = XXH_xorshift64(acc64, 47);
4256 acc64 *= XXH_PRIME32_1;
4261XXH_FORCE_INLINE
void
4262XXH3_initCustomSecret_scalar(
void* XXH_RESTRICT customSecret, xxh_u64 seed64)
4269 const xxh_u8* kSecretPtr = XXH3_kSecret;
4270 XXH_STATIC_ASSERT((XXH_SECRET_DEFAULT_SIZE & 15) == 0);
4272#if defined(__clang__) && defined(__aarch64__)
4301 XXH_COMPILER_GUARD(kSecretPtr);
4307 XXH_ASSERT(kSecretPtr == XXH3_kSecret);
4309 {
int const nbRounds = XXH_SECRET_DEFAULT_SIZE / 16;
4311 for (i=0; i < nbRounds; i++) {
4318 xxh_u64 lo = XXH_readLE64(kSecretPtr + 16*i) + seed64;
4319 xxh_u64 hi = XXH_readLE64(kSecretPtr + 16*i + 8) - seed64;
4320 XXH_writeLE64((xxh_u8*)customSecret + 16*i, lo);
4321 XXH_writeLE64((xxh_u8*)customSecret + 16*i + 8, hi);
4326typedef void (*XXH3_f_accumulate_512)(
void* XXH_RESTRICT,
const void*,
const void*);
4327typedef void (*XXH3_f_scrambleAcc)(
void* XXH_RESTRICT,
const void*);
4328typedef void (*XXH3_f_initCustomSecret)(
void* XXH_RESTRICT, xxh_u64);
4331#if (XXH_VECTOR == XXH_AVX512)
4333#define XXH3_accumulate_512 XXH3_accumulate_512_avx512
4334#define XXH3_scrambleAcc XXH3_scrambleAcc_avx512
4335#define XXH3_initCustomSecret XXH3_initCustomSecret_avx512
4337#elif (XXH_VECTOR == XXH_AVX2)
4339#define XXH3_accumulate_512 XXH3_accumulate_512_avx2
4340#define XXH3_scrambleAcc XXH3_scrambleAcc_avx2
4341#define XXH3_initCustomSecret XXH3_initCustomSecret_avx2
4343#elif (XXH_VECTOR == XXH_SSE2)
4345#define XXH3_accumulate_512 XXH3_accumulate_512_sse2
4346#define XXH3_scrambleAcc XXH3_scrambleAcc_sse2
4347#define XXH3_initCustomSecret XXH3_initCustomSecret_sse2
4349#elif (XXH_VECTOR == XXH_NEON)
4351#define XXH3_accumulate_512 XXH3_accumulate_512_neon
4352#define XXH3_scrambleAcc XXH3_scrambleAcc_neon
4353#define XXH3_initCustomSecret XXH3_initCustomSecret_scalar
4355#elif (XXH_VECTOR == XXH_VSX)
4357#define XXH3_accumulate_512 XXH3_accumulate_512_vsx
4358#define XXH3_scrambleAcc XXH3_scrambleAcc_vsx
4359#define XXH3_initCustomSecret XXH3_initCustomSecret_scalar
4363#define XXH3_accumulate_512 XXH3_accumulate_512_scalar
4364#define XXH3_scrambleAcc XXH3_scrambleAcc_scalar
4365#define XXH3_initCustomSecret XXH3_initCustomSecret_scalar
4371#ifndef XXH_PREFETCH_DIST
4373# define XXH_PREFETCH_DIST 320
4375# if (XXH_VECTOR == XXH_AVX512)
4376# define XXH_PREFETCH_DIST 512
4378# define XXH_PREFETCH_DIST 384
4388XXH_FORCE_INLINE
void
4389XXH3_accumulate( xxh_u64* XXH_RESTRICT acc,
4390 const xxh_u8* XXH_RESTRICT input,
4391 const xxh_u8* XXH_RESTRICT secret,
4393 XXH3_f_accumulate_512 f_acc512)
4396 for (
n = 0;
n < nbStripes;
n++ ) {
4397 const xxh_u8*
const in = input +
n*XXH_STRIPE_LEN;
4398 XXH_PREFETCH(in + XXH_PREFETCH_DIST);
4401 secret +
n*XXH_SECRET_CONSUME_RATE);
4405XXH_FORCE_INLINE
void
4406XXH3_hashLong_internal_loop(xxh_u64* XXH_RESTRICT acc,
4407 const xxh_u8* XXH_RESTRICT input,
size_t len,
4408 const xxh_u8* XXH_RESTRICT secret,
size_t secretSize,
4409 XXH3_f_accumulate_512 f_acc512,
4410 XXH3_f_scrambleAcc f_scramble)
4412 size_t const nbStripesPerBlock = (secretSize - XXH_STRIPE_LEN) / XXH_SECRET_CONSUME_RATE;
4413 size_t const block_len = XXH_STRIPE_LEN * nbStripesPerBlock;
4414 size_t const nb_blocks = (
len - 1) / block_len;
4420 for (
n = 0;
n < nb_blocks;
n++) {
4421 XXH3_accumulate(acc, input +
n*block_len, secret, nbStripesPerBlock, f_acc512);
4422 f_scramble(acc, secret + secretSize - XXH_STRIPE_LEN);
4426 XXH_ASSERT(
len > XXH_STRIPE_LEN);
4427 {
size_t const nbStripes = ((
len - 1) - (block_len * nb_blocks)) / XXH_STRIPE_LEN;
4428 XXH_ASSERT(nbStripes <= (secretSize / XXH_SECRET_CONSUME_RATE));
4429 XXH3_accumulate(acc, input + nb_blocks*block_len, secret, nbStripes, f_acc512);
4432 {
const xxh_u8*
const p = input +
len - XXH_STRIPE_LEN;
4433#define XXH_SECRET_LASTACC_START 7
4434 f_acc512(acc,
p, secret + secretSize - XXH_STRIPE_LEN - XXH_SECRET_LASTACC_START);
4438XXH_FORCE_INLINE xxh_u64
4439XXH3_mix2Accs(
const xxh_u64* XXH_RESTRICT acc,
const xxh_u8* XXH_RESTRICT secret)
4441 return XXH3_mul128_fold64(
4442 acc[0] ^ XXH_readLE64(secret),
4443 acc[1] ^ XXH_readLE64(secret+8) );
4447XXH3_mergeAccs(
const xxh_u64* XXH_RESTRICT acc,
const xxh_u8* XXH_RESTRICT secret, xxh_u64
start)
4449 xxh_u64 result64 =
start;
4452 for (i = 0; i < 4; i++) {
4453 result64 += XXH3_mix2Accs(acc+2*i, secret + 16*i);
4454#if defined(__clang__) \
4455 && (defined(__arm__) || defined(__thumb__)) \
4456 && (defined(__ARM_NEON) || defined(__ARM_NEON__)) \
4457 && !defined(XXH_ENABLE_AUTOVECTORIZE)
4466 XXH_COMPILER_GUARD(result64);
4470 return XXH3_avalanche(result64);
4473#define XXH3_INIT_ACC { XXH_PRIME32_3, XXH_PRIME64_1, XXH_PRIME64_2, XXH_PRIME64_3, \
4474 XXH_PRIME64_4, XXH_PRIME32_2, XXH_PRIME64_5, XXH_PRIME32_1 }
4477XXH3_hashLong_64b_internal(
const void* XXH_RESTRICT input,
size_t len,
4478 const void* XXH_RESTRICT secret,
size_t secretSize,
4479 XXH3_f_accumulate_512 f_acc512,
4480 XXH3_f_scrambleAcc f_scramble)
4482 XXH_ALIGN(XXH_ACC_ALIGN) xxh_u64 acc[XXH_ACC_NB] = XXH3_INIT_ACC;
4484 XXH3_hashLong_internal_loop(acc, (
const xxh_u8*)input,
len, (
const xxh_u8*)secret, secretSize, f_acc512, f_scramble);
4487 XXH_STATIC_ASSERT(
sizeof(acc) == 64);
4489#define XXH_SECRET_MERGEACCS_START 11
4490 XXH_ASSERT(secretSize >=
sizeof(acc) + XXH_SECRET_MERGEACCS_START);
4491 return XXH3_mergeAccs(acc, (
const xxh_u8*)secret + XXH_SECRET_MERGEACCS_START, (xxh_u64)
len * XXH_PRIME64_1);
4500XXH3_hashLong_64b_withSecret(
const void* XXH_RESTRICT input,
size_t len,
4501 XXH64_hash_t seed64,
const xxh_u8* XXH_RESTRICT secret,
size_t secretLen)
4504 return XXH3_hashLong_64b_internal(input,
len, secret, secretLen, XXH3_accumulate_512, XXH3_scrambleAcc);
4514XXH3_hashLong_64b_default(
const void* XXH_RESTRICT input,
size_t len,
4515 XXH64_hash_t seed64,
const xxh_u8* XXH_RESTRICT secret,
size_t secretLen)
4518 return XXH3_hashLong_64b_internal(input,
len, XXH3_kSecret,
sizeof(XXH3_kSecret), XXH3_accumulate_512, XXH3_scrambleAcc);
4533XXH3_hashLong_64b_withSeed_internal(
const void* input,
size_t len,
4535 XXH3_f_accumulate_512 f_acc512,
4536 XXH3_f_scrambleAcc f_scramble,
4537 XXH3_f_initCustomSecret f_initSec)
4540 return XXH3_hashLong_64b_internal(input,
len,
4541 XXH3_kSecret,
sizeof(XXH3_kSecret),
4542 f_acc512, f_scramble);
4543 { XXH_ALIGN(XXH_SEC_ALIGN) xxh_u8 secret[XXH_SECRET_DEFAULT_SIZE];
4544 f_initSec(secret, seed);
4545 return XXH3_hashLong_64b_internal(input,
len, secret,
sizeof(secret),
4546 f_acc512, f_scramble);
4554XXH3_hashLong_64b_withSeed(
const void* input,
size_t len,
4555 XXH64_hash_t seed,
const xxh_u8* secret,
size_t secretLen)
4558 return XXH3_hashLong_64b_withSeed_internal(input,
len, seed,
4559 XXH3_accumulate_512, XXH3_scrambleAcc, XXH3_initCustomSecret);
/* Function-pointer type for the >240-byte strategies; the remaining
 * parameters of the signature continue on lines elided from this excerpt. */
4563typedef XXH64_hash_t (*XXH3_hashLong64_f)(
const void* XXH_RESTRICT, size_t,
4567XXH3_64bits_internal(
const void* XXH_RESTRICT input,
size_t len,
4568 XXH64_hash_t seed64,
const void* XXH_RESTRICT secret,
size_t secretLen,
4569 XXH3_hashLong64_f f_hashLong)
4580 return XXH3_len_0to16_64b((
const xxh_u8*)input,
len, (
const xxh_u8*)secret, seed64);
4582 return XXH3_len_17to128_64b((
const xxh_u8*)input,
len, (
const xxh_u8*)secret, secretLen, seed64);
4583 if (
len <= XXH3_MIDSIZE_MAX)
4584 return XXH3_len_129to240_64b((
const xxh_u8*)input,
len, (
const xxh_u8*)secret, secretLen, seed64);
4585 return f_hashLong(input,
len, seed64, (
const xxh_u8*)secret, secretLen);
4594 return XXH3_64bits_internal(input,
len, 0, XXH3_kSecret,
sizeof(XXH3_kSecret), XXH3_hashLong_64b_default);
4601 return XXH3_64bits_internal(input,
len, 0, secret, secretSize, XXH3_hashLong_64b_withSecret);
4608 return XXH3_64bits_internal(input,
len, seed, XXH3_kSecret,
sizeof(XXH3_kSecret), XXH3_hashLong_64b_withSeed);
4612XXH3_64bits_withSecretandSeed(
const void* input,
size_t len,
const void* secret,
size_t secretSize,
XXH64_hash_t seed)
4614 if (
len <= XXH3_MIDSIZE_MAX)
4615 return XXH3_64bits_internal(input,
len, seed, XXH3_kSecret,
sizeof(XXH3_kSecret),
NULL);
4616 return XXH3_hashLong_64b_withSecret(input,
len, seed, (
const xxh_u8*)secret, secretSize);
4645static void* XXH_alignedMalloc(
size_t s,
size_t align)
4649 XXH_ASSERT(
s != 0 &&
s < (
s +
align));
4651 xxh_u8* base = (xxh_u8*)XXH_malloc(
s +
align);
4663 XXH_ASSERT((
size_t)
ptr %
align == 0);
4676static void XXH_alignedFree(
void*
p)
4679 xxh_u8*
ptr = (xxh_u8*)
p;
4692 XXH3_INITSTATE(
state);
4699 XXH_alignedFree(statePtr);
4707 XXH_memcpy(dst_state, src_state,
sizeof(*dst_state));
4713 const void* secret,
size_t secretSize)
4718 XXH_ASSERT(statePtr !=
NULL);
4720 memset((
char*)statePtr + initStart, 0, initLength);
4721 statePtr->acc[0] = XXH_PRIME32_3;
4722 statePtr->acc[1] = XXH_PRIME64_1;
4723 statePtr->acc[2] = XXH_PRIME64_2;
4724 statePtr->acc[3] = XXH_PRIME64_3;
4725 statePtr->acc[4] = XXH_PRIME64_4;
4726 statePtr->acc[5] = XXH_PRIME32_2;
4727 statePtr->acc[6] = XXH_PRIME64_5;
4728 statePtr->acc[7] = XXH_PRIME32_1;
4729 statePtr->seed = seed;
4730 statePtr->useSeed = (seed != 0);
4731 statePtr->extSecret = (
const unsigned char*)secret;
4733 statePtr->secretLimit = secretSize - XXH_STRIPE_LEN;
4734 statePtr->nbStripesPerBlock = statePtr->secretLimit / XXH_SECRET_CONSUME_RATE;
4742 XXH3_reset_internal(statePtr, 0, XXH3_kSecret, XXH_SECRET_DEFAULT_SIZE);
4751 XXH3_reset_internal(statePtr, 0, secret, secretSize);
4763 if ((seed != statePtr->seed) || (statePtr->extSecret !=
NULL))
4764 XXH3_initCustomSecret(statePtr->customSecret, seed);
4765 XXH3_reset_internal(statePtr, seed,
NULL, XXH_SECRET_DEFAULT_SIZE);
4771XXH3_64bits_reset_withSecretandSeed(
XXH3_state_t* statePtr,
const void* secret,
size_t secretSize,
XXH64_hash_t seed64)
4776 XXH3_reset_internal(statePtr, seed64, secret, secretSize);
4777 statePtr->useSeed = 1;
4784XXH_FORCE_INLINE
void
4785XXH3_consumeStripes(xxh_u64* XXH_RESTRICT acc,
4786 size_t* XXH_RESTRICT nbStripesSoFarPtr,
size_t nbStripesPerBlock,
4787 const xxh_u8* XXH_RESTRICT input,
size_t nbStripes,
4788 const xxh_u8* XXH_RESTRICT secret,
size_t secretLimit,
4789 XXH3_f_accumulate_512 f_acc512,
4790 XXH3_f_scrambleAcc f_scramble)
4792 XXH_ASSERT(nbStripes <= nbStripesPerBlock);
4793 XXH_ASSERT(*nbStripesSoFarPtr < nbStripesPerBlock);
4794 if (nbStripesPerBlock - *nbStripesSoFarPtr <= nbStripes) {
4796 size_t const nbStripesToEndofBlock = nbStripesPerBlock - *nbStripesSoFarPtr;
4797 size_t const nbStripesAfterBlock = nbStripes - nbStripesToEndofBlock;
4798 XXH3_accumulate(acc, input, secret + nbStripesSoFarPtr[0] * XXH_SECRET_CONSUME_RATE, nbStripesToEndofBlock, f_acc512);
4799 f_scramble(acc, secret + secretLimit);
4800 XXH3_accumulate(acc, input + nbStripesToEndofBlock * XXH_STRIPE_LEN, secret, nbStripesAfterBlock, f_acc512);
4801 *nbStripesSoFarPtr = nbStripesAfterBlock;
4803 XXH3_accumulate(acc, input, secret + nbStripesSoFarPtr[0] * XXH_SECRET_CONSUME_RATE, nbStripes, f_acc512);
4804 *nbStripesSoFarPtr += nbStripes;
4808#ifndef XXH3_STREAM_USE_STACK
4810# define XXH3_STREAM_USE_STACK 1
4818 const xxh_u8* XXH_RESTRICT input,
size_t len,
4819 XXH3_f_accumulate_512 f_acc512,
4820 XXH3_f_scrambleAcc f_scramble)
4823 XXH_ASSERT(
len == 0);
4828 {
const xxh_u8*
const bEnd = input +
len;
4829 const unsigned char*
const secret = (
state->extSecret ==
NULL) ?
state->customSecret :
state->extSecret;
4830#if defined(XXH3_STREAM_USE_STACK) && XXH3_STREAM_USE_STACK >= 1
4835 XXH_ALIGN(XXH_ACC_ALIGN) xxh_u64 acc[8];
memcpy(acc,
state->acc,
sizeof(acc));
4837 xxh_u64* XXH_RESTRICT
const acc =
state->acc;
4840 XXH_ASSERT(
state->bufferedSize <= XXH3_INTERNALBUFFER_SIZE);
4843 if (
state->bufferedSize +
len <= XXH3_INTERNALBUFFER_SIZE) {
4844 XXH_memcpy(
state->buffer +
state->bufferedSize, input,
len);
4845 state->bufferedSize += (XXH32_hash_t)
len;
4850 #define XXH3_INTERNALBUFFER_STRIPES (XXH3_INTERNALBUFFER_SIZE / XXH_STRIPE_LEN)
4851 XXH_STATIC_ASSERT(XXH3_INTERNALBUFFER_SIZE % XXH_STRIPE_LEN == 0);
4857 if (
state->bufferedSize) {
4858 size_t const loadSize = XXH3_INTERNALBUFFER_SIZE -
state->bufferedSize;
4859 XXH_memcpy(
state->buffer +
state->bufferedSize, input, loadSize);
4861 XXH3_consumeStripes(acc,
4862 &
state->nbStripesSoFar,
state->nbStripesPerBlock,
4863 state->buffer, XXH3_INTERNALBUFFER_STRIPES,
4864 secret,
state->secretLimit,
4865 f_acc512, f_scramble);
4866 state->bufferedSize = 0;
4868 XXH_ASSERT(input < bEnd);
4871 if ((
size_t)(bEnd - input) >
state->nbStripesPerBlock * XXH_STRIPE_LEN) {
4872 size_t nbStripes = (size_t)(bEnd - 1 - input) / XXH_STRIPE_LEN;
4873 XXH_ASSERT(
state->nbStripesPerBlock >=
state->nbStripesSoFar);
4875 {
size_t const nbStripesToEnd =
state->nbStripesPerBlock -
state->nbStripesSoFar;
4876 XXH_ASSERT(nbStripes <= nbStripes);
4877 XXH3_accumulate(acc, input, secret +
state->nbStripesSoFar * XXH_SECRET_CONSUME_RATE, nbStripesToEnd, f_acc512);
4878 f_scramble(acc, secret +
state->secretLimit);
4879 state->nbStripesSoFar = 0;
4880 input += nbStripesToEnd * XXH_STRIPE_LEN;
4881 nbStripes -= nbStripesToEnd;
4884 while(nbStripes >=
state->nbStripesPerBlock) {
4885 XXH3_accumulate(acc, input, secret,
state->nbStripesPerBlock, f_acc512);
4886 f_scramble(acc, secret +
state->secretLimit);
4887 input +=
state->nbStripesPerBlock * XXH_STRIPE_LEN;
4888 nbStripes -=
state->nbStripesPerBlock;
4891 XXH3_accumulate(acc, input, secret, nbStripes, f_acc512);
4892 input += nbStripes * XXH_STRIPE_LEN;
4893 XXH_ASSERT(input < bEnd);
4894 state->nbStripesSoFar = nbStripes;
4896 XXH_memcpy(
state->buffer +
sizeof(
state->buffer) - XXH_STRIPE_LEN, input - XXH_STRIPE_LEN, XXH_STRIPE_LEN);
4897 XXH_ASSERT(bEnd - input <= XXH_STRIPE_LEN);
4901 if (bEnd - input > XXH3_INTERNALBUFFER_SIZE) {
4902 const xxh_u8*
const limit = bEnd - XXH3_INTERNALBUFFER_SIZE;
4904 XXH3_consumeStripes(acc,
4905 &
state->nbStripesSoFar,
state->nbStripesPerBlock,
4906 input, XXH3_INTERNALBUFFER_STRIPES,
4907 secret,
state->secretLimit,
4908 f_acc512, f_scramble);
4909 input += XXH3_INTERNALBUFFER_SIZE;
4910 }
while (input<limit);
4912 XXH_memcpy(
state->buffer +
sizeof(
state->buffer) - XXH_STRIPE_LEN, input - XXH_STRIPE_LEN, XXH_STRIPE_LEN);
4917 XXH_ASSERT(input < bEnd);
4918 XXH_ASSERT(bEnd - input <= XXH3_INTERNALBUFFER_SIZE);
4919 XXH_ASSERT(
state->bufferedSize == 0);
4920 XXH_memcpy(
state->buffer, input, (
size_t)(bEnd-input));
4921 state->bufferedSize = (XXH32_hash_t)(bEnd-input);
4922#if defined(XXH3_STREAM_USE_STACK) && XXH3_STREAM_USE_STACK >= 1
4935 return XXH3_update(
state, (
const xxh_u8*)input,
len,
4936 XXH3_accumulate_512, XXH3_scrambleAcc);
4940XXH_FORCE_INLINE
void
4943 const unsigned char* secret)
4949 XXH_memcpy(acc,
state->acc,
sizeof(
state->acc));
4950 if (
state->bufferedSize >= XXH_STRIPE_LEN) {
4951 size_t const nbStripes = (
state->bufferedSize - 1) / XXH_STRIPE_LEN;
4952 size_t nbStripesSoFar =
state->nbStripesSoFar;
4953 XXH3_consumeStripes(acc,
4954 &nbStripesSoFar,
state->nbStripesPerBlock,
4955 state->buffer, nbStripes,
4956 secret,
state->secretLimit,
4957 XXH3_accumulate_512, XXH3_scrambleAcc);
4959 XXH3_accumulate_512(acc,
4960 state->buffer +
state->bufferedSize - XXH_STRIPE_LEN,
4961 secret +
state->secretLimit - XXH_SECRET_LASTACC_START);
4963 xxh_u8 lastStripe[XXH_STRIPE_LEN];
4964 size_t const catchupSize = XXH_STRIPE_LEN -
state->bufferedSize;
4965 XXH_ASSERT(
state->bufferedSize > 0);
4966 XXH_memcpy(lastStripe,
state->buffer +
sizeof(
state->buffer) - catchupSize, catchupSize);
4967 XXH_memcpy(lastStripe + catchupSize,
state->buffer,
state->bufferedSize);
4968 XXH3_accumulate_512(acc,
4970 secret +
state->secretLimit - XXH_SECRET_LASTACC_START);
4977 const unsigned char*
const secret = (
state->extSecret ==
NULL) ?
state->customSecret :
state->extSecret;
4978 if (
state->totalLen > XXH3_MIDSIZE_MAX) {
4980 XXH3_digest_long(acc,
state, secret);
4981 return XXH3_mergeAccs(acc,
4982 secret + XXH_SECRET_MERGEACCS_START,
4983 (xxh_u64)
state->totalLen * XXH_PRIME64_1);
4989 secret,
state->secretLimit + XXH_STRIPE_LEN);
5012XXH3_len_1to3_128b(
const xxh_u8* input,
size_t len,
const xxh_u8* secret,
XXH64_hash_t seed)
5015 XXH_ASSERT(input !=
NULL);
5016 XXH_ASSERT(1 <=
len &&
len <= 3);
5017 XXH_ASSERT(secret !=
NULL);
5023 { xxh_u8
const c1 = input[0];
5024 xxh_u8
const c2 = input[
len >> 1];
5025 xxh_u8
const c3 = input[
len - 1];
5026 xxh_u32
const combinedl = ((xxh_u32)c1 <<16) | ((xxh_u32)c2 << 24)
5027 | ((xxh_u32)c3 << 0) | ((xxh_u32)
len << 8);
5028 xxh_u32
const combinedh = XXH_rotl32(XXH_swap32(combinedl), 13);
5029 xxh_u64
const bitflipl = (XXH_readLE32(secret) ^ XXH_readLE32(secret+4)) + seed;
5030 xxh_u64
const bitfliph = (XXH_readLE32(secret+8) ^ XXH_readLE32(secret+12)) - seed;
5031 xxh_u64
const keyed_lo = (xxh_u64)combinedl ^ bitflipl;
5032 xxh_u64
const keyed_hi = (xxh_u64)combinedh ^ bitfliph;
5034 h128.
low64 = XXH64_avalanche(keyed_lo);
5035 h128.
high64 = XXH64_avalanche(keyed_hi);
5041XXH3_len_4to8_128b(
const xxh_u8* input,
size_t len,
const xxh_u8* secret,
XXH64_hash_t seed)
5043 XXH_ASSERT(input !=
NULL);
5044 XXH_ASSERT(secret !=
NULL);
5045 XXH_ASSERT(4 <=
len &&
len <= 8);
5046 seed ^= (xxh_u64)XXH_swap32((xxh_u32)seed) << 32;
5047 { xxh_u32
const input_lo = XXH_readLE32(input);
5048 xxh_u32
const input_hi = XXH_readLE32(input +
len - 4);
5049 xxh_u64
const input_64 = input_lo + ((xxh_u64)input_hi << 32);
5050 xxh_u64
const bitflip = (XXH_readLE64(secret+16) ^ XXH_readLE64(secret+24)) + seed;
5051 xxh_u64
const keyed = input_64 ^ bitflip;
5060 m128.
low64 *= 0x9FB21C651E98DF25ULL;
5068XXH3_len_9to16_128b(
const xxh_u8* input,
size_t len,
const xxh_u8* secret,
XXH64_hash_t seed)
5070 XXH_ASSERT(input !=
NULL);
5071 XXH_ASSERT(secret !=
NULL);
5072 XXH_ASSERT(9 <=
len &&
len <= 16);
5073 { xxh_u64
const bitflipl = (XXH_readLE64(secret+32) ^ XXH_readLE64(secret+40)) - seed;
5074 xxh_u64
const bitfliph = (XXH_readLE64(secret+48) ^ XXH_readLE64(secret+56)) + seed;
5075 xxh_u64
const input_lo = XXH_readLE64(input);
5076 xxh_u64 input_hi = XXH_readLE64(input +
len - 8);
5077 XXH128_hash_t m128 = XXH_mult64to128(input_lo ^ input_hi ^ bitflipl, XXH_PRIME64_1);
5082 m128.
low64 += (xxh_u64)(
len - 1) << 54;
5083 input_hi ^= bitfliph;
5091 if (
sizeof(
void *) <
sizeof(xxh_u64)) {
5098 m128.
high64 += (input_hi & 0xFFFFFFFF00000000ULL) + XXH_mult32to64((xxh_u32)input_hi, XXH_PRIME32_2);
5124 m128.
high64 += input_hi + XXH_mult32to64((xxh_u32)input_hi, XXH_PRIME32_2 - 1);
5143XXH3_len_0to16_128b(
const xxh_u8* input,
size_t len,
const xxh_u8* secret,
XXH64_hash_t seed)
5145 XXH_ASSERT(
len <= 16);
5146 {
if (
len > 8)
return XXH3_len_9to16_128b(input,
len, secret, seed);
5147 if (
len >= 4)
return XXH3_len_4to8_128b(input,
len, secret, seed);
5148 if (
len)
return XXH3_len_1to3_128b(input,
len, secret, seed);
5150 xxh_u64
const bitflipl = XXH_readLE64(secret+64) ^ XXH_readLE64(secret+72);
5151 xxh_u64
const bitfliph = XXH_readLE64(secret+80) ^ XXH_readLE64(secret+88);
5152 h128.
low64 = XXH64_avalanche(seed ^ bitflipl);
5153 h128.
high64 = XXH64_avalanche( seed ^ bitfliph);
5162XXH128_mix32B(
XXH128_hash_t acc,
const xxh_u8* input_1,
const xxh_u8* input_2,
5165 acc.
low64 += XXH3_mix16B (input_1, secret+0, seed);
5166 acc.
low64 ^= XXH_readLE64(input_2) + XXH_readLE64(input_2 + 8);
5167 acc.
high64 += XXH3_mix16B (input_2, secret+16, seed);
5168 acc.
high64 ^= XXH_readLE64(input_1) + XXH_readLE64(input_1 + 8);
5174XXH3_len_17to128_128b(
const xxh_u8* XXH_RESTRICT input,
size_t len,
5175 const xxh_u8* XXH_RESTRICT secret,
size_t secretSize,
5179 XXH_ASSERT(16 <
len &&
len <= 128);
5187 acc = XXH128_mix32B(acc, input+48, input+
len-64, secret+96, seed);
5189 acc = XXH128_mix32B(acc, input+32, input+
len-48, secret+64, seed);
5191 acc = XXH128_mix32B(acc, input+16, input+
len-32, secret+32, seed);
5193 acc = XXH128_mix32B(acc, input, input+
len-16, secret, seed);
5197 + (acc.
high64 * XXH_PRIME64_4)
5198 + ((
len - seed) * XXH_PRIME64_2);
5207XXH3_len_129to240_128b(
const xxh_u8* XXH_RESTRICT input,
size_t len,
5208 const xxh_u8* XXH_RESTRICT secret,
size_t secretSize,
5212 XXH_ASSERT(128 <
len &&
len <= XXH3_MIDSIZE_MAX);
5215 int const nbRounds = (int)
len / 32;
5219 for (i=0; i<4; i++) {
5220 acc = XXH128_mix32B(acc,
5222 input + (32 * i) + 16,
5228 XXH_ASSERT(nbRounds >= 4);
5229 for (i=4 ; i < nbRounds; i++) {
5230 acc = XXH128_mix32B(acc,
5232 input + (32 * i) + 16,
5233 secret + XXH3_MIDSIZE_STARTOFFSET + (32 * (i - 4)),
5237 acc = XXH128_mix32B(acc,
5246 + (acc.
high64 * XXH_PRIME64_4)
5247 + ((
len - seed) * XXH_PRIME64_2);
5256XXH3_hashLong_128b_internal(
const void* XXH_RESTRICT input,
size_t len,
5257 const xxh_u8* XXH_RESTRICT secret,
size_t secretSize,
5258 XXH3_f_accumulate_512 f_acc512,
5259 XXH3_f_scrambleAcc f_scramble)
5261 XXH_ALIGN(XXH_ACC_ALIGN) xxh_u64 acc[XXH_ACC_NB] = XXH3_INIT_ACC;
5263 XXH3_hashLong_internal_loop(acc, (
const xxh_u8*)input,
len, secret, secretSize, f_acc512, f_scramble);
5266 XXH_STATIC_ASSERT(
sizeof(acc) == 64);
5267 XXH_ASSERT(secretSize >=
sizeof(acc) + XXH_SECRET_MERGEACCS_START);
5269 h128.
low64 = XXH3_mergeAccs(acc,
5270 secret + XXH_SECRET_MERGEACCS_START,
5271 (xxh_u64)
len * XXH_PRIME64_1);
5272 h128.
high64 = XXH3_mergeAccs(acc,
5274 -
sizeof(acc) - XXH_SECRET_MERGEACCS_START,
5275 ~((xxh_u64)
len * XXH_PRIME64_2));
5284XXH3_hashLong_128b_default(
const void* XXH_RESTRICT input,
size_t len,
5286 const void* XXH_RESTRICT secret,
size_t secretLen)
5289 return XXH3_hashLong_128b_internal(input,
len, XXH3_kSecret,
sizeof(XXH3_kSecret),
5290 XXH3_accumulate_512, XXH3_scrambleAcc);
5298XXH3_hashLong_128b_withSecret(
const void* XXH_RESTRICT input,
size_t len,
5300 const void* XXH_RESTRICT secret,
size_t secretLen)
5303 return XXH3_hashLong_128b_internal(input,
len, (
const xxh_u8*)secret, secretLen,
5304 XXH3_accumulate_512, XXH3_scrambleAcc);
5308XXH3_hashLong_128b_withSeed_internal(
const void* XXH_RESTRICT input,
size_t len,
5310 XXH3_f_accumulate_512 f_acc512,
5311 XXH3_f_scrambleAcc f_scramble,
5312 XXH3_f_initCustomSecret f_initSec)
5315 return XXH3_hashLong_128b_internal(input,
len,
5316 XXH3_kSecret,
sizeof(XXH3_kSecret),
5317 f_acc512, f_scramble);
5318 { XXH_ALIGN(XXH_SEC_ALIGN) xxh_u8 secret[XXH_SECRET_DEFAULT_SIZE];
5319 f_initSec(secret, seed64);
5320 return XXH3_hashLong_128b_internal(input,
len, (
const xxh_u8*)secret,
sizeof(secret),
5321 f_acc512, f_scramble);
5329XXH3_hashLong_128b_withSeed(
const void* input,
size_t len,
5330 XXH64_hash_t seed64,
const void* XXH_RESTRICT secret,
size_t secretLen)
5333 return XXH3_hashLong_128b_withSeed_internal(input,
len, seed64,
5334 XXH3_accumulate_512, XXH3_scrambleAcc, XXH3_initCustomSecret);
5337typedef XXH128_hash_t (*XXH3_hashLong128_f)(
const void* XXH_RESTRICT, size_t,
5341XXH3_128bits_internal(
const void* input,
size_t len,
5342 XXH64_hash_t seed64,
const void* XXH_RESTRICT secret,
size_t secretLen,
5343 XXH3_hashLong128_f f_hl128)
5353 return XXH3_len_0to16_128b((
const xxh_u8*)input,
len, (
const xxh_u8*)secret, seed64);
5355 return XXH3_len_17to128_128b((
const xxh_u8*)input,
len, (
const xxh_u8*)secret, secretLen, seed64);
5356 if (
len <= XXH3_MIDSIZE_MAX)
5357 return XXH3_len_129to240_128b((
const xxh_u8*)input,
len, (
const xxh_u8*)secret, secretLen, seed64);
5358 return f_hl128(input,
len, seed64, secret, secretLen);
5367 return XXH3_128bits_internal(input,
len, 0,
5368 XXH3_kSecret,
sizeof(XXH3_kSecret),
5369 XXH3_hashLong_128b_default);
5376 return XXH3_128bits_internal(input,
len, 0,
5377 (
const xxh_u8*)secret, secretSize,
5378 XXH3_hashLong_128b_withSecret);
5385 return XXH3_128bits_internal(input,
len, seed,
5386 XXH3_kSecret,
sizeof(XXH3_kSecret),
5387 XXH3_hashLong_128b_withSeed);
5392XXH3_128bits_withSecretandSeed(
const void* input,
size_t len,
const void* secret,
size_t secretSize,
XXH64_hash_t seed)
5394 if (
len <= XXH3_MIDSIZE_MAX)
5395 return XXH3_128bits_internal(input,
len, seed, XXH3_kSecret,
sizeof(XXH3_kSecret),
NULL);
5396 return XXH3_hashLong_128b_withSecret(input,
len, seed, secret, secretSize);
5437XXH3_128bits_reset_withSecretandSeed(
XXH3_state_t* statePtr,
const void* secret,
size_t secretSize,
XXH64_hash_t seed)
5439 return XXH3_64bits_reset_withSecretandSeed(statePtr, secret, secretSize, seed);
5446 return XXH3_update(
state, (
const xxh_u8*)input,
len,
5447 XXH3_accumulate_512, XXH3_scrambleAcc);
5453 const unsigned char*
const secret = (
state->extSecret ==
NULL) ?
state->customSecret :
state->extSecret;
5454 if (
state->totalLen > XXH3_MIDSIZE_MAX) {
5456 XXH3_digest_long(acc,
state, secret);
5457 XXH_ASSERT(
state->secretLimit + XXH_STRIPE_LEN >=
sizeof(acc) + XXH_SECRET_MERGEACCS_START);
5459 h128.
low64 = XXH3_mergeAccs(acc,
5460 secret + XXH_SECRET_MERGEACCS_START,
5461 (xxh_u64)
state->totalLen * XXH_PRIME64_1);
5462 h128.
high64 = XXH3_mergeAccs(acc,
5463 secret +
state->secretLimit + XXH_STRIPE_LEN
5464 -
sizeof(acc) - XXH_SECRET_MERGEACCS_START,
5465 ~((xxh_u64)
state->totalLen * XXH_PRIME64_2));
5473 secret,
state->secretLimit + XXH_STRIPE_LEN);
5485 return !(memcmp(&h1, &h2,
sizeof(h1)));
5499 if (hcmp)
return hcmp;
5510 if (XXH_CPU_LITTLE_ENDIAN) {
5511 hash.high64 = XXH_swap64(
hash.high64);
5512 hash.low64 = XXH_swap64(
hash.low64);
5514 XXH_memcpy(dst, &
hash.high64,
sizeof(
hash.high64));
5515 XXH_memcpy((
char*)dst +
sizeof(
hash.high64), &
hash.low64,
sizeof(
hash.low64));
5523 h.
high64 = XXH_readBE64(src);
5534#define XXH_MIN(x, y) (((x) > (y)) ? (y) : (x))
5538 XXH_writeLE64( dst, XXH_readLE64(dst) ^ h128.
low64 );
5539 XXH_writeLE64( (
char*)dst+8, XXH_readLE64((
char*)dst+8) ^ h128.
high64 );
5544XXH3_generateSecret(
void* secretBuffer,
size_t secretSize,
const void* customSeed,
size_t customSeedSize)
5546 XXH_ASSERT(secretBuffer !=
NULL);
5550 if (customSeedSize == 0) {
5551 customSeed = XXH3_kSecret;
5552 customSeedSize = XXH_SECRET_DEFAULT_SIZE;
5554 XXH_ASSERT(customSeed !=
NULL);
5559 while (
pos < secretSize) {
5560 size_t const toCopy = XXH_MIN((secretSize -
pos), customSeedSize);
5561 memcpy((
char*)secretBuffer +
pos, customSeed, toCopy);
5565 {
size_t const nbSeg16 = secretSize / 16;
5569 for (
n=0;
n<nbSeg16;
n++) {
5570 XXH128_hash_t const h128 = XXH128(&scrambler,
sizeof(scrambler),
n);
5571 XXH3_combine16((
char*)secretBuffer +
n*16, h128);
5581XXH3_generateSecret_fromSeed(
void* secretBuffer,
XXH64_hash_t seed)
5583 XXH_ALIGN(XXH_SEC_ALIGN) xxh_u8 secret[XXH_SECRET_DEFAULT_SIZE];
5584 XXH3_initCustomSecret(secret, seed);
5585 XXH_ASSERT(secretBuffer !=
NULL);
5586 memcpy(secretBuffer, secret, XXH_SECRET_DEFAULT_SIZE);
5592#if XXH_VECTOR == XXH_AVX2 \
5593 && defined(__GNUC__) && !defined(__clang__) \
5594 && defined(__OPTIMIZE__) && !defined(__OPTIMIZE_SIZE__)
5595# pragma GCC pop_options
5608#if defined (__cplusplus)
memset(ptr, 0, type->size)
unsigned long long XXH64_hash_t
#define XXH_VERSION_NUMBER
XXH_PUBLIC_API unsigned XXH_versionNumber(void)
Obtains the xxHash version.
struct XXH32_state_s XXH32_state_t
The opaque state struct for the XXH32 streaming API.
XXH_PUBLIC_API XXH32_hash_t XXH32(const void *input, size_t length, XXH32_hash_t seed)
Calculates the 32-bit hash of input using xxHash32.
XXH_PUBLIC_API XXH32_hash_t XXH32_hashFromCanonical(const XXH32_canonical_t *src)
Converts an XXH32_canonical_t to a native XXH32_hash_t.
XXH_PUBLIC_API XXH32_hash_t XXH32_digest(const XXH32_state_t *statePtr)
Returns the calculated hash value from an XXH32_state_t.
XXH_PUBLIC_API XXH32_state_t * XXH32_createState(void)
Allocates an XXH32_state_t.
XXH_PUBLIC_API XXH_errorcode XXH32_reset(XXH32_state_t *statePtr, XXH32_hash_t seed)
Resets an XXH32_state_t to begin a new hash.
XXH_PUBLIC_API void XXH32_copyState(XXH32_state_t *dst_state, const XXH32_state_t *src_state)
Copies one XXH32_state_t to another.
XXH_PUBLIC_API XXH_errorcode XXH32_freeState(XXH32_state_t *statePtr)
Frees an XXH32_state_t.
XXH_PUBLIC_API XXH_errorcode XXH32_update(XXH32_state_t *statePtr, const void *input, size_t length)
Consumes a block of input to an XXH32_state_t.
XXH_PUBLIC_API void XXH32_canonicalFromHash(XXH32_canonical_t *dst, XXH32_hash_t hash)
Converts an XXH32_hash_t to a big endian XXH32_canonical_t.
XXH_PUBLIC_API XXH128_hash_t XXH3_128bits_withSecret(const void *data, size_t len, const void *secret, size_t secretSize)
XXH_PUBLIC_API XXH128_hash_t XXH3_128bits_withSeed(const void *data, size_t len, XXH64_hash_t seed)
XXH_PUBLIC_API XXH_errorcode XXH3_128bits_update(XXH3_state_t *statePtr, const void *input, size_t length)
XXH_PUBLIC_API XXH_errorcode XXH3_64bits_reset_withSeed(XXH3_state_t *statePtr, XXH64_hash_t seed)
XXH_PUBLIC_API XXH_errorcode XXH3_128bits_reset_withSeed(XXH3_state_t *statePtr, XXH64_hash_t seed)
XXH_PUBLIC_API XXH128_hash_t XXH3_128bits(const void *data, size_t len)
XXH_PUBLIC_API XXH3_state_t * XXH3_createState(void)
XXH_PUBLIC_API XXH64_hash_t XXH3_64bits_digest(const XXH3_state_t *statePtr)
XXH_PUBLIC_API int XXH128_cmp(const void *h128_1, const void *h128_2)
XXH_PUBLIC_API XXH128_hash_t XXH128_hashFromCanonical(const XXH128_canonical_t *src)
XXH_PUBLIC_API XXH64_hash_t XXH3_64bits_withSeed(const void *data, size_t len, XXH64_hash_t seed)
XXH_PUBLIC_API XXH_errorcode XXH3_64bits_reset_withSecret(XXH3_state_t *statePtr, const void *secret, size_t secretSize)
XXH_PUBLIC_API XXH_errorcode XXH3_64bits_update(XXH3_state_t *statePtr, const void *input, size_t length)
XXH_PUBLIC_API XXH64_hash_t XXH3_64bits(const void *data, size_t len)
XXH_PUBLIC_API XXH_errorcode XXH3_128bits_reset(XXH3_state_t *statePtr)
struct XXH3_state_s XXH3_state_t
The state struct for the XXH3 streaming API.
XXH_PUBLIC_API XXH_errorcode XXH3_freeState(XXH3_state_t *statePtr)
XXH_PUBLIC_API void XXH3_copyState(XXH3_state_t *dst_state, const XXH3_state_t *src_state)
XXH_PUBLIC_API int XXH128_isEqual(XXH128_hash_t h1, XXH128_hash_t h2)
XXH_PUBLIC_API XXH128_hash_t XXH3_128bits_digest(const XXH3_state_t *statePtr)
XXH_PUBLIC_API XXH_errorcode XXH3_64bits_reset(XXH3_state_t *statePtr)
XXH_PUBLIC_API void XXH128_canonicalFromHash(XXH128_canonical_t *dst, XXH128_hash_t hash)
XXH_PUBLIC_API XXH64_hash_t XXH3_64bits_withSecret(const void *data, size_t len, const void *secret, size_t secretSize)
#define XXH3_SECRET_SIZE_MIN
XXH_PUBLIC_API XXH_errorcode XXH3_128bits_reset_withSecret(XXH3_state_t *statePtr, const void *secret, size_t secretSize)
struct XXH64_state_s XXH64_state_t
The opaque state struct for the XXH64 streaming API.
XXH_PUBLIC_API XXH64_hash_t XXH64(const void *input, size_t length, XXH64_hash_t seed)
Calculates the 64-bit hash of input using xxHash64.
XXH_PUBLIC_API void XXH64_copyState(XXH64_state_t *dst_state, const XXH64_state_t *src_state)
XXH_PUBLIC_API XXH_errorcode XXH64_reset(XXH64_state_t *statePtr, XXH64_hash_t seed)
XXH_PUBLIC_API XXH_errorcode XXH64_update(XXH64_state_t *statePtr, const void *input, size_t length)
XXH_PUBLIC_API XXH64_state_t * XXH64_createState(void)
XXH_PUBLIC_API XXH64_hash_t XXH64_digest(const XXH64_state_t *statePtr)
XXH_PUBLIC_API XXH64_hash_t XXH64_hashFromCanonical(const XXH64_canonical_t *src)
XXH_PUBLIC_API XXH_errorcode XXH64_freeState(XXH64_state_t *statePtr)
XXH_PUBLIC_API void XXH64_canonicalFromHash(XXH64_canonical_t *dst, XXH64_hash_t hash)
hash(string $algo, string $data, bool $binary=false, array $options=[])
unsigned const char * pos
#define offsetof(STRUCTURE, FIELD)
unsigned char digest[sizeof(XXH128_hash_t)]
The return value from 128-bit hashes.
Canonical (big endian) representation of XXH32_hash_t.
unsigned char digest[sizeof(XXH64_hash_t)]
ZEND_API void(ZEND_FASTCALL *zend_touch_vm_stack_data)(void *vm_stack_data)