244#if defined (__cplusplus)
268# define XXH_STATIC_LINKING_ONLY
281# define XXH_IMPLEMENTATION
302# define XXH_INLINE_ALL
303# undef XXH_INLINE_ALL
307# define XXH_PRIVATE_API
308# undef XXH_PRIVATE_API
322# define XXH_NAMESPACE
326#if (defined(XXH_INLINE_ALL) || defined(XXH_PRIVATE_API)) \
327 && !defined(XXH_INLINE_ALL_31684351384)
329# define XXH_INLINE_ALL_31684351384
331# undef XXH_STATIC_LINKING_ONLY
332# define XXH_STATIC_LINKING_ONLY
334# undef XXH_PUBLIC_API
335# if defined(__GNUC__)
336# define XXH_PUBLIC_API static __inline __attribute__((__unused__))
337# elif defined (__cplusplus) || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) )
338# define XXH_PUBLIC_API static inline
339# elif defined(_MSC_VER)
340# define XXH_PUBLIC_API static __inline
343# define XXH_PUBLIC_API static
359# undef XXH_versionNumber
362# undef XXH32_createState
363# undef XXH32_freeState
367# undef XXH32_copyState
368# undef XXH32_canonicalFromHash
369# undef XXH32_hashFromCanonical
372# undef XXH64_createState
373# undef XXH64_freeState
377# undef XXH64_copyState
378# undef XXH64_canonicalFromHash
379# undef XXH64_hashFromCanonical
382# undef XXH3_64bits_withSecret
383# undef XXH3_64bits_withSeed
384# undef XXH3_64bits_withSecretandSeed
385# undef XXH3_createState
386# undef XXH3_freeState
387# undef XXH3_copyState
388# undef XXH3_64bits_reset
389# undef XXH3_64bits_reset_withSeed
390# undef XXH3_64bits_reset_withSecret
391# undef XXH3_64bits_update
392# undef XXH3_64bits_digest
393# undef XXH3_generateSecret
397# undef XXH3_128bits_withSeed
398# undef XXH3_128bits_withSecret
399# undef XXH3_128bits_reset
400# undef XXH3_128bits_reset_withSeed
401# undef XXH3_128bits_reset_withSecret
402# undef XXH3_128bits_reset_withSecretandSeed
403# undef XXH3_128bits_update
404# undef XXH3_128bits_digest
405# undef XXH128_isEqual
407# undef XXH128_canonicalFromHash
408# undef XXH128_hashFromCanonical
413# define XXH_NAMESPACE XXH_INLINE_
421# define XXH_IPREF(Id) XXH_NAMESPACE ## Id
422# define XXH_OK XXH_IPREF(XXH_OK)
423# define XXH_ERROR XXH_IPREF(XXH_ERROR)
424# define XXH_errorcode XXH_IPREF(XXH_errorcode)
425# define XXH32_canonical_t XXH_IPREF(XXH32_canonical_t)
426# define XXH64_canonical_t XXH_IPREF(XXH64_canonical_t)
427# define XXH128_canonical_t XXH_IPREF(XXH128_canonical_t)
428# define XXH32_state_s XXH_IPREF(XXH32_state_s)
429# define XXH32_state_t XXH_IPREF(XXH32_state_t)
430# define XXH64_state_s XXH_IPREF(XXH64_state_s)
431# define XXH64_state_t XXH_IPREF(XXH64_state_t)
432# define XXH3_state_s XXH_IPREF(XXH3_state_s)
433# define XXH3_state_t XXH_IPREF(XXH3_state_t)
434# define XXH128_hash_t XXH_IPREF(XXH128_hash_t)
436# undef XXHASH_H_5627135585666179
437# undef XXHASH_H_STATIC_13879238742
443#ifndef XXHASH_H_5627135585666179
444#define XXHASH_H_5627135585666179 1
447#if !defined(XXH_INLINE_ALL) && !defined(XXH_PRIVATE_API)
448# if defined(_WIN32) && defined(_MSC_VER) && (defined(XXH_IMPORT) || defined(XXH_EXPORT))
450# define XXH_PUBLIC_API __declspec(dllexport)
452# define XXH_PUBLIC_API __declspec(dllimport)
455# define XXH_PUBLIC_API
460# define XXH_CAT(A,B) A##B
461# define XXH_NAME2(A,B) XXH_CAT(A,B)
462# define XXH_versionNumber XXH_NAME2(XXH_NAMESPACE, XXH_versionNumber)
464# define XXH32 XXH_NAME2(XXH_NAMESPACE, XXH32)
465# define XXH32_createState XXH_NAME2(XXH_NAMESPACE, XXH32_createState)
466# define XXH32_freeState XXH_NAME2(XXH_NAMESPACE, XXH32_freeState)
467# define XXH32_reset XXH_NAME2(XXH_NAMESPACE, XXH32_reset)
468# define XXH32_update XXH_NAME2(XXH_NAMESPACE, XXH32_update)
469# define XXH32_digest XXH_NAME2(XXH_NAMESPACE, XXH32_digest)
470# define XXH32_copyState XXH_NAME2(XXH_NAMESPACE, XXH32_copyState)
471# define XXH32_canonicalFromHash XXH_NAME2(XXH_NAMESPACE, XXH32_canonicalFromHash)
472# define XXH32_hashFromCanonical XXH_NAME2(XXH_NAMESPACE, XXH32_hashFromCanonical)
474# define XXH64 XXH_NAME2(XXH_NAMESPACE, XXH64)
475# define XXH64_createState XXH_NAME2(XXH_NAMESPACE, XXH64_createState)
476# define XXH64_freeState XXH_NAME2(XXH_NAMESPACE, XXH64_freeState)
477# define XXH64_reset XXH_NAME2(XXH_NAMESPACE, XXH64_reset)
478# define XXH64_update XXH_NAME2(XXH_NAMESPACE, XXH64_update)
479# define XXH64_digest XXH_NAME2(XXH_NAMESPACE, XXH64_digest)
480# define XXH64_copyState XXH_NAME2(XXH_NAMESPACE, XXH64_copyState)
481# define XXH64_canonicalFromHash XXH_NAME2(XXH_NAMESPACE, XXH64_canonicalFromHash)
482# define XXH64_hashFromCanonical XXH_NAME2(XXH_NAMESPACE, XXH64_hashFromCanonical)
484# define XXH3_64bits XXH_NAME2(XXH_NAMESPACE, XXH3_64bits)
485# define XXH3_64bits_withSecret XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_withSecret)
486# define XXH3_64bits_withSeed XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_withSeed)
487# define XXH3_64bits_withSecretandSeed XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_withSecretandSeed)
488# define XXH3_createState XXH_NAME2(XXH_NAMESPACE, XXH3_createState)
489# define XXH3_freeState XXH_NAME2(XXH_NAMESPACE, XXH3_freeState)
490# define XXH3_copyState XXH_NAME2(XXH_NAMESPACE, XXH3_copyState)
491# define XXH3_64bits_reset XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_reset)
492# define XXH3_64bits_reset_withSeed XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_reset_withSeed)
493# define XXH3_64bits_reset_withSecret XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_reset_withSecret)
494# define XXH3_64bits_reset_withSecretandSeed XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_reset_withSecretandSeed)
495# define XXH3_64bits_update XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_update)
496# define XXH3_64bits_digest XXH_NAME2(XXH_NAMESPACE, XXH3_64bits_digest)
497# define XXH3_generateSecret XXH_NAME2(XXH_NAMESPACE, XXH3_generateSecret)
498# define XXH3_generateSecret_fromSeed XXH_NAME2(XXH_NAMESPACE, XXH3_generateSecret_fromSeed)
500# define XXH128 XXH_NAME2(XXH_NAMESPACE, XXH128)
501# define XXH3_128bits XXH_NAME2(XXH_NAMESPACE, XXH3_128bits)
502# define XXH3_128bits_withSeed XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_withSeed)
503# define XXH3_128bits_withSecret XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_withSecret)
504# define XXH3_128bits_withSecretandSeed XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_withSecretandSeed)
505# define XXH3_128bits_reset XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_reset)
506# define XXH3_128bits_reset_withSeed XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_reset_withSeed)
507# define XXH3_128bits_reset_withSecret XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_reset_withSecret)
508# define XXH3_128bits_reset_withSecretandSeed XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_reset_withSecretandSeed)
509# define XXH3_128bits_update XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_update)
510# define XXH3_128bits_digest XXH_NAME2(XXH_NAMESPACE, XXH3_128bits_digest)
511# define XXH128_isEqual XXH_NAME2(XXH_NAMESPACE, XXH128_isEqual)
512# define XXH128_cmp XXH_NAME2(XXH_NAMESPACE, XXH128_cmp)
513# define XXH128_canonicalFromHash XXH_NAME2(XXH_NAMESPACE, XXH128_canonicalFromHash)
514# define XXH128_hashFromCanonical XXH_NAME2(XXH_NAMESPACE, XXH128_hashFromCanonical)
523#if !defined(XXH_INLINE_ALL) && !defined(XXH_PRIVATE_API)
524# if defined(_WIN32) && defined(_MSC_VER) && (defined(XXH_IMPORT) || defined(XXH_EXPORT))
526# define XXH_PUBLIC_API __declspec(dllexport)
528# define XXH_PUBLIC_API __declspec(dllimport)
531# define XXH_PUBLIC_API
535#if defined (__GNUC__)
536# define XXH_CONSTF __attribute__((__const__))
537# define XXH_PUREF __attribute__((__pure__))
538# define XXH_MALLOCF __attribute__((__malloc__))
548#define XXH_VERSION_MAJOR 0
549#define XXH_VERSION_MINOR 8
550#define XXH_VERSION_RELEASE 3
552#define XXH_VERSION_NUMBER (XXH_VERSION_MAJOR *100*100 + XXH_VERSION_MINOR *100 + XXH_VERSION_RELEASE)
581#if defined(XXH_DOXYGEN)
589#elif !defined (__VMS) \
590 && (defined (__cplusplus) \
591 || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) ) )
593# include <inttypes.h>
601# if UINT_MAX == 0xFFFFFFFFUL
603# elif ULONG_MAX == 0xFFFFFFFFUL
606# error "unsupported platform: need a 32-bit type"
755 unsigned char digest[4];
787#ifdef __has_attribute
788# define XXH_HAS_ATTRIBUTE(x) __has_attribute(x)
790# define XXH_HAS_ATTRIBUTE(x) 0
800#define XXH_C23_VN 201711L
805#if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= XXH_C23_VN) && defined(__has_c_attribute)
806# define XXH_HAS_C_ATTRIBUTE(x) __has_c_attribute(x)
808# define XXH_HAS_C_ATTRIBUTE(x) 0
813#if defined(__cplusplus) && defined(__has_cpp_attribute)
814# define XXH_HAS_CPP_ATTRIBUTE(x) __has_cpp_attribute(x)
816# define XXH_HAS_CPP_ATTRIBUTE(x) 0
827#if XXH_HAS_C_ATTRIBUTE(fallthrough) || XXH_HAS_CPP_ATTRIBUTE(fallthrough)
828# define XXH_FALLTHROUGH [[fallthrough]]
829#elif XXH_HAS_ATTRIBUTE(__fallthrough__)
830# define XXH_FALLTHROUGH __attribute__ ((__fallthrough__))
832# define XXH_FALLTHROUGH
842#if XXH_HAS_ATTRIBUTE(noescape)
843# define XXH_NOESCAPE __attribute__((__noescape__))
856#ifndef XXH_NO_LONG_LONG
860#if defined(XXH_DOXYGEN)
867#elif !defined (__VMS) \
868 && (defined (__cplusplus) \
869 || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) ) )
871# include <inttypes.h>
878# if defined(__LP64__) && ULONG_MAX == 0xFFFFFFFFFFFFFFFFULL
1120# define XXH_SCALAR 0
1123# define XXH_AVX512 3
1191#define XXH3_SECRET_SIZE_MIN 136
1229#ifndef XXH_NO_STREAM
1456#ifndef XXH_NO_STREAM
1644#if defined(XXH_STATIC_LINKING_ONLY) && !defined(XXHASH_H_STATIC_13879238742)
1645#define XXHASH_H_STATIC_13879238742
1682#ifndef XXH_NO_LONG_LONG
1707#if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201112L)
1708# define XXH_ALIGN(n) _Alignas(n)
1709#elif defined(__cplusplus) && (__cplusplus >= 201103L)
1711# define XXH_ALIGN(n) alignas(n)
1712#elif defined(__GNUC__)
1713# define XXH_ALIGN(n) __attribute__ ((aligned(n)))
1714#elif defined(_MSC_VER)
1715# define XXH_ALIGN(n) __declspec(align(n))
1717# define XXH_ALIGN(n)
1721#if !(defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201112L)) \
1722 && ! (defined(__cplusplus) && (__cplusplus >= 201103L)) \
1723 && defined(__GNUC__)
1724# define XXH_ALIGN_MEMBER(align, type) type XXH_ALIGN(align)
1726# define XXH_ALIGN_MEMBER(align, type) XXH_ALIGN(align) type
1736#define XXH3_INTERNALBUFFER_SIZE 256
1746#define XXH3_SECRET_DEFAULT_SIZE 192
1773 XXH_ALIGN_MEMBER(64,
unsigned char customSecret[XXH3_SECRET_DEFAULT_SIZE]);
1799#undef XXH_ALIGN_MEMBER
1812#define XXH3_INITSTATE(XXH3_state_ptr) \
1814 XXH3_state_t* tmp_xxh3_state_ptr = (XXH3_state_ptr); \
1815 tmp_xxh3_state_ptr->seed = 0; \
1816 tmp_xxh3_state_ptr->extSecret = NULL; \
1945#define XXH3_MIDSIZE_MAX 240
1983 XXH_NOESCAPE
const void* secret,
size_t secretSize,
2002 XXH_NOESCAPE
const void* secret,
size_t secretSize,
2005#ifndef XXH_NO_STREAM
2021 XXH_NOESCAPE
const void* secret,
size_t secretSize,
2047 XXH_NOESCAPE
const void* secret,
size_t secretSize,
2054#if defined(XXH_INLINE_ALL) || defined(XXH_PRIVATE_API)
2055# define XXH_IMPLEMENTATION
2088#if ( defined(XXH_INLINE_ALL) || defined(XXH_PRIVATE_API) \
2089 || defined(XXH_IMPLEMENTATION) ) && !defined(XXH_IMPLEM_13a8737387)
2090# define XXH_IMPLEM_13a8737387
2108# define XXH_NO_LONG_LONG
2109# undef XXH_NO_LONG_LONG
2160# define XXH_FORCE_MEMORY_ACCESS 0
2188# define XXH_SIZE_OPT 0
2218# define XXH_FORCE_ALIGN_CHECK 0
2240# define XXH_NO_INLINE_HINTS 0
2257# define XXH3_INLINE_SECRET 0
2269# define XXH32_ENDJMP 0
2278# define XXH_OLD_NAMES
2279# undef XXH_OLD_NAMES
2289# define XXH_NO_STREAM
2290# undef XXH_NO_STREAM
2296#ifndef XXH_FORCE_MEMORY_ACCESS
2300# if defined(__GNUC__) && !(defined(__ARM_ARCH) && __ARM_ARCH < 7 && defined(__ARM_FEATURE_UNALIGNED))
2301# define XXH_FORCE_MEMORY_ACCESS 1
2307# if (defined(__GNUC__) || defined(__clang__)) && defined(__OPTIMIZE_SIZE__)
2308# define XXH_SIZE_OPT 1
2310# define XXH_SIZE_OPT 0
2314#ifndef XXH_FORCE_ALIGN_CHECK
2316# if XXH_SIZE_OPT >= 1 || \
2317 defined(__i386) || defined(__x86_64__) || defined(__aarch64__) || defined(__ARM_FEATURE_UNALIGNED) \
2318 || defined(_M_IX86) || defined(_M_X64) || defined(_M_ARM64) || defined(_M_ARM)
2319# define XXH_FORCE_ALIGN_CHECK 0
2321# define XXH_FORCE_ALIGN_CHECK 1
2325#ifndef XXH_NO_INLINE_HINTS
2326# if XXH_SIZE_OPT >= 1 || defined(__NO_INLINE__)
2327# define XXH_NO_INLINE_HINTS 1
2329# define XXH_NO_INLINE_HINTS 0
2333#ifndef XXH3_INLINE_SECRET
2334# if (defined(__GNUC__) && !defined(__clang__) && __GNUC__ >= 12) \
2335 || !defined(XXH_INLINE_ALL)
2336# define XXH3_INLINE_SECRET 0
2338# define XXH3_INLINE_SECRET 1
2344# define XXH32_ENDJMP 0
2356#if defined(XXH_NO_STREAM)
2358#elif defined(XXH_NO_STDLIB)
2369static XXH_CONSTF
void* XXH_malloc(
size_t s) { (void)s;
return NULL; }
/*
 * XXH_NO_STDLIB variant: nothing was ever allocated, so freeing is a no-op.
 */
static void XXH_free(void* p)
{
    (void)p;   /* silence unused-parameter warning */
}
2384static XXH_MALLOCF
void* XXH_malloc(
size_t s) {
return malloc(s); }
/*
 * Default deallocator: thin pass-through to the C standard library.
 */
static void XXH_free(void* p)
{
    free(p);
}
/*
 * memcpy wrapper used throughout the library for all bulk byte copies.
 * Restored the dropped braces around the body; the defect was purely
 * structural (the `{`/`}` lines were lost in extraction).
 *
 * @param dest destination buffer (must not overlap src)
 * @param src  source buffer
 * @param size number of bytes to copy
 * @return dest, as per memcpy's contract
 */
static void* XXH_memcpy(void* dest, const void* src, size_t size)
{
    return memcpy(dest, src, size);
}
2412# pragma warning(disable : 4127)
2415#if XXH_NO_INLINE_HINTS
2416# if defined(__GNUC__) || defined(__clang__)
2417# define XXH_FORCE_INLINE static __attribute__((__unused__))
2419# define XXH_FORCE_INLINE static
2421# define XXH_NO_INLINE static
2423#elif defined(__GNUC__) || defined(__clang__)
2424# define XXH_FORCE_INLINE static __inline__ __attribute__((__always_inline__, __unused__))
2425# define XXH_NO_INLINE static __attribute__((__noinline__))
2426#elif defined(_MSC_VER)
2427# define XXH_FORCE_INLINE static __forceinline
2428# define XXH_NO_INLINE static __declspec(noinline)
2429#elif defined (__cplusplus) \
2430 || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L))
2431# define XXH_FORCE_INLINE static inline
2432# define XXH_NO_INLINE static
2434# define XXH_FORCE_INLINE static
2435# define XXH_NO_INLINE static
2438#if defined(XXH_INLINE_ALL)
2439# define XXH_STATIC XXH_FORCE_INLINE
2441# define XXH_STATIC static
2444#if XXH3_INLINE_SECRET
2445# define XXH3_WITH_SECRET_INLINE XXH_FORCE_INLINE
2447# define XXH3_WITH_SECRET_INLINE XXH_NO_INLINE
2450#if ((defined(sun) || defined(__sun)) && __cplusplus)
2451# define XXH_RESTRICT
2452#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
2453# define XXH_RESTRICT restrict
2454#elif (defined (__GNUC__) && ((__GNUC__ > 3) || (__GNUC__ == 3 && __GNUC_MINOR__ >= 1))) \
2455 || (defined (__clang__)) \
2456 || (defined (_MSC_VER) && (_MSC_VER >= 1400)) \
2457 || (defined (__INTEL_COMPILER) && (__INTEL_COMPILER >= 1300))
2462# define XXH_RESTRICT __restrict
2464# define XXH_RESTRICT
2478#ifndef XXH_DEBUGLEVEL
2480# define XXH_DEBUGLEVEL DEBUGLEVEL
2482# define XXH_DEBUGLEVEL 0
2486#if (XXH_DEBUGLEVEL>=1)
2488# define XXH_ASSERT(c) assert(c)
2490# if defined(__INTEL_COMPILER)
2491# define XXH_ASSERT(c) XXH_ASSUME((unsigned char) (c))
2493# define XXH_ASSERT(c) XXH_ASSUME(c)
2498#ifndef XXH_STATIC_ASSERT
2499# if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201112L)
2500# define XXH_STATIC_ASSERT_WITH_MESSAGE(c,m) do { _Static_assert((c),m); } while(0)
2501# elif defined(__cplusplus) && (__cplusplus >= 201103L)
2502# define XXH_STATIC_ASSERT_WITH_MESSAGE(c,m) do { static_assert((c),m); } while(0)
2504# define XXH_STATIC_ASSERT_WITH_MESSAGE(c,m) do { struct xxh_sa { char x[(c) ? 1 : -1]; }; } while(0)
2506# define XXH_STATIC_ASSERT(c) XXH_STATIC_ASSERT_WITH_MESSAGE((c),#c)
2525#if defined(__GNUC__) || defined(__clang__)
2526# define XXH_COMPILER_GUARD(var) __asm__("" : "+r" (var))
2528# define XXH_COMPILER_GUARD(var) ((void)0)
2533#if defined(__clang__) && defined(__ARM_ARCH) && !defined(__wasm__)
2534# define XXH_COMPILER_GUARD_CLANG_NEON(var) __asm__("" : "+w" (var))
2536# define XXH_COMPILER_GUARD_CLANG_NEON(var) ((void)0)
2542#if !defined (__VMS) \
2543 && (defined (__cplusplus) \
2544 || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) ) )
2546# include <inttypes.h>
2550 typedef uint8_t xxh_u8;
2552 typedef unsigned char xxh_u8;
2557# warning "XXH_OLD_NAMES is planned to be removed starting v0.9. If the program depends on it, consider moving away from it by employing newer type names directly"
2615#if (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==3))
2620#elif (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==2))
2626static xxh_u32 XXH_read32(
const void* memPtr) {
return *(
const xxh_u32*) memPtr; }
2628#elif (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==1))
2638typedef union { xxh_u32 u32; } __attribute__((__packed__)) unalign;
2640static xxh_u32 XXH_read32(
const void* ptr)
2642 typedef __attribute__((__aligned__(1))) xxh_u32 xxh_unalign32;
2643 return *((const xxh_unalign32*)ptr);
2652static xxh_u32 XXH_read32(
const void* memPtr)
2655 XXH_memcpy(&val, memPtr,
sizeof(val));
2680#ifndef XXH_CPU_LITTLE_ENDIAN
2685# if defined(_WIN32) \
2686 || defined(__LITTLE_ENDIAN__) \
2687 || (defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__)
2688# define XXH_CPU_LITTLE_ENDIAN 1
2689# elif defined(__BIG_ENDIAN__) \
2690 || (defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__)
2691# define XXH_CPU_LITTLE_ENDIAN 0
2699static int XXH_isLittleEndian(
void)
2705 const union { xxh_u32 u; xxh_u8 c[4]; } one = { 1 };
2708# define XXH_CPU_LITTLE_ENDIAN XXH_isLittleEndian()
2718#define XXH_GCC_VERSION (__GNUC__ * 100 + __GNUC_MINOR__)
2721# define XXH_HAS_BUILTIN(x) __has_builtin(x)
2723# define XXH_HAS_BUILTIN(x) 0
2755#if XXH_HAS_BUILTIN(__builtin_unreachable)
2756# define XXH_UNREACHABLE() __builtin_unreachable()
2758#elif defined(_MSC_VER)
2759# define XXH_UNREACHABLE() __assume(0)
2762# define XXH_UNREACHABLE()
2765#if XXH_HAS_BUILTIN(__builtin_assume)
2766# define XXH_ASSUME(c) __builtin_assume(c)
2768# define XXH_ASSUME(c) if (!(c)) { XXH_UNREACHABLE(); }
2784#if !defined(NO_CLANG_BUILTIN) && XXH_HAS_BUILTIN(__builtin_rotateleft32) \
2785 && XXH_HAS_BUILTIN(__builtin_rotateleft64)
2786# define XXH_rotl32 __builtin_rotateleft32
2787# define XXH_rotl64 __builtin_rotateleft64
2788#elif XXH_HAS_BUILTIN(__builtin_stdc_rotate_left)
2789# define XXH_rotl32 __builtin_stdc_rotate_left
2790# define XXH_rotl64 __builtin_stdc_rotate_left
2792#elif defined(_MSC_VER)
2793# define XXH_rotl32(x,r) _rotl(x,r)
2794# define XXH_rotl64(x,r) _rotl64(x,r)
2796# define XXH_rotl32(x,r) (((x) << (r)) | ((x) >> (32 - (r))))
2797# define XXH_rotl64(x,r) (((x) << (r)) | ((x) >> (64 - (r))))
2808#if defined(_MSC_VER)
2809# define XXH_swap32 _byteswap_ulong
2810#elif XXH_GCC_VERSION >= 403
2811# define XXH_swap32 __builtin_bswap32
2813static xxh_u32 XXH_swap32 (xxh_u32 x)
2815 return ((x << 24) & 0xff000000 ) |
2816 ((x << 8) & 0x00ff0000 ) |
2817 ((x >> 8) & 0x0000ff00 ) |
2818 ((x >> 24) & 0x000000ff );
2841#if (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==3))
2843XXH_FORCE_INLINE xxh_u32 XXH_readLE32(
const void* memPtr)
2845 const xxh_u8* bytePtr = (
const xxh_u8 *)memPtr;
2847 | ((xxh_u32)bytePtr[1] << 8)
2848 | ((xxh_u32)bytePtr[2] << 16)
2849 | ((xxh_u32)bytePtr[3] << 24);
2852XXH_FORCE_INLINE xxh_u32 XXH_readBE32(
const void* memPtr)
2854 const xxh_u8* bytePtr = (
const xxh_u8 *)memPtr;
2856 | ((xxh_u32)bytePtr[2] << 8)
2857 | ((xxh_u32)bytePtr[1] << 16)
2858 | ((xxh_u32)bytePtr[0] << 24);
2862XXH_FORCE_INLINE xxh_u32 XXH_readLE32(
const void* ptr)
2867static xxh_u32 XXH_readBE32(
const void* ptr)
2873XXH_FORCE_INLINE xxh_u32
2877 return XXH_readLE32(ptr);
2903#define XXH_PRIME32_1 0x9E3779B1U
2904#define XXH_PRIME32_2 0x85EBCA77U
2905#define XXH_PRIME32_3 0xC2B2AE3DU
2906#define XXH_PRIME32_4 0x27D4EB2FU
2907#define XXH_PRIME32_5 0x165667B1U
2910# define PRIME32_1 XXH_PRIME32_1
2911# define PRIME32_2 XXH_PRIME32_2
2912# define PRIME32_3 XXH_PRIME32_3
2913# define PRIME32_4 XXH_PRIME32_4
2914# define PRIME32_5 XXH_PRIME32_5
2928static xxh_u32 XXH32_round(xxh_u32 acc, xxh_u32 input)
2931 acc = XXH_rotl32(acc, 13);
2933#if (defined(__SSE4_1__) || defined(__aarch64__) || defined(__wasm_simd128__)) && !defined(XXH_ENABLE_AUTOVECTORIZE)
2970 XXH_COMPILER_GUARD(acc);
2985static xxh_u32 XXH32_avalanche(xxh_u32 hash)
2995#define XXH_get32bits(p) XXH_readLE32_align(p, align)
3001XXH_FORCE_INLINE
void
3002XXH32_initAccs(xxh_u32 *acc, xxh_u32 seed)
3004 XXH_ASSERT(acc != NULL);
3017XXH_FORCE_INLINE
const xxh_u8 *
3019 xxh_u32 *XXH_RESTRICT acc,
3020 xxh_u8
const *XXH_RESTRICT input,
3025 const xxh_u8*
const bEnd = input + len;
3026 const xxh_u8*
const limit = bEnd - 15;
3027 XXH_ASSERT(acc != NULL);
3028 XXH_ASSERT(input != NULL);
3029 XXH_ASSERT(len >= 16);
3031 acc[0] = XXH32_round(acc[0], XXH_get32bits(input)); input += 4;
3032 acc[1] = XXH32_round(acc[1], XXH_get32bits(input)); input += 4;
3033 acc[2] = XXH32_round(acc[2], XXH_get32bits(input)); input += 4;
3034 acc[3] = XXH32_round(acc[3], XXH_get32bits(input)); input += 4;
3035 }
while (input < limit);
3044XXH_FORCE_INLINE XXH_PUREF xxh_u32
3045XXH32_mergeAccs(
const xxh_u32 *acc)
3047 XXH_ASSERT(acc != NULL);
3048 return XXH_rotl32(acc[0], 1) + XXH_rotl32(acc[1], 7)
3049 + XXH_rotl32(acc[2], 12) + XXH_rotl32(acc[3], 18);
3067static XXH_PUREF xxh_u32
3068XXH32_finalize(xxh_u32 hash,
const xxh_u8* ptr,
size_t len,
XXH_alignment align)
3070#define XXH_PROCESS1 do { \
3071 hash += (*ptr++) * XXH_PRIME32_5; \
3072 hash = XXH_rotl32(hash, 11) * XXH_PRIME32_1; \
3075#define XXH_PROCESS4 do { \
3076 hash += XXH_get32bits(ptr) * XXH_PRIME32_3; \
3078 hash = XXH_rotl32(hash, 17) * XXH_PRIME32_4; \
3081 if (ptr==NULL) XXH_ASSERT(len == 0);
3094 return XXH32_avalanche(hash);
3097 case 12: XXH_PROCESS4;
3099 case 8: XXH_PROCESS4;
3101 case 4: XXH_PROCESS4;
3102 return XXH32_avalanche(hash);
3104 case 13: XXH_PROCESS4;
3106 case 9: XXH_PROCESS4;
3108 case 5: XXH_PROCESS4;
3110 return XXH32_avalanche(hash);
3112 case 14: XXH_PROCESS4;
3114 case 10: XXH_PROCESS4;
3116 case 6: XXH_PROCESS4;
3119 return XXH32_avalanche(hash);
3121 case 15: XXH_PROCESS4;
3123 case 11: XXH_PROCESS4;
3125 case 7: XXH_PROCESS4;
3127 case 3: XXH_PROCESS1;
3129 case 2: XXH_PROCESS1;
3131 case 1: XXH_PROCESS1;
3133 case 0:
return XXH32_avalanche(hash);
3141# define PROCESS1 XXH_PROCESS1
3142# define PROCESS4 XXH_PROCESS4
3156XXH_FORCE_INLINE XXH_PUREF xxh_u32
3157XXH32_endian_align(
const xxh_u8* input,
size_t len, xxh_u32 seed,
XXH_alignment align)
3161 if (input==NULL) XXH_ASSERT(len == 0);
3165 XXH32_initAccs(acc, seed);
3167 input = XXH32_consumeLong(acc, input, len, align);
3169 h32 = XXH32_mergeAccs(acc);
3174 h32 += (xxh_u32)len;
3176 return XXH32_finalize(h32, input, len&15, align);
3182#if !defined(XXH_NO_STREAM) && XXH_SIZE_OPT >= 2
3190 if ((((
size_t)input) & 3) == 0) {
3191 return XXH32_endian_align((
const xxh_u8*)input, len, seed,
XXH_aligned);
3194 return XXH32_endian_align((
const xxh_u8*)input, len, seed,
XXH_unaligned);
3201#ifndef XXH_NO_STREAM
3217 XXH_memcpy(dstState, srcState,
sizeof(*dstState));
3223 XXH_ASSERT(statePtr != NULL);
3224 memset(statePtr, 0,
sizeof(*statePtr));
3225 XXH32_initAccs(statePtr->
acc, seed);
3235 XXH_ASSERT(len == 0);
3249 {
const xxh_u8* xinput = (
const xxh_u8*)input;
3250 const xxh_u8*
const bEnd = xinput + len;
3260 XXH_ASSERT(xinput <= bEnd);
3261 if ((
size_t)(bEnd - xinput) >=
sizeof(state->
buffer)) {
3263 xinput = XXH32_consumeLong(state->
acc, xinput, (
size_t)(bEnd - xinput),
XXH_unaligned);
3266 if (xinput < bEnd) {
3268 XXH_memcpy(state->
buffer, xinput, (
size_t)(bEnd-xinput));
3283 h32 = XXH32_mergeAccs(state->
acc);
3301 XXH_memcpy(dst, &hash,
sizeof(*dst));
3306 return XXH_readBE32(src);
3310#ifndef XXH_NO_LONG_LONG
3328#if (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==3))
3333#elif (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==2))
3336static xxh_u64 XXH_read64(
const void* memPtr)
3338 return *(
const xxh_u64*) memPtr;
3341#elif (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==1))
3351typedef union { xxh_u32 u32; xxh_u64 u64; } __attribute__((__packed__)) unalign64;
3353static xxh_u64 XXH_read64(
const void* ptr)
3355 typedef __attribute__((__aligned__(1))) xxh_u64 xxh_unalign64;
3356 return *((const xxh_unalign64*)ptr);
3365static xxh_u64 XXH_read64(
const void* memPtr)
3368 XXH_memcpy(&val, memPtr,
sizeof(val));
3374#if defined(_MSC_VER)
3375# define XXH_swap64 _byteswap_uint64
3376#elif XXH_GCC_VERSION >= 403
3377# define XXH_swap64 __builtin_bswap64
3379static xxh_u64 XXH_swap64(xxh_u64 x)
3381 return ((x << 56) & 0xff00000000000000ULL) |
3382 ((x << 40) & 0x00ff000000000000ULL) |
3383 ((x << 24) & 0x0000ff0000000000ULL) |
3384 ((x << 8) & 0x000000ff00000000ULL) |
3385 ((x >> 8) & 0x00000000ff000000ULL) |
3386 ((x >> 24) & 0x0000000000ff0000ULL) |
3387 ((x >> 40) & 0x000000000000ff00ULL) |
3388 ((x >> 56) & 0x00000000000000ffULL);
3394#if (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==3))
3396XXH_FORCE_INLINE xxh_u64 XXH_readLE64(
const void* memPtr)
3398 const xxh_u8* bytePtr = (
const xxh_u8 *)memPtr;
3400 | ((xxh_u64)bytePtr[1] << 8)
3401 | ((xxh_u64)bytePtr[2] << 16)
3402 | ((xxh_u64)bytePtr[3] << 24)
3403 | ((xxh_u64)bytePtr[4] << 32)
3404 | ((xxh_u64)bytePtr[5] << 40)
3405 | ((xxh_u64)bytePtr[6] << 48)
3406 | ((xxh_u64)bytePtr[7] << 56);
3409XXH_FORCE_INLINE xxh_u64 XXH_readBE64(
const void* memPtr)
3411 const xxh_u8* bytePtr = (
const xxh_u8 *)memPtr;
3413 | ((xxh_u64)bytePtr[6] << 8)
3414 | ((xxh_u64)bytePtr[5] << 16)
3415 | ((xxh_u64)bytePtr[4] << 24)
3416 | ((xxh_u64)bytePtr[3] << 32)
3417 | ((xxh_u64)bytePtr[2] << 40)
3418 | ((xxh_u64)bytePtr[1] << 48)
3419 | ((xxh_u64)bytePtr[0] << 56);
3423XXH_FORCE_INLINE xxh_u64 XXH_readLE64(
const void* ptr)
3428static xxh_u64 XXH_readBE64(
const void* ptr)
3434XXH_FORCE_INLINE xxh_u64
3438 return XXH_readLE64(ptr);
3454#define XXH_PRIME64_1 0x9E3779B185EBCA87ULL
3455#define XXH_PRIME64_2 0xC2B2AE3D27D4EB4FULL
3456#define XXH_PRIME64_3 0x165667B19E3779F9ULL
3457#define XXH_PRIME64_4 0x85EBCA77C2B2AE63ULL
3458#define XXH_PRIME64_5 0x27D4EB2F165667C5ULL
3461# define PRIME64_1 XXH_PRIME64_1
3462# define PRIME64_2 XXH_PRIME64_2
3463# define PRIME64_3 XXH_PRIME64_3
3464# define PRIME64_4 XXH_PRIME64_4
3465# define PRIME64_5 XXH_PRIME64_5
3469static xxh_u64 XXH64_round(xxh_u64 acc, xxh_u64 input)
3472 acc = XXH_rotl64(acc, 31);
3474#if (defined(__AVX512F__)) && !defined(XXH_ENABLE_AUTOVECTORIZE)
3489 XXH_COMPILER_GUARD(acc);
3494static xxh_u64 XXH64_mergeRound(xxh_u64 acc, xxh_u64 val)
3496 val = XXH64_round(0, val);
3503static xxh_u64 XXH64_avalanche(xxh_u64 hash)
3514#define XXH_get64bits(p) XXH_readLE64_align(p, align)
3520XXH_FORCE_INLINE
void
3521XXH64_initAccs(xxh_u64 *acc, xxh_u64 seed)
3523 XXH_ASSERT(acc != NULL);
3536XXH_FORCE_INLINE
const xxh_u8 *
3538 xxh_u64 *XXH_RESTRICT acc,
3539 xxh_u8
const *XXH_RESTRICT input,
3544 const xxh_u8*
const bEnd = input + len;
3545 const xxh_u8*
const limit = bEnd - 31;
3546 XXH_ASSERT(acc != NULL);
3547 XXH_ASSERT(input != NULL);
3548 XXH_ASSERT(len >= 32);
3551 if (
sizeof(
void *) <
sizeof(xxh_u64)) {
3553 for (i = 0; i < 4; i++) {
3554 acc[i] = XXH64_round(acc[i], XXH_get64bits(input));
3558 acc[0] = XXH64_round(acc[0], XXH_get64bits(input)); input += 8;
3559 acc[1] = XXH64_round(acc[1], XXH_get64bits(input)); input += 8;
3560 acc[2] = XXH64_round(acc[2], XXH_get64bits(input)); input += 8;
3561 acc[3] = XXH64_round(acc[3], XXH_get64bits(input)); input += 8;
3563 }
while (input < limit);
3572XXH_FORCE_INLINE XXH_PUREF xxh_u64
3573XXH64_mergeAccs(
const xxh_u64 *acc)
3575 XXH_ASSERT(acc != NULL);
3577 xxh_u64 h64 = XXH_rotl64(acc[0], 1) + XXH_rotl64(acc[1], 7)
3578 + XXH_rotl64(acc[2], 12) + XXH_rotl64(acc[3], 18);
3580 if (
sizeof(
void *) <
sizeof(xxh_u64)) {
3582 for (i = 0; i < 4; i++) {
3583 h64 = XXH64_mergeRound(h64, acc[i]);
3586 h64 = XXH64_mergeRound(h64, acc[0]);
3587 h64 = XXH64_mergeRound(h64, acc[1]);
3588 h64 = XXH64_mergeRound(h64, acc[2]);
3589 h64 = XXH64_mergeRound(h64, acc[3]);
3610XXH_STATIC XXH_PUREF xxh_u64
3611XXH64_finalize(xxh_u64 hash,
const xxh_u8* ptr,
size_t len,
XXH_alignment align)
3613 if (ptr==NULL) XXH_ASSERT(len == 0);
3616 xxh_u64
const k1 = XXH64_round(0, XXH_get64bits(ptr));
3633 return XXH64_avalanche(hash);
3637# define PROCESS1_64 XXH_PROCESS1_64
3638# define PROCESS4_64 XXH_PROCESS4_64
3639# define PROCESS8_64 XXH_PROCESS8_64
3641# undef XXH_PROCESS1_64
3642# undef XXH_PROCESS4_64
3643# undef XXH_PROCESS8_64
3654XXH_FORCE_INLINE XXH_PUREF xxh_u64
3655XXH64_endian_align(
const xxh_u8* input,
size_t len, xxh_u64 seed,
XXH_alignment align)
3658 if (input==NULL) XXH_ASSERT(len == 0);
3662 XXH64_initAccs(acc, seed);
3664 input = XXH64_consumeLong(acc, input, len, align);
3666 h64 = XXH64_mergeAccs(acc);
3671 h64 += (xxh_u64) len;
3673 return XXH64_finalize(h64, input, len, align);
3680#if !defined(XXH_NO_STREAM) && XXH_SIZE_OPT >= 2
3688 if ((((
size_t)input) & 7)==0) {
3689 return XXH64_endian_align((
const xxh_u8*)input, len, seed,
XXH_aligned);
3692 return XXH64_endian_align((
const xxh_u8*)input, len, seed,
XXH_unaligned);
3698#ifndef XXH_NO_STREAM
3714 XXH_memcpy(dstState, srcState,
sizeof(*dstState));
3720 XXH_ASSERT(statePtr != NULL);
3721 memset(statePtr, 0,
sizeof(*statePtr));
3722 XXH64_initAccs(statePtr->acc, seed);
3731 XXH_ASSERT(len == 0);
3735 state->total_len += len;
3737 XXH_ASSERT(state->bufferedSize <=
sizeof(state->buffer));
3738 if (len <
sizeof(state->buffer) - state->bufferedSize) {
3739 XXH_memcpy(state->buffer + state->bufferedSize, input, len);
3744 {
const xxh_u8* xinput = (
const xxh_u8*)input;
3745 const xxh_u8*
const bEnd = xinput + len;
3747 if (state->bufferedSize) {
3748 XXH_memcpy(state->buffer + state->bufferedSize, xinput,
sizeof(state->buffer) - state->bufferedSize);
3749 xinput +=
sizeof(state->buffer) - state->bufferedSize;
3751 (void)XXH64_consumeLong(state->acc, state->buffer,
sizeof(state->buffer),
XXH_aligned);
3752 state->bufferedSize = 0;
3755 XXH_ASSERT(xinput <= bEnd);
3756 if ((
size_t)(bEnd - xinput) >=
sizeof(state->buffer)) {
3758 xinput = XXH64_consumeLong(state->acc, xinput, (
size_t)(bEnd - xinput),
XXH_unaligned);
3761 if (xinput < bEnd) {
3763 XXH_memcpy(state->buffer, xinput, (
size_t)(bEnd-xinput));
3764 state->bufferedSize = (unsigned)(bEnd-xinput);
3777 if (state->total_len >= 32) {
3778 h64 = XXH64_mergeAccs(state->acc);
3783 h64 += (xxh_u64) state->total_len;
3785 return XXH64_finalize(h64, state->buffer, (
size_t)state->total_len,
XXH_aligned);
3796 XXH_memcpy(dst, &hash,
sizeof(*dst));
3802 return XXH_readBE64(src);
3821#if (defined(__GNUC__) && (__GNUC__ >= 3)) \
3822 || (defined(__INTEL_COMPILER) && (__INTEL_COMPILER >= 800)) \
3823 || defined(__clang__)
3824# define XXH_likely(x) __builtin_expect(x, 1)
3825# define XXH_unlikely(x) __builtin_expect(x, 0)
3827# define XXH_likely(x) (x)
3828# define XXH_unlikely(x) (x)
3831#ifndef XXH_HAS_INCLUDE
3832# ifdef __has_include
3837# define XXH_HAS_INCLUDE __has_include
3839# define XXH_HAS_INCLUDE(x) 0
3843#if defined(__GNUC__) || defined(__clang__)
3844# if defined(__ARM_FEATURE_SVE)
3845# include <arm_sve.h>
3847# if defined(__ARM_NEON__) || defined(__ARM_NEON) \
3848 || (defined(_M_ARM) && _M_ARM >= 7) \
3849 || defined(_M_ARM64) || defined(_M_ARM64EC) \
3850 || (defined(__wasm_simd128__) && XXH_HAS_INCLUDE(<arm_neon.h>))
3851# define inline __inline__
3852# include <arm_neon.h>
3854# elif defined(__AVX2__)
3855# include <immintrin.h>
3856# elif defined(__SSE2__)
3857# include <emmintrin.h>
3858# elif defined(__loongarch_sx)
3859# include <lsxintrin.h>
3863#if defined(_MSC_VER)
3936#if defined(__thumb__) && !defined(__thumb2__) && defined(__ARM_ARCH_ISA_ARM)
3937# warning "XXH3 is highly inefficient without ARM or Thumb-2."
3955# define XXH_VECTOR XXH_SCALAR
3965# define XXH_ACC_ALIGN 8
3973# if defined(__ARM_FEATURE_SVE)
3974# define XXH_VECTOR XXH_SVE
3976 defined(__ARM_NEON__) || defined(__ARM_NEON) \
3977 || defined(_M_ARM) || defined(_M_ARM64) || defined(_M_ARM64EC) \
3978 || (defined(__wasm_simd128__) && XXH_HAS_INCLUDE(<arm_neon.h>)) \
3980 defined(_WIN32) || defined(__LITTLE_ENDIAN__) \
3981 || (defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__) \
3983# define XXH_VECTOR XXH_NEON
3984# elif defined(__AVX512F__)
3985# define XXH_VECTOR XXH_AVX512
3986# elif defined(__AVX2__)
3987# define XXH_VECTOR XXH_AVX2
3988# elif defined(__SSE2__) || defined(_M_AMD64) || defined(_M_X64) || (defined(_M_IX86_FP) && (_M_IX86_FP == 2))
3989# define XXH_VECTOR XXH_SSE2
3990# elif (defined(__PPC64__) && defined(__POWER8_VECTOR__)) \
3991 || (defined(__s390x__) && defined(__VEC__)) \
3992 && defined(__GNUC__)
3993# define XXH_VECTOR XXH_VSX
3994# elif defined(__loongarch_sx)
3995# define XXH_VECTOR XXH_LSX
3997# define XXH_VECTOR XXH_SCALAR
4002#if (XXH_VECTOR == XXH_SVE) && !defined(__ARM_FEATURE_SVE)
4004# pragma warning(once : 4606)
4006# warning "__ARM_FEATURE_SVE isn't supported. Use SCALAR instead."
4009# define XXH_VECTOR XXH_SCALAR
4016#ifndef XXH_ACC_ALIGN
4017# if defined(XXH_X86DISPATCH)
4018# define XXH_ACC_ALIGN 64
4019# elif XXH_VECTOR == XXH_SCALAR
4020# define XXH_ACC_ALIGN 8
4021# elif XXH_VECTOR == XXH_SSE2
4022# define XXH_ACC_ALIGN 16
4023# elif XXH_VECTOR == XXH_AVX2
4024# define XXH_ACC_ALIGN 32
4025# elif XXH_VECTOR == XXH_NEON
4026# define XXH_ACC_ALIGN 16
4027# elif XXH_VECTOR == XXH_VSX
4028# define XXH_ACC_ALIGN 16
4029# elif XXH_VECTOR == XXH_AVX512
4030# define XXH_ACC_ALIGN 64
4031# elif XXH_VECTOR == XXH_SVE
4032# define XXH_ACC_ALIGN 64
4033# elif XXH_VECTOR == XXH_LSX
4034# define XXH_ACC_ALIGN 64
4038#if defined(XXH_X86DISPATCH) || XXH_VECTOR == XXH_SSE2 \
4039 || XXH_VECTOR == XXH_AVX2 || XXH_VECTOR == XXH_AVX512
4040# define XXH_SEC_ALIGN XXH_ACC_ALIGN
4041#elif XXH_VECTOR == XXH_SVE
4042# define XXH_SEC_ALIGN XXH_ACC_ALIGN
4044# define XXH_SEC_ALIGN 8
4047#if defined(__GNUC__) || defined(__clang__)
4048# define XXH_ALIASING __attribute__((__may_alias__))
4050# define XXH_ALIASING
4074#if XXH_VECTOR == XXH_AVX2 \
4075 && defined(__GNUC__) && !defined(__clang__) \
4076 && defined(__OPTIMIZE__) && XXH_SIZE_OPT <= 0
4077# pragma GCC push_options
4078# pragma GCC optimize("-O2")
4081#if XXH_VECTOR == XXH_NEON
4090typedef uint64x2_t xxh_aliasing_uint64x2_t XXH_ALIASING;
/*
 * XXH_vld1q_u64: unaligned load of two u64 lanes.
 *
 * On aarch64 GCC, dereferencing a may_alias pointer avoids a redundant
 * vld1q_u8 + reinterpret sequence; other compilers load bytes and
 * reinterpret.
 *
 * NOTE(review): the braces and #else/#endif framing were lost in the paste
 * and are reconstructed from upstream xxHash — verify against the original.
 */
#if defined(__aarch64__) && defined(__GNUC__) && !defined(__clang__)
XXH_FORCE_INLINE uint64x2_t XXH_vld1q_u64(void const* ptr) /* silence -Wcast-align */
{
    return *(xxh_aliasing_uint64x2_t const *)ptr;
}
#else
XXH_FORCE_INLINE uint64x2_t XXH_vld1q_u64(void const* ptr)
{
    return vreinterpretq_u64_u8(vld1q_u8((uint8_t const*)ptr));
}
#endif
/*
 * XXH_vmlal_low_u32 / XXH_vmlal_high_u32:
 * widening multiply-accumulate of the low / high u32 halves into u64 lanes.
 *
 * NOTE(review): return statements, braces and the #else/#endif framing were
 * lost in the paste and are reconstructed from upstream xxHash — verify.
 */
#if defined(__aarch64__) && defined(__GNUC__) && !defined(__clang__) && __GNUC__ < 11
/* Old GCC mis-compiles the low-half intrinsic form: force a single umlal. */
XXH_FORCE_INLINE uint64x2_t
XXH_vmlal_low_u32(uint64x2_t acc, uint32x4_t lhs, uint32x4_t rhs)
{
    __asm__("umlal %0.2d, %1.2s, %2.2s" : "+w" (acc) : "w" (lhs), "w" (rhs));
    return acc;
}
XXH_FORCE_INLINE uint64x2_t
XXH_vmlal_high_u32(uint64x2_t acc, uint32x4_t lhs, uint32x4_t rhs)
{
    /* The high-half intrinsic compiles correctly even on old GCC. */
    return vmlal_high_u32(acc, lhs, rhs);
}
#else
/* Portable forms via vget_low/high + vmlal. */
XXH_FORCE_INLINE uint64x2_t
XXH_vmlal_low_u32(uint64x2_t acc, uint32x4_t lhs, uint32x4_t rhs)
{
    return vmlal_u32(acc, vget_low_u32(lhs), vget_low_u32(rhs));
}
XXH_FORCE_INLINE uint64x2_t
XXH_vmlal_high_u32(uint64x2_t acc, uint32x4_t lhs, uint32x4_t rhs)
{
    return vmlal_u32(acc, vget_high_u32(lhs), vget_high_u32(rhs));
}
#endif
/* Number of accumulator lanes processed with NEON; the rest go scalar. */
4193# ifndef XXH3_NEON_LANES
4194# if (defined(__aarch64__) || defined(__arm64__) || defined(_M_ARM64) || defined(_M_ARM64EC)) \
4195 && !defined(__APPLE__) && XXH_SIZE_OPT <= 0
/* 6 NEON + 2 scalar lanes keeps both pipelines busy on big aarch64 cores. */
4196# define XXH3_NEON_LANES 6
4198# define XXH3_NEON_LANES XXH_ACC_NB
/* ---- VSX/ZVector setup: include headers without polluting keywords. ---- */
4211#if XXH_VECTOR == XXH_VSX
4220# pragma push_macro("bool")
4221# pragma push_macro("vector")
4222# pragma push_macro("pixel")
4228# if defined(__s390x__)
4229# include <s390intrin.h>
4231# include <altivec.h>
4235# pragma pop_macro("pixel")
4236# pragma pop_macro("vector")
4237# pragma pop_macro("bool")
/* Vector typedefs used by the VSX code path. */
4239typedef __vector
unsigned long long xxh_u64x2;
4240typedef __vector
unsigned char xxh_u8x16;
4241typedef __vector
unsigned xxh_u32x4;
/* Aliasing-safe view of xxh_u64x2 for accumulator access. */
4246typedef xxh_u64x2 xxh_aliasing_u64x2 XXH_ALIASING;
/* Detect big-endian element ordering for the byteswapped loads below. */
4249# if defined(__BIG_ENDIAN__) \
4250 || (defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__)
4251# define XXH_VSX_BE 1
4252# elif defined(__VEC_ELEMENT_REG_ORDER__) && __VEC_ELEMENT_REG_ORDER__ == __ORDER_BIG_ENDIAN__
4253# warning "-maltivec=be is not recommended. Please use native endianness."
4254# define XXH_VSX_BE 1
4256# define XXH_VSX_BE 0
/*
 * XXH_vec_revb: byte-reverse each u64 lane.
 * POWER9 / clang-s390x provide vec_revb directly; otherwise emulate with a
 * byte permute.
 *
 * NOTE(review): #else/#endif framing and braces reconstructed from upstream
 * xxHash — verify.
 */
# if defined(__POWER9_VECTOR__) || (defined(__clang__) && defined(__s390x__))
#  define XXH_vec_revb vec_revb
# else
XXH_FORCE_INLINE xxh_u64x2 XXH_vec_revb(xxh_u64x2 val)
{
    xxh_u8x16 const vByteSwap = { 0x07, 0x06, 0x05, 0x04, 0x03, 0x02, 0x01, 0x00,
                                  0x0F, 0x0E, 0x0D, 0x0C, 0x0B, 0x0A, 0x09, 0x08 };
    return vec_perm(val, val, vByteSwap);
}
# endif
4279XXH_FORCE_INLINE xxh_u64x2 XXH_vec_loadu(
const void *ptr)
4282 XXH_memcpy(&ret, ptr,
sizeof(xxh_u64x2));
4284 ret = XXH_vec_revb(ret);
/*
 * XXH_vec_mulo / XXH_vec_mule: widening 32x32->64 multiply of the odd /
 * even u32 elements. s390x and recent clang expose these directly; older
 * GCC needs inline assembly.
 *
 * NOTE(review): result declarations, returns and #else/#endif framing were
 * lost in the paste; reconstructed from upstream xxHash — verify.
 */
# if defined(__s390x__)
#  define XXH_vec_mulo vec_mulo
#  define XXH_vec_mule vec_mule
# elif defined(__clang__) && XXH_HAS_BUILTIN(__builtin_altivec_vmuleuw) && !defined(__ibmxl__)
#  define XXH_vec_mulo __builtin_altivec_vmulouw
#  define XXH_vec_mule __builtin_altivec_vmuleuw
# else
XXH_FORCE_INLINE xxh_u64x2 XXH_vec_mulo(xxh_u32x4 a, xxh_u32x4 b)
{
    xxh_u64x2 result;
    __asm__("vmulouw %0, %1, %2" : "=v" (result) : "v" (a), "v" (b));
    return result;
}
XXH_FORCE_INLINE xxh_u64x2 XXH_vec_mule(xxh_u32x4 a, xxh_u32x4 b)
{
    xxh_u64x2 result;
    __asm__("vmuleuw %0, %1, %2" : "=v" (result) : "v" (a), "v" (b));
    return result;
}
# endif
4322#if XXH_VECTOR == XXH_SVE
4323#define ACCRND(acc, offset) \
4325 svuint64_t input_vec = svld1_u64(mask, xinput + offset); \
4326 svuint64_t secret_vec = svld1_u64(mask, xsecret + offset); \
4327 svuint64_t mixed = sveor_u64_x(mask, secret_vec, input_vec); \
4328 svuint64_t swapped = svtbl_u64(input_vec, kSwap); \
4329 svuint64_t mixed_lo = svextw_u64_x(mask, mixed); \
4330 svuint64_t mixed_hi = svlsr_n_u64_x(mask, mixed, 32); \
4331 svuint64_t mul = svmad_u64_x(mask, mixed_lo, mixed_hi, swapped); \
4332 acc = svadd_u64_x(mask, acc, mul); \
/* XXH_PREFETCH(ptr): best-effort read prefetch; no-op when unavailable. */
4338#if defined(XXH_NO_PREFETCH)
4339# define XXH_PREFETCH(ptr) (void)(ptr)
4341# if XXH_SIZE_OPT >= 1
4342# define XXH_PREFETCH(ptr) (void)(ptr)
4343# elif defined(_MSC_VER) && (defined(_M_X64) || defined(_M_IX86))
4344# include <mmintrin.h>
4345# define XXH_PREFETCH(ptr) _mm_prefetch((const char*)(ptr), _MM_HINT_T0)
4346# elif defined(__GNUC__) && ( (__GNUC__ >= 4) || ( (__GNUC__ == 3) && (__GNUC_MINOR__ >= 1) ) )
/* args: read access (0), high temporal locality (3). */
4347# define XXH_PREFETCH(ptr) __builtin_prefetch((ptr), 0 , 3 )
4349# define XXH_PREFETCH(ptr) (void)(ptr)
/* Size in bytes of the built-in secret (must be >= XXH3_SECRET_SIZE_MIN). */
4358#define XXH_SECRET_DEFAULT_SIZE 192
4360#if (XXH_SECRET_DEFAULT_SIZE < XXH3_SECRET_SIZE_MIN)
4361# error "default keyset is not large enough"
/* The default pseudorandom secret used when no custom secret is supplied. */
4365XXH_ALIGN(64) static const xxh_u8 XXH3_kSecret[XXH_SECRET_DEFAULT_SIZE] = {
4366 0xb8, 0xfe, 0x6c, 0x39, 0x23, 0xa4, 0x4b, 0xbe, 0x7c, 0x01, 0x81, 0x2c, 0xf7, 0x21, 0xad, 0x1c,
4367 0xde, 0xd4, 0x6d, 0xe9, 0x83, 0x90, 0x97, 0xdb, 0x72, 0x40, 0xa4, 0xa4, 0xb7, 0xb3, 0x67, 0x1f,
4368 0xcb, 0x79, 0xe6, 0x4e, 0xcc, 0xc0, 0xe5, 0x78, 0x82, 0x5a, 0xd0, 0x7d, 0xcc, 0xff, 0x72, 0x21,
4369 0xb8, 0x08, 0x46, 0x74, 0xf7, 0x43, 0x24, 0x8e, 0xe0, 0x35, 0x90, 0xe6, 0x81, 0x3a, 0x26, 0x4c,
4370 0x3c, 0x28, 0x52, 0xbb, 0x91, 0xc3, 0x00, 0xcb, 0x88, 0xd0, 0x65, 0x8b, 0x1b, 0x53, 0x2e, 0xa3,
4371 0x71, 0x64, 0x48, 0x97, 0xa2, 0x0d, 0xf9, 0x4e, 0x38, 0x19, 0xef, 0x46, 0xa9, 0xde, 0xac, 0xd8,
4372 0xa8, 0xfa, 0x76, 0x3f, 0xe3, 0x9c, 0x34, 0x3f, 0xf9, 0xdc, 0xbb, 0xc7, 0xc7, 0x0b, 0x4f, 0x1d,
4373 0x8a, 0x51, 0xe0, 0x4b, 0xcd, 0xb4, 0x59, 0x31, 0xc8, 0x9f, 0x7e, 0xc9, 0xd9, 0x78, 0x73, 0x64,
4374 0xea, 0xc5, 0xac, 0x83, 0x34, 0xd3, 0xeb, 0xc3, 0xc5, 0x81, 0xa0, 0xff, 0xfa, 0x13, 0x63, 0xeb,
4375 0x17, 0x0d, 0xdd, 0x51, 0xb7, 0xf0, 0xda, 0x49, 0xd3, 0x16, 0x55, 0x26, 0x29, 0xd4, 0x68, 0x9e,
4376 0x2b, 0x16, 0xbe, 0x58, 0x7d, 0x47, 0xa1, 0xfc, 0x8f, 0xf8, 0xb8, 0xd1, 0x7a, 0xd0, 0x31, 0xce,
4377 0x45, 0xcb, 0x3a, 0x8f, 0x95, 0x16, 0x04, 0x28, 0xaf, 0xd7, 0xfb, 0xca, 0xbb, 0x4b, 0x40, 0x7e,
/* Mixing multipliers used by the XXH3 avalanche steps. */
4380static const xxh_u64 PRIME_MX1 = 0x165667919E3779F9ULL;
4381static const xxh_u64 PRIME_MX2 = 0x9FB21C651E98DF25ULL;
/* Short alias kept for compatibility with older internal naming. */
4384# define kSecret XXH3_kSecret
/*
 * XXH_mult32to64(x, y): 32x32->64 unsigned multiply of the low 32 bits of
 * x and y. (The opening #if of this selection chain is elided from view.)
 */
4404XXH_FORCE_INLINE xxh_u64
4405XXH_mult32to64(xxh_u64 x, xxh_u64 y)
/* Masking lets compilers emit a single 32x32->64 multiply instruction. */
4407 return (x & 0xFFFFFFFF) * (y & 0xFFFFFFFF);
/* 32-bit MSVC: __emulu maps directly to the unsigned MUL instruction. */
4409#elif defined(_MSC_VER) && defined(_M_IX86)
4410# define XXH_mult32to64(x, y) __emulu((unsigned)(x), (unsigned)(y))
/* Portable fallback via casts. */
4419# define XXH_mult32to64(x, y) ((xxh_u64)(xxh_u32)(x) * (xxh_u64)(xxh_u32)(y))
4432XXH_mult64to128(xxh_u64 lhs, xxh_u64 rhs)
4449#if (defined(__GNUC__) || defined(__clang__)) && !defined(__wasm__) \
4450 && defined(__SIZEOF_INT128__) \
4451 || (defined(_INTEGRAL_MAX_BITS) && _INTEGRAL_MAX_BITS >= 128)
4453 __uint128_t
const product = (__uint128_t)lhs * (__uint128_t)rhs;
4455 r128.
low64 = (xxh_u64)(product);
4456 r128.
high64 = (xxh_u64)(product >> 64);
4466#elif (defined(_M_X64) || defined(_M_IA64)) && !defined(_M_ARM64EC)
4469# pragma intrinsic(_umul128)
4471 xxh_u64 product_high;
4472 xxh_u64
const product_low = _umul128(lhs, rhs, &product_high);
4474 r128.
low64 = product_low;
4475 r128.
high64 = product_high;
4483#elif defined(_M_ARM64) || defined(_M_ARM64EC)
4486# pragma intrinsic(__umulh)
4489 r128.
low64 = lhs * rhs;
4490 r128.
high64 = __umulh(lhs, rhs);
4538 xxh_u64
const lo_lo = XXH_mult32to64(lhs & 0xFFFFFFFF, rhs & 0xFFFFFFFF);
4539 xxh_u64
const hi_lo = XXH_mult32to64(lhs >> 32, rhs & 0xFFFFFFFF);
4540 xxh_u64
const lo_hi = XXH_mult32to64(lhs & 0xFFFFFFFF, rhs >> 32);
4541 xxh_u64
const hi_hi = XXH_mult32to64(lhs >> 32, rhs >> 32);
4544 xxh_u64
const cross = (lo_lo >> 32) + (hi_lo & 0xFFFFFFFF) + lo_hi;
4545 xxh_u64
const upper = (hi_lo >> 32) + (cross >> 32) + hi_hi;
4546 xxh_u64
const lower = (cross << 32) | (lo_lo & 0xFFFFFFFF);
4566XXH3_mul128_fold64(xxh_u64 lhs, xxh_u64 rhs)
4573XXH_FORCE_INLINE XXH_CONSTF xxh_u64 XXH_xorshift64(xxh_u64 v64,
int shift)
4575 XXH_ASSERT(0 <= shift && shift < 64);
4576 return v64 ^ (v64 >> shift);
4585 h64 = XXH_xorshift64(h64, 37);
4587 h64 = XXH_xorshift64(h64, 32);
4596static XXH64_hash_t XXH3_rrmxmx(xxh_u64 h64, xxh_u64 len)
4599 h64 ^= XXH_rotl64(h64, 49) ^ XXH_rotl64(h64, 24);
4601 h64 ^= (h64 >> 35) + len ;
4603 return XXH_xorshift64(h64, 28);
4641XXH3_len_1to3_64b(
const xxh_u8* input,
size_t len,
const xxh_u8* secret,
XXH64_hash_t seed)
4643 XXH_ASSERT(input != NULL);
4644 XXH_ASSERT(1 <= len && len <= 3);
4645 XXH_ASSERT(secret != NULL);
4651 { xxh_u8
const c1 = input[0];
4652 xxh_u8
const c2 = input[len >> 1];
4653 xxh_u8
const c3 = input[len - 1];
4654 xxh_u32
const combined = ((xxh_u32)c1 << 16) | ((xxh_u32)c2 << 24)
4655 | ((xxh_u32)c3 << 0) | ((xxh_u32)len << 8);
4656 xxh_u64
const bitflip = (XXH_readLE32(secret) ^ XXH_readLE32(secret+4)) + seed;
4657 xxh_u64
const keyed = (xxh_u64)combined ^ bitflip;
4658 return XXH64_avalanche(keyed);
4663XXH3_len_4to8_64b(
const xxh_u8* input,
size_t len,
const xxh_u8* secret,
XXH64_hash_t seed)
4665 XXH_ASSERT(input != NULL);
4666 XXH_ASSERT(secret != NULL);
4667 XXH_ASSERT(4 <= len && len <= 8);
4668 seed ^= (xxh_u64)XXH_swap32((xxh_u32)seed) << 32;
4669 { xxh_u32
const input1 = XXH_readLE32(input);
4670 xxh_u32
const input2 = XXH_readLE32(input + len - 4);
4671 xxh_u64
const bitflip = (XXH_readLE64(secret+8) ^ XXH_readLE64(secret+16)) - seed;
4672 xxh_u64
const input64 = input2 + (((xxh_u64)input1) << 32);
4673 xxh_u64
const keyed = input64 ^ bitflip;
4674 return XXH3_rrmxmx(keyed, len);
4679XXH3_len_9to16_64b(
const xxh_u8* input,
size_t len,
const xxh_u8* secret,
XXH64_hash_t seed)
4681 XXH_ASSERT(input != NULL);
4682 XXH_ASSERT(secret != NULL);
4683 XXH_ASSERT(9 <= len && len <= 16);
4684 { xxh_u64
const bitflip1 = (XXH_readLE64(secret+24) ^ XXH_readLE64(secret+32)) + seed;
4685 xxh_u64
const bitflip2 = (XXH_readLE64(secret+40) ^ XXH_readLE64(secret+48)) - seed;
4686 xxh_u64
const input_lo = XXH_readLE64(input) ^ bitflip1;
4687 xxh_u64
const input_hi = XXH_readLE64(input + len - 8) ^ bitflip2;
4688 xxh_u64
const acc = len
4689 + XXH_swap64(input_lo) + input_hi
4690 + XXH3_mul128_fold64(input_lo, input_hi);
4691 return XXH3_avalanche(acc);
4696XXH3_len_0to16_64b(
const xxh_u8* input,
size_t len,
const xxh_u8* secret,
XXH64_hash_t seed)
4698 XXH_ASSERT(len <= 16);
4699 {
if (XXH_likely(len > 8))
return XXH3_len_9to16_64b(input, len, secret, seed);
4700 if (XXH_likely(len >= 4))
return XXH3_len_4to8_64b(input, len, secret, seed);
4701 if (len)
return XXH3_len_1to3_64b(input, len, secret, seed);
4702 return XXH64_avalanche(seed ^ (XXH_readLE64(secret+56) ^ XXH_readLE64(secret+64)));
4732XXH_FORCE_INLINE xxh_u64 XXH3_mix16B(
const xxh_u8* XXH_RESTRICT input,
4733 const xxh_u8* XXH_RESTRICT secret, xxh_u64 seed64)
4735#if defined(__GNUC__) && !defined(__clang__) \
4736 && defined(__i386__) && defined(__SSE2__) \
4737 && !defined(XXH_ENABLE_AUTOVECTORIZE)
4753 XXH_COMPILER_GUARD(seed64);
4755 { xxh_u64
const input_lo = XXH_readLE64(input);
4756 xxh_u64
const input_hi = XXH_readLE64(input+8);
4757 return XXH3_mul128_fold64(
4758 input_lo ^ (XXH_readLE64(secret) + seed64),
4759 input_hi ^ (XXH_readLE64(secret+8) - seed64)
4766XXH3_len_17to128_64b(
const xxh_u8* XXH_RESTRICT input,
size_t len,
4767 const xxh_u8* XXH_RESTRICT secret,
size_t secretSize,
4771 XXH_ASSERT(16 < len && len <= 128);
4774#if XXH_SIZE_OPT >= 1
4776 unsigned int i = (
unsigned int)(len - 1) / 32;
4778 acc += XXH3_mix16B(input+16 * i, secret+32*i, seed);
4779 acc += XXH3_mix16B(input+len-16*(i+1), secret+32*i+16, seed);
4785 acc += XXH3_mix16B(input+48, secret+96, seed);
4786 acc += XXH3_mix16B(input+len-64, secret+112, seed);
4788 acc += XXH3_mix16B(input+32, secret+64, seed);
4789 acc += XXH3_mix16B(input+len-48, secret+80, seed);
4791 acc += XXH3_mix16B(input+16, secret+32, seed);
4792 acc += XXH3_mix16B(input+len-32, secret+48, seed);
4794 acc += XXH3_mix16B(input+0, secret+0, seed);
4795 acc += XXH3_mix16B(input+len-16, secret+16, seed);
4797 return XXH3_avalanche(acc);
4802XXH3_len_129to240_64b(
const xxh_u8* XXH_RESTRICT input,
size_t len,
4803 const xxh_u8* XXH_RESTRICT secret,
size_t secretSize,
4809 #define XXH3_MIDSIZE_STARTOFFSET 3
4810 #define XXH3_MIDSIZE_LASTOFFSET 17
4814 unsigned int const nbRounds = (
unsigned int)len / 16;
4817 for (i=0; i<8; i++) {
4818 acc += XXH3_mix16B(input+(16*i), secret+(16*i), seed);
4821 acc_end = XXH3_mix16B(input + len - 16, secret +
XXH3_SECRET_SIZE_MIN - XXH3_MIDSIZE_LASTOFFSET, seed);
4822 XXH_ASSERT(nbRounds >= 8);
4823 acc = XXH3_avalanche(acc);
4824#if defined(__clang__) \
4825 && (defined(__ARM_NEON) || defined(__ARM_NEON__)) \
4826 && !defined(XXH_ENABLE_AUTOVECTORIZE)
4847 #pragma clang loop vectorize(disable)
4849 for (i=8 ; i < nbRounds; i++) {
4853 XXH_COMPILER_GUARD(acc);
4854 acc_end += XXH3_mix16B(input+(16*i), secret+(16*(i-8)) + XXH3_MIDSIZE_STARTOFFSET, seed);
4856 return XXH3_avalanche(acc + acc_end);
/* Long-input geometry: 64-byte stripes, secret advances 8 bytes per stripe. */
4863#define XXH_STRIPE_LEN 64
4864#define XXH_SECRET_CONSUME_RATE 8
4865#define XXH_ACC_NB (XXH_STRIPE_LEN / sizeof(xxh_u64))
/* Legacy aliases kept for source compatibility. */
4868# define STRIPE_LEN XXH_STRIPE_LEN
4869# define ACC_NB XXH_ACC_NB
/* Prefetch distance ahead of the current stripe, tuned per ISA. */
4872#ifndef XXH_PREFETCH_DIST
4874# define XXH_PREFETCH_DIST 320
4876# if (XXH_VECTOR == XXH_AVX512)
4877# define XXH_PREFETCH_DIST 512
4879# define XXH_PREFETCH_DIST 384
/*
 * XXH3_ACCUMULATE_TEMPLATE(name): stamps out XXH3_accumulate_<name>(), the
 * multi-stripe driver that prefetches ahead and calls
 * XXH3_accumulate_512_<name> once per stripe.
 */
4894#define XXH3_ACCUMULATE_TEMPLATE(name) \
4896XXH3_accumulate_##name(xxh_u64* XXH_RESTRICT acc, \
4897 const xxh_u8* XXH_RESTRICT input, \
4898 const xxh_u8* XXH_RESTRICT secret, \
4902 for (n = 0; n < nbStripes; n++ ) { \
4903 const xxh_u8* const in = input + n*XXH_STRIPE_LEN; \
4904 XXH_PREFETCH(in + XXH_PREFETCH_DIST); \
4905 XXH3_accumulate_512_##name( \
4908 secret + n*XXH_SECRET_CONSUME_RATE); \
4913XXH_FORCE_INLINE
void XXH_writeLE64(
void* dst, xxh_u64 v64)
4916 XXH_memcpy(dst, &v64,
sizeof(v64));
/* Signed 64-bit type for intrinsics that take signed operands. */
4924#if !defined (__VMS) \
4925 && (defined (__cplusplus) \
4926 || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) ) )
4927 typedef int64_t xxh_i64;
4930 typedef long long xxh_i64;
/* AVX512 code path: compiled when selected or when dispatching includes it. */
4957#if (XXH_VECTOR == XXH_AVX512) \
4958 || (defined(XXH_DISPATCH_AVX512) && XXH_DISPATCH_AVX512 != 0)
4960#ifndef XXH_TARGET_AVX512
4961# define XXH_TARGET_AVX512
4965XXH3_accumulate_512_avx512(
void* XXH_RESTRICT acc,
4966 const void* XXH_RESTRICT input,
4967 const void* XXH_RESTRICT secret)
4969 __m512i*
const xacc = (__m512i *) acc;
4970 XXH_ASSERT((((
size_t)acc) & 63) == 0);
4971 XXH_STATIC_ASSERT(XXH_STRIPE_LEN ==
sizeof(__m512i));
4975 __m512i
const data_vec = _mm512_loadu_si512 (input);
4977 __m512i
const key_vec = _mm512_loadu_si512 (secret);
4979 __m512i
const data_key = _mm512_xor_si512 (data_vec, key_vec);
4981 __m512i
const data_key_lo = _mm512_srli_epi64 (data_key, 32);
4983 __m512i
const product = _mm512_mul_epu32 (data_key, data_key_lo);
4985 __m512i
const data_swap = _mm512_shuffle_epi32(data_vec, (_MM_PERM_ENUM)_MM_SHUFFLE(1, 0, 3, 2));
4986 __m512i
const sum = _mm512_add_epi64(*xacc, data_swap);
4988 *xacc = _mm512_add_epi64(product, sum);
5015XXH3_scrambleAcc_avx512(
void* XXH_RESTRICT acc,
const void* XXH_RESTRICT secret)
5017 XXH_ASSERT((((
size_t)acc) & 63) == 0);
5018 XXH_STATIC_ASSERT(XXH_STRIPE_LEN ==
sizeof(__m512i));
5019 { __m512i*
const xacc = (__m512i*) acc;
5020 const __m512i prime32 = _mm512_set1_epi32((
int)
XXH_PRIME32_1);
5023 __m512i
const acc_vec = *xacc;
5024 __m512i
const shifted = _mm512_srli_epi64 (acc_vec, 47);
5026 __m512i
const key_vec = _mm512_loadu_si512 (secret);
5027 __m512i
const data_key = _mm512_ternarylogic_epi32(key_vec, acc_vec, shifted, 0x96 );
5030 __m512i
const data_key_hi = _mm512_srli_epi64 (data_key, 32);
5031 __m512i
const prod_lo = _mm512_mul_epu32 (data_key, prime32);
5032 __m512i
const prod_hi = _mm512_mul_epu32 (data_key_hi, prime32);
5033 *xacc = _mm512_add_epi64(prod_lo, _mm512_slli_epi64(prod_hi, 32));
5038XXH3_initCustomSecret_avx512(
void* XXH_RESTRICT customSecret, xxh_u64 seed64)
5040 XXH_STATIC_ASSERT((XXH_SECRET_DEFAULT_SIZE & 63) == 0);
5041 XXH_STATIC_ASSERT(XXH_SEC_ALIGN == 64);
5042 XXH_ASSERT(((
size_t)customSecret & 63) == 0);
5043 (void)(&XXH_writeLE64);
5044 {
int const nbRounds = XXH_SECRET_DEFAULT_SIZE /
sizeof(__m512i);
5045 __m512i
const seed_pos = _mm512_set1_epi64((xxh_i64)seed64);
5046 __m512i
const seed = _mm512_mask_sub_epi64(seed_pos, 0xAA, _mm512_set1_epi8(0), seed_pos);
5048 const __m512i*
const src = (
const __m512i*) ((
const void*) XXH3_kSecret);
5049 __m512i*
const dest = ( __m512i*) customSecret;
5051 XXH_ASSERT(((
size_t)src & 63) == 0);
5052 XXH_ASSERT(((
size_t)dest & 63) == 0);
5053 for (i=0; i < nbRounds; ++i) {
5054 dest[i] = _mm512_add_epi64(_mm512_load_si512(src + i), seed);
/* AVX2 code path: compiled when selected or when dispatching includes it. */
5060#if (XXH_VECTOR == XXH_AVX2) \
5061 || (defined(XXH_DISPATCH_AVX2) && XXH_DISPATCH_AVX2 != 0)
5063#ifndef XXH_TARGET_AVX2
5064# define XXH_TARGET_AVX2
5068XXH3_accumulate_512_avx2(
void* XXH_RESTRICT acc,
5069 const void* XXH_RESTRICT input,
5070 const void* XXH_RESTRICT secret)
5072 XXH_ASSERT((((
size_t)acc) & 31) == 0);
5073 { __m256i*
const xacc = (__m256i *) acc;
5076 const __m256i*
const xinput = (
const __m256i *) input;
5079 const __m256i*
const xsecret = (
const __m256i *) secret;
5082 for (i=0; i < XXH_STRIPE_LEN/
sizeof(__m256i); i++) {
5084 __m256i
const data_vec = _mm256_loadu_si256 (xinput+i);
5086 __m256i
const key_vec = _mm256_loadu_si256 (xsecret+i);
5088 __m256i
const data_key = _mm256_xor_si256 (data_vec, key_vec);
5090 __m256i
const data_key_lo = _mm256_srli_epi64 (data_key, 32);
5092 __m256i
const product = _mm256_mul_epu32 (data_key, data_key_lo);
5094 __m256i
const data_swap = _mm256_shuffle_epi32(data_vec, _MM_SHUFFLE(1, 0, 3, 2));
5095 __m256i
const sum = _mm256_add_epi64(xacc[i], data_swap);
5097 xacc[i] = _mm256_add_epi64(product, sum);
5103XXH3_scrambleAcc_avx2(
void* XXH_RESTRICT acc,
const void* XXH_RESTRICT secret)
5105 XXH_ASSERT((((
size_t)acc) & 31) == 0);
5106 { __m256i*
const xacc = (__m256i*) acc;
5109 const __m256i*
const xsecret = (
const __m256i *) secret;
5110 const __m256i prime32 = _mm256_set1_epi32((
int)
XXH_PRIME32_1);
5113 for (i=0; i < XXH_STRIPE_LEN/
sizeof(__m256i); i++) {
5115 __m256i
const acc_vec = xacc[i];
5116 __m256i
const shifted = _mm256_srli_epi64 (acc_vec, 47);
5117 __m256i
const data_vec = _mm256_xor_si256 (acc_vec, shifted);
5119 __m256i
const key_vec = _mm256_loadu_si256 (xsecret+i);
5120 __m256i
const data_key = _mm256_xor_si256 (data_vec, key_vec);
5123 __m256i
const data_key_hi = _mm256_srli_epi64 (data_key, 32);
5124 __m256i
const prod_lo = _mm256_mul_epu32 (data_key, prime32);
5125 __m256i
const prod_hi = _mm256_mul_epu32 (data_key_hi, prime32);
5126 xacc[i] = _mm256_add_epi64(prod_lo, _mm256_slli_epi64(prod_hi, 32));
5131XXH_FORCE_INLINE
XXH_TARGET_AVX2 void XXH3_initCustomSecret_avx2(
void* XXH_RESTRICT customSecret, xxh_u64 seed64)
5133 XXH_STATIC_ASSERT((XXH_SECRET_DEFAULT_SIZE & 31) == 0);
5134 XXH_STATIC_ASSERT((XXH_SECRET_DEFAULT_SIZE /
sizeof(__m256i)) == 6);
5135 XXH_STATIC_ASSERT(XXH_SEC_ALIGN <= 64);
5136 (void)(&XXH_writeLE64);
5137 XXH_PREFETCH(customSecret);
5138 { __m256i
const seed = _mm256_set_epi64x((xxh_i64)(0U - seed64), (xxh_i64)seed64, (xxh_i64)(0U - seed64), (xxh_i64)seed64);
5140 const __m256i*
const src = (
const __m256i*) ((
const void*) XXH3_kSecret);
5141 __m256i* dest = ( __m256i*) customSecret;
5143# if defined(__GNUC__) || defined(__clang__)
5149 XXH_COMPILER_GUARD(dest);
5151 XXH_ASSERT(((
size_t)src & 31) == 0);
5152 XXH_ASSERT(((
size_t)dest & 31) == 0);
5155 dest[0] = _mm256_add_epi64(_mm256_load_si256(src+0), seed);
5156 dest[1] = _mm256_add_epi64(_mm256_load_si256(src+1), seed);
5157 dest[2] = _mm256_add_epi64(_mm256_load_si256(src+2), seed);
5158 dest[3] = _mm256_add_epi64(_mm256_load_si256(src+3), seed);
5159 dest[4] = _mm256_add_epi64(_mm256_load_si256(src+4), seed);
5160 dest[5] = _mm256_add_epi64(_mm256_load_si256(src+5), seed);
/* SSE2 code path: also always compiled under the x86 dispatcher. */
5167#if (XXH_VECTOR == XXH_SSE2) || defined(XXH_X86DISPATCH)
5169#ifndef XXH_TARGET_SSE2
5170# define XXH_TARGET_SSE2
5174XXH3_accumulate_512_sse2(
void* XXH_RESTRICT acc,
5175 const void* XXH_RESTRICT input,
5176 const void* XXH_RESTRICT secret)
5179 XXH_ASSERT((((
size_t)acc) & 15) == 0);
5180 { __m128i*
const xacc = (__m128i *) acc;
5183 const __m128i*
const xinput = (
const __m128i *) input;
5186 const __m128i*
const xsecret = (
const __m128i *) secret;
5189 for (i=0; i < XXH_STRIPE_LEN/
sizeof(__m128i); i++) {
5191 __m128i
const data_vec = _mm_loadu_si128 (xinput+i);
5193 __m128i
const key_vec = _mm_loadu_si128 (xsecret+i);
5195 __m128i
const data_key = _mm_xor_si128 (data_vec, key_vec);
5197 __m128i
const data_key_lo = _mm_shuffle_epi32 (data_key, _MM_SHUFFLE(0, 3, 0, 1));
5199 __m128i
const product = _mm_mul_epu32 (data_key, data_key_lo);
5201 __m128i
const data_swap = _mm_shuffle_epi32(data_vec, _MM_SHUFFLE(1,0,3,2));
5202 __m128i
const sum = _mm_add_epi64(xacc[i], data_swap);
5204 xacc[i] = _mm_add_epi64(product, sum);
5210XXH3_scrambleAcc_sse2(
void* XXH_RESTRICT acc,
const void* XXH_RESTRICT secret)
5212 XXH_ASSERT((((
size_t)acc) & 15) == 0);
5213 { __m128i*
const xacc = (__m128i*) acc;
5216 const __m128i*
const xsecret = (
const __m128i *) secret;
5220 for (i=0; i < XXH_STRIPE_LEN/
sizeof(__m128i); i++) {
5222 __m128i
const acc_vec = xacc[i];
5223 __m128i
const shifted = _mm_srli_epi64 (acc_vec, 47);
5224 __m128i
const data_vec = _mm_xor_si128 (acc_vec, shifted);
5226 __m128i
const key_vec = _mm_loadu_si128 (xsecret+i);
5227 __m128i
const data_key = _mm_xor_si128 (data_vec, key_vec);
5230 __m128i
const data_key_hi = _mm_shuffle_epi32 (data_key, _MM_SHUFFLE(0, 3, 0, 1));
5231 __m128i
const prod_lo = _mm_mul_epu32 (data_key, prime32);
5232 __m128i
const prod_hi = _mm_mul_epu32 (data_key_hi, prime32);
5233 xacc[i] = _mm_add_epi64(prod_lo, _mm_slli_epi64(prod_hi, 32));
5238XXH_FORCE_INLINE
XXH_TARGET_SSE2 void XXH3_initCustomSecret_sse2(
void* XXH_RESTRICT customSecret, xxh_u64 seed64)
5240 XXH_STATIC_ASSERT((XXH_SECRET_DEFAULT_SIZE & 15) == 0);
5241 (void)(&XXH_writeLE64);
5242 {
int const nbRounds = XXH_SECRET_DEFAULT_SIZE /
sizeof(__m128i);
5244# if defined(_MSC_VER) && defined(_M_IX86) && _MSC_VER < 1900
5246 XXH_ALIGN(16) const xxh_i64 seed64x2[2] = { (xxh_i64)seed64, (xxh_i64)(0U - seed64) };
5247 __m128i
const seed = _mm_load_si128((__m128i
const*)seed64x2);
5249 __m128i
const seed = _mm_set_epi64x((xxh_i64)(0U - seed64), (xxh_i64)seed64);
5253 const void*
const src16 = XXH3_kSecret;
5254 __m128i* dst16 = (__m128i*) customSecret;
5255# if defined(__GNUC__) || defined(__clang__)
5261 XXH_COMPILER_GUARD(dst16);
5263 XXH_ASSERT(((
size_t)src16 & 15) == 0);
5264 XXH_ASSERT(((
size_t)dst16 & 15) == 0);
5266 for (i=0; i < nbRounds; ++i) {
5267 dst16[i] = _mm_add_epi64(_mm_load_si128((
const __m128i *)src16+i), seed);
5273#if (XXH_VECTOR == XXH_NEON)
/*
 * Forward declarations: the NEON path handles the last XXH_ACC_NB -
 * XXH3_NEON_LANES accumulator lanes with the scalar per-lane routines
 * defined later in this file.
 */
5276XXH_FORCE_INLINE
void
5277XXH3_scalarRound(
void* XXH_RESTRICT acc,
void const* XXH_RESTRICT input,
5278 void const* XXH_RESTRICT secret,
size_t lane);
5280XXH_FORCE_INLINE
void
5281XXH3_scalarScrambleRound(
void* XXH_RESTRICT acc,
5282 void const* XXH_RESTRICT secret,
size_t lane);
/*
 * XXH3_accumulate_512_neon: one 64-byte stripe using NEON.
 * NEON lanes are processed two vectors (4 u64 lanes) at a time via an
 * uzp-based 32-bit split; remaining lanes fall back to XXH3_scalarRound.
 * (Several loop-control and brace lines are elided from this view; the code
 * below is kept byte-identical.)
 */
5308XXH_FORCE_INLINE
void
5309XXH3_accumulate_512_neon(
void* XXH_RESTRICT acc,
5310 const void* XXH_RESTRICT input,
5311 const void* XXH_RESTRICT secret)
5313 XXH_ASSERT((((
size_t)acc) & 15) == 0);
5316 xxh_aliasing_uint64x2_t*
const xacc = (xxh_aliasing_uint64x2_t*) acc;
5318 uint8_t
const* xinput = (
const uint8_t *) input;
5319 uint8_t
const* xsecret = (
const uint8_t *) secret;
/* wasm SIMD128: keep xsecret opaque to prevent unwanted constant folding. */
5322#ifdef __wasm_simd128__
5340 XXH_COMPILER_GUARD(xsecret);
/* Scalar tail lanes (lanes >= XXH3_NEON_LANES). */
5344 XXH3_scalarRound(acc, input, secret, i);
/* Main path: two stripe-halves per iteration. */
5350 uint64x2_t data_vec_1 = XXH_vld1q_u64(xinput + (i * 16));
5351 uint64x2_t data_vec_2 = XXH_vld1q_u64(xinput + ((i+1) * 16));
5353 uint64x2_t key_vec_1 = XXH_vld1q_u64(xsecret + (i * 16));
5354 uint64x2_t key_vec_2 = XXH_vld1q_u64(xsecret + ((i+1) * 16));
/* data_swap = lane-swapped input, added into the accumulator below. */
5356 uint64x2_t data_swap_1 = vextq_u64(data_vec_1, data_vec_1, 1);
5357 uint64x2_t data_swap_2 = vextq_u64(data_vec_2, data_vec_2, 1);
5359 uint64x2_t data_key_1 = veorq_u64(data_vec_1, key_vec_1);
5360 uint64x2_t data_key_2 = veorq_u64(data_vec_2, key_vec_2);
/* Split the xored words into low/high u32 streams with one vuzp. */
5375 uint32x4x2_t unzipped = vuzpq_u32(
5376 vreinterpretq_u32_u64(data_key_1),
5377 vreinterpretq_u32_u64(data_key_2)
5380 uint32x4_t data_key_lo = unzipped.val[0];
5382 uint32x4_t data_key_hi = unzipped.val[1];
/* sum = data_swap + lo32 * hi32 (widening multiply-accumulate). */
5390 uint64x2_t sum_1 = XXH_vmlal_low_u32(data_swap_1, data_key_lo, data_key_hi);
5391 uint64x2_t sum_2 = XXH_vmlal_high_u32(data_swap_2, data_key_lo, data_key_hi);
/* Guards work around clang NEON reassociation pessimization. */
5404 XXH_COMPILER_GUARD_CLANG_NEON(sum_1);
5405 XXH_COMPILER_GUARD_CLANG_NEON(sum_2);
5407 xacc[i] = vaddq_u64(xacc[i], sum_1);
5408 xacc[i+1] = vaddq_u64(xacc[i+1], sum_2);
/* Odd remaining vector lane: same recurrence with narrow ops. */
5413 uint64x2_t data_vec = XXH_vld1q_u64(xinput + (i * 16));
5415 uint64x2_t key_vec = XXH_vld1q_u64(xsecret + (i * 16));
5417 uint64x2_t data_swap = vextq_u64(data_vec, data_vec, 1);
5419 uint64x2_t data_key = veorq_u64(data_vec, key_vec);
5422 uint32x2_t data_key_lo = vmovn_u64(data_key);
5424 uint32x2_t data_key_hi = vshrn_n_u64(data_key, 32);
5426 uint64x2_t sum = vmlal_u32(data_swap, data_key_lo, data_key_hi);
5428 XXH_COMPILER_GUARD_CLANG_NEON(sum);
5430 xacc[i] = vaddq_u64 (xacc[i], sum);
/* Stamp out the multi-stripe driver for the NEON kernel. */
5434XXH_FORCE_INLINE XXH3_ACCUMULATE_TEMPLATE(neon)
/*
 * XXH3_scrambleAcc_neon: acc = (acc ^ (acc >> 47) ^ secret) * XXH_PRIME32_1
 * per 64-bit lane. The prime is split into a 32-bit high constant (kPrimeHi)
 * and a low constant (kPrimeLo, declared in an elided line) so the 64-bit
 * multiply can be built from 32-bit NEON multiplies.
 * (Loop control and several declarations are elided; code kept byte-identical.)
 */
5436XXH_FORCE_INLINE
void
5437XXH3_scrambleAcc_neon(
void* XXH_RESTRICT acc,
const void* XXH_RESTRICT secret)
5439 XXH_ASSERT((((
size_t)acc) & 15) == 0);
5441 { xxh_aliasing_uint64x2_t* xacc = (xxh_aliasing_uint64x2_t*) acc;
5442 uint8_t
const* xsecret = (uint8_t
const*) secret;
5446#ifndef __wasm_simd128__
/* XXH_PRIME32_1 placed in the high 32 bits of each u64 lane. */
5450 uint32x4_t
const kPrimeHi = vreinterpretq_u32_u64(vdupq_n_u64((xxh_u64)
XXH_PRIME32_1 << 32));
/* Scalar tail lanes (lanes >= XXH3_NEON_LANES). */
5455 XXH3_scalarScrambleRound(acc, secret, i);
/* acc ^= acc >> 47 */
5459 uint64x2_t acc_vec = xacc[i];
5460 uint64x2_t shifted = vshrq_n_u64(acc_vec, 47);
5461 uint64x2_t data_vec = veorq_u64(acc_vec, shifted);
/* acc ^= secret */
5464 uint64x2_t key_vec = XXH_vld1q_u64(xsecret + (i * 16));
5465 uint64x2_t data_key = veorq_u64(data_vec, key_vec);
5467#ifdef __wasm_simd128__
/* prod_hi: contribution of the prime's high word; low word via vmlal. */
5482 uint32x4_t prod_hi = vmulq_u32 (vreinterpretq_u32_u64(data_key), kPrimeHi);
5484 uint32x2_t data_key_lo = vmovn_u64(data_key);
5486 xacc[i] = vmlal_u32(vreinterpretq_u64_u32(prod_hi), data_key_lo, kPrimeLo);
#if (XXH_VECTOR == XXH_VSX)

/*
 * XXH3_accumulate_512_vsx: one 64-byte stripe using POWER VSX / s390x VX.
 * Same per-lane recurrence as the x86 kernels; vec_rl by 32 swaps the
 * 32-bit halves for the multiply operand.
 * NOTE(review): loop counter, the `acc_vec += product` line, the
 * __s390x__ #ifdef framing and braces were lost in the paste; reconstructed
 * from upstream xxHash — verify.
 */
XXH_FORCE_INLINE void
XXH3_accumulate_512_vsx(  void* XXH_RESTRICT acc,
                    const void* XXH_RESTRICT input,
                    const void* XXH_RESTRICT secret)
{
    /* presumed aligned */
    xxh_aliasing_u64x2* const xacc = (xxh_aliasing_u64x2*) acc;
    xxh_u8 const* const xinput  = (xxh_u8 const*) input;
    xxh_u8 const* const xsecret = (xxh_u8 const*) secret;
    xxh_u64x2 const v32 = { 32, 32 };
    size_t i;
    for (i = 0; i < XXH_STRIPE_LEN / sizeof(xxh_u64x2); i++) {
        /* data_key = data ^ key */
        xxh_u64x2 const data_vec = XXH_vec_loadu(xinput + 16*i);
        xxh_u64x2 const key_vec  = XXH_vec_loadu(xsecret + 16*i);
        xxh_u64x2 const data_key = data_vec ^ key_vec;
        /* shuffled = rotate each u64 by 32 => swapped 32-bit halves */
        xxh_u32x4 const shuffled = (xxh_u32x4)vec_rl(data_key, v32);
        /* product = lo32(data_key) * hi32(data_key) (odd-element multiply) */
        xxh_u64x2 const product  = XXH_vec_mulo((xxh_u32x4)data_key, shuffled);
        /* acc += product + swap64-halves(data) */
        xxh_u64x2 acc_vec = xacc[i];
        acc_vec += product;
#ifdef __s390x__
        acc_vec += vec_permi(data_vec, data_vec, 2);
#else
        acc_vec += vec_xxpermdi(data_vec, data_vec, 2);
#endif
        xacc[i] = acc_vec;
    }
}
XXH_FORCE_INLINE XXH3_ACCUMULATE_TEMPLATE(vsx)
/*
 * XXH3_scrambleAcc_vsx: acc = (acc ^ (acc >> 47) ^ secret) * XXH_PRIME32_1,
 * built from even/odd 32x32->64 multiplies.
 * NOTE(review): the `prime` vector declaration, loop counter and braces
 * were lost in the paste; reconstructed from upstream xxHash — verify.
 */
XXH_FORCE_INLINE void
XXH3_scrambleAcc_vsx(void* XXH_RESTRICT acc, const void* XXH_RESTRICT secret)
{
    XXH_ASSERT((((size_t)acc) & 15) == 0);
    {   xxh_aliasing_u64x2* const xacc = (xxh_aliasing_u64x2*) acc;
        const xxh_u8* const xsecret = (const xxh_u8*) secret;
        /* constants */
        xxh_u64x2 const v32 = { 32, 32 };
        xxh_u64x2 const v47 = { 47, 47 };
        xxh_u32x4 const prime = { XXH_PRIME32_1, XXH_PRIME32_1, XXH_PRIME32_1, XXH_PRIME32_1 };
        size_t i;
        for (i = 0; i < XXH_STRIPE_LEN / sizeof(xxh_u64x2); i++) {
            /* acc ^= acc >> 47 */
            xxh_u64x2 const acc_vec  = xacc[i];
            xxh_u64x2 const data_vec = acc_vec ^ (acc_vec >> v47);
            /* acc ^= secret */
            xxh_u64x2 const key_vec  = XXH_vec_loadu(xsecret + 16*i);
            xxh_u64x2 const data_key = data_vec ^ key_vec;
            /* acc *= XXH_PRIME32_1: even products shifted up + odd products */
            xxh_u64x2 const prod_even = XXH_vec_mule((xxh_u32x4)data_key, prime);
            xxh_u64x2 const prod_odd  = XXH_vec_mulo((xxh_u32x4)data_key, prime);
            xacc[i] = prod_odd + (prod_even << v32);
        }
    }
}
5563#if (XXH_VECTOR == XXH_SVE)
/*
 * XXH3_accumulate_512_sve: one 64-byte stripe using SVE, dispatched on the
 * hardware vector length (svcntd): 8-lane, 2-lane, or 4-lane predicates.
 * The ACCRND() invocations between load and store are elided from this view;
 * code kept byte-identical.
 */
5565XXH_FORCE_INLINE
void
5566XXH3_accumulate_512_sve(
void* XXH_RESTRICT acc,
5567 const void* XXH_RESTRICT input,
5568 const void* XXH_RESTRICT secret)
5570 uint64_t *xacc = (uint64_t *)acc;
5571 const uint64_t *xinput = (
const uint64_t *)(
const void *)input;
5572 const uint64_t *xsecret = (
const uint64_t *)(
const void *)secret;
/* kSwap = {1,0,3,2,...}: index vector that swaps adjacent u64 lanes. */
5573 svuint64_t kSwap = sveor_n_u64_z(svptrue_b64(), svindex_u64(0, 1), 1);
5574 uint64_t element_count = svcntd();
5575 if (element_count >= 8) {
5576 svbool_t mask = svptrue_pat_b64(SV_VL8);
5577 svuint64_t vacc = svld1_u64(mask, xacc);
5579 svst1_u64(mask, xacc, vacc);
5580 }
else if (element_count == 2) {
5581 svbool_t mask = svptrue_pat_b64(SV_VL2);
5582 svuint64_t acc0 = svld1_u64(mask, xacc + 0);
5583 svuint64_t acc1 = svld1_u64(mask, xacc + 2);
5584 svuint64_t acc2 = svld1_u64(mask, xacc + 4);
5585 svuint64_t acc3 = svld1_u64(mask, xacc + 6);
5590 svst1_u64(mask, xacc + 0, acc0);
5591 svst1_u64(mask, xacc + 2, acc1);
5592 svst1_u64(mask, xacc + 4, acc2);
5593 svst1_u64(mask, xacc + 6, acc3);
/* 4-lane fallback (element_count == 4). */
5595 svbool_t mask = svptrue_pat_b64(SV_VL4);
5596 svuint64_t acc0 = svld1_u64(mask, xacc + 0);
5597 svuint64_t acc1 = svld1_u64(mask, xacc + 4);
5600 svst1_u64(mask, xacc + 0, acc0);
5601 svst1_u64(mask, xacc + 4, acc1);
/*
 * XXH3_accumulate_sve: multi-stripe SVE driver. Keeps the accumulators in
 * registers across all stripes (unlike the generic template), prefetching
 * 128 u64 ahead each iteration. The ACCRND() calls and pointer advances in
 * the do/while bodies are elided from this view; code kept byte-identical.
 */
5605XXH_FORCE_INLINE
void
5606XXH3_accumulate_sve(xxh_u64* XXH_RESTRICT acc,
5607 const xxh_u8* XXH_RESTRICT input,
5608 const xxh_u8* XXH_RESTRICT secret,
5611 if (nbStripes != 0) {
5612 uint64_t *xacc = (uint64_t *)acc;
5613 const uint64_t *xinput = (
const uint64_t *)(
const void *)input;
5614 const uint64_t *xsecret = (
const uint64_t *)(
const void *)secret;
5615 svuint64_t kSwap = sveor_n_u64_z(svptrue_b64(), svindex_u64(0, 1), 1);
5616 uint64_t element_count = svcntd();
5617 if (element_count >= 8) {
5618 svbool_t mask = svptrue_pat_b64(SV_VL8);
5619 svuint64_t vacc = svld1_u64(mask, xacc + 0);
5622 svprfd(mask, xinput + 128, SV_PLDL1STRM);
5627 }
while (nbStripes != 0);
5629 svst1_u64(mask, xacc + 0, vacc);
5630 }
else if (element_count == 2) {
5631 svbool_t mask = svptrue_pat_b64(SV_VL2);
5632 svuint64_t acc0 = svld1_u64(mask, xacc + 0);
5633 svuint64_t acc1 = svld1_u64(mask, xacc + 2);
5634 svuint64_t acc2 = svld1_u64(mask, xacc + 4);
5635 svuint64_t acc3 = svld1_u64(mask, xacc + 6);
5637 svprfd(mask, xinput + 128, SV_PLDL1STRM);
5645 }
while (nbStripes != 0);
5647 svst1_u64(mask, xacc + 0, acc0);
5648 svst1_u64(mask, xacc + 2, acc1);
5649 svst1_u64(mask, xacc + 4, acc2);
5650 svst1_u64(mask, xacc + 6, acc3);
/* 4-lane fallback (element_count == 4). */
5652 svbool_t mask = svptrue_pat_b64(SV_VL4);
5653 svuint64_t acc0 = svld1_u64(mask, xacc + 0);
5654 svuint64_t acc1 = svld1_u64(mask, xacc + 4);
5656 svprfd(mask, xinput + 128, SV_PLDL1STRM);
5662 }
while (nbStripes != 0);
5664 svst1_u64(mask, xacc + 0, acc0);
5665 svst1_u64(mask, xacc + 4, acc1);
5672#if (XXH_VECTOR == XXH_LSX)
5673#define _LSX_SHUFFLE(z, y, x, w) (((z) << 6) | ((y) << 4) | ((x) << 2) | (w))
5675XXH_FORCE_INLINE
void
5676XXH3_accumulate_512_lsx(
void* XXH_RESTRICT acc,
5677 const void* XXH_RESTRICT input,
5678 const void* XXH_RESTRICT secret)
5680 XXH_ASSERT((((
size_t)acc) & 15) == 0);
5682 __m128i*
const xacc = (__m128i *) acc;
5683 const __m128i*
const xinput = (
const __m128i *) input;
5684 const __m128i*
const xsecret = (
const __m128i *) secret;
5686 for (
size_t i = 0; i < XXH_STRIPE_LEN /
sizeof(__m128i); i++) {
5688 __m128i
const data_vec = __lsx_vld(xinput + i, 0);
5690 __m128i
const key_vec = __lsx_vld(xsecret + i, 0);
5692 __m128i
const data_key = __lsx_vxor_v(data_vec, key_vec);
5694 __m128i
const data_key_lo = __lsx_vsrli_d(data_key, 32);
5697 __m128i
const product = __lsx_vmulwev_d_wu(data_key, data_key_lo);
5699 __m128i
const data_swap = __lsx_vshuf4i_w(data_vec, _LSX_SHUFFLE(1, 0, 3, 2));
5700 __m128i
const sum = __lsx_vadd_d(xacc[i], data_swap);
5702 xacc[i] = __lsx_vadd_d(product, sum);
5706XXH_FORCE_INLINE XXH3_ACCUMULATE_TEMPLATE(lsx)
5708XXH_FORCE_INLINE
void
5709XXH3_scrambleAcc_lsx(
void* XXH_RESTRICT acc,
const void* XXH_RESTRICT secret)
5711 XXH_ASSERT((((
size_t)acc) & 15) == 0);
5713 __m128i*
const xacc = (__m128i*) acc;
5714 const __m128i*
const xsecret = (
const __m128i *) secret;
5715 const __m128i prime32 = __lsx_vreplgr2vr_w((
int)
XXH_PRIME32_1);
5717 for (
size_t i = 0; i < XXH_STRIPE_LEN /
sizeof(__m128i); i++) {
5719 __m128i
const acc_vec = xacc[i];
5720 __m128i
const shifted = __lsx_vsrli_d(acc_vec, 47);
5721 __m128i
const data_vec = __lsx_vxor_v(acc_vec, shifted);
5723 __m128i
const key_vec = __lsx_vld(xsecret + i, 0);
5724 __m128i
const data_key = __lsx_vxor_v(data_vec, key_vec);
5727 __m128i
const data_key_hi = __lsx_vsrli_d(data_key, 32);
5728 __m128i
const prod_lo = __lsx_vmulwev_d_wu(data_key, prime32);
5729 __m128i
const prod_hi = __lsx_vmulwev_d_wu(data_key_hi, prime32);
5730 xacc[i] = __lsx_vadd_d(prod_lo, __lsx_vslli_d(prod_hi, 32));
5739#if defined(__aarch64__) && (defined(__GNUC__) || defined(__clang__))
5754XXH_FORCE_INLINE xxh_u64
5755XXH_mult32to64_add64(xxh_u64 lhs, xxh_u64 rhs, xxh_u64 acc)
5759 __asm__(
"umaddl %x0, %w1, %w2, %x3" :
"=r" (ret) :
"r" (lhs),
"r" (rhs),
"r" (acc));
5763XXH_FORCE_INLINE xxh_u64
5764XXH_mult32to64_add64(xxh_u64 lhs, xxh_u64 rhs, xxh_u64 acc)
5766 return XXH_mult32to64((xxh_u32)lhs, (xxh_u32)rhs) + acc;
5777XXH_FORCE_INLINE
void
5778XXH3_scalarRound(
void* XXH_RESTRICT acc,
5779 void const* XXH_RESTRICT input,
5780 void const* XXH_RESTRICT secret,
5783 xxh_u64* xacc = (xxh_u64*) acc;
5784 xxh_u8
const* xinput = (xxh_u8
const*) input;
5785 xxh_u8
const* xsecret = (xxh_u8
const*) secret;
5786 XXH_ASSERT(lane < XXH_ACC_NB);
5789 xxh_u64
const data_val = XXH_readLE64(xinput + lane * 8);
5790 xxh_u64
const data_key = data_val ^ XXH_readLE64(xsecret + lane * 8);
5791 xacc[lane ^ 1] += data_val;
5792 xacc[lane] = XXH_mult32to64_add64(data_key , data_key >> 32, xacc[lane]);
5800XXH_FORCE_INLINE
void
5801XXH3_accumulate_512_scalar(
void* XXH_RESTRICT acc,
5802 const void* XXH_RESTRICT input,
5803 const void* XXH_RESTRICT secret)
5807#if defined(__GNUC__) && !defined(__clang__) \
5808 && (defined(__arm__) || defined(__thumb2__)) \
5809 && defined(__ARM_FEATURE_UNALIGNED) \
5810 && XXH_SIZE_OPT <= 0
5811# pragma GCC unroll 8
5813 for (i=0; i < XXH_ACC_NB; i++) {
5814 XXH3_scalarRound(acc, input, secret, i);
5817XXH_FORCE_INLINE XXH3_ACCUMULATE_TEMPLATE(scalar)
5826XXH_FORCE_INLINE
void
5827XXH3_scalarScrambleRound(
void* XXH_RESTRICT acc,
5828 void const* XXH_RESTRICT secret,
5831 xxh_u64*
const xacc = (xxh_u64*) acc;
5832 const xxh_u8*
const xsecret = (
const xxh_u8*) secret;
5834 XXH_ASSERT(lane < XXH_ACC_NB);
5836 xxh_u64
const key64 = XXH_readLE64(xsecret + lane * 8);
5837 xxh_u64 acc64 = xacc[lane];
5838 acc64 = XXH_xorshift64(acc64, 47);
5849XXH_FORCE_INLINE
void
5850XXH3_scrambleAcc_scalar(
void* XXH_RESTRICT acc,
const void* XXH_RESTRICT secret)
5853 for (i=0; i < XXH_ACC_NB; i++) {
5854 XXH3_scalarScrambleRound(acc, secret, i);
5858XXH_FORCE_INLINE
void
5859XXH3_initCustomSecret_scalar(
void* XXH_RESTRICT customSecret, xxh_u64 seed64)
5866 const xxh_u8* kSecretPtr = XXH3_kSecret;
5867 XXH_STATIC_ASSERT((XXH_SECRET_DEFAULT_SIZE & 15) == 0);
5869#if defined(__GNUC__) && defined(__aarch64__)
5902 XXH_COMPILER_GUARD(kSecretPtr);
5904 {
int const nbRounds = XXH_SECRET_DEFAULT_SIZE / 16;
5906 for (i=0; i < nbRounds; i++) {
5913 xxh_u64 lo = XXH_readLE64(kSecretPtr + 16*i) + seed64;
5914 xxh_u64 hi = XXH_readLE64(kSecretPtr + 16*i + 8) - seed64;
5915 XXH_writeLE64((xxh_u8*)customSecret + 16*i, lo);
5916 XXH_writeLE64((xxh_u8*)customSecret + 16*i + 8, hi);
5921typedef void (*XXH3_f_accumulate)(xxh_u64* XXH_RESTRICT,
const xxh_u8* XXH_RESTRICT,
const xxh_u8* XXH_RESTRICT, size_t);
5922typedef void (*XXH3_f_scrambleAcc)(
void* XXH_RESTRICT,
const void*);
5923typedef void (*XXH3_f_initCustomSecret)(
void* XXH_RESTRICT, xxh_u64);
5926#if (XXH_VECTOR == XXH_AVX512)
5928#define XXH3_accumulate_512 XXH3_accumulate_512_avx512
5929#define XXH3_accumulate XXH3_accumulate_avx512
5930#define XXH3_scrambleAcc XXH3_scrambleAcc_avx512
5931#define XXH3_initCustomSecret XXH3_initCustomSecret_avx512
5933#elif (XXH_VECTOR == XXH_AVX2)
5935#define XXH3_accumulate_512 XXH3_accumulate_512_avx2
5936#define XXH3_accumulate XXH3_accumulate_avx2
5937#define XXH3_scrambleAcc XXH3_scrambleAcc_avx2
5938#define XXH3_initCustomSecret XXH3_initCustomSecret_avx2
5940#elif (XXH_VECTOR == XXH_SSE2)
5942#define XXH3_accumulate_512 XXH3_accumulate_512_sse2
5943#define XXH3_accumulate XXH3_accumulate_sse2
5944#define XXH3_scrambleAcc XXH3_scrambleAcc_sse2
5945#define XXH3_initCustomSecret XXH3_initCustomSecret_sse2
5947#elif (XXH_VECTOR == XXH_NEON)
5949#define XXH3_accumulate_512 XXH3_accumulate_512_neon
5950#define XXH3_accumulate XXH3_accumulate_neon
5951#define XXH3_scrambleAcc XXH3_scrambleAcc_neon
5952#define XXH3_initCustomSecret XXH3_initCustomSecret_scalar
5954#elif (XXH_VECTOR == XXH_VSX)
5956#define XXH3_accumulate_512 XXH3_accumulate_512_vsx
5957#define XXH3_accumulate XXH3_accumulate_vsx
5958#define XXH3_scrambleAcc XXH3_scrambleAcc_vsx
5959#define XXH3_initCustomSecret XXH3_initCustomSecret_scalar
5961#elif (XXH_VECTOR == XXH_SVE)
5962#define XXH3_accumulate_512 XXH3_accumulate_512_sve
5963#define XXH3_accumulate XXH3_accumulate_sve
5964#define XXH3_scrambleAcc XXH3_scrambleAcc_scalar
5965#define XXH3_initCustomSecret XXH3_initCustomSecret_scalar
5967#elif (XXH_VECTOR == XXH_LSX)
5968#define XXH3_accumulate_512 XXH3_accumulate_512_lsx
5969#define XXH3_accumulate XXH3_accumulate_lsx
5970#define XXH3_scrambleAcc XXH3_scrambleAcc_lsx
5971#define XXH3_initCustomSecret XXH3_initCustomSecret_scalar
5975#define XXH3_accumulate_512 XXH3_accumulate_512_scalar
5976#define XXH3_accumulate XXH3_accumulate_scalar
5977#define XXH3_scrambleAcc XXH3_scrambleAcc_scalar
5978#define XXH3_initCustomSecret XXH3_initCustomSecret_scalar
5982#if XXH_SIZE_OPT >= 1
5983# undef XXH3_initCustomSecret
5984# define XXH3_initCustomSecret XXH3_initCustomSecret_scalar
5987XXH_FORCE_INLINE
void
5988XXH3_hashLong_internal_loop(xxh_u64* XXH_RESTRICT acc,
5989 const xxh_u8* XXH_RESTRICT input,
size_t len,
5990 const xxh_u8* XXH_RESTRICT secret,
size_t secretSize,
5991 XXH3_f_accumulate f_acc,
5992 XXH3_f_scrambleAcc f_scramble)
5994 size_t const nbStripesPerBlock = (secretSize - XXH_STRIPE_LEN) / XXH_SECRET_CONSUME_RATE;
5995 size_t const block_len = XXH_STRIPE_LEN * nbStripesPerBlock;
5996 size_t const nb_blocks = (len - 1) / block_len;
6002 for (n = 0; n < nb_blocks; n++) {
6003 f_acc(acc, input + n*block_len, secret, nbStripesPerBlock);
6004 f_scramble(acc, secret + secretSize - XXH_STRIPE_LEN);
6008 XXH_ASSERT(len > XXH_STRIPE_LEN);
6009 {
size_t const nbStripes = ((len - 1) - (block_len * nb_blocks)) / XXH_STRIPE_LEN;
6010 XXH_ASSERT(nbStripes <= (secretSize / XXH_SECRET_CONSUME_RATE));
6011 f_acc(acc, input + nb_blocks*block_len, secret, nbStripes);
6014 {
const xxh_u8*
const p = input + len - XXH_STRIPE_LEN;
6015#define XXH_SECRET_LASTACC_START 7
6016 XXH3_accumulate_512(acc, p, secret + secretSize - XXH_STRIPE_LEN - XXH_SECRET_LASTACC_START);
6020XXH_FORCE_INLINE xxh_u64
6021XXH3_mix2Accs(
const xxh_u64* XXH_RESTRICT acc,
const xxh_u8* XXH_RESTRICT secret)
6023 return XXH3_mul128_fold64(
6024 acc[0] ^ XXH_readLE64(secret),
6025 acc[1] ^ XXH_readLE64(secret+8) );
6029XXH3_mergeAccs(
const xxh_u64* XXH_RESTRICT acc,
const xxh_u8* XXH_RESTRICT secret, xxh_u64 start)
6031 xxh_u64 result64 = start;
6034 for (i = 0; i < 4; i++) {
6035 result64 += XXH3_mix2Accs(acc+2*i, secret + 16*i);
6036#if defined(__clang__) \
6037 && (defined(__arm__) || defined(__thumb__)) \
6038 && (defined(__ARM_NEON) || defined(__ARM_NEON__)) \
6039 && !defined(XXH_ENABLE_AUTOVECTORIZE)
6048 XXH_COMPILER_GUARD(result64);
6052 return XXH3_avalanche(result64);
6056#define XXH_SECRET_MERGEACCS_START 11
6059XXH3_finalizeLong_64b(
const xxh_u64* XXH_RESTRICT acc,
const xxh_u8* XXH_RESTRICT secret, xxh_u64 len)
6061 return XXH3_mergeAccs(acc, secret + XXH_SECRET_MERGEACCS_START, len *
XXH_PRIME64_1);
6064#define XXH3_INIT_ACC { XXH_PRIME32_3, XXH_PRIME64_1, XXH_PRIME64_2, XXH_PRIME64_3, \
6065 XXH_PRIME64_4, XXH_PRIME32_2, XXH_PRIME64_5, XXH_PRIME32_1 }
6068XXH3_hashLong_64b_internal(
const void* XXH_RESTRICT input,
size_t len,
6069 const void* XXH_RESTRICT secret,
size_t secretSize,
6070 XXH3_f_accumulate f_acc,
6071 XXH3_f_scrambleAcc f_scramble)
6073 XXH_ALIGN(
XXH_ACC_ALIGN) xxh_u64 acc[XXH_ACC_NB] = XXH3_INIT_ACC;
6075 XXH3_hashLong_internal_loop(acc, (
const xxh_u8*)input, len, (
const xxh_u8*)secret, secretSize, f_acc, f_scramble);
6078 XXH_STATIC_ASSERT(
sizeof(acc) == 64);
6079 XXH_ASSERT(secretSize >=
sizeof(acc) + XXH_SECRET_MERGEACCS_START);
6080 return XXH3_finalizeLong_64b(acc, (
const xxh_u8*)secret, (xxh_u64)len);
6091XXH3_hashLong_64b_withSecret(
const void* XXH_RESTRICT input,
size_t len,
6092 XXH64_hash_t seed64,
const xxh_u8* XXH_RESTRICT secret,
size_t secretLen)
6095 return XXH3_hashLong_64b_internal(input, len, secret, secretLen, XXH3_accumulate, XXH3_scrambleAcc);
6105XXH3_hashLong_64b_default(
const void* XXH_RESTRICT input,
size_t len,
6106 XXH64_hash_t seed64,
const xxh_u8* XXH_RESTRICT secret,
size_t secretLen)
6108 (void)seed64; (void)secret; (void)secretLen;
6109 return XXH3_hashLong_64b_internal(input, len, XXH3_kSecret,
sizeof(XXH3_kSecret), XXH3_accumulate, XXH3_scrambleAcc);
6124XXH3_hashLong_64b_withSeed_internal(
const void* input,
size_t len,
6126 XXH3_f_accumulate f_acc,
6127 XXH3_f_scrambleAcc f_scramble,
6128 XXH3_f_initCustomSecret f_initSec)
6130#if XXH_SIZE_OPT <= 0
6132 return XXH3_hashLong_64b_internal(input, len,
6133 XXH3_kSecret,
sizeof(XXH3_kSecret),
6136 { XXH_ALIGN(XXH_SEC_ALIGN) xxh_u8 secret[XXH_SECRET_DEFAULT_SIZE];
6137 f_initSec(secret, seed);
6138 return XXH3_hashLong_64b_internal(input, len, secret,
sizeof(secret),
6147XXH3_hashLong_64b_withSeed(
const void* XXH_RESTRICT input,
size_t len,
6148 XXH64_hash_t seed,
const xxh_u8* XXH_RESTRICT secret,
size_t secretLen)
6150 (void)secret; (void)secretLen;
6151 return XXH3_hashLong_64b_withSeed_internal(input, len, seed,
6152 XXH3_accumulate, XXH3_scrambleAcc, XXH3_initCustomSecret);
6156typedef XXH64_hash_t (*XXH3_hashLong64_f)(
const void* XXH_RESTRICT, size_t,
6160XXH3_64bits_internal(
const void* XXH_RESTRICT input,
size_t len,
6161 XXH64_hash_t seed64,
const void* XXH_RESTRICT secret,
size_t secretLen,
6162 XXH3_hashLong64_f f_hashLong)
6173 return XXH3_len_0to16_64b((
const xxh_u8*)input, len, (
const xxh_u8*)secret, seed64);
6175 return XXH3_len_17to128_64b((
const xxh_u8*)input, len, (
const xxh_u8*)secret, secretLen, seed64);
6177 return XXH3_len_129to240_64b((
const xxh_u8*)input, len, (
const xxh_u8*)secret, secretLen, seed64);
6178 return f_hashLong(input, len, seed64, (
const xxh_u8*)secret, secretLen);
6187 return XXH3_64bits_internal(input, length, 0, XXH3_kSecret,
sizeof(XXH3_kSecret), XXH3_hashLong_64b_default);
6194 return XXH3_64bits_internal(input, length, 0, secret, secretSize, XXH3_hashLong_64b_withSecret);
6201 return XXH3_64bits_internal(input, length, seed, XXH3_kSecret,
sizeof(XXH3_kSecret), XXH3_hashLong_64b_withSeed);
6208 return XXH3_64bits_internal(input, length, seed, XXH3_kSecret,
sizeof(XXH3_kSecret), NULL);
6209 return XXH3_hashLong_64b_withSecret(input, length, seed, (
const xxh_u8*)secret, secretSize);
6214#ifndef XXH_NO_STREAM
6238static XXH_MALLOCF
void* XXH_alignedMalloc(
size_t s,
size_t align)
6240 XXH_ASSERT(align <= 128 && align >= 8);
6241 XXH_ASSERT((align & (align-1)) == 0);
6242 XXH_ASSERT(s != 0 && s < (s + align));
6244 xxh_u8* base = (xxh_u8*)XXH_malloc(s + align);
6252 size_t offset = align - ((size_t)base & (align - 1));
6254 xxh_u8* ptr = base + offset;
6256 XXH_ASSERT((
size_t)ptr % align == 0);
6259 ptr[-1] = (xxh_u8)offset;
6269static void XXH_alignedFree(
void* p)
6272 xxh_u8* ptr = (xxh_u8*)p;
6274 xxh_u8 offset = ptr[-1];
6276 xxh_u8* base = ptr - offset;
6294 if (state==NULL)
return NULL;
6313 XXH_alignedFree(statePtr);
6321 XXH_memcpy(dst_state, src_state,
sizeof(*dst_state));
6327 const void* secret,
size_t secretSize)
6329 size_t const initStart = offsetof(
XXH3_state_t, bufferedSize);
6330 size_t const initLength = offsetof(
XXH3_state_t, nbStripesPerBlock) - initStart;
6331 XXH_ASSERT(offsetof(
XXH3_state_t, nbStripesPerBlock) > initStart);
6332 XXH_ASSERT(statePtr != NULL);
6334 memset((
char*)statePtr + initStart, 0, initLength);
6343 statePtr->
seed = seed;
6344 statePtr->
useSeed = (seed != 0);
6345 statePtr->
extSecret = (
const unsigned char*)secret;
6347 statePtr->
secretLimit = secretSize - XXH_STRIPE_LEN;
6356 XXH3_reset_internal(statePtr, 0, XXH3_kSecret, XXH_SECRET_DEFAULT_SIZE);
6365 XXH3_reset_internal(statePtr, 0, secret, secretSize);
6377 if ((seed != statePtr->seed) || (statePtr->extSecret != NULL))
6378 XXH3_initCustomSecret(statePtr->customSecret, seed);
6379 XXH3_reset_internal(statePtr, seed, NULL, XXH_SECRET_DEFAULT_SIZE);
6390 XXH3_reset_internal(statePtr, seed64, secret, secretSize);
6391 statePtr->useSeed = 1;
6412XXH_FORCE_INLINE
const xxh_u8 *
6413XXH3_consumeStripes(xxh_u64* XXH_RESTRICT acc,
6414 size_t* XXH_RESTRICT nbStripesSoFarPtr,
size_t nbStripesPerBlock,
6415 const xxh_u8* XXH_RESTRICT input,
size_t nbStripes,
6416 const xxh_u8* XXH_RESTRICT secret,
size_t secretLimit,
6417 XXH3_f_accumulate f_acc,
6418 XXH3_f_scrambleAcc f_scramble)
6420 const xxh_u8* initialSecret = secret + *nbStripesSoFarPtr * XXH_SECRET_CONSUME_RATE;
6422 if (nbStripes >= (nbStripesPerBlock - *nbStripesSoFarPtr)) {
6424 size_t nbStripesThisIter = nbStripesPerBlock - *nbStripesSoFarPtr;
6428 f_acc(acc, input, initialSecret, nbStripesThisIter);
6429 f_scramble(acc, secret + secretLimit);
6430 input += nbStripesThisIter * XXH_STRIPE_LEN;
6431 nbStripes -= nbStripesThisIter;
6433 nbStripesThisIter = nbStripesPerBlock;
6434 initialSecret = secret;
6435 }
while (nbStripes >= nbStripesPerBlock);
6436 *nbStripesSoFarPtr = 0;
6439 if (nbStripes > 0) {
6440 f_acc(acc, input, initialSecret, nbStripes);
6441 input += nbStripes * XXH_STRIPE_LEN;
6442 *nbStripesSoFarPtr += nbStripes;
6448#ifndef XXH3_STREAM_USE_STACK
6449# if XXH_SIZE_OPT <= 0 && !defined(__clang__)
6450# define XXH3_STREAM_USE_STACK 1
6458 const xxh_u8* XXH_RESTRICT input,
size_t len,
6459 XXH3_f_accumulate f_acc,
6460 XXH3_f_scrambleAcc f_scramble)
6463 XXH_ASSERT(len == 0);
6467 XXH_ASSERT(state != NULL);
6468 {
const xxh_u8*
const bEnd = input + len;
6469 const unsigned char*
const secret = (state->extSecret == NULL) ? state->customSecret : state->extSecret;
6470#if defined(XXH3_STREAM_USE_STACK) && XXH3_STREAM_USE_STACK >= 1
6476 XXH_memcpy(acc, state->acc,
sizeof(acc));
6478 xxh_u64* XXH_RESTRICT
const acc = state->acc;
6480 state->totalLen += len;
6484 if (len <= XXH3_INTERNALBUFFER_SIZE - state->bufferedSize) {
6485 XXH_memcpy(state->buffer + state->bufferedSize, input, len);
6491 #define XXH3_INTERNALBUFFER_STRIPES (XXH3_INTERNALBUFFER_SIZE / XXH_STRIPE_LEN)
6498 if (state->bufferedSize) {
6500 XXH_memcpy(state->buffer + state->bufferedSize, input, loadSize);
6502 XXH3_consumeStripes(acc,
6503 &state->nbStripesSoFar, state->nbStripesPerBlock,
6504 state->buffer, XXH3_INTERNALBUFFER_STRIPES,
6505 secret, state->secretLimit,
6507 state->bufferedSize = 0;
6509 XXH_ASSERT(input < bEnd);
6511 size_t nbStripes = (size_t)(bEnd - 1 - input) / XXH_STRIPE_LEN;
6512 input = XXH3_consumeStripes(acc,
6513 &state->nbStripesSoFar, state->nbStripesPerBlock,
6515 secret, state->secretLimit,
6517 XXH_memcpy(state->buffer +
sizeof(state->buffer) - XXH_STRIPE_LEN, input - XXH_STRIPE_LEN, XXH_STRIPE_LEN);
6521 XXH_ASSERT(input < bEnd);
6523 XXH_ASSERT(state->bufferedSize == 0);
6524 XXH_memcpy(state->buffer, input, (
size_t)(bEnd-input));
6526#if defined(XXH3_STREAM_USE_STACK) && XXH3_STREAM_USE_STACK >= 1
6528 XXH_memcpy(state->acc, acc,
sizeof(acc));
6539 return XXH3_update(state, (
const xxh_u8*)input, len,
6540 XXH3_accumulate, XXH3_scrambleAcc);
6544XXH_FORCE_INLINE
void
6547 const unsigned char* secret)
6549 xxh_u8 lastStripe[XXH_STRIPE_LEN];
6550 const xxh_u8* lastStripePtr;
6556 XXH_memcpy(acc, state->
acc,
sizeof(state->
acc));
6559 size_t const nbStripes = (state->
bufferedSize - 1) / XXH_STRIPE_LEN;
6561 XXH3_consumeStripes(acc,
6563 state->
buffer, nbStripes,
6565 XXH3_accumulate, XXH3_scrambleAcc);
6569 size_t const catchupSize = XXH_STRIPE_LEN - state->
bufferedSize;
6571 XXH_memcpy(lastStripe, state->
buffer +
sizeof(state->
buffer) - catchupSize, catchupSize);
6573 lastStripePtr = lastStripe;
6576 XXH3_accumulate_512(acc,
6578 secret + state->
secretLimit - XXH_SECRET_LASTACC_START);
6584 const unsigned char*
const secret = (state->extSecret == NULL) ? state->customSecret : state->extSecret;
6587 XXH3_digest_long(acc, state, secret);
6588 return XXH3_finalizeLong_64b(acc, secret, (xxh_u64)state->totalLen);
6594 secret, state->secretLimit + XXH_STRIPE_LEN);
6617XXH3_len_1to3_128b(
const xxh_u8* input,
size_t len,
const xxh_u8* secret,
XXH64_hash_t seed)
6620 XXH_ASSERT(input != NULL);
6621 XXH_ASSERT(1 <= len && len <= 3);
6622 XXH_ASSERT(secret != NULL);
6628 { xxh_u8
const c1 = input[0];
6629 xxh_u8
const c2 = input[len >> 1];
6630 xxh_u8
const c3 = input[len - 1];
6631 xxh_u32
const combinedl = ((xxh_u32)c1 <<16) | ((xxh_u32)c2 << 24)
6632 | ((xxh_u32)c3 << 0) | ((xxh_u32)len << 8);
6633 xxh_u32
const combinedh = XXH_rotl32(XXH_swap32(combinedl), 13);
6634 xxh_u64
const bitflipl = (XXH_readLE32(secret) ^ XXH_readLE32(secret+4)) + seed;
6635 xxh_u64
const bitfliph = (XXH_readLE32(secret+8) ^ XXH_readLE32(secret+12)) - seed;
6636 xxh_u64
const keyed_lo = (xxh_u64)combinedl ^ bitflipl;
6637 xxh_u64
const keyed_hi = (xxh_u64)combinedh ^ bitfliph;
6639 h128.
low64 = XXH64_avalanche(keyed_lo);
6640 h128.
high64 = XXH64_avalanche(keyed_hi);
6646XXH3_len_4to8_128b(
const xxh_u8* input,
size_t len,
const xxh_u8* secret,
XXH64_hash_t seed)
6648 XXH_ASSERT(input != NULL);
6649 XXH_ASSERT(secret != NULL);
6650 XXH_ASSERT(4 <= len && len <= 8);
6651 seed ^= (xxh_u64)XXH_swap32((xxh_u32)seed) << 32;
6652 { xxh_u32
const input_lo = XXH_readLE32(input);
6653 xxh_u32
const input_hi = XXH_readLE32(input + len - 4);
6654 xxh_u64
const input_64 = input_lo + ((xxh_u64)input_hi << 32);
6655 xxh_u64
const bitflip = (XXH_readLE64(secret+16) ^ XXH_readLE64(secret+24)) + seed;
6656 xxh_u64
const keyed = input_64 ^ bitflip;
6665 m128.
low64 *= PRIME_MX2;
6673XXH3_len_9to16_128b(
const xxh_u8* input,
size_t len,
const xxh_u8* secret,
XXH64_hash_t seed)
6675 XXH_ASSERT(input != NULL);
6676 XXH_ASSERT(secret != NULL);
6677 XXH_ASSERT(9 <= len && len <= 16);
6678 { xxh_u64
const bitflipl = (XXH_readLE64(secret+32) ^ XXH_readLE64(secret+40)) - seed;
6679 xxh_u64
const bitfliph = (XXH_readLE64(secret+48) ^ XXH_readLE64(secret+56)) + seed;
6680 xxh_u64
const input_lo = XXH_readLE64(input);
6681 xxh_u64 input_hi = XXH_readLE64(input + len - 8);
6687 m128.
low64 += (xxh_u64)(len - 1) << 54;
6688 input_hi ^= bitfliph;
6696 if (
sizeof(
void *) <
sizeof(xxh_u64)) {
6703 m128.
high64 += (input_hi & 0xFFFFFFFF00000000ULL) + XXH_mult32to64((xxh_u32)input_hi,
XXH_PRIME32_2);
6748XXH3_len_0to16_128b(
const xxh_u8* input,
size_t len,
const xxh_u8* secret,
XXH64_hash_t seed)
6750 XXH_ASSERT(len <= 16);
6751 {
if (len > 8)
return XXH3_len_9to16_128b(input, len, secret, seed);
6752 if (len >= 4)
return XXH3_len_4to8_128b(input, len, secret, seed);
6753 if (len)
return XXH3_len_1to3_128b(input, len, secret, seed);
6755 xxh_u64
const bitflipl = XXH_readLE64(secret+64) ^ XXH_readLE64(secret+72);
6756 xxh_u64
const bitfliph = XXH_readLE64(secret+80) ^ XXH_readLE64(secret+88);
6757 h128.
low64 = XXH64_avalanche(seed ^ bitflipl);
6758 h128.
high64 = XXH64_avalanche( seed ^ bitfliph);
6767XXH128_mix32B(
XXH128_hash_t acc,
const xxh_u8* input_1,
const xxh_u8* input_2,
6770 acc.
low64 += XXH3_mix16B (input_1, secret+0, seed);
6771 acc.
low64 ^= XXH_readLE64(input_2) + XXH_readLE64(input_2 + 8);
6772 acc.
high64 += XXH3_mix16B (input_2, secret+16, seed);
6773 acc.
high64 ^= XXH_readLE64(input_1) + XXH_readLE64(input_1 + 8);
6779XXH3_len_17to128_128b(
const xxh_u8* XXH_RESTRICT input,
size_t len,
6780 const xxh_u8* XXH_RESTRICT secret,
size_t secretSize,
6784 XXH_ASSERT(16 < len && len <= 128);
6790#if XXH_SIZE_OPT >= 1
6793 unsigned int i = (
unsigned int)(len - 1) / 32;
6795 acc = XXH128_mix32B(acc, input+16*i, input+len-16*(i+1), secret+32*i, seed);
6802 acc = XXH128_mix32B(acc, input+48, input+len-64, secret+96, seed);
6804 acc = XXH128_mix32B(acc, input+32, input+len-48, secret+64, seed);
6806 acc = XXH128_mix32B(acc, input+16, input+len-32, secret+32, seed);
6808 acc = XXH128_mix32B(acc, input, input+len-16, secret, seed);
6823XXH3_len_129to240_128b(
const xxh_u8* XXH_RESTRICT input,
size_t len,
6824 const xxh_u8* XXH_RESTRICT secret,
size_t secretSize,
6840 for (i = 32; i < 160; i += 32) {
6841 acc = XXH128_mix32B(acc,
6854 for (i=160; i <= len; i += 32) {
6855 acc = XXH128_mix32B(acc,
6858 secret + XXH3_MIDSIZE_STARTOFFSET + i - 160,
6862 acc = XXH128_mix32B(acc,
6881XXH3_finalizeLong_128b(
const xxh_u64* XXH_RESTRICT acc,
const xxh_u8* XXH_RESTRICT secret,
size_t secretSize, xxh_u64 len)
6884 h128.
low64 = XXH3_finalizeLong_64b(acc, secret, len);
6885 h128.
high64 = XXH3_mergeAccs(acc, secret + secretSize
6886 - XXH_STRIPE_LEN - XXH_SECRET_MERGEACCS_START,
6892XXH3_hashLong_128b_internal(
const void* XXH_RESTRICT input,
size_t len,
6893 const xxh_u8* XXH_RESTRICT secret,
size_t secretSize,
6894 XXH3_f_accumulate f_acc,
6895 XXH3_f_scrambleAcc f_scramble)
6897 XXH_ALIGN(
XXH_ACC_ALIGN) xxh_u64 acc[XXH_ACC_NB] = XXH3_INIT_ACC;
6899 XXH3_hashLong_internal_loop(acc, (
const xxh_u8*)input, len, secret, secretSize, f_acc, f_scramble);
6902 XXH_STATIC_ASSERT(
sizeof(acc) == 64);
6903 XXH_ASSERT(secretSize >=
sizeof(acc) + XXH_SECRET_MERGEACCS_START);
6904 return XXH3_finalizeLong_128b(acc, secret, secretSize, (xxh_u64)len);
6911XXH3_hashLong_128b_default(
const void* XXH_RESTRICT input,
size_t len,
6913 const void* XXH_RESTRICT secret,
size_t secretLen)
6915 (void)seed64; (void)secret; (void)secretLen;
6916 return XXH3_hashLong_128b_internal(input, len, XXH3_kSecret,
sizeof(XXH3_kSecret),
6917 XXH3_accumulate, XXH3_scrambleAcc);
6928XXH3_hashLong_128b_withSecret(
const void* XXH_RESTRICT input,
size_t len,
6930 const void* XXH_RESTRICT secret,
size_t secretLen)
6933 return XXH3_hashLong_128b_internal(input, len, (
const xxh_u8*)secret, secretLen,
6934 XXH3_accumulate, XXH3_scrambleAcc);
6938XXH3_hashLong_128b_withSeed_internal(
const void* XXH_RESTRICT input,
size_t len,
6940 XXH3_f_accumulate f_acc,
6941 XXH3_f_scrambleAcc f_scramble,
6942 XXH3_f_initCustomSecret f_initSec)
6945 return XXH3_hashLong_128b_internal(input, len,
6946 XXH3_kSecret,
sizeof(XXH3_kSecret),
6948 { XXH_ALIGN(XXH_SEC_ALIGN) xxh_u8 secret[XXH_SECRET_DEFAULT_SIZE];
6949 f_initSec(secret, seed64);
6950 return XXH3_hashLong_128b_internal(input, len, (
const xxh_u8*)secret,
sizeof(secret),
6959XXH3_hashLong_128b_withSeed(
const void* input,
size_t len,
6960 XXH64_hash_t seed64,
const void* XXH_RESTRICT secret,
size_t secretLen)
6962 (void)secret; (void)secretLen;
6963 return XXH3_hashLong_128b_withSeed_internal(input, len, seed64,
6964 XXH3_accumulate, XXH3_scrambleAcc, XXH3_initCustomSecret);
6967typedef XXH128_hash_t (*XXH3_hashLong128_f)(
const void* XXH_RESTRICT, size_t,
6971XXH3_128bits_internal(
const void* input,
size_t len,
6972 XXH64_hash_t seed64,
const void* XXH_RESTRICT secret,
size_t secretLen,
6973 XXH3_hashLong128_f f_hl128)
6983 return XXH3_len_0to16_128b((
const xxh_u8*)input, len, (
const xxh_u8*)secret, seed64);
6985 return XXH3_len_17to128_128b((
const xxh_u8*)input, len, (
const xxh_u8*)secret, secretLen, seed64);
6987 return XXH3_len_129to240_128b((
const xxh_u8*)input, len, (
const xxh_u8*)secret, secretLen, seed64);
6988 return f_hl128(input, len, seed64, secret, secretLen);
6997 return XXH3_128bits_internal(input, len, 0,
6998 XXH3_kSecret,
sizeof(XXH3_kSecret),
6999 XXH3_hashLong_128b_default);
7006 return XXH3_128bits_internal(input, len, 0,
7007 (
const xxh_u8*)secret, secretSize,
7008 XXH3_hashLong_128b_withSecret);
7015 return XXH3_128bits_internal(input, len, seed,
7016 XXH3_kSecret,
sizeof(XXH3_kSecret),
7017 XXH3_hashLong_128b_withSeed);
7025 return XXH3_128bits_internal(input, len, seed, XXH3_kSecret,
sizeof(XXH3_kSecret), NULL);
7026 return XXH3_hashLong_128b_withSecret(input, len, seed, secret, secretSize);
7038#ifndef XXH_NO_STREAM
7082 const unsigned char*
const secret = (state->extSecret == NULL) ? state->customSecret : state->extSecret;
7085 XXH3_digest_long(acc, state, secret);
7086 XXH_ASSERT(state->secretLimit + XXH_STRIPE_LEN >=
sizeof(acc) + XXH_SECRET_MERGEACCS_START);
7087 return XXH3_finalizeLong_128b(acc, secret, state->secretLimit + XXH_STRIPE_LEN, (xxh_u64)state->totalLen);
7093 secret, state->secretLimit + XXH_STRIPE_LEN);
7105 return !(memcmp(&h1, &h2,
sizeof(h1)));
7119 if (hcmp)
return hcmp;
7135 XXH_memcpy((
char*)dst +
sizeof(hash.
high64), &hash.
low64,
sizeof(hash.
low64));
7143 h.
high64 = XXH_readBE64(src);
7144 h.
low64 = XXH_readBE64(src->digest + 8);
7154#define XXH_MIN(x, y) (((x) > (y)) ? (y) : (x))
7156XXH_FORCE_INLINE
void XXH3_combine16(
void* dst,
XXH128_hash_t h128)
7158 XXH_writeLE64( dst, XXH_readLE64(dst) ^ h128.
low64 );
7159 XXH_writeLE64( (
char*)dst+8, XXH_readLE64((
char*)dst+8) ^ h128.
high64 );
7164XXH3_generateSecret(XXH_NOESCAPE
void* secretBuffer,
size_t secretSize, XXH_NOESCAPE
const void* customSeed,
size_t customSeedSize)
7166#if (XXH_DEBUGLEVEL >= 1)
7167 XXH_ASSERT(secretBuffer != NULL);
7171 if (secretBuffer == NULL)
return XXH_ERROR;
7175 if (customSeedSize == 0) {
7176 customSeed = XXH3_kSecret;
7177 customSeedSize = XXH_SECRET_DEFAULT_SIZE;
7179#if (XXH_DEBUGLEVEL >= 1)
7180 XXH_ASSERT(customSeed != NULL);
7182 if (customSeed == NULL)
return XXH_ERROR;
7187 while (pos < secretSize) {
7188 size_t const toCopy = XXH_MIN((secretSize - pos), customSeedSize);
7189 memcpy((
char*)secretBuffer + pos, customSeed, toCopy);
7193 {
size_t const nbSeg16 = secretSize / 16;
7197 for (n=0; n<nbSeg16; n++) {
7199 XXH3_combine16((
char*)secretBuffer + n*16, h128);
7211 XXH_ALIGN(XXH_SEC_ALIGN) xxh_u8 secret[XXH_SECRET_DEFAULT_SIZE];
7212 XXH3_initCustomSecret(secret, seed);
7213 XXH_ASSERT(secretBuffer != NULL);
7214 memcpy(secretBuffer, secret, XXH_SECRET_DEFAULT_SIZE);
7220#if XXH_VECTOR == XXH_AVX2 \
7221 && defined(__GNUC__) && !defined(__clang__) \
7222 && defined(__OPTIMIZE__) && XXH_SIZE_OPT <= 0
7223# pragma GCC pop_options
7236#if defined (__cplusplus)
XXH_errorcode XXH32_reset(XXH32_state_t *statePtr, XXH32_hash_t seed)
Resets an XXH32_state_t to begin a new hash.
Definition xxhash.h:3221
XXH32_hash_t XXH32(const void *input, size_t length, XXH32_hash_t seed)
Calculates the 32-bit hash of input using xxHash32.
Definition xxhash.h:3180
XXH_errorcode XXH32_update(XXH32_state_t *statePtr, const void *input, size_t length)
Consumes a block of input to an XXH32_state_t.
Definition xxhash.h:3232
XXH32_state_t * XXH32_createState(void)
Allocates an XXH32_state_t.
Definition xxhash.h:3203
XXH_errorcode XXH32_freeState(XXH32_state_t *statePtr)
Frees an XXH32_state_t.
Definition xxhash.h:3208
void XXH32_canonicalFromHash(XXH32_canonical_t *dst, XXH32_hash_t hash)
Converts an XXH32_hash_t to a big endian XXH32_canonical_t.
Definition xxhash.h:3297
XXH32_hash_t XXH32_hashFromCanonical(const XXH32_canonical_t *src)
Converts an XXH32_canonical_t to a native XXH32_hash_t.
Definition xxhash.h:3304
XXH32_hash_t XXH32_digest(const XXH32_state_t *statePtr)
Returns the calculated hash value from an XXH32_state_t.
Definition xxhash.h:3278
void XXH32_copyState(XXH32_state_t *dst_state, const XXH32_state_t *src_state)
Copies one XXH32_state_t to another.
Definition xxhash.h:3215
#define XXH_PRIME32_2
Definition xxhash.h:2904
#define XXH_PRIME32_1
Definition xxhash.h:2903
#define XXH_PRIME32_5
Definition xxhash.h:2907
#define XXH_PRIME32_3
Definition xxhash.h:2905
XXH64_hash_t XXH3_64bits_digest(const XXH3_state_t *statePtr)
Returns the calculated XXH3 64-bit hash value from an XXH3_state_t.
Definition xxhash.h:6582
XXH_errorcode XXH3_128bits_update(XXH3_state_t *statePtr, const void *input, size_t length)
Consumes a block of input to an XXH3_state_t.
Definition xxhash.h:7074
void XXH3_generateSecret_fromSeed(void *secretBuffer, XXH64_hash_t seed)
Generate the same secret as the _withSeed() variants.
Definition xxhash.h:7209
void XXH128_canonicalFromHash(XXH128_canonical_t *dst, XXH128_hash_t hash)
Converts an XXH128_hash_t to a big endian XXH128_canonical_t.
Definition xxhash.h:7127
XXH64_hash_t XXH3_64bits_withSeed(const void *input, size_t length, XXH64_hash_t seed)
Calculates 64-bit seeded variant of XXH3 hash of input.
Definition xxhash.h:6199
int XXH128_cmp(const void *h128_1, const void *h128_2)
Compares two XXH128_hash_t.
Definition xxhash.h:7113
XXH128_hash_t XXH3_128bits_withSeed(const void *data, size_t len, XXH64_hash_t seed)
Calculates 128-bit seeded variant of XXH3 hash of data.
Definition xxhash.h:7013
XXH128_hash_t XXH128(const void *data, size_t len, XXH64_hash_t seed)
Calculates the 128-bit hash of data using XXH3.
Definition xxhash.h:7031
XXH_errorcode XXH3_generateSecret(void *secretBuffer, size_t secretSize, const void *customSeed, size_t customSeedSize)
Derive a high-entropy secret from any user-defined content, named customSeed.
Definition xxhash.h:7164
XXH_errorcode XXH3_64bits_reset_withSecretandSeed(XXH3_state_t *statePtr, const void *secret, size_t secretSize, XXH64_hash_t seed64)
Resets an XXH3_state_t with secret data to begin a new hash.
Definition xxhash.h:6385
XXH_errorcode XXH3_64bits_reset_withSeed(XXH3_state_t *statePtr, XXH64_hash_t seed)
Resets an XXH3_state_t with 64-bit seed to begin a new hash.
Definition xxhash.h:6373
XXH128_hash_t XXH3_128bits_digest(const XXH3_state_t *statePtr)
Returns the calculated XXH3 128-bit hash value from an XXH3_state_t.
Definition xxhash.h:7080
XXH_errorcode XXH3_64bits_reset_withSecret(XXH3_state_t *statePtr, const void *secret, size_t secretSize)
Resets an XXH3_state_t with secret data to begin a new hash.
Definition xxhash.h:6362
XXH3_state_t * XXH3_createState(void)
Allocate an XXH3_state_t.
Definition xxhash.h:6291
XXH_errorcode XXH3_128bits_reset_withSeed(XXH3_state_t *statePtr, XXH64_hash_t seed)
Resets an XXH3_state_t with 64-bit seed to begin a new hash.
Definition xxhash.h:7060
XXH128_hash_t XXH3_128bits(const void *data, size_t len)
Calculates 128-bit unseeded variant of XXH3 of data.
Definition xxhash.h:6995
XXH128_hash_t XXH3_128bits_withSecret(const void *data, size_t len, const void *secret, size_t secretSize)
Calculates 128-bit variant of XXH3 with a custom "secret".
Definition xxhash.h:7004
XXH_errorcode XXH3_128bits_reset_withSecretandSeed(XXH3_state_t *statePtr, const void *secret, size_t secretSize, XXH64_hash_t seed64)
Resets an XXH3_state_t with secret data to begin a new hash.
Definition xxhash.h:7067
XXH_errorcode XXH3_128bits_reset(XXH3_state_t *statePtr)
Resets an XXH3_state_t to begin a new hash.
Definition xxhash.h:7046
XXH128_hash_t XXH128_hashFromCanonical(const XXH128_canonical_t *src)
Converts an XXH128_canonical_t to a native XXH128_hash_t.
Definition xxhash.h:7140
void XXH3_copyState(XXH3_state_t *dst_state, const XXH3_state_t *src_state)
Copies one XXH3_state_t to another.
Definition xxhash.h:6319
XXH64_hash_t XXH3_64bits_withSecret(const void *data, size_t len, const void *secret, size_t secretSize)
Calculates 64-bit variant of XXH3 with a custom "secret".
Definition xxhash.h:6192
XXH64_hash_t XXH3_64bits(const void *input, size_t length)
Calculates 64-bit unseeded variant of XXH3 hash of input.
Definition xxhash.h:6185
XXH_errorcode XXH3_64bits_update(XXH3_state_t *statePtr, const void *input, size_t length)
Consumes a block of input to an XXH3_state_t.
Definition xxhash.h:6537
XXH128_hash_t XXH3_128bits_withSecretandSeed(const void *input, size_t length, const void *secret, size_t secretSize, XXH64_hash_t seed64)
Calculates 128-bit seeded variant of XXH3 hash of data.
Definition xxhash.h:7022
XXH_errorcode XXH3_64bits_reset(XXH3_state_t *statePtr)
Resets an XXH3_state_t to begin a new hash.
Definition xxhash.h:6353
int XXH128_isEqual(XXH128_hash_t h1, XXH128_hash_t h2)
Check equality of two XXH128_hash_t values.
Definition xxhash.h:7102
#define XXH3_SECRET_SIZE_MIN
Definition xxhash.h:1191
XXH_errorcode XXH3_freeState(XXH3_state_t *statePtr)
Frees an XXH3_state_t.
Definition xxhash.h:6311
XXH_errorcode XXH3_128bits_reset_withSecret(XXH3_state_t *statePtr, const void *secret, size_t secretSize)
Resets an XXH3_state_t with secret data to begin a new hash.
Definition xxhash.h:7053
XXH64_hash_t XXH3_64bits_withSecretandSeed(const void *data, size_t len, const void *secret, size_t secretSize, XXH64_hash_t seed)
Calculates 64/128-bit seeded variant of XXH3 hash of data.
Definition xxhash.h:6205
void XXH64_canonicalFromHash(XXH64_canonical_t *dst, XXH64_hash_t hash)
Converts an XXH64_hash_t to a big endian XXH64_canonical_t.
Definition xxhash.h:3792
void XXH64_copyState(XXH64_state_t *dst_state, const XXH64_state_t *src_state)
Copies one XXH64_state_t to another.
Definition xxhash.h:3712
XXH64_hash_t XXH64_hashFromCanonical(const XXH64_canonical_t *src)
Converts an XXH64_canonical_t to a native XXH64_hash_t.
Definition xxhash.h:3800
XXH64_hash_t XXH64(const void *input, size_t length, XXH64_hash_t seed)
Calculates the 64-bit hash of input using xxHash64.
Definition xxhash.h:3678
XXH_errorcode XXH64_freeState(XXH64_state_t *statePtr)
Frees an XXH64_state_t.
Definition xxhash.h:3705
XXH64_state_t * XXH64_createState(void)
Allocates an XXH64_state_t.
Definition xxhash.h:3700
XXH_errorcode XXH64_update(XXH64_state_t *statePtr, const void *input, size_t length)
Consumes a block of input to an XXH64_state_t.
Definition xxhash.h:3728
XXH64_hash_t XXH64_digest(const XXH64_state_t *statePtr)
Returns the calculated hash value from an XXH64_state_t.
Definition xxhash.h:3773
XXH_errorcode XXH64_reset(XXH64_state_t *statePtr, XXH64_hash_t seed)
Resets an XXH64_state_t to begin a new hash.
Definition xxhash.h:3718
#define XXH_PRIME64_1
Definition xxhash.h:3454
#define XXH_PRIME64_2
Definition xxhash.h:3455
#define XXH_PRIME64_4
Definition xxhash.h:3457
#define XXH_PRIME64_3
Definition xxhash.h:3456
#define XXH_PRIME64_5
Definition xxhash.h:3458
#define XXH_TARGET_SSE2
Allows a function to be compiled with SSE2 intrinsics.
Definition xxhash.h:5170
#define XXH_TARGET_AVX512
Like XXH_TARGET_SSE2, but for AVX512.
Definition xxhash.h:4961
#define XXH_TARGET_AVX2
Like XXH_TARGET_SSE2, but for AVX2.
Definition xxhash.h:5064
XXH_alignment
Definition xxhash.h:2831
@ XXH_aligned
Definition xxhash.h:2832
@ XXH_unaligned
Definition xxhash.h:2833
uint32_t XXH32_hash_t
An unsigned 32-bit integer.
Definition xxhash.h:587
XXH_errorcode
Exit code for the streaming API.
Definition xxhash.h:572
uint64_t XXH64_hash_t
An unsigned 64-bit integer.
Definition xxhash.h:866
#define XXH_PUBLIC_API
Marks a global symbol.
Definition xxhash.h:455
unsigned XXH_versionNumber(void)
Obtains the xxHash version.
Definition xxhash.h:2888
#define XXH_VERSION_NUMBER
Version number, encoded as two digits each.
Definition xxhash.h:552
@ XXH_ERROR
Definition xxhash.h:574
@ XXH_OK
Definition xxhash.h:573
#define XXH_ACC_ALIGN
Selects the minimum alignment for XXH3's accumulators.
Definition xxhash.h:3965
#define XXH_CPU_LITTLE_ENDIAN
Whether the target is little endian.
Definition xxhash.h:2708
#define XXH3_NEON_LANES
Controls the NEON to scalar ratio for XXH3.
Definition xxhash.h:4198
#define XXH32_ENDJMP
Whether to use a jump for XXH32_finalize.
Definition xxhash.h:2269
#define XXH_FORCE_ALIGN_CHECK
If defined to non-zero, adds a special path for aligned inputs (XXH32() and XXH64() only).
Definition xxhash.h:2218
The return value from 128-bit hashes.
Definition xxhash.h:1382
XXH64_hash_t low64
Definition xxhash.h:1383
XXH64_hash_t high64
Definition xxhash.h:1384
Canonical (big endian) representation of XXH32_hash_t.
Definition xxhash.h:754
XXH32_hash_t bufferedSize
Definition xxhash.h:1677
XXH32_hash_t total_len_32
Definition xxhash.h:1673
XXH32_hash_t large_len
Definition xxhash.h:1674
XXH32_hash_t reserved
Definition xxhash.h:1678
XXH32_hash_t acc[4]
Definition xxhash.h:1675
unsigned char buffer[16]
Definition xxhash.h:1676
const unsigned char * extSecret
Definition xxhash.h:1793
XXH32_hash_t bufferedSize
Definition xxhash.h:1777
XXH64_hash_t reserved64
Definition xxhash.h:1791
XXH64_hash_t totalLen
Definition xxhash.h:1783
size_t nbStripesSoFar
Definition xxhash.h:1781
XXH32_hash_t useSeed
Definition xxhash.h:1779
size_t secretLimit
Definition xxhash.h:1787
size_t nbStripesPerBlock
Definition xxhash.h:1785
XXH64_hash_t seed
Definition xxhash.h:1789
unsigned char buffer[XXH3_INTERNALBUFFER_SIZE]
Definition xxhash.h:1775
unsigned char customSecret[XXH3_SECRET_DEFAULT_SIZE]
Definition xxhash.h:1773
XXH64_hash_t acc[8]
Definition xxhash.h:1771
Canonical (big endian) representation of XXH64_hash_t.
Definition xxhash.h:1028
unsigned char buffer[32]
Definition xxhash.h:1699
XXH32_hash_t reserved32
Definition xxhash.h:1701
XXH64_hash_t acc[4]
Definition xxhash.h:1698
XXH64_hash_t reserved64
Definition xxhash.h:1702
XXH32_hash_t bufferedSize
Definition xxhash.h:1700
XXH64_hash_t total_len
Definition xxhash.h:1697
#define XXH3_INTERNALBUFFER_SIZE
The size of the internal XXH3 buffer.
Definition xxhash.h:1736
#define XXH3_MIDSIZE_MAX
Maximum size of "short" key in bytes.
Definition xxhash.h:1945
#define XXH3_INITSTATE(XXH3_state_ptr)
Initializes a stack-allocated XXH3_state_s.
Definition xxhash.h:1812