@@ -92,9 +92,8 @@ FORCE_INLINE uint64_t fmix64 ( uint64_t k )
 //-----------------------------------------------------------------------------

 void MurmurHash3_x86_32 ( const void * key, int len,
-                          uint32_t seed, void * out )
-{
-  const uint8_t * data = (const uint8_t*)key;
+                          uint32_t seed, void * out ) {
+  const uint8_t * data = static_cast<const uint8_t*>(key);
   const int nblocks = len / 4;

   uint32_t h1 = seed;
@@ -105,7 +104,7 @@ void MurmurHash3_x86_32 ( const void * key, int len,
   //----------
   // body

-  const uint32_t * blocks = (const uint32_t *)(data + nblocks*4);
+  const uint32_t * blocks = reinterpret_cast<const uint32_t *>(data + nblocks*4);

   for(int i = -nblocks; i; i++)
   {
@@ -123,26 +122,28 @@ void MurmurHash3_x86_32 ( const void * key, int len,
   //----------
   // tail

-  const uint8_t * tail = (const uint8_t*)(data + nblocks*4);
+  const uint8_t * tail = (data + nblocks*4);

   uint32_t k1 = 0;

-  switch(len & 3)
-  {
-  case 3: k1 ^= tail[2] << 16;
-  case 2: k1 ^= tail[1] << 8;
-  case 1: k1 ^= tail[0];
-          k1 *= c1; k1 = ROTL32(k1,15); k1 *= c2; h1 ^= k1;
+  switch(len & 3) {
+  case 3: k1 ^= static_cast<uint32_t>(tail[2]) << 16;
+  case 2: k1 ^= static_cast<uint32_t>(tail[1]) << 8;
+  case 1: k1 ^= static_cast<uint32_t>(tail[0]);
+          k1 *= c1; k1 = ROTL32(k1,15); k1 *= c2; h1 ^= k1;
+          break;
+  default:
+          break;
   };

   //----------
   // finalization

-  h1 ^= len;
+  h1 ^= static_cast<uint32_t>(len);

   h1 = fmix32(h1);

-  *(uint32_t*)out = h1;
+  *static_cast<uint32_t*>(out) = h1;
 }

 //-----------------------------------------------------------------------------
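
Note: the fall-through between the case labels above is intentional (each
trailing byte is folded into k1 before the final mix), and the added
"break; default: break;" only makes the switch's exit explicit for compilers
that warn on a missing default; it does not change the hash. Under C++17 the
same intent could be spelled with [[fallthrough]] instead. A standalone
sketch of that alternative, not part of this patch; rotl32 and mix_tail are
hypothetical local helpers, and c1/c2 are the x86_32 constants from
MurmurHash3.cpp:

#include <cstdint>

// Hypothetical C++17 rendering of the x86_32 tail handling.
inline uint32_t rotl32 ( uint32_t x, int8_t r )
{
  return (x << r) | (x >> (32 - r));
}

inline uint32_t mix_tail ( const uint8_t * tail, int len, uint32_t h1 )
{
  const uint32_t c1 = 0xcc9e2d51;
  const uint32_t c2 = 0x1b873593;

  uint32_t k1 = 0;

  switch(len & 3) {
  case 3: k1 ^= static_cast<uint32_t>(tail[2]) << 16;
          [[fallthrough]];              // byte 2 accumulates before byte 1
  case 2: k1 ^= static_cast<uint32_t>(tail[1]) << 8;
          [[fallthrough]];              // byte 1 accumulates before byte 0
  case 1: k1 ^= static_cast<uint32_t>(tail[0]);
          k1 *= c1; k1 = rotl32(k1,15); k1 *= c2; h1 ^= k1;
  }

  return h1;
}
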
@@ -150,7 +151,7 @@ void MurmurHash3_x86_32 ( const void * key, int len,
 void MurmurHash3_x86_128 ( const void * key, const int len,
                            uint32_t seed, void * out )
 {
-  const uint8_t * data = (const uint8_t*)key;
+  const uint8_t * data = static_cast<const uint8_t*>(key);
   const int nblocks = len / 16;

   uint32_t h1 = seed;
@@ -158,15 +159,15 @@ void MurmurHash3_x86_128 ( const void * key, const int len,
   uint32_t h3 = seed;
   uint32_t h4 = seed;

-  const uint32_t c1 = 0x239b961b; 
+  const uint32_t c1 = 0x239b961b;
   const uint32_t c2 = 0xab0e9789;
-  const uint32_t c3 = 0x38b34ae5; 
+  const uint32_t c3 = 0x38b34ae5;
   const uint32_t c4 = 0xa1e38b93;

   //----------
   // body

-  const uint32_t * blocks = (const uint32_t *)(data + nblocks*16);
+  const uint32_t * blocks = reinterpret_cast<const uint32_t *>(data + nblocks*16);

   for(int i = -nblocks; i; i++)
   {
@@ -195,43 +196,45 @@ void MurmurHash3_x86_128 ( const void * key, const int len,
   //----------
   // tail

-  const uint8_t * tail = (const uint8_t*)(data + nblocks*16);
+  const uint8_t * tail = (data + nblocks*16);

   uint32_t k1 = 0;
   uint32_t k2 = 0;
   uint32_t k3 = 0;
   uint32_t k4 = 0;

-  switch(len & 15)
-  {
-  case 15: k4 ^= tail[14] << 16;
-  case 14: k4 ^= tail[13] << 8;
-  case 13: k4 ^= tail[12] << 0;
+  switch(len & 15) {
+  case 15: k4 ^= static_cast<uint32_t>(tail[14]) << 16;
+  case 14: k4 ^= static_cast<uint32_t>(tail[13]) << 8;
+  case 13: k4 ^= static_cast<uint32_t>(tail[12]) << 0;
            k4 *= c4; k4 = ROTL32(k4,18); k4 *= c1; h4 ^= k4;

-  case 12: k3 ^= tail[11] << 24;
-  case 11: k3 ^= tail[10] << 16;
-  case 10: k3 ^= tail[ 9] << 8;
-  case  9: k3 ^= tail[ 8] << 0;
+  case 12: k3 ^= static_cast<uint32_t>(tail[11]) << 24;
+  case 11: k3 ^= static_cast<uint32_t>(tail[10]) << 16;
+  case 10: k3 ^= static_cast<uint32_t>(tail[ 9]) << 8;
+  case  9: k3 ^= static_cast<uint32_t>(tail[ 8]) << 0;
            k3 *= c3; k3 = ROTL32(k3,17); k3 *= c4; h3 ^= k3;

-  case  8: k2 ^= tail[ 7] << 24;
-  case  7: k2 ^= tail[ 6] << 16;
-  case  6: k2 ^= tail[ 5] << 8;
-  case  5: k2 ^= tail[ 4] << 0;
+  case  8: k2 ^= static_cast<uint32_t>(tail[ 7]) << 24;
+  case  7: k2 ^= static_cast<uint32_t>(tail[ 6]) << 16;
+  case  6: k2 ^= static_cast<uint32_t>(tail[ 5]) << 8;
+  case  5: k2 ^= static_cast<uint32_t>(tail[ 4]) << 0;
            k2 *= c2; k2 = ROTL32(k2,16); k2 *= c3; h2 ^= k2;

-  case  4: k1 ^= tail[ 3] << 24;
-  case  3: k1 ^= tail[ 2] << 16;
-  case  2: k1 ^= tail[ 1] << 8;
-  case  1: k1 ^= tail[ 0] << 0;
+  case  4: k1 ^= static_cast<uint32_t>(tail[ 3]) << 24;
+  case  3: k1 ^= static_cast<uint32_t>(tail[ 2]) << 16;
+  case  2: k1 ^= static_cast<uint32_t>(tail[ 1]) << 8;
+  case  1: k1 ^= static_cast<uint32_t>(tail[ 0]) << 0;
            k1 *= c1; k1 = ROTL32(k1,15); k1 *= c2; h1 ^= k1;
+           break;
+  default:
+           break;
   };

   //----------
   // finalization

-  h1 ^= len; h2 ^= len; h3 ^= len; h4 ^= len;
+  h1 ^= static_cast<uint32_t>(len); h2 ^= static_cast<uint32_t>(len); h3 ^= static_cast<uint32_t>(len); h4 ^= static_cast<uint32_t>(len);

   h1 += h2; h1 += h3; h1 += h4;
   h2 += h1; h3 += h1; h4 += h1;
@@ -244,18 +247,17 @@ void MurmurHash3_x86_128 ( const void * key, const int len,
   h1 += h2; h1 += h3; h1 += h4;
   h2 += h1; h3 += h1; h4 += h1;

-  ((uint32_t*)out)[0] = h1;
-  ((uint32_t*)out)[1] = h2;
-  ((uint32_t*)out)[2] = h3;
-  ((uint32_t*)out)[3] = h4;
+  (static_cast<uint32_t*>(out))[0] = h1;
+  (static_cast<uint32_t*>(out))[1] = h2;
+  (static_cast<uint32_t*>(out))[2] = h3;
+  (static_cast<uint32_t*>(out))[3] = h4;
 }

 //-----------------------------------------------------------------------------

 void MurmurHash3_x64_128 ( const void * key, const int len,
-                           const uint32_t seed, void * out )
-{
-  const uint8_t * data = (const uint8_t*)key;
+                           const uint32_t seed, void * out ) {
+  const uint8_t * data = static_cast<const uint8_t*>(key);
   const int nblocks = len / 16;

   uint64_t h1 = seed;
@@ -267,10 +269,9 @@ void MurmurHash3_x64_128 ( const void * key, const int len,
   //----------
   // body

-  const uint64_t * blocks = (const uint64_t *)(data);
+  const uint64_t * blocks = reinterpret_cast<const uint64_t *>(data);

-  for(int i = 0; i < nblocks; i++)
-  {
+  for(int i = 0; i < nblocks; i++) {
     uint64_t k1 = getblock64(blocks,i*2+0);
     uint64_t k2 = getblock64(blocks,i*2+1);

@@ -286,37 +287,39 @@ void MurmurHash3_x64_128 ( const void * key, const int len,
   //----------
   // tail

-  const uint8_t * tail = (const uint8_t*)(data + nblocks*16);
+  const uint8_t * tail = (data + nblocks*16);

   uint64_t k1 = 0;
   uint64_t k2 = 0;

-  switch(len & 15)
-  {
-  case 15: k2 ^= ((uint64_t)tail[14]) << 48;
-  case 14: k2 ^= ((uint64_t)tail[13]) << 40;
-  case 13: k2 ^= ((uint64_t)tail[12]) << 32;
-  case 12: k2 ^= ((uint64_t)tail[11]) << 24;
-  case 11: k2 ^= ((uint64_t)tail[10]) << 16;
-  case 10: k2 ^= ((uint64_t)tail[ 9]) << 8;
-  case  9: k2 ^= ((uint64_t)tail[ 8]) << 0;
-           k2 *= c2; k2 = ROTL64(k2,33); k2 *= c1; h2 ^= k2;
-
-  case  8: k1 ^= ((uint64_t)tail[ 7]) << 56;
-  case  7: k1 ^= ((uint64_t)tail[ 6]) << 48;
-  case  6: k1 ^= ((uint64_t)tail[ 5]) << 40;
-  case  5: k1 ^= ((uint64_t)tail[ 4]) << 32;
-  case  4: k1 ^= ((uint64_t)tail[ 3]) << 24;
-  case  3: k1 ^= ((uint64_t)tail[ 2]) << 16;
-  case  2: k1 ^= ((uint64_t)tail[ 1]) << 8;
-  case  1: k1 ^= ((uint64_t)tail[ 0]) << 0;
-           k1 *= c1; k1 = ROTL64(k1,31); k1 *= c2; h1 ^= k1;
+  switch(len & 15) {
+  case 15: k2 ^= static_cast<uint64_t>(tail[14]) << 48;
+  case 14: k2 ^= static_cast<uint64_t>(tail[13]) << 40;
+  case 13: k2 ^= static_cast<uint64_t>(tail[12]) << 32;
+  case 12: k2 ^= static_cast<uint64_t>(tail[11]) << 24;
+  case 11: k2 ^= static_cast<uint64_t>(tail[10]) << 16;
+  case 10: k2 ^= static_cast<uint64_t>(tail[ 9]) << 8;
+  case  9: k2 ^= static_cast<uint64_t>(tail[ 8]) << 0;
+           k2 *= c2; k2 = ROTL64(k2,33); k2 *= c1; h2 ^= k2;
+
+  case  8: k1 ^= static_cast<uint64_t>(tail[ 7]) << 56;
+  case  7: k1 ^= static_cast<uint64_t>(tail[ 6]) << 48;
+  case  6: k1 ^= static_cast<uint64_t>(tail[ 5]) << 40;
+  case  5: k1 ^= static_cast<uint64_t>(tail[ 4]) << 32;
+  case  4: k1 ^= static_cast<uint64_t>(tail[ 3]) << 24;
+  case  3: k1 ^= static_cast<uint64_t>(tail[ 2]) << 16;
+  case  2: k1 ^= static_cast<uint64_t>(tail[ 1]) << 8;
+  case  1: k1 ^= static_cast<uint64_t>(tail[ 0]) << 0;
+           k1 *= c1; k1 = ROTL64(k1,31); k1 *= c2; h1 ^= k1;
+           break;
+  default:
+           break;
   };

   //----------
   // finalization

-  h1 ^= len; h2 ^= len;
+  h1 ^= static_cast<uint64_t>(len); h2 ^= static_cast<uint64_t>(len);

   h1 += h2;
   h2 += h1;
@@ -327,8 +330,8 @@ void MurmurHash3_x64_128 ( const void * key, const int len,
   h1 += h2;
   h2 += h1;

-  ((uint64_t*)out)[0] = h1;
-  ((uint64_t*)out)[1] = h2;
+  (static_cast<uint64_t*>(out))[0] = h1;
+  (static_cast<uint64_t*>(out))[1] = h2;
 }

 //-----------------------------------------------------------------------------
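
Because every hunk above is meant to be behavior-preserving (cast style,
brace placement, and whitespace only), the change can be vetted by hashing a
few keys before and after the patch and comparing output. A minimal driver,
assuming the function declarations in MurmurHash3.h from this repository;
the key string and seed below are illustrative only:

#include <cstdint>
#include <cstdio>
#include <cstring>

#include "MurmurHash3.h"

int main()
{
  const char key[] = "The quick brown fox jumps over the lazy dog";
  const int len = static_cast<int>(std::strlen(key));
  const uint32_t seed = 42;

  // 32-bit variant: writes 4 bytes through out.
  uint32_t h32 = 0;
  MurmurHash3_x86_32(key, len, seed, &h32);
  std::printf("x86_32 : %08x\n", h32);

  // 128-bit variants: write 16 bytes through out.
  uint32_t h_x86[4] = {};
  MurmurHash3_x86_128(key, len, seed, h_x86);
  std::printf("x86_128: %08x %08x %08x %08x\n",
              h_x86[0], h_x86[1], h_x86[2], h_x86[3]);

  uint64_t h_x64[2] = {};
  MurmurHash3_x64_128(key, len, seed, h_x64);
  std::printf("x64_128: %016llx %016llx\n",
              static_cast<unsigned long long>(h_x64[0]),
              static_cast<unsigned long long>(h_x64[1]));

  return 0;
}

A pre-patch and a post-patch build of this driver should print identical
lines for any key and seed.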