@@ -137,8 +137,8 @@ void MurmurHash3_x86_32 ( const void * key, int len,
   uint32_t k1 = 0;
 
   switch(len & 3) {
-  case 3: k1 ^= static_cast<unsigned int>(tail[2]) << 16;
-  case 2: k1 ^= static_cast<unsigned int>(tail[1]) << 8;
+  case 3: k1 ^= static_cast<unsigned int>(tail[2]) << 16; // fall-through
+  case 2: k1 ^= static_cast<unsigned int>(tail[1]) << 8;  // fall-through
   case 1: k1 ^= static_cast<unsigned int>(tail[0]);
           k1 *= c1; k1 = ROTL32(k1,15); k1 *= c2; h1 ^= k1;
           break;
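
The `// fall-through` comments satisfy GCC's -Wimplicit-fallthrough (enabled by -Wextra), which at its default level recognizes comments like `// fall-through` placed before the next case label. A minimal standalone sketch, not part of this patch, of the same tail-mixing idiom using the standard C++17 [[fallthrough]] attribute instead, which does not depend on comment matching; the function name is hypothetical and the mixing step is left to the caller:

  #include <cstdint>

  // Sketch only: pack the 1-3 trailing bytes into k1 the same way the
  // x86_32 switch above does, using [[fallthrough]] (C++17) rather than
  // a magic comment to mark the intentional fall-through.
  inline uint32_t pack_tail_x86_32(const uint8_t* tail, int len, uint32_t k1)
  {
    switch (len & 3) {
    case 3: k1 ^= static_cast<uint32_t>(tail[2]) << 16; [[fallthrough]];
    case 2: k1 ^= static_cast<uint32_t>(tail[1]) << 8;  [[fallthrough]];
    case 1: k1 ^= static_cast<uint32_t>(tail[0]);
            break;
    }
    return k1;
  }
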
@@ -215,26 +215,26 @@ void MurmurHash3_x86_128 ( const void * key, const int len,
 
   // These are supposed to fall through
   switch(len & 15) {
-  case 15: k4 ^= static_cast<unsigned int>(tail[14]) << 16;
-  case 14: k4 ^= static_cast<unsigned int>(tail[13]) << 8;
+  case 15: k4 ^= static_cast<unsigned int>(tail[14]) << 16; // fall-through
+  case 14: k4 ^= static_cast<unsigned int>(tail[13]) << 8;  // fall-through
   case 13: k4 ^= static_cast<unsigned int>(tail[12]) << 0;
            k4 *= c4; k4 = ROTL32(k4,18); k4 *= c1; h4 ^= k4;
 
-  case 12: k3 ^= static_cast<unsigned int>(tail[11]) << 24;
-  case 11: k3 ^= static_cast<unsigned int>(tail[10]) << 16;
-  case 10: k3 ^= static_cast<unsigned int>(tail[ 9]) << 8;
+  case 12: k3 ^= static_cast<unsigned int>(tail[11]) << 24; // fall-through
+  case 11: k3 ^= static_cast<unsigned int>(tail[10]) << 16; // fall-through
+  case 10: k3 ^= static_cast<unsigned int>(tail[ 9]) << 8;  // fall-through
   case  9: k3 ^= static_cast<unsigned int>(tail[ 8]) << 0;
            k3 *= c3; k3 = ROTL32(k3,17); k3 *= c4; h3 ^= k3;
 
-  case  8: k2 ^= static_cast<unsigned int>(tail[ 7]) << 24;
-  case  7: k2 ^= static_cast<unsigned int>(tail[ 6]) << 16;
-  case  6: k2 ^= static_cast<unsigned int>(tail[ 5]) << 8;
+  case  8: k2 ^= static_cast<unsigned int>(tail[ 7]) << 24; // fall-through
+  case  7: k2 ^= static_cast<unsigned int>(tail[ 6]) << 16; // fall-through
+  case  6: k2 ^= static_cast<unsigned int>(tail[ 5]) << 8;  // fall-through
   case  5: k2 ^= static_cast<unsigned int>(tail[ 4]) << 0;
            k2 *= c2; k2 = ROTL32(k2,16); k2 *= c3; h2 ^= k2;
 
-  case  4: k1 ^= static_cast<unsigned int>(tail[ 3]) << 24;
-  case  3: k1 ^= static_cast<unsigned int>(tail[ 2]) << 16;
-  case  2: k1 ^= static_cast<unsigned int>(tail[ 1]) << 8;
+  case  4: k1 ^= static_cast<unsigned int>(tail[ 3]) << 24; // fall-through
+  case  3: k1 ^= static_cast<unsigned int>(tail[ 2]) << 16; // fall-through
+  case  2: k1 ^= static_cast<unsigned int>(tail[ 1]) << 8;  // fall-through
   case  1: k1 ^= static_cast<unsigned int>(tail[ 0]) << 0;
            k1 *= c1; k1 = ROTL32(k1,15); k1 *= c2; h1 ^= k1;
            break;
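
For context, a usage sketch of the function touched above, assuming the upstream smhasher prototype `void MurmurHash3_x86_128 ( const void * key, const int len, uint32_t seed, void * out )` and header name; the 128-bit digest is written to `out` as four 32-bit words:

  #include <cstdint>
  #include <cstdio>
  #include <cstring>
  #include "MurmurHash3.h" // assumed header name from the smhasher tree

  int main()
  {
    const char* msg = "hello, murmur";
    uint32_t out[4] = {0, 0, 0, 0}; // 128-bit digest as four 32-bit words
    MurmurHash3_x86_128(msg, static_cast<int>(std::strlen(msg)), 42u, out);
    std::printf("%08x%08x%08x%08x\n", out[0], out[1], out[2], out[3]);
    return 0;
  }
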
@@ -304,22 +304,22 @@ void MurmurHash3_x64_128 ( const void * key, const int len,
   uint64_t k2 = 0;
 
   switch(len & 15) {
-  case 15: k2 ^= (static_cast<uint64_t>(tail[14])) << 48;
-  case 14: k2 ^= (static_cast<uint64_t>(tail[13])) << 40;
-  case 13: k2 ^= (static_cast<uint64_t>(tail[12])) << 32;
-  case 12: k2 ^= (static_cast<uint64_t>(tail[11])) << 24;
-  case 11: k2 ^= (static_cast<uint64_t>(tail[10])) << 16;
-  case 10: k2 ^= (static_cast<uint64_t>(tail[ 9])) << 8;
+  case 15: k2 ^= (static_cast<uint64_t>(tail[14])) << 48; // fall-through
+  case 14: k2 ^= (static_cast<uint64_t>(tail[13])) << 40; // fall-through
+  case 13: k2 ^= (static_cast<uint64_t>(tail[12])) << 32; // fall-through
+  case 12: k2 ^= (static_cast<uint64_t>(tail[11])) << 24; // fall-through
+  case 11: k2 ^= (static_cast<uint64_t>(tail[10])) << 16; // fall-through
+  case 10: k2 ^= (static_cast<uint64_t>(tail[ 9])) << 8;  // fall-through
   case  9: k2 ^= (static_cast<uint64_t>(tail[ 8])) << 0;
            k2 *= c2; k2 = ROTL64(k2,33); k2 *= c1; h2 ^= k2;
 
-  case  8: k1 ^= (static_cast<uint64_t>(tail[ 7])) << 56;
-  case  7: k1 ^= (static_cast<uint64_t>(tail[ 6])) << 48;
-  case  6: k1 ^= (static_cast<uint64_t>(tail[ 5])) << 40;
-  case  5: k1 ^= (static_cast<uint64_t>(tail[ 4])) << 32;
-  case  4: k1 ^= (static_cast<uint64_t>(tail[ 3])) << 24;
-  case  3: k1 ^= (static_cast<uint64_t>(tail[ 2])) << 16;
-  case  2: k1 ^= (static_cast<uint64_t>(tail[ 1])) << 8;
+  case  8: k1 ^= (static_cast<uint64_t>(tail[ 7])) << 56; // fall-through
+  case  7: k1 ^= (static_cast<uint64_t>(tail[ 6])) << 48; // fall-through
+  case  6: k1 ^= (static_cast<uint64_t>(tail[ 5])) << 40; // fall-through
+  case  5: k1 ^= (static_cast<uint64_t>(tail[ 4])) << 32; // fall-through
+  case  4: k1 ^= (static_cast<uint64_t>(tail[ 3])) << 24; // fall-through
+  case  3: k1 ^= (static_cast<uint64_t>(tail[ 2])) << 16; // fall-through
+  case  2: k1 ^= (static_cast<uint64_t>(tail[ 1])) << 8;  // fall-through
   case  1: k1 ^= (static_cast<uint64_t>(tail[ 0])) << 0;
            k1 *= c1; k1 = ROTL64(k1,31); k1 *= c2; h1 ^= k1;
            break;
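
The unrolled switches above are equivalent to a little-endian byte-packing loop over the `len & 15` trailing bytes, which is why every case except the last must fall through: each extra tail byte adds one more XOR on top of the shorter tails. A sketch of that equivalence for the x64 variant, not part of this patch, with the hypothetical name `pack_tail_x64` and the k1/k2 mixing steps omitted:

  #include <cstdint>

  // Sketch only: loop form of the x64_128 tail packing. Byte i of the
  // tail lands in k1 for i < 8 and in k2 otherwise, at the same shift
  // the unrolled switch uses.
  static void pack_tail_x64(const uint8_t* tail, int len, uint64_t& k1, uint64_t& k2)
  {
    k1 = 0; k2 = 0;
    const int rem = len & 15; // bytes left over after the 16-byte blocks
    for (int i = 0; i < rem; ++i) {
      const uint64_t b = static_cast<uint64_t>(tail[i]);
      if (i < 8) k1 ^= b << (8 * i);
      else       k2 ^= b << (8 * (i - 8));
    }
  }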