Reorder function parameters so that hash state is the first argument.
Motivation: passing the hash state first avoids unnecessary moves between registers, since (a) it matches the argument order in CombineContiguousImpl, and (b) the hash state is also the return value (on ARM64, the return value and the first argument use the same register, x0). See the [example assembly diff](https://godbolt.org/z/c1h5dMe9K) for a related change, and the illustrative sketch below.
PiperOrigin-RevId: 842309048
Change-Id: I5b1f0fb381728ced2b3fba53fb9adbc0e4a45189
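
For illustration only (this sketch is not part of the change): `HashBlock` below is a hypothetical stand-in for functions like `HashBlockOn64Bit`, reduced to show the calling-convention effect described above. The body is arbitrary and exists only so the example compiles; `__attribute__((noinline))` is GCC/Clang-specific and keeps the calls from being inlined away.

```cpp
#include <cstddef>
#include <cstdint>

// Hypothetical stand-in for a block hash; takes the state first, as in
// the new parameter order.
__attribute__((noinline)) uint64_t HashBlock(uint64_t state,
                                             const unsigned char* data,
                                             size_t len) {
  for (size_t i = 0; i < len; ++i) state = state * 31 + data[i];
  return state;
}

// Under the AAPCS64 calling convention, the return value and the first
// integer argument both live in x0. With the state first, the state
// returned by one call is already in the register the next call expects,
// so chaining calls needs no register moves, and the final call can be a
// tail call. With the old order (state last), each returned state would
// have to be moved from x0 into x2 before the next call.
uint64_t CombineTwoBlocks(uint64_t state, const unsigned char* a,
                          const unsigned char* b, size_t len) {
  state = HashBlock(state, a, len);  // updated state arrives in x0
  return HashBlock(state, b, len);   // and is passed along in x0
}
```

The same reasoning applies to the dispatch chain below (CombineContiguousImpl -> CombineLargeContiguousImpl* -> HashBlockOn*Bit -> SplitAndCombineOn*Bit): with a consistent state-first order, the state stays in x0 across the whole chain.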
diff --git a/absl/hash/internal/hash.cc b/absl/hash/internal/hash.cc
index 35ab0a2..6b02f19 100644
--- a/absl/hash/internal/hash.cc
+++ b/absl/hash/internal/hash.cc
@@ -58,7 +58,7 @@
}
#ifdef ABSL_AES_INTERNAL_HAVE_X86_SIMD
-uint64_t LowLevelHash33To64(const uint8_t* ptr, size_t len, uint64_t seed) {
+uint64_t LowLevelHash33To64(uint64_t seed, const uint8_t* ptr, size_t len) {
assert(len > 32);
assert(len <= 64);
__m128i state =
@@ -91,7 +91,7 @@
return x64 ^ y64;
}
#else
-uint64_t LowLevelHash33To64(const uint8_t* ptr, size_t len, uint64_t seed) {
+uint64_t LowLevelHash33To64(uint64_t seed, const uint8_t* ptr, size_t len) {
assert(len > 32);
assert(len <= 64);
uint64_t current_state = seed ^ kStaticRandomData[0] ^ len;
@@ -101,7 +101,7 @@
#endif // ABSL_AES_INTERNAL_HAVE_X86_SIMD
[[maybe_unused]] ABSL_ATTRIBUTE_NOINLINE uint64_t
-LowLevelHashLenGt64(const void* data, size_t len, uint64_t seed) {
+LowLevelHashLenGt64(uint64_t seed, const void* data, size_t len) {
assert(len > 64);
const uint8_t* ptr = static_cast<const uint8_t*>(data);
uint64_t current_state = seed ^ kStaticRandomData[0] ^ len;
@@ -149,17 +149,17 @@
return Mix32Bytes(last_32_ptr, current_state);
}
-[[maybe_unused]] uint64_t LowLevelHashLenGt32(const void* data, size_t len,
- uint64_t seed) {
+[[maybe_unused]] uint64_t LowLevelHashLenGt32(uint64_t seed, const void* data,
+ size_t len) {
assert(len > 32);
if (ABSL_PREDICT_FALSE(len > 64)) {
- return LowLevelHashLenGt64(data, len, seed);
+ return LowLevelHashLenGt64(seed, data, len);
}
- return LowLevelHash33To64(static_cast<const uint8_t*>(data), len, seed);
+ return LowLevelHash33To64(seed, static_cast<const uint8_t*>(data), len);
}
ABSL_ATTRIBUTE_ALWAYS_INLINE inline uint64_t HashBlockOn32Bit(
- const unsigned char* data, size_t len, uint64_t state) {
+ uint64_t state, const unsigned char* data, size_t len) {
// TODO(b/417141985): expose and use CityHash32WithSeed.
// Note: we can't use PrecombineLengthMix here because len can be up to 1024.
return CombineRawImpl(
@@ -168,9 +168,9 @@
}
ABSL_ATTRIBUTE_NOINLINE uint64_t
-SplitAndCombineOn32Bit(const unsigned char* first, size_t len, uint64_t state) {
+SplitAndCombineOn32Bit(uint64_t state, const unsigned char* first, size_t len) {
while (len >= PiecewiseChunkSize()) {
- state = HashBlockOn32Bit(first, PiecewiseChunkSize(), state);
+ state = HashBlockOn32Bit(state, first, PiecewiseChunkSize());
len -= PiecewiseChunkSize();
first += PiecewiseChunkSize();
}
@@ -185,9 +185,9 @@
}
ABSL_ATTRIBUTE_ALWAYS_INLINE inline uint64_t HashBlockOn64Bit(
- const unsigned char* data, size_t len, uint64_t state) {
+ uint64_t state, const unsigned char* data, size_t len) {
#ifdef ABSL_HAVE_INTRINSIC_INT128
- return LowLevelHashLenGt32(data, len, state);
+ return LowLevelHashLenGt32(state, data, len);
#else
return hash_internal::CityHash64WithSeed(reinterpret_cast<const char*>(data),
len, state);
@@ -195,9 +195,9 @@
}
ABSL_ATTRIBUTE_NOINLINE uint64_t
-SplitAndCombineOn64Bit(const unsigned char* first, size_t len, uint64_t state) {
+SplitAndCombineOn64Bit(uint64_t state, const unsigned char* first, size_t len) {
while (len >= PiecewiseChunkSize()) {
- state = HashBlockOn64Bit(first, PiecewiseChunkSize(), state);
+ state = HashBlockOn64Bit(state, first, PiecewiseChunkSize());
len -= PiecewiseChunkSize();
first += PiecewiseChunkSize();
}
@@ -213,26 +213,26 @@
} // namespace
-uint64_t CombineLargeContiguousImplOn32BitLengthGt8(const unsigned char* first,
- size_t len,
- uint64_t state) {
+uint64_t CombineLargeContiguousImplOn32BitLengthGt8(uint64_t state,
+ const unsigned char* first,
+ size_t len) {
assert(len > 8);
assert(sizeof(size_t) == 4); // NOLINT(misc-static-assert)
if (ABSL_PREDICT_TRUE(len <= PiecewiseChunkSize())) {
- return HashBlockOn32Bit(first, len, state);
+ return HashBlockOn32Bit(state, first, len);
}
- return SplitAndCombineOn32Bit(first, len, state);
+ return SplitAndCombineOn32Bit(state, first, len);
}
-uint64_t CombineLargeContiguousImplOn64BitLengthGt32(const unsigned char* first,
- size_t len,
- uint64_t state) {
+uint64_t CombineLargeContiguousImplOn64BitLengthGt32(uint64_t state,
+ const unsigned char* first,
+ size_t len) {
assert(len > 32);
assert(sizeof(size_t) == 8); // NOLINT(misc-static-assert)
if (ABSL_PREDICT_TRUE(len <= PiecewiseChunkSize())) {
- return HashBlockOn64Bit(first, len, state);
+ return HashBlockOn64Bit(state, first, len);
}
- return SplitAndCombineOn64Bit(first, len, state);
+ return SplitAndCombineOn64Bit(state, first, len);
}
ABSL_CONST_INIT const void* const MixingHashState::kSeed = &kSeed;
diff --git a/absl/hash/internal/hash.h b/absl/hash/internal/hash.h
index 2f91a8b..02df7fa 100644
--- a/absl/hash/internal/hash.h
+++ b/absl/hash/internal/hash.h
@@ -1030,11 +1030,12 @@
// Slow dispatch path for calls to CombineContiguousImpl with a size argument
// larger than inlined size. Has the same effect as calling
// CombineContiguousImpl() repeatedly with the chunk stride size.
-uint64_t CombineLargeContiguousImplOn32BitLengthGt8(const unsigned char* first,
- size_t len, uint64_t state);
-uint64_t CombineLargeContiguousImplOn64BitLengthGt32(const unsigned char* first,
- size_t len,
- uint64_t state);
+uint64_t CombineLargeContiguousImplOn32BitLengthGt8(uint64_t state,
+ const unsigned char* first,
+ size_t len);
+uint64_t CombineLargeContiguousImplOn64BitLengthGt32(uint64_t state,
+ const unsigned char* first,
+ size_t len);
ABSL_ATTRIBUTE_ALWAYS_INLINE inline uint64_t CombineSmallContiguousImpl(
uint64_t state, const unsigned char* first, size_t len) {
@@ -1092,7 +1093,7 @@
return CombineSmallContiguousImpl(PrecombineLengthMix(state, len), first,
len);
}
- return CombineLargeContiguousImplOn32BitLengthGt8(first, len, state);
+ return CombineLargeContiguousImplOn32BitLengthGt8(state, first, len);
}
inline uint64_t CombineContiguousImpl(
@@ -1115,7 +1116,7 @@
// We must not mix length into the state here because calling
// CombineContiguousImpl twice with PiecewiseChunkSize() must be equivalent
// to calling CombineLargeContiguousImpl once with 2 * PiecewiseChunkSize().
- return CombineLargeContiguousImplOn64BitLengthGt32(first, len, state);
+ return CombineLargeContiguousImplOn64BitLengthGt32(state, first, len);
}
#if defined(ABSL_INTERNAL_LEGACY_HASH_NAMESPACE) && \
diff --git a/absl/hash/internal/low_level_hash_test.cc b/absl/hash/internal/low_level_hash_test.cc
index b4fe872..cdd279b 100644
--- a/absl/hash/internal/low_level_hash_test.cc
+++ b/absl/hash/internal/low_level_hash_test.cc
@@ -442,7 +442,7 @@
auto hash_fn = [](absl::string_view s, uint64_t state) {
return absl::hash_internal::CombineLargeContiguousImplOn64BitLengthGt32(
- reinterpret_cast<const unsigned char*>(s.data()), s.size(), state);
+ state, reinterpret_cast<const unsigned char*>(s.data()), s.size());
};
#if UPDATE_GOLDEN