Bug 1480361 - Introduce HashTable::LookupReason. r=luke
Again inspired by PLDHashTable, this makes lookup's call sites a bit clearer
than passing a bare 0 or sCollisionBit.
It also gives a stronger guarantee that we end up with appropriately
specialized code for the Add vs. NonAdd cases. (That guarantee isn't currently
needed because the compiler inlines things sufficiently anyway, but it can't
hurt.)
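As a minimal sketch of what the template parameter buys (toy names, not the
real HashTable interface): |Reason| is a non-type template parameter, so
|Reason == ForAdd| is a compile-time constant in each instantiation and the
untaken branch folds away regardless of any inlining decisions.

  #include <cstddef>
  #include <cstdint>

  enum LookupReason { ForNonAdd, ForAdd };

  // Toy probe loop: the collision-bit write exists only in the ForAdd
  // instantiation; probe<ForNonAdd> compiles to a read-only scan.
  template<LookupReason Reason>
  const uint32_t* probe(const uint32_t* aSlots, uint32_t* aCollisions,
                        size_t aLen, uint32_t aKey)
  {
    for (size_t i = 0; i < aLen; i++) {
      if (Reason == ForAdd) {
        aCollisions[i] = 1;  // folded away entirely in probe<ForNonAdd>
      }
      if (aSlots[i] == aKey) {
        return &aSlots[i];
      }
    }
    return nullptr;
  }

probe<ForNonAdd> instantiates a purely read-only scan while probe<ForAdd> gets
the mutating one; with a runtime collision-bit argument the same effect
depends on constant propagation after inlining.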
--- a/mfbt/HashTable.h
+++ b/mfbt/HashTable.h
@@ -1723,28 +1723,36 @@ private:
bool underloaded() { return wouldBeUnderloaded(capacity(), mEntryCount); }
static MOZ_ALWAYS_INLINE bool match(Entry& aEntry, const Lookup& aLookup)
{
return HashPolicy::match(HashPolicy::getKey(aEntry.get()), aLookup);
}
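+  // Why a lookup is being performed. A ForAdd lookup may modify the table
+  // (setting collision bits and remembering removed entries for reuse); a
+  // ForNonAdd lookup must leave the table untouched.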
+ enum LookupReason
+ {
+ ForNonAdd,
+ ForAdd
+ };
+
// Warning: in order for readonlyThreadsafeLookup() to be safe this
-  // function must not modify the table in any way when |collisionBit| is 0.
+  // function must not modify the table in any way when |Reason| is ForNonAdd.
// (The use of the METER() macro to increment stats violates this
// restriction but we will live with that for now because it's enabled so
// rarely.)
+ template<LookupReason Reason>
MOZ_ALWAYS_INLINE Entry& lookup(const Lookup& aLookup,
- HashNumber aKeyHash,
- uint32_t aCollisionBit) const
+ HashNumber aKeyHash) const
{
MOZ_ASSERT(isLiveHash(aKeyHash));
MOZ_ASSERT(!(aKeyHash & sCollisionBit));
- MOZ_ASSERT(aCollisionBit == 0 || aCollisionBit == sCollisionBit);
MOZ_ASSERT(mTable);
METER(mStats.mSearches++);
// Compute the primary hash address.
HashNumber h1 = hash1(aKeyHash);
Entry* entry = &mTable[h1];
// Miss: return space for a new entry.
@@ -1761,17 +1766,17 @@ private:
// Collision: double hash.
DoubleHash dh = hash2(aKeyHash);
// Save the first removed entry pointer so we can recycle later.
Entry* firstRemoved = nullptr;
while (true) {
- if (aCollisionBit == sCollisionBit && !firstRemoved) {
+ if (Reason == ForAdd && !firstRemoved) {
if (MOZ_UNLIKELY(entry->isRemoved())) {
firstRemoved = entry;
} else {
entry->setCollision();
}
}
METER(mStats.mSteps++);
@@ -2123,39 +2128,39 @@ public:
MOZ_ALWAYS_INLINE Ptr lookup(const Lookup& aLookup) const
{
ReentrancyGuard g(*this);
if (!HasHash<HashPolicy>(aLookup)) {
return Ptr();
}
HashNumber keyHash = prepareHash(aLookup);
- return Ptr(lookup(aLookup, keyHash, 0), *this);
+ return Ptr(lookup<ForNonAdd>(aLookup, keyHash), *this);
}
MOZ_ALWAYS_INLINE Ptr readonlyThreadsafeLookup(const Lookup& aLookup) const
{
if (!HasHash<HashPolicy>(aLookup)) {
return Ptr();
}
HashNumber keyHash = prepareHash(aLookup);
- return Ptr(lookup(aLookup, keyHash, 0), *this);
+ return Ptr(lookup<ForNonAdd>(aLookup, keyHash), *this);
}
MOZ_ALWAYS_INLINE AddPtr lookupForAdd(const Lookup& aLookup) const
{
ReentrancyGuard g(*this);
if (!EnsureHash<HashPolicy>(aLookup)) {
return AddPtr();
}
HashNumber keyHash = prepareHash(aLookup);
// Directly call the constructor in the return statement to avoid
// excess copying when building with Visual Studio 2017.
// See bug 1385181.
- return AddPtr(lookup(aLookup, keyHash, sCollisionBit), *this, keyHash);
+ return AddPtr(lookup<ForAdd>(aLookup, keyHash), *this, keyHash);
}
template<typename... Args>
MOZ_MUST_USE bool add(AddPtr& aPtr, Args&&... aArgs)
{
ReentrancyGuard g(*this);
MOZ_ASSERT(mTable);
MOZ_ASSERT_IF(aPtr.isValid(), aPtr.mTable == this);
@@ -2247,17 +2252,17 @@ public:
#ifdef DEBUG
aPtr.mGeneration = generation();
aPtr.mMutationCount = mMutationCount;
#endif
{
ReentrancyGuard g(*this);
// Check that aLookup has not been destroyed.
MOZ_ASSERT(prepareHash(aLookup) == aPtr.mKeyHash);
- aPtr.mEntry = &lookup(aLookup, aPtr.mKeyHash, sCollisionBit);
+ aPtr.mEntry = &lookup<ForAdd>(aLookup, aPtr.mKeyHash);
}
return aPtr.found() || add(aPtr, std::forward<Args>(aArgs)...);
}
void remove(Ptr aPtr)
{
MOZ_ASSERT(mTable);
ReentrancyGuard g(*this);