Diffstat (limited to 'Source/WTF/wtf/HashTable.h')
-rw-r--r--  Source/WTF/wtf/HashTable.h  356
1 file changed, 249 insertions(+), 107 deletions(-)
diff --git a/Source/WTF/wtf/HashTable.h b/Source/WTF/wtf/HashTable.h
index 8519ae364..a576e62ce 100644
--- a/Source/WTF/wtf/HashTable.h
+++ b/Source/WTF/wtf/HashTable.h
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2005, 2006, 2007, 2008, 2011, 2012 Apple Inc. All rights reserved.
+ * Copyright (C) 2005, 2006, 2007, 2008, 2011, 2012, 2015 Apple Inc. All rights reserved.
* Copyright (C) 2008 David Levin <levin@chromium.org>
*
* This library is free software; you can redistribute it and/or
@@ -19,10 +19,10 @@
*
*/
-#ifndef WTF_HashTable_h
-#define WTF_HashTable_h
+#pragma once
#include <atomic>
+#include <iterator>
#include <mutex>
#include <string.h>
#include <type_traits>
@@ -30,6 +30,8 @@
#include <wtf/Assertions.h>
#include <wtf/FastMalloc.h>
#include <wtf/HashTraits.h>
+#include <wtf/Lock.h>
+#include <wtf/MathExtras.h>
#include <wtf/StdLibExtras.h>
#include <wtf/ValueCheck.h>
@@ -101,7 +103,7 @@ namespace WTF {
typedef enum { HashItemKnownGood } HashItemKnownGoodTag;
template<typename Key, typename Value, typename Extractor, typename HashFunctions, typename Traits, typename KeyTraits>
- class HashTableConstIterator {
+ class HashTableConstIterator : public std::iterator<std::forward_iterator_tag, Value, std::ptrdiff_t, const Value*, const Value&> {
private:
typedef HashTable<Key, Value, Extractor, HashFunctions, Traits, KeyTraits> HashTableType;
typedef HashTableIterator<Key, Value, Extractor, HashFunctions, Traits, KeyTraits> iterator;
@@ -237,7 +239,7 @@ namespace WTF {
};
template<typename Key, typename Value, typename Extractor, typename HashFunctions, typename Traits, typename KeyTraits>
- class HashTableIterator {
+ class HashTableIterator : public std::iterator<std::forward_iterator_tag, Value, std::ptrdiff_t, Value*, Value&> {
private:
typedef HashTable<Key, Value, Extractor, HashFunctions, Traits, KeyTraits> HashTableType;
typedef HashTableIterator<Key, Value, Extractor, HashFunctions, Traits, KeyTraits> iterator;
@@ -276,17 +278,23 @@ namespace WTF {
const_iterator m_iterator;
};
- template<typename HashFunctions> class IdentityHashTranslator {
+ template<typename ValueTraits, typename HashFunctions> class IdentityHashTranslator {
public:
template<typename T> static unsigned hash(const T& key) { return HashFunctions::hash(key); }
template<typename T, typename U> static bool equal(const T& a, const U& b) { return HashFunctions::equal(a, b); }
- template<typename T, typename U, typename V> static void translate(T& location, const U&, V&& value) { location = std::forward<V>(value); }
+ template<typename T, typename U, typename V> static void translate(T& location, const U&, V&& value)
+ {
+ ValueTraits::assignToEmpty(location, std::forward<V>(value));
+ }
};
template<typename IteratorType> struct HashTableAddResult {
+ HashTableAddResult() : isNewEntry(false) { }
HashTableAddResult(IteratorType iter, bool isNewEntry) : iterator(iter), isNewEntry(isNewEntry) { }
IteratorType iterator;
bool isNewEntry;
+
+ explicit operator bool() const { return isNewEntry; }
};
template<typename Key, typename Value, typename Extractor, typename HashFunctions, typename Traits, typename KeyTraits>
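The explicit operator bool added to HashTableAddResult above makes the result of add() directly testable: it is true only when a new entry was actually inserted. A minimal usage sketch, assuming a WTF::HashSet built on this table (the contents and function name are illustrative):

    #include <cstdio>
    #include <wtf/HashSet.h>

    static void recordVisit(WTF::HashSet<unsigned>& visited, unsigned id)
    {
        // AddResult now converts to bool via isNewEntry, so the "already present"
        // case can be filtered without naming the member.
        if (visited.add(id))
            std::printf("first visit to %u\n", id);
    }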
@@ -297,7 +305,7 @@ namespace WTF {
typedef Traits ValueTraits;
typedef Key KeyType;
typedef Value ValueType;
- typedef IdentityHashTranslator<HashFunctions> IdentityTranslatorType;
+ typedef IdentityHashTranslator<ValueTraits, HashFunctions> IdentityTranslatorType;
typedef HashTableAddResult<iterator> AddResult;
#if DUMP_HASHTABLE_STATS_PER_TABLE
@@ -313,16 +321,16 @@ namespace WTF {
{
}
- int numAccesses;
- int numRehashes;
- int numRemoves;
- int numReinserts;
+ unsigned numAccesses;
+ unsigned numRehashes;
+ unsigned numRemoves;
+ unsigned numReinserts;
- int maxCollisions;
- int numCollisions;
- int collisionGraph[4096];
+ unsigned maxCollisions;
+ unsigned numCollisions;
+ unsigned collisionGraph[4096];
- void recordCollisionAtCount(int count)
+ void recordCollisionAtCount(unsigned count)
{
if (count > maxCollisions)
maxCollisions = count;
@@ -336,7 +344,7 @@ namespace WTF {
dataLogF("%d accesses\n", numAccesses);
dataLogF("%d total collisions, average %.2f probes per access\n", numCollisions, 1.0 * (numAccesses + numCollisions) / numAccesses);
dataLogF("longest collision chain: %d\n", maxCollisions);
- for (int i = 1; i <= maxCollisions; i++) {
+ for (unsigned i = 1; i <= maxCollisions; i++) {
dataLogF(" %d lookups with exactly %d collisions (%.2f%% , %.2f%% with this many or more)\n", collisionGraph[i], i, 100.0 * (collisionGraph[i] - collisionGraph[i+1]) / numAccesses, 100.0 * collisionGraph[i] / numAccesses);
}
dataLogF("%d rehashes\n", numRehashes);
@@ -360,6 +368,9 @@ namespace WTF {
void swap(HashTable&);
HashTable& operator=(const HashTable&);
+ HashTable(HashTable&&);
+ HashTable& operator=(HashTable&&);
+
// When the hash table is empty, just return the same iterator for end as for begin.
// This is more efficient because we don't have to skip all the empty and deleted
// buckets, and iterating an empty table is a common case that's worth optimizing.
@@ -368,12 +379,12 @@ namespace WTF {
const_iterator begin() const { return isEmpty() ? end() : makeConstIterator(m_table); }
const_iterator end() const { return makeKnownGoodConstIterator(m_table + m_tableSize); }
- int size() const { return m_keyCount; }
- int capacity() const { return m_tableSize; }
+ unsigned size() const { return m_keyCount; }
+ unsigned capacity() const { return m_tableSize; }
bool isEmpty() const { return !m_keyCount; }
AddResult add(const ValueType& value) { return add<IdentityTranslatorType>(Extractor::extract(value), value); }
- AddResult add(ValueType&& value) { return add<IdentityTranslatorType>(Extractor::extract(value), std::move(value)); }
+ AddResult add(ValueType&& value) { return add<IdentityTranslatorType>(Extractor::extract(value), WTFMove(value)); }
// A special version of add() that finds the object by hashing and comparing
// with some other type, to avoid the cost of type conversion if the object is already
@@ -393,6 +404,8 @@ namespace WTF {
void remove(iterator);
void removeWithoutEntryConsistencyCheck(iterator);
void removeWithoutEntryConsistencyCheck(const_iterator);
+ template<typename Functor>
+ void removeIf(const Functor&);
void clear();
static bool isEmptyBucket(const ValueType& value) { return isHashTraitsEmptyValue<KeyTraits>(Extractor::extract(value)); }
@@ -401,6 +414,7 @@ namespace WTF {
ValueType* lookup(const Key& key) { return lookup<IdentityTranslatorType>(key); }
template<typename HashTranslator, typename T> ValueType* lookup(const T&);
+ template<typename HashTranslator, typename T> ValueType* inlineLookup(const T&);
#if !ASSERT_DISABLED
void checkTableConsistency() const;
@@ -416,8 +430,8 @@ namespace WTF {
#endif
private:
- static ValueType* allocateTable(int size);
- static void deallocateTable(ValueType* table, int size);
+ static ValueType* allocateTable(unsigned size);
+ static void deallocateTable(ValueType* table, unsigned size);
typedef std::pair<ValueType*, bool> LookupType;
typedef std::pair<LookupType, unsigned> FullLookupType;
@@ -426,6 +440,8 @@ namespace WTF {
template<typename HashTranslator, typename T> FullLookupType fullLookupForWriting(const T&);
template<typename HashTranslator, typename T> LookupType lookupForWriting(const T&);
+ template<typename HashTranslator, typename T, typename Extra> void addUniqueForInitialization(T&& key, Extra&&);
+
template<typename HashTranslator, typename T> void checkKey(const T&);
void removeAndInvalidateWithoutEntryConsistencyCheck(ValueType*);
@@ -438,11 +454,11 @@ namespace WTF {
ValueType* expand(ValueType* entry = nullptr);
void shrink() { rehash(m_tableSize / 2, nullptr); }
- ValueType* rehash(int newTableSize, ValueType* entry);
+ ValueType* rehash(unsigned newTableSize, ValueType* entry);
ValueType* reinsert(ValueType&&);
static void initializeBucket(ValueType& bucket);
- static void deleteBucket(ValueType& bucket) { bucket.~ValueType(); Traits::constructDeletedValue(bucket); }
+ static void deleteBucket(ValueType& bucket) { hashTraitsDeleteBucket<Traits>(bucket); }
FullLookupType makeLookupResult(ValueType* position, bool found, unsigned hash)
{ return FullLookupType(LookupType(position, found), hash); }
@@ -464,21 +480,21 @@ namespace WTF {
static void invalidateIterators() { }
#endif
- static const int m_maxLoad = 2;
- static const int m_minLoad = 6;
+ static const unsigned m_maxLoad = 2;
+ static const unsigned m_minLoad = 6;
ValueType* m_table;
- int m_tableSize;
- int m_tableSizeMask;
- int m_keyCount;
- int m_deletedCount;
+ unsigned m_tableSize;
+ unsigned m_tableSizeMask;
+ unsigned m_keyCount;
+ unsigned m_deletedCount;
#if CHECK_HASHTABLE_ITERATORS
public:
// All access to m_iterators should be guarded with m_mutex.
mutable const_iterator* m_iterators;
- // Use OwnPtr so HashTable can still be memmove'd or memcpy'ed.
- mutable std::unique_ptr<std::mutex> m_mutex;
+ // Use std::unique_ptr so HashTable can still be memmove'd or memcpy'ed.
+ mutable std::unique_ptr<Lock> m_mutex;
#endif
#if DUMP_HASHTABLE_STATS_PER_TABLE
@@ -521,7 +537,7 @@ namespace WTF {
struct HashTableCapacityForSize {
static const unsigned value = HashTableCapacityForSizeSplitter<size, !(size & (size - 1))>::value;
COMPILE_ASSERT(size > 0, HashTableNonZeroMinimumCapacity);
- COMPILE_ASSERT(!static_cast<int>(value >> 31), HashTableNoCapacityOverflow);
+ COMPILE_ASSERT(!static_cast<unsigned>(value >> 31), HashTableNoCapacityOverflow);
COMPILE_ASSERT(value > (2 * size), HashTableCapacityHoldsContentSize);
};
@@ -534,7 +550,7 @@ namespace WTF {
, m_deletedCount(0)
#if CHECK_HASHTABLE_ITERATORS
, m_iterators(0)
- , m_mutex(std::make_unique<std::mutex>())
+ , m_mutex(std::make_unique<Lock>())
#endif
#if DUMP_HASHTABLE_STATS_PER_TABLE
, m_stats(std::make_unique<Stats>())
@@ -582,13 +598,20 @@ namespace WTF {
template<typename HashTranslator, typename T>
inline auto HashTable<Key, Value, Extractor, HashFunctions, Traits, KeyTraits>::lookup(const T& key) -> ValueType*
{
+ return inlineLookup<HashTranslator>(key);
+ }
+
+ template<typename Key, typename Value, typename Extractor, typename HashFunctions, typename Traits, typename KeyTraits>
+ template<typename HashTranslator, typename T>
+ ALWAYS_INLINE auto HashTable<Key, Value, Extractor, HashFunctions, Traits, KeyTraits>::inlineLookup(const T& key) -> ValueType*
+ {
checkKey<HashTranslator>(key);
- int k = 0;
- int sizeMask = m_tableSizeMask;
+ unsigned k = 0;
+ unsigned sizeMask = m_tableSizeMask;
ValueType* table = m_table;
unsigned h = HashTranslator::hash(key);
- int i = h & sizeMask;
+ unsigned i = h & sizeMask;
if (!table)
return 0;
@@ -641,15 +664,15 @@ namespace WTF {
ASSERT(m_table);
checkKey<HashTranslator>(key);
- int k = 0;
+ unsigned k = 0;
ValueType* table = m_table;
- int sizeMask = m_tableSizeMask;
+ unsigned sizeMask = m_tableSizeMask;
unsigned h = HashTranslator::hash(key);
- int i = h & sizeMask;
+ unsigned i = h & sizeMask;
#if DUMP_HASHTABLE_STATS
++HashTableStats::numAccesses;
- int probeCount = 0;
+ unsigned probeCount = 0;
#endif
#if DUMP_HASHTABLE_STATS_PER_TABLE
@@ -702,11 +725,11 @@ namespace WTF {
ASSERT(m_table);
checkKey<HashTranslator>(key);
- int k = 0;
+ unsigned k = 0;
ValueType* table = m_table;
- int sizeMask = m_tableSizeMask;
+ unsigned sizeMask = m_tableSizeMask;
unsigned h = HashTranslator::hash(key);
- int i = h & sizeMask;
+ unsigned i = h & sizeMask;
#if DUMP_HASHTABLE_STATS
++HashTableStats::numAccesses;
@@ -756,12 +779,65 @@ namespace WTF {
}
}
+ template<typename Key, typename Value, typename Extractor, typename HashFunctions, typename Traits, typename KeyTraits>
+ template<typename HashTranslator, typename T, typename Extra>
+ ALWAYS_INLINE void HashTable<Key, Value, Extractor, HashFunctions, Traits, KeyTraits>::addUniqueForInitialization(T&& key, Extra&& extra)
+ {
+ ASSERT(m_table);
+
+ checkKey<HashTranslator>(key);
+
+ invalidateIterators();
+
+ internalCheckTableConsistency();
+
+ unsigned k = 0;
+ ValueType* table = m_table;
+ unsigned sizeMask = m_tableSizeMask;
+ unsigned h = HashTranslator::hash(key);
+ unsigned i = h & sizeMask;
+
+#if DUMP_HASHTABLE_STATS
+ ++HashTableStats::numAccesses;
+ unsigned probeCount = 0;
+#endif
+
+#if DUMP_HASHTABLE_STATS_PER_TABLE
+ ++m_stats->numAccesses;
+#endif
+
+ ValueType* entry;
+ while (1) {
+ entry = table + i;
+
+ if (isEmptyBucket(*entry))
+ break;
+
+#if DUMP_HASHTABLE_STATS
+ ++probeCount;
+ HashTableStats::recordCollisionAtCount(probeCount);
+#endif
+
+#if DUMP_HASHTABLE_STATS_PER_TABLE
+ m_stats->recordCollisionAtCount(probeCount);
+#endif
+
+ if (k == 0)
+ k = 1 | doubleHash(h);
+ i = (i + k) & sizeMask;
+ }
+
+ HashTranslator::translate(*entry, std::forward<T>(key), std::forward<Extra>(extra));
+
+ internalCheckTableConsistency();
+ }
+
template<bool emptyValueIsZero> struct HashTableBucketInitializer;
template<> struct HashTableBucketInitializer<false> {
template<typename Traits, typename Value> static void initialize(Value& bucket)
{
- new (NotNull, &bucket) Value(Traits::emptyValue());
+ new (NotNull, std::addressof(bucket)) Value(Traits::emptyValue());
}
};
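addUniqueForInitialization above walks the table with the same double-hashing probe the lookup paths use: start at h & sizeMask and, on a collision, step by k = 1 | doubleHash(h); forcing the step odd keeps it coprime with the power-of-two table size, so the probe sequence visits every slot. A self-contained sketch of that probe sequence, with a stand-in secondary hash rather than WTF's doubleHash:

    #include <cstdio>

    // Stand-in secondary hash; any function of h works for illustration.
    static unsigned doubleHashStub(unsigned h) { return h >> 3; }

    // Print the first few probe positions for hash h in a power-of-two table,
    // mirroring the collision loop in addUniqueForInitialization.
    static void dumpProbeSequence(unsigned h, unsigned tableSize)
    {
        unsigned sizeMask = tableSize - 1;
        unsigned i = h & sizeMask;
        unsigned k = 0;
        for (unsigned probe = 0; probe < 4; ++probe) {
            std::printf("probe %u -> slot %u\n", probe, i);
            if (!k)
                k = 1 | doubleHashStub(h); // odd step, coprime with the table size
            i = (i + k) & sizeMask;
        }
    }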
@@ -771,7 +847,7 @@ namespace WTF {
// This initializes the bucket without copying the empty value.
// That makes it possible to use this with types that don't support copying.
// The memset to 0 looks like a slow operation but is optimized by the compilers.
- memset(&bucket, 0, sizeof(bucket));
+ memset(std::addressof(bucket), 0, sizeof(bucket));
}
};
@@ -783,7 +859,7 @@ namespace WTF {
template<typename Key, typename Value, typename Extractor, typename HashFunctions, typename Traits, typename KeyTraits>
template<typename HashTranslator, typename T, typename Extra>
- inline auto HashTable<Key, Value, Extractor, HashFunctions, Traits, KeyTraits>::add(T&& key, Extra&& extra) -> AddResult
+ ALWAYS_INLINE auto HashTable<Key, Value, Extractor, HashFunctions, Traits, KeyTraits>::add(T&& key, Extra&& extra) -> AddResult
{
checkKey<HashTranslator>(key);
@@ -796,11 +872,11 @@ namespace WTF {
ASSERT(m_table);
- int k = 0;
+ unsigned k = 0;
ValueType* table = m_table;
- int sizeMask = m_tableSizeMask;
+ unsigned sizeMask = m_tableSizeMask;
unsigned h = HashTranslator::hash(key);
- int i = h & sizeMask;
+ unsigned i = h & sizeMask;
#if DUMP_HASHTABLE_STATS
++HashTableStats::numAccesses;
@@ -919,7 +995,7 @@ namespace WTF {
Value* newEntry = lookupForWriting(Extractor::extract(entry)).first;
newEntry->~Value();
- new (NotNull, newEntry) ValueType(std::move(entry));
+ new (NotNull, newEntry) ValueType(WTFMove(entry));
return newEntry;
}
@@ -1031,22 +1107,52 @@ namespace WTF {
}
template<typename Key, typename Value, typename Extractor, typename HashFunctions, typename Traits, typename KeyTraits>
- auto HashTable<Key, Value, Extractor, HashFunctions, Traits, KeyTraits>::allocateTable(int size) -> ValueType*
+ template<typename Functor>
+ inline void HashTable<Key, Value, Extractor, HashFunctions, Traits, KeyTraits>::removeIf(const Functor& functor)
+ {
+ // We must use local copies in case "functor" or "deleteBucket"
+ // makes a function call, which prevents the compiler from keeping
+ // the values in register.
+ unsigned removedBucketCount = 0;
+ ValueType* table = m_table;
+
+ for (unsigned i = m_tableSize; i--;) {
+ ValueType& bucket = table[i];
+ if (isEmptyOrDeletedBucket(bucket))
+ continue;
+
+ if (!functor(bucket))
+ continue;
+
+ deleteBucket(bucket);
+ ++removedBucketCount;
+ }
+ m_deletedCount += removedBucketCount;
+ m_keyCount -= removedBucketCount;
+
+ if (shouldShrink())
+ shrink();
+
+ internalCheckTableConsistency();
+ }
+
+ template<typename Key, typename Value, typename Extractor, typename HashFunctions, typename Traits, typename KeyTraits>
+ auto HashTable<Key, Value, Extractor, HashFunctions, Traits, KeyTraits>::allocateTable(unsigned size) -> ValueType*
{
// would use a template member function with explicit specializations here, but
// gcc doesn't appear to support that
if (Traits::emptyValueIsZero)
return static_cast<ValueType*>(fastZeroedMalloc(size * sizeof(ValueType)));
ValueType* result = static_cast<ValueType*>(fastMalloc(size * sizeof(ValueType)));
- for (int i = 0; i < size; i++)
+ for (unsigned i = 0; i < size; i++)
initializeBucket(result[i]);
return result;
}
template<typename Key, typename Value, typename Extractor, typename HashFunctions, typename Traits, typename KeyTraits>
- void HashTable<Key, Value, Extractor, HashFunctions, Traits, KeyTraits>::deallocateTable(ValueType* table, int size)
+ void HashTable<Key, Value, Extractor, HashFunctions, Traits, KeyTraits>::deallocateTable(ValueType* table, unsigned size)
{
- for (int i = 0; i < size; ++i) {
+ for (unsigned i = 0; i < size; ++i) {
if (!isDeletedBucket(table[i]))
table[i].~ValueType();
}
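The removeIf() added in this hunk makes a single pass over the buckets, deletes every one the functor matches, and shrinks afterwards if the deletions drop the load far enough. A hedged usage sketch, assuming a WTF::HashSet wrapper that forwards removeIf to its table (that forwarding is an assumption here, and the predicate is illustrative):

    #include <wtf/HashSet.h>

    static void dropOddKeys(WTF::HashSet<unsigned>& keys)
    {
        // Matching buckets are deleted in place; the table shrinks afterwards
        // if shouldShrink() says the load dropped enough.
        keys.removeIf([](unsigned key) {
            return key & 1;
        });
    }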
@@ -1056,7 +1162,7 @@ namespace WTF {
template<typename Key, typename Value, typename Extractor, typename HashFunctions, typename Traits, typename KeyTraits>
auto HashTable<Key, Value, Extractor, HashFunctions, Traits, KeyTraits>::expand(ValueType* entry) -> ValueType*
{
- int newSize;
+ unsigned newSize;
if (m_tableSize == 0)
newSize = KeyTraits::minimumTableSize;
else if (mustRehashInPlace())
@@ -1068,11 +1174,11 @@ namespace WTF {
}
template<typename Key, typename Value, typename Extractor, typename HashFunctions, typename Traits, typename KeyTraits>
- auto HashTable<Key, Value, Extractor, HashFunctions, Traits, KeyTraits>::rehash(int newTableSize, ValueType* entry) -> ValueType*
+ auto HashTable<Key, Value, Extractor, HashFunctions, Traits, KeyTraits>::rehash(unsigned newTableSize, ValueType* entry) -> ValueType*
{
internalCheckTableConsistencyExceptSize();
- int oldTableSize = m_tableSize;
+ unsigned oldTableSize = m_tableSize;
ValueType* oldTable = m_table;
#if DUMP_HASHTABLE_STATS
@@ -1090,14 +1196,21 @@ namespace WTF {
m_table = allocateTable(newTableSize);
Value* newEntry = nullptr;
- for (int i = 0; i != oldTableSize; ++i) {
- if (isEmptyOrDeletedBucket(oldTable[i])) {
- ASSERT(&oldTable[i] != entry);
+ for (unsigned i = 0; i != oldTableSize; ++i) {
+ if (isDeletedBucket(oldTable[i])) {
+ ASSERT(std::addressof(oldTable[i]) != entry);
continue;
}
- Value* reinsertedEntry = reinsert(std::move(oldTable[i]));
- if (&oldTable[i] == entry) {
+ if (isEmptyBucket(oldTable[i])) {
+ ASSERT(std::addressof(oldTable[i]) != entry);
+ oldTable[i].~ValueType();
+ continue;
+ }
+
+ Value* reinsertedEntry = reinsert(WTFMove(oldTable[i]));
+ oldTable[i].~ValueType();
+ if (std::addressof(oldTable[i]) == entry) {
ASSERT(!newEntry);
newEntry = reinsertedEntry;
}
@@ -1105,7 +1218,7 @@ namespace WTF {
m_deletedCount = 0;
- deallocateTable(oldTable, oldTableSize);
+ fastFree(oldTable);
internalCheckTableConsistency();
return newEntry;
@@ -1123,30 +1236,45 @@ namespace WTF {
m_tableSize = 0;
m_tableSizeMask = 0;
m_keyCount = 0;
+ m_deletedCount = 0;
}
template<typename Key, typename Value, typename Extractor, typename HashFunctions, typename Traits, typename KeyTraits>
HashTable<Key, Value, Extractor, HashFunctions, Traits, KeyTraits>::HashTable(const HashTable& other)
- : m_table(0)
+ : m_table(nullptr)
, m_tableSize(0)
, m_tableSizeMask(0)
, m_keyCount(0)
, m_deletedCount(0)
#if CHECK_HASHTABLE_ITERATORS
- , m_iterators(0)
- , m_mutex(std::make_unique<std::mutex>())
+ , m_iterators(nullptr)
+ , m_mutex(std::make_unique<Lock>())
#endif
#if DUMP_HASHTABLE_STATS_PER_TABLE
, m_stats(std::make_unique<Stats>(*other.m_stats))
#endif
{
- // Copy the hash table the dumb way, by adding each element to the new table.
- // It might be more efficient to copy the table slots, but it's not clear that efficiency is needed.
- // FIXME: It's likely that this can be improved, for static analyses that use
- // HashSets. https://bugs.webkit.org/show_bug.cgi?id=118455
- const_iterator end = other.end();
- for (const_iterator it = other.begin(); it != end; ++it)
- add(*it);
+ unsigned otherKeyCount = other.size();
+ if (!otherKeyCount)
+ return;
+
+ unsigned bestTableSize = WTF::roundUpToPowerOfTwo(otherKeyCount) * 2;
+
+ // With maxLoad at 1/2 and minLoad at 1/6, our average load is 2/6.
+ // If we are getting halfway between 2/6 and 1/2 (past 5/12), we double the size to avoid being too close to
+ // maxLoad and bring the ratio close to 2/6. This gives us a load in the bounds [3/12, 5/12).
+ bool aboveThreeQuarterLoad = otherKeyCount * 12 >= bestTableSize * 5;
+ if (aboveThreeQuarterLoad)
+ bestTableSize *= 2;
+
+ unsigned minimumTableSize = KeyTraits::minimumTableSize;
+ m_tableSize = std::max<unsigned>(bestTableSize, minimumTableSize);
+ m_tableSizeMask = m_tableSize - 1;
+ m_keyCount = otherKeyCount;
+ m_table = allocateTable(m_tableSize);
+
+ for (const auto& otherValue : other)
+ addUniqueForInitialization<IdentityTranslatorType>(Extractor::extract(otherValue), otherValue);
}
template<typename Key, typename Value, typename Extractor, typename HashFunctions, typename Traits, typename KeyTraits>
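The rewritten copy constructor above sizes the table once and fills it with addUniqueForInitialization instead of re-adding every element. A minimal sketch of just the sizing arithmetic, with two worked counts in the comments (the helper name is mine, not WTF's):

    #include <algorithm>
    #include <wtf/MathExtras.h>

    // Mirrors the sizing step of the copy constructor (illustrative only).
    static unsigned copyConstructorTableSize(unsigned otherKeyCount, unsigned minimumTableSize)
    {
        unsigned bestTableSize = WTF::roundUpToPowerOfTwo(otherKeyCount) * 2;
        // Double when the load would land at or past 5/12, pulling it back toward the 2/6 average.
        if (otherKeyCount * 12 >= bestTableSize * 5)
            bestTableSize *= 2;
        return std::max<unsigned>(bestTableSize, minimumTableSize);
    }
    // 11 keys: roundUpToPowerOfTwo(11) * 2 = 32; 11 * 12 = 132 < 32 * 5 = 160, so 32 is kept (load ~0.34).
    // 14 keys: 32 at first, but 14 * 12 = 168 >= 160, so the table doubles to 64.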
@@ -1155,38 +1283,57 @@ namespace WTF {
invalidateIterators();
other.invalidateIterators();
- ValueType* tmp_table = m_table;
- m_table = other.m_table;
- other.m_table = tmp_table;
+ std::swap(m_table, other.m_table);
+ std::swap(m_tableSize, other.m_tableSize);
+ std::swap(m_tableSizeMask, other.m_tableSizeMask);
+ std::swap(m_keyCount, other.m_keyCount);
+ std::swap(m_deletedCount, other.m_deletedCount);
- int tmp_tableSize = m_tableSize;
- m_tableSize = other.m_tableSize;
- other.m_tableSize = tmp_tableSize;
+#if DUMP_HASHTABLE_STATS_PER_TABLE
+ m_stats.swap(other.m_stats);
+#endif
+ }
- int tmp_tableSizeMask = m_tableSizeMask;
- m_tableSizeMask = other.m_tableSizeMask;
- other.m_tableSizeMask = tmp_tableSizeMask;
+ template<typename Key, typename Value, typename Extractor, typename HashFunctions, typename Traits, typename KeyTraits>
+ auto HashTable<Key, Value, Extractor, HashFunctions, Traits, KeyTraits>::operator=(const HashTable& other) -> HashTable&
+ {
+ HashTable tmp(other);
+ swap(tmp);
+ return *this;
+ }
- int tmp_keyCount = m_keyCount;
- m_keyCount = other.m_keyCount;
- other.m_keyCount = tmp_keyCount;
+ template<typename Key, typename Value, typename Extractor, typename HashFunctions, typename Traits, typename KeyTraits>
+ inline HashTable<Key, Value, Extractor, HashFunctions, Traits, KeyTraits>::HashTable(HashTable&& other)
+#if CHECK_HASHTABLE_ITERATORS
+ : m_iterators(nullptr)
+ , m_mutex(std::make_unique<Lock>())
+#endif
+ {
+ other.invalidateIterators();
- int tmp_deletedCount = m_deletedCount;
+ m_table = other.m_table;
+ m_tableSize = other.m_tableSize;
+ m_tableSizeMask = other.m_tableSizeMask;
+ m_keyCount = other.m_keyCount;
m_deletedCount = other.m_deletedCount;
- other.m_deletedCount = tmp_deletedCount;
+
+ other.m_table = nullptr;
+ other.m_tableSize = 0;
+ other.m_tableSizeMask = 0;
+ other.m_keyCount = 0;
+ other.m_deletedCount = 0;
#if DUMP_HASHTABLE_STATS_PER_TABLE
- m_stats.swap(other.m_stats);
+ m_stats = WTFMove(other.m_stats);
+ other.m_stats = nullptr;
#endif
}
template<typename Key, typename Value, typename Extractor, typename HashFunctions, typename Traits, typename KeyTraits>
- auto HashTable<Key, Value, Extractor, HashFunctions, Traits, KeyTraits>::operator=(const HashTable& other) -> HashTable&
+ inline auto HashTable<Key, Value, Extractor, HashFunctions, Traits, KeyTraits>::operator=(HashTable&& other) -> HashTable&
{
- // FIXME: It's likely that this can be improved, for static analyses that use
- // HashSets. https://bugs.webkit.org/show_bug.cgi?id=118455
- HashTable tmp(other);
- swap(tmp);
+ HashTable temp = WTFMove(other);
+ swap(temp);
return *this;
}
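With the move constructor and move assignment defined above, a HashTable-backed container can now be transferred without copying buckets, and the moved-from table is left empty (table pointer nulled, counts zeroed). A hedged usage sketch, assuming WTF::HashMap picks these up through its implicitly generated member moves (contents illustrative):

    #include <wtf/Assertions.h>
    #include <wtf/HashMap.h>

    static WTF::HashMap<unsigned, unsigned> buildSquares()
    {
        WTF::HashMap<unsigned, unsigned> map;
        for (unsigned i = 1; i <= 4; ++i)
            map.add(i, i * i);
        return map; // moved (or elided), not deep-copied, now that HashTable is movable
    }

    static void useSquares()
    {
        auto squares = buildSquares();
        WTF::HashMap<unsigned, unsigned> other = WTFMove(squares); // squares is left empty
        ASSERT_UNUSED(other, other.size() == 4);
    }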
@@ -1206,9 +1353,9 @@ namespace WTF {
if (!m_table)
return;
- int count = 0;
- int deletedCount = 0;
- for (int j = 0; j < m_tableSize; ++j) {
+ unsigned count = 0;
+ unsigned deletedCount = 0;
+ for (unsigned j = 0; j < m_tableSize; ++j) {
ValueType* entry = m_table + j;
if (isEmptyBucket(*entry))
continue;
@@ -1239,7 +1386,7 @@ namespace WTF {
template<typename Key, typename Value, typename Extractor, typename HashFunctions, typename Traits, typename KeyTraits>
void HashTable<Key, Value, Extractor, HashFunctions, Traits, KeyTraits>::invalidateIterators()
{
- std::lock_guard<std::mutex> lock(*m_mutex);
+ std::lock_guard<Lock> lock(*m_mutex);
const_iterator* next;
for (const_iterator* p = m_iterators; p; p = next) {
next = p->m_next;
@@ -1261,7 +1408,7 @@ namespace WTF {
if (!table) {
it->m_next = 0;
} else {
- std::lock_guard<std::mutex> lock(*table->m_mutex);
+ std::lock_guard<Lock> lock(*table->m_mutex);
ASSERT(table->m_iterators != it);
it->m_next = table->m_iterators;
table->m_iterators = it;
@@ -1275,15 +1422,12 @@ namespace WTF {
template<typename Key, typename Value, typename Extractor, typename HashFunctions, typename Traits, typename KeyTraits>
void removeIterator(HashTableConstIterator<Key, Value, Extractor, HashFunctions, Traits, KeyTraits>* it)
{
- typedef HashTable<Key, Value, Extractor, HashFunctions, Traits, KeyTraits> HashTableType;
- typedef HashTableConstIterator<Key, Value, Extractor, HashFunctions, Traits, KeyTraits> const_iterator;
-
// Delete iterator from doubly-linked list of iterators.
if (!it->m_table) {
ASSERT(!it->m_next);
ASSERT(!it->m_previous);
} else {
- std::lock_guard<std::mutex> lock(*it->m_table->m_mutex);
+ std::lock_guard<Lock> lock(*it->m_table->m_mutex);
if (it->m_next) {
ASSERT(it->m_next->m_previous == it);
it->m_next->m_previous = it->m_previous;
@@ -1307,7 +1451,7 @@ namespace WTF {
// iterator adapters
- template<typename HashTableType, typename ValueType> struct HashTableConstIteratorAdapter {
+ template<typename HashTableType, typename ValueType> struct HashTableConstIteratorAdapter : public std::iterator<std::forward_iterator_tag, ValueType, std::ptrdiff_t, const ValueType*, const ValueType&> {
HashTableConstIteratorAdapter() {}
HashTableConstIteratorAdapter(const typename HashTableType::const_iterator& impl) : m_impl(impl) {}
@@ -1321,7 +1465,7 @@ namespace WTF {
typename HashTableType::const_iterator m_impl;
};
- template<typename HashTableType, typename ValueType> struct HashTableIteratorAdapter {
+ template<typename HashTableType, typename ValueType> struct HashTableIteratorAdapter : public std::iterator<std::forward_iterator_tag, ValueType, std::ptrdiff_t, ValueType*, ValueType&> {
HashTableIteratorAdapter() {}
HashTableIteratorAdapter(const typename HashTableType::iterator& impl) : m_impl(impl) {}
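Deriving the iterators and adapters from std::iterator publishes iterator_category, value_type, and the other nested typedefs, which lets std::iterator_traits and the <algorithm> header see them. A small sketch against a WTF::HashSet, whose const_iterator is the adapter above (the contents are illustrative):

    #include <algorithm>
    #include <iterator>
    #include <type_traits>
    #include <wtf/HashSet.h>

    static unsigned countSmallKeys(const WTF::HashSet<unsigned>& keys)
    {
        using Iterator = WTF::HashSet<unsigned>::const_iterator;
        static_assert(std::is_same<std::iterator_traits<Iterator>::iterator_category,
            std::forward_iterator_tag>::value, "hash iterators advertise forward traversal");
        return static_cast<unsigned>(std::count_if(keys.begin(), keys.end(),
            [](unsigned key) { return key < 10; }));
    }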
@@ -1392,5 +1536,3 @@ namespace WTF {
} // namespace WTF
#include <wtf/HashIterators.h>
-
-#endif // WTF_HashTable_h