Index: runtime/lib/collection_patch.dart |
diff --git a/runtime/lib/collection_patch.dart b/runtime/lib/collection_patch.dart |
index 33f6ac5b61d0ea47fd1355d9f9f4bbe763fdb1bc..3716cca786927354b2ce06b4ab104f620a5dcb9e 100644 |
--- a/runtime/lib/collection_patch.dart |
+++ b/runtime/lib/collection_patch.dart |
@@ -38,7 +38,7 @@ class _HashMap<K, V> implements HashMap<K, V> { |
Type get runtimeType => HashMap; |
int _elementCount = 0; |
- List<_HashMapEntry> _buckets = new List<_HashMapEntry>(_INITIAL_CAPACITY); |
+ List<_HashMapEntry> _buckets = new List(_INITIAL_CAPACITY); |
int _modificationCount = 0; |
int get length => _elementCount; |
@@ -170,7 +170,7 @@ class _HashMap<K, V> implements HashMap<K, V> { |
void clear() { |
_elementCount = 0; |
- _buckets = new List<_HashMapEntry>(_INITIAL_CAPACITY); |
+ _buckets = new List(_INITIAL_CAPACITY); |
_modificationCount = (_modificationCount + 1) & _MODIFICATION_COUNT_MASK; |
} |
@@ -201,7 +201,7 @@ class _HashMap<K, V> implements HashMap<K, V> { |
List oldBuckets = _buckets; |
int oldLength = oldBuckets.length; |
int newLength = oldLength << 1; |
- List newBuckets = new List<_HashMapEntry>(newLength); |
+ List newBuckets = new List(newLength); |
for (int i = 0; i < oldLength; i++) { |
_HashMapEntry entry = oldBuckets[i]; |
while (entry != null) { |
@@ -504,276 +504,86 @@ class _HashMapValueIterator<V> extends _HashMapIterator<V> { |
} |
patch class HashSet<E> { |
- /* patch */ factory HashSet({ bool equals(E e1, E e2), |
- int hashCode(E e), |
- bool isValidKey(potentialKey) }) { |
- if (isValidKey == null) { |
- if (hashCode == null) { |
- if (equals == null) { |
- return new _HashSet<E>(); |
- } |
- if (identical(identical, equals)) { |
- return new _IdentityHashSet<E>(); |
- } |
- _hashCode = _defaultHashCode; |
- } else if (equals == null) { |
- _equals = _defaultEquals; |
- } |
- isValidKey = new _TypeTest<E>().test; |
- } else { |
- if (hashCode == null) hashCode = _defaultHashCode; |
- if (equals == null) equals = _defaultEquals; |
- } |
- return new _CustomHashSet<E>(equals, hashCode, isValidKey); |
- } |
-} |
- |
-class _HashSet<E> extends _HashSetBase<E> implements HashSet<E> { |
static const int _INITIAL_CAPACITY = 8; |
+ final _HashTable<E> _table; |
- List<_HashSetEntry> _buckets = new List<_HashSetEntry>(_INITIAL_CAPACITY); |
- int _elementCount = 0; |
- int _modificationCount = 0; |
+ /* patch */ HashSet() : _table = new _HashTable(_INITIAL_CAPACITY) { |
+ _table._container = this; |
+ } |
- bool _equals(e1, e2) => e1 == e2; |
- int _hashCode(e) => e.hashCode; |
+ factory HashSet.from(Iterable<E> iterable) { |
+ return new HashSet<E>()..addAll(iterable); |
+ } |
// Iterable. |
- Iterator<E> get iterator => new _HashSetIterator<E>(this); |
- |
- int get length => _elementCount; |
+ /* patch */ Iterator<E> get iterator => new _HashTableKeyIterator<E>(_table); |
- bool get isEmpty => _elementCount == 0; |
- |
- bool get isNotEmpty => _elementCount != 0; |
+ /* patch */ int get length => _table._elementCount; |
- bool contains(Object object) { |
- int index = _hashCode(object) & (_buckets.length - 1); |
- HashSetEntry entry = _buckets[index]; |
- while (entry != null) { |
- if (_equals(entry.key, object)) return true; |
- entry = entry.next; |
- } |
- return false; |
- } |
+ /* patch */ bool get isEmpty => _table._elementCount == 0; |
- // Set |
+ /* patch */ bool get isNotEmpty => !isEmpty; |
- void _add(E element) { |
- int hashCode = _hashCode(element); |
- int index = hashCode & (_buckets.length - 1); |
- HashSetEntry entry = _buckets[index]; |
- while (entry != null) { |
- if (_equals(entry.key, element)) return; |
- entry = entry.next; |
- } |
- _addEntry(element, hashCode, index); |
- } |
+ /* patch */ bool contains(Object object) => _table._get(object) >= 0; |
- void add(E element) { |
- _add(element); |
+ // Collection. |
+ /* patch */ void add(E element) { |
+ _table._put(element); |
+ _table._checkCapacity(); |
} |
- void addAll(Iterable<E> objects) { |
- int ctr = 0; |
+ /* patch */ void addAll(Iterable<E> objects) { |
for (E object in objects) { |
- ctr++; |
- _add(object); |
+ _table._put(object); |
+ _table._checkCapacity(); |
} |
} |
- bool _remove(Object object, int hashCode) { |
- int index = hashCode & (_buckets.length - 1); |
- _HashSetEntry entry = _buckets[index]; |
- _HashSetEntry previous = null; |
- while (entry != null) { |
- if (_equals(entry.key, object)) { |
- _HashSetEntry next = entry.remove(); |
- if (previous == null) { |
- _buckets[index] = next; |
- } else { |
- previous.next = next; |
- } |
- _elementCount--; |
- _modificationCount = |
- (_modificationCount + 1) & _MODIFICATION_COUNT_MASK; |
- return true; |
- } |
- previous = entry; |
- entry = entry.next; |
- } |
- return false; |
+ /* patch */ bool remove(Object object) { |
+ int offset = _table._remove(object); |
+ _table._checkCapacity(); |
+ return offset >= 0; |
} |
- bool remove(Object object) => _remove(object, _hashCode(object)); |
- |
- void removeAll(Iterable<Object> objectsToRemove) { |
+ /* patch */ void removeAll(Iterable<Object> objectsToRemove) { |
for (Object object in objectsToRemove) { |
- _remove(object, _hashCode(object)); |
+ _table._remove(object); |
+ _table._checkCapacity(); |
} |
} |
void _filterWhere(bool test(E element), bool removeMatching) { |
- int length = _buckets.length; |
- for (int index = 0; index < length; index++) { |
- HashSetEntry entry = _buckets[index]; |
- HashSetEntry previous = null; |
- while (entry != null) { |
- int modificationCount = _modificationCount; |
- bool testResult = test(entry.key); |
- if (modificationCount != _modificationCount) { |
- throw new ConcurrentModificationError(this); |
- } |
- if (testResult == removeMatching) { |
- HashSetEntry next = entry.remove(); |
- if (previous == null) { |
- _buckets[index] = next; |
- } else { |
- previous.next = next; |
- } |
- _elementCount--; |
- _modificationCount = |
- (_modificationCount + 1) & _MODIFICATION_COUNT_MASK; |
- entry = next; |
- } else { |
- previous = entry; |
- entry = entry.next; |
+ int entrySize = _table._entrySize; |
+ int length = _table._table.length; |
+ for (int offset = 0; offset < length; offset += entrySize) { |
+ Object entry = _table._table[offset]; |
+ if (!_table._isFree(entry)) { |
+ E key = identical(entry, _NULL) ? null : entry; |
+ int modificationCount = _table._modificationCount; |
+ bool shouldRemove = (removeMatching == test(key)); |
+ _table._checkModification(modificationCount); |
+ if (shouldRemove) { |
+ _table._deleteEntry(offset); |
} |
} |
} |
+ _table._checkCapacity(); |
} |
- void removeWhere(bool test(E element)) { |
+ /* patch */ void removeWhere(bool test(E element)) { |
_filterWhere(test, true); |
} |
- void retainWhere(bool test(E element)) { |
+ /* patch */ void retainWhere(bool test(E element)) { |
_filterWhere(test, false); |
} |
- void clear() { |
- _elementCount = 0; |
- _buckets = new List<HashSetEntry>(_INITIAL_CAPACITY); |
- _modificationCount++; |
- } |
- |
- void _addEntry(E key, int hashCode, int index) { |
- _buckets[index] = new _HashSetEntry(key, hashCode, _buckets[index]); |
- int newElements = _elementCount + 1; |
- _elementCount = newElements; |
- int length = _buckets.length; |
- // If we end up with more than 75% non-empty entries, we |
- // resize the backing store. |
- if ((newElements << 2) > ((length << 1) + length)) _resize(); |
- _modificationCount = (_modificationCount + 1) & _MODIFICATION_COUNT_MASK; |
+ /* patch */ void clear() { |
+ _table._clear(); |
} |
- |
- void _resize() { |
- int oldLength = _buckets.length; |
- int newLength = oldLength << 1; |
- List oldBuckets = _buckets; |
- List newBuckets = new List<_HashSetEntry>(newLength); |
- for (int i = 0; i < oldLength; i++) { |
- _HashSetEntry entry = oldBuckets[i]; |
- while (entry != null) { |
- _HashSetEntry next = entry.next; |
- int newIndex = entry.hashCode & (newLength - 1); |
- entry.next = newBuckets[newIndex]; |
- newBuckets[newIndex] = entry; |
- entry = next; |
- } |
- } |
- _buckets = newBuckets; |
- } |
- |
- HashSet<E> _newSet() => new _HashSet<E>(); |
-} |
- |
-class _IdentityHashSet<E> extends _HashSet<E> { |
- bool _equals(e1, e2) => identical(e1, e2); |
- HashSet<E> _newSet() => new _IdentityHashSet<E>(); |
-} |
- |
-class _CustomHashSet<E> extends _HashSet<E> { |
- final _Equality<E> _equality; |
- final _Hasher<E> _hasher; |
- final _Predicate _validKey; |
- _CustomHashSet(this._equality, this._hasher, this._validKey); |
- |
- E operator[](Object key) { |
- if (!_validKey(key)) return null; |
- return super[key]; |
- } |
- |
- bool remove(Object key) { |
- if (!_validKey(key)) return false; |
- return super.remove(key); |
- } |
- |
- bool containsKey(Object key) { |
- if (!_validKey(key)) return false; |
- return super.containsKey(key); |
- } |
- |
- bool _equals(e1, e2) => _equality(e1, e2); |
- int _hashCode(e) => _hasher(e); |
- |
- HashSet<E> _newSet() => new _CustomHashSet<E>(_equality, _hasher, _validKey); |
-} |
- |
-class _HashSetEntry { |
- final key; |
- final int hashCode; |
- _HashSetEntry next; |
- _HashSetEntry(this.key, this.hashCode, this.next); |
- |
- _HashSetEntry remove() { |
- _HashSetEntry result = next; |
- next = null; |
- return result; |
- } |
-} |
- |
-class _HashSetIterator<E> implements Iterator<E> { |
- final _HashSet _set; |
- final int _modificationCount; |
- int _index = 0; |
- _HashSetEntry _next = null; |
- E _current = null; |
- |
- _HashSetIterator(_HashSet hashSet) |
- : _set = hashSet, _modificationCount = hashSet._modificationCount; |
- |
- bool moveNext() { |
- if (_modificationCount != _set._modificationCount) { |
- throw new ConcurrentModificationError(_set); |
- } |
- if (_next != null) { |
- _current = _next.key; |
- _next = _next.next; |
- return true; |
- } |
- List<_HashSetEntry> buckets = _set._buckets; |
- while (_index < buckets.length) { |
- _next = buckets[_index]; |
- _index = _index + 1; |
- if (_next != null) { |
- _current = _next.key; |
- _next = _next.next; |
- return true; |
- } |
- } |
- _current = null; |
- return false; |
- } |
- |
- E get current => _current; |
} |
class _LinkedHashMapEntry extends _HashMapEntry { |
- /// Double-linked list of entries of a linked hash map. |
- /// The _LinkedHashMap itself is the head of the list, so the type is "var". |
- /// Both are initialized to `this` when initialized. |
var _nextEntry; |
var _previousEntry; |
_LinkedHashMapEntry(key, value, int hashCode, _LinkedHashMapEntry next, |
@@ -849,11 +659,6 @@ class _LinkedHashMapValueIterator<V> extends _LinkedHashMapIterator<V> { |
* A hash-based map that iterates keys and values in key insertion order. |
*/ |
patch class LinkedHashMap<K, V> { |
- /// Holds a double-linked list of entries in insertion order. |
- /// The fields have the same name as the ones in [_LinkedHashMapEntry], |
- /// and this map is itself used as the head entry of the list. |
- /// Set to `this` when initialized, representing the empty list (containing |
- /// only the head entry itself). |
var _nextEntry; |
var _previousEntry; |
@@ -933,7 +738,6 @@ abstract class _LinkedHashMapMixin<K, V> implements LinkedHashMap<K, V> { |
buckets[index] = entry; |
int newElements = _elementCount + 1; |
_elementCount = newElements; |
- |
// If we end up with more than 75% non-empty entries, we |
// resize the backing store. |
if ((newElements << 2) > ((length << 1) + length)) _resize(); |
@@ -984,192 +788,703 @@ class _LinkedCustomHashMap<K, V> extends _CustomHashMap<K, V> |
} |
-patch class LinkedHashSet<E> { |
- /* patch */ factory LinkedHashSet({ bool equals(E e1, E e2), |
- int hashCode(E e), |
- bool isValidKey(potentialKey) }) { |
- if (isValidKey == null) { |
- if (hashCode == null) { |
- if (equals == null) { |
- return new _LinkedHashSet<E>(); |
- } |
- if (identical(identical, equals)) { |
- return new _LinkedIdentityHashSet<E>(); |
- } |
- _hashCode = _defaultHashCode; |
- } else if (equals == null) { |
- _equals = _defaultEquals; |
+patch class LinkedHashSet<E> extends _HashSetBase<E> { |
+ static const int _INITIAL_CAPACITY = 8; |
+ _LinkedHashTable<E> _table; |
+ |
+ /* patch */ LinkedHashSet() { |
+ _table = new _LinkedHashTable(_INITIAL_CAPACITY); |
+ _table._container = this; |
+ } |
+ |
+ // Iterable. |
+ /* patch */ Iterator<E> get iterator { |
+ return new _LinkedHashTableKeyIterator<E>(_table); |
+ } |
+ |
+ /* patch */ int get length => _table._elementCount; |
+ |
+ /* patch */ bool get isEmpty => _table._elementCount == 0; |
+ |
+ /* patch */ bool get isNotEmpty => !isEmpty; |
+ |
+ /* patch */ bool contains(Object object) => _table._get(object) >= 0; |
+ |
+ /* patch */ void forEach(void action(E element)) { |
+ int offset = _table._next(_LinkedHashTable._HEAD_OFFSET); |
+ int modificationCount = _table._modificationCount; |
+ while (offset != _LinkedHashTable._HEAD_OFFSET) { |
+ E key = _table._key(offset); |
+ action(key); |
+ _table._checkModification(modificationCount); |
+ offset = _table._next(offset); |
+ } |
+ } |
+ |
+ /* patch */ E get first { |
+ int firstOffset = _table._next(_LinkedHashTable._HEAD_OFFSET); |
+ if (firstOffset == _LinkedHashTable._HEAD_OFFSET) { |
+ throw new StateError("No elements"); |
+ } |
+ return _table._key(firstOffset); |
+ } |
+ |
+ /* patch */ E get last { |
+ int lastOffset = _table._prev(_LinkedHashTable._HEAD_OFFSET); |
+ if (lastOffset == _LinkedHashTable._HEAD_OFFSET) { |
+ throw new StateError("No elements"); |
+ } |
+ return _table._key(lastOffset); |
+ } |
+ |
+ // Collection. |
+ void _filterWhere(bool test(E element), bool removeMatching) { |
+ int entrySize = _table._entrySize; |
+ int length = _table._table.length; |
+ int offset = _table._next(_LinkedHashTable._HEAD_OFFSET); |
+ while (offset != _LinkedHashTable._HEAD_OFFSET) { |
+ E key = _table._key(offset); |
+ int nextOffset = _table._next(offset); |
+ int modificationCount = _table._modificationCount; |
+ bool shouldRemove = (removeMatching == test(key)); |
+ _table._checkModification(modificationCount); |
+ if (shouldRemove) { |
+ _table._deleteEntry(offset); |
} |
- isValidKey = new _TypeTest<E>().test; |
- } else { |
- if (hashCode == null) hashCode = _defaultHashCode; |
- if (equals == null) equals = _defaultEquals; |
+ offset = nextOffset; |
} |
- return new _LinkedCustomHashSet<E>(equals, hashCode, isValidKey); |
+ _table._checkCapacity(); |
} |
-} |
-class _LinkedHashSetEntry extends _HashSetEntry { |
- /// Links this element into a double-linked list of elements of a hash set. |
- /// The hash set object itself is used as the head entry of the list, so |
- /// the field is typed as "var". |
- /// Both links are initialized to `this` when the object is created. |
- var _nextEntry; |
- var _previousEntry; |
- _LinkedHashSetEntry(var key, int hashCode, _LinkedHashSetEntry next, |
- this._previousEntry, this._nextEntry) |
- : super(key, hashCode, next) { |
- _previousEntry._nextEntry = _nextEntry._previousEntry = this; |
+ /* patch */ void add(E element) { |
+ _table._put(element); |
+ _table._checkCapacity(); |
} |
- _LinkedHashSetEntry remove() { |
- _previousEntry._nextEntry = _nextEntry; |
- _nextEntry._previousEntry = _previousEntry; |
- _nextEntry = _previousEntry = this; |
- return super.remove(); |
+ /* patch */ void addAll(Iterable<E> objects) { |
+ for (E object in objects) { |
+ _table._put(object); |
+ _table._checkCapacity(); |
+ } |
+ } |
+ |
+ /* patch */ bool remove(Object object) { |
+ int offset = _table._remove(object); |
+ if (offset >= 0) { |
+ _table._checkCapacity(); |
+ return true; |
+ } |
+ return false; |
+ } |
+ |
+ /* patch */ void removeAll(Iterable objectsToRemove) { |
+ for (Object object in objectsToRemove) { |
+ if (_table._remove(object) >= 0) { |
+ _table._checkCapacity(); |
+ } |
+ } |
+ } |
+ |
+ /* patch */ void removeWhere(bool test(E element)) { |
+ _filterWhere(test, true); |
+ } |
+ |
+ /* patch */ void retainWhere(bool test(E element)) { |
+ _filterWhere(test, false); |
+ } |
+ |
+ /* patch */ void clear() { |
+ _table._clear(); |
} |
} |
-class _LinkedHashSet<E> extends _HashSet<E> |
- implements LinkedHashSet<E> { |
- /// Holds a double linked list of the element entries of the set in |
- /// insertion order. |
- /// The fields have the same names as the ones in [_LinkedHashSetEntry], |
- /// allowing this object to be used as the head entry of the list. |
- /// The fields are initialized to `this` when created, representing the |
- /// empty list that only contains the head entry. |
- var _nextEntry; |
- var _previousEntry; |
+class _DeadEntry { |
+ const _DeadEntry(); |
+} |
- _LinkedHashSet() { |
- _nextEntry = _previousEntry = this; |
+class _NullKey { |
+ const _NullKey(); |
+ int get hashCode => null.hashCode; |
+} |
+ |
+const _TOMBSTONE = const _DeadEntry(); |
+const _NULL = const _NullKey(); |
+ |
+class _HashTable<K> { |
+ /** |
+ * Table of entries with [_entrySize] slots per entry. |
+ * |
+ * Capacity in entries must be a power of two. |
+ */ |
+ List _table; |
+ /** Current capacity. Always equal to [:_table.length ~/ _entrySize:]. */ |
+ int _capacity; |
+ /** Count of occupied entries, including deleted ones. */ |
+ int _entryCount = 0; |
+ /** Count of deleted entries. */ |
+ int _deletedCount = 0; |
+ /** Counter incremented when table is modified. */ |
+ int _modificationCount = 0; |
+ /** If set, used as the source object for [ConcurrentModificationError]s. */ |
+ Object _container; |
+ |
+ _HashTable(int initialCapacity) : _capacity = initialCapacity { |
+ _table = _createTable(initialCapacity); |
} |
- // Iterable. |
- Iterator<E> get iterator => new _LinkedHashSetIterator<E>(this); |
+ /** Reads key from table. Converts _NULL marker to null. */ |
+ Object _key(offset) { |
+ assert(!_isFree(_table[offset])); |
+ Object key = _table[offset]; |
+ if (!identical(key, _NULL)) return key; |
+ return null; |
+ } |
- void forEach(void action(E element)) { |
- var cursor = _nextEntry; |
- int modificationCount = _modificationCount; |
- while (!identical(cursor, this)) { |
- _LinkedHashSetEntry entry = cursor; |
- action(entry.key); |
- if (_modificationCount != modificationCount) { |
- throw new ConcurrentModificationError(this); |
+ /** Writes key to table. Converts null to _NULL marker. */ |
+ void _setKey(int offset, Object key) { |
+ if (key == null) key = _NULL; |
+ _table[offset] = key; |
+ } |
+ |
+ int get _elementCount => _entryCount - _deletedCount; |
+ |
+ /** Size of each entry. */ |
+ int get _entrySize => 1; |
+ |
+ void _checkModification(int expectedModificationCount) { |
+ if (_modificationCount != expectedModificationCount) { |
+ throw new ConcurrentModificationError(_container); |
+ } |
+ } |
+ |
+ void _recordModification() { |
+ // Value cycles after 2^30 modifications. If you keep hold of an |
+ // iterator for that long, you might miss a modification detection, |
+ // and iteration can go sour. Don't do that. |
+ _modificationCount = (_modificationCount + 1) & (0x3FFFFFFF); |
+ } |
+ |
+ /** |
+ * Create an empty table. |
+ */ |
+ List _createTable(int capacity) { |
+ List table = new List(capacity * _entrySize); |
+ return table; |
+ } |
+ |
+ /** First table probe. */ |
+ int _firstProbe(int hashCode, int capacity) { |
+ return hashCode & (capacity - 1); |
+ } |
+ |
+ /** Following table probes. */ |
+ int _nextProbe(int previousIndex, int probeCount, int capacity) { |
+ // When capacity is a power of 2, this probing algorithm (the triangular |
+ // number sequence modulo capacity) is guaranteed to hit all indices exactly |
+ // once before repeating. |
+ return (previousIndex + probeCount) & (capacity - 1); |
+ } |
+ |
+ /** Whether an object is a free-marker (either tombstone or free). */ |
+ bool _isFree(Object marker) => |
+ marker == null || identical(marker, _TOMBSTONE); |
+ |
+ /** |
+ * Look up the offset for an object in the table. |
+ * |
+ * Finds the offset of the object in the table, if it is there, |
+ * or the first free offset for its hashCode. |
+ */ |
+ int _probeForAdd(int hashCode, Object object) { |
+ int entrySize = _entrySize; |
+ int index = _firstProbe(hashCode, _capacity); |
+ int firstTombstone = -1; |
+ int probeCount = 0; |
+ while (true) { |
+ int offset = index * entrySize; |
+ Object entry = _table[offset]; |
+ if (identical(entry, _TOMBSTONE)) { |
+ if (firstTombstone < 0) firstTombstone = offset; |
+ } else if (entry == null) { |
+ if (firstTombstone < 0) return offset; |
+ return firstTombstone; |
+ } else if (identical(_NULL, entry) ? _equals(null, object) |
+ : _equals(entry, object)) { |
+ return offset; |
} |
- cursor = entry._nextEntry; |
+ // The _nextProbe is designed so that it hits |
+ // every index eventually. |
+ index = _nextProbe(index, ++probeCount, _capacity); |
} |
} |
- E get first { |
- if (identical(_nextEntry, this)) { |
- throw new StateError("No elements"); |
+ /** |
+ * Look up the offset for an object in the table. |
+ * |
+ * If the object is in the table, its offset is returned. |
+ * |
+ * If the object is not in the table, a negative value is returned. |
+ */ |
+ int _probeForLookup(int hashCode, Object object) { |
+ int entrySize = _entrySize; |
+ int index = _firstProbe(hashCode, _capacity); |
+ int probeCount = 0; |
+ while (true) { |
+ int offset = index * entrySize; |
+ Object entry = _table[offset]; |
+ if (entry == null) { |
+ return -1; |
+ } else if (!identical(_TOMBSTONE, entry)) { |
+ if (identical(_NULL, entry) ? _equals(null, object) |
+ : _equals(entry, object)) { |
+ return offset; |
+ } |
+ } |
+ // The _nextProbe is designed so that it hits |
+ // every index eventually. |
+ index = _nextProbe(index, ++probeCount, _capacity); |
} |
- _LinkedHashSetEntry entry = _nextEntry; |
- return entry.key; |
} |
- E get last { |
- if (identical(_previousEntry, this)) { |
- throw new StateError("No elements"); |
+ // Override the following two to change equality/hashCode computations |
+ |
+ /** |
+ * Compare two object for equality. |
+ * |
+ * The first object is the one already in the table, |
+ * and the second is the one being searched for. |
+ */ |
+ bool _equals(Object element, Object other) { |
+ return element == other; |
+ } |
+ |
+ /** |
+ * Compute hash-code for an object. |
+ */ |
+ int _hashCodeOf(Object object) => object.hashCode; |
+ |
+ /** |
+ * Ensure that the table isn't too full for its own good. |
+ * |
+ * Call this after adding an element. |
+ */ |
+ int _checkCapacity() { |
+ // All quantities below are entry counts, so no division by _entrySize |
+ // is needed. |
+ int freeCount = _capacity - _entryCount; |
+ if (freeCount * 4 < _capacity || |
+ freeCount < _deletedCount) { |
+ // Less than 25% free or more deleted entries than free entries. |
+ _grow(_entryCount - _deletedCount); |
} |
- _LinkedHashSetEntry entry = _previousEntry; |
- return entry.key; |
} |
- // Set. |
- void _filterWhere(bool test(E element), bool removeMatching) { |
- var cursor = _nextEntry; |
- while (!identical(cursor, this)) { |
- _LinkedHashSetEntry entry = cursor; |
- int modificationCount = _modificationCount; |
- bool testResult = test(entry.key); |
- if (modificationCount != _modificationCount) { |
- throw new ConcurrentModificationError(this); |
+ void _grow(int contentCount) { |
+ int capacity = _capacity; |
+ // Don't grow to less than twice the needed capacity. |
+ int minCapacity = contentCount * 2; |
+ while (capacity < minCapacity) { |
+ capacity *= 2; |
+ } |
+ // Reset to another table and add all existing elements. |
+ List oldTable = _table; |
+ _table = _createTable(capacity); |
+ _capacity = capacity; |
+ _entryCount = 0; |
+ _deletedCount = 0; |
+ _addAllEntries(oldTable); |
+ _recordModification(); |
+ } |
+ |
+ /** |
+ * Copies all non-free entries from the old table to the new empty table. |
+ */ |
+ void _addAllEntries(List oldTable) { |
+ for (int i = 0; i < oldTable.length; i += _entrySize) { |
+ Object object = oldTable[i]; |
+ if (!_isFree(object)) { |
+ int toOffset = _put(object); |
+ _copyEntry(oldTable, i, toOffset); |
} |
- cursor = entry._nextEntry; |
- if (testResult == removeMatching) { |
- _remove(entry.key, entry.hashCode); |
+ } |
+ } |
+ |
+ /** |
+ * Copies everything but the key element from one entry to another. |
+ * |
+ * Called while growing the base array. |
+ * |
+ * Override this if any non-key fields need copying. |
+ */ |
+ void _copyEntry(List fromTable, int fromOffset, int toOffset) {} |
+ |
+ // The following three methods are for simple get/set/remove operations. |
+ // They only affect the key of an entry. The remaining fields must be |
+ // filled by the caller. |
+ |
+ /** |
+ * Returns the offset of a key in [_table], or negative if it's not there. |
+ */ |
+ int _get(Object key) { |
+ return _probeForLookup(_hashCodeOf(key), key); |
+ } |
+ |
+ /** |
+ * Puts the key into the table and returns its offset into [_table]. |
+ * |
+ * If [_entrySize] is greater than 1, the caller should fill the |
+ * remaining fields. |
+ * |
+ * Remember to call [_checkCapacity] after using this method. |
+ */ |
+ int _put(K key) { |
+ int offset = _probeForAdd(_hashCodeOf(key), key); |
+ Object oldEntry = _table[offset]; |
+ if (oldEntry == null) { |
+ _entryCount++; |
+ } else if (identical(oldEntry, _TOMBSTONE)) { |
+ _deletedCount--; |
+ } else { |
+ return offset; |
+ } |
+ _setKey(offset, key); |
+ _recordModification(); |
+ return offset; |
+ } |
+ |
+ /** |
+ * Removes a key from the table and returns its offset into [_table]. |
+ * |
+ * Returns a negative value if the key was not in the table. |
+ * If [_entrySize] is greater than 1, the caller should clean up the |
+ * remaining fields. |
+ */ |
+ int _remove(Object key) { |
+ int offset = _probeForLookup(_hashCodeOf(key), key); |
+ if (offset >= 0) { |
+ _deleteEntry(offset); |
+ } |
+ return offset; |
+ } |
+ |
+ /** Clears the table completely, leaving it empty. */ |
+ void _clear() { |
+ if (_elementCount == 0) return; |
+ for (int i = 0; i < _table.length; i++) { |
+ _table[i] = null; |
+ } |
+ _entryCount = _deletedCount = 0; |
+ _recordModification(); |
+ } |
+ |
+ /** Clears an entry in the table. */ |
+ void _deleteEntry(int offset) { |
+ assert(!_isFree(_table[offset])); |
+ _setKey(offset, _TOMBSTONE); |
+ _deletedCount++; |
+ _recordModification(); |
+ } |
+} |
+ |
+/** |
+ * Generic iterable based on a [_HashTable]. |
+ */ |
+abstract class _HashTableIterable<E> extends IterableBase<E> { |
+ final _HashTable _hashTable; |
+ _HashTableIterable(this._hashTable); |
+ |
+ Iterator<E> get iterator; |
+ |
+ /** |
+ * Return the iterated value for a given entry. |
+ */ |
+ E _valueAt(int offset, Object key); |
+ |
+ int get length => _hashTable._elementCount; |
+ |
+ bool get isEmpty => _hashTable._elementCount == 0; |
+ |
+ void forEach(void action(E element)) { |
+ int entrySize = _hashTable._entrySize; |
+ List table = _hashTable._table; |
+ int modificationCount = _hashTable._modificationCount; |
+ for (int offset = 0; offset < table.length; offset += entrySize) { |
+ Object entry = table[offset]; |
+ if (!_hashTable._isFree(entry)) { |
+ E value = _valueAt(offset, entry); |
+ action(value); |
} |
+ _hashTable._checkModification(modificationCount); |
} |
} |
+} |
- void _addEntry(E key, int hashCode, int index) { |
- _buckets[index] = |
- new _LinkedHashSetEntry(key, hashCode, _buckets[index], |
- _previousEntry, this); |
- int newElements = _elementCount + 1; |
- _elementCount = newElements; |
- int length = _buckets.length; |
- // If we end up with more than 75% non-empty entries, we |
- // resize the backing store. |
- if ((newElements << 2) > ((length << 1) + length)) _resize(); |
- _modificationCount = (_modificationCount + 1) & _MODIFICATION_COUNT_MASK; |
+abstract class _HashTableIterator<E> implements Iterator<E> { |
+ final _HashTable _hashTable; |
+ final int _modificationCount; |
+ /** Location right after last found element. */ |
+ int _offset = 0; |
+ E _current = null; |
+ |
+ _HashTableIterator(_HashTable hashTable) |
+ : _hashTable = hashTable, |
+ _modificationCount = hashTable._modificationCount; |
+ |
+ bool moveNext() { |
+ _hashTable._checkModification(_modificationCount); |
+ |
+ List table = _hashTable._table; |
+ int entrySize = _hashTable._entrySize; |
+ |
+ while (_offset < table.length) { |
+ int currentOffset = _offset; |
+ Object entry = table[currentOffset]; |
+ _offset = currentOffset + entrySize; |
+ if (!_hashTable._isFree(entry)) { |
+ _current = _valueAt(currentOffset, entry); |
+ return true; |
+ } |
+ } |
+ _current = null; |
+ return false; |
} |
- HashSet<E> _newSet() => new _LinkedHashSet<E>(); |
+ E get current => _current; |
+ |
+ E _valueAt(int offset, Object key); |
} |
-class _LinkedIdentityHashSet<E> extends _LinkedHashSet<E> { |
- bool _equals(e1, e2) => identical(e1, e2); |
- HashSet<E> _newSet() => new _LinkedIdentityHashSet<E>(); |
+class _HashTableKeyIterable<K> extends _HashTableIterable<K> { |
+ _HashTableKeyIterable(_HashTable<K> hashTable) : super(hashTable); |
+ |
+ Iterator<K> get iterator => new _HashTableKeyIterator<K>(_hashTable); |
+ |
+ K _valueAt(int offset, Object key) { |
+ if (identical(key, _NULL)) return null; |
+ return key; |
+ } |
+ |
+ bool contains(Object value) => _hashTable._get(value) >= 0; |
} |
-class _LinkedCustomHashSet<E> extends _LinkedHashSet<E> { |
- final _Equality<E> _equality; |
- final _Hasher<E> _hasher; |
- final _Predicate _validKey; |
+class _HashTableKeyIterator<K> extends _HashTableIterator<K> { |
+ _HashTableKeyIterator(_HashTable hashTable) : super(hashTable); |
+ |
+ K _valueAt(int offset, Object key) { |
+ if (identical(key, _NULL)) return null; |
+ return key; |
+ } |
+} |
+ |
+class _HashTableValueIterable<V> extends _HashTableIterable<V> { |
+ final int _entryIndex; |
+ |
+ _HashTableValueIterable(_HashTable hashTable, this._entryIndex) |
+ : super(hashTable); |
+ |
+ Iterator<V> get iterator { |
+ return new _HashTableValueIterator<V>(_hashTable, _entryIndex); |
+ } |
+ |
+ V _valueAt(int offset, Object key) => _hashTable._table[offset + _entryIndex]; |
+} |
+ |
+class _HashTableValueIterator<V> extends _HashTableIterator<V> { |
+ final int _entryIndex; |
+ |
+ _HashTableValueIterator(_HashTable hashTable, this._entryIndex) |
+ : super(hashTable); |
+ |
+ V _valueAt(int offset, Object key) => _hashTable._table[offset + _entryIndex]; |
+} |
+ |
+class _HashMapTable<K, V> extends _HashTable<K> { |
+ static const int _INITIAL_CAPACITY = 8; |
+ static const int _VALUE_INDEX = 1; |
+ |
+ _HashMapTable() : super(_INITIAL_CAPACITY); |
+ |
+ int get _entrySize => 2; |
+ |
+ V _value(int offset) => _table[offset + _VALUE_INDEX]; |
+ void _setValue(int offset, V value) { _table[offset + _VALUE_INDEX] = value; } |
+ |
+ _copyEntry(List fromTable, int fromOffset, int toOffset) { |
+ _table[toOffset + _VALUE_INDEX] = fromTable[fromOffset + _VALUE_INDEX]; |
+ } |
+} |
- _LinkedCustomHashSet(this._equality, this._hasher, bool validKey(object)) |
- : _validKey = (validKey != null) ? validKey : new _TypeTest<E>().test; |
+/** Unique marker object for the head of a linked list of entries. */ |
+class _LinkedHashTableHeadMarker { |
+ const _LinkedHashTableHeadMarker(); |
+} |
+ |
+const _LinkedHashTableHeadMarker _HEAD_MARKER = |
+ const _LinkedHashTableHeadMarker(); |
+ |
/**
 * Hash table whose entries are additionally threaded onto a doubly-linked
 * list in insertion order.
 *
 * Each entry occupies three slots in the backing [_table]: the key, a "next"
 * link and a "prev" link. Links are integer offsets into the table. The entry
 * at [_HEAD_OFFSET] is a sentinel holding [_HEAD_MARKER]; its links anchor a
 * circular list, so an empty list has the head linked to itself.
 *
 * NOTE(review): members like [_probeForAdd], [_TOMBSTONE], [_setKey],
 * [_entryCount], [_deletedCount] and [_recordModification] come from the
 * [_HashTable] base class, which is outside this chunk — semantics inferred
 * from usage here; confirm against the base class.
 */
class _LinkedHashTable<K> extends _HashTable<K> {
  // Slot offsets of the links within an entry.
  static const _NEXT_INDEX = 1;
  static const _PREV_INDEX = 2;
  // Offset of the sentinel head entry.
  static const _HEAD_OFFSET = 0;

  _LinkedHashTable(int initialCapacity) : super(initialCapacity);

  // Three slots per entry: key, next link, prev link.
  int get _entrySize => 3;

  /**
   * Creates a fresh backing list with the sentinel head installed at offset
   * zero and its links pointing at itself (empty circular list).
   */
  List _createTable(int capacity) {
    List result = new List(capacity * _entrySize);
    result[_HEAD_OFFSET] = _HEAD_MARKER;
    result[_HEAD_OFFSET + _NEXT_INDEX] = _HEAD_OFFSET;
    result[_HEAD_OFFSET + _PREV_INDEX] = _HEAD_OFFSET;
    return result;
  }

  // Link accessors: read/write the next/prev offsets of the entry at [offset].
  int _next(int offset) => _table[offset + _NEXT_INDEX];
  void _setNext(int offset, int to) { _table[offset + _NEXT_INDEX] = to; }

  int _prev(int offset) => _table[offset + _PREV_INDEX];
  void _setPrev(int offset, int to) { _table[offset + _PREV_INDEX] = to; }

  void _linkLast(int offset) {
    // Add entry at offset at end of double-linked list, i.e. just before the
    // head sentinel, so iteration order matches insertion order.
    int last = _prev(_HEAD_OFFSET);
    _setNext(offset, _HEAD_OFFSET);
    _setPrev(offset, last);
    _setNext(last, offset);
    _setPrev(_HEAD_OFFSET, offset);
  }

  /**
   * Removes the entry at [offset] from the linked list.
   *
   * The removed entry's own links are cleared to null so it holds no stale
   * offsets, then its neighbors are spliced together.
   */
  void _unlink(int offset) {
    assert(offset != _HEAD_OFFSET);
    int next = _next(offset);
    int prev = _prev(offset);
    _setNext(offset, null);
    _setPrev(offset, null);
    _setNext(prev, next);
    _setPrev(next, prev);
  }

  /**
   * Copies all non-free entries from the old table to the new empty table.
   *
   * Walks the old table's linked list (not its slots in index order), so the
   * insertion order is preserved in the new table.
   */
  void _addAllEntries(List oldTable) {
    int offset = oldTable[_HEAD_OFFSET + _NEXT_INDEX];
    while (offset != _HEAD_OFFSET) {
      Object object = oldTable[offset];
      // Save the successor before re-inserting; _put relinks the entry.
      int nextOffset = oldTable[offset + _NEXT_INDEX];
      int toOffset = _put(object);
      _copyEntry(oldTable, offset, toOffset);
      offset = nextOffset;
    }
  }

  /**
   * Removes every entry, resetting the head links and nulling all slots past
   * the sentinel entry (the first [_entrySize] slots are preserved).
   */
  void _clear() {
    if (_elementCount == 0) return;
    _setNext(_HEAD_OFFSET, _HEAD_OFFSET);
    _setPrev(_HEAD_OFFSET, _HEAD_OFFSET);
    for (int i = _entrySize; i < _table.length; i++) {
      _table[i] = null;
    }
    _entryCount = _deletedCount = 0;
    _recordModification();
  }

  /**
   * Inserts [key] if absent and returns its entry offset either way.
   *
   * Reusing a tombstone decrements the deleted count; a fresh slot increments
   * the entry count. An already-present key is returned untouched (no
   * relinking, no modification recorded).
   */
  int _put(K key) {
    int offset = _probeForAdd(_hashCodeOf(key), key);
    Object oldEntry = _table[offset];
    if (identical(oldEntry, _TOMBSTONE)) {
      _deletedCount--;
    } else if (oldEntry == null) {
      _entryCount++;
    } else {
      return offset;
    }
    _recordModification();
    _setKey(offset, key);
    _linkLast(offset);
    return offset;
  }

  /**
   * Deletes the entry at [offset]: unlinks it from the iteration list and
   * replaces its key with a tombstone so probe chains stay intact.
   */
  void _deleteEntry(int offset) {
    _unlink(offset);
    _setKey(offset, _TOMBSTONE);
    _deletedCount++;
    _recordModification();
  }
}
+ |
/**
 * Lazy iterable exposing the keys of a [_LinkedHashTable] in its linked
 * (insertion) order.
 */
class _LinkedHashTableKeyIterable<K> extends IterableBase<K> {
  final _LinkedHashTable<K> _hashTable;

  _LinkedHashTableKeyIterable(this._hashTable);

  Iterator<K> get iterator {
    return new _LinkedHashTableKeyIterator<K>(_hashTable);
  }

  // Membership delegates to the table's lookup; a negative offset means the
  // key is absent, so this avoids a linear scan.
  bool contains(Object value) {
    return _hashTable._get(value) >= 0;
  }

  int get length {
    return _hashTable._elementCount;
  }
}
+ |
/** Iterator yielding the keys of a [_LinkedHashTable] in linked order. */
class _LinkedHashTableKeyIterator<K> extends _LinkedHashTableIterator<K> {
  _LinkedHashTableKeyIterator(_LinkedHashTable<K> hashTable) : super(hashTable);

  // The element produced for an entry is simply its key.
  K _getCurrent(int offset) {
    return _hashTable._key(offset);
  }
}
- HashSet<E> _newSet() => |
- new _LinkedCustomHashSet<E>(_equality, _hasher, _validKey); |
/**
 * Lazy iterable over the values of a [_LinkedHashTable], read from slot
 * [_valueIndex] of each entry, in linked (insertion) order.
 */
class _LinkedHashTableValueIterable<V> extends IterableBase<V> {
  final _LinkedHashTable _hashTable;
  // Position of the value relative to an entry's offset.
  final int _valueIndex;

  _LinkedHashTableValueIterable(this._hashTable, this._valueIndex);

  Iterator<V> get iterator {
    return new _LinkedHashTableValueIterator<V>(_hashTable, _valueIndex);
  }

  int get length {
    return _hashTable._elementCount;
  }
}
/** Iterator yielding the values of a [_LinkedHashTable] in linked order. */
class _LinkedHashTableValueIterator<V> extends _LinkedHashTableIterator<V> {
  // Position of the value relative to an entry's offset.
  final int _valueIndex;

  _LinkedHashTableValueIterator(_LinkedHashTable hashTable, this._valueIndex)
      : super(hashTable);

  // Reads the value directly out of the backing table.
  V _getCurrent(int offset) {
    return _hashTable._table[offset + _valueIndex];
  }
}
+ |
/**
 * Base iterator walking a [_LinkedHashTable]'s linked entry list.
 *
 * Subclasses decide what element an entry produces by implementing
 * [_getCurrent]. Iteration ends when the walk returns to the head sentinel.
 * Concurrent modification is detected by comparing the table's modification
 * count (captured at construction) on every [moveNext] —
 * [_checkModification] is expected to throw on mismatch (declared in the
 * base table class outside this chunk; confirm there).
 */
abstract class _LinkedHashTableIterator<T> implements Iterator<T> {
  final _LinkedHashTable _hashTable;
  // Modification count at the time this iterator was created.
  final int _modificationCount;
  // Offset of the next entry to visit; _HEAD_OFFSET means exhausted.
  int _offset;
  T _current;

  _LinkedHashTableIterator(_LinkedHashTable table)
      : _hashTable = table,
        _modificationCount = table._modificationCount,
        // Start at the first real entry: the head sentinel's successor.
        _offset = table._next(_LinkedHashTable._HEAD_OFFSET);

  bool moveNext() {
    _hashTable._checkModification(_modificationCount);
    if (_offset == _LinkedHashTable._HEAD_OFFSET) {
      // Back at the sentinel: iteration is complete.
      _current = null;
      return false;
    }
    _current = _getCurrent(_offset);
    _offset = _hashTable._next(_offset);
    return true;
  }

  // Extracts the element for the entry at [offset] (key, value, ...).
  T _getCurrent(int offset);

  T get current => _current;
}
+ |
/**
 * Linked hash table storing map entries.
 *
 * Entry layout: key, next link, prev link (from [_LinkedHashTable]), then
 * the value at slot [_VALUE_INDEX].
 */
class _LinkedHashMapTable<K, V> extends _LinkedHashTable<K> {
  static const int _INITIAL_CAPACITY = 8;
  // Value occupies the fourth slot, after key and both links.
  static const int _VALUE_INDEX = 3;

  _LinkedHashMapTable() : super(_INITIAL_CAPACITY);

  // Four slots per entry: key, next, prev, value.
  int get _entrySize => 4;

  V _value(int offset) {
    return _table[offset + _VALUE_INDEX];
  }

  void _setValue(int offset, V value) {
    _table[offset + _VALUE_INDEX] = value;
  }

  // Copies the value slot of the entry at [fromOffset] in [oldTable] into
  // the entry at [toOffset] here (key and links are handled by the caller).
  _copyEntry(List oldTable, int fromOffset, int toOffset) {
    var copiedValue = oldTable[fromOffset + _VALUE_INDEX];
    _table[toOffset + _VALUE_INDEX] = copiedValue;
  }
}