| Index: runtime/lib/collection_patch.dart
|
| diff --git a/runtime/lib/collection_patch.dart b/runtime/lib/collection_patch.dart
|
| index 3716cca786927354b2ce06b4ab104f620a5dcb9e..61cf8d6c089a0ef93c9e3cf483d91d9cd6cdbbed 100644
|
| --- a/runtime/lib/collection_patch.dart
|
| +++ b/runtime/lib/collection_patch.dart
|
| @@ -430,7 +430,7 @@ abstract class _HashMapIterable<E> extends IterableBase<E> {
|
| class _HashMapKeyIterable<K> extends _HashMapIterable<K> {
|
| _HashMapKeyIterable(HashMap map) : super(map);
|
| Iterator<K> get iterator => new _HashMapKeyIterator<K>(_map);
|
| - bool contains(K key) => _map.containsKey(key);
|
| + bool contains(Object key) => _map.containsKey(key);
|
| void forEach(void action(K key)) {
|
| _map.forEach((K key, _) {
|
| action(key);
|
| @@ -441,7 +441,7 @@ class _HashMapKeyIterable<K> extends _HashMapIterable<K> {
|
| class _HashMapValueIterable<V> extends _HashMapIterable<V> {
|
| _HashMapValueIterable(HashMap map) : super(map);
|
| Iterator<V> get iterator => new _HashMapValueIterator<V>(_map);
|
| - bool contains(V value) => _map.containsValue(value);
|
| + bool contains(Object value) => _map.containsValue(value);
|
| void forEach(void action(V value)) {
|
| _map.forEach((_, V value) {
|
| action(value);
|
| @@ -504,86 +504,291 @@ class _HashMapValueIterator<V> extends _HashMapIterator<V> {
|
| }
|
|
|
| patch class HashSet<E> {
|
| + /* patch */ factory HashSet({ bool equals(E e1, E e2),
|
| + int hashCode(E e),
|
| + bool isValidKey(potentialKey) }) {
|
| + if (isValidKey == null) {
|
| + if (hashCode == null) {
|
| + if (equals == null) {
|
| + return new _HashSet<E>();
|
| + }
|
| + if (identical(identical, equals)) {
|
| + return new _IdentityHashSet<E>();
|
| + }
|
| +        hashCode = _defaultHashCode;
|
| + } else if (equals == null) {
|
| +        equals = _defaultEquals;
|
| + }
|
| + isValidKey = new _TypeTest<E>().test;
|
| + } else {
|
| + if (hashCode == null) hashCode = _defaultHashCode;
|
| + if (equals == null) equals = _defaultEquals;
|
| + }
|
| + return new _CustomHashSet<E>(equals, hashCode, isValidKey);
|
| + }
|
| +}
|
| +
|
| +class _HashSet<E> extends _HashSetBase<E> implements HashSet<E> {
|
| static const int _INITIAL_CAPACITY = 8;
|
| - final _HashTable<E> _table;
|
|
|
| - /* patch */ HashSet() : _table = new _HashTable(_INITIAL_CAPACITY) {
|
| - _table._container = this;
|
| - }
|
| + List<_HashSetEntry> _buckets = new List(_INITIAL_CAPACITY);
|
| + int _elementCount = 0;
|
| + int _modificationCount = 0;
|
|
|
| - factory HashSet.from(Iterable<E> iterable) {
|
| - return new HashSet<E>()..addAll(iterable);
|
| - }
|
| + bool _equals(e1, e2) => e1 == e2;
|
| + int _hashCode(e) => e.hashCode;
|
|
|
| // Iterable.
|
| - /* patch */ Iterator<E> get iterator => new _HashTableKeyIterator<E>(_table);
|
|
|
| - /* patch */ int get length => _table._elementCount;
|
| + Iterator<E> get iterator => new _HashSetIterator<E>(this);
|
| +
|
| + int get length => _elementCount;
|
| +
|
| + bool get isEmpty => _elementCount == 0;
|
|
|
| - /* patch */ bool get isEmpty => _table._elementCount == 0;
|
| + bool get isNotEmpty => _elementCount != 0;
|
|
|
| - /* patch */ bool get isNotEmpty => !isEmpty;
|
| + bool contains(Object object) {
|
| + int index = _hashCode(object) & (_buckets.length - 1);
|
| +    _HashSetEntry entry = _buckets[index];
|
| + while (entry != null) {
|
| + if (_equals(entry.key, object)) return true;
|
| + entry = entry.next;
|
| + }
|
| + return false;
|
| + }
|
|
|
| - /* patch */ bool contains(Object object) => _table._get(object) >= 0;
|
| + // Set.
|
|
|
| - // Collection.
|
| - /* patch */ void add(E element) {
|
| - _table._put(element);
|
| - _table._checkCapacity();
|
| + void add(E element) {
|
| + int hashCode = _hashCode(element);
|
| + int index = hashCode & (_buckets.length - 1);
|
| +    _HashSetEntry entry = _buckets[index];
|
| + while (entry != null) {
|
| + if (_equals(entry.key, element)) return;
|
| + entry = entry.next;
|
| + }
|
| + _addEntry(element, hashCode, index);
|
| }
|
|
|
| - /* patch */ void addAll(Iterable<E> objects) {
|
| + void addAll(Iterable<E> objects) {
|
| +
|
|     for (E object in objects) {
|
| -      _table._put(object);
|
| -      _table._checkCapacity();
|
| +      // add() performs the duplicate and capacity checks itself.
|
| + add(object);
|
| }
|
| }
|
|
|
| - /* patch */ bool remove(Object object) {
|
| - int offset = _table._remove(object);
|
| - _table._checkCapacity();
|
| - return offset >= 0;
|
| + bool _remove(Object object, int hashCode) {
|
| + int index = hashCode & (_buckets.length - 1);
|
| + _HashSetEntry entry = _buckets[index];
|
| + _HashSetEntry previous = null;
|
| + while (entry != null) {
|
| + if (_equals(entry.key, object)) {
|
| + _HashSetEntry next = entry.remove();
|
| + if (previous == null) {
|
| + _buckets[index] = next;
|
| + } else {
|
| + previous.next = next;
|
| + }
|
| + _elementCount--;
|
| + _modificationCount =
|
| + (_modificationCount + 1) & _MODIFICATION_COUNT_MASK;
|
| + return true;
|
| + }
|
| + previous = entry;
|
| + entry = entry.next;
|
| + }
|
| + return false;
|
| }
|
|
|
| - /* patch */ void removeAll(Iterable<Object> objectsToRemove) {
|
| + bool remove(Object object) => _remove(object, _hashCode(object));
|
| +
|
| + void removeAll(Iterable<Object> objectsToRemove) {
|
| for (Object object in objectsToRemove) {
|
| - _table._remove(object);
|
| - _table._checkCapacity();
|
| + _remove(object, _hashCode(object));
|
| }
|
| }
|
|
|
| + void retainAll(Iterable<Object> objectsToRetain) {
|
| + super._retainAll(objectsToRetain, (o) => o is E);
|
| + }
|
| +
|
| void _filterWhere(bool test(E element), bool removeMatching) {
|
| - int entrySize = _table._entrySize;
|
| - int length = _table._table.length;
|
| - for (int offset = 0; offset < length; offset += entrySize) {
|
| - Object entry = _table._table[offset];
|
| - if (!_table._isFree(entry)) {
|
| - E key = identical(entry, _NULL) ? null : entry;
|
| - int modificationCount = _table._modificationCount;
|
| - bool shouldRemove = (removeMatching == test(key));
|
| - _table._checkModification(modificationCount);
|
| - if (shouldRemove) {
|
| - _table._deleteEntry(offset);
|
| + int length = _buckets.length;
|
| + for (int index = 0; index < length; index++) {
|
| +      _HashSetEntry entry = _buckets[index];
|
| +      _HashSetEntry previous = null;
|
| + while (entry != null) {
|
| + int modificationCount = _modificationCount;
|
| + bool testResult = test(entry.key);
|
| + if (modificationCount != _modificationCount) {
|
| + throw new ConcurrentModificationError(this);
|
| + }
|
| + if (testResult == removeMatching) {
|
| +          _HashSetEntry next = entry.remove();
|
| + if (previous == null) {
|
| + _buckets[index] = next;
|
| + } else {
|
| + previous.next = next;
|
| + }
|
| + _elementCount--;
|
| + _modificationCount =
|
| + (_modificationCount + 1) & _MODIFICATION_COUNT_MASK;
|
| + entry = next;
|
| + } else {
|
| + previous = entry;
|
| + entry = entry.next;
|
| }
|
| }
|
| }
|
| - _table._checkCapacity();
|
| }
|
|
|
| - /* patch */ void removeWhere(bool test(E element)) {
|
| + void removeWhere(bool test(E element)) {
|
| _filterWhere(test, true);
|
| }
|
|
|
| - /* patch */ void retainWhere(bool test(E element)) {
|
| + void retainWhere(bool test(E element)) {
|
| _filterWhere(test, false);
|
| }
|
|
|
| - /* patch */ void clear() {
|
| - _table._clear();
|
| + void clear() {
|
| + _elementCount = 0;
|
| + _buckets = new List(_INITIAL_CAPACITY);
|
| +    _modificationCount = (_modificationCount + 1) & _MODIFICATION_COUNT_MASK;
|
| + }
|
| +
|
| + void _addEntry(E key, int hashCode, int index) {
|
| + _buckets[index] = new _HashSetEntry(key, hashCode, _buckets[index]);
|
| + int newElements = _elementCount + 1;
|
| + _elementCount = newElements;
|
| + int length = _buckets.length;
|
| + // If we end up with more than 75% non-empty entries, we
|
| + // resize the backing store.
|
| + if ((newElements << 2) > ((length << 1) + length)) _resize();
|
| + _modificationCount = (_modificationCount + 1) & _MODIFICATION_COUNT_MASK;
|
| }
|
| +
|
| + void _resize() {
|
| + int oldLength = _buckets.length;
|
| + int newLength = oldLength << 1;
|
| + List oldBuckets = _buckets;
|
| + List newBuckets = new List(newLength);
|
| + for (int i = 0; i < oldLength; i++) {
|
| + _HashSetEntry entry = oldBuckets[i];
|
| + while (entry != null) {
|
| + _HashSetEntry next = entry.next;
|
| + int newIndex = entry.hashCode & (newLength - 1);
|
| + entry.next = newBuckets[newIndex];
|
| + newBuckets[newIndex] = entry;
|
| + entry = next;
|
| + }
|
| + }
|
| + _buckets = newBuckets;
|
| + }
|
| +
|
| + HashSet<E> _newSet() => new _HashSet<E>();
|
| +}
|
| +
|
| +class _IdentityHashSet<E> extends _HashSet<E> {
|
| + bool _equals(e1, e2) => identical(e1, e2);
|
| + HashSet<E> _newSet() => new _IdentityHashSet<E>();
|
| +}
|
| +
|
| +class _CustomHashSet<E> extends _HashSet<E> {
|
| + final _Equality<E> _equality;
|
| + final _Hasher<E> _hasher;
|
| + final _Predicate _validKey;
|
| + _CustomHashSet(this._equality, this._hasher, this._validKey);
|
| +
|
| + bool remove(Object element) {
|
| + if (!_validKey(element)) return false;
|
| + return super.remove(element);
|
| + }
|
| +
|
| + bool contains(Object element) {
|
| + if (!_validKey(element)) return false;
|
| + return super.contains(element);
|
| + }
|
| +
|
| + bool containsAll(Iterable<Object> elements) {
|
| + for (Object element in elements) {
|
| + if (!_validKey(element) || !this.contains(element)) return false;
|
| + }
|
| + return true;
|
| + }
|
| +
|
| + void removeAll(Iterable<Object> elements) {
|
| + for (Object element in elements) {
|
| + if (_validKey(element)) {
|
| + super._remove(element, _hasher(element));
|
| + }
|
| + }
|
| + }
|
| +
|
| + void retainAll(Iterable<Object> elements) {
|
| + super._retainAll(elements, _validKey);
|
| + }
|
| +
|
| + bool _equals(e1, e2) => _equality(e1, e2);
|
| + int _hashCode(e) => _hasher(e);
|
| +
|
| + HashSet<E> _newSet() => new _CustomHashSet<E>(_equality, _hasher, _validKey);
|
| +}
|
| +
|
| +class _HashSetEntry {
|
| + final key;
|
| + final int hashCode;
|
| + _HashSetEntry next;
|
| + _HashSetEntry(this.key, this.hashCode, this.next);
|
| +
|
| + _HashSetEntry remove() {
|
| + _HashSetEntry result = next;
|
| + next = null;
|
| + return result;
|
| + }
|
| +}
|
| +
|
| +class _HashSetIterator<E> implements Iterator<E> {
|
| + final _HashSet _set;
|
| + final int _modificationCount;
|
| + int _index = 0;
|
| + _HashSetEntry _next = null;
|
| + E _current = null;
|
| +
|
| + _HashSetIterator(_HashSet hashSet)
|
| + : _set = hashSet, _modificationCount = hashSet._modificationCount;
|
| +
|
| + bool moveNext() {
|
| + if (_modificationCount != _set._modificationCount) {
|
| + throw new ConcurrentModificationError(_set);
|
| + }
|
| + if (_next != null) {
|
| + _current = _next.key;
|
| + _next = _next.next;
|
| + return true;
|
| + }
|
| + List<_HashSetEntry> buckets = _set._buckets;
|
| + while (_index < buckets.length) {
|
| + _next = buckets[_index];
|
| + _index = _index + 1;
|
| + if (_next != null) {
|
| + _current = _next.key;
|
| + _next = _next.next;
|
| + return true;
|
| + }
|
| + }
|
| + _current = null;
|
| + return false;
|
| + }
|
| +
|
| + E get current => _current;
|
| }
|
|
|
| class _LinkedHashMapEntry extends _HashMapEntry {
|
| + /// Double-linked list of entries of a linked hash map.
|
| + /// The _LinkedHashMap itself is the head of the list, so the type is "var".
|
| + /// Both are initialized to `this` when initialized.
|
| var _nextEntry;
|
| var _previousEntry;
|
| _LinkedHashMapEntry(key, value, int hashCode, _LinkedHashMapEntry next,
|
| @@ -598,7 +803,7 @@ class _LinkedHashMapKeyIterable<K> extends IterableBase<K> {
|
| LinkedHashMap<K, dynamic> _map;
|
| _LinkedHashMapKeyIterable(this._map);
|
| Iterator<K> get iterator => new _LinkedHashMapKeyIterator<K>(_map);
|
| - bool contains(K key) => _map.containsKey(key);
|
| + bool contains(Object key) => _map.containsKey(key);
|
| bool get isEmpty => _map.isEmpty;
|
| bool get isNotEmpty => _map.isNotEmpty;
|
| int get length => _map.length;
|
| @@ -608,7 +813,7 @@ class _LinkedHashMapValueIterable<V> extends IterableBase<V> {
|
| LinkedHashMap<dynamic, V> _map;
|
| _LinkedHashMapValueIterable(this._map);
|
|   Iterator<V> get iterator => new _LinkedHashMapValueIterator<V>(_map);
|
| - bool contains(V value) => _map.containsValue(value);
|
| + bool contains(Object value) => _map.containsValue(value);
|
| bool get isEmpty => _map.isEmpty;
|
| bool get isNotEmpty => _map.isNotEmpty;
|
| int get length => _map.length;
|
| @@ -659,6 +864,11 @@ class _LinkedHashMapValueIterator<V> extends _LinkedHashMapIterator<V> {
|
| * A hash-based map that iterates keys and values in key insertion order.
|
| */
|
| patch class LinkedHashMap<K, V> {
|
| + /// Holds a double-linked list of entries in insertion order.
|
| + /// The fields have the same name as the ones in [_LinkedHashMapEntry],
|
| + /// and this map is itself used as the head entry of the list.
|
| + /// Set to `this` when initialized, representing the empty list (containing
|
| + /// only the head entry itself).
|
| var _nextEntry;
|
| var _previousEntry;
|
|
|
| @@ -738,6 +948,7 @@ abstract class _LinkedHashMapMixin<K, V> implements LinkedHashMap<K, V> {
|
| buckets[index] = entry;
|
| int newElements = _elementCount + 1;
|
| _elementCount = newElements;
|
| +
|
| // If we end up with more than 75% non-empty entries, we
|
| // resize the backing store.
|
| if ((newElements << 2) > ((length << 1) + length)) _resize();
|
| @@ -788,703 +999,213 @@ class _LinkedCustomHashMap<K, V> extends _CustomHashMap<K, V>
|
| }
|
|
|
|
|
| -patch class LinkedHashSet<E> extends _HashSetBase<E> {
|
| - static const int _INITIAL_CAPACITY = 8;
|
| - _LinkedHashTable<E> _table;
|
| -
|
| - /* patch */ LinkedHashSet() {
|
| - _table = new _LinkedHashTable(_INITIAL_CAPACITY);
|
| - _table._container = this;
|
| - }
|
| -
|
| - // Iterable.
|
| - /* patch */ Iterator<E> get iterator {
|
| - return new _LinkedHashTableKeyIterator<E>(_table);
|
| - }
|
| -
|
| - /* patch */ int get length => _table._elementCount;
|
| -
|
| - /* patch */ bool get isEmpty => _table._elementCount == 0;
|
| -
|
| - /* patch */ bool get isNotEmpty => !isEmpty;
|
| -
|
| - /* patch */ bool contains(Object object) => _table._get(object) >= 0;
|
| -
|
| - /* patch */ void forEach(void action(E element)) {
|
| - int offset = _table._next(_LinkedHashTable._HEAD_OFFSET);
|
| - int modificationCount = _table._modificationCount;
|
| - while (offset != _LinkedHashTable._HEAD_OFFSET) {
|
| - E key = _table._key(offset);
|
| - action(key);
|
| - _table._checkModification(modificationCount);
|
| - offset = _table._next(offset);
|
| - }
|
| - }
|
| -
|
| - /* patch */ E get first {
|
| - int firstOffset = _table._next(_LinkedHashTable._HEAD_OFFSET);
|
| - if (firstOffset == _LinkedHashTable._HEAD_OFFSET) {
|
| - throw new StateError("No elements");
|
| - }
|
| - return _table._key(firstOffset);
|
| - }
|
| -
|
| - /* patch */ E get last {
|
| - int lastOffset = _table._prev(_LinkedHashTable._HEAD_OFFSET);
|
| - if (lastOffset == _LinkedHashTable._HEAD_OFFSET) {
|
| - throw new StateError("No elements");
|
| - }
|
| - return _table._key(lastOffset);
|
| - }
|
| -
|
| - // Collection.
|
| - void _filterWhere(bool test(E element), bool removeMatching) {
|
| - int entrySize = _table._entrySize;
|
| - int length = _table._table.length;
|
| - int offset = _table._next(_LinkedHashTable._HEAD_OFFSET);
|
| - while (offset != _LinkedHashTable._HEAD_OFFSET) {
|
| - E key = _table._key(offset);
|
| - int nextOffset = _table._next(offset);
|
| - int modificationCount = _table._modificationCount;
|
| - bool shouldRemove = (removeMatching == test(key));
|
| - _table._checkModification(modificationCount);
|
| - if (shouldRemove) {
|
| - _table._deleteEntry(offset);
|
| - }
|
| - offset = nextOffset;
|
| - }
|
| - _table._checkCapacity();
|
| - }
|
| -
|
| - /* patch */ void add(E element) {
|
| - _table._put(element);
|
| - _table._checkCapacity();
|
| - }
|
| -
|
| - /* patch */ void addAll(Iterable<E> objects) {
|
| - for (E object in objects) {
|
| - _table._put(object);
|
| - _table._checkCapacity();
|
| - }
|
| - }
|
| -
|
| - /* patch */ bool remove(Object object) {
|
| - int offset = _table._remove(object);
|
| - if (offset >= 0) {
|
| - _table._checkCapacity();
|
| - return true;
|
| - }
|
| - return false;
|
| - }
|
| -
|
| - /* patch */ void removeAll(Iterable objectsToRemove) {
|
| - for (Object object in objectsToRemove) {
|
| - if (_table._remove(object) >= 0) {
|
| - _table._checkCapacity();
|
| +patch class LinkedHashSet<E> {
|
| + /* patch */ factory LinkedHashSet({ bool equals(E e1, E e2),
|
| + int hashCode(E e),
|
| + bool isValidKey(potentialKey) }) {
|
| + if (isValidKey == null) {
|
| + if (hashCode == null) {
|
| + if (equals == null) {
|
| + return new _LinkedHashSet<E>();
|
| + }
|
| + if (identical(identical, equals)) {
|
| + return new _LinkedIdentityHashSet<E>();
|
| + }
|
| +        hashCode = _defaultHashCode;
|
| + } else if (equals == null) {
|
| +        equals = _defaultEquals;
|
| }
|
| + isValidKey = new _TypeTest<E>().test;
|
| + } else {
|
| + if (hashCode == null) hashCode = _defaultHashCode;
|
| + if (equals == null) equals = _defaultEquals;
|
| }
|
| - }
|
| -
|
| - /* patch */ void removeWhere(bool test(E element)) {
|
| - _filterWhere(test, true);
|
| - }
|
| -
|
| - /* patch */ void retainWhere(bool test(E element)) {
|
| - _filterWhere(test, false);
|
| - }
|
| -
|
| - /* patch */ void clear() {
|
| - _table._clear();
|
| + return new _LinkedCustomHashSet<E>(equals, hashCode, isValidKey);
|
| }
|
| }
|
|
|
| -class _DeadEntry {
|
| - const _DeadEntry();
|
| -}
|
| -
|
| -class _NullKey {
|
| - const _NullKey();
|
| - int get hashCode => null.hashCode;
|
| -}
|
| -
|
| -const _TOMBSTONE = const _DeadEntry();
|
| -const _NULL = const _NullKey();
|
| -
|
| -class _HashTable<K> {
|
| - /**
|
| - * Table of entries with [_entrySize] slots per entry.
|
| - *
|
| - * Capacity in entries must be factor of two.
|
| - */
|
| - List _table;
|
| - /** Current capacity. Always equal to [:_table.length ~/ _entrySize:]. */
|
| - int _capacity;
|
| - /** Count of occupied entries, including deleted ones. */
|
| - int _entryCount = 0;
|
| - /** Count of deleted entries. */
|
| - int _deletedCount = 0;
|
| - /** Counter incremented when table is modified. */
|
| - int _modificationCount = 0;
|
| - /** If set, used as the source object for [ConcurrentModificationError]s. */
|
| - Object _container;
|
| -
|
| - _HashTable(int initialCapacity) : _capacity = initialCapacity {
|
| - _table = _createTable(initialCapacity);
|
| - }
|
| -
|
| - /** Reads key from table. Converts _NULL marker to null. */
|
| - Object _key(offset) {
|
| - assert(!_isFree(_table[offset]));
|
| - Object key = _table[offset];
|
| - if (!identical(key, _NULL)) return key;
|
| - return null;
|
| +class _LinkedHashSetEntry extends _HashSetEntry {
|
| + /// Links this element into a double-linked list of elements of a hash set.
|
| + /// The hash set object itself is used as the head entry of the list, so
|
| + /// the field is typed as "var".
|
| + /// Both links are initialized to `this` when the object is created.
|
| + var _nextEntry;
|
| + var _previousEntry;
|
| + _LinkedHashSetEntry(var key, int hashCode, _LinkedHashSetEntry next,
|
| + this._previousEntry, this._nextEntry)
|
| + : super(key, hashCode, next) {
|
| + _previousEntry._nextEntry = _nextEntry._previousEntry = this;
|
| }
|
|
|
| - /** Writes key to table. Converts null to _NULL marker. */
|
| - void _setKey(int offset, Object key) {
|
| - if (key == null) key = _NULL;
|
| - _table[offset] = key;
|
| + _LinkedHashSetEntry remove() {
|
| + _previousEntry._nextEntry = _nextEntry;
|
| + _nextEntry._previousEntry = _previousEntry;
|
| + _nextEntry = _previousEntry = this;
|
| + return super.remove();
|
| }
|
| +}
|
|
|
| - int get _elementCount => _entryCount - _deletedCount;
|
| -
|
| - /** Size of each entry. */
|
| - int get _entrySize => 1;
|
| -
|
| - void _checkModification(int expectedModificationCount) {
|
| - if (_modificationCount != expectedModificationCount) {
|
| - throw new ConcurrentModificationError(_container);
|
| - }
|
| - }
|
| +class _LinkedHashSet<E> extends _HashSet<E>
|
| + implements LinkedHashSet<E> {
|
| + /// Holds a double linked list of the element entries of the set in
|
| + /// insertion order.
|
| + /// The fields have the same names as the ones in [_LinkedHashSetEntry],
|
| + /// allowing this object to be used as the head entry of the list.
|
| + /// The fields are initialized to `this` when created, representing the
|
| + /// empty list that only contains the head entry.
|
| + var _nextEntry;
|
| + var _previousEntry;
|
|
|
| - void _recordModification() {
|
| - // Value cycles after 2^30 modifications. If you keep hold of an
|
| - // iterator for that long, you might miss a modification detection,
|
| - // and iteration can go sour. Don't do that.
|
| - _modificationCount = (_modificationCount + 1) & (0x3FFFFFFF);
|
| - }
|
| -
|
| - /**
|
| - * Create an empty table.
|
| - */
|
| - List _createTable(int capacity) {
|
| - List table = new List(capacity * _entrySize);
|
| - return table;
|
| - }
|
| -
|
| - /** First table probe. */
|
| - int _firstProbe(int hashCode, int capacity) {
|
| - return hashCode & (capacity - 1);
|
| - }
|
| -
|
| - /** Following table probes. */
|
| - int _nextProbe(int previousIndex, int probeCount, int capacity) {
|
| - // When capacity is a power of 2, this probing algorithm (the triangular
|
| - // number sequence modulo capacity) is guaranteed to hit all indices exactly
|
| - // once before repeating.
|
| - return (previousIndex + probeCount) & (capacity - 1);
|
| - }
|
| -
|
| - /** Whether an object is a free-marker (either tombstone or free). */
|
| - bool _isFree(Object marker) =>
|
| - marker == null || identical(marker, _TOMBSTONE);
|
| -
|
| - /**
|
| - * Look up the offset for an object in the table.
|
| - *
|
| - * Finds the offset of the object in the table, if it is there,
|
| - * or the first free offset for its hashCode.
|
| - */
|
| - int _probeForAdd(int hashCode, Object object) {
|
| - int entrySize = _entrySize;
|
| - int index = _firstProbe(hashCode, _capacity);
|
| - int firstTombstone = -1;
|
| - int probeCount = 0;
|
| - while (true) {
|
| - int offset = index * entrySize;
|
| - Object entry = _table[offset];
|
| - if (identical(entry, _TOMBSTONE)) {
|
| - if (firstTombstone < 0) firstTombstone = offset;
|
| - } else if (entry == null) {
|
| - if (firstTombstone < 0) return offset;
|
| - return firstTombstone;
|
| - } else if (identical(_NULL, entry) ? _equals(null, object)
|
| - : _equals(entry, object)) {
|
| - return offset;
|
| - }
|
| - // The _nextProbe is designed so that it hits
|
| - // every index eventually.
|
| - index = _nextProbe(index, ++probeCount, _capacity);
|
| - }
|
| + _LinkedHashSet() {
|
| + _nextEntry = _previousEntry = this;
|
| }
|
|
|
| - /**
|
| - * Look up the offset for an object in the table.
|
| - *
|
| - * If the object is in the table, its offset is returned.
|
| - *
|
| - * If the object is not in the table, Otherwise a negative value is returned.
|
| - */
|
| - int _probeForLookup(int hashCode, Object object) {
|
| - int entrySize = _entrySize;
|
| - int index = _firstProbe(hashCode, _capacity);
|
| - int probeCount = 0;
|
| - while (true) {
|
| - int offset = index * entrySize;
|
| - Object entry = _table[offset];
|
| - if (entry == null) {
|
| - return -1;
|
| - } else if (!identical(_TOMBSTONE, entry)) {
|
| - if (identical(_NULL, entry) ? _equals(null, object)
|
| - : _equals(entry, object)) {
|
| - return offset;
|
| - }
|
| - }
|
| - // The _nextProbe is designed so that it hits
|
| - // every index eventually.
|
| - index = _nextProbe(index, ++probeCount, _capacity);
|
| - }
|
| - }
|
| + // Iterable.
|
|
|
| - // Override the following two to change equality/hashCode computations
|
| -
|
| - /**
|
| - * Compare two object for equality.
|
| - *
|
| - * The first object is the one already in the table,
|
| - * and the second is the one being searched for.
|
| - */
|
| - bool _equals(Object element, Object other) {
|
| - return element == other;
|
| - }
|
| -
|
| - /**
|
| - * Compute hash-code for an object.
|
| - */
|
| - int _hashCodeOf(Object object) => object.hashCode;
|
| -
|
| - /**
|
| - * Ensure that the table isn't too full for its own good.
|
| - *
|
| - * Call this after adding an element.
|
| - */
|
| - int _checkCapacity() {
|
| - // Compute everything in multiples of entrySize to avoid division.
|
| - int freeCount = _capacity - _entryCount;
|
| - if (freeCount * 4 < _capacity ||
|
| - freeCount < _deletedCount) {
|
| - // Less than 25% free or more deleted entries than free entries.
|
| - _grow(_entryCount - _deletedCount);
|
| - }
|
| - }
|
| + Iterator<E> get iterator => new _LinkedHashSetIterator<E>(this);
|
|
|
| - void _grow(int contentCount) {
|
| - int capacity = _capacity;
|
| - // Don't grow to less than twice the needed capacity.
|
| - int minCapacity = contentCount * 2;
|
| - while (capacity < minCapacity) {
|
| - capacity *= 2;
|
| - }
|
| - // Reset to another table and add all existing elements.
|
| - List oldTable = _table;
|
| - _table = _createTable(capacity);
|
| - _capacity = capacity;
|
| - _entryCount = 0;
|
| - _deletedCount = 0;
|
| - _addAllEntries(oldTable);
|
| - _recordModification();
|
| - }
|
| -
|
| - /**
|
| - * Copies all non-free entries from the old table to the new empty table.
|
| - */
|
| - void _addAllEntries(List oldTable) {
|
| - for (int i = 0; i < oldTable.length; i += _entrySize) {
|
| - Object object = oldTable[i];
|
| - if (!_isFree(object)) {
|
| - int toOffset = _put(object);
|
| - _copyEntry(oldTable, i, toOffset);
|
| + void forEach(void action(E element)) {
|
| + var cursor = _nextEntry;
|
| + int modificationCount = _modificationCount;
|
| + while (!identical(cursor, this)) {
|
| + _LinkedHashSetEntry entry = cursor;
|
| + action(entry.key);
|
| + if (_modificationCount != modificationCount) {
|
| + throw new ConcurrentModificationError(this);
|
| }
|
| + cursor = entry._nextEntry;
|
| }
|
| }
|
|
|
| - /**
|
| - * Copies everything but the key element from one entry to another.
|
| - *
|
| - * Called while growing the base array.
|
| - *
|
| - * Override this if any non-key fields need copying.
|
| - */
|
| - void _copyEntry(List fromTable, int fromOffset, int toOffset) {}
|
| -
|
| - // The following three methods are for simple get/set/remove operations.
|
| - // They only affect the key of an entry. The remaining fields must be
|
| - // filled by the caller.
|
| -
|
| - /**
|
| - * Returns the offset of a key in [_table], or negative if it's not there.
|
| - */
|
| - int _get(Object key) {
|
| - return _probeForLookup(_hashCodeOf(key), key);
|
| - }
|
| -
|
| - /**
|
| - * Puts the key into the table and returns its offset into [_table].
|
| - *
|
| - * If [_entrySize] is greater than 1, the caller should fill the
|
| - * remaining fields.
|
| - *
|
| - * Remember to call [_checkCapacity] after using this method.
|
| - */
|
| - int _put(K key) {
|
| - int offset = _probeForAdd(_hashCodeOf(key), key);
|
| - Object oldEntry = _table[offset];
|
| - if (oldEntry == null) {
|
| - _entryCount++;
|
| - } else if (identical(oldEntry, _TOMBSTONE)) {
|
| - _deletedCount--;
|
| - } else {
|
| - return offset;
|
| - }
|
| - _setKey(offset, key);
|
| - _recordModification();
|
| - return offset;
|
| - }
|
| -
|
| - /**
|
| - * Removes a key from the table and returns its offset into [_table].
|
| - *
|
| - * Returns null if the key was not in the table.
|
| - * If [_entrySize] is greater than 1, the caller should clean up the
|
| - * remaining fields.
|
| - */
|
| - int _remove(Object key) {
|
| - int offset = _probeForLookup(_hashCodeOf(key), key);
|
| - if (offset >= 0) {
|
| - _deleteEntry(offset);
|
| + E get first {
|
| + if (identical(_nextEntry, this)) {
|
| + throw new StateError("No elements");
|
| }
|
| - return offset;
|
| + _LinkedHashSetEntry entry = _nextEntry;
|
| + return entry.key;
|
| }
|
|
|
| - /** Clears the table completely, leaving it empty. */
|
| - void _clear() {
|
| - if (_elementCount == 0) return;
|
| - for (int i = 0; i < _table.length; i++) {
|
| - _table[i] = null;
|
| + E get last {
|
| + if (identical(_previousEntry, this)) {
|
| + throw new StateError("No elements");
|
| }
|
| - _entryCount = _deletedCount = 0;
|
| - _recordModification();
|
| + _LinkedHashSetEntry entry = _previousEntry;
|
| + return entry.key;
|
| }
|
|
|
| - /** Clears an entry in the table. */
|
| - void _deleteEntry(int offset) {
|
| - assert(!_isFree(_table[offset]));
|
| - _setKey(offset, _TOMBSTONE);
|
| - _deletedCount++;
|
| - _recordModification();
|
| - }
|
| -}
|
| -
|
| -/**
|
| - * Generic iterable based on a [_HashTable].
|
| - */
|
| -abstract class _HashTableIterable<E> extends IterableBase<E> {
|
| - final _HashTable _hashTable;
|
| - _HashTableIterable(this._hashTable);
|
| + // Set.
|
|
|
| - Iterator<E> get iterator;
|
| -
|
| - /**
|
| - * Return the iterated value for a given entry.
|
| - */
|
| - E _valueAt(int offset, Object key);
|
| -
|
| - int get length => _hashTable._elementCount;
|
| -
|
| - bool get isEmpty => _hashTable._elementCount == 0;
|
| -
|
| - void forEach(void action(E element)) {
|
| - int entrySize = _hashTable._entrySize;
|
| - List table = _hashTable._table;
|
| - int modificationCount = _hashTable._modificationCount;
|
| - for (int offset = 0; offset < table.length; offset += entrySize) {
|
| - Object entry = table[offset];
|
| - if (!_hashTable._isFree(entry)) {
|
| - E value = _valueAt(offset, entry);
|
| - action(value);
|
| + void _filterWhere(bool test(E element), bool removeMatching) {
|
| + var cursor = _nextEntry;
|
| + while (!identical(cursor, this)) {
|
| + _LinkedHashSetEntry entry = cursor;
|
| + int modificationCount = _modificationCount;
|
| + bool testResult = test(entry.key);
|
| + if (modificationCount != _modificationCount) {
|
| + throw new ConcurrentModificationError(this);
|
| }
|
| - _hashTable._checkModification(modificationCount);
|
| - }
|
| - }
|
| -}
|
| -
|
| -abstract class _HashTableIterator<E> implements Iterator<E> {
|
| - final _HashTable _hashTable;
|
| - final int _modificationCount;
|
| - /** Location right after last found element. */
|
| - int _offset = 0;
|
| - E _current = null;
|
| -
|
| - _HashTableIterator(_HashTable hashTable)
|
| - : _hashTable = hashTable,
|
| - _modificationCount = hashTable._modificationCount;
|
| -
|
| - bool moveNext() {
|
| - _hashTable._checkModification(_modificationCount);
|
| -
|
| - List table = _hashTable._table;
|
| - int entrySize = _hashTable._entrySize;
|
| -
|
| - while (_offset < table.length) {
|
| - int currentOffset = _offset;
|
| - Object entry = table[currentOffset];
|
| - _offset = currentOffset + entrySize;
|
| - if (!_hashTable._isFree(entry)) {
|
| - _current = _valueAt(currentOffset, entry);
|
| - return true;
|
| + cursor = entry._nextEntry;
|
| + if (testResult == removeMatching) {
|
| + _remove(entry.key, entry.hashCode);
|
| }
|
| }
|
| - _current = null;
|
| - return false;
|
| }
|
|
|
| - E get current => _current;
|
| -
|
| - E _valueAt(int offset, Object key);
|
| -}
|
| -
|
| -class _HashTableKeyIterable<K> extends _HashTableIterable<K> {
|
| - _HashTableKeyIterable(_HashTable<K> hashTable) : super(hashTable);
|
| -
|
| - Iterator<K> get iterator => new _HashTableKeyIterator<K>(_hashTable);
|
| -
|
| - K _valueAt(int offset, Object key) {
|
| - if (identical(key, _NULL)) return null;
|
| - return key;
|
| - }
|
| -
|
| - bool contains(Object value) => _hashTable._get(value) >= 0;
|
| -}
|
| -
|
| -class _HashTableKeyIterator<K> extends _HashTableIterator<K> {
|
| - _HashTableKeyIterator(_HashTable hashTable) : super(hashTable);
|
| -
|
| - K _valueAt(int offset, Object key) {
|
| - if (identical(key, _NULL)) return null;
|
| - return key;
|
| + void _addEntry(E key, int hashCode, int index) {
|
| + _buckets[index] =
|
| + new _LinkedHashSetEntry(key, hashCode, _buckets[index],
|
| + _previousEntry, this);
|
| + int newElements = _elementCount + 1;
|
| + _elementCount = newElements;
|
| + int length = _buckets.length;
|
| + // If we end up with more than 75% non-empty entries, we
|
| + // resize the backing store.
|
| + if ((newElements << 2) > ((length << 1) + length)) _resize();
|
| + _modificationCount = (_modificationCount + 1) & _MODIFICATION_COUNT_MASK;
|
| }
|
| -}
|
| -
|
| -class _HashTableValueIterable<V> extends _HashTableIterable<V> {
|
| - final int _entryIndex;
|
|
|
| - _HashTableValueIterable(_HashTable hashTable, this._entryIndex)
|
| - : super(hashTable);
|
| -
|
| - Iterator<V> get iterator {
|
| - return new _HashTableValueIterator<V>(_hashTable, _entryIndex);
|
| + void clear() {
|
| + _nextEntry = _previousEntry = this;
|
| + super.clear();
|
| }
|
|
|
| - V _valueAt(int offset, Object key) => _hashTable._table[offset + _entryIndex];
|
| -}
|
| -
|
| -class _HashTableValueIterator<V> extends _HashTableIterator<V> {
|
| - final int _entryIndex;
|
| -
|
| - _HashTableValueIterator(_HashTable hashTable, this._entryIndex)
|
| - : super(hashTable);
|
| -
|
| - V _valueAt(int offset, Object key) => _hashTable._table[offset + _entryIndex];
|
| -}
|
| -
|
| -class _HashMapTable<K, V> extends _HashTable<K> {
|
| - static const int _INITIAL_CAPACITY = 8;
|
| - static const int _VALUE_INDEX = 1;
|
| -
|
| - _HashMapTable() : super(_INITIAL_CAPACITY);
|
| -
|
| - int get _entrySize => 2;
|
| -
|
| - V _value(int offset) => _table[offset + _VALUE_INDEX];
|
| - void _setValue(int offset, V value) { _table[offset + _VALUE_INDEX] = value; }
|
| -
|
| - _copyEntry(List fromTable, int fromOffset, int toOffset) {
|
| - _table[toOffset + _VALUE_INDEX] = fromTable[fromOffset + _VALUE_INDEX];
|
| - }
|
| + HashSet<E> _newSet() => new _LinkedHashSet<E>();
|
| }
|
|
|
| -/** Unique marker object for the head of a linked list of entries. */
|
| -class _LinkedHashTableHeadMarker {
|
| - const _LinkedHashTableHeadMarker();
|
| +class _LinkedIdentityHashSet<E> extends _LinkedHashSet<E> {
|
| + bool _equals(e1, e2) => identical(e1, e2);
|
| + HashSet<E> _newSet() => new _LinkedIdentityHashSet<E>();
|
| }
|
|
|
| -const _LinkedHashTableHeadMarker _HEAD_MARKER =
|
| - const _LinkedHashTableHeadMarker();
|
| +class _LinkedCustomHashSet<E> extends _LinkedHashSet<E> {
|
| + final _Equality<E> _equality;
|
| + final _Hasher<E> _hasher;
|
| + final _Predicate _validKey;
|
|
|
| -class _LinkedHashTable<K> extends _HashTable<K> {
|
| - static const _NEXT_INDEX = 1;
|
| - static const _PREV_INDEX = 2;
|
| - static const _HEAD_OFFSET = 0;
|
| + _LinkedCustomHashSet(this._equality, this._hasher, bool validKey(object))
|
| + : _validKey = (validKey != null) ? validKey : new _TypeTest<E>().test;
|
|
|
| - _LinkedHashTable(int initialCapacity) : super(initialCapacity);
|
| + bool _equals(e1, e2) => _equality(e1, e2);
|
|
|
| - int get _entrySize => 3;
|
| + int _hashCode(e) => _hasher(e);
|
|
|
| - List _createTable(int capacity) {
|
| - List result = new List(capacity * _entrySize);
|
| - result[_HEAD_OFFSET] = _HEAD_MARKER;
|
| - result[_HEAD_OFFSET + _NEXT_INDEX] = _HEAD_OFFSET;
|
| - result[_HEAD_OFFSET + _PREV_INDEX] = _HEAD_OFFSET;
|
| - return result;
|
| + bool contains(Object o) {
|
| + if (!_validKey(o)) return false;
|
| + return super.contains(o);
|
| }
|
|
|
| - int _next(int offset) => _table[offset + _NEXT_INDEX];
|
| - void _setNext(int offset, int to) { _table[offset + _NEXT_INDEX] = to; }
|
| -
|
| - int _prev(int offset) => _table[offset + _PREV_INDEX];
|
| - void _setPrev(int offset, int to) { _table[offset + _PREV_INDEX] = to; }
|
| -
|
| - void _linkLast(int offset) {
|
| - // Add entry at offset at end of double-linked list.
|
| - int last = _prev(_HEAD_OFFSET);
|
| - _setNext(offset, _HEAD_OFFSET);
|
| - _setPrev(offset, last);
|
| - _setNext(last, offset);
|
| - _setPrev(_HEAD_OFFSET, offset);
|
| - }
|
| -
|
| - void _unlink(int offset) {
|
| - assert(offset != _HEAD_OFFSET);
|
| - int next = _next(offset);
|
| - int prev = _prev(offset);
|
| - _setNext(offset, null);
|
| - _setPrev(offset, null);
|
| - _setNext(prev, next);
|
| - _setPrev(next, prev);
|
| - }
|
| -
|
| - /**
|
| - * Copies all non-free entries from the old table to the new empty table.
|
| - */
|
| - void _addAllEntries(List oldTable) {
|
| - int offset = oldTable[_HEAD_OFFSET + _NEXT_INDEX];
|
| - while (offset != _HEAD_OFFSET) {
|
| - Object object = oldTable[offset];
|
| - int nextOffset = oldTable[offset + _NEXT_INDEX];
|
| - int toOffset = _put(object);
|
| - _copyEntry(oldTable, offset, toOffset);
|
| - offset = nextOffset;
|
| - }
|
| + bool remove(Object o) {
|
| + if (!_validKey(o)) return false;
|
| + return super.remove(o);
|
| }
|
|
|
| - void _clear() {
|
| - if (_elementCount == 0) return;
|
| - _setNext(_HEAD_OFFSET, _HEAD_OFFSET);
|
| - _setPrev(_HEAD_OFFSET, _HEAD_OFFSET);
|
| - for (int i = _entrySize; i < _table.length; i++) {
|
| - _table[i] = null;
|
| + bool containsAll(Iterable<Object> elements) {
|
| + for (Object element in elements) {
|
| + if (!_validKey(element) || !this.contains(element)) return false;
|
| }
|
| - _entryCount = _deletedCount = 0;
|
| - _recordModification();
|
| + return true;
|
| }
|
|
|
| - int _put(K key) {
|
| - int offset = _probeForAdd(_hashCodeOf(key), key);
|
| - Object oldEntry = _table[offset];
|
| - if (identical(oldEntry, _TOMBSTONE)) {
|
| - _deletedCount--;
|
| - } else if (oldEntry == null) {
|
| - _entryCount++;
|
| - } else {
|
| - return offset;
|
| + void removeAll(Iterable<Object> elements) {
|
| + for (Object element in elements) {
|
| + if (_validKey(element)) {
|
| + super._remove(element, _hasher(element));
|
| + }
|
| }
|
| - _recordModification();
|
| - _setKey(offset, key);
|
| - _linkLast(offset);
|
| - return offset;
|
| }
|
|
|
| - void _deleteEntry(int offset) {
|
| - _unlink(offset);
|
| - _setKey(offset, _TOMBSTONE);
|
| - _deletedCount++;
|
| - _recordModification();
|
| + void retainAll(Iterable<Object> elements) {
|
| + super._retainAll(elements, _validKey);
|
| }
|
| -}
|
| -
|
| -class _LinkedHashTableKeyIterable<K> extends IterableBase<K> {
|
| - final _LinkedHashTable<K> _table;
|
| - _LinkedHashTableKeyIterable(this._table);
|
| - Iterator<K> get iterator => new _LinkedHashTableKeyIterator<K>(_table);
|
| -
|
| - bool contains(Object value) => _table._get(value) >= 0;
|
| -
|
| - int get length => _table._elementCount;
|
| -}
|
| -
|
| -class _LinkedHashTableKeyIterator<K> extends _LinkedHashTableIterator<K> {
|
| - _LinkedHashTableKeyIterator(_LinkedHashTable<K> hashTable): super(hashTable);
|
| -
|
| - K _getCurrent(int offset) => _hashTable._key(offset);
|
| -}
|
|
|
| -class _LinkedHashTableValueIterable<V> extends IterableBase<V> {
|
| - final _LinkedHashTable _hashTable;
|
| - final int _valueIndex;
|
| - _LinkedHashTableValueIterable(this._hashTable, this._valueIndex);
|
| - Iterator<V> get iterator =>
|
| - new _LinkedHashTableValueIterator<V>(_hashTable, _valueIndex);
|
| - int get length => _hashTable._elementCount;
|
| + HashSet<E> _newSet() =>
|
| + new _LinkedCustomHashSet<E>(_equality, _hasher, _validKey);
|
| }
|
|
|
| -class _LinkedHashTableValueIterator<V> extends _LinkedHashTableIterator<V> {
|
| - final int _valueIndex;
|
| -
|
| - _LinkedHashTableValueIterator(_LinkedHashTable hashTable, this._valueIndex)
|
| - : super(hashTable);
|
| -
|
| - V _getCurrent(int offset) => _hashTable._table[offset + _valueIndex];
|
| -}
|
| -
|
| -abstract class _LinkedHashTableIterator<T> implements Iterator<T> {
|
| - final _LinkedHashTable _hashTable;
|
| +class _LinkedHashSetIterator<E> implements Iterator<E> {
|
| + final _LinkedHashSet _set;
|
| final int _modificationCount;
|
| - int _offset;
|
| - T _current;
|
| + var _next;
|
| + E _current;
|
|
|
| - _LinkedHashTableIterator(_LinkedHashTable table)
|
| - : _hashTable = table,
|
| - _modificationCount = table._modificationCount,
|
| - _offset = table._next(_LinkedHashTable._HEAD_OFFSET);
|
| + _LinkedHashSetIterator(_LinkedHashSet hashSet)
|
| + : _set = hashSet,
|
| + _modificationCount = hashSet._modificationCount,
|
| + _next = hashSet._nextEntry;
|
|
|
| bool moveNext() {
|
| - _hashTable._checkModification(_modificationCount);
|
| - if (_offset == _LinkedHashTable._HEAD_OFFSET) {
|
| + if (_modificationCount != _set._modificationCount) {
|
| + throw new ConcurrentModificationError(_set);
|
| + }
|
| + if (identical(_set, _next)) {
|
| _current = null;
|
| return false;
|
| }
|
| - _current = _getCurrent(_offset);
|
| - _offset = _hashTable._next(_offset);
|
| + _LinkedHashSetEntry entry = _next;
|
| + _current = entry.key;
|
| + _next = entry._nextEntry;
|
| return true;
|
| }
|
|
|
| - T _getCurrent(int offset);
|
| -
|
| - T get current => _current;
|
| -}
|
| -
|
| -class _LinkedHashMapTable<K, V> extends _LinkedHashTable<K> {
|
| - static const int _INITIAL_CAPACITY = 8;
|
| - static const int _VALUE_INDEX = 3;
|
| -
|
| - int get _entrySize => 4;
|
| -
|
| - _LinkedHashMapTable() : super(_INITIAL_CAPACITY);
|
| -
|
| - V _value(int offset) => _table[offset + _VALUE_INDEX];
|
| - void _setValue(int offset, V value) { _table[offset + _VALUE_INDEX] = value; }
|
| -
|
| - _copyEntry(List oldTable, int fromOffset, int toOffset) {
|
| - _table[toOffset + _VALUE_INDEX] = oldTable[fromOffset + _VALUE_INDEX];
|
| - }
|
| + E get current => _current;
|
| }
|
|
|