Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(766)

Unified Diff: third_party/WebKit/Source/wtf/FlatTable.h

Issue 2396533004: Introduce a FlatMap and FlatSet into WTF (Closed)
Patch Set: Fix compile Created 4 years, 2 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View side-by-side diff with in-line comments
Download patch
Index: third_party/WebKit/Source/wtf/FlatTable.h
diff --git a/third_party/WebKit/Source/wtf/FlatTable.h b/third_party/WebKit/Source/wtf/FlatTable.h
new file mode 100644
index 0000000000000000000000000000000000000000..d9175002648b1ab2061afd84604a389c42e37794
--- /dev/null
+++ b/third_party/WebKit/Source/wtf/FlatTable.h
@@ -0,0 +1,349 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef WTF_FlatTable_h
+#define WTF_FlatTable_h
+
+#include "base/macros.h"
+#include "wtf/Vector.h"
+#include <vector>
+
+namespace WTF {
+
+// Iterator customization.
// Traversal policy for a mutable forward iterator: "next" walks towards the
// back of the table, "prev" towards the front.
template <typename Delegate>
struct ForwardIteratorDelegate : public Delegate {
  using FlatTableDelegate = Delegate;
  static size_t next(size_t pos) { return pos + 1; }
  static size_t prev(size_t pos) { return pos - 1; }
  static constexpr bool is_const = false;
};
+
// Traversal policy for a const forward iterator; stepping direction matches
// ForwardIteratorDelegate, but elements may not be mutated through it.
template <typename Delegate>
struct ConstForwardIteratorDelegate : public Delegate {
  using FlatTableDelegate = Delegate;
  static size_t next(size_t pos) { return pos + 1; }
  static size_t prev(size_t pos) { return pos - 1; }
  static constexpr bool is_const = true;
};
+
// Traversal policy for a mutable reverse iterator: the next/prev directions
// are swapped relative to the forward delegates.
template <typename Delegate>
struct ReverseIteratorDelegate : public Delegate {
  using FlatTableDelegate = Delegate;
  static size_t next(size_t pos) { return pos - 1; }
  static size_t prev(size_t pos) { return pos + 1; }
  static constexpr bool is_const = false;
};
+
// Traversal policy for a const reverse iterator: reversed stepping, and the
// referenced elements may not be mutated.
template <typename Delegate>
struct ConstReverseIteratorDelegate : public Delegate {
  using FlatTableDelegate = Delegate;
  static size_t next(size_t pos) { return pos - 1; }
  static size_t prev(size_t pos) { return pos + 1; }
  static constexpr bool is_const = true;
};
+
+// A FlatTable containing common code for FlatMap and FlatSet. Typically it
+// shouldn't be used directly.
+template <typename Delegate>
+class FlatTable {
+ public:
+ // The ring buffer starts with a capacity of 4. The capacity is always a
+ // power of two so ComputeModulusMask() can build a cheap wrap-around mask.
+ FlatTable() : ring_buffer_(4), front_(0), back_(0), search_step_size_(1) {
+ ComputeModulusMask();
+ }
+
+ using key_type = typename Delegate::key_type;
+ using value_type = typename Delegate::value_type;
+
+ // True when the table contains no elements.
+ bool empty() const { return front_ == back_; }
+
+ // Removes all elements and resets the ring buffer to its initial capacity
+ // of 4, invalidating any iterators.
+ void clear() {
+ ring_buffer_.clear();
+ ring_buffer_.resize(4);
Sami 2016/10/07 09:49:11 Should we make this a named constant?
+ front_ = 0;
+ back_ = 0;
+ search_step_size_ = 1;
+ ComputeModulusMask();
+ }
+
+ // Number of elements. |front_| and |back_| are free-running counters, so
+ // their difference is masked down to the buffer capacity.
+ size_t size() const { return (back_ - front_) & modulus_mask_; }
+
+ // Bidirectional iterator over a FlatTable. The IteratorDelegate selects
+ // both the traversal direction and the constness (see the four delegate
+ // structs above).
+ template <typename IteratorDelegate>
+ class FlatTableIterator {
+ public:
+ // Element type, const-qualified for the const iterator flavours.
+ using ValueType =
+ typename std::conditional<IteratorDelegate::is_const,
+ const typename IteratorDelegate::value_type,
+ typename IteratorDelegate::value_type>::type;
+ using FlatTableDelegate = typename IteratorDelegate::FlatTableDelegate;
+ // Table type, const-qualified to match the iterator's constness.
+ using FlatTableType =
+ typename std::conditional<IteratorDelegate::is_const,
+ const FlatTable<FlatTableDelegate>,
+ FlatTable<FlatTableDelegate>>::type;
+
+ // Pre-increment: steps in the delegate's "next" direction (towards the
+ // back for forward iterators, towards the front for reverse ones).
+ FlatTableIterator& operator++() {
+ m_pos = IteratorDelegate::next(m_pos);
+ return *this;
+ }
+
+ // Pre-decrement: steps in the delegate's "prev" direction.
+ FlatTableIterator& operator--() {
+ m_pos = IteratorDelegate::prev(m_pos);
+ return *this;
+ }
+
+ // Post-increment: returns a copy of the iterator prior to stepping.
+ FlatTableIterator operator++(int /*unused*/) {
+ FlatTableIterator result(*this);
+ m_pos = IteratorDelegate::next(m_pos);
+ return result;
+ }
+
+ // Post-decrement: returns a copy of the iterator prior to stepping.
+ FlatTableIterator operator--(int /*unused*/) {
+ FlatTableIterator result(*this);
+ m_pos = IteratorDelegate::prev(m_pos);
+ return result;
+ }
+
+ ValueType* operator->() const { return &m_flatTable->Element(m_pos); }
+
+ ValueType& operator*() const { return m_flatTable->Element(m_pos); }
+
+ // Iterators compare equal when they reference the same table and the same
+ // position. Templated so const/non-const flavours are comparable.
+ template <typename OtherT>
+ bool operator==(const OtherT& other) const {
+ return m_flatTable == other.m_flatTable && m_pos == other.m_pos;
+ }
+
+ template <typename OtherT>
+ bool operator!=(const OtherT& other) const {
+ return m_flatTable != other.m_flatTable || m_pos != other.m_pos;
+ }
+
+ private:
+ friend class FlatTable<FlatTableDelegate>;
+
+ // Only the table itself may mint iterators. |pos| is a free-running ring
+ // buffer position, not a plain array index.
+ FlatTableIterator(size_t pos, FlatTableType* flatTable)
+ : m_pos(pos), m_flatTable(flatTable) {
+ DCHECK(m_flatTable);
+ }
+
+ size_t m_pos;
+ FlatTableType* m_flatTable;
+ };
+
+ using iterator = FlatTableIterator<ForwardIteratorDelegate<Delegate>>;
+ using const_iterator =
+ FlatTableIterator<ConstForwardIteratorDelegate<Delegate>>;
+ using reverse_iterator = FlatTableIterator<ReverseIteratorDelegate<Delegate>>;
+ using const_reverse_iterator =
+ FlatTableIterator<ConstReverseIteratorDelegate<Delegate>>;
+
+ // Inserts |value| into the map, invalidating any iterators.
+ // O(log n + n / 2)
Sami 2016/10/07 09:49:11 Maybe document the return value?
alex clarke (OOO till 29th) 2016/10/13 14:16:32 Done.
+ // Returns {iterator to the element, true} when a new element was inserted,
+ // or {iterator to the existing element, false} when an element with an
+ // equal key was already present and has been overwritten.
+ std::pair<iterator, bool> insert(value_type value) {
+ if (empty()) {
+ // First element: nothing to search; reset the gallop step size.
+ Element(back_++) = std::move(value);
+ search_step_size_ = 1;
+ return std::make_pair(iterator(front_, this), true);
+ }
+
+ size_t index;
+ if (BinarySearch(Delegate::ToKey(value), &index)) {
+ // Replace existing value.
+ Element(index) = std::move(value);
+ return std::make_pair(iterator(index, this), false);
+ }
+
+ return std::make_pair(iterator(InsertUniqueImpl(std::move(value)), this),
+ true);
+ }
+
+ // Inserts |value| into the map, whose key must be unique, invalidating any
+ // iterators. O(n / 2)
+ void insertUnique(value_type value) {
+ if (empty()) {
Sami 2016/10/07 09:49:11 DCHECK that value is actually unique? Edit: I gue
alex clarke (OOO till 29th) 2016/10/13 14:16:32 Right, that should be enough?
+ Element(back_++) = std::move(value);
+ search_step_size_ = 1;
+ return;
+ }
+
+ InsertUniqueImpl(value);
+ }
+
+ // Erases an entry corresponding to |key|, if any, from the map.
+ // Invalidates any iterators. O(log n + n / 2)
+ size_t erase(const key_type& key) {
+ if (empty())
+ return 0;
+
+ size_t keyIndex;
+ if (!BinarySearch(key, &keyIndex))
+ return 0;
+
+ erase(iterator(keyIndex, this));
+ return 1u;
+ }
+
+ // Erases |it| from the map, invalidating any iterators. O(n / 2)
+ // Shifts the shorter of the two runs: either everything before |it| moves
+ // up from the front, or everything after |it| moves down from the back.
+ template <typename Iterator>
+ void erase(const Iterator& it) {
+ DCHECK_EQ(it.m_flatTable, this);
+ size_t eraseFrontCost = it.m_pos - front_;
+ size_t eraseBackCost = back_ - it.m_pos;
+ if (eraseFrontCost >= eraseBackCost) {
+ EraseByMovingBack(it.m_pos);
+ } else {
+ EraseByMovingFront(it.m_pos);
+ }
+
+ // Keep |search_step_size_| roughly proportional to half the size so
+ // BinarySearch() remains correct after shrinking.
+ if (size() <= search_step_size_)
+ search_step_size_ /= 2;
+ }
+
+ // O(log n)
+ iterator find(const key_type& key) {
+ size_t keyIndex;
+ if (!BinarySearch(key, &keyIndex))
+ return end();
+
+ return iterator(keyIndex, this);
+ }
+
+ // O(log n)
+ const_iterator find(const key_type& key) const {
+ size_t keyIndex;
+ if (!BinarySearch(key, &keyIndex))
+ return end();
+
+ return iterator(keyIndex, this);
+ }
+
+ iterator begin() { return iterator(front_, this); }
+ iterator end() { return iterator(back_, this); }
+ const_iterator begin() const { return const_iterator(front_, this); }
+ const_iterator end() const { return const_iterator(back_, this); }
+
+ // Reverse iteration starts at |back_ - 1| (the last element) and ends one
+ // before |front_|.
+ // NOTE(review): |front_ - 1| relies on unsigned wrap-around when
+ // front_ == 0; the resulting position is only used as a sentinel for
+ // iterator comparison and is never dereferenced.
+ reverse_iterator rbegin() { return reverse_iterator(back_ - 1, this); }
+ const_reverse_iterator rbegin() const {
+ return const_reverse_iterator(back_ - 1, this);
+ }
+ reverse_iterator rend() { return reverse_iterator(front_ - 1, this); }
+ const_reverse_iterator rend() const {
+ return const_reverse_iterator(front_ - 1, this);
+ }
+
+ protected:
+ // Maps a free-running ring buffer position onto the underlying vector by
+ // masking it down to the (power of two) capacity.
+ inline value_type& Element(size_t pos) {
+ return ring_buffer_[pos & modulus_mask_];
+ }
+
+ const inline value_type& Element(size_t pos) const {
+ return ring_buffer_[pos & modulus_mask_];
+ }
+
+ // Helpers for derived classes — presumably FlatMap/FlatSet — to validate
+ // and unpack iterators without accessing their private members directly.
+ template <typename Iterator>
+ bool iteratorBelongsToThisTable(const Iterator& it) const {
+ return it.m_flatTable == this;
+ }
+
+ template <typename Iterator>
+ size_t iteratorPosition(const Iterator& it) const {
+ return it.m_pos;
+ }
+
+ // Inserts |value|, which must not already be present, and returns the ring
+ // buffer position it was stored at. Grows the buffer when full, then opens
+ // a slot by shifting whichever half of the elements is cheaper to move.
+ size_t InsertUniqueImpl(value_type value) {
Sami 2016/10/07 09:49:11 Maybe the great renaming will fix this but I find
alex clarke (OOO till 29th) 2016/10/13 14:16:32 Done.
+ // Grow the vector if needed.
+ if (((back_ + 1) & modulus_mask_) == (front_ & modulus_mask_))
+ Grow();
+
+ // Shuffle Elements to make way for |value|.
+ size_t mid = front_ + size() / 2;
+ size_t i;
+ if (value < Element(mid)) {
+ // Insert in the front half: open a slot before |front_| and shift the
+ // smaller elements towards it.
+ // NOTE(review): this relies on unsigned wrap-around when front_ == 0;
+ // confirm that the index comparisons elsewhere tolerate that.
+ front_--;
+ for (i = front_; Element(i + 1) < value; i++) {
+ Element(i) = std::move(Element(i + 1));
+ }
+ } else {
+ // Insert in the back half; equal keys are forbidden on this path.
+ DCHECK(Element(mid) < value);
+ for (i = back_++; value < Element(i - 1); i--) {
+ Element(i) = std::move(Element(i - 1));
+ }
+ }
+ DCHECK(Element(i) != value);
+ Element(i) = std::move(value);
+ // Keep |search_step_size_| at roughly half the size for BinarySearch().
+ if ((size() / 2) >= search_step_size_)
+ search_step_size_ *= 2;
+ return i;
+ }
+
+ // Doubles the ring buffer capacity (keeping it a power of two) and
+ // compacts the live range so it starts at index 0.
+ void Grow() {
+ std::vector<value_type> new_buffer(ring_buffer_.size() * 2);
+ size_t old_back = back_;
+ back_ = 0;
+ // Element(i) still reads the old buffer and old mask until the swap and
+ // ComputeModulusMask() below — do not reorder these statements.
+ for (size_t i = front_; i != old_back; i++) {
+ new_buffer[back_++] = std::move(Element(i));
+ }
+ front_ = 0;
+ std::swap(ring_buffer_, new_buffer);
+ ComputeModulusMask();
+ }
+
+ // Galloping binary search: starting at |front_|, advances |low| by halving
+ // step sizes while the probed element is not greater than |key|. Relies on
+ // the insert/erase paths keeping |search_step_size_| >= size() / 2.
+ // Writes the final position to |*index| and returns true if the element
+ // there matches |key|. The table must not be empty. O(log n)
+ bool BinarySearch(const key_type& key, size_t* index) const {
+ DCHECK(!empty());
+ size_t low = front_;
+ for (size_t step_size = search_step_size_; step_size; step_size /= 2) {
+ size_t mid = low + step_size;
+ if (mid < back_ && !(key < Delegate::ToKey(Element(mid))))
+ low = mid;
+ }
+ *index = low;
+ return key == Delegate::ToKey(Element(low));
+ }
+
+ // Erases the element at |index_to_erase| by shifting the elements before
+ // it up one slot and advancing |front_|.
+ void EraseByMovingFront(size_t index_to_erase) {
+ if (index_to_erase == front_) {
+ // Nothing gets moved over the front slot, so clear it explicitly to
+ // release any resources held by the erased value.
+ Element(index_to_erase) = value_type();
+ }
+ for (size_t i = index_to_erase; i != front_; i--) {
+ Element(i) = std::move(Element(i - 1));
+ }
+ front_++;
+ }
+
+ // Erases the element at |index_to_erase| by shifting the elements after it
+ // down one slot and retreating |back_|.
+ void EraseByMovingBack(size_t index_to_erase) {
+ back_--;
+ if (index_to_erase == back_) {
+ // The last element has nothing moved onto it, so clear it explicitly
+ // to release any resources held by the erased value.
+ Element(index_to_erase) = value_type();
+ }
+ for (size_t i = index_to_erase; i != (back_ + 1); i++) {
+ Element(i) = std::move(Element(i + 1));
+ }
+ }
+
+ // Caches the wrap-around mask. Only valid while |ring_buffer_|'s size is a
+ // power of two (so size - 1 is an all-ones bit mask).
+ void ComputeModulusMask() { modulus_mask_ = ring_buffer_.size() - 1; }
Sami 2016/10/07 09:49:11 DCHECK that this is 2^n-1?
alex clarke (OOO till 29th) 2016/10/13 14:16:32 Done something similar.
+
+ // The ring buffer is always a power of two in size, because computing
+ // modulus for powers of two is super fast.
+ std::vector<value_type> ring_buffer_;
+
+ // Since |ring_buffer_| is a power of two in size, we can compute the modulus
+ // using a bitwise and with |modulus_mask_|.
+ size_t modulus_mask_;
+
+ // The index of the first element.
+ // NOTE only |front_| mod size() is a valid array index.
+ size_t front_;
+
+ // The index of the last element + 1.
+ // NOTE only |back_| mod size() is a valid array index.
+ size_t back_;
+
+ // Equivalent to 2 ^ ceil(log2(size()) - 1).
+ size_t search_step_size_;
+
+ DISALLOW_COPY_AND_ASSIGN(FlatTable);
+};
+
+} // namespace WTF
+
+#endif // WTF_FlatTable_h
« third_party/WebKit/Source/wtf/FlatSet.h ('K') | « third_party/WebKit/Source/wtf/FlatSetTest.cpp ('k') | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698