Chromium Code Reviews

Index: src/store-buffer.h
===================================================================
--- src/store-buffer.h	(revision 6554)
+++ src/store-buffer.h	(working copy)
@@ -36,6 +36,7 @@
 namespace v8 {
 namespace internal {
+typedef void (*ObjectSlotCallback)(HeapObject** from, HeapObject* to);
 // Used to implement the write barrier by collecting addresses of pointers
 // between spaces.
@@ -46,12 +47,36 @@
   static void Setup();
   static void TearDown();
+  // This is used by the mutator to enter addresses into the store buffer.
   static inline void Mark(Address addr);
+
+  // This is used by the heap traversal to enter into the store buffer the
+  // addresses that should still be there after GC.  It enters addresses
+  // directly into the old buffer, because the GC starts by wiping the old
+  // buffer and thereafter visits each cell only once, so there is no need
+  // to remove any duplicates.  During the first part of a scavenge we are
+  // using the store buffer to access the old spaces, and at the same time
+  // we are rebuilding the store buffer using this function.  There is,
+  // however, no danger of overwriting the buffer we are iterating over,
+  // because this stage of the scavenge can only shrink the store buffer
+  // (some objects are promoted, so pointers to them do not need to stay in
+  // it).  The later parts of the scavenge process the promotion queue and
+  // can overflow this buffer, which we must check for.
+  static inline void EnterDirectlyIntoStoreBuffer(Address addr);
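
To make the "no dedupe needed" path concrete, here is a minimal sketch (not part of the patch) of a rebuild-time callback. Only ObjectSlotCallback and EnterDirectlyIntoStoreBuffer come from this change; HeapIsInNewSpace and the promotion handling are hypothetical stand-ins for the real heap machinery.

  static void ExampleRebuildCallback(HeapObject** slot, HeapObject* target) {
    if (HeapIsInNewSpace(target)) {  // hypothetical new-space check
      // The target survived in new space, so this old-to-new slot must stay
      // findable by the next scavenge: enter it directly, skipping the
      // dedupe hash maps used on the mutator's Mark() path.
      StoreBuffer::EnterDirectlyIntoStoreBuffer(
          reinterpret_cast<Address>(slot));
    }
    // A promoted target now lives in old space; by simply not re-entering
    // the slot, it drops out of the rebuilt store buffer.
  }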
+
+  enum RebuildStoreBufferMode {
+    kRebuildStoreBufferWhileIterating,
+    kPreserveStoreBufferWhileIterating};

Vyacheslav Egorov (Chromium), 2011/02/02 13:15:47:
  new line + indent closing }

Erik Corry, 2011/02/03 13:21:17:
  Forgot this one :-(

+
+  // Iterates over all pointers that go from old space to new space.  It
+  // empties the store buffer as it starts, so the callback should re-enter
+  // surviving old-to-new pointers into the store buffer to rebuild it.
+  static void IteratePointersToNewSpace(ObjectSlotCallback callback);
+
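During a scavenge the two pieces are meant to be used together: the iteration runs inside a rebuild scope so the callback is allowed to re-enter addresses. A hedged sketch of the call pattern, using the callback sketched above (the surrounding GC code is hypothetical):

  {
    StoreBufferRebuildScope scope;  // permits re-entering addresses (see below)
    StoreBuffer::IteratePointersToNewSpace(&ExampleRebuildCallback);
  }  // ~StoreBufferRebuildScope also checks whether the buffer overflowed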
   static const int kStoreBufferOverflowBit = 1 << 16;
   static const int kStoreBufferSize = kStoreBufferOverflowBit;
   static const int kStoreBufferLength = kStoreBufferSize / sizeof(Address);
-  static const int kOldStoreBufferLength = kStoreBufferLength * 16;
+  static const int kOldStoreBufferLength = kStoreBufferLength * 4;
   static const int kHashMapLengthLog2 = 12;
   static const int kHashMapLength = 1 << kHashMapLengthLog2;
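
For concreteness, the arithmetic behind these constants, assuming a 64-bit build where sizeof(Address) == 8 (the pointer size is not stated in the patch; on a 32-bit build kStoreBufferLength would be 16384):

  // kStoreBufferSize      = 1 << 16   = 65536 bytes (64 KB)
  // kStoreBufferLength    = 65536 / 8 = 8192 entries
  // kOldStoreBufferLength = 8192 * 4  = 32768 entries
  //                         (down from 8192 * 16 = 131072 before this patch)
  // kHashMapLength        = 1 << 12   = 4096 slots per dedupe hash map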
@@ -62,7 +87,13 @@
   static Object*** Start() { return reinterpret_cast<Object***>(old_start_); }
   static Object*** Top() { return reinterpret_cast<Object***>(old_top_); }
-  static bool must_scan_entire_memory() { return must_scan_entire_memory_; }
+  enum StoreBufferMode {
+    kStoreBufferFunctional,
+    kStoreBufferDisabled,
+    kStoreBufferBeingRebuilt
+  };
+
+  static StoreBufferMode store_buffer_mode() { return store_buffer_mode_; }
   static bool old_buffer_is_sorted() { return old_buffer_is_sorted_; }
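
The tri-state mode replaces the old must_scan_entire_memory() boolean. A hedged sketch of how a caller might branch on it; both scanning helpers are hypothetical names, not part of the patch:

  if (StoreBuffer::store_buffer_mode() == StoreBuffer::kStoreBufferDisabled) {
    // Overflow fallback: the buffer's contents can no longer be trusted,
    // so find old-to-new pointers by scanning old space exhaustively.
    ScanEntireOldSpaceForPointersToNewSpace();
  } else {
    IterateRecordedOldToNewSlots();
  }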
   // Goes through the store buffer removing pointers to things that have
@@ -86,8 +117,10 @@
   static Address* old_top_;
   static bool old_buffer_is_sorted_;
-  static bool must_scan_entire_memory_;
+  static StoreBufferMode store_buffer_mode_;
   static bool during_gc_;
+  static bool store_buffer_rebuilding_enabled_;
+  static bool may_move_store_buffer_entries_;
   static VirtualMemory* virtual_memory_;
   static uintptr_t* hash_map_1_;
@@ -97,8 +130,44 @@
   static void Uniq();
   static void ZapHashTables();
   static bool HashTablesAreZapped();
+
+  friend class StoreBufferRebuildScope;
+  friend class DontMoveStoreBufferEntriesScope;
 };
+
+class StoreBufferRebuildScope {
+ public:
+  StoreBufferRebuildScope() :
+      stored_state_(StoreBuffer::store_buffer_rebuilding_enabled_) {
+    StoreBuffer::store_buffer_rebuilding_enabled_ = true;
+  }
+
+  ~StoreBufferRebuildScope() {
+    StoreBuffer::store_buffer_rebuilding_enabled_ = stored_state_;
+    StoreBuffer::CheckForFullBuffer();
+  }
+
+ private:
+  bool stored_state_;
+};
+
+
+class DontMoveStoreBufferEntriesScope {
+ public:
+  DontMoveStoreBufferEntriesScope() :
+      stored_state_(StoreBuffer::may_move_store_buffer_entries_) {
+    StoreBuffer::may_move_store_buffer_entries_ = false;
+  }
+
+  ~DontMoveStoreBufferEntriesScope() {
+    StoreBuffer::may_move_store_buffer_entries_ = stored_state_;
+  }
+
+ private:
+  bool stored_state_;
+};
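
Both helpers are RAII scopes that save and restore a StoreBuffer flag on entry and exit. The second one is for code that walks the old buffer in place and must not have entries moved underneath it, for example by sorting or Uniq(). A hedged sketch; the per-slot handler is a hypothetical name:

  {
    DontMoveStoreBufferEntriesScope no_motion;
    for (Object*** current = StoreBuffer::Start();
         current < StoreBuffer::Top();
         current++) {
      ProcessOldToNewSlot(*current);  // hypothetical per-slot handler
    }
  }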
+
 } } // namespace v8::internal
 #endif // V8_WRITE_BARRIER_H_