| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef V8_STORE_BUFFER_H_ | 5 #ifndef V8_STORE_BUFFER_H_ |
| 6 #define V8_STORE_BUFFER_H_ | 6 #define V8_STORE_BUFFER_H_ |
| 7 | 7 |
| 8 #include "src/allocation.h" | 8 #include "src/allocation.h" |
| 9 #include "src/base/logging.h" | 9 #include "src/base/logging.h" |
| 10 #include "src/base/platform/platform.h" | 10 #include "src/base/platform/platform.h" |
| 11 #include "src/globals.h" | 11 #include "src/globals.h" |
| 12 #include "src/heap/slot-set.h" |
| 12 | 13 |
| 13 namespace v8 { | 14 namespace v8 { |
| 14 namespace internal { | 15 namespace internal { |
| 15 | 16 |
| 16 class Page; | 17 class Page; |
| 17 class PagedSpace; | 18 class PagedSpace; |
| 18 class StoreBuffer; | 19 class StoreBuffer; |
| 19 | 20 |
| 20 typedef void (*ObjectSlotCallback)(HeapObject** from, HeapObject* to); | 21 typedef void (*ObjectSlotCallback)(HeapObject** from, HeapObject* to); |
| 21 | 22 |
| 22 // Used to implement the write barrier by collecting addresses of pointers | 23 // Used to implement the write barrier by collecting addresses of pointers |
| 23 // between spaces. | 24 // between spaces. |
| 24 class StoreBuffer { | 25 class StoreBuffer { |
| 25 public: | 26 public: |
| 26 explicit StoreBuffer(Heap* heap); | 27 explicit StoreBuffer(Heap* heap); |
| 27 | |
| 28 static void StoreBufferOverflow(Isolate* isolate); | 28 static void StoreBufferOverflow(Isolate* isolate); |
| 29 | |
| 30 void SetUp(); | 29 void SetUp(); |
| 31 void TearDown(); | 30 void TearDown(); |
| 32 | 31 |
| 32 static const int kStoreBufferOverflowBit = 1 << (14 + kPointerSizeLog2); |
| 33 static const int kStoreBufferSize = kStoreBufferOverflowBit; |
| 34 static const int kStoreBufferLength = kStoreBufferSize / sizeof(Address); |
| 35 |
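The three constants above fix the buffer geometry; making the overflow bit equal to the buffer size lets suitably aligned generated code detect overflow by testing a single bit of the top pointer. As a quick arithmetic check (assuming a 64-bit target, where kPointerSizeLog2 == 3 and sizeof(Address) == 8):

    // 1 << (14 + 3) == 1 << 17 == 131072, i.e. a 128 KB buffer.
    static_assert((1 << (14 + 3)) == 131072, "128 KB store buffer");
    // 131072 bytes / 8 bytes per Address == 16384 recordable slots.
    static_assert(131072 / 8 == 16384, "16K entries on a 64-bit target");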
| 33 // This is used to add addresses to the store buffer non-concurrently. | 36 // This is used to add addresses to the store buffer non-concurrently. |
| 34 inline void Mark(Address addr); | 37 inline void Mark(Address addr); |
| 35 | 38 |
| 36 // This is used by the heap traversal to enter the addresses into the store | 39 // Slots that no longer point into new space after the callback has been |
| 37 // buffer that should still be in the store buffer after GC. It enters | 40 // invoked are removed from the set. |
| 38 // addresses directly into the old buffer because the GC starts by wiping the | |
| 39 // old buffer and thereafter only visits each cell once so there is no need | |
| 40 // to attempt to remove any dupes. During the first part of a GC we | |
| 41 // are using the store buffer to access the old spaces and at the same time | |
| 42 // we are rebuilding the store buffer using this function. There is, however | |
| 43 // no issue of overwriting the buffer we are iterating over, because this | |
| 44 // stage of the scavenge can only reduce the number of addresses in the store | |
| 45 // buffer (some objects are promoted so pointers to them do not need to be in | |
| 46 // the store buffer). The later parts of the GC scan the pages that are | |
| 47 // exempt from the store buffer and process the promotion queue. These steps | |
| 48 // can overflow this buffer. We check for this and on overflow we call the | |
| 49 // callback set up with the StoreBufferRebuildScope object. | |
| 50 inline void EnterDirectlyIntoStoreBuffer(Address addr); | |
| 51 | |
| 52 // Iterates over all pointers that go from old space to new space. It will | |
| 53 // delete the store buffer as it starts so the callback should reenter | |
| 54 // surviving old-to-new pointers into the store buffer to rebuild it. | |
| 55 void IteratePointersToNewSpace(ObjectSlotCallback callback); | 41 void IteratePointersToNewSpace(ObjectSlotCallback callback); |
| 56 | 42 |
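For reference, a hypothetical callback matching the ObjectSlotCallback typedef above (the name and body are illustrative, not from this patch):

    // "from" is a slot in an old-space object that held an old-to-new
    // pointer; "to" is its current target. A scavenger-style callback
    // relocates "to" and writes the forwarded pointer back through "from".
    void ExampleScavengeSlot(HeapObject** from, HeapObject* to) {
      // ... copy or promote "to", then store the new address via *from.
    }

    // During a scavenge pause:
    store_buffer->IteratePointersToNewSpace(&ExampleScavengeSlot);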
| 57 static const int kStoreBufferOverflowBit = 1 << (14 + kPointerSizeLog2); | |
| 58 static const int kStoreBufferSize = kStoreBufferOverflowBit; | |
| 59 static const int kStoreBufferLength = kStoreBufferSize / sizeof(Address); | |
| 60 static const int kOldStoreBufferLength = kStoreBufferLength * 16; | |
| 61 static const int kHashSetLengthLog2 = 12; | |
| 62 static const int kHashSetLength = 1 << kHashSetLengthLog2; | |
| 63 | |
| 64 void Compact(); | |
| 65 | |
| 66 void GCPrologue(); | |
| 67 void GCEpilogue(); | |
| 68 | |
| 69 Object*** Limit() { return reinterpret_cast<Object***>(old_limit_); } | |
| 70 Object*** Start() { return reinterpret_cast<Object***>(old_start_); } | |
| 71 Object*** Top() { return reinterpret_cast<Object***>(old_top_); } | |
| 72 void SetTop(Object*** top) { | |
| 73 DCHECK(top >= Start()); | |
| 74 DCHECK(top <= Limit()); | |
| 75 old_top_ = reinterpret_cast<Address*>(top); | |
| 76 } | |
| 77 | |
| 78 bool old_buffer_is_sorted() { return old_buffer_is_sorted_; } | |
| 79 bool old_buffer_is_filtered() { return old_buffer_is_filtered_; } | |
| 80 | |
| 81 void EnsureSpace(intptr_t space_needed); | |
| 82 void Verify(); | 43 void Verify(); |
| 83 | 44 |
| 84 bool PrepareForIteration(); | |
| 85 | |
| 86 void Filter(int flag); | |
| 87 | |
| 88 // Eliminates all stale store buffer entries from the store buffer, i.e., | 45 // Eliminates all stale store buffer entries from the store buffer, i.e., |
| 89 // slots that are not part of live objects anymore. This method must be | 46 // slots that are not part of live objects anymore. This method must be |
| 90 // called after marking, when the whole transitive closure is known and | 47 // called after marking, when the whole transitive closure is known and |
| 91 // must be called before sweeping when mark bits are still intact. | 48 // must be called before sweeping when mark bits are still intact. |
| 92 void ClearInvalidStoreBufferEntries(); | 49 void ClearInvalidStoreBufferEntries(); |
| 93 void VerifyValidStoreBufferEntries(); | 50 void VerifyValidStoreBufferEntries(); |
| 94 | 51 |
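A sketch of the per-page filtering that ClearInvalidStoreBufferEntries implies, assuming the SlotSet::KEEP_SLOT / SlotSet::REMOVE_SLOT protocol from src/heap/slot-set.h and a liveness predicate such as MarkCompactCollector::IsSlotInLiveObject (the lambda body is illustrative):

    slot_set->Iterate([heap](Address slot_addr) {
      // Keep an entry only while the slot still lies inside a live
      // (marked) object; stale entries would dangle once sweeping runs.
      return heap->mark_compact_collector()->IsSlotInLiveObject(slot_addr)
                 ? SlotSet::KEEP_SLOT
                 : SlotSet::REMOVE_SLOT;
    });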
| 95 private: | 52 private: |
| 96 Heap* heap_; | 53 Heap* heap_; |
| 97 | 54 |
| 98 // The store buffer is divided up into a new buffer that is constantly being | 55 // The start and the limit of the buffer that contains store slots |
| 99 // filled by mutator activity and an old buffer that is filled with the data | 56 // added from the generated code. |
| 100 // from the new buffer after compression. | |
| 101 Address* start_; | 57 Address* start_; |
| 102 Address* limit_; | 58 Address* limit_; |
| 103 | 59 |
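Mark's inline body is not part of this header; a minimal sketch of the fast path, assuming a hypothetical top_ cursor with start_ <= top_ <= limit_ (in the real tree the cursor is held as a heap root so generated code can bump it directly):

    inline void StoreBuffer::Mark(Address addr) {
      *top_++ = addr;  // top_ is hypothetical here, see the note above
      if (top_ == limit_) {
        // Buffer full: the slow path drains it into per-page slot sets.
        StoreBufferOverflow(heap_->isolate());
      }
    }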
| 104 Address* old_start_; | |
| 105 Address* old_limit_; | |
| 106 Address* old_top_; | |
| 107 Address* old_reserved_limit_; | |
| 108 base::VirtualMemory* old_virtual_memory_; | |
| 109 | |
| 110 bool old_buffer_is_sorted_; | |
| 111 bool old_buffer_is_filtered_; | |
| 112 bool during_gc_; | |
| 113 // The garbage collector iterates over many pointers to new space that are not | |
| 114 // handled by the store buffer. This flag indicates whether the pointers | |
| 115 // found by the callbacks should be added to the store buffer or not. | |
| 116 bool store_buffer_rebuilding_enabled_; | |
| 117 StoreBufferCallback callback_; | |
| 118 bool may_move_store_buffer_entries_; | |
| 119 | |
| 120 base::VirtualMemory* virtual_memory_; | 60 base::VirtualMemory* virtual_memory_; |
| 121 | 61 |
| 122 // Two hash sets used for filtering. | |
| 123 // If address is in the hash set then it is guaranteed to be in the | |
| 124 // old part of the store buffer. | |
| 125 uintptr_t* hash_set_1_; | |
| 126 uintptr_t* hash_set_2_; | |
| 127 bool hash_sets_are_empty_; | |
| 128 | |
| 129 // Used for synchronization of concurrent store buffer access. | 62 // Used for synchronization of concurrent store buffer access. |
| 130 base::Mutex mutex_; | 63 base::Mutex mutex_; |
| 131 | 64 |
| 132 void ClearFilteringHashSets(); | 65 void InsertEntriesFromBuffer(); |
| 133 | 66 |
| 134 bool SpaceAvailable(intptr_t space_needed); | 67 inline uint32_t AddressToSlotSetAndOffset(Address slot_address, |
| 135 void ExemptPopularPages(int prime_sample_step, int threshold); | 68 SlotSet** slots); |
| 136 | 69 |
| 137 void ProcessOldToNewSlot(Address slot_address, | 70 template <typename Callback> |
| 138 ObjectSlotCallback slot_callback); | 71 void Iterate(Callback callback); |
| 139 | |
| 140 void FindPointersToNewSpaceInRegion(Address start, Address end, | |
| 141 ObjectSlotCallback slot_callback); | |
| 142 | |
| 143 void IteratePointersInStoreBuffer(ObjectSlotCallback slot_callback); | |
| 144 | 72 |
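These three helpers replace the old compaction and hash-set machinery: the linear buffer is drained into per-page SlotSets, and all iteration happens there. A plausible shape for the drain and the address mapping (a sketch reusing the hypothetical top_ cursor from above; the slot_set() page accessor is an assumed name, while SlotSet::Insert and Page::FromAddress exist in the tree):

    void StoreBuffer::InsertEntriesFromBuffer() {
      for (Address* current = start_; current < top_; current++) {
        SlotSet* slots;
        uint32_t offset = AddressToSlotSetAndOffset(*current, &slots);
        slots->Insert(offset);  // idempotent, so duplicates are harmless
      }
      top_ = start_;  // the linear buffer is empty again
    }

    uint32_t StoreBuffer::AddressToSlotSetAndOffset(Address slot_address,
                                                    SlotSet** slots) {
      Page* page = Page::FromAddress(slot_address);
      *slots = page->slot_set();  // assumed accessor name
      return static_cast<uint32_t>(slot_address - page->address());
    }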
| 145 #ifdef VERIFY_HEAP | 73 #ifdef VERIFY_HEAP |
| 146 void VerifyPointers(LargeObjectSpace* space); | 74 void VerifyPointers(LargeObjectSpace* space); |
| 147 #endif | 75 #endif |
| 148 | |
| 149 friend class DontMoveStoreBufferEntriesScope; | |
| 150 friend class FindPointersToNewSpaceVisitor; | |
| 151 friend class StoreBufferRebuildScope; | |
| 152 }; | 76 }; |
| 153 | 77 |
| 154 | 78 |
| 155 class StoreBufferRebuilder { | |
| 156 public: | |
| 157 explicit StoreBufferRebuilder(StoreBuffer* store_buffer) | |
| 158 : store_buffer_(store_buffer) {} | |
| 159 | |
| 160 void Callback(MemoryChunk* page, StoreBufferEvent event); | |
| 161 | |
| 162 private: | |
| 163 StoreBuffer* store_buffer_; | |
| 164 | |
| 165 // We record in this variable how full the store buffer was when we started | |
| 166 // iterating over the current page, finding pointers to new space. If the | |
| 167 // store buffer overflows again we can exempt the page from the store buffer | |
| 168 // by rewinding to this point instead of having to search the store buffer. | |
| 169 Object*** start_of_current_page_; | |
| 170 // The current page we are scanning in the store buffer iterator. | |
| 171 MemoryChunk* current_page_; | |
| 172 }; | |
| 173 | |
| 174 | |
| 175 class StoreBufferRebuildScope { | |
| 176 public: | |
| 177 explicit StoreBufferRebuildScope(Heap* heap, StoreBuffer* store_buffer, | |
| 178 StoreBufferCallback callback) | |
| 179 : store_buffer_(store_buffer), | |
| 180 stored_state_(store_buffer->store_buffer_rebuilding_enabled_), | |
| 181 stored_callback_(store_buffer->callback_) { | |
| 182 store_buffer_->store_buffer_rebuilding_enabled_ = true; | |
| 183 store_buffer_->callback_ = callback; | |
| 184 (*callback)(heap, NULL, kStoreBufferStartScanningPagesEvent); | |
| 185 } | |
| 186 | |
| 187 ~StoreBufferRebuildScope() { | |
| 188 store_buffer_->callback_ = stored_callback_; | |
| 189 store_buffer_->store_buffer_rebuilding_enabled_ = stored_state_; | |
| 190 } | |
| 191 | |
| 192 private: | |
| 193 StoreBuffer* store_buffer_; | |
| 194 bool stored_state_; | |
| 195 StoreBufferCallback stored_callback_; | |
| 196 }; | |
| 197 | |
| 198 | |
| 199 class DontMoveStoreBufferEntriesScope { | |
| 200 public: | |
| 201 explicit DontMoveStoreBufferEntriesScope(StoreBuffer* store_buffer) | |
| 202 : store_buffer_(store_buffer), | |
| 203 stored_state_(store_buffer->may_move_store_buffer_entries_) { | |
| 204 store_buffer_->may_move_store_buffer_entries_ = false; | |
| 205 } | |
| 206 | |
| 207 ~DontMoveStoreBufferEntriesScope() { | |
| 208 store_buffer_->may_move_store_buffer_entries_ = stored_state_; | |
| 209 } | |
| 210 | |
| 211 private: | |
| 212 StoreBuffer* store_buffer_; | |
| 213 bool stored_state_; | |
| 214 }; | |
| 215 | |
| 216 class LocalStoreBuffer BASE_EMBEDDED { | 79 class LocalStoreBuffer BASE_EMBEDDED { |
| 217 public: | 80 public: |
| 218 LocalStoreBuffer() : top_(new Node(nullptr)) {} | 81 LocalStoreBuffer() : top_(new Node(nullptr)) {} |
| 219 | 82 |
| 220 ~LocalStoreBuffer() { | 83 ~LocalStoreBuffer() { |
| 221 Node* current = top_; | 84 Node* current = top_; |
| 222 while (current != nullptr) { | 85 while (current != nullptr) { |
| 223 Node* tmp = current->next; | 86 Node* tmp = current->next; |
| 224 delete current; | 87 delete current; |
| 225 current = tmp; | 88 current = tmp; |
| (...skipping 16 matching lines...) |
| 242 int count; | 105 int count; |
| 243 }; | 106 }; |
| 244 | 107 |
| 245 Node* top_; | 108 Node* top_; |
| 246 }; | 109 }; |
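LocalStoreBuffer gives each parallel evacuation task a private, chunked list of recorded slots, so tasks need no locking while they run. A hedged usage sketch; Record and Process are assumed method names (their declarations sit in the collapsed hunk above):

    LocalStoreBuffer local_buffer;
    // While evacuating, each discovered old-to-new slot is recorded
    // locally, with no synchronization against other tasks:
    local_buffer.Record(slot_address);
    // Once the task finishes, replay the entries into the shared
    // store buffer (this is where the mutex would come into play):
    local_buffer.Process(heap->store_buffer());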
| 247 | 110 |
| 248 } // namespace internal | 111 } // namespace internal |
| 249 } // namespace v8 | 112 } // namespace v8 |
| 250 | 113 |
| 251 #endif // V8_STORE_BUFFER_H_ | 114 #endif // V8_STORE_BUFFER_H_ |