// Copyright 2011 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_STORE_BUFFER_H_
#define V8_STORE_BUFFER_H_

#include "src/allocation.h"
#include "src/base/logging.h"
#include "src/base/platform/platform.h"
#include "src/globals.h"

namespace v8 {
namespace internal {

class Page;
class PagedSpace;
class StoreBuffer;

typedef void (*ObjectSlotCallback)(HeapObject** from, HeapObject* to);
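
// Illustrative sketch (not part of the original header; the function name is
// hypothetical): a function matching ObjectSlotCallback receives the slot that
// holds an old-to-new pointer and the object it currently points to, and
// typically writes the (possibly relocated) target back into the slot:
//
//   void UpdateSlotExample(HeapObject** from, HeapObject* to) {
//     *from = to;  // store the current location of the target into the slot
//   }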

typedef void (StoreBuffer::*RegionCallback)(Address start,
                                            Address end,
                                            ObjectSlotCallback slot_callback,
                                            bool clear_maps);

// Used to implement the write barrier by collecting addresses of pointers
// between spaces.
class StoreBuffer {
 public:
  explicit StoreBuffer(Heap* heap);

  static void StoreBufferOverflow(Isolate* isolate);

  inline Address TopAddress();

  void SetUp();
  void TearDown();

  // This is used by the mutator to enter addresses into the store buffer.
  inline void Mark(Address addr);
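
  // Illustrative sketch (not part of the original header): a write-barrier
  // slow path might record the address of an updated slot roughly like this,
  // assuming the owning Heap exposes its store buffer through a
  // store_buffer() accessor:
  //
  //   void RecordWriteExample(Heap* heap, Address slot) {
  //     heap->store_buffer()->Mark(slot);  // remember the old-to-new slot
  //   }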

  // This is used by the heap traversal to enter the addresses into the store
  // buffer that should still be in the store buffer after GC. It enters
  // addresses directly into the old buffer because the GC starts by wiping the
  // old buffer and thereafter only visits each cell once so there is no need
  // to attempt to remove any dupes. During the first part of a GC we are
  // using the store buffer to access the old spaces and at the same time we
  // are rebuilding the store buffer using this function. There is, however,
  // no issue of overwriting the buffer we are iterating over, because this
  // stage of the scavenge can only reduce the number of addresses in the store
  // buffer (some objects are promoted so pointers to them do not need to be in
  // the store buffer). The later parts of the GC scan the pages that are
  // exempt from the store buffer and process the promotion queue. These steps
  // can overflow this buffer. We check for this and on overflow we call the
  // callback set up with the StoreBufferRebuildScope object.
  inline void EnterDirectlyIntoStoreBuffer(Address addr);

  // Iterates over all pointers that go from old space to new space. It will
  // delete the store buffer as it starts so the callback should reenter
  // surviving old-to-new pointers into the store buffer to rebuild it.
  void IteratePointersToNewSpace(ObjectSlotCallback callback);

  // Same as IteratePointersToNewSpace but additionally clears maps in objects
  // referenced from the store buffer that do not contain a forwarding pointer.
  void IteratePointersToNewSpaceAndClearMaps(ObjectSlotCallback callback);

  static const int kStoreBufferOverflowBit = 1 << (14 + kPointerSizeLog2);
  static const int kStoreBufferSize = kStoreBufferOverflowBit;
  static const int kStoreBufferLength = kStoreBufferSize / sizeof(Address);
  static const int kOldStoreBufferLength = kStoreBufferLength * 16;
  static const int kHashSetLengthLog2 = 12;
  static const int kHashSetLength = 1 << kHashSetLengthLog2;
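
  // Worked example of the constants above (illustrative, assuming a 64-bit
  // target where kPointerSizeLog2 == 3 and sizeof(Address) == 8):
  //   kStoreBufferOverflowBit = 1 << 17 = 128 KB
  //   kStoreBufferSize        = 128 KB
  //   kStoreBufferLength      = 131072 / 8 = 16384 slots in the new buffer
  //   kOldStoreBufferLength   = 16384 * 16 = 262144 slots in the old buffer
  //   kHashSetLength          = 1 << 12 = 4096 entries per filtering hash set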

  void Compact();

  void GCPrologue();
  void GCEpilogue();

  Object*** Limit() { return reinterpret_cast<Object***>(old_limit_); }
  Object*** Start() { return reinterpret_cast<Object***>(old_start_); }
  Object*** Top() { return reinterpret_cast<Object***>(old_top_); }
  void SetTop(Object*** top) {
    DCHECK(top >= Start());
    DCHECK(top <= Limit());
    old_top_ = reinterpret_cast<Address*>(top);
  }

  bool old_buffer_is_sorted() { return old_buffer_is_sorted_; }
  bool old_buffer_is_filtered() { return old_buffer_is_filtered_; }

  // Goes through the store buffer removing pointers to things that have
  // been promoted. Rebuilds the store buffer completely if it overflowed.
  void SortUniq();

  void EnsureSpace(intptr_t space_needed);
  void Verify();

  bool PrepareForIteration();

#ifdef DEBUG
  void Clean();
  // Slow, for asserts only.
  bool CellIsInStoreBuffer(Address cell);
#endif

  void Filter(int flag);

 private:
  Heap* heap_;

  // The store buffer is divided up into a new buffer that is constantly being
  // filled by mutator activity and an old buffer that is filled with the data
  // from the new buffer after compression.
  Address* start_;
  Address* limit_;

  Address* old_start_;
  Address* old_limit_;
  Address* old_top_;
  Address* old_reserved_limit_;
  base::VirtualMemory* old_virtual_memory_;

  bool old_buffer_is_sorted_;
  bool old_buffer_is_filtered_;
  bool during_gc_;
  // The garbage collector iterates over many pointers to new space that are
  // not handled by the store buffer. This flag indicates whether the pointers
  // found by the callbacks should be added to the store buffer or not.
  bool store_buffer_rebuilding_enabled_;
  StoreBufferCallback callback_;
  bool may_move_store_buffer_entries_;

  base::VirtualMemory* virtual_memory_;

  // Two hash sets used for filtering.
  // If an address is in the hash set then it is guaranteed to be in the
  // old part of the store buffer.
  uintptr_t* hash_set_1_;
  uintptr_t* hash_set_2_;
  bool hash_sets_are_empty_;

  void ClearFilteringHashSets();

  bool SpaceAvailable(intptr_t space_needed);
  void Uniq();
  void ExemptPopularPages(int prime_sample_step, int threshold);

  // Set the map field of the object to NULL if it contains a map.
  inline void ClearDeadObject(HeapObject* object);

  void IteratePointersToNewSpace(ObjectSlotCallback callback, bool clear_maps);

  void FindPointersToNewSpaceInRegion(Address start,
                                      Address end,
                                      ObjectSlotCallback slot_callback,
                                      bool clear_maps);

  // For each region of pointers on a page in use from an old space, call the
  // visit_pointer_region callback.
  // If either visit_pointer_region or the callback can cause an allocation in
  // old space and change the allocation watermark, then
  // can_preallocate_during_iteration should be set to true.
  void IteratePointersOnPage(
      PagedSpace* space,
      Page* page,
      RegionCallback region_callback,
      ObjectSlotCallback slot_callback);

  void IteratePointersInStoreBuffer(ObjectSlotCallback slot_callback,
                                    bool clear_maps);

#ifdef VERIFY_HEAP
  void VerifyPointers(LargeObjectSpace* space);
#endif

  friend class StoreBufferRebuildScope;
  friend class DontMoveStoreBufferEntriesScope;
};


class StoreBufferRebuildScope {
 public:
  explicit StoreBufferRebuildScope(Heap* heap,
                                   StoreBuffer* store_buffer,
                                   StoreBufferCallback callback)
      : store_buffer_(store_buffer),
        stored_state_(store_buffer->store_buffer_rebuilding_enabled_),
        stored_callback_(store_buffer->callback_) {
    store_buffer_->store_buffer_rebuilding_enabled_ = true;
    store_buffer_->callback_ = callback;
    (*callback)(heap, NULL, kStoreBufferStartScanningPagesEvent);
  }

  ~StoreBufferRebuildScope() {
    store_buffer_->callback_ = stored_callback_;
    store_buffer_->store_buffer_rebuilding_enabled_ = stored_state_;
  }

 private:
  StoreBuffer* store_buffer_;
  bool stored_state_;
  StoreBufferCallback stored_callback_;
};
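
// Illustrative usage sketch (not part of the original header; the callback
// name is hypothetical): during scavenge the GC can wrap its page-scanning
// phase in a StoreBufferRebuildScope so that rebuilding is enabled and
// overflow is routed to the callback for that phase only, assuming the Heap
// exposes its store buffer via a store_buffer() accessor:
//
//   {
//     StoreBufferRebuildScope scope(heap, heap->store_buffer(),
//                                   &ScavengeStoreBufferCallbackExample);
//     // ... scan pages; surviving old-to-new slots are re-entered into the
//     // store buffer, and on overflow the callback is invoked ...
//   }  // previous rebuilding state and callback restored here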


class DontMoveStoreBufferEntriesScope {
 public:
  explicit DontMoveStoreBufferEntriesScope(StoreBuffer* store_buffer)
      : store_buffer_(store_buffer),
        stored_state_(store_buffer->may_move_store_buffer_entries_) {
    store_buffer_->may_move_store_buffer_entries_ = false;
  }

  ~DontMoveStoreBufferEntriesScope() {
    store_buffer_->may_move_store_buffer_entries_ = stored_state_;
  }

 private:
  StoreBuffer* store_buffer_;
  bool stored_state_;
};

} }  // namespace v8::internal

#endif  // V8_STORE_BUFFER_H_