| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef V8_STORE_BUFFER_H_ | 5 #ifndef V8_STORE_BUFFER_H_ |
| 6 #define V8_STORE_BUFFER_H_ | 6 #define V8_STORE_BUFFER_H_ |
| 7 | 7 |
| 8 #include "src/allocation.h" | 8 #include "src/allocation.h" |
| 9 #include "src/base/logging.h" | 9 #include "src/base/logging.h" |
| 10 #include "src/base/platform/platform.h" | 10 #include "src/base/platform/platform.h" |
| 11 #include "src/globals.h" | 11 #include "src/globals.h" |
| 12 | 12 |
| 13 namespace v8 { | 13 namespace v8 { |
| 14 namespace internal { | 14 namespace internal { |
| 15 | 15 |
| 16 class Page; | 16 class Page; |
| 17 class PagedSpace; | 17 class PagedSpace; |
| 18 class StoreBuffer; | 18 class StoreBuffer; |
| 19 | 19 |
| 20 typedef void (*ObjectSlotCallback)(HeapObject** from, HeapObject* to); | 20 typedef void (*ObjectSlotCallback)(HeapObject** from, HeapObject* to); |
| 21 | 21 |
| 22 typedef void (StoreBuffer::*RegionCallback)(Address start, |
| 23 Address end, |
| 24 ObjectSlotCallback slot_callback, |
| 25 bool clear_maps); |
| 26 |
| 22 // Used to implement the write barrier by collecting addresses of pointers | 27 // Used to implement the write barrier by collecting addresses of pointers |
| 23 // between spaces (for example, pointers from old space into new space). | 28 // between spaces (for example, pointers from old space into new space). |
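To make the "collecting addresses" idea concrete before the class itself, here is a minimal sketch of a bump-pointer store buffer as a write barrier might drive it; ToyStoreBuffer, RecordWrite, and HandleOverflow are illustrative names, not V8's API:

    // Conceptual sketch only: the mutator appends the address of each
    // cross-space store to a bump-pointer buffer; when the buffer fills,
    // a slow path (compression/filtering in V8) reclaims space.
    struct ToyStoreBuffer {
      void** top;    // next free entry
      void** limit;  // one past the last usable entry
      void RecordWrite(void** slot) {
        *top++ = slot;          // remember the slot's address, not its value
        if (top == limit) HandleOverflow();
      }
      void HandleOverflow();    // e.g. compress into an "old" buffer
    };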
| 24 class StoreBuffer { | 29 class StoreBuffer { |
| 25 public: | 30 public: |
| 26 explicit StoreBuffer(Heap* heap); | 31 explicit StoreBuffer(Heap* heap); |
| 27 | 32 |
| 28 static void StoreBufferOverflow(Isolate* isolate); | 33 static void StoreBufferOverflow(Isolate* isolate); |
| 29 | 34 |
| 30 inline Address TopAddress(); | 35 inline Address TopAddress(); |
| 31 | 36 |
| (...skipping 24 matching lines...) |
| 56 // surviving old-to-new pointers into the store buffer to rebuild it. | 61 // surviving old-to-new pointers into the store buffer to rebuild it. |
| 57 void IteratePointersToNewSpace(ObjectSlotCallback callback); | 62 void IteratePointersToNewSpace(ObjectSlotCallback callback); |
| 58 | 63 |
| 59 // Same as IteratePointersToNewSpace but additionally clears maps in objects | 64 // Same as IteratePointersToNewSpace but additionally clears maps in objects |
| 60 // referenced from the store buffer that do not contain a forwarding pointer. | 65 // referenced from the store buffer that do not contain a forwarding pointer. |
| 61 void IteratePointersToNewSpaceAndClearMaps(ObjectSlotCallback callback); | 66 void IteratePointersToNewSpaceAndClearMaps(ObjectSlotCallback callback); |
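For orientation, the callbacks passed here match the ObjectSlotCallback typedef above. The sketch below shows the expected shape of such a callback; ForwardedLocation is a hypothetical helper standing in for the real scavenge machinery:

    // 'slot' lives outside new space and currently points at 'object' in new
    // space. A scavenge-time callback copies or promotes 'object' and writes
    // the forwarded address back through the slot.
    static void UpdateSlot(HeapObject** slot, HeapObject* object) {
      HeapObject* moved = ForwardedLocation(object);  // hypothetical helper
      *slot = moved;
    }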
| 62 | 67 |
| 63 static const int kStoreBufferOverflowBit = 1 << (14 + kPointerSizeLog2); | 68 static const int kStoreBufferOverflowBit = 1 << (14 + kPointerSizeLog2); |
| 64 static const int kStoreBufferSize = kStoreBufferOverflowBit; | 69 static const int kStoreBufferSize = kStoreBufferOverflowBit; |
| 65 static const int kStoreBufferLength = kStoreBufferSize / sizeof(Address); | 70 static const int kStoreBufferLength = kStoreBufferSize / sizeof(Address); |
| 66 static const int kOldRegularStoreBufferLength = kStoreBufferLength * 16; | 71 static const int kOldStoreBufferLength = kStoreBufferLength * 16; |
| 67 static const int kHashSetLengthLog2 = 12; | 72 static const int kHashSetLengthLog2 = 12; |
| 68 static const int kHashSetLength = 1 << kHashSetLengthLog2; | 73 static const int kHashSetLength = 1 << kHashSetLengthLog2; |
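Working these constants through for a 64-bit build (where kPointerSizeLog2 is 3 and sizeof(Address) is 8) may help; the figures below simply evaluate the definitions above under that assumption:

    static const int kOverflowBit = 1 << (14 + 3);    // 131072, i.e. 128 KB
    static const int kBufferSize  = kOverflowBit;     // 128 KB of raw storage
    static const int kBufferLen   = kBufferSize / 8;  // 16384 recorded slots
    static const int kOldLen      = kBufferLen * 16;  // 262144 slots (2 MB)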
| 69 | 74 |
| 70 void Compact(); | 75 void Compact(); |
| 71 | 76 |
| 72 void GCPrologue(bool allow_overflow); | 77 void GCPrologue(); |
| 73 void GCEpilogue(); | 78 void GCEpilogue(); |
| 74 | 79 |
| 75 Object*** Limit() { return reinterpret_cast<Object***>(old_limit_); } | 80 Object*** Limit() { return reinterpret_cast<Object***>(old_limit_); } |
| 76 Object*** Start() { return reinterpret_cast<Object***>(old_start_); } | 81 Object*** Start() { return reinterpret_cast<Object***>(old_start_); } |
| 77 Object*** Top() { return reinterpret_cast<Object***>(old_top_); } | 82 Object*** Top() { return reinterpret_cast<Object***>(old_top_); } |
| 78 void SetTop(Object*** top) { | 83 void SetTop(Object*** top) { |
| 79 ASSERT(top >= Start()); | 84 ASSERT(top >= Start()); |
| 80 ASSERT(top <= Limit()); | 85 ASSERT(top <= Limit()); |
| 81 old_top_ = reinterpret_cast<Address*>(top); | 86 old_top_ = reinterpret_cast<Address*>(top); |
| 82 } | 87 } |
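A hedged sketch of how these accessors compose when draining the old buffer; DrainOldBuffer and ProcessSlot are hypothetical, shown only to illustrate the Start()/Top()/SetTop() protocol:

    void DrainOldBuffer(StoreBuffer* buffer) {
      for (Object*** entry = buffer->Start(); entry < buffer->Top(); entry++) {
        ProcessSlot(*entry);            // each entry is the address of a slot
      }
      buffer->SetTop(buffer->Start());  // logically empties the old buffer
    }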
| (...skipping 23 matching lines...) |
| 106 | 111 |
| 107 // The store buffer is divided up into a new buffer that is constantly being | 112 // The store buffer is divided up into a new buffer that is constantly being |
| 108 // filled by mutator activity and an old buffer that is filled with the data | 113 // filled by mutator activity and an old buffer that is filled with the data |
| 109 // from the new buffer after compression. | 114 // from the new buffer after compression. |
| 110 Address* start_; | 115 Address* start_; |
| 111 Address* limit_; | 116 Address* limit_; |
| 112 | 117 |
| 113 Address* old_start_; | 118 Address* old_start_; |
| 114 Address* old_limit_; | 119 Address* old_limit_; |
| 115 Address* old_top_; | 120 Address* old_top_; |
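The compression step described above can be pictured as follows; ToyCompact is an illustrative reduction of the real Compact(), using a single hash set and an assumed hash function where V8 uses two sets:

    // Copy entries from the young part into the old part, dropping addresses
    // the hash filter has already seen.
    void ToyCompact(Address* start, Address* top, Address** old_top,
                    uintptr_t* hash_set, int hash_length) {
      for (Address* p = start; p < top; p++) {
        uintptr_t slot = reinterpret_cast<uintptr_t>(*p);
        int index = static_cast<int>((slot >> 2) & (hash_length - 1));
        if (hash_set[index] == slot) continue;  // duplicate, already recorded
        hash_set[index] = slot;
        *(*old_top)++ = *p;                     // append to the old buffer
      }
    }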
| 116 | |
| 117 // The regular limit specifies how big the store buffer may become during | |
| 118 // mutator execution or while scavenging. | |
| 119 Address* old_regular_limit_; | |
| 120 | |
| 121 // The reserved limit is bigger than the regular limit. It should be the size | |
| 122 // of a semi-space to avoid new scan-on-scavenge during new space evacuation | |
| 123 // after sweeping in a full garbage collection. | |
| 124 Address* old_reserved_limit_; | 121 Address* old_reserved_limit_; |
| 125 | |
| 126 base::VirtualMemory* old_virtual_memory_; | 122 base::VirtualMemory* old_virtual_memory_; |
| 127 int old_store_buffer_length_; | |
| 128 | 123 |
| 129 bool old_buffer_is_sorted_; | 124 bool old_buffer_is_sorted_; |
| 130 bool old_buffer_is_filtered_; | 125 bool old_buffer_is_filtered_; |
| 131 | 126 bool during_gc_; |
| 132 // If allow_overflow_ is set, we allow the store buffer to grow until | |
| 133 // old_reserved_limit_. But we will shrink the store buffer in the epilogue to | |
| 134 // stay within the old_regular_limit_. | |
| 135 bool allow_overflow_; | |
| 136 | |
| 137 // The garbage collector iterates over many pointers to new space that are not | 127 // The garbage collector iterates over many pointers to new space that are not |
| 138 // handled by the store buffer. This flag indicates whether the pointers | 128 // handled by the store buffer. This flag indicates whether the pointers |
| 139 // found by the callbacks should be added to the store buffer or not. | 129 // found by the callbacks should be added to the store buffer or not. |
| 140 bool store_buffer_rebuilding_enabled_; | 130 bool store_buffer_rebuilding_enabled_; |
| 141 StoreBufferCallback callback_; | 131 StoreBufferCallback callback_; |
| 142 bool may_move_store_buffer_entries_; | 132 bool may_move_store_buffer_entries_; |
| 143 | 133 |
| 144 base::VirtualMemory* virtual_memory_; | 134 base::VirtualMemory* virtual_memory_; |
| 145 | 135 |
| 146 // Two hash sets used for filtering. | 136 // Two hash sets used for filtering. |
| 147 // If an address is in the hash set, then it is guaranteed to be in the | 137 // If an address is in the hash set, then it is guaranteed to be in the |
| 148 // old part of the store buffer. | 138 // old part of the store buffer. |
| 149 uintptr_t* hash_set_1_; | 139 uintptr_t* hash_set_1_; |
| 150 uintptr_t* hash_set_2_; | 140 uintptr_t* hash_set_2_; |
| 151 bool hash_sets_are_empty_; | 141 bool hash_sets_are_empty_; |
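The invariant stated above is one-sided, and can be checked as sketched below; DefinitelyRecorded is a hypothetical name and the exact hash functions are an assumption, not the real filter:

    // A hit in either set proves the address is already in the old part of
    // the buffer; a miss proves nothing, so callers must keep the entry.
    bool DefinitelyRecorded(uintptr_t addr, const uintptr_t* set1,
                            const uintptr_t* set2, int length_log2) {
      int mask = (1 << length_log2) - 1;
      int h1 = static_cast<int>((addr >> kPointerSizeLog2) & mask);
      int h2 = static_cast<int>(
          (addr >> (kPointerSizeLog2 + length_log2)) & mask);
      return set1[h1] == addr || set2[h2] == addr;
    }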
| 152 | 142 |
| 153 void ClearFilteringHashSets(); | 143 void ClearFilteringHashSets(); |
| 154 | 144 |
| 155 bool SpaceAvailable(intptr_t space_needed); | 145 bool SpaceAvailable(intptr_t space_needed); |
| 156 void Uniq(); | 146 void Uniq(); |
| 157 void ExemptPopularPages(int prime_sample_step, int threshold); | 147 void ExemptPopularPages(int prime_sample_step, int threshold); |
| 158 | 148 |
| 159 enum ExemptPopularPagesMode { | |
| 160 ENSURE_SPACE, | |
| 161 SHRINK_TO_REGULAR_SIZE | |
| 162 }; | |
| 163 | |
| 164 template <ExemptPopularPagesMode mode> | |
| 165 void IterativelyExemptPopularPages(intptr_t space_needed); | |
| 166 | |
| 167 // Set the map field of the object to NULL if it contains a map. | 149 // Set the map field of the object to NULL if it contains a map. |
| 168 inline void ClearDeadObject(HeapObject *object); | 150 inline void ClearDeadObject(HeapObject *object); |
| 169 | 151 |
| 170 void IteratePointersToNewSpace(ObjectSlotCallback callback, bool clear_maps); | 152 void IteratePointersToNewSpace(ObjectSlotCallback callback, bool clear_maps); |
| 171 | 153 |
| 172 void FindPointersToNewSpaceInRegion(Address start, | 154 void FindPointersToNewSpaceInRegion(Address start, |
| 173 Address end, | 155 Address end, |
| 174 ObjectSlotCallback slot_callback, | 156 ObjectSlotCallback slot_callback, |
| 175 bool clear_maps); | 157 bool clear_maps); |
| 176 | 158 |
| 159 // For each region of pointers on a page in use from an old space, call the |
| 160 // visit_pointer_region callback. |
| 161 // If either visit_pointer_region or the slot callback can cause an |
| 162 // allocation in old space and a change in the allocation watermark, then |
| 163 // can_preallocate_during_iteration should be set to true. |
| 164 void IteratePointersOnPage( |
| 165 PagedSpace* space, |
| 166 Page* page, |
| 167 RegionCallback region_callback, |
| 168 ObjectSlotCallback slot_callback); |
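To show how the RegionCallback member-function pointer introduced above is meant to be invoked, here is a hedged sketch; the WalkRegions helper and the region granularity are illustrative assumptions, not the actual implementation:

    void WalkRegions(StoreBuffer* buffer, Address page_start, Address page_end,
                     RegionCallback region_callback,
                     ObjectSlotCallback slot_callback) {
      const int kRegionSize = 1024;  // assumed granularity for illustration
      for (Address start = page_start; start < page_end; start += kRegionSize) {
        Address end = start + kRegionSize < page_end ? start + kRegionSize
                                                     : page_end;
        // Invoke the member-function pointer on each region of the page.
        (buffer->*region_callback)(start, end, slot_callback, false);
      }
    }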
| 169 |
| 177 void IteratePointersInStoreBuffer(ObjectSlotCallback slot_callback, | 170 void IteratePointersInStoreBuffer(ObjectSlotCallback slot_callback, |
| 178 bool clear_maps); | 171 bool clear_maps); |
| 179 | 172 |
| 180 #ifdef VERIFY_HEAP | 173 #ifdef VERIFY_HEAP |
| 181 void VerifyPointers(LargeObjectSpace* space); | 174 void VerifyPointers(LargeObjectSpace* space); |
| 182 #endif | 175 #endif |
| 183 | 176 |
| 184 friend class StoreBufferRebuildScope; | 177 friend class StoreBufferRebuildScope; |
| 185 friend class DontMoveStoreBufferEntriesScope; | 178 friend class DontMoveStoreBufferEntriesScope; |
| 186 }; | 179 }; |
| (...skipping 37 matching lines...) |
| 224 } | 217 } |
| 225 | 218 |
| 226 private: | 219 private: |
| 227 StoreBuffer* store_buffer_; | 220 StoreBuffer* store_buffer_; |
| 228 bool stored_state_; | 221 bool stored_state_; |
| 229 }; | 222 }; |
| 230 | 223 |
| 231 } } // namespace v8::internal | 224 } } // namespace v8::internal |
| 232 | 225 |
| 233 #endif // V8_STORE_BUFFER_H_ | 226 #endif // V8_STORE_BUFFER_H_ |