OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_STORE_BUFFER_H_ | 5 #ifndef V8_STORE_BUFFER_H_ |
6 #define V8_STORE_BUFFER_H_ | 6 #define V8_STORE_BUFFER_H_ |
7 | 7 |
8 #include "src/allocation.h" | 8 #include "src/allocation.h" |
9 #include "src/base/logging.h" | 9 #include "src/base/logging.h" |
10 #include "src/base/platform/platform.h" | 10 #include "src/base/platform/platform.h" |
11 #include "src/globals.h" | 11 #include "src/globals.h" |
12 | 12 |
13 namespace v8 { | 13 namespace v8 { |
14 namespace internal { | 14 namespace internal { |
15 | 15 |
16 class Page; | 16 class Page; |
17 class PagedSpace; | 17 class PagedSpace; |
18 class StoreBuffer; | 18 class StoreBuffer; |
19 | 19 |
20 typedef void (*ObjectSlotCallback)(HeapObject** from, HeapObject* to); | 20 typedef void (*ObjectSlotCallback)(HeapObject** from, HeapObject* to); |
21 | 21 |
22 typedef void (StoreBuffer::*RegionCallback)(Address start, | |
23 Address end, | |
24 ObjectSlotCallback slot_callback, | |
25 bool clear_maps); | |
26 | |
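
For orientation before the class itself: the store buffer is the log the write barrier appends to, recording slot addresses that may hold old-to-new pointers. A minimal sketch of that recording fast path follows, assuming illustrative names (StoreBufferSketch, RecordSlot, Overflow) rather than V8's actual entry points:

#include <stdint.h>

typedef uint8_t* Address;  // Stand-in for V8's Address type.

// Sketch only: the mutator appends the address of each written slot;
// reaching the end of the buffer triggers compaction into the old buffer.
class StoreBufferSketch {
 public:
  void RecordSlot(Address slot) {
    *top_++ = slot;                  // Append the slot address.
    if (top_ == limit_) Overflow();  // Cf. StoreBufferOverflow()/Compact().
  }

 private:
  void Overflow();
  Address* top_;
  Address* limit_;
};
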
27 // Used to implement the write barrier by collecting addresses of pointers | 22 // Used to implement the write barrier by collecting addresses of pointers |
28 // between spaces. | 23 // between spaces. |
29 class StoreBuffer { | 24 class StoreBuffer { |
30 public: | 25 public: |
31 explicit StoreBuffer(Heap* heap); | 26 explicit StoreBuffer(Heap* heap); |
32 | 27 |
33 static void StoreBufferOverflow(Isolate* isolate); | 28 static void StoreBufferOverflow(Isolate* isolate); |
34 | 29 |
35 inline Address TopAddress(); | 30 inline Address TopAddress(); |
36 | 31 |
(...skipping 24 matching lines...) |
61 // surviving old-to-new pointers into the store buffer to rebuild it. | 56 // surviving old-to-new pointers into the store buffer to rebuild it. |
62 void IteratePointersToNewSpace(ObjectSlotCallback callback); | 57 void IteratePointersToNewSpace(ObjectSlotCallback callback); |
63 | 58 |
64 // Same as IteratePointersToNewSpace but additionally clears maps in objects | 59 // Same as IteratePointersToNewSpace but additionally clears maps in objects |
65 // referenced from the store buffer that do not contain a forwarding pointer. | 60 // referenced from the store buffer that do not contain a forwarding pointer. |
66 void IteratePointersToNewSpaceAndClearMaps(ObjectSlotCallback callback); | 61 void IteratePointersToNewSpaceAndClearMaps(ObjectSlotCallback callback); |
67 | 62 |
68 static const int kStoreBufferOverflowBit = 1 << (14 + kPointerSizeLog2); | 63 static const int kStoreBufferOverflowBit = 1 << (14 + kPointerSizeLog2); |
69 static const int kStoreBufferSize = kStoreBufferOverflowBit; | 64 static const int kStoreBufferSize = kStoreBufferOverflowBit; |
70 static const int kStoreBufferLength = kStoreBufferSize / sizeof(Address); | 65 static const int kStoreBufferLength = kStoreBufferSize / sizeof(Address); |
71 static const int kOldStoreBufferLength = kStoreBufferLength * 16; | 66 static const int kOldRegularStoreBufferLength = kStoreBufferLength * 16; |
72 static const int kHashSetLengthLog2 = 12; | 67 static const int kHashSetLengthLog2 = 12; |
73 static const int kHashSetLength = 1 << kHashSetLengthLog2; | 68 static const int kHashSetLength = 1 << kHashSetLengthLog2; |
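
To make the arithmetic concrete: on a 64-bit target, kPointerSizeLog2 is 3 and sizeof(Address) is 8, so the constants above evaluate as follows (worked values only, no additional API):

// 64-bit example, assuming kPointerSizeLog2 == 3:
// kStoreBufferOverflowBit      = 1 << (14 + 3) = 131072  (128 KB)
// kStoreBufferSize             = 131072 bytes
// kStoreBufferLength           = 131072 / 8    = 16384 slots
// kOldRegularStoreBufferLength = 16384 * 16    = 262144 slots
// kHashSetLength               = 1 << 12       = 4096 entries per set
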
74 | 69 |
75 void Compact(); | 70 void Compact(); |
76 | 71 |
77 void GCPrologue(); | 72 void GCPrologue(bool allow_overflow); |
78 void GCEpilogue(); | 73 void GCEpilogue(); |
79 | 74 |
80 Object*** Limit() { return reinterpret_cast<Object***>(old_limit_); } | 75 Object*** Limit() { return reinterpret_cast<Object***>(old_limit_); } |
81 Object*** Start() { return reinterpret_cast<Object***>(old_start_); } | 76 Object*** Start() { return reinterpret_cast<Object***>(old_start_); } |
82 Object*** Top() { return reinterpret_cast<Object***>(old_top_); } | 77 Object*** Top() { return reinterpret_cast<Object***>(old_top_); } |
83 void SetTop(Object*** top) { | 78 void SetTop(Object*** top) { |
84 ASSERT(top >= Start()); | 79 ASSERT(top >= Start()); |
85 ASSERT(top <= Limit()); | 80 ASSERT(top <= Limit()); |
86 old_top_ = reinterpret_cast<Address*>(top); | 81 old_top_ = reinterpret_cast<Address*>(top); |
87 } | 82 } |
(...skipping 23 matching lines...) |
111 | 106 |
112 // The store buffer is divided up into a new buffer that is constantly being | 107 // The store buffer is divided up into a new buffer that is constantly being |
113 // filled by mutator activity and an old buffer that is filled with the data | 108 // filled by mutator activity and an old buffer that is filled with the data |
114 // from the new buffer after compression. | 109 // from the new buffer after compression. |
115 Address* start_; | 110 Address* start_; |
116 Address* limit_; | 111 Address* limit_; |
117 | 112 |
118 Address* old_start_; | 113 Address* old_start_; |
119 Address* old_limit_; | 114 Address* old_limit_; |
120 Address* old_top_; | 115 Address* old_top_; |
| 116 |
| 117 // The regular limit specifies how big the store buffer may become during |
| 118 // mutator execution or while scavenging. |
| 119 Address* old_regular_limit_; |
| 120 |
| 121 // The reserved limit is bigger than the regular limit. It should be the size |
| 122 // of a semi-space to avoid new scan-on-scavenge during new space evacuation |
| 123 // after sweeping in a full garbage collection. |
121 Address* old_reserved_limit_; | 124 Address* old_reserved_limit_; |
| 125 |
122 base::VirtualMemory* old_virtual_memory_; | 126 base::VirtualMemory* old_virtual_memory_; |
| 127 int old_store_buffer_length_; |
123 | 128 |
124 bool old_buffer_is_sorted_; | 129 bool old_buffer_is_sorted_; |
125 bool old_buffer_is_filtered_; | 130 bool old_buffer_is_filtered_; |
126 bool during_gc_; | 131 |
| 132 // If allow_overflow_ is set, we allow the store buffer to grow up to |
| 133 // old_reserved_limit_. The GC epilogue then shrinks the store buffer back |
| 134 // to stay within old_regular_limit_. |
| 135 bool allow_overflow_; |
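
A sketch of how the three limits and this flag fit together, restating the comments above in code; the struct and HardLimit helper are hypothetical names, not part of this header:

#include <stdint.h>

typedef uint8_t* Address;  // Stand-in for V8's Address type.

// Hypothetical mirror of the members declared above.
struct StoreBufferLimits {
  Address* old_top_;
  Address* old_limit_;           // Current committed end of the old buffer.
  Address* old_regular_limit_;   // Normal cap during mutator execution/scavenge.
  Address* old_reserved_limit_;  // Hard cap, about the size of a semi-space.
  bool allow_overflow_;
};

// While allow_overflow_ is set (new space evacuation after sweeping in a
// full GC), growth may continue up to the reserved limit; otherwise the
// regular limit applies. The epilogue shrinks back under the regular limit.
inline Address* HardLimit(const StoreBufferLimits& s) {
  return s.allow_overflow_ ? s.old_reserved_limit_ : s.old_regular_limit_;
}
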
| 136 |
127 // The garbage collector iterates over many pointers to new space that are not | 137 // The garbage collector iterates over many pointers to new space that are not |
128 // handled by the store buffer. This flag indicates whether the pointers | 138 // handled by the store buffer. This flag indicates whether the pointers |
129 // found by the callbacks should be added to the store buffer or not. | 139 // found by the callbacks should be added to the store buffer or not. |
130 bool store_buffer_rebuilding_enabled_; | 140 bool store_buffer_rebuilding_enabled_; |
131 StoreBufferCallback callback_; | 141 StoreBufferCallback callback_; |
132 bool may_move_store_buffer_entries_; | 142 bool may_move_store_buffer_entries_; |
133 | 143 |
134 base::VirtualMemory* virtual_memory_; | 144 base::VirtualMemory* virtual_memory_; |
135 | 145 |
136 // Two hash sets used for filtering. | 146 // Two hash sets used for filtering. |
138 // If an address is in the hash set then it is guaranteed to be in the | 148 // If an address is in the hash set then it is guaranteed to be in the |
139 // old part of the store buffer. | 149 // old part of the store buffer. |
139 uintptr_t* hash_set_1_; | 149 uintptr_t* hash_set_1_; |
140 uintptr_t* hash_set_2_; | 150 uintptr_t* hash_set_2_; |
141 bool hash_sets_are_empty_; | 151 bool hash_sets_are_empty_; |
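
Restating the filtering guarantee as a sketch: a hit in either set proves the address is already in the old part of the buffer, so it can be skipped; a miss inserts the address so its next occurrence is filtered. The two hash functions below are assumptions for illustration:

#include <stdint.h>

static const int kHashSetLengthLog2Sketch = 12;
static const int kHashSetLengthSketch = 1 << kHashSetLengthLog2Sketch;

// Conservative duplicate filter: collisions only evict a cached entry,
// so the filter may fail to filter, but a hit is always a true duplicate.
bool FilterDuplicate(uintptr_t* set1, uintptr_t* set2, uintptr_t addr) {
  uintptr_t h1 = (addr >> 2) & (kHashSetLengthSketch - 1);
  uintptr_t h2 =
      (addr >> (2 + kHashSetLengthLog2Sketch)) & (kHashSetLengthSketch - 1);
  if (set1[h1] == addr || set2[h2] == addr) return true;  // Already recorded.
  set1[h1] = addr;  // Cache for the next occurrence.
  return false;
}
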
142 | 152 |
143 void ClearFilteringHashSets(); | 153 void ClearFilteringHashSets(); |
144 | 154 |
145 bool SpaceAvailable(intptr_t space_needed); | 155 bool SpaceAvailable(intptr_t space_needed); |
146 void Uniq(); | 156 void Uniq(); |
147 void ExemptPopularPages(int prime_sample_step, int threshold); | 157 void ExemptPopularPages(int prime_sample_step, int threshold); |
148 | 158 |
| 159 enum ExemptPopularPagesMode { |
| 160 ENSURE_SPACE, |
| 161 SHRINK_TO_REGULAR_SIZE |
| 162 }; |
| 163 |
| 164 template <ExemptPopularPagesMode mode> |
| 165 void IterativelyExemptPopularPages(intptr_t space_needed); |
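
One plausible reading of this template, as a sketch: the halving threshold schedule, the stride, and the WithinRegularLimit helper are assumptions; SpaceAvailable and ExemptPopularPages stand for the members declared in this header:

#include <stdint.h>

enum ExemptPopularPagesModeSketch { ENSURE_SPACE_SKETCH, SHRINK_SKETCH };

bool SpaceAvailable(intptr_t space_needed);                     // As above.
bool WithinRegularLimit();                                      // Hypothetical.
void ExemptPopularPages(int prime_sample_step, int threshold);  // As above.

// Sketch: keep exempting the most popular pages (marking them
// scan-on-scavenge and dropping their entries) until the mode's goal
// holds: enough free slots, or a buffer back under the regular limit.
template <ExemptPopularPagesModeSketch mode>
void IterativelyExemptPopularPagesSketch(intptr_t space_needed) {
  static const int kSampleStep = 97;  // Assumed prime stride.
  for (int threshold = 1 << 10; threshold > 0; threshold >>= 1) {
    bool done = (mode == ENSURE_SPACE_SKETCH) ? SpaceAvailable(space_needed)
                                              : WithinRegularLimit();
    if (done) return;
    ExemptPopularPages(kSampleStep, threshold);
  }
}
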
| 166 |
149 // Set the map field of the object to NULL if it contains a map. | 167 // Set the map field of the object to NULL if it contains a map. |
150 inline void ClearDeadObject(HeapObject *object); | 168 inline void ClearDeadObject(HeapObject *object); |
151 | 169 |
152 void IteratePointersToNewSpace(ObjectSlotCallback callback, bool clear_maps); | 170 void IteratePointersToNewSpace(ObjectSlotCallback callback, bool clear_maps); |
153 | 171 |
154 void FindPointersToNewSpaceInRegion(Address start, | 172 void FindPointersToNewSpaceInRegion(Address start, |
155 Address end, | 173 Address end, |
156 ObjectSlotCallback slot_callback, | 174 ObjectSlotCallback slot_callback, |
157 bool clear_maps); | 175 bool clear_maps); |
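
For orientation, a simplified version of what such a region scan does (clear_maps handling omitted; InNewSpace is assumed here as the heap's new-space predicate):

#include <stdint.h>

typedef uint8_t* Address;  // Stand-in for V8's Address type.
class HeapObject;
typedef void (*ObjectSlotCallback)(HeapObject** from, HeapObject* to);

bool InNewSpace(HeapObject* object);  // Assumed heap predicate.

// Sketch: visit every pointer-aligned slot in [start, end) and hand
// slots that point into new space to the callback, which may rewrite
// the slot when the target object has been moved.
void FindPointersToNewSpaceInRegionSketch(Address start, Address end,
                                          ObjectSlotCallback slot_callback) {
  for (Address slot = start; slot < end; slot += sizeof(Address)) {
    HeapObject** cell = reinterpret_cast<HeapObject**>(slot);
    HeapObject* value = *cell;
    if (InNewSpace(value)) {
      slot_callback(cell, value);
    }
  }
}
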
158 | 176 |
159 // For each region of pointers on a page in use from an old space call | |
160 // visit_pointer_region callback. | |
161 // If either visit_pointer_region or callback can cause an allocation | |
162 // in old space and changes in allocation watermark then | |
163 // can_preallocate_during_iteration should be set to true. | |
164 void IteratePointersOnPage( | |
165 PagedSpace* space, | |
166 Page* page, | |
167 RegionCallback region_callback, | |
168 ObjectSlotCallback slot_callback); | |
169 | |
170 void IteratePointersInStoreBuffer(ObjectSlotCallback slot_callback, | 177 void IteratePointersInStoreBuffer(ObjectSlotCallback slot_callback, |
171 bool clear_maps); | 178 bool clear_maps); |
172 | 179 |
173 #ifdef VERIFY_HEAP | 180 #ifdef VERIFY_HEAP |
174 void VerifyPointers(LargeObjectSpace* space); | 181 void VerifyPointers(LargeObjectSpace* space); |
175 #endif | 182 #endif |
176 | 183 |
177 friend class StoreBufferRebuildScope; | 184 friend class StoreBufferRebuildScope; |
178 friend class DontMoveStoreBufferEntriesScope; | 185 friend class DontMoveStoreBufferEntriesScope; |
179 }; | 186 }; |
(...skipping 37 matching lines...) |
217 } | 224 } |
218 | 225 |
219 private: | 226 private: |
220 StoreBuffer* store_buffer_; | 227 StoreBuffer* store_buffer_; |
221 bool stored_state_; | 228 bool stored_state_; |
222 }; | 229 }; |
223 | 230 |
224 } } // namespace v8::internal | 231 } } // namespace v8::internal |
225 | 232 |
226 #endif // V8_STORE_BUFFER_H_ | 233 #endif // V8_STORE_BUFFER_H_ |