OLD | NEW |
1 // Copyright 2016 the V8 project authors. All rights reserved. | 1 // Copyright 2016 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_REMEMBERED_SET_H | 5 #ifndef V8_REMEMBERED_SET_H |
6 #define V8_REMEMBERED_SET_H | 6 #define V8_REMEMBERED_SET_H |
7 | 7 |
8 #include "src/heap/heap.h" | 8 #include "src/heap/heap.h" |
9 #include "src/heap/slot-set.h" | 9 #include "src/heap/slot-set.h" |
10 #include "src/heap/spaces.h" | 10 #include "src/heap/spaces.h" |
(...skipping 38 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
49 uintptr_t start_offset = start - page->address(); | 49 uintptr_t start_offset = start - page->address(); |
50 uintptr_t end_offset = end - page->address(); | 50 uintptr_t end_offset = end - page->address(); |
51 DCHECK_LT(start_offset, end_offset); | 51 DCHECK_LT(start_offset, end_offset); |
52 DCHECK_LE(end_offset, static_cast<uintptr_t>(Page::kPageSize)); | 52 DCHECK_LE(end_offset, static_cast<uintptr_t>(Page::kPageSize)); |
53 slot_set->RemoveRange(static_cast<uint32_t>(start_offset), | 53 slot_set->RemoveRange(static_cast<uint32_t>(start_offset), |
54 static_cast<uint32_t>(end_offset)); | 54 static_cast<uint32_t>(end_offset)); |
55 } | 55 } |
56 } | 56 } |
57 | 57 |
58 // Iterates and filters the remembered set with the given callback. | 58 // Iterates and filters the remembered set with the given callback. |
59 // The callback should take (Address slot) and return SlotCallbackResult. | 59 // The callback should take (Address slot) and return SlotSet::CallbackResult. |
60 template <typename Callback> | 60 template <typename Callback> |
61 static void Iterate(Heap* heap, Callback callback) { | 61 static void Iterate(Heap* heap, Callback callback) { |
62 MemoryChunkIterator it(heap, direction == OLD_TO_OLD | 62 PointerChunkIterator it(heap); |
63 ? MemoryChunkIterator::ALL | |
64 : MemoryChunkIterator::ALL_BUT_CODE_SPACE); | |
65 MemoryChunk* chunk; | 63 MemoryChunk* chunk; |
66 while ((chunk = it.next()) != nullptr) { | 64 while ((chunk = it.next()) != nullptr) { |
67 SlotSet* slots = GetSlotSet(chunk); | 65 SlotSet* slots = GetSlotSet(chunk); |
68 if (slots != nullptr) { | 66 if (slots != nullptr) { |
69 size_t pages = (chunk->size() + Page::kPageSize - 1) / Page::kPageSize; | 67 size_t pages = (chunk->size() + Page::kPageSize - 1) / Page::kPageSize; |
70 int new_count = 0; | 68 int new_count = 0; |
71 for (size_t page = 0; page < pages; page++) { | 69 for (size_t page = 0; page < pages; page++) { |
72 new_count += slots[page].Iterate(callback); | 70 new_count += slots[page].Iterate(callback); |
73 } | 71 } |
74 if (new_count == 0) { | 72 if (new_count == 0) { |
75 ReleaseSlotSet(chunk); | 73 ReleaseSlotSet(chunk); |
76 } | 74 } |
77 } | 75 } |
78 } | 76 } |
79 } | 77 } |
80 | 78 |
81 // Iterates and filters the remembered set with the given callback. | 79 // Iterates and filters the remembered set with the given callback. |
82 // The callback should take (HeapObject** slot, HeapObject* target) and | 80 // The callback should take (HeapObject** slot, HeapObject* target) and |
83 // update the slot. | 81 // update the slot. |
84 // A special wrapper takes care of filtering the slots based on their values. | 82 // A special wrapper takes care of filtering the slots based on their values. |
85 // For OLD_TO_NEW case: slots that do not point to the ToSpace after | 83 // For OLD_TO_NEW case: slots that do not point to the ToSpace after |
86 // callback invocation will be removed from the set. | 84 // callback invocation will be removed from the set. |
87 template <typename Callback> | 85 template <typename Callback> |
88 static void IterateWithWrapper(Heap* heap, Callback callback) { | 86 static void IterateWithWrapper(Heap* heap, Callback callback) { |
89 Iterate(heap, [heap, callback](Address addr) { | 87 Iterate(heap, [heap, callback](Address addr) { |
90 return Wrapper(heap, addr, callback); | 88 return Wrapper(heap, addr, callback); |
91 }); | 89 }); |
92 } | 90 } |
93 | 91 |
94 // Given a page and a typed slot in that page, this function adds the slot | |
95 // to the remembered set. | |
96 static void InsertTyped(Page* page, SlotType slot_type, Address slot_addr) { | |
97 STATIC_ASSERT(direction == OLD_TO_OLD); | |
98 TypedSlotSet* slot_set = page->typed_old_to_old_slots(); | |
99 if (slot_set == nullptr) { | |
100 page->AllocateTypedOldToOldSlots(); | |
101 slot_set = page->typed_old_to_old_slots(); | |
102 } | |
103 uintptr_t offset = slot_addr - page->address(); | |
104 DCHECK_LT(offset, static_cast<uintptr_t>(TypedSlotSet::kMaxOffset)); | |
105 slot_set->Insert(slot_type, static_cast<uint32_t>(offset)); | |
106 } | |
107 | |
108 // Given a page and a range of typed slots in that page, this function removes | |
109 // the slots from the remembered set. | |
110 static void RemoveRangeTyped(Page* page, Address start, Address end) { | |
111 TypedSlotSet* slots = page->typed_old_to_old_slots(); | |
112 if (slots != nullptr) { | |
113 slots->Iterate([start, end](SlotType slot_type, Address slot_addr) { | |
114 return start <= slot_addr && slot_addr < end ? REMOVE_SLOT : KEEP_SLOT; | |
115 }); | |
116 } | |
117 } | |
118 | |
119 // Iterates and filters typed old to old pointers with the given callback. | |
120 // The callback should take (SlotType slot_type, Address slot_addr) and | |
121 // return SlotCallbackResult. | |
122 template <typename Callback> | |
123 static void IterateTyped(Heap* heap, Callback callback) { | |
124 MemoryChunkIterator it(heap, MemoryChunkIterator::ALL_BUT_MAP_SPACE); | |
125 MemoryChunk* chunk; | |
126 while ((chunk = it.next()) != nullptr) { | |
127 TypedSlotSet* slots = chunk->typed_old_to_old_slots(); | |
128 if (slots != nullptr) { | |
129 int new_count = slots->Iterate(callback); | |
130 if (new_count == 0) { | |
131 chunk->ReleaseTypedOldToOldSlots(); | |
132 } | |
133 } | |
134 } | |
135 } | |
136 | |
137 // Clear all old to old slots from the remembered set. | |
138 static void ClearAll(Heap* heap) { | |
139 STATIC_ASSERT(direction == OLD_TO_OLD); | |
140 MemoryChunkIterator it(heap, MemoryChunkIterator::ALL); | |
141 MemoryChunk* chunk; | |
142 while ((chunk = it.next()) != nullptr) { | |
143 chunk->ReleaseOldToOldSlots(); | |
144 chunk->ReleaseTypedOldToOldSlots(); | |
145 } | |
146 } | |
147 | |
148 // Eliminates all stale slots from the remembered set, i.e. | 92 // Eliminates all stale slots from the remembered set, i.e. |
149 // slots that are not part of live objects anymore. This method must be | 93 // slots that are not part of live objects anymore. This method must be |
150 // called after marking, when the whole transitive closure is known and | 94 // called after marking, when the whole transitive closure is known and |
151 // must be called before sweeping when mark bits are still intact. | 95 // must be called before sweeping when mark bits are still intact. |
152 static void ClearInvalidSlots(Heap* heap); | 96 static void ClearInvalidSlots(Heap* heap); |
153 | 97 |
154 static void VerifyValidSlots(Heap* heap); | 98 static void VerifyValidSlots(Heap* heap); |
155 | 99 |
156 private: | 100 private: |
157 static SlotSet* GetSlotSet(MemoryChunk* chunk) { | 101 static SlotSet* GetSlotSet(MemoryChunk* chunk) { |
(...skipping 16 matching lines...) Expand all Loading... |
174 if (direction == OLD_TO_OLD) { | 118 if (direction == OLD_TO_OLD) { |
175 chunk->AllocateOldToOldSlots(); | 119 chunk->AllocateOldToOldSlots(); |
176 return chunk->old_to_old_slots(); | 120 return chunk->old_to_old_slots(); |
177 } else { | 121 } else { |
178 chunk->AllocateOldToNewSlots(); | 122 chunk->AllocateOldToNewSlots(); |
179 return chunk->old_to_new_slots(); | 123 return chunk->old_to_new_slots(); |
180 } | 124 } |
181 } | 125 } |
182 | 126 |
183 template <typename Callback> | 127 template <typename Callback> |
184 static SlotCallbackResult Wrapper(Heap* heap, Address slot_address, | 128 static SlotSet::CallbackResult Wrapper(Heap* heap, Address slot_address, |
185 Callback slot_callback) { | 129 Callback slot_callback) { |
186 STATIC_ASSERT(direction == OLD_TO_NEW); | 130 STATIC_ASSERT(direction == OLD_TO_NEW); |
187 Object** slot = reinterpret_cast<Object**>(slot_address); | 131 Object** slot = reinterpret_cast<Object**>(slot_address); |
188 Object* object = *slot; | 132 Object* object = *slot; |
189 if (heap->InFromSpace(object)) { | 133 if (heap->InFromSpace(object)) { |
190 HeapObject* heap_object = reinterpret_cast<HeapObject*>(object); | 134 HeapObject* heap_object = reinterpret_cast<HeapObject*>(object); |
191 DCHECK(heap_object->IsHeapObject()); | 135 DCHECK(heap_object->IsHeapObject()); |
192 slot_callback(reinterpret_cast<HeapObject**>(slot), heap_object); | 136 slot_callback(reinterpret_cast<HeapObject**>(slot), heap_object); |
193 object = *slot; | 137 object = *slot; |
194 // If the object was in from space before and is after executing the | 138 // If the object was in from space before and is after executing the |
195 // callback in to space, the object is still live. | 139 // callback in to space, the object is still live. |
196 // Unfortunately, we do not know about the slot. It could be in a | 140 // Unfortunately, we do not know about the slot. It could be in a |
197 // just freed free space object. | 141 // just freed free space object. |
198 if (heap->InToSpace(object)) { | 142 if (heap->InToSpace(object)) { |
199 return KEEP_SLOT; | 143 return SlotSet::KEEP_SLOT; |
200 } | 144 } |
201 } else { | 145 } else { |
202 DCHECK(!heap->InNewSpace(object)); | 146 DCHECK(!heap->InNewSpace(object)); |
203 } | 147 } |
204 return REMOVE_SLOT; | 148 return SlotSet::REMOVE_SLOT; |
205 } | 149 } |
206 | 150 |
207 static bool IsValidSlot(Heap* heap, Object** slot); | 151 static bool IsValidSlot(Heap* heap, Object** slot); |
208 }; | 152 }; |
209 | 153 |
210 // Buffer for keeping thread local migration slots during compaction. | |
211 // TODO(ulan): Remove this once every thread gets local pages in compaction | |
212 // space. | |
213 class LocalSlotsBuffer BASE_EMBEDDED { | |
214 public: | |
215 LocalSlotsBuffer() : top_(new Node(nullptr)) {} | |
216 | |
217 ~LocalSlotsBuffer() { | |
218 Node* current = top_; | |
219 while (current != nullptr) { | |
220 Node* tmp = current->next; | |
221 delete current; | |
222 current = tmp; | |
223 } | |
224 } | |
225 | |
226 void Record(Address addr) { | |
227 EnsureSpaceFor(1); | |
228 uintptr_t entry = reinterpret_cast<uintptr_t>(addr); | |
229 DCHECK_GE(entry, static_cast<uintptr_t>(NUMBER_OF_SLOT_TYPES)); | |
230 Insert(entry); | |
231 } | |
232 | |
233 void Record(SlotType type, Address addr) { | |
234 EnsureSpaceFor(2); | |
235 Insert(static_cast<uintptr_t>(type)); | |
236 uintptr_t entry = reinterpret_cast<uintptr_t>(addr); | |
237 DCHECK_GE(entry, static_cast<uintptr_t>(NUMBER_OF_SLOT_TYPES)); | |
238 Insert(entry); | |
239 } | |
240 | |
241 template <typename UntypedCallback, typename TypedCallback> | |
242 void Iterate(UntypedCallback untyped_callback, TypedCallback typed_callback) { | |
243 Node* current = top_; | |
244 bool typed = false; | |
245 SlotType type; | |
246 Address addr; | |
247 while (current != nullptr) { | |
248 for (int i = 0; i < current->count; i++) { | |
249 uintptr_t entry = current->buffer[i]; | |
250 if (entry < NUMBER_OF_SLOT_TYPES) { | |
251 DCHECK(!typed); | |
252 typed = true; | |
253 type = static_cast<SlotType>(entry); | |
254 } else { | |
255 addr = reinterpret_cast<Address>(entry); | |
256 if (typed) { | |
257 typed_callback(type, addr); | |
258 typed = false; | |
259 } else { | |
260 untyped_callback(addr); | |
261 } | |
262 } | |
263 } | |
264 current = current->next; | |
265 } | |
266 } | |
267 | |
268 private: | |
269 void EnsureSpaceFor(int count) { | |
270 if (top_->remaining_free_slots() < count) top_ = new Node(top_); | |
271 } | |
272 | |
273 void Insert(uintptr_t entry) { top_->buffer[top_->count++] = entry; } | |
274 | |
275 static const int kBufferSize = 16 * KB; | |
276 | |
277 struct Node : Malloced { | |
278 explicit Node(Node* next_node) : next(next_node), count(0) {} | |
279 | |
280 inline int remaining_free_slots() { return kBufferSize - count; } | |
281 | |
282 Node* next; | |
283 uintptr_t buffer[kBufferSize]; | |
284 int count; | |
285 }; | |
286 | |
287 Node* top_; | |
288 }; | |
289 | |
290 } // namespace internal | 154 } // namespace internal |
291 } // namespace v8 | 155 } // namespace v8 |
292 | 156 |
293 #endif // V8_REMEMBERED_SET_H | 157 #endif // V8_REMEMBERED_SET_H |
OLD | NEW |